[
  {
    "path": ".gitattributes",
    "content": "*.ipynb linguist-documentation\n"
  },
  {
    "path": ".github/FUNDING.yml",
    "content": "# These are supported funding model platforms\ngithub: rwightman\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/bug_report.md",
    "content": "---\nname: Bug report\nabout: Create a bug report to help us improve. Issues are for reporting bugs or requesting\n  features, the discussion forum is available for asking questions or seeking help\n  from the community.\ntitle: \"[BUG] Issue title...\"\nlabels: bug\nassignees: rwightman\n\n---\n\n**Describe the bug**\nA clear and concise description of what the bug is.\n\n**To Reproduce**\nSteps to reproduce the behavior:\n1.\n2.\n\n**Expected behavior**\nA clear and concise description of what you expected to happen.\n\n**Screenshots**\nIf applicable, add screenshots to help explain your problem.\n\n**Desktop (please complete the following information):**\n - OS: [e.g. Windows 10, Ubuntu 18.04]\n - This repository version [e.g. pip 0.3.1 or commit ref]\n - PyTorch version w/ CUDA/cuDNN  [e.g. from `conda list`, 1.7.0  py3.8_cuda11.0.221_cudnn8.0.3_0]\n\n**Additional context**\nAdd any other context about the problem here.\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/config.yml",
    "content": "blank_issues_enabled: false\ncontact_links:\n  - name: Community Discussions\n    url: https://github.com/rwightman/pytorch-image-models/discussions\n    about: Hparam request in issues will be ignored! Issues are for features and bugs. Questions can be asked in Discussions.\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/feature_request.md",
    "content": "---\nname: Feature request\nabout: Suggest an idea for this project. Hparam requests, training help are not feature requests.\n  The discussion forum is available for asking questions or seeking help from the community.\ntitle: \"[FEATURE] Feature title...\"\nlabels: enhancement\nassignees: ''\n\n---\n\n**Is your feature request related to a problem? Please describe.**\nA clear and concise description of what the problem is.\n\n**Describe the solution you'd like**\nA clear and concise description of what you want to happen.\n\n**Describe alternatives you've considered**\nA clear and concise description of any alternative solutions or features you've considered.\n\n**Additional context**\nAdd any other context or screenshots about the feature request here.\n"
  },
  {
    "path": ".github/workflows/build_documentation.yml",
    "content": "name: Build documentation\n\non:\n  push:\n    branches:\n      - main\n      - doc-builder*\n      - v*-release\n\njobs:\n  build:\n    uses: huggingface/doc-builder/.github/workflows/build_main_documentation.yml@main\n    with:\n      commit_sha: ${{ github.sha }}\n      package: pytorch-image-models\n      package_name: timm\n      path_to_docs: pytorch-image-models/hfdocs/source\n      version_tag_suffix: \"\"\n    secrets:\n      hf_token: ${{ secrets.HF_DOC_BUILD_PUSH }}\n"
  },
  {
    "path": ".github/workflows/build_pr_documentation.yml",
    "content": "name: Build PR Documentation\n\non:\n  pull_request:\n\nconcurrency:\n  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}\n  cancel-in-progress: true\n\njobs:\n  build:\n    uses: huggingface/doc-builder/.github/workflows/build_pr_documentation.yml@main\n    with:\n      commit_sha: ${{ github.event.pull_request.head.sha }}\n      pr_number: ${{ github.event.number }}\n      package: pytorch-image-models\n      package_name: timm\n      path_to_docs: pytorch-image-models/hfdocs/source\n      version_tag_suffix: \"\"\n"
  },
  {
    "path": ".github/workflows/tests.yml",
    "content": "name: Python tests\n\non:\n  push:\n    branches: [ main ]\n  pull_request:\n    branches: [ main ]\n\nenv:\n  OMP_NUM_THREADS: 2\n  MKL_NUM_THREADS: 2\n\njobs:\n  test:\n    name: Run tests on ${{ matrix.os }} with Python ${{ matrix.python }}\n    strategy:\n      matrix:\n        os: [ubuntu-latest]\n        python: ['3.10', '3.13']\n        torch: [{base: '1.13.0', vision: '0.14.0'}, {base: '2.9.1', vision: '0.24.1'}]\n        testmarker: ['-k \"not test_models\"', '-m base', '-m cfg', '-m torchscript', '-m features', '-m fxforward', '-m fxbackward']\n        exclude:\n          - python: '3.13'\n            torch: {base: '1.13.0', vision: '0.14.0'}\n    runs-on: ${{ matrix.os }}\n\n    steps:\n    - uses: actions/checkout@v6\n    - name: Set up Python ${{ matrix.python }}\n      uses: actions/setup-python@v6\n      with:\n        python-version: ${{ matrix.python }}\n    - name: Install testing dependencies\n      run: |\n        python -m pip install --upgrade pip\n        pip install -r requirements-dev.txt\n    - name: Install torch on mac\n      if: startsWith(matrix.os, 'macOS')\n      run: pip install --no-cache-dir torch==${{ matrix.torch.base }} torchvision==${{ matrix.torch.vision }}\n    - name: Install torch on Windows\n      if: startsWith(matrix.os, 'windows')\n      run: pip install --no-cache-dir torch==${{ matrix.torch.base }} torchvision==${{ matrix.torch.vision }}\n    - name: Install torch on ubuntu\n      if: startsWith(matrix.os, 'ubuntu')\n      run: |\n        sudo sed -i 's/azure\\.//' /etc/apt/sources.list\n        sudo apt update\n        sudo apt install -y google-perftools\n        pip install --no-cache-dir torch==${{ matrix.torch.base }}+cpu torchvision==${{ matrix.torch.vision }}+cpu --index-url https://download.pytorch.org/whl/cpu\n    - name: Install requirements\n      run: |\n        pip install -r requirements.txt\n    - name: Force old numpy for old torch\n      if: ${{ matrix.torch.base == '1.13.0' }}\n      
run: pip install --upgrade 'numpy<2.0'\n    - name: Run tests on Windows\n      if: startsWith(matrix.os, 'windows')\n      env:\n        PYTHONDONTWRITEBYTECODE: 1\n      run: |\n        pytest -vv tests\n    - name: Run '${{ matrix.testmarker }}' tests on Linux / Mac\n      if: ${{ !startsWith(matrix.os, 'windows') }}\n      env:\n        LD_PRELOAD: /usr/lib/x86_64-linux-gnu/libtcmalloc.so.4\n        PYTHONDONTWRITEBYTECODE: 1\n      run: |\n        pytest -vv --forked --durations=0 ${{ matrix.testmarker }} tests\n"
  },
  {
    "path": ".github/workflows/trufflehog.yml",
    "content": "on:\n  push:\n\nname: Secret Leaks\n\njobs:\n  trufflehog:\n    runs-on: ubuntu-latest\n    steps:\n    - name: Checkout code\n      uses: actions/checkout@v6\n      with:\n        fetch-depth: 0\n    - name: Secret Scanning\n      uses: trufflesecurity/trufflehog@main\n"
  },
  {
    "path": ".github/workflows/upload_pr_documentation.yml",
    "content": "name: Upload PR Documentation\n\non:\n  workflow_run:\n    workflows: [\"Build PR Documentation\"]\n    types:\n      - completed\n\njobs:\n  build:\n    uses: huggingface/doc-builder/.github/workflows/upload_pr_documentation.yml@main\n    with:\n      package_name: timm\n    secrets:\n      hf_token: ${{ secrets.HF_DOC_BUILD_PUSH }}\n      comment_bot_token: ${{ secrets.COMMENT_BOT_TOKEN }}\n"
  },
  {
    "path": ".gitignore",
    "content": "# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n# C extensions\n*.so\n\n# Distribution / packaging\n.Python\nbuild/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib/\nlib64/\nparts/\nsdist/\nvar/\nwheels/\n*.egg-info/\n.installed.cfg\n*.egg\nMANIFEST\n\n# PyInstaller\n#  Usually these files are written by a python script from a template\n#  before PyInstaller builds the exe, so as to inject date/other infos into it.\n*.manifest\n*.spec\n\n# Installer logs\npip-log.txt\npip-delete-this-directory.txt\n\n# Unit test / coverage reports\nhtmlcov/\n.tox/\n.coverage\n.coverage.*\n.cache\nnosetests.xml\ncoverage.xml\n*.cover\n.hypothesis/\n.pytest_cache/\n\n# Translations\n*.mo\n*.pot\n\n# Django stuff:\n*.log\nlocal_settings.py\ndb.sqlite3\n\n# Flask stuff:\ninstance/\n.webassets-cache\n\n# Scrapy stuff:\n.scrapy\n\n# Sphinx documentation\ndocs/_build/\n\n# PyBuilder\ntarget/\n\n# Jupyter Notebook\n.ipynb_checkpoints\n\n# pyenv\n.python-version\n\n# celery beat schedule file\ncelerybeat-schedule\n\n# SageMath parsed files\n*.sage.py\n\n# Environments\n.env\n.venv\nenv/\nvenv/\nENV/\nenv.bak/\nvenv.bak/\n\n# Spyder project settings\n.spyderproject\n.spyproject\n\n# Rope project settings\n.ropeproject\n\n# PyCharm\n.idea\n\noutput/\n\n# PyTorch weights\n*.tar\n*.pth\n*.pt\n*.torch\n*.gz\nUntitled.ipynb\nTesting notebook.ipynb\n\n# Root dir exclusions\n/*.csv\n/*.yaml\n/*.json\n/*.jpg\n/*.png\n/*.zip\n/*.tar.*"
  },
  {
    "path": "CITATION.cff",
    "content": "message: \"If you use this software, please cite it as below.\"\ntitle: \"PyTorch Image Models\"\ndoi: \"10.5281/zenodo.4414861\"\nauthors:\n  - family-names: Wightman\n    given-names: Ross\nversion: 1.0.11\nyear: \"2019\"\nurl: \"https://github.com/huggingface/pytorch-image-models\"\nlicense: \"Apache 2.0\"\n"
  },
  {
    "path": "CLAUDE.md",
    "content": "# CLAUDE.md - PyTorch Image Models (timm)\n\n## Build/Test Commands\n- Install: `python -m pip install -e .`\n- Run tests: `pytest tests/`\n- Run specific test: `pytest tests/test_models.py::test_specific_function -v`\n- Run tests in parallel: `pytest -n 4 tests/`\n- Filter tests: `pytest -k \"substring-to-match\" tests/`\n\n## Code Style Guidelines\n- Line length: 120 chars\n- Indentation: 4-space hanging indents, arguments should have an extra level of indent, use 'sadface' (closing parenthesis and colon on a separate line)\n- Typing: Use PEP484 type annotations in function signatures\n- Docstrings: Google style (do not duplicate type annotations and defaults)\n- Imports: Standard library first, then third-party, then local\n- Function naming: snake_case\n- Class naming: PascalCase\n- Error handling: Use try/except with specific exceptions\n- Conditional expressions: Use parentheses for complex expressions"
  },
  {
    "path": "CODE_OF_CONDUCT.md",
    "content": "# Contributor Covenant Code of Conduct\n\n## Our Pledge\n\nWe as members, contributors, and leaders pledge to make participation in our\ncommunity a harassment-free experience for everyone, regardless of age, body\nsize, visible or invisible disability, ethnicity, sex characteristics, gender\nidentity and expression, level of experience, education, socio-economic status,\nnationality, personal appearance, race, caste, color, religion, or sexual\nidentity and orientation.\n\nWe pledge to act and interact in ways that contribute to an open, welcoming,\ndiverse, inclusive, and healthy community.\n\n## Our Standards\n\nExamples of behavior that contributes to a positive environment for our\ncommunity include:\n\n* Demonstrating empathy and kindness toward other people\n* Being respectful of differing opinions, viewpoints, and experiences\n* Giving and gracefully accepting constructive feedback\n* Accepting responsibility and apologizing to those affected by our mistakes,\n  and learning from the experience\n* Focusing on what is best not just for us as individuals, but for the overall\n  community\n\nExamples of unacceptable behavior include:\n\n* The use of sexualized language or imagery, and sexual attention or advances of\n  any kind\n* Trolling, insulting or derogatory comments, and personal or political attacks\n* Public or private harassment\n* Publishing others' private information, such as a physical or email address,\n  without their explicit permission\n* Other conduct that could reasonably be considered inappropriate in a\n  professional setting\n\n## Enforcement Responsibilities\n\nCommunity leaders are responsible for clarifying and enforcing our standards of\nacceptable behavior and will take appropriate and fair corrective action in\nresponse to any behavior that they deem inappropriate, threatening, offensive,\nor harmful.\n\nCommunity leaders have the right and responsibility to remove, edit, or reject\ncomments, commits, code, wiki edits, 
issues, and other contributions that are\nnot aligned to this Code of Conduct, and will communicate reasons for moderation\ndecisions when appropriate.\n\n## Scope\n\nThis Code of Conduct applies within all community spaces, and also applies when\nan individual is officially representing the community in public spaces.\nExamples of representing our community include using an official e-mail address,\nposting via an official social media account, or acting as an appointed\nrepresentative at an online or offline event.\n\n## Enforcement\n\nInstances of abusive, harassing, or otherwise unacceptable behavior may be\nreported to the community leaders responsible for enforcement at\nfeedback@huggingface.co.\nAll complaints will be reviewed and investigated promptly and fairly.\n\nAll community leaders are obligated to respect the privacy and security of the\nreporter of any incident.\n\n## Enforcement Guidelines\n\nCommunity leaders will follow these Community Impact Guidelines in determining\nthe consequences for any action they deem in violation of this Code of Conduct:\n\n### 1. Correction\n\n**Community Impact**: Use of inappropriate language or other behavior deemed\nunprofessional or unwelcome in the community.\n\n**Consequence**: A private, written warning from community leaders, providing\nclarity around the nature of the violation and an explanation of why the\nbehavior was inappropriate. A public apology may be requested.\n\n### 2. Warning\n\n**Community Impact**: A violation through a single incident or series of\nactions.\n\n**Consequence**: A warning with consequences for continued behavior. No\ninteraction with the people involved, including unsolicited interaction with\nthose enforcing the Code of Conduct, for a specified period. This\nincludes avoiding interactions in community spaces and external channels\nlike social media. Violating these terms may lead to a temporary or permanent\nban.\n\n### 3. 
Temporary Ban\n\n**Community Impact**: A serious violation of community standards, including\nsustained inappropriate behavior.\n\n**Consequence**: A temporary ban from any sort of interaction or public\ncommunication with the community for a specified period of time. No public or\nprivate interaction with the people involved, including unsolicited interaction\nwith those enforcing the Code of Conduct, is allowed during this period.\nViolating these terms may lead to a permanent ban.\n\n### 4. Permanent Ban\n\n**Community Impact**: Demonstrating a pattern of violation of community\nstandards, including sustained inappropriate behavior, harassment of an\nindividual, or aggression toward or disparagement of classes of individuals.\n\n**Consequence**: A permanent ban from any public interaction within the\ncommunity.\n\n## Attribution\n\nThis Code of Conduct is adapted from the [Contributor Covenant][homepage],\nversion 2.1, available at\n[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].\n\nCommunity Impact Guidelines were inspired by\n[Mozilla's code of conduct enforcement ladder][Mozilla CoC].\n\nFor answers to common questions about this code of conduct, see the FAQ at\n[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at\n[https://www.contributor-covenant.org/translations][translations].\n\n[homepage]: https://www.contributor-covenant.org\n[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html\n[Mozilla CoC]: https://github.com/mozilla/diversity\n[FAQ]: https://www.contributor-covenant.org/faq\n[translations]: https://www.contributor-covenant.org/translations\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "*This guideline is very much a work-in-progress.*\n\nContributions to `timm` for code, documentation, tests are more than welcome!\n\nThere haven't been any formal guidelines to date so please bear with me, and feel free to add to this guide.\n\n# Coding style\n\nCode linting and auto-format (black) are not currently in place but open to consideration. In the meantime, the style to follow is (mostly) aligned with Google's guide: https://google.github.io/styleguide/pyguide.html. \n\nA few specific differences from Google style (or black)\n1. Line length is 120 char. Going over is okay in some cases (e.g. I prefer not to break URL across lines).\n2. Hanging indents are always preferred, please avoid aligning arguments with closing brackets or braces.\n\nExample, from Google guide, but this is a NO here:\n```python\n   # Aligned with opening delimiter.\n   foo = long_function_name(var_one, var_two,\n                            var_three, var_four)\n   meal = (spam,\n           beans)\n\n   # Aligned with opening delimiter in a dictionary.\n   foo = {\n       'long_dictionary_key': value1 +\n                              value2,\n       ...\n   }\n```\nThis is YES:\n\n```python\n   # 4-space hanging indent; nothing on first line,\n   # closing parenthesis on a new line.\n   foo = long_function_name(\n       var_one, var_two, var_three,\n       var_four\n   )\n   meal = (\n       spam,\n       beans,\n   )\n\n   # 4-space hanging indent in a dictionary.\n   foo = {\n       'long_dictionary_key':\n           long_dictionary_value,\n       ...\n   }\n```\n\nWhile preferred `timm` style is *mostly* compatible with Black / Ruff. Since I've been following PEP 8 style since before Black was a thing, there's one area I can't agree on, function arg indents. 
From a Black example this:\n```python\ndef very_important_function(\n    template: str,\n    *variables,\n    file: os.PathLike,\n    engine: str,\n    header: bool = True,\n    debug: bool = False,\n):\n    with open(file, \"w\") as f:\n        ...\n```\n\nShould according to PEP 8 (https://peps.python.org/pep-0008/#indentation) have an extra level of indent on the args:\n\n```python\ndef very_important_function(\n        template: str,\n        *variables,\n        file: os.PathLike,\n        engine: str,\n        header: bool = True,\n        debug: bool = False,\n):\n    with open(file, \"w\") as f:\n        ...\n```\n\nI do like sadface though. So please don't run Black on existing files and convert all of the arg indents. Thanks!\n\nWhen there is discrepancy in a given source file (there are many origins for various bits of code and not all have been updated to what I consider current goal), please follow the style in a given file.\n\nPlease avoid formatting code that is unrelated to your PR.\n\nPR with pure formatting / style fixes will be accepted but only in isolation from functional changes, best to ask before starting such a change.\n\n# Documentation\n\nAs with code style, docstrings style based on the Google guide: https://google.github.io/styleguide/pyguide.html\n\nThe goal for the code is to eventually move to have all major functions and `__init__` methods use PEP484 type annotations.\n\nWhen type annotations are used for a function, as per the Google pyguide, they should **NOT** be duplicated in the docstrings, please leave annotations as the one source of truth re typing.\n\nThere are a LOT of gaps in current documentation relative to the functionality in timm, please, document away!\n\n# Installation\n\nCreate a Python virtual environment using Python 3.10. 
Inside the environment, install `torch` and `torchvision` using the instructions matching your system as listed on the [PyTorch website](https://pytorch.org/).\n\nThen install the remaining dependencies:\n\n```\npython -m pip install -r requirements.txt\npython -m pip install -r requirements-dev.txt  # for testing\npython -m pip install -e .\n```\n\n## Unit tests\n\nRun the tests using:\n\n```\npytest tests/\n```\n\nSince the whole test suite takes a lot of time to run locally (a few hours), you may want to select a subset of tests relating to the changes you made by using the `-k` option of [`pytest`](https://docs.pytest.org/en/7.1.x/example/markers.html#using-k-expr-to-select-tests-based-on-their-name). Moreover, running tests in parallel (in this example 4 processes) with the `-n` option may help:\n\n```\npytest -k \"substring-to-match\" -n 4 tests/\n```\n\n## Building documentation\n\nPlease refer to [this document](https://github.com/huggingface/pytorch-image-models/tree/main/hfdocs).\n\n# Questions\n\nIf you have any questions about contribution, where / how to contribute, please ask in the [Discussions](https://github.com/huggingface/pytorch-image-models/discussions/categories/contributing) (there is a `Contributing` topic).\n"
  },
  {
    "path": "LICENSE",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"{}\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright 2019 Ross Wightman\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "MANIFEST.in",
    "content": "include timm/models/_pruned/*.txt\ninclude timm/data/_info/*.txt\ninclude timm/data/_info/*.json\n"
  },
  {
    "path": "README.md",
    "content": "# PyTorch Image Models\n- [What's New](#whats-new)\n- [Introduction](#introduction)\n- [Models](#models)\n- [Features](#features)\n- [Results](#results)\n- [Getting Started (Documentation)](#getting-started-documentation)\n- [Train, Validation, Inference Scripts](#train-validation-inference-scripts)\n- [Awesome PyTorch Resources](#awesome-pytorch-resources)\n- [Licenses](#licenses)\n- [Citing](#citing)\n\n## What's New\n\n## Feb 23, 2026\n* Add token distillation training support to distillation task wrappers\n* Remove some torch.jit usage in prep for official deprecation\n* Caution added to AdamP optimizer\n* Call reset_parameters() even if meta-device init so that buffers get init w/ hacks like init_empty_weights\n* Tweak Muon optimizer to work with DTensor/FSDP2 (clamp_ instead of clamp_min_, alternate NS branch for DTensor)\n* Release 1.0.25\n\n## Jan 21, 2026\n* **Compat Break**: Fix oversight w/ QKV vs MLP bias in `ParallelScalingBlock` (& `DiffParallelScalingBlock`)\n  * Does not impact any trained `timm` models but could impact downstream use.\n\n## Jan 5 & 6, 2026\n* Release 1.0.24\n* Add new benchmark result csv files for inference timing on all models w/ RTX Pro 6000, 5090, and 4090 cards w/ PyTorch 2.9.1\n* Fix moved module error in deprecated timm.models.layers import path that impacts legacy imports\n* Release 1.0.23\n\n## Dec 30, 2025\n* Add better NAdaMuon trained `dpwee`, `dwee`, `dlittle` (differential) ViTs with a small boost over previous runs\n  * https://huggingface.co/timm/vit_dlittle_patch16_reg1_gap_256.sbb_nadamuon_in1k (83.24% top-1)\n  * https://huggingface.co/timm/vit_dwee_patch16_reg1_gap_256.sbb_nadamuon_in1k  (81.80% top-1)\n  * https://huggingface.co/timm/vit_dpwee_patch16_reg1_gap_256.sbb_nadamuon_in1k (81.67% top-1)\n* Add a ~21M param `timm` variant of the CSATv2 model at 512x512 & 640x640\n  * https://huggingface.co/timm/csatv2_21m.sw_r640_in1k (83.13% top-1)\n  * 
https://huggingface.co/timm/csatv2_21m.sw_r512_in1k (82.58% top-1)\n* Factor non-persistent param init out of `__init__` into a common method that can be externally called via `init_non_persistent_buffers()` after meta-device init. \n  \n## Dec 12, 2025\n* Add CSATV2 model (thanks https://github.com/gusdlf93) -- a lightweight but high res model with DCT stem & spatial attention. https://huggingface.co/Hyunil/CSATv2\n* Add AdaMuon and NAdaMuon optimizer support to existing `timm` Muon impl. Appears more competitive vs AdamW with familiar hparams for image tasks.\n* End of year PR cleanup, merge aspects of several long open PR\n  * Merge differential attention (`DiffAttention`), add corresponding `DiffParallelScalingBlock` (for ViT), train some wee vits\n    * https://huggingface.co/timm/vit_dwee_patch16_reg1_gap_256.sbb_in1k\n    * https://huggingface.co/timm/vit_dpwee_patch16_reg1_gap_256.sbb_in1k\n  * Add a few pooling modules, `LsePlus` and `SimPool`\n  * Cleanup, optimize `DropBlock2d` (also add support to ByobNet based models)\n* Bump unit tests to PyTorch 2.9.1 + Python 3.13 on upper end, lower still PyTorch 1.13 + Python 3.10\n  \n## Dec 1, 2025\n* Add lightweight task abstraction, add logits and feature distillation support to train script via new tasks.\n* Remove old APEX AMP support\n\n## Nov 4, 2025\n* Fix LayerScale / LayerScale2d init bug (init values ignored), introduced in 1.0.21. 
Thanks https://github.com/Ilya-Fradlin\n* Release 1.0.22\n\n## Oct 31, 2025 🎃\n* Update imagenet & OOD variant result csv files to include a few new models and verify correctness over several torch & timm versions\n* EfficientNet-X and EfficientNet-H B5 model weights added as part of a hparam search for AdamW vs Muon (still iterating on Muon runs)\n\n## Oct 16-20, 2025\n* Add an impl of the Muon optimizer (based on https://github.com/KellerJordan/Muon) with customizations\n  * extra flexibility and improved handling for conv weights and fallbacks for weight shapes not suited for orthogonalization\n  * small speedup for NS iterations by reducing allocs and using fused (b)add(b)mm ops\n  * by default uses AdamW (or NAdamW if `nesterov=True`) updates if muon not suitable for parameter shape (or excluded via param group flag)\n  * like torch impl, select from several LR scale adjustment fns via `adjust_lr_fn`\n  * select from several NS coefficient presets or specify your own via `ns_coefficients`\n* First 2 steps of 'meta' device model initialization supported\n  * Fix several ops that were breaking creation under 'meta' device context\n  * Add device & dtype factory kwarg support to all models and modules (anything inherting from nn.Module) in `timm`\n* License fields added to pretrained cfgs in code\n* Release 1.0.21\n\n## Sept 21, 2025\n* Remap DINOv3 ViT weight tags from `lvd_1689m` -> `lvd1689m` to match (same for `sat_493m` -> `sat493m`)\n* Release 1.0.20\n\n## Sept 17, 2025\n* DINOv3 (https://arxiv.org/abs/2508.10104) ConvNeXt and ViT models added. ConvNeXt models were mapped to existing `timm` model. ViT support done via the EVA base model w/ a new `RotaryEmbeddingDinoV3` to match the DINOv3 specific RoPE impl\n  * HuggingFace Hub: https://huggingface.co/collections/timm/timm-dinov3-68cb08bb0bee365973d52a4d\n* MobileCLIP-2 (https://arxiv.org/abs/2508.20691) vision encoders. 
New MCI3/MCI4 FastViT variants added and weights mapped to existing FastViT and B, L/14 ViTs.\n* MetaCLIP-2 Worldwide (https://arxiv.org/abs/2507.22062) ViT encoder weights added.\n* SigLIP-2 (https://arxiv.org/abs/2502.14786) NaFlex ViT encoder weights added via timm NaFlexViT model.\n* Misc fixes and contributions\n\n## July 23, 2025\n* Add `set_input_size()` method to EVA models, used by OpenCLIP 3.0.0 to allow resizing for timm based encoder models.\n* Release 1.0.18, needed for PE-Core S & T models in OpenCLIP 3.0.0\n* Fix small typing issue that broke Python 3.9 compat. 1.0.19 patch release.\n\n## July 21, 2025\n* ROPE support added to NaFlexViT. All models covered by the EVA base (`eva.py`) including EVA, EVA02, Meta PE ViT, `timm` SBB ViT w/ ROPE, and Naver ROPE-ViT can be now loaded in NaFlexViT when `use_naflex=True` passed at model creation time\n* More Meta PE ViT encoders added, including small/tiny variants, lang variants w/ tiling, and more spatial variants.\n* PatchDropout fixed with NaFlexViT and also w/ EVA models (regression after adding Naver ROPE-ViT)\n* Fix XY order with grid_indexing='xy', impacted non-square image use in 'xy' mode (only ROPE-ViT and PE impacted).\n\n## July 7, 2025\n* MobileNet-v5 backbone tweaks for improved Google Gemma 3n behaviour (to pair with updated official weights)\n  * Add stem bias (zero'd in updated weights, compat break with old weights)\n  * GELU -> GELU (tanh approx). 
A minor change to be closer to JAX\n* Add two arguments to layer-decay support, a min scale clamp and 'no optimization' scale threshold\n* Add 'Fp32' LayerNorm, RMSNorm, SimpleNorm variants that can be enabled to force computation of norm in float32\n* Some typing, argument cleanup for norm, norm+act layers done with above\n* Support Naver ROPE-ViT (https://github.com/naver-ai/rope-vit) in `eva.py`, add RotaryEmbeddingMixed module for mixed mode, weights on HuggingFace Hub\n\n|model                                             |img_size|top1  |top5  |param_count|\n|--------------------------------------------------|--------|------|------|-----------|\n|vit_large_patch16_rope_mixed_ape_224.naver_in1k  |224     |84.84 |97.122|304.4      |\n|vit_large_patch16_rope_mixed_224.naver_in1k      |224     |84.828|97.116|304.2      |\n|vit_large_patch16_rope_ape_224.naver_in1k        |224     |84.65 |97.154|304.37     |\n|vit_large_patch16_rope_224.naver_in1k            |224     |84.648|97.122|304.17     |\n|vit_base_patch16_rope_mixed_ape_224.naver_in1k   |224     |83.894|96.754|86.59      |\n|vit_base_patch16_rope_mixed_224.naver_in1k       |224     |83.804|96.712|86.44      |\n|vit_base_patch16_rope_ape_224.naver_in1k         |224     |83.782|96.61 |86.59      |\n|vit_base_patch16_rope_224.naver_in1k             |224     |83.718|96.672|86.43      |\n|vit_small_patch16_rope_224.naver_in1k            |224     |81.23 |95.022|21.98      |\n|vit_small_patch16_rope_mixed_224.naver_in1k      |224     |81.216|95.022|21.99      |\n|vit_small_patch16_rope_ape_224.naver_in1k        |224     |81.004|95.016|22.06      |\n|vit_small_patch16_rope_mixed_ape_224.naver_in1k  |224     |80.986|94.976|22.06      |\n* Some cleanup of ROPE modules, helpers, and FX tracing leaf registration\n* Preparing version 1.0.17 release\n\n## June 26, 2025\n* MobileNetV5 backbone (w/ encoder only variant) for [Gemma 3n](https://ai.google.dev/gemma/docs/gemma-3n#parameters) image encoder\n* Version 1.0.16 
released\n\n## June 23, 2025\n* Add F.grid_sample based 2D and factorized pos embed resize to NaFlexViT. Faster when lots of different sizes (based on example by https://github.com/stas-sl).\n* Further speed up patch embed resample by replacing vmap with matmul (based on snippet by https://github.com/stas-sl).\n* Add 3 initial native aspect NaFlexViT checkpoints created while testing, ImageNet-1k and 3 different pos embed configs w/ same hparams.\n\n | Model | Top-1 Acc | Top-5 Acc | Params (M) | Eval Seq Len |\n |:---|:---:|:---:|:---:|:---:|\n | [naflexvit_base_patch16_par_gap.e300_s576_in1k](https://hf.co/timm/naflexvit_base_patch16_par_gap.e300_s576_in1k) | 83.67 | 96.45 | 86.63 | 576 |\n | [naflexvit_base_patch16_parfac_gap.e300_s576_in1k](https://hf.co/timm/naflexvit_base_patch16_parfac_gap.e300_s576_in1k) | 83.63 | 96.41 | 86.46 | 576 |\n | [naflexvit_base_patch16_gap.e300_s576_in1k](https://hf.co/timm/naflexvit_base_patch16_gap.e300_s576_in1k) | 83.50 | 96.46 | 86.63 | 576 |\n* Support gradient checkpointing for `forward_intermediates` and fix some checkpointing bugs. Thanks https://github.com/brianhou0208\n* Add 'corrected weight decay' (https://arxiv.org/abs/2506.02285) as option to AdamW (legacy), Adopt, Kron, Adafactor (BV), Lamb, LaProp, Lion, NadamW, RmsPropTF, SGDW optimizers\n* Switch PE (perception encoder) ViT models to use native timm weights instead of remapping on the fly\n* Fix cuda stream bug in prefetch loader\n  \n## June 5, 2025\n* Initial NaFlexVit model code. NaFlexVit is a Vision Transformer with:\n  1. Encapsulated embedding and position encoding in a single module\n  2. Support for nn.Linear patch embedding on pre-patchified (dictionary) inputs\n  3. Support for NaFlex variable aspect, variable resolution (SigLip-2: https://arxiv.org/abs/2502.14786)\n  4. Support for FlexiViT variable patch size (https://arxiv.org/abs/2212.08013)\n  5. 
Support for NaViT fractional/factorized position embedding (https://arxiv.org/abs/2307.06304)\n* Existing vit models in `vision_transformer.py` can be loaded into the NaFlexVit model by adding the `use_naflex=True` flag to `create_model`\n  * Some native weights coming soon\n* A full NaFlex data pipeline is available that allows training / fine-tuning / evaluating with variable aspect / size images\n  * To enable in `train.py` and `validate.py` add the `--naflex-loader` arg, must be used with a NaFlexVit\n* To evaluate an existing (classic) ViT loaded in NaFlexVit model w/ NaFlex data pipe:\n  * `python validate.py /imagenet --amp -j 8 --model vit_base_patch16_224 --model-kwargs use_naflex=True --naflex-loader --naflex-max-seq-len 256` \n* The training has some extra args features worth noting\n  * The `--naflex-train-seq-lens'` argument specifies which sequence lengths to randomly pick from per batch during training\n  * The `--naflex-max-seq-len` argument sets the target sequence length for validation\n  * Adding `--model-kwargs enable_patch_interpolator=True --naflex-patch-sizes 12 16 24` will enable random patch size selection per-batch w/ interpolation\n  * The `--naflex-loss-scale` arg changes loss scaling mode per batch relative to the batch size, `timm` NaFlex loading changes the batch size for each seq len\n\n## May 28, 2025\n* Add a number of small/fast models thanks to https://github.com/brianhou0208\n  * SwiftFormer - [(ICCV2023) SwiftFormer: Efficient Additive Attention for Transformer-based Real-time Mobile Vision Applications](https://github.com/Amshaker/SwiftFormer) \n  * FasterNet - [(CVPR2023) Run, Don’t Walk: Chasing Higher FLOPS for Faster Neural Networks](https://github.com/JierunChen/FasterNet)\n  * SHViT - [(CVPR2024) SHViT: Single-Head Vision Transformer with Memory Efficient](https://github.com/ysj9909/SHViT)\n  * StarNet - [(CVPR2024) Rewrite the Stars](https://github.com/ma-xu/Rewrite-the-Stars)\n  * GhostNet-V3 [GhostNetV3: Exploring the 
Training Strategies for Compact Models](https://github.com/huawei-noah/Efficient-AI-Backbones/tree/master/ghostnetv3_pytorch)\n* Update EVA ViT (closest match) to support Perception Encoder models (https://arxiv.org/abs/2504.13181) from Meta, loading Hub weights but I still need to push dedicated `timm` weights\n  * Add some flexibility to ROPE impl\n* Big increase in number of models supporting `forward_intermediates()` and some additional fixes thanks to https://github.com/brianhou0208\n  * DaViT, EdgeNeXt, EfficientFormerV2, EfficientViT(MIT), EfficientViT(MSRA), FocalNet, GCViT, HGNet /V2, InceptionNeXt, Inception-V4, MambaOut, MetaFormer, NesT, Next-ViT, PiT, PVT V2, RepGhostNet, RepViT, ResNetV2, ReXNet, TinyViT, TResNet, VoV\n* TNT model updated w/ new weights `forward_intermediates()` thanks to https://github.com/brianhou0208\n* Add `local-dir:` pretrained schema, can use `local-dir:/path/to/model/folder` for model name to source model / pretrained cfg & weights Hugging Face Hub models (config.json + weights file) from a local folder.\n* Fixes, improvements for onnx export\n    \n## Feb 21, 2025\n* SigLIP 2 ViT image encoders added (https://huggingface.co/collections/timm/siglip-2-67b8e72ba08b09dd97aecaf9)\n  * Variable resolution / aspect NaFlex versions are a WIP\n* Add 'SO150M2' ViT weights trained with SBB recipes, great results, better for ImageNet than previous attempt w/ less training.\n  * `vit_so150m2_patch16_reg1_gap_448.sbb_e200_in12k_ft_in1k` - 88.1% top-1\n  * `vit_so150m2_patch16_reg1_gap_384.sbb_e200_in12k_ft_in1k` - 87.9% top-1\n  * `vit_so150m2_patch16_reg1_gap_256.sbb_e200_in12k_ft_in1k` - 87.3% top-1\n  * `vit_so150m2_patch16_reg4_gap_256.sbb_e200_in12k`\n* Updated InternViT-300M '2.5' weights\n* Release 1.0.15\n\n## Feb 1, 2025\n* FYI PyTorch 2.6 & Python 3.13 are tested and working w/ current main and released version of `timm`\n\n## Jan 27, 2025\n* Add Kron Optimizer (PSGD w/ Kronecker-factored preconditioner) \n  * Code from 
https://github.com/evanatyourservice/kron_torch\n  * See also https://sites.google.com/site/lixilinx/home/psgd\n\n## Jan 19, 2025\n* Fix loading of LeViT safetensor weights, remove conversion code which should have been deactivated\n* Add 'SO150M' ViT weights trained with SBB recipes, decent results, but not optimal shape for ImageNet-12k/1k pretrain/ft\n  * `vit_so150m_patch16_reg4_gap_256.sbb_e250_in12k_ft_in1k` - 86.7% top-1\n  * `vit_so150m_patch16_reg4_gap_384.sbb_e250_in12k_ft_in1k` - 87.4% top-1\n  * `vit_so150m_patch16_reg4_gap_256.sbb_e250_in12k`\n* Misc typing, typo, etc. cleanup\n* 1.0.14 release to get above LeViT fix out\n\n## Jan 9, 2025\n* Add support to train and validate in pure `bfloat16` or `float16`\n* `wandb` project name arg added by https://github.com/caojiaolong, use arg.experiment for name\n* Fix old issue w/ checkpoint saving not working on filesystem w/o hard-link support (e.g. FUSE fs mounts)\n* 1.0.13 release\n\n## Jan 6, 2025\n* Add `torch.utils.checkpoint.checkpoint()` wrapper in `timm.models` that defaults `use_reentrant=False`, unless `TIMM_REENTRANT_CKPT=1` is set in env.\n\n## Dec 31, 2024\n* `convnext_nano` 384x384 ImageNet-12k pretrain & fine-tune. https://huggingface.co/models?search=convnext_nano%20r384\n* Add AIM-v2 encoders from https://github.com/apple/ml-aim, see on Hub: https://huggingface.co/models?search=timm%20aimv2\n* Add PaliGemma2 encoders from https://github.com/google-research/big_vision to existing PaliGemma, see on Hub: https://huggingface.co/models?search=timm%20pali2\n* Add missing L/14 DFN2B 39B CLIP ViT, `vit_large_patch14_clip_224.dfn2b_s39b`\n* Fix existing `RmsNorm` layer & fn to match standard formulation, use PT 2.5 impl when possible. Move old impl to `SimpleNorm` layer, it's LN w/o centering or bias. 
There were only two `timm` models using it, and they have been updated.\n* Allow override of `cache_dir` arg for model creation\n* Pass through `trust_remote_code` for HF datasets wrapper\n* `inception_next_atto` model added by creator\n* Adan optimizer caution, and Lamb decoupled weight decay options\n* Some feature_info metadata fixed by https://github.com/brianhou0208\n* All OpenCLIP and JAX (CLIP, SigLIP, Pali, etc) model weights that used load time remapping were given their own HF Hub instances so that they work with `hf-hub:` based loading, and thus will work with new Transformers `TimmWrapperModel`\n\n## Introduction\n\nPy**T**orch **Im**age **M**odels (`timm`) is a collection of image models, layers, utilities, optimizers, schedulers, data-loaders / augmentations, and reference training / validation scripts that aim to pull together a wide variety of SOTA models with ability to reproduce ImageNet training results.\n\nThe work of many others is present here. I've tried to make sure all source material is acknowledged via links to github, arxiv papers, etc in the README, documentation, and code docstrings. Please let me know if I missed anything.\n\n## Features\n\n### Models\n\nAll model architecture families include variants with pretrained weights. There are specific model variants without any weights, it is NOT a bug. 
Help training new or better weights is always appreciated.\n\n* Aggregating Nested Transformers - https://arxiv.org/abs/2105.12723\n* BEiT - https://arxiv.org/abs/2106.08254\n* BEiT-V2 - https://arxiv.org/abs/2208.06366\n* BEiT3 - https://arxiv.org/abs/2208.10442\n* Big Transfer ResNetV2 (BiT) - https://arxiv.org/abs/1912.11370\n* Bottleneck Transformers - https://arxiv.org/abs/2101.11605\n* CaiT (Class-Attention in Image Transformers) - https://arxiv.org/abs/2103.17239\n* CoaT (Co-Scale Conv-Attentional Image Transformers) - https://arxiv.org/abs/2104.06399\n* CoAtNet (Convolution and Attention) - https://arxiv.org/abs/2106.04803\n* ConvNeXt - https://arxiv.org/abs/2201.03545\n* ConvNeXt-V2 - http://arxiv.org/abs/2301.00808\n* ConViT (Soft Convolutional Inductive Biases Vision Transformers)- https://arxiv.org/abs/2103.10697\n* CspNet (Cross-Stage Partial Networks) - https://arxiv.org/abs/1911.11929\n* DeiT - https://arxiv.org/abs/2012.12877\n* DeiT-III - https://arxiv.org/pdf/2204.07118.pdf\n* DenseNet - https://arxiv.org/abs/1608.06993\n* DLA - https://arxiv.org/abs/1707.06484\n* DPN (Dual-Path Network) - https://arxiv.org/abs/1707.01629\n* EdgeNeXt - https://arxiv.org/abs/2206.10589\n* EfficientFormer - https://arxiv.org/abs/2206.01191\n* EfficientFormer-V2 - https://arxiv.org/abs/2212.08059\n* EfficientNet (MBConvNet Family)\n    * EfficientNet NoisyStudent (B0-B7, L2) - https://arxiv.org/abs/1911.04252\n    * EfficientNet AdvProp (B0-B8) - https://arxiv.org/abs/1911.09665\n    * EfficientNet (B0-B7) - https://arxiv.org/abs/1905.11946\n    * EfficientNet-EdgeTPU (S, M, L) - https://ai.googleblog.com/2019/08/efficientnet-edgetpu-creating.html\n    * EfficientNet V2 - https://arxiv.org/abs/2104.00298\n    * FBNet-C - https://arxiv.org/abs/1812.03443\n    * MixNet - https://arxiv.org/abs/1907.09595\n    * MNASNet B1, A1 (Squeeze-Excite), and Small - https://arxiv.org/abs/1807.11626\n    * MobileNet-V2 - https://arxiv.org/abs/1801.04381\n    * Single-Path NAS - 
https://arxiv.org/abs/1904.02877\n    * TinyNet - https://arxiv.org/abs/2010.14819\n* EfficientViT (MIT) - https://arxiv.org/abs/2205.14756\n* EfficientViT (MSRA) - https://arxiv.org/abs/2305.07027\n* EVA - https://arxiv.org/abs/2211.07636\n* EVA-02 - https://arxiv.org/abs/2303.11331\n* FasterNet - https://arxiv.org/abs/2303.03667\n* FastViT - https://arxiv.org/abs/2303.14189\n* FlexiViT - https://arxiv.org/abs/2212.08013\n* FocalNet (Focal Modulation Networks) - https://arxiv.org/abs/2203.11926\n* GCViT (Global Context Vision Transformer) - https://arxiv.org/abs/2206.09959\n* GhostNet - https://arxiv.org/abs/1911.11907\n* GhostNet-V2 - https://arxiv.org/abs/2211.12905\n* GhostNet-V3 - https://arxiv.org/abs/2404.11202\n* gMLP - https://arxiv.org/abs/2105.08050\n* GPU-Efficient Networks - https://arxiv.org/abs/2006.14090\n* Halo Nets - https://arxiv.org/abs/2103.12731\n* HGNet / HGNet-V2 - TBD\n* HRNet - https://arxiv.org/abs/1908.07919\n* InceptionNeXt - https://arxiv.org/abs/2303.16900\n* Inception-V3 - https://arxiv.org/abs/1512.00567\n* Inception-ResNet-V2 and Inception-V4 - https://arxiv.org/abs/1602.07261\n* Lambda Networks - https://arxiv.org/abs/2102.08602\n* LeViT (Vision Transformer in ConvNet's Clothing) - https://arxiv.org/abs/2104.01136\n* MambaOut - https://arxiv.org/abs/2405.07992\n* MaxViT (Multi-Axis Vision Transformer) - https://arxiv.org/abs/2204.01697\n* MetaFormer (PoolFormer-v2, ConvFormer, CAFormer) - https://arxiv.org/abs/2210.13452\n* MLP-Mixer - https://arxiv.org/abs/2105.01601\n* MobileCLIP - https://arxiv.org/abs/2311.17049\n* MobileNet-V3 (MBConvNet w/ Efficient Head) - https://arxiv.org/abs/1905.02244\n  * FBNet-V3 - https://arxiv.org/abs/2006.02049\n  * HardCoRe-NAS - https://arxiv.org/abs/2102.11646\n  * LCNet - https://arxiv.org/abs/2109.15099\n* MobileNetV4 - https://arxiv.org/abs/2404.10518\n* MobileOne - https://arxiv.org/abs/2206.04040\n* MobileViT - https://arxiv.org/abs/2110.02178\n* MobileViT-V2 - 
https://arxiv.org/abs/2206.02680\n* MViT-V2 (Improved Multiscale Vision Transformer) - https://arxiv.org/abs/2112.01526\n* NASNet-A - https://arxiv.org/abs/1707.07012\n* NesT - https://arxiv.org/abs/2105.12723\n* Next-ViT - https://arxiv.org/abs/2207.05501\n* NFNet-F - https://arxiv.org/abs/2102.06171\n* NF-RegNet / NF-ResNet - https://arxiv.org/abs/2101.08692\n* PE (Perception Encoder) - https://arxiv.org/abs/2504.13181\n* PNasNet - https://arxiv.org/abs/1712.00559\n* PoolFormer (MetaFormer) - https://arxiv.org/abs/2111.11418\n* Pooling-based Vision Transformer (PiT) - https://arxiv.org/abs/2103.16302\n* PVT-V2 (Improved Pyramid Vision Transformer) - https://arxiv.org/abs/2106.13797\n* RDNet (DenseNets Reloaded) - https://arxiv.org/abs/2403.19588\n* RegNet - https://arxiv.org/abs/2003.13678\n* RegNetZ - https://arxiv.org/abs/2103.06877\n* RepVGG - https://arxiv.org/abs/2101.03697\n* RepGhostNet - https://arxiv.org/abs/2211.06088\n* RepViT - https://arxiv.org/abs/2307.09283\n* ResMLP - https://arxiv.org/abs/2105.03404\n* ResNet/ResNeXt\n    * ResNet (v1b/v1.5) - https://arxiv.org/abs/1512.03385\n    * ResNeXt - https://arxiv.org/abs/1611.05431\n    * 'Bag of Tricks' / Gluon C, D, E, S variations - https://arxiv.org/abs/1812.01187\n    * Weakly-supervised (WSL) Instagram pretrained / ImageNet tuned ResNeXt101 - https://arxiv.org/abs/1805.00932\n    * Semi-supervised (SSL) / Semi-weakly Supervised (SWSL) ResNet/ResNeXts - https://arxiv.org/abs/1905.00546\n    * ECA-Net (ECAResNet) - https://arxiv.org/abs/1910.03151v4\n    * Squeeze-and-Excitation Networks (SEResNet) - https://arxiv.org/abs/1709.01507\n    * ResNet-RS - https://arxiv.org/abs/2103.07579\n* Res2Net - https://arxiv.org/abs/1904.01169\n* ResNeSt - https://arxiv.org/abs/2004.08955\n* ReXNet - https://arxiv.org/abs/2007.00992\n* ROPE-ViT - https://arxiv.org/abs/2403.13298\n* SelecSLS - https://arxiv.org/abs/1907.00837\n* Selective Kernel Networks - https://arxiv.org/abs/1903.06586\n* Sequencer2D - 
https://arxiv.org/abs/2205.01972\n* SHViT - https://arxiv.org/abs/2401.16456\n* SigLIP (image encoder) - https://arxiv.org/abs/2303.15343\n* SigLIP 2 (image encoder) - https://arxiv.org/abs/2502.14786\n* StarNet - https://arxiv.org/abs/2403.19967\n* SwiftFormer - https://arxiv.org/pdf/2303.15446\n* Swin S3 (AutoFormerV2) - https://arxiv.org/abs/2111.14725\n* Swin Transformer - https://arxiv.org/abs/2103.14030\n* Swin Transformer V2 - https://arxiv.org/abs/2111.09883\n* TinyViT - https://arxiv.org/abs/2207.10666\n* Transformer-iN-Transformer (TNT) - https://arxiv.org/abs/2103.00112\n* TResNet - https://arxiv.org/abs/2003.13630\n* Twins (Spatial Attention in Vision Transformers) - https://arxiv.org/pdf/2104.13840.pdf\n* VGG - https://arxiv.org/abs/1409.1556\n* Visformer - https://arxiv.org/abs/2104.12533\n* Vision Transformer - https://arxiv.org/abs/2010.11929\n* ViTamin - https://arxiv.org/abs/2404.02132\n* VOLO (Vision Outlooker) - https://arxiv.org/abs/2106.13112\n* VovNet V2 and V1 - https://arxiv.org/abs/1911.06667\n* Xception - https://arxiv.org/abs/1610.02357\n* Xception (Modified Aligned, Gluon) - https://arxiv.org/abs/1802.02611\n* Xception (Modified Aligned, TF) - https://arxiv.org/abs/1802.02611\n* XCiT (Cross-Covariance Image Transformers) - https://arxiv.org/abs/2106.09681\n\n### Optimizers\nTo see full list of optimizers w/ descriptions: `timm.optim.list_optimizers(with_description=True)`\n\nIncluded optimizers available via `timm.optim.create_optimizer_v2` factory method:\n* `adabelief` an implementation of AdaBelief adapted from https://github.com/juntang-zhuang/Adabelief-Optimizer - https://arxiv.org/abs/2010.07468\n* `adafactor` adapted from [FAIRSeq impl](https://github.com/pytorch/fairseq/blob/master/fairseq/optim/adafactor.py) - https://arxiv.org/abs/1804.04235\n* `adafactorbv` adapted from [Big Vision](https://github.com/google-research/big_vision/blob/main/big_vision/optax.py) - https://arxiv.org/abs/2106.04560\n* `adahessian` by [David 
Samuel](https://github.com/davda54/ada-hessian) - https://arxiv.org/abs/2006.00719\n* `adamp` and `sgdp` by [Naver ClovAI](https://github.com/clovaai) - https://arxiv.org/abs/2006.08217\n* `adamuon` and `nadamuon` as per https://github.com/Chongjie-Si/AdaMuon - https://arxiv.org/abs/2507.11005\n* `adan` an implementation of Adan adapted from https://github.com/sail-sg/Adan - https://arxiv.org/abs/2208.06677\n* `adopt` ADOPT adapted from https://github.com/iShohei220/adopt - https://arxiv.org/abs/2411.02853\n* `kron` PSGD w/ Kronecker-factored preconditioner from https://github.com/evanatyourservice/kron_torch - https://sites.google.com/site/lixilinx/home/psgd\n* `lamb` an implementation of Lamb and LambC (w/ trust-clipping) cleaned up and modified to support use with XLA - https://arxiv.org/abs/1904.00962\n* `laprop` optimizer from https://github.com/Z-T-WANG/LaProp-Optimizer - https://arxiv.org/abs/2002.04839\n* `lars` an implementation of LARS and LARC (w/ trust-clipping) - https://arxiv.org/abs/1708.03888\n* `lion` and implementation of Lion adapted from https://github.com/google/automl/tree/master/lion - https://arxiv.org/abs/2302.06675\n* `lookahead` adapted from impl by [Liam](https://github.com/alphadl/lookahead.pytorch) - https://arxiv.org/abs/1907.08610\n* `madgrad` an implementation of MADGRAD adapted from https://github.com/facebookresearch/madgrad - https://arxiv.org/abs/2101.11075\n* `mars` MARS optimizer from https://github.com/AGI-Arena/MARS - https://arxiv.org/abs/2411.10438\n* `muon` MUON optimizer from https://github.com/KellerJordan/Muon with numerous additions and improved non-transformer behaviour\n* `nadam` an implementation of Adam w/ Nesterov momentum\n* `nadamw` an implementation of AdamW (Adam w/ decoupled weight-decay) w/ Nesterov momentum. 
A simplified impl based on https://github.com/mlcommons/algorithmic-efficiency\n* `novograd` by [Masashi Kimura](https://github.com/convergence-lab/novograd) - https://arxiv.org/abs/1905.11286\n* `radam` by [Liyuan Liu](https://github.com/LiyuanLucasLiu/RAdam) - https://arxiv.org/abs/1908.03265\n* `rmsprop_tf` adapted from PyTorch RMSProp by myself. Reproduces much improved Tensorflow RMSProp behaviour\n* `sgdw` and implementation of SGD w/ decoupled weight-decay\n* `fused<name>` optimizers by name with [NVIDIA Apex](https://github.com/NVIDIA/apex/tree/master/apex/optimizers) installed\n* `bnb<name>` optimizers by name with [BitsAndBytes](https://github.com/TimDettmers/bitsandbytes) installed\n* `cadamw`, `clion`, and more 'Cautious' optimizers from https://github.com/kyleliang919/C-Optim - https://arxiv.org/abs/2411.16085\n* `adam`, `adamw`, `rmsprop`, `adadelta`, `adagrad`, and `sgd` pass through to `torch.optim` implementations\n* `c` suffix (eg `adamc`, `nadamc` to implement 'corrected weight decay' in https://arxiv.org/abs/2506.02285)\n  \n### Augmentations\n* Random Erasing from [Zhun Zhong](https://github.com/zhunzhong07/Random-Erasing/blob/master/transforms.py) - https://arxiv.org/abs/1708.04896)\n* Mixup - https://arxiv.org/abs/1710.09412\n* CutMix - https://arxiv.org/abs/1905.04899\n* AutoAugment (https://arxiv.org/abs/1805.09501) and RandAugment (https://arxiv.org/abs/1909.13719) ImageNet configurations modeled after impl for EfficientNet training (https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/autoaugment.py)\n* AugMix w/ JSD loss, JSD w/ clean + augmented mixing support works with AutoAugment and RandAugment as well - https://arxiv.org/abs/1912.02781\n* SplitBachNorm - allows splitting batch norm layers between clean and augmented (auxiliary batch norm) data\n\n### Regularization\n* DropPath aka \"Stochastic Depth\" - https://arxiv.org/abs/1603.09382\n* DropBlock - https://arxiv.org/abs/1810.12890\n* Blur Pooling - 
https://arxiv.org/abs/1904.11486\n\n### Other\n\nSeveral (less common) features that I often utilize in my projects are included. Many of their additions are the reason why I maintain my own set of models, instead of using others' via PIP:\n\n* All models have a common default configuration interface and API for\n    * accessing/changing the classifier - `get_classifier` and `reset_classifier`\n    * doing a forward pass on just the features - `forward_features` (see [documentation](https://huggingface.co/docs/timm/feature_extraction))\n    * these makes it easy to write consistent network wrappers that work with any of the models\n* All models support multi-scale feature map extraction (feature pyramids) via create_model (see [documentation](https://huggingface.co/docs/timm/feature_extraction))\n    * `create_model(name, features_only=True, out_indices=..., output_stride=...)`\n    * `out_indices` creation arg specifies which feature maps to return, these indices are 0 based and generally correspond to the `C(i + 1)` feature level.\n    * `output_stride` creation arg controls output stride of the network by using dilated convolutions. Most networks are stride 32 by default. 
Not all networks support this.\n    * feature map channel counts, reduction level (stride) can be queried AFTER model creation via the `.feature_info` member\n* All models have a consistent pretrained weight loader that adapts last linear if necessary, and from 3 to 1 channel input if desired\n* High performance [reference training, validation, and inference scripts](https://huggingface.co/docs/timm/training_script) that work in several process/GPU modes:\n    * NVIDIA DDP w/ a single GPU per process, multiple processes with APEX present (AMP mixed-precision optional)\n    * PyTorch DistributedDataParallel w/ multi-gpu, single process (AMP disabled as it crashes when enabled)\n    * PyTorch w/ single GPU single process (AMP optional)\n* A dynamic global pool implementation that allows selecting from average pooling, max pooling, average + max, or concat([average, max]) at model creation. All global pooling is adaptive average by default and compatible with pretrained weights.\n* A 'Test Time Pool' wrapper that can wrap any of the included models and usually provides improved performance doing inference with input images larger than the training size. 
Idea adapted from original DPN implementation when I ported (https://github.com/cypw/DPNs)\n* Learning rate schedulers\n  * Ideas adopted from\n     * [AllenNLP schedulers](https://github.com/allenai/allennlp/tree/master/allennlp/training/learning_rate_schedulers)\n     * [FAIRseq lr_scheduler](https://github.com/pytorch/fairseq/tree/master/fairseq/optim/lr_scheduler)\n     * SGDR: Stochastic Gradient Descent with Warm Restarts (https://arxiv.org/abs/1608.03983)\n  * Schedulers include `step`, `cosine` w/ restarts, `tanh` w/ restarts, `plateau`\n* Space-to-Depth by [mrT23](https://github.com/mrT23/TResNet/blob/master/src/models/tresnet/layers/space_to_depth.py) (https://arxiv.org/abs/1801.04590)\n* Adaptive Gradient Clipping (https://arxiv.org/abs/2102.06171, https://github.com/deepmind/deepmind-research/tree/master/nfnets)\n* An extensive selection of channel and/or spatial attention modules:\n    * Bottleneck Transformer - https://arxiv.org/abs/2101.11605\n    * CBAM - https://arxiv.org/abs/1807.06521\n    * Effective Squeeze-Excitation (ESE) - https://arxiv.org/abs/1911.06667\n    * Efficient Channel Attention (ECA) - https://arxiv.org/abs/1910.03151\n    * Gather-Excite (GE) - https://arxiv.org/abs/1810.12348\n    * Global Context (GC) - https://arxiv.org/abs/1904.11492\n    * Halo - https://arxiv.org/abs/2103.12731\n    * Involution - https://arxiv.org/abs/2103.06255\n    * Lambda Layer - https://arxiv.org/abs/2102.08602\n    * Non-Local (NL) -  https://arxiv.org/abs/1711.07971\n    * Squeeze-and-Excitation (SE) - https://arxiv.org/abs/1709.01507\n    * Selective Kernel (SK) - https://arxiv.org/abs/1903.06586\n    * Split (SPLAT) - https://arxiv.org/abs/2004.08955\n    * Shifted Window (SWIN) - https://arxiv.org/abs/2103.14030\n\n## Results\n\nModel validation results can be found in the [results tables](results/README.md)\n\n## Getting Started (Documentation)\n\nThe official documentation can be found at https://huggingface.co/docs/hub/timm. 
Documentation contributions are welcome.\n\n[Getting Started with PyTorch Image Models (timm): A Practitioner’s Guide](https://towardsdatascience.com/getting-started-with-pytorch-image-models-timm-a-practitioners-guide-4e77b4bf9055-2/) by [Chris Hughes](https://github.com/Chris-hughes10) is an extensive blog post covering many aspects of `timm` in detail.\n\n[timmdocs](http://timm.fast.ai/) is an alternate set of documentation for `timm`. A big thanks to [Aman Arora](https://github.com/amaarora) for his efforts creating timmdocs.\n\n[paperswithcode](https://paperswithcode.com/lib/timm) is a good resource for browsing the models within `timm`.\n\n## Train, Validation, Inference Scripts\n\nThe root folder of the repository contains reference train, validation, and inference scripts that work with the included models and other features of this repository. They are adaptable for other datasets and use cases with a little hacking. See [documentation](https://huggingface.co/docs/timm/training_script).\n\n## Awesome PyTorch Resources\n\nOne of the greatest assets of PyTorch is the community and their contributions. 
A few of my favourite resources that pair well with the models and components here are listed below.\n\n### Object Detection, Instance and Semantic Segmentation\n* Detectron2 - https://github.com/facebookresearch/detectron2\n* Segmentation Models (Semantic) - https://github.com/qubvel/segmentation_models.pytorch\n* EfficientDet (Obj Det, Semantic soon) - https://github.com/rwightman/efficientdet-pytorch\n\n### Computer Vision / Image Augmentation\n* Albumentations - https://github.com/albumentations-team/albumentations\n* Kornia - https://github.com/kornia/kornia\n\n### Knowledge Distillation\n* RepDistiller - https://github.com/HobbitLong/RepDistiller\n* torchdistill - https://github.com/yoshitomo-matsubara/torchdistill\n\n### Metric Learning\n* PyTorch Metric Learning - https://github.com/KevinMusgrave/pytorch-metric-learning\n\n### Training / Frameworks\n* fastai - https://github.com/fastai/fastai\n* lightly_train - https://github.com/lightly-ai/lightly-train\n\n### Deployment\n* timmx (Export timm models to ONNX, CoreML, LiteRT, TensorRT, and more) - https://github.com/Boulaouaney/timmx\n\n## Licenses\n\n### Code\nThe code here is licensed Apache 2.0. I've taken care to make sure any third party code included or adapted has compatible (permissive) licenses such as MIT, BSD, etc. I've made an effort to avoid any GPL / LGPL conflicts. That said, it is your responsibility to ensure you comply with licenses here and conditions of any dependent licenses. Where applicable, I've linked the sources/references for various components in docstrings. If you think I've missed anything please create an issue.\n\n### Pretrained Weights\nSo far all of the pretrained weights available here are pretrained on ImageNet with a select few that have some additional pretraining (see extra note below). ImageNet was released for non-commercial research purposes only (https://image-net.org/download). 
It's not clear what the implications of that are for the use of pretrained weights from that dataset. Any models I have trained with ImageNet are done for research purposes and one should assume that the original dataset license applies to the weights. It's best to seek legal advice if you intend to use the pretrained weights in a commercial product.\n\n#### Pretrained on more than ImageNet\nSeveral weights included or references here were pretrained with proprietary datasets that I do not have access to. These include the Facebook WSL, SSL, SWSL ResNe(Xt) and the Google Noisy Student EfficientNet models. The Facebook models have an explicit non-commercial license (CC-BY-NC 4.0, https://github.com/facebookresearch/semi-supervised-ImageNet1K-models, https://github.com/facebookresearch/WSL-Images). The Google models do not appear to have any restriction beyond the Apache 2.0 license (and ImageNet concerns). In either case, you should contact Facebook or Google with any questions.\n\n## Citing\n\n### BibTeX\n\n```bibtex\n@misc{rw2019timm,\n  author = {Ross Wightman},\n  title = {PyTorch Image Models},\n  year = {2019},\n  publisher = {GitHub},\n  journal = {GitHub repository},\n  doi = {10.5281/zenodo.4414861},\n  howpublished = {\\url{https://github.com/rwightman/pytorch-image-models}}\n}\n```\n\n### Latest DOI\n\n[![DOI](https://zenodo.org/badge/168799526.svg)](https://zenodo.org/badge/latestdoi/168799526)\n"
  },
  {
    "path": "UPGRADING.md",
    "content": "# Upgrading from previous versions\n\nI generally try to maintain code interface and especially model weight compatibility across many `timm` versions. Sometimes there are exceptions.\n\n## Checkpoint remapping\n\nPretrained weight remapping is handled by `checkpoint_filter_fn` in a model implementation module. This remaps old pretrained checkpoints to new, and also 3rd party (original) checkpoints to `timm` format if the model was modified when brought into `timm`.\n\nThe `checkpoint_filter_fn` is automatically called when loading pretrained weights via `pretrained=True`, but they can be called manually if you call the fn directly with the current model instance and old state dict.\n\n## Upgrading from 0.6 and earlier\n\nMany changes were made since the 0.6.x stable releases. They were previewed in 0.8.x dev releases but not everyone transitioned.\n* `timm.models.layers` moved to `timm.layers`:\n  * `from timm.models.layers import name` will still work via deprecation mapping (but please transition to `timm.layers`).\n  * `import timm.models.layers.module` or `from timm.models.layers.module import name` needs to be changed now.\n* Builder, helper, non-model modules in `timm.models` have a `_` prefix added, ie `timm.models.helpers` -> `timm.models._helpers`, there are temporary deprecation mapping files but those will be removed.\n* All models now support `architecture.pretrained_tag` naming (ex `resnet50.rsb_a1`).\n  * The pretrained_tag is the specific weight variant (different head) for the architecture.\n  * Using only `architecture` defaults to the first weights in the default_cfgs for that model architecture.\n  * In adding pretrained tags, many model names that existed to differentiate were renamed to use the tag  (ex: `vit_base_patch16_224_in21k` -> `vit_base_patch16_224.augreg_in21k`). 
There are deprecation mappings for these.\n* A number of models had their checkpoints remapped to match architecture changes needed to better support `features_only=True`, there are `checkpoint_filter_fn` methods in any model module that was remapped. These can be passed to `timm.models.load_checkpoint(..., filter_fn=timm.models.swin_transformer_v2.checkpoint_filter_fn)` to remap your existing checkpoint.\n* The Hugging Face Hub (https://huggingface.co/timm) is now the primary source for `timm` weights. Model cards include link to papers, original source, license. \n* Previous 0.6.x can be cloned from [0.6.x](https://github.com/rwightman/pytorch-image-models/tree/0.6.x) branch or installed via pip with version.\n"
  },
  {
    "path": "avg_checkpoints.py",
    "content": "#!/usr/bin/env python3\n\"\"\" Checkpoint Averaging Script\n\nThis script averages all model weights for checkpoints in specified path that match\nthe specified filter wildcard. All checkpoints must be from the exact same model.\n\nFor any hope of decent results, the checkpoints should be from the same or child\n(via resumes) training session. This can be viewed as similar to maintaining running\nEMA (exponential moving average) of the model weights or performing SWA (stochastic\nweight averaging), but post-training.\n\nHacked together by / Copyright 2020 Ross Wightman (https://github.com/rwightman)\n\"\"\"\nimport torch\nimport argparse\nimport os\nimport glob\nimport hashlib\nfrom timm.models import load_state_dict\nfrom timm.models._helpers import _torch_load\ntry:\n    import safetensors.torch\n    _has_safetensors = True\nexcept ImportError:\n    _has_safetensors = False\n\nDEFAULT_OUTPUT = \"./averaged.pth\"\nDEFAULT_SAFE_OUTPUT = \"./averaged.safetensors\"\n\nparser = argparse.ArgumentParser(description='PyTorch Checkpoint Averager')\nparser.add_argument('--input', default='', type=str, metavar='PATH',\n                    help='path to base input folder containing checkpoints')\nparser.add_argument('--filter', default='*.pth.tar', type=str, metavar='WILDCARD',\n                    help='checkpoint filter (path wildcard)')\nparser.add_argument('--output', default=DEFAULT_OUTPUT, type=str, metavar='PATH',\n                    help=f'Output filename. 
Defaults to {DEFAULT_SAFE_OUTPUT} when passing --safetensors.')\nparser.add_argument('--no-use-ema', dest='no_use_ema', action='store_true',\n                    help='Force not using ema version of weights (if present)')\nparser.add_argument('--no-sort', dest='no_sort', action='store_true',\n                    help='Do not sort and select by checkpoint metric, also makes \"n\" argument irrelevant')\nparser.add_argument('-n', type=int, default=10, metavar='N',\n                    help='Number of checkpoints to average')\nparser.add_argument('--safetensors', action='store_true',\n                    help='Save weights using safetensors instead of the default torch way (pickle).')\n\n\ndef checkpoint_metric(checkpoint_path):\n    if not checkpoint_path or not os.path.isfile(checkpoint_path):\n        return {}\n    print(\"=> Extracting metric from checkpoint '{}'\".format(checkpoint_path))\n    checkpoint = _torch_load(checkpoint_path, map_location='cpu', weights_only=True)\n    metric = None\n    if 'metric' in checkpoint:\n        metric = checkpoint['metric']\n    elif 'metrics' in checkpoint and 'metric_name' in checkpoint:\n        metrics = checkpoint['metrics']\n        print(metrics)\n        metric = metrics[checkpoint['metric_name']]\n    return metric\n\n\ndef main():\n    args = parser.parse_args()\n    # by default use the EMA weights (if present)\n    args.use_ema = not args.no_use_ema\n    # by default sort by checkpoint metric (if present) and avg top n checkpoints\n    args.sort = not args.no_sort\n\n    if args.safetensors and args.output == DEFAULT_OUTPUT:\n        # Default path changes if using safetensors\n        args.output = DEFAULT_SAFE_OUTPUT\n\n    output, output_ext = os.path.splitext(args.output)\n    if not output_ext:\n        output_ext = ('.safetensors' if args.safetensors else '.pth')\n    output = output + output_ext\n\n    if args.safetensors and not output_ext == \".safetensors\":\n        print(\n            \"Warning: saving 
weights as safetensors but output file extension is not \"\n            f\"set to '.safetensors': {args.output}\"\n        )\n\n    if os.path.exists(output):\n        print(\"Error: Output filename ({}) already exists.\".format(output))\n        exit(1)\n\n    pattern = args.input\n    if not args.input.endswith(os.path.sep) and not args.filter.startswith(os.path.sep):\n        pattern += os.path.sep\n    pattern += args.filter\n    checkpoints = glob.glob(pattern, recursive=True)\n\n    if args.sort:\n        checkpoint_metrics = []\n        for c in checkpoints:\n            metric = checkpoint_metric(c)\n            if metric is not None:\n                checkpoint_metrics.append((metric, c))\n        checkpoint_metrics = list(sorted(checkpoint_metrics))\n        checkpoint_metrics = checkpoint_metrics[-args.n:]\n        if checkpoint_metrics:\n            print(\"Selected checkpoints:\")\n            [print(m, c) for m, c in checkpoint_metrics]\n        avg_checkpoints = [c for m, c in checkpoint_metrics]\n    else:\n        avg_checkpoints = checkpoints\n        if avg_checkpoints:\n            print(\"Selected checkpoints:\")\n            [print(c) for c in checkpoints]\n\n    if not avg_checkpoints:\n        print('Error: No checkpoints found to average.')\n        exit(1)\n\n    avg_state_dict = {}\n    avg_counts = {}\n    for c in avg_checkpoints:\n        new_state_dict = load_state_dict(c, args.use_ema)\n        if not new_state_dict:\n            print(f\"Error: Checkpoint ({c}) doesn't exist\")\n            continue\n        for k, v in new_state_dict.items():\n            if k not in avg_state_dict:\n                avg_state_dict[k] = v.clone().to(dtype=torch.float64)\n                avg_counts[k] = 1\n            else:\n                avg_state_dict[k] += v.to(dtype=torch.float64)\n                avg_counts[k] += 1\n\n    for k, v in avg_state_dict.items():\n        v.div_(avg_counts[k])\n\n    # float32 overflow seems unlikely based on 
weights seen to date, but who knows\n    float32_info = torch.finfo(torch.float32)\n    final_state_dict = {}\n    for k, v in avg_state_dict.items():\n        v = v.clamp(float32_info.min, float32_info.max)\n        final_state_dict[k] = v.to(dtype=torch.float32)\n\n    if args.safetensors:\n        assert _has_safetensors, \"`pip install safetensors` to use .safetensors\"\n        safetensors.torch.save_file(final_state_dict, output)\n    else:\n        torch.save(final_state_dict, output)\n\n    with open(output, 'rb') as f:\n        sha_hash = hashlib.sha256(f.read()).hexdigest()\n    print(f\"=> Saved state_dict to '{output}', SHA256: {sha_hash}\")\n\n\nif __name__ == '__main__':\n    main()\n"
  },
  {
    "path": "benchmark.py",
    "content": "#!/usr/bin/env python3\n\"\"\" Model Benchmark Script\n\nAn inference and train step benchmark script for timm models.\n\nHacked together by Ross Wightman (https://github.com/rwightman)\n\"\"\"\nimport argparse\nimport csv\nimport json\nimport logging\nimport time\nfrom collections import OrderedDict\nfrom contextlib import suppress\nfrom functools import partial\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.parallel\n\nfrom timm.data import resolve_data_config\nfrom timm.layers import set_fast_norm\nfrom timm.models import create_model, is_model, list_models\nfrom timm.optim import create_optimizer_v2\nfrom timm.utils import setup_default_logging, set_jit_fuser, decay_batch_step, check_batch_size_retry, ParseKwargs,\\\n    reparameterize_model\n\ntry:\n    from deepspeed.profiling.flops_profiler import get_model_profile\n    has_deepspeed_profiling = True\nexcept ImportError as e:\n    has_deepspeed_profiling = False\n\ntry:\n    from fvcore.nn import FlopCountAnalysis, flop_count_str, ActivationCountAnalysis\n    has_fvcore_profiling = True\nexcept ImportError as e:\n    FlopCountAnalysis = None\n    has_fvcore_profiling = False\n\ntry:\n    from functorch.compile import memory_efficient_fusion\n    has_functorch = True\nexcept ImportError as e:\n    has_functorch = False\n\nhas_compile = hasattr(torch, 'compile')\n\nif torch.cuda.is_available():\n    torch.backends.cuda.matmul.allow_tf32 = True\n    torch.backends.cudnn.benchmark = True\n_logger = logging.getLogger('validate')\n\n\nparser = argparse.ArgumentParser(description='PyTorch Benchmark')\n\n# benchmark specific args\nparser.add_argument('--model-list', metavar='NAME', default='',\n                    help='txt file based list of model names to benchmark')\nparser.add_argument('--bench', default='both', type=str,\n                    help=\"Benchmark mode. One of 'inference', 'train', 'both'. 
Defaults to 'both'\")\nparser.add_argument('--detail', action='store_true', default=False,\n                    help='Provide train fwd/bwd/opt breakdown detail if True. Defaults to False')\nparser.add_argument('--no-retry', action='store_true', default=False,\n                    help='Do not decay batch size and retry on error.')\nparser.add_argument('--results-file', default='', type=str,\n                    help='Output csv file for validation results (summary)')\nparser.add_argument('--results-format', default='csv', type=str,\n                    help='Format for results file one of (csv, json) (default: csv).')\nparser.add_argument('--num-warm-iter', default=10, type=int,\n                    help='Number of warmup iterations (default: 10)')\nparser.add_argument('--num-bench-iter', default=40, type=int,\n                    help='Number of benchmark iterations (default: 40)')\nparser.add_argument('--device', default='cuda', type=str,\n                    help=\"device to run benchmark on\")\n\n# common inference / train args\nparser.add_argument('--model', '-m', metavar='NAME', default='resnet50',\n                    help='model architecture (default: resnet50)')\nparser.add_argument('-b', '--batch-size', default=256, type=int,\n                    metavar='N', help='mini-batch size (default: 256)')\nparser.add_argument('--img-size', default=None, type=int,\n                    metavar='N', help='Input image dimension, uses model default if empty')\nparser.add_argument('--input-size', default=None, nargs=3, type=int, metavar='N',\n                    help='Input all image dimensions (d h w, e.g. 
--input-size 3 224 224), uses model default if empty')\nparser.add_argument('--use-train-size', action='store_true', default=False,\n                    help='Run inference at train size, not test-input-size if it exists.')\nparser.add_argument('--num-classes', type=int, default=None,\n                    help='Number classes in dataset')\nparser.add_argument('--gp', default=None, type=str, metavar='POOL',\n                    help='Global pool type, one of (fast, avg, max, avgmax, avgmaxc). Model default if None.')\nparser.add_argument('--channels-last', action='store_true', default=False,\n                    help='Use channels_last memory layout')\nparser.add_argument('--grad-checkpointing', action='store_true', default=False,\n                    help='Enable gradient checkpointing through model blocks/stages')\nparser.add_argument('--amp', action='store_true', default=False,\n                    help='use PyTorch Native AMP for mixed precision training. Overrides --precision arg.')\nparser.add_argument('--amp-dtype', default='float16', type=str,\n                    help='lower precision AMP dtype (default: float16). Overrides --precision arg if args.amp True.')\nparser.add_argument('--precision', default='float32', type=str,\n                    help='Numeric precision. One of (amp, float32, float16, bfloat16, tf32)')\nparser.add_argument('--fuser', default='', type=str,\n                    help=\"Select jit fuser. 
One of ('', 'te', 'old', 'nvfuser')\")\nparser.add_argument('--fast-norm', default=False, action='store_true',\n                    help='enable experimental fast-norm')\nparser.add_argument('--reparam', default=False, action='store_true',\n                    help='Reparameterize model')\nparser.add_argument('--model-kwargs', nargs='*', default={}, action=ParseKwargs)\nparser.add_argument('--torchcompile-mode', type=str, default=None,\n                    help=\"torch.compile mode (default: None).\")\n\n# codegen (model compilation) options\nscripting_group = parser.add_mutually_exclusive_group()\nscripting_group.add_argument('--torchscript', dest='torchscript', action='store_true',\n                             help='convert model torchscript for inference')\nscripting_group.add_argument('--torchcompile', nargs='?', type=str, default=None, const='inductor',\n                             help=\"Enable compilation w/ specified backend (default: inductor).\")\nscripting_group.add_argument('--aot-autograd', default=False, action='store_true',\n                             help=\"Enable AOT Autograd optimization.\")\n\n# train optimizer parameters\nparser.add_argument('--opt', default='sgd', type=str, metavar='OPTIMIZER',\n                    help='Optimizer (default: \"sgd\"')\nparser.add_argument('--opt-eps', default=None, type=float, metavar='EPSILON',\n                    help='Optimizer Epsilon (default: None, use opt default)')\nparser.add_argument('--opt-betas', default=None, type=float, nargs='+', metavar='BETA',\n                    help='Optimizer Betas (default: None, use opt default)')\nparser.add_argument('--momentum', type=float, default=0.9, metavar='M',\n                    help='Optimizer momentum (default: 0.9)')\nparser.add_argument('--weight-decay', type=float, default=0.0001,\n                    help='weight decay (default: 0.0001)')\nparser.add_argument('--clip-grad', type=float, default=None, metavar='NORM',\n                    help='Clip 
gradient norm (default: None, no clipping)')\nparser.add_argument('--clip-mode', type=str, default='norm',\n                    help='Gradient clipping mode. One of (\"norm\", \"value\", \"agc\")')\n\n\n# model regularization / loss params that impact model or loss fn\nparser.add_argument('--smoothing', type=float, default=0.1,\n                    help='Label smoothing (default: 0.1)')\nparser.add_argument('--drop', type=float, default=0.0, metavar='PCT',\n                    help='Dropout rate (default: 0.)')\nparser.add_argument('--drop-path', type=float, default=None, metavar='PCT',\n                    help='Drop path rate (default: None)')\nparser.add_argument('--drop-block', type=float, default=None, metavar='PCT',\n                    help='Drop block rate (default: None)')\n\n\ndef timestamp(sync=False):\n    return time.perf_counter()\n\n\ndef cuda_timestamp(sync=False, device=None):\n    if sync:\n        torch.cuda.synchronize(device=device)\n    return time.perf_counter()\n\n\ndef count_params(model: nn.Module):\n    return sum([m.numel() for m in model.parameters()])\n\n\ndef resolve_precision(precision: str):\n    assert precision in ('amp', 'amp_bfloat16', 'float16', 'bfloat16', 'float32')\n    amp_dtype = None  # amp disabled\n    model_dtype = torch.float32\n    data_dtype = torch.float32\n    if precision == 'amp':\n        amp_dtype = torch.float16\n    elif precision == 'amp_bfloat16':\n        amp_dtype = torch.bfloat16\n    elif precision == 'float16':\n        model_dtype = torch.float16\n        data_dtype = torch.float16\n    elif precision == 'bfloat16':\n        model_dtype = torch.bfloat16\n        data_dtype = torch.bfloat16\n    return amp_dtype, model_dtype, data_dtype\n\n\ndef profile_deepspeed(model, input_size=(3, 224, 224), batch_size=1, detailed=False):\n    _, macs, _ = get_model_profile(\n        model=model,\n        input_shape=(batch_size,) + input_size,  # input shape/resolution\n        print_profile=detailed,  # prints 
the model graph with the measured profile attached to each module\n        detailed=detailed,  # print the detailed profile\n        warm_up=10,  # the number of warm-ups before measuring the time of each module\n        as_string=False,  # print raw numbers (e.g. 1000) or as human-readable strings (e.g. 1k)\n        output_file=None,  # path to the output file. If None, the profiler prints to stdout.\n        ignore_modules=None)  # the list of modules to ignore in the profiling\n    return macs, 0  # no activation count in DS\n\n\ndef profile_fvcore(model, input_size=(3, 224, 224), batch_size=1, detailed=False, force_cpu=False):\n    if force_cpu:\n        model = model.to('cpu')\n    device, dtype = next(model.parameters()).device, next(model.parameters()).dtype\n    example_input = torch.ones((batch_size,) + input_size, device=device, dtype=dtype)\n    fca = FlopCountAnalysis(model, example_input)\n    aca = ActivationCountAnalysis(model, example_input)\n    if detailed:\n        fcs = flop_count_str(fca)\n        print(fcs)\n    return fca.total(), aca.total()\n\n\nclass BenchmarkRunner:\n    def __init__(\n            self,\n            model_name,\n            detail=False,\n            device='cuda',\n            torchscript=False,\n            torchcompile=None,\n            torchcompile_mode=None,\n            aot_autograd=False,\n            reparam=False,\n            precision='float32',\n            fuser='',\n            num_warm_iter=10,\n            num_bench_iter=50,\n            use_train_size=False,\n            **kwargs\n    ):\n        self.model_name = model_name\n        self.detail = detail\n        self.device = device\n        self.amp_dtype, self.model_dtype, self.data_dtype = resolve_precision(precision)\n        self.channels_last = kwargs.pop('channels_last', False)\n        if self.amp_dtype is not None:\n            self.amp_autocast = partial(torch.amp.autocast, device_type=device, dtype=self.amp_dtype)\n        else:\n            
self.amp_autocast = suppress\n\n        if fuser:\n            set_jit_fuser(fuser)\n        self.model = create_model(\n            model_name,\n            num_classes=kwargs.pop('num_classes', None),\n            in_chans=3,\n            global_pool=kwargs.pop('gp', 'fast'),\n            scriptable=torchscript,\n            drop_rate=kwargs.pop('drop', 0.),\n            drop_path_rate=kwargs.pop('drop_path', None),\n            drop_block_rate=kwargs.pop('drop_block', None),\n            **kwargs.pop('model_kwargs', {}),\n        )\n        if reparam:\n            self.model = reparameterize_model(self.model)\n        self.model.to(\n            device=self.device,\n            dtype=self.model_dtype,\n            memory_format=torch.channels_last if self.channels_last else None,\n        )\n        self.num_classes = self.model.num_classes\n        self.param_count = count_params(self.model)\n        _logger.info('Model %s created, param count: %d' % (model_name, self.param_count))\n\n        data_config = resolve_data_config(kwargs, model=self.model, use_test_size=not use_train_size)\n        self.input_size = data_config['input_size']\n        self.batch_size = kwargs.pop('batch_size', 256)\n\n        self.compiled = False\n        if torchscript:\n            self.model = torch.jit.script(self.model)\n            self.compiled = True\n        elif torchcompile:\n            assert has_compile, 'A version of torch w/ torch.compile() is required, possibly a nightly.'\n            torch._dynamo.reset()\n            self.model = torch.compile(self.model, backend=torchcompile, mode=torchcompile_mode)\n            self.compiled = True\n        elif aot_autograd:\n            assert has_functorch, \"functorch is needed for --aot-autograd\"\n            self.model = memory_efficient_fusion(self.model)\n            self.compiled = True\n\n        self.example_inputs = None\n        self.num_warm_iter = num_warm_iter\n        self.num_bench_iter = num_bench_iter\n    
    self.log_freq = num_bench_iter // 5\n        if 'cuda' in self.device:\n            self.time_fn = partial(cuda_timestamp, device=self.device)\n        else:\n            self.time_fn = timestamp\n\n    def _init_input(self):\n        self.example_inputs = torch.randn(\n            (self.batch_size,) + self.input_size, device=self.device, dtype=self.data_dtype)\n        if self.channels_last:\n            self.example_inputs = self.example_inputs.contiguous(memory_format=torch.channels_last)\n\n\nclass InferenceBenchmarkRunner(BenchmarkRunner):\n\n    def __init__(\n            self,\n            model_name,\n            device='cuda',\n            torchscript=False,\n            **kwargs\n    ):\n        super().__init__(model_name=model_name, device=device, torchscript=torchscript, **kwargs)\n        self.model.eval()\n\n    def run(self):\n        def _step():\n            t_step_start = self.time_fn()\n            with self.amp_autocast():\n                output = self.model(self.example_inputs)\n            t_step_end = self.time_fn(True)\n            return t_step_end - t_step_start\n\n        _logger.info(\n            f'Running inference benchmark on {self.model_name} for {self.num_bench_iter} steps w/ '\n            f'input size {self.input_size} and batch size {self.batch_size}.')\n\n        with torch.inference_mode():\n            self._init_input()\n\n            for _ in range(self.num_warm_iter):\n                _step()\n\n            total_step = 0.\n            num_samples = 0\n            t_run_start = self.time_fn()\n            for i in range(self.num_bench_iter):\n                delta_fwd = _step()\n                total_step += delta_fwd\n                num_samples += self.batch_size\n                num_steps = i + 1\n                if num_steps % self.log_freq == 0:\n                    _logger.info(\n                        f\"Infer [{num_steps}/{self.num_bench_iter}].\"\n                        f\" {num_samples / total_step:0.2f} 
samples/sec.\"\n                        f\" {1000 * total_step / num_steps:0.3f} ms/step.\")\n            t_run_end = self.time_fn(True)\n            t_run_elapsed = t_run_end - t_run_start\n\n        results = dict(\n            samples_per_sec=round(num_samples / t_run_elapsed, 2),\n            step_time=round(1000 * total_step / self.num_bench_iter, 3),\n            batch_size=self.batch_size,\n            img_size=self.input_size[-1],\n            param_count=round(self.param_count / 1e6, 2),\n        )\n\n        retries = 0 if self.compiled else 2  # skip profiling if model is scripted\n        while retries:\n            retries -= 1\n            try:\n                if has_deepspeed_profiling:\n                    macs, _ = profile_deepspeed(self.model, self.input_size)\n                    results['gmacs'] = round(macs / 1e9, 2)\n                elif has_fvcore_profiling:\n                    macs, activations = profile_fvcore(self.model, self.input_size, force_cpu=not retries)\n                    results['gmacs'] = round(macs / 1e9, 2)\n                    results['macts'] = round(activations / 1e6, 2)\n            except RuntimeError as e:\n                pass\n\n        _logger.info(\n            f\"Inference benchmark of {self.model_name} done. 
\"\n            f\"{results['samples_per_sec']:.2f} samples/sec, {results['step_time']:.2f} ms/step\")\n\n        return results\n\n\nclass TrainBenchmarkRunner(BenchmarkRunner):\n\n    def __init__(\n            self,\n            model_name,\n            device='cuda',\n            torchscript=False,\n            **kwargs\n    ):\n        super().__init__(model_name=model_name, device=device, torchscript=torchscript, **kwargs)\n        self.model.train()\n\n        self.loss = nn.CrossEntropyLoss().to(self.device)\n        self.target_shape = tuple()\n\n        self.optimizer = create_optimizer_v2(\n            self.model,\n            opt=kwargs.pop('opt', 'sgd'),\n            lr=kwargs.pop('lr', 1e-4))\n\n        if kwargs.pop('grad_checkpointing', False):\n            self.model.set_grad_checkpointing()\n\n    def _gen_target(self, batch_size):\n        return torch.empty(\n            (batch_size,) + self.target_shape, device=self.device, dtype=torch.long).random_(self.num_classes)\n\n    def run(self):\n        def _step(detail=False):\n            self.optimizer.zero_grad()  # can this be ignored?\n            t_start = self.time_fn()\n            t_fwd_end = t_start\n            t_bwd_end = t_start\n            with self.amp_autocast():\n                output = self.model(self.example_inputs)\n                if isinstance(output, tuple):\n                    output = output[0]\n                if detail:\n                    t_fwd_end = self.time_fn(True)\n                target = self._gen_target(output.shape[0])\n                self.loss(output, target).backward()\n                if detail:\n                    t_bwd_end = self.time_fn(True)\n            self.optimizer.step()\n            t_end = self.time_fn(True)\n            if detail:\n                delta_fwd = t_fwd_end - t_start\n                delta_bwd = t_bwd_end - t_fwd_end\n                delta_opt = t_end - t_bwd_end\n                return delta_fwd, delta_bwd, delta_opt\n            
else:\n                delta_step = t_end - t_start\n                return delta_step\n\n        _logger.info(\n            f'Running train benchmark on {self.model_name} for {self.num_bench_iter} steps w/ '\n            f'input size {self.input_size} and batch size {self.batch_size}.')\n\n        self._init_input()\n\n        for _ in range(self.num_warm_iter):\n            _step()\n\n        t_run_start = self.time_fn()\n        if self.detail:\n            total_fwd = 0.\n            total_bwd = 0.\n            total_opt = 0.\n            num_samples = 0\n            for i in range(self.num_bench_iter):\n                delta_fwd, delta_bwd, delta_opt = _step(True)\n                num_samples += self.batch_size\n                total_fwd += delta_fwd\n                total_bwd += delta_bwd\n                total_opt += delta_opt\n                num_steps = (i + 1)\n                if num_steps % self.log_freq == 0:\n                    total_step = total_fwd + total_bwd + total_opt\n                    _logger.info(\n                        f\"Train [{num_steps}/{self.num_bench_iter}].\"\n                        f\" {num_samples / total_step:0.2f} samples/sec.\"\n                        f\" {1000 * total_fwd / num_steps:0.3f} ms/step fwd,\"\n                        f\" {1000 * total_bwd / num_steps:0.3f} ms/step bwd,\"\n                        f\" {1000 * total_opt / num_steps:0.3f} ms/step opt.\"\n                    )\n            total_step = total_fwd + total_bwd + total_opt\n            t_run_elapsed = self.time_fn() - t_run_start\n            results = dict(\n                samples_per_sec=round(num_samples / t_run_elapsed, 2),\n                step_time=round(1000 * total_step / self.num_bench_iter, 3),\n                fwd_time=round(1000 * total_fwd / self.num_bench_iter, 3),\n                bwd_time=round(1000 * total_bwd / self.num_bench_iter, 3),\n                opt_time=round(1000 * total_opt / self.num_bench_iter, 3),\n                
batch_size=self.batch_size,\n                img_size=self.input_size[-1],\n                param_count=round(self.param_count / 1e6, 2),\n            )\n        else:\n            total_step = 0.\n            num_samples = 0\n            for i in range(self.num_bench_iter):\n                delta_step = _step(False)\n                num_samples += self.batch_size\n                total_step += delta_step\n                num_steps = (i + 1)\n                if num_steps % self.log_freq == 0:\n                    _logger.info(\n                        f\"Train [{num_steps}/{self.num_bench_iter}].\"\n                        f\" {num_samples / total_step:0.2f} samples/sec.\"\n                        f\" {1000 * total_step / num_steps:0.3f} ms/step.\")\n            t_run_elapsed = self.time_fn() - t_run_start\n            results = dict(\n                samples_per_sec=round(num_samples / t_run_elapsed, 2),\n                step_time=round(1000 * total_step / self.num_bench_iter, 3),\n                batch_size=self.batch_size,\n                img_size=self.input_size[-1],\n                param_count=round(self.param_count / 1e6, 2),\n            )\n\n        _logger.info(\n            f\"Train benchmark of {self.model_name} done. 
\"\n            f\"{results['samples_per_sec']:.2f} samples/sec, {results['step_time']:.2f} ms/sample\")\n\n        return results\n\n\nclass ProfileRunner(BenchmarkRunner):\n\n    def __init__(self, model_name, device='cuda', profiler='', **kwargs):\n        super().__init__(model_name=model_name, device=device, **kwargs)\n        if not profiler:\n            if has_deepspeed_profiling:\n                profiler = 'deepspeed'\n            elif has_fvcore_profiling:\n                profiler = 'fvcore'\n        assert profiler, \"One of deepspeed or fvcore needs to be installed for profiling to work.\"\n        self.profiler = profiler\n        self.model.eval()\n\n    def run(self):\n        _logger.info(\n            f'Running profiler on {self.model_name} w/ '\n            f'input size {self.input_size} and batch size {self.batch_size}.')\n\n        macs = 0\n        activations = 0\n        if self.profiler == 'deepspeed':\n            macs, _ = profile_deepspeed(self.model, self.input_size, batch_size=self.batch_size, detailed=True)\n        elif self.profiler == 'fvcore':\n            macs, activations = profile_fvcore(self.model, self.input_size, batch_size=self.batch_size, detailed=True)\n\n        results = dict(\n            gmacs=round(macs / 1e9, 2),\n            macts=round(activations / 1e6, 2),\n            batch_size=self.batch_size,\n            img_size=self.input_size[-1],\n            param_count=round(self.param_count / 1e6, 2),\n        )\n\n        _logger.info(\n            f\"Profile of {self.model_name} done. 
\"\n            f\"{results['gmacs']:.2f} GMACs, {results['param_count']:.2f} M params.\")\n\n        return results\n\n\ndef _try_run(\n        model_name,\n        bench_fn,\n        bench_kwargs,\n        initial_batch_size,\n        no_batch_size_retry=False\n):\n    batch_size = initial_batch_size\n    results = dict()\n    error_str = 'Unknown'\n    while batch_size:\n        try:\n            torch.cuda.empty_cache()\n            bench = bench_fn(model_name=model_name, batch_size=batch_size, **bench_kwargs)\n            results = bench.run()\n            return results\n        except RuntimeError as e:\n            error_str = str(e)\n            _logger.error(f'\"{error_str}\" while running benchmark.')\n            if not check_batch_size_retry(error_str):\n                _logger.error(f'Unrecoverable error encountered while benchmarking {model_name}, skipping.')\n                break\n            if no_batch_size_retry:\n                break\n        batch_size = decay_batch_step(batch_size)\n        _logger.warning(f'Reducing batch size to {batch_size} for retry.')\n    results['error'] = error_str\n    return results\n\n\ndef benchmark(args):\n    if args.amp:\n        _logger.warning(\"Overriding precision to 'amp' since --amp flag set.\")\n        args.precision = 'amp' if args.amp_dtype == 'float16' else '_'.join(['amp', args.amp_dtype])\n    _logger.info(f'Benchmarking in {args.precision} precision. '\n                 f'{\"NHWC\" if args.channels_last else \"NCHW\"} layout. 
'\n                 f'torchscript {\"enabled\" if args.torchscript else \"disabled\"}')\n\n    bench_kwargs = vars(args).copy()\n    bench_kwargs.pop('amp')\n    model = bench_kwargs.pop('model')\n    batch_size = bench_kwargs.pop('batch_size')\n\n    bench_fns = (InferenceBenchmarkRunner,)\n    prefixes = ('infer',)\n    if args.bench == 'both':\n        bench_fns = (\n            InferenceBenchmarkRunner,\n            TrainBenchmarkRunner\n        )\n        prefixes = ('infer', 'train')\n    elif args.bench == 'train':\n        bench_fns = TrainBenchmarkRunner,\n        prefixes = 'train',\n    elif args.bench.startswith('profile'):\n        # specific profiler used if included in bench mode string, otherwise default to deepspeed, fallback to fvcore\n        if 'deepspeed' in args.bench:\n            assert has_deepspeed_profiling, \"deepspeed must be installed to use deepspeed flop counter\"\n            bench_kwargs['profiler'] = 'deepspeed'\n        elif 'fvcore' in args.bench:\n            assert has_fvcore_profiling, \"fvcore must be installed to use fvcore flop counter\"\n            bench_kwargs['profiler'] = 'fvcore'\n        bench_fns = ProfileRunner,\n        batch_size = 1\n\n    model_results = OrderedDict(model=model)\n    for prefix, bench_fn in zip(prefixes, bench_fns):\n        run_results = _try_run(\n            model,\n            bench_fn,\n            bench_kwargs=bench_kwargs,\n            initial_batch_size=batch_size,\n            no_batch_size_retry=args.no_retry,\n        )\n        if prefix and 'error' not in run_results:\n            run_results = {'_'.join([prefix, k]): v for k, v in run_results.items()}\n        model_results.update(run_results)\n        if 'error' in run_results:\n            break\n    if 'error' not in model_results:\n        param_count = model_results.pop('infer_param_count', model_results.pop('train_param_count', 0))\n        model_results.setdefault('param_count', param_count)\n        
model_results.pop('train_param_count', 0)\n    return model_results\n\n\ndef main():\n    setup_default_logging()\n    args = parser.parse_args()\n    model_cfgs = []\n    model_names = []\n\n    if args.fast_norm:\n        set_fast_norm()\n\n    if args.model_list:\n        args.model = ''\n        with open(args.model_list) as f:\n            model_names = [line.rstrip() for line in f]\n        model_cfgs = [(n, None) for n in model_names]\n    elif args.model == 'all':\n        # validate all models in a list of names with pretrained checkpoints\n        args.pretrained = True\n        model_names = list_models(pretrained=True, exclude_filters=['*in21k'])\n        model_cfgs = [(n, None) for n in model_names]\n    elif not is_model(args.model):\n        # model name doesn't exist, try as wildcard filter\n        model_names = list_models(args.model)\n        model_cfgs = [(n, None) for n in model_names]\n\n    if len(model_cfgs):\n        _logger.info('Running bulk validation on these pretrained models: {}'.format(', '.join(model_names)))\n        results = []\n        try:\n            for m, _ in model_cfgs:\n                if not m:\n                    continue\n                args.model = m\n                r = benchmark(args)\n                if r:\n                    results.append(r)\n                time.sleep(10)\n        except KeyboardInterrupt as e:\n            pass\n        sort_key = 'infer_samples_per_sec'\n        if 'train' in args.bench:\n            sort_key = 'train_samples_per_sec'\n        elif 'profile' in args.bench:\n            sort_key = 'infer_gmacs'\n        results = filter(lambda x: sort_key in x, results)\n        results = sorted(results, key=lambda x: x[sort_key], reverse=True)\n    else:\n        results = benchmark(args)\n\n    if args.results_file:\n        write_results(args.results_file, results, format=args.results_format)\n\n    # output results in JSON to stdout w/ delimiter for runner script\n    
print(f'--result\\n{json.dumps(results, indent=4)}')\n\n\ndef write_results(results_file, results, format='csv'):\n    with open(results_file, mode='w') as cf:\n        if format == 'json':\n            json.dump(results, cf, indent=4)\n        else:\n            if not isinstance(results, (list, tuple)):\n                results = [results]\n            if not results:\n                return\n            dw = csv.DictWriter(cf, fieldnames=results[0].keys())\n            dw.writeheader()\n            for r in results:\n                dw.writerow(r)\n            cf.flush()\n\n\nif __name__ == '__main__':\n    main()\n"
  },
  {
    "path": "bulk_runner.py",
    "content": "#!/usr/bin/env python3\n\"\"\" Bulk Model Script Runner\n\nRun validation or benchmark script in separate process for each model\n\nBenchmark all 'vit*' models:\npython bulk_runner.py  --model-list 'vit*' --results-file vit_bench.csv benchmark.py --amp -b 512\n\nValidate all models:\npython bulk_runner.py  --model-list all --results-file val.csv --pretrained validate.py --data-dir /imagenet/validation/ --amp -b 512 --retry\n\nHacked together by Ross Wightman (https://github.com/rwightman)\n\"\"\"\nimport argparse\nimport os\nimport sys\nimport csv\nimport json\nimport subprocess\nimport time\nfrom typing import Callable, List, Tuple, Union\n\n\nfrom timm.models import is_model, list_models, get_pretrained_cfg, get_arch_pretrained_cfgs\n\n\nparser = argparse.ArgumentParser(description='Per-model process launcher')\n\n# model and results args\nparser.add_argument(\n    '--model-list', metavar='NAME', default='',\n    help='txt file based list of model names to benchmark')\nparser.add_argument(\n    '--results-file', default='', type=str, metavar='FILENAME',\n    help='Output csv file for validation results (summary)')\nparser.add_argument(\n    '--sort-key', default='', type=str, metavar='COL',\n    help='Specify sort key for results csv')\nparser.add_argument(\n    \"--pretrained\", action='store_true',\n    help=\"only run models with pretrained weights\")\n\nparser.add_argument(\n    \"--delay\",\n    type=float,\n    default=0,\n    help=\"Interval, in seconds, to delay between model invocations.\",\n)\nparser.add_argument(\n    \"--start_method\", type=str, default=\"spawn\", choices=[\"spawn\", \"fork\", \"forkserver\"],\n    help=\"Multiprocessing start method to use when creating workers.\",\n)\nparser.add_argument(\n    \"--no_python\",\n    help=\"Skip prepending the script with 'python' - just execute it directly. 
Useful \"\n         \"when the script is not a Python script.\",\n)\nparser.add_argument(\n    \"-m\",\n    \"--module\",\n    help=\"Change each process to interpret the launch script as a Python module, executing \"\n         \"with the same behavior as 'python -m'.\",\n)\n\n# positional\nparser.add_argument(\n    \"script\", type=str,\n    help=\"Full path to the program/script to be launched for each model config.\",\n)\nparser.add_argument(\"script_args\", nargs=argparse.REMAINDER)\n\n\ndef cmd_from_args(args) -> Tuple[Union[Callable, str], List[str]]:\n    # If ``args`` not passed, defaults to ``sys.argv[:1]``\n    with_python = not args.no_python\n    cmd: Union[Callable, str]\n    cmd_args = []\n    if with_python:\n        cmd = os.getenv(\"PYTHON_EXEC\", sys.executable)\n        cmd_args.append(\"-u\")\n        if args.module:\n            cmd_args.append(\"-m\")\n        cmd_args.append(args.script)\n    else:\n        if args.module:\n            raise ValueError(\n                \"Don't use both the '--no_python' flag\"\n                \" and the '--module' flag at the same time.\"\n            )\n        cmd = args.script\n    cmd_args.extend(args.script_args)\n\n    return cmd, cmd_args\n\n\ndef _get_model_cfgs(\n        model_names,\n        num_classes=None,\n        expand_train_test=False,\n        include_crop=True,\n        expand_arch=False,\n):\n    model_cfgs = set()\n\n    for name in model_names:\n        if expand_arch:\n            pt_cfgs = get_arch_pretrained_cfgs(name).values()\n        else:\n            pt_cfg = get_pretrained_cfg(name)\n            pt_cfgs = [pt_cfg] if pt_cfg is not None else []\n\n        for cfg in pt_cfgs:\n            if cfg.input_size is None:\n                continue\n            if num_classes is not None and getattr(cfg, 'num_classes', 0) != num_classes:\n                continue\n\n            # Add main configuration\n            size = cfg.input_size[-1]\n            if include_crop:\n                
model_cfgs.add((name, size, cfg.crop_pct))\n            else:\n                model_cfgs.add((name, size))\n\n            # Add test configuration if required\n            if expand_train_test and cfg.test_input_size is not None:\n                test_size = cfg.test_input_size[-1]\n                if include_crop:\n                    test_crop = cfg.test_crop_pct or cfg.crop_pct\n                    model_cfgs.add((name, test_size, test_crop))\n                else:\n                    model_cfgs.add((name, test_size))\n\n    # Format the output\n    if include_crop:\n        return [(n, {'img-size': r, 'crop-pct': cp}) for n, r, cp in sorted(model_cfgs)]\n    else:\n        return [(n, {'img-size': r}) for n, r in sorted(model_cfgs)]\n\n\ndef main():\n    args = parser.parse_args()\n    cmd, cmd_args = cmd_from_args(args)\n\n    model_cfgs = []\n    if args.model_list == 'all':\n        model_names = list_models(\n            pretrained=args.pretrained,  # only include models w/ pretrained checkpoints if set\n        )\n        model_cfgs = [(n, None) for n in model_names]\n    elif args.model_list == 'all_in1k':\n        model_names = list_models(pretrained=True)\n        model_cfgs = _get_model_cfgs(model_names, num_classes=1000, expand_train_test=True)\n    elif args.model_list == 'all_res':\n        model_names = list_models()\n        model_cfgs = _get_model_cfgs(model_names, expand_train_test=True, include_crop=False, expand_arch=True)\n    elif not is_model(args.model_list):\n        # model name doesn't exist, try as wildcard filter\n        model_names = list_models(args.model_list)\n        model_cfgs = [(n, None) for n in model_names]\n\n    if not model_cfgs and os.path.exists(args.model_list):\n        with open(args.model_list) as f:\n            model_names = [line.rstrip() for line in f]\n            model_cfgs = _get_model_cfgs(\n                model_names,\n                #num_classes=1000,\n                expand_train_test=True,\n         
       #include_crop=False,\n            )\n\n    if len(model_cfgs):\n        results_file = args.results_file or './results.csv'\n        results = []\n        errors = []\n        model_strings = '\\n'.join([f'{x[0]}, {x[1]}' for x in model_cfgs])\n        print(f\"Running script on these models:\\n {model_strings}\")\n        if not args.sort_key:\n            if 'benchmark' in args.script:\n                if any(['train' in a for a in args.script_args]):\n                    sort_key = 'train_samples_per_sec'\n                else:\n                    sort_key = 'infer_samples_per_sec'\n            else:\n                sort_key = 'top1'\n        else:\n            sort_key = args.sort_key\n        print(f'Script: {args.script}, Args: {args.script_args}, Sort key: {sort_key}')\n\n        try:\n            for m, ax in model_cfgs:\n                if not m:\n                    continue\n                args_str = (cmd, *[str(e) for e in cmd_args], '--model', m)\n                if ax is not None:\n                    extra_args = [(f'--{k}', str(v)) for k, v in ax.items()]\n                    extra_args = [i for t in extra_args for i in t]\n                    args_str += tuple(extra_args)\n                try:\n                    o = subprocess.check_output(args=args_str).decode('utf-8').split('--result')[-1]\n                    r = json.loads(o)\n                    results.append(r)\n                except Exception as e:\n                    # FIXME batch_size retry loop is currently done in either validation.py or benchmark.py\n                    # for further robustness (but more overhead), we may want to manage that by looping here...\n                    errors.append(dict(model=m, error=str(e)))\n                if args.delay:\n                    time.sleep(args.delay)\n        except KeyboardInterrupt as e:\n            pass\n\n        errors.extend(list(filter(lambda x: 'error' in x, results)))\n        if errors:\n            
print(f'{len(errors)} models had errors during run.')\n            for e in errors:\n                if 'model' in e:\n                    print(f\"\\t {e['model']} ({e.get('error', 'Unknown')})\")\n                else:\n                    print(e)\n\n        results = list(filter(lambda x: 'error' not in x, results))\n\n        no_sortkey = list(filter(lambda x: sort_key not in x, results))\n        if no_sortkey:\n            print(f'{len(no_sortkey)} results missing sort key, skipping sort.')\n        else:\n            results = sorted(results, key=lambda x: x[sort_key], reverse=True)\n\n        if len(results):\n            print(f'{len(results)} models run successfully. Saving results to {results_file}.')\n            write_results(results_file, results)\n\n\ndef write_results(results_file, results):\n    with open(results_file, mode='w') as cf:\n        dw = csv.DictWriter(cf, fieldnames=results[0].keys())\n        dw.writeheader()\n        for r in results:\n            dw.writerow(r)\n        cf.flush()\n\n\nif __name__ == '__main__':\n    main()\n"
  },
  {
    "path": "clean_checkpoint.py",
    "content": "#!/usr/bin/env python3\n\"\"\" Checkpoint Cleaning Script\n\nTakes training checkpoints with GPU tensors, optimizer state, extra dict keys, etc.\nand outputs a CPU  tensor checkpoint with only the `state_dict` along with SHA256\ncalculation for model zoo compatibility.\n\nHacked together by / Copyright 2020 Ross Wightman (https://github.com/rwightman)\n\"\"\"\nimport torch\nimport argparse\nimport os\nimport hashlib\nimport shutil\nimport tempfile\nfrom timm.models import load_state_dict\ntry:\n    import safetensors.torch\n    _has_safetensors = True\nexcept ImportError:\n    _has_safetensors = False\n\nparser = argparse.ArgumentParser(description='PyTorch Checkpoint Cleaner')\nparser.add_argument('--checkpoint', default='', type=str, metavar='PATH',\n                    help='path to latest checkpoint (default: none)')\nparser.add_argument('--output', default='', type=str, metavar='PATH',\n                    help='output path')\nparser.add_argument('--no-use-ema', dest='no_use_ema', action='store_true',\n                    help='use ema version of weights if present')\nparser.add_argument('--no-hash', dest='no_hash', action='store_true',\n                    help='no hash in output filename')\nparser.add_argument('--clean-aux-bn', dest='clean_aux_bn', action='store_true',\n                    help='remove auxiliary batch norm layers (from SplitBN training) from checkpoint')\nparser.add_argument('--safetensors', action='store_true',\n                    help='Save weights using safetensors instead of the default torch way (pickle).')\n\n\ndef main():\n    args = parser.parse_args()\n\n    if os.path.exists(args.output):\n        print(\"Error: Output filename ({}) already exists.\".format(args.output))\n        exit(1)\n\n    clean_checkpoint(\n        args.checkpoint,\n        args.output,\n        not args.no_use_ema,\n        args.no_hash,\n        args.clean_aux_bn,\n        safe_serialization=args.safetensors,\n    )\n\n\ndef 
clean_checkpoint(\n        checkpoint,\n        output,\n        use_ema=True,\n        no_hash=False,\n        clean_aux_bn=False,\n        safe_serialization: bool=False,\n):\n    # Load an existing checkpoint to CPU, strip everything but the state_dict and re-save\n    if checkpoint and os.path.isfile(checkpoint):\n        print(\"=> Loading checkpoint '{}'\".format(checkpoint))\n        state_dict = load_state_dict(checkpoint, use_ema=use_ema)\n        new_state_dict = {}\n        for k, v in state_dict.items():\n            if clean_aux_bn and 'aux_bn' in k:\n                # If all aux_bn keys are removed, the SplitBN layers will end up as normal and\n                # load with the unmodified model using BatchNorm2d.\n                continue\n            name = k[7:] if k.startswith('module.') else k\n            new_state_dict[name] = v\n        print(\"=> Loaded state_dict from '{}'\".format(checkpoint))\n\n        ext = ''\n        if output:\n            checkpoint_root, checkpoint_base = os.path.split(output)\n            checkpoint_base, ext = os.path.splitext(checkpoint_base)\n        else:\n            checkpoint_root = ''\n            checkpoint_base = os.path.split(checkpoint)[1]\n            checkpoint_base = os.path.splitext(checkpoint_base)[0]\n\n        temp_filename = '__' + checkpoint_base\n        if safe_serialization:\n            assert _has_safetensors, \"`pip install safetensors` to use .safetensors\"\n            safetensors.torch.save_file(new_state_dict, temp_filename)\n        else:\n            torch.save(new_state_dict, temp_filename)\n\n        with open(temp_filename, 'rb') as f:\n            sha_hash = hashlib.sha256(f.read()).hexdigest()\n\n        if ext:\n            final_ext = ext\n        else:\n            final_ext = ('.safetensors' if safe_serialization else '.pth')\n\n        if no_hash:\n            final_filename = checkpoint_base + final_ext\n        else:\n            final_filename = '-'.join([checkpoint_base, 
sha_hash[:8]]) + final_ext\n\n        shutil.move(temp_filename, os.path.join(checkpoint_root, final_filename))\n        print(\"=> Saved state_dict to '{}, SHA256: {}'\".format(final_filename, sha_hash))\n        return final_filename\n    else:\n        print(\"Error: Checkpoint ({}) doesn't exist\".format(checkpoint))\n        return ''\n\n\nif __name__ == '__main__':\n    main()\n"
  },
  {
    "path": "convert/convert_from_mxnet.py",
    "content": "import argparse\nimport hashlib\nimport os\n\nimport mxnet as mx\nimport gluoncv\nimport torch\nfrom timm import create_model\n\nparser = argparse.ArgumentParser(description='Convert from MXNet')\nparser.add_argument('--model', default='all', type=str, metavar='MODEL',\n                    help='Name of model to train (default: \"all\"')\n\n\ndef convert(mxnet_name, torch_name):\n    # download and load the pre-trained model\n    net = gluoncv.model_zoo.get_model(mxnet_name, pretrained=True)\n\n    # create corresponding torch model\n    torch_net = create_model(torch_name)\n\n    mxp = [(k, v) for k, v in net.collect_params().items() if 'running' not in k]\n    torchp = list(torch_net.named_parameters())\n    torch_params = {}\n\n    # convert parameters\n    # NOTE: we are relying on the fact that the order of parameters\n    # are usually exactly the same between these models, thus no key name mapping\n    # is necessary. Asserts will trip if this is not the case.\n    for (tn, tv), (mn, mv) in zip(torchp, mxp):\n        m_split = mn.split('_')\n        t_split = tn.split('.')\n        print(t_split, m_split)\n        print(tv.shape, mv.shape)\n\n        # ensure ordering of BN params match since their sizes are not specific\n        if m_split[-1] == 'gamma':\n            assert t_split[-1] == 'weight'\n        if m_split[-1] == 'beta':\n            assert t_split[-1] == 'bias'\n\n        # ensure shapes match\n        assert all(t == m for t, m in zip(tv.shape, mv.shape))\n\n        torch_tensor = torch.from_numpy(mv.data().asnumpy())\n        torch_params[tn] = torch_tensor\n\n    # convert buffers (batch norm running stats)\n    mxb = [(k, v) for k, v in net.collect_params().items() if any(x in k for x in ['running_mean', 'running_var'])]\n    torchb = [(k, v) for k, v in torch_net.named_buffers() if 'num_batches' not in k]\n    for (tn, tv), (mn, mv) in zip(torchb, mxb):\n        print(tn, mn)\n        print(tv.shape, mv.shape)\n\n        # 
ensure ordering of BN params match since their sizes are not specific\n        if 'running_var' in tn:\n            assert 'running_var' in mn\n        if 'running_mean' in tn:\n            assert 'running_mean' in mn\n            \n        torch_tensor = torch.from_numpy(mv.data().asnumpy())\n        torch_params[tn] = torch_tensor\n\n    torch_net.load_state_dict(torch_params)\n    torch_filename = './%s.pth' % torch_name\n    torch.save(torch_net.state_dict(), torch_filename)\n    with open(torch_filename, 'rb') as f:\n        sha_hash = hashlib.sha256(f.read()).hexdigest()\n    final_filename = os.path.splitext(torch_filename)[0] + '-' + sha_hash[:8] + '.pth'\n    os.rename(torch_filename, final_filename)\n    print(\"=> Saved converted model to '{}, SHA256: {}'\".format(final_filename, sha_hash))\n\n\ndef map_mx_to_torch_model(mx_name):\n    torch_name = mx_name.lower()\n    if torch_name.startswith('se_'):\n        torch_name = torch_name.replace('se_', 'se')\n    elif torch_name.startswith('senet_'):\n        torch_name = torch_name.replace('senet_', 'senet')\n    elif torch_name.startswith('inceptionv3'):\n        torch_name = torch_name.replace('inceptionv3', 'inception_v3')\n    torch_name = 'gluon_' + torch_name\n    return torch_name\n\n\nALL = ['resnet18_v1b', 'resnet34_v1b', 'resnet50_v1b', 'resnet101_v1b', 'resnet152_v1b',\n       'resnet50_v1c', 'resnet101_v1c', 'resnet152_v1c', 'resnet50_v1d', 'resnet101_v1d', 'resnet152_v1d',\n       #'resnet50_v1e', 'resnet101_v1e', 'resnet152_v1e',\n       'resnet50_v1s', 'resnet101_v1s', 'resnet152_v1s', 'resnext50_32x4d', 'resnext101_32x4d', 'resnext101_64x4d',\n       'se_resnext50_32x4d', 'se_resnext101_32x4d', 'se_resnext101_64x4d', 'senet_154', 'inceptionv3']\n\n\ndef main():\n    args = parser.parse_args()\n\n    if not args.model or args.model == 'all':\n        for mx_model in ALL:\n            torch_model = map_mx_to_torch_model(mx_model)\n            convert(mx_model, torch_model)\n    else:\n        
mx_model = args.model\n        torch_model = map_mx_to_torch_model(mx_model)\n        convert(mx_model, torch_model)\n\n\nif __name__ == '__main__':\n    main()\n"
  },
  {
    "path": "convert/convert_nest_flax.py",
    "content": "\"\"\"\nConvert weights from https://github.com/google-research/nested-transformer\nNOTE: You'll need https://github.com/google/CommonLoopUtils, not included in requirements.txt\n\"\"\"\n\nimport sys\n\nimport numpy as np\nimport torch\n\nfrom clu import checkpoint\n\n\narch_depths = {\n    'nest_base': [2, 2, 20],\n    'nest_small': [2, 2, 20],\n    'nest_tiny': [2, 2, 8],\n}\n\n\ndef convert_nest(checkpoint_path, arch):\n    \"\"\"\n    Expects path to checkpoint which is a dir containing 4 files like in each of these folders\n        - https://console.cloud.google.com/storage/browser/gresearch/nest-checkpoints\n    `arch` is needed to \n    Returns a state dict that can be used with `torch.nn.Module.load_state_dict`\n    Hint: Follow timm.models.nest.Nest.__init__ and \n    https://github.com/google-research/nested-transformer/blob/main/models/nest_net.py\n    \"\"\"\n    assert arch in ['nest_base', 'nest_small', 'nest_tiny'], \"Your `arch` is not supported\"\n\n    flax_dict = checkpoint.load_state_dict(checkpoint_path)['optimizer']['target']\n    state_dict = {}\n\n    # Patch embedding\n    state_dict['patch_embed.proj.weight'] = torch.tensor(\n        flax_dict['PatchEmbedding_0']['Conv_0']['kernel']).permute(3, 2, 0, 1)\n    state_dict['patch_embed.proj.bias'] = torch.tensor(flax_dict['PatchEmbedding_0']['Conv_0']['bias'])\n    \n    # Positional embeddings\n    posemb_keys = [k for k in flax_dict.keys() if k.startswith('PositionEmbedding')]\n    for i, k in enumerate(posemb_keys):\n        state_dict[f'levels.{i}.pos_embed'] = torch.tensor(flax_dict[k]['pos_embedding'])\n    \n    # Transformer encoders\n    depths = arch_depths[arch]\n    for level in range(len(depths)):\n        for layer in range(depths[level]):\n            global_layer_ix = sum(depths[:level]) + layer\n            # Norms\n            for i in range(2):\n                state_dict[f'levels.{level}.transformer_encoder.{layer}.norm{i+1}.weight'] = torch.tensor(\n        
            flax_dict[f'EncoderNDBlock_{global_layer_ix}'][f'LayerNorm_{i}']['scale'])\n                state_dict[f'levels.{level}.transformer_encoder.{layer}.norm{i+1}.bias'] = torch.tensor(\n                    flax_dict[f'EncoderNDBlock_{global_layer_ix}'][f'LayerNorm_{i}']['bias'])\n            # Attention qkv\n            w_q = flax_dict[f'EncoderNDBlock_{global_layer_ix}']['MultiHeadAttention_0']['DenseGeneral_0']['kernel']\n            w_kv = flax_dict[f'EncoderNDBlock_{global_layer_ix}']['MultiHeadAttention_0']['DenseGeneral_1']['kernel']\n            # Pay attention to dims here (maybe get pen and paper)\n            w_kv = np.concatenate(np.split(w_kv, 2, -1), 1)\n            w_qkv = np.concatenate([w_q, w_kv], 1)\n            state_dict[f'levels.{level}.transformer_encoder.{layer}.attn.qkv.weight'] = torch.tensor(w_qkv).flatten(1).permute(1,0)\n            b_q = flax_dict[f'EncoderNDBlock_{global_layer_ix}']['MultiHeadAttention_0']['DenseGeneral_0']['bias']\n            b_kv = flax_dict[f'EncoderNDBlock_{global_layer_ix}']['MultiHeadAttention_0']['DenseGeneral_1']['bias']\n            # Pay attention to dims here (maybe get pen and paper)\n            b_kv = np.concatenate(np.split(b_kv, 2, -1), 0)\n            b_qkv = np.concatenate([b_q, b_kv], 0)\n            state_dict[f'levels.{level}.transformer_encoder.{layer}.attn.qkv.bias'] = torch.tensor(b_qkv).reshape(-1)\n            # Attention proj\n            w_proj = flax_dict[f'EncoderNDBlock_{global_layer_ix}']['MultiHeadAttention_0']['proj_kernel']\n            w_proj = torch.tensor(w_proj).permute(2, 1, 0).flatten(1)\n            state_dict[f'levels.{level}.transformer_encoder.{layer}.attn.proj.weight'] = w_proj\n            state_dict[f'levels.{level}.transformer_encoder.{layer}.attn.proj.bias'] = torch.tensor(\n                flax_dict[f'EncoderNDBlock_{global_layer_ix}']['MultiHeadAttention_0']['bias'])\n            # MLP\n            for i in range(2):\n                
state_dict[f'levels.{level}.transformer_encoder.{layer}.mlp.fc{i+1}.weight'] = torch.tensor(\n                    flax_dict[f'EncoderNDBlock_{global_layer_ix}']['MlpBlock_0'][f'Dense_{i}']['kernel']).permute(1, 0)\n                state_dict[f'levels.{level}.transformer_encoder.{layer}.mlp.fc{i+1}.bias'] = torch.tensor(\n                    flax_dict[f'EncoderNDBlock_{global_layer_ix}']['MlpBlock_0'][f'Dense_{i}']['bias'])\n\n    # Block aggregations (ConvPool)\n    for level in range(1, len(depths)):\n        # Convs\n        state_dict[f'levels.{level}.pool.conv.weight'] = torch.tensor(\n            flax_dict[f'ConvPool_{level-1}']['Conv_0']['kernel']).permute(3, 2, 0, 1)\n        state_dict[f'levels.{level}.pool.conv.bias'] = torch.tensor(\n            flax_dict[f'ConvPool_{level-1}']['Conv_0']['bias'])\n        # Norms\n        state_dict[f'levels.{level}.pool.norm.weight'] = torch.tensor(\n                    flax_dict[f'ConvPool_{level-1}']['LayerNorm_0']['scale'])\n        state_dict[f'levels.{level}.pool.norm.bias'] = torch.tensor(\n                    flax_dict[f'ConvPool_{level-1}']['LayerNorm_0']['bias'])\n\n    # Final norm\n    state_dict[f'norm.weight'] = torch.tensor(flax_dict['LayerNorm_0']['scale'])\n    state_dict[f'norm.bias'] = torch.tensor(flax_dict['LayerNorm_0']['bias'])\n\n    # Classifier\n    state_dict['head.weight'] = torch.tensor(flax_dict['Dense_0']['kernel']).permute(1, 0)\n    state_dict['head.bias'] = torch.tensor(flax_dict['Dense_0']['bias'])\n\n    return state_dict\n\n\nif __name__ == '__main__':\n    variant = sys.argv[1] # base, small, or tiny\n    state_dict = convert_nest(f'./nest-{variant[0]}_imagenet', f'nest_{variant}')\n    torch.save(state_dict, f'./jx_nest_{variant}.pth')"
  },
  {
    "path": "distributed_train.sh",
    "content": "#!/bin/bash\nNUM_PROC=$1\nshift\ntorchrun --nproc_per_node=$NUM_PROC train.py \"$@\"\n\n"
  },
  {
    "path": "hfdocs/README.md",
    "content": "# Hugging Face Timm Docs\n\n## Getting Started\n\n```\npip install git+https://github.com/huggingface/doc-builder.git@main#egg=hf-doc-builder\npip install watchdog black\n```\n\n## Preview the Docs Locally\n\n```\ndoc-builder preview timm hfdocs/source\n```\n"
  },
  {
    "path": "hfdocs/source/_toctree.yml",
    "content": "- sections: \n  - local: index\n    title: Home\n  - local: quickstart\n    title: Quickstart\n  - local: installation\n    title: Installation\n  - local: changes\n    title: Changelog\n  title: Get started\n- sections:\n  - local: feature_extraction\n    title: Using Pretrained Models as Feature Extractors\n  - local: hparams\n    title: Hyper-Parameters (HParams)\n  - local: training_script\n    title: Using The Official Training Script\n  - local: hf_hub\n    title: Share and Load Models from the 🤗 Hugging Face Hub\n  title: Tutorials\n- sections:\n  - local: models\n    title: Model Summaries\n  - local: results\n    title: Results\n  - local: models/adversarial-inception-v3\n    title: Adversarial Inception v3\n  - local: models/advprop\n    title: AdvProp (EfficientNet)\n  - local: models/big-transfer\n    title: Big Transfer (BiT)\n  - local: models/csp-darknet\n    title: CSP-DarkNet\n  - local: models/csp-resnet\n    title: CSP-ResNet\n  - local: models/csp-resnext\n    title: CSP-ResNeXt\n  - local: models/densenet\n    title: DenseNet\n  - local: models/dla\n    title: Deep Layer Aggregation\n  - local: models/dpn\n    title: Dual Path Network (DPN)\n  - local: models/ecaresnet\n    title: ECA-ResNet\n  - local: models/efficientnet\n    title: EfficientNet\n  - local: models/efficientnet-pruned\n    title: EfficientNet (Knapsack Pruned)\n  - local: models/ensemble-adversarial\n    title: Ensemble Adversarial Inception ResNet v2\n  - local: models/ese-vovnet\n    title: ESE-VoVNet\n  - local: models/fbnet\n    title: FBNet\n  - local: models/gloun-inception-v3\n    title: (Gluon) Inception v3\n  - local: models/gloun-resnet\n    title: (Gluon) ResNet\n  - local: models/gloun-resnext\n    title: (Gluon) ResNeXt\n  - local: models/gloun-senet\n    title: (Gluon) SENet\n  - local: models/gloun-seresnext\n    title: (Gluon) SE-ResNeXt\n  - local: models/gloun-xception\n    title: (Gluon) Xception\n  - local: models/hrnet\n    title: HRNet\n  
- local: models/ig-resnext\n    title: Instagram ResNeXt WSL\n  - local: models/inception-resnet-v2\n    title: Inception ResNet v2\n  - local: models/inception-v3\n    title: Inception v3\n  - local: models/inception-v4\n    title: Inception v4\n  - local: models/legacy-se-resnet\n    title: (Legacy) SE-ResNet\n  - local: models/legacy-se-resnext\n    title: (Legacy) SE-ResNeXt\n  - local: models/legacy-senet\n    title: (Legacy) SENet\n  - local: models/mixnet\n    title: MixNet\n  - local: models/mnasnet\n    title: MnasNet\n  - local: models/mobilenet-v2\n    title: MobileNet v2\n  - local: models/mobilenet-v3\n    title: MobileNet v3\n  - local: models/nasnet\n    title: NASNet\n  - local: models/noisy-student\n    title: Noisy Student (EfficientNet)\n  - local: models/pnasnet\n    title: PNASNet\n  - local: models/regnetx\n    title: RegNetX\n  - local: models/regnety\n    title: RegNetY\n  - local: models/res2net\n    title: Res2Net\n  - local: models/res2next\n    title: Res2NeXt\n  - local: models/resnest\n    title: ResNeSt\n  - local: models/resnet\n    title: ResNet\n  - local: models/resnet-d\n    title: ResNet-D\n  - local: models/resnext\n    title: ResNeXt\n  - local: models/rexnet\n    title: RexNet\n  - local: models/se-resnet\n    title: SE-ResNet\n  - local: models/selecsls\n    title: SelecSLS\n  - local: models/seresnext\n    title: SE-ResNeXt\n  - local: models/skresnet\n    title: SK-ResNet\n  - local: models/skresnext\n    title: SK-ResNeXt\n  - local: models/spnasnet\n    title: SPNASNet\n  - local: models/ssl-resnet\n    title: SSL ResNet\n  - local: models/swsl-resnet\n    title: SWSL ResNet\n  - local: models/swsl-resnext\n    title: SWSL ResNeXt\n  - local: models/tf-efficientnet\n    title: (Tensorflow) EfficientNet\n  - local: models/tf-efficientnet-condconv\n    title: (Tensorflow) EfficientNet CondConv\n  - local: models/tf-efficientnet-lite\n    title: (Tensorflow) EfficientNet Lite\n  - local: models/tf-inception-v3\n    title: 
(Tensorflow) Inception v3\n  - local: models/tf-mixnet\n    title: (Tensorflow) MixNet\n  - local: models/tf-mobilenet-v3\n    title: (Tensorflow) MobileNet v3\n  - local: models/tresnet\n    title: TResNet\n  - local: models/wide-resnet\n    title: Wide ResNet\n  - local: models/xception\n    title: Xception\n  title: Model Pages\n  isExpanded: false\n- sections:\n  - local: reference/models\n    title: Models\n  - local: reference/data\n    title: Data\n  - local: reference/optimizers\n    title: Optimizers\n  - local: reference/schedulers\n    title: Learning Rate Schedulers\n  title: Reference\n\n"
  },
  {
    "path": "hfdocs/source/changes.mdx",
    "content": "# Changelog\n\n## Dec 12, 2025\n* Add CSATV2 model (thanks https://github.com/gusdlf93) -- a lightweight but high res model with DCT stem & spatial attention. https://huggingface.co/Hyunil/CSATv2\n* Add AdaMuon and NAdaMuon optimizer support to existing `timm` Muon impl. Appears more competitive vs AdamW with familiar hparams for image tasks.\n* End of year PR cleanup, merge aspects of several long open PR\n  * Merge differential attention (`DiffAttention`), add corresponding `DiffParallelScalingBlock` (for ViT), train some wee vits\n    * https://huggingface.co/timm/vit_dwee_patch16_reg1_gap_256.sbb_in1k\n    * https://huggingface.co/timm/vit_dpwee_patch16_reg1_gap_256.sbb_in1k\n  * Add a few pooling modules, `LsePlus` and `SimPool`\n  * Cleanup, optimize `DropBlock2d` (also add support to ByobNet based models)\n* Bump unit tests to PyTorch 2.9.1 + Python 3.13 on upper end, lower still PyTorch 1.13 + Python 3.10\n  \n## Dec 1, 2025\n* Add lightweight task abstraction, add logits and feature distillation support to train script via new tasks.\n* Remove old APEX AMP support\n\n## Nov 4, 2025\n* Fix LayerScale / LayerScale2d init bug (init values ignored), introduced in 1.0.21. 
Thanks https://github.com/Ilya-Fradlin\n* Release 1.0.22\n\n## Oct 31, 2025 🎃\n* Update imagenet & OOD variant result csv files to include a few new models and verify correctness over several torch & timm versions\n* EfficientNet-X and EfficientNet-H B5 model weights added as part of a hparam search for AdamW vs Muon (still iterating on Muon runs)\n\n## Oct 16-20, 2025\n* Add an impl of the Muon optimizer (based on https://github.com/KellerJordan/Muon) with customizations\n  * extra flexibility and improved handling for conv weights and fallbacks for weight shapes not suited for orthogonalization\n  * small speedup for NS iterations by reducing allocs and using fused (b)add(b)mm ops\n  * by default uses AdamW (or NAdamW if `nesterov=True`) updates if muon not suitable for parameter shape (or excluded via param group flag)\n  * like torch impl, select from several LR scale adjustment fns via `adjust_lr_fn`\n  * select from several NS coefficient presets or specify your own via `ns_coefficients`\n* First 2 steps of 'meta' device model initialization supported\n  * Fix several ops that were breaking creation under 'meta' device context\n  * Add device & dtype factory kwarg support to all models and modules (anything inherting from nn.Module) in `timm`\n* License fields added to pretrained cfgs in code\n* Release 1.0.21\n\n## Sept 21, 2025\n* Remap DINOv3 ViT weight tags from `lvd_1689m` -> `lvd1689m` to match (same for `sat_493m` -> `sat493m`)\n* Release 1.0.20\n\n## Sept 17, 2025\n* DINOv3 (https://arxiv.org/abs/2508.10104) ConvNeXt and ViT models added. ConvNeXt models were mapped to existing `timm` model. ViT support done via the EVA base model w/ a new `RotaryEmbeddingDinoV3` to match the DINOv3 specific RoPE impl\n  * HuggingFace Hub: https://huggingface.co/collections/timm/timm-dinov3-68cb08bb0bee365973d52a4d\n* MobileCLIP-2 (https://arxiv.org/abs/2508.20691) vision encoders. 
New MCI3/MCI4 FastViT variants added and weights mapped to existing FastViT and B, L/14 ViTs.\n* MetaCLIP-2 Worldwide (https://arxiv.org/abs/2507.22062) ViT encoder weights added.\n* SigLIP-2 (https://arxiv.org/abs/2502.14786) NaFlex ViT encoder weights added via timm NaFlexViT model.\n* Misc fixes and contributions\n\n## July 23, 2025\n* Add `set_input_size()` method to EVA models, used by OpenCLIP 3.0.0 to allow resizing for timm based encoder models.\n* Release 1.0.18, needed for PE-Core S & T models in OpenCLIP 3.0.0\n* Fix small typing issue that broke Python 3.9 compat. 1.0.19 patch release.\n\n## July 21, 2025\n* ROPE support added to NaFlexViT. All models covered by the EVA base (`eva.py`) including EVA, EVA02, Meta PE ViT, `timm` SBB ViT w/ ROPE, and Naver ROPE-ViT can be now loaded in NaFlexViT when `use_naflex=True` passed at model creation time\n* More Meta PE ViT encoders added, including small/tiny variants, lang variants w/ tiling, and more spatial variants.\n* PatchDropout fixed with NaFlexViT and also w/ EVA models (regression after adding Naver ROPE-ViT)\n* Fix XY order with grid_indexing='xy', impacted non-square image use in 'xy' mode (only ROPE-ViT and PE impacted).\n\n## July 7, 2025\n* MobileNet-v5 backbone tweaks for improved Google Gemma 3n behaviour (to pair with updated official weights)\n  * Add stem bias (zero'd in updated weights, compat break with old weights)\n  * GELU -> GELU (tanh approx). 
A minor change to be closer to JAX\n* Add two arguments to layer-decay support, a min scale clamp and 'no optimization' scale threshold\n* Add 'Fp32' LayerNorm, RMSNorm, SimpleNorm variants that can be enabled to force computation of norm in float32\n* Some typing, argument cleanup for norm, norm+act layers done with above\n* Support Naver ROPE-ViT (https://github.com/naver-ai/rope-vit) in `eva.py`, add RotaryEmbeddingMixed module for mixed mode, weights on HuggingFace Hub\n\n|model                                             |img_size|top1  |top5  |param_count|\n|--------------------------------------------------|--------|------|------|-----------|\n|vit_large_patch16_rope_mixed_ape_224.naver_in1k  |224     |84.84 |97.122|304.4      |\n|vit_large_patch16_rope_mixed_224.naver_in1k      |224     |84.828|97.116|304.2      |\n|vit_large_patch16_rope_ape_224.naver_in1k        |224     |84.65 |97.154|304.37     |\n|vit_large_patch16_rope_224.naver_in1k            |224     |84.648|97.122|304.17     |\n|vit_base_patch16_rope_mixed_ape_224.naver_in1k   |224     |83.894|96.754|86.59      |\n|vit_base_patch16_rope_mixed_224.naver_in1k       |224     |83.804|96.712|86.44      |\n|vit_base_patch16_rope_ape_224.naver_in1k         |224     |83.782|96.61 |86.59      |\n|vit_base_patch16_rope_224.naver_in1k             |224     |83.718|96.672|86.43      |\n|vit_small_patch16_rope_224.naver_in1k            |224     |81.23 |95.022|21.98      |\n|vit_small_patch16_rope_mixed_224.naver_in1k      |224     |81.216|95.022|21.99      |\n|vit_small_patch16_rope_ape_224.naver_in1k        |224     |81.004|95.016|22.06      |\n|vit_small_patch16_rope_mixed_ape_224.naver_in1k  |224     |80.986|94.976|22.06      |\n* Some cleanup of ROPE modules, helpers, and FX tracing leaf registration\n* Preparing version 1.0.17 release\n\n## June 26, 2025\n* MobileNetV5 backbone (w/ encoder only variant) for [Gemma 3n](https://ai.google.dev/gemma/docs/gemma-3n#parameters) image encoder\n* Version 1.0.16 
released\n\n## June 23, 2025\n* Add F.grid_sample based 2D and factorized pos embed resize to NaFlexViT. Faster when lots of different sizes (based on example by https://github.com/stas-sl).\n* Further speed up patch embed resample by replacing vmap with matmul (based on snippet by https://github.com/stas-sl).\n* Add 3 initial native aspect NaFlexViT checkpoints created while testing, ImageNet-1k and 3 different pos embed configs w/ same hparams.\n\n | Model | Top-1 Acc | Top-5 Acc | Params (M) | Eval Seq Len |\n |:---|:---:|:---:|:---:|:---:|\n | [naflexvit_base_patch16_par_gap.e300_s576_in1k](https://hf.co/timm/naflexvit_base_patch16_par_gap.e300_s576_in1k) | 83.67 | 96.45 | 86.63 | 576 |\n | [naflexvit_base_patch16_parfac_gap.e300_s576_in1k](https://hf.co/timm/naflexvit_base_patch16_parfac_gap.e300_s576_in1k) | 83.63 | 96.41 | 86.46 | 576 |\n | [naflexvit_base_patch16_gap.e300_s576_in1k](https://hf.co/timm/naflexvit_base_patch16_gap.e300_s576_in1k) | 83.50 | 96.46 | 86.63 | 576 |\n* Support gradient checkpointing for `forward_intermediates` and fix some checkpointing bugs. Thanks https://github.com/brianhou0208\n* Add 'corrected weight decay' (https://arxiv.org/abs/2506.02285) as option to AdamW (legacy), Adopt, Kron, Adafactor (BV), Lamb, LaProp, Lion, NadamW, RmsPropTF, SGDW optimizers\n* Switch PE (perception encoder) ViT models to use native timm weights instead of remapping on the fly\n* Fix cuda stream bug in prefetch loader\n  \n## June 5, 2025\n* Initial NaFlexVit model code. NaFlexVit is a Vision Transformer with:\n  1. Encapsulated embedding and position encoding in a single module\n  2. Support for nn.Linear patch embedding on pre-patchified (dictionary) inputs\n  3. Support for NaFlex variable aspect, variable resolution (SigLip-2: https://arxiv.org/abs/2502.14786)\n  4. Support for FlexiViT variable patch size (https://arxiv.org/abs/2212.08013)\n  5. 
Support for NaViT fractional/factorized position embedding (https://arxiv.org/abs/2307.06304)\n* Existing vit models in `vision_transformer.py` can be loaded into the NaFlexVit model by adding the `use_naflex=True` flag to `create_model`\n  * Some native weights coming soon\n* A full NaFlex data pipeline is available that allows training / fine-tuning / evaluating with variable aspect / size images\n  * To enable in `train.py` and `validate.py` add the `--naflex-loader` arg, must be used with a NaFlexVit\n* To evaluate an existing (classic) ViT loaded in NaFlexVit model w/ NaFlex data pipe:\n  * `python validate.py /imagenet --amp -j 8 --model vit_base_patch16_224 --model-kwargs use_naflex=True --naflex-loader --naflex-max-seq-len 256` \n* The training has some extra args features worth noting\n  * The `--naflex-train-seq-lens'` argument specifies which sequence lengths to randomly pick from per batch during training\n  * The `--naflex-max-seq-len` argument sets the target sequence length for validation\n  * Adding `--model-kwargs enable_patch_interpolator=True --naflex-patch-sizes 12 16 24` will enable random patch size selection per-batch w/ interpolation\n  * The `--naflex-loss-scale` arg changes loss scaling mode per batch relative to the batch size, `timm` NaFlex loading changes the batch size for each seq len\n\n## May 28, 2025\n* Add a number of small/fast models thanks to https://github.com/brianhou0208\n  * SwiftFormer - [(ICCV2023) SwiftFormer: Efficient Additive Attention for Transformer-based Real-time Mobile Vision Applications](https://github.com/Amshaker/SwiftFormer) \n  * FasterNet - [(CVPR2023) Run, Don’t Walk: Chasing Higher FLOPS for Faster Neural Networks](https://github.com/JierunChen/FasterNet)\n  * SHViT - [(CVPR2024) SHViT: Single-Head Vision Transformer with Memory Efficient](https://github.com/ysj9909/SHViT)\n  * StarNet - [(CVPR2024) Rewrite the Stars](https://github.com/ma-xu/Rewrite-the-Stars)\n  * GhostNet-V3 [GhostNetV3: Exploring the 
Training Strategies for Compact Models](https://github.com/huawei-noah/Efficient-AI-Backbones/tree/master/ghostnetv3_pytorch)\n* Update EVA ViT (closest match) to support Perception Encoder models (https://arxiv.org/abs/2504.13181) from Meta, loading Hub weights but I still need to push dedicated `timm` weights\n  * Add some flexibility to ROPE impl\n* Big increase in number of models supporting `forward_intermediates()` and some additional fixes thanks to https://github.com/brianhou0208\n  * DaViT, EdgeNeXt, EfficientFormerV2, EfficientViT(MIT), EfficientViT(MSRA), FocalNet, GCViT, HGNet /V2, InceptionNeXt, Inception-V4, MambaOut, MetaFormer, NesT, Next-ViT, PiT, PVT V2, RepGhostNet, RepViT, ResNetV2, ReXNet, TinyViT, TResNet, VoV\n* TNT model updated w/ new weights `forward_intermediates()` thanks to https://github.com/brianhou0208\n* Add `local-dir:` pretrained schema, can use `local-dir:/path/to/model/folder` for model name to source model / pretrained cfg & weights Hugging Face Hub models (config.json + weights file) from a local folder.\n* Fixes, improvements for onnx export\n    \n## Feb 21, 2025\n* SigLIP 2 ViT image encoders added (https://huggingface.co/collections/timm/siglip-2-67b8e72ba08b09dd97aecaf9)\n  * Variable resolution / aspect NaFlex versions are a WIP\n* Add 'SO150M2' ViT weights trained with SBB recipes, great results, better for ImageNet than previous attempt w/ less training.\n  * `vit_so150m2_patch16_reg1_gap_448.sbb_e200_in12k_ft_in1k` - 88.1% top-1\n  * `vit_so150m2_patch16_reg1_gap_384.sbb_e200_in12k_ft_in1k` - 87.9% top-1\n  * `vit_so150m2_patch16_reg1_gap_256.sbb_e200_in12k_ft_in1k` - 87.3% top-1\n  * `vit_so150m2_patch16_reg4_gap_256.sbb_e200_in12k`\n* Updated InternViT-300M '2.5' weights\n* Release 1.0.15\n\n## Feb 1, 2025\n* FYI PyTorch 2.6 & Python 3.13 are tested and working w/ current main and released version of `timm`\n\n## Jan 27, 2025\n* Add Kron Optimizer (PSGD w/ Kronecker-factored preconditioner) \n  * Code from 
https://github.com/evanatyourservice/kron_torch\n  * See also https://sites.google.com/site/lixilinx/home/psgd\n\n## Jan 19, 2025\n* Fix loading of LeViT safetensor weights, remove conversion code which should have been deactivated\n* Add 'SO150M' ViT weights trained with SBB recipes, decent results, but not optimal shape for ImageNet-12k/1k pretrain/ft\n  * `vit_so150m_patch16_reg4_gap_256.sbb_e250_in12k_ft_in1k` - 86.7% top-1\n  * `vit_so150m_patch16_reg4_gap_384.sbb_e250_in12k_ft_in1k` - 87.4% top-1\n  * `vit_so150m_patch16_reg4_gap_256.sbb_e250_in12k`\n* Misc typing, typo, etc. cleanup\n* 1.0.14 release to get above LeViT fix out\n\n## Jan 9, 2025\n* Add support to train and validate in pure `bfloat16` or `float16`\n* `wandb` project name arg added by https://github.com/caojiaolong, use arg.experiment for name\n* Fix old issue w/ checkpoint saving not working on filesystem w/o hard-link support (e.g. FUSE fs mounts)\n* 1.0.13 release\n\n## Jan 6, 2025\n* Add `torch.utils.checkpoint.checkpoint()` wrapper in `timm.models` that defaults `use_reentrant=False`, unless `TIMM_REENTRANT_CKPT=1` is set in env.\n\n## Dec 31, 2024\n* `convnext_nano` 384x384 ImageNet-12k pretrain & fine-tune. https://huggingface.co/models?search=convnext_nano%20r384\n* Add AIM-v2 encoders from https://github.com/apple/ml-aim, see on Hub: https://huggingface.co/models?search=timm%20aimv2\n* Add PaliGemma2 encoders from https://github.com/google-research/big_vision to existing PaliGemma, see on Hub: https://huggingface.co/models?search=timm%20pali2\n* Add missing L/14 DFN2B 39B CLIP ViT, `vit_large_patch14_clip_224.dfn2b_s39b`\n* Fix existing `RmsNorm` layer & fn to match standard formulation, use PT 2.5 impl when possible. Move old impl to `SimpleNorm` layer, it's LN w/o centering or bias. 
There were only two `timm` models using it, and they have been updated.\n* Allow override of `cache_dir` arg for model creation\n* Pass through `trust_remote_code` for HF datasets wrapper\n* `inception_next_atto` model added by creator\n* Adan optimizer caution, and Lamb decoupled weight decay options\n* Some feature_info metadata fixed by https://github.com/brianhou0208\n* All OpenCLIP and JAX (CLIP, SigLIP, Pali, etc) model weights that used load time remapping were given their own HF Hub instances so that they work with `hf-hub:` based loading, and thus will work with new Transformers `TimmWrapperModel`\n\n## Nov 28, 2024\n* More optimizers\n  * Add MARS optimizer (https://arxiv.org/abs/2411.10438, https://github.com/AGI-Arena/MARS)\n  * Add LaProp optimizer (https://arxiv.org/abs/2002.04839, https://github.com/Z-T-WANG/LaProp-Optimizer)\n  * Add masking from 'Cautious Optimizers' (https://arxiv.org/abs/2411.16085, https://github.com/kyleliang919/C-Optim) to Adafactor, Adafactor Big Vision, AdamW (legacy), Adopt, Lamb, LaProp, Lion, NadamW, RMSPropTF, SGDW\n  * Cleanup some docstrings and type annotations re optimizers and factory\n* Add MobileNet-V4 Conv Medium models pretrained on in12k and fine-tuned in1k @ 384x384\n  * https://huggingface.co/timm/mobilenetv4_conv_medium.e250_r384_in12k_ft_in1k\n  * https://huggingface.co/timm/mobilenetv4_conv_medium.e250_r384_in12k\n  * https://huggingface.co/timm/mobilenetv4_conv_medium.e180_ad_r384_in12k\n  * https://huggingface.co/timm/mobilenetv4_conv_medium.e180_r384_in12k\n* Add small cs3darknet, quite good for the speed\n  * https://huggingface.co/timm/cs3darknet_focus_s.ra4_e3600_r256_in1k\n\n## Nov 12, 2024\n* Optimizer factory refactor\n  * New factory works by registering optimizers using an OptimInfo dataclass w/ some key traits\n  * Add `list_optimizers`, `get_optimizer_class`, `get_optimizer_info` to reworked `create_optimizer_v2` fn to explore optimizers, get info or class\n  * deprecate 
`optim.optim_factory`, move fns to `optim/_optim_factory.py` and `optim/_param_groups.py` and encourage import via `timm.optim`\n* Add Adopt (https://github.com/iShohei220/adopt) optimizer\n* Add 'Big Vision' variant of Adafactor (https://github.com/google-research/big_vision/blob/main/big_vision/optax.py) optimizer\n* Fix original Adafactor to pick better factorization dims for convolutions\n* Tweak LAMB optimizer with some improvements in torch.where functionality since original, refactor clipping a bit\n* dynamic img size support in vit, deit, eva improved to support resize from non-square patch grids, thanks https://github.com/wojtke\n\n## Oct 31, 2024\nAdd a set of new very well trained ResNet & ResNet-V2 18/34 (basic block) weights. See https://huggingface.co/blog/rwightman/resnet-trick-or-treat\n\n## Oct 19, 2024\n* Cleanup torch amp usage to avoid cuda specific calls, merge support for Ascend (NPU) devices from [MengqingCao](https://github.com/MengqingCao) that should work now in PyTorch 2.5 w/ new device extension autoloading feature. Tested Intel Arc (XPU) in Pytorch 2.5 too and it (mostly) worked.\n\n## Oct 16, 2024\n* Fix error on importing from deprecated path `timm.models.registry`, increased priority of existing deprecation warnings to be visible\n* Port weights of InternViT-300M (https://huggingface.co/OpenGVLab/InternViT-300M-448px) to `timm` as `vit_intern300m_patch14_448`\n\n### Oct 14, 2024\n* Pre-activation (ResNetV2) version of 18/18d/34/34d ResNet model defs added by request (weights pending)\n* Release 1.0.10\n\n### Oct 11, 2024\n* MambaOut (https://github.com/yuweihao/MambaOut) model & weights added. A cheeky take on SSM vision models w/o the SSM (essentially ConvNeXt w/ gating). 
A mix of original weights + custom variations & weights.\n\n|model                                                                                                                |img_size|top1  |top5  |param_count|\n|---------------------------------------------------------------------------------------------------------------------|--------|------|------|-----------|\n|[mambaout_base_plus_rw.sw_e150_r384_in12k_ft_in1k](http://huggingface.co/timm/mambaout_base_plus_rw.sw_e150_r384_in12k_ft_in1k)|384     |87.506|98.428|101.66     |\n|[mambaout_base_plus_rw.sw_e150_in12k_ft_in1k](http://huggingface.co/timm/mambaout_base_plus_rw.sw_e150_in12k_ft_in1k)|288     |86.912|98.236|101.66     |\n|[mambaout_base_plus_rw.sw_e150_in12k_ft_in1k](http://huggingface.co/timm/mambaout_base_plus_rw.sw_e150_in12k_ft_in1k)|224     |86.632|98.156|101.66     |\n|[mambaout_base_tall_rw.sw_e500_in1k](http://huggingface.co/timm/mambaout_base_tall_rw.sw_e500_in1k)                  |288     |84.974|97.332|86.48      |\n|[mambaout_base_wide_rw.sw_e500_in1k](http://huggingface.co/timm/mambaout_base_wide_rw.sw_e500_in1k)                  |288     |84.962|97.208|94.45      |\n|[mambaout_base_short_rw.sw_e500_in1k](http://huggingface.co/timm/mambaout_base_short_rw.sw_e500_in1k)                |288     |84.832|97.27 |88.83      |\n|[mambaout_base.in1k](http://huggingface.co/timm/mambaout_base.in1k)                                                  |288     |84.72 |96.93 |84.81      |\n|[mambaout_small_rw.sw_e450_in1k](http://huggingface.co/timm/mambaout_small_rw.sw_e450_in1k)                          |288     |84.598|97.098|48.5       |\n|[mambaout_small.in1k](http://huggingface.co/timm/mambaout_small.in1k)                                                |288     |84.5  |96.974|48.49      |\n|[mambaout_base_wide_rw.sw_e500_in1k](http://huggingface.co/timm/mambaout_base_wide_rw.sw_e500_in1k)                  |224     |84.454|96.864|94.45      
|\n|[mambaout_base_tall_rw.sw_e500_in1k](http://huggingface.co/timm/mambaout_base_tall_rw.sw_e500_in1k)                  |224     |84.434|96.958|86.48      |\n|[mambaout_base_short_rw.sw_e500_in1k](http://huggingface.co/timm/mambaout_base_short_rw.sw_e500_in1k)                |224     |84.362|96.952|88.83      |\n|[mambaout_base.in1k](http://huggingface.co/timm/mambaout_base.in1k)                                                  |224     |84.168|96.68 |84.81      |\n|[mambaout_small.in1k](http://huggingface.co/timm/mambaout_small.in1k)                                                |224     |84.086|96.63 |48.49      |\n|[mambaout_small_rw.sw_e450_in1k](http://huggingface.co/timm/mambaout_small_rw.sw_e450_in1k)                          |224     |84.024|96.752|48.5       |\n|[mambaout_tiny.in1k](http://huggingface.co/timm/mambaout_tiny.in1k)                                                  |288     |83.448|96.538|26.55      |\n|[mambaout_tiny.in1k](http://huggingface.co/timm/mambaout_tiny.in1k)                                                  |224     |82.736|96.1  |26.55      |\n|[mambaout_kobe.in1k](http://huggingface.co/timm/mambaout_kobe.in1k)                                                  |288     |81.054|95.718|9.14       |\n|[mambaout_kobe.in1k](http://huggingface.co/timm/mambaout_kobe.in1k)                                                  |224     |79.986|94.986|9.14       |\n|[mambaout_femto.in1k](http://huggingface.co/timm/mambaout_femto.in1k)                                                |288     |79.848|95.14 |7.3        |\n|[mambaout_femto.in1k](http://huggingface.co/timm/mambaout_femto.in1k)                                                |224     |78.87 |94.408|7.3        |\n\n* SigLIP SO400M ViT fine-tunes on ImageNet-1k @ 378x378, added 378x378 option for existing SigLIP 384x384 models\n  *  [vit_so400m_patch14_siglip_378.webli_ft_in1k](https://huggingface.co/timm/vit_so400m_patch14_siglip_378.webli_ft_in1k) - 89.42 top-1\n  *  
[vit_so400m_patch14_siglip_gap_378.webli_ft_in1k](https://huggingface.co/timm/vit_so400m_patch14_siglip_gap_378.webli_ft_in1k) - 89.03\n* SigLIP SO400M ViT encoder from recent multi-lingual (i18n) variant, patch16 @ 256x256 (https://huggingface.co/timm/ViT-SO400M-16-SigLIP-i18n-256). OpenCLIP update pending.\n* Add two ConvNeXt 'Zepto' models & weights (one w/ overlapped stem and one w/ patch stem). Uses RMSNorm, smaller than previous 'Atto', 2.2M params.\n  * [convnext_zepto_rms_ols.ra4_e3600_r224_in1k](https://huggingface.co/timm/convnext_zepto_rms_ols.ra4_e3600_r224_in1k) - 73.20 top-1 @ 224\n  * [convnext_zepto_rms.ra4_e3600_r224_in1k](https://huggingface.co/timm/convnext_zepto_rms.ra4_e3600_r224_in1k) - 72.81 @ 224\n\n### Sept 2024\n* Add a suite of tiny test models for improved unit tests and niche low-resource applications (https://huggingface.co/blog/rwightman/timm-tiny-test)\n* Add MobileNetV4-Conv-Small (0.5x) model (https://huggingface.co/posts/rwightman/793053396198664)\n  * [mobilenetv4_conv_small_050.e3000_r224_in1k](http://hf.co/timm/mobilenetv4_conv_small_050.e3000_r224_in1k) - 65.81 top-1 @ 256, 64.76 @ 224\n* Add MobileNetV3-Large variants trained with MNV4 Small recipe\n  * [mobilenetv3_large_150d.ra4_e3600_r256_in1k](http://hf.co/timm/mobilenetv3_large_150d.ra4_e3600_r256_in1k) - 81.81 @ 320, 80.94 @ 256\n  * [mobilenetv3_large_100.ra4_e3600_r224_in1k](http://hf.co/timm/mobilenetv3_large_100.ra4_e3600_r224_in1k) - 77.16 @ 256, 76.31 @ 224\n\n### Aug 21, 2024\n* Updated SBB ViT models trained on ImageNet-12k and fine-tuned on ImageNet-1k, challenging quite a number of much larger, slower models\n\n| model | top1 | top5 | param_count | img_size |\n| -------------------------------------------------- | ------ | ------ | ----------- | -------- |\n| [vit_mediumd_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k](https://huggingface.co/timm/vit_mediumd_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k) | 87.438 | 98.256 | 64.11 | 384 |\n| 
[vit_mediumd_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k](https://huggingface.co/timm/vit_mediumd_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k) | 86.608 | 97.934 | 64.11 | 256 |\n| [vit_betwixt_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k](https://huggingface.co/timm/vit_betwixt_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k) | 86.594 | 98.02 | 60.4 | 384 |\n| [vit_betwixt_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k](https://huggingface.co/timm/vit_betwixt_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k) | 85.734 | 97.61 | 60.4 | 256 |\n* MobileNet-V1 1.25, EfficientNet-B1, & ResNet50-D weights w/ MNV4 baseline challenge recipe\n\n| model                                                                                                                    | top1   | top5   | param_count | img_size |\n|--------------------------------------------------------------------------------------------------------------------------|--------|--------|-------------|----------|\n| [resnet50d.ra4_e3600_r224_in1k](http://hf.co/timm/resnet50d.ra4_e3600_r224_in1k)                                         | 81.838 | 95.922 | 25.58       | 288      |\n| [efficientnet_b1.ra4_e3600_r240_in1k](http://hf.co/timm/efficientnet_b1.ra4_e3600_r240_in1k)                             | 81.440 | 95.700 | 7.79        | 288      |\n| [resnet50d.ra4_e3600_r224_in1k](http://hf.co/timm/resnet50d.ra4_e3600_r224_in1k)                                         | 80.952 | 95.384 | 25.58       | 224      |\n| [efficientnet_b1.ra4_e3600_r240_in1k](http://hf.co/timm/efficientnet_b1.ra4_e3600_r240_in1k)                             | 80.406 | 95.152 | 7.79        | 240      |\n| [mobilenetv1_125.ra4_e3600_r224_in1k](http://hf.co/timm/mobilenetv1_125.ra4_e3600_r224_in1k)                             | 77.600 | 93.804 | 6.27        | 256      |\n| [mobilenetv1_125.ra4_e3600_r224_in1k](http://hf.co/timm/mobilenetv1_125.ra4_e3600_r224_in1k)                             | 76.924 | 93.234 | 6.27        | 224      |\n\n* Add 
SAM2 (HieraDet) backbone arch & weight loading support\n* Add Hiera Small weights trained w/ abswin pos embed on in12k & fine-tuned on 1k\n\n|model                            |top1  |top5  |param_count|\n|---------------------------------|------|------|-----------|\n|hiera_small_abswin_256.sbb2_e200_in12k_ft_in1k    |84.912|97.260|35.01      |\n|hiera_small_abswin_256.sbb2_pd_e200_in12k_ft_in1k |84.560|97.106|35.01      |\n\n### Aug 8, 2024\n* Add RDNet ('DenseNets Reloaded', https://arxiv.org/abs/2403.19588), thanks [Donghyun Kim](https://github.com/dhkim0225)\n\n### July 28, 2024\n* Add `mobilenet_edgetpu_v2_m` weights w/ `ra4` mnv4-small based recipe. 80.1% top-1 @ 224 and 80.7 @ 256.\n* Release 1.0.8\n\n### July 26, 2024\n* More MobileNet-v4 weights, ImageNet-12k pretrain w/ fine-tunes, and anti-aliased ConvLarge models\n\n| model                                                                                            |top1  |top1_err|top5  |top5_err|param_count|img_size|\n|--------------------------------------------------------------------------------------------------|------|--------|------|--------|-----------|--------|\n| [mobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k](http://hf.co/timm/mobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k)|84.99 |15.01   |97.294|2.706   |32.59      |544     |\n| [mobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k](http://hf.co/timm/mobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k)|84.772|15.228  |97.344|2.656   |32.59      |480     |\n| [mobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k](http://hf.co/timm/mobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k)|84.64 |15.36   |97.114|2.886   |32.59      |448     |\n| [mobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k](http://hf.co/timm/mobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k)|84.314|15.686  |97.102|2.898   |32.59      |384     |\n| [mobilenetv4_conv_aa_large.e600_r384_in1k](http://hf.co/timm/mobilenetv4_conv_aa_large.e600_r384_in1k)     |83.824|16.176  |96.734|3.266 
  |32.59      |480     |\n| [mobilenetv4_conv_aa_large.e600_r384_in1k](http://hf.co/timm/mobilenetv4_conv_aa_large.e600_r384_in1k)             |83.244|16.756  |96.392|3.608   |32.59      |384     |\n| [mobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k](http://hf.co/timm/mobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k)|82.99 |17.01   |96.67 |3.33    |11.07      |320     |\n| [mobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k](http://hf.co/timm/mobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k)|82.364|17.636  |96.256|3.744   |11.07      |256     |\n\n* Impressive MobileNet-V1 and EfficientNet-B0 baseline challenges (https://huggingface.co/blog/rwightman/mobilenet-baselines)\n\n| model                                                                                            |top1  |top1_err|top5  |top5_err|param_count|img_size|\n|--------------------------------------------------------------------------------------------------|------|--------|------|--------|-----------|--------|\n| [efficientnet_b0.ra4_e3600_r224_in1k](http://hf.co/timm/efficientnet_b0.ra4_e3600_r224_in1k)                       |79.364|20.636  |94.754|5.246   |5.29       |256     |\n| [efficientnet_b0.ra4_e3600_r224_in1k](http://hf.co/timm/efficientnet_b0.ra4_e3600_r224_in1k)                       |78.584|21.416  |94.338|5.662   |5.29       |224     |\n| [mobilenetv1_100h.ra4_e3600_r224_in1k](http://hf.co/timm/mobilenetv1_100h.ra4_e3600_r224_in1k)                     |76.596|23.404  |93.272|6.728   |5.28       |256     |\n| [mobilenetv1_100.ra4_e3600_r224_in1k](http://hf.co/timm/mobilenetv1_100.ra4_e3600_r224_in1k)                       |76.094|23.906  |93.004|6.996   |4.23       |256     |\n| [mobilenetv1_100h.ra4_e3600_r224_in1k](http://hf.co/timm/mobilenetv1_100h.ra4_e3600_r224_in1k)                     |75.662|24.338  |92.504|7.496   |5.28       |224     |\n| [mobilenetv1_100.ra4_e3600_r224_in1k](http://hf.co/timm/mobilenetv1_100.ra4_e3600_r224_in1k)                       |75.382|24.618  
|92.312|7.688   |4.23       |224     |\n\n* Prototype of `set_input_size()` added to vit and swin v1/v2 models to allow changing image size, patch size, window size after model creation.\n* Improved support in swin for different size handling, in addition to `set_input_size`, `always_partition` and `strict_img_size` args have been added to `__init__` to allow more flexible input size constraints\n* Fix out of order indices info for intermediate 'Getter' feature wrapper, check out of range indices for same.\n* Add several `tiny` < .5M param models for testing that are actually trained on ImageNet-1k\n\n|model                       |top1  |top1_err|top5  |top5_err|param_count|img_size|crop_pct|\n|----------------------------|------|--------|------|--------|-----------|--------|--------|\n|test_efficientnet.r160_in1k |47.156|52.844  |71.726|28.274  |0.36       |192     |1.0     |\n|test_byobnet.r160_in1k      |46.698|53.302  |71.674|28.326  |0.46       |192     |1.0     |\n|test_efficientnet.r160_in1k |46.426|53.574  |70.928|29.072  |0.36       |160     |0.875   |\n|test_byobnet.r160_in1k      |45.378|54.622  |70.572|29.428  |0.46       |160     |0.875   |\n|test_vit.r160_in1k|42.0  |58.0    |68.664|31.336  |0.37       |192     |1.0     |\n|test_vit.r160_in1k|40.822|59.178  |67.212|32.788  |0.37       |160     |0.875   |\n\n* Fix vit reg token init, thanks [Promisery](https://github.com/Promisery)\n* Other misc fixes\n\n### June 24, 2024\n* 3 more MobileNetV4 hybrid weights with different MQA weight init scheme\n\n| model                                                                                            |top1  |top1_err|top5  |top5_err|param_count|img_size|\n|--------------------------------------------------------------------------------------------------|------|--------|------|--------|-----------|--------|\n| [mobilenetv4_hybrid_large.ix_e600_r384_in1k](http://hf.co/timm/mobilenetv4_hybrid_large.ix_e600_r384_in1k) |84.356|15.644  |96.892 |3.108  |37.76      
|448     |\n| [mobilenetv4_hybrid_large.ix_e600_r384_in1k](http://hf.co/timm/mobilenetv4_hybrid_large.ix_e600_r384_in1k) |83.990|16.010  |96.702 |3.298  |37.76      |384     |\n| [mobilenetv4_hybrid_medium.ix_e550_r384_in1k](http://hf.co/timm/mobilenetv4_hybrid_medium.ix_e550_r384_in1k)       |83.394|16.606  |96.760|3.240   |11.07      |448     |\n| [mobilenetv4_hybrid_medium.ix_e550_r384_in1k](http://hf.co/timm/mobilenetv4_hybrid_medium.ix_e550_r384_in1k)       |82.968|17.032  |96.474|3.526   |11.07      |384     |\n| [mobilenetv4_hybrid_medium.ix_e550_r256_in1k](http://hf.co/timm/mobilenetv4_hybrid_medium.ix_e550_r256_in1k)       |82.492|17.508  |96.278|3.722   |11.07      |320     |\n| [mobilenetv4_hybrid_medium.ix_e550_r256_in1k](http://hf.co/timm/mobilenetv4_hybrid_medium.ix_e550_r256_in1k)       |81.446|18.554  |95.704|4.296   |11.07      |256     |\n* florence2 weight loading in DaViT model\n\n### June 12, 2024\n* MobileNetV4 models and initial set of `timm` trained weights added:\n\n| model                                                                                            |top1  |top1_err|top5  |top5_err|param_count|img_size|\n|--------------------------------------------------------------------------------------------------|------|--------|------|--------|-----------|--------|\n| [mobilenetv4_hybrid_large.e600_r384_in1k](http://hf.co/timm/mobilenetv4_hybrid_large.e600_r384_in1k) |84.266|15.734  |96.936 |3.064  |37.76      |448     |\n| [mobilenetv4_hybrid_large.e600_r384_in1k](http://hf.co/timm/mobilenetv4_hybrid_large.e600_r384_in1k) |83.800|16.200  |96.770 |3.230  |37.76      |384     |\n| [mobilenetv4_conv_large.e600_r384_in1k](http://hf.co/timm/mobilenetv4_conv_large.e600_r384_in1k) |83.392|16.608  |96.622 |3.378  |32.59      |448     |\n| [mobilenetv4_conv_large.e600_r384_in1k](http://hf.co/timm/mobilenetv4_conv_large.e600_r384_in1k) |82.952|17.048  |96.266 |3.734  |32.59      |384     |\n| 
[mobilenetv4_conv_large.e500_r256_in1k](http://hf.co/timm/mobilenetv4_conv_large.e500_r256_in1k) |82.674|17.326  |96.31 |3.69    |32.59      |320     |\n| [mobilenetv4_conv_large.e500_r256_in1k](http://hf.co/timm/mobilenetv4_conv_large.e500_r256_in1k)                   |81.862|18.138  |95.69 |4.31    |32.59      |256     |\n| [mobilenetv4_hybrid_medium.e500_r224_in1k](http://hf.co/timm/mobilenetv4_hybrid_medium.e500_r224_in1k)             |81.276|18.724  |95.742|4.258   |11.07      |256     |\n| [mobilenetv4_conv_medium.e500_r256_in1k](http://hf.co/timm/mobilenetv4_conv_medium.e500_r256_in1k)                 |80.858|19.142  |95.768|4.232   |9.72       |320     |\n| [mobilenetv4_hybrid_medium.e500_r224_in1k](http://hf.co/timm/mobilenetv4_hybrid_medium.e500_r224_in1k)             |80.442|19.558  |95.38 |4.62    |11.07      |224     |\n| [mobilenetv4_conv_blur_medium.e500_r224_in1k](http://hf.co/timm/mobilenetv4_conv_blur_medium.e500_r224_in1k)       |80.142|19.858  |95.298|4.702   |9.72       |256     |\n| [mobilenetv4_conv_medium.e500_r256_in1k](http://hf.co/timm/mobilenetv4_conv_medium.e500_r256_in1k)                 |79.928|20.072  |95.184|4.816   |9.72       |256     |\n| [mobilenetv4_conv_medium.e500_r224_in1k](http://hf.co/timm/mobilenetv4_conv_medium.e500_r224_in1k)                 |79.808|20.192  |95.186|4.814   |9.72       |256     |\n| [mobilenetv4_conv_blur_medium.e500_r224_in1k](http://hf.co/timm/mobilenetv4_conv_blur_medium.e500_r224_in1k)       |79.438|20.562  |94.932|5.068   |9.72       |224     |\n| [mobilenetv4_conv_medium.e500_r224_in1k](http://hf.co/timm/mobilenetv4_conv_medium.e500_r224_in1k)                 |79.094|20.906  |94.77 |5.23    |9.72       |224     |\n| [mobilenetv4_conv_small.e2400_r224_in1k](http://hf.co/timm/mobilenetv4_conv_small.e2400_r224_in1k)                 |74.616|25.384  |92.072|7.928   |3.77       |256     |\n| [mobilenetv4_conv_small.e1200_r224_in1k](http://hf.co/timm/mobilenetv4_conv_small.e1200_r224_in1k)                 
|74.292|25.708  |92.116|7.884   |3.77       |256     |\n| [mobilenetv4_conv_small.e2400_r224_in1k](http://hf.co/timm/mobilenetv4_conv_small.e2400_r224_in1k)                 |73.756|26.244  |91.422|8.578   |3.77       |224     |\n| [mobilenetv4_conv_small.e1200_r224_in1k](http://hf.co/timm/mobilenetv4_conv_small.e1200_r224_in1k)                 |73.454|26.546  |91.34 |8.66    |3.77       |224     |\n\n* Apple MobileCLIP (https://arxiv.org/pdf/2311.17049, FastViT and ViT-B) image tower model support & weights added (part of OpenCLIP support).\n* ViTamin (https://arxiv.org/abs/2404.02132) CLIP image tower model & weights added (part of OpenCLIP support).\n* OpenAI CLIP Modified ResNet image tower modelling & weight support (via ByobNet). Refactor AttentionPool2d.\n\n### May 14, 2024\n* Support loading PaliGemma jax weights into SigLIP ViT models with average pooling.\n* Add Hiera models from Meta (https://github.com/facebookresearch/hiera).\n* Add `normalize=` flag for transforms, return non-normalized torch.Tensor with original dtype (for `chug`)\n* Version 1.0.3 release\n\n### May 11, 2024\n* `Searching for Better ViT Baselines (For the GPU Poor)` weights and vit variants released. 
Exploring model shapes between Tiny and Base.\n\n| model | top1 | top5 | param_count | img_size |\n| -------------------------------------------------- | ------ | ------ | ----------- | -------- |\n| [vit_mediumd_patch16_reg4_gap_256.sbb_in12k_ft_in1k](https://huggingface.co/timm/vit_mediumd_patch16_reg4_gap_256.sbb_in12k_ft_in1k) | 86.202 | 97.874 | 64.11 | 256 |\n| [vit_betwixt_patch16_reg4_gap_256.sbb_in12k_ft_in1k](https://huggingface.co/timm/vit_betwixt_patch16_reg4_gap_256.sbb_in12k_ft_in1k)  | 85.418 | 97.48 | 60.4 | 256 |\n| [vit_mediumd_patch16_rope_reg1_gap_256.sbb_in1k](https://huggingface.co/timm/vit_mediumd_patch16_rope_reg1_gap_256.sbb_in1k)  | 84.322 | 96.812 | 63.95 | 256 |\n| [vit_betwixt_patch16_rope_reg4_gap_256.sbb_in1k](https://huggingface.co/timm/vit_betwixt_patch16_rope_reg4_gap_256.sbb_in1k)  | 83.906 | 96.684 | 60.23 | 256 |\n| [vit_base_patch16_rope_reg1_gap_256.sbb_in1k](https://huggingface.co/timm/vit_base_patch16_rope_reg1_gap_256.sbb_in1k)  | 83.866 | 96.67 | 86.43 | 256 |\n| [vit_medium_patch16_rope_reg1_gap_256.sbb_in1k](https://huggingface.co/timm/vit_medium_patch16_rope_reg1_gap_256.sbb_in1k)  | 83.81 | 96.824 | 38.74 | 256 |\n| [vit_betwixt_patch16_reg4_gap_256.sbb_in1k](https://huggingface.co/timm/vit_betwixt_patch16_reg4_gap_256.sbb_in1k)  | 83.706 | 96.616 | 60.4 | 256 |\n| [vit_betwixt_patch16_reg1_gap_256.sbb_in1k](https://huggingface.co/timm/vit_betwixt_patch16_reg1_gap_256.sbb_in1k)  | 83.628 | 96.544 | 60.4 | 256 |\n| [vit_medium_patch16_reg4_gap_256.sbb_in1k](https://huggingface.co/timm/vit_medium_patch16_reg4_gap_256.sbb_in1k)  | 83.47 | 96.622 | 38.88 | 256 |\n| [vit_medium_patch16_reg1_gap_256.sbb_in1k](https://huggingface.co/timm/vit_medium_patch16_reg1_gap_256.sbb_in1k)  | 83.462 | 96.548 | 38.88 | 256 |\n| [vit_little_patch16_reg4_gap_256.sbb_in1k](https://huggingface.co/timm/vit_little_patch16_reg4_gap_256.sbb_in1k)  | 82.514 | 96.262 | 22.52 | 256 |\n| 
[vit_wee_patch16_reg1_gap_256.sbb_in1k](https://huggingface.co/timm/vit_wee_patch16_reg1_gap_256.sbb_in1k)  | 80.256 | 95.360 | 13.42 | 256 |\n| [vit_pwee_patch16_reg1_gap_256.sbb_in1k](https://huggingface.co/timm/vit_pwee_patch16_reg1_gap_256.sbb_in1k)  | 80.072 | 95.136 | 15.25 | 256 |\n| [vit_mediumd_patch16_reg4_gap_256.sbb_in12k](https://huggingface.co/timm/vit_mediumd_patch16_reg4_gap_256.sbb_in12k) | N/A | N/A | 64.11 | 256 |\n| [vit_betwixt_patch16_reg4_gap_256.sbb_in12k](https://huggingface.co/timm/vit_betwixt_patch16_reg4_gap_256.sbb_in12k)  | N/A | N/A | 60.4 | 256 |\n\n* AttentionExtract helper added to extract attention maps from `timm` models. See example in https://github.com/huggingface/pytorch-image-models/discussions/1232#discussioncomment-9320949\n* `forward_intermediates()` API refined and added to more models including some ConvNets that have other extraction methods.\n* 1017 of 1047 model architectures support `features_only=True` feature extraction. Remaining 34 architectures can be supported but based on priority requests.\n* Remove torch.jit.script annotated functions including old JIT activations. 
Conflict with dynamo and dynamo does a much better job when used.\n\n### April 11, 2024\n* Prepping for a long overdue 1.0 release, things have been stable for a while now.\n* Significant feature that's been missing for a while, `features_only=True` support for ViT models with flat hidden states or non-std module layouts (so far covering  `'vit_*', 'twins_*', 'deit*', 'beit*', 'mvitv2*', 'eva*', 'samvit_*', 'flexivit*'`)\n* Above feature support achieved through a new `forward_intermediates()` API that can be used with a feature wrapping module or directly.\n```python\nmodel = timm.create_model('vit_base_patch16_224')\nfinal_feat, intermediates = model.forward_intermediates(input)\noutput = model.forward_head(final_feat)  # pooling + classifier head\n\nprint(final_feat.shape)\ntorch.Size([2, 197, 768])\n\nfor f in intermediates:\n    print(f.shape)\ntorch.Size([2, 768, 14, 14])\ntorch.Size([2, 768, 14, 14])\ntorch.Size([2, 768, 14, 14])\ntorch.Size([2, 768, 14, 14])\ntorch.Size([2, 768, 14, 14])\ntorch.Size([2, 768, 14, 14])\ntorch.Size([2, 768, 14, 14])\ntorch.Size([2, 768, 14, 14])\ntorch.Size([2, 768, 14, 14])\ntorch.Size([2, 768, 14, 14])\ntorch.Size([2, 768, 14, 14])\ntorch.Size([2, 768, 14, 14])\n\nprint(output.shape)\ntorch.Size([2, 1000])\n```\n\n```python\nmodel = timm.create_model('eva02_base_patch16_clip_224', pretrained=True, img_size=512, features_only=True, out_indices=(-3, -2,))\noutput = model(torch.randn(2, 3, 512, 512))\n\nfor o in output:\n    print(o.shape)\ntorch.Size([2, 768, 32, 32])\ntorch.Size([2, 768, 32, 32])\n```\n* TinyCLIP vision tower weights added, thx [Thien Tran](https://github.com/gau-nernst)\n\n### Feb 19, 2024\n* Next-ViT models added. Adapted from https://github.com/bytedance/Next-ViT\n* HGNet and PP-HGNetV2 models added. 
Adapted from https://github.com/PaddlePaddle/PaddleClas by [SeeFun](https://github.com/seefun)\n* Removed setup.py, moved to pyproject.toml based build supported by PDM\n* Add updated model EMA impl using _for_each for less overhead\n* Support device args in train script for non GPU devices\n* Other misc fixes and small additions\n* Min supported Python version increased to 3.8\n* Release 0.9.16\n\n### Jan 8, 2024\nDatasets & transform refactoring\n* HuggingFace streaming (iterable) dataset support (`--dataset hfids:org/dataset`)\n* Webdataset wrapper tweaks for improved split info fetching, can auto fetch splits from supported HF hub webdataset\n* Tested HF `datasets` and webdataset wrapper streaming from HF hub with recent `timm` ImageNet uploads to https://huggingface.co/timm\n* Make input & target column/field keys consistent across datasets and pass via args\n* Full monochrome support when using e.g. `--input-size 1 224 224` or `--in-chans 1`, sets PIL image conversion appropriately in dataset\n* Improved several alternate crop & resize transforms (ResizeKeepRatio, RandomCropOrPad, etc) for use in PixParse document AI project\n* Add SimCLR style color jitter prob along with grayscale and gaussian blur options to augmentations and args\n* Allow train without validation set (`--val-split ''`) in train script\n* Add `--bce-sum` (sum over class dim) and `--bce-pos-weight` (positive weighting) args for training as they're common BCE loss tweaks I was often hard coding\n\n### Nov 23, 2023\n* Added EfficientViT-Large models, thanks [SeeFun](https://github.com/seefun)\n* Fix Python 3.7 compat, will be dropping support for it soon\n* Other misc fixes\n* Release 0.9.12\n\n### Nov 20, 2023\n* Added significant flexibility for Hugging Face Hub based timm models via `model_args` config entry. 
`model_args` will be passed as kwargs through to models on creation.\n  * See example at https://huggingface.co/gaunernst/vit_base_patch16_1024_128.audiomae_as2m_ft_as20k/blob/main/config.json\n  * Usage: https://github.com/huggingface/pytorch-image-models/discussions/2035\n* Updated imagenet eval and test set csv files with latest models\n* `vision_transformer.py` typing and doc cleanup by [Laureηt](https://github.com/Laurent2916)\n* 0.9.11 release\n\n### Nov 3, 2023\n* [DFN (Data Filtering Networks)](https://huggingface.co/papers/2309.17425) and [MetaCLIP](https://huggingface.co/papers/2309.16671) ViT weights added\n* DINOv2 'register' ViT model weights added (https://huggingface.co/papers/2309.16588, https://huggingface.co/papers/2304.07193)\n* Add `quickgelu` ViT variants for OpenAI, DFN, MetaCLIP weights that use it (less efficient)\n* Improved typing added to ResNet, MobileNet-v3 thanks to [Aryan](https://github.com/a-r-r-o-w)\n* ImageNet-12k fine-tuned (from LAION-2B CLIP) `convnext_xxlarge`\n* 0.9.9 release\n\n### Oct 20, 2023\n* [SigLIP](https://huggingface.co/papers/2303.15343) image tower weights supported in `vision_transformer.py`.\n  * Great potential for fine-tune and downstream feature use.\n* Experimental 'register' support in vit models as per [Vision Transformers Need Registers](https://huggingface.co/papers/2309.16588)\n* Updated RepViT with new weight release. 
Thanks [wangao](https://github.com/jameslahm)\n* Add patch resizing support (on pretrained weight load) to Swin models\n* 0.9.8 release pending\n\n### Sep 1, 2023\n* TinyViT added by [SeeFun](https://github.com/seefun)\n* Fix EfficientViT (MIT) to use torch.autocast so it works back to PT 1.10\n* 0.9.7 release\n\n### Aug 28, 2023\n* Add dynamic img size support to models in `vision_transformer.py`, `vision_transformer_hybrid.py`, `deit.py`, and `eva.py` w/o breaking backward compat.\n  * Add `dynamic_img_size=True` to args at model creation time to allow changing the grid size (interpolate abs and/or ROPE pos embed each forward pass).\n  * Add `dynamic_img_pad=True` to allow image sizes that aren't divisible by patch size (pad bottom right to patch size each forward pass).\n  * Enabling either dynamic mode will break FX tracing unless PatchEmbed module added as leaf.\n  * Existing method of resizing position embedding by passing different `img_size` (interpolate pretrained embed weights once) on creation still works.\n  * Existing method of changing `patch_size` (resize pretrained patch_embed weights once) on creation still works.\n  * Example validation cmd `python validate.py --data-dir /imagenet --model vit_base_patch16_224 --amp --amp-dtype bfloat16 --img-size 255 --crop-pct 1.0 --model-kwargs dynamic_img_size=True dynamic_img_pad=True`\n\n### Aug 25, 2023\n* Many new models since last release\n  * FastViT - https://arxiv.org/abs/2303.14189\n  * MobileOne - https://arxiv.org/abs/2206.04040\n  * InceptionNeXt - https://arxiv.org/abs/2303.16900\n  * RepGhostNet - https://arxiv.org/abs/2211.06088 (thanks https://github.com/ChengpengChen)\n  * GhostNetV2 - https://arxiv.org/abs/2211.12905 (thanks https://github.com/yehuitang)\n  * EfficientViT (MSRA) - https://arxiv.org/abs/2305.07027 (thanks https://github.com/seefun)\n  * EfficientViT (MIT) - https://arxiv.org/abs/2205.14756 (thanks https://github.com/seefun)\n* Add `--reparam` arg to `benchmark.py`, 
`onnx_export.py`, and `validate.py` to trigger layer reparameterization / fusion for models with any one of `reparameterize()`, `switch_to_deploy()` or `fuse()`\n  * Including FastViT, MobileOne, RepGhostNet, EfficientViT (MSRA), RepViT, RepVGG, and LeViT\n* Preparing 0.9.6 'back to school' release\n\n### Aug 11, 2023\n* Swin, MaxViT, CoAtNet, and BEiT models support resizing of image/window size on creation with adaptation of pretrained weights\n* Example validation cmd to test w/ non-square resize `python validate.py --data-dir /imagenet --model swin_base_patch4_window7_224.ms_in22k_ft_in1k --amp --amp-dtype bfloat16 --input-size 3 256 320 --model-kwargs window_size=8,10 img_size=256,320`\n\n### Aug 3, 2023\n* Add GluonCV weights for HRNet w18_small and w18_small_v2. Converted by [SeeFun](https://github.com/seefun)\n* Fix `selecsls*` model naming regression\n* Patch and position embedding for ViT/EVA works for bfloat16/float16 weights on load (or activations for on-the-fly resize)\n* v0.9.5 release prep\n\n### July 27, 2023\n* Added timm trained `seresnextaa201d_32x8d.sw_in12k_ft_in1k_384` weights (and `.sw_in12k` pretrain) with 87.3% top-1 on ImageNet-1k, best ImageNet ResNet family model I'm aware of.\n* RepViT model and weights (https://arxiv.org/abs/2307.09283) added by [wangao](https://github.com/jameslahm)\n* I-JEPA ViT feature weights (no classifier) added by [SeeFun](https://github.com/seefun)\n* SAM-ViT (segment anything) feature weights (no classifier) added by [SeeFun](https://github.com/seefun)\n* Add support for alternative feat extraction methods and -ve indices to EfficientNet\n* Add NAdamW optimizer\n* Misc fixes\n\n### May 11, 2023\n* `timm` 0.9 released, transition from 0.8.xdev releases\n\n### May 10, 2023\n* Hugging Face Hub downloading is now default, 1132 models on https://huggingface.co/timm, 1163 weights in `timm`\n* DINOv2 vit feature backbone weights added thanks to [Leng Yue](https://github.com/leng-yue)\n* FB MAE vit feature backbone 
weights added\n* OpenCLIP DataComp-XL L/14 feat backbone weights added\n* MetaFormer (poolformer-v2, caformer, convformer, updated poolformer (v1)) w/ weights added by [Fredo Guan](https://github.com/fffffgggg54)\n* Experimental `get_intermediate_layers` function on vit/deit models for grabbing hidden states (inspired by DINO impl). This is WIP and may change significantly... feedback welcome.\n* Model creation throws error if `pretrained=True` and no weights exist (instead of continuing with random initialization)\n* Fix regression with inception / nasnet TF sourced weights with 1001 classes in original classifiers\n* bitsandbytes (https://github.com/TimDettmers/bitsandbytes) optimizers added to factory, use `bnb` prefix, ie `bnbadam8bit`\n* Misc cleanup and fixes\n* Final testing before switching to a 0.9 and bringing `timm` out of pre-release state\n\n### April 27, 2023\n* 97% of `timm` models uploaded to HF Hub and almost all updated to support multi-weight pretrained configs\n* Minor cleanup and refactoring of another batch of models as multi-weight added. More fused_attn (F.sdpa) and features_only support, and torchscript fixes.\n\n### April 21, 2023\n* Gradient accumulation support added to train script and tested (`--grad-accum-steps`), thanks [Taeksang Kim](https://github.com/voidbag)\n* More weights on HF Hub (cspnet, cait, volo, xcit, tresnet, hardcorenas, densenet, dpn, vovnet, xception_aligned)\n* Added `--head-init-scale` and `--head-init-bias` to train.py to scale classifier head and set fixed bias for fine-tune\n* Remove all InplaceABN (`inplace_abn`) use, replaced use in tresnet with standard BatchNorm (modified weights accordingly). \n\n### April 12, 2023\n* Add ONNX export script, validate script, helpers that I've had kicking around for a long time. 
Tweak 'same' padding for better export w/ recent ONNX + pytorch.\n* Refactor dropout args for vit and vit-like models, separate drop_rate into `drop_rate` (classifier dropout), `proj_drop_rate` (block mlp / out projections), `pos_drop_rate` (position embedding drop), `attn_drop_rate` (attention dropout). Also add patch dropout (FLIP) to vit and eva models.\n* fused F.scaled_dot_product_attention support to more vit models, add env var (TIMM_FUSED_ATTN) to control, and config interface to enable/disable\n* Add EVA-CLIP backbones w/ image tower weights, all the way up to 4B param 'enormous' model, and 336x336 OpenAI ViT mode that was missed.\n\n### April 5, 2023\n* ALL ResNet models pushed to Hugging Face Hub with multi-weight support\n  * All past `timm` trained weights added with recipe based tags to differentiate\n  * All ResNet strikes back A1/A2/A3 (seed 0) and R50 example B/C1/C2/D weights available\n  * Add torchvision v2 recipe weights to existing torchvision originals\n  * See comparison table in https://huggingface.co/timm/seresnextaa101d_32x8d.sw_in12k_ft_in1k_288#model-comparison\n* New ImageNet-12k + ImageNet-1k fine-tunes available for a few anti-aliased ResNet models\n  * `resnetaa50d.sw_in12k_ft_in1k` - 81.7 @ 224, 82.6 @ 288\n  * `resnetaa101d.sw_in12k_ft_in1k` - 83.5 @ 224, 84.1 @ 288\n  * `seresnextaa101d_32x8d.sw_in12k_ft_in1k` - 86.0 @ 224, 86.5 @ 288 \n  * `seresnextaa101d_32x8d.sw_in12k_ft_in1k_288` - 86.5 @ 288, 86.7 @ 320\n\n### March 31, 2023\n* Add first ConvNext-XXLarge CLIP -> IN-1k fine-tune and IN-12k intermediate fine-tunes for convnext-base/large CLIP models.\n\n| model                                                                                                                |top1  |top5  |img_size|param_count|gmacs |macts |\n|----------------------------------------------------------------------------------------------------------------------|------|------|--------|-----------|------|------|\n| 
[convnext_xxlarge.clip_laion2b_soup_ft_in1k](https://huggingface.co/timm/convnext_xxlarge.clip_laion2b_soup_ft_in1k) |88.612|98.704|256     |846.47     |198.09|124.45|\n| convnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_384                                                               |88.312|98.578|384     |200.13     |101.11|126.74|\n| convnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_320                                                               |87.968|98.47 |320     |200.13     |70.21 |88.02 |\n| convnext_base.clip_laion2b_augreg_ft_in12k_in1k_384                                                                  |87.138|98.212|384     |88.59      |45.21 |84.49 |\n| convnext_base.clip_laion2b_augreg_ft_in12k_in1k                                                                      |86.344|97.97 |256     |88.59      |20.09 |37.55 |\n\n* Add EVA-02 MIM pretrained and fine-tuned weights, push to HF hub and update model cards for all EVA models. First model over 90% top-1 (99% top-5)! 
Check out the original code & weights at https://github.com/baaivision/EVA for more details on their work blending MIM, CLIP w/ many model, dataset, and train recipe tweaks.\n\n| model                                              |top1  |top5  |param_count|img_size|\n|----------------------------------------------------|------|------|-----------|--------|\n| [eva02_large_patch14_448.mim_m38m_ft_in22k_in1k](https://huggingface.co/timm/eva02_large_patch14_448.mim_m38m_ft_in1k) |90.054|99.042|305.08     |448     |\n| eva02_large_patch14_448.mim_in22k_ft_in22k_in1k    |89.946|99.01 |305.08     |448     |\n| eva_giant_patch14_560.m30m_ft_in22k_in1k           |89.792|98.992|1014.45    |560     |\n| eva02_large_patch14_448.mim_in22k_ft_in1k          |89.626|98.954|305.08     |448     |\n| eva02_large_patch14_448.mim_m38m_ft_in1k           |89.57 |98.918|305.08     |448     |\n| eva_giant_patch14_336.m30m_ft_in22k_in1k           |89.56 |98.956|1013.01    |336     |\n| eva_giant_patch14_336.clip_ft_in1k                 |89.466|98.82 |1013.01    |336     |\n| eva_large_patch14_336.in22k_ft_in22k_in1k          |89.214|98.854|304.53     |336     |\n| eva_giant_patch14_224.clip_ft_in1k                 |88.882|98.678|1012.56    |224     |\n| eva02_base_patch14_448.mim_in22k_ft_in22k_in1k     |88.692|98.722|87.12      |448     |\n| eva_large_patch14_336.in22k_ft_in1k                |88.652|98.722|304.53     |336     |\n| eva_large_patch14_196.in22k_ft_in22k_in1k          |88.592|98.656|304.14     |196     |\n| eva02_base_patch14_448.mim_in22k_ft_in1k           |88.23 |98.564|87.12      |448     |\n| eva_large_patch14_196.in22k_ft_in1k                |87.934|98.504|304.14     |196     |\n| eva02_small_patch14_336.mim_in22k_ft_in1k          |85.74 |97.614|22.13      |336     |\n| eva02_tiny_patch14_336.mim_in22k_ft_in1k           |80.658|95.524|5.76       |336     |\n\n* Multi-weight and HF hub for DeiT and MLP-Mixer based models\n\n### March 22, 2023\n* More weights pushed to HF 
hub along with multi-weight support, including: `regnet.py`, `rexnet.py`, `byobnet.py`, `resnetv2.py`, `swin_transformer.py`, `swin_transformer_v2.py`, `swin_transformer_v2_cr.py`\n* Swin Transformer models support feature extraction (NCHW feat maps for `swinv2_cr_*`, and NHWC for all others) and spatial embedding outputs.\n* FocalNet (from https://github.com/microsoft/FocalNet) models and weights added with significant refactoring, feature extraction, no fixed resolution / sizing constraint\n* RegNet weights increased with HF hub push, SWAG, SEER, and torchvision v2 weights. SEER is pretty poor wrt to performance for model size, but possibly useful.\n* More ImageNet-12k pretrained and 1k fine-tuned `timm` weights:\n  * `rexnetr_200.sw_in12k_ft_in1k` - 82.6 @ 224, 83.2 @ 288\n  * `rexnetr_300.sw_in12k_ft_in1k` - 84.0 @ 224, 84.5 @ 288\n  * `regnety_120.sw_in12k_ft_in1k` - 85.0 @ 224, 85.4 @ 288\n  * `regnety_160.lion_in12k_ft_in1k` - 85.6 @ 224, 86.0 @ 288\n  * `regnety_160.sw_in12k_ft_in1k` - 85.6 @ 224, 86.0 @ 288  (compare to SWAG PT + 1k FT this is same BUT much lower res, blows SEER FT away)\n* Model name deprecation + remapping functionality added (a milestone for bringing 0.8.x out of pre-release). Mappings being added...\n* Minor bug fixes and improvements.\n\n### Feb 26, 2023\n* Add ConvNeXt-XXLarge CLIP pretrained image tower weights for fine-tune & features (fine-tuning TBD) -- see [model card](https://huggingface.co/laion/CLIP-convnext_xxlarge-laion2B-s34B-b82K-augreg-soup)\n* Update `convnext_xxlarge` default LayerNorm eps to 1e-5 (for CLIP weights, improved stability)\n* 0.8.15dev0\n\n### Feb 20, 2023\n* Add 320x320 `convnext_large_mlp.clip_laion2b_ft_320` and `convnext_large_mlp.clip_laion2b_ft_soup_320` CLIP image tower weights for features & fine-tune\n* 0.8.13dev0 pypi release for latest changes w/ move to huggingface org\n\n### Feb 16, 2023\n* `safetensor` checkpoint support added\n* Add ideas from 'Scaling Vision Transformers to 22 B. 
Params' (https://arxiv.org/abs/2302.05442) -- qk norm, RmsNorm, parallel block\n* Add F.scaled_dot_product_attention support (PyTorch 2.0 only) to `vit_*`, `vit_relpos*`, `coatnet` / `maxxvit` (to start)\n* Lion optimizer (w/ multi-tensor option) added (https://arxiv.org/abs/2302.06675)\n* gradient checkpointing works with `features_only=True`\n\n### Feb 7, 2023\n* New inference benchmark numbers added in [results](results/) folder.\n* Add convnext LAION CLIP trained weights and initial set of in1k fine-tunes\n  * `convnext_base.clip_laion2b_augreg_ft_in1k` - 86.2% @ 256x256\n  * `convnext_base.clip_laiona_augreg_ft_in1k_384` - 86.5% @ 384x384\n  * `convnext_large_mlp.clip_laion2b_augreg_ft_in1k` - 87.3% @ 256x256\n  * `convnext_large_mlp.clip_laion2b_augreg_ft_in1k_384` - 87.9% @ 384x384\n* Add DaViT models. Supports `features_only=True`. Adapted from https://github.com/dingmyu/davit by [Fredo](https://github.com/fffffgggg54).\n* Use a common NormMlpClassifierHead across MaxViT, ConvNeXt, DaViT\n* Add EfficientFormer-V2 model, update EfficientFormer, and refactor LeViT (closely related architectures). Weights on HF hub.\n  * New EfficientFormer-V2 arch, significant refactor from original at (https://github.com/snap-research/EfficientFormer). 
Supports `features_only=True`.\n  * Minor updates to EfficientFormer.\n  * Refactor LeViT models to stages, add `features_only=True` support to new `conv` variants, weight remap required.\n* Move ImageNet meta-data (synsets, indices) from `/results` to [`timm/data/_info`](timm/data/_info/).\n* Add ImageNetInfo / DatasetInfo classes to provide labelling for various ImageNet classifier layouts in `timm`\n  * Update `inference.py` to use, try: `python inference.py --data-dir /folder/to/images --model convnext_small.in12k --label-type detail --topk 5`\n* Ready for 0.8.10 pypi pre-release (final testing).\n\n### Jan 20, 2023\n* Add two convnext 12k -> 1k fine-tunes at 384x384\n  * `convnext_tiny.in12k_ft_in1k_384` - 85.1 @ 384\n  * `convnext_small.in12k_ft_in1k_384` - 86.2 @ 384\n\n* Push all MaxxViT weights to HF hub, and add new ImageNet-12k -> 1k fine-tunes for `rw` base MaxViT and CoAtNet 1/2 models\n\n|model                                                                                                                   |top1 |top5 |samples / sec  |Params (M)     |GMAC  |Act (M)|\n|------------------------------------------------------------------------------------------------------------------------|----:|----:|--------------:|--------------:|-----:|------:|\n|[maxvit_xlarge_tf_512.in21k_ft_in1k](https://huggingface.co/timm/maxvit_xlarge_tf_512.in21k_ft_in1k)                    |88.53|98.64|          21.76|         475.77|534.14|1413.22|\n|[maxvit_xlarge_tf_384.in21k_ft_in1k](https://huggingface.co/timm/maxvit_xlarge_tf_384.in21k_ft_in1k)                    |88.32|98.54|          42.53|         475.32|292.78| 668.76|\n|[maxvit_base_tf_512.in21k_ft_in1k](https://huggingface.co/timm/maxvit_base_tf_512.in21k_ft_in1k)                        |88.20|98.53|          50.87|         119.88|138.02| 703.99|\n|[maxvit_large_tf_512.in21k_ft_in1k](https://huggingface.co/timm/maxvit_large_tf_512.in21k_ft_in1k)                      |88.04|98.40|          36.42|         
212.33|244.75| 942.15|\n|[maxvit_large_tf_384.in21k_ft_in1k](https://huggingface.co/timm/maxvit_large_tf_384.in21k_ft_in1k)                      |87.98|98.56|          71.75|         212.03|132.55| 445.84|\n|[maxvit_base_tf_384.in21k_ft_in1k](https://huggingface.co/timm/maxvit_base_tf_384.in21k_ft_in1k)                        |87.92|98.54|         104.71|         119.65| 73.80| 332.90|\n|[maxvit_rmlp_base_rw_384.sw_in12k_ft_in1k](https://huggingface.co/timm/maxvit_rmlp_base_rw_384.sw_in12k_ft_in1k)        |87.81|98.37|         106.55|         116.14| 70.97| 318.95|\n|[maxxvitv2_rmlp_base_rw_384.sw_in12k_ft_in1k](https://huggingface.co/timm/maxxvitv2_rmlp_base_rw_384.sw_in12k_ft_in1k)  |87.47|98.37|         149.49|         116.09| 72.98| 213.74|\n|[coatnet_rmlp_2_rw_384.sw_in12k_ft_in1k](https://huggingface.co/timm/coatnet_rmlp_2_rw_384.sw_in12k_ft_in1k)            |87.39|98.31|         160.80|          73.88| 47.69| 209.43|\n|[maxvit_rmlp_base_rw_224.sw_in12k_ft_in1k](https://huggingface.co/timm/maxvit_rmlp_base_rw_224.sw_in12k_ft_in1k)        |86.89|98.02|         375.86|         116.14| 23.15|  92.64|\n|[maxxvitv2_rmlp_base_rw_224.sw_in12k_ft_in1k](https://huggingface.co/timm/maxxvitv2_rmlp_base_rw_224.sw_in12k_ft_in1k)  |86.64|98.02|         501.03|         116.09| 24.20|  62.77|\n|[maxvit_base_tf_512.in1k](https://huggingface.co/timm/maxvit_base_tf_512.in1k)                                          |86.60|97.92|          50.75|         119.88|138.02| 703.99|\n|[coatnet_2_rw_224.sw_in12k_ft_in1k](https://huggingface.co/timm/coatnet_2_rw_224.sw_in12k_ft_in1k)                      |86.57|97.89|         631.88|          73.87| 15.09|  49.22|\n|[maxvit_large_tf_512.in1k](https://huggingface.co/timm/maxvit_large_tf_512.in1k)                                        |86.52|97.88|          36.04|         212.33|244.75| 942.15|\n|[coatnet_rmlp_2_rw_224.sw_in12k_ft_in1k](https://huggingface.co/timm/coatnet_rmlp_2_rw_224.sw_in12k_ft_in1k)            |86.49|97.90|         
620.58|          73.88| 15.18|  54.78|\n|[maxvit_base_tf_384.in1k](https://huggingface.co/timm/maxvit_base_tf_384.in1k)                                          |86.29|97.80|         101.09|         119.65| 73.80| 332.90|\n|[maxvit_large_tf_384.in1k](https://huggingface.co/timm/maxvit_large_tf_384.in1k)                                        |86.23|97.69|          70.56|         212.03|132.55| 445.84|\n|[maxvit_small_tf_512.in1k](https://huggingface.co/timm/maxvit_small_tf_512.in1k)                                        |86.10|97.76|          88.63|          69.13| 67.26| 383.77|\n|[maxvit_tiny_tf_512.in1k](https://huggingface.co/timm/maxvit_tiny_tf_512.in1k)                                          |85.67|97.58|         144.25|          31.05| 33.49| 257.59|\n|[maxvit_small_tf_384.in1k](https://huggingface.co/timm/maxvit_small_tf_384.in1k)                                        |85.54|97.46|         188.35|          69.02| 35.87| 183.65|\n|[maxvit_tiny_tf_384.in1k](https://huggingface.co/timm/maxvit_tiny_tf_384.in1k)                                          |85.11|97.38|         293.46|          30.98| 17.53| 123.42|\n|[maxvit_large_tf_224.in1k](https://huggingface.co/timm/maxvit_large_tf_224.in1k)                                        |84.93|96.97|         247.71|         211.79| 43.68| 127.35|\n|[coatnet_rmlp_1_rw2_224.sw_in12k_ft_in1k](https://huggingface.co/timm/coatnet_rmlp_1_rw2_224.sw_in12k_ft_in1k)          |84.90|96.96|        1025.45|          41.72|  8.11|  40.13|\n|[maxvit_base_tf_224.in1k](https://huggingface.co/timm/maxvit_base_tf_224.in1k)                                          |84.85|96.99|         358.25|         119.47| 24.04|  95.01|\n|[maxxvit_rmlp_small_rw_256.sw_in1k](https://huggingface.co/timm/maxxvit_rmlp_small_rw_256.sw_in1k)                      |84.63|97.06|         575.53|          66.01| 14.67|  58.38|\n|[coatnet_rmlp_2_rw_224.sw_in1k](https://huggingface.co/timm/coatnet_rmlp_2_rw_224.sw_in1k)                              
|84.61|96.74|         625.81|          73.88| 15.18|  54.78|\n|[maxvit_rmlp_small_rw_224.sw_in1k](https://huggingface.co/timm/maxvit_rmlp_small_rw_224.sw_in1k)                        |84.49|96.76|         693.82|          64.90| 10.75|  49.30|\n|[maxvit_small_tf_224.in1k](https://huggingface.co/timm/maxvit_small_tf_224.in1k)                                        |84.43|96.83|         647.96|          68.93| 11.66|  53.17|\n|[maxvit_rmlp_tiny_rw_256.sw_in1k](https://huggingface.co/timm/maxvit_rmlp_tiny_rw_256.sw_in1k)                          |84.23|96.78|         807.21|          29.15|  6.77|  46.92|\n|[coatnet_1_rw_224.sw_in1k](https://huggingface.co/timm/coatnet_1_rw_224.sw_in1k)                                        |83.62|96.38|         989.59|          41.72|  8.04|  34.60|\n|[maxvit_tiny_rw_224.sw_in1k](https://huggingface.co/timm/maxvit_tiny_rw_224.sw_in1k)                                    |83.50|96.50|        1100.53|          29.06|  5.11|  33.11|\n|[maxvit_tiny_tf_224.in1k](https://huggingface.co/timm/maxvit_tiny_tf_224.in1k)                                          |83.41|96.59|        1004.94|          30.92|  5.60|  35.78|\n|[coatnet_rmlp_1_rw_224.sw_in1k](https://huggingface.co/timm/coatnet_rmlp_1_rw_224.sw_in1k)                              |83.36|96.45|        1093.03|          41.69|  7.85|  35.47|\n|[maxxvitv2_nano_rw_256.sw_in1k](https://huggingface.co/timm/maxxvitv2_nano_rw_256.sw_in1k)                              |83.11|96.33|        1276.88|          23.70|  6.26|  23.05|\n|[maxxvit_rmlp_nano_rw_256.sw_in1k](https://huggingface.co/timm/maxxvit_rmlp_nano_rw_256.sw_in1k)                        |83.03|96.34|        1341.24|          16.78|  4.37|  26.05|\n|[maxvit_rmlp_nano_rw_256.sw_in1k](https://huggingface.co/timm/maxvit_rmlp_nano_rw_256.sw_in1k)                          |82.96|96.26|        1283.24|          15.50|  4.47|  31.92|\n|[maxvit_nano_rw_256.sw_in1k](https://huggingface.co/timm/maxvit_nano_rw_256.sw_in1k)                       
             |82.93|96.23|        1218.17|          15.45|  4.46|  30.28|\n|[coatnet_bn_0_rw_224.sw_in1k](https://huggingface.co/timm/coatnet_bn_0_rw_224.sw_in1k)                                  |82.39|96.19|        1600.14|          27.44|  4.67|  22.04|\n|[coatnet_0_rw_224.sw_in1k](https://huggingface.co/timm/coatnet_0_rw_224.sw_in1k)                                        |82.39|95.84|        1831.21|          27.44|  4.43|  18.73|\n|[coatnet_rmlp_nano_rw_224.sw_in1k](https://huggingface.co/timm/coatnet_rmlp_nano_rw_224.sw_in1k)                        |82.05|95.87|        2109.09|          15.15|  2.62|  20.34|\n|[coatnext_nano_rw_224.sw_in1k](https://huggingface.co/timm/coatnext_nano_rw_224.sw_in1k)                                |81.95|95.92|        2525.52|          14.70|  2.47|  12.80|\n|[coatnet_nano_rw_224.sw_in1k](https://huggingface.co/timm/coatnet_nano_rw_224.sw_in1k)                                  |81.70|95.64|        2344.52|          15.14|  2.41|  15.41|\n|[maxvit_rmlp_pico_rw_256.sw_in1k](https://huggingface.co/timm/maxvit_rmlp_pico_rw_256.sw_in1k)                          |80.53|95.21|        1594.71|           7.52|  1.85|  24.86|\n\n### Jan 11, 2023\n* Update ConvNeXt ImageNet-12k pretrain series w/ two new fine-tuned weights (and pre FT `.in12k` tags)\n  * `convnext_nano.in12k_ft_in1k` - 82.3 @ 224, 82.9 @ 288  (previously released)\n  * `convnext_tiny.in12k_ft_in1k` - 84.2 @ 224, 84.5 @ 288\n  * `convnext_small.in12k_ft_in1k` - 85.2 @ 224, 85.3 @ 288\n\n### Jan 6, 2023\n* Finally got around to adding `--model-kwargs` and `--opt-kwargs` to scripts to pass through rare args directly to model classes from cmd line\n  * `train.py --data-dir /imagenet --model resnet50 --amp --model-kwargs output_stride=16 act_layer=silu`\n  * `train.py --data-dir /imagenet --model vit_base_patch16_clip_224 --img-size 240 --amp --model-kwargs img_size=240 patch_size=12`\n* Cleanup some popular models to better support arg passthrough / merge with model configs, 
more to go.\n\n### Jan 5, 2023\n* ConvNeXt-V2 models and weights added to existing `convnext.py`\n  * Paper: [ConvNeXt V2: Co-designing and Scaling ConvNets with Masked Autoencoders](http://arxiv.org/abs/2301.00808)\n  * Reference impl: https://github.com/facebookresearch/ConvNeXt-V2 (NOTE: weights currently CC-BY-NC)\n\n### Dec 23, 2022 🎄☃\n* Add FlexiViT models and weights from https://github.com/google-research/big_vision (check out paper at https://arxiv.org/abs/2212.08013)\n  * NOTE currently resizing is static on model creation, on-the-fly dynamic / train patch size sampling is a WIP\n* Many more models updated to multi-weight and downloadable via HF hub now (convnext, efficientnet, mobilenet, vision_transformer*, beit)\n* More model pretrained tag and adjustments, some model names changed (working on deprecation translations, consider main branch DEV branch right now, use 0.6.x for stable use)\n* More ImageNet-12k (subset of 22k) pretrain models popping up:\n  * `efficientnet_b5.in12k_ft_in1k` - 85.9 @ 448x448\n  * `vit_medium_patch16_gap_384.in12k_ft_in1k` - 85.5 @ 384x384\n  * `vit_medium_patch16_gap_256.in12k_ft_in1k` - 84.5 @ 256x256\n  * `convnext_nano.in12k_ft_in1k` - 82.9 @ 288x288\n\n### Dec 8, 2022\n* Add 'EVA l' to `vision_transformer.py`, MAE style ViT-L/14 MIM pretrain w/ EVA-CLIP targets, FT on ImageNet-1k (w/ ImageNet-22k intermediate for some)\n  * original source: https://github.com/baaivision/EVA\n\n| model                                     | top1 | param_count |  gmac | macts | hub                                     |\n|:------------------------------------------|-----:|------------:|------:|------:|:----------------------------------------|\n| eva_large_patch14_336.in22k_ft_in22k_in1k | 89.2 |       304.5 | 191.1 | 270.2 | [link](https://huggingface.co/BAAI/EVA) |\n| eva_large_patch14_336.in22k_ft_in1k       | 88.7 |       304.5 | 191.1 | 270.2 | [link](https://huggingface.co/BAAI/EVA) |\n| 
eva_large_patch14_196.in22k_ft_in22k_in1k | 88.6 |       304.1 |  61.6 |  63.5 | [link](https://huggingface.co/BAAI/EVA) |\n| eva_large_patch14_196.in22k_ft_in1k       | 87.9 |       304.1 |  61.6 |  63.5 | [link](https://huggingface.co/BAAI/EVA) |\n\n### Dec 6, 2022\n* Add 'EVA g', BEiT style ViT-g/14 model weights w/ both MIM pretrain and CLIP pretrain to `beit.py`.\n  * original source: https://github.com/baaivision/EVA\n  * paper: https://arxiv.org/abs/2211.07636\n\n| model                                    |   top1 |   param_count |   gmac |   macts | hub                                     |\n|:-----------------------------------------|-------:|--------------:|-------:|--------:|:----------------------------------------|\n| eva_giant_patch14_560.m30m_ft_in22k_in1k |   89.8 |        1014.4 | 1906.8 |  2577.2 | [link](https://huggingface.co/BAAI/EVA) |\n| eva_giant_patch14_336.m30m_ft_in22k_in1k |   89.6 |        1013   |  620.6 |   550.7 | [link](https://huggingface.co/BAAI/EVA) |\n| eva_giant_patch14_336.clip_ft_in1k       |   89.4 |        1013   |  620.6 |   550.7 | [link](https://huggingface.co/BAAI/EVA) |\n| eva_giant_patch14_224.clip_ft_in1k       |   89.1 |        1012.6 |  267.2 |   192.6 | [link](https://huggingface.co/BAAI/EVA) |\n\n### Dec 5, 2022\n\n* Pre-release (`0.8.0dev0`) of multi-weight support (`model_arch.pretrained_tag`). Install with `pip install --pre timm`\n  * vision_transformer, maxvit, convnext are the first three model impl w/ support\n  * model names are changing with this (previous _21k, etc. 
fn will merge), still sorting out deprecation handling\n  * bugs are likely, but I need feedback so please try it out\n  * if stability is needed, please use 0.6.x pypi releases or clone from [0.6.x branch](https://github.com/rwightman/pytorch-image-models/tree/0.6.x)\n* Support for PyTorch 2.0 compile is added in train/validate/inference/benchmark, use `--torchcompile` argument\n* Inference script allows more control over output, select k for top-class index + prob json, csv or parquet output\n* Add a full set of fine-tuned CLIP image tower weights from both LAION-2B and original OpenAI CLIP models\n\n| model                                            |   top1 |   param_count |   gmac |   macts | hub                                                                                  |\n|:-------------------------------------------------|-------:|--------------:|-------:|--------:|:-------------------------------------------------------------------------------------|\n| vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k  |   88.6 |         632.5 |  391   |   407.5 | [link](https://huggingface.co/timm/vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k)  |\n| vit_large_patch14_clip_336.openai_ft_in12k_in1k  |   88.3 |         304.5 |  191.1 |   270.2 | [link](https://huggingface.co/timm/vit_large_patch14_clip_336.openai_ft_in12k_in1k)  |\n| vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k  |   88.2 |         632   |  167.4 |   139.4 | [link](https://huggingface.co/timm/vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k)  |\n| vit_large_patch14_clip_336.laion2b_ft_in12k_in1k |   88.2 |         304.5 |  191.1 |   270.2 | [link](https://huggingface.co/timm/vit_large_patch14_clip_336.laion2b_ft_in12k_in1k) |\n| vit_large_patch14_clip_224.openai_ft_in12k_in1k  |   88.2 |         304.2 |   81.1 |    88.8 | [link](https://huggingface.co/timm/vit_large_patch14_clip_224.openai_ft_in12k_in1k)  |\n| vit_large_patch14_clip_224.laion2b_ft_in12k_in1k |   87.9 |         304.2 |   81.1 |    88.8 
| [link](https://huggingface.co/timm/vit_large_patch14_clip_224.laion2b_ft_in12k_in1k) |\n| vit_large_patch14_clip_224.openai_ft_in1k        |   87.9 |         304.2 |   81.1 |    88.8 | [link](https://huggingface.co/timm/vit_large_patch14_clip_224.openai_ft_in1k)        |\n| vit_large_patch14_clip_336.laion2b_ft_in1k       |   87.9 |         304.5 |  191.1 |   270.2 | [link](https://huggingface.co/timm/vit_large_patch14_clip_336.laion2b_ft_in1k)       |\n| vit_huge_patch14_clip_224.laion2b_ft_in1k        |   87.6 |         632   |  167.4 |   139.4 | [link](https://huggingface.co/timm/vit_huge_patch14_clip_224.laion2b_ft_in1k)        |\n| vit_large_patch14_clip_224.laion2b_ft_in1k       |   87.3 |         304.2 |   81.1 |    88.8 | [link](https://huggingface.co/timm/vit_large_patch14_clip_224.laion2b_ft_in1k)       |\n| vit_base_patch16_clip_384.laion2b_ft_in12k_in1k  |   87.2 |          86.9 |   55.5 |   101.6 | [link](https://huggingface.co/timm/vit_base_patch16_clip_384.laion2b_ft_in12k_in1k)  |\n| vit_base_patch16_clip_384.openai_ft_in12k_in1k   |   87   |          86.9 |   55.5 |   101.6 | [link](https://huggingface.co/timm/vit_base_patch16_clip_384.openai_ft_in12k_in1k)   |\n| vit_base_patch16_clip_384.laion2b_ft_in1k        |   86.6 |          86.9 |   55.5 |   101.6 | [link](https://huggingface.co/timm/vit_base_patch16_clip_384.laion2b_ft_in1k)        |\n| vit_base_patch16_clip_384.openai_ft_in1k         |   86.2 |          86.9 |   55.5 |   101.6 | [link](https://huggingface.co/timm/vit_base_patch16_clip_384.openai_ft_in1k)         |\n| vit_base_patch16_clip_224.laion2b_ft_in12k_in1k  |   86.2 |          86.6 |   17.6 |    23.9 | [link](https://huggingface.co/timm/vit_base_patch16_clip_224.laion2b_ft_in12k_in1k)  |\n| vit_base_patch16_clip_224.openai_ft_in12k_in1k   |   85.9 |          86.6 |   17.6 |    23.9 | [link](https://huggingface.co/timm/vit_base_patch16_clip_224.openai_ft_in12k_in1k)   |\n| vit_base_patch32_clip_448.laion2b_ft_in12k_in1k  |   85.8 
|          88.3 |   17.9 |    23.9 | [link](https://huggingface.co/timm/vit_base_patch32_clip_448.laion2b_ft_in12k_in1k)  |\n| vit_base_patch16_clip_224.laion2b_ft_in1k        |   85.5 |          86.6 |   17.6 |    23.9 | [link](https://huggingface.co/timm/vit_base_patch16_clip_224.laion2b_ft_in1k)        |\n| vit_base_patch32_clip_384.laion2b_ft_in12k_in1k  |   85.4 |          88.3 |   13.1 |    16.5 | [link](https://huggingface.co/timm/vit_base_patch32_clip_384.laion2b_ft_in12k_in1k)  |\n| vit_base_patch16_clip_224.openai_ft_in1k         |   85.3 |          86.6 |   17.6 |    23.9 | [link](https://huggingface.co/timm/vit_base_patch16_clip_224.openai_ft_in1k)         |\n| vit_base_patch32_clip_384.openai_ft_in12k_in1k   |   85.2 |          88.3 |   13.1 |    16.5 | [link](https://huggingface.co/timm/vit_base_patch32_clip_384.openai_ft_in12k_in1k)   |\n| vit_base_patch32_clip_224.laion2b_ft_in12k_in1k  |   83.3 |          88.2 |    4.4 |     5   | [link](https://huggingface.co/timm/vit_base_patch32_clip_224.laion2b_ft_in12k_in1k)  |\n| vit_base_patch32_clip_224.laion2b_ft_in1k        |   82.6 |          88.2 |    4.4 |     5   | [link](https://huggingface.co/timm/vit_base_patch32_clip_224.laion2b_ft_in1k)        |\n| vit_base_patch32_clip_224.openai_ft_in1k         |   81.9 |          88.2 |    4.4 |     5   | [link](https://huggingface.co/timm/vit_base_patch32_clip_224.openai_ft_in1k)         |\n\n* Port of MaxViT Tensorflow Weights from official impl at https://github.com/google-research/maxvit\n  * There was larger than expected drops for the upscaled 384/512 in21k fine-tune weights, possible detail missing, but the 21k FT did seem sensitive to small preprocessing\n\n| model                              |   top1 |   param_count |   gmac |   macts | hub                                                                    
|\n|:-----------------------------------|-------:|--------------:|-------:|--------:|:-----------------------------------------------------------------------|\n| maxvit_xlarge_tf_512.in21k_ft_in1k |   88.5 |         475.8 |  534.1 |  1413.2 | [link](https://huggingface.co/timm/maxvit_xlarge_tf_512.in21k_ft_in1k) |\n| maxvit_xlarge_tf_384.in21k_ft_in1k |   88.3 |         475.3 |  292.8 |   668.8 | [link](https://huggingface.co/timm/maxvit_xlarge_tf_384.in21k_ft_in1k) |\n| maxvit_base_tf_512.in21k_ft_in1k   |   88.2 |         119.9 |  138   |   704   | [link](https://huggingface.co/timm/maxvit_base_tf_512.in21k_ft_in1k)   |\n| maxvit_large_tf_512.in21k_ft_in1k  |   88   |         212.3 |  244.8 |   942.2 | [link](https://huggingface.co/timm/maxvit_large_tf_512.in21k_ft_in1k)  |\n| maxvit_large_tf_384.in21k_ft_in1k  |   88   |         212   |  132.6 |   445.8 | [link](https://huggingface.co/timm/maxvit_large_tf_384.in21k_ft_in1k)  |\n| maxvit_base_tf_384.in21k_ft_in1k   |   87.9 |         119.6 |   73.8 |   332.9 | [link](https://huggingface.co/timm/maxvit_base_tf_384.in21k_ft_in1k)   |\n| maxvit_base_tf_512.in1k            |   86.6 |         119.9 |  138   |   704   | [link](https://huggingface.co/timm/maxvit_base_tf_512.in1k)            |\n| maxvit_large_tf_512.in1k           |   86.5 |         212.3 |  244.8 |   942.2 | [link](https://huggingface.co/timm/maxvit_large_tf_512.in1k)           |\n| maxvit_base_tf_384.in1k            |   86.3 |         119.6 |   73.8 |   332.9 | [link](https://huggingface.co/timm/maxvit_base_tf_384.in1k)            |\n| maxvit_large_tf_384.in1k           |   86.2 |         212   |  132.6 |   445.8 | [link](https://huggingface.co/timm/maxvit_large_tf_384.in1k)           |\n| maxvit_small_tf_512.in1k           |   86.1 |          69.1 |   67.3 |   383.8 | [link](https://huggingface.co/timm/maxvit_small_tf_512.in1k)           |\n| maxvit_tiny_tf_512.in1k            |   85.7 |          31   |   33.5 |   257.6 | 
[link](https://huggingface.co/timm/maxvit_tiny_tf_512.in1k)            |\n| maxvit_small_tf_384.in1k           |   85.5 |          69   |   35.9 |   183.6 | [link](https://huggingface.co/timm/maxvit_small_tf_384.in1k)           |\n| maxvit_tiny_tf_384.in1k            |   85.1 |          31   |   17.5 |   123.4 | [link](https://huggingface.co/timm/maxvit_tiny_tf_384.in1k)            |\n| maxvit_large_tf_224.in1k           |   84.9 |         211.8 |   43.7 |   127.4 | [link](https://huggingface.co/timm/maxvit_large_tf_224.in1k)           |\n| maxvit_base_tf_224.in1k            |   84.9 |         119.5 |   24   |    95   | [link](https://huggingface.co/timm/maxvit_base_tf_224.in1k)            |\n| maxvit_small_tf_224.in1k           |   84.4 |          68.9 |   11.7 |    53.2 | [link](https://huggingface.co/timm/maxvit_small_tf_224.in1k)           |\n| maxvit_tiny_tf_224.in1k            |   83.4 |          30.9 |    5.6 |    35.8 | [link](https://huggingface.co/timm/maxvit_tiny_tf_224.in1k)            |\n\n### Oct 15, 2022\n* Train and validation script enhancements\n* Non-GPU (ie CPU) device support\n* SLURM compatibility for train script\n* HF datasets support (via ReaderHfds)\n* TFDS/WDS dataloading improvements (sample padding/wrap for distributed use fixed wrt sample count estimate)\n* in_chans !=3 support for scripts / loader\n* Adan optimizer\n* Can enable per-step LR scheduling via args\n* Dataset 'parsers' renamed to 'readers', more descriptive of purpose\n* AMP args changed, APEX via `--amp-impl apex`, bfloat16 supported via `--amp-dtype bfloat16`\n* main branch switched to 0.7.x version, 0.6.x forked for stable release of weight only adds\n* master -> main branch rename\n\n### Oct 10, 2022\n* More weights in `maxxvit` series, incl first ConvNeXt block based `coatnext` and `maxxvit` experiments:\n  * `coatnext_nano_rw_224` - 82.0 @ 224 (G) -- (uses ConvNeXt conv block, no BatchNorm)\n  * `maxxvit_rmlp_nano_rw_256` - 83.0 @ 256, 83.7 @ 320  (G) (uses ConvNeXt 
conv block, no BN)\n  * `maxvit_rmlp_small_rw_224` - 84.5 @ 224, 85.1 @ 320 (G)\n  * `maxxvit_rmlp_small_rw_256` - 84.6 @ 256, 84.9 @ 288 (G) -- could be trained better, hparams need tuning (uses ConvNeXt block, no BN)\n  * `coatnet_rmlp_2_rw_224` - 84.6 @ 224, 85 @ 320  (T)\n  * NOTE: official MaxVit weights (in1k) have been released at https://github.com/google-research/maxvit -- some extra work is needed to port and adapt since my impl was created independently of theirs and has a few small differences + the whole TF same padding fun.\n\n### Sept 23, 2022\n* LAION-2B CLIP image towers supported as pretrained backbones for fine-tune or features (no classifier)\n  * vit_base_patch32_224_clip_laion2b\n  * vit_large_patch14_224_clip_laion2b\n  * vit_huge_patch14_224_clip_laion2b\n  * vit_giant_patch14_224_clip_laion2b\n\n### Sept 7, 2022\n* Hugging Face [`timm` docs](https://huggingface.co/docs/hub/timm) home now exists, look for more here in the future\n* Add BEiT-v2 weights for base and large 224x224 models from https://github.com/microsoft/unilm/tree/master/beit2\n* Add more weights in `maxxvit` series incl a `pico` (7.5M params, 1.9 GMACs), two `tiny` variants:\n  * `maxvit_rmlp_pico_rw_256` - 80.5 @ 256, 81.3 @ 320  (T)\n  * `maxvit_tiny_rw_224` - 83.5 @ 224 (G)\n  * `maxvit_rmlp_tiny_rw_256` - 84.2 @ 256, 84.8 @ 320 (T)\n\n### Aug 29, 2022\n* MaxVit window size scales with img_size by default. 
Add new RelPosMlp MaxViT weight that leverages this:\n  * `maxvit_rmlp_nano_rw_256` - 83.0 @ 256, 83.6 @ 320  (T)\n\n### Aug 26, 2022\n* CoAtNet (https://arxiv.org/abs/2106.04803) and MaxVit (https://arxiv.org/abs/2204.01697) `timm` original models\n  * both found in [`maxxvit.py`](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/maxxvit.py) model def, contains numerous experiments outside scope of original papers\n  * an unfinished Tensorflow version from MaxVit authors can be found https://github.com/google-research/maxvit\n* Initial CoAtNet and MaxVit timm pretrained weights (working on more):\n  * `coatnet_nano_rw_224` - 81.7 @ 224  (T)\n  * `coatnet_rmlp_nano_rw_224` - 82.0 @ 224, 82.8 @ 320 (T)\n  * `coatnet_0_rw_224` - 82.4  (T)  -- NOTE timm '0' coatnets have 2 more 3rd stage blocks\n  * `coatnet_bn_0_rw_224` - 82.4  (T)\n  * `maxvit_nano_rw_256` - 82.9 @ 256  (T)\n  * `coatnet_rmlp_1_rw_224` - 83.4 @ 224, 84 @ 320  (T)\n  * `coatnet_1_rw_224` - 83.6 @ 224 (G)\n  * (T) = TPU trained with `bits_and_tpu` branch training code, (G) = GPU trained\n* GCVit (weights adapted from https://github.com/NVlabs/GCVit, code 100% `timm` re-write for license purposes)\n* MViT-V2 (multi-scale vit, adapted from https://github.com/facebookresearch/mvit)\n* EfficientFormer (adapted from https://github.com/snap-research/EfficientFormer)\n* PyramidVisionTransformer-V2 (adapted from https://github.com/whai362/PVT)\n* 'Fast Norm' support for LayerNorm and GroupNorm that avoids float32 upcast w/ AMP (uses APEX LN if available for further boost)\n\n### Aug 15, 2022\n* ConvNeXt atto weights added\n  * `convnext_atto` - 75.7 @ 224, 77.0 @ 288\n  * `convnext_atto_ols` - 75.9  @ 224, 77.2 @ 288\n\n### Aug 5, 2022\n* More custom ConvNeXt smaller model defs with weights\n  * `convnext_femto` - 77.5 @ 224, 78.7 @ 288\n  * `convnext_femto_ols` - 77.9  @ 224, 78.9 @ 288\n  * `convnext_pico` - 79.5 @ 224, 80.4 @ 288\n  * `convnext_pico_ols` - 79.5 @ 224, 80.5 @ 288\n  
* `convnext_nano_ols` - 80.9 @ 224, 81.6 @ 288\n* Updated EdgeNeXt to improve ONNX export, add new base variant and weights from original (https://github.com/mmaaz60/EdgeNeXt)\n\n### July 28, 2022\n* Add freshly minted DeiT-III Medium (width=512, depth=12, num_heads=8) model weights. Thanks [Hugo Touvron](https://github.com/TouvronHugo)!\n\n### July 27, 2022\n* All runtime benchmark and validation result csv files are finally up-to-date!\n* A few more weights & model defs added:\n  * `darknetaa53` -  79.8 @ 256, 80.5 @ 288\n  * `convnext_nano` - 80.8 @ 224, 81.5 @ 288\n  * `cs3sedarknet_l` - 81.2 @ 256, 81.8 @ 288\n  * `cs3darknet_x` - 81.8 @ 256, 82.2 @ 288\n  * `cs3sedarknet_x` - 82.2 @ 256, 82.7 @ 288\n  * `cs3edgenet_x` - 82.2 @ 256, 82.7 @ 288\n  * `cs3se_edgenet_x` - 82.8 @ 256, 83.5 @ 320\n* `cs3*` weights above all trained on TPU w/ `bits_and_tpu` branch. Thanks to TRC program!\n* Add output_stride=8 and 16 support to ConvNeXt (dilation)\n* deit3 models not being able to resize pos_emb fixed\n* Version 0.6.7 PyPi release (/w above bug fixes and new weights since 0.6.5)\n\n### July 8, 2022\nMore models, more fixes\n* Official research models (w/ weights) added:\n  * EdgeNeXt from (https://github.com/mmaaz60/EdgeNeXt)\n  * MobileViT-V2 from (https://github.com/apple/ml-cvnets)\n  * DeiT III (Revenge of the ViT) from (https://github.com/facebookresearch/deit)\n* My own models:\n  * Small `ResNet` defs added by request with 1 block repeats for both basic and bottleneck (resnet10 and resnet14)\n  * `CspNet` refactored with dataclass config, simplified CrossStage3 (`cs3`) option. These are closer to YOLO-v5+ backbone defs.\n  * More relative position vit fiddling. Two `srelpos` (shared relative position) models trained, and a medium w/ class token.\n  * Add an alternate downsample mode to EdgeNeXt and train a `small` model. 
Better than original small, but not their new USI trained weights.\n* My own model weight results (all ImageNet-1k training)\n  * `resnet10t` - 66.5 @ 176, 68.3 @ 224\n  * `resnet14t` - 71.3 @ 176, 72.3 @ 224\n  * `resnetaa50` - 80.6 @ 224 , 81.6 @ 288\n  * `darknet53` -  80.0 @ 256, 80.5 @ 288\n  * `cs3darknet_m` - 77.0 @ 256, 77.6 @ 288\n  * `cs3darknet_focus_m` - 76.7 @ 256, 77.3 @ 288\n  * `cs3darknet_l` - 80.4 @ 256, 80.9 @ 288\n  * `cs3darknet_focus_l` - 80.3 @ 256, 80.9 @ 288\n  * `vit_srelpos_small_patch16_224` - 81.1 @ 224, 82.1 @ 320\n  * `vit_srelpos_medium_patch16_224` - 82.3 @ 224, 83.1 @ 320\n  * `vit_relpos_small_patch16_cls_224` - 82.6 @ 224, 83.6 @ 320\n  * `edgnext_small_rw` - 79.6 @ 224, 80.4 @ 320\n* `cs3`, `darknet`, and `vit_*relpos` weights above all trained on TPU thanks to TRC program! Rest trained on overheating GPUs.\n* Hugging Face Hub support fixes verified, demo notebook TBA\n* Pretrained weights / configs can be loaded externally (ie from local disk) w/ support for head adaptation.\n* Add support to change image extensions scanned by `timm` datasets/readers. See (https://github.com/rwightman/pytorch-image-models/pull/1274#issuecomment-1178303103)\n* Default ConvNeXt LayerNorm impl to use `F.layer_norm(x.permute(0, 2, 3, 1), ...).permute(0, 3, 1, 2)` via `LayerNorm2d` in all cases.\n  * a bit slower than previous custom impl on some hardware (ie Ampere w/ CL), but overall fewer regressions across wider HW / PyTorch version ranges.\n  * previous impl exists as `LayerNormExp2d` in `models/layers/norm.py`\n* Numerous bug fixes\n* Currently testing for imminent PyPi 0.6.x release\n* LeViT pretraining of larger models still a WIP, they don't train well / easily without distillation. 
Time to add distill support (finally)?\n* ImageNet-22k weight training + finetune ongoing, work on multi-weight support (slowly) chugging along (there are a LOT of weights, sigh) ...\n\n### May 13, 2022\n* Official Swin-V2 models and weights added from (https://github.com/microsoft/Swin-Transformer). Cleaned up to support torchscript.\n* Some refactoring for existing `timm` Swin-V2-CR impl, will likely do a bit more to bring parts closer to official and decide whether to merge some aspects.\n* More Vision Transformer relative position / residual post-norm experiments (all trained on TPU thanks to TRC program)\n  * `vit_relpos_small_patch16_224` - 81.5 @ 224, 82.5 @ 320 -- rel pos, layer scale, no class token, avg pool\n  * `vit_relpos_medium_patch16_rpn_224` - 82.3 @ 224, 83.1 @ 320 -- rel pos + res-post-norm, no class token, avg pool\n  * `vit_relpos_medium_patch16_224` - 82.5 @ 224, 83.3 @ 320 -- rel pos, layer scale, no class token, avg pool\n  * `vit_relpos_base_patch16_gapcls_224` - 82.8 @ 224, 83.9 @ 320 -- rel pos, layer scale, class token, avg pool (by mistake)\n* Bring 512 dim, 8-head 'medium' ViT model variant back to life (after using in a pre DeiT 'small' model for first ViT impl back in 2020)\n* Add ViT relative position support for switching btw existing impl and some additions in official Swin-V2 impl for future trials\n* Sequencer2D impl (https://arxiv.org/abs/2205.01972), added via PR from author (https://github.com/okojoalg)\n\n### May 2, 2022\n* Vision Transformer experiments adding Relative Position (Swin-V2 log-coord) (`vision_transformer_relpos.py`) and Residual Post-Norm branches (from Swin-V2) (`vision_transformer*.py`)\n  * `vit_relpos_base_patch32_plus_rpn_256` - 79.5 @ 256, 80.6 @ 320 -- rel pos + extended width + res-post-norm, no class token, avg pool\n  * `vit_relpos_base_patch16_224` - 82.5 @ 224, 83.6 @ 320 -- rel pos, layer scale, no class token, avg pool\n  * `vit_base_patch16_rpn_224` - 82.3 @ 224 -- rel pos + res-post-norm, no 
class token, avg pool\n* Vision Transformer refactor to remove representation layer that was only used in initial vit and rarely used since with newer pretrain (ie `How to Train Your ViT`)\n* `vit_*` models support removal of class token, use of global average pool, use of fc_norm (ala beit, mae).\n\n### April 22, 2022\n* `timm` models are now officially supported in [fast.ai](https://www.fast.ai/)! Just in time for the new Practical Deep Learning course. `timmdocs` documentation link updated to [timm.fast.ai](http://timm.fast.ai/).\n* Two more model weights added in the TPU trained [series](https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-tpu-weights). Some In22k pretrain still in progress.\n  * `seresnext101d_32x8d` - 83.69 @ 224, 84.35 @ 288\n  * `seresnextaa101d_32x8d` (anti-aliased w/ AvgPool2d) - 83.85 @ 224, 84.57 @ 288\n\n### March 23, 2022\n* Add `ParallelBlock` and `LayerScale` option to base vit models to support model configs in [Three things everyone should know about ViT](https://arxiv.org/abs/2203.09795)\n* `convnext_tiny_hnf` (head norm first) weights trained with (close to) A2 recipe, 82.2% top-1, could do better with more epochs.\n\n### March 21, 2022\n* Merge `norm_norm_norm`. **IMPORTANT** this update for a coming 0.6.x release will likely de-stabilize the master branch for a while. 
Branch [`0.5.x`](https://github.com/rwightman/pytorch-image-models/tree/0.5.x) or a previous 0.5.x release can be used if stability is required.\n* Significant weights update (all TPU trained) as described in this [release](https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-tpu-weights)\n  * `regnety_040` - 82.3 @ 224, 82.96 @ 288\n  * `regnety_064` - 83.0 @ 224, 83.65 @ 288\n  * `regnety_080` - 83.17 @ 224, 83.86 @ 288\n  * `regnetv_040` - 82.44 @ 224, 83.18 @ 288   (timm pre-act)\n  * `regnetv_064` - 83.1 @ 224, 83.71 @ 288   (timm pre-act)\n  * `regnetz_040` - 83.67 @ 256, 84.25 @ 320\n  * `regnetz_040h` - 83.77 @ 256, 84.5 @ 320 (w/ extra fc in head)\n  * `resnetv2_50d_gn` - 80.8 @ 224, 81.96 @ 288 (pre-act GroupNorm)\n  * `resnetv2_50d_evos` 80.77 @ 224, 82.04 @ 288 (pre-act EvoNormS)\n  * `regnetz_c16_evos`  - 81.9 @ 256, 82.64 @ 320 (EvoNormS)\n  * `regnetz_d8_evos`  - 83.42 @ 256, 84.04 @ 320 (EvoNormS)\n  * `xception41p` - 82 @ 299   (timm pre-act)\n  * `xception65` -  83.17 @ 299\n  * `xception65p` -  83.14 @ 299   (timm pre-act)\n  * `resnext101_64x4d` - 82.46 @ 224, 83.16 @ 288\n  * `seresnext101_32x8d` - 83.57 @ 224, 84.270 @ 288\n  * `resnetrs200` - 83.85 @ 256, 84.44 @ 320\n* HuggingFace hub support fixed w/ initial groundwork for allowing alternative 'config sources' for pretrained model definitions and weights (generic local file / remote url support soon)\n* SwinTransformer-V2 implementation added. Submitted by [Christoph Reich](https://github.com/ChristophReich1996). 
Training experiments and model changes by myself are ongoing so expect compat breaks.\n* Swin-S3 (AutoFormerV2) models / weights added from https://github.com/microsoft/Cream/tree/main/AutoFormerV2\n* MobileViT models w/ weights adapted from https://github.com/apple/ml-cvnets\n* PoolFormer models w/ weights adapted from https://github.com/sail-sg/poolformer\n* VOLO models w/ weights adapted from https://github.com/sail-sg/volo\n* Significant work experimenting with non-BatchNorm norm layers such as EvoNorm, FilterResponseNorm, GroupNorm, etc\n* Enhance support for alternate norm + act ('NormAct') layers added to a number of models, esp EfficientNet/MobileNetV3, RegNet, and aligned Xception\n* Grouped conv support added to EfficientNet family\n* Add 'group matching' API to all models to allow grouping model parameters for application of 'layer-wise' LR decay, lr scale added to LR scheduler\n* Gradient checkpointing support added to many models\n* `forward_head(x, pre_logits=False)` fn added to all models to allow separate calls of `forward_features` + `forward_head`\n* All vision transformer and vision MLP models update to return non-pooled / non-token selected features from `forward_features`, for consistency with CNN models, token selection or pooling now applied in `forward_head`\n\n### Feb 2, 2022\n* [Chris Hughes](https://github.com/Chris-hughes10) posted an exhaustive run through of `timm` on his blog yesterday. Well worth a read. [Getting Started with PyTorch Image Models (timm): A Practitioner’s Guide](https://towardsdatascience.com/getting-started-with-pytorch-image-models-timm-a-practitioners-guide-4e77b4bf9055)\n* I'm currently prepping to merge the `norm_norm_norm` branch back to master (ver 0.6.x) in next week or so.\n  * The changes are more extensive than usual and may destabilize and break some model API use (aiming for full backwards compat). 
So, beware `pip install git+https://github.com/rwightman/pytorch-image-models` installs!\n  * `0.5.x` releases and a `0.5.x` branch will remain stable with a cherry pick or two until dust clears. Recommend sticking to pypi install for a bit if you want stable.\n\n### Jan 14, 2022\n* Version 0.5.4 w/ release to be pushed to pypi. It's been a while since last pypi update and riskier changes will be merged to main branch soon....\n* Add ConvNeXT models /w weights from official impl (https://github.com/facebookresearch/ConvNeXt), a few perf tweaks, compatible with timm features\n* Tried training a few small (~1.8-3M param) / mobile optimized models, a few are good so far, more on the way...\n  * `mnasnet_small` - 65.6 top-1\n  * `mobilenetv2_050` - 65.9\n  * `lcnet_100/075/050` - 72.1 / 68.8 / 63.1\n  * `semnasnet_075` - 73\n  * `fbnetv3_b/d/g` - 79.1 / 79.7 / 82.0\n* TinyNet models added by [rsomani95](https://github.com/rsomani95)\n* LCNet added via MobileNetV3 architecture\n\n### Jan 5, 2023\n* ConvNeXt-V2 models and weights added to existing `convnext.py`\n  * Paper: [ConvNeXt V2: Co-designing and Scaling ConvNets with Masked Autoencoders](http://arxiv.org/abs/2301.00808)\n  * Reference impl: https://github.com/facebookresearch/ConvNeXt-V2 (NOTE: weights currently CC-BY-NC)\n\n### Dec 23, 2022 🎄☃\n* Add FlexiViT models and weights from https://github.com/google-research/big_vision (check out paper at https://arxiv.org/abs/2212.08013)\n  * NOTE currently resizing is static on model creation, on-the-fly dynamic / train patch size sampling is a WIP\n* Many more models updated to multi-weight and downloadable via HF hub now (convnext, efficientnet, mobilenet, vision_transformer*, beit)\n* More model pretrained tag and adjustments, some model names changed (working on deprecation translations, consider main branch DEV branch right now, use 0.6.x for stable use)\n* More ImageNet-12k (subset of 22k) pretrain models popping up:\n  * `efficientnet_b5.in12k_ft_in1k` - 85.9 @ 
448x448\n  * `vit_medium_patch16_gap_384.in12k_ft_in1k` - 85.5 @ 384x384\n  * `vit_medium_patch16_gap_256.in12k_ft_in1k` - 84.5 @ 256x256\n  * `convnext_nano.in12k_ft_in1k` - 82.9 @ 288x288\n\n### Dec 8, 2022\n* Add 'EVA l' to `vision_transformer.py`, MAE style ViT-L/14 MIM pretrain w/ EVA-CLIP targets, FT on ImageNet-1k (w/ ImageNet-22k intermediate for some)\n  * original source: https://github.com/baaivision/EVA\n\n| model                                     | top1 | param_count |  gmac | macts | hub                                     |\n|:------------------------------------------|-----:|------------:|------:|------:|:----------------------------------------|\n| eva_large_patch14_336.in22k_ft_in22k_in1k | 89.2 |       304.5 | 191.1 | 270.2 | [link](https://huggingface.co/BAAI/EVA) |\n| eva_large_patch14_336.in22k_ft_in1k       | 88.7 |       304.5 | 191.1 | 270.2 | [link](https://huggingface.co/BAAI/EVA) |\n| eva_large_patch14_196.in22k_ft_in22k_in1k | 88.6 |       304.1 |  61.6 |  63.5 | [link](https://huggingface.co/BAAI/EVA) |\n| eva_large_patch14_196.in22k_ft_in1k       | 87.9 |       304.1 |  61.6 |  63.5 | [link](https://huggingface.co/BAAI/EVA) |\n\n### Dec 6, 2022\n* Add 'EVA g', BEiT style ViT-g/14 model weights w/ both MIM pretrain and CLIP pretrain to `beit.py`. 
\n  * original source: https://github.com/baaivision/EVA\n  * paper: https://arxiv.org/abs/2211.07636\n\n| model                                    |   top1 |   param_count |   gmac |   macts | hub                                     |\n|:-----------------------------------------|-------:|--------------:|-------:|--------:|:----------------------------------------|\n| eva_giant_patch14_560.m30m_ft_in22k_in1k |   89.8 |        1014.4 | 1906.8 |  2577.2 | [link](https://huggingface.co/BAAI/EVA) |\n| eva_giant_patch14_336.m30m_ft_in22k_in1k |   89.6 |        1013   |  620.6 |   550.7 | [link](https://huggingface.co/BAAI/EVA) |\n| eva_giant_patch14_336.clip_ft_in1k       |   89.4 |        1013   |  620.6 |   550.7 | [link](https://huggingface.co/BAAI/EVA) |\n| eva_giant_patch14_224.clip_ft_in1k       |   89.1 |        1012.6 |  267.2 |   192.6 | [link](https://huggingface.co/BAAI/EVA) |\n\n### Dec 5, 2022\n\n* Pre-release (`0.8.0dev0`) of multi-weight support (`model_arch.pretrained_tag`). Install with `pip install --pre timm`\n  * vision_transformer, maxvit, convnext are the first three model impl w/ support\n  * model names are changing with this (previous _21k, etc. 
fn will merge), still sorting out deprecation handling\n  * bugs are likely, but I need feedback so please try it out\n  * if stability is needed, please use 0.6.x pypi releases or clone from [0.6.x branch](https://github.com/rwightman/pytorch-image-models/tree/0.6.x)\n* Support for PyTorch 2.0 compile is added in train/validate/inference/benchmark, use `--torchcompile` argument\n* Inference script allows more control over output, select k for top-class index + prob json, csv or parquet output\n* Add a full set of fine-tuned CLIP image tower weights from both LAION-2B and original OpenAI CLIP models\n\n| model                                            |   top1 |   param_count |   gmac |   macts | hub                                                                                  |\n|:-------------------------------------------------|-------:|--------------:|-------:|--------:|:-------------------------------------------------------------------------------------|\n| vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k  |   88.6 |         632.5 |  391   |   407.5 | [link](https://huggingface.co/timm/vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k)  |\n| vit_large_patch14_clip_336.openai_ft_in12k_in1k  |   88.3 |         304.5 |  191.1 |   270.2 | [link](https://huggingface.co/timm/vit_large_patch14_clip_336.openai_ft_in12k_in1k)  |\n| vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k  |   88.2 |         632   |  167.4 |   139.4 | [link](https://huggingface.co/timm/vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k)  |\n| vit_large_patch14_clip_336.laion2b_ft_in12k_in1k |   88.2 |         304.5 |  191.1 |   270.2 | [link](https://huggingface.co/timm/vit_large_patch14_clip_336.laion2b_ft_in12k_in1k) |\n| vit_large_patch14_clip_224.openai_ft_in12k_in1k  |   88.2 |         304.2 |   81.1 |    88.8 | [link](https://huggingface.co/timm/vit_large_patch14_clip_224.openai_ft_in12k_in1k)  |\n| vit_large_patch14_clip_224.laion2b_ft_in12k_in1k |   87.9 |         304.2 |   81.1 |    88.8 
| [link](https://huggingface.co/timm/vit_large_patch14_clip_224.laion2b_ft_in12k_in1k) |\n| vit_large_patch14_clip_224.openai_ft_in1k        |   87.9 |         304.2 |   81.1 |    88.8 | [link](https://huggingface.co/timm/vit_large_patch14_clip_224.openai_ft_in1k)        |\n| vit_large_patch14_clip_336.laion2b_ft_in1k       |   87.9 |         304.5 |  191.1 |   270.2 | [link](https://huggingface.co/timm/vit_large_patch14_clip_336.laion2b_ft_in1k)       |\n| vit_huge_patch14_clip_224.laion2b_ft_in1k        |   87.6 |         632   |  167.4 |   139.4 | [link](https://huggingface.co/timm/vit_huge_patch14_clip_224.laion2b_ft_in1k)        |\n| vit_large_patch14_clip_224.laion2b_ft_in1k       |   87.3 |         304.2 |   81.1 |    88.8 | [link](https://huggingface.co/timm/vit_large_patch14_clip_224.laion2b_ft_in1k)       |\n| vit_base_patch16_clip_384.laion2b_ft_in12k_in1k  |   87.2 |          86.9 |   55.5 |   101.6 | [link](https://huggingface.co/timm/vit_base_patch16_clip_384.laion2b_ft_in12k_in1k)  |\n| vit_base_patch16_clip_384.openai_ft_in12k_in1k   |   87   |          86.9 |   55.5 |   101.6 | [link](https://huggingface.co/timm/vit_base_patch16_clip_384.openai_ft_in12k_in1k)   |\n| vit_base_patch16_clip_384.laion2b_ft_in1k        |   86.6 |          86.9 |   55.5 |   101.6 | [link](https://huggingface.co/timm/vit_base_patch16_clip_384.laion2b_ft_in1k)        |\n| vit_base_patch16_clip_384.openai_ft_in1k         |   86.2 |          86.9 |   55.5 |   101.6 | [link](https://huggingface.co/timm/vit_base_patch16_clip_384.openai_ft_in1k)         |\n| vit_base_patch16_clip_224.laion2b_ft_in12k_in1k  |   86.2 |          86.6 |   17.6 |    23.9 | [link](https://huggingface.co/timm/vit_base_patch16_clip_224.laion2b_ft_in12k_in1k)  |\n| vit_base_patch16_clip_224.openai_ft_in12k_in1k   |   85.9 |          86.6 |   17.6 |    23.9 | [link](https://huggingface.co/timm/vit_base_patch16_clip_224.openai_ft_in12k_in1k)   |\n| vit_base_patch32_clip_448.laion2b_ft_in12k_in1k  |   85.8 
|          88.3 |   17.9 |    23.9 | [link](https://huggingface.co/timm/vit_base_patch32_clip_448.laion2b_ft_in12k_in1k)  |\n| vit_base_patch16_clip_224.laion2b_ft_in1k        |   85.5 |          86.6 |   17.6 |    23.9 | [link](https://huggingface.co/timm/vit_base_patch16_clip_224.laion2b_ft_in1k)        |\n| vit_base_patch32_clip_384.laion2b_ft_in12k_in1k  |   85.4 |          88.3 |   13.1 |    16.5 | [link](https://huggingface.co/timm/vit_base_patch32_clip_384.laion2b_ft_in12k_in1k)  |\n| vit_base_patch16_clip_224.openai_ft_in1k         |   85.3 |          86.6 |   17.6 |    23.9 | [link](https://huggingface.co/timm/vit_base_patch16_clip_224.openai_ft_in1k)         |\n| vit_base_patch32_clip_384.openai_ft_in12k_in1k   |   85.2 |          88.3 |   13.1 |    16.5 | [link](https://huggingface.co/timm/vit_base_patch32_clip_384.openai_ft_in12k_in1k)   |\n| vit_base_patch32_clip_224.laion2b_ft_in12k_in1k  |   83.3 |          88.2 |    4.4 |     5   | [link](https://huggingface.co/timm/vit_base_patch32_clip_224.laion2b_ft_in12k_in1k)  |\n| vit_base_patch32_clip_224.laion2b_ft_in1k        |   82.6 |          88.2 |    4.4 |     5   | [link](https://huggingface.co/timm/vit_base_patch32_clip_224.laion2b_ft_in1k)        |\n| vit_base_patch32_clip_224.openai_ft_in1k         |   81.9 |          88.2 |    4.4 |     5   | [link](https://huggingface.co/timm/vit_base_patch32_clip_224.openai_ft_in1k)         |\n\n* Port of MaxViT Tensorflow Weights from official impl at https://github.com/google-research/maxvit\n  * There was larger than expected drops for the upscaled 384/512 in21k fine-tune weights, possible detail missing, but the 21k FT did seem sensitive to small preprocessing\n\n| model                              |   top1 |   param_count |   gmac |   macts | hub                                                                    
|\n|:-----------------------------------|-------:|--------------:|-------:|--------:|:-----------------------------------------------------------------------|\n| maxvit_xlarge_tf_512.in21k_ft_in1k |   88.5 |         475.8 |  534.1 |  1413.2 | [link](https://huggingface.co/timm/maxvit_xlarge_tf_512.in21k_ft_in1k) |\n| maxvit_xlarge_tf_384.in21k_ft_in1k |   88.3 |         475.3 |  292.8 |   668.8 | [link](https://huggingface.co/timm/maxvit_xlarge_tf_384.in21k_ft_in1k) |\n| maxvit_base_tf_512.in21k_ft_in1k   |   88.2 |         119.9 |  138   |   704   | [link](https://huggingface.co/timm/maxvit_base_tf_512.in21k_ft_in1k)   |\n| maxvit_large_tf_512.in21k_ft_in1k  |   88   |         212.3 |  244.8 |   942.2 | [link](https://huggingface.co/timm/maxvit_large_tf_512.in21k_ft_in1k)  |\n| maxvit_large_tf_384.in21k_ft_in1k  |   88   |         212   |  132.6 |   445.8 | [link](https://huggingface.co/timm/maxvit_large_tf_384.in21k_ft_in1k)  |\n| maxvit_base_tf_384.in21k_ft_in1k   |   87.9 |         119.6 |   73.8 |   332.9 | [link](https://huggingface.co/timm/maxvit_base_tf_384.in21k_ft_in1k)   |\n| maxvit_base_tf_512.in1k            |   86.6 |         119.9 |  138   |   704   | [link](https://huggingface.co/timm/maxvit_base_tf_512.in1k)            |\n| maxvit_large_tf_512.in1k           |   86.5 |         212.3 |  244.8 |   942.2 | [link](https://huggingface.co/timm/maxvit_large_tf_512.in1k)           |\n| maxvit_base_tf_384.in1k            |   86.3 |         119.6 |   73.8 |   332.9 | [link](https://huggingface.co/timm/maxvit_base_tf_384.in1k)            |\n| maxvit_large_tf_384.in1k           |   86.2 |         212   |  132.6 |   445.8 | [link](https://huggingface.co/timm/maxvit_large_tf_384.in1k)           |\n| maxvit_small_tf_512.in1k           |   86.1 |          69.1 |   67.3 |   383.8 | [link](https://huggingface.co/timm/maxvit_small_tf_512.in1k)           |\n| maxvit_tiny_tf_512.in1k            |   85.7 |          31   |   33.5 |   257.6 | 
[link](https://huggingface.co/timm/maxvit_tiny_tf_512.in1k)            |\n| maxvit_small_tf_384.in1k           |   85.5 |          69   |   35.9 |   183.6 | [link](https://huggingface.co/timm/maxvit_small_tf_384.in1k)           |\n| maxvit_tiny_tf_384.in1k            |   85.1 |          31   |   17.5 |   123.4 | [link](https://huggingface.co/timm/maxvit_tiny_tf_384.in1k)            |\n| maxvit_large_tf_224.in1k           |   84.9 |         211.8 |   43.7 |   127.4 | [link](https://huggingface.co/timm/maxvit_large_tf_224.in1k)           |\n| maxvit_base_tf_224.in1k            |   84.9 |         119.5 |   24   |    95   | [link](https://huggingface.co/timm/maxvit_base_tf_224.in1k)            |\n| maxvit_small_tf_224.in1k           |   84.4 |          68.9 |   11.7 |    53.2 | [link](https://huggingface.co/timm/maxvit_small_tf_224.in1k)           |\n| maxvit_tiny_tf_224.in1k            |   83.4 |          30.9 |    5.6 |    35.8 | [link](https://huggingface.co/timm/maxvit_tiny_tf_224.in1k)            |\n\n### Oct 15, 2022\n* Train and validation script enhancements\n* Non-GPU (ie CPU) device support\n* SLURM compatibility for train script\n* HF datasets support (via ReaderHfds)\n* TFDS/WDS dataloading improvements (sample padding/wrap for distributed use fixed wrt sample count estimate)\n* in_chans !=3 support for scripts / loader\n* Adan optimizer\n* Can enable per-step LR scheduling via args\n* Dataset 'parsers' renamed to 'readers', more descriptive of purpose\n* AMP args changed, APEX via `--amp-impl apex`, bfloat16 supported via `--amp-dtype bfloat16`\n* main branch switched to 0.7.x version, 0.6x forked for stable release of weight only adds\n* master -> main branch rename\n\n### Oct 10, 2022\n* More weights in `maxxvit` series, incl first ConvNeXt block based `coatnext` and `maxxvit` experiments:\n  * `coatnext_nano_rw_224` - 82.0 @ 224 (G) -- (uses ConvNeXt conv block, no BatchNorm)\n  * `maxxvit_rmlp_nano_rw_256` - 83.0 @ 256, 83.7 @ 320  (G) (uses ConvNeXt 
conv block, no BN)\n  * `maxvit_rmlp_small_rw_224` - 84.5 @ 224, 85.1 @ 320 (G)\n  * `maxxvit_rmlp_small_rw_256` - 84.6 @ 256, 84.9 @ 288 (G) -- could be trained better, hparams need tuning (uses ConvNeXt block, no BN)\n  * `coatnet_rmlp_2_rw_224` - 84.6 @ 224, 85 @ 320  (T)\n  * NOTE: official MaxVit weights (in1k) have been released at https://github.com/google-research/maxvit -- some extra work is needed to port and adapt since my impl was created independently of theirs and has a few small differences + the whole TF same padding fun.\n  \n### Sept 23, 2022\n* LAION-2B CLIP image towers supported as pretrained backbones for fine-tune or features (no classifier)\n  * vit_base_patch32_224_clip_laion2b\n  * vit_large_patch14_224_clip_laion2b\n  * vit_huge_patch14_224_clip_laion2b\n  * vit_giant_patch14_224_clip_laion2b\n\n### Sept 7, 2022\n* Hugging Face [`timm` docs](https://huggingface.co/docs/hub/timm) home now exists, look for more here in the future\n* Add BEiT-v2 weights for base and large 224x224 models from https://github.com/microsoft/unilm/tree/master/beit2\n* Add more weights in `maxxvit` series incl a `pico` (7.5M params, 1.9 GMACs), two `tiny` variants:\n  * `maxvit_rmlp_pico_rw_256` - 80.5 @ 256, 81.3 @ 320  (T)\n  * `maxvit_tiny_rw_224` - 83.5 @ 224 (G)\n  * `maxvit_rmlp_tiny_rw_256` - 84.2 @ 256, 84.8 @ 320 (T)\n\n### Aug 29, 2022\n* MaxVit window size scales with img_size by default. 
Add new RelPosMlp MaxViT weight that leverages this:\n  * `maxvit_rmlp_nano_rw_256` - 83.0 @ 256, 83.6 @ 320  (T)\n\n### Aug 26, 2022\n* CoAtNet (https://arxiv.org/abs/2106.04803) and MaxVit (https://arxiv.org/abs/2204.01697) `timm` original models\n  * both found in [`maxxvit.py`](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/maxxvit.py) model def, contains numerous experiments outside scope of original papers\n  * an unfinished Tensorflow version from MaxVit authors can be found https://github.com/google-research/maxvit\n* Initial CoAtNet and MaxVit timm pretrained weights (working on more):\n  * `coatnet_nano_rw_224` - 81.7 @ 224  (T)\n  * `coatnet_rmlp_nano_rw_224` - 82.0 @ 224, 82.8 @ 320 (T)\n  * `coatnet_0_rw_224` - 82.4  (T)  -- NOTE timm '0' coatnets have 2 more 3rd stage blocks\n  * `coatnet_bn_0_rw_224` - 82.4  (T)\n  * `maxvit_nano_rw_256` - 82.9 @ 256  (T)\n  * `coatnet_rmlp_1_rw_224` - 83.4 @ 224, 84 @ 320  (T)\n  * `coatnet_1_rw_224` - 83.6 @ 224 (G) \n  * (T) = TPU trained with `bits_and_tpu` branch training code, (G) = GPU trained\n* GCVit (weights adapted from https://github.com/NVlabs/GCVit, code 100% `timm` re-write for license purposes)\n* MViT-V2 (multi-scale vit, adapted from https://github.com/facebookresearch/mvit)\n* EfficientFormer (adapted from https://github.com/snap-research/EfficientFormer)\n* PyramidVisionTransformer-V2 (adapted from https://github.com/whai362/PVT)\n* 'Fast Norm' support for LayerNorm and GroupNorm that avoids float32 upcast w/ AMP (uses APEX LN if available for further boost)\n\n\n### Aug 15, 2022\n* ConvNeXt atto weights added\n  * `convnext_atto` - 75.7 @ 224, 77.0 @ 288\n  * `convnext_atto_ols` - 75.9  @ 224, 77.2 @ 288\n\n### Aug 5, 2022\n* More custom ConvNeXt smaller model defs with weights \n  * `convnext_femto` - 77.5 @ 224, 78.7 @ 288\n  * `convnext_femto_ols` - 77.9  @ 224, 78.9 @ 288\n  * `convnext_pico` - 79.5 @ 224, 80.4 @ 288\n  * `convnext_pico_ols` - 79.5 @ 224, 80.5 @ 
288\n  * `convnext_nano_ols` - 80.9 @ 224, 81.6 @ 288\n* Updated EdgeNeXt to improve ONNX export, add new base variant and weights from original (https://github.com/mmaaz60/EdgeNeXt)\n\n### July 28, 2022\n* Add freshly minted DeiT-III Medium (width=512, depth=12, num_heads=8) model weights. Thanks [Hugo Touvron](https://github.com/TouvronHugo)!\n\n### July 27, 2022\n* All runtime benchmark and validation result csv files are up-to-date!\n* A few more weights & model defs added:\n  * `darknetaa53` -  79.8 @ 256, 80.5 @ 288\n  * `convnext_nano` - 80.8 @ 224, 81.5 @ 288\n  * `cs3sedarknet_l` - 81.2 @ 256, 81.8 @ 288\n  * `cs3darknet_x` - 81.8 @ 256, 82.2 @ 288\n  * `cs3sedarknet_x` - 82.2 @ 256, 82.7 @ 288\n  * `cs3edgenet_x` - 82.2 @ 256, 82.7 @ 288\n  * `cs3se_edgenet_x` - 82.8 @ 256, 83.5 @ 320\n* `cs3*` weights above all trained on TPU w/ `bits_and_tpu` branch. Thanks to TRC program!\n* Add output_stride=8 and 16 support to ConvNeXt (dilation)\n* deit3 models not being able to resize pos_emb fixed\n* Version 0.6.7 PyPi release (w/ above bug fixes and new weights since 0.6.5)\n\n### July 8, 2022\nMore models, more fixes\n* Official research models (w/ weights) added:\n  * EdgeNeXt from (https://github.com/mmaaz60/EdgeNeXt)\n  * MobileViT-V2 from (https://github.com/apple/ml-cvnets)\n  * DeiT III (Revenge of the ViT) from (https://github.com/facebookresearch/deit)\n* My own models:\n  * Small `ResNet` defs added by request with 1 block repeats for both basic and bottleneck (resnet10 and resnet14)\n  * `CspNet` refactored with dataclass config, simplified CrossStage3 (`cs3`) option. These are closer to YOLO-v5+ backbone defs.\n  * More relative position vit fiddling. Two `srelpos` (shared relative position) models trained, and a medium w/ class token.\n  * Add an alternate downsample mode to EdgeNeXt and train a `small` model. 
Better than original small, but not their new USI trained weights.\n* My own model weight results (all ImageNet-1k training)\n  * `resnet10t` - 66.5 @ 176, 68.3 @ 224\n  * `resnet14t` - 71.3 @ 176, 72.3 @ 224\n  * `resnetaa50` - 80.6 @ 224 , 81.6 @ 288\n  * `darknet53` -  80.0 @ 256, 80.5 @ 288\n  * `cs3darknet_m` - 77.0 @ 256, 77.6 @ 288\n  * `cs3darknet_focus_m` - 76.7 @ 256, 77.3 @ 288\n  * `cs3darknet_l` - 80.4 @ 256, 80.9 @ 288\n  * `cs3darknet_focus_l` - 80.3 @ 256, 80.9 @ 288\n  * `vit_srelpos_small_patch16_224` - 81.1 @ 224, 82.1 @ 320\n  * `vit_srelpos_medium_patch16_224` - 82.3 @ 224, 83.1 @ 320\n  * `vit_relpos_small_patch16_cls_224` - 82.6 @ 224, 83.6 @ 320\n  * `edgnext_small_rw` - 79.6 @ 224, 80.4 @ 320\n* `cs3`, `darknet`, and `vit_*relpos` weights above all trained on TPU thanks to TRC program! Rest trained on overheating GPUs.\n* Hugging Face Hub support fixes verified, demo notebook TBA\n* Pretrained weights / configs can be loaded externally (ie from local disk) w/ support for head adaptation.\n* Add support to change image extensions scanned by `timm` datasets/parsers. See (https://github.com/rwightman/pytorch-image-models/pull/1274#issuecomment-1178303103)\n* Default ConvNeXt LayerNorm impl to use `F.layer_norm(x.permute(0, 2, 3, 1), ...).permute(0, 3, 1, 2)` via `LayerNorm2d` in all cases. \n  * a bit slower than previous custom impl on some hardware (ie Ampere w/ CL), but overall fewer regressions across wider HW / PyTorch version ranges. \n  * previous impl exists as `LayerNormExp2d` in `models/layers/norm.py`\n* Numerous bug fixes\n* Currently testing for imminent PyPi 0.6.x release\n* LeViT pretraining of larger models still a WIP, they don't train well / easily without distillation. 
Time to add distill support (finally)?\n* ImageNet-22k weight training + finetune ongoing, work on multi-weight support (slowly) chugging along (there are a LOT of weights, sigh) ...\n\n### May 13, 2022\n* Official Swin-V2 models and weights added from (https://github.com/microsoft/Swin-Transformer). Cleaned up to support torchscript.\n* Some refactoring for existing `timm` Swin-V2-CR impl, will likely do a bit more to bring parts closer to official and decide whether to merge some aspects.\n* More Vision Transformer relative position / residual post-norm experiments (all trained on TPU thanks to TRC program)\n  * `vit_relpos_small_patch16_224` - 81.5 @ 224, 82.5 @ 320 -- rel pos, layer scale, no class token, avg pool\n  * `vit_relpos_medium_patch16_rpn_224` - 82.3 @ 224, 83.1 @ 320 -- rel pos + res-post-norm, no class token, avg pool\n  * `vit_relpos_medium_patch16_224` - 82.5 @ 224, 83.3 @ 320 -- rel pos, layer scale, no class token, avg pool\n  * `vit_relpos_base_patch16_gapcls_224` - 82.8 @ 224, 83.9 @ 320 -- rel pos, layer scale, class token, avg pool (by mistake)\n* Bring 512 dim, 8-head 'medium' ViT model variant back to life (after using in a pre DeiT 'small' model for first ViT impl back in 2020)\n* Add ViT relative position support for switching btw existing impl and some additions in official Swin-V2 impl for future trials\n* Sequencer2D impl (https://arxiv.org/abs/2205.01972), added via PR from author (https://github.com/okojoalg)\n\n### May 2, 2022\n* Vision Transformer experiments adding Relative Position (Swin-V2 log-coord) (`vision_transformer_relpos.py`) and Residual Post-Norm branches (from Swin-V2) (`vision_transformer*.py`)\n  * `vit_relpos_base_patch32_plus_rpn_256` - 79.5 @ 256, 80.6 @ 320 -- rel pos + extended width + res-post-norm, no class token, avg pool\n  * `vit_relpos_base_patch16_224` - 82.5 @ 224, 83.6 @ 320 -- rel pos, layer scale, no class token, avg pool\n  * `vit_base_patch16_rpn_224` - 82.3 @ 224 -- rel pos + res-post-norm, no 
class token, avg pool\n* Vision Transformer refactor to remove representation layer that was only used in initial vit and rarely used since with newer pretrain (ie `How to Train Your ViT`)\n* `vit_*` models support removal of class token, use of global average pool, use of fc_norm (ala beit, mae).\n\n### April 22, 2022\n* `timm` models are now officially supported in [fast.ai](https://www.fast.ai/)! Just in time for the new Practical Deep Learning course. `timmdocs` documentation link updated to [timm.fast.ai](http://timm.fast.ai/).\n* Two more model weights added in the TPU trained [series](https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-tpu-weights). Some In22k pretrain still in progress.\n  * `seresnext101d_32x8d` - 83.69 @ 224, 84.35 @ 288\n  * `seresnextaa101d_32x8d` (anti-aliased w/ AvgPool2d) - 83.85 @ 224, 84.57 @ 288\n\n### March 23, 2022\n* Add `ParallelBlock` and `LayerScale` option to base vit models to support model configs in [Three things everyone should know about ViT](https://arxiv.org/abs/2203.09795)\n* `convnext_tiny_hnf` (head norm first) weights trained with (close to) A2 recipe, 82.2% top-1, could do better with more epochs.\n\n### March 21, 2022\n* Merge `norm_norm_norm`. **IMPORTANT** this update for a coming 0.6.x release will likely de-stabilize the master branch for a while. 
Branch [`0.5.x`](https://github.com/rwightman/pytorch-image-models/tree/0.5.x) or a previous 0.5.x release can be used if stability is required.\n* Significant weights update (all TPU trained) as described in this [release](https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-tpu-weights)\n  * `regnety_040` - 82.3 @ 224, 82.96 @ 288\n  * `regnety_064` - 83.0 @ 224, 83.65 @ 288\n  * `regnety_080` - 83.17 @ 224, 83.86 @ 288\n  * `regnetv_040` - 82.44 @ 224, 83.18 @ 288   (timm pre-act)\n  * `regnetv_064` - 83.1 @ 224, 83.71 @ 288   (timm pre-act)\n  * `regnetz_040` - 83.67 @ 256, 84.25 @ 320\n  * `regnetz_040h` - 83.77 @ 256, 84.5 @ 320 (w/ extra fc in head)\n  * `resnetv2_50d_gn` - 80.8 @ 224, 81.96 @ 288 (pre-act GroupNorm)\n  * `resnetv2_50d_evos` 80.77 @ 224, 82.04 @ 288 (pre-act EvoNormS)\n  * `regnetz_c16_evos`  - 81.9 @ 256, 82.64 @ 320 (EvoNormS)\n  * `regnetz_d8_evos`  - 83.42 @ 256, 84.04 @ 320 (EvoNormS)\n  * `xception41p` - 82 @ 299   (timm pre-act)\n  * `xception65` -  83.17 @ 299\n  * `xception65p` -  83.14 @ 299   (timm pre-act)\n  * `resnext101_64x4d` - 82.46 @ 224, 83.16 @ 288\n  * `seresnext101_32x8d` - 83.57 @ 224, 84.270 @ 288\n  * `resnetrs200` - 83.85 @ 256, 84.44 @ 320\n* HuggingFace hub support fixed w/ initial groundwork for allowing alternative 'config sources' for pretrained model definitions and weights (generic local file / remote url support soon)\n* SwinTransformer-V2 implementation added. Submitted by [Christoph Reich](https://github.com/ChristophReich1996). 
Training experiments and model changes by myself are ongoing so expect compat breaks.\n* Swin-S3 (AutoFormerV2) models / weights added from https://github.com/microsoft/Cream/tree/main/AutoFormerV2\n* MobileViT models w/ weights adapted from https://github.com/apple/ml-cvnets\n* PoolFormer models w/ weights adapted from https://github.com/sail-sg/poolformer\n* VOLO models w/ weights adapted from https://github.com/sail-sg/volo\n* Significant work experimenting with non-BatchNorm norm layers such as EvoNorm, FilterResponseNorm, GroupNorm, etc\n* Enhance support for alternate norm + act ('NormAct') layers added to a number of models, esp EfficientNet/MobileNetV3, RegNet, and aligned Xception\n* Grouped conv support added to EfficientNet family\n* Add 'group matching' API to all models to allow grouping model parameters for application of 'layer-wise' LR decay, lr scale added to LR scheduler\n* Gradient checkpointing support added to many models\n* `forward_head(x, pre_logits=False)` fn added to all models to allow separate calls of `forward_features` + `forward_head`\n* All vision transformer and vision MLP models update to return non-pooled / non-token selected features from `forward_features`, for consistency with CNN models, token selection or pooling now applied in `forward_head`\n\n### Feb 2, 2022\n* [Chris Hughes](https://github.com/Chris-hughes10) posted an exhaustive run through of `timm` on his blog yesterday. Well worth a read. [Getting Started with PyTorch Image Models (timm): A Practitioner’s Guide](https://towardsdatascience.com/getting-started-with-pytorch-image-models-timm-a-practitioners-guide-4e77b4bf9055)\n* I'm currently prepping to merge the `norm_norm_norm` branch back to master (ver 0.6.x) in next week or so.\n  * The changes are more extensive than usual and may destabilize and break some model API use (aiming for full backwards compat). 
So, beware `pip install git+https://github.com/rwightman/pytorch-image-models` installs!\n  * `0.5.x` releases and a `0.5.x` branch will remain stable with a cherry pick or two until dust clears. Recommend sticking to pypi install for a bit if you want stable.\n\n### Jan 14, 2022\n* Version 0.5.4 w/ release to be pushed to pypi. It's been a while since last pypi update and riskier changes will be merged to main branch soon....\n* Add ConvNeXT models /w weights from official impl (https://github.com/facebookresearch/ConvNeXt), a few perf tweaks, compatible with timm features\n* Tried training a few small (~1.8-3M param) / mobile optimized models, a few are good so far, more on the way...\n  * `mnasnet_small` - 65.6 top-1\n  * `mobilenetv2_050` - 65.9\n  * `lcnet_100/075/050` - 72.1 / 68.8 / 63.1\n  * `semnasnet_075` - 73\n  * `fbnetv3_b/d/g` - 79.1 / 79.7 / 82.0\n* TinyNet models added by [rsomani95](https://github.com/rsomani95)\n* LCNet added via MobileNetV3 architecture\n\n"
  },
  {
    "path": "hfdocs/source/feature_extraction.mdx",
    "content": "# Feature Extraction\n\nAll of the models in `timm` have consistent mechanisms for obtaining various types of features from the model for tasks besides classification.\n\n## Penultimate Layer Features (Pre-Classifier Features)\n\nThe features from the penultimate model layer can be obtained in several ways without requiring model surgery (although feel free to do surgery). One must first decide if they want pooled or un-pooled features.\n\n### Unpooled\n\nThere are three ways to obtain unpooled features. The final, unpooled features are sometimes referred to as the last hidden state. In `timm` this is up to and including the final normalization layer (in e.g. ViT style models) but does not include pooling / class token selection and final post-pooling layers.\n\nWithout modifying the network, one can call `model.forward_features(input)` on any model instead of the usual `model(input)`. This will bypass the head classifier and global pooling for networks.\n\nIf one wants to explicitly modify the network to return unpooled features, they can either create the model without a classifier and pooling, or remove it later. 
Both paths remove the parameters associated with the classifier from the network.\n\n#### forward_features()\n\n```py\n>>> import torch\n>>> import timm\n>>> m = timm.create_model('xception41', pretrained=True)\n>>> o = m(torch.randn(2, 3, 299, 299))\n>>> print(f'Original shape: {o.shape}')\n>>> o = m.forward_features(torch.randn(2, 3, 299, 299))\n>>> print(f'Unpooled shape: {o.shape}')\n```\n\nOutput:\n\n```text\nOriginal shape: torch.Size([2, 1000])\nUnpooled shape: torch.Size([2, 2048, 10, 10])\n```\n\n#### Create with no classifier and pooling\n\n```py\n>>> import torch\n>>> import timm\n>>> m = timm.create_model('resnet50', pretrained=True, num_classes=0, global_pool='')\n>>> o = m(torch.randn(2, 3, 224, 224))\n>>> print(f'Unpooled shape: {o.shape}')\n```\n\nOutput:\n\n```text\nUnpooled shape: torch.Size([2, 2048, 7, 7])\n```\n\n#### Remove it later\n\n```py\n>>> import torch\n>>> import timm\n>>> m = timm.create_model('densenet121', pretrained=True)\n>>> o = m(torch.randn(2, 3, 224, 224))\n>>> print(f'Original shape: {o.shape}')\n>>> m.reset_classifier(0, '')\n>>> o = m(torch.randn(2, 3, 224, 224))\n>>> print(f'Unpooled shape: {o.shape}')\n```\n\nOutput:\n\n```text\nOriginal shape: torch.Size([2, 1000])\nUnpooled shape: torch.Size([2, 1024, 7, 7])\n```\n\n#### Chaining unpooled output to classifier\n\nThe last hidden state can be fed back into the head of the model using the `forward_head()` function.\n\n```py\n>>> model = timm.create_model('vit_medium_patch16_reg1_gap_256', pretrained=True)\n>>> output = model.forward_features(torch.randn(2,3,256,256))\n>>> print('Unpooled output shape:', output.shape)\n>>> classified = model.forward_head(output)\n>>> print('Classification output shape:', classified.shape)\n```\n\nOutput:\n\n```text\nUnpooled output shape: torch.Size([2, 257, 512])\nClassification output shape: torch.Size([2, 1000])\n```\n\n### Pooled\n\nTo modify the network to return pooled features, one can use `forward_features()` and pool/flatten the 
result themselves, or modify the network like above but keep pooling intact. \n\n#### Create with no classifier\n\n```py\n>>> import torch\n>>> import timm\n>>> m = timm.create_model('resnet50', pretrained=True, num_classes=0)\n>>> o = m(torch.randn(2, 3, 224, 224))\n>>> print(f'Pooled shape: {o.shape}')\n```\n\nOutput:\n\n```text\nPooled shape: torch.Size([2, 2048])\n```\n\n#### Remove it later\n\n```py\n>>> import torch\n>>> import timm\n>>> m = timm.create_model('ese_vovnet19b_dw', pretrained=True)\n>>> o = m(torch.randn(2, 3, 224, 224))\n>>> print(f'Original shape: {o.shape}')\n>>> m.reset_classifier(0)\n>>> o = m(torch.randn(2, 3, 224, 224))\n>>> print(f'Pooled shape: {o.shape}')\n```\n\nOutput:\n\n```text\nOriginal shape: torch.Size([2, 1000])\nPooled shape: torch.Size([2, 1024])\n```\n\n\n## Multi-scale Feature Maps (Feature Pyramid)\n\nObject detection, segmentation, keypoint, and a variety of dense pixel tasks require access to feature maps from the backbone network at multiple scales. This is often done by modifying the original classification network. Since each network varies quite a bit in structure, it's not uncommon to see only a few backbones supported in any given obj detection or segmentation library.\n\n`timm` allows a consistent interface for creating any of the included models as feature backbones that output feature maps for selected levels. \n\nA feature backbone can be created by adding the argument `features_only=True` to any `create_model` call. By default most models with a feature hierarchy will output up to 5 features up to a reduction of 32. However this varies per model, some models have fewer hierarchy levels, and some (like ViT) have a larger number of non-hierarchical feature maps and they default to outputting the last 3. 
The `out_indices` arg can be passed to `create_model` to specify which features you want.\n\n### Create a feature map extraction model\n\n```py\n>>> import torch\n>>> import timm\n>>> m = timm.create_model('resnest26d', features_only=True, pretrained=True)\n>>> o = m(torch.randn(2, 3, 224, 224))\n>>> for x in o:\n...     print(x.shape)\n```\n\nOutput:\n\n```text\ntorch.Size([2, 64, 112, 112])\ntorch.Size([2, 256, 56, 56])\ntorch.Size([2, 512, 28, 28])\ntorch.Size([2, 1024, 14, 14])\ntorch.Size([2, 2048, 7, 7])\n```\n\n### Query the feature information\n\nAfter a feature backbone has been created, it can be queried to provide channel or resolution reduction information to the downstream heads without requiring static config or hardcoded constants. The `.feature_info` attribute is a class encapsulating the information about the feature extraction points.\n\n```py\n>>> import torch\n>>> import timm\n>>> m = timm.create_model('regnety_032', features_only=True, pretrained=True)\n>>> print(f'Feature channels: {m.feature_info.channels()}')\n>>> o = m(torch.randn(2, 3, 224, 224))\n>>> for x in o:\n...     print(x.shape)\n```\n\nOutput:\n\n```text\nFeature channels: [32, 72, 216, 576, 1512]\ntorch.Size([2, 32, 112, 112])\ntorch.Size([2, 72, 56, 56])\ntorch.Size([2, 216, 28, 28])\ntorch.Size([2, 576, 14, 14])\ntorch.Size([2, 1512, 7, 7])\n```\n\n### Select specific feature levels or limit the stride\n\nThere are two additional creation arguments impacting the output features. \n\n* `out_indices` selects which indices to output\n* `output_stride` limits the feature output stride of the network (also works in classification mode BTW)\n\n#### Output index selection\n\nThe `out_indices` argument is supported by all models, but not all models have the same index to feature stride mapping. Look at the code or check feature_info to compare. The out indices generally correspond to the `C(i+1)th` feature level (a `2^(i+1)` reduction). 
For most convnet models, index 0 is the stride 2 features, and index 4 is stride 32. For many ViT or ViT-Conv hybrids there may be many to all feature maps of the same shape, or a combination of hierarchical and non-hierarchical feature maps. It is best to look at the `feature_info` attribute to see the number of features, their corresponding channel count and reduction level.\n\n`out_indices` supports negative indexing, this makes it easy to get the last, penultimate, etc feature map. `out_indices=(-2,)` would return the penultimate feature map for any model.\n\n#### Output stride (feature map dilation)\n\n`output_stride` is achieved by converting layers to use dilated convolutions. Doing so is not always straightforward, some networks only support `output_stride=32`.\n\n```py\n>>> import torch\n>>> import timm\n>>> m = timm.create_model('ecaresnet101d', features_only=True, output_stride=8, out_indices=(2, 4), pretrained=True)\n>>> print(f'Feature channels: {m.feature_info.channels()}')\n>>> print(f'Feature reduction: {m.feature_info.reduction()}')\n>>> o = m(torch.randn(2, 3, 320, 320))\n>>> for x in o:\n...     print(x.shape)\n```\n\nOutput:\n\n```text\nFeature channels: [512, 2048]\nFeature reduction: [8, 8]\ntorch.Size([2, 512, 40, 40])\ntorch.Size([2, 2048, 40, 40])\n```\n\n## Flexible intermediate feature map extraction\n\nIn addition to using `features_only` with the model factory, many models support a `forward_intermediates()` method which provides a flexible mechanism for extracting both the intermediate feature maps and the last hidden state (which can be chained to the head). Additionally this method supports some model specific features such as returning class or distill prefix tokens for some models.\n\nAccompanying the `forward_intermediates` function is a `prune_intermediate_layers` function that allows one to prune layers from the model, including the head, final norm, and/or trailing blocks/stages that are not needed. 
\n\nAn `indices` argument is used for both `forward_intermediates()` and `prune_intermediate_layers()` to select the features to return or layers to remove. As with the `out_indices` for `features_only` API, `indices` is model specific and selects which intermediates are returned.\n\nIn non-hierarchical block based models such as ViT the indices correspond to the blocks, in models with hierarchical stages they usually correspond to the output of the stem + each hierarchical stage. Both positive (from the start), and negative (relative to the end) indexing works, and `None` is used to return all intermediates.\n\nThe `prune_intermediate_layers()` call returns an indices variable, as negative indices must be converted to absolute (positive) indices when the model is trimmed.\n\n```py\nmodel = timm.create_model('vit_medium_patch16_reg1_gap_256', pretrained=True)\noutput, intermediates = model.forward_intermediates(torch.randn(2,3,256,256))\nfor i, o in enumerate(intermediates):\n    print(f'Feat index: {i}, shape: {o.shape}')\n```\n\n```text\nFeat index: 0, shape: torch.Size([2, 512, 16, 16])\nFeat index: 1, shape: torch.Size([2, 512, 16, 16])\nFeat index: 2, shape: torch.Size([2, 512, 16, 16])\nFeat index: 3, shape: torch.Size([2, 512, 16, 16])\nFeat index: 4, shape: torch.Size([2, 512, 16, 16])\nFeat index: 5, shape: torch.Size([2, 512, 16, 16])\nFeat index: 6, shape: torch.Size([2, 512, 16, 16])\nFeat index: 7, shape: torch.Size([2, 512, 16, 16])\nFeat index: 8, shape: torch.Size([2, 512, 16, 16])\nFeat index: 9, shape: torch.Size([2, 512, 16, 16])\nFeat index: 10, shape: torch.Size([2, 512, 16, 16])\nFeat index: 11, shape: torch.Size([2, 512, 16, 16])\n```\n\n```py\nmodel = timm.create_model('vit_medium_patch16_reg1_gap_256', pretrained=True)\nprint('Original params:', sum([p.numel() for p in model.parameters()]))\n\nindices = model.prune_intermediate_layers(indices=(-2,), prune_head=True, prune_norm=True)  # prune head, norm, last block\nprint('Pruned params:', 
sum([p.numel() for p in model.parameters()]))\n\nintermediates = model.forward_intermediates(torch.randn(2,3,256,256), indices=indices, intermediates_only=True)  # return penultimate intermediate\nfor o in intermediates:    \n    print(f'Feat shape: {o.shape}')\n```\n\n```text\nOriginal params: 38880232\nPruned params: 35212800\nFeat shape: torch.Size([2, 512, 16, 16])\n```\n"
  },
  {
    "path": "hfdocs/source/hf_hub.mdx",
    "content": "# Sharing and Loading Models From the Hugging Face Hub\n\nThe `timm` library has a built-in integration with the Hugging Face Hub, making it easy to share and load models from the 🤗 Hub.\n\nIn this short guide, we'll see how to:\n  1. Share a `timm` model on the Hub\n  2. How to load that model back from the Hub\n\n## Authenticating\n\nFirst, you'll need to make sure you have the `huggingface_hub` package installed.\n\n```bash\npip install huggingface_hub\n```\n\nThen, you'll need to authenticate yourself. You can do this by running the following command:\n\n```bash\nhuggingface-cli login\n```\n\nOr, if you're using a notebook, you can use the `notebook_login` helper:\n\n```py\n>>> from huggingface_hub import notebook_login\n>>> notebook_login()\n```\n\n## Sharing a Model\n\n```py\n>>> import timm\n>>> model = timm.create_model('resnet18', pretrained=True, num_classes=4)\n```\n\nHere is where you would normally train or fine-tune the model. We'll skip that for the sake of this tutorial.\n\nLet's pretend we've now fine-tuned the model. The next step would be to push it to the Hub! We can do this with the `timm.models.hub.push_to_hf_hub` function.\n\n```py\n>>> model_cfg = dict(label_names=['a', 'b', 'c', 'd'])\n>>> timm.models.push_to_hf_hub(model, 'resnet18-random', model_config=model_cfg)\n```\n\nRunning the above would push the model to `<your-username>/resnet18-random` on the Hub. You can now share this model with your friends, or use it in your own code!\n\n## Loading a Model\n\nLoading a model from the Hub is as simple as calling `timm.create_model` with the `pretrained` argument set to the name of the model you want to load. In this case, we'll use [`nateraw/resnet18-random`](https://huggingface.co/nateraw/resnet18-random), which is the model we just pushed to the Hub.\n\n```py\n>>> model_reloaded = timm.create_model('hf_hub:nateraw/resnet18-random', pretrained=True)\n```\n"
  },
  {
    "path": "hfdocs/source/hparams.mdx",
    "content": "# HParams\nOver the years, many `timm` models have been trained with various hyper-parameters as the libraries and models evolved. I don't have a record of every instance, but have recorded instances of many that can serve as a very good starting point.\n\n## Tags\nMost `timm` trained models have an identifier in their pretrained tag that relates them (roughly) to a family / version of hparams I've used over the years.\n\n| Tag(s) | Description | Optimizer | LR Schedule | Other Notes |\n|--------|-------------|-----------|-------------|-------------|\n| `a1h` | Based on [ResNet Strikes Back](https://arxiv.org/abs/2110.00476) `A1` recipe | LAMB | Cosine with warmup | Stronger dropout, stochastic depth, and RandAugment than paper `A1` recipe |\n| `ah` | Based on [ResNet Strikes Back](https://arxiv.org/abs/2110.00476) `A1` recipe | LAMB | Cosine with warmup | No CutMix. Stronger dropout, stochastic depth, and RandAugment than paper `A1` recipe |\n| `a1`, `a2`, `a3` | ResNet Strikes Back `A{1,2,3}` recipe | LAMB with BCE loss | Cosine with warmup | — |\n| `b1`, `b2`, `b1k`, `b2k` | Based on [ResNet Strikes Back](https://arxiv.org/abs/2110.00476) `B` recipe (equivalent to `timm` `RA2` recipes) | RMSProp (TF 1.0 behaviour) | Step (exponential decay w/ staircase) with warmup | — |\n| `c`, `c1`, `c2`, `c3` | Based on [ResNet Strikes Back](https://arxiv.org/abs/2110.00476) `C` recipes | SGD (Nesterov) with AGC | Cosine with warmup | — |\n| `ch` | Based on [ResNet Strikes Back](https://arxiv.org/abs/2110.00476) `C` recipes | SGD (Nesterov) with AGC | Cosine with warmup | Stronger dropout, stochastic depth, and RandAugment than paper `C1`/`C2` recipes |\n| `d`, `d1`, `d2` | Based on [ResNet Strikes Back](https://arxiv.org/abs/2110.00476) `D` recipe | AdamW with BCE loss | Cosine with warmup | — |\n| `sw` | Based on Swin Transformer train/pretrain recipe (basis of DeiT and ConvNeXt recipes) | AdamW with gradient clipping, EMA | Cosine with warmup | — |\n| `ra`, 
`ra2`, `ra3`, `racm`, `raa` | RandAugment recipes. Inspired by EfficientNet RandAugment recipes. Covered by `B` recipe in [ResNet Strikes Back](https://arxiv.org/abs/2110.00476). | RMSProp (TF 1.0 behaviour), EMA | Step (exponential decay w/ staircase) with warmup | — |\n| `ra4` | RandAugment v4. Inspired by MobileNetV4 hparams. | — | — | — |\n| `am` | AugMix recipe | SGD (Nesterov) with JSD loss | Cosine with warmup | — |\n| `ram` | AugMix (with RandAugment) recipe | SGD (Nesterov) with JSD loss | Cosine with warmup | — |\n| `bt` | Bag-of-Tricks recipe | SGD (Nesterov) | Cosine with warmup | — |\n\n## Config File Gists\nI've collected several of the hparam families in a series of gists. These can be downloaded and used with the `--config hparam.yaml` argument with the `timm` train script. Some adjustment is always required for the LR vs effective global batch size.\n\n| Tag | Key Model Architectures | Gist Link |\n|-----|------------------------|-----------|\n| `ra2` | ResNet, EfficientNet, RegNet, NFNet | [Link](https://gist.github.com/rwightman/07839a82d0f50e42840168bc43df70b3) |\n| `ra3` | RegNet | [Link](https://gist.github.com/rwightman/37252f8d7d850a94e43f1fcb7b3b8322) |\n| `ra4` | MobileNetV4 | [Link](https://gist.github.com/rwightman/f6705cb65c03daeebca8aa129b1b94ad) |\n| `sw` | ViT, ConvNeXt, CoAtNet, MaxViT | [Link](https://gist.github.com/rwightman/943c0fe59293b44024bbd2d5d23e6303) |\n| `sbb` | ViT | [Link](https://gist.github.com/rwightman/fb37c339efd2334177ff99a8083ebbc4) |\n| — | Tiny Test Models | [Link](https://gist.github.com/rwightman/9ba8efc39a546426e99055720d2f705f) |\n"
  },
  {
    "path": "hfdocs/source/index.mdx",
    "content": "# timm\n\n<img class=\"float-left !m-0 !border-0 !dark:border-0 !shadow-none !max-w-lg w-[150px]\" src=\"https://huggingface.co/front/thumbnails/docs/timm.png\"/>\n\n`timm` is a library containing SOTA computer vision models, layers, utilities, optimizers, schedulers, data-loaders, augmentations, and training/evaluation scripts.\n\nIt comes packaged with >700 pretrained models, and is designed to be flexible and easy to use.\n\nRead the [quick start guide](quickstart) to get up and running with the `timm` library. You will learn how to load, discover, and use pretrained models included in the library.\n\n<div class=\"mt-10\">\n  <div class=\"w-full flex flex-col space-y-4 md:space-y-0 md:grid md:grid-cols-2 md:gap-y-4 md:gap-x-5\">\n    <a class=\"!no-underline border dark:border-gray-700 p-5 rounded-lg shadow hover:shadow-lg\" href=\"./feature_extraction\"\n      ><div class=\"w-full text-center bg-gradient-to-br from-blue-400 to-blue-500 rounded-lg py-1.5 font-semibold mb-5 text-white text-lg leading-relaxed\">Tutorials</div>\n      <p class=\"text-gray-700\">Learn the basics and become familiar with timm. Start here if you are using timm for the first time!</p>\n    </a>\n    <a class=\"!no-underline border dark:border-gray-700 p-5 rounded-lg shadow hover:shadow-lg\" href=\"./reference/models\"\n      ><div class=\"w-full text-center bg-gradient-to-br from-purple-400 to-purple-500 rounded-lg py-1.5 font-semibold mb-5 text-white text-lg leading-relaxed\">Reference</div>\n      <p class=\"text-gray-700\">Technical descriptions of how timm classes and methods work.</p>\n    </a>\n  </div>\n</div>\n"
  },
  {
    "path": "hfdocs/source/installation.mdx",
    "content": "# Installation\n\nBefore you start, you'll need to set up your environment and install the appropriate packages. `timm` is tested on **Python 3+**.\n\n## Virtual Environment\n\nYou should install `timm` in a [virtual environment](https://docs.python.org/3/library/venv.html) to keep things tidy and avoid dependency conflicts.\n\n1. Create and navigate to your project directory:\n\n   ```bash\n   mkdir ~/my-project\n   cd ~/my-project\n   ```\n\n2. Start a virtual environment inside your directory:\n\n   ```bash\n   python -m venv .env\n   ```\n\n3. Activate and deactivate the virtual environment with the following commands:\n\n   ```bash\n   # Activate the virtual environment\n   source .env/bin/activate\n   \n   # Deactivate the virtual environment\n   deactivate\n   ```\n\nOnce you've created your virtual environment, you can install `timm` in it.\n\n## Using pip\n\nThe most straightforward way to install `timm` is with pip:\n\n```bash\npip install timm\n```\n\nAlternatively, you can install `timm` from GitHub directly to get the latest, bleeding-edge version:\n\n```bash\npip install git+https://github.com/rwightman/pytorch-image-models.git\n```\n\nRun the following command to check if `timm` has been properly installed:\n\n```bash\npython -c \"from timm import list_models; print(list_models(pretrained=True)[:5])\"\n```\n\nThis command lists the first five pretrained models available in `timm` (which are sorted alphabetically). You should see the following output:\n\n```python\n['adv_inception_v3', 'bat_resnext26ts', 'beit_base_patch16_224', 'beit_base_patch16_224_in22k', 'beit_base_patch16_384']\n```\n\n## From Source\n\nBuilding `timm` from source lets you make changes to the code base. 
To install from the source, clone the repository and install with the following commands:\n\n```bash\ngit clone https://github.com/rwightman/pytorch-image-models.git\ncd pytorch-image-models\npip install -e .\n```\n\nAgain, you can check if `timm` was properly installed with the following command:\n\n```bash\npython -c \"from timm import list_models; print(list_models(pretrained=True)[:5])\"\n```\n"
  },
  {
    "path": "hfdocs/source/models/adversarial-inception-v3.mdx",
    "content": "# Adversarial Inception v3\n\n**Inception v3** is a convolutional neural network architecture from the Inception family that makes several improvements including using [Label Smoothing](https://paperswithcode.com/method/label-smoothing), Factorized 7 x 7 convolutions, and the use of an [auxiliary classifier](https://paperswithcode.com/method/auxiliary-classifier) to propagate label information lower down the network (along with the use of batch normalization for layers in the sidehead). The key building block is an [Inception Module](https://paperswithcode.com/method/inception-v3-module).\n\nThis particular model was trained for study of adversarial examples (adversarial training).\n\nThe weights from this model were ported from [Tensorflow/Models](https://github.com/tensorflow/models).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('adv_inception_v3', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     
out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `adv_inception_v3`. 
You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('adv_inception_v3', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/abs-1804-00097,\n  author    = {Alexey Kurakin and\n               Ian J. Goodfellow and\n               Samy Bengio and\n               Yinpeng Dong and\n               Fangzhou Liao and\n               Ming Liang and\n               Tianyu Pang and\n               Jun Zhu and\n               Xiaolin Hu and\n               Cihang Xie and\n               Jianyu Wang and\n               Zhishuai Zhang and\n               Zhou Ren and\n               Alan L. 
Yuille and\n               Sangxia Huang and\n               Yao Zhao and\n               Yuzhe Zhao and\n               Zhonglin Han and\n               Junjiajia Long and\n               Yerkebulan Berdibekov and\n               Takuya Akiba and\n               Seiya Tokui and\n               Motoki Abe},\n  title     = {Adversarial Attacks and Defences Competition},\n  journal   = {CoRR},\n  volume    = {abs/1804.00097},\n  year      = {2018},\n  url       = {http://arxiv.org/abs/1804.00097},\n  archivePrefix = {arXiv},\n  eprint    = {1804.00097},\n  timestamp = {Thu, 31 Oct 2019 16:31:22 +0100},\n  biburl    = {https://dblp.org/rec/journals/corr/abs-1804-00097.bib},\n  bibsource = {dblp computer science bibliography, https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Adversarial Inception v3\n  Paper:\n    Title: Adversarial Attacks and Defences Competition\n    URL: https://paperswithcode.com/paper/adversarial-attacks-and-defences-competition\nModels:\n- Name: adv_inception_v3\n  In Collection: Adversarial Inception v3\n  Metadata:\n    FLOPs: 7352418880\n    Parameters: 23830000\n    File Size: 95549439\n    Architecture:\n    - 1x1 Convolution\n    - Auxiliary Classifier\n    - Average Pooling\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inception-v3 Module\n    - Max Pooling\n    - ReLU\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: adv_inception_v3\n    Crop Pct: '0.875'\n    Image Size: '299'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/inception_v3.py#L456\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/adv_inception_v3-9e27bd63.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.58%\n      Top 5 Accuracy: 
93.74%\n-->"
  },
  {
    "path": "hfdocs/source/models/advprop.mdx",
    "content": "# AdvProp (EfficientNet)\n\n**AdvProp** is an adversarial training scheme which treats adversarial examples as additional examples, to prevent overfitting. Key to the method is the usage of a separate auxiliary batch norm for adversarial examples, as they have different underlying distributions to normal examples.\n\nThe weights from this model were ported from [Tensorflow/TPU](https://github.com/tensorflow/tpu).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('tf_efficientnet_b0_ap', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `tf_efficientnet_b0_ap`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('tf_efficientnet_b0_ap', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{xie2020adversarial,\n      title={Adversarial Examples Improve Image Recognition}, \n      author={Cihang Xie and Mingxing Tan and Boqing Gong and Jiang Wang and Alan Yuille and Quoc V. 
Le},\n      year={2020},\n      eprint={1911.09665},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: AdvProp\n  Paper:\n    Title: Adversarial Examples Improve Image Recognition\n    URL: https://paperswithcode.com/paper/adversarial-examples-improve-image\nModels:\n- Name: tf_efficientnet_b0_ap\n  In Collection: AdvProp\n  Metadata:\n    FLOPs: 488688572\n    Parameters: 5290000\n    File Size: 21385973\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AdvProp\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_b0_ap\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '224'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1334\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b0_ap-f262efe1.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.1%\n      Top 5 Accuracy: 93.26%\n- Name: tf_efficientnet_b1_ap\n  In Collection: AdvProp\n  Metadata:\n    FLOPs: 883633200\n    Parameters: 7790000\n    File Size: 31515350\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    
Tasks:\n    - Image Classification\n    Training Techniques:\n    - AdvProp\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_b1_ap\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.882'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '240'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1344\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b1_ap-44ef0a3d.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.28%\n      Top 5 Accuracy: 94.3%\n- Name: tf_efficientnet_b2_ap\n  In Collection: AdvProp\n  Metadata:\n    FLOPs: 1234321170\n    Parameters: 9110000\n    File Size: 36800745\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AdvProp\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_b2_ap\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.89'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '260'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1354\n  Weights: 
https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b2_ap-2f8e7636.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.3%\n      Top 5 Accuracy: 95.03%\n- Name: tf_efficientnet_b3_ap\n  In Collection: AdvProp\n  Metadata:\n    FLOPs: 2275247568\n    Parameters: 12230000\n    File Size: 49384538\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AdvProp\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_b3_ap\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.904'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '300'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1364\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b3_ap-aad25bdd.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 81.82%\n      Top 5 Accuracy: 95.62%\n- Name: tf_efficientnet_b4_ap\n  In Collection: AdvProp\n  Metadata:\n    FLOPs: 5749638672\n    Parameters: 19340000\n    File Size: 77993585\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AdvProp\n 
   - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_b4_ap\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.922'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '380'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1374\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b4_ap-dedb23e6.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 83.26%\n      Top 5 Accuracy: 96.39%\n- Name: tf_efficientnet_b5_ap\n  In Collection: AdvProp\n  Metadata:\n    FLOPs: 13176501888\n    Parameters: 30390000\n    File Size: 122403150\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AdvProp\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_b5_ap\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.934'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '456'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1384\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5_ap-9e82fae8.pth\n  Results:\n  - Task: 
Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 84.25%\n      Top 5 Accuracy: 96.97%\n- Name: tf_efficientnet_b6_ap\n  In Collection: AdvProp\n  Metadata:\n    FLOPs: 24180518488\n    Parameters: 43040000\n    File Size: 173237466\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AdvProp\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_b6_ap\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.942'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '528'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1394\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b6_ap-4ffb161f.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 84.79%\n      Top 5 Accuracy: 97.14%\n- Name: tf_efficientnet_b7_ap\n  In Collection: AdvProp\n  Metadata:\n    FLOPs: 48205304880\n    Parameters: 66349999\n    File Size: 266850607\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AdvProp\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - 
ImageNet\n    ID: tf_efficientnet_b7_ap\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.949'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '600'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1405\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b7_ap-ddb28fec.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 85.12%\n      Top 5 Accuracy: 97.25%\n- Name: tf_efficientnet_b8_ap\n  In Collection: AdvProp\n  Metadata:\n    FLOPs: 80962956270\n    Parameters: 87410000\n    File Size: 351412563\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AdvProp\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_b8_ap\n    LR: 0.128\n    Epochs: 350\n    Crop Pct: '0.954'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '672'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1416\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b8_ap-00e169fa.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 85.37%\n      Top 5 Accuracy: 97.3%\n-->"
  },
  {
    "path": "hfdocs/source/models/big-transfer.mdx",
    "content": "# Big Transfer (BiT)\n\n**Big Transfer (BiT)** is a type of pretraining recipe that pre-trains on a large supervised source dataset, and fine-tunes the weights on the target task. Models are trained on the JFT-300M dataset. The finetuned models contained in this collection are finetuned on ImageNet.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('resnetv2_101x1_bitm', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `resnetv2_101x1_bitm`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('resnetv2_101x1_bitm', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{kolesnikov2020big,\n      title={Big Transfer (BiT): General Visual Representation Learning}, \n      author={Alexander Kolesnikov and Lucas Beyer and Xiaohua Zhai and Joan Puigcerver and Jessica Yung and Sylvain Gelly and Neil Houlsby},\n      year={2020},\n      eprint={1912.11370},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Big Transfer\n  Paper:\n    Title: 'Big Transfer (BiT): General Visual Representation Learning'\n    URL: https://paperswithcode.com/paper/large-scale-learning-of-general-visual\nModels:\n- Name: resnetv2_101x1_bitm\n  In Collection: Big Transfer\n  Metadata:\n    FLOPs: 5330896\n    Parameters: 44540000\n    File Size: 178256468\n    Architecture:\n    - 
1x1 Convolution\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Group Normalization\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Weight Standardization\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Mixup\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - JFT-300M\n    Training Resources: Cloud TPUv3-512\n    ID: resnetv2_101x1_bitm\n    LR: 0.03\n    Epochs: 90\n    Layers: 101\n    Crop Pct: '1.0'\n    Momentum: 0.9\n    Batch Size: 4096\n    Image Size: '480'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/b9843f954b0457af2db4f9dea41a8538f51f5d78/timm/models/resnetv2.py#L444\n  Weights: https://storage.googleapis.com/bit_models/BiT-M-R101x1-ILSVRC2012.npz\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 82.21%\n      Top 5 Accuracy: 96.47%\n- Name: resnetv2_101x3_bitm\n  In Collection: Big Transfer\n  Metadata:\n    FLOPs: 15988688\n    Parameters: 387930000\n    File Size: 1551830100\n    Architecture:\n    - 1x1 Convolution\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Group Normalization\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Weight Standardization\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Mixup\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - JFT-300M\n    Training Resources: Cloud TPUv3-512\n    ID: resnetv2_101x3_bitm\n    LR: 0.03\n    Epochs: 90\n    Layers: 101\n    Crop Pct: '1.0'\n    Momentum: 0.9\n    Batch Size: 4096\n    Image Size: '480'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/b9843f954b0457af2db4f9dea41a8538f51f5d78/timm/models/resnetv2.py#L451\n  Weights: https://storage.googleapis.com/bit_models/BiT-M-R101x3-ILSVRC2012.npz\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 84.38%\n      Top 5 Accuracy: 97.37%\n- Name: resnetv2_152x2_bitm\n  In Collection: Big Transfer\n  Metadata:\n    FLOPs: 10659792\n    Parameters: 236340000\n    File Size: 945476668\n    Architecture:\n    - 1x1 Convolution\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Group Normalization\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Weight Standardization\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Mixup\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - JFT-300M\n    ID: resnetv2_152x2_bitm\n    Crop Pct: '1.0'\n    Image Size: '480'\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/b9843f954b0457af2db4f9dea41a8538f51f5d78/timm/models/resnetv2.py#L458\n  Weights: https://storage.googleapis.com/bit_models/BiT-M-R152x2-ILSVRC2012.npz\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 84.4%\n      Top 5 Accuracy: 97.43%\n- Name: resnetv2_152x4_bitm\n  In Collection: Big Transfer\n  Metadata:\n    FLOPs: 21317584\n    Parameters: 936530000\n    File Size: 3746270104\n    Architecture:\n    - 1x1 Convolution\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Group Normalization\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Weight Standardization\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Mixup\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - 
JFT-300M\n    Training Resources: Cloud TPUv3-512\n    ID: resnetv2_152x4_bitm\n    Crop Pct: '1.0'\n    Image Size: '480'\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/b9843f954b0457af2db4f9dea41a8538f51f5d78/timm/models/resnetv2.py#L465\n  Weights: https://storage.googleapis.com/bit_models/BiT-M-R152x4-ILSVRC2012.npz\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 84.95%\n      Top 5 Accuracy: 97.45%\n- Name: resnetv2_50x1_bitm\n  In Collection: Big Transfer\n  Metadata:\n    FLOPs: 5330896\n    Parameters: 25550000\n    File Size: 102242668\n    Architecture:\n    - 1x1 Convolution\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Group Normalization\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Weight Standardization\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Mixup\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - JFT-300M\n    Training Resources: Cloud TPUv3-512\n    ID: resnetv2_50x1_bitm\n    LR: 0.03\n    Epochs: 90\n    Layers: 50\n    Crop Pct: '1.0'\n    Momentum: 0.9\n    Batch Size: 4096\n    Image Size: '480'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/b9843f954b0457af2db4f9dea41a8538f51f5d78/timm/models/resnetv2.py#L430\n  Weights: https://storage.googleapis.com/bit_models/BiT-M-R50x1-ILSVRC2012.npz\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.19%\n      Top 5 Accuracy: 95.63%\n- Name: resnetv2_50x3_bitm\n  In Collection: Big Transfer\n  Metadata:\n    FLOPs: 15988688\n    Parameters: 217320000\n    File Size: 869321580\n    Architecture:\n    - 1x1 Convolution\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Group 
Normalization\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Weight Standardization\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Mixup\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - JFT-300M\n    Training Resources: Cloud TPUv3-512\n    ID: resnetv2_50x3_bitm\n    LR: 0.03\n    Epochs: 90\n    Layers: 50\n    Crop Pct: '1.0'\n    Momentum: 0.9\n    Batch Size: 4096\n    Image Size: '480'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/b9843f954b0457af2db4f9dea41a8538f51f5d78/timm/models/resnetv2.py#L437\n  Weights: https://storage.googleapis.com/bit_models/BiT-M-R50x3-ILSVRC2012.npz\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 83.75%\n      Top 5 Accuracy: 97.12%\n-->"
  },
  {
    "path": "hfdocs/source/models/csp-darknet.mdx",
    "content": "# CSP-DarkNet\n\n**CSPDarknet53** is a convolutional neural network and backbone for object detection that uses [DarkNet-53](https://paperswithcode.com/method/darknet-53). It employs a CSPNet strategy to partition the feature map of the base layer into two parts and then merges them through a cross-stage hierarchy. The use of a split and merge strategy allows for more gradient flow through the network. \n\nThis CNN is used as the backbone for [YOLOv4](https://paperswithcode.com/method/yolov4).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('cspdarknet53', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `cspdarknet53`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('cspdarknet53', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{bochkovskiy2020yolov4,\n      title={YOLOv4: Optimal Speed and Accuracy of Object Detection}, \n      author={Alexey Bochkovskiy and Chien-Yao Wang and Hong-Yuan Mark Liao},\n      year={2020},\n      eprint={2004.10934},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: CSP DarkNet\n  Paper:\n    Title: 'YOLOv4: Optimal Speed and Accuracy of Object Detection'\n    URL: https://paperswithcode.com/paper/yolov4-optimal-speed-and-accuracy-of-object\nModels:\n- Name: cspdarknet53\n  In Collection: CSP DarkNet\n  
Metadata:\n    FLOPs: 8545018880\n    Parameters: 27640000\n    File Size: 110775135\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Mish\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - CutMix\n    - Label Smoothing\n    - Mosaic\n    - Polynomial Learning Rate Decay\n    - SGD with Momentum\n    - Self-Adversarial Training\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 1x NVIDIA RTX 2070 GPU\n    ID: cspdarknet53\n    LR: 0.1\n    Layers: 53\n    Crop Pct: '0.887'\n    Momentum: 0.9\n    Batch Size: 128\n    Image Size: '256'\n    Warmup Steps: 1000\n    Weight Decay: 0.0005\n    Interpolation: bilinear\n    Training Steps: 8000000\n    FPS (GPU RTX 2070): 66\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/cspnet.py#L441\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/cspdarknet53_ra_256-d05c7c21.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.05%\n      Top 5 Accuracy: 95.09%\n-->"
  },
  {
    "path": "hfdocs/source/models/csp-resnet.mdx",
    "content": "# CSP-ResNet\n\n**CSPResNet** is a convolutional neural network where we apply the Cross Stage Partial Network (CSPNet) approach to [ResNet](https://paperswithcode.com/method/resnet). The CSPNet partitions the feature map of the base layer into two parts and then merges them through a cross-stage hierarchy. The use of a split and merge strategy allows for more gradient flow through the network.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('cspresnet50', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `cspresnet50`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('cspresnet50', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{wang2019cspnet,\n      title={CSPNet: A New Backbone that can Enhance Learning Capability of CNN}, \n      author={Chien-Yao Wang and Hong-Yuan Mark Liao and I-Hau Yeh and Yueh-Hua Wu and Ping-Yang Chen and Jun-Wei Hsieh},\n      year={2019},\n      eprint={1911.11929},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: CSP ResNet\n  Paper:\n    Title: 'CSPNet: A New Backbone that can Enhance Learning Capability of CNN'\n    URL: https://paperswithcode.com/paper/cspnet-a-new-backbone-that-can-enhance\nModels:\n- Name: cspresnet50\n  In Collection: CSP ResNet\n  Metadata:\n    FLOPs: 5924992000\n    Parameters: 21620000\n    File Size: 86679303\n    Architecture:\n    - 1x1 Convolution\n    - Batch 
Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - Polynomial Learning Rate Decay\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: cspresnet50\n    LR: 0.1\n    Layers: 50\n    Crop Pct: '0.887'\n    Momentum: 0.9\n    Batch Size: 128\n    Image Size: '256'\n    Weight Decay: 0.005\n    Interpolation: bilinear\n    Training Steps: 8000000\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/cspnet.py#L415\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/cspresnet50_ra-d3e8d487.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.57%\n      Top 5 Accuracy: 94.71%\n-->"
  },
  {
    "path": "hfdocs/source/models/csp-resnext.mdx",
    "content": "# CSP-ResNeXt\n\n**CSPResNeXt** is a convolutional neural network where we apply the Cross Stage Partial Network (CSPNet) approach to [ResNeXt](https://paperswithcode.com/method/resnext). The CSPNet partitions the feature map of the base layer into two parts and then merges them through a cross-stage hierarchy. The use of a split and merge strategy allows for more gradient flow through the network.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('cspresnext50', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `cspresnext50`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('cspresnext50', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{wang2019cspnet,\n      title={CSPNet: A New Backbone that can Enhance Learning Capability of CNN}, \n      author={Chien-Yao Wang and Hong-Yuan Mark Liao and I-Hau Yeh and Yueh-Hua Wu and Ping-Yang Chen and Jun-Wei Hsieh},\n      year={2019},\n      eprint={1911.11929},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: CSP ResNeXt\n  Paper:\n    Title: 'CSPNet: A New Backbone that can Enhance Learning Capability of CNN'\n    URL: https://paperswithcode.com/paper/cspnet-a-new-backbone-that-can-enhance\nModels:\n- Name: cspresnext50\n  In Collection: CSP ResNeXt\n  Metadata:\n    FLOPs: 3962945536\n    Parameters: 20570000\n    File Size: 82562887\n    Architecture:\n    - 1x1 Convolution\n    - Batch 
Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - Polynomial Learning Rate Decay\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 1x GPU\n    ID: cspresnext50\n    LR: 0.1\n    Layers: 50\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 128\n    Image Size: '224'\n    Weight Decay: 0.005\n    Interpolation: bilinear\n    Training Steps: 8000000\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/cspnet.py#L430\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/cspresnext50_ra_224-648b4713.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.05%\n      Top 5 Accuracy: 94.94%\n-->"
  },
  {
    "path": "hfdocs/source/models/densenet.mdx",
    "content": "# DenseNet\n\n**DenseNet** is a type of convolutional neural network that utilises dense connections between layers, through [Dense Blocks](http://www.paperswithcode.com/method/dense-block), where we connect *all layers* (with matching feature-map sizes) directly with each other. To preserve the feed-forward nature, each layer obtains additional inputs from all preceding layers and passes on its own feature-maps to all subsequent layers.\n\nThe **DenseNet Blur** variant in this collection by Ross Wightman employs [Blur Pooling](http://www.paperswithcode.com/method/blur-pooling)\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('densenet121', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `densenet121`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('densenet121', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/HuangLW16a,\n  author    = {Gao Huang and\n               Zhuang Liu and\n               Kilian Q. 
Weinberger},\n  title     = {Densely Connected Convolutional Networks},\n  journal   = {CoRR},\n  volume    = {abs/1608.06993},\n  year      = {2016},\n  url       = {http://arxiv.org/abs/1608.06993},\n  archivePrefix = {arXiv},\n  eprint    = {1608.06993},\n  timestamp = {Mon, 10 Sep 2018 15:49:32 +0200},\n  biburl    = {https://dblp.org/rec/journals/corr/HuangLW16a.bib},\n  bibsource = {dblp computer science bibliography, https://dblp.org}\n}\n```\n\n```\n@misc{rw2019timm,\n  author = {Ross Wightman},\n  title = {PyTorch Image Models},\n  year = {2019},\n  publisher = {GitHub},\n  journal = {GitHub repository},\n  doi = {10.5281/zenodo.4414861},\n  howpublished = {\\url{https://github.com/rwightman/pytorch-image-models}}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: DenseNet\n  Paper:\n    Title: Densely Connected Convolutional Networks\n    URL: https://paperswithcode.com/paper/densely-connected-convolutional-networks\nModels:\n- Name: densenet121\n  In Collection: DenseNet\n  Metadata:\n    FLOPs: 3641843200\n    Parameters: 7980000\n    File Size: 32376726\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Block\n    - Dense Connections\n    - Dropout\n    - Max Pooling\n    - ReLU\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Kaiming Initialization\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: densenet121\n    LR: 0.1\n    Epochs: 90\n    Layers: 121\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/densenet.py#L295\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/densenet121_ra-50efcf5c.pth\n  Results:\n  - Task: 
Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 75.56%\n      Top 5 Accuracy: 92.65%\n- Name: densenet161\n  In Collection: DenseNet\n  Metadata:\n    FLOPs: 9931959264\n    Parameters: 28680000\n    File Size: 115730790\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Block\n    - Dense Connections\n    - Dropout\n    - Max Pooling\n    - ReLU\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Kaiming Initialization\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: densenet161\n    LR: 0.1\n    Epochs: 90\n    Layers: 161\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/densenet.py#L347\n  Weights: https://download.pytorch.org/models/densenet161-8d451a50.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.36%\n      Top 5 Accuracy: 93.63%\n- Name: densenet169\n  In Collection: DenseNet\n  Metadata:\n    FLOPs: 4316945792\n    Parameters: 14150000\n    File Size: 57365526\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Block\n    - Dense Connections\n    - Dropout\n    - Max Pooling\n    - ReLU\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Kaiming Initialization\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: densenet169\n    LR: 0.1\n    Epochs: 90\n    Layers: 169\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bicubic\n  
Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/densenet.py#L327\n  Weights: https://download.pytorch.org/models/densenet169-b2777c0a.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 75.9%\n      Top 5 Accuracy: 93.02%\n- Name: densenet201\n  In Collection: DenseNet\n  Metadata:\n    FLOPs: 5514321024\n    Parameters: 20010000\n    File Size: 81131730\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Block\n    - Dense Connections\n    - Dropout\n    - Max Pooling\n    - ReLU\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Kaiming Initialization\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: densenet201\n    LR: 0.1\n    Epochs: 90\n    Layers: 201\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/densenet.py#L337\n  Weights: https://download.pytorch.org/models/densenet201-c1103571.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.29%\n      Top 5 Accuracy: 93.48%\n- Name: densenetblur121d\n  In Collection: DenseNet\n  Metadata:\n    FLOPs: 3947812864\n    Parameters: 8000000\n    File Size: 32456500\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Blur Pooling\n    - Convolution\n    - Dense Block\n    - Dense Connections\n    - Dropout\n    - Max Pooling\n    - ReLU\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: densenetblur121d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/densenet.py#L305\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/densenetblur121d_ra-100dcfbc.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 76.59%\n      Top 5 Accuracy: 93.2%\n- Name: tv_densenet121\n  In Collection: DenseNet\n  Metadata:\n    FLOPs: 3641843200\n    Parameters: 7980000\n    File Size: 32342954\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Block\n    - Dense Connections\n    - Dropout\n    - Max Pooling\n    - ReLU\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tv_densenet121\n    LR: 0.1\n    Epochs: 90\n    Crop Pct: '0.875'\n    LR Gamma: 0.1\n    Momentum: 0.9\n    Batch Size: 32\n    Image Size: '224'\n    LR Step Size: 30\n    Weight Decay: 0.0001\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/densenet.py#L379\n  Weights: https://download.pytorch.org/models/densenet121-a639ec97.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 74.74%\n      Top 5 Accuracy: 92.15%\n-->"
  },
  {
    "path": "hfdocs/source/models/dla.mdx",
    "content": "# Deep Layer Aggregation\n\nExtending “shallow” skip connections, **Deep Layer Aggregation (DLA)** incorporates more depth and sharing. The authors introduce two structures for deep layer aggregation (DLA): iterative deep aggregation (IDA) and hierarchical deep aggregation (HDA). These structures are expressed through an architectural framework, independent of the choice of backbone, for compatibility with current and future networks. \n\nIDA focuses on fusing resolutions and scales while HDA focuses on merging features from all modules and channels. IDA follows the base hierarchy to refine resolution and aggregate scale stage-by-stage. HDA assembles its own hierarchy of tree-structured connections that cross and merge stages to aggregate different levels of representation. \n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('dla102', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     
out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `dla102`. 
You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('dla102', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{yu2019deep,\n      title={Deep Layer Aggregation}, \n      author={Fisher Yu and Dequan Wang and Evan Shelhamer and Trevor Darrell},\n      year={2019},\n      eprint={1707.06484},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: DLA\n  Paper:\n    Title: Deep Layer Aggregation\n    URL: https://paperswithcode.com/paper/deep-layer-aggregation\nModels:\n- Name: dla102\n  In Collection: DLA\n  Metadata:\n    FLOPs: 7192952808\n    Parameters: 33270000\n    File Size: 135290579\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - DLA Bottleneck Residual Block\n    - DLA Residual Block\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x GPUs\n    ID: dla102\n    LR: 0.1\n    Epochs: 120\n    Layers: 102\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    
Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dla.py#L410\n  Weights: http://dl.yf.io/dla/models/imagenet/dla102-d94d9790.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.03%\n      Top 5 Accuracy: 93.95%\n- Name: dla102x\n  In Collection: DLA\n  Metadata:\n    FLOPs: 5886821352\n    Parameters: 26310000\n    File Size: 107552695\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - DLA Bottleneck Residual Block\n    - DLA Residual Block\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x GPUs\n    ID: dla102x\n    LR: 0.1\n    Epochs: 120\n    Layers: 102\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dla.py#L418\n  Weights: http://dl.yf.io/dla/models/imagenet/dla102x-ad62be81.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.51%\n      Top 5 Accuracy: 94.23%\n- Name: dla102x2\n  In Collection: DLA\n  Metadata:\n    FLOPs: 9343847400\n    Parameters: 41280000\n    File Size: 167645295\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - DLA Bottleneck Residual Block\n    - DLA Residual Block\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD 
with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x GPUs\n    ID: dla102x2\n    LR: 0.1\n    Epochs: 120\n    Layers: 102\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dla.py#L426\n  Weights: http://dl.yf.io/dla/models/imagenet/dla102x2-262837b6.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.44%\n      Top 5 Accuracy: 94.65%\n- Name: dla169\n  In Collection: DLA\n  Metadata:\n    FLOPs: 11598004200\n    Parameters: 53390000\n    File Size: 216547113\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - DLA Bottleneck Residual Block\n    - DLA Residual Block\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x GPUs\n    ID: dla169\n    LR: 0.1\n    Epochs: 120\n    Layers: 169\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dla.py#L434\n  Weights: http://dl.yf.io/dla/models/imagenet/dla169-0914e092.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.69%\n      Top 5 Accuracy: 94.33%\n- Name: dla34\n  In Collection: DLA\n  Metadata:\n    FLOPs: 3070105576\n    Parameters: 15740000\n    File Size: 63228658\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - DLA 
Bottleneck Residual Block\n    - DLA Residual Block\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: dla34\n    LR: 0.1\n    Epochs: 120\n    Layers: 32\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dla.py#L362\n  Weights: http://dl.yf.io/dla/models/imagenet/dla34-ba72cf86.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 74.62%\n      Top 5 Accuracy: 92.06%\n- Name: dla46_c\n  In Collection: DLA\n  Metadata:\n    FLOPs: 583277288\n    Parameters: 1300000\n    File Size: 5307963\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - DLA Bottleneck Residual Block\n    - DLA Residual Block\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: dla46_c\n    LR: 0.1\n    Epochs: 120\n    Layers: 46\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dla.py#L369\n  Weights: http://dl.yf.io/dla/models/imagenet/dla46_c-2bfd52c3.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 64.87%\n      Top 5 Accuracy: 86.29%\n- Name: dla46x_c\n  In Collection: DLA\n  
Metadata:\n    FLOPs: 544052200\n    Parameters: 1070000\n    File Size: 4387641\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - DLA Bottleneck Residual Block\n    - DLA Residual Block\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: dla46x_c\n    LR: 0.1\n    Epochs: 120\n    Layers: 46\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dla.py#L378\n  Weights: http://dl.yf.io/dla/models/imagenet/dla46x_c-d761bae7.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 65.98%\n      Top 5 Accuracy: 86.99%\n- Name: dla60\n  In Collection: DLA\n  Metadata:\n    FLOPs: 4256251880\n    Parameters: 22040000\n    File Size: 89560235\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - DLA Bottleneck Residual Block\n    - DLA Residual Block\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: dla60\n    LR: 0.1\n    Epochs: 120\n    Layers: 60\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dla.py#L394\n  Weights: 
http://dl.yf.io/dla/models/imagenet/dla60-24839fc4.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.04%\n      Top 5 Accuracy: 93.32%\n- Name: dla60_res2net\n  In Collection: DLA\n  Metadata:\n    FLOPs: 4147578504\n    Parameters: 20850000\n    File Size: 84886593\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - DLA Bottleneck Residual Block\n    - DLA Residual Block\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: dla60_res2net\n    Layers: 60\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dla.py#L346\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-res2net/res2net_dla60_4s-d88db7f9.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.46%\n      Top 5 Accuracy: 94.21%\n- Name: dla60_res2next\n  In Collection: DLA\n  Metadata:\n    FLOPs: 3485335272\n    Parameters: 17030000\n    File Size: 69639245\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - DLA Bottleneck Residual Block\n    - DLA Residual Block\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: dla60_res2next\n    Layers: 60\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bilinear\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dla.py#L354\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-res2net/res2next_dla60_4s-d327927b.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.44%\n      Top 5 Accuracy: 94.16%\n- Name: dla60x\n  In Collection: DLA\n  Metadata:\n    FLOPs: 3544204264\n    Parameters: 17350000\n    File Size: 70883139\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - DLA Bottleneck Residual Block\n    - DLA Residual Block\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: dla60x\n    LR: 0.1\n    Epochs: 120\n    Layers: 60\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dla.py#L402\n  Weights: http://dl.yf.io/dla/models/imagenet/dla60x-d15cacda.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.25%\n      Top 5 Accuracy: 94.02%\n- Name: dla60x_c\n  In Collection: DLA\n  Metadata:\n    FLOPs: 593325032\n    Parameters: 1320000\n    File Size: 5454396\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - DLA Bottleneck Residual Block\n    - DLA Residual Block\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training 
Data:\n    - ImageNet\n    ID: dla60x_c\n    LR: 0.1\n    Epochs: 120\n    Layers: 60\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dla.py#L386\n  Weights: http://dl.yf.io/dla/models/imagenet/dla60x_c-b870c45c.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 67.91%\n      Top 5 Accuracy: 88.42%\n-->"
  },
  {
    "path": "hfdocs/source/models/dpn.mdx",
    "content": "# Dual Path Network (DPN)\n\nA **Dual Path Network (DPN)** is a convolutional neural network which presents a new topology of connection paths internally. The intuition is that [ResNets](https://paperswithcode.com/method/resnet) enables feature re-usage while DenseNet enables new feature exploration, and both are important for learning good representations. To enjoy the benefits from both path topologies, Dual Path Networks share common features while maintaining the flexibility to explore new features through dual path architectures. \n\nThe principal building block is an [DPN Block](https://paperswithcode.com/method/dpn-block).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('dpn107', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `dpn107`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('dpn107', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{chen2017dual,\n      title={Dual Path Networks}, \n      author={Yunpeng Chen and Jianan Li and Huaxin Xiao and Xiaojie Jin and Shuicheng Yan and Jiashi Feng},\n      year={2017},\n      eprint={1707.01629},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: DPN\n  Paper:\n    Title: Dual Path Networks\n    URL: https://paperswithcode.com/paper/dual-path-networks\nModels:\n- Name: dpn107\n  In Collection: DPN\n  Metadata:\n    FLOPs: 23524280296\n    Parameters: 86920000\n    File Size: 348612331\n    Architecture:\n    - 
Batch Normalization\n    - Convolution\n    - DPN Block\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 40x K80 GPUs\n    ID: dpn107\n    LR: 0.316\n    Layers: 107\n    Crop Pct: '0.875'\n    Batch Size: 1280\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dpn.py#L310\n  Weights: https://github.com/rwightman/pytorch-dpn-pretrained/releases/download/v0.1/dpn107_extra-1ac7121e2.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.16%\n      Top 5 Accuracy: 94.91%\n- Name: dpn131\n  In Collection: DPN\n  Metadata:\n    FLOPs: 20586274792\n    Parameters: 79250000\n    File Size: 318016207\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - DPN Block\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 40x K80 GPUs\n    ID: dpn131\n    LR: 0.316\n    Layers: 131\n    Crop Pct: '0.875'\n    Batch Size: 960\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dpn.py#L302\n  Weights: https://github.com/rwightman/pytorch-dpn-pretrained/releases/download/v0.1/dpn131-71dfe43e0.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.83%\n      Top 5 Accuracy: 94.71%\n- Name: dpn68\n  In Collection: DPN\n  Metadata:\n    FLOPs: 2990567880\n    Parameters: 12610000\n    File Size: 50761994\n    
Architecture:\n    - Batch Normalization\n    - Convolution\n    - DPN Block\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 40x K80 GPUs\n    ID: dpn68\n    LR: 0.316\n    Layers: 68\n    Crop Pct: '0.875'\n    Batch Size: 1280\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dpn.py#L270\n  Weights: https://github.com/rwightman/pytorch-dpn-pretrained/releases/download/v0.1/dpn68-66bebafa7.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 76.31%\n      Top 5 Accuracy: 92.97%\n- Name: dpn68b\n  In Collection: DPN\n  Metadata:\n    FLOPs: 2990567880\n    Parameters: 12610000\n    File Size: 50781025\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - DPN Block\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 40x K80 GPUs\n    ID: dpn68b\n    LR: 0.316\n    Layers: 68\n    Crop Pct: '0.875'\n    Batch Size: 1280\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dpn.py#L278\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/dpn68b_ra-a31ca160.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.21%\n      Top 5 Accuracy: 94.42%\n- Name: dpn92\n  In Collection: DPN\n  Metadata:\n    FLOPs: 8357659624\n    Parameters: 37670000\n    File Size: 
151248422\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - DPN Block\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 40x K80 GPUs\n    ID: dpn92\n    LR: 0.316\n    Layers: 92\n    Crop Pct: '0.875'\n    Batch Size: 1280\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dpn.py#L286\n  Weights: https://github.com/rwightman/pytorch-dpn-pretrained/releases/download/v0.1/dpn92_extra-b040e4a9b.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.99%\n      Top 5 Accuracy: 94.84%\n- Name: dpn98\n  In Collection: DPN\n  Metadata:\n    FLOPs: 15003675112\n    Parameters: 61570000\n    File Size: 247021307\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - DPN Block\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 40x K80 GPUs\n    ID: dpn98\n    LR: 0.4\n    Layers: 98\n    Crop Pct: '0.875'\n    Batch Size: 1280\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/dpn.py#L294\n  Weights: https://github.com/rwightman/pytorch-dpn-pretrained/releases/download/v0.1/dpn98-5b90dec4d.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.65%\n      Top 5 Accuracy: 94.61%\n-->"
  },
  {
    "path": "hfdocs/source/models/ecaresnet.mdx",
    "content": "# ECA-ResNet\n\nAn **ECA ResNet** is a variant on a [ResNet](https://paperswithcode.com/method/resnet) that utilises an [Efficient Channel Attention module](https://paperswithcode.com/method/efficient-channel-attention). Efficient Channel Attention is an architectural unit based on [squeeze-and-excitation blocks](https://paperswithcode.com/method/squeeze-and-excitation-block) that reduces model complexity without dimensionality reduction. \n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('ecaresnet101d', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n... 
    print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `ecaresnet101d`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('ecaresnet101d', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{wang2020ecanet,\n      title={ECA-Net: Efficient Channel Attention for Deep Convolutional Neural Networks}, \n      author={Qilong Wang and Banggu Wu and Pengfei Zhu and Peihua Li and Wangmeng Zuo and Qinghua Hu},\n      year={2020},\n      eprint={1910.03151},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: ECAResNet\n  Paper:\n    Title: 'ECA-Net: Efficient Channel Attention for Deep Convolutional Neural Networks'\n    URL: https://paperswithcode.com/paper/eca-net-efficient-channel-attention-for-deep\nModels:\n- Name: ecaresnet101d\n  In Collection: ECAResNet\n  Metadata:\n    FLOPs: 10377193728\n    Parameters: 44570000\n    File Size: 178815067\n    Architecture:\n    - 1x1 Convolution\n   
 - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Efficient Channel Attention\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x RTX 2080Ti GPUs\n    ID: ecaresnet101d\n    LR: 0.1\n    Epochs: 100\n    Layers: 101\n    Crop Pct: '0.875'\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/resnet.py#L1087\n  Weights: https://imvl-automl-sh.oss-cn-shanghai.aliyuncs.com/darts/hyperml/hyperml/job_45402/outputs/ECAResNet101D_281c5844.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 82.18%\n      Top 5 Accuracy: 96.06%\n- Name: ecaresnet101d_pruned\n  In Collection: ECAResNet\n  Metadata:\n    FLOPs: 4463972081\n    Parameters: 24880000\n    File Size: 99852736\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Efficient Channel Attention\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: ecaresnet101d_pruned\n    Layers: 101\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/resnet.py#L1097\n  Weights: 
https://imvl-automl-sh.oss-cn-shanghai.aliyuncs.com/darts/hyperml/hyperml/job_45610/outputs/ECAResNet101D_P_75a3370e.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.82%\n      Top 5 Accuracy: 95.64%\n- Name: ecaresnet50d\n  In Collection: ECAResNet\n  Metadata:\n    FLOPs: 5591090432\n    Parameters: 25580000\n    File Size: 102579290\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Efficient Channel Attention\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x RTX 2080Ti GPUs\n    ID: ecaresnet50d\n    LR: 0.1\n    Epochs: 100\n    Layers: 50\n    Crop Pct: '0.875'\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/resnet.py#L1045\n  Weights: https://imvl-automl-sh.oss-cn-shanghai.aliyuncs.com/darts/hyperml/hyperml/job_45402/outputs/ECAResNet50D_833caf58.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.61%\n      Top 5 Accuracy: 95.31%\n- Name: ecaresnet50d_pruned\n  In Collection: ECAResNet\n  Metadata:\n    FLOPs: 3250730657\n    Parameters: 19940000\n    File Size: 79990436\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Efficient Channel Attention\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image 
Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: ecaresnet50d_pruned\n    Layers: 50\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/resnet.py#L1055\n  Weights: https://imvl-automl-sh.oss-cn-shanghai.aliyuncs.com/darts/hyperml/hyperml/job_45899/outputs/ECAResNet50D_P_9c67f710.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.71%\n      Top 5 Accuracy: 94.88%\n- Name: ecaresnetlight\n  In Collection: ECAResNet\n  Metadata:\n    FLOPs: 5276118784\n    Parameters: 30160000\n    File Size: 120956612\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Efficient Channel Attention\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: ecaresnetlight\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/resnet.py#L1077\n  Weights: https://imvl-automl-sh.oss-cn-shanghai.aliyuncs.com/darts/hyperml/hyperml/job_45402/outputs/ECAResNetLight_4f34b35b.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.46%\n      Top 5 Accuracy: 95.25%\n-->"
  },
  {
    "path": "hfdocs/source/models/efficientnet-pruned.mdx",
    "content": "# EfficientNet (Knapsack Pruned)\n\n**EfficientNet** is a convolutional neural network architecture and scaling method that uniformly scales all dimensions of depth/width/resolution using a *compound coefficient*. Unlike conventional practice that arbitrary scales these factors, the EfficientNet scaling method uniformly scales network width, depth, and resolution with a set of fixed scaling coefficients. For example, if we want to use \\\\( 2^N \\\\) times more computational resources, then we can simply increase the network depth by \\\\( \\alpha ^ N \\\\), width by \\\\( \\beta ^ N \\\\), and image size by \\\\( \\gamma ^ N \\\\), where \\\\( \\alpha, \\beta, \\gamma \\\\) are constant coefficients determined by a small grid search on the original small model. EfficientNet uses a compound coefficient \\\\( \\phi \\\\) to uniformly scale network width, depth, and resolution in a principled way.\n\nThe compound scaling method is justified by the intuition that if the input image is bigger, then the network needs more layers to increase the receptive field and more channels to capture more fine-grained patterns on the bigger image.\n\nThe base EfficientNet-B0 network is based on the inverted bottleneck residual blocks of [MobileNetV2](https://paperswithcode.com/method/mobilenetv2), in addition to [squeeze-and-excitation blocks](https://paperswithcode.com/method/squeeze-and-excitation-block).\n\nThis collection consists of pruned EfficientNet models.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('efficientnet_b1_pruned', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py\n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, 
filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `efficientnet_b1_pruned`. 
You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('efficientnet_b1_pruned', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{tan2020efficientnet,\n      title={EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks},\n      author={Mingxing Tan and Quoc V. 
Le},\n      year={2020},\n      eprint={1905.11946},\n      archivePrefix={arXiv},\n      primaryClass={cs.LG}\n}\n```\n\n```\n@misc{aflalo2020knapsack,\n      title={Knapsack Pruning with Inner Distillation},\n      author={Yonathan Aflalo and Asaf Noy and Ming Lin and Itamar Friedman and Lihi Zelnik},\n      year={2020},\n      eprint={2002.08258},\n      archivePrefix={arXiv},\n      primaryClass={cs.LG}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: EfficientNet Pruned\n  Paper:\n    Title: Knapsack Pruning with Inner Distillation\n    URL: https://paperswithcode.com/paper/knapsack-pruning-with-inner-distillation\nModels:\n- Name: efficientnet_b1_pruned\n  In Collection: EfficientNet Pruned\n  Metadata:\n    FLOPs: 489653114\n    Parameters: 6330000\n    File Size: 25595162\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: efficientnet_b1_pruned\n    Crop Pct: '0.882'\n    Image Size: '240'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/efficientnet.py#L1208\n  Weights: https://imvl-automl-sh.oss-cn-shanghai.aliyuncs.com/darts/hyperml/hyperml/job_45403/outputs/effnetb1_pruned_9ebb3fe6.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.25%\n      Top 5 Accuracy: 93.84%\n- Name: efficientnet_b2_pruned\n  In Collection: EfficientNet Pruned\n  Metadata:\n    FLOPs: 878133915\n    Parameters: 8310000\n    File Size: 33555005\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation 
Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: efficientnet_b2_pruned\n    Crop Pct: '0.89'\n    Image Size: '260'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/efficientnet.py#L1219\n  Weights: https://imvl-automl-sh.oss-cn-shanghai.aliyuncs.com/darts/hyperml/hyperml/job_45403/outputs/effnetb2_pruned_203f55bc.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.91%\n      Top 5 Accuracy: 94.86%\n- Name: efficientnet_b3_pruned\n  In Collection: EfficientNet Pruned\n  Metadata:\n    FLOPs: 1239590641\n    Parameters: 9860000\n    File Size: 39770812\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: efficientnet_b3_pruned\n    Crop Pct: '0.904'\n    Image Size: '300'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/efficientnet.py#L1230\n  Weights: https://imvl-automl-sh.oss-cn-shanghai.aliyuncs.com/darts/hyperml/hyperml/job_45403/outputs/effnetb3_pruned_5abcc29f.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.86%\n      Top 5 Accuracy: 95.24%\n-->\n"
  },
  {
    "path": "hfdocs/source/models/efficientnet.mdx",
    "content": "# EfficientNet\n\n**EfficientNet** is a convolutional neural network architecture and scaling method that uniformly scales all dimensions of depth/width/resolution using a *compound coefficient*. Unlike conventional practice that arbitrary scales these factors, the EfficientNet scaling method uniformly scales network width, depth, and resolution with a set of fixed scaling coefficients. For example, if we want to use \\\\( 2^N \\\\) times more computational resources, then we can simply increase the network depth by \\\\( \\alpha ^ N \\\\), width by \\\\( \\beta ^ N \\\\), and image size by \\\\( \\gamma ^ N \\\\), where \\\\( \\alpha, \\beta, \\gamma \\\\) are constant coefficients determined by a small grid search on the original small model. EfficientNet uses a compound coefficient \\\\( \\phi \\\\) to uniformly scale network width, depth, and resolution in a principled way.\n\nThe compound scaling method is justified by the intuition that if the input image is bigger, then the network needs more layers to increase the receptive field and more channels to capture more fine-grained patterns on the bigger image.\n\nThe base EfficientNet-B0 network is based on the inverted bottleneck residual blocks of [MobileNetV2](https://paperswithcode.com/method/mobilenetv2), in addition to [squeeze-and-excitation blocks](https://paperswithcode.com/method/squeeze-and-excitation-block).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('efficientnet_b0', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py\n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", 
\"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `efficientnet_b0`. 
You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('efficientnet_b0', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{tan2020efficientnet,\n      title={EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks},\n      author={Mingxing Tan and Quoc V. Le},\n      year={2020},\n      eprint={1905.11946},\n      archivePrefix={arXiv},\n      primaryClass={cs.LG}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: EfficientNet\n  Paper:\n    Title: 'EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks'\n    URL: https://paperswithcode.com/paper/efficientnet-rethinking-model-scaling-for\nModels:\n- Name: efficientnet_b0\n  In Collection: EfficientNet\n  Metadata:\n    FLOPs: 511241564\n    Parameters: 5290000\n    File Size: 21376743\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: efficientnet_b0\n    Layers: 18\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/efficientnet.py#L1002\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b0_ra-3dd342df.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.71%\n      Top 5 Accuracy: 93.52%\n- Name: efficientnet_b1\n  In Collection: EfficientNet\n  Metadata:\n    FLOPs: 909691920\n    Parameters: 7790000\n    File Size: 31502706\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: efficientnet_b1\n    Crop Pct: '0.875'\n    Image Size: '240'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/efficientnet.py#L1011\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b1-533bc792.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.71%\n      Top 5 Accuracy: 94.15%\n- Name: efficientnet_b2\n  In Collection: EfficientNet\n  Metadata:\n    FLOPs: 1265324514\n    Parameters: 9110000\n    File Size: 36788104\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: efficientnet_b2\n    Crop Pct: '0.875'\n    Image Size: '260'\n    Interpolation: bicubic\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/efficientnet.py#L1020\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b2_ra-bcdf34b7.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.38%\n      Top 5 Accuracy: 95.08%\n- Name: efficientnet_b2a\n  In Collection: EfficientNet\n  Metadata:\n    FLOPs: 1452041554\n    Parameters: 9110000\n    File Size: 49369973\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: efficientnet_b2a\n    Crop Pct: '1.0'\n    Image Size: '288'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/efficientnet.py#L1029\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b3_ra2-cf984f9c.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.61%\n      Top 5 Accuracy: 95.32%\n- Name: efficientnet_b3\n  In Collection: EfficientNet\n  Metadata:\n    FLOPs: 2327905920\n    Parameters: 12230000\n    File Size: 49369973\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: efficientnet_b3\n    Crop Pct: '0.904'\n    Image Size: '300'\n    Interpolation: bicubic\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/efficientnet.py#L1038\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b3_ra2-cf984f9c.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 82.08%\n      Top 5 Accuracy: 96.03%\n- Name: efficientnet_b3a\n  In Collection: EfficientNet\n  Metadata:\n    FLOPs: 2600628304\n    Parameters: 12230000\n    File Size: 49369973\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: efficientnet_b3a\n    Crop Pct: '1.0'\n    Image Size: '320'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/efficientnet.py#L1047\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b3_ra2-cf984f9c.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 82.25%\n      Top 5 Accuracy: 96.11%\n- Name: efficientnet_em\n  In Collection: EfficientNet\n  Metadata:\n    FLOPs: 3935516480\n    Parameters: 6900000\n    File Size: 27927309\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: efficientnet_em\n    Crop Pct: '0.882'\n    Image Size: '240'\n    Interpolation: bicubic\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/efficientnet.py#L1118\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_em_ra2-66250f76.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.26%\n      Top 5 Accuracy: 94.79%\n- Name: efficientnet_es\n  In Collection: EfficientNet\n  Metadata:\n    FLOPs: 2317181824\n    Parameters: 5440000\n    File Size: 22003339\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: efficientnet_es\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/efficientnet.py#L1110\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_es_ra-f111e99c.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.09%\n      Top 5 Accuracy: 93.93%\n- Name: efficientnet_lite0\n  In Collection: EfficientNet\n  Metadata:\n    FLOPs: 510605024\n    Parameters: 4650000\n    File Size: 18820005\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: efficientnet_lite0\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/efficientnet.py#L1163\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_lite0_ra-37913777.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 75.5%\n      Top 5 Accuracy: 92.51%\n-->\n"
  },
  {
    "path": "hfdocs/source/models/ensemble-adversarial.mdx",
    "content": "# # Ensemble Adversarial Inception ResNet v2\n\n**Inception-ResNet-v2** is a convolutional neural architecture that builds on the Inception family of architectures but incorporates [residual connections](https://paperswithcode.com/method/residual-connection) (replacing the filter concatenation stage of the Inception architecture).\n\nThis particular model was trained for study of adversarial examples (adversarial training).\n\nThe weights from this model were ported from [Tensorflow/Models](https://github.com/tensorflow/models).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('ens_adv_inception_resnet_v2', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `ens_adv_inception_resnet_v2`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('ens_adv_inception_resnet_v2', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/abs-1804-00097,\n  author    = {Alexey Kurakin and\n               Ian J. Goodfellow and\n               Samy Bengio and\n               Yinpeng Dong and\n               Fangzhou Liao and\n               Ming Liang and\n               Tianyu Pang and\n               Jun Zhu and\n               Xiaolin Hu and\n               Cihang Xie and\n               Jianyu Wang and\n               Zhishuai Zhang and\n               Zhou Ren and\n               Alan L. 
Yuille and\n               Sangxia Huang and\n               Yao Zhao and\n               Yuzhe Zhao and\n               Zhonglin Han and\n               Junjiajia Long and\n               Yerkebulan Berdibekov and\n               Takuya Akiba and\n               Seiya Tokui and\n               Motoki Abe},\n  title     = {Adversarial Attacks and Defences Competition},\n  journal   = {CoRR},\n  volume    = {abs/1804.00097},\n  year      = {2018},\n  url       = {http://arxiv.org/abs/1804.00097},\n  archivePrefix = {arXiv},\n  eprint    = {1804.00097},\n  timestamp = {Thu, 31 Oct 2019 16:31:22 +0100},\n  biburl    = {https://dblp.org/rec/journals/corr/abs-1804-00097.bib},\n  bibsource = {dblp computer science bibliography, https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Ensemble Adversarial\n  Paper:\n    Title: Adversarial Attacks and Defences Competition\n    URL: https://paperswithcode.com/paper/adversarial-attacks-and-defences-competition\nModels:\n- Name: ens_adv_inception_resnet_v2\n  In Collection: Ensemble Adversarial\n  Metadata:\n    FLOPs: 16959133120\n    Parameters: 55850000\n    File Size: 223774238\n    Architecture:\n    - 1x1 Convolution\n    - Auxiliary Classifier\n    - Average Pooling\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inception-v3 Module\n    - Max Pooling\n    - ReLU\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: ens_adv_inception_resnet_v2\n    Crop Pct: '0.897'\n    Image Size: '299'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/inception_resnet_v2.py#L351\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/ens_adv_inception_resnet_v2-2592a550.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 
Accuracy: 1.0%\n      Top 5 Accuracy: 17.32%\n-->"
  },
  {
    "path": "hfdocs/source/models/ese-vovnet.mdx",
    "content": "# ESE-VoVNet\n\n**VoVNet** is a convolutional neural network that seeks to make [DenseNet](https://paperswithcode.com/method/densenet) more efficient by concatenating all features only once in the last feature map, which makes input size constant and enables enlarging new output channel. \n\nRead about [one-shot aggregation here](https://paperswithcode.com/method/one-shot-aggregation).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('ese_vovnet19b_dw', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `ese_vovnet19b_dw`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('ese_vovnet19b_dw', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{lee2019energy,\n      title={An Energy and GPU-Computation Efficient Backbone Network for Real-Time Object Detection}, \n      author={Youngwan Lee and Joong-won Hwang and Sangrok Lee and Yuseok Bae and Jongyoul Park},\n      year={2019},\n      eprint={1904.09730},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: ESE VovNet\n  Paper:\n    Title: 'CenterMask : Real-Time Anchor-Free Instance Segmentation'\n    URL: https://paperswithcode.com/paper/centermask-real-time-anchor-free-instance-1\nModels:\n- Name: ese_vovnet19b_dw\n  In Collection: ESE VovNet\n  Metadata:\n    FLOPs: 1711959904\n    Parameters: 6540000\n    File Size: 26243175\n    Architecture:\n    - Batch Normalization\n    - 
Convolution\n    - Max Pooling\n    - One-Shot Aggregation\n    - ReLU\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: ese_vovnet19b_dw\n    Layers: 19\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/vovnet.py#L361\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/ese_vovnet19b_dw-a8741004.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 76.82%\n      Top 5 Accuracy: 93.28%\n- Name: ese_vovnet39b\n  In Collection: ESE VovNet\n  Metadata:\n    FLOPs: 9089259008\n    Parameters: 24570000\n    File Size: 98397138\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - Max Pooling\n    - One-Shot Aggregation\n    - ReLU\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: ese_vovnet39b\n    Layers: 39\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/vovnet.py#L371\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/ese_vovnet39b-f912fe73.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.31%\n      Top 5 Accuracy: 94.72%\n-->"
  },
  {
    "path": "hfdocs/source/models/fbnet.mdx",
    "content": "# FBNet\n\n**FBNet** is a type of convolutional neural architectures discovered through [DNAS](https://paperswithcode.com/method/dnas) neural architecture search. It utilises a basic type of image model block inspired by [MobileNetv2](https://paperswithcode.com/method/mobilenetv2) that utilises depthwise convolutions and an inverted residual structure (see components).\n\nThe principal building block is the [FBNet Block](https://paperswithcode.com/method/fbnet-block).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('fbnetc_100', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `fbnetc_100`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('fbnetc_100', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{wu2019fbnet,\n      title={FBNet: Hardware-Aware Efficient ConvNet Design via Differentiable Neural Architecture Search}, \n      author={Bichen Wu and Xiaoliang Dai and Peizhao Zhang and Yanghan Wang and Fei Sun and Yiming Wu and Yuandong Tian and Peter Vajda and Yangqing Jia and Kurt Keutzer},\n      year={2019},\n      eprint={1812.03443},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: FBNet\n  Paper:\n    Title: 'FBNet: Hardware-Aware Efficient ConvNet Design via Differentiable Neural\n      Architecture 
Search'\n    URL: https://paperswithcode.com/paper/fbnet-hardware-aware-efficient-convnet-design\nModels:\n- Name: fbnetc_100\n  In Collection: FBNet\n  Metadata:\n    FLOPs: 508940064\n    Parameters: 5570000\n    File Size: 22525094\n    Architecture:\n    - 1x1 Convolution\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - FBNet Block\n    - Global Average Pooling\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x GPUs\n    ID: fbnetc_100\n    LR: 0.1\n    Epochs: 360\n    Layers: 22\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0005\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L985\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/fbnetc_100-c345b898.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 75.12%\n      Top 5 Accuracy: 92.37%\n-->"
  },
  {
    "path": "hfdocs/source/models/gloun-inception-v3.mdx",
    "content": "# (Gluon) Inception v3\n\n**Inception v3** is a convolutional neural network architecture from the Inception family that makes several improvements including using [Label Smoothing](https://paperswithcode.com/method/label-smoothing), Factorized 7 x 7 convolutions, and the use of an [auxiliary classifier](https://paperswithcode.com/method/auxiliary-classifier) to propagate label information lower down the network (along with the use of batch normalization for layers in the sidehead). The key building block is an [Inception Module](https://paperswithcode.com/method/inception-v3-module).\n\nThe weights from this model were ported from [Gluon](https://cv.gluon.ai/model_zoo/classification.html).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('gluon_inception_v3', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     
out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `gluon_inception_v3`. 
You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('gluon_inception_v3', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/SzegedyVISW15,\n  author    = {Christian Szegedy and\n               Vincent Vanhoucke and\n               Sergey Ioffe and\n               Jonathon Shlens and\n               Zbigniew Wojna},\n  title     = {Rethinking the Inception Architecture for Computer Vision},\n  journal   = {CoRR},\n  volume    = {abs/1512.00567},\n  year      = {2015},\n  url       = {http://arxiv.org/abs/1512.00567},\n  archivePrefix = {arXiv},\n  eprint    = {1512.00567},\n  timestamp = {Mon, 13 Aug 2018 16:49:07 +0200},\n  biburl    = {https://dblp.org/rec/journals/corr/SzegedyVISW15.bib},\n  bibsource = {dblp computer science bibliography, https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Gloun Inception v3\n  Paper:\n    Title: Rethinking the Inception Architecture for Computer Vision\n    URL: https://paperswithcode.com/paper/rethinking-the-inception-architecture-for\nModels:\n- Name: gluon_inception_v3\n  In Collection: Gloun Inception v3\n  Metadata:\n    FLOPs: 7352418880\n    Parameters: 23830000\n    File Size: 95567055\n    Architecture:\n    - 1x1 Convolution\n    - Auxiliary Classifier\n    - 
Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inception-v3 Module\n    - Max Pooling\n    - ReLU\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_inception_v3\n    Crop Pct: '0.875'\n    Image Size: '299'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/inception_v3.py#L464\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/gluon_inception_v3-9f746940.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.8%\n      Top 5 Accuracy: 94.38%\n-->"
  },
  {
    "path": "hfdocs/source/models/gloun-resnet.mdx",
    "content": "# (Gluon) ResNet\n\n**Residual Networks**, or **ResNets**, learn residual functions with reference to the layer inputs, instead of learning unreferenced functions. Instead of hoping each few stacked layers directly fit a desired underlying mapping, residual nets let these layers fit a residual mapping. They stack [residual blocks](https://paperswithcode.com/method/residual-block) ontop of each other to form network: e.g. a ResNet-50 has fifty layers using these blocks. \n\nThe weights from this model were ported from [Gluon](https://cv.gluon.ai/model_zoo/classification.html).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('gluon_resnet101_v1b', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `gluon_resnet101_v1b`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('gluon_resnet101_v1b', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/HeZRS15,\n  author    = {Kaiming He and\n               Xiangyu Zhang and\n               Shaoqing Ren and\n               Jian Sun},\n  title     = {Deep Residual Learning for Image Recognition},\n  journal   = {CoRR},\n  volume    = {abs/1512.03385},\n  year      = {2015},\n  url       = {http://arxiv.org/abs/1512.03385},\n  archivePrefix = {arXiv},\n  eprint    = {1512.03385},\n  timestamp = {Wed, 17 Apr 2019 17:23:45 +0200},\n  biburl    = {https://dblp.org/rec/journals/corr/HeZRS15.bib},\n  bibsource = {dblp computer science 
bibliography, https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Gloun ResNet\n  Paper:\n    Title: Deep Residual Learning for Image Recognition\n    URL: https://paperswithcode.com/paper/deep-residual-learning-for-image-recognition\nModels:\n- Name: gluon_resnet101_v1b\n  In Collection: Gloun ResNet\n  Metadata:\n    FLOPs: 10068547584\n    Parameters: 44550000\n    File Size: 178723172\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnet101_v1b\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L89\n  Weights: https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet101_v1b-3b017079.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.3%\n      Top 5 Accuracy: 94.53%\n- Name: gluon_resnet101_v1c\n  In Collection: Gloun ResNet\n  Metadata:\n    FLOPs: 10376567296\n    Parameters: 44570000\n    File Size: 178802575\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnet101_v1c\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L113\n  Weights: 
https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet101_v1c-1f26822a.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.53%\n      Top 5 Accuracy: 94.59%\n- Name: gluon_resnet101_v1d\n  In Collection: Gloun ResNet\n  Metadata:\n    FLOPs: 10377018880\n    Parameters: 44570000\n    File Size: 178802755\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnet101_v1d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L138\n  Weights: https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet101_v1d-0f9c8644.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.4%\n      Top 5 Accuracy: 95.02%\n- Name: gluon_resnet101_v1s\n  In Collection: Gloun ResNet\n  Metadata:\n    FLOPs: 11805511680\n    Parameters: 44670000\n    File Size: 179221777\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnet101_v1s\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L166\n  Weights: 
https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet101_v1s-60fe0cc1.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.29%\n      Top 5 Accuracy: 95.16%\n- Name: gluon_resnet152_v1b\n  In Collection: Gloun ResNet\n  Metadata:\n    FLOPs: 14857660416\n    Parameters: 60190000\n    File Size: 241534001\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnet152_v1b\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L97\n  Weights: https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet152_v1b-c1edb0dd.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.69%\n      Top 5 Accuracy: 94.73%\n- Name: gluon_resnet152_v1c\n  In Collection: Gloun ResNet\n  Metadata:\n    FLOPs: 15165680128\n    Parameters: 60210000\n    File Size: 241613404\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnet152_v1c\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L121\n  Weights: 
https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet152_v1c-a3bb0b98.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.91%\n      Top 5 Accuracy: 94.85%\n- Name: gluon_resnet152_v1d\n  In Collection: Gloun ResNet\n  Metadata:\n    FLOPs: 15166131712\n    Parameters: 60210000\n    File Size: 241613584\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnet152_v1d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L147\n  Weights: https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet152_v1d-bd354e12.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.48%\n      Top 5 Accuracy: 95.2%\n- Name: gluon_resnet152_v1s\n  In Collection: Gloun ResNet\n  Metadata:\n    FLOPs: 16594624512\n    Parameters: 60320000\n    File Size: 242032606\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnet152_v1s\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L175\n  Weights: 
https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet152_v1s-dcc41b81.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 81.02%\n      Top 5 Accuracy: 95.42%\n- Name: gluon_resnet18_v1b\n  In Collection: Gloun ResNet\n  Metadata:\n    FLOPs: 2337073152\n    Parameters: 11690000\n    File Size: 46816736\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnet18_v1b\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L65\n  Weights: https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet18_v1b-0757602b.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 70.84%\n      Top 5 Accuracy: 89.76%\n- Name: gluon_resnet34_v1b\n  In Collection: Gloun ResNet\n  Metadata:\n    FLOPs: 4718469120\n    Parameters: 21800000\n    File Size: 87295112\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnet34_v1b\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L73\n  Weights: 
https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet34_v1b-c6d82d59.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 74.59%\n      Top 5 Accuracy: 92.0%\n- Name: gluon_resnet50_v1b\n  In Collection: Gloun ResNet\n  Metadata:\n    FLOPs: 5282531328\n    Parameters: 25560000\n    File Size: 102493763\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnet50_v1b\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L81\n  Weights: https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet50_v1b-0ebe02e2.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.58%\n      Top 5 Accuracy: 93.72%\n- Name: gluon_resnet50_v1c\n  In Collection: Gloun ResNet\n  Metadata:\n    FLOPs: 5590551040\n    Parameters: 25580000\n    File Size: 102573166\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnet50_v1c\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L105\n  Weights: 
https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet50_v1c-48092f55.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.01%\n      Top 5 Accuracy: 93.99%\n- Name: gluon_resnet50_v1d\n  In Collection: Gloun ResNet\n  Metadata:\n    FLOPs: 5591002624\n    Parameters: 25580000\n    File Size: 102573346\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnet50_v1d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L129\n  Weights: https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet50_v1d-818a1b1b.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.06%\n      Top 5 Accuracy: 94.46%\n- Name: gluon_resnet50_v1s\n  In Collection: Gloun ResNet\n  Metadata:\n    FLOPs: 7019495424\n    Parameters: 25680000\n    File Size: 102992368\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnet50_v1s\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L156\n  Weights: 
https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet50_v1s-1762acc0.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.7%\n      Top 5 Accuracy: 94.25%\n-->"
  },
  {
    "path": "hfdocs/source/models/gloun-resnext.mdx",
    "content": "# (Gluon) ResNeXt\n\nA **ResNeXt** repeats a [building block](https://paperswithcode.com/method/resnext-block) that aggregates a set of transformations with the same topology. Compared to a [ResNet](https://paperswithcode.com/method/resnet), it exposes a new dimension,  *cardinality* (the size of the set of transformations) \\\\( C \\\\), as an essential factor in addition to the dimensions of depth and width.\n\nThe weights from this model were ported from [Gluon](https://cv.gluon.ai/model_zoo/classification.html).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('gluon_resnext101_32x4d', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py\n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `gluon_resnext101_32x4d`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('gluon_resnext101_32x4d', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/XieGDTH16,\n  author    = {Saining Xie and\n               Ross B. 
Girshick and\n               Piotr Doll{\\'{a}}r and\n               Zhuowen Tu and\n               Kaiming He},\n  title     = {Aggregated Residual Transformations for Deep Neural Networks},\n  journal   = {CoRR},\n  volume    = {abs/1611.05431},\n  year      = {2016},\n  url       = {http://arxiv.org/abs/1611.05431},\n  archivePrefix = {arXiv},\n  eprint    = {1611.05431},\n  timestamp = {Mon, 13 Aug 2018 16:45:58 +0200},\n  biburl    = {https://dblp.org/rec/journals/corr/XieGDTH16.bib},\n  bibsource = {dblp computer science bibliography, https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Gloun ResNeXt\n  Paper:\n    Title: Aggregated Residual Transformations for Deep Neural Networks\n    URL: https://paperswithcode.com/paper/aggregated-residual-transformations-for-deep\nModels:\n- Name: gluon_resnext101_32x4d\n  In Collection: Gloun ResNeXt\n  Metadata:\n    FLOPs: 10298145792\n    Parameters: 44180000\n    File Size: 177367414\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnext101_32x4d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L193\n  Weights: https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnext101_32x4d-b253c8c4.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.33%\n      Top 5 Accuracy: 94.91%\n- Name: gluon_resnext101_64x4d\n  In Collection: Gloun ResNeXt\n  Metadata:\n    FLOPs: 19954172928\n    Parameters: 83460000\n    File Size: 334737852\n    Architecture:\n    - 1x1 Convolution\n   
 - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnext101_64x4d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L201\n  Weights: https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnext101_64x4d-f9a8e184.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.63%\n      Top 5 Accuracy: 95.0%\n- Name: gluon_resnext50_32x4d\n  In Collection: Gloun ResNeXt\n  Metadata:\n    FLOPs: 5472648192\n    Parameters: 25030000\n    File Size: 100441719\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_resnext50_32x4d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L185\n  Weights: https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnext50_32x4d-e6a097c1.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.35%\n      Top 5 Accuracy: 94.42%\n-->\n"
  },
  {
    "path": "hfdocs/source/models/gloun-senet.mdx",
    "content": "# (Gluon) SENet\n\nA **SENet** is a convolutional neural network architecture that employs [squeeze-and-excitation blocks](https://paperswithcode.com/method/squeeze-and-excitation-block) to enable the network to perform dynamic channel-wise feature recalibration.\n\nThe weights from this model were ported from [Gluon](https://cv.gluon.ai/model_zoo/classification.html).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('gluon_senet154', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `gluon_senet154`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('gluon_senet154', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{hu2019squeezeandexcitation,\n      title={Squeeze-and-Excitation Networks}, \n      author={Jie Hu and Li Shen and Samuel Albanie and Gang Sun and Enhua Wu},\n      year={2019},\n      eprint={1709.01507},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Gloun SENet\n  Paper:\n    Title: Squeeze-and-Excitation Networks\n    URL: https://paperswithcode.com/paper/squeeze-and-excitation-networks\nModels:\n- Name: gluon_senet154\n  In Collection: Gloun SENet\n  Metadata:\n    FLOPs: 26681705136\n    Parameters: 115090000\n    File Size: 461546622\n    Architecture:\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - Softmax\n    - Squeeze-and-Excitation 
Block\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_senet154\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L239\n  Weights: https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_senet154-70a1a3c0.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 81.23%\n      Top 5 Accuracy: 95.35%\n-->"
  },
  {
    "path": "hfdocs/source/models/gloun-seresnext.mdx",
    "content": "# (Gluon) SE-ResNeXt\n\n**SE ResNeXt** is a variant of a [ResNext](https://www.paperswithcode.com/method/resnext) that employs [squeeze-and-excitation blocks](https://paperswithcode.com/method/squeeze-and-excitation-block) to enable the network to perform dynamic channel-wise feature recalibration.\n\nThe weights from this model were ported from [Gluon](https://cv.gluon.ai/model_zoo/classification.html).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('gluon_seresnext101_32x4d', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `gluon_seresnext101_32x4d`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('gluon_seresnext101_32x4d', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{hu2019squeezeandexcitation,\n      title={Squeeze-and-Excitation Networks}, \n      author={Jie Hu and Li Shen and Samuel Albanie and Gang Sun and Enhua Wu},\n      year={2019},\n      eprint={1709.01507},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Gloun SEResNeXt\n  Paper:\n    Title: Squeeze-and-Excitation Networks\n    URL: https://paperswithcode.com/paper/squeeze-and-excitation-networks\nModels:\n- Name: gluon_seresnext101_32x4d\n  In Collection: Gloun SEResNeXt\n  Metadata:\n    FLOPs: 10302923504\n    Parameters: 48960000\n    File Size: 196505510\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n 
   - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_seresnext101_32x4d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L219\n  Weights: https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_seresnext101_32x4d-cf52900d.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.87%\n      Top 5 Accuracy: 95.29%\n- Name: gluon_seresnext101_64x4d\n  In Collection: Gloun SEResNeXt\n  Metadata:\n    FLOPs: 19958950640\n    Parameters: 88230000\n    File Size: 353875948\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_seresnext101_64x4d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L229\n  Weights: https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_seresnext101_64x4d-f9926f93.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.88%\n      Top 5 Accuracy: 95.31%\n- Name: gluon_seresnext50_32x4d\n  In Collection: Gloun SEResNeXt\n  Metadata:\n    FLOPs: 5475179184\n    Parameters: 27560000\n    File Size: 110578827\n    Architecture:\n    - 1x1 Convolution\n    - Batch 
Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_seresnext50_32x4d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_resnet.py#L209\n  Weights: https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_seresnext50_32x4d-90cf2d6e.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.92%\n      Top 5 Accuracy: 94.82%\n-->"
  },
  {
    "path": "hfdocs/source/models/gloun-xception.mdx",
    "content": "# (Gluon) Xception\n\n**Xception** is a convolutional neural network architecture that relies solely on [depthwise separable convolution](https://paperswithcode.com/method/depthwise-separable-convolution) layers.\n\nThe weights from this model were ported from [Gluon](https://cv.gluon.ai/model_zoo/classification.html).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('gluon_xception65', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `gluon_xception65`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('gluon_xception65', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{chollet2017xception,\n      title={Xception: Deep Learning with Depthwise Separable Convolutions}, \n      author={François Chollet},\n      year={2017},\n      eprint={1610.02357},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Gloun Xception\n  Paper:\n    Title: 'Xception: Deep Learning with Depthwise Separable Convolutions'\n    URL: https://paperswithcode.com/paper/xception-deep-learning-with-depthwise\nModels:\n- Name: gluon_xception65\n  In Collection: Gloun Xception\n  Metadata:\n    FLOPs: 17594889728\n    Parameters: 39920000\n    File Size: 160551306\n    Architecture:\n    - 1x1 Convolution\n    - Convolution\n    - Dense Connections\n    - Depthwise Separable Convolution\n    - 
Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: gluon_xception65\n    Crop Pct: '0.903'\n    Image Size: '299'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/gluon_xception.py#L241\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/gluon_xception-7015a15c.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.7%\n      Top 5 Accuracy: 94.87%\n-->"
  },
  {
    "path": "hfdocs/source/models/hrnet.mdx",
    "content": "# HRNet\n\n**HRNet**, or **High-Resolution Net**, is a general purpose convolutional neural network for tasks like semantic segmentation, object detection and image classification. It is able to maintain high resolution representations through the whole process. We start from a high-resolution convolution stream, gradually add high-to-low resolution convolution streams one by one, and connect the multi-resolution streams in parallel. The resulting network consists of several (\\\\( 4 \\\\) in the paper) stages and the \\\\( n \\\\)th stage contains \\\\( n \\\\) streams corresponding to \\\\( n \\\\) resolutions. The authors conduct repeated multi-resolution fusions by exchanging the information across the parallel streams over and over.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('hrnet_w18', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py\n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     
out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `hrnet_w18`. 
You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('hrnet_w18', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{sun2019highresolution,\n      title={High-Resolution Representations for Labeling Pixels and Regions},\n      author={Ke Sun and Yang Zhao and Borui Jiang and Tianheng Cheng and Bin Xiao and Dong Liu and Yadong Mu and Xinggang Wang and Wenyu Liu and Jingdong Wang},\n      year={2019},\n      eprint={1904.04514},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: HRNet\n  Paper:\n    Title: Deep High-Resolution Representation Learning for Visual Recognition\n    URL: https://paperswithcode.com/paper/190807919\nModels:\n- Name: hrnet_w18\n  In Collection: HRNet\n  Metadata:\n    FLOPs: 5547205500\n    Parameters: 21300000\n    File Size: 85718883\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - ReLU\n    - Residual Connection\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x NVIDIA V100 GPUs\n    ID: hrnet_w18\n    Epochs: 100\n    Layers: 18\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: 
'224'\n    Weight Decay: 0.001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/hrnet.py#L800\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-hrnet/hrnetv2_w18-8cb57bb9.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 76.76%\n      Top 5 Accuracy: 93.44%\n- Name: hrnet_w18_small\n  In Collection: HRNet\n  Metadata:\n    FLOPs: 2071651488\n    Parameters: 13190000\n    File Size: 52934302\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - ReLU\n    - Residual Connection\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x NVIDIA V100 GPUs\n    ID: hrnet_w18_small\n    Epochs: 100\n    Layers: 18\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/hrnet.py#L790\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-hrnet/hrnet_w18_small_v1-f460c6bc.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 72.34%\n      Top 5 Accuracy: 90.68%\n- Name: hrnet_w18_small_v2\n  In Collection: HRNet\n  Metadata:\n    FLOPs: 3360023160\n    Parameters: 15600000\n    File Size: 62682879\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - ReLU\n    - Residual Connection\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x NVIDIA V100 GPUs\n    ID: hrnet_w18_small_v2\n    Epochs: 100\n    Layers: 18\n    
Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/hrnet.py#L795\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-hrnet/hrnet_w18_small_v2-4c50a8cb.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 75.11%\n      Top 5 Accuracy: 92.41%\n- Name: hrnet_w30\n  In Collection: HRNet\n  Metadata:\n    FLOPs: 10474119492\n    Parameters: 37710000\n    File Size: 151452218\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - ReLU\n    - Residual Connection\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x NVIDIA V100 GPUs\n    ID: hrnet_w30\n    Epochs: 100\n    Layers: 30\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/hrnet.py#L805\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-hrnet/hrnetv2_w30-8d7f8dab.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.21%\n      Top 5 Accuracy: 94.22%\n- Name: hrnet_w32\n  In Collection: HRNet\n  Metadata:\n    FLOPs: 11524528320\n    Parameters: 41230000\n    File Size: 165547812\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - ReLU\n    - Residual Connection\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x NVIDIA V100 GPUs\n    
Training Time: 60 hours\n    ID: hrnet_w32\n    Epochs: 100\n    Layers: 32\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/hrnet.py#L810\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-hrnet/hrnetv2_w32-90d8c5fb.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.45%\n      Top 5 Accuracy: 94.19%\n- Name: hrnet_w40\n  In Collection: HRNet\n  Metadata:\n    FLOPs: 16381182192\n    Parameters: 57560000\n    File Size: 230899236\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - ReLU\n    - Residual Connection\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x NVIDIA V100 GPUs\n    ID: hrnet_w40\n    Epochs: 100\n    Layers: 40\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/hrnet.py#L815\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-hrnet/hrnetv2_w40-7cd397a4.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.93%\n      Top 5 Accuracy: 94.48%\n- Name: hrnet_w44\n  In Collection: HRNet\n  Metadata:\n    FLOPs: 19202520264\n    Parameters: 67060000\n    File Size: 268957432\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - ReLU\n    - Residual Connection\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training 
Data:\n    - ImageNet\n    Training Resources: 4x NVIDIA V100 GPUs\n    ID: hrnet_w44\n    Epochs: 100\n    Layers: 44\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/hrnet.py#L820\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-hrnet/hrnetv2_w44-c9ac8c18.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.89%\n      Top 5 Accuracy: 94.37%\n- Name: hrnet_w48\n  In Collection: HRNet\n  Metadata:\n    FLOPs: 22285865760\n    Parameters: 77470000\n    File Size: 310603710\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - ReLU\n    - Residual Connection\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x NVIDIA V100 GPUs\n    Training Time: 80 hours\n    ID: hrnet_w48\n    Epochs: 100\n    Layers: 48\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/hrnet.py#L825\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-hrnet/hrnetv2_w48-abd2e6ab.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.32%\n      Top 5 Accuracy: 94.51%\n- Name: hrnet_w64\n  In Collection: HRNet\n  Metadata:\n    FLOPs: 37239321984\n    Parameters: 128060000\n    File Size: 513071818\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - ReLU\n    - Residual Connection\n    Tasks:\n    - Image Classification\n    Training 
Techniques:\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x NVIDIA V100 GPUs\n    ID: hrnet_w64\n    Epochs: 100\n    Layers: 64\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/hrnet.py#L830\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-hrnet/hrnetv2_w64-b47cc881.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.46%\n      Top 5 Accuracy: 94.65%\n-->\n"
  },
  {
    "path": "hfdocs/source/models/ig-resnext.mdx",
    "content": "# Instagram ResNeXt WSL\n\nA **ResNeXt** repeats a [building block](https://paperswithcode.com/method/resnext-block) that aggregates a set of transformations with the same topology. Compared to a [ResNet](https://paperswithcode.com/method/resnet), it exposes a new dimension,  *cardinality* (the size of the set of transformations) \\\\( C \\\\), as an essential factor in addition to the dimensions of depth and width.\n\nThis model was trained on billions of Instagram images using thousands of distinct hashtags as labels exhibit excellent transfer learning performance.\n\nPlease note the CC-BY-NC 4.0 license on theses weights, non-commercial use only.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('ig_resnext101_32x16d', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py\n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     
out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `ig_resnext101_32x16d`. 
You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('ig_resnext101_32x16d', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{mahajan2018exploring,\n      title={Exploring the Limits of Weakly Supervised Pretraining},\n      author={Dhruv Mahajan and Ross Girshick and Vignesh Ramanathan and Kaiming He and Manohar Paluri and Yixuan Li and Ashwin Bharambe and Laurens van der Maaten},\n      year={2018},\n      eprint={1805.00932},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: IG ResNeXt\n  Paper:\n    Title: Exploring the Limits of Weakly Supervised Pretraining\n    URL: https://paperswithcode.com/paper/exploring-the-limits-of-weakly-supervised\nModels:\n- Name: ig_resnext101_32x16d\n  In Collection: IG ResNeXt\n  Metadata:\n    FLOPs: 46623691776\n    Parameters: 194030000\n    File Size: 777518664\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - 
IG-3.5B-17k\n    - ImageNet\n    Training Resources: 336x GPUs\n    ID: ig_resnext101_32x16d\n    Epochs: 100\n    Layers: 101\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 8064\n    Image Size: '224'\n    Weight Decay: 0.001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnet.py#L874\n  Weights: https://download.pytorch.org/models/ig_resnext101_32x16-c6f796b0.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 84.16%\n      Top 5 Accuracy: 97.19%\n- Name: ig_resnext101_32x32d\n  In Collection: IG ResNeXt\n  Metadata:\n    FLOPs: 112225170432\n    Parameters: 468530000\n    File Size: 1876573776\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - IG-3.5B-17k\n    - ImageNet\n    Training Resources: 336x GPUs\n    ID: ig_resnext101_32x32d\n    Epochs: 100\n    Layers: 101\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 8064\n    Image Size: '224'\n    Weight Decay: 0.001\n    Interpolation: bilinear\n    Minibatch Size: 8064\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnet.py#L885\n  Weights: https://download.pytorch.org/models/ig_resnext101_32x32-e4b90b00.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 85.09%\n      Top 5 Accuracy: 97.44%\n- Name: ig_resnext101_32x48d\n  In Collection: IG ResNeXt\n  Metadata:\n    FLOPs: 197446554624\n    Parameters: 828410000\n    File Size: 3317136976\n    Architecture:\n    - 1x1 
Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - IG-3.5B-17k\n    - ImageNet\n    Training Resources: 336x GPUs\n    ID: ig_resnext101_32x48d\n    Epochs: 100\n    Layers: 101\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 8064\n    Image Size: '224'\n    Weight Decay: 0.001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnet.py#L896\n  Weights: https://download.pytorch.org/models/ig_resnext101_32x48-3e41cc8a.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 85.42%\n      Top 5 Accuracy: 97.58%\n- Name: ig_resnext101_32x8d\n  In Collection: IG ResNeXt\n  Metadata:\n    FLOPs: 21180417024\n    Parameters: 88790000\n    File Size: 356056638\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - IG-3.5B-17k\n    - ImageNet\n    Training Resources: 336x GPUs\n    ID: ig_resnext101_32x8d\n    Epochs: 100\n    Layers: 101\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 8064\n    Image Size: '224'\n    Weight Decay: 0.001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnet.py#L863\n  Weights: https://download.pytorch.org/models/ig_resnext101_32x8-c38310e5.pth\n  
Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 82.7%\n      Top 5 Accuracy: 96.64%\n-->\n"
  },
  {
    "path": "hfdocs/source/models/inception-resnet-v2.mdx",
    "content": "# Inception ResNet v2\n\n**Inception-ResNet-v2** is a convolutional neural architecture that builds on the Inception family of architectures but incorporates [residual connections](https://paperswithcode.com/method/residual-connection) (replacing the filter concatenation stage of the Inception architecture).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('inception_resnet_v2', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `inception_resnet_v2`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('inception_resnet_v2', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{szegedy2016inceptionv4,\n      title={Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning}, \n      author={Christian Szegedy and Sergey Ioffe and Vincent Vanhoucke and Alex Alemi},\n      year={2016},\n      eprint={1602.07261},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Inception ResNet v2\n  Paper:\n    Title: Inception-v4, Inception-ResNet and the Impact of Residual Connections on\n      Learning\n    URL: https://paperswithcode.com/paper/inception-v4-inception-resnet-and-the-impact\nModels:\n- Name: inception_resnet_v2\n  In Collection: Inception ResNet v2\n  Metadata:\n    FLOPs: 16959133120\n    Parameters: 55850000\n    File Size: 223774238\n  
  Architecture:\n    - Average Pooling\n    - Dropout\n    - Inception-ResNet-v2 Reduction-B\n    - Inception-ResNet-v2-A\n    - Inception-ResNet-v2-B\n    - Inception-ResNet-v2-C\n    - Reduction-A\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 20x NVIDIA Kepler GPUs\n    ID: inception_resnet_v2\n    LR: 0.045\n    Dropout: 0.2\n    Crop Pct: '0.897'\n    Momentum: 0.9\n    Image Size: '299'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/inception_resnet_v2.py#L343\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/inception_resnet_v2-940b1cd6.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.46%\n      Top 5 Accuracy: 95.31%\n-->"
  },
  {
    "path": "hfdocs/source/models/inception-v3.mdx",
    "content": "# Inception v3\n\n**Inception v3** is a convolutional neural network architecture from the Inception family that makes several improvements including using [Label Smoothing](https://paperswithcode.com/method/label-smoothing), Factorized 7 x 7 convolutions, and the use of an [auxiliary classifier](https://paperswithcode.com/method/auxiliary-classifier) to propagate label information lower down the network (along with the use of batch normalization for layers in the sidehead). The key building block is an [Inception Module](https://paperswithcode.com/method/inception-v3-module).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('inception_v3', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `inception_v3`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('inception_v3', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/SzegedyVISW15,\n  author    = {Christian Szegedy and\n               Vincent Vanhoucke and\n               Sergey Ioffe and\n               Jonathon Shlens and\n               Zbigniew Wojna},\n  title     = {Rethinking the Inception Architecture for Computer Vision},\n  journal   = {CoRR},\n  volume    = {abs/1512.00567},\n  year      = {2015},\n  url       = {http://arxiv.org/abs/1512.00567},\n  archivePrefix = {arXiv},\n  eprint    = {1512.00567},\n  timestamp = {Mon, 13 Aug 2018 16:49:07 +0200},\n  biburl    = 
{https://dblp.org/rec/journals/corr/SzegedyVISW15.bib},\n  bibsource = {dblp computer science bibliography, https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Inception v3\n  Paper:\n    Title: Rethinking the Inception Architecture for Computer Vision\n    URL: https://paperswithcode.com/paper/rethinking-the-inception-architecture-for\nModels:\n- Name: inception_v3\n  In Collection: Inception v3\n  Metadata:\n    FLOPs: 7352418880\n    Parameters: 23830000\n    File Size: 108857766\n    Architecture:\n    - 1x1 Convolution\n    - Auxiliary Classifier\n    - Average Pooling\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inception-v3 Module\n    - Max Pooling\n    - ReLU\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Gradient Clipping\n    - Label Smoothing\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 50x NVIDIA Kepler GPUs\n    ID: inception_v3\n    LR: 0.045\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Image Size: '299'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/inception_v3.py#L442\n  Weights: https://download.pytorch.org/models/inception_v3_google-1a9a5a14.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.46%\n      Top 5 Accuracy: 93.48%\n-->"
  },
  {
    "path": "hfdocs/source/models/inception-v4.mdx",
    "content": "# Inception v4\n\n**Inception-v4** is a convolutional neural network architecture that builds on previous iterations of the Inception family by simplifying the architecture and using more inception modules than [Inception-v3](https://paperswithcode.com/method/inception-v3).\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('inception_v4', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `inception_v4`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('inception_v4', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{szegedy2016inceptionv4,\n      title={Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning}, \n      author={Christian Szegedy and Sergey Ioffe and Vincent Vanhoucke and Alex Alemi},\n      year={2016},\n      eprint={1602.07261},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Inception v4\n  Paper:\n    Title: Inception-v4, Inception-ResNet and the Impact of Residual Connections on\n      Learning\n    URL: https://paperswithcode.com/paper/inception-v4-inception-resnet-and-the-impact\nModels:\n- Name: inception_v4\n  In Collection: Inception v4\n  Metadata:\n    FLOPs: 15806527936\n    Parameters: 42680000\n    File Size: 171082495\n    Architecture:\n    - Average 
Pooling\n    - Dropout\n    - Inception-A\n    - Inception-B\n    - Inception-C\n    - Reduction-A\n    - Reduction-B\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 20x NVIDIA Kepler GPUs\n    ID: inception_v4\n    LR: 0.045\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Image Size: '299'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/inception_v4.py#L313\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-cadene/inceptionv4-8e4777a0.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.17%\n      Top 5 Accuracy: 94.97%\n-->"
  },
  {
    "path": "hfdocs/source/models/legacy-se-resnet.mdx",
    "content": "# (Legacy) SE-ResNet\n\n**SE ResNet** is a variant of a [ResNet](https://www.paperswithcode.com/method/resnet) that employs [squeeze-and-excitation blocks](https://paperswithcode.com/method/squeeze-and-excitation-block) to enable the network to perform dynamic channel-wise feature recalibration.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('legacy_seresnet101', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `legacy_seresnet101`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('legacy_seresnet101', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{hu2019squeezeandexcitation,\n      title={Squeeze-and-Excitation Networks}, \n      author={Jie Hu and Li Shen and Samuel Albanie and Gang Sun and Enhua Wu},\n      year={2019},\n      eprint={1709.01507},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Legacy SE ResNet\n  Paper:\n    Title: Squeeze-and-Excitation Networks\n    URL: https://paperswithcode.com/paper/squeeze-and-excitation-networks\nModels:\n- Name: legacy_seresnet101\n  In Collection: Legacy SE ResNet\n  Metadata:\n    FLOPs: 9762614000\n    Parameters: 49330000\n    File Size: 197822624\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global 
Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA Titan X GPUs\n    ID: legacy_seresnet101\n    LR: 0.6\n    Epochs: 100\n    Layers: 101\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/senet.py#L426\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-cadene/se_resnet101-7e38fcc6.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.38%\n      Top 5 Accuracy: 94.26%\n- Name: legacy_seresnet152\n  In Collection: Legacy SE ResNet\n  Metadata:\n    FLOPs: 14553578160\n    Parameters: 66819999\n    File Size: 268033864\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA Titan X GPUs\n    ID: legacy_seresnet152\n    LR: 0.6\n    Epochs: 100\n    Layers: 152\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/senet.py#L433\n  Weights: 
https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-cadene/se_resnet152-d17c99b7.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.67%\n      Top 5 Accuracy: 94.38%\n- Name: legacy_seresnet18\n  In Collection: Legacy SE ResNet\n  Metadata:\n    FLOPs: 2328876024\n    Parameters: 11780000\n    File Size: 47175663\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA Titan X GPUs\n    ID: legacy_seresnet18\n    LR: 0.6\n    Epochs: 100\n    Layers: 18\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/senet.py#L405\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnet18-4bb0ce65.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 71.74%\n      Top 5 Accuracy: 90.34%\n- Name: legacy_seresnet34\n  In Collection: Legacy SE ResNet\n  Metadata:\n    FLOPs: 4706201004\n    Parameters: 21960000\n    File Size: 87958697\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - 
Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA Titan X GPUs\n    ID: legacy_seresnet34\n    LR: 0.6\n    Epochs: 100\n    Layers: 34\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/senet.py#L412\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnet34-a4004e63.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 74.79%\n      Top 5 Accuracy: 92.13%\n- Name: legacy_seresnet50\n  In Collection: Legacy SE ResNet\n  Metadata:\n    FLOPs: 4974351024\n    Parameters: 28090000\n    File Size: 112611220\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA Titan X GPUs\n    ID: legacy_seresnet50\n    LR: 0.6\n    Epochs: 100\n    Layers: 50\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Image Size: '224'\n    Interpolation: bilinear\n    Minibatch Size: 1024\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/senet.py#L419\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-cadene/se_resnet50-ce0d4300.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.64%\n      Top 5 Accuracy: 93.74%\n-->"
  },
  {
    "path": "hfdocs/source/models/legacy-se-resnext.mdx",
    "content": "# (Legacy) SE-ResNeXt\n\n**SE ResNeXt** is a variant of a [ResNeXt](https://www.paperswithcode.com/method/resnext) that employs [squeeze-and-excitation blocks](https://paperswithcode.com/method/squeeze-and-excitation-block) to enable the network to perform dynamic channel-wise feature recalibration.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('legacy_seresnext101_32x4d', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `legacy_seresnext101_32x4d`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('legacy_seresnext101_32x4d', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{hu2019squeezeandexcitation,\n      title={Squeeze-and-Excitation Networks}, \n      author={Jie Hu and Li Shen and Samuel Albanie and Gang Sun and Enhua Wu},\n      year={2019},\n      eprint={1709.01507},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Legacy SE ResNeXt\n  Paper:\n    Title: Squeeze-and-Excitation Networks\n    URL: https://paperswithcode.com/paper/squeeze-and-excitation-networks\nModels:\n- Name: legacy_seresnext101_32x4d\n  In Collection: Legacy SE ResNeXt\n  Metadata:\n    FLOPs: 10287698672\n    Parameters: 48960000\n    File Size: 196466866\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average 
Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA Titan X GPUs\n    ID: legacy_seresnext101_32x4d\n    LR: 0.6\n    Epochs: 100\n    Layers: 101\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/senet.py#L462\n  Weights: http://data.lip6.fr/cadene/pretrainedmodels/se_resnext101_32x4d-3b2fe3d8.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.23%\n      Top 5 Accuracy: 95.02%\n- Name: legacy_seresnext26_32x4d\n  In Collection: Legacy SE ResNeXt\n  Metadata:\n    FLOPs: 3187342304\n    Parameters: 16790000\n    File Size: 67346327\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA Titan X GPUs\n    ID: legacy_seresnext26_32x4d\n    LR: 0.6\n    Epochs: 100\n    Layers: 26\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/senet.py#L448\n  Weights: 
https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnext26_32x4d-65ebdb501.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.11%\n      Top 5 Accuracy: 93.31%\n- Name: legacy_seresnext50_32x4d\n  In Collection: Legacy SE ResNeXt\n  Metadata:\n    FLOPs: 5459954352\n    Parameters: 27560000\n    File Size: 110559176\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA Titan X GPUs\n    ID: legacy_seresnext50_32x4d\n    LR: 0.6\n    Epochs: 100\n    Layers: 50\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/senet.py#L455\n  Weights: http://data.lip6.fr/cadene/pretrainedmodels/se_resnext50_32x4d-a260b3a4.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.08%\n      Top 5 Accuracy: 94.43%\n-->"
  },
  {
    "path": "hfdocs/source/models/legacy-senet.mdx",
    "content": "# (Legacy) SENet\n\nA **SENet** is a convolutional neural network architecture that employs [squeeze-and-excitation blocks](https://paperswithcode.com/method/squeeze-and-excitation-block) to enable the network to perform dynamic channel-wise feature recalibration.\n\nThe weights from this model were ported from Gluon.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('legacy_senet154', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `legacy_senet154`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('legacy_senet154', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{hu2019squeezeandexcitation,\n      title={Squeeze-and-Excitation Networks}, \n      author={Jie Hu and Li Shen and Samuel Albanie and Gang Sun and Enhua Wu},\n      year={2019},\n      eprint={1709.01507},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Legacy SENet\n  Paper:\n    Title: Squeeze-and-Excitation Networks\n    URL: https://paperswithcode.com/paper/squeeze-and-excitation-networks\nModels:\n- Name: legacy_senet154\n  In Collection: Legacy SENet\n  Metadata:\n    FLOPs: 26659556016\n    Parameters: 115090000\n    File Size: 461488402\n    Architecture:\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - Softmax\n    - 
Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA Titan X GPUs\n    ID: legacy_senet154\n    LR: 0.6\n    Epochs: 100\n    Layers: 154\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/senet.py#L440\n  Weights: http://data.lip6.fr/cadene/pretrainedmodels/senet154-c7b49a05.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 81.33%\n      Top 5 Accuracy: 95.51%\n-->"
  },
  {
    "path": "hfdocs/source/models/mixnet.mdx",
    "content": "# MixNet\n\n**MixNet** is a type of convolutional neural network discovered via AutoML that utilises [MixConvs](https://paperswithcode.com/method/mixconv) instead of regular [depthwise convolutions](https://paperswithcode.com/method/depthwise-convolution).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('mixnet_l', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `mixnet_l`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('mixnet_l', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{tan2019mixconv,\n      title={MixConv: Mixed Depthwise Convolutional Kernels}, \n      author={Mingxing Tan and Quoc V. 
Le},\n      year={2019},\n      eprint={1907.09595},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: MixNet\n  Paper:\n    Title: 'MixConv: Mixed Depthwise Convolutional Kernels'\n    URL: https://paperswithcode.com/paper/mixnet-mixed-depthwise-convolutional-kernels\nModels:\n- Name: mixnet_l\n  In Collection: MixNet\n  Metadata:\n    FLOPs: 738671316\n    Parameters: 7330000\n    File Size: 29608232\n    Architecture:\n    - Batch Normalization\n    - Dense Connections\n    - Dropout\n    - Global Average Pooling\n    - Grouped Convolution\n    - MixConv\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - MNAS\n    Training Data:\n    - ImageNet\n    ID: mixnet_l\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1669\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_l-5a9a2ed8.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.98%\n      Top 5 Accuracy: 94.18%\n- Name: mixnet_m\n  In Collection: MixNet\n  Metadata:\n    FLOPs: 454543374\n    Parameters: 5010000\n    File Size: 20298347\n    Architecture:\n    - Batch Normalization\n    - Dense Connections\n    - Dropout\n    - Global Average Pooling\n    - Grouped Convolution\n    - MixConv\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - MNAS\n    Training Data:\n    - ImageNet\n    ID: mixnet_m\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1660\n  Weights: 
https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_m-4647fc68.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.27%\n      Top 5 Accuracy: 93.42%\n- Name: mixnet_s\n  In Collection: MixNet\n  Metadata:\n    FLOPs: 321264910\n    Parameters: 4130000\n    File Size: 16727982\n    Architecture:\n    - Batch Normalization\n    - Dense Connections\n    - Dropout\n    - Global Average Pooling\n    - Grouped Convolution\n    - MixConv\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - MNAS\n    Training Data:\n    - ImageNet\n    ID: mixnet_s\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1651\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_s-a907afbc.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 75.99%\n      Top 5 Accuracy: 92.79%\n- Name: mixnet_xl\n  In Collection: MixNet\n  Metadata:\n    FLOPs: 1195880424\n    Parameters: 11900000\n    File Size: 48001170\n    Architecture:\n    - Batch Normalization\n    - Dense Connections\n    - Dropout\n    - Global Average Pooling\n    - Grouped Convolution\n    - MixConv\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - MNAS\n    Training Data:\n    - ImageNet\n    ID: mixnet_xl\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1678\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_xl_ra-aac3c00c.pth\n  Results:\n  - 
Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.47%\n      Top 5 Accuracy: 94.93%\n-->"
  },
  {
    "path": "hfdocs/source/models/mnasnet.mdx",
    "content": "# MnasNet\n\n**MnasNet** is a type of convolutional neural network optimized for mobile devices that is discovered through mobile neural architecture search, which explicitly incorporates model latency into the main objective so that the search can identify a model that achieves a good trade-off between accuracy and latency. The main building block is an [inverted residual block](https://paperswithcode.com/method/inverted-residual-block) (from [MobileNetV2](https://paperswithcode.com/method/mobilenetv2)).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('mnasnet_100', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `mnasnet_100`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('mnasnet_100', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{tan2019mnasnet,\n      title={MnasNet: Platform-Aware Neural Architecture Search for Mobile}, \n      author={Mingxing Tan and Bo Chen and Ruoming Pang and Vijay Vasudevan and Mark Sandler and Andrew Howard and Quoc V. 
Le},\n      year={2019},\n      eprint={1807.11626},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: MNASNet\n  Paper:\n    Title: 'MnasNet: Platform-Aware Neural Architecture Search for Mobile'\n    URL: https://paperswithcode.com/paper/mnasnet-platform-aware-neural-architecture\nModels:\n- Name: mnasnet_100\n  In Collection: MNASNet\n  Metadata:\n    FLOPs: 416415488\n    Parameters: 4380000\n    File Size: 17731774\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Depthwise Separable Convolution\n    - Dropout\n    - Global Average Pooling\n    - Inverted Residual Block\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: mnasnet_100\n    Layers: 100\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 4000\n    Image Size: '224'\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L894\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mnasnet_b1-74cb7081.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 74.67%\n      Top 5 Accuracy: 92.1%\n- Name: semnasnet_100\n  In Collection: MNASNet\n  Metadata:\n    FLOPs: 414570766\n    Parameters: 3890000\n    File Size: 15731489\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Depthwise Separable Convolution\n    - Dropout\n    - Global Average Pooling\n    - Inverted Residual Block\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training 
Data:\n    - ImageNet\n    ID: semnasnet_100\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L928\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mnasnet_a1-d9418771.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 75.45%\n      Top 5 Accuracy: 92.61%\n-->"
  },
  {
    "path": "hfdocs/source/models/mobilenet-v2.mdx",
    "content": "# MobileNet v2\n\n**MobileNetV2** is a convolutional neural network architecture that seeks to perform well on mobile devices. It is based on an [inverted residual structure](https://paperswithcode.com/method/inverted-residual-block) where the residual connections are between the bottleneck layers.  The intermediate expansion layer uses lightweight depthwise convolutions to filter features as a source of non-linearity. As a whole, the architecture of MobileNetV2 contains the initial fully convolution layer with 32 filters, followed by 19 residual bottleneck layers.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('mobilenetv2_100', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `mobilenetv2_100`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('mobilenetv2_100', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/abs-1801-04381,\n  author    = {Mark Sandler and\n               Andrew G. 
Howard and\n               Menglong Zhu and\n               Andrey Zhmoginov and\n               Liang{-}Chieh Chen},\n  title     = {Inverted Residuals and Linear Bottlenecks: Mobile Networks for Classification,\n               Detection and Segmentation},\n  journal   = {CoRR},\n  volume    = {abs/1801.04381},\n  year      = {2018},\n  url       = {http://arxiv.org/abs/1801.04381},\n  archivePrefix = {arXiv},\n  eprint    = {1801.04381},\n  timestamp = {Tue, 12 Jan 2021 15:30:06 +0100},\n  biburl    = {https://dblp.org/rec/journals/corr/abs-1801-04381.bib},\n  bibsource = {dblp computer science bibliography, https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: MobileNet V2\n  Paper:\n    Title: 'MobileNetV2: Inverted Residuals and Linear Bottlenecks'\n    URL: https://paperswithcode.com/paper/mobilenetv2-inverted-residuals-and-linear\nModels:\n- Name: mobilenetv2_100\n  In Collection: MobileNet V2\n  Metadata:\n    FLOPs: 401920448\n    Parameters: 3500000\n    File Size: 14202571\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Depthwise Separable Convolution\n    - Dropout\n    - Inverted Residual Block\n    - Max Pooling\n    - ReLU6\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 16x GPUs\n    ID: mobilenetv2_100\n    LR: 0.045\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1536\n    Image Size: '224'\n    Weight Decay: 4.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L955\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_100_ra-b33bc2c4.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 
1 Accuracy: 72.95%\n      Top 5 Accuracy: 91.0%\n- Name: mobilenetv2_110d\n  In Collection: MobileNet V2\n  Metadata:\n    FLOPs: 573958832\n    Parameters: 4520000\n    File Size: 18316431\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Depthwise Separable Convolution\n    - Dropout\n    - Inverted Residual Block\n    - Max Pooling\n    - ReLU6\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 16x GPUs\n    ID: mobilenetv2_110d\n    LR: 0.045\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1536\n    Image Size: '224'\n    Weight Decay: 4.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L969\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_110d_ra-77090ade.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 75.05%\n      Top 5 Accuracy: 92.19%\n- Name: mobilenetv2_120d\n  In Collection: MobileNet V2\n  Metadata:\n    FLOPs: 888510048\n    Parameters: 5830000\n    File Size: 23651121\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Depthwise Separable Convolution\n    - Dropout\n    - Inverted Residual Block\n    - Max Pooling\n    - ReLU6\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 16x GPUs\n    ID: mobilenetv2_120d\n    LR: 0.045\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1536\n    Image Size: '224'\n    Weight Decay: 4.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L977\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_120d_ra-5987e2ed.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.28%\n      Top 5 Accuracy: 93.51%\n- Name: mobilenetv2_140\n  In Collection: MobileNet V2\n  Metadata:\n    FLOPs: 770196784\n    Parameters: 6110000\n    File Size: 24673555\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Depthwise Separable Convolution\n    - Dropout\n    - Inverted Residual Block\n    - Max Pooling\n    - ReLU6\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 16x GPUs\n    ID: mobilenetv2_140\n    LR: 0.045\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1536\n    Image Size: '224'\n    Weight Decay: 4.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L962\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_140_ra-21a4e913.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 76.51%\n      Top 5 Accuracy: 93.0%\n-->"
  },
  {
    "path": "hfdocs/source/models/mobilenet-v3.mdx",
    "content": "# MobileNet v3\n\n**MobileNetV3** is a convolutional neural network that is designed for mobile phone CPUs. The network design includes the use of a [hard swish activation](https://paperswithcode.com/method/hard-swish) and [squeeze-and-excitation](https://paperswithcode.com/method/squeeze-and-excitation-block) modules in the [MBConv blocks](https://paperswithcode.com/method/inverted-residual-block).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('mobilenetv3_large_100', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `mobilenetv3_large_100`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('mobilenetv3_large_100', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/abs-1905-02244,\n  author    = {Andrew Howard and\n               Mark Sandler and\n               Grace Chu and\n               Liang{-}Chieh Chen and\n               Bo Chen and\n               Mingxing Tan and\n               Weijun Wang and\n               Yukun Zhu and\n               Ruoming Pang and\n               Vijay Vasudevan and\n               Quoc V. 
Le and\n               Hartwig Adam},\n  title     = {Searching for MobileNetV3},\n  journal   = {CoRR},\n  volume    = {abs/1905.02244},\n  year      = {2019},\n  url       = {http://arxiv.org/abs/1905.02244},\n  archivePrefix = {arXiv},\n  eprint    = {1905.02244},\n  timestamp = {Tue, 12 Jan 2021 15:30:06 +0100},\n  biburl    = {https://dblp.org/rec/journals/corr/abs-1905-02244.bib},\n  bibsource = {dblp computer science bibliography, https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: MobileNet V3\n  Paper:\n    Title: Searching for MobileNetV3\n    URL: https://paperswithcode.com/paper/searching-for-mobilenetv3\nModels:\n- Name: mobilenetv3_large_100\n  In Collection: MobileNet V3\n  Metadata:\n    FLOPs: 287193752\n    Parameters: 5480000\n    File Size: 22076443\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Depthwise Separable Convolution\n    - Dropout\n    - Global Average Pooling\n    - Hard Swish\n    - Inverted Residual Block\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x4 TPU Pod\n    ID: mobilenetv3_large_100\n    LR: 0.1\n    Dropout: 0.8\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 4096\n    Image Size: '224'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/mobilenetv3.py#L363\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_large_100_ra-f55367f5.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 75.77%\n      Top 5 Accuracy: 92.54%\n- Name: mobilenetv3_rw\n  In Collection: MobileNet V3\n  Metadata:\n    
FLOPs: 287190638\n    Parameters: 5480000\n    File Size: 22064048\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Depthwise Separable Convolution\n    - Dropout\n    - Global Average Pooling\n    - Hard Swish\n    - Inverted Residual Block\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x4 TPU Pod\n    ID: mobilenetv3_rw\n    LR: 0.1\n    Dropout: 0.8\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 4096\n    Image Size: '224'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/mobilenetv3.py#L384\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_100-35495452.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 75.62%\n      Top 5 Accuracy: 92.71%\n-->"
  },
  {
    "path": "hfdocs/source/models/nasnet.mdx",
    "content": "# NASNet\n\n**NASNet** is a type of convolutional neural network discovered through neural architecture search. The building blocks consist of normal and reduction cells.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('nasnetalarge', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py\n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `nasnetalarge`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('nasnetalarge', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{zoph2018learning,\n      title={Learning Transferable Architectures for Scalable Image Recognition},\n      author={Barret Zoph and Vijay Vasudevan and Jonathon Shlens and Quoc V. 
Le},\n      year={2018},\n      eprint={1707.07012},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: NASNet\n  Paper:\n    Title: Learning Transferable Architectures for Scalable Image Recognition\n    URL: https://paperswithcode.com/paper/learning-transferable-architectures-for\nModels:\n- Name: nasnetalarge\n  In Collection: NASNet\n  Metadata:\n    FLOPs: 30242402862\n    Parameters: 88750000\n    File Size: 356056626\n    Architecture:\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Depthwise Separable Convolution\n    - Dropout\n    - ReLU\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 50x Tesla K40 GPUs\n    ID: nasnetalarge\n    Dropout: 0.5\n    Crop Pct: '0.911'\n    Momentum: 0.9\n    Image Size: '331'\n    Interpolation: bicubic\n    Label Smoothing: 0.1\n    RMSProp \\\\( \\epsilon \\\\): 1.0\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/nasnet.py#L562\n  Weights: http://data.lip6.fr/cadene/pretrainedmodels/nasnetalarge-a1897284.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 82.63%\n      Top 5 Accuracy: 96.05%\n-->\n"
  },
  {
    "path": "hfdocs/source/models/noisy-student.mdx",
    "content": "# Noisy Student (EfficientNet)\n\n**Noisy Student Training** is a semi-supervised learning approach. It extends the idea of self-training\nand distillation with the use of equal-or-larger student models and noise added to the student during learning. It has three main steps: \n\n1. train a teacher model on labeled images\n2. use the teacher to generate pseudo labels on unlabeled images\n3. train a student model on the combination of labeled images and pseudo labeled images. \n\nThe algorithm is iterated a few times by treating the student as a teacher to relabel the unlabeled data and training a new student.\n\nNoisy Student Training seeks to improve on self-training and distillation in two ways. First, it makes the student larger than, or at least equal to, the teacher so the student can better learn from a larger dataset. Second, it adds noise to the student so the noised student is forced to learn harder from the pseudo labels. To noise the student, it uses input noise such as RandAugment data augmentation, and model noise such as dropout and stochastic depth during training.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('tf_efficientnet_b0_ns', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     
out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `tf_efficientnet_b0_ns`. 
You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('tf_efficientnet_b0_ns', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{xie2020selftraining,\n      title={Self-training with Noisy Student improves ImageNet classification}, \n      author={Qizhe Xie and Minh-Thang Luong and Eduard Hovy and Quoc V. 
Le},\n      year={2020},\n      eprint={1911.04252},\n      archivePrefix={arXiv},\n      primaryClass={cs.LG}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Noisy Student\n  Paper:\n    Title: Self-training with Noisy Student improves ImageNet classification\n    URL: https://paperswithcode.com/paper/self-training-with-noisy-student-improves\nModels:\n- Name: tf_efficientnet_b0_ns\n  In Collection: Noisy Student\n  Metadata:\n    FLOPs: 488688572\n    Parameters: 5290000\n    File Size: 21386709\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - FixRes\n    - Label Smoothing\n    - Noisy Student\n    - RMSProp\n    - RandAugment\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - JFT-300M\n    Training Resources: Cloud TPU v3 Pod\n    ID: tf_efficientnet_b0_ns\n    LR: 0.128\n    Epochs: 700\n    Dropout: 0.5\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '224'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n    Stochastic Depth Survival: 0.8\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1427\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b0_ns-c0e6a31c.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.66%\n      Top 5 Accuracy: 94.37%\n- Name: tf_efficientnet_b1_ns\n  In Collection: Noisy Student\n  Metadata:\n    FLOPs: 883633200\n    Parameters: 7790000\n    File Size: 31516408\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    
- Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - FixRes\n    - Label Smoothing\n    - Noisy Student\n    - RMSProp\n    - RandAugment\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - JFT-300M\n    Training Resources: Cloud TPU v3 Pod\n    ID: tf_efficientnet_b1_ns\n    LR: 0.128\n    Epochs: 700\n    Dropout: 0.5\n    Crop Pct: '0.882'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '240'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n    Stochastic Depth Survival: 0.8\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1437\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b1_ns-99dd0c41.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 81.39%\n      Top 5 Accuracy: 95.74%\n- Name: tf_efficientnet_b2_ns\n  In Collection: Noisy Student\n  Metadata:\n    FLOPs: 1234321170\n    Parameters: 9110000\n    File Size: 36801803\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - FixRes\n    - Label Smoothing\n    - Noisy Student\n    - RMSProp\n    - RandAugment\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - JFT-300M\n    Training Resources: Cloud TPU v3 Pod\n    ID: tf_efficientnet_b2_ns\n    LR: 0.128\n    Epochs: 700\n    Dropout: 0.5\n    Crop Pct: '0.89'\n    Momentum: 0.9\n    
Batch Size: 2048\n    Image Size: '260'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n    Stochastic Depth Survival: 0.8\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1447\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b2_ns-00306e48.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 82.39%\n      Top 5 Accuracy: 96.24%\n- Name: tf_efficientnet_b3_ns\n  In Collection: Noisy Student\n  Metadata:\n    FLOPs: 2275247568\n    Parameters: 12230000\n    File Size: 49385734\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - FixRes\n    - Label Smoothing\n    - Noisy Student\n    - RMSProp\n    - RandAugment\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - JFT-300M\n    Training Resources: Cloud TPU v3 Pod\n    ID: tf_efficientnet_b3_ns\n    LR: 0.128\n    Epochs: 700\n    Dropout: 0.5\n    Crop Pct: '0.904'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '300'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n    Stochastic Depth Survival: 0.8\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1457\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b3_ns-9d44bf68.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 
Accuracy: 84.04%\n      Top 5 Accuracy: 96.91%\n- Name: tf_efficientnet_b4_ns\n  In Collection: Noisy Student\n  Metadata:\n    FLOPs: 5749638672\n    Parameters: 19340000\n    File Size: 77995057\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - FixRes\n    - Label Smoothing\n    - Noisy Student\n    - RMSProp\n    - RandAugment\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - JFT-300M\n    Training Resources: Cloud TPU v3 Pod\n    ID: tf_efficientnet_b4_ns\n    LR: 0.128\n    Epochs: 700\n    Dropout: 0.5\n    Crop Pct: '0.922'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '380'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n    Stochastic Depth Survival: 0.8\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1467\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b4_ns-d6313a46.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 85.15%\n      Top 5 Accuracy: 97.47%\n- Name: tf_efficientnet_b5_ns\n  In Collection: Noisy Student\n  Metadata:\n    FLOPs: 13176501888\n    Parameters: 30390000\n    File Size: 122404944\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - FixRes\n    - Label Smoothing\n    - Noisy Student\n    - 
RMSProp\n    - RandAugment\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - JFT-300M\n    Training Resources: Cloud TPU v3 Pod\n    ID: tf_efficientnet_b5_ns\n    LR: 0.128\n    Epochs: 350\n    Dropout: 0.5\n    Crop Pct: '0.934'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '456'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n    Stochastic Depth Survival: 0.8\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1477\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5_ns-6f26d0cf.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 86.08%\n      Top 5 Accuracy: 97.75%\n- Name: tf_efficientnet_b6_ns\n  In Collection: Noisy Student\n  Metadata:\n    FLOPs: 24180518488\n    Parameters: 43040000\n    File Size: 173239537\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - FixRes\n    - Label Smoothing\n    - Noisy Student\n    - RMSProp\n    - RandAugment\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - JFT-300M\n    Training Resources: Cloud TPU v3 Pod\n    ID: tf_efficientnet_b6_ns\n    LR: 0.128\n    Epochs: 350\n    Dropout: 0.5\n    Crop Pct: '0.942'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '528'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n    Stochastic Depth Survival: 0.8\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1487\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b6_ns-51548356.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 86.45%\n      Top 5 Accuracy: 97.88%\n- Name: tf_efficientnet_b7_ns\n  In Collection: Noisy Student\n  Metadata:\n    FLOPs: 48205304880\n    Parameters: 66349999\n    File Size: 266853140\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - FixRes\n    - Label Smoothing\n    - Noisy Student\n    - RMSProp\n    - RandAugment\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - JFT-300M\n    Training Resources: Cloud TPU v3 Pod\n    ID: tf_efficientnet_b7_ns\n    LR: 0.128\n    Epochs: 350\n    Dropout: 0.5\n    Crop Pct: '0.949'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '600'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n    Stochastic Depth Survival: 0.8\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1498\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b7_ns-1dbc32de.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 86.83%\n      Top 5 Accuracy: 98.08%\n- Name: tf_efficientnet_l2_ns\n  In Collection: Noisy Student\n  Metadata:\n    FLOPs: 611646113804\n    Parameters: 480310000\n    File Size: 1925950424\n    Architecture:\n    
- 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - FixRes\n    - Label Smoothing\n    - Noisy Student\n    - RMSProp\n    - RandAugment\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - JFT-300M\n    Training Resources: Cloud TPU v3 Pod\n    Training Time: 6 days\n    ID: tf_efficientnet_l2_ns\n    LR: 0.128\n    Epochs: 350\n    Dropout: 0.5\n    Crop Pct: '0.96'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '800'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n    Stochastic Depth Survival: 0.8\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1520\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_l2_ns-df73bb44.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 88.35%\n      Top 5 Accuracy: 98.66%\n-->"
  },
  {
    "path": "hfdocs/source/models/pnasnet.mdx",
    "content": "# PNASNet\n\n**Progressive Neural Architecture Search**, or **PNAS**, is a method for learning the structure of convolutional neural networks (CNNs). It uses a sequential model-based optimization (SMBO) strategy, where we search the space of cell structures, starting with simple (shallow) models and progressing to complex ones, pruning out unpromising structures as we go. \n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('pnasnet5large', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `pnasnet5large`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('pnasnet5large', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{liu2018progressive,\n      title={Progressive Neural Architecture Search}, \n      author={Chenxi Liu and Barret Zoph and Maxim Neumann and Jonathon Shlens and Wei Hua and Li-Jia Li and Li Fei-Fei and Alan Yuille and Jonathan Huang and Kevin Murphy},\n      year={2018},\n      eprint={1712.00559},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: PNASNet\n  Paper:\n    Title: Progressive Neural Architecture Search\n    URL: https://paperswithcode.com/paper/progressive-neural-architecture-search\nModels:\n- Name: pnasnet5large\n  In Collection: PNASNet\n  Metadata:\n    FLOPs: 31458865950\n    Parameters: 86060000\n    File Size: 345153926\n    Architecture:\n    - Average Pooling\n    - Batch 
 Normalization\n    - Convolution\n    - Depthwise Separable Convolution\n    - Dropout\n    - ReLU\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 100x NVIDIA P100 GPUs\n    ID: pnasnet5large\n    LR: 0.015\n    Dropout: 0.5\n    Crop Pct: '0.911'\n    Momentum: 0.9\n    Batch Size: 1600\n    Image Size: '331'\n    Interpolation: bicubic\n    Label Smoothing: 0.1\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/pnasnet.py#L343\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-cadene/pnasnet5large-bf079911.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 82.78%\n      Top 5 Accuracy: 96.04%\n-->"
  },
  {
    "path": "hfdocs/source/models/regnetx.mdx",
    "content": "# RegNetX\n\n**RegNetX** is a convolutional network design space with simple, regular models with parameters: depth \\\\( d \\\\), initial width \\\\( w_{0} > 0 \\\\), and slope \\\\( w_{a} > 0 \\\\), and generates a different block width \\\\( u_{j} \\\\) for each block \\\\( j < d \\\\). The key restriction for the RegNet types of model is that there is a linear parameterisation of block widths (the design space only contains models with this linear structure):\n\n\\\\( u_{j} = w_{0} + w_{a}\\cdot{j} \\\\)\n\nFor **RegNetX** we have additional restrictions: we set \\\\( b = 1 \\\\) (the bottleneck ratio), \\\\( 12 \\leq d \\leq 28 \\\\), and \\\\( w_{m} \\geq 2 \\\\) (the width multiplier).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('regnetx_002', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py\n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     
out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `regnetx_002`. 
You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('regnetx_002', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{radosavovic2020designing,\n      title={Designing Network Design Spaces},\n      author={Ilija Radosavovic and Raj Prateek Kosaraju and Ross Girshick and Kaiming He and Piotr Dollár},\n      year={2020},\n      eprint={2003.13678},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: RegNetX\n  Paper:\n    Title: Designing Network Design Spaces\n    URL: https://paperswithcode.com/paper/designing-network-design-spaces\nModels:\n- Name: regnetx_002\n  In Collection: RegNetX\n  Metadata:\n    FLOPs: 255276032\n    Parameters: 2680000\n    File Size: 10862199\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnetx_002\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n   
 Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L337\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_002-e7e85e5c.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 68.75%\n      Top 5 Accuracy: 88.56%\n- Name: regnetx_004\n  In Collection: RegNetX\n  Metadata:\n    FLOPs: 510619136\n    Parameters: 5160000\n    File Size: 20841309\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnetx_004\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L343\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_004-7d0e9424.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 72.39%\n      Top 5 Accuracy: 90.82%\n- Name: regnetx_006\n  In Collection: RegNetX\n  Metadata:\n    FLOPs: 771659136\n    Parameters: 6200000\n    File Size: 24965172\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n   
 ID: regnetx_006\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L349\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_006-85ec1baa.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 73.84%\n      Top 5 Accuracy: 91.68%\n- Name: regnetx_008\n  In Collection: RegNetX\n  Metadata:\n    FLOPs: 1027038208\n    Parameters: 7260000\n    File Size: 29235944\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnetx_008\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L355\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_008-d8b470eb.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 75.05%\n      Top 5 Accuracy: 92.34%\n- Name: regnetx_016\n  In Collection: RegNetX\n  Metadata:\n    FLOPs: 2059337856\n    Parameters: 9190000\n    File Size: 36988158\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    Tasks:\n    - Image Classification\n 
   Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnetx_016\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L361\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_016-65ca972a.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 76.95%\n      Top 5 Accuracy: 93.43%\n- Name: regnetx_032\n  In Collection: RegNetX\n  Metadata:\n    FLOPs: 4082555904\n    Parameters: 15300000\n    File Size: 61509573\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnetx_032\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 512\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L367\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_032-ed0c7f7e.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.15%\n      Top 5 Accuracy: 94.09%\n- Name: regnetx_040\n  In Collection: RegNetX\n  Metadata:\n    FLOPs: 5095167744\n    Parameters: 22120000\n    File Size: 88844824\n    Architecture:\n    - 1x1 Convolution\n    - Batch 
Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnetx_040\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 512\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L373\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_040-73c2a654.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.48%\n      Top 5 Accuracy: 94.25%\n- Name: regnetx_064\n  In Collection: RegNetX\n  Metadata:\n    FLOPs: 8303405824\n    Parameters: 26210000\n    File Size: 105184854\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnetx_064\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 512\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L379\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_064-29278baa.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.06%\n      Top 5 Accuracy: 94.47%\n- Name: regnetx_080\n  In 
Collection: RegNetX\n  Metadata:\n    FLOPs: 10276726784\n    Parameters: 39570000\n    File Size: 158720042\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnetx_080\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 512\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L385\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_080-7c7fcab1.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.21%\n      Top 5 Accuracy: 94.55%\n- Name: regnetx_120\n  In Collection: RegNetX\n  Metadata:\n    FLOPs: 15536378368\n    Parameters: 46110000\n    File Size: 184866342\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnetx_120\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 512\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L391\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_120-65d5521e.pth\n  
Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.61%\n      Top 5 Accuracy: 94.73%\n- Name: regnetx_160\n  In Collection: RegNetX\n  Metadata:\n    FLOPs: 20491740672\n    Parameters: 54280000\n    File Size: 217623862\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnetx_160\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 512\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L397\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_160-c98c4112.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.84%\n      Top 5 Accuracy: 94.82%\n- Name: regnetx_320\n  In Collection: RegNetX\n  Metadata:\n    FLOPs: 40798958592\n    Parameters: 107810000\n    File Size: 431962133\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnetx_320\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L403\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_320-8ea38b93.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.25%\n      Top 5 Accuracy: 95.03%\n-->\n"
  },
  {
    "path": "hfdocs/source/models/regnety.mdx",
    "content": "# RegNetY\n\n**RegNetY** is a convolutional network design space with simple, regular models with parameters: depth \\\\( d \\\\), initial width \\\\( w_{0} > 0 \\\\), and slope \\\\( w_{a} > 0 \\\\), and generates a different block width \\\\( u_{j} \\\\) for each block \\\\( j < d \\\\). The key restriction for the RegNet types of model is that there is a linear parameterisation of block widths (the design space only contains models with this linear structure):\n\n\\\\( u_{j} = w_{0} + w_{a}\\cdot{j} \\\\)\n\nFor **RegNetX** authors have additional restrictions: they set \\\\( b = 1 \\\\) (the bottleneck ratio), \\\\( 12 \\leq d \\leq 28 \\\\), and \\\\( w_{m} \\geq 2 \\\\) (the width multiplier).\n\nFor **RegNetY** authors make one change, which is to include [Squeeze-and-Excitation blocks](https://paperswithcode.com/method/squeeze-and-excitation-block).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('regnety_002', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py\n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     
out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `regnety_002`. 
You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('regnety_002', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{radosavovic2020designing,\n      title={Designing Network Design Spaces},\n      author={Ilija Radosavovic and Raj Prateek Kosaraju and Ross Girshick and Kaiming He and Piotr Dollár},\n      year={2020},\n      eprint={2003.13678},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: RegNetY\n  Paper:\n    Title: Designing Network Design Spaces\n    URL: https://paperswithcode.com/paper/designing-network-design-spaces\nModels:\n- Name: regnety_002\n  In Collection: RegNetY\n  Metadata:\n    FLOPs: 255754236\n    Parameters: 3160000\n    File Size: 12782926\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnety_002\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: 
'224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L409\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_002-e68ca334.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 70.28%\n      Top 5 Accuracy: 89.55%\n- Name: regnety_004\n  In Collection: RegNetY\n  Metadata:\n    FLOPs: 515664568\n    Parameters: 4340000\n    File Size: 17542753\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnety_004\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L415\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_004-0db870e6.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 74.02%\n      Top 5 Accuracy: 91.76%\n- Name: regnety_006\n  In Collection: RegNetY\n  Metadata:\n    FLOPs: 771746928\n    Parameters: 6060000\n    File Size: 24394127\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with 
Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnety_006\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L421\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_006-c67e57ec.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 75.27%\n      Top 5 Accuracy: 92.53%\n- Name: regnety_008\n  In Collection: RegNetY\n  Metadata:\n    FLOPs: 1023448952\n    Parameters: 6260000\n    File Size: 25223268\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnety_008\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L427\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_008-dc900dbe.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 76.32%\n      Top 5 Accuracy: 93.07%\n- Name: regnety_016\n  In Collection: RegNetY\n  Metadata:\n    FLOPs: 2070895094\n    Parameters: 11200000\n    File Size: 45115589\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n   
 - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnety_016\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L433\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_016-54367f74.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.87%\n      Top 5 Accuracy: 93.73%\n- Name: regnety_032\n  In Collection: RegNetY\n  Metadata:\n    FLOPs: 4081118714\n    Parameters: 19440000\n    File Size: 78084523\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnety_032\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 512\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L439\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/regnety_032_ra-7f2439f9.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 82.01%\n     
 Top 5 Accuracy: 95.91%\n- Name: regnety_040\n  In Collection: RegNetY\n  Metadata:\n    FLOPs: 5105933432\n    Parameters: 20650000\n    File Size: 82913909\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnety_040\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 512\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L445\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_040-f0d569f9.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.23%\n      Top 5 Accuracy: 94.64%\n- Name: regnety_064\n  In Collection: RegNetY\n  Metadata:\n    FLOPs: 8167730444\n    Parameters: 30580000\n    File Size: 122751416\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnety_064\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 512\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L451\n  
Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_064-0a48325c.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.73%\n      Top 5 Accuracy: 94.76%\n- Name: regnety_080\n  In Collection: RegNetY\n  Metadata:\n    FLOPs: 10233621420\n    Parameters: 39180000\n    File Size: 157124671\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnety_080\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 512\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L457\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_080-e7f3eb93.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.87%\n      Top 5 Accuracy: 94.83%\n- Name: regnety_120\n  In Collection: RegNetY\n  Metadata:\n    FLOPs: 15542094856\n    Parameters: 51820000\n    File Size: 207743949\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnety_120\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n   
 Batch Size: 512\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L463\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_120-721ba79a.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.38%\n      Top 5 Accuracy: 95.12%\n- Name: regnety_160\n  In Collection: RegNetY\n  Metadata:\n    FLOPs: 20450196852\n    Parameters: 83590000\n    File Size: 334916722\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnety_160\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 512\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L469\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_160-d64013cd.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.28%\n      Top 5 Accuracy: 94.97%\n- Name: regnety_320\n  In Collection: RegNetY\n  Metadata:\n    FLOPs: 41492618394\n    Parameters: 145050000\n    File Size: 580891965\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - ReLU\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    
Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA V100 GPUs\n    ID: regnety_320\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 5.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/regnet.py#L475\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_320-ba464b29.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.8%\n      Top 5 Accuracy: 95.25%\n-->\n"
  },
  {
    "path": "hfdocs/source/models/res2net.mdx",
    "content": "# Res2Net\n\n**Res2Net** is an image model that employs a variation on bottleneck residual blocks, [Res2Net Blocks](https://paperswithcode.com/method/res2net-block). The motivation is to be able to represent features at multiple scales. This is achieved through a novel building block for CNNs that constructs hierarchical residual-like connections within one single residual block. This represents multi-scale features at a granular level and increases the range of receptive fields for each network layer.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('res2net101_26w_4s', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `res2net101_26w_4s`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('res2net101_26w_4s', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{Gao_2021,\n   title={Res2Net: A New Multi-Scale Backbone Architecture},\n   volume={43},\n   ISSN={1939-3539},\n   url={http://dx.doi.org/10.1109/TPAMI.2019.2938758},\n   DOI={10.1109/tpami.2019.2938758},\n   number={2},\n   journal={IEEE Transactions on Pattern Analysis and Machine Intelligence},\n   publisher={Institute of Electrical and Electronics Engineers (IEEE)},\n   author={Gao, Shang-Hua and Cheng, Ming-Ming and Zhao, Kai and Zhang, Xin-Yu and Yang, Ming-Hsuan and Torr, Philip},\n   year={2021},\n   month={Feb},\n   
pages={652–662}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Res2Net\n  Paper:\n    Title: 'Res2Net: A New Multi-scale Backbone Architecture'\n    URL: https://paperswithcode.com/paper/res2net-a-new-multi-scale-backbone\nModels:\n- Name: res2net101_26w_4s\n  In Collection: Res2Net\n  Metadata:\n    FLOPs: 10415881200\n    Parameters: 45210000\n    File Size: 181456059\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - ReLU\n    - Res2Net Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x Titan Xp GPUs\n    ID: res2net101_26w_4s\n    LR: 0.1\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/res2net.py#L152\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-res2net/res2net101_26w_4s-02a759a1.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.19%\n      Top 5 Accuracy: 94.43%\n- Name: res2net50_14w_8s\n  In Collection: Res2Net\n  Metadata:\n    FLOPs: 5403546768\n    Parameters: 25060000\n    File Size: 100638543\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - ReLU\n    - Res2Net Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x Titan Xp GPUs\n    ID: res2net50_14w_8s\n    LR: 0.1\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/res2net.py#L196\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-res2net/res2net50_14w_8s-6527dddc.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.14%\n      Top 5 Accuracy: 93.86%\n- Name: res2net50_26w_4s\n  In Collection: Res2Net\n  Metadata:\n    FLOPs: 5499974064\n    Parameters: 25700000\n    File Size: 103110087\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - ReLU\n    - Res2Net Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x Titan Xp GPUs\n    ID: res2net50_26w_4s\n    LR: 0.1\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/res2net.py#L141\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-res2net/res2net50_26w_4s-06e79181.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.99%\n      Top 5 Accuracy: 93.85%\n- Name: res2net50_26w_6s\n  In Collection: Res2Net\n  Metadata:\n    FLOPs: 8130156528\n    Parameters: 37050000\n    File Size: 148603239\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - ReLU\n    - Res2Net Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x Titan Xp GPUs\n    ID: res2net50_26w_6s\n    LR: 0.1\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    
Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/res2net.py#L163\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-res2net/res2net50_26w_6s-19041792.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.57%\n      Top 5 Accuracy: 94.12%\n- Name: res2net50_26w_8s\n  In Collection: Res2Net\n  Metadata:\n    FLOPs: 10760338992\n    Parameters: 48400000\n    File Size: 194085165\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - ReLU\n    - Res2Net Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x Titan Xp GPUs\n    ID: res2net50_26w_8s\n    LR: 0.1\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/res2net.py#L174\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-res2net/res2net50_26w_8s-2c7c9f12.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.19%\n      Top 5 Accuracy: 94.37%\n- Name: res2net50_48w_2s\n  In Collection: Res2Net\n  Metadata:\n    FLOPs: 5375291520\n    Parameters: 25290000\n    File Size: 101421406\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - ReLU\n    - Res2Net Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x Titan Xp 
GPUs\n    ID: res2net50_48w_2s\n    LR: 0.1\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/res2net.py#L185\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-res2net/res2net50_48w_2s-afed724a.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.53%\n      Top 5 Accuracy: 93.56%\n-->"
  },
  {
    "path": "hfdocs/source/models/res2next.mdx",
    "content": "# Res2NeXt\n\n**Res2NeXt** is an image model that employs a variation on [ResNeXt](https://paperswithcode.com/method/resnext) bottleneck residual blocks. The motivation is to be able to represent features at multiple scales. This is achieved through a novel building block for CNNs that constructs hierarchical residual-like connections within one single residual block. This represents multi-scale features at a granular level and increases the range of receptive fields for each network layer.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('res2next50', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `res2next50`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('res2next50', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{Gao_2021,\n   title={Res2Net: A New Multi-Scale Backbone Architecture},\n   volume={43},\n   ISSN={1939-3539},\n   url={http://dx.doi.org/10.1109/TPAMI.2019.2938758},\n   DOI={10.1109/tpami.2019.2938758},\n   number={2},\n   journal={IEEE Transactions on Pattern Analysis and Machine Intelligence},\n   publisher={Institute of Electrical and Electronics Engineers (IEEE)},\n   author={Gao, Shang-Hua and Cheng, Ming-Ming and Zhao, Kai and Zhang, Xin-Yu and Yang, Ming-Hsuan and Torr, Philip},\n   year={2021},\n   month={Feb},\n   pages={652–662}\n}\n```\n\n<!--\nType: 
model-index\nCollections:\n- Name: Res2NeXt\n  Paper:\n    Title: 'Res2Net: A New Multi-scale Backbone Architecture'\n    URL: https://paperswithcode.com/paper/res2net-a-new-multi-scale-backbone\nModels:\n- Name: res2next50\n  In Collection: Res2NeXt\n  Metadata:\n    FLOPs: 5396798208\n    Parameters: 24670000\n    File Size: 99019592\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - ReLU\n    - Res2NeXt Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x Titan Xp GPUs\n    ID: res2next50\n    LR: 0.1\n    Epochs: 100\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/res2net.py#L207\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-res2net/res2next50_4s-6ef7e7bf.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.24%\n      Top 5 Accuracy: 93.91%\n-->"
  },
  {
    "path": "hfdocs/source/models/resnest.mdx",
    "content": "# ResNeSt\n\nA **ResNeSt** is a variant on a [ResNet](https://paperswithcode.com/method/resnet), which instead stacks [Split-Attention blocks](https://paperswithcode.com/method/split-attention). The cardinal group representations are then concatenated along the channel dimension: \\\\( V = \\text{Concat} \\{ V^{1},V^{2},\\cdots,{V}^{K} \\} \\\\). As in standard residual blocks, the final output \\\\( Y \\\\) of otheur Split-Attention block is produced using a shortcut connection: \\\\( Y=V+X \\\\), if the input and output feature-map share the same shape.  For blocks with a stride, an appropriate transformation \\\\( \\mathcal{T} \\\\) is applied to the shortcut connection to align the output shapes:  \\\\( Y=V+\\mathcal{T}(X) \\\\). For example, \\\\( \\mathcal{T} \\\\) can be strided convolution or combined convolution-with-pooling.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('resnest101e', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py\n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     
out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `resnest101e`. 
You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('resnest101e', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{zhang2020resnest,\n      title={ResNeSt: Split-Attention Networks},\n      author={Hang Zhang and Chongruo Wu and Zhongyue Zhang and Yi Zhu and Haibin Lin and Zhi Zhang and Yue Sun and Tong He and Jonas Mueller and R. 
Manmatha and Mu Li and Alexander Smola},\n      year={2020},\n      eprint={2004.08955},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: ResNeSt\n  Paper:\n    Title: 'ResNeSt: Split-Attention Networks'\n    URL: https://paperswithcode.com/paper/resnest-split-attention-networks\nModels:\n- Name: resnest101e\n  In Collection: ResNeSt\n  Metadata:\n    FLOPs: 17423183648\n    Parameters: 48280000\n    File Size: 193782911\n    Architecture:\n    - 1x1 Convolution\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Split Attention\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - DropBlock\n    - Label Smoothing\n    - Mixup\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 64x NVIDIA V100 GPUs\n    ID: resnest101e\n    LR: 0.1\n    Epochs: 270\n    Layers: 101\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 4096\n    Image Size: '256'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnest.py#L182\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-resnest/resnest101-22405ba7.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 82.88%\n      Top 5 Accuracy: 96.31%\n- Name: resnest14d\n  In Collection: ResNeSt\n  Metadata:\n    FLOPs: 3548594464\n    Parameters: 10610000\n    File Size: 42562639\n    Architecture:\n    - 1x1 Convolution\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Split Attention\n    Tasks:\n    - Image Classification\n    Training 
Techniques:\n    - AutoAugment\n    - DropBlock\n    - Label Smoothing\n    - Mixup\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 64x NVIDIA V100 GPUs\n    ID: resnest14d\n    LR: 0.1\n    Epochs: 270\n    Layers: 14\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 8192\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnest.py#L148\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/gluon_resnest14-9c8fe254.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 75.51%\n      Top 5 Accuracy: 92.52%\n- Name: resnest200e\n  In Collection: ResNeSt\n  Metadata:\n    FLOPs: 45954387872\n    Parameters: 70200000\n    File Size: 193782911\n    Architecture:\n    - 1x1 Convolution\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Split Attention\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - DropBlock\n    - Label Smoothing\n    - Mixup\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 64x NVIDIA V100 GPUs\n    ID: resnest200e\n    LR: 0.1\n    Epochs: 270\n    Layers: 200\n    Dropout: 0.2\n    Crop Pct: '0.909'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '320'\n    Weight Decay: 0.0001\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnest.py#L194\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-resnest/resnest101-22405ba7.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    
Metrics:\n      Top 1 Accuracy: 83.85%\n      Top 5 Accuracy: 96.89%\n- Name: resnest269e\n  In Collection: ResNeSt\n  Metadata:\n    FLOPs: 100830307104\n    Parameters: 110930000\n    File Size: 445402691\n    Architecture:\n    - 1x1 Convolution\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Split Attention\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - DropBlock\n    - Label Smoothing\n    - Mixup\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 64x NVIDIA V100 GPUs\n    ID: resnest269e\n    LR: 0.1\n    Epochs: 270\n    Layers: 269\n    Dropout: 0.2\n    Crop Pct: '0.928'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '416'\n    Weight Decay: 0.0001\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnest.py#L206\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-resnest/resnest269-0cc87c48.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 84.53%\n      Top 5 Accuracy: 96.99%\n- Name: resnest26d\n  In Collection: ResNeSt\n  Metadata:\n    FLOPs: 4678918720\n    Parameters: 17070000\n    File Size: 68470242\n    Architecture:\n    - 1x1 Convolution\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Split Attention\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - DropBlock\n    - Label Smoothing\n    - Mixup\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 64x NVIDIA V100 GPUs\n    ID: resnest26d\n    LR: 0.1\n    Epochs: 270\n    Layers: 26\n    Dropout: 0.2\n   
 Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 8192\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnest.py#L159\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/gluon_resnest26-50eb607c.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.48%\n      Top 5 Accuracy: 94.3%\n- Name: resnest50d\n  In Collection: ResNeSt\n  Metadata:\n    FLOPs: 6937106336\n    Parameters: 27480000\n    File Size: 110273258\n    Architecture:\n    - 1x1 Convolution\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Split Attention\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - DropBlock\n    - Label Smoothing\n    - Mixup\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 64x NVIDIA V100 GPUs\n    ID: resnest50d\n    LR: 0.1\n    Epochs: 270\n    Layers: 50\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 8192\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnest.py#L170\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-resnest/resnest50-528c19ca.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.96%\n      Top 5 Accuracy: 95.38%\n- Name: resnest50d_1s4x24d\n  In Collection: ResNeSt\n  Metadata:\n    FLOPs: 5686764544\n    Parameters: 25680000\n    File Size: 103045531\n    Architecture:\n    - 1x1 Convolution\n    - Convolution\n    - Dense Connections\n    - 
Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Split Attention\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - DropBlock\n    - Label Smoothing\n    - Mixup\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 64x NVIDIA V100 GPUs\n    ID: resnest50d_1s4x24d\n    LR: 0.1\n    Epochs: 270\n    Layers: 50\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 8192\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnest.py#L229\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-resnest/resnest50_fast_1s4x24d-d4a4f76f.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 81.0%\n      Top 5 Accuracy: 95.33%\n- Name: resnest50d_4s2x40d\n  In Collection: ResNeSt\n  Metadata:\n    FLOPs: 5657064720\n    Parameters: 30420000\n    File Size: 122133282\n    Architecture:\n    - 1x1 Convolution\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Split Attention\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - DropBlock\n    - Label Smoothing\n    - Mixup\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 64x NVIDIA V100 GPUs\n    ID: resnest50d_4s2x40d\n    LR: 0.1\n    Epochs: 270\n    Layers: 50\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 8192\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bicubic\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnest.py#L218\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-resnest/resnest50_fast_4s2x40d-41d14ed0.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 81.11%\n      Top 5 Accuracy: 95.55%\n-->\n"
  },
  {
    "path": "hfdocs/source/models/resnet-d.mdx",
    "content": "# ResNet-D\n\n**ResNet-D** is a modification on the [ResNet](https://paperswithcode.com/method/resnet) architecture that utilises an [average pooling](https://paperswithcode.com/method/average-pooling) tweak for downsampling. The motivation is that in the unmodified ResNet, the [1×1 convolution](https://paperswithcode.com/method/1x1-convolution) for the downsampling block ignores 3/4 of input feature maps, so this is modified so no information will be ignored\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('resnet101d', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `resnet101d`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('resnet101d', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{he2018bag,\n      title={Bag of Tricks for Image Classification with Convolutional Neural Networks}, \n      author={Tong He and Zhi Zhang and Hang Zhang and Zhongyue Zhang and Junyuan Xie and Mu Li},\n      year={2018},\n      eprint={1812.01187},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: ResNet-D\n  Paper:\n    Title: Bag of Tricks for Image Classification with Convolutional Neural Networks\n    URL: https://paperswithcode.com/paper/bag-of-tricks-for-image-classification-with\nModels:\n- Name: 
resnet101d\n  In Collection: ResNet-D\n  Metadata:\n    FLOPs: 13805639680\n    Parameters: 44570000\n    File Size: 178791263\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: resnet101d\n    Crop Pct: '0.94'\n    Image Size: '256'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnet.py#L716\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet101d_ra2-2803ffab.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 82.31%\n      Top 5 Accuracy: 96.06%\n- Name: resnet152d\n  In Collection: ResNet-D\n  Metadata:\n    FLOPs: 20155275264\n    Parameters: 60210000\n    File Size: 241596837\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: resnet152d\n    Crop Pct: '0.94'\n    Image Size: '256'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnet.py#L724\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet152d_ra2-5cac0439.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 83.13%\n      Top 5 Accuracy: 96.35%\n- Name: resnet18d\n  In Collection: ResNet-D\n  Metadata:\n    FLOPs: 2645205760\n    Parameters: 11710000\n    File 
Size: 46893231\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: resnet18d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnet.py#L649\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet18d_ra2-48a79e06.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 72.27%\n      Top 5 Accuracy: 90.69%\n- Name: resnet200d\n  In Collection: ResNet-D\n  Metadata:\n    FLOPs: 26034378752\n    Parameters: 64690000\n    File Size: 259662933\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: resnet200d\n    Crop Pct: '0.94'\n    Image Size: '256'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnet.py#L749\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet200d_ra2-bdba9bf9.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 83.24%\n      Top 5 Accuracy: 96.49%\n- Name: resnet26d\n  In Collection: ResNet-D\n  Metadata:\n    FLOPs: 3335276032\n    Parameters: 16010000\n    File Size: 64209122\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual 
Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: resnet26d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnet.py#L683\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet26d-69e92c46.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 76.69%\n      Top 5 Accuracy: 93.15%\n- Name: resnet34d\n  In Collection: ResNet-D\n  Metadata:\n    FLOPs: 5026601728\n    Parameters: 21820000\n    File Size: 87369807\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: resnet34d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnet.py#L666\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet34d_ra2-f8dcfcaf.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.11%\n      Top 5 Accuracy: 93.38%\n- Name: resnet50d\n  In Collection: ResNet-D\n  Metadata:\n    FLOPs: 5591002624\n    Parameters: 25580000\n    File Size: 102567109\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - 
Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: resnet50d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnet.py#L699\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet50d_ra2-464e36ba.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.55%\n      Top 5 Accuracy: 95.16%\n-->"
  },
  {
    "path": "hfdocs/source/models/resnet.mdx",
    "content": "# ResNet\n\n**Residual Networks**, or **ResNets**, learn residual functions with reference to the layer inputs, instead of learning unreferenced functions. Instead of hoping each few stacked layers directly fit a desired underlying mapping, residual nets let these layers fit a residual mapping. They stack [residual blocks](https://paperswithcode.com/method/residual-block) ontop of each other to form network: e.g. a ResNet-50 has fifty layers using these blocks. \n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('resnet18', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `resnet18`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('resnet18', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/HeZRS15,\n  author    = {Kaiming He and\n               Xiangyu Zhang and\n               Shaoqing Ren and\n               Jian Sun},\n  title     = {Deep Residual Learning for Image Recognition},\n  journal   = {CoRR},\n  volume    = {abs/1512.03385},\n  year      = {2015},\n  url       = {http://arxiv.org/abs/1512.03385},\n  archivePrefix = {arXiv},\n  eprint    = {1512.03385},\n  timestamp = {Wed, 17 Apr 2019 17:23:45 +0200},\n  biburl    = {https://dblp.org/rec/journals/corr/HeZRS15.bib},\n  bibsource = {dblp computer science bibliography, 
https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: ResNet\n  Paper:\n    Title: Deep Residual Learning for Image Recognition\n    URL: https://paperswithcode.com/paper/deep-residual-learning-for-image-recognition\nModels:\n- Name: resnet18\n  In Collection: ResNet\n  Metadata:\n    FLOPs: 2337073152\n    Parameters: 11690000\n    File Size: 46827520\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: resnet18\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnet.py#L641\n  Weights: https://download.pytorch.org/models/resnet18-5c106cde.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 69.74%\n      Top 5 Accuracy: 89.09%\n- Name: resnet26\n  In Collection: ResNet\n  Metadata:\n    FLOPs: 3026804736\n    Parameters: 16000000\n    File Size: 64129972\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: resnet26\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnet.py#L675\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet26-9aa10e23.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    
Metrics:\n      Top 1 Accuracy: 75.29%\n      Top 5 Accuracy: 92.57%\n- Name: resnet34\n  In Collection: ResNet\n  Metadata:\n    FLOPs: 4718469120\n    Parameters: 21800000\n    File Size: 87290831\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: resnet34\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnet.py#L658\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet34-43635321.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 75.11%\n      Top 5 Accuracy: 92.28%\n- Name: resnet50\n  In Collection: ResNet\n  Metadata:\n    FLOPs: 5282531328\n    Parameters: 25560000\n    File Size: 102488165\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: resnet50\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnet.py#L691\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet50_ram-a26f946b.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.04%\n      Top 5 Accuracy: 94.39%\n- Name: resnetblur50\n  In Collection: ResNet\n  
Metadata:\n    FLOPs: 6621606912\n    Parameters: 25560000\n    File Size: 102488165\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Blur Pooling\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: resnetblur50\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/resnet.py#L1160\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnetblur50-84f4748f.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.29%\n      Top 5 Accuracy: 94.64%\n- Name: tv_resnet101\n  In Collection: ResNet\n  Metadata:\n    FLOPs: 10068547584\n    Parameters: 44550000\n    File Size: 178728960\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tv_resnet101\n    LR: 0.1\n    Epochs: 90\n    Crop Pct: '0.875'\n    LR Gamma: 0.1\n    Momentum: 0.9\n    Batch Size: 32\n    Image Size: '224'\n    LR Step Size: 30\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/resnet.py#L761\n  Weights: https://download.pytorch.org/models/resnet101-5d3b4d8f.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.37%\n      
Top 5 Accuracy: 93.56%\n- Name: tv_resnet152\n  In Collection: ResNet\n  Metadata:\n    FLOPs: 14857660416\n    Parameters: 60190000\n    File Size: 241530880\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tv_resnet152\n    LR: 0.1\n    Epochs: 90\n    Crop Pct: '0.875'\n    LR Gamma: 0.1\n    Momentum: 0.9\n    Batch Size: 32\n    Image Size: '224'\n    LR Step Size: 30\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/resnet.py#L769\n  Weights: https://download.pytorch.org/models/resnet152-b121ed2d.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.32%\n      Top 5 Accuracy: 94.05%\n- Name: tv_resnet34\n  In Collection: ResNet\n  Metadata:\n    FLOPs: 4718469120\n    Parameters: 21800000\n    File Size: 87306240\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tv_resnet34\n    LR: 0.1\n    Epochs: 90\n    Crop Pct: '0.875'\n    LR Gamma: 0.1\n    Momentum: 0.9\n    Batch Size: 32\n    Image Size: '224'\n    LR Step Size: 30\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/resnet.py#L745\n  Weights: https://download.pytorch.org/models/resnet34-333f7ec4.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 73.3%\n      Top 5 Accuracy: 91.42%\n- Name: tv_resnet50\n  In Collection: ResNet\n  Metadata:\n    FLOPs: 5282531328\n    Parameters: 25560000\n    File Size: 102502400\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tv_resnet50\n    LR: 0.1\n    Epochs: 90\n    Crop Pct: '0.875'\n    LR Gamma: 0.1\n    Momentum: 0.9\n    Batch Size: 32\n    Image Size: '224'\n    LR Step Size: 30\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/resnet.py#L753\n  Weights: https://download.pytorch.org/models/resnet50-19c8e357.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 76.16%\n      Top 5 Accuracy: 92.88%\n-->"
  },
  {
    "path": "hfdocs/source/models/resnext.mdx",
    "content": "# ResNeXt\n\nA **ResNeXt** repeats a [building block](https://paperswithcode.com/method/resnext-block) that aggregates a set of transformations with the same topology. Compared to a [ResNet](https://paperswithcode.com/method/resnet), it exposes a new dimension,  *cardinality* (the size of the set of transformations) \\\\( C \\\\), as an essential factor in addition to the dimensions of depth and width.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('resnext101_32x8d', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py\n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `resnext101_32x8d`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('resnext101_32x8d', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/XieGDTH16,\n  author    = {Saining Xie and\n               Ross B. 
Girshick and\n               Piotr Doll{\\'{a}}r and\n               Zhuowen Tu and\n               Kaiming He},\n  title     = {Aggregated Residual Transformations for Deep Neural Networks},\n  journal   = {CoRR},\n  volume    = {abs/1611.05431},\n  year      = {2016},\n  url       = {http://arxiv.org/abs/1611.05431},\n  archivePrefix = {arXiv},\n  eprint    = {1611.05431},\n  timestamp = {Mon, 13 Aug 2018 16:45:58 +0200},\n  biburl    = {https://dblp.org/rec/journals/corr/XieGDTH16.bib},\n  bibsource = {dblp computer science bibliography, https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: ResNeXt\n  Paper:\n    Title: Aggregated Residual Transformations for Deep Neural Networks\n    URL: https://paperswithcode.com/paper/aggregated-residual-transformations-for-deep\nModels:\n- Name: resnext101_32x8d\n  In Collection: ResNeXt\n  Metadata:\n    FLOPs: 21180417024\n    Parameters: 88790000\n    File Size: 356082095\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: resnext101_32x8d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/b9843f954b0457af2db4f9dea41a8538f51f5d78/timm/models/resnet.py#L877\n  Weights: https://download.pytorch.org/models/resnext101_32x8d-8ba56ff5.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.3%\n      Top 5 Accuracy: 94.53%\n- Name: resnext50_32x4d\n  In Collection: ResNeXt\n  Metadata:\n    FLOPs: 5472648192\n    Parameters: 25030000\n    File Size: 100435887\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped 
Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: resnext50_32x4d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/b9843f954b0457af2db4f9dea41a8538f51f5d78/timm/models/resnet.py#L851\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnext50_32x4d_ra-d733960d.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.79%\n      Top 5 Accuracy: 94.61%\n- Name: resnext50d_32x4d\n  In Collection: ResNeXt\n  Metadata:\n    FLOPs: 5781119488\n    Parameters: 25050000\n    File Size: 100515304\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: resnext50d_32x4d\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/b9843f954b0457af2db4f9dea41a8538f51f5d78/timm/models/resnet.py#L869\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnext50d_32x4d-103e99f8.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.67%\n      Top 5 Accuracy: 94.87%\n- Name: tv_resnext50_32x4d\n  In Collection: ResNeXt\n  Metadata:\n    FLOPs: 5472648192\n    Parameters: 25030000\n    File Size: 100441675\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n   
 - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tv_resnext50_32x4d\n    LR: 0.1\n    Epochs: 90\n    Crop Pct: '0.875'\n    LR Gamma: 0.1\n    Momentum: 0.9\n    Batch Size: 32\n    Image Size: '224'\n    LR Step Size: 30\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/resnet.py#L842\n  Weights: https://download.pytorch.org/models/resnext50_32x4d-7cdf4587.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.61%\n      Top 5 Accuracy: 93.68%\n-->\n"
  },
  {
    "path": "hfdocs/source/models/rexnet.mdx",
    "content": "# RexNet\n\n**Rank Expansion Networks** (ReXNets) follow a set of new design principles for designing bottlenecks in image classification models. Authors refine each layer by 1) expanding the input channel size of the convolution layer and 2) replacing the [ReLU6s](https://www.paperswithcode.com/method/relu6).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('rexnet_100', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `rexnet_100`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('rexnet_100', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{han2020rexnet,\n      title={ReXNet: Diminishing Representational Bottleneck on Convolutional Neural Network}, \n      author={Dongyoon Han and Sangdoo Yun and Byeongho Heo and YoungJoon Yoo},\n      year={2020},\n      eprint={2007.00992},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: RexNet\n  Paper:\n    Title: 'ReXNet: Diminishing Representational Bottleneck on Convolutional Neural\n      Network'\n    URL: https://paperswithcode.com/paper/rexnet-diminishing-representational\nModels:\n- Name: rexnet_100\n  In Collection: RexNet\n  Metadata:\n    FLOPs: 509989377\n    Parameters: 4800000\n    File Size: 19417552\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - Dropout\n    - 
ReLU6\n    - Residual Connection\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - Linear Warmup With Cosine Annealing\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x NVIDIA V100 GPUs\n    ID: rexnet_100\n    LR: 0.5\n    Epochs: 400\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 512\n    Image Size: '224'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    Label Smoothing: 0.1\n  Code: https://github.com/rwightman/pytorch-image-models/blob/b9843f954b0457af2db4f9dea41a8538f51f5d78/timm/models/rexnet.py#L212\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rexnet/rexnetv1_100-1b4dddf4.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.86%\n      Top 5 Accuracy: 93.88%\n- Name: rexnet_130\n  In Collection: RexNet\n  Metadata:\n    FLOPs: 848364461\n    Parameters: 7560000\n    File Size: 30508197\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - Dropout\n    - ReLU6\n    - Residual Connection\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - Linear Warmup With Cosine Annealing\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x NVIDIA V100 GPUs\n    ID: rexnet_130\n    LR: 0.5\n    Epochs: 400\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 512\n    Image Size: '224'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    Label Smoothing: 0.1\n  Code: https://github.com/rwightman/pytorch-image-models/blob/b9843f954b0457af2db4f9dea41a8538f51f5d78/timm/models/rexnet.py#L218\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rexnet/rexnetv1_130-590d768e.pth\n  Results:\n  - Task: Image Classification\n    Dataset: 
ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.49%\n      Top 5 Accuracy: 94.67%\n- Name: rexnet_150\n  In Collection: RexNet\n  Metadata:\n    FLOPs: 1122374469\n    Parameters: 9730000\n    File Size: 39227315\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - Dropout\n    - ReLU6\n    - Residual Connection\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - Linear Warmup With Cosine Annealing\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x NVIDIA V100 GPUs\n    ID: rexnet_150\n    LR: 0.5\n    Epochs: 400\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 512\n    Image Size: '224'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    Label Smoothing: 0.1\n  Code: https://github.com/rwightman/pytorch-image-models/blob/b9843f954b0457af2db4f9dea41a8538f51f5d78/timm/models/rexnet.py#L224\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rexnet/rexnetv1_150-bd1a6aa8.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.31%\n      Top 5 Accuracy: 95.16%\n- Name: rexnet_200\n  In Collection: RexNet\n  Metadata:\n    FLOPs: 1960224938\n    Parameters: 16370000\n    File Size: 65862221\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - Dropout\n    - ReLU6\n    - Residual Connection\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - Linear Warmup With Cosine Annealing\n    - Nesterov Accelerated Gradient\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x NVIDIA V100 GPUs\n    ID: rexnet_200\n    LR: 0.5\n    Epochs: 400\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 512\n    Image Size: '224'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    Label Smoothing: 0.1\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/b9843f954b0457af2db4f9dea41a8538f51f5d78/timm/models/rexnet.py#L230\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rexnet/rexnetv1_200-8c0b7f2d.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 81.63%\n      Top 5 Accuracy: 95.67%\n-->"
  },
  {
    "path": "hfdocs/source/models/se-resnet.mdx",
    "content": "# SE-ResNet\n\n**SE ResNet** is a variant of a [ResNet](https://www.paperswithcode.com/method/resnet) that employs [squeeze-and-excitation blocks](https://paperswithcode.com/method/squeeze-and-excitation-block) to enable the network to perform dynamic channel-wise feature recalibration.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('seresnet152d', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `seresnet152d`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('seresnet152d', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{hu2019squeezeandexcitation,\n      title={Squeeze-and-Excitation Networks}, \n      author={Jie Hu and Li Shen and Samuel Albanie and Gang Sun and Enhua Wu},\n      year={2019},\n      eprint={1709.01507},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: SE ResNet\n  Paper:\n    Title: Squeeze-and-Excitation Networks\n    URL: https://paperswithcode.com/paper/squeeze-and-excitation-networks\nModels:\n- Name: seresnet152d\n  In Collection: SE ResNet\n  Metadata:\n    FLOPs: 20161904304\n    Parameters: 66840000\n    File Size: 268144497\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max 
Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA Titan X GPUs\n    ID: seresnet152d\n    LR: 0.6\n    Epochs: 100\n    Layers: 152\n    Dropout: 0.2\n    Crop Pct: '0.94'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '256'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/resnet.py#L1206\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnet152d_ra2-04464dd2.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 83.74%\n      Top 5 Accuracy: 96.77%\n- Name: seresnet50\n  In Collection: SE ResNet\n  Metadata:\n    FLOPs: 5285062320\n    Parameters: 28090000\n    File Size: 112621903\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA Titan X GPUs\n    ID: seresnet50\n    LR: 0.6\n    Epochs: 100\n    Layers: 50\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/resnet.py#L1180\n  Weights: 
https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnet50_ra_224-8efdb4bb.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.26%\n      Top 5 Accuracy: 95.07%\n-->"
  },
  {
    "path": "hfdocs/source/models/selecsls.mdx",
    "content": "# SelecSLS\n\n**SelecSLS** uses novel selective long and short range skip connections to improve the information flow allowing for a drastically faster network without compromising accuracy.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('selecsls42b', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `selecsls42b`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('selecsls42b', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{Mehta_2020,\n   title={XNect},\n   volume={39},\n   ISSN={1557-7368},\n   url={http://dx.doi.org/10.1145/3386569.3392410},\n   DOI={10.1145/3386569.3392410},\n   number={4},\n   journal={ACM Transactions on Graphics},\n   publisher={Association for Computing Machinery (ACM)},\n   author={Mehta, Dushyant and Sotnychenko, Oleksandr and Mueller, Franziska and Xu, Weipeng and Elgharib, Mohamed and Fua, Pascal and Seidel, Hans-Peter and Rhodin, Helge and Pons-Moll, Gerard and Theobalt, Christian},\n   year={2020},\n   month={Jul}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: SelecSLS\n  Paper:\n    Title: 'XNect: Real-time Multi-Person 3D Motion Capture with a Single RGB Camera'\n    URL: 
https://paperswithcode.com/paper/xnect-real-time-multi-person-3d-human-pose\nModels:\n- Name: selecsls42b\n  In Collection: SelecSLS\n  Metadata:\n    FLOPs: 3824022528\n    Parameters: 32460000\n    File Size: 129948954\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Global Average Pooling\n    - ReLU\n    - SelecSLS Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Cosine Annealing\n    - Random Erasing\n    Training Data:\n    - ImageNet\n    ID: selecsls42b\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/b9843f954b0457af2db4f9dea41a8538f51f5d78/timm/models/selecsls.py#L335\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-selecsls/selecsls42b-8af30141.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.18%\n      Top 5 Accuracy: 93.39%\n- Name: selecsls60\n  In Collection: SelecSLS\n  Metadata:\n    FLOPs: 4610472600\n    Parameters: 30670000\n    File Size: 122839714\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Global Average Pooling\n    - ReLU\n    - SelecSLS Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Cosine Annealing\n    - Random Erasing\n    Training Data:\n    - ImageNet\n    ID: selecsls60\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/b9843f954b0457af2db4f9dea41a8538f51f5d78/timm/models/selecsls.py#L342\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-selecsls/selecsls60-bbf87526.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.99%\n      Top 5 Accuracy: 93.83%\n- Name: selecsls60b\n  In 
Collection: SelecSLS\n  Metadata:\n    FLOPs: 4657653144\n    Parameters: 32770000\n    File Size: 131252898\n    Architecture:\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Global Average Pooling\n    - ReLU\n    - SelecSLS Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Cosine Annealing\n    - Random Erasing\n    Training Data:\n    - ImageNet\n    ID: selecsls60b\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/b9843f954b0457af2db4f9dea41a8538f51f5d78/timm/models/selecsls.py#L349\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-selecsls/selecsls60b-94e619b5.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.41%\n      Top 5 Accuracy: 94.18%\n-->"
  },
  {
    "path": "hfdocs/source/models/seresnext.mdx",
    "content": "# SE-ResNeXt\n\n**SE ResNeXt** is a variant of a [ResNext](https://www.paperswithcode.com/method/resneXt) that employs [squeeze-and-excitation blocks](https://paperswithcode.com/method/squeeze-and-excitation-block) to enable the network to perform dynamic channel-wise feature recalibration.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('seresnext26d_32x4d', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `seresnext26d_32x4d`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('seresnext26d_32x4d', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{hu2019squeezeandexcitation,\n      title={Squeeze-and-Excitation Networks}, \n      author={Jie Hu and Li Shen and Samuel Albanie and Gang Sun and Enhua Wu},\n      year={2019},\n      eprint={1709.01507},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: SEResNeXt\n  Paper:\n    Title: Squeeze-and-Excitation Networks\n    URL: https://paperswithcode.com/paper/squeeze-and-excitation-networks\nModels:\n- Name: seresnext26d_32x4d\n  In Collection: SEResNeXt\n  Metadata:\n    FLOPs: 3507053024\n    Parameters: 16810000\n    File Size: 67425193\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - 
Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA Titan X GPUs\n    ID: seresnext26d_32x4d\n    LR: 0.6\n    Epochs: 100\n    Layers: 26\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/resnet.py#L1234\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnext26d_32x4d-80fa48a3.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.59%\n      Top 5 Accuracy: 93.61%\n- Name: seresnext26t_32x4d\n  In Collection: SEResNeXt\n  Metadata:\n    FLOPs: 3466436448\n    Parameters: 16820000\n    File Size: 67414838\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA Titan X GPUs\n    ID: seresnext26t_32x4d\n    LR: 0.6\n    Epochs: 100\n    Layers: 26\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/resnet.py#L1246\n  Weights: 
https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnext26tn_32x4d-569cb627.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.99%\n      Top 5 Accuracy: 93.73%\n- Name: seresnext50_32x4d\n  In Collection: SEResNeXt\n  Metadata:\n    FLOPs: 5475179184\n    Parameters: 27560000\n    File Size: 110569859\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA Titan X GPUs\n    ID: seresnext50_32x4d\n    LR: 0.6\n    Epochs: 100\n    Layers: 50\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 1024\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/resnet.py#L1267\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnext50_32x4d_racm-a304a460.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 81.27%\n      Top 5 Accuracy: 95.62%\n-->"
  },
  {
    "path": "hfdocs/source/models/skresnet.mdx",
    "content": "# SK-ResNet\n\n**SK ResNet** is a variant of a [ResNet](https://www.paperswithcode.com/method/resnet) that employs a [Selective Kernel](https://paperswithcode.com/method/selective-kernel) unit. In general, all the large kernel convolutions in the original bottleneck blocks in ResNet are replaced by the proposed [SK convolutions](https://paperswithcode.com/method/selective-kernel-convolution), enabling the network to choose appropriate receptive field sizes in an adaptive manner.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('skresnet18', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `skresnet18`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('skresnet18', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{li2019selective,\n      title={Selective Kernel Networks}, \n      author={Xiang Li and Wenhai Wang and Xiaolin Hu and Jian Yang},\n      year={2019},\n      eprint={1903.06586},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: SKResNet\n  Paper:\n    Title: Selective Kernel Networks\n    URL: https://paperswithcode.com/paper/selective-kernel-networks\nModels:\n- Name: skresnet18\n  In Collection: SKResNet\n  Metadata:\n    FLOPs: 2333467136\n    Parameters: 11960000\n    File Size: 47923238\n    Architecture:\n  
  - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - Residual Connection\n    - Selective Kernel\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x GPUs\n    ID: skresnet18\n    LR: 0.1\n    Epochs: 100\n    Layers: 18\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 4.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/sknet.py#L148\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/skresnet18_ra-4eec2804.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 73.03%\n      Top 5 Accuracy: 91.17%\n- Name: skresnet34\n  In Collection: SKResNet\n  Metadata:\n    FLOPs: 4711849952\n    Parameters: 22280000\n    File Size: 89299314\n    Architecture:\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Max Pooling\n    - Residual Connection\n    - Selective Kernel\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x GPUs\n    ID: skresnet34\n    LR: 0.1\n    Epochs: 100\n    Layers: 34\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 4.0e-05\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/sknet.py#L165\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/skresnet34_ra-bdc0ccde.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 76.93%\n   
   Top 5 Accuracy: 93.32%\n-->"
  },
  {
    "path": "hfdocs/source/models/skresnext.mdx",
    "content": "# SK-ResNeXt\n\n**SK ResNeXt** is a variant of a [ResNeXt](https://www.paperswithcode.com/method/resnext) that employs a [Selective Kernel](https://paperswithcode.com/method/selective-kernel) unit. In general, all the large kernel convolutions in the original bottleneck blocks in ResNext are replaced by the proposed [SK convolutions](https://paperswithcode.com/method/selective-kernel-convolution), enabling the network to choose appropriate receptive field sizes in an adaptive manner.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('skresnext50_32x4d', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `skresnext50_32x4d`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('skresnext50_32x4d', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{li2019selective,\n      title={Selective Kernel Networks}, \n      author={Xiang Li and Wenhai Wang and Xiaolin Hu and Jian Yang},\n      year={2019},\n      eprint={1903.06586},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: SKResNeXt\n  Paper:\n    Title: Selective Kernel Networks\n    URL: https://paperswithcode.com/paper/selective-kernel-networks\nModels:\n- Name: skresnext50_32x4d\n  In Collection: SKResNeXt\n  Metadata:\n    FLOPs: 5739845824\n    Parameters: 27480000\n    File Size: 
110340975\n    Architecture:\n    - Convolution\n    - Dense Connections\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - Residual Connection\n    - Selective Kernel\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    Training Resources: 8x GPUs\n    ID: skresnext50_32x4d\n    LR: 0.1\n    Epochs: 100\n    Layers: 50\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 256\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/a7f95818e44b281137503bcf4b3e3e94d8ffa52f/timm/models/sknet.py#L210\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/skresnext50_ra-f40e40bf.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.15%\n      Top 5 Accuracy: 94.64%\n-->"
  },
  {
    "path": "hfdocs/source/models/spnasnet.mdx",
    "content": "# SPNASNet\n\n**Single-Path NAS** is a novel differentiable NAS method for designing hardware-efficient ConvNets in less than 4 hours.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('spnasnet_100', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `spnasnet_100`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('spnasnet_100', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{stamoulis2019singlepath,\n      title={Single-Path NAS: Designing Hardware-Efficient ConvNets in less than 4 Hours}, \n      author={Dimitrios Stamoulis and Ruizhou Ding and Di Wang and Dimitrios Lymberopoulos and Bodhi Priyantha and Jie Liu and Diana Marculescu},\n      year={2019},\n      eprint={1904.02877},\n      archivePrefix={arXiv},\n      primaryClass={cs.LG}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: SPNASNet\n  Paper:\n    Title: 'Single-Path NAS: Designing Hardware-Efficient ConvNets in less than 4\n      Hours'\n    URL: https://paperswithcode.com/paper/single-path-nas-designing-hardware-efficient\nModels:\n- Name: spnasnet_100\n  In Collection: SPNASNet\n  Metadata:\n    FLOPs: 442385600\n    Parameters: 4420000\n    File Size: 
17902337\n    Architecture:\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Depthwise Separable Convolution\n    - Dropout\n    - ReLU\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: spnasnet_100\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L995\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/spnasnet_100-048bc3f4.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 74.08%\n      Top 5 Accuracy: 91.82%\n-->"
  },
  {
    "path": "hfdocs/source/models/ssl-resnet.mdx",
    "content": "# SSL ResNet\n\n**Residual Networks**, or **ResNets**, learn residual functions with reference to the layer inputs, instead of learning unreferenced functions. Instead of hoping each few stacked layers directly fit a desired underlying mapping, residual nets let these layers fit a residual mapping. They stack [residual blocks](https://paperswithcode.com/method/residual-block) on top of each other to form networks: e.g. a ResNet-50 has fifty layers using these blocks. \n\nThe models in this collection utilise semi-supervised learning to improve the performance of the model. The approach brings important gains to standard architectures for image, video and fine-grained classification. \n\nPlease note the CC-BY-NC 4.0 license on these weights, non-commercial use only.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('ssl_resnet18', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     
out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `ssl_resnet18`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('ssl_resnet18', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/abs-1905-00546,\n  author    = {I. 
Zeki Yalniz and\n               Herv{\\'{e}} J{\\'{e}}gou and\n               Kan Chen and\n               Manohar Paluri and\n               Dhruv Mahajan},\n  title     = {Billion-scale semi-supervised learning for image classification},\n  journal   = {CoRR},\n  volume    = {abs/1905.00546},\n  year      = {2019},\n  url       = {http://arxiv.org/abs/1905.00546},\n  archivePrefix = {arXiv},\n  eprint    = {1905.00546},\n  timestamp = {Mon, 28 Sep 2020 08:19:37 +0200},\n  biburl    = {https://dblp.org/rec/journals/corr/abs-1905-00546.bib},\n  bibsource = {dblp computer science bibliography, https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: SSL ResNet\n  Paper:\n    Title: Billion-scale semi-supervised learning for image classification\n    URL: https://paperswithcode.com/paper/billion-scale-semi-supervised-learning-for\nModels:\n- Name: ssl_resnet18\n  In Collection: SSL ResNet\n  Metadata:\n    FLOPs: 2337073152\n    Parameters: 11690000\n    File Size: 46811375\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - YFCC-100M\n    Training Resources: 64x GPUs\n    ID: ssl_resnet18\n    LR: 0.0015\n    Epochs: 30\n    Layers: 18\n    Crop Pct: '0.875'\n    Batch Size: 1536\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/resnet.py#L894\n  Weights: https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_supervised_resnet18-d92f0530.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 
72.62%\n      Top 5 Accuracy: 91.42%\n- Name: ssl_resnet50\n  In Collection: SSL ResNet\n  Metadata:\n    FLOPs: 5282531328\n    Parameters: 25560000\n    File Size: 102480594\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - YFCC-100M\n    Training Resources: 64x GPUs\n    ID: ssl_resnet50\n    LR: 0.0015\n    Epochs: 30\n    Layers: 50\n    Crop Pct: '0.875'\n    Batch Size: 1536\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/resnet.py#L904\n  Weights: https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_supervised_resnet50-08389792.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.24%\n      Top 5 Accuracy: 94.83%\n-->"
  },
  {
    "path": "hfdocs/source/models/swsl-resnet.mdx",
    "content": "# SWSL ResNet\n\n**Residual Networks**, or **ResNets**, learn residual functions with reference to the layer inputs, instead of learning unreferenced functions. Instead of hoping each few stacked layers directly fit a desired underlying mapping, residual nets let these layers fit a residual mapping. They stack [residual blocks](https://paperswithcode.com/method/residual-block) on top of each other to form networks: e.g. a ResNet-50 has fifty layers using these blocks. \n\nThe models in this collection utilise semi-weakly supervised learning to improve the performance of the model. The approach brings important gains to standard architectures for image, video and fine-grained classification. \n\nPlease note the CC-BY-NC 4.0 license on these weights, non-commercial use only.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('swsl_resnet18', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     
out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `swsl_resnet18`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('swsl_resnet18', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/abs-1905-00546,\n  author    = {I. 
Zeki Yalniz and\n               Herv{\\'{e}} J{\\'{e}}gou and\n               Kan Chen and\n               Manohar Paluri and\n               Dhruv Mahajan},\n  title     = {Billion-scale semi-supervised learning for image classification},\n  journal   = {CoRR},\n  volume    = {abs/1905.00546},\n  year      = {2019},\n  url       = {http://arxiv.org/abs/1905.00546},\n  archivePrefix = {arXiv},\n  eprint    = {1905.00546},\n  timestamp = {Mon, 28 Sep 2020 08:19:37 +0200},\n  biburl    = {https://dblp.org/rec/journals/corr/abs-1905-00546.bib},\n  bibsource = {dblp computer science bibliography, https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: SWSL ResNet\n  Paper:\n    Title: Billion-scale semi-supervised learning for image classification\n    URL: https://paperswithcode.com/paper/billion-scale-semi-supervised-learning-for\nModels:\n- Name: swsl_resnet18\n  In Collection: SWSL ResNet\n  Metadata:\n    FLOPs: 2337073152\n    Parameters: 11690000\n    File Size: 46811375\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - IG-1B-Targeted\n    - ImageNet\n    Training Resources: 64x GPUs\n    ID: swsl_resnet18\n    LR: 0.0015\n    Epochs: 30\n    Layers: 18\n    Crop Pct: '0.875'\n    Batch Size: 1536\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/resnet.py#L954\n  Weights: https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnet18-118f1556.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 
Accuracy: 73.28%\n      Top 5 Accuracy: 91.76%\n- Name: swsl_resnet50\n  In Collection: SWSL ResNet\n  Metadata:\n    FLOPs: 5282531328\n    Parameters: 25560000\n    File Size: 102480594\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Bottleneck Residual Block\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - IG-1B-Targeted\n    - ImageNet\n    Training Resources: 64x GPUs\n    ID: swsl_resnet50\n    LR: 0.0015\n    Epochs: 30\n    Layers: 50\n    Crop Pct: '0.875'\n    Batch Size: 1536\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/resnet.py#L965\n  Weights: https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnet50-16a12f1b.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 81.14%\n      Top 5 Accuracy: 95.97%\n-->"
  },
  {
    "path": "hfdocs/source/models/swsl-resnext.mdx",
    "content": "# SWSL ResNeXt\n\nA **ResNeXt** repeats a [building block](https://paperswithcode.com/method/resnext-block) that aggregates a set of transformations with the same topology. Compared to a [ResNet](https://paperswithcode.com/method/resnet), it exposes a new dimension,  *cardinality* (the size of the set of transformations) \\\\( C \\\\), as an essential factor in addition to the dimensions of depth and width.\n\nThe models in this collection utilise semi-weakly supervised learning to improve the performance of the model. The approach brings important gains to standard architectures for image, video and fine-grained classification.\n\nPlease note the CC-BY-NC 4.0 license on these weights, non-commercial use only.\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('swsl_resnext101_32x16d', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py\n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     
out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `swsl_resnext101_32x16d`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('swsl_resnext101_32x16d', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/abs-1905-00546,\n  author    = {I. 
Zeki Yalniz and\n               Herv{\\'{e}} J{\\'{e}}gou and\n               Kan Chen and\n               Manohar Paluri and\n               Dhruv Mahajan},\n  title     = {Billion-scale semi-supervised learning for image classification},\n  journal   = {CoRR},\n  volume    = {abs/1905.00546},\n  year      = {2019},\n  url       = {http://arxiv.org/abs/1905.00546},\n  archivePrefix = {arXiv},\n  eprint    = {1905.00546},\n  timestamp = {Mon, 28 Sep 2020 08:19:37 +0200},\n  biburl    = {https://dblp.org/rec/journals/corr/abs-1905-00546.bib},\n  bibsource = {dblp computer science bibliography, https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: SWSL ResNext\n  Paper:\n    Title: Billion-scale semi-supervised learning for image classification\n    URL: https://paperswithcode.com/paper/billion-scale-semi-supervised-learning-for\nModels:\n- Name: swsl_resnext101_32x16d\n  In Collection: SWSL ResNext\n  Metadata:\n    FLOPs: 46623691776\n    Parameters: 194030000\n    File Size: 777518664\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - IG-1B-Targeted\n    - ImageNet\n    Training Resources: 64x GPUs\n    ID: swsl_resnext101_32x16d\n    LR: 0.0015\n    Epochs: 30\n    Layers: 101\n    Crop Pct: '0.875'\n    Batch Size: 1536\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/resnet.py#L1009\n  Weights: https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnext101_32x16-f3559a9c.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n  
  Metrics:\n      Top 1 Accuracy: 83.34%\n      Top 5 Accuracy: 96.84%\n- Name: swsl_resnext101_32x4d\n  In Collection: SWSL ResNext\n  Metadata:\n    FLOPs: 10298145792\n    Parameters: 44180000\n    File Size: 177341913\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - IG-1B-Targeted\n    - ImageNet\n    Training Resources: 64x GPUs\n    ID: swsl_resnext101_32x4d\n    LR: 0.0015\n    Epochs: 30\n    Layers: 101\n    Crop Pct: '0.875'\n    Batch Size: 1536\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/resnet.py#L987\n  Weights: https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnext101_32x4-3f87e46b.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 83.22%\n      Top 5 Accuracy: 96.77%\n- Name: swsl_resnext101_32x8d\n  In Collection: SWSL ResNext\n  Metadata:\n    FLOPs: 21180417024\n    Parameters: 88790000\n    File Size: 356056638\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - IG-1B-Targeted\n    - ImageNet\n    Training Resources: 64x GPUs\n    ID: swsl_resnext101_32x8d\n    LR: 0.0015\n    Epochs: 30\n    Layers: 101\n    Crop Pct: '0.875'\n    Batch Size: 1536\n    Image 
Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/resnet.py#L998\n  Weights: https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnext101_32x8-b4712904.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 84.27%\n      Top 5 Accuracy: 97.17%\n- Name: swsl_resnext50_32x4d\n  In Collection: SWSL ResNext\n  Metadata:\n    FLOPs: 5472648192\n    Parameters: 25030000\n    File Size: 100428550\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Grouped Convolution\n    - Max Pooling\n    - ReLU\n    - ResNeXt Block\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - IG-1B-Targeted\n    - ImageNet\n    Training Resources: 64x GPUs\n    ID: swsl_resnext50_32x4d\n    LR: 0.0015\n    Epochs: 30\n    Layers: 50\n    Crop Pct: '0.875'\n    Batch Size: 1536\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/resnet.py#L976\n  Weights: https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnext50_32x4-72679e44.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 82.17%\n      Top 5 Accuracy: 96.23%\n-->\n"
  },
  {
    "path": "hfdocs/source/models/tf-efficientnet-condconv.mdx",
    "content": "# (Tensorflow) EfficientNet CondConv\n\n**EfficientNet** is a convolutional neural network architecture and scaling method that uniformly scales all dimensions of depth/width/resolution using a *compound coefficient*. Unlike conventional practice that arbitrarily scales these factors, the EfficientNet scaling method uniformly scales network width, depth, and resolution with a set of fixed scaling coefficients. For example, if we want to use \\\\( 2^N \\\\) times more computational resources, then we can simply increase the network depth by \\\\( \\alpha ^ N \\\\), width by \\\\( \\beta ^ N \\\\), and image size by \\\\( \\gamma ^ N \\\\), where \\\\( \\alpha, \\beta, \\gamma \\\\) are constant coefficients determined by a small grid search on the original small model. EfficientNet uses a compound coefficient \\\\( \\phi \\\\) to uniformly scale network width, depth, and resolution in a principled way.\n\nThe compound scaling method is justified by the intuition that if the input image is bigger, then the network needs more layers to increase the receptive field and more channels to capture more fine-grained patterns on the bigger image.\n\nThe base EfficientNet-B0 network is based on the inverted bottleneck residual blocks of [MobileNetV2](https://paperswithcode.com/method/mobilenetv2), in addition to [squeeze-and-excitation blocks](https://paperswithcode.com/method/squeeze-and-excitation-block).\n\nThis collection of models amends EfficientNet by adding [CondConv](https://paperswithcode.com/method/condconv) convolutions.\n\nThe weights from this model were ported from [Tensorflow/TPU](https://github.com/tensorflow/tpu).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('tf_efficientnet_cc_b0_4e', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py\n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import 
resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `tf_efficientnet_cc_b0_4e`. 
You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('tf_efficientnet_cc_b0_4e', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/abs-1904-04971,\n  author    = {Brandon Yang and\n               Gabriel Bender and\n               Quoc V. Le and\n               Jiquan Ngiam},\n  title     = {Soft Conditional Computation},\n  journal   = {CoRR},\n  volume    = {abs/1904.04971},\n  year      = {2019},\n  url       = {http://arxiv.org/abs/1904.04971},\n  archivePrefix = {arXiv},\n  eprint    = {1904.04971},\n  timestamp = {Thu, 25 Apr 2019 13:55:01 +0200},\n  biburl    = {https://dblp.org/rec/journals/corr/abs-1904-04971.bib},\n  bibsource = {dblp computer science bibliography, https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: TF EfficientNet CondConv\n  Paper:\n    Title: 'CondConv: Conditionally Parameterized Convolutions for Efficient Inference'\n    URL: https://paperswithcode.com/paper/soft-conditional-computation\nModels:\n- Name: tf_efficientnet_cc_b0_4e\n  In Collection: TF EfficientNet CondConv\n  Metadata:\n    FLOPs: 224153788\n    Parameters: 13310000\n    File Size: 53490940\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - CondConv\n    - 
Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_cc_b0_4e\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '224'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1561\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b0_4e-4362b6b2.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.32%\n      Top 5 Accuracy: 93.32%\n- Name: tf_efficientnet_cc_b0_8e\n  In Collection: TF EfficientNet CondConv\n  Metadata:\n    FLOPs: 224158524\n    Parameters: 24010000\n    File Size: 96287616\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - CondConv\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_cc_b0_8e\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '224'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1572\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b0_8e-66184a25.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.91%\n      Top 5 Accuracy: 93.65%\n- Name: tf_efficientnet_cc_b1_8e\n  In Collection: TF EfficientNet CondConv\n  Metadata:\n    FLOPs: 370427824\n    Parameters: 39720000\n    File Size: 159206198\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - CondConv\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_cc_b1_8e\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.882'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '240'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1584\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b1_8e-f7c79ae1.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.33%\n      Top 5 Accuracy: 94.37%\n-->\n"
  },
  {
    "path": "hfdocs/source/models/tf-efficientnet-lite.mdx",
    "content": "# (Tensorflow) EfficientNet Lite\n\n**EfficientNet** is a convolutional neural network architecture and scaling method that uniformly scales all dimensions of depth/width/resolution using a *compound coefficient*. Unlike conventional practice that arbitrarily scales these factors, the EfficientNet scaling method uniformly scales network width, depth, and resolution with a set of fixed scaling coefficients. For example, if we want to use \\\\( 2^N \\\\) times more computational resources, then we can simply increase the network depth by \\\\( \\alpha ^ N \\\\), width by \\\\( \\beta ^ N \\\\), and image size by \\\\( \\gamma ^ N \\\\), where \\\\( \\alpha, \\beta, \\gamma \\\\) are constant coefficients determined by a small grid search on the original small model. EfficientNet uses a compound coefficient \\\\( \\phi \\\\) to uniformly scale network width, depth, and resolution in a principled way.\n\nThe compound scaling method is justified by the intuition that if the input image is bigger, then the network needs more layers to increase the receptive field and more channels to capture more fine-grained patterns on the bigger image.\n\nThe base EfficientNet-B0 network is based on the inverted bottleneck residual blocks of [MobileNetV2](https://paperswithcode.com/method/mobilenetv2).\n\nEfficientNet-Lite makes EfficientNet more suitable for mobile devices by introducing [ReLU6](https://paperswithcode.com/method/relu6) activation functions and removing [squeeze-and-excitation blocks](https://paperswithcode.com/method/squeeze-and-excitation).\n\nThe weights from this model were ported from [Tensorflow/TPU](https://github.com/tensorflow/tpu).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('tf_efficientnet_lite0', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py\n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import 
resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `tf_efficientnet_lite0`. 
You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('tf_efficientnet_lite0', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{tan2020efficientnet,\n      title={EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks},\n      author={Mingxing Tan and Quoc V. 
Le},\n      year={2020},\n      eprint={1905.11946},\n      archivePrefix={arXiv},\n      primaryClass={cs.LG}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: TF EfficientNet Lite\n  Paper:\n    Title: 'EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks'\n    URL: https://paperswithcode.com/paper/efficientnet-rethinking-model-scaling-for\nModels:\n- Name: tf_efficientnet_lite0\n  In Collection: TF EfficientNet Lite\n  Metadata:\n    FLOPs: 488052032\n    Parameters: 4650000\n    File Size: 18820223\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - RELU6\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_lite0\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1596\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite0-0aa007d2.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 74.83%\n      Top 5 Accuracy: 92.17%\n- Name: tf_efficientnet_lite1\n  In Collection: TF EfficientNet Lite\n  Metadata:\n    FLOPs: 773639520\n    Parameters: 5420000\n    File Size: 21939331\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - RELU6\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_lite1\n    Crop Pct: '0.882'\n    Image Size: '240'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1607\n  
Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite1-bde8b488.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 76.67%\n      Top 5 Accuracy: 93.24%\n- Name: tf_efficientnet_lite2\n  In Collection: TF EfficientNet Lite\n  Metadata:\n    FLOPs: 1068494432\n    Parameters: 6090000\n    File Size: 24658687\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - RELU6\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_lite2\n    Crop Pct: '0.89'\n    Image Size: '260'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1618\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite2-dcccb7df.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.48%\n      Top 5 Accuracy: 93.75%\n- Name: tf_efficientnet_lite3\n  In Collection: TF EfficientNet Lite\n  Metadata:\n    FLOPs: 2011534304\n    Parameters: 8199999\n    File Size: 33161413\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - RELU6\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_lite3\n    Crop Pct: '0.904'\n    Image Size: '300'\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1629\n  Weights: 
https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite3-b733e338.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.83%\n      Top 5 Accuracy: 94.91%\n- Name: tf_efficientnet_lite4\n  In Collection: TF EfficientNet Lite\n  Metadata:\n    FLOPs: 5164802912\n    Parameters: 13010000\n    File Size: 52558819\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - RELU6\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_lite4\n    Crop Pct: '0.92'\n    Image Size: '380'\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1640\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite4-741542c3.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 81.54%\n      Top 5 Accuracy: 95.66%\n-->\n"
  },
  {
    "path": "hfdocs/source/models/tf-efficientnet.mdx",
    "content": "# (Tensorflow) EfficientNet\n\n**EfficientNet** is a convolutional neural network architecture and scaling method that uniformly scales all dimensions of depth/width/resolution using a *compound coefficient*. Unlike conventional practice that arbitrarily scales these factors, the EfficientNet scaling method uniformly scales network width, depth, and resolution with a set of fixed scaling coefficients. For example, if we want to use \\\\( 2^N \\\\) times more computational resources, then we can simply increase the network depth by \\\\( \\alpha ^ N \\\\), width by \\\\( \\beta ^ N \\\\), and image size by \\\\( \\gamma ^ N \\\\), where \\\\( \\alpha, \\beta, \\gamma \\\\) are constant coefficients determined by a small grid search on the original small model. EfficientNet uses a compound coefficient \\\\( \\phi \\\\) to uniformly scale network width, depth, and resolution in a principled way.\n\nThe compound scaling method is justified by the intuition that if the input image is bigger, then the network needs more layers to increase the receptive field and more channels to capture more fine-grained patterns on the bigger image.\n\nThe base EfficientNet-B0 network is based on the inverted bottleneck residual blocks of [MobileNetV2](https://paperswithcode.com/method/mobilenetv2), in addition to [squeeze-and-excitation blocks](https://paperswithcode.com/method/squeeze-and-excitation-block).\n\nThe weights from this model were ported from [Tensorflow/TPU](https://github.com/tensorflow/tpu).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('tf_efficientnet_b0', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py\n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = 
create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `tf_efficientnet_b0`. 
You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('tf_efficientnet_b0', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{tan2020efficientnet,\n      title={EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks},\n      author={Mingxing Tan and Quoc V. Le},\n      year={2020},\n      eprint={1905.11946},\n      archivePrefix={arXiv},\n      primaryClass={cs.LG}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: TF EfficientNet\n  Paper:\n    Title: 'EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks'\n    URL: https://paperswithcode.com/paper/efficientnet-rethinking-model-scaling-for\nModels:\n- Name: tf_efficientnet_b0\n  In Collection: TF EfficientNet\n  Metadata:\n    FLOPs: 488688572\n    Parameters: 5290000\n    File Size: 21383997\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: TPUv3 Cloud 
TPU\n    ID: tf_efficientnet_b0\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '224'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1241\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b0_aa-827b6e33.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 76.85%\n      Top 5 Accuracy: 93.23%\n- Name: tf_efficientnet_b1\n  In Collection: TF EfficientNet\n  Metadata:\n    FLOPs: 883633200\n    Parameters: 7790000\n    File Size: 31512534\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_b1\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.882'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '240'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1251\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b1_aa-ea7a6ee0.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.84%\n      Top 5 Accuracy: 94.2%\n- Name: tf_efficientnet_b2\n  In 
Collection: TF EfficientNet\n  Metadata:\n    FLOPs: 1234321170\n    Parameters: 9110000\n    File Size: 36797929\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_b2\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.89'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '260'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1261\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b2_aa-60c94f97.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.07%\n      Top 5 Accuracy: 94.9%\n- Name: tf_efficientnet_b3\n  In Collection: TF EfficientNet\n  Metadata:\n    FLOPs: 2275247568\n    Parameters: 12230000\n    File Size: 49381362\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_b3\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.904'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '300'\n    Weight Decay: 1.0e-05\n    
Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1271\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b3_aa-84b4657e.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 81.65%\n      Top 5 Accuracy: 95.72%\n- Name: tf_efficientnet_b4\n  In Collection: TF EfficientNet\n  Metadata:\n    FLOPs: 5749638672\n    Parameters: 19340000\n    File Size: 77989689\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: TPUv3 Cloud TPU\n    ID: tf_efficientnet_b4\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.922'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '380'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1281\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b4_aa-818f208c.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 83.03%\n      Top 5 Accuracy: 96.3%\n- Name: tf_efficientnet_b5\n  In Collection: TF EfficientNet\n  Metadata:\n    FLOPs: 13176501888\n    Parameters: 30390000\n    File Size: 122403150\n    Architecture:\n    
- 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_b5\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.934'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '456'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1291\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5_ra-9a3e5369.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 83.81%\n      Top 5 Accuracy: 96.75%\n- Name: tf_efficientnet_b6\n  In Collection: TF EfficientNet\n  Metadata:\n    FLOPs: 24180518488\n    Parameters: 43040000\n    File Size: 173232007\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_b6\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.942'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '528'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1301\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b6_aa-80ba17e4.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 84.11%\n      Top 5 Accuracy: 96.89%\n- Name: tf_efficientnet_b7\n  In Collection: TF EfficientNet\n  Metadata:\n    FLOPs: 48205304880\n    Parameters: 66349999\n    File Size: 266850607\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_b7\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.949'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '600'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1312\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b7_ra-6c08e654.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 84.93%\n      Top 5 Accuracy: 97.2%\n- Name: tf_efficientnet_b8\n  In Collection: TF EfficientNet\n  Metadata:\n    FLOPs: 80962956270\n    Parameters: 87410000\n    File Size: 351379853\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual 
Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Label Smoothing\n    - RMSProp\n    - Stochastic Depth\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_b8\n    LR: 0.256\n    Epochs: 350\n    Crop Pct: '0.954'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '672'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1323\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b8_ra-572d5dd9.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 85.35%\n      Top 5 Accuracy: 97.39%\n- Name: tf_efficientnet_el\n  In Collection: TF EfficientNet\n  Metadata:\n    FLOPs: 9356616096\n    Parameters: 10590000\n    File Size: 42800271\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_el\n    Crop Pct: '0.904'\n    Image Size: '300'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1551\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_el-5143854e.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.45%\n      Top 5 Accuracy: 95.17%\n- Name: tf_efficientnet_em\n  In Collection: TF EfficientNet\n  Metadata:\n    FLOPs: 
3636607040\n    Parameters: 6900000\n    File Size: 27933644\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_em\n    Crop Pct: '0.882'\n    Image Size: '240'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1541\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_em-e78cfe58.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.71%\n      Top 5 Accuracy: 94.33%\n- Name: tf_efficientnet_es\n  In Collection: TF EfficientNet\n  Metadata:\n    FLOPs: 2057577472\n    Parameters: 5440000\n    File Size: 22008479\n    Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: tf_efficientnet_es\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1531\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_es-ca1afbfe.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.28%\n      Top 5 Accuracy: 93.6%\n- Name: tf_efficientnet_l2_ns_475\n  In Collection: TF EfficientNet\n  Metadata:\n    FLOPs: 217795669644\n    Parameters: 480310000\n    File Size: 1925950424\n  
  Architecture:\n    - 1x1 Convolution\n    - Average Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inverted Residual Block\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - FixRes\n    - Label Smoothing\n    - Noisy Student\n    - RMSProp\n    - RandAugment\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    - JFT-300M\n    Training Resources: TPUv3 Cloud TPU\n    ID: tf_efficientnet_l2_ns_475\n    LR: 0.128\n    Epochs: 350\n    Dropout: 0.5\n    Crop Pct: '0.936'\n    Momentum: 0.9\n    Batch Size: 2048\n    Image Size: '475'\n    Weight Decay: 1.0e-05\n    Interpolation: bicubic\n    RMSProp Decay: 0.9\n    Label Smoothing: 0.1\n    BatchNorm Momentum: 0.99\n    Stochastic Depth Survival: 0.8\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1509\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_l2_ns_475-bebbd00a.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 88.24%\n      Top 5 Accuracy: 98.55%\n-->\n"
  },
  {
    "path": "hfdocs/source/models/tf-inception-v3.mdx",
    "content": "# (Tensorflow) Inception v3\n\n**Inception v3** is a convolutional neural network architecture from the Inception family that makes several improvements including using [Label Smoothing](https://paperswithcode.com/method/label-smoothing), Factorized 7 x 7 convolutions, and the use of an [auxiliary classifier](https://paperswithcode.com/method/auxiliary-classifier) to propagate label information lower down the network (along with the use of batch normalization for layers in the sidehead). The key building block is an [Inception Module](https://paperswithcode.com/method/inception-v3-module).\n\nThe weights from this model were ported from [Tensorflow/Models](https://github.com/tensorflow/models).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('tf_inception_v3', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     
out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `tf_inception_v3`. 
You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('tf_inception_v3', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/SzegedyVISW15,\n  author    = {Christian Szegedy and\n               Vincent Vanhoucke and\n               Sergey Ioffe and\n               Jonathon Shlens and\n               Zbigniew Wojna},\n  title     = {Rethinking the Inception Architecture for Computer Vision},\n  journal   = {CoRR},\n  volume    = {abs/1512.00567},\n  year      = {2015},\n  url       = {http://arxiv.org/abs/1512.00567},\n  archivePrefix = {arXiv},\n  eprint    = {1512.00567},\n  timestamp = {Mon, 13 Aug 2018 16:49:07 +0200},\n  biburl    = {https://dblp.org/rec/journals/corr/SzegedyVISW15.bib},\n  bibsource = {dblp computer science bibliography, https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: TF Inception v3\n  Paper:\n    Title: Rethinking the Inception Architecture for Computer Vision\n    URL: https://paperswithcode.com/paper/rethinking-the-inception-architecture-for\nModels:\n- Name: tf_inception_v3\n  In Collection: TF Inception v3\n  Metadata:\n    FLOPs: 7352418880\n    Parameters: 23830000\n    File Size: 95549439\n    Architecture:\n    - 1x1 Convolution\n    - Auxiliary Classifier\n    - Average 
Pooling\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Dropout\n    - Inception-v3 Module\n    - Max Pooling\n    - ReLU\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - Gradient Clipping\n    - Label Smoothing\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 50x NVIDIA Kepler GPUs\n    ID: tf_inception_v3\n    LR: 0.045\n    Dropout: 0.2\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Image Size: '299'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/inception_v3.py#L449\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_inception_v3-e0069de4.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 77.87%\n      Top 5 Accuracy: 93.65%\n-->"
  },
  {
    "path": "hfdocs/source/models/tf-mixnet.mdx",
    "content": "# (Tensorflow) MixNet\n\n**MixNet** is a type of convolutional neural network discovered via AutoML that utilises [MixConvs](https://paperswithcode.com/method/mixconv) instead of regular [depthwise convolutions](https://paperswithcode.com/method/depthwise-convolution).\n\nThe weights from this model were ported from [Tensorflow/TPU](https://github.com/tensorflow/tpu).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('tf_mixnet_l', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `tf_mixnet_l`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('tf_mixnet_l', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{tan2019mixconv,\n      title={MixConv: Mixed Depthwise Convolutional Kernels}, \n      author={Mingxing Tan and Quoc V. 
Le},\n      year={2019},\n      eprint={1907.09595},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: TF MixNet\n  Paper:\n    Title: 'MixConv: Mixed Depthwise Convolutional Kernels'\n    URL: https://paperswithcode.com/paper/mixnet-mixed-depthwise-convolutional-kernels\nModels:\n- Name: tf_mixnet_l\n  In Collection: TF MixNet\n  Metadata:\n    FLOPs: 688674516\n    Parameters: 7330000\n    File Size: 29620756\n    Architecture:\n    - Batch Normalization\n    - Dense Connections\n    - Dropout\n    - Global Average Pooling\n    - Grouped Convolution\n    - MixConv\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - MNAS\n    Training Data:\n    - ImageNet\n    ID: tf_mixnet_l\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1720\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_l-6c92e0c8.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.78%\n      Top 5 Accuracy: 94.0%\n- Name: tf_mixnet_m\n  In Collection: TF MixNet\n  Metadata:\n    FLOPs: 416633502\n    Parameters: 5010000\n    File Size: 20310871\n    Architecture:\n    - Batch Normalization\n    - Dense Connections\n    - Dropout\n    - Global Average Pooling\n    - Grouped Convolution\n    - MixConv\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - MNAS\n    Training Data:\n    - ImageNet\n    ID: tf_mixnet_m\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1709\n  Weights: 
https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_m-0f4d8805.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 76.96%\n      Top 5 Accuracy: 93.16%\n- Name: tf_mixnet_s\n  In Collection: TF MixNet\n  Metadata:\n    FLOPs: 302587678\n    Parameters: 4130000\n    File Size: 16738218\n    Architecture:\n    - Batch Normalization\n    - Dense Connections\n    - Dropout\n    - Global Average Pooling\n    - Grouped Convolution\n    - MixConv\n    - Squeeze-and-Excitation Block\n    - Swish\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - MNAS\n    Training Data:\n    - ImageNet\n    ID: tf_mixnet_s\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1698\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_s-89d3354b.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 75.68%\n      Top 5 Accuracy: 92.64%\n-->"
  },
  {
    "path": "hfdocs/source/models/tf-mobilenet-v3.mdx",
    "content": "# (Tensorflow) MobileNet v3\n\n**MobileNetV3** is a convolutional neural network that is designed for mobile phone CPUs. The network design includes the use of a [hard swish activation](https://paperswithcode.com/method/hard-swish) and [squeeze-and-excitation](https://paperswithcode.com/method/squeeze-and-excitation-block) modules in the [MBConv blocks](https://paperswithcode.com/method/inverted-residual-block).\n\nThe weights from this model were ported from [Tensorflow/Models](https://github.com/tensorflow/models).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('tf_mobilenetv3_large_075', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `tf_mobilenetv3_large_075`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('tf_mobilenetv3_large_075', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/abs-1905-02244,\n  author    = {Andrew Howard and\n               Mark Sandler and\n               Grace Chu and\n               Liang{-}Chieh Chen and\n               Bo Chen and\n               Mingxing Tan and\n               Weijun Wang and\n               Yukun Zhu and\n               Ruoming Pang and\n               Vijay Vasudevan and\n               Quoc V. 
Le and\n               Hartwig Adam},\n  title     = {Searching for MobileNetV3},\n  journal   = {CoRR},\n  volume    = {abs/1905.02244},\n  year      = {2019},\n  url       = {http://arxiv.org/abs/1905.02244},\n  archivePrefix = {arXiv},\n  eprint    = {1905.02244},\n  timestamp = {Tue, 12 Jan 2021 15:30:06 +0100},\n  biburl    = {https://dblp.org/rec/journals/corr/abs-1905-02244.bib},\n  bibsource = {dblp computer science bibliography, https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: TF MobileNet V3\n  Paper:\n    Title: Searching for MobileNetV3\n    URL: https://paperswithcode.com/paper/searching-for-mobilenetv3\nModels:\n- Name: tf_mobilenetv3_large_075\n  In Collection: TF MobileNet V3\n  Metadata:\n    FLOPs: 194323712\n    Parameters: 3990000\n    File Size: 16097377\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Depthwise Separable Convolution\n    - Dropout\n    - Global Average Pooling\n    - Hard Swish\n    - Inverted Residual Block\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x4 TPU Pod\n    ID: tf_mobilenetv3_large_075\n    LR: 0.1\n    Dropout: 0.8\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 4096\n    Image Size: '224'\n    Weight Decay: 1.0e-05\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/mobilenetv3.py#L394\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_large_075-150ee8b0.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 73.45%\n      Top 5 Accuracy: 91.34%\n- Name: tf_mobilenetv3_large_100\n  In Collection: TF 
MobileNet V3\n  Metadata:\n    FLOPs: 274535288\n    Parameters: 5480000\n    File Size: 22076649\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Depthwise Separable Convolution\n    - Dropout\n    - Global Average Pooling\n    - Hard Swish\n    - Inverted Residual Block\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x4 TPU Pod\n    ID: tf_mobilenetv3_large_100\n    LR: 0.1\n    Dropout: 0.8\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 4096\n    Image Size: '224'\n    Weight Decay: 1.0e-05\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/mobilenetv3.py#L403\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_large_100-427764d5.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 75.51%\n      Top 5 Accuracy: 92.61%\n- Name: tf_mobilenetv3_large_minimal_100\n  In Collection: TF MobileNet V3\n  Metadata:\n    FLOPs: 267216928\n    Parameters: 3920000\n    File Size: 15836368\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Depthwise Separable Convolution\n    - Dropout\n    - Global Average Pooling\n    - Hard Swish\n    - Inverted Residual Block\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 4x4 TPU Pod\n    ID: tf_mobilenetv3_large_minimal_100\n    LR: 0.1\n    Dropout: 0.8\n    Crop Pct: '0.875'\n    
Momentum: 0.9\n    Batch Size: 4096\n    Image Size: '224'\n    Weight Decay: 1.0e-05\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/mobilenetv3.py#L412\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_large_minimal_100-8596ae28.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 72.24%\n      Top 5 Accuracy: 90.64%\n- Name: tf_mobilenetv3_small_075\n  In Collection: TF MobileNet V3\n  Metadata:\n    FLOPs: 48457664\n    Parameters: 2040000\n    File Size: 8242701\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Depthwise Separable Convolution\n    - Dropout\n    - Global Average Pooling\n    - Hard Swish\n    - Inverted Residual Block\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 16x GPUs\n    ID: tf_mobilenetv3_small_075\n    LR: 0.045\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 4096\n    Image Size: '224'\n    Weight Decay: 4.0e-05\n    Interpolation: bilinear\n    RMSProp Decay: 0.9\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/mobilenetv3.py#L421\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_small_075-da427f52.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 65.72%\n      Top 5 Accuracy: 86.13%\n- Name: tf_mobilenetv3_small_100\n  In Collection: TF MobileNet V3\n  Metadata:\n    FLOPs: 65450600\n    Parameters: 2540000\n    File Size: 10256398\n    Architecture:\n    - 1x1 
Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Depthwise Separable Convolution\n    - Dropout\n    - Global Average Pooling\n    - Hard Swish\n    - Inverted Residual Block\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 16x GPUs\n    ID: tf_mobilenetv3_small_100\n    LR: 0.045\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 4096\n    Image Size: '224'\n    Weight Decay: 4.0e-05\n    Interpolation: bilinear\n    RMSProp Decay: 0.9\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/mobilenetv3.py#L430\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_small_100-37f49e2b.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 67.92%\n      Top 5 Accuracy: 87.68%\n- Name: tf_mobilenetv3_small_minimal_100\n  In Collection: TF MobileNet V3\n  Metadata:\n    FLOPs: 60827936\n    Parameters: 2040000\n    File Size: 8258083\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Dense Connections\n    - Depthwise Separable Convolution\n    - Dropout\n    - Global Average Pooling\n    - Hard Swish\n    - Inverted Residual Block\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - RMSProp\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 16x GPUs\n    ID: tf_mobilenetv3_small_minimal_100\n    LR: 0.045\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Batch Size: 4096\n    Image Size: '224'\n    Weight Decay: 4.0e-05\n    Interpolation: bilinear\n    RMSProp Decay: 0.9\n  
Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/mobilenetv3.py#L439\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_small_minimal_100-922a7843.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 62.91%\n      Top 5 Accuracy: 84.24%\n-->"
  },
  {
    "path": "hfdocs/source/models/tresnet.mdx",
    "content": "# TResNet\n\nA **TResNet** is a variant on a [ResNet](https://paperswithcode.com/method/resnet) that aims to boost accuracy while maintaining GPU training and inference efficiency.  It contains several design tricks including a SpaceToDepth stem, [Anti-Alias downsampling](https://paperswithcode.com/method/anti-alias-downsampling), In-Place Activated BatchNorm, Blocks selection and [squeeze-and-excitation layers](https://paperswithcode.com/method/squeeze-and-excitation-block).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('tresnet_l', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     
categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `tresnet_l`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('tresnet_l', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{ridnik2020tresnet,\n      title={TResNet: High Performance GPU-Dedicated Architecture}, \n      author={Tal Ridnik and Hussam Lawen and Asaf Noy and Emanuel Ben Baruch and Gilad Sharir and Itamar Friedman},\n      year={2020},\n      eprint={2003.13630},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: TResNet\n  Paper:\n    Title: 'TResNet: High Performance GPU-Dedicated Architecture'\n    URL: https://paperswithcode.com/paper/tresnet-high-performance-gpu-dedicated\nModels:\n- Name: tresnet_l\n  In Collection: 
TResNet\n  Metadata:\n    FLOPs: 10873416792\n    Parameters: 53456696\n    File Size: 224440219\n    Architecture:\n    - 1x1 Convolution\n    - Anti-Alias Downsampling\n    - Convolution\n    - Global Average Pooling\n    - InPlace-ABN\n    - Leaky ReLU\n    - ReLU\n    - Residual Connection\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Cutout\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA 100 GPUs\n    ID: tresnet_l\n    LR: 0.01\n    Epochs: 300\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/tresnet.py#L267\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/tresnet_l_81_5-235b486c.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 81.49%\n      Top 5 Accuracy: 95.62%\n- Name: tresnet_l_448\n  In Collection: TResNet\n  Metadata:\n    FLOPs: 43488238584\n    Parameters: 53456696\n    File Size: 224440219\n    Architecture:\n    - 1x1 Convolution\n    - Anti-Alias Downsampling\n    - Convolution\n    - Global Average Pooling\n    - InPlace-ABN\n    - Leaky ReLU\n    - ReLU\n    - Residual Connection\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Cutout\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA 100 GPUs\n    ID: tresnet_l_448\n    LR: 0.01\n    Epochs: 300\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Image Size: '448'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: 
https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/tresnet.py#L285\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/tresnet_l_448-940d0cd1.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 82.26%\n      Top 5 Accuracy: 95.98%\n- Name: tresnet_m\n  In Collection: TResNet\n  Metadata:\n    FLOPs: 5733048064\n    Parameters: 41282200\n    File Size: 125861314\n    Architecture:\n    - 1x1 Convolution\n    - Anti-Alias Downsampling\n    - Convolution\n    - Global Average Pooling\n    - InPlace-ABN\n    - Leaky ReLU\n    - ReLU\n    - Residual Connection\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Cutout\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA 100 GPUs\n    Training Time: < 24 hours\n    ID: tresnet_m\n    LR: 0.01\n    Epochs: 300\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/tresnet.py#L261\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/tresnet_m_80_8-dbc13962.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 80.8%\n      Top 5 Accuracy: 94.86%\n- Name: tresnet_m_448\n  In Collection: TResNet\n  Metadata:\n    FLOPs: 22929743104\n    Parameters: 29278464\n    File Size: 125861314\n    Architecture:\n    - 1x1 Convolution\n    - Anti-Alias Downsampling\n    - Convolution\n    - Global Average Pooling\n    - InPlace-ABN\n    - Leaky ReLU\n    - ReLU\n    - Residual Connection\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image 
Classification\n    Training Techniques:\n    - AutoAugment\n    - Cutout\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA 100 GPUs\n    ID: tresnet_m_448\n    LR: 0.01\n    Epochs: 300\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Image Size: '448'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/tresnet.py#L279\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/tresnet_m_448-bc359d10.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 81.72%\n      Top 5 Accuracy: 95.57%\n- Name: tresnet_xl\n  In Collection: TResNet\n  Metadata:\n    FLOPs: 15162534034\n    Parameters: 75646610\n    File Size: 314378965\n    Architecture:\n    - 1x1 Convolution\n    - Anti-Alias Downsampling\n    - Convolution\n    - Global Average Pooling\n    - InPlace-ABN\n    - Leaky ReLU\n    - ReLU\n    - Residual Connection\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Cutout\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA 100 GPUs\n    ID: tresnet_xl\n    LR: 0.01\n    Epochs: 300\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Image Size: '224'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/tresnet.py#L273\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/tresnet_xl_82_0-a2d51b00.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 82.05%\n      Top 5 Accuracy: 95.93%\n- Name: tresnet_xl_448\n  
In Collection: TResNet\n  Metadata:\n    FLOPs: 60641712730\n    Parameters: 75646610\n    File Size: 224440219\n    Architecture:\n    - 1x1 Convolution\n    - Anti-Alias Downsampling\n    - Convolution\n    - Global Average Pooling\n    - InPlace-ABN\n    - Leaky ReLU\n    - ReLU\n    - Residual Connection\n    - Squeeze-and-Excitation Block\n    Tasks:\n    - Image Classification\n    Training Techniques:\n    - AutoAugment\n    - Cutout\n    - Label Smoothing\n    - SGD with Momentum\n    - Weight Decay\n    Training Data:\n    - ImageNet\n    Training Resources: 8x NVIDIA 100 GPUs\n    ID: tresnet_xl_448\n    LR: 0.01\n    Epochs: 300\n    Crop Pct: '0.875'\n    Momentum: 0.9\n    Image Size: '448'\n    Weight Decay: 0.0001\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/tresnet.py#L291\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/tresnet_l_448-940d0cd1.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 83.06%\n      Top 5 Accuracy: 96.19%\n-->"
  },
  {
    "path": "hfdocs/source/models/wide-resnet.mdx",
    "content": "# Wide ResNet\n\n**Wide Residual Networks** are a variant on [ResNets](https://paperswithcode.com/method/resnet) where we decrease depth and increase the width of residual networks. This is achieved through the use of [wide residual blocks](https://paperswithcode.com/method/wide-residual-block).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('wide_resnet101_2', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `wide_resnet101_2`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('wide_resnet101_2', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@article{DBLP:journals/corr/ZagoruykoK16,\n  author    = {Sergey Zagoruyko and\n               Nikos Komodakis},\n  title     = {Wide Residual Networks},\n  journal   = {CoRR},\n  volume    = {abs/1605.07146},\n  year      = {2016},\n  url       = {http://arxiv.org/abs/1605.07146},\n  archivePrefix = {arXiv},\n  eprint    = {1605.07146},\n  timestamp = {Mon, 13 Aug 2018 16:46:42 +0200},\n  biburl    = {https://dblp.org/rec/journals/corr/ZagoruykoK16.bib},\n  bibsource = {dblp computer science bibliography, https://dblp.org}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Wide ResNet\n  Paper:\n    Title: Wide Residual Networks\n    URL: https://paperswithcode.com/paper/wide-residual-networks\nModels:\n- Name: wide_resnet101_2\n  In Collection: Wide 
ResNet\n  Metadata:\n    FLOPs: 29304929280\n    Parameters: 126890000\n    File Size: 254695146\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Wide Residual Block\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: wide_resnet101_2\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bilinear\n  Code: https://github.com/rwightman/pytorch-image-models/blob/5f9aff395c224492e9e44248b15f44b5cc095d9c/timm/models/resnet.py#L802\n  Weights: https://download.pytorch.org/models/wide_resnet101_2-32ee1156.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.85%\n      Top 5 Accuracy: 94.28%\n- Name: wide_resnet50_2\n  In Collection: Wide ResNet\n  Metadata:\n    FLOPs: 14688058368\n    Parameters: 68880000\n    File Size: 275853271\n    Architecture:\n    - 1x1 Convolution\n    - Batch Normalization\n    - Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    - Wide Residual Block\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: wide_resnet50_2\n    Crop Pct: '0.875'\n    Image Size: '224'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/5f9aff395c224492e9e44248b15f44b5cc095d9c/timm/models/resnet.py#L790\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/wide_resnet50_racm-8234f177.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 81.45%\n      Top 5 Accuracy: 95.52%\n-->"
  },
  {
    "path": "hfdocs/source/models/xception.mdx",
    "content": "# Xception\n\n**Xception** is a convolutional neural network architecture that relies solely on [depthwise separable convolution layers](https://paperswithcode.com/method/depthwise-separable-convolution).\n\nThe weights from this model were ported from [Tensorflow/Models](https://github.com/tensorflow/models).\n\n## How do I use this model on an image?\n\nTo load a pretrained model:\n\n```py\n>>> import timm\n>>> model = timm.create_model('xception', pretrained=True)\n>>> model.eval()\n```\n\nTo load and preprocess the image:\n\n```py \n>>> import urllib\n>>> from PIL import Image\n>>> from timm.data import resolve_data_config\n>>> from timm.data.transforms_factory import create_transform\n\n>>> config = resolve_data_config({}, model=model)\n>>> transform = create_transform(**config)\n\n>>> url, filename = (\"https://github.com/pytorch/hub/raw/master/images/dog.jpg\", \"dog.jpg\")\n>>> urllib.request.urlretrieve(url, filename)\n>>> img = Image.open(filename).convert('RGB')\n>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension\n```\n\nTo get the model predictions:\n\n```py\n>>> import torch\n>>> with torch.inference_mode():\n...     out = model(tensor)\n>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)\n>>> print(probabilities.shape)\n>>> # prints: torch.Size([1000])\n```\n\nTo get the top-5 predictions class names:\n\n```py\n>>> # Get imagenet class mappings\n>>> url, filename = (\"https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt\", \"imagenet_classes.txt\")\n>>> urllib.request.urlretrieve(url, filename) \n>>> with open(\"imagenet_classes.txt\", \"r\") as f:\n...     categories = [s.strip() for s in f.readlines()]\n\n>>> # Print top categories per image\n>>> top5_prob, top5_catid = torch.topk(probabilities, 5)\n>>> for i in range(top5_prob.size(0)):\n...     
print(categories[top5_catid[i]], top5_prob[i].item())\n>>> # prints class names and probabilities like:\n>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]\n```\n\nReplace the model name with the variant you want to use, e.g. `xception`. You can find the IDs in the model summaries at the top of this page.\n\nTo extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.\n\n## How do I finetune this model?\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('xception', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\nTo finetune on your own dataset, you have to write a training loop or adapt [timm's training\nscript](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.\n\n## How do I train this model?\n\nYou can follow the [timm recipe scripts](../training_script) for training a new model afresh.\n\n## Citation\n\n```BibTeX\n@misc{chollet2017xception,\n      title={Xception: Deep Learning with Depthwise Separable Convolutions}, \n      author={François Chollet},\n      year={2017},\n      eprint={1610.02357},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n```\n\n<!--\nType: model-index\nCollections:\n- Name: Xception\n  Paper:\n    Title: 'Xception: Deep Learning with Depthwise Separable Convolutions'\n    URL: https://paperswithcode.com/paper/xception-deep-learning-with-depthwise\nModels:\n- Name: xception\n  In Collection: Xception\n  Metadata:\n    FLOPs: 10600506792\n    Parameters: 22860000\n    File Size: 91675053\n    Architecture:\n    - 1x1 Convolution\n    - Convolution\n    - Dense Connections\n    - Depthwise Separable 
Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: xception\n    Crop Pct: '0.897'\n    Image Size: '299'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/xception.py#L229\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-cadene/xception-43020ad28.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.05%\n      Top 5 Accuracy: 94.4%\n- Name: xception41\n  In Collection: Xception\n  Metadata:\n    FLOPs: 11681983232\n    Parameters: 26970000\n    File Size: 108422028\n    Architecture:\n    - 1x1 Convolution\n    - Convolution\n    - Dense Connections\n    - Depthwise Separable Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: xception41\n    Crop Pct: '0.903'\n    Image Size: '299'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/xception_aligned.py#L181\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_xception_41-e6439c97.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 78.54%\n      Top 5 Accuracy: 94.28%\n- Name: xception65\n  In Collection: Xception\n  Metadata:\n    FLOPs: 17585702144\n    Parameters: 39920000\n    File Size: 160536780\n    Architecture:\n    - 1x1 Convolution\n    - Convolution\n    - Dense Connections\n    - Depthwise Separable Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image 
Classification\n    Training Data:\n    - ImageNet\n    ID: xception65\n    Crop Pct: '0.903'\n    Image Size: '299'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/xception_aligned.py#L200\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_xception_65-c9ae96e8.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.55%\n      Top 5 Accuracy: 94.66%\n- Name: xception71\n  In Collection: Xception\n  Metadata:\n    FLOPs: 22817346560\n    Parameters: 42340000\n    File Size: 170295556\n    Architecture:\n    - 1x1 Convolution\n    - Convolution\n    - Dense Connections\n    - Depthwise Separable Convolution\n    - Global Average Pooling\n    - Max Pooling\n    - ReLU\n    - Residual Connection\n    - Softmax\n    Tasks:\n    - Image Classification\n    Training Data:\n    - ImageNet\n    ID: xception71\n    Crop Pct: '0.903'\n    Image Size: '299'\n    Interpolation: bicubic\n  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/xception_aligned.py#L219\n  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_xception_71-8eec7df1.pth\n  Results:\n  - Task: Image Classification\n    Dataset: ImageNet\n    Metrics:\n      Top 1 Accuracy: 79.88%\n      Top 5 Accuracy: 94.93%\n-->"
  },
  {
    "path": "hfdocs/source/models.mdx",
    "content": "# Model Summaries\n\nThe model architectures included come from a wide variety of sources. Sources, including papers, original impl (\"reference code\") that I rewrote / adapted, and PyTorch impl that I leveraged directly (\"code\") are listed below.\n\nMost included models have pretrained weights. The weights are either:\n\n1. from their original sources\n2. ported by myself from their original impl in a different framework (e.g. Tensorflow models)\n3. trained from scratch using the included training script\n\nThe validation results for the pretrained weights are [here](results)\n\nA more exciting view (with pretty pictures) of the models within `timm` can be found at [paperswithcode](https://paperswithcode.com/lib/timm).\n\n## Big Transfer ResNetV2 (BiT)\n\n* Implementation: [resnetv2.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/resnetv2.py)\n* Paper: `Big Transfer (BiT): General Visual Representation Learning` - https://arxiv.org/abs/1912.11370\n* Reference code: https://github.com/google-research/big_transfer\n\n## Cross-Stage Partial Networks\n\n* Implementation: [cspnet.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/cspnet.py)\n* Paper: `CSPNet: A New Backbone that can Enhance Learning Capability of CNN` - https://arxiv.org/abs/1911.11929\n* Reference impl: https://github.com/WongKinYiu/CrossStagePartialNetworks\n\n## DenseNet\n\n* Implementation: [densenet.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/densenet.py)\n* Paper: `Densely Connected Convolutional Networks` - https://arxiv.org/abs/1608.06993\n* Code: https://github.com/pytorch/vision/tree/master/torchvision/models\n\n## DLA\n\n* Implementation: [dla.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/dla.py)\n* Paper: `Deep Layer Aggregation` - https://arxiv.org/abs/1707.06484\n* Code: https://github.com/ucbdrive/dla\n\n## Dual-Path Networks\n\n* Implementation: 
[dpn.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/dpn.py)\n* Paper: `Dual Path Networks` - https://arxiv.org/abs/1707.01629\n* My PyTorch code: https://github.com/rwightman/pytorch-dpn-pretrained\n* Reference code: https://github.com/cypw/DPNs\n\n## GPU-Efficient Networks\n\n* Implementation: [byobnet.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/byobnet.py)\n* Paper: `Neural Architecture Design for GPU-Efficient Networks` - https://arxiv.org/abs/2006.14090\n* Reference code: https://github.com/idstcv/GPU-Efficient-Networks\n\n## HRNet\n\n* Implementation: [hrnet.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/hrnet.py)\n* Paper: `Deep High-Resolution Representation Learning for Visual Recognition` - https://arxiv.org/abs/1908.07919\n* Code: https://github.com/HRNet/HRNet-Image-Classification\n\n## Inception-V3\n\n* Implementation: [inception_v3.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/inception_v3.py)\n* Paper: `Rethinking the Inception Architecture for Computer Vision` - https://arxiv.org/abs/1512.00567\n* Code: https://github.com/pytorch/vision/tree/master/torchvision/models\n\n## Inception-V4\n\n* Implementation: [inception_v4.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/inception_v4.py)\n* Paper: `Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning` - https://arxiv.org/abs/1602.07261\n* Code: https://github.com/Cadene/pretrained-models.pytorch\n* Reference code: https://github.com/tensorflow/models/tree/master/research/slim/nets\n\n## Inception-ResNet-V2\n\n* Implementation: [inception_resnet_v2.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/inception_resnet_v2.py)\n* Paper: `Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning` - https://arxiv.org/abs/1602.07261\n* Code: 
https://github.com/Cadene/pretrained-models.pytorch\n* Reference code: https://github.com/tensorflow/models/tree/master/research/slim/nets\n\n## NASNet-A\n\n* Implementation: [nasnet.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/nasnet.py)\n* Paper: `Learning Transferable Architectures for Scalable Image Recognition` - https://arxiv.org/abs/1707.07012\n* Code: https://github.com/Cadene/pretrained-models.pytorch\n* Reference code: https://github.com/tensorflow/models/tree/master/research/slim/nets/nasnet\n\n## PNasNet-5\n\n* Implementation: [pnasnet.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/pnasnet.py)\n* Paper: `Progressive Neural Architecture Search` - https://arxiv.org/abs/1712.00559\n* Code: https://github.com/Cadene/pretrained-models.pytorch\n* Reference code: https://github.com/tensorflow/models/tree/master/research/slim/nets/nasnet\n\n## EfficientNet\n\n* Implementation: [efficientnet.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/efficientnet.py)\n* Papers:\n  * EfficientNet NoisyStudent (B0-B7, L2) - https://arxiv.org/abs/1911.04252\n  * EfficientNet AdvProp (B0-B8) - https://arxiv.org/abs/1911.09665\n  * EfficientNet (B0-B7) - https://arxiv.org/abs/1905.11946\n  * EfficientNet-EdgeTPU (S, M, L) - https://ai.googleblog.com/2019/08/efficientnet-edgetpu-creating.html\n  * MixNet - https://arxiv.org/abs/1907.09595\n  * MNASNet B1, A1 (Squeeze-Excite), and Small - https://arxiv.org/abs/1807.11626\n  * MobileNet-V2 - https://arxiv.org/abs/1801.04381\n  * FBNet-C - https://arxiv.org/abs/1812.03443\n  * Single-Path NAS - https://arxiv.org/abs/1904.02877\n* My PyTorch code: https://github.com/rwightman/gen-efficientnet-pytorch\n* Reference code: https://github.com/tensorflow/tpu/tree/master/models/official/efficientnet\n\n## MobileNet-V3\n\n* Implementation: [mobilenetv3.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/mobilenetv3.py)\n* 
Paper: `Searching for MobileNetV3` - https://arxiv.org/abs/1905.02244\n* Reference code: https://github.com/tensorflow/models/tree/master/research/slim/nets/mobilenet\n\n## RegNet\n\n* Implementation: [regnet.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/regnet.py)\n* Paper: `Designing Network Design Spaces` - https://arxiv.org/abs/2003.13678\n* Reference code: https://github.com/facebookresearch/pycls/blob/master/pycls/models/regnet.py\n\n## RepVGG\n\n* Implementation: [byobnet.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/byobnet.py)\n* Paper: `Making VGG-style ConvNets Great Again` - https://arxiv.org/abs/2101.03697\n* Reference code: https://github.com/DingXiaoH/RepVGG\n\n## ResNet, ResNeXt\n\n* Implementation: [resnet.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/resnet.py)\n\n* ResNet (V1B)\n  * Paper: `Deep Residual Learning for Image Recognition` - https://arxiv.org/abs/1512.03385\n  * Code: https://github.com/pytorch/vision/tree/master/torchvision/models\n* ResNeXt\n  * Paper: `Aggregated Residual Transformations for Deep Neural Networks` - https://arxiv.org/abs/1611.05431\n  * Code: https://github.com/pytorch/vision/tree/master/torchvision/models\n* 'Bag of Tricks' / Gluon C, D, E, S ResNet variants\n  * Paper: `Bag of Tricks for Image Classification with CNNs` - https://arxiv.org/abs/1812.01187\n  * Code: https://github.com/dmlc/gluon-cv/blob/master/gluoncv/model_zoo/resnetv1b.py\n* Instagram pretrained / ImageNet tuned ResNeXt101\n  * Paper: `Exploring the Limits of Weakly Supervised Pretraining` - https://arxiv.org/abs/1805.00932\n  * Weights: https://pytorch.org/hub/facebookresearch_WSL-Images_resnext (NOTE: CC BY-NC 4.0 License, NOT commercial friendly)\n* Semi-supervised (SSL) / Semi-weakly Supervised (SWSL) ResNet and ResNeXts\n  * Paper: `Billion-scale semi-supervised learning for image classification` - https://arxiv.org/abs/1905.00546\n  * Weights: 
https://github.com/facebookresearch/semi-supervised-ImageNet1K-models (NOTE: CC BY-NC 4.0 License, NOT commercial friendly)\n* Squeeze-and-Excitation Networks\n  * Paper: `Squeeze-and-Excitation Networks` - https://arxiv.org/abs/1709.01507\n  * Code: Added to ResNet base, this is current version going forward, old `senet.py` is being deprecated\n* ECAResNet (ECA-Net)\n  * Paper: `ECA-Net: Efficient Channel Attention for Deep CNN` - https://arxiv.org/abs/1910.03151v4\n  * Code: Added to ResNet base, ECA module contributed by @VRandme, reference https://github.com/BangguWu/ECANet\n\n## Res2Net\n\n* Implementation: [res2net.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/res2net.py)\n* Paper: `Res2Net: A New Multi-scale Backbone Architecture` - https://arxiv.org/abs/1904.01169\n* Code: https://github.com/gasvn/Res2Net\n\n## ResNeSt\n\n* Implementation: [resnest.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/resnest.py)\n* Paper: `ResNeSt: Split-Attention Networks` - https://arxiv.org/abs/2004.08955\n* Code: https://github.com/zhanghang1989/ResNeSt\n\n## ReXNet\n\n* Implementation: [rexnet.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/rexnet.py)\n* Paper: `ReXNet: Diminishing Representational Bottleneck on CNN` - https://arxiv.org/abs/2007.00992\n* Code: https://github.com/clovaai/rexnet\n\n## Selective-Kernel Networks\n\n* Implementation: [sknet.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/sknet.py)\n* Paper: `Selective-Kernel Networks` - https://arxiv.org/abs/1903.06586\n* Code: https://github.com/implus/SKNet, https://github.com/clovaai/assembled-cnn\n\n## SelecSLS\n\n* Implementation: [selecsls.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/selecsls.py)\n* Paper: `XNect: Real-time Multi-Person 3D Motion Capture with a Single RGB Camera` - https://arxiv.org/abs/1907.00837\n* Code: 
https://github.com/mehtadushy/SelecSLS-Pytorch\n\n## Squeeze-and-Excitation Networks\n\n* Implementation: [senet.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/senet.py)\nNOTE: I am deprecating this version of the networks, the new ones are part of `resnet.py`\n\n* Paper: `Squeeze-and-Excitation Networks` - https://arxiv.org/abs/1709.01507\n* Code: https://github.com/Cadene/pretrained-models.pytorch \n\n## TResNet\n\n* Implementation: [tresnet.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/tresnet.py)\n* Paper: `TResNet: High Performance GPU-Dedicated Architecture` - https://arxiv.org/abs/2003.13630\n* Code: https://github.com/mrT23/TResNet\n\n## VGG\n\n* Implementation: [vgg.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vgg.py)\n* Paper: `Very Deep Convolutional Networks For Large-Scale Image Recognition` - https://arxiv.org/pdf/1409.1556.pdf\n* Reference code: https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py\n\n## Vision Transformer\n\n* Implementation: [vision_transformer.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vision_transformer.py)\n* Paper: `An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale` - https://arxiv.org/abs/2010.11929\n* Reference code and pretrained weights: https://github.com/google-research/vision_transformer\n\n## VovNet V2 and V1\n\n* Implementation: [vovnet.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vovnet.py)\n* Paper: `CenterMask : Real-Time Anchor-Free Instance Segmentation` - https://arxiv.org/abs/1911.06667\n* Reference code: https://github.com/youngwanLEE/vovnet-detectron2\n\n## Xception\n\n* Implementation: [xception.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/xception.py)\n* Paper: `Xception: Deep Learning with Depthwise Separable Convolutions` - https://arxiv.org/abs/1610.02357\n* Code: 
https://github.com/Cadene/pretrained-models.pytorch\n\n## Xception (Modified Aligned, Gluon)\n\n* Implementation: [gluon_xception.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/gluon_xception.py)\n* Paper: `Encoder-Decoder with Atrous Separable Convolution for Semantic Image Segmentation` - https://arxiv.org/abs/1802.02611\n* Reference code: https://github.com/dmlc/gluon-cv/tree/master/gluoncv/model_zoo, https://github.com/jfzhang95/pytorch-deeplab-xception/\n\n## Xception (Modified Aligned, TF)\n\n* Implementation: [xception_aligned.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/xception_aligned.py)\n* Paper: `Encoder-Decoder with Atrous Separable Convolution for Semantic Image Segmentation` - https://arxiv.org/abs/1802.02611\n* Reference code: https://github.com/tensorflow/models/tree/master/research/deeplab\n"
  },
  {
    "path": "hfdocs/source/quickstart.mdx",
    "content": "# Quickstart\n\nThis quickstart is intended for developers who are ready to dive into the code and see an example of how to integrate `timm` into their model training workflow.\n\nFirst, you'll need to install `timm`. For more information on installation, see [Installation](installation).\n\n```bash\npip install timm\n```\n\n## Load a Pretrained Model\n\nPretrained models can be loaded using [`create_model`].\n\nHere, we load the pretrained `mobilenetv3_large_100` model.\n\n```py\n>>> import timm\n\n>>> m = timm.create_model('mobilenetv3_large_100', pretrained=True)\n>>> m.eval()\n```\n\n<Tip>\n    Note: The returned PyTorch model is set to train mode by default, so you must call .eval() on it if you plan to use it for inference.\n</Tip>\n\n## List Models with Pretrained Weights\n\nTo list models packaged with `timm`, you can use [`list_models`]. If you specify `pretrained=True`, this function will only return model names that have associated pretrained weights available. 
\n\n```py\n>>> import timm\n>>> from pprint import pprint\n>>> model_names = timm.list_models(pretrained=True)\n>>> pprint(model_names)\n[\n    'adv_inception_v3',\n    'cspdarknet53',\n    'cspresnext50',\n    'densenet121',\n    'densenet161',\n    'densenet169',\n    'densenet201',\n    'densenetblur121d',\n    'dla34',\n    'dla46_c',\n]\n```\n\nYou can also list models with a specific pattern in their name.\n\n```py\n>>> import timm\n>>> from pprint import pprint\n>>> model_names = timm.list_models('*resne*t*')\n>>> pprint(model_names)\n[\n    'cspresnet50',\n    'cspresnet50d',\n    'cspresnet50w',\n    'cspresnext50',\n    ...\n]\n```\n\n## Fine-Tune a Pretrained Model\n\nYou can finetune any of the pre-trained models just by changing the classifier (the last layer).\n\n```py\n>>> model = timm.create_model('mobilenetv3_large_100', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)\n```\n\nTo fine-tune on your own dataset, you have to write a PyTorch training loop or adapt `timm`'s [training script](training_script) to use your dataset.\n\n## Use a Pretrained Model for Feature Extraction\n\nWithout modifying the network, one can call model.forward_features(input) on any model instead of the usual model(input). 
This will bypass the head classifier and global pooling for networks.\n\nFor a more in depth guide to using `timm` for feature extraction, see [Feature Extraction](feature_extraction).\n\n```py\n>>> import timm\n>>> import torch\n>>> x = torch.randn(1, 3, 224, 224)\n>>> model = timm.create_model('mobilenetv3_large_100', pretrained=True)\n>>> features = model.forward_features(x)\n>>> print(features.shape)\ntorch.Size([1, 960, 7, 7])\n```\n\n## Image Augmentation\n\nTo transform images into valid inputs for a model, you can use [`timm.data.create_transform`], providing the desired `input_size` that the model expects.\n\nThis will return a generic transform that uses reasonable defaults.\n\n```py\n>>> timm.data.create_transform((3, 224, 224))\nCompose(\n    Resize(size=256, interpolation=bilinear, max_size=None, antialias=None)\n    CenterCrop(size=(224, 224))\n    ToTensor()\n    Normalize(mean=tensor([0.4850, 0.4560, 0.4060]), std=tensor([0.2290, 0.2240, 0.2250]))\n)\n```\n\nPretrained models have specific transforms that were applied to images fed into them while training. 
If you use the wrong transform on your image, the model won't understand what it's seeing!\n\nTo figure out which transformations were used for a given pretrained model, we can start by taking a look at its `pretrained_cfg`\n\n```py\n>>> model.pretrained_cfg\n{'url': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_large_100_ra-f55367f5.pth',\n 'num_classes': 1000,\n 'input_size': (3, 224, 224),\n 'pool_size': (7, 7),\n 'crop_pct': 0.875,\n 'interpolation': 'bicubic',\n 'mean': (0.485, 0.456, 0.406),\n 'std': (0.229, 0.224, 0.225),\n 'first_conv': 'conv_stem',\n 'classifier': 'classifier',\n 'architecture': 'mobilenetv3_large_100'}\n```\n\nWe can then resolve only the data related configuration by using [`timm.data.resolve_data_config`].\n\n```py\n>>> timm.data.resolve_data_config(model.pretrained_cfg)\n{'input_size': (3, 224, 224),\n 'interpolation': 'bicubic',\n 'mean': (0.485, 0.456, 0.406),\n 'std': (0.229, 0.224, 0.225),\n 'crop_pct': 0.875}\n```\n\nWe can pass this data config to [`timm.data.create_transform`] to initialize the model's associated transform.\n\n```py\n>>> data_cfg = timm.data.resolve_data_config(model.pretrained_cfg)\n>>> transform = timm.data.create_transform(**data_cfg)\n>>> transform\nCompose(\n    Resize(size=256, interpolation=bicubic, max_size=None, antialias=None)\n    CenterCrop(size=(224, 224))\n    ToTensor()\n    Normalize(mean=tensor([0.4850, 0.4560, 0.4060]), std=tensor([0.2290, 0.2240, 0.2250]))\n)\n```\n\n<Tip>\n    Note: Here, the pretrained model's config happens to be the same as the generic config we made earlier. This is not always the case. So, it's safer to use the data config to create the transform as we did here instead of using the generic transform.\n</Tip>\n\n## Using Pretrained Models for Inference\n\nHere, we will put together the above sections and use a pretrained model for inference.\n\nFirst we'll need an image to do inference on. 
Here we load a picture of a cat from the web:
\n\n```py\n>>> probabilities = torch.nn.functional.softmax(output[0], dim=0)\n>>> probabilities.shape\ntorch.Size([1000])\n```\n\nNow we'll find the top 5 predicted class indexes and values using `torch.topk`.\n\n```py\n>>> values, indices = torch.topk(probabilities, 5)\n>>> indices\ntensor([281, 282, 285, 673, 670])\n```\n\nIf we check the imagenet labels for the top index, we can see what the model predicted...\n\n```py\n>>> IMAGENET_1k_URL = 'https://storage.googleapis.com/bit_models/ilsvrc2012_wordnet_lemmas.txt'\n>>> IMAGENET_1k_LABELS = requests.get(IMAGENET_1k_URL).text.strip().split('\\n')\n>>> [{'label': IMAGENET_1k_LABELS[idx], 'value': val.item()} for val, idx in zip(values, indices)]\n[{'label': 'tabby, tabby_cat', 'value': 0.5101025700569153},\n {'label': 'tiger_cat', 'value': 0.22490699589252472},\n {'label': 'Egyptian_cat', 'value': 0.1835290789604187},\n {'label': 'mouse, computer_mouse', 'value': 0.006752475164830685},\n {'label': 'motor_scooter, scooter', 'value': 0.004942195490002632}]\n```"
  },
  {
    "path": "hfdocs/source/reference/data.mdx",
    "content": "# Data\n\n[[autodoc]] timm.data.create_dataset\n\n[[autodoc]] timm.data.create_loader\n\n[[autodoc]] timm.data.create_transform\n\n[[autodoc]] timm.data.resolve_data_config"
  },
  {
    "path": "hfdocs/source/reference/models.mdx",
    "content": "# Models\n\n[[autodoc]] timm.create_model\n\n[[autodoc]] timm.list_models\n"
  },
  {
    "path": "hfdocs/source/reference/optimizers.mdx",
    "content": "# Optimization\n\nThis page contains the API reference documentation for learning rate optimizers included in `timm`.\n\n## Optimizers\n\n### Factory functions\n\n[[autodoc]] timm.optim.create_optimizer_v2\n[[autodoc]] timm.optim.list_optimizers\n[[autodoc]] timm.optim.get_optimizer_class\n\n### Optimizer Classes\n\n[[autodoc]] timm.optim.adabelief.AdaBelief\n[[autodoc]] timm.optim.adafactor.Adafactor\n[[autodoc]] timm.optim.adafactor_bv.AdafactorBigVision\n[[autodoc]] timm.optim.adahessian.Adahessian\n[[autodoc]] timm.optim.adamp.AdamP\n[[autodoc]] timm.optim.adan.Adan\n[[autodoc]] timm.optim.adopt.Adopt\n[[autodoc]] timm.optim.lamb.Lamb\n[[autodoc]] timm.optim.laprop.LaProp\n[[autodoc]] timm.optim.lars.Lars\n[[autodoc]] timm.optim.lion.Lion\n[[autodoc]] timm.optim.lookahead.Lookahead\n[[autodoc]] timm.optim.madgrad.MADGRAD\n[[autodoc]] timm.optim.mars.Mars\n[[autodoc]] timm.optim.nadamw.NAdamW\n[[autodoc]] timm.optim.nvnovograd.NvNovoGrad\n[[autodoc]] timm.optim.rmsprop_tf.RMSpropTF\n[[autodoc]] timm.optim.sgdp.SGDP\n[[autodoc]] timm.optim.sgdw.SGDW"
  },
  {
    "path": "hfdocs/source/reference/schedulers.mdx",
    "content": "# Learning Rate Schedulers\n\nThis page contains the API reference documentation for learning rate schedulers included in `timm`.\n\n## Schedulers\n\n### Factory functions\n\n[[autodoc]] timm.scheduler.scheduler_factory.create_scheduler\n[[autodoc]] timm.scheduler.scheduler_factory.create_scheduler_v2\n\n### Scheduler Classes\n\n[[autodoc]] timm.scheduler.cosine_lr.CosineLRScheduler\n[[autodoc]] timm.scheduler.multistep_lr.MultiStepLRScheduler\n[[autodoc]] timm.scheduler.plateau_lr.PlateauLRScheduler\n[[autodoc]] timm.scheduler.poly_lr.PolyLRScheduler\n[[autodoc]] timm.scheduler.step_lr.StepLRScheduler\n[[autodoc]] timm.scheduler.tanh_lr.TanhLRScheduler\n"
  },
  {
    "path": "hfdocs/source/results.mdx",
CSV files containing ImageNet-1K and out-of-distribution (OOD) test set validation results for all models with pretrained weights are located in the repository [results folder](https://github.com/rwightman/pytorch-image-models/tree/master/results).
| mixnet_l | 78.976 (21.024) | 94.184 (5.816) | 7.33 | bicubic | 224 |\n| efficientnet_b1 | 78.692 (21.308) | 94.086 (5.914) | 7.79 | bicubic | 240 |\n| efficientnet_es | 78.066 (21.934) | 93.926 (6.074) | 5.44 | bicubic | 224 |\n| seresnext26t_32x4d | 77.998 (22.002) | 93.708 (6.292) | 16.8 | bicubic | 224 |\n| seresnext26tn_32x4d | 77.986 (22.014) | 93.746 (6.254) | 16.8 | bicubic | 224 |\n| efficientnet_b0 | 77.698 (22.302) | 93.532 (6.468) | 5.29 | bicubic | 224 |\n| seresnext26d_32x4d | 77.602 (22.398) | 93.608 (6.392) | 16.8 | bicubic | 224 |\n| mobilenetv2_120d | 77.294 (22.706) | 93.502 (6.498) | 5.8 | bicubic | 224 |
For weights ported from other deep learning frameworks (Tensorflow, MXNet GluonCV) or copied from other PyTorch sources, please see the full results tables for ImageNet and various OOD test sets in the [results tables](https://github.com/rwightman/pytorch-image-models/tree/master/results).
  },
  {
    "path": "hfdocs/source/training_script.mdx",
Train, validation, inference, and checkpoint cleaning scripts are included in the github root folder. Scripts are not currently packaged in the pip release.
These params are for dual Titan RTX cards with NVIDIA Apex installed:
### EfficientNet-B0 with RandAugment - 77.7 top-1, 93.5 top-5
### MobileNetV3-Large-100 - 75.766 top-1, 92.542 top-5\n\n```bash\n./distributed_train.sh 2 --data-dir /imagenet/ --model mobilenetv3_large_100
The cmd line below are tuned for 8 GPU training.\n\n\n```bash\n./distributed_train.sh 8 --data-dir /imagenet --model resnext50_32x4d --lr 0.6 --warmup-epochs 5 --epochs 240 --weight-decay 1e-4 --sched cosine --reprob 0.4 --recount 3 --remode pixel --aa rand-m7-mstd0.5-inc1 -b 192 -j 6 --amp --dist-bn reduce\n```\n"
  },
  {
    "path": "hubconf.py",
    "content": "dependencies = ['torch']\nimport timm\nglobals().update(timm.models._registry._model_entrypoints)\n"
  },
  {
    "path": "inference.py",
    "content": "#!/usr/bin/env python3\n\"\"\"PyTorch Inference Script\n\nAn example inference script that outputs top-k class ids for images in a folder into a csv.\n\nHacked together by / Copyright 2020 Ross Wightman (https://github.com/rwightman)\n\"\"\"\nimport argparse\nimport json\nimport logging\nimport os\nimport time\nfrom contextlib import suppress\nfrom functools import partial\nfrom sys import maxsize\n\nimport numpy as np\nimport pandas as pd\nimport torch\n\nfrom timm.data import create_dataset, create_loader, resolve_data_config, ImageNetInfo, infer_imagenet_subset\nfrom timm.layers import apply_test_time_pool\nfrom timm.models import create_model\nfrom timm.utils import AverageMeter, setup_default_logging, set_jit_fuser, ParseKwargs\n\ntry:\n    from functorch.compile import memory_efficient_fusion\n    has_functorch = True\nexcept ImportError as e:\n    has_functorch = False\n\nhas_compile = hasattr(torch, 'compile')\n\n\n_FMT_EXT = {\n    'json': '.json',\n    'json-record': '.json',\n    'json-split': '.json',\n    'parquet': '.parquet',\n    'csv': '.csv',\n}\n\ntorch.backends.cudnn.benchmark = True\n_logger = logging.getLogger('inference')\n\n\nparser = argparse.ArgumentParser(description='PyTorch ImageNet Inference')\nparser.add_argument('data', nargs='?', metavar='DIR', const=None,\n                    help='path to dataset (*deprecated*, use --data-dir)')\nparser.add_argument('--data-dir', metavar='DIR',\n                    help='path to dataset (root dir)')\nparser.add_argument('--dataset', metavar='NAME', default='',\n                    help='dataset type + name (\"<type>/<name>\") (default: ImageFolder or ImageTar if empty)')\nparser.add_argument('--split', metavar='NAME', default='validation',\n                    help='dataset split (default: validation)')\nparser.add_argument('--model', '-m', metavar='MODEL', default='resnet50',\n                    help='model architecture (default: resnet50)')\nparser.add_argument('-j', 
'--workers', default=2, type=int, metavar='N',\n                    help='number of data loading workers (default: 2)')\nparser.add_argument('-b', '--batch-size', default=256, type=int,\n                    metavar='N', help='mini-batch size (default: 256)')\nparser.add_argument('--img-size', default=None, type=int,\n                    metavar='N', help='Input image dimension, uses model default if empty')\nparser.add_argument('--in-chans', type=int, default=None, metavar='N',\n                    help='Image input channels (default: None => 3)')\nparser.add_argument('--input-size', default=None, nargs=3, type=int, metavar='N',\n                    help='Input all image dimensions (d h w, e.g. --input-size 3 224 224), uses model default if empty')\nparser.add_argument('--use-train-size', action='store_true', default=False,\n                    help='force use of train input size, even when test size is specified in pretrained cfg')\nparser.add_argument('--crop-pct', default=None, type=float,\n                    metavar='N', help='Input image center crop pct')\nparser.add_argument('--crop-mode', default=None, type=str,\n                    metavar='N', help='Input image crop mode (squash, border, center). 
help='Override std deviation of dataset')
              help='Model dtype override (non-AMP) (default: float32)')\nparser.add_argument('--fuser', default='', type=str,\n                    help=\"Select jit fuser. One of ('', 'te', 'old', 'nvfuser')\")\nparser.add_argument('--model-kwargs', nargs='*', default={}, action=ParseKwargs)\nparser.add_argument('--torchcompile-mode', type=str, default=None,\n                    help=\"torch.compile mode (default: None).\")\n\nscripting_group = parser.add_mutually_exclusive_group()\nscripting_group.add_argument('--torchscript', default=False, action='store_true',\n                             help='torch.jit.script the full model')\nscripting_group.add_argument('--torchcompile', nargs='?', type=str, default=None, const='inductor',\n                             help=\"Enable compilation w/ specified backend (default: inductor).\")\nscripting_group.add_argument('--aot-autograd', default=False, action='store_true',\n                             help=\"Enable AOT Autograd support.\")\n\nparser.add_argument('--results-dir', type=str, default=None,\n                    help='folder for output results')\nparser.add_argument('--results-file', type=str, default=None,\n                    help='results filename (relative to results-dir)')\nparser.add_argument('--results-format', type=str, nargs='+', default=['csv'],\n                    help='results format (one of \"csv\", \"json\", \"json-split\", \"parquet\")')\nparser.add_argument('--results-separate-col', action='store_true', default=False,\n                    help='separate output columns per result index.')\nparser.add_argument('--topk', default=1, type=int,\n                    metavar='N', help='Top-k to output to CSV')\nparser.add_argument('--fullname', action='store_true', default=False,\n                    help='use full sample name in output (not just basename).')\nparser.add_argument('--filename-col', type=str, default='filename',\n                    help='name for filename / sample name 
parser.add_argument('--label-col', type=str, default='label',\n                    help='name for output label column(s)')\nparser.add_argument('--output-col', type=str, default=None,\n                    help='name for logit/probs output column(s)')\nparser.add_argument('--output-type', type=str, default='prob',\n                    help='output type column (\"prob\" for probabilities, \"logit\" for raw logits)')\nparser.add_argument('--label-type', type=str, default='description',\n                    help='type of label to output, one of \"none\", \"name\", \"description\", \"detail\"')
topk must be set !=0.')\nparser.add_argument('--no-console-results', action='store_true', default=False,\n                    help='disable printing the inference results to the console')\n\n\ndef main():\n    setup_default_logging()\n    args = parser.parse_args()\n    # might as well try to do something useful...\n    args.pretrained = args.pretrained or not args.checkpoint\n\n    if torch.cuda.is_available():\n        torch.backends.cuda.matmul.allow_tf32 = True\n        torch.backends.cudnn.benchmark = True\n\n    device = torch.device(args.device)\n\n    model_dtype = None\n    if args.model_dtype:\n        assert args.model_dtype in ('float32', 'float16', 'bfloat16')\n        model_dtype = getattr(torch, args.model_dtype)\n\n    # resolve AMP arguments based on PyTorch availability\n    amp_autocast = suppress\n    if args.amp:\n        assert model_dtype is None or model_dtype == torch.float32, 'float32 model dtype must be used with AMP'\n        assert args.amp_dtype in ('float16', 'bfloat16')\n        amp_dtype = torch.bfloat16 if args.amp_dtype == 'bfloat16' else torch.float16\n        amp_autocast = partial(torch.autocast, device_type=device.type, dtype=amp_dtype)\n        _logger.info('Running inference in mixed precision with native PyTorch AMP.')\n    else:\n        _logger.info('Running inference in float32. 
AMP not enabled.')\n\n    if args.fuser:\n        set_jit_fuser(args.fuser)\n\n    # create model\n    in_chans = 3\n    if args.in_chans is not None:\n        in_chans = args.in_chans\n    elif args.input_size is not None:\n        in_chans = args.input_size[0]\n\n    model = create_model(\n        args.model,\n        num_classes=args.num_classes,\n        in_chans=in_chans,\n        pretrained=args.pretrained,\n        checkpoint_path=args.checkpoint,\n        **args.model_kwargs,\n    )\n    if args.num_classes is None:\n        assert hasattr(model, 'num_classes'), 'Model must have `num_classes` attr if not set on cmd line/config.'\n        args.num_classes = model.num_classes\n\n    _logger.info(\n        f'Model {args.model} created, param count: {sum([m.numel() for m in model.parameters()])}')\n\n    data_config = resolve_data_config(vars(args), model=model)\n    test_time_pool = False\n    if args.test_pool:\n        model, test_time_pool = apply_test_time_pool(model, data_config)\n\n    model = model.to(device=device, dtype=model_dtype)\n    model.eval()\n    if args.channels_last:\n        model = model.to(memory_format=torch.channels_last)\n\n    if args.torchscript:\n        model = torch.jit.script(model)\n    elif args.torchcompile:\n        assert has_compile, 'A version of torch w/ torch.compile() is required for --compile, possibly a nightly.'\n        torch._dynamo.reset()\n        model = torch.compile(model, backend=args.torchcompile, mode=args.torchcompile_mode)\n    elif args.aot_autograd:\n        assert has_functorch, \"functorch is needed for --aot-autograd\"\n        model = memory_efficient_fusion(model)\n\n    if args.num_gpu > 1:\n        model = torch.nn.DataParallel(model, device_ids=list(range(args.num_gpu)))\n\n    root_dir = args.data or args.data_dir\n    dataset = create_dataset(\n        root=root_dir,\n        name=args.dataset,\n        split=args.split,\n        class_map=args.class_map,\n    )\n\n    if test_time_pool:\n    
    data_config['crop_pct'] = 1.0\n\n    workers = 1 if 'tfds' in args.dataset or 'wds' in args.dataset else args.workers\n    loader = create_loader(\n        dataset,\n        batch_size=args.batch_size,\n        use_prefetcher=True,\n        num_workers=workers,\n        device=device,\n        img_dtype=model_dtype or torch.float32,\n        **data_config,\n    )\n\n    to_label = None\n    if args.label_type in ('name', 'description', 'detail'):\n        imagenet_subset = infer_imagenet_subset(model)\n        if imagenet_subset is not None:\n            dataset_info = ImageNetInfo(imagenet_subset)\n            if args.label_type == 'name':\n                to_label = lambda x: dataset_info.index_to_label_name(x)\n            elif args.label_type == 'detail':\n                to_label = lambda x: dataset_info.index_to_description(x, detailed=True)\n            else:\n                to_label = lambda x: dataset_info.index_to_description(x)\n            to_label = np.vectorize(to_label)\n        else:\n            _logger.error(\"Cannot deduce ImageNet subset from model, no labelling will be performed.\")\n\n    top_k = min(args.topk, args.num_classes)\n    batch_time = AverageMeter()\n    end = time.time()\n    all_indices = []\n    all_labels = []\n    all_outputs = []\n    use_probs = args.output_type == 'prob'\n    with torch.inference_mode():\n        for batch_idx, (input, _) in enumerate(loader):\n\n            with amp_autocast():\n                output = model(input)\n\n            if use_probs:\n                output = output.softmax(-1)\n\n            if top_k:\n                output, indices = output.topk(top_k)\n                np_indices = indices.cpu().numpy()\n                if args.include_index:\n                    all_indices.append(np_indices)\n                if to_label is not None:\n                    np_labels = to_label(np_indices)\n                    all_labels.append(np_labels)\n\n            
all_outputs.append(output.float().cpu().numpy())\n\n            # measure elapsed time\n            batch_time.update(time.time() - end)\n            end = time.time()\n\n            if batch_idx % args.log_freq == 0:\n                _logger.info('Predict: [{0}/{1}] Time {batch_time.val:.3f} ({batch_time.avg:.3f})'.format(\n                    batch_idx, len(loader), batch_time=batch_time))\n\n    all_indices = np.concatenate(all_indices, axis=0) if all_indices else None\n    all_labels = np.concatenate(all_labels, axis=0) if all_labels else None\n    all_outputs = np.concatenate(all_outputs, axis=0).astype(np.float32)\n    filenames = loader.dataset.filenames(basename=not args.fullname)\n\n    output_col = args.output_col or ('prob' if use_probs else 'logit')\n    data_dict = {args.filename_col: filenames}\n    if args.results_separate_col and all_outputs.shape[-1] > 1:\n        if all_indices is not None:\n            for i in range(all_indices.shape[-1]):\n                data_dict[f'{args.index_col}_{i}'] = all_indices[:, i]\n        if all_labels is not None:\n            for i in range(all_labels.shape[-1]):\n                data_dict[f'{args.label_col}_{i}'] = all_labels[:, i]\n        for i in range(all_outputs.shape[-1]):\n            data_dict[f'{output_col}_{i}'] = all_outputs[:, i]\n    else:\n        if all_indices is not None:\n            if all_indices.shape[-1] == 1:\n                all_indices = all_indices.squeeze(-1)\n            data_dict[args.index_col] = list(all_indices)\n        if all_labels is not None:\n            if all_labels.shape[-1] == 1:\n                all_labels = all_labels.squeeze(-1)\n            data_dict[args.label_col] = list(all_labels)\n        if all_outputs.shape[-1] == 1:\n            all_outputs = all_outputs.squeeze(-1)\n        data_dict[output_col] = list(all_outputs)\n\n    df = pd.DataFrame(data=data_dict)\n\n    results_filename = args.results_file\n    if results_filename:\n        filename_no_ext, ext = 
os.path.splitext(results_filename)\n        if ext and ext in _FMT_EXT.values():\n            # if filename provided with one of expected ext,\n            # remove it as it will be added back\n            results_filename = filename_no_ext\n    else:\n        # base default filename on model name + img-size\n        img_size = data_config[\"input_size\"][1]\n        results_filename = f'{args.model}-{img_size}'\n\n    if args.results_dir:\n        os.makedirs(args.results_dir, exist_ok=True)\n        results_filename = os.path.join(args.results_dir, results_filename)\n\n    for fmt in args.results_format:\n        save_results(df, results_filename, fmt)\n\n    if not args.no_console_results:\n        print(f'--result')\n        print(df.set_index(args.filename_col).to_json(orient='index', indent=4))\n\n\ndef save_results(df, results_filename, results_format='csv', filename_col='filename'):\n    np.set_printoptions(threshold=maxsize)\n    results_filename += _FMT_EXT[results_format]\n    if results_format == 'parquet':\n        df.set_index(filename_col).to_parquet(results_filename)\n    elif results_format == 'json':\n        df.set_index(filename_col).to_json(results_filename, indent=4, orient='index')\n    elif results_format == 'json-records':\n        df.to_json(results_filename, lines=True, orient='records')\n    elif results_format == 'json-split':\n        df.to_json(results_filename, indent=4, orient='split', index=False)\n    else:\n        df.to_csv(results_filename, index=False)\n\n\nif __name__ == '__main__':\n    main()\n"
  },
  {
    "path": "onnx_export.py",
    "content": "\"\"\" ONNX export script\n\nExport PyTorch models as ONNX graphs.\n\nThis export script originally started as an adaptation of code snippets found at\nhttps://pytorch.org/tutorials/advanced/super_resolution_with_onnxruntime.html\n\nThe default parameters work with PyTorch 1.6 and ONNX 1.7 and produce an optimal ONNX graph\nfor hosting in the ONNX runtime (see onnx_validate.py). To export an ONNX model compatible\nwith caffe2 (see caffe2_benchmark.py and caffe2_validate.py), the --keep-init and --aten-fallback\nflags are currently required.\n\nOlder versions of PyTorch/ONNX (tested PyTorch 1.4, ONNX 1.5) do not need extra flags for\ncaffe2 compatibility, but they produce a model that isn't as fast running on ONNX runtime.\n\nMost new release of PyTorch and ONNX cause some sort of breakage in the export / usage of ONNX models.\nPlease do your research and search ONNX and PyTorch issue tracker before asking me. Thanks.\n\nCopyright 2020 Ross Wightman\n\"\"\"\nimport argparse\n\nimport timm\nfrom timm.utils.model import reparameterize_model\nfrom timm.utils.onnx import onnx_export\n\nparser = argparse.ArgumentParser(description='PyTorch ImageNet Validation')\nparser.add_argument('output', metavar='ONNX_FILE',\n                    help='output model filename')\nparser.add_argument('--model', '-m', metavar='MODEL', default='mobilenetv3_large_100',\n                    help='model architecture (default: mobilenetv3_large_100)')\nparser.add_argument('--opset', type=int, default=None,\n                    help='ONNX opset to use (default: 10)')\nparser.add_argument('--keep-init', action='store_true', default=False,\n                    help='Keep initializers as input. Needed for Caffe2 compatible export in newer PyTorch/ONNX.')\nparser.add_argument('--aten-fallback', action='store_true', default=False,\n                    help='Fallback to ATEN ops. 
Helps fix AdaptiveAvgPool issue with Caffe2 in newer PyTorch/ONNX.')\nparser.add_argument('--dynamic-size', action='store_true', default=False,\n                    help='Export model width dynamic width/height. Not recommended for \"tf\" models with SAME padding.')\nparser.add_argument('--check-forward', action='store_true', default=False,\n                    help='Do a full check of torch vs onnx forward after export.')\nparser.add_argument('-b', '--batch-size', default=1, type=int,\n                    metavar='N', help='mini-batch size (default: 1)')\nparser.add_argument('--img-size', default=None, type=int,\n                    metavar='N', help='Input image dimension, uses model default if empty')\nparser.add_argument('--input-size', default=None, nargs=3, type=int, metavar='N',\n                    help='Input all image dimensions (d h w, e.g. --input-size 3 224 224), uses model default if empty')\nparser.add_argument('--mean', type=float, nargs='+', default=None, metavar='MEAN',\n                    help='Override mean pixel value of dataset')\nparser.add_argument('--std', type=float,  nargs='+', default=None, metavar='STD',\n                    help='Override std deviation of of dataset')\nparser.add_argument('--num-classes', type=int, default=None,\n                    help='Number classes in dataset')\nparser.add_argument('--checkpoint', default='', type=str, metavar='PATH',\n                    help='path to checkpoint (default: none)')\nparser.add_argument('--reparam', default=False, action='store_true',\n                    help='Reparameterize model')\nparser.add_argument('--training', default=False, action='store_true',\n                    help='Export in training mode (default is eval)')\nparser.add_argument('--verbose', default=False, action='store_true',\n                    help='Extra stdout output')\nparser.add_argument('--dynamo', default=False, action='store_true',\n                    help='Use torch dynamo export.')\n\ndef main():\n    
args = parser.parse_args()\n\n    args.pretrained = True\n    if args.checkpoint:\n        args.pretrained = False\n\n    print(\"==> Creating PyTorch {} model\".format(args.model))\n    # NOTE exportable=True flag disables autofn/jit scripted activations and uses Conv2dSameExport layers\n    # for models using SAME padding\n    model = timm.create_model(\n        args.model,\n        num_classes=args.num_classes,\n        in_chans=3,\n        pretrained=args.pretrained,\n        checkpoint_path=args.checkpoint,\n        exportable=True,\n    )\n\n    if args.reparam:\n        model = reparameterize_model(model)\n\n    if args.input_size is not None:\n        assert len(args.input_size) == 3, 'input-size should be N H W (channels, height, width)'\n        input_size = args.input_size\n    elif args.img_size is not None:\n        input_size = (3, args.img_size, args.img_size)\n    else:\n        input_size = None\n\n    onnx_export(\n        model,\n        args.output,\n        opset=args.opset,\n        dynamic_size=args.dynamic_size,\n        aten_fallback=args.aten_fallback,\n        keep_initializers=args.keep_init,\n        check_forward=args.check_forward,\n        training=args.training,\n        verbose=args.verbose,\n        use_dynamo=args.dynamo,\n        input_size=input_size,\n        batch_size=args.batch_size,\n    )\n\n\nif __name__ == '__main__':\n    main()\n"
  },
  {
    "path": "onnx_validate.py",
    "content": "\"\"\" ONNX-runtime validation script\n\nThis script was created to verify accuracy and performance of exported ONNX\nmodels running with the onnxruntime. It utilizes the PyTorch dataloader/processing\npipeline for a fair comparison against the originals.\n\nCopyright 2020 Ross Wightman\n\"\"\"\nimport argparse\nimport numpy as np\nimport onnxruntime\nfrom timm.data import create_loader, resolve_data_config, create_dataset\nfrom timm.utils import AverageMeter\nimport time\n\nparser = argparse.ArgumentParser(description='ONNX Validation')\nparser.add_argument('data', metavar='DIR',\n                    help='path to dataset')\nparser.add_argument('--onnx-input', default='', type=str, metavar='PATH',\n                    help='path to onnx model/weights file')\nparser.add_argument('--onnx-output-opt', default='', type=str, metavar='PATH',\n                    help='path to output optimized onnx graph')\nparser.add_argument('--profile', action='store_true', default=False,\n                    help='Enable profiler output.')\nparser.add_argument('-j', '--workers', default=2, type=int, metavar='N',\n                    help='number of data loading workers (default: 2)')\nparser.add_argument('-b', '--batch-size', default=256, type=int,\n                    metavar='N', help='mini-batch size (default: 256)')\nparser.add_argument('--img-size', default=None, type=int,\n                    metavar='N', help='Input image dimension, uses model default if empty')\nparser.add_argument('--mean', type=float, nargs='+', default=None, metavar='MEAN',\n                    help='Override mean pixel value of dataset')\nparser.add_argument('--std', type=float,  nargs='+', default=None, metavar='STD',\n                    help='Override std deviation of of dataset')\nparser.add_argument('--crop-pct', type=float, default=None, metavar='PCT',\n                    help='Override default crop pct of 0.875')\nparser.add_argument('--interpolation', default='', type=str, 
metavar='NAME',\n                    help='Image resize interpolation type (overrides model)')\nparser.add_argument('--print-freq', '-p', default=10, type=int,\n                    metavar='N', help='print frequency (default: 10)')\n\n\ndef main():\n    args = parser.parse_args()\n    args.gpu_id = 0\n\n    # Set graph optimization level\n    sess_options = onnxruntime.SessionOptions()\n    sess_options.graph_optimization_level = onnxruntime.GraphOptimizationLevel.ORT_ENABLE_ALL\n    if args.profile:\n        sess_options.enable_profiling = True\n    if args.onnx_output_opt:\n        sess_options.optimized_model_filepath = args.onnx_output_opt\n\n    session = onnxruntime.InferenceSession(args.onnx_input, sess_options)\n\n    data_config = resolve_data_config(vars(args))\n    loader = create_loader(\n        create_dataset('', args.data),\n        input_size=data_config['input_size'],\n        batch_size=args.batch_size,\n        use_prefetcher=False,\n        interpolation=data_config['interpolation'],\n        mean=data_config['mean'],\n        std=data_config['std'],\n        num_workers=args.workers,\n        crop_pct=data_config['crop_pct']\n    )\n\n    input_name = session.get_inputs()[0].name\n\n    batch_time = AverageMeter()\n    top1 = AverageMeter()\n    top5 = AverageMeter()\n    end = time.time()\n    for i, (input, target) in enumerate(loader):\n        # run the net and return prediction\n        output = session.run([], {input_name: input.data.numpy()})\n        output = output[0]\n\n        # measure accuracy and record loss\n        prec1, prec5 = accuracy_np(output, target.numpy())\n        top1.update(prec1.item(), input.size(0))\n        top5.update(prec5.item(), input.size(0))\n\n        # measure elapsed time\n        batch_time.update(time.time() - end)\n        end = time.time()\n\n        if i % args.print_freq == 0:\n            print(\n                f'Test: [{i}/{len(loader)}]\\t'\n                f'Time {batch_time.val:.3f} 
({batch_time.avg:.3f}, {input.size(0) / batch_time.avg:.3f}/s, '\n                f'{100 * batch_time.avg / input.size(0):.3f} ms/sample) \\t'\n                f'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\\t'\n                f'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'\n            )\n\n    print(f' * Prec@1 {top1.avg:.3f} ({100-top1.avg:.3f}) Prec@5 {top5.avg:.3f} ({100.-top5.avg:.3f})')\n\n\ndef accuracy_np(output, target):\n    max_indices = np.argsort(output, axis=1)[:, ::-1]\n    top5 = 100 * np.equal(max_indices[:, :5], target[:, np.newaxis]).sum(axis=1).mean()\n    top1 = 100 * np.equal(max_indices[:, 0], target).mean()\n    return top1, top5\n\n\nif __name__ == '__main__':\n    main()\n"
  },
  {
    "path": "pyproject.toml",
    "content": "[build-system]\nrequires = [\"pdm-backend\"]\nbuild-backend = \"pdm.backend\"\n\n[project]\nname = \"timm\"\nauthors = [\n    {name = \"Ross Wightman\", email = \"ross@huggingface.co\"},\n]\ndescription = \"PyTorch Image Models\"\nreadme = \"README.md\"\nrequires-python = \">=3.8\"\nkeywords = [\"pytorch\", \"image-classification\"]\nlicense = {text = \"Apache-2.0\"}\nclassifiers = [\n        'Development Status :: 5 - Production/Stable',\n        'Intended Audience :: Education',\n        'Intended Audience :: Science/Research',\n        'License :: OSI Approved :: Apache Software License',\n        'Programming Language :: Python :: 3.8',\n        'Programming Language :: Python :: 3.9',\n        'Programming Language :: Python :: 3.10',\n        'Programming Language :: Python :: 3.11',\n        'Programming Language :: Python :: 3.12',\n        'Topic :: Scientific/Engineering',\n        'Topic :: Scientific/Engineering :: Artificial Intelligence',\n        'Topic :: Software Development',\n        'Topic :: Software Development :: Libraries',\n        'Topic :: Software Development :: Libraries :: Python Modules',\n]\ndependencies = [\n        'torch',\n        'torchvision',\n        'pyyaml',\n        'huggingface_hub',\n        'safetensors',\n]\ndynamic = [\"version\"]\n\n[project.urls]\nhomepage = \"https://github.com/huggingface/pytorch-image-models\"\ndocumentation = \"https://huggingface.co/docs/timm/en/index\"\nrepository = \"https://github.com/huggingface/pytorch-image-models\"\n\n[tool.pdm.dev-dependencies]\ntest = [\n        'pytest',\n        'pytest-timeout',\n        'pytest-xdist',\n        'pytest-forked',\n        'expecttest',\n]\n\n[tool.pdm.version]\nsource = \"file\"\npath = \"timm/version.py\"\n\n[tool.pytest.ini_options]\ntestpaths = ['tests']\nmarkers = [\n    \"base: marker for model tests using the basic setup\",\n    \"cfg: marker for model tests checking the config\",\n    \"torchscript: marker for model tests using 
torchscript\",\n    \"features: marker for model tests checking feature extraction\",\n    \"fxforward: marker for model tests using torch fx (only forward)\",\n    \"fxbackward: marker for model tests using torch fx (only backward)\",\n]"
  },
  {
    "path": "requirements-dev.txt",
    "content": "pytest\npytest-timeout\npytest-xdist\npytest-forked\nexpecttest\n"
  },
  {
    "path": "requirements.txt",
    "content": "torch>=1.7\ntorchvision\npyyaml\nhuggingface_hub>=0.17.0\nsafetensors>=0.2\nnumpy\n"
  },
  {
    "path": "results/README.md",
    "content": "# Validation and Benchmark Results\n\nThis folder contains validation and benchmark results for the models in this collection. Validation scores are currently only run for models with pretrained weights and ImageNet-1k heads, benchmark numbers are run for all.\n\n## Datasets\n\nThere are currently results for the ImageNet validation set and 5 additional test / label sets.\n\nThe test set results include rank and top-1/top-5 differences from clean validation. For the \"Real Labels\", ImageNetV2, and Sketch test sets, the differences were calculated against the full 1000 class ImageNet-1k validation set. For both the Adversarial and Rendition sets, the differences were calculated against 'clean' runs on the ImageNet-1k validation set with the same 200 classes used in each test set respectively.\n\n### ImageNet Validation - [`results-imagenet.csv`](results-imagenet.csv)\n\nThe standard 50,000 image ImageNet-1k validation set. Model selection during training utilizes this validation set, so it is not a true test set. Question: Does anyone have the official ImageNet-1k test set classification labels now that challenges are done?\n\n* Source: http://image-net.org/challenges/LSVRC/2012/index\n* Paper: \"ImageNet Large Scale Visual Recognition Challenge\" - https://arxiv.org/abs/1409.0575\n\n### ImageNet-\"Real Labels\" - [`results-imagenet-real.csv`](results-imagenet-real.csv)\n\nThe usual ImageNet-1k validation set with a fresh new set of labels intended to improve on mistakes in the original annotation process.\n\n* Source: https://github.com/google-research/reassessed-imagenet\n* Paper: \"Are we done with ImageNet?\" - https://arxiv.org/abs/2006.07159\n\n### ImageNetV2 Matched Frequency - [`results-imagenetv2-matched-frequency.csv`](results-imagenetv2-matched-frequency.csv)\n\nAn ImageNet test set of 10,000 images sampled from new images roughly 10 years after the original. 
Care was taken to replicate the original ImageNet curation/sampling process.\n\n* Source: https://github.com/modestyachts/ImageNetV2\n* Paper: \"Do ImageNet Classifiers Generalize to ImageNet?\" - https://arxiv.org/abs/1902.10811\n\n### ImageNet-Sketch - [`results-sketch.csv`](results-sketch.csv)\n\n50,000 non photographic (or photos of such) images (sketches, doodles, mostly monochromatic) covering all 1000 ImageNet classes.\n\n* Source: https://github.com/HaohanWang/ImageNet-Sketch\n* Paper: \"Learning Robust Global Representations by Penalizing Local Predictive Power\" - https://arxiv.org/abs/1905.13549\n\n### ImageNet-Adversarial - [`results-imagenet-a.csv`](results-imagenet-a.csv)\n\nA collection of 7500 images covering 200 of the 1000 ImageNet classes. Images are naturally occurring adversarial examples that confuse typical ImageNet classifiers. This is a challenging dataset, your typical ResNet-50 will score 0% top-1.\n\nFor clean validation with same 200 classes, see [`results-imagenet-a-clean.csv`](results-imagenet-a-clean.csv) \n\n* Source: https://github.com/hendrycks/natural-adv-examples\n* Paper: \"Natural Adversarial Examples\" - https://arxiv.org/abs/1907.07174\n\n### ImageNet-Rendition - [`results-imagenet-r.csv`](results-imagenet-r.csv)\n\nRenditions of 200 ImageNet classes resulting in 30,000 images for testing robustness.\n\nFor clean validation with same 200 classes, see [`results-imagenet-r-clean.csv`](results-imagenet-r-clean.csv) \n\n* Source: https://github.com/hendrycks/imagenet-r\n* Paper: \"The Many Faces of Robustness\" - https://arxiv.org/abs/2006.16241\n\n### TODO\n* Explore adding a reduced version of ImageNet-C (Corruptions) and ImageNet-P (Perturbations) from https://github.com/hendrycks/robustness. The originals are huge and image size specific.\n\n\n## Benchmark\n\nCSV files with a `model_benchmark` prefix include benchmark numbers for models on various accelerators with different precision. 
Currently only run on RTX 3090 w/ AMP for inference, I intend to add more in the future.\n\n## Metadata\n\nCSV files with `model_metadata` prefix contain extra information about the source training, currently the pretraining dataset and technique (ie distillation, SSL, WSL, etc). Eventually I'd like to have metadata about augmentation, regularization, etc. but that will be a challenge to source consistently. \n"
  },
  {
    "path": "results/benchmark-infer-amp-nchw-pt113-cu117-rtx3090.csv",
    "content": "model,infer_samples_per_sec,infer_step_time,infer_batch_size,infer_img_size,infer_gmacs,infer_macts,param_count\r\ntinynet_e,49277.65,20.77,1024,106,0.03,0.69,2.04\r\nmobilenetv3_small_050,45562.75,22.464,1024,224,0.03,0.92,1.59\r\nlcnet_035,41026.68,24.949,1024,224,0.03,1.04,1.64\r\nlcnet_050,37575.13,27.242,1024,224,0.05,1.26,1.88\r\nmobilenetv3_small_075,33062.39,30.961,1024,224,0.05,1.3,2.04\r\nmobilenetv3_small_100,30012.26,34.109,1024,224,0.06,1.42,2.54\r\ntf_mobilenetv3_small_minimal_100,28698.14,35.672,1024,224,0.06,1.41,2.04\r\ntf_mobilenetv3_small_075,27407.51,37.352,1024,224,0.05,1.3,2.04\r\ntinynet_d,27236.47,37.585,1024,152,0.05,1.42,2.34\r\ntf_mobilenetv3_small_100,25103.65,40.781,1024,224,0.06,1.42,2.54\r\nlcnet_075,24140.95,42.406,1024,224,0.1,1.99,2.36\r\nmnasnet_small,20706.43,49.443,1024,224,0.07,2.16,2.03\r\nlevit_128s,20595.72,49.709,1024,224,0.31,1.88,7.78\r\nlcnet_100,19684.75,52.01,1024,224,0.16,2.52,2.95\r\nmobilenetv2_035,18358.82,55.767,1024,224,0.07,2.86,1.68\r\nregnetx_002,18244.04,56.117,1024,224,0.2,2.16,2.68\r\nghostnet_050,17564.96,58.287,1024,224,0.05,1.77,2.59\r\nregnety_002,17006.07,60.202,1024,224,0.2,2.17,3.16\r\nmnasnet_050,15925.32,64.29,1024,224,0.11,3.07,2.22\r\nvit_tiny_r_s16_p8_224,15068.38,67.946,1024,224,0.44,2.06,6.34\r\nmobilenetv2_050,14843.74,68.974,1024,224,0.1,3.64,1.97\r\ntinynet_c,14634.69,69.959,1024,184,0.11,2.87,2.46\r\nsemnasnet_050,14248.78,71.855,1024,224,0.11,3.44,2.08\r\nlevit_128,14164.26,72.284,1024,224,0.41,2.71,9.21\r\nvit_small_patch32_224,13811.36,74.131,1024,224,1.15,2.5,22.88\r\nmixer_s32_224,13352.85,76.677,1024,224,1.0,2.28,19.1\r\ncs3darknet_focus_s,12798.44,79.999,1024,256,0.69,2.7,3.27\r\nlcnet_150,12783.12,80.094,1024,224,0.34,3.79,4.5\r\ncs3darknet_s,12395.11,82.602,1024,256,0.72,2.97,3.28\r\nregnetx_004,12366.39,82.791,1024,224,0.4,3.14,5.16\r\nmobilenetv3_large_075,12001.32,85.313,1024,224,0.16,4.0,3.99\r\nlevit_192,11882.81,86.163,1024,224,0.66,3.2,10.95\r\nresnet10t,1161
5.84,88.145,1024,224,1.1,2.43,5.44\r\nese_vovnet19b_slim_dw,11539.4,88.729,1024,224,0.4,5.28,1.9\r\ngernet_s,11496.77,89.058,1024,224,0.75,2.65,8.17\r\nmobilenetv3_rw,10873.77,94.16,1024,224,0.23,4.41,5.48\r\nmobilenetv3_large_100,10705.06,95.645,1024,224,0.23,4.41,5.48\r\nhardcorenas_a,10554.34,97.012,1024,224,0.23,4.38,5.26\r\ntf_mobilenetv3_large_075,10511.12,97.41,1024,224,0.16,4.0,3.99\r\ntf_mobilenetv3_large_minimal_100,10371.16,98.725,1024,224,0.22,4.4,3.92\r\nmnasnet_075,10345.17,98.972,1024,224,0.23,4.77,3.17\r\nhardcorenas_b,9695.74,105.601,1024,224,0.26,5.09,5.18\r\nregnety_004,9655.22,106.046,1024,224,0.41,3.89,4.34\r\nghostnet_100,9483.99,107.96,1024,224,0.15,3.55,5.18\r\nhardcorenas_c,9481.05,107.994,1024,224,0.28,5.01,5.52\r\ntf_mobilenetv3_large_100,9456.79,108.271,1024,224,0.23,4.41,5.48\r\nregnetx_006,9408.22,108.83,1024,224,0.61,3.98,6.2\r\nmobilenetv2_075,9313.88,109.932,1024,224,0.22,5.86,2.64\r\ntinynet_b,9291.99,110.191,1024,188,0.21,4.44,3.73\r\nmnasnet_b1,9286.4,110.258,1024,224,0.33,5.46,4.38\r\nmnasnet_100,9263.52,110.53,1024,224,0.33,5.46,4.38\r\ngluon_resnet18_v1b,9078.31,112.785,1024,224,1.82,2.48,11.69\r\nsemnasnet_075,9069.42,112.895,1024,224,0.23,5.54,2.91\r\nresnet18,9045.63,113.192,1024,224,1.82,2.48,11.69\r\nssl_resnet18,9045.4,113.196,1024,224,1.82,2.48,11.69\r\nswsl_resnet18,9040.4,113.258,1024,224,1.82,2.48,11.69\r\nlevit_256,8921.47,114.768,1024,224,1.13,4.23,18.89\r\nhardcorenas_d,8879.46,115.311,1024,224,0.3,4.93,7.5\r\nregnety_006,8666.48,118.144,1024,224,0.61,4.33,6.06\r\nseresnet18,8542.99,119.851,1024,224,1.82,2.49,11.78\r\nmobilenetv2_100,8507.29,120.356,1024,224,0.31,6.68,3.5\r\nspnasnet_100,8342.04,122.741,1024,224,0.35,6.03,4.42\r\nlegacy_seresnet18,8310.8,123.202,1024,224,1.82,2.49,11.78\r\nsemnasnet_100,8284.16,123.599,1024,224,0.32,6.23,3.89\r\nmnasnet_a1,8283.57,123.607,1024,224,0.32,6.23,3.89\r\nregnetx_008,7852.75,130.39,1024,224,0.81,5.15,7.26\r\nhardcorenas_f,7809.07,131.117,1024,224,0.35,5.57,8.2\r\nhardcore
nas_e,7730.97,132.444,1024,224,0.35,5.65,8.07\r\nefficientnet_lite0,7722.75,132.584,1024,224,0.4,6.74,4.65\r\nlevit_256d,7689.03,133.165,1024,224,1.4,4.93,26.21\r\nxcit_nano_12_p16_224_dist,7674.8,133.413,1024,224,0.56,4.17,3.05\r\nxcit_nano_12_p16_224,7670.11,133.492,1024,224,0.56,4.17,3.05\r\nresnet18d,7636.48,134.082,1024,224,2.06,3.29,11.71\r\nghostnet_130,7625.58,134.274,1024,224,0.24,4.6,7.36\r\ntf_efficientnetv2_b0,7614.25,134.473,1024,224,0.73,4.77,7.14\r\nese_vovnet19b_slim,7588.4,134.932,1024,224,1.69,3.52,3.17\r\ndeit_tiny_distilled_patch16_224,7449.3,137.451,1024,224,1.27,6.01,5.91\r\ndeit_tiny_patch16_224,7398.73,138.391,1024,224,1.26,5.97,5.72\r\nvit_tiny_patch16_224,7390.78,138.538,1024,224,1.26,5.97,5.72\r\nregnety_008,7366.88,138.989,1024,224,0.81,5.25,6.26\r\ntinynet_a,7358.6,139.145,1024,192,0.35,5.41,6.19\r\ndla46_c,7311.64,140.038,1024,224,0.58,4.5,1.3\r\nfbnetc_100,7303.94,140.187,1024,224,0.4,6.51,5.57\r\nmobilevitv2_050,7248.37,141.262,1024,256,0.48,8.04,1.37\r\ntf_efficientnet_lite0,6816.26,150.218,1024,224,0.4,6.74,4.65\r\npit_ti_distilled_224,6788.49,150.832,1024,224,0.71,6.23,5.1\r\npit_ti_224,6762.99,151.401,1024,224,0.7,6.19,4.85\r\nefficientnet_b0,6687.26,153.115,1024,224,0.4,6.75,5.29\r\nvisformer_tiny,6618.81,154.698,1024,224,1.27,5.72,10.32\r\nrexnet_100,6608.65,154.937,1024,224,0.41,7.44,4.8\r\nmnasnet_140,6580.58,155.597,1024,224,0.6,7.71,7.12\r\nefficientnet_b1_pruned,6513.48,157.201,1024,240,0.4,6.21,6.33\r\nrexnetr_100,6491.35,157.737,1024,224,0.43,7.72,4.88\r\nmobilenetv2_110d,6395.98,160.089,1024,224,0.45,8.71,4.52\r\nresnet14t,6341.58,161.462,1024,224,1.69,5.8,10.08\r\nregnetz_005,6208.75,164.916,1024,224,0.52,5.86,7.12\r\ndla46x_c,6145.64,166.61,1024,224,0.54,5.66,1.07\r\nnf_regnet_b0,6055.0,169.104,1024,256,0.64,5.58,8.76\r\ntf_efficientnet_b0,5992.76,170.862,1024,224,0.4,6.75,5.29\r\nhrnet_w18_small,5908.15,173.308,1024,224,1.61,5.72,13.19\r\nedgenext_xx_small,5886.07,173.957,1024,288,0.33,4.21,1.33\r\nsemnasnet_140,5856.
63,174.833,1024,224,0.6,8.87,6.11\r\nresnetblur18,5839.81,175.336,1024,224,2.34,3.39,11.69\r\nese_vovnet19b_dw,5825.11,175.779,1024,224,1.34,8.25,6.54\r\ndla60x_c,5790.89,176.817,1024,224,0.59,6.01,1.32\r\nmobilenetv2_140,5780.41,177.139,1024,224,0.6,9.57,6.11\r\nskresnet18,5648.81,181.265,1024,224,1.82,3.24,11.96\r\nmobilevit_xxs,5528.18,185.22,1024,256,0.42,8.34,1.27\r\nefficientnet_b0_gn,5401.88,189.551,1024,224,0.42,6.75,5.29\r\nconvnext_atto,5364.13,190.886,1024,288,0.91,6.3,3.7\r\ngluon_resnet34_v1b,5344.34,191.593,1024,224,3.67,3.74,21.8\r\nresnet34,5335.05,191.926,1024,224,3.67,3.74,21.8\r\nefficientnet_lite1,5334.12,191.959,1024,240,0.62,10.14,5.42\r\ntv_resnet34,5332.7,192.011,1024,224,3.67,3.74,21.8\r\nvit_base_patch32_224,5287.0,193.67,1024,224,4.41,5.01,88.22\r\nvit_base_patch32_clip_224,5281.4,193.877,1024,224,4.41,5.01,88.22\r\nlevit_384,5276.74,194.047,1024,224,2.36,6.26,39.13\r\npit_xs_distilled_224,5241.4,195.357,1024,224,1.41,7.76,11.0\r\npit_xs_224,5237.09,195.517,1024,224,1.4,7.71,10.62\r\nselecsls42,5225.99,195.932,1024,224,2.94,4.62,30.35\r\nselecsls42b,5201.55,196.853,1024,224,2.98,4.62,32.46\r\ngernet_m,5124.67,199.807,1024,224,3.02,5.24,21.14\r\npvt_v2_b0,5122.72,199.882,1024,224,0.57,7.99,3.67\r\ntf_efficientnetv2_b1,5122.21,199.903,1024,240,1.21,7.34,8.14\r\nmixnet_s,5079.84,201.57,1024,224,0.25,6.25,4.13\r\nconvnext_atto_ols,5062.64,202.255,1024,288,0.96,6.8,3.7\r\nseresnet34,5028.88,203.611,1024,224,3.67,3.74,21.96\r\nrexnetr_130,5003.96,204.626,1024,224,0.68,9.81,7.61\r\nfbnetv3_b,5003.0,204.666,1024,256,0.55,9.1,8.6\r\nmixer_b32_224,4982.51,205.508,1024,224,3.24,6.29,60.29\r\nxcit_tiny_12_p16_224_dist,4879.26,209.853,1024,224,1.24,6.29,6.72\r\nlegacy_seresnet34,4875.12,210.034,1024,224,3.67,3.74,21.96\r\nxcit_tiny_12_p16_224,4870.16,210.244,1024,224,1.24,6.29,6.72\r\nresnet34d,4834.78,211.786,1024,224,3.91,4.54,21.82\r\ntf_efficientnet_lite1,4822.03,212.348,1024,240,0.62,10.14,5.42\r\nresnet26,4794.98,213.545,1024,224,2.36,7.35,16.0\r
\nmobilenetv2_120d,4786.27,213.934,1024,224,0.69,11.97,5.83\r\nrexnet_130,4770.1,214.659,1024,224,0.68,9.71,7.56\r\nefficientnet_b0_g16_evos,4743.69,215.854,1024,224,1.01,7.42,8.11\r\nefficientnet_es,4736.89,216.163,1024,224,1.81,8.73,5.44\r\nefficientnet_es_pruned,4735.25,216.239,1024,224,1.81,8.73,5.44\r\ntf_mixnet_s,4735.17,216.242,1024,224,0.25,6.25,4.13\r\ngmlp_ti16_224,4709.0,217.445,1024,224,1.34,7.55,5.87\r\nconvnext_femto,4672.08,219.162,1024,288,1.3,7.56,5.22\r\nmobilevitv2_075,4638.17,220.764,1024,256,1.05,12.06,2.87\r\nresmlp_12_224,4601.92,222.504,1024,224,3.01,5.5,15.35\r\nresmlp_12_distilled_224,4597.97,222.695,1024,224,3.01,5.5,15.35\r\ngmixer_12_224,4543.02,225.388,1024,224,2.67,7.26,12.7\r\nfbnetv3_d,4532.2,225.927,1024,256,0.68,11.1,10.31\r\ntf_efficientnet_es,4518.93,226.591,1024,224,1.81,8.73,5.44\r\nselecsls60,4510.1,227.034,1024,224,3.59,5.52,30.67\r\nmixer_s16_224,4509.29,227.075,1024,224,3.79,5.97,18.53\r\nregnetx_016,4507.02,227.189,1024,224,1.62,7.93,9.19\r\nselecsls60b,4490.35,228.033,1024,224,3.63,5.52,32.77\r\ncs3darknet_focus_m,4487.64,228.171,1024,288,2.51,6.19,9.3\r\ndla34,4481.03,228.505,1024,224,3.07,5.02,15.74\r\ncrossvit_tiny_240,4476.83,228.722,1024,240,1.57,9.08,7.01\r\nconvnext_femto_ols,4473.25,228.904,1024,288,1.35,8.06,5.23\r\nvit_tiny_r_s16_p8_384,4463.13,229.423,1024,384,1.34,6.49,6.36\r\ncs3darknet_m,4452.94,229.949,1024,288,2.63,6.69,9.31\r\nrepvgg_b0,4433.11,230.978,1024,224,3.41,6.15,15.82\r\nresnet26d,4354.59,235.143,1024,224,2.6,8.15,16.01\r\nrexnetr_150,4349.97,235.392,1024,224,0.89,11.13,9.78\r\nresnetaa34d,4309.77,237.588,1024,224,4.43,5.07,21.82\r\nefficientnet_b2_pruned,4309.58,237.598,1024,260,0.73,9.13,8.31\r\ndarknet17,4296.61,238.316,1024,256,3.26,7.18,14.3\r\nvit_small_patch32_384,4250.58,240.897,1024,384,3.45,8.25,22.92\r\ncrossvit_9_240,4201.98,243.683,1024,240,1.85,9.52,8.55\r\nnf_resnet26,4197.39,243.949,1024,224,2.41,7.35,16.0\r\nefficientnet_b0_g8_gn,4190.39,244.357,1024,224,0.66,6.75,6.56\r\nrexnet_
150,4186.31,244.594,1024,224,0.9,11.21,9.73\r\necaresnet50d_pruned,4182.62,244.81,1024,224,2.53,6.43,19.94\r\nefficientformer_l1,4075.83,251.225,1024,224,1.3,5.53,12.29\r\npoolformer_s12,4050.19,252.815,1024,224,1.82,5.53,11.92\r\nregnety_016,4035.9,253.712,1024,224,1.63,8.04,11.2\r\nefficientnet_lite2,4013.48,255.128,1024,260,0.89,12.9,6.09\r\ncrossvit_9_dagger_240,3992.98,256.437,1024,240,1.99,9.97,8.78\r\nefficientnet_cc_b0_8e,3929.29,260.595,1024,224,0.42,9.42,24.01\r\nefficientnet_cc_b0_4e,3918.01,261.346,1024,224,0.41,9.42,13.31\r\ndarknet21,3914.26,261.596,1024,256,3.93,7.47,20.86\r\nefficientnet_b1,3876.9,264.116,1024,256,0.77,12.22,7.79\r\ntf_efficientnet_b1,3834.3,267.052,1024,240,0.71,10.88,7.79\r\nresnest14d,3793.21,269.944,1024,224,2.76,7.33,10.61\r\nsedarknet21,3784.73,270.549,1024,256,3.93,7.47,20.95\r\nresnext26ts,3775.5,271.211,1024,256,2.43,10.52,10.3\r\ntf_efficientnetv2_b2,3727.06,274.735,1024,260,1.72,9.84,10.1\r\nconvnext_pico,3702.78,276.537,1024,288,2.27,10.08,9.05\r\nedgenext_x_small,3692.42,277.311,1024,288,0.68,7.5,2.34\r\ntf_efficientnet_cc_b0_8e,3691.33,277.395,1024,224,0.42,9.42,24.01\r\ndpn48b,3689.99,277.494,1024,224,1.69,8.92,9.13\r\neca_resnext26ts,3675.59,278.583,1024,256,2.43,10.52,10.3\r\nseresnext26ts,3670.33,278.98,1024,256,2.43,10.52,10.39\r\ntf_efficientnet_cc_b0_4e,3665.41,279.357,1024,224,0.41,9.42,13.31\r\ntf_efficientnet_lite2,3662.0,279.618,1024,260,0.89,12.9,6.09\r\nnf_ecaresnet26,3619.99,282.862,1024,224,2.41,7.36,16.0\r\nnf_seresnet26,3618.8,282.955,1024,224,2.41,7.36,17.4\r\ngcresnext26ts,3594.7,284.852,1024,256,2.43,10.53,10.48\r\nmobilevitv2_100,3589.19,213.964,768,256,1.84,16.08,4.9\r\ngernet_l,3556.24,287.933,1024,256,4.57,8.0,31.08\r\nlegacy_seresnext26_32x4d,3545.88,288.774,1024,224,2.49,9.39,16.79\r\nconvnext_pico_ols,3532.27,289.886,1024,288,2.37,10.74,9.06\r\nresnet26t,3503.33,292.28,1024,256,3.35,10.52,16.01\r\nrepvgg_a2,3454.82,296.386,1024,224,5.7,6.26,28.21\r\nmixnet_m,3418.52,299.526,1024,224,0.36,8.19,
5.01\r\nefficientnet_b3_pruned,3356.7,305.049,1024,300,1.04,11.86,9.86\r\nnf_regnet_b1,3352.23,305.456,1024,288,1.02,9.2,10.22\r\necaresnext50t_32x4d,3339.2,306.649,1024,224,2.7,10.09,15.41\r\necaresnext26t_32x4d,3337.18,306.833,1024,224,2.7,10.09,15.41\r\nseresnext26tn_32x4d,3327.66,307.711,1024,224,2.7,10.09,16.81\r\nseresnext26t_32x4d,3327.23,307.751,1024,224,2.7,10.09,16.81\r\nseresnext26d_32x4d,3303.57,309.954,1024,224,2.73,10.19,16.81\r\ntf_mixnet_m,3301.19,310.17,1024,224,0.36,8.19,5.01\r\nconvit_tiny,3286.62,311.554,1024,224,1.26,7.94,5.71\r\nmobilevit_xs,3278.19,234.265,768,256,1.05,16.33,2.32\r\npit_s_224,3268.88,313.245,1024,224,2.88,11.56,23.46\r\npit_s_distilled_224,3266.72,313.452,1024,224,2.9,11.64,24.04\r\nskresnet34,3242.45,315.8,1024,224,3.67,5.13,22.28\r\neca_botnext26ts_256,3224.24,317.583,1024,256,2.46,11.6,10.59\r\necaresnet101d_pruned,3223.88,317.616,1024,224,3.48,7.69,24.88\r\ndeit_small_distilled_patch16_224,3220.79,317.922,1024,224,4.63,12.02,22.44\r\necaresnetlight,3215.57,318.439,1024,224,4.11,8.42,30.16\r\ndeit_small_patch16_224,3209.05,319.085,1024,224,4.61,11.95,22.05\r\nvit_small_patch16_224,3199.98,319.99,1024,224,4.61,11.95,22.05\r\neca_halonext26ts,3173.71,322.639,1024,256,2.44,11.46,10.76\r\nconvnextv2_atto,3162.98,323.733,1024,288,0.91,6.3,3.71\r\nresnetv2_50,3158.28,324.214,1024,224,4.11,11.11,25.55\r\nnf_regnet_b2,3133.63,326.765,1024,272,1.22,9.27,14.31\r\nrexnetr_200,3133.12,245.111,768,224,1.59,15.11,16.52\r\nbotnet26t_256,3123.98,327.772,1024,256,3.32,11.98,12.49\r\ncoat_lite_tiny,3113.54,328.874,1024,224,1.6,11.65,5.72\r\nvit_small_r26_s32_224,3112.34,329.001,1024,224,3.56,9.85,36.43\r\nbat_resnext26ts,3103.95,329.89,1024,256,2.53,12.51,10.73\r\nhalonet26t,3103.39,329.95,1024,256,3.19,11.69,12.48\r\npvt_v2_b1,3095.14,330.828,1024,224,2.12,15.39,14.01\r\ncspresnet50,3063.22,334.278,1024,256,4.54,11.5,21.62\r\nresnet32ts,3055.79,335.09,1024,256,4.63,11.58,17.96\r\nrexnet_200,3051.5,251.668,768,224,1.56,14.91,16.37\r\nlambda_
resnet26t,3046.2,336.144,1024,256,3.02,11.87,10.96\r\nssl_resnet50,3030.48,337.887,1024,224,4.11,11.11,25.56\r\ngluon_resnet50_v1b,3027.43,338.23,1024,224,4.11,11.11,25.56\r\ntv_resnet50,3027.39,338.232,1024,224,4.11,11.11,25.56\r\nswsl_resnet50,3027.07,338.268,1024,224,4.11,11.11,25.56\r\nresnet50,3025.4,338.455,1024,224,4.11,11.11,25.56\r\ndeit3_small_patch16_224_in21ft1k,3023.02,338.721,1024,224,4.61,11.95,22.06\r\ndeit3_small_patch16_224,3017.77,339.312,1024,224,4.61,11.95,22.06\r\ntresnet_m,3006.54,340.578,1024,224,5.74,7.31,31.39\r\nresnet33ts,3005.78,340.665,1024,256,4.76,11.66,19.68\r\nvit_small_resnet26d_224,2994.08,341.995,1024,224,5.07,11.12,63.61\r\nresnetv2_50t,2989.06,342.569,1024,224,4.32,11.82,25.57\r\nregnetx_032,2988.15,342.675,1024,224,3.2,11.37,15.3\r\ndpn68b,2981.13,343.481,1024,224,2.35,10.47,12.61\r\nhrnet_w18_small_v2,2978.67,343.765,1024,224,2.62,9.65,15.6\r\ndpn68,2975.29,344.155,1024,224,2.35,10.47,12.61\r\nresnetv2_50d,2971.15,344.633,1024,224,4.35,11.92,25.57\r\nefficientnet_em,2938.12,348.51,1024,240,3.04,14.34,6.9\r\nvit_base_patch32_plus_256,2934.64,348.925,1024,256,7.79,7.76,119.48\r\ncoat_lite_mini,2921.75,350.462,1024,224,2.0,12.25,11.01\r\ntf_efficientnet_b2,2919.63,350.718,1024,260,1.02,13.83,9.11\r\nseresnet33ts,2919.51,350.732,1024,256,4.76,11.66,19.78\r\neca_resnet33ts,2917.21,351.008,1024,256,4.76,11.66,19.68\r\nhaloregnetz_b,2890.29,354.276,1024,224,1.97,11.94,11.68\r\ncoatnet_pico_rw_224,2884.58,354.98,1024,224,2.05,14.62,10.85\r\ndla60,2883.99,355.049,1024,224,4.26,10.16,22.04\r\ngluon_resnet50_v1c,2872.58,356.463,1024,224,4.35,11.92,25.58\r\nresnet50t,2869.49,356.844,1024,224,4.32,11.82,25.57\r\ngcresnet33ts,2863.36,357.609,1024,256,4.76,11.68,19.88\r\ngluon_resnet50_v1d,2853.24,358.879,1024,224,4.35,11.92,25.58\r\ncspresnet50d,2852.98,358.911,1024,256,4.86,12.55,21.64\r\nresnet50d,2850.55,359.218,1024,224,4.35,11.92,25.58\r\nvovnet39a,2845.31,359.878,1024,224,7.09,6.73,22.6\r\ncspresnet50w,2835.31,361.148,1024,256,5.04,1
2.19,28.12\r\nvgg11,2827.53,362.143,1024,224,7.61,7.44,132.86\r\ntf_efficientnet_em,2826.28,362.303,1024,240,3.04,14.34,6.9\r\nvisformer_small,2818.88,363.251,1024,224,4.88,11.43,40.22\r\nvit_relpos_small_patch16_224,2792.87,366.637,1024,224,4.59,13.05,21.98\r\nvit_relpos_base_patch32_plus_rpn_256,2784.26,367.771,1024,256,7.68,8.01,119.42\r\nvit_srelpos_small_patch16_224,2781.72,368.106,1024,224,4.59,12.16,21.97\r\nresnest26d,2772.97,369.267,1024,224,3.64,9.97,17.07\r\ncs3darknet_focus_l,2770.5,369.596,1024,288,5.9,10.16,21.15\r\nefficientnet_b2a,2767.64,369.979,1024,288,1.12,16.2,9.11\r\nefficientnet_b2,2766.98,370.065,1024,288,1.12,16.2,9.11\r\nese_vovnet39b,2760.12,370.986,1024,224,7.09,6.74,24.57\r\nlegacy_seresnet50,2753.49,371.881,1024,224,3.88,10.6,28.09\r\ndensenet121,2749.79,372.378,1024,224,2.87,6.9,7.98\r\ntv_densenet121,2747.16,372.735,1024,224,2.87,6.9,7.98\r\neca_vovnet39b,2736.53,374.185,1024,224,7.09,6.74,22.6\r\ncoatnet_nano_cc_224,2716.19,376.986,1024,224,2.24,15.02,13.76\r\nconvnextv2_femto,2710.95,377.714,1024,288,1.3,7.56,5.23\r\nresnetv2_50x1_bit_distilled,2704.93,378.554,1024,224,4.23,11.11,25.55\r\nselecsls84,2697.2,379.64,1024,224,5.9,7.57,50.95\r\nflexivit_small,2693.55,380.153,1024,240,5.35,14.18,22.06\r\ntwins_svt_small,2691.25,380.48,1024,224,2.94,13.75,24.06\r\nmixnet_l,2678.25,382.327,1024,224,0.58,10.84,7.33\r\nseresnet50,2674.61,382.848,1024,224,4.11,11.13,28.09\r\nxcit_nano_12_p16_384_dist,2668.39,383.74,1024,384,1.64,12.15,3.05\r\ncs3darknet_l,2649.93,386.412,1024,288,6.16,10.83,21.16\r\ncoatnet_nano_rw_224,2633.36,388.844,1024,224,2.41,15.41,15.14\r\ncoatnext_nano_rw_224,2627.24,389.75,1024,224,2.47,12.8,14.7\r\nxcit_tiny_24_p16_224_dist,2617.14,391.253,1024,224,2.34,11.82,12.12\r\ndensenet121d,2616.98,391.278,1024,224,3.11,7.7,8.0\r\nxcit_tiny_24_p16_224,2614.91,391.584,1024,224,2.34,11.82,12.12\r\nresnet50_gn,2599.07,393.975,1024,224,4.14,11.11,25.56\r\nvit_relpos_small_patch16_rpn_224,2596.73,394.33,1024,224,4.59,13.05,21.97\r\
nres2net50_48w_2s,2593.21,394.865,1024,224,4.18,11.72,25.29\r\nmobilevit_s,2587.93,296.749,768,256,2.03,19.94,5.58\r\nconvnext_nano,2579.36,396.983,1024,288,4.06,13.84,15.59\r\ntf_mixnet_l,2577.4,397.288,1024,224,0.58,10.84,7.33\r\nresnetaa50d,2573.35,397.912,1024,224,5.39,12.44,25.58\r\nvgg11_bn,2556.04,400.607,1024,224,7.62,7.44,132.87\r\nseresnet50t,2550.33,401.504,1024,224,4.32,11.83,28.1\r\necaresnet50d,2544.16,402.478,1024,224,4.35,11.93,25.58\r\ngcvit_xxtiny,2518.13,406.639,1024,224,2.14,15.36,12.0\r\ncs3sedarknet_l,2502.51,409.176,1024,288,6.16,10.83,21.91\r\nresnetrs50,2497.73,409.96,1024,224,4.48,12.14,35.69\r\nmobilevitv2_125,2489.87,308.438,768,256,2.86,20.1,7.48\r\nresnetblur50,2484.87,412.08,1024,224,5.16,12.02,25.56\r\ncspresnext50,2483.24,412.352,1024,256,4.05,15.86,20.57\r\ngluon_resnet50_v1s,2459.02,416.413,1024,224,5.47,13.52,25.68\r\nefficientnet_cc_b1_8e,2458.85,416.443,1024,240,0.75,15.44,39.72\r\nvit_base_resnet26d_224,2458.01,416.584,1024,224,6.97,13.16,101.4\r\ndensenetblur121d,2444.58,418.873,1024,224,3.11,7.9,8.0\r\ntv_resnext50_32x4d,2431.41,421.143,1024,224,4.26,14.4,25.03\r\nssl_resnext50_32x4d,2431.35,421.155,1024,224,4.26,14.4,25.03\r\nswsl_resnext50_32x4d,2430.87,421.236,1024,224,4.26,14.4,25.03\r\nresnext50_32x4d,2429.56,421.462,1024,224,4.26,14.4,25.03\r\ngluon_resnext50_32x4d,2428.35,421.674,1024,224,4.26,14.4,25.03\r\ndla60x,2414.82,424.035,1024,224,3.54,13.8,17.35\r\nefficientnet_lite3,2407.43,212.664,512,300,1.65,21.85,8.2\r\nregnetx_040,2406.98,425.416,1024,224,3.99,12.2,22.12\r\nsemobilevit_s,2404.63,319.371,768,256,2.03,19.95,5.74\r\ngcresnext50ts,2402.57,426.196,1024,256,3.75,15.46,15.67\r\nregnety_040s_gn,2385.11,429.317,1024,224,4.03,12.29,20.65\r\nresnetblur50d,2367.52,432.507,1024,224,5.4,12.82,25.58\r\nvovnet57a,2360.79,433.737,1024,224,8.95,7.52,36.64\r\ntf_efficientnet_cc_b1_8e,2357.71,434.307,1024,240,0.75,15.44,39.72\r\nresmlp_24_distilled_224,2351.85,435.39,1024,224,5.96,10.91,30.02\r\nresmlp_24_224,2345.81,436.50
9,1024,224,5.96,10.91,30.02\r\nres2net50_14w_8s,2341.48,437.317,1024,224,4.21,13.28,25.06\r\ncoatnet_rmlp_nano_rw_224,2340.53,437.494,1024,224,2.62,20.34,15.15\r\nsehalonet33ts,2339.44,328.271,768,256,3.55,14.7,13.69\r\nres2net50_26w_4s,2338.49,437.876,1024,224,4.28,12.61,25.7\r\nconvnext_nano_ols,2328.37,439.779,1024,288,4.38,15.5,15.65\r\nlambda_resnet26rpt_256,2324.88,165.158,384,256,3.16,11.87,10.99\r\ngmixer_24_224,2324.82,440.451,1024,224,5.28,14.45,24.72\r\ngcresnet50t,2321.78,441.028,1024,256,5.42,14.67,25.9\r\nresnext50d_32x4d,2317.05,441.929,1024,224,4.5,15.2,25.05\r\nresnest50d_1s4x24d,2309.9,443.296,1024,224,4.43,13.57,25.68\r\nseresnetaa50d,2309.78,443.319,1024,224,5.4,12.46,28.11\r\ndla60_res2net,2301.91,444.834,1024,224,4.15,12.34,20.85\r\nvit_base_r26_s32_224,2301.77,444.864,1024,224,6.81,12.36,101.38\r\ntwins_pcpvt_small,2290.09,447.132,1024,224,3.83,18.08,24.11\r\nregnetz_b16,2286.62,447.81,1024,288,2.39,16.43,9.72\r\nese_vovnet57b,2267.23,451.64,1024,224,8.95,7.52,38.61\r\ngluon_inception_v3,2265.31,452.024,1024,299,5.73,8.97,23.83\r\ninception_v3,2260.97,452.888,1024,299,5.73,8.97,23.83\r\nadv_inception_v3,2258.89,453.305,1024,299,5.73,8.97,23.83\r\ntf_inception_v3,2255.73,453.943,1024,299,5.73,8.97,23.83\r\ndensenet169,2232.91,458.582,1024,224,3.4,7.3,14.15\r\ntf_efficientnetv2_b3,2223.64,460.493,1024,300,3.04,15.74,14.36\r\nnf_ecaresnet50,2211.52,463.019,1024,224,4.21,11.13,25.56\r\nnf_seresnet50,2207.21,463.921,1024,224,4.21,11.13,28.09\r\nskresnet50,2206.75,464.017,1024,224,4.11,12.5,25.8\r\nedgenext_small,2206.31,464.109,1024,320,1.97,14.16,5.59\r\nseresnext50_32x4d,2197.09,466.058,1024,224,4.26,14.42,27.56\r\ngluon_seresnext50_32x4d,2196.94,466.091,1024,224,4.26,14.42,27.56\r\nxcit_small_12_p16_224_dist,2195.81,466.33,1024,224,4.82,12.58,26.25\r\nlegacy_seresnext50_32x4d,2193.34,466.856,1024,224,4.26,14.42,27.56\r\nxcit_small_12_p16_224,2190.16,467.534,1024,224,4.82,12.58,26.25\r\nrepvgg_b1g4,2188.83,467.817,1024,224,8.15,10.64,39.97\r\ntf_
efficientnet_lite3,2188.37,233.953,512,300,1.65,21.85,8.2\r\nefficientnetv2_rw_t,2170.03,471.87,1024,288,3.19,16.42,13.65\r\ngmlp_s16_224,2164.56,473.061,1024,224,4.42,15.1,19.42\r\ndla60_res2next,2126.26,481.583,1024,224,3.49,13.17,17.03\r\ngc_efficientnetv2_rw_t,2126.09,481.621,1024,288,3.2,16.45,13.68\r\nskresnet50d,2112.57,484.703,1024,224,4.36,13.31,25.82\r\nmobilevitv2_150,2105.0,243.219,512,256,4.09,24.11,10.59\r\nmobilevitv2_150_in22ft1k,2104.51,243.274,512,256,4.09,24.11,10.59\r\nconvnextv2_pico,2092.16,489.434,1024,288,2.27,10.08,9.07\r\npoolformer_s24,2090.38,489.851,1024,224,3.41,10.68,21.39\r\ncs3sedarknet_xdw,2090.04,489.929,1024,256,5.97,17.18,21.6\r\nres2next50,2085.23,491.055,1024,224,4.2,13.71,24.67\r\ncspdarknet53,2084.51,491.231,1024,256,6.57,16.81,27.64\r\nfbnetv3_g,2084.48,491.238,1024,288,1.77,21.09,16.62\r\ncrossvit_small_240,2074.04,493.709,1024,240,5.63,18.17,26.86\r\ndeit3_medium_patch16_224_in21ft1k,2064.27,496.046,1024,224,8.0,15.93,38.85\r\ndeit3_medium_patch16_224,2063.34,496.268,1024,224,8.0,15.93,38.85\r\nxcit_nano_12_p8_224_dist,2049.01,499.742,1024,224,2.16,15.71,3.05\r\nxcit_nano_12_p8_224,2044.48,500.848,1024,224,2.16,15.71,3.05\r\nnf_regnet_b3,2035.39,503.085,1024,320,2.05,14.61,18.59\r\ncs3darknet_focus_x,2017.73,507.488,1024,256,8.03,10.69,35.02\r\nvit_relpos_medium_patch16_cls_224,2000.38,511.89,1024,224,8.03,18.24,38.76\r\nlambda_resnet50ts,1991.21,514.246,1024,256,5.07,17.48,21.54\r\nswin_tiny_patch4_window7_224,1978.72,517.495,1024,224,4.51,17.06,28.29\r\nsebotnet33ts_256,1959.75,195.932,384,256,3.89,17.46,13.7\r\ncoatnet_0_rw_224,1957.32,523.148,1024,224,4.43,18.73,27.44\r\necaresnet26t,1953.32,524.224,1024,320,5.24,16.44,16.01\r\nregnetx_080,1942.5,527.144,1024,224,8.02,14.06,39.57\r\ngcvit_xtiny,1941.57,527.393,1024,224,2.93,20.26,19.98\r\nresnetv2_101,1925.46,531.806,1024,224,7.83,16.23,44.54\r\nregnetx_064,1920.06,533.303,1024,224,6.49,16.37,26.21\r\nmixnet_xl,1918.85,533.64,1024,224,0.93,14.57,11.9\r\nedgenext_small_
rw,1912.9,535.3,1024,320,2.46,14.85,7.83\r\nvit_relpos_medium_patch16_224,1907.96,536.687,1024,224,7.97,17.02,38.75\r\nvit_srelpos_medium_patch16_224,1900.57,538.773,1024,224,7.96,16.21,38.74\r\nresnest50d,1896.74,539.858,1024,224,5.4,14.36,27.48\r\ncrossvit_15_240,1894.86,540.397,1024,240,5.81,19.77,27.53\r\nvit_base_resnet50d_224,1892.78,540.989,1024,224,8.73,16.92,110.97\r\ngluon_resnet101_v1b,1879.26,544.883,1024,224,7.83,16.23,44.55\r\ntv_resnet101,1878.26,545.172,1024,224,7.83,16.23,44.55\r\nresnet101,1875.25,546.047,1024,224,7.83,16.23,44.55\r\ndla102,1873.79,546.472,1024,224,7.19,14.18,33.27\r\nefficientformer_l3,1868.08,548.142,1024,224,3.93,12.01,31.41\r\nmaxvit_rmlp_pico_rw_256,1866.73,411.402,768,256,1.85,24.86,7.52\r\nresnetv2_101d,1855.94,551.727,1024,224,8.07,17.04,44.56\r\npvt_v2_b2,1835.92,557.745,1024,224,4.05,27.53,25.36\r\nmaxvit_pico_rw_256,1829.44,419.787,768,256,1.83,22.3,7.46\r\nvgg13,1820.36,562.512,1024,224,11.31,12.25,133.05\r\nlamhalobotnet50ts_256,1818.57,563.067,1024,256,5.02,18.44,22.57\r\ncrossvit_15_dagger_240,1817.96,563.255,1024,240,6.13,20.43,28.21\r\ngluon_resnet101_v1c,1816.14,563.82,1024,224,8.08,17.04,44.57\r\nres2net50_26w_6s,1811.81,565.168,1024,224,6.33,15.28,37.05\r\ngluon_resnet101_v1d,1808.21,566.295,1024,224,8.08,17.04,44.57\r\nswin_s3_tiny_224,1803.67,567.72,1024,224,4.64,19.13,28.33\r\ncoatnet_rmlp_0_rw_224,1803.63,567.733,1024,224,4.72,24.89,27.45\r\nvit_relpos_medium_patch16_rpn_224,1770.72,578.284,1024,224,7.97,17.02,38.73\r\nhalonet50ts,1765.73,579.917,1024,256,5.3,19.2,22.73\r\nrepvgg_b1,1760.92,581.5,1024,224,13.16,10.64,57.42\r\ncoatnet_bn_0_rw_224,1753.99,583.799,1024,224,4.67,22.04,27.44\r\nwide_resnet50_2,1747.87,585.844,1024,224,11.43,14.4,68.88\r\nefficientnet_b3,1741.21,294.036,512,320,2.01,26.52,12.23\r\nefficientnet_b3a,1740.84,294.1,512,320,2.01,26.52,12.23\r\ndensenet201,1738.22,589.096,1024,224,4.34,7.85,20.01\r\ncoatnet_0_224,1727.45,296.376,512,224,4.58,24.01,25.04\r\ndarknetaa53,1721.33,594.876,10
24,288,10.08,15.68,36.02\r\ntf_efficientnet_b3,1720.61,297.558,512,300,1.87,23.83,12.23\r\ncait_xxs24_224,1720.1,595.301,1024,224,2.53,20.29,11.96\r\nvit_large_patch32_224,1718.53,595.845,1024,224,15.41,13.32,327.9\r\nmobilevitv2_175,1697.71,301.572,512,256,5.54,28.13,14.25\r\nmobilevitv2_175_in22ft1k,1697.51,301.606,512,256,5.54,28.13,14.25\r\nxcit_tiny_12_p16_384_dist,1694.92,604.145,1024,384,3.64,18.26,6.72\r\npvt_v2_b2_li,1694.45,604.311,1024,224,3.91,27.6,22.55\r\ncoat_lite_small,1694.41,604.328,1024,224,3.96,22.09,19.84\r\nresnetaa101d,1692.59,604.976,1024,224,9.12,17.56,44.57\r\nlegacy_seresnet101,1686.93,607.005,1024,224,7.61,15.74,49.33\r\ntresnet_v2_l,1685.52,607.515,1024,224,8.81,16.34,46.17\r\nhrnet_w18,1679.12,609.832,1024,224,4.32,16.31,21.3\r\nvit_medium_patch16_gap_240,1667.0,614.264,1024,240,9.22,18.81,44.4\r\nvit_tiny_patch16_384,1660.88,616.528,1024,384,4.7,25.39,5.79\r\nregnetv_040,1659.81,616.926,1024,288,6.6,20.3,20.64\r\nconvnext_tiny_hnf,1659.73,616.951,1024,288,7.39,22.21,28.59\r\nseresnet101,1655.13,618.666,1024,224,7.84,16.27,49.33\r\nvit_base_patch32_384,1651.29,620.109,1024,384,13.06,16.5,88.3\r\nvit_base_patch32_clip_384,1649.72,620.7,1024,384,13.06,16.5,88.3\r\nregnety_040,1647.66,621.47,1024,288,6.61,20.3,20.65\r\nregnety_032,1645.25,622.383,1024,288,5.29,18.61,19.44\r\ngluon_resnet101_v1s,1642.29,623.505,1024,224,9.19,18.64,44.67\r\nvgg13_bn,1634.19,626.596,1024,224,11.33,12.25,133.05\r\nresnetaa50,1631.05,627.803,1024,288,8.52,19.24,25.56\r\nmixer_b16_224_miil,1628.71,628.706,1024,224,12.62,14.53,59.88\r\nmixer_b16_224,1627.79,629.061,1024,224,12.62,14.53,59.88\r\nconvnext_tiny,1626.95,629.384,1024,288,7.39,22.21,28.59\r\nnf_resnet101,1620.77,631.785,1024,224,8.01,16.23,44.55\r\nswinv2_cr_tiny_224,1618.15,632.807,1024,224,4.66,28.45,28.33\r\necaresnet101d,1609.33,636.276,1024,224,8.08,17.07,44.57\r\ntwins_pcpvt_base,1605.41,637.831,1024,224,6.68,25.25,43.83\r\ndla102x,1601.78,639.274,1024,224,5.89,19.42,26.31\r\nese_vovnet39b_evos,1
601.47,639.4,1024,224,7.07,6.74,24.58\r\ndarknet53,1597.03,641.177,1024,288,11.78,15.68,41.61\r\nresnetblur101d,1596.24,641.494,1024,224,9.12,17.94,44.57\r\nresnet51q,1592.08,643.172,1024,288,8.07,20.94,35.7\r\nswinv2_cr_tiny_ns_224,1591.39,643.448,1024,224,4.66,28.45,28.33\r\nmixer_l32_224,1583.03,646.85,1024,224,11.27,19.86,206.94\r\nresmlp_36_distilled_224,1577.86,648.967,1024,224,8.91,16.33,44.69\r\nresmlp_36_224,1577.4,649.158,1024,224,8.91,16.33,44.69\r\nresnetv2_50d_gn,1561.87,655.61,1024,288,7.24,19.7,25.57\r\nbotnet50ts_256,1556.81,246.643,384,256,5.54,22.23,22.74\r\nnf_resnet50,1548.83,661.132,1024,288,6.88,18.37,25.56\r\nresnetv2_50d_frn,1547.35,661.764,1024,224,4.33,11.92,25.59\r\nhalo2botnet50ts_256,1546.64,496.545,768,256,5.02,21.78,22.64\r\nmvitv2_tiny,1534.63,667.247,1024,224,4.7,21.16,24.17\r\ngluon_resnext101_32x4d,1505.04,680.366,1024,224,8.01,21.23,44.18\r\nswsl_resnext101_32x4d,1504.46,680.63,1024,224,8.01,21.23,44.18\r\ncs3darknet_x,1504.38,680.665,1024,288,10.6,14.36,35.05\r\nssl_resnext101_32x4d,1503.93,680.869,1024,224,8.01,21.23,44.18\r\nresnext101_32x4d,1503.63,681.005,1024,224,8.01,21.23,44.18\r\nresnest50d_4s2x40d,1497.58,683.755,1024,224,4.4,17.94,30.42\r\nconvnextv2_nano,1488.75,515.858,768,288,4.06,13.84,15.62\r\nskresnext50_32x4d,1478.83,692.427,1024,224,4.5,17.18,27.48\r\nmobilevitv2_200,1478.44,519.454,768,256,7.22,32.15,18.45\r\ntresnet_l,1477.44,693.076,1024,224,10.88,11.9,55.99\r\nmobilevitv2_200_in22ft1k,1477.37,519.83,768,256,7.22,32.15,18.45\r\nvgg16,1475.59,693.946,1024,224,15.47,13.56,138.36\r\nregnetz_c16,1475.58,693.953,1024,320,3.92,25.88,13.46\r\nresnetv2_50d_evob,1468.61,697.244,1024,224,4.33,11.92,25.59\r\nvit_medium_patch16_gap_256,1467.03,697.996,1024,256,10.59,22.15,38.86\r\nres2net50_26w_8s,1466.52,698.239,1024,224,8.37,17.95,48.4\r\nsequencer2d_s,1465.84,698.562,1024,224,4.96,11.31,27.65\r\neca_nfnet_l0,1461.61,700.586,1024,288,7.12,17.29,24.14\r\nnfnet_l0,1460.27,701.228,1024,288,7.13,17.29,35.07\r\ncs3sedarknet
_x,1435.72,713.217,1024,288,10.6,14.37,35.4\r\nresnet61q,1434.01,714.068,1024,288,9.87,21.52,36.85\r\nres2net101_26w_4s,1424.71,718.728,1024,224,8.1,18.45,45.21\r\nrepvgg_b2g4,1415.15,723.581,1024,224,12.63,12.9,61.76\r\nnest_tiny,1413.2,543.434,768,224,5.83,25.48,17.06\r\npoolformer_s36,1408.65,726.922,1024,224,5.0,15.82,30.86\r\nmaxvit_rmlp_nano_rw_256,1404.06,546.971,768,256,4.47,31.92,15.5\r\nconvit_small,1397.72,732.608,1024,224,5.76,17.87,27.78\r\njx_nest_tiny,1387.89,553.347,768,224,5.83,25.48,17.06\r\nmaxvit_nano_rw_256,1378.18,557.246,768,256,4.46,30.28,15.45\r\nnf_ecaresnet101,1373.28,745.649,1024,224,8.01,16.27,44.55\r\nnf_seresnet101,1369.04,747.958,1024,224,8.02,16.27,49.33\r\ngluon_seresnext101_32x4d,1358.35,753.84,1024,224,8.02,21.26,48.96\r\nlegacy_seresnext101_32x4d,1357.27,754.442,1024,224,8.02,21.26,48.96\r\nefficientnet_b3_gn,1357.0,282.964,384,320,2.14,28.83,11.73\r\nnfnet_f0,1356.65,754.786,1024,256,12.62,18.05,71.49\r\nseresnext101_32x4d,1356.0,755.148,1024,224,8.02,21.26,48.96\r\nresnetv2_152,1353.28,756.668,1024,224,11.55,22.56,60.19\r\nxception,1353.17,567.542,768,299,8.4,35.83,22.86\r\ntwins_svt_base,1350.54,758.199,1024,224,8.59,26.33,56.07\r\ncrossvit_18_240,1343.82,761.996,1024,240,9.05,26.26,43.27\r\nese_vovnet99b_iabn,1343.72,762.049,1024,224,16.49,11.27,63.2\r\nmaxxvit_rmlp_nano_rw_256,1341.45,763.341,1024,256,4.37,26.05,16.78\r\nregnetx_120,1339.05,764.708,1024,224,12.13,21.37,46.11\r\nvgg16_bn,1336.79,765.998,1024,224,15.5,13.56,138.37\r\ndpn92,1330.6,769.562,1024,224,6.54,18.21,37.67\r\ntv_resnet152,1329.75,770.054,1024,224,11.56,22.56,60.19\r\ngcvit_tiny,1328.61,770.718,1024,224,4.79,29.82,28.22\r\ngluon_resnet152_v1b,1328.2,770.954,1024,224,11.56,22.56,60.19\r\nresnet152,1327.13,771.578,1024,224,11.56,22.56,60.19\r\nese_vovnet99b,1316.93,777.554,1024,224,16.51,11.27,63.2\r\npvt_v2_b3,1316.31,777.917,1024,224,6.92,37.7,45.24\r\nxcit_tiny_12_p8_224_dist,1300.55,787.348,1024,224,4.81,23.6,6.71\r\nxcit_tiny_12_p8_224,1299.96,787.704
,1024,224,4.81,23.6,6.71\r\ncrossvit_18_dagger_240,1298.96,788.312,1024,240,9.5,27.03,44.27\r\nhrnet_w32,1297.82,789.002,1024,224,8.97,22.02,41.23\r\ngluon_resnet152_v1c,1296.47,789.825,1024,224,11.8,23.36,60.21\r\nresnetv2_152d,1296.37,789.881,1024,224,11.8,23.36,60.2\r\ngluon_resnet152_v1d,1293.21,791.811,1024,224,11.8,23.36,60.21\r\nvit_small_resnet50d_s16_224,1288.35,794.801,1024,224,13.48,24.82,57.53\r\ncs3edgenet_x,1281.15,799.266,1024,288,14.59,16.36,47.82\r\nedgenext_base,1272.74,804.548,1024,320,6.01,24.32,18.51\r\nregnety_120,1268.38,807.318,1024,224,12.14,21.38,51.82\r\ndla169,1258.34,813.753,1024,224,11.6,20.2,53.39\r\nhrnet_w30,1252.2,817.74,1024,224,8.15,21.21,37.71\r\nxception41p,1249.06,409.896,512,299,9.25,39.86,26.91\r\nmaxxvitv2_nano_rw_256,1248.81,819.967,1024,256,6.26,23.05,23.7\r\necaresnet50t,1243.91,823.198,1024,320,8.82,24.13,25.57\r\nvgg19,1237.03,827.774,1024,224,19.63,14.86,143.67\r\nswin_small_patch4_window7_224,1228.67,833.406,1024,224,8.77,27.47,49.61\r\nefficientnet_el_pruned,1220.93,838.69,1024,300,8.0,30.7,10.59\r\ndensenet161,1220.41,839.05,1024,224,7.79,11.06,28.68\r\nefficientnet_el,1218.76,840.187,1024,300,8.0,30.7,10.59\r\ndeit_base_distilled_patch16_224,1211.4,845.292,1024,224,17.68,24.05,87.34\r\nvit_base_patch16_224,1209.0,846.969,1024,224,17.58,23.9,86.57\r\nvit_base_patch16_224_miil,1208.72,847.163,1024,224,17.59,23.91,94.4\r\ndeit_base_patch16_224,1208.56,847.275,1024,224,17.58,23.9,86.57\r\nvit_base_patch16_clip_224,1205.77,849.236,1024,224,17.58,23.9,86.57\r\ngluon_resnet152_v1s,1205.41,849.488,1024,224,12.92,24.96,60.32\r\ncoatnet_rmlp_1_rw_224,1201.89,851.979,1024,224,7.85,35.47,41.69\r\nmaxvit_tiny_rw_224,1200.3,853.107,1024,224,5.11,33.11,29.06\r\nmixnet_xxl,1193.04,643.721,768,224,2.04,23.43,23.96\r\ntf_efficientnet_el,1192.11,858.967,1024,300,8.0,30.7,10.59\r\nswinv2_tiny_window8_256,1191.01,859.761,1024,256,5.96,24.57,28.35\r\nvolo_d1_224,1190.57,860.079,1024,224,6.94,24.43,26.63\r\nrepvgg_b2,1183.91,864.916,1024
,224,20.45,12.9,89.02\r\nlegacy_seresnet152,1181.09,866.978,1024,224,11.33,22.08,66.82\r\nxcit_small_24_p16_224_dist,1175.31,871.245,1024,224,9.1,23.64,47.67\r\nxcit_small_24_p16_224,1174.76,871.656,1024,224,9.1,23.64,47.67\r\ninception_v4,1168.76,876.127,1024,299,12.28,15.09,42.68\r\nseresnet152,1166.02,878.19,1024,224,11.57,22.61,66.82\r\ntwins_pcpvt_large,1163.18,880.331,1024,224,9.84,35.82,60.99\r\ndeit3_base_patch16_224,1159.4,883.201,1024,224,17.58,23.9,86.59\r\ndeit3_base_patch16_224_in21ft1k,1159.14,883.404,1024,224,17.58,23.9,86.59\r\ncait_xxs36_224,1156.4,885.493,1024,224,3.77,30.34,17.3\r\nvit_base_patch32_clip_448,1154.9,886.645,1024,448,17.93,23.9,88.34\r\nregnetx_160,1153.07,888.048,1024,224,15.99,25.52,54.28\r\ndm_nfnet_f0,1152.75,888.293,1024,256,12.62,18.05,71.49\r\nsequencer2d_m,1147.71,892.201,1024,224,6.55,14.26,38.31\r\nrepvgg_b3g4,1145.87,893.631,1024,224,17.89,15.1,83.83\r\nmvitv2_small_cls,1144.7,894.542,1024,224,7.04,28.17,34.87\r\nmvitv2_small,1143.83,895.224,1024,224,7.0,28.08,34.87\r\nefficientnet_lite4,1139.64,336.935,384,380,4.04,45.66,13.01\r\ntnt_s_patch16_224,1135.12,902.091,1024,224,5.24,24.37,23.76\r\nconvmixer_1024_20_ks9_p14,1130.85,905.497,1024,224,5.55,5.51,24.38\r\nvgg19_bn,1127.16,908.464,1024,224,19.66,14.86,143.68\r\nvit_relpos_base_patch16_clsgap_224,1124.58,910.547,1024,224,17.6,25.12,86.43\r\nvit_relpos_base_patch16_cls_224,1122.76,912.026,1024,224,17.6,25.12,86.43\r\ncoatnet_rmlp_1_rw2_224,1119.61,914.591,1024,224,8.11,40.13,41.72\r\nbeit_base_patch16_224,1109.32,923.073,1024,224,17.58,23.9,86.53\r\nxception41,1107.6,462.251,512,299,9.28,39.86,26.97\r\ntresnet_xl,1106.51,925.423,1024,224,15.17,15.34,78.44\r\nbeitv2_base_patch16_224,1106.05,925.798,1024,224,17.58,23.9,86.53\r\ncoat_tiny,1099.16,931.604,1024,224,4.35,27.2,5.5\r\nvit_base_patch16_gap_224,1085.51,943.323,1024,224,17.49,25.59,86.57\r\nmaxvit_tiny_tf_224,1081.57,710.062,768,224,5.6,35.78,30.92\r\nvit_relpos_base_patch16_224,1078.21,949.713,1024,224,17.51,24.9
7,86.43\r\nnf_regnet_b4,1075.82,951.823,1024,384,4.7,28.61,30.21\r\ncoatnet_1_rw_224,1074.48,953.005,1024,224,8.04,34.6,41.72\r\ndla102x2,1070.83,956.252,1024,224,9.34,29.91,41.28\r\npit_b_224,1066.8,479.928,512,224,12.42,32.94,73.76\r\npit_b_distilled_224,1063.31,481.504,512,224,12.5,33.07,74.79\r\ntf_efficientnet_lite4,1058.68,362.703,384,380,4.04,45.66,13.01\r\nefficientnetv2_s,1057.28,968.508,1024,384,8.44,35.77,21.46\r\nvit_large_r50_s32_224,1034.79,989.556,1024,224,19.58,24.41,328.99\r\nvit_small_patch16_36x1_224,1032.1,992.142,1024,224,13.71,35.69,64.67\r\nefficientnet_b3_g8_gn,1031.26,496.465,512,320,3.2,28.83,14.25\r\ntf_efficientnetv2_s,1029.13,995.002,1024,384,8.44,35.77,21.46\r\nflexivit_base,1028.55,995.558,1024,240,20.29,28.36,86.59\r\nvit_base_patch16_rpn_224,1016.66,1007.208,1024,224,17.49,23.75,86.54\r\nvit_small_r26_s32_384,1011.11,1012.73,1024,384,10.43,29.85,36.47\r\nvit_small_patch16_18x2_224,1005.34,1018.547,1024,224,13.71,35.69,64.67\r\nswinv2_cr_small_224,1000.71,1023.259,1024,224,9.07,50.27,49.7\r\nefficientnetv2_rw_s,995.91,1028.19,1024,384,8.72,38.03,23.94\r\nwide_resnet101_2,995.32,1028.801,1024,224,22.8,21.23,126.89\r\nswinv2_cr_small_ns_224,989.25,1035.114,1024,224,9.08,50.27,49.7\r\nvit_relpos_base_patch16_rpn_224,986.84,1037.641,1024,224,17.51,24.97,86.41\r\ncoatnet_1_224,984.69,519.944,512,224,8.7,39.0,42.23\r\nresnet200,983.36,1041.314,1024,224,15.07,32.19,64.67\r\ndpn98,982.09,1042.657,1024,224,11.73,25.2,61.57\r\nconvnext_small,981.97,1042.782,1024,288,14.39,35.65,50.22\r\ncs3se_edgenet_x,975.89,1049.279,1024,320,18.01,20.21,50.72\r\nregnety_080,969.67,1056.01,1024,288,13.22,29.69,39.18\r\npoolformer_m36,966.97,1058.965,1024,224,8.8,22.02,56.17\r\nresnest101e,963.69,1062.57,1024,256,13.38,28.66,48.28\r\nregnetz_b16_evos,955.65,803.632,768,288,2.36,16.43,9.74\r\ntwins_svt_large,954.95,1072.291,1024,224,15.15,35.1,99.27\r\npvt_v2_b4,952.02,1075.594,1024,224,10.14,53.74,62.56\r\ngluon_resnext101_64x4d,944.48,1084.183,1024,224,15.52,3
1.21,83.46\r\nregnetv_064,944.32,1084.367,1024,288,10.55,27.11,30.58\r\nregnety_064,944.18,1084.526,1024,288,10.56,27.11,30.58\r\nmaxvit_rmlp_tiny_rw_256,941.64,815.588,768,256,6.77,46.92,29.15\r\nregnetz_d8,936.16,1093.814,1024,320,6.19,37.08,23.37\r\nresnetrs101,936.12,1093.858,1024,288,13.56,28.53,63.62\r\nregnetz_d32,933.58,1096.833,1024,320,9.33,37.08,27.58\r\nig_resnext101_32x8d,930.9,1099.997,1024,224,16.48,31.21,88.79\r\nswsl_resnext101_32x8d,930.28,1100.725,1024,224,16.48,31.21,88.79\r\nresnext101_32x8d,929.98,1101.084,1024,224,16.48,31.21,88.79\r\nssl_resnext101_32x8d,929.0,1102.24,1024,224,16.48,31.21,88.79\r\nconvnextv2_tiny,925.13,553.423,512,288,7.39,22.21,28.64\r\nconvnextv2_small,924.53,1107.57,1024,224,8.71,21.56,50.32\r\nmaxvit_tiny_rw_256,921.72,833.209,768,256,6.74,44.35,29.07\r\ninception_resnet_v2,917.69,1115.834,1024,299,13.18,25.06,55.84\r\nens_adv_inception_resnet_v2,917.66,1115.871,1024,299,13.18,25.06,55.84\r\nmaxxvit_rmlp_tiny_rw_256,914.74,1119.428,1024,256,6.66,39.76,29.64\r\nxcit_tiny_24_p16_384_dist,912.61,1122.045,1024,384,6.87,34.29,12.12\r\ncait_s24_224,908.65,1126.929,1024,224,9.35,40.58,46.92\r\npvt_v2_b5,904.89,1131.615,1024,224,11.76,50.92,81.96\r\nnest_small,902.63,850.834,768,224,10.35,40.04,38.35\r\nrepvgg_b3,901.73,1135.583,1024,224,29.16,15.1,123.09\r\nmaxvit_tiny_pm_256,896.67,1141.994,1024,256,6.61,47.9,30.09\r\nxception65p,896.53,571.079,512,299,13.91,52.48,39.82\r\nswin_s3_small_224,896.35,856.792,768,224,9.43,37.84,49.74\r\njx_nest_small,892.32,860.663,768,224,10.35,40.04,38.35\r\nefficientnet_b4,890.89,431.018,384,384,4.51,50.04,19.34\r\ngmlp_b16_224,885.75,1156.072,1024,224,15.78,30.21,73.08\r\ngluon_seresnext101_64x4d,885.23,1156.747,1024,224,15.53,31.25,88.23\r\nhrnet_w40,881.9,1161.12,1024,224,12.75,25.29,57.56\r\nefficientformer_l7,877.43,1167.027,1024,224,10.17,24.45,82.23\r\ncoat_mini,874.29,1171.227,1024,224,6.82,33.68,10.34\r\nresnet101d,871.81,1174.559,1024,320,16.48,34.77,44.57\r\nswin_base_patch4_window7_
224,870.1,1176.867,1024,224,15.47,36.63,87.77\r\nregnetz_040,868.17,884.605,768,320,6.35,37.78,27.12\r\nregnetz_040h,862.76,890.151,768,320,6.43,37.94,28.94\r\nmobilevitv2_150_384_in22ft1k,848.7,301.627,256,384,9.2,54.25,10.59\r\nresnetv2_50d_evos,844.34,909.573,768,288,7.15,19.7,25.59\r\ntf_efficientnet_b4,838.16,458.136,384,380,4.49,49.49,19.34\r\ncrossvit_base_240,835.31,919.411,768,240,21.22,36.33,105.03\r\nvit_base_r50_s16_224,821.15,1247.01,1024,224,21.67,35.31,114.69\r\nxcit_medium_24_p16_224_dist,819.59,1249.397,1024,224,16.13,31.71,84.4\r\nxcit_medium_24_p16_224,818.73,1250.697,1024,224,16.13,31.71,84.4\r\ngcvit_small,807.46,1268.151,1024,224,8.57,41.61,51.09\r\ngluon_xception65,806.21,635.055,512,299,13.96,52.48,39.92\r\nxception65,800.01,639.983,512,299,13.96,52.48,39.92\r\nmvitv2_base,799.31,1281.092,1024,224,10.16,40.5,51.47\r\nhrnet_w44,789.29,1297.348,1024,224,14.94,26.92,67.06\r\nvit_base_patch16_plus_240,780.68,1311.665,1024,240,27.41,33.08,117.56\r\nhrnet_w48,780.39,1312.147,1024,224,17.34,28.56,77.47\r\nswinv2_tiny_window16_256,778.19,657.926,512,256,6.68,39.02,28.35\r\ntresnet_m_448,775.99,1319.596,1024,448,22.94,29.21,31.39\r\nxcit_small_12_p16_384_dist,760.88,1345.804,1024,384,14.14,36.51,26.25\r\nvit_small_patch16_384,750.95,1022.685,768,384,15.52,50.78,22.2\r\nmaxvit_rmlp_small_rw_224,745.49,1373.585,1024,224,10.75,49.3,64.9\r\nsequencer2d_l,742.48,1379.149,1024,224,9.74,22.12,54.3\r\nswinv2_small_window8_256,738.39,1386.788,1024,256,11.58,40.14,49.73\r\nswin_s3_base_224,730.45,1401.854,1024,224,13.69,48.26,71.13\r\npoolformer_m48,729.44,1403.808,1024,224,11.59,29.17,73.47\r\ndensenet264d_iabn,727.43,1407.671,1024,224,13.47,14.0,72.74\r\nvit_relpos_base_patch16_plus_240,723.43,1415.468,1024,240,27.3,34.33,117.38\r\ndpn131,722.72,1416.854,1024,224,16.09,32.97,79.25\r\ntnt_b_patch16_224,722.12,1418.026,1024,224,14.09,39.01,65.41\r\ndeit3_small_patch16_384,717.36,1070.572,768,384,15.52,50.78,22.21\r\ndeit3_small_patch16_384_in21ft1k,716.76,1071.
477,768,384,15.52,50.78,22.21\r\nswinv2_cr_base_224,715.64,1430.874,1024,224,15.86,59.66,87.88\r\neca_nfnet_l1,713.15,1435.867,1024,320,14.92,34.42,41.41\r\ncoatnet_2_rw_224,709.88,721.237,512,224,15.09,49.22,73.87\r\nswinv2_cr_base_ns_224,709.69,1442.871,1024,224,15.86,59.66,87.88\r\ncoatnet_rmlp_2_rw_224,708.85,722.285,512,224,15.18,54.78,73.88\r\nconvit_base,706.65,1449.076,1024,224,17.52,31.77,86.54\r\nmobilevitv2_175_384_in22ft1k,703.41,363.928,256,384,12.47,63.29,14.25\r\nmaxvit_small_tf_224,701.58,729.767,512,224,11.66,53.17,68.93\r\ndensenet264,701.03,1460.686,1024,224,12.95,12.8,72.69\r\necaresnet200d,694.19,1475.094,1024,256,20.0,43.15,64.69\r\nresnetv2_50x1_bitm,691.29,740.624,512,448,16.62,44.46,25.55\r\nseresnet200d,691.25,1481.355,1024,256,20.01,43.15,71.86\r\nxcit_tiny_24_p8_224,684.73,1495.467,1024,224,9.21,45.39,12.11\r\nxcit_tiny_24_p8_224_dist,684.22,1496.573,1024,224,9.21,45.39,12.11\r\nconvnext_base,682.42,1500.518,1024,288,25.43,47.53,88.59\r\nvolo_d2_224,663.51,1543.3,1024,224,14.34,41.34,58.68\r\ncoatnet_2_224,660.84,581.062,384,224,16.5,52.67,74.68\r\nlegacy_senet154,654.15,1565.387,1024,224,20.77,38.69,115.09\r\ngluon_senet154,654.04,1565.641,1024,224,20.77,38.69,115.09\r\nsenet154,653.94,1565.866,1024,224,20.77,38.69,115.09\r\nxcit_nano_12_p8_384_dist,646.53,1583.823,1024,384,6.34,46.08,3.05\r\ndpn107,646.38,1584.202,1024,224,18.38,33.46,86.92\r\nnest_base,640.55,799.298,512,224,17.96,53.39,67.72\r\njx_nest_base,633.53,808.151,512,224,17.96,53.39,67.72\r\nmobilevitv2_200_384_in22ft1k,626.31,408.731,256,384,16.24,72.34,18.45\r\nxception71,619.72,826.163,512,299,18.09,69.92,42.34\r\nhrnet_w64,618.15,1656.539,1024,224,28.97,35.09,128.06\r\nresnet152d,618.09,1656.699,1024,320,24.08,47.67,60.21\r\nregnetz_c16_evos,604.19,847.399,512,320,3.86,25.88,13.49\r\ngcvit_base,594.61,1722.135,1024,224,14.87,55.48,90.32\r\nregnety_160,594.3,1292.258,768,288,26.37,38.07,83.59\r\nmaxxvit_rmlp_small_rw_256,588.15,1741.023,1024,256,14.67,58.38,66.01\r\nxcit_s
mall_12_p8_224,582.04,1759.324,1024,224,18.69,47.21,26.21\r\nxcit_small_12_p8_224_dist,581.74,1760.224,1024,224,18.69,47.21,26.21\r\nmaxvit_rmlp_small_rw_256,575.72,1333.976,768,256,14.15,66.09,64.9\r\nregnetx_320,551.07,1393.631,768,224,31.81,36.3,107.81\r\nseresnet152d,547.51,1870.27,1024,320,24.09,47.72,66.84\r\nresnetrs152,544.33,1881.196,1024,320,24.34,48.14,86.62\r\nvit_large_patch32_384,543.23,1884.997,1024,384,45.31,43.86,306.63\r\nhalonet_h1,540.47,473.65,256,256,3.0,51.17,8.1\r\nseresnet269d,540.42,1894.818,1024,256,26.59,53.6,113.67\r\nswinv2_base_window8_256,529.22,1451.182,768,256,20.37,52.59,87.92\r\nmaxxvitv2_rmlp_base_rw_224,523.43,1956.308,1024,224,24.2,62.77,116.09\r\nresnext101_64x4d,521.77,1962.525,1024,288,25.66,51.59,83.46\r\nregnetz_e8,521.5,1472.647,768,320,15.46,63.94,57.7\r\nmixer_l16_224,518.26,1975.807,1024,224,44.6,41.69,208.2\r\nvit_medium_patch16_gap_384,508.63,1006.611,512,384,26.08,67.54,39.03\r\nswin_large_patch4_window7_224,501.11,1532.586,768,224,34.53,54.94,196.53\r\nregnety_320,490.98,2085.591,1024,224,32.34,30.26,145.05\r\nswinv2_small_window16_256,487.64,1049.932,512,256,12.82,66.29,49.73\r\nseresnext101_32x8d,483.23,2119.074,1024,288,27.24,51.63,93.57\r\nvit_small_patch8_224,478.05,1071.009,512,224,22.44,80.84,21.67\r\nig_resnext101_32x16d,477.64,2143.862,1024,224,36.27,51.18,194.03\r\nswsl_resnext101_32x16d,476.69,2148.145,1024,224,36.27,51.18,194.03\r\nssl_resnext101_32x16d,476.06,2150.954,1024,224,36.27,51.18,194.03\r\nseresnext101d_32x8d,475.05,2155.547,1024,288,27.64,52.95,93.59\r\nnf_regnet_b5,470.14,1089.029,512,456,11.7,61.95,49.74\r\nxcit_large_24_p16_224_dist,468.86,2184.017,1024,224,35.86,47.27,189.1\r\nxcit_large_24_p16_224,468.75,2184.529,1024,224,35.86,47.27,189.1\r\nvolo_d3_224,463.72,2208.199,1024,224,20.78,60.09,86.33\r\nnfnet_f1,463.52,2209.163,1024,320,35.97,46.77,132.63\r\nefficientnet_b5,460.91,555.412,256,448,9.59,93.56,30.39\r\nresnet200d,453.15,2259.739,1024,320,31.25,67.33,64.69\r\nefficientnetv2_m,45
1.89,2266.018,1024,416,18.6,67.5,54.14\r\nseresnextaa101d_32x8d,447.26,2289.498,1024,288,28.51,56.44,93.59\r\nefficientnetv2_rw_m,437.1,1757.005,768,416,21.49,79.62,53.24\r\nswinv2_cr_large_224,422.08,1819.551,768,224,35.1,78.42,196.68\r\ncoatnet_rmlp_3_rw_224,421.87,910.226,384,224,33.56,79.47,165.15\r\nxcit_tiny_12_p8_384_dist,421.04,2432.044,1024,384,14.13,69.14,6.71\r\nswinv2_cr_tiny_384,419.77,609.847,256,384,15.34,161.01,28.33\r\nmaxvit_rmlp_base_rw_224,419.03,1832.808,768,224,23.15,92.64,116.14\r\nresnetv2_152x2_bit_teacher,418.89,2444.553,1024,224,46.95,45.11,236.34\r\nresnetv2_101x1_bitm,418.36,1223.813,512,448,31.65,64.93,44.54\r\ndm_nfnet_f1,409.02,1877.643,768,320,35.97,46.77,132.63\r\nxcit_small_24_p16_384_dist,407.47,2513.062,1024,384,26.72,68.58,47.67\r\ncoatnet_3_rw_224,404.39,633.033,256,224,33.44,73.83,181.81\r\ntf_efficientnet_b5,403.59,634.298,256,456,10.46,98.86,30.39\r\nconvnextv2_base,402.92,1270.715,512,288,25.43,47.53,88.72\r\nresnetrs200,396.11,2585.123,1024,320,31.51,67.81,93.21\r\ntresnet_l_448,395.6,2588.481,1024,448,43.5,47.56,55.99\r\neva_large_patch14_196,391.22,2617.408,1024,196,61.57,63.52,304.14\r\nvit_large_patch16_224,389.92,2626.132,1024,224,61.6,63.52,304.33\r\nregnetz_d8_evos,389.86,1969.937,768,320,7.03,38.92,23.46\r\nmaxvit_base_tf_224,387.71,1320.545,512,224,24.04,95.01,119.47\r\ncoatnet_3_224,387.35,660.882,256,224,36.56,79.01,166.97\r\ncrossvit_15_dagger_408,386.57,662.227,256,408,21.45,95.05,28.5\r\nvit_base_patch16_18x2_224,384.3,2664.545,1024,224,52.51,71.38,256.73\r\ndeit3_large_patch16_224,376.93,2716.643,1024,224,61.6,63.52,304.37\r\ndeit3_large_patch16_224_in21ft1k,376.54,2719.504,1024,224,61.6,63.52,304.37\r\ntf_efficientnetv2_m,374.38,2051.373,768,480,24.76,89.84,54.14\r\nconvnext_large,371.39,1378.579,512,288,56.87,71.29,197.77\r\nbeitv2_large_patch16_224,360.12,2843.465,1024,224,61.6,63.52,304.43\r\nbeit_large_patch16_224,359.86,2845.558,1024,224,61.6,63.52,304.43\r\nswinv2_base_window12to16_192to256_22kft1k,35
9.31,1068.705,384,256,22.02,84.71,87.92\r\nswinv2_base_window16_256,359.09,1069.342,384,256,22.02,84.71,87.92\r\neca_nfnet_l2,347.1,2212.621,768,384,30.05,68.28,56.72\r\nflexivit_large,333.31,3072.173,1024,240,70.99,75.39,304.36\r\nvit_large_r50_s32_384,332.86,3076.333,1024,384,57.43,76.52,329.09\r\nmaxxvitv2_rmlp_large_rw_224,330.79,3095.576,1024,224,44.14,87.15,215.42\r\nresnest200e,317.25,3227.754,1024,320,35.69,82.78,70.2\r\nmaxvit_tiny_tf_384,317.22,807.002,256,384,17.53,123.42,30.98\r\nconvmixer_768_32,309.28,3310.892,1024,224,19.55,25.95,21.11\r\ndeit_base_patch16_384,306.13,1254.335,384,384,55.54,101.56,86.86\r\nvit_base_patch16_384,306.13,1254.349,384,384,55.54,101.56,86.86\r\nvit_base_patch16_clip_384,305.56,1256.673,384,384,55.54,101.56,86.86\r\nxcit_small_24_p8_224_dist,305.18,3355.41,1024,224,35.81,90.78,47.63\r\ndeit_base_distilled_patch16_384,304.96,1259.16,384,384,55.65,101.82,87.63\r\nxcit_small_24_p8_224,304.86,3358.887,1024,224,35.81,90.78,47.63\r\nnasnetalarge,300.31,1278.679,384,331,23.89,90.56,88.75\r\nvolo_d1_384,299.05,1712.072,512,384,22.75,108.55,26.78\r\nvolo_d4_224,295.86,3461.069,1024,224,44.34,80.22,192.96\r\ndeit3_base_patch16_384,294.03,1305.985,384,384,55.54,101.56,86.88\r\ndeit3_base_patch16_384_in21ft1k,293.78,1307.085,384,384,55.54,101.56,86.88\r\ntresnet_xl_448,292.43,2626.294,768,448,60.65,61.31,78.44\r\npnasnet5large,285.95,1342.894,384,331,25.04,92.89,86.06\r\nvit_large_patch14_224,285.66,3584.705,1024,224,81.08,88.79,304.2\r\nvit_large_patch14_clip_224,285.43,3587.599,1024,224,81.08,88.79,304.2\r\ncrossvit_18_dagger_408,283.82,901.967,256,408,32.47,124.87,44.61\r\nxcit_medium_24_p16_384_dist,282.22,3628.317,1024,384,47.39,91.64,84.4\r\ncait_xxs24_384,275.38,3718.492,1024,384,9.63,122.66,12.03\r\nregnety_640,271.79,2825.663,768,224,64.16,42.5,281.38\r\nmaxvit_large_tf_224,268.97,1427.67,384,224,43.68,127.35,211.79\r\nnfnet_f2,263.0,3893.59,1024,352,63.22,79.06,193.78\r\nbeit_base_patch16_384,260.66,1473.146,384,384,55.54,101.5
6,86.74\r\nswinv2_cr_small_384,258.79,989.214,256,384,29.7,298.03,49.7\r\necaresnet269d,257.79,3972.16,1024,352,50.25,101.25,102.09\r\nresnetrs270,249.11,4110.633,1024,352,51.13,105.48,129.86\r\nmvitv2_large,248.64,2059.181,512,224,43.87,112.02,217.99\r\nefficientnet_b6,246.42,519.432,128,528,19.4,167.39,43.04\r\nconvnext_xlarge,241.35,2121.412,512,288,100.8,95.05,350.2\r\nconvnextv2_large,238.64,1072.708,256,288,56.87,71.29,197.96\r\ntf_efficientnet_b6,236.4,541.434,128,528,19.4,167.39,43.04\r\nswin_base_patch4_window12_384,235.04,816.885,192,384,47.19,134.78,87.9\r\ndm_nfnet_f2,234.34,3277.279,768,352,63.22,79.06,193.78\r\ncoatnet_4_224,228.52,1120.23,256,224,62.48,129.26,275.43\r\nvit_base_r50_s16_384,227.31,1689.303,384,384,67.43,135.03,98.95\r\nefficientnetv2_l,221.97,2306.653,512,480,56.4,157.99,118.52\r\nxcit_tiny_24_p8_384_dist,221.23,4628.611,1024,384,27.05,132.95,12.11\r\nig_resnext101_32x32d,220.61,2320.857,512,224,87.29,91.12,468.53\r\nswinv2_large_window12to16_192to256_22kft1k,219.46,1166.485,256,256,47.81,121.53,196.74\r\ntf_efficientnetv2_l,219.35,2334.183,512,480,56.4,157.99,118.52\r\nresmlp_big_24_224,214.31,4778.166,1024,224,100.23,87.31,129.14\r\nresmlp_big_24_224_in22ft1k,214.13,4782.043,1024,224,100.23,87.31,129.14\r\nresmlp_big_24_distilled_224,214.04,4784.169,1024,224,100.23,87.31,129.14\r\nxcit_medium_24_p8_224_dist,210.1,4873.763,1024,224,63.53,121.23,84.32\r\nxcit_medium_24_p8_224,210.01,4875.864,1024,224,63.53,121.23,84.32\r\nmaxvit_small_tf_384,208.79,919.556,192,384,35.87,183.65,69.02\r\nvit_base_patch8_224,199.59,1282.637,256,224,78.22,161.69,86.58\r\neca_nfnet_l3,199.58,2565.434,512,448,52.55,118.4,72.04\r\nvolo_d5_224,196.25,5217.924,1024,224,72.4,118.11,295.46\r\nxcit_small_12_p8_384_dist,194.27,2635.521,512,384,54.92,138.29,26.21\r\ncait_xs24_384,192.73,3984.863,768,384,19.28,183.98,26.67\r\nswinv2_cr_base_384,184.92,1384.392,256,384,50.57,333.68,87.88\r\ncait_xxs36_384,184.35,5554.56,1024,384,14.35,183.7,17.37\r\nswinv2_cr_huge_224
,183.61,2091.395,384,224,115.97,121.08,657.83\r\nconvnext_xxlarge,183.01,2098.268,384,224,151.66,95.29,846.47\r\ncoatnet_rmlp_2_rw_384,178.88,715.532,128,384,47.69,209.43,73.88\r\nconvmixer_1536_20,173.51,5901.752,1024,224,48.68,33.03,51.63\r\nvolo_d2_384,168.46,1519.603,256,384,46.17,184.51,58.87\r\nresnetrs350,168.28,6085.136,1024,384,77.59,154.74,163.96\r\nxcit_large_24_p16_384_dist,160.71,4778.847,768,384,105.35,137.17,189.1\r\nresnetv2_152x2_bit_teacher_384,159.55,1604.488,256,384,136.16,132.56,236.34\r\nmaxvit_xlarge_tf_224,155.79,1643.178,256,224,97.49,191.02,474.95\r\nmaxvit_tiny_tf_512,155.64,822.373,128,512,33.49,257.59,31.05\r\nregnety_1280,155.18,2474.502,384,224,127.66,71.58,644.81\r\nvit_huge_patch14_224,154.03,6647.897,1024,224,167.43,139.43,658.75\r\nvit_huge_patch14_clip_224,153.92,6652.944,1024,224,167.4,139.41,632.05\r\nmaxxvitv2_rmlp_base_rw_384,153.34,1669.502,256,384,72.98,213.74,116.09\r\nefficientnetv2_xl,152.49,3357.61,512,512,93.85,247.32,208.12\r\ntf_efficientnetv2_xl,151.4,2536.254,384,512,93.85,247.32,208.12\r\ndeit3_huge_patch14_224_in21ft1k,149.08,6868.834,1024,224,167.4,139.41,632.13\r\ndeit3_huge_patch14_224,149.01,6871.974,1024,224,167.4,139.41,632.13\r\ncait_s24_384,148.46,3448.684,512,384,32.17,245.31,47.06\r\nresnest269e,147.61,3468.584,512,416,77.69,171.98,110.93\r\nnfnet_f3,147.43,3472.717,512,416,115.58,141.78,254.92\r\nefficientnet_b7,142.41,674.084,96,600,38.33,289.94,66.35\r\nresnetv2_50x3_bitm,138.27,1388.564,192,448,145.7,133.37,217.32\r\ntf_efficientnet_b7,137.89,696.181,96,600,38.33,289.94,66.35\r\nswin_large_patch4_window12_384,137.6,930.229,128,384,104.08,202.16,196.74\r\nig_resnext101_32x48d,132.29,2902.628,384,224,153.57,131.06,828.41\r\ndm_nfnet_f3,127.59,4012.898,512,416,115.58,141.78,254.92\r\ncoatnet_5_224,125.18,1022.512,128,224,145.49,194.24,687.47\r\nmaxvit_rmlp_base_rw_384,121.26,2111.079,256,384,70.97,318.95,116.14\r\nxcit_large_24_p8_224,119.97,6401.598,768,224,141.23,181.56,188.93\r\nxcit_large_24_p8_224_
dist,119.94,6403.17,768,224,141.23,181.56,188.93\r\nresnetrs420,119.93,6403.598,768,416,108.45,213.79,191.89\r\nresnetv2_152x2_bitm,117.33,2181.801,256,448,184.99,180.43,236.34\r\nmaxvit_base_tf_384,113.69,1688.826,192,384,73.8,332.9,119.65\r\nswinv2_cr_large_384,113.07,1132.03,128,384,108.95,404.96,196.68\r\neva_large_patch14_336,102.65,2493.904,256,336,191.1,270.24,304.53\r\nvit_large_patch14_clip_336,102.47,2498.286,256,336,191.11,270.24,304.53\r\nvit_large_patch16_384,102.37,2500.639,256,384,191.21,270.24,304.72\r\nxcit_small_24_p8_384_dist,102.36,5001.728,512,384,105.24,265.91,47.63\r\neva_giant_patch14_224,101.75,10063.521,1024,224,267.18,192.64,1012.56\r\nvit_giant_patch14_224,100.42,7648.057,768,224,267.18,192.64,1012.61\r\nvit_giant_patch14_clip_224,100.32,7655.265,768,224,267.18,192.64,1012.65\r\ncait_s36_384,99.37,5152.338,512,384,47.99,367.4,68.37\r\ndeit3_large_patch16_384,99.34,2577.037,256,384,191.21,270.24,304.76\r\ndeit3_large_patch16_384_in21ft1k,99.27,2578.907,256,384,191.21,270.24,304.76\r\nregnety_2560,97.99,2612.623,256,224,257.07,87.48,826.14\r\nmaxvit_small_tf_512,97.85,981.11,96,512,67.26,383.77,69.13\r\nswinv2_base_window12to24_192to384_22kft1k,95.95,666.98,64,384,55.25,280.36,87.92\r\nefficientnet_b8,95.3,1007.298,96,672,63.48,442.89,87.41\r\ntf_efficientnet_b8,92.65,1036.1,96,672,63.48,442.89,87.41\r\nbeit_large_patch16_384,88.55,2890.891,256,384,191.21,270.24,305.0\r\nresnetv2_101x3_bitm,83.1,2310.491,192,448,280.33,194.78,387.93\r\nmaxvit_large_tf_384,80.34,1593.284,128,384,132.55,445.84,212.03\r\nnfnet_f4,79.54,4827.723,384,512,216.26,262.26,316.07\r\nvolo_d3_448,73.5,2612.274,192,448,96.33,446.83,86.63\r\ndm_nfnet_f4,71.41,3584.699,256,512,216.26,262.26,316.07\r\nxcit_medium_24_p8_384_dist,70.91,5415.294,384,384,186.67,354.73,84.32\r\nswinv2_large_window12to24_192to384_22kft1k,60.84,788.97,48,384,116.15,407.83,196.74\r\nvit_gigantic_patch14_clip_224,60.15,8511.823,512,224,483.96,275.37,1844.91\r\nvit_gigantic_patch14_224,60.11,8517.29
1,512,224,483.95,275.37,1844.44\r\nnfnet_f5,58.02,4412.387,256,544,290.97,349.71,377.21\r\nvit_huge_patch14_clip_336,57.29,4468.831,256,336,390.97,407.54,632.46\r\nconvnextv2_huge,56.06,1712.576,96,384,337.96,232.35,660.29\r\nvolo_d4_448,54.47,2349.801,128,448,197.13,527.35,193.41\r\ntf_efficientnet_l2,54.12,1182.593,64,475,172.11,609.89,480.31\r\nmaxvit_base_tf_512,52.65,1823.292,96,512,138.02,703.99,119.88\r\nswinv2_cr_giant_224,52.12,2455.882,128,224,483.85,309.15,2598.76\r\ndm_nfnet_f5,50.7,5049.339,256,544,290.97,349.71,377.21\r\nswinv2_cr_huge_384,48.86,1309.971,64,384,352.04,583.18,657.94\r\nmaxvit_xlarge_tf_384,46.24,2076.289,96,384,292.78,668.76,475.32\r\nnfnet_f6,44.3,5778.548,256,576,378.69,452.2,438.36\r\nxcit_large_24_p8_384_dist,40.2,6368.127,256,384,415.0,531.82,188.93\r\neva_giant_patch14_336,39.77,6436.237,256,336,620.64,550.67,1013.01\r\ndm_nfnet_f6,39.62,6461.626,256,576,378.69,452.2,438.36\r\nmaxvit_large_tf_512,38.67,1654.908,64,512,244.75,942.15,212.33\r\nvolo_d5_448,37.56,3408.043,128,448,315.06,737.92,295.91\r\nbeit_large_patch16_512,35.36,2715.28,96,512,362.24,656.39,305.67\r\nnfnet_f7,34.74,7370.0,256,608,480.39,570.85,499.5\r\ncait_m36_384,32.36,7912.123,256,384,173.11,734.81,271.22\r\nresnetv2_152x4_bitm,30.0,4266.89,128,480,844.84,414.26,936.53\r\nvolo_d5_512,26.35,4857.602,128,512,425.09,1105.37,296.09\r\nmaxvit_xlarge_tf_512,23.12,2076.455,48,512,534.14,1413.22,475.77\r\nefficientnet_l2,21.26,1505.032,32,800,479.12,1707.39,480.31\r\nswinv2_cr_giant_384,15.03,2129.6,32,384,1450.71,1394.86,2598.76\r\ncait_m48_448,13.69,9353.048,128,448,329.41,1708.23,356.46\r\neva_giant_patch14_560,10.36,4631.037,48,560,1906.76,2577.17,1014.45\r\n"
  },
  {
    "path": "results/benchmark-infer-amp-nchw-pt210-cu121-rtx3090.csv",
    "content": "model,infer_img_size,infer_batch_size,infer_samples_per_sec,infer_step_time,infer_gmacs,infer_macts,param_count\ntinynet_e,106,1024.0,50604.03,20.225,0.03,0.69,2.04\nmobilenetv3_small_050,224,1024.0,46069.42,22.217,0.03,0.92,1.59\nlcnet_035,224,1024.0,41190.64,24.85,0.03,1.04,1.64\nlcnet_050,224,1024.0,37663.82,27.178,0.05,1.26,1.88\nmobilenetv3_small_075,224,1024.0,33398.64,30.649,0.05,1.3,2.04\nefficientvit_m0,224,1024.0,32179.13,31.812,0.08,0.91,2.35\nmobilenetv3_small_100,224,1024.0,29653.41,34.522,0.06,1.42,2.54\ntf_mobilenetv3_small_minimal_100,224,1024.0,28352.57,36.106,0.06,1.41,2.04\ntinynet_d,152,1024.0,27612.87,37.074,0.05,1.42,2.34\ntf_mobilenetv3_small_075,224,1024.0,27505.95,37.218,0.05,1.3,2.04\ntf_mobilenetv3_small_100,224,1024.0,24859.95,41.18,0.06,1.42,2.54\nefficientvit_m1,224,1024.0,24836.87,41.219,0.17,1.33,2.98\nlcnet_075,224,1024.0,24184.78,42.33,0.1,1.99,2.36\nefficientvit_m2,224,1024.0,21907.95,46.731,0.2,1.47,4.19\nmnasnet_small,224,1024.0,20764.95,49.303,0.07,2.16,2.03\nlevit_128s,224,1024.0,20669.44,49.531,0.31,1.88,7.78\nlcnet_100,224,1024.0,19774.93,51.772,0.16,2.52,2.95\nregnetx_002,224,1024.0,18945.55,54.04,0.2,2.16,2.68\nresnet10t,176,1024.0,18840.28,54.342,0.7,1.51,5.44\nefficientvit_m3,224,1024.0,18627.14,54.963,0.27,1.62,6.9\nmobilenetv2_035,224,1024.0,18464.78,55.447,0.07,2.86,1.68\nghostnet_050,224,1024.0,17741.46,57.707,0.05,1.77,2.59\nresnet18,160,1024.0,17592.15,58.198,0.93,1.27,11.69\nregnety_002,224,1024.0,17571.32,58.267,0.2,2.17,3.16\nlevit_conv_128s,224,1024.0,17529.9,58.404,0.31,1.88,7.78\nefficientvit_m4,224,1024.0,17446.52,58.683,0.3,1.7,8.8\nrepghostnet_050,224,1024.0,17090.91,59.904,0.05,2.02,2.31\nefficientvit_b0,224,1024.0,16784.26,60.999,0.1,2.87,3.41\nvit_tiny_r_s16_p8_224,224,1024.0,16479.31,62.128,0.43,1.85,6.34\nvit_small_patch32_224,224,1024.0,15974.78,64.091,1.12,2.09,22.88\nmnasnet_050,224,1024.0,15859.35,64.557,0.11,3.07,2.22\nmobilenetv2_050,224,1024.0,14885.11,68.783,0.1,3.64,1.97\ntinyn
et_c,184,1024.0,14726.2,69.525,0.11,2.87,2.46\npit_ti_224,224,1024.0,14628.51,69.989,0.5,2.75,4.85\npit_ti_distilled_224,224,1024.0,14546.3,70.385,0.51,2.77,5.1\nsemnasnet_050,224,1024.0,14351.42,71.341,0.11,3.44,2.08\nlevit_128,224,1024.0,14192.78,72.139,0.41,2.71,9.21\nrepghostnet_058,224,1024.0,13482.93,75.937,0.07,2.59,2.55\nmixer_s32_224,224,1024.0,13082.53,78.262,1.0,2.28,19.1\ncs3darknet_focus_s,256,1024.0,12838.86,79.748,0.69,2.7,3.27\nregnetx_004,224,1024.0,12620.59,81.127,0.4,3.14,5.16\nlevit_conv_128,224,1024.0,12584.5,81.359,0.41,2.71,9.21\ncs3darknet_s,256,1024.0,12531.56,81.703,0.72,2.97,3.28\nlcnet_150,224,1024.0,12510.06,81.844,0.34,3.79,4.5\nregnetx_004_tv,224,1024.0,12294.91,83.276,0.42,3.17,5.5\nefficientvit_m5,224,1024.0,12067.16,84.847,0.53,2.41,12.47\nmobilenetv3_large_075,224,1024.0,12041.45,85.029,0.16,4.0,3.99\nlevit_192,224,1024.0,11986.94,85.416,0.66,3.2,10.95\nresnet10t,224,1024.0,11963.05,85.587,1.1,2.43,5.44\ngernet_s,224,1024.0,11809.29,86.701,0.75,2.65,8.17\nese_vovnet19b_slim_dw,224,1024.0,11618.32,88.126,0.4,5.28,1.9\nvit_tiny_patch16_224,224,1024.0,11270.42,90.846,1.08,4.12,5.72\ndeit_tiny_patch16_224,224,1024.0,11259.37,90.936,1.08,4.12,5.72\ndeit_tiny_distilled_patch16_224,224,1024.0,11217.54,91.275,1.09,4.15,5.91\nrepghostnet_080,224,1024.0,11079.58,92.412,0.1,3.22,3.28\nmobilenetv3_rw,224,1024.0,10908.78,93.859,0.23,4.41,5.48\nlevit_conv_192,224,1024.0,10768.96,95.077,0.66,3.2,10.95\nmobilenetv3_large_100,224,1024.0,10731.24,95.412,0.23,4.41,5.48\nhardcorenas_a,224,1024.0,10620.31,96.408,0.23,4.38,5.26\ntf_mobilenetv3_large_075,224,1024.0,10495.83,97.552,0.16,4.0,3.99\nresnet14t,176,1024.0,10451.45,97.965,1.07,3.61,10.08\nmnasnet_075,224,1024.0,10423.24,98.231,0.23,4.77,3.17\ntf_mobilenetv3_large_minimal_100,224,1024.0,10369.07,98.745,0.22,4.4,3.92\nresnet34,160,1024.0,10330.89,99.109,1.87,1.91,21.8\nregnety_004,224,1024.0,9931.33,103.097,0.41,3.89,4.34\nnf_regnet_b0,192,1024.0,9884.05,103.59,0.37,3.15,8.76\nregnetx_006,224,102
4.0,9823.29,104.232,0.61,3.98,6.2\nhardcorenas_b,224,1024.0,9755.67,104.953,0.26,5.09,5.18\nhardcorenas_c,224,1024.0,9572.88,106.958,0.28,5.01,5.52\nghostnet_100,224,1024.0,9528.83,107.453,0.15,3.55,5.18\ntf_mobilenetv3_large_100,224,1024.0,9484.05,107.96,0.23,4.41,5.48\ntinynet_b,188,1024.0,9358.37,109.409,0.21,4.44,3.73\nmnasnet_100,224,1024.0,9357.9,109.416,0.33,5.46,4.38\ntf_efficientnetv2_b0,192,1024.0,9316.15,109.906,0.54,3.51,7.14\nrepghostnet_100,224,1024.0,9303.14,110.06,0.15,3.98,4.07\nmobilenetv2_075,224,1024.0,9280.78,110.325,0.22,5.86,2.64\nresnet18,224,1024.0,9222.44,111.023,1.82,2.48,11.69\npit_xs_distilled_224,224,1024.0,9172.76,111.624,1.11,4.15,11.0\nsemnasnet_075,224,1024.0,9145.4,111.959,0.23,5.54,2.91\npit_xs_224,224,1024.0,9134.12,112.096,1.1,4.12,10.62\nregnety_006,224,1024.0,9106.78,112.433,0.61,4.33,6.06\nconvnext_atto,224,1024.0,8993.29,113.851,0.55,3.81,3.7\nhardcorenas_d,224,1024.0,8915.53,114.845,0.3,4.93,7.5\nlevit_256,224,1024.0,8893.96,115.124,1.13,4.23,18.89\nseresnet18,224,1024.0,8718.39,117.442,1.82,2.49,11.78\nconvnext_atto_ols,224,1024.0,8549.03,119.769,0.58,4.11,3.7\nmobilenetv2_100,224,1024.0,8479.08,120.757,0.31,6.68,3.5\nlegacy_seresnet18,224,1024.0,8452.0,121.144,1.82,2.49,11.78\nspnasnet_100,224,1024.0,8438.72,121.334,0.35,6.03,4.42\nrepghostnet_111,224,1024.0,8382.7,122.146,0.18,4.38,4.54\nsemnasnet_100,224,1024.0,8351.88,122.597,0.32,6.23,3.89\ndla46_c,224,1024.0,8209.51,124.721,0.58,4.5,1.3\nrepvgg_a0,224,1024.0,8124.8,126.024,1.52,3.59,9.11\nlevit_conv_256,224,1024.0,7997.32,128.032,1.13,4.23,18.89\nedgenext_xx_small,256,1024.0,7955.06,128.711,0.26,3.33,1.33\nregnetx_008,224,1024.0,7889.15,129.787,0.81,5.15,7.26\nresnet18d,224,1024.0,7873.83,130.041,2.06,3.29,11.71\nconvnext_femto,224,1024.0,7867.13,130.151,0.79,4.57,5.22\nese_vovnet19b_slim,224,1024.0,7834.56,130.693,1.69,3.52,3.17\nmobilevit_xxs,256,1024.0,7818.95,130.953,0.34,5.74,1.27\nhardcorenas_f,224,1024.0,7811.68,131.075,0.35,5.57,8.2\nhardcorenas_e,224,1024.0,
7751.65,132.09,0.35,5.65,8.07\nefficientnet_lite0,224,1024.0,7716.09,132.699,0.4,6.74,4.65\nxcit_nano_12_p16_224,224,1024.0,7711.63,132.776,0.56,4.17,3.05\nghostnet_130,224,1024.0,7680.26,133.318,0.24,4.6,7.36\nlevit_256d,224,1024.0,7643.23,133.964,1.4,4.93,26.21\ntf_efficientnetv2_b0,224,1024.0,7637.19,134.07,0.73,4.77,7.14\nrepghostnet_130,224,1024.0,7550.55,135.609,0.25,5.24,5.48\nconvnext_femto_ols,224,1024.0,7514.81,136.254,0.82,4.87,5.23\nregnety_008,224,1024.0,7508.88,136.361,0.81,5.25,6.26\ntinynet_a,192,1024.0,7458.0,137.291,0.35,5.41,6.19\nfbnetc_100,224,1024.0,7362.02,139.082,0.4,6.51,5.57\ntf_efficientnetv2_b1,192,1024.0,7241.64,141.394,0.76,4.59,8.14\ncrossvit_tiny_240,240,1024.0,7093.57,144.345,1.3,5.67,7.01\nregnety_008_tv,224,1024.0,7067.28,144.882,0.84,5.42,6.43\nmobilevitv2_050,256,1024.0,7057.9,145.075,0.48,8.04,1.37\ncrossvit_9_240,240,1024.0,6964.15,147.028,1.55,5.59,8.55\ndla46x_c,224,1024.0,6837.04,149.761,0.54,5.66,1.07\ntf_efficientnet_lite0,224,1024.0,6819.73,150.142,0.4,6.74,4.65\nefficientnet_b0,224,1024.0,6721.47,152.337,0.4,6.75,5.29\nrexnet_100,224,1024.0,6689.15,153.073,0.41,7.44,4.8\nrexnetr_100,224,1024.0,6646.85,154.047,0.43,7.72,4.88\nlevit_conv_256d,224,1024.0,6618.0,154.719,1.4,4.93,26.21\nrepvit_m1,224,1024.0,6591.52,155.339,0.83,7.45,5.49\nefficientnet_b1_pruned,240,1024.0,6583.2,155.537,0.4,6.21,6.33\nrepghostnet_150,224,1024.0,6564.41,155.982,0.32,6.0,6.58\nmnasnet_140,224,1024.0,6559.1,156.108,0.6,7.71,7.12\nefficientvit_b1,224,1024.0,6458.82,158.532,0.53,7.25,9.1\nvisformer_tiny,224,1024.0,6456.3,158.594,1.27,5.72,10.32\ncrossvit_9_dagger_240,240,1024.0,6436.13,159.091,1.68,6.03,8.78\nresnet14t,224,1024.0,6404.13,159.886,1.69,5.8,10.08\ndla60x_c,224,1024.0,6404.11,159.885,0.59,6.01,1.32\nmobilenetv2_110d,224,1024.0,6387.15,160.311,0.45,8.71,4.52\nghostnetv2_100,224,1024.0,6375.73,160.599,0.18,4.55,6.16\nregnetz_005,224,1024.0,6372.66,160.676,0.52,5.86,7.12\nrepvit_m0_9,224,1024.0,6295.33,162.649,0.83,7.45,5.49\nedgenext_xx
_small,288,1024.0,6241.41,164.053,0.33,4.21,1.33\nfbnetv3_b,224,1024.0,6166.1,166.058,0.42,6.97,8.6\nconvnext_pico,224,1024.0,6145.95,166.603,1.37,6.1,9.05\ncs3darknet_focus_m,256,1024.0,6145.46,166.616,1.98,4.89,9.3\npvt_v2_b0,224,1024.0,6126.38,167.135,0.53,7.01,3.67\ntf_efficientnet_b0,224,1024.0,6026.91,169.894,0.4,6.75,5.29\nnf_regnet_b0,256,1024.0,5970.36,171.503,0.64,5.58,8.76\nresnetblur18,224,1024.0,5963.74,171.694,2.34,3.39,11.69\nese_vovnet19b_dw,224,1024.0,5956.2,171.911,1.34,8.25,6.54\nhrnet_w18_small,224,1024.0,5950.21,172.083,1.61,5.72,13.19\nresnet50,160,1024.0,5943.32,172.284,2.1,5.67,25.56\nrepvgg_a1,224,1024.0,5891.09,173.812,2.64,4.74,14.09\ncs3darknet_m,256,1024.0,5871.36,174.395,2.08,5.28,9.31\nconvnext_pico_ols,224,1024.0,5852.38,174.961,1.43,6.5,9.06\nvit_base_patch32_clip_224,224,1024.0,5768.1,177.517,4.37,4.19,88.22\ntf_efficientnetv2_b2,208,1024.0,5753.76,177.96,1.06,6.0,10.1\nvit_base_patch32_224,224,1024.0,5748.7,178.117,4.37,4.19,88.22\nsemnasnet_140,224,1024.0,5744.77,178.239,0.6,8.87,6.11\nskresnet18,224,1024.0,5740.29,178.378,1.82,3.24,11.96\nvit_tiny_r_s16_p8_384,384,1024.0,5663.72,180.79,1.25,5.39,6.36\nresnet50d,160,1024.0,5651.35,181.185,2.22,6.08,25.58\nresnet18,288,1024.0,5636.85,181.651,3.01,4.11,11.69\nmobilenetv2_140,224,1024.0,5629.57,181.886,0.6,9.57,6.11\nvit_small_patch32_384,384,1024.0,5499.31,186.195,3.26,6.07,22.92\nconvnext_atto,288,1024.0,5487.38,186.599,0.91,6.3,3.7\nefficientnet_b0_gn,224,1024.0,5481.83,186.788,0.42,6.75,5.29\nselecsls42,224,1024.0,5458.22,187.596,2.94,4.62,30.35\nefficientnet_lite1,240,1024.0,5452.84,187.782,0.62,10.14,5.42\nfbnetv3_d,224,1024.0,5449.6,187.893,0.52,8.5,10.31\npit_s_224,224,1024.0,5438.08,188.291,2.42,6.18,23.46\nselecsls42b,224,1024.0,5414.81,189.1,2.98,4.62,32.46\nresnet34,224,1024.0,5413.46,189.147,3.67,3.74,21.8\npit_s_distilled_224,224,1024.0,5407.14,189.368,2.45,6.22,24.04\nefficientvit_b1,256,1024.0,5391.26,189.926,0.69,9.46,9.1\nseresnet18,288,1024.0,5348.84,191.432,3.01,4
.11,11.78\ntf_efficientnetv2_b1,240,1024.0,5293.37,193.439,1.21,7.34,8.14\nlevit_384,224,1024.0,5286.23,193.7,2.36,6.26,39.13\nconvnextv2_atto,224,1024.0,5265.85,194.45,0.55,3.81,3.71\nrepvit_m1_0,224,1024.0,5259.32,194.683,1.13,8.69,7.3\nseresnet50,160,1024.0,5236.4,195.543,2.1,5.69,28.09\nconvnext_atto_ols,288,1024.0,5201.4,196.86,0.96,6.8,3.7\ngernet_m,224,1024.0,5195.05,197.1,3.02,5.24,21.14\nfbnetv3_b,256,1024.0,5178.49,197.729,0.55,9.1,8.6\nmixnet_s,224,1024.0,5129.76,199.608,0.25,6.25,4.13\nrepghostnet_200,224,1024.0,5125.91,199.759,0.54,7.96,9.8\nvit_base_patch32_clip_quickgelu_224,224,1024.0,5125.16,199.787,4.37,4.19,87.85\nseresnet34,224,1024.0,5104.13,200.612,3.67,3.74,21.96\nrepvit_m2,224,1024.0,5098.16,200.845,1.36,9.43,8.8\nrexnetr_130,224,1024.0,5082.35,201.471,0.68,9.81,7.61\nefficientnet_b0_g16_evos,224,1024.0,5016.04,204.134,1.01,7.42,8.11\nghostnetv2_130,224,1024.0,5011.79,204.307,0.28,5.9,8.96\nedgenext_x_small,256,1024.0,4992.08,205.112,0.54,5.93,2.34\necaresnet50t,160,1024.0,4989.39,205.225,2.21,6.04,25.57\ntiny_vit_5m_224,224,1024.0,4963.53,206.293,1.18,9.32,12.08\nrexnet_130,224,1024.0,4939.41,207.301,0.68,9.71,7.56\nlegacy_seresnet34,224,1024.0,4938.49,207.34,3.67,3.74,21.96\neva02_tiny_patch14_224,224,1024.0,4931.19,207.646,1.4,6.17,5.5\nresnet34d,224,1024.0,4924.89,207.912,3.91,4.54,21.82\ntf_efficientnet_lite1,240,1024.0,4918.8,208.17,0.62,10.14,5.42\nmixer_b32_224,224,1024.0,4917.45,208.227,3.24,6.29,60.29\nresnet50,176,1024.0,4914.58,208.348,2.62,6.92,25.56\nresnetrs50,160,1024.0,4904.24,208.788,2.29,6.2,35.69\nxcit_tiny_12_p16_224,224,1024.0,4900.19,208.961,1.24,6.29,6.72\nrepvit_m1_1,224,1024.0,4858.32,210.759,1.36,9.43,8.8\nlevit_conv_384,224,1024.0,4851.29,211.066,2.36,6.26,39.13\nefficientnet_es_pruned,224,1024.0,4832.02,211.909,1.81,8.73,5.44\nefficientnet_es,224,1024.0,4828.47,212.065,1.81,8.73,5.44\ndla34,224,1024.0,4823.61,212.277,3.07,5.02,15.74\nresnet26,224,1024.0,4806.46,213.036,2.36,7.35,16.0\nresnet18d,288,1024.0,4806.17,
213.049,3.41,5.43,11.71\nresnext50_32x4d,160,1024.0,4797.48,213.435,2.17,7.35,25.03\ntf_mixnet_s,224,1024.0,4783.68,214.05,0.25,6.25,4.13\nconvnext_femto,288,1024.0,4774.19,214.475,1.3,7.56,5.22\nefficientnet_b1,224,1024.0,4707.45,217.516,0.59,9.36,7.79\ngmlp_ti16_224,224,1024.0,4694.71,218.108,1.34,7.55,5.87\ncs3darknet_focus_m,288,1024.0,4686.36,218.495,2.51,6.19,9.3\nmobilenetv2_120d,224,1024.0,4673.25,219.108,0.69,11.97,5.83\nselecsls60,224,1024.0,4656.74,219.885,3.59,5.52,30.67\nselecsls60b,224,1024.0,4628.67,221.219,3.63,5.52,32.77\ntf_efficientnet_es,224,1024.0,4617.85,221.737,1.81,8.73,5.44\nresmlp_12_224,224,1024.0,4607.73,222.224,3.01,5.5,15.35\nvit_small_patch16_224,224,1024.0,4586.65,223.246,4.25,8.25,22.05\ndeit_small_patch16_224,224,1024.0,4584.29,223.359,4.25,8.25,22.05\nfbnetv3_d,256,1024.0,4567.33,224.19,0.68,11.1,10.31\ngmixer_12_224,224,1024.0,4565.4,224.285,2.67,7.26,12.7\ndeit_small_distilled_patch16_224,224,1024.0,4564.97,224.306,4.27,8.29,22.44\nconvnext_femto_ols,288,1024.0,4561.96,224.454,1.35,8.06,5.23\nefficientnet_b0_g8_gn,224,1024.0,4561.27,224.488,0.66,6.75,6.56\nefficientnet_cc_b0_8e,224,1024.0,4542.29,225.426,0.42,9.42,24.01\nefficientnet_cc_b0_4e,224,1024.0,4540.5,225.515,0.41,9.42,13.31\nrepvgg_b0,224,1024.0,4526.99,226.188,3.41,6.15,15.82\nmixer_s16_224,224,1024.0,4518.8,226.598,3.79,5.97,18.53\ncs3darknet_m,288,1024.0,4513.42,226.868,2.63,6.69,9.31\nconvnextv2_femto,224,1024.0,4509.16,227.082,0.79,4.57,5.23\nregnetx_016,224,1024.0,4476.6,228.734,1.62,7.93,9.19\nnf_regnet_b1,256,1024.0,4444.68,230.377,0.82,7.27,10.22\nvit_base_patch32_clip_256,256,1024.0,4442.76,230.476,5.68,5.44,87.86\nmobilevitv2_075,256,1024.0,4419.22,231.704,1.05,12.06,2.87\nrexnetr_150,224,1024.0,4415.72,231.888,0.89,11.13,9.78\ndarknet17,256,1024.0,4402.14,232.603,3.26,7.18,14.3\nresnet26d,224,1024.0,4396.77,232.887,2.6,8.15,16.01\nresnetaa34d,224,1024.0,4381.9,233.677,4.43,5.07,21.82\nefficientnet_b2_pruned,260,1024.0,4356.91,235.018,0.73,9.13,8.31\nconvnext
_nano,224,1024.0,4340.39,235.913,2.46,8.37,15.59\necaresnet50d_pruned,224,1024.0,4337.48,236.07,2.53,6.43,19.94\nefficientformer_l1,224,1024.0,4271.29,239.728,1.3,5.53,12.29\nnf_resnet26,224,1024.0,4216.31,242.856,2.41,7.35,16.0\ndeit3_small_patch16_224,224,1024.0,4203.29,243.607,4.25,8.25,22.06\nnf_regnet_b2,240,1024.0,4197.9,243.92,0.97,7.23,14.31\ntf_efficientnet_cc_b0_4e,224,1024.0,4196.5,244.002,0.41,9.42,13.31\ntf_efficientnet_cc_b0_8e,224,1024.0,4190.23,244.367,0.42,9.42,24.01\nregnety_016,224,1024.0,4161.97,246.026,1.63,8.04,11.2\nrexnet_150,224,1024.0,4147.2,246.903,0.9,11.21,9.73\nghostnetv2_160,224,1024.0,4116.92,248.718,0.42,7.23,12.39\ntiny_vit_11m_224,224,1024.0,4086.56,250.566,1.9,10.73,20.35\npoolformer_s12,224,1024.0,4071.24,251.51,1.82,5.53,11.92\nregnetz_005,288,1024.0,4056.8,252.404,0.86,9.68,7.12\nefficientnet_lite2,260,1024.0,4046.71,253.034,0.89,12.9,6.09\ndarknet21,256,1024.0,4001.6,255.887,3.93,7.47,20.86\nefficientvit_b1,288,1024.0,3997.55,256.145,0.87,11.96,9.1\nresnext50_32x4d,176,1024.0,3992.51,256.47,2.71,8.97,25.03\nedgenext_x_small,288,1024.0,3965.96,258.184,0.68,7.5,2.34\nefficientnet_b1,256,1024.0,3961.36,258.486,0.77,12.22,7.79\nconvnext_nano_ols,224,1024.0,3944.64,259.582,2.65,9.38,15.65\nresnest14d,224,1024.0,3932.19,260.404,2.76,7.33,10.61\ntf_efficientnet_b1,240,1024.0,3922.37,261.055,0.71,10.88,7.79\nflexivit_small,240,1024.0,3913.54,261.645,4.88,9.46,22.06\nmobilevit_xs,256,768.0,3904.8,196.672,0.93,13.62,2.32\nregnetz_b16,224,1024.0,3893.58,262.986,1.45,9.95,9.72\nsedarknet21,256,1024.0,3874.2,264.302,3.93,7.47,20.95\nresnext26ts,256,1024.0,3832.52,267.176,2.43,10.52,10.3\nmobileone_s1,224,1024.0,3826.99,267.562,0.86,9.67,4.83\ntf_efficientnetv2_b2,260,1024.0,3817.93,268.197,1.72,9.84,10.1\nedgenext_small,256,1024.0,3770.23,271.588,1.26,9.07,5.59\nconvnext_pico,288,1024.0,3731.48,274.411,2.27,10.08,9.05\ngernet_l,256,1024.0,3727.69,274.69,4.57,8.0,31.08\nseresnext26ts,256,1024.0,3724.62,274.916,2.43,10.52,10.39\neca_resnext2
6ts,256,1024.0,3723.07,275.031,2.43,10.52,10.3\ndpn48b,224,1024.0,3716.75,275.497,1.69,8.92,9.13\ntf_efficientnet_lite2,260,1024.0,3695.32,277.096,0.89,12.9,6.09\ngcresnext26ts,256,1024.0,3691.17,277.409,2.43,10.53,10.48\nefficientnet_b2,256,1024.0,3671.26,278.912,0.89,12.81,9.11\nnf_ecaresnet26,224,1024.0,3640.87,281.24,2.41,7.36,16.0\nresnetblur18,288,1024.0,3639.91,281.314,3.87,5.6,11.69\nnf_seresnet26,224,1024.0,3637.43,281.506,2.41,7.36,17.4\nresnet101,160,1024.0,3616.15,283.164,4.0,8.28,44.55\nvit_relpos_small_patch16_224,224,1024.0,3590.52,285.183,4.24,9.38,21.98\nresnet26t,256,1024.0,3578.9,286.111,3.35,10.52,16.01\nvit_srelpos_small_patch16_224,224,1024.0,3572.97,286.585,4.23,8.49,21.97\nconvnext_pico_ols,288,1024.0,3558.03,287.789,2.37,10.74,9.06\ncs3darknet_focus_l,256,1024.0,3544.69,288.872,4.66,8.03,21.15\ntf_efficientnetv2_b3,240,1024.0,3543.38,288.978,1.93,9.95,14.36\nlegacy_seresnext26_32x4d,224,1024.0,3516.72,291.169,2.49,9.39,16.79\npvt_v2_b1,224,1024.0,3507.87,291.903,2.04,14.01,14.01\nrepvit_m3,224,1024.0,3501.61,292.425,1.89,13.94,10.68\nrepvgg_a2,224,1024.0,3495.75,292.916,5.7,6.26,28.21\nefficientnetv2_rw_t,224,1024.0,3486.59,293.686,1.93,9.94,13.65\necaresnet101d_pruned,224,1024.0,3483.13,293.977,3.48,7.69,24.88\nese_vovnet19b_dw,288,1024.0,3478.51,294.369,2.22,13.63,6.54\nmixnet_m,224,1024.0,3474.22,294.731,0.36,8.19,5.01\nedgenext_small_rw,256,1024.0,3458.08,296.106,1.58,9.51,7.83\nconvnextv2_pico,224,1024.0,3458.0,296.113,1.37,6.1,9.07\ngc_efficientnetv2_rw_t,224,1024.0,3445.15,297.218,1.94,9.97,13.68\ncs3darknet_l,256,1024.0,3414.99,299.845,4.86,8.55,21.16\nefficientnet_b3_pruned,300,1024.0,3412.19,300.09,1.04,11.86,9.86\nnf_regnet_b1,288,1024.0,3373.08,303.57,1.02,9.2,10.22\ntf_mixnet_m,224,1024.0,3353.29,305.361,0.36,8.19,5.01\nconvit_tiny,224,1024.0,3342.83,306.316,1.26,7.94,5.71\neca_botnext26ts_256,256,1024.0,3341.38,306.449,2.46,11.6,10.59\necaresnext50t_32x4d,224,1024.0,3327.77,307.703,2.7,10.09,15.41\necaresnext26t_32x4d,224,1024.
0,3321.66,308.269,2.7,10.09,15.41\nresnet34,288,1024.0,3320.08,308.416,6.07,6.18,21.8\nseresnext26t_32x4d,224,1024.0,3319.26,308.491,2.7,10.09,16.81\nvit_tiny_patch16_384,384,1024.0,3311.59,309.206,3.16,12.08,5.79\nvit_base_patch32_plus_256,256,1024.0,3301.22,310.177,7.7,6.35,119.48\nseresnext26d_32x4d,224,1024.0,3300.83,310.214,2.73,10.19,16.81\nskresnet34,224,1024.0,3294.57,310.803,3.67,5.13,22.28\nmobilevitv2_100,256,768.0,3290.58,233.384,1.84,16.08,4.9\nvit_relpos_small_patch16_rpn_224,224,1024.0,3279.29,312.245,4.24,9.38,21.97\neca_halonext26ts,256,1024.0,3270.39,313.1,2.44,11.46,10.76\ncoatnet_pico_rw_224,224,1024.0,3250.74,314.993,1.96,12.91,10.85\nrexnetr_200,224,768.0,3238.38,237.146,1.59,15.11,16.52\necaresnet26t,256,1024.0,3228.23,317.19,3.35,10.53,16.01\necaresnetlight,224,1024.0,3222.96,317.708,4.11,8.42,30.16\ncoatnext_nano_rw_224,224,1024.0,3218.47,318.153,2.36,10.68,14.7\ncs3sedarknet_l,256,1024.0,3218.11,318.188,4.86,8.56,21.91\ncoat_lite_tiny,224,1024.0,3216.35,318.362,1.6,11.65,5.72\nnf_regnet_b2,272,1024.0,3205.43,319.447,1.22,9.27,14.31\nconvnextv2_atto,288,1024.0,3199.9,319.999,0.91,6.3,3.71\nvit_small_r26_s32_224,224,1024.0,3174.89,322.52,3.54,9.44,36.43\nbotnet26t_256,256,1024.0,3173.81,322.63,3.32,11.98,12.49\nresnetv2_50,224,1024.0,3170.95,322.919,4.11,11.11,25.55\nfastvit_t8,256,1024.0,3164.9,323.538,0.7,8.63,4.03\ncrossvit_small_240,240,1024.0,3164.86,323.541,5.09,11.34,26.86\nbat_resnext26ts,256,1024.0,3139.26,326.18,2.53,12.51,10.73\nseresnet34,288,1024.0,3136.77,326.439,6.07,6.18,21.96\nhalonet26t,256,1024.0,3132.55,326.879,3.19,11.69,12.48\nlambda_resnet26t,256,1024.0,3123.88,327.786,3.02,11.87,10.96\nrexnet_200,224,768.0,3120.89,246.073,1.56,14.91,16.37\nvit_small_resnet26d_224,224,1024.0,3106.26,329.645,5.04,10.65,63.61\nhrnet_w18_small_v2,224,1024.0,3095.42,330.8,2.62,9.65,15.6\nmobileone_s2,224,1024.0,3085.91,331.82,1.34,11.55,7.88\nvit_relpos_base_patch32_plus_rpn_256,256,1024.0,3081.88,332.247,7.59,6.63,119.42\ntresnet_m,224,102
4.0,3073.78,333.129,5.75,7.31,31.39\nresnet32ts,256,1024.0,3072.91,333.224,4.63,11.58,17.96\ncoatnet_nano_cc_224,224,1024.0,3066.72,333.896,2.13,13.1,13.76\nresnet101,176,1024.0,3047.24,336.031,4.92,10.08,44.55\nresnet33ts,256,1024.0,3032.6,337.653,4.76,11.66,19.68\nefficientvit_b2,224,1024.0,3030.14,337.927,1.6,14.62,24.33\nresnet50,224,1024.0,3021.24,338.922,4.11,11.11,25.56\ncoat_lite_mini,224,1024.0,3021.22,338.925,2.0,12.25,11.01\nresnet34d,288,1024.0,3013.98,339.739,6.47,7.51,21.82\ncspresnet50,256,1024.0,3012.57,339.898,4.54,11.5,21.62\nresnetv2_50t,224,1024.0,3011.73,339.991,4.32,11.82,25.57\ndpn68b,224,1024.0,3008.58,340.347,2.35,10.47,12.61\ncoatnet_nano_rw_224,224,1024.0,3001.39,341.165,2.29,13.29,15.14\ndpn68,224,1024.0,3001.33,341.17,2.35,10.47,12.61\nresnetv2_50d,224,1024.0,2992.98,342.12,4.35,11.92,25.57\nconvnext_tiny,224,1024.0,2986.71,342.841,4.47,13.44,28.59\nlevit_512,224,1024.0,2974.0,344.305,5.64,10.22,95.17\ndla60,224,1024.0,2959.44,345.999,4.26,10.16,22.04\nfbnetv3_g,240,1024.0,2957.87,346.184,1.28,14.87,16.62\ntf_efficientnet_b2,260,1024.0,2957.04,346.28,1.02,13.83,9.11\nefficientnet_em,240,1024.0,2948.76,347.254,3.04,14.34,6.9\ncrossvit_15_240,240,1024.0,2948.65,347.266,5.17,12.01,27.53\neca_resnet33ts,256,1024.0,2945.18,347.676,4.76,11.66,19.68\nseresnet33ts,256,1024.0,2940.4,348.24,4.76,11.66,19.78\nregnetx_032,224,1024.0,2932.49,349.18,3.2,11.37,15.3\ngcresnet33ts,256,1024.0,2919.42,350.744,4.76,11.68,19.88\nmobileone_s0,224,1024.0,2911.68,351.675,1.09,15.48,5.29\nresnet50t,224,1024.0,2893.61,353.872,4.32,11.82,25.57\nresnet50c,224,1024.0,2893.38,353.9,4.35,11.92,25.58\nrepvit_m1_5,224,1024.0,2891.53,354.126,2.31,15.7,14.64\nselecsls84,224,1024.0,2891.52,354.128,5.9,7.57,50.95\nefficientnet_cc_b1_8e,240,1024.0,2883.89,355.064,0.75,15.44,39.72\nhaloregnetz_b,224,1024.0,2883.33,355.134,1.97,11.94,11.68\nvgg11,224,1024.0,2881.16,355.4,7.61,7.44,132.86\nresnet50d,224,1024.0,2872.03,356.53,4.35,11.92,25.58\nresnest26d,224,1024.0,2863.53,357.5
9,3.64,9.97,17.07\ntf_efficientnet_em,240,1024.0,2860.98,357.908,3.04,14.34,6.9\nvisformer_small,224,1024.0,2837.73,360.841,4.88,11.43,40.22\ncspresnet50w,256,1024.0,2834.78,361.216,5.04,12.19,28.12\nvovnet39a,224,1024.0,2834.5,361.252,7.09,6.73,22.6\nwide_resnet50_2,176,1024.0,2833.12,361.428,7.29,8.97,68.88\ncspresnet50d,256,1024.0,2828.94,361.963,4.86,12.55,21.64\nresnet26,288,1024.0,2826.83,362.233,3.9,12.15,16.0\nresnext26ts,288,1024.0,2826.2,362.312,3.07,13.31,10.3\nefficientnet_b2,288,1024.0,2822.88,362.739,1.12,16.2,9.11\nregnetv_040,224,1024.0,2785.35,367.627,4.0,12.29,20.64\nlevit_512d,224,1024.0,2784.75,367.707,5.85,11.3,92.5\nlevit_conv_512,224,1024.0,2781.3,368.162,5.64,10.22,95.17\ndeit3_medium_patch16_224,224,1024.0,2780.75,368.235,7.53,10.99,38.85\ncrossvit_15_dagger_240,240,1024.0,2776.34,368.82,5.5,12.68,28.21\nregnety_040,224,1024.0,2768.62,369.849,4.0,12.29,20.65\nlegacy_seresnet50,224,1024.0,2766.98,370.066,3.88,10.6,28.09\neca_resnext26ts,288,1024.0,2756.51,371.473,3.07,13.32,10.3\nseresnext26ts,288,1024.0,2751.54,372.144,3.07,13.32,10.39\nregnety_032,224,1024.0,2744.75,373.065,3.2,11.26,19.44\nconvnext_tiny_hnf,224,1024.0,2744.61,373.082,4.47,13.44,28.59\nconvnextv2_femto,288,1024.0,2744.25,373.131,1.3,7.56,5.23\neca_vovnet39b,224,1024.0,2742.23,373.408,7.09,6.74,22.6\nresnetv2_50x1_bit,224,1024.0,2741.57,373.497,4.23,11.11,25.55\ngcresnext26ts,288,1024.0,2728.39,375.302,3.07,13.33,10.48\nresnetaa50,224,1024.0,2728.16,375.334,5.15,11.64,25.56\ndensenet121,224,1024.0,2725.3,375.726,2.87,6.9,7.98\nese_vovnet39b,224,1024.0,2723.97,375.912,7.09,6.74,24.57\nmixnet_l,224,1024.0,2712.93,377.44,0.58,10.84,7.33\ntf_efficientnet_cc_b1_8e,240,1024.0,2710.75,377.745,0.75,15.44,39.72\nmobilevit_s,256,768.0,2698.84,284.557,1.86,17.03,5.58\ncs3darknet_focus_l,288,1024.0,2695.52,379.878,5.9,10.16,21.15\nseresnet50,224,1024.0,2693.22,380.203,4.11,11.13,28.09\nxcit_nano_12_p16_384,384,1024.0,2679.82,382.104,1.64,12.14,3.05\nresnetaa34d,288,1024.0,2675.02,382.79
,7.33,8.38,21.82\ntwins_svt_small,224,1024.0,2670.35,383.458,2.82,10.7,24.06\necaresnet50d_pruned,288,1024.0,2662.19,384.634,4.19,10.61,19.94\nconvnext_nano,288,1024.0,2634.79,388.635,4.06,13.84,15.59\nresnet50_gn,224,1024.0,2631.91,389.06,4.14,11.11,25.56\nresnetv2_50d_gn,224,1024.0,2623.43,390.317,4.38,11.92,25.57\nxcit_tiny_24_p16_224,224,1024.0,2616.39,391.368,2.34,11.82,12.12\ntf_mixnet_l,224,1024.0,2615.89,391.443,0.58,10.84,7.33\nres2net50_48w_2s,224,1024.0,2611.06,392.166,4.18,11.72,25.29\ngcvit_xxtiny,224,1024.0,2608.34,392.574,2.14,15.36,12.0\ncs3darknet_l,288,1024.0,2607.33,392.728,6.16,10.83,21.16\nresnetaa50d,224,1024.0,2596.72,394.332,5.39,12.44,25.58\nvgg11_bn,224,1024.0,2590.27,395.315,7.62,7.44,132.87\nvit_base_resnet26d_224,224,1024.0,2580.41,396.822,6.93,12.34,101.4\nvit_relpos_medium_patch16_cls_224,224,1024.0,2579.62,396.946,7.55,13.3,38.76\necaresnet50t,224,1024.0,2579.62,396.946,4.32,11.83,25.57\ncoatnet_rmlp_nano_rw_224,224,1024.0,2579.38,396.984,2.51,18.21,15.15\ndavit_tiny,224,1024.0,2578.68,397.091,4.47,17.08,28.36\nseresnet50t,224,1024.0,2574.91,397.672,4.32,11.83,28.1\nresnet26d,288,1024.0,2569.96,398.438,4.29,13.48,16.01\nmobilevitv2_125,256,768.0,2568.23,299.03,2.86,20.1,7.48\nnf_regnet_b3,288,1024.0,2563.17,399.494,1.67,11.84,18.59\necaresnet50d,224,1024.0,2560.76,399.87,4.35,11.93,25.58\nlevit_conv_512d,224,1024.0,2557.63,400.359,5.85,11.3,92.5\nresnet152,160,1024.0,2531.48,404.495,5.9,11.51,60.19\nefficientvit_b2,256,1024.0,2531.18,404.544,2.09,19.03,24.33\nmobileone_s3,224,1024.0,2513.71,407.355,1.94,13.85,10.17\nresnetrs50,224,1024.0,2512.05,407.624,4.48,12.14,35.69\ntwins_pcpvt_small,224,1024.0,2506.77,408.482,3.68,15.51,24.11\nresnetblur50,224,1024.0,2495.43,410.338,5.16,12.02,25.56\npoolformerv2_s12,224,1024.0,2489.38,411.337,1.83,5.53,11.89\nconvnextv2_nano,224,1024.0,2480.83,412.755,2.46,8.37,15.62\nregnetx_040,224,1024.0,2478.03,413.222,3.99,12.2,22.12\neca_nfnet_l0,224,1024.0,2476.91,413.407,4.35,10.47,24.14\ngcresnext50ts,
256,1024.0,2473.39,413.995,3.75,15.46,15.67\nnfnet_l0,224,1024.0,2472.84,414.088,4.36,10.47,35.07\ntiny_vit_21m_224,224,1024.0,2468.7,414.781,4.08,15.96,33.22\ncs3sedarknet_l,288,1024.0,2463.79,415.609,6.16,10.83,21.91\nresnet50s,224,1024.0,2456.52,416.838,5.47,13.52,25.68\ndla60x,224,1024.0,2437.95,420.012,3.54,13.8,17.35\ndensenetblur121d,224,1024.0,2433.6,420.765,3.11,7.9,8.0\nedgenext_small,320,1024.0,2424.08,422.414,1.97,14.16,5.59\nresnext50_32x4d,224,1024.0,2410.12,424.862,4.26,14.4,25.03\ninception_next_tiny,224,1024.0,2404.04,425.937,4.19,11.98,28.06\nconvnext_nano_ols,288,1024.0,2397.01,427.188,4.38,15.5,15.65\nvit_relpos_medium_patch16_224,224,1024.0,2394.54,427.629,7.5,12.13,38.75\nefficientnet_lite3,300,512.0,2392.78,213.967,1.65,21.85,8.2\nvit_srelpos_medium_patch16_224,224,1024.0,2386.54,429.062,7.49,11.32,38.74\nregnetz_c16,256,1024.0,2383.36,429.635,2.51,16.57,13.46\nresnetblur50d,224,1024.0,2382.64,429.765,5.4,12.82,25.58\nvit_base_r26_s32_224,224,1024.0,2381.88,429.901,6.76,11.54,101.38\ngcresnet50t,256,1024.0,2372.96,431.518,5.42,14.67,25.9\nregnety_040_sgn,224,1024.0,2371.57,431.77,4.03,12.29,20.65\nres2net50_26w_4s,224,1024.0,2359.62,433.957,4.28,12.61,25.7\nvovnet57a,224,1024.0,2357.12,434.416,8.95,7.52,36.64\nresmlp_24_224,224,1024.0,2350.19,435.697,5.96,10.91,30.02\nmaxvit_pico_rw_256,256,768.0,2346.84,327.238,1.68,18.77,7.46\ninception_v3,299,1024.0,2346.46,436.391,5.73,8.97,23.83\nmaxvit_rmlp_pico_rw_256,256,768.0,2343.0,327.774,1.69,21.32,7.52\nseresnetaa50d,224,1024.0,2333.21,438.87,5.4,12.46,28.11\nfocalnet_tiny_srf,224,1024.0,2331.81,439.132,4.42,16.32,28.43\ncspresnext50,256,1024.0,2330.62,439.358,4.05,15.86,20.57\nres2net50_14w_8s,224,1024.0,2327.89,439.871,4.21,13.28,25.06\ndla60_res2net,224,1024.0,2327.26,439.99,4.15,12.34,20.85\ncoatnet_0_rw_224,224,1024.0,2319.62,441.438,4.23,15.1,27.44\nregnetz_b16,288,1024.0,2318.51,441.651,2.39,16.43,9.72\ngmixer_24_224,224,1024.0,2315.73,442.182,5.28,14.45,24.72\nresnext50d_32x4d,224,1024.0,2
305.65,444.116,4.5,15.2,25.05\nlambda_resnet26rpt_256,256,768.0,2282.36,336.484,3.16,11.87,10.99\nese_vovnet57b,224,1024.0,2279.9,449.132,8.95,7.52,38.61\nresnest50d_1s4x24d,224,1024.0,2278.75,449.357,4.43,13.57,25.68\ndla60_res2next,224,1024.0,2268.77,451.333,3.49,13.17,17.03\nsehalonet33ts,256,1024.0,2262.52,452.582,3.55,14.7,13.69\nres2net50d,224,1024.0,2256.17,453.855,4.52,13.41,25.72\nvit_medium_patch16_gap_240,240,1024.0,2253.27,454.439,8.6,12.57,44.4\nres2next50,224,1024.0,2251.4,454.817,4.2,13.71,24.67\nresnet32ts,288,1024.0,2244.87,456.139,5.86,14.65,17.96\nedgenext_base,256,1024.0,2239.63,457.204,3.85,15.58,18.51\nefficientvit_l1,224,1024.0,2235.54,458.043,5.27,15.85,52.65\nskresnet50,224,1024.0,2226.66,459.87,4.11,12.5,25.8\nnfnet_f0,192,1024.0,2226.44,459.916,7.21,10.16,71.49\ntf_efficientnetv2_b3,300,1024.0,2226.35,459.935,3.04,15.74,14.36\nefficientnetv2_rw_t,288,1024.0,2225.5,460.11,3.19,16.42,13.65\nnf_ecaresnet50,224,1024.0,2219.3,461.395,4.21,11.13,25.56\ndarknetaa53,256,1024.0,2219.0,461.459,7.97,12.39,36.02\ndensenet169,224,1024.0,2218.3,461.604,3.4,7.3,14.15\nnf_seresnet50,224,1024.0,2217.49,461.772,4.21,11.13,28.09\nedgenext_small_rw,320,1024.0,2214.15,462.468,2.46,14.85,7.83\nresnet33ts,288,1024.0,2214.09,462.482,6.02,14.75,19.68\nxcit_small_12_p16_224,224,1024.0,2207.67,463.826,4.82,12.57,26.25\nfocalnet_tiny_lrf,224,1024.0,2205.41,464.301,4.49,17.76,28.65\nresnet51q,256,1024.0,2195.84,466.325,6.38,16.55,35.7\nrepvgg_b1g4,224,1024.0,2195.75,466.344,8.15,10.64,39.97\nseresnext50_32x4d,224,1024.0,2188.04,467.986,4.26,14.42,27.56\nvit_relpos_medium_patch16_rpn_224,224,1024.0,2187.29,468.147,7.5,12.13,38.73\ncs3darknet_focus_x,256,1024.0,2185.7,468.489,8.03,10.69,35.02\nlegacy_seresnext50_32x4d,224,1024.0,2184.4,468.766,4.26,14.42,27.56\ntf_efficientnet_lite3,300,512.0,2178.27,235.039,1.65,21.85,8.2\nresnet26t,320,1024.0,2173.03,471.22,5.24,16.44,16.01\ngc_efficientnetv2_rw_t,288,1024.0,2170.84,471.696,3.2,16.45,13.68\ngmlp_s16_224,224,1024.0,216
1.42,473.752,4.42,15.1,19.42\nseresnet33ts,288,1024.0,2156.33,474.868,6.02,14.76,19.78\neca_resnet33ts,288,1024.0,2152.27,475.765,6.02,14.76,19.68\nfastvit_t12,256,1024.0,2151.9,475.846,1.42,12.42,7.55\nnf_regnet_b3,320,1024.0,2148.66,476.564,2.05,14.61,18.59\neva02_small_patch14_224,224,1024.0,2144.78,477.426,5.53,12.34,21.62\nresnet152,176,1024.0,2139.0,478.716,7.22,13.99,60.19\nvit_medium_patch16_reg4_gap_256,256,1024.0,2137.51,479.051,9.93,14.51,38.87\ngcresnet33ts,288,1024.0,2134.49,479.728,6.02,14.78,19.88\nskresnet50d,224,1024.0,2133.34,479.986,4.36,13.31,25.82\necaresnet101d_pruned,288,1024.0,2128.45,481.09,5.75,12.71,24.88\nfbnetv3_g,288,1024.0,2127.74,481.25,1.77,21.09,16.62\nvit_medium_patch16_reg4_256,256,1024.0,2119.83,483.047,9.97,14.56,38.87\neva02_tiny_patch14_336,336,1024.0,2106.54,486.094,3.14,13.85,5.76\nconvnextv2_pico,288,1024.0,2101.04,487.367,2.27,10.08,9.07\nnf_resnet50,256,1024.0,2100.31,487.536,5.46,14.52,25.56\nresnetrs101,192,1024.0,2100.21,487.558,6.04,12.7,63.62\npoolformer_s24,224,1024.0,2099.97,487.615,3.41,10.68,21.39\npvt_v2_b2,224,1024.0,2099.92,487.626,3.9,24.96,25.36\nefficientnet_b3,288,512.0,2089.91,244.977,1.63,21.49,12.23\ncs3sedarknet_xdw,256,1024.0,2078.01,492.768,5.97,17.18,21.6\ndarknet53,256,1024.0,2077.03,493.0,9.31,12.39,41.61\necaresnet50t,256,1024.0,2076.41,493.149,5.64,15.45,25.57\ncs3darknet_x,256,1024.0,2060.02,497.071,8.38,11.35,35.05\nxcit_nano_12_p8_224,224,1024.0,2059.06,497.302,2.16,15.71,3.05\nmobilevitv2_150,256,512.0,2058.61,248.702,4.09,24.11,10.59\nrexnetr_300,224,1024.0,2042.01,501.455,3.39,22.16,34.81\nlambda_resnet50ts,256,1024.0,2041.61,501.552,5.07,17.48,21.54\nfastvit_s12,256,1024.0,2028.81,504.718,1.82,13.67,9.47\ncoatnet_rmlp_0_rw_224,224,1024.0,2024.25,505.855,4.52,21.26,27.45\ngcvit_xtiny,224,1024.0,2023.42,506.063,2.93,20.26,19.98\nfastvit_sa12,256,1024.0,2022.28,506.347,1.96,13.83,11.58\ncrossvit_18_240,240,1024.0,2014.44,508.318,8.21,16.14,43.27\nvit_medium_patch16_gap_256,256,1024.0,1996.45
,512.899,9.78,14.29,38.86\nresnet61q,256,1024.0,1996.22,512.958,7.8,17.01,36.85\ncoatnet_bn_0_rw_224,224,1024.0,1985.64,515.69,4.48,18.41,27.44\nvit_base_patch32_384,384,1024.0,1984.44,516.005,12.67,12.14,88.3\nvit_base_patch32_clip_384,384,1024.0,1981.44,516.784,12.67,12.14,88.3\ncspdarknet53,256,1024.0,1981.04,516.888,6.57,16.81,27.64\nsebotnet33ts_256,256,512.0,1977.98,258.841,3.89,17.46,13.7\necaresnet26t,320,1024.0,1973.79,518.786,5.24,16.44,16.01\nvit_base_resnet50d_224,224,1024.0,1971.35,519.428,8.68,16.1,110.97\ncs3sedarknet_x,256,1024.0,1962.3,521.825,8.38,11.35,35.4\nregnetx_080,224,1024.0,1962.04,521.894,8.02,14.06,39.57\nseresnext26t_32x4d,288,1024.0,1950.77,524.91,4.46,16.68,16.81\nmixnet_xl,224,1024.0,1948.29,525.576,0.93,14.57,11.9\nresnest50d,224,1024.0,1945.36,526.368,5.4,14.36,27.48\nseresnext26d_32x4d,288,1024.0,1940.04,527.813,4.51,16.85,16.81\ncoatnet_0_224,224,512.0,1939.29,264.004,4.43,21.14,25.04\nswin_tiny_patch4_window7_224,224,1024.0,1938.74,528.165,4.51,17.06,28.29\nresnetv2_101,224,1024.0,1935.15,529.146,7.83,16.23,44.54\nregnetx_064,224,1024.0,1933.12,529.703,6.49,16.37,26.21\ndla102,224,1024.0,1924.77,531.998,7.19,14.18,33.27\ncrossvit_18_dagger_240,240,1024.0,1921.19,532.991,8.65,16.91,44.27\nrexnetr_200,288,512.0,1914.7,267.396,2.62,24.96,16.52\nrexnet_300,224,1024.0,1911.46,535.706,3.44,22.4,34.71\nnest_tiny,224,1024.0,1908.27,536.601,5.24,14.75,17.06\ndm_nfnet_f0,192,1024.0,1907.3,536.873,7.21,10.16,71.49\necaresnetlight,288,1024.0,1897.75,539.574,6.79,13.91,30.16\nmaxxvit_rmlp_nano_rw_256,256,768.0,1897.05,404.83,4.17,21.53,16.78\nresnet101,224,1024.0,1885.15,543.183,7.83,16.23,44.55\nnest_tiny_jx,224,1024.0,1884.26,543.437,5.24,14.75,17.06\npvt_v2_b2_li,224,1024.0,1882.78,543.863,3.77,25.04,22.55\nvit_large_patch32_224,224,1024.0,1869.82,547.632,15.27,11.11,305.51\nvgg13,224,1024.0,1868.34,548.068,11.31,12.25,133.05\nresnetv2_101d,224,1024.0,1865.75,548.827,8.07,17.04,44.56\nefficientformer_l3,224,1024.0,1865.63,548.865,3.93,12.0
1,31.41\nresnetv2_50,288,1024.0,1863.99,549.347,6.79,18.37,25.55\nmobileone_s4,224,1024.0,1856.33,551.615,3.04,17.74,14.95\nres2net50_26w_6s,224,1024.0,1853.01,552.603,6.33,15.28,37.05\nefficientvit_b2,288,1024.0,1851.14,553.16,2.64,24.03,24.33\nlamhalobotnet50ts_256,256,1024.0,1841.89,555.938,5.02,18.44,22.57\nmaxvit_nano_rw_256,256,768.0,1833.65,418.827,4.26,25.76,15.45\nmaxvit_rmlp_nano_rw_256,256,768.0,1832.13,419.175,4.28,27.4,15.5\nconvnext_small,224,1024.0,1829.72,559.636,8.71,21.56,50.22\nresnet101c,224,1024.0,1824.57,561.217,8.08,17.04,44.57\nconvnext_tiny,288,1024.0,1817.02,563.549,7.39,22.21,28.59\nresnet101d,224,1024.0,1816.61,563.677,8.08,17.04,44.57\ngcresnext50ts,288,1024.0,1802.21,568.181,4.75,19.57,15.67\nefficientnetv2_s,288,1024.0,1800.9,568.595,4.75,20.13,21.46\npit_b_distilled_224,224,1024.0,1798.47,569.363,10.63,16.67,74.79\nresnet50,288,1024.0,1790.94,571.757,6.8,18.37,25.56\ntwins_pcpvt_base,224,1024.0,1774.55,577.037,6.46,21.35,43.83\nhalonet50ts,256,1024.0,1772.89,577.576,5.3,19.2,22.73\ndpn68b,288,1024.0,1770.85,578.24,3.89,17.3,12.61\npit_b_224,224,1024.0,1769.93,578.542,10.56,16.6,73.76\nhrnet_w18_ssld,224,1024.0,1769.77,578.594,4.32,16.31,21.3\nswin_s3_tiny_224,224,1024.0,1768.18,579.114,4.64,19.13,28.33\nefficientvit_l2,224,1024.0,1765.89,579.866,6.97,19.58,63.71\nhrnet_w18,224,1024.0,1763.75,580.57,4.32,16.31,21.3\ncoat_lite_small,224,1024.0,1746.27,586.38,3.96,22.09,19.84\nrepvgg_b1,224,1024.0,1745.5,586.64,13.16,10.64,57.42\nwide_resnet50_2,224,1024.0,1744.59,586.947,11.43,14.4,68.88\nefficientnet_b3,320,512.0,1740.17,294.213,2.01,26.52,12.23\ngcresnet50t,288,1024.0,1734.6,590.328,6.86,18.57,25.9\ndensenet201,224,1024.0,1731.46,591.397,4.34,7.85,20.01\ntresnet_v2_l,224,1024.0,1730.52,591.717,8.85,16.34,46.17\ntf_efficientnet_b3,300,512.0,1724.68,296.856,1.87,23.83,12.23\nefficientnetv2_rw_s,288,1024.0,1722.48,594.481,4.91,21.41,23.94\ndarknetaa53,288,1024.0,1719.51,595.509,10.08,15.68,36.02\nmaxxvitv2_nano_rw_256,256,768.0,1706.28,4
50.091,6.12,19.66,23.7\nresnetaa101d,224,1024.0,1701.55,601.792,9.12,17.56,44.57\nxcit_tiny_12_p16_384,384,1024.0,1700.55,602.144,3.64,18.25,6.72\ncait_xxs24_224,224,1024.0,1698.66,602.815,2.53,20.29,11.96\nresnet50t,288,1024.0,1694.77,604.2,7.14,19.53,25.57\nlegacy_seresnet101,224,1024.0,1693.62,604.611,7.61,15.74,49.33\ncs3edgenet_x,256,1024.0,1692.79,604.907,11.53,12.92,47.82\nresnet50d,288,1024.0,1684.01,608.061,7.19,19.7,25.58\nmobilevitv2_175,256,512.0,1675.38,305.592,5.54,28.13,14.25\nregnetv_064,224,1024.0,1674.09,611.663,6.39,16.41,30.58\nresnetv2_101x1_bit,224,1024.0,1672.61,612.204,8.04,16.23,44.54\nefficientnet_b3_gn,288,512.0,1669.75,306.623,1.74,23.35,11.73\nese_vovnet39b,288,768.0,1667.87,460.459,11.71,11.13,24.57\nregnety_032,288,1024.0,1666.89,614.307,5.29,18.61,19.44\nseresnet101,224,1024.0,1666.33,614.509,7.84,16.27,49.33\nregnety_064,224,1024.0,1666.11,614.593,6.39,16.41,30.58\nconvnext_tiny_hnf,288,1024.0,1663.94,615.393,7.39,22.21,28.59\nregnetv_040,288,1024.0,1658.56,617.391,6.6,20.3,20.64\nregnety_040,288,1024.0,1648.75,621.064,6.61,20.3,20.65\nregnety_080,224,1024.0,1645.74,622.202,8.0,17.97,39.18\nresnet101s,224,1024.0,1640.53,624.176,9.19,18.64,44.67\nmixer_b16_224,224,1024.0,1627.76,629.075,12.62,14.53,59.88\ndla102x,224,1024.0,1623.56,630.698,5.89,19.42,26.31\nnf_resnet101,224,1024.0,1622.48,631.12,8.01,16.23,44.55\nswinv2_cr_tiny_224,224,1024.0,1621.28,631.59,4.66,28.45,28.33\necaresnet101d,224,1024.0,1619.0,632.477,8.08,17.07,44.57\nconvnextv2_tiny,224,1024.0,1618.49,632.676,4.47,13.44,28.64\ndarknet53,288,1024.0,1615.64,633.795,11.78,15.68,41.61\nwide_resnet101_2,176,1024.0,1615.25,633.945,14.31,13.18,126.89\nrepvit_m2_3,224,1024.0,1614.73,634.149,4.57,26.21,23.69\nresnetaa50,288,1024.0,1610.23,635.923,8.52,19.24,25.56\nresnetblur101d,224,1024.0,1609.76,636.109,9.12,17.94,44.57\nefficientvit_b3,224,1024.0,1609.54,636.196,3.99,26.9,48.65\nregnetz_d32,256,1024.0,1603.03,638.779,5.98,23.74,27.58\nregnetz_b16_evos,224,1024.0,1602.47,639.0
01,1.43,9.95,9.74\nese_vovnet39b_evos,224,1024.0,1599.88,640.036,7.07,6.74,24.58\ndavit_small,224,1024.0,1599.81,640.066,8.69,27.54,49.75\nseresnet50,288,1024.0,1595.89,641.637,6.8,18.39,28.09\ncs3se_edgenet_x,256,1024.0,1593.53,642.587,11.53,12.94,50.72\nnf_regnet_b4,320,1024.0,1592.57,642.975,3.29,19.88,30.21\nswinv2_cr_tiny_ns_224,224,1024.0,1590.7,643.731,4.66,28.45,28.33\nsequencer2d_s,224,1024.0,1586.65,645.372,4.96,11.31,27.65\ntf_efficientnetv2_s,300,1024.0,1583.75,646.555,5.35,22.73,21.46\ndensenet121,288,1024.0,1581.16,647.615,4.74,11.41,7.98\nresnet51q,288,1024.0,1581.05,647.659,8.07,20.94,35.7\nregnetz_d8,256,1024.0,1580.57,647.855,3.97,23.74,23.37\nresmlp_36_224,224,1024.0,1577.5,649.116,8.91,16.33,44.69\nmixer_l32_224,224,1024.0,1577.26,649.215,11.27,19.86,206.94\nregnetz_040,256,1024.0,1574.58,650.32,4.06,24.19,27.12\nvit_base_patch16_224_miil,224,1024.0,1574.06,650.535,16.88,16.5,94.4\nbotnet50ts_256,256,512.0,1573.5,325.38,5.54,22.23,22.74\nresnet50_gn,288,1024.0,1570.23,652.122,6.85,18.37,25.56\nvit_base_patch16_clip_224,224,1024.0,1569.93,652.248,16.87,16.49,86.57\ncs3darknet_x,288,1024.0,1569.68,652.352,10.6,14.36,35.05\ndeit_base_distilled_patch16_224,224,1024.0,1568.26,652.942,16.95,16.58,87.34\nvit_base_patch16_224,224,1024.0,1568.03,653.038,16.87,16.49,86.57\ndeit_base_patch16_224,224,1024.0,1567.8,653.131,16.87,16.49,86.57\nregnetz_040_h,256,1024.0,1564.2,654.638,4.12,24.29,28.94\nresnetv2_50d_gn,288,1024.0,1555.81,658.164,7.24,19.7,25.57\nresnetv2_50d_frn,224,1024.0,1553.07,659.326,4.33,11.92,25.59\ntresnet_l,224,1024.0,1528.92,669.739,10.9,11.9,55.99\nregnety_080_tv,224,1024.0,1528.54,669.91,8.51,19.73,39.38\nresnetaa50d,288,1024.0,1524.48,671.692,8.92,20.57,25.58\nnf_resnet50,288,1024.0,1524.41,671.724,6.88,18.37,25.56\ncaformer_s18,224,1024.0,1522.76,672.449,3.9,15.18,26.34\nresnext101_32x8d,176,1024.0,1521.82,672.868,10.33,19.37,88.79\nseresnet50t,288,1024.0,1518.59,674.299,7.14,19.55,28.1\necaresnet50t,288,1024.0,1518.21,674.465,7.14,1
9.55,25.57\nmvitv2_tiny,224,1024.0,1518.01,674.556,4.7,21.16,24.17\nresnet101d,256,1024.0,1517.18,674.926,10.55,22.25,44.57\npvt_v2_b3,224,1024.0,1516.27,675.326,6.71,33.8,45.24\nmaxvit_tiny_rw_224,224,768.0,1513.7,507.357,4.93,28.54,29.06\necaresnet50d,288,1024.0,1510.36,677.975,7.19,19.72,25.58\nconvnextv2_nano,288,768.0,1503.98,510.637,4.06,13.84,15.62\nhalo2botnet50ts_256,256,1024.0,1499.3,682.975,5.02,21.78,22.64\ncs3sedarknet_x,288,1024.0,1498.9,683.158,10.6,14.37,35.4\nres2net50_26w_8s,224,1024.0,1498.8,683.201,8.37,17.95,48.4\nresnext101_32x4d,224,1024.0,1496.35,684.32,8.01,21.23,44.18\ndeit3_base_patch16_224,224,1024.0,1488.08,688.122,16.87,16.49,86.59\nregnetz_c16,320,1024.0,1478.43,692.615,3.92,25.88,13.46\nresnest50d_4s2x40d,224,1024.0,1478.06,692.785,4.4,17.94,30.42\nresnetblur50,288,1024.0,1477.0,693.285,8.52,19.87,25.56\nskresnext50_32x4d,224,1024.0,1470.18,696.502,4.5,17.18,27.48\nefficientvit_l2,256,1024.0,1466.16,698.41,9.09,25.49,63.71\neca_nfnet_l0,288,1024.0,1463.28,699.787,7.12,17.29,24.14\nmobilevitv2_200,256,768.0,1462.66,525.062,7.22,32.15,18.45\nnfnet_l0,288,1024.0,1461.21,700.775,7.13,17.29,35.07\nresnet61q,288,1024.0,1460.17,701.277,9.87,21.52,36.85\nvit_base_patch32_clip_448,448,1024.0,1456.81,702.892,17.21,16.49,88.34\nvit_small_patch16_36x1_224,224,1024.0,1454.45,704.036,12.63,24.59,64.67\nvit_small_resnet50d_s16_224,224,1024.0,1451.55,705.439,13.0,21.12,57.53\nbeit_base_patch16_224,224,1024.0,1443.54,709.354,16.87,16.49,86.53\nres2net101_26w_4s,224,1024.0,1442.54,709.848,8.1,18.45,45.21\nvit_base_patch16_siglip_224,224,1024.0,1439.5,711.343,17.02,16.71,92.88\nvit_base_patch16_gap_224,224,1024.0,1436.45,712.857,16.78,16.41,86.57\nregnety_040_sgn,288,1024.0,1436.16,712.999,6.67,20.3,20.65\nbeitv2_base_patch16_224,224,1024.0,1436.01,713.075,16.87,16.49,86.53\nconvit_small,224,1024.0,1431.38,715.383,5.76,17.87,27.78\nedgenext_base,320,1024.0,1423.6,719.289,6.01,24.32,18.51\nconvformer_s18,224,1024.0,1421.81,720.197,3.96,15.82,26.77\nfocal
net_small_srf,224,1024.0,1419.82,721.204,8.62,26.26,49.89\ndensenetblur121d,288,1024.0,1416.47,722.914,5.14,13.06,8.0\npoolformer_s36,224,1024.0,1415.39,723.463,5.0,15.82,30.86\nresnetv2_50d_evos,224,1024.0,1415.09,723.614,4.33,11.92,25.59\ncoatnet_rmlp_1_rw_224,224,1024.0,1413.05,724.664,7.44,28.08,41.69\nres2net101d,224,1024.0,1406.68,727.943,8.35,19.25,45.23\nlegacy_xception,299,1024.0,1405.99,728.302,8.4,35.83,22.86\nvit_small_patch16_18x2_224,224,1024.0,1405.24,728.689,12.63,24.59,64.67\nresnetblur50d,288,1024.0,1403.3,729.695,8.92,21.19,25.58\nresnext50_32x4d,288,1024.0,1402.5,730.115,7.04,23.81,25.03\ninception_next_small,224,1024.0,1397.1,732.931,8.36,19.27,49.37\nrepvgg_b2g4,224,1024.0,1392.83,735.183,12.63,12.9,61.76\ngcvit_tiny,224,1024.0,1390.57,736.376,4.79,29.82,28.22\nvit_relpos_base_patch16_clsgap_224,224,1024.0,1386.7,738.433,16.88,17.72,86.43\nvit_base_patch16_clip_quickgelu_224,224,1024.0,1384.47,739.621,16.87,16.49,86.19\nvit_relpos_base_patch16_cls_224,224,1024.0,1384.18,739.775,16.88,17.72,86.43\ndpn92,224,1024.0,1380.04,741.995,6.54,18.21,37.67\nseresnetaa50d,288,1024.0,1379.8,742.125,8.92,20.59,28.11\nvit_small_patch16_384,384,1024.0,1379.23,742.429,12.45,24.15,22.2\nnf_ecaresnet101,224,1024.0,1375.27,744.569,8.01,16.27,44.55\nnf_seresnet101,224,1024.0,1370.83,746.983,8.02,16.27,49.33\nefficientnet_b3_gn,320,384.0,1366.12,281.077,2.14,28.83,11.73\nvgg16_bn,224,1024.0,1361.56,752.067,15.5,13.56,138.37\nflexivit_base,240,1024.0,1360.19,752.822,19.35,18.92,86.59\nefficientformerv2_s0,224,1024.0,1357.83,754.133,0.41,5.3,3.6\nresnetv2_152,224,1024.0,1356.74,754.735,11.55,22.56,60.19\nseresnext101_32x4d,224,1024.0,1356.08,755.105,8.02,21.26,48.96\nlegacy_seresnext101_32x4d,224,1024.0,1355.29,755.543,8.02,21.26,48.96\nefficientnet_b3_g8_gn,288,768.0,1342.01,572.264,2.59,23.35,14.25\nefficientvit_b3,256,768.0,1340.35,572.972,5.2,35.01,48.65\nefficientnet_b4,320,512.0,1338.46,382.52,3.13,34.76,19.34\nnfnet_f0,256,1024.0,1336.25,766.311,12.62,18.05,71.
49\nresnext50d_32x4d,288,1024.0,1335.71,766.62,7.44,25.13,25.05\nfocalnet_small_lrf,224,1024.0,1333.55,767.863,8.74,28.61,50.34\nresnet152,224,1024.0,1331.42,769.094,11.56,22.56,60.19\nese_vovnet99b,224,1024.0,1328.91,770.544,16.51,11.27,63.2\nresnetv2_152d,224,1024.0,1322.45,774.307,11.8,23.36,60.2\nregnetx_120,224,1024.0,1317.68,777.11,12.13,21.37,46.11\nhrnet_w32,224,1024.0,1308.75,782.414,8.97,22.02,41.23\nxception41p,299,512.0,1308.08,391.403,9.25,39.86,26.91\nvit_relpos_base_patch16_224,224,1024.0,1306.59,783.71,16.8,17.63,86.43\nxcit_tiny_12_p8_224,224,1024.0,1306.3,783.883,4.81,23.6,6.71\ncoatnet_1_rw_224,224,1024.0,1303.02,785.857,7.63,27.22,41.72\nresnet152c,224,1024.0,1301.97,786.489,11.8,23.36,60.21\ncoatnet_rmlp_1_rw2_224,224,1024.0,1300.63,787.299,7.71,32.74,41.72\ntwins_pcpvt_large,224,1024.0,1297.56,789.162,9.53,30.21,60.99\nmaxvit_tiny_tf_224,224,768.0,1297.26,592.007,5.42,31.21,30.92\nresnet152d,224,1024.0,1296.94,789.538,11.8,23.36,60.21\ncs3edgenet_x,288,1024.0,1296.8,789.626,14.59,16.36,47.82\nvit_base_patch16_xp_224,224,1024.0,1295.7,790.295,16.85,16.49,86.51\npoolformerv2_s24,224,1024.0,1287.82,795.129,3.42,10.68,21.34\ndla169,224,1024.0,1280.41,799.732,11.6,20.2,53.39\nefficientnet_el_pruned,300,1024.0,1280.32,799.789,8.0,30.7,10.59\nefficientnet_el,300,1024.0,1279.02,800.603,8.0,30.7,10.59\nseresnext50_32x4d,288,1024.0,1276.82,801.978,7.04,23.82,27.56\nhrnet_w30,224,1024.0,1276.63,802.098,8.15,21.21,37.71\ndeit3_small_patch16_384,384,1024.0,1274.41,803.494,12.45,24.15,22.21\necaresnet50t,320,1024.0,1274.01,803.751,8.82,24.13,25.57\nmaxxvit_rmlp_tiny_rw_256,256,768.0,1269.37,605.011,6.36,32.69,29.64\nvolo_d1_224,224,1024.0,1269.05,806.894,6.94,24.43,26.63\nvgg19,224,1024.0,1264.63,809.714,19.63,14.86,143.67\nconvnext_base,224,1024.0,1259.04,813.306,15.38,28.75,88.59\nrexnetr_300,288,512.0,1257.05,407.293,5.59,36.61,34.81\nvit_base_patch16_rpn_224,224,1024.0,1255.24,815.771,16.78,16.41,86.54\ndensenet161,224,1024.0,1254.96,815.95,7.79,11.06,28
.68\nefficientformerv2_s1,224,1024.0,1251.09,818.477,0.67,7.66,6.19\nregnety_120,224,1024.0,1250.69,818.739,12.14,21.38,51.82\ntwins_svt_base,224,1024.0,1249.89,819.258,8.36,20.42,56.07\ntf_efficientnet_el,300,1024.0,1249.79,819.323,8.0,30.7,10.59\nsequencer2d_m,224,1024.0,1238.3,826.927,6.55,14.26,38.31\nnest_small,224,1024.0,1229.99,832.512,9.41,22.88,38.35\nmaxvit_tiny_rw_256,256,768.0,1229.06,624.855,6.44,37.27,29.07\nmaxvit_rmlp_tiny_rw_256,256,768.0,1228.3,625.245,6.47,39.84,29.15\nrepvgg_b2,224,1024.0,1219.54,839.651,20.45,12.9,89.02\nnest_small_jx,224,1024.0,1219.36,839.775,9.41,22.88,38.35\nmixnet_xxl,224,768.0,1211.88,633.716,2.04,23.43,23.96\nresnet152s,224,1024.0,1205.05,849.747,12.92,24.96,60.32\nswin_small_patch4_window7_224,224,1024.0,1202.25,851.724,8.77,27.47,49.61\ninception_v4,299,1024.0,1191.21,859.617,12.28,15.09,42.68\nswinv2_tiny_window8_256,256,1024.0,1191.2,859.622,5.96,24.57,28.35\nlegacy_seresnet152,224,1024.0,1187.19,862.527,11.33,22.08,66.82\ncoatnet_1_224,224,512.0,1184.08,432.392,8.28,31.3,42.23\nxcit_small_24_p16_224,224,1024.0,1178.16,869.138,9.1,23.63,47.67\nvit_relpos_base_patch16_rpn_224,224,1024.0,1177.44,869.665,16.8,17.63,86.41\neca_nfnet_l1,256,1024.0,1175.13,871.38,9.62,22.04,41.41\nseresnet152,224,1024.0,1173.43,872.64,11.57,22.61,66.82\nmaxvit_tiny_pm_256,256,768.0,1169.83,656.496,6.31,40.82,30.09\ncrossvit_base_240,240,1024.0,1165.77,878.374,20.13,22.67,105.03\nefficientnet_lite4,380,384.0,1155.38,332.349,4.04,45.66,13.01\nxception41,299,512.0,1153.48,443.864,9.28,39.86,26.97\nregnetx_160,224,1024.0,1153.37,887.82,15.99,25.52,54.28\nvgg19_bn,224,1024.0,1151.34,889.391,19.66,14.86,143.68\ncait_xxs36_224,224,1024.0,1139.1,898.942,3.77,30.34,17.3\ntresnet_xl,224,1024.0,1138.98,899.04,15.2,15.34,78.44\ntnt_s_patch16_224,224,1024.0,1134.46,902.62,5.24,24.37,23.76\ndavit_base,224,1024.0,1133.31,903.534,15.36,36.72,87.95\ndm_nfnet_f0,256,1024.0,1132.28,904.361,12.62,18.05,71.49\nresnetv2_101,288,1024.0,1131.44,905.029,12.94,26.83
,44.54\nmvitv2_small_cls,224,1024.0,1129.19,906.833,7.04,28.17,34.87\nmvitv2_small,224,1024.0,1128.19,907.64,7.0,28.08,34.87\ncoat_tiny,224,1024.0,1126.07,909.345,4.35,27.2,5.5\nconvmixer_1024_20_ks9_p14,224,1024.0,1123.31,911.577,5.55,5.51,24.38\nvit_base_patch16_reg8_gap_256,256,1024.0,1115.77,917.744,22.6,22.09,86.62\nfastvit_sa24,256,1024.0,1114.43,918.841,3.79,23.92,21.55\nrepvgg_b3g4,224,1024.0,1113.37,919.717,17.89,15.1,83.83\nconvnext_small,288,1024.0,1110.94,921.731,14.39,35.65,50.22\nvit_base_patch16_siglip_256,256,1024.0,1108.01,924.168,22.23,21.83,92.93\nresnet101,288,1024.0,1104.31,927.267,12.95,26.83,44.55\ndla102x2,224,1024.0,1104.21,927.342,9.34,29.91,41.28\npvt_v2_b4,224,1024.0,1101.67,929.481,9.83,48.14,62.56\nvit_large_r50_s32_224,224,1024.0,1091.33,938.289,19.45,22.22,328.99\neva02_base_patch16_clip_224,224,1024.0,1090.31,939.167,16.9,18.91,86.26\nvgg13_bn,224,1024.0,1090.15,939.306,11.33,12.25,133.05\nresnet152d,256,1024.0,1089.57,939.806,15.41,30.51,60.21\nnf_regnet_b4,384,1024.0,1089.51,939.86,4.7,28.61,30.21\nefficientnet_b3_g8_gn,320,768.0,1085.43,707.541,3.2,28.83,14.25\nvit_small_r26_s32_384,384,1024.0,1083.82,944.797,10.24,27.67,36.47\nefficientvit_l2,288,1024.0,1083.69,944.906,11.51,32.19,63.71\nefficientnetv2_s,384,1024.0,1081.44,946.869,8.44,35.77,21.46\ntf_efficientnet_lite4,380,384.0,1073.72,357.628,4.04,45.66,13.01\npvt_v2_b5,224,1024.0,1068.28,958.536,11.39,44.23,81.96\nhrnet_w18_ssld,288,1024.0,1066.01,960.575,7.14,26.96,21.3\ntf_efficientnetv2_s,384,1024.0,1054.1,971.431,8.44,35.77,21.46\nregnety_160,224,1024.0,1046.76,978.242,15.96,23.04,83.59\nsamvit_base_patch16_224,224,1024.0,1027.37,996.713,16.83,17.2,86.46\nconvnext_tiny,384,768.0,1026.31,748.299,13.14,39.48,28.59\nwide_resnet50_2,288,1024.0,1025.91,998.129,18.89,23.81,68.88\nefficientnetv2_rw_s,384,1024.0,1024.66,999.343,8.72,38.03,23.94\nvgg16,224,1024.0,1020.44,1003.475,15.47,13.56,138.36\ncs3se_edgenet_x,320,1024.0,1009.45,1014.397,18.01,20.21,50.72\nvit_base_patch16_pl
us_240,240,1024.0,1002.7,1021.234,26.31,22.07,117.56\nswinv2_cr_small_224,224,1024.0,1001.72,1022.232,9.07,50.27,49.7\ndpn98,224,1024.0,998.61,1025.406,11.73,25.2,61.57\nefficientvit_b3,288,768.0,996.43,770.744,6.58,44.2,48.65\nresnetaa101d,288,1024.0,996.18,1027.911,15.07,29.03,44.57\nwide_resnet101_2,224,1024.0,994.0,1030.164,22.8,21.23,126.89\nregnetz_d32,320,1024.0,994.0,1030.165,9.33,37.08,27.58\nswinv2_cr_small_ns_224,224,1024.0,991.13,1033.149,9.08,50.27,49.7\nfocalnet_base_srf,224,1024.0,990.91,1033.385,15.28,35.01,88.15\nconvnextv2_small,224,1024.0,989.67,1034.674,8.71,21.56,50.32\nresnet200,224,1024.0,987.28,1037.18,15.07,32.19,64.67\nconvnextv2_tiny,288,768.0,983.87,780.578,7.39,22.21,28.64\nseresnet101,288,1024.0,983.64,1041.016,12.95,26.87,49.33\nvit_small_patch8_224,224,1024.0,981.8,1042.968,16.76,32.86,21.67\nregnetz_d8,320,1024.0,980.9,1043.922,6.19,37.08,23.37\nregnety_080,288,1024.0,977.86,1047.177,13.22,29.69,39.18\ninception_next_base,224,1024.0,977.1,1047.988,14.85,25.69,86.67\nvit_base_r50_s16_224,224,1024.0,974.47,1050.816,20.94,27.88,97.89\nresnest101e,256,1024.0,968.0,1057.838,13.38,28.66,48.28\nconvnext_base,256,1024.0,965.93,1060.101,20.09,37.55,88.59\nregnetz_c16_evos,256,768.0,965.5,795.429,2.48,16.57,13.49\nregnetz_040,320,512.0,964.02,531.096,6.35,37.78,27.12\npoolformer_m36,224,1024.0,963.9,1062.337,8.8,22.02,56.17\nregnetz_b16_evos,288,768.0,961.28,798.923,2.36,16.43,9.74\ninception_resnet_v2,299,1024.0,958.82,1067.962,13.18,25.06,55.84\nregnetz_040_h,320,512.0,958.46,534.182,6.43,37.94,28.94\nseresnet152d,256,1024.0,956.44,1070.629,15.42,30.56,66.84\necaresnet101d,288,1024.0,951.62,1076.05,13.35,28.19,44.57\nregnety_064,288,1024.0,949.24,1078.741,10.56,27.11,30.58\nresnetrs152,256,1024.0,948.32,1079.798,15.59,30.83,86.62\nresnext101_64x4d,224,1024.0,947.79,1080.397,15.52,31.21,83.46\nregnetv_064,288,1024.0,947.23,1081.038,10.55,27.11,30.58\nxception65p,299,512.0,944.43,542.118,13.91,52.48,39.82\nresnetblur101d,288,1024.0,942.52,1086
.438,15.07,29.65,44.57\nresnetrs101,288,1024.0,941.79,1087.277,13.56,28.53,63.62\nfocalnet_base_lrf,224,1024.0,941.31,1087.831,15.43,38.13,88.75\nresnext101_32x8d,224,1024.0,939.44,1090.002,16.48,31.21,88.79\nrepvgg_b3,224,1024.0,933.91,1096.448,29.16,15.1,123.09\nhrnet_w40,224,1024.0,931.96,1098.75,12.75,25.29,57.56\nnfnet_f1,224,1024.0,924.88,1107.159,17.87,22.94,132.63\neva02_small_patch14_336,336,1024.0,923.99,1108.223,12.41,27.7,22.13\nresnet101d,320,1024.0,923.18,1109.193,16.48,34.77,44.57\nxcit_tiny_24_p16_384,384,1024.0,910.96,1124.082,6.87,34.29,12.12\nefficientnet_b4,384,384.0,908.88,422.486,4.51,50.04,19.34\ncait_s24_224,224,1024.0,904.24,1132.424,9.35,40.58,46.92\nmobilevitv2_150,384,256.0,899.17,284.697,9.2,54.25,10.59\nmaxvit_rmlp_small_rw_224,224,768.0,898.81,854.449,10.48,42.44,64.9\ncoat_mini,224,1024.0,894.78,1144.406,6.82,33.68,10.34\ncoat_lite_medium,224,1024.0,892.4,1147.459,9.81,40.06,44.57\nefficientnetv2_m,320,1024.0,889.26,1151.505,11.01,39.97,54.14\nseresnext101_64x4d,224,1024.0,888.73,1152.196,15.53,31.25,88.23\ngmlp_b16_224,224,1024.0,884.5,1157.706,15.78,30.21,73.08\nseresnext101_32x8d,224,1024.0,883.56,1158.934,16.48,31.25,93.57\nswin_s3_small_224,224,768.0,879.87,872.841,9.43,37.84,49.74\nvit_relpos_base_patch16_plus_240,240,1024.0,875.04,1170.215,26.21,23.41,117.38\nefficientformer_l7,224,1024.0,873.11,1172.808,10.17,24.45,82.23\nnest_base,224,1024.0,870.02,1176.974,16.71,30.51,67.72\npoolformerv2_s36,224,1024.0,869.16,1178.141,5.01,15.82,30.79\nmaxvit_small_tf_224,224,512.0,868.0,589.85,11.39,46.31,68.93\nseresnext101d_32x8d,224,1024.0,866.35,1181.949,16.72,32.05,93.59\nnest_base_jx,224,1024.0,862.67,1187.001,16.71,30.51,67.72\nlevit_384_s8,224,512.0,854.68,599.045,9.98,35.86,39.12\nregnetz_e8,256,1024.0,853.36,1199.952,9.91,40.94,57.7\nswin_base_patch4_window7_224,224,1024.0,852.78,1200.762,15.47,36.63,87.77\ncoatnet_2_rw_224,224,512.0,852.23,600.767,14.55,39.37,73.87\ntf_efficientnet_b4,380,384.0,851.5,450.956,4.49,49.49,19.34\ngcv
it_small,224,1024.0,841.82,1216.401,8.57,41.61,51.09\nconvnextv2_nano,384,512.0,841.68,608.3,7.22,24.61,15.62\nresnetv2_50d_evos,288,1024.0,840.21,1218.735,7.15,19.7,25.59\nlevit_conv_384_s8,224,512.0,839.77,609.68,9.98,35.86,39.12\nxception65,299,512.0,839.39,609.953,13.96,52.48,39.92\nhrnet_w44,224,1024.0,835.38,1225.779,14.94,26.92,67.06\ncrossvit_15_dagger_408,408,1024.0,833.7,1228.252,16.07,37.0,28.5\ntiny_vit_21m_384,384,512.0,827.46,618.747,11.94,46.84,21.23\ntwins_svt_large,224,1024.0,824.23,1242.353,14.84,27.23,99.27\nseresnextaa101d_32x8d,224,1024.0,820.77,1247.602,17.25,34.16,93.59\nxcit_medium_24_p16_224,224,1024.0,820.51,1247.988,16.13,31.71,84.4\neva02_base_patch14_224,224,1024.0,819.51,1249.51,22.0,24.67,85.76\ncoatnet_rmlp_2_rw_224,224,512.0,814.13,628.885,14.64,44.94,73.88\nhrnet_w48_ssld,224,1024.0,812.33,1260.551,17.34,28.56,77.47\nhrnet_w48,224,1024.0,811.26,1262.228,17.34,28.56,77.47\ncaformer_s36,224,1024.0,810.13,1263.986,7.55,29.29,39.3\ntresnet_m,448,1024.0,809.9,1264.343,22.99,29.21,31.39\nresnet200d,256,1024.0,803.17,1274.938,20.0,43.09,64.69\nsequencer2d_l,224,1024.0,802.78,1275.557,9.74,22.12,54.3\nmaxxvit_rmlp_small_rw_256,256,768.0,801.57,958.106,14.21,47.76,66.01\nswinv2_base_window12_192,192,1024.0,799.54,1280.724,11.9,39.72,109.28\ndm_nfnet_f1,224,1024.0,798.67,1282.118,17.87,22.94,132.63\ncoatnet_2_224,224,512.0,796.89,642.486,15.94,42.41,74.68\nvit_medium_patch16_gap_384,384,1024.0,795.07,1287.922,22.01,32.15,39.03\nmvitv2_base_cls,224,1024.0,791.15,1294.298,10.23,40.65,65.44\nmvitv2_base,224,1024.0,785.87,1303.007,10.16,40.5,51.47\nefficientnetv2_rw_m,320,1024.0,785.27,1303.997,12.72,47.14,53.24\nresnet152,288,1024.0,781.77,1309.827,19.11,37.28,60.19\nswinv2_tiny_window16_256,256,512.0,775.64,660.087,6.68,39.02,28.35\nfastvit_sa36,256,1024.0,768.44,1332.545,5.62,34.02,31.53\nxcit_small_12_p16_384,384,1024.0,764.7,1339.074,14.14,36.5,26.25\nconvnext_base,288,1024.0,763.36,1341.427,25.43,47.53,88.59\nconvformer_s36,224,1024.0,754.9
2,1356.424,7.67,30.5,40.01\nregnety_120,288,768.0,738.36,1040.13,20.06,35.34,51.82\nswinv2_small_window8_256,256,1024.0,737.99,1387.548,11.58,40.14,49.73\ndpn131,224,1024.0,732.6,1397.744,16.09,32.97,79.25\nswinv2_cr_small_ns_256,256,1024.0,731.79,1399.291,12.07,76.21,49.7\nmobilevitv2_175,384,256.0,731.75,349.838,12.47,63.29,14.25\nconvit_base,224,1024.0,730.43,1401.91,17.52,31.77,86.54\nresnetv2_50x1_bit,448,512.0,729.61,701.734,16.62,44.46,25.55\npoolformer_m48,224,1024.0,727.01,1408.491,11.59,29.17,73.47\nmaxvit_rmlp_small_rw_256,256,768.0,724.69,1059.745,13.69,55.48,64.9\ntnt_b_patch16_224,224,1024.0,721.67,1418.912,14.09,39.01,65.41\neca_nfnet_l1,320,1024.0,720.22,1421.77,14.92,34.42,41.41\nswinv2_cr_base_224,224,1024.0,716.89,1428.383,15.86,59.66,87.88\nswin_s3_base_224,224,1024.0,715.81,1430.534,13.69,48.26,71.13\nvolo_d2_224,224,1024.0,711.4,1439.408,14.34,41.34,58.68\nswinv2_cr_base_ns_224,224,1024.0,711.07,1440.068,15.86,59.66,87.88\nconvnextv2_base,224,768.0,708.71,1083.64,15.38,28.75,88.72\ndensenet264d,224,1024.0,697.85,1467.348,13.57,14.0,72.74\necaresnet200d,256,1024.0,697.3,1468.506,20.0,43.15,64.69\nseresnet200d,256,1024.0,696.92,1469.301,20.01,43.15,71.86\nnf_regnet_b5,384,1024.0,694.76,1473.879,7.95,42.9,49.74\nseresnet152,288,1024.0,693.47,1476.616,19.11,37.34,66.82\nresnetrs200,256,1024.0,693.26,1477.057,20.18,43.42,93.21\ncoat_small,224,1024.0,689.68,1484.732,12.61,44.25,21.69\nconvnext_large,224,1024.0,686.69,1491.207,34.4,43.13,197.77\nxcit_tiny_24_p8_224,224,1024.0,684.2,1496.615,9.21,45.38,12.11\nefficientvit_l3,224,1024.0,667.4,1534.307,27.62,39.16,246.04\ndpn107,224,1024.0,666.43,1536.527,18.38,33.46,86.92\nresnet152d,320,1024.0,664.6,1540.768,24.08,47.67,60.21\nsenet154,224,1024.0,664.59,1540.791,20.77,38.69,115.09\nlegacy_senet154,224,1024.0,663.62,1543.045,20.77,38.69,115.09\nefficientformerv2_s2,224,1024.0,658.11,1555.962,1.27,11.77,12.71\nmaxxvitv2_rmlp_base_rw_224,224,768.0,650.48,1180.654,23.88,54.39,116.09\nxcit_nano_12_p8_384,38
4,1024.0,649.92,1575.56,6.34,46.06,3.05\nxception71,299,512.0,649.47,788.325,18.09,69.92,42.34\nvit_large_patch32_384,384,1024.0,643.51,1591.268,44.28,32.22,306.63\nmobilevitv2_200,384,256.0,640.82,399.48,16.24,72.34,18.45\ndavit_large,224,1024.0,630.01,1625.361,34.37,55.08,196.81\nhrnet_w64,224,1024.0,629.26,1627.299,28.97,35.09,128.06\nconvnext_small,384,768.0,628.81,1221.341,25.58,63.37,50.22\nregnetz_d8_evos,256,1024.0,626.83,1633.604,4.5,24.92,23.46\nregnety_160,288,768.0,626.54,1225.759,26.37,38.07,83.59\nconvnext_base,320,768.0,617.04,1244.641,31.39,58.68,88.59\nfastvit_ma36,256,1024.0,615.75,1662.995,7.85,40.39,44.07\ntf_efficientnetv2_m,384,1024.0,614.24,1667.09,15.85,57.52,54.14\ngcvit_base,224,1024.0,612.92,1670.669,14.87,55.48,90.32\nregnety_320,224,1024.0,612.34,1672.272,32.34,30.26,145.05\nefficientvit_l2,384,768.0,610.03,1258.949,20.45,57.01,63.71\npoolformerv2_m36,224,1024.0,609.2,1680.886,8.81,22.02,56.08\nregnetz_c16_evos,320,512.0,608.23,841.78,3.86,25.88,13.49\nresnetv2_50x3_bit,224,768.0,585.49,1311.719,37.06,33.34,217.32\nseresnet152d,320,1024.0,585.32,1749.453,24.09,47.72,66.84\nxcit_small_12_p8_224,224,1024.0,584.75,1751.159,18.69,47.19,26.21\nresnet200,288,1024.0,584.49,1751.952,24.91,53.21,64.67\nresnetrs152,320,1024.0,580.71,1763.336,24.34,48.14,86.62\ncaformer_m36,224,1024.0,580.7,1763.373,12.75,40.61,56.2\nresnext101_64x4d,288,1024.0,579.65,1766.578,25.66,51.59,83.46\nlevit_conv_512_s8,224,256.0,579.33,441.879,21.82,52.28,74.05\ncrossvit_18_dagger_408,408,1024.0,578.67,1769.56,25.31,49.38,44.61\nlevit_512_s8,224,256.0,564.15,453.77,21.82,52.28,74.05\nconvnextv2_tiny,384,384.0,553.95,693.189,13.14,39.48,28.64\nconvformer_m36,224,1024.0,546.86,1872.507,12.89,42.05,57.05\nefficientnet_b5,416,256.0,546.68,468.268,8.27,80.68,30.39\nseresnet269d,256,1024.0,545.35,1877.679,26.59,53.6,113.67\nefficientvit_l3,256,768.0,542.99,1414.373,36.06,50.98,246.04\nseresnext101_32x8d,288,1024.0,537.9,1903.669,27.24,51.63,93.57\nefficientnetv2_m,416,1024.0,5
31.24,1927.549,18.6,67.5,54.14\nresnetrs270,256,1024.0,529.33,1934.515,27.06,55.84,129.86\nmaxvit_rmlp_base_rw_224,224,768.0,529.1,1451.502,22.63,79.3,116.14\nswinv2_base_window8_256,256,1024.0,528.71,1936.775,20.37,52.59,87.92\nregnetz_e8,320,768.0,528.46,1453.264,15.46,63.94,57.7\nseresnext101d_32x8d,288,1024.0,527.36,1941.726,27.64,52.95,93.59\nconvnext_large_mlp,256,768.0,525.72,1460.834,44.94,56.33,200.13\nnfnet_f2,256,1024.0,524.14,1953.657,33.76,41.85,193.78\nhalonet_h1,256,256.0,522.84,489.621,3.0,51.17,8.1\nregnetx_320,224,1024.0,522.6,1959.408,31.81,36.3,107.81\nmixer_l16_224,224,1024.0,520.22,1968.376,44.6,41.69,208.2\nresnext101_32x16d,224,1024.0,519.8,1969.975,36.27,51.18,194.03\neca_nfnet_l2,320,1024.0,509.51,2009.758,20.95,47.43,56.72\necaresnet200d,288,1024.0,503.74,2032.793,25.31,54.59,64.69\nseresnet200d,288,1024.0,503.36,2034.329,25.32,54.6,71.86\ncaformer_s18,384,512.0,501.38,1021.162,11.45,44.61,26.34\nvolo_d3_224,224,1024.0,497.87,2056.757,20.78,60.09,86.33\nresnet200d,320,1024.0,493.82,2073.621,31.25,67.33,64.69\nswin_large_patch4_window7_224,224,768.0,492.35,1559.852,34.53,54.94,196.53\nvit_base_patch16_18x2_224,224,1024.0,492.32,2079.918,50.37,49.17,256.73\ndeit_base_patch16_384,384,1024.0,491.82,2082.046,49.4,48.3,86.86\nvit_base_patch16_clip_384,384,1024.0,491.74,2082.405,49.41,48.3,86.86\nvit_base_patch16_384,384,1024.0,491.42,2083.727,49.4,48.3,86.86\ndeit_base_distilled_patch16_384,384,1024.0,491.32,2084.164,49.49,48.39,87.63\nhrnet_w48_ssld,288,1024.0,490.92,2085.876,28.66,47.21,77.47\neva_large_patch14_196,196,1024.0,490.45,2087.863,59.66,43.77,304.14\nmaxvit_base_tf_224,224,512.0,488.88,1047.285,23.52,81.67,119.47\nefficientnet_b5,448,256.0,488.83,523.691,9.59,93.56,30.39\nvit_large_patch16_224,224,1024.0,488.5,2096.219,59.7,43.77,304.33\nswinv2_small_window16_256,256,512.0,486.59,1052.215,12.82,66.29,49.73\nswinv2_large_window12_192,192,768.0,485.58,1581.6,26.17,56.53,228.77\nconvformer_s18,384,512.0,484.08,1057.663,11.63,46.49,26.7
7\nseresnextaa101d_32x8d,288,1024.0,479.96,2133.497,28.51,56.44,93.59\ncoatnet_3_rw_224,224,256.0,478.44,535.067,32.63,59.07,181.81\ncoatnet_rmlp_3_rw_224,224,256.0,477.75,535.833,32.75,64.7,165.15\nxcit_large_24_p16_224,224,1024.0,472.07,2169.166,35.86,47.26,189.1\nvit_small_patch14_dinov2,518,1024.0,469.29,2181.987,29.46,57.34,22.06\ndeit3_base_patch16_384,384,1024.0,466.88,2193.286,49.4,48.3,86.88\ndeit3_large_patch16_224,224,1024.0,466.56,2194.777,59.7,43.77,304.37\nefficientnetv2_rw_m,416,768.0,466.5,1646.281,21.49,79.62,53.24\nnfnet_f1,320,1024.0,466.35,2195.774,35.97,46.77,132.63\nnf_regnet_b5,456,768.0,464.5,1653.385,11.7,61.95,49.74\ncoatnet_3_224,224,256.0,464.1,551.594,35.72,63.61,166.97\nvit_small_patch14_reg4_dinov2,518,1024.0,460.4,2224.119,29.55,57.51,22.06\npoolformerv2_m48,224,1024.0,459.37,2229.113,11.59,29.17,73.35\nbeitv2_large_patch16_224,224,1024.0,452.16,2264.697,59.7,43.77,304.43\nbeit_large_patch16_224,224,1024.0,452.15,2264.716,59.7,43.77,304.43\nresnetv2_101x1_bit,448,512.0,451.35,1134.365,31.65,64.93,44.54\ndm_nfnet_f2,256,1024.0,451.22,2269.395,33.76,41.85,193.78\nvit_base_patch16_siglip_384,384,1024.0,448.34,2283.991,50.0,49.11,93.18\nresnetv2_152x2_bit,224,1024.0,441.5,2319.35,46.95,45.11,236.34\nconvnext_xlarge,224,768.0,435.62,1762.988,60.98,57.5,350.2\nmaxvit_tiny_tf_384,384,256.0,434.99,588.503,16.0,94.22,30.98\nefficientformerv2_l,224,1024.0,431.02,2375.769,2.59,18.54,26.32\nconvnext_base,384,512.0,430.72,1188.698,45.21,84.49,88.59\nconvnextv2_base,288,512.0,429.59,1191.832,25.43,47.53,88.72\nresnetrs200,320,1024.0,428.05,2392.217,31.51,67.81,93.21\nflexivit_large,240,1024.0,424.67,2411.279,68.48,50.22,304.36\nconvnextv2_large,224,512.0,423.49,1208.977,34.4,43.13,197.96\nxcit_tiny_12_p8_384,384,1024.0,423.2,2419.661,14.12,69.12,6.71\nswinv2_cr_large_224,224,768.0,422.05,1819.675,35.1,78.42,196.68\ncaformer_b36,224,768.0,419.19,1832.111,22.5,54.14,98.75\nswinv2_cr_tiny_384,384,256.0,419.04,610.909,15.34,161.01,28.33\ntf_efficientne
t_b5,456,256.0,418.1,612.278,10.46,98.86,30.39\nconvnext_large,288,512.0,415.42,1232.482,56.87,71.29,197.77\ndavit_huge,224,512.0,410.45,1247.402,60.93,73.44,348.92\nmaxxvitv2_rmlp_large_rw_224,224,768.0,409.41,1875.861,43.69,75.4,215.42\ntiny_vit_21m_512,512,384.0,408.26,940.575,21.23,83.26,21.27\nxcit_small_24_p16_384,384,1024.0,408.08,2509.308,26.72,68.57,47.67\ntf_efficientnetv2_m,480,768.0,405.02,1896.185,24.76,89.84,54.14\ntresnet_l,448,1024.0,403.56,2537.407,43.59,47.56,55.99\nbeit_base_patch16_384,384,1024.0,401.76,2548.786,49.4,48.3,86.74\nconvformer_b36,224,768.0,396.81,1935.431,22.69,56.06,99.88\nregnetz_d8_evos,320,768.0,395.82,1940.285,7.03,38.92,23.46\nseresnextaa101d_32x8d,320,1024.0,395.0,2592.386,35.19,69.67,93.59\nseresnet269d,288,1024.0,393.84,2600.059,33.65,67.81,113.67\ndm_nfnet_f1,320,1024.0,393.6,2601.642,35.97,46.77,132.63\nregnety_160,384,384.0,378.47,1014.589,46.87,67.67,83.59\nvit_large_r50_s32_384,384,1024.0,372.96,2745.589,56.4,64.88,329.09\nregnety_640,224,768.0,362.45,2118.906,64.16,42.5,281.38\neca_nfnet_l2,384,768.0,361.66,2123.504,30.05,68.28,56.72\nvit_large_patch14_224,224,1024.0,359.79,2846.069,77.83,57.11,304.2\nvit_large_patch14_clip_224,224,1024.0,359.08,2851.744,77.83,57.11,304.2\nswinv2_base_window12to16_192to256,256,384.0,358.35,1071.569,22.02,84.71,87.92\nswinv2_base_window16_256,256,384.0,358.25,1071.869,22.02,84.71,87.92\nvit_large_patch16_siglip_256,256,1024.0,351.53,2912.942,78.12,57.42,315.96\nvit_base_patch8_224,224,1024.0,350.95,2917.813,66.87,65.71,86.58\nefficientvit_l3,320,512.0,346.1,1479.341,56.32,79.34,246.04\nefficientnetv2_l,384,1024.0,342.83,2986.92,36.1,101.16,118.52\ntf_efficientnetv2_l,384,1024.0,338.97,3020.897,36.1,101.16,118.52\necaresnet269d,320,1024.0,337.13,3037.39,41.53,83.69,102.09\nresnest200e,320,1024.0,336.33,3044.627,35.69,82.78,70.2\nmaxvit_large_tf_224,224,384.0,336.26,1141.954,42.99,109.57,211.79\nconvnext_large_mlp,320,512.0,336.03,1523.669,70.21,88.02,200.13\ninception_next_base,384,512.
0,335.9,1524.27,43.64,75.48,86.67\nresnetv2_101x3_bit,224,768.0,334.56,2295.509,71.23,48.7,387.93\neca_nfnet_l3,352,768.0,328.62,2337.043,32.57,73.12,72.04\nvit_large_patch14_clip_quickgelu_224,224,1024.0,324.15,3159.023,77.83,57.11,303.97\nrepvgg_d2se,320,1024.0,320.2,3197.943,74.57,46.82,133.33\nvit_base_r50_s16_384,384,1024.0,317.01,3230.175,61.29,81.77,98.95\nvolo_d4_224,224,1024.0,317.0,3230.22,44.34,80.22,192.96\nvolo_d1_384,384,512.0,314.1,1630.023,22.75,108.55,26.78\nvit_large_patch14_xp_224,224,1024.0,309.84,3304.92,77.77,57.11,304.06\nconvmixer_768_32,224,1024.0,308.6,3318.227,19.55,25.95,21.11\nxcit_small_24_p8_224,224,1024.0,305.72,3349.464,35.81,90.77,47.63\nresnetrs350,288,1024.0,304.48,3363.098,43.67,87.09,163.96\nnasnetalarge,331,384.0,300.79,1276.642,23.89,90.56,88.75\ncoat_lite_medium_384,384,512.0,299.62,1708.831,28.73,116.7,44.57\ntresnet_xl,448,768.0,296.15,2593.304,60.77,61.31,78.44\nmaxvit_small_tf_384,384,192.0,288.16,666.295,33.58,139.86,69.02\npnasnet5large,331,384.0,287.26,1336.778,25.04,92.89,86.06\nxcit_medium_24_p16_384,384,1024.0,282.76,3621.451,47.39,91.63,84.4\necaresnet269d,352,1024.0,281.17,3641.867,50.25,101.25,102.09\ncoatnet_4_224,224,256.0,280.04,914.128,60.81,98.85,275.43\ncait_xxs24_384,384,1024.0,277.04,3696.16,9.63,122.65,12.03\ncoatnet_rmlp_2_rw_384,384,192.0,273.87,701.059,43.04,132.57,73.88\nresnetrs270,352,1024.0,271.91,3765.914,51.13,105.48,129.86\nnfnet_f2,352,768.0,270.88,2835.244,63.22,79.06,193.78\ncaformer_s36,384,512.0,266.29,1922.686,22.2,86.08,39.3\nconvnext_xlarge,288,512.0,263.75,1941.25,100.8,95.05,350.2\nswinv2_cr_small_384,384,256.0,258.42,990.618,29.7,298.03,49.7\nefficientnet_b6,528,128.0,257.57,496.944,19.4,167.39,43.04\nconvformer_s36,384,512.0,257.36,1989.401,22.54,89.62,40.01\nconvnextv2_large,288,256.0,256.91,996.448,56.87,71.29,197.96\neva02_large_patch14_224,224,1024.0,256.79,3987.739,77.9,65.52,303.27\neva02_large_patch14_clip_224,224,1024.0,253.51,4039.312,77.93,65.52,304.11\nresnext101_32x32d,2
24,512.0,253.0,2023.672,87.29,91.12,468.53\nmaxvit_tiny_tf_512,512,192.0,249.39,769.864,28.66,172.66,31.05\ntf_efficientnet_b6,528,128.0,247.44,517.29,19.4,167.39,43.04\nnfnet_f3,320,1024.0,247.37,4139.575,68.77,83.93,254.92\nmvitv2_large_cls,224,768.0,246.55,3114.926,42.17,111.69,234.58\nvit_so400m_patch14_siglip_224,224,1024.0,246.49,4154.292,106.18,70.45,427.68\nefficientnetv2_xl,384,1024.0,244.46,4188.739,52.81,139.2,208.12\nmvitv2_large,224,512.0,242.6,2110.485,43.87,112.02,217.99\nconvnextv2_base,384,256.0,242.26,1056.699,45.21,84.49,88.72\nvit_base_patch16_siglip_512,512,512.0,241.2,2122.705,88.89,87.3,93.52\nconvnext_large,384,384.0,234.69,1636.209,101.1,126.74,197.77\nconvnext_large_mlp,384,384.0,234.65,1636.476,101.11,126.74,200.13\ndm_nfnet_f2,352,768.0,234.38,3276.685,63.22,79.06,193.78\ntf_efficientnetv2_xl,384,1024.0,230.18,4448.679,52.81,139.2,208.12\nefficientnetv2_l,480,512.0,229.94,2226.68,56.4,157.99,118.52\ntf_efficientnetv2_l,480,512.0,227.38,2251.742,56.4,157.99,118.52\nswin_base_patch4_window12_384,384,256.0,226.65,1129.483,47.19,134.78,87.9\nregnety_320,384,384.0,225.95,1699.504,95.0,88.87,145.05\nresnetrs420,320,1024.0,221.8,4616.729,64.2,126.56,191.89\nxcit_tiny_24_p8_384,384,1024.0,221.03,4632.753,27.05,132.94,12.11\nefficientvit_l3,384,384.0,220.15,1744.25,81.08,114.02,246.04\nswinv2_large_window12to16_192to256,256,256.0,218.91,1169.41,47.81,121.53,196.74\nmaxxvitv2_rmlp_base_rw_384,384,384.0,215.87,1778.825,70.18,160.22,116.09\nresmlp_big_24_224,224,1024.0,214.65,4770.604,100.23,87.31,129.14\ndm_nfnet_f3,320,1024.0,212.33,4822.62,68.77,83.93,254.92\nvolo_d5_224,224,1024.0,212.3,4823.349,72.4,118.11,295.46\nxcit_medium_24_p8_224,224,1024.0,210.35,4868.038,63.52,121.22,84.32\nseresnextaa201d_32x8d,320,1024.0,207.05,4945.752,70.22,138.71,149.39\neca_nfnet_l3,448,512.0,204.74,2500.737,52.55,118.4,72.04\nxcit_small_12_p8_384,384,512.0,195.78,2615.134,54.92,138.25,26.21\ncait_xs24_384,384,768.0,193.45,3970.037,19.28,183.98,26.67\ncaformer_m36,
384,256.0,191.51,1336.728,37.45,119.33,56.2\nfocalnet_huge_fl3,224,384.0,190.45,2016.221,118.26,104.8,745.28\neva02_base_patch14_448,448,512.0,189.13,2707.053,87.74,98.4,87.12\nmaxvit_xlarge_tf_224,224,256.0,188.97,1354.682,96.49,164.37,506.99\nconvformer_m36,384,384.0,186.96,2053.847,37.87,123.56,57.05\ncait_xxs36_384,384,1024.0,185.14,5531.038,14.35,183.7,17.37\nswinv2_cr_base_384,384,256.0,184.66,1386.338,50.57,333.68,87.88\nresnetrs350,384,1024.0,184.39,5553.562,77.59,154.74,163.96\nregnety_1280,224,512.0,182.89,2799.45,127.66,71.58,644.81\nswinv2_cr_huge_224,224,384.0,181.27,2118.357,115.97,121.08,657.83\nvit_huge_patch14_clip_224,224,1024.0,179.25,5712.71,161.99,95.07,632.05\nvit_huge_patch14_224,224,1024.0,179.24,5713.082,161.99,95.07,630.76\nvolo_d2_384,384,384.0,177.67,2161.247,46.17,184.51,58.87\nmaxvit_rmlp_base_rw_384,384,384.0,177.21,2166.875,66.51,233.79,116.14\nvit_base_patch14_dinov2,518,512.0,175.93,2910.275,117.11,114.68,86.58\nvit_huge_patch14_gap_224,224,1024.0,175.35,5839.715,161.36,94.7,630.76\nvit_base_patch14_reg4_dinov2,518,512.0,175.34,2920.066,117.45,115.02,86.58\nconvnextv2_huge,224,256.0,174.19,1469.676,115.0,79.07,660.29\ndeit3_huge_patch14_224,224,1024.0,172.49,5936.531,161.99,95.07,632.13\nconvmixer_1536_20,224,1024.0,172.27,5944.074,48.68,33.03,51.63\nvit_huge_patch14_clip_quickgelu_224,224,1024.0,165.12,6201.386,161.99,95.07,632.08\nmaxvit_small_tf_512,512,96.0,163.95,585.546,60.02,256.36,69.13\nmaxvit_base_tf_384,384,192.0,162.75,1179.72,69.34,247.75,119.65\nxcit_large_24_p16_384,384,1024.0,162.01,6320.659,105.34,137.15,189.1\nresnetv2_152x2_bit,384,384.0,160.06,2399.153,136.16,132.56,236.34\nvit_huge_patch14_xp_224,224,1024.0,159.21,6431.544,161.88,95.07,631.8\nresnest269e,416,512.0,159.04,3219.278,77.69,171.98,110.93\neva_large_patch14_336,336,768.0,155.41,4941.906,174.74,128.21,304.53\nvit_large_patch14_clip_336,336,768.0,155.09,4951.819,174.74,128.21,304.53\nvit_large_patch16_384,384,768.0,154.94,4956.737,174.85,128.21,304.72\n
convnext_xxlarge,256,384.0,152.35,2520.42,198.09,124.45,846.47\ndavit_giant,224,384.0,151.56,2533.626,192.34,138.2,1406.47\nresnetv2_50x3_bit,448,192.0,150.44,1276.251,145.7,133.37,217.32\ncoatnet_5_224,224,192.0,149.61,1283.336,142.72,143.69,687.47\nefficientnetv2_xl,512,512.0,149.15,3432.877,93.85,247.32,208.12\ncait_s24_384,384,512.0,148.91,3438.219,32.17,245.3,47.06\nconvnext_xlarge,384,256.0,148.61,1722.573,179.2,168.99,350.2\ntf_efficientnetv2_xl,512,512.0,148.0,3459.525,93.85,247.32,208.12\nefficientnet_b7,600,96.0,147.91,649.053,38.33,289.94,66.35\ndeit3_large_patch16_384,384,1024.0,147.79,6928.856,174.85,128.21,304.76\nseresnextaa201d_32x8d,384,768.0,147.05,5222.537,101.11,199.72,149.39\nnfnet_f3,416,512.0,146.71,3489.974,115.58,141.78,254.92\nvit_giant_patch16_gap_224,224,1024.0,145.38,7043.632,198.14,103.64,1011.37\nconvnextv2_large,384,192.0,144.92,1324.86,101.1,126.74,197.96\nresnetv2_152x4_bit,224,512.0,144.91,3533.266,186.9,90.22,936.53\nvit_large_patch16_siglip_384,384,768.0,144.23,5324.878,175.76,129.18,316.28\ntf_efficientnet_b7,600,96.0,143.48,669.058,38.33,289.94,66.35\nnfnet_f4,384,768.0,142.67,5383.101,122.14,147.57,316.07\nvit_large_patch14_clip_quickgelu_336,336,768.0,140.95,5448.604,174.74,128.21,304.29\ncaformer_b36,384,256.0,138.42,1849.458,66.12,159.11,98.75\nswin_large_patch4_window12_384,384,128.0,135.49,944.717,104.08,202.16,196.74\nconvformer_b36,384,256.0,135.29,1892.221,66.67,164.75,99.88\nresnetrs420,416,1024.0,130.11,7870.213,108.45,213.79,191.89\nbeit_large_patch16_384,384,768.0,129.31,5939.365,174.84,128.21,305.0\ndm_nfnet_f3,416,512.0,127.57,4013.328,115.58,141.78,254.92\nregnety_640,384,256.0,126.8,2018.836,188.47,124.83,281.38\ndm_nfnet_f4,384,768.0,123.05,6241.189,122.14,147.57,316.07\nfocalnet_huge_fl4,224,512.0,122.81,4169.023,118.9,113.34,686.46\nxcit_large_24_p8_224,224,512.0,120.1,4263.036,141.22,181.53,188.93\nresnetv2_152x2_bit,448,256.0,117.91,2171.109,184.99,180.43,236.34\neva_giant_patch14_224,224,1024.0,116.71,877
3.739,259.74,135.89,1012.56\neva_giant_patch14_clip_224,224,1024.0,116.64,8779.464,259.74,135.89,1012.59\nvit_giant_patch14_224,224,1024.0,114.18,8968.21,259.74,135.89,1012.61\nvit_giant_patch14_clip_224,224,1024.0,114.09,8975.383,259.74,135.89,1012.65\nswinv2_cr_large_384,384,128.0,112.81,1134.666,108.96,404.96,196.68\nmaxvit_large_tf_384,384,128.0,111.17,1151.411,126.61,332.3,212.03\neva02_large_patch14_clip_336,336,1024.0,110.28,9285.405,174.97,147.1,304.43\nmvitv2_huge_cls,224,384.0,107.61,3568.518,120.67,243.63,694.8\nconvnextv2_huge,288,128.0,105.35,1214.957,190.1,130.7,660.29\nxcit_small_24_p8_384,384,512.0,102.73,4983.926,105.23,265.87,47.63\nnfnet_f5,416,512.0,100.11,5114.164,170.71,204.56,377.21\ncait_s36_384,384,512.0,99.61,5140.29,47.99,367.39,68.37\nswinv2_base_window12to24_192to384,384,96.0,96.35,996.364,55.25,280.36,87.92\nefficientnet_b8,672,96.0,95.78,1002.248,63.48,442.89,87.41\nfocalnet_large_fl3,384,384.0,94.47,4064.948,105.06,168.04,239.13\ntf_efficientnet_b8,672,96.0,93.18,1030.252,63.48,442.89,87.41\nmaxvit_base_tf_512,512,96.0,92.2,1041.169,123.93,456.26,119.88\nfocalnet_large_fl4,384,256.0,90.17,2839.222,105.2,181.78,239.32\nresnetv2_101x3_bit,448,192.0,87.88,2184.819,280.33,194.78,387.93\ndm_nfnet_f5,416,512.0,86.64,5909.833,170.71,204.56,377.21\nnfnet_f4,512,384.0,81.51,4711.211,216.26,262.26,316.07\nvolo_d3_448,448,192.0,76.74,2501.831,96.33,446.83,86.63\nvit_so400m_patch14_siglip_384,384,512.0,75.92,6743.556,302.34,200.62,428.23\nnfnet_f6,448,512.0,75.59,6773.482,229.7,273.62,438.36\nvit_huge_patch14_clip_336,336,768.0,75.49,10173.683,363.7,213.44,632.46\nxcit_medium_24_p8_384,384,384.0,71.15,5396.903,186.67,354.69,84.32\ndm_nfnet_f4,512,384.0,69.56,5520.408,216.26,262.26,316.07\nvit_gigantic_patch14_224,224,512.0,66.18,7736.423,473.4,204.12,1844.44\nvit_gigantic_patch14_clip_224,224,512.0,66.18,7735.92,473.41,204.12,1844.91\nfocalnet_xlarge_fl3,384,256.0,66.07,3874.786,185.61,223.99,408.79\ndm_nfnet_f6,448,512.0,65.28,7842.994,229.7,273
.62,438.36\nmaxvit_large_tf_512,512,64.0,63.68,1005.087,225.96,611.85,212.33\nfocalnet_xlarge_fl4,384,192.0,63.39,3028.979,185.79,242.31,409.03\nmaxvit_xlarge_tf_384,384,96.0,63.2,1518.995,283.86,498.45,475.32\nregnety_1280,384,128.0,62.14,2059.919,374.99,210.2,644.81\nbeit_large_patch16_512,512,256.0,61.47,4164.41,310.6,227.76,305.67\nconvnextv2_huge,384,96.0,60.73,1580.79,337.96,232.35,660.29\nswinv2_large_window12to24_192to384,384,48.0,60.6,792.119,116.15,407.83,196.74\neva02_large_patch14_448,448,512.0,59.6,8591.147,310.69,261.32,305.08\ntf_efficientnet_l2,475,128.0,59.14,2164.439,172.11,609.89,480.31\nnfnet_f5,544,384.0,58.55,6558.595,290.97,349.71,377.21\nvit_huge_patch14_clip_378,378,512.0,58.17,8801.788,460.13,270.04,632.68\nvolo_d4_448,448,192.0,57.2,3356.883,197.13,527.35,193.41\nnfnet_f7,480,384.0,57.05,6730.663,300.08,355.86,499.5\nvit_large_patch14_dinov2,518,384.0,56.81,6759.458,414.89,304.42,304.37\nvit_large_patch14_reg4_dinov2,518,384.0,56.51,6795.142,416.1,305.31,304.37\nvit_huge_patch14_clip_quickgelu_378,378,384.0,53.9,7123.722,460.13,270.04,632.68\nswinv2_cr_giant_224,224,192.0,52.42,3662.593,483.85,309.15,2598.76\ndm_nfnet_f5,544,384.0,50.82,7555.977,290.97,349.71,377.21\neva_giant_patch14_336,336,512.0,49.6,10322.486,583.14,305.1,1013.01\nswinv2_cr_huge_384,384,64.0,48.85,1310.056,352.04,583.18,657.94\nnfnet_f6,576,256.0,45.99,5566.397,378.69,452.2,438.36\nxcit_large_24_p8_384,384,256.0,40.54,6315.135,415.0,531.74,188.93\nvolo_d5_448,448,192.0,39.97,4803.918,315.06,737.92,295.91\ndm_nfnet_f6,576,256.0,39.68,6452.4,378.69,452.2,438.36\nnfnet_f7,608,256.0,35.92,7127.91,480.39,570.85,499.5\nmaxvit_xlarge_tf_512,512,48.0,35.73,1343.449,505.95,917.77,475.77\nregnety_2560,384,96.0,35.19,2728.299,747.83,296.49,1282.6\nconvnextv2_huge,512,48.0,34.07,1408.989,600.81,413.07,660.29\ncait_m36_384,384,256.0,32.53,7868.895,173.11,734.79,271.22\nresnetv2_152x4_bit,480,128.0,32.31,3961.512,844.84,414.26,936.53\nvolo_d5_512,512,96.0,27.94,3435.72,425.09,1105.3
7,296.09\nsamvit_base_patch16,1024,12.0,23.01,521.487,371.55,403.08,89.67\nefficientnet_l2,800,32.0,22.53,1420.616,479.12,1707.39,480.31\ntf_efficientnet_l2,800,32.0,22.12,1446.454,479.12,1707.39,480.31\nvit_giant_patch14_dinov2,518,192.0,17.14,11200.639,1553.56,871.89,1136.48\nvit_giant_patch14_reg4_dinov2,518,128.0,17.05,7505.847,1558.09,874.43,1136.48\nswinv2_cr_giant_384,384,32.0,15.01,2131.256,1450.71,1394.86,2598.76\neva_giant_patch14_560,560,192.0,15.01,12792.976,1618.04,846.56,1014.45\ncait_m48_448,448,128.0,13.76,9299.464,329.4,1708.21,356.46\nsamvit_large_patch16,1024,8.0,10.25,780.237,1317.08,1055.58,308.28\nsamvit_huge_patch16,1024,6.0,6.31,950.475,2741.59,1727.57,637.03\neva02_enormous_patch14_clip_224,224,,,,1132.46,497.58,4350.56\nvit_huge_patch16_gap_448,448,,,,544.7,636.83,631.67\n"
  },
  {
    "path": "results/benchmark-infer-amp-nchw-pt240-cu124-rtx3090.csv",
    "content": "model,infer_img_size,infer_samples_per_sec,infer_step_time,infer_batch_size,param_count,infer_gmacs,infer_macts\ntest_vit,160,109337.21,9.356,1024,0.37,0.04,0.48\ntest_byobnet,160,82185.02,12.45,1024,0.46,0.03,0.43\ntest_efficientnet,160,76411.59,13.392,1024,0.36,0.06,0.55\ntinynet_e,106,53275.73,19.211,1024,2.04,0.03,0.69\nmobilenetv3_small_050,224,47496.52,21.55,1024,1.59,0.03,0.92\nlcnet_035,224,42719.32,23.961,1024,1.64,0.03,1.04\nlcnet_050,224,38393.43,26.662,1024,1.88,0.05,1.26\nmobilenetv3_small_075,224,34935.91,29.301,1024,2.04,0.05,1.3\nefficientvit_m0,224,32556.1,31.443,1024,2.35,0.08,0.91\nmobilenetv3_small_100,224,31410.96,32.59,1024,2.54,0.06,1.42\ntf_mobilenetv3_small_minimal_100,224,29476.16,34.73,1024,2.04,0.06,1.41\ntinynet_d,152,29431.12,34.783,1024,2.34,0.05,1.42\ntf_mobilenetv3_small_075,224,28685.83,35.688,1024,2.04,0.05,1.3\ntf_mobilenetv3_small_100,224,26229.43,39.03,1024,2.54,0.06,1.42\nefficientvit_m1,224,25342.72,40.397,1024,2.98,0.17,1.33\nlcnet_075,224,24815.53,41.255,1024,2.36,0.1,1.99\nefficientvit_m2,224,22234.8,46.044,1024,4.19,0.2,1.47\nmobilenetv4_conv_small,224,21980.64,46.577,1024,3.77,0.19,1.97\nmnasnet_small,224,21439.71,47.752,1024,2.03,0.07,2.16\nlevit_128s,224,21017.47,48.711,1024,7.78,0.31,1.88\nlcnet_100,224,20320.08,50.384,1024,2.95,0.16,2.52\nmobilenetv4_conv_small,256,19758.75,51.816,1024,3.77,0.25,2.57\nregnetx_002,224,19130.47,53.516,1024,2.68,0.2,2.16\nefficientvit_m3,224,19121.62,53.542,1024,6.9,0.27,1.62\nmobilenetv2_035,224,19047.74,53.75,1024,1.68,0.07,2.86\nresnet10t,176,19017.0,53.837,1024,5.44,0.7,1.51\nghostnet_050,224,18326.71,55.865,1024,2.59,0.05,1.77\nlevit_conv_128s,224,17825.2,57.436,1024,7.78,0.31,1.88\nregnety_002,224,17806.29,57.495,1024,3.16,0.2,2.17\nefficientvit_m4,224,17783.18,57.573,1024,8.8,0.3,1.7\nresnet18,160,17690.73,57.874,1024,11.69,0.93,1.27\nrepghostnet_050,224,17490.98,58.535,1024,2.31,0.05,2.02\nefficientvit_b0,224,16914.4,60.53,1024,3.41,0.1,2.87\nmnasnet_050,224,16594
.59,61.697,1024,2.22,0.11,3.07\nvit_tiny_r_s16_p8_224,224,16372.59,62.534,1024,6.34,0.44,2.06\ntinynet_c,184,15537.22,65.896,1024,2.46,0.11,2.87\nmobilenetv2_050,224,15294.16,66.944,1024,1.97,0.1,3.64\npit_ti_224,224,14941.65,68.524,1024,4.85,0.7,6.19\npit_ti_distilled_224,224,14919.02,68.627,1024,5.1,0.71,6.23\nsemnasnet_050,224,14881.16,68.802,1024,2.08,0.11,3.44\nlevit_128,224,14392.72,71.137,1024,9.21,0.41,2.71\nrepghostnet_058,224,13974.19,73.268,1024,2.55,0.07,2.59\nvit_small_patch32_224,224,13132.7,77.963,1024,22.88,1.15,2.5\nlcnet_150,224,13019.52,78.641,1024,4.5,0.34,3.79\ncs3darknet_focus_s,256,12823.07,79.845,1024,3.27,0.69,2.7\nregnetx_004,224,12790.25,80.052,1024,5.16,0.4,3.14\nlevit_conv_128,224,12771.21,80.17,1024,9.21,0.41,2.71\nmobilenetv3_large_075,224,12600.27,81.258,1024,3.99,0.16,4.0\ncs3darknet_s,256,12496.47,81.932,1024,3.28,0.72,2.97\nregnetx_004_tv,224,12440.42,82.303,1024,5.5,0.42,3.17\nefficientvit_m5,224,12202.57,83.907,1024,12.47,0.53,2.41\nlevit_192,224,12163.42,84.177,1024,10.95,0.66,3.2\nresnet10t,224,12120.7,84.474,1024,5.44,1.1,2.43\ngernet_s,224,11872.38,86.241,1024,8.17,0.75,2.65\nese_vovnet19b_slim_dw,224,11765.52,87.024,1024,1.9,0.4,5.28\nhardcorenas_a,224,11423.94,89.627,1024,5.26,0.23,4.38\nrepghostnet_080,224,11343.56,90.261,1024,3.28,0.1,3.22\nmobilenetv3_rw,224,11309.87,90.531,1024,5.48,0.23,4.41\nmobilenetv3_large_100,224,11135.72,91.947,1024,5.48,0.23,4.41\ntf_mobilenetv3_large_075,224,10970.94,93.328,1024,3.99,0.16,4.0\nmixer_s32_224,224,10941.26,93.581,1024,19.1,1.0,2.28\nmnasnet_075,224,10916.2,93.796,1024,3.17,0.23,4.77\nlevit_conv_192,224,10869.06,94.202,1024,10.95,0.66,3.2\nmobilenetv1_100,224,10802.87,94.78,1024,4.23,0.58,5.04\ntf_mobilenetv3_large_minimal_100,224,10586.88,96.713,1024,3.92,0.22,4.4\nresnet14t,176,10568.07,96.886,1024,10.08,1.07,3.61\nmobilenetv1_100h,224,10512.64,97.397,1024,5.28,0.63,5.09\nhardcorenas_b,224,10474.56,97.751,1024,5.18,0.26,5.09\nresnet34,160,10402.15,98.431,1024,21.8,1.87,1.91\nhard
corenas_c,224,10270.04,99.697,1024,5.52,0.28,5.01\nnf_regnet_b0,192,10246.95,99.922,1024,8.76,0.37,3.15\ndeit_tiny_patch16_224,224,10236.41,100.025,1024,5.72,1.26,5.97\nvit_tiny_patch16_224,224,10231.54,100.072,1024,5.72,1.26,5.97\nregnety_004,224,10230.97,100.079,1024,4.34,0.41,3.89\ndeit_tiny_distilled_patch16_224,224,10185.75,100.523,1024,5.91,1.27,6.01\nregnetx_006,224,10150.71,100.87,1024,6.2,0.61,3.98\ntinynet_b,188,9883.52,103.596,1024,3.73,0.21,4.44\nghostnet_100,224,9881.78,103.616,1024,5.18,0.15,3.55\ntf_mobilenetv3_large_100,224,9800.29,104.477,1024,5.48,0.23,4.41\nmnasnet_100,224,9763.25,104.873,1024,4.38,0.33,5.46\nrepghostnet_100,224,9586.17,106.811,1024,4.07,0.15,3.98\nhardcorenas_d,224,9568.31,107.009,1024,7.5,0.3,4.93\ntf_efficientnetv2_b0,192,9480.34,108.003,1024,7.14,0.54,3.51\nmobilenetv2_075,224,9478.95,108.018,1024,2.64,0.22,5.86\nsemnasnet_075,224,9435.3,108.519,1024,2.91,0.23,5.54\nregnety_006,224,9299.07,110.103,1024,6.06,0.61,4.33\nresnet18,224,9260.29,110.57,1024,11.69,1.82,2.48\npit_xs_224,224,9215.59,111.106,1024,10.62,1.4,7.71\npit_xs_distilled_224,224,9179.35,111.544,1024,11.0,1.41,7.76\nmobilenet_edgetpu_v2_xs,224,9103.13,112.477,1024,4.46,0.7,4.8\nconvnext_atto,224,9094.81,112.582,1024,3.7,0.55,3.81\nvit_xsmall_patch16_clip_224,224,9084.06,112.715,1024,8.28,1.79,6.65\nlevit_256,224,9041.44,113.246,1024,18.89,1.13,4.23\nvit_medium_patch32_clip_224,224,9037.75,113.292,1024,39.69,2.0,3.34\nmobilenetv1_100,256,8925.01,114.724,1024,4.23,0.76,6.59\nspnasnet_100,224,8850.63,115.688,1024,4.42,0.35,6.03\nseresnet18,224,8755.08,116.951,1024,11.78,1.82,2.49\nmobilenetv1_100h,256,8716.6,117.468,1024,5.28,0.82,6.65\nrepghostnet_111,224,8703.6,117.642,1024,4.54,0.18,4.38\nconvnext_atto_ols,224,8634.04,118.591,1024,3.7,0.58,4.11\nmobilenetv2_100,224,8629.73,118.65,1024,3.5,0.31,6.68\nsemnasnet_100,224,8576.31,119.389,1024,3.89,0.32,6.23\nlegacy_seresnet18,224,8496.1,120.516,1024,11.78,1.82,2.49\nhgnetv2_b0,224,8489.19,120.614,1024,6.0,0.33,2.12\nha
rdcorenas_f,224,8388.5,122.062,1024,8.2,0.35,5.57\nhardcorenas_e,224,8316.4,123.12,1024,8.07,0.35,5.65\nedgenext_xx_small,256,8267.32,123.851,1024,1.33,0.26,3.33\nrepvgg_a0,224,8195.29,124.938,1024,9.11,1.52,3.59\nregnetx_008,224,8145.67,125.701,1024,7.26,0.81,5.15\nlevit_conv_256,224,8113.72,126.196,1024,18.89,1.13,4.23\ndla46_c,224,8109.77,126.257,1024,1.3,0.58,4.5\nmobilenetv1_125,224,8097.26,126.453,1024,6.27,0.89,6.3\nefficientnet_lite0,224,7979.97,128.311,1024,4.65,0.4,6.74\nconvnext_femto,224,7952.03,128.762,1024,5.22,0.79,4.57\nresnet18d,224,7915.36,129.359,1024,11.71,2.06,3.29\nghostnet_130,224,7893.46,129.718,1024,7.36,0.24,4.6\nmobilevit_xxs,256,7881.37,129.917,1024,1.27,0.42,8.34\nese_vovnet19b_slim,224,7874.63,130.029,1024,3.17,1.69,3.52\nlevit_256d,224,7779.0,131.627,1024,26.21,1.4,4.93\nmobilenetv4_conv_medium,224,7776.66,131.666,1024,9.72,0.84,5.8\nmobilenet_edgetpu_100,224,7770.98,131.763,1024,4.09,1.0,5.75\nxcit_nano_12_p16_224,224,7759.26,131.961,1024,3.05,0.56,4.17\nrepghostnet_130,224,7749.8,132.123,1024,5.48,0.25,5.24\ntinynet_a,192,7745.18,132.201,1024,6.19,0.35,5.41\nregnety_008,224,7721.37,132.605,1024,6.26,0.81,5.25\ntf_efficientnetv2_b0,224,7710.08,132.803,1024,7.14,0.73,4.77\nfbnetc_100,224,7646.21,133.909,1024,5.57,0.4,6.51\nconvnext_femto_ols,224,7577.53,135.127,1024,5.23,0.82,4.87\nmobilenetv4_hybrid_medium_075,224,7514.13,136.267,1024,7.31,0.66,5.65\nmobilevitv2_050,256,7395.66,138.45,1024,1.37,0.48,8.04\ntf_efficientnetv2_b1,192,7389.78,138.56,1024,8.14,0.76,4.59\nregnety_008_tv,224,7307.18,140.123,1024,6.43,0.84,5.42\ntf_efficientnet_lite0,224,7042.0,145.403,1024,4.65,0.4,6.74\nefficientnet_b0,224,6924.93,147.857,1024,5.29,0.4,6.75\nmobilenetv4_conv_medium,256,6920.92,147.948,1024,9.72,1.1,7.58\ndla46x_c,224,6863.64,149.181,1024,1.07,0.54,5.66\nmnasnet_140,224,6835.02,149.805,1024,7.12,0.6,7.71\nresnet14t,224,6822.56,150.08,1024,10.08,1.69,5.8\nrepghostnet_150,224,6798.46,150.612,1024,6.58,0.32,6.0\nrexnet_100,224,6759.37,151.483,10
24,4.8,0.41,7.44\nrexnetr_100,224,6733.45,152.067,1024,4.88,0.43,7.72\nefficientnet_b1_pruned,240,6731.65,152.107,1024,6.33,0.4,6.21\nmobilenetv1_125,256,6707.02,152.666,1024,6.27,1.16,8.23\nvisformer_tiny,224,6688.07,153.098,1024,10.32,1.27,5.72\nlevit_conv_256d,224,6662.02,153.697,1024,26.21,1.4,4.93\npvt_v2_b0,224,6589.46,155.389,1024,3.67,0.57,7.99\nefficientvit_b1,224,6579.31,155.629,1024,9.1,0.53,7.25\nfbnetv3_b,224,6563.03,156.015,1024,8.6,0.42,6.97\nrepvit_m1,224,6541.47,156.523,1024,5.49,0.83,7.45\nedgenext_xx_small,288,6532.74,156.738,1024,1.33,0.33,4.21\nmobilenet_edgetpu_v2_s,224,6516.09,157.14,1024,5.99,1.21,6.6\nmobilenetv2_110d,224,6504.09,157.429,1024,4.52,0.45,8.71\nvit_betwixt_patch32_clip_224,224,6503.93,157.433,1024,61.41,3.09,4.17\nregnetz_005,224,6484.34,157.909,1024,7.12,0.52,5.86\nese_vovnet19b_dw,224,6451.87,158.704,1024,6.54,1.34,8.25\ndla60x_c,224,6417.77,159.546,1024,1.32,0.59,6.01\nhgnetv2_b1,224,6310.48,162.26,1024,6.34,0.49,2.73\nrepvit_m0_9,224,6253.66,163.733,1024,5.49,0.83,7.45\ncrossvit_tiny_240,240,6253.35,163.742,1024,7.01,1.57,9.08\ncs3darknet_focus_m,256,6210.25,164.879,1024,9.3,1.98,4.89\ntf_efficientnet_b0,224,6210.04,164.879,1024,5.29,0.4,6.75\nconvnext_pico,224,6200.96,165.126,1024,9.05,1.37,6.1\nnf_regnet_b0,256,6157.62,166.288,1024,8.76,0.64,5.58\nsemnasnet_140,224,6069.35,168.703,1024,6.11,0.6,8.87\ncrossvit_9_dagger_240,240,6067.95,168.745,1024,8.78,1.99,9.97\nresnet50,160,6033.87,169.699,1024,25.56,2.1,5.67\nrepvgg_a1,224,5998.64,170.696,1024,14.09,2.64,4.74\nresnetblur18,224,5978.13,171.282,1024,11.69,2.34,3.39\ncs3darknet_m,256,5943.51,172.279,1024,9.31,2.08,5.28\nmobilenetv2_140,224,5943.08,172.292,1024,6.11,0.6,9.57\nconvnext_pico_ols,224,5910.37,173.245,1024,9.06,1.43,6.5\nmobilenetv4_hybrid_medium,224,5890.59,173.827,1024,11.07,0.98,6.84\nefficientnet_b0,256,5798.8,176.577,1024,5.29,0.52,8.81\ntf_efficientnetv2_b2,208,5794.22,176.717,1024,10.1,1.06,6.0\nhrnet_w18_small,224,5781.49,177.103,1024,13.19,1.61,5.72\ncr
ossvit_9_240,240,5771.83,177.403,1024,8.55,1.85,9.52\nskresnet18,224,5765.67,177.593,1024,11.96,1.82,3.24\nresnet50d,160,5707.52,179.403,1024,25.58,2.22,6.08\nresnet18,288,5648.23,181.286,1024,11.69,3.01,4.11\nvit_tiny_r_s16_p8_384,384,5625.7,182.012,1024,6.36,1.34,6.49\nefficientnet_b0_gn,224,5621.97,182.132,1024,5.29,0.42,6.75\nefficientnet_lite1,240,5579.58,183.516,1024,5.42,0.62,10.14\nghostnetv2_100,224,5564.84,184.002,1024,6.16,0.18,4.55\nfbnetv3_d,224,5563.09,184.06,1024,10.31,0.52,8.5\nconvnext_atto,288,5527.85,185.234,1024,3.7,0.91,6.3\nfbnetv3_b,256,5497.24,186.265,1024,8.6,0.55,9.1\nselecsls42,224,5467.25,187.287,1024,30.35,2.94,4.62\nefficientnet_blur_b0,224,5455.02,187.706,1024,5.29,0.43,8.72\nresnet34,224,5454.89,187.712,1024,21.8,3.67,3.74\nefficientvit_b1,256,5439.39,188.246,1024,9.1,0.69,9.46\ntiny_vit_5m_224,224,5432.07,188.5,1024,12.08,1.28,11.25\nselecsls42b,224,5418.51,188.972,1024,32.46,2.98,4.62\nlevit_384,224,5395.45,189.779,1024,39.13,2.36,6.26\ntf_efficientnetv2_b1,240,5369.96,190.68,1024,8.14,1.21,7.34\nseresnet18,288,5366.44,190.805,1024,11.78,3.01,4.11\nrepvit_m1_0,224,5361.91,190.966,1024,7.3,1.13,8.69\nconvnextv2_atto,224,5357.95,191.108,1024,3.71,0.55,3.81\nmixnet_s,224,5315.69,192.627,1024,4.13,0.25,6.25\nrepghostnet_200,224,5315.68,192.627,1024,9.8,0.54,7.96\nseresnet50,160,5298.11,193.266,1024,28.09,2.1,5.69\nedgenext_x_small,256,5274.46,194.132,1024,2.34,0.54,5.93\nrexnetr_130,224,5269.16,194.329,1024,7.61,0.68,9.81\nconvnext_atto_ols,288,5250.22,195.03,1024,3.7,0.96,6.8\nrepvit_m2,224,5242.24,195.319,1024,8.8,1.36,9.43\ngernet_m,224,5225.82,195.94,1024,21.14,3.02,5.24\nhgnetv2_b0,288,5205.77,196.695,1024,6.0,0.54,3.51\nseresnet34,224,5135.96,199.368,1024,21.96,3.67,3.74\nmobilenetv4_hybrid_medium,256,5134.61,199.421,1024,11.07,1.29,9.01\nresnet26,224,5121.5,199.932,1024,16.0,2.36,7.35\nvit_base_patch32_224,224,5098.65,200.828,1024,88.22,4.41,5.01\nmobilenetv3_large_150d,224,5094.2,201.003,1024,14.62,,\nvit_base_patch32_clip_224,2
24,5090.11,201.165,1024,88.22,4.41,5.01\necaresnet50t,160,5084.11,201.402,1024,25.57,2.21,6.04\nmobilenetv4_conv_blur_medium,224,5057.66,202.456,1024,9.72,1.22,8.58\nmobilenet_edgetpu_v2_m,224,5045.14,202.956,1024,8.46,1.85,8.15\ntf_efficientnet_lite1,240,5038.81,203.213,1024,5.42,0.62,10.14\nresnet50,176,5019.75,203.984,1024,25.56,2.62,6.92\nrepvit_m1_1,224,5014.8,204.185,1024,8.8,1.36,9.43\nlegacy_seresnet34,224,4973.36,205.887,1024,21.96,3.67,3.74\nresnet34d,224,4971.07,205.982,1024,21.82,3.91,4.54\ntf_mixnet_s,224,4958.21,206.516,1024,4.13,0.25,6.25\nresnetrs50,160,4955.95,206.604,1024,35.69,2.29,6.2\nxcit_tiny_12_p16_224,224,4922.03,208.034,1024,6.72,1.24,6.29\neva02_tiny_patch14_224,224,4913.38,208.399,1024,5.5,1.7,9.14\nmobilevitv2_075,256,4906.85,208.678,1024,2.87,1.05,12.06\npit_s_224,224,4895.76,209.15,1024,23.46,2.88,11.56\npit_s_distilled_224,224,4881.41,209.765,1024,24.04,2.9,11.64\nefficientnet_es_pruned,224,4880.76,209.792,1024,5.44,1.81,8.73\nefficientnet_es,224,4880.24,209.815,1024,5.44,1.81,8.73\nmobilenetv2_120d,224,4862.72,210.571,1024,5.83,0.69,11.97\nresnet18d,288,4851.45,211.06,1024,11.71,3.41,5.43\nresnext50_32x4d,160,4849.69,211.137,1024,25.03,2.17,7.35\nefficientnet_b1,224,4846.84,211.261,1024,7.79,0.59,9.36\nlevit_conv_384,224,4839.13,211.598,1024,39.13,2.36,6.26\nrexnet_130,224,4831.59,211.926,1024,7.56,0.68,9.71\ncs3darknet_focus_m,288,4831.24,211.944,1024,9.3,2.51,6.19\nconvnext_femto,288,4824.54,212.238,1024,5.22,1.3,7.56\ndla34,224,4805.9,213.06,1024,15.74,3.07,5.02\nefficientnet_b0_g16_evos,224,4800.56,213.298,1024,8.11,1.01,7.42\nresnet26d,224,4679.82,218.802,1024,16.01,2.6,8.15\ntf_efficientnet_es,224,4673.69,219.089,1024,5.44,1.81,8.73\nresmlp_12_224,224,4653.37,220.046,1024,15.35,3.01,5.5\ncs3darknet_m,288,4646.1,220.39,1024,9.31,2.63,6.69\nfbnetv3_d,256,4637.02,220.821,1024,10.31,0.68,11.1\nmobilenetv4_conv_aa_medium,256,4630.5,221.132,1024,9.72,1.58,10.3\nselecsls60,224,4624.54,221.418,1024,30.67,3.59,5.52\nrexnetr_150,224,4617
.37,221.761,1024,9.78,0.89,11.13\nconvnext_femto_ols,288,4613.88,221.929,1024,5.23,1.35,8.06\nnf_regnet_b1,256,4603.23,222.442,1024,10.22,0.82,7.27\nvit_base_patch32_clip_quickgelu_224,224,4601.51,222.526,1024,87.85,4.41,5.01\nselecsls60b,224,4600.54,222.572,1024,32.77,3.63,5.52\nconvnextv2_femto,224,4593.84,222.896,1024,5.23,0.79,4.57\nregnetx_016,224,4589.73,223.096,1024,9.19,1.62,7.93\ndeit_small_patch16_224,224,4586.09,223.273,1024,22.05,4.61,11.95\nvit_small_patch16_224,224,4584.86,223.334,1024,22.05,4.61,11.95\ngmixer_12_224,224,4571.76,223.974,1024,12.7,2.67,7.26\ngmlp_ti16_224,224,4565.24,224.293,1024,5.87,1.34,7.55\nrepvgg_b0,224,4553.06,224.894,1024,15.82,3.41,6.15\ndeit_small_distilled_patch16_224,224,4543.3,225.376,1024,22.44,4.63,12.02\nmixer_s16_224,224,4530.96,225.99,1024,18.53,3.79,5.97\nvit_small_patch32_384,384,4513.63,226.858,1024,22.92,3.45,8.25\nefficientnet_cc_b0_4e,224,4507.68,227.158,1024,13.31,0.41,9.42\nefficientnet_cc_b0_8e,224,4491.22,227.99,1024,24.01,0.42,9.42\nmixer_b32_224,224,4485.82,228.265,1024,60.29,3.24,6.29\ntiny_vit_11m_224,224,4481.54,228.483,1024,20.35,2.04,13.49\nmobilenetv4_conv_medium,320,4477.38,228.695,1024,9.72,1.71,11.84\nnf_resnet26,224,4466.75,229.24,1024,16.0,2.41,7.35\nmobilenet_edgetpu_v2_l,224,4462.15,229.476,1024,10.92,2.55,9.05\nefficientnet_b2_pruned,260,4421.73,231.573,1024,8.31,0.73,9.13\nefficientformer_l1,224,4415.57,231.896,1024,12.29,1.3,5.53\nresnetaa34d,224,4392.45,233.118,1024,21.82,4.43,5.07\ndarknet17,256,4386.8,233.416,1024,14.3,3.26,7.18\nghostnetv2_130,224,4382.43,233.65,1024,8.96,0.28,5.9\nrexnet_150,224,4375.57,234.017,1024,9.73,0.9,11.21\nconvnext_nano,224,4355.17,235.113,1024,15.59,2.46,8.37\necaresnet50d_pruned,224,4352.57,235.254,1024,19.94,2.53,6.43\nefficientnet_b1,240,4341.0,235.879,1024,7.79,0.71,10.88\nnf_regnet_b2,240,4323.87,236.815,1024,14.31,0.97,7.23\npoolformer_s12,224,4258.87,240.428,1024,11.92,1.82,5.53\nregnety_016,224,4256.18,240.571,1024,11.2,1.63,8.04\nmobilenetv4_conv_blur
_medium,256,4255.16,180.477,768,9.72,1.59,11.2\nvit_wee_patch16_reg1_gap_256,256,4234.72,241.8,1024,13.42,3.83,13.9\nmobilenet_edgetpu_v2_m,256,4230.57,242.038,1024,8.46,2.42,10.65\nvit_pwee_patch16_reg1_gap_256,256,4211.58,243.129,1024,15.25,4.37,15.87\ndeit3_small_patch16_224,224,4202.98,243.625,1024,22.06,4.61,11.95\nhgnetv2_b2,224,4202.72,243.64,1024,11.22,1.15,4.12\nedgenext_x_small,288,4195.02,244.088,1024,2.34,0.68,7.5\ntf_efficientnet_cc_b0_4e,224,4189.01,244.439,1024,13.31,0.41,9.42\nefficientnet_lite2,260,4184.61,244.696,1024,6.09,0.89,12.9\ntf_efficientnet_cc_b0_8e,224,4158.98,246.204,1024,24.01,0.42,9.42\nregnetz_005,288,4139.21,247.38,1024,7.12,0.86,9.68\nhgnetv2_b4,224,4136.11,247.566,1024,19.8,2.75,6.7\nefficientvit_b1,288,4112.03,249.015,1024,9.1,0.87,11.96\nresnest14d,224,4103.54,249.531,1024,10.61,2.76,7.33\nresnext26ts,256,4101.49,249.654,1024,10.3,2.43,10.52\nefficientnet_b0_g8_gn,224,4095.29,250.033,1024,6.56,0.66,6.75\nefficientnet_b1,256,4062.7,252.039,1024,7.79,0.77,12.22\ntf_efficientnet_b1,240,4009.73,255.369,1024,7.79,0.71,10.88\nedgenext_small,256,3998.75,256.069,1024,5.59,1.26,9.07\neca_resnext26ts,256,3985.65,256.911,1024,10.3,2.43,10.52\ndarknet21,256,3985.31,256.933,1024,20.86,3.93,7.47\nseresnext26ts,256,3983.57,257.043,1024,10.39,2.43,10.52\nregnetz_b16,224,3982.17,257.134,1024,9.72,1.45,9.95\nresnext50_32x4d,176,3977.56,257.434,1024,25.03,2.71,8.97\nconvnext_nano_ols,224,3963.35,258.357,1024,15.65,2.65,9.38\nvit_base_patch32_clip_256,256,3950.74,259.181,1024,87.86,5.76,6.65\nflexivit_small,240,3949.57,259.258,1024,22.06,5.35,14.18\ngcresnext26ts,256,3942.19,259.744,1024,10.48,2.43,10.53\nmobileone_s1,224,3939.35,259.93,1024,4.83,0.86,9.67\nhgnetv2_b1,288,3863.09,265.063,1024,6.34,0.82,4.51\nsedarknet21,256,3855.83,265.558,1024,20.95,3.93,7.47\ntf_efficientnetv2_b2,260,3852.83,265.768,1024,10.1,1.72,9.84\nnf_ecaresnet26,224,3841.08,266.581,1024,16.0,2.41,7.36\nefficientnet_b2,256,3835.68,266.957,1024,9.11,0.89,12.81\nnf_seresnet26,2
24,3835.46,266.972,1024,17.4,2.41,7.36\nmobilevit_xs,256,3825.9,200.727,768,2.32,1.05,16.33\ndpn48b,224,3821.05,267.978,1024,9.13,1.69,8.92\nmobilenetv4_conv_large,256,3819.0,268.122,1024,32.59,2.86,12.14\nvit_relpos_small_patch16_224,224,3815.41,268.375,1024,21.98,4.59,13.05\ntf_efficientnet_lite2,260,3814.92,268.409,1024,6.09,0.89,12.9\npvt_v2_b1,224,3812.97,268.546,1024,14.01,2.12,15.39\nresnet26t,256,3801.26,269.374,1024,16.01,3.35,10.52\nvit_srelpos_small_patch16_224,224,3792.64,269.986,1024,21.97,4.59,12.16\nlegacy_seresnext26_32x4d,224,3774.18,271.304,1024,16.79,2.49,9.39\nese_vovnet19b_dw,288,3768.37,271.725,1024,6.54,2.22,13.63\nconvnext_pico,288,3762.1,272.178,1024,9.05,2.27,10.08\ngernet_l,256,3741.79,273.656,1024,31.08,4.57,8.0\nmobilenetv4_hybrid_large_075,256,3731.28,274.426,1024,22.75,2.06,11.64\nresnet101,160,3660.13,279.761,1024,44.55,4.0,8.28\nedgenext_small_rw,256,3647.4,280.737,1024,7.83,1.58,9.51\nresnetblur18,288,3644.87,280.933,1024,11.69,3.87,5.6\ntf_efficientnetv2_b3,240,3640.22,281.291,1024,14.36,1.93,9.95\ncs3darknet_focus_l,256,3628.94,282.166,1024,21.15,4.66,8.03\nefficientnetv2_rw_t,224,3628.38,282.209,1024,13.65,1.93,9.94\nrepvit_m3,224,3609.67,283.663,1024,10.68,1.89,13.94\nmixnet_m,224,3606.44,283.926,1024,5.01,0.36,8.19\ncoatnet_pico_rw_224,224,3590.64,285.176,1024,10.85,2.05,14.62\nghostnetv2_160,224,3589.83,285.24,1024,12.39,0.42,7.23\ngc_efficientnetv2_rw_t,224,3589.63,285.255,1024,13.68,1.94,9.97\nconvnext_pico_ols,288,3587.72,285.408,1024,9.06,2.37,10.74\necaresnext50t_32x4d,224,3560.67,287.576,1024,15.41,2.7,10.09\necaresnext26t_32x4d,224,3560.44,287.594,1024,15.41,2.7,10.09\nseresnext26t_32x4d,224,3558.26,287.767,1024,16.81,2.7,10.09\neca_botnext26ts_256,256,3542.14,289.08,1024,10.59,2.46,11.6\nefficientnet_b3_pruned,300,3528.65,290.185,1024,9.86,1.04,11.86\nseresnext26d_32x4d,224,3528.55,290.194,1024,16.81,2.73,10.19\nnf_regnet_b1,288,3527.3,290.296,1024,10.22,1.02,9.2\ncoat_lite_tiny,224,3515.07,291.307,1024,5.72,1.6,11.65\
nconvnextv2_pico,224,3514.61,291.344,1024,9.07,1.37,6.1\ncs3darknet_l,256,3497.24,292.792,1024,21.16,4.86,8.55\nrepvgg_a2,224,3492.05,293.227,1024,28.21,5.7,6.26\ntf_mixnet_m,224,3485.45,293.782,1024,5.01,0.36,8.19\nvit_relpos_small_patch16_rpn_224,224,3481.46,294.119,1024,21.97,4.59,13.05\nhgnet_tiny,224,3480.86,294.17,1024,14.74,4.54,6.36\neca_halonext26ts,256,3471.15,294.993,1024,10.76,2.44,11.46\nmobilevitv2_100,256,3470.14,221.307,768,4.9,1.84,16.08\necaresnet101d_pruned,224,3466.94,295.35,1024,24.88,3.48,7.69\necaresnet26t,256,3417.04,299.664,1024,16.01,3.35,10.53\nhgnetv2_b3,224,3369.26,303.913,1024,16.29,1.78,5.07\nresnetv2_50,224,3355.79,305.133,1024,25.55,4.11,11.11\nbotnet26t_256,256,3355.2,305.187,1024,12.49,3.32,11.98\nnf_regnet_b2,272,3353.24,305.366,1024,14.31,1.22,9.27\nbat_resnext26ts,256,3346.65,305.963,1024,10.73,2.53,12.51\ncoatnext_nano_rw_224,224,3342.84,306.315,1024,14.7,2.47,12.8\necaresnetlight,224,3334.95,307.041,1024,30.16,4.11,8.42\nresnet34,288,3329.62,307.532,1024,21.8,6.07,6.18\nrexnetr_200,224,3328.94,230.694,768,16.52,1.59,15.11\nskresnet34,224,3313.88,308.994,1024,22.28,3.67,5.13\nfastvit_t8,256,3313.63,309.016,1024,4.03,0.7,8.63\nhalonet26t,256,3312.1,309.159,1024,12.48,3.19,11.69\nvit_small_r26_s32_224,224,3304.01,309.916,1024,36.43,3.56,9.85\ncs3sedarknet_l,256,3303.09,310.003,1024,21.91,4.86,8.56\ncoatnet_nano_cc_224,224,3289.43,311.29,1024,13.76,2.24,15.02\ncoat_lite_mini,224,3284.89,311.72,1024,11.01,2.0,12.25\nlambda_resnet26t,256,3270.07,313.133,1024,10.96,3.02,11.87\nmobilenetv4_hybrid_medium,320,3262.62,313.848,1024,11.07,2.05,14.36\nconvnextv2_atto,288,3253.42,314.736,1024,3.71,0.91,6.3\nvit_small_resnet26d_224,224,3246.31,315.424,1024,63.61,5.07,11.12\nresnet32ts,256,3243.82,315.666,1024,17.96,4.63,11.58\nvit_tiny_patch16_384,384,3237.39,316.294,1024,5.79,4.7,25.39\nconvit_tiny,224,3231.06,316.913,1024,5.71,1.26,7.94\nresnet50,224,3219.06,318.095,1024,25.56,4.11,11.11\ncoatnet_nano_rw_224,224,3215.75,318.422,1024,15.14,2
.41,15.41\nrexnet_200,224,3200.93,239.92,768,16.37,1.56,14.91\nresnet33ts,256,3195.24,320.467,1024,19.68,4.76,11.66\nresnetv2_50t,224,3185.32,321.463,1024,25.57,4.32,11.82\nmobileone_s2,224,3179.36,322.067,1024,7.88,1.34,11.55\nsam2_hiera_tiny,224,3178.9,322.114,1024,26.85,4.91,17.12\nresnetv2_50d,224,3167.44,323.278,1024,25.57,4.35,11.92\ncspresnet50,256,3155.9,324.462,1024,21.62,4.54,11.5\nseresnet34,288,3147.55,325.319,1024,21.96,6.07,6.18\nefficientvit_b2,224,3143.3,325.761,1024,24.33,1.6,14.62\nresnext26ts,288,3127.83,327.374,1024,10.3,3.07,13.31\nfbnetv3_g,240,3119.5,328.234,1024,16.62,1.28,14.87\nhrnet_w18_small_v2,224,3113.69,328.86,1024,15.6,2.62,9.65\nefficientnet_b1,288,3113.4,328.891,1024,7.79,0.97,15.46\nresnet26,288,3110.68,329.178,1024,16.0,3.9,12.15\ntresnet_m,224,3098.68,330.452,1024,31.39,5.75,7.31\nresnet101,176,3098.41,330.482,1024,44.55,4.92,10.08\nseresnet33ts,256,3096.38,330.698,1024,19.78,4.76,11.66\neca_resnet33ts,256,3095.76,330.765,1024,19.68,4.76,11.66\nconvnext_tiny,224,3090.39,331.339,1024,28.59,4.47,13.44\ndpn68b,224,3087.27,331.673,1024,12.61,2.35,10.47\ndpn68,224,3074.12,333.092,1024,12.61,2.35,10.47\ngcresnet33ts,256,3071.03,333.428,1024,19.88,4.76,11.68\nresnet50t,224,3051.2,335.595,1024,25.57,4.32,11.82\nresnet50c,224,3048.9,335.849,1024,25.58,4.35,11.92\nseresnext26ts,288,3040.9,336.731,1024,10.39,3.07,13.32\neca_resnext26ts,288,3040.18,336.812,1024,10.3,3.07,13.32\ntf_efficientnet_b2,260,3038.71,336.975,1024,9.11,1.02,13.83\nresnet34d,288,3032.3,337.687,1024,21.82,6.47,7.51\nregnetx_032,224,3027.88,338.176,1024,15.3,3.2,11.37\nresnet50d,224,3025.46,338.448,1024,25.58,4.35,11.92\ndla60,224,3019.09,339.163,1024,22.04,4.26,10.16\ngcresnext26ts,288,3012.63,339.891,1024,10.48,3.07,13.33\nefficientnet_em,240,3000.85,341.226,1024,6.9,3.04,14.34\nvit_medium_patch16_clip_224,224,2993.15,342.104,1024,38.59,8.0,15.93\nlevit_512,224,2990.53,342.404,1024,95.17,5.64,10.22\nresnest26d,224,2990.32,342.428,1024,17.07,3.64,9.97\nvit_base_patch32_
plus_256,256,2981.28,343.466,1024,119.48,7.79,7.76\ncrossvit_small_240,240,2972.85,344.44,1024,26.86,5.63,18.17\nrepvit_m1_5,224,2972.23,344.512,1024,14.64,2.31,15.7\nmobileone_s0,224,2964.67,345.389,1024,5.29,1.09,15.48\ncspresnet50d,256,2953.53,346.693,1024,21.64,4.86,12.55\nefficientnet_b2,288,2950.49,347.05,1024,9.11,1.12,16.2\nhaloregnetz_b,224,2936.32,348.725,1024,11.68,1.97,11.94\nmobilevit_s,256,2931.67,261.956,768,5.58,2.03,19.94\ncspresnet50w,256,2926.16,349.933,1024,28.12,5.04,12.19\nlegacy_seresnet50,224,2915.2,351.251,1024,28.09,3.88,10.6\ntf_efficientnet_em,240,2908.61,352.048,1024,6.9,3.04,14.34\nvgg11,224,2887.76,354.59,1024,132.86,7.61,7.44\nresnetv2_50x1_bit,224,2880.89,355.434,1024,25.55,4.23,11.11\nvit_little_patch16_reg1_gap_256,256,2872.75,356.442,1024,22.52,6.27,18.06\nhiera_tiny_224,224,2872.44,356.48,1024,27.91,4.91,17.13\nresnetaa50,224,2865.28,357.372,1024,25.56,5.15,11.64\nregnetv_040,224,2864.51,357.468,1024,20.64,4.0,12.29\nefficientnet_cc_b1_8e,240,2860.03,358.028,1024,39.72,0.75,15.44\nselecsls84,224,2852.61,358.96,1024,50.95,5.9,7.57\nregnety_032,224,2852.43,358.981,1024,19.44,3.2,11.26\nvit_little_patch16_reg4_gap_256,256,2849.96,359.293,1024,22.52,6.35,18.33\nregnety_040,224,2844.62,359.967,1024,20.65,4.0,12.29\nvovnet39a,224,2844.48,359.985,1024,22.6,7.09,6.73\ncoatnet_rmlp_nano_rw_224,224,2836.87,360.951,1024,15.15,2.62,20.34\nseresnet50,224,2831.5,361.636,1024,28.09,4.11,11.13\nresnet26d,288,2828.9,361.967,1024,16.01,4.29,13.48\nwide_resnet50_2,176,2817.33,363.453,1024,68.88,7.29,8.97\nvit_relpos_base_patch32_plus_rpn_256,256,2816.85,363.51,1024,119.42,7.68,8.01\nmixnet_l,224,2808.18,364.638,1024,7.33,0.58,10.84\ncs3darknet_focus_l,288,2803.64,365.228,1024,21.15,5.9,10.16\nlevit_512d,224,2803.3,365.271,1024,92.5,5.85,11.3\nconvnextv2_femto,288,2792.8,366.646,1024,5.23,1.3,7.56\ndeit3_medium_patch16_224,224,2781.17,368.18,1024,38.85,8.0,15.93\ncrossvit_15_240,240,2780.78,368.231,1024,27.53,5.81,19.77\nres2net50_48w_2s,224,2776.63
,368.782,1024,25.29,4.18,11.72\nresnet50_gn,224,2765.5,370.266,1024,25.56,4.14,11.11\nconvnext_tiny_hnf,224,2765.22,370.304,1024,28.59,4.47,13.44\ndensenet121,224,2764.41,370.412,1024,7.98,2.87,6.9\nlevit_conv_512,224,2753.82,371.836,1024,95.17,5.64,10.22\nresnetv2_50d_gn,224,2752.94,371.954,1024,25.57,4.38,11.92\nvisformer_small,224,2752.56,372.007,1024,40.22,4.88,11.43\nese_vovnet39b,224,2750.44,372.293,1024,24.57,7.09,6.74\nmobilevitv2_125,256,2748.02,279.464,768,7.48,2.86,20.1\nvit_relpos_medium_patch16_cls_224,224,2744.95,373.038,1024,38.76,8.03,18.24\neca_vovnet39b,224,2742.08,373.43,1024,22.6,7.09,6.74\ntiny_vit_21m_224,224,2740.23,373.681,1024,33.22,4.29,20.08\ntwins_svt_small,224,2734.38,374.479,1024,24.06,2.94,13.75\ngcvit_xxtiny,224,2725.34,375.722,1024,12.0,2.14,15.36\ntwins_pcpvt_small,224,2723.83,375.93,1024,24.11,3.83,18.08\nresnet50_clip_gap,224,2722.78,376.075,1024,23.53,5.39,12.44\nresnetaa50d,224,2721.94,376.192,1024,25.58,5.39,12.44\ntf_mixnet_l,224,2713.4,377.376,1024,7.33,0.58,10.84\ncrossvit_15_dagger_240,240,2708.78,378.02,1024,28.21,6.13,20.43\necaresnet50t,224,2707.73,378.166,1024,25.57,4.32,11.83\nseresnet50t,224,2705.39,378.493,1024,28.1,4.32,11.83\ncs3darknet_l,288,2703.4,378.772,1024,21.16,6.16,10.83\ntf_efficientnet_cc_b1_8e,240,2701.66,379.016,1024,39.72,0.75,15.44\ndavit_tiny,224,2698.07,284.638,768,28.36,4.54,18.89\nxcit_nano_12_p16_384,384,2693.55,380.156,1024,3.05,1.64,12.15\nresnetaa34d,288,2691.25,380.482,1024,21.82,7.33,8.38\necaresnet50d,224,2687.82,380.967,1024,25.58,4.35,11.93\necaresnet50d_pruned,288,2675.98,382.653,1024,19.94,4.19,10.61\nvit_base_resnet26d_224,224,2655.81,385.559,1024,101.4,6.97,13.16\nconvnext_nano,288,2652.03,386.11,1024,15.59,4.06,13.84\nresnetrs50,224,2651.0,386.252,1024,35.69,4.48,12.14\nnf_regnet_b3,288,2644.82,387.161,1024,18.59,1.67,11.84\nxcit_tiny_24_p16_224,224,2635.92,388.469,1024,12.12,2.34,11.82\ngcresnext50ts,256,2630.41,389.282,1024,15.67,3.75,15.46\nefficientvit_b2,256,2610.75,392.214,1024
,24.33,2.09,19.03\nresnetblur50,224,2609.83,392.352,1024,25.56,5.16,12.02\nvgg11_bn,224,2600.05,393.829,1024,132.87,7.62,7.44\nresnet50s,224,2592.0,395.051,1024,25.68,5.47,13.52\nmobileone_s3,224,2581.04,396.729,1024,10.17,1.94,13.85\nresnext50_32x4d,224,2577.31,397.304,1024,25.03,4.26,14.4\nresnet152,160,2576.63,397.407,1024,60.19,5.9,11.51\nhgnetv2_b2,288,2574.93,397.67,1024,11.22,1.89,6.8\ninception_next_tiny,224,2572.75,398.008,1024,28.06,4.19,11.98\neca_nfnet_l0,224,2572.54,398.04,1024,24.14,4.35,10.47\npoolformerv2_s12,224,2568.37,398.686,1024,11.89,1.83,5.53\nedgenext_small,320,2567.9,398.756,1024,5.59,1.97,14.16\nnfnet_l0,224,2566.01,399.053,1024,35.07,4.36,10.47\ncs3sedarknet_l,288,2552.9,401.1,1024,21.91,6.16,10.83\ncspresnext50,256,2550.51,401.475,1024,20.57,4.05,15.86\nhgnetv2_b4,288,2544.67,402.4,1024,19.8,4.54,11.08\nvit_relpos_medium_patch16_224,224,2533.85,404.118,1024,38.75,7.97,17.02\nresnet50_clip,224,2532.71,404.3,1024,38.32,6.14,12.98\nlevit_conv_512d,224,2532.59,404.319,1024,92.5,5.85,11.3\nefficientnet_lite3,300,2522.45,202.967,512,8.2,1.65,21.85\nconvnextv2_nano,224,2515.35,407.09,1024,15.62,2.46,8.37\nres2net50_26w_4s,224,2515.04,407.14,1024,25.7,4.28,12.61\nvit_srelpos_medium_patch16_224,224,2513.47,407.394,1024,38.74,7.96,16.21\ngcresnet50t,256,2508.19,408.251,1024,25.9,5.42,14.67\ndla60x,224,2506.41,408.541,1024,17.35,3.54,13.8\ncoatnet_0_rw_224,224,2498.58,409.821,1024,27.44,4.43,18.73\nresnetblur50d,224,2484.69,412.114,1024,25.58,5.4,12.82\nresnest50d_1s4x24d,224,2468.01,414.899,1024,25.68,4.43,13.57\nregnetx_040,224,2467.33,415.012,1024,22.12,3.99,12.2\ndensenetblur121d,224,2464.18,415.544,1024,8.0,3.11,7.9\nmaxvit_pico_rw_256,256,2458.1,312.426,768,7.46,1.83,22.3\nresnext50d_32x4d,224,2457.94,416.598,1024,25.05,4.5,15.2\nres2net50_14w_8s,224,2455.12,417.077,1024,25.06,4.21,13.28\nmaxvit_rmlp_pico_rw_256,256,2451.7,313.241,768,7.52,1.85,24.86\nvit_base_r26_s32_224,224,2446.42,418.56,1024,101.38,6.81,12.36\nregnetz_c16,256,2443.63,419.0
39,1024,13.46,2.51,16.57\nseresnetaa50d,224,2442.34,419.253,1024,28.11,5.4,12.46\ndla60_res2net,224,2435.63,420.413,1024,20.85,4.15,12.34\nmobilenetv4_conv_large,320,2431.25,421.172,1024,32.59,4.47,18.97\nregnety_040_sgn,224,2430.55,421.294,1024,20.65,4.03,12.29\nresnet32ts,288,2428.67,421.62,1024,17.96,5.86,14.65\nregnetz_b16,288,2414.87,424.028,1024,9.72,2.39,16.43\nconvnext_nano_ols,288,2407.94,425.249,1024,15.65,4.38,15.5\nres2net50d,224,2403.99,425.948,1024,25.72,4.52,13.41\nres2next50,224,2396.84,427.213,1024,24.67,4.2,13.71\nresnet33ts,288,2391.23,428.221,1024,19.68,6.02,14.75\nresnet26t,320,2387.91,428.817,1024,16.01,5.24,16.44\nfocalnet_tiny_srf,224,2383.46,429.616,1024,28.43,4.42,16.32\nlambda_resnet26rpt_256,256,2377.83,322.974,768,10.99,3.16,11.87\nresmlp_24_224,224,2376.96,430.792,1024,30.02,5.96,10.91\nefficientnetv2_rw_t,288,2366.54,432.688,1024,13.65,3.19,16.42\nsehalonet33ts,256,2360.24,433.843,1024,13.69,3.55,14.7\nvovnet57a,224,2356.05,434.611,1024,36.64,8.95,7.52\ninception_v3,299,2349.22,435.874,1024,23.83,5.73,8.97\nedgenext_base,256,2342.04,437.215,1024,18.51,3.85,15.58\ngmixer_24_224,224,2339.36,437.716,1024,24.72,5.28,14.45\ntf_efficientnetv2_b3,300,2333.45,438.824,1024,14.36,3.04,15.74\ndla60_res2next,224,2330.49,439.381,1024,17.03,3.49,13.17\nhiera_small_224,224,2327.86,439.879,1024,35.01,6.42,20.75\nseresnext50_32x4d,224,2326.69,440.099,1024,27.56,4.26,14.42\nnf_ecaresnet50,224,2326.39,440.157,1024,25.56,4.21,11.13\nnf_seresnet50,224,2322.81,440.835,1024,28.09,4.21,11.13\nseresnet33ts,288,2321.27,441.126,1024,19.78,6.02,14.76\neca_resnet33ts,288,2320.92,441.194,1024,19.68,6.02,14.76\nskresnet50,224,2319.56,441.453,1024,25.8,4.11,12.5\nlegacy_seresnext50_32x4d,224,2319.29,441.503,1024,27.56,4.26,14.42\ngc_efficientnetv2_rw_t,288,2314.22,442.471,1024,13.68,3.2,16.45\nvit_relpos_medium_patch16_rpn_224,224,2310.7,443.143,1024,38.73,7.97,17.02\nhgnetv2_b5,224,2308.27,443.612,1024,39.57,6.56,11.19\nnfnet_f0,192,2304.96,444.248,1024,71.49,7.21,1
0.16\ngcresnet33ts,288,2301.68,444.881,1024,19.88,6.02,14.78\nresnet51q,256,2298.43,445.511,1024,35.7,6.38,16.55\ntf_efficientnet_lite3,300,2290.61,223.511,512,8.2,1.65,21.85\nfbnetv3_g,288,2283.27,448.453,1024,16.62,1.77,21.09\nese_vovnet57b,224,2280.74,448.967,1024,38.61,8.95,7.52\nvit_medium_patch16_gap_240,240,2270.51,450.989,1024,44.4,9.22,18.81\nhgnet_small,224,2266.61,451.766,1024,24.36,8.53,8.79\nfastvit_t12,256,2264.32,452.222,1024,7.55,1.42,12.42\npvt_v2_b2,224,2256.22,453.841,1024,25.36,4.05,27.53\nedgenext_small_rw,320,2251.75,454.745,1024,7.83,2.46,14.85\nrdnet_tiny,224,2248.51,455.402,1024,23.86,5.06,15.98\ndensenet169,224,2245.68,455.974,1024,14.15,3.4,7.3\ncs3darknet_focus_x,256,2244.78,456.159,1024,35.02,8.03,10.69\ncoatnet_rmlp_0_rw_224,224,2240.35,457.061,1024,27.45,4.72,24.89\ndarknetaa53,256,2230.29,459.123,1024,36.02,7.97,12.39\nfocalnet_tiny_lrf,224,2229.47,459.291,1024,28.65,4.49,17.76\nrepvgg_b1g4,224,2228.31,459.531,1024,39.97,8.15,10.64\nefficientvit_l1,224,2227.63,459.67,1024,52.65,5.27,15.85\nskresnet50d,224,2225.51,460.109,1024,25.82,4.36,13.31\nxcit_small_12_p16_224,224,2223.72,460.479,1024,26.25,4.82,12.58\nnf_resnet50,256,2213.33,462.642,1024,25.56,5.46,14.52\nnextvit_small,224,2207.92,463.775,1024,31.76,5.81,18.44\nmobilenetv4_hybrid_medium,384,2197.46,465.981,1024,11.07,3.01,21.18\npoolformer_s24,224,2197.0,466.076,1024,21.39,3.41,10.68\ncoatnet_bn_0_rw_224,224,2195.15,466.473,1024,27.44,4.67,22.04\nresnet152,176,2193.45,466.834,1024,60.19,7.22,13.99\nresnet50_mlp,256,2191.94,467.155,1024,26.65,7.05,16.25\necaresnet50t,256,2191.4,467.27,1024,25.57,5.64,15.45\nnf_regnet_b3,320,2189.28,467.722,1024,18.59,2.05,14.61\nefficientnet_b3,288,2184.31,234.387,512,12.23,1.63,21.49\nseresnext26t_32x4d,288,2172.28,471.381,1024,16.81,4.46,16.68\nfastvit_s12,256,2167.43,472.438,1024,9.47,1.82,13.67\nresnetrs101,192,2164.92,472.987,1024,63.62,6.04,12.7\ncs3darknet_x,256,2164.39,473.101,1024,35.05,8.38,11.35\nfastvit_sa12,256,2158.11,474.477,1024,1
1.58,1.96,14.03\neva02_small_patch14_224,224,2155.47,475.059,1024,21.62,6.14,18.28\ncs3sedarknet_xdw,256,2153.33,475.532,1024,21.6,5.97,17.18\nseresnext26d_32x4d,288,2151.74,475.88,1024,16.81,4.51,16.85\necaresnet26t,320,2151.68,475.897,1024,16.01,5.24,16.44\nrexnetr_300,224,2147.85,476.744,1024,34.81,3.39,22.16\neva02_tiny_patch14_336,336,2147.15,476.899,1024,5.76,4.68,27.16\nconvnextv2_pico,288,2134.41,479.746,1024,9.07,2.27,10.08\necaresnet101d_pruned,288,2128.71,481.03,1024,24.88,5.75,12.71\ngcvit_xtiny,224,2125.89,481.67,1024,19.98,2.93,20.26\ngmlp_s16_224,224,2125.3,481.803,1024,19.42,4.42,15.1\nlambda_resnet50ts,256,2124.77,481.923,1024,21.54,5.07,17.48\nmobilevitv2_150,256,2105.23,243.194,512,10.59,4.09,24.11\ncoatnet_0_224,224,2091.24,244.821,512,25.04,4.58,24.01\nxcit_nano_12_p8_224,224,2072.91,493.98,1024,3.05,2.16,15.71\ndarknet53,256,2072.88,493.984,1024,41.61,9.31,12.39\ncs3sedarknet_x,256,2064.75,495.935,1024,35.4,8.38,11.35\nhgnet_tiny,288,2057.8,497.609,1024,14.74,7.51,10.51\nhieradet_small,256,2057.47,373.263,768,34.72,8.51,27.76\nvit_medium_patch16_reg1_gap_256,256,2056.16,498.005,1024,38.88,10.63,22.26\nhgnetv2_b3,288,2054.95,498.299,1024,16.29,2.94,8.38\nrexnetr_200,288,2048.59,249.918,512,16.52,2.62,24.96\nvit_medium_patch16_reg4_gap_256,256,2044.7,500.797,1024,38.88,10.76,22.6\nresnet61q,256,2040.64,501.793,1024,36.85,7.8,17.01\nvit_base_resnet50d_224,224,2033.13,503.646,1024,110.97,8.73,16.92\nresnest50d,224,2025.99,505.42,1024,27.48,5.4,14.36\nregnetx_080,224,2024.58,505.775,1024,39.57,8.02,14.06\nrexnet_300,224,2023.97,505.926,1024,34.71,3.44,22.4\nmixnet_xl,224,2021.99,506.421,1024,11.9,0.93,14.57\nresnetv2_50,288,2021.6,506.519,1024,25.55,6.79,18.37\nvit_medium_patch16_gap_256,256,2021.46,506.555,1024,38.86,10.59,22.15\npvt_v2_b2_li,224,2015.57,508.034,1024,22.55,3.91,27.6\nresnetv2_101,224,2011.15,509.149,1024,44.54,7.83,16.23\necaresnetlight,288,2010.28,509.37,1024,30.16,6.79,13.91\nsebotnet33ts_256,256,2002.67,255.649,512,13.7,3.89,17.
46\nswin_tiny_patch4_window7_224,224,1994.25,513.466,1024,28.29,4.51,17.06\ncspdarknet53,256,1989.31,514.74,1024,27.64,6.57,16.81\nmaxvit_nano_rw_256,256,1987.74,386.357,768,15.45,4.46,30.28\nmaxvit_rmlp_nano_rw_256,256,1983.0,387.281,768,15.5,4.47,31.92\nmaxxvit_rmlp_nano_rw_256,256,1975.69,388.707,768,16.78,4.37,26.05\ndm_nfnet_f0,192,1969.57,519.898,1024,71.49,7.21,10.16\ngcresnext50ts,288,1969.42,519.94,1024,15.67,4.75,19.57\nnest_tiny,224,1965.87,520.878,1024,17.06,5.83,25.48\ndla102,224,1956.25,523.44,1024,33.27,7.19,14.18\nresnet101,224,1956.19,523.457,1024,44.55,7.83,16.23\nefficientvit_b2,288,1950.05,525.102,1024,24.33,2.64,24.03\nnest_tiny_jx,224,1941.61,527.385,1024,17.06,5.83,25.48\nefficientformer_l3,224,1940.81,527.604,1024,31.41,3.93,12.01\nresnet50,288,1939.79,527.881,1024,25.56,6.8,18.37\nresnetv2_101d,224,1936.61,528.747,1024,44.56,8.07,17.04\ncrossvit_18_240,240,1935.28,529.112,1024,43.27,9.05,26.26\nlamhalobotnet50ts_256,256,1924.13,532.179,1024,22.57,5.02,18.44\nconvnext_tiny,288,1920.23,533.259,1024,28.59,7.39,22.21\nres2net50_26w_6s,224,1912.13,535.518,1024,37.05,6.33,15.28\nresnet101c,224,1894.68,540.45,1024,44.57,8.08,17.04\nmobileone_s4,224,1894.21,540.586,1024,14.95,3.04,17.74\ncrossvit_18_dagger_240,240,1889.12,542.039,1024,44.27,9.5,27.03\nresnet101d,224,1886.88,542.683,1024,44.57,8.08,17.04\ncoat_lite_small,224,1882.69,543.892,1024,19.84,3.96,22.09\ngcresnet50t,288,1876.91,545.567,1024,25.9,6.86,18.57\ntwins_pcpvt_base,224,1875.57,545.957,1024,43.83,6.68,25.25\nvgg13,224,1872.83,546.756,1024,133.05,11.31,12.25\nconvnext_small,224,1866.18,548.704,1024,50.22,8.71,21.56\ndpn68b,288,1861.61,550.049,1024,12.61,3.89,17.3\nregnetx_064,224,1854.58,552.136,1024,26.21,6.49,16.37\nmobilevitv2_175,256,1853.4,276.238,512,14.25,5.54,28.13\nhalonet50ts,256,1851.66,553.006,1024,22.73,5.3,19.2\nefficientnet_b3,320,1851.55,276.514,512,12.23,2.01,26.52\nresnet50t,288,1843.63,555.416,1024,25.57,7.14,19.53\nefficientnetv2_s,288,1830.25,559.474,1024,21.46,4.
75,20.13\nresnet50d,288,1828.73,559.94,1024,25.58,7.19,19.7\nwide_resnet50_2,224,1825.62,560.893,1024,68.88,11.43,14.4\nswin_s3_tiny_224,224,1817.27,563.471,1024,28.33,4.64,19.13\ntf_efficientnet_b3,300,1797.7,284.798,512,12.23,1.87,23.83\nhrnet_w18_ssld,224,1790.44,571.914,1024,21.3,4.32,16.31\ntresnet_v2_l,224,1789.04,572.364,1024,46.17,8.85,16.34\nhrnet_w18,224,1781.62,574.732,1024,21.3,4.32,16.31\nrepvgg_b1,224,1778.73,575.68,1024,57.42,13.16,10.64\nmaxxvitv2_nano_rw_256,256,1768.07,434.362,768,23.7,6.26,23.05\ncs3edgenet_x,256,1767.31,579.4,1024,47.82,11.53,12.92\nresnetaa101d,224,1761.72,581.24,1024,44.57,9.12,17.56\nresnet101_clip_gap,224,1761.35,581.36,1024,42.52,9.11,17.56\nefficientvit_l2,224,1761.03,581.466,1024,63.71,6.97,19.58\nvit_large_patch32_224,224,1759.2,582.072,1024,305.51,15.39,13.3\nlegacy_seresnet101,224,1752.85,584.182,1024,49.33,7.61,15.74\nvit_base_patch32_clip_384,384,1750.87,584.842,1024,88.3,13.06,16.5\ndensenet201,224,1750.03,585.121,1024,20.01,4.34,7.85\nvit_base_patch32_384,384,1749.95,585.149,1024,88.3,13.06,16.5\nefficientnetv2_rw_s,288,1749.21,585.397,1024,23.94,4.91,21.41\npit_b_distilled_224,224,1743.25,587.399,1024,74.79,12.5,33.07\ndarknetaa53,288,1734.96,590.204,1024,36.02,10.08,15.68\nefficientnet_b3_gn,288,1734.87,295.112,512,11.73,1.74,23.35\nresnetv2_101x1_bit,224,1730.79,591.627,1024,44.54,8.04,16.23\nresnetaa50,288,1729.69,592.003,1024,25.56,8.52,19.24\nseresnet101,224,1724.67,593.727,1024,49.33,7.84,16.27\nregnety_032,288,1721.01,594.988,1024,19.44,5.29,18.61\nseresnet50,288,1715.52,596.892,1024,28.09,6.8,18.39\nregnetv_040,288,1714.23,597.341,1024,20.64,6.6,20.3\npit_b_224,224,1713.92,597.451,1024,73.76,12.42,32.94\nxcit_tiny_12_p16_384,384,1713.1,597.736,1024,6.72,3.64,18.26\nresnet101s,224,1707.16,599.813,1024,44.67,9.19,18.64\nregnety_040,288,1706.68,599.986,1024,20.65,6.61,20.3\nmaxvit_tiny_rw_224,224,1699.84,451.796,768,29.06,5.11,33.11\nregnetv_064,224,1694.43,604.321,1024,30.58,6.39,16.41\ncait_xxs24_224,224,169
4.07,604.451,1024,11.96,2.53,20.29\nregnety_064,224,1688.52,606.436,1024,30.58,6.39,16.41\ndensenet121,288,1688.36,606.494,1024,7.98,4.74,11.41\nresnet50_gn,288,1685.84,607.404,1024,25.56,6.85,18.37\nresnet51q,288,1684.11,608.026,1024,35.7,8.07,20.94\nresnet101_clip,224,1683.83,608.126,1024,56.26,9.81,18.08\nnf_resnet101,224,1683.1,608.389,1024,44.55,8.01,16.23\nconvnext_tiny_hnf,288,1681.41,608.997,1024,28.59,7.39,22.21\nese_vovnet39b,288,1678.63,457.506,768,24.57,11.71,11.13\nrepvit_m2_3,224,1677.4,610.445,1024,23.69,4.57,26.21\nresnetv2_50d_gn,288,1676.39,610.823,1024,25.57,7.24,19.7\necaresnet101d,224,1674.43,611.54,1024,44.57,8.08,17.07\ncs3darknet_x,288,1672.69,612.175,1024,35.05,10.6,14.36\nvitamin_small_224,224,1669.96,613.177,1024,22.03,5.92,26.38\nconvnextv2_tiny,224,1666.61,614.408,1024,28.64,4.47,13.44\ncs3se_edgenet_x,256,1659.92,616.886,1024,50.72,11.53,12.94\nresnetblur101d,224,1659.81,616.926,1024,44.57,9.12,17.94\ndla102x,224,1657.53,617.775,1024,26.31,5.89,19.42\nregnetz_d32,256,1655.18,618.654,1024,27.58,5.98,23.74\nnf_resnet50,288,1650.24,620.505,1024,25.56,6.88,18.37\nefficientvit_b3,224,1649.16,620.91,1024,48.65,3.99,26.9\nmobilenetv4_conv_large,384,1643.43,623.074,1024,32.59,6.43,27.31\nresnetaa50d,288,1643.41,623.082,1024,25.58,8.92,20.57\nregnetz_d8,256,1642.29,623.507,1024,23.37,3.97,23.74\nhiera_small_abswin_256,256,1642.12,623.574,1024,34.36,8.29,26.38\necaresnet50t,288,1641.73,623.721,1024,25.57,7.14,19.55\nseresnet50t,288,1641.04,623.984,1024,28.1,7.14,19.55\nregnetz_b16_evos,224,1637.18,625.455,1024,9.74,1.43,9.95\nnextvit_base,224,1636.37,625.762,1024,44.82,8.29,23.71\ndavit_small,224,1634.36,469.897,768,49.75,8.8,30.49\nmixer_b16_224,224,1632.43,627.274,1024,59.88,12.62,14.53\necaresnet50d,288,1630.52,628.009,1024,25.58,7.19,19.72\nswinv2_cr_tiny_224,224,1629.93,628.235,1024,28.33,4.66,28.45\nmobilenetv4_hybrid_medium,448,1629.05,471.43,768,11.07,4.2,29.64\nregnety_080,224,1624.53,630.326,1024,39.18,8.0,17.97\nnf_regnet_b4,320,1623.3
5,630.784,1024,30.21,3.29,19.88\nregnetz_040,256,1621.31,631.576,1024,27.12,4.06,24.19\nvolo_d1_224,224,1620.03,632.078,1024,26.63,6.94,24.43\nese_vovnet39b_evos,224,1614.15,634.378,1024,24.58,7.07,6.74\ndarknet53,288,1612.47,635.036,1024,41.61,11.78,15.68\nregnetz_040_h,256,1612.19,635.152,1024,28.94,4.12,24.29\nresnetv2_50d_frn,224,1608.23,636.713,1024,25.59,4.33,11.92\ntf_efficientnetv2_s,300,1604.9,638.035,1024,21.46,5.35,22.73\nswinv2_cr_tiny_ns_224,224,1602.17,639.123,1024,28.33,4.66,28.45\nbotnet50ts_256,256,1595.84,320.823,512,22.74,5.54,22.23\nresmlp_36_224,224,1595.2,641.917,1024,44.69,8.91,16.33\ncs3sedarknet_x,288,1594.16,642.334,1024,35.4,10.6,14.37\npvt_v2_b3,224,1594.09,642.356,1024,45.24,6.92,37.7\nwide_resnet101_2,176,1589.86,644.071,1024,126.89,14.31,13.18\nhiera_base_224,224,1588.68,644.549,1024,51.52,9.4,30.42\nmvitv2_tiny,224,1580.5,647.884,1024,24.17,4.7,21.16\nsequencer2d_s,224,1577.41,649.155,1024,27.65,4.96,11.31\nresnetblur50,288,1577.39,649.165,1024,25.56,8.52,19.87\nresnet101d,256,1576.21,649.647,1024,44.57,10.55,22.25\nmobilevitv2_200,256,1575.87,324.89,512,18.45,7.22,32.15\nresnest50d_4s2x40d,224,1573.08,650.942,1024,30.42,4.4,17.94\nvit_base_patch16_224_miil,224,1571.45,651.616,1024,94.4,17.59,23.91\nvit_base_patch16_224,224,1570.66,651.945,1024,86.57,17.58,23.9\nresnext50_32x4d,288,1565.78,653.979,1024,25.03,7.04,23.81\ndeit_base_patch16_224,224,1564.98,654.312,1024,86.57,17.58,23.9\nvit_base_patch16_clip_224,224,1564.55,654.489,1024,86.57,17.58,23.9\ndeit_base_distilled_patch16_224,224,1562.81,655.219,1024,87.34,17.68,24.05\nresnext101_32x4d,224,1562.81,655.22,1024,44.18,8.01,21.23\nhalo2botnet50ts_256,256,1560.7,656.107,1024,22.64,5.02,21.78\nskresnext50_32x4d,224,1546.56,662.103,1024,27.48,4.5,17.18\ncaformer_s18,224,1545.75,662.45,1024,26.34,4.13,19.39\nvit_base_mci_224,224,1545.28,662.65,1024,86.35,17.73,24.65\neca_nfnet_l0,288,1542.25,663.956,1024,24.14,7.12,17.29\ntresnet_l,224,1541.09,664.452,1024,55.99,10.9,11.9\nnfnet_l0,288
,1540.89,664.54,1024,35.07,7.13,17.29\nregnetz_c16,320,1536.02,666.649,1024,13.46,3.92,25.88\nvit_medium_patch16_rope_reg1_gap_256,256,1527.11,670.537,1024,38.74,10.63,22.26\nrdnet_small,224,1526.64,670.743,1024,50.44,8.74,22.55\nconvnextv2_nano,288,1526.13,503.224,768,15.62,4.06,13.84\nbeit_base_patch16_224,224,1524.53,671.669,1024,86.53,17.58,23.9\ncoatnet_rmlp_1_rw_224,224,1520.15,673.605,1024,41.69,7.85,35.47\nmixer_l32_224,224,1519.29,673.986,1024,206.94,11.27,19.86\nres2net50_26w_8s,224,1519.14,674.046,1024,48.4,8.37,17.95\nvit_small_resnet50d_s16_224,224,1515.04,675.876,1024,57.53,13.48,24.82\nregnety_080_tv,224,1514.62,676.066,1024,39.38,8.51,19.73\nres2net101_26w_4s,224,1512.48,677.014,1024,45.21,8.1,18.45\nbeitv2_base_patch16_224,224,1510.67,677.835,1024,86.53,17.58,23.9\nresnet61q,288,1508.28,678.909,1024,36.85,9.87,21.52\nresnetblur50d,288,1502.22,681.649,1024,25.58,8.92,21.19\ndensenetblur121d,288,1501.15,682.13,1024,8.0,5.14,13.06\nresnext101_32x8d,176,1496.15,684.414,1024,88.79,10.33,19.37\nedgenext_base,320,1496.01,684.475,1024,18.51,6.01,24.32\nresnext50d_32x4d,288,1492.98,685.867,1024,25.05,7.44,25.13\nrepvgg_b2g4,224,1487.15,688.554,1024,61.76,12.63,12.9\ndeit3_base_patch16_224,224,1485.67,689.239,1024,86.59,17.58,23.9\nfastvit_mci0,256,1480.55,691.623,1024,11.41,2.42,18.29\npoolformer_s36,224,1480.43,691.678,1024,30.86,5.0,15.82\nregnety_040_sgn,288,1480.21,691.781,1024,20.65,6.67,20.3\nseresnetaa50d,288,1478.73,692.475,1024,28.11,8.92,20.59\nres2net101d,224,1471.66,695.8,1024,45.23,8.35,19.25\nresnetv2_50d_evos,224,1466.16,698.411,1024,25.59,4.33,11.92\nvit_relpos_base_patch16_clsgap_224,224,1464.16,699.363,1024,86.43,17.6,25.12\nvit_relpos_base_patch16_cls_224,224,1462.1,700.351,1024,86.43,17.6,25.12\nvit_small_patch16_36x1_224,224,1460.83,700.959,1024,64.67,13.71,35.69\nvit_small_patch16_384,384,1459.09,701.796,1024,22.2,15.52,50.78\nefficientnet_b3_gn,320,1457.71,263.416,384,11.73,2.14,28.83\ninception_next_small,224,1454.59,703.968,1024,49.3
7,8.36,19.27\nefficientvit_l2,256,1451.75,705.344,1024,63.71,9.09,25.49\nconvformer_s18,224,1450.71,705.851,1024,26.77,3.96,15.82\nvit_base_patch16_siglip_gap_224,224,1449.9,706.247,1024,85.8,17.49,23.75\ndpn92,224,1447.04,707.638,1024,37.67,6.54,18.21\ngcvit_tiny,224,1443.56,709.345,1024,28.22,4.79,29.82\nconvit_small,224,1442.31,709.962,1024,27.78,5.76,17.87\nfocalnet_small_srf,224,1440.19,711.006,1024,49.89,8.62,26.26\nvit_betwixt_patch16_reg1_gap_256,256,1438.93,711.629,1024,60.4,16.32,27.83\nvit_base_patch16_siglip_224,224,1434.62,713.769,1024,92.88,17.73,24.06\nvit_betwixt_patch16_reg4_gap_256,256,1427.38,717.384,1024,60.4,16.52,28.24\nvit_base_patch16_gap_224,224,1426.06,718.052,1024,86.57,17.49,25.59\nmaxvit_tiny_tf_224,224,1425.39,538.79,768,30.92,5.6,35.78\nnf_ecaresnet101,224,1424.0,719.089,1024,44.55,8.01,16.27\ncoatnet_1_rw_224,224,1423.83,719.174,1024,41.72,8.04,34.6\nnf_seresnet101,224,1422.7,719.746,1024,49.33,8.02,16.27\ncoatnet_rmlp_1_rw2_224,224,1422.67,719.763,1024,41.72,8.11,40.13\nseresnext50_32x4d,288,1414.64,723.843,1024,27.56,7.04,23.82\nseresnext101_32x4d,224,1413.69,724.336,1024,48.96,8.02,21.26\nlegacy_xception,299,1413.65,543.263,768,22.86,8.4,35.83\nlegacy_seresnext101_32x4d,224,1412.48,724.957,1024,48.96,8.02,21.26\nhgnetv2_b5,288,1407.43,727.559,1024,39.57,10.84,18.5\nvit_small_patch16_18x2_224,224,1406.97,727.793,1024,64.67,13.71,35.69\nresnetv2_152,224,1397.01,732.984,1024,60.19,11.55,22.56\nefficientnet_b4,320,1389.63,368.432,512,19.34,3.13,34.76\nvit_base_patch16_clip_quickgelu_224,224,1387.46,738.029,1024,86.19,17.58,23.9\nnfnet_f0,256,1379.83,742.109,1024,71.49,12.62,18.05\nresnet152,224,1373.51,745.522,1024,60.19,11.56,22.56\nflexivit_base,240,1371.79,746.461,1024,86.59,20.29,28.36\necaresnet50t,320,1370.27,747.287,1024,25.57,8.82,24.13\nefficientvit_b3,256,1369.93,560.6,768,48.65,5.2,35.01\nvit_relpos_base_patch16_224,224,1369.33,747.799,1024,86.43,17.51,24.97\ncs3edgenet_x,288,1368.22,748.405,1024,47.82,14.59,16.36\nvgg16_bn,
224,1364.4,750.503,1024,138.37,15.5,13.56\nresnetv2_152d,224,1363.32,751.094,1024,60.2,11.8,23.36\nmobilenetv4_conv_aa_large,384,1359.23,753.356,1024,32.59,7.07,32.29\nefficientformerv2_s0,224,1356.88,754.66,1024,3.6,0.41,5.3\nregnetx_120,224,1352.8,756.936,1024,46.11,12.13,21.37\nfocalnet_small_lrf,224,1350.27,758.334,1024,50.34,8.74,28.61\ntwins_pcpvt_large,224,1348.04,759.609,1024,60.99,9.84,35.82\ndeit3_small_patch16_384,384,1344.15,761.807,1024,22.21,15.52,50.78\nresnet152c,224,1340.85,763.681,1024,60.21,11.8,23.36\nrexnetr_300,288,1339.94,382.097,512,34.81,5.59,36.61\nmaxxvit_rmlp_tiny_rw_256,256,1339.27,573.433,768,29.64,6.66,39.76\nmaxvit_tiny_rw_256,256,1338.26,573.871,768,29.07,6.74,44.35\nresnet152d,224,1336.77,766.012,1024,60.21,11.8,23.36\nmaxvit_rmlp_tiny_rw_256,256,1336.57,574.593,768,29.15,6.77,46.92\nese_vovnet99b,224,1332.86,768.26,1024,63.2,16.51,11.27\npoolformerv2_s24,224,1332.11,768.696,1024,21.34,3.42,10.68\nxcit_tiny_12_p8_224,224,1314.83,778.795,1024,6.71,4.81,23.6\nxception41p,299,1314.22,389.574,512,26.91,9.25,39.86\nvit_base_patch32_clip_448,448,1306.81,783.576,1024,88.34,17.93,23.9\nconvnext_base,224,1301.11,787.008,1024,88.59,15.38,28.75\nefficientnet_el,300,1300.79,787.203,1024,10.59,8.0,30.7\nnextvit_large,224,1299.51,787.975,1024,57.87,10.78,28.99\nefficientnet_el_pruned,300,1297.09,789.448,1024,10.59,8.0,30.7\nvit_base_patch16_xp_224,224,1295.73,790.276,1024,86.51,17.56,23.9\ndla169,224,1285.68,796.451,1024,53.39,11.6,20.2\nregnety_120,224,1281.03,799.347,1024,51.82,12.14,21.38\nhrnet_w32,224,1280.36,799.766,1024,41.23,8.97,22.02\ncoatnet_1_224,224,1275.86,401.286,512,42.23,8.7,39.0\ntf_efficientnet_el,300,1270.05,806.254,1024,10.59,8.0,30.7\nhrnet_w30,224,1268.85,807.019,1024,37.71,8.15,21.21\nvgg19,224,1266.93,808.24,1024,143.67,19.63,14.86\nmixnet_xxl,224,1264.71,607.242,768,23.96,2.04,23.43\nmaxvit_tiny_pm_256,256,1264.25,607.461,768,30.09,6.61,47.9\nhiera_base_plus_224,224,1260.02,812.675,1024,69.9,12.67,37.98\nmobilenetv4_conv
_large,448,1258.67,610.158,768,32.59,8.75,37.17\ntwins_svt_base,224,1256.07,815.231,1024,56.07,8.59,26.33\nvit_base_patch16_rpn_224,224,1255.52,815.59,1024,86.54,17.49,23.75\nnest_small,224,1253.67,816.789,1024,38.35,10.35,40.04\nhgnet_small,288,1251.61,613.599,768,24.36,14.09,14.53\nefficientformerv2_s1,224,1251.14,818.442,1024,6.19,0.67,7.66\ndensenet161,224,1249.24,819.689,1024,28.68,7.79,11.06\nresnet152s,224,1245.59,822.091,1024,60.32,12.92,24.96\nvit_mediumd_patch16_reg4_gap_256,256,1243.97,823.162,1024,64.11,17.87,37.57\nnest_small_jx,224,1243.46,823.5,1024,38.35,10.35,40.04\nsequencer2d_m,224,1232.89,830.559,1024,38.31,6.55,14.26\nvit_relpos_base_patch16_rpn_224,224,1232.22,831.004,1024,86.41,17.51,24.97\nrepvgg_b2,224,1228.43,833.576,1024,89.02,20.45,12.9\nswin_small_patch4_window7_224,224,1226.82,834.669,1024,49.61,8.77,27.47\nlegacy_seresnet152,224,1220.81,838.774,1024,66.82,11.33,22.08\nefficientnet_b3_g8_gn,288,1217.35,630.868,768,14.25,2.59,23.35\neca_nfnet_l1,256,1214.94,842.831,1024,41.41,9.62,22.04\nmobilenetv4_hybrid_large,384,1210.73,845.759,1024,37.76,7.77,34.52\nswinv2_tiny_window8_256,256,1208.62,847.237,1024,28.35,5.96,24.57\nseresnet152,224,1205.5,849.426,1024,66.82,11.57,22.61\ninception_v4,299,1197.2,855.32,1024,42.68,12.28,15.09\nrepvgg_b3g4,224,1194.99,856.9,1024,83.83,17.89,15.1\nfastvit_sa24,256,1192.29,858.839,1024,21.55,3.8,24.32\nresnetv2_101,288,1191.07,859.716,1024,44.54,12.94,26.83\nefficientnet_lite4,380,1188.14,323.185,384,13.01,4.04,45.66\nxcit_small_24_p16_224,224,1187.59,862.241,1024,47.67,9.1,23.64\nmvitv2_small_cls,224,1178.05,869.223,1024,34.87,7.04,28.17\ndm_nfnet_f0,256,1174.19,872.081,1024,71.49,12.62,18.05\ntnt_s_patch16_224,224,1171.01,874.446,1024,23.76,5.24,24.37\nregnetx_160,224,1167.74,876.901,1024,54.28,15.99,25.52\nmvitv2_small,224,1166.57,877.775,1024,34.87,7.0,28.08\nresnet101,288,1162.74,880.669,1024,44.55,12.95,26.83\nxception41,299,1160.72,441.096,512,26.97,9.28,39.86\ndavit_base,224,1158.29,663.034,768,87.
95,15.51,40.66\nvgg19_bn,224,1153.27,887.9,1024,143.68,19.66,14.86\nconvnext_small,288,1152.13,888.774,1024,50.22,14.39,35.65\nvit_base_patch16_reg4_gap_256,256,1148.57,891.534,1024,86.62,23.5,33.89\ncoat_tiny,224,1146.04,893.499,1024,5.5,4.35,27.2\npvt_v2_b4,224,1144.8,894.466,1024,62.56,10.14,53.74\ncait_xxs36_224,224,1141.88,896.758,1024,17.3,3.77,30.34\nnf_regnet_b4,384,1139.48,898.646,1024,30.21,4.7,28.61\ntresnet_xl,224,1139.37,898.735,1024,78.44,15.2,15.34\ncrossvit_base_240,240,1129.38,906.68,1024,105.03,21.22,36.33\nvit_small_r26_s32_384,384,1128.5,907.387,1024,36.47,10.43,29.85\nvit_base_patch16_siglip_gap_256,256,1126.79,908.765,1024,85.84,23.13,33.23\ndla102x2,224,1125.59,909.733,1024,41.28,9.34,29.91\nresnet152d,256,1122.78,912.013,1024,60.21,15.41,30.51\nvit_base_patch16_siglip_256,256,1115.06,918.328,1024,92.93,23.44,33.63\nhiera_base_abswin_256,256,1108.56,923.708,1024,51.27,12.46,40.7\nwide_resnet50_2,288,1108.54,923.726,1024,68.88,18.89,23.81\neva02_base_patch16_clip_224,224,1105.74,926.062,1024,86.26,17.62,26.32\ntf_efficientnet_lite4,380,1103.18,348.074,384,13.01,4.04,45.66\nvit_large_r50_s32_224,224,1102.83,928.511,1024,328.99,19.58,24.41\nefficientnetv2_s,384,1096.79,933.624,1024,21.46,8.44,35.77\nvit_betwixt_patch16_rope_reg4_gap_256,256,1096.33,934.014,1024,60.23,16.52,28.24\nvgg13_bn,224,1093.1,936.772,1024,133.05,11.33,12.25\nefficientvit_l2,288,1092.38,937.387,1024,63.71,11.51,32.19\nhrnet_w18_ssld,288,1090.28,939.198,1024,21.3,7.14,26.96\nconvnext_tiny,384,1087.48,706.209,768,28.59,13.14,39.48\npvt_v2_b5,224,1085.62,943.223,1024,81.96,11.76,50.92\nsamvit_base_patch16_224,224,1070.47,956.58,1024,86.46,17.54,24.54\nregnety_160,224,1070.01,956.986,1024,83.59,15.96,23.04\ntf_efficientnetv2_s,384,1069.37,957.558,1024,21.46,8.44,35.77\ncs3se_edgenet_x,320,1057.79,968.048,1024,50.72,18.01,20.21\nresnetaa101d,288,1046.05,978.908,1024,44.57,15.07,29.03\nregnetz_d32,320,1039.68,984.905,1024,27.58,9.33,37.08\nefficientnetv2_rw_s,384,1037.06,987.392,
1024,23.94,8.72,38.03\nmobilenetv4_conv_aa_large,448,1034.18,742.609,768,32.59,9.63,43.94\nregnetz_d8,320,1031.64,992.584,1024,23.37,6.19,37.08\nseresnet101,288,1031.63,992.588,1024,49.33,12.95,26.87\nvit_small_patch8_224,224,1031.53,992.686,1024,21.67,22.44,80.84\nefficientvit_b3,288,1028.85,746.455,768,48.65,6.58,44.2\nregnetv_064,288,1024.98,999.029,1024,30.58,10.55,27.11\nregnety_064,288,1024.29,999.702,1024,30.58,10.56,27.11\npoolformer_m36,224,1023.88,1000.107,1024,56.17,8.8,22.02\nvgg16,224,1021.7,1002.242,1024,138.36,15.47,13.56\nwide_resnet101_2,224,1020.62,1003.3,1024,126.89,22.8,21.23\nrdnet_base,224,1017.24,754.976,768,87.45,15.4,31.14\ndpn98,224,1014.12,1009.734,1024,61.57,11.73,25.2\nresnet200,224,1013.79,1010.054,1024,64.67,15.07,32.19\nconvnextv2_small,224,1013.29,1010.559,1024,50.32,8.71,21.56\nconvnextv2_tiny,288,1011.72,759.092,768,28.64,7.39,22.21\nregnetz_040,320,1006.41,508.726,512,27.12,6.35,37.78\nconvmixer_1024_20_ks9_p14,224,1004.68,1019.218,1024,24.38,5.55,5.51\nvit_base_patch16_plus_240,240,1004.42,1019.485,1024,117.56,27.41,33.08\nhgnetv2_b6,224,1002.06,1021.881,1024,75.26,16.88,21.23\nregnety_080,288,1001.83,1022.121,1024,39.18,13.22,29.69\necaresnet101d,288,1000.75,1023.221,1024,44.57,13.35,28.19\nswinv2_cr_small_224,224,1000.55,1023.43,1024,49.7,9.07,50.27\nregnetz_040_h,320,1000.36,511.803,512,28.94,6.43,37.94\nresnest101e,256,997.21,1026.848,1024,48.28,13.38,28.66\nconvnext_base,256,996.66,1027.418,1024,88.59,20.09,37.55\nvit_base_r50_s16_224,224,993.23,1030.967,1024,97.89,21.66,35.28\nresnetrs101,288,992.71,1031.506,1024,63.62,13.56,28.53\nresnetblur101d,288,992.25,1031.989,1024,44.57,15.07,29.65\nefficientnet_b3_g8_gn,320,990.46,775.388,768,14.25,3.2,28.83\nfocalnet_base_srf,224,989.98,1034.35,1024,88.15,15.28,35.01\nswinv2_cr_small_ns_224,224,989.16,1035.213,1024,49.7,9.08,50.27\nregnetz_b16_evos,288,988.59,776.854,768,9.74,2.36,16.43\nregnetz_c16_evos,256,986.94,778.15,768,13.49,2.48,16.57\nmaxvit_rmlp_small_rw_224,224,986.14,77
8.781,768,64.9,10.75,49.3\ninception_next_base,224,985.08,1039.5,1024,86.67,14.85,25.69\nseresnet152d,256,984.13,1040.502,1024,66.84,15.42,30.56\nresnetrs152,256,977.43,1047.637,1024,86.62,15.59,30.83\nresnet101d,320,975.08,1050.156,1024,44.57,16.48,34.77\ninception_resnet_v2,299,965.48,1060.586,1024,55.84,13.18,25.06\nmobilevitv2_150,384,965.44,265.153,256,10.59,9.2,54.25\nresnext101_64x4d,224,959.27,1067.461,1024,83.46,15.52,31.21\nresnext101_32x8d,224,954.55,1072.743,1024,88.79,16.48,31.21\nnfnet_f1,224,951.44,1076.249,1024,132.63,17.87,22.94\nxception65p,299,950.53,538.627,512,39.82,13.91,52.48\neva02_small_patch14_336,336,946.95,1081.358,1024,22.13,15.48,54.33\nresnext101_32x4d,288,940.09,1089.251,1024,44.18,13.24,35.09\nefficientnet_b4,384,937.43,409.62,384,19.34,4.51,50.04\ncoat_lite_medium,224,936.83,1093.037,1024,44.57,9.81,40.06\nfocalnet_base_lrf,224,931.65,1099.111,1024,88.75,15.43,38.13\nvit_mediumd_patch16_rope_reg1_gap_256,256,927.19,1104.398,1024,63.95,17.65,37.02\nrepvgg_b3,224,927.07,1104.549,1024,123.09,29.16,15.1\nvit_relpos_base_patch16_plus_240,240,923.53,1108.778,1024,117.38,27.3,34.33\nefficientformer_l7,224,921.36,1111.391,1024,82.23,10.17,24.45\nxcit_tiny_24_p16_384,384,918.06,1115.38,1024,12.12,6.87,34.29\ncoatnet_2_rw_224,224,914.51,559.852,512,73.87,15.09,49.22\nhrnet_w40,224,911.04,1123.981,1024,57.56,12.75,25.29\nefficientnetv2_m,320,906.73,1129.323,1024,54.14,11.01,39.97\ncait_s24_224,224,906.1,1130.105,1024,46.92,9.35,40.58\nmaxvit_small_tf_224,224,903.75,566.52,512,68.93,11.66,53.17\ncoat_mini,224,901.59,1135.767,1024,10.34,6.82,33.68\nswin_s3_small_224,224,900.95,852.422,768,49.74,9.43,37.84\nseresnext101_64x4d,224,899.36,1138.577,1024,88.23,15.53,31.25\npoolformerv2_s36,224,899.23,1138.739,1024,30.79,5.01,15.82\nvolo_d2_224,224,898.78,1139.307,1024,58.68,14.34,41.34\nseresnext101_32x8d,224,896.76,1141.857,1024,93.57,16.48,31.25\nmobilenetv4_conv_aa_large,480,895.59,857.521,768,32.59,11.05,50.45\ngmlp_b16_224,224,892.77,1146.973,10
24,73.08,15.78,30.21\nmobilenetv4_hybrid_large,448,892.61,860.386,768,37.76,10.74,48.61\nnest_base,224,891.48,1148.639,1024,67.72,17.96,53.39\nregnetz_e8,256,885.16,1156.837,1024,57.7,9.91,40.94\nnest_base_jx,224,884.72,1157.412,1024,67.72,17.96,53.39\nresnetv2_50d_evos,288,884.63,1157.531,1024,25.59,7.15,19.7\nseresnext101d_32x8d,224,880.67,1162.73,1024,93.59,16.72,32.05\nswin_base_patch4_window7_224,224,877.35,1167.139,1024,87.77,15.47,36.63\ncoatnet_rmlp_2_rw_224,224,872.68,586.689,512,73.88,15.18,54.78\ntf_efficientnet_b4,380,871.25,440.737,384,19.34,4.49,49.49\nlevit_384_s8,224,867.41,590.255,512,39.12,9.98,35.86\nvit_base_patch16_rope_reg1_gap_256,256,867.28,1180.694,1024,86.43,23.22,33.39\ntiny_vit_21m_384,384,864.15,592.481,512,21.23,13.77,77.83\ngcvit_small,224,861.28,1188.917,1024,51.09,8.57,41.61\nconvnextv2_nano,384,858.36,596.473,512,15.62,7.22,24.61\ncrossvit_15_dagger_408,408,856.8,1195.136,1024,28.5,21.45,95.05\nseresnext101_32x4d,288,853.89,1199.198,1024,48.96,13.25,35.12\ncoatnet_2_224,224,853.24,600.055,512,74.68,16.5,52.67\nxception65,299,843.97,606.646,512,39.92,13.96,52.48\nmaxxvit_rmlp_small_rw_256,256,836.98,917.574,768,66.01,14.67,58.38\ntwins_svt_large,224,833.5,1228.541,1024,99.27,15.15,35.1\nresnet50x4_clip_gap,288,832.97,1229.323,1024,65.62,19.57,34.11\nlevit_conv_384_s8,224,832.4,615.075,512,39.12,9.98,35.86\nseresnextaa101d_32x8d,224,831.45,1231.569,1024,93.59,17.25,34.16\nmvitv2_base_cls,224,829.71,1234.159,1024,65.44,10.23,40.65\nhgnet_base,224,828.09,927.42,768,71.58,25.14,15.47\nhrnet_w44,224,826.04,1239.642,1024,67.06,14.94,26.92\nxcit_medium_24_p16_224,224,825.94,1239.785,1024,84.4,16.13,31.71\nresnet200d,256,824.85,1241.424,1024,64.69,20.0,43.09\neva02_base_patch14_224,224,824.57,1241.85,1024,85.76,23.22,36.55\nvit_medium_patch16_gap_384,384,824.05,1242.632,1024,39.03,26.08,67.54\nfastvit_sa36,256,823.61,1243.29,1024,31.53,5.64,34.61\ndm_nfnet_f1,224,823.18,1243.945,1024,132.63,17.87,22.94\ncaformer_s36,224,822.12,1245.551,1024,
39.3,8.0,37.53\ntresnet_m,448,818.79,1250.62,1024,31.39,22.99,29.21\nmvitv2_base,224,817.05,1253.276,1024,51.47,10.16,40.5\nresnet152,288,811.97,1261.117,1024,60.19,19.11,37.28\nswinv2_base_window12_192,192,804.65,1272.585,1024,109.28,11.9,39.72\nmobilevitv2_175,384,803.11,318.749,256,14.25,12.47,63.29\nsequencer2d_l,224,800.9,1278.551,1024,54.3,9.74,22.12\nefficientnetv2_rw_m,320,796.23,1286.05,1024,53.24,12.72,47.14\nhrnet_w48_ssld,224,792.32,1292.387,1024,77.47,17.34,28.56\nfastvit_mci1,256,791.46,1293.799,1024,21.54,4.72,32.84\nhrnet_w48,224,790.96,1294.607,1024,77.47,17.34,28.56\nresnet50x4_clip,288,789.83,1296.468,1024,87.14,21.35,35.27\nconvnext_base,288,788.08,1299.342,1024,88.59,25.43,47.53\nswinv2_tiny_window16_256,256,783.75,653.259,512,28.35,6.68,39.02\nregnety_120,288,782.19,981.843,768,51.82,20.06,35.34\npoolformer_m48,224,772.39,1325.737,1024,73.47,11.59,29.17\nconvformer_s36,224,771.32,1327.576,1024,40.01,7.67,30.5\nmaxvit_rmlp_small_rw_256,256,770.25,997.069,768,64.9,14.15,66.09\nxcit_small_12_p16_384,384,769.74,1330.306,1024,26.25,14.14,36.51\nresnetv2_50x1_bit,448,763.06,670.968,512,25.55,16.62,44.46\ntnt_b_patch16_224,224,751.21,1363.12,1024,65.41,14.09,39.01\nnextvit_small,384,743.78,1376.73,1024,31.76,17.26,57.14\ndpn131,224,742.22,1379.624,1024,79.25,16.09,32.97\nswinv2_small_window8_256,256,741.58,1380.817,1024,49.73,11.58,40.14\neca_nfnet_l1,320,737.99,1387.537,1024,41.41,14.92,34.42\nconvit_base,224,737.69,1388.094,1024,86.54,17.52,31.77\nswinv2_cr_small_ns_256,256,735.25,1392.717,1024,49.7,12.07,76.21\nnf_regnet_b5,384,732.21,1398.493,1024,49.74,7.95,42.9\nconvnextv2_base,224,728.97,1053.529,768,88.72,15.38,28.75\nswin_s3_base_224,224,726.96,1408.586,1024,71.13,13.69,48.26\nvit_so150m_patch16_reg4_gap_256,256,726.07,1410.317,1024,134.13,36.75,53.21\nswinv2_cr_base_224,224,719.8,1422.605,1024,87.88,15.86,59.66\nseresnet152,288,718.93,1424.325,1024,66.82,19.11,37.34\nvit_so150m_patch16_reg4_map_256,256,718.36,1425.463,1024,141.48,37.18,53.68
\nvitamin_base_224,224,715.12,715.949,512,87.72,22.68,52.77\necaresnet200d,256,715.01,1432.144,1024,64.69,20.0,43.15\nseresnet200d,256,714.8,1432.561,1024,71.86,20.01,43.15\nswinv2_cr_base_ns_224,224,714.1,1433.953,1024,87.88,15.86,59.66\nresnetrs200,256,710.99,1440.207,1024,93.21,20.18,43.42\nxcit_nano_12_p8_384,384,706.26,1449.87,1024,3.05,6.34,46.08\nconvnext_large,224,700.67,1461.443,1024,197.77,34.4,43.13\ndensenet264d,224,698.28,1466.454,1024,72.74,13.57,14.0\nmobilenetv4_conv_aa_large,544,697.25,550.726,384,32.59,14.19,64.79\nresnet152d,320,692.74,1478.183,1024,60.21,24.08,47.67\ncoat_small,224,688.09,1488.162,1024,21.69,12.61,44.25\nxcit_tiny_24_p8_224,224,688.06,1488.225,1024,12.11,9.21,45.39\nsenet154,224,671.82,1524.195,1024,115.09,20.77,38.69\nmaxxvitv2_rmlp_base_rw_224,224,671.73,1143.308,768,116.09,24.2,62.77\nlegacy_senet154,224,671.25,1525.511,1024,115.09,20.77,38.69\nefficientvit_l3,224,665.72,1538.173,1024,246.04,27.62,39.16\nmobilevitv2_200,384,664.44,385.277,256,18.45,16.24,72.34\ndpn107,224,662.3,1546.122,1024,86.92,18.38,33.46\nefficientformerv2_s2,224,659.15,1553.507,1024,12.71,1.27,11.77\nxception71,299,656.41,779.991,512,42.34,18.09,69.92\nregnety_160,288,653.9,1174.472,768,83.59,26.37,38.07\nconvnext_small,384,651.61,1178.605,768,50.22,25.58,63.37\nregnety_320,224,650.19,1574.91,1024,145.05,32.34,30.26\nvolo_d3_224,224,645.72,1585.818,1024,86.33,20.78,60.09\nfastvit_ma36,256,645.47,1586.419,1024,44.07,7.88,41.09\nregnetz_d8_evos,256,644.05,1589.928,1024,23.46,4.5,24.92\nconvnext_base,320,637.05,1205.552,768,88.59,31.39,58.68\npoolformerv2_m36,224,635.87,1610.368,1024,56.08,8.81,22.02\ndavit_large,224,635.02,1209.401,768,196.81,34.6,60.99\ngcvit_base,224,632.31,1619.451,1024,90.32,14.87,55.48\nvit_betwixt_patch16_reg4_gap_384,384,627.16,1632.736,1024,60.6,39.71,85.28\ntf_efficientnetv2_m,384,625.53,1636.99,1024,54.14,15.85,57.52\nregnetz_c16_evos,320,625.03,819.15,512,13.49,3.86,25.88\nhgnetv2_b6,288,616.55,1245.628,768,75.26,27.9,35.09\nhrn
et_w64,224,612.17,1672.733,1024,128.06,28.97,35.09\nseresnet152d,320,607.38,1685.905,1024,66.84,24.09,47.72\nvit_large_patch32_384,384,604.74,1693.288,1024,306.63,45.31,43.86\nresnetrs152,320,603.9,1695.633,1024,86.62,24.34,48.14\nresnet200,288,603.31,1697.296,1024,64.67,24.91,53.21\nefficientvit_l2,384,599.94,1280.123,768,63.71,20.45,57.01\ncrossvit_18_dagger_408,408,597.37,1714.173,1024,44.61,32.47,124.87\ncaformer_m36,224,590.66,1733.627,1024,56.2,13.29,50.48\nregnetx_320,224,589.67,1736.551,1024,107.81,31.81,36.3\nxcit_small_12_p8_224,224,588.09,1741.223,1024,26.21,18.69,47.21\nresnext101_64x4d,288,587.4,1743.275,1024,83.46,25.66,51.59\nfastvit_mci2,256,583.9,1753.716,1024,35.82,7.91,43.34\nresnetv2_50x3_bit,224,582.22,1319.082,768,217.32,37.06,33.34\nlevit_conv_512_s8,224,573.27,669.829,384,74.05,21.82,52.28\nrdnet_large,224,571.45,895.96,512,186.27,34.74,46.67\nlevit_512_s8,224,570.5,448.717,256,74.05,21.82,52.28\nconvnextv2_tiny,384,570.14,673.511,384,28.64,13.14,39.48\nefficientnet_b5,416,567.51,451.086,256,30.39,8.27,80.68\nmaxvit_rmlp_base_rw_224,224,564.83,1359.676,768,116.14,23.15,92.64\nnextvit_base,384,564.24,1814.832,1024,44.82,24.64,73.95\nseresnet269d,256,557.61,1836.401,1024,113.67,26.59,53.6\nconvformer_m36,224,556.43,1840.292,1024,57.05,12.89,42.05\nseresnext101_32x8d,288,545.61,1876.8,1024,93.57,27.24,51.63\nvit_mediumd_patch16_reg4_gap_384,384,545.01,1878.868,1024,64.27,43.67,113.51\nresnetrs270,256,542.54,1887.403,1024,129.86,27.06,55.84\nefficientnetv2_m,416,542.39,1887.938,1024,54.14,18.6,67.5\nefficientvit_l3,256,537.65,1428.432,768,246.04,36.06,50.98\nnfnet_f2,256,537.3,1905.805,1024,193.78,33.76,41.85\nseresnext101d_32x8d,288,536.93,1907.142,1024,93.59,27.64,52.95\nconvnext_large_mlp,256,535.86,1433.184,768,200.13,44.94,56.33\nvolo_d1_384,384,534.71,1915.039,1024,26.78,22.75,108.55\nswinv2_base_window8_256,256,531.77,1925.628,1024,87.92,20.37,52.59\nhalonet_h1,256,529.72,483.267,256,8.1,3.0,51.17\nresnext101_32x16d,224,523.52,1955.962,102
4,194.03,36.27,51.18\neca_nfnet_l2,320,520.54,1967.185,1024,56.72,20.95,47.43\necaresnet200d,288,520.38,1967.761,1024,64.69,25.31,54.59\nseresnet200d,288,520.31,1968.044,1024,71.86,25.32,54.6\nmixer_l16_224,224,519.58,1970.82,1024,208.2,44.6,41.69\ncaformer_s18,384,518.39,987.656,512,26.34,13.42,77.34\nregnetz_e8,320,511.76,2000.937,1024,57.7,15.46,63.94\nresnet200d,320,510.27,2006.761,1024,64.69,31.25,67.33\nvit_base_patch16_384,384,509.98,2007.91,1024,86.86,55.54,101.56\ndeit_base_patch16_384,384,508.8,2012.559,1024,86.86,55.54,101.56\nvit_base_patch16_clip_384,384,508.48,2013.827,1024,86.86,55.54,101.56\ndeit_base_distilled_patch16_384,384,508.27,2014.657,1024,87.63,55.65,101.82\nmaxvit_base_tf_224,224,507.09,1009.673,512,119.47,24.04,95.01\nefficientnet_b5,448,506.95,504.97,256,30.39,9.59,93.56\nhgnet_base,288,502.62,1018.647,512,71.58,41.55,25.57\nswin_large_patch4_window7_224,224,500.32,1535.009,768,196.53,34.53,54.94\nseresnextaa101d_32x8d,288,496.57,2062.142,1024,93.59,28.51,56.44\nconvformer_s18,384,496.26,1031.706,512,26.77,11.63,46.49\nvit_base_patch16_18x2_224,224,493.7,2074.102,1024,256.73,52.51,71.38\ncoatnet_3_rw_224,224,492.8,519.468,256,181.81,33.44,73.83\ncoatnet_rmlp_3_rw_224,224,492.67,519.608,256,165.15,33.56,79.47\nswinv2_small_window16_256,256,489.28,1046.418,512,49.73,12.82,66.29\nvit_small_patch14_dinov2,518,488.22,1573.045,768,22.06,46.76,198.79\nvit_large_patch16_224,224,486.7,2103.941,1024,304.33,61.6,63.52\ndeit3_base_patch16_384,384,483.16,2119.352,1024,86.88,55.54,101.56\neva_large_patch14_196,196,482.51,2122.231,1024,304.14,61.57,63.52\nhrnet_w48_ssld,288,481.65,2125.993,1024,77.47,28.66,47.21\nswinv2_large_window12_192,192,480.44,1065.672,512,228.77,26.17,56.53\npoolformerv2_m48,224,479.37,2136.125,1024,73.35,11.59,29.17\nvit_small_patch14_reg4_dinov2,518,477.58,2144.131,1024,22.06,46.95,199.77\nnf_regnet_b5,456,475.93,1613.682,768,49.74,11.7,61.95\nxcit_large_24_p16_224,224,474.28,2159.06,1024,189.1,35.86,47.27\nnfnet_f1,320,473.95,
2160.551,1024,132.63,35.97,46.77\nhiera_large_224,224,472.69,2166.325,1024,213.74,40.34,83.37\ncoatnet_3_224,224,472.05,542.3,256,166.97,36.56,79.01\nbeit_large_patch16_224,224,471.9,2169.953,1024,304.43,61.6,63.52\nbeitv2_large_patch16_224,224,471.02,2173.984,1024,304.43,61.6,63.52\nefficientnetv2_rw_m,416,467.89,1641.398,768,53.24,21.49,79.62\nbeit_base_patch16_384,384,466.33,2195.871,1024,86.74,55.54,101.56\ndeit3_large_patch16_224,224,466.0,2197.402,1024,304.37,61.6,63.52\nresnetv2_101x1_bit,448,465.55,1099.773,512,44.54,31.65,64.93\nvit_base_patch16_siglip_gap_384,384,464.63,2203.912,1024,86.09,55.43,101.3\ndm_nfnet_f2,256,462.44,2214.346,1024,193.78,33.76,41.85\nmaxvit_tiny_tf_384,384,461.19,555.072,256,30.98,17.53,123.42\nvit_base_patch16_siglip_384,384,459.33,2229.343,1024,93.18,56.12,102.2\nnextvit_large,384,453.31,2258.917,1024,57.87,32.03,90.76\nxcit_tiny_12_p8_384,384,445.81,2296.929,1024,6.71,14.13,69.14\nconvnext_base,384,444.68,1151.37,512,88.59,45.21,84.49\nresnetv2_152x2_bit,224,443.47,2309.045,1024,236.34,46.95,45.11\nconvnext_xlarge,224,442.0,1737.54,768,350.2,60.98,57.5\nconvnextv2_base,288,441.99,1158.384,512,88.72,25.43,47.53\nresnetrs200,320,441.67,2318.444,1024,93.21,31.51,67.81\nefficientformerv2_l,224,430.68,2377.646,1024,26.32,2.59,18.54\ntiny_vit_21m_512,512,429.7,893.628,384,21.27,27.02,177.93\nconvnextv2_large,224,427.59,1197.409,512,197.96,34.4,43.13\nswinv2_cr_tiny_384,384,426.11,600.773,256,28.33,15.34,161.01\ntf_efficientnet_b5,456,425.94,601.015,256,30.39,10.46,98.86\nflexivit_large,240,425.42,2407.038,1024,304.36,70.99,75.39\ncaformer_b36,224,424.34,1809.846,768,98.75,23.22,67.3\nconvnext_large,288,423.15,1209.95,512,197.77,56.87,71.29\nmaxxvitv2_rmlp_large_rw_224,224,421.82,1820.649,768,215.42,44.14,87.15\nswinv2_cr_large_224,224,421.57,1821.761,768,196.68,35.1,78.42\nseresnextaa101d_32x8d,320,419.27,1831.76,768,93.59,35.19,69.67\ntf_efficientnetv2_m,480,411.3,1867.24,768,54.14,24.76,89.84\nxcit_small_24_p16_384,384,410.23,2496.1
54,1024,47.67,26.72,68.58\ndavit_huge,224,408.33,1253.884,512,348.92,61.23,81.32\nregnetz_d8_evos,320,408.1,1881.86,768,23.46,7.03,38.92\ntresnet_l,448,406.59,2518.499,1024,55.99,43.59,47.56\nseresnet269d,288,404.46,2531.781,1024,113.67,33.65,67.81\ndm_nfnet_f1,320,403.9,2535.248,1024,132.63,35.97,46.77\nconvformer_b36,224,402.48,1908.173,768,99.88,22.69,56.06\nregnety_160,384,385.7,995.583,384,83.59,46.87,67.67\nvit_large_r50_s32_384,384,380.77,2689.262,1024,329.09,57.43,76.52\nvolo_d4_224,224,377.23,2714.52,1024,192.96,44.34,80.22\neca_nfnet_l2,384,365.9,2098.95,768,56.72,30.05,68.28\nregnety_640,224,364.69,2105.867,768,281.38,64.16,42.5\nvit_base_patch8_224,224,364.14,2812.13,1024,86.58,78.22,161.69\nvit_large_patch14_224,224,359.91,2845.139,1024,304.2,81.08,88.79\nvit_large_patch14_clip_224,224,359.73,2846.6,1024,304.2,81.08,88.79\nswinv2_base_window16_256,256,357.24,1074.897,384,87.92,22.02,84.71\nswinv2_base_window12to16_192to256,256,357.13,1075.233,384,87.92,22.02,84.71\nvit_large_patch16_siglip_gap_256,256,354.73,2886.676,1024,303.36,80.8,88.34\nvit_large_patch16_siglip_256,256,352.45,2905.34,1024,315.96,81.34,88.88\nmaxvit_large_tf_224,224,347.37,1105.422,384,211.79,43.68,127.35\nresnest200e,320,346.52,2955.04,1024,70.2,35.69,82.78\necaresnet269d,320,346.03,2959.242,1024,102.09,41.53,83.69\nefficientnetv2_l,384,345.28,2965.659,1024,118.52,36.1,101.16\nconvnext_large_mlp,320,342.48,1494.96,512,200.13,70.21,88.02\ntf_efficientnetv2_l,384,341.98,2994.345,1024,118.52,36.1,101.16\nefficientvit_l3,320,341.33,1500.013,512,246.04,56.32,79.34\nconvmixer_768_32,224,341.12,3001.86,1024,21.11,19.55,25.95\ninception_next_base,384,340.77,1502.454,512,86.67,43.64,75.48\neca_nfnet_l3,352,334.48,3061.425,1024,72.04,32.57,73.12\nresnetv2_101x3_bit,224,334.42,2296.52,768,387.93,71.23,48.7\nvit_base_r50_s16_384,384,334.22,2297.893,768,98.95,67.43,135.03\nvit_large_patch14_clip_quickgelu_224,224,325.86,3142.415,1024,303.97,81.08,88.79\nrepvgg_d2se,320,322.97,3170.518,1024,133.3
3,74.57,46.82\ncoat_lite_medium_384,384,313.99,1630.619,512,44.57,28.73,116.7\nresnetrs350,288,312.34,3278.479,1024,163.96,43.67,87.09\nvit_large_patch14_xp_224,224,310.38,3299.14,1024,304.06,81.01,88.79\nnasnetalarge,331,307.5,1248.755,384,88.75,23.89,90.56\nxcit_small_24_p8_224,224,306.97,3335.811,1024,47.63,35.81,90.78\ntresnet_xl,448,297.76,2579.254,768,78.44,60.77,61.31\npnasnet5large,331,297.12,1292.401,384,86.06,25.04,92.89\nvolo_d2_384,384,296.76,3450.594,1024,58.87,46.17,184.51\nmaxvit_small_tf_384,384,291.09,659.588,192,69.02,35.87,183.65\nvitamin_large2_224,224,290.22,1764.162,512,333.58,75.05,112.83\nvitamin_large_224,224,290.09,1764.937,512,333.32,75.05,112.83\necaresnet269d,352,288.69,3547.034,1024,102.09,50.25,101.25\ncoatnet_4_224,224,287.2,891.364,256,275.43,62.48,129.26\nxcit_medium_24_p16_384,384,284.24,3602.53,1024,84.4,47.39,91.64\ncoatnet_rmlp_2_rw_384,384,282.99,678.457,192,73.88,47.69,209.43\ncait_xxs24_384,384,280.88,3645.694,1024,12.03,9.63,122.66\nresnetrs270,352,280.56,3649.828,1024,129.86,51.13,105.48\ncaformer_s36,384,275.88,1855.885,512,39.3,26.08,150.33\nresnet50x16_clip_gap,384,269.91,1896.893,512,136.2,70.32,100.64\nnfnet_f2,352,268.71,3810.855,1024,193.78,63.22,79.06\nconvnext_xlarge,288,267.69,1912.637,512,350.2,100.8,95.05\nefficientnet_b6,528,265.01,482.998,128,43.04,19.4,167.39\nconvformer_s36,384,263.97,1939.565,512,40.01,22.54,89.62\neva02_large_patch14_224,224,260.65,3928.659,1024,303.27,81.15,97.2\nswinv2_cr_small_384,384,260.05,984.414,256,49.7,29.7,298.03\nmaxvit_tiny_tf_512,512,259.65,739.436,192,31.05,33.49,257.59\nconvnextv2_large,288,259.46,986.639,256,197.96,56.87,71.29\nresnet50x16_clip,384,258.3,1982.153,512,167.33,74.9,103.54\neva02_large_patch14_clip_224,224,256.99,3984.533,1024,304.11,81.18,97.2\nmvitv2_large_cls,224,256.7,2991.779,768,234.58,42.17,111.69\ntf_efficientnet_b6,528,254.23,503.468,128,43.04,19.4,167.39\nvit_so400m_patch14_siglip_gap_224,224,251.8,4066.704,1024,412.44,109.57,106.13\nresnext101_32x32d
,224,251.59,2035.02,512,468.53,87.29,91.12\nnfnet_f3,320,250.99,4079.757,1024,254.92,68.77,83.93\nvit_so400m_patch14_siglip_224,224,250.86,4082.015,1024,427.68,110.26,106.73\nvit_base_patch16_siglip_gap_512,512,250.3,2045.523,512,86.43,107.0,246.15\nconvnextv2_base,384,249.33,1026.75,256,88.72,45.21,84.49\nmvitv2_large,224,249.02,2056.082,512,217.99,43.87,112.02\nvit_base_patch16_siglip_512,512,247.63,2067.618,512,93.52,108.22,247.74\nvolo_d5_224,224,246.42,4155.464,1024,295.46,72.4,118.11\nefficientnetv2_xl,384,242.5,4222.705,1024,208.12,52.81,139.2\nconvnext_large,384,238.93,1607.182,384,197.77,101.1,126.74\nconvnext_large_mlp,384,238.86,1607.59,384,200.13,101.11,126.74\ndm_nfnet_f2,352,235.33,3263.538,768,193.78,63.22,79.06\nxcit_tiny_24_p8_384,384,233.44,4386.633,1024,12.11,27.05,132.95\nswin_base_patch4_window12_384,384,232.72,1100.045,256,87.9,47.19,134.78\nefficientnetv2_l,480,231.29,2213.666,512,118.52,56.4,157.99\ntf_efficientnetv2_xl,384,229.94,4453.261,1024,208.12,52.81,139.2\ntf_efficientnetv2_l,480,229.17,2234.12,512,118.52,56.4,157.99\nresnetrs420,320,226.89,4513.089,1024,191.89,64.2,126.56\nvitamin_large_256,256,224.62,1709.558,384,333.38,99.0,154.99\nvitamin_large2_256,256,224.19,1712.805,384,333.64,99.0,154.99\nmaxxvitv2_rmlp_base_rw_384,384,219.77,1747.277,384,116.09,72.98,213.74\nswinv2_large_window12to16_192to256,256,218.3,1172.672,256,196.74,47.81,121.53\nregnety_320,384,216.75,1771.603,384,145.05,95.0,88.87\ndm_nfnet_f3,320,216.6,4727.524,1024,254.92,68.77,83.93\nresmlp_big_24_224,224,215.29,4756.307,1024,129.14,100.23,87.31\nefficientvit_l3,384,214.99,1786.092,384,246.04,81.08,114.02\nseresnextaa201d_32x8d,320,214.12,4782.305,1024,149.39,70.22,138.71\nxcit_medium_24_p8_224,224,211.66,4837.959,1024,84.32,63.53,121.23\nhiera_huge_224,224,211.18,2424.411,512,672.78,124.85,150.95\neca_nfnet_l3,448,206.31,2481.677,512,72.04,52.55,118.4\ncaformer_m36,384,198.38,1290.465,256,56.2,42.11,196.35\nxcit_small_12_p8_384,384,196.63,2603.882,512,26.21,54.92,
138.29\ncait_xs24_384,384,196.17,3914.895,768,26.67,19.28,183.98\nrdnet_large,384,195.11,984.048,192,186.27,102.09,137.13\neva02_base_patch14_448,448,193.77,2642.327,512,87.12,107.11,259.14\nmaxvit_xlarge_tf_224,224,190.88,1341.125,256,506.99,97.52,191.04\nfocalnet_huge_fl3,224,190.8,2683.391,512,745.28,118.26,104.8\nconvformer_m36,384,189.9,1348.049,256,57.05,37.87,123.56\nresnetrs350,384,188.33,5437.373,1024,163.96,77.59,154.74\ncait_xxs36_384,384,188.29,5438.504,1024,17.37,14.35,183.7\nswinv2_cr_base_384,384,185.69,1378.646,256,87.88,50.57,333.68\nvit_huge_patch14_224,224,183.94,5567.143,1024,630.76,167.4,139.41\nvit_huge_patch14_clip_224,224,183.87,5569.209,1024,632.05,167.4,139.41\nvit_base_patch14_dinov2,518,181.94,2814.035,512,86.58,151.71,397.58\nregnety_1280,224,181.49,2821.098,512,644.81,127.66,71.58\nmaxvit_rmlp_base_rw_384,384,181.15,2119.803,384,116.14,70.97,318.95\nvit_base_patch14_reg4_dinov2,518,180.64,2834.287,512,86.58,152.25,399.53\nvitamin_xlarge_256,256,180.27,1420.118,256,436.06,130.13,177.37\nswinv2_cr_huge_224,224,179.14,2143.597,384,657.83,115.97,121.08\nvit_huge_patch14_gap_224,224,178.21,5745.966,1024,630.76,166.73,138.74\ndeit3_huge_patch14_224,224,176.76,5793.252,1024,632.13,167.4,139.41\nconvnextv2_huge,224,175.22,1461.021,256,660.29,115.0,79.07\nsam2_hiera_tiny,896,173.99,367.818,64,26.85,99.86,384.63\nvit_huge_patch14_clip_quickgelu_224,224,169.48,6042.139,1024,632.08,167.4,139.41\nmaxvit_base_tf_384,384,163.19,1176.531,192,119.65,73.8,332.9\nvit_huge_patch14_xp_224,224,162.96,6283.755,1024,631.8,167.3,139.41\nmaxvit_small_tf_512,512,162.91,589.27,96,69.13,67.26,383.77\nxcit_large_24_p16_384,384,162.76,6291.334,1024,189.1,105.35,137.17\nresnest269e,416,162.7,3146.963,512,110.93,77.69,171.98\nvit_large_patch16_384,384,159.33,4820.268,768,304.72,191.21,270.24\nresnetv2_152x2_bit,384,158.13,2428.302,384,236.34,136.16,132.56\neva_large_patch14_336,336,157.87,4864.597,768,304.53,191.1,270.24\nvit_large_patch14_clip_336,336,157.73,4868.972,
768,304.53,191.11,270.24\nefficientnet_b7,600,155.34,618.001,96,66.35,38.33,289.94\nconvmixer_1536_20,224,153.83,6656.648,1024,51.63,48.68,33.03\ncoatnet_5_224,224,152.87,1255.992,192,687.47,145.49,194.24\nseresnextaa201d_32x8d,384,152.07,5050.135,768,149.39,101.11,199.72\nconvnext_xxlarge,256,152.04,2525.627,384,846.47,198.09,124.45\ndeit3_large_patch16_384,384,151.9,6741.438,1024,304.76,191.21,270.24\ncait_s24_384,384,150.93,3392.288,512,47.06,32.17,245.31\nconvnext_xlarge,384,150.77,1697.89,256,350.2,179.2,168.99\ndavit_giant,224,150.39,2553.341,384,1406.47,192.92,153.06\ntf_efficientnet_b7,600,150.32,638.606,96,66.35,38.33,289.94\nvolo_d3_448,448,150.3,3406.615,512,86.63,96.33,446.83\nnfnet_f3,416,148.76,3441.715,512,254.92,115.58,141.78\nvit_large_patch16_siglip_gap_384,384,148.02,5188.469,768,303.69,190.85,269.55\nvit_giant_patch16_gap_224,224,147.29,6952.477,1024,1011.37,202.46,139.26\nvit_large_patch16_siglip_384,384,147.15,5219.006,768,316.28,192.07,270.75\nsam2_hiera_small,896,147.12,435.006,64,33.95,123.99,442.63\nresnetv2_50x3_bit,448,147.08,1305.439,192,217.32,145.7,133.37\nbeit_large_patch16_384,384,146.92,6969.565,1024,305.0,191.21,270.24\nconvnextv2_large,384,146.46,1310.909,192,197.96,101.1,126.74\nresnetv2_152x4_bit,224,144.26,3549.137,512,936.53,186.9,90.22\nefficientnetv2_xl,512,144.24,3549.736,512,208.12,93.85,247.32\nvit_large_patch14_clip_quickgelu_336,336,143.45,5353.757,768,304.29,191.11,270.24\nnfnet_f4,384,143.08,3578.376,512,316.07,122.14,147.57\ntf_efficientnetv2_xl,512,143.04,3579.486,512,208.12,93.85,247.32\ncaformer_b36,384,142.66,1794.459,256,98.75,72.33,261.79\nconvformer_b36,384,137.39,1863.311,256,99.88,66.67,164.75\nswin_large_patch4_window12_384,384,136.15,940.102,128,196.74,104.08,202.16\nresnetrs420,416,133.29,7682.756,1024,191.89,108.45,213.79\ndm_nfnet_f3,416,128.51,3984.046,512,254.92,115.58,141.78\nregnety_640,384,127.6,2006.234,256,281.38,188.47,124.83\nvitamin_large2_336,336,125.13,1534.357,192,333.83,175.72,307.47\nvita
min_large_336,336,125.0,1536.041,192,333.57,175.72,307.47\ndm_nfnet_f4,384,124.43,4114.62,512,316.07,122.14,147.57\nfocalnet_huge_fl4,224,122.1,4193.181,512,686.46,118.9,113.34\nxcit_large_24_p8_224,224,120.47,4249.905,512,188.93,141.23,181.56\neva_giant_patch14_224,224,119.09,8598.315,1024,1012.56,267.18,192.64\neva_giant_patch14_clip_224,224,118.98,8606.661,1024,1012.59,267.18,192.64\nvit_giant_patch14_clip_224,224,116.63,8780.15,1024,1012.65,267.18,192.64\nvit_giant_patch14_224,224,116.59,8782.578,1024,1012.61,267.18,192.64\nresnetv2_152x2_bit,448,115.47,2217.012,256,236.34,184.99,180.43\nmaxvit_large_tf_384,384,114.31,1119.749,128,212.03,132.55,445.84\neva02_large_patch14_clip_336,336,113.18,6785.716,768,304.43,191.34,289.13\nswinv2_cr_large_384,384,111.86,1144.308,128,196.68,108.96,404.96\nmvitv2_huge_cls,224,111.11,3456.171,384,694.8,120.67,243.63\nconvnextv2_huge,288,106.25,1204.727,128,660.29,190.1,130.7\nxcit_small_24_p8_384,384,103.14,4964.351,512,47.63,105.24,265.91\nnfnet_f5,416,101.36,5051.238,512,377.21,170.71,204.56\nvitamin_xlarge_336,336,101.3,1895.3,192,436.06,230.18,347.33\nefficientnet_b8,672,101.16,948.953,96,87.41,63.48,442.89\ncait_s36_384,384,100.96,5071.452,512,68.37,47.99,367.4\ntf_efficientnet_b8,672,98.41,975.542,96,87.41,63.48,442.89\ndavit_base_fl,768,97.7,1310.076,128,90.37,190.32,530.15\nswinv2_base_window12to24_192to384,384,96.33,664.374,64,87.92,55.25,280.36\nfocalnet_large_fl3,384,94.31,4071.8,384,239.13,105.06,168.04\nresnet50x64_clip_gap,448,93.27,2744.574,256,365.03,253.96,233.22\nmaxvit_base_tf_512,512,91.79,1045.908,96,119.88,138.02,703.99\nfocalnet_large_fl4,384,91.43,4199.805,384,239.32,105.2,181.78\nresnet50x64_clip,448,90.11,2840.862,256,420.38,265.02,239.13\nvitamin_large2_384,384,88.05,2180.459,192,333.97,234.44,440.16\nvitamin_large_384,384,87.97,2182.658,192,333.71,234.44,440.16\ndm_nfnet_f5,416,87.96,5820.621,512,377.21,170.71,204.56\nvolo_d4_448,448,86.81,5898.056,512,193.41,197.13,527.35\nresnetv2_101x3_bit,448,85.7
5,2239.106,192,387.93,280.33,194.78\nnfnet_f4,512,81.27,4725.064,384,316.07,216.26,262.26\nvit_so400m_patch14_siglip_gap_384,384,81.27,6299.962,512,412.99,333.46,451.19\nvit_so400m_patch14_siglip_384,384,80.82,6335.238,512,428.23,335.4,452.89\nvit_huge_patch14_clip_336,336,79.74,6420.825,512,632.46,390.97,407.54\nsam2_hiera_base_plus,896,77.26,828.314,64,68.68,227.48,828.88\nnfnet_f6,448,75.59,6773.363,512,438.36,229.7,273.62\nbeit_large_patch16_512,512,75.53,6779.013,512,305.67,362.24,656.39\nvitamin_xlarge_384,384,75.14,1703.48,128,436.06,306.38,493.46\nxcit_medium_24_p8_384,384,71.62,5361.499,384,84.32,186.67,354.73\ndm_nfnet_f4,512,70.24,5466.713,384,316.07,216.26,262.26\nvit_gigantic_patch14_224,224,66.99,7643.451,512,1844.44,483.95,275.37\nvit_gigantic_patch14_clip_224,224,66.83,7661.682,512,1844.91,483.96,275.37\ndm_nfnet_f6,448,66.08,5811.539,384,438.36,229.7,273.62\nfocalnet_xlarge_fl3,384,65.69,3897.182,256,408.79,185.61,223.99\nregnety_1280,384,64.85,2960.449,192,644.81,374.99,210.2\nmaxvit_large_tf_512,512,64.1,998.492,64,212.33,244.75,942.15\nmaxvit_xlarge_tf_384,384,63.88,1502.728,96,475.32,292.78,668.76\nfocalnet_xlarge_fl4,384,63.84,4009.787,256,409.03,185.79,242.31\nvit_huge_patch14_clip_378,378,62.23,8227.954,512,632.68,503.79,572.79\neva02_large_patch14_448,448,61.3,8351.964,512,305.08,362.33,689.95\nconvnextv2_huge,384,61.13,1570.42,96,660.29,337.96,232.35\nswinv2_large_window12to24_192to384,384,61.02,786.614,48,196.74,116.15,407.83\ntf_efficientnet_l2,475,60.44,1588.24,96,480.31,172.11,609.89\nnfnet_f5,544,58.88,6521.51,384,377.21,290.97,349.71\nvit_large_patch14_dinov2,518,58.84,6526.064,384,304.37,507.15,1058.82\nvit_large_patch14_reg4_dinov2,518,58.65,6547.461,384,304.37,508.9,1064.02\nnfnet_f7,480,57.75,6649.626,384,499.5,300.08,355.86\nvolo_d5_448,448,57.67,4439.066,256,295.91,315.06,737.92\nvit_so400m_patch14_siglip_gap_448,448,57.55,6672.399,384,413.33,487.18,764.26\nvit_huge_patch14_clip_quickgelu_378,378,57.44,8913.629,512,632.68,503.79
,572.79\nvit_huge_patch16_gap_448,448,53.54,7172.285,384,631.67,544.7,636.83\neva_giant_patch14_336,336,51.78,9887.539,512,1013.01,620.64,550.67\nswinv2_cr_giant_224,224,51.7,3713.689,192,2598.76,483.85,309.15\ndm_nfnet_f5,544,51.67,4954.931,256,377.21,290.97,349.71\nswinv2_cr_huge_384,384,48.44,1321.195,64,657.94,352.04,583.18\nnfnet_f6,576,45.98,5567.566,256,438.36,378.69,452.2\nvolo_d5_512,512,44.13,5800.796,256,296.09,425.09,1105.37\nxcit_large_24_p8_384,384,40.73,6285.389,256,188.93,415.0,531.82\ndm_nfnet_f6,576,39.87,6420.075,256,438.36,378.69,452.2\nnfnet_f7,608,36.14,7083.228,256,499.5,480.39,570.85\nmaxvit_xlarge_tf_512,512,35.64,1346.799,48,475.77,534.14,1413.22\nregnety_2560,384,34.99,2743.264,96,1282.6,747.83,296.49\nconvnextv2_huge,512,34.27,1400.46,48,660.29,600.81,413.07\ndavit_huge_fl,768,34.25,1868.539,64,360.64,744.84,1060.3\ncait_m36_384,384,33.17,7717.041,256,271.22,173.11,734.81\nresnetv2_152x4_bit,480,32.37,3954.731,128,936.53,844.84,414.26\nsam2_hiera_large,1024,23.78,2018.069,48,212.15,907.48,2190.34\nefficientnet_l2,800,22.94,1394.635,32,480.31,479.12,1707.39\nsamvit_base_patch16,1024,22.72,528.058,12,89.67,486.43,1343.27\ntf_efficientnet_l2,800,22.52,1420.719,32,480.31,479.12,1707.39\nvit_giant_patch14_dinov2,518,17.71,7227.529,128,1136.48,1784.2,2757.89\nvit_giant_patch14_reg4_dinov2,518,17.61,7266.646,128,1136.48,1790.08,2771.21\neva_giant_patch14_560,560,16.99,7533.399,128,1014.45,1906.76,2577.17\nswinv2_cr_giant_384,384,14.93,2142.906,32,2598.76,1450.71,1394.86\ncait_m48_448,448,14.06,9102.618,128,356.46,329.41,1708.23\nsamvit_large_patch16,1024,10.4,769.501,8,308.28,1493.86,2553.78\nvit_so400m_patch14_siglip_gap_896,896,10.27,9344.729,96,416.87,2731.49,8492.88\nsamvit_huge_patch16,1024,6.35,944.29,6,637.03,2982.23,3428.16\n"
  },
  {
    "path": "results/benchmark-infer-amp-nchw-pt240-cu124-rtx4090-dynamo.csv",
    "content": "model,infer_img_size,infer_samples_per_sec,infer_step_time,infer_batch_size,param_count,infer_gmacs,infer_macts\ntest_efficientnet,160,188911.31,1.347,256,0.36,0.06,0.55\ntest_byobnet,160,178532.03,1.426,256,0.46,0.03,0.43\ntest_vit,160,155871.09,1.635,256,0.37,0.04,0.48\nlcnet_035,224,97850.77,2.608,256,1.64,0.03,1.04\ntf_mobilenetv3_small_minimal_100,224,93614.74,2.726,256,2.04,0.06,1.41\nlcnet_050,224,86660.68,2.946,256,1.88,0.05,1.26\nmobilenetv3_small_050,224,83744.91,3.049,256,1.59,0.03,0.92\ntinynet_e,106,83059.88,3.074,256,2.04,0.03,0.69\nmobilenetv3_small_075,224,82620.81,3.091,256,2.04,0.05,1.3\nmobilenetv3_small_100,224,79752.52,3.202,256,2.54,0.06,1.42\nmobilenetv4_conv_small,224,74963.65,3.407,256,3.77,0.19,1.97\ntf_mobilenetv3_small_075,224,68793.66,3.712,256,2.04,0.05,1.3\ntf_mobilenetv3_small_100,224,63377.08,4.031,256,2.54,0.06,1.42\ntinynet_d,152,62921.33,4.061,256,2.34,0.05,1.42\nlcnet_075,224,59549.33,4.29,256,2.36,0.1,1.99\nmnasnet_small,224,58424.52,4.373,256,2.03,0.07,2.16\nlevit_conv_128s,224,58296.58,4.383,256,7.78,0.31,1.88\nmobilenetv4_conv_small,256,54925.26,4.652,256,3.77,0.25,2.57\nlevit_128s,224,53157.92,4.807,256,7.78,0.31,1.88\nresnet10t,176,52732.02,4.846,256,5.44,0.7,1.51\nghostnet_050,224,52648.58,4.854,256,2.59,0.05,1.77\nregnetx_002,224,50885.47,5.023,256,2.68,0.2,2.16\nrepghostnet_050,224,49844.85,5.127,256,2.31,0.05,2.02\nresnet18,160,49720.41,5.14,256,11.69,0.93,1.27\nmobilenetv2_035,224,47284.89,5.404,256,1.68,0.07,2.86\nregnety_002,224,47175.78,5.418,256,3.16,0.2,2.17\nlcnet_100,224,46545.58,5.491,256,2.95,0.16,2.52\nmnasnet_050,224,45318.19,5.64,256,2.22,0.11,3.07\nrepghostnet_058,224,44344.16,5.764,256,2.55,0.07,2.59\nlevit_conv_128,224,43034.18,5.94,256,9.21,0.41,2.71\nvit_tiny_r_s16_p8_224,224,40905.75,6.25,256,6.34,0.44,2.06\nefficientvit_b0,224,40487.21,6.315,256,3.41,0.1,2.87\nregnetx_004,224,40146.51,6.368,256,5.16,0.4,3.14\nlevit_128,224,39948.51,6.4,256,9.21,0.41,2.71\nregnetx_004_tv,224,38547.28,6
.633,256,5.5,0.42,3.17\nrepghostnet_080,224,37919.74,6.742,256,3.28,0.1,3.22\nlevit_conv_192,224,37779.54,6.768,256,10.95,0.66,3.2\nsemnasnet_050,224,37506.25,6.817,256,2.08,0.11,3.44\nhgnetv2_b0,224,37356.26,6.845,256,6.0,0.33,2.12\nmobilenetv2_050,224,37215.01,6.87,256,1.97,0.1,3.64\ngernet_s,224,36311.59,7.041,256,8.17,0.75,2.65\nefficientvit_m2,224,36122.02,7.079,256,4.19,0.2,1.47\npit_ti_224,224,36065.64,7.089,256,4.85,0.7,6.19\npit_ti_distilled_224,224,35852.04,7.132,256,5.1,0.71,6.23\nefficientvit_m1,224,34221.22,7.472,256,2.98,0.17,1.33\nresnet10t,224,33377.1,7.661,256,5.44,1.1,2.43\nvit_small_patch32_224,224,32872.0,7.779,256,22.88,1.15,2.5\nefficientvit_m3,224,32739.6,7.81,256,6.9,0.27,1.62\nmixer_s32_224,224,32505.37,7.867,256,19.1,1.0,2.28\nlevit_192,224,31250.48,8.183,256,10.95,0.66,3.2\nedgenext_xx_small,256,31029.41,8.242,256,1.33,0.26,3.33\nxcit_nano_12_p16_224,224,30599.92,8.357,256,3.05,0.56,4.17\ntinynet_c,184,30555.73,8.369,256,2.46,0.11,2.87\nnf_regnet_b0,192,30346.81,8.427,256,8.76,0.37,3.15\nefficientvit_m0,224,30313.3,8.437,256,2.35,0.08,0.91\nlcnet_150,224,30064.96,8.506,256,4.5,0.34,3.79\nresnet34,160,29702.78,8.608,256,21.8,1.87,1.91\nrepghostnet_100,224,29445.74,8.685,256,4.07,0.15,3.98\nefficientvit_m4,224,29026.74,8.81,256,8.8,0.3,1.7\ncs3darknet_focus_s,256,28372.65,9.014,256,3.27,0.69,2.7\nregnety_004,224,27981.57,9.139,256,4.34,0.41,3.89\ntf_mobilenetv3_large_minimal_100,224,27961.28,9.147,256,3.92,0.22,4.4\nmobilenetv3_large_075,224,27774.4,9.208,256,3.99,0.16,4.0\nresnet14t,176,27760.24,9.213,256,10.08,1.07,3.61\nmnasnet_075,224,27169.47,9.414,256,3.17,0.23,4.77\nconvnext_atto,224,27156.87,9.418,256,3.7,0.55,3.81\nhgnetv2_b1,224,27032.22,9.462,256,6.34,0.49,2.73\ncs3darknet_s,256,26830.41,9.533,256,3.28,0.72,2.97\nregnety_006,224,26488.55,9.656,256,6.06,0.61,4.33\nefficientvit_m5,224,26226.6,9.753,256,12.47,0.53,2.41\nresnet18,224,25948.79,9.857,256,11.69,1.82,2.48\ntf_efficientnetv2_b0,192,25767.89,9.926,256,7.14,0.54,3.51\ntf_mob
ilenetv3_large_075,224,25675.44,9.962,256,3.99,0.16,4.0\nghostnet_100,224,25628.25,9.98,256,5.18,0.15,3.55\nlevit_conv_256,224,25595.46,9.993,256,18.89,1.13,4.23\nconvnextv2_atto,224,25550.99,10.011,256,3.71,0.55,3.81\nconvnext_atto_ols,224,25437.32,10.056,256,3.7,0.58,4.11\nrepghostnet_111,224,25146.64,10.171,256,4.54,0.18,4.38\nmobilenetv3_rw,224,24721.37,10.347,256,5.48,0.23,4.41\nmobilenetv3_large_100,224,24474.85,10.451,256,5.48,0.23,4.41\ndeit_tiny_patch16_224,224,24312.15,10.521,256,5.72,1.26,5.97\nvit_tiny_patch16_224,224,24266.11,10.54,256,5.72,1.26,5.97\nrepvgg_a0,224,24061.2,10.63,256,9.11,1.52,3.59\nseresnet18,224,24041.64,10.639,256,11.78,1.82,2.49\nlegacy_seresnet18,224,24022.17,10.648,256,11.78,1.82,2.49\nhardcorenas_b,224,23830.5,10.734,256,5.18,0.26,5.09\ndeit_tiny_distilled_patch16_224,224,23769.88,10.761,256,5.91,1.27,6.01\nmnasnet_100,224,23629.36,10.825,256,4.38,0.33,5.46\nedgenext_xx_small,288,23592.08,10.842,256,1.33,0.33,4.21\nregnetx_008,224,23475.66,10.896,256,7.26,0.81,5.15\nhardcorenas_c,224,23258.07,10.998,256,5.52,0.28,5.01\nmobilenetv1_100,224,22998.4,11.123,256,4.23,0.58,5.04\nmobilenet_edgetpu_v2_xs,224,22908.62,11.166,256,4.46,0.7,4.8\nsemnasnet_075,224,22722.94,11.257,256,2.91,0.23,5.54\nlevit_256,224,22644.71,11.296,256,18.89,1.13,4.23\nmobilenetv1_100h,224,22625.21,11.306,256,5.28,0.63,5.09\ntf_mobilenetv3_large_100,224,22476.69,11.381,256,5.48,0.23,4.41\nconvnext_femto,224,22282.51,11.48,256,5.22,0.79,4.57\nresnet18d,224,22111.11,11.569,256,11.71,2.06,3.29\nlevit_conv_256d,224,22077.96,11.586,256,26.21,1.4,4.93\nspnasnet_100,224,22077.72,11.586,256,4.42,0.35,6.03\nmobilenetv4_hybrid_medium_075,224,22065.84,11.593,256,7.31,0.66,5.65\ndla46_c,224,21982.6,11.636,256,1.3,0.58,4.5\nvit_medium_patch32_clip_224,224,21846.57,11.709,256,39.69,2.0,3.34\nmobilenetv2_075,224,21759.16,11.757,256,2.64,0.22,5.86\nhardcorenas_a,224,21729.5,11.773,256,5.26,0.23,4.38\nmobilenetv4_conv_medium,224,21722.12,11.776,256,9.72,0.84,5.8\nhgnetv2_b0,288,2
1721.77,11.777,256,6.0,0.54,3.51\nregnety_008,224,21671.84,11.803,256,6.26,0.81,5.25\nhardcorenas_d,224,21580.66,11.853,256,7.5,0.3,4.93\nconvnextv2_femto,224,21531.01,11.881,256,5.23,0.79,4.57\nconvnext_femto_ols,224,21072.2,12.14,256,5.23,0.82,4.87\nrepghostnet_130,224,21064.38,12.144,256,5.48,0.25,5.24\nmobilenet_edgetpu_100,224,21052.52,12.151,256,4.09,1.0,5.75\npit_xs_224,224,20835.05,12.278,256,10.62,1.4,7.71\nefficientformerv2_s0,224,20726.97,12.342,256,3.6,0.41,5.3\nese_vovnet19b_slim_dw,224,20707.54,12.354,256,1.9,0.4,5.28\npit_xs_distilled_224,224,20528.33,12.461,256,11.0,1.41,7.76\nregnety_008_tv,224,20329.15,12.583,256,6.43,0.84,5.42\nfbnetc_100,224,20140.52,12.702,256,5.57,0.4,6.51\nvit_xsmall_patch16_clip_224,224,19971.81,12.809,256,8.28,1.79,6.65\ntf_efficientnetv2_b1,192,19947.06,12.825,256,8.14,0.76,4.59\nlevit_256d,224,19733.29,12.964,256,26.21,1.4,4.93\nsemnasnet_100,224,19697.19,12.988,256,3.89,0.32,6.23\nese_vovnet19b_slim,224,19687.27,12.994,256,3.17,1.69,3.52\nghostnet_130,224,19375.99,13.203,256,7.36,0.24,4.6\nregnetx_006,224,19266.74,13.277,256,6.2,0.61,3.98\nmobilenetv2_100,224,18995.91,13.468,256,3.5,0.31,6.68\ntinynet_b,188,18989.86,13.472,256,3.73,0.21,4.44\nhrnet_w18_small,224,18828.83,13.587,256,13.19,1.61,5.72\nrepghostnet_150,224,18415.28,13.892,256,6.58,0.32,6.0\nefficientnet_lite0,224,17985.67,14.225,256,4.65,0.4,6.74\nskresnet18,224,17812.46,14.363,256,11.96,1.82,3.24\nresnetblur18,224,17704.62,14.451,256,11.69,2.34,3.39\ntf_efficientnet_lite0,224,17685.01,14.466,256,4.65,0.4,6.74\ngmlp_ti16_224,224,17642.28,14.501,256,5.87,1.34,7.55\nmobilevit_xxs,256,17607.31,14.53,256,1.27,0.42,8.34\nedgenext_x_small,256,17443.49,14.667,256,2.34,0.54,5.93\nresnet14t,224,17428.63,14.68,256,10.08,1.69,5.8\nhardcorenas_e,224,17401.15,14.702,256,8.07,0.35,5.65\ntf_efficientnetv2_b0,224,17323.92,14.768,256,7.14,0.73,4.77\nmobilenetv4_hybrid_medium,224,17322.11,14.769,256,11.07,0.98,6.84\nrepvit_m1,224,17266.08,14.817,256,5.49,0.83,7.45\npvt_v2_b0,22
4,17238.77,14.841,256,3.67,0.57,7.99\nxcit_tiny_12_p16_224,224,17236.25,14.843,256,6.72,1.24,6.29\nmobilenetv1_125,224,17195.51,14.879,256,6.27,0.89,6.3\nhardcorenas_f,224,17171.48,14.9,256,8.2,0.35,5.57\nhgnetv2_b2,224,17062.45,14.995,256,11.22,1.15,4.12\nrepvgg_a1,224,17061.07,14.996,256,14.09,2.64,4.74\nnf_regnet_b0,256,16940.55,15.102,256,8.76,0.64,5.58\nmobilenetv1_100,256,16680.51,15.338,256,4.23,0.76,6.59\nmobilenetv1_100h,256,16453.67,15.55,256,5.28,0.82,6.65\nresnet50,160,16423.72,15.578,256,25.56,2.1,5.67\nrepvit_m0_9,224,16304.29,15.692,256,5.49,0.83,7.45\nhgnetv2_b1,288,16122.96,15.869,256,6.34,0.82,4.51\nmobilenetv4_conv_medium,256,16108.18,15.883,256,9.72,1.1,7.58\ncrossvit_tiny_240,240,16046.42,15.945,256,7.01,1.57,9.08\nconvnext_pico,224,16034.63,15.957,256,9.05,1.37,6.1\nconvnext_atto,288,15915.6,16.076,256,3.7,0.91,6.3\ngernet_m,224,15764.43,16.23,256,21.14,3.02,5.24\nvit_betwixt_patch32_clip_224,224,15727.09,16.268,256,61.41,3.09,4.17\nefficientvit_b1,224,15699.71,16.296,256,9.1,0.53,7.25\nresnet18,288,15598.12,16.402,256,11.69,3.01,4.11\ncrossvit_9_240,240,15301.84,16.72,256,8.55,1.85,9.52\nresnet50d,160,15291.83,16.732,256,25.58,2.22,6.08\nresnet34,224,15291.59,16.732,256,21.8,3.67,3.74\nconvnext_pico_ols,224,15194.28,16.84,256,9.06,1.43,6.5\ntinynet_a,192,15162.99,16.873,256,6.19,0.35,5.41\nmobilenet_edgetpu_v2_s,224,15100.58,16.944,256,5.99,1.21,6.6\nmnasnet_140,224,15010.98,17.045,256,7.12,0.6,7.71\nconvnextv2_pico,224,14927.0,17.141,256,9.07,1.37,6.1\nlevit_conv_384,224,14906.88,17.164,256,39.13,2.36,6.26\nconvnext_atto_ols,288,14901.48,17.17,256,3.7,0.96,6.8\nfbnetv3_b,224,14619.25,17.502,256,8.6,0.42,6.97\nconvnextv2_atto,288,14583.28,17.546,256,3.71,0.91,6.3\nregnetz_005,224,14395.17,17.774,256,7.12,0.52,5.86\nmobilenetv4_conv_blur_medium,224,14300.06,17.893,256,9.72,1.22,8.58\nseresnet18,288,14286.39,17.91,256,11.78,3.01,4.11\nefficientformerv2_s1,224,14252.25,17.953,256,6.19,0.67,7.66\nseresnet34,224,14239.31,17.969,256,21.96,3.67,3.74\
nmobilevitv2_050,256,14216.39,17.998,256,1.37,0.48,8.04\nlegacy_seresnet34,224,14186.47,18.036,256,21.96,3.67,3.74\nmobilenetv2_110d,224,14184.27,18.039,256,4.52,0.45,8.71\ncrossvit_9_dagger_240,240,14117.97,18.123,256,8.78,1.99,9.97\nefficientformer_l1,224,14110.06,18.133,256,12.29,1.3,5.53\nrexnetr_100,224,14090.03,18.159,256,4.88,0.43,7.72\ncs3darknet_focus_m,256,14044.33,18.219,256,9.3,1.98,4.89\nresnet34d,224,13891.67,18.42,256,21.82,3.91,4.54\neva02_tiny_patch14_224,224,13809.61,18.529,256,5.5,1.7,9.14\ntf_efficientnetv2_b2,208,13703.38,18.673,256,10.1,1.06,6.0\nresnext50_32x4d,160,13655.32,18.739,256,25.03,2.17,7.35\nvit_tiny_r_s16_p8_384,384,13521.23,18.924,256,6.36,1.34,6.49\ndla34,224,13514.0,18.934,256,15.74,3.07,5.02\ncs3darknet_m,256,13508.36,18.942,256,9.31,2.08,5.28\nrepghostnet_200,224,13466.37,19.001,256,9.8,0.54,7.96\nselecsls42,224,13463.14,19.006,256,30.35,2.94,4.62\nrexnet_100,224,13432.99,19.048,256,4.8,0.41,7.44\nghostnetv2_100,224,13426.18,19.057,256,6.16,0.18,4.55\nselecsls42b,224,13399.31,19.096,256,32.46,2.98,4.62\nresnet18d,288,13378.17,19.127,256,11.71,3.41,5.43\nseresnet50,160,13373.47,19.133,256,28.09,2.1,5.69\nrepvgg_b0,224,13287.11,19.257,256,15.82,3.41,6.15\nhgnetv2_b3,224,13275.88,19.274,256,16.29,1.78,5.07\nresnet26,224,13255.66,19.304,256,16.0,2.36,7.35\nconvnext_femto,288,13199.45,19.386,256,5.22,1.3,7.56\nedgenext_x_small,288,13195.3,19.392,256,2.34,0.68,7.5\nresnet50,176,13189.38,19.401,256,25.56,2.62,6.92\nnf_regnet_b2,240,13112.1,19.515,256,14.31,0.97,7.23\nrepvit_m1_0,224,13097.62,19.537,256,7.3,1.13,8.69\nlevit_384,224,13010.17,19.668,256,39.13,2.36,6.26\nmobilenetv4_hybrid_medium,256,12899.45,19.837,256,11.07,1.29,9.01\nmobilenetv1_125,256,12862.06,19.894,256,6.27,1.16,8.23\necaresnet50t,160,12731.21,20.1,256,25.57,2.21,6.04\nsemnasnet_140,224,12618.94,20.277,256,6.11,0.6,8.87\nnf_regnet_b1,256,12525.77,20.429,256,10.22,0.82,7.27\nrepvit_m2,224,12475.42,20.511,256,8.8,1.36,9.43\nresnetrs50,160,12428.45,20.588,256,35.69,2.
29,6.2\nconvnext_femto_ols,288,12403.21,20.631,256,5.23,1.35,8.06\ngmixer_12_224,224,12369.96,20.686,256,12.7,2.67,7.26\nmobilenetv2_140,224,12263.51,20.865,256,6.11,0.6,9.57\npit_s_distilled_224,224,12210.42,20.957,256,24.04,2.9,11.64\nresnetaa34d,224,12190.8,20.991,256,21.82,4.43,5.07\nconvnextv2_femto,288,12161.51,21.041,256,5.23,1.3,7.56\nfbnetv3_d,224,12090.69,21.164,256,10.31,0.52,8.5\nnf_resnet26,224,12044.22,21.246,256,16.0,2.41,7.35\nvisformer_tiny,224,11983.41,21.354,256,10.32,1.27,5.72\npoolformerv2_s12,224,11968.43,21.381,256,11.89,1.83,5.53\npit_s_224,224,11936.79,21.437,256,23.46,2.88,11.56\nefficientnet_es_pruned,224,11927.45,21.454,256,5.44,1.81,8.73\nvit_base_patch32_224,224,11925.14,21.458,256,88.22,4.41,5.01\nefficientnet_es,224,11898.4,21.507,256,5.44,1.81,8.73\nvit_base_patch32_clip_224,224,11887.61,21.526,256,88.22,4.41,5.01\nvit_base_patch32_clip_quickgelu_224,224,11884.79,21.532,256,87.85,4.41,5.01\nefficientnet_lite1,240,11883.98,21.533,256,5.42,0.62,10.14\ntiny_vit_5m_224,224,11858.98,21.578,256,12.08,1.28,11.25\nselecsls60,224,11842.43,21.608,256,30.67,3.59,5.52\ntf_efficientnet_es,224,11819.88,21.649,256,5.44,1.81,8.73\nese_vovnet19b_dw,224,11813.91,21.66,256,6.54,1.34,8.25\nrepvit_m1_1,224,11786.2,21.711,256,8.8,1.36,9.43\nselecsls60b,224,11784.48,21.714,256,32.77,3.63,5.52\nresnet26d,224,11759.24,21.761,256,16.01,2.6,8.15\ntf_efficientnet_lite1,240,11696.2,21.877,256,5.42,0.62,10.14\nefficientnet_b0,224,11670.69,21.926,256,5.29,0.4,6.75\nefficientvit_b1,256,11622.24,22.018,256,9.1,0.69,9.46\nmobilenet_edgetpu_v2_m,224,11435.95,22.377,256,8.46,1.85,8.15\nhgnetv2_b4,224,11396.46,22.454,256,19.8,2.75,6.7\nresmlp_12_224,224,11392.33,22.463,256,15.35,3.01,5.5\ndarknet17,256,11376.06,22.495,256,14.3,3.26,7.18\nnf_ecaresnet26,224,11300.75,22.644,256,16.0,2.41,7.36\nnf_seresnet26,224,11261.33,22.724,256,17.4,2.41,7.36\nconvnext_nano,224,11259.0,22.727,256,15.59,2.46,8.37\nvit_small_patch32_384,384,11156.92,22.936,256,22.92,3.45,8.25\nefficientn
et_b1_pruned,240,11085.12,23.084,256,6.33,0.4,6.21\nresnext50_32x4d,176,11026.93,23.206,256,25.03,2.71,8.97\ntf_efficientnetv2_b1,240,11002.33,23.259,256,8.14,1.21,7.34\ndla46x_c,224,10979.44,23.307,256,1.07,0.54,5.66\nmobilenetv4_conv_aa_medium,256,10961.85,23.344,256,9.72,1.58,10.3\ncs3darknet_focus_m,288,10880.75,23.519,256,9.3,2.51,6.19\nmixer_s16_224,224,10874.1,23.533,256,18.53,3.79,5.97\nedgenext_small,256,10831.26,23.626,256,5.59,1.26,9.07\ndla60x_c,224,10780.61,23.737,256,1.32,0.59,6.01\nmobilenetv4_conv_blur_medium,256,10753.26,23.797,256,9.72,1.59,11.2\nmixer_b32_224,224,10749.19,23.807,256,60.29,3.24,6.29\nresnetblur18,288,10709.95,23.894,256,11.69,3.87,5.6\nrexnetr_130,224,10680.75,23.959,256,7.61,0.68,9.81\npoolformer_s12,224,10614.31,24.108,256,11.92,1.82,5.53\ncs3darknet_m,288,10519.8,24.326,256,9.31,2.63,6.69\nfbnetv3_b,256,10482.22,24.413,256,8.6,0.55,9.1\nresnet101,160,10466.64,24.45,256,44.55,4.0,8.28\nskresnet34,224,10457.92,24.469,256,22.28,3.67,5.13\nconvnextv2_nano,224,10414.74,24.572,256,15.62,2.46,8.37\ndarknet21,256,10326.92,24.78,256,20.86,3.93,7.47\nghostnetv2_130,224,10285.16,24.881,256,8.96,0.28,5.9\ngernet_l,256,10282.94,24.887,256,31.08,4.57,8.0\nhgnetv2_b2,288,10247.93,24.971,256,11.22,1.89,6.8\nmobilenetv2_120d,224,10245.9,24.977,256,5.83,0.69,11.97\nconvnext_nano_ols,224,10137.55,25.244,256,15.65,2.65,9.38\ndpn48b,224,10002.5,25.584,256,9.13,1.69,8.92\nnf_regnet_b2,272,9981.6,25.638,256,14.31,1.22,9.27\necaresnet50d_pruned,224,9963.17,25.685,256,19.94,2.53,6.43\nmobilenetv4_conv_medium,320,9962.23,25.687,256,9.72,1.71,11.84\nmobileone_s1,224,9945.03,25.732,256,4.83,0.86,9.67\nmobilenet_edgetpu_v2_l,224,9929.94,25.772,256,10.92,2.55,9.05\nefficientnet_b0_gn,224,9920.37,25.796,256,5.29,0.42,6.75\ntiny_vit_11m_224,224,9911.24,25.819,256,20.35,2.04,13.49\nconvnext_pico,288,9741.71,26.27,256,9.05,2.27,10.08\nresnext26ts,256,9713.54,26.346,256,10.3,2.43,10.52\nnf_regnet_b1,288,9675.15,26.45,256,10.22,1.02,9.2\nvit_small_patch16_224,224,
9665.97,26.475,256,22.05,4.61,11.95\nrepvgg_a2,224,9665.68,26.476,256,28.21,5.7,6.26\ndeit_small_patch16_224,224,9661.41,26.487,256,22.05,4.61,11.95\ndeit3_small_patch16_224,224,9645.46,26.532,256,22.06,4.61,11.95\ntf_efficientnet_b0,224,9637.3,26.554,256,5.29,0.4,6.75\ntf_mixnet_s,224,9581.92,26.707,256,4.13,0.25,6.25\nrexnet_130,224,9568.98,26.744,256,7.56,0.68,9.71\ndeit_small_distilled_patch16_224,224,9556.61,26.778,256,22.44,4.63,12.02\nvit_wee_patch16_reg1_gap_256,256,9553.41,26.788,256,13.42,3.83,13.9\nxcit_tiny_24_p16_224,224,9465.47,27.036,256,12.12,2.34,11.82\nmixnet_s,224,9398.83,27.228,256,4.13,0.25,6.25\nvit_relpos_small_patch16_224,224,9365.71,27.325,256,21.98,4.59,13.05\nvit_srelpos_small_patch16_224,224,9347.26,27.377,256,21.97,4.59,12.16\ngc_efficientnetv2_rw_t,224,9344.17,27.386,256,13.68,1.94,9.97\nvit_pwee_patch16_reg1_gap_256,256,9263.81,27.625,256,15.25,4.37,15.87\nrexnetr_150,224,9243.8,27.684,256,9.78,0.89,11.13\nhrnet_w18_small_v2,224,9242.43,27.688,256,15.6,2.62,9.65\nvit_base_patch32_clip_256,256,9241.91,27.69,256,87.86,5.76,6.65\nconvnext_pico_ols,288,9219.94,27.757,256,9.06,2.37,10.74\nmobilevitv2_075,256,9169.72,27.908,256,2.87,1.05,12.06\nresnet26t,256,9168.78,27.912,256,16.01,3.35,10.52\nresnet34,288,9144.8,27.985,256,21.8,6.07,6.18\ngcresnext26ts,256,9136.94,28.009,256,10.48,2.43,10.53\nlegacy_seresnext26_32x4d,224,9127.31,28.037,256,16.79,2.49,9.39\nefficientformerv2_s2,224,9079.95,28.184,256,12.71,1.27,11.77\nregnetx_016,224,9048.99,28.281,256,9.19,1.62,7.93\nsedarknet21,256,9044.06,28.297,256,20.95,3.93,7.47\nmobilenetv4_hybrid_large_075,256,9021.75,28.366,256,22.75,2.06,11.64\nconvnextv2_pico,288,8940.75,28.624,256,9.07,2.27,10.08\nmobilenetv4_conv_large,256,8935.42,28.64,256,32.59,2.86,12.14\nefficientnet_lite2,260,8893.65,28.775,256,6.09,0.89,12.9\nefficientvit_b1,288,8878.52,28.825,256,9.1,0.87,11.96\ndpn68,224,8840.27,28.949,256,12.61,2.35,10.47\nregnety_016,224,8805.36,29.063,256,11.2,1.63,8.04\ntf_efficientnet_lite2,260,879
6.92,29.092,256,6.09,0.89,12.9\nhgnet_tiny,224,8776.27,29.161,256,14.74,4.54,6.36\nseresnext26ts,256,8763.95,29.202,256,10.39,2.43,10.52\neca_resnext26ts,256,8762.16,29.207,256,10.3,2.43,10.52\nvit_relpos_small_patch16_rpn_224,224,8724.84,29.332,256,21.97,4.59,13.05\nfbnetv3_d,256,8723.73,29.335,256,10.31,0.68,11.1\nvit_small_r26_s32_224,224,8684.28,29.469,256,36.43,3.56,9.85\nefficientnet_b0,256,8654.63,29.57,256,5.29,0.52,8.81\nmobilenet_edgetpu_v2_m,256,8614.85,29.707,256,8.46,2.42,10.65\nbotnet26t_256,256,8607.7,29.728,256,12.49,3.32,11.98\nhalonet26t,256,8583.89,29.814,256,12.48,3.19,11.69\nefficientnet_blur_b0,224,8559.05,29.9,256,5.29,0.43,8.72\nresnest14d,224,8557.7,29.905,256,10.61,2.76,7.33\nedgenext_small_rw,256,8555.55,29.913,256,7.83,1.58,9.51\nflexivit_small,240,8505.35,30.09,256,22.06,5.35,14.18\necaresnext50t_32x4d,224,8481.39,30.175,256,15.41,2.7,10.09\necaresnext26t_32x4d,224,8479.72,30.18,256,15.41,2.7,10.09\nseresnext26t_32x4d,224,8460.91,30.245,256,16.81,2.7,10.09\ndpn68b,224,8448.18,30.292,256,12.61,2.35,10.47\nseresnet34,288,8419.97,30.395,256,21.96,6.07,6.18\nefficientnet_b0_g16_evos,224,8418.52,30.399,256,8.11,1.01,7.42\necaresnet101d_pruned,224,8411.86,30.422,256,24.88,3.48,7.69\nresnet101,176,8404.43,30.451,256,44.55,4.92,10.08\nseresnext26d_32x4d,224,8398.85,30.471,256,16.81,2.73,10.19\nresnet34d,288,8385.33,30.521,256,21.82,6.47,7.51\nrexnet_150,224,8361.13,30.608,256,9.73,0.9,11.21\nefficientnetv2_rw_t,224,8343.87,30.671,256,13.65,1.93,9.94\nrepvit_m3,224,8338.0,30.693,256,10.68,1.89,13.94\npvt_v2_b1,224,8309.51,30.799,256,14.01,2.12,15.39\nconvit_tiny,224,8301.19,30.83,256,5.71,1.26,7.94\necaresnetlight,224,8286.53,30.885,256,30.16,4.11,8.42\neca_nfnet_l0,224,8268.29,30.953,256,24.14,4.35,10.47\nxcit_nano_12_p16_384,384,8253.59,31.007,256,3.05,1.64,12.15\nresnet50,224,8237.46,31.069,256,25.56,4.11,11.11\nnfnet_l0,224,8226.21,31.11,256,35.07,4.36,10.47\ncs3darknet_focus_l,256,8211.8,31.166,256,21.15,4.66,8.03\ntresnet_m,224,8210.79,31.1
69,256,31.39,5.75,7.31\nmobileone_s2,224,8190.43,31.246,256,7.88,1.34,11.55\ncoatnext_nano_rw_224,224,8142.29,31.431,256,14.7,2.47,12.8\nregnetz_005,288,8113.29,31.543,256,7.12,0.86,9.68\nefficientnet_b1,224,8096.89,31.607,256,7.79,0.59,9.36\neca_botnext26ts_256,256,8088.97,31.637,256,10.59,2.46,11.6\nmobileone_s0,224,8070.39,31.71,256,5.29,1.09,15.48\ndla60,224,8036.02,31.847,256,22.04,4.26,10.16\nghostnetv2_160,224,8032.91,31.859,256,12.39,0.42,7.23\nresnet26,288,8028.16,31.879,256,16.0,3.9,12.15\nhgnetv2_b3,288,7993.28,32.017,256,16.29,2.94,8.38\nresnet32ts,256,7987.07,32.043,256,17.96,4.63,11.58\neca_halonext26ts,256,7959.44,32.153,256,10.76,2.44,11.46\ncs3darknet_l,256,7898.11,32.403,256,21.16,4.86,8.55\nresnet33ts,256,7888.41,32.444,256,19.68,4.76,11.66\nresnet50c,224,7858.28,32.567,256,25.58,4.35,11.92\nfastvit_t8,256,7845.11,32.622,256,4.03,0.7,8.63\nmobilenetv3_large_150d,256,7830.3,32.683,256,14.62,1.03,12.35\nefficientnet_b0_g8_gn,224,7820.23,32.725,256,6.56,0.66,6.75\nvit_small_resnet26d_224,224,7754.96,33.002,256,63.61,5.07,11.12\nlambda_resnet26t,256,7725.02,33.129,256,10.96,3.02,11.87\ncoat_lite_tiny,224,7696.35,33.252,256,5.72,1.6,11.65\nresnet50t,224,7687.2,33.293,256,25.57,4.32,11.82\nlevit_conv_512,224,7679.59,33.326,256,95.17,5.64,10.22\nmobilevit_xs,256,7674.3,33.348,256,2.32,1.05,16.33\nresnext26ts,288,7671.02,33.363,256,10.3,3.07,13.31\ntf_efficientnetv2_b2,260,7642.74,33.487,256,10.1,1.72,9.84\nresnet50d,224,7639.23,33.502,256,25.58,4.35,11.92\nmobilenetv4_hybrid_medium,320,7554.53,33.877,256,11.07,2.05,14.36\necaresnet26t,256,7544.42,33.923,256,16.01,3.35,10.53\nefficientnet_cc_b0_8e,224,7516.53,34.049,256,24.01,0.42,9.42\nnf_regnet_b3,288,7509.89,34.079,256,18.59,1.67,11.84\nefficientnet_cc_b0_4e,224,7508.02,34.087,256,13.31,0.41,9.42\ngmlp_s16_224,224,7502.53,34.112,256,19.42,4.42,15.1\nresnetv2_50,224,7495.78,34.144,256,25.55,4.11,11.11\nvit_tiny_patch16_384,384,7452.45,34.342,256,5.79,4.7,25.39\nvovnet39a,224,7432.2,34.436,256,22.6,7.09,
6.73\ngcresnet33ts,256,7420.0,34.491,256,19.88,4.76,11.68\ntf_efficientnetv2_b3,240,7371.52,34.718,256,14.36,1.93,9.95\nwide_resnet50_2,176,7362.71,34.761,256,68.88,7.29,8.97\nresnet152,160,7349.12,34.825,256,60.19,5.9,11.51\nresnetaa34d,288,7340.2,34.867,256,21.82,7.33,8.38\nselecsls84,224,7334.1,34.895,256,50.95,5.9,7.57\nresnetaa50,224,7291.3,35.101,256,25.56,5.15,11.64\nefficientnet_em,240,7258.93,35.257,256,6.9,3.04,14.34\nlevit_conv_512d,224,7214.89,35.472,256,92.5,5.85,11.3\ntf_efficientnet_em,240,7200.67,35.543,256,6.9,3.04,14.34\ngcresnext26ts,288,7186.05,35.614,256,10.48,3.07,13.33\nvit_base_patch32_plus_256,256,7183.84,35.627,256,119.48,7.79,7.76\nres2net50_48w_2s,224,7183.73,35.627,256,25.29,4.18,11.72\ncoat_lite_mini,224,7158.54,35.75,256,11.01,2.0,12.25\nresnet26d,288,7151.59,35.787,256,16.01,4.29,13.48\nefficientvit_b2,224,7108.81,36.002,256,24.33,1.6,14.62\nrepvit_m1_5,224,7103.0,36.031,256,14.64,2.31,15.7\nese_vovnet19b_dw,288,7097.11,36.062,256,6.54,2.22,13.63\nresnet50_gn,224,7077.2,36.163,256,25.56,4.14,11.11\neca_resnet33ts,256,7061.62,36.243,256,19.68,4.76,11.66\nresnetv2_50t,224,7051.5,36.295,256,25.57,4.32,11.82\nseresnet33ts,256,7046.47,36.321,256,19.78,4.76,11.66\neca_vovnet39b,224,7017.69,36.47,256,22.6,7.09,6.74\nresnetv2_50d,224,7006.58,36.528,256,25.57,4.35,11.92\ncrossvit_small_240,240,6948.36,36.834,256,26.86,5.63,18.17\ninception_v3,299,6933.82,36.911,256,23.83,5.73,8.97\nlevit_512,224,6931.73,36.922,256,95.17,5.64,10.22\nseresnext26ts,288,6920.44,36.982,256,10.39,3.07,13.32\neca_resnext26ts,288,6919.11,36.989,256,10.3,3.07,13.32\nnf_ecaresnet50,224,6916.89,37.001,256,25.56,4.21,11.13\nresnetblur50,224,6906.06,37.06,256,25.56,5.16,12.02\nese_vovnet39b,224,6897.07,37.107,256,24.57,7.09,6.74\nhgnetv2_b4,288,6892.16,37.134,256,19.8,4.54,11.08\nnf_seresnet50,224,6872.82,37.239,256,28.09,4.21,11.13\nvgg11_bn,224,6872.74,37.239,256,132.87,7.62,7.44\nvgg11,224,6868.96,37.26,256,132.86,7.61,7.44\nresnext50_32x4d,224,6858.21,37.319,256,25.03,
4.26,14.4\nresnetaa50d,224,6834.01,37.45,256,25.58,5.39,12.44\nsam2_hiera_tiny,224,6819.52,37.53,256,26.85,4.91,17.12\nlegacy_seresnet50,224,6808.13,37.592,256,28.09,3.88,10.6\nresnet50_clip_gap,224,6805.83,37.606,256,23.53,5.39,12.44\nconvnext_nano,288,6796.73,37.655,256,15.59,4.06,13.84\ndla60x,224,6789.78,37.695,256,17.35,3.54,13.8\nefficientnet_b1,240,6787.57,37.706,256,7.79,0.71,10.88\nmobileone_s3,224,6778.3,37.757,256,10.17,1.94,13.85\nedgenext_small,320,6777.59,37.762,256,5.59,1.97,14.16\nmobilevitv2_100,256,6775.09,37.776,256,4.9,1.84,16.08\nconvnext_tiny_hnf,224,6762.18,37.848,256,28.59,4.47,13.44\nxcit_small_12_p16_224,224,6719.89,38.086,256,26.25,4.82,12.58\ninception_next_tiny,224,6716.71,38.104,256,28.06,4.19,11.98\nconvnext_tiny,224,6713.74,38.121,256,28.59,4.47,13.44\ntwins_svt_small,224,6712.42,38.129,256,24.06,2.94,13.75\nresnet50s,224,6700.52,38.196,256,25.68,5.47,13.52\nvit_little_patch16_reg1_gap_256,256,6683.54,38.293,256,22.52,6.27,18.06\nvit_relpos_base_patch32_plus_rpn_256,256,6635.1,38.573,256,119.42,7.68,8.01\nese_vovnet39b_evos,224,6627.24,38.618,256,24.58,7.07,6.74\nvit_little_patch16_reg4_gap_256,256,6624.29,38.636,256,22.52,6.35,18.33\nskresnet50,224,6610.42,38.716,256,25.8,4.11,12.5\nrexnetr_200,224,6609.18,38.725,256,16.52,1.59,15.11\ncs3sedarknet_l,256,6604.77,38.75,256,21.91,4.86,8.56\nregnetz_b16_evos,224,6598.52,38.787,256,9.74,1.43,9.95\nefficientnet_b2_pruned,260,6597.68,38.791,256,8.31,0.73,9.13\nlevit_512d,224,6590.97,38.832,256,92.5,5.85,11.3\ncrossvit_15_240,240,6583.92,38.873,256,27.53,5.81,19.77\ndensenet121,224,6580.95,38.89,256,7.98,2.87,6.9\ntf_efficientnet_cc_b0_4e,224,6574.97,38.926,256,13.31,0.41,9.42\ntf_efficientnet_cc_b0_8e,224,6564.08,38.99,256,24.01,0.42,9.42\nseresnet50,224,6510.44,39.312,256,28.09,4.11,11.13\nregnetz_b16,224,6497.98,39.386,256,9.72,1.45,9.95\nresnetblur50d,224,6469.2,39.563,256,25.58,5.4,12.82\nregnetx_032,224,6433.96,39.779,256,15.3,3.2,11.37\nresnext50d_32x4d,224,6433.36,39.782,256,25.05,4.
5,15.2\ncspresnet50,256,6423.77,39.843,256,21.62,4.54,11.5\nhaloregnetz_b,224,6420.84,39.86,256,11.68,1.97,11.94\nconvformer_s18,224,6401.93,39.977,256,26.77,3.96,15.82\ngmixer_24_224,224,6397.38,40.007,256,24.72,5.28,14.45\nresnest26d,224,6382.9,40.098,256,17.07,3.64,9.97\ncaformer_s18,224,6357.61,40.255,256,26.34,4.13,19.39\nresnet50_clip,224,6340.6,40.365,256,38.32,6.14,12.98\nhgnetv2_b5,224,6336.07,40.393,256,39.57,6.56,11.19\ntf_mixnet_m,224,6323.56,40.473,256,5.01,0.36,8.19\ncs3darknet_focus_l,288,6307.83,40.575,256,21.15,5.9,10.16\nrepvgg_b1g4,224,6303.26,40.604,256,39.97,8.15,10.64\nvit_medium_patch16_clip_224,224,6298.39,40.635,256,38.59,8.0,15.93\nresnet32ts,288,6297.13,40.644,256,17.96,5.86,14.65\ndeit3_medium_patch16_224,224,6292.44,40.675,256,38.85,8.0,15.93\nmixnet_m,224,6274.65,40.789,256,5.01,0.36,8.19\nconvnextv2_tiny,224,6265.42,40.849,256,28.64,4.47,13.44\nconvnextv2_nano,288,6252.04,40.936,256,15.62,4.06,13.84\nefficientformer_l3,224,6248.87,40.958,256,31.41,3.93,12.01\ncoatnet_pico_rw_224,224,6223.54,41.125,256,10.85,2.05,14.62\nresnet33ts,288,6212.45,41.198,256,19.68,6.02,14.75\nvit_base_resnet26d_224,224,6210.89,41.208,256,101.4,6.97,13.16\nskresnet50d,224,6206.47,41.238,256,25.82,4.36,13.31\ntiny_vit_21m_224,224,6202.51,41.264,256,33.22,4.29,20.08\necaresnet50t,224,6182.25,41.4,256,25.57,4.32,11.83\nrexnet_200,224,6181.08,41.407,256,16.37,1.56,14.91\nvit_relpos_medium_patch16_224,224,6168.22,41.493,256,38.75,7.97,17.02\npoolformerv2_s24,224,6162.46,41.532,256,21.34,3.42,10.68\nseresnet50t,224,6161.79,41.537,256,28.1,4.32,11.83\nsehalonet33ts,256,6154.72,41.584,256,13.69,3.55,14.7\necaresnet50d,224,6146.92,41.638,256,25.58,4.35,11.93\nvit_srelpos_medium_patch16_224,224,6143.0,41.665,256,38.74,7.96,16.21\ncs3darknet_l,288,6142.02,41.67,256,21.16,6.16,10.83\ncrossvit_15_dagger_240,240,6130.99,41.745,256,28.21,6.13,20.43\nvovnet57a,224,6130.59,41.749,256,36.64,8.95,7.52\ncspresnet50d,256,6120.99,41.814,256,21.64,4.86,12.55\nconvnext_nano_ols,288,
6113.65,41.864,256,15.65,4.38,15.5\ncspresnet50w,256,6107.36,41.907,256,28.12,5.04,12.19\nvit_relpos_medium_patch16_cls_224,224,6098.64,41.967,256,38.76,8.03,18.24\nresnetrs50,224,6086.47,42.05,256,35.69,4.48,12.14\nxcit_nano_12_p8_224,224,6075.31,42.129,256,3.05,2.16,15.71\ngcresnext50ts,256,6060.87,42.228,256,15.67,3.75,15.46\nfbnetv3_g,240,6057.83,42.25,256,16.62,1.28,14.87\ndensenetblur121d,224,6056.82,42.256,256,8.0,3.11,7.9\ngcvit_xxtiny,224,6049.45,42.308,256,12.0,2.14,15.36\nvit_base_r26_s32_224,224,6038.4,42.386,256,101.38,6.81,12.36\nnf_regnet_b3,320,6032.74,42.424,256,18.59,2.05,14.61\nefficientnet_b1,256,6002.08,42.642,256,7.79,0.77,12.22\nmobilevit_s,256,5976.11,42.827,256,5.58,2.03,19.94\nresnet152,176,5947.93,43.031,256,60.19,7.22,13.99\ntf_efficientnet_b1,240,5946.3,43.042,256,7.79,0.71,10.88\nres2next50,224,5944.69,43.054,256,24.67,4.2,13.71\nres2net50_26w_4s,224,5895.46,43.412,256,25.7,4.28,12.61\nresnet26t,320,5888.07,43.468,256,16.01,5.24,16.44\nres2net50_14w_8s,224,5885.45,43.487,256,25.06,4.21,13.28\ndla60_res2next,224,5850.87,43.744,256,17.03,3.49,13.17\nresmlp_24_224,224,5846.3,43.778,256,30.02,5.96,10.91\ntwins_pcpvt_small,224,5845.62,43.784,256,24.11,3.83,18.08\ndla60_res2net,224,5837.47,43.845,256,20.85,4.15,12.34\nedgenext_base,256,5822.01,43.961,256,18.51,3.85,15.58\ngcresnet33ts,288,5797.71,44.145,256,19.88,6.02,14.78\nregnety_040,224,5793.29,44.179,256,20.65,4.0,12.29\neva02_tiny_patch14_336,336,5792.23,44.186,256,5.76,4.68,27.16\nresnetv2_50x1_bit,224,5786.08,44.234,256,25.55,4.23,11.11\nregnetv_040,224,5783.78,44.252,256,20.64,4.0,12.29\necaresnet50d_pruned,288,5762.03,44.418,256,19.94,4.19,10.61\nefficientvit_l1,224,5756.46,44.461,256,52.65,5.27,15.85\nvisformer_small,224,5750.7,44.507,256,40.22,4.88,11.43\neva02_small_patch14_224,224,5748.38,44.524,256,21.62,6.14,18.28\nese_vovnet57b,224,5743.03,44.566,256,38.61,8.95,7.52\nhgnet_small,224,5739.36,44.595,256,24.36,8.53,8.79\ngcresnet50t,256,5724.09,44.713,256,25.9,5.42,14.67\nnf_res
net50,256,5711.71,44.81,256,25.56,5.46,14.52\nresnet51q,256,5706.87,44.849,256,35.7,6.38,16.55\nhiera_tiny_224,224,5685.03,45.021,256,27.91,4.91,17.13\nefficientnet_b2,256,5683.07,45.037,256,9.11,0.89,12.81\nseresnext50_32x4d,224,5625.84,45.495,256,27.56,4.26,14.42\nlegacy_seresnext50_32x4d,224,5623.19,45.516,256,27.56,4.26,14.42\nsebotnet33ts_256,256,5614.57,45.585,256,13.7,3.89,17.46\nseresnetaa50d,224,5601.18,45.695,256,28.11,5.4,12.46\nres2net50d,224,5592.35,45.768,256,25.72,4.52,13.41\nregnety_032,224,5575.12,45.908,256,19.44,3.2,11.26\neca_resnet33ts,288,5547.26,46.139,256,19.68,6.02,14.76\nseresnet33ts,288,5546.78,46.143,256,19.78,6.02,14.76\nmobilenetv4_conv_large,320,5543.63,46.169,256,32.59,4.47,18.97\nfocalnet_tiny_srf,224,5540.33,46.197,256,28.43,4.42,16.32\nresnetv2_50d_frn,224,5526.17,46.316,256,25.59,4.33,11.92\ncoatnet_0_rw_224,224,5518.99,46.375,256,27.44,4.43,18.73\ndavit_tiny,224,5515.29,46.407,256,28.36,4.54,18.89\nfastvit_t12,256,5513.86,46.418,256,7.55,1.42,12.42\nvit_relpos_medium_patch16_rpn_224,224,5513.6,46.421,256,38.73,7.97,17.02\nresnetrs101,192,5499.51,46.54,256,63.62,6.04,12.7\nvit_medium_patch16_gap_240,240,5483.44,46.677,256,44.4,9.22,18.81\nefficientformerv2_l,224,5466.54,46.819,256,26.32,2.59,18.54\nregnetx_040,224,5462.75,46.854,256,22.12,3.99,12.2\nresnetv2_50d_gn,224,5452.78,46.939,256,25.57,4.38,11.92\ncoatnet_nano_rw_224,224,5433.12,47.109,256,15.14,2.41,15.41\nedgenext_small_rw,320,5425.79,47.173,256,7.83,2.46,14.85\nresnetv2_50d_evos,224,5402.37,47.378,256,25.59,4.33,11.92\nefficientvit_b2,256,5396.4,47.427,256,24.33,2.09,19.03\ndla102,224,5369.84,47.664,256,33.27,7.19,14.18\ncspresnext50,256,5358.14,47.768,256,20.57,4.05,15.86\nhrnet_w18_ssld,224,5312.43,48.178,256,21.3,4.32,16.31\nmobilevitv2_125,256,5304.51,48.251,256,7.48,2.86,20.1\ngc_efficientnetv2_rw_t,288,5303.02,48.263,256,13.68,3.2,16.45\nresnest50d_1s4x24d,224,5302.55,48.268,256,25.68,4.43,13.57\ncoatnet_nano_cc_224,224,5300.81,48.285,256,13.76,2.24,15.02\ndensene
t169,224,5293.85,48.347,256,14.15,3.4,7.3\nresnet61q,256,5274.43,48.526,256,36.85,7.8,17.01\ndarknet53,256,5272.3,48.545,256,41.61,9.31,12.39\nhgnet_tiny,288,5268.94,48.578,256,14.74,7.51,10.51\nlambda_resnet26rpt_256,256,5256.58,48.689,256,10.99,3.16,11.87\nresnet50_mlp,256,5238.43,48.86,256,26.65,7.05,16.25\nrdnet_tiny,224,5230.54,48.933,256,23.86,5.06,15.98\nnextvit_small,224,5230.17,48.937,256,31.76,5.81,18.44\nnfnet_f0,192,5229.68,48.94,256,71.49,7.21,10.16\nefficientnet_b3_pruned,300,5219.72,49.033,256,9.86,1.04,11.86\ncs3darknet_focus_x,256,5215.35,49.076,256,35.02,8.03,10.69\nresnet101,224,5204.99,49.174,256,44.55,7.83,16.23\npoolformer_s24,224,5178.8,49.423,256,21.39,3.41,10.68\ndm_nfnet_f0,192,5176.64,49.442,256,71.49,7.21,10.16\nfastvit_s12,256,5169.44,49.511,256,9.47,1.82,13.67\nxcit_tiny_12_p16_384,384,5122.18,49.969,256,6.72,3.64,18.26\nefficientnet_lite3,300,5121.62,49.975,256,8.2,1.65,21.85\nfastvit_sa12,256,5118.51,50.004,256,11.58,1.96,14.03\nfocalnet_tiny_lrf,224,5112.56,50.063,256,28.65,4.49,17.76\nseresnext26t_32x4d,288,5093.87,50.247,256,16.81,4.46,16.68\ndarknetaa53,256,5089.88,50.286,256,36.02,7.97,12.39\ncs3sedarknet_l,288,5080.41,50.38,256,21.91,6.16,10.83\ntf_efficientnet_lite3,300,5063.4,50.55,256,8.2,1.65,21.85\nhrnet_w18,224,5061.86,50.564,256,21.3,4.32,16.31\nseresnext26d_32x4d,288,5057.03,50.614,256,16.81,4.51,16.85\ncs3darknet_x,256,5045.79,50.726,256,35.05,8.38,11.35\nresnet101c,224,5044.49,50.739,256,44.57,8.08,17.04\nswin_tiny_patch4_window7_224,224,5039.45,50.789,256,28.29,4.51,17.06\nmaxvit_pico_rw_256,256,4998.32,51.207,256,7.46,1.83,22.3\necaresnetlight,288,4963.55,51.567,256,30.16,6.79,13.91\nmaxvit_rmlp_pico_rw_256,256,4963.22,51.568,256,7.52,1.85,24.86\neca_nfnet_l0,288,4962.52,51.577,256,24.14,7.12,17.29\nresnet50,288,4962.25,51.58,256,25.56,6.8,18.37\nresnet101d,224,4941.95,51.792,256,44.57,8.08,17.04\nmobilenetv4_hybrid_medium,384,4939.03,51.822,256,11.07,3.01,21.18\nmobileone_s4,224,4931.74,51.899,256,14.95,3.04,17.74\n
nfnet_l0,288,4931.09,51.906,256,35.07,7.13,17.29\nskresnext50_32x4d,224,4919.18,52.031,256,27.48,4.5,17.18\nvit_medium_patch16_gap_256,256,4903.6,52.197,256,38.86,10.59,22.15\ncoatnet_bn_0_rw_224,224,4887.13,52.371,256,27.44,4.67,22.04\ndpn68b,288,4849.7,52.777,256,12.61,3.89,17.3\nmobilenetv3_large_150d,320,4830.54,52.987,256,14.62,1.61,19.29\nvit_base_resnet50d_224,224,4826.53,53.03,256,110.97,8.73,16.92\necaresnet26t,320,4809.11,53.223,256,16.01,5.24,16.44\nvgg13,224,4804.86,53.27,256,133.05,11.31,12.25\nvgg13_bn,224,4801.12,53.312,256,133.05,11.33,12.25\nrepvgg_b1,224,4794.12,53.389,256,57.42,13.16,10.64\nhalonet50ts,256,4780.36,53.542,256,22.73,5.3,19.2\ncoatnet_rmlp_nano_rw_224,224,4765.2,53.712,256,15.15,2.62,20.34\ngcresnext50ts,288,4747.63,53.91,256,15.67,4.75,19.57\nlambda_resnet50ts,256,4746.66,53.923,256,21.54,5.07,17.48\nswinv2_cr_tiny_224,224,4745.98,53.93,256,28.33,4.66,28.45\nefficientnet_cc_b1_8e,240,4739.38,54.005,256,39.72,0.75,15.44\necaresnet50t,256,4722.81,54.195,256,25.57,5.64,15.45\necaresnet101d_pruned,288,4706.4,54.383,256,24.88,5.75,12.71\npvt_v2_b2,224,4697.71,54.485,256,25.36,4.05,27.53\nefficientnetv2_rw_t,288,4690.17,54.572,256,13.65,3.19,16.42\nefficientnet_b1,288,4690.13,54.573,256,7.79,0.97,15.46\nswinv2_cr_tiny_ns_224,224,4689.94,54.575,256,28.33,4.66,28.45\nvit_medium_patch16_reg1_gap_256,256,4686.95,54.609,256,38.88,10.63,22.26\ngcvit_xtiny,224,4677.16,54.722,256,19.98,2.93,20.26\nnf_resnet101,224,4669.02,54.819,256,44.55,8.01,16.23\nregnety_040_sgn,224,4665.34,54.863,256,20.65,4.03,12.29\nlamhalobotnet50ts_256,256,4659.1,54.936,256,22.57,5.02,18.44\nvit_medium_patch16_reg4_gap_256,256,4651.05,55.032,256,38.88,10.76,22.6\nresnet50t,288,4649.07,55.055,256,25.57,7.14,19.53\ndla102x,224,4635.33,55.219,256,26.31,5.89,19.42\nhiera_small_224,224,4619.55,55.408,256,35.01,6.42,20.75\nresnet50d,288,4619.02,55.413,256,25.58,7.19,19.7\nwide_resnet50_2,224,4612.63,55.49,256,68.88,11.43,14.4\nresnetv2_101,224,4600.01,55.642,256,44.54,7.83,16.
23\nresnet101_clip_gap,224,4595.55,55.696,256,42.52,9.11,17.56\ntf_efficientnet_b2,260,4594.48,55.709,256,9.11,1.02,13.83\nresnetaa101d,224,4591.39,55.747,256,44.57,9.12,17.56\ntf_mixnet_l,224,4570.85,55.997,256,7.33,0.58,10.84\nefficientvit_l2,224,4564.31,56.076,256,63.71,6.97,19.58\ntf_efficientnetv2_b3,300,4554.51,56.198,256,14.36,3.04,15.74\nresnetv2_50,288,4548.51,56.273,256,25.55,6.79,18.37\nmixnet_l,224,4545.22,56.312,256,7.33,0.58,10.84\nresnet101s,224,4541.57,56.358,256,44.67,9.19,18.64\ngcresnet50t,288,4540.87,56.364,256,25.9,6.86,18.57\nmvitv2_tiny,224,4518.92,56.64,256,24.17,4.7,21.16\ncait_xxs24_224,224,4502.7,56.839,256,11.96,2.53,20.29\nresnet51q,288,4496.23,56.927,256,35.7,8.07,20.94\nnf_resnet50,288,4484.43,57.077,256,25.56,6.88,18.37\nresnest50d,224,4484.41,57.077,256,27.48,5.4,14.36\nnf_regnet_b4,320,4455.95,57.441,256,30.21,3.29,19.88\ncrossvit_18_240,240,4445.99,57.569,256,43.27,9.05,26.26\nresnetblur101d,224,4442.61,57.614,256,44.57,9.12,17.94\nefficientnet_b2,288,4434.83,57.715,256,9.11,1.12,16.2\nresnext101_32x4d,224,4430.11,57.776,256,44.18,8.01,21.23\nhalo2botnet50ts_256,256,4424.96,57.843,256,22.64,5.02,21.78\nbotnet50ts_256,256,4416.09,57.959,256,22.74,5.54,22.23\nvitamin_small_224,224,4405.81,58.094,256,22.03,5.92,26.38\nresnetv2_101d,224,4395.97,58.225,256,44.56,8.07,17.04\nresnetaa50,288,4392.86,58.267,256,25.56,8.52,19.24\nnf_ecaresnet101,224,4388.13,58.329,256,44.55,8.01,16.27\nresnet101_clip,224,4373.65,58.522,256,56.26,9.81,18.08\nnf_seresnet101,224,4356.81,58.747,256,49.33,8.02,16.27\nhieradet_small,256,4349.43,58.848,256,34.72,8.51,27.76\nmobilevitv2_150,256,4334.6,59.05,256,10.59,4.09,24.11\nrexnetr_300,224,4324.73,59.184,256,34.81,3.39,22.16\ntf_efficientnet_cc_b1_8e,240,4322.38,59.217,256,39.72,0.75,15.44\ntresnet_v2_l,224,4301.4,59.505,256,46.17,8.85,16.34\nswin_s3_tiny_224,224,4290.26,59.66,256,28.33,4.64,19.13\nresnext101_32x8d,176,4284.27,59.743,256,88.79,10.33,19.37\nres2net50_26w_6s,224,4277.02,59.844,256,37.05,6.33,15.2
8\nese_vovnet39b,288,4248.82,60.241,256,24.57,11.71,11.13\nlegacy_seresnet101,224,4227.93,60.54,256,49.33,7.61,15.74\nresnet50_gn,288,4222.36,60.62,256,25.56,6.85,18.37\ncs3sedarknet_x,256,4215.77,60.715,256,35.4,8.38,11.35\nwide_resnet101_2,176,4209.79,60.801,256,126.89,14.31,13.18\nfbnetv3_g,288,4208.81,60.814,256,16.62,1.77,21.09\ncrossvit_18_dagger_240,240,4205.84,60.858,256,44.27,9.5,27.03\nmaxxvit_rmlp_nano_rw_256,256,4202.69,60.903,256,16.78,4.37,26.05\nvit_base_patch32_384,384,4178.56,61.256,256,88.3,13.06,16.5\nvit_base_patch32_clip_384,384,4178.4,61.258,256,88.3,13.06,16.5\ntwins_pcpvt_base,224,4172.72,61.341,256,43.83,6.68,25.25\nresnetblur50,288,4164.2,61.467,256,25.56,8.52,19.87\nefficientvit_b2,288,4145.81,61.737,256,24.33,2.64,24.03\nresnet61q,288,4130.49,61.968,256,36.85,9.87,21.52\ndarknet53,288,4124.09,62.065,256,41.61,11.78,15.68\nresnext50_32x4d,288,4120.75,62.115,256,25.03,7.04,23.81\ncoatnet_rmlp_0_rw_224,224,4120.51,62.117,256,27.45,4.72,24.89\npoolformerv2_s36,224,4120.18,62.123,256,30.79,5.01,15.82\nresnetaa50d,288,4119.71,62.13,256,25.58,8.92,20.57\nseresnet101,224,4119.53,62.132,256,49.33,7.84,16.27\ncs3edgenet_x,256,4107.9,62.309,256,47.82,11.53,12.92\ncspdarknet53,256,4106.02,62.337,256,27.64,6.57,16.81\nvolo_d1_224,224,4101.98,62.399,256,26.63,6.94,24.43\nconvnext_tiny_hnf,288,4094.49,62.513,256,28.59,7.39,22.21\nconvnext_tiny,288,4073.96,62.828,256,28.59,7.39,22.21\nhrnet_w32,224,4056.25,63.101,256,41.23,8.97,22.02\nregnetx_080,224,4050.3,63.196,256,39.57,8.02,14.06\nnextvit_base,224,4038.08,63.385,256,44.82,8.29,23.71\npit_b_distilled_224,224,4034.42,63.444,256,74.79,12.5,33.07\npit_b_224,224,4022.18,63.637,256,73.76,12.42,32.94\nfastvit_mci0,256,4004.83,63.911,256,11.41,2.42,18.29\nconvnext_small,224,3998.3,64.017,256,50.22,8.71,21.56\ndarknetaa53,288,3994.45,64.079,256,36.02,10.08,15.68\ninception_next_small,224,3986.65,64.204,256,49.37,8.36,19.27\necaresnet101d,224,3983.05,64.261,256,44.57,8.08,17.07\ncoat_lite_small,224,3969.68,64
.477,256,19.84,3.96,22.09\nregnetz_c16,256,3947.47,64.841,256,13.46,2.51,16.57\nregnetx_064,224,3944.55,64.89,256,26.21,6.49,16.37\ncs3sedarknet_xdw,256,3941.85,64.932,256,21.6,5.97,17.18\ncs3darknet_x,288,3934.32,65.058,256,35.05,10.6,14.36\nregnetz_b16,288,3931.2,65.11,256,9.72,2.39,16.43\nrexnetr_200,288,3927.78,65.166,256,16.52,2.62,24.96\nresnetblur50d,288,3921.99,65.263,256,25.58,8.92,21.19\nresmlp_36_224,224,3921.81,65.266,256,44.69,8.91,16.33\npvt_v2_b2_li,224,3910.42,65.455,256,22.55,3.91,27.6\nmaxxvitv2_nano_rw_256,256,3888.05,65.832,256,23.7,6.26,23.05\nresnext50d_32x4d,288,3885.38,65.879,256,25.05,7.44,25.13\nvit_large_patch32_224,224,3882.7,65.923,256,305.51,15.39,13.3\nseresnet50,288,3880.91,65.955,256,28.09,6.8,18.39\nres2net101_26w_4s,224,3875.61,66.043,256,45.21,8.1,18.45\nregnetz_c16_evos,256,3866.46,66.2,256,13.49,2.48,16.57\nrepvit_m2_3,224,3852.0,66.448,256,23.69,4.57,26.21\nxcit_tiny_12_p8_224,224,3851.28,66.461,256,6.71,4.81,23.6\ndensenet121,288,3850.05,66.481,256,7.98,4.74,11.41\nvgg16_bn,224,3843.29,66.6,256,138.37,15.5,13.56\nresnet101d,256,3841.99,66.622,256,44.57,10.55,22.25\nmixer_b16_224,224,3841.01,66.64,256,59.88,12.62,14.53\nregnetz_b16_evos,288,3840.11,66.654,256,9.74,2.36,16.43\nvgg16,224,3839.88,66.659,256,138.36,15.47,13.56\nvit_medium_patch16_rope_reg1_gap_256,256,3839.28,66.669,256,38.74,10.63,22.26\nmobilenetv4_conv_large,384,3820.17,67.003,256,32.59,6.43,27.31\nconvnextv2_tiny,288,3792.59,67.49,256,28.64,7.39,22.21\nrexnet_300,224,3771.96,67.859,256,34.71,3.44,22.4\nconvnextv2_small,224,3766.94,67.949,256,50.32,8.71,21.56\ndensenet201,224,3748.64,68.28,256,20.01,4.34,7.85\nres2net101d,224,3734.18,68.545,256,45.23,8.35,19.25\nhgnetv2_b5,288,3726.11,68.693,256,39.57,10.84,18.5\nnest_tiny,224,3705.24,69.082,256,17.06,5.83,25.48\nefficientnetv2_s,288,3702.94,69.124,256,21.46,4.75,20.13\necaresnet50t,288,3690.88,69.35,256,25.57,7.14,19.55\nseresnet50t,288,3690.42,69.359,256,28.1,7.14,19.55\nedgenext_base,320,3685.34,69.455,256,18
.51,6.01,24.32\ncoatnet_0_224,224,3684.19,69.475,256,25.04,4.58,24.01\nconvit_small,224,3679.62,69.563,256,27.78,5.76,17.87\necaresnet50d,288,3676.55,69.621,256,25.58,7.19,19.72\nswinv2_tiny_window8_256,256,3670.64,69.733,256,28.35,5.96,24.57\nnest_tiny_jx,224,3669.11,69.761,256,17.06,5.83,25.48\neca_nfnet_l1,256,3668.26,69.777,256,41.41,9.62,22.04\nefficientvit_b3,224,3667.52,69.791,256,48.65,3.99,26.9\nmobilevitv2_175,256,3660.41,69.927,256,14.25,5.54,28.13\nresnet152,224,3646.55,70.193,256,60.19,11.56,22.56\nseresnext101_32x4d,224,3626.8,70.575,256,48.96,8.02,21.26\nlegacy_seresnext101_32x4d,224,3621.17,70.685,256,48.96,8.02,21.26\ninception_v4,299,3620.42,70.7,256,42.68,12.28,15.09\nxcit_small_24_p16_224,224,3616.94,70.768,256,47.67,9.1,23.64\ntresnet_l,224,3593.17,71.235,256,55.99,10.9,11.9\nresnet152c,224,3589.3,71.313,256,60.21,11.8,23.36\ndla169,224,3570.86,71.681,256,53.39,11.6,20.2\ndensenetblur121d,288,3562.85,71.841,256,8.0,5.14,13.06\ntnt_s_patch16_224,224,3559.68,71.906,256,23.76,5.24,24.37\nresnetv2_101x1_bit,224,3551.18,72.078,256,44.54,8.04,16.23\nconvnextv2_nano,384,3546.15,72.18,256,15.62,7.22,24.61\nrdnet_small,224,3539.46,72.314,256,50.44,8.74,22.55\nresnet152d,224,3528.53,72.542,256,60.21,11.8,23.36\nregnetv_040,288,3523.74,72.639,256,20.64,6.6,20.3\nefficientvit_l2,256,3518.25,72.753,256,63.71,9.09,25.49\nefficientnetv2_rw_s,288,3505.36,73.02,256,23.94,4.91,21.41\nvit_small_resnet50d_s16_224,224,3504.26,73.043,256,57.53,13.48,24.82\nmaxvit_nano_rw_256,256,3501.58,73.099,256,15.45,4.46,30.28\nvit_small_patch16_18x2_224,224,3500.73,73.117,256,64.67,13.71,35.69\nmaxvit_rmlp_nano_rw_256,256,3494.64,73.244,256,15.5,4.47,31.92\nres2net50_26w_8s,224,3491.2,73.317,256,48.4,8.37,17.95\nhgnet_small,288,3466.9,73.83,256,24.36,14.09,14.53\ncoatnet_rmlp_1_rw_224,224,3457.73,74.026,256,41.69,7.85,35.47\nmobilenetv4_hybrid_medium,448,3448.71,74.22,256,11.07,4.2,29.64\nresnest50d_4s2x40d,224,3439.65,74.416,256,30.42,4.4,17.94\nfocalnet_small_srf,224,3422.65,7
4.785,256,49.89,8.62,26.26\nregnety_040,288,3396.57,75.36,256,20.65,6.61,20.3\npoolformer_s36,224,3394.32,75.41,256,30.86,5.0,15.82\nmvitv2_small,224,3388.47,75.541,256,34.87,7.0,28.08\ndavit_small,224,3382.96,75.663,256,49.75,8.8,30.49\nese_vovnet99b,224,3373.76,75.869,256,63.2,16.51,11.27\nmixer_l32_224,224,3364.3,76.082,256,206.94,11.27,19.86\nvit_base_patch16_siglip_gap_224,224,3352.55,76.35,256,85.8,17.49,23.75\nvit_base_patch16_xp_224,224,3348.63,76.439,256,86.51,17.56,23.9\nvit_base_patch16_224_miil,224,3348.42,76.443,256,94.4,17.59,23.91\nseresnext50_32x4d,288,3348.33,76.445,256,27.56,7.04,23.82\nseresnetaa50d,288,3347.65,76.461,256,28.11,8.92,20.59\nvit_betwixt_patch16_reg1_gap_256,256,3346.86,76.479,256,60.4,16.32,27.83\nvit_base_patch16_224,224,3345.67,76.508,256,86.57,17.58,23.9\nconvformer_s36,224,3344.88,76.523,256,40.01,7.67,30.5\ndeit_base_patch16_224,224,3344.81,76.527,256,86.57,17.58,23.9\nvit_base_patch16_clip_quickgelu_224,224,3343.2,76.564,256,86.19,17.58,23.9\ndeit3_base_patch16_224,224,3342.68,76.576,256,86.59,17.58,23.9\nvit_base_patch16_clip_224,224,3342.03,76.591,256,86.57,17.58,23.9\npvt_v2_b3,224,3337.39,76.696,256,45.24,6.92,37.7\nhiera_small_abswin_256,256,3333.92,76.777,256,34.36,8.29,26.38\ncaformer_s36,224,3329.65,76.872,256,39.3,8.0,37.53\nefficientnet_b3,288,3326.65,76.944,256,12.23,1.63,21.49\nvit_base_patch16_siglip_224,224,3320.61,77.085,256,92.88,17.73,24.06\nvit_betwixt_patch16_reg4_gap_256,256,3319.25,77.116,256,60.4,16.52,28.24\nresnet152s,224,3317.78,77.15,256,60.32,12.92,24.96\ncs3se_edgenet_x,256,3309.24,77.349,256,50.72,11.53,12.94\nvit_base_patch16_gap_224,224,3303.26,77.489,256,86.57,17.49,25.59\nvit_small_patch16_36x1_224,224,3302.19,77.513,256,64.67,13.71,35.69\ndeit_base_distilled_patch16_224,224,3300.25,77.56,256,87.34,17.68,24.05\ncs3sedarknet_x,288,3293.75,77.714,256,35.4,10.6,14.37\nvit_relpos_base_patch16_224,224,3288.47,77.838,256,86.43,17.51,24.97\nnextvit_large,224,3287.18,77.867,256,57.87,10.78,28.99\nvit_b
ase_mci_224,224,3282.25,77.986,256,86.35,17.73,24.65\nregnetv_064,224,3266.81,78.353,256,30.58,6.39,16.41\nresnetv2_50d_gn,288,3266.39,78.364,256,25.57,7.24,19.7\nvit_relpos_base_patch16_clsgap_224,224,3264.17,78.417,256,86.43,17.6,25.12\nrepvgg_b2,224,3260.0,78.517,256,89.02,20.45,12.9\nbeit_base_patch16_224,224,3259.83,78.519,256,86.53,17.58,23.9\nvit_relpos_base_patch16_cls_224,224,3259.47,78.529,256,86.43,17.6,25.12\nrepvgg_b2g4,224,3254.26,78.656,256,61.76,12.63,12.9\nresnetv2_50d_evos,288,3251.16,78.731,256,25.59,7.15,19.7\nregnety_080,224,3246.07,78.854,256,39.18,8.0,17.97\nbeitv2_base_patch16_224,224,3245.71,78.862,256,86.53,17.58,23.9\nregnety_064,224,3237.99,79.05,256,30.58,6.39,16.41\nsequencer2d_s,224,3229.54,79.258,256,27.65,4.96,11.31\ncs3edgenet_x,288,3219.84,79.497,256,47.82,14.59,16.36\nmaxvit_tiny_rw_224,224,3218.36,79.532,256,29.06,5.11,33.11\ncoatnet_1_rw_224,224,3216.02,79.59,256,41.72,8.04,34.6\nefficientnet_el_pruned,300,3211.46,79.704,256,10.59,8.0,30.7\nefficientnet_el,300,3210.88,79.719,256,10.59,8.0,30.7\nmixnet_xl,224,3208.22,79.783,256,11.9,0.93,14.57\nvgg19,224,3200.7,79.971,256,143.67,19.63,14.86\nvgg19_bn,224,3198.64,80.025,256,143.68,19.66,14.86\nfastvit_sa24,256,3196.7,80.071,256,21.55,3.8,24.32\nlegacy_xception,299,3195.72,80.097,256,22.86,8.4,35.83\ntf_efficientnet_el,300,3194.92,80.117,256,10.59,8.0,30.7\nregnety_032,288,3185.8,80.346,256,19.44,5.29,18.61\nvit_small_patch16_384,384,3183.86,80.395,256,22.2,15.52,50.78\nresnetv2_152,224,3179.36,80.509,256,60.19,11.55,22.56\ndeit3_small_patch16_384,384,3174.6,80.63,256,22.21,15.52,50.78\nhrnet_w30,224,3172.72,80.677,256,37.71,8.15,21.21\nfocalnet_small_lrf,224,3161.39,80.966,256,50.34,8.74,28.61\nswin_small_patch4_window7_224,224,3153.83,81.161,256,49.61,8.77,27.47\ntf_efficientnetv2_s,300,3151.03,81.233,256,21.46,5.35,22.73\nmobilevitv2_200,256,3150.51,81.247,256,18.45,7.22,32.15\nresnet101,288,3147.94,81.312,256,44.55,12.95,26.83\nvit_base_patch32_clip_448,448,3138.59,81.555,256,8
8.34,17.93,23.9\ndpn92,224,3109.73,82.312,256,37.67,6.54,18.21\nhiera_base_224,224,3102.53,82.503,256,51.52,9.4,30.42\nnfnet_f0,256,3101.19,82.539,256,71.49,12.62,18.05\nmobilenetv4_conv_aa_large,384,3093.53,82.743,256,32.59,7.07,32.29\ndm_nfnet_f0,256,3083.56,83.011,256,71.49,12.62,18.05\nresnetv2_152d,224,3076.59,83.198,256,60.2,11.8,23.36\nhrnet_w18_ssld,288,3064.15,83.535,256,21.3,7.14,26.96\ndla102x2,224,3061.68,83.603,256,41.28,9.34,29.91\nnf_regnet_b4,384,3049.71,83.932,256,30.21,4.7,28.61\ngcvit_tiny,224,3042.25,84.138,256,28.22,4.79,29.82\ncait_xxs36_224,224,3031.38,84.438,256,17.3,3.77,30.34\nxception41p,299,3019.53,84.771,256,26.91,9.25,39.86\ndensenet161,224,3006.8,85.129,256,28.68,7.79,11.06\necaresnet50t,320,2995.44,85.452,256,25.57,8.82,24.13\nregnety_080_tv,224,2993.18,85.516,256,39.38,8.51,19.73\nvit_base_patch16_rpn_224,224,2978.25,85.944,256,86.54,17.49,23.75\ntwins_pcpvt_large,224,2977.85,85.958,256,60.99,9.84,35.82\nregnetz_040,256,2959.14,86.499,256,27.12,4.06,24.19\nvit_small_r26_s32_384,384,2948.63,86.81,256,36.47,10.43,29.85\ngmlp_b16_224,224,2948.58,86.812,256,73.08,15.78,30.21\nlegacy_seresnet152,224,2940.44,87.052,256,66.82,11.33,22.08\nregnetz_040_h,256,2937.29,87.143,256,28.94,4.12,24.29\ntwins_svt_base,224,2935.09,87.21,256,56.07,8.59,26.33\nregnetz_d8,256,2918.46,87.707,256,23.37,3.97,23.74\nflexivit_base,240,2911.53,87.916,256,86.59,20.29,28.36\nvit_relpos_base_patch16_rpn_224,224,2909.18,87.987,256,86.41,17.51,24.97\nefficientformer_l7,224,2896.41,88.374,256,82.23,10.17,24.45\nregnetz_d8_evos,256,2895.78,88.394,256,23.46,4.5,24.92\nseresnet152,224,2888.03,88.63,256,66.82,11.57,22.61\nmvitv2_small_cls,224,2869.23,89.212,256,34.87,7.04,28.17\nswinv2_cr_small_224,224,2869.18,89.213,256,49.7,9.07,50.27\ndpn98,224,2866.56,89.294,256,61.57,11.73,25.2\nswinv2_cr_small_ns_224,224,2851.92,89.753,256,49.7,9.08,50.27\ninception_resnet_v2,299,2841.25,90.09,256,55.84,13.18,25.06\nhrnet_w40,224,2841.0,90.097,256,57.56,12.75,25.29\nvit_mediumd_pat
ch16_reg4_gap_256,256,2831.24,90.41,256,64.11,17.87,37.57\nmaxxvit_rmlp_tiny_rw_256,256,2818.8,90.808,256,29.64,6.66,39.76\nregnety_040_sgn,288,2814.17,90.956,256,20.65,6.67,20.3\neva02_base_patch16_clip_224,224,2810.17,91.088,256,86.26,17.62,26.32\nwide_resnet50_2,288,2801.8,91.36,256,68.88,18.89,23.81\nefficientvit_b3,256,2799.72,91.426,256,48.65,5.2,35.01\npoolformerv2_m36,224,2797.01,91.515,256,56.08,8.81,22.02\nresnetv2_101,288,2796.77,91.523,256,44.54,12.94,26.83\nvit_betwixt_patch16_rope_reg4_gap_256,256,2794.77,91.589,256,60.23,16.52,28.24\nresnetaa101d,288,2788.15,91.807,256,44.57,15.07,29.03\nhgnetv2_b6,224,2778.72,92.115,256,75.26,16.88,21.23\nxcit_tiny_24_p16_384,384,2775.49,92.225,256,12.12,6.87,34.29\nmobilenetv4_hybrid_large,384,2771.92,92.344,256,37.76,7.77,34.52\nefficientvit_l2,288,2770.71,92.385,256,63.71,11.51,32.19\nlevit_conv_384_s8,224,2749.96,93.083,256,39.12,9.98,35.86\nresnet152d,256,2742.49,93.336,256,60.21,15.41,30.51\nmobilenetv4_conv_large,448,2739.84,93.426,256,32.59,8.75,37.17\ncoatnet_rmlp_1_rw2_224,224,2704.02,94.662,256,41.72,8.11,40.13\nresnetblur101d,288,2697.04,94.909,256,44.57,15.07,29.65\nrepvgg_b3g4,224,2696.88,94.914,256,83.83,17.89,15.1\nefficientnet_b3,320,2696.04,94.944,256,12.23,2.01,26.52\nconvnext_base,224,2678.52,95.564,256,88.59,15.38,28.75\nregnetx_120,224,2677.22,95.611,256,46.11,12.13,21.37\nresnext101_64x4d,224,2667.58,95.957,256,83.46,15.52,31.21\nlevit_384_s8,224,2646.86,96.708,256,39.12,9.98,35.86\nresnext101_32x8d,224,2646.71,96.714,256,88.79,16.48,31.21\nwide_resnet101_2,224,2643.91,96.816,256,126.89,22.8,21.23\ninception_next_base,224,2636.33,97.093,256,86.67,14.85,25.69\ntf_efficientnet_b3,300,2632.25,97.244,256,12.23,1.87,23.83\nresnet200,224,2631.56,97.27,256,64.67,15.07,32.19\nresnext101_32x4d,288,2614.05,97.922,256,44.18,13.24,35.09\nrexnetr_300,288,2612.58,97.977,256,34.81,5.59,36.61\nvit_base_patch16_siglip_gap_256,256,2590.2,98.824,256,85.84,23.13,33.23\nvit_large_r50_s32_224,224,2566.89,99.721,256,
328.99,19.58,24.41\nvit_base_patch16_siglip_256,256,2564.67,99.808,256,92.93,23.44,33.63\nmaxvit_tiny_tf_224,224,2550.75,100.351,256,30.92,5.6,35.78\nefficientnet_b3_gn,288,2546.16,100.533,256,11.73,1.74,23.35\nregnetz_d32,256,2543.95,100.62,256,27.58,5.98,23.74\nsamvit_base_patch16_224,224,2528.02,101.254,256,86.46,17.54,24.54\neva02_small_patch14_336,336,2526.33,101.323,256,22.13,15.48,54.33\ncrossvit_base_240,240,2521.38,101.521,256,105.03,21.22,36.33\nconvnextv2_base,224,2520.76,101.546,256,88.72,15.38,28.75\nsequencer2d_m,224,2518.88,101.621,256,38.31,6.55,14.26\nregnety_120,224,2511.21,101.932,256,51.82,12.14,21.38\nregnetz_c16,320,2510.69,101.954,256,13.46,3.92,25.88\ncoat_tiny,224,2491.49,102.738,256,5.5,4.35,27.2\nvit_base_patch16_reg4_gap_256,256,2482.98,103.091,256,86.62,23.5,33.89\nseresnet101,288,2460.54,104.032,256,49.33,12.95,26.87\nrepvgg_b3,224,2459.38,104.081,256,123.09,29.16,15.1\nswinv2_tiny_window16_256,256,2451.11,104.433,256,28.35,6.68,39.02\nresnet101d,320,2444.46,104.716,256,44.57,16.48,34.77\nregnetz_c16_evos,320,2443.36,104.763,256,13.49,3.86,25.88\nxception41,299,2423.55,105.62,256,26.97,9.28,39.86\ntresnet_xl,224,2413.65,106.052,256,78.44,15.2,15.34\nefficientnet_lite4,380,2412.2,106.117,256,13.01,4.04,45.66\nconvnext_small,288,2411.61,106.142,256,50.22,14.39,35.65\ncoatnet_1_224,224,2409.54,106.233,256,42.23,8.7,39.0\nhrnet_w48_ssld,224,2404.14,106.471,256,77.47,17.34,28.56\nhrnet_w48,224,2403.21,106.513,256,77.47,17.34,28.56\ntf_efficientnet_lite4,380,2401.87,106.574,256,13.01,4.04,45.66\ncaformer_m36,224,2378.95,107.598,256,56.2,13.29,50.48\necaresnet101d,288,2377.02,107.688,256,44.57,13.35,28.19\nhiera_base_plus_224,224,2375.75,107.745,256,69.9,12.67,37.98\nfastvit_mci1,256,2370.49,107.983,256,21.54,4.72,32.84\nrdnet_base,224,2367.82,108.105,256,87.45,15.4,31.14\nmaxvit_tiny_rw_256,256,2365.28,108.22,256,29.07,6.74,44.35\nresnetrs101,288,2361.51,108.395,256,63.62,13.56,28.53\nconvformer_m36,224,2358.97,108.511,256,57.05,12.89,42.05\n
pvt_v2_b5,224,2356.7,108.615,256,81.96,11.76,50.92\npvt_v2_b4,224,2355.66,108.663,256,62.56,10.14,53.74\nmaxvit_rmlp_tiny_rw_256,256,2354.21,108.731,256,29.15,6.77,46.92\nseresnext101_64x4d,224,2353.54,108.76,256,88.23,15.53,31.25\nxcit_medium_24_p16_224,224,2347.78,109.028,256,84.4,16.13,31.71\nseresnext101_32x8d,224,2341.18,109.335,256,93.57,16.48,31.25\nvit_mediumd_patch16_rope_reg1_gap_256,256,2336.5,109.556,256,63.95,17.65,37.02\nregnetx_160,224,2330.93,109.816,256,54.28,15.99,25.52\nfastvit_sa36,256,2327.07,109.998,256,31.53,5.64,34.61\nvolo_d2_224,224,2323.74,110.155,256,58.68,14.34,41.34\nnest_small,224,2322.48,110.217,256,38.35,10.35,40.04\nconvnext_tiny,384,2310.31,110.798,256,28.59,13.14,39.48\nvit_base_r50_s16_224,224,2309.82,110.82,256,97.89,21.66,35.28\nnest_small_jx,224,2306.72,110.97,256,38.35,10.35,40.04\neca_nfnet_l1,320,2302.23,111.183,256,41.41,14.92,34.42\nhgnet_base,224,2292.28,111.669,256,71.58,25.14,15.47\ndavit_base,224,2283.75,112.086,256,87.95,15.51,40.66\nmvitv2_base,224,2283.18,112.113,256,51.47,10.16,40.5\nseresnext101d_32x8d,224,2279.47,112.295,256,93.59,16.72,32.05\nvit_base_patch16_plus_240,240,2273.79,112.577,256,117.56,27.41,33.08\npoolformer_m36,224,2266.85,112.921,256,56.17,8.8,22.02\nvit_small_patch8_224,224,2262.19,113.154,256,21.67,22.44,80.84\nfocalnet_base_srf,224,2247.74,113.88,256,88.15,15.28,35.01\nhiera_base_abswin_256,256,2242.95,114.124,256,51.27,12.46,40.7\nvit_relpos_base_patch16_plus_240,240,2237.99,114.378,256,117.38,27.3,34.33\nresnest101e,256,2234.57,114.552,256,48.28,13.38,28.66\nmobilenetv4_conv_aa_large,448,2229.25,114.827,256,32.59,9.63,43.94\nxcit_small_12_p16_384,384,2221.32,115.235,256,26.25,14.14,36.51\nxception65p,299,2220.07,115.3,256,39.82,13.91,52.48\ncait_s24_224,224,2217.42,115.437,256,46.92,9.35,40.58\nresnet152,288,2213.91,115.622,256,60.19,19.11,37.28\nswinv2_small_window8_256,256,2213.69,115.633,256,49.73,11.58,40.14\nefficientnet_b3_g8_gn,288,2210.95,115.777,256,14.25,2.59,23.35\nconvformer_s18
,384,2192.78,116.736,256,26.77,11.63,46.49\nswinv2_cr_small_ns_256,256,2188.78,116.95,256,49.7,12.07,76.21\nswin_base_patch4_window7_224,224,2187.06,117.041,256,87.77,15.47,36.63\nefficientvit_b3,288,2171.71,117.869,256,48.65,6.58,44.2\nseresnextaa101d_32x8d,224,2165.9,118.185,256,93.59,17.25,34.16\nconvnextv2_tiny,384,2148.79,119.127,256,28.64,13.14,39.48\nseresnet152d,256,2148.49,119.143,256,66.84,15.42,30.56\nresnet50x4_clip_gap,288,2144.6,119.359,256,65.62,19.57,34.11\ncaformer_s18,384,2141.55,119.527,256,26.34,13.42,77.34\nresnetrs152,256,2135.82,119.85,256,86.62,15.59,30.83\nvit_base_patch16_rope_reg1_gap_256,256,2133.64,119.972,256,86.43,23.22,33.39\nswinv2_base_window12_192,192,2130.04,120.175,256,109.28,11.9,39.72\nseresnext101_32x4d,288,2120.08,120.739,256,48.96,13.25,35.12\neva02_base_patch14_224,224,2117.73,120.874,256,85.76,23.22,36.55\npoolformerv2_m48,224,2116.33,120.954,256,73.35,11.59,29.17\nfocalnet_base_lrf,224,2108.01,121.429,256,88.75,15.43,38.13\ndm_nfnet_f1,224,2107.34,121.469,256,132.63,17.87,22.94\ncs3se_edgenet_x,320,2106.36,121.525,256,50.72,18.01,20.21\nregnety_160,224,2105.14,121.597,256,83.59,15.96,23.04\nnfnet_f1,224,2090.25,122.462,256,132.63,17.87,22.94\nvit_medium_patch16_gap_384,384,2078.7,123.143,256,39.03,26.08,67.54\ndpn131,224,2074.97,123.363,256,79.25,16.09,32.97\nefficientnetv2_s,384,2071.25,123.585,256,21.46,8.44,35.77\nswin_s3_small_224,224,2068.44,123.754,256,49.74,9.43,37.84\nhrnet_w44,224,2065.62,123.922,256,67.06,14.94,26.92\nconvnext_base,256,2065.1,123.954,256,88.59,20.09,37.55\ncoat_lite_medium,224,2064.62,123.979,256,44.57,9.81,40.06\nefficientnet_b3_gn,320,2056.56,124.469,256,11.73,2.14,28.83\nmixnet_xxl,224,2051.52,124.774,256,23.96,2.04,23.43\nnf_regnet_b5,384,2048.92,124.932,256,49.74,7.95,42.9\nresnet50x4_clip,288,2033.61,125.874,256,87.14,21.35,35.27\nmaxvit_rmlp_small_rw_224,224,2024.79,126.421,256,64.9,10.75,49.3\nefficientnet_b4,320,2021.3,126.64,256,19.34,3.13,34.76\nxcit_tiny_24_p8_224,224,2021.23,126.644
,256,12.11,9.21,45.39\nswinv2_cr_base_224,224,2018.07,126.843,256,87.88,15.86,59.66\nswinv2_cr_base_ns_224,224,2005.7,127.625,256,87.88,15.86,59.66\ntf_efficientnetv2_s,384,1994.9,128.316,256,21.46,8.44,35.77\nregnetv_064,288,1990.64,128.591,256,30.58,10.55,27.11\ntresnet_m,448,1981.93,129.156,256,31.39,22.99,29.21\nxcit_nano_12_p8_384,384,1964.01,130.335,256,3.05,6.34,46.08\nefficientnetv2_rw_s,384,1962.9,130.408,256,23.94,8.72,38.03\nresnet200d,256,1962.4,130.442,256,64.69,20.0,43.09\ntwins_svt_large,224,1960.0,130.601,256,99.27,15.15,35.1\ncrossvit_15_dagger_408,408,1958.74,130.685,256,28.5,21.45,95.05\nmvitv2_base_cls,224,1952.99,131.07,256,65.44,10.23,40.65\nmobilenetv4_hybrid_large,448,1943.33,131.721,256,37.76,10.74,48.61\ntnt_b_patch16_224,224,1941.85,131.823,256,65.41,14.09,39.01\nmobilenetv4_conv_aa_large,480,1934.33,132.335,256,32.59,11.05,50.45\ngcvit_small,224,1930.43,132.601,256,51.09,8.57,41.61\nregnety_064,288,1927.43,132.808,256,30.58,10.56,27.11\nhalonet_h1,256,1922.3,133.164,256,8.1,3.0,51.17\nregnety_080,288,1921.98,133.184,256,39.18,13.22,29.69\nconvit_base,224,1918.39,133.435,256,86.54,17.52,31.77\nmaxvit_tiny_pm_256,256,1916.11,133.593,256,30.09,6.61,47.9\ncoat_mini,224,1897.57,134.897,256,10.34,6.82,33.68\nfastvit_ma36,256,1894.76,135.098,256,44.07,7.88,41.09\nregnetz_040,320,1893.7,135.174,256,27.12,6.35,37.78\nregnetz_040_h,320,1880.89,136.095,256,28.94,6.43,37.94\nmobilevitv2_150,384,1864.53,137.289,256,10.59,9.2,54.25\nregnetz_d8,320,1863.04,137.398,256,23.37,6.19,37.08\nregnetz_d8_evos,320,1832.93,139.657,256,23.46,7.03,38.92\ndpn107,224,1826.37,140.156,256,86.92,18.38,33.46\nvitamin_base_224,224,1815.08,141.031,256,87.72,22.68,52.77\nefficientnet_b3_g8_gn,320,1805.58,141.771,256,14.25,3.2,28.83\nhrnet_w64,224,1800.18,142.196,256,128.06,28.97,35.09\ncoatnet_2_rw_224,224,1781.07,143.723,256,73.87,15.09,49.22\nxception65,299,1774.28,144.273,256,39.92,13.96,52.48\nfastvit_mci2,256,1766.58,144.901,256,35.82,7.91,43.34\nmaxxvit_rmlp_small_rw_
256,256,1763.23,145.178,256,66.01,14.67,58.38\nnextvit_small,384,1756.58,145.726,256,31.76,17.26,57.14\nefficientnetv2_m,320,1752.86,146.036,256,54.14,11.01,39.97\ncoatnet_rmlp_2_rw_224,224,1741.02,147.029,256,73.88,15.18,54.78\nresnet152d,320,1737.66,147.314,256,60.21,24.08,47.67\nefficientvit_l3,224,1715.62,149.206,256,246.04,27.62,39.16\ntiny_vit_21m_384,384,1715.38,149.227,256,21.23,13.77,77.83\nseresnet152,288,1715.22,149.241,256,66.82,19.11,37.34\nlevit_conv_512_s8,224,1706.1,150.039,256,74.05,21.82,52.28\nxcit_small_12_p8_224,224,1700.59,150.524,256,26.21,18.69,47.21\npoolformer_m48,224,1686.87,151.749,256,73.47,11.59,29.17\nvolo_d3_224,224,1676.56,152.682,256,86.33,20.78,60.09\nlevit_512_s8,224,1666.95,153.563,256,74.05,21.82,52.28\ncaformer_b36,224,1666.61,153.592,256,98.75,23.22,67.3\ncoatnet_2_224,224,1651.34,155.015,256,74.68,16.5,52.67\nconvformer_b36,224,1649.96,155.143,256,99.88,22.69,56.06\nmaxvit_small_tf_224,224,1638.94,156.187,256,68.93,11.66,53.17\nsequencer2d_l,224,1635.78,156.489,256,54.3,9.74,22.12\nswin_s3_base_224,224,1634.71,156.59,256,71.13,13.69,48.26\nregnetz_e8,256,1630.84,156.963,256,57.7,9.91,40.94\nnest_base,224,1627.71,157.266,256,67.72,17.96,53.39\nhgnetv2_b6,288,1622.05,157.812,256,75.26,27.9,35.09\nconvnext_base,288,1617.77,158.232,256,88.59,25.43,47.53\neca_nfnet_l2,320,1615.7,158.43,256,56.72,20.95,47.43\nnest_base_jx,224,1615.45,158.458,256,67.72,17.96,53.39\nconvmixer_768_32,224,1615.29,158.475,256,21.11,19.55,25.95\nresnext101_64x4d,288,1613.61,158.64,256,83.46,25.66,51.59\nregnetz_d32,320,1612.01,158.797,256,27.58,9.33,37.08\nvit_so150m_patch16_reg4_gap_256,256,1607.89,159.204,256,134.13,36.75,53.21\nvit_so150m_patch16_reg4_map_256,256,1592.32,160.761,256,141.48,37.18,53.68\ndensenet264d,224,1591.16,160.877,256,72.74,13.57,14.0\nmobilevitv2_175,384,1573.85,162.648,256,14.25,12.47,63.29\nresnet200,288,1570.42,163.003,256,64.67,24.91,53.21\nefficientvit_l2,384,1551.96,164.941,256,63.71,20.45,57.01\nregnety_120,288,1541.43,166
.07,256,51.82,20.06,35.34\nswinv2_base_window8_256,256,1538.74,166.358,256,87.92,20.37,52.59\nconvnextv2_base,288,1528.58,167.465,256,88.72,25.43,47.53\necaresnet200d,256,1516.5,168.798,256,64.69,20.0,43.15\nefficientnetv2_rw_m,320,1516.44,168.804,256,53.24,12.72,47.14\nseresnet200d,256,1510.21,169.5,256,71.86,20.01,43.15\nresnetrs200,256,1506.62,169.905,256,93.21,20.18,43.42\nmaxvit_rmlp_small_rw_256,256,1505.28,170.056,256,64.9,14.15,66.09\nmobilenetv4_conv_aa_large,544,1502.9,170.324,256,32.59,14.19,64.79\nmaxxvitv2_rmlp_base_rw_224,224,1492.62,171.499,256,116.09,24.2,62.77\ncoat_small,224,1479.25,173.047,256,21.69,12.61,44.25\nconvnext_large,224,1474.99,173.55,256,197.77,34.4,43.13\nvit_betwixt_patch16_reg4_gap_384,384,1474.89,173.562,256,60.6,39.71,85.28\nhrnet_w48_ssld,288,1457.57,175.623,256,77.47,28.66,47.21\nresnext101_32x16d,224,1452.32,176.26,256,194.03,36.27,51.18\nswinv2_small_window16_256,256,1451.76,176.326,256,49.73,12.82,66.29\nsenet154,224,1443.54,177.331,256,115.09,20.77,38.69\nlegacy_senet154,224,1435.62,178.308,256,115.09,20.77,38.69\nresnetv2_50x1_bit,448,1429.57,179.064,256,25.55,16.62,44.46\nnf_regnet_b5,456,1421.4,180.093,256,49.74,11.7,61.95\nseresnext101_32x8d,288,1405.19,182.171,256,93.57,27.24,51.63\nconvnextv2_large,224,1398.77,183.007,256,197.96,34.4,43.13\nefficientnet_b4,384,1396.87,183.256,256,19.34,4.51,50.04\ngcvit_base,224,1394.71,183.536,256,90.32,14.87,55.48\nvolo_d1_384,384,1386.95,184.567,256,26.78,22.75,108.55\nhgnet_base,288,1385.67,184.737,256,71.58,41.55,25.57\nseresnext101d_32x8d,288,1371.24,186.68,256,93.59,27.64,52.95\nconvnext_small,384,1370.68,186.757,256,50.22,25.58,63.37\nxception71,299,1367.4,187.205,256,42.34,18.09,69.92\nvit_large_patch32_384,384,1360.57,188.147,256,306.63,45.31,43.86\nseresnet152d,320,1353.12,189.182,256,66.84,24.09,47.72\ncrossvit_18_dagger_408,408,1350.07,189.608,256,44.61,32.47,124.87\nresnetrs152,320,1348.43,189.84,256,86.62,24.34,48.14\nnextvit_base,384,1347.63,189.95,256,44.82,24.64,73.95
\nrdnet_large,224,1330.13,192.452,256,186.27,34.74,46.67\nefficientvit_l3,256,1327.54,192.828,256,246.04,36.06,50.98\nconvnext_base,320,1319.96,193.934,256,88.59,31.39,58.68\nresnetv2_50x3_bit,224,1315.05,194.657,256,217.32,37.06,33.34\nxcit_tiny_12_p8_384,384,1309.52,195.481,256,6.71,14.13,69.14\nseresnextaa101d_32x8d,288,1303.25,196.419,256,93.59,28.51,56.44\nregnety_160,288,1303.15,196.436,256,83.59,26.37,38.07\ndavit_large,224,1281.18,199.804,256,196.81,34.6,60.99\ntf_efficientnet_b4,380,1278.54,200.216,256,19.34,4.49,49.49\nxcit_large_24_p16_224,224,1277.51,200.379,256,189.1,35.86,47.27\nregnety_320,224,1269.25,201.683,256,145.05,32.34,30.26\nswinv2_large_window12_192,192,1267.31,201.991,256,228.77,26.17,56.53\nvit_mediumd_patch16_reg4_gap_384,384,1254.95,203.981,256,64.27,43.67,113.51\nregnetx_320,224,1253.43,204.228,256,107.81,31.81,36.3\nswin_large_patch4_window7_224,224,1249.84,204.814,256,196.53,34.53,54.94\nswinv2_cr_tiny_384,384,1248.53,205.031,256,28.33,15.34,161.01\nresnet200d,320,1245.29,205.564,256,64.69,31.25,67.33\ndm_nfnet_f2,256,1211.63,211.274,256,193.78,33.76,41.85\nnfnet_f2,256,1207.12,212.064,256,193.78,33.76,41.85\nmixer_l16_224,224,1198.62,213.568,256,208.2,44.6,41.69\nxcit_small_24_p16_384,384,1191.61,214.823,256,47.67,26.72,68.58\ntf_efficientnetv2_m,384,1187.13,215.635,256,54.14,15.85,57.52\necaresnet200d,288,1180.84,216.784,256,64.69,25.31,54.59\nseresnet200d,288,1176.89,217.511,256,71.86,25.32,54.6\nvit_small_patch14_dinov2,518,1176.63,217.56,256,22.06,46.76,198.79\nseresnet269d,256,1174.38,217.976,256,113.67,26.59,53.6\nvit_base_patch16_18x2_224,224,1171.44,218.522,256,256.73,52.51,71.38\nvit_small_patch14_reg4_dinov2,518,1167.53,219.256,256,22.06,46.95,199.77\nswinv2_cr_large_224,224,1159.63,220.749,256,196.68,35.1,78.42\nresnetrs270,256,1150.07,222.584,256,129.86,27.06,55.84\nconvformer_s36,384,1147.41,223.099,256,40.01,22.54,89.62\nconvnext_large_mlp,256,1136.31,225.279,256,200.13,44.94,56.33\neca_nfnet_l2,384,1126.24,227.293,256,5
6.72,30.05,68.28\nmaxvit_rmlp_base_rw_224,224,1122.54,228.041,256,116.14,23.15,92.64\ncaformer_s36,384,1120.64,228.427,256,39.3,26.08,150.33\nvit_base_patch16_siglip_gap_384,384,1108.75,230.879,256,86.09,55.43,101.3\nvit_base_patch16_384,384,1108.32,230.969,256,86.86,55.54,101.56\ndeit_base_patch16_384,384,1106.79,231.289,256,86.86,55.54,101.56\ndeit3_base_patch16_384,384,1106.51,231.346,256,86.88,55.54,101.56\nvit_base_patch16_clip_384,384,1104.66,231.734,256,86.86,55.54,101.56\ndeit_base_distilled_patch16_384,384,1099.37,232.85,256,87.63,55.65,101.82\nvit_base_patch16_siglip_384,384,1099.06,232.916,256,93.18,56.12,102.2\nnextvit_large,384,1093.53,234.092,256,57.87,32.03,90.76\ndm_nfnet_f1,320,1086.93,235.515,256,132.63,35.97,46.77\nnfnet_f1,320,1081.87,236.614,256,132.63,35.97,46.77\nseresnextaa101d_32x8d,320,1057.25,242.126,256,93.59,35.19,69.67\nconvmixer_1024_20_ks9_p14,224,1053.87,242.903,256,24.38,5.55,5.51\nregnetz_e8,320,1043.8,245.246,256,57.7,15.46,63.94\nvit_large_patch16_224,224,1041.62,245.761,256,304.33,61.6,63.52\neva_large_patch14_196,196,1041.47,245.796,256,304.14,61.57,63.52\nbeit_base_patch16_384,384,1041.45,245.799,256,86.74,55.54,101.56\nswinv2_base_window16_256,256,1041.34,245.827,256,87.92,22.02,84.71\ndeit3_large_patch16_224,224,1040.35,246.06,256,304.37,61.6,63.52\nswinv2_base_window12to16_192to256,256,1035.65,247.176,256,87.92,22.02,84.71\nefficientnetv2_m,416,1034.88,247.361,256,54.14,18.6,67.5\neca_nfnet_l3,352,1034.06,247.553,256,72.04,32.57,73.12\nbeit_large_patch16_224,224,1021.33,250.639,256,304.43,61.6,63.52\nmobilevitv2_200,384,1019.97,250.974,256,18.45,16.24,72.34\nbeitv2_large_patch16_224,224,1019.31,251.137,256,304.43,61.6,63.52\nvolo_d4_224,224,972.56,263.212,256,192.96,44.34,80.22\nmaxvit_base_tf_224,224,971.51,263.496,256,119.47,24.04,95.01\nhiera_large_224,224,954.9,268.081,256,213.74,40.34,83.37\nmaxxvitv2_rmlp_large_rw_224,224,954.34,268.236,256,215.42,44.14,87.15\nresnetrs200,320,950.29,269.381,256,93.21,31.51,67.81\nresn
etv2_152x2_bit,224,931.12,274.926,256,236.34,46.95,45.11\nconvnext_xlarge,224,930.04,275.246,256,350.2,60.98,57.5\nseresnet269d,288,924.08,277.021,256,113.67,33.65,67.81\nconvnext_base,384,920.56,278.081,256,88.59,45.21,84.49\nnasnetalarge,331,914.25,279.998,256,88.75,23.89,90.56\nflexivit_large,240,911.71,280.781,256,304.36,70.99,75.39\ninception_next_base,384,899.71,284.524,256,86.67,43.64,75.48\nefficientnetv2_rw_m,416,897.78,285.137,256,53.24,21.49,79.62\nconvnext_large,288,891.34,287.196,256,197.77,56.87,71.29\nxcit_small_24_p8_224,224,885.98,288.934,256,47.63,35.81,90.78\nvit_large_r50_s32_384,384,883.45,289.761,256,329.09,57.43,76.52\ntresnet_l,448,883.01,289.904,256,55.99,43.59,47.56\nconvnextv2_base,384,870.69,294.006,256,88.72,45.21,84.49\nresnetv2_101x1_bit,448,859.26,297.921,256,44.54,31.65,64.93\npnasnet5large,331,852.39,300.32,256,86.06,25.04,92.89\nefficientnet_b5,416,849.66,301.286,256,30.39,8.27,80.68\nconvnextv2_large,288,847.65,301.999,256,197.96,56.87,71.29\nefficientvit_l3,320,845.04,302.933,256,246.04,56.32,79.34\ndavit_huge,224,829.13,308.745,256,348.92,61.23,81.32\nconvformer_m36,384,815.63,313.855,256,57.05,37.87,123.56\ncoatnet_rmlp_3_rw_224,224,808.28,316.71,256,165.15,33.56,79.47\ncoatnet_3_rw_224,224,807.9,316.858,256,181.81,33.44,73.83\nxcit_medium_24_p16_384,384,806.79,317.297,256,84.4,47.39,91.64\nvit_large_patch16_siglip_gap_256,256,806.47,317.422,256,303.36,80.8,88.34\ncaformer_m36,384,804.97,318.01,256,56.2,42.11,196.35\nvit_large_patch16_siglip_256,256,801.91,319.228,256,315.96,81.34,88.88\nrepvgg_d2se,320,798.65,320.531,256,133.33,74.57,46.82\nvit_base_patch8_224,224,795.31,321.875,256,86.58,78.22,161.69\nvolo_d2_384,384,789.01,324.444,256,58.87,46.17,184.51\nvit_large_patch14_clip_quickgelu_224,224,782.99,326.939,256,303.97,81.08,88.79\nvit_large_patch14_xp_224,224,782.94,326.962,256,304.06,81.01,88.79\nvit_large_patch14_224,224,781.86,327.414,256,304.2,81.08,88.79\nvit_large_patch14_clip_224,224,781.23,327.678,256,304.2,81.08,8
8.79\ncoatnet_3_224,224,772.28,331.477,256,166.97,36.56,79.01\nresnest200e,320,765.73,334.308,256,70.2,35.69,82.78\nregnety_160,384,765.62,334.358,256,83.59,46.87,67.67\nvit_base_r50_s16_384,384,764.44,334.876,256,98.95,67.43,135.03\nswinv2_cr_small_384,384,761.16,336.319,256,49.7,29.7,298.03\ntf_efficientnetv2_m,480,760.12,336.778,256,54.14,24.76,89.84\nregnety_640,224,754.37,339.342,256,281.38,64.16,42.5\necaresnet269d,320,742.7,344.674,256,102.09,41.53,83.69\nresnetv2_101x3_bit,224,740.89,345.52,256,387.93,71.23,48.7\nefficientnet_b5,448,731.74,349.841,256,30.39,9.59,93.56\nconvnext_large_mlp,320,724.03,353.565,256,200.13,70.21,88.02\nvitamin_large2_224,224,716.86,357.1,256,333.58,75.05,112.83\nvitamin_large_224,224,716.07,357.492,256,333.32,75.05,112.83\nresnetrs350,288,710.31,360.394,256,163.96,43.67,87.09\nmvitv2_large,224,709.52,360.797,256,217.99,43.87,112.02\ncait_xxs24_384,384,704.92,363.148,256,12.03,9.63,122.66\nxcit_tiny_24_p8_384,384,690.01,370.998,256,12.11,27.05,132.95\nresnet50x16_clip_gap,384,680.63,376.11,256,136.2,70.32,100.64\nmaxvit_large_tf_224,224,679.37,376.81,256,211.79,43.68,127.35\ncoat_lite_medium_384,384,679.13,376.941,256,44.57,28.73,116.7\nefficientnetv2_l,384,676.62,378.337,256,118.52,36.1,101.16\ntiny_vit_21m_512,512,663.92,385.573,256,21.27,27.02,177.93\ntf_efficientnetv2_l,384,663.26,385.959,256,118.52,36.1,101.16\nmaxvit_tiny_tf_384,384,662.03,386.678,256,30.98,17.53,123.42\nresnet50x16_clip,384,652.97,392.041,256,167.33,74.9,103.54\ntf_efficientnet_b5,456,640.6,399.612,256,30.39,10.46,98.86\neca_nfnet_l3,448,638.11,401.174,256,72.04,52.55,118.4\nnfnet_f2,352,631.24,405.539,256,193.78,63.22,79.06\nswinv2_large_window12to16_192to256,256,628.73,407.156,256,196.74,47.81,121.53\ndm_nfnet_f2,352,628.21,407.495,256,193.78,63.22,79.06\nvolo_d5_224,224,617.89,414.298,256,295.46,72.4,118.11\nmvitv2_large_cls,224,615.19,416.117,256,234.58,42.17,111.69\necaresnet269d,352,613.97,416.948,256,102.09,50.25,101.25\neva02_large_patch14_clip_224,2
24,605.15,423.025,256,304.11,81.18,97.2\neva02_large_patch14_224,224,605.1,423.058,256,303.27,81.15,97.2\nvit_so400m_patch14_siglip_gap_224,224,597.92,428.139,256,412.44,109.57,106.13\nxcit_medium_24_p8_224,224,597.46,428.468,256,84.32,63.53,121.23\nvit_so400m_patch14_siglip_224,224,596.3,429.305,256,427.68,110.26,106.73\nvit_base_patch16_siglip_gap_512,512,595.41,429.941,256,86.43,107.0,246.15\nresnetrs270,352,593.7,431.181,256,129.86,51.13,105.48\nvit_base_patch16_siglip_512,512,592.23,432.25,256,93.52,108.22,247.74\ntresnet_xl,448,585.25,437.403,256,78.44,60.77,61.31\nnfnet_f3,320,580.98,440.622,256,254.92,68.77,83.93\ndm_nfnet_f3,320,578.4,442.591,256,254.92,68.77,83.93\nxcit_small_12_p8_384,384,575.87,444.537,256,26.21,54.92,138.29\nconvformer_b36,384,571.54,447.902,256,99.88,66.67,164.75\nconvnext_xlarge,288,564.23,453.705,256,350.2,100.8,95.05\ncaformer_b36,384,564.02,453.867,256,98.75,72.33,261.79\nefficientvit_l3,384,540.41,473.7,256,246.04,81.08,114.02\nswinv2_cr_base_384,384,538.78,475.138,256,87.88,50.57,333.68\nresmlp_big_24_224,224,534.87,478.611,256,129.14,100.23,87.31\nseresnextaa201d_32x8d,320,530.48,482.573,256,149.39,70.22,138.71\nswin_base_patch4_window12_384,384,526.43,486.278,256,87.9,47.19,134.78\nconvnextv2_huge,224,522.5,489.937,256,660.29,115.0,79.07\ncoatnet_4_224,224,516.83,495.312,256,275.43,62.48,129.26\ncait_xs24_384,384,514.59,497.468,256,26.67,19.28,183.98\nresnext101_32x32d,224,508.29,503.635,256,468.53,87.29,91.12\nconvnext_large,384,505.8,506.117,256,197.77,101.1,126.74\nconvnext_large_mlp,384,505.51,506.405,256,200.13,101.11,126.74\neva02_base_patch14_448,448,495.86,516.266,256,87.12,107.11,259.14\nresnetrs420,320,490.22,522.204,256,191.89,64.2,126.56\nconvnextv2_large,384,480.94,532.276,256,197.96,101.1,126.74\nvitamin_large_256,256,476.63,537.095,256,333.38,99.0,154.99\nvitamin_large2_256,256,476.34,537.415,256,333.64,99.0,154.99\nefficientnetv2_xl,384,473.56,540.575,256,208.12,52.81,139.2\ncait_xxs36_384,384,472.1,542.238,256,
17.37,14.35,183.7\nregnety_320,384,471.4,543.046,256,145.05,95.0,88.87\ntf_efficientnetv2_xl,384,467.46,547.633,256,208.12,52.81,139.2\nswinv2_cr_huge_224,224,461.55,554.638,256,657.83,115.97,121.08\nmaxxvitv2_rmlp_base_rw_384,384,460.8,555.548,256,116.09,72.98,213.74\nrdnet_large,384,454.9,562.743,256,186.27,102.09,137.13\nfocalnet_huge_fl3,224,451.28,567.265,256,745.28,118.26,104.8\nxcit_large_24_p16_384,384,445.92,574.081,256,189.1,105.35,137.17\nefficientnetv2_l,480,432.31,592.158,256,118.52,56.4,157.99\nmaxvit_small_tf_384,384,430.69,445.784,192,69.02,35.87,183.65\ntf_efficientnetv2_l,480,425.62,601.47,256,118.52,56.4,157.99\nvit_base_patch14_dinov2,518,422.75,605.542,256,86.58,151.71,397.58\nvit_base_patch14_reg4_dinov2,518,420.52,608.763,256,86.58,152.25,399.53\nhiera_huge_224,224,415.21,616.545,256,672.78,124.85,150.95\nvit_huge_patch14_gap_224,224,405.72,630.968,256,630.76,166.73,138.74\ncoatnet_rmlp_2_rw_384,384,405.05,474.001,192,73.88,47.69,209.43\nvolo_d3_448,448,402.31,636.318,256,86.63,96.33,446.83\nresnetrs350,384,400.15,639.744,256,163.96,77.59,154.74\ncait_s24_384,384,399.34,641.043,256,47.06,32.17,245.31\ndeit3_huge_patch14_224,224,397.79,643.537,256,632.13,167.4,139.41\nvit_huge_patch14_224,224,397.77,643.573,256,630.76,167.4,139.41\nvit_huge_patch14_clip_quickgelu_224,224,397.73,643.637,256,632.08,167.4,139.41\nvit_huge_patch14_clip_224,224,397.45,644.094,256,632.05,167.4,139.41\nvit_huge_patch14_xp_224,224,395.98,646.484,256,631.8,167.3,139.41\nsam2_hiera_tiny,896,388.8,164.6,64,26.85,99.86,384.63\nvitamin_xlarge_256,256,386.93,661.605,256,436.06,130.13,177.37\nregnety_1280,224,379.67,674.264,256,644.81,127.66,71.58\nseresnextaa201d_32x8d,384,366.37,698.744,256,149.39,101.11,199.72\nmaxvit_xlarge_tf_224,224,358.95,713.18,256,506.99,97.52,191.04\nresnest269e,416,358.78,713.517,256,110.93,77.69,171.98\nmaxvit_tiny_tf_512,512,354.93,360.623,128,31.05,33.49,257.59\nefficientnet_b6,528,354.3,722.545,256,43.04,19.4,167.39\nvit_large_patch14_clip_quic
kgelu_336,336,344.63,742.82,256,304.29,191.11,270.24\nvit_large_patch16_siglip_gap_384,384,344.54,743.009,256,303.69,190.85,269.55\nvit_large_patch14_clip_336,336,343.67,744.879,256,304.53,191.11,270.24\nvit_large_patch16_384,384,343.49,745.276,256,304.72,191.21,270.24\neva_large_patch14_336,336,343.16,745.991,256,304.53,191.1,270.24\ndeit3_large_patch16_384,384,342.55,747.334,256,304.76,191.21,270.24\nvit_large_patch16_siglip_384,384,342.3,747.871,256,316.28,192.07,270.75\nnfnet_f3,416,340.62,751.561,256,254.92,115.58,141.78\ndm_nfnet_f3,416,339.53,753.969,256,254.92,115.58,141.78\nvit_giant_patch16_gap_224,224,338.53,756.187,256,1011.37,202.46,139.26\nnfnet_f4,384,335.52,762.977,256,316.07,122.14,147.57\ndm_nfnet_f4,384,332.75,769.343,256,316.07,122.14,147.57\nxcit_large_24_p8_224,224,331.23,772.865,256,188.93,141.23,181.56\ntf_efficientnet_b6,528,330.56,580.814,192,43.04,19.4,167.39\nbeit_large_patch16_384,384,327.97,780.534,256,305.0,191.21,270.24\nsam2_hiera_small,896,325.93,196.351,64,33.95,123.99,442.63\nconvnext_xxlarge,256,322.53,793.713,256,846.47,198.09,124.45\nswinv2_cr_large_384,384,319.63,600.684,192,196.68,108.96,404.96\nconvnext_xlarge,384,319.57,801.065,256,350.2,179.2,168.99\nmaxvit_rmlp_base_rw_384,384,319.22,801.939,256,116.14,70.97,318.95\nconvnextv2_huge,288,317.37,806.616,256,660.29,190.1,130.7\nswin_large_patch4_window12_384,384,316.67,606.299,192,196.74,104.08,202.16\nresnetv2_152x4_bit,224,316.17,809.666,256,936.53,186.9,90.22\nresnetv2_152x2_bit,384,314.26,814.59,256,236.34,136.16,132.56\ndavit_giant,224,312.39,819.474,256,1406.47,192.92,153.06\nxcit_small_24_p8_384,384,302.44,846.436,256,47.63,105.24,265.91\nmaxvit_base_tf_384,384,300.57,425.842,128,119.65,73.8,332.9\nswinv2_base_window12to24_192to384,384,298.98,321.077,96,87.92,55.25,280.36\ncoatnet_5_224,224,295.77,865.513,256,687.47,145.49,194.24\neva02_large_patch14_clip_336,336,288.34,887.823,256,304.43,191.34,289.13\nresnetrs420,416,284.53,899.724,256,191.89,108.45,213.79\nresnetv2_
50x3_bit,448,281.44,682.202,192,217.32,145.7,133.37\nregnety_640,384,272.07,940.925,256,281.38,188.47,124.83\nfocalnet_huge_fl4,224,270.92,944.907,256,686.46,118.9,113.34\nvitamin_large_336,336,270.63,709.44,192,333.57,175.72,307.47\nvitamin_large2_336,336,270.54,709.67,192,333.83,175.72,307.47\nmvitv2_huge_cls,224,270.48,946.469,256,694.8,120.67,243.63\ncait_s36_384,384,267.14,958.275,256,68.37,47.99,367.4\nefficientnetv2_xl,512,266.88,959.22,256,208.12,93.85,247.32\ntf_efficientnetv2_xl,512,263.31,972.209,256,208.12,93.85,247.32\nvit_giant_patch14_224,224,257.22,995.234,256,1012.61,267.18,192.64\nvit_giant_patch14_clip_224,224,256.33,998.705,256,1012.65,267.18,192.64\neva_giant_patch14_224,224,256.2,999.216,256,1012.56,267.18,192.64\neva_giant_patch14_clip_224,224,254.62,1005.406,256,1012.59,267.18,192.64\nresnet50x64_clip_gap,448,235.46,1087.208,256,365.03,253.96,233.22\nnfnet_f5,416,232.61,1100.541,256,377.21,170.71,204.56\nmaxvit_small_tf_512,512,232.28,413.275,96,69.13,67.26,383.77\ndm_nfnet_f5,416,231.56,1105.537,256,377.21,170.71,204.56\nvolo_d4_448,448,231.52,1105.726,256,193.41,197.13,527.35\nresnet50x64_clip,448,228.54,1120.162,256,420.38,265.02,239.13\nresnetv2_152x2_bit,448,225.56,1134.941,256,236.34,184.99,180.43\nvitamin_xlarge_336,336,219.99,872.741,192,436.06,230.18,347.33\nefficientnet_b7,600,209.53,1221.782,256,66.35,38.33,289.94\nvitamin_large_384,384,205.2,623.762,128,333.71,234.44,440.16\nvitamin_large2_384,384,205.17,623.858,128,333.97,234.44,440.16\nfocalnet_large_fl3,384,204.28,1253.192,256,239.13,105.06,168.04\nxcit_medium_24_p8_384,384,203.5,1257.962,256,84.32,186.67,354.73\ntf_efficientnet_b7,600,198.11,646.087,128,66.35,38.33,289.94\nvit_so400m_patch14_siglip_gap_384,384,195.82,1307.314,256,412.99,333.46,451.19\nfocalnet_large_fl4,384,195.3,1310.76,256,239.32,105.2,181.78\nvit_so400m_patch14_siglip_384,384,195.2,1311.466,256,428.23,335.4,452.89\ndavit_base_fl,768,192.39,665.305,128,90.37,190.32,530.15\nmaxvit_large_tf_384,384,190.72,671.
13,128,212.03,132.55,445.84\nnfnet_f4,512,190.38,1344.673,256,316.07,216.26,262.26\ndm_nfnet_f4,512,189.19,1353.158,256,316.07,216.26,262.26\nswinv2_large_window12to24_192to384,384,186.98,342.268,64,196.74,116.15,407.83\nconvnextv2_huge,384,178.56,1433.694,256,660.29,337.96,232.35\nnfnet_f6,448,177.22,1444.522,256,438.36,229.7,273.62\ndm_nfnet_f6,448,175.57,1458.065,256,438.36,229.7,273.62\nvit_huge_patch14_clip_336,336,173.02,1479.562,256,632.46,390.97,407.54\nsam2_hiera_base_plus,896,170.31,375.779,64,68.68,227.48,828.88\nbeit_large_patch16_512,512,169.73,1508.229,256,305.67,362.24,656.39\nresnetv2_101x3_bit,448,167.54,1145.984,192,387.93,280.33,194.78\nvitamin_xlarge_384,384,165.63,772.812,128,436.06,306.38,493.46\nconvmixer_1536_20,224,153.79,1664.601,256,51.63,48.68,33.03\neva02_large_patch14_448,448,153.5,1667.723,256,305.08,362.33,689.95\nvolo_d5_448,448,148.89,1719.408,256,295.91,315.06,737.92\nvit_gigantic_patch14_224,224,146.07,1752.63,256,1844.44,483.95,275.37\nvit_gigantic_patch14_clip_224,224,145.81,1755.702,256,1844.91,483.96,275.37\nmaxvit_base_tf_512,512,144.2,665.741,96,119.88,138.02,703.99\nregnety_1280,384,138.04,1390.849,192,644.81,374.99,210.2\nnfnet_f5,544,137.1,1867.295,256,377.21,290.97,349.71\nefficientnet_b8,672,136.75,935.971,128,87.41,63.48,442.89\ndm_nfnet_f5,544,136.7,1872.679,256,377.21,290.97,349.71\nfocalnet_xlarge_fl3,384,136.68,1873.042,256,408.79,185.61,223.99\nfocalnet_xlarge_fl4,384,135.52,1888.958,256,409.03,185.79,242.31\nvit_huge_patch14_clip_quickgelu_378,378,135.21,1893.33,256,632.68,503.79,572.79\nvit_so400m_patch14_siglip_gap_448,448,135.13,1894.403,256,413.33,487.18,764.26\nvit_huge_patch14_clip_378,378,134.97,1896.634,256,632.68,503.79,572.79\nnfnet_f7,480,133.4,1919.099,256,499.5,300.08,355.86\nvit_large_patch14_dinov2,518,133.12,1923.026,256,304.37,507.15,1058.82\nvit_large_patch14_reg4_dinov2,518,132.04,1938.813,256,304.37,508.9,1064.02\ntf_efficientnet_b8,672,130.88,977.99,128,87.41,63.48,442.89\nswinv2_cr_huge_384,
384,130.75,489.486,64,657.94,352.04,583.18\nswinv2_cr_giant_224,224,125.01,1023.872,128,2598.76,483.85,309.15\nmaxvit_xlarge_tf_384,384,123.97,516.244,64,475.32,292.78,668.76\nvit_huge_patch16_gap_448,448,123.46,2073.454,256,631.67,544.7,636.83\ncait_m36_384,384,120.83,2118.7,256,271.22,173.11,734.81\nvolo_d5_512,512,113.48,2255.978,256,296.09,425.09,1105.37\nxcit_large_24_p8_384,384,112.54,2274.795,256,188.93,415.0,531.82\neva_giant_patch14_336,336,111.59,2294.19,256,1013.01,620.64,550.67\nnfnet_f6,576,107.92,2372.106,256,438.36,378.69,452.2\ndm_nfnet_f6,576,107.35,2384.806,256,438.36,378.69,452.2\nmaxvit_large_tf_512,512,101.74,629.046,64,212.33,244.75,942.15\nconvnextv2_huge,512,100.55,1273.014,128,660.29,600.81,413.07\ntf_efficientnet_l2,475,90.19,1064.381,96,480.31,172.11,609.89\nnfnet_f7,608,84.41,3032.802,256,499.5,480.39,570.85\nregnety_2560,384,76.59,1671.223,128,1282.6,747.83,296.49\ndavit_huge_fl,768,69.12,925.974,64,360.64,744.84,1060.3\nresnetv2_152x4_bit,480,66.53,1442.988,96,936.53,844.84,414.26\neva02_enormous_patch14_clip_224,224,63.84,4009.924,256,4350.56,1132.46,497.58\nsamvit_base_patch16,1024,61.83,258.784,16,89.67,486.43,1343.27\nmaxvit_xlarge_tf_512,512,61.08,785.831,48,475.77,534.14,1413.22\nsam2_hiera_large,1024,53.33,900.092,48,212.15,907.48,2190.34\nvit_giant_patch14_dinov2,518,40.02,4797.862,192,1136.48,1784.2,2757.89\nvit_giant_patch14_reg4_dinov2,518,39.61,4847.503,192,1136.48,1790.08,2771.21\nswinv2_cr_giant_384,384,36.91,866.946,32,2598.76,1450.71,1394.86\neva_giant_patch14_560,560,35.98,7115.889,256,1014.45,1906.76,2577.17\nefficientnet_l2,800,32.95,1942.195,64,480.31,479.12,1707.39\ntf_efficientnet_l2,800,31.83,1005.239,32,480.31,479.12,1707.39\nsamvit_large_patch16,1024,26.29,304.233,8,308.28,1493.86,2553.78\nvit_so400m_patch14_siglip_gap_896,896,23.37,5478.218,128,416.87,2731.49,8492.88\nsamvit_huge_patch16,1024,15.57,770.869,12,637.03,2982.23,3428.16\n"
  },
  {
    "path": "results/benchmark-infer-amp-nchw-pt240-cu124-rtx4090.csv",
    "content": "model,infer_img_size,infer_samples_per_sec,infer_step_time,infer_batch_size,param_count,infer_gmacs,infer_macts\ntest_vit,160,188343.61,5.426,1024,0.37,0.04,0.48\ntest_byobnet,160,114439.82,8.933,1024,0.46,0.03,0.43\ntest_efficientnet,160,101055.26,10.121,1024,0.36,0.06,0.55\ntinynet_e,106,76644.06,13.346,1024,2.04,0.03,0.69\nmobilenetv3_small_050,224,70186.63,14.579,1024,1.59,0.03,0.92\nlcnet_035,224,64212.36,15.936,1024,1.64,0.03,1.04\nefficientvit_m0,224,54771.53,18.686,1024,2.35,0.08,0.91\nlcnet_050,224,53644.11,19.078,1024,1.88,0.05,1.26\nmobilenetv3_small_075,224,49475.98,20.686,1024,2.04,0.05,1.3\nmobilenetv3_small_100,224,43953.49,23.286,1024,2.54,0.06,1.42\ntinynet_d,152,40047.08,25.554,1024,2.34,0.05,1.42\nefficientvit_m1,224,38637.78,26.491,1024,2.98,0.17,1.33\ntf_mobilenetv3_small_075,224,37336.46,27.408,1024,2.04,0.05,1.3\ntf_mobilenetv3_small_minimal_100,224,37294.33,27.442,1024,2.04,0.06,1.41\nefficientvit_m2,224,34551.89,29.625,1024,4.19,0.2,1.47\ntf_mobilenetv3_small_100,224,34059.68,30.049,1024,2.54,0.06,1.42\nlcnet_075,224,33742.36,30.337,1024,2.36,0.1,1.99\nmobilenetv4_conv_small,224,32546.3,31.45,1024,3.77,0.19,1.97\nefficientvit_m3,224,29796.09,34.355,1024,6.9,0.27,1.62\nmnasnet_small,224,29488.73,34.713,1024,2.03,0.07,2.16\nlevit_128s,224,28915.97,35.397,1024,7.78,0.31,1.88\nghostnet_050,224,27810.93,36.808,1024,2.59,0.05,1.77\nefficientvit_m4,224,27584.68,37.111,1024,8.8,0.3,1.7\nvit_small_patch32_224,224,27011.35,37.895,1024,22.88,1.15,2.5\nlcnet_100,224,26901.09,38.053,1024,2.95,0.16,2.52\nregnetx_002,224,26857.85,38.105,1024,2.68,0.2,2.16\nresnet18,160,26266.23,38.966,1024,11.69,0.93,1.27\nresnet10t,176,25959.28,39.426,1024,5.44,0.7,1.51\nmobilenetv4_conv_small,256,25751.02,39.753,1024,3.77,0.25,2.57\nrepghostnet_050,224,25423.49,40.26,1024,2.31,0.05,2.02\nlevit_conv_128s,224,25136.44,40.725,1024,7.78,0.31,1.88\nregnety_002,224,25031.98,40.889,1024,3.16,0.2,2.17\nmobilenetv2_035,224,25024.38,40.909,1024,1.68,0.07,2.86\nvit_
tiny_r_s16_p8_224,224,24696.86,41.446,1024,6.34,0.44,2.06\nefficientvit_b0,224,22706.84,45.084,1024,3.41,0.1,2.87\nmnasnet_050,224,22309.73,45.888,1024,2.22,0.11,3.07\npit_ti_224,224,21640.68,47.298,1024,4.85,0.7,6.19\npit_ti_distilled_224,224,21496.66,47.616,1024,5.1,0.71,6.23\ntinynet_c,184,20204.93,50.66,1024,2.46,0.11,2.87\nrepghostnet_058,224,19802.21,51.689,1024,2.55,0.07,2.59\nmixer_s32_224,224,19527.38,52.427,1024,19.1,1.0,2.28\nmobilenetv2_050,224,19456.73,52.616,1024,1.97,0.1,3.64\nsemnasnet_050,224,19111.19,53.559,1024,2.08,0.11,3.44\nlevit_128,224,19082.4,53.651,1024,9.21,0.41,2.71\nefficientvit_m5,224,18614.46,54.998,1024,12.47,0.53,2.41\nvit_medium_patch32_clip_224,224,18383.79,55.687,1024,39.69,2.0,3.34\nregnetx_004,224,18131.35,56.448,1024,5.16,0.4,3.14\ngernet_s,224,17973.35,56.961,1024,8.17,0.75,2.65\nlcnet_150,224,17737.51,57.717,1024,4.5,0.34,3.79\ndeit_tiny_patch16_224,224,17673.84,57.926,1024,5.72,1.26,5.97\nvit_tiny_patch16_224,224,17666.09,57.952,1024,5.72,1.26,5.97\ncs3darknet_focus_s,256,17602.37,58.161,1024,3.27,0.69,2.7\ndeit_tiny_distilled_patch16_224,224,17560.74,58.299,1024,5.91,1.27,6.01\nlevit_conv_128,224,17447.46,58.676,1024,9.21,0.41,2.71\nregnetx_004_tv,224,17439.64,58.689,1024,5.5,0.42,3.17\nlevit_192,224,16980.83,60.29,1024,10.95,0.66,3.2\ncs3darknet_s,256,16965.42,60.345,1024,3.28,0.72,2.97\nresnet10t,224,16340.38,62.643,1024,5.44,1.1,2.43\nresnet34,160,16119.94,63.502,1024,21.8,1.87,1.91\nrepghostnet_080,224,15726.41,65.091,1024,3.28,0.1,3.22\nlevit_conv_192,224,15726.05,65.102,1024,10.95,0.66,3.2\nmobilenetv3_large_075,224,15518.95,65.962,1024,3.99,0.16,4.0\nvit_xsmall_patch16_clip_224,224,15266.09,67.063,1024,8.28,1.79,6.65\nhardcorenas_a,224,14844.77,68.969,1024,5.26,0.23,4.38\nmnasnet_075,224,14443.19,70.885,1024,3.17,0.23,4.77\nese_vovnet19b_slim_dw,224,14337.06,71.41,1024,1.9,0.4,5.28\nnf_regnet_b0,192,14314.09,71.525,1024,8.76,0.37,3.15\nmobilenetv3_rw,224,14298.34,71.594,1024,5.48,0.23,4.41\nresnet14t,176,14213.29,72.
022,1024,10.08,1.07,3.61\nmobilenetv3_large_100,224,14191.17,72.134,1024,5.48,0.23,4.41\npit_xs_224,224,14164.01,72.272,1024,10.62,1.4,7.71\npit_xs_distilled_224,224,14041.03,72.903,1024,11.0,1.41,7.76\nghostnet_100,224,13948.64,73.4,1024,5.18,0.15,3.55\nregnetx_006,224,13813.98,74.091,1024,6.2,0.61,3.98\nmobilenetv1_100,224,13760.09,74.406,1024,4.23,0.58,5.04\ntf_mobilenetv3_large_075,224,13614.3,75.186,1024,3.99,0.16,4.0\nhardcorenas_b,224,13584.5,75.367,1024,5.18,0.26,5.09\nresnet18,224,13537.15,75.62,1024,11.69,1.82,2.48\nhardcorenas_c,224,13512.54,75.768,1024,5.52,0.28,5.01\nmobilenetv1_100h,224,13446.31,76.143,1024,5.28,0.63,5.09\nregnety_004,224,13238.44,77.296,1024,4.34,0.41,3.89\ntf_efficientnetv2_b0,192,13104.66,78.117,1024,7.14,0.54,3.51\ntf_mobilenetv3_large_minimal_100,224,13074.99,78.292,1024,3.92,0.22,4.4\ntinynet_b,188,13009.4,78.686,1024,3.73,0.21,4.44\nmobilenet_edgetpu_v2_xs,224,12965.31,78.967,1024,4.46,0.7,4.8\nvit_betwixt_patch32_clip_224,224,12906.63,79.313,1024,61.41,3.09,4.17\nconvnext_atto,224,12819.99,79.862,1024,3.7,0.55,3.81\nrepghostnet_100,224,12769.67,80.164,1024,4.07,0.15,3.98\nmnasnet_100,224,12674.12,80.779,1024,4.38,0.33,5.46\nseresnet18,224,12647.44,80.94,1024,11.78,1.82,2.49\nhardcorenas_d,224,12555.09,81.548,1024,7.5,0.3,4.93\nlevit_256,224,12506.52,81.863,1024,18.89,1.13,4.23\nlegacy_seresnet18,224,12443.24,82.276,1024,11.78,1.82,2.49\ntf_mobilenetv3_large_100,224,12230.56,83.698,1024,5.48,0.23,4.41\nconvnext_atto_ols,224,12208.81,83.859,1024,3.7,0.58,4.11\nmobilenetv2_075,224,12206.99,83.872,1024,2.64,0.22,5.86\nedgenext_xx_small,256,12172.17,84.111,1024,1.33,0.26,3.33\nsemnasnet_075,224,12007.14,85.256,1024,2.91,0.23,5.54\nregnety_006,224,11941.12,85.707,1024,6.06,0.61,4.33\nlevit_conv_256,224,11783.71,86.886,1024,18.89,1.13,4.23\nrepghostnet_111,224,11492.86,89.073,1024,4.54,0.18,4.38\nspnasnet_100,224,11256.71,90.94,1024,4.42,0.35,6.03\nlevit_256d,224,11118.19,92.088,1024,26.21,1.4,4.93\nhardcorenas_f,224,11102.22,92.22,10
24,8.2,0.35,5.57\nconvnext_femto,224,11040.93,92.732,1024,5.22,0.79,4.57\nresnet18d,224,11008.69,92.989,1024,11.71,2.06,3.29\nrepvgg_a0,224,10985.0,93.181,1024,9.11,1.52,3.59\ndla46_c,224,10958.84,93.426,1024,1.3,0.58,4.5\nmobilenetv1_100,256,10920.28,93.756,1024,4.23,0.76,6.59\nhardcorenas_e,224,10912.94,93.82,1024,8.07,0.35,5.65\nmobilenetv4_conv_medium,224,10887.04,94.044,1024,9.72,0.84,5.8\nghostnet_130,224,10826.86,94.566,1024,7.36,0.24,4.6\nese_vovnet19b_slim,224,10822.95,94.602,1024,3.17,1.69,3.52\nmobilenetv2_100,224,10810.08,94.71,1024,3.5,0.31,6.68\nmobilenetv1_100h,256,10741.59,95.317,1024,5.28,0.82,6.65\nsemnasnet_100,224,10684.42,95.813,1024,3.89,0.32,6.23\nregnetx_008,224,10649.58,96.116,1024,7.26,0.81,5.15\nconvnext_femto_ols,224,10568.01,96.88,1024,5.23,0.82,4.87\ncrossvit_tiny_240,240,10467.46,97.811,1024,7.01,1.57,9.08\nmobilenet_edgetpu_100,224,10392.92,98.516,1024,4.09,1.0,5.75\nefficientnet_lite0,224,10376.87,98.665,1024,4.65,0.4,6.74\nmobilenetv1_125,224,10367.58,98.748,1024,6.27,0.89,6.3\nmobilevit_xxs,256,10336.7,99.048,1024,1.27,0.42,8.34\nfbnetc_100,224,10321.13,99.198,1024,5.57,0.4,6.51\ntinynet_a,192,10287.99,99.507,1024,6.19,0.35,5.41\ntf_efficientnetv2_b0,224,10217.13,100.198,1024,7.14,0.73,4.77\nmobilenetv4_hybrid_medium_075,224,10204.4,100.333,1024,7.31,0.66,5.65\nhgnetv2_b0,224,10169.22,100.683,1024,6.0,0.33,2.12\nvit_base_patch32_clip_224,224,10127.52,101.084,1024,88.22,4.41,5.01\nvit_base_patch32_224,224,10112.91,101.231,1024,88.22,4.41,5.01\ntf_efficientnetv2_b1,192,10100.97,101.35,1024,8.14,0.76,4.59\nregnety_008,224,9957.68,102.806,1024,6.26,0.81,5.25\nlevit_conv_256d,224,9877.75,103.652,1024,26.21,1.4,4.93\ncrossvit_9_240,240,9867.74,103.759,1024,8.55,1.85,9.52\nrepghostnet_130,224,9782.41,104.652,1024,5.48,0.25,5.24\nregnety_008_tv,224,9578.99,106.856,1024,6.43,0.84,5.42\nedgenext_xx_small,288,9574.3,106.938,1024,1.33,0.33,4.21\nresnetblur18,224,9357.08,109.406,1024,11.69,2.34,3.39\nvit_small_patch32_384,384,9286.73,110.252,10
24,22.92,3.45,8.25\nxcit_nano_12_p16_224,224,9284.43,110.262,1024,3.05,0.56,4.17\nmobilenet_edgetpu_v2_s,224,9204.18,111.24,1024,5.99,1.21,6.6\nvisformer_tiny,224,9194.72,111.341,1024,10.32,1.27,5.72\ndla46x_c,224,9155.27,111.834,1024,1.07,0.54,5.66\ncrossvit_9_dagger_240,240,9081.92,112.735,1024,8.78,1.99,9.97\nmobilenetv4_conv_medium,256,9068.95,112.899,1024,9.72,1.1,7.58\npvt_v2_b0,224,8956.42,114.305,1024,3.67,0.57,7.99\nresnet14t,224,8930.87,114.632,1024,10.08,1.69,5.8\nefficientnet_b0,224,8926.12,114.703,1024,5.29,0.4,6.75\nmnasnet_140,224,8908.91,114.927,1024,7.12,0.6,7.71\nfbnetv3_b,224,8904.43,114.985,1024,8.6,0.42,6.97\npit_s_224,224,8898.37,115.049,1024,23.46,2.88,11.56\npit_s_distilled_224,224,8880.77,115.275,1024,24.04,2.9,11.64\nmobilevitv2_050,256,8864.77,115.498,1024,1.37,0.48,8.04\ncs3darknet_focus_m,256,8810.63,116.207,1024,9.3,1.98,4.89\ntf_efficientnet_lite0,224,8709.74,117.542,1024,4.65,0.4,6.74\ndla60x_c,224,8644.34,118.444,1024,1.32,0.59,6.01\nefficientnet_b1_pruned,240,8640.04,118.503,1024,6.33,0.4,6.21\nefficientvit_b1,224,8637.15,118.543,1024,9.1,0.53,7.25\nconvnext_pico,224,8576.11,119.388,1024,9.05,1.37,6.1\nregnetz_005,224,8537.29,119.917,1024,7.12,0.52,5.86\nrexnet_100,224,8512.86,120.248,1024,4.8,0.41,7.44\nrepghostnet_150,224,8511.4,120.282,1024,6.58,0.32,6.0\nmobilenetv1_125,256,8425.89,121.511,1024,6.27,1.16,8.23\nrepvit_m1,224,8403.37,121.801,1024,5.49,0.83,7.45\nvit_base_patch32_clip_quickgelu_224,224,8389.12,122.035,1024,87.85,4.41,5.01\nese_vovnet19b_dw,224,8324.1,123.002,1024,6.54,1.34,8.25\nresnet18,288,8261.03,123.927,1024,11.69,3.01,4.11\nrepvgg_a1,224,8255.57,124.011,1024,14.09,2.64,4.74\nrexnetr_100,224,8248.96,124.109,1024,4.88,0.43,7.72\nconvnext_pico_ols,224,8234.27,124.344,1024,9.06,1.43,6.5\ncs3darknet_m,256,8195.28,124.934,1024,9.31,2.08,5.28\nresnet34,224,8192.41,124.968,1024,21.8,3.67,3.74\nresnet50,160,8139.65,125.774,1024,25.56,2.1,5.67\nmobilenetv4_hybrid_medium,224,8135.37,125.856,1024,11.07,0.98,6.84\nvit_tiny
_r_s16_p8_384,384,8129.06,125.953,1024,6.36,1.34,6.49\nselecsls42,224,8099.05,126.406,1024,30.35,2.94,4.62\nmobilenetv2_110d,224,8095.22,126.469,1024,4.52,0.45,8.71\nnf_regnet_b0,256,8083.32,126.666,1024,8.76,0.64,5.58\nselecsls42b,224,8049.57,127.183,1024,32.46,2.98,4.62\nrepvit_m0_9,224,7983.61,128.234,1024,5.49,0.83,7.45\ntf_efficientnetv2_b2,208,7955.98,128.679,1024,10.1,1.06,6.0\nvit_base_patch32_clip_256,256,7846.08,130.484,1024,87.86,5.76,6.65\nhrnet_w18_small,224,7772.99,131.724,1024,13.19,1.61,5.72\nconvnext_atto,288,7751.5,132.09,1024,3.7,0.91,6.3\nefficientnet_b0_gn,224,7748.62,132.137,1024,5.29,0.42,6.75\nlevit_384,224,7713.47,132.738,1024,39.13,2.36,6.26\nseresnet18,288,7713.21,132.732,1024,11.78,3.01,4.11\ngernet_m,224,7709.22,132.814,1024,21.14,3.02,5.24\nvit_small_patch16_224,224,7693.38,133.087,1024,22.05,4.61,11.95\ndeit_small_patch16_224,224,7687.89,133.182,1024,22.05,4.61,11.95\nresnet50d,160,7681.19,133.288,1024,25.58,2.22,6.08\ntf_efficientnet_b0,224,7640.8,133.985,1024,5.29,0.4,6.75\ndeit_small_distilled_patch16_224,224,7620.92,134.352,1024,22.44,4.63,12.02\nedgenext_x_small,256,7580.19,135.075,1024,2.34,0.54,5.93\nseresnet34,224,7569.0,135.26,1024,21.96,3.67,3.74\nghostnetv2_100,224,7562.78,135.385,1024,6.16,0.18,4.55\nskresnet18,224,7543.01,135.725,1024,11.96,1.82,3.24\nmobilenetv2_140,224,7486.19,136.767,1024,6.11,0.6,9.57\nsemnasnet_140,224,7485.31,136.75,1024,6.11,0.6,8.87\nlegacy_seresnet34,224,7446.07,137.508,1024,21.96,3.67,3.74\nfbnetv3_d,224,7426.0,137.878,1024,10.31,0.52,8.5\nhgnetv2_b1,224,7416.62,138.054,1024,6.34,0.49,2.73\nconvnext_atto_ols,288,7379.61,138.745,1024,3.7,0.96,6.8\nmixer_b32_224,224,7373.37,138.863,1024,60.29,3.24,6.29\nvit_pwee_patch16_reg1_gap_256,256,7329.65,139.691,1024,15.25,4.37,15.87\nresnet34d,224,7202.58,142.142,1024,21.82,3.91,4.54\nlevit_conv_384,224,7171.07,142.782,1024,39.13,2.36,6.26\nefficientnet_lite1,240,7085.22,144.508,1024,5.42,0.62,10.14\nefficientnet_b0,256,7082.56,144.564,1024,5.29,0.52,8.81\n
dla34,224,7078.3,144.651,1024,15.74,3.07,5.02\nmobilenet_edgetpu_v2_m,224,7064.66,144.928,1024,8.46,1.85,8.15\nmixnet_s,224,7052.75,145.177,1024,4.13,0.25,6.25\nfbnetv3_b,256,7022.87,145.793,1024,8.6,0.55,9.1\nseresnet50,160,6986.99,146.503,1024,28.09,2.1,5.69\ncs3darknet_focus_m,288,6969.39,146.91,1024,9.3,2.51,6.19\neva02_tiny_patch14_224,224,6896.46,148.467,1024,5.5,1.7,9.14\necaresnet50t,160,6885.36,148.705,1024,25.57,2.21,6.04\ntf_efficientnetv2_b1,240,6876.19,148.891,1024,8.14,1.21,7.34\nefficientvit_b1,256,6866.64,149.111,1024,9.1,0.69,9.46\nselecsls60b,224,6862.29,149.193,1024,32.77,3.63,5.52\nselecsls60,224,6832.72,149.84,1024,30.67,3.59,5.52\nvit_wee_patch16_reg1_gap_256,256,6818.08,150.174,1024,13.42,3.83,13.9\nmobilenetv4_conv_blur_medium,224,6816.69,150.205,1024,9.72,1.22,8.58\ndeit3_small_patch16_224,224,6812.85,150.286,1024,22.06,4.61,11.95\nefficientnet_es,224,6810.09,150.348,1024,5.44,1.81,8.73\nmixer_s16_224,224,6808.46,150.387,1024,18.53,3.79,5.97\nefficientnet_blur_b0,224,6806.84,150.42,1024,5.29,0.43,8.72\ntiny_vit_5m_224,224,6795.79,150.652,1024,12.08,1.28,11.25\nrepvit_m1_0,224,6784.56,150.903,1024,7.3,1.13,8.69\nregnetx_016,224,6766.08,151.313,1024,9.19,1.62,7.93\nresnet50,176,6703.95,152.718,1024,25.56,2.62,6.92\nconvnext_femto,288,6687.36,153.109,1024,5.22,1.3,7.56\nflexivit_small,240,6660.66,153.724,1024,22.06,5.35,14.18\nefficientnet_b0_g16_evos,224,6654.19,153.871,1024,8.11,1.01,7.42\nresmlp_12_224,224,6650.72,153.939,1024,15.35,3.01,5.5\nresnet26,224,6639.65,154.197,1024,16.0,2.36,7.35\nresnet18d,288,6633.67,154.335,1024,11.71,3.41,5.43\nrepvit_m2,224,6610.13,154.886,1024,8.8,1.36,9.43\nmobilenetv4_hybrid_medium,256,6587.07,155.438,1024,11.07,1.29,9.01\nconvnextv2_atto,224,6572.88,155.773,1024,3.71,0.55,3.81\nresnetrs50,160,6568.89,155.826,1024,35.69,2.29,6.2\nresnext50_32x4d,160,6540.25,156.542,1024,25.03,2.17,7.35\ncs3darknet_m,288,6499.55,157.533,1024,9.31,2.63,6.69\nrexnetr_130,224,6489.24,157.769,1024,7.61,0.68,9.81\nrexnet_130,224
,6476.24,158.089,1024,7.56,0.68,9.71\nresnetaa34d,224,6464.72,158.371,1024,21.82,4.43,5.07\nrepghostnet_200,224,6438.16,159.023,1024,9.8,0.54,7.96\nconvnext_femto_ols,288,6405.94,159.835,1024,5.23,1.35,8.06\nxcit_tiny_12_p16_224,224,6316.09,162.101,1024,6.72,1.24,6.29\ngmixer_12_224,224,6305.79,162.375,1024,12.7,2.67,7.26\ntf_mixnet_s,224,6303.08,162.43,1024,4.13,0.25,6.25\nmobilenetv4_conv_aa_medium,256,6295.03,162.653,1024,9.72,1.58,10.3\nrepvit_m1_1,224,6283.35,162.917,1024,8.8,1.36,9.43\ntf_efficientnet_es,224,6275.13,163.154,1024,5.44,1.81,8.73\nefficientnet_b1,224,6258.96,163.587,1024,7.79,0.59,9.36\nefficientnet_es_pruned,224,6256.91,163.63,1024,5.44,1.81,8.73\nefficientnet_b0_g8_gn,224,6255.71,163.672,1024,6.56,0.66,6.75\nconvnext_nano,224,6249.99,163.825,1024,15.59,2.46,8.37\nrepvgg_b0,224,6210.44,164.855,1024,15.82,3.41,6.15\nhgnetv2_b0,288,6150.46,166.477,1024,6.0,0.54,3.51\necaresnet50d_pruned,224,6136.73,166.846,1024,19.94,2.53,6.43\nhgnetv2_b4,224,6125.62,167.15,1024,19.8,2.75,6.7\ntf_efficientnet_lite1,240,6121.06,167.26,1024,5.42,0.62,10.14\nresnet26d,224,6072.87,168.59,1024,16.01,2.6,8.15\nefficientnet_cc_b0_4e,224,6016.7,170.181,1024,13.31,0.41,9.42\nefficientnet_cc_b0_8e,224,6014.04,170.256,1024,24.01,0.42,9.42\nnf_regnet_b1,256,6005.41,170.494,1024,10.22,0.82,7.27\nmobilenetv4_conv_medium,320,5999.7,170.66,1024,9.72,1.71,11.84\nmobilenet_edgetpu_v2_l,224,5960.09,171.794,1024,10.92,2.55,9.05\nvit_relpos_small_patch16_224,224,5956.13,171.907,1024,21.98,4.59,13.05\nedgenext_x_small,288,5955.77,171.92,1024,2.34,0.68,7.5\nregnety_016,224,5939.38,172.381,1024,11.2,1.63,8.04\ndarknet17,256,5937.82,172.434,1024,14.3,3.26,7.18\nfbnetv3_d,256,5909.96,173.251,1024,10.31,0.68,11.1\nvit_srelpos_small_patch16_224,224,5903.24,173.446,1024,21.97,4.59,12.16\nnf_resnet26,224,5897.16,173.627,1024,16.0,2.41,7.35\nmobilevitv2_075,256,5875.41,174.271,1024,2.87,1.05,12.06\nnf_regnet_b2,240,5860.87,174.704,1024,14.31,0.97,7.23\nghostnetv2_130,224,5854.47,174.894,1024,8.
96,0.28,5.9\nefficientnet_b2_pruned,260,5817.56,176.002,1024,8.31,0.73,9.13\nvit_base_patch32_plus_256,256,5803.45,176.415,1024,119.48,7.79,7.76\ntiny_vit_11m_224,224,5800.65,176.504,1024,20.35,2.04,13.49\nvit_tiny_patch16_384,384,5746.17,178.189,1024,5.79,4.7,25.39\ngmlp_ti16_224,224,5736.13,178.502,1024,5.87,1.34,7.55\nmobilenet_edgetpu_v2_m,256,5727.79,178.761,1024,8.46,2.42,10.65\nmobilenetv2_120d,224,5720.65,178.972,1024,5.83,0.69,11.97\nvit_relpos_small_patch16_rpn_224,224,5707.01,179.411,1024,21.97,4.59,13.05\nresnetblur18,288,5705.18,179.458,1024,11.69,3.87,5.6\nrexnetr_150,224,5686.03,180.063,1024,9.78,0.89,11.13\nconvnext_nano_ols,224,5675.19,180.417,1024,15.65,2.65,9.38\npoolformer_s12,224,5647.45,181.292,1024,11.92,1.82,5.53\nefficientformer_l1,224,5641.9,181.48,1024,12.29,1.3,5.53\nconvnextv2_femto,224,5629.97,181.868,1024,5.23,0.79,4.57\nrexnet_150,224,5597.8,182.901,1024,9.73,0.9,11.21\nefficientnet_lite2,260,5526.09,185.282,1024,6.09,0.89,12.9\nefficientnet_b1,240,5515.48,185.64,1024,7.79,0.71,10.88\ndarknet21,256,5507.84,185.899,1024,20.86,3.93,7.47\nedgenext_small,256,5504.6,186.01,1024,5.59,1.26,9.07\nmobilenetv4_conv_blur_medium,256,5452.15,140.844,768,9.72,1.59,11.2\nresnext50_32x4d,176,5448.72,187.904,1024,25.03,2.71,8.97\ntf_efficientnet_cc_b0_8e,224,5378.31,190.379,1024,24.01,0.42,9.42\ntf_efficientnet_cc_b0_4e,224,5350.74,191.362,1024,13.31,0.41,9.42\nresnet101,160,5347.38,191.468,1024,44.55,4.0,8.28\ngernet_l,256,5345.88,191.534,1024,31.08,4.57,8.0\nefficientvit_b1,288,5336.48,191.869,1024,9.1,0.87,11.96\nregnetz_005,288,5324.94,192.275,1024,7.12,0.86,9.68\nhgnet_tiny,224,5289.94,193.552,1024,14.74,4.54,6.36\nrepvgg_a2,224,5234.66,195.589,1024,28.21,5.7,6.26\nmobilenetv4_conv_large,256,5210.97,196.493,1024,32.59,2.86,12.14\nconvnext_pico,288,5203.54,196.772,1024,9.05,2.27,10.08\ncs3darknet_focus_l,256,5201.05,196.865,1024,21.15,4.66,8.03\nvit_relpos_base_patch32_plus_rpn_256,256,5200.74,196.879,1024,119.42,7.68,8.01\nmobilenetv3_large_150d,
256,5199.32,196.924,1024,14.62,1.03,12.35\nresnest14d,224,5198.83,196.94,1024,10.61,2.76,7.33\nvit_medium_patch16_clip_224,224,5170.05,198.043,1024,38.59,8.0,15.93\nsedarknet21,256,5136.28,199.326,1024,20.95,3.93,7.47\ntf_efficientnetv2_b2,260,5112.53,200.262,1024,10.1,1.72,9.84\nregnetz_b16,224,5111.47,200.306,1024,9.72,1.45,9.95\nefficientnetv2_rw_t,224,5105.62,200.547,1024,13.65,1.93,9.94\nhgnetv2_b2,224,5071.32,201.903,1024,11.22,1.15,4.12\nedgenext_small_rw,256,5063.53,202.213,1024,7.83,1.58,9.51\nlegacy_seresnext26_32x4d,224,5035.51,203.34,1024,16.79,2.49,9.39\nmobilenetv4_hybrid_large_075,256,5015.06,204.168,1024,22.75,2.06,11.64\necaresnet101d_pruned,224,5009.62,204.385,1024,24.88,3.48,7.69\ntf_efficientnetv2_b3,240,5006.45,204.508,1024,14.36,1.93,9.95\ncrossvit_small_240,240,5004.67,204.591,1024,26.86,5.63,18.17\nconvnext_pico_ols,288,4990.08,205.192,1024,9.06,2.37,10.74\nresnext26ts,256,4989.4,205.207,1024,10.3,2.43,10.52\nresnet34,288,4989.09,205.219,1024,21.8,6.07,6.18\nefficientnet_b1,256,4979.81,205.61,1024,7.79,0.77,12.22\npvt_v2_b1,224,4972.11,205.919,1024,14.01,2.12,15.39\nmixnet_m,224,4961.12,206.387,1024,5.01,0.36,8.19\ndpn48b,224,4946.37,207.002,1024,9.13,1.69,8.92\nsam2_hiera_tiny,224,4939.34,207.285,1024,26.85,4.91,17.12\nnf_ecaresnet26,224,4936.32,207.425,1024,16.0,2.41,7.36\neca_resnext26ts,256,4936.16,207.428,1024,10.3,2.43,10.52\nmobilevit_xs,256,4915.37,156.229,768,2.32,1.05,16.33\ncs3darknet_l,256,4898.39,209.032,1024,21.16,4.86,8.55\ntf_efficientnet_b1,240,4891.58,209.309,1024,7.79,0.71,10.88\nnf_seresnet26,224,4877.41,209.923,1024,17.4,2.41,7.36\ngcresnext26ts,256,4873.11,210.116,1024,10.48,2.43,10.53\necaresnetlight,224,4830.89,211.948,1024,30.16,4.11,8.42\nseresnext26ts,256,4825.46,212.166,1024,10.39,2.43,10.52\ntf_efficientnet_lite2,260,4812.46,212.754,1024,6.09,0.89,12.9\nresnet26t,256,4797.54,213.414,1024,16.01,3.35,10.52\nconvnext_tiny,224,4796.99,213.45,1024,28.59,4.47,13.44\ncoatnext_nano_rw_224,224,4780.38,214.189,1024,14.7,2.4
7,12.8\necaresnext50t_32x4d,224,4760.92,215.063,1024,15.41,2.7,10.09\necaresnext26t_32x4d,224,4760.44,215.086,1024,15.41,2.7,10.09\nghostnetv2_160,224,4741.68,215.94,1024,12.39,0.42,7.23\nmobileone_s1,224,4716.61,217.083,1024,4.83,0.86,9.67\nefficientnet_b2,256,4713.06,217.249,1024,9.11,0.89,12.81\nese_vovnet19b_dw,288,4711.0,217.348,1024,6.54,2.22,13.63\nvit_little_patch16_reg1_gap_256,256,4683.45,218.614,1024,22.52,6.27,18.06\ngc_efficientnetv2_rw_t,224,4667.78,219.357,1024,13.68,1.94,9.97\nnf_regnet_b1,288,4666.05,219.44,1024,10.22,1.02,9.2\nvit_small_resnet26d_224,224,4663.1,219.577,1024,63.61,5.07,11.12\nvit_little_patch16_reg4_gap_256,256,4660.7,219.682,1024,22.52,6.35,18.33\nefficientnet_b3_pruned,300,4660.19,219.714,1024,9.86,1.04,11.86\nseresnext26t_32x4d,224,4649.25,220.222,1024,16.81,2.7,10.09\ntf_mixnet_m,224,4635.47,220.876,1024,5.01,0.36,8.19\ncrossvit_15_240,240,4634.12,220.945,1024,27.53,5.81,19.77\nvit_small_r26_s32_224,224,4628.86,221.202,1024,36.43,3.56,9.85\nseresnet34,288,4622.08,221.495,1024,21.96,6.07,6.18\nvit_relpos_medium_patch16_cls_224,224,4616.98,221.77,1024,38.76,8.03,18.24\ntresnet_m,224,4615.66,221.824,1024,31.39,5.75,7.31\ndeit3_medium_patch16_224,224,4613.13,221.958,1024,38.85,8.0,15.93\nseresnext26d_32x4d,224,4594.08,222.859,1024,16.81,2.73,10.19\nhgnetv2_b1,288,4576.22,223.749,1024,6.34,0.82,4.51\ncs3sedarknet_l,256,4561.19,224.486,1024,21.91,4.86,8.56\nlevit_512,224,4507.01,227.185,1024,95.17,5.64,10.22\nnf_regnet_b2,272,4499.96,227.541,1024,14.31,1.22,9.27\nrepvit_m3,224,4488.27,228.109,1024,10.68,1.89,13.94\nselecsls84,224,4485.13,228.28,1024,50.95,5.9,7.57\ncoatnet_pico_rw_224,224,4472.02,228.953,1024,10.85,2.05,14.62\nresnetv2_50,224,4451.45,230.006,1024,25.55,4.11,11.11\nmobilevitv2_100,256,4424.26,173.574,768,4.9,1.84,16.08\nwide_resnet50_2,176,4418.37,231.739,1024,68.88,7.29,8.97\nhiera_tiny_224,224,4417.32,231.797,1024,27.91,4.91,17.13\ncoat_lite_tiny,224,4413.07,232.018,1024,5.72,1.6,11.65\nresnet101,176,4401.33,232.627,
1024,44.55,4.92,10.08\ncrossvit_15_dagger_240,240,4395.44,232.947,1024,28.21,6.13,20.43\nvgg11,224,4384.0,233.545,1024,132.86,7.61,7.44\nresnet34d,288,4379.98,233.76,1024,21.82,6.47,7.51\neca_botnext26ts_256,256,4372.52,234.17,1024,10.59,2.46,11.6\necaresnet26t,256,4365.68,234.537,1024,16.01,3.35,10.53\nconvit_tiny,224,4364.79,234.585,1024,5.71,1.26,7.94\nskresnet34,224,4357.69,234.957,1024,22.28,3.67,5.13\nvovnet39a,224,4329.41,236.505,1024,22.6,7.09,6.73\nconvnextv2_pico,224,4303.27,237.943,1024,9.07,1.37,6.1\neca_halonext26ts,256,4303.05,237.954,1024,10.76,2.44,11.46\ncspresnet50,256,4286.58,238.859,1024,21.62,4.54,11.5\nfbnetv3_g,240,4271.76,239.698,1024,16.62,1.28,14.87\nfastvit_t8,256,4268.46,239.88,1024,4.03,0.7,8.63\ndla60,224,4267.0,239.958,1024,22.04,4.26,10.16\nresnetv2_50t,224,4244.14,241.24,1024,25.57,4.32,11.82\nregnetx_032,224,4235.6,241.712,1024,15.3,3.2,11.37\nhrnet_w18_small_v2,224,4233.72,241.85,1024,15.6,2.62,9.65\nlevit_512d,224,4224.89,242.354,1024,92.5,5.85,11.3\nresnet32ts,256,4223.86,242.395,1024,17.96,4.63,11.58\nmobilenetv4_hybrid_medium,320,4223.22,242.451,1024,11.07,2.05,14.36\nresnet50,224,4217.6,242.763,1024,25.56,4.11,11.11\nlevit_conv_512,224,4205.6,243.467,1024,95.17,5.64,10.22\nresnetv2_50d,224,4198.97,243.837,1024,25.57,4.35,11.92\nefficientvit_b2,224,4196.18,244.014,1024,24.33,1.6,14.62\nlambda_resnet26t,256,4192.89,244.204,1024,10.96,3.02,11.87\ncoat_lite_mini,224,4171.78,245.439,1024,11.01,2.0,12.25\nresnet33ts,256,4170.49,245.507,1024,19.68,4.76,11.66\nregnety_032,224,4166.74,245.728,1024,19.44,3.2,11.26\nbotnet26t_256,256,4164.92,245.843,1024,12.49,3.32,11.98\nhalonet26t,256,4157.99,246.255,1024,12.48,3.19,11.69\nese_vovnet39b,224,4150.95,246.672,1024,24.57,7.09,6.74\neca_vovnet39b,224,4144.28,247.067,1024,22.6,7.09,6.74\ndpn68,224,4139.9,247.329,1024,12.61,2.35,10.47\nhgnetv2_b3,224,4136.91,247.511,1024,16.29,1.78,5.07\ncoatnet_nano_cc_224,224,4135.45,247.589,1024,13.76,2.24,15.02\nrexnetr_200,224,4135.03,185.7,768,16.52,1.5
9,15.11\nrexnet_200,224,4128.12,186.013,768,16.37,1.56,14.91\nvit_relpos_medium_patch16_224,224,4120.0,248.525,1024,38.75,7.97,17.02\neca_resnet33ts,256,4101.91,249.618,1024,19.68,4.76,11.66\nvit_srelpos_medium_patch16_224,224,4077.53,251.113,1024,38.74,7.96,16.21\ngcresnet33ts,256,4074.93,251.273,1024,19.88,4.76,11.68\ndpn68b,224,4071.04,251.513,1024,12.61,2.35,10.47\ncs3darknet_focus_l,288,4065.11,251.879,1024,21.15,5.9,10.16\nresnet26,288,4059.09,252.243,1024,16.0,3.9,12.15\nseresnet33ts,256,4037.86,253.571,1024,19.78,4.76,11.66\nresnet50t,224,4031.22,253.987,1024,25.57,4.32,11.82\nvisformer_small,224,4021.8,254.581,1024,40.22,4.88,11.43\nresnet50d,224,3994.33,256.335,1024,25.58,4.35,11.92\ndavit_tiny,224,3979.92,192.952,768,28.36,4.54,18.89\ncspresnet50w,256,3979.02,257.332,1024,28.12,5.04,12.19\nresnetaa34d,288,3962.03,258.426,1024,21.82,7.33,8.38\nresnet50c,224,3951.72,259.096,1024,25.58,4.35,11.92\nresnetv2_50x1_bit,224,3946.63,259.432,1024,25.55,4.23,11.11\ncspresnet50d,256,3928.57,260.626,1024,21.64,4.86,12.55\nefficientnet_b1,288,3916.7,261.423,1024,7.79,0.97,15.46\nresnext26ts,288,3914.08,261.582,1024,10.3,3.07,13.31\nconvnext_tiny_hnf,224,3907.17,262.058,1024,28.59,4.47,13.44\nvit_base_resnet26d_224,224,3907.09,262.061,1024,101.4,6.97,13.16\nbat_resnext26ts,256,3900.03,262.512,1024,10.73,2.53,12.51\nresnetaa50,224,3892.01,263.074,1024,25.56,5.15,11.64\ncoatnet_nano_rw_224,224,3890.95,263.144,1024,15.14,2.41,15.41\nregnetv_040,224,3888.77,263.292,1024,20.64,4.0,12.29\nvit_relpos_medium_patch16_rpn_224,224,3883.35,263.671,1024,38.73,7.97,17.02\ntwins_svt_small,224,3875.45,264.197,1024,24.06,2.94,13.75\neca_resnext26ts,288,3875.29,264.216,1024,10.3,3.07,13.32\nmobileone_s2,224,3859.75,265.286,1024,7.88,1.34,11.55\nhgnetv2_b4,288,3849.96,265.959,1024,19.8,4.54,11.08\nhaloregnetz_b,224,3847.23,266.147,1024,11.68,1.97,11.94\nlegacy_seresnet50,224,3845.77,266.241,1024,28.09,3.88,10.6\ncs3darknet_l,288,3845.41,266.274,1024,21.16,6.16,10.83\nefficientnet_cc_b1_8e
,240,3844.11,266.369,1024,39.72,0.75,15.44\ntf_efficientnet_em,240,3839.63,266.664,1024,6.9,3.04,14.34\nmobilevit_s,256,3835.87,200.199,768,5.58,2.03,19.94\nlevit_conv_512d,224,3830.33,267.323,1024,92.5,5.85,11.3\nvgg11_bn,224,3829.44,267.372,1024,132.87,7.62,7.44\ntf_efficientnet_b2,260,3827.63,267.5,1024,9.11,1.02,13.83\ngcresnext26ts,288,3824.01,267.764,1024,10.48,3.07,13.33\nregnety_040,224,3819.46,268.036,1024,20.65,4.0,12.29\nresnet152,160,3816.3,268.292,1024,60.19,5.9,11.51\nresnetv2_50d_gn,224,3798.52,269.547,1024,25.57,4.38,11.92\nconvnext_nano,288,3784.13,270.586,1024,15.59,4.06,13.84\nmixnet_l,224,3783.23,270.648,1024,7.33,0.58,10.84\necaresnet50d_pruned,288,3782.37,270.708,1024,19.94,4.19,10.61\nseresnext26ts,288,3781.76,270.745,1024,10.39,3.07,13.32\nresnet50_gn,224,3769.97,271.59,1024,25.56,4.14,11.11\nres2net50_48w_2s,224,3744.75,273.407,1024,25.29,4.18,11.72\nrepvit_m1_5,224,3729.62,274.53,1024,14.64,2.31,15.7\ntiny_vit_21m_224,224,3725.54,274.83,1024,33.22,4.29,20.08\nresnest26d,224,3723.36,274.993,1024,17.07,3.64,9.97\nresnet26d,288,3717.52,275.422,1024,16.01,4.29,13.48\nefficientnet_b2,288,3712.61,275.797,1024,9.11,1.12,16.2\nefficientnet_em,240,3709.85,275.987,1024,6.9,3.04,14.34\nvovnet57a,224,3706.22,276.265,1024,36.64,8.95,7.52\nresnetaa50d,224,3694.51,277.138,1024,25.58,5.39,12.44\nresnet50_clip_gap,224,3684.19,277.913,1024,23.53,5.39,12.44\nregnetx_040,224,3677.93,278.37,1024,22.12,3.99,12.2\nconvnextv2_atto,288,3666.13,279.278,1024,3.71,0.91,6.3\ninception_v3,299,3663.18,279.511,1024,23.83,5.73,8.97\nhiera_small_224,224,3657.67,279.941,1024,35.01,6.42,20.75\ngcvit_xxtiny,224,3656.12,280.058,1024,12.0,2.14,15.36\ntwins_pcpvt_small,224,3652.03,280.363,1024,24.11,3.83,18.08\nseresnet50,224,3649.19,280.58,1024,28.09,4.11,11.13\nresnetblur50,224,3629.01,282.14,1024,25.56,5.16,12.02\ndensenet121,224,3614.99,283.247,1024,7.98,2.87,6.9\nvit_medium_patch16_gap_240,240,3607.76,283.806,1024,44.4,9.22,18.81\necaresnet50t,224,3581.41,285.899,1024,25.57,
4.32,11.83\ncs3sedarknet_l,288,3578.3,286.15,1024,21.91,6.16,10.83\nvit_base_r26_s32_224,224,3567.73,286.987,1024,101.38,6.81,12.36\nmobilenetv4_conv_large,320,3558.21,287.765,1024,32.59,4.47,18.97\ninception_next_tiny,224,3555.09,288.019,1024,28.06,4.19,11.98\necaresnet50d,224,3553.36,288.157,1024,25.58,4.35,11.93\nmobileone_s0,224,3552.46,288.227,1024,5.29,1.09,15.48\ncoatnet_rmlp_nano_rw_224,224,3537.96,289.405,1024,15.15,2.62,20.34\ntf_mixnet_l,224,3536.96,289.484,1024,7.33,0.58,10.84\ndla60x,224,3535.6,289.605,1024,17.35,3.54,13.8\nese_vovnet57b,224,3532.21,289.885,1024,38.61,8.95,7.52\nedgenext_small,320,3521.78,290.744,1024,5.59,1.97,14.16\nmobilevitv2_125,256,3514.09,218.532,768,7.48,2.86,20.1\nresnet50_clip,224,3513.39,291.426,1024,38.32,6.14,12.98\nvit_base_patch32_384,384,3507.62,291.907,1024,88.3,13.06,16.5\nnf_regnet_b3,288,3505.93,292.061,1024,18.59,1.67,11.84\nvit_base_patch32_clip_384,384,3505.16,292.11,1024,88.3,13.06,16.5\nhgnetv2_b5,224,3503.99,292.219,1024,39.57,6.56,11.19\ntf_efficientnet_cc_b1_8e,240,3502.1,292.382,1024,39.72,0.75,15.44\nseresnet50t,224,3492.13,293.202,1024,28.1,4.32,11.83\nvit_large_patch32_224,224,3482.36,294.033,1024,305.51,15.39,13.3\nvit_medium_patch16_reg1_gap_256,256,3459.44,295.982,1024,38.88,10.63,22.26\nvit_medium_patch16_reg4_gap_256,256,3450.76,296.726,1024,38.88,10.76,22.6\nresnetblur50d,224,3438.8,297.748,1024,25.58,5.4,12.82\ncs3darknet_focus_x,256,3437.61,297.861,1024,35.02,8.03,10.69\neca_nfnet_l0,224,3436.69,297.944,1024,24.14,4.35,10.47\nconvnext_nano_ols,288,3435.64,298.035,1024,15.65,4.38,15.5\nresnetrs50,224,3434.24,298.144,1024,35.69,4.48,12.14\nresnext50_32x4d,224,3427.06,298.772,1024,25.03,4.26,14.4\ncoatnet_0_rw_224,224,3425.88,298.879,1024,27.44,4.43,18.73\nhgnet_small,224,3416.04,299.744,1024,24.36,8.53,8.79\ncspresnext50,256,3409.98,300.268,1024,20.57,4.05,15.86\ndla60_res2net,224,3403.55,300.841,1024,20.85,4.15,12.34\nres2net50_26w_4s,224,3401.73,300.973,1024,25.7,4.28,12.61\nmaxvit_pico_rw_256,256
,3401.1,225.792,768,7.46,1.83,22.3\nresmlp_24_224,224,3399.81,301.162,1024,30.02,5.96,10.91\nres2net50_14w_8s,224,3398.1,301.29,1024,25.06,4.21,13.28\nmaxvit_rmlp_pico_rw_256,256,3396.4,226.105,768,7.52,1.85,24.86\nresnet50s,224,3396.16,301.49,1024,25.68,5.47,13.52\nconvnextv2_femto,288,3395.92,301.516,1024,5.23,1.3,7.56\nregnety_040_sgn,224,3390.72,301.971,1024,20.65,4.03,12.29\nxcit_tiny_24_p16_224,224,3375.55,303.331,1024,12.12,2.34,11.82\nefficientvit_b2,256,3369.65,303.868,1024,24.33,2.09,19.03\nnfnet_f0,192,3349.0,305.741,1024,71.49,7.21,10.16\nnfnet_l0,224,3341.81,306.39,1024,35.07,4.36,10.47\nhieradet_small,256,3335.5,230.232,768,34.72,8.51,27.76\nedgenext_base,256,3315.68,308.815,1024,18.51,3.85,15.58\nresnest50d_1s4x24d,224,3309.67,309.367,1024,25.68,4.43,13.57\nefficientnet_lite3,300,3296.2,155.31,512,8.2,1.65,21.85\ndla60_res2next,224,3295.03,310.75,1024,17.03,3.49,13.17\nmobilenetv3_large_150d,320,3287.23,233.606,768,14.62,1.61,19.29\ncs3darknet_x,256,3280.58,312.118,1024,35.05,8.38,11.35\ncrossvit_18_240,240,3276.68,312.485,1024,43.27,9.05,26.26\nlambda_resnet26rpt_256,256,3275.72,234.435,768,10.99,3.16,11.87\nresnet32ts,288,3273.65,312.761,1024,17.96,5.86,14.65\nresnext50d_32x4d,224,3267.01,313.407,1024,25.05,4.5,15.2\ndensenetblur121d,224,3260.14,314.077,1024,8.0,3.11,7.9\nres2net50d,224,3253.03,314.728,1024,25.72,4.52,13.41\ndarknetaa53,256,3249.03,315.15,1024,36.02,7.97,12.39\nedgenext_small_rw,320,3242.29,315.807,1024,7.83,2.46,14.85\nresnet33ts,288,3239.71,316.048,1024,19.68,6.02,14.75\nseresnetaa50d,224,3235.08,316.472,1024,28.11,5.4,12.46\ntf_efficientnetv2_b3,300,3232.69,316.734,1024,14.36,3.04,15.74\nfocalnet_tiny_srf,224,3226.46,317.357,1024,28.43,4.42,16.32\nefficientnetv2_rw_t,288,3225.91,317.41,1024,13.65,3.19,16.42\neva02_small_patch14_224,224,3225.8,317.422,1024,21.62,6.14,18.28\ngcresnext50ts,256,3223.82,317.616,1024,15.67,3.75,15.46\nres2next50,224,3218.65,318.1,1024,24.67,4.2,13.71\ngcresnet50t,256,3218.31,318.16,1024,25.9,5.42,14.67
\ngmixer_24_224,224,3213.22,318.663,1024,24.72,5.28,14.45\neca_resnet33ts,288,3197.58,320.22,1024,19.68,6.02,14.76\nefficientvit_l1,224,3180.29,321.964,1024,52.65,5.27,15.85\nmobileone_s3,224,3179.46,322.047,1024,10.17,1.94,13.85\nrepvgg_b1g4,224,3167.88,323.216,1024,39.97,8.15,10.64\ngcresnet33ts,288,3167.19,323.296,1024,19.88,6.02,14.78\ncrossvit_18_dagger_240,240,3155.34,324.503,1024,44.27,9.5,27.03\nvit_medium_patch16_gap_256,256,3146.86,325.379,1024,38.86,10.59,22.15\nresnet152,176,3145.18,325.546,1024,60.19,7.22,13.99\nhgnetv2_b2,288,3131.53,326.979,1024,11.22,1.89,6.8\nseresnet33ts,288,3123.88,327.768,1024,19.78,6.02,14.76\nregnetz_b16,288,3121.28,328.043,1024,9.72,2.39,16.43\nlegacy_seresnext50_32x4d,224,3106.31,329.614,1024,27.56,4.26,14.42\nnextvit_small,224,3105.38,329.731,1024,31.76,5.81,18.44\nconvnextv2_nano,224,3104.37,329.834,1024,15.62,2.46,8.37\nxcit_nano_12_p16_384,384,3100.3,330.26,1024,3.05,1.64,12.15\nregnetz_c16,256,3087.39,331.642,1024,13.46,2.51,16.57\nresnet26t,320,3086.83,331.703,1024,16.01,5.24,16.44\npit_b_distilled_224,224,3072.82,333.214,1024,74.79,12.5,33.07\npoolformerv2_s12,224,3071.46,333.361,1024,11.89,1.83,5.53\ncs3sedarknet_x,256,3067.22,333.833,1024,35.4,8.38,11.35\necaresnet101d_pruned,288,3064.98,334.074,1024,24.88,5.75,12.71\npit_b_224,224,3054.02,335.268,1024,73.76,12.42,32.94\neva02_tiny_patch14_336,336,3050.79,335.63,1024,5.76,4.68,27.16\ngc_efficientnetv2_rw_t,288,3042.11,336.591,1024,13.68,3.2,16.45\nresnetrs101,192,3039.94,336.818,1024,63.62,6.04,12.7\nfbnetv3_g,288,3035.76,337.294,1024,16.62,1.77,21.09\nresnet51q,256,3034.29,337.446,1024,35.7,6.38,16.55\nseresnext50_32x4d,224,3033.27,337.56,1024,27.56,4.26,14.42\nrdnet_tiny,224,3025.51,338.425,1024,23.86,5.06,15.98\ndarknet53,256,2995.64,341.801,1024,41.61,9.31,12.39\necaresnetlight,288,2980.87,343.501,1024,30.16,6.79,13.91\nxcit_small_12_p16_224,224,2979.57,343.65,1024,26.25,4.82,12.58\nnf_ecaresnet50,224,2969.89,344.775,1024,25.56,4.21,11.13\nconvnext_small,224,2968
.52,344.929,1024,50.22,8.71,21.56\ncoatnet_bn_0_rw_224,224,2961.32,345.762,1024,27.44,4.67,22.04\ndensenet169,224,2960.88,345.825,1024,14.15,3.4,7.3\nfocalnet_tiny_lrf,224,2958.28,346.126,1024,28.65,4.49,17.76\npvt_v2_b2,224,2957.1,346.245,1024,25.36,4.05,27.53\nnf_seresnet50,224,2955.47,346.454,1024,28.09,4.21,11.13\nfastvit_t12,256,2935.13,348.86,1024,7.55,1.42,12.42\nhgnet_tiny,288,2932.95,349.114,1024,14.74,7.51,10.51\nregnetx_080,224,2928.56,349.629,1024,39.57,8.02,14.06\nvit_base_resnet50d_224,224,2926.61,349.865,1024,110.97,8.73,16.92\nconvnext_tiny,288,2917.07,351.019,1024,28.59,7.39,22.21\nmobilevitv2_150,256,2915.63,175.59,512,10.59,4.09,24.11\ncoatnet_rmlp_0_rw_224,224,2907.73,352.136,1024,27.45,4.72,24.89\nskresnet50,224,2900.52,353.008,1024,25.8,4.11,12.5\npoolformer_s24,224,2899.97,353.076,1024,21.39,3.41,10.68\nmobilenetv4_hybrid_medium,384,2891.5,354.122,1024,11.07,3.01,21.18\nnf_regnet_b3,320,2880.71,355.451,1024,18.59,2.05,14.61\ntf_efficientnet_lite3,300,2879.81,177.761,512,8.2,1.65,21.85\nresnet50_mlp,256,2876.92,355.906,1024,26.65,7.05,16.25\nsehalonet33ts,256,2863.49,357.576,1024,13.69,3.55,14.7\ngcvit_xtiny,224,2857.49,358.335,1024,19.98,2.93,20.26\ncs3sedarknet_xdw,256,2854.33,358.703,1024,21.6,5.97,17.18\nseresnext26t_32x4d,288,2852.69,358.931,1024,16.81,4.46,16.68\ngmlp_s16_224,224,2850.78,359.18,1024,19.42,4.42,15.1\ndeit_base_patch16_224,224,2847.84,359.551,1024,86.57,17.58,23.9\nresnetv2_101,224,2835.29,361.131,1024,44.54,7.83,16.23\ndeit_base_distilled_patch16_224,224,2834.0,361.303,1024,87.34,17.68,24.05\nseresnext26d_32x4d,288,2821.4,362.912,1024,16.81,4.51,16.85\nnf_resnet50,256,2816.37,363.566,1024,25.56,5.46,14.52\nregnetx_064,224,2813.74,363.901,1024,26.21,6.49,16.37\necaresnet26t,320,2807.22,364.749,1024,16.01,5.24,16.44\ndla102,224,2806.5,364.845,1024,33.27,7.19,14.18\nnest_tiny,224,2805.62,364.963,1024,17.06,5.83,25.48\necaresnet50t,256,2802.02,365.426,1024,25.57,5.64,15.45\nefficientnet_b3,288,2792.77,183.313,512,12.23,1.63,21
.49\nwide_resnet50_2,224,2792.33,366.693,1024,68.88,11.43,14.4\nvit_base_patch16_224_miil,224,2791.53,366.793,1024,94.4,17.59,23.91\nskresnet50d,224,2791.4,366.81,1024,25.82,4.36,13.31\nvit_base_patch16_224,224,2787.15,367.372,1024,86.57,17.58,23.9\nvit_base_patch16_clip_224,224,2783.55,367.849,1024,86.57,17.58,23.9\nfastvit_sa12,256,2781.99,368.06,1024,11.58,1.96,14.03\nvgg13,224,2780.3,368.277,1024,133.05,11.31,12.25\nresnet61q,256,2779.68,368.358,1024,36.85,7.8,17.01\nfastvit_s12,256,2776.35,368.806,1024,9.47,1.82,13.67\nmaxxvit_rmlp_nano_rw_256,256,2775.33,276.707,768,16.78,4.37,26.05\ncs3edgenet_x,256,2763.68,370.501,1024,47.82,11.53,12.92\nlambda_resnet50ts,256,2759.83,371.018,1024,21.54,5.07,17.48\ndm_nfnet_f0,192,2757.16,371.375,1024,71.49,7.21,10.16\nnest_tiny_jx,224,2755.91,371.543,1024,17.06,5.83,25.48\nrexnetr_300,224,2754.02,278.836,768,34.81,3.39,22.16\ncspdarknet53,256,2752.11,372.057,1024,27.64,6.57,16.81\nmixnet_xl,224,2741.75,373.463,1024,11.9,0.93,14.57\nresnet101,224,2736.75,374.138,1024,44.55,7.83,16.23\nresnetv2_50,288,2733.41,374.59,1024,25.55,6.79,18.37\nrepvgg_b1,224,2730.49,374.994,1024,57.42,13.16,10.64\ncoatnet_0_224,224,2722.37,188.051,512,25.04,4.58,24.01\nresnetv2_101d,224,2718.59,376.633,1024,44.56,8.07,17.04\nwide_resnet101_2,176,2695.47,379.875,1024,126.89,14.31,13.18\nvit_base_mci_224,224,2690.01,380.639,1024,86.35,17.73,24.65\nbeitv2_base_patch16_224,224,2684.73,381.393,1024,86.53,17.58,23.9\nres2net50_26w_6s,224,2681.29,381.848,1024,37.05,6.33,15.28\nrexnet_300,224,2677.08,286.849,768,34.71,3.44,22.4\nswin_tiny_patch4_window7_224,224,2659.27,385.037,1024,28.29,4.51,17.06\nmaxxvitv2_nano_rw_256,256,2649.05,289.896,768,23.7,6.26,23.05\nbeit_base_patch16_224,224,2644.21,387.233,1024,86.53,17.58,23.9\nresnet101d,224,2638.05,388.135,1024,44.57,8.08,17.04\nvit_base_patch32_clip_448,448,2613.19,391.83,1024,88.34,17.93,23.9\nresnet101c,224,2612.47,391.937,1024,44.57,8.08,17.04\nmixer_b16_224,224,2607.54,392.687,1024,59.88,12.62,14.53\nvi
t_relpos_base_patch16_clsgap_224,224,2605.09,393.051,1024,86.43,17.6,25.12\nefficientnetv2_s,288,2604.83,393.096,1024,21.46,4.75,20.13\ntwins_pcpvt_base,224,2604.17,393.184,1024,43.83,6.68,25.25\nefficientvit_b2,288,2600.38,393.768,1024,24.33,2.64,24.03\nconvnextv2_pico,288,2600.23,393.79,1024,9.07,2.27,10.08\nvit_relpos_base_patch16_cls_224,224,2599.77,393.861,1024,86.43,17.6,25.12\nefficientformer_l3,224,2594.92,394.593,1024,31.41,3.93,12.01\nresnet50,288,2592.09,395.018,1024,25.56,6.8,18.37\nmaxvit_nano_rw_256,256,2589.91,296.516,768,15.45,4.46,30.28\nmaxvit_rmlp_nano_rw_256,256,2586.52,296.905,768,15.5,4.47,31.92\ndeit3_base_patch16_224,224,2583.38,396.36,1024,86.59,17.58,23.9\npvt_v2_b2_li,224,2575.01,397.638,1024,22.55,3.91,27.6\nregnety_032,288,2560.26,399.931,1024,19.44,5.29,18.61\nefficientvit_l2,224,2558.08,400.28,1024,63.71,6.97,19.58\nrexnetr_200,288,2556.5,200.242,512,16.52,2.62,24.96\ncs3darknet_x,288,2555.27,400.72,1024,35.05,10.6,14.36\ndarknetaa53,288,2551.71,401.279,1024,36.02,10.08,15.68\ntresnet_v2_l,224,2548.5,401.775,1024,46.17,8.85,16.34\nhgnetv2_b3,288,2548.29,401.817,1024,16.29,2.94,8.38\ncs3se_edgenet_x,256,2541.11,402.95,1024,50.72,11.53,12.94\nresnest50d,224,2525.71,405.398,1024,27.48,5.4,14.36\ngcresnext50ts,288,2518.28,406.608,1024,15.67,4.75,19.57\ngcresnet50t,288,2515.86,406.998,1024,25.9,6.86,18.57\nxcit_nano_12_p8_224,224,2506.42,408.525,1024,3.05,2.16,15.71\nvit_small_patch16_384,384,2502.77,409.123,1024,22.2,15.52,50.78\nresnet101_clip_gap,224,2494.59,410.455,1024,42.52,9.11,17.56\nhiera_base_224,224,2493.69,410.614,1024,51.52,9.4,30.42\nresnetaa101d,224,2493.54,410.631,1024,44.57,9.12,17.56\nresnetv2_101x1_bit,224,2493.42,410.646,1024,44.54,8.04,16.23\ndavit_small,224,2490.93,308.299,768,49.75,8.8,30.49\ndpn68b,288,2486.84,411.746,1024,12.61,3.89,17.3\nefficientnetv2_rw_s,288,2485.89,411.904,1024,23.94,4.91,21.41\nhrnet_w18_ssld,224,2477.35,413.328,1024,21.3,4.32,16.31\nmobilenetv4_conv_large,384,2476.57,413.456,1024,32.59,6.43,2
7.31\ncait_xxs24_224,224,2472.37,414.153,1024,11.96,2.53,20.29\nmobilevitv2_175,256,2468.71,207.376,512,14.25,5.54,28.13\nresnet50t,288,2467.58,414.952,1024,25.57,7.14,19.53\nvit_betwixt_patch16_reg1_gap_256,256,2466.98,415.052,1024,60.4,16.32,27.83\nlamhalobotnet50ts_256,256,2464.83,415.423,1024,22.57,5.02,18.44\nhrnet_w18,224,2463.39,415.645,1024,21.3,4.32,16.31\nvit_base_patch16_siglip_gap_224,224,2459.11,416.38,1024,85.8,17.49,23.75\nflexivit_base,240,2458.77,416.449,1024,86.59,20.29,28.36\nvit_betwixt_patch16_reg4_gap_256,256,2454.92,417.093,1024,60.4,16.52,28.24\nlegacy_seresnet101,224,2446.63,418.514,1024,49.33,7.61,15.74\nresnet50d,288,2439.85,419.669,1024,25.58,7.19,19.7\nvit_base_patch16_siglip_224,224,2438.72,419.861,1024,92.88,17.73,24.06\nresnet101_clip,224,2414.07,424.148,1024,56.26,9.81,18.08\ntresnet_l,224,2412.56,424.417,1024,55.99,10.9,11.9\nese_vovnet39b,288,2409.2,318.758,768,24.57,11.71,11.13\ndarknet53,288,2407.72,425.266,1024,41.61,11.78,15.68\ncs3sedarknet_x,288,2391.52,428.157,1024,35.4,10.6,14.37\nmixer_l32_224,224,2388.54,428.693,1024,206.94,11.27,19.86\nresnetblur101d,224,2387.51,428.868,1024,44.57,9.12,17.94\ncoat_lite_small,224,2386.34,429.081,1024,19.84,3.96,22.09\nregnetv_040,288,2386.15,429.115,1024,20.64,6.6,20.3\nswin_s3_tiny_224,224,2384.47,429.416,1024,28.33,4.64,19.13\nregnety_080,224,2379.83,430.251,1024,39.18,8.0,17.97\nresnetaa50,288,2379.68,430.28,1024,25.56,8.52,19.24\nnextvit_base,224,2376.93,430.787,1024,44.82,8.29,23.71\nvit_base_patch16_gap_224,224,2375.57,431.026,1024,86.57,17.49,25.59\nsebotnet33ts_256,256,2374.22,161.709,384,13.7,3.89,17.46\nresnet101s,224,2366.29,432.714,1024,44.67,9.19,18.64\nconvnext_tiny_hnf,288,2365.38,432.876,1024,28.59,7.39,22.21\nmobileone_s4,224,2361.85,433.537,1024,14.95,3.04,17.74\nregnety_040,288,2353.01,435.158,1024,20.65,6.61,20.3\nvit_small_patch16_36x1_224,224,2352.87,435.189,1024,64.67,13.71,35.69\nregnetv_064,224,2352.23,435.302,1024,30.58,6.39,16.41\nvit_relpos_base_patch16_224,224
,2344.88,436.673,1024,86.43,17.51,24.97\nseresnet101,224,2343.32,436.959,1024,49.33,7.84,16.27\ndla102x,224,2342.65,437.086,1024,26.31,5.89,19.42\nvit_small_resnet50d_s16_224,224,2336.25,438.285,1024,57.53,13.48,24.82\necaresnet101d,224,2325.28,440.352,1024,44.57,8.08,17.07\nregnety_064,224,2324.93,440.376,1024,30.58,6.39,16.41\nvolo_d1_224,224,2323.65,440.668,1024,26.63,6.94,24.43\nresnetv2_50d_gn,288,2323.37,440.707,1024,25.57,7.24,19.7\nhiera_small_abswin_256,256,2323.14,440.761,1024,34.36,8.29,26.38\ndensenet201,224,2317.12,441.908,1024,20.01,4.34,7.85\nresnet51q,288,2316.81,441.957,1024,35.7,8.07,20.94\nefficientnet_b3,320,2307.8,221.837,512,12.23,2.01,26.52\nresnet50_gn,288,2303.59,444.495,1024,25.56,6.85,18.37\nhalonet50ts,256,2300.62,445.077,1024,22.73,5.3,19.2\nnf_resnet101,224,2293.33,446.489,1024,44.55,8.01,16.23\nresnext101_32x8d,176,2291.04,446.931,1024,88.79,10.33,19.37\nmaxvit_tiny_rw_224,224,2283.98,336.235,768,29.06,5.11,33.11\nresmlp_36_224,224,2277.0,449.683,1024,44.69,8.91,16.33\nefficientnet_b3_gn,288,2272.8,225.253,512,11.73,1.74,23.35\nvit_base_patch16_clip_quickgelu_224,224,2269.68,451.136,1024,86.19,17.58,23.9\nlegacy_xception,299,2267.92,338.617,768,22.86,8.4,35.83\ntf_efficientnet_b3,300,2265.14,226.006,512,12.23,1.87,23.83\nresnetaa50d,288,2259.31,453.205,1024,25.58,8.92,20.57\nvitamin_small_224,224,2252.3,454.619,1024,22.03,5.92,26.38\nvit_medium_patch16_rope_reg1_gap_256,256,2246.04,455.888,1024,38.74,10.63,22.26\nvgg13_bn,224,2243.6,456.38,1024,133.05,11.33,12.25\nsequencer2d_s,224,2241.43,456.822,1024,27.65,4.96,11.31\nseresnet50,288,2233.67,458.373,1024,28.09,6.8,18.39\nefficientvit_b3,224,2229.48,459.279,1024,48.65,3.99,26.9\nrepvgg_b2g4,224,2226.86,459.812,1024,61.76,12.63,12.9\nvit_small_patch16_18x2_224,224,2225.09,460.18,1024,64.67,13.71,35.69\ntf_efficientnetv2_s,300,2217.63,461.725,1024,21.46,5.35,22.73\ndeit3_small_patch16_384,384,2216.36,461.997,1024,22.21,15.52,50.78\nresnetblur50,288,2215.39,462.19,1024,25.56,8.52,19.87\nr
es2net101_26w_4s,224,2210.59,463.166,1024,45.21,8.1,18.45\nregnety_080_tv,224,2199.69,465.492,1024,39.38,8.51,19.73\nxcit_tiny_12_p16_384,384,2198.84,465.672,1024,6.72,3.64,18.26\nhgnetv2_b5,288,2192.99,466.916,1024,39.57,10.84,18.5\nresnext101_32x4d,224,2191.44,467.242,1024,44.18,8.01,21.23\nres2net50_26w_8s,224,2190.96,467.347,1024,48.4,8.37,17.95\necaresnet50t,288,2189.92,467.571,1024,25.57,7.14,19.55\nvit_relpos_base_patch16_rpn_224,224,2183.57,468.919,1024,86.41,17.51,24.97\ndensenet121,288,2181.11,469.465,1024,7.98,4.74,11.41\necaresnet50d,288,2174.22,470.95,1024,25.58,7.19,19.72\npvt_v2_b3,224,2173.14,471.171,1024,45.24,6.92,37.7\nvgg16,224,2165.35,472.876,1024,138.36,15.47,13.56\nvit_base_patch16_xp_224,224,2165.22,472.901,1024,86.51,17.56,23.9\nvit_base_patch16_rpn_224,224,2161.0,473.826,1024,86.54,17.49,23.75\nrepvit_m2_3,224,2157.85,474.494,1024,23.69,4.57,26.21\ncs3edgenet_x,288,2155.75,474.984,1024,47.82,14.59,16.36\nmvitv2_tiny,224,2155.16,475.117,1024,24.17,4.7,21.16\nswinv2_cr_tiny_224,224,2153.94,475.376,1024,28.33,4.66,28.45\nnf_resnet50,288,2147.63,476.776,1024,25.56,6.88,18.37\nese_vovnet99b,224,2146.96,476.932,1024,63.2,16.51,11.27\nmobilevitv2_200,256,2143.14,358.33,768,18.45,7.22,32.15\nres2net101d,224,2140.6,478.302,1024,45.23,8.35,19.25\nseresnet50t,288,2138.78,478.747,1024,28.1,7.14,19.55\nedgenext_base,320,2134.2,479.781,1024,18.51,6.01,24.32\nresnet61q,288,2130.61,480.584,1024,36.85,9.87,21.52\nswinv2_cr_tiny_ns_224,224,2127.09,481.38,1024,28.33,4.66,28.45\ninception_next_small,224,2115.6,484.002,1024,49.37,8.36,19.27\nvit_mediumd_patch16_reg4_gap_256,256,2113.09,484.575,1024,64.11,17.87,37.57\nese_vovnet39b_evos,224,2111.78,484.876,1024,24.58,7.07,6.74\nrdnet_small,224,2110.32,485.203,1024,50.44,8.74,22.55\nresnext50_32x4d,288,2108.45,485.637,1024,25.03,7.04,23.81\neca_nfnet_l0,288,2099.14,487.794,1024,24.14,7.12,17.29\nmobilenetv4_hybrid_medium,448,2097.44,366.139,768,11.07,4.2,29.64\nconvnext_base,224,2095.37,488.672,1024,88.59,15.38,2
8.75\nresnetblur50d,288,2095.33,488.675,1024,25.58,8.92,21.19\nregnety_040_sgn,288,2093.1,489.199,1024,20.65,6.67,20.3\nresnet101d,256,2091.32,489.613,1024,44.57,10.55,22.25\ncoatnet_rmlp_1_rw_224,224,2085.77,490.917,1024,41.69,7.85,35.47\nxception41p,299,2084.2,245.637,512,26.91,9.25,39.86\nregnetz_040,256,2068.8,494.89,1024,27.12,4.06,24.19\nnf_regnet_b4,320,2065.2,495.816,1024,30.21,3.29,19.88\nregnetz_040_h,256,2063.38,496.243,1024,28.94,4.12,24.29\nefficientvit_l2,256,2060.07,497.046,1024,63.71,9.09,25.49\nresnest50d_4s2x40d,224,2047.78,500.025,1024,30.42,4.4,17.94\nnfnet_l0,288,2047.5,500.093,1024,35.07,7.13,17.29\nvit_base_patch16_reg4_gap_256,256,2044.87,500.737,1024,86.62,23.5,33.89\nregnetz_d32,256,2033.99,503.413,1024,27.58,5.98,23.74\ndpn92,224,2027.88,504.934,1024,37.67,6.54,18.21\nregnetz_d8,256,2018.69,507.232,1024,23.37,3.97,23.74\nresnext50d_32x4d,288,2012.24,508.858,1024,25.05,7.44,25.13\nfocalnet_small_srf,224,1995.65,513.095,1024,49.89,8.62,26.26\nhgnet_small,288,1995.54,384.841,768,24.36,14.09,14.53\nvgg19,224,1994.6,513.358,1024,143.67,19.63,14.86\ncrossvit_base_240,240,1994.0,513.516,1024,105.03,21.22,36.33\nregnetz_c16,320,1992.12,513.999,1024,13.46,3.92,25.88\nresnetv2_152,224,1991.8,514.076,1024,60.19,11.55,22.56\nmobilenetv4_conv_aa_large,384,1988.94,514.825,1024,32.59,7.07,32.29\nhiera_base_plus_224,224,1983.12,516.333,1024,69.9,12.67,37.98\nregnetx_120,224,1982.95,516.372,1024,46.11,12.13,21.37\nconvnextv2_tiny,224,1982.1,516.59,1024,28.64,4.47,13.44\nseresnetaa50d,288,1977.18,517.879,1024,28.11,8.92,20.59\nrepvgg_b2,224,1974.36,518.618,1024,89.02,20.45,12.9\nlegacy_seresnext101_32x4d,224,1971.37,519.379,1024,48.96,8.02,21.26\ndensenetblur121d,288,1970.87,519.545,1024,8.0,5.14,13.06\nconvit_small,224,1970.28,519.698,1024,27.78,5.76,17.87\ncoatnet_1_rw_224,224,1960.67,522.246,1024,41.72,8.04,34.6\nnfnet_f0,256,1953.19,524.243,1024,71.49,12.62,18.05\nbotnet50ts_256,256,1951.79,262.305,512,22.74,5.54,22.23\npoolformer_s36,224,1948.45,525.50
6,1024,30.86,5.0,15.82\nconvmixer_1024_20_ks9_p14,224,1942.47,527.139,1024,24.38,5.55,5.51\nresnet152,224,1941.85,527.301,1024,60.19,11.56,22.56\nskresnext50_32x4d,224,1938.97,528.085,1024,27.48,4.5,17.18\nresnetv2_152d,224,1938.05,528.334,1024,60.2,11.8,23.36\nfastvit_mci0,256,1935.03,529.168,1024,11.41,2.42,18.29\ninception_v4,299,1934.57,529.276,1024,42.68,12.28,15.09\nseresnext101_32x4d,224,1925.49,531.724,1024,48.96,8.02,21.26\nnextvit_large,224,1924.7,532.009,1024,57.87,10.78,28.99\nhalo2botnet50ts_256,256,1915.33,534.611,1024,22.64,5.02,21.78\ncoatnet_rmlp_1_rw2_224,224,1914.3,534.892,1024,41.72,8.11,40.13\nvgg16_bn,224,1909.7,536.182,1024,138.37,15.5,13.56\nresnetv2_50d_frn,224,1907.53,536.788,1024,25.59,4.33,11.92\ntwins_svt_base,224,1904.0,537.783,1024,56.07,8.59,26.33\ngcvit_tiny,224,1903.79,537.854,1024,28.22,4.79,29.82\nvit_base_patch16_siglip_gap_256,256,1894.78,540.402,1024,85.84,23.13,33.23\ndla169,224,1893.96,540.641,1024,53.39,11.6,20.2\nconvnextv2_nano,288,1893.91,405.49,768,15.62,4.06,13.84\nresnet152d,224,1891.46,541.352,1024,60.21,11.8,23.36\nefficientnet_el,300,1884.76,543.277,1024,10.59,8.0,30.7\nresnet152c,224,1882.54,543.916,1024,60.21,11.8,23.36\ntwins_pcpvt_large,224,1881.08,544.338,1024,60.99,9.84,35.82\nnf_ecaresnet101,224,1877.87,545.282,1024,44.55,8.01,16.27\nvit_base_patch16_siglip_256,256,1875.34,546.002,1024,92.93,23.44,33.63\nmaxvit_tiny_tf_224,224,1873.59,409.888,768,30.92,5.6,35.78\nmaxxvit_rmlp_tiny_rw_256,256,1870.47,410.571,768,29.64,6.66,39.76\nefficientnet_el_pruned,300,1867.55,548.285,1024,10.59,8.0,30.7\nefficientnet_b3_gn,320,1866.95,205.663,384,11.73,2.14,28.83\nseresnext50_32x4d,288,1856.61,551.512,1024,27.56,7.04,23.82\nregnetz_b16_evos,224,1854.56,552.123,1024,9.74,1.43,9.95\nnf_seresnet101,224,1848.05,554.073,1024,49.33,8.02,16.27\nrepvgg_b3g4,224,1843.59,555.409,1024,83.83,17.89,15.1\nregnety_120,224,1840.57,556.321,1024,51.82,12.14,21.38\nmobilenetv4_conv_large,448,1830.25,419.592,768,32.59,8.75,37.17\nfocalnet_sm
all_lrf,224,1830.02,559.535,1024,50.34,8.74,28.61\ncaformer_s18,224,1825.4,560.94,1024,26.34,4.13,19.39\nnest_small,224,1821.81,562.055,1024,38.35,10.35,40.04\nefficientnet_b3_g8_gn,288,1817.33,422.578,768,14.25,2.59,23.35\ndensenet161,224,1806.94,566.679,1024,28.68,7.79,11.06\ntresnet_xl,224,1805.47,567.135,1024,78.44,15.2,15.34\nconvnext_small,288,1801.22,568.48,1024,50.22,14.39,35.65\nnest_small_jx,224,1800.35,568.755,1024,38.35,10.35,40.04\necaresnet50t,320,1797.24,569.733,1024,25.57,8.82,24.13\nvit_small_patch8_224,224,1790.17,571.984,1024,21.67,22.44,80.84\nvit_large_r50_s32_224,224,1790.0,572.042,1024,328.99,19.58,24.41\nefficientvit_b3,256,1789.45,429.156,768,48.65,5.2,35.01\ndavit_base,224,1784.6,430.324,768,87.95,15.51,40.66\ntf_efficientnet_el,300,1777.69,576.0,1024,10.59,8.0,30.7\nvit_base_patch16_plus_240,240,1770.19,578.441,1024,117.56,27.41,33.08\nmaxvit_tiny_rw_256,256,1767.83,434.406,768,29.07,6.74,44.35\nmaxvit_rmlp_tiny_rw_256,256,1764.91,435.129,768,29.15,6.77,46.92\nsequencer2d_m,224,1760.97,581.467,1024,38.31,6.55,14.26\nxception41,299,1760.86,290.748,512,26.97,9.28,39.86\nresnet152s,224,1745.67,586.565,1024,60.32,12.92,24.96\nefficientnet_b4,320,1740.06,294.222,512,19.34,3.13,34.76\ncoatnet_1_224,224,1739.09,294.386,512,42.23,8.7,39.0\nresnetv2_101,288,1737.4,589.353,1024,44.54,12.94,26.83\nhrnet_w30,224,1723.75,593.993,1024,37.71,8.15,21.21\nmixnet_xxl,224,1723.5,445.584,768,23.96,2.04,23.43\nconvformer_s18,224,1723.21,594.213,1024,26.77,3.96,15.82\nlegacy_seresnet152,224,1717.65,596.143,1024,66.82,11.33,22.08\nregnetx_160,224,1711.44,598.295,1024,54.28,15.99,25.52\nresnetv2_50d_evos,224,1709.91,598.829,1024,25.59,4.33,11.92\nrexnetr_300,288,1708.11,299.718,512,34.81,5.59,36.61\nmobilenetv4_hybrid_large,384,1707.76,599.592,1024,37.76,7.77,34.52\neva02_base_patch16_clip_224,224,1707.16,599.804,1024,86.26,17.62,26.32\nwide_resnet50_2,288,1707.11,599.82,1024,68.88,18.89,23.81\nmvitv2_small_cls,224,1705.56,600.369,1024,34.87,7.04,28.17\nhrnet_w32
,224,1694.43,604.278,1024,41.23,8.97,22.02\nxcit_tiny_12_p8_224,224,1681.45,608.969,1024,6.71,4.81,23.6\nresnet101,288,1679.51,609.673,1024,44.55,12.95,26.83\ntnt_s_patch16_224,224,1675.64,611.078,1024,23.76,5.24,24.37\nwide_resnet101_2,224,1674.87,611.366,1024,126.89,22.8,21.23\nvgg19_bn,224,1663.12,615.681,1024,143.68,19.66,14.86\ncait_xxs36_224,224,1660.72,616.58,1024,17.3,3.77,30.34\nswin_small_patch4_window7_224,224,1658.9,617.245,1024,49.61,8.77,27.47\nseresnet152,224,1654.98,618.709,1024,66.82,11.57,22.61\nvit_betwixt_patch16_rope_reg4_gap_256,256,1646.55,621.878,1024,60.23,16.52,28.24\nconvnext_tiny,384,1644.05,311.408,512,28.59,13.14,39.48\nefficientformerv2_s0,224,1630.94,627.828,1024,3.6,0.41,5.3\ncs3se_edgenet_x,320,1622.94,630.928,1024,50.72,18.01,20.21\nmvitv2_small,224,1621.36,631.548,1024,34.87,7.0,28.08\nvit_relpos_base_patch16_plus_240,240,1621.32,631.559,1024,117.38,27.3,34.33\nefficientvit_l2,288,1608.39,636.638,1024,63.71,11.51,32.19\nconvnext_base,256,1605.42,637.811,1024,88.59,20.09,37.55\ndm_nfnet_f0,256,1604.65,638.122,1024,71.49,12.62,18.05\ndla102x2,224,1600.16,639.91,1024,41.28,9.34,29.91\nmaxvit_tiny_pm_256,256,1598.65,480.383,768,30.09,6.61,47.9\nefficientnet_lite4,380,1596.86,240.45,384,13.01,4.04,45.66\nxcit_small_24_p16_224,224,1593.77,642.475,1024,47.67,9.1,23.64\nsamvit_base_patch16_224,224,1591.15,643.539,1024,86.46,17.54,24.54\nvit_small_r26_s32_384,384,1589.79,644.083,1024,36.47,10.43,29.85\nregnety_160,224,1584.47,646.244,1024,83.59,15.96,23.04\nhiera_base_abswin_256,256,1579.74,648.184,1024,51.27,12.46,40.7\nhgnetv2_b6,224,1577.08,649.276,1024,75.26,16.88,21.23\nvit_base_r50_s16_224,224,1574.79,650.212,1024,97.89,21.66,35.28\npoolformerv2_s24,224,1572.19,651.293,1024,21.34,3.42,10.68\ncoat_tiny,224,1561.91,655.584,1024,5.5,4.35,27.2\npvt_v2_b4,224,1560.91,655.996,1024,62.56,10.14,53.74\npvt_v2_b5,224,1557.23,657.547,1024,81.96,11.76,50.92\neca_nfnet_l1,256,1553.7,659.05,1024,41.41,9.62,22.04\nrepvgg_b3,224,1551.91,659.803,1024
,123.09,29.16,15.1\nxception65p,299,1551.33,329.999,512,39.82,13.91,52.48\nswinv2_tiny_window8_256,256,1547.17,661.823,1024,28.35,5.96,24.57\nresnetaa101d,288,1535.29,666.945,1024,44.57,15.07,29.03\nfastvit_sa24,256,1525.62,671.177,1024,21.55,3.8,24.32\nefficientnetv2_s,384,1507.35,679.311,1024,21.46,8.44,35.77\nefficientformerv2_s1,224,1505.34,680.214,1024,6.19,0.67,7.66\nresnet152d,256,1501.71,681.861,1024,60.21,15.41,30.51\ninception_next_base,224,1485.4,689.354,1024,86.67,14.85,25.69\nefficientnet_b3_g8_gn,320,1480.62,518.679,768,14.25,3.2,28.83\nregnety_080,288,1476.82,693.353,1024,39.18,13.22,29.69\ndpn98,224,1476.36,693.573,1024,61.57,11.73,25.2\nmobilenetv4_conv_aa_large,448,1470.08,522.395,768,32.59,9.63,43.94\nhrnet_w18_ssld,288,1462.78,700.012,1024,21.3,7.14,26.96\nresnetblur101d,288,1462.58,700.1,1024,44.57,15.07,29.65\nrdnet_base,224,1460.56,525.796,768,87.45,15.4,31.14\nhgnet_base,224,1454.32,528.06,768,71.58,25.14,15.47\nefficientnetv2_rw_s,384,1440.54,710.82,1024,23.94,8.72,38.03\nnf_regnet_b4,384,1439.96,711.11,1024,30.21,4.7,28.61\nseresnet101,288,1437.85,712.145,1024,49.33,12.95,26.87\nregnetv_064,288,1435.74,713.191,1024,30.58,10.55,27.11\nfocalnet_base_srf,224,1425.69,718.219,1024,88.15,15.28,35.01\neva02_small_patch14_336,336,1424.13,719.007,1024,22.13,15.48,54.33\necaresnet101d,288,1423.42,719.366,1024,44.57,13.35,28.19\nregnety_064,288,1420.12,720.99,1024,30.58,10.56,27.11\ntf_efficientnetv2_s,384,1419.64,721.279,1024,21.46,8.44,35.77\ntf_efficientnet_lite4,380,1419.26,270.533,384,13.01,4.04,45.66\ninception_resnet_v2,299,1412.75,724.752,1024,55.84,13.18,25.06\nresnext101_64x4d,224,1412.06,725.153,1024,83.46,15.52,31.21\ncrossvit_15_dagger_408,408,1397.53,732.691,1024,28.5,21.45,95.05\nresnext101_32x8d,224,1396.99,732.978,1024,88.79,16.48,31.21\nresnet200,224,1396.79,733.081,1024,64.67,15.07,32.19\nefficientvit_b3,288,1387.6,553.447,768,48.65,6.58,44.2\nresnetrs101,288,1381.07,741.426,1024,63.62,13.56,28.53\npoolformer_m36,224,1377.13,743.544
,1024,56.17,8.8,22.02\nmaxvit_rmlp_small_rw_224,224,1365.13,562.563,768,64.9,10.75,49.3\nvit_mediumd_patch16_rope_reg1_gap_256,256,1360.17,752.819,1024,63.95,17.65,37.02\nresnext101_32x4d,288,1355.28,755.535,1024,44.18,13.24,35.09\nvit_so150m_patch16_reg4_gap_256,256,1352.83,756.904,1024,134.13,36.75,53.21\nvit_medium_patch16_gap_384,384,1340.12,764.084,1024,39.03,26.08,67.54\nvit_so150m_patch16_reg4_map_256,256,1340.1,764.094,1024,141.48,37.18,53.68\nswinv2_cr_small_224,224,1339.46,764.46,1024,49.7,9.07,50.27\nresnet101d,320,1328.63,770.69,1024,44.57,16.48,34.77\nregnetz_040,320,1328.31,385.424,512,27.12,6.35,37.78\nswinv2_cr_small_ns_224,224,1326.17,772.119,1024,49.7,9.08,50.27\nregnetz_040_h,320,1323.75,386.752,512,28.94,6.43,37.94\nfocalnet_base_lrf,224,1318.02,776.898,1024,88.75,15.43,38.13\neva02_base_patch14_224,224,1315.36,778.472,1024,85.76,23.22,36.55\nxception65,299,1311.64,390.314,512,39.92,13.96,52.48\nvit_base_patch16_rope_reg1_gap_256,256,1310.51,781.347,1024,86.43,23.22,33.39\nnest_base,224,1310.17,781.549,1024,67.72,17.96,53.39\nconvnextv2_small,224,1310.01,781.645,1024,50.32,8.71,21.56\nnfnet_f1,224,1306.81,783.562,1024,132.63,17.87,22.94\nefficientnetv2_m,320,1304.81,784.765,1024,54.14,11.01,39.97\nvolo_d2_224,224,1303.25,785.708,1024,58.68,14.34,41.34\ncoatnet_2_rw_224,224,1301.67,393.319,512,73.87,15.09,49.22\nregnetz_d32,320,1300.21,787.532,1024,27.58,9.33,37.08\nseresnext101_64x4d,224,1298.53,788.558,1024,88.23,15.53,31.25\nnest_base_jx,224,1295.76,790.243,1024,67.72,17.96,53.39\ngmlp_b16_224,224,1289.85,793.862,1024,73.08,15.78,30.21\nmobilevitv2_150,384,1286.35,198.995,256,10.59,9.2,54.25\nhrnet_w40,224,1285.98,796.256,1024,57.56,12.75,25.29\nregnetz_d8,320,1285.82,796.348,1024,23.37,6.19,37.08\nseresnext101_32x8d,224,1284.38,797.243,1024,93.57,16.48,31.25\nseresnet152d,256,1276.56,802.126,1024,66.84,15.42,30.56\nresnetrs152,256,1273.91,803.74,1024,86.62,15.59,30.83\nmobilenetv4_conv_aa_large,480,1272.27,603.62,768,32.59,11.05,50.45\nconvnex
tv2_tiny,288,1271.26,604.097,768,28.64,7.39,22.21\ncait_s24_224,224,1270.01,806.265,1024,46.92,9.35,40.58\nconvnext_base,288,1266.87,808.261,1024,88.59,25.43,47.53\nseresnext101d_32x8d,224,1264.56,809.737,1024,93.59,16.72,32.05\nresnest101e,256,1259.59,812.93,1024,48.28,13.38,28.66\nefficientformer_l7,224,1257.98,813.975,1024,82.23,10.17,24.45\ntwins_svt_large,224,1250.2,819.039,1024,99.27,15.15,35.1\nmaxvit_small_tf_224,224,1245.42,411.087,512,68.93,11.66,53.17\nmaxxvit_rmlp_small_rw_256,256,1239.49,619.588,768,66.01,14.67,58.38\nmobilenetv4_hybrid_large,448,1233.31,622.691,768,37.76,10.74,48.61\nresnet50x4_clip_gap,288,1228.45,833.533,1024,65.62,19.57,34.11\ncoatnet_rmlp_2_rw_224,224,1227.94,416.935,512,73.88,15.18,54.78\ncoat_mini,224,1224.8,836.033,1024,10.34,6.82,33.68\ncoatnet_2_224,224,1217.65,420.455,512,74.68,16.5,52.67\ncoat_lite_medium,224,1217.51,841.038,1024,44.57,9.81,40.06\nefficientnet_b4,384,1211.17,317.024,384,19.34,4.51,50.04\nswin_base_patch4_window7_224,224,1207.02,848.341,1024,87.77,15.47,36.63\nconvnext_large,224,1206.16,848.95,1024,197.77,34.4,43.13\ntresnet_m,448,1204.65,850.01,1024,31.39,22.99,29.21\nmvitv2_base_cls,224,1201.41,852.308,1024,65.44,10.23,40.65\nvit_large_patch32_384,384,1196.88,855.533,1024,306.63,45.31,43.86\nresnet152,288,1192.43,858.72,1024,60.19,19.11,37.28\nseresnext101_32x4d,288,1192.03,859.008,1024,48.96,13.25,35.12\ntiny_vit_21m_384,384,1188.05,646.409,768,21.23,13.77,77.83\nseresnextaa101d_32x8d,224,1183.29,865.35,1024,93.59,17.25,34.16\nresnet50x4_clip,288,1179.84,867.878,1024,87.14,21.35,35.27\nxcit_tiny_24_p16_384,384,1171.17,874.311,1024,12.12,6.87,34.29\nlevit_conv_384_s8,224,1166.16,439.026,512,39.12,9.98,35.86\ndm_nfnet_f1,224,1150.17,890.285,1024,132.63,17.87,22.94\nregnetz_e8,256,1147.85,892.072,1024,57.7,9.91,40.94\nswin_s3_small_224,224,1145.1,670.656,768,49.74,9.43,37.84\nmvitv2_base,224,1138.44,899.454,1024,51.47,10.16,40.5\nsequencer2d_l,224,1135.5,901.779,1024,54.3,9.74,22.12\nefficientnetv2_rw_m,320,1
134.41,902.651,1024,53.24,12.72,47.14\ngcvit_small,224,1132.43,904.224,1024,51.09,8.57,41.61\nregnety_120,288,1127.97,680.842,768,51.82,20.06,35.34\nhrnet_w44,224,1125.91,909.412,1024,67.06,14.94,26.92\nlevit_384_s8,224,1123.29,455.784,512,39.12,9.98,35.86\nregnetz_b16_evos,288,1122.27,684.3,768,9.74,2.36,16.43\nhrnet_w48_ssld,224,1116.96,916.749,1024,77.47,17.34,28.56\nhrnet_w48,224,1113.97,919.215,1024,77.47,17.34,28.56\nregnetz_c16_evos,256,1112.53,690.289,768,13.49,2.48,16.57\ntf_efficientnet_b4,380,1110.38,345.798,384,19.34,4.49,49.49\nxcit_medium_24_p16_224,224,1105.25,926.46,1024,84.4,16.13,31.71\ntnt_b_patch16_224,224,1094.56,935.51,1024,65.41,14.09,39.01\nmobilevitv2_175,384,1091.31,234.563,256,14.25,12.47,63.29\ndpn131,224,1083.25,945.269,1024,79.25,16.09,32.97\nnextvit_small,384,1081.98,946.382,1024,31.76,17.26,57.14\nresnet200d,256,1077.87,949.987,1024,64.69,20.0,43.09\nvit_betwixt_patch16_reg4_gap_384,384,1077.83,950.026,1024,60.6,39.71,85.28\nefficientvit_l3,224,1066.9,719.814,768,246.04,27.62,39.16\nconvnextv2_nano,384,1066.76,359.947,384,15.62,7.22,24.61\nmaxvit_rmlp_small_rw_256,256,1061.31,723.613,768,64.9,14.15,66.09\npoolformerv2_s36,224,1054.78,970.792,1024,30.79,5.01,15.82\nfastvit_sa36,256,1050.89,974.39,1024,31.53,5.64,34.61\ndavit_large,224,1043.66,735.848,768,196.81,34.6,60.99\nconvit_base,224,1041.2,983.452,1024,86.54,17.52,31.77\nlegacy_senet154,224,1041.02,983.633,1024,115.09,20.77,38.69\nresnetv2_50d_evos,288,1038.94,985.591,1024,25.59,7.15,19.7\npoolformer_m48,224,1037.67,986.794,1024,73.47,11.59,29.17\nvitamin_base_224,224,1035.95,494.209,512,87.72,22.68,52.77\ncrossvit_18_dagger_408,408,1032.79,991.461,1024,44.61,32.47,124.87\nfastvit_mci1,256,1032.41,991.824,1024,21.54,4.72,32.84\nmaxxvitv2_rmlp_base_rw_224,224,1032.37,743.892,768,116.09,24.2,62.77\nswinv2_base_window12_192,192,1028.07,996.011,1024,109.28,11.9,39.72\nconvnext_base,320,1024.52,749.592,768,88.59,31.39,58.68\nxcit_small_12_p16_384,384,1021.49,1002.43,1024,26.25,14.14,3
6.51\nresnetv2_50x1_bit,448,1019.03,502.406,512,25.55,16.62,44.46\nsenet154,224,1016.88,1006.903,1024,115.09,20.77,38.69\ndensenet264d,224,1016.07,1007.782,1024,72.74,13.57,14.0\nconvnext_small,384,1015.93,755.929,768,50.22,25.58,63.37\nseresnet152,288,1013.71,1010.121,1024,66.82,19.11,37.34\nregnety_320,224,1007.78,1016.066,1024,145.05,32.34,30.26\ndpn107,224,1005.01,1018.86,1024,86.92,18.38,33.46\nxception71,299,1004.0,509.938,512,42.34,18.09,69.92\nhgnetv2_b6,288,981.26,782.643,768,75.26,27.9,35.09\nswinv2_cr_base_224,224,978.09,1046.904,1024,87.88,15.86,59.66\neca_nfnet_l1,320,976.83,1048.265,1024,41.41,14.92,34.42\nregnety_160,288,976.51,524.284,512,83.59,26.37,38.07\ncaformer_s36,224,971.41,1054.103,1024,39.3,8.0,37.53\nswinv2_cr_base_ns_224,224,970.9,1054.655,1024,87.88,15.86,59.66\nmobilenetv4_conv_aa_large,544,968.9,528.415,512,32.59,14.19,64.79\nswinv2_small_window8_256,256,964.83,1061.297,1024,49.73,11.58,40.14\nresnetv2_50x3_bit,224,964.09,796.571,768,217.32,37.06,33.34\nregnetx_320,224,962.49,1063.881,1024,107.81,31.81,36.3\nswinv2_cr_small_ns_256,256,961.42,1065.06,1024,49.7,12.07,76.21\nnf_regnet_b5,384,955.45,803.79,768,49.74,7.95,42.9\nswin_s3_base_224,224,954.89,1072.349,1024,71.13,13.69,48.26\nresnet152d,320,952.68,1074.832,1024,60.21,24.08,47.67\nswinv2_tiny_window16_256,256,949.54,404.378,384,28.35,6.68,39.02\nmobilevitv2_200,384,949.03,269.729,256,18.45,16.24,72.34\nefficientvit_l2,384,946.77,540.762,512,63.71,20.45,57.01\ncoat_small,224,946.54,1081.808,1024,21.69,12.61,44.25\nconvnextv2_base,224,943.15,814.261,768,88.72,15.38,28.75\nvolo_d3_224,224,938.88,1090.634,1024,86.33,20.78,60.09\necaresnet200d,256,930.32,1100.668,1024,64.69,20.0,43.15\nvit_mediumd_patch16_reg4_gap_384,384,927.5,1104.011,1024,64.27,43.67,113.51\ndeit_base_patch16_384,384,924.35,1107.782,1024,86.86,55.54,101.56\ndeit_base_distilled_patch16_384,384,923.14,1109.233,1024,87.63,55.65,101.82\nconvnext_large_mlp,256,921.21,833.66,768,200.13,44.94,56.33\nvit_base_patch16_384,38
4,915.77,1118.154,1024,86.86,55.54,101.56\nconvformer_s36,224,915.39,1118.618,1024,40.01,7.67,30.5\nvit_base_patch16_clip_384,384,912.89,1121.683,1024,86.86,55.54,101.56\nvit_large_patch16_224,224,912.7,1121.913,1024,304.33,61.6,63.52\neva_large_patch14_196,196,906.88,1129.113,1024,304.14,61.57,63.52\nseresnet200d,256,906.73,1129.3,1024,71.86,20.01,43.15\nresnetrs200,256,903.32,1133.498,1024,93.21,20.18,43.42\nhgnet_base,288,882.61,580.058,512,71.58,41.55,25.57\nxcit_tiny_24_p8_224,224,882.26,1160.631,1024,12.11,9.21,45.39\nrdnet_large,224,880.89,581.2,512,186.27,34.74,46.67\nresnext101_64x4d,288,874.91,1170.378,1024,83.46,25.66,51.59\nfastvit_ma36,256,873.15,1172.691,1024,44.07,7.88,41.09\ntf_efficientnetv2_m,384,871.16,1175.422,1024,54.14,15.85,57.52\nbeit_large_patch16_224,224,866.7,1181.465,1024,304.43,61.6,63.52\nhrnet_w64,224,866.3,1181.977,1024,128.06,28.97,35.09\nefficientvit_l3,256,859.51,893.502,768,246.04,36.06,50.98\nmixer_l16_224,224,858.48,1192.773,1024,208.2,44.6,41.69\nvit_small_patch14_dinov2,518,855.54,1196.86,1024,22.06,46.76,198.79\nbeitv2_large_patch16_224,224,855.13,1197.449,1024,304.43,61.6,63.52\nresnet200,288,849.17,1205.852,1024,64.67,24.91,53.21\nxcit_nano_12_p8_384,384,849.15,1205.887,1024,3.05,6.34,46.08\nnextvit_base,384,845.41,1211.22,1024,44.82,24.64,73.95\ndeit3_base_patch16_384,384,844.28,1212.833,1024,86.88,55.54,101.56\ndeit3_large_patch16_224,224,842.04,1216.066,1024,304.37,61.6,63.52\ngcvit_base,224,837.14,1223.178,1024,90.32,14.87,55.48\nvit_base_patch16_18x2_224,224,836.78,1223.713,1024,256.73,52.51,71.38\nbeit_base_patch16_384,384,834.75,1226.684,1024,86.74,55.54,101.56\nhiera_large_224,224,833.73,1228.187,1024,213.74,40.34,83.37\nmaxvit_rmlp_base_rw_224,224,831.5,923.612,768,116.14,23.15,92.64\nvit_small_patch14_reg4_dinov2,518,820.76,1247.592,1024,22.06,46.95,199.77\nseresnet152d,320,810.78,1262.948,1024,66.84,24.09,47.72\nresnetrs152,320,806.91,1269.013,1024,86.62,24.34,48.14\nvit_base_patch16_siglip_gap_384,384,803.81,127
3.896,1024,86.09,55.43,101.3\nresnext101_32x16d,224,802.86,1275.414,1024,194.03,36.27,51.18\nvolo_d1_384,384,801.94,1276.869,1024,26.78,22.75,108.55\nlevit_conv_512_s8,224,796.51,321.385,256,74.05,21.82,52.28\nvit_base_patch16_siglip_384,384,796.23,1286.031,1024,93.18,56.12,102.2\nefficientformerv2_s2,224,793.27,1290.823,1024,12.71,1.27,11.77\nflexivit_large,240,792.48,1292.113,1024,304.36,70.99,75.39\nseresnext101_32x8d,288,790.85,1294.776,1024,93.57,27.24,51.63\nconvnext_xlarge,224,789.11,973.216,768,350.2,60.98,57.5\nseresnext101d_32x8d,288,779.13,1314.209,1024,93.59,27.64,52.95\nfastvit_mci2,256,770.69,1328.66,1024,35.82,7.91,43.34\nxcit_small_12_p8_224,224,768.93,1331.684,1024,26.21,18.69,47.21\nefficientnetv2_m,416,757.66,1351.509,1024,54.14,18.6,67.5\nlevit_512_s8,224,756.91,338.197,256,74.05,21.82,52.28\nnfnet_f2,256,754.41,1357.331,1024,193.78,33.76,41.85\npoolformerv2_m36,224,753.91,1358.223,1024,56.08,8.81,22.02\ncoatnet_rmlp_3_rw_224,224,747.12,342.626,256,165.15,33.56,79.47\nswin_large_patch4_window7_224,224,734.47,1045.628,768,196.53,34.53,54.94\ncoatnet_3_rw_224,224,734.18,348.657,256,181.81,33.44,73.83\ncoatnet_3_224,224,734.12,348.691,256,166.97,36.56,79.01\nefficientnet_b5,416,732.18,349.614,256,30.39,8.27,80.68\nmaxvit_base_tf_224,224,727.37,703.885,512,119.47,24.04,95.01\nseresnextaa101d_32x8d,288,726.54,1409.397,1024,93.59,28.51,56.44\nregnetz_e8,320,726.44,1057.182,768,57.7,15.46,63.94\nconvnext_large,288,724.73,706.442,512,197.77,56.87,71.29\nconvnextv2_tiny,384,722.62,531.372,384,28.64,13.14,39.48\necaresnet200d,288,722.05,1418.16,1024,64.69,25.31,54.59\nregnetz_d8_evos,256,720.34,1421.53,1024,23.46,4.5,24.92\nresnetv2_152x2_bit,224,716.98,1428.18,1024,236.34,46.95,45.11\nseresnet269d,256,714.47,1433.198,1024,113.67,26.59,53.6\nconvnext_base,384,714.31,716.749,512,88.59,45.21,84.49\nregnetz_c16_evos,320,711.04,720.042,512,13.49,3.86,25.88\nseresnet200d,288,706.46,1449.448,1024,71.86,25.32,54.6\ncaformer_m36,224,705.55,1451.316,1024,56.2,13.29
,50.48\nswinv2_base_window8_256,256,704.56,1090.005,768,87.92,20.37,52.59\ndavit_huge,224,697.25,734.284,512,348.92,61.23,81.32\nxcit_large_24_p16_224,224,695.71,1471.847,1024,189.1,35.86,47.27\nnextvit_large,384,694.8,1473.781,1024,57.87,32.03,90.76\nnfnet_f1,320,694.64,1474.11,1024,132.63,35.97,46.77\nresnetrs270,256,693.43,1476.685,1024,129.86,27.06,55.84\nmaxxvitv2_rmlp_large_rw_224,224,685.54,1120.26,768,215.42,44.14,87.15\nresnet200d,320,684.31,1496.359,1024,64.69,31.25,67.33\neca_nfnet_l2,320,677.83,1510.682,1024,56.72,20.95,47.43\nhrnet_w48_ssld,288,674.81,1517.452,1024,77.47,28.66,47.21\nconvformer_m36,224,673.4,1520.613,1024,57.05,12.89,42.05\nvit_large_patch14_224,224,671.42,1525.099,1024,304.2,81.08,88.79\nefficientnetv2_rw_m,416,660.69,1162.389,768,53.24,21.49,79.62\nvit_base_patch8_224,224,659.1,1165.195,768,86.58,78.22,161.69\nvit_large_patch14_clip_224,224,658.44,1555.16,1024,304.2,81.08,88.79\nresnetv2_101x1_bit,448,655.55,780.987,512,44.54,31.65,64.93\nswinv2_large_window12_192,192,647.21,791.055,512,228.77,26.17,56.53\nnf_regnet_b5,456,647.09,791.219,512,49.74,11.7,61.95\nefficientnet_b5,448,640.21,399.84,256,30.39,9.59,93.56\ntiny_vit_21m_512,512,639.87,600.092,384,21.27,27.02,177.93\ndm_nfnet_f2,256,635.02,1612.538,1024,193.78,33.76,41.85\ntresnet_l,448,634.37,1614.169,1024,55.99,43.59,47.56\nhalonet_h1,256,633.74,403.931,256,8.1,3.0,51.17\ncaformer_s18,384,629.68,813.073,512,26.34,13.42,77.34\nvit_large_patch16_siglip_gap_256,256,628.22,1629.977,1024,303.36,80.8,88.34\nmaxvit_tiny_tf_384,384,626.52,408.586,256,30.98,17.53,123.42\nvit_large_patch16_siglip_256,256,625.66,1636.644,1024,315.96,81.34,88.88\nvit_large_r50_s32_384,384,615.76,1662.953,1024,329.09,57.43,76.52\nregnety_640,224,613.16,1252.507,768,281.38,64.16,42.5\nswinv2_cr_large_224,224,606.35,1266.568,768,196.68,35.1,78.42\nswinv2_small_window16_256,256,601.52,638.354,384,49.73,12.82,66.29\nseresnextaa101d_32x8d,320,597.68,1284.933,768,93.59,35.19,69.67\nconvnextv2_large,224,595.67,85
9.51,512,197.96,34.4,43.13\nconvmixer_768_32,224,587.98,1741.521,1024,21.11,19.55,25.95\nconvnext_large_mlp,320,587.82,870.986,512,200.13,70.21,88.02\nconvformer_s18,384,584.32,876.206,512,26.77,11.63,46.49\nvolo_d4_224,224,581.51,1760.899,1024,192.96,44.34,80.22\nconvnextv2_base,288,576.25,888.468,512,88.72,25.43,47.53\nresnetrs200,320,573.92,1784.186,1024,93.21,31.51,67.81\ndm_nfnet_f1,320,570.82,1793.897,1024,132.63,35.97,46.77\nresnetv2_101x3_bit,224,568.43,1351.065,768,387.93,71.23,48.7\npoolformerv2_m48,224,567.13,1805.556,1024,73.35,11.59,29.17\nvit_large_patch14_clip_quickgelu_224,224,566.47,1807.657,1024,303.97,81.08,88.79\nxcit_tiny_12_p8_384,384,566.0,1809.156,1024,6.71,14.13,69.14\nregnety_160,384,565.42,679.108,384,83.59,46.87,67.67\nseresnet269d,288,556.07,1841.481,1024,113.67,33.65,67.81\nvit_large_patch14_xp_224,224,555.64,1842.897,1024,304.06,81.01,88.79\ntf_efficientnet_b5,456,553.22,462.718,256,30.39,10.46,98.86\ntf_efficientnetv2_m,480,552.28,1390.555,768,54.14,24.76,89.84\nxcit_small_24_p16_384,384,550.05,1861.633,1024,47.67,26.72,68.58\nefficientvit_l3,320,548.76,932.978,512,246.04,56.32,79.34\nvit_base_r50_s16_384,384,528.19,1938.65,1024,98.95,67.43,135.03\nswinv2_cr_tiny_384,384,527.59,485.193,256,28.33,15.34,161.01\ninception_next_base,384,523.15,978.654,512,86.67,43.64,75.48\ncaformer_b36,224,522.73,1469.167,768,98.75,23.22,67.3\nefficientformerv2_l,224,520.87,1965.917,1024,26.32,2.59,18.54\nmaxvit_large_tf_224,224,511.9,750.122,384,211.79,43.68,127.35\nefficientnetv2_l,384,505.24,2026.716,1024,118.52,36.1,101.16\nconvformer_b36,224,493.89,1554.99,768,99.88,22.69,56.06\nnasnetalarge,331,490.88,782.249,384,88.75,23.89,90.56\nvitamin_large2_224,224,490.43,1043.963,512,333.58,75.05,112.83\nvitamin_large_224,224,490.33,1044.155,512,333.32,75.05,112.83\ntf_efficientnetv2_l,384,486.38,2105.328,1024,118.52,36.1,101.16\neca_nfnet_l2,384,481.18,1596.052,768,56.72,30.05,68.28\nconvnext_xlarge,288,474.06,809.995,384,350.2,100.8,95.05\ntresnet_xl,448,4
68.78,1638.268,768,78.44,60.77,61.31\necaresnet269d,320,467.07,2192.338,1024,102.09,41.53,83.69\nvit_so400m_patch14_siglip_gap_224,224,464.21,2205.889,1024,412.44,109.57,106.13\nvit_so400m_patch14_siglip_224,224,463.58,2208.874,1024,427.68,110.26,106.73\nregnetz_d8_evos,320,459.77,1670.385,768,23.46,7.03,38.92\npnasnet5large,331,451.71,850.077,384,86.06,25.04,92.89\ncoatnet_4_224,224,448.81,570.366,256,275.43,62.48,129.26\nvolo_d2_384,384,446.3,1720.798,768,58.87,46.17,184.51\nswinv2_base_window16_256,256,443.63,865.555,384,87.92,22.02,84.71\nswinv2_base_window12to16_192to256,256,443.19,866.406,384,87.92,22.02,84.71\neca_nfnet_l3,352,438.82,2333.492,1024,72.04,32.57,73.12\nvit_base_patch16_siglip_gap_512,512,436.48,1172.98,512,86.43,107.0,246.15\nresnest200e,320,433.87,2360.1,1024,70.2,35.69,82.78\nrepvgg_d2se,320,433.46,2362.343,1024,133.33,74.57,46.82\nvit_base_patch16_siglip_512,512,432.68,1183.28,512,93.52,108.22,247.74\nresnetrs350,288,431.8,2371.351,1024,163.96,43.67,87.09\neva02_large_patch14_224,224,427.61,2394.657,1024,303.27,81.15,97.2\neva02_large_patch14_clip_224,224,422.17,2425.529,1024,304.11,81.18,97.2\nmaxvit_small_tf_384,384,417.56,459.79,192,69.02,35.87,183.65\nxcit_small_24_p8_224,224,413.85,2474.292,1024,47.63,35.81,90.78\ncoat_lite_medium_384,384,412.65,1240.736,512,44.57,28.73,116.7\ncait_xxs24_384,384,409.04,2503.383,1024,12.03,9.63,122.66\nconvnext_large,384,408.43,626.768,256,197.77,101.1,126.74\nconvnext_large_mlp,384,408.43,626.758,256,200.13,101.11,126.74\nresnet50x16_clip_gap,384,408.06,1254.695,512,136.2,70.32,100.64\ncoatnet_rmlp_2_rw_384,384,407.99,470.57,192,73.88,47.69,209.43\nresnext101_32x32d,224,402.22,1272.909,512,468.53,87.29,91.12\nnfnet_f2,352,397.35,1932.786,768,193.78,63.22,79.06\nmvitv2_large_cls,224,396.76,1935.67,768,234.58,42.17,111.69\nresnet50x16_clip,384,396.57,1291.043,512,167.33,74.9,103.54\necaresnet269d,352,387.68,2641.296,1024,102.09,50.25,101.25\nvolo_d5_224,224,384.55,2662.854,1024,295.46,72.4,118.11\nxcit_med
ium_24_p16_384,384,381.47,2684.348,1024,84.4,47.39,91.64\nmvitv2_large,224,376.52,1359.807,512,217.99,43.87,112.02\nvitamin_large2_256,256,375.97,1021.316,384,333.64,99.0,154.99\nvitamin_large_256,256,375.75,1021.913,384,333.38,99.0,154.99\nhiera_huge_224,224,370.3,1382.641,512,672.78,124.85,150.95\nnfnet_f3,320,368.66,2777.571,1024,254.92,68.77,83.93\nefficientvit_l3,384,368.28,1042.649,384,246.04,81.08,114.02\nresnetrs270,352,365.8,2799.336,1024,129.86,51.13,105.48\nefficientnetv2_xl,384,365.74,2799.773,1024,208.12,52.81,139.2\nconvnextv2_large,288,360.11,710.871,256,197.96,56.87,71.29\nregnety_320,384,355.95,1078.775,384,145.05,95.0,88.87\ntf_efficientnetv2_xl,384,352.9,2901.677,1024,208.12,52.81,139.2\nmaxvit_tiny_tf_512,512,350.4,365.274,128,31.05,33.49,257.59\nefficientnet_b6,528,348.25,367.527,128,43.04,19.4,167.39\nvit_huge_patch14_224,224,346.11,2958.6,1024,630.76,167.4,139.41\nresmlp_big_24_224,224,346.09,2958.721,1024,129.14,100.23,87.31\nvit_huge_patch14_clip_224,224,345.85,2960.828,1024,632.05,167.4,139.41\nmaxxvitv2_rmlp_base_rw_384,384,338.23,1135.306,384,116.09,72.98,213.74\ndm_nfnet_f2,352,333.3,2304.205,768,193.78,63.22,79.06\ncaformer_s36,384,330.56,1548.873,512,39.3,26.08,150.33\nvit_base_patch14_dinov2,518,330.07,1551.146,512,86.58,151.71,397.58\ndeit3_huge_patch14_224,224,328.48,3117.396,1024,632.13,167.4,139.41\nvit_base_patch14_reg4_dinov2,518,326.8,1566.682,512,86.58,152.25,399.53\nswinv2_cr_small_384,384,326.37,784.356,256,49.7,29.7,298.03\nconvnextv2_base,384,326.0,785.254,256,88.72,45.21,84.49\nefficientnetv2_l,480,324.02,1580.122,512,118.52,56.4,157.99\ntf_efficientnet_b6,528,322.38,397.021,128,43.04,19.4,167.39\nvit_huge_patch14_gap_224,224,320.65,3193.485,1024,630.76,166.73,138.74\neva02_base_patch14_448,448,313.01,1635.692,512,87.12,107.11,259.14\nconvformer_s36,384,312.77,1636.965,512,40.01,22.54,89.62\ntf_efficientnetv2_l,480,311.92,1641.431,512,118.52,56.4,157.99\nregnety_1280,224,311.67,1642.714,512,644.81,127.66,71.58\ndm_nfnet_f
3,320,308.4,3320.307,1024,254.92,68.77,83.93\nfocalnet_huge_fl3,224,308.26,1660.931,512,745.28,118.26,104.8\nmaxvit_xlarge_tf_224,224,304.7,840.149,256,506.99,97.52,191.04\nconvmixer_1536_20,224,304.65,3361.196,1024,51.63,48.68,33.03\nvit_huge_patch14_clip_quickgelu_224,224,301.93,3391.487,1024,632.08,167.4,139.41\nxcit_tiny_24_p8_384,384,301.9,3391.864,1024,12.11,27.05,132.95\nseresnextaa201d_32x8d,320,301.18,3399.969,1024,149.39,70.22,138.71\nrdnet_large,384,300.8,638.262,192,186.27,102.09,137.13\nvitamin_xlarge_256,256,299.66,854.266,256,436.06,130.13,177.37\nresnetrs420,320,299.37,3420.532,1024,191.89,64.2,126.56\nswin_base_patch4_window12_384,384,298.5,857.581,256,87.9,47.19,134.78\nvit_large_patch16_384,384,297.48,2581.665,768,304.72,191.21,270.24\nvit_huge_patch14_xp_224,224,293.38,3490.267,1024,631.8,167.3,139.41\neva_large_patch14_336,336,293.21,2619.236,768,304.53,191.1,270.24\nvit_large_patch14_clip_336,336,291.64,2633.322,768,304.53,191.11,270.24\nswinv2_cr_huge_224,224,289.13,1328.114,384,657.83,115.97,121.08\ncait_xs24_384,384,285.75,2687.593,768,26.67,19.28,183.98\nxcit_medium_24_p8_224,224,285.74,3583.627,1024,84.32,63.53,121.23\nsam2_hiera_tiny,896,284.13,225.218,64,26.85,99.86,384.63\nswinv2_large_window12to16_192to256,256,282.09,680.616,192,196.74,47.81,121.53\nconvnext_xxlarge,256,281.32,909.972,256,846.47,198.09,124.45\nmaxvit_rmlp_base_rw_384,384,277.19,1385.303,384,116.14,70.97,318.95\ndavit_giant,224,275.78,1392.401,384,1406.47,192.92,153.06\nbeit_large_patch16_384,384,274.3,3733.061,1024,305.0,191.21,270.24\nconvnextv2_huge,224,273.48,936.071,256,660.29,115.0,79.07\ncait_xxs36_384,384,273.32,3746.438,1024,17.37,14.35,183.7\ndeit3_large_patch16_384,384,271.88,3766.274,1024,304.76,191.21,270.24\nvit_giant_patch16_gap_224,224,271.62,3769.888,1024,1011.37,202.46,139.26\neca_nfnet_l3,448,271.28,1887.306,512,72.04,52.55,118.4\nconvnext_xlarge,384,266.61,960.183,256,350.2,179.2,168.99\nvit_large_patch16_siglip_gap_384,384,265.41,2893.649,768,303.69
,190.85,269.55\nxcit_small_12_p8_384,384,264.43,1452.163,384,26.21,54.92,138.29\nvit_large_patch16_siglip_384,384,264.41,2904.586,768,316.28,192.07,270.75\nresnetv2_152x2_bit,384,257.56,1490.858,384,236.34,136.16,132.56\ncoatnet_5_224,224,255.18,752.374,192,687.47,145.49,194.24\nvit_large_patch14_clip_quickgelu_336,336,250.08,3071.031,768,304.29,191.11,270.24\nresnetv2_152x4_bit,224,249.77,2049.841,512,936.53,186.9,90.22\nresnetv2_50x3_bit,448,246.98,777.372,192,217.32,145.7,133.37\nmaxvit_base_tf_384,384,242.99,790.147,192,119.65,73.8,332.9\nsam2_hiera_small,896,242.55,263.836,64,33.95,123.99,442.63\nswinv2_cr_base_384,384,240.19,1065.812,256,87.88,50.57,333.68\ncaformer_m36,384,240.01,1066.57,256,56.2,42.11,196.35\nxcit_large_24_p16_384,384,237.61,3232.218,768,189.1,105.35,137.17\nresnetrs350,384,236.9,4322.505,1024,163.96,77.59,154.74\nmaxvit_small_tf_512,512,235.03,408.44,96,69.13,67.26,383.77\nvolo_d3_448,448,231.1,2215.491,512,86.63,96.33,446.83\neva_giant_patch14_224,224,229.92,4453.698,1024,1012.56,267.18,192.64\neva_giant_patch14_clip_224,224,229.31,4465.497,1024,1012.59,267.18,192.64\nconvformer_m36,384,228.98,1117.982,256,57.05,37.87,123.56\nvit_giant_patch14_224,224,225.31,4544.745,1024,1012.61,267.18,192.64\nvit_giant_patch14_clip_224,224,224.09,4569.511,1024,1012.65,267.18,192.64\nregnety_640,384,219.6,1165.748,256,281.38,188.47,124.83\ncait_s24_384,384,215.82,2372.283,512,47.06,32.17,245.31\nvitamin_large_336,336,214.38,895.584,192,333.57,175.72,307.47\nvitamin_large2_336,336,214.36,895.665,192,333.83,175.72,307.47\nseresnextaa201d_32x8d,384,212.89,2405.003,512,149.39,101.11,199.72\nnfnet_f3,416,210.03,2437.746,512,254.92,115.58,141.78\nefficientnetv2_xl,512,209.86,2439.71,512,208.12,93.85,247.32\nfocalnet_huge_fl4,224,209.11,2448.416,512,686.46,118.9,113.34\nresnest269e,416,205.52,2491.203,512,110.93,77.69,171.98\nnfnet_f4,384,204.25,3760.055,768,316.07,122.14,147.57\nefficientnet_b7,600,202.28,474.573,96,66.35,38.33,289.94\ntf_efficientnetv2_xl,512,
202.25,2531.445,512,208.12,93.85,247.32\nconvnextv2_large,384,201.98,950.553,192,197.96,101.1,126.74\ntf_efficientnet_b7,600,189.69,506.052,96,66.35,38.33,289.94\nresnetv2_152x2_bit,448,187.23,1367.275,256,236.34,184.99,180.43\neva02_large_patch14_clip_336,336,186.43,4119.531,768,304.43,191.34,289.13\nswin_large_patch4_window12_384,384,185.91,688.483,128,196.74,104.08,202.16\ndm_nfnet_f3,416,181.39,2822.569,512,254.92,115.58,141.78\ncaformer_b36,384,177.61,1441.35,256,98.75,72.33,261.79\nresnetrs420,416,176.81,4343.655,768,191.89,108.45,213.79\ndm_nfnet_f4,384,176.15,2906.576,512,316.07,122.14,147.57\nxcit_large_24_p8_224,224,176.14,2906.744,512,188.93,141.23,181.56\nmaxvit_large_tf_384,384,171.73,745.342,128,212.03,132.55,445.84\nvitamin_xlarge_336,336,171.61,1118.79,192,436.06,230.18,347.33\nmvitv2_huge_cls,224,171.54,2238.569,384,694.8,120.67,243.63\nconvformer_b36,384,169.76,1508.019,256,99.88,66.67,164.75\nconvnextv2_huge,288,165.43,773.711,128,660.29,190.1,130.7\nvit_so400m_patch14_siglip_gap_384,384,154.69,3309.867,512,412.99,333.46,451.19\nvitamin_large_384,384,154.64,1241.528,192,333.71,234.44,440.16\nvitamin_large2_384,384,154.6,1241.879,192,333.97,234.44,440.16\nvit_so400m_patch14_siglip_384,384,153.85,3327.91,512,428.23,335.4,452.89\nfocalnet_large_fl3,384,153.05,1672.622,256,239.13,105.06,168.04\nswinv2_cr_large_384,384,151.56,844.525,128,196.68,108.96,404.96\nresnet50x64_clip_gap,448,151.54,1689.315,256,365.03,253.96,233.22\ndavit_base_fl,768,150.78,848.881,128,90.37,190.32,530.15\nvit_huge_patch14_clip_336,336,150.49,3402.128,512,632.46,390.97,407.54\nresnetv2_101x3_bit,448,148.24,1295.156,192,387.93,280.33,194.78\nresnet50x64_clip,448,147.7,1733.217,256,420.38,265.02,239.13\nfocalnet_large_fl4,384,145.84,1755.371,256,239.32,105.2,181.78\nnfnet_f5,416,144.21,3550.232,512,377.21,170.71,204.56\ncait_s36_384,384,143.55,3566.739,512,68.37,47.99,367.4\nbeit_large_patch16_512,512,142.33,3597.213,512,305.67,362.24,656.39\nvolo_d4_448,448,141.31,2717.358,384,
193.41,197.13,527.35\nxcit_small_24_p8_384,384,138.77,2767.038,384,47.63,105.24,265.91\nmaxvit_base_tf_512,512,136.61,702.73,96,119.88,138.02,703.99\nvit_gigantic_patch14_clip_224,224,131.25,3900.97,512,1844.91,483.96,275.37\nvit_gigantic_patch14_224,224,131.1,3905.342,512,1844.44,483.95,275.37\nefficientnet_b8,672,130.9,733.374,96,87.41,63.48,442.89\nsam2_hiera_base_plus,896,129.71,493.389,64,68.68,227.48,828.88\nvitamin_xlarge_384,384,129.66,987.169,128,436.06,306.38,493.46\ndm_nfnet_f5,416,123.97,4129.905,512,377.21,170.71,204.56\ntf_efficientnet_b8,672,123.56,776.89,96,87.41,63.48,442.89\nswinv2_base_window12to24_192to384,384,116.6,548.838,64,87.92,55.25,280.36\nnfnet_f4,512,116.51,3295.94,384,316.07,216.26,262.26\nvit_huge_patch14_clip_378,378,115.85,4419.399,512,632.68,503.79,572.79\nregnety_1280,384,113.91,1123.629,128,644.81,374.99,210.2\nnfnet_f6,448,108.83,4704.573,512,438.36,229.7,273.62\nvit_large_patch14_reg4_dinov2,518,108.71,3532.214,384,304.37,508.9,1064.02\nfocalnet_xlarge_fl3,384,108.53,1769.039,192,408.79,185.61,223.99\nvit_large_patch14_dinov2,518,108.27,3546.558,384,304.37,507.15,1058.82\nvit_so400m_patch14_siglip_gap_448,448,107.18,3582.703,384,413.33,487.18,764.26\nfocalnet_xlarge_fl4,384,103.7,1851.388,192,409.03,185.79,242.31\nvit_huge_patch14_clip_quickgelu_378,378,103.32,3716.757,384,632.68,503.79,572.79\nmaxvit_xlarge_tf_384,384,102.58,935.85,96,475.32,292.78,668.76\neva02_large_patch14_448,448,102.5,4995.169,512,305.08,362.33,689.95\neva_giant_patch14_336,336,100.67,5085.918,512,1013.01,620.64,550.67\ndm_nfnet_f4,512,98.99,3879.344,384,316.07,216.26,262.26\nvit_huge_patch16_gap_448,448,98.47,3899.461,384,631.67,544.7,636.83\nxcit_medium_24_p8_384,384,95.83,2671.483,256,84.32,186.67,354.73\nmaxvit_large_tf_512,512,95.8,668.018,64,212.33,244.75,942.15\nvolo_d5_448,448,94.17,2718.505,256,295.91,315.06,737.92\ndm_nfnet_f6,448,93.55,4104.519,384,438.36,229.7,273.62\nconvnextv2_huge,384,93.18,1030.237,96,660.29,337.96,232.35\nswinv2_cr_giant_2
24,224,86.22,1484.541,128,2598.76,483.85,309.15\nnfnet_f5,544,84.48,3030.209,256,377.21,290.97,349.71\nnfnet_f7,480,82.66,4645.539,384,499.5,300.08,355.86\ntf_efficientnet_l2,475,82.5,1163.656,96,480.31,172.11,609.89\nswinv2_large_window12to24_192to384,384,74.51,644.202,48,196.74,116.15,407.83\ndm_nfnet_f5,544,72.96,3508.675,256,377.21,290.97,349.71\nvolo_d5_512,512,72.05,3553.134,256,296.09,425.09,1105.37\nswinv2_cr_huge_384,384,71.81,891.159,64,657.94,352.04,583.18\nnfnet_f6,576,66.27,3862.681,256,438.36,378.69,452.2\nregnety_2560,384,62.8,1528.693,96,1282.6,747.83,296.49\ncait_m36_384,384,62.36,4105.492,256,271.22,173.11,734.81\ndavit_huge_fl,768,58.91,1086.419,64,360.64,744.84,1060.3\nxcit_large_24_p8_384,384,58.6,3276.243,192,188.93,415.0,531.82\nmaxvit_xlarge_tf_512,512,57.35,836.875,48,475.77,534.14,1413.22\ndm_nfnet_f6,576,56.67,4517.07,256,438.36,378.69,452.2\nresnetv2_152x4_bit,480,56.51,2265.038,128,936.53,844.84,414.26\nconvnextv2_huge,512,52.45,915.15,48,660.29,600.81,413.07\nnfnet_f7,608,52.01,4921.813,256,499.5,480.39,570.85\nsam2_hiera_large,1024,42.98,1116.814,48,212.15,907.48,2190.34\neva_giant_patch14_560,560,33.62,3807.521,128,1014.45,1906.76,2577.17\nvit_giant_patch14_dinov2,518,33.11,3866.111,128,1136.48,1784.2,2757.89\nvit_giant_patch14_reg4_dinov2,518,32.88,3893.06,128,1136.48,1790.08,2771.21\nsamvit_base_patch16,1024,31.14,385.374,12,89.67,486.43,1343.27\nefficientnet_l2,800,30.89,1035.976,32,480.31,479.12,1707.39\ntf_efficientnet_l2,800,30.04,1065.17,32,480.31,479.12,1707.39\ncait_m48_448,448,27.15,4715.382,128,356.46,329.41,1708.23\nswinv2_cr_giant_384,384,23.32,1372.464,32,2598.76,1450.71,1394.86\nvit_so400m_patch14_siglip_gap_896,896,19.9,4824.742,96,416.87,2731.49,8492.88\nsamvit_large_patch16,1024,14.97,534.402,8,308.28,1493.86,2553.78\nsamvit_huge_patch16,1024,9.99,600.87,6,637.03,2982.23,3428.16\n"
  },
  {
    "path": "results/benchmark-infer-amp-nchw-pt291-cu128-4090-dynamo.csv",
    "content": "model,infer_img_size,infer_samples_per_sec,infer_step_time,infer_batch_size,param_count,infer_gmacs,infer_macts\r\ntest_vit,160,299257.15,3.401,1024,0.37,0.04,0.48\r\ntest_vit2,160,246655.29,4.131,1024,0.46,0.05,0.64\r\ntest_mambaout,160,184236.44,5.538,1024,0.45,0.03,0.53\r\ntest_byobnet,160,182110.34,5.603,1024,0.46,0.03,0.43\r\ntest_efficientnet_ln,160,166457.43,6.13,1024,0.36,0.06,0.55\r\ntest_efficientnet,160,165005.66,6.185,1024,0.36,0.06,0.55\r\ntest_convnext,160,162362.28,6.286,1024,0.27,0.03,0.58\r\ntest_convnext3,160,149139.58,6.845,1024,0.47,0.05,0.63\r\ntest_efficientnet_evos,160,145930.26,6.996,1024,0.36,0.06,0.55\r\ntest_convnext2,160,145404.17,7.021,1024,0.48,0.05,0.63\r\ntest_efficientnet_gn,160,140747.2,7.254,1024,0.36,0.06,0.55\r\ntest_resnet,160,136832.43,7.462,1024,0.47,0.1,0.64\r\ntest_vit3,160,136661.41,7.473,1024,0.93,0.09,1.0\r\ntest_mambaout,192,129278.84,7.9,1024,0.45,0.04,0.77\r\nefficientvit_m0,224,121769.07,8.388,1024,2.33,0.08,0.91\r\ntinynet_e,106,117908.24,8.663,1024,2.04,0.03,0.69\r\nmobilenetv4_conv_small_035,224,111096.76,9.196,1024,1.91,0.05,0.98\r\ntest_vit4,160,109841.07,9.302,1024,1.02,0.11,1.07\r\nmobilenetv4_conv_small_050,224,94858.92,10.774,1024,2.24,0.07,1.18\r\nmobilenetv4_conv_small_035,256,85405.23,11.968,1024,1.91,0.06,1.28\r\nlcnet_035,224,83905.48,12.183,1024,1.64,0.03,1.04\r\nmobilenetv3_small_050,224,80530.41,12.694,1024,1.59,0.03,0.92\r\ntest_nfnet,160,77483.62,13.194,1024,0.38,0.29,1.2\r\nshvit_s1,224,77470.22,13.197,1024,6.31,0.24,1.39\r\nefficientvit_m1,224,77383.51,13.211,1024,2.96,0.17,1.33\r\nefficientvit_m2,224,72611.02,14.081,1024,4.17,0.2,1.47\r\nlcnet_050,224,69950.67,14.617,1024,1.88,0.05,1.26\r\nmobilenetv4_conv_small_050,256,69668.75,14.676,1024,2.24,0.09,1.55\r\nstarnet_s050,224,69228.13,14.771,1024,0.54,0.09,1.57\r\nefficientvit_m3,224,64254.11,15.915,1024,6.88,0.26,1.62\r\ntf_mobilenetv3_small_minimal_100,224,62704.2,16.309,1024,2.04,0.06,1.41\r\nefficientvit_m4,224,62060.25,16.479,
1024,8.78,0.3,1.7\r\nshvit_s2,224,59940.94,17.062,1024,11.45,0.37,1.6\r\nmobilenetv3_small_075,224,57459.3,17.799,1024,2.04,0.05,1.3\r\nlevit_conv_128s,224,55147.19,18.547,1024,7.76,0.3,1.88\r\nmobilenetv4_conv_small,224,55113.3,18.558,1024,3.77,0.19,1.97\r\ntinynet_d,152,53383.28,19.16,1024,2.34,0.05,1.42\r\nlevit_128s,224,53359.36,19.169,1024,7.76,0.3,1.88\r\nmobilenetv3_small_100,224,52167.63,19.607,1024,2.54,0.06,1.42\r\nresnet10t,176,50575.62,20.226,1024,5.44,0.7,1.51\r\nrepghostnet_050,224,50341.69,20.318,1024,2.31,0.05,2.02\r\nlcnet_075,224,49845.6,20.522,1024,2.36,0.1,1.99\r\nresnet18,160,47919.27,21.348,1024,11.69,0.93,1.27\r\ntf_mobilenetv3_small_075,224,47586.44,21.496,1024,2.04,0.05,1.3\r\nmnasnet_small,224,47058.07,21.739,1024,2.03,0.07,2.16\r\nregnetx_002,224,46308.65,22.092,1024,2.68,0.2,2.16\r\nstarnet_s100,224,44255.46,23.117,1024,1.04,0.19,2.68\r\ntf_mobilenetv3_small_100,224,43737.78,23.39,1024,2.54,0.06,1.42\r\nghostnet_050,224,43432.63,23.555,1024,2.59,0.05,1.77\r\nregnety_002,224,42772.12,23.918,1024,3.16,0.2,2.17\r\nshvit_s3,224,42719.51,23.949,1024,14.21,0.6,2.33\r\nefficientvit_m5,224,42667.15,23.977,1024,12.44,0.52,2.41\r\nstarnet_s150,224,42183.01,24.253,1024,1.56,0.23,2.75\r\nmobilenetv4_conv_small,256,41224.68,24.817,1024,3.77,0.25,2.57\r\nfasternet_t0,224,41083.48,24.903,1024,3.91,0.34,1.97\r\nlcnet_100,224,39914.31,25.632,1024,2.95,0.16,2.52\r\nvit_tiny_r_s16_p8_224,224,39509.35,25.897,1024,6.34,0.44,2.06\r\nlevit_conv_128,224,38650.19,26.472,1024,9.19,0.41,2.71\r\nmobilenetv2_035,224,38533.85,26.552,1024,1.68,0.07,2.86\r\nlevit_128,224,37912.11,26.988,1024,9.19,0.41,2.71\r\nrepghostnet_058,224,37842.86,27.037,1024,2.54,0.06,2.59\r\nconvnext_zepto_rms,224,35626.29,28.721,1024,2.16,0.3,2.75\r\nmnasnet_050,224,34960.23,29.268,1024,2.22,0.11,3.07\r\nregnetx_004,224,34494.82,29.663,1024,5.16,0.4,3.14\r\nefficientvit_b0,224,34456.93,29.696,1024,3.41,0.1,2.87\r\nhgnetv2_b0,224,34263.7,29.864,1024,6.0,0.33,2.12\r\nrepvgg_a0,224,33548.69,30.50
1,1024,8.31,1.36,1.79\r\nrepghostnet_080,224,33275.57,30.751,1024,3.27,0.1,3.22\r\nvit_small_patch32_224,224,33183.33,30.838,1024,22.88,1.15,2.5\r\nregnetx_004_tv,224,32358.84,31.624,1024,5.5,0.42,3.17\r\nlevit_conv_192,224,31906.19,32.072,1024,10.92,0.66,3.2\r\nresnet10t,224,31761.84,32.219,1024,5.44,1.1,2.43\r\nconvnext_zepto_rms_ols,224,31744.89,32.236,1024,2.16,0.34,3.15\r\npit_ti_224,224,31694.6,32.286,1024,4.85,0.7,6.19\r\npit_ti_distilled_224,224,31506.87,32.479,1024,5.1,0.71,6.23\r\nmobilenetv2_050,224,30129.99,33.964,1024,1.97,0.1,3.64\r\ngernet_s,224,30031.98,34.075,1024,8.17,0.75,2.65\r\nsemnasnet_050,224,30006.76,34.104,1024,2.08,0.11,3.44\r\nlevit_192,224,29298.95,34.928,1024,10.92,0.66,3.2\r\nghostnetv3_050,224,29146.46,35.109,1024,2.85,0.05,2.28\r\ncs3darknet_focus_s,256,28705.28,35.651,1024,3.27,0.69,2.7\r\nresnet34,160,28380.11,36.06,1024,21.8,1.87,1.91\r\nmixer_s32_224,224,28227.3,36.256,1024,19.1,1.0,2.28\r\nedgenext_xx_small,256,27889.25,36.694,1024,1.33,0.26,3.33\r\nmobileone_s0,224,27780.32,36.839,1024,2.08,0.28,3.79\r\nnf_regnet_b0,192,27771.62,36.85,1024,8.76,0.37,3.15\r\ncs3darknet_s,256,27261.65,37.54,1024,3.28,0.72,2.97\r\nresnet14t,176,27225.75,37.589,1024,10.08,1.07,3.61\r\nfasternet_t1,224,27081.47,37.79,1024,7.6,0.85,3.15\r\nlcnet_150,224,27034.82,37.855,1024,4.5,0.34,3.79\r\nresnetv2_18,224,26374.1,38.805,1024,11.69,1.82,2.48\r\nrepghostnet_100,224,26125.61,39.172,1024,4.06,0.15,3.98\r\nshvit_s4,256,25936.71,39.459,1024,16.55,0.99,3.73\r\nconvnext_atto_rms,224,25791.33,39.682,1024,3.69,0.55,3.81\r\nconvnext_atto,224,25760.81,39.729,1024,3.7,0.55,3.81\r\nstarnet_s2,224,25721.38,39.789,1024,3.68,0.55,4.73\r\nhgnetv2_b1,224,25542.68,40.067,1024,6.34,0.49,2.73\r\nmobilenetv4_conv_small,320,25342.81,40.383,1024,3.77,0.39,4.01\r\nstarnet_s1,224,25329.03,40.407,1024,2.87,0.42,4.99\r\ntinynet_c,184,24978.57,40.972,1024,2.46,0.11,2.87\r\nresnet18,224,24941.46,41.035,1024,11.69,1.82,2.48\r\nconvnext_atto_ols,224,24161.8,42.359,1024,3.7,0.58,4.1
1\r\nrepvgg_a1,224,23821.75,42.964,1024,12.79,2.36,2.37\r\nvit_tiny_patch16_224,224,23698.81,43.182,1024,5.72,1.26,5.97\r\ntf_mobilenetv3_large_minimal_100,224,23658.56,43.26,1024,3.92,0.22,4.4\r\ndeit_tiny_patch16_224,224,23489.37,43.572,1024,5.72,1.26,5.97\r\nconvnextv2_atto,224,23483.74,43.582,1024,3.71,0.55,3.81\r\nvit_medium_patch32_clip_224,224,23425.1,43.691,1024,39.69,2.0,3.34\r\nlevit_conv_256,224,23423.81,43.695,1024,18.86,1.13,4.23\r\nregnety_004,224,23314.28,43.899,1024,4.34,0.41,3.89\r\nmobilenetv3_large_075,224,23264.78,43.993,1024,3.99,0.16,4.0\r\nrepghostnet_111,224,23193.04,44.128,1024,4.52,0.18,4.38\r\ndeit_tiny_distilled_patch16_224,224,23143.6,44.223,1024,5.91,1.27,6.01\r\ninception_next_atto,224,22684.0,45.12,1024,4.16,0.5,3.63\r\nmnasnet_075,224,22518.44,45.445,1024,3.17,0.23,4.77\r\nseresnet18,224,22441.32,45.609,1024,11.78,1.82,2.49\r\nlegacy_seresnet18,224,22431.98,45.627,1024,11.78,1.82,2.49\r\nresnetv2_18d,224,22151.1,46.181,1024,11.71,2.06,3.29\r\nxcit_nano_12_p16_224,224,22039.99,46.439,1024,3.05,0.56,4.17\r\nlevit_256,224,21939.6,46.652,1024,18.86,1.13,4.23\r\ntf_mobilenetv3_large_075,224,21662.52,47.248,1024,3.99,0.16,4.0\r\nvit_xsmall_patch16_clip_224,224,21603.25,47.378,1024,8.28,1.79,6.65\r\nconvnext_femto,224,21530.96,47.538,1024,5.22,0.79,4.57\r\nghostnet_100,224,21488.33,47.631,1024,5.18,0.15,3.55\r\nedgenext_xx_small,288,21486.36,47.635,1024,1.33,0.33,4.21\r\nresnet18d,224,21118.13,48.467,1024,11.71,2.06,3.29\r\nregnety_006,224,21048.26,48.627,1024,6.06,0.61,4.33\r\nmobilenetv1_100,224,20906.49,48.957,1024,4.23,0.58,5.04\r\nmobilenetv3_rw,224,20884.44,49.01,1024,5.48,0.23,4.41\r\nmobilenetv3_large_100,224,20718.56,49.401,1024,5.48,0.23,4.41\r\nhgnetv2_b0,288,20713.18,49.415,1024,6.0,0.54,3.51\r\nmobilenetv1_100h,224,20570.22,49.758,1024,5.28,0.63,5.09\r\nconvnext_femto_ols,224,20411.06,50.147,1024,5.23,0.82,4.87\r\ndla46_c,224,20376.4,50.232,1024,1.3,0.58,4.5\r\nlevit_conv_256d,224,20255.32,50.533,1024,26.16,1.39,4.93\r\ntf_effi
cientnetv2_b0,192,20213.18,50.638,1024,7.14,0.54,3.51\r\nrepghostnet_130,224,20075.08,50.986,1024,5.46,0.24,5.24\r\nhardcorenas_a,224,20017.29,51.134,1024,5.26,0.23,4.38\r\npit_xs_224,224,20006.87,51.16,1024,10.62,1.4,7.71\r\nregnetx_008,224,19960.11,51.279,1024,7.26,0.81,5.15\r\nconvnext_atto_rms,256,19827.14,51.625,1024,3.69,0.71,4.98\r\nconvnextv2_femto,224,19728.63,51.882,1024,5.23,0.79,4.57\r\nmnasnet_100,224,19664.9,52.05,1024,4.38,0.33,5.46\r\npit_xs_distilled_224,224,19592.92,52.242,1024,11.0,1.41,7.76\r\nmobilenet_edgetpu_v2_xs,224,19587.84,52.255,1024,4.46,0.7,4.8\r\nhardcorenas_b,224,19541.69,52.379,1024,5.18,0.26,5.09\r\nese_vovnet19b_slim_dw,224,19393.97,52.778,1024,1.9,0.4,5.28\r\ntf_mobilenetv3_large_100,224,19200.72,53.308,1024,5.48,0.23,4.41\r\nlevit_256d,224,19145.79,53.462,1024,26.16,1.39,4.93\r\nhardcorenas_c,224,18941.52,54.038,1024,5.52,0.28,5.01\r\nrepvgg_b0,224,18773.73,54.522,1024,14.34,3.06,3.07\r\nstarnet_s3,224,18726.77,54.659,1024,5.75,0.76,6.66\r\nmobilenetv4_conv_medium,224,18658.7,54.857,1024,9.72,0.84,5.8\r\nsemnasnet_075,224,18654.46,54.869,1024,2.91,0.23,5.54\r\nmobilenetv2_075,224,18642.64,54.906,1024,2.64,0.22,5.86\r\ncs3darknet_focus_s,320,18326.87,55.828,1024,3.27,1.08,4.22\r\nefficientformerv2_s0,224,18254.43,56.072,1024,3.6,0.41,5.3\r\nese_vovnet19b_slim,224,18236.15,56.13,1024,3.17,1.69,3.52\r\nmobilenet_edgetpu_100,224,17970.96,56.959,1024,4.09,1.0,5.75\r\nspnasnet_100,224,17880.63,57.246,1024,4.42,0.35,6.03\r\nregnety_008,224,17732.59,57.724,1024,6.26,0.81,5.25\r\nregnetx_006,224,17490.16,58.525,1024,6.2,0.61,3.98\r\nresnetblur18,224,17334.14,59.052,1024,11.69,2.34,3.39\r\nrepghostnet_150,224,17318.22,59.105,1024,6.55,0.31,6.0\r\nskresnet18,224,17187.34,59.556,1024,11.96,1.82,3.24\r\nresnet14t,224,17144.78,59.704,1024,10.08,1.69,5.8\r\nregnety_008_tv,224,17012.84,60.168,1024,6.43,0.84,5.42\r\nghostnet_130,224,16978.34,60.289,1024,7.36,0.24,4.6\r\nhrnet_w18_small,224,16969.51,60.319,1024,13.19,1.61,5.72\r\nhardcorenas_d,224
,16892.32,60.597,1024,7.5,0.3,4.93\r\nmobileone_s1,224,16827.24,60.832,1024,4.76,0.83,6.27\r\nfasternet_t2,224,16816.55,60.869,1024,14.98,1.91,4.73\r\nvit_betwixt_patch32_clip_224,224,16728.33,61.191,1024,61.41,3.09,4.17\r\npvt_v2_b0,224,16669.43,61.408,1024,3.67,0.57,7.99\r\nrepvit_m0_9,224,16591.21,61.697,1024,5.07,0.82,6.17\r\nrepvit_m1,224,16554.34,61.835,1024,5.07,0.82,6.17\r\nmobilenetv4_hybrid_medium_075,224,16511.23,61.995,1024,7.31,0.66,5.65\r\nfbnetc_100,224,16481.03,62.11,1024,5.57,0.4,6.51\r\ntinynet_b,188,16468.49,62.155,1024,3.73,0.21,4.44\r\nsemnasnet_100,224,16415.36,62.358,1024,3.89,0.32,6.23\r\nmobilevit_xxs,256,16342.81,62.634,1024,1.27,0.42,8.34\r\nmobilenetv2_100,224,16292.93,62.827,1024,3.5,0.31,6.68\r\nmobilenetv1_125,224,16264.84,62.935,1024,6.27,0.89,6.3\r\nhgnetv2_b2,224,16223.4,63.095,1024,11.22,1.15,4.12\r\nmobilenetv1_100,256,16114.85,63.522,1024,4.23,0.76,6.59\r\nefficientnet_lite0,224,16109.2,63.543,1024,4.65,0.4,6.74\r\nconvnext_pico,224,15999.69,63.979,1024,9.05,1.37,6.1\r\ntf_efficientnetv2_b1,192,15951.21,64.172,1024,8.14,0.76,4.59\r\nresnetv2_18,288,15886.54,64.434,1024,11.69,3.0,4.11\r\nmobilenetv3_large_100,256,15863.93,64.526,1024,5.48,0.29,5.75\r\nmobilenetv1_100h,256,15844.63,64.604,1024,5.28,0.82,6.65\r\ntf_efficientnet_lite0,224,15811.23,64.741,1024,4.65,0.4,6.74\r\nresnet50,160,15725.42,65.095,1024,25.56,2.1,5.67\r\nconvnext_atto,288,15561.55,65.781,1024,3.7,0.91,6.3\r\nnf_regnet_b0,256,15463.97,66.19,1024,8.76,0.64,5.58\r\ngmlp_ti16_224,224,15397.58,66.481,1024,5.87,1.34,7.55\r\nedgenext_x_small,256,15346.05,66.704,1024,2.34,0.54,5.93\r\neva02_tiny_patch14_224,224,15344.69,66.711,1024,5.5,1.7,9.14\r\nhgnetv2_b1,288,15304.06,66.888,1024,6.34,0.82,4.51\r\nconvnext_pico_ols,224,15153.57,67.553,1024,9.06,1.43,6.5\r\nresnet18,288,15069.62,67.929,1024,11.69,3.01,4.11\r\ncrossvit_tiny_240,240,15037.74,68.072,1024,7.01,1.57,9.08\r\ncrossvit_9_240,240,14941.72,68.505,1024,8.55,1.85,9.52\r\ngernet_m,224,14792.15,69.204,1024,21.14,3
.02,5.24\r\nhardcorenas_f,224,14753.74,69.383,1024,8.2,0.35,5.57\r\ntf_efficientnetv2_b0,224,14734.93,69.471,1024,7.14,0.73,4.77\r\nhardcorenas_e,224,14694.01,69.664,1024,8.07,0.35,5.65\r\nresnet34,224,14668.08,69.789,1024,21.8,3.67,3.74\r\nswiftformer_xs,224,14658.68,69.833,1024,3.48,0.61,6.45\r\nconvnextv2_pico,224,14602.82,70.102,1024,9.07,1.37,6.1\r\nresnet50d,160,14590.19,70.162,1024,25.58,2.22,6.08\r\nconvnext_atto_ols,288,14568.71,70.265,1024,3.7,0.96,6.8\r\nresnetv2_34,224,14502.15,70.589,1024,21.8,3.67,3.74\r\ncs3darknet_focus_m,256,14332.94,71.422,1024,9.3,1.98,4.89\r\nconvnextv2_atto,288,14321.52,71.478,1024,3.71,0.91,6.3\r\nxcit_tiny_12_p16_224,224,14311.76,71.526,1024,6.72,1.24,6.29\r\nmambaout_femto,224,14300.76,71.58,1024,7.3,1.16,8.34\r\nghostnetv2_100,224,14300.19,71.583,1024,6.16,0.18,4.55\r\nmobilenetv4_conv_medium,256,14293.62,71.617,1024,9.72,1.1,7.58\r\nlevit_conv_384,224,14242.55,71.875,1024,39.07,2.35,6.26\r\nrepvit_m1_0,224,14114.32,72.528,1024,6.81,1.11,7.19\r\nmobilenet_edgetpu_v2_s,224,13868.12,73.816,1024,5.99,1.21,6.6\r\nghostnetv3_100,224,13856.02,73.879,1024,6.15,0.17,4.55\r\nmnasnet_140,224,13813.41,74.107,1024,7.12,0.6,7.71\r\nefficientformer_l1,224,13809.92,74.127,1024,12.29,1.3,5.53\r\ncs3darknet_m,256,13743.27,74.486,1024,9.31,2.08,5.28\r\nmobileone_s2,224,13739.2,74.51,1024,7.81,1.3,7.56\r\nefficientvit_b1,224,13674.61,74.861,1024,9.1,0.53,7.25\r\nmobilenetv4_hybrid_medium,224,13558.47,75.502,1024,11.07,0.98,6.84\r\ncrossvit_9_dagger_240,240,13551.18,75.542,1024,8.78,1.99,9.97\r\nresnetv2_18d,288,13483.23,75.923,1024,11.71,3.4,5.43\r\nseresnet18,288,13472.63,75.983,1024,11.78,3.01,4.11\r\ntinynet_a,192,13343.21,76.72,1024,6.19,0.35,5.41\r\nvit_tiny_r_s16_p8_384,384,13310.02,76.912,1024,6.36,1.34,6.49\r\nresnet34d,224,13277.2,77.103,1024,21.82,3.91,4.54\r\nstarnet_s4,224,13223.45,77.415,1024,7.48,1.05,9.56\r\nrepghostnet_200,224,13178.14,77.68,1024,9.77,0.53,7.96\r\nresnetv2_34d,224,13124.58,77.999,1024,21.82,3.91,4.54\r\nresnext
50_32x4d,160,13110.88,78.081,1024,25.03,2.17,7.35\r\ndla34,224,13106.5,78.106,1024,15.74,3.07,5.02\r\nseresnet34,224,13088.22,78.216,1024,21.96,3.67,3.74\r\nlegacy_seresnet34,224,13086.61,78.226,1024,21.96,3.67,3.74\r\nresnet26,224,13085.35,78.233,1024,16.0,2.36,7.35\r\nrepvgg_a2,224,13062.02,78.373,1024,25.5,5.12,3.13\r\nmobilevitv2_050,256,13058.4,78.394,1024,1.37,0.48,8.04\r\nmobilenetv4_conv_blur_medium,224,13040.5,78.502,1024,9.72,1.22,8.58\r\nconvnext_femto,288,13036.14,78.527,1024,5.22,1.3,7.56\r\nrepvit_m2,224,13021.95,78.613,1024,8.24,1.34,7.82\r\nrepvit_m1_1,224,13009.17,78.691,1024,8.24,1.34,7.82\r\nselecsls42,224,12858.29,79.615,1024,30.35,2.94,4.62\r\nresnet18d,288,12856.54,79.626,1024,11.71,3.41,5.43\r\nselecsls42b,224,12768.75,80.169,1024,32.46,2.98,4.62\r\nresnet50,176,12766.32,80.189,1024,25.56,2.62,6.92\r\nlevit_384,224,12747.58,80.306,1024,39.07,2.35,6.26\r\nefficientformerv2_s1,224,12719.62,80.482,1024,6.19,0.67,7.66\r\nregnetz_005,224,12696.29,80.63,1024,7.12,0.52,5.86\r\nhgnetv2_b3,224,12661.66,80.851,1024,16.29,1.78,5.07\r\nvit_base_patch32_clip_224,224,12645.01,80.958,1024,88.22,4.41,5.01\r\nmobilenetv2_110d,224,12502.84,81.878,1024,4.52,0.45,8.71\r\nmobilenetv1_125,256,12500.02,81.897,1024,6.27,1.16,8.23\r\nrexnetr_100,224,12494.42,81.932,1024,4.88,0.43,7.72\r\nvit_base_patch32_clip_quickgelu_224,224,12485.31,81.994,1024,87.85,4.41,5.01\r\nrexnet_100,224,12457.03,82.179,1024,4.8,0.41,7.44\r\nconvnext_femto_ols,288,12339.46,82.963,1024,5.23,1.35,8.06\r\nfbnetv3_b,224,12291.93,83.283,1024,8.6,0.42,6.97\r\nseresnet50,160,12269.69,83.435,1024,28.09,2.1,5.69\r\nvit_base_patch32_224,224,12161.54,84.178,1024,88.22,4.41,5.01\r\nswiftformer_s,224,12044.28,84.997,1024,6.09,0.99,7.81\r\nconvnextv2_femto,288,12018.31,85.181,1024,5.23,1.3,7.56\r\npit_s_distilled_224,224,12016.69,85.192,1024,24.04,2.9,11.64\r\nedgenext_x_small,288,11998.79,85.32,1024,2.34,0.68,7.5\r\nmambaout_kobe,224,11989.74,85.38,1024,9.14,1.52,10.0\r\nnf_resnet26,224,11977.31,85.472,1
024,16.0,2.41,7.35\r\ngmixer_12_224,224,11927.77,85.828,1024,12.7,2.67,7.26\r\ntf_efficientnetv2_b2,208,11913.74,85.923,1024,10.1,1.06,6.0\r\npit_s_224,224,11887.91,86.115,1024,23.46,2.88,11.56\r\nresnetaa34d,224,11718.2,87.363,1024,21.82,4.43,5.07\r\nefficientnet_es_pruned,224,11664.15,87.767,1024,5.44,1.81,8.73\r\nnf_regnet_b2,240,11653.07,87.85,1024,14.31,0.97,7.23\r\necaresnet50t,160,11641.12,87.941,1024,25.57,2.21,6.04\r\nvit_small_patch32_384,384,11608.66,88.187,1024,22.92,3.45,8.25\r\nefficientnet_es,224,11599.57,88.256,1024,5.44,1.81,8.73\r\nresnet26d,224,11562.63,88.539,1024,16.01,2.6,8.15\r\ntiny_vit_5m_224,224,11531.97,88.773,1024,12.08,1.27,11.25\r\nresmlp_12_224,224,11522.82,88.846,1024,15.35,3.01,5.5\r\nresnetrs50,160,11507.49,88.961,1024,35.69,2.29,6.2\r\nsemnasnet_140,224,11449.45,89.414,1024,6.11,0.6,8.87\r\ntf_efficientnet_es,224,11427.38,89.587,1024,5.44,1.81,8.73\r\nese_vovnet19b_dw,224,11405.2,89.761,1024,6.54,1.34,8.25\r\npoolformerv2_s12,224,11383.67,89.931,1024,11.89,1.83,5.53\r\nfastvit_t8,256,11369.88,90.04,1024,4.0,0.69,6.59\r\nmobilenetv2_140,224,11347.13,90.219,1024,6.11,0.6,9.57\r\nvisformer_tiny,224,11304.17,90.563,1024,10.32,1.27,5.72\r\nnf_regnet_b1,256,11284.74,90.719,1024,10.22,0.82,7.27\r\ncs3darknet_focus_m,288,11262.7,90.896,1024,9.3,2.51,6.19\r\ndarknet17,256,11191.83,91.473,1024,14.3,3.26,7.18\r\nghostnetv2_130,224,11190.41,91.482,1024,8.96,0.28,5.9\r\nmobileone_s3,224,11134.26,91.946,1024,10.08,1.9,9.13\r\nconvnext_nano,224,11126.02,92.014,1024,15.59,2.46,8.37\r\nnf_seresnet26,224,11066.71,92.506,1024,17.4,2.41,7.36\r\nnf_ecaresnet26,224,11059.48,92.567,1024,16.0,2.41,7.36\r\nselecsls60,224,11024.72,92.859,1024,30.67,3.59,5.52\r\nmobilenet_edgetpu_v2_m,224,10982.96,93.213,1024,8.46,1.85,8.15\r\nselecsls60b,224,10972.28,93.304,1024,32.77,3.63,5.52\r\ncs3darknet_m,288,10830.29,94.527,1024,9.31,2.63,6.69\r\nghostnetv3_130,224,10817.07,94.639,1024,8.95,0.28,5.9\r\nefficientnet_b0,224,10785.82,94.916,1024,5.29,0.4,6.75\r\nefficien
tnet_lite1,240,10767.77,95.076,1024,5.42,0.62,10.14\r\nmixer_b32_224,224,10758.98,95.155,1024,60.29,3.24,6.29\r\nresnext50_32x4d,176,10697.19,95.704,1024,25.03,2.71,8.97\r\nmixer_s16_224,224,10620.55,96.395,1024,18.53,3.79,5.97\r\nefficientvit_b1,256,10615.4,96.44,1024,9.1,0.69,9.46\r\ntf_efficientnet_lite1,240,10601.4,96.568,1024,5.42,0.62,10.14\r\nresnetblur18,288,10512.67,97.384,1024,11.69,3.87,5.6\r\ndla46x_c,224,10480.01,97.687,1024,1.07,0.54,5.66\r\npoolformer_s12,224,10394.9,98.488,1024,11.92,1.82,5.53\r\nedgenext_small,256,10367.22,98.75,1024,5.59,1.26,9.07\r\nhgnetv2_b4,224,10301.25,99.376,1024,19.8,2.75,6.7\r\nmobilenetv4_hybrid_medium,256,10288.66,99.504,1024,11.07,1.29,9.01\r\nfbnetv3_d,224,10234.97,100.025,1024,10.31,0.52,8.5\r\nconvnextv2_nano,224,10197.84,100.391,1024,15.62,2.46,8.37\r\nmixnet_s,224,10191.22,100.455,1024,4.13,0.25,6.25\r\ndarknet21,256,10159.4,100.771,1024,20.86,3.93,7.47\r\nmobilenetv4_conv_aa_medium,256,10129.84,101.064,1024,9.72,1.58,10.3\r\ndla60x_c,224,10120.98,101.153,1024,1.32,0.59,6.01\r\nskresnet34,224,10067.56,101.69,1024,22.28,3.67,5.13\r\nmobilenetv4_conv_blur_medium,256,10061.28,76.309,768,9.72,1.59,11.2\r\nvit_small_patch16_224,224,9948.51,102.905,1024,22.05,4.61,11.95\r\nresnet101,160,9935.38,103.044,1024,44.55,4.0,8.28\r\nmobilenet_edgetpu_v2_l,224,9922.28,103.179,1024,10.92,2.55,9.05\r\ntf_efficientnetv2_b1,240,9872.84,103.696,1024,8.14,1.21,7.34\r\nvit_wee_patch16_reg1_gap_256,256,9817.45,104.282,1024,13.42,3.83,13.9\r\ntiny_vit_11m_224,224,9806.97,104.393,1024,20.35,2.03,13.49\r\nvit_base_patch32_clip_256,256,9786.41,104.612,1024,87.86,5.76,6.65\r\ndeit3_small_patch16_224,224,9785.42,104.623,1024,22.06,4.61,11.95\r\ndeit_small_patch16_224,224,9782.58,104.654,1024,22.05,4.61,11.95\r\nvit_relpos_small_patch16_224,224,9736.84,105.145,1024,21.98,4.59,13.05\r\nconvnext_nano_ols,224,9733.91,105.177,1024,15.65,2.65,9.38\r\ngernet_l,256,9702.76,105.514,1024,31.08,4.57,8.0\r\nrexnetr_130,224,9688.21,105.671,1024,7.61,0.68,9.
81\r\ndeit_small_distilled_patch16_224,224,9687.44,105.68,1024,22.44,4.63,12.02\r\nhgnetv2_b2,288,9676.65,105.796,1024,11.22,1.89,6.8\r\nvit_srelpos_small_patch16_224,224,9669.55,105.876,1024,21.97,4.59,12.16\r\nconvnext_pico,288,9659.22,105.99,1024,9.05,2.27,10.08\r\nefficientnet_b1_pruned,240,9654.6,106.039,1024,6.33,0.4,6.21\r\ntf_mixnet_s,224,9645.65,106.137,1024,4.13,0.25,6.25\r\nvit_base_patch32_siglip_gap_256,256,9605.84,106.58,1024,87.47,5.67,6.54\r\nresnext26ts,256,9527.22,107.458,1024,10.3,2.43,10.52\r\ndpn48b,224,9501.16,107.752,1024,9.13,1.69,8.92\r\nvit_base_patch32_siglip_256,256,9484.78,107.94,1024,94.55,5.75,6.64\r\nfbnetv3_b,256,9424.6,108.628,1024,8.6,0.55,9.1\r\nvit_pwee_patch16_reg1_gap_256,256,9406.79,108.834,1024,15.25,4.37,15.87\r\nefficientnet_b0_gn,224,9298.88,110.097,1024,5.29,0.42,6.75\r\nswiftformer_l1,224,9274.65,110.386,1024,12.06,1.6,10.07\r\nfasternet_s,224,9214.21,111.109,1024,31.18,4.56,7.93\r\nmobilenetv2_120d,224,9151.9,111.866,1024,5.83,0.69,11.97\r\nconvnext_pico_ols,288,9140.55,112.006,1024,9.06,2.37,10.74\r\nnf_regnet_b2,272,9125.02,112.195,1024,14.31,1.22,9.27\r\ncsatv2,512,9081.59,112.733,1024,11.1,1.39,9.17\r\nrexnet_130,224,9077.4,112.784,1024,7.56,0.68,9.71\r\nmobilenetv4_conv_medium,320,9064.44,112.945,1024,9.72,1.71,11.84\r\ntf_efficientnet_b0,224,8993.25,113.84,1024,5.29,0.4,6.75\r\nresnet26t,256,8970.87,114.125,1024,16.01,3.35,10.52\r\nrepvit_m3,224,8959.89,114.264,1024,10.12,1.86,11.43\r\nnf_regnet_b1,288,8936.16,114.567,1024,10.22,1.02,9.2\r\nrepvgg_b1g4,224,8932.19,114.619,1024,36.13,7.31,5.32\r\nlegacy_seresnext26_32x4d,224,8930.96,114.634,1024,16.79,2.49,9.39\r\nghostnetv2_160,224,8876.2,115.341,1024,12.39,0.42,7.23\r\nvit_small_patch16_rope_ape_224,224,8872.8,115.386,1024,22.06,4.61,11.95\r\nconvnextv2_pico,288,8872.23,115.394,1024,9.07,2.27,10.08\r\nvit_small_patch16_rope_224,224,8869.72,115.426,1024,21.98,4.61,11.95\r\nresnet34,288,8861.18,115.538,1024,21.8,6.07,6.18\r\nsedarknet21,256,8847.24,115.719,1024,20.
95,3.93,7.47\r\nresnetv2_34,288,8776.87,116.648,1024,21.8,6.07,6.18\r\nvit_relpos_small_patch16_rpn_224,224,8774.6,116.677,1024,21.97,4.59,13.05\r\ngcresnext26ts,256,8754.78,116.942,1024,10.48,2.43,10.53\r\necaresnet50d_pruned,224,8724.77,117.344,1024,19.94,2.53,6.43\r\nhrnet_w18_small_v2,224,8722.71,117.371,1024,15.6,2.62,9.65\r\nvit_dwee_patch16_reg1_gap_256,256,8710.62,117.532,1024,13.43,3.83,17.6\r\nmambaout_femto,288,8709.33,117.548,1024,7.3,1.91,13.79\r\nmobilevitv2_075,256,8689.9,117.815,1024,2.87,1.05,12.06\r\nresnest14d,224,8677.18,117.988,1024,10.61,2.76,7.33\r\nghostnetv3_160,224,8591.98,119.157,1024,12.38,0.41,7.23\r\nrexnetr_150,224,8556.04,119.658,1024,9.78,0.89,11.13\r\nflexivit_small,240,8538.34,119.907,1024,22.06,5.35,14.18\r\nseresnext26ts,256,8488.35,120.613,1024,10.39,2.43,10.52\r\neca_resnext26ts,256,8474.36,120.812,1024,10.3,2.43,10.52\r\nefficientnet_lite2,260,8446.7,121.208,1024,6.09,0.89,12.9\r\ncs3darknet_focus_l,256,8444.37,121.241,1024,21.15,4.66,8.03\r\nbotnet26t_256,256,8443.92,121.248,1024,12.49,3.32,11.98\r\nmobilenet_edgetpu_v2_m,256,8424.7,121.525,1024,8.46,2.42,10.65\r\nhalonet26t,256,8384.54,122.106,1024,12.48,3.19,11.69\r\nvit_dpwee_patch16_reg1_gap_256,256,8344.47,122.693,1024,15.25,4.37,19.05\r\ntf_efficientnet_lite2,260,8335.99,122.817,1024,6.09,0.89,12.9\r\nefficientvit_b1,288,8325.39,122.974,1024,9.1,0.87,11.96\r\nmobilenetv4_conv_large,256,8277.32,123.688,1024,32.59,2.86,12.14\r\ndpn68,224,8275.88,123.71,1024,12.61,2.35,10.47\r\npvt_v2_b1,224,8275.67,123.713,1024,14.01,2.12,15.39\r\necaresnext26t_32x4d,224,8270.41,123.789,1024,15.41,2.7,10.09\r\necaresnext50t_32x4d,224,8268.76,123.817,1024,15.41,2.7,10.09\r\nefficientnet_b0,256,8265.02,123.872,1024,5.29,0.52,8.81\r\nregnetx_016,224,8262.12,123.916,1024,9.19,1.62,7.93\r\nhgnet_tiny,224,8261.22,123.929,1024,14.74,4.54,6.36\r\nseresnext26t_32x4d,224,8260.28,123.944,1024,16.81,2.7,10.09\r\nseresnext26d_32x4d,224,8222.27,124.518,1024,16.81,2.73,10.19\r\nedgenext_small_rw,256,818
4.78,125.087,1024,7.83,1.58,9.51\r\nefficientformerv2_s2,224,8157.83,125.499,1024,12.71,1.27,11.77\r\ncs3darknet_l,256,8146.81,125.67,1024,21.16,4.86,8.55\r\nefficientnet_blur_b0,224,8140.46,125.767,1024,5.29,0.43,8.72\r\ngc_efficientnetv2_rw_t,224,8103.91,126.335,1024,13.68,1.94,9.97\r\nmobileone_s4,224,8096.24,126.456,1024,14.84,2.98,11.81\r\nresnet101,176,8068.5,126.89,1024,44.55,4.92,10.08\r\nresnet34d,288,8051.99,127.152,1024,21.82,6.47,7.51\r\nresnet50,224,8039.05,127.355,1024,25.56,4.11,11.11\r\nefficientnet_b0_g16_evos,224,8007.55,127.855,1024,8.11,1.01,7.42\r\nresnetv2_34d,288,7978.04,128.33,1024,21.82,6.46,7.51\r\nrepvit_m1_5,224,7968.85,128.477,1024,14.05,2.27,12.84\r\neca_botnext26ts_256,256,7968.12,128.489,1024,10.59,2.46,11.6\r\nnfnet_l0,224,7933.12,129.056,1024,35.07,4.36,10.47\r\neca_nfnet_l0,224,7922.81,129.224,1024,24.14,4.35,10.47\r\necaresnetlight,224,7920.63,129.26,1024,30.16,4.11,8.42\r\nresnet26,288,7895.16,129.676,1024,16.0,3.9,12.15\r\nrexnet_150,224,7887.58,129.8,1024,9.73,0.9,11.21\r\nfbnetv3_d,256,7880.83,129.912,1024,10.31,0.68,11.1\r\nresnet32ts,256,7878.57,129.95,1024,17.96,4.63,11.58\r\nfastvit_t12,256,7876.61,129.982,1024,7.51,1.39,9.57\r\ndpn68b,224,7846.82,130.475,1024,12.61,2.35,10.47\r\nseresnet34,288,7809.79,131.095,1024,21.96,6.07,6.18\r\nconvit_tiny,224,7799.43,131.267,1024,5.71,1.26,7.94\r\nefficientnet_b0_g8_gn,224,7777.6,131.636,1024,6.56,0.66,6.75\r\nresnet33ts,256,7770.98,131.75,1024,19.68,4.76,11.66\r\nxcit_tiny_24_p16_224,224,7747.34,132.15,1024,12.12,2.34,11.82\r\neca_halonext26ts,256,7744.0,132.207,1024,10.76,2.44,11.46\r\nregnetz_005,288,7743.88,132.21,1024,7.12,0.86,9.68\r\ndla60,224,7731.87,132.415,1024,22.04,4.26,10.16\r\nvit_small_resnet26d_224,224,7731.6,132.419,1024,63.61,5.07,11.12\r\nvit_small_r26_s32_224,224,7730.11,132.444,1024,36.43,3.56,9.85\r\nresnet50c,224,7665.6,133.561,1024,25.58,4.35,11.92\r\ntresnet_m,224,7657.17,133.707,1024,31.39,5.75,7.31\r\ngmlp_s16_224,224,7580.51,135.06,1024,19.42,4.42,15.1\r\
nhgnetv2_b3,288,7569.95,135.248,1024,16.29,2.94,8.38\r\ncoatnext_nano_rw_224,224,7556.99,135.48,1024,14.7,2.47,12.8\r\nvit_small_patch16_rope_mixed_ape_224,224,7556.04,135.498,1024,22.06,4.61,12.85\r\nlevit_conv_512,224,7549.13,135.616,1024,95.08,5.62,10.22\r\nvit_small_patch16_rope_mixed_224,224,7547.22,135.656,1024,21.99,4.61,12.85\r\nefficientnet_b1,224,7537.09,135.838,1024,7.79,0.59,9.36\r\nresnext26ts,288,7505.69,136.406,1024,10.3,3.07,13.31\r\nresnet50t,224,7489.76,136.697,1024,25.57,4.32,11.82\r\nvit_tiny_patch16_384,384,7487.24,136.743,1024,5.79,4.7,25.39\r\nbat_resnext26ts,256,7483.3,136.814,1024,10.73,2.53,12.51\r\nmobilenetv4_hybrid_large_075,256,7472.67,137.009,1024,22.75,2.06,11.64\r\ncoat_lite_tiny,224,7464.91,137.151,1024,5.72,1.6,11.65\r\nmobilevit_xs,256,7449.59,103.07,768,2.32,1.05,16.33\r\nefficientnetv2_rw_t,224,7445.32,137.513,1024,13.65,1.93,9.94\r\nresnet50d,224,7440.05,137.61,1024,25.58,4.35,11.92\r\nxcit_nano_12_p16_384,384,7427.06,137.85,1024,3.05,1.64,12.15\r\nresnetv2_50,224,7387.29,138.592,1024,25.55,4.11,11.11\r\nwide_resnet50_2,176,7380.23,138.725,1024,68.88,7.29,8.97\r\nmobilenetv3_large_150d,256,7377.44,138.778,1024,14.62,1.03,12.35\r\necaresnet26t,256,7309.0,140.078,1024,16.01,3.35,10.53\r\nmambaout_kobe,288,7305.93,140.133,1024,9.14,2.5,16.53\r\nvovnet39a,224,7266.84,140.89,1024,22.6,7.09,6.73\r\nvit_base_patch32_plus_256,256,7259.77,141.03,1024,119.48,7.79,7.76\r\ntf_efficientnetv2_b2,260,7228.41,141.64,1024,10.1,1.72,9.84\r\nregnety_016,224,7211.18,141.979,1024,11.2,1.63,8.04\r\nfastvit_s12,256,7195.35,142.292,1024,9.43,1.8,10.82\r\ngcresnet33ts,256,7135.76,143.479,1024,19.88,4.76,11.68\r\nresnetaa50,224,7133.49,143.525,1024,25.56,5.15,11.64\r\nresnetaa34d,288,7093.2,144.341,1024,21.82,7.33,8.38\r\nefficientnet_em,240,7075.86,144.694,1024,6.9,3.04,14.34\r\nnf_regnet_b3,288,7066.34,144.889,1024,18.59,1.67,11.84\r\nlevit_conv_512d,224,7053.51,145.146,1024,92.39,5.84,11.3\r\nres2net50_48w_2s,224,7032.65,145.584,1024,25.29,4.18,11.72
\r\nfastvit_sa12,256,7026.11,145.719,1024,11.55,1.94,11.24\r\nvgg11_bn,224,7005.9,146.14,1024,132.87,7.62,7.44\r\nvgg11,224,6998.89,146.287,1024,132.86,7.61,7.44\r\nresnet26d,288,6998.8,146.288,1024,16.01,4.29,13.48\r\ntf_efficientnet_em,240,6989.73,146.477,1024,6.9,3.04,14.34\r\nresnet152,160,6971.33,146.863,1024,60.19,5.9,11.51\r\ntf_efficientnetv2_b3,240,6957.08,147.165,1024,14.36,1.93,9.95\r\ncoat_lite_mini,224,6950.62,147.301,1024,11.01,2.0,12.25\r\nresnetv2_50t,224,6948.12,147.356,1024,25.57,4.32,11.82\r\nseresnet33ts,256,6937.0,147.592,1024,19.78,4.76,11.66\r\neca_resnet33ts,256,6936.33,147.606,1024,19.68,4.76,11.66\r\nese_vovnet19b_dw,288,6926.42,147.817,1024,6.54,2.22,13.63\r\ncrossvit_small_240,240,6905.81,148.257,1024,26.86,5.63,18.17\r\ngcresnext26ts,288,6897.2,148.443,1024,10.48,3.07,13.33\r\nresnetv2_50d,224,6883.54,148.737,1024,25.57,4.35,11.92\r\nlevit_512,224,6873.76,148.949,1024,95.08,5.62,10.22\r\nvit_pe_core_tiny_patch16_384,384,6853.8,149.381,1024,6.14,4.74,25.62\r\nselecsls84,224,6821.65,150.086,1024,50.95,5.9,7.57\r\nefficientvit_b2,224,6779.45,151.021,1024,24.33,1.6,14.62\r\nvit_relpos_base_patch32_plus_rpn_256,256,6771.02,151.209,1024,119.42,7.68,8.01\r\nresnetblur50,224,6755.89,151.548,1024,25.56,5.16,12.02\r\neca_vovnet39b,224,6740.73,151.889,1024,22.6,7.09,6.74\r\nese_vovnet39b,224,6738.72,151.934,1024,24.57,7.09,6.74\r\nnf_ecaresnet50,224,6729.92,152.133,1024,25.56,4.21,11.13\r\nconvnext_nano,288,6727.51,152.189,1024,15.59,4.06,13.84\r\nresnext50_32x4d,224,6722.22,152.307,1024,25.03,4.26,14.4\r\nvit_little_patch16_reg1_gap_256,256,6718.18,152.396,1024,22.52,6.27,18.06\r\nmixnet_m,224,6716.09,152.445,1024,5.01,0.36,8.19\r\nnf_seresnet50,224,6708.46,152.619,1024,28.09,4.21,11.13\r\nconvnext_tiny_hnf,224,6707.56,152.64,1024,28.59,4.47,13.44\r\nconvnext_tiny,224,6698.28,152.852,1024,28.59,4.47,13.44\r\nseresnext26ts,288,6694.55,152.936,1024,10.39,3.07,13.32\r\necaresnet101d_pruned,224,6686.56,153.119,1024,24.88,3.48,7.69\r\neca_resnext26ts,2
88,6683.28,153.195,1024,10.3,3.07,13.32\r\nresnetaa50d,224,6671.04,153.476,1024,25.58,5.39,12.44\r\nresnet50_clip_gap,224,6667.45,153.558,1024,23.53,5.39,12.44\r\nefficientnet_cc_b0_4e,224,6666.69,153.576,1024,13.31,0.41,9.42\r\ninception_v3,299,6662.83,153.664,1024,23.83,5.73,8.97\r\nvit_medium_patch16_clip_224,224,6662.68,153.658,1024,38.59,8.0,15.93\r\neva02_tiny_patch14_336,336,6659.19,153.749,1024,5.76,4.68,27.16\r\nvit_little_patch16_reg4_gap_256,256,6656.67,153.805,1024,22.52,6.35,18.33\r\ninception_next_tiny,224,6656.05,153.82,1024,28.06,4.19,11.98\r\ncs3darknet_focus_l,288,6635.28,154.299,1024,21.15,5.9,10.16\r\nedgenext_small,320,6619.28,154.677,1024,5.59,1.97,14.16\r\ndla60x,224,6607.41,154.955,1024,17.35,3.54,13.8\r\ncs3sedarknet_l,256,6574.76,155.723,1024,21.91,4.86,8.56\r\ncrossvit_15_240,240,6574.05,155.741,1024,27.53,5.81,19.77\r\nmambaout_tiny,224,6571.71,155.796,1024,26.55,4.49,16.68\r\nefficientnet_cc_b0_8e,224,6555.71,156.177,1024,24.01,0.42,9.42\r\nresnet50s,224,6534.49,156.684,1024,25.68,5.47,13.52\r\neva02_small_patch14_224,224,6522.54,156.972,1024,21.62,6.14,18.28\r\nlegacy_seresnet50,224,6510.39,157.264,1024,28.09,3.88,10.6\r\nmobilevitv2_100,256,6487.34,118.362,768,4.9,1.84,16.08\r\nefficientnet_b1,240,6468.22,158.289,1024,7.79,0.71,10.88\r\nlevit_512d,224,6461.14,158.464,1024,92.39,5.84,11.3\r\nresnest26d,224,6448.37,158.777,1024,17.07,3.64,9.97\r\nskresnet50,224,6436.38,159.072,1024,25.8,4.11,12.5\r\ncs3darknet_l,288,6406.87,159.805,1024,21.16,6.16,10.83\r\ndeit3_medium_patch16_224,224,6404.87,159.855,1024,38.85,8.0,15.93\r\nregnetz_b16,224,6387.02,160.301,1024,9.72,1.45,9.95\r\nxcit_small_12_p16_224,224,6373.98,160.629,1024,26.25,4.82,12.58\r\nefficientnet_b2_pruned,260,6359.48,160.995,1024,8.31,0.73,9.13\r\ntf_mixnet_m,224,6355.08,161.106,1024,5.01,0.36,8.19\r\nese_vovnet39b_evos,224,6351.98,161.186,1024,24.58,7.07,6.74\r\nresnetblur50d,224,6346.95,161.314,1024,25.58,5.4,12.82\r\nmobilenetv4_hybrid_medium,320,6319.34,162.019,1024,11.07,
2.05,14.36\r\nvit_relpos_medium_patch16_224,224,6310.42,162.248,1024,38.75,7.97,17.02\r\nresnext50d_32x4d,224,6302.23,162.458,1024,25.05,4.5,15.2\r\ncspresnet50,256,6301.99,162.466,1024,21.62,4.54,11.5\r\nresnet50_gn,224,6301.74,162.472,1024,25.56,4.14,11.11\r\nconvformer_s18,224,6287.89,162.829,1024,26.77,3.96,15.82\r\nrepvgg_b1,224,6285.23,162.898,1024,51.83,11.82,5.32\r\nmobilenetv4_conv_medium,384,6280.22,163.028,1024,9.72,2.46,17.05\r\nvit_srelpos_medium_patch16_224,224,6271.82,163.247,1024,38.74,7.96,16.21\r\nseresnet50,224,6261.94,163.504,1024,28.09,4.11,11.13\r\ncaformer_s18,224,6254.87,163.688,1024,26.34,4.13,19.39\r\nconvnextv2_tiny,224,6253.52,163.724,1024,28.64,4.47,13.44\r\nrexnetr_200,224,6250.86,122.839,768,16.52,1.59,15.11\r\nvit_base_resnet26d_224,224,6243.58,163.986,1024,101.4,6.97,13.16\r\nhaloregnetz_b,224,6233.28,164.256,1024,11.68,1.97,11.94\r\ntiny_vit_21m_224,224,6228.45,164.383,1024,33.21,4.27,20.08\r\nresnet32ts,288,6211.5,164.832,1024,17.96,5.86,14.65\r\nresnet50_clip,224,6209.61,164.882,1024,38.32,6.14,12.98\r\ndensenet121,224,6209.44,164.886,1024,7.98,2.87,6.9\r\nconvnextv2_nano,288,6202.86,165.062,1024,15.62,4.06,13.84\r\nhgnetv2_b4,288,6194.78,165.277,1024,19.8,4.54,11.08\r\nvit_relpos_medium_patch16_cls_224,224,6191.79,165.357,1024,38.76,8.03,18.24\r\nefficientformer_l3,224,6140.37,166.742,1024,31.41,3.93,12.01\r\ngmixer_24_224,224,6137.69,166.813,1024,24.72,5.28,14.45\r\nresnet33ts,288,6133.58,166.927,1024,19.68,6.02,14.75\r\ngcvit_xxtiny,224,6128.43,167.066,1024,12.0,2.14,15.36\r\ncrossvit_15_dagger_240,240,6116.89,167.382,1024,28.21,6.13,20.43\r\nregnetx_032,224,6109.86,167.575,1024,15.3,3.2,11.37\r\nregnetz_b16_evos,224,6107.53,167.639,1024,9.74,1.43,9.95\r\nvit_dlittle_patch16_reg1_gap_256,256,6063.7,168.846,1024,22.52,6.27,22.69\r\ncoatnet_pico_rw_224,224,6063.28,84.42,512,10.85,2.05,14.62\r\ntf_efficientnet_cc_b0_4e,224,6051.89,169.181,1024,13.31,0.41,9.42\r\nvovnet57a,224,6044.24,169.393,1024,36.64,8.95,7.52\r\nskresnet50d,224
,6043.05,169.427,1024,25.82,4.36,13.31\r\nsehalonet33ts,256,6020.42,170.064,1024,13.69,3.55,14.7\r\ncspresnet50d,256,5989.64,170.939,1024,21.64,4.86,12.55\r\ncspresnet50w,256,5960.94,171.759,1024,28.12,5.04,12.19\r\nres2net50_26w_4s,224,5946.22,172.186,1024,25.7,4.28,12.61\r\necaresnet50t,224,5930.42,172.646,1024,25.57,4.32,11.83\r\nseresnet50t,224,5920.92,172.924,1024,28.1,4.32,11.83\r\nrexnet_200,224,5917.85,129.753,768,16.37,1.56,14.91\r\nresmlp_24_224,224,5897.19,173.619,1024,30.02,5.96,10.91\r\necaresnet50d,224,5895.15,173.679,1024,25.58,4.35,11.93\r\ncsatv2_21m,512,5885.81,173.953,1024,20.7,2.94,15.85\r\nconvnext_nano_ols,288,5882.86,174.042,1024,15.65,4.38,15.5\r\nxcit_nano_12_p8_224,224,5868.76,174.459,1024,3.05,2.16,15.71\r\nresnetrs50,224,5864.29,174.594,1024,35.69,4.48,12.14\r\npoolformerv2_s24,224,5847.92,175.082,1024,21.34,3.42,10.68\r\ntf_efficientnet_cc_b0_8e,224,5831.12,175.586,1024,24.01,0.42,9.42\r\nmobilevit_s,256,5814.4,132.062,768,5.58,2.03,19.94\r\ntwins_pcpvt_small,224,5799.45,176.543,1024,24.11,3.83,18.08\r\nefficientnet_b1,256,5775.65,177.272,1024,7.79,0.77,12.22\r\nfbnetv3_g,240,5764.71,177.608,1024,16.62,1.28,14.87\r\nres2net50_14w_8s,224,5747.51,178.141,1024,25.06,4.21,13.28\r\nresnet26t,320,5739.2,178.398,1024,16.01,5.24,16.44\r\nnf_regnet_b3,320,5736.47,178.484,1024,18.59,2.05,14.61\r\nhgnetv2_b5,224,5731.95,178.624,1024,39.57,6.56,11.19\r\nfastvit_mci0,256,5727.04,178.778,1024,11.36,2.39,14.72\r\nregnetv_040,224,5726.73,178.786,1024,20.64,4.0,12.29\r\ngcresnext50ts,256,5726.32,178.8,1024,15.67,3.75,15.46\r\ndensenetblur121d,224,5726.27,178.801,1024,8.0,3.11,7.9\r\nedgenext_base,256,5713.2,179.21,1024,18.51,3.85,15.58\r\ntf_efficientnet_b1,240,5686.7,180.045,1024,7.79,0.71,10.88\r\nefficientvit_l1,224,5680.91,180.229,1024,52.65,5.27,15.85\r\nres2next50,224,5676.71,180.362,1024,24.67,4.2,13.71\r\ndla60_res2net,224,5657.25,180.983,1024,20.85,4.15,12.34\r\nresnet152,176,5655.31,181.044,1024,60.19,7.22,13.99\r\ngcresnet33ts,288,5627.94,181.
927,1024,19.88,6.02,14.78\r\nswiftformer_l3,224,5622.46,182.104,1024,28.49,4.01,15.77\r\nnf_resnet50,256,5613.86,182.383,1024,25.56,5.46,14.52\r\nvit_base_r26_s32_224,224,5613.71,182.388,1024,101.38,6.81,12.36\r\nresnet51q,256,5613.46,182.396,1024,35.7,6.38,16.55\r\nres2net50d,224,5610.98,182.476,1024,25.72,4.52,13.41\r\nvit_medium_patch16_gap_240,240,5603.73,182.709,1024,44.4,9.22,18.81\r\ndla60_res2next,224,5588.57,183.207,1024,17.03,3.49,13.17\r\nresnetv2_50x1_bit,224,5571.07,183.784,1024,25.55,4.23,11.11\r\nvisformer_small,224,5571.02,183.785,1024,40.22,4.88,11.43\r\nhgnet_small,224,5552.72,184.391,1024,24.36,8.53,8.79\r\ngcresnet50t,256,5544.35,184.67,1024,25.9,5.42,14.67\r\nvit_small_patch16_dinov3,256,5529.64,185.161,1024,21.59,6.26,17.03\r\ntwins_svt_small,224,5521.52,185.428,1024,24.06,2.94,13.75\r\nsebotnet33ts_256,256,5508.09,185.882,1024,13.7,3.89,17.46\r\nvit_small_patch16_dinov3_qkvb,256,5499.96,186.16,1024,21.6,6.26,17.03\r\nregnety_040,224,5491.85,186.434,1024,20.65,4.0,12.29\r\nefficientnet_b2,256,5485.24,186.658,1024,9.11,0.89,12.81\r\nvit_relpos_medium_patch16_rpn_224,224,5468.26,187.239,1024,38.73,7.97,17.02\r\nseresnet33ts,288,5455.88,187.664,1024,19.78,6.02,14.76\r\neca_resnet33ts,288,5446.3,187.993,1024,19.68,6.02,14.76\r\ncoatnet_0_rw_224,224,5437.39,188.303,1024,27.44,4.43,18.73\r\nseresnext50_32x4d,224,5428.09,188.624,1024,27.56,4.26,14.42\r\nlegacy_seresnext50_32x4d,224,5424.79,188.739,1024,27.56,4.26,14.42\r\nseresnetaa50d,224,5391.14,189.918,1024,28.11,5.4,12.46\r\ndavit_tiny,224,5361.11,143.227,768,28.36,4.54,18.89\r\ncspresnext50,256,5336.02,191.88,1024,20.57,4.05,15.86\r\nresnetv2_50d_frn,224,5329.87,192.101,1024,25.59,4.33,11.92\r\nresnetv2_50d_evos,224,5298.35,193.245,1024,25.59,4.33,11.92\r\nfocalnet_tiny_srf,224,5292.9,193.443,1024,28.43,4.42,16.32\r\ncoatnet_nano_rw_224,224,5289.06,193.583,1024,15.14,2.41,15.41\r\nmobilenetv4_conv_large,320,5262.93,194.544,1024,32.59,4.47,18.97\r\nefficientvit_b2,256,5250.96,194.989,1024,24.33,2.
09,19.03\r\nresnetv2_50d_gn,224,5237.32,195.494,1024,25.57,4.38,11.92\r\ncs3darknet_focus_x,256,5220.33,196.133,1024,35.02,8.03,10.69\r\necaresnet50d_pruned,288,5220.06,196.143,1024,19.94,4.19,10.61\r\nedgenext_small_rw,320,5193.11,197.162,1024,7.83,2.46,14.85\r\ncs3sedarknet_l,288,5188.14,197.349,1024,21.91,6.16,10.83\r\ndarknet53,256,5186.89,197.397,1024,41.61,9.31,12.39\r\nnextvit_small,224,5159.7,198.437,1024,31.74,5.8,18.44\r\nlambda_resnet26rpt_256,256,5155.81,99.282,512,10.99,3.16,11.87\r\ncoatnet_nano_cc_224,224,5154.1,198.653,1024,13.76,2.24,15.02\r\nmobilevitv2_125,256,5132.67,149.606,768,7.48,2.86,20.1\r\nresnetrs101,192,5131.84,199.514,1024,63.62,6.04,12.7\r\nefficientformerv2_l,224,5126.68,199.714,1024,26.32,2.59,18.54\r\nlambda_resnet26t,256,5121.44,199.921,1024,10.96,3.02,11.87\r\nresnet61q,256,5120.05,199.975,1024,36.85,7.8,17.01\r\npoolformer_s24,224,5119.67,199.99,1024,21.39,3.41,10.68\r\nresnet50_mlp,256,5114.23,200.203,1024,26.65,7.05,16.25\r\ndla102,224,5113.71,200.222,1024,33.27,7.19,14.18\r\nresnest50d_1s4x24d,224,5108.15,200.44,1024,25.68,4.43,13.57\r\nnfnet_f0,192,5097.6,200.856,1024,71.49,7.21,10.16\r\nresnet101,224,5049.4,202.773,1024,44.55,7.83,16.23\r\ndm_nfnet_f0,192,5048.84,202.795,1024,71.49,7.21,10.16\r\nhrnet_w18,224,5035.55,203.327,1024,21.3,4.32,16.31\r\nhrnet_w18_ssld,224,5035.2,203.344,1024,21.3,4.32,16.31\r\ncs3darknet_x,256,5026.66,203.69,1024,35.05,8.38,11.35\r\ndarknetaa53,256,5021.62,203.895,1024,36.02,7.97,12.39\r\nswin_tiny_patch4_window7_224,224,5020.34,203.947,1024,28.29,4.51,17.06\r\nseresnext26t_32x4d,288,5013.28,204.235,1024,16.81,4.46,16.68\r\nrdnet_tiny,224,4994.21,205.014,1024,23.86,5.06,15.98\r\ndensenet169,224,4993.55,205.039,1024,14.15,3.4,7.3\r\nefficientnet_lite3,300,4989.69,102.589,512,8.2,1.65,21.85\r\nxcit_tiny_12_p16_384,384,4989.58,205.204,1024,6.72,3.64,18.26\r\nmaxvit_pico_rw_256,256,4974.85,154.353,768,7.46,1.83,22.3\r\nseresnext26d_32x4d,288,4966.98,206.133,1024,16.81,4.51,16.85\r\nmixnet_l,224,4966.
59,206.152,1024,7.33,0.58,10.84\r\nmaxvit_rmlp_pico_rw_256,256,4966.08,154.624,768,7.52,1.85,24.86\r\nefficientnet_b3_pruned,300,4957.8,206.518,1024,9.86,1.04,11.86\r\nvit_medium_patch16_gap_256,256,4957.71,206.519,1024,38.86,10.59,22.15\r\nhgnet_tiny,288,4948.49,206.907,1024,14.74,7.51,10.51\r\ntf_efficientnet_lite3,300,4936.06,103.703,512,8.2,1.65,21.85\r\nskresnext50_32x4d,224,4921.21,208.049,1024,27.48,4.5,17.18\r\nfocalnet_tiny_lrf,224,4905.94,208.702,1024,28.65,4.49,17.76\r\ngc_efficientnetv2_rw_t,288,4902.29,208.857,1024,13.68,3.2,16.45\r\nresnet101c,224,4895.84,209.131,1024,44.57,8.08,17.04\r\nregnetx_040,224,4884.64,209.614,1024,22.12,3.99,12.2\r\nresnet50,288,4865.17,210.453,1024,25.56,6.8,18.37\r\nregnety_032,224,4857.51,210.784,1024,19.44,3.2,11.26\r\nvit_base_resnet50d_224,224,4815.29,212.633,1024,110.97,8.73,16.92\r\nnfnet_l0,288,4811.55,212.797,1024,35.07,7.13,17.29\r\nresnet101d,224,4806.39,213.027,1024,44.57,8.08,17.04\r\nvgg13,224,4804.56,213.107,1024,133.05,11.31,12.25\r\neca_nfnet_l0,288,4804.04,213.131,1024,24.14,7.12,17.29\r\ncoatnet_bn_0_rw_224,224,4799.82,106.647,512,27.44,4.67,22.04\r\necaresnetlight,288,4778.68,214.262,1024,30.16,6.79,13.91\r\ndpn68b,288,4776.35,214.365,1024,12.61,3.89,17.3\r\nvgg13_bn,224,4751.08,215.507,1024,133.05,11.33,12.25\r\nvit_medium_patch16_reg1_gap_256,256,4749.27,215.586,1024,38.88,10.63,22.26\r\nmobilenetv3_large_150d,320,4720.3,162.679,768,14.62,1.61,19.29\r\nswinv2_cr_tiny_224,224,4717.07,217.061,1024,28.33,4.66,28.45\r\ntf_mixnet_l,224,4715.34,217.136,1024,7.33,0.58,10.84\r\nswinv2_cr_tiny_ns_224,224,4713.04,217.246,1024,28.33,4.66,28.45\r\ngcvit_xtiny,224,4688.25,218.393,1024,19.98,2.93,20.26\r\nwide_resnet50_2,224,4684.21,218.581,1024,68.88,11.43,14.4\r\nvit_small_plus_patch16_dinov3,256,4682.18,218.678,1024,28.68,8.11,21.84\r\nhalonet50ts,256,4676.58,218.94,1024,22.73,5.3,19.2\r\necaresnet26t,320,4673.09,219.104,1024,16.01,5.24,16.44\r\nrepvit_m2_3,224,4660.7,219.686,1024,22.93,4.52,21.32\r\nvit_small_plu
s_patch16_dinov3_qkvb,256,4659.93,219.721,1024,28.69,8.11,21.84\r\npvt_v2_b2,224,4659.88,219.724,1024,25.36,4.05,27.53\r\ncoatnet_rmlp_nano_rw_224,224,4649.72,110.09,512,15.15,2.62,20.34\r\nvit_medium_patch16_reg4_gap_256,256,4628.85,221.195,1024,38.88,10.76,22.6\r\nrepvgg_b2g4,224,4614.82,221.87,1024,55.78,11.33,6.45\r\nhiera_tiny_224,224,4595.09,222.823,1024,27.91,4.91,17.13\r\nnf_resnet101,224,4584.27,223.346,1024,44.55,8.01,16.23\r\nefficientnet_b1,288,4569.39,224.076,1024,7.79,0.97,15.46\r\necaresnet50t,256,4546.18,225.221,1024,25.57,5.64,15.45\r\nresnet50t,288,4538.06,225.625,1024,25.57,7.14,19.53\r\nregnety_040_sgn,224,4534.38,225.806,1024,20.65,4.03,12.29\r\ngcresnext50ts,288,4516.85,226.683,1024,15.67,4.75,19.57\r\nefficientnetv2_rw_t,288,4513.76,226.838,1024,13.65,3.19,16.42\r\nresnet50d,288,4512.86,226.884,1024,25.58,7.19,19.7\r\nresnetv2_34d,384,4511.6,226.948,1024,21.82,11.49,13.35\r\nfasternet_m,224,4506.12,227.223,1024,53.52,8.74,15.34\r\nresnetv2_101,224,4500.58,227.502,1024,44.54,7.83,16.23\r\nswin_s3_tiny_224,224,4489.43,228.064,1024,28.33,4.64,19.13\r\ncspdarknet53,256,4488.58,228.111,1024,27.64,6.57,16.81\r\nefficientvit_l2,224,4486.87,228.197,1024,63.71,6.97,19.58\r\nvitamin_small_224,224,4479.45,114.275,512,22.17,5.92,26.38\r\ntf_efficientnet_b2,260,4476.9,228.706,1024,9.11,1.02,13.83\r\nresnetaa101d,224,4473.46,228.882,1024,44.57,9.12,17.56\r\nresnetv2_50,288,4472.33,228.94,1024,25.55,6.79,18.37\r\nresnet101_clip_gap,224,4470.45,229.037,1024,42.52,9.11,17.56\r\ndla102x,224,4467.09,229.208,1024,26.31,5.89,19.42\r\nresnest50d,224,4454.32,229.866,1024,27.48,5.4,14.36\r\nresnet51q,288,4443.7,230.416,1024,35.7,8.07,20.94\r\nmvitv2_tiny,224,4420.66,231.613,1024,24.17,4.7,21.16\r\ncrossvit_18_240,240,4419.04,231.701,1024,43.27,9.05,26.26\r\nnf_resnet50,288,4409.09,232.224,1024,25.56,6.88,18.37\r\ntf_efficientnetv2_b3,300,4407.99,232.281,1024,14.36,3.04,15.74\r\nresnet101s,224,4403.21,232.534,1024,44.67,9.19,18.64\r\nfastvit_sa24,256,4379.31,233.802,1
024,21.5,3.77,20.35\r\ngcresnet50t,288,4375.67,233.997,1024,25.9,6.86,18.57\r\nres2net50_26w_6s,224,4371.88,234.2,1024,37.05,6.33,15.28\r\nvit_medium_patch16_rope_reg1_gap_256,256,4362.72,234.689,1024,38.74,10.63,22.26\r\nnf_regnet_b4,320,4356.08,235.049,1024,30.21,3.29,19.88\r\nese_vovnet57b,256,4353.25,235.203,1024,38.61,11.69,9.82\r\nefficientnet_b2,288,4340.85,235.875,1024,9.11,1.12,16.2\r\nbotnet50ts_256,256,4335.36,236.173,1024,22.74,5.54,22.23\r\nresnetblur101d,224,4325.56,236.708,1024,44.57,9.12,17.94\r\nhalo2botnet50ts_256,256,4325.51,236.711,1024,22.64,5.02,21.78\r\nefficientnet_cc_b1_8e,240,4320.06,237.008,1024,39.72,0.75,15.44\r\nresnetaa50,288,4319.7,237.031,1024,25.56,8.52,19.24\r\nresnetv2_101d,224,4305.35,237.82,1024,44.56,8.07,17.04\r\nresnext101_32x4d,224,4287.55,238.807,1024,44.18,8.01,21.23\r\nresnet101_clip,224,4270.83,239.742,1024,56.26,9.81,18.08\r\nvit_base_patch32_clip_384,384,4270.75,239.747,1024,88.3,13.06,16.5\r\nmaxxvit_rmlp_nano_rw_256,256,4264.18,240.115,1024,16.78,4.37,26.05\r\nmobilevitv2_150,256,4261.39,120.125,512,10.59,4.09,24.11\r\nvit_base_patch32_384,384,4236.06,241.707,1024,88.3,13.06,16.5\r\nnf_ecaresnet101,224,4232.64,241.905,1024,44.55,8.01,16.27\r\ncait_xxs24_224,224,4219.87,242.638,1024,11.96,2.53,20.29\r\nmobilenetv4_hybrid_medium,384,4215.41,242.894,1024,11.07,3.01,21.18\r\nwide_resnet101_2,176,4212.37,243.067,1024,126.89,14.31,13.18\r\nnf_seresnet101,224,4212.16,243.082,1024,49.33,8.02,16.27\r\ncs3edgenet_x,256,4207.75,243.337,1024,47.82,11.53,12.92\r\nrexnetr_300,224,4179.04,122.493,512,34.81,3.39,22.16\r\ncs3sedarknet_x,256,4172.48,245.393,1024,35.4,8.38,11.35\r\ncrossvit_18_dagger_240,240,4163.88,245.901,1024,44.27,9.5,27.03\r\nefficientvit_b2,288,4151.05,246.661,1024,24.33,2.64,24.03\r\nresnext101_32x8d,176,4148.13,246.835,1024,88.79,10.33,19.37\r\ntwins_pcpvt_base,224,4120.31,248.499,1024,43.83,6.68,25.25\r\nhieradet_small,256,4111.26,186.781,768,34.73,8.51,27.76\r\nresnetblur50,288,4108.86,249.195,1024,25.56,8.52
,19.87\r\ntresnet_v2_l,224,4104.06,249.482,1024,46.17,8.85,16.34\r\nese_vovnet39b,288,4098.75,249.809,1024,24.57,11.71,11.13\r\ndarknet53,288,4096.9,249.922,1024,41.61,11.78,15.68\r\nfbnetv3_g,288,4096.24,249.961,1024,16.62,1.77,21.09\r\nregnetx_080,224,4094.28,250.081,1024,39.57,8.02,14.06\r\npit_b_distilled_224,224,4086.77,250.541,1024,74.79,12.5,33.07\r\nconvnext_tiny_hnf,288,4075.03,251.263,1024,28.59,7.39,22.21\r\nconvnext_tiny,288,4066.22,251.807,1024,28.59,7.39,22.21\r\nresnet61q,288,4057.06,252.376,1024,36.85,9.87,21.52\r\nresnext50_32x4d,288,4053.01,252.628,1024,25.03,7.04,23.81\r\ncoatnet_rmlp_0_rw_224,224,4050.12,126.393,512,27.45,4.72,24.89\r\nresnetaa50d,288,4038.39,253.543,1024,25.58,8.92,20.57\r\ncs3sedarknet_xdw,256,4033.35,253.859,1024,21.6,5.97,17.18\r\nlegacy_seresnet101,224,4020.33,254.68,1024,49.33,7.61,15.74\r\npit_b_224,224,4011.35,255.252,1024,73.76,12.42,32.94\r\ntf_efficientnet_cc_b1_8e,240,3983.71,257.021,1024,39.72,0.75,15.44\r\nconvnext_small,224,3977.46,257.428,1024,50.22,8.71,21.56\r\nrepvgg_b2,224,3971.11,257.838,1024,80.32,18.38,6.45\r\necaresnet101d_pruned,288,3968.54,258.004,1024,24.88,5.75,12.71\r\ndarknetaa53,288,3962.74,258.383,1024,36.02,10.08,15.68\r\ncs3darknet_x,288,3958.96,258.629,1024,35.05,10.6,14.36\r\nresmlp_36_224,224,3954.08,258.95,1024,44.69,8.91,16.33\r\nnextvit_base,224,3953.76,258.969,1024,44.79,8.29,23.71\r\nvit_large_patch32_224,224,3952.17,259.068,1024,305.51,15.39,13.3\r\ninception_next_small,224,3943.43,259.648,1024,49.37,8.36,19.27\r\npoolformerv2_s36,224,3941.4,259.781,1024,30.79,5.01,15.82\r\nseresnet101,224,3922.86,261.01,1024,49.33,7.84,16.27\r\nxcit_tiny_12_p8_224,224,3896.41,262.781,1024,6.71,4.81,23.6\r\npvt_v2_b2_li,224,3867.43,264.752,1024,22.55,3.91,27.6\r\nresnetblur50d,288,3859.28,265.311,1024,25.58,8.92,21.19\r\nregnetz_c16,256,3859.04,265.325,1024,13.46,2.51,16.57\r\nvgg16,224,3852.37,265.786,1024,138.36,15.47,13.56\r\nregnetz_b16,288,3850.01,265.949,1024,9.72,2.39,16.43\r\ncoat_lite_small,224,
3838.91,266.717,1024,19.84,3.96,22.09\r\nmambaout_small,224,3833.42,267.1,1024,48.49,8.96,27.72\r\nhrnet_w32,224,3830.01,267.338,1024,41.23,8.97,22.02\r\nresnext50d_32x4d,288,3816.72,268.269,1024,25.05,7.44,25.13\r\nvgg16_bn,224,3805.83,269.036,1024,138.37,15.5,13.56\r\nresnet50_gn,288,3794.44,269.845,1024,25.56,6.85,18.37\r\nconvnextv2_tiny,288,3791.73,270.039,1024,28.64,7.39,22.21\r\nrexnetr_200,288,3788.38,135.126,512,16.52,2.62,24.96\r\nconvnext_nano,384,3786.19,270.433,1024,15.59,7.22,24.61\r\necaresnet101d,224,3780.17,270.864,1024,44.57,8.08,17.07\r\nseresnet50,288,3779.2,270.932,1024,28.09,6.8,18.39\r\ncsatv2_21m,640,3772.21,271.433,1024,20.7,4.72,26.68\r\nconvnextv2_small,224,3765.82,271.895,1024,50.32,8.71,21.56\r\ndensenet121,288,3752.73,272.843,1024,7.98,4.74,11.41\r\nregnetz_c16_evos,256,3749.84,273.053,1024,13.49,2.48,16.57\r\nres2net101_26w_4s,224,3744.39,273.452,1024,45.21,8.1,18.45\r\nmaxxvitv2_nano_rw_256,256,3728.15,274.644,1024,23.7,6.26,23.05\r\nvolo_d1_224,224,3722.0,275.096,1024,26.63,6.94,24.43\r\nrepvgg_b3g4,224,3715.6,275.571,1024,75.63,16.06,7.55\r\nhiera_small_224,224,3710.74,275.932,1024,35.01,6.42,20.75\r\nregnetz_b16_evos,288,3707.48,276.174,1024,9.74,2.36,16.43\r\ndensenet201,224,3703.97,207.318,768,20.01,4.34,7.85\r\nresnet101d,256,3697.52,276.919,1024,44.57,10.55,22.25\r\nmambaout_small_rw,224,3696.15,277.022,1024,48.5,8.96,27.72\r\nrexnet_300,224,3684.25,138.94,512,34.71,3.44,22.4\r\nregnetx_064,224,3682.52,278.047,1024,26.21,6.49,16.37\r\nconvit_small,224,3664.31,279.43,1024,27.78,5.76,17.87\r\nmobilenetv4_conv_large,384,3658.46,279.876,1024,32.59,6.43,27.31\r\nedgenext_base,320,3646.44,280.796,1024,18.51,6.01,24.32\r\nswinv2_tiny_window8_256,256,3643.94,280.991,1024,28.35,5.96,24.57\r\nmixer_b16_224,224,3640.78,281.235,1024,59.88,12.62,14.53\r\ncoatnet_0_224,224,3625.21,105.902,384,25.04,4.58,24.01\r\nefficientnetv2_s,288,3625.09,282.452,1024,21.46,4.75,20.13\r\nmaxvit_nano_rw_256,256,3619.18,106.077,384,15.45,4.46,30.28\r\neffici
entvit_b3,224,3618.58,282.96,1024,48.65,3.99,26.9\r\nmaxvit_rmlp_nano_rw_256,256,3607.94,106.408,384,15.5,4.47,31.92\r\nres2net101d,224,3599.31,284.473,1024,45.23,8.35,19.25\r\nseresnet50t,288,3588.68,285.319,1024,28.1,7.14,19.55\r\necaresnet50t,288,3582.41,285.817,1024,25.57,7.14,19.55\r\nmobilevitv2_175,256,3573.54,143.251,512,14.25,5.54,28.13\r\necaresnet50d,288,3569.08,286.885,1024,25.58,7.19,19.72\r\nres2net50_26w_8s,224,3567.27,287.031,1024,48.4,8.37,17.95\r\nresnet152,224,3542.09,289.071,1024,60.19,11.56,22.56\r\ninception_v4,299,3536.67,289.514,1024,42.68,12.28,15.09\r\nregnetv_040,288,3508.4,291.847,1024,20.64,6.6,20.3\r\neca_nfnet_l1,256,3501.44,292.427,1024,41.41,9.62,22.04\r\ntresnet_l,224,3494.09,293.042,1024,55.99,10.9,11.9\r\nvit_pe_spatial_tiny_patch16_512,512,3489.2,293.453,1024,5.68,10.46,61.64\r\ntnt_s_legacy_patch16_224,224,3488.04,293.55,1024,23.76,5.24,24.37\r\nefficientvit_l2,256,3482.1,220.533,768,63.71,9.09,25.49\r\nvit_base_patch16_clip_quickgelu_224,224,3480.22,294.211,1024,86.19,17.58,23.9\r\nvit_base_patch16_clip_224,224,3474.87,294.662,1024,86.57,17.58,23.9\r\nconvnextv2_nano,384,3471.6,294.937,1024,15.62,7.22,24.61\r\ndensenetblur121d,288,3467.19,295.313,1024,8.0,5.14,13.06\r\nresnet152c,224,3464.51,295.544,1024,60.21,11.8,23.36\r\nresnetv2_101x1_bit,224,3464.3,295.563,1024,44.54,8.04,16.23\r\nhgnetv2_b5,288,3456.86,296.198,1024,39.57,10.84,18.5\r\nvit_small_resnet50d_s16_224,224,3455.52,296.312,1024,57.53,13.48,24.82\r\nvit_base_patch16_xp_224,224,3438.54,297.775,1024,86.51,17.56,23.9\r\nvit_betwixt_patch16_gap_256,256,3434.64,298.116,1024,60.37,16.25,27.69\r\nefficientnetv2_rw_s,288,3433.53,298.211,1024,23.94,4.91,21.41\r\nmixnet_xl,224,3433.24,298.235,1024,11.9,0.93,14.57\r\nseresnext101_32x4d,224,3429.01,298.605,1024,48.96,8.02,21.26\r\nxcit_small_24_p16_224,224,3425.38,298.919,1024,47.67,9.1,23.64\r\ncoatnet_rmlp_1_rw_224,224,3425.17,298.938,1024,41.69,7.85,35.47\r\ndla169,224,3420.41,299.355,1024,53.39,11.6,20.2\r\nlegacy_seresne
xt101_32x4d,224,3419.1,299.47,1024,48.96,8.02,21.26\r\nresnet152d,224,3419.07,299.473,1024,60.21,11.8,23.36\r\nfastvit_mci1,256,3417.09,299.646,1024,21.46,4.67,27.3\r\nlamhalobotnet50ts_256,256,3416.44,299.702,1024,22.57,5.02,18.44\r\ntnt_s_patch16_224,224,3412.6,300.041,1024,23.77,5.25,24.37\r\nresnest50d_4s2x40d,224,3408.36,300.413,1024,30.42,4.4,17.94\r\nrdnet_small,224,3401.32,301.035,1024,50.44,8.74,22.55\r\nmixer_l32_224,224,3400.86,301.074,1024,206.94,11.27,19.86\r\nvit_small_patch16_18x2_224,224,3397.9,301.335,1024,64.67,13.71,35.69\r\npoolformer_s36,224,3390.86,301.965,1024,30.86,5.0,15.82\r\nregnety_040,288,3386.42,302.36,1024,20.65,6.61,20.3\r\nvit_base_patch16_siglip_gap_224,224,3378.72,303.048,1024,85.8,17.49,23.75\r\nvit_base_patch16_224_miil,224,3377.29,303.177,1024,94.4,17.59,23.91\r\nvit_base_patch16_224,224,3375.59,303.329,1024,86.57,17.58,23.9\r\nhgnet_small,288,3373.99,303.474,1024,24.36,14.09,14.53\r\nmambaout_tiny,288,3371.82,303.67,1024,26.55,7.41,27.58\r\ndeit_base_patch16_224,224,3365.59,304.233,1024,86.57,17.58,23.9\r\nvit_betwixt_patch16_reg1_gap_256,256,3365.35,304.255,1024,60.4,16.32,27.83\r\nregnetv_064,224,3359.07,304.821,1024,30.58,6.39,16.41\r\nsequencer2d_s,224,3356.29,305.073,1024,27.65,4.96,11.31\r\nefficientnet_x_b3,288,3355.1,305.182,1024,13.3,3.91,15.6\r\nvit_small_patch16_36x1_224,224,3352.56,305.412,1024,64.67,13.71,35.69\r\ncs3edgenet_x,288,3348.54,305.78,1024,47.82,14.59,16.36\r\ncs3se_edgenet_x,256,3347.57,305.87,1024,50.72,11.53,12.94\r\nresnetv2_34d,448,3323.81,308.056,1024,21.82,15.64,18.16\r\ndeit3_base_patch16_224,224,3319.91,308.418,1024,86.59,17.58,23.9\r\nvit_relpos_base_patch16_224,224,3315.22,308.848,1024,86.43,17.51,24.97\r\nvit_relpos_base_patch16_clsgap_224,224,3311.61,309.189,1024,86.43,17.6,25.12\r\npvt_v2_b3,224,3309.64,309.374,1024,45.24,6.92,37.7\r\nvit_relpos_base_patch16_cls_224,224,3305.52,309.756,1024,86.43,17.6,25.12\r\ncs3sedarknet_x,288,3301.67,310.122,1024,35.4,10.6,14.37\r\nconvformer_s36,224,329
6.75,310.586,1024,40.01,7.67,30.5\r\nbeitv2_base_patch16_224,224,3296.38,310.621,1024,86.53,17.58,23.9\r\nbeit_base_patch16_224,224,3294.42,310.803,1024,86.53,17.58,23.9\r\nseresnext50_32x4d,288,3284.25,311.766,1024,27.56,7.04,23.82\r\ncaformer_s36,224,3278.5,312.314,1024,39.3,8.0,37.53\r\nvit_base_patch16_siglip_224,224,3278.21,312.339,1024,92.88,17.73,24.06\r\ndeit_base_distilled_patch16_224,224,3275.61,312.59,1024,87.34,17.68,24.05\r\nefficientnet_b3,288,3271.54,156.477,512,12.23,1.63,21.49\r\nese_vovnet99b,224,3267.11,313.403,1024,63.2,16.51,11.27\r\nmvitv2_small,224,3265.77,313.53,1024,34.87,7.0,28.08\r\nvit_base_patch32_clip_448,448,3263.96,313.706,1024,88.34,17.93,23.9\r\nvit_betwixt_patch16_reg4_gap_256,256,3258.19,314.263,1024,60.4,16.52,28.24\r\nseresnetaa50d,288,3256.85,314.389,1024,28.11,8.92,20.59\r\ndavit_small,224,3251.97,236.14,768,49.75,8.8,30.49\r\nvit_base_mci_224,224,3249.88,315.063,1024,86.35,17.73,24.65\r\nhrnet_w30,224,3247.48,315.296,1024,37.71,8.15,21.21\r\nregnety_064,224,3243.49,315.685,1024,30.58,6.39,16.41\r\nbeit3_base_patch16_224,224,3240.44,315.984,1024,86.66,17.63,23.9\r\nvit_base_patch16_gap_224,224,3228.89,317.112,1024,86.57,17.49,25.59\r\nvgg19_bn,224,3216.24,318.359,1024,143.68,19.66,14.86\r\nnextvit_large,224,3215.77,318.406,1024,57.83,10.77,28.99\r\nvgg19,224,3214.49,318.532,1024,143.67,19.63,14.86\r\nresnet152s,224,3211.29,318.851,1024,60.32,12.92,24.96\r\nfocalnet_small_srf,224,3208.8,319.097,1024,49.89,8.62,26.26\r\nvit_pe_core_base_patch16_224,224,3207.28,319.244,1024,93.67,17.82,24.21\r\nresnetv2_50d_evos,288,3196.17,320.36,1024,25.59,7.15,19.7\r\nmaxvit_tiny_rw_224,224,3193.37,160.308,512,29.06,5.11,33.11\r\nlegacy_xception,299,3182.8,241.274,768,22.86,8.4,35.83\r\nfastvit_sa36,256,3178.52,322.138,1024,31.46,5.59,29.46\r\nefficientnet_el_pruned,300,3172.12,322.787,1024,10.59,8.0,30.7\r\nefficientnet_el,300,3171.64,322.838,1024,10.59,8.0,30.7\r\ngcvit_tiny,224,3171.46,322.854,1024,28.22,4.79,29.82\r\nresnetv2_50d_gn,288,31
55.22,324.519,1024,25.57,7.24,19.7\r\ncoatnet_1_rw_224,224,3154.4,162.289,512,41.72,8.04,34.6\r\nvit_base_patch16_rope_224,224,3153.89,324.652,1024,86.43,17.58,23.9\r\nvit_small_patch16_384,384,3151.39,324.911,1024,22.2,15.52,50.78\r\ntf_efficientnet_el,300,3148.51,325.21,1024,10.59,8.0,30.7\r\nlambda_resnet50ts,256,3148.33,325.226,1024,21.54,5.07,17.48\r\nmobilevitv2_200,256,3125.79,122.826,384,18.45,7.22,32.15\r\nswin_small_patch4_window7_224,224,3122.47,327.922,1024,49.61,8.77,27.47\r\ndeit3_small_patch16_384,384,3109.12,329.33,1024,22.21,15.52,50.78\r\nfasternet_l,224,3105.49,329.716,1024,93.47,15.52,20.46\r\neva02_base_patch16_clip_224,224,3101.28,330.163,1024,86.26,17.62,26.32\r\nresnetv2_152,224,3098.89,330.417,1024,60.19,11.55,22.56\r\nregnety_080,224,3092.57,331.092,1024,39.18,8.0,17.97\r\nvit_base_patch16_rope_ape_224,224,3092.2,331.13,1024,86.59,17.58,23.9\r\ntf_efficientnetv2_s,300,3082.09,332.218,1024,21.46,5.35,22.73\r\nresnet101,288,3060.83,334.521,1024,44.55,12.95,26.83\r\ndla102x2,224,3047.15,336.027,1024,41.28,9.34,29.91\r\nnfnet_f0,256,3038.98,336.931,1024,71.49,12.62,18.05\r\nmobilenetv4_conv_aa_large,384,3031.92,337.716,1024,32.59,7.07,32.29\r\nvit_betwixt_patch16_rope_reg4_gap_256,256,3031.26,337.79,1024,60.23,16.52,28.24\r\nnf_regnet_b4,384,3029.18,338.02,1024,30.21,4.7,28.61\r\ndm_nfnet_f0,256,3017.16,339.366,1024,71.49,12.62,18.05\r\nxception41p,299,3016.49,169.71,512,26.91,9.25,39.86\r\nhrnet_w18_ssld,288,3010.23,340.148,1024,21.3,7.14,26.96\r\ndpn92,224,3006.05,340.622,1024,37.67,6.54,18.21\r\nresnetv2_152d,224,3002.93,340.975,1024,60.2,11.8,23.36\r\nmvitv2_small_cls,224,2995.41,341.831,1024,34.87,7.04,28.17\r\nvit_base_patch16_rpn_224,224,2984.69,343.058,1024,86.54,17.49,23.75\r\ndensenet161,224,2977.79,257.884,768,28.68,7.79,11.06\r\nfocalnet_small_lrf,224,2977.53,343.883,1024,50.34,8.74,28.61\r\nmobilenetv4_hybrid_medium,448,2963.24,259.151,768,11.07,4.2,29.64\r\ntwins_pcpvt_large,224,2944.63,347.723,1024,60.99,9.84,35.82\r\nrepvgg_b3,2
24,2931.77,349.254,1024,110.96,26.21,7.55\r\nvit_pe_core_small_patch16_384,384,2930.85,349.36,1024,23.78,15.69,51.23\r\nmaxxvit_rmlp_tiny_rw_256,256,2929.18,349.561,1024,29.64,6.66,39.76\r\nvit_relpos_base_patch16_rpn_224,224,2924.44,350.123,1024,86.41,17.51,24.97\r\ngmlp_b16_224,224,2922.18,350.4,1024,73.08,15.78,30.21\r\nregnety_032,288,2913.45,351.449,1024,19.44,5.29,18.61\r\necaresnet50t,320,2906.64,352.272,1024,25.57,8.82,24.13\r\nswinv2_cr_small_224,224,2903.6,352.64,1024,49.7,9.07,50.27\r\nregnetz_040,256,2902.52,352.773,1024,27.12,4.06,24.19\r\nswinv2_cr_small_ns_224,224,2889.5,354.362,1024,49.7,9.08,50.27\r\nregnetz_040_h,256,2873.06,356.386,1024,28.94,4.12,24.29\r\nflexivit_base,240,2870.99,356.649,1024,86.59,20.29,28.36\r\nefficientformer_l7,224,2857.68,358.309,1024,82.23,10.17,24.45\r\nregnety_080_tv,224,2855.26,358.612,1024,39.38,8.51,19.73\r\nregnetz_d8,256,2850.18,359.251,1024,23.37,3.97,23.74\r\nwide_resnet50_2,288,2830.2,361.786,1024,68.88,18.89,23.81\r\ncait_xxs36_224,224,2827.44,362.139,1024,17.3,3.77,30.34\r\nvit_mediumd_patch16_reg4_gap_256,256,2826.35,362.272,1024,64.11,17.87,37.57\r\nefficientvit_b3,256,2799.94,274.267,768,48.65,5.2,35.01\r\nese_vovnet57b,320,2797.05,366.075,1024,38.61,18.26,15.34\r\ndpn98,224,2796.75,366.114,1024,61.57,11.73,25.2\r\neva02_small_patch14_336,336,2787.29,367.358,1024,22.13,15.48,54.33\r\ninception_resnet_v2,299,2777.63,368.634,1024,55.84,13.18,25.06\r\nvit_base_patch16_rope_mixed_ape_224,224,2777.09,368.705,1024,86.59,17.58,25.7\r\nlegacy_seresnet152,224,2776.33,368.807,1024,66.82,11.33,22.08\r\nregnety_040_sgn,288,2770.45,369.591,1024,20.65,6.67,20.3\r\nvit_base_patch16_rope_mixed_224,224,2765.4,370.264,1024,86.44,17.58,25.7\r\nefficientvit_l2,288,2763.98,277.837,768,63.71,11.51,32.19\r\nregnetz_d8_evos,256,2763.5,370.52,1024,23.46,4.5,24.92\r\nregnetx_120,224,2746.26,372.848,1024,46.11,12.13,21.37\r\npoolformerv2_m36,224,2734.87,374.4,1024,56.08,8.81,22.02\r\nseresnet152,224,2730.86,374.949,1024,66.82,11.57,22
.61\r\nresnetv2_101,288,2726.63,375.53,1024,44.54,12.94,26.83\r\nlevit_conv_384_s8,224,2719.98,376.449,1024,39.06,9.95,35.86\r\nresnetaa101d,288,2709.1,377.961,1024,44.57,15.07,29.03\r\nxcit_tiny_24_p16_384,384,2706.27,378.354,1024,12.12,6.87,34.29\r\nhiera_small_abswin_256,256,2692.43,380.302,1024,34.36,8.29,26.38\r\nconvnext_base,224,2688.67,380.834,1024,88.59,15.38,28.75\r\ncoatnet_rmlp_1_rw2_224,224,2664.51,192.128,512,41.72,8.11,40.13\r\nmobilenetv4_conv_large,448,2664.36,288.226,768,32.59,8.75,37.17\r\nresnext101_64x4d,224,2658.09,385.214,1024,83.46,15.52,31.21\r\nwide_resnet101_2,224,2656.57,385.433,1024,126.89,22.8,21.23\r\nefficientnet_b3,320,2648.04,193.326,512,12.23,2.01,26.52\r\nlevit_384_s8,224,2647.24,386.794,1024,39.06,9.95,35.86\r\nsequencer2d_m,224,2646.13,386.952,1024,38.31,6.55,14.26\r\nswinv2_tiny_window16_256,256,2645.93,386.985,1024,28.35,6.68,39.02\r\nnest_tiny,224,2634.21,388.706,1024,17.06,5.83,25.48\r\nresnetblur101d,288,2632.62,388.942,1024,44.57,15.07,29.65\r\nresnet152d,256,2626.88,389.792,1024,60.21,15.41,30.51\r\nnest_tiny_jx,224,2616.76,391.299,1024,17.06,5.83,25.48\r\ninception_next_base,224,2614.86,391.583,1024,86.67,14.85,25.69\r\nfastvit_ma36,256,2614.26,391.673,1024,43.98,7.82,34.98\r\nvit_mediumd_patch16_rope_reg1_gap_256,256,2607.63,392.66,1024,63.95,17.65,37.02\r\nvit_base_patch16_siglip_gap_256,256,2598.41,394.061,1024,85.84,23.13,33.23\r\nhrnet_w40,224,2594.47,394.658,1024,57.56,12.75,25.29\r\nresnext101_32x8d,224,2592.44,394.97,1024,88.79,16.48,31.21\r\nresnext101_32x4d,288,2591.49,395.11,1024,44.18,13.24,35.09\r\ntf_efficientnet_b3,300,2584.41,198.087,512,12.23,1.87,23.83\r\nregnety_120,224,2580.45,396.806,1024,51.82,12.14,21.38\r\nvit_base_patch16_siglip_256,256,2573.79,397.832,1024,92.93,23.44,33.63\r\nhgnetv2_b6,224,2562.29,399.617,1024,75.26,16.88,21.23\r\ntwins_svt_base,224,2557.49,400.364,1024,56.07,8.59,26.33\r\nefficientnet_b3_gn,288,2553.91,200.453,512,11.73,1.74,23.35\r\nconvnextv2_base,224,2539.98,403.129,1024,8
8.72,15.38,28.75\r\nrexnetr_300,288,2538.48,100.824,256,34.81,5.59,36.61\r\nmaxvit_tiny_tf_224,224,2531.5,202.225,512,30.92,5.6,35.78\r\nresnet200,224,2531.24,404.52,1024,64.67,15.07,32.19\r\ncrossvit_base_240,240,2527.77,405.075,1024,105.03,21.22,36.33\r\nvit_large_r50_s32_224,224,2506.42,408.519,1024,328.99,19.58,24.41\r\nvit_small_r26_s32_384,384,2503.26,409.041,1024,36.47,10.43,29.85\r\nregnetz_d32,256,2500.8,409.446,1024,27.58,5.98,23.74\r\nhiera_base_224,224,2499.51,409.653,1024,51.52,9.4,30.42\r\nfastvit_mci2,256,2497.53,409.98,1024,35.7,7.85,36.09\r\ncoat_tiny,224,2479.21,413.006,1024,5.5,4.35,27.2\r\nregnetz_c16,320,2457.45,416.667,1024,13.46,3.92,25.88\r\nmaxvit_tiny_rw_256,256,2449.28,156.755,384,29.07,6.74,44.35\r\nmaxvit_rmlp_tiny_rw_256,256,2444.21,157.082,384,29.15,6.77,46.92\r\nmambaout_base_tall_rw,224,2417.45,423.554,1024,86.48,16.15,38.74\r\nconvnext_small,288,2415.72,423.866,1024,50.22,14.39,35.65\r\nxception41,299,2411.59,212.283,512,26.97,9.28,39.86\r\nvit_base_patch16_reg4_gap_256,256,2407.43,425.325,1024,86.62,23.5,33.89\r\nmobilenetv4_hybrid_large,384,2404.63,425.821,1024,37.76,7.77,34.52\r\nefficientnet_lite4,380,2381.8,161.199,384,13.01,4.04,45.66\r\nseresnet101,288,2369.89,432.063,1024,49.33,12.95,26.87\r\nresnet101d,320,2369.55,432.126,1024,44.57,16.48,34.77\r\ncoatnet_1_224,224,2368.06,162.134,384,42.23,8.7,39.0\r\nregnetz_c16_evos,320,2367.08,432.576,1024,13.49,3.86,25.88\r\ntf_efficientnet_lite4,380,2363.83,162.424,384,13.01,4.04,45.66\r\ncaformer_m36,224,2357.27,434.375,1024,56.2,13.29,50.48\r\nconvformer_m36,224,2356.54,434.511,1024,57.05,12.89,42.05\r\nvit_base_patch16_plus_clip_240,240,2342.14,437.18,1024,117.21,27.41,33.08\r\nregnetx_160,224,2337.97,437.963,1024,54.28,15.99,25.52\r\nswinv2_base_window12_192,192,2333.36,438.828,1024,109.28,11.9,39.72\r\npvt_v2_b5,224,2331.22,439.228,1024,81.96,11.76,50.92\r\ntresnet_xl,224,2325.67,440.277,1024,78.44,15.2,15.34\r\npvt_v2_b4,224,2317.83,441.767,1024,62.56,10.14,53.74\r\nseresnext101
_64x4d,224,2313.64,442.562,1024,88.23,15.53,31.25\r\nswin_s3_small_224,224,2303.65,444.487,1024,49.74,9.43,37.84\r\nxcit_medium_24_p16_224,224,2299.94,445.196,1024,84.4,16.13,31.71\r\necaresnet101d,288,2284.9,448.135,1024,44.57,13.35,28.19\r\nrdnet_base,224,2282.28,448.648,1024,87.45,15.4,31.14\r\nconvnext_tiny,384,2276.12,449.862,1024,28.59,13.14,39.48\r\nseresnext101_32x8d,224,2275.33,450.02,1024,93.57,16.48,31.25\r\nvit_base_patch16_rope_reg1_gap_256,256,2273.98,450.282,1024,86.43,23.22,33.39\r\nresnetrs101,288,2269.83,451.111,1024,63.62,13.56,28.53\r\npoolformer_m36,224,2268.47,451.381,1024,56.17,8.8,22.02\r\ndavit_base,224,2261.04,339.641,768,87.95,15.51,40.66\r\neva02_base_patch14_224,224,2252.73,454.536,1024,85.76,23.22,36.55\r\nmvitv2_base,224,2248.57,455.377,1024,51.47,10.16,40.5\r\neca_nfnet_l1,320,2246.57,455.781,1024,41.41,14.92,34.42\r\nhrnet_w44,224,2244.53,456.194,1024,67.06,14.94,26.92\r\nvit_relpos_base_patch16_plus_240,240,2243.17,456.469,1024,117.38,27.3,34.33\r\nhgnet_base,224,2237.83,457.559,1024,71.58,25.14,15.47\r\ncait_s24_224,224,2233.18,458.513,1024,46.92,9.35,40.58\r\nseresnext101d_32x8d,224,2232.75,458.602,1024,93.59,16.72,32.05\r\nmixnet_xxl,224,2230.06,459.154,1024,23.96,2.04,23.43\r\nmobilenetv4_conv_aa_large,448,2228.09,344.665,768,32.59,9.63,43.94\r\nhrnet_w48,224,2223.25,460.56,1024,77.47,17.34,28.56\r\nefficientnet_b3_g8_gn,288,2219.17,461.407,1024,14.25,2.59,23.35\r\nefficientvit_b3,288,2216.86,346.41,768,48.65,6.58,44.2\r\nmambaout_base_short_rw,224,2216.69,461.913,1024,88.83,16.31,38.08\r\nxception65p,299,2215.76,231.045,512,39.82,13.91,52.48\r\nswinv2_small_window8_256,256,2215.48,462.179,1024,49.73,11.58,40.14\r\nvit_base_patch16_plus_240,240,2212.99,462.695,1024,117.56,27.41,33.08\r\nxcit_small_12_p16_384,384,2212.44,462.811,1024,26.25,14.14,36.51\r\nhrnet_w48_ssld,224,2212.0,462.902,1024,77.47,17.34,28.56\r\nswinv2_cr_small_ns_256,256,2211.39,463.031,1024,49.7,12.07,76.21\r\nswin_base_patch4_window7_224,224,2205.57,464.256,1
024,87.77,15.47,36.63\r\nresnest101e,256,2197.47,465.966,1024,48.28,13.38,28.66\r\nvit_base_r50_s16_224,224,2195.08,466.475,1024,97.89,21.66,35.28\r\nvit_small_patch8_224,224,2192.42,467.034,1024,21.67,22.44,80.84\r\nfocalnet_base_srf,224,2184.57,468.715,1024,88.15,15.28,35.01\r\nconvformer_s18,384,2163.1,473.371,1024,26.77,11.63,46.49\r\ncs3se_edgenet_x,320,2154.5,475.26,1024,50.72,18.01,20.21\r\nregnety_160,224,2151.79,475.857,1024,83.59,15.96,23.04\r\nresnet152,288,2145.86,477.149,1024,60.19,19.11,37.28\r\ncaformer_s18,384,2133.31,479.981,1024,26.34,13.42,77.34\r\nnfnet_f1,224,2128.64,481.033,1024,132.63,17.87,22.94\r\ndm_nfnet_f1,224,2124.5,481.97,1024,132.63,17.87,22.94\r\nconvnextv2_tiny,384,2123.58,482.181,1024,28.64,13.14,39.48\r\nseresnextaa101d_32x8d,224,2114.29,484.3,1024,93.59,17.25,34.16\r\nvolo_d2_224,224,2103.78,486.714,1024,58.68,14.34,41.34\r\nmambaout_small,288,2099.86,487.625,1024,48.49,14.81,45.82\r\nseresnext101_32x4d,288,2078.47,492.646,1024,48.96,13.25,35.12\r\nvit_base_patch16_dinov3_qkvb,256,2074.91,493.488,1024,85.66,23.6,34.06\r\nresnet50x4_clip_gap,288,2072.62,494.029,1024,65.62,19.57,34.11\r\npoolformerv2_m48,224,2068.98,494.905,1024,73.35,11.59,29.17\r\nefficientnet_b3_gn,320,2064.75,185.955,384,11.73,2.14,28.83\r\nconvnext_base,256,2059.57,497.167,1024,88.59,20.09,37.55\r\nregnetv_064,288,2059.07,497.286,1024,30.58,10.55,27.11\r\nsamvit_base_patch16_224,224,2056.72,497.85,1024,86.46,17.54,24.54\r\nswinv2_cr_base_224,224,2055.48,498.155,1024,87.88,15.86,59.66\r\nxcit_tiny_24_p8_224,224,2055.29,498.197,1024,12.11,9.21,45.39\r\nfocalnet_base_lrf,224,2054.88,498.301,1024,88.75,15.43,38.13\r\nswinv2_cr_base_ns_224,224,2054.61,498.364,1024,87.88,15.86,59.66\r\ngcvit_small,224,2051.05,499.23,1024,51.09,8.57,41.61\r\ndpn131,224,2041.84,501.483,1024,79.25,16.09,32.97\r\nseresnet152d,256,2037.31,502.598,1024,66.84,15.42,30.56\r\nmambaout_small_rw,288,2036.16,502.884,1024,48.5,14.81,45.82\r\nmvitv2_base_cls,224,2032.84,503.703,1024,65.44,10.23,40
.65\r\nresnetrs152,256,2028.81,504.704,1024,86.62,15.59,30.83\r\nvit_base_patch16_dinov3,256,2027.27,505.084,1024,85.64,23.6,34.06\r\nnf_regnet_b5,384,2024.49,505.781,1024,49.74,7.95,42.9\r\nefficientnetv2_s,384,2022.74,506.219,1024,21.46,8.44,35.77\r\ncoat_lite_medium,224,2022.26,506.339,1024,44.57,9.81,40.06\r\nvit_medium_patch16_gap_384,384,2022.17,506.357,1024,39.03,26.08,67.54\r\nmaxvit_rmlp_small_rw_224,224,2019.35,253.517,512,64.9,10.75,49.3\r\ntiny_vit_21m_384,384,1998.76,256.136,512,21.22,13.72,77.83\r\nefficientnet_b4,320,1992.79,256.901,512,19.34,3.13,34.76\r\nregnety_064,288,1985.36,515.752,1024,30.58,10.56,27.11\r\nmambaout_base_wide_rw,224,1984.5,515.96,1024,94.45,17.78,42.6\r\nxcit_nano_12_p8_384,384,1977.45,517.809,1024,3.05,6.34,46.08\r\nresnet50x4_clip,288,1971.19,519.458,1024,87.14,21.35,35.27\r\nmaxvit_tiny_pm_256,256,1965.06,195.385,384,30.09,6.61,47.9\r\ntf_efficientnetv2_s,384,1945.46,526.329,1024,21.46,8.44,35.77\r\ncrossvit_15_dagger_408,408,1944.56,526.569,1024,28.5,21.45,95.05\r\nmambaout_base,224,1941.81,527.308,1024,84.81,15.83,36.95\r\nmobilenetv4_conv_aa_large,480,1939.55,395.944,768,32.59,11.05,50.45\r\ntresnet_m,448,1938.92,528.104,1024,31.39,22.99,29.21\r\ncoat_mini,224,1928.84,530.863,1024,10.34,6.82,33.68\r\nconvit_base,224,1921.89,532.784,1024,86.54,17.52,31.77\r\nefficientnetv2_rw_s,384,1912.31,535.453,1024,23.94,8.72,38.03\r\nfastvit_mci3,256,1910.92,535.842,1024,125.07,14.82,44.88\r\ntnt_b_patch16_224,224,1909.16,536.336,1024,65.43,14.1,39.01\r\nresnet200d,256,1896.88,539.807,1024,64.69,20.0,43.09\r\nmobilevitv2_150,384,1885.28,135.765,256,10.59,9.2,54.25\r\nhiera_base_plus_224,224,1876.24,545.748,1024,69.9,12.67,37.98\r\nregnetz_040,320,1857.99,551.11,1024,27.12,6.35,37.78\r\nmambaout_base_plus_rw,224,1849.93,553.499,1024,101.66,19.19,45.16\r\nregnetz_040_h,320,1844.54,555.126,1024,28.94,6.43,37.94\r\nregnety_080,288,1844.25,555.209,1024,39.18,13.22,29.69\r\nregnetz_d8,320,1829.25,559.766,1024,23.37,6.19,37.08\r\nvitamin_base
_224,224,1820.06,140.631,256,87.72,22.68,52.77\r\nmaxxvit_rmlp_small_rw_256,256,1817.66,563.335,1024,66.01,14.67,58.38\r\nefficientnet_b3_g8_gn,320,1810.03,565.712,1024,14.25,3.2,28.83\r\nhiera_base_abswin_256,256,1805.96,566.985,1024,51.27,12.46,40.7\r\nmobilenetv5_base,256,1803.95,283.796,512,82.65,20.05,36.89\r\ndpn107,224,1798.14,569.453,1024,86.92,18.38,33.46\r\nswin_s3_base_224,224,1793.41,570.954,1024,71.13,13.69,48.26\r\nnextvit_small,384,1768.62,578.958,1024,31.74,17.25,57.14\r\nregnetz_d8_evos,320,1767.61,579.286,1024,23.46,7.03,38.92\r\nxception65,299,1766.63,289.791,512,39.92,13.96,52.48\r\ncoatnet_2_rw_224,224,1751.4,146.144,256,73.87,15.09,49.22\r\nhrnet_w64,224,1744.75,586.876,1024,128.06,28.97,35.09\r\nhalonet_h1,256,1735.25,442.563,768,8.1,3.0,51.17\r\nefficientnetv2_m,320,1723.44,594.136,1024,54.14,11.01,39.97\r\nsequencer2d_l,224,1713.57,597.556,1024,54.3,9.74,22.12\r\ncoatnet_rmlp_2_rw_224,224,1711.88,149.518,256,73.88,15.18,54.78\r\nxcit_small_12_p8_224,224,1711.26,598.363,1024,26.21,18.69,47.21\r\nefficientvit_l3,224,1708.68,299.622,512,246.04,27.62,39.16\r\nlevit_conv_512_s8,224,1696.89,452.549,768,73.97,21.77,52.28\r\npoolformer_m48,224,1687.7,606.719,1024,73.47,11.59,29.17\r\nresnet152d,320,1685.19,607.622,1024,60.21,24.08,47.67\r\nmobilenetv4_hybrid_large,448,1683.41,456.192,768,37.76,10.74,48.61\r\nnest_small,224,1654.71,618.815,1024,38.35,10.35,40.04\r\nseresnet152,288,1648.96,620.971,1024,66.82,19.11,37.34\r\nnest_small_jx,224,1647.94,621.357,1024,38.35,10.35,40.04\r\nlevit_512_s8,224,1647.73,621.437,1024,73.97,21.77,52.28\r\nconvformer_b36,224,1644.02,622.839,1024,99.88,22.69,56.06\r\ncaformer_b36,224,1640.04,624.352,1024,98.75,23.22,67.3\r\ncoatnet_2_224,224,1633.78,156.668,256,74.68,16.5,52.67\r\nmaxvit_small_tf_224,224,1629.75,235.595,384,68.93,11.66,53.17\r\nconvnext_base,288,1628.78,628.668,1024,88.59,25.43,47.53\r\ndensenet264d,224,1616.06,158.381,256,72.74,13.57,14.0\r\nresnext101_64x4d,288,1615.51,633.827,1024,83.46,25.66,51.59\
r\nconvmixer_768_32,224,1613.6,634.582,1024,21.11,19.55,25.95\r\ntwins_svt_large,224,1607.44,637.006,1024,99.27,15.15,35.1\r\nvit_so150m_patch16_reg4_gap_256,256,1606.17,637.515,1024,134.13,36.75,53.21\r\nswinv2_small_window16_256,256,1602.61,638.932,1024,49.73,12.82,66.29\r\nregnety_120,288,1593.41,642.621,1024,51.82,20.06,35.34\r\nmobilevitv2_175,384,1589.43,161.04,256,14.25,12.47,63.29\r\nvit_so150m_patch16_reg4_map_256,256,1588.78,644.494,1024,141.48,37.17,53.68\r\neca_nfnet_l2,320,1584.75,646.132,1024,56.72,20.95,47.43\r\nregnetz_e8,256,1582.69,646.974,1024,57.7,9.91,40.94\r\nregnetz_d32,320,1579.46,648.295,1024,27.58,9.33,37.08\r\nmaxvit_rmlp_small_rw_256,256,1565.35,245.286,384,64.9,14.15,66.09\r\nefficientvit_l2,384,1564.79,245.375,384,63.71,20.45,57.01\r\nhgnetv2_b6,288,1556.62,657.809,1024,75.26,27.9,35.09\r\nswinv2_base_window8_256,256,1546.26,662.218,1024,87.92,20.37,52.59\r\nresnet200,288,1537.87,665.83,1024,64.67,24.91,53.21\r\nconvnextv2_base,288,1536.09,666.599,1024,88.72,25.43,47.53\r\nvolo_d3_224,224,1518.79,674.189,1024,86.33,20.78,60.09\r\nvit_so150m2_patch16_reg1_gap_256,256,1516.05,675.41,1024,136.06,37.0,56.93\r\nmobilenetv4_conv_aa_large,544,1508.59,339.365,512,32.59,14.19,64.79\r\nvit_pe_spatial_small_patch16_512,512,1499.81,682.725,1024,21.98,31.8,123.27\r\nefficientnetv2_rw_m,320,1495.17,684.844,1024,53.24,12.72,47.14\r\nmaxxvitv2_rmlp_base_rw_224,224,1485.22,689.435,1024,116.09,24.2,62.77\r\ncoat_small,224,1484.49,689.772,1024,21.69,12.61,44.25\r\nconvnext_large,224,1479.77,691.975,1024,197.77,34.4,43.13\r\ngcvit_base,224,1472.95,695.178,1024,90.32,14.87,55.48\r\nvit_betwixt_patch16_reg4_gap_384,384,1459.92,701.387,1024,60.6,39.71,85.28\r\necaresnet200d,256,1450.92,705.734,1024,64.69,20.0,43.15\r\nseresnet200d,256,1449.4,706.472,1024,71.86,20.01,43.15\r\nresnetrs200,256,1442.91,709.651,1024,93.21,20.18,43.42\r\nresnext101_32x16d,224,1429.22,716.451,1024,194.03,36.27,51.18\r\nconvnextv2_large,224,1406.78,727.878,1024,197.96,34.4,43.13\r\ns
enet154,224,1405.57,728.502,1024,115.09,20.77,38.69\r\nlegacy_senet154,224,1401.54,730.601,1024,115.09,20.77,38.69\r\nnf_regnet_b5,456,1401.29,730.729,1024,49.74,11.7,61.95\r\nresnetv2_50x1_bit,448,1394.51,367.129,512,25.55,16.62,44.46\r\nefficientnet_b4,384,1383.02,277.629,384,19.34,4.51,50.04\r\nvit_large_patch32_384,384,1364.79,750.265,1024,306.63,45.31,43.86\r\nxception71,299,1362.53,375.744,512,42.34,18.09,69.92\r\nswinv2_large_window12_192,192,1361.95,751.839,1024,228.77,26.17,56.53\r\nnextvit_base,384,1358.81,753.575,1024,44.79,24.62,73.95\r\nseresnext101_32x8d,288,1357.77,754.153,1024,93.57,27.24,51.63\r\nconvnext_small,384,1357.48,754.309,1024,50.22,25.58,63.37\r\nhrnet_w48_ssld,288,1353.4,756.583,1024,77.47,28.66,47.21\r\nhgnet_base,288,1352.31,378.587,512,71.58,41.55,25.57\r\nmambaout_base_short_rw,288,1349.07,758.997,1024,88.83,26.96,62.94\r\nregnety_160,288,1342.87,762.52,1024,83.59,26.37,38.07\r\ncrossvit_18_dagger_408,408,1339.8,764.268,1024,44.61,32.47,124.87\r\nseresnext101d_32x8d,288,1330.78,769.448,1024,93.59,27.64,52.95\r\nefficientvit_l3,256,1321.24,290.613,384,246.04,36.06,50.98\r\nmambaout_base_tall_rw,288,1318.35,776.686,1024,86.48,26.69,64.04\r\nconvnext_base,320,1318.11,776.837,1024,88.59,31.39,58.68\r\nxcit_tiny_12_p8_384,384,1312.13,780.383,1024,6.71,14.13,69.14\r\nswinv2_cr_tiny_384,384,1307.91,391.441,512,28.33,15.34,161.01\r\nseresnet152d,320,1307.47,783.165,1024,66.84,24.09,47.72\r\nresnetrs152,320,1303.24,785.708,1024,86.62,24.34,48.14\r\nregnety_320,224,1300.95,787.087,1024,145.05,32.34,30.26\r\nrdnet_large,224,1291.85,792.633,1024,186.27,34.74,46.67\r\nresnetv2_50x3_bit,224,1288.61,595.967,768,217.32,37.06,33.34\r\nxcit_large_24_p16_224,224,1286.57,795.887,1024,189.1,35.86,47.27\r\ndavit_large,224,1283.28,598.44,768,196.81,34.6,60.99\r\nfastvit_mci4,256,1281.32,599.353,768,321.57,27.78,60.59\r\nvolo_d1_384,384,1280.13,799.888,1024,26.78,22.75,108.55\r\nseresnextaa101d_32x8d,288,1264.3,809.906,1024,93.59,28.51,56.44\r\nmobilevitv2_2
00,384,1263.68,151.913,192,18.45,16.24,72.34\r\ntf_efficientnet_b4,380,1263.5,303.894,384,19.34,4.49,49.49\r\nregnetx_320,224,1260.43,812.398,1024,107.81,31.81,36.3\r\nswin_large_patch4_window7_224,224,1255.29,815.725,1024,196.53,34.53,54.94\r\nvit_mediumd_patch16_reg4_gap_384,384,1235.56,828.74,1024,64.27,43.67,113.51\r\nresnet200d,320,1218.99,840.017,1024,64.69,31.25,67.33\r\nmambaout_base_wide_rw,288,1203.75,637.964,768,94.45,29.39,70.41\r\nnfnet_f2,256,1193.83,857.718,1024,193.78,33.76,41.85\r\nxcit_small_24_p16_384,384,1191.49,859.399,1024,47.67,26.72,68.58\r\ndm_nfnet_f2,256,1186.54,862.984,1024,193.78,33.76,41.85\r\nnest_base,224,1184.4,864.546,1024,67.72,17.96,53.39\r\nnest_base_jx,224,1180.29,867.552,1024,67.72,17.96,53.39\r\nmambaout_base,288,1178.5,868.859,1024,84.81,26.16,61.08\r\nswinv2_cr_large_224,224,1172.22,873.53,1024,196.68,35.1,78.42\r\ntf_efficientnetv2_m,384,1159.09,883.428,1024,54.14,15.85,57.52\r\nvit_small_patch14_dinov2,518,1154.08,887.259,1024,22.06,46.76,198.79\r\nvit_small_patch14_reg4_dinov2,518,1153.49,887.707,1024,22.06,46.95,199.77\r\nseresnet200d,288,1144.4,894.763,1024,71.86,25.32,54.6\r\necaresnet200d,288,1143.71,895.303,1024,64.69,25.31,54.59\r\nvit_base_patch16_18x2_224,224,1143.18,895.719,1024,256.73,52.51,71.38\r\nmixer_l16_224,224,1142.04,896.617,1024,208.2,44.6,41.69\r\nswinv2_base_window12to16_192to256,256,1137.96,674.863,768,87.92,22.02,84.71\r\nseresnet269d,256,1137.28,900.364,1024,113.67,26.59,53.6\r\nmambaout_base_plus_rw,288,1136.79,900.74,1024,101.66,31.72,74.64\r\nswinv2_base_window16_256,256,1136.19,675.909,768,87.92,22.02,84.71\r\nconvnext_large_mlp,256,1134.8,902.34,1024,200.13,44.94,56.33\r\nconvformer_s36,384,1134.02,902.928,1024,40.01,22.54,89.62\r\nmaxvit_rmlp_base_rw_224,224,1124.77,455.178,512,116.14,23.15,92.64\r\ncaformer_s36,384,1117.53,916.28,1024,39.3,26.08,150.33\r\nnaflexvit_base_patch16_siglip,384,1114.78,918.544,1024,92.93,56.12,102.2\r\nvit_base_patch16_clip_384,384,1111.23,921.469,1024,86.86,55.54
,101.56\r\nnextvit_large,384,1102.71,928.597,1024,57.83,32.0,90.76\r\nnaflexvit_base_patch16_par_gap,384,1101.72,929.43,1024,86.63,55.86,102.34\r\nresnetrs270,256,1101.4,929.701,1024,129.86,27.06,55.84\r\nnaflexvit_base_patch16_parfac_gap,384,1101.36,929.736,1024,86.46,55.86,102.34\r\neca_nfnet_l2,384,1099.93,930.946,1024,56.72,30.05,68.28\r\nvit_base_patch16_siglip_gap_384,384,1095.71,934.519,1024,86.09,55.43,101.3\r\ndeit3_base_patch16_384,384,1090.32,939.149,1024,86.88,55.54,101.56\r\nnaflexvit_base_patch16_gap,384,1088.75,940.5,1024,86.63,55.86,102.34\r\nvit_base_patch16_siglip_384,384,1082.12,946.266,1024,93.18,56.12,102.2\r\nnfnet_f1,320,1081.55,946.765,1024,132.63,35.97,46.77\r\ndeit_base_patch16_384,384,1081.04,947.213,1024,86.86,55.54,101.56\r\nvit_base_patch16_384,384,1080.73,947.478,1024,86.86,55.54,101.56\r\ndm_nfnet_f1,320,1075.26,952.299,1024,132.63,35.97,46.77\r\nnaflexvit_base_patch16_map,384,1073.56,953.81,1024,93.72,56.23,102.46\r\ndeit_base_distilled_patch16_384,384,1071.38,955.755,1024,87.63,55.65,101.82\r\nrepvgg_d2se,320,1064.92,961.552,1024,120.39,66.99,23.42\r\nconvmixer_1024_20_ks9_p14,224,1048.67,976.451,1024,24.38,5.55,5.51\r\neva_large_patch14_196,196,1044.3,980.525,1024,304.14,61.57,63.52\r\ndeit3_large_patch16_224,224,1043.84,980.97,1024,304.37,61.6,63.52\r\ntiny_vit_21m_512,512,1043.25,245.364,256,21.26,26.93,177.93\r\nbeit_base_patch16_384,384,1036.56,987.857,1024,86.74,55.54,101.56\r\nvit_large_patch16_224,224,1035.76,988.611,1024,304.33,61.6,63.52\r\nbeitv2_large_patch16_224,224,1025.08,998.921,1024,304.43,61.6,63.52\r\nseresnextaa101d_32x8d,320,1020.48,1003.419,1024,93.59,35.19,69.67\r\ncoatnet_3_rw_224,224,1019.91,188.227,192,181.81,33.44,73.83\r\ncoatnet_rmlp_3_rw_224,224,1017.73,188.628,192,165.15,33.56,79.47\r\nbeit3_large_patch16_224,224,1016.54,1007.318,1024,304.57,61.72,63.52\r\nregnetz_e8,320,1014.2,757.218,768,57.7,15.46,63.94\r\nbeit_large_patch16_224,224,1014.08,1009.761,1024,304.43,61.6,63.52\r\nefficientnetv2_m,416,101
2.91,1010.923,1024,54.14,18.6,67.5\r\neca_nfnet_l3,352,1008.7,1015.137,1024,72.04,32.57,73.12\r\nvit_large_patch16_rope_ape_224,224,987.14,1037.301,1024,304.37,61.6,63.52\r\nvit_large_patch16_rope_224,224,978.28,1046.696,1024,304.17,61.6,63.52\r\nmaxvit_base_tf_224,224,970.04,395.829,384,119.47,24.04,95.01\r\ncoatnet_3_224,224,966.21,198.69,192,166.97,36.56,79.01\r\nresnetv2_152x2_bit,224,940.83,1088.37,1024,236.34,46.95,45.11\r\nmaxxvitv2_rmlp_large_rw_224,224,938.45,1091.136,1024,215.42,44.14,87.15\r\nconvnext_xlarge,224,932.35,1098.271,1024,350.2,60.98,57.5\r\nresnetrs200,320,923.89,1108.325,1024,93.21,31.51,67.81\r\nconvnext_base,384,918.39,836.218,768,88.59,45.21,84.49\r\nflexivit_large,240,905.14,1131.289,1024,304.36,70.99,75.39\r\nvit_large_patch16_rope_mixed_ape_224,224,900.21,1137.469,1024,304.4,61.6,68.34\r\nvit_large_patch16_rope_mixed_224,224,899.82,1137.965,1024,304.2,61.6,68.34\r\nxcit_small_24_p8_224,224,898.58,1139.544,1024,47.63,35.81,90.78\r\nseresnet269d,288,898.46,1139.695,1024,113.67,33.65,67.81\r\nconvnext_large,288,895.46,1143.512,1024,197.77,56.87,71.29\r\nvolo_d4_224,224,894.49,1144.762,1024,192.96,44.34,80.22\r\nefficientnetv2_rw_m,416,883.19,1159.4,1024,53.24,21.49,79.62\r\ninception_next_base,384,882.49,870.236,768,86.67,43.64,75.48\r\nnasnetalarge,331,875.68,584.66,512,88.75,23.89,90.56\r\ntresnet_l,448,868.83,1178.568,1024,55.99,43.59,47.56\r\nconvnextv2_base,384,864.2,1184.862,1024,88.72,45.21,84.49\r\nresnetv2_101x1_bit,448,863.6,592.839,512,44.54,31.65,64.93\r\nconvnextv2_large,288,852.71,1200.851,1024,197.96,56.87,71.29\r\nefficientvit_l3,320,850.3,301.047,256,246.04,56.32,79.34\r\npnasnet5large,331,844.82,909.047,768,86.06,25.04,92.89\r\nvit_large_r50_s32_384,384,841.17,1217.313,1024,329.09,57.43,76.52\r\nefficientnet_b5,416,840.77,304.456,256,30.39,8.27,80.68\r\nmaxvit_tiny_tf_384,384,835.2,229.859,192,30.98,17.53,123.42\r\ndavit_huge,224,830.66,924.542,768,348.92,61.23,81.32\r\nxcit_medium_24_p16_384,384,810.21,1263.845,1024,84.4
,47.39,91.64\r\nconvformer_m36,384,807.9,1267.453,1024,57.05,37.87,123.56\r\nvit_large_patch14_clip_quickgelu_224,224,805.67,1270.96,1024,303.97,81.08,88.79\r\nvit_large_patch14_clip_224,224,804.96,1272.085,1024,304.2,81.08,88.79\r\nvit_large_patch16_siglip_gap_256,256,804.5,1272.794,1024,303.36,80.8,88.34\r\nvit_large_patch16_siglip_256,256,802.46,1276.036,1024,315.96,81.34,88.88\r\ncaformer_m36,384,801.54,1277.513,1024,56.2,42.11,196.35\r\nswinv2_cr_small_384,384,796.81,642.536,512,49.7,29.7,298.03\r\nvit_large_patch14_xp_224,224,796.53,1285.557,1024,304.06,81.01,88.79\r\naimv2_large_patch14_224,224,790.71,1295.005,1024,309.2,82.3,85.2\r\nvit_large_patch14_224,224,782.83,1308.047,1024,304.2,81.08,88.79\r\nvit_base_patch8_224,224,781.11,1310.922,1024,86.58,78.22,161.69\r\nhiera_large_224,224,780.95,1311.196,1024,213.74,40.34,83.37\r\nresnest200e,320,779.93,1312.907,1024,70.2,35.69,82.78\r\nregnety_160,384,776.34,989.23,768,83.59,46.87,67.67\r\nregnety_640,224,756.81,1353.015,1024,281.38,64.16,42.5\r\ntf_efficientnetv2_m,480,742.41,1379.269,1024,54.14,24.76,89.84\r\nresnetv2_101x3_bit,224,740.91,1036.538,768,387.93,71.23,48.7\r\nefficientnet_x_b5,448,731.35,1050.081,768,33.44,23.35,68.87\r\necaresnet269d,320,729.18,1404.282,1024,102.09,41.53,83.69\r\nconvnext_large_mlp,320,725.78,1058.14,768,200.13,70.21,88.02\r\nefficientnet_b5,448,725.53,352.817,256,30.39,9.59,93.56\r\nvit_base_r50_s16_384,384,722.02,1418.217,1024,98.95,67.43,135.03\r\nvolo_d2_384,384,721.25,1419.714,1024,58.87,46.17,184.51\r\nvitamin_large_224,224,711.8,359.628,256,333.32,75.05,112.83\r\nmvitv2_large,224,711.75,1438.683,1024,217.99,43.87,112.02\r\nvitamin_large2_224,224,711.13,359.966,256,333.58,75.05,112.83\r\nvit_so150m_patch16_reg4_gap_384,384,702.65,1457.317,1024,134.42,87.97,165.47\r\ncait_xxs24_384,384,702.13,1458.383,1024,12.03,9.63,122.66\r\nresnetrs350,288,697.13,1468.857,1024,163.96,43.67,87.09\r\nxcit_tiny_24_p8_384,384,691.58,1480.64,1024,12.11,27.05,132.95\r\nefficientnet_h_b5,448,68
5.42,746.957,512,45.88,27.16,73.9\r\neva02_large_patch14_224,224,682.56,1500.211,1024,303.27,81.15,97.2\r\ncoat_lite_medium_384,384,682.15,750.546,512,44.57,28.73,116.7\r\neva02_large_patch14_clip_224,224,681.15,1503.309,1024,304.11,81.18,97.2\r\nmaxvit_large_tf_224,224,676.54,378.367,256,211.79,43.68,127.35\r\nnaflexvit_so150m2_patch16_reg1_gap,384,673.65,1520.053,1024,136.06,89.53,178.22\r\nvit_so150m2_patch16_reg1_gap_384,384,670.28,1527.699,1024,136.33,89.53,178.22\r\nresnet50x16_clip_gap,384,670.18,1527.912,1024,136.2,70.32,100.64\r\nswinv2_large_window12to16_192to256,256,670.11,764.022,512,196.74,47.81,121.53\r\nefficientnetv2_l,384,669.77,1528.864,1024,118.52,36.1,101.16\r\nnaflexvit_so150m2_patch16_reg1_map,384,667.49,1534.085,1024,142.46,90.33,179.2\r\nvit_large_patch16_dinov3_qkvb,256,663.1,1544.22,1024,303.13,82.43,90.56\r\nvit_large_patch16_dinov3,256,661.55,1547.834,1024,303.08,82.43,90.56\r\ntf_efficientnetv2_l,384,657.88,1556.488,1024,118.52,36.1,101.16\r\nmvitv2_large_cls,224,656.58,1559.566,1024,234.58,42.17,111.69\r\nresnet50x16_clip,384,644.31,1191.936,768,167.33,74.9,103.54\r\nmambaout_base_plus_rw,384,639.72,800.307,512,101.66,56.39,132.7\r\ntf_efficientnet_b5,456,634.44,403.477,256,30.39,10.46,98.86\r\nnfnet_f2,352,633.45,1616.521,1024,193.78,63.22,79.06\r\ndm_nfnet_f2,352,631.79,1620.762,1024,193.78,63.22,79.06\r\neca_nfnet_l3,448,622.75,822.135,512,72.04,52.55,118.4\r\nxcit_medium_24_p8_224,224,607.11,1686.649,1024,84.32,63.53,121.23\r\necaresnet269d,352,602.04,1700.86,1024,102.09,50.25,101.25\r\ncoatnet_4_224,224,595.45,322.42,192,275.43,62.48,129.26\r\nswin_base_patch4_window12_384,384,593.75,862.289,512,87.9,47.19,134.78\r\nefficientvit_l3,384,590.72,325.004,192,246.04,81.08,114.02\r\nvit_so400m_patch14_siglip_gap_224,224,588.95,1738.662,1024,412.44,109.57,106.13\r\nvit_base_patch16_siglip_gap_512,512,588.51,1739.951,1024,86.43,107.0,246.15\r\nvit_so400m_patch14_siglip_224,224,587.06,1744.263,1024,427.68,110.26,106.73\r\nvit_so400m_patch16
_siglip_gap_256,256,586.65,1745.476,1024,412.65,109.62,106.13\r\nvit_base_patch16_siglip_512,512,584.15,1752.935,1024,93.52,108.22,247.74\r\nvit_so400m_patch16_siglip_256,256,583.58,1754.651,1024,427.89,110.31,106.73\r\nresnetrs270,352,581.56,1760.742,1024,129.86,51.13,105.48\r\nnfnet_f3,320,576.3,1776.815,1024,254.92,68.77,83.93\r\nxcit_small_12_p8_384,384,575.92,888.977,512,26.21,54.92,138.29\r\ndm_nfnet_f3,320,573.49,1785.517,1024,254.92,68.77,83.93\r\nvolo_d5_224,224,572.18,1789.62,1024,295.46,72.4,118.11\r\nconvnext_xlarge,288,563.82,1362.108,768,350.2,100.8,95.05\r\ncoatnet_rmlp_2_rw_384,384,562.07,170.771,96,73.88,47.69,209.43\r\nconvformer_b36,384,559.03,1373.793,768,99.88,66.67,164.75\r\nswinv2_cr_base_384,384,556.19,690.383,384,87.88,50.57,333.68\r\ncaformer_b36,384,555.13,1383.428,768,98.75,72.33,261.79\r\nvitamin_large_256,256,554.73,346.087,192,333.38,99.0,154.99\r\nvitamin_large2_256,256,547.48,350.673,192,333.64,99.0,154.99\r\nvit_pe_spatial_base_patch16_512,512,542.42,1887.786,1024,86.43,107.13,246.54\r\nmaxvit_small_tf_384,384,541.29,236.448,128,69.02,35.87,183.65\r\nresmlp_big_24_224,224,530.74,1929.354,1024,129.14,100.23,87.31\r\neva02_base_patch14_448,448,527.68,1455.402,768,87.12,107.11,259.14\r\nconvnextv2_huge,224,522.17,1961.029,1024,660.29,115.0,79.07\r\nseresnextaa201d_32x8d,320,517.0,1485.454,768,149.39,70.22,138.71\r\ncait_xs24_384,384,511.16,2003.262,1024,26.67,19.28,183.98\r\nconvnext_large,384,505.61,1012.612,512,197.77,101.1,126.74\r\nconvnext_large_mlp,384,505.35,1013.134,512,200.13,101.11,126.74\r\nresnext101_32x32d,224,492.93,2077.367,1024,468.53,87.29,91.12\r\nmaxxvitv2_rmlp_base_rw_384,384,484.85,791.974,384,116.09,72.98,213.74\r\nconvnextv2_large,384,481.23,1595.878,768,197.96,101.1,126.74\r\nresnetrs420,320,476.83,2147.487,1024,191.89,64.2,126.56\r\nefficientnetv2_xl,384,471.09,2173.634,1024,208.12,52.81,139.2\r\ncait_xxs36_384,384,469.59,2180.579,1024,17.37,14.35,183.7\r\nvit_so150m2_patch16_reg1_gap_448,448,469.07,2183.031,10
24,136.5,127.51,287.05\r\nswinv2_cr_huge_224,224,467.28,1643.528,768,657.83,115.97,121.08\r\nmaxvit_tiny_tf_512,512,467.26,205.428,96,31.05,33.49,257.59\r\ntf_efficientnetv2_xl,384,464.93,2202.476,1024,208.12,52.81,139.2\r\nfocalnet_huge_fl3,224,449.48,1708.589,768,745.28,118.26,104.8\r\nxcit_large_24_p16_384,384,447.36,1716.71,768,189.1,105.35,137.17\r\nefficientnet_x_b5,576,443.04,1155.631,512,33.44,38.59,113.83\r\nrdnet_large,384,442.54,578.452,256,186.27,102.09,137.13\r\nvitamin_xlarge_256,256,438.36,291.969,128,436.06,130.13,177.37\r\nregnety_320,384,428.57,1791.956,768,145.05,95.0,88.87\r\nefficientnetv2_l,480,428.55,1792.071,768,118.52,56.4,157.99\r\ntf_efficientnetv2_l,480,421.0,1824.205,768,118.52,56.4,157.99\r\nvit_base_patch14_dinov2,518,415.97,1846.271,768,86.58,151.71,397.58\r\nvit_base_patch14_reg4_dinov2,518,413.59,1856.895,768,86.58,152.25,399.53\r\nefficientnet_h_b5,576,411.27,933.659,384,45.88,44.9,122.13\r\nvit_huge_patch14_clip_quickgelu_224,224,407.97,2509.953,1024,632.08,167.4,139.41\r\nvit_huge_patch14_clip_224,224,407.29,2514.177,1024,632.05,167.4,139.41\r\nvit_huge_patch14_xp_224,224,404.75,2529.93,1024,631.8,167.3,139.41\r\nvit_huge_patch14_gap_224,224,401.44,2550.78,1024,630.76,166.73,138.74\r\nefficientnet_b6,528,400.09,319.9,128,43.04,19.4,167.39\r\ncait_s24_384,384,398.6,2568.948,1024,47.06,32.17,245.31\r\nvit_huge_patch14_224,224,398.03,2572.661,1024,630.76,167.4,139.41\r\ndeit3_huge_patch14_224,224,397.69,2574.844,1024,632.13,167.4,139.41\r\nmaxvit_xlarge_tf_224,224,395.23,485.765,192,506.99,97.52,191.04\r\naimv2_huge_patch14_224,224,392.82,2606.746,1024,680.85,179.01,126.22\r\nresnetrs350,384,392.25,2610.56,1024,163.96,77.59,154.74\r\nsam2_hiera_tiny,896,383.24,166.971,64,26.85,99.86,384.63\r\ntf_efficientnet_b6,528,372.38,343.713,128,43.04,19.4,167.39\r\nmaxvit_rmlp_base_rw_384,384,368.81,520.563,192,116.14,70.97,318.95\r\nvolo_d3_448,448,368.61,2083.495,768,86.63,96.33,446.83\r\nregnety_1280,224,361.15,2126.503,768,644.81,127.66,71
.58\r\nseresnextaa201d_32x8d,384,359.11,1425.723,512,149.39,101.11,199.72\r\nresnest269e,416,358.79,2140.491,768,110.93,77.69,171.98\r\nhiera_huge_224,224,352.43,2179.094,768,672.78,124.85,150.95\r\nvit_large_patch14_clip_quickgelu_336,336,351.4,2914.056,1024,304.29,191.11,270.24\r\nvit_large_patch14_clip_336,336,350.75,2919.44,1024,304.53,191.11,270.24\r\nswin_large_patch4_window12_384,384,341.98,1122.854,384,196.74,104.08,202.16\r\neva_large_patch14_336,336,341.12,3001.813,1024,304.53,191.1,270.24\r\nvit_large_patch16_siglip_gap_384,384,341.09,3002.092,1024,303.69,190.85,269.55\r\ndeit3_large_patch16_384,384,340.52,3007.104,1024,304.76,191.21,270.24\r\nvit_large_patch16_siglip_384,384,339.92,3012.464,1024,316.28,192.07,270.75\r\naimv2_large_patch14_336,336,338.62,3023.973,1024,309.53,194.22,227.08\r\nnfnet_f3,416,338.05,2271.819,768,254.92,115.58,141.78\r\nvit_giant_patch16_gap_224,224,336.93,3039.142,1024,1011.37,202.46,139.26\r\nvit_large_patch16_384,384,336.09,3046.77,1024,304.72,191.21,270.24\r\nnfnet_f4,384,333.34,2303.927,768,316.07,122.14,147.57\r\nxcit_large_24_p8_224,224,332.23,3082.166,1024,188.93,141.23,181.56\r\ndm_nfnet_f4,384,330.73,2322.132,768,316.07,122.14,147.57\r\nvit_pe_core_large_patch14_336,336,329.08,3111.638,1024,317.15,192.33,271.43\r\nswinv2_base_window12to24_192to384,384,328.33,389.82,128,87.92,55.25,280.36\r\ncoatnet_5_224,224,328.16,390.027,128,687.47,145.49,194.24\r\nswinv2_cr_large_384,384,327.4,781.893,256,196.68,108.96,404.96\r\nbeit_large_patch16_384,384,326.85,3132.886,1024,305.0,191.21,270.24\r\nconvnext_xxlarge,256,321.89,1590.575,512,846.47,198.09,124.45\r\nresnetv2_152x4_bit,224,321.77,1591.188,512,936.53,186.9,90.22\r\nsam2_hiera_small,896,320.88,199.424,64,33.95,123.99,442.63\r\nresnetv2_152x2_bit,384,320.3,1598.46,512,236.34,136.16,132.56\r\nmaxvit_base_tf_384,384,319.75,400.287,128,119.65,73.8,332.9\r\nconvnext_xlarge,384,317.73,1208.533,384,350.2,179.2,168.99\r\nconvnextv2_huge,288,316.0,2430.353,768,660.29,190.1,130.7\r
\ndavit_giant,224,311.8,2463.058,768,1406.47,192.92,153.06\r\ndm_nfnet_f3,416,310.5,2473.426,768,254.92,115.58,141.78\r\nvitamin_large2_336,336,309.73,309.92,96,333.83,175.72,307.47\r\nvitamin_large_336,336,309.58,310.074,96,333.57,175.72,307.47\r\neva02_large_patch14_clip_336,336,305.65,3350.206,1024,304.43,191.34,289.13\r\nxcit_small_24_p8_384,384,302.9,1690.284,512,47.63,105.24,265.91\r\nmaxvit_small_tf_512,512,302.68,211.415,64,69.13,67.26,383.77\r\nmvitv2_huge_cls,224,285.38,1794.064,512,694.8,120.67,243.63\r\nresnetrs420,416,281.03,3643.655,1024,191.89,108.45,213.79\r\nresnetv2_50x3_bit,448,279.16,687.765,192,217.32,145.7,133.37\r\nvit_huge_plus_patch16_dinov3,256,274.86,3725.531,1024,840.51,224.88,193.59\r\nvit_huge_plus_patch16_dinov3_qkvb,256,274.77,3726.774,1024,840.59,224.88,193.59\r\nfocalnet_huge_fl4,224,267.22,2874.036,768,686.46,118.9,113.34\r\ncait_s36_384,384,266.73,3839.038,1024,68.37,47.99,367.4\r\nefficientnetv2_xl,512,265.24,1930.269,512,208.12,93.85,247.32\r\nvit_giant_patch14_clip_224,224,261.97,3908.792,1024,1012.65,267.18,192.64\r\ntf_efficientnetv2_xl,512,261.36,2938.438,768,208.12,93.85,247.32\r\nnaflexvit_so400m_patch16_siglip,384,259.8,3941.392,1024,427.89,259.65,319.77\r\nvit_giant_patch14_224,224,257.36,3978.864,1024,1012.61,267.18,192.64\r\neva_giant_patch14_224,224,256.02,3999.639,1024,1012.56,267.18,192.64\r\neva_giant_patch14_clip_224,224,255.95,4000.71,1024,1012.59,267.18,192.64\r\nvit_so400m_patch16_siglip_gap_384,384,253.33,4042.198,1024,413.02,258.11,318.42\r\nvit_so400m_patch16_siglip_384,384,251.89,4065.264,1024,428.26,259.65,319.77\r\nbeit3_giant_patch14_224,224,249.28,4107.737,1024,1013.22,267.56,192.64\r\nvitamin_xlarge_336,336,247.84,387.323,96,436.06,230.18,347.33\r\nresnet50x64_clip_gap,448,235.36,2175.355,512,365.03,253.96,233.22\r\nresnetv2_152x2_bit,448,235.06,1089.075,256,236.34,184.99,180.43\r\nvitamin_large_384,384,234.7,272.662,64,333.71,234.44,440.16\r\nvit_giantopt_patch16_siglip_gap_256,256,234.52,4366.323,102
4,1134.84,298.42,199.62\r\nvitamin_large2_384,384,234.51,272.878,64,333.97,234.44,440.16\r\nvit_giantopt_patch16_siglip_256,256,234.01,4375.817,1024,1163.17,299.66,200.43\r\nnfnet_f5,416,230.92,2217.153,512,377.21,170.71,204.56\r\naimv2_1b_patch14_224,224,230.58,4440.996,1024,1234.96,322.43,170.39\r\ndm_nfnet_f5,416,229.97,3339.581,768,377.21,170.71,204.56\r\nefficientnet_b7,600,229.78,417.756,96,66.35,38.33,289.94\r\nresnet50x64_clip,448,227.34,2252.146,512,420.38,265.02,239.13\r\nregnety_640,384,224.24,2281.347,512,281.38,188.47,124.83\r\nmaxvit_large_tf_384,384,223.45,429.596,96,212.03,132.55,445.84\r\ntf_efficientnet_b7,600,217.0,442.367,96,66.35,38.33,289.94\r\nvolo_d4_448,448,212.75,2406.506,512,193.41,197.13,527.35\r\nxcit_medium_24_p8_384,384,204.13,2508.106,512,84.32,186.67,354.73\r\nswinv2_large_window12to24_192to384,384,204.05,627.274,128,196.74,116.15,407.83\r\nvit_so400m_patch14_siglip_gap_378,378,195.44,5239.313,1024,412.99,333.46,451.19\r\nvit_so400m_patch14_siglip_gap_384,384,195.37,5241.248,1024,412.99,333.46,451.19\r\nvit_so400m_patch14_siglip_378,378,194.84,5255.636,1024,428.23,335.4,452.89\r\nvit_so400m_patch14_siglip_384,384,194.52,5264.074,1024,428.23,335.4,452.89\r\ndavit_base_fl,768,191.95,1000.244,192,90.37,190.32,530.15\r\ndm_nfnet_f4,512,188.15,2040.907,384,316.07,216.26,262.26\r\nvitamin_xlarge_384,384,186.79,342.595,64,436.06,306.38,493.46\r\nvit_large_patch16_siglip_gap_512,512,185.75,4134.569,768,304.15,361.84,655.36\r\nvit_large_patch16_siglip_512,512,184.99,4151.617,768,316.74,364.0,657.48\r\nnfnet_f4,512,184.21,2779.471,512,316.07,216.26,262.26\r\naimv2_large_patch14_448,448,183.12,4193.83,768,309.98,367.84,491.78\r\nvit_intern300m_patch14_448,448,179.67,4274.467,768,304.01,362.05,656.39\r\nfocalnet_large_fl3,384,179.26,2856.155,512,239.13,105.06,168.04\r\nmaxvit_base_tf_512,512,178.54,358.437,64,119.88,138.02,703.99\r\nconvnextv2_huge,384,178.32,2871.21,512,660.29,337.96,232.35\r\nvit_pe_lang_large_patch14_448,448,177.77,4320.078,7
68,291.42,346.99,629.09\r\nvit_huge_patch14_clip_336,336,176.68,5795.708,1024,632.46,390.97,407.54\r\nnfnet_f6,448,176.06,2908.037,512,438.36,229.7,273.62\r\ndm_nfnet_f6,448,175.47,2917.876,512,438.36,229.7,273.62\r\nvit_pe_spatial_large_patch14_448,448,174.45,4402.464,768,303.96,362.05,656.39\r\nbeit_large_patch16_512,512,169.91,4520.111,768,305.67,362.24,656.39\r\naimv2_huge_patch14_336,336,169.85,4521.603,768,681.34,416.36,337.08\r\nresnetv2_101x3_bit,448,169.63,1131.832,192,387.93,280.33,194.78\r\nfocalnet_large_fl4,384,169.58,3019.271,512,239.32,105.2,181.78\r\nsam2_hiera_base_plus,896,168.31,380.22,64,68.68,227.48,828.88\r\neva02_large_patch14_448,448,162.11,3158.305,512,305.08,362.33,689.95\r\nvit_gigantic_patch14_clip_quickgelu_224,224,149.16,6864.925,1024,1844.91,483.96,275.37\r\nvit_gigantic_patch14_clip_224,224,149.04,6870.582,1024,1844.91,483.96,275.37\r\nefficientnet_b8,672,147.34,651.547,96,87.41,63.48,442.89\r\nvit_gigantic_patch14_224,224,146.66,6981.986,1024,1844.44,483.95,275.37\r\nfocalnet_xlarge_fl3,384,140.65,1820.057,256,408.79,185.61,223.99\r\ntf_efficientnet_b8,672,140.59,682.788,96,87.41,63.48,442.89\r\nvit_huge_patch14_clip_quickgelu_378,378,138.32,5552.43,768,632.68,503.79,572.79\r\nregnety_1280,384,138.19,1852.508,256,644.81,374.99,210.2\r\nvit_huge_patch14_clip_378,378,138.09,5561.633,768,632.68,503.79,572.79\r\nnfnet_f5,544,136.55,2812.206,384,377.21,290.97,349.71\r\ndm_nfnet_f5,544,136.18,2819.703,384,377.21,290.97,349.71\r\nvit_so400m_patch14_siglip_gap_448,448,135.08,5685.554,768,413.33,487.18,764.26\r\nvit_so400m_patch16_siglip_gap_512,512,134.73,5700.093,768,413.53,487.4,764.26\r\nfocalnet_xlarge_fl4,384,134.38,1905.062,256,409.03,185.79,242.31\r\nvit_so400m_patch16_siglip_512,512,134.17,5723.828,768,428.77,490.13,766.65\r\nnfnet_f7,480,132.83,3854.489,512,499.5,300.08,355.86\r\nswinv2_cr_huge_384,384,132.45,966.36,128,657.94,352.04,583.18\r\nvit_large_patch14_dinov2,518,132.03,3877.992,512,304.37,507.15,1058.82\r\nvolo_d5_448,448,
131.94,3880.656,512,295.91,315.06,737.92\r\nvit_large_patch14_reg4_dinov2,518,131.77,3885.407,512,304.37,508.9,1064.02\r\nmaxvit_xlarge_tf_384,384,130.72,489.548,64,475.32,292.78,668.76\r\nswinv2_cr_giant_224,224,127.69,2004.799,256,2598.76,483.85,309.15\r\nconvmixer_1536_20,224,125.49,8159.819,1024,51.63,48.68,33.03\r\nmaxvit_large_tf_512,512,124.77,384.675,48,212.33,244.75,942.15\r\nvit_huge_patch16_gap_448,448,123.52,6217.487,768,631.67,544.7,636.83\r\ncait_m36_384,384,120.57,4246.46,512,271.22,173.11,734.81\r\neva_giant_patch14_336,336,111.99,6857.697,768,1013.01,620.64,550.67\r\naimv2_3b_patch14_224,224,109.76,4664.713,512,2720.66,705.91,252.44\r\nbeit3_giant_patch14_336,336,109.43,4678.758,512,1013.67,621.52,550.67\r\nnfnet_f6,576,107.13,3584.383,384,438.36,378.69,452.2\r\ndm_nfnet_f6,576,106.86,2395.607,256,438.36,378.69,452.2\r\nvit_giantopt_patch16_siglip_gap_384,384,102.66,7481.28,768,1135.33,694.1,567.12\r\nvit_giantopt_patch16_siglip_384,384,102.27,7509.7,768,1163.66,696.85,568.91\r\nconvnextv2_huge,512,100.38,2550.351,256,660.29,600.81,413.07\r\naimv2_1b_patch14_336,336,99.93,5123.333,512,1235.61,743.59,454.16\r\nvolo_d5_512,512,98.66,3892.025,384,296.09,425.09,1105.37\r\ntf_efficientnet_l2,475,95.73,668.506,64,480.31,172.11,609.89\r\naimv2_huge_patch14_448,448,92.52,4150.526,384,682.03,774.02,731.38\r\nnfnet_f7,608,75.89,5059.751,384,499.5,480.39,570.85\r\nmaxvit_xlarge_tf_512,512,73.09,437.774,32,475.77,534.14,1413.22\r\nregnety_2560,384,70.0,2742.661,192,1282.6,747.83,296.49\r\ndavit_huge_fl,768,69.79,1375.544,96,360.64,744.84,1060.3\r\nresnetv2_152x4_bit,480,68.58,1866.45,128,936.53,844.84,414.26\r\nmobilenetv5_300m,768,67.68,945.655,64,294.13,435.74,842.16\r\nmobilenetv5_300m_enc,768,66.33,964.787,64,294.13,435.74,842.16\r\naimv2_1b_patch14_448,448,54.8,4671.385,256,1236.53,1367.03,983.56\r\ncait_m48_448,448,53.68,7154.023,384,356.46,329.41,1708.23\r\nsam2_hiera_large,1024,52.77,909.536,48,212.15,907.48,2190.34\r\nvit_gigantic_patch14_clip_378,378,
51.13,7509.949,384,1845.7,1429.82,1047.37\r\naimv2_3b_patch14_336,336,48.07,5325.71,256,2721.64,1615.48,674.17\r\nvit_giant_patch14_dinov2,518,40.36,6343.01,256,1136.48,1784.2,2757.89\r\nvit_giant_patch14_reg4_dinov2,518,40.29,6354.228,256,1136.48,1790.08,2771.21\r\nsamvit_base_patch16,1024,39.38,304.734,12,89.67,486.43,1343.27\r\nswinv2_cr_giant_384,384,37.26,1288.088,48,2598.76,1450.71,1394.86\r\nvit_pe_lang_gigantic_patch14_448,448,36.93,6931.668,256,1740.92,1931.99,1664.88\r\neva_giant_patch14_560,560,36.73,6970.578,256,1014.45,1906.76,2577.17\r\nefficientnet_l2,800,35.41,677.834,24,480.31,479.12,1707.39\r\nvit_pe_core_gigantic_patch14_448,448,35.25,7261.811,256,1882.03,2060.12,1774.21\r\nvit_pe_spatial_gigantic_patch14_448,448,34.78,7360.973,256,1851.89,2055.25,1771.04\r\ntf_efficientnet_l2,800,34.3,699.593,24,480.31,479.12,1707.39\r\naimv2_3b_patch14_448,448,26.54,4823.547,128,2723.02,2939.61,1462.76\r\nvit_so400m_patch14_siglip_gap_896,896,23.85,8051.729,192,416.87,2731.49,8492.88\r\nsamvit_large_patch16,1024,18.76,426.314,8,308.28,1493.86,2553.78\r\nsamvit_huge_patch16,1024,12.39,484.084,6,637.03,2982.23,3428.16\r\n"
  },
  {
    "path": "results/benchmark-infer-amp-nchw-pt291-cu128-4090.csv",
    "content": "model,infer_img_size,infer_samples_per_sec,infer_step_time,infer_batch_size,param_count,infer_gmacs,infer_macts\r\ntest_vit,160,194812.41,5.235,1024,0.37,0.04,0.48\r\ntest_vit2,160,145856.32,6.999,1024,0.46,0.05,0.64\r\ntest_byobnet,160,108829.14,9.387,1024,0.46,0.03,0.43\r\ntest_efficientnet,160,100950.91,10.122,1024,0.36,0.06,0.55\r\ntest_vit4,160,98926.56,10.33,1024,1.02,0.11,1.07\r\ntest_vit3,160,95175.2,10.737,1024,0.93,0.09,1.0\r\ntest_efficientnet_gn,160,89330.61,11.441,1024,0.36,0.06,0.55\r\nefficientvit_m0,224,77772.14,13.144,1024,2.33,0.08,0.91\r\ntinynet_e,106,75453.03,13.549,1024,2.04,0.03,0.69\r\ntest_mambaout,160,72818.81,14.04,1024,0.45,0.03,0.53\r\ntest_resnet,160,72577.99,14.086,1024,0.47,0.1,0.64\r\ntest_convnext2,160,72190.85,14.162,1024,0.48,0.05,0.63\r\ntest_convnext,160,71317.63,14.336,1024,0.27,0.03,0.58\r\ntest_convnext3,160,70421.79,14.519,1024,0.47,0.05,0.63\r\nmobilenetv3_small_050,224,68070.81,15.019,1024,1.59,0.03,0.92\r\nmobilenetv4_conv_small_035,224,67106.27,15.237,1024,1.91,0.05,0.98\r\nlcnet_035,224,62790.81,16.286,1024,1.64,0.03,1.04\r\nmobilenetv4_conv_small_050,224,57208.51,17.876,1024,2.24,0.07,1.18\r\nmobilenetv4_conv_small_035,256,53371.67,19.164,1024,1.91,0.06,1.28\r\nlcnet_050,224,52471.08,19.493,1024,1.88,0.05,1.26\r\nefficientvit_m1,224,49531.88,20.651,1024,2.96,0.17,1.33\r\ntest_mambaout,192,49203.2,20.79,1024,0.45,0.04,0.77\r\nmobilenetv3_small_075,224,48267.08,21.192,1024,2.04,0.05,1.3\r\nshvit_s1,224,48051.75,21.288,1024,6.31,0.24,1.39\r\nefficientvit_m2,224,45378.53,22.543,1024,4.17,0.2,1.47\r\nmobilenetv4_conv_small_050,256,44621.71,22.926,1024,2.24,0.09,1.55\r\nmobilenetv3_small_100,224,42944.24,23.821,1024,2.54,0.06,1.42\r\ntest_efficientnet_ln,160,42771.86,23.92,1024,0.36,0.06,0.55\r\nefficientvit_m3,224,40863.87,25.036,1024,6.88,0.26,1.62\r\ntinynet_d,152,39774.01,25.723,1024,2.34,0.05,1.42\r\nshvit_s2,224,38960.54,26.26,1024,11.45,0.37,1.6\r\nefficientvit_m4,224,38549.12,26.541,1024,8.78,0.3,1.7\
r\ntest_efficientnet_evos,160,37758.98,27.097,1024,0.36,0.06,0.55\r\nrepghostnet_050,224,37022.23,27.637,1024,2.31,0.05,2.02\r\ntf_mobilenetv3_small_minimal_100,224,36955.41,27.686,1024,2.04,0.06,1.41\r\ntf_mobilenetv3_small_075,224,36783.35,27.813,1024,2.04,0.05,1.3\r\ntest_nfnet,160,36605.69,27.952,1024,0.38,0.29,1.2\r\nstarnet_s050,224,33525.27,30.521,1024,0.54,0.09,1.57\r\ntf_mobilenetv3_small_100,224,33426.93,30.61,1024,2.54,0.06,1.42\r\nlcnet_075,224,32866.75,31.133,1024,2.36,0.1,1.99\r\nmobilenetv4_conv_small,224,32432.31,31.551,1024,3.77,0.19,1.97\r\nfasternet_t0,224,31555.1,32.428,1024,3.91,0.34,1.97\r\nlevit_128s,224,30353.33,33.713,1024,7.76,0.3,1.88\r\nlevit_conv_128s,224,30016.26,34.092,1024,7.76,0.3,1.88\r\nvit_small_patch32_224,224,29070.9,35.201,1024,22.88,1.15,2.5\r\nmnasnet_small,224,28987.83,35.303,1024,2.03,0.07,2.16\r\nrepghostnet_058,224,28968.94,35.325,1024,2.54,0.06,2.59\r\nghostnet_050,224,28098.57,36.42,1024,2.59,0.05,1.77\r\nregnetx_002,224,27300.12,37.485,1024,2.68,0.2,2.16\r\nresnet18,160,26589.31,38.488,1024,11.69,0.93,1.27\r\nlcnet_100,224,26168.46,39.108,1024,2.95,0.16,2.52\r\nresnet10t,176,26100.31,39.21,1024,5.44,0.7,1.51\r\nmobilenetv4_conv_small,256,25671.77,39.866,1024,3.77,0.25,2.57\r\nregnety_002,224,25657.33,39.888,1024,3.16,0.2,2.17\r\nshvit_s3,224,25254.66,40.524,1024,14.21,0.6,2.33\r\nvit_tiny_r_s16_p8_224,224,25109.74,40.758,1024,6.34,0.44,2.06\r\nefficientvit_m5,224,24952.35,41.015,1024,12.44,0.52,2.41\r\nmobilenetv2_035,224,24947.4,41.022,1024,1.68,0.07,2.86\r\nrepghostnet_080,224,23360.68,43.811,1024,3.27,0.1,3.22\r\nefficientvit_b0,224,22594.76,45.297,1024,3.41,0.1,2.87\r\nvit_medium_patch32_clip_224,224,22569.6,45.348,1024,39.69,2.0,3.34\r\npit_ti_224,224,22527.08,45.434,1024,4.85,0.7,6.19\r\npit_ti_distilled_224,224,22303.18,45.89,1024,5.1,0.71,6.23\r\nmnasnet_050,224,22073.66,46.366,1024,2.22,0.11,3.07\r\nstarnet_s100,224,20457.29,50.033,1024,1.04,0.19,2.68\r\nrepvgg_a0,224,20391.24,50.192,1024,8.31,1.36,1.79\r\nlev
it_conv_128,224,20143.06,50.814,1024,9.19,0.41,2.71\r\ntinynet_c,184,20069.62,50.999,1024,2.46,0.11,2.87\r\nvit_xsmall_patch16_clip_224,224,20055.95,51.035,1024,8.28,1.79,6.65\r\nlevit_128,224,19989.7,51.203,1024,9.19,0.41,2.71\r\nfasternet_t1,224,19663.78,52.053,1024,7.6,0.85,3.15\r\nmixer_s32_224,224,19575.29,52.288,1024,19.1,1.0,2.28\r\nsemnasnet_050,224,19516.96,52.443,1024,2.08,0.11,3.44\r\nmobilenetv2_050,224,19510.99,52.46,1024,1.97,0.1,3.64\r\nstarnet_s150,224,19080.6,53.644,1024,1.56,0.23,2.75\r\nmobileone_s0,224,18890.62,54.18,1024,2.08,0.28,3.79\r\nrepghostnet_100,224,18579.03,55.093,1024,4.06,0.15,3.98\r\nvit_tiny_patch16_224,224,18553.34,55.169,1024,5.72,1.26,5.97\r\ndeit_tiny_patch16_224,224,18550.12,55.179,1024,5.72,1.26,5.97\r\ndeit_tiny_distilled_patch16_224,224,18466.64,55.428,1024,5.91,1.27,6.01\r\nregnetx_004,224,18382.83,55.679,1024,5.16,0.4,3.14\r\nlevit_conv_192,224,18017.24,56.808,1024,10.92,0.66,3.2\r\ngernet_s,224,17971.42,56.956,1024,8.17,0.75,2.65\r\nlevit_192,224,17888.75,57.22,1024,10.92,0.66,3.2\r\ncs3darknet_focus_s,256,17656.87,57.971,1024,3.27,0.69,2.7\r\nregnetx_004_tv,224,17598.16,58.165,1024,5.5,0.42,3.17\r\nshvit_s4,256,17331.87,59.059,1024,16.55,0.99,3.73\r\nlcnet_150,224,17264.67,59.289,1024,4.5,0.34,3.79\r\nmobilenetv4_conv_small,320,16909.72,60.534,1024,3.77,0.39,4.01\r\ncs3darknet_s,256,16878.73,60.645,1024,3.28,0.72,2.97\r\nrepghostnet_111,224,16761.94,61.067,1024,4.52,0.18,4.38\r\nmobilenetv3_large_075,224,16430.89,62.298,1024,3.99,0.16,4.0\r\nresnet10t,224,16384.4,62.476,1024,5.44,1.1,2.43\r\nghostnetv3_050,224,16352.57,62.597,1024,2.85,0.05,2.28\r\nresnet34,160,16346.64,62.619,1024,21.8,1.87,1.91\r\nconvnext_zepto_rms,224,16192.45,63.217,1024,2.16,0.3,2.75\r\nvit_betwixt_patch32_clip_224,224,15898.23,64.386,1024,61.41,3.09,4.17\r\nresnetv2_18,224,14951.21,68.467,1024,11.69,1.82,2.48\r\nmobilenetv3_rw,224,14879.18,68.797,1024,5.48,0.23,4.41\r\nconvnext_zepto_rms_ols,224,14813.49,69.103,1024,2.16,0.34,3.15\r\nrepvgg_a1,22
4,14679.98,69.73,1024,12.79,2.36,2.37\r\nhardcorenas_a,224,14589.12,70.165,1024,5.26,0.23,4.38\r\nmobilenetv3_large_100,224,14586.41,70.179,1024,5.48,0.23,4.41\r\npit_xs_224,224,14561.59,70.299,1024,10.62,1.4,7.71\r\npit_xs_distilled_224,224,14457.56,70.804,1024,11.0,1.41,7.76\r\nresnet14t,176,14333.87,71.416,1024,10.08,1.07,3.61\r\nese_vovnet19b_slim_dw,224,14329.81,71.436,1024,1.9,0.4,5.28\r\nnf_regnet_b0,192,14311.99,71.525,1024,8.76,0.37,3.15\r\nmnasnet_075,224,14285.7,71.656,1024,3.17,0.23,4.77\r\nghostnet_100,224,14181.4,72.159,1024,5.18,0.15,3.55\r\nregnetx_006,224,14041.47,72.904,1024,6.2,0.61,3.98\r\nrepghostnet_130,224,13809.49,74.128,1024,5.46,0.24,5.24\r\ntf_mobilenetv3_large_075,224,13775.21,74.312,1024,3.99,0.16,4.0\r\nmobilenetv1_100,224,13735.56,74.528,1024,4.23,0.58,5.04\r\nregnety_004,224,13729.83,74.559,1024,4.34,0.41,3.89\r\nresnet18,224,13726.29,74.577,1024,11.69,1.82,2.48\r\nhardcorenas_b,224,13492.56,75.87,1024,5.18,0.26,5.09\r\nmobilenetv1_100h,224,13425.71,76.249,1024,5.28,0.63,5.09\r\nhardcorenas_c,224,13371.08,76.56,1024,5.52,0.28,5.01\r\ntf_efficientnetv2_b0,192,13266.05,77.166,1024,7.14,0.54,3.51\r\nlevit_256,224,13220.48,77.432,1024,18.86,1.13,4.23\r\nlevit_conv_256,224,13158.97,77.79,1024,18.86,1.13,4.23\r\nmobilenet_edgetpu_v2_xs,224,13048.2,78.454,1024,4.46,0.7,4.8\r\ntinynet_b,188,12962.67,78.972,1024,3.73,0.21,4.44\r\nseresnet18,224,12882.74,79.463,1024,11.78,1.82,2.49\r\ntf_mobilenetv3_large_minimal_100,224,12879.58,79.481,1024,3.92,0.22,4.4\r\nconvnext_atto,224,12807.4,79.93,1024,3.7,0.55,3.81\r\ninception_next_atto,224,12781.19,80.094,1024,4.16,0.5,3.63\r\nedgenext_xx_small,256,12555.82,81.529,1024,1.33,0.26,3.33\r\nmnasnet_100,224,12523.79,81.741,1024,4.38,0.33,5.46\r\nhardcorenas_d,224,12459.65,82.162,1024,7.5,0.3,4.93\r\nlegacy_seresnet18,224,12343.17,82.936,1024,11.78,1.82,2.49\r\ntf_mobilenetv3_large_100,224,12319.35,83.097,1024,5.48,0.23,4.41\r\nsemnasnet_075,224,12261.12,83.491,1024,2.91,0.23,5.54\r\nmobilenetv2_075,224,1
2216.42,83.798,1024,2.64,0.22,5.86\r\nconvnext_atto_ols,224,12188.44,83.991,1024,3.7,0.58,4.11\r\nregnety_006,224,12176.78,84.07,1024,6.06,0.61,4.33\r\nvit_base_patch32_clip_224,224,12150.93,84.25,1024,88.22,4.41,5.01\r\nrepghostnet_150,224,11965.22,85.557,1024,6.55,0.31,6.0\r\nlevit_256d,224,11941.25,85.73,1024,26.16,1.39,4.93\r\nresnetv2_18d,224,11926.42,85.836,1024,11.71,2.06,3.29\r\nfasternet_t2,224,11762.49,87.033,1024,14.98,1.91,4.73\r\nstarnet_s2,224,11511.44,88.932,1024,3.68,0.55,4.73\r\nlevit_conv_256d,224,11464.7,89.291,1024,26.16,1.39,4.93\r\nmobilenetv3_large_100,256,11450.74,89.403,1024,5.48,0.29,5.75\r\nspnasnet_100,224,11392.69,89.858,1024,4.42,0.35,6.03\r\nrepvit_m0_9,224,11352.54,90.177,1024,5.07,0.82,6.17\r\nrepvit_m1,224,11338.73,90.287,1024,5.07,0.82,6.17\r\nrepvgg_b0,224,11242.84,91.054,1024,14.34,3.06,3.07\r\nmobileone_s1,224,11176.63,91.592,1024,4.76,0.83,6.27\r\ncs3darknet_focus_s,320,11161.42,91.721,1024,3.27,1.08,4.22\r\nresnet18d,224,11142.9,91.874,1024,11.71,2.06,3.29\r\nstarnet_s1,224,11073.61,92.449,1024,2.87,0.42,4.99\r\nghostnet_130,224,11040.29,92.727,1024,7.36,0.24,4.6\r\nconvnext_femto,224,11027.48,92.835,1024,5.22,0.79,4.57\r\ndla46_c,224,10979.15,93.244,1024,1.3,0.58,4.5\r\nvit_base_patch32_224,224,10936.95,93.605,1024,88.22,4.41,5.01\r\nhardcorenas_f,224,10936.02,93.612,1024,8.2,0.35,5.57\r\nconvnext_atto_rms,224,10926.43,93.694,1024,3.69,0.55,3.81\r\nmobilenetv1_100,256,10920.5,93.745,1024,4.23,0.76,6.59\r\nmobilenetv4_conv_medium,224,10907.56,93.856,1024,9.72,0.84,5.8\r\nsemnasnet_100,224,10904.65,93.879,1024,3.89,0.32,6.23\r\nmobilenetv2_100,224,10807.07,94.728,1024,3.5,0.31,6.68\r\nmobilenetv1_100h,256,10737.52,95.344,1024,5.28,0.82,6.65\r\nhardcorenas_e,224,10721.85,95.482,1024,8.07,0.35,5.65\r\nregnetx_008,224,10704.12,95.637,1024,7.26,0.81,5.15\r\nmobilenetv1_125,224,10702.25,95.658,1024,6.27,0.89,6.3\r\nese_vovnet19b_slim,224,10662.87,96.011,1024,3.17,1.69,3.52\r\nconvnext_femto_ols,224,10564.12,96.908,1024,5.23,0.82,4.8
7\r\ncrossvit_tiny_240,240,10564.05,96.904,1024,7.01,1.57,9.08\r\nmobilevit_xxs,256,10511.74,97.391,1024,1.27,0.42,8.34\r\nmobilenet_edgetpu_100,224,10492.81,97.566,1024,4.09,1.0,5.75\r\ntf_efficientnetv2_b0,224,10467.02,97.807,1024,7.14,0.73,4.77\r\ntinynet_a,192,10256.34,99.816,1024,6.19,0.35,5.41\r\nefficientnet_lite0,224,10236.84,100.007,1024,4.65,0.4,6.74\r\ntf_efficientnetv2_b1,192,10227.98,100.094,1024,8.14,0.76,4.59\r\nxcit_nano_12_p16_224,224,10194.28,100.424,1024,3.05,0.56,4.17\r\nhgnetv2_b0,224,10173.41,100.631,1024,6.0,0.33,2.12\r\nfbnetc_100,224,10059.48,101.771,1024,5.57,0.4,6.51\r\nregnety_008,224,9982.16,102.559,1024,6.26,0.81,5.25\r\ncrossvit_9_240,240,9943.46,102.958,1024,8.55,1.85,9.52\r\nvit_small_patch32_384,384,9875.39,103.667,1024,22.92,3.45,8.25\r\nedgenext_xx_small,288,9839.11,104.047,1024,1.33,0.33,4.21\r\nvit_base_patch32_clip_quickgelu_224,224,9834.05,104.105,1024,87.85,4.41,5.01\r\nregnety_008_tv,224,9646.11,106.133,1024,6.43,0.84,5.42\r\nrepvit_m1_0,224,9602.7,106.613,1024,6.81,1.11,7.19\r\nresnetblur18,224,9558.56,107.106,1024,11.69,2.34,3.39\r\nvit_base_patch32_clip_256,256,9350.67,109.487,1024,87.86,5.76,6.65\r\npvt_v2_b0,224,9298.73,110.099,1024,3.67,0.57,7.99\r\nvisformer_tiny,224,9276.68,110.36,1024,10.32,1.27,5.72\r\nresnetv2_18,288,9174.61,111.588,1024,11.69,3.0,4.11\r\ncrossvit_9_dagger_240,240,9173.57,111.601,1024,8.78,1.99,9.97\r\ndla46x_c,224,9169.79,111.647,1024,1.07,0.54,5.66\r\nmobilenet_edgetpu_v2_s,224,9152.23,111.862,1024,5.99,1.21,6.6\r\npit_s_224,224,9150.65,111.881,1024,23.46,2.88,11.56\r\npit_s_distilled_224,224,9136.19,112.058,1024,24.04,2.9,11.64\r\nmobilenetv4_conv_medium,256,9133.01,112.098,1024,9.72,1.1,7.58\r\nresnet14t,224,9065.61,112.93,1024,10.08,1.69,5.8\r\nmobileone_s2,224,8979.69,114.009,1024,7.81,1.3,7.56\r\nmobilenetv4_hybrid_medium_075,224,8974.46,114.077,1024,7.31,0.66,5.65\r\nrepghostnet_200,224,8900.71,115.023,1024,9.77,0.53,7.96\r\nrepvit_m1_1,224,8878.97,115.305,1024,8.24,1.34,7.82\r\nrepvit_m2,
224,8876.04,115.343,1024,8.24,1.34,7.82\r\nmobilevitv2_050,256,8872.07,115.396,1024,1.37,0.48,8.04\r\ncs3darknet_focus_m,256,8833.29,115.902,1024,9.3,1.98,4.89\r\ntf_efficientnet_lite0,224,8813.06,116.167,1024,4.65,0.4,6.74\r\nfbnetv3_b,224,8797.7,116.37,1024,8.6,0.42,6.97\r\nefficientnet_b0,224,8794.71,116.41,1024,5.29,0.4,6.75\r\nresnetv2_34,224,8775.33,116.666,1024,21.8,3.67,3.74\r\nregnetz_005,224,8766.98,116.778,1024,7.12,0.52,5.86\r\nrexnet_100,224,8757.28,116.908,1024,4.8,0.41,7.44\r\nmnasnet_140,224,8753.65,116.956,1024,7.12,0.6,7.71\r\nrepvgg_a2,224,8700.45,117.67,1024,25.5,5.12,3.13\r\ndla60x_c,224,8678.6,117.967,1024,1.32,0.59,6.01\r\nefficientvit_b1,224,8602.48,119.011,1024,9.1,0.53,7.25\r\nconvnext_atto_rms,256,8561.91,119.576,1024,3.69,0.71,4.98\r\nconvnext_pico,224,8550.37,119.737,1024,9.05,1.37,6.1\r\nefficientnet_b1_pruned,240,8536.3,119.934,1024,6.33,0.4,6.21\r\nrexnetr_100,224,8506.38,120.356,1024,4.88,0.43,7.72\r\nmobilenetv1_125,256,8474.01,120.817,1024,6.27,1.16,8.23\r\nresnet18,288,8361.66,122.441,1024,11.69,3.01,4.11\r\nresnet34,224,8315.61,123.118,1024,21.8,3.67,3.74\r\nvit_tiny_r_s16_p8_384,384,8308.02,123.231,1024,6.36,1.34,6.49\r\nresnet50,160,8305.45,123.268,1024,25.56,2.1,5.67\r\nese_vovnet19b_dw,224,8302.61,123.31,1024,6.54,1.34,8.25\r\nselecsls42,224,8292.22,123.463,1024,30.35,2.94,4.62\r\nmobilenetv2_110d,224,8258.5,123.97,1024,4.52,0.45,8.71\r\ncs3darknet_m,256,8246.68,124.148,1024,9.31,2.08,5.28\r\nselecsls42b,224,8229.46,124.405,1024,32.46,2.98,4.62\r\nvit_small_patch16_224,224,8214.74,124.63,1024,22.05,4.61,11.95\r\nconvnext_pico_ols,224,8211.99,124.672,1024,9.06,1.43,6.5\r\ndeit_small_patch16_224,224,8206.39,124.757,1024,22.05,4.61,11.95\r\nstarnet_s3,224,8192.2,124.974,1024,5.75,0.76,6.66\r\ntf_efficientnetv2_b2,208,8167.2,125.356,1024,10.1,1.06,6.0\r\ndeit_small_distilled_patch16_224,224,8137.76,125.809,1024,22.44,4.63,12.02\r\nlevit_384,224,8114.78,126.166,1024,39.07,2.35,6.26\r\nfastvit_t8,256,8088.08,126.578,1024,4.0,0.69,6
.59\r\nnf_regnet_b0,256,8057.64,127.06,1024,8.76,0.64,5.58\r\nghostnetv3_100,224,7965.58,128.529,1024,6.15,0.17,4.55\r\nseresnet18,288,7850.96,130.406,1024,11.78,3.01,4.11\r\nresnet50d,160,7823.71,130.861,1024,25.58,2.22,6.08\r\nlevit_conv_384,224,7809.48,131.099,1024,39.07,2.35,6.26\r\nedgenext_x_small,256,7784.64,131.512,1024,2.34,0.54,5.93\r\nskresnet18,224,7776.19,131.66,1024,11.96,1.82,3.24\r\nhrnet_w18_small,224,7770.75,131.751,1024,13.19,1.61,5.72\r\nconvnext_atto,288,7754.57,132.028,1024,3.7,0.91,6.3\r\ntf_efficientnet_b0,224,7726.91,132.5,1024,5.29,0.4,6.75\r\nseresnet34,224,7726.16,132.513,1024,21.96,3.67,3.74\r\ngernet_m,224,7690.21,133.132,1024,21.14,3.02,5.24\r\nefficientnet_b0_gn,224,7667.91,133.52,1024,5.29,0.42,6.75\r\nvit_base_patch32_siglip_gap_256,256,7652.76,133.785,1024,87.47,5.67,6.54\r\nsemnasnet_140,224,7635.09,134.09,1024,6.11,0.6,8.87\r\nghostnetv2_100,224,7627.44,134.228,1024,6.16,0.18,4.55\r\nresnetv2_34d,224,7616.6,134.42,1024,21.82,3.91,4.54\r\nvit_pwee_patch16_reg1_gap_256,256,7571.28,135.225,1024,15.25,4.37,15.87\r\nvit_base_patch32_siglip_256,256,7557.1,135.479,1024,94.55,5.75,6.64\r\nhgnetv2_b1,224,7527.9,136.004,1024,6.34,0.49,2.73\r\nmambaout_femto,224,7487.3,136.742,1024,7.3,1.16,8.34\r\nswiftformer_xs,224,7486.39,136.757,1024,3.48,0.61,6.45\r\nmobilenetv2_140,224,7462.69,137.192,1024,6.11,0.6,9.57\r\nmixer_b32_224,224,7398.44,138.384,1024,60.29,3.24,6.29\r\nconvnext_atto_ols,288,7377.73,138.773,1024,3.7,0.96,6.8\r\nlegacy_seresnet34,224,7354.5,139.211,1024,21.96,3.67,3.74\r\nmobileone_s3,224,7352.28,139.25,1024,10.08,1.9,9.13\r\nmobilenetv4_hybrid_medium,224,7343.12,139.426,1024,11.07,0.98,6.84\r\nfbnetv3_d,224,7330.36,139.669,1024,10.31,0.52,8.5\r\nresnet34d,224,7277.06,140.693,1024,21.82,3.91,4.54\r\nresnetv2_18d,288,7274.76,140.736,1024,11.71,3.4,5.43\r\nmobilenet_edgetpu_v2_m,224,7216.05,141.882,1024,8.46,1.85,8.15\r\ndeit3_small_patch16_224,224,7214.24,141.918,1024,22.06,4.61,11.95\r\nvit_wee_patch16_reg1_gap_256,256,7193.7
7,142.321,1024,13.42,3.83,13.9\r\nseresnet50,160,7149.07,143.211,1024,28.09,2.1,5.69\r\nmixnet_s,224,7123.21,143.731,1024,4.13,0.25,6.25\r\nflexivit_small,240,7110.07,143.998,1024,22.06,5.35,14.18\r\ndla34,224,7084.96,144.508,1024,15.74,3.07,5.02\r\ntf_efficientnetv2_b1,240,7054.63,145.129,1024,8.14,1.21,7.34\r\nefficientnet_b0,256,7035.98,145.514,1024,5.29,0.52,8.81\r\nefficientnet_lite1,240,7015.45,145.94,1024,5.42,0.62,10.14\r\nselecsls60b,224,7005.75,146.138,1024,32.77,3.63,5.52\r\nselecsls60,224,6998.0,146.301,1024,30.67,3.59,5.52\r\ntiny_vit_5m_224,224,6975.9,146.766,1024,12.08,1.27,11.25\r\nfbnetv3_b,256,6970.51,146.88,1024,8.6,0.55,9.1\r\ncs3darknet_focus_m,288,6901.62,148.348,1024,9.3,2.51,6.19\r\neva02_tiny_patch14_224,224,6891.46,148.566,1024,5.5,1.7,9.14\r\necaresnet50t,160,6878.5,148.842,1024,25.57,2.21,6.04\r\nmixer_s16_224,224,6875.09,148.921,1024,18.53,3.79,5.97\r\nefficientvit_b1,256,6868.68,149.058,1024,9.1,0.69,9.46\r\nresmlp_12_224,224,6855.12,149.354,1024,15.35,3.01,5.5\r\nregnetx_016,224,6830.89,149.883,1024,9.19,1.62,7.93\r\nresnet50,176,6828.7,149.931,1024,25.56,2.62,6.92\r\nresnet26,224,6826.51,149.98,1024,16.0,2.36,7.35\r\nresnet18d,288,6755.65,151.554,1024,11.71,3.41,5.43\r\nmobilenetv4_conv_blur_medium,224,6752.65,151.621,1024,9.72,1.22,8.58\r\nresnetrs50,160,6743.88,151.817,1024,35.69,2.29,6.2\r\nresnext50_32x4d,160,6733.61,152.05,1024,25.03,2.17,7.35\r\nefficientnet_blur_b0,224,6724.65,152.252,1024,5.29,0.43,8.72\r\nconvnext_femto,288,6689.16,153.06,1024,5.22,1.3,7.56\r\nconvnextv2_atto,224,6645.89,154.057,1024,3.71,0.55,3.81\r\nrexnetr_130,224,6642.14,154.144,1024,7.61,0.68,9.81\r\nrexnet_130,224,6641.34,154.162,1024,7.56,0.68,9.71\r\nresnetaa34d,224,6637.83,154.244,1024,21.82,4.43,5.07\r\nefficientnet_es,224,6567.44,155.897,1024,5.44,1.81,8.73\r\nefficientnet_es_pruned,224,6557.94,156.123,1024,5.44,1.81,8.73\r\nefficientnet_b0_g16_evos,224,6519.03,157.052,1024,8.11,1.01,7.42\r\nxcit_tiny_12_p16_224,224,6503.54,157.429,1024,6.72,1.24,6
.29\r\ntf_mixnet_s,224,6475.36,158.109,1024,4.13,0.25,6.25\r\nmambaout_kobe,224,6461.8,158.446,1024,9.14,1.52,10.0\r\ncs3darknet_m,288,6445.43,158.848,1024,9.31,2.63,6.69\r\nfasternet_s,224,6413.07,159.65,1024,31.18,4.56,7.93\r\nconvnext_femto_ols,288,6404.65,159.86,1024,5.23,1.35,8.06\r\nefficientnet_b0_g8_gn,224,6380.32,160.469,1024,6.56,0.66,6.75\r\nvit_medium_patch16_clip_224,224,6367.55,160.792,1024,38.59,8.0,15.93\r\ngmixer_12_224,224,6296.57,162.604,1024,12.7,2.67,7.26\r\nmobilenet_edgetpu_v2_l,224,6274.8,163.169,1024,10.92,2.55,9.05\r\nmobilenetv4_conv_aa_medium,256,6249.76,163.823,1024,9.72,1.58,10.3\r\nconvnext_nano,224,6224.5,164.488,1024,15.59,2.46,8.37\r\ntf_efficientnet_lite1,240,6214.43,164.753,1024,5.42,0.62,10.14\r\necaresnet50d_pruned,224,6209.52,164.884,1024,19.94,2.53,6.43\r\nresnet26d,224,6184.65,165.548,1024,16.01,2.6,8.15\r\nefficientnet_b1,224,6181.86,165.622,1024,7.79,0.59,9.36\r\ntf_efficientnet_es,224,6181.76,165.625,1024,5.44,1.81,8.73\r\nswiftformer_s,224,6167.48,166.008,1024,6.09,0.99,7.81\r\nhgnetv2_b0,288,6158.77,166.243,1024,6.0,0.54,3.51\r\nhgnetv2_b4,224,6126.27,167.125,1024,19.8,2.75,6.7\r\nvit_base_patch32_plus_256,256,6123.83,167.192,1024,119.48,7.79,7.76\r\nedgenext_x_small,288,6118.95,167.32,1024,2.34,0.68,7.5\r\nrepvit_m3,224,6108.74,167.604,1024,10.12,1.86,11.43\r\nghostnetv3_130,224,6100.26,167.838,1024,8.95,0.28,5.9\r\nregnety_016,224,6027.68,169.859,1024,11.2,1.63,8.04\r\nmobilenetv2_120d,224,6024.37,169.951,1024,5.83,0.69,11.97\r\nvit_tiny_patch16_384,384,5997.92,170.703,1024,5.79,4.7,25.39\r\nnf_regnet_b1,256,5984.55,171.083,1024,10.22,0.82,7.27\r\nmobilenetv4_conv_medium,320,5970.31,171.492,1024,9.72,1.71,11.84\r\nnf_regnet_b2,240,5937.65,172.435,1024,14.31,0.97,7.23\r\nvit_relpos_small_patch16_224,224,5934.22,172.534,1024,21.98,4.59,13.05\r\nvit_srelpos_small_patch16_224,224,5916.91,173.04,1024,21.97,4.59,12.16\r\nghostnetv2_130,224,5913.39,173.142,1024,8.96,0.28,5.9\r\ndarknet17,256,5912.74,173.162,1024,14.3,3.26,7.1
8\r\nnf_resnet26,224,5887.09,173.916,1024,16.0,2.41,7.35\r\nmobilevitv2_075,256,5885.44,173.965,1024,2.87,1.05,12.06\r\ntiny_vit_11m_224,224,5883.48,174.021,1024,20.35,2.03,13.49\r\nfbnetv3_d,256,5861.4,174.679,1024,10.31,0.68,11.1\r\nmobilenetv4_hybrid_medium,256,5853.2,174.923,1024,11.07,1.29,9.01\r\nresnetblur18,288,5826.83,175.715,1024,11.69,3.87,5.6\r\nefficientnet_b2_pruned,260,5817.85,175.986,1024,8.31,0.73,9.13\r\nrexnetr_150,224,5815.52,176.057,1024,9.78,0.89,11.13\r\npoolformer_s12,224,5799.83,176.533,1024,11.92,1.82,5.53\r\nefficientnet_cc_b0_4e,224,5793.35,176.731,1024,13.31,0.41,9.42\r\nefficientnet_cc_b0_8e,224,5784.11,177.014,1024,24.01,0.42,9.42\r\nmobilenet_edgetpu_v2_m,256,5779.2,177.163,1024,8.46,2.42,10.65\r\ngmlp_ti16_224,224,5771.97,177.386,1024,5.87,1.34,7.55\r\nstarnet_s4,224,5763.11,177.659,1024,7.48,1.05,9.56\r\nrexnet_150,224,5755.56,177.89,1024,9.73,0.9,11.21\r\nvit_relpos_small_patch16_rpn_224,224,5731.66,178.631,1024,21.97,4.59,13.05\r\nvit_dwee_patch16_reg1_gap_256,256,5700.32,179.615,1024,13.43,3.83,17.6\r\nconvnext_nano_ols,224,5638.47,181.586,1024,15.65,2.65,9.38\r\nconvnextv2_femto,224,5637.5,181.617,1024,5.23,0.79,4.57\r\nefficientformer_l1,224,5636.31,181.655,1024,12.29,1.3,5.53\r\nedgenext_small,256,5621.06,182.144,1024,5.59,1.26,9.07\r\nrepvgg_b1g4,224,5601.29,182.79,1024,36.13,7.31,5.32\r\nresnext50_32x4d,176,5598.76,182.874,1024,25.03,2.71,8.97\r\nfastvit_t12,256,5516.68,185.595,1024,7.51,1.39,9.57\r\nefficientnet_lite2,260,5504.54,186.004,1024,6.09,0.89,12.9\r\nmobilenetv4_conv_blur_medium,256,5489.72,186.508,1024,9.72,1.59,11.2\r\nregnetz_005,288,5477.32,186.928,1024,7.12,0.86,9.68\r\ndarknet21,256,5470.39,187.166,1024,20.86,3.93,7.47\r\nresnet101,160,5462.45,187.438,1024,44.55,4.0,8.28\r\nefficientnet_b1,240,5460.77,187.496,1024,7.79,0.71,10.88\r\nresnest14d,224,5410.44,189.24,1024,10.61,2.76,7.33\r\nrepvit_m1_5,224,5370.76,190.638,1024,14.05,2.27,12.84\r\nefficientvit_b1,288,5341.29,191.689,1024,9.1,0.87,11.96\r\nresnetv2
_34,288,5328.51,192.151,1024,21.8,6.07,6.18\r\ntf_efficientnet_cc_b0_4e,224,5320.14,192.452,1024,13.31,0.41,9.42\r\nhgnet_tiny,224,5285.92,193.698,1024,14.74,4.54,6.36\r\nmobilenetv3_large_150d,256,5283.03,193.805,1024,14.62,1.03,12.35\r\nsedarknet21,256,5281.43,193.859,1024,20.95,3.93,7.47\r\nvit_relpos_base_patch32_plus_rpn_256,256,5253.06,194.907,1024,119.42,7.68,8.01\r\ntf_efficientnetv2_b2,260,5217.44,196.241,1024,10.1,1.72,9.84\r\ngernet_l,256,5216.84,196.263,1024,31.08,4.57,8.0\r\nmobilenetv4_conv_large,256,5206.85,196.64,1024,32.59,2.86,12.14\r\ntf_efficientnet_cc_b0_8e,224,5197.64,196.989,1024,24.01,0.42,9.42\r\nconvnext_pico,288,5187.9,197.359,1024,9.05,2.27,10.08\r\nregnetz_b16,224,5186.38,197.416,1024,9.72,1.45,9.95\r\nmobileone_s4,224,5169.92,198.045,1024,14.84,2.98,11.81\r\ncs3darknet_focus_l,256,5153.59,198.672,1024,21.15,4.66,8.03\r\ntf_efficientnetv2_b3,240,5135.47,199.374,1024,14.36,1.93,9.95\r\nvit_dpwee_patch16_reg1_gap_256,256,5131.9,199.513,1024,15.25,4.37,19.05\r\nefficientnetv2_rw_t,224,5123.45,199.84,1024,13.65,1.93,9.94\r\nvit_little_patch16_reg1_gap_256,256,5122.21,199.889,1024,22.52,6.27,18.06\r\npvt_v2_b1,224,5117.9,200.058,1024,14.01,2.12,15.39\r\nhgnetv2_b2,224,5111.38,200.313,1024,11.22,1.15,4.12\r\nresnext26ts,256,5105.54,200.543,1024,10.3,2.43,10.52\r\nvit_little_patch16_reg4_gap_256,256,5068.87,201.993,1024,22.52,6.35,18.33\r\necaresnet101d_pruned,224,5068.48,202.0,1024,24.88,3.48,7.69\r\nedgenext_small_rw,256,5062.35,202.25,1024,7.83,1.58,9.51\r\nresnet34,288,5051.73,202.679,1024,21.8,6.07,6.18\r\nmixnet_m,224,5033.67,203.406,1024,5.01,0.36,8.19\r\ncrossvit_small_240,240,5028.96,203.597,1024,26.86,5.63,18.17\r\nfastvit_s12,256,5006.01,204.53,1024,9.43,1.8,10.82\r\ngc_efficientnetv2_rw_t,224,5002.27,204.682,1024,13.68,1.94,9.97\r\nlegacy_seresnext26_32x4d,224,5001.51,204.715,1024,16.79,2.49,9.39\r\nconvnext_pico_ols,288,4977.14,205.717,1024,9.06,2.37,10.74\r\ndpn48b,224,4972.63,205.898,1024,9.13,1.69,8.92\r\ntf_efficientnet_b1,240,
4964.34,206.247,1024,7.79,0.71,10.88\r\nmobilevit_xs,256,4955.23,206.627,1024,2.32,1.05,16.33\r\nefficientnet_b1,256,4938.78,207.309,1024,7.79,0.77,12.22\r\nseresnext26ts,256,4937.42,207.372,1024,10.39,2.43,10.52\r\nnf_ecaresnet26,224,4927.37,207.795,1024,16.0,2.41,7.36\r\nnf_seresnet26,224,4922.91,207.983,1024,17.4,2.41,7.36\r\nghostnetv3_160,224,4922.02,208.021,1024,12.38,0.41,7.23\r\neca_resnext26ts,256,4921.09,208.058,1024,10.3,2.43,10.52\r\nresnet26t,256,4914.8,208.324,1024,16.01,3.35,10.52\r\ntf_efficientnet_lite2,260,4901.17,208.904,1024,6.09,0.89,12.9\r\ngcresnext26ts,256,4880.79,209.778,1024,10.48,2.43,10.53\r\nfastvit_sa12,256,4879.37,209.839,1024,11.55,1.94,11.24\r\ndeit3_medium_patch16_224,224,4876.31,209.971,1024,38.85,8.0,15.93\r\ncs3darknet_l,256,4871.75,210.167,1024,21.16,4.86,8.55\r\nconvnext_tiny,224,4814.88,212.65,1024,28.59,4.47,13.44\r\necaresnetlight,224,4807.46,212.976,1024,30.16,4.11,8.42\r\ntf_mixnet_m,224,4800.47,213.288,1024,5.01,0.36,8.19\r\nghostnetv2_160,224,4794.43,213.556,1024,12.39,0.42,7.23\r\nlevit_512,224,4765.53,214.853,1024,95.08,5.62,10.22\r\nswiftformer_l1,224,4753.92,215.376,1024,12.06,1.6,10.07\r\nseresnext26t_32x4d,224,4745.85,215.744,1024,16.81,2.7,10.09\r\necaresnext50t_32x4d,224,4731.98,216.374,1024,15.41,2.7,10.09\r\necaresnext26t_32x4d,224,4725.1,216.691,1024,15.41,2.7,10.09\r\nese_vovnet19b_dw,288,4721.08,216.875,1024,6.54,2.22,13.63\r\nvit_small_resnet26d_224,224,4709.37,217.415,1024,63.61,5.07,11.12\r\nseresnet34,288,4709.22,217.422,1024,21.96,6.07,6.18\r\ncrossvit_15_240,240,4706.59,217.544,1024,27.53,5.81,19.77\r\ntresnet_m,224,4703.02,217.706,1024,31.39,5.75,7.31\r\nefficientnet_b2,256,4699.87,217.855,1024,9.11,0.89,12.81\r\nseresnext26d_32x4d,224,4695.18,218.072,1024,16.81,2.73,10.19\r\nnf_regnet_b1,288,4664.95,219.486,1024,10.22,1.02,9.2\r\nvit_small_r26_s32_224,224,4662.91,219.581,1024,36.43,3.56,9.85\r\ncoatnext_nano_rw_224,224,4661.95,219.626,1024,14.7,2.47,12.8\r\nvit_relpos_medium_patch16_cls_224,224,4646.
03,220.379,1024,38.76,8.03,18.24\r\nresnetv2_34d,288,4623.75,221.442,1024,21.82,6.46,7.51\r\nefficientnet_b3_pruned,300,4616.16,221.805,1024,9.86,1.04,11.86\r\nlevit_conv_512,224,4578.3,223.639,1024,95.08,5.62,10.22\r\nhgnetv2_b1,288,4577.6,223.675,1024,6.34,0.82,4.51\r\nselecsls84,224,4576.28,223.734,1024,50.95,5.9,7.57\r\ncs3sedarknet_l,256,4553.42,224.857,1024,21.91,4.86,8.56\r\nresnetv2_50,224,4543.78,225.339,1024,25.55,4.11,11.11\r\ncoatnet_pico_rw_224,224,4521.26,226.461,1024,10.85,2.05,14.62\r\nmambaout_femto,288,4517.65,226.642,1024,7.3,1.91,13.79\r\nresnet101,176,4517.44,226.653,1024,44.55,4.92,10.08\r\nnf_regnet_b2,272,4514.25,226.813,1024,14.31,1.22,9.27\r\nlevit_512d,224,4513.34,226.859,1024,92.39,5.84,11.3\r\nskresnet34,224,4505.83,227.237,1024,22.28,3.67,5.13\r\nvgg11,224,4503.99,227.328,1024,132.86,7.61,7.44\r\ncoat_lite_tiny,224,4476.77,228.712,1024,5.72,1.6,11.65\r\nwide_resnet50_2,176,4435.27,230.853,1024,68.88,7.29,8.97\r\nrepvgg_b1,224,4430.21,231.113,1024,51.83,11.82,5.32\r\nmobilevitv2_100,256,4426.35,231.319,1024,4.9,1.84,16.08\r\nresnet34d,288,4421.66,231.564,1024,21.82,6.47,7.51\r\nregnetx_032,224,4409.8,232.182,1024,15.3,3.2,11.37\r\ncrossvit_15_dagger_240,240,4402.26,232.583,1024,28.21,6.13,20.43\r\nconvit_tiny,224,4379.04,233.817,1024,5.71,1.26,7.94\r\neca_botnext26ts_256,256,4370.04,234.297,1024,10.59,2.46,11.6\r\necaresnet26t,256,4363.89,234.629,1024,16.01,3.35,10.53\r\nresnet32ts,256,4340.01,235.92,1024,17.96,4.63,11.58\r\nvovnet39a,224,4332.01,236.356,1024,22.6,7.09,6.73\r\nresnetv2_50t,224,4323.27,236.833,1024,25.57,4.32,11.82\r\neca_halonext26ts,256,4309.34,237.596,1024,10.76,2.44,11.46\r\nresnet50,224,4302.84,237.959,1024,25.56,4.11,11.11\r\ncspresnet50,256,4298.14,238.219,1024,21.62,4.54,11.5\r\nconvnextv2_pico,224,4294.74,238.407,1024,9.07,1.37,6.1\r\ndla60,224,4293.32,238.486,1024,22.04,4.26,10.16\r\nresnet33ts,256,4282.81,239.072,1024,19.68,4.76,11.66\r\nresnetv2_50d,224,4276.86,239.405,1024,25.57,4.35,11.92\r\nrexnetr_200,224,
4250.15,240.909,1024,16.52,1.59,15.11\r\nrexnet_200,224,4246.94,241.09,1024,16.37,1.56,14.91\r\nhrnet_w18_small_v2,224,4242.4,241.348,1024,15.6,2.62,9.65\r\ncoat_lite_mini,224,4225.28,242.324,1024,11.01,2.0,12.25\r\nfbnetv3_g,240,4223.76,242.413,1024,16.62,1.28,14.87\r\nlevit_conv_512d,224,4220.02,242.628,1024,92.39,5.84,11.3\r\nvit_small_patch16_rope_mixed_224,224,4196.99,243.96,1024,21.99,4.61,12.85\r\nvit_small_patch16_rope_224,224,4196.46,243.988,1024,21.98,4.61,11.95\r\nbotnet26t_256,256,4196.31,244.0,1024,12.49,3.32,11.98\r\nregnety_032,224,4190.43,244.341,1024,19.44,3.2,11.26\r\nresnet26,288,4185.62,244.623,1024,16.0,3.9,12.15\r\nvit_small_patch16_rope_ape_224,224,4169.06,245.593,1024,22.06,4.61,11.95\r\ncoatnet_nano_cc_224,224,4166.28,245.758,1024,13.76,2.24,15.02\r\nvit_small_patch16_rope_mixed_ape_224,224,4163.69,245.91,1024,22.06,4.61,12.85\r\nefficientvit_b2,224,4160.62,246.092,1024,24.33,1.6,14.62\r\nvisformer_small,224,4155.65,246.385,1024,40.22,4.88,11.43\r\nhgnetv2_b3,224,4152.43,246.578,1024,16.29,1.78,5.07\r\nmobilenetv4_hybrid_large_075,256,4149.31,246.764,1024,22.75,2.06,11.64\r\nvit_relpos_medium_patch16_224,224,4148.14,246.833,1024,38.75,7.97,17.02\r\ndpn68,224,4143.27,247.121,1024,12.61,2.35,10.47\r\nese_vovnet39b,224,4141.18,247.249,1024,24.57,7.09,6.74\r\nvit_dlittle_patch16_reg1_gap_256,256,4136.27,247.541,1024,22.52,6.27,22.69\r\neca_vovnet39b,224,4132.97,247.734,1024,22.6,7.09,6.74\r\nseresnet33ts,256,4129.07,247.973,1024,19.78,4.76,11.66\r\nvit_base_patch32_clip_384,384,4129.06,247.975,1024,88.3,13.06,16.5\r\nhalonet26t,256,4117.95,248.644,1024,12.48,3.19,11.69\r\ncoatnet_nano_rw_224,224,4113.95,248.884,1024,15.14,2.41,15.41\r\nvit_srelpos_medium_patch16_224,224,4107.78,249.259,1024,38.74,7.96,16.21\r\nresnet50t,224,4098.26,249.839,1024,25.57,4.32,11.82\r\nmobilenetv4_conv_medium,384,4094.66,250.059,1024,9.72,2.46,17.05\r\neca_resnet33ts,256,4093.99,250.096,1024,19.68,4.76,11.66\r\ndpn68b,224,4088.83,250.406,1024,12.61,2.35,10.47\r\ngcre
snet33ts,256,4076.5,251.152,1024,19.88,4.76,11.68\r\nresnet50d,224,4063.89,251.952,1024,25.58,4.35,11.92\r\nvit_base_resnet26d_224,224,4055.98,252.441,1024,101.4,6.97,13.16\r\ncs3darknet_focus_l,288,4040.09,253.436,1024,21.15,5.9,10.16\r\nconvnextv2_atto,288,4025.07,254.382,1024,3.71,0.91,6.3\r\nresnetaa34d,288,4024.49,254.419,1024,21.82,7.33,8.38\r\nresnetv2_50x1_bit,224,4018.86,254.776,1024,25.55,4.23,11.11\r\nresnet50c,224,4012.17,255.199,1024,25.58,4.35,11.92\r\nbat_resnext26ts,256,4005.7,255.612,1024,10.73,2.53,12.51\r\nefficientnet_em,240,4000.12,255.969,1024,6.9,3.04,14.34\r\ndavit_tiny,224,3990.13,192.45,768,28.36,4.54,18.89\r\nresnext26ts,288,3987.14,256.802,1024,10.3,3.07,13.31\r\nregnetv_040,224,3982.3,257.111,1024,20.64,4.0,12.29\r\nresnetaa50,224,3977.29,257.438,1024,25.56,5.15,11.64\r\nmambaout_tiny,224,3965.38,258.211,1024,26.55,4.49,16.68\r\ncspresnet50w,256,3960.95,258.5,1024,28.12,5.04,12.19\r\ncspresnet50d,256,3926.18,260.789,1024,21.64,4.86,12.55\r\ntf_efficientnet_b2,260,3912.14,261.725,1024,9.11,1.02,13.83\r\nvgg11_bn,224,3911.97,261.735,1024,132.87,7.62,7.44\r\nregnety_040,224,3910.51,261.833,1024,20.65,4.0,12.29\r\nefficientnet_b1,288,3908.45,261.972,1024,7.79,0.97,15.46\r\nconvnext_tiny_hnf,224,3904.45,262.241,1024,28.59,4.47,13.44\r\nmambaout_kobe,288,3903.88,262.274,1024,9.14,2.5,16.53\r\nvit_relpos_medium_patch16_rpn_224,224,3890.52,263.179,1024,38.73,7.97,17.02\r\nresnet152,160,3888.26,263.333,1024,60.19,5.9,11.51\r\nresnest26d,224,3883.09,263.684,1024,17.07,3.64,9.97\r\nresnetv2_50d_gn,224,3877.22,264.083,1024,25.57,4.38,11.92\r\neca_resnext26ts,288,3850.54,265.91,1024,10.3,3.07,13.32\r\nmobilevit_s,256,3846.91,266.163,1024,5.58,2.03,19.94\r\nresnet50_gn,224,3846.32,266.205,1024,25.56,4.14,11.11\r\necaresnet50d_pruned,288,3839.25,266.695,1024,19.94,4.19,10.61\r\ntf_efficientnet_em,240,3838.09,266.775,1024,6.9,3.04,14.34\r\ncs3darknet_l,288,3836.94,266.855,1024,21.16,6.16,10.83\r\nhgnetv2_b4,288,3836.12,266.912,1024,19.8,4.54,11.08\r\nre
gnetx_040,224,3834.39,267.031,1024,22.12,3.99,12.2\r\nlegacy_seresnet50,224,3832.4,267.17,1024,28.09,3.88,10.6\r\nmixnet_l,224,3827.44,267.517,1024,7.33,0.58,10.84\r\nseresnext26ts,288,3825.31,267.664,1024,10.39,3.07,13.32\r\ngcresnext26ts,288,3822.84,267.841,1024,10.48,3.07,13.33\r\nres2net50_48w_2s,224,3810.11,268.735,1024,25.29,4.18,11.72\r\ntwins_pcpvt_small,224,3802.78,269.25,1024,24.11,3.83,18.08\r\nvit_medium_patch16_gap_240,240,3800.19,269.437,1024,44.4,9.22,18.81\r\nhaloregnetz_b,224,3797.48,269.628,1024,11.68,1.97,11.94\r\ntiny_vit_21m_224,224,3790.67,270.11,1024,33.21,4.27,20.08\r\nrepvgg_b2g4,224,3783.78,270.602,1024,55.78,11.33,6.45\r\nvit_base_patch32_384,384,3782.32,270.711,1024,88.3,13.06,16.5\r\nresnet26d,288,3778.75,270.966,1024,16.01,4.29,13.48\r\nconvnext_nano,288,3777.84,271.025,1024,15.59,4.06,13.84\r\nresnetaa50d,224,3772.34,271.426,1024,25.58,5.39,12.44\r\nresnet50_clip_gap,224,3754.78,272.695,1024,23.53,5.39,12.44\r\nefficientnet_cc_b1_8e,240,3736.95,273.996,1024,39.72,0.75,15.44\r\nseresnet50,224,3719.34,275.293,1024,28.09,4.11,11.13\r\nefficientnet_b2,288,3716.39,275.512,1024,9.11,1.12,16.2\r\nvit_base_r26_s32_224,224,3701.4,276.628,1024,101.38,6.81,12.36\r\nresnetblur50,224,3696.78,276.974,1024,25.56,5.16,12.02\r\nvovnet57a,224,3694.01,277.181,1024,36.64,8.95,7.52\r\ngcvit_xxtiny,224,3662.78,279.544,1024,12.0,2.14,15.36\r\ntf_mixnet_l,224,3646.49,280.794,1024,7.33,0.58,10.84\r\nvit_medium_patch16_reg4_gap_256,256,3638.65,281.399,1024,38.88,10.76,22.6\r\ninception_v3,299,3636.22,281.586,1024,23.83,5.73,8.97\r\nvit_medium_patch16_reg1_gap_256,256,3635.87,281.614,1024,38.88,10.63,22.26\r\nvit_large_patch32_224,224,3634.61,281.711,1024,305.51,15.39,13.3\r\ndensenet121,224,3630.79,282.008,1024,7.98,2.87,6.9\r\nmobilenetv4_hybrid_medium,320,3613.05,283.393,1024,11.07,2.05,14.36\r\nedgenext_small,320,3587.29,285.422,1024,5.59,1.97,14.16\r\nresnet50_clip,224,3579.76,286.028,1024,38.32,6.14,12.98\r\nfastvit_mci0,256,3577.34,286.223,1024,11.36,2.39
,14.72\r\necaresnet50t,224,3564.15,287.281,1024,25.57,4.32,11.83\r\ninception_next_tiny,224,3561.01,287.534,1024,28.06,4.19,11.98\r\ncs3sedarknet_l,288,3560.33,287.59,1024,21.91,6.16,10.83\r\nseresnet50t,224,3558.74,287.718,1024,28.1,4.32,11.83\r\ndla60x,224,3556.82,287.873,1024,17.35,3.54,13.8\r\ncoatnet_rmlp_nano_rw_224,224,3556.21,287.922,1024,15.15,2.62,20.34\r\necaresnet50d,224,3542.53,289.035,1024,25.58,4.35,11.93\r\nmobilevitv2_125,256,3520.94,290.808,1024,7.48,2.86,20.1\r\nresnetblur50d,224,3517.08,291.126,1024,25.58,5.4,12.82\r\nres2net50_26w_4s,224,3514.05,291.377,1024,25.7,4.28,12.61\r\nresnetrs50,224,3504.19,292.198,1024,35.69,4.48,12.14\r\ntf_efficientnet_cc_b1_8e,240,3498.66,292.66,1024,39.72,0.75,15.44\r\nnf_regnet_b3,288,3495.19,292.946,1024,18.59,1.67,11.84\r\nxcit_tiny_24_p16_224,224,3493.87,293.06,1024,12.12,2.34,11.82\r\nresnext50_32x4d,224,3493.48,293.094,1024,25.03,4.26,14.4\r\nresmlp_24_224,224,3489.08,293.463,1024,30.02,5.96,10.91\r\nres2net50_14w_8s,224,3465.47,295.462,1024,25.06,4.21,13.28\r\nhgnetv2_b5,224,3460.95,295.848,1024,39.57,6.56,11.19\r\nregnety_040_sgn,224,3460.76,295.865,1024,20.65,4.03,12.29\r\nvit_pe_core_tiny_patch16_384,384,3459.23,295.996,1024,6.14,4.74,25.62\r\nresnet50s,224,3454.21,296.427,1024,25.68,5.47,13.52\r\ndla60_res2net,224,3444.42,297.267,1024,20.85,4.15,12.34\r\ncoatnet_0_rw_224,224,3443.62,297.336,1024,27.44,4.43,18.73\r\nmobilenetv4_conv_large,320,3440.88,297.574,1024,32.59,4.47,18.97\r\nconvnext_nano_ols,288,3424.04,299.038,1024,15.65,4.38,15.5\r\nresnest50d_1s4x24d,224,3422.68,299.153,1024,25.68,4.43,13.57\r\nhgnet_small,224,3421.73,299.24,1024,24.36,8.53,8.79\r\nmobilenetv3_large_150d,320,3420.42,299.354,1024,14.62,1.61,19.29\r\nconvnextv2_femto,288,3420.27,299.366,1024,5.23,1.3,7.56\r\nnfnet_l0,224,3410.42,300.232,1024,35.07,4.36,10.47\r\neca_nfnet_l0,224,3404.79,300.723,1024,24.14,4.35,10.47\r\nmaxvit_pico_rw_256,256,3399.42,301.203,1024,7.46,1.83,22.3\r\nmaxvit_rmlp_pico_rw_256,256,3395.79,301.522,1024,7
.52,1.85,24.86\r\ncrossvit_18_240,240,3394.14,301.673,1024,43.27,9.05,26.26\r\ncs3darknet_focus_x,256,3390.11,302.031,1024,35.02,8.03,10.69\r\ncspresnext50,256,3375.82,303.308,1024,20.57,4.05,15.86\r\nxcit_nano_12_p16_384,384,3363.55,304.415,1024,3.05,1.64,12.15\r\nvit_medium_patch16_gap_256,256,3359.14,304.816,1024,38.86,10.59,22.15\r\nvit_base_patch16_clip_224,224,3355.89,305.112,1024,86.57,17.58,23.9\r\nres2net50d,224,3353.85,305.295,1024,25.72,4.52,13.41\r\nedgenext_base,256,3352.88,305.381,1024,18.51,3.85,15.58\r\nnfnet_f0,192,3351.17,305.542,1024,71.49,7.21,10.16\r\nresnet32ts,288,3349.34,305.706,1024,17.96,5.86,14.65\r\nefficientvit_b2,256,3348.83,305.752,1024,24.33,2.09,19.03\r\nefficientnet_lite3,300,3345.72,306.039,1024,8.2,1.65,21.85\r\nresnext50d_32x4d,224,3330.35,307.451,1024,25.05,4.5,15.2\r\ntf_efficientnetv2_b3,300,3308.62,309.469,1024,14.36,3.04,15.74\r\nresnet33ts,288,3305.75,309.739,1024,19.68,6.02,14.75\r\nseresnetaa50d,224,3301.04,310.181,1024,28.11,5.4,12.46\r\ndla60_res2next,224,3295.5,310.703,1024,17.03,3.49,13.17\r\ndensenetblur121d,224,3281.35,312.042,1024,8.0,3.11,7.9\r\nres2next50,224,3277.77,312.38,1024,24.67,4.2,13.71\r\nlambda_resnet26rpt_256,256,3277.55,156.192,512,10.99,3.16,11.87\r\ndarknetaa53,256,3266.25,313.485,1024,36.02,7.97,12.39\r\nefficientvit_l1,224,3263.76,313.719,1024,52.65,5.27,15.85\r\nnextvit_small,224,3243.08,315.725,1024,31.74,5.8,18.44\r\nefficientnetv2_rw_t,288,3242.13,315.817,1024,13.65,3.19,16.42\r\ncs3darknet_x,256,3238.55,316.167,1024,35.05,8.38,11.35\r\ngcresnext50ts,256,3230.7,316.935,1024,15.67,3.75,15.46\r\nfocalnet_tiny_srf,224,3228.64,317.136,1024,28.43,4.42,16.32\r\neva02_small_patch14_224,224,3225.91,317.406,1024,21.62,6.14,18.28\r\nedgenext_small_rw,320,3224.89,317.5,1024,7.83,2.46,14.85\r\ngcresnet50t,256,3221.39,317.851,1024,25.9,5.42,14.67\r\nfasternet_m,224,3213.9,318.591,1024,53.52,8.74,15.34\r\neca_resnet33ts,288,3213.4,318.638,1024,19.68,6.02,14.76\r\nresnet152,176,3212.45,318.734,1024,60.19,7.2
2,13.99\r\ngmixer_24_224,224,3211.3,318.851,1024,24.72,5.28,14.45\r\nvit_small_patch16_dinov3_qkvb,256,3207.43,319.235,1024,21.6,6.26,17.03\r\nvit_small_patch16_dinov3,256,3202.28,319.748,1024,21.59,6.26,17.03\r\nseresnet33ts,288,3190.27,320.951,1024,19.78,6.02,14.76\r\ncrossvit_18_dagger_240,240,3184.8,321.503,1024,44.27,9.5,27.03\r\ngcresnet33ts,288,3183.83,321.596,1024,19.88,6.02,14.78\r\nregnetz_c16,256,3164.03,323.614,1024,13.46,2.51,16.57\r\npit_b_224,224,3157.42,324.291,1024,73.76,12.42,32.94\r\npit_b_distilled_224,224,3153.51,324.694,1024,74.79,12.5,33.07\r\npoolformerv2_s12,224,3152.47,324.801,1024,11.89,1.83,5.53\r\nregnetz_b16,288,3150.08,325.047,1024,9.72,2.39,16.43\r\ngc_efficientnetv2_rw_t,288,3137.05,326.397,1024,13.68,3.2,16.45\r\nresnet26t,320,3134.52,326.662,1024,16.01,5.24,16.44\r\nconvnextv2_nano,224,3126.57,327.491,1024,15.62,2.46,8.37\r\nhgnetv2_b2,288,3124.84,327.673,1024,11.22,1.89,6.8\r\nseresnext50_32x4d,224,3116.39,328.561,1024,27.56,4.26,14.42\r\nlegacy_seresnext50_32x4d,224,3113.73,328.843,1024,27.56,4.26,14.42\r\nresnetrs101,192,3111.76,329.051,1024,63.62,6.04,12.7\r\ntwins_svt_small,224,3109.65,329.27,1024,24.06,2.94,13.75\r\necaresnet101d_pruned,288,3100.67,330.216,1024,24.88,5.75,12.71\r\nregnetx_080,224,3093.01,331.045,1024,39.57,8.02,14.06\r\nrdnet_tiny,224,3090.45,331.319,1024,23.86,5.06,15.98\r\nvit_base_patch32_clip_448,448,3081.57,332.274,1024,88.34,17.93,23.9\r\nrepvit_m2_3,224,3062.83,334.307,1024,22.93,4.52,21.32\r\npvt_v2_b2,224,3045.88,336.167,1024,25.36,4.05,27.53\r\nrepvgg_b3g4,224,3044.04,336.369,1024,75.63,16.06,7.55\r\ncs3sedarknet_x,256,3043.16,336.467,1024,35.4,8.38,11.35\r\neva02_tiny_patch14_336,336,3040.65,336.746,1024,5.76,4.68,27.16\r\nresnet51q,256,3037.92,337.049,1024,35.7,6.38,16.55\r\nfbnetv3_g,288,3029.88,337.943,1024,16.62,1.77,21.09\r\nvit_base_resnet50d_224,224,3022.47,338.772,1024,110.97,8.73,16.92\r\ncoatnet_bn_0_rw_224,224,3020.34,339.01,1024,27.44,4.67,22.04\r\nrepvgg_b2,224,3019.64,339.087,1024,80.
32,18.38,6.45\r\nxcit_small_12_p16_224,224,3005.84,340.646,1024,26.25,4.82,12.58\r\ncoatnet_rmlp_0_rw_224,224,3005.32,340.704,1024,27.45,4.72,24.89\r\ndarknet53,256,3003.97,340.858,1024,41.61,9.31,12.39\r\nvit_base_patch16_224_miil,224,2995.8,341.788,1024,94.4,17.59,23.91\r\nvit_base_patch16_224,224,2992.79,342.132,1024,86.57,17.58,23.9\r\ndeit_base_patch16_224,224,2989.27,342.534,1024,86.57,17.58,23.9\r\ndensenet169,224,2985.32,342.987,1024,14.15,3.4,7.3\r\nconvnext_small,224,2977.1,343.934,1024,50.22,8.71,21.56\r\necaresnetlight,288,2976.26,344.031,1024,30.16,6.79,13.91\r\npoolformer_s24,224,2975.33,344.14,1024,21.39,3.41,10.68\r\ndeit_base_distilled_patch16_224,224,2970.21,344.733,1024,87.34,17.68,24.05\r\nefficientnet_x_b3,288,2969.95,344.762,1024,13.3,3.91,15.6\r\nskresnet50,224,2969.83,344.777,1024,25.8,4.11,12.5\r\ntf_efficientnet_lite3,300,2968.14,344.972,1024,8.2,1.65,21.85\r\nnf_ecaresnet50,224,2966.16,345.203,1024,25.56,4.21,11.13\r\nfocalnet_tiny_lrf,224,2960.42,345.873,1024,28.65,4.49,17.76\r\nnf_seresnet50,224,2959.03,346.034,1024,28.09,4.21,11.13\r\nswiftformer_l3,224,2946.97,347.45,1024,28.49,4.01,15.77\r\nresnet50_mlp,256,2934.38,348.943,1024,26.65,7.05,16.25\r\nregnetx_064,224,2933.49,349.048,1024,26.21,6.49,16.37\r\nsehalonet33ts,256,2930.75,349.368,1024,13.69,3.55,14.7\r\nvit_base_patch16_xp_224,224,2924.82,350.083,1024,86.51,17.56,23.9\r\nhgnet_tiny,288,2920.28,350.627,1024,14.74,7.51,10.51\r\nconvnext_tiny,288,2919.6,350.708,1024,28.59,7.39,22.21\r\nmobilevitv2_150,256,2916.76,263.282,768,10.59,4.09,24.11\r\nseresnext26t_32x4d,288,2912.9,351.515,1024,16.81,4.46,16.68\r\nresnetv2_101,224,2894.56,353.743,1024,44.54,7.83,16.23\r\nseresnext26d_32x4d,288,2879.74,355.563,1024,16.81,4.51,16.85\r\nvitamin_small_224,224,2875.04,178.06,512,22.17,5.92,26.38\r\nnf_regnet_b3,320,2872.34,356.478,1024,18.59,2.05,14.61\r\ngmlp_s16_224,224,2867.89,357.033,1024,19.42,4.42,15.1\r\ngcvit_xtiny,224,2861.09,357.88,1024,19.98,2.93,20.26\r\nskresnet50d,224,2860.9,357.
903,1024,25.82,4.36,13.31\r\ncs3sedarknet_xdw,256,2845.9,359.791,1024,21.6,5.97,17.18\r\nvgg13,224,2841.91,360.295,1024,133.05,11.31,12.25\r\nvit_base_mci_224,224,2839.95,360.546,1024,86.35,17.73,24.65\r\nnf_resnet50,256,2830.0,361.815,1024,25.56,5.46,14.52\r\nrexnetr_300,224,2827.76,271.569,768,34.81,3.39,22.16\r\ndla102,224,2827.12,362.181,1024,33.27,7.19,14.18\r\nwide_resnet50_2,224,2805.63,364.957,1024,68.88,11.43,14.4\r\necaresnet50t,256,2803.23,365.269,1024,25.57,5.64,15.45\r\nresnetv2_50,288,2802.87,365.316,1024,25.55,6.79,18.37\r\nefficientnet_b3,288,2802.08,365.419,1024,12.23,1.63,21.49\r\nhiera_tiny_224,224,2792.32,366.696,1024,27.91,4.91,17.13\r\nresnet61q,256,2789.57,367.031,1024,36.85,7.8,17.01\r\nresnetv2_101d,224,2785.36,367.613,1024,44.56,8.07,17.04\r\necaresnet26t,320,2782.05,368.051,1024,16.01,5.24,16.44\r\nresnet101,224,2781.19,368.164,1024,44.55,7.83,16.23\r\nres2net50_26w_6s,224,2773.77,369.148,1024,37.05,6.33,15.28\r\nmixnet_xl,224,2764.45,370.393,1024,11.9,0.93,14.57\r\nrexnet_300,224,2755.97,278.637,768,34.71,3.44,22.4\r\nswin_tiny_patch4_window7_224,224,2748.8,372.502,1024,28.29,4.51,17.06\r\ndm_nfnet_f0,192,2744.52,373.084,1024,71.49,7.21,10.16\r\ncs3edgenet_x,256,2742.78,373.32,1024,47.82,11.53,12.92\r\ncspdarknet53,256,2741.47,373.497,1024,27.64,6.57,16.81\r\ncsatv2,512,2740.91,373.574,1024,11.1,1.39,9.17\r\ncoatnet_0_224,224,2733.97,187.249,512,25.04,4.58,24.01\r\nhieradet_small,256,2729.59,281.338,768,34.73,8.51,27.76\r\nvit_small_plus_patch16_dinov3,256,2724.6,375.811,1024,28.68,8.11,21.84\r\ndeit3_base_patch16_224,224,2724.48,375.829,1024,86.59,17.58,23.9\r\nvit_small_plus_patch16_dinov3_qkvb,256,2717.21,376.833,1024,28.69,8.11,21.84\r\nbeitv2_base_patch16_224,224,2714.29,377.239,1024,86.53,17.58,23.9\r\nbeit_base_patch16_224,224,2712.53,377.484,1024,86.53,17.58,23.9\r\nwide_resnet101_2,176,2702.95,378.821,1024,126.89,14.31,13.18\r\ntwins_pcpvt_base,224,2701.27,379.052,1024,43.83,6.68,25.25\r\nresnet101d,224,2693.25,380.186,1024,44.57
,8.08,17.04\r\nvit_base_patch16_clip_quickgelu_224,224,2691.95,380.368,1024,86.19,17.58,23.9\r\nese_vovnet57b,256,2690.16,380.621,1024,38.61,11.69,9.82\r\nbeit3_base_patch16_224,224,2684.33,381.45,1024,86.66,17.63,23.9\r\nmaxxvit_rmlp_nano_rw_256,256,2682.57,381.698,1024,16.78,4.37,26.05\r\nfastvit_sa24,256,2668.71,383.681,1024,21.5,3.77,20.35\r\nresnet101c,224,2664.26,384.323,1024,44.57,8.08,17.04\r\nvit_small_patch16_384,384,2649.82,386.418,1024,22.2,15.52,50.78\r\npvt_v2_b2_li,224,2648.35,386.629,1024,22.55,3.91,27.6\r\nvit_betwixt_patch16_reg1_gap_256,256,2642.53,387.483,1024,60.4,16.32,27.83\r\nrexnetr_200,288,2638.88,291.008,768,16.52,2.62,24.96\r\nmixer_b16_224,224,2635.16,388.568,1024,59.88,12.62,14.53\r\nvit_betwixt_patch16_reg4_gap_256,256,2633.09,388.873,1024,60.4,16.52,28.24\r\nresnest50d,224,2633.04,388.88,1024,27.48,5.4,14.36\r\nresnet50,288,2631.02,389.18,1024,25.56,6.8,18.37\r\nvit_base_patch16_siglip_gap_224,224,2621.67,390.567,1024,85.8,17.49,23.75\r\nvit_relpos_base_patch16_clsgap_224,224,2616.9,391.279,1024,86.43,17.6,25.12\r\nresnetv2_34d,384,2615.96,391.42,1024,21.82,11.49,13.35\r\nvit_relpos_base_patch16_cls_224,224,2611.91,392.028,1024,86.43,17.6,25.12\r\nefficientnetv2_s,288,2611.55,392.08,1024,21.46,4.75,20.13\r\nconvnextv2_pico,288,2607.32,392.717,1024,9.07,2.27,10.08\r\nefficientvit_b2,288,2607.06,392.756,1024,24.33,2.64,24.03\r\ntresnet_v2_l,224,2606.4,392.851,1024,46.17,8.85,16.34\r\nvit_base_patch16_siglip_224,224,2602.48,393.447,1024,92.88,17.73,24.06\r\nefficientformer_l3,224,2593.86,394.753,1024,31.41,3.93,12.01\r\nefficientvit_l2,224,2592.54,394.954,1024,63.71,6.97,19.58\r\nmaxvit_nano_rw_256,256,2592.54,296.209,768,15.45,4.46,30.28\r\nmaxvit_rmlp_nano_rw_256,256,2588.96,296.619,768,15.5,4.47,31.92\r\nxcit_nano_12_p8_224,224,2562.46,399.59,1024,3.05,2.16,15.71\r\nresnetv2_101x1_bit,224,2560.2,399.944,1024,44.54,8.04,16.23\r\nresnet101_clip_gap,224,2558.72,400.175,1024,42.52,9.11,17.56\r\ndarknetaa53,288,2557.44,400.376,1024,36.02,1
0.08,15.68\r\nresnetaa101d,224,2554.49,400.839,1024,44.57,9.12,17.56\r\nregnety_032,288,2548.52,401.778,1024,19.44,5.29,18.61\r\nhgnetv2_b3,288,2546.43,402.109,1024,16.29,2.94,8.38\r\ncs3se_edgenet_x,256,2535.68,403.813,1024,50.72,11.53,12.94\r\ncs3darknet_x,288,2530.08,404.706,1024,35.05,10.6,14.36\r\ngcresnet50t,288,2527.87,405.06,1024,25.9,6.86,18.57\r\nflexivit_base,240,2522.93,405.854,1024,86.59,20.29,28.36\r\nese_vovnet39b,288,2522.47,304.439,768,24.57,11.71,11.13\r\nmaxxvitv2_nano_rw_256,256,2506.46,408.521,1024,23.7,6.26,23.05\r\nresnet50t,288,2504.84,408.784,1024,25.57,7.14,19.53\r\ndavit_small,224,2501.2,307.028,768,49.75,8.8,30.49\r\ncait_xxs24_224,224,2497.68,409.955,1024,11.96,2.53,20.29\r\ndpn68b,288,2495.65,410.282,1024,12.61,3.89,17.3\r\nhrnet_w18_ssld,224,2495.06,410.385,1024,21.3,4.32,16.31\r\nregnety_080,224,2489.88,411.237,1024,39.18,8.0,17.97\r\nresnet50d,288,2486.87,411.74,1024,25.58,7.19,19.7\r\nnextvit_base,224,2486.31,411.83,1024,44.79,8.29,23.71\r\nhrnet_w18,224,2482.95,412.388,1024,21.3,4.32,16.31\r\ngcresnext50ts,288,2481.84,412.573,1024,15.67,4.75,19.57\r\nvit_small_patch16_36x1_224,224,2480.81,412.741,1024,64.67,13.71,35.69\r\nvit_base_patch16_rpn_224,224,2480.36,412.817,1024,86.54,17.49,23.75\r\nregnetv_064,224,2479.79,412.914,1024,30.58,6.39,16.41\r\nvit_base_patch16_gap_224,224,2478.25,413.17,1024,86.57,17.49,25.59\r\ntresnet_l,224,2476.57,413.446,1024,55.99,10.9,11.9\r\nmobilevitv2_175,256,2472.18,207.082,512,14.25,5.54,28.13\r\nefficientnetv2_rw_s,288,2465.35,415.332,1024,23.94,4.91,21.41\r\nswin_s3_tiny_224,224,2461.36,416.006,1024,28.33,4.64,19.13\r\nresnet101_clip,224,2460.72,416.112,1024,56.26,9.81,18.08\r\nregnety_064,224,2449.06,418.088,1024,30.58,6.39,16.41\r\nresnetblur101d,224,2448.13,418.255,1024,44.57,9.12,17.94\r\nvit_betwixt_patch16_gap_256,256,2445.4,418.718,1024,60.37,16.25,27.69\r\nmambaout_small,224,2441.63,419.368,1024,48.49,8.96,27.72\r\nsebotnet33ts_256,256,2436.86,157.553,384,13.7,3.89,17.46\r\nlegacy_seresnet1
01,224,2436.55,420.243,1024,49.33,7.61,15.74\r\ncoat_lite_small,224,2436.07,420.324,1024,19.84,3.96,22.09\r\nresnetaa50,288,2421.03,422.937,1024,25.56,8.52,19.24\r\nregnetv_040,288,2420.23,423.076,1024,20.64,6.6,20.3\r\nmobilenetv4_conv_large,384,2412.31,424.465,1024,32.59,6.43,27.31\r\nresnet101s,224,2410.15,424.846,1024,44.67,9.19,18.64\r\nseresnet101,224,2406.75,425.445,1024,49.33,7.84,16.27\r\ndarknet53,288,2406.37,425.511,1024,41.61,11.78,15.68\r\nmixer_l32_224,224,2395.08,427.516,1024,206.94,11.27,19.86\r\nregnety_040,288,2392.72,427.93,1024,20.65,6.61,20.3\r\nvit_small_resnet50d_s16_224,224,2383.14,429.66,1024,57.53,13.48,24.82\r\nresnetv2_50d_gn,288,2373.55,431.397,1024,25.57,7.24,19.7\r\nmobilenetv4_hybrid_medium,384,2372.6,431.569,1024,11.07,3.01,21.18\r\nresnet51q,288,2370.49,431.956,1024,35.7,8.07,20.94\r\ncs3sedarknet_x,288,2370.39,431.971,1024,35.4,10.6,14.37\r\nconvnext_tiny_hnf,288,2368.21,432.37,1024,28.59,7.39,22.21\r\ndla102x,224,2357.13,434.402,1024,26.31,5.89,19.42\r\nresnet50_gn,288,2355.34,434.733,1024,25.56,6.85,18.37\r\nvit_relpos_base_patch16_224,224,2353.4,435.092,1024,86.43,17.51,24.97\r\nresmlp_36_224,224,2339.24,437.721,1024,44.69,8.91,16.33\r\ndensenet201,224,2338.18,437.922,1024,20.01,4.34,7.85\r\nresnext101_32x8d,176,2334.72,438.572,1024,88.79,10.33,19.37\r\ndeit3_small_patch16_384,384,2330.64,439.34,1024,22.21,15.52,50.78\r\nvolo_d1_224,224,2330.36,439.393,1024,26.63,6.94,24.43\r\ntf_efficientnet_b3,300,2325.98,440.219,1024,12.23,1.87,23.83\r\necaresnet101d,224,2324.47,440.503,1024,44.57,8.08,17.07\r\nsequencer2d_s,224,2321.03,441.158,1024,27.65,4.96,11.31\r\nefficientnet_b3,320,2312.88,442.715,1024,12.23,2.01,26.52\r\nvgg13_bn,224,2308.42,443.565,1024,133.05,11.33,12.25\r\nrepvgg_b3,224,2306.63,443.914,1024,110.96,26.21,7.55\r\nregnety_080_tv,224,2304.97,444.234,1024,39.38,8.51,19.73\r\nnf_resnet101,224,2303.84,444.451,1024,44.55,8.01,16.23\r\nresnetaa50d,288,2302.21,444.765,1024,25.58,8.92,20.57\r\nhalonet50ts,256,2291.32,446.879,
1024,22.73,5.3,19.2\r\nseresnet50,288,2291.02,446.939,1024,28.09,6.8,18.39\r\ntf_efficientnetv2_s,300,2288.82,447.367,1024,21.46,5.35,22.73\r\nresnetblur50,288,2282.33,448.64,1024,25.56,8.52,19.87\r\nefficientnet_b3_gn,288,2280.99,336.672,768,11.73,1.74,23.35\r\nhiera_small_224,224,2276.19,449.85,1024,35.01,6.42,20.75\r\nmaxvit_tiny_rw_224,224,2273.57,450.369,1024,29.06,5.11,33.11\r\nres2net101_26w_4s,224,2273.49,450.385,1024,45.21,8.1,18.45\r\nres2net50_26w_8s,224,2271.73,450.732,1024,48.4,8.37,17.95\r\nlegacy_xception,299,2261.73,339.535,768,22.86,8.4,35.83\r\nresnext101_32x4d,224,2254.16,454.247,1024,44.18,8.01,21.23\r\nvit_medium_patch16_rope_reg1_gap_256,256,2251.19,454.846,1024,38.74,10.63,22.26\r\nfasternet_l,224,2249.45,455.198,1024,93.47,15.52,20.46\r\nvgg16,224,2235.61,458.014,1024,138.36,15.47,13.56\r\npvt_v2_b3,224,2233.97,458.352,1024,45.24,6.92,37.7\r\nvit_small_patch16_18x2_224,224,2233.2,458.51,1024,64.67,13.71,35.69\r\nswinv2_cr_tiny_224,224,2225.12,460.166,1024,28.33,4.66,28.45\r\nxcit_tiny_12_p16_384,384,2222.44,460.73,1024,6.72,3.64,18.26\r\nmambaout_tiny,288,2222.4,460.739,1024,26.55,7.41,27.58\r\nefficientvit_b3,224,2220.67,461.097,1024,48.65,3.99,26.9\r\nvit_relpos_base_patch16_rpn_224,224,2206.59,464.04,1024,86.41,17.51,24.97\r\nres2net101d,224,2203.46,464.699,1024,45.23,8.35,19.25\r\nvit_base_patch16_plus_clip_240,240,2200.68,465.287,1024,117.21,27.41,33.08\r\ndensenet121,288,2200.62,465.298,1024,7.98,4.74,11.41\r\nnf_resnet50,288,2198.24,465.804,1024,25.56,6.88,18.37\r\nswinv2_cr_tiny_ns_224,224,2198.15,465.815,1024,28.33,4.66,28.45\r\nvit_mediumd_patch16_reg4_gap_256,256,2192.01,467.126,1024,64.11,17.87,37.57\r\nhgnetv2_b5,288,2189.68,467.62,1024,39.57,10.84,18.5\r\nvit_base_patch16_reg4_gap_256,256,2183.16,469.019,1024,86.62,23.5,33.89\r\nseresnet50t,288,2182.82,469.094,1024,28.1,7.14,19.55\r\nresnet61q,288,2180.16,469.666,1024,36.85,9.87,21.52\r\necaresnet50t,288,2178.79,469.959,1024,25.57,7.14,19.55\r\necaresnet50d,288,2168.48,472.196,1
024,25.58,7.19,19.72\r\nresnext50_32x4d,288,2164.78,473.004,1024,25.03,7.04,23.81\r\nmvitv2_tiny,224,2162.81,473.433,1024,24.17,4.7,21.16\r\nresnetblur50d,288,2158.56,474.366,1024,25.58,8.92,21.19\r\nedgenext_base,320,2157.19,474.662,1024,18.51,6.01,24.32\r\nrdnet_small,224,2154.4,475.281,1024,50.44,8.74,22.55\r\ncs3edgenet_x,288,2150.48,476.147,1024,47.82,14.59,16.36\r\nlambda_resnet26t,256,2145.91,477.165,1024,10.96,3.02,11.87\r\nmobilevitv2_200,256,2145.84,357.877,768,18.45,7.22,32.15\r\nese_vovnet99b,224,2145.07,477.349,1024,63.2,16.51,11.27\r\nconvnextv2_tiny,224,2141.7,478.1,1024,28.64,4.47,13.44\r\ncoatnet_rmlp_1_rw_224,224,2132.15,480.24,1024,41.69,7.85,35.47\r\nconvnext_nano,384,2132.14,360.177,768,15.59,7.22,24.61\r\nresnet101d,256,2131.58,480.371,1024,44.57,10.55,22.25\r\nregnety_040_sgn,288,2131.06,480.488,1024,20.65,6.67,20.3\r\nregnetz_040,256,2122.35,482.459,1024,27.12,4.06,24.19\r\ninception_next_small,224,2121.62,482.626,1024,49.37,8.36,19.27\r\nregnetz_040_h,256,2118.75,483.279,1024,28.94,4.12,24.29\r\nmambaout_small_rw,224,2109.04,485.503,1024,48.5,8.96,27.72\r\nese_vovnet39b_evos,224,2107.5,485.859,1024,24.58,7.07,6.74\r\nresnest50d_4s2x40d,224,2104.92,486.455,1024,30.42,4.4,17.94\r\nconvnext_base,224,2101.67,487.206,1024,88.59,15.38,28.75\r\nefficientvit_l2,256,2088.94,490.175,1024,63.71,9.09,25.49\r\neca_nfnet_l0,288,2087.13,490.597,1024,24.14,7.12,17.29\r\nnfnet_l0,288,2085.01,491.098,1024,35.07,7.13,17.29\r\nregnetz_d32,256,2075.8,493.279,1024,27.58,5.98,23.74\r\nnf_regnet_b4,320,2075.46,493.356,1024,30.21,3.29,19.88\r\nregnetz_d8,256,2075.06,493.456,1024,23.37,3.97,23.74\r\nregnetx_120,224,2074.3,493.632,1024,46.11,12.13,21.37\r\nresnext50d_32x4d,288,2052.65,498.842,1024,25.05,7.44,25.13\r\nxception41p,299,2047.89,374.996,768,26.91,9.25,39.86\r\ndpn92,224,2046.34,500.376,1024,37.67,6.54,18.21\r\nvgg19,224,2044.71,500.777,1024,143.67,19.63,14.86\r\nresnetv2_152,224,2044.47,500.84,1024,60.19,11.55,22.56\r\nseresnetaa50d,288,2026.19,505.358,102
4,28.11,8.92,20.59\r\nnextvit_large,224,2016.43,507.803,1024,57.83,10.77,28.99\r\nvit_pe_core_base_patch16_224,224,2014.41,508.312,1024,93.67,17.82,24.21\r\nregnetz_c16,320,2012.96,508.675,1024,13.46,3.92,25.88\r\nvit_base_patch16_siglip_gap_256,256,2012.41,508.819,1024,85.84,23.13,33.23\r\nhgnet_small,288,2003.63,383.282,768,24.36,14.09,14.53\r\npoolformer_s36,224,2000.97,511.729,1024,30.86,5.0,15.82\r\nfocalnet_small_srf,224,1998.25,512.423,1024,49.89,8.62,26.26\r\nvit_base_patch16_siglip_256,256,1997.47,512.624,1024,92.93,23.44,33.63\r\ncrossvit_base_240,240,1991.44,514.175,1024,105.03,21.22,36.33\r\nskresnext50_32x4d,224,1990.85,514.324,1024,27.48,4.5,17.18\r\nresnetv2_152d,224,1985.38,515.745,1024,60.2,11.8,23.36\r\ndensenetblur121d,288,1984.16,516.063,1024,8.0,5.14,13.06\r\nresnet152,224,1976.86,517.969,1024,60.19,11.56,22.56\r\nconvit_small,224,1976.08,518.173,1024,27.78,5.76,17.87\r\nseresnext101_32x4d,224,1974.76,518.52,1024,48.96,8.02,21.26\r\nlegacy_seresnext101_32x4d,224,1972.3,519.163,1024,48.96,8.02,21.26\r\nvgg16_bn,224,1970.05,519.757,1024,138.37,15.5,13.56\r\ncoatnet_1_rw_224,224,1969.2,519.982,1024,41.72,8.04,34.6\r\nconvmixer_1024_20_ks9_p14,224,1967.37,520.468,1024,24.38,5.55,5.51\r\ncoatnet_rmlp_1_rw2_224,224,1967.15,520.521,1024,41.72,8.11,40.13\r\nresnetv2_50d_frn,224,1952.63,524.396,1024,25.59,4.33,11.92\r\ntwins_pcpvt_large,224,1948.99,525.37,1024,60.99,9.84,35.82\r\nbotnet50ts_256,256,1948.86,262.694,512,22.74,5.54,22.23\r\nnfnet_f0,256,1941.39,527.435,1024,71.49,12.62,18.05\r\nregnety_120,224,1939.94,527.826,1024,51.82,12.14,21.38\r\nmobilenetv4_conv_aa_large,384,1938.14,528.318,1024,32.59,7.07,32.29\r\nresnet152d,224,1936.56,528.748,1024,60.21,11.8,23.36\r\ninception_v4,299,1932.54,529.845,1024,42.68,12.28,15.09\r\nresnetv2_34d,448,1927.49,531.236,1024,21.82,15.64,18.16\r\nresnet152c,224,1925.07,531.904,1024,60.21,11.8,23.36\r\nfastvit_mci1,256,1914.2,534.923,1024,21.46,4.67,27.3\r\nhalo2botnet50ts_256,256,1913.4,535.15,1024,22.64,5.02,21
.78\r\nseresnext50_32x4d,288,1911.6,535.652,1024,27.56,7.04,23.82\r\ndla169,224,1907.8,536.721,1024,53.39,11.6,20.2\r\ngcvit_tiny,224,1906.66,537.039,1024,28.22,4.79,29.82\r\nregnetz_b16_evos,224,1899.95,538.938,1024,9.74,1.43,9.95\r\ncaformer_s18,224,1894.71,540.427,1024,26.34,4.13,19.39\r\nvit_base_patch16_plus_240,240,1893.43,540.795,1024,117.56,27.41,33.08\r\nconvnextv2_nano,288,1893.22,405.634,768,15.62,4.06,13.84\r\nvit_small_patch8_224,224,1882.13,544.041,1024,21.67,22.44,80.84\r\nefficientnet_el_pruned,300,1879.75,544.731,1024,10.59,8.0,30.7\r\nefficientnet_el,300,1879.16,544.9,1024,10.59,8.0,30.7\r\nnf_ecaresnet101,224,1876.25,545.746,1024,44.55,8.01,16.27\r\nmaxvit_tiny_tf_224,224,1875.7,409.421,768,30.92,5.6,35.78\r\nnf_seresnet101,224,1874.58,546.23,1024,49.33,8.02,16.27\r\nefficientnet_b3_gn,320,1871.84,410.268,768,11.73,2.14,28.83\r\ntresnet_xl,224,1862.86,549.658,1024,78.44,15.2,15.34\r\nefficientnet_b3_g8_gn,288,1861.19,412.614,768,14.25,2.59,23.35\r\nlamhalobotnet50ts_256,256,1860.42,550.389,1024,22.57,5.02,18.44\r\nvit_pe_spatial_tiny_patch16_512,512,1854.69,552.089,1024,5.68,10.46,61.64\r\nvit_large_r50_s32_224,224,1846.47,554.547,1024,328.99,19.58,24.41\r\nfastvit_sa36,256,1835.75,557.785,1024,31.46,5.59,29.46\r\nfocalnet_small_lrf,224,1832.43,558.794,1024,50.34,8.74,28.61\r\nsequencer2d_m,224,1823.36,561.573,1024,38.31,6.55,14.26\r\ncsatv2_21m,512,1822.94,561.704,1024,20.7,2.94,15.85\r\nmaxxvit_rmlp_tiny_rw_256,256,1819.61,562.73,1024,29.64,6.66,39.76\r\ntf_efficientnet_el,300,1814.47,564.326,1024,10.59,8.0,30.7\r\ndensenet161,224,1809.99,565.722,1024,28.68,7.79,11.06\r\nconvnext_small,288,1807.14,566.616,1024,50.22,14.39,35.65\r\nmobilenetv4_conv_large,448,1806.89,566.696,1024,32.59,8.75,37.17\r\nefficientvit_b3,256,1794.4,570.637,1024,48.65,5.2,35.01\r\ndavit_base,224,1791.47,428.67,768,87.95,15.51,40.66\r\necaresnet50t,320,1785.83,573.377,1024,25.57,8.82,24.13\r\nresnet152s,224,1783.85,574.012,1024,60.32,12.92,24.96\r\nresnetv2_101,288,1777.2
7,576.14,1024,44.54,12.94,26.83\r\ntnt_s_legacy_patch16_224,224,1774.06,577.178,1024,23.76,5.24,24.37\r\nmaxvit_tiny_rw_256,256,1769.5,433.998,768,29.07,6.74,44.35\r\nmaxvit_rmlp_tiny_rw_256,256,1768.17,434.32,768,29.15,6.77,46.92\r\nvit_base_patch16_rope_mixed_224,224,1767.16,579.437,1024,86.44,17.58,25.7\r\nvit_base_patch16_rope_224,224,1767.01,579.487,1024,86.43,17.58,23.9\r\nvit_base_patch16_rope_mixed_ape_224,224,1762.51,580.968,1024,86.59,17.58,25.7\r\nvit_base_patch16_rope_ape_224,224,1762.46,580.978,1024,86.59,17.58,23.9\r\nregnetx_160,224,1759.34,582.012,1024,54.28,15.99,25.52\r\nrexnetr_300,288,1754.43,291.787,512,34.81,5.59,36.61\r\nresnetv2_50d_evos,224,1754.19,583.722,1024,25.59,4.33,11.92\r\nconvformer_s18,224,1752.61,584.246,1024,26.77,3.96,15.82\r\nxception41,299,1746.19,439.791,768,26.97,9.28,39.86\r\ncoatnet_1_224,224,1743.56,293.626,512,42.23,8.7,39.0\r\nmixnet_xxl,224,1741.1,441.076,768,23.96,2.04,23.43\r\ntnt_s_patch16_224,224,1729.75,591.964,1024,23.77,5.25,24.37\r\nefficientnet_b4,320,1728.31,444.339,768,19.34,3.13,34.76\r\nresnet101,288,1724.55,593.755,1024,44.55,12.95,26.83\r\nlegacy_seresnet152,224,1723.56,594.093,1024,66.82,11.33,22.08\r\nvgg19_bn,224,1721.14,594.927,1024,143.68,19.66,14.86\r\nwide_resnet50_2,288,1720.47,595.162,1024,68.88,18.89,23.81\r\nhrnet_w30,224,1719.8,595.39,1024,37.71,8.15,21.21\r\neva02_base_patch16_clip_224,224,1714.9,597.095,1024,86.26,17.62,26.32\r\nmvitv2_small_cls,224,1711.13,598.41,1024,34.87,7.04,28.17\r\nswin_small_patch4_window7_224,224,1708.14,599.457,1024,49.61,8.77,27.47\r\nese_vovnet57b,320,1702.4,451.103,768,38.61,18.26,15.34\r\nxcit_tiny_12_p8_224,224,1699.3,602.575,1024,6.71,4.81,23.6\r\nseresnet152,224,1694.04,604.445,1024,66.82,11.57,22.61\r\nwide_resnet101_2,224,1684.78,607.769,1024,126.89,22.8,21.23\r\nvit_betwixt_patch16_rope_reg4_gap_256,256,1683.51,608.229,1024,60.23,16.52,28.24\r\nhrnet_w32,224,1676.55,610.75,1024,41.23,8.97,22.02\r\ncait_xxs36_224,224,1675.7,611.062,1024,17.3,3.77,30.34\r\
nmobilenetv4_hybrid_medium,448,1665.77,614.707,1024,11.07,4.2,29.64\r\nregnety_160,224,1664.38,615.221,1024,83.59,15.96,23.04\r\nconvnext_tiny,384,1647.28,466.198,768,28.59,13.14,39.48\r\nvit_base_r50_s16_224,224,1645.77,622.175,1024,97.89,21.66,35.28\r\nefficientformerv2_s0,224,1631.05,627.791,1024,3.6,0.41,5.3\r\nmvitv2_small,224,1630.91,627.845,1024,34.87,7.0,28.08\r\ncs3se_edgenet_x,320,1625.87,629.792,1024,50.72,18.01,20.21\r\nvit_relpos_base_patch16_plus_240,240,1625.39,629.98,1024,117.38,27.3,34.33\r\npoolformerv2_s24,224,1613.49,634.625,1024,21.34,3.42,10.68\r\nvit_small_r26_s32_384,384,1611.45,635.429,1024,36.47,10.43,29.85\r\nvit_pe_core_small_patch16_384,384,1611.35,635.467,1024,23.78,15.69,51.23\r\nconvnext_base,256,1609.86,636.051,1024,88.59,20.09,37.55\r\nxcit_small_24_p16_224,224,1609.85,636.059,1024,47.67,9.1,23.64\r\nefficientvit_l2,288,1607.69,636.912,1024,63.71,11.51,32.19\r\npvt_v2_b4,224,1604.49,638.181,1024,62.56,10.14,53.74\r\npvt_v2_b5,224,1603.7,638.497,1024,81.96,11.76,50.92\r\ndla102x2,224,1602.33,639.043,1024,41.28,9.34,29.91\r\nmaxvit_tiny_pm_256,256,1601.93,479.396,768,30.09,6.61,47.9\r\nswinv2_tiny_window8_256,256,1597.61,640.931,1024,28.35,5.96,24.57\r\ndm_nfnet_f0,256,1585.03,646.019,1024,71.49,12.62,18.05\r\ncoat_tiny,224,1575.39,649.971,1024,5.5,4.35,27.2\r\nefficientnet_lite4,380,1574.86,325.083,512,13.01,4.04,45.66\r\nresnetaa101d,288,1564.22,654.617,1024,44.57,15.07,29.03\r\nhgnetv2_b6,224,1552.32,659.631,1024,75.26,16.88,21.23\r\nmambaout_base,224,1549.04,661.031,1024,84.81,15.83,36.95\r\nhiera_base_224,224,1547.56,661.663,1024,51.52,9.4,30.42\r\neca_nfnet_l1,256,1543.02,663.601,1024,41.41,9.62,22.04\r\nresnet152d,256,1539.23,665.24,1024,60.21,15.41,30.51\r\ntwins_svt_base,224,1537.79,665.863,1024,56.07,8.59,26.33\r\nregnety_080,288,1533.65,667.663,1024,39.18,13.22,29.69\r\nxception65p,299,1532.34,501.171,768,39.82,13.91,52.48\r\nregnetv_064,288,1515.3,675.75,1024,30.58,10.55,27.11\r\nefficientnet_b3_g8_gn,320,1512.63,507.698,7
68,14.25,3.2,28.83\r\nfastvit_ma36,256,1509.16,678.485,1024,43.98,7.82,34.98\r\nefficientnetv2_s,384,1507.05,679.448,1024,21.46,8.44,35.77\r\nefficientformerv2_s1,224,1504.71,680.496,1024,6.19,0.67,7.66\r\nhiera_small_abswin_256,256,1501.81,511.36,768,34.36,8.29,26.38\r\nregnety_064,288,1498.23,683.445,1024,30.58,10.56,27.11\r\nrdnet_base,224,1494.36,513.906,768,87.45,15.4,31.14\r\nresnetblur101d,288,1493.86,685.444,1024,44.57,15.07,29.65\r\ninception_next_base,224,1490.76,686.87,1024,86.67,14.85,25.69\r\ndpn98,224,1485.38,689.357,1024,61.57,11.73,25.2\r\nseresnet101,288,1471.03,696.088,1024,49.33,12.95,26.87\r\nhrnet_w18_ssld,288,1469.5,696.812,1024,21.3,7.14,26.96\r\nhgnet_base,224,1463.48,524.747,768,71.58,25.14,15.47\r\ntf_efficientnetv2_s,384,1458.83,701.904,1024,21.46,8.44,35.77\r\nresnext101_64x4d,224,1448.16,707.08,1024,83.46,15.52,31.21\r\nmobilenetv4_conv_aa_large,448,1443.0,532.203,768,32.59,9.63,43.94\r\nnf_regnet_b4,384,1442.79,709.71,1024,30.21,4.7,28.61\r\nefficientnetv2_rw_s,384,1438.52,711.819,1024,23.94,8.72,38.03\r\ntf_efficientnet_lite4,380,1436.59,534.576,768,13.01,4.04,45.66\r\nmambaout_base_tall_rw,224,1436.09,713.02,1024,86.48,16.15,38.74\r\nresnext101_32x8d,224,1433.38,714.369,1024,88.79,16.48,31.21\r\nresnet200,224,1429.86,716.126,1024,64.67,15.07,32.19\r\nvit_medium_patch16_gap_384,384,1427.69,717.216,1024,39.03,26.08,67.54\r\ninception_resnet_v2,299,1426.18,717.978,1024,55.84,13.18,25.06\r\ncrossvit_15_dagger_408,408,1421.55,720.317,1024,28.5,21.45,95.05\r\nfocalnet_base_srf,224,1421.55,720.316,1024,88.15,15.28,35.01\r\necaresnet101d,288,1421.15,720.515,1024,44.57,13.35,28.19\r\neva02_small_patch14_336,336,1419.45,721.381,1024,22.13,15.48,54.33\r\nvit_so150m_patch16_reg4_gap_256,256,1418.16,722.037,1024,134.13,36.75,53.21\r\nfastvit_mci2,256,1416.89,722.683,1024,35.7,7.85,36.09\r\npoolformer_m36,224,1413.48,724.427,1024,56.17,8.8,22.02\r\nresnetrs101,288,1410.32,726.05,1024,63.62,13.56,28.53\r\nmambaout_base_short_rw,224,1405.79,728.391,1
024,88.83,16.31,38.08\r\nmambaout_small,288,1405.58,728.501,1024,48.49,14.81,45.82\r\nvit_so150m_patch16_reg4_map_256,256,1401.65,730.541,1024,141.48,37.17,53.68\r\nefficientvit_b3,288,1392.32,735.438,1024,48.65,6.58,44.2\r\nnest_tiny,224,1391.41,551.933,768,17.06,5.83,25.48\r\nresnext101_32x4d,288,1385.81,738.892,1024,44.18,13.24,35.09\r\nlambda_resnet50ts,256,1381.99,740.94,1024,21.54,5.07,17.48\r\nswinv2_cr_small_224,224,1380.84,741.544,1024,49.7,9.07,50.27\r\nnest_tiny_jx,224,1379.22,556.813,768,17.06,5.83,25.48\r\nnfnet_f1,224,1376.08,744.117,1024,132.63,17.87,22.94\r\nmobilenetv4_hybrid_large,384,1370.56,747.113,1024,37.76,7.77,34.52\r\nswinv2_cr_small_ns_224,224,1369.58,747.634,1024,49.7,9.08,50.27\r\nvit_mediumd_patch16_rope_reg1_gap_256,256,1362.49,751.513,1024,63.95,17.65,37.02\r\nmaxvit_rmlp_small_rw_224,224,1360.32,752.74,1024,64.9,10.75,49.3\r\nregnetz_040,320,1355.64,377.655,512,27.12,6.35,37.78\r\nresnet101d,320,1352.05,757.336,1024,44.57,16.48,34.77\r\nregnetz_040_h,320,1350.5,379.092,512,28.94,6.43,37.94\r\nvit_base_patch16_dinov3_qkvb,256,1346.96,760.206,1024,85.66,23.6,34.06\r\nvit_base_patch16_dinov3,256,1340.21,764.034,1024,85.64,23.6,34.06\r\nvit_base_patch16_rope_reg1_gap_256,256,1336.19,766.336,1024,86.43,23.22,33.39\r\nseresnext101_64x4d,224,1333.42,767.923,1024,88.23,15.53,31.25\r\nconvnextv2_small,224,1319.31,776.14,1024,50.32,8.71,21.56\r\nregnetz_d32,320,1319.23,776.183,1024,27.58,9.33,37.08\r\nfocalnet_base_lrf,224,1317.32,777.308,1024,88.75,15.43,38.13\r\nregnetz_d8,320,1317.01,777.494,1024,23.37,6.19,37.08\r\nseresnext101_32x8d,224,1315.51,778.379,1024,93.57,16.48,31.25\r\nresnest101e,256,1314.62,778.907,1024,48.28,13.38,28.66\r\nvolo_d2_224,224,1310.55,781.325,1024,58.68,14.34,41.34\r\neva02_base_patch14_224,224,1309.73,781.814,1024,85.76,23.22,36.55\r\nseresnet152d,256,1308.39,782.609,1024,66.84,15.42,30.56\r\ncoatnet_2_rw_224,224,1307.99,391.415,512,73.87,15.09,49.22\r\nxception65,299,1307.23,587.475,768,39.92,13.96,52.48\r\nresnet
rs152,256,1301.89,786.521,1024,86.62,15.59,30.83\r\nconvnextv2_tiny,288,1297.81,591.74,768,28.64,7.39,22.21\r\nseresnext101d_32x8d,224,1294.75,790.864,1024,93.59,16.72,32.05\r\nhrnet_w40,224,1291.92,792.591,1024,57.56,12.75,25.29\r\nmobilevitv2_150,384,1290.77,297.474,384,10.59,9.2,54.25\r\ngmlp_b16_224,224,1290.0,793.774,1024,73.08,15.78,30.21\r\nvitamin_base_224,224,1287.91,198.747,256,87.72,22.68,52.77\r\nefficientnetv2_m,320,1278.38,800.986,1024,54.14,11.01,39.97\r\ncait_s24_224,224,1278.29,801.047,1024,46.92,9.35,40.58\r\nconvnext_base,288,1271.73,805.174,1024,88.59,25.43,47.53\r\nefficientformer_l7,224,1259.12,813.24,1024,82.23,10.17,24.45\r\nmobilenetv4_conv_aa_large,480,1258.97,609.999,768,32.59,11.05,50.45\r\nmambaout_base_wide_rw,224,1255.12,815.834,1024,94.45,17.78,42.6\r\nresnet50x4_clip_gap,288,1252.66,817.435,1024,65.62,19.57,34.11\r\nmaxvit_small_tf_224,224,1247.57,410.372,512,68.93,11.66,53.17\r\nvit_large_patch32_384,384,1243.82,823.248,1024,306.63,45.31,43.86\r\ncoat_mini,224,1240.98,825.131,1024,10.34,6.82,33.68\r\nswin_base_patch4_window7_224,224,1240.78,825.261,1024,87.77,15.47,36.63\r\ncoat_lite_medium,224,1234.76,829.283,1024,44.57,9.81,40.06\r\ntresnet_m,448,1233.76,829.956,1024,31.39,22.99,29.21\r\ncoatnet_rmlp_2_rw_224,224,1231.2,415.827,512,73.88,15.18,54.78\r\ncoatnet_2_224,224,1225.56,417.744,512,74.68,16.5,52.67\r\nvit_so150m2_patch16_reg1_gap_256,256,1223.68,836.768,1024,136.06,37.0,56.93\r\nmambaout_small_rw,288,1222.15,837.84,1024,48.5,14.81,45.82\r\nseresnext101_32x4d,288,1221.15,838.526,1024,48.96,13.25,35.12\r\nresnet152,288,1217.44,841.082,1024,60.19,19.11,37.28\r\nseresnextaa101d_32x8d,224,1214.94,842.817,1024,93.59,17.25,34.16\r\ntiny_vit_21m_384,384,1214.68,632.23,768,21.22,13.72,77.83\r\nconvnext_large,224,1214.27,843.28,1024,197.77,34.4,43.13\r\nmvitv2_base_cls,224,1205.93,849.116,1024,65.44,10.23,40.65\r\nresnet50x4_clip,288,1203.83,850.596,1024,87.14,21.35,35.27\r\nmaxxvit_rmlp_small_rw_256,256,1201.19,852.463,1024,66.01,1
4.67,58.38\r\nefficientnet_b4,384,1198.13,427.307,512,19.34,4.51,50.04\r\nxcit_tiny_24_p16_384,384,1192.86,858.417,1024,12.12,6.87,34.29\r\nregnety_120,288,1190.83,644.902,768,51.82,20.06,35.34\r\nmambaout_base_plus_rw,224,1186.01,863.377,1024,101.66,19.19,45.16\r\nswin_s3_small_224,224,1181.97,649.737,768,49.74,9.43,37.84\r\nregnetz_e8,256,1178.56,868.829,1024,57.7,9.91,40.94\r\nsequencer2d_l,224,1175.05,871.418,1024,54.3,9.74,22.12\r\ncsatv2_21m,640,1173.8,654.261,768,20.7,4.72,26.68\r\nlevit_conv_384_s8,224,1167.08,438.674,512,39.06,9.95,35.86\r\nlevit_384_s8,224,1164.73,439.562,512,39.06,9.95,35.86\r\nhrnet_w44,224,1164.13,879.601,1024,67.06,14.94,26.92\r\nregnetz_b16_evos,288,1151.38,667.002,768,9.74,2.36,16.43\r\nhiera_base_plus_224,224,1151.15,889.521,1024,69.9,12.67,37.98\r\nvit_betwixt_patch16_reg4_gap_384,384,1150.77,889.812,1024,60.6,39.71,85.28\r\ndm_nfnet_f1,224,1149.19,891.038,1024,132.63,17.87,22.94\r\nxcit_medium_24_p16_224,224,1147.68,892.208,1024,84.4,16.13,31.71\r\nmvitv2_base,224,1144.54,894.657,1024,51.47,10.16,40.5\r\nregnetz_c16_evos,256,1138.18,674.738,768,13.49,2.48,16.57\r\ngcvit_small,224,1134.07,902.92,1024,51.09,8.57,41.61\r\nfastvit_mci3,256,1133.13,903.666,1024,125.07,14.82,44.88\r\nsamvit_base_patch16_224,224,1133.01,903.754,1024,86.46,17.54,24.54\r\ntf_efficientnet_b4,380,1125.42,454.916,512,19.34,4.49,49.49\r\nnextvit_small,384,1122.7,912.062,1024,31.74,17.25,57.14\r\nefficientnetv2_rw_m,320,1113.91,919.251,1024,53.24,12.72,47.14\r\ntnt_b_patch16_224,224,1109.81,922.653,1024,65.43,14.1,39.01\r\nresnet200d,256,1103.29,928.11,1024,64.69,20.0,43.09\r\nnaflexvit_base_patch16_siglip,384,1100.63,930.354,1024,92.93,56.12,102.2\r\nmobilevitv2_175,384,1095.1,350.63,384,14.25,12.47,63.29\r\nhrnet_w48_ssld,224,1095.02,935.115,1024,77.47,17.34,28.56\r\ndpn131,224,1094.58,935.483,1024,79.25,16.09,32.97\r\nhrnet_w48,224,1092.65,937.143,1024,77.47,17.34,28.56\r\ntwins_svt_large,224,1087.39,706.251,768,99.27,15.15,35.1\r\npoolformerv2_s36,224,1084.
2,944.448,1024,30.79,5.01,15.82\r\nvit_base_patch16_clip_384,384,1075.06,952.485,1024,86.86,55.54,101.56\r\nefficientvit_l3,224,1070.98,717.066,768,246.04,27.62,39.16\r\nconvnextv2_nano,384,1065.87,360.242,384,15.62,7.22,24.61\r\npoolformer_m48,224,1065.38,961.138,1024,73.47,11.59,29.17\r\nresnetv2_50d_evos,288,1063.99,962.389,1024,25.59,7.15,19.7\r\nmaxvit_rmlp_small_rw_256,256,1061.61,723.4,768,64.9,14.15,66.09\r\nswinv2_base_window12_192,192,1056.52,969.194,1024,109.28,11.9,39.72\r\nregnety_320,224,1051.16,974.136,1024,145.05,32.34,30.26\r\ndavit_large,224,1050.56,731.016,768,196.81,34.6,60.99\r\nconvit_base,224,1048.86,976.27,1024,86.54,17.52,31.77\r\nresnetv2_50x1_bit,448,1047.34,488.833,512,25.55,16.62,44.46\r\nsenet154,224,1045.73,979.187,1024,115.09,20.77,38.69\r\nlegacy_senet154,224,1045.37,979.53,1024,115.09,20.77,38.69\r\ncrossvit_18_dagger_408,408,1038.35,986.151,1024,44.61,32.47,124.87\r\nseresnet152,288,1037.89,986.588,1024,66.82,19.11,37.34\r\nxcit_small_12_p16_384,384,1032.89,991.367,1024,26.25,14.14,36.51\r\nconvnext_base,320,1028.49,746.702,768,88.59,31.39,58.68\r\nregnety_160,288,1025.57,499.208,512,83.59,26.37,38.07\r\nconvnext_small,384,1018.33,754.152,768,50.22,25.58,63.37\r\nhiera_base_abswin_256,256,1018.09,754.321,768,51.27,12.46,40.7\r\ndpn107,224,1017.77,1006.092,1024,86.92,18.38,33.46\r\ndensenet264d,224,1016.79,1007.066,1024,72.74,13.57,14.0\r\nmaxxvitv2_rmlp_base_rw_224,224,1012.58,1011.247,1024,116.09,24.2,62.77\r\nswinv2_cr_base_224,224,1006.37,1017.486,1024,87.88,15.86,59.66\r\nxception71,299,1000.66,767.467,768,42.34,18.09,69.92\r\nswinv2_cr_base_ns_224,224,999.57,1024.399,1024,87.88,15.86,59.66\r\ncaformer_s36,224,994.19,1029.958,1024,39.3,8.0,37.53\r\nswinv2_small_window8_256,256,993.49,1030.688,1024,49.73,11.58,40.14\r\nregnetx_320,224,993.44,1030.73,1024,107.81,31.81,36.3\r\nswinv2_cr_small_ns_256,256,990.88,1033.387,1024,49.7,12.07,76.21\r\nresnetv2_50x3_bit,224,984.94,779.714,768,217.32,37.06,33.34\r\nswin_s3_base_224,224,983.
58,1041.074,1024,71.13,13.69,48.26\r\nswinv2_tiny_window16_256,256,979.39,522.753,512,28.35,6.68,39.02\r\ndeit_base_patch16_384,384,974.78,1050.471,1024,86.86,55.54,101.56\r\nhgnetv2_b6,288,973.32,789.029,768,75.26,27.9,35.09\r\ndeit_base_distilled_patch16_384,384,973.31,1052.05,1024,87.63,55.65,101.82\r\neca_nfnet_l1,320,973.1,1052.273,1024,41.41,14.92,34.42\r\nvit_mediumd_patch16_reg4_gap_384,384,972.29,1053.162,1024,64.27,43.67,113.51\r\nresnet152d,320,971.28,1054.25,1024,60.21,24.08,47.67\r\nvit_base_patch16_384,384,966.77,1059.173,1024,86.86,55.54,101.56\r\nmobilenetv4_conv_aa_large,544,963.08,398.698,384,32.59,14.19,64.79\r\nconvnextv2_base,224,961.39,798.819,768,88.72,15.38,28.75\r\nnf_regnet_b5,384,961.09,799.065,768,49.74,7.95,42.9\r\neva_large_patch14_196,196,957.31,1069.633,1024,304.14,61.57,63.52\r\ncoat_small,224,954.17,1073.159,1024,21.69,12.61,44.25\r\nvit_large_patch16_224,224,954.12,1073.209,1024,304.33,61.6,63.52\r\nmobilenetv4_hybrid_large,448,952.39,806.361,768,37.76,10.74,48.61\r\nmobilevitv2_200,384,950.27,269.373,256,18.45,16.24,72.34\r\nvolo_d3_224,224,945.66,1082.815,1024,86.33,20.78,60.09\r\nmambaout_base,288,937.09,819.529,768,84.81,26.16,61.08\r\nseresnet200d,256,931.81,1098.915,1024,71.86,20.01,43.15\r\necaresnet200d,256,931.48,1099.295,1024,64.69,20.0,43.15\r\nefficientvit_l2,384,931.4,824.539,768,63.71,20.45,57.01\r\nconvnext_large_mlp,256,929.48,826.246,768,200.13,44.94,56.33\r\nresnetrs200,256,928.82,1102.44,1024,93.21,20.18,43.42\r\nconvformer_s36,224,919.7,1113.384,1024,40.01,7.67,30.5\r\nnaflexvit_base_patch16_gap,384,908.56,1127.029,1024,86.63,55.86,102.34\r\nnaflexvit_base_patch16_par_gap,384,908.18,1127.501,1024,86.63,55.86,102.34\r\nnaflexvit_base_patch16_parfac_gap,384,904.58,1131.99,1024,86.46,55.86,102.34\r\nrdnet_large,224,903.55,566.629,512,186.27,34.74,46.67\r\nmambaout_base_short_rw,288,899.87,568.941,512,88.83,26.96,62.94\r\nresnext101_64x4d,288,899.37,1138.545,1024,83.46,25.66,51.59\r\nnaflexvit_base_patch16_map,384,8
97.73,1140.632,1024,93.72,56.23,102.46\r\nvit_small_patch14_dinov2,518,895.43,857.661,768,22.06,46.76,198.79\r\nxcit_tiny_24_p8_224,224,894.66,1144.536,1024,12.11,9.21,45.39\r\ntf_efficientnetv2_m,384,892.63,1147.151,1024,54.14,15.85,57.52\r\nnest_small,224,886.71,866.092,768,38.35,10.35,40.04\r\nhgnet_base,288,885.95,577.884,512,71.58,41.55,25.57\r\nnest_small_jx,224,881.72,870.997,768,38.35,10.35,40.04\r\ndeit3_large_patch16_224,224,881.59,1161.512,1024,304.37,61.6,63.52\r\nvit_small_patch14_reg4_dinov2,518,881.4,871.311,768,22.06,46.95,199.77\r\nnextvit_base,384,881.03,1162.251,1024,44.79,24.62,73.95\r\ndeit3_base_patch16_384,384,880.84,1162.506,1024,86.88,55.54,101.56\r\nbeit_large_patch16_224,224,877.09,1167.47,1024,304.43,61.6,63.52\r\nbeitv2_large_patch16_224,224,875.21,1169.983,1024,304.43,61.6,63.52\r\nxcit_nano_12_p8_384,384,874.56,1170.847,1024,3.05,6.34,46.08\r\nresnet200,288,874.42,1171.035,1024,64.67,24.91,53.21\r\nbeit3_large_patch16_224,224,870.27,1176.624,1024,304.57,61.72,63.52\r\nmixer_l16_224,224,865.36,1183.302,1024,208.2,44.6,41.69\r\nvit_pe_spatial_small_patch16_512,512,863.88,1185.324,1024,21.98,31.8,123.27\r\nbeit_base_patch16_384,384,863.81,1185.422,1024,86.74,55.54,101.56\r\nvit_base_patch16_18x2_224,224,859.09,1191.924,1024,256.73,52.51,71.38\r\nefficientvit_l3,256,858.95,596.049,512,246.04,36.06,50.98\r\nhrnet_w64,224,858.61,1192.59,1024,128.06,28.97,35.09\r\nvit_base_patch16_siglip_gap_384,384,850.4,1204.117,1024,86.09,55.43,101.3\r\nvit_base_patch16_siglip_384,384,845.19,1211.538,1024,93.18,56.12,102.2\r\ngcvit_base,224,838.26,1221.554,1024,90.32,14.87,55.48\r\nmaxvit_rmlp_base_rw_224,224,833.84,1228.028,1024,116.14,23.15,92.64\r\nseresnet152d,320,830.27,1233.301,1024,66.84,24.09,47.72\r\nflexivit_large,240,825.17,1240.929,1024,304.36,70.99,75.39\r\nresnetrs152,320,824.84,1241.424,1024,86.62,24.34,48.14\r\nmambaout_base_tall_rw,288,819.54,937.089,768,86.48,26.69,64.04\r\nseresnext101_32x8d,288,815.34,1255.885,1024,93.57,27.24,51.63\r\n
resnext101_32x16d,224,814.77,1256.761,1024,194.03,36.27,51.18\r\nmambaout_base_wide_rw,288,813.06,629.692,512,94.45,29.39,70.41\r\nmobilenetv5_base,256,804.25,954.906,768,82.65,20.05,36.89\r\nvolo_d1_384,384,802.39,1276.162,1024,26.78,22.75,108.55\r\nlevit_conv_512_s8,224,798.47,480.894,384,73.97,21.77,52.28\r\nseresnext101d_32x8d,288,797.82,1283.466,1024,93.59,27.64,52.95\r\nefficientformerv2_s2,224,796.18,1286.119,1024,12.71,1.27,11.77\r\nconvnext_xlarge,224,795.1,965.894,768,350.2,60.98,57.5\r\nxcit_small_12_p8_224,224,790.4,1295.523,1024,26.21,18.69,47.21\r\nvit_large_patch14_clip_224,224,785.27,1303.968,1024,304.2,81.08,88.79\r\nlevit_512_s8,224,784.34,489.559,384,73.97,21.77,52.28\r\npoolformerv2_m36,224,773.44,1323.92,1024,56.08,8.81,22.02\r\nfastvit_mci4,256,772.08,1326.256,1024,321.57,27.78,60.59\r\nmambaout_base_plus_rw,288,765.05,669.21,512,101.66,31.72,74.64\r\nnfnet_f2,256,758.4,1350.189,1024,193.78,33.76,41.85\r\nefficientnetv2_m,416,756.15,1354.199,1024,54.14,18.6,67.5\r\nswin_large_patch4_window7_224,224,753.15,1019.686,768,196.53,34.53,54.94\r\ncoatnet_rmlp_3_rw_224,224,747.95,342.244,256,165.15,33.56,79.47\r\ncoatnet_3_rw_224,224,747.65,342.379,256,181.81,33.44,73.83\r\nrepvgg_d2se,320,745.15,1374.197,1024,120.39,66.99,23.42\r\nseresnextaa101d_32x8d,288,740.79,1382.29,1024,93.59,28.51,56.44\r\nregnetz_d8_evos,256,740.26,1383.275,1024,23.46,4.5,24.92\r\nresnetv2_152x2_bit,224,739.13,1385.394,1024,236.34,46.95,45.11\r\nregnetz_e8,320,738.72,1386.16,1024,57.7,15.46,63.94\r\ncoatnet_3_224,224,738.5,346.623,256,166.97,36.56,79.01\r\nseresnet269d,256,734.86,1393.442,1024,113.67,26.59,53.6\r\nconvnextv2_tiny,384,731.09,525.216,384,28.64,13.14,39.48\r\nconvnext_large,288,729.72,701.618,512,197.77,56.87,71.29\r\nmaxvit_base_tf_224,224,729.68,701.649,512,119.47,24.04,95.01\r\nefficientnet_b5,416,727.89,703.382,512,30.39,8.27,80.68\r\nregnetz_c16_evos,320,726.98,704.255,512,13.49,3.86,25.88\r\nnextvit_large,384,725.0,1412.378,1024,57.83,32.0,90.76\r\nseresnet
200d,288,724.73,1412.903,1024,71.86,25.32,54.6\r\nswinv2_base_window8_256,256,724.5,1060.02,768,87.92,20.37,52.59\r\necaresnet200d,288,723.55,1415.218,1024,64.69,25.31,54.59\r\ncaformer_m36,224,722.08,1418.101,1024,56.2,13.29,50.48\r\nconvnext_base,384,716.38,714.679,512,88.59,45.21,84.49\r\nvit_large_patch14_224,224,714.44,1433.252,1024,304.2,81.08,88.79\r\nresnetrs270,256,710.88,1440.45,1024,129.86,27.06,55.84\r\ndavit_huge,224,703.93,1090.987,768,348.92,61.23,81.32\r\nvit_large_patch14_xp_224,224,701.15,1460.435,1024,304.06,81.01,88.79\r\nvit_base_patch8_224,224,700.85,1095.793,768,86.58,78.22,161.69\r\nresnet200d,320,700.66,1461.448,1024,64.69,31.25,67.33\r\nxcit_large_24_p16_224,224,698.78,1465.383,1024,189.1,35.86,47.27\r\naimv2_large_patch14_224,224,696.48,1470.218,1024,309.2,82.3,85.2\r\nnfnet_f1,320,692.43,1478.821,1024,132.63,35.97,46.77\r\neca_nfnet_l2,320,682.3,1500.778,1024,56.72,20.95,47.43\r\nconvformer_m36,224,673.89,1519.518,1024,57.05,12.89,42.05\r\nresnetv2_101x1_bit,448,672.87,760.892,512,44.54,31.65,64.93\r\nmaxxvitv2_rmlp_large_rw_224,224,669.15,1530.262,1024,215.42,44.14,87.15\r\nswinv2_large_window12_192,192,663.29,771.888,512,228.77,26.17,56.53\r\nhrnet_w48_ssld,288,662.87,1544.762,1024,77.47,28.66,47.21\r\nefficientnetv2_rw_m,416,662.11,1159.9,768,53.24,21.49,79.62\r\nvit_large_patch14_clip_quickgelu_224,224,656.97,1558.634,1024,303.97,81.08,88.79\r\nvit_large_patch16_siglip_gap_256,256,655.16,1562.938,1024,303.36,80.8,88.34\r\ntresnet_l,448,654.84,1563.697,1024,55.99,43.59,47.56\r\ntiny_vit_21m_512,512,652.43,588.54,384,21.26,26.93,177.93\r\nvit_large_patch16_siglip_256,256,652.21,1570.013,1024,315.96,81.34,88.88\r\nnest_base,224,650.08,787.567,512,67.72,17.96,53.39\r\nnf_regnet_b5,456,648.83,789.085,512,49.74,11.7,61.95\r\nnest_base_jx,224,646.7,791.688,512,67.72,17.96,53.39\r\ncaformer_s18,384,644.7,794.137,512,26.34,13.42,77.34\r\nefficientnet_x_b5,448,640.64,1198.766,768,33.44,23.35,68.87\r\nefficientnet_b5,448,638.98,801.248,512,30.39
,9.59,93.56\r\nvit_large_r50_s32_384,384,636.71,1608.238,1024,329.09,57.43,76.52\r\nregnety_640,224,634.53,1210.31,768,281.38,64.16,42.5\r\nhalonet_h1,256,634.22,403.623,256,8.1,3.0,51.17\r\ndm_nfnet_f2,256,630.01,1625.337,1024,193.78,33.76,41.85\r\nmaxvit_tiny_tf_384,384,627.39,408.017,256,30.98,17.53,123.42\r\nswinv2_cr_large_224,224,622.69,1233.339,768,196.68,35.1,78.42\r\nvit_so150m_patch16_reg4_gap_384,384,621.79,1646.831,1024,134.42,87.97,165.47\r\nswinv2_small_window16_256,256,618.12,828.289,512,49.73,12.82,66.29\r\nseresnextaa101d_32x8d,320,613.15,1252.515,768,93.59,35.19,69.67\r\nvit_large_patch16_rope_mixed_224,224,604.69,1693.403,1024,304.2,61.6,68.34\r\nvit_large_patch16_rope_224,224,604.61,1693.632,1024,304.17,61.6,63.52\r\nefficientnet_h_b5,448,603.62,1272.288,768,45.88,27.16,73.9\r\nvit_large_patch16_rope_ape_224,224,601.66,1701.93,1024,304.37,61.6,63.52\r\nvit_large_patch16_rope_mixed_ape_224,224,601.29,1702.986,1024,304.4,61.6,68.34\r\nconvformer_s18,384,597.82,856.419,512,26.77,11.63,46.49\r\nconvnextv2_large,224,597.53,856.83,512,197.96,34.4,43.13\r\nregnety_160,384,595.98,644.29,384,83.59,46.87,67.67\r\nconvmixer_768_32,224,594.88,1721.315,1024,21.11,19.55,25.95\r\nconvnext_large_mlp,320,591.06,866.217,512,200.13,70.21,88.02\r\nresnetrs200,320,589.61,1736.707,1024,93.21,31.51,67.81\r\nvolo_d4_224,224,585.9,1747.719,1024,192.96,44.34,80.22\r\nresnetv2_101x3_bit,224,585.56,1311.531,768,387.93,71.23,48.7\r\npoolformerv2_m48,224,582.71,1757.27,1024,73.35,11.59,29.17\r\nconvnextv2_base,288,580.85,881.435,512,88.72,25.43,47.53\r\nxcit_tiny_12_p8_384,384,578.42,1770.314,1024,6.71,14.13,69.14\r\nseresnet269d,288,570.21,1795.81,1024,113.67,33.65,67.81\r\ndm_nfnet_f1,320,569.35,1798.511,1024,132.63,35.97,46.77\r\ntf_efficientnetv2_m,480,566.5,1355.666,768,54.14,24.76,89.84\r\nvitamin_large2_224,224,565.38,452.769,256,333.58,75.05,112.83\r\nvitamin_large_224,224,565.05,453.031,256,333.32,75.05,112.83\r\ntf_efficientnet_b5,456,562.81,682.261,384,30.39,10.46,
98.86\r\nvit_base_r50_s16_384,384,554.25,1847.512,1024,98.95,67.43,135.03\r\nxcit_small_24_p16_384,384,554.06,1848.148,1024,47.67,26.72,68.58\r\nefficientvit_l3,320,547.27,935.528,512,246.04,56.32,79.34\r\nswinv2_cr_tiny_384,384,544.46,470.162,256,28.33,15.34,161.01\r\nnaflexvit_so150m2_patch16_reg1_gap,384,543.29,1884.787,1024,136.06,89.53,178.22\r\nvit_so150m2_patch16_reg1_gap_384,384,539.66,1897.447,1024,136.33,89.53,178.22\r\nnaflexvit_so150m2_patch16_reg1_map,384,536.81,1907.525,1024,142.46,90.33,179.2\r\ncaformer_b36,224,533.44,1439.679,768,98.75,23.22,67.3\r\ninception_next_base,384,524.06,976.957,512,86.67,43.64,75.48\r\nefficientformerv2_l,224,522.62,1959.334,1024,26.32,2.59,18.54\r\nmaxvit_large_tf_224,224,514.71,746.021,384,211.79,43.68,127.35\r\nhiera_large_224,224,507.8,1512.374,768,213.74,40.34,83.37\r\nefficientnetv2_l,384,507.12,2019.233,1024,118.52,36.1,101.16\r\ntf_efficientnetv2_l,384,500.17,2047.271,1024,118.52,36.1,101.16\r\nconvformer_b36,224,497.45,1543.856,768,99.88,22.69,56.06\r\nnasnetalarge,331,486.02,790.061,384,88.75,23.89,90.56\r\ntresnet_xl,448,484.98,2111.394,1024,78.44,60.77,61.31\r\nvit_so400m_patch14_siglip_gap_224,224,483.27,2118.879,1024,412.44,109.57,106.13\r\nvit_so400m_patch16_siglip_gap_256,256,482.73,2121.235,1024,412.65,109.62,106.13\r\nvit_so400m_patch14_siglip_224,224,482.64,2121.658,1024,427.68,110.26,106.73\r\neca_nfnet_l2,384,480.68,1597.691,768,56.72,30.05,68.28\r\nconvnext_xlarge,288,480.56,1065.4,512,350.2,100.8,95.05\r\nvit_so400m_patch16_siglip_256,256,480.33,2131.819,1024,427.89,110.31,106.73\r\nregnetz_d8_evos,320,472.61,1624.999,768,23.46,7.03,38.92\r\necaresnet269d,320,466.48,2195.156,1024,102.09,41.53,83.69\r\nvit_base_patch16_siglip_gap_512,512,465.03,1100.986,512,86.43,107.0,246.15\r\npnasnet5large,331,461.36,832.3,384,86.06,25.04,92.89\r\nvit_base_patch16_siglip_512,512,461.07,1110.433,512,93.52,108.22,247.74\r\nvit_large_patch16_dinov3,256,457.23,2239.517,1024,303.08,82.43,90.56\r\nvit_large_patch16_dinov
3_qkvb,256,457.03,2240.501,1024,303.13,82.43,90.56\r\nswinv2_base_window12to16_192to256,256,455.6,842.817,384,87.92,22.02,84.71\r\nswinv2_base_window16_256,256,455.5,842.999,384,87.92,22.02,84.71\r\ncoatnet_4_224,224,453.87,564.011,256,275.43,62.48,129.26\r\nresnest200e,320,452.22,2264.34,1024,70.2,35.69,82.78\r\nvolo_d2_384,384,448.99,1710.489,768,58.87,46.17,184.51\r\nresnetrs350,288,444.25,2304.96,1024,163.96,43.67,87.09\r\neca_nfnet_l3,352,438.32,2336.147,1024,72.04,32.57,73.12\r\nvitamin_large_256,256,434.58,441.786,192,333.38,99.0,154.99\r\nvitamin_large2_256,256,434.3,442.058,192,333.64,99.0,154.99\r\neva02_large_patch14_224,224,430.5,2378.586,1024,303.27,81.15,97.2\r\neva02_large_patch14_clip_224,224,424.81,2410.472,1024,304.11,81.18,97.2\r\nmaxvit_small_tf_384,384,418.43,458.83,192,69.02,35.87,183.65\r\nresnet50x16_clip_gap,384,417.94,1225.042,512,136.2,70.32,100.64\r\ncoat_lite_medium_384,384,415.83,1231.255,512,44.57,28.73,116.7\r\nxcit_small_24_p8_224,224,415.82,2462.546,1024,47.63,35.81,90.78\r\nresnext101_32x32d,224,413.78,1237.358,512,468.53,87.29,91.12\r\ncait_xxs24_384,384,412.39,2483.035,1024,12.03,9.63,122.66\r\nconvnext_large_mlp,384,411.01,622.832,256,200.13,101.11,126.74\r\nconvnext_large,384,410.94,622.945,256,197.77,101.1,126.74\r\ncoatnet_rmlp_2_rw_384,384,407.58,471.046,192,73.88,47.69,209.43\r\nresnet50x16_clip,384,404.76,1264.931,512,167.33,74.9,103.54\r\nmambaout_base_plus_rw,384,403.38,951.923,384,101.66,56.39,132.7\r\nvit_huge_patch14_clip_224,224,401.9,2547.858,1024,632.05,167.4,139.41\r\nnfnet_f2,352,401.68,1911.945,768,193.78,63.22,79.06\r\nmvitv2_large_cls,224,398.92,1925.16,768,234.58,42.17,111.69\r\nxcit_medium_24_p16_384,384,395.71,2587.718,1024,84.4,47.39,91.64\r\necaresnet269d,352,388.76,2633.965,1024,102.09,50.25,101.25\r\nvolo_d5_224,224,387.22,2644.445,1024,295.46,72.4,118.11\r\nefficientnet_x_b5,576,384.61,1331.179,512,33.44,38.59,113.83\r\nvit_so150m2_patch16_reg1_gap_448,448,380.41,2691.771,1024,136.5,127.51,287.05\r\nmv
itv2_large,224,379.1,1350.522,512,217.99,43.87,112.02\r\nvitamin_xlarge_256,256,376.18,340.232,128,436.06,130.13,177.37\r\nresnetrs270,352,375.51,2726.908,1024,129.86,51.13,105.48\r\nvit_huge_patch14_224,224,370.78,2761.731,1024,630.76,167.4,139.41\r\nregnety_320,384,370.62,1036.063,384,145.05,95.0,88.87\r\nefficientnet_h_b5,576,369.53,1039.142,384,45.88,44.9,122.13\r\nnfnet_f3,320,368.16,2781.352,1024,254.92,68.77,83.93\r\nvit_huge_patch14_xp_224,224,368.0,2782.606,1024,631.8,167.3,139.41\r\nefficientnetv2_xl,384,367.08,2789.519,1024,208.12,52.81,139.2\r\nefficientvit_l3,384,366.53,1047.633,384,246.04,81.08,114.02\r\ntf_efficientnetv2_xl,384,363.3,2113.938,768,208.12,52.81,139.2\r\nconvnextv2_large,288,361.53,708.081,256,197.96,56.87,71.29\r\naimv2_huge_patch14_224,224,359.11,2851.436,1024,680.85,179.01,126.22\r\nvit_pe_spatial_base_patch16_512,512,357.3,2149.411,768,86.43,107.13,246.54\r\nresmlp_big_24_224,224,356.15,2875.169,1024,129.14,100.23,87.31\r\nmaxvit_tiny_tf_512,512,351.22,364.423,128,31.05,33.49,257.59\r\nvit_base_patch14_dinov2,518,350.33,1461.45,512,86.58,151.71,397.58\r\nefficientnet_b6,528,348.53,734.486,256,43.04,19.4,167.39\r\nvit_base_patch14_reg4_dinov2,518,346.66,1476.935,512,86.58,152.25,399.53\r\ndeit3_huge_patch14_224,224,346.45,2955.651,1024,632.13,167.4,139.41\r\nvit_huge_patch14_clip_quickgelu_224,224,344.03,2976.45,1024,632.08,167.4,139.41\r\nvit_large_patch14_clip_336,336,343.64,2979.851,1024,304.53,191.11,270.24\r\ncaformer_s36,384,338.23,1513.74,512,39.3,26.08,150.33\r\nvit_huge_patch14_gap_224,224,336.93,3039.2,1024,630.76,166.73,138.74\r\nswinv2_cr_small_384,384,336.53,760.673,256,49.7,29.7,298.03\r\nmaxxvitv2_rmlp_base_rw_384,384,335.07,1146.004,384,116.09,72.98,213.74\r\ndm_nfnet_f2,352,332.88,2307.103,768,193.78,63.22,79.06\r\ntf_efficientnet_b6,528,331.46,772.309,256,43.04,19.4,167.39\r\nconvnextv2_base,384,328.09,780.213,256,88.72,45.21,84.49\r\nefficientnetv2_l,480,325.06,1575.073,512,118.52,56.4,157.99\r\nregnety_1280,224,324
.32,1578.677,512,644.81,127.66,71.58\r\ntf_efficientnetv2_l,480,320.56,1597.195,512,118.52,56.4,157.99\r\neva_large_patch14_336,336,313.85,3262.665,1024,304.53,191.1,270.24\r\nconvformer_s36,384,313.68,1632.225,512,40.01,22.54,89.62\r\neva02_base_patch14_448,448,313.09,1635.292,512,87.12,107.11,259.14\r\nvit_large_patch16_384,384,312.48,3276.996,1024,304.72,191.21,270.24\r\nfocalnet_huge_fl3,224,310.05,1651.304,512,745.28,118.26,104.8\r\nseresnextaa201d_32x8d,320,309.01,3313.806,1024,149.39,70.22,138.71\r\nconvmixer_1536_20,224,308.79,3316.091,1024,51.63,48.68,33.03\r\nrdnet_large,384,308.46,622.417,192,186.27,102.09,137.13\r\nresnetrs420,320,306.83,3337.273,1024,191.89,64.2,126.56\r\ndm_nfnet_f3,320,306.41,3341.85,1024,254.92,68.77,83.93\r\nswin_base_patch4_window12_384,384,306.32,835.712,256,87.9,47.19,134.78\r\nmaxvit_xlarge_tf_224,224,306.15,836.172,256,506.99,97.52,191.04\r\nxcit_tiny_24_p8_384,384,304.52,3362.665,1024,12.11,27.05,132.95\r\naimv2_large_patch14_336,336,300.0,2559.932,768,309.53,194.22,227.08\r\nswinv2_cr_huge_224,224,297.22,1291.942,384,657.83,115.97,121.08\r\nxcit_medium_24_p8_224,224,295.45,3465.828,1024,84.32,63.53,121.23\r\nsam2_hiera_tiny,896,291.63,219.428,64,26.85,99.86,384.63\r\ncait_xs24_384,384,289.48,2652.968,768,26.67,19.28,183.98\r\ndeit3_large_patch16_384,384,289.24,3540.287,1024,304.76,191.21,270.24\r\nswinv2_large_window12to16_192to256,256,289.09,664.118,192,196.74,47.81,121.53\r\nvit_giant_patch16_gap_224,224,288.84,3545.224,1024,1011.37,202.46,139.26\r\nvit_large_patch14_clip_quickgelu_336,336,287.21,2673.926,768,304.29,191.11,270.24\r\nconvnext_xxlarge,256,284.13,900.959,256,846.47,198.09,124.45\r\nbeit_large_patch16_384,384,282.28,3627.542,1024,305.0,191.21,270.24\r\nmaxvit_rmlp_base_rw_384,384,278.49,1378.813,384,116.14,70.97,318.95\r\ndavit_giant,224,278.19,1380.322,384,1406.47,192.92,153.06\r\nvit_large_patch16_siglip_gap_384,384,278.09,2761.666,768,303.69,190.85,269.55\r\nvit_large_patch16_siglip_384,384,276.97,2772.875,7
68,316.28,192.07,270.75\r\ncait_xxs36_384,384,275.79,3712.962,1024,17.37,14.35,183.7\r\nconvnextv2_huge,224,275.0,930.878,256,660.29,115.0,79.07\r\nconvnext_xlarge,384,271.1,944.261,256,350.2,179.2,168.99\r\neca_nfnet_l3,448,271.02,1889.134,512,72.04,52.55,118.4\r\nxcit_small_12_p8_384,384,265.56,1445.951,384,26.21,54.92,138.29\r\nresnetv2_152x2_bit,384,264.39,1452.399,384,236.34,136.16,132.56\r\ncoatnet_5_224,224,261.55,734.067,192,687.47,145.49,194.24\r\nresnetv2_152x4_bit,224,259.16,1975.624,512,936.53,186.9,90.22\r\nvit_giant_patch14_clip_224,224,258.64,3959.159,1024,1012.65,267.18,192.64\r\nnaflexvit_so400m_patch16_siglip,384,255.09,3010.698,768,427.89,259.65,319.77\r\nresnetrs350,384,253.82,4034.302,1024,163.96,77.59,154.74\r\nresnetv2_50x3_bit,448,252.62,760.022,192,217.32,145.7,133.37\r\nsam2_hiera_small,896,249.4,256.588,64,33.95,123.99,442.63\r\nswinv2_cr_base_384,384,247.09,1036.015,256,87.88,50.57,333.68\r\nvitamin_large2_336,336,245.6,390.856,96,333.83,175.72,307.47\r\nvitamin_large_336,336,245.46,391.08,96,333.57,175.72,307.47\r\ncaformer_m36,384,245.45,1042.964,256,56.2,42.11,196.35\r\nmaxvit_base_tf_384,384,243.83,787.409,192,119.65,73.8,332.9\r\nhiera_huge_224,224,241.96,1587.005,384,672.78,124.85,150.95\r\nvit_giant_patch14_224,224,241.69,4236.718,1024,1012.61,267.18,192.64\r\neva_giant_patch14_224,224,240.83,4251.918,1024,1012.56,267.18,192.64\r\neva_giant_patch14_clip_224,224,240.16,4263.86,1024,1012.59,267.18,192.64\r\nxcit_large_24_p16_384,384,238.88,3214.944,768,189.1,105.35,137.17\r\nmaxvit_small_tf_512,512,235.48,407.654,96,69.13,67.26,383.77\r\nvolo_d3_448,448,232.24,2204.541,512,86.63,96.33,446.83\r\nconvformer_m36,384,229.65,1114.714,256,57.05,37.87,123.56\r\nregnety_640,384,226.73,1129.09,256,281.38,188.47,124.83\r\nvit_pe_core_large_patch14_336,336,224.94,4552.265,1024,317.15,192.33,271.43\r\nbeit3_giant_patch14_224,224,223.16,4588.704,1024,1013.22,267.56,192.64\r\nseresnextaa201d_32x8d,384,219.11,3505.036,768,149.39,101.11,199.72\r\nca
it_s24_384,384,218.68,2341.289,512,47.06,32.17,245.31\r\nnfnet_f3,416,216.99,2359.487,512,254.92,115.58,141.78\r\nresnest269e,416,214.62,2385.545,512,110.93,77.69,171.98\r\naimv2_1b_patch14_224,224,213.72,2395.63,512,1234.96,322.43,170.39\r\nvitamin_xlarge_336,336,212.61,451.505,96,436.06,230.18,347.33\r\nefficientnetv2_xl,512,210.36,2433.915,512,208.12,93.85,247.32\r\nnfnet_f4,384,210.26,3652.66,768,316.07,122.14,147.57\r\nfocalnet_huge_fl4,224,208.98,2449.975,512,686.46,118.9,113.34\r\nvit_so400m_patch16_siglip_gap_384,384,208.22,3688.387,768,413.02,258.11,318.42\r\ntf_efficientnetv2_xl,512,208.14,2459.812,512,208.12,93.85,247.32\r\nvit_so400m_patch16_siglip_384,384,207.19,3706.674,768,428.26,259.65,319.77\r\nconvnextv2_large,384,203.97,941.291,192,197.96,101.1,126.74\r\nefficientnet_b7,600,203.87,941.756,192,66.35,38.33,289.94\r\nvit_huge_plus_patch16_dinov3_qkvb,256,203.24,3778.765,768,840.59,224.88,193.59\r\nvit_huge_plus_patch16_dinov3,256,203.1,3781.302,768,840.51,224.88,193.59\r\nvit_giantopt_patch16_siglip_gap_256,256,201.15,5090.631,1024,1134.84,298.42,199.62\r\nvit_giantopt_patch16_siglip_256,256,200.76,5100.638,1024,1163.17,299.66,200.43\r\ntf_efficientnet_b7,600,195.6,654.382,128,66.35,38.33,289.94\r\nresnetv2_152x2_bit,448,192.05,1332.936,256,236.34,184.99,180.43\r\nswin_large_patch4_window12_384,384,190.18,673.033,128,196.74,104.08,202.16\r\neva02_large_patch14_clip_336,336,187.14,5471.91,1024,304.43,191.34,289.13\r\nvitamin_large_384,384,186.35,343.408,64,333.71,234.44,440.16\r\nvitamin_large2_384,384,186.18,343.728,64,333.97,234.44,440.16\r\nresnetrs420,416,181.49,4231.5,768,191.89,108.45,213.79\r\ncaformer_b36,384,181.23,1412.566,256,98.75,72.33,261.79\r\ndm_nfnet_f3,416,180.54,2835.845,512,254.92,115.58,141.78\r\nxcit_large_24_p8_224,224,178.53,2867.778,512,188.93,141.23,181.56\r\ndm_nfnet_f4,384,175.37,2919.45,512,316.07,122.14,147.57\r\nvit_huge_patch14_clip_336,336,174.09,4411.581,768,632.46,390.97,407.54\r\nmvitv2_huge_cls,224,172.57,2225.199,
384,694.8,120.67,243.63\r\nmaxvit_large_tf_384,384,172.53,741.877,128,212.03,132.55,445.84\r\nconvformer_b36,384,169.79,1507.723,256,99.88,66.67,164.75\r\nconvnextv2_huge,288,166.27,769.821,128,660.29,190.1,130.7\r\naimv2_large_patch14_448,448,162.41,2364.336,384,309.98,367.84,491.78\r\nvit_so400m_patch14_siglip_gap_384,384,161.35,3173.103,512,412.99,333.46,451.19\r\nvit_so400m_patch14_siglip_gap_378,378,161.3,3174.267,512,412.99,333.46,451.19\r\nvit_so400m_patch14_siglip_378,378,160.71,3185.742,512,428.23,335.4,452.89\r\nvit_so400m_patch14_siglip_384,384,160.62,3187.68,512,428.23,335.4,452.89\r\nvitamin_xlarge_384,384,159.75,400.599,64,436.06,306.38,493.46\r\nvit_intern300m_patch14_448,448,155.93,3283.471,512,304.01,362.05,656.39\r\naimv2_huge_patch14_336,336,155.89,3284.375,512,681.34,416.36,337.08\r\nresnet50x64_clip_gap,448,155.89,1642.117,256,365.03,253.96,233.22\r\nswinv2_cr_large_384,384,155.31,824.141,128,196.68,108.96,404.96\r\nvit_large_patch16_siglip_gap_512,512,152.52,3356.938,512,304.15,361.84,655.36\r\nresnetv2_101x3_bit,448,152.4,1259.792,192,387.93,280.33,194.78\r\nvit_large_patch16_siglip_512,512,152.02,3368.015,512,316.74,364.0,657.48\r\nresnet50x64_clip,448,151.99,1684.275,256,420.38,265.02,239.13\r\ndavit_base_fl,768,151.59,844.349,128,90.37,190.32,530.15\r\nfocalnet_large_fl3,384,150.65,1699.247,256,239.13,105.06,168.04\r\nbeit_large_patch16_512,512,148.31,3452.101,512,305.67,362.24,656.39\r\nvit_gigantic_patch14_clip_224,224,148.12,6913.08,1024,1844.91,483.96,275.37\r\nnfnet_f5,416,147.77,3464.835,512,377.21,170.71,204.56\r\ncait_s36_384,384,146.22,3501.457,512,68.37,47.99,367.4\r\nfocalnet_large_fl4,384,143.73,1781.112,256,239.32,105.2,181.78\r\nvolo_d4_448,448,141.42,2715.258,384,193.41,197.13,527.35\r\nxcit_small_24_p8_384,384,140.12,2740.426,384,47.63,105.24,265.91\r\nvit_gigantic_patch14_224,224,139.31,3675.287,512,1844.44,483.95,275.37\r\nmaxvit_base_tf_512,512,137.16,699.89,96,119.88,138.02,703.99\r\nvit_huge_patch14_clip_378,378,135.92,
3766.995,512,632.68,503.79,572.79\r\nsam2_hiera_base_plus,896,132.41,483.323,64,68.68,227.48,828.88\r\nefficientnet_b8,672,130.91,977.71,128,87.41,63.48,442.89\r\nvit_gigantic_patch14_clip_quickgelu_224,224,129.37,3957.492,512,1844.91,483.96,275.37\r\ntf_efficientnet_b8,672,126.53,1011.583,128,87.41,63.48,442.89\r\ndm_nfnet_f5,416,123.93,4131.447,512,377.21,170.71,204.56\r\nvit_pe_spatial_large_patch14_448,448,121.88,4200.763,512,303.96,362.05,656.39\r\nswinv2_base_window12to24_192to384,384,119.63,534.95,64,87.92,55.25,280.36\r\nnfnet_f4,512,119.15,3222.727,384,316.07,216.26,262.26\r\nregnety_1280,384,117.78,1086.784,128,644.81,374.99,210.2\r\nvit_huge_patch14_clip_quickgelu_378,378,117.35,4362.909,512,632.68,503.79,572.79\r\nvit_large_patch14_dinov2,518,114.56,3351.965,384,304.37,507.15,1058.82\r\nvit_large_patch14_reg4_dinov2,518,113.91,3371.03,384,304.37,508.9,1064.02\r\nvit_pe_lang_large_patch14_448,448,113.59,4507.212,512,291.42,346.99,629.09\r\nvit_so400m_patch14_siglip_gap_448,448,112.1,3425.635,384,413.33,487.18,764.26\r\nvit_so400m_patch16_siglip_gap_512,512,111.89,3431.977,384,413.53,487.4,764.26\r\nnfnet_f6,448,111.51,4591.46,512,438.36,229.7,273.62\r\nvit_so400m_patch16_siglip_512,512,111.39,3447.308,384,428.77,490.13,766.65\r\nfocalnet_xlarge_fl3,384,105.76,1815.489,192,408.79,185.61,223.99\r\neva_giant_patch14_336,336,105.26,4864.063,512,1013.01,620.64,550.67\r\naimv2_3b_patch14_224,224,104.1,1844.432,192,2720.66,705.91,252.44\r\nvit_huge_patch16_gap_448,448,104.02,3691.453,384,631.67,544.7,636.83\r\nmaxvit_xlarge_tf_384,384,103.12,930.964,96,475.32,292.78,668.76\r\neva02_large_patch14_448,448,102.94,4973.62,512,305.08,362.33,689.95\r\nfocalnet_xlarge_fl4,384,101.16,1897.968,192,409.03,185.79,242.31\r\nxcit_medium_24_p8_384,384,99.71,2567.289,256,84.32,186.67,354.73\r\ndm_nfnet_f4,512,98.87,3884.035,384,316.07,216.26,262.26\r\nbeit3_giant_patch14_336,336,97.69,3930.664,384,1013.67,621.52,550.67\r\nmaxvit_large_tf_512,512,96.32,664.395,64,212.33,244.75,
942.15\r\nvolo_d5_448,448,94.4,2711.732,256,295.91,315.06,737.92\r\nconvnextv2_huge,384,93.6,1025.653,96,660.29,337.96,232.35\r\ndm_nfnet_f6,448,93.25,4117.75,384,438.36,229.7,273.62\r\naimv2_1b_patch14_336,336,93.15,2748.218,256,1235.61,743.59,454.16\r\nswinv2_cr_giant_224,224,88.92,1439.455,128,2598.76,483.85,309.15\r\nvit_giantopt_patch16_siglip_gap_384,384,87.76,5833.863,512,1135.33,694.1,567.12\r\nvit_giantopt_patch16_siglip_384,384,87.5,5851.245,512,1163.66,696.85,568.91\r\nnfnet_f5,544,86.83,2948.315,256,377.21,290.97,349.71\r\naimv2_huge_patch14_448,448,84.92,3014.448,256,682.03,774.02,731.38\r\nnfnet_f7,480,84.63,4537.186,384,499.5,300.08,355.86\r\ntf_efficientnet_l2,475,83.83,1145.093,96,480.31,172.11,609.89\r\nswinv2_large_window12to24_192to384,384,76.33,628.792,48,196.74,116.15,407.83\r\nswinv2_cr_huge_384,384,73.57,869.943,64,657.94,352.04,583.18\r\ndm_nfnet_f5,544,72.89,3512.098,256,377.21,290.97,349.71\r\nvolo_d5_512,512,72.2,3545.46,256,296.09,425.09,1105.37\r\nnfnet_f6,576,67.8,3775.999,256,438.36,378.69,452.2\r\nregnety_2560,384,65.25,1471.322,96,1282.6,747.83,296.49\r\ncait_m36_384,384,64.15,3990.858,256,271.22,173.11,734.81\r\nxcit_large_24_p8_384,384,60.01,3199.421,192,188.93,415.0,531.82\r\ndavit_huge_fl,768,59.49,1075.826,64,360.64,744.84,1060.3\r\nresnetv2_152x4_bit,480,58.41,2191.46,128,936.53,844.84,414.26\r\nmaxvit_xlarge_tf_512,512,57.65,832.643,48,475.77,534.14,1413.22\r\ndm_nfnet_f6,576,56.61,4521.792,256,438.36,378.69,452.2\r\nnfnet_f7,608,53.23,4809.562,256,499.5,480.39,570.85\r\nconvnextv2_huge,512,52.61,912.323,48,660.29,600.81,413.07\r\naimv2_1b_patch14_448,448,51.1,2505.105,128,1236.53,1367.03,983.56\r\nvit_gigantic_patch14_clip_378,378,50.84,5035.723,256,1845.7,1429.82,1047.37\r\naimv2_3b_patch14_336,336,45.69,2100.903,96,2721.64,1615.48,674.17\r\nsam2_hiera_large,1024,44.06,1089.483,48,212.15,907.48,2190.34\r\nvit_giant_patch14_dinov2,518,35.02,3655.546,128,1136.48,1784.2,2757.89\r\nvit_giant_patch14_reg4_dinov2,518,34.9,3667.11
8,128,1136.48,1790.08,2771.21\r\neva_giant_patch14_560,560,34.78,3680.249,128,1014.45,1906.76,2577.17\r\nmobilenetv5_300m,768,31.86,2008.994,64,294.13,435.74,842.16\r\nmobilenetv5_300m_enc,768,31.14,2055.064,64,294.13,435.74,842.16\r\nefficientnet_l2,800,29.91,1069.897,32,480.31,479.12,1707.39\r\ntf_efficientnet_l2,800,29.24,1094.392,32,480.31,479.12,1707.39\r\nvit_pe_core_gigantic_patch14_448,448,27.66,4627.286,128,1882.03,2060.12,1774.21\r\ncait_m48_448,448,27.51,4653.456,128,356.46,329.41,1708.23\r\nvit_pe_lang_gigantic_patch14_448,448,27.23,4700.689,128,1740.92,1931.99,1664.88\r\nvit_pe_spatial_gigantic_patch14_448,448,25.6,5000.443,128,1851.89,2055.25,1771.04\r\naimv2_3b_patch14_448,448,25.22,1903.456,48,2723.02,2939.61,1462.76\r\nswinv2_cr_giant_384,384,23.93,1337.195,32,2598.76,1450.71,1394.86\r\nvit_so400m_patch14_siglip_gap_896,896,20.75,4627.142,96,416.87,2731.49,8492.88\r\nsamvit_base_patch16,1024,15.49,387.254,6,89.67,486.43,1343.27\r\nsamvit_large_patch16,1024,8.44,473.74,4,308.28,1493.86,2553.78\r\nsamvit_huge_patch16,1024,6.28,636.49,4,637.03,2982.23,3428.16\r\n"
  },
  {
    "path": "results/benchmark-infer-amp-nchw-pt291-cu130-5090-dynamo.csv",
    "content": "model,infer_img_size,infer_samples_per_sec,infer_step_time,infer_batch_size,param_count,infer_gmacs,infer_macts\r\ntest_vit,160,414210.56,2.456,1024,0.37,0.04,0.48\r\ntest_vit2,160,347926.81,2.927,1024,0.46,0.05,0.64\r\ntest_byobnet,160,311431.34,3.272,1024,0.46,0.03,0.43\r\ntest_efficientnet,160,285687.11,3.57,1024,0.36,0.06,0.55\r\ntest_efficientnet_ln,160,284883.89,3.579,1024,0.36,0.06,0.55\r\ntest_mambaout,160,283871.26,3.593,1024,0.45,0.03,0.53\r\ntest_convnext,160,264008.57,3.865,1024,0.27,0.03,0.58\r\ntest_efficientnet_evos,160,252271.26,4.043,1024,0.36,0.06,0.55\r\ntest_efficientnet_gn,160,245856.18,4.151,1024,0.36,0.06,0.55\r\ntest_convnext3,160,242634.84,4.206,1024,0.47,0.05,0.63\r\ntest_convnext2,160,235033.01,4.341,1024,0.48,0.05,0.63\r\ntest_resnet,160,218542.67,4.669,1024,0.47,0.1,0.64\r\ntest_vit3,160,217920.99,4.684,1024,0.93,0.09,1.0\r\ntinynet_e,106,204046.45,5.002,1024,2.04,0.03,0.69\r\ntest_mambaout,192,197910.98,5.159,1024,0.45,0.04,0.77\r\nmobilenetv4_conv_small_035,224,182487.87,5.597,1024,1.91,0.05,0.98\r\ntest_vit4,160,181521.21,5.626,1024,1.02,0.11,1.07\r\nefficientvit_m0,224,159181.93,6.418,1024,2.33,0.08,0.91\r\nmobilenetv4_conv_small_050,224,158257.21,6.452,1024,2.24,0.07,1.18\r\nmobilenetv3_small_050,224,149806.88,6.82,1024,1.59,0.03,0.92\r\nlcnet_035,224,142292.49,7.182,1024,1.64,0.03,1.04\r\nmobilenetv4_conv_small_035,256,138790.66,7.359,1024,1.91,0.06,1.28\r\nlcnet_050,224,125772.48,8.125,1024,1.88,0.05,1.26\r\nmobilenetv4_conv_small_050,256,120659.46,8.47,1024,2.24,0.09,1.55\r\ntf_mobilenetv3_small_minimal_100,224,116685.76,8.758,1024,2.04,0.06,1.41\r\ntest_nfnet,160,116002.45,8.811,1024,0.38,0.29,1.2\r\nstarnet_s050,224,115831.75,8.824,1024,0.54,0.09,1.57\r\nefficientvit_m1,224,115101.94,8.88,1024,2.96,0.17,1.33\r\nshvit_s1,224,113759.4,8.985,1024,6.31,0.24,1.39\r\nefficientvit_m2,224,109069.43,9.373,1024,4.17,0.2,1.47\r\nmobilenetv3_small_075,224,106959.18,9.559,1024,2.04,0.05,1.3\r\nmobilenetv4_conv_small,224,9767
8.96,10.468,1024,3.77,0.19,1.97\r\nefficientvit_m3,224,96756.18,10.565,1024,6.88,0.26,1.62\r\nmobilenetv3_small_100,224,95781.96,10.675,1024,2.54,0.06,1.42\r\ntinynet_d,152,93101.23,10.982,1024,2.34,0.05,1.42\r\nefficientvit_m4,224,92538.71,11.05,1024,8.78,0.3,1.7\r\nshvit_s2,224,92149.39,11.094,1024,11.45,0.37,1.6\r\ntf_mobilenetv3_small_075,224,88218.21,11.592,1024,2.04,0.05,1.3\r\nrepghostnet_050,224,87785.6,11.649,1024,2.31,0.05,2.02\r\nlevit_conv_128s,224,82652.32,12.375,1024,7.76,0.3,1.88\r\nlcnet_075,224,81421.72,12.56,1024,2.36,0.1,1.99\r\nmnasnet_small,224,80628.0,12.682,1024,2.03,0.07,2.16\r\ntf_mobilenetv3_small_100,224,79788.29,12.818,1024,2.54,0.06,1.42\r\nlevit_128s,224,77736.44,13.153,1024,7.76,0.3,1.88\r\nresnet10t,176,77017.51,13.283,1024,5.44,0.7,1.51\r\nregnetx_002,224,74087.79,13.807,1024,2.68,0.2,2.16\r\nghostnet_050,224,73755.62,13.867,1024,2.59,0.05,1.77\r\nstarnet_s100,224,71458.92,14.315,1024,1.04,0.19,2.68\r\nmobilenetv4_conv_small,256,71279.8,14.347,1024,3.77,0.25,2.57\r\nresnet18,160,70854.49,14.432,1024,11.69,0.93,1.27\r\nstarnet_s150,224,68796.99,14.867,1024,1.56,0.23,2.75\r\nregnety_002,224,68790.1,14.869,1024,3.16,0.2,2.17\r\nrepghostnet_058,224,67992.31,15.041,1024,2.54,0.06,2.59\r\nshvit_s3,224,67804.26,15.087,1024,14.21,0.6,2.33\r\nfasternet_t0,224,67656.14,15.118,1024,3.91,0.34,1.97\r\nmobilenetv2_035,224,66377.27,15.41,1024,1.68,0.07,2.86\r\nefficientvit_m5,224,65555.33,15.605,1024,12.44,0.52,2.41\r\nlcnet_100,224,64980.77,15.742,1024,2.95,0.16,2.52\r\nlevit_conv_128,224,59472.18,17.202,1024,9.19,0.41,2.71\r\nmnasnet_050,224,59214.88,17.277,1024,2.22,0.11,3.07\r\nconvnext_zepto_rms,224,58047.4,17.626,1024,2.16,0.3,2.75\r\nrepghostnet_080,224,56778.37,18.018,1024,3.27,0.1,3.22\r\nefficientvit_b0,224,55180.9,18.542,1024,3.41,0.1,2.87\r\nhgnetv2_b0,224,55061.6,18.581,1024,6.0,0.33,2.12\r\nlevit_128,224,53545.67,19.104,1024,9.19,0.41,2.71\r\nconvnext_zepto_rms_ols,224,52038.88,19.662,1024,2.16,0.34,3.15\r\nregnetx_004,224,51371.68,19
.916,1024,5.16,0.4,3.14\r\nmobilenetv2_050,224,50998.15,20.06,1024,1.97,0.1,3.64\r\nsemnasnet_050,224,50860.61,20.117,1024,2.08,0.11,3.44\r\nlevit_conv_192,224,49736.76,20.573,1024,10.92,0.66,3.2\r\nrepvgg_a0,224,49139.48,20.82,1024,8.31,1.36,1.79\r\nregnetx_004_tv,224,48633.83,21.039,1024,5.5,0.42,3.17\r\ngernet_s,224,48574.37,21.063,1024,8.17,0.75,2.65\r\npit_ti_224,224,48141.58,21.256,1024,4.85,0.7,6.19\r\nresnet10t,224,47783.44,21.416,1024,5.44,1.1,2.43\r\nvit_tiny_r_s16_p8_224,224,47476.1,21.549,1024,6.34,0.44,2.06\r\nmobileone_s0,224,47272.65,21.646,1024,2.08,0.28,3.79\r\nvit_small_patch32_224,224,46713.19,21.903,1024,22.88,1.15,2.5\r\nghostnetv3_050,224,46624.87,21.946,1024,2.85,0.05,2.28\r\npit_ti_distilled_224,224,46513.32,21.996,1024,5.1,0.71,6.23\r\ncs3darknet_focus_s,256,46271.93,22.113,1024,3.27,0.69,2.7\r\nrepghostnet_100,224,45835.87,22.323,1024,4.06,0.15,3.98\r\nlcnet_150,224,44761.54,22.859,1024,4.5,0.34,3.79\r\nedgenext_xx_small,256,43742.76,23.395,1024,1.33,0.26,3.33\r\ntinynet_c,184,43546.59,23.497,1024,2.46,0.11,2.87\r\nmobilenetv4_conv_small,320,43310.55,23.625,1024,3.77,0.39,4.01\r\nfasternet_t1,224,43268.19,23.651,1024,7.6,0.85,3.15\r\ncs3darknet_s,256,42904.78,23.85,1024,3.28,0.72,2.97\r\nresnet14t,176,42786.18,23.914,1024,10.08,1.07,3.61\r\nmixer_s32_224,224,42540.93,24.056,1024,19.1,1.0,2.28\r\nstarnet_s2,224,42536.99,24.058,1024,3.68,0.55,4.73\r\nnf_regnet_b0,192,41853.78,24.448,1024,8.76,0.37,3.15\r\nresnet34,160,41182.8,24.846,1024,21.8,1.87,1.91\r\nrepghostnet_111,224,41065.26,24.919,1024,4.52,0.18,4.38\r\nstarnet_s1,224,40458.09,25.293,1024,2.87,0.42,4.99\r\nhgnetv2_b1,224,40095.96,25.522,1024,6.34,0.49,2.73\r\ntf_mobilenetv3_large_minimal_100,224,40027.82,25.567,1024,3.92,0.22,4.4\r\nconvnext_atto_rms,224,39908.1,25.644,1024,3.69,0.55,3.81\r\nmobilenetv3_large_075,224,39459.16,25.934,1024,3.99,0.16,4.0\r\nconvnext_atto,224,39332.05,26.018,1024,3.7,0.55,3.81\r\nlevit_192,224,38758.16,26.404,1024,10.92,0.66,3.2\r\nmnasnet_075,224,38475
.33,26.596,1024,3.17,0.23,4.77\r\nshvit_s4,256,37867.84,27.026,1024,16.55,0.99,3.73\r\nghostnet_100,224,37503.04,27.284,1024,5.18,0.15,3.55\r\nresnetv2_18,224,37088.72,27.592,1024,11.69,1.82,2.48\r\nregnety_004,224,36941.9,27.697,1024,4.34,0.41,3.89\r\nconvnext_atto_ols,224,36925.3,27.716,1024,3.7,0.58,4.11\r\ntf_mobilenetv3_large_075,224,36899.72,27.734,1024,3.99,0.16,4.0\r\ninception_next_atto,224,36121.3,28.334,1024,4.16,0.5,3.63\r\nlevit_conv_256,224,36070.07,28.373,1024,18.86,1.13,4.23\r\nconvnextv2_atto,224,35699.12,28.666,1024,3.71,0.55,3.81\r\nresnet18,224,35635.29,28.719,1024,11.69,1.82,2.48\r\nregnety_006,224,35466.03,28.855,1024,6.06,0.61,4.33\r\nmobilenetv3_rw,224,35002.39,29.239,1024,5.48,0.23,4.41\r\nxcit_nano_12_p16_224,224,34850.64,29.366,1024,3.05,0.56,4.17\r\nmobilenetv3_large_100,224,34721.93,29.473,1024,5.48,0.23,4.41\r\nmobilenetv1_100,224,34368.02,29.775,1024,4.23,0.58,5.04\r\nrepvgg_a1,224,34096.98,30.012,1024,12.79,2.36,2.37\r\ntf_efficientnetv2_b0,192,33968.0,30.128,1024,7.14,0.54,3.51\r\nhardcorenas_a,224,33810.51,30.271,1024,5.26,0.23,4.38\r\nrepghostnet_130,224,33754.84,30.32,1024,5.46,0.24,5.24\r\nmobilenetv1_100h,224,33743.72,30.326,1024,5.28,0.63,5.09\r\nedgenext_xx_small,288,33698.25,30.371,1024,1.33,0.33,4.21\r\ndla46_c,224,33672.32,30.381,1024,1.3,0.58,4.5\r\nhardcorenas_b,224,33549.42,30.507,1024,5.18,0.26,5.09\r\ndeit_tiny_patch16_224,224,33454.94,30.59,1024,5.72,1.26,5.97\r\nhgnetv2_b0,288,33184.33,30.838,1024,6.0,0.54,3.51\r\nvit_tiny_patch16_224,224,33176.46,30.834,1024,5.72,1.26,5.97\r\nmnasnet_100,224,33126.98,30.891,1024,4.38,0.33,5.46\r\nconvnext_femto,224,32896.08,31.113,1024,5.22,0.79,4.57\r\ndeit_tiny_distilled_patch16_224,224,32757.49,31.239,1024,5.91,1.27,6.01\r\nlegacy_seresnet18,224,32697.3,31.3,1024,11.78,1.82,2.49\r\nseresnet18,224,32686.93,31.311,1024,11.78,1.82,2.49\r\nese_vovnet19b_slim_dw,224,32674.81,31.32,1024,1.9,0.4,5.28\r\nhardcorenas_c,224,32413.69,31.575,1024,5.52,0.28,5.01\r\ntf_mobilenetv3_large_100,22
4,32334.0,31.653,1024,5.48,0.23,4.41\r\nresnetv2_18d,224,32251.11,31.734,1024,11.71,2.06,3.29\r\nmobilenet_edgetpu_v2_xs,224,32087.66,31.895,1024,4.46,0.7,4.8\r\nregnetx_008,224,31943.92,32.041,1024,7.26,0.81,5.15\r\nsemnasnet_075,224,31842.08,32.137,1024,2.91,0.23,5.54\r\nmobilenetv2_075,224,31783.85,32.202,1024,2.64,0.22,5.86\r\nconvnext_femto_ols,224,31197.24,32.807,1024,5.23,0.82,4.87\r\nlevit_conv_256d,224,31096.78,32.911,1024,26.16,1.39,4.93\r\nresnet18d,224,30990.52,33.027,1024,11.71,2.06,3.29\r\nmobilenetv4_conv_medium,224,30932.86,33.086,1024,9.72,0.84,5.8\r\nvit_medium_patch32_clip_224,224,30665.58,33.376,1024,39.69,2.0,3.34\r\nstarnet_s3,224,30655.67,33.385,1024,5.75,0.76,6.66\r\nconvnext_atto_rms,256,30565.76,33.487,1024,3.69,0.71,4.98\r\nspnasnet_100,224,30392.63,33.671,1024,4.42,0.35,6.03\r\nrepghostnet_150,224,30329.24,33.745,1024,6.55,0.31,6.0\r\nefficientformerv2_s0,224,30218.8,33.87,1024,3.6,0.41,5.3\r\nconvnextv2_femto,224,29638.54,34.534,1024,5.23,0.79,4.57\r\ncs3darknet_focus_s,320,29273.57,34.96,1024,3.27,1.08,4.22\r\nmobilenet_edgetpu_100,224,29107.21,35.163,1024,4.09,1.0,5.75\r\nhardcorenas_d,224,28946.0,35.355,1024,7.5,0.3,4.93\r\nregnety_008,224,28909.5,35.404,1024,6.26,0.81,5.25\r\nvit_xsmall_patch16_clip_224,224,28777.04,35.566,1024,8.28,1.79,6.65\r\nlevit_256,224,28560.23,35.838,1024,18.86,1.13,4.23\r\nmobilenetv4_hybrid_medium_075,224,28477.14,35.938,1024,7.31,0.66,5.65\r\nrepvit_m1,224,28461.14,35.962,1024,5.07,0.82,6.17\r\nrepvit_m0_9,224,28433.29,35.998,1024,5.07,0.82,6.17\r\nghostnet_130,224,28361.16,36.088,1024,7.36,0.24,4.6\r\ntinynet_b,188,28280.25,36.193,1024,3.73,0.21,4.44\r\nese_vovnet19b_slim,224,27947.52,36.624,1024,3.17,1.69,3.52\r\npit_xs_224,224,27930.78,36.645,1024,10.62,1.4,7.71\r\nsemnasnet_100,224,27831.26,36.777,1024,3.89,0.32,6.23\r\nmobileone_s1,224,27646.23,37.021,1024,4.76,0.83,6.27\r\nmobilenetv2_100,224,27591.89,37.096,1024,3.5,0.31,6.68\r\npit_xs_distilled_224,224,27583.88,37.107,1024,11.0,1.41,7.76\r\nfbnetc_
100,224,27520.28,37.193,1024,5.57,0.4,6.51\r\nregnety_008_tv,224,27405.57,37.346,1024,6.43,0.84,5.42\r\npvt_v2_b0,224,27211.89,37.61,1024,3.67,0.57,7.99\r\nefficientnet_lite0,224,27089.77,37.781,1024,4.65,0.4,6.74\r\nresnet14t,224,26838.08,38.136,1024,10.08,1.69,5.8\r\nmobilevit_xxs,256,26720.68,38.304,1024,1.27,0.42,8.34\r\ntf_efficientnetv2_b1,192,26666.96,38.383,1024,8.14,0.76,4.59\r\nhrnet_w18_small,224,26651.0,38.403,1024,13.19,1.61,5.72\r\ntf_efficientnet_lite0,224,26644.88,38.415,1024,4.65,0.4,6.74\r\nrepvgg_b0,224,26580.35,38.504,1024,14.34,3.06,3.07\r\nmobilenetv1_100,256,26323.32,38.883,1024,4.23,0.76,6.59\r\nmobilenetv3_large_100,256,26247.58,38.996,1024,5.48,0.29,5.75\r\nskresnet18,224,25945.68,39.452,1024,11.96,1.82,3.24\r\nmobilenetv1_100h,256,25778.8,39.703,1024,5.28,0.82,6.65\r\nfasternet_t2,224,25545.45,40.066,1024,14.98,1.91,4.73\r\nhgnetv2_b2,224,25518.48,40.111,1024,11.22,1.15,4.12\r\nregnetx_006,224,25487.12,40.161,1024,6.2,0.61,3.98\r\nlevit_256d,224,25282.54,40.481,1024,26.16,1.39,4.93\r\nmobilenetv1_125,224,25207.97,40.604,1024,6.27,0.89,6.3\r\nhardcorenas_f,224,25113.42,40.754,1024,8.2,0.35,5.57\r\nresnetblur18,224,24983.11,40.97,1024,11.69,2.34,3.39\r\nhardcorenas_e,224,24963.14,41.0,1024,8.07,0.35,5.65\r\ngmlp_ti16_224,224,24842.07,41.199,1024,5.87,1.34,7.55\r\ntf_efficientnetv2_b0,224,24702.64,41.433,1024,7.14,0.73,4.77\r\nresnet50,160,24613.55,41.587,1024,25.56,2.1,5.67\r\nswiftformer_xs,224,24561.46,41.674,1024,3.48,0.61,6.45\r\nedgenext_x_small,256,24433.63,41.89,1024,2.34,0.54,5.93\r\nconvnext_pico,224,24319.33,42.09,1024,9.05,1.37,6.1\r\nhgnetv2_b1,288,24171.9,42.344,1024,6.34,0.82,4.51\r\nconvnext_atto,288,23691.55,43.206,1024,3.7,0.91,6.3\r\nnf_regnet_b0,256,23601.45,43.37,1024,8.76,0.64,5.58\r\nmobilenetv4_conv_medium,256,23138.89,44.232,1024,9.72,1.1,7.58\r\nconvnext_pico_ols,224,23132.69,44.25,1024,9.06,1.43,6.5\r\nghostnetv2_100,224,23081.58,44.345,1024,6.16,0.18,4.55\r\nmobilenetv4_hybrid_medium,224,23081.26,44.343,1024,11.07,
0.98,6.84\r\nresnet50d,160,22959.12,44.585,1024,25.58,2.22,6.08\r\nmnasnet_140,224,22779.88,44.929,1024,7.12,0.6,7.71\r\ntinynet_a,192,22727.62,45.038,1024,6.19,0.35,5.41\r\nghostnetv3_100,224,22440.36,45.614,1024,6.15,0.17,4.55\r\nrepvit_m1_0,224,22420.15,45.657,1024,6.81,1.11,7.19\r\nmobilenet_edgetpu_v2_s,224,22405.68,45.68,1024,5.99,1.21,6.6\r\nresnetv2_18,288,22318.23,45.861,1024,11.69,3.0,4.11\r\nconvnext_atto_ols,288,22250.1,46.004,1024,3.7,0.96,6.8\r\nrepghostnet_200,224,22228.68,46.049,1024,9.77,0.53,7.96\r\nxcit_tiny_12_p16_224,224,22145.89,46.219,1024,6.72,1.24,6.29\r\nmobileone_s2,224,22093.7,46.333,1024,7.81,1.3,7.56\r\ncrossvit_9_240,240,21991.14,46.545,1024,8.55,1.85,9.52\r\nefficientformer_l1,224,21813.74,46.924,1024,12.29,1.3,5.53\r\nstarnet_s4,224,21726.92,47.109,1024,7.48,1.05,9.56\r\neva02_tiny_patch14_224,224,21685.11,47.199,1024,5.5,1.7,9.14\r\nconvnextv2_pico,224,21655.2,47.266,1024,9.07,1.37,6.1\r\nmobilevitv2_050,256,21629.6,47.326,1024,1.37,0.48,8.04\r\nefficientvit_b1,224,21583.69,47.426,1024,9.1,0.53,7.25\r\ncrossvit_tiny_240,240,21575.52,47.442,1024,7.01,1.57,9.08\r\ngernet_m,224,21480.82,47.651,1024,21.14,3.02,5.24\r\nconvnextv2_atto,288,21477.85,47.661,1024,3.71,0.91,6.3\r\nrepvit_m1_1,224,21463.88,47.692,1024,8.24,1.34,7.82\r\nresnet18,288,21423.42,47.781,1024,11.69,3.01,4.11\r\nrepvit_m2,224,21420.01,47.784,1024,8.24,1.34,7.82\r\nvit_betwixt_patch32_clip_224,224,21399.68,47.833,1024,61.41,3.09,4.17\r\ncs3darknet_focus_m,256,21376.51,47.887,1024,9.3,1.98,4.89\r\nregnetz_005,224,21351.61,47.942,1024,7.12,0.52,5.86\r\nrexnetr_100,224,21203.11,48.278,1024,4.88,0.43,7.72\r\nmobilenetv4_conv_blur_medium,224,21171.89,48.346,1024,9.72,1.22,8.58\r\nmobilenetv2_110d,224,21119.43,48.467,1024,4.52,0.45,8.71\r\nrexnet_100,224,21081.11,48.557,1024,4.8,0.41,7.44\r\nmambaout_femto,224,20985.44,48.777,1024,7.3,1.16,8.34\r\nlevit_conv_384,224,20944.46,48.875,1024,39.07,2.35,6.26\r\nefficientformerv2_s1,224,20898.14,48.983,1024,6.19,0.67,7.66\r\nfbnetv
3_b,224,20872.84,49.037,1024,8.6,0.42,6.97\r\nresnet34,224,20513.27,49.901,1024,21.8,3.67,3.74\r\ncs3darknet_m,256,20430.95,50.104,1024,9.31,2.08,5.28\r\nresnetv2_34,224,20371.58,50.248,1024,21.8,3.67,3.74\r\nresnext50_32x4d,160,20233.74,50.591,1024,25.03,2.17,7.35\r\nresnet26,224,20103.43,50.92,1024,16.0,2.36,7.35\r\nresnet50,176,19890.75,51.463,1024,25.56,2.62,6.92\r\nswiftformer_s,224,19838.02,51.6,1024,6.09,0.99,7.81\r\nhgnetv2_b3,224,19763.53,51.795,1024,16.29,1.78,5.07\r\ntf_efficientnetv2_b2,208,19759.33,51.807,1024,10.1,1.06,6.0\r\ncrossvit_9_dagger_240,240,19755.12,51.816,1024,8.78,1.99,9.97\r\nconvnext_femto,288,19740.86,51.856,1024,5.22,1.3,7.56\r\nseresnet18,288,19639.0,52.121,1024,11.78,3.01,4.11\r\nseresnet50,160,19622.18,52.165,1024,28.09,2.1,5.69\r\nmobilenetv1_125,256,19302.37,53.034,1024,6.27,1.16,8.23\r\nresnetv2_18d,288,19228.59,53.237,1024,11.71,3.4,5.43\r\ndla34,224,19227.64,53.241,1024,15.74,3.07,5.02\r\nsemnasnet_140,224,19174.11,53.389,1024,6.11,0.6,8.87\r\nedgenext_x_small,288,19119.28,53.542,1024,2.34,0.68,7.5\r\nresnet34d,224,18917.64,54.113,1024,21.82,3.91,4.54\r\nnf_resnet26,224,18866.33,54.257,1024,16.0,2.41,7.35\r\nmobilenetv2_140,224,18854.89,54.292,1024,6.11,0.6,9.57\r\nresnetv2_34d,224,18828.13,54.371,1024,21.82,3.91,4.54\r\nseresnet34,224,18784.72,54.493,1024,21.96,3.67,3.74\r\nese_vovnet19b_dw,224,18779.95,54.51,1024,6.54,1.34,8.25\r\nconvnext_femto_ols,288,18771.14,54.534,1024,5.23,1.35,8.06\r\necaresnet50t,160,18735.05,54.636,1024,25.57,2.21,6.04\r\nlegacy_seresnet34,224,18723.03,54.677,1024,21.96,3.67,3.74\r\nresnet18d,288,18498.78,55.335,1024,11.71,3.41,5.43\r\nselecsls42,224,18415.95,55.589,1024,30.35,2.94,4.62\r\nresnetrs50,160,18394.01,55.654,1024,35.69,2.29,6.2\r\nselecsls42b,224,18339.83,55.818,1024,32.46,2.98,4.62\r\ntiny_vit_5m_224,224,18306.57,55.916,1024,12.08,1.27,11.25\r\nefficientnet_es_pruned,224,18246.23,56.104,1024,5.44,1.81,8.73\r\nefficientnet_es,224,18237.1,56.134,1024,5.44,1.81,8.73\r\nefficientnet_b0,224,1
8234.44,56.135,1024,5.29,0.4,6.75\r\nresnet26d,224,18045.52,56.727,1024,16.01,2.6,8.15\r\nefficientnet_lite1,240,18009.74,56.841,1024,5.42,0.62,10.14\r\nconvnextv2_femto,288,17976.64,56.943,1024,5.23,1.3,7.56\r\ntf_efficientnet_es,224,17888.65,57.227,1024,5.44,1.81,8.73\r\ntf_efficientnet_lite1,240,17804.39,57.496,1024,5.42,0.62,10.14\r\nnf_regnet_b2,240,17699.89,57.836,1024,14.31,0.97,7.23\r\nrepvgg_a2,224,17633.7,58.052,1024,25.5,5.12,3.13\r\nnf_ecaresnet26,224,17588.65,58.198,1024,16.0,2.41,7.36\r\ngmixer_12_224,224,17581.68,58.225,1024,12.7,2.67,7.26\r\nghostnetv2_130,224,17563.54,58.285,1024,8.96,0.28,5.9\r\nnf_seresnet26,224,17560.73,58.295,1024,17.4,2.41,7.36\r\nfbnetv3_d,224,17472.58,58.586,1024,10.31,0.52,8.5\r\nmobileone_s3,224,17400.35,58.834,1024,10.08,1.9,9.13\r\nmobilenet_edgetpu_v2_m,224,17359.06,58.971,1024,8.46,1.85,8.15\r\nmambaout_kobe,224,17313.92,59.126,1024,9.14,1.52,10.0\r\nnf_regnet_b1,256,17313.23,59.126,1024,10.22,0.82,7.27\r\nmobilenetv4_hybrid_medium,256,17200.62,59.515,1024,11.07,1.29,9.01\r\nghostnetv3_130,224,17029.19,60.114,1024,8.95,0.28,5.9\r\ncs3darknet_focus_m,288,16863.62,60.706,1024,9.3,2.51,6.19\r\nefficientvit_b1,256,16657.21,61.459,1024,9.1,0.69,9.46\r\nresnetaa34d,224,16581.68,61.736,1024,21.82,4.43,5.07\r\ndarknet17,256,16493.0,62.071,1024,14.3,3.26,7.18\r\nlevit_384,224,16444.85,62.253,1024,39.07,2.35,6.26\r\npit_s_distilled_224,224,16425.63,62.324,1024,24.04,2.9,11.64\r\ntf_efficientnetv2_b1,240,16416.62,62.359,1024,8.14,1.21,7.34\r\nresmlp_12_224,224,16406.95,62.396,1024,15.35,3.01,5.5\r\nmixnet_s,224,16368.93,62.541,1024,4.13,0.25,6.25\r\nresnext50_32x4d,176,16354.55,62.589,1024,25.03,2.71,8.97\r\nmobilenetv4_conv_blur_medium,256,16317.5,47.044,768,9.72,1.59,11.2\r\nmobilenetv4_conv_aa_medium,256,16197.66,63.202,1024,9.72,1.58,10.3\r\ncs3darknet_m,288,16196.4,63.207,1024,9.31,2.63,6.69\r\nedgenext_small,256,16126.54,63.478,1024,5.59,1.26,9.07\r\nselecsls60,224,16102.47,63.573,1024,30.67,3.59,5.52\r\nhgnetv2_b4,224,16095
.96,63.599,1024,19.8,2.75,6.7\r\nefficientnet_b1_pruned,240,16072.13,63.687,1024,6.33,0.4,6.21\r\nselecsls60b,224,16032.27,63.851,1024,32.77,3.63,5.52\r\npit_s_224,224,16029.76,63.865,1024,23.46,2.88,11.56\r\npoolformer_s12,224,15987.09,64.036,1024,11.92,1.82,5.53\r\nfastvit_t8,256,15986.13,64.04,1024,4.0,0.69,6.59\r\nvit_base_patch32_224,224,15923.69,64.29,1024,88.22,4.41,5.01\r\nvit_base_patch32_clip_quickgelu_224,224,15833.77,64.656,1024,87.85,4.41,5.01\r\nrexnetr_130,224,15815.57,64.729,1024,7.61,0.68,9.81\r\nfbnetv3_b,256,15787.13,64.845,1024,8.6,0.55,9.1\r\nefficientnet_b0_gn,224,15775.27,64.892,1024,5.29,0.42,6.75\r\nvit_base_patch32_clip_224,224,15758.92,64.962,1024,88.22,4.41,5.01\r\nvit_small_patch32_384,384,15628.77,65.501,1024,22.92,3.45,8.25\r\nconvnext_nano,224,15570.35,65.75,1024,15.59,2.46,8.37\r\ntf_mixnet_s,224,15555.14,65.808,1024,4.13,0.25,6.25\r\nmobilenetv2_120d,224,15396.29,66.489,1024,5.83,0.69,11.97\r\npoolformerv2_s12,224,15325.59,66.8,1024,11.89,1.83,5.53\r\nmobilenet_edgetpu_v2_l,224,15323.51,66.807,1024,10.92,2.55,9.05\r\nvit_tiny_r_s16_p8_384,384,15278.16,67.004,1024,6.36,1.34,6.49\r\ndpn48b,224,15232.9,67.206,1024,9.13,1.69,8.92\r\ntf_efficientnet_b0,224,15210.6,67.302,1024,5.29,0.4,6.75\r\nmixer_s16_224,224,15196.15,67.371,1024,18.53,3.79,5.97\r\nswiftformer_l1,224,15149.07,67.579,1024,12.06,1.6,10.07\r\nhgnetv2_b2,288,15106.36,67.765,1024,11.22,1.89,6.8\r\nresnetblur18,288,15092.15,67.831,1024,11.69,3.87,5.6\r\nresnet101,160,15061.15,67.971,1024,44.55,4.0,8.28\r\ntiny_vit_11m_224,224,15017.82,68.166,1024,20.35,2.03,13.49\r\nrexnet_130,224,14983.93,68.324,1024,7.56,0.68,9.71\r\nskresnet34,224,14940.25,68.522,1024,22.28,3.67,5.13\r\nvisformer_tiny,224,14813.28,69.11,1024,10.32,1.27,5.72\r\nrepvit_m3,224,14750.81,69.398,1024,10.12,1.86,11.43\r\nmobilenetv4_conv_medium,320,14747.95,69.416,1024,9.72,1.71,11.84\r\nresnext26ts,256,14740.93,69.448,1024,10.3,2.43,10.52\r\ndarknet21,256,14711.04,69.589,1024,20.86,3.93,7.47\r\nmixer_b32_224,224
,14604.65,70.092,1024,60.29,3.24,6.29\r\nconvnext_pico,288,14524.12,70.488,1024,9.05,2.27,10.08\r\nghostnetv2_160,224,14356.18,71.311,1024,12.39,0.42,7.23\r\nmobilevitv2_075,256,14339.67,71.393,1024,2.87,1.05,12.06\r\nrexnetr_150,224,14303.58,71.574,1024,9.78,0.89,11.13\r\nconvnextv2_nano,224,14184.44,72.173,1024,15.62,2.46,8.37\r\nlegacy_seresnext26_32x4d,224,14080.34,72.709,1024,16.79,2.49,9.39\r\nefficientnet_lite2,260,14041.16,72.912,1024,6.09,0.89,12.9\r\ngcresnext26ts,256,13997.36,73.14,1024,10.48,2.43,10.53\r\nefficientnet_b0,256,13927.1,73.507,1024,5.29,0.52,8.81\r\nghostnetv3_160,224,13903.72,73.627,1024,12.38,0.41,7.23\r\ngernet_l,256,13897.53,73.666,1024,31.08,4.57,8.0\r\ntf_efficientnet_lite2,260,13891.17,73.7,1024,6.09,0.89,12.9\r\nresnet26t,256,13842.42,73.954,1024,16.01,3.35,10.52\r\nconvnext_pico_ols,288,13841.41,73.965,1024,9.06,2.37,10.74\r\nconvnext_nano_ols,224,13816.92,74.092,1024,15.65,2.65,9.38\r\nefficientnet_blur_b0,224,13748.01,74.465,1024,5.29,0.43,8.72\r\nhrnet_w18_small_v2,224,13620.64,75.158,1024,15.6,2.62,9.65\r\nnf_regnet_b2,272,13555.48,75.523,1024,14.31,1.22,9.27\r\nvit_wee_patch16_reg1_gap_256,256,13542.25,75.597,1024,13.42,3.83,13.9\r\nnf_regnet_b1,288,13516.91,75.737,1024,10.22,1.02,9.2\r\neca_resnext26ts,256,13501.55,75.826,1024,10.3,2.43,10.52\r\nseresnext26ts,256,13493.25,75.871,1024,10.39,2.43,10.52\r\nvit_small_patch16_224,224,13438.54,76.163,1024,22.05,4.61,11.95\r\ndeit_small_patch16_224,224,13423.26,76.265,1024,22.05,4.61,11.95\r\nrexnet_150,224,13325.06,76.83,1024,9.73,0.9,11.21\r\ndeit3_small_patch16_224,224,13309.6,76.917,1024,22.06,4.61,11.95\r\nsedarknet21,256,13301.18,76.969,1024,20.95,3.93,7.47\r\nfbnetv3_d,256,13272.49,77.133,1024,10.31,0.68,11.1\r\necaresnext50t_32x4d,224,13259.35,77.211,1024,15.41,2.7,10.09\r\necaresnext26t_32x4d,224,13259.27,77.21,1024,15.41,2.7,10.09\r\ncsatv2,512,13258.93,77.212,1024,11.1,1.39,9.17\r\ndeit_small_distilled_patch16_224,224,13255.98,77.229,1024,22.44,4.63,12.02\r\nmobilenet_edge
tpu_v2_m,256,13252.62,77.25,1024,8.46,2.42,10.65\r\nseresnext26t_32x4d,224,13198.58,77.567,1024,16.81,2.7,10.09\r\nconvnextv2_pico,288,13186.99,77.633,1024,9.07,2.27,10.08\r\npvt_v2_b1,224,13153.21,77.831,1024,14.01,2.12,15.39\r\nefficientformerv2_s2,224,13145.02,77.878,1024,12.71,1.27,11.77\r\necaresnet50d_pruned,224,13140.08,77.911,1024,19.94,2.53,6.43\r\ngc_efficientnetv2_rw_t,224,13105.62,78.117,1024,13.68,1.94,9.97\r\nrepvit_m1_5,224,13101.66,78.14,1024,14.05,2.27,12.84\r\nfasternet_s,224,13075.1,78.299,1024,31.18,4.56,7.93\r\nseresnext26d_32x4d,224,13040.02,78.508,1024,16.81,2.73,10.19\r\nefficientvit_b1,288,12970.7,78.93,1024,9.1,0.87,11.96\r\ndla46x_c,224,12911.72,79.288,1024,1.07,0.54,5.66\r\nmambaout_femto,288,12856.88,79.627,1024,7.3,1.91,13.79\r\nbotnet26t_256,256,12814.17,79.893,1024,12.49,3.32,11.98\r\nmobilenetv4_conv_large,256,12805.04,79.952,1024,32.59,2.86,12.14\r\nhalonet26t,256,12789.2,80.051,1024,12.48,3.19,11.69\r\nregnetz_005,288,12773.49,80.146,1024,7.12,0.86,9.68\r\ndpn68,224,12741.65,80.349,1024,12.61,2.35,10.47\r\nefficientnet_b1,224,12725.34,80.451,1024,7.79,0.59,9.36\r\ndla60x_c,224,12634.78,81.029,1024,1.32,0.59,6.01\r\nvit_pwee_patch16_reg1_gap_256,256,12600.85,81.249,1024,15.25,4.37,15.87\r\nvit_base_patch32_siglip_gap_256,256,12590.35,81.317,1024,87.47,5.67,6.54\r\nmobileone_s4,224,12586.59,81.338,1024,14.84,2.98,11.81\r\nresnest14d,224,12528.39,81.716,1024,10.61,2.76,7.33\r\nvit_relpos_small_patch16_224,224,12527.03,81.726,1024,21.98,4.59,13.05\r\ndpn68b,224,12524.47,81.742,1024,12.61,2.35,10.47\r\ncs3darknet_focus_l,256,12438.86,82.297,1024,21.15,4.66,8.03\r\nvit_base_patch32_siglip_256,256,12435.52,82.326,1024,94.55,5.75,6.64\r\nresnet34,288,12424.96,82.395,1024,21.8,6.07,6.18\r\neca_botnext26ts_256,256,12407.66,82.513,1024,10.59,2.46,11.6\r\nvit_small_patch16_rope_ape_224,224,12407.02,82.516,1024,22.06,4.61,11.95\r\nresnet50,224,12400.78,82.556,1024,25.56,4.11,11.11\r\nvit_small_patch16_rope_224,224,12395.82,82.592,1024,21.98,4.6
1,11.95\r\nvit_base_patch32_clip_256,256,12375.0,82.729,1024,87.86,5.76,6.65\r\nresnetv2_34,288,12342.29,82.942,1024,21.8,6.07,6.18\r\nvit_srelpos_small_patch16_224,224,12334.66,82.999,1024,21.97,4.59,12.16\r\nrepvgg_b1g4,224,12287.89,83.314,1024,36.13,7.31,5.32\r\ncoat_lite_tiny,224,12287.19,83.318,1024,5.72,1.6,11.65\r\necaresnetlight,224,12275.02,83.4,1024,30.16,4.11,8.42\r\nresnet101,176,12251.92,83.562,1024,44.55,4.92,10.08\r\nmobilenetv3_large_150d,256,12207.21,83.861,1024,14.62,1.03,12.35\r\nmobilenetv4_hybrid_large_075,256,12106.04,84.564,1024,22.75,2.06,11.64\r\nmobilevit_xs,256,12104.12,63.431,768,2.32,1.05,16.33\r\nefficientnetv2_rw_t,224,12085.42,84.711,1024,13.65,1.93,9.94\r\nresnet26,288,12048.31,84.971,1024,16.0,3.9,12.15\r\neca_halonext26ts,256,12043.31,85.006,1024,10.76,2.44,11.46\r\nxcit_tiny_24_p16_224,224,12012.3,85.224,1024,12.12,2.34,11.82\r\ncs3darknet_l,256,12005.36,85.262,1024,21.16,4.86,8.55\r\ntf_efficientnetv2_b2,260,11917.72,85.905,1024,10.1,1.72,9.84\r\neca_nfnet_l0,224,11861.4,86.312,1024,24.14,4.35,10.47\r\nresnet50c,224,11833.71,86.513,1024,25.58,4.35,11.92\r\nbat_resnext26ts,256,11776.63,86.932,1024,10.73,2.53,12.51\r\ndla60,224,11773.75,86.952,1024,22.04,4.26,10.16\r\nhgnet_tiny,224,11772.6,86.962,1024,14.74,4.54,6.36\r\nflexivit_small,240,11758.39,87.071,1024,22.06,5.35,14.18\r\nhgnetv2_b3,288,11740.66,87.201,1024,16.29,2.94,8.38\r\nxcit_nano_12_p16_384,384,11692.68,87.557,1024,3.05,1.64,12.15\r\nnfnet_l0,224,11669.39,87.726,1024,35.07,4.36,10.47\r\nresnet50t,224,11668.03,87.741,1024,25.57,4.32,11.82\r\nresnet32ts,256,11620.05,88.105,1024,17.96,4.63,11.58\r\nresnet50d,224,11593.06,88.306,1024,25.58,4.35,11.92\r\nefficientnet_b0_g16_evos,224,11584.7,88.371,1024,8.11,1.01,7.42\r\nresnext26ts,288,11579.71,88.409,1024,10.3,3.07,13.31\r\necaresnet26t,256,11572.4,88.465,1024,16.01,3.35,10.53\r\nresnetv2_50,224,11544.86,88.678,1024,25.55,4.11,11.11\r\nresnet33ts,256,11425.05,89.611,1024,19.68,4.76,11.66\r\nvit_small_resnet26d_224,224,113
97.71,89.827,1024,63.61,5.07,11.12\r\nedgenext_small_rw,256,11389.76,89.888,1024,7.83,1.58,9.51\r\nresnet34d,288,11375.36,90.001,1024,21.82,6.47,7.51\r\nconvit_tiny,224,11359.12,90.128,1024,5.71,1.26,7.94\r\nvit_dwee_patch16_reg1_gap_256,256,11343.36,90.255,1024,13.43,3.83,17.6\r\nresnetv2_34d,288,11334.8,90.323,1024,21.82,6.46,7.51\r\nfastvit_t12,256,11331.12,90.353,1024,7.51,1.39,9.57\r\nseresnet34,288,11327.14,90.382,1024,21.96,6.07,6.18\r\nese_vovnet19b_dw,288,11317.18,90.465,1024,6.54,2.22,13.63\r\ncoat_lite_mini,224,11249.08,91.01,1024,11.01,2.0,12.25\r\nefficientvit_b2,224,11243.83,91.054,1024,24.33,1.6,14.62\r\ntresnet_m,224,11196.85,91.437,1024,31.39,5.75,7.31\r\ncoatnext_nano_rw_224,224,11170.83,91.647,1024,14.7,2.47,12.8\r\nefficientnet_em,240,10970.96,93.321,1024,6.9,3.04,14.34\r\ngcresnext26ts,288,10911.22,93.827,1024,10.48,3.07,13.33\r\ntf_efficientnet_em,240,10908.14,93.858,1024,6.9,3.04,14.34\r\nefficientnet_b1,240,10904.08,93.89,1024,7.79,0.71,10.88\r\nresnetv2_50t,224,10884.36,94.06,1024,25.57,4.32,11.82\r\ntf_efficientnetv2_b3,240,10877.09,94.124,1024,14.36,1.93,9.95\r\nefficientnet_b2_pruned,260,10861.74,94.257,1024,8.31,0.73,9.13\r\nresnetv2_50d,224,10855.12,94.315,1024,25.57,4.35,11.92\r\nresnetaa50,224,10848.72,94.365,1024,25.56,5.15,11.64\r\nlevit_conv_512,224,10825.19,94.577,1024,95.08,5.62,10.22\r\nvit_small_patch16_rope_mixed_ape_224,224,10799.39,94.801,1024,22.06,4.61,12.85\r\nvit_small_patch16_rope_mixed_224,224,10795.53,94.836,1024,21.99,4.61,12.85\r\nresnet26d,288,10794.82,94.84,1024,16.01,4.29,13.48\r\nres2net50_48w_2s,224,10754.2,95.201,1024,25.29,4.18,11.72\r\nnf_ecaresnet50,224,10720.76,95.498,1024,25.56,4.21,11.13\r\nmobilevitv2_100,256,10715.5,71.653,768,4.9,1.84,16.08\r\nnf_regnet_b3,288,10670.78,95.942,1024,18.59,1.67,11.84\r\nmambaout_kobe,288,10661.34,96.02,1024,9.14,2.5,16.53\r\nnf_seresnet50,224,10652.78,96.105,1024,28.09,4.21,11.13\r\nregnetx_016,224,10647.49,96.152,1024,9.19,1.62,7.93\r\nseresnext26ts,288,10646.7,96.155,1
024,10.39,3.07,13.32\r\neca_resnext26ts,288,10601.53,96.569,1024,10.3,3.07,13.32\r\ngcresnet33ts,256,10594.13,96.636,1024,19.88,4.76,11.68\r\nmixnet_m,224,10590.06,96.677,1024,5.01,0.36,8.19\r\nresnet152,160,10586.17,96.707,1024,60.19,5.9,11.51\r\ngmlp_s16_224,224,10572.21,96.84,1024,19.42,4.42,15.1\r\necaresnet101d_pruned,224,10549.25,97.05,1024,24.88,3.48,7.69\r\nmobilenetv4_hybrid_medium,320,10497.39,97.527,1024,11.07,2.05,14.36\r\nvit_dpwee_patch16_reg1_gap_256,256,10493.03,97.571,1024,15.25,4.37,19.05\r\nfastvit_s12,256,10413.53,98.316,1024,9.43,1.8,10.82\r\nwide_resnet50_2,176,10411.31,98.333,1024,68.88,7.29,8.97\r\nseresnet33ts,256,10351.34,98.9,1024,19.78,4.76,11.66\r\nlegacy_seresnet50,224,10334.42,99.064,1024,28.09,3.88,10.6\r\nresnetblur50,224,10326.2,99.143,1024,25.56,5.16,12.02\r\nrexnetr_200,224,10310.71,74.468,768,16.52,1.59,15.11\r\nregnety_016,224,10307.08,99.328,1024,11.2,1.63,8.04\r\neca_resnet33ts,256,10284.9,99.543,1024,19.68,4.76,11.66\r\nresnet50_clip_gap,224,10258.76,99.79,1024,23.53,5.39,12.44\r\nefficientnet_b0_g8_gn,224,10243.76,99.945,1024,6.56,0.66,6.75\r\nresnetaa50d,224,10243.5,99.947,1024,25.58,5.39,12.44\r\nvit_small_r26_s32_224,224,10232.01,100.061,1024,36.43,3.56,9.85\r\ndla60x,224,10219.59,100.18,1024,17.35,3.54,13.8\r\nresnext50_32x4d,224,10198.85,100.383,1024,25.03,4.26,14.4\r\ndensenet121,224,10178.85,100.582,1024,7.98,2.87,6.9\r\nmobilenetv4_conv_medium,384,10172.77,100.643,1024,9.72,2.46,17.05\r\nedgenext_small,320,10146.24,100.904,1024,5.59,1.97,14.16\r\nlevit_conv_512d,224,10137.38,100.995,1024,92.39,5.84,11.3\r\ncs3sedarknet_l,256,10058.34,101.787,1024,21.91,4.86,8.56\r\nfastvit_sa12,256,10046.22,101.909,1024,11.55,1.94,11.24\r\nvgg11,224,10040.59,101.97,1024,132.86,7.61,7.44\r\nresnetaa34d,288,10009.37,102.286,1024,21.82,7.33,8.38\r\nskresnet50,224,9992.39,102.458,1024,25.8,4.11,12.5\r\nseresnet50,224,9986.99,102.509,1024,28.09,4.11,11.13\r\nvovnet39a,224,9945.23,102.946,1024,22.6,7.09,6.73\r\nvgg11_bn,224,9937.97,103.022
,1024,132.87,7.62,7.44\r\nresnet50s,224,9916.88,103.241,1024,25.68,5.47,13.52\r\ncoatnet_pico_rw_224,224,9897.38,51.713,512,10.85,2.05,14.62\r\ntf_mixnet_m,224,9840.09,104.048,1024,5.01,0.36,8.19\r\ncs3darknet_focus_l,288,9833.05,104.121,1024,21.15,5.9,10.16\r\nvit_tiny_patch16_384,384,9824.74,104.211,1024,5.79,4.7,25.39\r\ninception_next_tiny,224,9819.53,104.257,1024,28.06,4.19,11.98\r\nresnetblur50d,224,9773.26,104.754,1024,25.58,5.4,12.82\r\nvit_base_patch32_plus_256,256,9763.48,104.863,1024,119.48,7.79,7.76\r\nhaloregnetz_b,224,9728.19,105.242,1024,11.68,1.97,11.94\r\nxcit_nano_12_p8_224,224,9714.96,105.385,1024,3.05,2.16,15.71\r\nefficientnet_b1,256,9705.99,105.483,1024,7.79,0.77,12.22\r\ncspresnet50,256,9678.67,105.78,1024,21.62,4.54,11.5\r\nresnext50d_32x4d,224,9654.78,106.039,1024,25.05,4.5,15.2\r\nese_vovnet39b,224,9613.52,106.499,1024,24.57,7.09,6.74\r\nselecsls84,224,9603.9,106.604,1024,50.95,5.9,7.57\r\nconvnext_tiny_hnf,224,9581.63,106.854,1024,28.59,4.47,13.44\r\ntf_efficientnet_b1,240,9576.51,106.911,1024,7.79,0.71,10.88\r\nhgnetv2_b4,288,9573.96,106.938,1024,19.8,4.54,11.08\r\neca_vovnet39b,224,9572.72,106.952,1024,22.6,7.09,6.74\r\nfbnetv3_g,240,9542.73,107.288,1024,16.62,1.28,14.87\r\nrexnet_200,224,9536.21,80.517,768,16.37,1.56,14.91\r\nconvnext_tiny,224,9524.46,107.486,1024,28.59,4.47,13.44\r\ncs3darknet_l,288,9515.99,107.589,1024,21.16,6.16,10.83\r\nseresnet50t,224,9498.11,107.789,1024,28.1,4.32,11.83\r\necaresnet50t,224,9494.72,107.828,1024,25.57,4.32,11.83\r\nskresnet50d,224,9488.87,107.897,1024,25.82,4.36,13.31\r\nresnet50_clip,224,9459.45,108.233,1024,38.32,6.14,12.98\r\nconvnext_nano,288,9432.9,108.538,1024,15.59,4.06,13.84\r\necaresnet50d,224,9430.85,108.561,1024,25.58,4.35,11.93\r\ncrossvit_small_240,240,9416.28,108.726,1024,26.86,5.63,18.17\r\nresnest26d,224,9405.16,108.859,1024,17.07,3.64,9.97\r\ncaformer_s18,224,9396.02,108.962,1024,26.34,4.13,19.39\r\ninception_v3,299,9390.48,109.028,1024,23.83,5.73,8.97\r\ndensenetblur121d,224,9372.0
4,109.243,1024,8.0,3.11,7.9\r\nregnetz_b16,224,9356.64,109.418,1024,9.72,1.45,9.95\r\nresnetrs50,224,9342.62,109.588,1024,35.69,4.48,12.14\r\nefficientformer_l3,224,9337.29,109.646,1024,31.41,3.93,12.01\r\nmobilevit_s,256,9260.11,82.913,768,5.58,2.03,19.94\r\ncspresnet50d,256,9252.84,110.651,1024,21.64,4.86,12.55\r\nresnet32ts,288,9238.82,110.821,1024,17.96,5.86,14.65\r\neva02_tiny_patch14_336,336,9222.43,111.017,1024,5.76,4.68,27.16\r\nefficientnet_b2,256,9206.56,111.208,1024,9.11,0.89,12.81\r\nmambaout_tiny,224,9199.97,111.283,1024,26.55,4.49,16.68\r\nconvformer_s18,224,9197.19,111.32,1024,26.77,3.96,15.82\r\nvit_relpos_small_patch16_rpn_224,224,9157.73,111.801,1024,21.97,4.59,13.05\r\ngcresnext50ts,256,9128.53,112.153,1024,15.67,3.75,15.46\r\nxcit_small_12_p16_224,224,9119.95,112.262,1024,26.25,4.82,12.58\r\nresnet33ts,288,9114.58,112.329,1024,19.68,6.02,14.75\r\ncspresnet50w,256,9103.24,112.47,1024,28.12,5.04,12.19\r\ngmixer_24_224,224,9082.88,112.716,1024,24.72,5.28,14.45\r\nregnetz_b16_evos,224,9052.59,113.098,1024,9.74,1.43,9.95\r\neva02_small_patch14_224,224,9037.05,113.295,1024,21.62,6.14,18.28\r\nres2net50_26w_4s,224,9015.43,113.562,1024,25.7,4.28,12.61\r\nese_vovnet39b_evos,224,9009.08,113.646,1024,24.58,7.07,6.74\r\ntiny_vit_21m_224,224,9003.17,113.717,1024,33.21,4.27,20.08\r\ntwins_pcpvt_small,224,8997.37,113.789,1024,24.11,3.83,18.08\r\nres2next50,224,8958.51,114.284,1024,24.67,4.2,13.71\r\nres2net50_14w_8s,224,8945.19,114.456,1024,25.06,4.21,13.28\r\ncrossvit_15_240,240,8939.2,114.534,1024,27.53,5.81,19.77\r\nsehalonet33ts,256,8934.68,114.592,1024,13.69,3.55,14.7\r\ndla60_res2next,224,8903.08,114.999,1024,17.03,3.49,13.17\r\nresnet26t,320,8853.07,115.648,1024,16.01,5.24,16.44\r\nresnetv2_50x1_bit,224,8836.93,115.854,1024,25.55,4.23,11.11\r\nvit_base_resnet26d_224,224,8835.28,115.881,1024,101.4,6.97,13.16\r\nlevit_512,224,8810.26,116.208,1024,95.08,5.62,10.22\r\ndla60_res2net,224,8808.81,116.23,1024,20.85,4.15,12.34\r\nnf_resnet50,256,8801.02,116.333,1
024,25.56,5.46,14.52\r\nvit_pe_core_tiny_patch16_384,384,8781.94,116.586,1024,6.14,4.74,25.62\r\nconvnextv2_tiny,224,8764.27,116.818,1024,28.64,4.47,13.44\r\nconvnextv2_nano,288,8756.18,116.923,1024,15.62,4.06,13.84\r\nswiftformer_l3,224,8738.04,117.17,1024,28.49,4.01,15.77\r\ncsatv2_21m,512,8726.07,117.331,1024,20.7,2.94,15.85\r\nresnetv2_50d_frn,224,8689.79,117.821,1024,25.59,4.33,11.92\r\nresnet152,176,8676.44,118.0,1024,60.19,7.22,13.99\r\nnf_regnet_b3,320,8664.25,118.169,1024,18.59,2.05,14.61\r\ngcresnet50t,256,8654.03,118.308,1024,25.9,5.42,14.67\r\nres2net50d,224,8611.93,118.877,1024,25.72,4.52,13.41\r\nvit_medium_patch16_clip_224,224,8602.37,119.02,1024,38.59,8.0,15.93\r\nefficientvit_b2,256,8597.15,119.09,1024,24.33,2.09,19.03\r\nvit_little_patch16_reg1_gap_256,256,8594.51,119.124,1024,22.52,6.27,18.06\r\nrepvgg_b1,224,8588.48,119.21,1024,51.83,11.82,5.32\r\nhgnetv2_b5,224,8582.22,119.297,1024,39.57,6.56,11.19\r\nvit_relpos_base_patch32_plus_rpn_256,256,8578.73,119.345,1024,119.42,7.68,8.01\r\nseresnext50_32x4d,224,8577.93,119.351,1024,27.56,4.26,14.42\r\nseresnetaa50d,224,8561.22,119.588,1024,28.11,5.4,12.46\r\ncoatnet_nano_rw_224,224,8560.46,119.602,1024,15.14,2.41,15.41\r\nvit_little_patch16_reg4_gap_256,256,8537.78,119.918,1024,22.52,6.35,18.33\r\ndeit3_medium_patch16_224,224,8533.45,119.978,1024,38.85,8.0,15.93\r\nefficientvit_l1,224,8530.92,120.016,1024,52.65,5.27,15.85\r\nlegacy_seresnext50_32x4d,224,8499.91,120.448,1024,27.56,4.26,14.42\r\nregnetx_032,224,8485.95,120.65,1024,15.3,3.2,11.37\r\nresnetv2_50d_evos,224,8485.4,120.661,1024,25.59,4.33,11.92\r\ngcvit_xxtiny,224,8436.87,121.353,1024,12.0,2.14,15.36\r\nresmlp_24_224,224,8428.65,121.469,1024,30.02,5.96,10.91\r\ncoatnet_nano_cc_224,224,8425.26,121.519,1024,13.76,2.24,15.02\r\ngcresnet33ts,288,8417.56,121.629,1024,19.88,6.02,14.78\r\nlambda_resnet26rpt_256,256,8378.56,91.645,768,10.99,3.16,11.87\r\nlevit_512d,224,8356.01,122.526,1024,92.39,5.84,11.3\r\nsebotnet33ts_256,256,8354.46,122.55,1024,13
.7,3.89,17.46\r\nconvnext_nano_ols,288,8348.5,122.634,1024,15.65,4.38,15.5\r\nfastvit_mci0,256,8342.76,122.722,1024,11.36,2.39,14.72\r\ncrossvit_15_dagger_240,240,8332.85,122.864,1024,28.21,6.13,20.43\r\nvisformer_small,224,8329.09,122.924,1024,40.22,4.88,11.43\r\ncspresnext50,256,8314.72,123.136,1024,20.57,4.05,15.86\r\nefficientnet_lite3,300,8292.8,61.721,512,8.2,1.65,21.85\r\ndensenet169,224,8284.93,123.573,1024,14.15,3.4,7.3\r\nvovnet57a,224,8271.31,123.783,1024,36.64,8.95,7.52\r\nefficientnet_b3_pruned,300,8226.49,124.457,1024,9.86,1.04,11.86\r\nseresnet33ts,288,8226.05,124.464,1024,19.78,6.02,14.76\r\ntf_efficientnet_lite3,300,8208.63,62.357,512,8.2,1.65,21.85\r\nmaxvit_pico_rw_256,256,8192.89,93.722,768,7.46,1.83,22.3\r\neca_resnet33ts,288,8178.11,125.19,1024,19.68,6.02,14.76\r\nmobilenetv4_conv_large,320,8160.73,125.455,1024,32.59,4.47,18.97\r\nvit_relpos_medium_patch16_224,224,8156.76,125.523,1024,38.75,7.97,17.02\r\nmaxvit_rmlp_pico_rw_256,256,8139.75,94.329,768,7.52,1.85,24.86\r\nvit_relpos_medium_patch16_cls_224,224,8106.11,126.307,1024,38.76,8.03,18.24\r\nresnetrs101,192,8102.91,126.356,1024,63.62,6.04,12.7\r\nresnetv2_50d_gn,224,8101.88,126.373,1024,25.57,4.38,11.92\r\nresnet50_gn,224,8095.32,126.476,1024,25.56,4.14,11.11\r\nefficientnet_cc_b0_8e,224,8049.72,127.192,1024,24.01,0.42,9.42\r\nefficientnet_cc_b0_4e,224,8049.65,127.194,1024,13.31,0.41,9.42\r\ntwins_svt_small,224,8044.91,127.267,1024,24.06,2.94,13.75\r\nmobilevitv2_125,256,8044.11,95.452,768,7.48,2.86,20.1\r\nefficientformerv2_l,224,8024.29,127.59,1024,26.32,2.59,18.54\r\ncoatnet_0_rw_224,224,8024.07,127.597,1024,27.44,4.43,18.73\r\nresnet51q,256,8008.6,127.843,1024,35.7,6.38,16.55\r\nvit_srelpos_medium_patch16_224,224,7989.1,128.157,1024,38.74,7.96,16.21\r\nedgenext_base,256,7983.34,128.249,1024,18.51,3.85,15.58\r\ncs3sedarknet_l,288,7965.24,128.539,1024,21.91,6.16,10.83\r\npoolformer_s24,224,7957.59,128.663,1024,21.39,3.41,10.68\r\nseresnext26t_32x4d,288,7945.77,128.853,1024,16.81,4.46,16.
68\r\nhrnet_w18_ssld,224,7898.11,129.632,1024,21.3,4.32,16.31\r\nseresnext26d_32x4d,288,7871.3,130.074,1024,16.81,4.51,16.85\r\nhrnet_w18,224,7870.17,130.089,1024,21.3,4.32,16.31\r\npoolformerv2_s24,224,7852.06,130.391,1024,21.34,3.42,10.68\r\nhgnet_small,224,7847.22,130.475,1024,24.36,8.53,8.79\r\ngc_efficientnetv2_rw_t,288,7838.37,130.618,1024,13.68,3.2,16.45\r\ndla102,224,7830.54,130.747,1024,33.27,7.19,14.18\r\nregnetv_040,224,7829.45,130.77,1024,20.64,4.0,12.29\r\nresnet50_mlp,256,7823.66,130.862,1024,26.65,7.05,16.25\r\nmobilenetv3_large_150d,320,7813.34,98.272,768,14.62,1.61,19.29\r\nvit_small_patch16_dinov3_qkvb,256,7811.91,131.064,1024,21.6,6.26,17.03\r\nvit_small_patch16_dinov3,256,7808.59,131.119,1024,21.59,6.26,17.03\r\nregnety_040,224,7756.52,131.994,1024,20.65,4.0,12.29\r\nnextvit_small,224,7744.99,132.187,1024,31.74,5.8,18.44\r\nmixnet_l,224,7743.41,132.223,1024,7.33,0.58,10.84\r\necaresnet50d_pruned,288,7712.58,132.75,1024,19.94,4.19,10.61\r\nefficientnet_b1,288,7686.9,133.191,1024,7.79,0.97,15.46\r\nresnest50d_1s4x24d,224,7658.5,133.691,1024,25.68,4.43,13.57\r\ncoatnet_rmlp_nano_rw_224,224,7603.57,67.319,512,15.15,2.62,20.34\r\nvit_medium_patch16_gap_240,240,7585.91,134.966,1024,44.4,9.22,18.81\r\nresnet101,224,7582.64,135.023,1024,44.55,7.83,16.23\r\ndavit_tiny,224,7576.45,101.344,768,28.36,4.54,18.89\r\ndpn68b,288,7521.15,136.13,1024,12.61,3.89,17.3\r\ntf_efficientnet_b2,260,7515.34,136.236,1024,9.11,1.02,13.83\r\nfocalnet_tiny_srf,224,7512.86,136.275,1024,28.43,4.42,16.32\r\nskresnext50_32x4d,224,7512.09,136.294,1024,27.48,4.5,17.18\r\nresnet50,288,7485.25,136.783,1024,25.56,6.8,18.37\r\nvit_dlittle_patch16_reg1_gap_256,256,7474.96,136.972,1024,22.52,6.27,22.69\r\ndarknetaa53,256,7459.21,137.262,1024,36.02,7.97,12.39\r\necaresnet26t,320,7453.81,137.356,1024,16.01,5.24,16.44\r\ndarknet53,256,7425.35,137.887,1024,41.61,9.31,12.39\r\ntf_efficientnet_cc_b0_8e,224,7392.24,138.507,1024,24.01,0.42,9.42\r\nxcit_tiny_12_p16_384,384,7389.49,138.553,1024,6.
72,3.64,18.26\r\nresnet101c,224,7371.39,138.897,1024,44.57,8.08,17.04\r\necaresnetlight,288,7366.82,138.97,1024,30.16,6.79,13.91\r\ncs3darknet_focus_x,256,7347.56,139.346,1024,35.02,8.03,10.69\r\ntf_mixnet_l,224,7339.72,139.495,1024,7.33,0.58,10.84\r\nswin_tiny_patch4_window7_224,224,7332.9,139.627,1024,28.29,4.51,17.06\r\ncoatnet_bn_0_rw_224,224,7328.51,69.844,512,27.44,4.67,22.04\r\nvit_base_r26_s32_224,224,7322.31,139.831,1024,101.38,6.81,12.36\r\npvt_v2_b2,224,7303.16,140.188,1024,25.36,4.05,27.53\r\nresnet61q,256,7281.18,140.615,1024,36.85,7.8,17.01\r\ntf_efficientnet_cc_b0_4e,224,7280.65,140.629,1024,13.31,0.41,9.42\r\nefficientnetv2_rw_t,288,7280.17,140.639,1024,13.65,3.19,16.42\r\nefficientnet_b2,288,7279.13,140.651,1024,9.11,1.12,16.2\r\nresnet101d,224,7271.42,140.808,1024,44.57,8.08,17.04\r\necaresnet50t,256,7241.04,141.392,1024,25.57,5.64,15.45\r\ngcresnext50ts,288,7217.09,141.867,1024,15.67,4.75,19.57\r\nrdnet_tiny,224,7206.98,142.066,1024,23.86,5.06,15.98\r\nfocalnet_tiny_lrf,224,7167.39,142.848,1024,28.65,4.49,17.76\r\neca_nfnet_l0,288,7157.75,143.041,1024,24.14,7.12,17.29\r\nrepvit_m2_3,224,7144.31,143.313,1024,22.93,4.52,21.32\r\nnfnet_l0,288,7102.13,144.16,1024,35.07,7.13,17.29\r\nnf_resnet101,224,7101.1,144.183,1024,44.55,8.01,16.23\r\nedgenext_small_rw,320,7077.7,144.658,1024,7.83,2.46,14.85\r\nresnet50t,288,7033.73,145.563,1024,25.57,7.14,19.53\r\nmobilenetv4_hybrid_medium,384,6996.99,146.33,1024,11.07,3.01,21.18\r\nresnetv2_50,288,6988.52,146.5,1024,25.55,6.79,18.37\r\nswinv2_cr_tiny_224,224,6983.02,146.621,1024,28.33,4.66,28.45\r\nswinv2_cr_tiny_ns_224,224,6982.85,146.625,1024,28.33,4.66,28.45\r\nnf_resnet50,288,6981.39,146.66,1024,25.56,6.88,18.37\r\nresnet50d,288,6976.81,146.756,1024,25.58,7.19,19.7\r\ncspdarknet53,256,6973.65,146.818,1024,27.64,6.57,16.81\r\nhgnet_tiny,288,6967.67,146.94,1024,14.74,7.51,10.51\r\nvit_base_resnet50d_224,224,6942.41,147.482,1024,110.97,8.73,16.92\r\nhalonet50ts,256,6923.05,147.894,1024,22.73,5.3,19.2\r\nregnetx
_040,224,6870.24,149.025,1024,22.12,3.99,12.2\r\nresnetv2_101,224,6870.1,149.031,1024,44.54,7.83,16.23\r\ntf_efficientnetv2_b3,300,6864.98,149.143,1024,14.36,3.04,15.74\r\nhiera_tiny_224,224,6860.46,149.242,1024,27.91,4.91,17.13\r\ngcresnet50t,288,6859.26,149.267,1024,25.9,6.86,18.57\r\ndla102x,224,6836.88,149.757,1024,26.31,5.89,19.42\r\nefficientvit_b2,288,6822.65,150.071,1024,24.33,2.64,24.03\r\nvgg13,224,6774.32,151.142,1024,133.05,11.31,12.25\r\nfbnetv3_g,288,6770.93,151.21,1024,16.62,1.77,21.09\r\nvit_medium_patch16_gap_256,256,6742.76,151.848,1024,38.86,10.59,22.15\r\nefficientvit_l2,224,6740.54,151.897,1024,63.71,6.97,19.58\r\nrexnetr_300,224,6733.43,76.021,512,34.81,3.39,22.16\r\nvitamin_small_224,224,6732.79,76.029,512,22.17,5.92,26.38\r\nnf_regnet_b4,320,6727.01,152.204,1024,30.21,3.29,19.88\r\nnfnet_f0,192,6718.96,152.383,1024,71.49,7.21,10.16\r\nresnet101_clip_gap,224,6715.05,152.473,1024,42.52,9.11,17.56\r\nvgg13_bn,224,6706.78,152.664,1024,133.05,11.33,12.25\r\nresnetaa101d,224,6692.23,152.994,1024,44.57,9.12,17.56\r\ncs3darknet_x,256,6677.51,153.328,1024,35.05,8.38,11.35\r\nmobilevitv2_150,256,6668.53,76.758,512,10.59,4.09,24.11\r\nregnety_032,224,6656.33,153.819,1024,19.44,3.2,11.26\r\nresnetv2_101d,224,6646.41,154.048,1024,44.56,8.07,17.04\r\nnf_ecaresnet101,224,6630.59,154.413,1024,44.55,8.01,16.27\r\nwide_resnet50_2,224,6604.74,155.019,1024,68.88,11.43,14.4\r\nnf_seresnet101,224,6591.87,155.321,1024,49.33,8.02,16.27\r\nres2net50_26w_6s,224,6579.31,155.615,1024,37.05,6.33,15.28\r\ndm_nfnet_f0,192,6573.56,155.756,1024,71.49,7.21,10.16\r\nresnet101s,224,6568.45,155.876,1024,44.67,9.19,18.64\r\nresnetaa50,288,6557.84,156.126,1024,25.56,8.52,19.24\r\nresnest50d,224,6548.63,156.35,1024,27.48,5.4,14.36\r\nresnetblur101d,224,6517.38,157.097,1024,44.57,9.12,17.94\r\nbotnet50ts_256,256,6512.87,157.207,1024,22.74,5.54,22.23\r\nvit_small_plus_patch16_dinov3,256,6500.79,157.501,1024,28.68,8.11,21.84\r\nvit_small_plus_patch16_dinov3_qkvb,256,6494.34,157.657,10
24,28.69,8.11,21.84\r\ngcvit_xtiny,224,6437.37,159.052,1024,19.98,2.93,20.26\r\nfastvit_sa24,256,6430.21,159.227,1024,21.5,3.77,20.35\r\nresnet51q,288,6411.99,159.682,1024,35.7,8.07,20.94\r\nresnetv2_34d,384,6394.3,160.123,1024,21.82,11.49,13.35\r\nresnet101_clip,224,6388.48,160.27,1024,56.26,9.81,18.08\r\nvit_medium_patch16_reg1_gap_256,256,6371.97,160.682,1024,38.88,10.63,22.26\r\nrepvgg_b2g4,224,6340.35,161.485,1024,55.78,11.33,6.45\r\nvit_medium_patch16_reg4_gap_256,256,6322.02,161.952,1024,38.88,10.76,22.6\r\ncoatnet_rmlp_0_rw_224,224,6306.63,81.165,512,27.45,4.72,24.89\r\ncs3sedarknet_xdw,256,6304.6,162.402,1024,21.6,5.97,17.18\r\nswin_s3_tiny_224,224,6296.54,162.607,1024,28.33,4.64,19.13\r\ntresnet_v2_l,224,6245.22,163.944,1024,46.17,8.85,16.34\r\nhalo2botnet50ts_256,256,6236.12,164.185,1024,22.64,5.02,21.78\r\nrexnetr_200,288,6235.6,82.092,512,16.52,2.62,24.96\r\nresnetblur50,288,6229.51,164.359,1024,25.56,8.52,19.87\r\nmaxxvit_rmlp_nano_rw_256,256,6226.57,164.438,1024,16.78,4.37,26.05\r\nresnext101_32x4d,224,6223.85,164.5,1024,44.18,8.01,21.23\r\ndensenet201,224,6221.44,164.572,1024,20.01,4.34,7.85\r\nlegacy_seresnet101,224,6210.47,164.862,1024,49.33,7.61,15.74\r\necaresnet101d_pruned,288,6207.68,164.937,1024,24.88,5.75,12.71\r\nregnety_040_sgn,224,6166.91,166.026,1024,20.65,4.03,12.29\r\nresnetaa50d,288,6161.3,166.176,1024,25.58,8.92,20.57\r\nseresnet101,224,6159.05,166.241,1024,49.33,7.84,16.27\r\nresnext50_32x4d,288,6155.18,166.343,1024,25.03,7.04,23.81\r\ncoat_lite_small,224,6149.54,166.497,1024,19.84,3.96,22.09\r\nfasternet_m,224,6142.29,166.694,1024,53.52,8.74,15.34\r\nlambda_resnet26t,256,6135.65,166.876,1024,10.96,3.02,11.87\r\nmvitv2_tiny,224,6109.55,167.588,1024,24.17,4.7,21.16\r\ndensenet121,288,6108.89,167.603,1024,7.98,4.74,11.41\r\ntwins_pcpvt_base,224,6059.62,168.968,1024,43.83,6.68,25.25\r\nese_vovnet57b,256,6050.52,169.221,1024,38.61,11.69,9.82\r\nseresnet50,288,6027.24,169.869,1024,28.09,6.8,18.39\r\npvt_v2_b2_li,224,6017.17,170.16,1024,22
.55,3.91,27.6\r\ncrossvit_18_240,240,5975.97,171.33,1024,43.27,9.05,26.26\r\necaresnet101d,224,5963.32,171.698,1024,44.57,8.08,17.07\r\nresnext101_32x8d,176,5958.23,171.845,1024,88.79,10.33,19.37\r\nefficientvit_b3,224,5953.9,171.97,1024,48.65,3.99,26.9\r\nvit_medium_patch16_rope_reg1_gap_256,256,5949.51,172.095,1024,38.74,10.63,22.26\r\nrexnet_300,224,5930.27,86.317,512,34.71,3.44,22.4\r\nhrnet_w32,224,5919.48,172.963,1024,41.23,8.97,22.02\r\nnextvit_base,224,5913.75,173.133,1024,44.79,8.29,23.71\r\ndarknetaa53,288,5901.7,173.49,1024,36.02,10.08,15.68\r\ndarknet53,288,5890.93,173.806,1024,41.61,11.78,15.68\r\nhieradet_small,256,5866.78,130.885,768,34.73,8.51,27.76\r\nresnetblur50d,288,5854.23,174.892,1024,25.58,8.92,21.19\r\nmaxvit_nano_rw_256,256,5852.65,65.595,384,15.45,4.46,30.28\r\ncs3sedarknet_x,256,5848.59,175.066,1024,35.4,8.38,11.35\r\nresnext50d_32x4d,288,5835.95,175.444,1024,25.05,7.44,25.13\r\nese_vovnet39b,288,5831.2,175.589,1024,24.57,11.71,11.13\r\nmaxvit_rmlp_nano_rw_256,256,5825.33,65.901,384,15.5,4.47,31.92\r\nwide_resnet101_2,176,5817.45,176.002,1024,126.89,14.31,13.18\r\nconvnext_tiny_hnf,288,5812.3,176.158,1024,28.59,7.39,22.21\r\nresnet61q,288,5810.49,176.214,1024,36.85,9.87,21.52\r\nconvnext_tiny,288,5786.63,176.935,1024,28.59,7.39,22.21\r\nvit_relpos_medium_patch16_rpn_224,224,5773.5,177.341,1024,38.73,7.97,17.02\r\necaresnet50t,288,5746.8,178.165,1024,25.57,7.14,19.55\r\nefficientnetv2_s,288,5740.99,178.346,1024,21.46,4.75,20.13\r\nseresnet50t,288,5734.09,178.557,1024,28.1,7.14,19.55\r\nxcit_tiny_12_p8_224,224,5703.27,179.525,1024,6.71,4.81,23.6\r\ncoatnet_0_224,224,5701.69,67.328,384,25.04,4.58,24.01\r\necaresnet50d,288,5698.67,179.665,1024,25.58,7.19,19.72\r\ninception_next_small,224,5696.55,179.738,1024,49.37,8.36,19.27\r\ncait_xxs24_224,224,5686.58,180.056,1024,11.96,2.53,20.29\r\nmobilenetv4_conv_large,384,5675.85,180.388,1024,32.59,6.43,27.31\r\nresmlp_36_224,224,5673.11,180.479,1024,44.69,8.91,16.33\r\ncrossvit_18_dagger_240,240,5639.
8,181.548,1024,44.27,9.5,27.03\r\nregnetz_c16,256,5636.35,181.659,1024,13.46,2.51,16.57\r\ndensenetblur121d,288,5636.18,181.663,1024,8.0,5.14,13.06\r\nregnetx_080,224,5598.0,182.9,1024,39.57,8.02,14.06\r\nres2net101_26w_4s,224,5576.03,183.624,1024,45.21,8.1,18.45\r\nconvnext_small,224,5575.24,183.651,1024,50.22,8.71,21.56\r\nresnet101d,256,5523.61,185.367,1024,44.57,10.55,22.25\r\nhiera_small_224,224,5517.49,185.572,1024,35.01,6.42,20.75\r\nregnetz_b16,288,5503.45,186.047,1024,9.72,2.39,16.43\r\ncsatv2_21m,640,5492.06,186.432,1024,20.7,4.72,26.68\r\nregnetz_c16_evos,256,5483.1,186.737,1024,13.49,2.48,16.57\r\nvolo_d1_224,224,5456.89,187.633,1024,26.63,6.94,24.43\r\nresnetv2_101x1_bit,224,5438.06,188.284,1024,44.54,8.04,16.23\r\nvit_base_patch32_clip_384,384,5429.46,188.584,1024,88.3,13.06,16.5\r\npit_b_224,224,5428.47,188.613,1024,73.76,12.42,32.94\r\nvit_base_patch32_384,384,5419.86,188.913,1024,88.3,13.06,16.5\r\nefficientnet_b3,288,5413.39,94.562,512,12.23,1.63,21.49\r\nres2net101d,224,5406.73,189.371,1024,45.23,8.35,19.25\r\npit_b_distilled_224,224,5400.48,189.591,1024,74.79,12.5,33.07\r\ncs3edgenet_x,256,5380.86,190.281,1024,47.82,11.53,12.92\r\nswinv2_tiny_window8_256,256,5379.98,190.314,1024,28.35,5.96,24.57\r\nefficientnetv2_rw_s,288,5367.62,190.756,1024,23.94,4.91,21.41\r\nregnetx_064,224,5363.14,190.912,1024,26.21,6.49,16.37\r\nrepvgg_b2,224,5360.29,191.013,1024,80.32,18.38,6.45\r\nvgg16,224,5358.29,191.085,1024,138.36,15.47,13.56\r\nregnetz_b16_evos,288,5350.66,191.358,1024,9.74,2.36,16.43\r\nconvnextv2_tiny,288,5348.96,191.422,1024,28.64,7.39,22.21\r\npoolformer_s36,224,5331.89,192.033,1024,30.86,5.0,15.82\r\nresnet152,224,5330.92,192.068,1024,60.19,11.56,22.56\r\nefficientnet_cc_b1_8e,240,5318.44,192.519,1024,39.72,0.75,15.44\r\nseresnext101_32x4d,224,5317.68,192.542,1024,48.96,8.02,21.26\r\nvgg16_bn,224,5315.98,192.608,1024,138.37,15.5,13.56\r\ntnt_s_legacy_patch16_224,224,5303.77,193.054,1024,23.76,5.24,24.37\r\npoolformerv2_s36,224,5297.54,193.276,10
24,30.79,5.01,15.82\r\nres2net50_26w_8s,224,5290.8,193.523,1024,48.4,8.37,17.95\r\nmobilevitv2_175,256,5288.47,96.794,512,14.25,5.54,28.13\r\nconvnext_nano,384,5272.98,194.181,1024,15.59,7.22,24.61\r\nmambaout_small_rw,224,5260.6,194.636,1024,48.5,8.96,27.72\r\nmambaout_small,224,5256.06,194.803,1024,48.49,8.96,27.72\r\neca_nfnet_l1,256,5252.79,194.922,1024,41.41,9.62,22.04\r\ncs3darknet_x,288,5251.07,194.989,1024,35.05,10.6,14.36\r\nrepvgg_b3g4,224,5243.03,195.286,1024,75.63,16.06,7.55\r\ntnt_s_patch16_224,224,5240.45,195.382,1024,23.77,5.25,24.37\r\nlegacy_seresnext101_32x4d,224,5240.42,195.383,1024,48.96,8.02,21.26\r\ndla169,224,5216.91,196.265,1024,53.39,11.6,20.2\r\nresnet152c,224,5209.71,196.536,1024,60.21,11.8,23.36\r\nefficientvit_l2,256,5186.93,148.047,768,63.71,9.09,25.49\r\nconvit_small,224,5178.77,197.712,1024,27.78,5.76,17.87\r\nconvnextv2_small,224,5172.63,197.946,1024,50.32,8.71,21.56\r\nseresnext50_32x4d,288,5166.25,198.186,1024,27.56,7.04,23.82\r\nresnet152d,224,5165.08,198.234,1024,60.21,11.8,23.36\r\nseresnetaa50d,288,5149.76,198.823,1024,28.11,8.92,20.59\r\nhgnetv2_b5,288,5144.21,199.037,1024,39.57,10.84,18.5\r\nefficientnet_x_b3,288,5114.45,200.2,1024,13.3,3.91,15.6\r\nvit_large_patch32_224,224,5114.44,200.196,1024,305.51,15.39,13.3\r\nmaxvit_tiny_rw_224,224,5105.55,100.264,512,29.06,5.11,33.11\r\nmixer_b16_224,224,5105.51,200.552,1024,59.88,12.62,14.53\r\nfastvit_mci1,256,5086.95,201.279,1024,21.46,4.67,27.3\r\nresnetv2_50d_evos,288,5086.74,201.286,1024,25.59,7.15,19.7\r\nedgenext_base,320,5073.82,201.799,1024,18.51,6.01,24.32\r\nmixnet_xl,224,5020.9,203.928,1024,11.9,0.93,14.57\r\ninception_v4,299,5016.72,204.095,1024,42.68,12.28,15.09\r\ncaformer_s36,224,4999.28,204.806,1024,39.3,8.0,37.53\r\ntf_efficientnet_cc_b1_8e,240,4996.54,204.924,1024,39.72,0.75,15.44\r\nmaxxvitv2_nano_rw_256,256,4978.33,205.673,1024,23.7,6.26,23.05\r\nvit_small_resnet50d_s16_224,224,4972.01,205.932,1024,57.53,13.48,24.82\r\ncoatnet_rmlp_1_rw_224,224,4937.91,207.355,10
24,41.69,7.85,35.47\r\npvt_v2_b3,224,4932.13,207.598,1024,45.24,6.92,37.7\r\nmobilenetv4_hybrid_medium,448,4916.81,156.179,768,11.07,4.2,29.64\r\nconvformer_s36,224,4900.74,208.923,1024,40.01,7.67,30.5\r\nresnetv2_50d_gn,288,4878.33,209.888,1024,25.57,7.24,19.7\r\nxcit_small_24_p16_224,224,4876.85,209.954,1024,47.67,9.1,23.64\r\nresnet50_gn,288,4864.9,210.469,1024,25.56,6.85,18.37\r\ntf_efficientnetv2_s,300,4863.59,210.525,1024,21.46,5.35,22.73\r\nmixer_l32_224,224,4853.71,210.95,1024,206.94,11.27,19.86\r\nconvnextv2_nano,384,4849.46,211.138,1024,15.62,7.22,24.61\r\nsequencer2d_s,224,4841.5,211.48,1024,27.65,4.96,11.31\r\nhrnet_w30,224,4826.51,212.139,1024,37.71,8.15,21.21\r\nlegacy_xception,299,4816.33,159.437,768,22.86,8.4,35.83\r\nresnet152s,224,4799.69,213.327,1024,60.32,12.92,24.96\r\ntresnet_l,224,4797.86,213.41,1024,55.99,10.9,11.9\r\nmobilevitv2_200,256,4783.3,80.262,384,18.45,7.22,32.15\r\nresnetv2_152,224,4776.72,214.355,1024,60.19,11.55,22.56\r\nnextvit_large,224,4775.79,214.388,1024,57.83,10.77,28.99\r\nhgnet_small,288,4767.58,214.761,1024,24.36,14.09,14.53\r\nrdnet_small,224,4735.14,216.232,1024,50.44,8.74,22.55\r\nmobilenetv4_conv_aa_large,384,4728.13,216.558,1024,32.59,7.07,32.29\r\nresnetv2_34d,448,4722.69,216.807,1024,21.82,15.64,18.16\r\nfastvit_sa36,256,4721.96,216.838,1024,31.46,5.59,29.46\r\nefficientnet_el,300,4708.05,217.483,1024,10.59,8.0,30.7\r\ndensenet161,224,4703.98,217.669,1024,28.68,7.79,11.06\r\nefficientnet_el_pruned,300,4702.75,217.723,1024,10.59,8.0,30.7\r\ntf_efficientnet_el,300,4678.15,218.874,1024,10.59,8.0,30.7\r\nresnetv2_152d,224,4672.72,219.119,1024,60.2,11.8,23.36\r\nnf_regnet_b4,384,4664.22,219.523,1024,30.21,4.7,28.61\r\nregnetv_040,288,4662.18,219.617,1024,20.64,6.6,20.3\r\nhrnet_w18_ssld,288,4659.16,219.759,1024,21.3,7.14,26.96\r\necaresnet50t,320,4650.59,220.169,1024,25.57,8.82,24.13\r\nregnetv_064,224,4635.33,220.891,1024,30.58,6.39,16.41\r\ncoatnet_1_rw_224,224,4628.68,110.592,512,41.72,8.04,34.6\r\nmambaout_tiny,288,
4615.58,221.837,1024,26.55,7.41,27.58\r\nvit_betwixt_patch16_gap_256,256,4614.07,221.911,1024,60.37,16.25,27.69\r\nvit_small_patch16_36x1_224,224,4611.65,222.027,1024,64.67,13.71,35.69\r\ncs3sedarknet_x,288,4599.59,222.605,1024,35.4,10.6,14.37\r\ndavit_small,224,4597.98,167.013,768,49.75,8.8,30.49\r\nvit_small_patch16_18x2_224,224,4593.52,222.903,1024,64.67,13.71,35.69\r\nregnetz_040,256,4589.99,223.075,1024,27.12,4.06,24.19\r\nregnety_040,288,4583.67,223.38,1024,20.65,6.61,20.3\r\nvit_pe_spatial_tiny_patch16_512,512,4578.75,223.624,1024,5.68,10.46,61.64\r\nefficientvit_b3,256,4571.03,167.999,768,48.65,5.2,35.01\r\nresnet101,288,4570.86,224.004,1024,44.55,12.95,26.83\r\nregnetz_040_h,256,4551.95,224.941,1024,28.94,4.12,24.29\r\nese_vovnet99b,224,4535.06,225.776,1024,63.2,16.51,11.27\r\ndla102x2,224,4533.78,225.838,1024,41.28,9.34,29.91\r\ncs3se_edgenet_x,256,4529.48,226.057,1024,50.72,11.53,12.94\r\nregnety_064,224,4521.43,226.458,1024,30.58,6.39,16.41\r\nxception41p,299,4520.63,113.239,512,26.91,9.25,39.86\r\nregnetz_d8,256,4506.28,227.219,1024,23.37,3.97,23.74\r\nregnety_080,224,4490.13,228.037,1024,39.18,8.0,17.97\r\nmvitv2_small,224,4483.28,228.382,1024,34.87,7.0,28.08\r\nswin_small_patch4_window7_224,224,4473.86,228.866,1024,49.61,8.77,27.47\r\nvgg19,224,4432.76,230.988,1024,143.67,19.63,14.86\r\ndeit3_base_patch16_224,224,4410.88,232.132,1024,86.59,17.58,23.9\r\nvit_base_patch16_clip_quickgelu_224,224,4404.1,232.489,1024,86.19,17.58,23.9\r\nvit_base_patch16_siglip_gap_224,224,4404.03,232.497,1024,85.8,17.49,23.75\r\nvgg19_bn,224,4397.91,232.819,1024,143.68,19.66,14.86\r\ndeit_base_patch16_224,224,4395.76,232.933,1024,86.57,17.58,23.9\r\nvit_base_patch16_xp_224,224,4393.56,233.05,1024,86.51,17.56,23.9\r\nvit_base_patch16_siglip_224,224,4393.29,233.066,1024,92.88,17.73,24.06\r\nvit_betwixt_patch16_reg1_gap_256,256,4392.12,233.125,1024,60.4,16.32,27.83\r\nvit_base_patch16_clip_224,224,4391.39,233.166,1024,86.57,17.58,23.9\r\nfocalnet_small_srf,224,4390.29,233.222
,1024,49.89,8.62,26.26\r\nvit_base_patch16_224_miil,224,4385.22,233.495,1024,94.4,17.59,23.91\r\nresnest50d_4s2x40d,224,4381.6,233.684,1024,30.42,4.4,17.94\r\nvit_base_patch16_224,224,4378.83,233.832,1024,86.57,17.58,23.9\r\nefficientnet_b3,320,4374.14,117.031,512,12.23,2.01,26.52\r\nregnetz_d8_evos,256,4370.36,234.287,1024,23.46,4.5,24.92\r\ndeit_base_distilled_patch16_224,224,4369.28,234.348,1024,87.34,17.68,24.05\r\nvit_betwixt_patch16_reg4_gap_256,256,4335.97,236.145,1024,60.4,16.52,28.24\r\ntwins_pcpvt_large,224,4322.59,236.872,1024,60.99,9.84,35.82\r\nefficientnet_b3_gn,288,4310.07,118.769,512,11.73,1.74,23.35\r\nmaxxvit_rmlp_tiny_rw_256,256,4305.29,237.827,1024,29.64,6.66,39.76\r\nlegacy_seresnet152,224,4303.8,237.904,1024,66.82,11.33,22.08\r\nvit_base_patch16_gap_224,224,4302.84,237.966,1024,86.57,17.49,25.59\r\nfasternet_l,224,4291.71,238.581,1024,93.47,15.52,20.46\r\nseresnet152,224,4289.4,238.707,1024,66.82,11.57,22.61\r\nefficientformer_l7,224,4286.86,238.85,1024,82.23,10.17,24.45\r\ntf_efficientnet_b3,300,4281.86,119.556,512,12.23,1.87,23.83\r\ncs3edgenet_x,288,4247.56,241.059,1024,47.82,14.59,16.36\r\nvit_base_mci_224,224,4239.24,241.536,1024,86.35,17.73,24.65\r\nvit_small_patch16_384,384,4232.23,241.93,1024,22.2,15.52,50.78\r\nfocalnet_small_lrf,224,4219.11,242.687,1024,50.34,8.74,28.61\r\nswinv2_cr_small_ns_224,224,4214.47,242.954,1024,49.7,9.08,50.27\r\nswinv2_cr_small_224,224,4207.77,243.337,1024,49.7,9.07,50.27\r\nregnety_080_tv,224,4190.97,244.316,1024,39.38,8.51,19.73\r\ndeit3_small_patch16_384,384,4184.78,244.679,1024,22.21,15.52,50.78\r\nvit_base_patch16_rope_ape_224,224,4178.38,245.052,1024,86.59,17.58,23.9\r\nvit_relpos_base_patch16_224,224,4177.21,245.118,1024,86.43,17.51,24.97\r\nvit_base_patch16_rope_224,224,4175.08,245.242,1024,86.43,17.58,23.9\r\nvit_relpos_base_patch16_cls_224,224,4169.67,245.564,1024,86.43,17.6,25.12\r\nvit_relpos_base_patch16_clsgap_224,224,4168.62,245.624,1024,86.43,17.6,25.12\r\ngcvit_tiny,224,4166.28,245.756,1024,
28.22,4.79,29.82\r\nresnetv2_101,288,4164.96,245.841,1024,44.54,12.94,26.83\r\nmobilenetv4_conv_large,448,4149.98,185.04,768,32.59,8.75,37.17\r\nbeit_base_patch16_224,224,4134.28,247.662,1024,86.53,17.58,23.9\r\nmvitv2_small_cls,224,4131.45,247.837,1024,34.87,7.04,28.17\r\nvit_pe_core_base_patch16_224,224,4120.58,248.489,1024,93.67,17.82,24.21\r\nbeitv2_base_patch16_224,224,4117.77,248.656,1024,86.53,17.58,23.9\r\nbeit3_base_patch16_224,224,4098.76,249.812,1024,86.66,17.63,23.9\r\nrexnetr_300,288,4087.78,62.605,256,34.81,5.59,36.61\r\nefficientvit_l2,288,4086.68,187.906,768,63.71,11.51,32.19\r\nvit_betwixt_patch16_rope_reg4_gap_256,256,4085.06,250.649,1024,60.23,16.52,28.24\r\nhiera_small_abswin_256,256,4080.82,250.913,1024,34.36,8.29,26.38\r\ninception_resnet_v2,299,4077.33,251.123,1024,55.84,13.18,25.06\r\nmaxvit_tiny_tf_224,224,4067.69,125.85,512,30.92,5.6,35.78\r\nlamhalobotnet50ts_256,256,4058.85,252.267,1024,22.57,5.02,18.44\r\neva02_base_patch16_clip_224,224,4054.56,252.535,1024,86.26,17.62,26.32\r\nresnetaa101d,288,4054.19,252.554,1024,44.57,15.07,29.03\r\nvit_base_patch32_clip_448,448,4046.39,253.045,1024,88.34,17.93,23.9\r\ngmlp_b16_224,224,4046.11,253.066,1024,73.08,15.78,30.21\r\ncoatnet_rmlp_1_rw2_224,224,4040.49,126.696,512,41.72,8.11,40.13\r\nxcit_tiny_24_p16_384,384,4023.1,254.508,1024,12.12,6.87,34.29\r\nnest_tiny,224,4013.0,255.146,1024,17.06,5.83,25.48\r\nrepvgg_b3,224,4010.91,255.284,1024,110.96,26.21,7.55\r\ndpn98,224,3999.88,255.989,1024,61.57,11.73,25.2\r\nnest_tiny_jx,224,3984.16,256.995,1024,17.06,5.83,25.48\r\nwide_resnet50_2,288,3978.12,257.388,1024,68.88,18.89,23.81\r\nnfnet_f0,256,3947.86,259.36,1024,71.49,12.62,18.05\r\nmaxvit_tiny_rw_256,256,3943.74,97.346,384,29.07,6.74,44.35\r\nlevit_conv_384_s8,224,3928.35,260.65,1024,39.06,9.95,35.86\r\nresnet152d,256,3924.0,260.936,1024,60.21,15.41,30.51\r\nresnetblur101d,288,3919.63,261.227,1024,44.57,15.07,29.65\r\neva02_small_patch14_336,336,3918.9,261.278,1024,22.13,15.48,54.33\r\nmaxvit_rmlp_
tiny_rw_256,256,3916.31,98.031,384,29.15,6.77,46.92\r\nefficientnet_lite4,380,3914.87,98.07,384,13.01,4.04,45.66\r\nresnet200,224,3906.8,262.085,1024,64.67,15.07,32.19\r\nese_vovnet57b,320,3900.24,262.527,1024,38.61,18.26,15.34\r\ntf_efficientnet_lite4,380,3894.47,98.581,384,13.01,4.04,45.66\r\nvit_base_patch16_rpn_224,224,3890.42,263.193,1024,86.54,17.49,23.75\r\nmobilenetv4_hybrid_large,384,3875.79,264.18,1024,37.76,7.77,34.52\r\ndm_nfnet_f0,256,3869.2,264.631,1024,71.49,12.62,18.05\r\nvit_mediumd_patch16_reg4_gap_256,256,3859.95,265.268,1024,64.11,17.87,37.57\r\nflexivit_base,240,3854.91,265.614,1024,86.59,20.29,28.36\r\nregnety_032,288,3828.43,267.453,1024,19.44,5.29,18.61\r\ncait_xxs36_224,224,3828.02,267.479,1024,17.3,3.77,30.34\r\nvit_base_patch16_rope_mixed_ape_224,224,3801.4,269.355,1024,86.59,17.58,25.7\r\nvit_base_patch16_rope_mixed_224,224,3796.07,269.732,1024,86.44,17.58,25.7\r\ncoat_tiny,224,3793.78,269.895,1024,5.5,4.35,27.2\r\nvit_pe_core_small_patch16_384,384,3784.37,270.568,1024,23.78,15.69,51.23\r\nresnext101_64x4d,224,3779.01,270.947,1024,83.46,15.52,31.21\r\nsequencer2d_m,224,3776.14,271.153,1024,38.31,6.55,14.26\r\nresnext101_32x4d,288,3754.55,272.716,1024,44.18,13.24,35.09\r\nhiera_base_224,224,3743.37,273.529,1024,51.52,9.4,30.42\r\nlambda_resnet50ts,256,3728.55,274.617,1024,21.54,5.07,17.48\r\nxception41,299,3726.06,137.394,512,26.97,9.28,39.86\r\nconvnext_base,224,3723.63,274.976,1024,88.59,15.38,28.75\r\nresnext101_32x8d,224,3712.78,275.786,1024,88.79,16.48,31.21\r\ninception_next_base,224,3711.75,275.858,1024,86.67,14.85,25.69\r\nseresnet101,288,3702.64,276.538,1024,49.33,12.95,26.87\r\nvit_relpos_base_patch16_rpn_224,224,3695.7,277.056,1024,86.41,17.51,24.97\r\nwide_resnet101_2,224,3654.33,280.189,1024,126.89,22.8,21.23\r\nregnety_040_sgn,288,3652.87,280.305,1024,20.65,6.67,20.3\r\nswinv2_tiny_window16_256,256,3646.68,280.783,1024,28.35,6.68,39.02\r\nhgnetv2_b6,224,3624.88,282.472,1024,75.26,16.88,21.23\r\nefficientvit_b3,288,3622.91,211
.966,768,48.65,6.58,44.2\r\nvit_mediumd_patch16_rope_reg1_gap_256,256,3620.1,282.845,1024,63.95,17.65,37.02\r\nregnetz_d32,256,3615.37,283.214,1024,27.58,5.98,23.74\r\npoolformer_m36,224,3611.91,283.49,1024,56.17,8.8,22.02\r\npoolformerv2_m36,224,3609.43,283.683,1024,56.08,8.81,22.02\r\nlevit_384_s8,224,3589.18,285.279,1024,39.06,9.95,35.86\r\necaresnet101d,288,3589.14,285.284,1024,44.57,13.35,28.19\r\ncoatnet_1_224,224,3588.21,106.997,384,42.23,8.7,39.0\r\nresnetrs101,288,3581.5,285.896,1024,63.62,13.56,28.53\r\nresnet101d,320,3562.5,287.42,1024,44.57,16.48,34.77\r\nregnetz_c16,320,3541.7,289.107,1024,13.46,3.92,25.88\r\nfastvit_ma36,256,3499.69,292.578,1024,43.98,7.82,34.98\r\nconvnextv2_base,224,3498.68,292.658,1024,88.72,15.38,28.75\r\nefficientnet_b3_gn,320,3477.41,110.407,384,11.73,2.14,28.83\r\nfastvit_mci2,256,3465.63,295.453,1024,35.7,7.85,36.09\r\nmobilenetv4_conv_aa_large,448,3464.37,221.663,768,32.59,9.63,43.94\r\npvt_v2_b4,224,3455.66,296.302,1024,62.56,10.14,53.74\r\nregnetx_120,224,3439.35,297.713,1024,46.11,12.13,21.37\r\ncaformer_m36,224,3434.22,298.156,1024,56.2,13.29,50.48\r\nregnetz_c16_evos,320,3431.37,298.404,1024,13.49,3.86,25.88\r\nvit_base_patch16_siglip_gap_256,256,3420.65,299.341,1024,85.84,23.13,33.23\r\nvit_base_patch16_siglip_256,256,3405.69,300.654,1024,92.93,23.44,33.63\r\nseresnext101_64x4d,224,3403.94,300.805,1024,88.23,15.53,31.25\r\ntwins_svt_base,224,3400.19,301.142,1024,56.07,8.59,26.33\r\nhrnet_w40,224,3392.05,301.859,1024,57.56,12.75,25.29\r\nconvformer_m36,224,3373.74,303.502,1024,57.05,12.89,42.05\r\nconvnext_small,288,3372.35,303.625,1024,50.22,14.39,35.65\r\neca_nfnet_l1,320,3359.48,304.79,1024,41.41,14.92,34.42\r\nseresnext101_32x8d,224,3358.89,304.84,1024,93.57,16.48,31.25\r\ncrossvit_base_240,240,3358.72,304.857,1024,105.03,21.22,36.33\r\nhrnet_w48_ssld,224,3355.91,305.107,1024,77.47,17.34,28.56\r\nhrnet_w48,224,3352.96,305.377,1024,77.47,17.34,28.56\r\nmambaout_base_tall_rw,224,3334.15,307.103,1024,86.48,16.15,38.74\r\
nefficientnet_b4,320,3320.86,154.153,512,19.34,3.13,34.76\r\nxcit_medium_24_p16_224,224,3295.23,310.73,1024,84.4,16.13,31.71\r\npvt_v2_b5,224,3294.43,310.804,1024,81.96,11.76,50.92\r\nseresnext101d_32x8d,224,3290.51,311.175,1024,93.59,16.72,32.05\r\nmixnet_xxl,224,3278.59,312.311,1024,23.96,2.04,23.43\r\nregnety_120,224,3275.68,312.581,1024,51.82,12.14,21.38\r\nxcit_nano_12_p8_384,384,3274.31,312.72,1024,3.05,6.34,46.08\r\nxception65p,299,3255.09,157.274,512,39.82,13.91,52.48\r\nconvnext_tiny,384,3250.6,314.995,1024,28.59,13.14,39.48\r\ntresnet_xl,224,3249.07,315.146,1024,78.44,15.2,15.34\r\nvit_base_patch16_reg4_gap_256,256,3245.36,315.506,1024,86.62,23.5,33.89\r\nswinv2_small_window8_256,256,3222.83,317.713,1024,49.73,11.58,40.14\r\nhrnet_w44,224,3214.3,318.548,1024,67.06,14.94,26.92\r\nswinv2_cr_small_ns_256,256,3213.21,318.667,1024,49.7,12.07,76.21\r\nefficientnetv2_s,384,3209.8,318.998,1024,21.46,8.44,35.77\r\nvit_large_r50_s32_224,224,3206.06,319.377,1024,328.99,19.58,24.41\r\nresnet152,288,3205.35,319.445,1024,60.19,19.11,37.28\r\nseresnet152d,256,3200.77,319.902,1024,66.84,15.42,30.56\r\ndpn92,224,3200.33,319.943,1024,37.67,6.54,18.21\r\nmaxvit_tiny_pm_256,256,3196.16,120.122,384,30.09,6.61,47.9\r\nseresnext101_32x4d,288,3191.64,320.817,1024,48.96,13.25,35.12\r\ndavit_base,224,3182.69,241.283,768,87.95,15.51,40.66\r\nresnetrs152,256,3181.54,321.837,1024,86.62,15.59,30.83\r\nconvformer_s18,384,3164.31,323.588,1024,26.77,11.63,46.49\r\nswinv2_base_window12_192,192,3163.36,323.686,1024,109.28,11.9,39.72\r\ncoat_lite_medium,224,3145.93,325.48,1024,44.57,9.81,40.06\r\nseresnextaa101d_32x8d,224,3119.24,328.264,1024,93.59,17.25,34.16\r\nvit_small_r26_s32_384,384,3114.88,328.725,1024,36.47,10.43,29.85\r\nmvitv2_base,224,3111.96,329.032,1024,51.47,10.16,40.5\r\nvit_base_patch16_rope_reg1_gap_256,256,3107.44,329.513,1024,86.43,23.22,33.39\r\ncaformer_s18,384,3099.94,330.306,1024,26.34,13.42,77.34\r\nswin_base_patch4_window7_224,224,3096.06,330.726,1024,87.77,15.47,36.
63\r\ntf_efficientnetv2_s,384,3095.87,330.743,1024,21.46,8.44,35.77\r\ncait_s24_224,224,3090.49,331.318,1024,46.92,9.35,40.58\r\nmaxvit_rmlp_small_rw_224,224,3072.03,166.644,512,64.9,10.75,49.3\r\nnf_regnet_b5,384,3069.41,333.59,1024,49.74,7.95,42.9\r\nrdnet_base,224,3064.44,334.132,1024,87.45,15.4,31.14\r\nxcit_small_12_p16_384,384,3061.35,334.474,1024,26.25,14.14,36.51\r\neva02_base_patch14_224,224,3055.09,335.159,1024,85.76,23.22,36.55\r\nresnest101e,256,3044.2,336.357,1024,48.28,13.38,28.66\r\nvolo_d2_224,224,3035.23,337.353,1024,58.68,14.34,41.34\r\nmambaout_base_short_rw,224,3031.27,337.792,1024,88.83,16.31,38.08\r\nvit_small_patch8_224,224,3019.05,339.155,1024,21.67,22.44,80.84\r\nxcit_tiny_24_p8_224,224,3018.01,339.274,1024,12.11,9.21,45.39\r\nhgnet_base,224,3017.28,339.357,1024,71.58,25.14,15.47\r\nefficientnetv2_rw_s,384,3013.18,339.82,1024,23.94,8.72,38.03\r\nmobilenetv4_conv_aa_large,480,3001.15,255.877,768,32.59,11.05,50.45\r\nconvnextv2_tiny,384,2998.05,341.534,1024,28.64,13.14,39.48\r\nvit_base_patch16_plus_240,240,2982.55,343.31,1024,117.56,27.41,33.08\r\nswin_s3_small_224,224,2980.65,343.526,1024,49.74,9.43,37.84\r\nfocalnet_base_srf,224,2978.81,343.739,1024,88.15,15.28,35.01\r\nvit_base_patch16_plus_clip_240,240,2978.45,343.782,1024,117.21,27.41,33.08\r\nsamvit_base_patch16_224,224,2971.69,344.565,1024,86.46,17.54,24.54\r\ncoat_mini,224,2966.49,345.168,1024,10.34,6.82,33.68\r\nswinv2_cr_base_224,224,2962.19,345.67,1024,87.88,15.86,59.66\r\nswinv2_cr_base_ns_224,224,2953.4,346.696,1024,87.88,15.86,59.66\r\nregnetz_040,320,2940.54,348.216,1024,27.12,6.35,37.78\r\nregnetz_040_h,320,2920.1,350.652,1024,28.94,6.43,37.94\r\nregnetx_160,224,2911.12,351.733,1024,54.28,15.99,25.52\r\ncs3se_edgenet_x,320,2896.53,353.5,1024,50.72,18.01,20.21\r\nvit_base_r50_s16_224,224,2894.34,353.776,1024,97.89,21.66,35.28\r\ndpn131,224,2894.14,353.798,1024,79.25,16.09,32.97\r\nresnet200d,256,2887.09,354.661,1024,64.69,20.0,43.09\r\nresnet50x4_clip_gap,288,2886.85,354.693,10
24,65.62,19.57,34.11\r\nregnetz_d8,320,2884.8,354.943,1024,23.37,6.19,37.08\r\nconvnext_base,256,2850.25,359.242,1024,88.59,20.09,37.55\r\nmvitv2_base_cls,224,2849.48,359.345,1024,65.44,10.23,40.65\r\nmambaout_small_rw,288,2849.01,359.401,1024,48.5,14.81,45.82\r\nregnety_160,224,2848.37,359.482,1024,83.59,15.96,23.04\r\nmambaout_small,288,2846.32,359.74,1024,48.49,14.81,45.82\r\nnfnet_f1,224,2839.01,360.668,1024,132.63,17.87,22.94\r\nvit_relpos_base_patch16_plus_240,240,2837.89,360.811,1024,117.38,27.3,34.33\r\nfocalnet_base_lrf,224,2801.23,365.53,1024,88.75,15.43,38.13\r\ntiny_vit_21m_384,384,2796.88,183.044,512,21.22,13.72,77.83\r\nhiera_base_plus_224,224,2788.35,367.221,1024,69.9,12.67,37.98\r\ndm_nfnet_f1,224,2785.31,367.621,1024,132.63,17.87,22.94\r\nvit_base_patch16_dinov3,256,2779.18,368.434,1024,85.64,23.6,34.06\r\nvit_base_patch16_dinov3_qkvb,256,2778.04,368.584,1024,85.66,23.6,34.06\r\ntresnet_m,448,2774.15,369.098,1024,31.39,22.99,29.21\r\nregnetz_d8_evos,320,2773.21,369.229,1024,23.46,7.03,38.92\r\nvit_medium_patch16_gap_384,384,2758.18,371.238,1024,39.03,26.08,67.54\r\ngcvit_small,224,2751.39,372.153,1024,51.09,8.57,41.61\r\nregnetv_064,288,2749.7,372.382,1024,30.58,10.55,27.11\r\ntnt_b_patch16_224,224,2742.79,373.321,1024,65.43,14.1,39.01\r\nhiera_base_abswin_256,256,2740.82,373.588,1024,51.27,12.46,40.7\r\nresnet50x4_clip,288,2727.73,375.384,1024,87.14,21.35,35.27\r\npoolformer_m48,224,2721.05,376.304,1024,73.47,11.59,29.17\r\npoolformerv2_m48,224,2720.39,376.393,1024,73.35,11.59,29.17\r\nmobilenetv4_hybrid_large,448,2712.54,283.104,768,37.76,10.74,48.61\r\nmambaout_base_wide_rw,224,2703.92,378.686,1024,94.45,17.78,42.6\r\nxception65,299,2680.65,190.977,512,39.92,13.96,52.48\r\nregnety_080,288,2676.37,382.587,1024,39.18,13.22,29.69\r\nregnety_064,288,2672.79,383.097,1024,30.58,10.56,27.11\r\nefficientnet_b3_g8_gn,288,2665.55,384.138,1024,14.25,2.59,23.35\r\nnextvit_small,384,2663.64,384.414,1024,31.74,17.25,57.14\r\nefficientnetv2_m,320,2662.14,384.63
3,1024,54.14,11.01,39.97\r\ncoatnet_2_rw_224,224,2656.97,96.327,256,73.87,15.09,49.22\r\ndensenet264d,224,2634.66,145.731,384,72.74,13.57,14.0\r\nmobilevitv2_150,384,2627.75,97.404,256,10.59,9.2,54.25\r\nmobilenetv5_base,256,2622.5,195.216,512,82.65,20.05,36.89\r\ncrossvit_15_dagger_408,408,2619.67,390.865,1024,28.5,21.45,95.05\r\ncoatnet_rmlp_2_rw_224,224,2607.77,98.147,256,73.88,15.18,54.78\r\nfastvit_mci3,256,2604.85,393.09,1024,125.07,14.82,44.88\r\ndpn107,224,2603.7,393.263,1024,86.92,18.38,33.46\r\nvitamin_base_224,224,2602.78,98.34,256,87.72,22.68,52.77\r\nseresnet152,288,2577.86,397.202,1024,66.82,19.11,37.34\r\nmambaout_base_plus_rw,224,2555.0,400.758,1024,101.66,19.19,45.16\r\nhrnet_w64,224,2539.95,403.13,1024,128.06,28.97,35.09\r\nmaxvit_small_tf_224,224,2535.0,151.459,384,68.93,11.66,53.17\r\nresnet152d,320,2534.76,403.962,1024,60.21,24.08,47.67\r\nnest_small,224,2513.77,407.336,1024,38.35,10.35,40.04\r\nmaxxvit_rmlp_small_rw_256,256,2510.07,407.933,1024,66.01,14.67,58.38\r\nconvit_base,224,2505.22,408.727,1024,86.54,17.52,31.77\r\nnest_small_jx,224,2500.89,409.434,1024,38.35,10.35,40.04\r\nmambaout_base,224,2468.23,414.849,1024,84.81,15.83,36.95\r\nsequencer2d_l,224,2459.9,416.255,1024,54.3,9.74,22.12\r\ncoatnet_2_224,224,2452.39,104.366,256,74.68,16.5,52.67\r\nregnetz_e8,256,2444.0,418.968,1024,57.7,9.91,40.94\r\ntwins_svt_large,224,2438.23,419.952,1024,99.27,15.15,35.1\r\nmaxvit_rmlp_small_rw_256,256,2393.49,160.416,384,64.9,14.15,66.09\r\nefficientvit_l3,224,2381.07,215.011,512,246.04,27.62,39.16\r\neca_nfnet_l2,320,2369.54,432.133,1024,56.72,20.95,47.43\r\ncaformer_b36,224,2363.09,433.311,1024,98.75,23.22,67.3\r\nswin_s3_base_224,224,2363.06,433.311,1024,71.13,13.69,48.26\r\nmobilenetv4_conv_aa_large,544,2345.03,218.315,512,32.59,14.19,64.79\r\nresnet200,288,2342.89,437.042,1024,64.67,24.91,53.21\r\nlevit_conv_512_s8,224,2338.73,437.825,1024,73.97,21.77,52.28\r\nconvformer_b36,224,2333.37,438.829,1024,99.88,22.69,56.06\r\nxcit_small_12_p8_224,224,23
30.78,439.315,1024,26.21,18.69,47.21\r\nefficientnetv2_rw_m,320,2318.12,441.714,1024,53.24,12.72,47.14\r\nefficientnet_b4,384,2306.17,166.486,384,19.34,4.51,50.04\r\necaresnet200d,256,2305.77,444.081,1024,64.69,20.0,43.15\r\nseresnet200d,256,2304.45,444.335,1024,71.86,20.01,43.15\r\nefficientvit_l2,384,2297.52,167.116,384,63.71,20.45,57.01\r\nresnetrs200,256,2297.17,445.744,1024,93.21,20.18,43.42\r\nregnetz_d32,320,2285.18,448.081,1024,27.58,9.33,37.08\r\nresnext101_64x4d,288,2281.33,448.838,1024,83.46,25.66,51.59\r\nconvnext_base,288,2243.74,456.355,1024,88.59,25.43,47.53\r\nconvmixer_768_32,224,2218.77,461.496,1024,21.11,19.55,25.95\r\nresnetv2_50x1_bit,448,2208.42,347.732,768,25.55,16.62,44.46\r\nswinv2_base_window8_256,256,2206.49,464.061,1024,87.92,20.37,52.59\r\nvolo_d3_224,224,2189.06,467.758,1024,86.33,20.78,60.09\r\nhgnetv2_b6,288,2179.8,469.743,1024,75.26,27.9,35.09\r\nswinv2_small_window16_256,256,2163.7,473.244,1024,49.73,12.82,66.29\r\nhalonet_h1,256,2162.62,473.478,1024,8.1,3.0,51.17\r\ncoat_small,224,2156.0,474.933,1024,21.69,12.61,44.25\r\nvit_so150m_patch16_reg4_gap_256,256,2125.65,481.715,1024,134.13,36.75,53.21\r\nnf_regnet_b5,456,2115.28,484.075,1024,49.74,11.7,61.95\r\nconvnextv2_base,288,2115.2,484.091,1024,88.72,25.43,47.53\r\nmobilevitv2_175,384,2110.81,121.26,256,14.25,12.47,63.29\r\ntf_efficientnet_b4,380,2108.74,182.079,384,19.34,4.49,49.49\r\nefficientnet_b3_g8_gn,320,2108.52,485.628,1024,14.25,3.2,28.83\r\nvit_so150m_patch16_reg4_map_256,256,2101.15,487.333,1024,141.48,37.17,53.68\r\nxception71,299,2074.03,246.842,512,42.34,18.09,69.92\r\nlevit_512_s8,224,2054.86,498.312,1024,73.97,21.77,52.28\r\nseresnet152d,320,2046.28,500.401,1024,66.84,24.09,47.72\r\nresnetrs152,320,2040.19,501.892,1024,86.62,24.34,48.14\r\nvit_so150m2_patch16_reg1_gap_256,256,2040.07,501.924,1024,136.06,37.0,56.93\r\nnextvit_base,384,2016.87,507.69,1024,44.79,24.62,73.95\r\nhrnet_w48_ssld,288,2001.22,511.66,1024,77.47,28.66,47.21\r\nseresnext101_32x8d,288,1995.71,51
3.075,1024,93.57,27.24,51.63\r\ngcvit_base,224,1990.03,514.54,1024,90.32,14.87,55.48\r\nmaxxvitv2_rmlp_base_rw_224,224,1982.6,516.469,1024,116.09,24.2,62.77\r\nregnety_120,288,1973.16,518.943,1024,51.82,20.06,35.34\r\nseresnext101d_32x8d,288,1957.95,522.973,1024,93.59,27.64,52.95\r\nxcit_tiny_12_p8_384,384,1948.64,525.473,1024,6.71,14.13,69.14\r\nresnext101_32x16d,224,1938.4,528.244,1024,194.03,36.27,51.18\r\nvit_pe_spatial_small_patch16_512,512,1937.8,528.41,1024,21.98,31.8,123.27\r\nconvnext_large,224,1931.56,530.118,1024,197.77,34.4,43.13\r\nmobilevitv2_200,384,1897.66,101.157,192,18.45,16.24,72.34\r\nconvnext_small,384,1894.63,540.45,1024,50.22,25.58,63.37\r\nvit_betwixt_patch16_reg4_gap_384,384,1890.52,541.631,1024,60.6,39.71,85.28\r\nresnet200d,320,1861.73,550.003,1024,64.69,31.25,67.33\r\nseresnextaa101d_32x8d,288,1855.18,551.941,1024,93.59,28.51,56.44\r\nmambaout_base_short_rw,288,1835.16,557.968,1024,88.83,26.96,62.94\r\nefficientvit_l3,256,1825.35,210.352,384,246.04,36.06,50.98\r\nconvnextv2_large,224,1824.98,561.077,1024,197.96,34.4,43.13\r\necaresnet200d,288,1824.46,561.239,1024,64.69,25.31,54.59\r\nswinv2_cr_tiny_384,384,1822.16,421.455,768,28.33,15.34,161.01\r\nseresnet200d,288,1819.14,562.881,1024,71.86,25.32,54.6\r\ntf_efficientnetv2_m,384,1815.2,564.102,1024,54.14,15.85,57.52\r\nconvnext_base,320,1814.2,564.415,1024,88.59,31.39,58.68\r\nhgnet_base,288,1807.07,424.973,768,71.58,41.55,25.57\r\nseresnet269d,256,1802.52,568.068,1024,113.67,26.59,53.6\r\nnest_base,224,1795.28,570.358,1024,67.72,17.96,53.39\r\nvolo_d1_384,384,1787.66,572.798,1024,26.78,22.75,108.55\r\nnest_base_jx,224,1787.42,572.871,1024,67.72,17.96,53.39\r\nswinv2_large_window12_192,192,1786.61,573.128,1024,228.77,26.17,56.53\r\nmambaout_base_tall_rw,288,1785.53,573.477,1024,86.48,26.69,64.04\r\ncrossvit_18_dagger_408,408,1769.51,578.668,1024,44.61,32.47,124.87\r\nregnety_160,288,1753.01,584.118,1024,83.59,26.37,38.07\r\nvit_large_patch32_384,384,1749.37,585.331,1024,306.63,45.31,43.86\
r\nxcit_large_24_p16_224,224,1745.04,586.785,1024,189.1,35.86,47.27\r\nresnetrs270,256,1742.7,587.567,1024,129.86,27.06,55.84\r\nfastvit_mci4,256,1720.92,446.255,768,321.57,27.78,60.59\r\nregnety_320,224,1715.88,596.756,1024,145.05,32.34,30.26\r\ndavit_large,224,1703.97,450.692,768,196.81,34.6,60.99\r\nlegacy_senet154,224,1696.09,603.717,1024,115.09,20.77,38.69\r\nsenet154,224,1682.86,608.464,1024,115.09,20.77,38.69\r\nconvformer_s36,384,1681.95,608.794,1024,40.01,22.54,89.62\r\nvit_mediumd_patch16_reg4_gap_384,384,1672.43,612.261,1024,64.27,43.67,113.51\r\nswin_large_patch4_window7_224,224,1665.86,614.673,1024,196.53,34.53,54.94\r\ncaformer_s36,384,1646.54,621.887,1024,39.3,26.08,150.33\r\neca_nfnet_l2,384,1644.74,622.565,1024,56.72,30.05,68.28\r\nmambaout_base_wide_rw,288,1641.61,623.753,1024,94.45,29.39,70.41\r\nrdnet_large,224,1640.03,624.359,1024,186.27,34.74,46.67\r\nmaxvit_rmlp_base_rw_224,224,1639.54,312.265,512,116.14,23.15,92.64\r\nxcit_small_24_p16_384,384,1635.32,626.152,1024,47.67,26.72,68.58\r\nnextvit_large,384,1631.0,627.809,1024,57.83,32.0,90.76\r\nmixer_l16_224,224,1595.46,641.799,1024,208.2,44.6,41.69\r\nregnetx_320,224,1588.38,644.657,1024,107.81,31.81,36.3\r\nswinv2_cr_large_224,224,1585.11,645.99,1024,196.68,35.1,78.42\r\nefficientnetv2_m,416,1572.2,651.296,1024,54.14,18.6,67.5\r\nregnetz_e8,320,1569.3,652.497,1024,57.7,15.46,63.94\r\nresnetv2_50x3_bit,224,1560.8,656.052,1024,217.32,37.06,33.34\r\nnfnet_f2,256,1551.59,659.942,1024,193.78,33.76,41.85\r\nmambaout_base_plus_rw,288,1548.33,661.335,1024,101.66,31.72,74.64\r\nvit_small_patch14_dinov2,518,1543.68,663.332,1024,22.06,46.76,198.79\r\nswinv2_base_window16_256,256,1532.48,668.173,1024,87.92,22.02,84.71\r\nswinv2_base_window12to16_192to256,256,1529.75,669.371,1024,87.92,22.02,84.71\r\nvit_small_patch14_reg4_dinov2,518,1529.63,669.422,1024,22.06,46.95,199.77\r\ndm_nfnet_f2,256,1521.42,673.035,1024,193.78,33.76,41.85\r\neca_nfnet_l3,352,1503.81,680.911,1024,72.04,32.57,73.12\r\nmambaout_base,
288,1493.32,685.698,1024,84.81,26.16,61.08\r\nvit_base_patch16_18x2_224,224,1485.68,689.227,1024,256.73,52.51,71.38\r\nseresnextaa101d_32x8d,320,1481.13,691.332,1024,93.59,35.19,69.67\r\ncoatnet_rmlp_3_rw_224,224,1470.67,130.532,192,165.15,33.56,79.47\r\nresnetrs200,320,1469.64,696.744,1024,93.21,31.51,67.81\r\ncoatnet_3_rw_224,224,1467.66,130.8,192,181.81,33.44,73.83\r\nconvnext_large_mlp,256,1464.45,699.213,1024,200.13,44.94,56.33\r\nnaflexvit_base_patch16_siglip,384,1463.15,699.836,1024,92.93,56.12,102.2\r\nmaxvit_base_tf_224,224,1442.07,266.264,384,119.47,24.04,95.01\r\nnaflexvit_base_patch16_gap,384,1436.61,712.768,1024,86.63,55.86,102.34\r\nnaflexvit_base_patch16_parfac_gap,384,1433.55,714.292,1024,86.46,55.86,102.34\r\nnaflexvit_base_patch16_par_gap,384,1433.11,714.505,1024,86.63,55.86,102.34\r\nvit_base_patch16_384,384,1423.1,719.534,1024,86.86,55.54,101.56\r\nvit_base_patch16_siglip_gap_384,384,1422.51,719.831,1024,86.09,55.43,101.3\r\nnaflexvit_base_patch16_map,384,1422.09,720.047,1024,93.72,56.23,102.46\r\ndeit3_base_patch16_384,384,1422.04,720.068,1024,86.88,55.54,101.56\r\nseresnet269d,288,1421.19,720.495,1024,113.67,33.65,67.81\r\ndeit_base_patch16_384,384,1418.89,721.666,1024,86.86,55.54,101.56\r\nvit_base_patch16_clip_384,384,1418.81,721.71,1024,86.86,55.54,101.56\r\ntiny_vit_21m_512,512,1417.79,180.544,256,21.26,26.93,177.93\r\nvit_base_patch16_siglip_384,384,1414.07,724.119,1024,93.18,56.12,102.2\r\ndeit_base_distilled_patch16_384,384,1414.02,724.16,1024,87.63,55.65,101.82\r\nnfnet_f1,320,1411.45,725.472,1024,132.63,35.97,46.77\r\nefficientnet_b5,416,1406.92,181.939,256,30.39,8.27,80.68\r\nrepvgg_d2se,320,1387.39,738.051,1024,120.39,66.99,23.42\r\ndm_nfnet_f1,320,1383.23,740.268,1024,132.63,35.97,46.77\r\nresnetv2_152x2_bit,224,1381.49,741.201,1024,236.34,46.95,45.11\r\ncoatnet_3_224,224,1375.16,139.601,192,166.97,36.56,79.01\r\nefficientnetv2_rw_m,416,1366.71,749.225,1024,53.24,21.49,79.62\r\nresnetv2_101x1_bit,448,1356.7,566.056,768,44.54,31.65,6
4.93\r\ndeit3_large_patch16_224,224,1345.47,761.045,1024,304.37,61.6,63.52\r\nvit_large_patch16_224,224,1338.7,764.901,1024,304.33,61.6,63.52\r\neva_large_patch14_196,196,1338.59,764.96,1024,304.14,61.57,63.52\r\npnasnet5large,331,1310.4,586.057,768,86.06,25.04,92.89\r\nnasnetalarge,331,1304.76,392.389,512,88.75,23.89,90.56\r\nmaxvit_tiny_tf_384,384,1295.53,148.177,192,30.98,17.53,123.42\r\nbeit3_large_patch16_224,224,1293.48,791.641,1024,304.57,61.72,63.52\r\nbeit_large_patch16_224,224,1286.63,795.858,1024,304.43,61.6,63.52\r\nvit_large_patch16_rope_ape_224,224,1282.71,798.286,1024,304.37,61.6,63.52\r\nbeitv2_large_patch16_224,224,1282.42,798.466,1024,304.43,61.6,63.52\r\nvit_large_patch16_rope_224,224,1280.24,799.828,1024,304.17,61.6,63.52\r\nconvnext_base,384,1260.5,812.357,1024,88.59,45.21,84.49\r\nbeit_base_patch16_384,384,1259.53,812.979,1024,86.74,55.54,101.56\r\ninception_next_base,384,1256.74,814.78,1024,86.67,43.64,75.48\r\nconvmixer_1024_20_ks9_p14,224,1224.0,836.575,1024,24.38,5.55,5.51\r\nxcit_small_24_p8_224,224,1218.99,840.014,1024,47.63,35.81,90.78\r\nefficientnet_b5,448,1211.31,211.32,256,30.39,9.59,93.56\r\nconvnext_xlarge,224,1202.66,851.423,1024,350.2,60.98,57.5\r\nvolo_d4_224,224,1201.26,852.419,1024,192.96,44.34,80.22\r\nmaxxvitv2_rmlp_large_rw_224,224,1195.3,856.669,1024,215.42,44.14,87.15\r\nconvnextv2_base,384,1187.04,862.627,1024,88.72,45.21,84.49\r\nvit_large_patch16_rope_mixed_ape_224,224,1186.33,863.141,1024,304.4,61.6,68.34\r\nflexivit_large,240,1185.04,864.087,1024,304.36,70.99,75.39\r\nvit_large_patch16_rope_mixed_224,224,1183.92,864.896,1024,304.2,61.6,68.34\r\ntresnet_l,448,1174.38,871.926,1024,55.99,43.59,47.56\r\nefficientnet_x_b5,448,1173.13,654.639,768,33.44,23.35,68.87\r\nefficientvit_l3,320,1168.59,219.047,256,246.04,56.32,79.34\r\nconvnext_large,288,1162.57,880.783,1024,197.77,56.87,71.29\r\nconvformer_m36,384,1157.73,884.466,1024,57.05,37.87,123.56\r\ntf_efficientnetv2_m,480,1154.58,886.88,1024,54.14,24.76,89.84\r\necaresnet
269d,320,1149.56,890.749,1024,102.09,41.53,83.69\r\ncaformer_m36,384,1131.56,904.921,1024,56.2,42.11,196.35\r\nconvnextv2_large,288,1102.41,928.851,1024,197.96,56.87,71.29\r\nresnetrs350,288,1102.03,929.169,1024,163.96,43.67,87.09\r\nxcit_medium_24_p16_384,384,1095.52,934.698,1024,84.4,47.39,91.64\r\nhiera_large_224,224,1090.3,939.17,1024,213.74,40.34,83.37\r\ndavit_huge,224,1083.25,708.951,768,348.92,61.23,81.32\r\nswinv2_cr_small_384,384,1080.4,473.881,512,49.7,29.7,298.03\r\nresnest200e,320,1067.47,959.257,1024,70.2,35.69,82.78\r\ntf_efficientnet_b5,456,1058.47,241.833,256,30.39,10.46,98.86\r\nefficientnet_h_b5,448,1053.11,486.157,512,45.88,27.16,73.9\r\nvit_large_r50_s32_384,384,1050.01,975.196,1024,329.09,57.43,76.52\r\naimv2_large_patch14_224,224,1043.78,981.027,1024,309.2,82.3,85.2\r\nvit_large_patch16_siglip_256,256,1039.25,985.297,1024,315.96,81.34,88.88\r\nvit_large_patch16_siglip_gap_256,256,1036.15,988.247,1024,303.36,80.8,88.34\r\nefficientnetv2_l,384,1033.18,991.086,1024,118.52,36.1,101.16\r\nxcit_tiny_24_p8_384,384,1030.08,994.073,1024,12.11,27.05,132.95\r\nvit_base_patch8_224,224,1026.96,997.088,1024,86.58,78.22,161.69\r\ntf_efficientnetv2_l,384,1020.76,1003.155,1024,118.52,36.1,101.16\r\nvit_large_patch14_clip_quickgelu_224,224,1003.4,1020.504,1024,303.97,81.08,88.79\r\nvit_large_patch14_xp_224,224,998.65,1025.366,1024,304.06,81.01,88.79\r\nvit_large_patch14_clip_224,224,998.25,1025.77,1024,304.2,81.08,88.79\r\nvit_large_patch14_224,224,997.81,1026.218,1024,304.2,81.08,88.79\r\nvolo_d2_384,384,992.95,1031.243,1024,58.87,46.17,184.51\r\nmaxvit_large_tf_224,224,992.26,257.975,256,211.79,43.68,127.35\r\ncoat_lite_medium_384,384,992.15,774.057,768,44.57,28.73,116.7\r\nregnety_640,224,983.37,1041.292,1024,281.38,64.16,42.5\r\nvitamin_large_224,224,974.43,262.699,256,333.32,75.05,112.83\r\nvitamin_large2_224,224,972.71,263.162,256,333.58,75.05,112.83\r\nregnety_160,384,960.85,1065.698,1024,83.59,46.87,67.67\r\nresnet50x16_clip_gap,384,952.74,1074.765,1024
,136.2,70.32,100.64\r\necaresnet269d,352,951.17,1076.547,1024,102.09,50.25,101.25\r\nresnetv2_101x3_bit,224,946.39,1081.979,1024,387.93,71.23,48.7\r\neva02_large_patch14_224,224,934.03,1096.299,1024,303.27,81.15,97.2\r\nconvnext_large_mlp,320,932.69,1097.878,1024,200.13,70.21,88.02\r\neca_nfnet_l3,448,930.54,825.302,768,72.04,52.55,118.4\r\nvit_so150m_patch16_reg4_gap_384,384,926.57,1105.127,1024,134.42,87.97,165.47\r\nresnetrs270,352,921.71,1110.956,1024,129.86,51.13,105.48\r\ncait_xxs24_384,384,918.66,1114.642,1024,12.03,9.63,122.66\r\neva02_large_patch14_clip_224,224,914.56,1119.638,1024,304.11,81.18,97.2\r\nvit_base_r50_s16_384,384,913.85,1120.503,1024,98.95,67.43,135.03\r\nmvitv2_large,224,906.69,1129.36,1024,217.99,43.87,112.02\r\nresnet50x16_clip,384,893.45,1146.091,1024,167.33,74.9,103.54\r\nswinv2_large_window12to16_192to256,256,882.06,580.441,512,196.74,47.81,121.53\r\nnaflexvit_so150m2_patch16_reg1_gap,384,871.74,1174.63,1024,136.06,89.53,178.22\r\nvit_large_patch16_dinov3_qkvb,256,870.88,1175.8,1024,303.13,82.43,90.56\r\nvit_large_patch16_dinov3,256,868.94,1178.418,1024,303.08,82.43,90.56\r\nvit_so150m2_patch16_reg1_gap_384,384,866.31,1182.006,1024,136.33,89.53,178.22\r\nnaflexvit_so150m2_patch16_reg1_map,384,860.05,1190.603,1024,142.46,90.33,179.2\r\nmvitv2_large_cls,224,855.67,1196.698,1024,234.58,42.17,111.69\r\ncoatnet_4_224,224,837.97,229.105,192,275.43,62.48,129.26\r\ncoatnet_rmlp_2_rw_384,384,830.13,115.626,96,73.88,47.69,209.43\r\nmaxvit_small_tf_384,384,814.21,157.188,128,69.02,35.87,183.65\r\nxcit_medium_24_p8_224,224,813.97,1258.008,1024,84.32,63.53,121.23\r\nefficientvit_l3,384,811.91,236.461,192,246.04,81.08,114.02\r\nnfnet_f2,352,811.07,1262.495,1024,193.78,63.22,79.06\r\nmambaout_base_plus_rw,384,804.79,954.261,768,101.66,56.39,132.7\r\nconvformer_b36,384,798.08,962.285,768,99.88,66.67,164.75\r\ndm_nfnet_f2,352,793.6,1290.289,1024,193.78,63.22,79.06\r\nxcit_small_12_p8_384,384,791.33,970.503,768,26.21,54.92,138.29\r\ncaformer_b36,384,781.7
3,982.408,768,98.75,72.33,261.79\r\nswin_base_patch4_window12_384,384,777.58,987.656,768,87.9,47.19,134.78\r\ntresnet_xl,448,774.26,991.886,768,78.44,60.77,61.31\r\nvit_base_patch16_siglip_gap_512,512,761.91,1343.961,1024,86.43,107.0,246.15\r\nswinv2_cr_base_384,384,759.77,673.872,512,87.88,50.57,333.68\r\nvolo_d5_224,224,758.17,1350.596,1024,295.46,72.4,118.11\r\nvit_base_patch16_siglip_512,512,757.19,1352.337,1024,93.52,108.22,247.74\r\nresnetrs420,320,750.02,1365.276,1024,191.89,64.2,126.56\r\nnfnet_f3,320,748.37,1368.286,1024,254.92,68.77,83.93\r\nseresnextaa201d_32x8d,320,746.51,1371.682,1024,149.39,70.22,138.71\r\nvitamin_large_256,256,746.23,257.271,192,333.38,99.0,154.99\r\nvitamin_large2_256,256,745.55,257.507,192,333.64,99.0,154.99\r\nvit_so400m_patch14_siglip_224,224,744.15,1376.045,1024,427.68,110.26,106.73\r\nvit_so400m_patch14_siglip_gap_224,224,738.96,1385.693,1024,412.44,109.57,106.13\r\nvit_so400m_patch16_siglip_gap_256,256,737.83,1387.832,1024,412.65,109.62,106.13\r\nvit_so400m_patch16_siglip_256,256,734.08,1394.92,1024,427.89,110.31,106.73\r\ndm_nfnet_f3,320,732.88,1397.204,1024,254.92,68.77,83.93\r\nefficientnetv2_xl,384,732.58,1397.774,1024,208.12,52.81,139.2\r\ntf_efficientnetv2_xl,384,723.93,1414.481,1024,208.12,52.81,139.2\r\nmaxvit_tiny_tf_512,512,721.29,133.068,96,31.05,33.49,257.59\r\nconvnext_xlarge,288,720.35,1421.498,1024,350.2,100.8,95.05\r\nefficientnet_x_b5,576,708.21,722.93,512,33.44,38.59,113.83\r\neva02_base_patch14_448,448,692.02,1479.706,1024,87.12,107.11,259.14\r\nvit_pe_spatial_base_patch16_512,512,688.26,1487.784,1024,86.43,107.13,246.54\r\ncait_xs24_384,384,685.72,1493.3,1024,26.67,19.28,183.98\r\nresmlp_big_24_224,224,675.28,1516.385,1024,129.14,100.23,87.31\r\nconvnextv2_huge,224,667.32,1534.464,1024,660.29,115.0,79.07\r\nefficientnet_b6,528,657.61,194.625,128,43.04,19.4,167.39\r\nefficientnetv2_l,480,657.1,1558.345,1024,118.52,56.4,157.99\r\nresnext101_32x32d,224,653.97,1565.796,1024,468.53,87.29,91.12\r\nconvnext_large,3
84,649.5,1182.41,768,197.77,101.1,126.74\r\ntf_efficientnetv2_l,480,648.11,1579.965,1024,118.52,56.4,157.99\r\nconvnext_large_mlp,384,647.75,1185.61,768,200.13,101.11,126.74\r\nefficientnet_h_b5,576,634.41,605.269,384,45.88,44.9,122.13\r\nmaxxvitv2_rmlp_base_rw_384,384,632.53,607.064,384,116.09,72.98,213.74\r\nconvnextv2_large,384,619.55,1239.581,768,197.96,101.1,126.74\r\ncait_xxs36_384,384,615.28,1664.269,1024,17.37,14.35,183.7\r\nresnetrs350,384,614.84,1665.457,1024,163.96,77.59,154.74\r\nvit_so150m2_patch16_reg1_gap_448,448,614.83,1665.488,1024,136.5,127.51,287.05\r\ntf_efficientnet_b6,528,612.39,208.999,128,43.04,19.4,167.39\r\nswinv2_cr_huge_224,224,600.31,1705.754,1024,657.83,115.97,121.08\r\nregnety_320,384,596.71,1287.004,768,145.05,95.0,88.87\r\nxcit_large_24_p16_384,384,592.2,1729.127,1024,189.1,105.35,137.17\r\nvitamin_xlarge_256,256,590.61,216.708,128,436.06,130.13,177.37\r\nfocalnet_huge_fl3,224,575.4,1779.613,1024,745.28,118.26,104.8\r\nrdnet_large,384,573.97,445.996,256,186.27,102.09,137.13\r\ncait_s24_384,384,569.93,1796.681,1024,47.06,32.17,245.31\r\nmaxvit_xlarge_tf_224,224,554.36,346.323,192,506.99,97.52,191.04\r\nsam2_hiera_tiny,896,538.05,118.923,64,26.85,99.86,384.63\r\nvit_base_patch14_dinov2,518,533.63,1918.924,1024,86.58,151.71,397.58\r\nvit_base_patch14_reg4_dinov2,518,532.65,1441.823,768,86.58,152.25,399.53\r\nresnest269e,416,522.3,1470.383,768,110.93,77.69,171.98\r\nvit_huge_patch14_gap_224,224,518.02,1976.749,1024,630.76,166.73,138.74\r\nmaxvit_rmlp_base_rw_384,384,516.8,371.495,192,116.14,70.97,318.95\r\nseresnextaa201d_32x8d,384,515.15,1490.81,768,149.39,101.11,199.72\r\naimv2_huge_patch14_224,224,514.31,1990.972,1024,680.85,179.01,126.22\r\ndeit3_huge_patch14_224,224,512.75,1997.04,1024,632.13,167.4,139.41\r\nregnety_1280,224,512.44,1498.677,768,644.81,127.66,71.58\r\nvit_huge_patch14_xp_224,224,509.71,2008.944,1024,631.8,167.3,139.41\r\nvit_huge_patch14_clip_quickgelu_224,224,505.52,2025.631,1024,632.08,167.4,139.41\r\nvit_huge_patc
h14_clip_224,224,505.41,2026.056,1024,632.05,167.4,139.41\r\nvit_huge_patch14_224,224,505.25,2026.694,1024,630.76,167.4,139.41\r\nvolo_d3_448,448,501.96,2039.953,1024,86.63,96.33,446.83\r\nhiera_huge_224,224,488.92,2094.368,1024,672.78,124.85,150.95\r\nswinv2_base_window12to24_192to384,384,464.94,550.591,256,87.92,55.25,280.36\r\nmaxvit_base_tf_384,384,455.41,281.04,128,119.65,73.8,332.9\r\nmaxvit_small_tf_512,512,454.03,140.94,64,69.13,67.26,383.77\r\ncoatnet_5_224,224,449.79,284.558,128,687.47,145.49,194.24\r\nresnetv2_152x4_bit,224,447.28,1144.671,512,936.53,186.9,90.22\r\nsam2_hiera_small,896,446.27,143.396,64,33.95,123.99,442.63\r\naimv2_large_patch14_336,336,443.13,2310.788,1024,309.53,194.22,227.08\r\nresnetrs420,416,441.12,2321.323,1024,191.89,108.45,213.79\r\nxcit_large_24_p8_224,224,440.67,2323.71,1024,188.93,141.23,181.56\r\nswin_large_patch4_window12_384,384,436.77,1172.205,512,196.74,104.08,202.16\r\nvit_large_patch14_clip_quickgelu_336,336,436.57,2345.555,1024,304.29,191.11,270.24\r\ndeit3_large_patch16_384,384,435.46,2351.483,1024,304.76,191.21,270.24\r\neva_large_patch14_336,336,434.73,2355.429,1024,304.53,191.1,270.24\r\nvit_large_patch16_siglip_gap_384,384,433.72,2360.932,1024,303.69,190.85,269.55\r\nvit_large_patch14_clip_336,336,433.48,2362.267,1024,304.53,191.11,270.24\r\nvit_large_patch16_siglip_384,384,433.37,2362.834,1024,316.28,192.07,270.75\r\nvit_large_patch16_384,384,433.32,2363.138,1024,304.72,191.21,270.24\r\nvit_giant_patch16_gap_224,224,430.67,2377.678,1024,1011.37,202.46,139.26\r\nresnetv2_152x2_bit,384,429.43,1192.241,512,236.34,136.16,132.56\r\nswinv2_cr_large_384,384,427.6,898.013,384,196.68,108.96,404.96\r\nvitamin_large_336,336,420.11,228.493,96,333.57,175.72,307.47\r\nvitamin_large2_336,336,419.41,228.876,96,333.83,175.72,307.47\r\nnfnet_f4,384,413.02,2479.258,1024,316.07,122.14,147.57\r\nxcit_small_24_p8_384,384,412.74,1860.714,768,47.63,105.24,265.91\r\nvit_pe_core_large_patch14_336,336,411.09,2490.892,1024,317.15,192.33,271.
43\r\nefficientnetv2_xl,512,408.84,2504.642,1024,208.12,93.85,247.32\r\nconvnext_xxlarge,256,407.23,1885.89,768,846.47,198.09,124.45\r\nconvnext_xlarge,384,405.77,1261.768,512,350.2,179.2,168.99\r\ndavit_giant,224,403.6,1902.833,768,1406.47,192.92,153.06\r\ndm_nfnet_f4,384,403.44,2538.149,1024,316.07,122.14,147.57\r\ntf_efficientnetv2_xl,512,403.43,2538.23,1024,208.12,93.85,247.32\r\nconvnextv2_huge,288,401.02,1915.108,768,660.29,190.1,130.7\r\neva02_large_patch14_clip_336,336,395.65,2588.111,1024,304.43,191.34,289.13\r\nbeit_large_patch16_384,384,395.46,2589.376,1024,305.0,191.21,270.24\r\nnfnet_f3,416,384.25,2664.927,1024,254.92,115.58,141.78\r\nefficientnet_b7,600,380.57,252.23,96,66.35,38.33,289.94\r\ncait_s36_384,384,380.49,2691.24,1024,68.37,47.99,367.4\r\ndm_nfnet_f3,416,374.38,2735.152,1024,254.92,115.58,141.78\r\nresnetv2_50x3_bit,448,364.89,701.55,256,217.32,145.7,133.37\r\nmvitv2_huge_cls,224,362.96,2115.905,768,694.8,120.67,243.63\r\ntf_efficientnet_b7,600,359.73,266.838,96,66.35,38.33,289.94\r\nvit_huge_plus_patch16_dinov3_qkvb,256,356.21,2874.655,1024,840.59,224.88,193.59\r\nvit_huge_plus_patch16_dinov3,256,355.19,2882.918,1024,840.51,224.88,193.59\r\nfocalnet_huge_fl4,224,346.26,2957.287,1024,686.46,118.9,113.34\r\nvitamin_xlarge_336,336,330.66,290.309,96,436.06,230.18,347.33\r\nvit_giant_patch14_224,224,329.91,3103.802,1024,1012.61,267.18,192.64\r\neva_giant_patch14_224,224,328.58,3116.398,1024,1012.56,267.18,192.64\r\nvit_giant_patch14_clip_224,224,326.67,3134.635,1024,1012.65,267.18,192.64\r\neva_giant_patch14_clip_224,224,324.41,3156.505,1024,1012.59,267.18,192.64\r\nresnet50x64_clip_gap,448,321.99,2385.125,768,365.03,253.96,233.22\r\nnaflexvit_so400m_patch16_siglip,384,320.55,3194.447,1024,427.89,259.65,319.77\r\nmaxvit_large_tf_384,384,317.44,302.403,96,212.03,132.55,445.84\r\nvitamin_large_384,384,317.3,201.677,64,333.71,234.44,440.16\r\nbeit3_giant_patch14_224,224,317.02,3230.016,1024,1013.22,267.56,192.64\r\nvitamin_large2_384,384,317.0,201.8
74,64,333.97,234.44,440.16\r\nresnetv2_152x2_bit,448,315.63,1216.599,384,236.34,184.99,180.43\r\nvit_so400m_patch16_siglip_gap_384,384,311.51,3287.184,1024,413.02,258.11,318.42\r\nvit_so400m_patch16_siglip_384,384,309.73,3306.092,1024,428.26,259.65,319.77\r\nresnet50x64_clip,448,305.4,1676.442,512,420.38,265.02,239.13\r\nvit_giantopt_patch16_siglip_256,256,300.54,3407.202,1024,1163.17,299.66,200.43\r\nvit_giantopt_patch16_siglip_gap_256,256,300.52,3407.42,1024,1134.84,298.42,199.62\r\naimv2_1b_patch14_224,224,296.4,3454.769,1024,1234.96,322.43,170.39\r\nregnety_640,384,280.88,2734.191,768,281.38,188.47,124.83\r\nvolo_d4_448,448,280.44,2738.537,768,193.41,197.13,527.35\r\nxcit_medium_24_p8_384,384,275.55,1858.101,512,84.32,186.67,354.73\r\nswinv2_large_window12to24_192to384,384,275.27,464.977,128,196.74,116.15,407.83\r\ndavit_base_fl,768,264.46,725.981,192,90.37,190.32,530.15\r\nnfnet_f5,416,262.67,3898.427,1024,377.21,170.71,204.56\r\ndm_nfnet_f5,416,255.81,4002.974,1024,377.21,170.71,204.56\r\nmaxvit_base_tf_512,512,252.55,253.401,64,119.88,138.02,703.99\r\nvitamin_xlarge_384,384,248.58,257.444,64,436.06,306.38,493.46\r\nefficientnet_b8,672,243.43,394.351,96,87.41,63.48,442.89\r\nvit_so400m_patch14_siglip_378,378,240.15,4264.019,1024,428.23,335.4,452.89\r\nvit_so400m_patch14_siglip_gap_378,378,240.07,4265.46,1024,412.99,333.46,451.19\r\nvit_so400m_patch14_siglip_gap_384,384,239.9,4268.338,1024,412.99,333.46,451.19\r\nfocalnet_large_fl3,384,239.13,3211.662,768,239.13,105.06,168.04\r\nvit_so400m_patch14_siglip_384,384,239.02,4284.175,1024,428.23,335.4,452.89\r\naimv2_large_patch14_448,448,236.11,3252.639,768,309.98,367.84,491.78\r\nfocalnet_large_fl4,384,233.15,3294.006,768,239.32,105.2,181.78\r\nvit_large_patch16_siglip_gap_512,512,231.91,4415.375,1024,304.15,361.84,655.36\r\ntf_efficientnet_b8,672,231.39,414.856,96,87.41,63.48,442.89\r\nvit_large_patch16_siglip_512,512,231.14,4430.092,1024,316.74,364.0,657.48\r\nresnetv2_101x3_bit,448,227.74,1124.069,256,387.93,280
.33,194.78\r\nconvnextv2_huge,384,227.44,1125.566,256,660.29,337.96,232.35\r\nsam2_hiera_base_plus,896,227.12,281.765,64,68.68,227.48,828.88\r\nvit_intern300m_patch14_448,448,225.46,4541.749,1024,304.01,362.05,656.39\r\nvit_pe_lang_large_patch14_448,448,224.79,4555.393,1024,291.42,346.99,629.09\r\naimv2_huge_patch14_336,336,220.1,4652.381,1024,681.34,416.36,337.08\r\nnfnet_f6,448,218.84,3509.424,768,438.36,229.7,273.62\r\nvit_huge_patch14_clip_336,336,218.64,4683.554,1024,632.46,390.97,407.54\r\nvit_pe_spatial_large_patch14_448,448,215.5,4751.706,1024,303.96,362.05,656.39\r\ndm_nfnet_f6,448,214.61,3578.594,768,438.36,229.7,273.62\r\nnfnet_f4,512,207.2,3706.552,768,316.07,216.26,262.26\r\neva02_large_patch14_448,448,203.37,5035.036,1024,305.08,362.33,689.95\r\ndm_nfnet_f4,512,201.76,3806.433,768,316.07,216.26,262.26\r\nbeit_large_patch16_512,512,198.24,5165.436,1024,305.67,362.24,656.39\r\nvit_gigantic_patch14_clip_quickgelu_224,224,183.06,5593.867,1024,1844.91,483.96,275.37\r\ncait_m36_384,384,182.7,5604.918,1024,271.22,173.11,734.81\r\nvit_gigantic_patch14_224,224,182.65,5606.388,1024,1844.44,483.95,275.37\r\nvit_gigantic_patch14_clip_224,224,182.6,5607.999,1024,1844.91,483.96,275.37\r\nmaxvit_xlarge_tf_384,384,177.49,360.555,64,475.32,292.78,668.76\r\nfocalnet_xlarge_fl3,384,176.25,2904.969,512,408.79,185.61,223.99\r\nmaxvit_large_tf_512,512,175.5,273.486,48,212.33,244.75,942.15\r\nswinv2_cr_huge_384,384,171.68,1118.345,192,657.94,352.04,583.18\r\nvolo_d5_448,448,171.33,4482.521,768,295.91,315.06,737.92\r\nnfnet_f5,544,170.66,3000.119,512,377.21,290.97,349.71\r\nvit_huge_patch14_clip_quickgelu_378,378,170.61,6002.126,1024,632.68,503.79,572.79\r\nfocalnet_xlarge_fl4,384,170.49,3003.121,512,409.03,185.79,242.31\r\nvit_huge_patch14_clip_378,378,170.08,6020.505,1024,632.68,503.79,572.79\r\nconvmixer_1536_20,224,169.48,6041.959,1024,51.63,48.68,33.03\r\nswinv2_cr_giant_224,224,168.33,2281.259,384,2598.76,483.85,309.15\r\nvit_large_patch14_reg4_dinov2,518,166.37,3077.51
7,512,304.37,508.9,1064.02\r\ndm_nfnet_f5,544,165.87,3086.785,512,377.21,290.97,349.71\r\nvit_large_patch14_dinov2,518,165.3,4645.932,768,304.37,507.15,1058.82\r\nvit_huge_patch16_gap_448,448,156.25,6553.467,1024,631.67,544.7,636.83\r\nvit_so400m_patch14_siglip_gap_448,448,156.1,6559.871,1024,413.33,487.18,764.26\r\nvit_so400m_patch16_siglip_gap_512,512,155.97,6565.506,1024,413.53,487.4,764.26\r\nvit_so400m_patch16_siglip_512,512,155.4,6589.393,1024,428.77,490.13,766.65\r\ntf_efficientnet_l2,475,153.43,417.096,64,480.31,172.11,609.89\r\nnfnet_f7,480,153.15,5014.703,768,499.5,300.08,355.86\r\nxcit_large_24_p8_384,384,150.99,3390.856,512,188.93,415.0,531.82\r\nregnety_1280,384,148.71,3442.951,512,644.81,374.99,210.2\r\neva_giant_patch14_336,336,140.73,7276.252,1024,1013.01,620.64,550.67\r\nbeit3_giant_patch14_336,336,136.61,7495.88,1024,1013.67,621.52,550.67\r\nnfnet_f6,576,132.39,3867.369,512,438.36,378.69,452.2\r\nvolo_d5_512,512,132.1,3875.73,512,296.09,425.09,1105.37\r\naimv2_3b_patch14_224,224,129.6,5926.014,768,2720.66,705.91,252.44\r\ndm_nfnet_f6,576,129.54,3952.46,512,438.36,378.69,452.2\r\nvit_giantopt_patch16_siglip_gap_384,384,129.46,7909.971,1024,1135.33,694.1,567.12\r\nvit_giantopt_patch16_siglip_384,384,128.95,7940.783,1024,1163.66,696.85,568.91\r\naimv2_1b_patch14_336,336,128.74,5965.425,768,1235.61,743.59,454.16\r\nconvnextv2_huge,512,127.94,2000.905,256,660.29,600.81,413.07\r\naimv2_huge_patch14_448,448,119.3,4291.5,512,682.03,774.02,731.38\r\nregnety_2560,384,101.58,2520.061,256,1282.6,747.83,296.49\r\nmobilenetv5_300m,768,101.55,630.195,64,294.13,435.74,842.16\r\nmobilenetv5_300m_enc,768,100.19,638.751,64,294.13,435.74,842.16\r\nmaxvit_xlarge_tf_512,512,98.17,325.956,32,475.77,534.14,1413.22\r\nresnetv2_152x4_bit,480,96.66,1324.141,128,936.53,844.84,414.26\r\ndavit_huge_fl,768,91.81,1394.161,128,360.64,744.84,1060.3\r\nnfnet_f7,608,91.7,5583.514,512,499.5,480.39,570.85\r\ncait_m48_448,448,82.63,6196.181,512,356.46,329.41,1708.23\r\naimv2_1b_patch14_
448,448,70.31,5461.683,384,1236.53,1367.03,983.56\r\nsam2_hiera_large,1024,67.59,710.12,48,212.15,907.48,2190.34\r\nsamvit_base_patch16,1024,63.44,378.299,24,89.67,486.43,1343.27\r\nvit_gigantic_patch14_clip_378,378,62.22,8229.005,512,1845.7,1429.82,1047.37\r\naimv2_3b_patch14_336,336,56.63,6781.179,384,2721.64,1615.48,674.17\r\nefficientnet_l2,800,56.49,424.844,24,480.31,479.12,1707.39\r\ntf_efficientnet_l2,800,54.67,439.008,24,480.31,479.12,1707.39\r\nvit_giant_patch14_dinov2,518,51.9,7398.583,384,1136.48,1784.2,2757.89\r\nvit_giant_patch14_reg4_dinov2,518,51.7,7427.253,384,1136.48,1790.08,2771.21\r\nswinv2_cr_giant_384,384,48.94,1307.569,64,2598.76,1450.71,1394.86\r\nvit_pe_lang_gigantic_patch14_448,448,46.56,10996.847,512,1740.92,1931.99,1664.88\r\neva_giant_patch14_560,560,46.09,8331.738,384,1014.45,1906.76,2577.17\r\nvit_pe_core_gigantic_patch14_448,448,43.77,11697.734,512,1882.03,2060.12,1774.21\r\nvit_pe_spatial_gigantic_patch14_448,448,43.69,11717.984,512,1851.89,2055.25,1771.04\r\naimv2_3b_patch14_448,448,31.39,6115.838,192,2723.02,2939.61,1462.76\r\nsamvit_large_patch16,1024,28.91,553.331,16,308.28,1493.86,2553.78\r\nvit_so400m_patch14_siglip_gap_896,896,28.35,9029.075,256,416.87,2731.49,8492.88\r\nsamvit_huge_patch16,1024,18.21,878.709,16,637.03,2982.23,3428.16\r\n"
  },
  {
    "path": "results/benchmark-infer-amp-nchw-pt291-cu130-5090.csv",
    "content": "model,infer_img_size,infer_samples_per_sec,infer_step_time,infer_batch_size,param_count,infer_gmacs,infer_macts\r\ntest_vit,160,260723.52,3.909,1024,0.37,0.04,0.48\r\ntest_vit2,160,208210.41,4.898,1024,0.46,0.05,0.64\r\ntest_byobnet,160,162267.64,6.29,1024,0.46,0.03,0.43\r\ntest_vit4,160,155409.56,6.568,1024,1.02,0.11,1.07\r\ntest_vit3,160,147927.84,6.904,1024,0.93,0.09,1.0\r\ntest_efficientnet,160,144711.9,7.059,1024,0.36,0.06,0.55\r\ntest_resnet,160,109086.77,9.369,1024,0.47,0.1,0.64\r\ntinynet_e,106,98412.84,10.389,1024,2.04,0.03,0.69\r\nmobilenetv3_small_050,224,97600.62,10.472,1024,1.59,0.03,0.92\r\nefficientvit_m0,224,95354.79,10.717,1024,2.33,0.08,0.91\r\ntest_mambaout,160,92461.66,11.052,1024,0.45,0.03,0.53\r\nlcnet_035,224,90296.44,11.323,1024,1.64,0.03,1.04\r\ntest_convnext,160,90160.39,11.341,1024,0.27,0.03,0.58\r\ntest_convnext3,160,88179.63,11.591,1024,0.47,0.05,0.63\r\ntest_convnext2,160,87489.15,11.688,1024,0.48,0.05,0.63\r\nmobilenetv4_conv_small_035,224,82111.75,12.451,1024,1.91,0.05,0.98\r\nlcnet_050,224,79812.95,12.813,1024,1.88,0.05,1.26\r\nmobilenetv3_small_075,224,71451.28,14.312,1024,2.04,0.05,1.3\r\nmobilenetv4_conv_small_050,224,70112.73,14.588,1024,2.24,0.07,1.18\r\ntest_efficientnet_gn,160,67822.57,15.079,1024,0.36,0.06,0.55\r\nmobilenetv4_conv_small_035,256,66878.56,15.294,1024,1.91,0.06,1.28\r\nefficientvit_m1,224,65663.06,15.576,1024,2.96,0.17,1.33\r\nshvit_s1,224,64212.24,15.928,1024,6.31,0.24,1.39\r\nmobilenetv3_small_100,224,63997.85,15.983,1024,2.54,0.06,1.42\r\ntest_mambaout,192,63098.23,16.206,1024,0.45,0.04,0.77\r\nefficientvit_m2,224,60607.33,16.876,1024,4.17,0.2,1.47\r\ntest_efficientnet_evos,160,60376.36,16.939,1024,0.36,0.06,0.55\r\nmobilenetv4_conv_small_050,256,57897.91,17.668,1024,2.24,0.09,1.55\r\ntest_nfnet,160,56421.19,18.129,1024,0.38,0.29,1.2\r\ntinynet_d,152,56105.01,18.231,1024,2.34,0.05,1.42\r\nefficientvit_m3,224,54904.13,18.628,1024,6.88,0.26,1.62\r\ntf_mobilenetv3_small_minimal_100,224,54705.89,1
8.699,1024,2.04,0.06,1.41\r\ntf_mobilenetv3_small_075,224,53747.61,19.033,1024,2.04,0.05,1.3\r\nefficientvit_m4,224,52416.38,19.517,1024,8.78,0.3,1.7\r\nshvit_s2,224,52247.89,19.579,1024,11.45,0.37,1.6\r\nrepghostnet_050,224,50713.41,20.174,1024,2.31,0.05,2.02\r\nlcnet_075,224,50364.57,20.312,1024,2.36,0.1,1.99\r\ntf_mobilenetv3_small_100,224,49265.18,20.768,1024,2.54,0.06,1.42\r\nstarnet_s050,224,48330.27,21.167,1024,0.54,0.09,1.57\r\nlevit_128s,224,44458.39,23.012,1024,7.76,0.3,1.88\r\nlevit_conv_128s,224,44285.19,23.097,1024,7.76,0.3,1.88\r\nmobilenetv4_conv_small,224,43921.23,23.297,1024,3.77,0.19,1.97\r\nmnasnet_small,224,43438.98,23.556,1024,2.03,0.07,2.16\r\nfasternet_t0,224,42241.6,24.223,1024,3.91,0.34,1.97\r\nrepghostnet_058,224,40462.98,25.29,1024,2.54,0.06,2.59\r\nvit_small_patch32_224,224,40201.0,25.448,1024,22.88,1.15,2.5\r\nghostnet_050,224,40005.53,25.572,1024,2.59,0.05,1.77\r\nlcnet_100,224,39774.88,25.724,1024,2.95,0.16,2.52\r\nresnet10t,176,39571.11,25.861,1024,5.44,0.7,1.51\r\ntest_efficientnet_ln,160,39058.29,26.2,1024,0.36,0.06,0.55\r\nresnet18,160,38116.15,26.847,1024,11.69,0.93,1.27\r\nmobilenetv2_035,224,37902.6,26.999,1024,1.68,0.07,2.86\r\nregnetx_002,224,37681.27,27.153,1024,2.68,0.2,2.16\r\nregnety_002,224,35670.44,28.689,1024,3.16,0.2,2.17\r\nshvit_s3,224,35378.11,28.925,1024,14.21,0.6,2.33\r\nefficientvit_b0,224,35181.28,29.087,1024,3.41,0.1,2.87\r\nmobilenetv4_conv_small,256,34792.76,29.414,1024,3.77,0.25,2.57\r\nefficientvit_m5,224,34092.95,30.017,1024,12.44,0.52,2.41\r\npit_ti_224,224,33898.71,30.188,1024,4.85,0.7,6.19\r\npit_ti_distilled_224,224,33673.03,30.386,1024,5.1,0.71,6.23\r\nmnasnet_050,224,32925.05,31.08,1024,2.22,0.11,3.07\r\nrepghostnet_080,224,32615.57,31.376,1024,3.27,0.1,3.22\r\nlevit_conv_128,224,30433.6,33.625,1024,9.19,0.41,2.71\r\nlevit_128,224,30191.8,33.891,1024,9.19,0.41,2.71\r\ntinynet_c,184,30056.86,34.047,1024,2.46,0.11,2.87\r\nmobilenetv2_050,224,29903.33,34.226,1024,1.97,0.1,3.64\r\nstarnet_s100,224,29864.
01,34.27,1024,1.04,0.19,2.68\r\nrepvgg_a0,224,29802.26,34.314,1024,8.31,1.36,1.79\r\nmixer_s32_224,224,29768.8,34.378,1024,19.1,1.0,2.28\r\nsemnasnet_050,224,29663.34,34.502,1024,2.08,0.11,3.44\r\nmobileone_s0,224,29328.65,34.894,1024,2.08,0.28,3.79\r\nfasternet_t1,224,27956.54,36.605,1024,7.6,0.85,3.15\r\nstarnet_s150,224,27526.59,37.181,1024,1.56,0.23,2.75\r\nlevit_conv_192,224,26633.44,38.424,1024,10.92,0.66,3.2\r\nrepghostnet_100,224,26621.79,38.448,1024,4.06,0.15,3.98\r\nlevit_192,224,26454.15,38.688,1024,10.92,0.66,3.2\r\nvit_medium_patch32_clip_224,224,26354.94,38.831,1024,39.69,2.0,3.34\r\ndeit_tiny_patch16_224,224,26244.36,38.991,1024,5.72,1.26,5.97\r\nvit_tiny_patch16_224,224,26140.26,39.15,1024,5.72,1.26,5.97\r\ndeit_tiny_distilled_patch16_224,224,26083.76,39.238,1024,5.91,1.27,6.01\r\nlcnet_150,224,25925.04,39.479,1024,4.5,0.34,3.79\r\ngernet_s,224,25861.26,39.579,1024,8.17,0.75,2.65\r\ncs3darknet_focus_s,256,25698.66,39.827,1024,3.27,0.69,2.7\r\nregnetx_004,224,25206.96,40.605,1024,5.16,0.4,3.14\r\nmobilenetv3_large_075,224,25060.02,40.838,1024,3.99,0.16,4.0\r\nresnet10t,224,24922.63,41.064,1024,5.44,1.1,2.43\r\ncs3darknet_s,256,24412.63,41.923,1024,3.28,0.72,2.97\r\nregnetx_004_tv,224,24247.98,42.21,1024,5.5,0.42,3.17\r\nrepghostnet_111,224,24016.74,42.618,1024,4.52,0.18,4.38\r\nresnet34,160,23608.05,43.35,1024,21.8,1.87,1.91\r\nghostnetv3_050,224,23463.85,43.62,1024,2.85,0.05,2.28\r\nvit_tiny_r_s16_p8_224,224,23061.24,44.382,1024,6.34,0.44,2.06\r\nmobilenetv4_conv_small,320,23012.9,44.478,1024,3.77,0.39,4.01\r\nvit_xsmall_patch16_clip_224,224,22563.55,45.357,1024,8.28,1.79,6.65\r\nmobilenetv3_rw,224,22479.89,45.533,1024,5.48,0.23,4.41\r\nconvnext_zepto_rms,224,22044.12,46.432,1024,2.16,0.3,2.75\r\nmobilenetv3_large_100,224,22035.55,46.453,1024,5.48,0.23,4.41\r\nhardcorenas_a,224,21962.16,46.606,1024,5.26,0.23,4.38\r\npit_xs_224,224,21580.01,47.429,1024,10.62,1.4,7.71\r\nese_vovnet19b_slim_dw,224,21524.96,47.55,1024,1.9,0.4,5.28\r\npit_xs_distilled_224
,224,21435.99,47.748,1024,11.0,1.41,7.76\r\nnf_regnet_b0,192,21289.07,48.079,1024,8.76,0.37,3.15\r\nresnetv2_18,224,21288.64,48.083,1024,11.69,1.82,2.48\r\nrepvgg_a1,224,21259.42,48.145,1024,12.79,2.36,2.37\r\nshvit_s4,256,21175.32,48.338,1024,16.55,0.99,3.73\r\nmnasnet_075,224,21101.43,48.51,1024,3.17,0.23,4.77\r\nghostnet_100,224,20865.8,49.058,1024,5.18,0.15,3.55\r\nresnet14t,176,20793.01,49.227,1024,10.08,1.07,3.61\r\nrepghostnet_130,224,20777.0,49.265,1024,5.46,0.24,5.24\r\nmobilenetv1_100,224,20552.49,49.804,1024,4.23,0.58,5.04\r\ntf_mobilenetv3_large_075,224,20458.08,50.033,1024,3.99,0.16,4.0\r\nhardcorenas_b,224,20387.6,50.208,1024,5.18,0.26,5.09\r\nhardcorenas_c,224,20166.12,50.76,1024,5.52,0.28,5.01\r\nmobilenetv1_100h,224,20024.87,51.118,1024,5.28,0.63,5.09\r\nresnet18,224,19878.7,51.495,1024,11.69,1.82,2.48\r\nlevit_conv_256,224,19857.9,51.544,1024,18.86,1.13,4.23\r\nconvnext_zepto_rms_ols,224,19697.97,51.966,1024,2.16,0.34,3.15\r\ntf_mobilenetv3_large_minimal_100,224,19688.67,51.992,1024,3.92,0.22,4.4\r\ninception_next_atto,224,19494.7,52.507,1024,4.16,0.5,3.63\r\nlevit_256,224,19413.1,52.722,1024,18.86,1.13,4.23\r\nregnety_004,224,19264.0,53.138,1024,4.34,0.41,3.89\r\ntinynet_b,188,19146.58,53.465,1024,3.73,0.21,4.44\r\nregnetx_006,224,18902.26,54.151,1024,6.2,0.61,3.98\r\nvit_betwixt_patch32_clip_224,224,18833.05,54.35,1024,61.41,3.09,4.17\r\nmobilenet_edgetpu_v2_xs,224,18750.41,54.594,1024,4.46,0.7,4.8\r\nmobilenetv2_075,224,18687.44,54.778,1024,2.64,0.22,5.86\r\nseresnet18,224,18672.4,54.817,1024,11.78,1.82,2.49\r\nsemnasnet_075,224,18659.29,54.862,1024,2.91,0.23,5.54\r\nmnasnet_100,224,18609.59,55.008,1024,4.38,0.33,5.46\r\nhardcorenas_d,224,18487.21,55.371,1024,7.5,0.3,4.93\r\ntf_mobilenetv3_large_100,224,18228.82,56.158,1024,5.48,0.23,4.41\r\nlegacy_seresnet18,224,18083.14,56.609,1024,11.78,1.82,2.49\r\nrepghostnet_150,224,18035.8,56.752,1024,6.55,0.31,6.0\r\nresnetv2_18d,224,17989.03,56.906,1024,11.71,2.06,3.29\r\ntf_efficientnetv2_b0,192,17927.
34,57.102,1024,7.14,0.54,3.51\r\nedgenext_xx_small,256,17670.7,57.927,1024,1.33,0.26,3.33\r\nlevit_256d,224,17522.8,58.417,1024,26.16,1.39,4.93\r\nregnety_006,224,17444.05,58.681,1024,6.06,0.61,4.33\r\nlevit_conv_256d,224,17129.24,59.754,1024,26.16,1.39,4.93\r\nmobilenetv3_large_100,256,17055.2,60.017,1024,5.48,0.29,5.75\r\nstarnet_s2,224,16964.74,60.339,1024,3.68,0.55,4.73\r\nspnasnet_100,224,16937.72,60.434,1024,4.42,0.35,6.03\r\nresnet18d,224,16876.34,60.658,1024,11.71,2.06,3.29\r\nconvnext_atto,224,16758.22,61.083,1024,3.7,0.55,3.81\r\nmobilenetv2_100,224,16735.23,61.17,1024,3.5,0.31,6.68\r\nsemnasnet_100,224,16621.89,61.586,1024,3.89,0.32,6.23\r\nghostnet_130,224,16612.23,61.623,1024,7.36,0.24,4.6\r\nrepvgg_b0,224,16499.22,62.046,1024,14.34,3.06,3.07\r\nmobileone_s1,224,16486.6,62.09,1024,4.76,0.83,6.27\r\nrepvit_m0_9,224,16485.5,62.092,1024,5.07,0.82,6.17\r\nrepvit_m1,224,16471.62,62.149,1024,5.07,0.82,6.17\r\nstarnet_s1,224,16439.05,62.27,1024,2.87,0.42,4.99\r\ndla46_c,224,16367.92,62.543,1024,1.3,0.58,4.5\r\nmobilevit_xxs,256,16359.91,62.57,1024,1.27,0.42,8.34\r\nhardcorenas_f,224,16297.96,62.806,1024,8.2,0.35,5.57\r\ncs3darknet_focus_s,320,16280.93,62.87,1024,3.27,1.08,4.22\r\nhardcorenas_e,224,16029.93,63.86,1024,8.07,0.35,5.65\r\nconvnext_atto_ols,224,15939.96,64.222,1024,3.7,0.58,4.11\r\nmobilenetv1_125,224,15839.87,64.626,1024,6.27,0.89,6.3\r\ncrossvit_tiny_240,240,15823.5,64.687,1024,7.01,1.57,9.08\r\nese_vovnet19b_slim,224,15804.9,64.772,1024,3.17,1.69,3.52\r\nmobilenet_edgetpu_100,224,15771.14,64.911,1024,4.09,1.0,5.75\r\nhgnetv2_b0,224,15737.65,65.047,1024,6.0,0.33,2.12\r\nxcit_nano_12_p16_224,224,15663.58,65.352,1024,3.05,0.56,4.17\r\nfasternet_t2,224,15371.18,66.6,1024,14.98,1.91,4.73\r\nmobilenetv1_100,256,15369.48,66.602,1024,4.23,0.76,6.59\r\nefficientnet_lite0,224,15306.46,66.876,1024,4.65,0.4,6.74\r\nmobilenetv4_conv_medium,224,15152.93,67.553,1024,9.72,0.84,5.8\r\nfbnetc_100,224,15071.51,67.922,1024,5.57,0.4,6.51\r\nmobilenetv1_100h,256,1503
2.49,68.101,1024,5.28,0.82,6.65\r\nregnetx_008,224,14979.69,68.337,1024,7.26,0.81,5.15\r\ncrossvit_9_240,240,14956.86,68.44,1024,8.55,1.85,9.52\r\ntinynet_a,192,14878.44,68.806,1024,6.19,0.35,5.41\r\ntf_efficientnetv2_b0,224,14874.46,68.825,1024,7.14,0.73,4.77\r\nregnety_008,224,14722.78,69.527,1024,6.26,0.81,5.25\r\nconvnext_femto,224,14445.48,70.869,1024,5.22,0.79,4.57\r\nrepvit_m1_0,224,14298.96,71.586,1024,6.81,1.11,7.19\r\ntf_efficientnetv2_b1,192,14098.56,72.609,1024,8.14,0.76,4.59\r\nvit_base_patch32_clip_224,224,14022.89,72.998,1024,88.22,4.41,5.01\r\nvit_base_patch32_224,224,13994.59,73.144,1024,88.22,4.41,5.01\r\nregnety_008_tv,224,13962.48,73.321,1024,6.43,0.84,5.42\r\nconvnext_femto_ols,224,13846.52,73.931,1024,5.23,0.82,4.87\r\nedgenext_xx_small,288,13812.32,74.116,1024,1.33,0.33,4.21\r\ndla46x_c,224,13800.82,74.18,1024,1.07,0.54,5.66\r\nresnetblur18,224,13657.0,74.956,1024,11.69,2.34,3.39\r\ncrossvit_9_dagger_240,240,13643.22,75.032,1024,8.78,1.99,9.97\r\nconvnext_atto_rms,224,13633.18,75.092,1024,3.69,0.55,3.81\r\nmobilenetv4_hybrid_medium_075,224,13466.91,76.019,1024,7.31,0.66,5.65\r\nrepghostnet_200,224,13454.15,76.092,1024,9.77,0.53,7.96\r\nrepvit_m1_1,224,13354.84,76.653,1024,8.24,1.34,7.82\r\nrepvit_m2,224,13316.41,76.878,1024,8.24,1.34,7.82\r\nefficientnet_b0,224,13282.18,77.07,1024,5.29,0.4,6.75\r\npvt_v2_b0,224,13265.0,77.17,1024,3.67,0.57,7.99\r\ndla60x_c,224,13182.93,77.656,1024,1.32,0.59,6.01\r\nrexnet_100,224,13149.12,77.858,1024,4.8,0.41,7.44\r\nefficientvit_b1,224,13146.45,77.871,1024,9.1,0.53,7.25\r\nmobilenet_edgetpu_v2_s,224,13110.87,78.084,1024,5.99,1.21,6.6\r\nvit_small_patch32_384,384,13089.46,78.209,1024,22.92,3.45,8.25\r\nmobileone_s2,224,13082.6,78.252,1024,7.81,1.3,7.56\r\nresnet14t,224,13062.52,78.37,1024,10.08,1.69,5.8\r\npit_s_224,224,13062.04,78.374,1024,23.46,2.88,11.56\r\nvisformer_tiny,224,13033.7,78.546,1024,10.32,1.27,5.72\r\npit_s_distilled_224,224,13016.95,78.645,1024,24.04,2.9,11.64\r\nfbnetv3_b,224,12996.05,78.772,
1024,8.6,0.42,6.97\r\nmnasnet_140,224,12973.85,78.905,1024,7.12,0.6,7.71\r\nresnetv2_18,288,12934.74,79.148,1024,11.69,3.0,4.11\r\nrexnetr_100,224,12863.26,79.587,1024,4.88,0.43,7.72\r\nmobilevitv2_050,256,12793.32,80.019,1024,1.37,0.48,8.04\r\nmobilenetv2_110d,224,12750.08,80.294,1024,4.52,0.45,8.71\r\ntf_efficientnet_lite0,224,12723.97,80.459,1024,4.65,0.4,6.74\r\nresnetv2_34,224,12689.62,80.676,1024,21.8,3.67,3.74\r\nregnetz_005,224,12617.95,81.134,1024,7.12,0.52,5.86\r\ncs3darknet_focus_m,256,12470.21,82.091,1024,9.3,1.98,4.89\r\nrepvgg_a2,224,12400.22,82.561,1024,25.5,5.12,3.13\r\nmobilenetv4_conv_medium,256,12221.12,83.771,1024,9.72,1.1,7.58\r\nresnet34,224,12206.42,83.871,1024,21.8,3.67,3.74\r\nefficientnet_b1_pruned,240,12196.94,83.931,1024,6.33,0.4,6.21\r\nvit_base_patch32_clip_quickgelu_224,224,12176.16,84.078,1024,87.85,4.41,5.01\r\nstarnet_s3,224,12130.56,84.395,1024,5.75,0.76,6.66\r\nresnet18,288,12108.9,84.545,1024,11.69,3.01,4.11\r\ncs3darknet_m,256,12022.26,85.15,1024,9.31,2.08,5.28\r\nese_vovnet19b_dw,224,11959.06,85.606,1024,6.54,1.34,8.25\r\nskresnet18,224,11927.68,85.831,1024,11.96,1.82,3.24\r\nlevit_384,224,11920.09,85.881,1024,39.07,2.35,6.26\r\nhrnet_w18_small,224,11907.51,85.977,1024,13.19,1.61,5.72\r\nswiftformer_xs,224,11882.96,86.152,1024,3.48,0.61,6.45\r\nmobilenetv1_125,256,11832.02,86.525,1024,6.27,1.16,8.23\r\nghostnetv3_100,224,11679.72,87.655,1024,6.15,0.17,4.55\r\nselecsls42,224,11623.04,88.081,1024,30.35,2.94,4.62\r\nnf_regnet_b0,256,11610.61,88.173,1024,8.76,0.64,5.58\r\nresnet50,160,11604.38,88.223,1024,25.56,2.1,5.67\r\nselecsls42b,224,11598.51,88.266,1024,32.46,2.98,4.62\r\nhgnetv2_b1,224,11573.75,88.455,1024,6.34,0.49,2.73\r\nsemnasnet_140,224,11532.35,88.773,1024,6.11,0.6,8.87\r\nlevit_conv_384,224,11490.83,89.091,1024,39.07,2.35,6.26\r\nresnetv2_34d,224,11425.64,89.605,1024,21.82,3.91,4.54\r\nmobilenetv2_140,224,11397.26,89.824,1024,6.11,0.6,9.57\r\nseresnet18,288,11387.63,89.902,1024,11.78,3.01,4.11\r\nseresnet34,224,11364.
36,90.081,1024,21.96,3.67,3.74\r\ntf_efficientnet_b0,224,11319.39,90.443,1024,5.29,0.4,6.75\r\nedgenext_x_small,256,11298.03,90.611,1024,2.34,0.54,5.93\r\nconvnext_pico,224,11240.88,91.074,1024,9.05,1.37,6.1\r\nfastvit_t8,256,11221.34,91.236,1024,4.0,0.69,6.59\r\nmixer_b32_224,224,11069.68,92.486,1024,60.29,3.24,6.29\r\nghostnetv2_100,224,11049.98,92.651,1024,6.16,0.18,4.55\r\ngernet_m,224,11040.03,92.735,1024,21.14,3.02,5.24\r\ntf_efficientnetv2_b2,208,11033.86,92.785,1024,10.1,1.06,6.0\r\nvit_base_patch32_clip_256,256,11017.14,92.927,1024,87.86,5.76,6.65\r\nmixnet_s,224,11015.26,92.943,1024,4.13,0.25,6.25\r\nresnet34d,224,11002.49,93.051,1024,21.82,3.91,4.54\r\nfbnetv3_d,224,10973.08,93.3,1024,10.31,0.52,8.5\r\nvit_small_patch16_224,224,10972.65,93.302,1024,22.05,4.61,11.95\r\nlegacy_seresnet34,224,10963.74,93.372,1024,21.96,3.67,3.74\r\ndeit_small_patch16_224,224,10956.91,93.437,1024,22.05,4.61,11.95\r\nresnet50d,160,10937.98,93.597,1024,25.58,2.22,6.08\r\nresnetv2_18d,288,10921.58,93.735,1024,11.71,3.4,5.43\r\ndeit_small_distilled_patch16_224,224,10879.89,94.099,1024,22.44,4.63,12.02\r\nmobilenetv4_hybrid_medium,224,10748.52,95.243,1024,11.07,0.98,6.84\r\nconvnext_pico_ols,224,10738.12,95.338,1024,9.06,1.43,6.5\r\nefficientnet_b0,256,10514.1,97.374,1024,5.29,0.52,8.81\r\nefficientnet_lite1,240,10458.11,97.896,1024,5.42,0.62,10.14\r\ndla34,224,10351.84,98.895,1024,15.74,3.07,5.02\r\nmobilenet_edgetpu_v2_m,224,10317.16,99.226,1024,8.46,1.85,8.15\r\ntiny_vit_5m_224,224,10306.86,99.33,1024,12.08,1.27,11.25\r\nvit_pwee_patch16_reg1_gap_256,256,10305.62,99.343,1024,15.25,4.37,15.87\r\nresmlp_12_224,224,10300.54,99.391,1024,15.35,3.01,5.5\r\nconvnext_atto_rms,256,10274.3,99.647,1024,3.69,0.71,4.98\r\nresnet18d,288,10268.74,99.701,1024,11.71,3.41,5.43\r\nefficientvit_b1,256,10263.96,99.745,1024,9.1,0.69,9.46\r\nefficientnet_es_pruned,224,10182.96,100.54,1024,5.44,1.81,8.73\r\nefficientnet_es,224,10180.06,100.57,1024,5.44,1.81,8.73\r\nfbnetv3_b,256,10178.24,100.584,1024,
8.6,0.55,9.1\r\neva02_tiny_patch14_224,224,10165.01,100.718,1024,5.5,1.7,9.14\r\nseresnet50,160,10122.89,101.133,1024,28.09,2.1,5.69\r\nconvnext_atto,288,10035.15,102.021,1024,3.7,0.91,6.3\r\ntf_efficientnetv2_b1,240,10031.89,102.056,1024,8.14,1.21,7.34\r\nefficientnet_blur_b0,224,9998.68,102.395,1024,5.29,0.43,8.72\r\nvit_base_patch32_siglip_gap_256,256,9998.09,102.4,1024,87.47,5.67,6.54\r\nmobileone_s3,224,9983.55,102.549,1024,10.08,1.9,9.13\r\nselecsls60,224,9964.35,102.74,1024,30.67,3.59,5.52\r\nrexnetr_130,224,9955.07,102.843,1024,7.61,0.68,9.81\r\nselecsls60b,224,9938.53,103.014,1024,32.77,3.63,5.52\r\ndeit3_small_patch16_224,224,9930.64,103.09,1024,22.06,4.61,11.95\r\ncs3darknet_focus_m,288,9919.37,103.214,1024,9.3,2.51,6.19\r\nrexnet_130,224,9889.45,103.523,1024,7.56,0.68,9.71\r\nresnet50,176,9883.83,103.579,1024,25.56,2.62,6.92\r\nvit_base_patch32_siglip_256,256,9872.89,103.699,1024,94.55,5.75,6.64\r\nresnetaa34d,224,9842.38,104.019,1024,21.82,4.43,5.07\r\nresnet26,224,9841.66,104.024,1024,16.0,2.36,7.35\r\nvit_wee_patch16_reg1_gap_256,256,9839.29,104.05,1024,13.42,3.83,13.9\r\ntf_mixnet_s,224,9770.37,104.786,1024,4.13,0.25,6.25\r\necaresnet50t,160,9730.82,105.213,1024,25.57,2.21,6.04\r\nswiftformer_s,224,9679.18,105.759,1024,6.09,0.99,7.81\r\nmixer_s16_224,224,9657.24,106.015,1024,18.53,3.79,5.97\r\nxcit_tiny_12_p16_224,224,9645.51,106.139,1024,6.72,1.24,6.29\r\nconvnextv2_atto,224,9572.21,106.958,1024,3.71,0.55,3.81\r\nflexivit_small,240,9564.88,107.037,1024,22.06,5.35,14.18\r\ncs3darknet_m,288,9550.03,107.205,1024,9.31,2.63,6.69\r\nconvnext_atto_ols,288,9538.28,107.332,1024,3.7,0.96,6.8\r\nefficientnet_b0_g16_evos,224,9535.08,107.375,1024,8.11,1.01,7.42\r\nhgnetv2_b0,288,9506.66,107.695,1024,6.0,0.54,3.51\r\ntf_efficientnet_es,224,9490.96,107.874,1024,5.44,1.81,8.73\r\nresnetrs50,160,9470.05,108.104,1024,35.69,2.29,6.2\r\nmobilenetv4_conv_blur_medium,224,9403.61,108.873,1024,9.72,1.22,8.58\r\nefficientnet_b1,224,9315.96,109.899,1024,7.79,0.59,9.36\r\nmob
ilenetv2_120d,224,9315.03,109.911,1024,5.83,0.69,11.97\r\nrepvit_m3,224,9261.26,110.547,1024,10.12,1.86,11.43\r\nmobilenet_edgetpu_v2_l,224,9184.07,111.48,1024,10.92,2.55,9.05\r\nregnetx_016,224,9172.81,111.609,1024,9.19,1.62,7.93\r\ngmixer_12_224,224,9167.22,111.677,1024,12.7,2.67,7.26\r\nmambaout_femto,224,9124.82,112.201,1024,7.3,1.16,8.34\r\nresnet26d,224,9053.09,113.086,1024,16.01,2.6,8.15\r\ntf_efficientnet_lite1,240,9022.41,113.477,1024,5.42,0.62,10.14\r\nghostnetv3_130,224,8982.12,113.986,1024,8.95,0.28,5.9\r\necaresnet50d_pruned,224,8873.6,115.377,1024,19.94,2.53,6.43\r\nedgenext_x_small,288,8832.25,115.916,1024,2.34,0.68,7.5\r\ngmlp_ti16_224,224,8813.24,116.168,1024,5.87,1.34,7.55\r\ndarknet17,256,8798.82,116.353,1024,14.3,3.26,7.18\r\nresnext50_32x4d,160,8752.61,116.968,1024,25.03,2.17,7.35\r\nhgnetv2_b4,224,8718.39,117.432,1024,19.8,2.75,6.7\r\nnf_resnet26,224,8716.98,117.448,1024,16.0,2.41,7.35\r\nefficientformer_l1,224,8704.26,117.623,1024,12.29,1.3,5.53\r\nnf_regnet_b1,256,8702.42,117.646,1024,10.22,0.82,7.27\r\nmobilenetv4_conv_aa_medium,256,8702.27,117.65,1024,9.72,1.58,10.3\r\nconvnext_femto,288,8684.14,117.895,1024,5.22,1.3,7.56\r\nfbnetv3_d,256,8665.08,118.156,1024,10.31,0.68,11.1\r\nnf_regnet_b2,240,8644.82,118.431,1024,14.31,0.97,7.23\r\ntiny_vit_11m_224,224,8632.68,118.599,1024,20.35,2.03,13.49\r\nrexnetr_150,224,8631.61,118.614,1024,9.78,0.89,11.13\r\nghostnetv2_130,224,8630.64,118.628,1024,8.96,0.28,5.9\r\nstarnet_s4,224,8584.87,119.255,1024,7.48,1.05,9.56\r\nrexnet_150,224,8540.8,119.875,1024,9.73,0.9,11.21\r\nregnety_016,224,8430.92,121.438,1024,11.2,1.63,8.04\r\nmobilenetv4_hybrid_medium,256,8418.61,121.612,1024,11.07,1.29,9.01\r\nvit_base_patch32_plus_256,256,8343.45,122.711,1024,119.48,7.79,7.76\r\nresnetblur18,288,8302.8,123.307,1024,11.69,3.87,5.6\r\nconvnext_femto_ols,288,8292.39,123.462,1024,5.23,1.35,8.06\r\nedgenext_small,256,8288.16,123.515,1024,5.59,1.26,9.07\r\nefficientnet_b2_pruned,260,8248.18,124.131,1024,8.31,0.73,9.13\r\nf
asternet_s,224,8222.47,124.519,1024,31.18,4.56,7.93\r\nmobilenetv4_conv_medium,320,8220.69,124.546,1024,9.72,1.71,11.84\r\nrepvit_m1_5,224,8220.0,124.55,1024,14.05,2.27,12.84\r\nefficientnet_b1,240,8144.32,125.712,1024,7.79,0.71,10.88\r\nrepvgg_b1g4,224,8136.69,125.827,1024,36.13,7.31,5.32\r\nconvnextv2_femto,224,8125.13,126.006,1024,5.23,0.79,4.57\r\ndarknet21,256,8112.01,126.211,1024,20.86,3.93,7.47\r\nresnest14d,224,8083.94,126.65,1024,10.61,2.76,7.33\r\nmobilevitv2_075,256,8083.58,126.656,1024,2.87,1.05,12.06\r\nefficientvit_b1,288,8041.28,127.322,1024,9.1,0.87,11.96\r\nmobilenet_edgetpu_v2_m,256,8032.57,127.459,1024,8.46,2.42,10.65\r\nconvnext_nano,224,8016.98,127.708,1024,15.59,2.46,8.37\r\nvit_relpos_small_patch16_224,224,7995.49,128.053,1024,21.98,4.59,13.05\r\nmambaout_kobe,224,7938.15,128.977,1024,9.14,1.52,10.0\r\nregnetz_005,288,7883.26,129.876,1024,7.12,0.86,9.68\r\nefficientnet_lite2,260,7875.49,130.005,1024,6.09,0.89,12.9\r\nvit_srelpos_small_patch16_224,224,7863.15,130.207,1024,21.97,4.59,12.16\r\nhgnetv2_b2,224,7862.11,130.226,1024,11.22,1.15,4.12\r\nvit_tiny_r_s16_p8_384,384,7832.79,130.71,1024,6.36,1.34,6.49\r\nsedarknet21,256,7812.85,131.043,1024,20.95,3.93,7.47\r\nmobilevit_xs,256,7774.12,131.693,1024,2.32,1.05,16.33\r\nmobilenetv3_large_150d,256,7769.47,131.776,1024,14.62,1.03,12.35\r\nvit_relpos_small_patch16_rpn_224,224,7762.68,131.893,1024,21.97,4.59,13.05\r\nresnetv2_34,288,7714.92,132.713,1024,21.8,6.07,6.18\r\npoolformer_s12,224,7686.93,133.191,1024,11.92,1.82,5.53\r\nresnext50_32x4d,176,7658.59,133.687,1024,25.03,2.71,8.97\r\nregnetz_b16,224,7658.39,133.686,1024,9.72,1.45,9.95\r\nmixnet_m,224,7654.02,133.766,1024,5.01,0.36,8.19\r\nvit_tiny_patch16_384,384,7617.54,134.402,1024,5.79,4.7,25.39\r\nfastvit_t12,256,7574.05,135.179,1024,7.51,1.39,9.57\r\nmobilenetv4_conv_blur_medium,256,7552.43,135.565,1024,9.72,1.59,11.2\r\nswiftformer_l1,224,7522.44,136.105,1024,12.06,1.6,10.07\r\nhgnet_tiny,224,7502.37,136.464,1024,14.74,4.54,6.36\r\nnf_ecar
esnet26,224,7434.75,137.712,1024,16.0,2.41,7.36\r\nresnet34,288,7420.55,137.978,1024,21.8,6.07,6.18\r\npvt_v2_b1,224,7408.67,138.196,1024,14.01,2.12,15.39\r\nresnext26ts,256,7405.76,138.249,1024,10.3,2.43,10.52\r\nnf_seresnet26,224,7403.29,138.298,1024,17.4,2.41,7.36\r\nefficientnet_b1,256,7390.61,138.536,1024,7.79,0.77,12.22\r\nefficientnetv2_rw_t,224,7371.59,138.89,1024,13.65,1.93,9.94\r\ngernet_l,256,7325.44,139.767,1024,31.08,4.57,8.0\r\ntf_efficientnetv2_b2,260,7320.43,139.863,1024,10.1,1.72,9.84\r\nconvnext_nano_ols,224,7310.55,140.044,1024,15.65,2.65,9.38\r\ncs3darknet_focus_l,256,7299.44,140.264,1024,21.15,4.66,8.03\r\nghostnetv3_160,224,7282.75,140.587,1024,12.38,0.41,7.23\r\nefficientnet_cc_b0_4e,224,7264.13,140.949,1024,13.31,0.41,9.42\r\nvit_medium_patch16_clip_224,224,7258.95,141.048,1024,38.59,8.0,15.93\r\ndpn48b,224,7254.9,141.121,1024,9.13,1.69,8.92\r\ncrossvit_small_240,240,7249.21,141.236,1024,26.86,5.63,18.17\r\ntf_efficientnet_b1,240,7243.19,141.349,1024,7.79,0.71,10.88\r\ngc_efficientnetv2_rw_t,224,7238.75,141.441,1024,13.68,1.94,9.97\r\nefficientnet_cc_b0_8e,224,7231.15,141.591,1024,24.01,0.42,9.42\r\nresnet101,160,7221.79,141.768,1024,44.55,4.0,8.28\r\neca_resnext26ts,256,7201.53,142.172,1024,10.3,2.43,10.52\r\nseresnext26ts,256,7196.0,142.278,1024,10.39,2.43,10.52\r\nese_vovnet19b_dw,288,7157.1,143.055,1024,6.54,2.22,13.63\r\nvit_relpos_base_patch32_plus_rpn_256,256,7151.07,143.167,1024,119.42,7.68,8.01\r\ngcresnext26ts,256,7123.74,143.72,1024,10.48,2.43,10.53\r\nedgenext_small_rw,256,7112.07,143.957,1024,7.83,1.58,9.51\r\nresnet26t,256,7083.53,144.539,1024,16.01,3.35,10.52\r\ntf_mixnet_m,224,7082.67,144.558,1024,5.01,0.36,8.19\r\necaresnet101d_pruned,224,7075.79,144.697,1024,24.88,3.48,7.69\r\nmobilenetv4_conv_large,256,7049.14,145.243,1024,32.59,2.86,12.14\r\ncs3darknet_l,256,7034.33,145.548,1024,21.16,4.86,8.55\r\ntresnet_m,224,7024.17,145.761,1024,31.39,5.75,7.31\r\nhgnetv2_b1,288,7022.75,145.792,1024,6.34,0.82,4.51\r\nefficientnet_b2,256
,7012.92,145.995,1024,9.11,0.89,12.81\r\nghostnetv2_160,224,7005.57,146.148,1024,12.39,0.42,7.23\r\necaresnetlight,224,6987.71,146.52,1024,30.16,4.11,8.42\r\nskresnet34,224,6983.57,146.607,1024,22.28,3.67,5.13\r\nlegacy_seresnext26_32x4d,224,6972.89,146.829,1024,16.79,2.49,9.39\r\nresnetv2_34d,288,6947.74,147.368,1024,21.82,6.46,7.51\r\nlevit_512,224,6930.6,147.728,1024,95.08,5.62,10.22\r\nseresnet34,288,6915.84,148.044,1024,21.96,6.07,6.18\r\ntf_efficientnetv2_b3,240,6905.76,148.26,1024,14.36,1.93,9.95\r\nmobileone_s4,224,6901.33,148.355,1024,14.84,2.98,11.81\r\ntf_efficientnet_lite2,260,6876.66,148.889,1024,6.09,0.89,12.9\r\ncoat_lite_tiny,224,6829.9,149.909,1024,5.72,1.6,11.65\r\nfastvit_s12,256,6791.25,150.752,1024,9.43,1.8,10.82\r\nnf_regnet_b1,288,6787.78,150.836,1024,10.22,1.02,9.2\r\nconvnext_pico,288,6779.11,151.026,1024,9.05,2.27,10.08\r\nvit_small_resnet26d_224,224,6750.12,151.677,1024,63.61,5.07,11.12\r\ncrossvit_15_240,240,6731.84,152.092,1024,27.53,5.81,19.77\r\nresnet34d,288,6693.96,152.952,1024,21.82,6.47,7.51\r\ntf_efficientnet_cc_b0_8e,224,6669.42,153.519,1024,24.01,0.42,9.42\r\ntf_efficientnet_cc_b0_4e,224,6668.62,153.537,1024,13.31,0.41,9.42\r\nresnetv2_50,224,6664.55,153.628,1024,25.55,4.11,11.11\r\nefficientnet_b3_pruned,300,6647.21,154.028,1024,9.86,1.04,11.86\r\nvit_little_patch16_reg4_gap_256,256,6646.29,154.044,1024,22.52,6.35,18.33\r\nlevit_conv_512,224,6640.92,154.176,1024,95.08,5.62,10.22\r\necaresnext50t_32x4d,224,6637.55,154.248,1024,15.41,2.7,10.09\r\necaresnext26t_32x4d,224,6636.64,154.268,1024,15.41,2.7,10.09\r\nvit_dwee_patch16_reg1_gap_256,256,6633.92,154.337,1024,13.43,3.83,17.6\r\nseresnext26t_32x4d,224,6625.54,154.53,1024,16.81,2.7,10.09\r\ncs3sedarknet_l,256,6610.24,154.886,1024,21.91,4.86,8.56\r\ndeit3_medium_patch16_224,224,6609.07,154.916,1024,38.85,8.0,15.93\r\nfastvit_sa12,256,6597.09,155.2,1024,11.55,1.94,11.24\r\nnf_regnet_b2,272,6587.41,155.427,1024,14.31,1.22,9.27\r\nlevit_512d,224,6587.33,155.43,1024,92.39,5.84,11.3\
r\nseresnext26d_32x4d,224,6555.46,156.185,1024,16.81,2.73,10.19\r\nconvnext_pico_ols,288,6481.95,157.954,1024,9.06,2.37,10.74\r\nconvit_tiny,224,6461.93,158.441,1024,5.71,1.26,7.94\r\ncoatnet_pico_rw_224,224,6448.22,158.778,1024,10.85,2.05,14.62\r\nrexnet_200,224,6421.33,159.444,1024,16.37,1.56,14.91\r\nhrnet_w18_small_v2,224,6412.83,159.66,1024,15.6,2.62,9.65\r\nvit_relpos_medium_patch16_cls_224,224,6401.68,159.938,1024,38.76,8.03,18.24\r\nvit_little_patch16_reg1_gap_256,256,6401.66,159.938,1024,22.52,6.27,18.06\r\nselecsls84,224,6397.67,160.036,1024,50.95,5.9,7.57\r\nhgnetv2_b3,224,6392.42,160.17,1024,16.29,1.78,5.07\r\nvgg11,224,6391.48,160.196,1024,132.86,7.61,7.44\r\neca_botnext26ts_256,256,6383.96,160.38,1024,10.59,2.46,11.6\r\nefficientvit_b2,224,6380.14,160.479,1024,24.33,1.6,14.62\r\nregnetx_032,224,6377.99,160.533,1024,15.3,3.2,11.37\r\ncoat_lite_mini,224,6375.91,160.581,1024,11.01,2.0,12.25\r\necaresnet26t,256,6358.01,161.036,1024,16.01,3.35,10.53\r\nresnet50,224,6356.02,161.077,1024,25.56,4.11,11.11\r\nrepvgg_b1,224,6328.15,161.79,1024,51.83,11.82,5.32\r\nrexnetr_200,224,6320.17,162.002,1024,16.52,1.59,15.11\r\nresnetv2_50t,224,6317.42,162.069,1024,25.57,4.32,11.82\r\nvovnet39a,224,6311.59,162.217,1024,22.6,7.09,6.73\r\ncrossvit_15_dagger_240,240,6307.86,162.312,1024,28.21,6.13,20.43\r\ndla60,224,6281.14,163.003,1024,22.04,4.26,10.16\r\nresnetv2_50d,224,6270.01,163.299,1024,25.57,4.35,11.92\r\neca_halonext26ts,256,6268.1,163.34,1024,10.76,2.44,11.46\r\nconvnextv2_pico,224,6217.41,164.675,1024,9.07,1.37,6.1\r\nlevit_conv_512d,224,6195.27,165.267,1024,92.39,5.84,11.3\r\nese_vovnet39b,224,6160.9,166.188,1024,24.57,7.09,6.74\r\nresnet101,176,6159.65,166.216,1024,44.55,4.92,10.08\r\nvit_small_patch16_rope_224,224,6137.26,166.827,1024,21.98,4.61,11.95\r\nvit_small_patch16_rope_mixed_224,224,6134.13,166.915,1024,21.99,4.61,12.85\r\neca_vovnet39b,224,6121.74,167.248,1024,22.6,7.09,6.74\r\nresnet32ts,256,6118.15,167.351,1024,17.96,4.63,11.58\r\nvit_small_patch16_
rope_ape_224,224,6105.01,167.711,1024,22.06,4.61,11.95\r\nvit_small_patch16_rope_mixed_ape_224,224,6099.54,167.858,1024,22.06,4.61,12.85\r\nhalonet26t,256,6096.22,167.953,1024,12.48,3.19,11.69\r\nbotnet26t_256,256,6085.66,168.243,1024,12.49,3.32,11.98\r\nwide_resnet50_2,176,6069.81,168.683,1024,68.88,7.29,8.97\r\ndpn68,224,6060.3,168.943,1024,12.61,2.35,10.47\r\nefficientnet_em,240,6052.31,169.172,1024,6.9,3.04,14.34\r\nvit_dpwee_patch16_reg1_gap_256,256,6049.0,169.264,1024,15.25,4.37,19.05\r\ndpn68b,224,6045.34,169.359,1024,12.61,2.35,10.47\r\nresnet33ts,256,6025.3,169.932,1024,19.68,4.76,11.66\r\nresnet50t,224,6023.44,169.981,1024,25.57,4.32,11.82\r\nresnet50c,224,5999.65,170.657,1024,25.58,4.35,11.92\r\ncoatnet_nano_cc_224,224,5992.21,170.866,1024,13.76,2.24,15.02\r\nresnet26,288,5988.04,170.982,1024,16.0,3.9,12.15\r\nresnetaa34d,288,5979.19,171.241,1024,21.82,7.33,8.38\r\nresnet50d,224,5968.65,171.539,1024,25.58,4.35,11.92\r\ncspresnet50,256,5958.19,171.84,1024,21.62,4.54,11.5\r\nmobilevit_s,256,5953.93,171.96,1024,5.58,2.03,19.94\r\nfbnetv3_g,240,5949.59,172.091,1024,16.62,1.28,14.87\r\nconvnext_tiny,224,5940.47,172.338,1024,28.59,4.47,13.44\r\nmixnet_l,224,5936.46,172.474,1024,7.33,0.58,10.84\r\nmobilenetv4_hybrid_large_075,256,5918.49,172.988,1024,22.75,2.06,11.64\r\nefficientnet_b1,288,5918.33,173.0,1024,7.79,0.97,15.46\r\nmobilevitv2_100,256,5916.69,173.044,1024,4.9,1.84,16.08\r\nbat_resnext26ts,256,5901.57,173.492,1024,10.73,2.53,12.51\r\ncoatnext_nano_rw_224,224,5859.53,174.739,1024,14.7,2.47,12.8\r\nresnest26d,224,5853.45,174.916,1024,17.07,3.64,9.97\r\nresnetaa50,224,5817.8,175.992,1024,25.56,5.15,11.64\r\nresnext26ts,288,5817.38,175.998,1024,10.3,3.07,13.31\r\ncoatnet_nano_rw_224,224,5814.57,176.086,1024,15.14,2.41,15.41\r\nseresnet33ts,256,5813.96,176.106,1024,19.78,4.76,11.66\r\ncs3darknet_focus_l,288,5808.07,176.285,1024,21.15,5.9,10.16\r\neca_resnet33ts,256,5791.66,176.785,1024,19.68,4.76,11.66\r\ntwins_pcpvt_small,224,5775.4,177.284,1024,24.11,3.8
3,18.08\r\ngcresnet33ts,256,5771.96,177.389,1024,19.88,4.76,11.68\r\ntf_efficientnet_em,240,5769.99,177.451,1024,6.9,3.04,14.34\r\nconvnextv2_atto,288,5751.54,178.013,1024,3.71,0.91,6.3\r\nlegacy_seresnet50,224,5751.02,178.034,1024,28.09,3.88,10.6\r\nregnetv_040,224,5734.75,178.54,1024,20.64,4.0,12.29\r\nregnety_040,224,5725.83,178.813,1024,20.65,4.0,12.29\r\nregnety_032,224,5712.76,179.225,1024,19.44,3.2,11.26\r\nmobilenetv4_conv_medium,384,5698.83,179.668,1024,9.72,2.46,17.05\r\nvit_base_resnet26d_224,224,5696.19,179.747,1024,101.4,6.97,13.16\r\nvgg11_bn,224,5679.57,180.277,1024,132.87,7.62,7.44\r\nseresnext26ts,288,5653.36,181.106,1024,10.39,3.07,13.32\r\neca_resnext26ts,288,5653.21,181.112,1024,10.3,3.07,13.32\r\ncspresnet50w,256,5631.95,181.799,1024,28.12,5.04,12.19\r\nefficientnet_b2,288,5628.75,181.896,1024,9.11,1.12,16.2\r\ncs3darknet_l,288,5628.39,181.909,1024,21.16,6.16,10.83\r\ngcresnext26ts,288,5599.22,182.863,1024,10.48,3.07,13.33\r\nmambaout_femto,288,5596.9,182.936,1024,7.3,1.91,13.79\r\ncspresnet50d,256,5592.85,183.067,1024,21.64,4.86,12.55\r\nhaloregnetz_b,224,5567.42,183.907,1024,11.68,1.97,11.94\r\ntf_efficientnet_b2,260,5563.24,184.038,1024,9.11,1.02,13.83\r\nvisformer_small,224,5537.59,184.894,1024,40.22,4.88,11.43\r\nseresnet50,224,5536.68,184.924,1024,28.09,4.11,11.13\r\nresnet50_clip_gap,224,5511.79,185.763,1024,23.53,5.39,12.44\r\nresnetaa50d,224,5505.81,185.961,1024,25.58,5.39,12.44\r\nresnet26d,288,5497.62,186.242,1024,16.01,4.29,13.48\r\nregnetx_040,224,5469.17,187.207,1024,22.12,3.99,12.2\r\nvit_relpos_medium_patch16_224,224,5467.01,187.283,1024,38.75,7.97,17.02\r\nresnetblur50,224,5443.85,188.083,1024,25.56,5.16,12.02\r\necaresnet50d_pruned,288,5434.65,188.397,1024,19.94,4.19,10.61\r\nefficientnet_b0_gn,224,5426.8,188.671,1024,5.29,0.42,6.75\r\nhgnetv2_b4,288,5419.3,188.934,1024,19.8,4.54,11.08\r\nrepvgg_b2g4,224,5393.28,189.842,1024,55.78,11.33,6.45\r\ndensenet121,224,5369.35,190.686,1024,7.98,2.87,6.9\r\nvovnet57a,224,5365.64,190.822,
1024,36.64,8.95,7.52\r\nvit_srelpos_medium_patch16_224,224,5364.47,190.864,1024,38.74,7.96,16.21\r\nres2net50_48w_2s,224,5360.17,191.013,1024,25.29,4.18,11.72\r\ntf_mixnet_l,224,5352.94,191.276,1024,7.33,0.58,10.84\r\ntiny_vit_21m_224,224,5325.64,192.256,1024,33.21,4.27,20.08\r\nvit_relpos_medium_patch16_rpn_224,224,5304.66,193.015,1024,38.73,7.97,17.02\r\nmobilenetv4_hybrid_medium,320,5303.42,193.061,1024,11.07,2.05,14.36\r\necaresnet50t,224,5300.89,193.155,1024,25.57,4.32,11.83\r\nseresnet50t,224,5300.38,193.174,1024,28.1,4.32,11.83\r\ncs3sedarknet_l,288,5276.29,194.052,1024,21.91,6.16,10.83\r\nedgenext_small,320,5270.26,194.275,1024,5.59,1.97,14.16\r\nresmlp_24_224,224,5263.71,194.518,1024,30.02,5.96,10.91\r\necaresnet50d,224,5258.17,194.717,1024,25.58,4.35,11.93\r\nresnet50_clip,224,5254.05,194.875,1024,38.32,6.14,12.98\r\nresnetblur50d,224,5177.79,197.746,1024,25.58,5.4,12.82\r\nresnetrs50,224,5174.24,197.884,1024,35.69,4.48,12.14\r\nxcit_tiny_24_p16_224,224,5163.38,198.295,1024,12.12,2.34,11.82\r\nresnet50s,224,5155.43,198.603,1024,25.68,5.47,13.52\r\nresnet152,160,5145.53,198.981,1024,60.19,5.9,11.51\r\nnf_regnet_b3,288,5135.66,199.363,1024,18.59,1.67,11.84\r\nconvnext_tiny_hnf,224,5116.62,200.108,1024,28.59,4.47,13.44\r\nmobilenetv3_large_150d,320,5088.72,201.209,1024,14.62,1.61,19.29\r\neca_nfnet_l0,224,5066.7,202.075,1024,24.14,4.35,10.47\r\ncoatnet_rmlp_nano_rw_224,224,5065.95,202.105,1024,15.15,2.62,20.34\r\nnfnet_l0,224,5060.98,202.311,1024,35.07,4.36,10.47\r\ndla60x,224,5058.09,202.42,1024,17.35,3.54,13.8\r\nhgnet_small,224,5056.54,202.49,1024,24.36,8.53,8.79\r\ninception_v3,299,5039.87,203.156,1024,23.83,5.73,8.97\r\nefficientvit_b2,256,5035.84,203.323,1024,24.33,2.09,19.03\r\ninception_next_tiny,224,5009.8,204.379,1024,28.06,4.19,11.98\r\nhgnetv2_b5,224,4985.95,205.357,1024,39.57,6.56,11.19\r\nxcit_nano_12_p16_384,384,4981.76,205.523,1024,3.05,1.64,12.15\r\nvit_medium_patch16_reg1_gap_256,256,4956.45,206.577,1024,38.88,10.63,22.26\r\nvit_medium_patch
16_gap_240,240,4946.98,206.963,1024,44.4,9.22,18.81\r\nefficientnet_lite3,300,4943.54,207.118,1024,8.2,1.65,21.85\r\nvit_medium_patch16_reg4_gap_256,256,4935.51,207.453,1024,38.88,10.76,22.6\r\nedgenext_base,256,4903.01,208.827,1024,18.51,3.85,15.58\r\ndensenetblur121d,224,4900.95,208.917,1024,8.0,3.11,7.9\r\nconvnextv2_femto,288,4897.72,209.052,1024,5.23,1.3,7.56\r\nseresnetaa50d,224,4897.57,209.056,1024,28.11,5.4,12.46\r\ndla60_res2net,224,4891.3,209.33,1024,20.85,4.15,12.34\r\nvit_base_patch32_clip_384,384,4884.36,209.627,1024,88.3,13.06,16.5\r\nmambaout_kobe,288,4877.92,209.905,1024,9.14,2.5,16.53\r\nvit_base_patch32_384,384,4868.68,210.299,1024,88.3,13.06,16.5\r\nefficientvit_l1,224,4867.17,210.369,1024,52.65,5.27,15.85\r\nconvnext_nano,288,4860.89,210.637,1024,15.59,4.06,13.84\r\nfastvit_mci0,256,4844.9,211.335,1024,11.36,2.39,14.72\r\nlambda_resnet26rpt_256,256,4826.85,212.125,1024,10.99,3.16,11.87\r\nmambaout_tiny,224,4822.4,212.32,1024,26.55,4.49,16.68\r\nresnext50_32x4d,224,4821.12,212.375,1024,25.03,4.26,14.4\r\nres2net50_26w_4s,224,4820.44,212.404,1024,25.7,4.28,12.61\r\nnfnet_f0,192,4806.21,213.036,1024,71.49,7.21,10.16\r\nefficientnet_cc_b1_8e,240,4792.61,213.642,1024,39.72,0.75,15.44\r\ncrossvit_18_240,240,4767.69,214.758,1024,43.27,9.05,26.26\r\nhgnetv2_b2,288,4766.56,214.808,1024,11.22,1.89,6.8\r\nresnet32ts,288,4764.53,214.898,1024,17.96,5.86,14.65\r\ndla60_res2next,224,4754.31,215.36,1024,17.03,3.49,13.17\r\nresnest50d_1s4x24d,224,4747.28,215.682,1024,25.68,4.43,13.57\r\nvit_small_patch16_dinov3,256,4737.8,216.106,1024,21.59,6.26,17.03\r\nres2net50_14w_8s,224,4737.02,216.15,1024,25.06,4.21,13.28\r\nvit_small_patch16_dinov3_qkvb,256,4733.4,216.313,1024,21.6,6.26,17.03\r\ngcresnext50ts,256,4727.11,216.603,1024,15.67,3.75,15.46\r\nresnet33ts,288,4704.97,217.617,1024,19.68,6.02,14.75\r\nefficientnetv2_rw_t,288,4704.69,217.631,1024,13.65,3.19,16.42\r\ngmixer_24_224,224,4688.64,218.379,1024,24.72,5.28,14.45\r\nefficientnet_b0_g8_gn,224,4688.57,218.377,1
024,6.56,0.66,6.75\r\ngcresnet50t,256,4685.5,218.525,1024,25.9,5.42,14.67\r\ndarknetaa53,256,4675.03,219.017,1024,36.02,7.97,12.39\r\ncs3darknet_focus_x,256,4671.36,219.186,1024,35.02,8.03,10.69\r\ntwins_svt_small,224,4670.08,219.248,1024,24.06,2.94,13.75\r\nmobilenetv4_conv_large,320,4666.19,219.432,1024,32.59,4.47,18.97\r\nvit_dlittle_patch16_reg1_gap_256,256,4638.54,220.74,1024,22.52,6.27,22.69\r\neva02_small_patch14_224,224,4631.34,221.074,1024,21.62,6.14,18.28\r\ndavit_tiny,224,4625.3,166.018,768,28.36,4.54,18.89\r\nresnext50d_32x4d,224,4617.4,221.746,1024,25.05,4.5,15.2\r\nres2net50d,224,4612.77,221.972,1024,25.72,4.52,13.41\r\nregnetz_c16,256,4609.96,222.102,1024,13.46,2.51,16.57\r\nres2next50,224,4606.2,222.286,1024,24.67,4.2,13.71\r\ngc_efficientnetv2_rw_t,288,4603.08,222.44,1024,13.68,3.2,16.45\r\nregnetz_b16,288,4573.5,223.875,1024,9.72,2.39,16.43\r\nnf_ecaresnet50,224,4551.15,224.974,1024,25.56,4.21,11.13\r\ncoatnet_0_rw_224,224,4547.51,225.156,1024,27.44,4.43,18.73\r\nnf_seresnet50,224,4546.62,225.201,1024,28.09,4.21,11.13\r\nresnet26t,320,4540.45,225.507,1024,16.01,5.24,16.44\r\ntf_efficientnetv2_b3,300,4538.1,225.624,1024,14.36,3.04,15.74\r\nskresnet50,224,4535.32,225.757,1024,25.8,4.11,12.5\r\nseresnet33ts,288,4535.14,225.766,1024,19.78,6.02,14.76\r\nhgnet_tiny,288,4526.44,226.205,1024,14.74,7.51,10.51\r\neca_resnet33ts,288,4526.14,226.22,1024,19.68,6.02,14.76\r\nvit_large_patch32_224,224,4522.01,226.426,1024,305.51,15.39,13.3\r\ncrossvit_18_dagger_240,240,4514.16,226.814,1024,44.27,9.5,27.03\r\ntf_efficientnet_cc_b1_8e,240,4510.64,227.0,1024,39.72,0.75,15.44\r\nedgenext_small_rw,320,4509.08,227.072,1024,7.83,2.46,14.85\r\ncspresnext50,256,4508.8,227.087,1024,20.57,4.05,15.86\r\nconvnextv2_nano,224,4497.8,227.641,1024,15.62,2.46,8.37\r\ngcresnet33ts,288,4490.3,228.026,1024,19.88,6.02,14.78\r\nresnetrs101,192,4476.18,228.745,1024,63.62,6.04,12.7\r\nrepvit_m2_3,224,4475.08,228.797,1024,22.93,4.52,21.32\r\neva02_tiny_patch14_336,336,4460.11,229.57,1024,
5.76,4.68,27.16\r\nnextvit_small,224,4454.1,229.881,1024,31.74,5.8,18.44\r\nregnetx_080,224,4435.32,230.849,1024,39.57,8.02,14.06\r\nfbnetv3_g,288,4421.75,231.559,1024,16.62,1.77,21.09\r\nvit_pe_core_tiny_patch16_384,384,4418.17,231.751,1024,6.14,4.74,25.62\r\npvt_v2_b2,224,4414.17,231.953,1024,25.36,4.05,27.53\r\nvit_medium_patch16_gap_256,256,4411.99,232.074,1024,38.86,10.59,22.15\r\nconvnext_nano_ols,288,4410.17,232.17,1024,15.65,4.38,15.5\r\nswiftformer_l3,224,4408.28,232.267,1024,28.49,4.01,15.77\r\nresnet152,176,4397.53,232.838,1024,60.19,7.22,13.99\r\ngcvit_xxtiny,224,4388.33,233.323,1024,12.0,2.14,15.36\r\ncs3darknet_x,256,4385.96,233.45,1024,35.05,8.38,11.35\r\nxcit_small_12_p16_224,224,4381.9,233.667,1024,26.25,4.82,12.58\r\ndarknet53,256,4378.64,233.841,1024,41.61,9.31,12.39\r\nrepvgg_b3g4,224,4353.25,235.206,1024,75.63,16.06,7.55\r\nskresnet50d,224,4353.04,235.216,1024,25.82,4.36,13.31\r\ndensenet169,224,4350.51,235.352,1024,14.15,3.4,7.3\r\npit_b_224,224,4347.99,235.49,1024,73.76,12.42,32.94\r\nseresnext50_32x4d,224,4347.96,235.49,1024,27.56,4.26,14.42\r\nlegacy_seresnext50_32x4d,224,4344.28,235.689,1024,27.56,4.26,14.42\r\npit_b_distilled_224,224,4336.62,236.105,1024,74.79,12.5,33.07\r\nmaxvit_pico_rw_256,256,4335.94,236.144,1024,7.46,1.83,22.3\r\nmobilevitv2_125,256,4334.95,236.196,1024,7.48,2.86,20.1\r\nvit_base_resnet50d_224,224,4319.62,237.037,1024,110.97,8.73,16.92\r\nmaxvit_rmlp_pico_rw_256,256,4316.05,237.233,1024,7.52,1.85,24.86\r\nmixnet_xl,224,4284.54,238.976,1024,11.9,0.93,14.57\r\nresnetv2_101,224,4281.48,239.148,1024,44.54,7.83,16.23\r\ntf_efficientnet_lite3,300,4276.93,239.403,1024,8.2,1.65,21.85\r\necaresnet101d_pruned,288,4267.03,239.954,1024,24.88,5.75,12.71\r\npoolformerv2_s12,224,4264.76,240.087,1024,11.89,1.83,5.53\r\nsehalonet33ts,256,4257.77,240.478,1024,13.69,3.55,14.7\r\nefficientnet_x_b3,288,4247.97,241.034,1024,13.3,3.91,15.6\r\nswin_tiny_patch4_window7_224,224,4239.14,241.534,1024,28.29,4.51,17.06\r\nefficientnet_b3,288,4236.
06,241.71,1024,12.23,1.63,21.49\r\nrepvgg_b2,224,4223.14,242.453,1024,80.32,18.38,6.45\r\necaresnetlight,288,4216.81,242.816,1024,30.16,6.79,13.91\r\nresnet50_mlp,256,4211.48,243.119,1024,26.65,7.05,16.25\r\nresnet51q,256,4200.9,243.734,1024,35.7,6.38,16.55\r\ncoatnet_bn_0_rw_224,224,4189.04,244.422,1024,27.44,4.67,22.04\r\nnf_resnet50,256,4184.05,244.718,1024,25.56,5.46,14.52\r\nfocalnet_tiny_srf,224,4171.66,245.447,1024,28.43,4.42,16.32\r\ndla102,224,4155.62,246.387,1024,33.27,7.19,14.18\r\ncsatv2,512,4152.16,246.594,1024,11.1,1.39,9.17\r\nresnet101,224,4150.63,246.689,1024,44.55,7.83,16.23\r\ngmlp_s16_224,224,4142.63,247.159,1024,19.42,4.42,15.1\r\ncs3sedarknet_x,256,4141.42,247.233,1024,35.4,8.38,11.35\r\nnf_regnet_b3,320,4129.39,247.957,1024,18.59,2.05,14.61\r\nvgg13,224,4116.63,248.729,1024,133.05,11.31,12.25\r\nresnetv2_101d,224,4111.97,249.003,1024,44.56,8.07,17.04\r\necaresnet50t,256,4088.46,250.438,1024,25.57,5.64,15.45\r\nrexnet_300,224,4086.18,250.578,1024,34.71,3.44,22.4\r\necaresnet26t,320,4080.43,250.929,1024,16.01,5.24,16.44\r\nrdnet_tiny,224,4055.14,252.499,1024,23.86,5.06,15.98\r\ncoatnet_rmlp_0_rw_224,224,4044.62,253.151,1024,27.45,4.72,24.89\r\nseresnext26t_32x4d,288,4044.43,253.164,1024,16.81,4.46,16.68\r\nrexnetr_300,224,4034.93,253.759,1024,34.81,3.39,22.16\r\nresnetv2_50,288,4027.84,254.209,1024,25.55,6.79,18.37\r\nese_vovnet57b,256,4027.18,254.253,1024,38.61,11.69,9.82\r\nregnetx_064,224,4024.4,254.426,1024,26.21,6.49,16.37\r\ntwins_pcpvt_base,224,4013.43,255.12,1024,43.83,6.68,25.25\r\ncspdarknet53,256,4004.16,255.712,1024,27.64,6.57,16.81\r\nseresnext26d_32x4d,288,4001.53,255.877,1024,16.81,4.51,16.85\r\npoolformer_s24,224,4000.29,255.96,1024,21.39,3.41,10.68\r\nresnet101c,224,4000.23,255.959,1024,44.57,8.08,17.04\r\ndm_nfnet_f0,192,3995.33,256.277,1024,71.49,7.21,10.16\r\nresnest50d,224,3992.89,256.43,1024,27.48,5.4,14.36\r\nresnet101d,224,3991.46,256.524,1024,44.57,8.08,17.04\r\nvit_small_plus_patch16_dinov3,256,3986.37,256.851,1024,28.6
8,8.11,21.84\r\nvit_small_plus_patch16_dinov3_qkvb,256,3983.89,257.013,1024,28.69,8.11,21.84\r\npvt_v2_b2_li,224,3982.16,257.122,1024,22.55,3.91,27.6\r\nfasternet_m,224,3961.29,258.482,1024,53.52,8.74,15.34\r\ncs3sedarknet_xdw,256,3946.56,259.443,1024,21.6,5.97,17.18\r\nfocalnet_tiny_lrf,224,3936.56,260.104,1024,28.65,4.49,17.76\r\nvit_base_patch16_clip_224,224,3928.88,260.614,1024,86.57,17.58,23.9\r\nhiera_tiny_224,224,3928.79,260.617,1024,27.91,4.91,17.13\r\nresnetv2_34d,384,3924.94,260.874,1024,21.82,11.49,13.35\r\ndeit_base_patch16_224,224,3924.33,260.914,1024,86.57,17.58,23.9\r\nvit_base_patch16_224_miil,224,3923.6,260.965,1024,94.4,17.59,23.91\r\ndeit_base_distilled_patch16_224,224,3917.28,261.379,1024,87.34,17.68,24.05\r\nvit_base_patch16_224,224,3915.56,261.5,1024,86.57,17.58,23.9\r\nefficientvit_b2,288,3913.04,261.666,1024,24.33,2.64,24.03\r\nrexnetr_200,288,3883.37,263.666,1024,16.52,2.62,24.96\r\nhgnetv2_b3,288,3883.27,263.669,1024,16.29,2.94,8.38\r\nefficientvit_l2,224,3871.11,264.5,1024,63.71,6.97,19.58\r\nefficientformer_l3,224,3858.99,265.332,1024,31.41,3.93,12.01\r\ntresnet_v2_l,224,3853.37,265.716,1024,46.17,8.85,16.34\r\nresnet50,288,3849.63,265.98,1024,25.56,6.8,18.37\r\nxcit_nano_12_p8_224,224,3841.03,266.571,1024,3.05,2.16,15.71\r\nres2net50_26w_6s,224,3833.61,267.091,1024,37.05,6.33,15.28\r\nwide_resnet50_2,224,3822.36,267.875,1024,68.88,11.43,14.4\r\nresnet61q,256,3813.3,268.513,1024,36.85,7.8,17.01\r\nhrnet_w18_ssld,224,3810.89,268.683,1024,21.3,4.32,16.31\r\nswin_s3_tiny_224,224,3787.96,270.304,1024,28.33,4.64,19.13\r\nhrnet_w18,224,3783.13,270.649,1024,21.3,4.32,16.31\r\nresnet101_clip_gap,224,3775.18,271.22,1024,42.52,9.11,17.56\r\nresnetaa101d,224,3773.53,271.342,1024,44.57,9.12,17.56\r\ncoatnet_0_224,224,3769.17,203.733,768,25.04,4.58,24.01\r\nvit_base_mci_224,224,3758.85,272.403,1024,86.35,17.73,24.65\r\nconvnextv2_pico,288,3756.16,272.596,1024,9.07,2.27,10.08\r\nese_vovnet39b,288,3745.84,273.344,1024,24.57,11.71,11.13\r\nvit_small_r26_
s32_224,224,3739.14,273.839,1024,36.43,3.56,9.85\r\ngcresnext50ts,288,3713.54,275.727,1024,15.67,4.75,19.57\r\ndarknetaa53,288,3703.43,276.477,1024,36.02,10.08,15.68\r\nconvnext_small,224,3700.48,276.694,1024,50.22,8.71,21.56\r\ngcresnet50t,288,3689.72,277.506,1024,25.9,6.86,18.57\r\nlegacy_seresnet101,224,3683.94,277.942,1024,49.33,7.61,15.74\r\nefficientnetv2_s,288,3683.59,277.968,1024,21.46,4.75,20.13\r\ncoat_lite_small,224,3683.15,277.995,1024,19.84,3.96,22.09\r\nhieradet_small,256,3680.67,208.637,768,34.73,8.51,27.76\r\nvitamin_small_224,224,3675.74,139.265,512,22.17,5.92,26.38\r\nvit_base_patch32_clip_448,448,3674.24,278.675,1024,88.34,17.93,23.9\r\nsebotnet33ts_256,256,3658.0,139.941,512,13.7,3.89,17.46\r\nresnet101_clip,224,3656.71,280.009,1024,56.26,9.81,18.08\r\nresnet50t,288,3644.67,280.934,1024,25.57,7.14,19.53\r\ndeit3_base_patch16_224,224,3644.02,280.988,1024,86.59,17.58,23.9\r\nmixer_b16_224,224,3627.48,282.268,1024,59.88,12.62,14.53\r\nvgg13_bn,224,3627.41,282.273,1024,133.05,11.33,12.25\r\nfastvit_sa24,256,3624.41,282.508,1024,21.5,3.77,20.35\r\ndpn68b,288,3622.45,282.656,1024,12.61,3.89,17.3\r\nresnet50d,288,3616.32,283.138,1024,25.58,7.19,19.7\r\ncs3edgenet_x,256,3614.57,283.275,1024,47.82,11.53,12.92\r\nresnetblur101d,224,3610.33,283.61,1024,44.57,9.12,17.94\r\ntresnet_l,224,3604.54,284.066,1024,55.99,10.9,11.9\r\nresnet101s,224,3598.74,284.518,1024,44.67,9.19,18.64\r\nseresnet101,224,3591.44,285.1,1024,49.33,7.84,16.27\r\nmaxxvit_rmlp_nano_rw_256,256,3590.9,285.143,1024,16.78,4.37,26.05\r\nvit_betwixt_patch16_reg1_gap_256,256,3566.29,287.11,1024,60.4,16.32,27.83\r\nregnetv_064,224,3552.91,288.19,1024,30.58,6.39,16.41\r\nvit_betwixt_patch16_reg4_gap_256,256,3547.88,288.598,1024,60.4,16.52,28.24\r\nregnety_064,224,3547.73,288.613,1024,30.58,6.39,16.41\r\nwide_resnet101_2,176,3540.04,289.239,1024,126.89,14.31,13.18\r\nvit_relpos_base_patch16_clsgap_224,224,3537.84,289.423,1024,86.43,17.6,25.12\r\nconvnext_tiny,288,3534.92,289.658,1024,28.59,7.39,22
.21\r\nresmlp_36_224,224,3532.93,289.817,1024,44.69,8.91,16.33\r\nvit_relpos_base_patch16_cls_224,224,3529.76,290.079,1024,86.43,17.6,25.12\r\nmobilenetv4_hybrid_medium,384,3519.17,290.953,1024,11.07,3.01,21.18\r\nbeit_base_patch16_224,224,3518.97,290.975,1024,86.53,17.58,23.9\r\nvit_small_patch16_384,384,3517.64,291.08,1024,22.2,15.52,50.78\r\nnf_resnet101,224,3516.27,291.194,1024,44.55,8.01,16.23\r\nresnetaa50,288,3515.26,291.276,1024,25.56,8.52,19.24\r\nbeitv2_base_patch16_224,224,3513.46,291.426,1024,86.53,17.58,23.9\r\nmobilevitv2_150,256,3504.46,292.177,1024,10.59,4.09,24.11\r\nefficientnet_b3,320,3503.23,292.278,1024,12.23,2.01,26.52\r\necaresnet101d,224,3490.14,293.375,1024,44.57,8.08,17.07\r\nefficientnetv2_rw_s,288,3487.8,293.575,1024,23.94,4.91,21.41\r\ngcvit_xtiny,224,3483.45,293.936,1024,19.98,2.93,20.26\r\nmixer_l32_224,224,3471.02,294.995,1024,206.94,11.27,19.86\r\ncait_xxs24_224,224,3468.58,295.198,1024,11.96,2.53,20.29\r\ncs3darknet_x,288,3468.36,295.217,1024,35.05,10.6,14.36\r\ndarknet53,288,3465.3,295.479,1024,41.61,11.78,15.68\r\nvit_small_patch16_36x1_224,224,3457.58,296.14,1024,64.67,13.71,35.69\r\ndensenet201,224,3439.4,297.703,1024,20.01,4.34,7.85\r\nnextvit_base,224,3437.33,297.884,1024,44.79,8.29,23.71\r\nregnety_040,288,3435.45,298.043,1024,20.65,6.61,20.3\r\nflexivit_base,240,3428.7,298.631,1024,86.59,20.29,28.36\r\nregnetv_040,288,3425.97,298.869,1024,20.64,6.6,20.3\r\nswinv2_cr_tiny_224,224,3423.78,299.055,1024,28.33,4.66,28.45\r\nvgg16,224,3417.92,299.577,1024,138.36,15.47,13.56\r\nefficientvit_b3,224,3411.84,300.11,1024,48.65,3.99,26.9\r\nregnety_032,288,3401.72,300.998,1024,19.44,5.29,18.61\r\ntf_efficientnet_b3,300,3389.25,302.107,1024,12.23,1.87,23.83\r\nvit_small_resnet50d_s16_224,224,3387.43,302.27,1024,57.53,13.48,24.82\r\ncs3se_edgenet_x,256,3383.55,302.617,1024,50.72,11.53,12.94\r\ndla102x,224,3380.08,302.925,1024,26.31,5.89,19.42\r\nswinv2_cr_tiny_ns_224,224,3374.09,303.459,1024,28.33,4.66,28.45\r\nhalonet50ts,256,3367.79,304
.035,1024,22.73,5.3,19.2\r\nseresnet50,288,3364.52,304.33,1024,28.09,6.8,18.39\r\nmobilenetv4_conv_large,384,3360.3,304.715,1024,32.59,6.43,27.31\r\nvit_base_patch16_clip_quickgelu_224,224,3358.3,304.894,1024,86.19,17.58,23.9\r\nvit_base_patch16_siglip_gap_224,224,3336.94,306.85,1024,85.8,17.49,23.75\r\nresnetv2_50x1_bit,224,3331.77,307.324,1024,25.55,4.23,11.11\r\nese_vovnet39b_evos,224,3328.0,307.671,1024,24.58,7.07,6.74\r\nvit_base_patch16_siglip_224,224,3322.98,308.137,1024,92.88,17.73,24.06\r\nresnetaa50d,288,3321.66,308.262,1024,25.58,8.92,20.57\r\nregnety_080,224,3304.6,309.85,1024,39.18,8.0,17.97\r\nresnetblur50,288,3294.02,310.844,1024,25.56,8.52,19.87\r\nvit_base_patch16_gap_224,224,3279.71,312.202,1024,86.57,17.49,25.59\r\nresnet51q,288,3278.76,312.288,1024,35.7,8.07,20.94\r\nnf_resnet50,288,3276.97,312.458,1024,25.56,6.88,18.37\r\ncs3sedarknet_x,288,3274.25,312.722,1024,35.4,10.6,14.37\r\nsequencer2d_s,224,3260.81,314.011,1024,27.65,4.96,11.31\r\nmaxvit_nano_rw_256,256,3257.76,314.304,1024,15.45,4.46,30.28\r\nmaxvit_rmlp_nano_rw_256,256,3247.29,315.317,1024,15.5,4.47,31.92\r\nrepvgg_b3,224,3244.75,315.561,1024,110.96,26.21,7.55\r\ndensenet121,288,3233.29,316.683,1024,7.98,4.74,11.41\r\nvit_base_r26_s32_224,224,3231.49,316.86,1024,101.38,6.81,12.36\r\nvit_medium_patch16_rope_reg1_gap_256,256,3229.89,317.018,1024,38.74,10.63,22.26\r\necaresnet50t,288,3214.58,318.525,1024,25.57,7.14,19.55\r\nseresnet50t,288,3214.56,318.525,1024,28.1,7.14,19.55\r\nresnetv2_50d_gn,224,3193.59,320.616,1024,25.57,4.38,11.92\r\necaresnet50d,288,3188.39,321.142,1024,25.58,7.19,19.72\r\nxcit_tiny_12_p16_384,384,3186.17,321.366,1024,6.72,3.64,18.26\r\nvit_betwixt_patch16_gap_256,256,3183.45,321.64,1024,60.37,16.25,27.69\r\nhiera_small_224,224,3182.75,321.712,1024,35.01,6.42,20.75\r\ndeit3_small_patch16_384,384,3180.64,321.923,1024,22.21,15.52,50.78\r\npvt_v2_b3,224,3160.05,324.022,1024,45.24,6.92,37.7\r\ntf_efficientnetv2_s,300,3151.04,324.952,1024,21.46,5.35,22.73\r\nmaxxvitv2_nan
o_rw_256,256,3150.77,324.979,1024,23.7,6.26,23.05\r\nvit_small_patch16_18x2_224,224,3139.64,326.131,1024,64.67,13.71,35.69\r\nres2net101_26w_4s,224,3137.09,326.396,1024,45.21,8.1,18.45\r\nresnetblur50d,288,3131.46,326.98,1024,25.58,8.92,21.19\r\nese_vovnet99b,224,3123.5,327.817,1024,63.2,16.51,11.27\r\nresnext101_32x4d,224,3117.39,328.456,1024,44.18,8.01,21.23\r\nedgenext_base,320,3099.67,330.333,1024,18.51,6.01,24.32\r\nconvnext_tiny_hnf,288,3093.69,330.973,1024,28.59,7.39,22.21\r\nvit_relpos_base_patch16_224,224,3085.61,331.843,1024,86.43,17.51,24.97\r\nresnetv2_50d_frn,224,3080.64,332.375,1024,25.59,4.33,11.92\r\nregnety_080_tv,224,3075.39,332.939,1024,39.38,8.51,19.73\r\nhgnet_small,288,3072.33,333.274,1024,24.36,14.09,14.53\r\nregnetz_b16_evos,224,3068.65,333.674,1024,9.74,1.43,9.95\r\nres2net50_26w_8s,224,3061.23,334.477,1024,48.4,8.37,17.95\r\nhgnetv2_b5,288,3054.96,335.17,1024,39.57,10.84,18.5\r\neca_nfnet_l0,288,3052.89,335.397,1024,24.14,7.12,17.29\r\nvit_base_patch16_rpn_224,224,3052.47,335.444,1024,86.54,17.49,23.75\r\nconvnextv2_tiny,224,3048.39,335.892,1024,28.64,4.47,13.44\r\nnfnet_l0,288,3046.34,336.118,1024,35.07,7.13,17.29\r\nres2net101d,224,3044.24,336.351,1024,45.23,8.35,19.25\r\nvgg16_bn,224,3036.65,337.19,1024,138.37,15.5,13.56\r\nvit_base_patch16_xp_224,224,3035.85,337.283,1024,86.51,17.56,23.9\r\nvolo_d1_224,224,3021.62,338.866,1024,26.63,6.94,24.43\r\nresnext101_32x8d,176,3021.21,338.912,1024,88.79,10.33,19.37\r\nmambaout_small,224,3019.0,339.163,1024,48.49,8.96,27.72\r\nvit_mediumd_patch16_reg4_gap_256,256,3015.98,339.502,1024,64.11,17.87,37.57\r\nresnet101d,256,3011.55,339.999,1024,44.57,10.55,22.25\r\nregnetz_d32,256,3010.41,340.128,1024,27.58,5.98,23.74\r\nresnest50d_4s2x40d,224,3008.95,340.289,1024,30.42,4.4,17.94\r\nresnetv2_152,224,3004.88,340.751,1024,60.19,11.55,22.56\r\nlambda_resnet26t,256,3000.98,341.198,1024,10.96,3.02,11.87\r\nefficientvit_l2,256,2986.58,342.845,1024,63.71,9.09,25.49\r\nmambaout_small_rw,224,2979.17,343.697,102
4,48.5,8.96,27.72\r\nresnet61q,288,2978.45,343.779,1024,36.85,9.87,21.52\r\nregnetz_d8,256,2974.13,344.282,1024,23.37,3.97,23.74\r\nnf_regnet_b4,320,2972.78,344.437,1024,30.21,3.29,19.88\r\nseresnetaa50d,288,2966.46,345.169,1024,28.11,8.92,20.59\r\ndensenetblur121d,288,2950.53,347.033,1024,8.0,5.14,13.06\r\nresnet152,224,2945.65,347.611,1024,60.19,11.56,22.56\r\nskresnext50_32x4d,224,2943.22,347.898,1024,27.48,4.5,17.18\r\ninception_next_small,224,2942.06,348.032,1024,49.37,8.36,19.27\r\nvit_relpos_base_patch16_rpn_224,224,2937.54,348.568,1024,86.41,17.51,24.97\r\ndavit_small,224,2934.66,261.679,768,49.75,8.8,30.49\r\ndpn92,224,2932.79,349.132,1024,37.67,6.54,18.21\r\nregnetz_c16,320,2932.6,349.154,1024,13.46,3.92,25.88\r\nresnext50_32x4d,288,2927.78,349.728,1024,25.03,7.04,23.81\r\nresnetv2_152d,224,2926.33,349.902,1024,60.2,11.8,23.36\r\nbotnet50ts_256,256,2923.78,262.654,768,22.74,5.54,22.23\r\nvgg19,224,2919.65,350.706,1024,143.67,19.63,14.86\r\nvit_base_patch16_reg4_gap_256,256,2907.36,352.191,1024,86.62,23.5,33.89\r\ntwins_pcpvt_large,224,2906.25,352.324,1024,60.99,9.84,35.82\r\nnf_ecaresnet101,224,2905.0,352.471,1024,44.55,8.01,16.27\r\nresnet50_gn,224,2904.38,352.546,1024,25.56,4.14,11.11\r\nnf_seresnet101,224,2900.75,352.985,1024,49.33,8.02,16.27\r\nregnetz_040,256,2895.54,353.623,1024,27.12,4.06,24.19\r\nresnetv2_34d,448,2894.88,353.705,1024,21.82,15.64,18.16\r\nmobilevitv2_175,256,2886.89,354.683,1024,14.25,5.54,28.13\r\nconvit_small,224,2878.72,355.694,1024,27.78,5.76,17.87\r\nlegacy_xception,299,2877.51,355.84,1024,22.86,8.4,35.83\r\nregnetz_040_h,256,2872.79,356.425,1024,28.94,4.12,24.29\r\nresnet152c,224,2871.04,356.64,1024,60.21,11.8,23.36\r\nresnet152d,224,2864.62,357.443,1024,60.21,11.8,23.36\r\ncs3edgenet_x,288,2856.95,358.399,1024,47.82,14.59,16.36\r\nmaxvit_tiny_rw_224,224,2853.84,358.79,1024,29.06,5.11,33.11\r\ndla169,224,2834.32,361.263,1024,53.39,11.6,20.2\r\ncaformer_s18,224,2831.93,361.571,1024,26.34,4.13,19.39\r\ncrossvit_base_240,240,2817
.27,363.444,1024,105.03,21.22,36.33\r\nhalo2botnet50ts_256,256,2815.57,363.67,1024,22.64,5.02,21.78\r\nseresnext101_32x4d,224,2807.73,364.685,1024,48.96,8.02,21.26\r\nmobilenetv4_conv_aa_large,384,2804.36,365.123,1024,32.59,7.07,32.29\r\ncsatv2_21m,512,2802.95,365.305,1024,20.7,2.94,15.85\r\nresnext50d_32x4d,288,2802.87,365.313,1024,25.05,7.44,25.13\r\nlegacy_seresnext101_32x4d,224,2802.42,365.375,1024,48.96,8.02,21.26\r\nnextvit_large,224,2796.65,366.132,1024,57.83,10.77,28.99\r\nresnetv2_50d_evos,224,2789.97,367.009,1024,25.59,4.33,11.92\r\nrdnet_small,224,2787.43,367.338,1024,50.44,8.74,22.55\r\nregnetx_120,224,2782.68,367.968,1024,46.11,12.13,21.37\r\nnfnet_f0,256,2778.61,368.505,1024,71.49,12.62,18.05\r\ncoatnet_rmlp_1_rw_224,224,2774.73,369.022,1024,41.69,7.85,35.47\r\nfasternet_l,224,2743.07,373.283,1024,93.47,15.52,20.46\r\nconvnext_nano,384,2721.18,376.286,1024,15.59,7.22,24.61\r\nconvnextv2_nano,288,2718.69,282.469,768,15.62,4.06,13.84\r\ntresnet_xl,224,2708.78,378.009,1024,78.44,15.2,15.34\r\nxception41p,299,2702.82,378.838,1024,26.91,9.25,39.86\r\npoolformer_s36,224,2697.87,379.537,1024,30.86,5.0,15.82\r\nbeit3_base_patch16_224,224,2695.63,379.851,1024,86.66,17.63,23.9\r\nvit_base_patch16_plus_clip_240,240,2693.86,380.103,1024,117.21,27.41,33.08\r\nmambaout_tiny,288,2687.52,381.001,1024,26.55,7.41,27.58\r\ninception_v4,299,2679.37,382.152,1024,42.68,12.28,15.09\r\nefficientvit_b3,256,2667.2,383.901,1024,48.65,5.2,35.01\r\nmixnet_xxl,224,2662.4,384.594,1024,23.96,2.04,23.43\r\nresnet152s,224,2659.38,385.03,1024,60.32,12.92,24.96\r\nefficientnet_el,300,2648.29,386.644,1024,10.59,8.0,30.7\r\nhrnet_w32,224,2645.63,387.033,1024,41.23,8.97,22.02\r\nseresnext50_32x4d,288,2644.69,387.164,1024,27.56,7.04,23.82\r\nefficientnet_el_pruned,300,2641.73,387.605,1024,10.59,8.0,30.7\r\nswin_small_patch4_window7_224,224,2632.18,389.007,1024,49.61,8.77,27.47\r\nconvnext_base,224,2617.98,391.118,1024,88.59,15.38,28.75\r\nhrnet_w30,224,2617.84,391.139,1024,37.71,8.15,21.21\r
\necaresnet50t,320,2616.28,391.367,1024,25.57,8.82,24.13\r\nvgg19_bn,224,2610.66,392.217,1024,143.68,19.66,14.86\r\nregnety_120,224,2606.65,392.82,1024,51.82,12.14,21.38\r\nvit_pe_core_base_patch16_224,224,2601.34,393.621,1024,93.67,17.82,24.21\r\nfastvit_mci1,256,2596.34,394.379,1024,21.46,4.67,27.3\r\nvit_base_patch16_siglip_gap_256,256,2592.6,394.949,1024,85.84,23.13,33.23\r\nconvformer_s18,224,2590.89,395.204,1024,26.77,3.96,15.82\r\nlegacy_seresnet152,224,2583.71,396.308,1024,66.82,11.33,22.08\r\nconvmixer_1024_20_ks9_p14,224,2582.99,396.413,1024,24.38,5.55,5.51\r\ntnt_s_legacy_patch16_224,224,2580.92,396.737,1024,23.76,5.24,24.37\r\nvit_base_patch16_siglip_256,256,2578.93,397.045,1024,92.93,23.44,33.63\r\ncoatnet_rmlp_1_rw2_224,224,2577.83,397.211,1024,41.72,8.11,40.13\r\nregnety_040_sgn,224,2576.79,397.369,1024,20.65,4.03,12.29\r\nefficientnet_b4,320,2576.03,397.49,1024,19.34,3.13,34.76\r\ncoatnet_1_rw_224,224,2572.53,398.021,1024,41.72,8.04,34.6\r\nmobilenetv4_conv_large,448,2567.34,398.837,1024,32.59,8.75,37.17\r\nfocalnet_small_srf,224,2563.19,399.482,1024,49.89,8.62,26.26\r\nsequencer2d_m,224,2554.86,400.779,1024,38.31,6.55,14.26\r\nresnetv2_101,288,2544.92,402.346,1024,44.54,12.94,26.83\r\nmobilenetv4_hybrid_medium,448,2543.63,402.549,1024,11.07,4.2,29.64\r\ndensenet161,224,2542.01,402.805,1024,28.68,7.79,11.06\r\ntf_efficientnet_el,300,2541.71,402.853,1024,10.59,8.0,30.7\r\nseresnet152,224,2539.91,403.143,1024,66.82,11.57,22.61\r\nvit_base_patch16_plus_240,240,2534.68,403.976,1024,117.56,27.41,33.08\r\nvit_small_patch8_224,224,2526.67,405.25,1024,21.67,22.44,80.84\r\nvit_base_patch16_rope_224,224,2519.16,406.464,1024,86.43,17.58,23.9\r\nvit_base_patch16_rope_mixed_224,224,2515.81,407.006,1024,86.44,17.58,25.7\r\nvit_base_patch16_rope_ape_224,224,2498.69,409.796,1024,86.59,17.58,23.9\r\nvit_base_patch16_rope_mixed_ape_224,224,2496.28,410.187,1024,86.59,17.58,25.7\r\ntnt_s_patch16_224,224,2494.77,410.437,1024,23.77,5.25,24.37\r\nfastvit_sa36,256,2493.59,4
10.631,1024,31.46,5.59,29.46\r\nmaxxvit_rmlp_tiny_rw_256,256,2475.65,413.604,1024,29.64,6.66,39.76\r\nrexnetr_300,288,2473.86,310.423,768,34.81,5.59,36.61\r\nresnet101,288,2470.02,414.55,1024,44.55,12.95,26.83\r\nswinv2_tiny_window8_256,256,2462.5,415.812,1024,28.35,5.96,24.57\r\nmvitv2_tiny,224,2449.32,418.051,1024,24.17,4.7,21.16\r\nvit_pe_spatial_tiny_patch16_512,512,2441.15,419.451,1024,5.68,10.46,61.64\r\nfocalnet_small_lrf,224,2431.27,421.158,1024,50.34,8.74,28.61\r\nxcit_tiny_12_p8_224,224,2419.39,423.224,1024,6.71,4.81,23.6\r\ngcvit_tiny,224,2409.75,424.918,1024,28.22,4.79,29.82\r\nvit_betwixt_patch16_rope_reg4_gap_256,256,2384.84,429.356,1024,60.23,16.52,28.24\r\nmaxvit_tiny_tf_224,224,2376.15,430.919,1024,30.92,5.6,35.78\r\nxception41,299,2363.58,433.221,1024,26.97,9.28,39.86\r\nese_vovnet57b,320,2353.41,435.092,1024,38.61,18.26,15.34\r\nregnetx_160,224,2345.79,436.505,1024,54.28,15.99,25.52\r\nefficientvit_l2,288,2336.98,438.15,1024,63.71,11.51,32.19\r\nmobilevitv2_200,256,2331.27,439.222,1024,18.45,7.22,32.15\r\ncait_xxs36_224,224,2331.23,439.232,1024,17.3,3.77,30.34\r\nxcit_small_24_p16_224,224,2329.4,439.578,1024,47.67,9.1,23.64\r\nwide_resnet50_2,288,2318.37,441.669,1024,68.88,18.89,23.81\r\nefficientnet_lite4,380,2311.01,443.073,1024,13.01,4.04,45.66\r\ncoatnet_1_224,224,2307.62,332.79,768,42.23,8.7,39.0\r\nregnety_160,224,2299.4,445.305,1024,83.59,15.96,23.04\r\ncoat_tiny,224,2298.47,445.49,1024,5.5,4.35,27.2\r\nhiera_small_abswin_256,256,2295.99,445.973,1024,34.36,8.29,26.38\r\ndm_nfnet_f0,256,2295.58,446.044,1024,71.49,12.62,18.05\r\nwide_resnet101_2,224,2288.54,447.422,1024,126.89,22.8,21.23\r\neca_nfnet_l1,256,2285.07,448.105,1024,41.41,9.62,22.04\r\npvt_v2_b4,224,2270.35,451.008,1024,62.56,10.14,53.74\r\nhrnet_w18_ssld,288,2267.76,451.527,1024,21.3,7.14,26.96\r\nlamhalobotnet50ts_256,256,2258.99,453.276,1024,22.57,5.02,18.44\r\nresnetaa101d,288,2246.05,455.888,1024,44.57,15.07,29.03\r\nhgnetv2_b6,224,2229.14,459.349,1024,75.26,16.88,21.23\r\ndl
a102x2,224,2221.46,460.934,1024,41.28,9.34,29.91\r\npoolformerv2_s24,224,2214.64,462.351,1024,21.34,3.42,10.68\r\ntwins_svt_base,224,2197.98,465.859,1024,56.07,8.59,26.33\r\nmaxvit_tiny_rw_256,256,2193.52,466.806,1024,29.07,6.74,44.35\r\nmaxvit_rmlp_tiny_rw_256,256,2188.29,467.922,1024,29.15,6.77,46.92\r\npvt_v2_b5,224,2182.43,469.18,1024,81.96,11.76,50.92\r\nhiera_base_224,224,2174.72,470.842,1024,51.52,9.4,30.42\r\nconvnext_small,288,2170.22,471.816,1024,50.22,14.39,35.65\r\nhgnet_base,224,2169.11,472.062,1024,71.58,25.14,15.47\r\nresnetblur101d,288,2154.14,475.336,1024,44.57,15.07,29.65\r\nseresnet101,288,2153.52,475.474,1024,49.33,12.95,26.87\r\ncs3se_edgenet_x,320,2152.93,475.604,1024,50.72,18.01,20.21\r\nresnet152d,256,2150.53,476.138,1024,60.21,15.41,30.51\r\nmaxvit_tiny_pm_256,256,2146.14,477.112,1024,30.09,6.61,47.9\r\nresnetv2_101x1_bit,224,2145.38,477.284,1024,44.54,8.04,16.23\r\nnest_tiny,224,2139.01,478.703,1024,17.06,5.83,25.48\r\nvit_relpos_base_patch16_plus_240,240,2137.22,479.107,1024,117.38,27.3,34.33\r\ndavit_base,224,2130.3,360.488,768,87.95,15.51,40.66\r\nefficientnetv2_s,384,2122.25,482.483,1024,21.46,8.44,35.77\r\nswinv2_cr_small_224,224,2118.04,483.435,1024,49.7,9.07,50.27\r\nregnety_064,288,2114.73,484.202,1024,30.58,10.56,27.11\r\nregnetv_064,288,2110.55,485.154,1024,30.58,10.55,27.11\r\nnest_tiny_jx,224,2107.36,485.894,1024,17.06,5.83,25.48\r\nnf_regnet_b4,384,2103.13,486.873,1024,30.21,4.7,28.61\r\nmobilenetv4_conv_aa_large,448,2102.73,486.964,1024,32.59,9.63,43.94\r\nswinv2_cr_small_ns_224,224,2098.73,487.886,1024,49.7,9.08,50.27\r\ndpn98,224,2098.06,488.048,1024,61.57,11.73,25.2\r\nefficientvit_b3,288,2089.4,490.069,1024,48.65,6.58,44.2\r\necaresnet101d,288,2088.38,490.305,1024,44.57,13.35,28.19\r\nresnet200,224,2088.11,490.372,1024,64.67,15.07,32.19\r\nvit_pe_core_small_patch16_384,384,2087.87,490.432,1024,23.78,15.69,51.23\r\ntf_efficientnet_lite4,380,2075.73,493.3,1024,13.01,4.04,45.66\r\nresnetrs101,288,2067.18,495.339,1024,63.62,13
.56,28.53\r\neva02_small_patch14_336,336,2041.87,501.477,1024,22.13,15.48,54.33\r\ntf_efficientnetv2_s,384,2040.88,501.723,1024,21.46,8.44,35.77\r\ncrossvit_15_dagger_408,408,2036.47,502.809,1024,28.5,21.45,95.05\r\ninception_next_base,224,2029.47,504.544,1024,86.67,14.85,25.69\r\nmobilenetv4_hybrid_large,384,2023.41,506.05,1024,37.76,7.77,34.52\r\nconvnext_tiny,384,2020.62,380.056,768,28.59,13.14,39.48\r\nfastvit_ma36,256,2014.72,508.239,1024,43.98,7.82,34.98\r\nresnext101_64x4d,224,2013.62,508.514,1024,83.46,15.52,31.21\r\nefficientnetv2_rw_s,384,2008.77,509.744,1024,23.94,8.72,38.03\r\nnfnet_f1,224,2006.41,510.343,1024,132.63,17.87,22.94\r\neva02_base_patch16_clip_224,224,2003.24,511.148,1024,86.26,17.62,26.32\r\nmambaout_base_tall_rw,224,2002.99,511.211,1024,86.48,16.15,38.74\r\ninception_resnet_v2,299,2000.07,511.961,1024,55.84,13.18,25.06\r\nregnety_080,288,1999.43,512.125,1024,39.18,13.22,29.69\r\nconvnext_base,256,1997.75,512.534,1024,88.59,20.09,37.55\r\nresnext101_32x8d,224,1994.31,513.434,1024,88.79,16.48,31.21\r\nxception65p,299,1955.08,523.739,1024,39.82,13.91,52.48\r\nvit_mediumd_patch16_rope_reg1_gap_256,256,1953.59,524.14,1024,63.95,17.65,37.02\r\nmvitv2_small_cls,224,1946.85,525.957,1024,34.87,7.04,28.17\r\nresnest101e,256,1937.8,528.407,1024,48.28,13.38,28.66\r\nmambaout_base_short_rw,224,1934.42,529.338,1024,88.83,16.31,38.08\r\nresnetv2_50d_gn,288,1923.14,532.437,1024,25.57,7.24,19.7\r\nvit_base_patch16_dinov3,256,1920.96,533.046,1024,85.64,23.6,34.06\r\nvit_base_patch16_dinov3_qkvb,256,1920.64,533.136,1024,85.66,23.6,34.06\r\nresnet101d,320,1917.46,534.01,1024,44.57,16.48,34.77\r\nregnetz_d32,320,1916.26,534.347,1024,27.58,9.33,37.08\r\nconvnextv2_small,224,1899.56,539.044,1024,50.32,8.71,21.56\r\nrdnet_base,224,1899.05,539.188,1024,87.45,15.4,31.14\r\nswin_base_patch4_window7_224,224,1896.87,539.812,1024,87.77,15.47,36.63\r\ncoat_lite_medium,224,1896.44,539.939,1024,44.57,9.81,40.06\r\nvit_base_patch16_rope_reg1_gap_256,256,1894.56,540.473,1024
,86.43,23.22,33.39\r\nregnetz_d8,320,1893.35,540.817,1024,23.37,6.19,37.08\r\npoolformer_m36,224,1879.82,544.711,1024,56.17,8.8,22.02\r\nseresnext101_64x4d,224,1878.5,545.094,1024,88.23,15.53,31.25\r\nhrnet_w40,224,1877.24,545.463,1024,57.56,12.75,25.29\r\nseresnet152d,256,1874.76,546.18,1024,66.84,15.42,30.56\r\nlambda_resnet50ts,256,1872.99,546.695,1024,21.54,5.07,17.48\r\nseresnext101_32x8d,224,1869.96,547.579,1024,93.57,16.48,31.25\r\ngmlp_b16_224,224,1867.99,548.158,1024,73.08,15.78,30.21\r\nresnext101_32x4d,288,1866.18,548.69,1024,44.18,13.24,35.09\r\nresnetrs152,256,1863.25,549.557,1024,86.62,15.59,30.83\r\nregnetz_040,320,1862.9,549.649,1024,27.12,6.35,37.78\r\nregnetz_040_h,320,1862.25,549.851,1024,28.94,6.43,37.94\r\nconvnextv2_tiny,288,1854.44,414.116,768,28.64,7.39,22.21\r\nfastvit_mci2,256,1851.53,553.032,1024,35.7,7.85,36.09\r\nefficientformer_l7,224,1849.39,553.673,1024,82.23,10.17,24.45\r\nvit_so150m_patch16_reg4_gap_256,256,1844.07,555.268,1024,134.13,36.75,53.21\r\nmobilenetv4_conv_aa_large,480,1843.63,416.546,768,32.59,11.05,50.45\r\nvit_medium_patch16_gap_384,384,1842.05,555.879,1024,39.03,26.08,67.54\r\nregnetz_b16_evos,288,1840.31,556.406,1024,9.74,2.36,16.43\r\ncoat_mini,224,1838.14,557.062,1024,10.34,6.82,33.68\r\nregnetz_c16_evos,256,1835.86,557.756,1024,13.49,2.48,16.57\r\nmaxvit_rmlp_small_rw_224,224,1835.73,557.793,1024,64.9,10.75,49.3\r\nseresnext101d_32x8d,224,1834.59,558.138,1024,93.59,16.72,32.05\r\nfocalnet_base_srf,224,1829.6,559.665,1024,88.15,15.28,35.01\r\nvit_so150m_patch16_reg4_map_256,256,1826.88,560.494,1024,141.48,37.17,53.68\r\ncait_s24_224,224,1825.94,560.782,1024,46.92,9.35,40.58\r\nswin_s3_small_224,224,1824.99,561.071,1024,49.74,9.43,37.84\r\nmvitv2_small,224,1810.99,565.416,1024,34.87,7.0,28.08\r\ncsatv2_21m,640,1807.64,566.46,1024,20.7,4.72,26.68\r\nefficientnetv2_m,320,1792.87,571.132,1024,54.14,11.01,39.97\r\nlevit_384_s8,224,1776.74,288.143,512,39.06,9.95,35.86\r\nefficientnet_b4,384,1773.12,433.108,768,19.34,4.51,
50.04\r\nhrnet_w44,224,1756.93,582.811,1024,67.06,14.94,26.92\r\ntresnet_m,448,1756.93,582.809,1024,31.39,22.99,29.21\r\nresnet50_gn,288,1749.23,585.377,1024,25.56,6.85,18.37\r\nmambaout_base,224,1749.02,585.452,1024,84.81,15.83,36.95\r\nseresnextaa101d_32x8d,224,1744.31,587.023,1024,93.59,17.25,34.16\r\nresnet152,288,1743.4,587.334,1024,60.19,19.11,37.28\r\nlevit_conv_384_s8,224,1742.29,293.844,512,39.06,9.95,35.86\r\nmambaout_small,288,1737.99,589.165,1024,48.49,14.81,45.82\r\nmambaout_base_wide_rw,224,1727.55,592.723,1024,94.45,17.78,42.6\r\nvit_large_r50_s32_224,224,1727.26,592.822,1024,328.99,19.58,24.41\r\nfocalnet_base_lrf,224,1718.79,595.74,1024,88.75,15.43,38.13\r\nmambaout_small_rw,288,1717.36,596.237,1024,48.5,14.81,45.82\r\nxception65,299,1715.8,596.782,1024,39.92,13.96,52.48\r\nhrnet_w48_ssld,224,1715.01,597.058,1024,77.47,17.34,28.56\r\nhrnet_w48,224,1711.08,598.427,1024,77.47,17.34,28.56\r\nxcit_tiny_24_p16_384,384,1707.98,599.511,1024,12.12,6.87,34.29\r\nvolo_d2_224,224,1705.21,600.483,1024,58.68,14.34,41.34\r\nsamvit_base_patch16_224,224,1700.74,602.073,1024,86.46,17.54,24.54\r\ndm_nfnet_f1,224,1688.4,606.469,1024,132.63,17.87,22.94\r\nseresnext101_32x4d,288,1687.6,606.753,1024,48.96,13.25,35.12\r\nresnetv2_50d_evos,288,1678.21,610.15,1024,25.59,7.15,19.7\r\nvitamin_base_224,224,1663.12,153.901,256,87.72,22.68,52.77\r\nresnet50x4_clip_gap,288,1659.93,616.866,1024,65.62,19.57,34.11\r\ntf_efficientnet_b4,380,1657.09,463.44,768,19.34,4.49,49.49\r\nsequencer2d_l,224,1653.88,619.124,1024,54.3,9.74,22.12\r\ntiny_vit_21m_384,384,1653.3,619.341,1024,21.22,13.72,77.83\r\ncoatnet_rmlp_2_rw_224,224,1649.96,310.287,512,73.88,15.18,54.78\r\nxcit_medium_24_p16_224,224,1646.89,621.75,1024,84.4,16.13,31.71\r\nhiera_base_plus_224,224,1641.79,623.685,1024,69.9,12.67,37.98\r\nmambaout_base_plus_rw,224,1641.72,623.715,1024,101.66,19.19,45.16\r\nvit_so150m2_patch16_reg1_gap_256,256,1639.71,624.477,1024,136.06,37.0,56.93\r\ncoatnet_2_rw_224,224,1623.48,473.017,768,73.87,
15.09,49.22\r\nregnetz_e8,256,1619.96,632.091,1024,57.7,9.91,40.94\r\ntwins_svt_large,224,1618.15,632.798,1024,99.27,15.15,35.1\r\nvit_base_r50_s16_224,224,1617.74,632.959,1024,97.89,21.66,35.28\r\nswinv2_base_window12_192,192,1615.44,633.858,1024,109.28,11.9,39.72\r\nresnet50x4_clip,288,1600.35,639.837,1024,87.14,21.35,35.27\r\neva02_base_patch14_224,224,1598.85,640.435,1024,85.76,23.22,36.55\r\nmaxxvit_rmlp_small_rw_256,256,1595.53,641.77,1024,66.01,14.67,58.38\r\nregnety_120,288,1578.19,648.818,1024,51.82,20.06,35.34\r\ntnt_b_patch16_224,224,1576.24,649.621,1024,65.43,14.1,39.01\r\nconvnext_base,288,1573.61,650.709,1024,88.59,25.43,47.53\r\nefficientnetv2_rw_m,320,1572.26,651.271,1024,53.24,12.72,47.14\r\nefficientnet_b3_gn,288,1570.39,326.011,512,11.73,1.74,23.35\r\ngcvit_small,224,1568.69,652.747,1024,51.09,8.57,41.61\r\ncoatnet_2_224,224,1564.39,327.258,512,74.68,16.5,52.67\r\nvit_betwixt_patch16_reg4_gap_384,384,1555.05,658.479,1024,60.6,39.71,85.28\r\nhiera_base_abswin_256,256,1552.5,659.558,1024,51.27,12.46,40.7\r\nresnet200d,256,1548.97,661.062,1024,64.69,20.0,43.09\r\nregnety_040_sgn,288,1548.79,495.849,768,20.65,6.67,20.3\r\nmaxvit_small_tf_224,224,1547.81,496.161,768,68.93,11.66,53.17\r\ndpn131,224,1543.2,663.528,1024,79.25,16.09,32.97\r\nmobilevitv2_150,384,1541.01,332.23,512,10.59,9.2,54.25\r\nswinv2_tiny_window16_256,256,1537.17,333.056,512,28.35,6.68,39.02\r\nswinv2_cr_base_224,224,1534.49,667.291,1024,87.88,15.86,59.66\r\nconvnextv2_nano,384,1534.09,333.723,512,15.62,7.22,24.61\r\nswinv2_small_window8_256,256,1528.83,669.772,1024,49.73,11.58,40.14\r\nswinv2_cr_base_ns_224,224,1522.46,672.566,1024,87.88,15.86,59.66\r\nswinv2_cr_small_ns_256,256,1520.32,673.503,1024,49.7,12.07,76.21\r\nseresnet152,288,1513.93,676.365,1024,66.82,19.11,37.34\r\nswin_s3_base_224,224,1508.83,678.649,1024,71.13,13.69,48.26\r\ncaformer_s36,224,1498.99,683.104,1024,39.3,8.0,37.53\r\npoolformerv2_s36,224,1491.92,686.341,1024,30.79,5.01,15.82\r\nefficientvit_l3,224,1491.36,68
6.596,1024,246.04,27.62,39.16\r\nconvit_base,224,1488.18,688.065,1024,86.54,17.52,31.77\r\nconvnext_large,224,1485.54,689.286,1024,197.77,34.4,43.13\r\nnextvit_small,384,1476.49,693.508,1024,31.74,17.25,57.14\r\nxcit_small_12_p16_384,384,1475.93,693.779,1024,26.25,14.14,36.51\r\nvit_large_patch32_384,384,1459.88,701.407,1024,306.63,45.31,43.86\r\nfastvit_mci3,256,1455.33,703.595,1024,125.07,14.82,44.88\r\nmobilenetv4_hybrid_large,448,1439.59,711.291,1024,37.76,10.74,48.61\r\nregnety_320,224,1438.09,712.029,1024,145.05,32.34,30.26\r\neca_nfnet_l1,320,1435.96,713.088,1024,41.41,14.92,34.42\r\nmaxvit_rmlp_small_rw_256,256,1428.64,716.744,1024,64.9,14.15,66.09\r\ndensenet264d,224,1428.19,716.963,1024,72.74,13.57,14.0\r\nmobilenetv4_conv_aa_large,544,1422.48,539.884,768,32.59,14.19,64.79\r\npoolformer_m48,224,1418.95,721.631,1024,73.47,11.59,29.17\r\nregnety_160,288,1410.53,725.945,1024,83.59,26.37,38.07\r\ncrossvit_18_dagger_408,408,1403.57,729.547,1024,44.61,32.47,124.87\r\nconvnextv2_base,224,1388.18,553.221,768,88.72,15.38,28.75\r\nsenet154,224,1388.06,737.702,1024,115.09,20.77,38.69\r\nlegacy_senet154,224,1378.59,742.763,1024,115.09,20.77,38.69\r\ncoat_small,224,1376.13,744.093,1024,21.69,12.61,44.25\r\ndpn107,224,1373.18,745.693,1024,86.92,18.38,33.46\r\nconvformer_s36,224,1372.3,746.164,1024,40.01,7.67,30.5\r\nresnet152d,320,1367.55,748.761,1024,60.21,24.08,47.67\r\nmvitv2_base_cls,224,1366.89,749.12,1024,65.44,10.23,40.65\r\nnest_small,224,1366.51,749.327,1024,38.35,10.35,40.04\r\nnf_regnet_b5,384,1359.26,753.326,1024,49.74,7.95,42.9\r\nhgnetv2_b6,288,1356.49,754.863,1024,75.26,27.9,35.09\r\nnest_small_jx,224,1353.74,756.396,1024,38.35,10.35,40.04\r\nxception71,299,1351.87,757.448,1024,42.34,18.09,69.92\r\necaresnet200d,256,1341.41,763.35,1024,64.69,20.0,43.15\r\nseresnet200d,256,1339.05,764.701,1024,71.86,20.01,43.15\r\nregnetx_320,224,1334.74,767.164,1024,107.81,31.81,36.3\r\nresnetrs200,256,1333.13,768.093,1024,93.21,20.18,43.42\r\nefficientformerv2_s0,224,132
1.13,775.068,1024,3.6,0.41,5.3\r\nvit_mediumd_patch16_reg4_gap_384,384,1319.59,775.971,1024,64.27,43.67,113.51\r\nhrnet_w64,224,1314.88,778.761,1024,128.06,28.97,35.09\r\nmaxxvitv2_rmlp_base_rw_224,224,1314.38,779.05,1024,116.09,24.2,62.77\r\nxcit_nano_12_p8_384,384,1305.44,784.386,1024,3.05,6.34,46.08\r\nhgnet_base,288,1299.24,394.052,512,71.58,41.55,25.57\r\nefficientnet_b3_g8_gn,288,1297.79,394.495,512,14.25,2.59,23.35\r\nvit_small_r26_s32_384,384,1279.14,600.378,768,36.47,10.43,29.85\r\nmobilevitv2_175,384,1279.12,300.187,384,14.25,12.47,63.29\r\nefficientnet_b3_gn,320,1278.17,300.407,384,11.73,2.14,28.83\r\nefficientvit_l2,384,1277.63,801.461,1024,63.71,20.45,57.01\r\nconvnext_base,320,1267.77,605.76,768,88.59,31.39,58.68\r\nxcit_tiny_24_p8_224,224,1266.58,808.45,1024,12.11,9.21,45.39\r\nefficientformerv2_s1,224,1263.95,810.132,1024,6.19,0.67,7.66\r\nmvitv2_base,224,1263.54,810.4,1024,51.47,10.16,40.5\r\nconvnext_small,384,1258.15,610.399,768,50.22,25.58,63.37\r\ntf_efficientnetv2_m,384,1255.83,815.376,1024,54.14,15.85,57.52\r\nresnet200,288,1240.56,825.408,1024,64.67,24.91,53.21\r\nresnext101_64x4d,288,1236.43,828.171,1024,83.46,25.66,51.59\r\nvolo_d3_224,224,1227.13,834.439,1024,86.33,20.78,60.09\r\nlevit_conv_512_s8,224,1226.5,417.423,512,73.97,21.77,52.28\r\nvit_small_patch14_dinov2,518,1223.72,836.768,1024,22.06,46.76,198.79\r\ndavit_large,224,1223.67,627.596,768,196.81,34.6,60.99\r\nvit_small_patch14_reg4_dinov2,518,1201.09,852.54,1024,22.06,46.95,199.77\r\nseresnet152d,320,1193.26,858.132,1024,66.84,24.09,47.72\r\nresnetrs152,320,1185.41,863.813,1024,86.62,24.34,48.14\r\nvit_base_patch16_384,384,1177.68,869.481,1024,86.86,55.54,101.56\r\nregnetz_d8_evos,256,1176.69,870.216,1024,23.46,4.5,24.92\r\nvit_base_patch16_clip_384,384,1174.42,871.897,1024,86.86,55.54,101.56\r\nmambaout_base_short_rw,288,1172.23,655.135,768,88.83,26.96,62.94\r\ndeit_base_patch16_384,384,1170.45,874.855,1024,86.86,55.54,101.56\r\ndeit_base_distilled_patch16_384,384,1169.66,875.439,
1024,87.63,55.65,101.82\r\nregnetz_c16_evos,320,1169.41,656.714,768,13.49,3.86,25.88\r\nvit_base_patch16_18x2_224,224,1162.93,880.511,1024,256.73,52.51,71.38\r\nlevit_512_s8,224,1159.97,331.019,384,73.97,21.77,52.28\r\ngcvit_base,224,1158.93,883.553,1024,90.32,14.87,55.48\r\nnaflexvit_base_patch16_siglip,384,1158.22,884.094,1024,92.93,56.12,102.2\r\nefficientvit_l3,256,1150.35,890.134,1024,246.04,36.06,50.98\r\nseresnext101_32x8d,288,1135.9,901.462,1024,93.57,27.24,51.63\r\nnextvit_base,384,1135.31,901.934,1024,44.79,24.62,73.95\r\nxcit_small_12_p8_224,224,1126.95,908.619,1024,26.21,18.69,47.21\r\nrdnet_large,224,1126.86,681.518,768,186.27,34.74,46.67\r\nconvnext_large_mlp,256,1126.46,681.758,768,200.13,44.94,56.33\r\nmambaout_base_tall_rw,288,1125.37,909.904,1024,86.48,26.69,64.04\r\nswin_large_patch4_window7_224,224,1124.59,910.534,1024,196.53,34.53,54.94\r\nresnext101_32x16d,224,1120.36,913.968,1024,194.03,36.27,51.18\r\nvit_pe_spatial_small_patch16_512,512,1119.67,914.527,1024,21.98,31.8,123.27\r\nnaflexvit_base_patch16_gap,384,1116.88,916.817,1024,86.63,55.86,102.34\r\nnaflexvit_base_patch16_par_gap,384,1115.96,917.573,1024,86.63,55.86,102.34\r\nseresnext101d_32x8d,288,1115.19,918.2,1024,93.59,27.64,52.95\r\nefficientnetv2_m,416,1113.83,919.321,1024,54.14,18.6,67.5\r\nnaflexvit_base_patch16_parfac_gap,384,1111.58,921.189,1024,86.46,55.86,102.34\r\neva_large_patch14_196,196,1110.48,922.102,1024,304.14,61.57,63.52\r\nefficientnet_b5,416,1108.64,692.72,768,30.39,8.27,80.68\r\nvit_large_patch16_224,224,1108.08,924.089,1024,304.33,61.6,63.52\r\nnaflexvit_base_patch16_map,384,1107.47,924.606,1024,93.72,56.23,102.46\r\nmobilevitv2_200,384,1106.66,346.968,384,18.45,16.24,72.34\r\ndeit3_base_patch16_384,384,1097.89,932.669,1024,86.88,55.54,101.56\r\nswinv2_base_window8_256,256,1094.9,935.218,1024,87.92,20.37,52.59\r\nnfnet_f2,256,1084.06,944.572,1024,193.78,33.76,41.85\r\nmixer_l16_224,224,1081.58,946.742,1024,208.2,44.6,41.69\r\ncaformer_m36,224,1078.51,949.436,1024,56
.2,13.29,50.48\r\ndeit3_large_patch16_224,224,1062.37,963.859,1024,304.37,61.6,63.52\r\npoolformerv2_m36,224,1058.78,967.123,1024,56.08,8.81,22.02\r\nseresnextaa101d_32x8d,288,1057.07,968.692,1024,93.59,28.51,56.44\r\nmambaout_base,288,1053.62,728.891,768,84.81,26.16,61.08\r\nvolo_d1_384,384,1053.53,971.938,1024,26.78,22.75,108.55\r\necaresnet200d,288,1052.58,972.81,1024,64.69,25.31,54.59\r\nseresnet200d,288,1051.26,974.042,1024,71.86,25.32,54.6\r\nseresnet269d,256,1050.08,975.132,1024,113.67,26.59,53.6\r\nefficientnet_b3_g8_gn,320,1046.1,367.055,384,14.25,3.2,28.83\r\nmambaout_base_wide_rw,288,1045.79,734.344,768,94.45,29.39,70.41\r\nconvnextv2_tiny,384,1045.43,367.284,384,28.64,13.14,39.48\r\nrepvgg_d2se,320,1036.96,987.473,1024,120.39,66.99,23.42\r\nbeitv2_large_patch16_224,224,1030.44,993.728,1024,304.43,61.6,63.52\r\nbeit_large_patch16_224,224,1030.17,993.984,1024,304.43,61.6,63.52\r\nhrnet_w48_ssld,288,1027.97,996.111,1024,77.47,28.66,47.21\r\nresnetrs270,256,1019.29,1004.596,1024,129.86,27.06,55.84\r\nbeit_base_patch16_384,384,1011.72,1012.106,1024,86.74,55.54,101.56\r\neca_nfnet_l2,320,1009.44,1014.398,1024,56.72,20.95,47.43\r\nvit_base_patch16_siglip_gap_384,384,1008.19,1015.657,1024,86.09,55.43,101.3\r\nregnetz_e8,320,1007.46,1016.394,1024,57.7,15.46,63.94\r\nvit_base_patch16_siglip_384,384,1005.01,1018.875,1024,93.18,56.12,102.2\r\nnest_base,224,1001.74,766.642,768,67.72,17.96,53.39\r\nconvformer_m36,224,999.48,1024.512,1024,57.05,12.89,42.05\r\nswinv2_large_window12_192,192,995.57,771.394,768,228.77,26.17,56.53\r\nnest_base_jx,224,992.42,773.843,768,67.72,17.96,53.39\r\nfastvit_mci4,256,991.89,1032.343,1024,321.57,27.78,60.59\r\nmaxvit_rmlp_base_rw_224,224,991.49,1032.77,1024,116.14,23.15,92.64\r\nnfnet_f1,320,989.71,1034.625,1024,132.63,35.97,46.77\r\nmambaout_base_plus_rw,288,989.63,776.023,768,101.66,31.72,74.64\r\nflexivit_large,240,989.16,1035.196,1024,304.36,70.99,75.39\r\nxcit_large_24_p16_224,224,985.79,1038.738,1024,189.1,35.86,47.27\r\nresnet20
0d,320,985.05,1039.516,1024,64.69,31.25,67.33\r\ncoatnet_rmlp_3_rw_224,224,973.48,394.438,384,165.15,33.56,79.47\r\ncoatnet_3_rw_224,224,973.35,394.488,384,181.81,33.44,73.83\r\nswinv2_small_window16_256,256,966.16,529.91,512,49.73,12.82,66.29\r\ncaformer_s18,384,955.65,535.738,512,26.34,13.42,77.34\r\nhalonet_h1,256,951.85,403.4,384,8.1,3.0,51.17\r\nefficientnetv2_rw_m,416,946.67,1081.665,1024,53.24,21.49,79.62\r\nefficientnet_b5,448,930.41,825.426,768,30.39,9.59,93.56\r\nswinv2_cr_large_224,224,929.96,1101.098,1024,196.68,35.1,78.42\r\nnextvit_large,384,920.62,1112.273,1024,57.83,32.0,90.76\r\nefficientnet_x_b5,448,915.16,1118.909,1024,33.44,23.35,68.87\r\ncoatnet_3_224,224,914.68,419.794,384,166.97,36.56,79.01\r\nnf_regnet_b5,456,913.96,1120.379,1024,49.74,11.7,61.95\r\ndm_nfnet_f2,256,913.46,1120.995,1024,193.78,33.76,41.85\r\nconvnext_large,288,891.7,861.253,768,197.77,56.87,71.29\r\nresnetv2_50x3_bit,224,891.57,861.373,768,217.32,37.06,33.34\r\ntresnet_l,448,888.87,1152.002,1024,55.99,43.59,47.56\r\nmaxvit_base_tf_224,224,886.15,866.653,768,119.47,24.04,95.01\r\naimv2_large_patch14_224,224,885.67,1156.162,1024,309.2,82.3,85.2\r\nconvformer_s18,384,883.51,579.481,512,26.77,11.63,46.49\r\nconvnext_base,384,880.61,581.388,512,88.59,45.21,84.49\r\nconvnext_xlarge,224,880.48,1162.974,1024,350.2,60.98,57.5\r\ntiny_vit_21m_512,512,868.62,589.413,512,21.26,26.93,177.93\r\nvit_base_patch8_224,224,854.85,1197.848,1024,86.58,78.22,161.69\r\nregnety_640,224,854.43,1198.435,1024,281.38,64.16,42.5\r\nresnetrs200,320,848.16,1207.295,1024,93.21,31.51,67.81\r\nmaxxvitv2_rmlp_large_rw_224,224,842.48,1215.427,1024,215.42,44.14,87.15\r\nconvnextv2_large,224,841.63,608.313,512,197.96,34.4,43.13\r\nseresnextaa101d_32x8d,320,840.14,1218.824,1024,93.59,35.19,69.67\r\nswinv2_cr_tiny_384,384,839.99,457.121,384,28.33,15.34,161.01\r\nresnetv2_50x1_bit,448,837.89,611.039,512,25.55,16.62,44.46\r\nconvnextv2_base,288,836.68,611.916,512,88.72,25.43,47.53\r\nefficientnet_h_b5,448,836.42,1224.
243,1024,45.88,27.16,73.9\r\ntf_efficientnet_b5,456,835.97,612.433,512,30.39,10.46,98.86\r\nbeit3_large_patch16_224,224,830.95,1232.286,1024,304.57,61.72,63.52\r\nvit_large_patch14_clip_224,224,830.55,1232.884,1024,304.2,81.08,88.79\r\ndm_nfnet_f1,320,830.48,1232.996,1024,132.63,35.97,46.77\r\nvit_large_patch14_224,224,828.79,1235.515,1024,304.2,81.08,88.79\r\nseresnet269d,288,824.85,1241.415,1024,113.67,33.65,67.81\r\nxcit_tiny_12_p8_384,384,822.23,1245.374,1024,6.71,14.13,69.14\r\nregnety_160,384,817.33,939.624,768,83.59,46.87,67.67\r\ntf_efficientnetv2_m,480,803.45,1274.482,1024,54.14,24.76,89.84\r\npoolformerv2_m48,224,798.7,1282.064,1024,73.35,11.59,29.17\r\ncaformer_b36,224,794.98,1288.053,1024,98.75,23.22,67.3\r\nxcit_small_24_p16_384,384,783.62,1306.724,1024,47.67,26.72,68.58\r\nvit_large_patch16_rope_mixed_224,224,782.56,1308.504,1024,304.2,61.6,68.34\r\nvit_large_patch16_rope_mixed_ape_224,224,781.8,1309.777,1024,304.4,61.6,68.34\r\nvit_large_patch16_rope_224,224,781.48,1310.312,1024,304.17,61.6,63.52\r\ndavit_huge,224,781.43,982.785,768,348.92,61.23,81.32\r\nmaxvit_tiny_tf_384,384,780.75,491.812,384,30.98,17.53,123.42\r\nvit_large_patch16_rope_ape_224,224,780.61,1311.768,1024,304.37,61.6,63.52\r\nvit_large_patch16_siglip_gap_256,256,755.63,1355.146,1024,303.36,80.8,88.34\r\nvit_large_patch16_siglip_256,256,754.78,1356.657,1024,315.96,81.34,88.88\r\nregnetz_d8_evos,320,749.0,1367.133,1024,23.46,7.03,38.92\r\nvolo_d4_224,224,746.44,1371.818,1024,192.96,44.34,80.22\r\nconvmixer_768_32,224,745.84,1372.919,1024,21.11,19.55,25.95\r\nvit_so150m_patch16_reg4_gap_384,384,744.55,1375.3,1024,134.42,87.97,165.47\r\nvit_large_patch14_clip_quickgelu_224,224,743.06,1378.064,1024,303.97,81.08,88.79\r\nefficientvit_l3,320,737.25,1041.685,768,246.04,56.32,79.34\r\nconvformer_b36,224,736.81,1389.753,1024,99.88,22.69,56.06\r\nmobilenetv5_base,256,726.11,705.107,512,82.65,20.05,36.89\r\nconvnext_large_mlp,320,718.87,712.203,512,200.13,70.21,88.02\r\nvitamin_large2_224,224,711
.29,359.882,256,333.58,75.05,112.83\r\nvitamin_large_224,224,709.57,360.757,256,333.32,75.05,112.83\r\nswinv2_base_window16_256,256,709.34,721.778,512,87.92,22.02,84.71\r\nswinv2_base_window12to16_192to256,256,709.29,721.821,512,87.92,22.02,84.71\r\ninception_next_base,384,696.91,1101.98,768,86.67,43.64,75.48\r\neca_nfnet_l2,384,695.67,1471.939,1024,56.72,30.05,68.28\r\nvit_large_patch14_xp_224,224,689.9,1484.246,1024,304.06,81.01,88.79\r\ntf_efficientnetv2_l,384,689.74,1484.597,1024,118.52,36.1,101.16\r\nefficientnetv2_l,384,689.38,1485.365,1024,118.52,36.1,101.16\r\ntresnet_xl,448,679.76,1506.393,1024,78.44,60.77,61.31\r\nvit_so150m2_patch16_reg1_gap_384,384,673.01,1521.502,1024,136.33,89.53,178.22\r\nnaflexvit_so150m2_patch16_reg1_gap,384,672.85,1521.867,1024,136.06,89.53,178.22\r\nhiera_large_224,224,672.7,1522.201,1024,213.74,40.34,83.37\r\necaresnet269d,320,669.08,1530.432,1024,102.09,41.53,83.69\r\nnaflexvit_so150m2_patch16_reg1_map,384,666.73,1535.835,1024,142.46,90.33,179.2\r\nresnest200e,320,666.37,1536.667,1024,70.2,35.69,82.78\r\neca_nfnet_l3,352,652.35,1569.679,1024,72.04,32.57,73.12\r\nefficientformerv2_s2,224,649.47,1576.636,1024,12.71,1.27,11.77\r\nresnetrs350,288,642.39,1594.01,1024,163.96,43.67,87.09\r\nresnetv2_152x2_bit,224,642.13,1594.665,1024,236.34,46.95,45.11\r\nnasnetalarge,331,640.53,799.323,512,88.75,23.89,90.56\r\npnasnet5large,331,627.28,816.201,512,86.06,25.04,92.89\r\nmaxvit_large_tf_224,224,621.62,823.632,512,211.79,43.68,127.35\r\ncoat_lite_medium_384,384,606.22,1266.839,768,44.57,28.73,116.7\r\nvit_large_patch16_dinov3_qkvb,256,592.88,1727.127,1024,303.13,82.43,90.56\r\nvit_large_patch16_dinov3,256,592.73,1727.576,1024,303.08,82.43,90.56\r\nxcit_small_24_p8_224,224,586.49,1745.959,1024,47.63,35.81,90.78\r\nvolo_d2_384,384,583.29,1755.524,1024,58.87,46.17,184.51\r\nvit_large_r50_s32_384,384,580.27,882.327,512,329.09,57.43,76.52\r\ncait_xxs24_384,384,579.64,1766.581,1024,12.03,9.63,122.66\r\necaresnet269d,352,573.6,1785.165,1024,102.0
9,50.25,101.25\r\nnfnet_f2,352,568.97,1799.731,1024,193.78,63.22,79.06\r\ncoatnet_4_224,224,561.53,683.83,384,275.43,62.48,129.26\r\nresnext101_32x32d,224,558.5,1375.092,768,468.53,87.29,91.12\r\nefficientnet_x_b5,576,558.01,917.526,512,33.44,38.59,113.83\r\nresnetrs270,352,557.26,1837.541,1024,129.86,51.13,105.48\r\nmambaout_base_plus_rw,384,556.94,919.288,512,101.66,56.39,132.7\r\nxcit_medium_24_p16_384,384,553.93,1848.591,1024,84.4,47.39,91.64\r\nvit_base_patch16_siglip_gap_512,512,549.6,1397.357,768,86.43,107.0,246.15\r\nresnet50x16_clip_gap,384,547.72,1402.156,768,136.2,70.32,100.64\r\nvit_base_patch16_siglip_512,512,546.9,1404.252,768,93.52,108.22,247.74\r\nvit_so400m_patch14_siglip_224,224,546.36,1874.203,1024,427.68,110.26,106.73\r\nvit_so400m_patch14_siglip_gap_224,224,546.36,1874.201,1024,412.44,109.57,106.13\r\nvit_so400m_patch16_siglip_gap_256,256,545.05,1878.685,1024,412.65,109.62,106.13\r\nvitamin_large2_256,256,544.68,352.476,192,333.64,99.0,154.99\r\nvitamin_large_256,256,543.81,353.038,192,333.38,99.0,154.99\r\nvit_so400m_patch16_siglip_256,256,543.28,1884.842,1024,427.89,110.31,106.73\r\nresnetv2_101x3_bit,224,542.89,1414.635,768,387.93,71.23,48.7\r\ncoatnet_rmlp_2_rw_384,384,540.41,473.691,256,73.88,47.69,209.43\r\nvit_base_r50_s16_384,384,540.05,948.04,512,98.95,67.43,135.03\r\nresnetv2_101x1_bit,448,535.77,955.608,512,44.54,31.65,64.93\r\nconvnext_xlarge,288,530.45,965.201,512,350.2,100.8,95.05\r\nresnet50x16_clip,384,528.32,1453.651,768,167.33,74.9,103.54\r\nnfnet_f3,320,525.53,1948.476,1024,254.92,68.77,83.93\r\neva02_large_patch14_224,224,525.39,1949.014,1024,303.27,81.15,97.2\r\nefficientnet_h_b5,576,524.33,976.458,512,45.88,44.9,122.13\r\nefficientvit_l3,384,522.49,734.917,384,246.04,81.08,114.02\r\nefficientnet_b6,528,519.61,738.988,384,43.04,19.4,167.39\r\nswinv2_cr_small_384,384,517.04,742.661,384,49.7,29.7,298.03\r\nmaxvit_small_tf_384,384,508.91,503.007,256,69.02,35.87,183.65\r\nconvnextv2_large,288,508.65,754.917,384,197.96,56.87,71.2
9\r\ncaformer_s36,384,505.97,1011.888,512,39.3,26.08,150.33\r\nefficientnetv2_xl,384,502.41,2038.132,1024,208.12,52.81,139.2\r\nregnety_320,384,501.39,1021.134,512,145.05,95.0,88.87\r\nconvnext_large,384,500.94,766.534,384,197.77,101.1,126.74\r\nconvnext_large_mlp,384,500.67,766.946,384,200.13,101.11,126.74\r\ntf_efficientnetv2_xl,384,495.57,2066.271,1024,208.12,52.81,139.2\r\neva02_large_patch14_clip_224,224,494.75,2069.722,1024,304.11,81.18,97.2\r\ntf_efficientnet_b6,528,493.13,778.687,384,43.04,19.4,167.39\r\nvolo_d5_224,224,492.58,2078.817,1024,295.46,72.4,118.11\r\ndm_nfnet_f2,352,476.36,2149.593,1024,193.78,63.22,79.06\r\nvit_so150m2_patch16_reg1_gap_448,448,472.75,2166.004,1024,136.5,127.51,287.05\r\nconvnextv2_base,384,471.24,543.228,256,88.72,45.21,84.49\r\nswin_base_patch4_window12_384,384,469.47,545.274,256,87.9,47.19,134.78\r\nconvformer_s36,384,467.93,1094.165,512,40.01,22.54,89.62\r\nefficientnetv2_l,480,461.43,1664.353,768,118.52,56.4,157.99\r\naimv2_huge_patch14_224,224,457.96,2235.956,1024,680.85,179.01,126.22\r\ntf_efficientnetv2_l,480,456.28,1683.173,768,118.52,56.4,157.99\r\nresmlp_big_24_224,224,454.62,2252.433,1024,129.14,100.23,87.31\r\nregnety_1280,224,449.87,1138.088,512,644.81,127.66,71.58\r\nswinv2_large_window12to16_192to256,256,445.07,575.168,256,196.74,47.81,121.53\r\ndm_nfnet_f3,320,444.51,2303.633,1024,254.92,68.77,83.93\r\nvitamin_xlarge_256,256,438.51,291.881,128,436.06,130.13,177.37\r\nresnetrs420,320,438.41,2335.688,1024,191.89,64.2,126.56\r\nmaxvit_tiny_tf_512,512,438.14,438.193,192,31.05,33.49,257.59\r\nmvitv2_large_cls,224,436.89,1757.857,768,234.58,42.17,111.69\r\nvit_pe_spatial_base_patch16_512,512,436.65,1758.82,768,86.43,107.13,246.54\r\nvit_base_patch14_dinov2,518,430.13,1785.465,768,86.58,151.71,397.58\r\nxcit_tiny_24_p8_384,384,429.89,2381.96,1024,12.11,27.05,132.95\r\nefficientformerv2_l,224,429.03,2386.769,1024,26.32,2.59,18.54\r\nmaxxvitv2_rmlp_base_rw_384,384,426.88,899.53,384,116.09,72.98,213.74\r\nvit_base_patch14_
reg4_dinov2,518,425.46,1203.396,512,86.58,152.25,399.53\r\nvit_huge_patch14_clip_224,224,421.48,2429.5,1024,632.05,167.4,139.41\r\nvit_huge_patch14_224,224,421.46,2429.612,1024,630.76,167.4,139.41\r\nseresnextaa201d_32x8d,320,417.36,2453.467,1024,149.39,70.22,138.71\r\nxcit_medium_24_p8_224,224,412.81,2480.522,1024,84.32,63.53,121.23\r\ncait_xs24_384,384,411.85,2486.283,1024,26.67,19.28,183.98\r\ndeit3_huge_patch14_224,224,410.23,2496.131,1024,632.13,167.4,139.41\r\nmvitv2_large,224,405.42,1894.299,768,217.99,43.87,112.02\r\nswinv2_cr_huge_224,224,404.99,1264.195,512,657.83,115.97,121.08\r\nconvmixer_1536_20,224,401.91,2547.788,1024,51.63,48.68,33.03\r\neca_nfnet_l3,448,392.83,1955.013,768,72.04,52.55,118.4\r\nfocalnet_huge_fl3,224,388.85,1316.686,512,745.28,118.26,104.8\r\nvit_huge_patch14_gap_224,224,387.22,2644.499,1024,630.76,166.73,138.74\r\ncait_xxs36_384,384,386.89,2646.714,1024,17.37,14.35,183.7\r\nrdnet_large,384,385.09,498.569,192,186.27,102.09,137.13\r\nvit_huge_patch14_clip_quickgelu_224,224,384.63,2662.238,1024,632.08,167.4,139.41\r\nxcit_small_12_p8_384,384,382.77,2006.379,768,26.21,54.92,138.29\r\naimv2_large_patch14_336,336,377.67,2711.312,1024,309.53,194.22,227.08\r\nswinv2_cr_base_384,384,377.43,678.242,256,87.88,50.57,333.68\r\neva02_base_patch14_448,448,369.17,2080.343,768,87.12,107.11,259.14\r\nmaxvit_xlarge_tf_224,224,366.17,1048.656,384,506.99,97.52,191.04\r\ncaformer_m36,384,364.28,1054.093,384,56.2,42.11,196.35\r\nvit_huge_patch14_xp_224,224,363.75,2815.089,1024,631.8,167.3,139.41\r\neva_large_patch14_336,336,361.49,2832.723,1024,304.53,191.1,270.24\r\nresnetrs350,384,360.54,2840.182,1024,163.96,77.59,154.74\r\nvit_large_patch14_clip_336,336,360.44,2840.967,1024,304.53,191.11,270.24\r\nvit_large_patch16_384,384,360.39,2841.368,1024,304.72,191.21,270.24\r\nconvnextv2_huge,224,358.05,714.967,256,660.29,115.0,79.07\r\ndeit3_large_patch16_384,384,346.34,2956.614,1024,304.76,191.21,270.24\r\nsam2_hiera_tiny,896,344.71,185.637,64,26.85,99.86,384.6
3\r\nconvformer_m36,384,340.66,1127.211,384,57.05,37.87,123.56\r\nvit_giant_patch16_gap_224,224,327.6,3125.73,1024,1011.37,202.46,139.26\r\ncait_s24_384,384,326.88,2349.478,768,47.06,32.17,245.31\r\ncoatnet_5_224,224,324.24,789.532,256,687.47,145.49,194.24\r\nvit_large_patch14_clip_quickgelu_336,336,323.72,3163.216,1024,304.29,191.11,270.24\r\nresnest269e,416,323.54,2373.688,768,110.93,77.69,171.98\r\nmaxvit_rmlp_base_rw_384,384,322.23,1588.914,512,116.14,70.97,318.95\r\nbeit_large_patch16_384,384,321.53,3184.748,1024,305.0,191.21,270.24\r\nhiera_huge_224,224,320.0,1599.991,512,672.78,124.85,150.95\r\nxcit_large_24_p16_384,384,319.72,3202.795,1024,189.1,105.35,137.17\r\nvit_large_patch16_siglip_gap_384,384,318.01,3220.029,1024,303.69,190.85,269.55\r\nvit_large_patch16_siglip_384,384,317.36,3226.608,1024,316.28,192.07,270.75\r\nefficientnet_b7,600,313.81,815.743,256,66.35,38.33,289.94\r\nconvnext_xxlarge,256,310.82,1647.239,512,846.47,198.09,124.45\r\nvitamin_large2_336,336,309.2,310.456,96,333.83,175.72,307.47\r\nvitamin_large_336,336,309.18,310.474,96,333.57,175.72,307.47\r\ndavit_giant,224,306.66,1252.186,384,1406.47,192.92,153.06\r\nnfnet_f3,416,304.26,3365.485,1024,254.92,115.58,141.78\r\nregnety_640,384,302.05,1271.295,384,281.38,188.47,124.83\r\ntf_efficientnet_b7,600,300.67,638.544,192,66.35,38.33,289.94\r\nconvnext_xlarge,384,298.28,858.244,256,350.2,179.2,168.99\r\nvolo_d3_448,448,297.88,2578.215,768,86.63,96.33,446.83\r\nefficientnetv2_xl,512,292.61,2624.61,768,208.12,93.85,247.32\r\nnfnet_f4,384,292.21,3504.265,1024,316.07,122.14,147.57\r\nsam2_hiera_small,896,291.05,219.878,64,33.95,123.99,442.63\r\ntf_efficientnetv2_xl,512,289.75,2650.555,768,208.12,93.85,247.32\r\nmaxvit_base_tf_384,384,289.55,884.122,256,119.65,73.8,332.9\r\nconvnextv2_large,384,286.29,670.617,192,197.96,101.1,126.74\r\nmaxvit_small_tf_512,512,286.19,447.226,128,69.13,67.26,383.77\r\nswin_large_patch4_window12_384,384,285.72,671.968,192,196.74,104.08,202.16\r\nseresnextaa201d_32x8d,38
4,285.39,3588.099,1024,149.39,101.11,199.72\r\nvit_giant_patch14_224,224,277.63,3688.354,1024,1012.61,267.18,192.64\r\neva_giant_patch14_224,224,275.58,3715.743,1024,1012.56,267.18,192.64\r\nvit_giant_patch14_clip_224,224,274.01,3737.114,1024,1012.65,267.18,192.64\r\neva_giant_patch14_clip_224,224,272.59,3756.469,1024,1012.59,267.18,192.64\r\naimv2_1b_patch14_224,224,270.86,2835.412,768,1234.96,322.43,170.39\r\ncaformer_b36,384,267.85,955.745,256,98.75,72.33,261.79\r\nfocalnet_huge_fl4,224,267.26,1915.733,512,686.46,118.9,113.34\r\nresnetrs420,416,267.21,3832.211,1024,191.89,108.45,213.79\r\nvit_pe_core_large_patch14_336,336,265.98,3849.838,1024,317.15,192.33,271.43\r\ndm_nfnet_f3,416,261.06,2941.777,768,254.92,115.58,141.78\r\nnaflexvit_so400m_patch16_siglip,384,260.54,3930.228,1024,427.89,259.65,319.77\r\nvit_huge_plus_patch16_dinov3,256,257.75,3972.798,1024,840.51,224.88,193.59\r\nvit_huge_plus_patch16_dinov3_qkvb,256,257.61,3974.93,1024,840.59,224.88,193.59\r\nresnetv2_152x4_bit,224,254.68,2010.364,512,936.53,186.9,90.22\r\ndm_nfnet_f4,384,251.56,3052.912,768,316.07,122.14,147.57\r\nconvformer_b36,384,250.92,1020.23,256,99.88,66.67,164.75\r\nvitamin_xlarge_336,336,247.75,387.462,96,436.06,230.18,347.33\r\nxcit_large_24_p8_224,224,237.45,3234.3,768,188.93,141.23,181.56\r\nvitamin_large2_384,384,234.65,272.728,64,333.97,234.44,440.16\r\nvitamin_large_384,384,234.64,272.735,64,333.71,234.44,440.16\r\nswinv2_cr_large_384,384,233.08,823.721,192,196.68,108.96,404.96\r\nvit_so400m_patch16_siglip_gap_384,384,232.69,3300.539,768,413.02,258.11,318.42\r\nvit_giantopt_patch16_siglip_gap_256,256,232.64,4401.651,1024,1134.84,298.42,199.62\r\nvit_giantopt_patch16_siglip_256,256,232.16,4410.682,1024,1163.17,299.66,200.43\r\nvit_so400m_patch16_siglip_384,384,231.75,3313.831,768,428.26,259.65,319.77\r\nresnetv2_50x3_bit,448,224.5,855.185,192,217.32,145.7,133.37\r\nresnetv2_152x2_bit,384,221.33,1734.959,384,236.34,136.16,132.56\r\ncait_s36_384,384,218.21,3519.469,768,68.37,47.99,3
67.4\r\nbeit3_giant_patch14_224,224,216.43,3548.502,768,1013.22,267.56,192.64\r\nconvnextv2_huge,288,215.49,890.946,192,660.29,190.1,130.7\r\neva02_large_patch14_clip_336,336,215.1,3570.331,768,304.43,191.34,289.13\r\nnfnet_f5,416,207.2,4942.054,1024,377.21,170.71,204.56\r\nresnet50x64_clip_gap,448,206.56,1858.971,384,365.03,253.96,233.22\r\nmaxvit_large_tf_384,384,203.91,941.575,192,212.03,132.55,445.84\r\naimv2_large_patch14_448,448,203.82,2512.048,512,309.98,367.84,491.78\r\nxcit_small_24_p8_384,384,199.86,3842.727,768,47.63,105.24,265.91\r\nresnet50x64_clip,448,199.29,1926.845,384,420.38,265.02,239.13\r\naimv2_huge_patch14_336,336,196.79,3902.663,768,681.34,416.36,337.08\r\nmvitv2_huge_cls,224,196.75,2602.218,512,694.8,120.67,243.63\r\nefficientnet_b8,672,196.3,978.046,192,87.41,63.48,442.89\r\nswinv2_base_window12to24_192to384,384,193.55,495.973,96,87.92,55.25,280.36\r\nfocalnet_large_fl3,384,191.75,2002.604,384,239.13,105.06,168.04\r\ntf_efficientnet_b8,672,189.55,1012.906,192,87.41,63.48,442.89\r\nvitamin_xlarge_384,384,187.08,342.084,64,436.06,306.38,493.46\r\nvit_intern300m_patch14_448,448,184.79,4156.038,768,304.01,362.05,656.39\r\nvit_huge_patch14_clip_336,336,184.3,4167.137,768,632.46,390.97,407.54\r\nfocalnet_large_fl4,384,183.35,2094.282,384,239.32,105.2,181.78\r\nvolo_d4_448,448,181.5,2820.875,512,193.41,197.13,527.35\r\nvit_so400m_patch14_siglip_gap_378,378,181.49,4231.623,768,412.99,333.46,451.19\r\nvit_so400m_patch14_siglip_gap_384,384,181.45,4232.516,768,412.99,333.46,451.19\r\nvit_so400m_patch14_siglip_378,378,181.04,4242.138,768,428.23,335.4,452.89\r\nvit_so400m_patch14_siglip_384,384,180.69,4250.432,768,428.23,335.4,452.89\r\ndm_nfnet_f5,416,178.02,4314.021,768,377.21,170.71,204.56\r\ndavit_base_fl,768,177.56,720.876,128,90.37,190.32,530.15\r\nvit_large_patch16_siglip_gap_512,512,174.25,2938.289,512,304.15,361.84,655.36\r\nvit_large_patch16_siglip_512,512,173.64,2948.538,512,316.74,364.0,657.48\r\nnfnet_f4,512,166.96,3066.663,512,316.07,216.26,
262.26\r\nbeit_large_patch16_512,512,166.22,4620.467,768,305.67,362.24,656.39\r\nresnetv2_152x2_bit,448,162.25,1577.744,256,236.34,184.99,180.43\r\nmaxvit_base_tf_512,512,162.13,789.457,128,119.88,138.02,703.99\r\nsam2_hiera_base_plus,896,156.27,409.531,64,68.68,227.48,828.88\r\nregnety_1280,384,156.26,1228.678,192,644.81,374.99,210.2\r\nvit_gigantic_patch14_clip_224,224,156.04,6562.432,1024,1844.91,483.96,275.37\r\nvit_gigantic_patch14_224,224,155.93,6567.125,1024,1844.44,483.95,275.37\r\nnfnet_f6,448,155.81,4929.165,768,438.36,229.7,273.62\r\nvit_pe_lang_large_patch14_448,448,145.18,5290.118,768,291.42,346.99,629.09\r\nvit_gigantic_patch14_clip_quickgelu_224,224,144.3,5322.261,768,1844.91,483.96,275.37\r\nvit_pe_spatial_large_patch14_448,448,143.73,5343.273,768,303.96,362.05,656.39\r\nvit_huge_patch14_clip_378,378,143.72,5343.713,768,632.68,503.79,572.79\r\ndm_nfnet_f4,512,139.46,3671.327,512,316.07,216.26,262.26\r\nresnetv2_101x3_bit,448,137.53,1396.082,192,387.93,280.33,194.78\r\nvit_large_patch14_dinov2,518,136.31,3756.036,512,304.37,507.15,1058.82\r\nxcit_medium_24_p8_384,384,135.83,3769.401,512,84.32,186.67,354.73\r\nvit_large_patch14_reg4_dinov2,518,135.5,3778.503,512,304.37,508.9,1064.02\r\nfocalnet_xlarge_fl3,384,134.85,1898.402,256,408.79,185.61,223.99\r\ndm_nfnet_f6,448,134.33,3811.54,512,438.36,229.7,273.62\r\nvit_huge_patch14_clip_quickgelu_378,378,131.41,3896.141,512,632.68,503.79,572.79\r\nfocalnet_xlarge_fl4,384,130.53,1961.205,256,409.03,185.79,242.31\r\nvit_so400m_patch14_siglip_gap_448,448,126.55,4045.866,512,413.33,487.18,764.26\r\nvit_so400m_patch16_siglip_gap_512,512,126.26,4055.228,512,413.53,487.4,764.26\r\nvit_so400m_patch16_siglip_512,512,125.74,4071.745,512,428.77,490.13,766.65\r\nswinv2_large_window12to24_192to384,384,124.01,516.075,64,196.74,116.15,407.83\r\naimv2_3b_patch14_224,224,123.27,3114.974,384,2720.66,705.91,252.44\r\neva02_large_patch14_448,448,122.68,4173.492,512,305.08,362.33,689.95\r\ntf_efficientnet_l2,475,122.44,1045.369,
128,480.31,172.11,609.89\r\nconvnextv2_huge,384,121.87,787.693,96,660.29,337.96,232.35\r\nnfnet_f5,544,121.57,4211.464,512,377.21,290.97,349.71\r\nvolo_d5_448,448,120.86,4236.274,512,295.91,315.06,737.92\r\nmaxvit_xlarge_tf_384,384,120.79,1059.667,128,475.32,292.78,668.76\r\nnfnet_f7,480,120.09,4263.567,512,499.5,300.08,355.86\r\neva_giant_patch14_336,336,119.32,6436.2,768,1013.01,620.64,550.67\r\nvit_huge_patch16_gap_448,448,118.21,4331.242,512,631.67,544.7,636.83\r\naimv2_1b_patch14_336,336,117.85,3258.454,384,1235.61,743.59,454.16\r\nswinv2_cr_giant_224,224,116.53,2196.827,256,2598.76,483.85,309.15\r\nmaxvit_large_tf_512,512,114.38,839.304,96,212.33,244.75,942.15\r\naimv2_huge_patch14_448,448,106.91,3591.835,384,682.03,774.02,731.38\r\ndm_nfnet_f5,544,104.65,3669.494,384,377.21,290.97,349.71\r\nswinv2_cr_huge_384,384,103.35,928.902,96,657.94,352.04,583.18\r\nvit_giantopt_patch16_siglip_gap_384,384,99.99,5120.72,512,1135.33,694.1,567.12\r\nvit_giantopt_patch16_siglip_384,384,99.8,5130.164,512,1163.66,696.85,568.91\r\ncait_m36_384,384,99.47,3860.574,384,271.22,173.11,734.81\r\nnfnet_f6,576,95.74,4010.799,384,438.36,378.69,452.2\r\nbeit3_giant_patch14_336,336,95.19,2689.469,256,1013.67,621.52,550.67\r\nvolo_d5_512,512,92.94,4131.45,384,296.09,425.09,1105.37\r\nregnety_2560,384,88.24,2175.86,192,1282.6,747.83,296.49\r\nxcit_large_24_p8_384,384,81.65,4702.893,384,188.93,415.0,531.82\r\ndm_nfnet_f6,576,80.9,4746.333,384,438.36,378.69,452.2\r\nnfnet_f7,608,74.17,5177.136,384,499.5,480.39,570.85\r\nconvnextv2_huge,512,68.3,937.067,64,660.29,600.81,413.07\r\nmaxvit_xlarge_tf_512,512,67.2,952.417,64,475.77,534.14,1413.22\r\ndavit_huge_fl,768,66.07,968.61,64,360.64,744.84,1060.3\r\naimv2_1b_patch14_448,448,64.63,2970.542,192,1236.53,1367.03,983.56\r\nresnetv2_152x4_bit,480,55.5,2306.353,128,936.53,844.84,414.26\r\naimv2_3b_patch14_336,336,54.04,3552.743,192,2721.64,1615.48,674.17\r\nvit_gigantic_patch14_clip_378,378,53.54,7172.246,384,1845.7,1429.82,1047.37\r\nsam2_hiera_la
rge,1024,47.77,1004.688,48,212.15,907.48,2190.34\r\nefficientnet_l2,800,44.28,1445.168,64,480.31,479.12,1707.39\r\ntf_efficientnet_l2,800,43.51,1103.218,48,480.31,479.12,1707.39\r\ncait_m48_448,448,43.12,4452.595,192,356.46,329.41,1708.23\r\nvit_giant_patch14_dinov2,518,43.11,5938.517,256,1136.48,1784.2,2757.89\r\nvit_giant_patch14_reg4_dinov2,518,42.91,5966.36,256,1136.48,1790.08,2771.21\r\neva_giant_patch14_560,560,39.67,6453.747,256,1014.45,1906.76,2577.17\r\nmobilenetv5_300m,768,35.33,1811.355,64,294.13,435.74,842.16\r\nmobilenetv5_300m_enc,768,33.97,1883.958,64,294.13,435.74,842.16\r\nswinv2_cr_giant_384,384,33.14,1448.2,48,2598.76,1450.71,1394.86\r\nvit_pe_lang_gigantic_patch14_448,448,33.12,7730.152,256,1740.92,1931.99,1664.88\r\nvit_pe_core_gigantic_patch14_448,448,31.78,8054.523,256,1882.03,2060.12,1774.21\r\nvit_pe_spatial_gigantic_patch14_448,448,31.07,8238.692,256,1851.89,2055.25,1771.04\r\naimv2_3b_patch14_448,448,29.94,3206.076,96,2723.02,2939.61,1462.76\r\nsamvit_base_patch16,1024,25.35,315.505,8,89.67,486.43,1343.27\r\nvit_so400m_patch14_siglip_gap_896,896,24.15,5299.919,128,416.87,2731.49,8492.88\r\nsamvit_large_patch16,1024,13.37,448.772,6,308.28,1493.86,2553.78\r\nsamvit_huge_patch16,1024,9.69,619.462,6,637.03,2982.23,3428.16\r\n"
  },
  {
    "path": "results/benchmark-infer-amp-nchw-pt291-cu130-pro6000maxq-dynamo.csv",
    "content": "model,infer_img_size,infer_samples_per_sec,infer_step_time,infer_batch_size,param_count,infer_gmacs,infer_macts\r\ntest_vit,160,388400.32,5.255,2048,0.37,0.04,0.48\r\ntest_vit2,160,335090.74,6.092,2048,0.46,0.05,0.64\r\ntest_byobnet,160,308359.19,6.621,2048,0.46,0.03,0.43\r\ntest_mambaout,160,277361.57,7.365,2048,0.45,0.03,0.53\r\ntest_efficientnet,160,276204.98,7.398,2048,0.36,0.06,0.55\r\ntest_efficientnet_ln,160,273155.86,7.473,2048,0.36,0.06,0.55\r\ntest_convnext,160,273109.93,7.482,2048,0.27,0.03,0.58\r\ntest_convnext3,160,247107.91,8.26,2048,0.47,0.05,0.63\r\ntest_efficientnet_evos,160,242877.17,8.417,2048,0.36,0.06,0.55\r\ntest_convnext2,160,241153.53,8.476,2048,0.48,0.05,0.63\r\ntest_efficientnet_gn,160,234654.06,8.712,2048,0.36,0.06,0.55\r\ntest_resnet,160,207628.42,9.836,2048,0.47,0.1,0.64\r\ntest_vit3,160,203026.58,10.067,2048,0.93,0.09,1.0\r\ntinynet_e,106,191507.74,10.678,2048,2.04,0.03,0.69\r\ntest_mambaout,192,191181.85,10.696,2048,0.45,0.04,0.77\r\nmobilenetv4_conv_small_035,224,176043.45,11.617,2048,1.91,0.05,0.98\r\ntest_vit4,160,160525.08,12.742,2048,1.02,0.11,1.07\r\nefficientvit_m0,224,159361.59,12.834,2048,2.33,0.08,0.91\r\nmobilenetv4_conv_small_050,224,150715.64,13.572,2048,2.24,0.07,1.18\r\nmobilenetv4_conv_small_035,256,131384.84,15.551,2048,1.91,0.06,1.28\r\nlcnet_035,224,131090.58,15.601,2048,1.64,0.03,1.04\r\nmobilenetv3_small_050,224,130328.49,15.69,2048,1.59,0.03,0.92\r\nmobilenetv4_conv_small_050,256,113257.84,18.057,2048,2.24,0.09,1.55\r\nlcnet_050,224,112231.69,18.229,2048,1.88,0.05,1.26\r\nefficientvit_m1,224,111232.75,18.395,2048,2.96,0.17,1.33\r\ntest_nfnet,160,109811.89,18.615,2048,0.38,0.29,1.2\r\nstarnet_s050,224,108883.56,18.793,2048,0.54,0.09,1.57\r\nshvit_s1,224,108551.35,18.848,2048,6.31,0.24,1.39\r\ntf_mobilenetv3_small_minimal_100,224,104418.19,19.577,2048,2.04,0.06,1.41\r\nefficientvit_m2,224,103014.85,19.862,2048,4.17,0.2,1.47\r\nmobilenetv3_small_075,224,95026.8,21.521,2048,2.04,0.05,1.3\r\nefficientvi
t_m3,224,93675.91,21.841,2048,6.88,0.26,1.62\r\nefficientvit_m4,224,89706.81,22.805,2048,8.78,0.3,1.7\r\nmobilenetv4_conv_small,224,87999.63,23.246,2048,3.77,0.19,1.97\r\nshvit_s2,224,86384.43,23.69,2048,11.45,0.37,1.6\r\ntinynet_d,152,86148.62,23.752,2048,2.34,0.05,1.42\r\nmobilenetv3_small_100,224,86019.54,23.773,2048,2.54,0.06,1.42\r\nrepghostnet_050,224,79007.64,25.899,2048,2.31,0.05,2.02\r\ntf_mobilenetv3_small_075,224,78831.46,25.962,2048,2.04,0.05,1.3\r\nlevit_128s,224,76995.72,26.568,2048,7.76,0.3,1.88\r\nresnet10t,176,76599.61,26.718,2048,5.44,0.7,1.51\r\nlevit_conv_128s,224,75697.3,27.034,2048,7.76,0.3,1.88\r\nlcnet_075,224,75330.99,27.148,2048,2.36,0.1,1.99\r\ntf_mobilenetv3_small_100,224,72232.9,28.314,2048,2.54,0.06,1.42\r\nresnet18,160,70836.8,28.885,2048,11.69,0.93,1.27\r\nmnasnet_small,224,68554.37,29.839,2048,2.03,0.07,2.16\r\nregnetx_002,224,68117.7,30.048,2048,2.68,0.2,2.16\r\nghostnet_050,224,67525.21,30.307,2048,2.59,0.05,1.77\r\nstarnet_s100,224,65603.55,31.189,2048,1.04,0.19,2.68\r\nmobilenetv4_conv_small,256,65559.75,31.217,2048,3.77,0.25,2.57\r\nshvit_s3,224,65418.15,31.28,2048,14.21,0.6,2.33\r\nefficientvit_m5,224,64717.6,31.606,2048,12.44,0.52,2.41\r\nfasternet_t0,224,64626.14,31.671,2048,3.91,0.34,1.97\r\nrepghostnet_058,224,63021.56,32.478,2048,2.54,0.06,2.59\r\nregnety_002,224,62975.36,32.496,2048,3.16,0.2,2.17\r\nstarnet_s150,224,62721.21,32.631,2048,1.56,0.23,2.75\r\nlcnet_100,224,61290.93,33.396,2048,2.95,0.16,2.52\r\nmobilenetv2_035,224,59292.9,34.515,2048,1.68,0.07,2.86\r\nconvnext_zepto_rms,224,54861.15,37.303,2048,2.16,0.3,2.75\r\nlevit_conv_128,224,54479.86,37.559,2048,9.19,0.41,2.71\r\nmnasnet_050,224,53512.34,38.249,2048,2.22,0.11,3.07\r\nlevit_128,224,52101.33,39.287,2048,9.19,0.41,2.71\r\nrepghostnet_080,224,51702.81,39.561,2048,3.27,0.1,3.22\r\nhgnetv2_b0,224,51424.45,39.784,2048,6.0,0.33,2.12\r\nefficientvit_b0,224,50182.11,40.792,2048,3.41,0.1,2.87\r\nrepvgg_a0,224,49190.54,41.615,2048,8.31,1.36,1.79\r\nconvnext_zepto_rms
_ols,224,49011.81,41.767,2048,2.16,0.34,3.15\r\nresnet10t,224,47338.62,43.225,2048,5.44,1.1,2.43\r\nlevit_conv_192,224,47226.13,43.344,2048,10.92,0.66,3.2\r\nregnetx_004,224,47021.27,43.515,2048,5.16,0.4,3.14\r\nmobilenetv2_050,224,46490.67,44.016,2048,1.97,0.1,3.64\r\nsemnasnet_050,224,46255.71,44.238,2048,2.08,0.11,3.44\r\ngernet_s,224,45644.33,44.848,2048,8.17,0.75,2.65\r\nmobileone_s0,224,45613.19,44.874,2048,2.08,0.28,3.79\r\nregnetx_004_tv,224,45442.55,45.04,2048,5.5,0.42,3.17\r\nvit_tiny_r_s16_p8_224,224,45116.43,45.375,2048,6.34,0.44,2.06\r\npit_ti_distilled_224,224,44626.98,45.861,2048,5.1,0.71,6.23\r\nvit_small_patch32_224,224,44624.04,45.868,2048,22.88,1.15,2.5\r\npit_ti_224,224,44509.72,45.99,2048,4.85,0.7,6.19\r\ncs3darknet_focus_s,256,43980.26,46.544,2048,3.27,0.69,2.7\r\nedgenext_xx_small,256,43706.29,46.818,2048,1.33,0.26,3.33\r\nghostnetv3_050,224,43595.29,46.951,2048,2.85,0.05,2.28\r\nlcnet_150,224,43004.78,47.602,2048,4.5,0.34,3.79\r\nresnet34,160,42488.89,48.15,2048,21.8,1.87,1.91\r\nrepghostnet_100,224,42065.24,48.667,2048,4.06,0.15,3.98\r\nfasternet_t1,224,42040.7,48.691,2048,7.6,0.85,3.15\r\nresnet14t,176,41775.83,48.987,2048,10.08,1.07,3.61\r\nmobilenetv4_conv_small,320,40891.76,50.06,2048,3.77,0.39,4.01\r\nlevit_192,224,40880.47,50.068,2048,10.92,0.66,3.2\r\ntinynet_c,184,40802.86,50.162,2048,2.46,0.11,2.87\r\ncs3darknet_s,256,40429.66,50.629,2048,3.28,0.72,2.97\r\nmixer_s32_224,224,40386.6,50.692,2048,19.1,1.0,2.28\r\nnf_regnet_b0,192,39071.18,52.389,2048,8.76,0.37,3.15\r\nstarnet_s2,224,38804.03,52.751,2048,3.68,0.55,4.73\r\nhgnetv2_b1,224,37698.1,54.3,2048,6.34,0.49,2.73\r\nstarnet_s1,224,37520.04,54.549,2048,2.87,0.42,4.99\r\nconvnext_atto_rms,224,37474.19,54.619,2048,3.69,0.55,3.81\r\nrepghostnet_111,224,37461.73,54.637,2048,4.52,0.18,4.38\r\nconvnext_atto,224,37216.29,54.998,2048,3.7,0.55,3.81\r\nresnetv2_18,224,37184.68,55.058,2048,11.69,1.82,2.48\r\nshvit_s4,256,37011.15,55.311,2048,16.55,0.99,3.73\r\ntf_mobilenetv3_large_minimal_100
,224,36969.67,55.366,2048,3.92,0.22,4.4\r\nrepvgg_a1,224,36448.04,56.151,2048,12.79,2.36,2.37\r\nmobilenetv3_large_075,224,36402.07,56.239,2048,3.99,0.16,4.0\r\nmnasnet_075,224,36287.07,56.414,2048,3.17,0.23,4.77\r\nresnet18,224,35768.11,57.213,2048,11.69,1.82,2.48\r\nghostnet_100,224,35119.38,58.293,2048,5.18,0.15,3.55\r\nconvnext_atto_ols,224,35012.79,58.463,2048,3.7,0.58,4.11\r\nregnety_004,224,34884.95,58.674,2048,4.34,0.41,3.89\r\nlevit_conv_256,224,34367.48,59.564,2048,18.86,1.13,4.23\r\nxcit_nano_12_p16_224,224,34099.26,60.038,2048,3.05,0.56,4.17\r\ntf_mobilenetv3_large_075,224,34037.55,60.144,2048,3.99,0.16,4.0\r\ninception_next_atto,224,33673.18,60.785,2048,4.16,0.5,3.63\r\nedgenext_xx_small,288,33500.61,61.102,2048,1.33,0.33,4.21\r\nmobilenetv1_100,224,33342.32,61.395,2048,4.23,0.58,5.04\r\nconvnextv2_atto,224,33222.23,61.616,2048,3.71,0.55,3.81\r\nresnetv2_18d,224,32982.83,62.061,2048,11.71,2.06,3.29\r\nmobilenetv1_100h,224,32788.45,62.434,2048,5.28,0.63,5.09\r\nmobilenetv3_rw,224,32672.6,62.644,2048,5.48,0.23,4.41\r\nregnety_006,224,32574.23,62.827,2048,6.06,0.61,4.33\r\nmobilenetv3_large_100,224,32507.57,62.964,2048,5.48,0.23,4.41\r\nseresnet18,224,32496.63,62.988,2048,11.78,1.82,2.49\r\nlegacy_seresnet18,224,32445.09,63.094,2048,11.78,1.82,2.49\r\nvit_tiny_patch16_224,224,32425.12,63.124,2048,5.72,1.26,5.97\r\nhardcorenas_a,224,32164.8,63.64,2048,5.26,0.23,4.38\r\ntf_efficientnetv2_b0,192,32122.42,63.724,2048,7.14,0.54,3.51\r\ndeit_tiny_patch16_224,224,31998.84,63.968,2048,5.72,1.26,5.97\r\nvit_medium_patch32_clip_224,224,31931.23,64.11,2048,39.69,2.0,3.34\r\nresnet18d,224,31688.39,64.6,2048,11.71,2.06,3.29\r\nhardcorenas_b,224,31627.19,64.73,2048,5.18,0.26,5.09\r\nconvnext_femto,224,31476.09,65.036,2048,5.22,0.79,4.57\r\ndeit_tiny_distilled_patch16_224,224,31457.9,65.078,2048,5.91,1.27,6.01\r\nmnasnet_100,224,31425.55,65.135,2048,4.38,0.33,5.46\r\nese_vovnet19b_slim_dw,224,31423.03,65.149,2048,1.9,0.4,5.28\r\nhgnetv2_b0,288,30953.03,66.122,2048,6.0,0.
54,3.51\r\nrepghostnet_130,224,30806.08,66.44,2048,5.46,0.24,5.24\r\nhardcorenas_c,224,30623.25,66.85,2048,5.52,0.28,5.01\r\nlevit_256,224,30571.53,66.954,2048,18.86,1.13,4.23\r\nmobilenetv2_075,224,30464.45,50.379,1536,2.64,0.22,5.86\r\nsemnasnet_075,224,30359.41,50.565,1536,2.91,0.23,5.54\r\nmobilenet_edgetpu_v2_xs,224,30297.36,67.565,2048,4.46,0.7,4.8\r\ntf_mobilenetv3_large_100,224,30181.96,67.813,2048,5.48,0.23,4.41\r\ndla46_c,224,29941.13,68.377,2048,1.3,0.58,4.5\r\nconvnext_femto_ols,224,29833.74,68.62,2048,5.23,0.82,4.87\r\nregnetx_008,224,29767.32,68.764,2048,7.26,0.81,5.15\r\nlevit_conv_256d,224,29703.28,68.916,2048,26.16,1.39,4.93\r\nmobilenetv4_conv_medium,224,28800.38,71.078,2048,9.72,0.84,5.8\r\nvit_xsmall_patch16_clip_224,224,28755.77,71.186,2048,8.28,1.79,6.65\r\nspnasnet_100,224,28655.31,71.447,2048,4.42,0.35,6.03\r\nrepvgg_b0,224,28649.92,71.45,2048,14.34,3.06,3.07\r\nconvnext_atto_rms,256,28311.24,72.309,2048,3.69,0.71,4.98\r\nstarnet_s3,224,28098.72,72.856,2048,5.75,0.76,6.66\r\nconvnextv2_femto,224,28068.04,72.924,2048,5.23,0.79,4.57\r\ncs3darknet_focus_s,320,27784.68,73.674,2048,3.27,1.08,4.22\r\nefficientformerv2_s0,224,27674.27,73.982,2048,3.6,0.41,5.3\r\npit_xs_224,224,27626.31,74.093,2048,10.62,1.4,7.71\r\nrepghostnet_150,224,27603.48,74.162,2048,6.55,0.31,6.0\r\nmobilenet_edgetpu_100,224,27582.64,74.211,2048,4.09,1.0,5.75\r\nhardcorenas_d,224,27268.18,75.071,2048,7.5,0.3,4.93\r\nese_vovnet19b_slim,224,27182.65,75.308,2048,3.17,1.69,3.52\r\npit_xs_distilled_224,224,27163.98,75.364,2048,11.0,1.41,7.76\r\nmobileone_s1,224,27121.01,75.486,2048,4.76,0.83,6.27\r\nregnety_008,224,27089.63,75.55,2048,6.26,0.81,5.25\r\nlevit_256d,224,26893.6,76.115,2048,26.16,1.39,4.93\r\ntinynet_b,188,26765.38,76.487,2048,3.73,0.21,4.44\r\nsemnasnet_100,224,26570.54,57.779,1536,3.89,0.32,6.23\r\nmobilenetv2_100,224,26548.99,57.822,1536,3.5,0.31,6.68\r\nghostnet_130,224,26537.02,77.136,2048,7.36,0.24,4.6\r\nmobilenetv4_hybrid_medium_075,224,26503.28,77.232,2048,7.3
1,0.66,5.65\r\npvt_v2_b0,224,26440.39,77.427,2048,3.67,0.57,7.99\r\nrepvit_m1,224,26302.72,77.825,2048,5.07,0.82,6.17\r\nrepvit_m0_9,224,26272.19,77.916,2048,5.07,0.82,6.17\r\nfbnetc_100,224,25864.12,59.367,1536,5.57,0.4,6.51\r\nresnet14t,224,25799.47,79.353,2048,10.08,1.69,5.8\r\nefficientnet_lite0,224,25795.96,59.512,1536,4.65,0.4,6.74\r\nregnety_008_tv,224,25761.66,79.462,2048,6.43,0.84,5.42\r\nhrnet_w18_small,224,25686.61,79.683,2048,13.19,1.61,5.72\r\nskresnet18,224,25657.42,79.797,2048,11.96,1.82,3.24\r\nmobilenetv1_100,256,25624.19,59.905,1536,4.23,0.76,6.59\r\ntf_efficientnet_lite0,224,25399.61,60.448,1536,4.65,0.4,6.74\r\nfasternet_t2,224,25327.83,80.83,2048,14.98,1.91,4.73\r\ntf_efficientnetv2_b1,192,25235.59,81.117,2048,8.14,0.76,4.59\r\nmobilenetv1_100h,256,25190.3,60.952,1536,5.28,0.82,6.65\r\nresnetblur18,224,25179.93,81.307,2048,11.69,2.34,3.39\r\nmobilevit_xxs,256,25167.91,81.342,2048,1.27,0.42,8.34\r\nmobilenetv3_large_100,256,24925.54,61.599,1536,5.48,0.29,5.75\r\nmobilenetv1_125,224,24207.31,84.576,2048,6.27,0.89,6.3\r\nregnetx_006,224,24075.67,85.036,2048,6.2,0.61,3.98\r\nresnet50,160,23966.59,85.407,2048,25.56,2.1,5.67\r\nhgnetv2_b2,224,23842.22,85.864,2048,11.22,1.15,4.12\r\nhardcorenas_f,224,23736.95,86.252,2048,8.2,0.35,5.57\r\nedgenext_x_small,256,23637.69,86.601,2048,2.34,0.54,5.93\r\nhardcorenas_e,224,23623.44,86.663,2048,8.07,0.35,5.65\r\nconvnext_pico,224,23444.74,87.322,2048,9.05,1.37,6.1\r\ngmlp_ti16_224,224,23378.45,87.569,2048,5.87,1.34,7.55\r\nswiftformer_xs,224,23321.85,87.786,2048,3.48,0.61,6.45\r\ntf_efficientnetv2_b0,224,23319.8,87.791,2048,7.14,0.73,4.77\r\nresnet50d,160,22595.8,90.607,2048,25.58,2.22,6.08\r\nvit_betwixt_patch32_clip_224,224,22546.95,90.797,2048,61.41,3.09,4.17\r\nhgnetv2_b1,288,22496.7,91.005,2048,6.34,0.82,4.51\r\nresnetv2_18,288,22425.5,91.291,2048,11.69,3.0,4.11\r\nconvnext_pico_ols,224,22261.5,91.966,2048,9.06,1.43,6.5\r\nmobilenetv4_conv_medium,256,22015.31,92.989,2048,9.72,1.1,7.58\r\nconvnext_atto,288,2
1950.3,93.272,2048,3.7,0.91,6.3\r\nmobileone_s2,224,21874.76,93.586,2048,7.81,1.3,7.56\r\nmobilenetv4_hybrid_medium,224,21839.76,93.743,2048,11.07,0.98,6.84\r\nrepvit_m1_0,224,21824.8,93.807,2048,6.81,1.11,7.19\r\nnf_regnet_b0,256,21823.66,93.8,2048,8.76,0.64,5.58\r\ngernet_m,224,21777.53,94.001,2048,21.14,3.02,5.24\r\nmnasnet_140,224,21760.29,94.08,2048,7.12,0.6,7.71\r\nghostnetv2_100,224,21658.59,94.518,2048,6.16,0.18,4.55\r\ntinynet_a,192,21597.47,94.8,2048,6.19,0.35,5.41\r\neva02_tiny_patch14_224,224,21543.7,95.031,2048,5.5,1.7,9.14\r\nresnet34,224,21512.4,95.16,2048,21.8,3.67,3.74\r\nresnet18,288,21482.28,95.3,2048,11.69,3.01,4.11\r\nresnetv2_34,224,21411.42,95.616,2048,21.8,3.67,3.74\r\nefficientformer_l1,224,21246.0,96.356,2048,12.29,1.3,5.53\r\ncrossvit_9_240,240,21233.61,96.419,2048,8.55,1.85,9.52\r\ncrossvit_tiny_240,240,21232.56,96.423,2048,7.01,1.57,9.08\r\nghostnetv3_100,224,21122.81,96.926,2048,6.15,0.17,4.55\r\nlevit_conv_384,224,21090.64,97.067,2048,39.07,2.35,6.26\r\nmobilenet_edgetpu_v2_s,224,21039.07,97.31,2048,5.99,1.21,6.6\r\nrepghostnet_200,224,20945.55,97.748,2048,9.77,0.53,7.96\r\nxcit_tiny_12_p16_224,224,20875.22,98.073,2048,6.72,1.24,6.29\r\nconvnextv2_pico,224,20790.88,98.472,2048,9.07,1.37,6.1\r\nrepvit_m1_1,224,20706.75,98.866,2048,8.24,1.34,7.82\r\nmambaout_femto,224,20698.71,98.913,2048,7.3,1.16,8.34\r\nrepvit_m2,224,20696.18,98.922,2048,8.24,1.34,7.82\r\nconvnext_atto_ols,288,20678.29,99.006,2048,3.7,0.96,6.8\r\nmobilenetv4_conv_blur_medium,224,20602.86,49.682,1024,9.72,1.22,8.58\r\ncs3darknet_focus_m,256,20537.28,99.688,2048,9.3,1.98,4.89\r\nmobilevitv2_050,256,20418.68,75.197,1536,1.37,0.48,8.04\r\nmobilenetv2_110d,224,20249.02,75.82,1536,4.52,0.45,8.71\r\nrexnetr_100,224,20117.4,76.321,1536,4.88,0.43,7.72\r\nresnet34d,224,20031.44,102.208,2048,21.82,3.91,4.54\r\nresnetv2_18d,288,19896.83,102.889,2048,11.71,3.4,5.43\r\nresnetv2_34d,224,19863.15,103.062,2048,21.82,3.91,4.54\r\nrexnet_100,224,19826.14,77.442,1536,4.8,0.41,7.44\r\nstar
net_s4,224,19813.67,103.327,2048,7.48,1.05,9.56\r\nefficientvit_b1,224,19776.53,103.523,2048,9.1,0.53,7.25\r\ncs3darknet_m,256,19739.71,103.719,2048,9.31,2.08,5.28\r\nconvnextv2_atto,288,19701.34,103.912,2048,3.71,0.91,6.3\r\nregnetz_005,224,19581.36,104.557,2048,7.12,0.52,5.86\r\nseresnet18,288,19554.35,104.7,2048,11.78,3.01,4.11\r\nrepvgg_a2,224,19501.5,104.975,2048,25.5,5.12,3.13\r\nresnet50,176,19499.72,104.995,2048,25.56,2.62,6.92\r\nseresnet34,224,19460.64,105.206,2048,21.96,3.67,3.74\r\nefficientformerv2_s1,224,19438.47,105.307,2048,6.19,0.67,7.66\r\nresnet26,224,19387.4,105.597,2048,16.0,2.36,7.35\r\nfbnetv3_b,224,19361.29,105.73,2048,8.6,0.42,6.97\r\nlegacy_seresnet34,224,19325.58,105.926,2048,21.96,3.67,3.74\r\nresnext50_32x4d,160,19186.42,106.709,2048,25.03,2.17,7.35\r\nseresnet50,160,19174.91,106.769,2048,28.09,2.1,5.69\r\ncrossvit_9_dagger_240,240,19103.28,107.172,2048,8.78,1.99,9.97\r\ndla34,224,19083.77,107.287,2048,15.74,3.07,5.02\r\nresnet18d,288,19053.1,107.456,2048,11.71,3.41,5.43\r\nconvnext_femto,288,18881.7,108.426,2048,5.22,1.3,7.56\r\nselecsls42,224,18763.15,109.118,2048,30.35,2.94,4.62\r\nhgnetv2_b3,224,18759.53,109.136,2048,16.29,1.78,5.07\r\ntf_efficientnetv2_b2,208,18736.03,109.275,2048,10.1,1.06,6.0\r\nselecsls42b,224,18732.3,109.3,2048,32.46,2.98,4.62\r\nswiftformer_s,224,18656.58,109.74,2048,6.09,0.99,7.81\r\nmobilenetv1_125,256,18577.26,82.64,1536,6.27,1.16,8.23\r\nsemnasnet_140,224,18505.69,55.299,1024,6.11,0.6,8.87\r\nedgenext_x_small,288,18411.79,111.2,2048,2.34,0.68,7.5\r\necaresnet50t,160,18385.19,111.358,2048,25.57,2.21,6.04\r\nmobilenetv2_140,224,18250.34,56.076,1024,6.11,0.6,9.57\r\nnf_resnet26,224,18124.58,112.957,2048,16.0,2.41,7.35\r\nlevit_384,224,18088.21,113.182,2048,39.07,2.35,6.26\r\nese_vovnet19b_dw,224,17958.74,114.003,2048,6.54,1.34,8.25\r\nresnetrs50,160,17934.73,114.154,2048,35.69,2.29,6.2\r\nconvnext_femto_ols,288,17775.04,115.18,2048,5.23,1.35,8.06\r\nresnet26d,224,17704.51,115.635,2048,16.01,2.6,8.15\r\nvit_bas
e_patch32_clip_quickgelu_224,224,17631.2,116.117,2048,87.85,4.41,5.01\r\nvit_base_patch32_clip_224,224,17582.34,116.446,2048,88.22,4.41,5.01\r\nresnetaa34d,224,17547.98,116.669,2048,21.82,4.43,5.07\r\nefficientnet_es_pruned,224,17532.05,116.788,2048,5.44,1.81,8.73\r\npoolformerv2_s12,224,17391.7,117.724,2048,11.89,1.83,5.53\r\nefficientnet_es,224,17390.24,117.726,2048,5.44,1.81,8.73\r\nmobileone_s3,224,17372.95,117.852,2048,10.08,1.9,9.13\r\nefficientnet_b0,224,17361.19,88.428,1536,5.29,0.4,6.75\r\nmambaout_kobe,224,17218.63,118.91,2048,9.14,1.52,10.0\r\ntiny_vit_5m_224,224,17203.03,119.011,2048,12.08,1.27,11.25\r\ntf_efficientnet_es,224,17199.98,119.039,2048,5.44,1.81,8.73\r\nefficientnet_lite1,240,17188.78,89.322,1536,5.42,0.62,10.14\r\ntf_efficientnet_lite1,240,17130.73,59.751,1024,5.42,0.62,10.14\r\nconvnextv2_femto,288,17059.94,120.008,2048,5.23,1.3,7.56\r\nvit_base_patch32_224,224,17057.55,120.03,2048,88.22,4.41,5.01\r\nnf_seresnet26,224,16879.29,121.292,2048,17.4,2.41,7.36\r\nnf_ecaresnet26,224,16875.61,121.319,2048,16.0,2.41,7.36\r\nresmlp_12_224,224,16601.89,123.312,2048,15.35,3.01,5.5\r\nfbnetv3_d,224,16578.11,123.5,2048,10.31,0.52,8.5\r\nmobilenetv4_hybrid_medium,256,16577.75,123.504,2048,11.07,1.29,9.01\r\nmobilenet_edgetpu_v2_m,224,16489.54,124.158,2048,8.46,1.85,8.15\r\ndarknet17,256,16442.33,124.526,2048,14.3,3.26,7.18\r\nghostnetv2_130,224,16434.43,124.562,2048,8.96,0.28,5.9\r\nselecsls60,224,16432.32,124.594,2048,30.67,3.59,5.52\r\nselecsls60b,224,16368.06,125.073,2048,32.77,3.63,5.52\r\npit_s_distilled_224,224,16253.04,125.966,2048,24.04,2.9,11.64\r\nnf_regnet_b2,240,16235.26,126.104,2048,14.31,0.97,7.23\r\nnf_regnet_b1,256,16220.93,126.221,2048,10.22,0.82,7.27\r\npoolformer_s12,224,16207.3,126.319,2048,11.92,1.82,5.53\r\ncs3darknet_focus_m,288,16170.78,126.62,2048,9.3,2.51,6.19\r\nghostnetv3_130,224,16085.79,127.28,2048,8.95,0.28,5.9\r\npit_s_224,224,16069.82,127.405,2048,23.46,2.88,11.56\r\ngmixer_12_224,224,15983.07,128.103,2048,12.7,2.67,7.26\r
\nmobilenetv4_conv_blur_medium,256,15920.06,48.208,768,9.72,1.59,11.2\r\ncs3darknet_m,288,15684.76,130.542,2048,9.31,2.63,6.69\r\nresnext50_32x4d,176,15633.26,130.968,2048,25.03,2.71,8.97\r\nedgenext_small,256,15576.48,131.446,2048,5.59,1.26,9.07\r\nhgnetv2_b4,224,15570.42,131.489,2048,19.8,2.75,6.7\r\ntf_efficientnetv2_b1,240,15478.02,132.277,2048,8.14,1.21,7.34\r\nefficientvit_b1,256,15471.92,99.243,1536,9.1,0.69,9.46\r\nmobilenetv4_conv_aa_medium,256,15422.64,132.758,2048,9.72,1.58,10.3\r\nconvnext_nano,224,15347.63,133.41,2048,15.59,2.46,8.37\r\nefficientnet_b1_pruned,240,15271.66,134.071,2048,6.33,0.4,6.21\r\nresnetblur18,288,15203.35,100.985,1536,11.69,3.87,5.6\r\nvit_small_patch32_384,384,15189.03,134.796,2048,22.92,3.45,8.25\r\nskresnet34,224,15170.24,134.957,2048,22.28,3.67,5.13\r\nmixnet_s,224,15133.12,101.465,1536,4.13,0.25,6.25\r\nrexnetr_130,224,15097.46,67.794,1024,7.61,0.68,9.81\r\nmixer_b32_224,224,15085.51,135.725,2048,60.29,3.24,6.29\r\nefficientnet_b0_gn,224,15019.83,102.228,1536,5.29,0.42,6.75\r\nresnet101,160,14994.43,136.54,2048,44.55,4.0,8.28\r\nmobilenetv2_120d,224,14978.24,68.319,1024,5.83,0.69,11.97\r\ndarknet21,256,14973.9,136.727,2048,20.86,3.93,7.47\r\nfbnetv3_b,256,14888.71,103.129,1536,8.6,0.55,9.1\r\nvit_tiny_r_s16_p8_384,384,14801.7,138.318,2048,6.36,1.34,6.49\r\nfastvit_t8,256,14662.5,139.618,2048,4.0,0.69,6.59\r\nvisformer_tiny,224,14565.29,140.573,2048,10.32,1.27,5.72\r\nmobilenet_edgetpu_v2_l,224,14539.8,140.802,2048,10.92,2.55,9.05\r\ntf_efficientnet_b0,224,14500.04,105.893,1536,5.29,0.4,6.75\r\nswiftformer_l1,224,14491.99,141.269,2048,12.06,1.6,10.07\r\nmixer_s16_224,224,14402.06,142.156,2048,18.53,3.79,5.97\r\ntf_mixnet_s,224,14372.33,106.838,1536,4.13,0.25,6.25\r\nhgnetv2_b2,288,14320.53,142.977,2048,11.22,1.89,6.8\r\ntiny_vit_11m_224,224,14263.98,143.534,2048,20.35,2.03,13.49\r\nrepvit_m3,224,14255.92,143.622,2048,10.12,1.86,11.43\r\ngernet_l,256,14178.37,144.402,2048,31.08,4.57,8.0\r\nrexnet_130,224,14177.54,72.197,1024,7.5
6,0.68,9.71\r\ndpn48b,224,14129.28,144.9,2048,9.13,1.69,8.92\r\nconvnext_pico,288,14019.42,146.046,2048,9.05,2.27,10.08\r\nresnext26ts,256,13986.95,146.379,2048,10.3,2.43,10.52\r\nmobilenetv4_conv_medium,320,13977.29,109.861,1536,9.72,1.71,11.84\r\nconvnextv2_nano,224,13853.98,147.797,2048,15.62,2.46,8.37\r\nmobilevitv2_075,256,13843.69,73.943,1024,2.87,1.05,12.06\r\nvit_base_patch32_clip_256,256,13695.35,149.501,2048,87.86,5.76,6.65\r\nrexnetr_150,224,13634.01,75.071,1024,9.78,0.89,11.13\r\nresnet26t,256,13632.9,150.184,2048,16.01,3.35,10.52\r\ndeit_small_patch16_224,224,13538.43,151.229,2048,22.05,4.61,11.95\r\nfasternet_s,224,13523.83,151.404,2048,31.18,4.56,7.93\r\nvit_small_patch16_224,224,13517.15,151.465,2048,22.05,4.61,11.95\r\nconvnext_nano_ols,224,13493.82,151.738,2048,15.65,2.65,9.38\r\ndeit3_small_patch16_224,224,13474.72,151.946,2048,22.06,4.61,11.95\r\nefficientnet_b0,256,13405.63,76.354,1024,5.29,0.52,8.81\r\nefficientnet_lite2,260,13380.92,76.489,1024,6.09,0.89,12.9\r\ndeit_small_distilled_patch16_224,224,13359.01,153.264,2048,22.44,4.63,12.02\r\nconvnext_pico_ols,288,13350.11,153.373,2048,9.06,2.37,10.74\r\nvit_base_patch32_siglip_gap_256,256,13332.93,153.572,2048,87.47,5.67,6.54\r\nvit_wee_patch16_reg1_gap_256,256,13312.03,153.815,2048,13.42,3.83,13.9\r\nghostnetv2_160,224,13295.7,153.996,2048,12.39,0.42,7.23\r\nvit_base_patch32_siglip_256,256,13278.8,154.184,2048,94.55,5.75,6.64\r\nsedarknet21,256,13236.05,154.689,2048,20.95,3.93,7.47\r\ntf_efficientnet_lite2,260,13230.57,77.375,1024,6.09,0.89,12.9\r\nrepvgg_b1g4,224,13220.7,154.864,2048,36.13,7.31,5.32\r\nlegacy_seresnext26_32x4d,224,13178.88,155.352,2048,16.79,2.49,9.39\r\nefficientnet_blur_b0,224,13098.52,117.233,1536,5.29,0.43,8.72\r\ngcresnext26ts,256,13088.21,156.441,2048,10.48,2.43,10.53\r\nghostnetv3_160,224,12964.48,157.93,2048,12.38,0.41,7.23\r\nresnet34,288,12940.89,158.227,2048,21.8,6.07,6.18\r\nresnetv2_34,288,12869.98,159.095,2048,21.8,6.07,6.18\r\npvt_v2_b1,224,12828.13,79.793,1024,
14.01,2.12,15.39\r\nvit_pwee_patch16_reg1_gap_256,256,12805.65,159.889,2048,15.25,4.37,15.87\r\nresnest14d,224,12767.7,160.366,2048,10.61,2.76,7.33\r\nnf_regnet_b1,288,12766.36,160.378,2048,10.22,1.02,9.2\r\nhrnet_w18_small_v2,224,12756.0,160.502,2048,15.6,2.62,9.65\r\nfbnetv3_d,256,12737.51,120.555,1536,10.31,0.68,11.1\r\nnf_regnet_b2,272,12718.76,160.976,2048,14.31,1.22,9.27\r\necaresnet50d_pruned,224,12710.83,161.087,2048,19.94,2.53,6.43\r\neca_resnext26ts,256,12667.73,161.63,2048,10.3,2.43,10.52\r\nseresnext26ts,256,12661.93,161.702,2048,10.39,2.43,10.52\r\nrepvit_m1_5,224,12657.7,161.757,2048,14.05,2.27,12.84\r\nconvnextv2_pico,288,12641.03,161.964,2048,9.07,2.27,10.08\r\nmobilenetv4_conv_large,256,12599.06,162.5,2048,32.59,2.86,12.14\r\ncs3darknet_focus_l,256,12596.97,162.544,2048,21.15,4.66,8.03\r\nrexnet_150,224,12578.61,81.375,1024,9.73,0.9,11.21\r\nbotnet26t_256,256,12576.53,162.802,2048,12.49,3.32,11.98\r\nmobilenet_edgetpu_v2_m,256,12575.62,162.814,2048,8.46,2.42,10.65\r\necaresnext50t_32x4d,224,12547.21,163.184,2048,15.41,2.7,10.09\r\necaresnext26t_32x4d,224,12532.41,163.375,2048,15.41,2.7,10.09\r\nseresnext26t_32x4d,224,12498.8,163.812,2048,16.81,2.7,10.09\r\nvit_relpos_small_patch16_224,224,12467.4,164.223,2048,21.98,4.59,13.05\r\nvit_small_patch16_rope_224,224,12406.9,165.02,2048,21.98,4.61,11.95\r\nhalonet26t,256,12405.46,165.051,2048,12.48,3.19,11.69\r\nseresnext26d_32x4d,224,12356.26,165.705,2048,16.81,2.73,10.19\r\nefficientformerv2_s2,224,12347.96,165.821,2048,12.71,1.27,11.77\r\nvit_srelpos_small_patch16_224,224,12328.6,166.085,2048,21.97,4.59,12.16\r\nvit_small_patch16_rope_ape_224,224,12300.06,166.457,2048,22.06,4.61,11.95\r\nmobileone_s4,224,12289.77,166.601,2048,14.84,2.98,11.81\r\ncs3darknet_l,256,12242.87,167.237,2048,21.16,4.86,8.55\r\nresnet101,176,12217.49,167.589,2048,44.55,4.92,10.08\r\ngc_efficientnetv2_rw_t,224,12202.58,167.796,2048,13.68,1.94,9.97\r\nresnet50,224,12188.25,167.988,2048,25.56,4.11,11.11\r\ncsatv2,512,12167.31,168.28
6,2048,11.1,1.39,9.17\r\nefficientnet_b1,224,12107.54,126.823,1536,7.79,0.59,9.36\r\necaresnetlight,224,12052.89,169.882,2048,30.16,4.11,8.42\r\ndpn68,224,12023.49,170.296,2048,12.61,2.35,10.47\r\nresnet34d,288,12019.39,170.353,2048,21.82,6.47,7.51\r\nefficientvit_b1,288,11971.86,128.262,1536,9.1,0.87,11.96\r\nresnetv2_34d,288,11953.47,171.289,2048,21.82,6.46,7.51\r\nflexivit_small,240,11838.18,172.959,2048,22.06,5.35,14.18\r\nmobilenetv3_large_150d,256,11831.47,86.517,1024,14.62,1.03,12.35\r\nmobilevit_xs,256,11825.72,64.917,768,2.32,1.05,16.33\r\nmobilenetv4_hybrid_large_075,256,11821.96,173.193,2048,22.75,2.06,11.64\r\ndla46x_c,224,11812.25,173.333,2048,1.07,0.54,5.66\r\nregnetz_005,288,11790.25,173.669,2048,7.12,0.86,9.68\r\nhgnet_tiny,224,11767.54,174.014,2048,14.74,4.54,6.36\r\nresnet26,288,11736.54,174.465,2048,16.0,3.9,12.15\r\neca_botnext26ts_256,256,11712.04,174.825,2048,10.59,2.46,11.6\r\nseresnet34,288,11711.29,174.828,2048,21.96,6.07,6.18\r\nresnet50c,224,11704.86,174.937,2048,25.58,4.35,11.92\r\ncoat_lite_tiny,224,11700.15,175.002,2048,5.72,1.6,11.65\r\ndpn68b,224,11656.94,175.649,2048,12.61,2.35,10.47\r\nvit_small_resnet26d_224,224,11627.39,176.096,2048,63.61,5.07,11.12\r\nvit_small_r26_s32_224,224,11622.98,176.163,2048,36.43,3.56,9.85\r\nresnet32ts,256,11614.14,176.297,2048,17.96,4.63,11.58\r\neca_nfnet_l0,224,11608.33,176.389,2048,24.14,4.35,10.47\r\ndla60,224,11582.38,176.775,2048,22.04,4.26,10.16\r\nresnet50t,224,11524.51,177.671,2048,25.57,4.32,11.82\r\nvit_relpos_small_patch16_rpn_224,224,11512.87,177.852,2048,21.97,4.59,13.05\r\ndla60x_c,224,11509.87,177.894,2048,1.32,0.59,6.01\r\nnfnet_l0,224,11495.38,178.119,2048,35.07,4.36,10.47\r\nxcit_nano_12_p16_384,384,11462.43,178.63,2048,3.05,1.64,12.15\r\nresnet33ts,256,11456.8,178.714,2048,19.68,4.76,11.66\r\ntresnet_m,224,11456.19,178.726,2048,31.39,5.75,7.31\r\nresnet50d,224,11437.76,179.017,2048,25.58,4.35,11.92\r\nxcit_tiny_24_p16_224,224,11390.63,179.752,2048,12.12,2.34,11.82\r\neca_halonext26ts
,256,11366.78,180.131,2048,10.76,2.44,11.46\r\nlevit_conv_512,224,11330.05,180.721,2048,95.08,5.62,10.22\r\nefficientnetv2_rw_t,224,11299.55,181.203,2048,13.65,1.93,9.94\r\nresnetv2_50,224,11273.05,181.636,2048,25.55,4.11,11.11\r\necaresnet26t,256,11269.85,181.681,2048,16.01,3.35,10.53\r\ntf_efficientnetv2_b2,260,11258.95,181.856,2048,10.1,1.72,9.84\r\nhgnetv2_b3,288,11246.47,182.054,2048,16.29,2.94,8.38\r\nbat_resnext26ts,256,11124.96,184.051,2048,10.73,2.53,12.51\r\nresnext26ts,288,11045.83,185.367,2048,10.3,3.07,13.31\r\nvit_dwee_patch16_reg1_gap_256,256,11026.66,185.686,2048,13.43,3.83,17.6\r\nwide_resnet50_2,176,10999.0,186.158,2048,68.88,7.29,8.97\r\nese_vovnet19b_dw,288,10881.13,141.121,1536,6.54,2.22,13.63\r\nresnetaa50,224,10852.37,188.665,2048,25.56,5.15,11.64\r\nconvit_tiny,224,10845.71,188.793,2048,5.71,1.26,7.94\r\ncoat_lite_mini,224,10763.72,190.235,2048,11.01,2.0,12.25\r\nefficientvit_b2,224,10714.47,143.323,1536,24.33,1.6,14.62\r\nvit_small_patch16_rope_mixed_224,224,10706.48,191.233,2048,21.99,4.61,12.85\r\nedgenext_small_rw,256,10691.49,191.518,2048,7.83,1.58,9.51\r\nresnetv2_50t,224,10682.88,191.667,2048,25.57,4.32,11.82\r\nvit_small_patch16_rope_mixed_ape_224,224,10679.4,191.728,2048,22.06,4.61,12.85\r\nvgg11,224,10677.69,191.765,2048,132.86,7.61,7.44\r\nresnet26d,288,10658.71,192.094,2048,16.01,4.29,13.48\r\nefficientnet_b0_g16_evos,224,10648.36,192.289,2048,8.11,1.01,7.42\r\ngcresnet33ts,256,10638.37,192.472,2048,19.88,4.76,11.68\r\nvgg11_bn,224,10636.23,192.5,2048,132.87,7.62,7.44\r\nresnet152,160,10633.87,192.545,2048,60.19,5.9,11.51\r\nresnetv2_50d,224,10629.34,192.633,2048,25.57,4.35,11.92\r\nefficientnet_em,240,10590.39,193.343,2048,6.9,3.04,14.34\r\nlevit_conv_512d,224,10562.28,193.851,2048,92.39,5.84,11.3\r\nresnetaa34d,288,10555.61,193.982,2048,21.82,7.33,8.38\r\nfastvit_t12,256,10502.53,146.213,1536,7.51,1.39,9.57\r\nvovnet39a,224,10497.59,195.049,2048,22.6,7.09,6.73\r\nvit_base_patch32_plus_256,256,10491.87,195.155,2048,119.48,7.79,7.
76\r\ntf_efficientnet_em,240,10455.7,195.835,2048,6.9,3.04,14.34\r\nnf_ecaresnet50,224,10422.38,196.459,2048,25.56,4.21,11.13\r\nregnetx_016,224,10406.32,196.76,2048,9.19,1.62,7.93\r\nres2net50_48w_2s,224,10395.81,196.962,2048,25.29,4.18,11.72\r\ncoatnext_nano_rw_224,224,10390.17,197.067,2048,14.7,2.47,12.8\r\nefficientnet_b1,240,10387.16,147.834,1536,7.79,0.71,10.88\r\nnf_seresnet50,224,10385.27,197.159,2048,28.09,4.21,11.13\r\ngcresnext26ts,288,10360.17,197.642,2048,10.48,3.07,13.33\r\ngmlp_s16_224,224,10345.72,197.907,2048,19.42,4.42,15.1\r\nmobilevitv2_100,256,10301.35,74.524,768,4.9,1.84,16.08\r\nresnet50_clip_gap,224,10282.33,199.138,2048,23.53,5.39,12.44\r\nvit_dpwee_patch16_reg1_gap_256,256,10278.98,199.199,2048,15.25,4.37,19.05\r\necaresnet101d_pruned,224,10272.29,199.327,2048,24.88,3.48,7.69\r\nresnetaa50d,224,10271.61,199.342,2048,25.58,5.39,12.44\r\nseresnet33ts,256,10266.25,199.438,2048,19.78,4.76,11.66\r\neca_resnet33ts,256,10264.59,199.483,2048,19.68,4.76,11.66\r\nresnetblur50,224,10261.09,199.547,2048,25.56,5.16,12.02\r\nefficientnet_b2_pruned,260,10237.0,200.022,2048,8.31,0.73,9.13\r\nmobilenetv4_hybrid_medium,320,10193.65,150.65,1536,11.07,2.05,14.36\r\ntf_efficientnetv2_b3,240,10176.88,201.197,2048,14.36,1.93,9.95\r\nlevit_512,224,10110.37,202.527,2048,95.08,5.62,10.22\r\nvit_tiny_patch16_384,384,10103.21,202.662,2048,5.79,4.7,25.39\r\ncs3sedarknet_l,256,10049.75,203.748,2048,21.91,4.86,8.56\r\nlegacy_seresnet50,224,10040.17,203.913,2048,28.09,3.88,10.6\r\nseresnext26ts,288,10031.93,204.104,2048,10.39,3.07,13.32\r\neca_resnext26ts,288,10009.9,204.559,2048,10.3,3.07,13.32\r\nese_vovnet39b,224,10005.35,204.644,2048,24.57,7.09,6.74\r\neca_vovnet39b,224,9985.77,205.045,2048,22.6,7.09,6.74\r\nselecsls84,224,9930.36,206.191,2048,50.95,5.9,7.57\r\nresnet50s,224,9913.12,206.552,2048,25.68,5.47,13.52\r\nregnety_016,224,9901.83,206.779,2048,11.2,1.63,8.04\r\nedgenext_small,320,9892.99,206.976,2048,5.59,1.97,14.16\r\nnf_regnet_b3,288,9888.15,207.06,2048,18.5
9,1.67,11.84\r\nrexnetr_200,224,9885.86,77.655,768,16.52,1.59,15.11\r\ncs3darknet_focus_l,288,9884.97,207.138,2048,21.15,5.9,10.16\r\nskresnet50,224,9792.48,209.09,2048,25.8,4.11,12.5\r\nmambaout_femto,288,9778.33,209.401,2048,7.3,1.91,13.79\r\nresnetblur50d,224,9769.32,209.594,2048,25.58,5.4,12.82\r\nresnext50_32x4d,224,9763.01,209.729,2048,25.03,4.26,14.4\r\nmobilenetv4_conv_medium,384,9747.68,105.024,1024,9.72,2.46,17.05\r\nseresnet50,224,9715.55,210.744,2048,28.09,4.11,11.13\r\nresnet50_gn,224,9694.01,211.211,2048,25.56,4.14,11.11\r\nfastvit_s12,256,9684.9,158.566,1536,9.43,1.8,10.82\r\nrepvgg_b1,224,9662.72,211.9,2048,51.83,11.82,5.32\r\nmixnet_m,224,9662.57,211.914,2048,5.01,0.36,8.19\r\ndla60x,224,9659.85,211.973,2048,17.35,3.54,13.8\r\ncs3darknet_l,288,9626.37,212.71,2048,21.16,6.16,10.83\r\nvit_relpos_base_patch32_plus_rpn_256,256,9594.24,213.414,2048,119.42,7.68,8.01\r\nresnet50_clip,224,9544.32,214.528,2048,38.32,6.14,12.98\r\ninception_v3,299,9538.09,214.675,2048,23.83,5.73,8.97\r\nresnest26d,224,9535.48,214.724,2048,17.07,3.64,9.97\r\ncrossvit_small_240,240,9527.76,214.904,2048,26.86,5.63,18.17\r\ncoatnet_pico_rw_224,224,9525.32,53.729,512,10.85,2.05,14.62\r\ninception_next_tiny,224,9523.07,215.01,2048,28.06,4.19,11.98\r\nfastvit_sa12,256,9467.34,162.21,1536,11.55,1.94,11.24\r\nlevit_512d,224,9466.44,216.299,2048,92.39,5.84,11.3\r\ncspresnet50,256,9457.81,216.497,2048,21.62,4.54,11.5\r\nconvnext_tiny,224,9413.54,217.506,2048,28.59,4.47,13.44\r\nhgnetv2_b4,288,9387.56,218.122,2048,19.8,4.54,11.08\r\ndensenet121,224,9364.37,218.664,2048,7.98,2.87,6.9\r\nxcit_nano_12_p8_224,224,9357.97,218.8,2048,3.05,2.16,15.71\r\nefficientnet_b1,256,9334.87,109.653,1024,7.79,0.77,12.22\r\nvit_pe_core_tiny_patch16_384,384,9328.98,219.494,2048,6.14,4.74,25.62\r\nvit_medium_patch16_clip_224,224,9325.53,219.568,2048,38.59,8.0,15.93\r\nskresnet50d,224,9321.9,219.655,2048,25.82,4.36,13.31\r\nconvnext_tiny_hnf,224,9318.65,219.733,2048,28.59,4.47,13.44\r\necaresnet50t,224,9308.5
8,219.972,2048,25.57,4.32,11.83\r\neva02_tiny_patch14_336,336,9296.01,220.269,2048,5.76,4.68,27.16\r\nseresnet50t,224,9290.81,220.392,2048,28.1,4.32,11.83\r\nresnext50d_32x4d,224,9256.15,221.215,2048,25.05,4.5,15.2\r\nxcit_small_12_p16_224,224,9245.24,221.481,2048,26.25,4.82,12.58\r\nese_vovnet39b_evos,224,9219.85,222.088,2048,24.58,7.07,6.74\r\necaresnet50d,224,9219.32,222.102,2048,25.58,4.35,11.93\r\nvit_base_resnet26d_224,224,9202.48,222.502,2048,101.4,6.97,13.16\r\nefficientnet_b0_g8_gn,224,9202.4,222.504,2048,6.56,0.66,6.75\r\ntf_efficientnet_b1,240,9185.34,111.447,1024,7.79,0.71,10.88\r\nresnet32ts,288,9183.44,222.968,2048,17.96,5.86,14.65\r\nefficientformer_l3,224,9181.5,223.015,2048,31.41,3.93,12.01\r\nconvnext_nano,288,9176.59,223.135,2048,15.59,4.06,13.84\r\nrexnet_200,224,9152.74,83.882,768,16.37,1.56,14.91\r\nresnetrs50,224,9127.16,224.337,2048,35.69,4.48,12.14\r\ncaformer_s18,224,9115.29,224.64,2048,26.34,4.13,19.39\r\ncspresnet50d,256,9099.9,225.022,2048,21.64,4.86,12.55\r\ntf_mixnet_m,224,9098.04,225.047,2048,5.01,0.36,8.19\r\nfbnetv3_g,240,9084.08,169.043,1536,16.62,1.28,14.87\r\nresnet33ts,288,9068.71,225.79,2048,19.68,6.02,14.75\r\neva02_small_patch14_224,224,9051.07,226.229,2048,21.62,6.14,18.28\r\npoolformerv2_s24,224,9010.05,227.257,2048,21.34,3.42,10.68\r\nmobilevit_s,256,9009.25,85.219,768,5.58,2.03,19.94\r\ncrossvit_15_240,240,8975.93,228.125,2048,27.53,5.81,19.77\r\ncspresnet50w,256,8971.85,228.224,2048,28.12,5.04,12.19\r\nvit_little_patch16_reg1_gap_256,256,8970.39,228.27,2048,22.52,6.27,18.06\r\nvit_little_patch16_reg4_gap_256,256,8930.0,229.293,2048,22.52,6.35,18.33\r\ntwins_pcpvt_small,224,8908.66,229.845,2048,24.11,3.83,18.08\r\nconvformer_s18,224,8879.64,230.605,2048,26.77,3.96,15.82\r\ndeit3_medium_patch16_224,224,8878.32,230.624,2048,38.85,8.0,15.93\r\nefficientnet_b2,256,8820.08,116.066,1024,9.11,0.89,12.81\r\ngcvit_xxtiny,224,8808.8,232.452,2048,12.0,2.14,15.36\r\nhaloregnetz_b,224,8785.05,233.089,2048,11.68,1.97,11.94\r\ntiny_vit_
21m_224,224,8777.36,174.956,1536,33.21,4.27,20.08\r\nvovnet57a,224,8748.1,234.062,2048,36.64,8.95,7.52\r\nres2net50_26w_4s,224,8739.82,234.281,2048,25.7,4.28,12.61\r\ndensenetblur121d,224,8720.16,234.821,2048,8.0,3.11,7.9\r\nregnetz_b16,224,8686.96,235.716,2048,9.72,1.45,9.95\r\nresnetv2_50x1_bit,224,8685.25,235.748,2048,25.55,4.23,11.11\r\nresnet26t,320,8679.77,235.905,2048,16.01,5.24,16.44\r\nconvnextv2_tiny,224,8668.1,236.22,2048,28.64,4.47,13.44\r\nresnet152,176,8663.89,236.33,2048,60.19,7.22,13.99\r\nhgnetv2_b5,224,8643.84,236.881,2048,39.57,6.56,11.19\r\ngcresnext50ts,256,8629.99,237.27,2048,15.67,3.75,15.46\r\nresmlp_24_224,224,8612.73,237.749,2048,30.02,5.96,10.91\r\nnf_resnet50,256,8606.14,237.922,2048,25.56,5.46,14.52\r\nregnetx_032,224,8539.38,239.788,2048,15.3,3.2,11.37\r\nswiftformer_l3,224,8533.23,239.954,2048,28.49,4.01,15.77\r\ngcresnet50t,256,8520.41,240.314,2048,25.9,5.42,14.67\r\nefficientvit_l1,224,8474.39,120.801,1024,52.65,5.27,15.85\r\nmambaout_kobe,288,8449.48,242.34,2048,9.14,2.5,16.53\r\nseresnetaa50d,224,8446.53,242.425,2048,28.11,5.4,12.46\r\ngcresnet33ts,288,8435.37,242.748,2048,19.88,6.02,14.78\r\ndla60_res2net,224,8415.65,243.294,2048,20.85,4.15,12.34\r\nres2net50_14w_8s,224,8407.34,243.549,2048,25.06,4.21,13.28\r\nconvnextv2_nano,288,8398.16,243.824,2048,15.62,4.06,13.84\r\nresnetv2_50d_frn,224,8394.24,243.94,2048,25.59,4.33,11.92\r\nsehalonet33ts,256,8389.8,244.06,2048,13.69,3.55,14.7\r\nres2next50,224,8384.43,244.216,2048,24.67,4.2,13.71\r\ncrossvit_15_dagger_240,240,8379.3,244.373,2048,28.21,6.13,20.43\r\ndla60_res2next,224,8361.87,244.878,2048,17.03,3.49,13.17\r\nvit_relpos_medium_patch16_224,224,8353.94,245.113,2048,38.75,7.97,17.02\r\ngmixer_24_224,224,8352.81,245.153,2048,24.72,5.28,14.45\r\nres2net50d,224,8324.61,245.972,2048,25.72,4.52,13.41\r\nvit_relpos_medium_patch16_cls_224,224,8321.83,246.055,2048,38.76,8.03,18.24\r\nvit_base_r26_s32_224,224,8311.12,246.375,2048,101.38,6.81,12.36\r\nregnetz_b16_evos,224,8305.15,246.554,2
048,9.74,1.43,9.95\r\nefficientvit_b2,256,8285.4,123.556,1024,24.33,2.09,19.03\r\nvisformer_small,224,8244.61,248.369,2048,40.22,4.88,11.43\r\nresnetv2_50d_gn,224,8221.03,249.056,2048,25.57,4.38,11.92\r\nvit_srelpos_medium_patch16_224,224,8219.8,249.122,2048,38.74,7.96,16.21\r\nedgenext_base,256,8177.74,250.398,2048,18.51,3.85,15.58\r\nresnetv2_50d_evos,224,8171.76,250.575,2048,25.59,4.33,11.92\r\nconvnext_nano_ols,288,8156.42,251.051,2048,15.65,4.38,15.5\r\ndavit_tiny,224,8155.04,94.144,768,28.36,4.54,18.89\r\nseresnet33ts,288,8150.12,251.248,2048,19.78,6.02,14.76\r\npoolformer_s24,224,8135.18,251.705,2048,21.39,3.41,10.68\r\ncoatnet_nano_rw_224,224,8128.04,251.921,2048,15.14,2.41,15.41\r\neca_resnet33ts,288,8117.27,252.262,2048,19.68,6.02,14.76\r\ncsatv2_21m,512,8107.31,252.566,2048,20.7,2.94,15.85\r\nseresnext50_32x4d,224,8100.49,252.769,2048,27.56,4.26,14.42\r\nhgnet_small,224,8089.27,253.132,2048,24.36,8.53,8.79\r\nmaxvit_pico_rw_256,256,8085.45,94.952,768,7.46,1.83,22.3\r\nmaxvit_rmlp_pico_rw_256,256,8040.13,95.474,768,7.52,1.85,24.86\r\nlegacy_seresnext50_32x4d,224,8022.07,255.249,2048,27.56,4.26,14.42\r\ntwins_svt_small,224,7995.39,256.106,2048,24.06,2.94,13.75\r\nnf_regnet_b3,320,7993.16,256.155,2048,18.59,2.05,14.61\r\nmobilenetv4_conv_large,320,7990.86,192.175,1536,32.59,4.47,18.97\r\nresnet51q,256,7990.68,256.26,2048,35.7,6.38,16.55\r\ncoatnet_nano_cc_224,224,7979.82,256.605,2048,13.76,2.24,15.02\r\nresnetrs101,192,7972.38,256.823,2048,63.62,6.04,12.7\r\nsebotnet33ts_256,256,7921.73,258.488,2048,13.7,3.89,17.46\r\ncs3sedarknet_l,288,7920.05,258.543,2048,21.91,6.16,10.83\r\nefficientnet_lite3,300,7911.65,64.682,512,8.2,1.65,21.85\r\nvit_medium_patch16_gap_240,240,7895.18,259.354,2048,44.4,9.22,18.81\r\nlambda_resnet26rpt_256,256,7852.5,260.769,2048,10.99,3.16,11.87\r\ntf_efficientnet_lite3,300,7843.28,65.248,512,8.2,1.65,21.85\r\ndla102,224,7835.72,261.324,2048,33.27,7.19,14.18\r\nfastvit_mci0,256,7835.16,195.994,1536,11.36,2.39,14.72\r\ncoatnet_0_rw_224,
224,7812.45,196.561,1536,27.44,4.43,18.73\r\nresnet50_mlp,256,7812.16,262.104,2048,26.65,7.05,16.25\r\nregnetv_040,224,7792.1,262.761,2048,20.64,4.0,12.29\r\nvit_small_patch16_dinov3_qkvb,256,7760.83,263.831,2048,21.6,6.26,17.03\r\nvit_small_patch16_dinov3,256,7753.85,264.075,2048,21.59,6.26,17.03\r\ncspresnext50,256,7747.69,264.296,2048,20.57,4.05,15.86\r\ndarknet53,256,7730.81,264.871,2048,41.61,9.31,12.39\r\nregnety_040,224,7706.73,265.691,2048,20.65,4.0,12.29\r\nefficientnet_b3_pruned,300,7695.26,266.084,2048,9.86,1.04,11.86\r\nmobilevitv2_125,256,7670.15,100.096,768,7.48,2.86,20.1\r\nefficientformerv2_l,224,7659.56,267.331,2048,26.32,2.59,18.54\r\ndensenet169,224,7656.6,267.431,2048,14.15,3.4,7.3\r\ndarknetaa53,256,7645.16,267.831,2048,36.02,7.97,12.39\r\nvit_dlittle_patch16_reg1_gap_256,256,7630.58,268.342,2048,22.52,6.27,22.69\r\nvit_relpos_medium_patch16_rpn_224,224,7615.19,268.893,2048,38.73,7.97,17.02\r\nnextvit_small,224,7599.0,269.457,2048,31.74,5.8,18.44\r\nresnet101,224,7587.28,269.867,2048,44.55,7.83,16.23\r\nseresnext26t_32x4d,288,7537.67,271.651,2048,16.81,4.46,16.68\r\nfocalnet_tiny_srf,224,7529.61,271.942,2048,28.43,4.42,16.32\r\necaresnet50d_pruned,288,7523.35,272.179,2048,19.94,4.19,10.61\r\nmobilenetv3_large_150d,320,7517.75,102.125,768,14.62,1.61,19.29\r\nefficientnet_cc_b0_4e,224,7512.83,272.564,2048,13.31,0.41,9.42\r\nefficientnet_cc_b0_8e,224,7508.95,272.705,2048,24.01,0.42,9.42\r\nseresnext26d_32x4d,288,7470.67,274.089,2048,16.81,4.51,16.85\r\nresnest50d_1s4x24d,224,7399.74,276.712,2048,25.68,4.43,13.57\r\nresnet101c,224,7398.92,276.748,2048,44.57,8.08,17.04\r\ncs3darknet_focus_x,256,7393.7,276.948,2048,35.02,8.03,10.69\r\nmambaout_tiny,224,7375.19,277.641,2048,26.55,4.49,16.68\r\ngc_efficientnetv2_rw_t,288,7357.13,278.329,2048,13.68,3.2,16.45\r\nresnet50,288,7345.33,278.757,2048,25.56,6.8,18.37\r\nresnet61q,256,7324.97,279.546,2048,36.85,7.8,17.01\r\nefficientnet_b1,288,7321.76,139.804,1024,7.79,0.97,15.46\r\ncoatnet_rmlp_nano_rw_224,224,
7317.35,69.933,512,15.15,2.62,20.34\r\nxcit_tiny_12_p16_384,384,7297.74,280.581,2048,6.72,3.64,18.26\r\nresnet101d,224,7289.27,280.908,2048,44.57,8.08,17.04\r\nhrnet_w18_ssld,224,7282.28,281.17,2048,21.3,4.32,16.31\r\nhrnet_w18,224,7273.9,281.507,2048,21.3,4.32,16.31\r\necaresnetlight,288,7242.01,282.743,2048,30.16,6.79,13.91\r\necaresnet26t,320,7225.35,283.401,2048,16.01,5.24,16.44\r\npvt_v2_b2,224,7196.78,142.246,1024,25.36,4.05,27.53\r\nhgnet_tiny,288,7184.64,213.754,1536,14.74,7.51,10.51\r\nvit_base_resnet50d_224,224,7181.93,285.11,2048,110.97,8.73,16.92\r\ntf_efficientnet_b2,260,7164.22,142.895,1024,9.11,1.02,13.83\r\ncoatnet_bn_0_rw_224,224,7125.84,71.816,512,27.44,4.67,22.04\r\nswin_tiny_patch4_window7_224,224,7115.53,287.777,2048,28.29,4.51,17.06\r\nvgg13,224,7115.38,287.784,2048,133.05,11.31,12.25\r\necaresnet50t,256,7102.91,288.276,2048,25.57,5.64,15.45\r\nmixnet_l,224,7102.76,288.296,2048,7.33,0.58,10.84\r\nnfnet_f0,192,7077.82,289.307,2048,71.49,7.21,10.16\r\nfocalnet_tiny_lrf,224,7073.29,289.484,2048,28.65,4.49,17.76\r\nvgg13_bn,224,7058.78,290.077,2048,133.05,11.33,12.25\r\nnf_resnet101,224,7054.22,290.277,2048,44.55,8.01,16.23\r\nskresnext50_32x4d,224,7029.29,291.311,2048,27.48,4.5,17.18\r\ndpn68b,288,7001.02,292.486,2048,12.61,3.89,17.3\r\neca_nfnet_l0,288,6996.59,292.671,2048,24.14,7.12,17.29\r\nvit_medium_patch16_gap_256,256,6990.91,292.907,2048,38.86,10.59,22.15\r\ntf_efficientnet_cc_b0_4e,224,6984.65,293.177,2048,13.31,0.41,9.42\r\ntf_efficientnet_cc_b0_8e,224,6967.28,293.911,2048,24.01,0.42,9.42\r\nnfnet_l0,288,6956.67,294.34,2048,35.07,7.13,17.29\r\nefficientnet_b2,288,6942.01,147.475,1024,9.11,1.12,16.2\r\ndm_nfnet_f0,192,6928.68,295.53,2048,71.49,7.21,10.16\r\nresnet50t,288,6923.01,295.779,2048,25.57,7.14,19.53\r\nwide_resnet50_2,224,6917.1,296.029,2048,68.88,11.43,14.4\r\nrepvit_m2_3,224,6909.57,296.359,2048,22.93,4.52,21.32\r\nresnet50d,288,6891.42,297.134,2048,25.58,7.19,19.7\r\nrdnet_tiny,224,6873.18,297.922,2048,23.86,5.06,15.98\r\ncspda
rknet53,256,6862.37,298.39,2048,27.64,6.57,16.81\r\nresnetv2_101,224,6839.35,299.393,2048,44.54,7.83,16.23\r\nmobilenetv4_hybrid_medium,384,6817.77,150.149,1024,11.07,3.01,21.18\r\nresnetv2_50,288,6816.77,300.384,2048,25.55,6.79,18.37\r\nefficientnetv2_rw_t,288,6806.71,300.829,2048,13.65,3.19,16.42\r\ngcresnext50ts,288,6805.94,300.864,2048,15.67,4.75,19.57\r\nresnet101_clip_gap,224,6805.43,300.893,2048,42.52,9.11,17.56\r\nresnetaa101d,224,6797.59,301.233,2048,44.57,9.12,17.56\r\nnf_resnet50,288,6781.23,301.961,2048,25.56,6.88,18.37\r\ngcvit_xtiny,224,6774.93,302.247,2048,19.98,2.93,20.26\r\nrepvgg_b2g4,224,6760.31,302.894,2048,55.78,11.33,6.45\r\nswinv2_cr_tiny_224,224,6754.03,303.186,2048,28.33,4.66,28.45\r\nswinv2_cr_tiny_ns_224,224,6752.18,303.261,2048,28.33,4.66,28.45\r\nefficientvit_l2,224,6739.9,151.877,1024,63.71,6.97,19.58\r\nhalonet50ts,256,6729.57,304.282,2048,22.73,5.3,19.2\r\nregnetx_040,224,6724.38,304.513,2048,22.12,3.99,12.2\r\ngcresnet50t,288,6720.75,304.681,2048,25.9,6.86,18.57\r\nedgenext_small_rw,320,6719.98,304.716,2048,7.83,2.46,14.85\r\ntf_mixnet_l,224,6698.15,305.714,2048,7.33,0.58,10.84\r\nvitamin_small_224,224,6697.95,76.419,512,22.17,5.92,26.38\r\nresnest50d,224,6684.2,306.347,2048,27.48,5.4,14.36\r\nhiera_tiny_224,224,6682.61,306.418,2048,27.91,4.91,17.13\r\ncs3darknet_x,256,6675.01,306.762,2048,35.05,8.38,11.35\r\nresnetv2_34d,384,6666.04,307.179,2048,21.82,11.49,13.35\r\nresnet101s,224,6620.56,309.295,2048,44.67,9.19,18.64\r\nvit_medium_patch16_reg1_gap_256,256,6609.0,309.826,2048,38.88,10.63,22.26\r\nresnetv2_101d,224,6601.29,310.199,2048,44.56,8.07,17.04\r\nresnetblur101d,224,6577.1,311.331,2048,44.57,9.12,17.94\r\nvit_medium_patch16_reg4_gap_256,256,6551.46,312.561,2048,38.88,10.76,22.6\r\nvit_small_plus_patch16_dinov3,256,6535.01,313.338,2048,28.68,8.11,21.84\r\nresnetaa50,288,6532.95,313.432,2048,25.56,8.52,19.24\r\nnf_ecaresnet101,224,6532.26,313.473,2048,44.55,8.01,16.27\r\nvit_small_plus_patch16_dinov3_qkvb,256,6529.77,313.587,20
48,28.69,8.11,21.84\r\nnf_seresnet101,224,6514.95,314.299,2048,49.33,8.02,16.27\r\ndla102x,224,6514.88,314.32,2048,26.31,5.89,19.42\r\nrexnetr_300,224,6510.81,78.616,512,34.81,3.39,22.16\r\nefficientvit_b2,288,6503.55,157.414,1024,24.33,2.64,24.03\r\nmobilevitv2_150,256,6483.94,78.936,512,10.59,4.09,24.11\r\nresnet101_clip,224,6476.23,316.186,2048,56.26,9.81,18.08\r\nfbnetv3_g,288,6454.41,158.614,1024,16.62,1.77,21.09\r\nregnety_032,224,6405.82,319.651,2048,19.44,3.2,11.26\r\nres2net50_26w_6s,224,6399.74,319.963,2048,37.05,6.33,15.28\r\ntf_efficientnetv2_b3,300,6396.38,320.125,2048,14.36,3.04,15.74\r\nregnety_040_sgn,224,6364.16,321.746,2048,20.65,4.03,12.29\r\nese_vovnet57b,256,6355.25,322.204,2048,38.61,11.69,9.82\r\nresnet51q,288,6346.1,322.676,2048,35.7,8.07,20.94\r\nmvitv2_tiny,224,6318.15,324.081,2048,24.17,4.7,21.16\r\nfasternet_m,224,6315.67,324.222,2048,53.52,8.74,15.34\r\nnf_regnet_b4,320,6315.44,324.237,2048,30.21,3.29,19.88\r\nwide_resnet101_2,176,6304.23,324.798,2048,126.89,14.31,13.18\r\nbotnet50ts_256,256,6291.72,325.463,2048,22.74,5.54,22.23\r\ntresnet_v2_l,224,6241.49,328.065,2048,46.17,8.85,16.34\r\nresnetblur50,288,6205.8,247.467,1536,25.56,8.52,19.87\r\nresnetaa50d,288,6189.32,330.844,2048,25.58,8.92,20.57\r\nlegacy_seresnet101,224,6139.84,333.492,2048,49.33,7.61,15.74\r\nvit_medium_patch16_rope_reg1_gap_256,256,6137.31,333.638,2048,38.74,10.63,22.26\r\nfastvit_sa24,256,6109.3,251.373,1536,21.5,3.77,20.35\r\ntwins_pcpvt_base,224,6104.77,335.42,2048,43.83,6.68,25.25\r\ncoatnet_rmlp_0_rw_224,224,6104.55,83.842,512,27.45,4.72,24.89\r\nmaxxvit_rmlp_nano_rw_256,256,6096.55,251.899,1536,16.78,4.37,26.05\r\ncs3sedarknet_xdw,256,6083.53,336.6,2048,21.6,5.97,17.18\r\ncrossvit_18_240,240,6082.86,336.624,2048,43.27,9.05,26.26\r\necaresnet101d_pruned,288,6079.97,336.787,2048,24.88,5.75,12.71\r\nswin_s3_tiny_224,224,6074.72,337.087,2048,28.33,4.64,19.13\r\ndarknet53,288,6069.4,337.388,2048,41.61,11.78,15.68\r\nseresnet101,224,6053.31,338.282,2048,49.33,7.84,1
6.27\r\npvt_v2_b2_li,224,6050.17,169.22,1024,22.55,3.91,27.6\r\npoolformerv2_s36,224,6047.58,338.602,2048,30.79,5.01,15.82\r\nresnext101_32x4d,224,6044.04,338.797,2048,44.18,8.01,21.23\r\ndarknetaa53,288,6030.34,339.562,2048,36.02,10.08,15.68\r\nrepvgg_b2,224,6025.28,339.846,2048,80.32,18.38,6.45\r\nrexnetr_200,288,6018.56,85.036,512,16.52,2.62,24.96\r\nese_vovnet39b,288,6015.83,340.391,2048,24.57,11.71,11.13\r\nvit_base_patch32_clip_384,384,5955.83,343.812,2048,88.3,13.06,16.5\r\nresnext101_32x8d,176,5943.66,344.524,2048,88.79,10.33,19.37\r\nhieradet_small,256,5931.41,129.444,768,34.73,8.51,27.76\r\nresnetblur50d,288,5898.15,260.372,1536,25.58,8.92,21.19\r\ncoat_lite_small,224,5894.18,347.405,2048,19.84,3.96,22.09\r\necaresnet101d,224,5878.72,348.312,2048,44.57,8.08,17.07\r\nregnetx_080,224,5870.34,348.826,2048,39.57,8.02,14.06\r\nhalo2botnet50ts_256,256,5869.99,348.843,2048,22.64,5.02,21.78\r\nresnext50_32x4d,288,5865.11,349.124,2048,25.03,7.04,23.81\r\nseresnet50,288,5840.94,350.576,2048,28.09,6.8,18.39\r\nnextvit_base,224,5829.67,351.254,2048,44.79,8.29,23.71\r\nhrnet_w32,224,5818.87,351.896,2048,41.23,8.97,22.02\r\nresmlp_36_224,224,5811.89,352.335,2048,44.69,8.91,16.33\r\nresnet61q,288,5794.35,353.392,2048,36.85,9.87,21.52\r\nvit_base_patch32_384,384,5780.57,354.244,2048,88.3,13.06,16.5\r\ndensenet201,224,5765.54,355.17,2048,20.01,4.34,7.85\r\ncrossvit_18_dagger_240,240,5754.68,355.84,2048,44.27,9.5,27.03\r\ncs3sedarknet_x,256,5725.85,357.617,2048,35.4,8.38,11.35\r\nxcit_tiny_12_p8_224,224,5724.15,357.725,2048,6.71,4.81,23.6\r\nvgg16,224,5719.04,358.037,2048,138.36,15.47,13.56\r\nvit_large_patch32_224,224,5712.68,358.45,2048,305.51,15.39,13.3\r\nmaxvit_nano_rw_256,256,5710.24,67.211,384,15.45,4.46,30.28\r\nrexnet_300,224,5707.61,89.675,512,34.71,3.44,22.4\r\nefficientvit_b3,224,5702.33,179.535,1024,48.65,3.99,26.9\r\nvgg16_bn,224,5691.52,359.79,2048,138.37,15.5,13.56\r\nmaxvit_rmlp_nano_rw_256,256,5688.29,67.469,384,15.5,4.47,31.92\r\nconvnext_tiny,288,5685.0,
360.195,2048,28.59,7.39,22.21\r\npit_b_224,224,5629.46,363.746,2048,73.76,12.42,32.94\r\nmobilenetv4_conv_large,384,5615.41,182.315,1024,32.59,6.43,27.31\r\npit_b_distilled_224,224,5605.37,365.311,2048,74.79,12.5,33.07\r\nconvnext_tiny_hnf,288,5603.84,365.408,2048,28.59,7.39,22.21\r\necaresnet50t,288,5601.3,365.586,2048,25.57,7.14,19.55\r\ndensenet121,288,5599.96,365.669,2048,7.98,4.74,11.41\r\nrepvgg_b3g4,224,5589.5,366.348,2048,75.63,16.06,7.55\r\nseresnet50t,288,5572.94,367.428,2048,28.1,7.14,19.55\r\nresnext50d_32x4d,288,5570.95,367.568,2048,25.05,7.44,25.13\r\necaresnet50d,288,5570.91,367.581,2048,25.58,7.19,19.72\r\nresnet101d,256,5568.01,367.756,2048,44.57,10.55,22.25\r\nresnet50_gn,288,5566.95,367.842,2048,25.56,6.85,18.37\r\ninception_next_small,224,5565.84,367.909,2048,49.37,8.36,19.27\r\nconvnext_small,224,5544.93,369.295,2048,50.22,8.71,21.56\r\ncoatnet_0_224,224,5511.48,69.622,384,25.04,4.58,24.01\r\nres2net101_26w_4s,224,5484.9,373.332,2048,45.21,8.1,18.45\r\nregnetz_c16,256,5481.69,373.558,2048,13.46,2.51,16.57\r\nefficientnetv2_s,288,5468.91,374.423,2048,21.46,4.75,20.13\r\npoolformer_s36,224,5432.9,376.909,2048,30.86,5.0,15.82\r\nhiera_small_224,224,5394.62,379.581,2048,35.01,6.42,20.75\r\nlambda_resnet26t,256,5382.49,380.453,2048,10.96,3.02,11.87\r\ncs3edgenet_x,256,5378.27,380.744,2048,47.82,11.53,12.92\r\nswinv2_tiny_window8_256,256,5373.99,381.049,2048,28.35,5.96,24.57\r\nresnet152,224,5357.05,382.243,2048,60.19,11.56,22.56\r\nresnetv2_101x1_bit,224,5342.83,383.258,2048,44.54,8.04,16.23\r\nregnetz_c16_evos,256,5333.28,383.961,2048,13.49,2.48,16.57\r\nres2net101d,224,5324.41,384.596,2048,45.23,8.35,19.25\r\ndla169,224,5301.89,386.226,2048,53.39,11.6,20.2\r\nresnet152c,224,5257.3,389.495,2048,60.21,11.8,23.36\r\ndensenetblur121d,288,5252.6,292.376,1536,8.0,5.14,13.06\r\nconvnextv2_tiny,288,5246.66,390.292,2048,28.64,7.39,22.21\r\ncs3darknet_x,288,5245.26,390.39,2048,35.05,10.6,14.36\r\nmixer_b16_224,224,5241.51,390.673,2048,59.88,12.62,14.53\r\nre
gnetz_b16,288,5228.12,391.678,2048,9.72,2.39,16.43\r\nmobilevitv2_175,256,5203.31,98.367,512,14.25,5.54,28.13\r\nresnet152d,224,5201.78,393.653,2048,60.21,11.8,23.36\r\nefficientvit_l2,256,5193.55,147.844,768,63.71,9.09,25.49\r\nedgenext_base,320,5185.15,394.919,2048,18.51,6.01,24.32\r\nhgnetv2_b5,288,5185.01,394.935,2048,39.57,10.84,18.5\r\nvolo_d1_224,224,5182.31,395.143,2048,26.63,6.94,24.43\r\nefficientnet_b3,288,5177.07,98.86,512,12.23,1.63,21.49\r\ntnt_s_legacy_patch16_224,224,5172.93,395.853,2048,23.76,5.24,24.37\r\nefficientnetv2_rw_s,288,5166.36,396.356,2048,23.94,4.91,21.41\r\nregnetx_064,224,5159.48,396.885,2048,26.21,6.49,16.37\r\ncait_xxs24_224,224,5144.59,398.036,2048,11.96,2.53,20.29\r\nres2net50_26w_8s,224,5142.51,398.195,2048,48.4,8.37,17.95\r\nconvnextv2_small,224,5136.54,398.664,2048,50.32,8.71,21.56\r\neca_nfnet_l1,256,5134.3,398.836,2048,41.41,9.62,22.04\r\nconvnext_nano,384,5130.63,399.123,2048,15.59,7.22,24.61\r\ncsatv2_21m,640,5117.26,400.168,2048,20.7,4.72,26.68\r\ninception_v4,299,5091.74,402.171,2048,42.68,12.28,15.09\r\nvit_small_resnet50d_s16_224,224,5090.96,402.229,2048,57.53,13.48,24.82\r\ntnt_s_patch16_224,224,5088.36,402.441,2048,23.77,5.25,24.37\r\nseresnetaa50d,288,5085.74,402.644,2048,28.11,8.92,20.59\r\nseresnext101_32x4d,224,5071.22,403.788,2048,48.96,8.02,21.26\r\nlegacy_seresnext101_32x4d,224,5030.87,407.021,2048,48.96,8.02,21.26\r\nregnetz_b16_evos,288,5023.35,407.647,2048,9.74,2.36,16.43\r\npvt_v2_b3,224,4993.57,205.02,1024,45.24,6.92,37.7\r\nmixer_l32_224,224,4992.05,410.204,2048,206.94,11.27,19.86\r\nmaxvit_tiny_rw_224,224,4975.75,102.863,512,29.06,5.11,33.11\r\nefficientnet_cc_b1_8e,240,4971.29,411.927,2048,39.72,0.75,15.44\r\nxcit_small_24_p16_224,224,4954.57,413.298,2048,47.67,9.1,23.64\r\nconvit_small,224,4947.35,413.902,2048,27.78,5.76,17.87\r\nresnetv2_50d_gn,288,4940.33,414.5,2048,25.57,7.24,19.7\r\nhgnet_small,288,4906.42,313.02,1536,24.36,14.09,14.53\r\nvit_base_patch16_clip_quickgelu_224,224,4898.12,418.069,2048,
86.19,17.58,23.9\r\nresnetv2_50d_evos,288,4897.55,418.116,2048,25.59,7.15,19.7\r\nresnetv2_34d,448,4891.57,418.628,2048,21.82,15.64,18.16\r\nvit_base_patch16_clip_224,224,4887.98,418.94,2048,86.57,17.58,23.9\r\ntresnet_l,224,4874.76,420.074,2048,55.99,10.9,11.9\r\ndavit_small,224,4868.33,157.714,768,49.75,8.8,30.49\r\nseresnext50_32x4d,288,4859.74,421.374,2048,27.56,7.04,23.82\r\nresnet152s,224,4843.71,422.762,2048,60.32,12.92,24.96\r\nvit_base_patch16_xp_224,224,4842.19,422.903,2048,86.51,17.56,23.9\r\nefficientnet_x_b3,288,4830.74,423.894,2048,13.3,3.91,15.6\r\nfastvit_mci1,256,4818.38,318.731,1536,21.46,4.67,27.3\r\ncoatnet_rmlp_1_rw_224,224,4805.01,319.611,1536,41.69,7.85,35.47\r\ndeit3_base_patch16_224,224,4797.69,426.82,2048,86.59,17.58,23.9\r\nmobilenetv4_hybrid_medium,448,4788.38,160.354,768,11.07,4.2,29.64\r\nvgg19,224,4787.5,427.725,2048,143.67,19.63,14.86\r\nese_vovnet99b,224,4782.8,428.148,2048,63.2,16.51,11.27\r\nregnetv_040,288,4781.99,428.225,2048,20.64,6.6,20.3\r\nvit_betwixt_patch16_gap_256,256,4776.07,428.746,2048,60.37,16.25,27.69\r\ncaformer_s36,224,4773.6,428.971,2048,39.3,8.0,37.53\r\nregnetv_064,224,4767.11,429.56,2048,30.58,6.39,16.41\r\nvgg19_bn,224,4764.77,429.768,2048,143.68,19.66,14.86\r\nvit_base_patch16_224_miil,224,4763.91,429.841,2048,94.4,17.59,23.91\r\ndeit_base_patch16_224,224,4758.55,430.33,2048,86.57,17.58,23.9\r\nvit_pe_spatial_tiny_patch16_512,512,4756.83,430.484,2048,5.68,10.46,61.64\r\nresnetv2_152,224,4753.66,430.779,2048,60.19,11.55,22.56\r\ndeit_base_distilled_patch16_224,224,4749.1,431.176,2048,87.34,17.68,24.05\r\nvit_base_patch16_siglip_gap_224,224,4745.95,431.469,2048,85.8,17.49,23.75\r\nvit_base_patch16_224,224,4741.58,431.866,2048,86.57,17.58,23.9\r\nnextvit_large,224,4735.17,432.46,2048,57.83,10.77,28.99\r\nvit_base_patch16_siglip_224,224,4733.64,432.588,2048,92.88,17.73,24.06\r\nlegacy_xception,299,4724.81,162.5,768,22.86,8.4,35.83\r\nmixnet_xl,224,4710.88,434.678,2048,11.9,0.93,14.57\r\ntf_efficientnet_cc_b1_8e,24
0,4699.55,435.749,2048,39.72,0.75,15.44\r\nregnety_040,288,4688.71,436.746,2048,20.65,6.61,20.3\r\nmaxxvitv2_nano_rw_256,256,4686.15,218.474,1024,23.7,6.26,23.05\r\nvit_small_patch16_36x1_224,224,4682.27,437.334,2048,64.67,13.71,35.69\r\nsequencer2d_s,224,4675.08,438.023,2048,27.65,4.96,11.31\r\nmobilevitv2_200,256,4667.75,82.235,384,18.45,7.22,32.15\r\nresnetv2_152d,224,4664.84,438.975,2048,60.2,11.8,23.36\r\nvit_small_patch16_18x2_224,224,4662.07,439.239,2048,64.67,13.71,35.69\r\nconvnextv2_nano,384,4655.77,439.827,2048,15.62,7.22,24.61\r\nmambaout_small,224,4654.19,439.986,2048,48.49,8.96,27.72\r\nvit_base_mci_224,224,4652.53,440.146,2048,86.35,17.73,24.65\r\nconvformer_s36,224,4647.66,440.592,2048,40.01,7.67,30.5\r\ntf_efficientnetv2_s,300,4645.97,440.755,2048,21.46,5.35,22.73\r\nmvitv2_small,224,4631.93,442.097,2048,34.87,7.0,28.08\r\nmobilenetv4_conv_aa_large,384,4627.83,221.222,1024,32.59,7.07,32.29\r\nregnety_064,224,4626.77,442.593,2048,30.58,6.39,16.41\r\nvit_base_patch16_gap_224,224,4604.05,444.77,2048,86.57,17.49,25.59\r\nrdnet_small,224,4570.15,448.07,2048,50.44,8.74,22.55\r\nvit_betwixt_patch16_reg1_gap_256,256,4569.76,448.108,2048,60.4,16.32,27.83\r\nrepvgg_b3,224,4562.87,448.779,2048,110.96,26.21,7.55\r\nresnet101,288,4547.58,450.297,2048,44.55,12.95,26.83\r\ncoatnet_1_rw_224,224,4545.89,112.591,512,41.72,8.04,34.6\r\nvit_pe_core_base_patch16_224,224,4534.2,451.615,2048,93.67,17.82,24.21\r\necaresnet50t,320,4528.76,452.153,2048,25.57,8.82,24.13\r\nmambaout_small_rw,224,4526.91,452.35,2048,48.5,8.96,27.72\r\ncs3sedarknet_x,288,4520.18,453.027,2048,35.4,10.6,14.37\r\nvit_betwixt_patch16_reg4_gap_256,256,4514.19,453.626,2048,60.4,16.52,28.24\r\nfastvit_sa36,256,4501.81,341.15,1536,31.46,5.59,29.46\r\ndensenet161,224,4492.51,455.802,2048,28.68,7.79,11.06\r\nvit_relpos_base_patch16_224,224,4492.03,455.871,2048,86.43,17.51,24.97\r\nhrnet_w30,224,4484.75,456.614,2048,37.71,8.15,21.21\r\nfasternet_l,224,4478.31,457.267,2048,93.47,15.52,20.46\r\ncs3se_edgenet
_x,256,4475.02,457.603,2048,50.72,11.53,12.94\r\nxception41p,299,4474.75,114.388,512,26.91,9.25,39.86\r\nvit_relpos_base_patch16_clsgap_224,224,4473.99,457.694,2048,86.43,17.6,25.12\r\nvit_relpos_base_patch16_cls_224,224,4471.12,458.001,2048,86.43,17.6,25.12\r\nvit_base_patch16_rope_ape_224,224,4470.17,458.082,2048,86.59,17.58,23.9\r\nvit_base_patch16_rope_224,224,4464.45,458.686,2048,86.43,17.58,23.9\r\nefficientnet_el,300,4451.43,460.031,2048,10.59,8.0,30.7\r\nefficientnet_el_pruned,300,4446.13,460.58,2048,10.59,8.0,30.7\r\nfocalnet_small_srf,224,4443.42,460.859,2048,49.89,8.62,26.26\r\nbeit_base_patch16_224,224,4435.45,461.684,2048,86.53,17.58,23.9\r\nvit_base_patch32_clip_448,448,4434.51,461.78,2048,88.34,17.93,23.9\r\nbeitv2_base_patch16_224,224,4424.17,462.863,2048,86.53,17.58,23.9\r\ntf_efficientnet_el,300,4416.06,463.71,2048,10.59,8.0,30.7\r\nefficientvit_b3,256,4414.64,173.927,768,48.65,5.2,35.01\r\neva02_base_patch16_clip_224,224,4364.74,469.17,2048,86.26,17.62,26.32\r\nswin_small_patch4_window7_224,224,4354.49,470.257,2048,49.61,8.77,27.47\r\ngcvit_tiny,224,4347.9,470.967,2048,28.22,4.79,29.82\r\nvit_base_patch16_rpn_224,224,4341.77,471.633,2048,86.54,17.49,23.75\r\ntwins_pcpvt_large,224,4340.11,471.834,2048,60.99,9.84,35.82\r\nbeit3_base_patch16_224,224,4325.71,473.396,2048,86.66,17.63,23.9\r\nhrnet_w18_ssld,288,4315.55,474.505,2048,21.3,7.14,26.96\r\ndla102x2,224,4306.17,475.545,2048,41.28,9.34,29.91\r\ncs3edgenet_x,288,4289.28,477.418,2048,47.82,14.59,16.36\r\nregnety_080,224,4276.03,478.888,2048,39.18,8.0,17.97\r\nlegacy_seresnet152,224,4267.42,479.856,2048,66.82,11.33,22.08\r\nvit_small_patch16_384,384,4264.93,480.143,2048,22.2,15.52,50.78\r\ndeit3_small_patch16_384,384,4255.32,481.224,2048,22.21,15.52,50.78\r\nmaxxvit_rmlp_tiny_rw_256,256,4240.83,362.139,1536,29.64,6.66,39.76\r\nregnetz_040,256,4239.89,482.974,2048,27.12,4.06,24.19\r\nseresnet152,224,4229.55,484.151,2048,66.82,11.57,22.61\r\nfocalnet_small_lrf,224,4223.93,484.809,2048,50.34,8.74,28.
61\r\nnf_regnet_b4,384,4223.57,484.847,2048,30.21,4.7,28.61\r\nefficientformer_l7,224,4220.77,485.151,2048,82.23,10.17,24.45\r\nefficientnet_b3_gn,288,4220.48,121.278,512,11.73,1.74,23.35\r\nregnetz_040_h,256,4218.28,485.46,2048,28.94,4.12,24.29\r\nvit_betwixt_patch16_rope_reg4_gap_256,256,4213.76,485.965,2048,60.23,16.52,28.24\r\npoolformerv2_m36,224,4182.7,489.571,2048,56.08,8.81,22.02\r\nefficientnet_b3,320,4168.37,122.799,512,12.23,2.01,26.52\r\nflexivit_base,240,4161.91,492.021,2048,86.59,20.29,28.36\r\nwide_resnet50_2,288,4159.62,492.288,2048,68.88,18.89,23.81\r\nresnest50d_4s2x40d,224,4152.35,493.157,2048,30.42,4.4,17.94\r\nresnetv2_101,288,4124.48,496.497,2048,44.54,12.94,26.83\r\nmobilenetv4_conv_large,448,4108.12,186.905,768,32.59,8.75,37.17\r\nresnetaa101d,288,4093.82,500.218,2048,44.57,15.07,29.03\r\ntf_efficientnet_b3,300,4090.14,125.145,512,12.23,1.87,23.83\r\ngmlp_b16_224,224,4082.27,501.635,2048,73.08,15.78,30.21\r\nvit_relpos_base_patch16_rpn_224,224,4080.86,501.798,2048,86.41,17.51,24.97\r\nswinv2_cr_small_224,224,4075.02,502.522,2048,49.7,9.07,50.27\r\nswinv2_cr_small_ns_224,224,4073.57,502.686,2048,49.7,9.08,50.27\r\nregnetz_d8,256,4069.81,503.168,2048,23.37,3.97,23.74\r\nefficientvit_l2,288,4063.66,188.956,768,63.71,11.51,32.19\r\nmambaout_tiny,288,4061.68,504.16,2048,26.55,7.41,27.58\r\nnfnet_f0,256,4055.54,504.932,2048,71.49,12.62,18.05\r\nnest_tiny,224,4036.06,507.366,2048,17.06,5.83,25.48\r\nese_vovnet57b,320,4032.15,507.862,2048,38.61,18.26,15.34\r\nregnetz_d8_evos,256,4029.19,508.233,2048,23.46,4.5,24.92\r\nvit_mediumd_patch16_reg4_gap_256,256,4008.66,510.828,2048,64.11,17.87,37.57\r\nvit_base_patch16_rope_mixed_ape_224,224,4008.05,510.912,2048,86.59,17.58,25.7\r\nrexnetr_300,288,4004.27,63.903,256,34.81,5.59,36.61\r\ninception_resnet_v2,299,4003.39,511.51,2048,55.84,13.18,25.06\r\nnest_tiny_jx,224,4000.07,511.93,2048,17.06,5.83,25.48\r\nvit_base_patch16_rope_mixed_224,224,3999.96,511.95,2048,86.44,17.58,25.7\r\nxcit_tiny_24_p16_384,384,39
79.64,514.572,2048,12.12,6.87,34.29\r\nresnet152d,256,3973.03,515.427,2048,60.21,15.41,30.51\r\nregnety_080_tv,224,3969.07,515.922,2048,39.38,8.51,19.73\r\nvit_pe_core_small_patch16_384,384,3967.96,516.087,2048,23.78,15.69,51.23\r\nresnetblur101d,288,3958.67,387.957,1536,44.57,15.07,29.65\r\nhiera_small_abswin_256,256,3958.09,517.357,2048,34.36,8.29,26.38\r\ndm_nfnet_f0,256,3952.02,518.15,2048,71.49,12.62,18.05\r\nlevit_conv_384_s8,224,3939.45,519.818,2048,39.06,9.95,35.86\r\nwide_resnet101_2,224,3938.48,519.949,2048,126.89,22.8,21.23\r\neva02_small_patch14_336,336,3928.28,521.293,2048,22.13,15.48,54.33\r\nmaxvit_tiny_tf_224,224,3920.1,130.564,512,30.92,5.6,35.78\r\ncoatnet_rmlp_1_rw2_224,224,3916.18,130.707,512,41.72,8.11,40.13\r\ndpn98,224,3904.02,524.545,2048,61.57,11.73,25.2\r\nresnet200,224,3884.82,527.116,2048,64.67,15.07,32.19\r\nregnety_040_sgn,288,3873.73,528.628,2048,20.65,6.67,20.3\r\nmaxvit_tiny_rw_256,256,3862.04,99.392,384,29.07,6.74,44.35\r\nlevit_384_s8,224,3850.54,531.818,2048,39.06,9.95,35.86\r\nmaxvit_rmlp_tiny_rw_256,256,3834.18,100.116,384,29.15,6.77,46.92\r\nmobilenetv4_hybrid_large,384,3818.33,268.137,1024,37.76,7.77,34.52\r\nconvnext_base,224,3777.15,542.143,2048,88.59,15.38,28.75\r\nlamhalobotnet50ts_256,256,3766.47,543.704,2048,22.57,5.02,18.44\r\nefficientnet_lite4,380,3756.78,102.181,384,13.01,4.04,45.66\r\nmvitv2_small_cls,224,3751.96,545.786,2048,34.87,7.04,28.17\r\ntf_efficientnet_lite4,380,3737.11,102.717,384,13.01,4.04,45.66\r\nresnext101_64x4d,224,3734.4,548.361,2048,83.46,15.52,31.21\r\nvit_mediumd_patch16_rope_reg1_gap_256,256,3731.18,548.818,2048,63.95,17.65,37.02\r\nhgnetv2_b6,224,3725.95,549.6,2048,75.26,16.88,21.23\r\ninception_next_base,224,3706.7,552.456,2048,86.67,14.85,25.69\r\nregnetx_120,224,3705.62,552.613,2048,46.11,12.13,21.37\r\nregnety_032,288,3705.51,552.631,2048,19.44,5.29,18.61\r\nvit_large_r50_s32_224,224,3700.29,553.399,2048,328.99,19.58,24.41\r\nvit_base_patch16_siglip_gap_256,256,3679.97,556.475,2048,85.84,23
.13,33.23\r\nhiera_base_224,224,3654.4,560.364,2048,51.52,9.4,30.42\r\nresnext101_32x4d,288,3653.35,560.516,2048,44.18,13.24,35.09\r\nvit_base_patch16_siglip_256,256,3651.81,560.768,2048,92.93,23.44,33.63\r\nxception41,299,3643.9,140.466,512,26.97,9.28,39.86\r\nsequencer2d_m,224,3643.56,562.026,2048,38.31,6.55,14.26\r\nseresnet101,288,3627.03,564.588,2048,49.33,12.95,26.87\r\nresnext101_32x8d,224,3625.26,564.872,2048,88.79,16.48,31.21\r\npoolformer_m36,224,3623.66,565.122,2048,56.17,8.8,22.02\r\nswinv2_tiny_window16_256,256,3602.37,568.453,2048,28.35,6.68,39.02\r\ncrossvit_base_240,240,3564.7,574.461,2048,105.03,21.22,36.33\r\nresnet101d,320,3532.1,579.765,2048,44.57,16.48,34.77\r\necaresnet101d,288,3526.68,580.661,2048,44.57,13.35,28.19\r\nconvnextv2_base,224,3522.91,581.283,2048,88.72,15.38,28.75\r\npvt_v2_b4,224,3505.13,292.095,1024,62.56,10.14,53.74\r\nresnetrs101,288,3501.35,584.861,2048,63.62,13.56,28.53\r\ncoatnet_1_224,224,3497.86,109.736,384,42.23,8.7,39.0\r\nvit_base_patch16_reg4_gap_256,256,3493.77,586.135,2048,86.62,23.5,33.89\r\nregnety_120,224,3493.55,586.169,2048,51.82,12.14,21.38\r\nvit_small_r26_s32_384,384,3479.51,588.546,2048,36.47,10.43,29.85\r\ncait_xxs36_224,224,3472.35,589.74,2048,17.3,3.77,30.34\r\nefficientvit_b3,288,3449.69,222.587,768,48.65,6.58,44.2\r\nregnetz_d32,256,3446.82,594.113,2048,27.58,5.98,23.74\r\ncoat_tiny,224,3435.52,596.065,2048,5.5,4.35,27.2\r\npvt_v2_b5,224,3423.25,598.203,2048,81.96,11.76,50.92\r\nregnetz_c16,320,3411.94,600.189,2048,13.46,3.92,25.88\r\ntwins_svt_base,224,3411.58,600.253,2048,56.07,8.59,26.33\r\nefficientnet_b3_gn,320,3402.14,112.832,384,11.73,2.14,28.83\r\nmobilenetv4_conv_aa_large,448,3389.59,226.536,768,32.59,9.63,43.94\r\ndavit_base,224,3384.57,226.873,768,87.95,15.51,40.66\r\nhrnet_w48_ssld,224,3370.45,607.573,2048,77.47,17.34,28.56\r\nxcit_medium_24_p16_224,224,3365.86,608.398,2048,84.4,16.13,31.71\r\nhrnet_w48,224,3364.33,608.686,2048,77.47,17.34,28.56\r\nvit_base_patch16_plus_clip_240,240,3336.4,6
13.776,2048,117.21,27.41,33.08\r\nconvnext_small,288,3332.18,614.551,2048,50.22,14.39,35.65\r\nvit_base_patch16_rope_reg1_gap_256,256,3317.95,617.186,2048,86.43,23.22,33.39\r\nseresnext101_64x4d,224,3316.19,617.521,2048,88.23,15.53,31.25\r\nfastvit_ma36,256,3306.17,464.529,1536,43.98,7.82,34.98\r\nregnetz_c16_evos,320,3304.35,619.721,2048,13.49,3.86,25.88\r\ncaformer_m36,224,3289.61,622.506,2048,56.2,13.29,50.48\r\neva02_base_patch14_224,224,3277.31,624.834,2048,85.76,23.22,36.55\r\neca_nfnet_l1,320,3271.08,626.029,2048,41.41,14.92,34.42\r\nlambda_resnet50ts,256,3264.21,627.371,2048,21.54,5.07,17.48\r\nfastvit_mci2,256,3259.68,471.163,1536,35.7,7.85,36.09\r\nvit_base_patch16_plus_240,240,3248.22,630.445,2048,117.56,27.41,33.08\r\ntresnet_xl,224,3242.32,631.588,2048,78.44,15.2,15.34\r\nxception65p,299,3241.74,157.897,512,39.82,13.91,52.48\r\nvit_base_r50_s16_224,224,3234.75,633.065,2048,97.89,21.66,35.28\r\nseresnext101_32x8d,224,3232.47,633.506,2048,93.57,16.48,31.25\r\nconvformer_m36,224,3226.48,634.685,2048,57.05,12.89,42.05\r\nhgnet_base,224,3226.41,634.694,2048,71.58,25.14,15.47\r\nswinv2_cr_small_ns_256,256,3218.06,636.352,2048,49.7,12.07,76.21\r\nresnet152,288,3212.56,637.439,2048,60.19,19.11,37.28\r\nregnetx_160,224,3201.39,639.656,2048,54.28,15.99,25.52\r\nmvitv2_base,224,3198.61,640.224,2048,51.47,10.16,40.5\r\nswinv2_small_window8_256,256,3194.84,640.984,2048,49.73,11.58,40.14\r\nhrnet_w40,224,3183.87,643.191,2048,57.56,12.75,25.29\r\nconvnext_tiny,384,3181.51,643.667,2048,28.59,13.14,39.48\r\nseresnext101d_32x8d,224,3175.14,644.943,2048,93.59,16.72,32.05\r\nefficientnet_b4,320,3166.4,161.651,512,19.34,3.13,34.76\r\nxcit_nano_12_p8_384,384,3158.18,648.41,2048,3.05,6.34,46.08\r\nxcit_small_12_p16_384,384,3158.1,648.429,2048,26.25,14.14,36.51\r\npoolformerv2_m48,224,3155.64,648.939,2048,73.35,11.59,29.17\r\nseresnet152d,256,3154.28,649.213,2048,66.84,15.42,30.56\r\nresnest101e,256,3140.87,651.988,2048,48.28,13.38,28.66\r\nresnetrs152,256,3135.41,653.126,2048
,86.62,15.59,30.83\r\nmaxvit_tiny_pm_256,256,3134.19,122.482,384,30.09,6.61,47.9\r\nswinv2_base_window12_192,192,3131.95,653.854,2048,109.28,11.9,39.72\r\nmixnet_xxl,224,3102.65,329.998,1024,23.96,2.04,23.43\r\nregnety_160,224,3093.51,661.98,2048,83.59,15.96,23.04\r\nhrnet_w44,224,3090.16,662.698,2048,67.06,14.94,26.92\r\nvit_small_patch8_224,224,3076.73,665.577,2048,21.67,22.44,80.84\r\nvit_relpos_base_patch16_plus_240,240,3073.49,666.28,2048,117.38,27.3,34.33\r\nfocalnet_base_srf,224,3071.01,666.83,2048,88.15,15.28,35.01\r\nefficientnetv2_s,384,3061.05,668.99,2048,21.46,8.44,35.77\r\nswin_base_patch4_window7_224,224,3059.13,669.419,2048,87.77,15.47,36.63\r\nseresnext101_32x4d,288,3040.16,673.58,2048,48.96,13.25,35.12\r\nmaxvit_rmlp_small_rw_224,224,3036.16,168.581,512,64.9,10.75,49.3\r\nsamvit_base_patch16_224,224,3032.64,675.261,2048,86.46,17.54,24.54\r\nconvformer_s18,384,3027.18,507.354,1536,26.77,11.63,46.49\r\nxcit_tiny_24_p8_224,224,3021.61,677.718,2048,12.11,9.21,45.39\r\ncaformer_s18,384,3012.13,509.888,1536,26.34,13.42,77.34\r\nnfnet_f1,224,2998.89,682.857,2048,132.63,17.87,22.94\r\nseresnextaa101d_32x8d,224,2981.71,686.787,2048,93.59,17.25,34.16\r\nmobilenetv4_conv_aa_large,480,2954.17,259.935,768,32.59,11.05,50.45\r\ndm_nfnet_f1,224,2942.07,696.054,2048,132.63,17.87,22.94\r\ntf_efficientnetv2_s,384,2941.21,696.255,2048,21.46,8.44,35.77\r\nrdnet_base,224,2938.84,696.811,2048,87.45,15.4,31.14\r\nconvnextv2_tiny,384,2932.51,698.315,2048,28.64,13.14,39.48\r\nvit_base_patch16_dinov3,256,2925.62,699.955,2048,85.64,23.6,34.06\r\nvolo_d2_224,224,2925.04,700.101,2048,58.68,14.34,41.34\r\ncait_s24_224,224,2923.25,700.526,2048,46.92,9.35,40.58\r\nvit_base_patch16_dinov3_qkvb,256,2923.15,700.551,2048,85.66,23.6,34.06\r\nresnet200d,256,2896.31,707.042,2048,64.69,20.0,43.09\r\nswinv2_cr_base_224,224,2896.1,707.09,2048,87.88,15.86,59.66\r\nswinv2_cr_base_ns_224,224,2895.68,707.197,2048,87.88,15.86,59.66\r\nefficientnetv2_rw_s,384,2889.89,708.623,2048,23.94,8.72,38.03\
r\nconvnext_base,256,2886.78,709.381,2048,88.59,20.09,37.55\r\ncs3se_edgenet_x,320,2869.34,713.684,2048,50.72,18.01,20.21\r\nmambaout_base_tall_rw,224,2858.85,716.311,2048,86.48,16.15,38.74\r\nswin_s3_small_224,224,2854.51,717.406,2048,49.74,9.43,37.84\r\nvit_medium_patch16_gap_384,384,2851.2,718.23,2048,39.03,26.08,67.54\r\ntresnet_m,448,2847.64,359.533,1024,31.39,22.99,29.21\r\nregnetv_064,288,2842.66,720.392,2048,30.58,10.55,27.11\r\nfocalnet_base_lrf,224,2841.39,720.713,2048,88.75,15.43,38.13\r\nresnet50x4_clip_gap,288,2838.99,721.317,2048,65.62,19.57,34.11\r\ndpn92,224,2834.09,722.578,2048,37.67,6.54,18.21\r\ndpn131,224,2829.89,723.635,2048,79.25,16.09,32.97\r\ngcvit_small,224,2783.98,551.67,1536,51.09,8.57,41.61\r\ncoat_lite_medium,224,2783.59,735.675,2048,44.57,9.81,40.06\r\nconvit_base,224,2779.75,736.699,2048,86.54,17.52,31.77\r\nmambaout_base_short_rw,224,2769.77,739.359,2048,88.83,16.31,38.08\r\ntnt_b_patch16_224,224,2769.76,739.357,2048,65.43,14.1,39.01\r\nregnety_064,288,2755.33,743.221,2048,30.58,10.56,27.11\r\nnf_regnet_b5,384,2738.33,747.837,2048,49.74,7.95,42.9\r\ncoat_mini,224,2727.53,750.809,2048,10.34,6.82,33.68\r\nhiera_base_plus_224,224,2727.06,750.936,2048,69.9,12.67,37.98\r\nmobilenetv5_base,256,2725.03,187.845,512,82.65,20.05,36.89\r\npoolformer_m48,224,2721.32,752.515,2048,73.47,11.59,29.17\r\ntiny_vit_21m_384,384,2701.93,189.455,512,21.22,13.72,77.83\r\nresnet50x4_clip,288,2698.59,758.859,2048,87.14,21.35,35.27\r\nregnetz_040,320,2697.58,759.142,2048,27.12,6.35,37.78\r\nregnetz_040_h,320,2684.17,762.939,2048,28.94,6.43,37.94\r\nvitamin_base_224,224,2681.85,95.418,256,87.72,22.68,52.77\r\nhrnet_w64,224,2676.83,765.024,2048,128.06,28.97,35.09\r\nhiera_base_abswin_256,256,2669.84,767.033,2048,51.27,12.46,40.7\r\nmobilenetv4_hybrid_large,448,2669.83,287.612,768,37.76,10.74,48.61\r\nxception65,299,2644.18,193.589,512,39.92,13.96,52.48\r\ncoatnet_2_rw_224,224,2621.71,97.582,256,73.87,15.09,49.22\r\ncrossvit_15_dagger_408,408,2596.05,788.823,2048
,28.5,21.45,95.05\r\nregnetz_d8,320,2592.78,789.828,2048,23.37,6.19,37.08\r\nmambaout_small,288,2588.37,791.165,2048,48.49,14.81,45.82\r\nefficientnetv2_m,320,2583.18,792.764,2048,54.14,11.01,39.97\r\ncoatnet_rmlp_2_rw_224,224,2574.63,99.4,256,73.88,15.18,54.78\r\nmvitv2_base_cls,224,2571.65,796.321,2048,65.44,10.23,40.65\r\nregnetz_d8_evos,320,2559.18,800.192,2048,23.46,7.03,38.92\r\nnextvit_small,384,2551.15,802.703,2048,31.74,17.25,57.14\r\nefficientnet_b3_g8_gn,288,2541.95,805.61,2048,14.25,2.59,23.35\r\nmobilevitv2_150,384,2538.23,100.826,256,10.59,9.2,54.25\r\nseresnet152,288,2529.73,809.506,2048,66.82,19.11,37.34\r\nresnet152d,320,2525.96,810.709,2048,60.21,24.08,47.67\r\nmambaout_small_rw,288,2524.94,811.047,2048,48.5,14.81,45.82\r\nnest_small,224,2517.21,813.536,2048,38.35,10.35,40.04\r\nfastvit_mci3,256,2506.26,408.53,1024,125.07,14.82,44.88\r\nnest_small_jx,224,2501.94,818.495,2048,38.35,10.35,40.04\r\nefficientvit_l3,224,2499.32,204.805,512,246.04,27.62,39.16\r\nregnety_080,288,2497.58,819.932,2048,39.18,13.22,29.69\r\ndpn107,224,2480.09,825.699,2048,86.92,18.38,33.46\r\ndensenet264d,224,2462.67,623.671,1536,72.74,13.57,14.0\r\nmaxvit_small_tf_224,224,2462.67,155.886,384,68.93,11.66,53.17\r\nmaxxvit_rmlp_small_rw_256,256,2455.3,417.01,1024,66.01,14.67,58.38\r\ncoatnet_2_224,224,2430.1,105.311,256,74.68,16.5,52.67\r\nxcit_small_12_p8_224,224,2428.17,843.359,2048,26.21,18.69,47.21\r\nlevit_conv_512_s8,224,2427.9,843.467,2048,73.97,21.77,52.28\r\nlevit_512_s8,224,2406.39,850.999,2048,73.97,21.77,52.28\r\nmambaout_base_wide_rw,224,2375.7,861.99,2048,94.45,17.78,42.6\r\nmaxvit_rmlp_small_rw_256,256,2366.48,162.21,384,64.9,14.15,66.09\r\nsequencer2d_l,224,2361.19,867.308,2048,54.3,9.74,22.12\r\ntwins_svt_large,224,2329.9,878.948,2048,99.27,15.15,35.1\r\nresnet200,288,2327.81,879.728,2048,64.67,24.91,53.21\r\nefficientvit_l2,384,2313.57,165.938,384,63.71,20.45,57.01\r\neca_nfnet_l2,320,2308.15,887.224,2048,56.72,20.95,47.43\r\nmambaout_base,224,2299.81,890.438,
2048,84.81,15.83,36.95\r\ncaformer_b36,224,2297.35,891.402,2048,98.75,23.22,67.3\r\nmobilenetv4_conv_aa_large,544,2290.96,223.442,512,32.59,14.19,64.79\r\nswin_s3_base_224,224,2281.18,897.709,2048,71.13,13.69,48.26\r\nseresnet200d,256,2270.89,901.788,2048,71.86,20.01,43.15\r\necaresnet200d,256,2262.24,905.223,2048,64.69,20.0,43.15\r\nmambaout_base_plus_rw,224,2262.14,905.273,2048,101.66,19.19,45.16\r\nconvnext_base,288,2261.35,905.587,2048,88.59,25.43,47.53\r\nefficientnetv2_rw_m,320,2257.54,907.122,2048,53.24,12.72,47.14\r\nresnetrs200,256,2249.74,910.263,2048,93.21,20.18,43.42\r\nvit_so150m_patch16_reg4_gap_256,256,2247.25,911.281,2048,134.13,36.75,53.21\r\nhgnetv2_b6,288,2244.6,912.344,2048,75.26,27.9,35.09\r\nswinv2_base_window8_256,256,2242.61,913.156,2048,87.92,20.37,52.59\r\nconvformer_b36,224,2241.51,913.606,2048,99.88,22.69,56.06\r\nregnetz_e8,256,2240.72,913.933,2048,57.7,9.91,40.94\r\nvit_so150m_patch16_reg4_map_256,256,2225.17,920.315,2048,141.48,37.17,53.68\r\nresnext101_64x4d,288,2211.7,925.911,2048,83.46,25.66,51.59\r\nefficientnet_b4,384,2207.28,173.93,384,19.34,4.51,50.04\r\nregnetz_d32,320,2185.82,936.878,2048,27.58,9.33,37.08\r\nvit_so150m2_patch16_reg1_gap_256,256,2154.09,950.688,2048,136.06,37.0,56.93\r\nresnetv2_50x1_bit,448,2148.86,953.002,2048,25.55,16.62,44.46\r\nswinv2_small_window16_256,256,2131.39,960.81,2048,49.73,12.82,66.29\r\nconvnextv2_base,288,2119.27,966.306,2048,88.72,25.43,47.53\r\nvolo_d3_224,224,2114.76,968.363,2048,86.33,20.78,60.09\r\ncoat_small,224,2052.42,997.772,2048,21.69,12.61,44.25\r\nvit_pe_spatial_small_patch16_512,512,2049.77,999.068,2048,21.98,31.8,123.27\r\nmobilevitv2_175,384,2047.81,124.976,256,14.25,12.47,63.29\r\nconvnext_large,224,2044.15,1001.82,2048,197.77,34.4,43.13\r\nxception71,299,2036.43,251.379,512,42.34,18.09,69.92\r\ntf_efficientnet_b4,380,2019.49,190.103,384,19.34,4.49,49.49\r\ngcvit_base,224,2018.15,507.335,1024,90.32,14.87,55.48\r\nhrnet_w48_ssld,288,2007.61,1020.044,2048,77.47,28.66,47.21\r\nsere
snet152d,320,2005.11,1021.319,2048,66.84,24.09,47.72\r\nresnetrs152,320,1998.63,1024.63,2048,86.62,24.34,48.14\r\nmaxxvitv2_rmlp_base_rw_224,224,1984.54,515.937,1024,116.09,24.2,62.77\r\nconvmixer_768_32,224,1981.33,1033.589,2048,21.11,19.55,25.95\r\nefficientnet_b3_g8_gn,320,1973.05,1037.917,2048,14.25,3.2,28.83\r\nregnety_120,288,1971.1,1038.951,2048,51.82,20.06,35.34\r\nvit_betwixt_patch16_reg4_gap_384,384,1964.35,1042.517,2048,60.6,39.71,85.28\r\nresnext101_32x16d,224,1957.98,1045.918,2048,194.03,36.27,51.18\r\nvit_large_patch32_384,384,1951.19,1049.555,2048,306.63,45.31,43.86\r\nnextvit_base,384,1939.49,1055.886,2048,44.79,24.62,73.95\r\nseresnext101_32x8d,288,1935.43,1058.097,2048,93.57,27.24,51.63\r\nxcit_tiny_12_p8_384,384,1928.78,1061.743,2048,6.71,14.13,69.14\r\nhgnet_base,288,1925.06,531.872,1024,71.58,41.55,25.57\r\nconvnextv2_large,224,1924.3,1064.216,2048,197.96,34.4,43.13\r\nresnetv2_50x3_bit,224,1923.34,1064.745,2048,217.32,37.06,33.34\r\nefficientvit_l3,256,1917.89,200.174,384,246.04,36.06,50.98\r\nseresnext101d_32x8d,288,1894.35,1081.04,2048,93.59,27.64,52.95\r\nnf_regnet_b5,456,1872.62,1093.588,2048,49.74,11.7,61.95\r\nconvnext_small,384,1867.68,1096.479,2048,50.22,25.58,63.37\r\ndavit_large,224,1863.23,412.139,768,196.81,34.6,60.99\r\nresnet200d,320,1843.17,1111.062,2048,64.69,31.25,67.33\r\nmobilevitv2_200,384,1841.67,104.218,192,18.45,16.24,72.34\r\nxcit_large_24_p16_224,224,1840.93,1112.409,2048,189.1,35.86,47.27\r\nconvnext_base,320,1824.69,1122.312,2048,88.59,31.39,58.68\r\nregnety_320,224,1813.49,1129.248,2048,145.05,32.34,30.26\r\nswinv2_large_window12_192,192,1808.17,1132.571,2048,228.77,26.17,56.53\r\nnest_base,224,1807.41,1133.047,2048,67.72,17.96,53.39\r\nswinv2_cr_tiny_384,384,1798.03,854.21,1536,28.33,15.34,161.01\r\ncrossvit_18_dagger_408,408,1797.86,1139.07,2048,44.61,32.47,124.87\r\nregnety_160,288,1796.82,1139.727,2048,83.59,26.37,38.07\r\nnest_base_jx,224,1794.82,1140.992,2048,67.72,17.96,53.39\r\necaresnet200d,288,1782.24,1149.
053,2048,64.69,25.31,54.59\r\nseresnet200d,288,1781.0,1149.853,2048,71.86,25.32,54.6\r\nseresnextaa101d_32x8d,288,1779.78,1150.632,2048,93.59,28.51,56.44\r\nseresnet269d,256,1774.19,1154.265,2048,113.67,26.59,53.6\r\nmixer_l16_224,224,1754.79,1167.028,2048,208.2,44.6,41.69\r\ntf_efficientnetv2_m,384,1741.46,1175.958,2048,54.14,15.85,57.52\r\nvit_mediumd_patch16_reg4_gap_384,384,1729.55,1184.057,2048,64.27,43.67,113.51\r\nresnetrs270,256,1714.69,1194.325,2048,129.86,27.06,55.84\r\nvolo_d1_384,384,1711.28,1196.7,2048,26.78,22.75,108.55\r\nswin_large_patch4_window7_224,224,1706.08,1200.351,2048,196.53,34.53,54.94\r\nfastvit_mci4,256,1704.23,450.583,768,321.57,27.78,60.59\r\nxcit_small_24_p16_384,384,1691.66,1210.579,2048,47.67,26.72,68.58\r\nrdnet_large,224,1675.42,916.718,1536,186.27,34.74,46.67\r\nmambaout_base_short_rw,288,1669.59,1226.592,2048,88.83,26.96,62.94\r\nmaxvit_rmlp_base_rw_224,224,1651.54,309.962,512,116.14,23.15,92.64\r\nlegacy_senet154,224,1644.26,1245.492,2048,115.09,20.77,38.69\r\nsenet154,224,1641.21,1247.811,2048,115.09,20.77,38.69\r\nvit_base_patch16_18x2_224,224,1615.16,1267.915,2048,256.73,52.51,71.38\r\nnaflexvit_base_patch16_siglip,384,1609.94,1272.026,2048,92.93,56.12,102.2\r\nswinv2_cr_large_224,224,1607.01,1274.35,2048,196.68,35.1,78.42\r\nregnetx_320,224,1600.11,1279.838,2048,107.81,31.81,36.3\r\nnfnet_f2,256,1595.2,1283.781,2048,193.78,33.76,41.85\r\neca_nfnet_l2,384,1594.95,1283.989,2048,56.72,30.05,68.28\r\nmambaout_base_tall_rw,288,1591.41,1286.839,2048,86.48,26.69,64.04\r\nconvformer_s36,384,1584.42,969.37,1536,40.01,22.54,89.62\r\nvit_small_patch14_dinov2,518,1582.8,1293.846,2048,22.06,46.76,198.79\r\nnextvit_large,384,1582.09,1294.425,2048,57.83,32.0,90.76\r\ndm_nfnet_f2,256,1578.08,1297.706,2048,193.78,33.76,41.85\r\ncaformer_s36,384,1575.93,974.604,1536,39.3,26.08,150.33\r\nvit_base_patch16_clip_384,384,1575.33,1299.976,2048,86.86,55.54,101.56\r\nvit_small_patch14_reg4_dinov2,518,1568.83,1305.363,2048,22.06,46.95,199.77\r\nnaflexv
it_base_patch16_gap,384,1564.97,1308.591,2048,86.63,55.86,102.34\r\nnaflexvit_base_patch16_parfac_gap,384,1564.95,1308.6,2048,86.46,55.86,102.34\r\nnaflexvit_base_patch16_par_gap,384,1562.48,1310.674,2048,86.63,55.86,102.34\r\nconvnext_large_mlp,256,1553.4,1318.327,2048,200.13,44.94,56.33\r\nnaflexvit_base_patch16_map,384,1546.16,1324.5,2048,93.72,56.23,102.46\r\nrepvgg_d2se,320,1539.49,1330.238,2048,120.39,66.99,23.42\r\nvit_base_patch16_384,384,1538.07,1331.465,2048,86.86,55.54,101.56\r\ndeit3_base_patch16_384,384,1536.49,1332.841,2048,86.88,55.54,101.56\r\ndeit_base_patch16_384,384,1534.09,1334.918,2048,86.86,55.54,101.56\r\nvit_base_patch16_siglip_gap_384,384,1533.17,1335.726,2048,86.09,55.43,101.3\r\ndeit_base_distilled_patch16_384,384,1529.98,1338.51,2048,87.63,55.65,101.82\r\nvit_base_patch16_siglip_384,384,1517.59,1349.443,2048,93.18,56.12,102.2\r\nefficientnetv2_m,416,1515.24,1351.527,2048,54.14,18.6,67.5\r\ndeit3_large_patch16_224,224,1515.09,1351.663,2048,304.37,61.6,63.52\r\nswinv2_base_window16_256,256,1513.99,1352.649,2048,87.92,22.02,84.71\r\neva_large_patch14_196,196,1513.95,1352.69,2048,304.14,61.57,63.52\r\nvit_large_patch16_224,224,1509.84,1356.367,2048,304.33,61.6,63.52\r\nswinv2_base_window12to16_192to256,256,1506.68,1359.218,2048,87.92,22.02,84.71\r\ncoatnet_rmlp_3_rw_224,224,1497.08,128.207,192,165.15,33.56,79.47\r\ncoatnet_3_rw_224,224,1495.74,128.329,192,181.81,33.44,73.83\r\neca_nfnet_l3,352,1466.27,1396.676,2048,72.04,32.57,73.12\r\nnfnet_f1,320,1465.71,1397.212,2048,132.63,35.97,46.77\r\nmaxvit_base_tf_224,224,1438.24,266.956,384,119.47,24.04,95.01\r\nresnetrs200,320,1435.39,1426.72,2048,93.21,31.51,67.81\r\nmambaout_base_wide_rw,288,1432.88,1429.227,2048,94.45,29.39,70.41\r\nbeit_large_patch16_224,224,1432.02,1430.073,2048,304.43,61.6,63.52\r\nbeit3_large_patch16_224,224,1431.98,1430.125,2048,304.57,61.72,63.52\r\nvit_large_patch16_rope_ape_224,224,1430.77,1431.32,2048,304.37,61.6,63.52\r\nbeitv2_large_patch16_224,224,1430.12,1431.974,20
48,304.43,61.6,63.52\r\ndm_nfnet_f1,320,1430.01,1432.077,2048,132.63,35.97,46.77\r\nvit_large_patch16_rope_224,224,1429.94,1432.159,2048,304.17,61.6,63.52\r\nseresnextaa101d_32x8d,320,1427.02,1435.092,2048,93.59,35.19,69.67\r\nresnetv2_152x2_bit,224,1421.5,1440.665,2048,236.34,46.95,45.11\r\nregnetz_e8,320,1421.3,1440.87,2048,57.7,15.46,63.94\r\ncoatnet_3_224,224,1397.63,137.336,192,166.97,36.56,79.01\r\nseresnet269d,288,1391.06,1472.199,2048,113.67,33.65,67.81\r\nmambaout_base,288,1384.54,1479.133,2048,84.81,26.16,61.08\r\ntiny_vit_21m_512,512,1365.54,187.43,256,21.26,26.93,177.93\r\nmambaout_base_plus_rw,288,1360.8,1504.933,2048,101.66,31.72,74.64\r\nefficientnet_b5,416,1342.76,190.613,256,30.39,8.27,80.68\r\nbeit_base_patch16_384,384,1338.12,1530.44,2048,86.74,55.54,101.56\r\nflexivit_large,240,1330.89,1538.747,2048,304.36,70.99,75.39\r\nefficientnetv2_rw_m,416,1325.84,1544.617,2048,53.24,21.49,79.62\r\nconvnext_xlarge,224,1314.72,1557.679,2048,350.2,60.98,57.5\r\nresnetv2_101x1_bit,448,1314.07,1558.452,2048,44.54,31.65,64.93\r\nvit_large_patch16_rope_mixed_ape_224,224,1299.68,1575.702,2048,304.4,61.6,68.34\r\nvit_large_patch16_rope_mixed_224,224,1296.32,1579.787,2048,304.2,61.6,68.34\r\nxcit_small_24_p8_224,224,1275.97,1604.982,2048,47.63,35.81,90.78\r\nconvnext_base,384,1275.36,1204.292,1536,88.59,45.21,84.49\r\nnasnetalarge,331,1270.14,403.057,512,88.75,23.89,90.56\r\npnasnet5large,331,1249.84,614.416,768,86.06,25.04,92.89\r\nmaxvit_tiny_tf_384,384,1243.11,154.403,192,30.98,17.53,123.42\r\nmaxxvitv2_rmlp_large_rw_224,224,1230.45,832.155,1024,215.42,44.14,87.15\r\nefficientvit_l3,320,1227.37,208.525,256,246.04,56.32,79.34\r\nvolo_d4_224,224,1226.26,1670.055,2048,192.96,44.34,80.22\r\nconvnext_large,288,1222.96,1674.544,2048,197.77,56.87,71.29\r\ndavit_huge,224,1211.8,633.703,768,348.92,61.23,81.32\r\nvit_large_r50_s32_384,384,1202.04,1703.707,2048,329.09,57.43,76.52\r\ninception_next_base,384,1197.79,1709.749,2048,86.67,43.64,75.48\r\nconvnextv2_base,384,1195.9
9,1284.228,1536,88.72,45.21,84.49\r\nvit_large_patch16_siglip_gap_256,256,1165.16,1757.632,2048,303.36,80.8,88.34\r\nvit_large_patch16_siglip_256,256,1163.43,1760.244,2048,315.96,81.34,88.88\r\ntresnet_l,448,1161.61,881.481,1024,55.99,43.59,47.56\r\nxcit_medium_24_p16_384,384,1160.98,1763.952,2048,84.4,47.39,91.64\r\nefficientnet_b5,448,1156.01,221.4,256,30.39,9.59,93.56\r\nvit_large_patch14_clip_224,224,1155.99,1771.584,2048,304.2,81.08,88.79\r\nvit_large_patch14_clip_quickgelu_224,224,1155.07,1772.989,2048,303.97,81.08,88.79\r\nconvnextv2_large,288,1154.47,1773.907,2048,197.96,56.87,71.29\r\nefficientnet_x_b5,448,1150.89,667.26,768,33.44,23.35,68.87\r\nvit_large_patch14_xp_224,224,1140.27,1795.992,2048,304.06,81.01,88.79\r\naimv2_large_patch14_224,224,1138.44,1798.895,2048,309.2,82.3,85.2\r\nresnest200e,320,1129.78,1812.665,2048,70.2,35.69,82.78\r\necaresnet269d,320,1125.49,1819.594,2048,102.09,41.53,83.69\r\nvit_large_patch14_224,224,1123.49,1822.827,2048,304.2,81.08,88.79\r\ntf_efficientnetv2_m,480,1111.81,1381.468,1536,54.14,24.76,89.84\r\nvit_base_patch8_224,224,1109.94,1845.072,2048,86.58,78.22,161.69\r\nconvformer_m36,384,1108.87,923.406,1024,57.05,37.87,123.56\r\nconvmixer_1024_20_ks9_p14,224,1098.6,1864.125,2048,24.38,5.55,5.51\r\ncaformer_m36,384,1097.83,932.678,1024,56.2,42.11,196.35\r\nresnetv2_101x3_bit,224,1097.58,1865.86,2048,387.93,71.23,48.7\r\nregnety_640,224,1091.69,1875.917,2048,281.38,64.16,42.5\r\nhiera_large_224,224,1090.91,1877.26,2048,213.74,40.34,83.37\r\nresnetrs350,288,1076.23,1902.878,2048,163.96,43.67,87.09\r\nswinv2_cr_small_384,384,1053.71,1457.637,1536,49.7,29.7,298.03\r\nvitamin_large_224,224,1042.8,245.443,256,333.32,75.05,112.83\r\nvitamin_large2_224,224,1039.46,246.239,256,333.58,75.05,112.83\r\nefficientnet_h_b5,448,1036.08,494.104,512,45.88,27.16,73.9\r\nvit_base_r50_s16_384,384,1016.35,2014.987,2048,98.95,67.43,135.03\r\nregnety_160,384,1015.56,2016.555,2048,83.59,46.87,67.67\r\nmaxvit_large_tf_224,224,1015.14,252.142,256,211
.79,43.68,127.35\r\nxcit_tiny_24_p8_384,384,1013.6,2020.45,2048,12.11,27.05,132.95\r\ntf_efficientnet_b5,456,1012.96,252.689,256,30.39,10.46,98.86\r\nefficientnetv2_l,384,1010.4,2026.858,2048,118.52,36.1,101.16\r\nconvnext_large_mlp,320,990.16,1551.206,1536,200.13,70.21,88.02\r\ntf_efficientnetv2_l,384,990.07,2068.478,2048,118.52,36.1,101.16\r\nvit_so150m_patch16_reg4_gap_384,384,983.34,2082.637,2048,134.42,87.97,165.47\r\nresnet50x16_clip_gap,384,976.91,2096.34,2048,136.2,70.32,100.64\r\nvolo_d2_384,384,965.44,2121.254,2048,58.87,46.17,184.51\r\nvit_large_patch16_dinov3_qkvb,256,952.84,2149.29,2048,303.13,82.43,90.56\r\nvit_large_patch16_dinov3,256,951.38,2152.606,2048,303.08,82.43,90.56\r\neva02_large_patch14_224,224,951.03,2153.387,2048,303.27,81.15,97.2\r\neva02_large_patch14_clip_224,224,949.1,2157.768,2048,304.11,81.18,97.2\r\nnaflexvit_so150m2_patch16_reg1_gap,384,942.89,2171.971,2048,136.06,89.53,178.22\r\nresnet50x16_clip,384,934.09,2192.427,2048,167.33,74.9,103.54\r\nmvitv2_large,224,930.71,2200.411,2048,217.99,43.87,112.02\r\nnaflexvit_so150m2_patch16_reg1_map,384,929.78,2202.611,2048,142.46,90.33,179.2\r\nvit_so150m2_patch16_reg1_gap_384,384,928.31,2206.088,2048,136.33,89.53,178.22\r\necaresnet269d,352,928.14,2206.508,2048,102.09,50.25,101.25\r\nresnetrs270,352,894.96,2288.307,2048,129.86,51.13,105.48\r\neca_nfnet_l3,448,888.42,2305.161,2048,72.04,52.55,118.4\r\nswinv2_large_window12to16_192to256,256,885.94,2311.595,2048,196.74,47.81,121.53\r\ncoat_lite_medium_384,384,870.14,1765.161,1536,44.57,28.73,116.7\r\nxcit_medium_24_p8_224,224,868.11,2359.085,2048,84.32,63.53,121.23\r\ncoatnet_4_224,224,858.34,223.642,192,275.43,62.48,129.26\r\nefficientvit_l3,384,858.13,223.693,192,246.04,81.08,114.02\r\nnfnet_f2,352,841.26,2434.37,2048,193.78,63.22,79.06\r\nvit_so400m_patch14_siglip_224,224,823.8,2485.981,2048,427.68,110.26,106.73\r\ndm_nfnet_f2,352,822.97,2488.498,2048,193.78,63.22,79.06\r\nvit_so400m_patch14_siglip_gap_224,224,822.63,2489.493,2048,412.44,109.
57,106.13\r\nxcit_small_12_p8_384,384,821.98,2491.475,2048,26.21,54.92,138.29\r\nvit_so400m_patch16_siglip_gap_256,256,821.67,2492.421,2048,412.65,109.62,106.13\r\nvit_so400m_patch16_siglip_256,256,820.23,2496.789,2048,427.89,110.31,106.73\r\ncoatnet_rmlp_2_rw_384,384,819.17,117.155,96,73.88,47.69,209.43\r\nvit_base_patch16_siglip_gap_512,512,817.92,2503.827,2048,86.43,107.0,246.15\r\ncait_xxs24_384,384,813.66,2516.955,2048,12.03,9.63,122.66\r\nvit_base_patch16_siglip_512,512,810.63,2526.374,2048,93.52,108.22,247.74\r\nvitamin_large_256,256,803.32,238.963,192,333.38,99.0,154.99\r\nvitamin_large2_256,256,800.84,239.704,192,333.64,99.0,154.99\r\nconvnext_xlarge,288,793.29,1936.167,1536,350.2,100.8,95.05\r\nmvitv2_large_cls,224,792.43,2584.376,2048,234.58,42.17,111.69\r\nmaxvit_small_tf_384,384,790.14,161.95,128,69.02,35.87,183.65\r\ntresnet_xl,448,787.11,975.667,768,78.44,60.77,61.31\r\nvolo_d5_224,224,784.21,2611.486,2048,295.46,72.4,118.11\r\nnfnet_f3,320,783.15,2615.01,2048,254.92,68.77,83.93\r\nresmlp_big_24_224,224,782.51,2617.131,2048,129.14,100.23,87.31\r\nmambaout_base_plus_rw,384,775.85,1319.776,1024,101.66,56.39,132.7\r\nconvformer_b36,384,772.27,994.402,768,99.88,66.67,164.75\r\ncaformer_b36,384,766.8,1001.515,768,98.75,72.33,261.79\r\nswin_base_patch4_window12_384,384,765.52,2675.255,2048,87.9,47.19,134.78\r\ndm_nfnet_f3,320,765.36,2675.774,2048,254.92,68.77,83.93\r\nvit_pe_spatial_base_patch16_512,512,764.5,2678.798,2048,86.43,107.13,246.54\r\neva02_base_patch14_448,448,744.24,2751.736,2048,87.12,107.11,259.14\r\nswinv2_cr_base_384,384,743.02,1378.101,1024,87.88,50.57,333.68\r\nconvnextv2_huge,224,732.26,2097.546,1536,660.29,115.0,79.07\r\nresnetrs420,320,732.11,2797.317,2048,191.89,64.2,126.56\r\nefficientnetv2_xl,384,716.0,2860.271,2048,208.12,52.81,139.2\r\nseresnextaa201d_32x8d,320,710.09,2884.086,2048,149.39,70.22,138.71\r\ntf_efficientnetv2_xl,384,704.92,2905.231,2048,208.12,52.81,139.2\r\nefficientnet_x_b5,576,697.88,733.591,512,33.44,38.59,113.83\
r\nconvnext_large,384,692.83,1477.937,1024,197.77,101.1,126.74\r\nconvnext_large_mlp,384,692.65,1478.326,1024,200.13,101.11,126.74\r\nresnext101_32x32d,224,688.08,2976.341,2048,468.53,87.29,91.12\r\nmaxvit_tiny_tf_512,512,685.09,140.08,96,31.05,33.49,257.59\r\nconvnextv2_large,384,655.9,1561.161,1024,197.96,101.1,126.74\r\nvit_so150m2_patch16_reg1_gap_448,448,648.06,3160.138,2048,136.5,127.51,287.05\r\nefficientnetv2_l,480,645.25,2380.404,1536,118.52,56.4,157.99\r\nxcit_large_24_p16_384,384,635.3,3223.587,2048,189.1,105.35,137.17\r\ntf_efficientnetv2_l,480,634.07,1614.887,1024,118.52,56.4,157.99\r\nefficientnet_b6,528,631.87,202.527,128,43.04,19.4,167.39\r\nvitamin_xlarge_256,256,627.49,203.945,128,436.06,130.13,177.37\r\nmaxxvitv2_rmlp_base_rw_384,384,626.65,612.722,384,116.09,72.98,213.74\r\nefficientnet_h_b5,576,625.35,613.994,384,45.88,44.9,122.13\r\nswinv2_cr_huge_224,224,625.05,3276.447,2048,657.83,115.97,121.08\r\ncait_xs24_384,384,613.6,3337.614,2048,26.67,19.28,183.98\r\nresnetrs350,384,598.09,3424.143,2048,163.96,77.59,154.74\r\nfocalnet_huge_fl3,224,597.04,2572.64,1536,745.28,118.26,104.8\r\nregnety_320,384,594.7,2582.752,1536,145.05,95.0,88.87\r\ntf_efficientnet_b6,528,586.19,218.31,128,43.04,19.4,167.39\r\nrdnet_large,384,583.28,877.725,512,186.27,102.09,137.13\r\nvit_base_patch14_dinov2,518,582.06,3518.446,2048,86.58,151.71,397.58\r\nvit_huge_patch14_clip_224,224,580.55,3527.643,2048,632.05,167.4,139.41\r\nvit_huge_patch14_clip_quickgelu_224,224,579.94,3531.309,2048,632.08,167.4,139.41\r\nvit_base_patch14_reg4_dinov2,518,578.57,3539.685,2048,86.58,152.25,399.53\r\nvit_huge_patch14_gap_224,224,577.84,3544.152,2048,630.76,166.73,138.74\r\nvit_huge_patch14_xp_224,224,576.97,3549.489,2048,631.8,167.3,139.41\r\nmaxvit_xlarge_tf_224,224,574.41,334.205,192,506.99,97.52,191.04\r\naimv2_huge_patch14_224,224,572.26,3578.732,2048,680.85,179.01,126.22\r\ndeit3_huge_patch14_224,224,571.81,3581.53,2048,632.13,167.4,139.41\r\nvit_huge_patch14_224,224,566.9,3612.549,2
048,630.76,167.4,139.41\r\nsam2_hiera_tiny,896,544.7,117.461,64,26.85,99.86,384.63\r\ncait_xxs36_384,384,543.87,3765.527,2048,17.37,14.35,183.7\r\nregnety_1280,224,543.28,3769.614,2048,644.81,127.66,71.58\r\nmaxvit_rmlp_base_rw_384,384,516.99,371.329,192,116.14,70.97,318.95\r\nhiera_huge_224,224,509.27,4021.379,2048,672.78,124.85,150.95\r\nvit_large_patch14_clip_quickgelu_336,336,501.98,4079.779,2048,304.29,191.11,270.24\r\nvit_large_patch14_clip_336,336,501.05,4087.347,2048,304.53,191.11,270.24\r\ncait_s24_384,384,500.7,4090.179,2048,47.06,32.17,245.31\r\nvolo_d3_448,448,491.83,4163.959,2048,86.63,96.33,446.83\r\nseresnextaa201d_32x8d,384,490.74,4173.221,2048,149.39,101.11,199.72\r\neva_large_patch14_336,336,489.33,4185.239,2048,304.53,191.1,270.24\r\ndeit3_large_patch16_384,384,488.74,4190.301,2048,304.76,191.21,270.24\r\nvit_large_patch16_siglip_gap_384,384,488.37,4193.466,2048,303.69,190.85,269.55\r\nvit_large_patch16_384,384,487.4,4201.781,2048,304.72,191.21,270.24\r\nvit_large_patch16_siglip_384,384,486.53,4209.338,2048,316.28,192.07,270.75\r\nresnest269e,416,486.35,4210.912,2048,110.93,77.69,171.98\r\naimv2_large_patch14_336,336,483.11,4239.101,2048,309.53,194.22,227.08\r\nvit_giant_patch16_gap_224,224,479.73,4269.017,2048,1011.37,202.46,139.26\r\nresnetv2_152x4_bit,224,478.45,4280.395,2048,936.53,186.9,90.22\r\nxcit_large_24_p8_224,224,473.92,4321.318,2048,188.93,141.23,181.56\r\ncoatnet_5_224,224,471.71,271.31,128,687.47,145.49,194.24\r\nvit_pe_core_large_patch14_336,336,471.34,4344.973,2048,317.15,192.33,271.43\r\nresnetv2_50x3_bit,448,471.02,2173.936,1024,217.32,145.7,133.37\r\nresnetv2_152x2_bit,384,469.47,4362.258,2048,236.34,136.16,132.56\r\ndavit_giant,224,461.59,1663.748,768,1406.47,192.92,153.06\r\nsam2_hiera_small,896,459.97,139.098,64,33.95,123.99,442.63\r\nconvnext_xxlarge,256,457.59,2237.743,1024,846.47,198.09,124.45\r\nmaxvit_base_tf_384,384,456.83,280.144,128,119.65,73.8,332.9\r\nconvnext_xlarge,384,451.1,1702.436,768,350.2,179.2,168.99\r\nvit
amin_large_336,336,447.04,214.706,96,333.57,175.72,307.47\r\nvitamin_large2_336,336,445.14,215.625,96,333.83,175.72,307.47\r\nswin_large_patch4_window12_384,384,441.21,4641.725,2048,196.74,104.08,202.16\r\nconvnextv2_huge,288,440.64,2323.843,1024,660.29,190.1,130.7\r\nmaxvit_small_tf_512,512,437.01,146.413,64,69.13,67.26,383.77\r\nnfnet_f3,416,436.09,4696.167,2048,254.92,115.58,141.78\r\nbeit_large_patch16_384,384,434.27,4715.87,2048,305.0,191.21,270.24\r\nxcit_small_24_p8_384,384,429.82,4764.724,2048,47.63,105.24,265.91\r\nswinv2_cr_large_384,384,428.3,2390.768,1024,196.68,108.96,404.96\r\nresnetrs420,416,426.52,4801.592,2048,191.89,108.45,213.79\r\neva02_large_patch14_clip_336,336,425.16,4816.988,2048,304.43,191.34,289.13\r\ndm_nfnet_f3,416,422.83,4843.506,2048,254.92,115.58,141.78\r\nnfnet_f4,384,419.87,4877.683,2048,316.07,122.14,147.57\r\nswinv2_base_window12to24_192to384,384,415.94,1846.353,768,87.92,55.25,280.36\r\ndm_nfnet_f4,384,405.09,5055.643,2048,316.07,122.14,147.57\r\nefficientnetv2_xl,512,404.04,2534.344,1024,208.12,93.85,247.32\r\ntf_efficientnetv2_xl,512,398.74,2568.03,1024,208.12,93.85,247.32\r\nvit_huge_plus_patch16_dinov3,256,392.23,5221.314,2048,840.51,224.88,193.59\r\nvit_huge_plus_patch16_dinov3_qkvb,256,392.2,5221.77,2048,840.59,224.88,193.59\r\nmvitv2_huge_cls,224,374.38,4102.689,1536,694.8,120.67,243.63\r\nvit_giant_patch14_clip_224,224,373.75,5479.564,2048,1012.65,267.18,192.64\r\neva_giant_patch14_224,224,368.17,5562.592,2048,1012.56,267.18,192.64\r\nvit_giant_patch14_224,224,367.85,5567.397,2048,1012.61,267.18,192.64\r\nefficientnet_b7,600,366.99,261.545,96,66.35,38.33,289.94\r\neva_giant_patch14_clip_224,224,365.92,5596.849,2048,1012.59,267.18,192.64\r\nnaflexvit_so400m_patch16_siglip,384,358.94,5705.584,2048,427.89,259.65,319.77\r\nvitamin_xlarge_336,336,353.42,271.587,96,436.06,230.18,347.33\r\naimv2_1b_patch14_224,224,352.34,5812.561,2048,1234.96,322.43,170.39\r\nregnety_640,384,351.95,2909.418,1024,281.38,188.47,124.83\r\nresnetv2_1
52x2_bit,448,349.83,2927.075,1024,236.34,184.99,180.43\r\nvit_so400m_patch16_siglip_gap_384,384,349.66,5856.995,2048,413.02,258.11,318.42\r\nbeit3_giant_patch14_224,224,349.41,5861.277,2048,1013.22,267.56,192.64\r\nvit_so400m_patch16_siglip_384,384,347.58,5892.15,2048,428.26,259.65,319.77\r\ntf_efficientnet_b7,600,345.61,277.727,96,66.35,38.33,289.94\r\nvit_giantopt_patch16_siglip_gap_256,256,345.23,5932.171,2048,1134.84,298.42,199.62\r\nvit_giantopt_patch16_siglip_256,256,344.0,5953.367,2048,1163.17,299.66,200.43\r\nvitamin_large_384,384,340.28,188.043,64,333.71,234.44,440.16\r\nvitamin_large2_384,384,339.38,188.535,64,333.97,234.44,440.16\r\nresnet50x64_clip_gap,448,338.87,6043.465,2048,365.03,253.96,233.22\r\nfocalnet_huge_fl4,224,334.09,4597.459,1536,686.46,118.9,113.34\r\ncait_s36_384,384,333.35,6143.539,2048,68.37,47.99,367.4\r\nresnet50x64_clip,448,322.43,6351.754,2048,420.38,265.02,239.13\r\nmaxvit_large_tf_384,384,319.52,300.4,96,212.03,132.55,445.84\r\nnfnet_f5,416,298.03,6871.657,2048,377.21,170.71,204.56\r\nxcit_medium_24_p8_384,384,292.63,5248.954,1536,84.32,186.67,354.73\r\ndm_nfnet_f5,416,287.07,7134.127,2048,377.21,170.71,204.56\r\nvolo_d4_448,448,285.71,7167.993,2048,193.41,197.13,527.35\r\ndavit_base_fl,768,282.13,680.471,192,90.37,190.32,530.15\r\nresnetv2_101x3_bit,448,271.33,3773.97,1024,387.93,280.33,194.78\r\nvit_so400m_patch14_siglip_gap_378,378,269.58,7596.988,2048,412.99,333.46,451.19\r\nvit_so400m_patch14_siglip_gap_384,384,269.45,7600.484,2048,412.99,333.46,451.19\r\nvitamin_xlarge_384,384,268.53,238.297,64,436.06,306.38,493.46\r\nvit_so400m_patch14_siglip_378,378,268.44,7629.294,2048,428.23,335.4,452.89\r\nvit_so400m_patch14_siglip_384,384,268.1,7638.944,2048,428.23,335.4,452.89\r\nvit_large_patch16_siglip_gap_512,512,262.77,7793.845,2048,304.15,361.84,655.36\r\nvit_large_patch16_siglip_512,512,261.48,7832.232,2048,316.74,364.0,657.48\r\naimv2_large_patch14_448,448,258.06,7936.045,2048,309.98,367.84,491.78\r\nswinv2_large_window12to24_19
2to384,384,257.82,1985.844,512,196.74,116.15,407.83\r\nvit_intern300m_patch14_448,448,257.06,7966.899,2048,304.01,362.05,656.39\r\nvit_pe_lang_large_patch14_448,448,254.31,8052.954,2048,291.42,346.99,629.09\r\nmaxvit_base_tf_512,512,251.41,254.523,64,119.88,138.02,703.99\r\nconvnextv2_huge,384,251.07,2039.22,512,660.29,337.96,232.35\r\nvit_pe_spatial_large_patch14_448,448,249.99,8192.131,2048,303.96,362.05,656.39\r\nvit_huge_patch14_clip_336,336,249.59,8205.239,2048,632.46,390.97,407.54\r\naimv2_huge_patch14_336,336,249.0,8224.783,2048,681.34,416.36,337.08\r\nsam2_hiera_base_plus,896,238.65,268.133,64,68.68,227.48,828.88\r\nefficientnet_b8,672,230.48,416.46,96,87.41,63.48,442.89\r\neva02_large_patch14_448,448,225.63,9076.642,2048,305.08,362.33,689.95\r\nnfnet_f4,512,223.96,9144.616,2048,316.07,216.26,262.26\r\ndm_nfnet_f4,512,219.82,9316.853,2048,316.07,216.26,262.26\r\ntf_efficientnet_b8,672,219.41,437.478,96,87.41,63.48,442.89\r\nbeit_large_patch16_512,512,218.52,9372.21,2048,305.67,362.24,656.39\r\nfocalnet_large_fl3,384,217.39,4710.337,1024,239.13,105.06,168.04\r\nfocalnet_large_fl4,384,217.15,4715.486,1024,239.32,105.2,181.78\r\nnfnet_f6,448,215.73,9493.492,2048,438.36,229.7,273.62\r\nvit_gigantic_patch14_clip_quickgelu_224,224,213.8,9578.823,2048,1844.91,483.96,275.37\r\nvit_gigantic_patch14_clip_224,224,213.23,9604.65,2048,1844.91,483.96,275.37\r\nvit_gigantic_patch14_224,224,209.63,9769.681,2048,1844.44,483.95,275.37\r\ndm_nfnet_f6,448,208.15,9839.125,2048,438.36,229.7,273.62\r\nregnety_1280,384,195.39,3930.588,768,644.81,374.99,210.2\r\nvit_huge_patch14_clip_quickgelu_378,378,194.68,10519.586,2048,632.68,503.79,572.79\r\nvit_huge_patch14_clip_378,378,194.35,10537.484,2048,632.68,503.79,572.79\r\nvit_large_patch14_dinov2,518,187.8,10905.272,2048,304.37,507.15,1058.82\r\nvit_large_patch14_reg4_dinov2,518,187.6,10916.598,2048,304.37,508.9,1064.02\r\nswinv2_cr_giant_224,224,186.41,10986.597,2048,2598.76,483.85,309.15\r\nvit_so400m_patch14_siglip_gap_448,448,185
.67,11030.247,2048,413.33,487.18,764.26\r\nvit_so400m_patch16_siglip_gap_512,512,185.59,11034.767,2048,413.53,487.4,764.26\r\nvolo_d5_448,448,185.25,11055.145,2048,295.91,315.06,737.92\r\nvit_so400m_patch16_siglip_512,512,184.6,11094.381,2048,428.77,490.13,766.65\r\nmaxvit_xlarge_tf_384,384,184.31,347.182,64,475.32,292.78,668.76\r\ncait_m36_384,384,176.43,11607.889,2048,271.22,173.11,734.81\r\nswinv2_cr_huge_384,384,176.16,2906.453,512,657.94,352.04,583.18\r\nmaxvit_large_tf_512,512,176.02,272.651,48,212.33,244.75,942.15\r\nvit_huge_patch16_gap_448,448,173.81,11782.653,2048,631.67,544.7,636.83\r\nfocalnet_xlarge_fl3,384,168.59,4555.351,768,408.79,185.61,223.99\r\naimv2_3b_patch14_224,224,165.46,12377.568,2048,2720.66,705.91,252.44\r\nfocalnet_xlarge_fl4,384,164.48,4669.179,768,409.03,185.79,242.31\r\nnfnet_f5,544,163.71,12509.782,2048,377.21,290.97,349.71\r\ndm_nfnet_f5,544,162.81,9434.248,1536,377.21,290.97,349.71\r\nnfnet_f7,480,162.07,12636.269,2048,499.5,300.08,355.86\r\nxcit_large_24_p8_384,384,161.44,6342.984,1024,188.93,415.0,531.82\r\neva_giant_patch14_336,336,158.84,12893.158,2048,1013.01,620.64,550.67\r\nbeit3_giant_patch14_336,336,151.15,13549.209,2048,1013.67,621.52,550.67\r\nconvmixer_1536_20,224,149.71,6840.02,1024,51.63,48.68,33.03\r\naimv2_1b_patch14_336,336,148.87,13757.185,2048,1235.61,743.59,454.16\r\nvit_giantopt_patch16_siglip_gap_384,384,148.39,13801.206,2048,1135.33,694.1,567.12\r\ntf_efficientnet_l2,475,147.94,432.569,64,480.31,172.11,609.89\r\nvit_giantopt_patch16_siglip_384,384,147.83,13853.858,2048,1163.66,696.85,568.91\r\nvolo_d5_512,512,141.88,10825.71,1536,296.09,425.09,1105.37\r\nconvnextv2_huge,512,141.77,1805.628,256,660.29,600.81,413.07\r\naimv2_huge_patch14_448,448,134.49,15227.381,2048,682.03,774.02,731.38\r\nnfnet_f6,576,128.32,15960.04,2048,438.36,378.69,452.2\r\ndm_nfnet_f6,576,125.45,16324.898,2048,438.36,378.69,452.2\r\nregnety_2560,384,105.09,4871.935,512,1282.6,747.83,296.49\r\nresnetv2_152x4_bit,480,103.97,4924.279,512,936
.53,844.84,414.26\r\nmobilenetv5_300m,768,102.42,624.823,64,294.13,435.74,842.16\r\ndavit_huge_fl,768,102.29,1876.867,192,360.64,744.84,1060.3\r\nmobilenetv5_300m_enc,768,100.71,635.404,64,294.13,435.74,842.16\r\nmaxvit_xlarge_tf_512,512,100.56,318.169,32,475.77,534.14,1413.22\r\nnfnet_f7,608,99.62,15418.602,1536,499.5,480.39,570.85\r\neva02_enormous_patch14_clip_224,224,95.67,21407.611,2048,4350.56,1132.46,497.58\r\naimv2_1b_patch14_448,448,81.03,18955.185,1536,1236.53,1367.03,983.56\r\ncait_m48_448,448,80.08,19179.896,1536,356.46,329.41,1708.23\r\nvit_gigantic_patch14_clip_378,378,72.76,28148.145,2048,1845.7,1429.82,1047.37\r\nsam2_hiera_large,1024,72.29,663.943,48,212.15,907.48,2190.34\r\naimv2_3b_patch14_336,336,72.17,21283.305,1536,2721.64,1615.48,674.17\r\nvit_7b_patch16_dinov3,256,62.44,32800.288,2048,6716.03,1775.1,515.87\r\nsamvit_base_patch16,1024,61.77,1036.1,64,89.67,486.43,1343.27\r\nvit_giant_patch14_dinov2,518,58.91,26071.728,1536,1136.48,1784.2,2757.89\r\nvit_giant_patch14_reg4_dinov2,518,58.73,26154.797,1536,1136.48,1790.08,2771.21\r\nefficientnet_l2,800,54.74,438.397,24,480.31,479.12,1707.39\r\nvit_pe_lang_gigantic_patch14_448,448,53.82,28540.947,1536,1740.92,1931.99,1664.88\r\ntf_efficientnet_l2,800,52.85,454.065,24,480.31,479.12,1707.39\r\nswinv2_cr_giant_384,384,52.03,7380.817,384,2598.76,1450.71,1394.86\r\neva_giant_patch14_560,560,51.98,29548.852,1536,1014.45,1906.76,2577.17\r\nvit_pe_core_gigantic_patch14_448,448,51.26,29963.529,1536,1882.03,2060.12,1774.21\r\nvit_pe_spatial_gigantic_patch14_448,448,50.57,30370.881,1536,1851.89,2055.25,1771.04\r\naimv2_3b_patch14_448,448,39.74,25770.045,1024,2723.02,2939.61,1462.76\r\nvit_so400m_patch14_siglip_gap_896,896,33.28,23076.785,768,416.87,2731.49,8492.88\r\nsamvit_large_patch16,1024,28.98,2208.375,64,308.28,1493.86,2553.78\r\nsamvit_huge_patch16,1024,18.55,2587.235,48,637.03,2982.23,3428.16\r\n"
  },
  {
    "path": "results/benchmark-infer-amp-nchw-pt291-cu130-pro6000maxq.csv",
    "content": "model,infer_img_size,infer_samples_per_sec,infer_step_time,infer_batch_size,param_count,infer_gmacs,infer_macts\r\ntest_vit,160,268105.47,7.62,2048,0.37,0.04,0.48\r\ntest_vit2,160,210746.16,9.69,2048,0.46,0.05,0.64\r\ntest_byobnet,160,150080.19,13.627,2048,0.46,0.03,0.43\r\ntest_vit4,160,141431.38,14.459,2048,1.02,0.11,1.07\r\ntest_vit3,160,140515.84,14.553,2048,0.93,0.09,1.0\r\ntest_efficientnet,160,135532.98,15.091,2048,0.36,0.06,0.55\r\ntest_efficientnet_gn,160,117396.88,17.42,2048,0.36,0.06,0.55\r\ntest_resnet,160,102118.32,20.028,2048,0.47,0.1,0.64\r\ntest_convnext,160,97402.6,21.006,2048,0.27,0.03,0.58\r\ntest_convnext3,160,96816.67,21.135,2048,0.47,0.05,0.63\r\ntest_convnext2,160,95884.41,21.341,2048,0.48,0.05,0.63\r\ntest_mambaout,160,91986.68,22.24,2048,0.45,0.03,0.53\r\ntinynet_e,106,90886.07,22.492,2048,2.04,0.03,0.69\r\nmobilenetv3_small_050,224,87873.89,23.286,2048,1.59,0.03,0.92\r\nefficientvit_m0,224,87661.14,23.338,2048,2.33,0.08,0.91\r\nlcnet_035,224,80921.16,25.277,2048,1.64,0.03,1.04\r\nmobilenetv4_conv_small_035,224,75614.63,27.061,2048,1.91,0.05,0.98\r\nlcnet_050,224,70543.62,28.982,2048,1.88,0.05,1.26\r\nmobilenetv4_conv_small_050,224,65378.61,31.305,2048,2.24,0.07,1.18\r\ntest_mambaout,192,63077.05,32.429,2048,0.45,0.04,0.77\r\nmobilenetv3_small_075,224,62520.56,32.733,2048,2.04,0.05,1.3\r\nmobilenetv4_conv_small_035,256,61583.9,33.23,2048,1.91,0.06,1.28\r\nefficientvit_m1,224,60008.11,34.102,2048,2.96,0.17,1.33\r\nshvit_s1,224,56478.4,36.232,2048,6.31,0.24,1.39\r\ntest_efficientnet_evos,160,55755.44,36.709,2048,0.36,0.06,0.55\r\nmobilenetv3_small_100,224,55699.99,36.745,2048,2.54,0.06,1.42\r\nefficientvit_m2,224,55055.35,37.177,2048,4.17,0.2,1.47\r\nmobilenetv4_conv_small_050,256,53124.17,38.53,2048,2.24,0.09,1.55\r\ntest_nfnet,160,52385.04,39.066,2048,0.38,0.29,1.2\r\ntinynet_d,152,51336.09,39.874,2048,2.34,0.05,1.42\r\ntest_efficientnet_ln,160,50842.32,40.255,2048,0.36,0.06,0.55\r\ntf_mobilenetv3_small_minimal_100,224,49576.
3,41.287,2048,2.04,0.06,1.41\r\nefficientvit_m3,224,49337.92,41.484,2048,6.88,0.26,1.62\r\ntf_mobilenetv3_small_075,224,48302.96,42.373,2048,2.04,0.05,1.3\r\nefficientvit_m4,224,47080.88,43.477,2048,8.78,0.3,1.7\r\nshvit_s2,224,46237.52,44.27,2048,11.45,0.37,1.6\r\nlcnet_075,224,44966.7,45.522,2048,2.36,0.1,1.99\r\ntf_mobilenetv3_small_100,224,43920.4,46.6,2048,2.54,0.06,1.42\r\nrepghostnet_050,224,42998.4,47.604,2048,2.31,0.05,2.02\r\nlevit_128s,224,42488.84,48.175,2048,7.76,0.3,1.88\r\nstarnet_s050,224,41255.35,49.615,2048,0.54,0.09,1.57\r\nlevit_conv_128s,224,40236.65,50.864,2048,7.76,0.3,1.88\r\nmobilenetv4_conv_small,224,39545.74,51.76,2048,3.77,0.19,1.97\r\nvit_small_patch32_224,224,39333.33,52.041,2048,22.88,1.15,2.5\r\nfasternet_t0,224,38296.98,53.449,2048,3.91,0.34,1.97\r\nresnet10t,176,37970.64,53.912,2048,5.44,0.7,1.51\r\nmnasnet_small,224,37764.72,54.189,2048,2.03,0.07,2.16\r\nresnet18,160,36909.39,55.464,2048,11.69,0.93,1.27\r\nlcnet_100,224,36226.5,56.513,2048,2.95,0.16,2.52\r\nrepghostnet_058,224,34869.61,58.701,2048,2.54,0.06,2.59\r\nghostnet_050,224,34831.19,58.766,2048,2.59,0.05,1.77\r\nmobilenetv2_035,224,33655.07,60.826,2048,1.68,0.07,2.86\r\nregnetx_002,224,33356.97,61.372,2048,2.68,0.2,2.16\r\nmobilenetv4_conv_small,256,32614.49,62.771,2048,3.77,0.25,2.57\r\nshvit_s3,224,31750.23,64.474,2048,14.21,0.6,2.33\r\nregnety_002,224,31342.77,65.313,2048,3.16,0.2,2.17\r\nefficientvit_m5,224,31163.21,65.688,2048,12.44,0.52,2.41\r\nefficientvit_b0,224,30816.63,66.427,2048,3.41,0.1,2.87\r\npit_ti_224,224,30144.65,67.912,2048,4.85,0.7,6.19\r\nvit_medium_patch32_clip_224,224,30080.57,68.049,2048,39.69,2.0,3.34\r\nvit_tiny_r_s16_p8_224,224,29885.07,68.491,2048,6.34,0.44,2.06\r\npit_ti_distilled_224,224,29820.97,68.649,2048,5.1,0.71,6.23\r\nmnasnet_050,224,28794.31,71.097,2048,2.22,0.11,3.07\r\nlevit_128,224,28621.7,71.531,2048,9.19,0.41,2.71\r\nrepghostnet_080,224,28562.03,71.673,2048,3.27,0.1,3.22\r\nrepvgg_a0,224,28105.02,72.844,2048,8.31,1.36,1.79\r\nlevit
_conv_128,224,28012.83,73.083,2048,9.19,0.41,2.71\r\nmixer_s32_224,224,27708.36,73.891,2048,19.1,1.0,2.28\r\nfasternet_t1,224,27240.67,75.148,2048,7.6,0.85,3.15\r\nmobileone_s0,224,27057.87,75.661,2048,2.08,0.28,3.79\r\nmobilenetv2_050,224,26917.34,76.054,2048,1.97,0.1,3.64\r\ntinynet_c,184,26624.77,76.883,2048,2.46,0.11,2.87\r\nsemnasnet_050,224,26622.79,76.886,2048,2.08,0.11,3.44\r\nstarnet_s100,224,26073.72,78.513,2048,1.04,0.19,2.68\r\nlevit_192,224,25419.26,80.538,2048,10.92,0.66,3.2\r\nvit_xsmall_patch16_clip_224,224,25134.86,81.441,2048,8.28,1.79,6.65\r\ndeit_tiny_distilled_patch16_224,224,24487.25,83.607,2048,5.91,1.27,6.01\r\nvit_tiny_patch16_224,224,24449.12,83.726,2048,5.72,1.26,5.97\r\ndeit_tiny_patch16_224,224,24264.13,84.377,2048,5.72,1.26,5.97\r\nlevit_conv_192,224,24122.57,84.873,2048,10.92,0.66,3.2\r\ncs3darknet_focus_s,256,24055.61,85.09,2048,3.27,0.69,2.7\r\nresnet10t,224,24024.95,85.217,2048,5.44,1.1,2.43\r\nlcnet_150,224,23750.6,86.203,2048,4.5,0.34,3.79\r\ngernet_s,224,23696.23,86.394,2048,8.17,0.75,2.65\r\nstarnet_s150,224,23612.89,86.697,2048,1.56,0.23,2.75\r\nrepghostnet_100,224,23559.65,86.9,2048,4.06,0.15,3.98\r\ncs3darknet_s,256,23205.36,88.227,2048,3.28,0.72,2.97\r\nresnet34,160,23179.6,88.324,2048,21.8,1.87,1.91\r\nmobilenetv3_large_075,224,22280.42,91.896,2048,3.99,0.16,4.0\r\nregnetx_004,224,22178.03,92.307,2048,5.16,0.4,3.14\r\nmobilenetv4_conv_small,320,21982.41,93.14,2048,3.77,0.39,4.01\r\nconvnext_zepto_rms,224,21922.71,93.387,2048,2.16,0.3,2.75\r\nshvit_s4,256,21584.19,94.858,2048,16.55,0.99,3.73\r\nregnetx_004_tv,224,21569.3,94.927,2048,5.5,0.42,3.17\r\nvit_betwixt_patch32_clip_224,224,21461.49,95.392,2048,61.41,3.09,4.17\r\nrepghostnet_111,224,21209.34,96.539,2048,4.52,0.18,4.38\r\nrepvgg_a1,224,21204.72,96.542,2048,12.79,2.36,2.37\r\nghostnetv3_050,224,20810.5,98.384,2048,2.85,0.05,2.28\r\nese_vovnet19b_slim_dw,224,20797.27,98.447,2048,1.9,0.4,5.28\r\nresnetv2_18,224,20600.63,99.383,2048,11.69,1.82,2.48\r\nresnet14t,176,20479.
33,99.976,2048,10.08,1.07,3.61\r\nmobilenetv3_rw,224,20035.22,102.194,2048,5.48,0.23,4.41\r\npit_xs_224,224,19987.26,102.434,2048,10.62,1.4,7.71\r\npit_xs_distilled_224,224,19916.46,102.808,2048,11.0,1.41,7.76\r\nmobilenetv1_100,224,19815.09,103.324,2048,4.23,0.58,5.04\r\nmobilenetv3_large_100,224,19787.52,103.478,2048,5.48,0.23,4.41\r\nhardcorenas_a,224,19644.38,104.223,2048,5.26,0.23,4.38\r\nconvnext_zepto_rms_ols,224,19469.42,105.149,2048,2.16,0.34,3.15\r\nmobilenetv1_100h,224,19289.75,106.132,2048,5.28,0.63,5.09\r\nresnet18,224,19210.05,106.587,2048,11.69,1.82,2.48\r\nnf_regnet_b0,192,19099.3,107.201,2048,8.76,0.37,3.15\r\nmnasnet_075,224,19046.81,107.495,2048,3.17,0.23,4.77\r\nlevit_256,224,18960.69,107.977,2048,18.86,1.13,4.23\r\nghostnet_100,224,18764.05,109.115,2048,5.18,0.15,3.55\r\ntf_mobilenetv3_large_075,224,18444.78,111.003,2048,3.99,0.16,4.0\r\nrepghostnet_130,224,18437.19,111.053,2048,5.46,0.24,5.24\r\nhardcorenas_b,224,18387.19,111.345,2048,5.18,0.26,5.09\r\nlevit_conv_256,224,18311.48,111.798,2048,18.86,1.13,4.23\r\nhardcorenas_c,224,18124.6,112.972,2048,5.52,0.28,5.01\r\ntf_mobilenetv3_large_minimal_100,224,18012.44,113.654,2048,3.92,0.22,4.4\r\nseresnet18,224,17953.78,114.041,2048,11.78,1.82,2.49\r\ninception_next_atto,224,17926.59,114.207,2048,4.16,0.5,3.63\r\nregnety_004,224,17702.06,115.668,2048,4.34,0.41,3.89\r\nresnetv2_18d,224,17654.5,115.975,2048,11.71,2.06,3.29\r\nregnetx_006,224,17585.44,116.426,2048,6.2,0.61,3.98\r\nmobilenet_edgetpu_v2_xs,224,17426.53,117.496,2048,4.46,0.7,4.8\r\nlegacy_seresnet18,224,17352.9,117.998,2048,11.78,1.82,2.49\r\nconvnext_atto,224,17113.17,119.645,2048,3.7,0.55,3.81\r\nlevit_256d,224,17094.61,119.774,2048,26.16,1.39,4.93\r\nmobilenetv2_075,224,17081.6,119.865,2048,2.64,0.22,5.86\r\ntinynet_b,188,17023.65,120.27,2048,3.73,0.21,4.44\r\nsemnasnet_075,224,16983.16,120.562,2048,2.91,0.23,5.54\r\nmnasnet_100,224,16964.23,120.7,2048,4.38,0.33,5.46\r\nedgenext_xx_small,256,16933.7,120.909,2048,1.33,0.26,3.33\r\nvit_b
ase_patch32_clip_224,224,16868.27,121.375,2048,88.22,4.41,5.01\r\nhardcorenas_d,224,16694.48,122.644,2048,7.5,0.3,4.93\r\nresnet18d,224,16589.88,123.396,2048,11.71,2.06,3.29\r\ntf_mobilenetv3_large_100,224,16568.22,123.575,2048,5.48,0.23,4.41\r\nrepvgg_b0,224,16487.59,124.179,2048,14.34,3.06,3.07\r\nregnety_006,224,16451.8,124.455,2048,6.06,0.61,4.33\r\ntf_efficientnetv2_b0,192,16442.9,124.523,2048,7.14,0.54,3.51\r\nconvnext_atto_ols,224,16248.26,126.015,2048,3.7,0.58,4.11\r\nmobilenetv3_large_100,256,16050.81,127.569,2048,5.48,0.29,5.75\r\nrepghostnet_150,224,16034.4,127.698,2048,6.55,0.31,6.0\r\nmobilenetv1_100,256,15972.64,128.188,2048,4.23,0.76,6.59\r\nlevit_conv_256d,224,15774.49,129.805,2048,26.16,1.39,4.93\r\nmobilenetv1_100h,256,15563.32,131.561,2048,5.28,0.82,6.65\r\nese_vovnet19b_slim,224,15559.93,131.588,2048,3.17,1.69,3.52\r\nspnasnet_100,224,15488.98,132.196,2048,4.42,0.35,6.03\r\nmobileone_s1,224,15409.45,132.873,2048,4.76,0.83,6.27\r\nmobilenetv2_100,224,15379.81,133.135,2048,3.5,0.31,6.68\r\ncs3darknet_focus_s,320,15352.57,133.362,2048,3.27,1.08,4.22\r\nsemnasnet_100,224,15296.6,133.85,2048,3.89,0.32,6.23\r\nrepvit_m1,224,15295.64,133.867,2048,5.07,0.82,6.17\r\nrepvit_m0_9,224,15280.75,133.982,2048,5.07,0.82,6.17\r\nvit_base_patch32_224,224,15261.32,134.156,2048,88.22,4.41,5.01\r\nstarnet_s2,224,15113.45,135.474,2048,3.68,0.55,4.73\r\nfasternet_t2,224,15110.07,135.508,2048,14.98,1.91,4.73\r\nmobilenetv1_125,224,15061.41,135.95,2048,6.27,0.89,6.3\r\nghostnet_130,224,15031.15,136.219,2048,7.36,0.24,4.6\r\nconvnext_femto,224,14972.58,136.755,2048,5.22,0.79,4.57\r\nmobilevit_xxs,256,14887.74,137.527,2048,1.27,0.42,8.34\r\nhgnetv2_b0,224,14878.9,137.618,2048,6.0,0.33,2.12\r\ndla46_c,224,14854.05,137.847,2048,1.3,0.58,4.5\r\nconvnext_atto_rms,224,14817.14,138.185,2048,3.69,0.55,3.81\r\nhardcorenas_f,224,14653.92,139.725,2048,8.2,0.35,5.57\r\nmobilenet_edgetpu_100,224,14651.33,139.755,2048,4.09,1.0,5.75\r\nhardcorenas_e,224,14476.14,141.445,2048,8.07,0.35,5
.65\r\ncrossvit_tiny_240,240,14417.63,142.019,2048,7.01,1.57,9.08\r\nstarnet_s1,224,14397.49,142.219,2048,2.87,0.42,4.99\r\nconvnext_femto_ols,224,14272.74,143.457,2048,5.23,0.82,4.87\r\nvit_base_patch32_clip_quickgelu_224,224,14060.69,145.609,2048,87.85,4.41,5.01\r\nregnetx_008,224,14000.89,146.249,2048,7.26,0.81,5.15\r\nmobilenetv4_conv_medium,224,13967.49,146.603,2048,9.72,0.84,5.8\r\nregnety_008,224,13937.94,146.911,2048,6.26,0.81,5.25\r\nrepvit_m1_0,224,13907.55,147.227,2048,6.81,1.11,7.19\r\nefficientnet_lite0,224,13899.63,147.311,2048,4.65,0.4,6.74\r\ncrossvit_9_240,240,13889.91,147.407,2048,8.55,1.85,9.52\r\ntf_efficientnetv2_b0,224,13561.92,150.977,2048,7.14,0.73,4.77\r\nfbnetc_100,224,13546.69,151.15,2048,5.57,0.4,6.51\r\ntinynet_a,192,13453.17,152.196,2048,6.19,0.35,5.41\r\nxcit_nano_12_p16_224,224,13413.93,152.637,2048,3.05,0.56,4.17\r\nedgenext_xx_small,288,13304.18,153.905,2048,1.33,0.33,4.21\r\nresnet14t,224,13264.14,154.371,2048,10.08,1.69,5.8\r\nmobilevitv2_050,256,13200.14,155.125,2048,1.37,0.48,8.04\r\nvit_small_patch32_384,384,13188.43,155.241,2048,22.92,3.45,8.25\r\nregnety_008_tv,224,13164.82,155.538,2048,6.43,0.84,5.42\r\nvit_base_patch32_clip_256,256,13145.17,155.755,2048,87.86,5.76,6.65\r\nmobileone_s2,224,13110.16,156.182,2048,7.81,1.3,7.56\r\nrepvit_m1_1,224,13088.85,156.436,2048,8.24,1.34,7.82\r\ntf_efficientnetv2_b1,192,13065.82,156.695,2048,8.14,0.76,4.59\r\nrepvit_m2,224,13014.84,157.328,2048,8.24,1.34,7.82\r\nrepvgg_a2,224,12920.58,158.479,2048,25.5,5.12,3.13\r\nvisformer_tiny,224,12868.91,159.103,2048,10.32,1.27,5.72\r\npvt_v2_b0,224,12787.79,160.12,2048,3.67,0.57,7.99\r\ncrossvit_9_dagger_240,240,12735.41,160.78,2048,8.78,1.99,9.97\r\nresnetv2_34,224,12706.5,161.14,2048,21.8,3.67,3.74\r\nresnetblur18,224,12638.09,162.019,2048,11.69,2.34,3.39\r\npit_s_224,224,12623.54,162.202,2048,23.46,2.88,11.56\r\npit_s_distilled_224,224,12588.64,162.652,2048,24.04,2.9,11.64\r\nresnetv2_18,288,12553.56,163.108,2048,11.69,3.0,4.11\r\ndla46x_c,224,1
2462.24,164.307,2048,1.07,0.54,5.66\r\nmobilenetv4_hybrid_medium_075,224,12455.02,164.398,2048,7.31,0.66,5.65\r\nmobilenetv1_125,256,12286.41,166.662,2048,6.27,1.16,8.23\r\nrepghostnet_200,224,12232.59,167.392,2048,9.77,0.53,7.96\r\nmobilenet_edgetpu_v2_s,224,12151.56,168.502,2048,5.99,1.21,6.6\r\nefficientnet_b0,224,12127.59,168.843,2048,5.29,0.4,6.75\r\nresnet34,224,12089.14,169.355,2048,21.8,3.67,3.74\r\ncs3darknet_focus_m,256,12050.58,169.922,2048,9.3,1.98,4.89\r\nrexnet_100,224,12050.41,169.918,2048,4.8,0.41,7.44\r\nmobilenetv4_conv_medium,256,11982.21,170.891,2048,9.72,1.1,7.58\r\nefficientvit_b1,224,11951.43,171.327,2048,9.1,0.53,7.25\r\nresnet50,160,11932.44,171.596,2048,25.56,2.1,5.67\r\nmnasnet_140,224,11917.29,171.825,2048,7.12,0.6,7.71\r\nconvnext_atto_rms,256,11826.75,173.139,2048,3.69,0.71,4.98\r\ndla60x_c,224,11820.72,173.225,2048,1.32,0.59,6.01\r\nselecsls42,224,11799.98,173.513,2048,30.35,2.94,4.62\r\nrexnetr_100,224,11794.02,173.614,2048,4.88,0.43,7.72\r\nselecsls42b,224,11785.2,173.732,2048,32.46,2.98,4.62\r\nconvnext_pico,224,11768.89,173.986,2048,9.05,1.37,6.1\r\nmobilenetv2_110d,224,11767.48,174.008,2048,4.52,0.45,8.71\r\nlevit_384,224,11662.06,175.574,2048,39.07,2.35,6.26\r\ntf_efficientnet_lite0,224,11617.5,176.243,2048,4.65,0.4,6.74\r\ncs3darknet_m,256,11603.19,176.468,2048,9.31,2.08,5.28\r\nfbnetv3_b,224,11583.03,176.777,2048,8.6,0.42,6.97\r\nresnet18,288,11572.43,176.948,2048,11.69,3.01,4.11\r\nregnetz_005,224,11566.89,177.008,2048,7.12,0.52,5.86\r\nresnetv2_34d,224,11485.73,178.262,2048,21.82,3.91,4.54\r\nskresnet18,224,11439.21,179.0,2048,11.96,1.82,3.24\r\nresnet50d,160,11337.73,180.602,2048,25.58,2.22,6.08\r\nese_vovnet19b_dw,224,11332.65,180.672,2048,6.54,1.34,8.25\r\nefficientnet_b1_pruned,240,11277.91,181.561,2048,6.33,0.4,6.21\r\nseresnet34,224,11242.99,182.119,2048,21.96,3.67,3.74\r\ndeit_small_patch16_224,224,11193.44,182.923,2048,22.05,4.61,11.95\r\nvit_small_patch16_224,224,11167.93,183.338,2048,22.05,4.61,11.95\r\nconvnext_pic
o_ols,224,11165.69,183.388,2048,9.06,1.43,6.5\r\nhrnet_w18_small,224,11093.38,184.587,2048,13.19,1.61,5.72\r\ndeit_small_distilled_patch16_224,224,11079.16,184.814,2048,22.44,4.63,12.02\r\nhgnetv2_b1,224,11066.63,185.02,2048,6.34,0.49,2.73\r\nswiftformer_xs,224,11040.78,185.449,2048,3.48,0.61,6.45\r\nresnet34d,224,10995.38,186.229,2048,21.82,3.91,4.54\r\nnf_regnet_b0,256,10934.08,187.27,2048,8.76,0.64,5.58\r\nseresnet18,288,10864.06,188.476,2048,11.78,3.01,4.11\r\nstarnet_s3,224,10802.64,189.55,2048,5.75,0.76,6.66\r\nlevit_conv_384,224,10800.0,189.591,2048,39.07,2.35,6.26\r\nlegacy_seresnet34,224,10779.01,189.961,2048,21.96,3.67,3.74\r\nedgenext_x_small,256,10710.55,191.18,2048,2.34,0.54,5.93\r\nvit_base_patch32_siglip_gap_256,256,10708.17,191.215,2048,87.47,5.67,6.54\r\nresnetv2_18d,288,10707.69,191.23,2048,11.71,3.4,5.43\r\nghostnetv3_100,224,10615.04,192.906,2048,6.15,0.17,4.55\r\nsemnasnet_140,224,10611.2,192.968,2048,6.11,0.6,8.87\r\nvit_base_patch32_siglip_256,256,10588.89,193.37,2048,94.55,5.75,6.64\r\nmixer_b32_224,224,10547.05,194.145,2048,60.29,3.24,6.29\r\ngernet_m,224,10520.34,194.634,2048,21.14,3.02,5.24\r\nmobilenetv2_140,224,10516.98,194.704,2048,6.11,0.6,9.57\r\nefficientnet_b0_gn,224,10404.34,196.813,2048,5.29,0.42,6.75\r\nconvnext_atto,288,10402.06,196.848,2048,3.7,0.91,6.3\r\ntf_efficientnet_b0,224,10396.83,196.958,2048,5.29,0.4,6.75\r\nseresnet50,160,10365.41,197.54,2048,28.09,2.1,5.69\r\nfastvit_t8,256,10276.22,199.261,2048,4.0,0.69,6.59\r\ntf_efficientnetv2_b2,208,10201.26,200.714,2048,10.1,1.06,6.0\r\nmobilenetv4_hybrid_medium,224,10193.75,200.873,2048,11.07,0.98,6.84\r\nghostnetv2_100,224,10190.71,200.939,2048,6.16,0.18,4.55\r\nvit_tiny_r_s16_p8_384,384,10157.64,201.576,2048,6.36,1.34,6.49\r\nselecsls60,224,10056.75,203.605,2048,30.67,3.59,5.52\r\ndla34,224,10027.75,204.202,2048,15.74,3.07,5.02\r\nselecsls60b,224,10025.33,204.238,2048,32.77,3.63,5.52\r\nfbnetv3_d,224,10012.33,204.509,2048,10.31,0.52,8.5\r\necaresnet50t,160,9991.88,204.931,204
8,25.57,2.21,6.04\r\nvit_pwee_patch16_reg1_gap_256,256,9981.27,205.144,2048,15.25,4.37,15.87\r\ndeit3_small_patch16_224,224,9980.69,205.17,2048,22.06,4.61,11.95\r\nresnet18d,288,9970.6,205.369,2048,11.71,3.41,5.43\r\nresmlp_12_224,224,9957.25,205.644,2048,15.35,3.01,5.5\r\nmixnet_s,224,9926.05,206.29,2048,4.13,0.25,6.25\r\nvit_wee_patch16_reg1_gap_256,256,9916.58,206.46,2048,13.42,3.83,13.9\r\nmobileone_s3,224,9876.07,207.336,2048,10.08,1.9,9.13\r\nresnet26,224,9842.43,208.039,2048,16.0,2.36,7.35\r\nefficientnet_b0,256,9813.42,208.657,2048,5.29,0.52,8.81\r\nresnetaa34d,224,9785.13,209.263,2048,21.82,4.43,5.07\r\nresnet50,176,9757.14,209.871,2048,25.56,2.62,6.92\r\nflexivit_small,240,9739.53,210.243,2048,22.06,5.35,14.18\r\nconvnext_atto_ols,288,9720.97,210.643,2048,3.7,0.96,6.8\r\nmobilenet_edgetpu_v2_m,224,9719.99,210.664,2048,8.46,1.85,8.15\r\nconvnextv2_atto,224,9718.61,210.699,2048,3.71,0.55,3.81\r\nresnetrs50,160,9688.21,211.355,2048,35.69,2.29,6.2\r\nefficientvit_b1,256,9675.92,211.624,2048,9.1,0.69,9.46\r\nefficientnet_lite1,240,9604.59,213.196,2048,5.42,0.62,10.14\r\nfbnetv3_b,256,9600.46,213.286,2048,8.6,0.55,9.1\r\nmambaout_femto,224,9596.83,213.375,2048,7.3,1.16,8.34\r\neva02_tiny_patch14_224,224,9577.58,213.798,2048,5.5,1.7,9.14\r\nmixer_s16_224,224,9519.53,215.111,2048,18.53,3.79,5.97\r\ncs3darknet_focus_m,288,9392.99,217.999,2048,9.3,2.51,6.19\r\ntiny_vit_5m_224,224,9385.29,218.176,2048,12.08,1.27,11.25\r\ntf_efficientnetv2_b1,240,9317.25,219.762,2048,8.14,1.21,7.34\r\nrexnetr_130,224,9232.45,221.795,2048,7.61,0.68,9.81\r\nefficientnet_es_pruned,224,9230.02,221.846,2048,5.44,1.81,8.73\r\nefficientnet_es,224,9226.16,221.938,2048,5.44,1.81,8.73\r\nresnext50_32x4d,160,9181.24,223.029,2048,25.03,2.17,7.35\r\nresnet26d,224,9152.56,223.731,2048,16.01,2.6,8.15\r\nhgnetv2_b0,288,9105.68,224.878,2048,6.0,0.54,3.51\r\nconvnext_femto,288,9086.46,225.353,2048,5.22,1.3,7.56\r\nswiftformer_s,224,9077.5,225.581,2048,6.09,0.99,7.81\r\nrepvit_m3,224,9057.78,226.068,204
8,10.12,1.86,11.43\r\ncs3darknet_m,288,9044.52,226.397,2048,9.31,2.63,6.69\r\nefficientnet_blur_b0,224,8993.55,227.683,2048,5.29,0.43,8.72\r\ngmixer_12_224,224,8963.79,228.436,2048,12.7,2.67,7.26\r\nregnetx_016,224,8943.78,228.949,2048,9.19,1.62,7.93\r\nvit_base_patch32_plus_256,256,8843.05,231.549,2048,119.48,7.79,7.76\r\ntf_mixnet_s,224,8841.02,231.606,2048,4.13,0.25,6.25\r\nrexnet_130,224,8801.48,232.657,2048,7.56,0.68,9.71\r\nvit_medium_patch16_clip_224,224,8711.31,235.055,2048,38.59,8.0,15.93\r\nnf_resnet26,224,8676.46,236.005,2048,16.0,2.41,7.35\r\nmobilenet_edgetpu_v2_l,224,8652.9,236.647,2048,10.92,2.55,9.05\r\nxcit_tiny_12_p16_224,224,8644.01,236.88,2048,6.72,1.24,6.29\r\nmobilenetv4_conv_blur_medium,224,8637.49,237.065,2048,9.72,1.22,8.58\r\nmobilevitv2_075,256,8616.42,237.654,2048,2.87,1.05,12.06\r\nmobilenetv2_120d,224,8611.61,237.775,2048,5.83,0.69,11.97\r\ntf_efficientnet_es,224,8610.18,237.817,2048,5.44,1.81,8.73\r\nconvnext_femto_ols,288,8594.93,238.244,2048,5.23,1.35,8.06\r\necaresnet50d_pruned,224,8581.25,238.626,2048,19.94,2.53,6.43\r\nefficientnet_b0_g16_evos,224,8573.11,238.852,2048,8.11,1.01,7.42\r\nfasternet_s,224,8469.25,241.786,2048,31.18,4.56,7.93\r\nefficientnet_b1,224,8463.08,241.962,2048,7.79,0.59,9.36\r\nedgenext_x_small,288,8396.42,243.882,2048,2.34,0.68,7.5\r\nconvnext_nano,224,8388.21,244.112,2048,15.59,2.46,8.37\r\nmambaout_kobe,224,8329.79,245.826,2048,9.14,1.52,10.0\r\ntf_efficientnet_lite1,240,8324.39,245.982,2048,5.42,0.62,10.14\r\nrepvgg_b1g4,224,8312.86,246.321,2048,36.13,7.31,5.32\r\nmobilenetv4_hybrid_medium,256,8311.57,246.36,2048,11.07,1.29,9.01\r\ngmlp_ti16_224,224,8303.55,246.608,2048,5.87,1.34,7.55\r\nmobilenetv4_conv_medium,320,8302.95,246.627,2048,9.72,1.71,11.84\r\nregnety_016,224,8300.88,246.686,2048,11.2,1.63,8.04\r\nefficientformer_l1,224,8261.23,247.868,2048,12.29,1.3,5.53\r\nghostnetv3_130,224,8248.49,248.255,2048,8.95,0.28,5.9\r\nconvnextv2_femto,224,8238.95,248.542,2048,5.23,0.79,4.57\r\nmobilenetv4_conv_aa_me
dium,256,8201.43,249.669,2048,9.72,1.58,10.3\r\nfbnetv3_d,256,8196.52,249.833,2048,10.31,0.68,11.1\r\nnf_regnet_b1,256,8172.8,250.551,2048,10.22,0.82,7.27\r\nhgnetv2_b4,224,8132.62,251.789,2048,19.8,2.75,6.7\r\npoolformer_s12,224,8105.51,252.63,2048,11.92,1.82,5.53\r\ntiny_vit_11m_224,224,8074.4,253.598,2048,20.35,2.03,13.49\r\ndarknet17,256,8025.29,255.162,2048,14.3,3.26,7.18\r\nrepvit_m1_5,224,8010.09,255.641,2048,14.05,2.27,12.84\r\nrexnetr_150,224,7994.2,256.157,2048,9.78,0.89,11.13\r\nghostnetv2_130,224,7970.59,256.903,2048,8.96,0.28,5.9\r\nnf_regnet_b2,240,7940.71,257.87,2048,14.31,0.97,7.23\r\nresnest14d,224,7926.26,258.344,2048,10.61,2.76,7.33\r\nedgenext_small,256,7795.6,262.678,2048,5.59,1.26,9.07\r\nvit_tiny_patch16_384,384,7747.02,264.32,2048,5.79,4.7,25.39\r\nvit_relpos_small_patch16_224,224,7732.63,264.796,2048,21.98,4.59,13.05\r\nvit_relpos_small_patch16_rpn_224,224,7708.94,265.616,2048,21.97,4.59,13.05\r\nresnetblur18,288,7702.72,199.378,1536,11.69,3.87,5.6\r\nrexnet_150,224,7700.55,265.915,2048,9.73,0.9,11.21\r\nresnetv2_34,288,7690.85,266.236,2048,21.8,6.07,6.18\r\nefficientnet_b2_pruned,260,7647.2,267.782,2048,8.31,0.73,9.13\r\nstarnet_s4,224,7610.26,269.069,2048,7.48,1.05,9.56\r\nvit_srelpos_small_patch16_224,224,7578.67,270.183,2048,21.97,4.59,12.16\r\nresnext50_32x4d,176,7572.96,270.396,2048,25.03,2.71,8.97\r\nconvnext_nano_ols,224,7570.56,270.493,2048,15.65,2.65,9.38\r\nvit_relpos_base_patch32_plus_rpn_256,256,7552.97,271.104,2048,119.42,7.68,8.01\r\nefficientnet_b0_g8_gn,224,7547.12,271.324,2048,6.56,0.66,6.75\r\nhgnetv2_b2,224,7541.91,271.51,2048,11.22,1.15,4.12\r\nresnet101,160,7518.14,272.367,2048,44.55,4.0,8.28\r\ndarknet21,256,7492.29,273.312,2048,20.86,3.93,7.47\r\nmobilenetv3_large_150d,256,7442.38,275.143,2048,14.62,1.03,12.35\r\nefficientnet_b1,240,7439.82,275.238,2048,7.79,0.71,10.88\r\ncs3darknet_focus_l,256,7400.09,276.716,2048,21.15,4.66,8.03\r\nefficientvit_b1,288,7391.51,277.042,2048,9.1,0.87,11.96\r\nnf_seresnet26,224,7385.41,
277.264,2048,17.4,2.41,7.36\r\nnf_ecaresnet26,224,7375.15,277.651,2048,16.0,2.41,7.36\r\nefficientnet_lite2,260,7315.16,279.932,2048,6.09,0.89,12.9\r\nresnet34,288,7291.44,280.839,2048,21.8,6.07,6.18\r\nvit_dwee_patch16_reg1_gap_256,256,7288.25,280.942,2048,13.43,3.83,17.6\r\nregnetz_005,288,7277.77,281.367,2048,7.12,0.86,9.68\r\npvt_v2_b1,224,7240.45,141.398,1024,14.01,2.12,15.39\r\nsedarknet21,256,7235.28,283.018,2048,20.95,3.93,7.47\r\nhgnet_tiny,224,7211.08,283.969,2048,14.74,4.54,6.36\r\nfastvit_t12,256,7166.88,285.721,2048,7.51,1.39,9.57\r\ncs3darknet_l,256,7144.98,286.588,2048,21.16,4.86,8.55\r\nmobilenet_edgetpu_v2_m,256,7144.78,286.596,2048,8.46,2.42,10.65\r\nmobileone_s4,224,7130.24,287.198,2048,14.84,2.98,11.81\r\nmobilenetv4_conv_blur_medium,256,7127.73,287.289,2048,9.72,1.59,11.2\r\nconvnext_pico,288,7112.94,287.893,2048,9.05,2.27,10.08\r\nswiftformer_l1,224,7095.68,288.587,2048,12.06,1.6,10.07\r\nlegacy_seresnext26_32x4d,224,7079.87,289.236,2048,16.79,2.49,9.39\r\ndpn48b,224,7078.92,289.255,2048,9.13,1.69,8.92\r\ngernet_l,256,7047.22,290.564,2048,31.08,4.57,8.0\r\ncrossvit_small_240,240,7022.64,291.588,2048,26.86,5.63,18.17\r\nregnetz_b16,224,7015.72,291.879,2048,9.72,1.45,9.95\r\necaresnetlight,224,6969.75,293.809,2048,30.16,4.11,8.42\r\nvit_small_resnet26d_224,224,6945.55,294.817,2048,63.61,5.07,11.12\r\nresnetv2_34d,288,6943.73,294.903,2048,21.82,6.46,7.51\r\nmixnet_m,224,6918.06,295.996,2048,5.01,0.36,8.19\r\nmobilevit_xs,256,6913.74,296.181,2048,2.32,1.05,16.33\r\nlevit_512,224,6909.22,296.38,2048,95.08,5.62,10.22\r\nvit_little_patch16_reg1_gap_256,256,6898.66,296.826,2048,22.52,6.27,18.06\r\ntresnet_m,224,6887.65,297.291,2048,31.39,5.75,7.31\r\nefficientnet_b1,256,6867.82,298.164,2048,7.79,0.77,12.22\r\nvit_little_patch16_reg4_gap_256,256,6858.16,298.571,2048,22.52,6.35,18.33\r\nresnext26ts,256,6852.6,298.824,2048,10.3,2.43,10.52\r\nefficientnet_cc_b0_8e,224,6830.52,299.803,2048,24.01,0.42,9.42\r\ndeit3_medium_patch16_224,224,6830.14,299.818,2048
,38.85,8.0,15.93\r\necaresnext50t_32x4d,224,6820.14,300.254,2048,15.41,2.7,10.09\r\necaresnext26t_32x4d,224,6813.98,300.519,2048,15.41,2.7,10.09\r\ntf_efficientnetv2_b2,260,6800.39,301.111,2048,10.1,1.72,9.84\r\nseresnet34,288,6797.82,301.231,2048,21.96,6.07,6.18\r\nseresnext26t_32x4d,224,6795.18,301.354,2048,16.81,2.7,10.09\r\nskresnet34,224,6777.58,302.138,2048,22.28,3.67,5.13\r\nhgnetv2_b1,288,6770.08,302.46,2048,6.34,0.82,4.51\r\necaresnet101d_pruned,224,6769.17,302.511,2048,24.88,3.48,7.69\r\nresnet26t,256,6769.15,302.518,2048,16.01,3.35,10.52\r\nrepvgg_b1,224,6752.56,303.251,2048,51.83,11.82,5.32\r\nresnetv2_50,224,6745.93,303.547,2048,25.55,4.11,11.11\r\nmobilenetv4_conv_large,256,6743.71,303.645,2048,32.59,2.86,12.14\r\nconvnext_pico_ols,288,6736.91,303.964,2048,9.06,2.37,10.74\r\nseresnext26d_32x4d,224,6715.98,304.906,2048,16.81,2.73,10.19\r\ntf_efficientnet_b1,240,6697.37,305.764,2048,7.79,0.71,10.88\r\nefficientnetv2_rw_t,224,6687.52,306.21,2048,13.65,1.93,9.94\r\nseresnext26ts,256,6675.18,306.764,2048,10.39,2.43,10.52\r\nghostnetv3_160,224,6666.73,307.157,2048,12.38,0.41,7.23\r\ncs3sedarknet_l,256,6666.25,307.178,2048,21.91,4.86,8.56\r\neca_resnext26ts,256,6665.67,307.203,2048,10.3,2.43,10.52\r\nresnet34d,288,6625.18,309.091,2048,21.82,6.47,7.51\r\nedgenext_small_rw,256,6623.25,309.174,2048,7.83,1.58,9.51\r\ngcresnext26ts,256,6614.11,309.609,2048,10.48,2.43,10.53\r\ngc_efficientnetv2_rw_t,224,6596.19,310.444,2048,13.68,1.94,9.97\r\nlevit_conv_512,224,6562.03,312.054,2048,95.08,5.62,10.22\r\nselecsls84,224,6557.97,312.243,2048,50.95,5.9,7.57\r\nvit_small_r26_s32_224,224,6546.08,312.813,2048,36.43,3.56,9.85\r\nvit_dpwee_patch16_reg1_gap_256,256,6526.73,313.74,2048,15.25,4.37,19.05\r\nlevit_512d,224,6517.58,314.195,2048,92.39,5.84,11.3\r\ncrossvit_15_240,240,6515.65,314.284,2048,27.53,5.81,19.77\r\nefficientnet_b2,256,6511.81,314.457,2048,9.11,0.89,12.81\r\nese_vovnet19b_dw,288,6488.58,315.593,2048,6.54,2.22,13.63\r\nefficientnet_cc_b0_4e,224,6488.16,315.61
9,2048,13.31,0.41,9.42\r\nconvnext_tiny,224,6481.77,315.92,2048,28.59,4.47,13.44\r\nghostnetv2_160,224,6471.18,316.444,2048,12.39,0.42,7.23\r\nresnet50,224,6459.19,317.039,2048,25.56,4.11,11.11\r\nresnetv2_50t,224,6448.31,317.57,2048,25.57,4.32,11.82\r\ntf_efficientnetv2_b3,240,6417.79,319.07,2048,14.36,1.93,9.95\r\nvgg11,224,6407.04,319.591,2048,132.86,7.61,7.44\r\ntf_efficientnet_lite2,260,6405.3,319.694,2048,6.09,0.89,12.9\r\nresnetv2_50d,224,6399.72,319.971,2048,25.57,4.35,11.92\r\nvit_relpos_medium_patch16_cls_224,224,6367.51,321.587,2048,38.76,8.03,18.24\r\ncoatnet_pico_rw_224,224,6352.93,322.342,2048,10.85,2.05,14.62\r\ncoat_lite_tiny,224,6334.76,323.259,2048,5.72,1.6,11.65\r\nvovnet39a,224,6329.07,323.543,2048,22.6,7.09,6.73\r\nconvnextv2_pico,224,6313.65,324.338,2048,9.07,1.37,6.1\r\nnf_regnet_b1,288,6290.28,325.542,2048,10.22,1.02,9.2\r\nregnetx_032,224,6259.59,327.14,2048,15.3,3.2,11.37\r\nfastvit_s12,256,6259.48,327.143,2048,9.43,1.8,10.82\r\ndla60,224,6256.11,327.326,2048,22.04,4.26,10.16\r\nwide_resnet50_2,176,6255.27,327.364,2048,68.88,7.29,8.97\r\ntf_efficientnet_cc_b0_8e,224,6255.06,327.383,2048,24.01,0.42,9.42\r\ntf_efficientnet_cc_b0_4e,224,6226.56,328.878,2048,13.31,0.41,9.42\r\ntf_mixnet_m,224,6224.74,328.96,2048,5.01,0.36,8.19\r\nefficientnet_b3_pruned,300,6202.44,330.147,2048,9.86,1.04,11.86\r\nresnet50t,224,6160.96,332.371,2048,25.57,4.32,11.82\r\nhgnetv2_b3,224,6157.68,332.555,2048,16.29,1.78,5.07\r\nresnet50d,224,6124.71,334.341,2048,25.58,4.35,11.92\r\nese_vovnet39b,224,6121.53,334.514,2048,24.57,7.09,6.74\r\ncrossvit_15_dagger_240,240,6114.2,334.909,2048,28.21,6.13,20.43\r\nfastvit_sa12,256,6106.9,335.319,2048,11.55,1.94,11.24\r\nresnet50c,224,6098.25,335.798,2048,25.58,4.35,11.92\r\nresnet101,176,6095.19,335.959,2048,44.55,4.92,10.08\r\neca_vovnet39b,224,6090.22,336.234,2048,22.6,7.09,6.74\r\nnf_regnet_b2,272,6082.96,336.636,2048,14.31,1.22,9.27\r\ncoatnext_nano_rw_224,224,6076.87,336.971,2048,14.7,2.47,12.8\r\nlevit_conv_512d,224,6054.3
4,338.236,2048,92.39,5.84,11.3\r\necaresnet26t,256,6041.0,338.983,2048,16.01,3.35,10.53\r\ncspresnet50,256,6040.01,339.03,2048,21.62,4.54,11.5\r\nvit_small_patch16_rope_224,224,5975.67,342.663,2048,21.98,4.61,11.95\r\nhrnet_w18_small_v2,224,5970.57,342.969,2048,15.6,2.62,9.65\r\nvit_small_patch16_rope_mixed_224,224,5966.32,343.214,2048,21.99,4.61,12.85\r\nefficientvit_b2,224,5957.19,343.74,2048,24.33,1.6,14.62\r\nvit_small_patch16_rope_ape_224,224,5956.3,343.794,2048,22.06,4.61,11.95\r\ncoat_lite_mini,224,5945.61,344.414,2048,11.01,2.0,12.25\r\neca_botnext26ts_256,256,5942.01,344.622,2048,10.59,2.46,11.6\r\nvit_base_resnet26d_224,224,5940.23,344.719,2048,101.4,6.97,13.16\r\nvit_small_patch16_rope_mixed_ape_224,224,5939.17,344.774,2048,22.06,4.61,12.85\r\ncoatnet_nano_cc_224,224,5911.6,346.387,2048,13.76,2.24,15.02\r\nresnetaa34d,288,5906.32,346.706,2048,21.82,7.33,8.38\r\nresnetaa50,224,5899.92,347.064,2048,25.56,5.15,11.64\r\nresnet32ts,256,5877.56,348.414,2048,17.96,4.63,11.58\r\ndpn68b,224,5870.58,348.822,2048,12.61,2.35,10.47\r\nconvnextv2_atto,288,5867.85,348.976,2048,3.71,0.91,6.3\r\ndpn68,224,5861.83,349.334,2048,12.61,2.35,10.47\r\neca_halonext26ts,256,5830.64,351.205,2048,10.76,2.44,11.46\r\nmobilenetv4_hybrid_large_075,256,5817.16,352.024,2048,22.75,2.06,11.64\r\ncoatnet_nano_rw_224,224,5802.54,352.91,2048,15.14,2.41,15.41\r\nresnet33ts,256,5791.16,353.601,2048,19.68,4.76,11.66\r\nrexnet_200,224,5774.94,354.595,2048,16.37,1.56,14.91\r\nbotnet26t_256,256,5761.55,355.419,2048,12.49,3.32,11.98\r\nregnety_032,224,5755.67,355.786,2048,19.44,3.2,11.26\r\ncs3darknet_focus_l,288,5749.55,356.161,2048,21.15,5.9,10.16\r\nresnest26d,224,5745.05,356.44,2048,17.07,3.64,9.97\r\nresnetv2_50x1_bit,224,5733.93,357.134,2048,25.55,4.23,11.11\r\nvit_base_patch32_clip_384,384,5729.28,357.407,2048,88.3,13.06,16.5\r\nrexnetr_200,224,5713.5,358.41,2048,16.52,1.59,15.11\r\nhalonet26t,256,5713.24,358.431,2048,12.48,3.19,11.69\r\nlegacy_seresnet50,224,5683.08,360.326,2048,28.09,3.88,
10.6\r\ncspresnet50d,256,5665.26,361.466,2048,21.64,4.86,12.55\r\nresnet50_clip_gap,224,5643.53,362.857,2048,23.53,5.39,12.44\r\nregnetx_040,224,5643.21,362.872,2048,22.12,3.99,12.2\r\nresnetaa50d,224,5632.93,363.534,2048,25.58,5.39,12.44\r\nvisformer_small,224,5623.65,364.121,2048,40.22,4.88,11.43\r\nmobilevitv2_100,256,5607.49,365.186,2048,4.9,1.84,16.08\r\nseresnet50,224,5607.29,365.183,2048,28.09,4.11,11.13\r\nrepvgg_b2g4,224,5604.04,365.399,2048,55.78,11.33,6.45\r\nresnet26,288,5602.5,365.505,2048,16.0,3.9,12.15\r\nseresnet33ts,256,5600.55,365.634,2048,19.78,4.76,11.66\r\nresnetv2_50d_gn,224,5589.94,366.333,2048,25.57,4.38,11.92\r\neca_resnet33ts,256,5586.1,366.582,2048,19.68,4.76,11.66\r\ncspresnet50w,256,5585.14,366.643,2048,28.12,5.04,12.19\r\nregnetv_040,224,5575.17,367.303,2048,20.64,4.0,12.29\r\nresnet50_gn,224,5571.89,367.52,2048,25.56,4.14,11.11\r\ncs3darknet_l,288,5566.29,367.886,2048,21.16,6.16,10.83\r\ngcresnet33ts,256,5559.58,368.327,2048,19.88,4.76,11.68\r\nvgg11_bn,224,5558.11,368.423,2048,132.87,7.62,7.44\r\ndavit_tiny,224,5549.83,138.355,768,28.36,4.54,18.89\r\nregnety_040,224,5536.14,369.889,2048,20.65,4.0,12.29\r\nfbnetv3_g,240,5522.0,370.837,2048,16.62,1.28,14.87\r\nefficientnet_em,240,5513.48,371.398,2048,6.9,3.04,14.34\r\nres2net50_48w_2s,224,5448.27,375.864,2048,25.29,4.18,11.72\r\nefficientnet_b1,288,5435.07,376.773,2048,7.79,0.97,15.46\r\nresnet152,160,5418.8,377.898,2048,60.19,5.9,11.51\r\nbat_resnext26ts,256,5401.99,379.078,2048,10.73,2.53,12.51\r\nvovnet57a,224,5401.64,379.087,2048,36.64,8.95,7.52\r\ntwins_pcpvt_small,224,5399.09,379.269,2048,24.11,3.83,18.08\r\nresnetblur50,224,5399.08,379.288,2048,25.56,5.16,12.02\r\necaresnet50t,224,5397.25,379.404,2048,25.57,4.32,11.83\r\nseresnet50t,224,5387.6,380.085,2048,28.1,4.32,11.83\r\nvit_relpos_medium_patch16_224,224,5381.37,380.522,2048,38.75,7.97,17.02\r\nmobilenetv4_conv_medium,384,5380.03,380.628,2048,9.72,2.46,17.05\r\nmobilenetv4_hybrid_medium,320,5371.55,381.213,2048,11.07,2.05,14.
36\r\nresnext26ts,288,5369.82,381.343,2048,10.3,3.07,13.31\r\nresnet50_clip,224,5365.97,381.622,2048,38.32,6.14,12.98\r\nmixnet_l,224,5346.26,383.033,2048,7.33,0.58,10.84\r\necaresnet50d,224,5337.92,383.624,2048,25.58,4.35,11.93\r\nconvnext_tiny_hnf,224,5327.24,384.406,2048,28.59,4.47,13.44\r\nmambaout_femto,288,5312.02,385.5,2048,7.3,1.91,13.79\r\nvit_dlittle_patch16_reg1_gap_256,256,5287.91,387.245,2048,22.52,6.27,22.69\r\nvit_relpos_medium_patch16_rpn_224,224,5283.83,387.547,2048,38.73,7.97,17.02\r\nconvit_tiny,224,5279.48,387.874,2048,5.71,1.26,7.94\r\nvit_srelpos_medium_patch16_224,224,5271.63,388.444,2048,38.74,7.96,16.21\r\ntf_efficientnet_em,240,5270.44,388.528,2048,6.9,3.04,14.34\r\necaresnet50d_pruned,288,5268.05,388.704,2048,19.94,4.19,10.61\r\nresnetrs50,224,5248.48,390.174,2048,35.69,4.48,12.14\r\nvit_base_r26_s32_224,224,5248.19,390.167,2048,101.38,6.81,12.36\r\ndensenet121,224,5247.78,390.224,2048,7.98,2.87,6.9\r\neca_resnext26ts,288,5226.49,391.805,2048,10.3,3.07,13.32\r\nseresnext26ts,288,5223.93,392.005,2048,10.39,3.07,13.32\r\nresnet26d,288,5218.85,392.379,2048,16.01,4.29,13.48\r\ncs3sedarknet_l,288,5197.81,393.973,2048,21.91,6.16,10.83\r\nresnet50s,224,5190.29,394.538,2048,25.68,5.47,13.52\r\nvit_large_patch32_224,224,5185.73,394.887,2048,305.51,15.39,13.3\r\nvit_base_patch32_384,384,5171.09,395.997,2048,88.3,13.06,16.5\r\ngcresnext26ts,288,5170.73,396.029,2048,10.48,3.07,13.33\r\nresnetblur50d,224,5162.63,396.659,2048,25.58,5.4,12.82\r\ntf_efficientnet_b2,260,5153.17,397.39,2048,9.11,1.02,13.83\r\nefficientnet_b2,288,5147.06,397.857,2048,9.11,1.12,16.2\r\nvit_medium_patch16_reg1_gap_256,256,5131.03,399.087,2048,38.88,10.63,22.26\r\nresmlp_24_224,224,5121.91,399.806,2048,30.02,5.96,10.91\r\nmobilevit_s,256,5117.93,400.12,2048,5.58,2.03,19.94\r\nvit_medium_patch16_reg4_gap_256,256,5102.87,401.286,2048,38.88,10.76,22.6\r\nhgnetv2_b4,288,5099.86,401.538,2048,19.8,4.54,11.08\r\nvit_medium_patch16_gap_240,240,5081.04,402.999,2048,44.4,9.22,18.81\r\nco
nvnext_nano,288,5079.29,403.174,2048,15.59,4.06,13.84\r\nhaloregnetz_b,224,5066.18,404.205,2048,11.68,1.97,11.94\r\ngcvit_xxtiny,224,5024.24,407.572,2048,12.0,2.14,15.36\r\neca_nfnet_l0,224,5019.16,407.995,2048,24.14,4.35,10.47\r\nconvnextv2_femto,288,4999.51,409.601,2048,5.23,1.3,7.56\r\ncoatnet_rmlp_nano_rw_224,224,4993.65,410.069,2048,15.15,2.62,20.34\r\nhgnet_small,224,4990.97,410.3,2048,24.36,8.53,8.79\r\nnfnet_l0,224,4984.7,410.812,2048,35.07,4.36,10.47\r\nresnext50_32x4d,224,4983.26,410.936,2048,25.03,4.26,14.4\r\nseresnetaa50d,224,4974.23,411.679,2048,28.11,5.4,12.46\r\ndla60x,224,4965.98,412.36,2048,17.35,3.54,13.8\r\nres2net50_26w_4s,224,4958.75,412.958,2048,25.7,4.28,12.61\r\nedgenext_small,320,4956.43,413.15,2048,5.59,1.97,14.16\r\ntiny_vit_21m_224,224,4949.95,413.681,2048,33.21,4.27,20.08\r\ninception_v3,299,4905.54,417.443,2048,23.83,5.73,8.97\r\nmobilenetv3_large_150d,320,4899.0,418.01,2048,14.62,1.61,19.29\r\nresnest50d_1s4x24d,224,4885.04,419.192,2048,25.68,4.43,13.57\r\nnf_regnet_b3,288,4837.11,423.356,2048,18.59,1.67,11.84\r\nnfnet_f0,192,4833.91,423.64,2048,71.49,7.21,10.16\r\nefficientvit_b2,256,4809.98,425.741,2048,24.33,2.09,19.03\r\ndla60_res2net,224,4797.4,426.852,2048,20.85,4.15,12.34\r\nresnext50d_32x4d,224,4776.48,428.724,2048,25.05,4.5,15.2\r\nres2net50_14w_8s,224,4773.25,429.013,2048,25.06,4.21,13.28\r\ntf_mixnet_l,224,4767.37,429.537,2048,7.33,0.58,10.84\r\nmobilenetv4_conv_large,320,4764.52,429.798,2048,32.59,4.47,18.97\r\nres2net50d,224,4761.72,430.044,2048,25.72,4.52,13.41\r\nhgnetv2_b5,224,4738.99,432.116,2048,39.57,6.56,11.19\r\nregnety_040_sgn,224,4733.32,432.628,2048,20.65,4.03,12.29\r\ncs3darknet_focus_x,256,4727.89,433.133,2048,35.02,8.03,10.69\r\ncrossvit_18_240,240,4712.29,434.56,2048,43.27,9.05,26.26\r\nfastvit_mci0,256,4691.08,436.528,2048,11.36,2.39,14.72\r\ndensenetblur121d,224,4689.62,436.664,2048,8.0,3.11,7.9\r\nedgenext_base,256,4685.94,437.015,2048,18.51,3.85,15.58\r\ninception_next_tiny,224,4677.45,437.8,2048,28.06,
4.19,11.98\r\nvit_base_patch16_clip_224,224,4670.9,438.411,2048,86.57,17.58,23.9\r\nmambaout_kobe,288,4650.76,440.305,2048,9.14,2.5,16.53\r\nres2next50,224,4641.36,441.202,2048,24.67,4.2,13.71\r\nvit_pe_core_tiny_patch16_384,384,4622.98,442.953,2048,6.14,4.74,25.62\r\ndla60_res2next,224,4617.3,443.507,2048,17.03,3.49,13.17\r\ngmixer_24_224,224,4608.95,444.308,2048,24.72,5.28,14.45\r\ncspresnext50,256,4607.36,444.457,2048,20.57,4.05,15.86\r\nhgnetv2_b2,288,4605.54,444.634,2048,11.22,1.89,6.8\r\nxcit_tiny_24_p16_224,224,4600.77,445.088,2048,12.12,2.34,11.82\r\nmambaout_tiny,224,4589.13,446.226,2048,26.55,4.49,16.68\r\ndarknetaa53,256,4587.07,446.427,2048,36.02,7.97,12.39\r\nvit_small_patch16_dinov3_qkvb,256,4578.48,447.255,2048,21.6,6.26,17.03\r\ngcresnet50t,256,4575.52,447.556,2048,25.9,5.42,14.67\r\nvit_small_patch16_dinov3,256,4575.29,447.571,2048,21.59,6.26,17.03\r\nnf_seresnet50,224,4565.42,448.547,2048,28.09,4.21,11.13\r\nnf_ecaresnet50,224,4560.25,449.049,2048,25.56,4.21,11.13\r\nconvnext_nano_ols,288,4557.93,449.283,2048,15.65,4.38,15.5\r\nresnet32ts,288,4553.94,449.683,2048,17.96,5.86,14.65\r\nconvnextv2_nano,224,4551.24,449.951,2048,15.62,2.46,8.37\r\nnextvit_small,224,4549.39,450.125,2048,31.74,5.8,18.44\r\nmaxvit_pico_rw_256,256,4542.2,450.84,2048,7.46,1.83,22.3\r\nefficientvit_l1,224,4538.4,451.222,2048,52.65,5.27,15.85\r\nmaxvit_rmlp_pico_rw_256,256,4533.52,451.699,2048,7.52,1.85,24.86\r\ncoatnet_0_rw_224,224,4529.32,452.125,2048,27.44,4.43,18.73\r\nrepvgg_b2,224,4525.78,452.475,2048,80.32,18.38,6.45\r\nxcit_nano_12_p16_384,384,4524.18,452.629,2048,3.05,1.64,12.15\r\neva02_small_patch14_224,224,4522.81,452.768,2048,21.62,6.14,18.28\r\nvit_medium_patch16_gap_256,256,4521.63,452.874,2048,38.86,10.59,22.15\r\nresnetrs101,192,4509.7,454.084,2048,63.62,6.04,12.7\r\nskresnet50,224,4507.09,454.349,2048,25.8,4.11,12.5\r\nrepvgg_b3g4,224,4506.36,454.422,2048,75.63,16.06,7.55\r\nvit_base_resnet50d_224,224,4496.11,455.458,2048,110.97,8.73,16.92\r\nresnet33ts,288,44
91.44,455.942,2048,19.68,6.02,14.75\r\nefficientnet_cc_b1_8e,240,4487.7,456.323,2048,39.72,0.75,15.44\r\nregnetx_080,224,4481.56,456.94,2048,39.57,8.02,14.06\r\npoolformerv2_s12,224,4478.03,457.294,2048,11.89,1.83,5.53\r\ngcresnext50ts,256,4465.31,458.605,2048,15.67,3.75,15.46\r\ncrossvit_18_dagger_240,240,4464.16,458.723,2048,44.27,9.5,27.03\r\npit_b_224,224,4455.86,459.576,2048,73.76,12.42,32.94\r\nseresnext50_32x4d,224,4455.54,459.608,2048,27.56,4.26,14.42\r\npit_b_distilled_224,224,4449.41,460.243,2048,74.79,12.5,33.07\r\nregnetz_c16,256,4438.42,461.377,2048,13.46,2.51,16.57\r\ntwins_svt_small,224,4425.89,462.665,2048,24.06,2.94,13.75\r\nlegacy_seresnext50_32x4d,224,4413.45,463.994,2048,27.56,4.26,14.42\r\nfocalnet_tiny_srf,224,4411.28,464.224,2048,28.43,4.42,16.32\r\ncs3darknet_x,256,4393.0,466.151,2048,35.05,8.38,11.35\r\nefficientnetv2_rw_t,288,4387.47,466.742,2048,13.65,3.19,16.42\r\npvt_v2_b2,224,4364.72,234.573,1024,25.36,4.05,27.53\r\nresnet152,176,4363.52,469.299,2048,60.19,7.22,13.99\r\nlambda_resnet26rpt_256,256,4363.25,469.327,2048,10.99,3.16,11.87\r\nseresnet33ts,288,4354.34,470.287,2048,19.78,6.02,14.76\r\neca_resnet33ts,288,4347.84,470.984,2048,19.68,6.02,14.76\r\nskresnet50d,224,4343.22,471.497,2048,25.82,4.36,13.31\r\ntf_efficientnetv2_b3,300,4333.55,472.534,2048,14.36,3.04,15.74\r\nresnet26t,320,4330.81,472.853,2048,16.01,5.24,16.44\r\ngcresnet33ts,288,4315.25,474.554,2048,19.88,6.02,14.78\r\nrepvit_m2_3,224,4311.75,474.936,2048,22.93,4.52,21.32\r\nresnetv2_101,224,4310.9,475.027,2048,44.54,7.83,16.23\r\ngc_efficientnetv2_rw_t,288,4297.87,476.478,2048,13.68,3.2,16.45\r\nhgnet_tiny,288,4294.49,476.839,2048,14.74,7.51,10.51\r\nmobilevitv2_125,256,4293.39,476.98,2048,7.48,2.86,20.1\r\nregnetz_b16,288,4286.39,477.749,2048,9.72,2.39,16.43\r\ndarknet53,256,4285.33,477.863,2048,41.61,9.31,12.39\r\nvit_base_patch32_clip_448,448,4264.53,480.193,2048,88.34,17.93,23.9\r\nswiftformer_l3,224,4261.58,480.531,2048,28.49,4.01,15.77\r\nefficientnet_lite3,300,425
1.09,481.718,2048,8.2,1.65,21.85\r\ndla102,224,4222.19,485.012,2048,33.27,7.19,14.18\r\nrdnet_tiny,224,4221.36,485.111,2048,23.86,5.06,15.98\r\nfasternet_m,224,4221.15,485.133,2048,53.52,8.74,15.34\r\ntf_efficientnet_cc_b1_8e,240,4208.76,486.564,2048,39.72,0.75,15.44\r\npoolformer_s24,224,4200.2,487.551,2048,21.39,3.41,10.68\r\nvit_base_patch16_224_miil,224,4199.26,487.638,2048,94.4,17.59,23.91\r\ndeit_base_patch16_224,224,4193.35,488.346,2048,86.57,17.58,23.9\r\nresnet101,224,4193.25,488.357,2048,44.55,7.83,16.23\r\ndensenet169,224,4190.58,488.67,2048,14.15,3.4,7.3\r\nvit_base_patch16_224,224,4189.64,488.777,2048,86.57,17.58,23.9\r\nxcit_small_12_p16_224,224,4185.73,489.234,2048,26.25,4.82,12.58\r\neva02_tiny_patch14_336,336,4185.09,489.31,2048,5.76,4.68,27.16\r\ndeit_base_distilled_patch16_224,224,4183.78,489.459,2048,87.34,17.68,24.05\r\nedgenext_small_rw,320,4179.06,490.019,2048,7.83,2.46,14.85\r\nregnetx_064,224,4174.77,490.518,2048,26.21,6.49,16.37\r\nresnetv2_101d,224,4159.3,492.346,2048,44.56,8.07,17.04\r\necaresnetlight,288,4148.84,493.581,2048,30.16,6.79,13.91\r\nresnet50_mlp,256,4141.67,494.433,2048,26.65,7.05,16.25\r\ncs3sedarknet_x,256,4140.79,494.549,2048,35.4,8.38,11.35\r\necaresnet101d_pruned,288,4137.47,494.945,2048,24.88,5.75,12.71\r\nfocalnet_tiny_lrf,224,4125.24,496.416,2048,28.65,4.49,17.76\r\nwide_resnet50_2,224,4100.94,499.344,2048,68.88,11.43,14.4\r\nvit_base_mci_224,224,4086.62,501.098,2048,86.35,17.73,24.65\r\nvgg13,224,4085.01,501.278,2048,133.05,11.31,12.25\r\nfbnetv3_g,288,4083.55,501.485,2048,16.62,1.77,21.09\r\nnf_resnet50,256,4070.51,503.089,2048,25.56,5.46,14.52\r\ncs3sedarknet_xdw,256,4062.97,504.018,2048,21.6,5.97,17.18\r\nconvnext_small,224,4056.27,504.854,2048,50.22,8.71,21.56\r\nresnet51q,256,4053.68,505.174,2048,35.7,6.38,16.55\r\nresnet101d,224,4052.02,505.379,2048,44.57,8.08,17.04\r\nresnet101c,224,4040.53,506.823,2048,44.57,8.08,17.04\r\nhiera_tiny_224,224,4034.6,507.569,2048,27.91,4.91,17.13\r\nsehalonet33ts,256,4029.69,508
.171,2048,13.69,3.55,14.7\r\ngmlp_s16_224,224,4019.44,509.487,2048,19.42,4.42,15.1\r\nnf_regnet_b3,320,4008.71,510.82,2048,18.59,2.05,14.61\r\ncoatnet_rmlp_0_rw_224,224,4000.65,511.871,2048,27.45,4.72,24.89\r\necaresnet50t,256,3997.57,512.27,2048,25.57,5.64,15.45\r\nvitamin_small_224,224,3989.02,128.317,512,22.17,5.92,26.38\r\ncoatnet_bn_0_rw_224,224,3965.99,516.346,2048,27.44,4.67,22.04\r\nese_vovnet57b,256,3963.3,516.697,2048,38.61,11.69,9.82\r\nswin_tiny_patch4_window7_224,224,3941.47,519.553,2048,28.29,4.51,17.06\r\nresnest50d,224,3940.22,519.724,2048,27.48,5.4,14.36\r\ngcvit_xtiny,224,3937.42,520.089,2048,19.98,2.93,20.26\r\nefficientnet_x_b3,288,3926.4,521.554,2048,13.3,3.91,15.6\r\nconvnext_tiny,288,3925.51,521.669,2048,28.59,7.39,22.21\r\ndm_nfnet_f0,192,3924.43,521.82,2048,71.49,7.21,10.16\r\nvit_small_plus_patch16_dinov3,256,3913.11,523.303,2048,28.68,8.11,21.84\r\nvit_small_plus_patch16_dinov3_qkvb,256,3909.27,523.822,2048,28.69,8.11,21.84\r\nseresnext26t_32x4d,288,3898.36,525.307,2048,16.81,4.46,16.68\r\necaresnet26t,320,3887.55,526.767,2048,16.01,5.24,16.44\r\ntresnet_v2_l,224,3887.34,526.78,2048,46.17,8.85,16.34\r\nresnetv2_34d,384,3885.49,527.036,2048,21.82,11.49,13.35\r\ncsatv2,512,3879.59,527.843,2048,11.1,1.39,9.17\r\ndeit3_base_patch16_224,224,3875.66,528.38,2048,86.59,17.58,23.9\r\nseresnext26d_32x4d,288,3860.4,530.473,2048,16.81,4.51,16.85\r\nres2net50_26w_6s,224,3858.82,530.688,2048,37.05,6.33,15.28\r\ntwins_pcpvt_base,224,3849.92,531.901,2048,43.83,6.68,25.25\r\npvt_v2_b2_li,224,3847.37,266.104,1024,22.55,3.91,27.6\r\nefficientnet_b3,288,3847.19,532.288,2048,12.23,1.63,21.49\r\nresnetv2_50,288,3846.22,532.421,2048,25.55,6.79,18.37\r\nvit_base_patch16_xp_224,224,3838.74,533.457,2048,86.51,17.56,23.9\r\nresnet101_clip_gap,224,3833.59,534.183,2048,42.52,9.11,17.56\r\nresnetaa101d,224,3829.48,534.743,2048,44.57,9.12,17.56\r\nconvnextv2_pico,288,3823.88,535.536,2048,9.07,2.27,10.08\r\nmixnet_xl,224,3820.49,536.011,2048,11.9,0.93,14.57\r\nvit_base_p
atch16_clip_quickgelu_224,224,3815.91,536.653,2048,86.19,17.58,23.9\r\nhieradet_small,256,3805.65,201.773,768,34.73,8.51,27.76\r\nbeit3_base_patch16_224,224,3787.29,540.703,2048,86.66,17.63,23.9\r\nhgnetv2_b3,288,3777.44,542.119,2048,16.29,2.94,8.38\r\ntf_efficientnet_lite3,300,3716.76,550.954,2048,8.2,1.65,21.85\r\nefficientformer_l3,224,3712.98,551.527,2048,31.41,3.93,12.01\r\nresnet101_clip,224,3708.39,552.208,2048,56.26,9.81,18.08\r\nefficientvit_b2,288,3708.09,552.259,2048,24.33,2.64,24.03\r\nresnet61q,256,3696.27,554.023,2048,36.85,7.8,17.01\r\ncs3edgenet_x,256,3689.99,554.969,2048,47.82,11.53,12.92\r\nbeit_base_patch16_224,224,3678.91,556.652,2048,86.53,17.58,23.9\r\nvit_betwixt_patch16_reg1_gap_256,256,3677.7,556.804,2048,60.4,16.32,27.83\r\nbeitv2_base_patch16_224,224,3669.56,558.061,2048,86.53,17.58,23.9\r\nresnet50,288,3666.34,558.547,2048,25.56,6.8,18.37\r\nflexivit_base,240,3665.34,558.706,2048,86.59,20.29,28.36\r\nmixer_b16_224,224,3664.58,558.81,2048,59.88,12.62,14.53\r\nrexnetr_300,224,3646.38,561.591,2048,34.81,3.39,22.16\r\nefficientvit_l2,224,3645.89,561.682,2048,63.71,6.97,19.58\r\nresnetv2_101x1_bit,224,3644.97,561.824,2048,44.54,8.04,16.23\r\nvit_betwixt_patch16_reg4_gap_256,256,3642.66,562.17,2048,60.4,16.52,28.24\r\ncspdarknet53,256,3640.6,562.494,2048,27.64,6.57,16.81\r\nwide_resnet101_2,176,3633.14,563.645,2048,126.89,14.31,13.18\r\nlegacy_seresnet101,224,3622.54,565.304,2048,49.33,7.61,15.74\r\ncoatnet_0_224,224,3619.02,565.849,2048,25.04,4.58,24.01\r\nseresnet101,224,3614.64,566.533,2048,49.33,7.84,16.27\r\nresnet101s,224,3611.74,566.992,2048,44.67,9.19,18.64\r\nresnetblur101d,224,3609.77,567.299,2048,44.57,9.12,17.94\r\nese_vovnet39b,288,3604.46,568.133,2048,24.57,11.71,11.13\r\nmaxxvit_rmlp_nano_rw_256,256,3597.46,426.92,1536,16.78,4.37,26.05\r\ndarknetaa53,288,3596.7,569.367,2048,36.02,10.08,15.68\r\ntresnet_l,224,3588.68,570.624,2048,55.99,10.9,11.9\r\nvit_relpos_base_patch16_clsgap_224,224,3587.1,570.886,2048,86.43,17.6,25.12\r\ndpn6
8b,288,3582.3,571.65,2048,12.61,3.89,17.3\r\nvit_relpos_base_patch16_cls_224,224,3581.52,571.762,2048,86.43,17.6,25.12\r\nvit_small_patch16_384,384,3565.1,574.392,2048,22.2,15.52,50.78\r\nregnetv_064,224,3563.14,574.725,2048,30.58,6.39,16.41\r\ngcresnet50t,288,3545.63,577.569,2048,25.9,6.86,18.57\r\nswin_s3_tiny_224,224,3537.01,578.975,2048,28.33,4.64,19.13\r\nhrnet_w18_ssld,224,3528.66,580.345,2048,21.3,4.32,16.31\r\nrexnet_300,224,3527.45,580.547,2048,34.71,3.44,22.4\r\nrepvgg_b3,224,3527.38,580.552,2048,110.96,26.21,7.55\r\nvit_small_resnet50d_s16_224,224,3517.85,582.12,2048,57.53,13.48,24.82\r\necaresnet101d,224,3517.44,582.19,2048,44.57,8.08,17.07\r\nnf_resnet101,224,3515.66,582.491,2048,44.55,8.01,16.23\r\nresnet50t,288,3514.78,582.627,2048,25.57,7.14,19.53\r\nhrnet_w18,224,3514.47,582.694,2048,21.3,4.32,16.31\r\nregnety_064,224,3510.8,583.291,2048,30.58,6.39,16.41\r\nmobilevitv2_150,256,3501.51,584.846,2048,10.59,4.09,24.11\r\nvgg13_bn,224,3501.09,584.896,2048,133.05,11.33,12.25\r\nxcit_nano_12_p8_224,224,3499.99,585.081,2048,3.05,2.16,15.71\r\nrexnetr_200,288,3497.9,585.45,2048,16.52,2.62,24.96\r\nvit_small_patch16_36x1_224,224,3494.3,586.04,2048,64.67,13.71,35.69\r\nresnet50d,288,3493.03,586.257,2048,25.58,7.19,19.7\r\nnextvit_base,224,3485.65,587.502,2048,44.79,8.29,23.71\r\nvit_base_patch16_siglip_gap_224,224,3484.63,587.666,2048,85.8,17.49,23.75\r\ndavit_small,224,3483.65,220.425,768,49.75,8.8,30.49\r\nvit_base_patch16_siglip_224,224,3477.63,588.858,2048,92.88,17.73,24.06\r\ngcresnext50ts,288,3463.91,591.184,2048,15.67,4.75,19.57\r\ncoat_lite_small,224,3458.94,592.04,2048,19.84,3.96,22.09\r\nefficientnetv2_s,288,3458.59,592.109,2048,21.46,4.75,20.13\r\nmixer_l32_224,224,3452.26,593.19,2048,206.94,11.27,19.86\r\ncs3se_edgenet_x,256,3451.03,593.397,2048,50.72,11.53,12.94\r\nregnety_032,288,3443.27,594.726,2048,19.44,5.29,18.61\r\nvgg16,224,3442.01,594.934,2048,138.36,15.47,13.56\r\nresmlp_36_224,224,3440.84,595.156,2048,44.69,8.91,16.33\r\nmobilenetv4_hybr
id_medium,384,3438.15,595.615,2048,11.07,3.01,21.18\r\nsebotnet33ts_256,256,3431.45,596.782,2048,13.7,3.89,17.46\r\nregnetv_040,288,3418.52,599.041,2048,20.64,6.6,20.3\r\ncs3darknet_x,288,3407.94,600.895,2048,35.05,10.6,14.36\r\nvit_base_patch16_gap_224,224,3404.65,601.473,2048,86.57,17.49,25.59\r\nregnety_040,288,3401.24,602.085,2048,20.65,6.61,20.3\r\nvit_base_patch16_rpn_224,224,3389.42,604.176,2048,86.54,17.49,23.75\r\nfastvit_sa24,256,3372.7,607.176,2048,21.5,3.77,20.35\r\nregnety_080,224,3368.63,607.916,2048,39.18,8.0,17.97\r\nresnetaa50,288,3368.36,607.96,2048,25.56,8.52,19.24\r\ndarknet53,288,3362.78,608.97,2048,41.61,11.78,15.68\r\ndla102x,224,3358.59,609.731,2048,26.31,5.89,19.42\r\nresnetblur50,288,3305.01,464.706,1536,25.56,8.52,19.87\r\ndensenet201,224,3298.49,620.85,2048,20.01,4.34,7.85\r\nefficientnetv2_rw_s,288,3297.46,621.034,2048,23.94,4.91,21.41\r\nhiera_small_224,224,3285.41,623.305,2048,35.01,6.42,20.75\r\nresnetv2_50d_gn,288,3259.77,628.207,2048,25.57,7.24,19.7\r\nresnet50_gn,288,3244.98,631.083,2048,25.56,6.85,18.37\r\nefficientvit_b3,224,3234.91,633.047,2048,48.65,3.99,26.9\r\nmaxvit_nano_rw_256,256,3229.63,634.075,2048,15.45,4.46,30.28\r\nmobilenetv4_conv_large,384,3228.44,634.318,2048,32.59,6.43,27.31\r\nconvnext_tiny_hnf,288,3226.81,634.626,2048,28.59,7.39,22.21\r\nmaxvit_rmlp_nano_rw_256,256,3225.38,634.917,2048,15.5,4.47,31.92\r\nresnetaa50d,288,3225.03,634.981,2048,25.58,8.92,20.57\r\nvit_betwixt_patch16_gap_256,256,3223.94,635.185,2048,60.37,16.25,27.69\r\ncs3sedarknet_x,288,3218.76,636.221,2048,35.4,10.6,14.37\r\nhalonet50ts,256,3218.23,636.322,2048,22.73,5.3,19.2\r\nseresnet50,288,3216.76,636.617,2048,28.09,6.8,18.39\r\ndeit3_small_patch16_384,384,3210.69,637.821,2048,22.21,15.52,50.78\r\nvit_medium_patch16_rope_reg1_gap_256,256,3206.97,638.538,2048,38.74,10.63,22.26\r\nvit_base_patch16_plus_clip_240,240,3205.49,638.851,2048,117.21,27.41,33.08\r\npvt_v2_b3,224,3196.53,320.308,1024,45.24,6.92,37.7\r\nese_vovnet39b_evos,224,3166.93,646
.627,2048,24.58,7.07,6.74\r\nefficientnet_b3,320,3157.71,648.513,2048,12.23,2.01,26.52\r\nnf_resnet50,288,3155.65,648.946,2048,25.56,6.88,18.37\r\nresnetblur50d,288,3155.51,486.723,1536,25.58,8.92,21.19\r\nswinv2_cr_tiny_224,224,3154.41,649.204,2048,28.33,4.66,28.45\r\nese_vovnet99b,224,3153.49,649.387,2048,63.2,16.51,11.27\r\nresnet51q,288,3153.22,649.442,2048,35.7,8.07,20.94\r\nres2net101_26w_4s,224,3152.12,649.67,2048,45.21,8.1,18.45\r\nconvnextv2_tiny,224,3148.46,650.432,2048,28.64,4.47,13.44\r\nvit_small_patch16_18x2_224,224,3139.28,652.317,2048,64.67,13.71,35.69\r\nefficientnet_b3_gn,288,3137.14,652.776,2048,11.73,1.74,23.35\r\nvit_mediumd_patch16_reg4_gap_256,256,3130.46,654.153,2048,64.11,17.87,37.57\r\nregnety_080_tv,224,3126.79,654.935,2048,39.38,8.51,19.73\r\nmaxxvitv2_nano_rw_256,256,3121.35,328.024,1024,23.7,6.26,23.05\r\nresnet101d,256,3116.26,657.147,2048,44.57,10.55,22.25\r\nres2net50_26w_8s,224,3114.9,657.432,2048,48.4,8.37,17.95\r\nvit_relpos_base_patch16_224,224,3107.72,658.939,2048,86.43,17.51,24.97\r\nresnext101_32x4d,224,3102.44,660.078,2048,44.18,8.01,21.23\r\nswinv2_cr_tiny_ns_224,224,3102.07,660.154,2048,28.33,4.66,28.45\r\necaresnet50t,288,3100.73,660.436,2048,25.57,7.14,19.55\r\nseresnet50t,288,3100.73,660.439,2048,28.1,7.14,19.55\r\nvit_base_patch16_reg4_gap_256,256,3093.73,661.927,2048,86.62,23.5,33.89\r\ncait_xxs24_224,224,3089.89,662.762,2048,11.96,2.53,20.29\r\nsequencer2d_s,224,3085.87,663.618,2048,27.65,4.96,11.31\r\necaresnet50d,288,3078.4,665.227,2048,25.58,7.19,19.72\r\nresnetv2_152,224,3073.52,666.287,2048,60.19,11.55,22.56\r\nres2net101d,224,3067.03,667.699,2048,45.23,8.35,19.25\r\ndensenet121,288,3057.63,669.753,2048,7.98,4.74,11.41\r\nresnetv2_152d,224,3016.29,678.928,2048,60.2,11.8,23.36\r\nresnest50d_4s2x40d,224,3016.12,678.97,2048,30.42,4.4,17.94\r\nresnet152,224,3002.38,682.074,2048,60.19,11.56,22.56\r\nvit_relpos_base_patch16_rpn_224,224,2999.9,682.624,2048,86.41,17.51,24.97\r\nmaxvit_tiny_rw_224,224,2993.97,684.0,2048,2
9.06,5.11,33.11\r\nvgg16_bn,224,2983.35,686.407,2048,138.37,15.5,13.56\r\nedgenext_base,320,2978.92,687.452,2048,18.51,6.01,24.32\r\nvgg19,224,2973.91,688.597,2048,143.67,19.63,14.86\r\nxcit_tiny_12_p16_384,384,2967.3,690.127,2048,6.72,3.64,18.26\r\ntf_efficientnet_b3,300,2966.76,690.258,2048,12.23,1.87,23.83\r\neca_nfnet_l0,288,2963.42,691.042,2048,24.14,7.12,17.29\r\nmambaout_small,224,2961.35,691.529,2048,48.49,8.96,27.72\r\nnfnet_l0,288,2954.79,693.062,2048,35.07,7.13,17.29\r\nhgnet_small,288,2950.93,693.968,2048,24.36,14.09,14.53\r\nresnext101_32x8d,176,2950.21,694.139,2048,88.79,10.33,19.37\r\nresnetv2_50d_frn,224,2948.5,694.54,2048,25.59,4.33,11.92\r\nmobilevitv2_175,256,2939.92,696.578,2048,14.25,5.54,28.13\r\nnf_regnet_b4,320,2936.19,697.463,2048,30.21,3.29,19.88\r\nregnety_040_sgn,288,2931.68,698.519,2048,20.65,6.67,20.3\r\nresnet152d,224,2927.32,699.573,2048,60.21,11.8,23.36\r\nrdnet_small,224,2926.89,699.678,2048,50.44,8.74,22.55\r\nresnet152c,224,2917.52,701.923,2048,60.21,11.8,23.36\r\nnf_seresnet101,224,2913.16,702.977,2048,49.33,8.02,16.27\r\nhgnetv2_b5,288,2907.99,704.217,2048,39.57,10.84,18.5\r\nnf_ecaresnet101,224,2906.23,704.651,2048,44.55,8.01,16.27\r\nskresnext50_32x4d,224,2905.74,704.763,2048,27.48,4.5,17.18\r\nconvnext_base,224,2891.75,708.177,2048,88.59,15.38,28.75\r\ntf_efficientnetv2_s,300,2886.81,709.364,2048,21.46,5.35,22.73\r\ncs3edgenet_x,288,2882.56,710.424,2048,47.82,14.59,16.36\r\nresnet61q,288,2873.55,712.649,2048,36.85,9.87,21.52\r\nseresnetaa50d,288,2870.23,713.482,2048,28.11,8.92,20.59\r\ndensenetblur121d,288,2864.05,536.251,1536,8.0,5.14,13.06\r\nregnetz_040,256,2862.02,715.516,2048,27.12,4.06,24.19\r\ndla169,224,2861.03,715.773,2048,53.39,11.6,20.2\r\ncrossvit_base_240,240,2857.27,716.715,2048,105.03,21.22,36.33\r\nregnetx_120,224,2850.13,718.516,2048,46.11,12.13,21.37\r\nregnetz_040_h,256,2849.26,718.73,2048,28.94,4.12,24.29\r\nvolo_d1_224,224,2848.64,718.897,2048,26.63,6.94,24.43\r\nresnetv2_34d,448,2848.46,718.933,2048,21.8
2,15.64,18.16\r\nregnetz_b16_evos,224,2844.81,719.854,2048,9.74,1.43,9.95\r\nresnext50_32x4d,288,2842.64,720.406,2048,25.03,7.04,23.81\r\nconvnext_nano,384,2837.13,721.81,2048,15.59,7.22,24.61\r\npoolformer_s36,224,2830.85,723.396,2048,30.86,5.0,15.82\r\nnextvit_large,224,2827.97,724.142,2048,57.83,10.77,28.99\r\nefficientvit_l2,256,2826.76,724.455,2048,63.71,9.09,25.49\r\ndpn92,224,2818.77,726.503,2048,37.67,6.54,18.21\r\nvit_pe_core_base_patch16_224,224,2817.86,726.723,2048,93.67,17.82,24.21\r\ncaformer_s18,224,2814.81,727.527,2048,26.34,4.13,19.39\r\ncoatnet_rmlp_1_rw_224,224,2814.75,727.538,2048,41.69,7.85,35.47\r\nregnetz_d32,256,2812.21,728.203,2048,27.58,5.98,23.74\r\nregnetz_d8,256,2806.09,729.782,2048,23.37,3.97,23.74\r\nseresnext101_32x4d,224,2792.14,733.431,2048,48.96,8.02,21.26\r\ntwins_pcpvt_large,224,2788.78,734.318,2048,60.99,9.84,35.82\r\nfasternet_l,224,2788.23,734.464,2048,93.47,15.52,20.46\r\ninception_next_small,224,2785.71,735.138,2048,49.37,8.36,19.27\r\nbotnet50ts_256,256,2778.34,737.083,2048,22.74,5.54,22.23\r\nregnetz_c16,320,2773.94,738.249,2048,13.46,3.92,25.88\r\nmvitv2_tiny,224,2769.37,739.449,2048,24.17,4.7,21.16\r\nlegacy_seresnext101_32x4d,224,2766.86,740.141,2048,48.96,8.02,21.26\r\nnfnet_f0,256,2758.73,742.318,2048,71.49,12.62,18.05\r\nconvnextv2_nano,288,2749.84,744.714,2048,15.62,4.06,13.84\r\nlambda_resnet26t,256,2745.45,745.924,2048,10.96,3.02,11.87\r\nresnext50d_32x4d,288,2741.1,747.09,2048,25.05,7.44,25.13\r\nfocalnet_small_srf,224,2719.28,753.097,2048,49.89,8.62,26.26\r\ntresnet_xl,224,2717.01,753.7,2048,78.44,15.2,15.34\r\nvit_base_patch16_plus_240,240,2696.01,759.584,2048,117.56,27.41,33.08\r\nvit_base_patch16_siglip_gap_256,256,2695.32,759.769,2048,85.84,23.13,33.23\r\nresnet152s,224,2687.32,762.036,2048,60.32,12.92,24.96\r\nvit_base_patch16_siglip_256,256,2680.09,764.092,2048,92.93,23.44,33.63\r\nresnetv2_50d_evos,224,2677.16,764.929,2048,25.59,4.33,11.92\r\nhalo2botnet50ts_256,256,2675.36,765.454,2048,22.64,5.02,21.78\r\
nregnety_120,224,2674.93,765.581,2048,51.82,12.14,21.38\r\ngcvit_tiny,224,2670.83,766.761,2048,28.22,4.79,29.82\r\nmambaout_small_rw,224,2664.6,768.543,2048,48.5,8.96,27.72\r\nvit_large_r50_s32_224,224,2629.24,778.875,2048,328.99,19.58,24.41\r\nmambaout_tiny,288,2627.66,779.346,2048,26.55,7.41,27.58\r\ninception_v4,299,2619.03,781.909,2048,42.68,12.28,15.09\r\ncsatv2_21m,512,2612.08,784.009,2048,20.7,2.94,15.85\r\nmobilenetv4_conv_aa_large,384,2605.21,786.071,2048,32.59,7.07,32.29\r\ncoatnet_rmlp_1_rw2_224,224,2599.48,787.797,2048,41.72,8.11,40.13\r\ncoatnet_1_rw_224,224,2599.0,787.946,2048,41.72,8.04,34.6\r\nlegacy_xception,299,2598.29,788.156,2048,22.86,8.4,35.83\r\nvgg19_bn,224,2598.26,788.161,2048,143.68,19.66,14.86\r\nconvit_small,224,2588.59,791.109,2048,27.78,5.76,17.87\r\nvit_small_patch8_224,224,2584.12,792.466,2048,21.67,22.44,80.84\r\nconvformer_s18,224,2577.84,794.398,2048,26.77,3.96,15.82\r\nlegacy_seresnet152,224,2566.03,798.076,2048,66.82,11.33,22.08\r\nseresnext50_32x4d,288,2565.18,798.339,2048,27.56,7.04,23.82\r\nseresnet152,224,2564.48,798.546,2048,66.82,11.57,22.61\r\nhrnet_w32,224,2555.92,801.235,2048,41.23,8.97,22.02\r\nefficientnet_b3_gn,320,2550.87,802.815,2048,11.73,2.14,28.83\r\nfocalnet_small_lrf,224,2550.38,802.978,2048,50.34,8.74,28.61\r\necaresnet50t,320,2542.99,805.3,2048,25.57,8.82,24.13\r\nfastvit_mci1,256,2535.25,807.755,2048,21.46,4.67,27.3\r\nvit_base_patch16_rope_224,224,2534.36,808.022,2048,86.43,17.58,23.9\r\nvit_base_patch16_rope_mixed_224,224,2522.12,811.949,2048,86.44,17.58,25.7\r\ndavit_base,224,2521.8,304.501,768,87.95,15.51,40.66\r\nvit_base_patch16_rope_ape_224,224,2521.59,812.115,2048,86.59,17.58,23.9\r\nvit_base_patch16_rope_mixed_ape_224,224,2512.6,815.028,2048,86.59,17.58,25.7\r\nresnetv2_101,288,2500.66,818.932,2048,44.54,12.94,26.83\r\nese_vovnet57b,320,2499.85,819.196,2048,38.61,18.26,15.34\r\nefficientvit_b3,256,2493.53,821.262,2048,48.65,5.2,35.01\r\nhrnet_w30,224,2489.01,822.777,2048,37.71,8.15,21.21\r\nmaxxvit_
rmlp_tiny_rw_256,256,2480.93,619.073,1536,29.64,6.66,39.76\r\nvit_pe_spatial_tiny_patch16_512,512,2464.81,830.83,2048,5.68,10.46,61.64\r\nswin_small_patch4_window7_224,224,2464.01,831.121,2048,49.61,8.77,27.47\r\nmobilenetv4_hybrid_medium,448,2461.64,831.909,2048,11.07,4.2,29.64\r\ndensenet161,224,2459.53,832.635,2048,28.68,7.79,11.06\r\nregnetx_160,224,2447.12,836.85,2048,54.28,15.99,25.52\r\nconvnext_small,288,2442.48,838.443,2048,50.22,14.39,35.65\r\ntnt_s_legacy_patch16_224,224,2440.97,838.947,2048,23.76,5.24,24.37\r\nmaxvit_tiny_tf_224,224,2432.69,841.819,2048,30.92,5.6,35.78\r\nwide_resnet101_2,224,2428.32,843.324,2048,126.89,22.8,21.23\r\neva02_base_patch16_clip_224,224,2428.02,843.433,2048,86.26,17.62,26.32\r\nresnet101,288,2427.51,843.609,2048,44.55,12.95,26.83\r\nmixnet_xxl,224,2424.16,844.776,2048,23.96,2.04,23.43\r\nsequencer2d_m,224,2422.61,845.315,2048,38.31,6.55,14.26\r\nmobilenetv4_conv_large,448,2421.99,845.528,2048,32.59,8.75,37.17\r\nxception41p,299,2410.28,849.634,2048,26.91,9.25,39.86\r\nmobilevitv2_200,256,2382.29,859.63,2048,18.45,7.22,32.15\r\nregnety_160,224,2377.02,861.526,2048,83.59,15.96,23.04\r\nwide_resnet50_2,288,2367.52,864.978,2048,68.88,18.89,23.81\r\nefficientnet_el,300,2359.2,868.028,2048,10.59,8.0,30.7\r\nvit_betwixt_patch16_rope_reg4_gap_256,256,2354.08,869.916,2048,60.23,16.52,28.24\r\ntnt_s_patch16_224,224,2346.93,872.562,2048,23.77,5.25,24.37\r\nefficientnet_el_pruned,300,2345.65,873.052,2048,10.59,8.0,30.7\r\nvit_base_r50_s16_224,224,2339.51,875.326,2048,97.89,21.66,35.28\r\nfastvit_sa36,256,2325.44,880.642,2048,31.46,5.59,29.46\r\npoolformerv2_s24,224,2318.12,883.423,2048,21.34,3.42,10.68\r\nefficientnet_b4,320,2312.43,885.598,2048,19.34,3.13,34.76\r\npvt_v2_b4,224,2307.35,443.751,1024,62.56,10.14,53.74\r\nconvmixer_1024_20_ks9_p14,224,2295.98,891.938,2048,24.38,5.55,5.51\r\nswinv2_tiny_window8_256,256,2294.82,892.393,2048,28.35,5.96,24.57\r\ncoatnet_1_224,224,2292.25,893.385,2048,42.23,8.7,39.0\r\nresnet152d,256,2289.31,89
4.532,2048,60.21,15.41,30.51\r\nxcit_tiny_12_p8_224,224,2287.83,895.103,2048,6.71,4.81,23.6\r\ntf_efficientnet_el,300,2264.44,904.356,2048,10.59,8.0,30.7\r\npvt_v2_b5,224,2256.51,907.548,2048,81.96,11.76,50.92\r\nhiera_base_224,224,2243.75,912.701,2048,51.52,9.4,30.42\r\ndm_nfnet_f0,256,2232.83,917.163,2048,71.49,12.62,18.05\r\nrexnetr_300,288,2227.76,919.251,2048,34.81,5.59,36.61\r\nresnetaa101d,288,2221.36,921.898,2048,44.57,15.07,29.03\r\nresnetblur101d,288,2216.85,692.821,1536,44.57,15.07,29.65\r\nhiera_small_abswin_256,256,2213.94,924.989,2048,34.36,8.29,26.38\r\nmaxvit_tiny_rw_256,256,2213.45,925.199,2048,29.07,6.74,44.35\r\neca_nfnet_l1,256,2212.23,925.703,2048,41.41,9.62,22.04\r\nmaxvit_rmlp_tiny_rw_256,256,2210.02,926.639,2048,29.15,6.77,46.92\r\nconvnext_base,256,2206.13,928.27,2048,88.59,20.09,37.55\r\nconvnext_tiny,384,2203.31,929.453,2048,28.59,13.14,39.48\r\nxcit_small_24_p16_224,224,2200.46,930.651,2048,47.67,9.1,23.64\r\ndla102x2,224,2196.95,932.138,2048,41.28,9.34,29.91\r\ncs3se_edgenet_x,320,2195.02,932.971,2048,50.72,18.01,20.21\r\nresnet200,224,2191.26,934.574,2048,64.67,15.07,32.19\r\nefficientvit_l2,288,2189.98,935.114,2048,63.71,11.51,32.19\r\nvit_pe_core_small_patch16_384,384,2185.8,936.897,2048,23.78,15.69,51.23\r\nhgnet_base,224,2178.72,939.941,2048,71.58,25.14,15.47\r\nvit_relpos_base_patch16_plus_240,240,2163.72,946.45,2048,117.38,27.3,34.33\r\nlamhalobotnet50ts_256,256,2148.24,953.294,2048,22.57,5.02,18.44\r\nmvitv2_small_cls,224,2147.16,953.769,2048,34.87,7.04,28.17\r\nhgnetv2_b6,224,2145.02,954.714,2048,75.26,16.88,21.23\r\nvit_small_r26_s32_384,384,2141.32,956.347,2048,36.47,10.43,29.85\r\nefficientnet_b3_g8_gn,288,2139.84,957.03,2048,14.25,2.59,23.35\r\ntwins_svt_base,224,2134.57,959.375,2048,56.07,8.59,26.33\r\nregnetv_064,288,2132.01,960.537,2048,30.58,10.55,27.11\r\nregnety_064,288,2114.03,968.716,2048,30.58,10.56,27.11\r\nseresnet101,288,2112.81,969.263,2048,49.33,12.95,26.87\r\nnest_tiny,224,2096.89,976.622,2048,17.06,5.83,25.48
\r\nxception41,299,2093.03,978.423,2048,26.97,9.28,39.86\r\nnest_tiny_jx,224,2082.85,983.213,2048,17.06,5.83,25.48\r\ncait_xxs36_224,224,2078.66,985.2,2048,17.3,3.77,30.34\r\nmvitv2_small,224,2068.33,990.112,2048,34.87,7.0,28.08\r\ncoat_tiny,224,2065.08,991.688,2048,5.5,4.35,27.2\r\ndpn98,224,2062.85,992.743,2048,61.57,11.73,25.2\r\necaresnet101d,288,2058.48,994.853,2048,44.57,13.35,28.19\r\nmaxvit_tiny_pm_256,256,2057.64,995.259,2048,30.09,6.61,47.9\r\nresnext101_64x4d,224,2048.19,999.849,2048,83.46,15.52,31.21\r\nregnety_080,288,2043.49,1002.151,2048,39.18,13.22,29.69\r\nresnetrs101,288,2037.57,1005.065,2048,63.62,13.56,28.53\r\nnfnet_f1,224,2031.89,1007.879,2048,132.63,17.87,22.94\r\nhrnet_w18_ssld,288,2021.16,1013.235,2048,21.3,7.14,26.96\r\nresnext101_32x8d,224,2016.95,1015.336,2048,88.79,16.48,31.21\r\nnf_regnet_b4,384,2006.93,1020.417,2048,30.21,4.7,28.61\r\nrdnet_base,224,2006.39,1020.683,2048,87.45,15.4,31.14\r\nmobilenetv4_hybrid_large,384,1998.05,1024.937,2048,37.76,7.77,34.52\r\nmobilenetv4_conv_aa_large,448,1997.4,768.936,1536,32.59,9.63,43.94\r\ninception_next_base,224,1985.27,1031.543,2048,86.67,14.85,25.69\r\neva02_small_patch14_336,336,1980.92,1033.805,2048,22.13,15.48,54.33\r\nvit_so150m_patch16_reg4_gap_256,256,1970.32,1039.361,2048,134.13,36.75,53.21\r\nfocalnet_base_srf,224,1966.92,1041.17,2048,88.15,15.28,35.01\r\nseresnet152d,256,1966.73,1041.262,2048,66.84,15.42,30.56\r\ncrossvit_15_dagger_408,408,1962.53,1043.494,2048,28.5,21.45,95.05\r\nswinv2_cr_small_224,224,1960.24,1044.723,2048,49.7,9.07,50.27\r\nresnet101d,320,1954.74,1047.645,2048,44.57,16.48,34.77\r\nvit_so150m_patch16_reg4_map_256,256,1954.47,1047.796,2048,141.48,37.17,53.68\r\nresnetrs152,256,1953.13,1048.512,2048,86.62,15.59,30.83\r\nefficientnetv2_s,384,1948.36,1051.083,2048,21.46,8.44,35.77\r\nconvnextv2_small,224,1942.2,1054.423,2048,50.32,8.71,21.56\r\nswinv2_cr_small_ns_224,224,1940.18,1055.531,2048,49.7,9.08,50.27\r\ninception_resnet_v2,299,1937.42,1057.028,2048,55.84,13.18,
25.06\r\nvit_mediumd_patch16_rope_reg1_gap_256,256,1935.27,1058.178,2048,63.95,17.65,37.02\r\nefficientvit_b3,288,1931.11,1060.471,2048,48.65,6.58,44.2\r\nvit_base_patch16_dinov3,256,1929.71,1061.233,2048,85.64,23.6,34.06\r\nvit_base_patch16_dinov3_qkvb,256,1925.62,1063.484,2048,85.66,23.6,34.06\r\nvit_base_patch16_rope_reg1_gap_256,256,1908.33,1073.118,2048,86.43,23.22,33.39\r\nefficientnet_lite4,380,1906.8,1073.989,2048,13.01,4.04,45.66\r\nconvnextv2_tiny,288,1905.36,1074.81,2048,28.64,7.39,22.21\r\nresnest101e,256,1901.33,1077.086,2048,48.28,13.38,28.66\r\nseresnext101_64x4d,224,1898.08,1078.935,2048,88.23,15.53,31.25\r\npoolformer_m36,224,1894.73,1080.849,2048,56.17,8.8,22.02\r\nfastvit_ma36,256,1887.71,1084.851,2048,43.98,7.82,34.98\r\nvit_medium_patch16_gap_384,384,1882.27,1087.981,2048,39.03,26.08,67.54\r\ntf_efficientnetv2_s,384,1875.59,1091.849,2048,21.46,8.44,35.77\r\nseresnext101_32x8d,224,1871.97,1093.975,2048,93.57,16.48,31.25\r\neva02_base_patch14_224,224,1870.91,1094.606,2048,85.76,23.22,36.55\r\nefficientnetv2_rw_s,384,1866.28,1097.322,2048,23.94,8.72,38.03\r\nseresnext101d_32x8d,224,1849.02,1107.551,2048,93.59,16.72,32.05\r\nvitamin_base_224,224,1844.87,138.733,256,87.72,22.68,52.77\r\ngmlp_b16_224,224,1828.96,1119.708,2048,73.08,15.78,30.21\r\nresnext101_32x4d,288,1828.38,1120.058,2048,44.18,13.24,35.09\r\ntresnet_m,448,1823.32,561.547,1024,31.39,22.99,29.21\r\nfocalnet_base_lrf,224,1814.06,1128.908,2048,88.75,15.43,38.13\r\nregnetz_d32,320,1801.66,1136.666,2048,27.58,9.33,37.08\r\nregnetz_040,320,1789.67,1144.281,2048,27.12,6.35,37.78\r\nxception65p,299,1786.01,1146.63,2048,39.82,13.91,52.48\r\nswin_base_patch4_window7_224,224,1785.97,1146.657,2048,87.77,15.47,36.63\r\nmambaout_base,224,1784.99,1147.289,2048,84.81,15.83,36.95\r\nlevit_384_s8,224,1783.29,1148.383,2048,39.06,9.95,35.86\r\nmaxvit_rmlp_small_rw_224,224,1782.86,1148.664,2048,64.9,10.75,49.3\r\nregnetz_040_h,320,1782.55,1148.859,2048,28.94,6.43,37.94\r\nhrnet_w40,224,1778.96,1151.194,20
48,57.56,12.75,25.29\r\nregnetz_d8,320,1778.14,1151.705,2048,23.37,6.19,37.08\r\nfastvit_mci2,256,1774.33,1154.183,2048,35.7,7.85,36.09\r\nvit_large_patch32_384,384,1773.8,1154.517,2048,306.63,45.31,43.86\r\nmambaout_base_tall_rw,224,1764.46,1160.632,2048,86.48,16.15,38.74\r\nmambaout_base_short_rw,224,1763.51,1161.27,2048,88.83,16.31,38.08\r\nresnet152,288,1751.58,1169.173,2048,60.19,19.11,37.28\r\nefficientnetv2_m,320,1739.69,1177.162,2048,54.14,11.01,39.97\r\ntf_efficientnet_lite4,380,1733.81,1181.144,2048,13.01,4.04,45.66\r\nseresnextaa101d_32x8d,224,1732.4,1182.116,2048,93.59,17.25,34.16\r\nlevit_conv_384_s8,224,1731.11,1182.999,2048,39.06,9.95,35.86\r\nvit_so150m2_patch16_reg1_gap_256,256,1728.34,1184.889,2048,136.06,37.0,56.93\r\nconvnext_base,288,1728.14,1185.028,2048,88.59,25.43,47.53\r\nlambda_resnet50ts,256,1725.78,1186.668,2048,21.54,5.07,17.48\r\nswin_s3_small_224,224,1723.58,1188.16,2048,49.74,9.43,37.84\r\nregnetz_c16_evos,256,1718.23,1191.873,2048,13.49,2.48,16.57\r\nmobilenetv4_conv_aa_large,480,1713.85,896.176,1536,32.59,11.05,50.45\r\nregnetz_b16_evos,288,1712.2,1196.076,2048,9.74,2.36,16.43\r\nefficientformer_l7,224,1707.72,1199.198,2048,82.23,10.17,24.45\r\nmambaout_small,288,1698.52,1205.694,2048,48.49,14.81,45.82\r\ncoat_lite_medium,224,1695.37,1207.955,2048,44.57,9.81,40.06\r\nhiera_base_plus_224,224,1675.82,1222.036,2048,69.9,12.67,37.98\r\ncait_s24_224,224,1674.84,1222.757,2048,46.92,9.35,40.58\r\ncsatv2_21m,640,1671.01,1225.56,2048,20.7,4.72,26.68\r\nhrnet_w48_ssld,224,1669.93,1226.354,2048,77.47,17.34,28.56\r\nhrnet_w48,224,1665.91,1229.31,2048,77.47,17.34,28.56\r\ndm_nfnet_f1,224,1665.53,1229.589,2048,132.63,17.87,22.94\r\nefficientnet_b3_g8_gn,320,1664.95,1230.02,2048,14.25,3.2,28.83\r\ncoat_mini,224,1664.36,1230.453,2048,10.34,6.82,33.68\r\nresnet200d,256,1662.06,1232.146,2048,64.69,20.0,43.09\r\nhrnet_w44,224,1660.96,1232.966,2048,67.06,14.94,26.92\r\ncoatnet_2_rw_224,224,1652.38,1239.363,2048,73.87,15.09,49.22\r\nconvnext_large,224,1
651.7,1239.873,2048,197.77,34.4,43.13\r\nseresnext101_32x4d,288,1649.36,1241.627,2048,48.96,13.25,35.12\r\nsamvit_base_patch16_224,224,1647.92,1242.74,2048,86.46,17.54,24.54\r\nresnet50x4_clip_gap,288,1629.39,1256.851,2048,65.62,19.57,34.11\r\ngcvit_small,224,1622.45,946.661,1536,51.09,8.57,41.61\r\nvolo_d2_224,224,1599.93,1279.993,2048,58.68,14.34,41.34\r\nvit_betwixt_patch16_reg4_gap_384,384,1595.2,1283.784,2048,60.6,39.71,85.28\r\nmaxxvit_rmlp_small_rw_256,256,1585.89,645.653,1024,66.01,14.67,58.38\r\ncoatnet_rmlp_2_rw_224,224,1584.13,1292.765,2048,73.88,15.18,54.78\r\nxcit_medium_24_p16_224,224,1577.64,1298.083,2048,84.4,16.13,31.71\r\nxcit_tiny_24_p16_384,384,1577.62,1298.098,2048,12.12,6.87,34.29\r\nresnetv2_50d_evos,288,1574.37,1300.781,2048,25.59,7.15,19.7\r\nresnet50x4_clip,288,1571.69,1303.0,2048,87.14,21.35,35.27\r\ntiny_vit_21m_384,384,1567.93,1306.115,2048,21.22,13.72,77.83\r\npoolformerv2_s36,224,1561.34,1311.628,2048,30.79,5.01,15.82\r\nmaxvit_small_tf_224,224,1561.16,1311.791,2048,68.93,11.66,53.17\r\nsequencer2d_l,224,1560.26,1312.541,2048,54.3,9.74,22.12\r\nefficientnet_b4,384,1557.83,1314.584,2048,19.34,4.51,50.04\r\nxception65,299,1557.23,1315.08,2048,39.92,13.96,52.48\r\nswinv2_base_window12_192,192,1544.54,1325.912,2048,109.28,11.9,39.72\r\ncoatnet_2_224,224,1543.17,995.299,1536,74.68,16.5,52.67\r\nconvnextv2_nano,384,1543.14,1327.092,2048,15.62,7.22,24.61\r\nnaflexvit_base_patch16_siglip,384,1542.82,1327.373,2048,92.93,56.12,102.2\r\nmambaout_base_wide_rw,224,1542.58,1327.584,2048,94.45,17.78,42.6\r\nefficientnetv2_rw_m,320,1538.3,1331.295,2048,53.24,12.72,47.14\r\nmambaout_small_rw,288,1536.63,1332.727,2048,48.5,14.81,45.82\r\nregnetz_e8,256,1531.16,1337.484,2048,57.7,9.91,40.94\r\ntnt_b_patch16_224,224,1530.92,1337.694,2048,65.43,14.1,39.01\r\nregnety_120,288,1530.2,1338.326,2048,51.82,20.06,35.34\r\nmvitv2_base_cls,224,1513.89,1352.766,2048,65.44,10.23,40.65\r\ntwins_svt_large,224,1513.15,1353.394,2048,99.27,15.15,35.1\r\nseresnet152,288,15
12.42,1354.063,2048,66.82,19.11,37.34\r\nvit_base_patch16_clip_384,384,1511.12,1355.226,2048,86.86,55.54,101.56\r\ndpn131,224,1505.45,1360.325,2048,79.25,16.09,32.97\r\nhiera_base_abswin_256,256,1502.42,1363.07,2048,51.27,12.46,40.7\r\ncaformer_s36,224,1488.51,1375.816,2048,39.3,8.0,37.53\r\nregnety_320,224,1467.21,1395.784,2048,145.05,32.34,30.26\r\nmambaout_base_plus_rw,224,1466.55,1396.388,2048,101.66,19.19,45.16\r\ndavit_large,224,1459.41,526.198,768,196.81,34.6,60.99\r\nswinv2_tiny_window16_256,256,1454.67,1407.822,2048,28.35,6.68,39.02\r\nnextvit_small,384,1452.14,1410.269,2048,31.74,17.25,57.14\r\nmvitv2_base,224,1450.65,1411.727,2048,51.47,10.16,40.5\r\nefficientvit_l3,224,1449.28,1413.053,2048,246.04,27.62,39.16\r\nswinv2_cr_base_224,224,1431.32,1430.798,2048,87.88,15.86,59.66\r\npoolformer_m48,224,1431.0,1431.107,2048,73.47,11.59,29.17\r\ncrossvit_18_dagger_408,408,1430.88,1431.227,2048,44.61,32.47,124.87\r\nswinv2_small_window8_256,256,1429.29,1432.83,2048,49.73,11.58,40.14\r\nswinv2_cr_small_ns_256,256,1426.9,1435.225,2048,49.7,12.07,76.21\r\nresnet152d,320,1425.54,1436.59,2048,60.21,24.08,47.67\r\nswin_s3_base_224,224,1424.52,1437.622,2048,71.13,13.69,48.26\r\nconvit_base,224,1424.09,1438.052,2048,86.54,17.52,31.77\r\nxcit_small_12_p16_384,384,1422.71,1439.44,2048,26.25,14.14,36.51\r\nseresnet200d,256,1420.3,1441.896,2048,71.86,20.01,43.15\r\nconvnextv2_base,224,1419.73,1442.462,2048,88.72,15.38,28.75\r\nswinv2_cr_base_ns_224,224,1417.1,1445.149,2048,87.88,15.86,59.66\r\necaresnet200d,256,1416.68,1445.57,2048,64.69,20.0,43.15\r\nresnetrs200,256,1404.94,1457.667,2048,93.21,20.18,43.42\r\nmobilevitv2_150,384,1403.81,1094.11,1536,10.59,9.2,54.25\r\nfastvit_mci3,256,1399.54,1463.265,2048,125.07,14.82,44.88\r\nmobilenetv4_hybrid_large,448,1398.0,1464.892,2048,37.76,10.74,48.61\r\nregnety_160,288,1394.16,1468.931,2048,83.59,26.37,38.07\r\neca_nfnet_l1,320,1393.19,1469.952,2048,41.41,14.92,34.42\r\ntf_efficientnet_b4,380,1391.89,1471.319,2048,19.34,4.49,49.49\
r\nconvnext_base,320,1388.79,1474.606,2048,88.59,31.39,58.68\r\nsenet154,224,1387.31,1476.184,2048,115.09,20.77,38.69\r\neva_large_patch14_196,196,1369.63,1495.238,2048,304.14,61.57,63.52\r\nresnetv2_50x1_bit,448,1369.44,1495.429,2048,25.55,16.62,44.46\r\nlegacy_senet154,224,1369.26,1495.646,2048,115.09,20.77,38.69\r\nvit_base_patch16_384,384,1369.26,1495.628,2048,86.86,55.54,101.56\r\nconvnext_small,384,1368.78,1496.158,2048,50.22,25.58,63.37\r\nvit_large_patch16_224,224,1365.08,1500.206,2048,304.33,61.6,63.52\r\ndeit_base_patch16_384,384,1364.26,1501.115,2048,86.86,55.54,101.56\r\ndeit_base_distilled_patch16_384,384,1362.63,1502.914,2048,87.63,55.65,101.82\r\nconvformer_s36,224,1362.6,1502.939,2048,40.01,7.67,30.5\r\ndensenet264d,224,1362.28,1503.301,2048,72.74,13.57,14.0\r\nvit_mediumd_patch16_reg4_gap_384,384,1359.01,1506.903,2048,64.27,43.67,113.51\r\nmaxxvitv2_rmlp_base_rw_224,224,1348.1,759.542,1024,116.09,24.2,62.77\r\nmaxvit_rmlp_small_rw_256,256,1347.13,1520.214,2048,64.9,14.15,66.09\r\nnest_small,224,1337.84,1530.766,2048,38.35,10.35,40.04\r\nnest_small_jx,224,1332.35,1537.07,2048,38.35,10.35,40.04\r\nresnetv2_50x3_bit,224,1322.47,1548.559,2048,217.32,37.06,33.34\r\nmobilenetv4_conv_aa_large,544,1320.2,775.581,1024,32.59,14.19,64.79\r\nhrnet_w64,224,1314.16,1558.368,2048,128.06,28.97,35.09\r\nhgnetv2_b6,288,1309.99,1563.314,2048,75.26,27.9,35.09\r\ndpn107,224,1307.33,1566.496,2048,86.92,18.38,33.46\r\nregnetx_320,224,1303.8,1570.731,2048,107.81,31.81,36.3\r\nmobilevitv2_175,384,1300.81,787.152,1024,14.25,12.47,63.29\r\nhgnet_base,288,1299.62,1575.779,2048,71.58,41.55,25.57\r\nnaflexvit_base_patch16_gap,384,1295.84,1580.371,2048,86.63,55.86,102.34\r\ncoat_small,224,1294.05,1582.579,2048,21.69,12.61,44.25\r\nnaflexvit_base_patch16_par_gap,384,1291.86,1585.251,2048,86.63,55.86,102.34\r\nnf_regnet_b5,384,1290.8,1586.57,2048,49.74,7.95,42.9\r\nnaflexvit_base_patch16_parfac_gap,384,1285.7,1592.835,2048,86.46,55.86,102.34\r\nresnet200,288,1280.66,1599.11,2048,64
.67,24.91,53.21\r\nnaflexvit_base_patch16_map,384,1278.02,1602.411,2048,93.72,56.23,102.46\r\ndeit3_large_patch16_224,224,1269.14,1613.631,2048,304.37,61.6,63.52\r\ndeit3_base_patch16_384,384,1263.13,1621.303,2048,86.88,55.54,101.56\r\nconvnext_large_mlp,256,1254.28,1632.746,2048,200.13,44.94,56.33\r\nbeit3_large_patch16_224,224,1248.19,1640.717,2048,304.57,61.72,63.52\r\nvit_small_patch14_dinov2,518,1242.61,1648.08,2048,22.06,46.76,198.79\r\nseresnet152d,320,1228.92,1666.447,2048,66.84,24.09,47.72\r\nvit_small_patch14_reg4_dinov2,518,1224.67,1672.214,2048,22.06,46.95,199.77\r\nlevit_512_s8,224,1221.16,838.49,1024,73.97,21.77,52.28\r\nmixer_l16_224,224,1221.06,1677.167,2048,208.2,44.6,41.69\r\nresnetrs152,320,1219.12,1679.837,2048,86.62,24.34,48.14\r\nrdnet_large,224,1219.11,1259.887,1536,186.27,34.74,46.67\r\nbeitv2_large_patch16_224,224,1218.83,1680.239,2048,304.43,61.6,63.52\r\nefficientvit_l2,384,1218.5,1680.696,2048,63.71,20.45,57.01\r\nbeit_large_patch16_224,224,1217.52,1682.053,2048,304.43,61.6,63.52\r\ngcvit_base,224,1211.46,845.214,1024,90.32,14.87,55.48\r\nlevit_conv_512_s8,224,1205.21,849.596,1024,73.97,21.77,52.28\r\nflexivit_large,240,1201.05,1705.103,2048,304.36,70.99,75.39\r\nxcit_tiny_24_p8_224,224,1183.02,1731.106,2048,12.11,9.21,45.39\r\nefficientformerv2_s0,224,1180.05,1735.463,2048,3.6,0.41,5.3\r\nxcit_nano_12_p8_384,384,1178.47,1737.778,2048,3.05,6.34,46.08\r\nxception71,299,1175.21,1742.597,2048,42.34,18.09,69.92\r\nvit_base_patch16_18x2_224,224,1174.16,1744.162,2048,256.73,52.51,71.38\r\nresnext101_64x4d,288,1173.12,1745.706,2048,83.46,25.66,51.59\r\ntf_efficientnetv2_m,384,1172.96,1745.942,2048,54.14,15.85,57.52\r\nvit_pe_spatial_small_patch16_512,512,1172.36,1746.846,2048,21.98,31.8,123.27\r\nvolo_d3_224,224,1151.52,1778.468,2048,86.33,20.78,60.09\r\nbeit_base_patch16_384,384,1130.9,1810.89,2048,86.74,55.54,101.56\r\nefficientformerv2_s1,224,1130.73,1811.143,2048,6.19,0.67,7.66\r\nefficientvit_l3,256,1130.69,1811.189,2048,246.04,36.06,50.98\
r\nvit_base_patch16_siglip_gap_384,384,1130.6,1811.354,2048,86.09,55.43,101.3\r\nnextvit_base,384,1130.41,1811.66,2048,44.79,24.62,73.95\r\nvit_base_patch16_siglip_384,384,1123.61,1822.623,2048,93.18,56.12,102.2\r\nvit_large_patch14_clip_224,224,1119.32,1829.614,2048,304.2,81.08,88.79\r\nseresnet269d,256,1113.89,1838.535,2048,113.67,26.59,53.6\r\nmobilenetv5_base,256,1113.64,1838.961,2048,82.65,20.05,36.89\r\nresnext101_32x16d,224,1099.71,1862.244,2048,194.03,36.27,51.18\r\nconvnext_xlarge,224,1097.72,1865.62,2048,350.2,60.98,57.5\r\nregnetz_d8_evos,256,1095.99,1868.57,2048,23.46,4.5,24.92\r\nxcit_small_12_p8_224,224,1092.2,1875.053,2048,26.21,18.69,47.21\r\nrepvgg_d2se,320,1091.35,1876.499,2048,120.39,66.99,23.42\r\nregnetz_c16_evos,320,1085.73,1886.235,2048,13.49,3.86,25.88\r\nnfnet_f2,256,1083.8,1889.593,2048,193.78,33.76,41.85\r\nseresnext101_32x8d,288,1082.48,1891.891,2048,93.57,27.24,51.63\r\nswin_large_patch4_window7_224,224,1079.46,1897.192,2048,196.53,34.53,54.94\r\npoolformerv2_m36,224,1079.31,1897.443,2048,56.08,8.81,22.02\r\nresnetrs270,256,1078.29,1899.255,2048,129.86,27.06,55.84\r\nseresnet200d,288,1077.29,1900.998,2048,71.86,25.32,54.6\r\necaresnet200d,288,1076.06,1903.171,2048,64.69,25.31,54.59\r\nconvnextv2_tiny,384,1070.08,1435.344,1536,28.64,13.14,39.48\r\nmambaout_base,288,1069.59,1914.68,2048,84.81,26.16,61.08\r\nseresnext101d_32x8d,288,1068.59,1916.499,2048,93.59,27.64,52.95\r\ncaformer_m36,224,1066.62,1920.026,2048,56.2,13.29,50.48\r\nmambaout_base_short_rw,288,1060.34,1931.38,2048,88.83,26.96,62.94\r\nmaxvit_rmlp_base_rw_224,224,1052.93,1945.004,2048,116.14,23.15,92.64\r\nmobilevitv2_200,384,1051.65,1460.505,1536,18.45,16.24,72.34\r\nswinv2_base_window8_256,256,1046.14,1957.605,2048,87.92,20.37,52.59\r\nresnet200d,320,1040.89,1967.486,2048,64.69,31.25,67.33\r\nvit_large_patch14_224,224,1019.58,2008.601,2048,304.2,81.08,88.79\r\nefficientnetv2_m,416,1017.47,2012.77,2048,54.14,18.6,67.5\r\nresnetv2_152x2_bit,224,1014.95,2017.77,2048,236.34,46.9
5,45.11\r\nmambaout_base_tall_rw,288,1008.03,2031.616,2048,86.48,26.69,64.04\r\nseresnextaa101d_32x8d,288,1000.54,2046.839,2048,93.59,28.51,56.44\r\naimv2_large_patch14_224,224,996.59,2054.93,2048,309.2,82.3,85.2\r\nvit_base_patch8_224,224,990.63,2067.302,2048,86.58,78.22,161.69\r\nconvformer_m36,224,990.2,2068.194,2048,57.05,12.89,42.05\r\nconvnext_large,288,989.96,2068.71,2048,197.77,56.87,71.29\r\ndavit_huge,224,988.41,776.967,768,348.92,61.23,81.32\r\nnest_base,224,984.08,2081.061,2048,67.72,17.96,53.39\r\nnfnet_f1,320,981.85,2085.807,2048,132.63,35.97,46.77\r\nhrnet_w48_ssld,288,980.97,2087.671,2048,77.47,28.66,47.21\r\nnest_base_jx,224,979.24,2091.343,2048,67.72,17.96,53.39\r\neca_nfnet_l2,320,977.55,2094.974,2048,56.72,20.95,47.43\r\nvolo_d1_384,384,970.9,2109.311,2048,26.78,22.75,108.55\r\nconvnext_base,384,969.37,1584.46,1536,88.59,45.21,84.49\r\nswinv2_large_window12_192,192,962.46,2127.811,2048,228.77,26.17,56.53\r\nefficientnet_b5,416,959.55,2134.272,2048,30.39,8.27,80.68\r\nxcit_large_24_p16_224,224,953.69,2147.371,2048,189.1,35.86,47.27\r\ncaformer_s18,384,947.09,1621.74,1536,26.34,13.42,77.34\r\nvit_large_patch14_xp_224,224,942.77,2172.262,2048,304.06,81.01,88.79\r\nvit_large_patch14_clip_quickgelu_224,224,941.53,2175.105,2048,303.97,81.08,88.79\r\ncoatnet_rmlp_3_rw_224,224,939.76,1634.403,1536,165.15,33.56,79.47\r\nfastvit_mci4,256,939.55,2179.714,2048,321.57,27.78,60.59\r\ncoatnet_3_rw_224,224,939.42,1634.993,1536,181.81,33.44,73.83\r\nregnetz_e8,320,933.14,2194.666,2048,57.7,15.46,63.94\r\nmambaout_base_wide_rw,288,927.6,2207.756,2048,94.45,29.39,70.41\r\ntresnet_l,448,925.61,1106.23,1024,55.99,43.59,47.56\r\nnextvit_large,384,923.71,2217.079,2048,57.83,32.0,90.76\r\nswinv2_small_window16_256,256,918.69,2229.201,2048,49.73,12.82,66.29\r\nmaxvit_base_tf_224,224,917.28,2232.629,2048,119.47,24.04,95.01\r\ncoatnet_3_224,224,916.65,1117.047,1024,166.97,36.56,79.01\r\nregnety_640,224,911.21,2247.483,2048,281.38,64.16,42.5\r\nresnetv2_101x1_bit,448,908.17
,2255.009,2048,44.54,31.65,64.93\r\ndm_nfnet_f2,256,893.75,2291.435,2048,193.78,33.76,41.85\r\nvit_large_patch16_siglip_gap_256,256,893.61,2291.77,2048,303.36,80.8,88.34\r\nvit_large_patch16_siglip_256,256,891.75,2296.551,2048,315.96,81.34,88.88\r\nresnetrs200,320,882.44,2320.786,2048,93.21,31.51,67.81\r\nmaxxvitv2_rmlp_large_rw_224,224,880.7,1162.667,1024,215.42,44.14,87.15\r\nmambaout_base_plus_rw,288,879.3,2329.042,2048,101.66,31.72,74.64\r\nswinv2_cr_large_224,224,877.92,2332.713,2048,196.68,35.1,78.42\r\nconvformer_s18,384,876.09,1753.17,1536,26.77,11.63,46.49\r\nefficientnetv2_rw_m,416,874.54,2341.748,2048,53.24,21.49,79.62\r\nvit_large_r50_s32_384,384,872.48,2347.258,2048,329.09,57.43,76.52\r\nconvnextv2_large,224,868.98,2356.718,2048,197.96,34.4,43.13\r\nvit_so150m_patch16_reg4_gap_384,384,866.49,2363.501,2048,134.42,87.97,165.47\r\nvit_large_patch16_rope_224,224,863.38,2372.001,2048,304.17,61.6,63.52\r\nvit_large_patch16_rope_ape_224,224,861.36,2377.581,2048,304.37,61.6,63.52\r\nvit_large_patch16_rope_mixed_224,224,860.12,2380.991,2048,304.2,61.6,68.34\r\nvit_large_patch16_rope_mixed_ape_224,224,858.07,2386.689,2048,304.4,61.6,68.34\r\nconvnextv2_base,288,853.79,2398.66,2048,88.72,25.43,47.53\r\nhalonet_h1,256,847.94,1207.582,1024,8.1,3.0,51.17\r\nseresnet269d,288,846.68,2418.805,2048,113.67,33.65,67.81\r\nefficientnet_x_b5,448,844.44,2425.209,2048,33.44,23.35,68.87\r\nnf_regnet_b5,456,838.31,2442.95,2048,49.74,11.7,61.95\r\nefficientnet_b5,448,825.67,2480.364,2048,30.39,9.59,93.56\r\nseresnextaa101d_32x8d,320,818.87,2500.953,2048,93.59,35.19,69.67\r\npoolformerv2_m48,224,810.67,2526.234,2048,73.35,11.59,29.17\r\ndm_nfnet_f1,320,806.45,2539.468,2048,132.63,35.97,46.77\r\nregnety_160,384,804.32,1909.635,1536,83.59,46.87,67.67\r\nvitamin_large2_224,224,803.08,318.725,256,333.58,75.05,112.83\r\nconvnext_large_mlp,320,800.72,1918.212,1536,200.13,70.21,88.02\r\nresnetv2_101x3_bit,224,800.21,2559.253,2048,387.93,71.23,48.7\r\nvitamin_large_224,224,798.61,320.515,
256,333.32,75.05,112.83\r\nefficientnet_h_b5,448,796.58,2570.934,2048,45.88,27.16,73.9\r\ncaformer_b36,224,791.27,2588.169,2048,98.75,23.22,67.3\r\nswinv2_cr_tiny_384,384,788.96,1297.846,1024,28.33,15.34,161.01\r\ntiny_vit_21m_512,512,785.13,1956.291,1536,21.26,26.93,177.93\r\nxcit_tiny_12_p8_384,384,769.68,2660.792,2048,6.71,14.13,69.14\r\nnaflexvit_so150m2_patch16_reg1_gap,384,762.56,2685.623,2048,136.06,89.53,178.22\r\nmaxvit_tiny_tf_384,384,760.51,1346.421,1024,30.98,17.53,123.42\r\nvit_base_r50_s16_384,384,756.49,2707.161,2048,98.95,67.43,135.03\r\nnaflexvit_so150m2_patch16_reg1_map,384,752.81,2720.385,2048,142.46,90.33,179.2\r\ntf_efficientnetv2_m,480,752.48,2721.616,2048,54.14,24.76,89.84\r\nvit_so150m2_patch16_reg1_gap_384,384,750.44,2728.985,2048,136.33,89.53,178.22\r\nxcit_small_24_p16_384,384,748.66,2735.486,2048,47.67,26.72,68.58\r\nefficientvit_l3,320,737.95,2775.174,2048,246.04,56.32,79.34\r\nconvformer_b36,224,735.17,2785.686,2048,99.88,22.69,56.06\r\nvolo_d4_224,224,720.61,2841.956,2048,192.96,44.34,80.22\r\nhiera_large_224,224,716.63,2857.763,2048,213.74,40.34,83.37\r\ntf_efficientnet_b5,456,709.33,2887.185,2048,30.39,10.46,98.86\r\necaresnet269d,320,700.12,2925.089,2048,102.09,41.53,83.69\r\nregnetz_d8_evos,320,694.71,2947.93,2048,23.46,7.03,38.92\r\ntresnet_xl,448,691.9,1479.905,1024,78.44,60.77,61.31\r\nswinv2_base_window12to16_192to256,256,679.0,2262.086,1536,87.92,22.02,84.71\r\nswinv2_base_window16_256,256,678.01,2265.417,1536,87.92,22.02,84.71\r\nresnest200e,320,666.77,3071.438,2048,70.2,35.69,82.78\r\nefficientnetv2_l,384,666.1,3074.53,2048,118.52,36.1,101.16\r\nconvnext_xlarge,288,662.31,2319.107,1536,350.2,100.8,95.05\r\neca_nfnet_l2,384,659.41,3105.751,2048,56.72,30.05,68.28\r\ntf_efficientnetv2_l,384,658.75,3108.854,2048,118.52,36.1,101.16\r\nresnetrs350,288,657.35,3115.404,2048,163.96,43.67,87.09\r\nvit_large_patch16_dinov3,256,655.04,3126.453,2048,303.08,82.43,90.56\r\nvit_large_patch16_dinov3_qkvb,256,654.37,3129.673,2048,303.13,82.43
,90.56\r\nvit_so400m_patch14_siglip_gap_224,224,647.29,3163.869,2048,412.44,109.57,106.13\r\nvit_so400m_patch14_siglip_224,224,646.74,3166.565,2048,427.68,110.26,106.73\r\nvit_so400m_patch16_siglip_gap_256,256,646.61,3167.239,2048,412.65,109.62,106.13\r\nconvmixer_768_32,224,645.85,3170.968,2048,21.11,19.55,25.95\r\nvit_so400m_patch16_siglip_256,256,644.14,3179.364,2048,427.89,110.31,106.73\r\nmaxvit_large_tf_224,224,641.76,3191.191,2048,211.79,43.68,127.35\r\neca_nfnet_l3,352,622.06,3292.249,2048,72.04,32.57,73.12\r\nvitamin_large_256,256,615.28,312.011,192,333.38,99.0,154.99\r\nvitamin_large2_256,256,614.91,312.196,192,333.64,99.0,154.99\r\nvit_base_patch16_siglip_gap_512,512,609.9,3357.851,2048,86.43,107.0,246.15\r\nvit_base_patch16_siglip_512,512,606.11,3378.831,2048,93.52,108.22,247.74\r\ninception_next_base,384,601.99,3401.976,2048,86.67,43.64,75.48\r\neva02_large_patch14_224,224,584.77,3502.136,2048,303.27,81.15,97.2\r\nefficientformerv2_s2,224,581.55,3521.547,2048,12.71,1.27,11.77\r\necaresnet269d,352,581.11,3524.206,2048,102.09,50.25,101.25\r\neva02_large_patch14_clip_224,224,577.4,3546.847,2048,304.11,81.18,97.2\r\ncoatnet_4_224,224,571.34,1792.223,1024,275.43,62.48,129.26\r\nvit_huge_patch14_clip_224,224,569.17,3598.157,2048,632.05,167.4,139.41\r\nnfnet_f2,352,563.91,3631.71,2048,193.78,63.22,79.06\r\nxcit_small_24_p8_224,224,563.07,3637.129,2048,47.63,35.81,90.78\r\nnasnetalarge,331,562.16,2732.249,1536,88.75,23.89,90.56\r\nresnetrs270,352,561.92,3644.565,2048,129.86,51.13,105.48\r\nconvnext_large,384,558.88,1832.168,1024,197.77,101.1,126.74\r\nconvnext_large_mlp,384,558.36,1833.888,1024,200.13,101.11,126.74\r\nresnext101_32x32d,224,555.03,3689.846,2048,468.53,87.29,91.12\r\ncoat_lite_medium_384,384,543.86,2824.198,1536,44.57,28.73,116.7\r\nvolo_d2_384,384,542.33,3776.215,2048,58.87,46.17,184.51\r\npnasnet5large,331,536.73,3815.654,2048,86.06,25.04,92.89\r\nresnet50x16_clip_gap,384,536.62,3816.441,2048,136.2,70.32,100.64\r\nxcit_medium_24_p16_384,384,536
.31,3818.639,2048,84.4,47.39,91.64\r\nvitamin_xlarge_256,256,529.04,241.912,128,436.06,130.13,177.37\r\nvit_so150m2_patch16_reg1_gap_448,448,528.6,3874.278,2048,136.5,127.51,287.05\r\nconvnextv2_large,288,527.58,2911.368,1536,197.96,56.87,71.29\r\nvit_huge_patch14_224,224,526.66,3888.622,2048,630.76,167.4,139.41\r\nnfnet_f3,320,522.99,3915.927,2048,254.92,68.77,83.93\r\nresnet50x16_clip,384,522.75,3917.689,2048,167.33,74.9,103.54\r\naimv2_huge_patch14_224,224,519.31,3943.617,2048,680.85,179.01,126.22\r\nresmlp_big_24_224,224,518.03,3953.4,2048,129.14,100.23,87.31\r\ncoatnet_rmlp_2_rw_384,384,513.33,1496.045,768,73.88,47.69,209.43\r\nmaxvit_small_tf_384,384,512.74,1497.772,768,69.02,35.87,183.65\r\ncait_xxs24_384,384,512.72,3994.356,2048,12.03,9.63,122.66\r\nefficientnet_x_b5,576,506.61,4042.503,2048,33.44,38.59,113.83\r\nvit_huge_patch14_xp_224,224,501.15,4086.516,2048,631.8,167.3,139.41\r\ncaformer_s36,384,499.96,3072.176,1536,39.3,26.08,150.33\r\ndeit3_huge_patch14_224,224,498.07,4111.773,2048,632.13,167.4,139.41\r\nefficientvit_l3,384,498.02,3084.117,1536,246.04,81.08,114.02\r\nmambaout_base_plus_rw,384,496.56,2062.119,1024,101.66,56.39,132.7\r\nvit_pe_spatial_base_patch16_512,512,495.12,4136.3,2048,86.43,107.13,246.54\r\nvit_huge_patch14_clip_quickgelu_224,224,491.0,4171.006,2048,632.08,167.4,139.41\r\nvit_base_patch14_dinov2,518,489.1,4187.19,2048,86.58,151.71,397.58\r\nmvitv2_large_cls,224,488.87,4189.167,2048,234.58,42.17,111.69\r\nefficientnetv2_xl,384,488.09,4195.866,2048,208.12,52.81,139.2\r\nvit_large_patch14_clip_336,336,487.56,4200.42,2048,304.53,191.11,270.24\r\nregnety_320,384,486.6,3156.499,1536,145.05,95.0,88.87\r\nswinv2_cr_small_384,384,486.47,2104.922,1024,49.7,29.7,298.03\r\nvit_base_patch14_reg4_dinov2,518,485.31,4219.928,2048,86.58,152.25,399.53\r\ntf_efficientnetv2_xl,384,481.35,4254.626,2048,208.12,52.81,139.2\r\nconvnextv2_base,384,480.93,2129.137,1024,88.72,45.21,84.49\r\nvolo_d5_224,224,477.44,4289.451,2048,295.46,72.4,118.11\r\nefficient
net_h_b5,576,469.61,4360.954,2048,45.88,44.9,122.13\r\nmvitv2_large,224,467.26,4382.934,2048,217.99,43.87,112.02\r\ndm_nfnet_f2,352,466.94,4385.915,2048,193.78,63.22,79.06\r\nregnety_1280,224,464.61,4407.932,2048,644.81,127.66,71.58\r\nconvformer_s36,384,462.83,3318.616,1536,40.01,22.54,89.62\r\nvit_huge_patch14_gap_224,224,462.64,4426.679,2048,630.76,166.73,138.74\r\nresnetrs420,320,461.12,4441.278,2048,191.89,64.2,126.56\r\nswin_base_patch4_window12_384,384,450.09,2275.037,1024,87.9,47.19,134.78\r\neva_large_patch14_336,336,447.15,4580.033,2048,304.53,191.1,270.24\r\nefficientnet_b6,528,445.7,2297.426,1024,43.04,19.4,167.39\r\nvit_large_patch16_384,384,445.22,4599.89,2048,304.72,191.21,270.24\r\neva02_base_patch14_448,448,439.37,4661.102,2048,87.12,107.11,259.14\r\ndm_nfnet_f3,320,434.01,4718.725,2048,254.92,68.77,83.93\r\nmaxxvitv2_rmlp_base_rw_384,384,432.51,887.795,384,116.09,72.98,213.74\r\nswinv2_large_window12to16_192to256,256,431.14,2375.026,1024,196.74,47.81,121.53\r\nmaxvit_tiny_tf_512,512,429.7,1787.237,768,31.05,33.49,257.59\r\nseresnextaa201d_32x8d,320,425.82,4809.484,2048,149.39,70.22,138.71\r\naimv2_large_patch14_336,336,425.33,4814.968,2048,309.53,194.22,227.08\r\ntf_efficientnet_b6,528,422.29,2424.798,1024,43.04,19.4,167.39\r\nefficientnetv2_l,480,416.49,4917.253,2048,118.52,56.4,157.99\r\nrdnet_large,384,416.35,1229.67,512,186.27,102.09,137.13\r\ndeit3_large_patch16_384,384,416.07,4922.23,2048,304.76,191.21,270.24\r\nswinv2_cr_huge_224,224,415.17,3699.59,1536,657.83,115.97,121.08\r\nvit_large_patch14_clip_quickgelu_336,336,411.79,4973.295,2048,304.29,191.11,270.24\r\ntf_efficientnetv2_l,480,409.96,4995.549,2048,118.52,56.4,157.99\r\nconvnext_xxlarge,256,407.45,2513.129,1024,846.47,198.09,124.45\r\nconvnextv2_huge,224,402.62,2543.277,1024,660.29,115.0,79.07\r\ndavit_giant,224,402.2,1909.44,768,1406.47,192.92,153.06\r\nfocalnet_huge_fl3,224,401.86,3822.146,1536,745.28,118.26,104.8\r\nxcit_medium_24_p8_224,224,401.37,5102.403,2048,84.32,63.53,121.23\
r\nxcit_tiny_24_p8_384,384,398.06,5144.916,2048,12.11,27.05,132.95\r\nvit_giant_patch16_gap_224,224,396.27,5168.156,2048,1011.37,202.46,139.26\r\nsam2_hiera_tiny,896,389.58,164.24,64,26.85,99.86,384.63\r\nefficientformerv2_l,224,385.18,5316.9,2048,26.32,2.59,18.54\r\nmaxvit_xlarge_tf_224,224,382.64,2676.079,1024,506.99,97.52,191.04\r\nbeit_large_patch16_384,384,376.83,5434.757,2048,305.0,191.21,270.24\r\nvit_large_patch16_siglip_gap_384,384,375.85,5448.896,2048,303.69,190.85,269.55\r\nvit_large_patch16_siglip_384,384,374.66,5466.224,2048,316.28,192.07,270.75\r\nconvnext_xlarge,384,374.52,2050.575,768,350.2,179.2,168.99\r\ncait_xs24_384,384,369.96,5535.737,2048,26.67,19.28,183.98\r\nresnetv2_152x4_bit,224,369.01,5549.989,2048,936.53,186.9,90.22\r\nvit_giant_patch14_clip_224,224,368.17,5562.507,2048,1012.65,267.18,192.64\r\nxcit_small_12_p8_384,384,366.17,4194.734,1536,26.21,54.92,138.29\r\neca_nfnet_l3,448,361.23,5669.425,2048,72.04,52.55,118.4\r\ncaformer_m36,384,360.18,2842.979,1024,56.2,42.11,196.35\r\nconvmixer_1536_20,224,357.54,5727.916,2048,51.63,48.68,33.03\r\nresnetrs350,384,356.16,5750.234,2048,163.96,77.59,154.74\r\nswinv2_cr_base_384,384,354.87,2885.429,1024,87.88,50.57,333.68\r\nnaflexvit_so400m_patch16_siglip,384,353.73,5789.616,2048,427.89,259.65,319.77\r\nhiera_huge_224,224,352.98,5801.907,2048,672.78,124.85,150.95\r\nvitamin_large2_336,336,346.92,276.671,96,333.83,175.72,307.47\r\nresnetv2_152x2_bit,384,346.0,4439.298,1536,236.34,136.16,132.56\r\nvitamin_large_336,336,345.82,277.556,96,333.57,175.72,307.47\r\nvit_giant_patch14_224,224,345.24,5932.042,2048,1012.61,267.18,192.64\r\neva_giant_patch14_224,224,344.68,5941.6,2048,1012.56,267.18,192.64\r\neva_giant_patch14_clip_224,224,343.43,5963.376,2048,1012.59,267.18,192.64\r\ncoatnet_5_224,224,343.28,2237.194,768,687.47,145.49,194.24\r\ncait_xxs36_384,384,341.73,5993.043,2048,17.37,14.35,183.7\r\nconvformer_m36,384,337.63,3032.87,1024,57.05,37.87,123.56\r\nresnetv2_50x3_bit,448,335.26,2290.73,768,217.3
2,145.7,133.37\r\nsam2_hiera_small,896,333.73,191.738,64,33.95,123.99,442.63\r\nxcit_large_24_p16_384,384,326.1,6280.261,2048,189.1,105.35,137.17\r\nmaxvit_rmlp_base_rw_384,384,325.83,4713.999,1536,116.14,70.97,318.95\r\naimv2_1b_patch14_224,224,324.17,6317.564,2048,1234.96,322.43,170.39\r\nbeit3_giant_patch14_224,224,318.62,6427.735,2048,1013.22,267.56,192.64\r\nresnest269e,416,316.22,4857.27,1536,110.93,77.69,171.98\r\nvit_pe_core_large_patch14_336,336,315.01,6501.391,2048,317.15,192.33,271.43\r\nseresnextaa201d_32x8d,384,300.35,5114.003,1536,149.39,101.11,199.72\r\nconvnextv2_large,384,298.11,2576.17,768,197.96,101.1,126.74\r\nnfnet_f3,416,298.05,6871.321,2048,254.92,115.58,141.78\r\nmaxvit_base_tf_384,384,297.81,2578.806,768,119.65,73.8,332.9\r\nvitamin_xlarge_336,336,297.07,323.11,96,436.06,230.18,347.33\r\nregnety_640,384,294.47,3477.335,1024,281.38,188.47,124.83\r\nvit_huge_plus_patch16_dinov3,256,289.75,7067.988,2048,840.51,224.88,193.59\r\nvit_huge_plus_patch16_dinov3_qkvb,256,289.58,7072.267,2048,840.59,224.88,193.59\r\ncait_s24_384,384,287.05,7134.648,2048,47.06,32.17,245.31\r\nnfnet_f4,384,286.75,7141.976,2048,316.07,122.14,147.57\r\nvit_giantopt_patch16_siglip_gap_256,256,283.13,7233.475,2048,1134.84,298.42,199.62\r\nmaxvit_small_tf_512,512,282.19,1814.321,512,69.13,67.26,383.77\r\nvit_giantopt_patch16_siglip_256,256,282.08,7260.278,2048,1163.17,299.66,200.43\r\nefficientnetv2_xl,512,279.7,7322.091,2048,208.12,93.85,247.32\r\nvolo_d3_448,448,279.34,7331.436,2048,86.63,96.33,446.83\r\nswin_large_patch4_window12_384,384,277.95,1841.984,512,196.74,104.08,202.16\r\ntf_efficientnetv2_xl,512,275.93,7422.11,2048,208.12,93.85,247.32\r\nvit_so400m_patch16_siglip_gap_384,384,274.87,7450.828,2048,413.02,258.11,318.42\r\nfocalnet_huge_fl4,224,274.19,5601.826,1536,686.46,118.9,113.34\r\nvit_so400m_patch16_siglip_384,384,273.71,7482.337,2048,428.26,259.65,319.77\r\ncaformer_b36,384,267.1,2875.223,768,98.75,72.33,261.79\r\nefficientnet_b7,600,264.29,2905.858,768,66.35
,38.33,289.94\r\nvitamin_large_384,384,263.19,243.122,64,333.71,234.44,440.16\r\nvitamin_large2_384,384,262.58,243.698,64,333.97,234.44,440.16\r\nresnetv2_152x2_bit,448,254.04,4030.834,1024,236.34,184.99,180.43\r\nresnetrs420,416,253.83,8068.477,2048,191.89,108.45,213.79\r\ntf_efficientnet_b7,600,253.29,3032.093,768,66.35,38.33,289.94\r\neva02_large_patch14_clip_336,336,252.07,8124.686,2048,304.43,191.34,289.13\r\nconvformer_b36,384,250.26,3068.717,768,99.88,66.67,164.75\r\ndm_nfnet_f3,416,249.24,8216.826,2048,254.92,115.58,141.78\r\nvit_huge_patch14_clip_336,336,245.21,8351.896,2048,632.46,390.97,407.54\r\nxcit_large_24_p8_224,224,243.43,8412.861,2048,188.93,141.23,181.56\r\nconvnextv2_huge,288,243.26,3157.079,768,660.29,190.1,130.7\r\ndm_nfnet_f4,384,239.37,8555.596,2048,316.07,122.14,147.57\r\naimv2_large_patch14_448,448,228.6,6719.084,1536,309.98,367.84,491.78\r\naimv2_huge_patch14_336,336,225.69,9074.139,2048,681.34,416.36,337.08\r\nvitamin_xlarge_384,384,225.65,283.591,64,436.06,306.38,493.46\r\nswinv2_cr_large_384,384,222.26,2303.537,512,196.68,108.96,404.96\r\nvit_intern300m_patch14_448,448,221.83,9232.057,2048,304.01,362.05,656.39\r\nmvitv2_huge_cls,224,218.54,7028.401,1536,694.8,120.67,243.63\r\nvit_so400m_patch14_siglip_gap_378,378,213.54,9590.628,2048,412.99,333.46,451.19\r\nvit_so400m_patch14_siglip_gap_384,384,213.47,9593.638,2048,412.99,333.46,451.19\r\nvit_so400m_patch14_siglip_378,378,212.7,9628.567,2048,428.23,335.4,452.89\r\nvit_so400m_patch14_siglip_384,384,212.55,9635.261,2048,428.23,335.4,452.89\r\nmaxvit_large_tf_384,384,210.76,2429.253,512,212.03,132.55,445.84\r\nvit_gigantic_patch14_clip_224,224,210.38,9734.53,2048,1844.91,483.96,275.37\r\ndavit_base_fl,768,209.13,918.028,192,90.37,190.32,530.15\r\nresnet50x64_clip_gap,448,206.95,4947.983,1024,365.03,253.96,233.22\r\nvit_large_patch16_siglip_gap_512,512,205.7,9956.074,2048,304.15,361.84,655.36\r\nvit_large_patch16_siglip_512,512,204.87,9996.294,2048,316.74,364.0,657.48\r\nresnetv2_101x3_bit,
448,204.16,3761.615,768,387.93,280.33,194.78\r\nnfnet_f5,416,203.57,10060.304,2048,377.21,170.71,204.56\r\nresnet50x64_clip,448,202.16,5065.173,1024,420.38,265.02,239.13\r\nvit_gigantic_patch14_224,224,198.81,10301.123,2048,1844.44,483.95,275.37\r\nfocalnet_large_fl3,384,192.3,5324.909,1024,239.13,105.06,168.04\r\nbeit_large_patch16_512,512,192.18,10656.742,2048,305.67,362.24,656.39\r\ncait_s36_384,384,191.34,10703.677,2048,68.37,47.99,367.4\r\nvit_huge_patch14_clip_378,378,191.18,10712.161,2048,632.68,503.79,572.79\r\nxcit_small_24_p8_384,384,190.06,8081.799,1536,47.63,105.24,265.91\r\nfocalnet_large_fl4,384,184.99,5535.315,1024,239.32,105.2,181.78\r\nvit_gigantic_patch14_clip_quickgelu_224,224,184.81,11081.456,2048,1844.91,483.96,275.37\r\nswinv2_base_window12to24_192to384,384,182.63,2102.506,384,87.92,55.25,280.36\r\nsam2_hiera_base_plus,896,176.99,361.544,64,68.68,227.48,828.88\r\nvolo_d4_448,448,173.85,11780.395,2048,193.41,197.13,527.35\r\nefficientnet_b8,672,171.93,2977.951,512,87.41,63.48,442.89\r\ndm_nfnet_f5,416,170.6,12004.585,2048,377.21,170.71,204.56\r\nvit_pe_spatial_large_patch14_448,448,170.53,12009.204,2048,303.96,362.05,656.39\r\nregnety_1280,384,166.57,4610.547,768,644.81,374.99,210.2\r\nvit_huge_patch14_clip_quickgelu_378,378,166.14,12326.788,2048,632.68,503.79,572.79\r\ntf_efficientnet_b8,672,165.99,3084.485,512,87.41,63.48,442.89\r\nmaxvit_base_tf_512,512,165.19,3099.395,512,119.88,138.02,703.99\r\nvit_large_patch14_dinov2,518,163.03,9421.607,1536,304.37,507.15,1058.82\r\nvit_large_patch14_reg4_dinov2,518,162.3,9463.84,1536,304.37,508.9,1064.02\r\nvit_pe_lang_large_patch14_448,448,160.96,12723.323,2048,291.42,346.99,629.09\r\nnfnet_f4,512,156.16,9836.231,1536,316.07,216.26,262.26\r\naimv2_3b_patch14_224,224,156.02,13126.67,2048,2720.66,705.91,252.44\r\neva_giant_patch14_336,336,149.03,13742.55,2048,1013.01,620.64,550.67\r\nvit_so400m_patch14_siglip_gap_448,448,148.45,13795.535,2048,413.33,487.18,764.26\r\nvit_so400m_patch16_siglip_gap_512,512,1
48.28,13811.94,2048,413.53,487.4,764.26\r\nvit_so400m_patch16_siglip_512,512,147.65,13870.21,2048,428.77,490.13,766.65\r\nnfnet_f6,448,147.53,13882.302,2048,438.36,229.7,273.62\r\nfocalnet_xlarge_fl3,384,144.34,5320.773,768,408.79,185.61,223.99\r\nvit_huge_patch16_gap_448,448,140.75,14550.388,2048,631.67,544.7,636.83\r\nfocalnet_xlarge_fl4,384,138.73,5536.072,768,409.03,185.79,242.31\r\neva02_large_patch14_448,448,138.63,14773.458,2048,305.08,362.33,689.95\r\nbeit3_giant_patch14_336,336,138.53,14783.744,2048,1013.67,621.52,550.67\r\naimv2_1b_patch14_336,336,137.67,11156.72,1536,1235.61,743.59,454.16\r\nconvnextv2_huge,384,137.34,2796.02,384,660.29,337.96,232.35\r\nxcit_medium_24_p8_384,384,136.02,7528.349,1024,84.32,186.67,354.73\r\ndm_nfnet_f4,512,131.09,11716.806,1536,316.07,216.26,262.26\r\nswinv2_cr_giant_224,224,127.85,8009.034,1024,2598.76,483.85,309.15\r\nmaxvit_xlarge_tf_384,384,125.87,3050.834,384,475.32,292.78,668.76\r\ndm_nfnet_f6,448,124.34,16471.427,2048,438.36,229.7,273.62\r\naimv2_huge_patch14_448,448,122.82,8337.679,1024,682.03,774.02,731.38\r\nvit_giantopt_patch16_siglip_gap_384,384,122.07,16776.663,2048,1135.33,694.1,567.12\r\nvit_giantopt_patch16_siglip_384,384,121.74,16822.805,2048,1163.66,696.85,568.91\r\nmaxvit_large_tf_512,512,118.95,3228.098,384,212.33,244.75,942.15\r\nswinv2_large_window12to24_192to384,384,118.18,2166.133,256,196.74,116.15,407.83\r\nvolo_d5_448,448,116.47,13187.82,1536,295.91,315.06,737.92\r\nnfnet_f5,544,115.46,13302.955,1536,377.21,290.97,349.71\r\nnfnet_f7,480,112.63,18183.255,2048,499.5,300.08,355.86\r\ntf_efficientnet_l2,475,111.31,4599.897,512,480.31,172.11,609.89\r\nswinv2_cr_huge_384,384,104.68,3668.422,384,657.94,352.04,583.18\r\ndm_nfnet_f5,544,97.98,15675.905,1536,377.21,290.97,349.71\r\nregnety_2560,384,93.07,5501.404,512,1282.6,747.83,296.49\r\neva02_enormous_patch14_clip_224,224,92.95,22033.952,2048,4350.56,1132.46,497.58\r\ncait_m36_384,384,92.04,11125.434,1024,271.22,173.11,734.81\r\nvolo_d5_512,512,89.81,114
02.122,1024,296.09,425.09,1105.37\r\nnfnet_f6,576,89.17,17224.943,1536,438.36,378.69,452.2\r\ndavit_huge_fl,768,82.99,2313.49,192,360.64,744.84,1060.3\r\nxcit_large_24_p8_384,384,82.73,9282.643,768,188.93,415.0,531.82\r\nresnetv2_152x4_bit,480,82.43,6211.115,512,936.53,844.84,414.26\r\nconvnextv2_huge,512,77.37,3308.478,256,660.29,600.81,413.07\r\naimv2_1b_patch14_448,448,75.03,10235.437,768,1236.53,1367.03,983.56\r\ndm_nfnet_f6,576,74.84,20524.279,1536,438.36,378.69,452.2\r\nvit_gigantic_patch14_clip_378,378,72.01,28439.686,2048,1845.7,1429.82,1047.37\r\nmaxvit_xlarge_tf_512,512,70.88,2708.602,192,475.77,534.14,1413.22\r\nnfnet_f7,608,69.6,14713.466,1024,499.5,480.39,570.85\r\naimv2_3b_patch14_336,336,68.11,15034.816,1024,2721.64,1615.48,674.17\r\nsam2_hiera_large,1024,58.07,826.496,48,212.15,907.48,2190.34\r\nvit_7b_patch16_dinov3,256,52.28,29382.0,1536,6716.03,1775.1,515.87\r\nvit_giant_patch14_dinov2,518,50.98,15065.061,768,1136.48,1784.2,2757.89\r\nvit_giant_patch14_reg4_dinov2,518,50.77,15127.18,768,1136.48,1790.08,2771.21\r\neva_giant_patch14_560,560,49.1,20855.197,1024,1014.45,1906.76,2577.17\r\nmobilenetv5_300m,768,45.87,8371.866,384,294.13,435.74,842.16\r\nmobilenetv5_300m_enc,768,44.56,8617.41,384,294.13,435.74,842.16\r\nefficientnet_l2,800,40.33,4760.903,192,480.31,479.12,1707.39\r\ncait_m48_448,448,39.97,12809.823,512,356.46,329.41,1708.23\r\ntf_efficientnet_l2,800,39.48,4863.355,192,480.31,479.12,1707.39\r\nvit_pe_core_gigantic_patch14_448,448,39.42,25978.159,1024,1882.03,2060.12,1774.21\r\nvit_pe_lang_gigantic_patch14_448,448,39.04,26228.202,1024,1740.92,1931.99,1664.88\r\naimv2_3b_patch14_448,448,37.52,13645.343,512,2723.02,2939.61,1462.76\r\nvit_pe_spatial_gigantic_patch14_448,448,36.73,27877.193,1024,1851.89,2055.25,1771.04\r\nswinv2_cr_giant_384,384,34.38,7446.187,256,2598.76,1450.71,1394.86\r\nvit_so400m_patch14_siglip_gap_896,896,28.07,13677.958,384,416.87,2731.49,8492.88\r\nsamvit_base_patch16,1024,23.88,1004.835,24,89.67,486.43,1343.27\r\nsamv
it_large_patch16,1024,12.87,1864.93,24,308.28,1493.86,2553.78\r\nsamvit_huge_patch16,1024,9.46,1690.971,16,637.03,2982.23,3428.16\r\n"
  },
  {
    "path": "results/benchmark-infer-amp-nhwc-pt113-cu117-rtx3090.csv",
    "content": "model,infer_samples_per_sec,infer_step_time,infer_batch_size,infer_img_size,infer_gmacs,infer_macts,param_count\ntinynet_e,72737.62,14.068,1024,106,0.03,0.69,2.04\nmobilenetv3_small_050,54822.3,18.668,1024,224,0.03,0.92,1.59\nlcnet_035,53629.35,19.084,1024,224,0.03,1.04,1.64\nlcnet_050,45492.41,22.499,1024,224,0.05,1.26,1.88\nmobilenetv3_small_075,39215.51,26.102,1024,224,0.05,1.3,2.04\ntinynet_d,37346.61,27.409,1024,152,0.05,1.42,2.34\nmobilenetv3_small_100,36280.34,28.214,1024,224,0.06,1.42,2.54\ntf_mobilenetv3_small_minimal_100,31726.33,32.265,1024,224,0.06,1.41,2.04\ntf_mobilenetv3_small_075,31503.43,32.494,1024,224,0.05,1.3,2.04\nlcnet_075,29817.69,34.332,1024,224,0.1,1.99,2.36\ntf_mobilenetv3_small_100,29444.91,34.767,1024,224,0.06,1.42,2.54\nmnasnet_small,25354.86,40.376,1024,224,0.07,2.16,2.03\nlcnet_100,24134.76,42.417,1024,224,0.16,2.52,2.95\nregnetx_002,23983.4,42.686,1024,224,0.2,2.16,2.68\nlevit_128s,22675.73,45.148,1024,224,0.31,1.88,7.78\nregnety_002,21709.37,47.158,1024,224,0.2,2.17,3.16\nmobilenetv2_035,21673.44,47.236,1024,224,0.07,2.86,1.68\nmnasnet_050,20010.27,51.163,1024,224,0.11,3.07,2.22\nghostnet_050,18932.82,54.075,1024,224,0.05,1.77,2.59\ntinynet_c,18428.42,55.556,1024,184,0.11,2.87,2.46\nsemnasnet_050,17215.18,59.471,1024,224,0.11,3.44,2.08\nmobilenetv2_050,17194.94,59.542,1024,224,0.1,3.64,1.97\ncs3darknet_focus_s,16189.76,63.24,1024,256,0.69,2.7,3.27\nlcnet_150,15557.15,65.811,1024,224,0.34,3.79,4.5\ncs3darknet_s,15369.47,66.615,1024,256,0.72,2.97,3.28\nlevit_128,15337.67,66.754,1024,224,0.41,2.71,9.21\ngernet_s,15288.68,66.966,1024,224,0.75,2.65,8.17\nmobilenetv3_large_075,14216.3,72.019,1024,224,0.16,4.0,3.99\nmixer_s32_224,14182.92,72.188,1024,224,1.0,2.28,19.1\nvit_tiny_r_s16_p8_224,14125.39,72.482,1024,224,0.44,2.06,6.34\nresnet10t,14112.07,72.551,1024,224,1.1,2.43,5.44\nvit_small_patch32_224,13799.47,74.195,1024,224,1.15,2.5,22.88\nregnetx_004,13610.2,75.225,1024,224,0.4,3.14,5.16\nlevit_192,13524.14,75.706,1024,22
4,0.66,3.2,10.95\nmobilenetv3_rw,12956.58,79.021,1024,224,0.23,4.41,5.48\nhardcorenas_a,12803.61,79.966,1024,224,0.23,4.38,5.26\nmobilenetv3_large_100,12749.93,80.304,1024,224,0.23,4.41,5.48\nmnasnet_075,12532.36,81.697,1024,224,0.23,4.77,3.17\ntf_mobilenetv3_large_075,12186.51,84.017,1024,224,0.16,4.0,3.99\ntinynet_b,12083.18,84.735,1024,188,0.21,4.44,3.73\nregnety_004,11918.36,85.906,1024,224,0.41,3.89,4.34\ntf_mobilenetv3_large_minimal_100,11715.94,87.392,1024,224,0.22,4.4,3.92\nhardcorenas_c,11548.05,88.662,1024,224,0.28,5.01,5.52\nhardcorenas_b,11510.71,88.949,1024,224,0.26,5.09,5.18\nese_vovnet19b_slim_dw,11501.95,89.018,1024,224,0.4,5.28,1.9\nghostnet_100,11332.61,90.348,1024,224,0.15,3.55,5.18\nmnasnet_100,11138.43,91.923,1024,224,0.33,5.46,4.38\ngluon_resnet18_v1b,11098.78,92.252,1024,224,1.82,2.48,11.69\nresnet18,11083.1,92.383,1024,224,1.82,2.48,11.69\nswsl_resnet18,11062.48,92.555,1024,224,1.82,2.48,11.69\nssl_resnet18,11061.11,92.565,1024,224,1.82,2.48,11.69\ntf_mobilenetv3_large_100,11018.56,92.922,1024,224,0.23,4.41,5.48\nmnasnet_b1,10993.58,93.135,1024,224,0.33,5.46,4.38\nhardcorenas_d,10910.47,93.843,1024,224,0.3,4.93,7.5\nsemnasnet_075,10898.09,93.951,1024,224,0.23,5.54,2.91\nmobilenetv2_075,10893.76,93.988,1024,224,0.22,5.86,2.64\nseresnet18,10385.56,98.588,1024,224,1.82,2.49,11.78\nlegacy_seresnet18,10064.41,101.734,1024,224,1.82,2.49,11.78\nspnasnet_100,10009.21,102.296,1024,224,0.35,6.03,4.42\ntf_efficientnetv2_b0,9930.95,103.1,1024,224,0.73,4.77,7.14\nlevit_256,9858.1,103.863,1024,224,1.13,4.23,18.89\ntinynet_a,9720.11,105.337,1024,192,0.35,5.41,6.19\nhardcorenas_f,9714.91,105.393,1024,224,0.35,5.57,8.2\nsemnasnet_100,9623.78,106.393,1024,224,0.32,6.23,3.89\nmnasnet_a1,9623.77,106.393,1024,224,0.32,6.23,3.89\nmobilenetv2_100,9598.91,106.667,1024,224,0.31,6.68,3.5\nhardcorenas_e,9571.87,106.966,1024,224,0.35,5.65,8.07\ndla46_c,9568.4,107.007,1024,224,0.58,4.5,1.3\nefficientnet_lite0,9361.14,109.377,1024,224,0.4,6.74,4.65\nfbnetc_100,9352.03,109
.484,1024,224,0.4,6.51,5.57\nresnet18d,9334.83,109.687,1024,224,2.06,3.29,11.71\nese_vovnet19b_slim,9109.47,112.4,1024,224,1.69,3.52,3.17\nregnety_006,9097.63,112.542,1024,224,0.61,4.33,6.06\nregnetz_005,8607.49,118.955,1024,224,0.52,5.86,7.12\nxcit_nano_12_p16_224_dist,8577.2,119.375,1024,224,0.56,4.17,3.05\nxcit_nano_12_p16_224,8554.61,119.689,1024,224,0.56,4.17,3.05\nlevit_256d,8382.88,122.143,1024,224,1.4,4.93,26.21\nregnetx_006,8379.52,122.192,1024,224,0.61,3.98,6.2\nghostnet_130,8278.59,123.681,1024,224,0.24,4.6,7.36\ntf_efficientnet_lite0,8080.51,126.714,1024,224,0.4,6.74,4.65\nefficientnet_b0,7965.17,128.548,1024,224,0.4,6.75,5.29\nmnasnet_140,7779.42,131.618,1024,224,0.6,7.71,7.12\ndeit_tiny_distilled_patch16_224,7467.68,137.113,1024,224,1.27,6.01,5.91\nrexnetr_100,7464.12,137.179,1024,224,0.43,7.72,4.88\ndeit_tiny_patch16_224,7430.15,137.806,1024,224,1.26,5.97,5.72\nresnet14t,7429.68,137.815,1024,224,1.69,5.8,10.08\nvit_tiny_patch16_224,7424.93,137.902,1024,224,1.26,5.97,5.72\nregnetx_008,7394.88,138.463,1024,224,0.81,5.15,7.26\nmobilenetv2_110d,7247.12,141.287,1024,224,0.45,8.71,4.52\nhrnet_w18_small,7232.93,141.561,1024,224,1.61,5.72,13.19\ntf_efficientnet_b0,7016.18,145.938,1024,224,0.4,6.75,5.29\nregnety_008,6938.46,147.571,1024,224,0.81,5.25,6.26\nmobilevitv2_050,6848.87,149.503,1024,256,0.48,8.04,1.37\npit_ti_distilled_224,6811.68,150.317,1024,224,0.71,6.23,5.1\npit_ti_224,6784.24,150.927,1024,224,0.7,6.19,4.85\ngernet_m,6679.85,153.286,1024,224,3.02,5.24,21.14\nefficientnet_b1_pruned,6642.37,154.15,1024,240,0.4,6.21,6.33\nresnet34,6496.42,157.614,1024,224,3.67,3.74,21.8\ngluon_resnet34_v1b,6494.61,157.658,1024,224,3.67,3.74,21.8\ntv_resnet34,6481.01,157.989,1024,224,3.67,3.74,21.8\ntf_efficientnetv2_b1,6476.52,158.098,1024,240,1.21,7.34,8.14\nsemnasnet_140,6454.5,158.637,1024,224,0.6,8.87,6.11\nnf_regnet_b0,6452.24,158.693,1024,256,0.64,5.58,8.76\nese_vovnet19b_dw,6335.13,161.627,1024,224,1.34,8.25,6.54\nmobilenetv2_140,6271.56,163.266,1024,224,0.6,
9.57,6.11\nrexnet_100,6226.48,164.447,1024,224,0.41,7.44,4.8\nefficientnet_lite1,6187.91,165.472,1024,240,0.62,10.14,5.42\nefficientnet_es_pruned,6115.4,167.434,1024,224,1.81,8.73,5.44\nefficientnet_es,6115.12,167.443,1024,224,1.81,8.73,5.44\nvisformer_tiny,6103.09,167.772,1024,224,1.27,5.72,10.32\nseresnet34,6058.13,169.019,1024,224,3.67,3.74,21.96\nfbnetv3_b,6018.76,170.124,1024,256,0.55,9.1,8.6\nselecsls42,5953.76,171.98,1024,224,2.94,4.62,30.35\nselecsls42b,5921.2,172.924,1024,224,2.98,4.62,32.46\nresnet26,5895.21,173.69,1024,224,2.36,7.35,16.0\nedgenext_xx_small,5893.72,173.732,1024,288,0.33,4.21,1.33\nlevit_384,5880.4,174.126,1024,224,2.36,6.26,39.13\nresnet34d,5865.98,174.555,1024,224,3.91,4.54,21.82\nlegacy_seresnet34,5850.24,175.025,1024,224,3.67,3.74,21.96\ndla34,5827.3,175.712,1024,224,3.07,5.02,15.74\ntf_efficientnet_es,5781.29,177.112,1024,224,1.81,8.73,5.44\ncs3darknet_focus_m,5721.39,178.967,1024,288,2.51,6.19,9.3\nresnetblur18,5636.65,181.657,1024,224,2.34,3.39,11.69\nrexnetr_130,5590.0,183.173,1024,224,0.68,9.81,7.61\nmobilevit_xxs,5524.87,185.333,1024,256,0.42,8.34,1.27\ntf_efficientnet_lite1,5524.68,185.339,1024,240,0.62,10.14,5.42\ncs3darknet_m,5478.07,186.916,1024,288,2.63,6.69,9.31\nconvnext_atto,5460.54,187.516,1024,288,0.91,6.3,3.7\nxcit_tiny_12_p16_224_dist,5457.72,187.611,1024,224,1.24,6.29,6.72\nxcit_tiny_12_p16_224,5456.63,187.649,1024,224,1.24,6.29,6.72\nskresnet18,5413.1,189.159,1024,224,1.82,3.24,11.96\ndarknet17,5401.37,189.571,1024,256,3.26,7.18,14.3\nmixnet_s,5392.58,189.878,1024,224,0.25,6.25,4.13\nresmlp_12_224,5366.15,190.814,1024,224,3.01,5.5,15.35\nresmlp_12_distilled_224,5364.91,190.857,1024,224,3.01,5.5,15.35\nconvnext_atto_ols,5288.94,193.6,1024,288,0.96,6.8,3.7\nvit_base_patch32_clip_224,5280.68,193.903,1024,224,4.41,5.01,88.22\nvit_base_patch32_224,5280.52,193.908,1024,224,4.41,5.01,88.22\npit_xs_distilled_224,5272.13,194.218,1024,224,1.41,7.76,11.0\npit_xs_224,5271.0,194.259,1024,224,1.4,7.71,10.62\nrepvgg_b0,5252.66,194.
939,1024,224,3.41,6.15,15.82\nmixer_b32_224,5221.71,196.094,1024,224,3.24,6.29,60.29\npvt_v2_b0,5210.31,196.521,1024,224,0.57,7.99,3.67\nresnetaa34d,5171.78,197.986,1024,224,4.43,5.07,21.82\nselecsls60,5160.83,198.407,1024,224,3.59,5.52,30.67\nselecsls60b,5119.51,200.008,1024,224,3.63,5.52,32.77\nmobilenetv2_120d,5111.95,200.304,1024,224,0.69,11.97,5.83\nresnet26d,5108.26,200.449,1024,224,2.6,8.15,16.01\ngmixer_12_224,5064.97,202.162,1024,224,2.67,7.26,12.7\ngmlp_ti16_224,5007.93,204.464,1024,224,1.34,7.55,5.87\nmixer_s16_224,4998.69,204.842,1024,224,3.79,5.97,18.53\ntf_mixnet_s,4989.18,205.231,1024,224,0.25,6.25,4.13\nefficientnet_b0_g16_evos,4930.67,207.667,1024,224,1.01,7.42,8.11\nrexnetr_150,4900.22,208.959,1024,224,0.89,11.13,9.78\nfbnetv3_d,4881.14,209.776,1024,256,0.68,11.1,10.31\ndarknet21,4850.41,211.105,1024,256,3.93,7.47,20.86\nnf_resnet26,4816.48,212.591,1024,224,2.41,7.35,16.0\nefficientnet_lite2,4781.65,214.14,1024,260,0.89,12.9,6.09\nconvnext_femto,4749.12,215.607,1024,288,1.3,7.56,5.22\ntf_efficientnetv2_b2,4718.26,217.018,1024,260,1.72,9.84,10.1\nsedarknet21,4656.51,219.895,1024,256,3.93,7.47,20.95\ndla46x_c,4636.77,220.831,1024,224,0.54,5.66,1.07\nconvnext_femto_ols,4618.33,221.714,1024,288,1.35,8.06,5.23\nresnext26ts,4603.25,222.441,1024,256,2.43,10.52,10.3\nefficientformer_l1,4566.14,224.248,1024,224,1.3,5.53,12.29\ndpn48b,4506.78,227.201,1024,224,1.69,8.92,9.13\ncrossvit_tiny_240,4481.69,228.473,1024,240,1.57,9.08,7.01\ndla60x_c,4459.27,229.622,1024,224,0.59,6.01,1.32\neca_resnext26ts,4456.63,229.759,1024,256,2.43,10.52,10.3\nseresnext26ts,4453.99,229.896,1024,256,2.43,10.52,10.39\nlegacy_seresnext26_32x4d,4441.15,230.558,1024,224,2.49,9.39,16.79\ngernet_l,4396.56,232.898,1024,256,4.57,8.0,31.08\nmobilevitv2_075,4393.87,233.041,1024,256,1.05,12.06,2.87\ngcresnext26ts,4384.92,233.516,1024,256,2.43,10.53,10.48\ntf_efficientnet_b1,4370.6,234.282,1024,240,0.71,10.88,7.79\ntf_efficientnet_lite2,4293.9,238.467,1024,260,0.89,12.9,6.09\nrexnet_130,4262.
16,240.243,1024,224,0.68,9.71,7.56\nefficientnet_b1,4239.44,241.53,1024,256,0.77,12.22,7.79\nvit_small_patch32_384,4239.1,241.55,1024,384,3.45,8.25,22.92\ncrossvit_9_240,4212.37,243.082,1024,240,1.85,9.52,8.55\ncrossvit_9_dagger_240,4095.03,250.049,1024,240,1.99,9.97,8.78\nnf_ecaresnet26,4091.86,250.24,1024,224,2.41,7.36,16.0\nnf_seresnet26,4088.47,250.449,1024,224,2.41,7.36,17.4\nefficientnet_cc_b0_8e,4076.51,251.183,1024,224,0.42,9.42,24.01\nefficientnet_cc_b0_4e,4073.3,251.382,1024,224,0.41,9.42,13.31\necaresnet50d_pruned,4055.39,252.492,1024,224,2.53,6.43,19.94\nefficientnet_b2_pruned,4030.92,254.025,1024,260,0.73,9.13,8.31\necaresnext50t_32x4d,4018.73,254.796,1024,224,2.7,10.09,15.41\necaresnext26t_32x4d,4017.09,254.9,1024,224,2.7,10.09,15.41\nseresnext26t_32x4d,4014.43,255.069,1024,224,2.7,10.09,16.81\nseresnext26tn_32x4d,4014.36,255.074,1024,224,2.7,10.09,16.81\nrepvgg_a2,3987.84,256.77,1024,224,5.7,6.26,28.21\npoolformer_s12,3982.67,257.103,1024,224,1.82,5.53,11.92\nseresnext26d_32x4d,3979.57,257.303,1024,224,2.73,10.19,16.81\nvit_tiny_r_s16_p8_384,3963.05,258.374,1024,384,1.34,6.49,6.36\nresnet26t,3939.46,259.923,1024,256,3.35,10.52,16.01\nnf_regnet_b1,3911.64,261.772,1024,288,1.02,9.2,10.22\nrexnet_150,3881.93,263.775,1024,224,0.9,11.21,9.73\nnf_regnet_b2,3879.78,263.921,1024,272,1.22,9.27,14.31\nresnetv2_50,3865.49,264.896,1024,224,4.11,11.11,25.55\nregnetx_016,3852.41,265.794,1024,224,1.62,7.93,9.19\ntf_efficientnet_cc_b0_4e,3812.08,268.608,1024,224,0.41,9.42,13.31\ntf_efficientnet_cc_b0_8e,3803.67,269.202,1024,224,0.42,9.42,24.01\nconvnext_pico,3747.49,273.239,1024,288,2.27,10.08,9.05\necaresnetlight,3744.45,273.459,1024,224,4.11,8.42,30.16\ndpn68,3724.59,274.917,1024,224,2.35,10.47,12.61\nedgenext_x_small,3714.71,275.646,1024,288,0.68,7.5,2.34\ngluon_resnet50_v1b,3672.76,278.798,1024,224,4.11,11.11,25.56\nssl_resnet50,3671.85,278.866,1024,224,4.11,11.11,25.56\nefficientnet_em,3671.25,278.913,1024,240,3.04,14.34,6.9\nresnet50,3668.58,279.116,1024,224,4.
11,11.11,25.56\nswsl_resnet50,3668.32,279.136,1024,224,4.11,11.11,25.56\ntv_resnet50,3667.14,279.225,1024,224,4.11,11.11,25.56\ndpn68b,3667.07,279.229,1024,224,2.35,10.47,12.61\nrexnetr_200,3659.45,279.811,1024,224,1.59,15.11,16.52\nconvnext_pico_ols,3651.34,280.434,1024,288,2.37,10.74,9.06\nbotnet26t_256,3594.28,284.883,1024,256,3.32,11.98,12.49\nbat_resnext26ts,3569.91,286.828,1024,256,2.53,12.51,10.73\nresnetv2_50t,3547.32,288.657,1024,224,4.32,11.82,25.57\nmixnet_m,3537.26,289.477,1024,224,0.36,8.19,5.01\nregnety_016,3531.88,289.919,1024,224,1.63,8.04,11.2\ntf_efficientnet_em,3529.62,290.106,1024,240,3.04,14.34,6.9\nresnetv2_50d,3525.02,290.482,1024,224,4.35,11.92,25.57\nhalonet26t,3515.15,291.299,1024,256,3.19,11.69,12.48\nresnet32ts,3492.62,293.179,1024,256,4.63,11.58,17.96\nhrnet_w18_small_v2,3482.81,294.001,1024,224,2.62,9.65,15.6\ngluon_resnet50_v1c,3481.59,294.107,1024,224,4.35,11.92,25.58\ndla60,3466.91,295.351,1024,224,4.26,10.16,22.04\nresnet33ts,3460.78,295.875,1024,256,4.76,11.66,19.68\ntf_efficientnet_b2,3402.3,300.962,1024,260,1.02,13.83,9.11\nconvit_tiny,3399.61,301.199,1024,224,1.26,7.94,5.71\nresnet50t,3373.72,303.51,1024,224,4.32,11.82,25.57\ntf_mixnet_m,3366.38,304.167,1024,224,0.36,8.19,5.01\nefficientnet_b3_pruned,3360.1,304.74,1024,300,1.04,11.86,9.86\nseresnet33ts,3354.27,305.27,1024,256,4.76,11.66,19.78\nresnet50d,3351.47,305.527,1024,224,4.35,11.92,25.58\neca_resnet33ts,3350.95,305.574,1024,256,4.76,11.66,19.68\nvit_small_resnet26d_224,3346.77,305.954,1024,224,5.07,11.12,63.61\ncs3darknet_focus_l,3335.18,307.018,1024,288,5.9,10.16,21.15\ngluon_resnet50_v1d,3334.65,307.068,1024,224,4.35,11.92,25.58\nmobilevitv2_100,3324.63,307.994,1024,256,1.84,16.08,4.9\nvovnet39a,3320.12,308.408,1024,224,7.09,6.73,22.6\nlegacy_seresnet50,3312.33,309.135,1024,224,3.88,10.6,28.09\nefficientnet_b0_gn,3307.86,309.554,1024,224,0.42,6.75,5.29\ngcresnet33ts,3307.01,309.633,1024,256,4.76,11.68,19.88\npit_s_distilled_224,3301.25,310.173,1024,224,2.9,11.64,24.04\n
pit_s_224,3299.97,310.295,1024,224,2.88,11.56,23.46\nmobilevit_xs,3252.28,314.844,1024,256,1.05,16.33,2.32\ndeit_small_distilled_patch16_224,3233.6,316.663,1024,224,4.63,12.02,22.44\nefficientnet_b2a,3223.97,317.608,1024,288,1.12,16.2,9.11\nefficientnet_b2,3223.9,317.615,1024,288,1.12,16.2,9.11\ndeit_small_patch16_224,3218.99,318.1,1024,224,4.61,11.95,22.05\nvit_small_patch16_224,3218.38,318.16,1024,224,4.61,11.95,22.05\ncs3darknet_l,3210.26,318.965,1024,288,6.16,10.83,21.16\nese_vovnet39b,3206.21,319.369,1024,224,7.09,6.74,24.57\neca_vovnet39b,3203.77,319.612,1024,224,7.09,6.74,22.6\nconvnextv2_atto,3196.73,320.315,1024,288,0.91,6.3,3.71\ncoatnet_pico_rw_224,3189.82,321.008,1024,224,2.05,14.62,10.85\nseresnet50,3181.57,321.841,1024,224,4.11,11.13,28.09\npvt_v2_b1,3147.37,325.339,1024,224,2.12,15.39,14.01\ncoat_lite_tiny,3146.41,325.439,1024,224,1.6,11.65,5.72\nres2net50_48w_2s,3127.52,327.404,1024,224,4.18,11.72,25.29\neca_botnext26ts_256,3112.32,329.003,1024,256,2.46,11.6,10.59\necaresnet101d_pruned,3103.16,329.973,1024,224,3.48,7.69,24.88\nefficientnet_b0_g8_gn,3073.2,333.192,1024,224,0.66,6.75,6.56\nssl_resnext50_32x4d,3071.68,333.356,1024,224,4.26,14.4,25.03\ndla60x,3071.64,333.359,1024,224,3.54,13.8,17.35\nswsl_resnext50_32x4d,3070.7,333.464,1024,224,4.26,14.4,25.03\ntv_resnext50_32x4d,3069.81,333.56,1024,224,4.26,14.4,25.03\nresnext50_32x4d,3069.72,333.57,1024,224,4.26,14.4,25.03\ngluon_resnext50_32x4d,3068.47,333.704,1024,224,4.26,14.4,25.03\nvit_small_r26_s32_224,3061.92,334.417,1024,224,3.56,9.85,36.43\nskresnet34,3055.95,335.073,1024,224,3.67,5.13,22.28\ndeit3_small_patch16_224_in21ft1k,3048.82,335.855,1024,224,4.61,11.95,22.06\ndeit3_small_patch16_224,3047.23,336.031,1024,224,4.61,11.95,22.06\neca_halonext26ts,3035.71,337.305,1024,256,2.44,11.46,10.76\nhaloregnetz_b,3032.47,337.665,1024,224,1.97,11.94,11.68\nvit_relpos_base_patch32_plus_rpn_256,3026.45,338.338,1024,256,7.68,8.01,119.42\nvit_relpos_small_patch16_rpn_224,3019.95,339.067,1024,224,4.59,13.05
,21.97\nvit_relpos_small_patch16_224,3008.26,340.383,1024,224,4.59,13.05,21.98\nvit_srelpos_small_patch16_224,3000.96,341.213,1024,224,4.59,12.16,21.97\nxcit_nano_12_p16_384_dist,3000.48,341.266,1024,384,1.64,12.15,3.05\ncs3sedarknet_l,2995.41,341.845,1024,288,6.16,10.83,21.91\nresnetaa50d,2993.03,342.116,1024,224,5.39,12.44,25.58\nvgg11,2983.47,85.796,256,224,7.61,7.44,132.86\nselecsls84,2973.16,344.402,1024,224,5.9,7.57,50.95\nresnetrs50,2963.42,345.535,1024,224,4.48,12.14,35.69\nseresnet50t,2957.12,346.271,1024,224,4.32,11.83,28.1\nresnest14d,2954.69,346.556,1024,224,2.76,7.33,10.61\ngluon_resnet50_v1s,2953.65,346.677,1024,224,5.47,13.52,25.68\ncoat_lite_mini,2952.61,346.799,1024,224,2.0,12.25,11.01\necaresnet50d,2945.96,347.583,1024,224,4.35,11.93,25.58\ndensenet121,2933.45,349.064,1024,224,2.87,6.9,7.98\ntv_densenet121,2929.69,349.514,1024,224,2.87,6.9,7.98\nvit_base_patch32_plus_256,2929.65,349.519,1024,256,7.79,7.76,119.48\nrexnet_200,2927.94,349.723,1024,224,1.56,14.91,16.37\nxcit_tiny_24_p16_224_dist,2927.0,349.834,1024,224,2.34,11.82,12.12\nxcit_tiny_24_p16_224,2921.97,350.436,1024,224,2.34,11.82,12.12\ncoatnet_nano_cc_224,2867.38,357.108,1024,224,2.24,15.02,13.76\ngcresnext50ts,2857.34,358.363,1024,256,3.75,15.46,15.67\nlambda_resnet26rpt_256,2853.55,358.839,1024,256,3.16,11.87,10.99\nresnext50d_32x4d,2845.08,359.908,1024,224,4.5,15.2,25.05\nmixnet_l,2828.6,361.996,1024,224,0.58,10.84,7.33\ndensenet121d,2824.08,362.584,1024,224,3.11,7.7,8.0\nefficientnet_lite3,2821.84,362.87,1024,300,1.65,21.85,8.2\ncspresnet50,2793.65,366.534,1024,256,4.54,11.5,21.62\ncoatnet_nano_rw_224,2781.93,368.077,1024,224,2.41,15.41,15.14\nvgg11_bn,2760.38,370.949,1024,224,7.62,7.44,132.87\nvovnet57a,2755.77,371.572,1024,224,8.95,7.52,36.64\nresmlp_24_224,2750.33,372.306,1024,224,5.96,10.91,30.02\nresmlp_24_distilled_224,2740.33,373.665,1024,224,5.96,10.91,30.02\nconvnextv2_femto,2735.91,374.269,1024,288,1.3,7.56,5.23\nflexivit_small,2735.78,374.287,1024,240,5.35,14.18,22.06\ngcre
snet50t,2732.04,374.8,1024,256,5.42,14.67,25.9\nlegacy_seresnext50_32x4d,2722.84,376.065,1024,224,4.26,14.42,27.56\nseresnext50_32x4d,2721.47,376.256,1024,224,4.26,14.42,27.56\ngluon_seresnext50_32x4d,2720.58,376.379,1024,224,4.26,14.42,27.56\nvisformer_small,2719.93,376.468,1024,224,4.88,11.43,40.22\ntwins_svt_small,2713.39,377.374,1024,224,2.94,13.75,24.06\nresnetv2_50x1_bit_distilled,2708.81,378.014,1024,224,4.23,11.11,25.55\nres2net50_14w_8s,2692.9,380.248,1024,224,4.21,13.28,25.06\nresnetblur50,2685.97,381.228,1024,224,5.16,12.02,25.56\nvit_base_resnet26d_224,2684.6,381.421,1024,224,6.97,13.16,101.4\ntf_mixnet_l,2680.8,381.958,1024,224,0.58,10.84,7.33\nseresnetaa50d,2658.93,385.106,1024,224,5.4,12.46,28.11\ndla60_res2net,2656.16,385.506,1024,224,4.15,12.34,20.85\ncspresnet50d,2655.05,385.668,1024,256,4.86,12.55,21.64\ncoatnext_nano_rw_224,2655.0,385.674,1024,224,2.47,12.8,14.7\nese_vovnet57b,2654.33,385.773,1024,224,8.95,7.52,38.61\ntf_efficientnetv2_b3,2654.14,385.8,1024,300,3.04,15.74,14.36\ncspresnet50w,2641.68,387.621,1024,256,5.04,12.19,28.12\nres2net50_26w_4s,2629.64,389.395,1024,224,4.28,12.61,25.7\nregnetz_b16,2626.71,389.828,1024,288,2.39,16.43,9.72\nconvnext_nano,2611.78,392.059,1024,288,4.06,13.84,15.59\nefficientnetv2_rw_t,2601.49,393.609,1024,288,3.19,16.42,13.65\nfbnetv3_g,2595.29,394.549,1024,288,1.77,21.09,16.62\ngmixer_24_224,2595.15,394.571,1024,224,5.28,14.45,24.72\nmobilevit_s,2586.09,395.952,1024,256,2.03,19.94,5.58\ncoatnet_rmlp_nano_rw_224,2569.7,398.478,1024,224,2.62,20.34,15.15\ngcvit_xxtiny,2561.41,399.768,1024,224,2.14,15.36,12.0\ntf_efficientnet_lite3,2530.94,404.582,1024,300,1.65,21.85,8.2\nefficientnet_cc_b1_8e,2530.65,404.628,1024,240,0.75,15.44,39.72\ndensenetblur121d,2522.66,405.908,1024,224,3.11,7.9,8.0\nresnetblur50d,2509.45,408.045,1024,224,5.4,12.82,25.58\nnf_ecaresnet50,2490.39,411.168,1024,224,4.21,11.13,25.56\ninception_v3,2485.21,412.025,1024,299,5.73,8.97,23.83\nnf_seresnet50,2482.66,412.449,1024,224,4.21,11.13,28.09\nt
f_inception_v3,2481.38,412.658,1024,299,5.73,8.97,23.83\ngc_efficientnetv2_rw_t,2480.59,412.793,1024,288,3.2,16.45,13.68\nadv_inception_v3,2479.41,412.983,1024,299,5.73,8.97,23.83\nrepvgg_b1g4,2473.34,414.003,1024,224,8.15,10.64,39.97\nmobilevitv2_125,2472.28,414.18,1024,256,2.86,20.1,7.48\ngluon_inception_v3,2468.42,414.827,1024,299,5.73,8.97,23.83\nnf_regnet_b3,2461.52,415.991,1024,320,2.05,14.61,18.59\nxcit_small_12_p16_224_dist,2446.89,418.478,1024,224,4.82,12.58,26.25\nxcit_small_12_p16_224,2446.42,418.558,1024,224,4.82,12.58,26.25\ncspresnext50,2438.96,419.836,1024,256,4.05,15.86,20.57\nconvnext_nano_ols,2435.0,420.521,1024,288,4.38,15.5,15.65\nregnetx_032,2429.42,421.489,1024,224,3.2,11.37,15.3\ndensenet169,2426.29,422.031,1024,224,3.4,7.3,14.15\nsehalonet33ts,2419.4,423.234,1024,256,3.55,14.7,13.69\ntf_efficientnet_cc_b1_8e,2406.19,425.557,1024,240,0.75,15.44,39.72\nsemobilevit_s,2402.02,426.294,1024,256,2.03,19.95,5.74\nresnetv2_101,2330.6,439.36,1024,224,7.83,16.23,44.54\ntwins_pcpvt_small,2312.72,442.754,1024,224,3.83,18.08,24.11\nxcit_nano_12_p8_224_dist,2295.5,446.077,1024,224,2.16,15.71,3.05\nxcit_nano_12_p8_224,2292.87,446.587,1024,224,2.16,15.71,3.05\ngmlp_s16_224,2290.73,447.007,1024,224,4.42,15.1,19.42\ncs3darknet_focus_x,2287.2,447.697,1024,256,8.03,10.69,35.02\nvit_base_r26_s32_224,2275.25,450.047,1024,224,6.81,12.36,101.38\ngluon_resnet101_v1b,2260.37,453.01,1024,224,7.83,16.23,44.55\ntv_resnet101,2258.59,453.368,1024,224,7.83,16.23,44.55\nresnet101,2258.28,453.43,1024,224,7.83,16.23,44.55\nskresnet50,2234.62,458.23,1024,224,4.11,12.5,25.8\necaresnet26t,2232.29,458.709,1024,320,5.24,16.44,16.01\nedgenext_small,2226.69,459.86,1024,320,1.97,14.16,5.59\ndla102,2219.96,461.255,1024,224,7.19,14.18,33.27\nres2next50,2214.71,462.347,1024,224,4.2,13.71,24.67\ndla60_res2next,2210.67,463.194,1024,224,3.49,13.17,17.03\nresnetv2_101d,2203.82,464.633,1024,224,8.07,17.04,44.56\ngluon_resnet101_v1c,2194.65,466.578,1024,224,8.08,17.04,44.57\nresnest26d,2170.04,
471.869,1024,224,3.64,9.97,17.07\nvgg13,2149.71,476.331,1024,224,11.31,12.25,133.05\ngluon_resnet101_v1d,2137.49,479.053,1024,224,8.08,17.04,44.57\nskresnet50d,2115.22,484.098,1024,224,4.36,13.31,25.82\nconvnextv2_pico,2108.5,485.64,1024,288,2.27,10.08,9.07\nvit_base_resnet50d_224,2101.17,487.333,1024,224,8.73,16.92,110.97\ncoatnet_0_rw_224,2082.49,491.706,1024,224,4.43,18.73,27.44\ncrossvit_small_240,2081.5,491.94,1024,240,5.63,18.17,26.86\ndeit3_medium_patch16_224_in21ft1k,2076.53,493.118,1024,224,8.0,15.93,38.85\ndeit3_medium_patch16_224,2072.34,494.116,1024,224,8.0,15.93,38.85\nmobilevitv2_150,2071.36,494.349,1024,256,4.09,24.11,10.59\nmobilevitv2_150_in22ft1k,2070.3,494.603,1024,256,4.09,24.11,10.59\nsebotnet33ts_256,2067.91,247.581,512,256,3.89,17.46,13.7\nwide_resnet50_2,2057.08,497.78,1024,224,11.43,14.4,68.88\nvit_relpos_medium_patch16_rpn_224,2044.85,500.757,1024,224,7.97,17.02,38.73\nefficientformer_l3,2041.79,501.507,1024,224,3.93,12.01,31.41\npoolformer_s24,2040.35,501.863,1024,224,3.41,10.68,21.39\nvit_relpos_medium_patch16_224,2037.47,502.572,1024,224,7.97,17.02,38.75\ncspdarknet53,2035.94,502.949,1024,256,6.57,16.81,27.64\nresnet51q,2034.41,503.329,1024,288,8.07,20.94,35.7\nvit_srelpos_medium_patch16_224,2033.15,503.638,1024,224,7.96,16.21,38.74\nmaxvit_rmlp_pico_rw_256,2008.78,509.748,1024,256,1.85,24.86,7.52\nvit_relpos_medium_patch16_cls_224,2007.24,510.141,1024,224,8.03,18.24,38.76\ndla102x,2006.55,510.315,1024,224,5.89,19.42,26.31\nlegacy_seresnet101,2003.12,511.188,1024,224,7.61,15.74,49.33\nswin_tiny_patch4_window7_224,1995.14,513.235,1024,224,4.51,17.06,28.29\nrepvgg_b1,1985.42,515.747,1024,224,13.16,10.64,57.42\nresnetaa101d,1982.98,516.381,1024,224,9.12,17.56,44.57\ncoatnet_rmlp_0_rw_224,1981.75,516.703,1024,224,4.72,24.89,27.45\ntf_efficientnet_b3,1975.92,518.226,1024,300,1.87,23.83,12.23\ngcvit_xtiny,1969.68,519.869,1024,224,2.93,20.26,19.98\nhrnet_w18,1967.17,520.531,1024,224,4.32,16.31,21.3\ngluon_resnet101_v1s,1965.68,520.926,1024,224,
9.19,18.64,44.67\nmaxvit_pico_rw_256,1965.38,521.006,1024,256,1.83,22.3,7.46\nresnetaa50,1958.15,522.93,1024,288,8.52,19.24,25.56\nseresnet101,1954.63,523.871,1024,224,7.84,16.27,49.33\nefficientnet_b3,1949.54,525.239,1024,320,2.01,26.52,12.23\nefficientnet_b3a,1949.11,525.356,1024,320,2.01,26.52,12.23\nedgenext_small_rw,1932.68,529.816,1024,320,2.46,14.85,7.83\nregnetx_040,1932.62,529.839,1024,224,3.99,12.2,22.12\ncs3sedarknet_xdw,1925.4,531.825,1024,256,5.97,17.18,21.6\ncoatnet_bn_0_rw_224,1920.71,533.123,1024,224,4.67,22.04,27.44\nxcit_tiny_12_p16_384_dist,1911.65,535.652,1024,384,3.64,18.26,6.72\nssl_resnext101_32x4d,1910.73,535.909,1024,224,8.01,21.23,44.18\nswsl_resnext101_32x4d,1910.43,535.993,1024,224,8.01,21.23,44.18\nresnext101_32x4d,1909.99,536.115,1024,224,8.01,21.23,44.18\ngluon_resnext101_32x4d,1909.34,536.298,1024,224,8.01,21.23,44.18\ndarknet53,1903.77,537.866,1024,288,11.78,15.68,41.61\ndarknetaa53,1898.12,539.468,1024,288,10.08,15.68,36.02\ncrossvit_15_240,1892.46,541.083,1024,240,5.81,19.77,27.53\nhalonet50ts,1881.53,544.226,1024,256,5.3,19.2,22.73\nvgg13_bn,1879.72,544.749,1024,224,11.33,12.25,133.05\nmixnet_xl,1872.46,546.86,1024,224,0.93,14.57,11.9\nres2net50_26w_6s,1870.88,547.321,1024,224,6.33,15.28,37.05\necaresnet101d,1869.88,547.616,1024,224,8.08,17.07,44.57\ndensenet201,1869.57,547.706,1024,224,4.34,7.85,20.01\nnf_resnet101,1858.48,550.976,1024,224,8.01,16.23,44.55\ncoatnet_0_224,1857.28,275.661,512,224,4.58,24.01,25.04\npvt_v2_b2,1854.85,552.053,1024,224,4.05,27.53,25.36\ncrossvit_15_dagger_240,1850.69,553.295,1024,240,6.13,20.43,28.21\nresmlp_36_224,1846.41,554.574,1024,224,8.91,16.33,44.69\nresmlp_36_distilled_224,1845.04,554.99,1024,224,8.91,16.33,44.69\nresnet61q,1841.84,555.954,1024,288,9.87,21.52,36.85\nswin_s3_tiny_224,1817.5,563.398,1024,224,4.64,19.13,28.33\ncait_xxs24_224,1796.55,569.968,1024,224,2.53,20.29,11.96\ncs3darknet_x,1789.33,572.268,1024,288,10.6,14.36,35.05\nvit_medium_patch16_gap_240,1785.54,573.481,1024,240,9.22,18
.81,44.4\nnf_resnet50,1784.84,573.708,1024,288,6.88,18.37,25.56\nresnet50_gn,1764.31,580.385,1024,224,4.14,11.11,25.56\nmixer_b16_224_miil,1761.45,581.327,1024,224,12.62,14.53,59.88\nmixer_b16_224,1759.76,581.885,1024,224,12.62,14.53,59.88\nresnetblur101d,1757.96,582.482,1024,224,9.12,17.94,44.57\neca_nfnet_l0,1726.58,593.068,1024,288,7.12,17.29,24.14\nnfnet_l0,1721.83,594.705,1024,288,7.13,17.29,35.07\nvit_large_patch32_224,1717.59,596.169,1024,224,15.41,13.32,327.9\nvgg16,1717.44,596.224,1024,224,15.47,13.56,138.36\nregnetz_c16,1710.89,598.505,1024,320,3.92,25.88,13.46\npvt_v2_b2_li,1709.89,598.855,1024,224,3.91,27.6,22.55\nresnest50d_1s4x24d,1705.52,600.391,1024,224,4.43,13.57,25.68\ncoat_lite_small,1704.55,600.733,1024,224,3.96,22.09,19.84\nresnetv2_50d_frn,1697.1,603.368,1024,224,4.33,11.92,25.59\ncs3sedarknet_x,1689.8,605.975,1024,288,10.6,14.37,35.4\nseresnext101_32x4d,1687.65,606.747,1024,224,8.02,21.26,48.96\ngluon_seresnext101_32x4d,1687.1,606.945,1024,224,8.02,21.26,48.96\nlegacy_seresnext101_32x4d,1684.69,607.813,1024,224,8.02,21.26,48.96\nregnetv_040,1682.92,608.454,1024,288,6.6,20.3,20.64\nmobilevitv2_175,1677.66,457.769,768,256,5.54,28.13,14.25\nregnety_040,1677.03,610.59,1024,288,6.61,20.3,20.65\nmobilevitv2_175_in22ft1k,1677.0,457.949,768,256,5.54,28.13,14.25\nconvnext_tiny_hnf,1676.16,610.908,1024,288,7.39,22.21,28.59\nres2net101_26w_4s,1675.37,611.195,1024,224,8.1,18.45,45.21\nvit_tiny_patch16_384,1665.76,614.72,1024,384,4.7,25.39,5.79\nsequencer2d_s,1661.32,616.362,1024,224,4.96,11.31,27.65\nese_vovnet39b_evos,1661.21,616.404,1024,224,7.07,6.74,24.58\nvit_base_patch32_384,1649.27,620.868,1024,384,13.06,16.5,88.3\nvit_base_patch32_clip_384,1648.64,621.105,1024,384,13.06,16.5,88.3\nmixer_l32_224,1645.23,622.393,1024,224,11.27,19.86,206.94\nconvnext_tiny,1642.14,623.562,1024,288,7.39,22.21,28.59\nbotnet50ts_256,1639.64,312.25,512,256,5.54,22.23,22.74\nswinv2_cr_tiny_224,1630.02,628.199,1024,224,4.66,28.45,28.33\nresnetv2_50d_evob,1627.44,629.196,102
4,224,4.33,11.92,25.59\ntwins_pcpvt_base,1615.12,633.996,1024,224,6.68,25.25,43.83\nresnetv2_152,1614.43,634.268,1024,224,11.55,22.56,60.19\nhrnet_w32,1605.06,637.96,1024,224,8.97,22.02,41.23\nswinv2_cr_tiny_ns_224,1600.43,639.811,1024,224,4.66,28.45,28.33\nxception41p,1598.79,480.351,768,299,9.25,39.86,26.91\ntv_resnet152,1582.54,647.049,1024,224,11.56,22.56,60.19\ngluon_resnet152_v1b,1581.57,647.444,1024,224,11.56,22.56,60.19\nresnet152,1581.02,647.671,1024,224,11.56,22.56,60.19\nxception,1579.88,648.138,1024,299,8.4,35.83,22.86\nhalo2botnet50ts_256,1572.75,651.076,1024,256,5.02,21.78,22.64\nres2net50_26w_8s,1568.85,652.695,1024,224,8.37,17.95,48.4\nvit_medium_patch16_gap_256,1564.22,654.626,1024,256,10.59,22.15,38.86\nresnetv2_152d,1557.03,657.648,1024,224,11.8,23.36,60.2\nefficientnet_el_pruned,1555.14,658.449,1024,300,8.0,30.7,10.59\nmaxvit_rmlp_nano_rw_256,1551.85,659.845,1024,256,4.47,31.92,15.5\nregnetx_064,1550.52,660.413,1024,224,6.49,16.37,26.21\nefficientnet_el,1549.97,660.646,1024,300,8.0,30.7,10.59\ngluon_resnet152_v1c,1548.96,661.078,1024,224,11.8,23.36,60.21\nnf_ecaresnet101,1546.58,662.091,1024,224,8.01,16.27,44.55\nnf_seresnet101,1539.38,665.191,1024,224,8.02,16.27,49.33\nmvitv2_tiny,1537.54,665.985,1024,224,4.7,21.16,24.17\nnfnet_f0,1525.01,671.456,1024,256,12.62,18.05,71.49\nvgg16_bn,1523.86,671.963,1024,224,15.5,13.56,138.37\ncs3edgenet_x,1521.21,673.136,1024,288,14.59,16.36,47.82\ngluon_resnet152_v1d,1520.11,673.621,1024,224,11.8,23.36,60.21\nmaxvit_nano_rw_256,1517.43,674.812,1024,256,4.46,30.28,15.45\ntf_efficientnet_el,1506.16,679.862,1024,300,8.0,30.7,10.59\nconvnextv2_nano,1500.71,511.746,768,288,4.06,13.84,15.62\nresnest50d,1492.63,686.022,1024,224,5.4,14.36,27.48\nese_vovnet99b,1489.17,687.617,1024,224,16.51,11.27,63.2\ndla169,1471.11,696.059,1024,224,11.6,20.2,53.39\nregnety_032,1467.85,697.604,1024,288,5.29,18.61,19.44\nskresnext50_32x4d,1463.28,699.785,1024,224,4.5,17.18,27.48\nxcit_tiny_12_p8_224_dist,1458.7,701.981,1024,224,4.81,23.
6,6.71\nxcit_tiny_12_p8_224,1458.23,702.211,1024,224,4.81,23.6,6.71\nconvit_small,1457.54,702.541,1024,224,5.76,17.87,27.78\nmobilevitv2_200_in22ft1k,1456.59,527.247,768,256,7.22,32.15,18.45\nmobilevitv2_200,1456.02,527.451,768,256,7.22,32.15,18.45\necaresnet50t,1438.32,711.929,1024,320,8.82,24.13,25.57\ngluon_resnet152_v1s,1432.22,714.961,1024,224,12.92,24.96,60.32\nnest_tiny,1415.33,542.618,768,224,5.83,25.48,17.06\nregnety_040s_gn,1412.65,724.867,1024,224,4.03,12.29,20.65\nvgg19,1393.71,183.67,256,224,19.63,14.86,143.67\njx_nest_tiny,1389.62,552.657,768,224,5.83,25.48,17.06\nlegacy_seresnet152,1383.83,739.96,1024,224,11.33,22.08,66.82\ndensenet161,1376.52,743.891,1024,224,7.79,11.06,28.68\npoolformer_s36,1370.67,747.069,1024,224,5.0,15.82,30.86\nvit_small_resnet50d_s16_224,1367.59,748.748,1024,224,13.48,24.82,57.53\ntwins_svt_base,1362.65,751.463,1024,224,8.59,26.33,56.07\nseresnet152,1361.7,751.99,1024,224,11.57,22.61,66.82\nxception41,1356.44,566.173,768,299,9.28,39.86,26.97\nmaxvit_tiny_rw_224,1350.45,758.254,1024,224,5.11,33.11,29.06\ncrossvit_18_240,1348.85,759.154,1024,240,9.05,26.26,43.27\nmaxxvit_rmlp_nano_rw_256,1347.73,759.767,1024,256,4.37,26.05,16.78\nefficientnet_lite4,1343.74,571.528,768,380,4.04,45.66,13.01\ngcvit_tiny,1339.65,764.364,1024,224,4.79,29.82,28.22\npvt_v2_b3,1325.92,772.282,1024,224,6.92,37.7,45.24\ncrossvit_18_dagger_240,1313.78,779.419,1024,240,9.5,27.03,44.27\nvolo_d1_224,1312.37,780.255,1024,224,6.94,24.43,26.63\nxcit_small_24_p16_224_dist,1307.3,783.278,1024,224,9.1,23.64,47.67\ntresnet_m,1305.71,784.234,1024,224,5.74,7.31,31.39\ninception_v4,1305.41,784.412,1024,299,12.28,15.09,42.68\nrepvgg_b2,1305.22,784.529,1024,224,20.45,12.9,89.02\nxcit_small_24_p16_224,1303.71,785.433,1024,224,9.1,23.64,47.67\nsequencer2d_m,1295.72,790.281,1024,224,6.55,14.26,38.31\nedgenext_base,1283.77,797.633,1024,320,6.01,24.32,18.51\nhrnet_w30,1280.53,799.653,1024,224,8.15,21.21,37.71\ndm_nfnet_f0,1275.46,802.834,1024,256,12.62,18.05,71.49\ncoatnet_rml
p_1_rw_224,1268.37,807.322,1024,224,7.85,35.47,41.69\nmaxxvitv2_nano_rw_256,1259.7,812.877,1024,256,6.26,23.05,23.7\nefficientnetv2_s,1254.49,816.255,1024,384,8.44,35.77,21.46\nvgg19_bn,1246.52,205.36,256,224,19.66,14.86,143.68\nnf_regnet_b4,1235.79,828.604,1024,384,4.7,28.61,30.21\nswin_small_patch4_window7_224,1235.74,828.641,1024,224,8.77,27.47,49.61\ntf_efficientnet_lite4,1232.22,623.25,768,380,4.04,45.66,13.01\nregnetz_d32,1223.51,836.919,1024,320,9.33,37.08,27.58\nmixnet_xxl,1219.27,629.871,768,224,2.04,23.43,23.96\ntf_efficientnetv2_s,1219.16,839.906,1024,384,8.44,35.77,21.46\ndeit_base_patch16_224,1213.08,844.121,1024,224,17.58,23.9,86.57\ndeit_base_distilled_patch16_224,1212.98,844.19,1024,224,17.68,24.05,87.34\nvit_base_patch16_clip_224,1211.82,844.996,1024,224,17.58,23.9,86.57\nvit_base_patch16_224_miil,1211.26,845.389,1024,224,17.59,23.91,94.4\ndpn92,1210.45,845.948,1024,224,6.54,18.21,37.67\nvit_base_patch16_224,1210.28,846.074,1024,224,17.58,23.9,86.57\ncoatnet_rmlp_1_rw2_224,1208.65,847.215,1024,224,8.11,40.13,41.72\ncait_xxs36_224,1205.51,849.419,1024,224,3.77,30.34,17.3\nmaxvit_tiny_tf_224,1200.3,639.828,768,224,5.6,35.78,30.92\nswinv2_tiny_window8_256,1200.06,853.274,1024,256,5.96,24.57,28.35\nefficientnetv2_rw_s,1199.87,853.413,1024,384,8.72,38.03,23.94\ndla102x2,1198.52,854.374,1024,224,9.34,29.91,41.28\nregnetx_160,1195.08,856.833,1024,224,15.99,25.52,54.28\ndpn98,1183.92,864.908,1024,224,11.73,25.2,61.57\nvit_base_patch16_rpn_224,1180.39,867.498,1024,224,17.49,23.75,86.54\ntwins_pcpvt_large,1168.64,876.22,1024,224,9.84,35.82,60.99\ndeit3_base_patch16_224,1164.77,879.134,1024,224,17.58,23.9,86.59\ndeit3_base_patch16_224_in21ft1k,1164.5,879.334,1024,224,17.58,23.9,86.59\nregnetz_d8,1163.64,879.982,1024,320,6.19,37.08,23.37\nswsl_resnext101_32x8d,1158.15,884.156,1024,224,16.48,31.21,88.79\nresnext101_32x8d,1158.05,884.232,1024,224,16.48,31.21,88.79\nssl_resnext101_32x8d,1158.02,884.255,1024,224,16.48,31.21,88.79\nwide_resnet101_2,1157.66,884.531,1
024,224,22.8,21.23,126.89\nig_resnext101_32x8d,1157.3,884.8,1024,224,16.48,31.21,88.79\ncoatnet_1_rw_224,1155.72,886.014,1024,224,8.04,34.6,41.72\nvit_base_patch16_gap_224,1154.73,886.777,1024,224,17.49,25.59,86.57\nvit_base_patch32_clip_448,1154.21,887.173,1024,448,17.93,23.9,88.34\nresnet200,1149.71,890.646,1024,224,15.07,32.19,64.67\nmvitv2_small,1146.92,892.812,1024,224,7.0,28.08,34.87\nxception65p,1145.07,670.686,768,299,13.91,52.48,39.82\ncs3se_edgenet_x,1143.17,895.738,1024,320,18.01,20.21,50.72\nvit_relpos_base_patch16_rpn_224,1143.15,895.76,1024,224,17.51,24.97,86.41\nvit_relpos_base_patch16_224,1141.31,897.204,1024,224,17.51,24.97,86.43\ntnt_s_patch16_224,1135.32,901.935,1024,224,5.24,24.37,23.76\nresnetrs101,1134.67,902.454,1024,288,13.56,28.53,63.62\nvit_relpos_base_patch16_clsgap_224,1128.94,907.03,1024,224,17.6,25.12,86.43\nvit_relpos_base_patch16_cls_224,1126.78,908.771,1024,224,17.6,25.12,86.43\ninception_resnet_v2,1126.73,908.809,1024,299,13.18,25.06,55.84\nens_adv_inception_resnet_v2,1125.41,909.877,1024,299,13.18,25.06,55.84\nbeit_base_patch16_224,1112.26,920.631,1024,224,17.58,23.9,86.53\ncoat_tiny,1108.72,923.572,1024,224,4.35,27.2,5.5\nbeitv2_base_patch16_224,1108.55,923.711,1024,224,17.58,23.9,86.53\nmvitv2_small_cls,1101.66,929.491,1024,224,7.04,28.17,34.87\nresnetv2_50d_gn,1092.35,937.413,1024,288,7.24,19.7,25.57\npit_b_distilled_224,1078.48,474.731,512,224,12.5,33.07,74.79\npit_b_224,1075.34,476.117,512,224,12.42,32.94,73.76\nhrnet_w40,1059.78,966.217,1024,224,12.75,25.29,57.56\ncoatnet_1_224,1045.17,489.859,512,224,8.7,39.0,42.23\nresnet101d,1039.88,984.712,1024,320,16.48,34.77,44.57\nflexivit_base,1037.21,987.248,1024,240,20.29,28.36,86.59\ngluon_resnext101_64x4d,1034.86,989.491,1024,224,15.52,31.21,83.46\nvit_small_patch16_36x1_224,1033.13,991.146,1024,224,13.71,35.69,64.67\nvit_large_r50_s32_224,1030.67,993.517,1024,224,19.58,24.41,328.99\nmaxvit_rmlp_tiny_rw_256,1029.25,746.162,768,256,6.77,46.92,29.15\nxcit_tiny_24_p16_384_dist,1027.6
4,996.444,1024,384,6.87,34.29,12.12\nefficientnet_b4,1014.08,504.879,512,384,4.51,50.04,19.34\nmaxvit_tiny_rw_256,1008.0,1015.861,1024,256,6.74,44.35,29.07\nvit_small_patch16_18x2_224,1006.7,1017.169,1024,224,13.71,35.69,64.67\nswinv2_cr_small_224,1005.28,1018.603,1024,224,9.07,50.27,49.7\nregnetx_080,1004.51,1019.384,1024,224,8.02,14.06,39.57\nrepvgg_b3,994.23,1029.925,1024,224,29.16,15.1,123.09\nswinv2_cr_small_ns_224,993.75,1030.424,1024,224,9.08,50.27,49.7\nrepvgg_b2g4,988.97,1035.405,1024,224,12.63,12.9,61.76\nconvnext_small,988.3,1036.113,1024,288,14.39,35.65,50.22\ngluon_xception65,987.82,777.458,768,299,13.96,52.48,39.92\nvit_small_r26_s32_384,982.68,1042.031,1024,384,10.43,29.85,36.47\nxception65,978.83,784.597,768,299,13.96,52.48,39.92\nregnetz_040,975.77,787.056,768,320,6.35,37.78,27.12\nregnetz_040h,971.51,790.512,768,320,6.43,37.94,28.94\ngluon_seresnext101_64x4d,965.3,1060.794,1024,224,15.53,31.25,88.23\nmaxvit_tiny_pm_256,964.03,1062.189,1024,256,6.61,47.9,30.09\nefficientformer_l7,962.55,1063.825,1024,224,10.17,24.45,82.23\ntwins_svt_large,962.19,1064.229,1024,224,15.15,35.1,99.27\ntf_efficientnet_b4,957.62,534.646,512,380,4.49,49.49,19.34\npvt_v2_b4,957.38,1069.569,1024,224,10.14,53.74,62.56\npoolformer_m36,954.91,1072.334,1024,224,8.8,22.02,56.17\ncait_s24_224,954.44,1072.866,1024,224,9.35,40.58,46.92\nregnetz_b16_evos,950.47,808.013,768,288,2.36,16.43,9.74\nresnest50d_4s2x40d,938.07,1091.586,1024,224,4.4,17.94,30.42\nhrnet_w48,936.07,1093.917,1024,224,17.34,28.56,77.47\ngmlp_b16_224,930.95,1099.935,1024,224,15.78,30.21,73.08\nconvnextv2_tiny,930.82,550.041,512,288,7.39,22.21,28.64\nconvnextv2_small,928.68,1102.629,1024,224,8.71,21.56,50.32\nmaxxvit_rmlp_tiny_rw_256,918.72,1114.583,1024,256,6.66,39.76,29.64\nmobilevitv2_150_384_in22ft1k,915.49,419.435,384,384,9.2,54.25,10.59\npvt_v2_b5,909.79,1125.516,1024,224,11.76,50.92,81.96\nnest_small,903.21,850.284,768,224,10.35,40.04,38.35\nswin_s3_small_224,899.98,853.339,768,224,9.43,37.84,49.74\nxcit_medi
um_24_p16_224_dist,898.61,1139.525,1024,224,16.13,31.71,84.4\nxcit_medium_24_p16_224,898.6,1139.542,1024,224,16.13,31.71,84.4\njx_nest_small,892.03,860.939,768,224,10.35,40.04,38.35\ncoat_mini,880.8,1162.569,1024,224,6.82,33.68,10.34\nswin_base_patch4_window7_224,875.38,1169.764,1024,224,15.47,36.63,87.77\ndpn131,865.2,1183.527,1024,224,16.09,32.97,79.25\nresnetv2_50d_evos,854.82,1197.895,1024,288,7.15,19.7,25.59\nxcit_small_12_p16_384_dist,853.54,1199.694,1024,384,14.14,36.51,26.25\nsequencer2d_l,839.78,1219.347,1024,224,9.74,22.12,54.3\ncrossvit_base_240,839.43,914.892,768,240,21.22,36.33,105.03\nhrnet_w44,821.37,1246.671,1024,224,14.94,26.92,67.06\neca_nfnet_l1,818.87,1250.489,1024,320,14.92,34.42,41.41\nvit_base_r50_s16_224,817.55,1252.502,1024,224,21.67,35.31,114.69\nmaxvit_rmlp_small_rw_224,816.34,1254.368,1024,224,10.75,49.3,64.9\ngcvit_small,815.24,1256.055,1024,224,8.57,41.61,51.09\nregnety_080,811.28,1262.191,1024,288,13.22,29.69,39.18\ndensenet264,804.85,1272.268,1024,224,12.95,12.8,72.69\nmvitv2_base,804.14,1273.395,1024,224,10.16,40.5,51.47\nrepvgg_b3g4,802.85,1275.443,1024,224,17.89,15.1,83.83\nvit_base_patch16_plus_240,782.25,1309.022,1024,240,27.41,33.08,117.56\nswinv2_tiny_window16_256,781.61,655.045,512,256,6.68,39.02,28.35\nmaxvit_small_tf_224,777.04,658.899,512,224,11.66,53.17,68.93\nxcit_tiny_24_p8_224,771.1,1327.958,1024,224,9.21,45.39,12.11\nxcit_tiny_24_p8_224_dist,770.21,1329.496,1024,224,9.21,45.39,12.11\ncoatnet_2_rw_224,763.52,670.562,512,224,15.09,49.22,73.87\nvit_relpos_base_patch16_plus_240,763.4,1341.361,1024,240,27.3,34.33,117.38\nefficientnet_b3_gn,763.0,671.023,512,320,2.14,28.83,11.73\ncoatnet_rmlp_2_rw_224,759.73,673.906,512,224,15.18,54.78,73.88\nvit_small_patch16_384,753.82,1018.79,768,384,15.52,50.78,22.2\nhrnet_w64,750.36,1364.663,1024,224,28.97,35.09,128.06\nxception71,749.7,1024.396,768,299,18.09,69.92,42.34\nresnet152d,742.37,1379.356,1024,320,24.08,47.67,60.21\nswinv2_small_window8_256,741.95,1380.134,1024,256,11.58,40.14
,49.73\nmobilevitv2_175_384_in22ft1k,739.09,519.544,384,384,12.47,63.29,14.25\necaresnet200d,736.17,1390.959,1024,256,20.0,43.15,64.69\nseresnet200d,733.28,1396.444,1024,256,20.01,43.15,71.86\nswin_s3_base_224,733.27,1396.459,1024,224,13.69,48.26,71.13\nconvit_base,731.09,1400.636,1024,224,17.52,31.77,86.54\nresnest101e,726.65,1409.184,1024,256,13.38,28.66,48.28\ndeit3_small_patch16_384,726.49,1057.125,768,384,15.52,50.78,22.21\ndeit3_small_patch16_384_in21ft1k,726.32,1057.368,768,384,15.52,50.78,22.21\nvolo_d2_224,722.61,1417.079,1024,224,14.34,41.34,58.68\ntnt_b_patch16_224,721.24,1419.762,1024,224,14.09,39.01,65.41\nxcit_nano_12_p8_384_dist,720.41,1421.4,1024,384,6.34,46.08,3.05\nswinv2_cr_base_224,719.23,1423.721,1024,224,15.86,59.66,87.88\npoolformer_m48,719.07,1424.046,1024,224,11.59,29.17,73.47\ncoatnet_2_224,715.36,715.711,512,224,16.5,52.67,74.68\nswinv2_cr_base_ns_224,712.96,1436.239,1024,224,15.86,59.66,87.88\ndpn107,691.0,1481.897,1024,224,18.38,33.46,86.92\nconvnext_base,687.14,1490.219,1024,288,25.43,47.53,88.59\nresnetv2_50x1_bitm,684.31,374.087,256,448,16.62,44.46,25.55\nefficientnet_b3_g8_gn,664.63,770.341,512,320,3.2,28.83,14.25\nregnety_064,657.71,1556.911,1024,288,10.56,27.11,30.58\nregnetv_064,652.6,1569.096,1024,288,10.55,27.11,30.58\nxcit_small_12_p8_224,651.3,1572.214,1024,224,18.69,47.21,26.21\nxcit_small_12_p8_224_dist,651.08,1572.755,1024,224,18.69,47.21,26.21\nresnetrs152,649.95,1575.501,1024,320,24.34,48.14,86.62\nmobilevitv2_200_384_in22ft1k,647.42,395.4,256,384,16.24,72.34,18.45\nseresnet152d,645.69,1585.88,1024,320,24.09,47.72,66.84\ntresnet_l,644.38,1589.105,1024,224,10.88,11.9,55.99\ntresnet_v2_l,642.3,1594.246,1024,224,8.81,16.34,46.17\nnest_base,640.98,798.76,512,224,17.96,53.39,67.72\nregnetx_120,640.37,1599.07,1024,224,12.13,21.37,46.11\nseresnext101_32x8d,639.53,1601.159,1024,288,27.24,51.63,93.57\nregnetz_e8,639.43,1601.423,1024,320,15.46,63.94,57.7\nese_vovnet99b_iabn,636.1,1609.798,1024,224,16.49,11.27,63.2\njx_nest_base,634
.61,806.787,512,224,17.96,53.39,67.72\nregnety_120,625.75,1636.422,1024,224,12.14,21.38,51.82\nefficientnetv2_m,624.53,1639.618,1024,416,18.6,67.5,54.14\nseresnext101d_32x8d,621.55,1647.466,1024,288,27.64,52.95,93.59\nresnext101_64x4d,619.77,1652.21,1024,288,25.66,51.59,83.46\nswsl_resnext101_32x16d,612.21,1672.624,1024,224,36.27,51.18,194.03\nig_resnext101_32x16d,611.98,1673.243,1024,224,36.27,51.18,194.03\nmaxvit_rmlp_small_rw_256,611.67,1255.571,768,256,14.15,66.09,64.9\nssl_resnext101_32x16d,611.31,1675.063,1024,224,36.27,51.18,194.03\nregnety_320,605.31,1691.684,1024,224,32.34,30.26,145.05\ngcvit_base,602.42,1699.782,1024,224,14.87,55.48,90.32\nregnetz_c16_evos,596.93,857.706,512,320,3.86,25.88,13.49\nmaxxvit_rmlp_small_rw_256,590.18,1735.046,1024,256,14.67,58.38,66.01\nlegacy_senet154,585.86,1747.854,1024,224,20.77,38.69,115.09\nsenet154,585.53,1748.836,1024,224,20.77,38.69,115.09\nseresnextaa101d_32x8d,585.08,1750.175,1024,288,28.51,56.44,93.59\ngluon_senet154,584.86,1750.843,1024,224,20.77,38.69,115.09\nconvmixer_768_32,581.95,1759.577,1024,224,19.55,25.95,21.11\nseresnet269d,574.5,1782.4,1024,256,26.59,53.6,113.67\nnf_regnet_b5,565.36,905.602,512,456,11.7,61.95,49.74\nmixer_l16_224,553.66,1849.49,1024,224,44.6,41.69,208.2\nresnet200d,545.14,1878.401,1024,320,31.25,67.33,64.69\nnfnet_f1,544.28,1881.353,1024,320,35.97,46.77,132.63\nvit_large_patch32_384,543.45,1884.237,1024,384,45.31,43.86,306.63\nefficientnetv2_rw_m,543.37,1884.512,1024,416,21.49,79.62,53.24\nvit_medium_patch16_gap_384,539.24,949.475,512,384,26.08,67.54,39.03\nefficientnet_b5,533.21,960.212,512,448,9.59,93.56,30.39\nswinv2_base_window8_256,531.81,1925.495,1024,256,20.37,52.59,87.92\nmaxxvitv2_rmlp_base_rw_224,525.72,1947.791,1024,224,24.2,62.77,116.09\nxcit_large_24_p16_224_dist,509.19,2011.039,1024,224,35.86,47.27,189.1\nxcit_large_24_p16_224,509.15,2011.169,1024,224,35.86,47.27,189.1\nswin_large_patch4_window7_224,504.4,1522.593,768,224,34.53,54.94,196.53\nhalonet_h1,503.39,508.543,256,256
,3.0,51.17,8.1\nvolo_d3_224,502.58,2037.467,1024,224,20.78,60.09,86.33\nswinv2_small_window16_256,488.97,1047.084,512,256,12.82,66.29,49.73\ntresnet_xl,481.58,2126.301,1024,224,15.17,15.34,78.44\nvit_small_patch8_224,479.11,1068.641,512,224,22.44,80.84,21.67\ntf_efficientnet_b5,476.47,805.919,384,456,10.46,98.86,30.39\nmaxvit_rmlp_base_rw_224,472.06,2169.196,1024,224,23.15,92.64,116.14\nresnetrs200,471.68,2170.964,1024,320,31.51,67.81,93.21\nxcit_tiny_12_p8_384_dist,471.45,2172.002,1024,384,14.13,69.14,6.71\ndm_nfnet_f1,461.24,2220.087,1024,320,35.97,46.77,132.63\ntf_efficientnetv2_m,458.93,1673.426,768,480,24.76,89.84,54.14\nxcit_small_24_p16_384_dist,457.16,2239.891,1024,384,26.72,68.58,47.67\ncoatnet_rmlp_3_rw_224,439.5,582.463,256,224,33.56,79.47,165.15\nmaxvit_base_tf_224,430.05,1190.542,512,224,24.04,95.01,119.47\nswinv2_cr_large_224,423.86,1811.887,768,224,35.1,78.42,196.68\nresnetv2_152x2_bit_teacher,423.36,2418.743,1024,224,46.95,45.11,236.34\nswinv2_cr_tiny_384,423.1,907.565,384,384,15.34,161.01,28.33\ncoatnet_3_rw_224,421.95,606.701,256,224,33.44,73.83,181.81\nresnetv2_101x1_bitm,419.35,610.453,256,448,31.65,64.93,44.54\ncoatnet_3_224,405.07,631.982,256,224,36.56,79.01,166.97\nconvnextv2_base,403.59,1268.593,512,288,25.43,47.53,88.72\neca_nfnet_l2,401.73,2548.946,1024,384,30.05,68.28,56.72\nregnetz_d8_evos,394.39,1947.294,768,320,7.03,38.92,23.46\nconvmixer_1024_20_ks9_p14,393.5,2602.254,1024,224,5.55,5.51,24.38\neva_large_patch14_196,392.3,2610.234,1024,196,61.57,63.52,304.14\ncrossvit_15_dagger_408,390.72,655.182,256,408,21.45,95.05,28.5\nvit_large_patch16_224,390.66,2621.182,1024,224,61.6,63.52,304.33\nvit_base_patch16_18x2_224,384.38,2663.987,1024,224,52.51,71.38,256.73\ndeit3_large_patch16_224_in21ft1k,377.58,2711.976,1024,224,61.6,63.52,304.37\ndeit3_large_patch16_224,377.53,2712.348,1024,224,61.6,63.52,304.37\nconvnext_large,373.02,2058.836,768,288,56.87,71.29,197.77\nbeit_large_patch16_224,360.62,2839.572,1024,224,61.6,63.52,304.43\nbeitv2_large_p
atch16_224,360.58,2839.86,1024,224,61.6,63.52,304.43\nswinv2_base_window12to16_192to256_22kft1k,360.56,1065.006,384,256,22.02,84.71,87.92\nswinv2_base_window16_256,360.23,1065.959,384,256,22.02,84.71,87.92\nregnety_160,353.5,2172.566,768,288,26.37,38.07,83.59\nnasnetalarge,345.63,1111.004,384,331,23.89,90.56,88.75\nmaxvit_tiny_tf_384,344.01,744.157,256,384,17.53,123.42,30.98\nxcit_small_24_p8_224,342.37,2990.915,1024,224,35.81,90.78,47.63\nxcit_small_24_p8_224_dist,342.26,2991.817,1024,224,35.81,90.78,47.63\nflexivit_large,335.35,3053.52,1024,240,70.99,75.39,304.36\nmaxxvitv2_rmlp_large_rw_224,332.33,3081.271,1024,224,44.14,87.15,215.42\nvit_large_r50_s32_384,329.8,3104.921,1024,384,57.43,76.52,329.09\npnasnet5large,328.89,1167.534,384,331,25.04,92.89,86.06\ntresnet_m_448,325.8,3143.01,1024,448,22.94,29.21,31.39\nvolo_d1_384,323.04,1584.906,512,384,22.75,108.55,26.78\nvolo_d4_224,318.96,3210.439,1024,224,44.34,80.22,192.96\nxcit_medium_24_p16_384_dist,312.74,3274.268,1024,384,47.39,91.64,84.4\nnfnet_f2,310.6,3296.869,1024,352,63.22,79.06,193.78\nvit_base_patch16_384,307.09,1250.42,384,384,55.54,101.56,86.86\ndeit_base_patch16_384,306.8,1251.599,384,384,55.54,101.56,86.86\nvit_base_patch16_clip_384,306.29,1253.685,384,384,55.54,101.56,86.86\ndeit_base_distilled_patch16_384,305.48,1257.017,384,384,55.65,101.82,87.63\necaresnet269d,305.06,3356.684,1024,352,50.25,101.25,102.09\nmaxvit_large_tf_224,301.43,1273.908,384,224,43.68,127.35,211.79\ndeit3_base_patch16_384_in21ft1k,298.01,1288.526,384,384,55.54,101.56,86.88\ndeit3_base_patch16_384,297.88,1289.093,384,384,55.54,101.56,86.88\nresnetrs270,296.97,3448.186,1024,352,51.13,105.48,129.86\nregnetx_320,289.44,2653.413,768,224,31.81,36.3,107.81\nefficientnet_b6,287.31,890.997,256,528,19.4,167.39,43.04\nvit_large_patch14_224,286.23,3577.501,1024,224,81.08,88.79,304.2\nvit_large_patch14_clip_224,285.99,3580.5,1024,224,81.08,88.79,304.2\ncrossvit_18_dagger_408,285.18,673.248,192,408,32.47,124.87,44.61\ncait_xxs24_384,281.48,3
637.936,1024,384,9.63,122.66,12.03\nig_resnext101_32x32d,275.12,1860.956,512,224,87.29,91.12,468.53\ntf_efficientnet_b6,274.07,700.545,192,528,19.4,167.39,43.04\ndm_nfnet_f2,264.79,2900.408,768,352,63.22,79.06,193.78\nbeit_base_patch16_384,261.27,1469.733,384,384,55.54,101.56,86.74\nefficientnetv2_l,260.33,1966.694,512,480,56.4,157.99,118.52\nswinv2_cr_small_384,259.75,985.56,256,384,29.7,298.03,49.7\ntf_efficientnetv2_l,257.29,1989.923,512,480,56.4,157.99,118.52\nresnest200e,254.36,1006.453,256,320,35.69,82.78,70.2\nmvitv2_large,249.99,2048.061,512,224,43.87,112.02,217.99\nxcit_tiny_24_p8_384_dist,248.25,4124.916,1024,384,27.05,132.95,12.11\nconvnext_xlarge,242.63,2110.182,512,288,100.8,95.05,350.2\nresmlp_big_24_224_in22ft1k,241.9,4233.056,1024,224,100.23,87.31,129.14\nresmlp_big_24_224,241.74,4235.988,1024,224,100.23,87.31,129.14\nresmlp_big_24_distilled_224,241.44,4241.249,1024,224,100.23,87.31,129.14\nconvnextv2_large,239.52,1068.782,256,288,56.87,71.29,197.96\ncoatnet_4_224,238.62,1072.827,256,224,62.48,129.26,275.43\nswin_base_patch4_window12_384,236.12,813.144,192,384,47.19,134.78,87.9\nxcit_medium_24_p8_224_dist,233.5,3289.007,768,224,63.53,121.23,84.32\nxcit_medium_24_p8_224,233.5,3289.104,768,224,63.53,121.23,84.32\neca_nfnet_l3,229.87,2227.284,512,448,52.55,118.4,72.04\nvit_base_r50_s16_384,226.32,1696.687,384,384,67.43,135.03,98.95\nmaxvit_small_tf_384,224.01,857.105,192,384,35.87,183.65,69.02\nxcit_small_12_p8_384_dist,221.54,1733.28,384,384,54.92,138.29,26.21\nswinv2_large_window12to16_192to256_22kft1k,220.1,1163.101,256,256,47.81,121.53,196.74\nvolo_d5_224,210.88,4855.76,1024,224,72.4,118.11,295.46\nvit_base_patch8_224,199.67,1282.079,256,224,78.22,161.69,86.58\ncait_xs24_384,197.64,3885.811,768,384,19.28,183.98,26.67\nresnetrs350,196.19,5219.377,1024,384,77.59,154.74,163.96\ncait_xxs36_384,188.27,5439.03,1024,384,14.35,183.7,17.37\nswinv2_cr_base_384,185.68,1378.725,256,384,50.57,333.68,87.88\ncoatnet_rmlp_2_rw_384,184.84,1038.746,192,384,47.69,209.
43,73.88\nswinv2_cr_huge_224,184.09,2085.934,384,224,115.97,121.08,657.83\nconvnext_xxlarge,183.68,2787.486,512,224,151.66,95.29,846.47\nvolo_d2_384,180.56,2126.753,384,384,46.17,184.51,58.87\nxcit_large_24_p16_384_dist,176.39,5805.281,1024,384,105.35,137.17,189.1\nregnety_640,174.81,4393.396,768,224,64.16,42.5,281.38\nmaxvit_xlarge_tf_224,171.63,1491.6,256,224,97.49,191.02,474.95\nnfnet_f3,170.11,4514.791,768,416,115.58,141.78,254.92\ndensenet264d_iabn,167.13,6126.84,1024,224,13.47,14.0,72.74\nefficientnet_b7,166.38,1153.975,192,600,38.33,289.94,66.35\nmaxvit_tiny_tf_512,163.72,781.809,128,512,33.49,257.59,31.05\nefficientnetv2_xl,162.7,3146.865,512,512,93.85,247.32,208.12\ntf_efficientnetv2_xl,161.32,3173.821,512,512,93.85,247.32,208.12\ntf_efficientnet_b7,160.43,1196.798,192,600,38.33,289.94,66.35\nresnetv2_152x2_bit_teacher_384,159.54,1604.579,256,384,136.16,132.56,236.34\ntresnet_l_448,154.66,6620.743,1024,448,43.5,47.56,55.99\nvit_huge_patch14_224,154.27,6637.58,1024,224,167.43,139.43,658.75\nvit_huge_patch14_clip_224,154.17,6642.017,1024,224,167.4,139.41,632.05\nmaxxvitv2_rmlp_base_rw_384,153.9,1663.429,256,384,72.98,213.74,116.09\ncait_s24_384,152.41,3359.254,512,384,32.17,245.31,47.06\ndeit3_huge_patch14_224_in21ft1k,150.05,6824.53,1024,224,167.4,139.41,632.13\ndeit3_huge_patch14_224,149.59,6845.356,1024,224,167.4,139.41,632.13\ndm_nfnet_f3,145.48,3519.403,512,416,115.58,141.78,254.92\nresnetrs420,142.37,5394.528,768,416,108.45,213.79,191.89\nswin_large_patch4_window12_384,138.37,925.016,128,384,104.08,202.16,196.74\nresnetv2_50x3_bitm,133.5,1438.189,192,448,145.7,133.37,217.32\nmaxvit_rmlp_base_rw_384,131.6,1945.285,256,384,70.97,318.95,116.14\nxcit_large_24_p8_224_dist,131.32,3898.808,512,224,141.23,181.56,188.93\nxcit_large_24_p8_224,131.27,3900.391,512,224,141.23,181.56,188.93\ncoatnet_5_224,130.48,1471.508,192,224,145.49,194.24,687.47\nmaxvit_base_tf_384,122.48,1567.652,192,384,73.8,332.9,119.65\nresnest269e,119.17,2148.198,256,416,77.69,171.98,110.93\
nresnetv2_152x2_bitm,117.29,2182.534,256,448,184.99,180.43,236.34\nxcit_small_24_p8_384_dist,116.59,3293.649,384,384,105.24,265.91,47.63\ntresnet_xl_448,115.63,8855.938,1024,448,60.65,61.31,78.44\nswinv2_cr_large_384,113.43,1128.479,128,384,108.95,404.96,196.68\nmaxvit_small_tf_512,106.82,1198.298,128,512,67.26,383.77,69.13\nefficientnet_b8,106.21,1205.18,128,672,63.48,442.89,87.41\ntf_efficientnet_b8,102.86,1244.358,128,672,63.48,442.89,87.41\neva_large_patch14_336,102.71,2492.371,256,336,191.1,270.24,304.53\nvit_large_patch14_clip_336,102.52,2496.99,256,336,191.11,270.24,304.53\nvit_large_patch16_384,102.5,2497.593,256,384,191.21,270.24,304.72\ncait_s36_384,101.88,5025.316,512,384,47.99,367.4,68.37\neva_giant_patch14_224,101.84,10055.112,1024,224,267.18,192.64,1012.56\nvit_giant_patch14_224,100.71,7625.752,768,224,267.18,192.64,1012.61\nvit_giant_patch14_clip_224,100.43,7646.856,768,224,267.18,192.64,1012.65\ndeit3_large_patch16_384_in21ft1k,99.81,2564.809,256,384,191.21,270.24,304.76\ndeit3_large_patch16_384,99.8,2564.994,256,384,191.21,270.24,304.76\nswinv2_base_window12to24_192to384_22kft1k,96.12,665.832,64,384,55.25,280.36,87.92\nnfnet_f4,89.33,5731.574,512,512,216.26,262.26,316.07\nbeit_large_patch16_384,88.56,2890.58,256,384,191.21,270.24,305.0\nmaxvit_large_tf_384,86.44,1480.84,128,384,132.55,445.84,212.03\nregnety_1280,82.49,4654.845,384,224,127.66,71.58,644.81\nxcit_medium_24_p8_384_dist,79.96,3201.705,256,384,186.67,354.73,84.32\nresnetv2_101x3_bitm,79.41,2417.67,192,448,280.33,194.78,387.93\nvolo_d3_448,77.64,2473.021,192,448,96.33,446.83,86.63\ndm_nfnet_f4,77.54,4952.036,384,512,216.26,262.26,316.07\nnfnet_f5,67.46,5691.915,384,544,290.97,349.71,377.21\ntf_efficientnet_l2,63.66,1507.989,96,475,172.11,609.89,480.31\nswinv2_large_window12to24_192to384_22kft1k,60.94,787.651,48,384,116.15,407.83,196.74\nvit_gigantic_patch14_224,60.18,8507.121,512,224,483.95,275.37,1844.44\nvit_gigantic_patch14_clip_224,60.11,8517.85,512,224,483.96,275.37,1844.91\nvolo_d4_4
48,57.87,3317.675,192,448,197.13,527.35,193.41\nmaxvit_base_tf_512,57.86,2212.256,128,512,138.02,703.99,119.88\ndm_nfnet_f5,57.78,6645.368,384,544,290.97,349.71,377.21\nvit_huge_patch14_clip_336,57.4,4460.085,256,336,390.97,407.54,632.46\nig_resnext101_32x48d,56.43,6804.709,384,224,153.57,131.06,828.41\nconvnextv2_huge,56.31,1704.92,96,384,337.96,232.35,660.29\nconvmixer_1536_20,55.47,18461.426,1024,224,48.68,33.03,51.63\nswinv2_cr_giant_224,52.39,3665.046,192,224,483.85,309.15,2598.76\nnfnet_f6,51.81,7411.574,384,576,378.69,452.2,438.36\nmaxvit_xlarge_tf_384,50.76,1891.335,96,384,292.78,668.76,475.32\nswinv2_cr_huge_384,49.01,1305.73,64,384,352.04,583.18,657.94\nregnety_2560,47.69,8051.463,384,224,257.07,87.48,826.14\nxcit_large_24_p8_384_dist,44.91,4275.004,192,384,415.0,531.82,188.93\ndm_nfnet_f6,44.62,5737.462,256,576,378.69,452.2,438.36\nnfnet_f7,41.13,6224.782,256,608,480.39,570.85,499.5\nmaxvit_large_tf_512,41.04,1559.597,64,512,244.75,942.15,212.33\neva_giant_patch14_336,39.89,6418.269,256,336,620.64,550.67,1013.01\nvolo_d5_448,39.88,3209.812,128,448,315.06,737.92,295.91\nbeit_large_patch16_512,35.33,2716.953,96,512,362.24,656.39,305.67\ncait_m36_384,32.89,7783.487,256,384,173.11,734.81,271.22\nresnetv2_152x4_bitm,30.46,3151.929,96,480,844.84,414.26,936.53\nvolo_d5_512,27.89,4590.0,128,512,425.09,1105.37,296.09\nmaxvit_xlarge_tf_512,24.38,1968.424,48,512,534.14,1413.22,475.77\nefficientnet_l2,23.13,1383.428,32,800,479.12,1707.39,480.31\nswinv2_cr_giant_384,15.06,2124.735,32,384,1450.71,1394.86,2598.76\ncait_m48_448,13.86,9235.876,128,448,329.41,1708.23,356.46\neva_giant_patch14_560,10.52,3043.009,32,560,1906.76,2577.17,1014.45\n"
  },
  {
    "path": "results/benchmark-infer-amp-nhwc-pt210-cu121-rtx3090.csv",
    "content": "model,infer_img_size,infer_batch_size,infer_samples_per_sec,infer_step_time,infer_gmacs,infer_macts,param_count\ntinynet_e,106,1024.0,75290.96,13.591,0.03,0.69,2.04\nmobilenetv3_small_050,224,1024.0,56785.93,18.023,0.03,0.92,1.59\nefficientvit_m0,224,1024.0,50656.23,20.205,0.08,0.91,2.35\nlcnet_035,224,1024.0,48853.22,20.951,0.03,1.04,1.64\nlcnet_050,224,1024.0,42147.98,24.285,0.05,1.26,1.88\nmobilenetv3_small_075,224,1024.0,42002.46,24.369,0.05,1.3,2.04\nmobilenetv3_small_100,224,1024.0,38516.23,26.573,0.06,1.42,2.54\ntinynet_d,152,1024.0,37989.71,26.944,0.05,1.42,2.34\nefficientvit_m1,224,1024.0,37486.44,27.306,0.17,1.33,2.98\ntf_mobilenetv3_small_minimal_100,224,1024.0,33948.13,30.153,0.06,1.41,2.04\nefficientvit_m2,224,1024.0,33551.67,30.51,0.2,1.47,4.19\ntf_mobilenetv3_small_075,224,1024.0,33262.15,30.775,0.05,1.3,2.04\ntf_mobilenetv3_small_100,224,1024.0,31002.71,33.019,0.06,1.42,2.54\nlcnet_075,224,1024.0,30664.19,33.384,0.1,1.99,2.36\nefficientvit_m3,224,1024.0,29423.78,34.792,0.27,1.62,6.9\nefficientvit_m4,224,1024.0,27882.1,36.716,0.3,1.7,8.8\nmnasnet_small,224,1024.0,25015.02,40.925,0.07,2.16,2.03\nregnetx_002,224,1024.0,24564.71,41.67,0.2,2.16,2.68\nlcnet_100,224,1024.0,24268.72,42.183,0.16,2.52,2.95\nlevit_128s,224,1024.0,22705.11,45.089,0.31,1.88,7.78\nregnety_002,224,1024.0,22248.91,46.012,0.2,2.17,3.16\nresnet10t,176,1024.0,22236.3,46.04,0.7,1.51,5.44\nmobilenetv2_035,224,1024.0,22055.42,46.418,0.07,2.86,1.68\nlevit_conv_128s,224,1024.0,21863.15,46.826,0.31,1.88,7.78\nghostnet_050,224,1024.0,20782.95,49.261,0.05,1.77,2.59\nmnasnet_050,224,1024.0,20672.17,49.525,0.11,3.07,2.22\nrepghostnet_050,224,1024.0,20617.05,49.657,0.05,2.02,2.31\nefficientvit_m5,224,1024.0,19010.14,53.856,0.53,2.41,12.47\ntinynet_c,184,1024.0,18737.07,54.641,0.11,2.87,2.46\nefficientvit_b0,224,1024.0,18023.56,56.804,0.1,2.87,3.41\nsemnasnet_050,224,1024.0,17573.38,58.26,0.11,3.44,2.08\nmobilenetv2_050,224,1024.0,17491.5,58.532,0.1,3.64,1.97\nregnetx_004,224,1024.
0,17164.74,59.647,0.4,3.14,5.16\nrepghostnet_058,224,1024.0,16947.81,60.41,0.07,2.59,2.55\nregnetx_004_tv,224,1024.0,16485.73,62.101,0.42,3.17,5.5\nvit_small_patch32_224,224,1024.0,16428.86,62.319,1.12,2.09,22.88\ncs3darknet_focus_s,256,1024.0,16333.25,62.684,0.69,2.7,3.27\nlcnet_150,224,1024.0,15841.02,64.632,0.34,3.79,4.5\ngernet_s,224,1024.0,15617.62,65.556,0.75,2.65,8.17\ncs3darknet_s,256,1024.0,15597.89,65.64,0.72,2.97,3.28\nlevit_128,224,1024.0,15372.6,66.601,0.41,2.71,9.21\nvit_tiny_r_s16_p8_224,224,1024.0,15191.19,67.397,0.43,1.85,6.34\nlevit_conv_128,224,1024.0,14904.31,68.695,0.41,2.71,9.21\nmobilenetv3_large_075,224,1024.0,14843.63,68.964,0.16,4.0,3.99\npit_ti_distilled_224,224,1024.0,14746.15,69.432,0.51,2.77,5.1\npit_ti_224,224,1024.0,14700.08,69.649,0.5,2.75,4.85\nmixer_s32_224,224,1024.0,14362.24,71.288,1.0,2.28,19.1\nresnet10t,224,1024.0,14254.88,71.825,1.1,2.43,5.44\nrepghostnet_080,224,1024.0,13967.84,73.293,0.1,3.22,3.28\ntf_efficientnetv2_b0,192,1024.0,13629.52,75.121,0.54,3.51,7.14\nmobilenetv3_rw,224,1024.0,13582.75,75.38,0.23,4.41,5.48\nlevit_192,224,1024.0,13511.34,75.778,0.66,3.2,10.95\nmnasnet_075,224,1024.0,13417.36,76.309,0.23,4.77,3.17\nmobilenetv3_large_100,224,1024.0,13322.79,76.851,0.23,4.41,5.48\nhardcorenas_a,224,1024.0,13314.34,76.899,0.23,4.38,5.26\nlevit_conv_192,224,1024.0,12952.02,79.05,0.66,3.2,10.95\nregnety_004,224,1024.0,12651.55,80.929,0.41,3.89,4.34\ntf_mobilenetv3_large_075,224,1024.0,12636.69,81.023,0.16,4.0,3.99\nnf_regnet_b0,192,1024.0,12264.41,83.481,0.37,3.15,8.76\ntinynet_b,188,1024.0,12262.56,83.495,0.21,4.44,3.73\ntf_mobilenetv3_large_minimal_100,224,1024.0,12182.74,84.043,0.22,4.4,3.92\nhardcorenas_b,224,1024.0,12118.5,84.488,0.26,5.09,5.18\nhardcorenas_c,224,1024.0,12088.28,84.699,0.28,5.01,5.52\nresnet14t,176,1024.0,11843.82,86.448,1.07,3.61,10.08\nmnasnet_100,224,1024.0,11686.43,87.612,0.33,5.46,4.38\nregnety_006,224,1024.0,11675.48,87.69,0.61,4.33,6.06\nese_vovnet19b_slim_dw,224,1024.0,11663.91,87.781,0.4,5.
28,1.9\nrepghostnet_100,224,1024.0,11508.79,88.956,0.15,3.98,4.07\ntf_mobilenetv3_large_100,224,1024.0,11443.62,89.472,0.23,4.41,5.48\nvit_tiny_patch16_224,224,1024.0,11342.82,90.267,1.08,4.12,5.72\nhardcorenas_d,224,1024.0,11329.99,90.369,0.3,4.93,7.5\ndeit_tiny_distilled_patch16_224,224,1024.0,11311.9,90.514,1.09,4.15,5.91\ndeit_tiny_patch16_224,224,1024.0,11286.31,90.719,1.08,4.12,5.72\nsemnasnet_075,224,1024.0,11132.28,91.974,0.23,5.54,2.91\nresnet18,224,1024.0,11101.69,92.228,1.82,2.48,11.69\nghostnet_100,224,1024.0,11039.87,92.744,0.15,3.55,5.18\nmobilenetv2_075,224,1024.0,10984.87,93.208,0.22,5.86,2.64\nspnasnet_100,224,1024.0,10557.11,96.986,0.35,6.03,4.42\ntf_efficientnetv2_b1,192,1024.0,10473.04,97.765,0.76,4.59,8.14\nregnetx_008,224,1024.0,10422.45,98.23,0.81,5.15,7.26\nseresnet18,224,1024.0,10416.31,98.297,1.82,2.49,11.78\ntf_efficientnetv2_b0,224,1024.0,10174.51,100.633,0.73,4.77,7.14\nlegacy_seresnet18,224,1024.0,10133.12,101.044,1.82,2.49,11.78\nrepghostnet_111,224,1024.0,10094.28,101.428,0.18,4.38,4.54\nhardcorenas_f,224,1024.0,10012.95,102.257,0.35,5.57,8.2\ntinynet_a,192,1024.0,9946.05,102.945,0.35,5.41,6.19\ndla46_c,224,1024.0,9943.77,102.967,0.58,4.5,1.3\nhardcorenas_e,224,1024.0,9851.75,103.931,0.35,5.65,8.07\nsemnasnet_100,224,1024.0,9823.16,104.233,0.32,6.23,3.89\nlevit_256,224,1024.0,9811.76,104.354,1.13,4.23,18.89\nrepvgg_a0,224,1024.0,9709.7,105.449,1.52,3.59,9.11\nmobilenetv2_100,224,1024.0,9654.78,106.051,0.31,6.68,3.5\nregnety_008,224,1024.0,9643.2,106.178,0.81,5.25,6.26\nfbnetc_100,224,1024.0,9552.51,107.186,0.4,6.51,5.57\nefficientnet_lite0,224,1024.0,9466.4,108.161,0.4,6.74,4.65\nlevit_conv_256,224,1024.0,9461.49,108.218,1.13,4.23,18.89\nresnet18d,224,1024.0,9458.4,108.253,2.06,3.29,11.71\npit_xs_224,224,1024.0,9332.33,109.714,1.1,4.12,10.62\nese_vovnet19b_slim,224,1024.0,9277.16,110.369,1.69,3.52,3.17\nregnety_008_tv,224,1024.0,9213.78,111.127,0.84,5.42,6.43\npit_xs_distilled_224,224,1024.0,9203.86,111.241,1.11,4.15,11.0\nconvnext_at
to,224,1024.0,9104.06,112.467,0.55,3.81,3.7\nrepghostnet_130,224,1024.0,8873.05,115.395,0.25,5.24,5.48\nghostnet_130,224,1024.0,8870.81,115.424,0.24,4.6,7.36\nconvnext_atto_ols,224,1024.0,8829.55,115.964,0.58,4.11,3.7\nregnetz_005,224,1024.0,8796.44,116.392,0.52,5.86,7.12\nxcit_nano_12_p16_224,224,1024.0,8604.96,118.991,0.56,4.17,3.05\nlevit_256d,224,1024.0,8322.97,123.022,1.4,4.93,26.21\nregnetx_006,224,1024.0,8320.1,123.064,0.61,3.98,6.2\ntf_efficientnet_lite0,224,1024.0,8163.21,125.431,0.4,6.74,4.65\nfbnetv3_b,224,1024.0,8152.31,125.598,0.42,6.97,8.6\nefficientnet_b0,224,1024.0,8085.72,126.633,0.4,6.75,5.29\nlevit_conv_256d,224,1024.0,8055.13,127.113,1.4,4.93,26.21\nedgenext_xx_small,256,1024.0,8014.51,127.757,0.26,3.33,1.33\nmnasnet_140,224,1024.0,7984.3,128.241,0.6,7.71,7.12\nconvnext_femto,224,1024.0,7977.79,128.346,0.79,4.57,5.22\ntf_efficientnetv2_b2,208,1024.0,7861.13,130.251,1.06,6.0,10.1\nmobilevit_xxs,256,1024.0,7827.79,130.801,0.34,5.74,1.27\nrepghostnet_150,224,1024.0,7766.69,131.835,0.32,6.0,6.58\nconvnext_femto_ols,224,1024.0,7757.32,131.994,0.82,4.87,5.23\nrexnetr_100,224,1024.0,7545.9,135.692,0.43,7.72,4.88\nrepvit_m1,224,1024.0,7543.44,135.728,0.83,7.45,5.49\nresnet14t,224,1024.0,7466.4,137.137,1.69,5.8,10.08\nmobilenetv2_110d,224,1024.0,7331.32,139.66,0.45,8.71,4.52\nhrnet_w18_small,224,1024.0,7298.3,140.296,1.61,5.72,13.19\ncs3darknet_focus_m,256,1024.0,7202.61,142.16,1.98,4.89,9.3\nrepvit_m0_9,224,1024.0,7165.5,142.888,0.83,7.45,5.49\ncrossvit_tiny_240,240,1024.0,7123.68,143.735,1.3,5.67,7.01\nefficientvit_b1,224,1024.0,7109.59,144.02,0.53,7.25,9.1\ntf_efficientnet_b0,224,1024.0,7104.21,144.129,0.4,6.75,5.29\ncrossvit_9_240,240,1024.0,7025.32,145.747,1.55,5.59,8.55\nnf_regnet_b0,256,1024.0,6992.1,146.441,0.64,5.58,8.76\nrepvgg_a1,224,1024.0,6942.64,147.483,2.64,4.74,14.09\nmobilevitv2_050,256,1024.0,6935.55,147.628,0.48,8.04,1.37\ncs3darknet_m,256,1024.0,6929.59,147.762,2.08,5.28,9.31\nefficientnet_b1_pruned,240,1024.0,6922.7,147.909,0.4,6.21,6
.33\ngernet_m,224,1024.0,6840.64,149.682,3.02,5.24,21.14\nfbnetv3_d,224,1024.0,6784.35,150.925,0.52,8.5,10.31\nsemnasnet_140,224,1024.0,6771.35,151.215,0.6,8.87,6.11\ncrossvit_9_dagger_240,240,1024.0,6704.51,152.722,1.68,6.03,8.78\ntf_efficientnetv2_b1,240,1024.0,6611.54,154.87,1.21,7.34,8.14\nmobilenetv2_140,224,1024.0,6588.7,155.407,0.6,9.57,6.11\nresnet34,224,1024.0,6504.25,157.425,3.67,3.74,21.8\nese_vovnet19b_dw,224,1024.0,6406.95,159.816,1.34,8.25,6.54\nselecsls42,224,1024.0,6366.41,160.834,2.94,4.62,30.35\nresnet18,288,1024.0,6354.7,161.13,3.01,4.11,11.69\nselecsls42b,224,1024.0,6344.62,161.386,2.98,4.62,32.46\nefficientnet_b0_g16_evos,224,1024.0,6342.4,161.442,1.01,7.42,8.11\nedgenext_xx_small,288,1024.0,6334.97,161.631,0.33,4.21,1.33\nefficientnet_lite1,240,1024.0,6268.15,163.355,0.62,10.14,5.42\npvt_v2_b0,224,1024.0,6254.52,163.711,0.53,7.01,3.67\nvisformer_tiny,224,1024.0,6218.29,164.665,1.27,5.72,10.32\nconvnext_pico,224,1024.0,6208.02,164.938,1.37,6.1,9.05\nfbnetv3_b,256,1024.0,6192.25,165.357,0.55,9.1,8.6\nefficientnet_es_pruned,224,1024.0,6175.39,165.809,1.81,8.73,5.44\nefficientnet_es,224,1024.0,6170.12,165.95,1.81,8.73,5.44\nrexnet_100,224,1024.0,6170.05,165.953,0.41,7.44,4.8\nghostnetv2_100,224,1024.0,6155.62,166.342,0.18,4.55,6.16\nseresnet34,224,1024.0,6069.09,168.714,3.67,3.74,21.96\nconvnext_pico_ols,224,1024.0,6043.01,169.442,1.43,6.5,9.06\nseresnet18,288,1024.0,5998.94,170.686,3.01,4.11,11.78\ndla46x_c,224,1024.0,5992.19,170.877,0.54,5.66,1.07\ndla34,224,1024.0,5954.72,171.952,3.07,5.02,15.74\nrepghostnet_200,224,1024.0,5934.75,172.524,0.54,7.96,9.8\nresnet26,224,1024.0,5916.33,173.07,2.36,7.35,16.0\nlevit_384,224,1024.0,5897.4,173.625,2.36,6.26,39.13\nresnet34d,224,1024.0,5884.13,174.017,3.91,4.54,21.82\ncs3darknet_focus_m,288,1024.0,5878.89,174.173,2.51,6.19,9.3\nlegacy_seresnet34,224,1024.0,5873.4,174.335,3.67,3.74,21.96\nrepvit_m2,224,1024.0,5866.53,174.53,1.36,9.43,8.8\nvit_base_patch32_224,224,1024.0,5866.04,174.553,4.37,4.19,88.22\nvit
_base_patch32_clip_224,224,1024.0,5864.79,174.59,4.37,4.19,88.22\nrepvit_m1_0,224,1024.0,5862.26,174.66,1.13,8.69,7.3\ntf_efficientnet_es,224,1024.0,5831.76,175.58,1.81,8.73,5.44\nrexnetr_130,224,1024.0,5827.09,175.72,0.68,9.81,7.61\nresnetrs50,160,1024.0,5819.33,175.954,2.29,6.2,35.69\ndla60x_c,224,1024.0,5709.85,179.326,0.59,6.01,1.32\nvit_small_patch32_384,384,1024.0,5700.23,179.631,3.26,6.07,22.92\nlevit_conv_384,224,1024.0,5694.64,179.807,2.36,6.26,39.13\ntiny_vit_5m_224,224,1024.0,5681.84,180.212,1.18,9.32,12.08\nefficientnet_b1,224,1024.0,5671.54,180.54,0.59,9.36,7.79\ncs3darknet_m,288,1024.0,5670.5,180.573,2.63,6.69,9.31\nresnetblur18,224,1024.0,5631.98,181.808,2.34,3.39,11.69\ntf_efficientnet_lite1,240,1024.0,5588.09,183.236,0.62,10.14,5.42\nrepvit_m1_1,224,1024.0,5584.25,183.355,1.36,9.43,8.8\nmixnet_s,224,1024.0,5566.85,183.931,0.25,6.25,4.13\nconvnext_atto,288,1024.0,5556.64,184.274,0.91,6.3,3.7\ndarknet17,256,1024.0,5525.94,185.298,3.26,7.18,14.3\npit_s_224,224,1024.0,5520.06,185.491,2.42,6.18,23.46\nresnet18d,288,1024.0,5497.35,186.262,3.41,5.43,11.71\nselecsls60,224,1024.0,5496.69,186.283,3.59,5.52,30.67\npit_s_distilled_224,224,1024.0,5494.69,186.349,2.45,6.22,24.04\nxcit_tiny_12_p16_224,224,1024.0,5472.11,187.12,1.24,6.29,6.72\nselecsls60b,224,1024.0,5466.97,187.296,3.63,5.52,32.77\nskresnet18,224,1024.0,5432.07,188.499,1.82,3.24,11.96\nconvnext_atto_ols,288,1024.0,5378.78,190.367,0.96,6.8,3.7\nresmlp_12_224,224,1024.0,5371.14,190.637,3.01,5.5,15.35\nregnetz_005,288,1024.0,5353.96,191.249,0.86,9.68,7.12\nmobilenetv2_120d,224,1024.0,5347.39,191.484,0.69,11.97,5.83\nconvnextv2_atto,224,1024.0,5293.77,193.425,0.55,3.81,3.71\nrepvgg_b0,224,1024.0,5265.8,194.451,3.41,6.15,15.82\nmixer_b32_224,224,1024.0,5245.72,195.191,3.24,6.29,60.29\nvit_tiny_r_s16_p8_384,384,1024.0,5235.72,195.568,1.25,5.39,6.36\nnf_regnet_b1,256,1024.0,5226.46,195.915,0.82,7.27,10.22\nnf_regnet_b2,240,1024.0,5223.53,196.02,0.97,7.23,14.31\nvit_base_patch32_clip_quickgelu_224,224,1024
.0,5220.87,196.124,4.37,4.19,87.85\nresnetaa34d,224,1024.0,5205.31,196.711,4.43,5.07,21.82\nresnet26d,224,1024.0,5169.81,198.062,2.6,8.15,16.01\ntf_mixnet_s,224,1024.0,5128.65,199.652,0.25,6.25,4.13\nrexnetr_150,224,1024.0,5105.32,200.564,0.89,11.13,9.78\ngmixer_12_224,224,1024.0,5083.79,201.414,2.67,7.26,12.7\nfbnetv3_d,256,1024.0,5047.63,202.856,0.68,11.1,10.31\nedgenext_x_small,256,1024.0,5018.94,204.014,0.54,5.93,2.34\nmixer_s16_224,224,1024.0,5009.58,204.393,3.79,5.97,18.53\nregnetz_b16,224,1024.0,5008.24,204.437,1.45,9.95,9.72\ngmlp_ti16_224,224,1024.0,4999.44,204.811,1.34,7.55,5.87\ndarknet21,256,1024.0,4956.17,206.601,3.93,7.47,20.86\neva02_tiny_patch14_224,224,1024.0,4940.45,207.258,1.4,6.17,5.5\nghostnetv2_130,224,1024.0,4896.55,209.116,0.28,5.9,8.96\nconvnext_femto,288,1024.0,4844.52,211.362,1.3,7.56,5.22\nnf_resnet26,224,1024.0,4822.21,212.339,2.41,7.35,16.0\nefficientnet_lite2,260,1024.0,4817.66,212.541,0.89,12.9,6.09\ntf_efficientnetv2_b2,260,1024.0,4797.27,213.444,1.72,9.84,10.1\nefficientnet_cc_b0_8e,224,1024.0,4749.51,215.591,0.42,9.42,24.01\nsedarknet21,256,1024.0,4747.46,215.684,3.93,7.47,20.95\nefficientnet_cc_b0_4e,224,1024.0,4720.11,216.933,0.41,9.42,13.31\nefficientnet_b2_pruned,260,1024.0,4716.64,217.093,0.73,9.13,8.31\nconvnext_femto_ols,288,1024.0,4709.5,217.422,1.35,8.06,5.23\nresnext26ts,256,1024.0,4668.94,219.311,2.43,10.52,10.3\ntiny_vit_11m_224,224,1024.0,4649.32,220.237,1.9,10.73,20.35\necaresnet50d_pruned,224,1024.0,4636.78,220.832,2.53,6.43,19.94\ndeit_small_patch16_224,224,1024.0,4620.93,221.59,4.25,8.25,22.05\nefficientformer_l1,224,1024.0,4616.64,221.795,1.3,5.53,12.29\nvit_small_patch16_224,224,1024.0,4614.32,221.907,4.25,8.25,22.05\ndpn48b,224,1024.0,4588.67,223.146,1.69,8.92,9.13\ndeit_small_distilled_patch16_224,224,1024.0,4587.3,223.214,4.27,8.29,22.44\nvit_base_patch32_clip_256,256,1024.0,4547.51,225.168,5.68,5.44,87.86\nconvnextv2_femto,224,1024.0,4545.73,225.256,0.79,4.57,5.23\nmobilevitv2_075,256,1024.0,4537.95,225.638,1
.05,12.06,2.87\neca_resnext26ts,256,1024.0,4521.18,226.479,2.43,10.52,10.3\nseresnext26ts,256,1024.0,4517.43,226.666,2.43,10.52,10.39\nefficientnetv2_rw_t,224,1024.0,4511.98,226.94,1.93,9.94,13.65\nlegacy_seresnext26_32x4d,224,1024.0,4489.21,228.092,2.49,9.39,16.79\ngernet_l,256,1024.0,4474.96,228.817,4.57,8.0,31.08\ngcresnext26ts,256,1024.0,4472.11,228.964,2.43,10.53,10.48\nrexnet_130,224,1024.0,4453.51,229.92,0.68,9.71,7.56\ntf_efficientnet_b1,240,1024.0,4442.45,230.492,0.71,10.88,7.79\ntf_efficientnet_cc_b0_8e,224,1024.0,4391.83,233.15,0.42,9.42,24.01\nconvnext_nano,224,1024.0,4389.78,233.258,2.46,8.37,15.59\ngc_efficientnetv2_rw_t,224,1024.0,4373.41,234.132,1.94,9.97,13.68\ntf_efficientnet_cc_b0_4e,224,1024.0,4373.37,234.134,0.41,9.42,13.31\ntf_efficientnetv2_b3,240,1024.0,4372.06,234.204,1.93,9.95,14.36\ntf_efficientnet_lite2,260,1024.0,4324.79,236.764,0.89,12.9,6.09\nefficientnet_b1,256,1024.0,4298.75,238.198,0.77,12.22,7.79\ndeit3_small_patch16_224,224,1024.0,4270.38,239.779,4.25,8.25,22.06\ncs3darknet_focus_l,256,1024.0,4230.07,242.066,4.66,8.03,21.15\nnf_regnet_b1,288,1024.0,4135.98,247.568,1.02,9.2,10.22\nconvnext_nano_ols,224,1024.0,4118.16,248.644,2.65,9.38,15.65\nnf_seresnet26,224,1024.0,4112.79,248.966,2.41,7.36,17.4\nnf_ecaresnet26,224,1024.0,4107.39,249.292,2.41,7.36,16.0\nefficientnet_b2,256,1024.0,4105.27,249.424,0.89,12.81,9.11\ncs3darknet_l,256,1024.0,4101.41,249.66,4.86,8.55,21.16\nnf_regnet_b2,272,1024.0,4097.18,249.913,1.22,9.27,14.31\necaresnext50t_32x4d,224,1024.0,4074.12,251.332,2.7,10.09,15.41\necaresnext26t_32x4d,224,1024.0,4072.14,251.454,2.7,10.09,15.41\nseresnext26t_32x4d,224,1024.0,4061.05,252.141,2.7,10.09,16.81\nrepvgg_a2,224,1024.0,4049.32,252.867,5.7,6.26,28.21\npoolformer_s12,224,1024.0,4047.55,252.981,1.82,5.53,11.92\nseresnext26d_32x4d,224,1024.0,4037.54,253.609,2.73,10.19,16.81\nregnetx_016,224,1024.0,4025.84,254.342,1.62,7.93,9.19\nresnet26t,256,1024.0,4021.85,254.598,3.35,10.52,16.01\nflexivit_small,240,1024.0,4011.8,255.236
,4.88,9.46,22.06\nedgenext_x_small,288,1024.0,3990.87,256.573,0.68,7.5,2.34\nrexnet_150,224,1024.0,3983.48,257.051,0.9,11.21,9.73\nvit_relpos_small_patch16_rpn_224,224,1024.0,3975.32,257.575,4.24,9.38,21.97\nrepvit_m3,224,1024.0,3966.18,258.164,1.89,13.94,10.68\nvit_relpos_small_patch16_224,224,1024.0,3948.05,259.358,4.24,9.38,21.98\nvit_srelpos_small_patch16_224,224,1024.0,3937.22,260.07,4.23,8.49,21.97\nmobileone_s1,224,1024.0,3931.71,260.434,0.86,9.67,4.83\nresnetv2_50,224,1024.0,3890.29,263.208,4.11,11.11,25.55\neca_botnext26ts_256,256,1024.0,3883.93,263.639,2.46,11.6,10.59\ncs3sedarknet_l,256,1024.0,3835.91,266.94,4.86,8.56,21.91\nghostnetv2_160,224,1024.0,3826.79,267.576,0.42,7.23,12.39\nresnet34,288,1024.0,3820.15,268.041,6.07,6.18,21.8\nedgenext_small,256,1024.0,3794.31,269.865,1.26,9.07,5.59\ndpn68,224,1024.0,3788.79,270.258,2.35,10.47,12.61\nese_vovnet19b_dw,288,1024.0,3782.88,270.682,2.22,13.63,6.54\nfbnetv3_g,240,1024.0,3779.41,270.931,1.28,14.87,16.62\nconvnext_pico,288,1024.0,3777.8,271.046,2.27,10.08,9.05\necaresnetlight,224,1024.0,3759.77,272.346,4.11,8.42,30.16\neca_halonext26ts,256,1024.0,3745.07,273.414,2.44,11.46,10.76\ndpn68b,224,1024.0,3719.51,275.293,2.35,10.47,12.61\nmixnet_m,224,1024.0,3687.37,277.689,0.36,8.19,5.01\nresnet50,224,1024.0,3687.18,277.708,4.11,11.11,25.56\nefficientnet_em,240,1024.0,3685.78,277.814,3.04,14.34,6.9\nconvnext_pico_ols,288,1024.0,3673.49,278.743,2.37,10.74,9.06\nresnet32ts,256,1024.0,3641.96,281.156,4.63,11.58,17.96\nbat_resnext26ts,256,1024.0,3638.35,281.435,2.53,12.51,10.73\nefficientnet_b3_pruned,300,1024.0,3633.29,281.827,1.04,11.86,9.86\nbotnet26t_256,256,1024.0,3632.31,281.904,3.32,11.98,12.49\nhrnet_w18_small_v2,224,1024.0,3631.33,281.979,2.62,9.65,15.6\necaresnet101d_pruned,224,1024.0,3611.37,283.538,3.48,7.69,24.88\necaresnet26t,256,1024.0,3599.02,284.511,3.35,10.53,16.01\nregnetv_040,224,1024.0,3598.04,284.583,4.0,12.29,20.64\nseresnet34,288,1024.0,3583.61,285.735,6.07,6.18,21.96\nresnetv2_50t,224,1024.0,
3573.26,286.561,4.32,11.82,25.57\npvt_v2_b1,224,1024.0,3571.19,286.726,2.04,14.01,14.01\nregnety_016,224,1024.0,3567.37,287.031,1.63,8.04,11.2\nresnext26ts,288,1024.0,3565.74,287.167,3.07,13.31,10.3\nregnety_040,224,1024.0,3565.62,287.173,4.0,12.29,20.65\nresnet33ts,256,1024.0,3563.66,287.335,4.76,11.66,19.68\nresnetv2_50d,224,1024.0,3553.44,288.159,4.35,11.92,25.57\ntf_efficientnet_em,240,1024.0,3544.42,288.894,3.04,14.34,6.9\nhalonet26t,256,1024.0,3541.55,289.129,3.19,11.69,12.48\ndla60,224,1024.0,3527.55,290.275,4.26,10.16,22.04\ntf_mixnet_m,224,1024.0,3524.0,290.567,0.36,8.19,5.01\nresnet50c,224,1024.0,3521.04,290.812,4.35,11.92,25.58\nedgenext_small_rw,256,1024.0,3501.76,292.411,1.58,9.51,7.83\nresnet34d,288,1024.0,3491.3,293.29,6.47,7.51,21.82\nconvnextv2_pico,224,1024.0,3480.58,294.194,1.37,6.1,9.07\nvit_small_resnet26d_224,224,1024.0,3476.26,294.557,5.04,10.65,63.61\nconvit_tiny,224,1024.0,3460.49,295.901,1.26,7.94,5.71\ntresnet_m,224,1024.0,3457.69,296.14,5.75,7.31,31.39\nresnet26,288,1024.0,3457.48,296.158,3.9,12.15,16.0\nseresnext26ts,288,1024.0,3455.43,296.333,3.07,13.32,10.39\nvit_relpos_base_patch32_plus_rpn_256,256,1024.0,3447.98,296.974,7.59,6.63,119.42\nseresnet33ts,256,1024.0,3444.98,297.233,4.76,11.66,19.78\neca_resnext26ts,288,1024.0,3443.01,297.404,3.07,13.32,10.3\neca_resnet33ts,256,1024.0,3442.23,297.471,4.76,11.66,19.68\ntf_efficientnet_b2,260,1024.0,3440.99,297.578,1.02,13.83,9.11\ngcresnet33ts,256,1024.0,3424.64,298.998,4.76,11.68,19.88\ngcresnext26ts,288,1024.0,3414.23,299.91,3.07,13.33,10.48\nresnet50t,224,1024.0,3401.57,301.026,4.32,11.82,25.57\nvovnet39a,224,1024.0,3395.56,301.56,7.09,6.73,22.6\nresnet50d,224,1024.0,3380.59,302.894,4.35,11.92,25.58\nefficientvit_b2,224,1024.0,3359.89,304.76,1.6,14.62,24.33\nresnest14d,224,1024.0,3357.89,304.943,2.76,7.33,10.61\nvit_base_patch32_plus_256,256,1024.0,3354.04,305.293,7.7,6.35,119.48\nefficientnet_b0_gn,224,1024.0,3353.74,305.319,0.42,6.75,5.29\ncs3darknet_focus_l,288,1024.0,3340.22,306.556,
5.9,10.16,21.15\nselecsls84,224,1024.0,3335.07,307.029,5.9,7.57,50.95\nvit_tiny_patch16_384,384,1024.0,3332.37,307.277,3.16,12.08,5.79\nlegacy_seresnet50,224,1024.0,3325.14,307.946,3.88,10.6,28.09\ncoatnet_nano_cc_224,224,1024.0,3301.24,310.176,2.13,13.1,13.76\nfastvit_t8,256,1024.0,3298.88,310.398,0.7,8.63,4.03\nresnetblur18,288,1024.0,3292.39,311.01,3.87,5.6,11.69\nrepvit_m1_5,224,1024.0,3281.4,312.05,2.31,15.7,14.64\nese_vovnet39b,224,1024.0,3276.58,312.51,7.09,6.74,24.57\nlevit_512,224,1024.0,3274.29,312.728,5.64,10.22,95.17\nhaloregnetz_b,224,1024.0,3272.82,312.869,1.97,11.94,11.68\nmobilevit_xs,256,1024.0,3272.76,312.87,0.93,13.62,2.32\ncoat_lite_tiny,224,1024.0,3257.39,314.352,1.6,11.65,5.72\ncoatnext_nano_rw_224,224,1024.0,3256.31,314.455,2.36,10.68,14.7\neca_vovnet39b,224,1024.0,3252.14,314.859,7.09,6.74,22.6\nefficientnet_b2,288,1024.0,3249.31,315.132,1.12,16.2,9.11\nresnetaa50,224,1024.0,3245.58,315.495,5.15,11.64,25.56\ncoatnet_nano_rw_224,224,1024.0,3238.25,316.209,2.29,13.29,15.14\ncs3darknet_l,288,1024.0,3236.81,316.35,6.16,10.83,21.16\nconvnextv2_atto,288,1024.0,3226.1,317.401,0.91,6.3,3.71\nmobileone_s2,224,1024.0,3211.19,318.869,1.34,11.55,7.88\nseresnet50,224,1024.0,3200.07,319.981,4.11,11.13,28.09\nnf_regnet_b3,288,1024.0,3185.16,321.477,1.67,11.84,18.59\ncrossvit_small_240,240,1024.0,3184.9,321.506,5.09,11.34,26.86\nres2net50_48w_2s,224,1024.0,3168.87,323.132,4.18,11.72,25.29\nresnetaa34d,288,1024.0,3155.87,324.463,7.33,8.38,21.82\nvit_small_r26_s32_224,224,1024.0,3124.44,327.727,3.54,9.44,36.43\ndla60x,224,1024.0,3106.99,329.567,3.54,13.8,17.35\nefficientnet_b0_g8_gn,224,1024.0,3104.31,329.853,0.66,6.75,6.56\nresnext50_32x4d,224,1024.0,3099.2,330.397,4.26,14.4,25.03\nlevit_conv_512,224,1024.0,3078.02,332.67,5.64,10.22,95.17\nskresnet34,224,1024.0,3073.03,333.21,3.67,5.13,22.28\ncoat_lite_mini,224,1024.0,3058.66,334.777,2.0,12.25,11.01\nresnet26d,288,1024.0,3053.73,335.317,4.29,13.48,16.01\nmobileone_s0,224,1024.0,3053.01,335.391,1.09,15.48,5.29
\nlevit_512d,224,1024.0,3045.04,336.274,5.85,11.3,92.5\ncs3sedarknet_l,288,1024.0,3026.08,338.38,6.16,10.83,21.91\nresnetaa50d,224,1024.0,3022.22,338.813,5.39,12.44,25.58\nconvnext_tiny,224,1024.0,3015.62,339.555,4.47,13.44,28.59\neca_nfnet_l0,224,1024.0,3011.21,340.052,4.35,10.47,24.14\nxcit_nano_12_p16_384,384,1024.0,3011.18,340.055,1.64,12.14,3.05\nnfnet_l0,224,1024.0,3000.78,341.23,4.36,10.47,35.07\nresnetrs50,224,1024.0,2989.89,342.477,4.48,12.14,35.69\nefficientnet_cc_b1_8e,240,1024.0,2988.69,342.615,0.75,15.44,39.72\nregnetz_b16,288,1024.0,2987.05,342.79,2.39,16.43,9.72\nseresnet50t,224,1024.0,2984.21,343.128,4.32,11.83,28.1\necaresnet50d,224,1024.0,2975.54,344.128,4.35,11.93,25.58\nregnetz_c16,256,1024.0,2971.35,344.607,2.51,16.57,13.46\ndensenet121,224,1024.0,2967.84,345.021,2.87,6.9,7.98\ncrossvit_15_240,240,1024.0,2967.06,345.11,5.17,12.01,27.53\nresnet50s,224,1024.0,2958.0,346.169,5.47,13.52,25.68\nrexnetr_200,224,1024.0,2955.32,346.483,1.59,15.11,16.52\nmixnet_l,224,1024.0,2926.26,349.918,0.58,10.84,7.33\nxcit_tiny_24_p16_224,224,1024.0,2925.33,350.035,2.34,11.82,12.12\nlevit_conv_512d,224,1024.0,2899.99,353.091,5.85,11.3,92.5\ngcresnext50ts,256,1024.0,2897.54,353.393,3.75,15.46,15.67\nlambda_resnet26rpt_256,256,1024.0,2887.51,354.621,3.16,11.87,10.99\nresnext50d_32x4d,224,1024.0,2876.86,355.933,4.5,15.2,25.05\nresnet32ts,288,1024.0,2868.64,356.953,5.86,14.65,17.96\ncrossvit_15_dagger_240,240,1024.0,2848.99,359.413,5.5,12.68,28.21\ntiny_vit_21m_224,224,1024.0,2842.09,360.287,4.08,15.96,33.22\nvit_base_resnet26d_224,224,1024.0,2837.87,360.821,6.93,12.34,101.4\ntf_efficientnet_cc_b1_8e,240,1024.0,2835.77,361.09,0.75,15.44,39.72\ncspresnet50,256,1024.0,2834.55,361.245,4.54,11.5,21.62\nmobilevitv2_100,256,1024.0,2833.62,361.358,1.84,16.08,4.9\nresnet33ts,288,1024.0,2829.43,361.9,6.02,14.75,19.68\nvovnet57a,224,1024.0,2821.83,362.874,8.95,7.52,36.64\ndeit3_medium_patch16_224,224,1024.0,2805.09,365.038,7.53,10.99,38.85\ninception_next_tiny,224,1024.0,2798.9,3
65.847,4.19,11.98,28.06\ntf_mixnet_l,224,1024.0,2798.14,365.947,0.58,10.84,7.33\nres2next50,224,1024.0,2797.04,366.091,4.2,13.71,24.67\ndla60_res2next,224,1024.0,2795.54,366.285,3.49,13.17,17.03\ncoatnet_pico_rw_224,224,1024.0,2793.27,366.584,1.96,12.91,10.85\nconvnext_tiny_hnf,224,1024.0,2770.64,369.577,4.47,13.44,28.59\ngcresnet50t,256,1024.0,2767.9,369.943,5.42,14.67,25.9\nconvnextv2_femto,288,1024.0,2762.62,370.652,1.3,7.56,5.23\ntf_efficientnetv2_b3,300,1024.0,2757.15,371.387,3.04,15.74,14.36\nlegacy_seresnext50_32x4d,224,1024.0,2750.41,372.297,4.26,14.42,27.56\necaresnet50d_pruned,288,1024.0,2749.78,372.383,4.19,10.61,19.94\nres2net50_26w_4s,224,1024.0,2749.69,372.394,4.28,12.61,25.7\nseresnext50_32x4d,224,1024.0,2749.17,372.464,4.26,14.42,27.56\nvgg11_bn,224,1024.0,2746.28,372.857,7.62,7.44,132.87\nresmlp_24_224,224,1024.0,2745.97,372.9,5.96,10.91,30.02\nresnetv2_50x1_bit,224,1024.0,2742.41,373.383,4.23,11.11,25.55\neca_resnet33ts,288,1024.0,2737.24,374.089,6.02,14.76,19.68\nefficientnetv2_rw_t,288,1024.0,2736.91,374.133,3.19,16.42,13.65\nseresnet33ts,288,1024.0,2734.83,374.417,6.02,14.76,19.78\nnfnet_f0,192,1024.0,2731.03,374.934,7.21,10.16,71.49\nres2net50_14w_8s,224,1024.0,2724.75,375.804,4.21,13.28,25.06\nvisformer_small,224,1024.0,2720.95,376.328,4.88,11.43,40.22\nese_vovnet57b,224,1024.0,2711.8,377.598,8.95,7.52,38.61\ngcresnet33ts,288,1024.0,2705.39,378.493,6.02,14.78,19.88\ncspresnet50d,256,1024.0,2702.61,378.881,4.86,12.55,21.64\ntwins_svt_small,224,1024.0,2696.15,379.788,2.82,10.7,24.06\nefficientvit_l1,224,1024.0,2692.51,380.303,5.27,15.85,52.65\nresnetblur50,224,1024.0,2689.65,380.707,5.16,12.02,25.56\nseresnetaa50d,224,1024.0,2682.26,381.757,5.4,12.46,28.11\nfbnetv3_g,288,1024.0,2673.23,383.046,1.77,21.09,16.62\ncspresnet50w,256,1024.0,2671.97,383.228,5.04,12.19,28.12\ndla60_res2net,224,1024.0,2669.84,383.53,4.15,12.34,20.85\nconvnext_nano,288,1024.0,2669.05,383.645,4.06,13.84,15.59\ngc_efficientnetv2_rw_t,288,1024.0,2659.37,385.042,3.2,16.45,13.
68\ngcvit_xxtiny,224,1024.0,2658.4,385.182,2.14,15.36,12.0\npoolformerv2_s12,224,1024.0,2624.04,390.223,1.83,5.53,11.89\nvit_relpos_medium_patch16_rpn_224,224,1024.0,2618.88,390.989,7.5,12.13,38.73\nmobileone_s3,224,1024.0,2616.83,391.296,1.94,13.85,10.17\ndavit_tiny,224,1024.0,2612.7,391.92,4.47,17.08,28.36\nvit_relpos_medium_patch16_224,224,1024.0,2603.89,393.246,7.5,12.13,38.75\nresnet51q,256,1024.0,2602.52,393.454,6.38,16.55,35.7\ngmixer_24_224,224,1024.0,2594.59,394.657,5.28,14.45,24.72\nmaxvit_pico_rw_256,256,768.0,2593.58,296.105,1.68,18.77,7.46\nvit_srelpos_medium_patch16_224,224,1024.0,2591.17,395.176,7.49,11.32,38.74\nvit_relpos_medium_patch16_cls_224,224,1024.0,2587.16,395.789,7.55,13.3,38.76\nmaxvit_rmlp_pico_rw_256,256,768.0,2587.02,296.857,1.69,21.32,7.52\nnf_regnet_b3,320,1024.0,2582.41,396.514,2.05,14.61,18.59\nres2net50d,224,1024.0,2577.65,397.25,4.52,13.41,25.72\ncs3darknet_focus_x,256,1024.0,2569.33,398.536,8.03,10.69,35.02\ndensenetblur121d,224,1024.0,2559.52,400.063,3.11,7.9,8.0\ninception_v3,299,1024.0,2546.29,402.143,5.73,8.97,23.83\ncoatnet_0_rw_224,224,1024.0,2545.57,402.256,4.23,15.1,27.44\nrepvgg_b1g4,224,1024.0,2545.06,402.332,8.15,10.64,39.97\nregnetx_032,224,1024.0,2534.07,404.077,3.2,11.37,15.3\ntwins_pcpvt_small,224,1024.0,2533.92,404.104,3.68,15.51,24.11\nresnetblur50d,224,1024.0,2528.9,404.909,5.4,12.82,25.58\nrexnet_200,224,1024.0,2519.88,406.358,1.56,14.91,16.37\nresnetrs101,192,1024.0,2505.12,408.751,6.04,12.7,63.62\nresnet26t,320,1024.0,2502.87,409.119,5.24,16.44,16.01\nnf_ecaresnet50,224,1024.0,2502.03,409.253,4.21,11.13,25.56\nconvnext_nano_ols,288,1024.0,2497.73,409.961,4.38,15.5,15.65\nconvnextv2_nano,224,1024.0,2497.72,409.963,2.46,8.37,15.62\nnf_seresnet50,224,1024.0,2494.79,410.425,4.21,11.13,28.09\nregnety_032,224,1024.0,2483.68,412.275,3.2,11.26,19.44\nvit_medium_patch16_gap_240,240,1024.0,2477.36,413.332,8.6,12.57,44.4\ncs3darknet_x,256,1024.0,2475.51,413.641,8.38,11.35,35.05\ndensenet169,224,1024.0,2463.83,415.603,3.4
,7.3,14.15\nxcit_small_12_p16_224,224,1024.0,2460.07,416.237,4.82,12.57,26.25\ncspresnext50,256,1024.0,2452.36,417.546,4.05,15.86,20.57\nmobilevit_s,256,1024.0,2447.35,418.395,1.86,17.03,5.58\ndarknet53,256,1024.0,2439.82,419.693,9.31,12.39,41.61\ndarknetaa53,256,1024.0,2432.07,421.03,7.97,12.39,36.02\nedgenext_small,320,1024.0,2429.25,421.516,1.97,14.16,5.59\nseresnext26t_32x4d,288,1024.0,2412.74,424.404,4.46,16.68,16.81\nsehalonet33ts,256,1024.0,2403.77,425.986,3.55,14.7,13.69\nseresnext26d_32x4d,288,1024.0,2391.16,428.231,4.51,16.85,16.81\nresnet61q,256,1024.0,2368.17,432.39,7.8,17.01,36.85\nfastvit_t12,256,1024.0,2356.34,434.562,1.42,12.42,7.55\nvit_base_r26_s32_224,224,1024.0,2354.84,434.838,6.76,11.54,101.38\nfocalnet_tiny_srf,224,1024.0,2353.35,435.113,4.42,16.32,28.43\nresnetv2_101,224,1024.0,2342.24,437.176,7.83,16.23,44.54\ncs3sedarknet_x,256,1024.0,2329.01,439.66,8.38,11.35,35.4\nnf_resnet50,256,1024.0,2318.52,441.645,5.46,14.52,25.56\nxcit_nano_12_p8_224,224,1024.0,2310.67,443.15,2.16,15.71,3.05\nresnest26d,224,1024.0,2309.28,443.418,3.64,9.97,17.07\ncoatnet_rmlp_nano_rw_224,224,1024.0,2308.34,443.598,2.51,18.21,15.15\nresnetv2_50,288,1024.0,2302.9,444.644,6.79,18.37,25.55\necaresnet50t,256,1024.0,2299.59,445.285,5.64,15.45,25.57\ngmlp_s16_224,224,1024.0,2291.16,446.925,4.42,15.1,19.42\nefficientnet_lite3,300,1024.0,2290.17,447.117,1.65,21.85,8.2\ndm_nfnet_f0,192,1024.0,2271.28,450.836,7.21,10.16,71.49\nresnet101,224,1024.0,2263.99,452.287,7.83,16.23,44.55\necaresnet26t,320,1024.0,2258.47,453.393,5.24,16.44,16.01\nedgenext_base,256,1024.0,2256.96,453.695,3.85,15.58,18.51\nefficientnetv2_s,288,1024.0,2251.36,454.825,4.75,20.13,21.46\nskresnet50,224,1024.0,2250.82,454.933,4.11,12.5,25.8\ndla102,224,1024.0,2248.24,455.455,7.19,14.18,33.27\nedgenext_small_rw,320,1024.0,2240.98,456.929,2.46,14.85,7.83\necaresnetlight,288,1024.0,2235.21,458.11,6.79,13.91,30.16\ndpn68b,288,1024.0,2234.13,458.331,3.89,17.3,12.61\ngcresnext50ts,288,1024.0,2232.45,458.676,4.75,19.
57,15.67\nfastvit_s12,256,1024.0,2229.72,459.239,1.82,13.67,9.47\nfastvit_sa12,256,1024.0,2225.03,460.206,1.96,13.83,11.58\nfocalnet_tiny_lrf,224,1024.0,2222.33,460.766,4.49,17.76,28.65\nresnetv2_101d,224,1024.0,2216.51,461.976,8.07,17.04,44.56\nresnet101c,224,1024.0,2202.12,464.995,8.08,17.04,44.57\nvit_base_resnet50d_224,224,1024.0,2199.36,465.578,8.68,16.1,110.97\nregnetv_040,288,1024.0,2190.89,467.375,6.6,20.3,20.64\nvit_medium_patch16_gap_256,256,1024.0,2190.03,467.563,9.78,14.29,38.86\nresnet50,288,1024.0,2185.5,468.532,6.8,18.37,25.56\ngcresnet50t,288,1024.0,2180.99,469.5,6.86,18.57,25.9\nregnety_040,288,1024.0,2169.28,472.031,6.61,20.3,20.65\nvgg13,224,1024.0,2159.6,474.15,11.31,12.25,133.05\neva02_small_patch14_224,224,1024.0,2151.59,475.915,5.53,12.34,21.62\nvit_medium_patch16_reg4_gap_256,256,1024.0,2149.02,476.485,9.93,14.51,38.87\nefficientnetv2_rw_s,288,1024.0,2146.83,476.971,4.91,21.41,23.94\necaresnet101d_pruned,288,1024.0,2141.83,478.084,5.75,12.71,24.88\nmobilevitv2_125,256,1024.0,2139.71,478.555,2.86,20.1,7.48\nvit_medium_patch16_reg4_256,256,1024.0,2136.17,479.352,9.97,14.56,38.87\nskresnet50d,224,1024.0,2134.1,479.815,4.36,13.31,25.82\npvt_v2_b2,224,1024.0,2119.72,483.066,3.9,24.96,25.36\nhrnet_w18_ssld,224,1024.0,2114.47,484.27,4.32,16.31,21.3\nconvnextv2_pico,288,1024.0,2113.62,484.464,2.27,10.08,9.07\neva02_tiny_patch14_336,336,1024.0,2113.11,484.582,3.14,13.85,5.76\nefficientvit_l2,224,1024.0,2109.14,485.494,6.97,19.58,63.71\nhrnet_w18,224,1024.0,2100.77,487.428,4.32,16.31,21.3\nregnetx_040,224,1024.0,2099.85,487.636,3.99,12.2,22.12\ntf_efficientnet_lite3,300,1024.0,2090.5,489.823,1.65,21.85,8.2\nwide_resnet50_2,224,1024.0,2081.66,491.904,11.43,14.4,68.88\nresnet51q,288,1024.0,2069.71,494.744,8.07,20.94,35.7\npoolformer_s24,224,1024.0,2067.46,495.278,3.41,10.68,21.39\nsebotnet33ts_256,256,512.0,2066.45,247.758,3.89,17.46,13.7\nefficientformer_l3,224,1024.0,2064.62,495.963,3.93,12.01,31.41\nresnest50d_1s4x24d,224,1024.0,2057.55,497.667,4.43,1
3.57,25.68\ngcvit_xtiny,224,1024.0,2053.45,498.662,2.93,20.26,19.98\ncspdarknet53,256,1024.0,2048.51,499.863,6.57,16.81,27.64\ncrossvit_18_240,240,1024.0,2029.53,504.539,8.21,16.14,43.27\nmixnet_xl,224,1024.0,2029.05,504.653,0.93,14.57,11.9\nvit_base_patch32_384,384,1024.0,2028.15,504.881,12.67,12.14,88.3\nefficientnet_b3,288,1024.0,2027.72,504.989,1.63,21.49,12.23\nvit_base_patch32_clip_384,384,1024.0,2026.31,505.34,12.67,12.14,88.3\nresnet50t,288,1024.0,2024.16,505.879,7.14,19.53,25.57\ndla102x,224,1024.0,2023.35,506.08,5.89,19.42,26.31\nlegacy_seresnet101,224,1024.0,2012.58,508.788,7.61,15.74,49.33\nresnet50d,288,1024.0,2012.14,508.9,7.19,19.7,25.58\ncs3edgenet_x,256,1024.0,2002.36,511.384,11.53,12.92,47.82\nresnetaa101d,224,1024.0,1994.67,513.346,9.12,17.56,44.57\nrepvgg_b1,224,1024.0,1994.42,513.418,13.16,10.64,57.42\nres2net50_26w_6s,224,1024.0,1979.48,517.295,6.33,15.28,37.05\nregnetz_d32,256,1024.0,1978.14,517.642,5.98,23.74,27.58\ncs3sedarknet_xdw,256,1024.0,1970.5,519.653,5.97,17.18,21.6\nresnetaa50,288,1024.0,1968.61,520.152,8.52,19.24,25.56\nseresnet101,224,1024.0,1966.15,520.803,7.84,16.27,49.33\nresnet101s,224,1024.0,1964.56,521.226,9.19,18.64,44.67\ncs3darknet_x,288,1024.0,1958.87,522.739,10.6,14.36,35.05\ncrossvit_18_dagger_240,240,1024.0,1955.55,523.625,8.65,16.91,44.27\nswin_tiny_patch4_window7_224,224,1024.0,1951.67,524.668,4.51,17.06,28.29\ntresnet_v2_l,224,1024.0,1947.69,525.738,8.85,16.34,46.17\nese_vovnet39b,288,1024.0,1941.03,527.543,11.71,11.13,24.57\nregnetz_d8,256,1024.0,1940.13,527.785,3.97,23.74,23.37\ntf_efficientnetv2_s,300,1024.0,1939.51,527.958,5.35,22.73,21.46\nregnetz_c16,320,1024.0,1933.29,529.65,3.92,25.88,13.46\ncoatnet_bn_0_rw_224,224,1024.0,1926.49,531.525,4.48,18.41,27.44\ndarknet53,288,1024.0,1924.44,532.092,11.78,15.68,41.61\nresnext101_32x4d,224,1024.0,1923.83,532.261,8.01,21.23,44.18\ncoatnet_rmlp_0_rw_224,224,1024.0,1920.22,533.259,4.52,21.26,27.45\nxcit_tiny_12_p16_384,384,1024.0,1917.57,533.997,3.64,18.25,6.72\ndarknet
aa53,288,1024.0,1915.93,534.454,10.08,15.68,36.02\nmobileone_s4,224,1024.0,1915.84,534.474,3.04,17.74,14.95\nmaxxvit_rmlp_nano_rw_256,256,768.0,1913.61,401.326,4.17,21.53,16.78\nnest_tiny,224,1024.0,1909.31,536.303,5.24,14.75,17.06\nregnetz_040,256,1024.0,1906.99,536.946,4.06,24.19,27.12\nnf_regnet_b4,320,1024.0,1906.99,536.957,3.29,19.88,30.21\nseresnet50,288,1024.0,1902.22,538.306,6.8,18.39,28.09\npvt_v2_b2_li,224,1024.0,1897.86,539.539,3.77,25.04,22.55\nregnetz_040_h,256,1024.0,1896.27,539.981,4.12,24.29,28.94\ndensenet201,224,1024.0,1895.14,540.319,4.34,7.85,20.01\nhalonet50ts,256,1024.0,1887.53,542.495,5.3,19.2,22.73\nnest_tiny_jx,224,1024.0,1885.06,543.199,5.24,14.75,17.06\nvgg13_bn,224,1024.0,1884.94,543.241,11.33,12.25,133.05\nregnetx_080,224,1024.0,1883.47,543.661,8.02,14.06,39.57\nvit_large_patch32_224,224,1024.0,1882.39,543.977,15.27,11.11,305.51\necaresnet101d,224,1024.0,1880.92,544.404,8.08,17.07,44.57\nresnet61q,288,1024.0,1874.14,546.373,9.87,21.52,36.85\nnf_resnet101,224,1024.0,1864.42,549.218,8.01,16.23,44.55\ncs3se_edgenet_x,256,1024.0,1859.86,550.568,11.53,12.94,50.72\nrepvit_m2_3,224,1024.0,1852.95,552.61,4.57,26.21,23.69\nresmlp_36_224,224,1024.0,1843.66,555.406,8.91,16.33,44.69\ncs3sedarknet_x,288,1024.0,1843.16,555.556,10.6,14.37,35.4\nresnext50_32x4d,288,1024.0,1841.23,556.139,7.04,23.81,25.03\nconvnext_small,224,1024.0,1838.66,556.915,8.71,21.56,50.22\nconvnext_tiny,288,1024.0,1835.18,557.972,7.39,22.21,28.59\nresnetv2_50d_gn,224,1024.0,1829.29,559.767,4.38,11.92,25.57\nresnetaa50d,288,1024.0,1827.2,560.408,8.92,20.57,25.58\npit_b_224,224,1024.0,1823.77,561.458,10.56,16.6,73.76\neca_nfnet_l0,288,1024.0,1822.69,561.796,7.12,17.29,24.14\nnfnet_l0,288,1024.0,1817.7,563.332,7.13,17.29,35.07\nsequencer2d_s,224,1024.0,1816.41,563.738,4.96,11.31,27.65\npit_b_distilled_224,224,1024.0,1810.4,565.6,10.63,16.67,74.79\nnf_resnet50,288,1024.0,1794.38,570.655,6.88,18.37,25.56\ntwins_pcpvt_base,224,1024.0,1790.37,571.935,6.46,21.35,43.83\nrexnetr_200,288,7
68.0,1782.92,430.745,2.62,24.96,16.52\nseresnet50t,288,1024.0,1780.59,575.079,7.14,19.55,28.1\ncait_xxs24_224,224,1024.0,1779.24,575.513,2.53,20.29,11.96\nswin_s3_tiny_224,224,1024.0,1777.31,576.139,4.64,19.13,28.33\nresnet50_gn,224,1024.0,1776.88,576.279,4.14,11.11,25.56\necaresnet50d,288,1024.0,1775.84,576.616,7.19,19.72,25.58\nresnetblur101d,224,1024.0,1765.86,579.878,9.12,17.94,44.57\ndensenet121,288,1024.0,1761.12,581.437,4.74,11.41,7.98\ncoat_lite_small,224,1024.0,1760.12,581.767,3.96,22.09,19.84\nmixer_b16_224,224,1024.0,1758.48,582.299,12.62,14.53,59.88\nmobilevitv2_150,256,768.0,1748.31,439.266,4.09,24.11,10.59\nefficientvit_b3,224,1024.0,1742.56,587.628,3.99,26.9,48.65\nrexnetr_300,224,1024.0,1736.82,589.571,3.39,22.16,34.81\nvgg16,224,1024.0,1730.88,591.595,15.47,13.56,138.36\nmaxxvitv2_nano_rw_256,256,768.0,1724.32,445.384,6.12,19.66,23.7\nres2net101_26w_4s,224,1024.0,1723.01,594.296,8.1,18.45,45.21\nresnext50d_32x4d,288,1024.0,1717.01,596.374,7.44,25.13,25.05\nmaxvit_nano_rw_256,256,768.0,1709.05,449.363,4.26,25.76,15.45\nlegacy_seresnext101_32x4d,224,1024.0,1707.02,599.865,8.02,21.26,48.96\nseresnext101_32x4d,224,1024.0,1706.74,599.963,8.02,21.26,48.96\nmaxvit_rmlp_nano_rw_256,256,768.0,1705.93,450.183,4.28,27.4,15.5\nresnetv2_50d_frn,224,1024.0,1703.71,601.028,4.33,11.92,25.59\nmobilevitv2_175,256,512.0,1701.95,300.817,5.54,28.13,14.25\ntf_efficientnet_b3,300,1024.0,1694.25,604.385,1.87,23.83,12.23\nconvnext_tiny_hnf,288,1024.0,1681.52,608.96,7.39,22.21,28.59\nese_vovnet39b_evos,224,1024.0,1671.22,612.716,7.07,6.74,24.58\nres2net50_26w_8s,224,1024.0,1656.9,618.009,8.37,17.95,48.4\nresnet101d,256,1024.0,1654.59,618.871,10.55,22.25,44.57\ntresnet_l,224,1024.0,1652.13,619.794,10.9,11.9,55.99\nres2net101d,224,1024.0,1652.09,619.808,8.35,19.25,45.23\nmixer_l32_224,224,1024.0,1651.22,620.129,11.27,19.86,206.94\nregnetz_b16_evos,224,1024.0,1648.87,621.016,1.43,9.95,9.74\nbotnet50ts_256,256,512.0,1645.51,311.14,5.54,22.23,22.74\nefficientnet_b3,320,1024.0,164
1.76,623.708,2.01,26.52,12.23\nseresnext50_32x4d,288,1024.0,1638.34,625.012,7.04,23.82,27.56\ncoatnet_0_224,224,512.0,1634.58,313.22,4.43,21.14,25.04\nswinv2_cr_tiny_224,224,1024.0,1629.27,628.491,4.66,28.45,28.33\ninception_next_small,224,1024.0,1628.58,628.755,8.36,19.27,49.37\nresnetv2_152,224,1024.0,1628.46,628.801,11.55,22.56,60.19\nregnetx_064,224,1024.0,1628.2,628.898,6.49,16.37,26.21\nhrnet_w32,224,1024.0,1627.55,629.157,8.97,22.02,41.23\nconvnextv2_tiny,224,1024.0,1627.26,629.266,4.47,13.44,28.64\nseresnetaa50d,288,1024.0,1622.33,631.178,8.92,20.59,28.11\ndavit_small,224,1024.0,1614.32,634.313,8.69,27.54,49.75\nregnety_040_sgn,224,1024.0,1612.57,634.996,4.03,12.29,20.65\nlegacy_xception,299,768.0,1604.43,478.663,8.4,35.83,22.86\nswinv2_cr_tiny_ns_224,224,1024.0,1600.49,639.793,4.66,28.45,28.33\nresnetblur50,288,1024.0,1598.7,640.511,8.52,19.87,25.56\nefficientnet_el,300,1024.0,1595.26,641.889,8.0,30.7,10.59\nefficientnet_el_pruned,300,1024.0,1592.53,642.988,8.0,30.7,10.59\nresnet152,224,1024.0,1589.58,644.183,11.56,22.56,60.19\ndeit_base_patch16_224,224,1024.0,1581.19,647.603,16.87,16.49,86.57\ncs3edgenet_x,288,1024.0,1577.26,649.216,14.59,16.36,47.82\ndeit_base_distilled_patch16_224,224,1024.0,1575.74,649.842,16.95,16.58,87.34\nvit_base_patch16_224,224,1024.0,1574.94,650.173,16.87,16.49,86.57\nvit_base_patch16_224_miil,224,1024.0,1574.63,650.301,16.88,16.5,94.4\nvit_base_patch16_clip_224,224,1024.0,1574.46,650.371,16.87,16.49,86.57\nvit_base_patch16_siglip_224,224,1024.0,1571.54,651.577,17.02,16.71,92.88\nresnetv2_152d,224,1024.0,1564.52,654.501,11.8,23.36,60.2\nvit_base_patch16_gap_224,224,1024.0,1563.13,655.085,16.78,16.41,86.57\nhalo2botnet50ts_256,256,1024.0,1562.09,655.52,5.02,21.78,22.64\nresnet152c,224,1024.0,1558.11,657.195,11.8,23.36,60.21\nese_vovnet99b,224,1024.0,1554.99,658.512,16.51,11.27,63.2\nvit_small_resnet50d_s16_224,224,1024.0,1551.97,659.792,13.0,21.12,57.53\nnf_seresnet101,224,1024.0,1549.92,660.662,8.02,16.27,49.33\nnf_ecaresnet101,22
4,1024.0,1549.88,660.683,8.01,16.27,44.55\ntf_efficientnet_el,300,1024.0,1543.58,663.384,8.0,30.7,10.59\ncoatnet_rmlp_1_rw_224,224,1024.0,1542.97,663.643,7.44,28.08,41.69\nnfnet_f0,256,1024.0,1541.8,664.144,12.62,18.05,71.49\nvgg16_bn,224,1024.0,1533.25,667.85,15.5,13.56,138.37\nresnest50d,224,1024.0,1530.42,669.084,5.4,14.36,27.48\ncaformer_s18,224,1024.0,1528.28,670.023,3.9,15.18,26.34\npvt_v2_b3,224,1024.0,1527.57,670.328,6.71,33.8,45.24\ndensenetblur121d,288,1024.0,1521.38,673.062,5.14,13.06,8.0\nmaxvit_tiny_rw_224,224,768.0,1520.98,504.928,4.93,28.54,29.06\nmvitv2_tiny,224,1024.0,1518.09,674.509,4.7,21.16,24.17\nvit_base_patch16_rpn_224,224,1024.0,1516.7,675.134,16.78,16.41,86.54\nconvnextv2_nano,288,768.0,1514.74,507.006,4.06,13.84,15.62\nregnety_032,288,1024.0,1514.59,676.077,5.29,18.61,19.44\nrexnet_300,224,1024.0,1508.74,678.701,3.44,22.4,34.71\nresnetblur50d,288,1024.0,1506.45,679.732,8.92,21.19,25.58\ndeit3_base_patch16_224,224,1024.0,1497.14,683.959,16.87,16.49,86.59\nconvit_small,224,1024.0,1494.54,685.148,5.76,17.87,27.78\nvit_base_patch32_clip_448,448,1024.0,1493.83,685.476,17.21,16.49,88.34\ndla169,224,1024.0,1487.25,688.504,11.6,20.2,53.39\nskresnext50_32x4d,224,1024.0,1470.99,696.12,4.5,17.18,27.48\nxcit_tiny_12_p8_224,224,1024.0,1465.13,698.903,4.81,23.6,6.71\nvit_small_patch16_36x1_224,224,1024.0,1460.65,701.044,12.63,24.59,64.67\necaresnet50t,320,1024.0,1451.46,705.484,8.82,24.13,25.57\nbeitv2_base_patch16_224,224,1024.0,1448.02,707.161,16.87,16.49,86.53\nvgg19,224,1024.0,1441.93,710.149,19.63,14.86,143.67\nbeit_base_patch16_224,224,1024.0,1440.48,710.862,16.87,16.49,86.53\nhrnet_w30,224,1024.0,1436.17,712.996,8.15,21.21,37.71\nedgenext_base,320,1024.0,1435.98,713.087,6.01,24.32,18.51\nresnet152s,224,1024.0,1434.4,713.876,12.92,24.96,60.32\nconvformer_s18,224,1024.0,1427.19,717.481,3.96,15.82,26.77\nresnetv2_50d_evos,224,1024.0,1426.57,717.793,4.33,11.92,25.59\nfocalnet_small_srf,224,1024.0,1426.35,717.904,8.62,26.26,49.89\nsequencer2d_m,224,102
4.0,1413.9,724.228,6.55,14.26,38.31\nvit_relpos_base_patch16_rpn_224,224,1024.0,1408.36,727.069,16.8,17.63,86.41\nvolo_d1_224,224,1024.0,1407.83,727.348,6.94,24.43,26.63\nregnety_080,224,1024.0,1407.5,727.512,8.0,17.97,39.18\nvit_small_patch16_18x2_224,224,1024.0,1407.09,727.729,12.63,24.59,64.67\ngcvit_tiny,224,1024.0,1405.32,728.65,4.79,29.82,28.22\ndpn92,224,1024.0,1404.08,729.292,6.54,18.21,37.67\nvit_relpos_base_patch16_224,224,1024.0,1402.98,729.864,16.8,17.63,86.43\nresnetv2_101,288,1024.0,1402.28,730.227,12.94,26.83,44.54\nregnetx_160,224,1024.0,1400.84,730.974,15.99,25.52,54.28\ndla102x2,224,1024.0,1395.12,733.975,9.34,29.91,41.28\nlegacy_seresnet152,224,1024.0,1394.86,734.109,11.33,22.08,66.82\nvit_relpos_base_patch16_clsgap_224,224,1024.0,1394.83,734.131,16.88,17.72,86.43\nvit_relpos_base_patch16_cls_224,224,1024.0,1392.12,735.556,16.88,17.72,86.43\nvit_small_patch16_384,384,1024.0,1390.73,736.291,12.45,24.15,22.2\npoolformer_s36,224,1024.0,1388.46,737.493,5.0,15.82,30.86\nvit_base_patch16_clip_quickgelu_224,224,1024.0,1388.13,737.672,16.87,16.49,86.19\ndensenet161,224,1024.0,1384.23,739.75,7.79,11.06,28.68\nflexivit_base,240,1024.0,1380.45,741.777,19.35,18.92,86.59\nefficientformerv2_s0,224,1024.0,1377.72,743.244,0.41,5.3,3.6\nseresnet152,224,1024.0,1371.27,746.737,11.57,22.61,66.82\npoolformerv2_s24,224,1024.0,1356.43,754.905,3.42,10.68,21.34\nresnet101,288,1024.0,1354.29,756.102,12.95,26.83,44.55\nfocalnet_small_lrf,224,1024.0,1339.63,764.378,8.74,28.61,50.34\ninception_v4,299,1024.0,1338.22,765.183,12.28,15.09,42.68\nrepvgg_b2,224,1024.0,1336.97,765.895,20.45,12.9,89.02\nnf_regnet_b4,384,1024.0,1327.28,771.488,4.7,28.61,30.21\nrepvgg_b2g4,224,1024.0,1323.55,773.658,12.63,12.9,61.76\neca_nfnet_l1,256,1024.0,1319.97,775.763,9.62,22.04,41.41\nfastvit_sa24,256,1024.0,1310.4,781.428,3.79,23.92,21.55\nxcit_small_24_p16_224,224,1024.0,1307.21,783.335,9.1,23.63,47.67\ntwins_pcpvt_large,224,1024.0,1303.57,785.524,9.53,30.21,60.99\nvit_base_patch16_xp_224,224,1
024.0,1302.82,785.975,16.85,16.49,86.51\nmaxvit_tiny_tf_224,224,768.0,1301.05,590.28,5.42,31.21,30.92\ndeit3_small_patch16_384,384,1024.0,1298.34,788.686,12.45,24.15,22.21\ncoatnet_rmlp_1_rw2_224,224,1024.0,1296.36,789.892,7.71,32.74,41.72\ncoatnet_1_rw_224,224,1024.0,1295.8,790.234,7.63,27.22,41.72\nregnety_080_tv,224,1024.0,1291.63,792.778,8.51,19.73,39.38\nvgg19_bn,224,1024.0,1290.82,793.286,19.66,14.86,143.68\nmixnet_xxl,224,768.0,1286.88,596.774,2.04,23.43,23.96\ndm_nfnet_f0,256,1024.0,1286.75,795.79,12.62,18.05,71.49\nefficientnet_b4,320,768.0,1280.17,599.91,3.13,34.76,19.34\nhrnet_w18_ssld,288,1024.0,1279.49,800.308,7.14,26.96,21.3\nmaxxvit_rmlp_tiny_rw_256,256,768.0,1274.84,602.417,6.36,32.69,29.64\nefficientformerv2_s1,224,1024.0,1271.59,805.28,0.67,7.66,6.19\nconvnext_base,224,1024.0,1268.86,807.011,15.38,28.75,88.59\nmobilevitv2_200,256,512.0,1268.57,403.59,7.22,32.15,18.45\nregnetz_d32,320,1024.0,1265.97,808.844,9.33,37.08,27.58\nefficientnetv2_s,384,1024.0,1265.12,809.401,8.44,35.77,21.46\ntwins_svt_base,224,1024.0,1261.93,811.442,8.36,20.42,56.07\nwide_resnet50_2,288,1024.0,1242.89,823.878,18.89,23.81,68.88\nregnetz_d8,320,1024.0,1242.36,824.221,6.19,37.08,23.37\nregnetz_040,320,512.0,1238.82,413.274,6.35,37.78,27.12\nregnetz_040_h,320,512.0,1231.07,415.879,6.43,37.94,28.94\nnest_small,224,1024.0,1230.37,832.252,9.41,22.88,38.35\ntf_efficientnetv2_s,384,1024.0,1224.58,836.191,8.44,35.77,21.46\nnest_small_jx,224,1024.0,1220.76,838.798,9.41,22.88,38.35\nmaxvit_tiny_rw_256,256,768.0,1213.37,632.937,6.44,37.27,29.07\nmaxvit_rmlp_tiny_rw_256,256,768.0,1210.44,634.468,6.47,39.84,29.15\nvit_base_patch16_siglip_256,256,1024.0,1208.23,847.511,22.23,21.83,92.93\nefficientnetv2_rw_s,384,1024.0,1208.22,847.514,8.72,38.03,23.94\nresnetaa101d,288,1024.0,1207.75,847.844,15.07,29.03,44.57\nswin_small_patch4_window7_224,224,1024.0,1206.81,848.507,8.77,27.47,49.61\ndpn98,224,1024.0,1206.02,849.061,11.73,25.2,61.57\nswinv2_tiny_window8_256,256,1024.0,1197.34,855.217,5.96
,24.57,28.35\ncs3se_edgenet_x,320,1024.0,1196.49,855.827,18.01,20.21,50.72\nresnext101_64x4d,224,1024.0,1196.17,856.053,15.52,31.21,83.46\ncait_xxs36_224,224,1024.0,1193.04,858.302,3.77,30.34,17.3\nresnext101_32x8d,224,1024.0,1188.06,861.896,16.48,31.21,88.79\nseresnet101,288,1024.0,1178.9,868.597,12.95,26.87,49.33\nresnet152d,256,1024.0,1177.58,869.569,15.41,30.51,60.21\nwide_resnet101_2,224,1024.0,1172.43,873.387,22.8,21.23,126.89\ncrossvit_base_240,240,1024.0,1171.25,874.269,20.13,22.67,105.03\nresnet200,224,1024.0,1159.72,882.961,15.07,32.19,64.67\ninception_resnet_v2,299,1024.0,1156.1,885.722,13.18,25.06,55.84\nrexnetr_300,288,512.0,1153.3,443.932,5.59,36.61,34.81\nresnetrs101,288,1024.0,1142.76,896.066,13.56,28.53,63.62\ndavit_base,224,1024.0,1141.57,896.996,15.36,36.72,87.95\ntresnet_xl,224,1024.0,1136.08,901.333,15.2,15.34,78.44\ncoat_tiny,224,1024.0,1135.01,902.184,4.35,27.2,5.5\ntnt_s_patch16_224,224,1024.0,1134.91,902.262,5.24,24.37,23.76\nmvitv2_small,224,1024.0,1131.08,905.308,7.0,28.08,34.87\necaresnet101d,288,1024.0,1130.54,905.749,13.35,28.19,44.57\nvit_base_patch16_reg8_gap_256,256,1024.0,1124.62,910.517,22.6,22.09,86.62\nmaxvit_tiny_pm_256,256,768.0,1121.86,684.565,6.31,40.82,30.09\nhrnet_w40,224,1024.0,1119.9,914.356,12.75,25.29,57.56\nconvnext_small,288,1024.0,1119.4,914.761,14.39,35.65,50.22\nnfnet_f1,224,1024.0,1117.42,916.384,17.87,22.94,132.63\nefficientnet_lite4,380,768.0,1117.23,687.403,4.04,45.66,13.01\npvt_v2_b4,224,1024.0,1107.81,924.328,9.83,48.14,62.56\nseresnext101_64x4d,224,1024.0,1107.71,924.416,15.53,31.25,88.23\nseresnext101_32x8d,224,1024.0,1101.53,929.602,16.48,31.25,93.57\nresnetv2_50d_gn,288,1024.0,1100.54,930.437,7.24,19.7,25.57\ncoatnet_1_224,224,512.0,1098.68,466.003,8.28,31.3,42.23\nrepvgg_b3g4,224,1024.0,1097.61,932.923,17.89,15.1,83.83\nsamvit_base_patch16_224,224,1024.0,1097.38,933.118,16.83,17.2,86.46\neva02_base_patch16_clip_224,224,1024.0,1094.75,935.361,16.9,18.91,86.26\nmvitv2_small_cls,224,1024.0,1086.56,942.407,7
.04,28.17,34.87\nvit_large_r50_s32_224,224,1024.0,1082.13,946.268,19.45,22.22,328.99\ninception_next_base,224,1024.0,1079.66,948.435,14.85,25.69,86.67\nresnet50_gn,288,1024.0,1076.3,951.4,6.85,18.37,25.56\npvt_v2_b5,224,1024.0,1073.94,953.474,11.39,44.23,81.96\nseresnext101d_32x8d,224,1024.0,1071.41,955.74,16.72,32.05,93.59\nefficientnetv2_m,320,1024.0,1070.2,956.818,11.01,39.97,54.14\nvit_small_r26_s32_384,384,1024.0,1066.07,960.526,10.24,27.67,36.47\nresnetblur101d,288,1024.0,1059.66,966.334,15.07,29.65,44.57\nresnet101d,320,1024.0,1045.1,979.801,16.48,34.77,44.57\nregnetz_e8,256,1024.0,1042.94,981.82,9.91,40.94,57.7\ntf_efficientnet_lite4,380,768.0,1038.99,739.169,4.04,45.66,13.01\nxception41p,299,768.0,1034.81,742.157,9.25,39.86,26.91\nrepvgg_b3,224,1024.0,1031.23,992.974,29.16,15.1,123.09\nxcit_tiny_24_p16_384,384,1024.0,1026.84,997.227,6.87,34.29,12.12\nresnetrs152,256,1024.0,1024.28,999.711,15.59,30.83,86.62\nseresnet152d,256,1024.0,1022.13,1001.814,15.42,30.56,66.84\nswinv2_cr_small_224,224,1024.0,1005.65,1018.232,9.07,50.27,49.7\nvit_base_patch16_plus_240,240,1024.0,1004.91,1018.982,26.31,22.07,117.56\nregnetz_b16_evos,288,768.0,997.65,769.796,2.36,16.43,9.74\nfocalnet_base_srf,224,1024.0,995.12,1029.007,15.28,35.01,88.15\nswinv2_cr_small_ns_224,224,1024.0,993.65,1030.528,9.08,50.27,49.7\nconvnextv2_small,224,1024.0,992.07,1032.17,8.71,21.56,50.32\nconvnextv2_tiny,288,768.0,989.58,776.074,7.39,22.21,28.64\nvit_small_patch8_224,224,1024.0,985.02,1039.56,16.76,32.86,21.67\nregnety_040_sgn,288,1024.0,979.5,1045.407,6.67,20.3,20.65\nregnetz_c16_evos,256,768.0,978.11,785.174,2.48,16.57,13.49\nvit_base_r50_s16_224,224,1024.0,971.42,1054.108,20.94,27.88,97.89\nhrnet_w44,224,1024.0,967.41,1058.48,14.94,26.92,67.06\nefficientformer_l7,224,1024.0,966.26,1059.742,10.17,24.45,82.23\nhrnet_w48_ssld,224,1024.0,963.59,1062.678,17.34,28.56,77.47\nhrnet_w48,224,1024.0,962.72,1063.645,17.34,28.56,77.47\npoolformer_m36,224,1024.0,959.97,1066.674,8.8,22.02,56.17\nresnet152,288
,1024.0,955.06,1072.17,19.11,37.28,60.19\ncait_s24_224,224,1024.0,951.69,1075.97,9.35,40.58,46.92\ntiny_vit_21m_384,384,512.0,946.04,541.193,11.94,46.84,21.23\nfocalnet_base_lrf,224,1024.0,946.02,1082.418,15.43,38.13,88.75\ndm_nfnet_f1,224,1024.0,943.8,1084.958,17.87,22.94,132.63\nefficientnet_b3_gn,288,512.0,943.58,542.602,1.74,23.35,11.73\nefficientnetv2_rw_m,320,1024.0,934.42,1095.856,12.72,47.14,53.24\nvit_relpos_base_patch16_plus_240,240,1024.0,933.99,1096.357,26.21,23.41,117.38\ngmlp_b16_224,224,1024.0,931.13,1099.724,15.78,30.21,73.08\nfastvit_sa36,256,1024.0,928.53,1102.809,5.62,34.02,31.53\nxception41,299,768.0,927.7,827.842,9.28,39.86,26.97\neva02_small_patch14_336,336,1024.0,926.94,1104.696,12.41,27.7,22.13\nmaxvit_rmlp_small_rw_224,224,768.0,923.72,831.408,10.48,42.44,64.9\nsequencer2d_l,224,1024.0,917.56,1115.991,9.74,22.12,54.3\npoolformerv2_s36,224,1024.0,914.51,1119.704,5.01,15.82,30.79\nxcit_medium_24_p16_224,224,1024.0,901.57,1135.786,16.13,31.71,84.4\ncoat_mini,224,1024.0,900.78,1136.787,6.82,33.68,10.34\ncoat_lite_medium,224,1024.0,898.48,1139.693,9.81,40.06,44.57\nswin_s3_small_224,224,768.0,882.63,870.118,9.43,37.84,49.74\nefficientnet_b3_g8_gn,288,512.0,882.63,580.072,2.59,23.35,14.25\ndpn131,224,1024.0,878.67,1165.389,16.09,32.97,79.25\nlevit_384_s8,224,512.0,874.93,585.181,9.98,35.86,39.12\nefficientnet_b4,384,512.0,874.47,585.489,4.51,50.04,19.34\nvit_medium_patch16_gap_384,384,1024.0,873.17,1172.722,22.01,32.15,39.03\nnest_base,224,1024.0,871.22,1175.339,16.71,30.51,67.72\nnf_regnet_b5,384,1024.0,867.94,1179.793,7.95,42.9,49.74\nresnet200d,256,1024.0,866.43,1181.848,20.0,43.09,64.69\nmaxvit_small_tf_224,224,512.0,864.97,591.915,11.39,46.31,68.93\nnest_base_jx,224,1024.0,863.51,1185.835,16.71,30.51,67.72\nxcit_small_12_p16_384,384,1024.0,860.6,1189.852,14.14,36.5,26.25\nresnetv2_50d_evos,288,1024.0,857.98,1193.488,7.15,19.7,25.59\nswin_base_patch4_window7_224,224,1024.0,857.23,1194.527,15.47,36.63,87.77\ngcvit_small,224,1024.0,850.2,1204.41
6,8.57,41.61,51.09\ncrossvit_15_dagger_408,408,1024.0,849.94,1204.779,16.07,37.0,28.5\neca_nfnet_l1,320,1024.0,845.79,1210.693,14.92,34.42,41.41\ntf_efficientnet_b4,380,512.0,836.31,612.204,4.49,49.49,19.34\nregnety_080,288,1024.0,834.08,1227.682,13.22,29.69,39.18\nlevit_conv_384_s8,224,512.0,831.47,615.767,9.98,35.86,39.12\ntwins_svt_large,224,1024.0,829.67,1234.208,14.84,27.23,99.27\nseresnet152,288,1024.0,826.68,1238.676,19.11,37.34,66.82\nxception65p,299,768.0,826.46,929.251,13.91,52.48,39.82\neva02_base_patch14_224,224,1024.0,822.18,1245.459,22.0,24.67,85.76\ncaformer_s36,224,1024.0,811.28,1262.182,7.55,29.29,39.3\nmaxxvit_rmlp_small_rw_256,256,768.0,805.75,953.134,14.21,47.76,66.01\ncoatnet_2_rw_224,224,512.0,802.77,637.783,14.55,39.37,73.87\nswinv2_base_window12_192,192,1024.0,801.77,1277.157,11.9,39.72,109.28\nmvitv2_base,224,1024.0,789.29,1297.348,10.16,40.5,51.47\ndensenet264d,224,1024.0,784.72,1304.914,13.57,14.0,72.74\nresnest50d_4s2x40d,224,1024.0,782.94,1307.879,4.4,17.94,30.42\nswinv2_tiny_window16_256,256,512.0,779.51,656.811,6.68,39.02,28.35\nvolo_d2_224,224,1024.0,778.59,1315.191,14.34,41.34,58.68\ndpn107,224,1024.0,773.9,1323.149,18.38,33.46,86.92\nxcit_tiny_24_p8_224,224,1024.0,770.47,1329.042,9.21,45.38,12.11\nconvnext_base,288,1024.0,769.28,1331.103,25.43,47.53,88.59\ncoatnet_rmlp_2_rw_224,224,512.0,762.93,671.09,14.64,44.94,73.88\nmvitv2_base_cls,224,1024.0,760.58,1346.32,10.23,40.65,65.44\nconvit_base,224,1024.0,757.3,1352.149,17.52,31.77,86.54\nconvformer_s36,224,1024.0,757.3,1352.161,7.67,30.5,40.01\ncoatnet_2_224,224,384.0,753.79,509.418,15.94,42.41,74.68\nhrnet_w64,224,1024.0,748.82,1367.478,28.97,35.09,128.06\nresnet152d,320,1024.0,747.67,1369.57,24.08,47.67,60.21\necaresnet200d,256,1024.0,744.16,1376.037,20.0,43.15,64.69\nseresnet200d,256,1024.0,743.64,1376.992,20.01,43.15,71.86\nresnetrs200,256,1024.0,743.56,1377.137,20.18,43.42,93.21\nswinv2_small_window8_256,256,1024.0,740.78,1382.313,11.58,40.14,49.73\nxception65,299,768.0,738.05,10
40.572,13.96,52.48,39.92\nfastvit_ma36,256,1024.0,734.46,1394.207,7.85,40.39,44.07\nswinv2_cr_small_ns_256,256,1024.0,733.6,1395.843,12.07,76.21,49.7\nsenet154,224,1024.0,731.81,1399.262,20.77,38.69,115.09\nmaxvit_rmlp_small_rw_256,256,768.0,731.54,1049.835,13.69,55.48,64.9\nlegacy_senet154,224,1024.0,730.99,1400.828,20.77,38.69,115.09\ntf_efficientnetv2_m,384,1024.0,728.54,1405.529,15.85,57.52,54.14\nxcit_nano_12_p8_384,384,1024.0,723.54,1415.249,6.34,46.06,3.05\npoolformer_m48,224,1024.0,722.45,1417.374,11.59,29.17,73.47\ntnt_b_patch16_224,224,1024.0,722.04,1418.187,14.09,39.01,65.41\nefficientvit_l3,224,1024.0,720.55,1421.127,27.62,39.16,246.04\nswinv2_cr_base_224,224,1024.0,719.69,1422.825,15.86,59.66,87.88\nefficientnet_b3_g8_gn,320,512.0,718.69,712.395,3.2,28.83,14.25\nresnest101e,256,1024.0,718.12,1425.925,13.38,28.66,48.28\nswin_s3_base_224,224,1024.0,717.57,1427.034,13.69,48.26,71.13\nresnext101_64x4d,288,1024.0,717.4,1427.37,25.66,51.59,83.46\nswinv2_cr_base_ns_224,224,1024.0,713.5,1435.162,15.86,59.66,87.88\nconvnextv2_base,224,768.0,711.23,1079.807,15.38,28.75,88.72\nresnet200,288,1024.0,697.53,1468.023,24.91,53.21,64.67\nefficientnet_b3_gn,320,512.0,695.5,736.148,2.14,28.83,11.73\ncoat_small,224,1024.0,694.03,1475.431,12.61,44.25,21.69\nconvnext_large,224,1024.0,690.43,1483.117,34.4,43.13,197.77\nregnetz_e8,320,1024.0,670.8,1526.503,15.46,63.94,57.7\nefficientformerv2_s2,224,1024.0,670.26,1527.748,1.27,11.77,12.71\nseresnext101_32x8d,288,1024.0,656.14,1560.626,27.24,51.63,93.57\nresnetrs152,320,1024.0,655.8,1561.431,24.34,48.14,86.62\nxcit_small_12_p8_224,224,1024.0,655.5,1562.148,18.69,47.19,26.21\nmaxxvitv2_rmlp_base_rw_224,224,768.0,651.85,1178.173,23.88,54.39,116.09\nseresnet152d,320,1024.0,649.85,1575.74,24.09,47.72,66.84\nvit_large_patch32_384,384,1024.0,647.57,1581.281,44.28,32.22,306.63\npoolformerv2_m36,224,1024.0,646.73,1583.338,8.81,22.02,56.08\nresnext101_32x16d,224,1024.0,641.29,1596.767,36.27,51.18,194.03\nseresnext101d_32x8d,288,1024.0,63
9.61,1600.97,27.64,52.95,93.59\nregnetz_d8_evos,256,1024.0,638.02,1604.938,4.5,24.92,23.46\ndavit_large,224,1024.0,634.07,1614.963,34.37,55.08,196.81\nefficientnetv2_m,416,1024.0,633.12,1617.367,18.6,67.5,54.14\nregnety_064,224,1024.0,632.1,1619.968,6.39,16.41,30.58\nregnetv_064,224,1024.0,629.87,1625.704,6.39,16.41,30.58\nregnetz_c16_evos,320,512.0,622.61,822.333,3.86,25.88,13.49\ngcvit_base,224,1024.0,620.94,1649.111,14.87,55.48,90.32\nnf_regnet_b5,456,512.0,602.97,849.111,11.7,61.95,49.74\nseresnextaa101d_32x8d,288,1024.0,601.98,1701.035,28.51,56.44,93.59\nxception71,299,768.0,600.76,1278.366,18.09,69.92,42.34\neca_nfnet_l2,320,1024.0,593.89,1724.216,20.95,47.43,56.72\nnfnet_f2,256,1024.0,593.31,1725.904,33.76,41.85,193.78\ncrossvit_18_dagger_408,408,1024.0,585.92,1747.666,25.31,49.38,44.61\nhrnet_w48_ssld,288,1024.0,585.32,1749.444,28.66,47.21,77.47\necaresnet200d,288,1024.0,584.36,1752.321,25.31,54.59,64.69\nseresnet200d,288,1024.0,583.25,1755.672,25.32,54.6,71.86\ncaformer_m36,224,1024.0,582.88,1756.773,12.75,40.61,56.2\nlevit_512_s8,224,256.0,582.77,439.271,21.82,52.28,74.05\nmaxvit_rmlp_base_rw_224,224,768.0,582.44,1318.589,22.63,79.3,116.14\nseresnet269d,256,1024.0,581.62,1760.578,26.59,53.6,113.67\nconvmixer_768_32,224,1024.0,580.09,1765.235,19.55,25.95,21.11\nresnetrs270,256,1024.0,565.62,1810.398,27.06,55.84,129.86\nmixer_l16_224,224,1024.0,553.36,1850.484,44.6,41.69,208.2\nlevit_conv_512_s8,224,256.0,552.47,463.363,21.82,52.28,74.05\nefficientnetv2_rw_m,416,1024.0,552.47,1853.491,21.49,79.62,53.24\nresnet200d,320,1024.0,551.74,1855.93,31.25,67.33,64.69\nnfnet_f1,320,1024.0,548.82,1865.795,35.97,46.77,132.63\nconvformer_m36,224,1024.0,548.78,1865.947,12.89,42.05,57.05\nvolo_d3_224,224,1024.0,541.9,1889.619,20.78,60.09,86.33\nswinv2_base_window8_256,256,1024.0,530.42,1930.519,20.37,52.59,87.92\nmaxvit_base_tf_224,224,512.0,517.72,988.937,23.52,81.67,119.47\nxcit_large_24_p16_224,224,1024.0,511.16,2003.26,35.86,47.26,189.1\nconvmixer_1024_20_ks9_p14,224,10
24.0,510.74,2004.929,5.55,5.51,24.38\ndm_nfnet_f2,256,1024.0,503.11,2035.325,33.76,41.85,193.78\nswin_large_patch4_window7_224,224,768.0,494.53,1552.967,34.53,54.94,196.53\nvit_base_patch16_18x2_224,224,1024.0,494.1,2072.443,50.37,49.17,256.73\ndeit_base_patch16_384,384,1024.0,493.77,2073.808,49.4,48.3,86.86\nvit_base_patch16_384,384,1024.0,493.5,2074.946,49.4,48.3,86.86\ndeit_base_distilled_patch16_384,384,1024.0,493.31,2075.754,49.49,48.39,87.63\nvit_base_patch16_clip_384,384,1024.0,492.52,2079.081,49.41,48.3,86.86\neva_large_patch14_196,196,1024.0,491.4,2083.813,59.66,43.77,304.14\nvit_base_patch16_siglip_384,384,1024.0,490.82,2086.272,50.0,49.11,93.18\nvit_large_patch16_224,224,1024.0,489.19,2093.231,59.7,43.77,304.33\nhalonet_h1,256,256.0,487.96,524.621,3.0,51.17,8.1\ntiny_vit_21m_512,512,256.0,487.73,524.868,21.23,83.26,21.27\nseresnextaa101d_32x8d,320,768.0,487.6,1575.053,35.19,69.67,93.59\nswinv2_large_window12_192,192,768.0,487.6,1575.036,26.17,56.53,228.77\nswinv2_small_window16_256,256,512.0,487.58,1050.071,12.82,66.29,49.73\npoolformerv2_m48,224,1024.0,487.33,2101.208,11.59,29.17,73.35\nresnetrs200,320,1024.0,476.69,2148.152,31.51,67.81,93.21\nxcit_tiny_12_p8_384,384,1024.0,472.87,2165.479,14.12,69.12,6.71\nvit_small_patch14_dinov2,518,1024.0,470.72,2175.374,29.46,57.34,22.06\ndeit3_base_patch16_384,384,1024.0,469.96,2178.883,49.4,48.3,86.88\nvit_small_patch14_reg4_dinov2,518,1024.0,469.28,2182.048,29.55,57.51,22.06\ndeit3_large_patch16_224,224,1024.0,468.18,2187.162,59.7,43.77,304.37\ntf_efficientnetv2_m,480,1024.0,466.8,2193.627,24.76,89.84,54.14\ndm_nfnet_f1,320,1024.0,463.74,2208.099,35.97,46.77,132.63\nxcit_small_24_p16_384,384,1024.0,458.11,2235.247,26.72,68.57,47.67\nseresnet269d,288,1024.0,457.25,2239.451,33.65,67.81,113.67\nbeit_large_patch16_224,224,1024.0,453.95,2255.726,59.7,43.77,304.43\nbeitv2_large_patch16_224,224,1024.0,453.79,2256.515,59.7,43.77,304.43\nregnetx_120,224,1024.0,452.56,2262.648,12.13,21.37,46.11\nefficientnet_b5,448,512.0,4
44.06,1152.996,9.59,93.56,30.39\nregnety_120,224,1024.0,444.03,2306.127,12.14,21.38,51.82\nefficientformerv2_l,224,1024.0,441.81,2317.703,2.59,18.54,26.32\ncoatnet_3_rw_224,224,384.0,441.21,870.327,32.63,59.07,181.81\nresnetv2_152x2_bit,224,1024.0,439.95,2327.532,46.95,45.11,236.34\nconvnext_xlarge,224,768.0,438.91,1749.766,60.98,57.5,350.2\ncoatnet_rmlp_3_rw_224,224,256.0,438.69,583.549,32.75,64.7,165.15\ncoatnet_3_224,224,256.0,431.52,593.24,35.72,63.61,166.97\nconvnextv2_base,288,512.0,430.66,1188.858,25.43,47.53,88.72\nflexivit_large,240,1024.0,427.93,2392.897,68.48,50.22,304.36\nconvnextv2_large,224,512.0,424.61,1205.798,34.4,43.13,197.96\nswinv2_cr_large_224,224,768.0,424.12,1810.813,35.1,78.42,196.68\nswinv2_cr_tiny_384,384,256.0,420.98,608.099,15.34,161.01,28.33\ncaformer_b36,224,768.0,420.2,1827.698,22.5,54.14,98.75\nmaxvit_tiny_tf_384,384,256.0,419.78,609.84,16.0,94.22,30.98\nconvnext_large,288,768.0,417.93,1837.619,56.87,71.29,197.77\nregnety_160,224,1024.0,417.09,2455.096,15.96,23.04,83.59\neca_nfnet_l2,384,1024.0,412.81,2480.539,30.05,68.28,56.72\nmaxxvitv2_rmlp_large_rw_224,224,768.0,411.22,1867.582,43.69,75.4,215.42\nefficientnetv2_l,384,1024.0,409.83,2498.611,36.1,101.16,118.52\ndavit_huge,224,768.0,407.6,1884.205,60.93,73.44,348.92\ntf_efficientnetv2_l,384,1024.0,405.08,2527.906,36.1,101.16,118.52\nregnety_320,224,1024.0,403.27,2539.241,32.34,30.26,145.05\nregnetz_d8_evos,320,768.0,403.13,1905.094,7.03,38.92,23.46\nbeit_base_patch16_384,384,1024.0,402.61,2543.386,49.4,48.3,86.74\nconvformer_b36,224,768.0,397.77,1930.749,22.69,56.06,99.88\ntf_efficientnet_b5,456,384.0,394.74,972.77,10.46,98.86,30.39\neca_nfnet_l3,352,1024.0,378.23,2707.314,32.57,73.12,72.04\nvit_large_patch16_siglip_256,256,1024.0,375.52,2726.866,78.12,57.42,315.96\necaresnet269d,320,1024.0,372.48,2749.133,41.53,83.69,102.09\nvit_large_r50_s32_384,384,1024.0,369.32,2772.633,56.4,64.88,329.09\nmaxvit_large_tf_224,224,384.0,359.98,1066.726,42.99,109.57,211.79\nvit_large_patch14_224,224
,1024.0,359.62,2847.449,77.83,57.11,304.2\nvit_large_patch14_clip_224,224,1024.0,359.62,2847.409,77.83,57.11,304.2\nswinv2_base_window16_256,256,384.0,359.2,1069.042,22.02,84.71,87.92\nswinv2_base_window12to16_192to256,256,384.0,359.01,1069.609,22.02,84.71,87.92\nnasnetalarge,331,384.0,356.97,1075.708,23.89,90.56,88.75\nresnetrs350,288,1024.0,356.46,2872.642,43.67,87.09,163.96\nvit_base_patch8_224,224,1024.0,351.76,2911.045,66.87,65.71,86.58\nvolo_d4_224,224,1024.0,343.2,2983.708,44.34,80.22,192.96\nxcit_small_24_p8_224,224,1024.0,342.74,2987.714,35.81,90.77,47.63\nvolo_d1_384,384,512.0,340.3,1504.541,22.75,108.55,26.78\nconvnext_large_mlp,320,512.0,338.23,1513.736,70.21,88.02,200.13\nrepvgg_d2se,320,1024.0,335.87,3048.766,74.57,46.82,133.33\nvit_large_patch14_clip_quickgelu_224,224,1024.0,324.37,3156.896,77.83,57.11,303.97\nvit_base_r50_s16_384,384,1024.0,315.28,3247.919,61.29,81.77,98.95\nnfnet_f2,352,1024.0,313.79,3263.314,63.22,79.06,193.78\nxcit_medium_24_p16_384,384,1024.0,313.38,3267.626,47.39,91.63,84.4\nvit_large_patch14_xp_224,224,1024.0,311.53,3287.018,77.77,57.11,304.06\necaresnet269d,352,1024.0,307.84,3326.422,50.25,101.25,102.09\ncoat_lite_medium_384,384,512.0,301.48,1698.273,28.73,116.7,44.57\nregnety_064,288,1024.0,298.91,3425.709,10.56,27.11,30.58\nresnetrs270,352,1024.0,298.81,3426.892,51.13,105.48,129.86\nregnetv_064,288,1024.0,298.12,3434.809,10.55,27.11,30.58\nresnext101_32x32d,224,512.0,296.06,1729.362,87.29,91.12,468.53\nnfnet_f3,320,1024.0,290.3,3527.352,68.77,83.93,254.92\nefficientnetv2_xl,384,1024.0,290.02,3530.821,52.81,139.2,208.12\ntf_efficientnetv2_xl,384,1024.0,287.47,3562.138,52.81,139.2,208.12\ncait_xxs24_384,384,1024.0,284.02,3605.396,9.63,122.65,12.03\nmaxvit_small_tf_384,384,192.0,274.58,699.228,33.58,139.86,69.02\ncoatnet_4_224,224,256.0,274.31,933.246,60.81,98.85,275.43\nconvnext_xlarge,288,512.0,265.38,1929.279,100.8,95.05,350.2\ndm_nfnet_f2,352,1024.0,265.36,3858.944,63.22,79.06,193.78\nvit_base_patch16_siglip_512,512,512.0,2
63.16,1945.545,88.89,87.3,93.52\nvit_so400m_patch14_siglip_224,224,1024.0,262.63,3898.968,106.18,70.45,427.68\nefficientnetv2_l,480,512.0,261.08,1961.059,56.4,157.99,118.52\nswinv2_cr_small_384,384,256.0,258.97,988.525,29.7,298.03,49.7\nconvnextv2_large,288,384.0,257.89,1488.981,56.87,71.29,197.96\ntf_efficientnetv2_l,480,512.0,257.78,1986.206,56.4,157.99,118.52\neva02_large_patch14_224,224,1024.0,256.9,3985.935,77.9,65.52,303.27\neva02_large_patch14_clip_224,224,1024.0,253.93,4032.531,77.93,65.52,304.11\nregnety_120,288,768.0,253.81,3025.924,20.06,35.34,51.82\nxcit_tiny_24_p8_384,384,1024.0,248.2,4125.63,27.05,132.94,12.11\ncoatnet_rmlp_2_rw_384,384,192.0,247.61,775.41,43.04,132.57,73.88\ndm_nfnet_f3,320,1024.0,247.07,4144.617,68.77,83.93,254.92\nresnetrs420,320,1024.0,244.54,4187.355,64.2,126.56,191.89\nmvitv2_large,224,512.0,243.6,2101.832,43.87,112.02,217.99\nmvitv2_large_cls,224,512.0,241.75,2117.866,42.17,111.69,234.58\nresmlp_big_24_224,224,1024.0,241.59,4238.519,100.23,87.31,129.14\nregnety_160,288,768.0,237.71,3230.76,26.37,38.07,83.59\nxcit_medium_24_p8_224,224,768.0,234.01,3281.941,63.52,121.22,84.32\neca_nfnet_l3,448,512.0,233.43,2193.322,52.55,118.4,72.04\nvolo_d5_224,224,1024.0,228.8,4475.542,72.4,118.11,295.46\nswin_base_patch4_window12_384,384,256.0,227.46,1125.454,47.19,134.78,87.9\nxcit_small_12_p8_384,384,384.0,223.23,1720.206,54.92,138.25,26.21\nswinv2_large_window12to16_192to256,256,256.0,219.08,1168.537,47.81,121.53,196.74\nmaxxvitv2_rmlp_base_rw_384,384,384.0,217.17,1768.16,70.18,160.22,116.09\nefficientnet_b6,528,256.0,205.22,1247.45,19.4,167.39,43.04\nregnetx_320,224,768.0,200.5,3830.333,31.81,36.3,107.81\nresnetrs350,384,1024.0,199.92,5122.143,77.59,154.74,163.96\ncait_xs24_384,384,768.0,198.76,3863.971,19.28,183.98,26.67\nmaxvit_xlarge_tf_224,224,256.0,198.54,1289.412,96.49,164.37,506.99\ntf_efficientnet_b6,528,192.0,198.54,967.028,19.4,167.39,43.04\nfocalnet_huge_fl3,224,512.0,191.39,2675.182,118.26,104.8,745.28\nvolo_d2_384,384,384.0,190
.85,2012.066,46.17,184.51,58.87\ncait_xxs36_384,384,1024.0,189.78,5395.721,14.35,183.7,17.37\neva02_base_patch14_448,448,512.0,189.58,2700.759,87.74,98.4,87.12\nvit_huge_patch14_gap_224,224,1024.0,186.27,5497.294,161.36,94.7,630.76\nswinv2_cr_base_384,384,256.0,185.05,1383.395,50.57,333.68,87.88\nswinv2_cr_huge_224,224,384.0,182.04,2109.357,115.97,121.08,657.83\nmaxvit_rmlp_base_rw_384,384,384.0,179.65,2137.52,66.51,233.79,116.14\nvit_huge_patch14_224,224,1024.0,179.6,5701.574,161.99,95.07,630.76\nvit_huge_patch14_clip_224,224,1024.0,179.43,5706.842,161.99,95.07,632.05\nxcit_large_24_p16_384,384,1024.0,177.48,5769.692,105.34,137.15,189.1\nvit_base_patch14_dinov2,518,512.0,176.68,2897.828,117.11,114.68,86.58\nvit_base_patch14_reg4_dinov2,518,512.0,175.98,2909.337,117.45,115.02,86.58\ndeit3_huge_patch14_224,224,1024.0,173.53,5900.889,161.99,95.07,632.13\nnfnet_f3,416,768.0,171.77,4471.127,115.58,141.78,254.92\nmaxvit_tiny_tf_512,512,128.0,170.91,748.92,28.66,172.66,31.05\nseresnextaa201d_32x8d,384,512.0,170.35,3005.583,101.11,199.72,149.39\nmaxvit_base_tf_384,384,192.0,166.63,1152.259,69.34,247.75,119.65\nvit_huge_patch14_clip_quickgelu_224,224,1024.0,165.5,6187.275,161.99,95.07,632.08\nefficientnetv2_xl,512,512.0,163.45,3132.529,93.85,247.32,208.12\nnfnet_f4,384,768.0,163.26,4704.17,122.14,147.57,316.07\ntf_efficientnetv2_xl,512,512.0,161.63,3167.699,93.85,247.32,208.12\nvit_huge_patch14_xp_224,224,1024.0,159.72,6411.21,161.88,95.07,631.8\neva_large_patch14_336,336,768.0,155.72,4931.845,174.74,128.21,304.53\nvit_large_patch14_clip_336,336,768.0,155.28,4945.947,174.74,128.21,304.53\nvit_large_patch16_384,384,768.0,155.12,4950.906,174.85,128.21,304.72\nvit_large_patch16_siglip_384,384,768.0,154.94,4956.619,175.76,129.18,316.28\nconvnext_xxlarge,256,384.0,153.59,2500.071,198.09,124.45,846.47\nvit_giant_patch16_gap_224,224,1024.0,153.47,6672.363,198.14,103.64,1011.37\ncait_s24_384,384,512.0,153.12,3343.821,32.17,245.3,47.06\ndavit_giant,224,384.0,152.05,2525.491,192.34,1
38.2,1406.47\ndeit3_large_patch16_384,384,1024.0,148.73,6884.872,174.85,128.21,304.76\ncoatnet_5_224,224,192.0,147.83,1298.762,142.72,143.69,687.47\ndm_nfnet_f3,416,512.0,146.0,3506.787,115.58,141.78,254.92\nresnetrs420,416,768.0,144.59,5311.727,108.45,213.79,191.89\nvit_large_patch14_clip_quickgelu_336,336,768.0,141.12,5441.998,174.74,128.21,304.29\ndm_nfnet_f4,384,768.0,139.13,5519.969,122.14,147.57,316.07\nswin_large_patch4_window12_384,384,128.0,135.95,941.498,104.08,202.16,196.74\nxcit_large_24_p8_224,224,512.0,131.73,3886.696,141.22,181.53,188.93\nbeit_large_patch16_384,384,768.0,129.79,5917.023,174.84,128.21,305.0\nefficientnet_b7,600,192.0,128.05,1499.407,38.33,289.94,66.35\ntf_efficientnet_b7,600,192.0,124.56,1541.433,38.33,289.94,66.35\nfocalnet_huge_fl4,224,512.0,123.26,4153.862,118.9,113.34,686.46\neva_giant_patch14_clip_224,224,1024.0,116.99,8753.07,259.74,135.89,1012.59\neva_giant_patch14_224,224,1024.0,116.91,8758.747,259.74,135.89,1012.56\nnfnet_f5,416,768.0,116.91,6569.029,170.71,204.56,377.21\nxcit_small_24_p8_384,384,384.0,116.73,3289.571,105.23,265.87,47.63\nmaxvit_large_tf_384,384,128.0,116.56,1098.144,126.61,332.3,212.03\nvit_giant_patch14_224,224,1024.0,114.32,8957.604,259.74,135.89,1012.61\nvit_giant_patch14_clip_224,224,1024.0,114.12,8973.257,259.74,135.89,1012.65\nswinv2_cr_large_384,384,192.0,113.51,1691.47,108.96,404.96,196.68\neva02_large_patch14_clip_336,336,768.0,110.42,6955.361,174.97,147.1,304.43\nmvitv2_huge_cls,224,384.0,105.54,3638.368,120.67,243.63,694.8\nmaxvit_small_tf_512,512,96.0,104.89,915.238,60.02,256.36,69.13\ncait_s36_384,384,512.0,102.28,5005.663,47.99,367.39,68.37\ndm_nfnet_f5,416,512.0,99.59,5141.209,170.71,204.56,377.21\nswinv2_base_window12to24_192to384,384,96.0,96.5,994.841,55.25,280.36,87.92\nfocalnet_large_fl3,384,256.0,93.78,2729.925,105.06,168.04,239.13\nnfnet_f4,512,512.0,91.69,5583.92,216.26,262.26,316.07\nfocalnet_large_fl4,384,256.0,90.64,2824.324,105.2,181.78,239.32\nnfnet_f6,448,512.0,86.88,5893.345,229.7
,273.62,438.36\nefficientnet_b8,672,128.0,85.75,1492.768,63.48,442.89,87.41\ntf_efficientnet_b8,672,128.0,83.71,1529.068,63.48,442.89,87.41\nvolo_d3_448,448,128.0,81.1,1578.235,96.33,446.83,86.63\nvit_so400m_patch14_siglip_384,384,512.0,80.75,6340.618,302.34,200.62,428.23\nxcit_medium_24_p8_384,384,256.0,80.25,3189.919,186.67,354.69,84.32\ndm_nfnet_f4,512,384.0,78.23,4908.575,216.26,262.26,316.07\nvit_huge_patch14_clip_336,336,512.0,75.44,6786.84,363.7,213.44,632.46\ndm_nfnet_f6,448,512.0,74.17,6903.248,229.7,273.62,438.36\nmaxvit_base_tf_512,512,96.0,72.37,1326.47,123.93,456.26,119.88\nnfnet_f5,544,384.0,68.39,5614.643,290.97,349.71,377.21\nnfnet_f7,480,512.0,66.61,7686.561,300.08,355.86,499.5\nvit_gigantic_patch14_224,224,512.0,66.24,7729.406,473.4,204.12,1844.44\nvit_gigantic_patch14_clip_224,224,512.0,66.15,7739.524,473.41,204.12,1844.91\nfocalnet_xlarge_fl3,384,192.0,65.92,2912.463,185.61,223.99,408.79\nmaxvit_xlarge_tf_384,384,96.0,64.9,1479.208,283.86,498.45,475.32\nfocalnet_xlarge_fl4,384,192.0,63.63,3017.361,185.79,242.31,409.03\nbeit_large_patch16_512,512,256.0,61.48,4163.85,310.6,227.76,305.67\nvolo_d4_448,448,192.0,60.99,3147.895,197.13,527.35,193.41\nregnety_640,384,192.0,60.97,3149.012,188.47,124.83,281.38\nconvnextv2_huge,384,96.0,60.92,1575.922,337.96,232.35,660.29\nswinv2_large_window12to24_192to384,384,48.0,60.75,790.151,116.15,407.83,196.74\neva02_large_patch14_448,448,512.0,59.67,8581.221,310.69,261.32,305.08\ndm_nfnet_f5,544,384.0,58.35,6580.773,290.97,349.71,377.21\nvit_huge_patch14_clip_378,378,512.0,58.14,8806.389,460.13,270.04,632.68\nconvmixer_1536_20,224,1024.0,56.99,17967.01,48.68,33.03,51.63\nvit_large_patch14_dinov2,518,384.0,56.83,6757.154,414.89,304.42,304.37\nvit_large_patch14_reg4_dinov2,518,384.0,56.64,6779.944,416.1,305.31,304.37\nmaxvit_large_tf_512,512,64.0,54.68,1170.494,225.96,611.85,212.33\ntf_efficientnet_l2,475,96.0,54.05,1776.14,172.11,609.89,480.31\nvit_huge_patch14_clip_quickgelu_378,378,384.0,53.95,7117.573,460.13,270.0
4,632.68\nvit_huge_patch16_gap_448,448,512.0,52.86,9685.108,494.35,290.02,631.67\nnfnet_f6,576,384.0,52.55,7307.184,378.69,452.2,438.36\nswinv2_cr_giant_224,224,192.0,52.45,3660.551,483.85,309.15,2598.76\neva_giant_patch14_336,336,512.0,49.65,10312.606,583.14,305.1,1013.01\nswinv2_cr_huge_384,384,96.0,49.62,1934.539,352.04,583.18,657.94\nxcit_large_24_p8_384,384,192.0,45.19,4249.177,415.0,531.74,188.93\ndm_nfnet_f6,576,256.0,44.83,5710.109,378.69,452.2,438.36\nvolo_d5_448,448,192.0,42.49,4518.905,315.06,737.92,295.91\nnfnet_f7,608,256.0,41.52,6165.283,480.39,570.85,499.5\ncait_m36_384,384,256.0,33.1,7733.448,173.11,734.79,271.22\nresnetv2_152x4_bit,480,96.0,32.12,2989.13,844.84,414.26,936.53\nmaxvit_xlarge_tf_512,512,48.0,30.41,1578.222,505.95,917.77,475.77\nregnety_2560,384,128.0,30.25,4231.43,747.83,296.49,1282.6\nvolo_d5_512,512,128.0,29.54,4332.489,425.09,1105.37,296.09\nsamvit_base_patch16,1024,16.0,23.81,671.88,371.55,403.08,89.67\nregnety_1280,384,128.0,22.93,5583.053,374.99,210.2,644.81\nefficientnet_l2,800,48.0,19.03,2521.932,479.12,1707.39,480.31\nvit_giant_patch14_dinov2,518,192.0,17.15,11193.542,1553.56,871.89,1136.48\nvit_giant_patch14_reg4_dinov2,518,192.0,17.12,11212.072,1558.09,874.43,1136.48\nswinv2_cr_giant_384,384,32.0,15.04,2127.877,1450.71,1394.86,2598.76\neva_giant_patch14_560,560,192.0,15.03,12771.913,1618.04,846.56,1014.45\ncait_m48_448,448,128.0,13.96,9172.063,329.4,1708.21,356.46\nsamvit_large_patch16,1024,12.0,10.64,1127.934,1317.08,1055.58,308.28\nsamvit_huge_patch16,1024,8.0,6.61,1210.638,2741.59,1727.57,637.03\n"
  },
  {
    "path": "results/benchmark-infer-amp-nhwc-pt240-cu124-rtx3090.csv",
    "content": "model,infer_img_size,infer_samples_per_sec,infer_step_time,infer_batch_size,param_count,infer_gmacs,infer_macts\ntest_efficientnet,160,103646.76,9.87,1024,0.36,0.06,0.55\ntest_vit,160,103244.27,9.909,1024,0.37,0.04,0.48\ntest_byobnet,160,95063.35,10.761,1024,0.46,0.03,0.43\ntinynet_e,106,76700.93,13.341,1024,2.04,0.03,0.69\nmobilenetv3_small_050,224,57072.67,17.933,1024,1.59,0.03,0.92\nlcnet_035,224,55976.14,18.284,1024,1.64,0.03,1.04\nefficientvit_m0,224,48377.06,21.157,1024,2.35,0.08,0.91\nlcnet_050,224,47344.86,21.619,1024,1.88,0.05,1.26\nmobilenetv3_small_075,224,42488.72,24.091,1024,2.04,0.05,1.3\nmobilenetv3_small_100,224,39045.83,26.216,1024,2.54,0.06,1.42\ntinynet_d,152,38744.45,26.42,1024,2.34,0.05,1.42\nefficientvit_m1,224,37693.15,27.157,1024,2.98,0.17,1.33\ntf_mobilenetv3_small_minimal_100,224,34512.3,29.661,1024,2.04,0.06,1.41\nefficientvit_m2,224,33886.66,30.209,1024,4.19,0.2,1.47\ntf_mobilenetv3_small_075,224,33704.54,30.372,1024,2.04,0.05,1.3\ntf_mobilenetv3_small_100,224,31434.86,32.566,1024,2.54,0.06,1.42\nlcnet_075,224,31075.1,32.942,1024,2.36,0.1,1.99\nmobilenetv4_conv_small,224,30965.44,33.059,1024,3.77,0.19,1.97\nefficientvit_m3,224,29709.67,34.458,1024,6.9,0.27,1.62\nefficientvit_m4,224,28086.43,36.449,1024,8.8,0.3,1.7\nmnasnet_small,224,26858.45,38.112,1024,2.03,0.07,2.16\nregnetx_002,224,24932.59,41.06,1024,2.68,0.2,2.16\nlcnet_100,224,24627.72,41.57,1024,2.95,0.16,2.52\nmobilenetv4_conv_small,256,23522.69,43.523,1024,3.77,0.25,2.57\nregnety_002,224,23069.71,44.375,1024,3.16,0.2,2.17\nlevit_128s,224,23026.39,44.461,1024,7.78,0.31,1.88\nrepghostnet_050,224,22877.69,44.75,1024,2.31,0.05,2.02\nghostnet_050,224,22787.91,44.926,1024,2.59,0.05,1.77\nmobilenetv2_035,224,22588.52,45.323,1024,1.68,0.07,2.86\nresnet10t,176,22365.03,45.776,1024,5.44,0.7,1.51\nlevit_conv_128s,224,22115.28,46.293,1024,7.78,0.31,1.88\nmnasnet_050,224,21012.53,48.723,1024,2.22,0.11,3.07\nresnet18,160,20850.92,49.101,1024,11.69,0.93,1.27\ntinynet_c,184,19198
.02,53.329,1024,2.46,0.11,2.87\nefficientvit_m5,224,19162.34,53.428,1024,12.47,0.53,2.41\nefficientvit_b0,224,19036.4,53.782,1024,3.41,0.1,2.87\nrepghostnet_058,224,18156.61,56.388,1024,2.55,0.07,2.59\nsemnasnet_050,224,17891.8,57.223,1024,2.08,0.11,3.44\nmobilenetv2_050,224,17837.51,57.397,1024,1.97,0.1,3.64\nregnetx_004,224,17491.27,58.531,1024,5.16,0.4,3.14\nregnetx_004_tv,224,16931.52,60.467,1024,5.5,0.42,3.17\ncs3darknet_focus_s,256,16535.27,61.918,1024,3.27,0.69,2.7\nlcnet_150,224,16211.53,63.155,1024,4.5,0.34,3.79\ngernet_s,224,15890.89,64.43,1024,8.17,0.75,2.65\ncs3darknet_s,256,15705.73,65.189,1024,3.28,0.72,2.97\nlevit_128,224,15600.03,65.631,1024,9.21,0.41,2.71\nmobilenetv3_large_075,224,15141.94,67.617,1024,3.99,0.16,4.0\nvit_tiny_r_s16_p8_224,224,15074.81,67.918,1024,6.34,0.44,2.06\nlevit_conv_128,224,15050.96,68.026,1024,9.21,0.41,2.71\nrepghostnet_080,224,14725.52,69.529,1024,3.28,0.1,3.22\nresnet10t,224,14365.94,71.27,1024,5.44,1.1,2.43\npit_ti_224,224,14330.5,71.446,1024,4.85,0.7,6.19\npit_ti_distilled_224,224,14304.16,71.577,1024,5.1,0.71,6.23\nmobilenetv3_rw,224,13802.85,74.178,1024,5.48,0.23,4.41\ntf_efficientnetv2_b0,192,13802.18,74.181,1024,7.14,0.54,3.51\nlevit_192,224,13743.1,74.5,1024,10.95,0.66,3.2\nmnasnet_075,224,13653.81,74.988,1024,3.17,0.23,4.77\nhardcorenas_a,224,13581.17,75.389,1024,5.26,0.23,4.38\nmobilenetv3_large_100,224,13556.89,75.524,1024,5.48,0.23,4.41\nvit_small_patch32_224,224,13379.42,76.525,1024,22.88,1.15,2.5\nlevit_conv_192,224,13084.23,78.252,1024,10.95,0.66,3.2\ntf_mobilenetv3_large_075,224,12872.36,79.54,1024,3.99,0.16,4.0\nregnety_004,224,12765.55,80.202,1024,4.34,0.41,3.89\ntinynet_b,188,12493.02,81.956,1024,3.73,0.21,4.44\ntf_mobilenetv3_large_minimal_100,224,12397.28,82.589,1024,3.92,0.22,4.4\nnf_regnet_b0,192,12394.96,82.604,1024,8.76,0.37,3.15\nresnet34,160,12381.04,82.698,1024,21.8,1.87,1.91\nhardcorenas_b,224,12362.7,82.82,1024,5.18,0.26,5.09\nhardcorenas_c,224,12318.59,83.117,1024,5.52,0.28,5.01\nmobilenetv1_
100,224,12074.33,84.798,1024,4.23,0.58,5.04\nrepghostnet_100,224,12055.17,84.933,1024,4.07,0.15,3.98\nresnet14t,176,11947.82,85.696,1024,10.08,1.07,3.61\nmnasnet_100,224,11914.93,85.933,1024,4.38,0.33,5.46\nregnety_006,224,11850.66,86.395,1024,6.06,0.61,4.33\nmobilenetv1_100h,224,11818.6,86.634,1024,5.28,0.63,5.09\nghostnet_100,224,11799.55,86.773,1024,5.18,0.15,3.55\nese_vovnet19b_slim_dw,224,11783.68,86.891,1024,1.9,0.4,5.28\nmixer_s32_224,224,11733.67,87.26,1024,19.1,1.0,2.28\ntf_mobilenetv3_large_100,224,11642.43,87.945,1024,5.48,0.23,4.41\nhardcorenas_d,224,11549.38,88.653,1024,7.5,0.3,4.93\nsemnasnet_075,224,11368.35,90.064,1024,2.91,0.23,5.54\nmobilenetv2_075,224,11254.61,90.975,1024,2.64,0.22,5.86\nmobilenet_edgetpu_v2_xs,224,11215.06,91.296,1024,4.46,0.7,4.8\nresnet18,224,11188.75,91.511,1024,11.69,1.82,2.48\nspnasnet_100,224,10760.35,95.154,1024,4.42,0.35,6.03\nregnetx_008,224,10657.43,96.071,1024,7.26,0.81,5.15\ntf_efficientnetv2_b1,192,10601.92,96.577,1024,8.14,0.76,4.59\nmobilenetv4_conv_medium,224,10533.33,97.206,1024,9.72,0.84,5.8\nseresnet18,224,10519.36,97.335,1024,11.78,1.82,2.49\nrepghostnet_111,224,10479.86,97.701,1024,4.54,0.18,4.38\ndeit_tiny_patch16_224,224,10451.69,97.965,1024,5.72,1.26,5.97\ndeit_tiny_distilled_patch16_224,224,10407.6,98.38,1024,5.91,1.27,6.01\nvit_tiny_patch16_224,224,10405.73,98.398,1024,5.72,1.26,5.97\ntf_efficientnetv2_b0,224,10314.64,99.267,1024,7.14,0.73,4.77\nhardcorenas_f,224,10206.96,100.314,1024,8.2,0.35,5.57\nlegacy_seresnet18,224,10179.01,100.588,1024,11.78,1.82,2.49\ntinynet_a,192,10107.12,101.305,1024,6.19,0.35,5.41\nhardcorenas_e,224,10040.98,101.972,1024,8.07,0.35,5.65\nsemnasnet_100,224,10029.01,102.094,1024,3.89,0.32,6.23\nmobilenet_edgetpu_100,224,10018.91,102.197,1024,4.09,1.0,5.75\nlevit_256,224,9996.37,102.427,1024,18.89,1.13,4.23\ndla46_c,224,9963.65,102.763,1024,1.3,0.58,4.5\nmobilenetv2_100,224,9887.57,103.554,1024,3.5,0.31,6.68\nrepvgg_a0,224,9797.14,104.51,1024,9.11,1.52,3.59\nregnety_008,224,9778.
52,104.708,1024,6.26,0.81,5.25\nfbnetc_100,224,9724.0,105.297,1024,5.57,0.4,6.51\nefficientnet_lite0,224,9657.18,106.025,1024,4.65,0.4,6.74\nregnetx_006,224,9637.51,106.242,1024,6.2,0.61,3.98\nlevit_conv_256,224,9529.69,107.444,1024,18.89,1.13,4.23\nresnet18d,224,9508.04,107.689,1024,11.71,2.06,3.29\nhgnetv2_b0,224,9407.15,108.843,1024,6.0,0.33,2.12\npit_xs_224,224,9387.39,109.073,1024,10.62,1.4,7.71\nregnety_008_tv,224,9370.17,109.273,1024,6.43,0.84,5.42\nmobilenetv4_hybrid_medium_075,224,9355.37,109.446,1024,7.31,0.66,5.65\npit_xs_distilled_224,224,9342.89,109.592,1024,11.0,1.41,7.76\nmobilenetv1_100,256,9270.71,110.445,1024,4.23,0.76,6.59\nese_vovnet19b_slim,224,9264.21,110.524,1024,3.17,1.69,3.52\nmobilenetv1_125,224,9214.57,111.118,1024,6.27,0.89,6.3\nvit_xsmall_patch16_clip_224,224,9204.49,111.24,1024,8.28,1.79,6.65\nconvnext_atto,224,9196.49,111.337,1024,3.7,0.55,3.81\nvit_medium_patch32_clip_224,224,9164.71,111.723,1024,39.69,2.0,3.34\nmobilenetv1_100h,256,9072.78,112.855,1024,5.28,0.82,6.65\nrepghostnet_130,224,8966.92,114.188,1024,5.48,0.25,5.24\nghostnet_130,224,8941.94,114.507,1024,7.36,0.24,4.6\nconvnext_atto_ols,224,8902.58,115.013,1024,3.7,0.58,4.11\nregnetz_005,224,8898.94,115.06,1024,7.12,0.52,5.86\nxcit_nano_12_p16_224,224,8661.49,118.214,1024,3.05,0.56,4.17\nlevit_256d,224,8476.14,120.799,1024,26.21,1.4,4.93\nedgenext_xx_small,256,8329.49,122.926,1024,1.33,0.26,3.33\ntf_efficientnet_lite0,224,8319.74,123.071,1024,4.65,0.4,6.74\nfbnetv3_b,224,8315.87,123.128,1024,8.6,0.42,6.97\nefficientnet_b0,224,8241.4,124.241,1024,5.29,0.4,6.75\nlevit_conv_256d,224,8143.78,125.73,1024,26.21,1.4,4.93\nmnasnet_140,224,8104.58,126.338,1024,7.12,0.6,7.71\nmobilenetv4_conv_medium,256,8092.13,126.533,1024,9.72,1.1,7.58\nconvnext_femto,224,8051.49,127.171,1024,5.22,0.79,4.57\ntf_efficientnetv2_b2,208,7901.34,129.588,1024,10.1,1.06,6.0\nmobilevit_xxs,256,7885.22,129.853,1024,1.27,0.42,8.34\nrepghostnet_150,224,7853.84,130.372,1024,6.58,0.32,6.0\nconvnext_femto_ols,224,7
832.99,130.719,1024,5.23,0.82,4.87\nmobilenet_edgetpu_v2_s,224,7762.61,131.904,1024,5.99,1.21,6.6\nrexnetr_100,224,7696.45,133.034,1024,4.88,0.43,7.72\nrepvit_m1,224,7673.77,133.431,1024,5.49,0.83,7.45\nresnet14t,224,7579.52,135.091,1024,10.08,1.69,5.8\nmobilenetv4_hybrid_medium,224,7493.91,136.634,1024,11.07,0.98,6.84\nmobilenetv2_110d,224,7490.18,136.702,1024,4.52,0.45,8.71\ncs3darknet_focus_m,256,7460.64,137.244,1024,9.3,1.98,4.89\nrepvit_m0_9,224,7284.39,140.555,1024,5.49,0.83,7.45\nrexnet_100,224,7243.9,141.351,1024,4.8,0.41,7.44\ntf_efficientnet_b0,224,7243.0,141.368,1024,5.29,0.4,6.75\nefficientnet_b1_pruned,240,7240.43,141.418,1024,6.33,0.4,6.21\nefficientvit_b1,224,7214.92,141.918,1024,9.1,0.53,7.25\nresnet50,160,7190.62,142.398,1024,25.56,2.1,5.67\ncs3darknet_m,256,7120.83,143.794,1024,9.31,2.08,5.28\nmobilevitv2_050,256,7087.19,144.476,1024,1.37,0.48,8.04\nnf_regnet_b0,256,7083.25,144.556,1024,8.76,0.64,5.58\nhrnet_w18_small,224,7050.65,145.221,1024,13.19,1.61,5.72\nmobilenetv1_125,256,7010.8,146.05,1024,6.27,1.16,8.23\nrepvgg_a1,224,6986.39,146.561,1024,14.09,2.64,4.74\nhgnetv2_b1,224,6916.93,148.033,1024,6.34,0.49,2.73\nfbnetv3_d,224,6914.78,148.079,1024,10.31,0.52,8.5\ngernet_m,224,6873.04,148.978,1024,21.14,3.02,5.24\nvisformer_tiny,224,6836.16,149.782,1024,10.32,1.27,5.72\npvt_v2_b0,224,6743.86,151.832,1024,3.67,0.57,7.99\ntf_efficientnetv2_b1,240,6694.16,152.959,1024,8.14,1.21,7.34\nresnet50d,160,6637.34,154.269,1024,25.58,2.22,6.08\nedgenext_xx_small,288,6577.06,155.681,1024,1.33,0.33,4.21\nvit_betwixt_patch32_clip_224,224,6547.37,156.388,1024,61.41,3.09,4.17\nresnet34,224,6529.57,156.815,1024,21.8,3.67,3.74\nese_vovnet19b_dw,224,6511.74,157.245,1024,6.54,1.34,8.25\nefficientnet_lite1,240,6378.58,160.527,1024,5.42,0.62,10.14\nresnet18,288,6375.26,160.611,1024,11.69,3.01,4.11\nfbnetv3_b,256,6364.95,160.871,1024,8.6,0.55,9.1\nselecsls42,224,6333.78,161.663,1024,30.35,2.94,4.62\nselecsls42b,224,6319.75,162.02,1024,32.46,2.98,4.62\ncrossvit_tiny_240,24
0,6299.42,162.544,1024,7.01,1.57,9.08\nconvnext_pico,224,6259.06,163.593,1024,9.05,1.37,6.1\nefficientnet_es_pruned,224,6253.5,163.738,1024,5.44,1.81,8.73\ncrossvit_9_dagger_240,240,6253.37,163.741,1024,8.78,1.99,9.97\nseresnet50,160,6246.72,163.91,1024,28.09,2.1,5.69\nefficientnet_b0,256,6246.01,163.934,1024,5.29,0.52,8.81\nefficientnet_es,224,6238.02,164.145,1024,5.44,1.81,8.73\nefficientnet_blur_b0,224,6201.55,165.11,1024,5.29,0.43,8.72\nmobilenetv3_large_150d,224,6128.32,167.083,1024,14.62,,\nrepvit_m1_0,224,6105.54,167.706,1024,7.3,1.13,8.69\nseresnet34,224,6098.18,167.906,1024,21.96,3.67,3.74\nconvnext_pico_ols,224,6085.56,168.258,1024,9.06,1.43,6.5\nresnext50_32x4d,160,6056.31,169.069,1024,25.03,2.17,7.35\nrepghostnet_200,224,6053.09,169.16,1024,9.8,0.54,7.96\nmobilenet_edgetpu_v2_m,224,6048.59,169.285,1024,8.46,1.85,8.15\ntiny_vit_5m_224,224,6035.01,169.666,1024,12.08,1.28,11.25\nseresnet18,288,6022.58,170.017,1024,11.78,3.01,4.11\nlevit_384,224,6010.01,170.372,1024,39.13,2.36,6.26\ncs3darknet_focus_m,288,5993.42,170.844,1024,9.3,2.51,6.19\nresnet26,224,5987.24,171.021,1024,16.0,2.36,7.35\ndla34,224,5969.98,171.514,1024,15.74,3.07,5.02\nrepvit_m2,224,5966.06,171.627,1024,8.8,1.36,9.43\nresnet34d,224,5928.79,172.706,1024,21.82,3.91,4.54\ntf_efficientnet_es,224,5915.64,173.09,1024,5.44,1.81,8.73\nlegacy_seresnet34,224,5895.55,173.68,1024,21.96,3.67,3.74\nresnetrs50,160,5890.41,173.832,1024,35.69,2.29,6.2\ncrossvit_9_240,240,5878.46,174.185,1024,8.55,1.85,9.52\nefficientnet_b0_g16_evos,224,5867.79,174.502,1024,8.11,1.01,7.42\necaresnet50t,160,5866.78,174.532,1024,25.57,2.21,6.04\nresnet50,176,5862.03,174.673,1024,25.56,2.62,6.92\nefficientnet_b1,224,5792.59,176.768,1024,7.79,0.59,9.36\ncs3darknet_m,288,5761.99,177.707,1024,9.31,2.63,6.69\nmobilenetv4_hybrid_medium,256,5760.47,177.753,1024,11.07,1.29,9.01\nlevit_conv_384,224,5757.96,177.831,1024,39.13,2.36,6.26\nghostnetv2_100,224,5689.23,179.979,1024,6.16,0.18,4.55\ntf_efficientnet_lite1,240,5688.37,180.007,102
4,5.42,0.62,10.14\nmixnet_s,224,5667.55,180.667,1024,4.13,0.25,6.25\nrepvit_m1_1,224,5665.98,180.718,1024,8.8,1.36,9.43\nresnetblur18,224,5629.69,181.883,1024,11.69,2.34,3.39\nefficientvit_b1,256,5607.85,182.591,1024,9.1,0.69,9.46\nconvnext_atto,288,5598.29,182.904,1024,3.7,0.91,6.3\nhgnetv2_b0,288,5561.84,184.102,1024,6.0,0.54,3.51\nselecsls60,224,5536.37,184.949,1024,30.67,3.59,5.52\nresnet18d,288,5520.36,185.486,1024,11.71,3.41,5.43\nxcit_tiny_12_p16_224,224,5519.98,185.498,1024,6.72,1.24,6.29\ndla46x_c,224,5516.82,185.602,1024,1.07,0.54,5.66\nselecsls60b,224,5514.38,185.687,1024,32.77,3.63,5.52\ndarknet17,256,5477.03,186.952,1024,14.3,3.26,7.18\nskresnet18,224,5436.76,188.337,1024,11.96,1.82,3.24\nregnetz_005,288,5429.12,188.602,1024,7.12,0.86,9.68\nconvnext_atto_ols,288,5423.39,188.802,1024,3.7,0.96,6.8\nresmlp_12_224,224,5417.84,188.996,1024,15.35,3.01,5.5\nconvnextv2_atto,224,5393.05,189.864,1024,3.71,0.55,3.81\nmobilenet_edgetpu_v2_l,224,5392.86,189.871,1024,10.92,2.55,9.05\nnf_regnet_b2,240,5319.96,192.472,1024,14.31,0.97,7.23\nnf_regnet_b1,256,5313.59,192.704,1024,10.22,0.82,7.27\nrepvgg_b0,224,5312.16,192.755,1024,15.82,3.41,6.15\nedgenext_x_small,256,5311.8,192.768,1024,2.34,0.54,5.93\ndla60x_c,224,5286.77,193.68,1024,1.32,0.59,6.01\ntf_mixnet_s,224,5242.96,195.299,1024,4.13,0.25,6.25\nresnet26d,224,5233.24,195.662,1024,16.01,2.6,8.15\nresnetaa34d,224,5231.1,195.743,1024,21.82,4.43,5.07\nvit_tiny_r_s16_p8_384,384,5200.71,196.885,1024,6.36,1.34,6.49\nmobilenetv4_conv_medium,320,5184.61,197.498,1024,9.72,1.71,11.84\nsemnasnet_140,224,5180.64,197.649,1024,6.11,0.6,8.87\nfbnetv3_d,256,5175.76,197.836,1024,10.31,0.68,11.1\nvit_base_patch32_clip_224,224,5121.46,199.933,1024,88.22,4.41,5.01\nvit_base_patch32_224,224,5114.3,200.213,1024,88.22,4.41,5.01\nmobilenetv4_conv_aa_medium,256,5106.4,200.523,1024,9.72,1.58,10.3\ngmixer_12_224,224,5100.86,200.741,1024,12.7,2.67,7.26\nregnetz_b16,224,5067.77,202.051,1024,9.72,1.45,9.95\nmobilenetv2_140,224,5065.51,202.141,1
024,6.11,0.6,9.57\nmobilenetv4_conv_blur_medium,224,5036.62,203.302,1024,9.72,1.22,8.58\nmixer_s16_224,224,5021.22,203.925,1024,18.53,3.79,5.97\ntiny_vit_11m_224,224,5013.61,204.233,1024,20.35,2.04,13.49\nhgnetv2_b4,224,4999.81,204.797,1024,19.8,2.75,6.7\neva02_tiny_patch14_224,224,4958.89,206.487,1024,5.5,1.7,9.14\nefficientnet_b1,240,4954.82,206.656,1024,7.79,0.71,10.88\npit_s_224,224,4950.08,206.854,1024,23.46,2.88,11.56\nresnext50_32x4d,176,4946.85,206.99,1024,25.03,2.71,8.97\npit_s_distilled_224,224,4922.56,208.011,1024,24.04,2.9,11.64\ndarknet21,256,4915.34,208.316,1024,20.86,3.93,7.47\nnf_resnet26,224,4912.43,208.441,1024,16.0,2.41,7.35\nconvnext_femto,288,4895.87,209.146,1024,5.22,1.3,7.56\nefficientnet_lite2,260,4895.77,209.15,1024,6.09,0.89,12.9\ngmlp_ti16_224,224,4887.42,209.508,1024,5.87,1.34,7.55\ntf_efficientnetv2_b2,260,4842.96,211.431,1024,10.1,1.72,9.84\nconvnext_femto_ols,288,4762.02,215.024,1024,5.23,1.35,8.06\nefficientnet_b2_pruned,260,4761.44,215.051,1024,8.31,0.73,9.13\nefficientnetv2_rw_t,224,4734.23,216.287,1024,13.65,1.93,9.94\nresnext26ts,256,4729.62,216.496,1024,10.3,2.43,10.52\necaresnet50d_pruned,224,4721.76,216.858,1024,19.94,2.53,6.43\nmixer_b32_224,224,4721.22,216.883,1024,60.29,3.24,6.29\nsedarknet21,256,4717.55,217.052,1024,20.95,3.93,7.47\nefficientnet_cc_b0_4e,224,4699.43,217.889,1024,13.31,0.41,9.42\nefficientformer_l1,224,4692.55,218.208,1024,12.29,1.3,5.53\nhgnetv2_b2,224,4665.47,219.475,1024,11.22,1.15,4.12\nefficientnet_cc_b0_8e,224,4649.86,220.212,1024,24.01,0.42,9.42\ndeit_small_patch16_224,224,4640.58,220.651,1024,22.05,4.61,11.95\ndpn48b,224,4632.75,221.023,1024,9.13,1.69,8.92\nvit_base_patch32_clip_quickgelu_224,224,4629.11,221.199,1024,87.85,4.41,5.01\nvit_small_patch32_384,384,4623.55,221.464,1024,22.92,3.45,8.25\nconvnextv2_femto,224,4619.54,221.657,1024,5.23,0.79,4.57\nvit_small_patch16_224,224,4616.6,221.798,1024,22.05,4.61,11.95\nmobilenet_edgetpu_v2_m,256,4603.21,222.443,1024,8.46,2.42,10.65\nmobilevitv2_075,256,
4603.11,222.448,1024,2.87,1.05,12.06\nrexnetr_130,224,4600.81,222.56,1024,7.61,0.68,9.81\ndeit_small_distilled_patch16_224,224,4599.62,222.617,1024,22.44,4.63,12.02\ngc_efficientnetv2_rw_t,224,4586.63,223.247,1024,13.68,1.94,9.97\neca_resnext26ts,256,4583.46,223.402,1024,10.3,2.43,10.52\nseresnext26ts,256,4576.49,223.742,1024,10.39,2.43,10.52\ntf_efficientnetv2_b3,240,4549.81,225.054,1024,14.36,1.93,9.95\nlegacy_seresnext26_32x4d,224,4538.76,225.601,1024,16.79,2.49,9.39\ngcresnext26ts,256,4524.04,226.336,1024,10.48,2.43,10.53\ntf_efficientnet_b1,240,4521.51,226.463,1024,7.79,0.71,10.88\ngernet_l,256,4517.69,226.654,1024,31.08,4.57,8.0\nresnet101,160,4424.84,231.411,1024,44.55,4.0,8.28\nconvnext_nano,224,4416.52,231.847,1024,15.59,2.46,8.37\ntf_efficientnet_lite2,260,4392.9,233.093,1024,6.09,0.89,12.9\nefficientnet_b1,256,4384.99,233.513,1024,7.79,0.77,12.22\nrexnet_130,224,4380.06,233.776,1024,7.56,0.68,9.71\nefficientvit_b1,288,4359.22,234.894,1024,9.1,0.87,11.96\nghostnetv2_130,224,4355.29,235.105,1024,8.96,0.28,5.9\nmobilenetv4_conv_large,256,4341.06,235.877,1024,32.59,2.86,12.14\ntf_efficientnet_cc_b0_4e,224,4338.57,236.012,1024,13.31,0.41,9.42\nmobilenetv2_120d,224,4324.07,236.803,1024,5.83,0.69,11.97\ntf_efficientnet_cc_b0_8e,224,4315.19,237.291,1024,24.01,0.42,9.42\ndeit3_small_patch16_224,224,4283.7,239.035,1024,22.06,4.61,11.95\nvit_relpos_small_patch16_rpn_224,224,4272.39,239.667,1024,21.97,4.59,13.05\nvit_wee_patch16_reg1_gap_256,256,4262.7,240.213,1024,13.42,3.83,13.9\ncs3darknet_focus_l,256,4254.84,240.657,1024,21.15,4.66,8.03\nvit_relpos_small_patch16_224,224,4248.63,241.008,1024,21.98,4.59,13.05\nvit_pwee_patch16_reg1_gap_256,256,4244.05,241.269,1024,15.25,4.37,15.87\nregnetx_016,224,4239.24,241.54,1024,9.19,1.62,7.93\nedgenext_x_small,288,4233.57,241.864,1024,2.34,0.68,7.5\nvit_srelpos_small_patch16_224,224,4224.03,242.412,1024,21.97,4.59,12.16\nnf_regnet_b1,288,4209.65,243.24,1024,10.22,1.02,9.2\nnf_ecaresnet26,224,4182.26,244.834,1024,16.0,2.41,7.3
6\nefficientnet_b2,256,4174.44,245.292,1024,9.11,0.89,12.81\nnf_seresnet26,224,4174.23,245.305,1024,17.4,2.41,7.36\nmobilenetv4_hybrid_large_075,256,4171.98,245.436,1024,22.75,2.06,11.64\nnf_regnet_b2,272,4148.23,246.842,1024,14.31,1.22,9.27\ncs3darknet_l,256,4138.17,247.442,1024,21.16,4.86,8.55\nconvnext_nano_ols,224,4136.46,247.544,1024,15.65,2.65,9.38\nrexnetr_150,224,4135.64,247.594,1024,9.78,0.89,11.13\necaresnext50t_32x4d,224,4123.94,248.296,1024,15.41,2.7,10.09\necaresnext26t_32x4d,224,4122.74,248.367,1024,15.41,2.7,10.09\nseresnext26t_32x4d,224,4117.08,248.709,1024,16.81,2.7,10.09\npoolformer_s12,224,4088.18,250.468,1024,11.92,1.82,5.53\nrepvgg_a2,224,4084.78,250.676,1024,28.21,5.7,6.26\nseresnext26d_32x4d,224,4078.22,251.079,1024,16.81,2.73,10.19\nresnet26t,256,4062.55,252.048,1024,16.01,3.35,10.52\nhgnet_tiny,224,4057.65,252.352,1024,14.74,4.54,6.36\nhgnetv2_b1,288,4056.18,252.444,1024,6.34,0.82,4.51\nflexivit_small,240,4044.24,253.19,1024,22.06,5.35,14.18\nrepvit_m3,224,4026.3,254.31,1024,10.68,1.89,13.94\nedgenext_small,256,4025.36,254.377,1024,5.59,1.26,9.07\nmobileone_s1,224,4001.06,255.921,1024,4.83,0.86,9.67\nvit_base_patch32_clip_256,256,3985.68,256.91,1024,87.86,5.76,6.65\nresnetv2_50,224,3927.67,260.703,1024,25.55,4.11,11.11\neca_botnext26ts_256,256,3922.63,261.039,1024,10.59,2.46,11.6\nregnety_016,224,3879.4,263.947,1024,11.2,1.63,8.04\npvt_v2_b1,224,3876.64,264.136,1024,14.01,2.12,15.39\ncs3sedarknet_l,256,3859.41,265.314,1024,21.91,4.86,8.56\nmobilenetv4_conv_blur_medium,256,3857.99,265.413,1024,9.72,1.59,11.2\ndpn68,224,3853.11,265.748,1024,12.61,2.35,10.47\nese_vovnet19b_dw,288,3852.02,265.825,1024,6.54,2.22,13.63\nfbnetv3_g,240,3837.93,266.791,1024,16.62,1.28,14.87\nresnet34,288,3825.08,267.697,1024,21.8,6.07,6.18\nconvnext_pico,288,3802.68,269.273,1024,9.05,2.27,10.08\neca_halonext26ts,256,3798.67,269.558,1024,10.76,2.44,11.46\necaresnetlight,224,3794.95,269.822,1024,30.16,4.11,8.42\ndpn68b,224,3764.97,271.97,1024,12.61,2.35,10.47\nefficien
tnet_em,240,3750.15,273.045,1024,6.9,3.04,14.34\nmixnet_m,224,3745.46,273.387,1024,5.01,0.36,8.19\nhgnetv2_b3,224,3744.03,273.492,1024,16.29,1.78,5.07\nresnet50,224,3729.86,274.531,1024,25.56,4.11,11.11\nefficientnet_b3_pruned,300,3729.22,274.578,1024,9.86,1.04,11.86\nbat_resnext26ts,256,3705.73,276.319,1024,10.73,2.53,12.51\nconvnext_pico_ols,288,3702.45,276.564,1024,9.06,2.37,10.74\nrexnet_150,224,3700.42,276.715,1024,9.73,0.9,11.21\nbotnet26t_256,256,3668.77,279.103,1024,12.49,3.32,11.98\nhrnet_w18_small_v2,224,3662.73,279.553,1024,15.6,2.62,9.65\nresnest14d,224,3658.62,279.877,1024,10.61,2.76,7.33\nresnet32ts,256,3655.47,280.118,1024,17.96,4.63,11.58\necaresnet26t,256,3636.71,281.563,1024,16.01,3.35,10.53\nresnet101,176,3629.67,282.109,1024,44.55,4.92,10.08\nmobilenetv4_hybrid_medium,320,3627.2,282.301,1024,11.07,2.05,14.36\nresnext26ts,288,3614.6,283.286,1024,10.3,3.07,13.31\nresnetv2_50t,224,3605.27,284.018,1024,25.57,4.32,11.82\nresnet33ts,256,3596.87,284.683,1024,19.68,4.76,11.66\ntf_efficientnet_em,240,3593.09,284.981,1024,6.9,3.04,14.34\nresnetv2_50d,224,3590.41,285.193,1024,25.57,4.35,11.92\nseresnet34,288,3587.79,285.399,1024,21.96,6.07,6.18\nregnetv_040,224,3583.87,285.714,1024,20.64,4.0,12.29\ntf_mixnet_m,224,3580.97,285.946,1024,5.01,0.36,8.19\nhalonet26t,256,3578.92,286.11,1024,12.48,3.19,11.69\nedgenext_small_rw,256,3571.34,286.716,1024,7.83,1.58,9.51\necaresnet101d_pruned,224,3571.05,286.74,1024,24.88,3.48,7.69\ndla60,224,3563.95,287.311,1024,22.04,4.26,10.16\ncoat_lite_tiny,224,3562.29,287.445,1024,5.72,1.6,11.65\nregnety_040,224,3558.64,287.74,1024,20.65,4.0,12.29\nresnet50c,224,3552.9,288.205,1024,25.58,4.35,11.92\nconvnextv2_pico,224,3531.58,289.945,1024,9.07,1.37,6.1\nvit_small_resnet26d_224,224,3508.49,291.853,1024,63.61,5.07,11.12\nresnet26,288,3505.0,292.144,1024,16.0,3.9,12.15\neca_resnext26ts,288,3503.71,292.25,1024,10.3,3.07,13.32\nseresnext26ts,288,3501.42,292.44,1024,10.39,3.07,13.32\ntf_efficientnet_b2,260,3496.2,292.88,1024,9.11,1.02
,13.83\nresnet34d,288,3493.39,293.114,1024,21.82,6.47,7.51\neca_resnet33ts,256,3492.62,293.179,1024,19.68,4.76,11.66\nefficientnet_b1,288,3488.73,293.505,1024,7.79,0.97,15.46\nseresnet33ts,256,3486.45,293.698,1024,19.78,4.76,11.66\ngcresnext26ts,288,3458.99,296.03,1024,10.48,3.07,13.33\ntresnet_m,224,3453.12,296.532,1024,31.39,5.75,7.31\ngcresnet33ts,256,3446.19,297.129,1024,19.88,4.76,11.68\ncoatnet_nano_cc_224,224,3441.88,297.501,1024,13.76,2.24,15.02\nresnet50t,224,3439.67,297.693,1024,25.57,4.32,11.82\nghostnetv2_160,224,3435.57,298.048,1024,12.39,0.42,7.23\nresnet50d,224,3420.07,299.4,1024,25.58,4.35,11.92\nefficientvit_b2,224,3405.99,300.636,1024,24.33,1.6,14.62\nvovnet39a,224,3393.85,301.712,1024,22.6,7.09,6.73\ncoatnext_nano_rw_224,224,3385.52,302.454,1024,14.7,2.47,12.8\ncoatnet_nano_rw_224,224,3380.48,302.905,1024,15.14,2.41,15.41\nlegacy_seresnet50,224,3371.1,303.744,1024,28.09,3.88,10.6\nefficientnet_b0_gn,224,3367.52,304.072,1024,5.29,0.42,6.75\ncs3darknet_focus_l,288,3358.65,304.874,1024,21.15,5.9,10.16\nfastvit_t8,256,3353.24,305.366,1024,4.03,0.7,8.63\nconvit_tiny,224,3349.24,305.73,1024,5.71,1.26,7.94\nrepvit_m1_5,224,3348.79,305.769,1024,14.64,2.31,15.7\ncoat_lite_mini,224,3328.7,307.616,1024,11.01,2.0,12.25\nselecsls84,224,3309.96,309.357,1024,50.95,5.9,7.57\nefficientnet_b2,288,3305.5,309.776,1024,9.11,1.12,16.2\nsam2_hiera_tiny,224,3299.25,310.362,1024,26.85,4.91,17.12\nhaloregnetz_b,224,3297.58,310.52,1024,11.68,1.97,11.94\nresnetblur18,288,3296.17,310.652,1024,11.69,3.87,5.6\nresnetaa50,224,3294.4,310.82,1024,25.56,5.15,11.64\nlevit_512,224,3286.88,311.53,1024,95.17,5.64,10.22\nvit_tiny_patch16_384,384,3281.68,312.024,1024,5.79,4.7,25.39\nese_vovnet39b,224,3281.48,312.043,1024,24.57,7.09,6.74\nmobilevit_xs,256,3279.06,312.274,1024,2.32,1.05,16.33\nwide_resnet50_2,176,3279.04,312.276,1024,68.88,7.29,8.97\neca_vovnet39b,224,3274.47,312.712,1024,22.6,7.09,6.74\nconvnextv2_atto,288,3271.31,313.015,1024,3.71,0.91,6.3\nmobileone_s2,224,3268.79,313.2
56,1024,7.88,1.34,11.55\ncs3darknet_l,288,3263.27,313.785,1024,21.16,6.16,10.83\nseresnet50,224,3243.29,315.718,1024,28.09,4.11,11.13\nvit_small_r26_s32_224,224,3242.11,315.833,1024,36.43,3.56,9.85\nregnetx_032,224,3239.67,316.071,1024,15.3,3.2,11.37\nnf_regnet_b3,288,3220.95,317.909,1024,18.59,1.67,11.84\nres2net50_48w_2s,224,3208.95,319.097,1024,25.29,4.18,11.72\nresnetaa34d,288,3173.49,322.664,1024,21.82,7.33,8.38\ndla60x,224,3140.94,326.005,1024,17.35,3.54,13.8\nresnext50_32x4d,224,3135.91,326.53,1024,25.03,4.26,14.4\nmobileone_s0,224,3126.59,327.503,1024,5.29,1.09,15.48\nconvnext_tiny,224,3120.16,328.178,1024,28.59,4.47,13.44\nvisformer_small,224,3116.52,328.562,1024,40.22,4.88,11.43\nresnet152,160,3116.12,328.603,1024,60.19,5.9,11.51\nlevit_conv_512,224,3097.94,330.532,1024,95.17,5.64,10.22\nvit_relpos_base_patch32_plus_rpn_256,256,3096.69,330.659,1024,119.42,7.68,8.01\nresnet26d,288,3085.16,331.901,1024,16.01,4.29,13.48\nskresnet34,224,3069.81,333.561,1024,22.28,3.67,5.13\nlevit_512d,224,3062.18,334.39,1024,92.5,5.85,11.3\nresnet50_clip_gap,224,3051.19,335.595,1024,23.53,5.39,12.44\neca_nfnet_l0,224,3050.64,335.658,1024,24.14,4.35,10.47\nresnetaa50d,224,3048.06,335.941,1024,25.58,5.39,12.44\nregnetz_c16,256,3040.22,336.807,1024,13.46,2.51,16.57\ncs3sedarknet_l,288,3038.16,337.035,1024,21.91,6.16,10.83\nxcit_nano_12_p16_384,384,3035.13,337.372,1024,3.05,1.64,12.15\ntiny_vit_21m_224,224,3034.37,337.457,1024,33.22,4.29,20.08\ndensenet121,224,3031.71,337.752,1024,7.98,2.87,6.9\nnfnet_l0,224,3030.88,337.845,1024,35.07,4.36,10.47\nresnetrs50,224,3023.9,338.625,1024,35.69,4.48,12.14\necaresnet50t,224,3023.88,338.627,1024,25.57,4.32,11.83\nregnetz_b16,288,3023.06,338.719,1024,9.72,2.39,16.43\nseresnet50t,224,3021.11,338.934,1024,28.1,4.32,11.83\necaresnet50d,224,3008.25,340.387,1024,25.58,4.35,11.93\nvit_base_patch32_plus_256,256,3003.41,340.935,1024,119.48,7.79,7.76\nvit_medium_patch16_clip_224,224,3002.97,340.985,1024,38.59,8.0,15.93\nhgnetv2_b4,288,3001.56,341.146
,1024,19.8,4.54,11.08\ncrossvit_small_240,240,2994.9,341.904,1024,26.86,5.63,18.17\nresnet50s,224,2990.06,342.458,1024,25.68,5.47,13.52\nrexnetr_200,224,2981.52,343.438,1024,16.52,1.59,15.11\nmixnet_l,224,2960.5,345.878,1024,7.33,0.58,10.84\nefficientnet_cc_b1_8e,240,2957.88,346.183,1024,39.72,0.75,15.44\nxcit_tiny_24_p16_224,224,2951.65,346.913,1024,12.12,2.34,11.82\ngcresnext50ts,256,2949.61,347.155,1024,15.67,3.75,15.46\nlambda_resnet26rpt_256,256,2920.38,350.629,1024,10.99,3.16,11.87\nhiera_tiny_224,224,2913.8,351.42,1024,27.91,4.91,17.13\nresnext50d_32x4d,224,2906.09,352.354,1024,25.05,4.5,15.2\nresnet32ts,288,2900.64,353.016,1024,17.96,5.86,14.65\nvit_little_patch16_reg1_gap_256,256,2900.54,353.027,1024,22.52,6.27,18.06\nlevit_conv_512d,224,2894.37,353.779,1024,92.5,5.85,11.3\nres2net50_26w_4s,224,2886.15,354.784,1024,25.7,4.28,12.61\nresnetv2_50x1_bit,224,2881.41,355.371,1024,25.55,4.23,11.11\nmobilenetv4_conv_large,320,2873.61,356.335,1024,32.59,4.47,18.97\ntf_efficientnetv2_b3,300,2870.86,356.676,1024,14.36,3.04,15.74\nvit_little_patch16_reg4_gap_256,256,2870.27,356.75,1024,22.52,6.35,18.33\nmobilevitv2_100,256,2869.22,356.881,1024,4.9,1.84,16.08\ncoatnet_pico_rw_224,224,2867.77,357.061,1024,10.85,2.05,14.62\nefficientnetv2_rw_t,288,2863.9,357.543,1024,13.65,3.19,16.42\nresnet33ts,288,2854.58,358.712,1024,19.68,6.02,14.75\ncspresnet50,256,2853.48,358.849,1024,21.62,4.54,11.5\ninception_next_tiny,224,2844.01,360.044,1024,28.06,4.19,11.98\nresnet50_clip,224,2836.87,360.947,1024,38.32,6.14,12.98\nres2next50,224,2827.45,362.148,1024,24.67,4.2,13.71\ntf_mixnet_l,224,2826.43,362.284,1024,7.33,0.58,10.84\ndla60_res2next,224,2826.22,362.31,1024,17.03,3.49,13.17\nvit_base_resnet26d_224,224,2826.06,362.331,1024,101.4,6.97,13.16\nhgnetv2_b5,224,2824.85,362.487,1024,39.57,6.56,11.19\nvovnet57a,224,2824.04,362.591,1024,36.64,8.95,7.52\ndeit3_medium_patch16_224,224,2819.68,363.151,1024,38.85,8.0,15.93\nconvnextv2_femto,288,2809.3,364.492,1024,5.23,1.3,7.56\nres2net50_14w
_8s,224,2809.15,364.506,1024,25.06,4.21,13.28\ngcresnet50t,256,2800.68,365.615,1024,25.9,5.42,14.67\ncrossvit_15_240,240,2793.66,366.534,1024,27.53,5.81,19.77\nvit_relpos_medium_patch16_rpn_224,224,2791.96,366.75,1024,38.73,7.97,17.02\nseresnext50_32x4d,224,2785.57,367.599,1024,27.56,4.26,14.42\ndla60_res2net,224,2784.11,367.789,1024,20.85,4.15,12.34\nconvnext_tiny_hnf,224,2781.72,368.1,1024,28.59,4.47,13.44\ngc_efficientnetv2_rw_t,288,2780.71,368.241,1024,13.68,3.2,16.45\ngcvit_xxtiny,224,2779.24,368.435,1024,12.0,2.14,15.36\nvgg11_bn,224,2779.08,368.458,1024,132.87,7.62,7.44\nlegacy_seresnext50_32x4d,224,2778.41,368.545,1024,27.56,4.26,14.42\ntf_efficientnet_cc_b1_8e,240,2776.91,368.745,1024,39.72,0.75,15.44\nvit_relpos_medium_patch16_224,224,2773.12,369.248,1024,38.75,7.97,17.02\nresmlp_24_224,224,2769.63,369.714,1024,30.02,5.96,10.91\ncrossvit_15_dagger_240,240,2769.4,369.745,1024,28.21,6.13,20.43\nhgnetv2_b2,288,2767.58,369.987,1024,11.22,1.89,6.8\neca_resnet33ts,288,2765.99,370.2,1024,19.68,6.02,14.76\nvit_srelpos_medium_patch16_224,224,2763.71,370.506,1024,38.74,7.96,16.21\nseresnet33ts,288,2762.81,370.627,1024,19.78,6.02,14.76\ntwins_svt_small,224,2759.48,371.074,1024,24.06,2.94,13.75\nvit_relpos_medium_patch16_cls_224,224,2754.95,371.684,1024,38.76,8.03,18.24\ntwins_pcpvt_small,224,2745.95,372.902,1024,24.11,3.83,18.08\nnextvit_small,224,2739.59,373.768,1024,31.76,5.81,18.44\ndavit_tiny,224,2738.13,280.473,768,28.36,4.54,18.89\ngcresnet33ts,288,2730.01,375.08,1024,19.88,6.02,14.78\ncspresnet50d,256,2722.26,376.147,1024,21.64,4.86,12.55\nfbnetv3_g,288,2718.22,376.707,1024,16.62,1.77,21.09\nseresnetaa50d,224,2716.73,376.913,1024,28.11,5.4,12.46\nese_vovnet57b,224,2715.38,377.101,1024,38.61,8.95,7.52\nrexnet_200,224,2713.9,377.301,1024,16.37,1.56,14.91\nresnetblur50,224,2713.18,377.406,1024,25.56,5.16,12.02\necaresnet50d_pruned,288,2709.71,377.889,1024,19.94,4.19,10.61\nnfnet_f0,192,2708.66,378.036,1024,71.49,7.21,10.16\nefficientvit_l1,224,2706.35,378.359,102
4,52.65,5.27,15.85\nconvnext_nano,288,2696.47,379.744,1024,15.59,4.06,13.84\nhgnet_small,224,2694.73,379.99,1024,24.36,8.53,8.79\nres2net50d,224,2694.69,379.989,1024,25.72,4.52,13.41\ncspresnet50w,256,2694.09,380.081,1024,28.12,5.04,12.19\ncoatnet_0_rw_224,224,2684.42,381.451,1024,27.44,4.43,18.73\npoolformerv2_s12,224,2665.71,384.127,1024,11.89,1.83,5.53\nmobileone_s3,224,2662.7,384.561,1024,10.17,1.94,13.85\nresnest26d,224,2658.83,385.121,1024,17.07,3.64,9.97\nefficientvit_b2,256,2647.05,386.835,1024,24.33,2.09,19.03\nregnetx_040,224,2643.92,387.289,1024,22.12,3.99,12.2\nregnety_032,224,2640.33,387.82,1024,19.44,3.2,11.26\nresnet51q,256,2620.47,390.76,1024,35.7,6.38,16.55\nrepvgg_b1g4,224,2615.17,391.55,1024,39.97,8.15,10.64\nnf_regnet_b3,320,2615.09,391.563,1024,18.59,2.05,14.61\ngmixer_24_224,224,2609.82,392.354,1024,24.72,5.28,14.45\ndensenetblur121d,224,2595.94,394.451,1024,8.0,3.11,7.9\nedgenext_small,320,2590.64,395.256,1024,5.59,1.97,14.16\nmobilevit_s,256,2588.55,395.577,1024,5.58,2.03,19.94\ncs3darknet_focus_x,256,2586.12,395.949,1024,35.02,8.03,10.69\ninception_v3,299,2561.31,399.784,1024,23.83,5.73,8.97\nresnet152,176,2560.74,399.874,1024,60.19,7.22,13.99\nnf_ecaresnet50,224,2541.49,402.904,1024,25.56,4.21,11.13\nresnetblur50d,224,2541.42,402.913,1024,25.58,5.4,12.82\nresnet26t,320,2540.06,403.129,1024,16.01,5.24,16.44\nnf_seresnet50,224,2535.8,403.807,1024,28.09,4.21,11.13\nconvnextv2_nano,224,2534.59,403.998,1024,15.62,2.46,8.37\nefficientnet_b0_g8_gn,224,2533.38,404.193,1024,6.56,0.66,6.75\nresnetrs101,192,2519.45,406.427,1024,63.62,6.04,12.7\nconvnext_nano_ols,288,2512.2,407.6,1024,15.65,4.38,15.5\nvit_medium_patch16_gap_240,240,2499.59,409.656,1024,44.4,9.22,18.81\ndensenet169,224,2499.14,409.729,1024,14.15,3.4,7.3\nmobilenetv4_hybrid_medium,384,2489.91,411.248,1024,11.07,3.01,21.18\nxcit_small_12_p16_224,224,2480.75,412.767,1024,26.25,4.82,12.58\ncs3darknet_x,256,2478.79,413.095,1024,35.05,8.38,11.35\ncspresnext50,256,2475.13,413.705,1024,20.57,4.
05,15.86\ndarknetaa53,256,2444.37,418.912,1024,36.02,7.97,12.39\ndarknet53,256,2438.24,419.962,1024,41.61,9.31,12.39\nseresnext26t_32x4d,288,2437.82,420.035,1024,16.81,4.46,16.68\nmaxvit_pico_rw_256,256,2433.04,315.645,768,7.46,1.83,22.3\nsehalonet33ts,256,2426.84,421.938,1024,13.69,3.55,14.7\nmaxvit_rmlp_pico_rw_256,256,2425.78,316.588,768,7.52,1.85,24.86\nseresnext26d_32x4d,288,2422.58,422.677,1024,16.81,4.51,16.85\nvit_base_r26_s32_224,224,2414.03,424.176,1024,101.38,6.81,12.36\nhgnet_tiny,288,2409.6,424.956,1024,14.74,7.51,10.51\nfocalnet_tiny_srf,224,2408.09,425.223,1024,28.43,4.42,16.32\nfastvit_t12,256,2389.89,428.462,1024,7.55,1.42,12.42\ncoatnet_rmlp_nano_rw_224,224,2375.1,431.128,1024,15.15,2.62,20.34\nresnet61q,256,2368.57,432.318,1024,36.85,7.8,17.01\nedgenext_base,256,2365.56,432.866,1024,18.51,3.85,15.58\nresnetv2_101,224,2358.08,434.239,1024,44.54,7.83,16.23\nhiera_small_224,224,2356.28,434.571,1024,35.01,6.42,20.75\nnf_resnet50,256,2351.74,435.412,1024,25.56,5.46,14.52\ncs3sedarknet_x,256,2344.23,436.807,1024,35.4,8.38,11.35\nresnet50_mlp,256,2333.79,438.762,1024,26.65,7.05,16.25\necaresnet50t,256,2331.13,439.262,1024,25.57,5.64,15.45\nresnetv2_50,288,2330.51,439.377,1024,25.55,6.79,18.37\nxcit_nano_12_p8_224,224,2328.05,439.842,1024,3.05,2.16,15.71\nefficientnet_lite3,300,2314.24,442.468,1024,8.2,1.65,21.85\nresnest50d_1s4x24d,224,2310.08,443.264,1024,25.68,4.43,13.57\nresnet101,224,2287.55,447.629,1024,44.55,7.83,16.23\ndm_nfnet_f0,192,2284.54,448.22,1024,71.49,7.21,10.16\nefficientnetv2_s,288,2283.97,448.33,1024,21.46,4.75,20.13\necaresnet26t,320,2283.24,448.474,1024,16.01,5.24,16.44\npvt_v2_b2,224,2282.41,448.635,1024,25.36,4.05,27.53\ngcresnext50ts,288,2276.49,449.804,1024,15.67,4.75,19.57\ndla102,224,2270.79,450.932,1024,33.27,7.19,14.18\ndpn68b,288,2266.47,451.792,1024,12.61,3.89,17.3\nfastvit_s12,256,2263.12,452.46,1024,9.47,1.82,13.67\ngmlp_s16_224,224,2262.28,452.629,1024,19.42,4.42,15.1\nskresnet50,224,2261.97,452.692,1024,25.8,4.11,12.5\n
ecaresnetlight,288,2258.94,453.299,1024,30.16,6.79,13.91\nrdnet_tiny,224,2257.25,453.639,1024,23.86,5.06,15.98\nfastvit_sa12,256,2256.89,453.711,1024,11.58,1.96,14.03\nfocalnet_tiny_lrf,224,2250.45,455.008,1024,28.65,4.49,17.76\nregnetx_080,224,2241.1,456.908,1024,39.57,8.02,14.06\nresnetv2_101d,224,2230.6,459.058,1024,44.56,8.07,17.04\nhgnetv2_b3,288,2229.64,459.256,1024,16.29,2.94,8.38\nresnet101c,224,2225.59,460.093,1024,44.57,8.08,17.04\nvit_medium_patch16_gap_256,256,2220.07,461.236,1024,38.86,10.59,22.15\nresnet50,288,2211.87,462.946,1024,25.56,6.8,18.37\nedgenext_small_rw,320,2206.45,464.081,1024,7.83,2.46,14.85\ngcresnet50t,288,2205.25,464.336,1024,25.9,6.86,18.57\nvit_base_resnet50d_224,224,2196.36,466.215,1024,110.97,8.73,16.92\nregnetv_040,288,2184.78,468.687,1024,20.64,6.6,20.3\neva02_tiny_patch14_336,336,2169.41,472.008,1024,5.76,4.68,27.16\nregnety_040,288,2169.12,472.071,1024,20.65,6.61,20.3\nefficientnetv2_rw_s,288,2168.46,472.214,1024,23.94,4.91,21.41\neva02_small_patch14_224,224,2168.12,472.287,1024,21.62,6.14,18.28\nresnet101d,224,2168.0,472.315,1024,44.57,8.08,17.04\nvgg13,224,2160.71,473.907,1024,133.05,11.31,12.25\ngcvit_xtiny,224,2157.88,474.529,1024,19.98,2.93,20.26\nmobilevitv2_125,256,2156.6,474.811,1024,7.48,2.86,20.1\nconvnextv2_pico,288,2147.68,476.783,1024,9.07,2.27,10.08\nskresnet50d,224,2140.48,478.386,1024,25.82,4.36,13.31\nres2net50_26w_6s,224,2137.67,479.014,1024,37.05,6.33,15.28\necaresnet101d_pruned,288,2127.79,481.24,1024,24.88,5.75,12.71\nhrnet_w18_ssld,224,2127.32,481.346,1024,21.3,4.32,16.31\nefficientvit_l2,224,2122.96,482.334,1024,63.71,6.97,19.58\nhieradet_small,256,2119.77,362.292,768,34.72,8.51,27.76\ntf_efficientnet_lite3,300,2115.57,484.019,1024,8.2,1.65,21.85\nhrnet_w18,224,2115.21,484.101,1024,21.3,4.32,16.31\nefficientformer_l3,224,2095.8,488.585,1024,31.41,3.93,12.01\nefficientvit_b2,288,2095.12,488.743,1024,24.33,2.64,24.03\nwide_resnet50_2,224,2093.41,489.145,1024,68.88,11.43,14.4\nsebotnet33ts_256,256,2091.61,24
4.778,512,13.7,3.89,17.46\npoolformer_s24,224,2090.86,489.741,1024,21.39,3.41,10.68\nresnet51q,288,2086.73,490.708,1024,35.7,8.07,20.94\nvit_medium_patch16_reg1_gap_256,256,2074.81,493.527,1024,38.88,10.63,22.26\ncspdarknet53,256,2060.68,496.914,1024,27.64,6.57,16.81\nnextvit_base,224,2056.15,498.007,1024,44.82,8.29,23.71\nvit_medium_patch16_reg4_gap_256,256,2054.73,498.351,1024,38.88,10.76,22.6\nefficientnet_b3,288,2053.58,498.63,1024,12.23,1.63,21.49\nresnet50t,288,2045.49,500.603,1024,25.57,7.14,19.53\ndla102x,224,2042.36,501.368,1024,26.31,5.89,19.42\npvt_v2_b2_li,224,2034.87,503.215,1024,22.55,3.91,27.6\nlegacy_seresnet101,224,2032.63,503.771,1024,49.33,7.61,15.74\nresnet50d,288,2031.97,503.934,1024,25.58,7.19,19.7\nmixnet_xl,224,2030.61,504.271,1024,11.9,0.93,14.57\nresnet101_clip_gap,224,2015.07,508.159,1024,42.52,9.11,17.56\nresnetaa101d,224,2013.22,508.627,1024,44.57,9.12,17.56\nswin_tiny_patch4_window7_224,224,2008.94,509.712,1024,28.29,4.51,17.06\nrepvgg_b1,224,2004.0,510.966,1024,57.42,13.16,10.64\ncoatnet_bn_0_rw_224,224,2002.05,511.465,1024,27.44,4.67,22.04\ncs3edgenet_x,256,1996.96,512.768,1024,47.82,11.53,12.92\ncoatnet_rmlp_0_rw_224,224,1995.05,513.261,1024,27.45,4.72,24.89\nmaxxvit_rmlp_nano_rw_256,256,1993.93,385.157,768,16.78,4.37,26.05\ncs3sedarknet_xdw,256,1988.81,514.859,1024,21.6,5.97,17.18\nseresnet101,224,1987.57,515.192,1024,49.33,7.84,16.27\nresnetaa50,288,1985.73,515.67,1024,25.56,8.52,19.24\nresnet101s,224,1984.65,515.949,1024,44.67,9.19,18.64\nmobilenetv4_conv_large,384,1984.05,516.104,1024,32.59,6.43,27.31\nregnetx_064,224,1979.8,517.214,1024,26.21,6.49,16.37\nregnetz_d32,256,1973.2,518.943,1024,27.58,5.98,23.74\nnest_tiny,224,1968.18,520.267,1024,17.06,5.83,25.48\ntf_efficientnetv2_s,300,1966.15,520.804,1024,21.46,5.35,22.73\nregnetz_d8,256,1964.69,521.191,1024,23.37,3.97,23.74\nregnetz_c16,320,1962.44,521.788,1024,13.46,3.92,25.88\ntresnet_v2_l,224,1957.54,523.094,1024,46.17,8.85,16.34\ncs3darknet_x,288,1954.84,523.818,1024,35.05,10
.6,14.36\nese_vovnet39b,288,1951.57,524.694,1024,24.57,11.71,11.13\nmobileone_s4,224,1950.42,525.005,1024,14.95,3.04,17.74\ncrossvit_18_240,240,1944.91,526.492,1024,43.27,9.05,26.26\nnest_tiny_jx,224,1943.83,526.786,1024,17.06,5.83,25.48\nconvnext_tiny,288,1942.31,527.195,1024,28.59,7.39,22.21\nresnext101_32x4d,224,1940.6,527.66,1024,44.18,8.01,21.23\nxcit_tiny_12_p16_384,384,1933.82,529.511,1024,6.72,3.64,18.26\nseresnet50,288,1928.14,531.071,1024,28.09,6.8,18.39\ndarknetaa53,288,1927.58,531.226,1024,36.02,10.08,15.68\ndensenet201,224,1923.55,532.337,1024,20.01,4.34,7.85\nregnetz_040,256,1923.5,532.341,1024,27.12,4.06,24.19\nnf_regnet_b4,320,1923.32,532.402,1024,30.21,3.29,19.88\nresnet101_clip,224,1921.47,532.914,1024,56.26,9.81,18.08\ncrossvit_18_dagger_240,240,1916.57,534.275,1024,44.27,9.5,27.03\ndarknet53,288,1911.52,535.684,1024,41.61,11.78,15.68\nregnetz_040_h,256,1907.32,536.868,1024,28.94,4.12,24.29\nresnext101_32x8d,176,1906.27,537.164,1024,88.79,10.33,19.37\nhalonet50ts,256,1904.72,537.602,1024,22.73,5.3,19.2\nrepvit_m2_3,224,1903.75,537.874,1024,23.69,4.57,26.21\necaresnet101d,224,1900.31,538.848,1024,44.57,8.08,17.07\ncoat_lite_small,224,1894.79,540.417,1024,19.84,3.96,22.09\nvgg13_bn,224,1891.12,541.467,1024,133.05,11.33,12.25\nresnet61q,288,1890.53,541.637,1024,36.85,9.87,21.52\nnf_resnet101,224,1888.17,542.312,1024,44.55,8.01,16.23\ntwins_pcpvt_base,224,1884.87,543.263,1024,43.83,6.68,25.25\nconvnext_small,224,1878.13,545.211,1024,50.22,8.71,21.56\ncs3se_edgenet_x,256,1863.97,549.353,1024,50.72,11.53,12.94\nresnext50_32x4d,288,1862.16,549.888,1024,25.03,7.04,23.81\nwide_resnet101_2,176,1861.75,550.01,1024,126.89,14.31,13.18\nresmlp_36_224,224,1860.14,550.485,1024,44.69,8.91,16.33\nvolo_d1_224,224,1858.16,551.072,1024,26.63,6.94,24.43\neca_nfnet_l0,288,1849.16,553.756,1024,24.14,7.12,17.29\nnfnet_l0,288,1843.23,555.536,1024,35.07,7.13,17.29\ncs3sedarknet_x,288,1842.15,555.861,1024,35.4,10.6,14.37\nresnetaa50d,288,1841.55,556.042,1024,25.58,8.92,20.57
\nresnetv2_50d_gn,224,1829.87,559.59,1024,25.57,4.38,11.92\nswin_s3_tiny_224,224,1829.63,559.665,1024,28.33,4.64,19.13\nsequencer2d_s,224,1819.49,562.785,1024,27.65,4.96,11.31\nnf_resnet50,288,1819.2,562.875,1024,25.56,6.88,18.37\necaresnet50t,288,1803.63,567.733,1024,25.57,7.14,19.55\nresnest50d,224,1801.69,568.346,1024,27.48,5.4,14.36\nseresnet50t,288,1799.96,568.892,1024,28.1,7.14,19.55\ndensenet121,288,1797.45,569.685,1024,7.98,4.74,11.41\nrexnetr_200,288,1796.89,427.395,768,16.52,2.62,24.96\necaresnet50d,288,1794.35,570.669,1024,25.58,7.19,19.72\nres2net101_26w_4s,224,1792.61,571.216,1024,45.21,8.1,18.45\nmaxxvitv2_nano_rw_256,256,1787.4,429.664,768,23.7,6.26,23.05\nresnetblur101d,224,1777.64,576.032,1024,44.57,9.12,17.94\nresnet50_gn,224,1775.58,576.701,1024,25.56,4.14,11.11\ncait_xxs24_224,224,1772.36,577.749,1024,11.96,2.53,20.29\nmaxvit_nano_rw_256,256,1770.91,433.665,768,15.45,4.46,30.28\nmixer_b16_224,224,1768.46,579.024,1024,59.88,12.62,14.53\nmaxvit_rmlp_nano_rw_256,256,1766.45,434.761,768,15.5,4.47,31.92\nvit_large_patch32_224,224,1766.19,579.765,1024,305.51,15.39,13.3\nvit_base_patch32_clip_384,384,1763.15,580.767,1024,88.3,13.06,16.5\npit_b_224,224,1762.75,580.9,1024,73.76,12.42,32.94\nvit_base_patch32_384,384,1762.38,581.02,1024,88.3,13.06,16.5\nmobilevitv2_150,256,1758.79,436.653,768,10.59,4.09,24.11\nregnety_064,224,1758.25,582.385,1024,30.58,6.39,16.41\nres2net50_26w_8s,224,1757.77,582.547,1024,48.4,8.37,17.95\npit_b_distilled_224,224,1754.34,583.684,1024,74.79,12.5,33.07\nrexnetr_300,224,1740.52,588.318,1024,34.81,3.39,22.16\nregnetv_064,224,1737.51,589.338,1024,30.58,6.39,16.41\ntresnet_l,224,1735.02,590.182,1024,55.99,10.9,11.9\nresnext50d_32x4d,288,1734.0,590.533,1024,25.05,7.44,25.13\nresnetv2_101x1_bit,224,1730.09,591.866,1024,44.54,8.04,16.23\nresnetv2_50d_frn,224,1729.39,592.103,1024,25.59,4.33,11.92\nvgg16,224,1729.39,592.106,1024,138.36,15.47,13.56\nseresnext101_32x4d,224,1725.09,593.58,1024,48.96,8.02,21.26\nlegacy_seresnext101_32x4d,2
24,1721.49,594.81,1024,48.96,8.02,21.26\nres2net101d,224,1716.39,596.589,1024,45.23,8.35,19.25\ntf_efficientnet_b3,300,1715.74,596.818,1024,12.23,1.87,23.83\nhgnetv2_b5,288,1704.12,600.886,1024,39.57,10.84,18.5\nconvnext_tiny_hnf,288,1697.32,603.29,1024,28.59,7.39,22.21\nvitamin_small_224,224,1687.84,606.683,1024,22.03,5.92,26.38\nese_vovnet39b_evos,224,1686.51,607.159,1024,24.58,7.07,6.74\nregnety_080,224,1686.26,607.249,1024,39.18,8.0,17.97\ncoatnet_0_224,224,1678.81,304.968,512,25.04,4.58,24.01\nconvnextv2_tiny,224,1674.96,611.347,1024,28.64,4.47,13.44\nregnetz_b16_evos,224,1670.61,612.94,1024,9.74,1.43,9.95\nresnet101d,256,1668.7,613.64,1024,44.57,10.55,22.25\nbotnet50ts_256,256,1666.23,307.27,512,22.74,5.54,22.23\nfastvit_mci0,256,1665.78,614.716,1024,11.41,2.42,18.29\nefficientnet_b3,320,1664.16,615.315,1024,12.23,2.01,26.52\nmaxvit_tiny_rw_224,224,1656.79,463.537,768,29.06,5.11,33.11\nseresnext50_32x4d,288,1655.64,618.482,1024,27.56,7.04,23.82\nhiera_small_abswin_256,256,1653.07,619.442,1024,34.36,8.29,26.38\ndavit_small,224,1646.59,466.407,768,49.75,8.8,30.49\ninception_next_small,224,1645.81,622.174,1024,49.37,8.36,19.27\nnextvit_large,224,1645.06,622.459,1024,57.87,10.78,28.99\ncoatnet_rmlp_1_rw_224,224,1642.36,623.482,1024,41.69,7.85,35.47\nseresnetaa50d,288,1641.68,623.736,1024,28.11,8.92,20.59\nswinv2_cr_tiny_224,224,1640.6,624.15,1024,28.33,4.66,28.45\nefficientvit_b3,224,1640.13,624.331,1024,48.65,3.99,26.9\nresnetv2_152,224,1638.56,624.928,1024,60.19,11.55,22.56\nlegacy_xception,299,1625.38,472.494,768,22.86,8.4,35.83\nefficientnet_el,300,1618.59,632.638,1024,10.59,8.0,30.7\nresnetblur50,288,1611.97,635.236,1024,25.56,8.52,19.87\nswinv2_cr_tiny_ns_224,224,1611.34,635.485,1024,28.33,4.66,28.45\nefficientnet_el_pruned,300,1610.86,635.676,1024,10.59,8.0,30.7\npvt_v2_b3,224,1608.94,636.429,1024,45.24,6.92,37.7\nresnet152,224,1606.24,637.501,1024,60.19,11.56,22.56\nregnety_040_sgn,224,1600.6,639.748,1024,20.65,4.03,12.29\nhiera_base_224,224,1599.47,640.20
2,1024,51.52,9.4,30.42\nhgnet_small,288,1591.36,482.595,768,24.36,14.09,14.53\nvit_small_resnet50d_s16_224,224,1590.51,643.806,1024,57.53,13.48,24.82\ncs3edgenet_x,288,1584.3,646.332,1024,47.82,14.59,16.36\nvit_base_patch16_siglip_gap_224,224,1583.21,646.778,1024,85.8,17.49,23.75\nmvitv2_tiny,224,1582.77,646.956,1024,24.17,4.7,21.16\nmixer_l32_224,224,1581.12,647.634,1024,206.94,11.27,19.86\ndeit_base_patch16_224,224,1579.88,648.139,1024,86.57,17.58,23.9\nhalo2botnet50ts_256,256,1579.41,648.334,1024,22.64,5.02,21.78\ndeit_base_distilled_patch16_224,224,1578.79,648.588,1024,87.34,17.68,24.05\nresnetv2_152d,224,1578.66,648.638,1024,60.2,11.8,23.36\nregnety_032,288,1575.76,649.833,1024,19.44,5.29,18.61\nvit_base_patch16_224_miil,224,1573.4,650.81,1024,94.4,17.59,23.91\nvit_base_patch16_224,224,1572.08,651.357,1024,86.57,17.58,23.9\nresnet152c,224,1571.84,651.453,1024,60.21,11.8,23.36\nregnety_080_tv,224,1571.53,651.584,1024,39.38,8.51,19.73\nnf_seresnet101,224,1570.7,651.927,1024,49.33,8.02,16.27\nvit_base_patch16_clip_224,224,1570.62,651.96,1024,86.57,17.58,23.9\nnf_ecaresnet101,224,1570.49,652.017,1024,44.55,8.01,16.27\ntf_efficientnet_el,300,1566.79,653.554,1024,10.59,8.0,30.7\nvit_base_patch16_siglip_224,224,1566.04,653.867,1024,92.88,17.73,24.06\nmobilenetv4_conv_aa_large,384,1562.44,655.376,1024,32.59,7.07,32.29\nvit_base_mci_224,224,1562.31,655.429,1024,86.35,17.73,24.65\nese_vovnet99b,224,1559.84,656.466,1024,63.2,16.51,11.27\nvit_base_patch16_gap_224,224,1554.04,658.919,1024,86.57,17.49,25.59\ncaformer_s18,224,1550.47,660.434,1024,26.34,4.13,19.39\nrexnet_300,224,1550.16,660.567,1024,34.71,3.44,22.4\nhrnet_w32,224,1547.9,661.528,1024,41.23,8.97,22.02\nefficientvit_l2,256,1547.32,661.778,1024,63.71,9.09,25.49\nrepvgg_b2g4,224,1545.72,662.466,1024,61.76,12.63,12.9\ndensenetblur121d,288,1544.46,663.005,1024,8.0,5.14,13.06\nresnet152d,224,1541.31,664.361,1024,60.21,11.8,23.36\nconvnextv2_nano,288,1537.16,499.612,768,15.62,4.06,13.84\nbeit_base_patch16_224,224,1536
.2,666.568,1024,86.53,17.58,23.9\nvgg16_bn,224,1534.81,667.174,1024,138.37,15.5,13.56\nrdnet_small,224,1534.32,667.385,1024,50.44,8.74,22.55\nnfnet_f0,256,1532.99,667.963,1024,71.49,12.62,18.05\nvit_medium_patch16_rope_reg1_gap_256,256,1531.11,668.785,1024,38.74,10.63,22.26\nbeitv2_base_patch16_224,224,1520.92,673.265,1024,86.53,17.58,23.9\nresnetblur50d,288,1515.28,675.773,1024,25.58,8.92,21.19\nvit_base_patch16_rpn_224,224,1513.98,676.348,1024,86.54,17.49,23.75\nedgenext_base,320,1511.01,677.677,1024,18.51,6.01,24.32\nconvit_small,224,1505.94,679.963,1024,27.78,5.76,17.87\ndla169,224,1503.13,681.233,1024,53.39,11.6,20.2\ndeit3_base_patch16_224,224,1496.23,684.377,1024,86.59,17.58,23.9\nvit_relpos_base_patch16_rpn_224,224,1482.24,690.829,1024,86.41,17.51,24.97\nvit_relpos_base_patch16_224,224,1477.58,693.015,1024,86.43,17.51,24.97\nxcit_tiny_12_p8_224,224,1475.49,693.994,1024,6.71,4.81,23.6\nskresnext50_32x4d,224,1475.1,694.179,1024,27.48,4.5,17.18\nvit_relpos_base_patch16_clsgap_224,224,1468.63,697.236,1024,86.43,17.6,25.12\nvit_small_patch16_384,384,1467.92,697.574,1024,22.2,15.52,50.78\necaresnet50t,320,1467.5,697.775,1024,25.57,8.82,24.13\nvit_relpos_base_patch16_cls_224,224,1467.04,697.995,1024,86.43,17.6,25.12\nmobilevitv2_175,256,1465.89,349.264,512,14.25,5.54,28.13\nvit_small_patch16_36x1_224,224,1461.67,700.555,1024,64.67,13.71,35.69\ngcvit_tiny,224,1457.43,702.592,1024,28.22,4.79,29.82\nconvformer_s18,224,1455.15,703.698,1024,26.77,3.96,15.82\nvit_betwixt_patch16_reg1_gap_256,256,1449.59,706.394,1024,60.4,16.32,27.83\nresnetv2_50d_evos,224,1447.75,707.29,1024,25.59,4.33,11.92\nfocalnet_small_srf,224,1446.06,708.111,1024,49.89,8.62,26.26\nresnet152s,224,1444.93,708.675,1024,60.32,12.92,24.96\nvgg19,224,1441.62,710.3,1024,143.67,19.63,14.86\nvit_betwixt_patch16_reg4_gap_256,256,1434.73,713.713,1024,60.4,16.52,28.24\ndpn92,224,1422.77,719.712,1024,37.67,6.54,18.21\nregnetx_120,224,1419.34,721.451,1024,46.11,12.13,21.37\nsequencer2d_m,224,1415.18,723.57,1024,
38.31,6.55,14.26\nresnetv2_101,288,1410.42,726.012,1024,44.54,12.94,26.83\nvit_small_patch16_18x2_224,224,1410.32,726.067,1024,64.67,13.71,35.69\ndla102x2,224,1408.31,727.1,1024,41.28,9.34,29.91\nlegacy_seresnet152,224,1405.68,728.463,1024,66.82,11.33,22.08\npoolformer_s36,224,1403.92,729.375,1024,30.86,5.0,15.82\ndensenet161,224,1400.64,731.082,1024,28.68,7.79,11.06\nmobilenetv4_hybrid_large,384,1400.4,731.209,1024,37.76,7.77,34.52\nmaxvit_tiny_tf_224,224,1398.62,549.103,768,30.92,5.6,35.78\nvit_base_patch16_clip_quickgelu_224,224,1393.62,734.768,1024,86.19,17.58,23.9\nflexivit_base,240,1387.12,738.211,1024,86.59,20.29,28.36\nseresnet152,224,1385.77,738.93,1024,66.82,11.57,22.61\nmobilenetv4_hybrid_medium,448,1381.87,741.016,1024,11.07,4.2,29.64\npoolformerv2_s24,224,1380.23,741.894,1024,21.34,3.42,10.68\nefficientformerv2_s0,224,1376.45,743.934,1024,3.6,0.41,5.3\ndeit3_small_patch16_384,384,1372.21,746.232,1024,22.21,15.52,50.78\nrepvgg_b2,224,1368.31,748.357,1024,89.02,20.45,12.9\nresnet101,288,1367.0,749.076,1024,44.55,12.95,26.83\ncoatnet_1_rw_224,224,1362.79,751.389,1024,41.72,8.04,34.6\ncoatnet_rmlp_1_rw2_224,224,1362.11,751.762,1024,41.72,8.11,40.13\nfocalnet_small_lrf,224,1356.15,755.049,1024,50.34,8.74,28.61\ntwins_pcpvt_large,224,1355.77,755.28,1024,60.99,9.84,35.82\ninception_v4,299,1352.8,756.936,1024,42.68,12.28,15.09\nmaxxvit_rmlp_tiny_rw_256,256,1347.99,569.726,768,29.64,6.66,39.76\nregnetx_160,224,1347.67,759.82,1024,54.28,15.99,25.52\nregnety_120,224,1342.31,762.851,1024,51.82,12.14,21.38\neca_nfnet_l1,256,1339.38,764.526,1024,41.41,9.62,22.04\nnf_regnet_b4,384,1338.66,764.931,1024,30.21,4.7,28.61\nfastvit_sa24,256,1331.86,768.836,1024,21.55,3.8,24.32\nxcit_small_24_p16_224,224,1319.85,775.833,1024,47.67,9.1,23.64\nvit_base_patch32_clip_448,448,1316.78,777.646,1024,88.34,17.93,23.9\nconvnext_base,224,1309.55,781.937,1024,88.59,15.38,28.75\nhrnet_w30,224,1307.0,783.46,1024,37.71,8.15,21.21\nvit_base_patch16_xp_224,224,1302.22,786.337,1024,86.51,17.5
6,23.9\nmixnet_xxl,224,1300.49,590.535,768,23.96,2.04,23.43\nefficientnet_b4,320,1298.17,591.59,768,19.34,3.13,34.76\nvgg19_bn,224,1292.29,792.382,1024,143.68,19.66,14.86\ndm_nfnet_f0,256,1291.37,792.948,1024,71.49,12.62,18.05\nhrnet_w18_ssld,288,1289.07,794.359,1024,21.3,7.14,26.96\nefficientnetv2_s,384,1287.98,795.031,1024,21.46,8.44,35.77\nefficientvit_b3,256,1282.51,798.417,1024,48.65,5.2,35.01\nmobilevitv2_200,256,1277.35,400.82,512,18.45,7.22,32.15\nmaxvit_tiny_rw_256,256,1275.72,601.999,768,29.07,6.74,44.35\nmaxvit_rmlp_tiny_rw_256,256,1275.02,602.33,768,29.15,6.77,46.92\nefficientformerv2_s1,224,1269.21,806.789,1024,6.19,0.67,7.66\nhiera_base_plus_224,224,1266.71,808.384,1024,69.9,12.67,37.98\ntwins_svt_base,224,1264.21,809.978,1024,56.07,8.59,26.33\nregnetz_d32,320,1259.82,812.801,1024,27.58,9.33,37.08\nregnetz_d8,320,1256.35,815.05,1024,23.37,6.19,37.08\nnest_small,224,1255.15,815.828,1024,38.35,10.35,40.04\nwide_resnet50_2,288,1252.17,817.768,1024,68.88,18.89,23.81\nrepvgg_b3g4,224,1248.83,819.957,1024,83.83,17.89,15.1\nvit_mediumd_patch16_reg4_gap_256,256,1247.91,820.557,1024,64.11,17.87,37.57\ntf_efficientnetv2_s,384,1246.18,821.699,1024,21.46,8.44,35.77\nregnetz_040,320,1245.23,411.16,512,27.12,6.35,37.78\nnest_small_jx,224,1245.09,822.421,1024,38.35,10.35,40.04\nhgnetv2_b6,224,1242.11,824.392,1024,75.26,16.88,21.23\nregnetz_040_h,320,1240.62,412.685,512,28.94,6.43,37.94\nswin_small_patch4_window7_224,224,1233.81,829.94,1024,49.61,8.77,27.47\nvit_base_patch16_siglip_gap_256,256,1230.86,831.929,1024,85.84,23.13,33.23\nefficientnetv2_rw_s,384,1225.62,835.482,1024,23.94,8.72,38.03\nefficientvit_l2,288,1223.9,836.657,1024,63.71,11.51,32.19\nresnetaa101d,288,1218.62,840.281,1024,44.57,15.07,29.03\nvit_base_patch16_siglip_256,256,1217.35,841.156,1024,92.93,23.44,33.63\nswinv2_tiny_window8_256,256,1216.18,841.967,1024,28.35,5.96,24.57\ndpn98,224,1216.04,842.062,1024,61.57,11.73,25.2\nresnext101_64x4d,224,1205.34,849.54,1024,83.46,15.52,31.21\nresnest50d_4s2x4
0d,224,1202.36,851.648,1024,30.42,4.4,17.94\ncs3se_edgenet_x,320,1198.22,854.587,1024,50.72,18.01,20.21\ncait_xxs36_224,224,1195.59,856.47,1024,17.3,3.77,30.34\nresnext101_32x8d,224,1193.92,857.665,1024,88.79,16.48,31.21\nseresnet101,288,1192.86,858.428,1024,49.33,12.95,26.87\nresnet152d,256,1185.38,863.846,1024,60.21,15.41,30.51\nmobilenetv4_conv_large,448,1178.81,868.664,1024,32.59,8.75,37.17\nwide_resnet101_2,224,1175.48,871.125,1024,126.89,22.8,21.23\nmvitv2_small,224,1171.19,874.316,1024,34.87,7.0,28.08\nresnet200,224,1170.95,874.489,1024,64.67,15.07,32.19\ntnt_s_patch16_224,224,1170.11,875.118,1024,23.76,5.24,24.37\ninception_resnet_v2,299,1170.1,875.124,1024,55.84,13.18,25.06\ndavit_base,224,1167.76,657.658,768,87.95,15.51,40.66\nresnext101_32x4d,288,1162.51,880.842,1024,44.18,13.24,35.09\nmaxvit_tiny_pm_256,256,1161.84,661.01,768,30.09,6.61,47.9\nconvnext_small,288,1159.49,883.132,1024,50.22,14.39,35.65\nrexnetr_300,288,1159.12,441.698,512,34.81,5.59,36.61\ncoat_tiny,224,1155.52,886.171,1024,5.5,4.35,27.2\nvit_base_patch16_reg4_gap_256,256,1154.66,886.829,1024,86.62,23.5,33.89\nresnetrs101,288,1154.54,886.919,1024,63.62,13.56,28.53\nsamvit_base_patch16_224,224,1151.9,888.955,1024,86.46,17.54,24.54\npvt_v2_b4,224,1149.9,890.501,1024,62.56,10.14,53.74\nmobilenetv4_conv_aa_large,448,1147.75,669.124,768,32.59,9.63,43.94\ncoatnet_1_224,224,1146.63,446.514,512,42.23,8.7,39.0\necaresnet101d,288,1141.31,897.207,1024,44.57,13.35,28.19\ntresnet_xl,224,1138.85,899.141,1024,78.44,15.2,15.34\nregnety_160,224,1136.53,900.98,1024,83.59,15.96,23.04\ncrossvit_base_240,240,1135.75,901.594,1024,105.03,21.22,36.33\nmvitv2_small_cls,224,1134.19,902.839,1024,34.87,7.04,28.17\nefficientnet_lite4,380,1132.94,677.871,768,13.01,4.04,45.66\nseresnext101_64x4d,224,1117.32,916.471,1024,88.23,15.53,31.25\nhiera_base_abswin_256,256,1113.82,919.348,1024,51.27,12.46,40.7\nvit_small_r26_s32_384,384,1112.62,920.336,1024,36.47,10.43,29.85\neva02_base_patch16_clip_224,224,1111.45,921.303,1024,8
6.26,17.62,26.32\nseresnext101_32x8d,224,1107.29,924.747,1024,93.57,16.48,31.25\nnfnet_f1,224,1107.23,924.822,1024,132.63,17.87,22.94\nresnetv2_50d_gn,288,1101.48,929.642,1024,25.57,7.24,19.7\nconvnext_tiny,384,1100.29,697.987,768,28.59,13.14,39.48\nvit_betwixt_patch16_rope_reg4_gap_256,256,1097.57,932.963,1024,60.23,16.52,28.24\nvit_large_r50_s32_224,224,1097.32,933.175,1024,328.99,19.58,24.41\npvt_v2_b5,224,1091.19,938.413,1024,81.96,11.76,50.92\ninception_next_base,224,1084.8,943.94,1024,86.67,14.85,25.69\nefficientnetv2_m,320,1081.64,946.698,1024,54.14,11.01,39.97\nseresnext101d_32x8d,224,1078.94,949.057,1024,93.59,16.72,32.05\nresnet50_gn,288,1076.3,951.392,1024,25.56,6.85,18.37\nhrnet_w40,224,1068.64,958.191,1024,57.56,12.75,25.29\nresnetblur101d,288,1065.75,960.81,1024,44.57,15.07,29.65\nregnety_064,288,1059.2,966.744,1024,30.58,10.56,27.11\nhgnet_base,224,1056.26,727.083,768,71.58,25.14,15.47\nresnet101d,320,1056.02,969.668,1024,44.57,16.48,34.77\ntf_efficientnet_lite4,380,1054.8,728.091,768,13.01,4.04,45.66\nregnetz_e8,256,1047.73,977.344,1024,57.7,9.91,40.94\nregnetv_064,288,1046.59,978.402,1024,30.58,10.55,27.11\nxception41p,299,1040.78,737.896,768,26.91,9.25,39.86\nxcit_tiny_24_p16_384,384,1037.06,987.399,1024,12.12,6.87,34.29\nseresnext101_32x4d,288,1034.95,989.41,1024,48.96,13.25,35.12\nvit_small_patch8_224,224,1034.54,989.803,1024,21.67,22.44,80.84\nresnetrs152,256,1034.09,990.227,1024,86.62,15.59,30.83\nrepvgg_b3,224,1033.07,991.212,1024,123.09,29.16,15.1\nseresnet152d,256,1030.66,993.53,1024,66.84,15.42,30.56\nrdnet_base,224,1021.45,751.859,768,87.45,15.4,31.14\nconvnextv2_tiny,288,1017.72,754.62,768,28.64,7.39,22.21\nconvnextv2_small,224,1015.43,1008.427,1024,50.32,8.71,21.56\nvolo_d2_224,224,1012.98,1010.867,1024,58.68,14.34,41.34\nregnety_080,288,1011.46,1012.381,1024,39.18,13.22,29.69\nseresnextaa101d_32x8d,224,1009.83,1014.006,1024,93.59,17.25,34.16\nregnetz_b16_evos,288,1009.35,760.872,768,9.74,2.36,16.43\nvit_base_patch16_plus_240,240,1008.65
,1015.202,1024,117.56,27.41,33.08\nefficientvit_b3,288,1004.87,1019.022,1024,48.65,6.58,44.2\nswinv2_cr_small_224,224,1004.71,1019.185,1024,49.7,9.07,50.27\nconvnext_base,256,1004.59,1019.308,1024,88.59,20.09,37.55\nmobilenetv4_conv_aa_large,480,1000.68,767.464,768,32.59,11.05,50.45\nmobilenetv4_hybrid_large,448,998.89,768.844,768,37.76,10.74,48.61\nfocalnet_base_srf,224,997.56,1026.494,1024,88.15,15.28,35.01\nswinv2_cr_small_ns_224,224,993.8,1030.379,1024,49.7,9.08,50.27\nvit_relpos_base_patch16_plus_240,240,992.65,1031.568,1024,117.38,27.3,34.33\nmaxvit_rmlp_small_rw_224,224,992.28,773.964,768,64.9,10.75,49.3\nregnetz_c16_evos,256,990.18,775.606,768,13.49,2.48,16.57\nvit_base_r50_s16_224,224,987.34,1037.117,1024,97.89,21.66,35.28\nefficientformer_l7,224,979.35,1045.575,1024,82.23,10.17,24.45\nregnety_040_sgn,288,975.23,1050.001,1024,20.65,6.67,20.3\npoolformer_m36,224,970.83,1054.753,1024,56.17,8.8,22.02\nresnet152,288,962.0,1064.442,1024,60.19,19.11,37.28\nhrnet_w44,224,953.42,1073.994,1024,67.06,14.94,26.92\ntiny_vit_21m_384,384,953.33,537.052,512,21.23,13.77,77.83\ncait_s24_224,224,953.06,1074.42,1024,46.92,9.35,40.58\neva02_small_patch14_336,336,952.08,1075.525,1024,22.13,15.48,54.33\nefficientnetv2_rw_m,320,945.61,1082.891,1024,53.24,12.72,47.14\ndm_nfnet_f1,224,944.0,1084.735,1024,132.63,17.87,22.94\nfastvit_sa36,256,943.99,1084.735,1024,31.53,5.64,34.61\ncoat_lite_medium,224,942.72,1086.203,1024,44.57,9.81,40.06\nfastvit_mci1,256,940.13,1089.203,1024,21.54,4.72,32.84\ngmlp_b16_224,224,938.67,1090.888,1024,73.08,15.78,30.21\nresnet50x4_clip_gap,288,935.82,1094.209,1024,65.62,19.57,34.11\nfocalnet_base_lrf,224,935.6,1094.47,1024,88.75,15.43,38.13\nxception41,299,934.86,821.502,768,26.97,9.28,39.86\nhrnet_w48_ssld,224,933.73,1096.663,1024,77.47,17.34,28.56\nhrnet_w48,224,931.73,1098.99,1024,77.47,17.34,28.56\npoolformerv2_s36,224,930.29,1100.715,1024,30.79,5.01,15.82\nnextvit_small,384,927.23,1104.355,1024,31.76,17.26,57.14\nvit_mediumd_patch16_rope_reg1_gap_2
56,256,926.86,1104.797,1024,63.95,17.65,37.02\nsequencer2d_l,224,919.78,1113.292,1024,54.3,9.74,22.12\nvit_medium_patch16_gap_384,384,912.37,1122.341,1024,39.03,26.08,67.54\nxcit_medium_24_p16_224,224,909.28,1126.152,1024,84.4,16.13,31.71\ncoat_mini,224,907.22,1128.715,1024,10.34,6.82,33.68\nswin_s3_small_224,224,903.85,849.69,768,49.74,9.43,37.84\nnest_base,224,893.55,1145.979,1024,67.72,17.96,53.39\nlevit_384_s8,224,892.81,573.461,512,39.12,9.98,35.86\nresnet50x4_clip,288,889.34,1151.399,1024,87.14,21.35,35.27\nefficientnet_b4,384,888.84,576.021,512,19.34,4.51,50.04\nmaxvit_small_tf_224,224,888.54,576.215,512,68.93,11.66,53.17\ndpn131,224,887.08,1154.334,1024,79.25,16.09,32.97\nnest_base_jx,224,886.64,1154.915,1024,67.72,17.96,53.39\nswin_base_patch4_window7_224,224,882.78,1159.955,1024,87.77,15.47,36.63\nnf_regnet_b5,384,882.37,1160.502,1024,49.74,7.95,42.9\ncrossvit_15_dagger_408,408,874.39,1171.086,1024,28.5,21.45,95.05\nresnet200d,256,873.65,1172.086,1024,64.69,20.0,43.09\ngcvit_small,224,870.49,1176.332,1024,51.09,8.57,41.61\nvit_base_patch16_rope_reg1_gap_256,256,869.32,1177.92,1024,86.43,23.22,33.39\nxcit_small_12_p16_384,384,868.03,1179.666,1024,26.25,14.14,36.51\nresnetv2_50d_evos,288,866.61,1181.605,1024,25.59,7.15,19.7\nconvnextv2_nano,384,866.13,591.121,512,15.62,7.22,24.61\nresnest101e,256,860.16,1190.459,1024,48.28,13.38,28.66\neca_nfnet_l1,320,858.93,1192.174,1024,41.41,14.92,34.42\nefficientnet_b3_gn,288,854.4,599.243,512,11.73,1.74,23.35\ntf_efficientnet_b4,380,850.07,602.294,512,19.34,4.49,49.49\nlevit_conv_384_s8,224,845.53,605.525,512,39.12,9.98,35.86\nmaxxvit_rmlp_small_rw_256,256,843.15,910.843,768,66.01,14.67,58.38\ntwins_svt_large,224,838.5,1221.219,1024,99.27,15.15,35.1\ntresnet_m,448,836.12,1224.695,1024,31.39,22.99,29.21\ncoatnet_2_rw_224,224,834.11,613.819,512,73.87,15.09,49.22\nseresnet152,288,833.68,1228.27,1024,66.82,19.11,37.34\nxception65p,299,833.2,921.738,768,39.82,13.91,52.48\neva02_base_patch14_224,224,826.02,1239.671,1024,85.7
6,23.22,36.55\ncaformer_s36,224,824.15,1242.486,1024,39.3,8.0,37.53\nmvitv2_base,224,821.61,1246.318,1024,51.47,10.16,40.5\nswinv2_base_window12_192,192,807.64,1267.884,1024,109.28,11.9,39.72\nregnety_120,288,807.16,951.468,768,51.82,20.06,35.34\ndensenet264d,224,799.46,1280.857,1024,72.74,13.57,14.0\nmvitv2_base_cls,224,796.2,1286.102,1024,65.44,10.23,40.65\nxcit_nano_12_p8_384,384,796.01,1286.403,1024,3.05,6.34,46.08\nconvnext_base,288,793.66,1290.212,1024,88.59,25.43,47.53\ndpn107,224,793.14,1291.05,1024,86.92,18.38,33.46\ncoatnet_rmlp_2_rw_224,224,791.99,646.464,512,73.88,15.18,54.78\nswinv2_tiny_window16_256,256,787.77,649.922,512,28.35,6.68,39.02\ncoatnet_2_224,224,782.4,490.788,384,74.68,16.5,52.67\nxcit_tiny_24_p8_224,224,775.98,1319.614,1024,12.11,9.21,45.39\nmobilenetv4_conv_aa_large,544,775.96,659.818,512,32.59,14.19,64.79\nconvformer_s36,224,772.54,1325.489,1024,40.01,7.67,30.5\nmobilevitv2_150,384,765.57,668.772,512,10.59,9.2,54.25\nconvit_base,224,764.45,1339.509,1024,86.54,17.52,31.77\nmaxvit_rmlp_small_rw_256,256,758.54,1012.461,768,64.9,14.15,66.09\nseresnet200d,256,754.06,1357.971,1024,71.86,20.01,43.15\necaresnet200d,256,752.58,1360.633,1024,64.69,20.0,43.15\nresnet152d,320,751.68,1362.271,1024,60.21,24.08,47.67\nresnetrs200,256,751.62,1362.376,1024,93.21,20.18,43.42\ntnt_b_patch16_224,224,750.18,1364.987,1024,65.41,14.09,39.01\nhgnetv2_b6,288,749.26,1366.662,1024,75.26,27.9,35.09\nswinv2_small_window8_256,256,744.35,1375.689,1024,49.73,11.58,40.14\nxception65,299,743.1,1033.5,768,39.92,13.96,52.48\nfastvit_ma36,256,742.03,1379.991,1024,44.07,7.88,41.09\nswinv2_cr_small_ns_256,256,739.27,1385.138,1024,49.7,12.07,76.21\nsenet154,224,737.86,1387.783,1024,115.09,20.77,38.69\ntf_efficientnetv2_m,384,737.21,1389.018,1024,54.14,15.85,57.52\nlegacy_senet154,224,736.68,1390.006,1024,115.09,20.77,38.69\nconvnextv2_base,224,731.86,1049.374,768,88.72,15.38,28.75\npoolformer_m48,224,730.8,1401.19,1024,73.47,11.59,29.17\nswin_s3_base_224,224,729.12,1404.418,10
24,71.13,13.69,48.26\nvit_so150m_patch16_reg4_gap_256,256,727.23,1408.073,1024,134.13,36.75,53.21\nvitamin_base_224,224,725.26,705.946,512,87.72,22.68,52.77\nswinv2_cr_base_224,224,723.86,1414.624,1024,87.88,15.86,59.66\nvolo_d3_224,224,723.01,1416.293,1024,86.33,20.78,60.09\nresnext101_64x4d,288,722.92,1416.456,1024,83.46,25.66,51.59\nhrnet_w64,224,719.74,1422.726,1024,128.06,28.97,35.09\nvit_so150m_patch16_reg4_map_256,256,718.73,1424.714,1024,141.48,37.18,53.68\nefficientvit_l3,224,718.02,1426.124,1024,246.04,27.62,39.16\nswinv2_cr_base_ns_224,224,717.54,1427.086,1024,87.88,15.86,59.66\nconvnext_large,224,705.3,1451.859,1024,197.77,34.4,43.13\nefficientnet_b3_g8_gn,288,703.78,727.494,512,14.25,2.59,23.35\nresnet200,288,703.42,1455.726,1024,64.67,24.91,53.21\nefficientvit_l2,384,702.19,729.139,512,63.71,20.45,57.01\nnextvit_base,384,698.47,1466.053,1024,44.82,24.64,73.95\nregnety_320,224,695.21,1472.933,1024,145.05,32.34,30.26\nefficientnet_b3_gn,320,695.09,736.585,512,11.73,2.14,28.83\ncoat_small,224,692.64,1478.393,1024,21.69,12.61,44.25\nfastvit_mci2,256,685.53,1493.712,1024,35.82,7.91,43.34\nregnety_160,288,685.03,1121.108,768,83.59,26.37,38.07\nmaxxvitv2_rmlp_base_rw_224,224,675.7,1136.584,768,116.09,24.2,62.77\nregnetz_e8,320,668.22,1149.315,768,57.7,15.46,63.94\nefficientformerv2_s2,224,667.74,1533.519,1024,12.71,1.27,11.77\nresnetrs152,320,662.28,1546.154,1024,86.62,24.34,48.14\nxcit_small_12_p8_224,224,660.86,1549.491,1024,26.21,18.69,47.21\nseresnext101_32x8d,288,658.56,1554.9,1024,93.57,27.24,51.63\nseresnet152d,320,657.78,1556.745,1024,66.84,24.09,47.72\npoolformerv2_m36,224,657.37,1557.708,1024,56.08,8.81,22.02\nconvnext_small,384,656.34,1170.108,768,50.22,25.58,63.37\nregnetz_d8_evos,256,642.61,1593.488,1024,23.46,4.5,24.92\nresnext101_32x16d,224,642.37,1594.085,1024,194.03,36.27,51.18\nconvnext_base,320,641.94,1196.37,768,88.59,31.39,58.68\nseresnext101d_32x8d,288,641.63,1595.896,1024,93.59,27.64,52.95\nregnetx_320,224,641.61,1196.979,768,107.81,31.
81,36.3\nefficientnetv2_m,416,641.2,1596.999,1024,54.14,18.6,67.5\ngcvit_base,224,640.97,1597.568,1024,90.32,14.87,55.48\ndavit_large,224,640.04,1199.916,768,196.81,34.6,60.99\nvit_betwixt_patch16_reg4_gap_384,384,630.92,1623.018,1024,60.6,39.71,85.28\nregnetz_c16_evos,320,628.83,814.205,512,13.49,3.86,25.88\nmobilevitv2_175,384,624.08,615.295,384,14.25,12.47,63.29\nvolo_d1_384,384,619.82,1652.09,1024,26.78,22.75,108.55\nmaxvit_rmlp_base_rw_224,224,618.76,1241.17,768,116.14,23.15,92.64\nnf_regnet_b5,456,610.35,838.851,512,49.74,11.7,61.95\nvit_large_patch32_384,384,606.96,1687.093,1024,306.63,45.31,43.86\nxception71,299,606.18,1266.939,768,42.34,18.09,69.92\ncrossvit_18_dagger_408,408,605.96,1689.854,1024,44.61,32.47,124.87\nseresnextaa101d_32x8d,288,604.03,1695.268,1024,93.59,28.51,56.44\neca_nfnet_l2,320,603.44,1696.93,1024,56.72,20.95,47.43\ncaformer_m36,224,592.51,1728.235,1024,56.2,13.29,50.48\nlevit_512_s8,224,591.37,432.884,256,74.05,21.82,52.28\necaresnet200d,288,590.0,1735.592,1024,64.69,25.31,54.59\nseresnet200d,288,589.38,1737.392,1024,71.86,25.32,54.6\nseresnet269d,256,588.18,1740.943,1024,113.67,26.59,53.6\nnfnet_f2,256,586.41,1746.208,1024,193.78,33.76,41.85\nconvmixer_768_32,224,584.59,1751.632,1024,21.11,19.55,25.95\nefficientnet_b3_g8_gn,320,582.54,878.901,512,14.25,3.2,28.83\nresnetv2_50x3_bit,224,579.63,1324.974,768,217.32,37.06,33.34\nrdnet_large,224,573.58,892.627,512,186.27,34.74,46.67\nconvnextv2_tiny,384,573.01,670.132,384,28.64,13.14,39.48\nhrnet_w48_ssld,288,572.81,1787.672,1024,77.47,28.66,47.21\nresnetrs270,256,572.18,1789.62,1024,129.86,27.06,55.84\nnextvit_large,384,560.44,1827.129,1024,57.87,32.03,90.76\nefficientnetv2_rw_m,416,559.01,1831.797,1024,53.24,21.49,79.62\nconvformer_m36,224,558.16,1834.58,1024,57.05,12.89,42.05\nlevit_conv_512_s8,224,557.74,458.984,256,74.05,21.82,52.28\nefficientvit_l3,256,556.67,1379.617,768,246.04,36.06,50.98\nmixer_l16_224,224,555.85,1842.224,1024,208.2,44.6,41.69\nresnet200d,320,554.61,1846.322,1024,64
.69,31.25,67.33\nefficientnet_b5,416,550.64,929.82,512,30.39,8.27,80.68\nmobilevitv2_200,384,550.58,697.44,384,18.45,16.24,72.34\nvit_mediumd_patch16_reg4_gap_384,384,547.09,1871.701,1024,64.27,43.67,113.51\nnfnet_f1,320,543.04,1885.679,1024,132.63,35.97,46.77\nconvnext_large_mlp,256,539.44,1898.249,1024,200.13,44.94,56.33\nmaxvit_base_tf_224,224,535.21,956.621,512,119.47,24.04,95.01\nswinv2_base_window8_256,256,534.01,1917.55,1024,87.92,20.37,52.59\ncaformer_s18,384,520.36,983.918,512,26.34,13.42,77.34\nxcit_large_24_p16_224,224,515.87,1985.0,1024,189.1,35.86,47.27\ndeit_base_patch16_384,384,513.77,1993.08,1024,86.86,55.54,101.56\ndeit_base_distilled_patch16_384,384,512.49,1998.091,1024,87.63,55.65,101.82\nvit_base_patch16_siglip_gap_384,384,511.96,2000.146,1024,86.09,55.43,101.3\nvit_base_patch16_384,384,511.64,2001.39,1024,86.86,55.54,101.56\nvit_base_patch16_clip_384,384,510.4,2006.249,1024,86.86,55.54,101.56\nvit_base_patch16_siglip_384,384,506.36,2022.279,1024,93.18,56.12,102.2\nconvmixer_1024_20_ks9_p14,224,504.12,2031.237,1024,24.38,5.55,5.51\nxcit_tiny_12_p8_384,384,502.99,2035.797,1024,6.71,14.13,69.14\nswin_large_patch4_window7_224,224,502.65,1527.895,768,196.53,34.53,54.94\ndm_nfnet_f2,256,502.27,2038.737,1024,193.78,33.76,41.85\nconvformer_s18,384,497.92,1028.262,512,26.77,11.63,46.49\npoolformerv2_m48,224,495.47,2066.696,1024,73.35,11.59,29.17\nhalonet_h1,256,493.49,518.745,256,8.1,3.0,51.17\nvit_base_patch16_18x2_224,224,493.39,2075.428,1024,256.73,52.51,71.38\ntiny_vit_21m_512,512,491.13,521.239,256,21.27,27.02,177.93\nswinv2_small_window16_256,256,490.52,1043.783,512,49.73,12.82,66.29\nvit_small_patch14_dinov2,518,490.33,2088.369,1024,22.06,46.76,198.79\nseresnextaa101d_32x8d,320,490.0,1567.336,768,93.59,35.19,69.67\nvit_small_patch14_reg4_dinov2,518,488.81,2094.862,1024,22.06,46.95,199.77\nvit_large_patch16_224,224,487.63,2099.948,1024,304.33,61.6,63.52\ndeit3_base_patch16_384,384,487.08,2102.295,1024,86.88,55.54,101.56\neva_large_patch14_196,196,4
85.18,2110.546,1024,304.14,61.57,63.52\nswinv2_large_window12_192,192,481.88,1062.49,512,228.77,26.17,56.53\nresnetrs200,320,480.76,2129.948,1024,93.21,31.51,67.81\nhiera_large_224,224,474.3,2158.945,1024,213.74,40.34,83.37\nbeit_large_patch16_224,224,473.61,2162.105,1024,304.43,61.6,63.52\ntf_efficientnetv2_m,480,473.26,2163.712,1024,54.14,24.76,89.84\nbeitv2_large_patch16_224,224,473.13,2164.308,1024,304.43,61.6,63.52\nbeit_base_patch16_384,384,468.5,2185.674,1024,86.74,55.54,101.56\ndeit3_large_patch16_224,224,466.41,2195.494,1024,304.37,61.6,63.52\ndm_nfnet_f1,320,463.2,2210.697,1024,132.63,35.97,46.77\nxcit_small_24_p16_384,384,462.31,2214.961,1024,47.67,26.72,68.58\nseresnet269d,288,461.93,2216.787,1024,113.67,33.65,67.81\ncoatnet_3_rw_224,224,456.35,841.443,384,181.81,33.44,73.83\ncoatnet_rmlp_3_rw_224,224,453.17,564.897,256,165.15,33.56,79.47\nefficientnet_b5,448,452.46,1131.585,512,30.39,9.59,93.56\nconvnext_base,384,448.09,1142.619,512,88.59,45.21,84.49\nconvnext_xlarge,224,446.11,1721.526,768,350.2,60.98,57.5\ncoatnet_3_224,224,445.37,574.791,256,166.97,36.56,79.01\nconvnextv2_base,288,443.59,1154.194,512,88.72,25.43,47.53\nresnetv2_152x2_bit,224,442.44,2314.432,1024,236.34,46.95,45.11\nefficientformerv2_l,224,442.41,2314.602,1024,26.32,2.59,18.54\nmaxvit_tiny_tf_384,384,436.14,586.955,256,30.98,17.53,123.42\nconvnextv2_large,224,429.25,1192.776,512,197.96,34.4,43.13\nflexivit_large,240,428.81,2387.973,1024,304.36,70.99,75.39\nswinv2_cr_tiny_384,384,428.08,598.01,256,28.33,15.34,161.01\nconvnext_large,288,426.07,1802.496,768,197.77,56.87,71.29\ncaformer_b36,224,425.78,1803.715,768,98.75,23.22,67.3\nmaxxvitv2_rmlp_large_rw_224,224,424.59,1808.787,768,215.42,44.14,87.15\nswinv2_cr_large_224,224,423.38,1813.956,768,196.68,35.1,78.42\neca_nfnet_l2,384,418.47,2446.978,1024,56.72,30.05,68.28\nvolo_d4_224,224,416.41,2459.078,1024,192.96,44.34,80.22\ntresnet_l,448,415.17,2466.431,1024,55.99,43.59,47.56\nefficientnetv2_l,384,414.62,2469.714,1024,118.52,36.1,101.16
\ndavit_huge,224,411.12,1245.376,512,348.92,61.23,81.32\ntf_efficientnetv2_l,384,407.96,1882.523,768,118.52,36.1,101.16\nregnetz_d8_evos,320,406.94,1887.237,768,23.46,7.03,38.92\nconvformer_b36,224,403.51,1903.287,768,99.88,22.69,56.06\ntf_efficientnet_b5,456,402.73,953.473,384,30.39,10.46,98.86\nregnety_640,224,386.45,1987.327,768,281.38,64.16,42.5\nregnety_160,384,386.34,993.924,384,83.59,46.87,67.67\neca_nfnet_l3,352,384.03,2666.457,1024,72.04,32.57,73.12\nvit_large_patch16_siglip_gap_256,256,379.1,2701.121,1024,303.36,80.8,88.34\nvit_large_patch16_siglip_256,256,376.67,2718.518,1024,315.96,81.34,88.88\necaresnet269d,320,375.5,2726.992,1024,102.09,41.53,83.69\nvit_large_r50_s32_384,384,375.05,2730.266,1024,329.09,57.43,76.52\nefficientvit_l3,320,370.4,1036.706,384,246.04,56.32,79.34\nmaxvit_large_tf_224,224,370.36,1036.83,384,211.79,43.68,127.35\ninception_next_base,384,365.73,1399.913,512,86.67,43.64,75.48\nvit_base_patch8_224,224,363.97,2110.05,768,86.58,78.22,161.69\nnasnetalarge,331,362.83,1058.339,384,88.75,23.89,90.56\nresnetrs350,288,359.88,2845.389,1024,163.96,43.67,87.09\nvit_large_patch14_224,224,358.73,2854.473,1024,304.2,81.08,88.79\nswinv2_base_window16_256,256,358.47,1071.215,384,87.92,22.02,84.71\nswinv2_base_window12to16_192to256,256,358.3,1071.718,384,87.92,22.02,84.71\nvit_large_patch14_clip_224,224,358.26,2858.236,1024,304.2,81.08,88.79\nxcit_small_24_p8_224,224,346.37,2956.368,1024,47.63,35.81,90.78\nrepvgg_d2se,320,345.41,2964.608,1024,133.33,74.57,46.82\nconvnext_large_mlp,320,344.64,1485.577,512,200.13,70.21,88.02\nvolo_d2_384,384,336.68,3041.406,1024,58.87,46.17,184.51\nresnetv2_101x3_bit,224,333.65,2301.786,768,387.93,71.23,48.7\nvit_base_r50_s16_384,384,328.62,3116.05,1024,98.95,67.43,135.03\nvit_large_patch14_clip_quickgelu_224,224,325.79,3143.121,1024,303.97,81.08,88.79\nxcit_medium_24_p16_384,384,316.51,3235.266,1024,84.4,47.39,91.64\ncoat_lite_medium_384,384,316.02,1620.128,512,44.57,28.73,116.7\nnfnet_f2,352,310.8,3294.734,1024,193.
78,63.22,79.06\nvit_large_patch14_xp_224,224,310.69,3295.839,1024,304.06,81.01,88.79\necaresnet269d,352,310.52,3297.627,1024,102.09,50.25,101.25\nresnext101_32x32d,224,302.59,1692.067,512,468.53,87.29,91.12\nresnetrs270,352,302.17,3388.822,1024,129.86,51.13,105.48\nresnet50x16_clip_gap,384,298.84,1713.264,512,136.2,70.32,100.64\nefficientnetv2_xl,384,293.04,3494.359,1024,208.12,52.81,139.2\nvitamin_large2_224,224,292.42,1750.92,512,333.58,75.05,112.83\nvitamin_large_224,224,291.87,1754.211,512,333.32,75.05,112.83\ntf_efficientnetv2_xl,384,290.01,3530.88,1024,208.12,52.81,139.2\ncait_xxs24_384,384,288.21,3553.005,1024,12.03,9.63,122.66\nnfnet_f3,320,287.11,3566.606,1024,254.92,68.77,83.93\nresnet50x16_clip,384,286.91,1784.524,512,167.33,74.9,103.54\ncoatnet_4_224,224,285.69,896.079,256,275.43,62.48,129.26\ncaformer_s36,384,276.44,1852.088,512,39.3,26.08,150.33\nmaxvit_small_tf_384,384,276.03,695.573,192,69.02,35.87,183.65\nvit_base_patch16_siglip_gap_512,512,275.47,1858.601,512,86.43,107.0,246.15\ntresnet_xl,448,273.68,2806.19,768,78.44,60.77,61.31\nvit_base_patch16_siglip_512,512,272.77,1877.039,512,93.52,108.22,247.74\nvolo_d5_224,224,270.51,3785.389,1024,295.46,72.4,118.11\nconvnext_xlarge,288,269.09,1902.689,512,350.2,100.8,95.05\nvit_so400m_patch14_siglip_gap_224,224,268.96,3807.3,1024,412.44,109.57,106.13\nvit_so400m_patch14_siglip_224,224,267.66,3825.774,1024,427.68,110.26,106.73\ndm_nfnet_f2,352,265.61,2891.431,768,193.78,63.22,79.06\nxcit_tiny_24_p8_384,384,264.93,3865.202,1024,12.11,27.05,132.95\nconvformer_s36,384,264.39,1936.533,512,40.01,22.54,89.62\nefficientnetv2_l,480,263.76,1941.117,512,118.52,56.4,157.99\neva02_large_patch14_224,224,261.32,3918.509,1024,303.27,81.15,97.2\nconvnextv2_large,288,261.02,1471.122,384,197.96,56.87,71.29\nswinv2_cr_small_384,384,260.72,981.882,256,49.7,29.7,298.03\nefficientvit_l3,384,260.43,982.964,256,246.04,81.08,114.02\ntf_efficientnetv2_l,480,260.17,1967.914,512,118.52,56.4,157.99\neva02_large_patch14_clip_224,224,258
.03,3968.464,1024,304.11,81.18,97.2\nconvnextv2_base,384,250.4,1022.344,256,88.72,45.21,84.49\nmvitv2_large,224,250.34,2045.21,512,217.99,43.87,112.02\nmvitv2_large_cls,224,249.73,2050.191,512,234.58,42.17,111.69\ncoatnet_rmlp_2_rw_384,384,248.98,771.136,192,73.88,47.69,209.43\nseresnextaa201d_32x8d,320,247.3,3105.549,768,149.39,70.22,138.71\nresnetrs420,320,246.64,4151.712,1024,191.89,64.2,126.56\ndm_nfnet_f3,320,246.57,4153.037,1024,254.92,68.77,83.93\nresmlp_big_24_224,224,242.5,4222.699,1024,129.14,100.23,87.31\nconvnext_large_mlp,384,240.32,1597.863,384,200.13,101.11,126.74\nconvnext_large,384,240.31,1597.914,384,197.77,101.1,126.74\neca_nfnet_l3,448,237.13,2159.158,512,72.04,52.55,118.4\nxcit_medium_24_p8_224,224,236.47,3247.69,768,84.32,63.53,121.23\nregnety_320,384,235.74,1628.925,384,145.05,95.0,88.87\nswin_base_patch4_window12_384,384,233.69,1095.46,256,87.9,47.19,134.78\nvitamin_large2_256,256,225.68,1701.529,384,333.64,99.0,154.99\nvitamin_large_256,256,225.58,1702.259,384,333.38,99.0,154.99\nxcit_small_12_p8_384,384,224.77,1708.428,384,26.21,54.92,138.29\nmaxxvitv2_rmlp_base_rw_384,384,220.85,1738.714,384,116.09,72.98,213.74\nswinv2_large_window12to16_192to256,256,218.82,1169.912,256,196.74,47.81,121.53\nhiera_huge_224,224,211.71,2418.344,512,672.78,124.85,150.95\nefficientnet_b6,528,207.93,1231.18,256,43.04,19.4,167.39\nmaxvit_xlarge_tf_224,224,203.01,1261.038,256,506.99,97.52,191.04\nresnetrs350,384,201.49,5082.03,1024,163.96,77.59,154.74\ncait_xs24_384,384,201.32,3814.854,768,26.67,19.28,183.98\ntf_efficientnet_b6,528,201.04,955.016,192,43.04,19.4,167.39\ncaformer_m36,384,199.07,1928.91,384,56.2,42.11,196.35\nregnety_1280,224,196.19,2609.667,512,644.81,127.66,71.58\nrdnet_large,384,195.92,979.983,192,186.27,102.09,137.13\neva02_base_patch14_448,448,194.43,2633.353,512,87.12,107.11,259.14\nmaxvit_rmlp_base_rw_384,384,193.23,1324.858,256,116.14,70.97,318.95\ncait_xxs36_384,384,193.04,5304.502,1024,17.37,14.35,183.7\nfocalnet_huge_fl3,224,191.24,2007.94
7,384,745.28,118.26,104.8\nmaxvit_tiny_tf_512,512,190.87,670.587,128,31.05,33.49,257.59\nconvformer_m36,384,190.78,2012.789,384,57.05,37.87,123.56\nvit_huge_patch14_gap_224,224,189.67,5398.887,1024,630.76,166.73,138.74\nswinv2_cr_base_384,384,186.38,1373.532,256,87.88,50.57,333.68\nvit_huge_patch14_224,224,183.75,5572.762,1024,630.76,167.4,139.41\nvit_huge_patch14_clip_224,224,183.71,5574.109,1024,632.05,167.4,139.41\nsam2_hiera_tiny,896,182.75,350.196,64,26.85,99.86,384.63\nvit_base_patch14_dinov2,518,182.34,2807.893,512,86.58,151.71,397.58\nvit_base_patch14_reg4_dinov2,518,181.77,2816.806,512,86.58,152.25,399.53\nvitamin_xlarge_256,256,181.48,1410.575,256,436.06,130.13,177.37\nswinv2_cr_huge_224,224,179.7,2136.905,384,657.83,115.97,121.08\nxcit_large_24_p16_384,384,179.28,5711.606,1024,189.1,105.35,137.17\ndeit3_huge_patch14_224,224,177.3,5775.592,1024,632.13,167.4,139.41\nconvnextv2_huge,224,176.11,1453.663,256,660.29,115.0,79.07\nseresnextaa201d_32x8d,384,170.89,2996.033,512,149.39,101.11,199.72\nvolo_d3_448,448,169.67,4526.299,768,86.63,96.33,446.83\nvit_huge_patch14_clip_quickgelu_224,224,169.19,6052.476,1024,632.08,167.4,139.41\nmaxvit_base_tf_384,384,167.18,1148.458,192,119.65,73.8,332.9\nefficientnetv2_xl,512,165.5,3093.649,512,208.12,93.85,247.32\ntf_efficientnetv2_xl,512,163.76,3126.446,512,208.12,93.85,247.32\nvit_huge_patch14_xp_224,224,163.24,6273.085,1024,631.8,167.3,139.41\nnfnet_f3,416,162.65,4721.794,768,254.92,115.58,141.78\nvit_large_patch16_siglip_gap_384,384,159.79,4806.343,768,303.69,190.85,269.55\nvit_large_patch16_384,384,159.11,4826.795,768,304.72,191.21,270.24\nvit_large_patch16_siglip_384,384,158.55,4843.885,768,316.28,192.07,270.75\neva_large_patch14_336,336,158.23,4853.789,768,304.53,191.1,270.24\nvit_large_patch14_clip_336,336,157.81,4866.487,768,304.53,191.11,270.24\nvit_giant_patch16_gap_224,224,155.53,6583.864,1024,1011.37,202.46,139.26\ncait_s24_384,384,155.15,3299.991,512,47.06,32.17,245.31\nnfnet_f4,384,154.55,4969.28,768,316.07,
122.14,147.57\ncoatnet_5_224,224,153.97,1246.972,192,687.47,145.49,194.24\nsam2_hiera_small,896,153.37,417.27,64,33.95,123.99,442.63\nconvnext_xxlarge,256,153.06,2508.801,384,846.47,198.09,124.45\ndeit3_large_patch16_384,384,152.95,6694.836,1024,304.76,191.21,270.24\nconvnext_xlarge,384,151.94,1684.898,256,350.2,179.2,168.99\ndavit_giant,224,151.13,2540.897,384,1406.47,192.92,153.06\nbeit_large_patch16_384,384,147.11,6960.871,1024,305.0,191.21,270.24\nconvnextv2_large,384,147.09,1305.273,192,197.96,101.1,126.74\nresnetv2_50x3_bit,448,146.43,1311.231,192,217.32,145.7,133.37\nresnetrs420,416,145.54,5277.024,768,191.89,108.45,213.79\nresnetv2_152x4_bit,224,143.9,3558.113,512,936.53,186.9,90.22\nvit_large_patch14_clip_quickgelu_336,336,143.52,5351.207,768,304.29,191.11,270.24\ncaformer_b36,384,142.98,1790.384,256,98.75,72.33,261.79\ndm_nfnet_f3,416,140.7,3638.998,512,254.92,115.58,141.78\nconvformer_b36,384,137.79,1857.845,256,99.88,66.67,164.75\nswin_large_patch4_window12_384,384,136.8,935.679,128,196.74,104.08,202.16\ndm_nfnet_f4,384,133.79,5740.467,768,316.07,122.14,147.57\nxcit_large_24_p8_224,224,132.99,3849.902,512,188.93,141.23,181.56\nregnety_640,384,131.5,1946.681,256,281.38,188.47,124.83\nefficientnet_b7,600,129.75,1479.733,192,66.35,38.33,289.94\ntf_efficientnet_b7,600,126.11,1522.49,192,66.35,38.33,289.94\nvitamin_large2_336,336,125.61,1528.5,192,333.83,175.72,307.47\nvitamin_large_336,336,125.56,1529.162,192,333.57,175.72,307.47\nfocalnet_huge_fl4,224,122.25,3141.078,384,686.46,118.9,113.34\neva_giant_patch14_224,224,119.44,8572.982,1024,1012.56,267.18,192.64\neva_giant_patch14_clip_224,224,119.17,8592.945,1024,1012.59,267.18,192.64\nxcit_small_24_p8_384,384,117.9,3256.878,384,47.63,105.24,265.91\nvit_giant_patch14_224,224,116.48,8790.872,1024,1012.61,267.18,192.64\nmaxvit_large_tf_384,384,116.47,1098.959,128,212.03,132.55,445.84\nvit_giant_patch14_clip_224,224,116.42,8795.95,1024,1012.65,267.18,192.64\neva02_large_patch14_clip_336,336,113.52,9020.764,1024,
304.43,191.34,289.13\nmaxvit_small_tf_512,512,112.98,849.727,96,69.13,67.26,383.77\nswinv2_cr_large_384,384,112.26,1140.169,128,196.68,108.96,404.96\nnfnet_f5,416,110.43,6954.526,768,377.21,170.71,204.56\nmvitv2_huge_cls,224,108.91,3525.951,384,694.8,120.67,243.63\nconvnextv2_huge,288,108.51,1769.473,192,660.29,190.1,130.7\ncait_s36_384,384,103.84,4930.739,512,68.37,47.99,367.4\nresnet50x64_clip_gap,448,101.87,3769.426,384,365.03,253.96,233.22\nvitamin_xlarge_336,336,101.39,1262.381,128,436.06,230.18,347.33\nresnet50x64_clip,448,98.61,3894.121,384,420.38,265.02,239.13\ndavit_base_fl,768,98.42,1300.504,128,90.37,190.32,530.15\nvolo_d4_448,448,96.66,5296.823,512,193.41,197.13,527.35\nswinv2_base_window12to24_192to384,384,96.52,663.07,64,87.92,55.25,280.36\ndm_nfnet_f5,416,95.55,5358.646,512,377.21,170.71,204.56\nfocalnet_large_fl3,384,93.71,2731.882,256,239.13,105.06,168.04\nfocalnet_large_fl4,384,91.08,2810.743,256,239.32,105.2,181.78\nvitamin_large_384,384,88.59,2167.247,192,333.71,234.44,440.16\nvitamin_large2_384,384,88.51,2169.148,192,333.97,234.44,440.16\nvit_so400m_patch14_siglip_gap_384,384,87.19,5872.468,512,412.99,333.46,451.19\nnfnet_f4,512,86.87,5893.611,512,316.07,216.26,262.26\nvit_so400m_patch14_siglip_384,384,86.75,5902.326,512,428.23,335.4,452.89\nefficientnet_b8,672,86.55,1478.838,128,87.41,63.48,442.89\ntf_efficientnet_b8,672,84.44,1515.913,128,87.41,63.48,442.89\nnfnet_f6,448,82.01,6243.159,512,438.36,229.7,273.62\nsam2_hiera_base_plus,896,81.14,788.789,64,68.68,227.48,828.88\nxcit_medium_24_p8_384,384,80.95,3162.43,256,84.32,186.67,354.73\nvit_huge_patch14_clip_336,336,79.58,6433.526,512,632.46,390.97,407.54\nconvmixer_1536_20,224,76.93,13310.571,1024,51.63,48.68,33.03\nmaxvit_base_tf_512,512,76.16,1260.446,96,119.88,138.02,703.99\nbeit_large_patch16_512,512,75.77,6757.452,512,305.67,362.24,656.39\nvitamin_xlarge_384,384,75.69,1691.101,128,436.06,306.38,493.46\ndm_nfnet_f4,512,75.3,5099.335,384,316.07,216.26,262.26\ndm_nfnet_f6,448,71.01,7210.621,
512,438.36,229.7,273.62\nvit_gigantic_patch14_224,224,66.82,7662.227,512,1844.44,483.95,275.37\nregnety_1280,384,66.8,1916.204,128,644.81,374.99,210.2\nvit_gigantic_patch14_clip_224,224,66.79,7665.981,512,1844.91,483.96,275.37\nfocalnet_xlarge_fl3,384,66.17,2901.53,192,408.79,185.61,223.99\nmaxvit_xlarge_tf_384,384,64.84,1480.483,96,475.32,292.78,668.76\nnfnet_f5,544,64.64,5940.195,384,377.21,290.97,349.71\nfocalnet_xlarge_fl4,384,63.87,3005.958,192,409.03,185.79,242.31\nvolo_d5_448,448,63.63,6034.748,384,295.91,315.06,737.92\nnfnet_f7,480,62.7,8166.338,512,499.5,300.08,355.86\nvit_huge_patch14_clip_378,378,62.12,8241.55,512,632.68,503.79,572.79\nvit_so400m_patch14_siglip_gap_448,448,61.76,6217.473,384,413.33,487.18,764.26\neva02_large_patch14_448,448,61.54,8319.155,512,305.08,362.33,689.95\nconvnextv2_huge,384,61.34,1565.099,96,660.29,337.96,232.35\nswinv2_large_window12to24_192to384,384,61.15,785.007,48,196.74,116.15,407.83\nvit_large_patch14_dinov2,518,58.81,6529.805,384,304.37,507.15,1058.82\nvit_large_patch14_reg4_dinov2,518,58.63,6549.274,384,304.37,508.9,1064.02\nvit_huge_patch14_clip_quickgelu_378,378,57.38,6692.182,384,632.68,503.79,572.79\nvit_huge_patch16_gap_448,448,57.22,8947.785,512,631.67,544.7,636.83\ndm_nfnet_f5,544,55.97,6861.132,384,377.21,290.97,349.71\ntf_efficientnet_l2,475,54.83,1750.848,96,480.31,172.11,609.89\nmaxvit_large_tf_512,512,52.22,1225.553,64,212.33,244.75,942.15\neva_giant_patch14_336,336,51.89,9866.498,512,1013.01,620.64,550.67\nswinv2_cr_giant_224,224,51.82,3705.327,192,2598.76,483.85,309.15\nnfnet_f6,576,49.54,7751.499,384,438.36,378.69,452.2\nvolo_d5_512,512,48.79,5246.809,256,296.09,425.09,1105.37\nswinv2_cr_huge_384,384,48.61,1316.667,64,657.94,352.04,583.18\nxcit_large_24_p8_384,384,45.65,4205.892,192,188.93,415.0,531.82\ndm_nfnet_f6,576,42.93,5962.777,256,438.36,378.69,452.2\nnfnet_f7,608,39.93,6411.695,256,499.5,480.39,570.85\ndavit_huge_fl,768,34.49,1391.493,48,360.64,744.84,1060.3\nconvnextv2_huge,512,34.39,1395.744,48,6
60.29,600.81,413.07\ncait_m36_384,384,33.77,7580.428,256,271.22,173.11,734.81\nresnetv2_152x4_bit,480,32.13,2987.397,96,936.53,844.84,414.26\nregnety_2560,384,30.14,3185.104,96,1282.6,747.83,296.49\nmaxvit_xlarge_tf_512,512,29.4,1632.708,48,475.77,534.14,1413.22\nsam2_hiera_large,1024,24.86,1930.745,48,212.15,907.48,2190.34\nsamvit_base_patch16,1024,23.82,671.744,16,89.67,486.43,1343.27\nefficientnet_l2,800,19.37,2478.227,48,480.31,479.12,1707.39\ntf_efficientnet_l2,800,19.15,2506.394,48,480.31,479.12,1707.39\nvit_giant_patch14_dinov2,518,17.69,7234.942,128,1136.48,1784.2,2757.89\nvit_giant_patch14_reg4_dinov2,518,17.67,10868.501,192,1136.48,1790.08,2771.21\neva_giant_patch14_560,560,16.99,11301.565,192,1014.45,1906.76,2577.17\nswinv2_cr_giant_384,384,14.96,2138.945,32,2598.76,1450.71,1394.86\ncait_m48_448,448,14.26,8978.396,128,356.46,329.41,1708.23\nsamvit_large_patch16,1024,10.89,1101.662,12,308.28,1493.86,2553.78\nvit_so400m_patch14_siglip_gap_896,896,10.82,8873.224,96,416.87,2731.49,8492.88\nsamvit_huge_patch16,1024,6.7,1194.478,8,637.03,2982.23,3428.16\n"
  },
  {
    "path": "results/benchmark-infer-amp-nhwc-pt240-cu124-rtx4090.csv",
    "content": "model,infer_img_size,infer_samples_per_sec,infer_step_time,infer_batch_size,param_count,infer_gmacs,infer_macts\ntest_vit,160,193591.5,2.637,512,0.37,0.04,0.48\ntest_efficientnet,160,140505.88,3.636,512,0.36,0.06,0.55\ntest_byobnet,160,139342.81,3.665,512,0.46,0.03,0.43\ntinynet_e,106,114149.3,4.476,512,2.04,0.03,0.69\nmobilenetv3_small_050,224,91713.66,5.573,512,1.59,0.03,0.92\nlcnet_035,224,74617.77,6.852,512,1.64,0.03,1.04\nmobilenetv3_small_075,224,64474.01,7.933,512,2.04,0.05,1.3\nlcnet_050,224,64449.16,7.935,512,1.88,0.05,1.26\nmobilenetv3_small_100,224,58931.45,8.678,512,2.54,0.06,1.42\ntinynet_d,152,53815.67,9.505,512,2.34,0.05,1.42\ntf_mobilenetv3_small_minimal_100,224,46935.54,10.899,512,2.04,0.06,1.41\ntf_mobilenetv3_small_075,224,46386.54,11.028,512,2.04,0.05,1.3\nmobilenetv4_conv_small,224,45334.09,11.283,512,3.77,0.19,1.97\ntf_mobilenetv3_small_100,224,43574.18,11.741,512,2.54,0.06,1.42\nefficientvit_m2,224,43163.92,11.852,512,4.19,0.2,1.47\nefficientvit_m1,224,41710.72,12.265,512,2.98,0.17,1.33\nlcnet_075,224,41149.58,12.43,512,2.36,0.1,1.99\nmnasnet_small,224,38517.24,13.282,512,2.03,0.07,2.16\nefficientvit_m0,224,37262.79,13.731,512,2.35,0.08,0.91\nefficientvit_m3,224,37259.36,13.732,512,6.9,0.27,1.62\nefficientvit_m4,224,36637.28,13.966,512,8.8,0.3,1.7\nresnet18,160,35956.09,14.229,512,11.69,0.93,1.27\nregnetx_002,224,35359.25,14.471,512,2.68,0.2,2.16\nresnet10t,176,35271.27,14.507,512,5.44,0.7,1.51\nghostnet_050,224,33294.15,15.367,512,2.59,0.05,1.77\nmobilenetv4_conv_small,256,32870.69,15.566,512,3.77,0.25,2.57\nrepghostnet_050,224,32631.77,15.681,512,2.31,0.05,2.02\nregnety_002,224,32330.37,15.824,512,3.16,0.2,2.17\nlevit_128s,224,31791.92,16.095,512,7.78,0.31,1.88\nlcnet_100,224,31652.74,16.165,512,2.95,0.16,2.52\nvit_small_patch32_224,224,31104.08,16.451,512,22.88,1.15,2.5\nlevit_conv_128s,224,30791.34,16.618,512,7.78,0.31,1.88\nmobilenetv2_035,224,30231.75,16.927,512,1.68,0.07,2.86\nefficientvit_m5,224,29243.03,17.498,512,12.4
7,0.53,2.41\nmnasnet_050,224,28381.86,18.03,512,2.22,0.11,3.07\nregnetx_004,224,26873.8,19.042,512,5.16,0.4,3.14\nregnetx_004_tv,224,26191.12,19.533,512,5.5,0.42,3.17\ntinynet_c,184,25374.27,20.162,512,2.46,0.11,2.87\nefficientvit_b0,224,24958.93,20.504,512,3.41,0.1,2.87\nrepghostnet_058,224,24922.84,20.532,512,2.55,0.07,2.59\npit_ti_224,224,24011.49,21.313,512,4.85,0.7,6.19\npit_ti_distilled_224,224,23895.35,21.415,512,5.1,0.71,6.23\nsemnasnet_050,224,23613.68,21.672,512,2.08,0.11,3.44\nmobilenetv2_050,224,23276.13,21.987,512,1.97,0.1,3.64\nmixer_s32_224,224,23222.09,22.038,512,19.1,1.0,2.28\nresnet34,160,22893.41,22.355,512,21.8,1.87,1.91\ngernet_s,224,22652.42,22.589,512,8.17,0.75,2.65\ncs3darknet_focus_s,256,22633.17,22.607,512,3.27,0.69,2.7\nresnet10t,224,21939.38,23.327,512,5.44,1.1,2.43\nlevit_128,224,21210.51,24.129,512,9.21,0.41,2.71\ncs3darknet_s,256,21180.55,24.162,512,3.28,0.72,2.97\nlevit_conv_128,224,20601.31,24.843,512,9.21,0.41,2.71\nvit_tiny_r_s16_p8_224,224,20597.39,24.845,512,6.34,0.44,2.06\nrepghostnet_080,224,19998.72,25.59,512,3.28,0.1,3.22\nlcnet_150,224,19930.25,25.679,512,4.5,0.34,3.79\ntf_efficientnetv2_b0,192,19533.13,26.202,512,7.14,0.54,3.51\nlevit_192,224,19368.21,26.425,512,10.95,0.66,3.2\nvit_medium_patch32_clip_224,224,19242.5,26.596,512,39.69,2.0,3.34\nmobilenetv3_large_075,224,19001.45,26.934,512,3.99,0.16,4.0\nlevit_conv_192,224,18406.17,27.806,512,10.95,0.66,3.2\nresnet18,224,18247.43,28.048,512,11.69,1.82,2.48\nnf_regnet_b0,192,18063.21,28.334,512,8.76,0.37,3.15\ndeit_tiny_patch16_224,224,17596.84,29.086,512,5.72,1.26,5.97\nvit_tiny_patch16_224,224,17589.76,29.098,512,5.72,1.26,5.97\ndeit_tiny_distilled_patch16_224,224,17490.74,29.262,512,5.91,1.27,6.01\nregnety_004,224,17337.89,29.52,512,4.34,0.41,3.89\nmobilenetv3_rw,224,17271.2,29.634,512,5.48,0.23,4.41\nmnasnet_075,224,17012.7,30.083,512,3.17,0.23,4.77\nresnet14t,176,16949.17,30.199,512,10.08,1.07,3.61\nmobilenetv3_large_100,224,16891.88,30.3,512,5.48,0.23,4.41\nseresnet18,2
24,16704.11,30.636,512,11.78,1.82,2.49\nhardcorenas_a,224,16102.68,31.784,512,5.26,0.23,4.38\nlegacy_seresnet18,224,16060.08,31.87,512,11.78,1.82,2.49\ntf_mobilenetv3_large_075,224,15808.54,32.378,512,3.99,0.16,4.0\nregnety_006,224,15808.03,32.374,512,6.06,0.61,4.33\nrepghostnet_100,224,15795.72,32.403,512,4.07,0.15,3.98\ntinynet_b,188,15674.11,32.653,512,3.73,0.21,4.44\nhardcorenas_b,224,15581.81,32.845,512,5.18,0.26,5.09\nghostnet_100,224,15502.44,33.013,512,5.18,0.15,3.55\nhardcorenas_c,224,15395.61,33.244,512,5.52,0.28,5.01\nresnet18d,224,15221.89,33.625,512,11.71,2.06,3.29\nvit_xsmall_patch16_clip_224,224,15198.95,33.676,512,8.28,1.79,6.65\npit_xs_224,224,15188.8,33.697,512,10.62,1.4,7.71\npit_xs_distilled_224,224,15115.79,33.86,512,11.0,1.41,7.76\ntf_efficientnetv2_b1,192,14975.34,34.18,512,8.14,0.76,4.59\ntf_mobilenetv3_large_minimal_100,224,14968.23,34.196,512,3.92,0.22,4.4\nhardcorenas_d,224,14805.72,34.57,512,7.5,0.3,4.93\nmobilenet_edgetpu_v2_xs,224,14739.85,34.726,512,4.46,0.7,4.8\nmobilenetv1_100,224,14689.46,34.843,512,4.23,0.58,5.04\nmobilenetv1_100h,224,14526.94,35.232,512,5.28,0.63,5.09\nmnasnet_100,224,14430.36,35.465,512,4.38,0.33,5.46\nrepvgg_a0,224,14326.74,35.726,512,9.11,1.52,3.59\nese_vovnet19b_slim_dw,224,14264.24,35.883,512,1.9,0.4,5.28\nmobilenetv4_conv_medium,224,14221.81,35.988,512,9.72,0.84,5.8\ntf_mobilenetv3_large_100,224,14215.1,36.006,512,5.48,0.23,4.41\nregnetx_008,224,14152.21,36.165,512,7.26,0.81,5.15\nlevit_256,224,14076.58,36.361,512,18.89,1.13,4.23\nsemnasnet_075,224,13892.08,36.844,512,2.91,0.23,5.54\ndla46_c,224,13792.44,37.109,512,1.3,0.58,4.5\nrepghostnet_111,224,13783.41,37.136,512,4.54,0.18,4.38\nese_vovnet19b_slim,224,13770.62,37.17,512,3.17,1.69,3.52\ntf_efficientnetv2_b0,224,13704.01,37.351,512,7.14,0.73,4.77\nvit_betwixt_patch32_clip_224,224,13660.67,37.47,512,61.41,3.09,4.17\nregnetx_006,224,13627.19,37.56,512,6.2,0.61,3.98\nmobilenetv2_075,224,13591.14,37.659,512,2.64,0.22,5.86\nspnasnet_100,224,13506.55,37.897,512
,4.42,0.35,6.03\nlevit_conv_256,224,13449.58,38.057,512,18.89,1.13,4.23\nmobilenet_edgetpu_100,224,13226.44,38.699,512,4.09,1.0,5.75\nxcit_nano_12_p16_224,224,13143.36,38.944,512,3.05,0.56,4.17\nconvnext_atto,224,13136.13,38.966,512,3.7,0.55,3.81\nmobilenetv4_hybrid_medium_075,224,12839.02,39.865,512,7.31,0.66,5.65\nedgenext_xx_small,256,12753.52,40.134,512,1.33,0.26,3.33\nconvnext_atto_ols,224,12710.59,40.269,512,3.7,0.58,4.11\nregnety_008,224,12684.45,40.35,512,6.26,0.81,5.25\ntinynet_a,192,12643.44,40.482,512,6.19,0.35,5.41\nlevit_256d,224,12538.4,40.823,512,26.21,1.4,4.93\nhardcorenas_f,224,12519.6,40.883,512,8.2,0.35,5.57\nmobilenetv3_large_100,256,12500.8,40.947,512,5.48,0.29,5.75\nhardcorenas_e,224,12262.77,41.738,512,8.07,0.35,5.65\nregnety_008_tv,224,12155.65,42.107,512,6.43,0.84,5.42\nsemnasnet_100,224,12053.2,42.467,512,3.89,0.32,6.23\nlevit_conv_256d,224,12032.84,42.54,512,26.21,1.4,4.93\nfbnetc_100,224,12028.82,42.553,512,5.57,0.4,6.51\nghostnet_130,224,11996.72,42.666,512,7.36,0.24,4.6\nregnetz_005,224,11901.9,43.006,512,7.12,0.52,5.86\nmobilenetv2_100,224,11768.03,43.497,512,3.5,0.31,6.68\nrepghostnet_130,224,11593.04,44.15,512,5.48,0.25,5.24\nmobilenetv1_125,224,11535.69,44.372,512,6.27,0.89,6.3\nefficientnet_lite0,224,11509.61,44.474,512,4.65,0.4,6.74\nresnet34,224,11246.05,45.512,512,21.8,3.67,3.74\nconvnext_femto,224,11240.65,45.537,512,5.22,0.79,4.57\nmobilenetv1_100,256,11102.16,46.107,512,4.23,0.76,6.59\nmobilenetv1_100h,256,10962.04,46.696,512,5.28,0.82,6.65\nhgnetv2_b0,224,10947.51,46.757,512,6.0,0.33,2.12\nconvnext_femto_ols,224,10906.04,46.936,512,5.23,0.82,4.87\nfbnetv3_b,224,10838.47,47.224,512,8.6,0.42,6.97\ntf_efficientnetv2_b2,208,10821.76,47.3,512,10.1,1.06,6.0\ncs3darknet_focus_m,256,10820.8,47.302,512,9.3,1.98,4.89\nmobilevit_xxs,256,10784.37,47.464,512,1.27,0.42,8.34\nresnet14t,224,10567.1,48.439,512,10.08,1.69,5.8\nmobilenetv4_conv_medium,256,10509.42,48.704,512,9.72,1.1,7.58\ngernet_m,224,10469.98,48.882,512,21.14,3.02,5.24\nvit_
base_patch32_224,224,10426.38,49.097,512,88.22,4.41,5.01\nvit_base_patch32_clip_224,224,10419.24,49.129,512,88.22,4.41,5.01\nresnet18,288,10402.09,49.21,512,11.69,3.01,4.11\nhrnet_w18_small,224,10372.58,49.345,512,13.19,1.61,5.72\ncrossvit_tiny_240,240,10274.57,49.816,512,7.01,1.57,9.08\nselecsls42,224,10259.59,49.889,512,30.35,2.94,4.62\ncs3darknet_m,256,10254.13,49.918,512,9.31,2.08,5.28\nselecsls42b,224,10223.02,50.071,512,32.46,2.98,4.62\nseresnet34,224,10212.95,50.12,512,21.96,3.67,3.74\nmobilenetv4_hybrid_medium,224,10180.15,50.283,512,11.07,0.98,6.84\nrepvgg_a1,224,10156.41,50.4,512,14.09,2.64,4.74\nresnet50,160,10141.36,50.475,512,25.56,2.1,5.67\nresnet34d,224,10045.08,50.956,512,21.82,3.91,4.54\nrepghostnet_150,224,10019.26,51.092,512,6.58,0.32,6.0\nmobilenet_edgetpu_v2_s,224,9959.13,51.393,512,5.99,1.21,6.6\nefficientnet_b0,224,9831.68,52.064,512,5.29,0.4,6.75\nedgenext_xx_small,288,9824.91,52.1,512,1.33,0.33,4.21\nlegacy_seresnet34,224,9801.32,52.225,512,21.96,3.67,3.74\ntf_efficientnet_lite0,224,9765.44,52.418,512,4.65,0.4,6.74\nrepvit_m1,224,9725.15,52.635,512,5.49,0.83,7.45\ncrossvit_9_240,240,9657.56,53.001,512,8.55,1.85,9.52\nseresnet18,288,9580.24,53.432,512,11.78,3.01,4.11\nefficientnet_b1_pruned,240,9566.61,53.501,512,6.33,0.4,6.21\nvit_small_patch32_384,384,9550.92,53.596,512,22.92,3.45,8.25\npvt_v2_b0,224,9469.13,54.056,512,3.67,0.57,7.99\nresnet50d,160,9423.52,54.321,512,25.58,2.22,6.08\nrepvit_m0_9,224,9420.77,54.337,512,5.49,0.83,7.45\ndla34,224,9394.03,54.492,512,15.74,3.07,5.02\npit_s_224,224,9385.23,54.542,512,23.46,2.88,11.56\npit_s_distilled_224,224,9330.9,54.86,512,24.04,2.9,11.64\nmnasnet_140,224,9285.01,55.122,512,7.12,0.6,7.71\ncrossvit_9_dagger_240,240,9245.69,55.364,512,8.78,1.99,9.97\nnf_regnet_b0,256,9175.43,55.788,512,8.76,0.64,5.58\nefficientvit_b1,224,9080.99,56.37,512,9.1,0.53,7.25\nrexnetr_100,224,9036.3,56.647,512,4.88,0.43,7.72\nvisformer_tiny,224,9033.8,56.663,512,10.32,1.27,5.72\nselecsls60,224,8984.73,56.974,512,30.67,3
.59,5.52\nrexnet_100,224,8980.27,57.002,512,4.8,0.41,7.44\nresnetaa34d,224,8953.98,57.169,512,21.82,4.43,5.07\nselecsls60b,224,8928.94,57.33,512,32.77,3.63,5.52\nmobilenetv2_110d,224,8887.12,57.601,512,4.52,0.45,8.71\nfbnetv3_d,224,8866.65,57.732,512,10.31,0.52,8.5\nlevit_384,224,8827.05,57.993,512,39.13,2.36,6.26\nresnet18d,288,8808.21,58.116,512,11.71,3.41,5.43\nmobilevitv2_050,256,8703.38,58.817,512,1.37,0.48,8.04\nconvnext_pico,224,8685.06,58.939,512,9.05,1.37,6.1\nmobilenetv1_125,256,8629.12,59.32,512,6.27,1.16,8.23\nvit_base_patch32_clip_quickgelu_224,224,8623.33,59.362,512,87.85,4.41,5.01\ntf_efficientnetv2_b1,240,8620.85,59.379,512,8.14,1.21,7.34\ncs3darknet_focus_m,288,8566.21,59.756,512,9.3,2.51,6.19\ntf_efficientnet_b0,224,8541.64,59.93,512,5.29,0.4,6.75\nseresnet50,160,8474.8,60.402,512,28.09,2.1,5.69\nresnetblur18,224,8463.51,60.482,512,11.69,2.34,3.39\nresnext50_32x4d,160,8436.12,60.679,512,25.03,2.17,7.35\nconvnext_pico_ols,224,8425.05,60.76,512,9.06,1.43,6.5\nlevit_conv_384,224,8372.39,61.141,512,39.13,2.36,6.26\nefficientnet_es_pruned,224,8329.12,61.458,512,5.44,1.81,8.73\nefficientnet_es,224,8327.35,61.471,512,5.44,1.81,8.73\nefficientnet_b0_g16_evos,224,8292.21,61.731,512,8.11,1.01,7.42\nsemnasnet_140,224,8275.66,61.856,512,6.11,0.6,8.87\nmobilenetv4_conv_blur_medium,224,8246.17,62.078,512,9.72,1.22,8.58\nese_vovnet19b_dw,224,8222.83,62.253,512,6.54,1.34,8.25\nresnet50,176,8183.47,62.553,512,25.56,2.62,6.92\ndla46x_c,224,8172.45,62.637,512,1.07,0.54,5.66\ncs3darknet_m,288,8120.14,63.039,512,9.31,2.63,6.69\nmobilenet_edgetpu_v2_m,224,8111.09,63.111,512,8.46,1.85,8.15\nhgnetv2_b1,224,8089.51,63.278,512,6.34,0.49,2.73\nresnet26,224,8074.23,63.401,512,16.0,2.36,7.35\necaresnet50t,160,8063.67,63.484,512,25.57,2.21,6.04\nvit_base_patch32_clip_256,256,8042.41,63.651,512,87.86,5.76,6.65\nresnetrs50,160,8011.18,63.898,512,35.69,2.29,6.2\nmobilenetv2_140,224,7973.79,64.198,512,6.11,0.6,9.57\nxcit_tiny_12_p16_224,224,7972.61,64.209,512,6.72,1.24,6.29\nmixer_
b32_224,224,7946.63,64.415,512,60.29,3.24,6.29\ndla60x_c,224,7932.08,64.535,512,1.32,0.59,6.01\nfbnetv3_b,256,7894.93,64.838,512,8.6,0.55,9.1\ntiny_vit_5m_224,224,7865.63,65.083,512,12.08,1.28,11.25\nresmlp_12_224,224,7865.04,65.084,512,15.35,3.01,5.5\nconvnext_atto,288,7834.17,65.338,512,3.7,0.91,6.3\nmixer_s16_224,224,7827.39,65.399,512,18.53,3.79,5.97\nedgenext_x_small,256,7821.76,65.447,512,2.34,0.54,5.93\nrepvit_m1_0,224,7763.02,65.941,512,7.3,1.13,8.69\ntf_efficientnet_es,224,7753.47,66.024,512,5.44,1.81,8.73\nghostnetv2_100,224,7711.41,66.38,512,6.16,0.18,4.55\nvit_small_patch16_224,224,7695.68,66.52,512,22.05,4.61,11.95\ndeit_small_patch16_224,224,7692.21,66.546,512,22.05,4.61,11.95\nrepvgg_b0,224,7686.96,66.595,512,15.82,3.41,6.15\ndeit_small_distilled_patch16_224,224,7646.43,66.946,512,22.44,4.63,12.02\nskresnet18,224,7641.81,66.984,512,11.96,1.82,3.24\ndarknet17,256,7605.93,67.302,512,14.3,3.26,7.18\nconvnext_atto_ols,288,7572.12,67.606,512,3.7,0.96,6.8\nefficientnet_lite1,240,7533.54,67.95,512,5.42,0.62,10.14\nmobilenetv4_hybrid_medium,256,7461.42,68.608,512,11.07,1.29,9.01\nrepvit_m2,224,7412.42,69.062,512,8.8,1.36,9.43\nmixnet_s,224,7370.06,69.451,512,4.13,0.25,6.25\nrepghostnet_200,224,7359.51,69.557,512,9.8,0.54,7.96\nvit_pwee_patch16_reg1_gap_256,256,7350.46,69.642,512,15.25,4.37,15.87\nefficientnet_b0,256,7334.29,69.797,512,5.29,0.52,8.81\nefficientnet_blur_b0,224,7255.8,70.553,512,5.29,0.43,8.72\ngmixer_12_224,224,7249.78,70.61,512,12.7,2.67,7.26\nresnet26d,224,7173.64,71.361,512,16.01,2.6,8.15\nmobilenet_edgetpu_v2_l,224,7155.34,71.543,512,10.92,2.55,9.05\nrepvit_m1_1,224,7120.93,71.88,512,8.8,1.36,9.43\nhgnetv2_b4,224,7066.56,72.433,512,19.8,2.75,6.7\nnf_regnet_b2,240,7056.07,72.547,512,14.31,0.97,7.23\nrexnetr_130,224,7042.44,72.689,512,7.61,0.68,9.81\ndarknet21,256,7025.74,72.86,512,20.86,3.93,7.47\neva02_tiny_patch14_224,224,7011.67,73.009,512,5.5,1.7,9.14\ngernet_l,256,7010.05,73.027,512,31.08,4.57,8.0\nmobilenetv4_conv_aa_medium,256,6992.95
,73.204,512,9.72,1.58,10.3\nvit_tiny_r_s16_p8_384,384,6908.22,74.103,512,6.36,1.34,6.49\nefficientnet_b1,224,6906.68,74.116,512,7.79,0.59,9.36\nnf_regnet_b1,256,6856.88,74.654,512,10.22,0.82,7.27\nsedarknet21,256,6844.16,74.796,512,20.95,3.93,7.47\ndeit3_small_patch16_224,224,6842.04,74.819,512,22.06,4.61,11.95\nvit_wee_patch16_reg1_gap_256,256,6834.55,74.901,512,13.42,3.83,13.9\necaresnet50d_pruned,224,6821.51,75.045,512,19.94,2.53,6.43\nvit_relpos_small_patch16_rpn_224,224,6816.29,75.1,512,21.97,4.59,13.05\nresnext50_32x4d,176,6815.65,75.11,512,25.03,2.71,8.97\nefficientvit_b1,256,6805.35,75.222,512,9.1,0.69,9.46\nvit_relpos_small_patch16_224,224,6794.12,75.345,512,21.98,4.59,13.05\ntf_mixnet_s,224,6784.77,75.452,512,4.13,0.25,6.25\nvit_srelpos_small_patch16_224,224,6772.14,75.593,512,21.97,4.59,12.16\nconvnextv2_atto,224,6744.98,75.897,512,3.71,0.55,3.81\nconvnext_femto,288,6744.7,75.901,512,5.22,1.3,7.56\nregnetz_005,288,6732.16,76.038,512,7.12,0.86,9.68\nflexivit_small,240,6705.34,76.346,512,22.06,5.35,14.18\ntiny_vit_11m_224,224,6677.76,76.659,512,20.35,2.04,13.49\nresnet101,160,6654.99,76.921,512,44.55,4.0,8.28\ntf_efficientnet_lite1,240,6635.79,77.145,512,5.42,0.62,10.14\nmobilenetv4_conv_medium,320,6583.91,77.754,512,9.72,1.71,11.84\nefficientnetv2_rw_t,224,6565.65,77.969,512,13.65,1.93,9.94\nconvnext_femto_ols,288,6547.48,78.187,512,5.23,1.35,8.06\nrexnet_130,224,6526.53,78.43,512,7.56,0.68,9.71\nresnet34,288,6472.85,79.088,512,21.8,6.07,6.18\nhgnet_tiny,224,6467.95,79.144,512,14.74,4.54,6.36\nhgnetv2_b0,288,6455.26,79.302,512,6.0,0.54,3.51\nregnetz_b16,224,6447.12,79.403,512,9.72,1.45,9.95\nmobilenetv2_120d,224,6441.08,79.477,512,5.83,0.69,11.97\nregnetx_016,224,6438.32,79.508,512,9.19,1.62,7.93\nfbnetv3_d,256,6406.47,79.904,512,10.31,0.68,11.1\nnf_resnet26,224,6382.33,80.21,512,16.0,2.41,7.35\nrepvgg_a2,224,6357.34,80.526,512,28.21,5.7,6.26\nconvnext_nano,224,6328.61,80.891,512,15.59,2.46,8.37\ngmlp_ti16_224,224,6311.2,81.105,512,5.87,1.34,7.55\ncs3darkn
et_focus_l,256,6269.67,81.648,512,21.15,4.66,8.03\nefficientnet_cc_b0_4e,224,6251.35,81.891,512,13.31,0.41,9.42\nefficientnet_cc_b0_8e,224,6215.22,82.367,512,24.01,0.42,9.42\ntf_efficientnetv2_b2,260,6193.92,82.64,512,10.1,1.72,9.84\ngc_efficientnetv2_rw_t,224,6186.72,82.746,512,13.68,1.94,9.97\nrexnetr_150,224,6134.63,83.448,512,9.78,0.89,11.13\ntf_efficientnetv2_b3,240,6104.76,83.857,512,14.36,1.93,9.95\nmobilenet_edgetpu_v2_m,256,6098.85,83.937,512,8.46,2.42,10.65\nvit_relpos_base_patch32_plus_rpn_256,256,6094.72,83.988,512,119.42,7.68,8.01\nedgenext_x_small,288,6092.28,84.029,512,2.34,0.68,7.5\nmobilenetv4_conv_large,256,6067.97,84.365,512,32.59,2.86,12.14\nefficientformer_l1,224,6062.02,84.445,512,12.29,1.3,5.53\nresnext26ts,256,6061.47,84.453,512,10.3,2.43,10.52\ncs3darknet_l,256,6032.27,84.863,512,21.16,4.86,8.55\nvit_base_patch32_plus_256,256,5971.26,85.733,512,119.48,7.79,7.76\nconvnext_nano_ols,224,5948.71,86.056,512,15.65,2.65,9.38\nghostnetv2_130,224,5943.44,86.13,512,8.96,0.28,5.9\nmobilenetv4_conv_blur_medium,256,5900.5,86.749,512,9.72,1.59,11.2\nlegacy_seresnext26_32x4d,224,5892.27,86.881,512,16.79,2.49,9.39\ndpn48b,224,5891.72,86.887,512,9.13,1.69,8.92\nseresnet34,288,5881.41,87.035,512,21.96,6.07,6.18\nseresnext26ts,256,5849.56,87.514,512,10.39,2.43,10.52\nefficientnet_b1,240,5837.99,87.69,512,7.79,0.71,10.88\nresnet34d,288,5818.74,87.977,512,21.82,6.47,7.51\nefficientnet_lite2,260,5818.57,87.978,512,6.09,0.89,12.9\neca_resnext26ts,256,5802.09,88.223,512,10.3,2.43,10.52\nregnety_016,224,5793.48,88.359,512,11.2,1.63,8.04\ngcresnext26ts,256,5763.43,88.824,512,10.48,2.43,10.53\nvit_tiny_patch16_384,384,5757.61,88.914,512,5.79,4.7,25.39\nefficientnet_b2_pruned,260,5702.14,89.779,512,8.31,0.73,9.13\nvovnet39a,224,5696.62,89.864,512,22.6,7.09,6.73\nconvnextv2_femto,224,5691.98,89.939,512,5.23,0.79,4.57\nselecsls84,224,5690.74,89.959,512,50.95,5.9,7.57\nrexnet_150,224,5687.06,90.016,512,9.73,0.9,11.21\ntf_efficientnet_cc_b0_8e,224,5682.69,90.088,512,24.01,
0.42,9.42\necaresnet101d_pruned,224,5665.08,90.365,512,24.88,3.48,7.69\ntf_efficientnet_cc_b0_4e,224,5643.92,90.706,512,13.31,0.41,9.42\nedgenext_small,256,5629.75,90.933,512,5.59,1.26,9.07\nhgnetv2_b2,224,5618.6,91.112,512,11.22,1.15,4.12\ncs3sedarknet_l,256,5560.82,92.057,512,21.91,4.86,8.56\nmobilevitv2_075,256,5556.74,92.127,512,2.87,1.05,12.06\nmobilenetv3_large_150d,256,5555.16,92.154,512,14.62,1.03,12.35\nresnet26t,256,5542.61,92.362,512,16.01,3.35,10.52\necaresnetlight,224,5507.77,92.947,512,30.16,4.11,8.42\nmobilenetv4_hybrid_large_075,256,5460.28,93.755,512,22.75,2.06,11.64\necaresnext26t_32x4d,224,5449.2,93.946,512,15.41,2.7,10.09\nseresnext26t_32x4d,224,5447.15,93.982,512,16.81,2.7,10.09\necaresnext50t_32x4d,224,5446.35,93.994,512,15.41,2.7,10.09\ntresnet_m,224,5412.07,94.592,512,31.39,5.75,7.31\nresnetv2_50,224,5404.87,94.714,512,25.55,4.11,11.11\nseresnext26d_32x4d,224,5399.24,94.813,512,16.81,2.73,10.19\nresnet101,176,5394.96,94.89,512,44.55,4.92,10.08\nese_vovnet39b,224,5390.43,94.971,512,24.57,7.09,6.74\neca_vovnet39b,224,5385.97,95.049,512,22.6,7.09,6.74\nhrnet_w18_small_v2,224,5368.46,95.35,512,15.6,2.62,9.65\nresnetaa34d,288,5316.61,96.29,512,21.82,7.33,8.38\nnf_regnet_b2,272,5315.32,96.311,512,14.31,1.22,9.27\nnf_regnet_b1,288,5308.87,96.43,512,10.22,1.02,9.2\ntf_efficientnet_b1,240,5299.34,96.602,512,7.79,0.71,10.88\nnf_ecaresnet26,224,5277.07,97.012,512,16.0,2.41,7.36\nnf_seresnet26,224,5268.95,97.161,512,17.4,2.41,7.36\nsam2_hiera_tiny,224,5256.88,97.382,512,26.85,4.91,17.12\nconvnext_pico,288,5236.48,97.762,512,9.05,2.27,10.08\nwide_resnet50_2,176,5230.3,97.88,512,68.88,7.29,8.97\nefficientvit_b1,288,5209.57,98.268,512,9.1,0.87,11.96\npoolformer_s12,224,5207.23,98.311,512,11.92,1.82,5.53\npvt_v2_b1,224,5204.63,98.361,512,14.01,2.12,15.39\nvit_small_resnet26d_224,224,5184.59,98.741,512,63.61,5.07,11.12\nvit_medium_patch16_clip_224,224,5164.35,99.127,512,38.59,8.0,15.93\ntf_efficientnet_lite2,260,5161.3,99.182,512,6.09,0.89,12.9\nefficientnet_
b1,256,5151.08,99.381,512,7.79,0.77,12.22\nedgenext_small_rw,256,5138.37,99.63,512,7.83,1.58,9.51\nresnet50,224,5088.39,100.61,512,25.56,4.11,11.11\nconvnext_pico_ols,288,5080.05,100.773,512,9.06,2.37,10.74\ndpn68,224,5063.18,101.109,512,12.61,2.35,10.47\nresnet32ts,256,5060.8,101.156,512,17.96,4.63,11.58\ndla60,224,5050.11,101.367,512,22.04,4.26,10.16\nefficientnet_em,240,5043.39,101.506,512,6.9,3.04,14.34\nlevit_512,224,5027.38,101.83,512,95.17,5.64,10.22\nresnetv2_50t,224,5027.34,101.829,512,25.57,4.32,11.82\nresnet33ts,256,5009.72,102.19,512,19.68,4.76,11.66\nresnetv2_50d,224,4991.23,102.566,512,25.57,4.35,11.92\nmixnet_m,224,4990.08,102.58,512,5.01,0.36,8.19\nvgg11,224,4978.9,102.821,512,132.86,7.61,7.44\nmobilevit_xs,256,4974.99,102.897,512,2.32,1.05,16.33\neca_botnext26ts_256,256,4972.2,102.961,512,10.59,2.46,11.6\nresnetblur18,288,4952.71,103.365,512,11.69,3.87,5.6\nrepvit_m3,224,4952.4,103.372,512,10.68,1.89,13.94\nvit_little_patch16_reg1_gap_256,256,4899.59,104.486,512,22.52,6.27,18.06\ncrossvit_small_240,240,4895.55,104.569,512,26.86,5.63,18.17\nregnetv_040,224,4894.19,104.6,512,20.64,4.0,12.29\nefficientnet_b2,256,4887.72,104.739,512,9.11,0.89,12.81\ndpn68b,224,4887.43,104.743,512,12.61,2.35,10.47\nresnest14d,224,4875.39,105.004,512,10.61,2.76,7.33\ncs3darknet_focus_l,288,4864.5,105.235,512,21.15,5.9,10.16\ncoatnext_nano_rw_224,224,4863.57,105.26,512,14.7,2.47,12.8\nresnet50c,224,4858.01,105.38,512,25.58,4.35,11.92\nvovnet57a,224,4855.98,105.421,512,36.64,8.95,7.52\nvit_little_patch16_reg4_gap_256,256,4854.62,105.455,512,22.52,6.35,18.33\necaresnet26t,256,4847.81,105.603,512,16.01,3.35,10.53\nconvnext_tiny,224,4844.53,105.672,512,28.59,4.47,13.44\ncoatnet_pico_rw_224,224,4838.76,105.799,512,10.85,2.05,14.62\nregnety_040,224,4833.84,105.901,512,20.65,4.0,12.29\nese_vovnet19b_dw,288,4833.57,105.914,512,6.54,2.22,13.63\neca_resnet33ts,256,4814.31,106.335,512,19.68,4.76,11.66\nseresnet33ts,256,4808.69,106.463,512,19.78,4.76,11.66\ntf_mixnet_m,224,4798.12,106
.693,512,5.01,0.36,8.19\neca_halonext26ts,256,4798.11,106.697,512,10.76,2.44,11.46\ntf_efficientnet_em,240,4792.58,106.819,512,6.9,3.04,14.34\nresnet26,288,4751.95,107.731,512,16.0,3.9,12.15\nbotnet26t_256,256,4750.9,107.757,512,12.49,3.32,11.98\nresnet50t,224,4740.57,107.989,512,25.57,4.32,11.82\nhgnetv2_b1,288,4740.36,107.996,512,6.34,0.82,4.51\ngcresnet33ts,256,4729.63,108.241,512,19.88,4.76,11.68\nresnet152,160,4727.44,108.291,512,60.19,5.9,11.51\nresnet50d,224,4716.19,108.549,512,25.58,4.35,11.92\nlevit_512d,224,4713.6,108.608,512,92.5,5.85,11.3\nfbnetv3_g,240,4705.48,108.797,512,16.62,1.28,14.87\ncs3darknet_l,288,4693.2,109.078,512,21.16,6.16,10.83\nresnext26ts,288,4684.61,109.274,512,10.3,3.07,13.31\nbat_resnext26ts,256,4683.78,109.296,512,10.73,2.53,12.51\nghostnetv2_160,224,4679.39,109.403,512,12.39,0.42,7.23\nlevit_conv_512,224,4673.9,109.533,512,95.17,5.64,10.22\nresnetaa50,224,4670.85,109.603,512,25.56,5.15,11.64\nvit_relpos_medium_patch16_rpn_224,224,4656.87,109.924,512,38.73,7.97,17.02\ndeit3_medium_patch16_224,224,4656.19,109.948,512,38.85,8.0,15.93\nvit_relpos_medium_patch16_224,224,4644.51,110.223,512,38.75,7.97,17.02\nvit_srelpos_medium_patch16_224,224,4622.27,110.755,512,38.74,7.96,16.21\nhalonet26t,256,4618.99,110.833,512,12.48,3.19,11.69\nmobileone_s1,224,4610.41,111.041,512,4.83,0.86,9.67\nvit_relpos_medium_patch16_cls_224,224,4602.22,111.238,512,38.76,8.03,18.24\nhgnetv2_b3,224,4595.96,111.389,512,16.29,1.78,5.07\nese_vovnet57b,224,4572.59,111.957,512,38.61,8.95,7.52\ncrossvit_15_240,240,4566.76,112.099,512,27.53,5.81,19.77\nefficientnet_b3_pruned,300,4556.39,112.357,512,9.86,1.04,11.86\nconvit_tiny,224,4550.29,112.508,512,5.71,1.26,7.94\nvit_small_r26_s32_224,224,4547.12,112.585,512,36.43,3.56,9.85\ncoat_lite_tiny,224,4545.49,112.628,512,5.72,1.6,11.65\nseresnext26ts,288,4515.14,113.381,512,10.39,3.07,13.32\nrexnetr_200,224,4479.28,114.291,512,16.52,1.59,15.11\neca_resnext26ts,288,4469.0,114.55,512,10.3,3.07,13.32\nhiera_tiny_224,224,4466.63,
114.616,512,27.91,4.91,17.13\ngcresnext26ts,288,4454.09,114.939,512,10.48,3.07,13.33\nmobilenetv4_hybrid_medium,320,4442.05,115.247,512,11.07,2.05,14.36\nlegacy_seresnet50,224,4439.83,115.307,512,28.09,3.88,10.6\ncoatnet_nano_cc_224,224,4438.79,115.334,512,13.76,2.24,15.02\ncoatnet_nano_rw_224,224,4405.71,116.2,512,15.14,2.41,15.41\nres2net50_48w_2s,224,4402.09,116.289,512,25.29,4.18,11.72\nlevit_conv_512d,224,4399.04,116.376,512,92.5,5.85,11.3\nskresnet34,224,4398.47,116.391,512,22.28,3.67,5.13\nhgnet_small,224,4394.42,116.497,512,24.36,8.53,8.79\ncrossvit_15_dagger_240,240,4382.25,116.812,512,28.21,6.13,20.43\nregnetx_032,224,4377.92,116.931,512,15.3,3.2,11.37\nvit_base_resnet26d_224,224,4364.66,117.293,512,101.4,6.97,13.16\ninception_v3,299,4356.9,117.497,512,23.83,5.73,8.97\nresnetaa50d,224,4354.22,117.574,512,25.58,5.39,12.44\nresnet50_clip_gap,224,4352.18,117.625,512,23.53,5.39,12.44\ncs3sedarknet_l,288,4319.09,118.528,512,21.91,6.16,10.83\nvisformer_small,224,4314.89,118.648,512,40.22,4.88,11.43\ntiny_vit_21m_224,224,4303.48,118.951,512,33.22,4.29,20.08\nxcit_tiny_24_p16_224,224,4297.86,119.115,512,12.12,2.34,11.82\ncoat_lite_mini,224,4294.71,119.204,512,11.01,2.0,12.25\nrexnet_200,224,4286.19,119.439,512,16.37,1.56,14.91\nseresnet50,224,4275.51,119.739,512,28.09,4.11,11.13\nvgg11_bn,224,4269.7,119.903,512,132.87,7.62,7.44\nresnet26d,288,4243.79,120.634,512,16.01,4.29,13.48\nrepvit_m1_5,224,4236.5,120.842,512,14.64,2.31,15.7\nhgnetv2_b5,224,4234.4,120.895,512,39.57,6.56,11.19\nresnext50_32x4d,224,4219.51,121.328,512,25.03,4.26,14.4\nhgnetv2_b4,288,4217.35,121.378,512,19.8,4.54,11.08\ndla60x,224,4205.32,121.737,512,17.35,3.54,13.8\nconvnextv2_pico,224,4204.11,121.769,512,9.07,1.37,6.1\nefficientvit_b2,224,4171.44,122.725,512,24.33,1.6,14.62\nresnet50s,224,4166.01,122.887,512,25.68,5.47,13.52\nhaloregnetz_b,224,4138.03,123.716,512,11.68,1.97,11.94\nresnet50_clip,224,4125.54,124.093,512,38.32,6.14,12.98\nvit_medium_patch16_gap_240,240,4100.63,124.847,512,44.4,9.
22,18.81\ntf_efficientnet_b2,260,4099.99,124.866,512,9.11,1.02,13.83\nfastvit_t8,256,4096.16,124.98,512,4.03,0.7,8.63\nmobilevitv2_100,256,4094.56,125.029,512,4.9,1.84,16.08\ncs3darknet_focus_x,256,4093.78,125.053,512,35.02,8.03,10.69\ntwins_svt_small,224,4054.15,126.278,512,24.06,2.94,13.75\ndavit_tiny,224,4047.96,126.469,512,28.36,4.54,18.89\nconvnextv2_atto,288,4046.67,126.51,512,3.71,0.91,6.3\nnf_regnet_b3,288,4045.22,126.557,512,18.59,1.67,11.84\necaresnet50t,224,4041.51,126.672,512,25.57,4.32,11.83\necaresnet50d_pruned,288,4041.28,126.68,512,19.94,4.19,10.61\nefficientnet_b0_gn,224,4037.71,126.792,512,5.29,0.42,6.75\nresnetrs50,224,4033.27,126.931,512,35.69,4.48,12.14\nseresnet50t,224,4031.57,126.983,512,28.1,4.32,11.83\nefficientnet_b1,288,4029.31,127.056,512,7.79,0.97,15.46\nresnetv2_50x1_bit,224,4026.74,127.136,512,25.55,4.23,11.11\nresmlp_24_224,224,4020.57,127.334,512,30.02,5.96,10.91\necaresnet50d,224,4020.32,127.34,512,25.58,4.35,11.93\nnfnet_f0,192,4017.91,127.416,512,71.49,7.21,10.16\neca_nfnet_l0,224,4007.54,127.747,512,24.14,4.35,10.47\nnfnet_l0,224,3992.6,128.226,512,35.07,4.36,10.47\nresnet32ts,288,3982.87,128.533,512,17.96,5.86,14.65\ncspresnet50,256,3967.99,129.011,512,21.62,4.54,11.5\nresnext50d_32x4d,224,3958.52,129.328,512,25.05,4.5,15.2\nefficientnet_cc_b1_8e,240,3942.69,129.849,512,39.72,0.75,15.44\nresnet33ts,288,3941.35,129.892,512,19.68,6.02,14.75\nconvnext_tiny_hnf,224,3937.17,130.024,512,28.59,4.47,13.44\ncs3darknet_x,256,3921.43,130.548,512,35.05,8.38,11.35\ndensenet121,224,3896.08,131.4,512,7.98,2.87,6.9\nregnety_032,224,3890.61,131.586,512,19.44,3.2,11.26\nefficientnet_b0_g8_gn,224,3878.78,131.986,512,6.56,0.66,6.75\nres2net50_26w_4s,224,3870.41,132.27,512,25.7,4.28,12.61\nrepvgg_b1g4,224,3868.8,132.327,512,39.97,8.15,10.64\nres2next50,224,3866.11,132.415,512,24.67,4.2,13.71\ndla60_res2net,224,3861.83,132.566,512,20.85,4.15,12.34\nefficientnet_b2,288,3845.58,133.127,512,9.11,1.12,16.2\ndla60_res2next,224,3838.79,133.361,512,17.03,3.
49,13.17\ncspresnet50w,256,3836.45,133.436,512,28.12,5.04,12.19\nmobilevit_s,256,3831.13,133.628,512,5.58,2.03,19.94\nresnet152,176,3829.91,133.671,512,60.19,7.22,13.99\nconvnext_nano,288,3829.8,133.676,512,15.59,4.06,13.84\nregnetz_b16,288,3824.0,133.877,512,9.72,2.39,16.43\nnextvit_small,224,3822.2,133.942,512,31.76,5.81,18.44\nmobileone_s2,224,3813.62,134.241,512,7.88,1.34,11.55\ntwins_pcpvt_small,224,3810.76,134.343,512,24.11,3.83,18.08\nlambda_resnet26rpt_256,256,3805.0,134.547,512,10.99,3.16,11.87\ncspresnet50d,256,3804.58,134.553,512,21.64,4.86,12.55\nres2net50_14w_8s,224,3794.91,134.892,512,25.06,4.21,13.28\nefficientvit_l1,224,3793.97,134.935,512,52.65,5.27,15.85\nmaxvit_pico_rw_256,256,3785.35,135.245,512,7.46,1.83,22.3\nregnetz_c16,256,3784.6,135.273,512,13.46,2.51,16.57\nefficientnetv2_rw_t,288,3781.16,135.396,512,13.65,3.19,16.42\nseresnet33ts,288,3779.91,135.437,512,19.78,6.02,14.76\ncoatnet_rmlp_nano_rw_224,224,3777.49,135.526,512,15.15,2.62,20.34\nmaxvit_rmlp_pico_rw_256,256,3774.75,135.625,512,7.52,1.85,24.86\nregnetx_040,224,3770.28,135.785,512,22.12,3.99,12.2\nresnetblur50,224,3766.24,135.931,512,25.56,5.16,12.02\nhgnet_tiny,288,3750.33,136.507,512,14.74,7.51,10.51\nseresnetaa50d,224,3749.26,136.547,512,28.11,5.4,12.46\ngcresnext50ts,256,3748.73,136.567,512,15.67,3.75,15.46\ngcresnet50t,256,3737.26,136.986,512,25.9,5.42,14.67\ntf_efficientnetv2_b3,300,3732.39,137.164,512,14.36,3.04,15.74\ngcresnet33ts,288,3730.72,137.226,512,19.88,6.02,14.78\nmobilenetv4_conv_large,320,3729.46,137.261,512,32.59,4.47,18.97\ngcvit_xxtiny,224,3724.74,137.446,512,12.0,2.14,15.36\nxcit_nano_12_p16_384,384,3719.39,137.643,512,3.05,1.64,12.15\nmixnet_l,224,3706.35,138.118,512,7.33,0.58,10.84\ngmixer_24_224,224,3700.37,138.35,512,24.72,5.28,14.45\ntf_efficientnet_cc_b1_8e,240,3698.23,138.433,512,39.72,0.75,15.44\nmobileone_s0,224,3695.13,138.546,512,5.29,1.09,15.48\nhiera_small_224,224,3688.01,138.815,512,35.01,6.42,20.75\ninception_next_tiny,224,3669.77,139.505,512,28.06
,4.19,11.98\ndarknet53,256,3656.81,139.987,512,41.61,9.31,12.39\neca_resnet33ts,288,3656.44,140.002,512,19.68,6.02,14.76\nresnet51q,256,3652.14,140.176,512,35.7,6.38,16.55\ncoatnet_0_rw_224,224,3650.72,140.225,512,27.44,4.43,18.73\nlegacy_seresnext50_32x4d,224,3646.33,140.404,512,27.56,4.26,14.42\nres2net50d,224,3644.05,140.488,512,25.72,4.52,13.41\nseresnext50_32x4d,224,3641.64,140.579,512,27.56,4.26,14.42\nvit_medium_patch16_gap_256,256,3640.49,140.628,512,38.86,10.59,22.15\ngc_efficientnetv2_rw_t,288,3639.71,140.657,512,13.68,3.2,16.45\nvit_base_patch32_384,384,3635.68,140.814,512,88.3,13.06,16.5\nvit_base_patch32_clip_384,384,3633.18,140.91,512,88.3,13.06,16.5\nresnetrs101,192,3609.77,141.824,512,63.62,6.04,12.7\ndarknetaa53,256,3608.53,141.868,512,36.02,7.97,12.39\ncs3sedarknet_x,256,3607.12,141.918,512,35.4,8.38,11.35\ntf_mixnet_l,224,3591.49,142.547,512,7.33,0.58,10.84\nedgenext_small,320,3557.62,143.904,512,5.59,1.97,14.16\nresnetblur50d,224,3555.32,143.996,512,25.58,5.4,12.82\nvit_base_r26_s32_224,224,3550.43,144.195,512,101.38,6.81,12.36\nconvnext_nano_ols,288,3546.6,144.351,512,15.65,4.38,15.5\nmobilenetv3_large_150d,320,3539.35,144.647,512,14.62,1.61,19.29\nvit_medium_patch16_reg1_gap_256,256,3508.51,145.916,512,38.88,10.63,22.26\nvgg13,224,3498.85,146.321,512,133.05,11.31,12.25\nresnet26t,320,3498.14,146.349,512,16.01,5.24,16.44\nresnest26d,224,3480.3,147.101,512,17.07,3.64,9.97\nvit_medium_patch16_reg4_gap_256,256,3477.69,147.213,512,38.88,10.76,22.6\nvit_large_patch32_224,224,3463.8,147.8,512,305.51,15.39,13.3\nresnetv2_101,224,3460.77,147.93,512,44.54,7.83,16.23\nxcit_small_12_p16_224,224,3440.46,148.803,512,26.25,4.82,12.58\nhieradet_small,256,3437.33,148.94,512,34.72,8.51,27.76\nconvnextv2_femto,288,3432.62,149.144,512,5.23,1.3,7.56\nresnet61q,256,3430.48,149.237,512,36.85,7.8,17.01\nefficientnet_lite3,300,3422.52,149.581,512,8.2,1.65,21.85\ndensenetblur121d,224,3387.08,151.149,512,8.0,3.11,7.9\nedgenext_base,256,3353.12,152.68,512,18.51,3.85,15.58
\nresnet101,224,3320.94,154.16,512,44.55,7.83,16.23\nvit_base_resnet50d_224,224,3313.04,154.527,512,110.97,8.73,16.92\ncspresnext50,256,3309.23,154.7,512,20.57,4.05,15.86\nresnet50_mlp,256,3304.29,154.934,512,26.65,7.05,16.25\ncrossvit_18_240,240,3297.72,155.242,512,43.27,9.05,26.26\nresnetv2_101d,224,3282.49,155.964,512,44.56,8.07,17.04\nwide_resnet50_2,224,3280.9,156.042,512,68.88,11.43,14.4\nhgnetv2_b2,288,3279.23,156.12,512,11.22,1.89,6.8\nedgenext_small_rw,320,3277.42,156.207,512,7.83,2.46,14.85\ndla102,224,3273.77,156.379,512,33.27,7.19,14.18\nregnetx_080,224,3273.2,156.408,512,39.57,8.02,14.06\nfocalnet_tiny_srf,224,3262.86,156.903,512,28.43,4.42,16.32\ncs3edgenet_x,256,3259.76,157.05,512,47.82,11.53,12.92\ndensenet169,224,3257.61,157.156,512,14.15,3.4,7.3\nrepvgg_b1,224,3252.46,157.405,512,57.42,13.16,10.64\necaresnetlight,288,3245.31,157.753,512,30.16,6.79,13.91\ncoatnet_bn_0_rw_224,224,3244.97,157.769,512,27.44,4.67,22.04\nhrnet_w18_ssld,224,3243.03,157.865,512,21.3,4.32,16.31\nfbnetv3_g,288,3240.2,158.003,512,16.62,1.77,21.09\nnf_regnet_b3,320,3237.1,158.154,512,18.59,2.05,14.61\nseresnext26t_32x4d,288,3235.31,158.238,512,16.81,4.46,16.68\ncoatnet_rmlp_0_rw_224,224,3235.0,158.255,512,27.45,4.72,24.89\nmobilevitv2_125,256,3226.23,158.687,512,7.48,2.86,20.1\nhrnet_w18,224,3222.49,158.851,512,21.3,4.32,16.31\nresnet101c,224,3220.87,158.948,512,44.57,8.08,17.04\neva02_small_patch14_224,224,3219.95,158.996,512,21.62,6.14,18.28\ndm_nfnet_f0,192,3211.06,159.436,512,71.49,7.21,10.16\nseresnext26d_32x4d,288,3205.61,159.698,512,16.81,4.51,16.85\npit_b_224,224,3203.54,159.809,512,73.76,12.42,32.94\nese_vovnet39b,288,3197.44,160.116,512,24.57,11.71,11.13\nresnetv2_50,288,3191.29,160.421,512,25.55,6.79,18.37\npit_b_distilled_224,224,3189.97,160.488,512,74.79,12.5,33.07\nnf_seresnet50,224,3183.77,160.801,512,28.09,4.21,11.13\nnf_ecaresnet50,224,3182.04,160.888,512,25.56,4.21,11.13\npoolformerv2_s12,224,3173.35,161.331,512,11.89,1.83,5.53\ncrossvit_18_dagger_240,240,316
9.03,161.547,512,44.27,9.5,27.03\nefficientvit_b2,256,3168.98,161.551,512,24.33,2.09,19.03\necaresnet101d_pruned,288,3165.16,161.747,512,24.88,5.75,12.71\nwide_resnet101_2,176,3159.16,162.057,512,126.89,14.31,13.18\nresnet101d,224,3154.71,162.283,512,44.57,8.08,17.04\nmobileone_s3,224,3137.93,163.149,512,10.17,1.94,13.85\nrdnet_tiny,224,3116.85,164.255,512,23.86,5.06,15.98\necaresnet50t,256,3090.6,165.649,512,25.57,5.64,15.45\ncs3darknet_x,288,3089.12,165.725,512,35.05,10.6,14.36\npvt_v2_b2,224,3075.64,166.454,512,25.36,4.05,27.53\necaresnet26t,320,3068.22,166.857,512,16.01,5.24,16.44\nconvnextv2_nano,224,3060.53,167.274,512,15.62,2.46,8.37\ngmlp_s16_224,224,3054.3,167.617,512,19.42,4.42,15.1\neva02_tiny_patch14_336,336,3045.61,168.099,512,5.76,4.68,27.16\ntf_efficientnet_lite3,300,3034.72,168.698,512,8.2,1.65,21.85\nnf_resnet50,256,3032.36,168.828,512,25.56,5.46,14.52\nresnest50d_1s4x24d,224,3022.48,169.381,512,25.68,4.43,13.57\nefficientnetv2_s,288,3014.62,169.825,512,21.46,4.75,20.13\nefficientvit_l2,224,3006.41,170.285,512,63.71,6.97,19.58\nsehalonet33ts,256,3003.76,170.438,512,13.69,3.55,14.7\nresnet50,288,3001.95,170.541,512,25.56,6.8,18.37\nrexnetr_300,224,2999.93,170.656,512,34.81,3.39,22.16\nresnetaa101d,224,2990.5,171.195,512,44.57,9.12,17.56\nresnet101_clip_gap,224,2990.13,171.215,512,42.52,9.11,17.56\nfocalnet_tiny_lrf,224,2989.28,171.265,512,28.65,4.49,17.76\nmobilenetv4_hybrid_medium,384,2986.24,171.437,512,11.07,3.01,21.18\nconvnext_small,224,2979.27,171.842,512,50.22,8.71,21.56\nskresnet50,224,2972.51,172.23,512,25.8,4.11,12.5\nnextvit_base,224,2953.28,173.349,512,44.82,8.29,23.71\nres2net50_26w_6s,224,2950.38,173.519,512,37.05,6.33,15.28\nconvnext_tiny,288,2942.29,174.001,512,28.59,7.39,22.21\nvgg13_bn,224,2930.98,174.674,512,133.05,11.33,12.25\ngcresnet50t,288,2928.81,174.801,512,25.9,6.86,18.57\nregnetv_040,288,2924.99,175.029,512,20.64,6.6,20.3\ncs3se_edgenet_x,256,2921.53,175.231,512,50.72,11.53,12.94\ngcresnext50ts,288,2907.55,176.079,512,15.67
,4.75,19.57\nvgg16,224,2904.86,176.241,512,138.36,15.47,13.56\ngcvit_xtiny,224,2901.49,176.449,512,19.98,2.93,20.26\nresnet101s,224,2898.69,176.618,512,44.67,9.19,18.64\ndpn68b,288,2892.89,176.97,512,12.61,3.89,17.3\nresnext101_32x8d,176,2887.72,177.288,512,88.79,10.33,19.37\nefficientnet_b3,288,2886.07,177.389,512,12.23,1.63,21.49\nresnet101_clip,224,2884.25,177.502,512,56.26,9.81,18.08\nresnet51q,288,2880.39,177.741,512,35.7,8.07,20.94\ntresnet_v2_l,224,2874.87,178.082,512,46.17,8.85,16.34\nregnety_040,288,2873.78,178.146,512,20.65,6.61,20.3\ndarknet53,288,2865.69,178.639,512,41.61,11.78,15.68\nefficientnetv2_rw_s,288,2865.52,178.661,512,23.94,4.91,21.41\nvit_base_patch16_siglip_gap_224,224,2859.98,179.01,512,85.8,17.49,23.75\nfastvit_t12,256,2859.48,179.037,512,7.55,1.42,12.42\nvit_base_patch16_224_miil,224,2846.99,179.826,512,94.4,17.59,23.91\ndeit_base_patch16_224,224,2846.84,179.834,512,86.57,17.58,23.9\nskresnet50d,224,2843.8,180.027,512,25.82,4.36,13.31\nvit_base_patch16_224,224,2843.0,180.077,512,86.57,17.58,23.9\nvit_base_patch16_clip_224,224,2840.98,180.206,512,86.57,17.58,23.9\nxcit_nano_12_p8_224,224,2838.89,180.339,512,3.05,2.16,15.71\nmixer_b16_224,224,2837.28,180.433,512,59.88,12.62,14.53\nvitamin_small_224,224,2831.28,180.823,512,22.03,5.92,26.38\nvit_base_patch16_siglip_224,224,2830.29,180.883,512,92.88,17.73,24.06\ndarknetaa53,288,2829.22,180.948,512,36.02,10.08,15.68\ndeit_base_distilled_patch16_224,224,2828.65,180.986,512,87.34,17.68,24.05\nlegacy_seresnet101,224,2827.17,181.086,512,49.33,7.61,15.74\nregnetx_064,224,2812.17,182.051,512,26.21,6.49,16.37\nresnet50t,288,2809.9,182.199,512,25.57,7.14,19.53\nvit_base_mci_224,224,2809.18,182.246,512,86.35,17.73,24.65\ncs3sedarknet_x,288,2809.03,182.251,512,35.4,10.6,14.37\nnest_tiny,224,2808.71,182.275,512,17.06,5.83,25.48\nresnet50d,288,2794.35,183.212,512,25.58,7.19,19.7\nvolo_d1_224,224,2792.28,183.348,512,26.63,6.94,24.43\nresnetaa50,288,2787.79,183.642,512,25.56,8.52,19.24\nmaxxvit_rmlp_nano_rw_2
56,256,2787.18,183.665,512,16.78,4.37,26.05\ndla102x,224,2778.92,184.23,512,26.31,5.89,19.42\nvit_base_patch16_gap_224,224,2767.32,185.003,512,86.57,17.49,25.59\nsequencer2d_s,224,2764.98,185.16,512,27.65,4.96,11.31\nnest_tiny_jx,224,2762.24,185.343,512,17.06,5.83,25.48\nseresnet101,224,2762.13,185.343,512,49.33,7.84,16.27\nefficientformer_l3,224,2749.8,186.18,512,31.41,3.93,12.01\nresnext101_32x4d,224,2746.89,186.38,512,44.18,8.01,21.23\nswin_tiny_patch4_window7_224,224,2744.11,186.57,512,28.29,4.51,17.06\ncs3sedarknet_xdw,256,2728.51,187.612,512,21.6,5.97,17.18\nfastvit_sa12,256,2725.03,187.874,512,11.58,1.96,14.03\nese_vovnet99b,224,2723.96,187.941,512,63.2,16.51,11.27\nfastvit_s12,256,2721.82,188.087,512,9.47,1.82,13.67\nresnet61q,288,2721.04,188.149,512,36.85,9.87,21.52\ntwins_pcpvt_base,224,2714.36,188.612,512,43.83,6.68,25.25\nbeit_base_patch16_224,224,2706.18,189.182,512,86.53,17.58,23.9\nvit_base_patch32_clip_448,448,2701.0,189.546,512,88.34,17.93,23.9\nresmlp_36_224,224,2696.43,189.864,512,44.69,8.91,16.33\ntresnet_l,224,2694.34,190.016,512,55.99,10.9,11.9\nbeitv2_base_patch16_224,224,2693.77,190.055,512,86.53,17.58,23.9\nrexnetr_200,288,2689.11,190.383,512,16.52,2.62,24.96\nrexnet_300,224,2684.56,190.707,512,34.71,3.44,22.4\npvt_v2_b2_li,224,2681.31,190.936,512,22.55,3.91,27.6\nmaxxvitv2_nano_rw_256,256,2667.75,191.907,512,23.7,6.26,23.05\nmobilevitv2_150,256,2662.33,192.301,512,10.59,4.09,24.11\necaresnet101d,224,2660.63,192.423,512,44.57,8.08,17.07\ncspdarknet53,256,2655.45,192.786,512,27.64,6.57,16.81\npoolformer_s24,224,2646.59,193.442,512,21.39,3.41,10.68\nvit_base_patch16_rpn_224,224,2645.68,193.504,512,86.54,17.49,23.75\ncait_xxs24_224,224,2634.11,194.359,512,11.96,2.53,20.29\nmobilenetv4_conv_large,384,2634.1,194.358,512,32.59,6.43,27.31\nmixnet_xl,224,2620.78,195.34,512,11.9,0.93,14.57\ndeit3_base_patch16_224,224,2617.49,195.593,512,86.59,17.58,23.9\nmaxvit_tiny_rw_224,224,2616.24,195.687,512,29.06,5.11,33.11\nvit_relpos_base_patch16_rpn_224,224,
2614.87,195.79,512,86.41,17.51,24.97\nvit_relpos_base_patch16_224,224,2608.2,196.291,512,86.43,17.51,24.97\nresnetaa50d,288,2606.94,196.383,512,25.58,8.92,20.57\nvit_relpos_base_patch16_clsgap_224,224,2596.29,197.191,512,86.43,17.6,25.12\nres2net101_26w_4s,224,2594.73,197.298,512,45.21,8.1,18.45\nresnetblur101d,224,2592.77,197.455,512,44.57,9.12,17.94\nvit_relpos_base_patch16_cls_224,224,2592.66,197.467,512,86.43,17.6,25.12\nhgnet_small,288,2591.82,197.53,512,24.36,14.09,14.53\ntf_efficientnetv2_s,300,2589.86,197.681,512,21.46,5.35,22.73\nresnetv2_101x1_bit,224,2583.09,198.195,512,44.54,8.04,16.23\nregnetz_d32,256,2582.87,198.213,512,27.58,5.98,23.74\nhgnetv2_b3,288,2571.48,199.079,512,16.29,2.94,8.38\nvit_betwixt_patch16_reg1_gap_256,256,2567.82,199.378,512,60.4,16.32,27.83\nnf_resnet101,224,2567.19,199.426,512,44.55,8.01,16.23\nregnetz_d8,256,2566.72,199.463,512,23.37,3.97,23.74\nsebotnet33ts_256,256,2559.67,200.012,512,13.7,3.89,17.46\nconvnextv2_pico,288,2545.21,201.143,512,9.07,2.27,10.08\nvit_betwixt_patch16_reg4_gap_256,256,2540.19,201.546,512,60.4,16.52,28.24\nseresnet50,288,2537.63,201.746,512,28.09,6.8,18.39\nhgnetv2_b5,288,2531.16,202.26,512,39.57,10.84,18.5\nmixer_l32_224,224,2528.06,202.513,512,206.94,11.27,19.86\ncs3edgenet_x,288,2527.64,202.544,512,47.82,14.59,16.36\ndavit_small,224,2525.76,202.697,512,49.75,8.8,30.49\nregnetz_040,256,2513.52,203.672,512,27.12,4.06,24.19\nvit_small_patch16_384,384,2513.48,203.688,512,22.2,15.52,50.78\nregnetz_040_h,256,2511.58,203.823,512,28.94,4.12,24.29\nxcit_tiny_12_p16_384,384,2508.59,204.083,512,6.72,3.64,18.26\nefficientvit_b2,288,2500.66,204.73,512,24.33,2.64,24.03\ndensenet201,224,2499.62,204.817,512,20.01,4.34,7.85\nresnext50_32x4d,288,2498.32,204.925,512,25.03,7.04,23.81\nres2net101d,224,2492.38,205.404,512,45.23,8.35,19.25\nflexivit_base,240,2489.7,205.633,512,86.59,20.29,28.36\nvgg19,224,2485.75,205.96,512,143.67,19.63,14.86\nvit_small_resnet50d_s16_224,224,2485.28,206.0,512,57.53,13.48,24.82\nvgg16_bn,224
,2469.22,207.339,512,138.37,15.5,13.56\ncoat_lite_small,224,2466.91,207.531,512,19.84,3.96,22.09\nhiera_base_224,224,2456.41,208.411,512,51.52,9.4,30.42\nswin_s3_tiny_224,224,2454.84,208.551,512,28.33,4.64,19.13\nres2net50_26w_8s,224,2448.88,209.049,512,48.4,8.37,17.95\nregnetz_c16,320,2432.34,210.483,512,13.46,3.92,25.88\neca_nfnet_l0,288,2420.75,211.492,512,24.14,7.12,17.29\nresnetv2_152,224,2415.11,211.982,512,60.19,11.55,22.56\nnfnet_l0,288,2412.74,212.191,512,35.07,7.13,17.29\nnextvit_large,224,2408.12,212.596,512,57.87,10.78,28.99\nrepvit_m2_3,224,2406.51,212.743,512,23.69,4.57,26.21\nregnety_064,224,2405.63,212.821,512,30.58,6.39,16.41\necaresnet50t,288,2404.89,212.887,512,25.57,7.14,19.55\nregnetv_064,224,2402.43,213.102,512,30.58,6.39,16.41\nresnet101d,256,2401.57,213.179,512,44.57,10.55,22.25\nseresnet50t,288,2398.87,213.42,512,28.1,7.14,19.55\nhalonet50ts,256,2395.15,213.751,512,22.73,5.3,19.2\nnf_regnet_b4,320,2392.51,213.987,512,30.21,3.29,19.88\necaresnet50d,288,2392.47,213.991,512,25.58,7.19,19.72\nconvnext_tiny_hnf,288,2383.27,214.808,512,28.59,7.39,22.21\nregnety_080,224,2362.59,216.697,512,39.18,8.0,17.97\nnf_resnet50,288,2360.57,216.882,512,25.56,6.88,18.37\ntf_efficientnet_b3,300,2360.52,216.883,512,12.23,1.87,23.83\nseresnext101_32x4d,224,2356.8,217.226,512,48.96,8.02,21.26\nresnext50d_32x4d,288,2354.66,217.422,512,25.05,7.44,25.13\nlegacy_seresnext101_32x4d,224,2353.24,217.559,512,48.96,8.02,21.26\nresnet152,224,2347.77,218.062,512,60.19,11.56,22.56\nvit_small_patch16_36x1_224,224,2346.24,218.206,512,64.67,13.71,35.69\nresnest50d,224,2340.99,218.697,512,27.48,5.4,14.36\nmobileone_s4,224,2340.57,218.737,512,14.95,3.04,17.74\nrepvgg_b2,224,2337.11,219.056,512,89.02,20.45,12.9\nhiera_small_abswin_256,256,2334.48,219.308,512,34.36,8.29,26.38\nhrnet_w32,224,2333.41,219.405,512,41.23,8.97,22.02\nefficientnet_b3,320,2331.82,219.556,512,12.23,2.01,26.52\nvit_base_patch16_clip_quickgelu_224,224,2329.86,219.738,512,86.19,17.58,23.9\nresnetv2_152d,224,232
8.25,219.891,512,60.2,11.8,23.36\nefficientvit_l2,256,2320.46,220.631,512,63.71,9.09,25.49\nresnetv2_50d_gn,224,2319.35,220.732,512,25.57,4.38,11.92\ninception_v4,299,2318.78,220.781,512,42.68,12.28,15.09\nregnety_032,288,2316.01,221.053,512,19.44,5.29,18.61\nresnet152c,224,2297.8,222.806,512,60.21,11.8,23.36\nrepvgg_b2g4,224,2294.12,223.163,512,61.76,12.63,12.9\ndensenet121,288,2267.9,225.743,512,7.98,4.74,11.41\nvit_base_patch16_xp_224,224,2265.93,225.941,512,86.51,17.56,23.9\nlegacy_xception,299,2265.86,225.947,512,22.86,8.4,35.83\nnfnet_f0,256,2265.35,225.998,512,71.49,12.62,18.05\nresnet152d,224,2264.89,226.044,512,60.21,11.8,23.36\ncoatnet_rmlp_1_rw_224,224,2261.48,226.371,512,41.69,7.85,35.47\nmobilevitv2_175,256,2260.07,226.526,512,14.25,5.54,28.13\npvt_v2_b3,224,2255.37,227.0,512,45.24,6.92,37.7\ndeit3_small_patch16_384,384,2253.41,227.196,512,22.21,15.52,50.78\nxception41p,299,2252.41,227.29,512,26.91,9.25,39.86\nseresnetaa50d,288,2250.46,227.494,512,28.11,8.92,20.59\nefficientnet_el,300,2247.58,227.785,512,10.59,8.0,30.7\nvit_medium_patch16_rope_reg1_gap_256,256,2246.95,227.851,512,38.74,10.63,22.26\nefficientnet_el_pruned,300,2246.64,227.882,512,10.59,8.0,30.7\nese_vovnet39b_evos,224,2241.07,228.448,512,24.58,7.07,6.74\nresnetblur50,288,2230.72,229.506,512,25.56,8.52,19.87\ninception_next_small,224,2228.86,229.7,512,49.37,8.36,19.27\nswinv2_cr_tiny_224,224,2222.44,230.365,512,28.33,4.66,28.45\nvit_small_patch16_18x2_224,224,2220.98,230.512,512,64.67,13.71,35.69\nmaxvit_nano_rw_256,256,2207.5,231.919,512,15.45,4.46,30.28\ndla169,224,2206.1,232.067,512,53.39,11.6,20.2\nmaxvit_rmlp_nano_rw_256,256,2204.07,232.283,512,15.5,4.47,31.92\nvit_base_patch16_siglip_gap_256,256,2196.58,233.076,512,85.84,23.13,33.23\nregnetx_160,224,2194.16,233.329,512,54.28,15.99,25.52\nswinv2_cr_tiny_ns_224,224,2192.78,233.479,512,28.33,4.66,28.45\nregnety_080_tv,224,2184.79,234.333,512,39.38,8.51,19.73\nefficientvit_b3,224,2179.4,234.913,512,48.65,3.99,26.9\nresnet50_gn,224,2179.1
7,234.937,512,25.56,4.14,11.11\nvit_base_patch16_siglip_256,256,2176.19,235.258,512,92.93,23.44,33.63\nseresnext50_32x4d,288,2170.0,235.928,512,27.56,7.04,23.82\nrdnet_small,224,2169.02,236.035,512,50.44,8.74,22.55\nsequencer2d_m,224,2166.41,236.323,512,38.31,6.55,14.26\ntf_efficientnet_el,300,2156.43,237.414,512,10.59,8.0,30.7\nmobilenetv4_hybrid_medium,448,2154.42,237.637,512,11.07,4.2,29.64\ncoatnet_1_rw_224,224,2142.22,238.989,512,41.72,8.04,34.6\nedgenext_base,320,2141.55,239.066,512,18.51,6.01,24.32\nvgg19_bn,224,2133.62,239.953,512,143.68,19.66,14.86\nresnet152s,224,2129.43,240.425,512,60.32,12.92,24.96\nmvitv2_tiny,224,2125.43,240.877,512,24.17,4.7,21.16\nvit_base_patch16_reg4_gap_256,256,2118.83,241.627,512,86.62,23.5,33.89\nresnetblur50d,288,2115.44,242.017,512,25.58,8.92,21.19\nvit_mediumd_patch16_reg4_gap_256,256,2111.48,242.47,512,64.11,17.87,37.57\nconvnextv2_tiny,224,2107.29,242.943,512,28.64,4.47,13.44\nmobilenetv4_conv_aa_large,384,2105.52,243.157,512,32.59,7.07,32.29\nmaxvit_tiny_tf_224,224,2103.05,243.442,512,30.92,5.6,35.78\nconvnext_base,224,2096.16,244.241,512,88.59,15.38,28.75\nresnetv2_50d_frn,224,2077.9,246.382,512,25.59,4.33,11.92\nbotnet50ts_256,256,2077.88,246.39,512,22.74,5.54,22.23\nregnetx_120,224,2069.68,247.365,512,46.11,12.13,21.37\ncoatnet_rmlp_1_rw2_224,224,2068.04,247.549,512,41.72,8.11,40.13\nconvit_small,224,2065.19,247.902,512,27.78,5.76,17.87\nhrnet_w30,224,2062.37,248.224,512,37.71,8.15,21.21\nnf_ecaresnet101,224,2050.3,249.705,512,44.55,8.01,16.27\nnf_seresnet101,224,2045.44,250.297,512,49.33,8.02,16.27\ncoatnet_0_224,224,2040.72,250.864,512,25.04,4.58,24.01\nresnetv2_101,288,2033.05,251.823,512,44.54,12.94,26.83\ndpn92,224,2023.11,253.058,512,37.67,6.54,18.21\nfocalnet_small_srf,224,2004.67,255.387,512,49.89,8.62,26.26\ncrossvit_base_240,240,1979.19,258.671,512,105.03,21.22,36.33\ndensenetblur121d,288,1978.97,258.705,512,8.0,5.14,13.06\nfastvit_mci0,256,1974.2,259.33,512,11.41,2.42,18.29\ntwins_svt_base,224,1973.81,259.382
,512,56.07,8.59,26.33\nhgnetv2_b6,224,1973.41,259.428,512,75.26,16.88,21.23\nlegacy_seresnet152,224,1972.16,259.595,512,66.82,11.33,22.08\nhiera_base_plus_224,224,1970.81,259.766,512,69.9,12.67,37.98\necaresnet50t,320,1958.88,261.354,512,25.57,8.82,24.13\nregnety_040_sgn,224,1956.43,261.685,512,20.65,4.03,12.29\nwide_resnet50_2,288,1954.98,261.88,512,68.88,18.89,23.81\nresnet101,288,1954.77,261.909,512,44.55,12.95,26.83\ntwins_pcpvt_large,224,1953.99,262.014,512,60.99,9.84,35.82\nwide_resnet101_2,224,1953.76,262.043,512,126.89,22.8,21.23\ndensenet161,224,1946.37,263.038,512,28.68,7.79,11.06\ndla102x2,224,1944.36,263.309,512,41.28,9.34,29.91\nseresnet152,224,1938.57,264.096,512,66.82,11.57,22.61\nhalo2botnet50ts_256,256,1933.79,264.747,512,22.64,5.02,21.78\nregnety_120,224,1926.91,265.695,512,51.82,12.14,21.38\ngcvit_tiny,224,1921.55,266.438,512,28.22,4.79,29.82\nmobilenetv4_conv_large,448,1919.71,266.685,512,32.59,8.75,37.17\nrepvgg_b3g4,224,1914.94,267.358,512,83.83,17.89,15.1\nskresnext50_32x4d,224,1910.2,268.02,512,27.48,4.5,17.18\nxcit_tiny_12_p8_224,224,1904.82,268.773,512,6.71,4.81,23.6\ncaformer_s18,224,1897.73,269.774,512,26.34,4.13,19.39\nregnetz_b16_evos,224,1888.78,271.059,512,9.74,1.43,9.95\nhgnet_base,224,1879.47,272.399,512,71.58,25.14,15.47\nmaxxvit_rmlp_tiny_rw_256,256,1876.98,272.766,512,29.64,6.66,39.76\nxception41,299,1871.32,273.587,512,26.97,9.28,39.86\ncs3se_edgenet_x,320,1859.22,275.361,512,50.72,18.01,20.21\nhrnet_w18_ssld,288,1859.2,275.374,512,21.3,7.14,26.96\ntresnet_xl,224,1856.4,275.789,512,78.44,15.2,15.34\nconvnextv2_nano,288,1850.39,276.675,512,15.62,4.06,13.84\nfocalnet_small_lrf,224,1837.71,278.592,512,50.34,8.74,28.61\nxcit_small_24_p16_224,224,1834.87,279.023,512,47.67,9.1,23.64\nefficientvit_l2,288,1824.85,280.553,512,63.71,11.51,32.19\nnest_small,224,1822.19,280.962,512,38.35,10.35,40.04\nconvnext_small,288,1809.03,283.01,512,50.22,14.39,35.65\nrepvgg_b3,224,1809.03,283.005,512,123.09,29.16,15.1\nresnext101_64x4d,224,1807.89,283
.188,512,83.46,15.52,31.21\nvit_base_patch16_plus_240,240,1806.76,283.363,512,117.56,27.41,33.08\nvit_relpos_base_patch16_plus_240,240,1802.12,284.093,512,117.38,27.3,34.33\nnest_small_jx,224,1801.11,284.248,512,38.35,10.35,40.04\nsamvit_base_patch16_224,224,1797.25,284.868,512,86.46,17.54,24.54\nvit_small_patch8_224,224,1797.1,284.885,512,21.67,22.44,80.84\nresnext101_32x8d,224,1792.69,285.588,512,88.79,16.48,31.21\nresnetaa101d,288,1777.27,288.066,512,44.57,15.07,29.03\npoolformer_s36,224,1773.95,288.607,512,30.86,5.0,15.82\nvit_large_r50_s32_224,224,1771.48,289.009,512,328.99,19.58,24.41\ncait_xxs36_224,224,1769.48,289.336,512,17.3,3.77,30.34\nefficientnet_b4,320,1763.52,290.311,512,19.34,3.13,34.76\nconvformer_s18,224,1757.98,291.227,512,26.77,3.96,15.82\ndavit_base,224,1755.64,291.611,512,87.95,15.51,40.66\ndpn98,224,1751.0,292.387,512,61.57,11.73,25.2\ndm_nfnet_f0,256,1740.31,294.174,512,71.49,12.62,18.05\nmobilenetv4_hybrid_large,384,1735.11,295.061,512,37.76,7.77,34.52\neca_nfnet_l1,256,1730.61,295.834,512,41.41,9.62,22.04\nresnetv2_50d_evos,224,1724.23,296.927,512,25.59,4.33,11.92\nhrnet_w40,224,1723.16,297.088,512,57.56,12.75,25.29\nresnet152d,256,1717.79,298.038,512,60.21,15.41,30.51\ntnt_s_patch16_224,224,1716.84,298.207,512,23.76,5.24,24.37\neva02_base_patch16_clip_224,224,1710.74,299.27,512,86.26,17.62,26.32\nswin_small_patch4_window7_224,224,1706.58,299.999,512,49.61,8.77,27.47\ninception_resnet_v2,299,1690.61,302.813,512,55.84,13.18,25.06\nvit_betwixt_patch16_rope_reg4_gap_256,256,1681.28,304.517,512,60.23,16.52,28.24\nefficientvit_b3,256,1678.52,305.014,512,48.65,5.2,35.01\nregnety_160,224,1676.75,305.336,512,83.59,15.96,23.04\nconvnext_tiny,384,1663.32,307.803,512,28.59,13.14,39.48\nxception65p,299,1663.22,307.807,512,39.82,13.91,52.48\nregnetz_d32,320,1656.99,308.976,512,27.58,9.33,37.08\nefficientformerv2_s0,224,1656.06,309.153,512,3.6,0.41,5.3\nefficientnetv2_s,384,1651.63,309.97,512,21.46,8.44,35.77\nresnet200,224,1649.35,310.41,512,64.67,15.07
,32.19\nnf_regnet_b4,384,1647.49,310.762,512,30.21,4.7,28.61\nregnetz_d8,320,1642.91,311.627,512,23.37,6.19,37.08\nnfnet_f1,224,1642.07,311.777,512,132.63,17.87,22.94\nseresnet101,288,1638.36,312.492,512,49.33,12.95,26.87\ntf_efficientnetv2_s,384,1634.05,313.314,512,21.46,8.44,35.77\nregnetz_040,320,1629.96,314.078,512,27.12,6.35,37.78\nresnext101_32x4d,288,1628.74,314.339,512,44.18,13.24,35.09\nseresnext101_64x4d,224,1627.86,314.507,512,88.23,15.53,31.25\nmaxvit_tiny_rw_256,256,1627.49,314.58,512,29.07,6.74,44.35\nmaxvit_rmlp_tiny_rw_256,256,1624.07,315.242,512,29.15,6.77,46.92\nregnetz_040_h,320,1623.42,315.367,512,28.94,6.43,37.94\npoolformerv2_s24,224,1621.47,315.747,512,21.34,3.42,10.68\npvt_v2_b5,224,1619.65,316.103,512,81.96,11.76,50.92\nseresnext101_32x8d,224,1619.59,316.115,512,93.57,16.48,31.25\nresnest50d_4s2x40d,224,1617.32,316.558,512,30.42,4.4,17.94\nefficientnetv2_rw_s,384,1616.73,316.671,512,23.94,8.72,38.03\npvt_v2_b4,224,1616.7,316.679,512,62.56,10.14,53.74\nconvnext_base,256,1615.94,316.827,512,88.59,20.09,37.55\nmvitv2_small,224,1605.45,318.899,512,34.87,7.0,28.08\ncoat_tiny,224,1603.76,319.236,512,5.5,4.35,27.2\nmobilevitv2_200,256,1595.83,320.821,512,18.45,7.22,32.15\nswinv2_tiny_window8_256,256,1595.55,320.875,512,28.35,5.96,24.57\nfastvit_sa24,256,1588.77,322.245,512,21.55,3.8,24.32\nresnetrs101,288,1587.5,322.503,512,63.62,13.56,28.53\nvit_base_r50_s16_224,224,1585.43,322.924,512,97.89,21.66,35.28\nseresnext101d_32x8d,224,1581.7,323.686,512,93.59,16.72,32.05\nmvitv2_small_cls,224,1581.51,323.728,512,34.87,7.04,28.17\necaresnet101d,288,1578.08,324.428,512,44.57,13.35,28.19\ninception_next_base,224,1571.68,325.752,512,86.67,14.85,25.69\nmixnet_xxl,224,1570.27,326.029,512,23.96,2.04,23.43\nvit_medium_patch16_gap_384,384,1558.61,328.483,512,39.03,26.08,67.54\nmobilenetv4_conv_aa_large,448,1557.91,328.629,512,32.59,9.63,43.94\nvit_small_r26_s32_384,384,1549.48,330.418,512,36.47,10.43,29.85\nefficientformerv2_s1,224,1538.12,332.859,512,6.19,0.67,7
.66\nvolo_d2_224,224,1537.61,332.969,512,58.68,14.34,41.34\nresnetblur101d,288,1534.28,333.687,512,44.57,15.07,29.65\nresnet101d,320,1514.34,338.086,512,44.57,16.48,34.77\nrdnet_base,224,1513.06,338.372,512,87.45,15.4,31.14\nhiera_base_abswin_256,256,1498.55,341.631,512,51.27,12.46,40.7\nmaxvit_rmlp_small_rw_224,224,1496.71,342.069,512,64.9,10.75,49.3\nseresnextaa101d_32x8d,224,1495.01,342.457,512,93.59,17.25,34.16\ncoatnet_1_224,224,1490.49,343.476,512,42.23,8.7,39.0\nhrnet_w48_ssld,224,1486.26,344.473,512,77.47,17.34,28.56\nhrnet_w48,224,1482.42,345.349,512,77.47,17.34,28.56\nefficientnetv2_m,320,1471.33,347.961,512,54.14,11.01,39.97\nregnetz_e8,256,1443.64,354.642,512,57.7,9.91,40.94\nresnetrs152,256,1435.38,356.681,512,86.62,15.59,30.83\nseresnet152d,256,1433.76,357.085,512,66.84,15.42,30.56\nfocalnet_base_srf,224,1432.2,357.455,512,88.15,15.28,35.01\neva02_small_patch14_336,336,1426.95,358.791,512,22.13,15.48,54.33\nregnety_064,288,1425.13,359.247,512,30.58,10.56,27.11\nmaxvit_tiny_pm_256,256,1425.01,359.276,512,30.09,6.61,47.9\nregnetv_064,288,1424.51,359.406,512,30.58,10.55,27.11\nregnety_080,288,1420.63,360.386,512,39.18,13.22,29.69\nresnet50x4_clip_gap,288,1412.45,362.468,512,65.62,19.57,34.11\ntiny_vit_21m_384,384,1405.09,364.373,512,21.23,13.77,77.83\nseresnext101_32x4d,288,1404.5,364.525,512,48.96,13.25,35.12\ncrossvit_15_dagger_408,408,1400.47,365.57,512,28.5,21.45,95.05\nsequencer2d_l,224,1398.42,366.113,512,54.3,9.74,22.12\nxception65,299,1384.96,369.654,512,39.92,13.96,52.48\nresnet152,288,1381.72,370.537,512,60.19,19.11,37.28\nresnetv2_50d_gn,288,1381.53,370.586,512,25.57,7.24,19.7\nrexnetr_300,288,1381.25,370.662,512,34.81,5.59,36.61\nswinv2_cr_small_224,224,1376.99,371.809,512,49.7,9.07,50.27\ngmlp_b16_224,224,1375.86,372.108,512,73.08,15.78,30.21\ntwins_svt_large,224,1368.81,374.033,512,99.27,15.15,35.1\nswinv2_cr_small_ns_224,224,1365.84,374.842,512,49.7,9.08,50.27\nmobilenetv4_conv_aa_large,480,1358.03,377.001,512,32.59,11.05,50.45\nresnet50x4_
clip,288,1357.9,377.035,512,87.14,21.35,35.27\nvit_mediumd_patch16_rope_reg1_gap_256,256,1357.84,377.052,512,63.95,17.65,37.02\nvit_so150m_patch16_reg4_gap_256,256,1351.9,378.708,512,134.13,36.75,53.21\nhrnet_w44,224,1349.33,379.407,512,67.06,14.94,26.92\ncait_s24_224,224,1347.27,380.003,512,46.92,9.35,40.58\nxcit_tiny_24_p16_384,384,1345.07,380.635,512,12.12,6.87,34.29\nvit_so150m_patch16_reg4_map_256,256,1342.76,381.288,512,141.48,37.18,53.68\nvit_base_patch16_rope_reg1_gap_256,256,1339.91,382.096,512,86.43,23.22,33.39\ndm_nfnet_f1,224,1329.53,385.08,512,132.63,17.87,22.94\ntresnet_m,448,1324.12,386.655,512,31.39,22.99,29.21\nefficientnet_lite4,380,1322.67,387.08,512,13.01,4.04,45.66\nfocalnet_base_lrf,224,1320.54,387.704,512,88.75,15.43,38.13\nefficientvit_b3,288,1314.64,389.442,512,48.65,6.58,44.2\neva02_base_patch14_224,224,1312.14,390.184,512,85.76,23.22,36.55\nefficientformer_l7,224,1309.92,390.849,512,82.23,10.17,24.45\nnest_base,224,1307.57,391.551,512,67.72,17.96,53.39\nresnet50_gn,288,1305.16,392.272,512,25.56,6.85,18.37\nnest_base_jx,224,1294.67,395.451,512,67.72,17.96,53.39\nnextvit_small,384,1284.43,398.604,512,31.76,17.26,57.14\nconvnextv2_small,224,1282.44,399.217,512,50.32,8.71,21.56\nxcit_medium_24_p16_224,224,1280.1,399.955,512,84.4,16.13,31.71\nefficientnetv2_rw_m,320,1275.83,401.294,512,53.24,12.72,47.14\nconvnextv2_tiny,288,1271.87,402.531,512,28.64,7.39,22.21\nconvnext_base,288,1268.11,403.734,512,88.59,25.43,47.53\nmobilenetv4_hybrid_large,448,1265.42,404.591,512,37.76,10.74,48.61\ncoat_lite_medium,224,1251.87,408.976,512,44.57,9.81,40.06\ncoat_mini,224,1250.83,409.311,512,10.34,6.82,33.68\nmaxxvit_rmlp_small_rw_256,256,1243.18,411.831,512,66.01,14.67,58.38\ndpn131,224,1241.67,412.321,512,79.25,16.09,32.97\npoolformer_m36,224,1239.54,413.041,512,56.17,8.8,22.02\nefficientvit_l3,224,1238.99,413.221,512,246.04,27.62,39.16\nswin_base_patch4_window7_224,224,1237.04,413.872,512,87.77,15.47,36.63\ntf_efficientnet_lite4,380,1223.65,418.405,512,13.01
,4.04,45.66\nresnet200d,256,1220.57,419.459,512,64.69,20.0,43.09\nconvnext_large,224,1210.69,422.882,512,197.77,34.4,43.13\nvit_large_patch32_384,384,1200.4,426.508,512,306.63,45.31,43.86\nmaxvit_small_tf_224,224,1189.65,430.365,512,68.93,11.66,53.17\nswin_s3_small_224,224,1180.79,433.592,512,49.74,9.43,37.84\nxcit_small_12_p16_384,384,1174.92,435.759,512,26.25,14.14,36.51\nhgnetv2_b6,288,1173.05,436.453,512,75.26,27.9,35.09\nregnety_040_sgn,288,1170.65,437.346,512,20.65,6.67,20.3\nlevit_384_s8,224,1165.28,329.517,384,39.12,9.98,35.86\nregnety_120,288,1157.95,442.145,512,51.82,20.06,35.34\nseresnet152,288,1150.95,444.835,512,66.82,19.11,37.34\nresnest101e,256,1145.83,446.818,512,48.28,13.38,28.66\ngcvit_small,224,1144.63,447.289,512,51.09,8.57,41.61\nregnetz_b16_evos,288,1135.7,450.806,512,9.74,2.36,16.43\ncoatnet_2_rw_224,224,1135.63,450.831,512,73.87,15.09,49.22\nhrnet_w64,224,1135.29,450.949,512,128.06,28.97,35.09\nmvitv2_base,224,1134.0,451.484,512,51.47,10.16,40.5\nvit_betwixt_patch16_reg4_gap_384,384,1127.26,454.181,512,60.6,39.71,85.28\nfastvit_sa36,256,1120.28,457.014,512,31.53,5.64,34.61\nlevit_conv_384_s8,224,1119.98,342.844,384,39.12,9.98,35.86\ntnt_b_patch16_224,224,1117.46,458.164,512,65.41,14.09,39.01\nregnety_320,224,1117.25,458.249,512,145.05,32.34,30.26\nmvitv2_base_cls,224,1116.06,458.742,512,65.44,10.23,40.65\ndpn107,224,1115.38,459.002,512,86.92,18.38,33.46\ndensenet264d,224,1113.18,459.927,512,72.74,13.57,14.0\nefficientnet_b3_gn,288,1108.62,461.817,512,11.73,1.74,23.35\nregnetz_c16_evos,256,1107.52,462.279,512,13.49,2.48,16.57\neca_nfnet_l1,320,1106.04,462.894,512,41.41,14.92,34.42\nvolo_d3_224,224,1105.28,463.214,512,86.33,20.78,60.09\nhgnet_base,288,1104.26,463.623,512,71.58,41.55,25.57\nfastvit_mci1,256,1101.38,464.855,512,21.54,4.72,32.84\nconvit_base,224,1094.41,467.818,512,86.54,17.52,31.77\nnf_regnet_b5,384,1094.33,467.849,512,49.74,7.95,42.9\npoolformerv2_s36,224,1088.71,470.265,512,30.79,5.01,15.82\nresnet152d,320,1084.29,472.178,512,6
0.21,24.08,47.67\nresnext101_64x4d,288,1081.56,473.371,512,83.46,25.66,51.59\nefficientnet_b3_g8_gn,288,1073.57,476.896,512,14.25,2.59,23.35\ncoatnet_2_224,224,1069.58,359.001,384,74.68,16.5,52.67\nxception71,299,1060.08,482.964,512,42.34,18.09,69.92\nswinv2_base_window12_192,192,1057.24,484.264,512,109.28,11.9,39.72\nvitamin_base_224,224,1057.15,484.302,512,87.72,22.68,52.77\ndavit_large,224,1055.3,485.153,512,196.81,34.6,60.99\nlegacy_senet154,224,1054.93,485.323,512,115.09,20.77,38.69\nsenet154,224,1054.21,485.652,512,115.09,20.77,38.69\nmobilenetv4_conv_aa_large,544,1053.47,485.995,512,32.59,14.19,64.79\nconvnextv2_nano,384,1044.07,490.365,512,15.62,7.22,24.61\nresnext101_32x16d,224,1041.98,491.351,512,194.03,36.27,51.18\nefficientnet_b4,384,1041.96,491.357,512,19.34,4.51,50.04\ncoatnet_rmlp_2_rw_224,224,1039.54,492.496,512,73.88,15.18,54.78\nregnetx_320,224,1036.62,493.887,512,107.81,31.81,36.3\nconvnext_base,320,1033.25,495.508,512,88.59,31.39,58.68\nresnetv2_50x1_bit,448,1033.24,495.51,512,25.55,16.62,44.46\nmaxxvitv2_rmlp_base_rw_224,224,1030.26,496.942,512,116.09,24.2,62.77\nresnetv2_50d_evos,288,1024.95,499.517,512,25.59,7.15,19.7\nconvnext_small,384,1023.25,500.352,512,50.22,25.58,63.37\ncrossvit_18_dagger_408,408,1022.68,500.623,512,44.61,32.47,124.87\necaresnet200d,256,1012.11,505.857,512,64.69,20.0,43.15\nmaxvit_rmlp_small_rw_256,256,1011.88,505.971,512,64.9,14.15,66.09\nresnetrs200,256,1010.68,506.576,512,93.21,20.18,43.42\nseresnet200d,256,1009.34,507.25,512,71.86,20.01,43.15\nregnety_160,288,1007.2,508.318,512,83.59,26.37,38.07\nxcit_tiny_24_p8_224,224,1006.77,508.541,512,12.11,9.21,45.39\nswinv2_cr_base_224,224,1003.7,510.098,512,87.88,15.86,59.66\nresnetv2_50x3_bit,224,999.54,512.217,512,217.32,37.06,33.34\nmobilevitv2_175,384,997.46,256.635,256,14.25,12.47,63.29\nswinv2_cr_base_ns_224,224,996.85,513.602,512,87.88,15.86,59.66\ntf_efficientnetv2_m,384,995.99,514.039,512,54.14,15.85,57.52\nnextvit_base,384,991.15,516.552,512,44.82,24.64,73.95\nswinv
2_small_window8_256,256,991.15,516.555,512,49.73,11.58,40.14\nswinv2_cr_small_ns_256,256,989.97,517.174,512,49.7,12.07,76.21\ntf_efficientnet_b4,380,989.8,517.261,512,19.34,4.49,49.49\ncaformer_s36,224,984.99,519.776,512,39.3,8.0,37.53\nmaxvit_rmlp_base_rw_224,224,982.44,521.136,512,116.14,23.15,92.64\nswin_s3_base_224,224,980.38,522.23,512,71.13,13.69,48.26\nswinv2_tiny_window16_256,256,979.79,522.545,512,28.35,6.68,39.02\nresnet200,288,975.16,525.017,512,64.67,24.91,53.21\nseresnext101_32x8d,288,971.5,527.002,512,93.57,27.24,51.63\nxcit_nano_12_p8_384,384,969.96,527.836,512,3.05,6.34,46.08\nefficientvit_l2,384,967.67,529.083,512,63.71,20.45,57.01\nvolo_d1_384,384,963.34,531.465,512,26.78,22.75,108.55\ncoat_small,224,963.19,531.55,512,21.69,12.61,44.25\nconvnextv2_base,224,961.57,532.445,512,88.72,15.38,28.75\nmobilevitv2_150,384,958.44,400.634,384,10.59,9.2,54.25\nseresnext101d_32x8d,288,949.26,539.328,512,93.59,27.64,52.95\nmixer_l16_224,224,944.93,541.817,512,208.2,44.6,41.69\nvit_base_patch16_siglip_gap_384,384,944.73,541.939,512,86.09,55.43,101.3\nvit_base_patch16_384,384,941.77,543.639,512,86.86,55.54,101.56\ndeit_base_patch16_384,384,941.44,543.829,512,86.86,55.54,101.56\ndeit_base_distilled_patch16_384,384,940.85,544.169,512,87.63,55.65,101.82\nvit_base_patch16_clip_384,384,939.44,544.987,512,86.86,55.54,101.56\nvit_mediumd_patch16_reg4_gap_384,384,937.89,545.887,512,64.27,43.67,113.51\nvit_base_patch16_siglip_384,384,935.86,547.069,512,93.18,56.12,102.2\npoolformer_m48,224,931.73,549.496,512,73.47,11.59,29.17\nregnetz_e8,320,928.33,551.508,512,57.7,15.46,63.94\nconvnext_large_mlp,256,924.56,553.757,512,200.13,44.94,56.33\nfastvit_ma36,256,923.01,554.689,512,44.07,7.88,41.09\nefficientvit_l3,256,921.63,555.515,512,246.04,36.06,50.98\nconvformer_s36,224,921.06,555.861,512,40.01,7.67,30.5\nconvmixer_1024_20_ks9_p14,224,918.33,557.51,512,24.38,5.55,5.51\nresnetrs152,320,913.9,560.222,512,86.62,24.34,48.14\nseresnet152d,320,908.95,563.261,512,66.84,24.09,47.72\
nrdnet_large,224,908.27,563.694,512,186.27,34.74,46.67\neva_large_patch14_196,196,906.91,564.536,512,304.14,61.57,63.52\nvit_large_patch16_224,224,903.87,566.434,512,304.33,61.6,63.52\nseresnextaa101d_32x8d,288,901.06,568.175,512,93.59,28.51,56.44\nxcit_small_12_p8_224,224,893.66,572.906,512,26.21,18.69,47.21\nhrnet_w48_ssld,288,876.92,583.844,512,77.47,28.66,47.21\nnfnet_f2,256,871.75,587.309,512,193.78,33.76,41.85\nbeit_large_patch16_224,224,871.03,587.791,512,304.43,61.6,63.52\nefficientnetv2_m,416,869.46,588.822,512,54.14,18.6,67.5\ndeit3_base_patch16_384,384,868.27,589.658,512,86.88,55.54,101.56\nbeit_base_patch16_384,384,868.09,589.786,512,86.74,55.54,101.56\nefficientnet_b3_g8_gn,320,867.72,590.034,512,14.25,3.2,28.83\nbeitv2_large_patch16_224,224,866.84,590.635,512,304.43,61.6,63.52\nvit_small_patch14_dinov2,518,864.18,592.448,512,22.06,46.76,198.79\nvit_small_patch14_reg4_dinov2,518,861.06,594.598,512,22.06,46.95,199.77\ngcvit_base,224,849.54,602.663,512,90.32,14.87,55.48\nvit_base_patch16_18x2_224,224,848.77,603.204,512,256.73,52.51,71.38\ndeit3_large_patch16_224,224,848.18,603.626,512,304.37,61.6,63.52\nconvmixer_768_32,224,834.5,613.517,512,21.11,19.55,25.95\nfastvit_mci2,256,821.37,623.336,512,35.82,7.91,43.34\nhiera_large_224,224,815.26,627.987,512,213.74,40.34,83.37\nefficientformerv2_s2,224,808.52,633.235,512,12.71,1.27,11.77\nnextvit_large,384,808.08,633.581,512,57.87,32.03,90.76\nnfnet_f1,320,807.56,633.987,512,132.63,35.97,46.77\nefficientnet_b3_gn,320,800.5,639.584,512,11.73,2.14,28.83\nseresnet269d,256,798.35,641.309,512,113.67,26.59,53.6\nflexivit_large,240,798.21,641.41,512,304.36,70.99,75.39\necaresnet200d,288,791.65,646.732,512,64.69,25.31,54.59\nconvnext_xlarge,224,791.55,646.814,512,350.2,60.98,57.5\nseresnet200d,288,789.81,648.243,512,71.86,25.32,54.6\nlevit_512_s8,224,781.17,327.699,256,74.05,21.82,52.28\npoolformerv2_m36,224,774.93,660.677,512,56.08,8.81,22.02\nresnetrs270,256,774.23,661.28,512,129.86,27.06,55.84\nresnet200d,320,772.4,6
62.852,512,64.69,31.25,67.33\nxcit_large_24_p16_224,224,771.52,663.604,512,189.1,35.86,47.27\neca_nfnet_l2,320,767.25,667.296,512,56.72,20.95,47.43\nmaxvit_base_tf_224,224,759.21,674.372,512,119.47,24.04,95.01\nnf_regnet_b5,456,757.72,675.698,512,49.74,11.7,61.95\nswin_large_patch4_window7_224,224,755.87,677.341,512,196.53,34.53,54.94\nefficientnetv2_rw_m,416,752.57,680.317,512,53.24,21.49,79.62\ntiny_vit_21m_512,512,749.83,341.394,256,21.27,27.02,177.93\nlevit_conv_512_s8,224,734.53,348.504,256,74.05,21.82,52.28\nseresnextaa101d_32x8d,320,732.84,698.625,512,93.59,35.19,69.67\nresnetv2_152x2_bit,224,732.09,699.349,512,236.34,46.95,45.11\nregnetz_d8_evos,256,731.37,700.043,512,23.46,4.5,24.92\nconvnext_large,288,725.92,705.289,512,197.77,56.87,71.29\ncaformer_m36,224,725.68,705.522,512,56.2,13.29,50.48\nswinv2_base_window8_256,256,724.64,706.543,512,87.92,20.37,52.59\nconvnext_base,384,720.01,711.078,512,88.59,45.21,84.49\nconvnextv2_tiny,384,713.41,538.235,384,28.64,13.14,39.48\ndm_nfnet_f2,256,710.75,720.35,512,193.78,33.76,41.85\nmobilevitv2_200,384,710.26,540.624,384,18.45,16.24,72.34\nregnetz_c16_evos,320,708.89,722.237,512,13.49,3.86,25.88\nvit_large_patch16_siglip_gap_256,256,700.2,731.198,512,303.36,80.8,88.34\nvit_large_patch16_siglip_256,256,695.67,735.956,512,315.96,81.34,88.88\nmaxxvitv2_rmlp_large_rw_224,224,685.6,746.776,512,215.42,44.14,87.15\ndavit_huge,224,685.53,746.84,512,348.92,61.23,81.32\nvit_large_patch14_clip_224,224,677.32,755.901,512,304.2,81.08,88.79\nvit_large_patch14_224,224,676.33,757.004,512,304.2,81.08,88.79\nvit_base_patch8_224,224,676.06,757.301,512,86.58,78.22,161.69\nconvformer_m36,224,674.99,758.507,512,57.05,12.89,42.05\nregnety_640,224,667.49,767.034,512,281.38,64.16,42.5\nresnetv2_101x1_bit,448,667.21,767.353,512,44.54,31.65,64.93\nefficientnet_b5,416,666.49,768.178,512,30.39,8.27,80.68\nswinv2_large_window12_192,192,666.31,768.391,512,228.77,26.17,56.53\nvolo_d4_224,224,665.55,769.269,512,192.96,44.34,80.22\ntresnet_l,448,657.
82,778.304,512,55.99,43.59,47.56\ncoatnet_3_224,224,655.14,390.726,256,166.97,36.56,79.01\nxcit_tiny_12_p8_384,384,653.44,783.523,512,6.71,14.13,69.14\ncoatnet_rmlp_3_rw_224,224,653.36,391.8,256,165.15,33.56,79.47\ncoatnet_3_rw_224,224,652.95,392.05,256,181.81,33.44,73.83\ndm_nfnet_f1,320,651.19,786.234,512,132.63,35.97,46.77\nresnetrs200,320,643.1,796.128,512,93.21,31.51,67.81\ntf_efficientnetv2_m,480,636.24,804.713,512,54.14,24.76,89.84\ncaformer_s18,384,632.12,809.945,512,26.34,13.42,77.34\nxcit_small_24_p16_384,384,627.98,815.288,512,47.67,26.72,68.58\nseresnet269d,288,624.99,819.196,512,113.67,33.65,67.81\nswinv2_cr_large_224,224,623.13,821.641,512,196.68,35.1,78.42\nswinv2_small_window16_256,256,618.81,827.379,512,49.73,12.82,66.29\nvit_large_r50_s32_384,384,616.49,830.49,512,329.09,57.43,76.52\nconvformer_s18,384,599.08,854.625,512,26.77,11.63,46.49\nmaxvit_tiny_tf_384,384,591.24,432.978,256,30.98,17.53,123.42\nconvnext_large_mlp,320,590.57,866.941,512,200.13,70.21,88.02\nresnetv2_101x3_bit,224,585.41,874.58,512,387.93,71.23,48.7\npoolformerv2_m48,224,582.9,878.354,512,73.35,11.59,29.17\nconvnextv2_base,288,582.44,879.035,512,88.72,25.43,47.53\nefficientvit_l3,320,582.15,659.603,384,246.04,56.32,79.34\nvit_large_patch14_clip_quickgelu_224,224,580.69,881.691,512,303.97,81.08,88.79\nhalonet_h1,256,577.45,443.308,256,8.1,3.0,51.17\nefficientnet_b5,448,574.83,890.666,512,30.39,9.59,93.56\nrepvgg_d2se,320,572.21,894.755,512,133.33,74.57,46.82\nefficientnetv2_l,384,566.11,904.4,512,118.52,36.1,101.16\nregnety_160,384,564.72,679.967,384,83.59,46.87,67.67\nconvnextv2_large,224,563.29,908.915,512,197.96,34.4,43.13\nvit_large_patch14_xp_224,224,561.94,911.104,512,304.06,81.01,88.79\ntf_efficientnetv2_l,384,557.98,917.578,512,118.52,36.1,101.16\nswinv2_cr_tiny_384,384,544.55,470.091,256,28.33,15.34,161.01\neca_nfnet_l2,384,537.88,951.861,512,56.72,30.05,68.28\nmaxvit_large_tf_224,224,537.76,714.064,384,211.79,43.68,127.35\ncaformer_b36,224,536.71,953.938,512,98.75,23.22
,67.3\nvit_base_r50_s16_384,384,536.14,954.948,512,98.95,67.43,135.03\ninception_next_base,384,531.25,963.743,512,86.67,43.64,75.48\nefficientformerv2_l,224,530.93,964.32,512,26.32,2.59,18.54\nvolo_d2_384,384,526.74,971.989,512,58.87,46.17,184.51\nresnext101_32x32d,224,520.8,983.086,512,468.53,87.29,91.12\ntf_efficientnet_b5,456,517.6,741.872,384,30.39,10.46,98.86\nvit_so400m_patch14_siglip_224,224,517.53,989.289,512,427.68,110.26,106.73\nvit_so400m_patch14_siglip_gap_224,224,514.47,995.172,512,412.44,109.57,106.13\nnasnetalarge,331,510.54,752.056,384,88.75,23.89,90.56\necaresnet269d,320,508.74,1006.384,512,102.09,41.53,83.69\nvit_base_patch16_siglip_gap_512,512,505.39,1013.062,512,86.43,107.0,246.15\nvit_base_patch16_siglip_512,512,501.31,1021.3,512,93.52,108.22,247.74\nconvformer_b36,224,500.2,1023.579,512,99.88,22.69,56.06\nvitamin_large2_224,224,493.33,778.367,384,333.58,75.05,112.83\nvitamin_large_224,224,493.04,778.82,384,333.32,75.05,112.83\neca_nfnet_l3,352,491.65,1041.375,512,72.04,32.57,73.12\nresnetrs350,288,487.42,1050.403,512,163.96,43.67,87.09\nconvnext_xlarge,288,480.37,1065.812,512,350.2,100.8,95.05\nxcit_small_24_p8_224,224,469.3,1090.967,512,47.63,35.81,90.78\nregnetz_d8_evos,320,467.45,1095.292,512,23.46,7.03,38.92\npnasnet5large,331,464.29,827.046,384,86.06,25.04,92.89\nnfnet_f2,352,462.28,1107.536,512,193.78,63.22,79.06\nswinv2_base_window12to16_192to256,256,456.3,841.538,384,87.92,22.02,84.71\nswinv2_base_window16_256,256,456.2,841.714,384,87.92,22.02,84.71\ntresnet_xl,448,455.93,1122.965,512,78.44,60.77,61.31\nresnet50x16_clip_gap,384,454.8,1125.735,512,136.2,70.32,100.64\nxcit_medium_24_p16_384,384,445.5,1149.237,512,84.4,47.39,91.64\nresnet50x16_clip,384,441.37,1160.0,512,167.33,74.9,103.54\nvolo_d5_224,224,435.53,1175.574,512,295.46,72.4,118.11\ncoatnet_4_224,224,431.48,593.293,256,275.43,62.48,129.26\neva02_large_patch14_224,224,426.82,1199.548,512,303.27,81.15,97.2\nnfnet_f3,320,426.82,1199.559,512,254.92,68.77,83.93\neva02_large_patch14_
clip_224,224,421.33,1215.175,512,304.11,81.18,97.2\ncait_xxs24_384,384,421.2,1215.542,512,12.03,9.63,122.66\necaresnet269d,352,418.83,1222.426,512,102.09,50.25,101.25\ncoat_lite_medium_384,384,416.13,1230.38,512,44.57,28.73,116.7\nconvnext_large_mlp,384,412.9,929.978,384,200.13,101.11,126.74\nconvnext_large,384,407.18,943.038,384,197.77,101.1,126.74\nresnetrs270,352,405.86,1261.514,512,129.86,51.13,105.48\nefficientnetv2_xl,384,403.28,1269.563,512,208.12,52.81,139.2\nefficientvit_l3,384,402.54,635.936,256,246.04,81.08,114.02\ntf_efficientnetv2_xl,384,399.68,1281.01,512,208.12,52.81,139.2\nmaxvit_small_tf_384,384,396.22,484.566,192,69.02,35.87,183.65\nresmlp_big_24_224,224,396.07,1292.67,512,129.14,100.23,87.31\nresnest200e,320,383.61,1334.668,512,70.2,35.69,82.78\nvitamin_large2_256,256,379.06,1013.01,384,333.64,99.0,154.99\nvitamin_large_256,256,378.79,1013.723,384,333.38,99.0,154.99\nmvitv2_large_cls,224,378.22,1353.698,512,234.58,42.17,111.69\nregnety_320,384,378.11,1015.542,384,145.05,95.0,88.87\nmvitv2_large,224,376.01,1361.654,512,217.99,43.87,112.02\ndm_nfnet_f2,352,373.26,1371.694,512,193.78,63.22,79.06\nhiera_huge_224,224,368.13,1390.782,512,672.78,124.85,150.95\nseresnextaa201d_32x8d,320,367.34,1393.779,512,149.39,70.22,138.71\nefficientnetv2_l,480,363.62,1408.035,512,118.52,56.4,157.99\ntf_efficientnetv2_l,480,357.78,1430.984,512,118.52,56.4,157.99\nconvnextv2_large,288,352.91,1088.061,384,197.96,56.87,71.29\nvit_huge_patch14_gap_224,224,352.6,1452.04,512,630.76,166.73,138.74\nvit_huge_patch14_224,224,348.57,1468.819,512,630.76,167.4,139.41\nvit_huge_patch14_clip_224,224,347.99,1471.296,512,632.05,167.4,139.41\nxcit_tiny_24_p8_384,384,345.08,1483.71,512,12.11,27.05,132.95\ndm_nfnet_f3,320,344.87,1484.575,512,254.92,68.77,83.93\nregnety_1280,224,341.69,1498.349,512,644.81,127.66,71.58\nmaxxvitv2_rmlp_base_rw_384,384,339.16,1132.191,384,116.09,72.98,213.74\nvit_base_patch14_dinov2,518,337.26,1518.099,512,86.58,151.71,397.58\nswinv2_cr_small_384,384,336.39,7
61.011,256,49.7,29.7,298.03\ncaformer_s36,384,336.09,1523.371,512,39.3,26.08,150.33\nresnetrs420,320,333.88,1533.484,512,191.89,64.2,126.56\ndeit3_huge_patch14_224,224,333.48,1535.294,512,632.13,167.4,139.41\nxcit_medium_24_p8_224,224,332.48,1539.899,512,84.32,63.53,121.23\nvit_base_patch14_reg4_dinov2,518,332.24,1541.02,512,86.58,152.25,399.53\nmaxvit_tiny_tf_512,512,331.23,386.422,128,31.05,33.49,257.59\nconvnextv2_base,384,329.1,777.859,256,88.72,45.21,84.49\ncoatnet_rmlp_2_rw_384,384,328.74,584.014,192,73.88,47.69,209.43\nmaxvit_xlarge_tf_224,224,320.25,799.344,256,506.99,97.52,191.04\nefficientnet_b6,528,317.5,806.282,256,43.04,19.4,167.39\nconvformer_s36,384,314.4,1628.489,512,40.01,22.54,89.62\neva02_base_patch14_448,448,313.81,1631.515,512,87.12,107.11,259.14\nfocalnet_huge_fl3,224,310.01,1651.515,512,745.28,118.26,104.8\nrdnet_large,384,309.02,621.304,192,186.27,102.09,137.13\nswin_base_patch4_window12_384,384,307.05,833.708,256,87.9,47.19,134.78\nvit_huge_patch14_clip_quickgelu_224,224,306.8,1668.804,512,632.08,167.4,139.41\nxcit_small_12_p8_384,384,304.54,1260.903,384,26.21,54.92,138.29\ntf_efficientnet_b6,528,302.7,845.699,256,43.04,19.4,167.39\nsam2_hiera_tiny,896,302.68,211.431,64,26.85,99.86,384.63\nvitamin_xlarge_256,256,302.06,847.497,256,436.06,130.13,177.37\neca_nfnet_l3,448,301.57,1697.734,512,72.04,52.55,118.4\nvit_giant_patch16_gap_224,224,298.97,1712.537,512,1011.37,202.46,139.26\nmaxvit_rmlp_base_rw_384,384,297.7,859.92,256,116.14,70.97,318.95\ncait_xs24_384,384,297.51,1720.911,512,26.67,19.28,183.98\nswinv2_cr_huge_224,224,296.61,1294.596,384,657.83,115.97,121.08\neva_large_patch14_336,336,295.4,1733.205,512,304.53,191.1,270.24\nvit_large_patch16_siglip_gap_384,384,294.75,1737.034,512,303.69,190.85,269.55\nvit_large_patch14_clip_336,336,294.46,1738.769,512,304.53,191.11,270.24\nvit_large_patch16_384,384,293.26,1745.838,512,304.72,191.21,270.24\nvit_huge_patch14_xp_224,224,293.14,1746.554,512,631.8,167.3,139.41\nvit_large_patch16_siglip_384,3
84,292.81,1748.518,512,316.28,192.07,270.75\nswinv2_large_window12to16_192to256,256,290.75,880.463,256,196.74,47.81,121.53\nconvnext_xxlarge,256,282.43,1359.586,384,846.47,198.09,124.45\ncait_xxs36_384,384,281.8,1816.89,512,17.37,14.35,183.7\nbeit_large_patch16_384,384,279.47,1832.019,512,305.0,191.21,270.24\ndeit3_large_patch16_384,384,278.63,1837.554,512,304.76,191.21,270.24\nconvnextv2_huge,224,275.27,929.965,256,660.29,115.0,79.07\ndavit_giant,224,272.18,1410.782,384,1406.47,192.92,153.06\nvolo_d3_448,448,271.58,1885.21,512,86.63,96.33,446.83\nresnetrs350,384,270.92,1889.827,512,163.96,77.59,154.74\nconvnext_xlarge,384,270.45,946.534,256,350.2,179.2,168.99\nxcit_large_24_p16_384,384,265.95,1925.177,512,189.1,105.35,137.17\nresnetv2_152x2_bit,384,261.67,1467.48,384,236.34,136.16,132.56\nsam2_hiera_small,896,256.83,249.174,64,33.95,123.99,442.63\nvit_large_patch14_clip_quickgelu_336,336,255.19,2006.359,512,304.29,191.11,270.24\nresnetv2_152x4_bit,224,254.45,2012.187,512,936.53,186.9,90.22\nnfnet_f3,416,253.47,2019.94,512,254.92,115.58,141.78\nseresnextaa201d_32x8d,384,252.66,2026.392,512,149.39,101.11,199.72\nmaxvit_base_tf_384,384,251.04,764.813,192,119.65,73.8,332.9\ncoatnet_5_224,224,249.66,769.027,192,687.47,145.49,194.24\nresnetv2_50x3_bit,448,249.4,769.836,192,217.32,145.7,133.37\nswinv2_cr_base_384,384,247.1,1035.999,256,87.88,50.57,333.68\ncaformer_m36,384,246.12,1040.112,256,56.2,42.11,196.35\nnfnet_f4,384,240.45,2129.281,512,316.07,122.14,147.57\neva_giant_patch14_224,224,234.57,2182.672,512,1012.56,267.18,192.64\neva_giant_patch14_clip_224,224,231.09,2215.577,512,1012.59,267.18,192.64\nconvformer_m36,384,230.39,1111.149,256,57.05,37.87,123.56\nvit_giant_patch14_224,224,228.92,2236.573,512,1012.61,267.18,192.64\nefficientnetv2_xl,512,228.83,2237.451,512,208.12,93.85,247.32\nregnety_640,384,228.31,1121.234,256,281.38,188.47,124.83\nvit_giant_patch14_clip_224,224,226.83,2257.165,512,1012.65,267.18,192.64\ntf_efficientnetv2_xl,512,226.19,2263.608,512,208.12
,93.85,247.32\nmaxvit_small_tf_512,512,222.44,431.561,96,69.13,67.26,383.77\ncait_s24_384,384,219.04,2337.407,512,47.06,32.17,245.31\nvitamin_large_336,336,215.67,890.242,192,333.57,175.72,307.47\nvitamin_large2_336,336,215.39,891.377,192,333.83,175.72,307.47\nfocalnet_huge_fl4,224,209.76,2440.802,512,686.46,118.9,113.34\ndm_nfnet_f3,416,206.43,2480.193,512,254.92,115.58,141.78\nxcit_large_24_p8_224,224,198.9,2574.096,512,188.93,141.23,181.56\nconvnextv2_large,384,198.73,966.094,192,197.96,101.1,126.74\nresnetrs420,416,196.85,2601.004,512,191.89,108.45,213.79\ndm_nfnet_f4,384,196.6,2604.26,512,316.07,122.14,147.57\nswin_large_patch4_window12_384,384,191.14,669.643,128,196.74,104.08,202.16\nefficientnet_b7,600,188.83,1016.782,192,66.35,38.33,289.94\neva02_large_patch14_clip_336,336,186.74,2741.767,512,304.43,191.34,289.13\nresnetv2_152x2_bit,448,184.7,2079.012,384,236.34,184.99,180.43\ncaformer_b36,384,181.9,1407.351,256,98.75,72.33,261.79\ntf_efficientnet_b7,600,181.61,1057.177,192,66.35,38.33,289.94\nmaxvit_large_tf_384,384,178.2,718.272,128,212.03,132.55,445.84\nnfnet_f5,416,172.22,2972.911,512,377.21,170.71,204.56\nvitamin_xlarge_336,336,171.84,744.851,128,436.06,230.18,347.33\nconvformer_b36,384,170.69,1499.731,256,99.88,66.67,164.75\nvit_so400m_patch14_siglip_384,384,169.04,3028.862,512,428.23,335.4,452.89\nresnet50x64_clip_gap,448,168.37,2280.66,384,365.03,253.96,233.22\nvit_so400m_patch14_siglip_gap_384,384,168.28,3042.431,512,412.99,333.46,451.19\nconvnextv2_huge,288,166.55,1152.798,192,660.29,190.1,130.7\nmvitv2_huge_cls,224,166.12,2311.572,384,694.8,120.67,243.63\nresnet50x64_clip,448,163.96,1561.362,256,420.38,265.02,239.13\nvitamin_large2_384,384,163.86,781.139,128,333.97,234.44,440.16\nvitamin_large_384,384,163.84,781.221,128,333.71,234.44,440.16\nvolo_d4_448,448,162.45,3151.648,512,193.41,197.13,527.35\nxcit_small_24_p8_384,384,160.35,2394.77,384,47.63,105.24,265.91\nswinv2_cr_large_384,384,155.84,821.327,128,196.68,108.96,404.96\nfocalnet_large_fl3,38
4,153.3,1669.857,256,239.13,105.06,168.04\ncait_s36_384,384,150.49,3402.164,512,68.37,47.99,367.4\nresnetv2_101x3_bit,448,150.3,1277.431,192,387.93,280.33,194.78\ndavit_base_fl,768,149.95,853.596,128,90.37,190.32,530.15\nvit_huge_patch14_clip_336,336,148.98,3436.652,512,632.46,390.97,407.54\nfocalnet_large_fl4,384,146.26,1750.32,256,239.32,105.2,181.78\nbeit_large_patch16_512,512,143.19,3575.759,512,305.67,362.24,656.39\ndm_nfnet_f5,416,140.91,3633.497,512,377.21,170.71,204.56\nmaxvit_base_tf_512,512,140.65,682.546,96,119.88,138.02,703.99\nsam2_hiera_base_plus,896,138.32,462.665,64,68.68,227.48,828.88\nnfnet_f4,512,135.39,2836.163,384,316.07,216.26,262.26\nconvmixer_1536_20,224,135.19,3787.319,512,51.63,48.68,33.03\nvitamin_xlarge_384,384,130.77,978.777,128,436.06,306.38,493.46\nvit_gigantic_patch14_224,224,130.62,3919.717,512,1844.44,483.95,275.37\nvit_gigantic_patch14_clip_224,224,130.29,3929.635,512,1844.91,483.96,275.37\nnfnet_f6,448,127.99,4000.364,512,438.36,229.7,273.62\nefficientnet_b8,672,123.17,1039.173,128,87.41,63.48,442.89\nswinv2_base_window12to24_192to384,384,119.99,533.358,64,87.92,55.25,280.36\ntf_efficientnet_b8,672,119.33,1072.656,128,87.41,63.48,442.89\nvit_so400m_patch14_siglip_gap_448,448,116.66,3291.487,384,413.33,487.18,764.26\nvit_huge_patch14_clip_378,378,116.09,4410.348,512,632.68,503.79,572.79\nregnety_1280,384,115.79,1105.427,128,644.81,374.99,210.2\nxcit_medium_24_p8_384,384,113.78,2250.01,256,84.32,186.67,354.73\ndm_nfnet_f4,512,110.56,3473.149,384,316.07,216.26,262.26\nfocalnet_xlarge_fl3,384,108.72,1765.946,192,408.79,185.61,223.99\nvit_huge_patch16_gap_448,448,107.19,3582.45,384,631.67,544.7,636.83\nvit_large_patch14_dinov2,518,106.66,3600.098,384,304.37,507.15,1058.82\nvolo_d5_448,448,106.54,3604.236,384,295.91,315.06,737.92\nvit_large_patch14_reg4_dinov2,518,106.3,3612.324,384,304.37,508.9,1064.02\ndm_nfnet_f6,448,104.84,4883.385,512,438.36,229.7,273.62\nmaxvit_xlarge_tf_384,384,104.53,918.413,96,475.32,292.78,668.76\nfocalnet_xla
rge_fl4,384,103.87,1848.501,192,409.03,185.79,242.31\nvit_huge_patch14_clip_quickgelu_378,378,103.58,3707.381,384,632.68,503.79,572.79\neva02_large_patch14_448,448,102.65,3740.795,384,305.08,362.33,689.95\nnfnet_f5,544,100.78,3810.204,384,377.21,290.97,349.71\neva_giant_patch14_336,336,100.31,5104.356,512,1013.01,620.64,550.67\nmaxvit_large_tf_512,512,99.37,644.068,64,212.33,244.75,942.15\nnfnet_f7,480,97.93,3921.33,384,499.5,300.08,355.86\nconvnextv2_huge,384,93.62,1025.37,96,660.29,337.96,232.35\nswinv2_cr_giant_224,224,88.64,2165.946,192,2598.76,483.85,309.15\ndm_nfnet_f5,544,82.3,4666.041,384,377.21,290.97,349.71\ntf_efficientnet_l2,475,82.12,1558.754,128,480.31,172.11,609.89\nvolo_d5_512,512,81.71,3133.006,256,296.09,425.09,1105.37\nnfnet_f6,576,77.44,4958.912,384,438.36,378.69,452.2\nswinv2_large_window12to24_192to384,384,76.53,627.149,48,196.74,116.15,407.83\nswinv2_cr_huge_384,384,73.82,866.901,64,657.94,352.04,583.18\nxcit_large_24_p8_384,384,68.02,2822.791,192,188.93,415.0,531.82\ncait_m36_384,384,64.68,3958.009,256,271.22,173.11,734.81\ndm_nfnet_f6,576,63.38,4039.147,256,438.36,378.69,452.2\nnfnet_f7,608,61.1,4189.78,256,499.5,480.39,570.85\nregnety_2560,384,60.04,1598.871,96,1282.6,747.83,296.49\ndavit_huge_fl,768,58.91,1086.416,64,360.64,744.84,1060.3\nmaxvit_xlarge_tf_512,512,58.65,818.415,48,475.77,534.14,1413.22\nresnetv2_152x4_bit,480,57.16,1679.394,96,936.53,844.84,414.26\nconvnextv2_huge,512,52.17,920.112,48,660.29,600.81,413.07\nsam2_hiera_large,1024,45.19,1062.166,48,212.15,907.48,2190.34\nsamvit_base_patch16,1024,33.25,481.156,16,89.67,486.43,1343.27\neva_giant_patch14_560,560,33.13,5795.849,192,1014.45,1906.76,2577.17\nvit_giant_patch14_dinov2,518,32.93,3886.558,128,1136.48,1784.2,2757.89\nvit_giant_patch14_reg4_dinov2,518,32.91,3889.85,128,1136.48,1790.08,2771.21\nefficientnet_l2,800,29.86,1607.312,48,480.31,479.12,1707.39\ntf_efficientnet_l2,800,29.23,1642.279,48,480.31,479.12,1707.39\ncait_m48_448,448,27.4,4671.94,128,356.46,329.41,1708.23\
nswinv2_cr_giant_384,384,23.87,1340.314,32,2598.76,1450.71,1394.86\nvit_so400m_patch14_siglip_gap_896,896,21.24,4519.479,96,416.87,2731.49,8492.88\nsamvit_large_patch16,1024,16.18,741.849,12,308.28,1493.86,2553.78\nsamvit_huge_patch16,1024,10.84,737.698,8,637.03,2982.23,3428.16\n"
  },
  {
    "path": "results/benchmark-infer-amp_bf16-nchw-pt291-cu130-pro6000maxq-dynamo.csv",
    "content": "model,infer_img_size,infer_samples_per_sec,infer_step_time,infer_batch_size,param_count,infer_gmacs,infer_macts\r\ntest_vit,160,502268.05,4.061,2048,0.37,0.04,0.48\r\ntest_vit2,160,420763.83,4.842,2048,0.46,0.05,0.64\r\ntest_byobnet,160,311134.5,6.562,2048,0.46,0.03,0.43\r\ntest_efficientnet,160,275765.36,7.399,2048,0.36,0.06,0.55\r\ntest_convnext,160,264763.02,7.72,2048,0.27,0.03,0.58\r\ntest_mambaout,160,262435.87,7.788,2048,0.45,0.03,0.53\r\ntest_convnext3,160,242442.46,8.427,2048,0.47,0.05,0.63\r\ntest_vit3,160,236715.42,8.635,2048,0.93,0.09,1.0\r\ntest_convnext2,160,236691.12,8.634,2048,0.48,0.05,0.63\r\ntest_resnet,160,207913.58,9.819,2048,0.47,0.1,0.64\r\ntest_efficientnet_ln,160,206558.12,9.895,2048,0.36,0.06,0.55\r\ntinynet_e,106,188194.78,10.841,2048,2.04,0.03,0.69\r\ntest_efficientnet_evos,160,188180.81,10.867,2048,0.36,0.06,0.55\r\ntest_efficientnet_gn,160,181462.61,11.268,2048,0.36,0.06,0.55\r\ntest_mambaout,192,180538.73,11.328,2048,0.45,0.04,0.77\r\ntest_vit4,160,176401.16,11.593,2048,1.02,0.11,1.07\r\nmobilenetv4_conv_small_035,224,176051.17,11.611,2048,1.91,0.05,0.98\r\nefficientvit_m0,224,160254.47,12.76,2048,2.33,0.08,0.91\r\nmobilenetv4_conv_small_050,224,149296.03,13.682,2048,2.24,0.07,1.18\r\nmobilenetv4_conv_small_035,256,131937.0,15.489,2048,1.91,0.06,1.28\r\nlcnet_035,224,130632.89,15.65,2048,1.64,0.03,1.04\r\nmobilenetv3_small_050,224,128502.87,15.891,2048,1.59,0.03,0.92\r\nmobilenetv4_conv_small_050,256,113439.04,18.024,2048,2.24,0.09,1.55\r\nlcnet_050,224,110820.97,18.428,2048,1.88,0.05,1.26\r\ntest_nfnet,160,110630.57,18.49,2048,0.38,0.29,1.2\r\nefficientvit_m1,224,108273.59,18.874,2048,2.96,0.17,1.33\r\nstarnet_s050,224,107703.92,18.971,2048,0.54,0.09,1.57\r\nshvit_s1,224,107172.42,19.091,2048,6.31,0.24,1.39\r\ntf_mobilenetv3_small_minimal_100,224,104954.57,19.486,2048,2.04,0.06,1.41\r\nefficientvit_m2,224,100862.2,20.264,2048,4.17,0.2,1.47\r\nmobilenetv3_small_075,224,95400.23,21.446,2048,2.04,0.05,1.3\r\nefficientvit_m
3,224,92120.65,22.18,2048,6.88,0.26,1.62\r\nefficientvit_m4,224,88869.05,23.025,2048,8.78,0.3,1.7\r\nmobilenetv4_conv_small,224,87198.78,23.442,2048,3.77,0.19,1.97\r\nmobilenetv3_small_100,224,86305.1,23.704,2048,2.54,0.06,1.42\r\ntinynet_d,152,85770.05,23.84,2048,2.34,0.05,1.42\r\nshvit_s2,224,84463.98,24.221,2048,11.45,0.37,1.6\r\ntf_mobilenetv3_small_075,224,78891.68,25.941,2048,2.04,0.05,1.3\r\nrepghostnet_050,224,77551.04,26.356,2048,2.31,0.05,2.02\r\nresnet10t,176,76610.48,26.702,2048,5.44,0.7,1.51\r\nlevit_128s,224,76601.43,26.711,2048,7.76,0.3,1.88\r\nlcnet_075,224,75396.61,27.135,2048,2.36,0.1,1.99\r\nlevit_conv_128s,224,74988.77,27.279,2048,7.76,0.3,1.88\r\ntf_mobilenetv3_small_100,224,72535.3,28.217,2048,2.54,0.06,1.42\r\nresnet18,160,70586.31,28.979,2048,11.69,0.93,1.27\r\nmnasnet_small,224,68666.61,29.796,2048,2.03,0.07,2.16\r\nregnetx_002,224,67954.75,30.099,2048,2.68,0.2,2.16\r\nghostnet_050,224,67054.94,30.503,2048,2.59,0.05,1.77\r\nstarnet_s100,224,65706.01,31.13,2048,1.04,0.19,2.68\r\nmobilenetv4_conv_small,256,65444.58,31.252,2048,3.77,0.25,2.57\r\nfasternet_t0,224,64457.1,31.738,2048,3.91,0.34,1.97\r\nefficientvit_m5,224,63352.69,32.283,2048,12.44,0.52,2.41\r\nregnety_002,224,63038.19,32.465,2048,3.16,0.2,2.17\r\nrepghostnet_058,224,62511.48,32.724,2048,2.54,0.06,2.59\r\nstarnet_s150,224,62502.66,32.724,2048,1.56,0.23,2.75\r\nlcnet_100,224,61225.21,33.43,2048,2.95,0.16,2.52\r\nshvit_s3,224,60969.63,33.571,2048,14.21,0.6,2.33\r\nmobilenetv2_035,224,59012.72,34.677,2048,1.68,0.07,2.86\r\nconvnext_zepto_rms,224,54577.36,37.485,2048,2.16,0.3,2.75\r\nlevit_conv_128,224,54573.56,37.508,2048,9.19,0.41,2.71\r\nmnasnet_050,224,53212.41,38.437,2048,2.22,0.11,3.07\r\nrepghostnet_080,224,51653.21,39.602,2048,3.27,0.1,3.22\r\nlevit_128,224,51460.75,39.754,2048,9.19,0.41,2.71\r\nhgnetv2_b0,224,51288.1,39.908,2048,6.0,0.33,2.12\r\nefficientvit_b0,224,49081.86,41.707,2048,3.41,0.1,2.87\r\nrepvgg_a0,224,49081.85,41.664,2048,8.31,1.36,1.79\r\nresnet10t,224,47389.3
1,43.178,2048,5.44,1.1,2.43\r\nconvnext_zepto_rms_ols,224,47312.12,43.235,2048,2.16,0.34,3.15\r\nregnetx_004,224,47281.84,43.292,2048,5.16,0.4,3.14\r\nmobilenetv2_050,224,46551.15,43.971,2048,1.97,0.1,3.64\r\nsemnasnet_050,224,46297.16,44.212,2048,2.08,0.11,3.44\r\nvit_small_patch32_224,224,46243.35,44.252,2048,22.88,1.15,2.5\r\nlevit_conv_192,224,45769.43,44.699,2048,10.92,0.66,3.2\r\nmobileone_s0,224,45703.24,44.778,2048,2.08,0.28,3.79\r\ngernet_s,224,45568.21,44.918,2048,8.17,0.75,2.65\r\nregnetx_004_tv,224,45533.32,44.943,2048,5.5,0.42,3.17\r\npit_ti_224,224,44428.3,46.047,2048,4.85,0.7,6.19\r\npit_ti_distilled_224,224,43875.2,46.636,2048,5.1,0.71,6.23\r\ncs3darknet_focus_s,256,43702.35,46.825,2048,3.27,0.69,2.7\r\nghostnetv3_050,224,43394.78,47.155,2048,2.85,0.05,2.28\r\nedgenext_xx_small,256,42976.98,47.621,2048,1.33,0.26,3.33\r\nlcnet_150,224,42785.56,47.836,2048,4.5,0.34,3.79\r\nresnet34,160,42577.43,48.056,2048,21.8,1.87,1.91\r\nrepghostnet_100,224,42047.44,48.672,2048,4.06,0.15,3.98\r\nfasternet_t1,224,42010.68,48.724,2048,7.6,0.85,3.15\r\nvit_tiny_r_s16_p8_224,224,41914.42,48.834,2048,6.34,0.44,2.06\r\nresnet14t,176,41493.87,49.326,2048,10.08,1.07,3.61\r\nmobilenetv4_conv_small,320,40827.48,50.14,2048,3.77,0.39,4.01\r\nmixer_s32_224,224,40597.34,50.397,2048,19.1,1.0,2.28\r\ntinynet_c,184,40556.79,50.443,2048,2.46,0.11,2.87\r\nlevit_192,224,40520.72,50.493,2048,10.92,0.66,3.2\r\ncs3darknet_s,256,40453.07,50.572,2048,3.28,0.72,2.97\r\nnf_regnet_b0,192,39018.19,52.428,2048,8.76,0.37,3.15\r\nstarnet_s2,224,38852.75,52.676,2048,3.68,0.55,4.73\r\nhgnetv2_b1,224,37486.53,54.611,2048,6.34,0.49,2.73\r\nrepghostnet_111,224,37364.22,54.762,2048,4.52,0.18,4.38\r\nstarnet_s1,224,37305.66,54.847,2048,2.87,0.42,4.99\r\nresnetv2_18,224,37140.96,55.114,2048,11.69,1.82,2.48\r\ntf_mobilenetv3_large_minimal_100,224,36956.25,55.395,2048,3.92,0.22,4.4\r\nconvnext_atto_rms,224,36648.59,55.843,2048,3.69,0.55,3.81\r\nrepvgg_a1,224,36420.61,56.178,2048,12.79,2.36,2.37\r\nmobilenet
v3_large_075,224,36356.08,56.292,2048,3.99,0.16,4.0\r\nconvnext_atto,224,36275.29,56.427,2048,3.7,0.55,3.81\r\nmnasnet_075,224,36157.01,56.615,2048,3.17,0.23,4.77\r\nresnet18,224,35806.38,57.166,2048,11.69,1.82,2.48\r\nghostnet_100,224,34984.54,58.498,2048,5.18,0.15,3.55\r\nregnety_004,224,34695.18,58.972,2048,4.34,0.41,3.89\r\ntf_mobilenetv3_large_075,224,34056.68,60.114,2048,3.99,0.16,4.0\r\nconvnext_atto_ols,224,34001.86,60.194,2048,3.7,0.58,4.11\r\nxcit_nano_12_p16_224,224,33841.64,60.484,2048,3.05,0.56,4.17\r\nlevit_conv_256,224,33744.85,60.657,2048,18.86,1.13,4.23\r\ninception_next_atto,224,33724.45,60.693,2048,4.16,0.5,3.63\r\nmobilenetv1_100,224,33342.13,61.39,2048,4.23,0.58,5.04\r\nedgenext_xx_small,288,33058.48,61.919,2048,1.33,0.33,4.21\r\nresnetv2_18d,224,32972.22,62.086,2048,11.71,2.06,3.29\r\nvit_tiny_patch16_224,224,32856.62,62.271,2048,5.72,1.26,5.97\r\nvit_medium_patch32_clip_224,224,32792.95,62.418,2048,39.69,2.0,3.34\r\nmobilenetv1_100h,224,32745.83,62.511,2048,5.28,0.63,5.09\r\ndeit_tiny_patch16_224,224,32729.68,62.532,2048,5.72,1.26,5.97\r\nmobilenetv3_rw,224,32648.08,62.685,2048,5.48,0.23,4.41\r\ndeit_tiny_distilled_patch16_224,224,32606.69,62.774,2048,5.91,1.27,6.01\r\nconvnextv2_atto,224,32574.98,62.841,2048,3.71,0.55,3.81\r\nregnety_006,224,32558.52,62.85,2048,6.06,0.61,4.33\r\nmobilenetv3_large_100,224,32498.9,62.985,2048,5.48,0.23,4.41\r\nlegacy_seresnet18,224,32378.58,63.212,2048,11.78,1.82,2.49\r\nseresnet18,224,32283.44,63.405,2048,11.78,1.82,2.49\r\nhardcorenas_a,224,32142.64,63.689,2048,5.26,0.23,4.38\r\ntf_efficientnetv2_b0,192,31967.39,64.018,2048,7.14,0.54,3.51\r\nshvit_s4,256,31961.39,64.04,2048,16.55,0.99,3.73\r\nresnet18d,224,31733.63,64.511,2048,11.71,2.06,3.29\r\nhardcorenas_b,224,31544.25,64.878,2048,5.18,0.26,5.09\r\nese_vovnet19b_slim_dw,224,31405.53,65.176,2048,1.9,0.4,5.28\r\nmnasnet_100,224,31355.59,65.266,2048,4.38,0.33,5.46\r\nrepghostnet_130,224,31018.0,65.996,2048,5.46,0.24,5.24\r\nconvnext_femto,224,30931.08,66.176,
2048,5.22,0.79,4.57\r\nhgnetv2_b0,288,30909.53,66.21,2048,6.0,0.54,3.51\r\nhardcorenas_c,224,30525.78,67.067,2048,5.52,0.28,5.01\r\nmobilenetv2_075,224,30411.19,50.467,1536,2.64,0.22,5.86\r\nsemnasnet_075,224,30292.8,50.674,1536,2.91,0.23,5.54\r\nlevit_256,224,30292.79,67.567,2048,18.86,1.13,4.23\r\nmobilenet_edgetpu_v2_xs,224,30279.79,67.592,2048,4.46,0.7,4.8\r\nvit_xsmall_patch16_clip_224,224,30258.16,67.648,2048,8.28,1.79,6.65\r\ntf_mobilenetv3_large_100,224,30071.27,68.042,2048,5.48,0.23,4.41\r\ndla46_c,224,29862.89,68.54,2048,1.3,0.58,4.5\r\nregnetx_008,224,29813.73,68.66,2048,7.26,0.81,5.15\r\nconvnext_femto_ols,224,29156.97,70.204,2048,5.23,0.82,4.87\r\nlevit_conv_256d,224,29014.02,70.546,2048,26.16,1.39,4.93\r\nrepvgg_b0,224,28692.97,71.33,2048,14.34,3.06,3.07\r\nmobilenetv4_conv_medium,224,28664.72,71.404,2048,9.72,0.84,5.8\r\nspnasnet_100,224,28437.83,71.962,2048,4.42,0.35,6.03\r\nconvnext_atto_rms,256,28134.84,72.756,2048,3.69,0.71,4.98\r\nstarnet_s3,224,27951.23,73.222,2048,5.75,0.76,6.66\r\ncs3darknet_focus_s,320,27891.21,73.394,2048,3.27,1.08,4.22\r\nconvnextv2_femto,224,27662.28,74.002,2048,5.23,0.79,4.57\r\nmobilenet_edgetpu_100,224,27630.02,74.085,2048,4.09,1.0,5.75\r\nrepghostnet_150,224,27627.1,74.082,2048,6.55,0.31,6.0\r\nefficientformerv2_s0,224,27543.41,74.313,2048,3.6,0.41,5.3\r\npit_xs_224,224,27366.88,74.799,2048,10.62,1.4,7.71\r\nhardcorenas_d,224,27265.86,75.068,2048,7.5,0.3,4.93\r\nregnety_008,224,27199.47,75.256,2048,6.26,0.81,5.25\r\nese_vovnet19b_slim,224,27194.69,75.276,2048,3.17,1.69,3.52\r\nmobileone_s1,224,27108.76,75.505,2048,4.76,0.83,6.27\r\npit_xs_distilled_224,224,26960.12,75.922,2048,11.0,1.41,7.76\r\ntinynet_b,188,26730.23,76.584,2048,3.73,0.21,4.44\r\nlevit_256d,224,26637.46,76.844,2048,26.16,1.39,4.93\r\nghostnet_130,224,26570.53,77.043,2048,7.36,0.24,4.6\r\nsemnasnet_100,224,26530.76,57.87,1536,3.89,0.32,6.23\r\nmobilenetv2_100,224,26492.7,57.955,1536,3.5,0.31,6.68\r\nmobilenetv4_hybrid_medium_075,224,26482.86,77.291,2048
,7.31,0.66,5.65\r\nrepvit_m1,224,26333.95,77.727,2048,5.07,0.82,6.17\r\nrepvit_m0_9,224,26279.47,77.902,2048,5.07,0.82,6.17\r\npvt_v2_b0,224,25867.61,79.139,2048,3.67,0.57,7.99\r\nefficientnet_lite0,224,25838.23,59.421,1536,4.65,0.4,6.74\r\nregnety_008_tv,224,25831.72,79.249,2048,6.43,0.84,5.42\r\nfbnetc_100,224,25767.12,59.561,1536,5.57,0.4,6.51\r\nhrnet_w18_small,224,25721.92,79.593,2048,13.19,1.61,5.72\r\nresnet14t,224,25715.87,79.603,2048,10.08,1.69,5.8\r\nmobilenetv1_100,256,25627.19,59.908,1536,4.23,0.76,6.59\r\nskresnet18,224,25603.46,79.952,2048,11.96,1.82,3.24\r\ntf_efficientnet_lite0,224,25314.13,60.638,1536,4.65,0.4,6.74\r\nfasternet_t2,224,25301.63,80.91,2048,14.98,1.91,4.73\r\ntf_efficientnetv2_b1,192,25262.07,81.045,2048,8.14,0.76,4.59\r\nresnetblur18,224,25175.95,81.305,2048,11.69,2.34,3.39\r\nmobilenetv1_100h,256,25169.3,61.004,1536,5.28,0.82,6.65\r\nmobilevit_xxs,256,25022.44,81.801,2048,1.27,0.42,8.34\r\nmobilenetv3_large_100,256,24935.46,61.554,1536,5.48,0.29,5.75\r\nmobilenetv1_125,224,24167.26,84.713,2048,6.27,0.89,6.3\r\nregnetx_006,224,24129.55,84.841,2048,6.2,0.61,3.98\r\nresnet50,160,24065.29,85.067,2048,25.56,2.1,5.67\r\nhgnetv2_b2,224,23909.45,85.616,2048,11.22,1.15,4.12\r\nhardcorenas_f,224,23648.8,86.565,2048,8.2,0.35,5.57\r\nhardcorenas_e,224,23555.32,86.906,2048,8.07,0.35,5.65\r\nedgenext_x_small,256,23417.67,87.424,2048,2.34,0.54,5.93\r\ngmlp_ti16_224,224,23269.39,87.964,2048,5.87,1.34,7.55\r\ntf_efficientnetv2_b0,224,23266.95,87.988,2048,7.14,0.73,4.77\r\nconvnext_pico,224,23255.03,88.025,2048,9.05,1.37,6.1\r\nswiftformer_xs,224,23186.96,88.275,2048,3.48,0.61,6.45\r\nvit_betwixt_patch32_clip_224,224,22811.9,89.725,2048,61.41,3.09,4.17\r\nresnet50d,160,22654.85,90.376,2048,25.58,2.22,6.08\r\nresnetv2_18,288,22426.03,91.299,2048,11.69,3.0,4.11\r\nhgnetv2_b1,288,22421.06,91.31,2048,6.34,0.82,4.51\r\nconvnext_pico_ols,224,22075.65,92.731,2048,9.06,1.43,6.5\r\nmobilenetv4_conv_medium,256,21964.64,93.205,2048,9.72,1.1,7.58\r\nrepvit_m1_0,2
24,21874.79,93.585,2048,6.81,1.11,7.19\r\nconvnext_atto,288,21873.46,93.59,2048,3.7,0.91,6.3\r\nnf_regnet_b0,256,21870.14,93.603,2048,8.76,0.64,5.58\r\nmobileone_s2,224,21864.37,93.633,2048,7.81,1.3,7.56\r\nmobilenetv4_hybrid_medium,224,21811.04,93.858,2048,11.07,0.98,6.84\r\neva02_tiny_patch14_224,224,21806.51,93.88,2048,5.5,1.7,9.14\r\nmnasnet_140,224,21781.25,93.99,2048,7.12,0.6,7.71\r\ngernet_m,224,21705.92,94.316,2048,21.14,3.02,5.24\r\nghostnetv2_100,224,21603.47,94.763,2048,6.16,0.18,4.55\r\ntinynet_a,192,21555.0,94.969,2048,6.19,0.35,5.41\r\nresnet18,288,21514.97,95.161,2048,11.69,3.01,4.11\r\nresnet34,224,21510.62,95.161,2048,21.8,3.67,3.74\r\nresnetv2_34,224,21443.95,95.477,2048,21.8,3.67,3.74\r\ncrossvit_tiny_240,240,21214.99,96.496,2048,7.01,1.57,9.08\r\ncrossvit_9_240,240,21181.89,96.644,2048,8.55,1.85,9.52\r\nghostnetv3_100,224,21106.75,96.992,2048,6.15,0.17,4.55\r\nefficientformer_l1,224,21100.92,97.017,2048,12.29,1.3,5.53\r\nmobilenet_edgetpu_v2_s,224,21042.97,97.28,2048,5.99,1.21,6.6\r\nrepghostnet_200,224,20925.89,97.813,2048,9.77,0.53,7.96\r\nrepvit_m2,224,20763.44,98.595,2048,8.24,1.34,7.82\r\nrepvit_m1_1,224,20749.7,98.664,2048,8.24,1.34,7.82\r\nconvnextv2_pico,224,20736.97,98.722,2048,9.07,1.37,6.1\r\nxcit_tiny_12_p16_224,224,20719.0,98.786,2048,6.72,1.24,6.29\r\nlevit_conv_384,224,20672.79,99.029,2048,39.07,2.35,6.26\r\ncs3darknet_focus_m,256,20618.33,99.288,2048,9.3,1.98,4.89\r\nmobilenetv4_conv_blur_medium,224,20489.53,49.924,1024,9.72,1.22,8.58\r\nmobilevitv2_050,256,20448.26,75.086,1536,1.37,0.48,8.04\r\nmambaout_femto,224,20409.15,100.304,2048,7.3,1.16,8.34\r\nconvnext_atto_ols,288,20319.48,100.742,2048,3.7,0.96,6.8\r\nmobilenetv2_110d,224,20222.1,75.913,1536,4.52,0.45,8.71\r\nrexnetr_100,224,20106.75,76.369,1536,4.88,0.43,7.72\r\nresnet34d,224,19999.11,102.352,2048,21.82,3.91,4.54\r\nresnetv2_34d,224,19929.38,102.732,2048,21.82,3.91,4.54\r\nresnetv2_18d,288,19924.41,102.76,2048,11.71,3.4,5.43\r\ncs3darknet_m,256,19827.92,103.251,2048,9.3
1,2.08,5.28\r\nefficientvit_b1,224,19789.45,103.453,2048,9.1,0.53,7.25\r\nconvnextv2_atto,288,19779.66,103.505,2048,3.71,0.91,6.3\r\nrexnet_100,224,19776.57,77.61,1536,4.8,0.41,7.44\r\nstarnet_s4,224,19766.04,103.564,2048,7.48,1.05,9.56\r\nregnetz_005,224,19570.55,104.627,2048,7.12,0.52,5.86\r\nrepvgg_a2,224,19503.08,104.951,2048,25.5,5.12,3.13\r\nseresnet18,288,19500.02,104.999,2048,11.78,3.01,4.11\r\nresnet26,224,19366.41,105.708,2048,16.0,2.36,7.35\r\nseresnet34,224,19357.87,105.754,2048,21.96,3.67,3.74\r\nresnet50,176,19353.23,105.786,2048,25.56,2.62,6.92\r\nefficientformerv2_s1,224,19326.9,105.92,2048,6.19,0.67,7.66\r\nlegacy_seresnet34,224,19308.37,106.026,2048,21.96,3.67,3.74\r\nfbnetv3_b,224,19292.83,106.112,2048,8.6,0.42,6.97\r\nresnext50_32x4d,160,19176.82,106.769,2048,25.03,2.17,7.35\r\nseresnet50,160,19137.8,106.969,2048,28.09,2.1,5.69\r\ncrossvit_9_dagger_240,240,19080.66,107.3,2048,8.78,1.99,9.97\r\nresnet18d,288,19003.71,107.731,2048,11.71,3.41,5.43\r\ndla34,224,18995.93,107.774,2048,15.74,3.07,5.02\r\nselecsls42,224,18789.77,108.968,2048,30.35,2.94,4.62\r\nhgnetv2_b3,224,18764.86,109.099,2048,16.29,1.78,5.07\r\nselecsls42b,224,18752.85,109.185,2048,32.46,2.98,4.62\r\ntf_efficientnetv2_b2,208,18740.53,109.221,2048,10.1,1.06,6.0\r\nswiftformer_s,224,18682.67,109.559,2048,6.09,0.99,7.81\r\nconvnext_femto,288,18616.42,109.975,2048,5.22,1.3,7.56\r\nmobilenetv1_125,256,18556.24,82.744,1536,6.27,1.16,8.23\r\nsemnasnet_140,224,18547.44,55.188,1024,6.11,0.6,8.87\r\necaresnet50t,160,18372.29,111.427,2048,25.57,2.21,6.04\r\nmobilenetv2_140,224,18233.91,56.122,1024,6.11,0.6,9.57\r\nedgenext_x_small,288,18206.37,112.439,2048,2.34,0.68,7.5\r\nnf_resnet26,224,18090.6,113.167,2048,16.0,2.41,7.35\r\nese_vovnet19b_dw,224,17988.26,113.82,2048,6.54,1.34,8.25\r\nresnetrs50,160,17909.85,114.318,2048,35.69,2.29,6.2\r\nvit_base_patch32_clip_quickgelu_224,224,17781.06,115.154,2048,87.85,4.41,5.01\r\nlevit_384,224,17759.2,115.278,2048,39.07,2.35,6.26\r\nvit_base_patch32_clip_
224,224,17710.35,115.613,2048,88.22,4.41,5.01\r\nresnet26d,224,17668.78,115.876,2048,16.01,2.6,8.15\r\nresnetaa34d,224,17534.79,116.745,2048,21.82,4.43,5.07\r\nconvnext_femto_ols,288,17532.61,116.767,2048,5.23,1.35,8.06\r\nefficientnet_es_pruned,224,17451.24,117.322,2048,5.44,1.81,8.73\r\nefficientnet_es,224,17420.6,117.522,2048,5.44,1.81,8.73\r\nmobileone_s3,224,17341.83,118.05,2048,10.08,1.9,9.13\r\nefficientnet_b0,224,17308.96,88.71,1536,5.29,0.4,6.75\r\nvit_base_patch32_224,224,17167.46,119.257,2048,88.22,4.41,5.01\r\nefficientnet_lite1,240,17130.11,89.642,1536,5.42,0.62,10.14\r\nmambaout_kobe,224,17121.06,119.583,2048,9.14,1.52,10.0\r\ntf_efficientnet_es,224,17109.2,119.638,2048,5.44,1.81,8.73\r\ntf_efficientnet_lite1,240,17051.02,60.014,1024,5.42,0.62,10.14\r\ntiny_vit_5m_224,224,17030.77,120.195,2048,12.08,1.27,11.25\r\npoolformerv2_s12,224,16966.85,120.672,2048,11.89,1.83,5.53\r\nnf_seresnet26,224,16899.42,121.148,2048,17.4,2.41,7.36\r\nnf_ecaresnet26,224,16887.99,121.222,2048,16.0,2.41,7.36\r\nconvnextv2_femto,288,16887.07,121.24,2048,5.23,1.3,7.56\r\nresmlp_12_224,224,16638.58,123.034,2048,15.35,3.01,5.5\r\nmobilenetv4_hybrid_medium,256,16563.66,123.61,2048,11.07,1.29,9.01\r\nfbnetv3_d,224,16524.98,123.883,2048,10.31,0.52,8.5\r\ndarknet17,256,16491.37,124.148,2048,14.3,3.26,7.18\r\nmobilenet_edgetpu_v2_m,224,16473.36,124.281,2048,8.46,1.85,8.15\r\nghostnetv2_130,224,16448.06,124.48,2048,8.96,0.28,5.9\r\nselecsls60,224,16421.5,124.671,2048,30.67,3.59,5.52\r\nselecsls60b,224,16358.63,125.161,2048,32.77,3.63,5.52\r\nnf_regnet_b2,240,16305.32,125.557,2048,14.31,0.97,7.23\r\nnf_regnet_b1,256,16292.23,125.664,2048,10.22,0.82,7.27\r\npit_s_distilled_224,224,16284.22,125.713,2048,24.04,2.9,11.64\r\ncs3darknet_focus_m,288,16226.59,126.17,2048,9.3,2.51,6.19\r\npit_s_224,224,16057.12,127.497,2048,23.46,2.88,11.56\r\nghostnetv3_130,224,16045.06,127.597,2048,8.95,0.28,5.9\r\ngmixer_12_224,224,15945.92,128.396,2048,12.7,2.67,7.26\r\nmobilenetv4_conv_blur_medium,256,1590
3.19,48.263,768,9.72,1.59,11.2\r\npoolformer_s12,224,15837.92,129.271,2048,11.92,1.82,5.53\r\nvit_small_patch32_384,384,15705.19,130.358,2048,22.92,3.45,8.25\r\nresnext50_32x4d,176,15628.1,131.022,2048,25.03,2.71,8.97\r\ncs3darknet_m,288,15600.99,131.231,2048,9.31,2.63,6.69\r\nhgnetv2_b4,224,15562.91,131.549,2048,19.8,2.75,6.7\r\ntf_efficientnetv2_b1,240,15495.16,132.123,2048,8.14,1.21,7.34\r\nefficientvit_b1,256,15488.84,99.132,1536,9.1,0.69,9.46\r\nedgenext_small,256,15468.84,132.351,2048,5.59,1.26,9.07\r\nconvnext_nano,224,15345.95,133.418,2048,15.59,2.46,8.37\r\nmobilenetv4_conv_aa_medium,256,15336.7,133.493,2048,9.72,1.58,10.3\r\nresnetblur18,288,15227.63,100.821,1536,11.69,3.87,5.6\r\nefficientnet_b1_pruned,240,15211.9,134.586,2048,6.33,0.4,6.21\r\nskresnet34,224,15186.54,134.82,2048,22.28,3.67,5.13\r\nrexnetr_130,224,15168.12,67.488,1024,7.61,0.68,9.81\r\nmixnet_s,224,15122.76,101.531,1536,4.13,0.25,6.25\r\nmixer_b32_224,224,15057.01,135.961,2048,60.29,3.24,6.29\r\nresnet101,160,15013.07,136.38,2048,44.55,4.0,8.28\r\nmobilenetv2_120d,224,14965.79,68.385,1024,5.83,0.69,11.97\r\ndarknet21,256,14943.8,136.991,2048,20.86,3.93,7.47\r\nfbnetv3_b,256,14812.45,103.652,1536,8.6,0.55,9.1\r\nefficientnet_b0_gn,224,14720.27,104.312,1536,5.29,0.42,6.75\r\nfastvit_t8,256,14602.11,140.22,2048,4.0,0.69,6.59\r\nmobilenet_edgetpu_v2_l,224,14549.42,140.713,2048,10.92,2.55,9.05\r\ntf_efficientnet_b0,224,14529.45,105.688,1536,5.29,0.4,6.75\r\nvisformer_tiny,224,14526.39,140.935,2048,10.32,1.27,5.72\r\nswiftformer_l1,224,14482.47,141.356,2048,12.06,1.6,10.07\r\nmixer_s16_224,224,14408.7,142.1,2048,18.53,3.79,5.97\r\ntf_mixnet_s,224,14358.3,106.948,1536,4.13,0.25,6.25\r\nhgnetv2_b2,288,14319.59,142.978,2048,11.22,1.89,6.8\r\nrepvit_m3,224,14297.01,143.188,2048,10.12,1.86,11.43\r\ntiny_vit_11m_224,224,14241.35,143.75,2048,20.35,2.03,13.49\r\nrexnet_130,224,14141.93,72.346,1024,7.56,0.68,9.71\r\ndpn48b,224,14122.26,144.976,2048,9.13,1.69,8.92\r\ngernet_l,256,14109.74,145.102,2048,31.
08,4.57,8.0\r\nconvnext_pico,288,14017.71,146.057,2048,9.05,2.27,10.08\r\nresnext26ts,256,13940.58,146.881,2048,10.3,2.43,10.52\r\nmobilenetv4_conv_medium,320,13921.24,110.281,1536,9.72,1.71,11.84\r\nconvnextv2_nano,224,13864.06,147.688,2048,15.62,2.46,8.37\r\nvit_base_patch32_clip_256,256,13842.81,147.911,2048,87.86,5.76,6.65\r\nvit_wee_patch16_reg1_gap_256,256,13812.55,148.217,2048,13.42,3.83,13.9\r\nvit_tiny_r_s16_p8_384,384,13811.7,148.222,2048,6.36,1.34,6.49\r\nvit_small_patch16_224,224,13792.41,148.443,2048,22.05,4.61,11.95\r\ndeit_small_patch16_224,224,13790.5,148.469,2048,22.05,4.61,11.95\r\ndeit3_small_patch16_224,224,13736.56,149.038,2048,22.06,4.61,11.95\r\nvit_base_patch32_siglip_gap_256,256,13710.66,149.335,2048,87.47,5.67,6.54\r\nresnet26t,256,13643.4,150.052,2048,16.01,3.35,10.52\r\ndeit_small_distilled_patch16_224,224,13627.85,150.233,2048,22.44,4.63,12.02\r\nrexnetr_150,224,13624.41,75.131,1024,9.78,0.89,11.13\r\nmobilevitv2_075,256,13542.49,75.581,1024,2.87,1.05,12.06\r\nvit_base_patch32_siglip_256,256,13541.85,151.198,2048,94.55,5.75,6.64\r\nfasternet_s,224,13487.93,151.798,2048,31.18,4.56,7.93\r\nefficientnet_b0,256,13356.28,76.625,1024,5.29,0.52,8.81\r\nefficientnet_lite2,260,13354.65,76.65,1024,6.09,0.89,12.9\r\nghostnetv2_160,224,13322.53,153.685,2048,12.39,0.42,7.23\r\nconvnext_nano_ols,224,13320.81,153.699,2048,15.65,2.65,9.38\r\nconvnext_pico_ols,288,13258.81,154.423,2048,9.06,2.37,10.74\r\nsedarknet21,256,13252.49,154.505,2048,20.95,3.93,7.47\r\nrepvgg_b1g4,224,13237.52,154.674,2048,36.13,7.31,5.32\r\nlegacy_seresnext26_32x4d,224,13151.64,155.671,2048,16.79,2.49,9.39\r\ntf_efficientnet_lite2,260,13113.89,78.041,1024,6.09,0.89,12.9\r\nefficientnet_blur_b0,224,13077.97,117.414,1536,5.29,0.43,8.72\r\nvit_pwee_patch16_reg1_gap_256,256,13056.78,156.803,2048,15.25,4.37,15.87\r\nghostnetv3_160,224,12943.08,158.188,2048,12.38,0.41,7.23\r\nresnet34,288,12931.77,158.312,2048,21.8,6.07,6.18\r\nresnetv2_34,288,12889.32,158.862,2048,21.8,6.07,6.18\r\nn
f_regnet_b1,288,12820.14,159.694,2048,10.22,1.02,9.2\r\nresnest14d,224,12769.08,160.325,2048,10.61,2.76,7.33\r\nnf_regnet_b2,272,12745.65,160.628,2048,14.31,1.22,9.27\r\nhrnet_w18_small_v2,224,12742.7,160.676,2048,15.6,2.62,9.65\r\ngcresnext26ts,256,12739.81,160.716,2048,10.48,2.43,10.53\r\nfbnetv3_d,256,12697.43,120.92,1536,10.31,0.68,11.1\r\nrepvit_m1_5,224,12690.45,161.335,2048,14.05,2.27,12.84\r\necaresnet50d_pruned,224,12683.98,161.415,2048,19.94,2.53,6.43\r\nseresnext26ts,256,12662.81,161.697,2048,10.39,2.43,10.52\r\nvit_small_patch16_rope_ape_224,224,12635.48,162.017,2048,22.06,4.61,11.95\r\npvt_v2_b1,224,12632.54,81.028,1024,14.01,2.12,15.39\r\nconvnextv2_pico,288,12630.17,162.109,2048,9.07,2.27,10.08\r\nvit_small_patch16_rope_224,224,12628.73,162.108,2048,21.98,4.61,11.95\r\ncs3darknet_focus_l,256,12614.58,162.311,2048,21.15,4.66,8.03\r\nmobilenetv4_conv_large,256,12611.01,162.347,2048,32.59,2.86,12.14\r\nmobilenet_edgetpu_v2_m,256,12604.09,162.45,2048,8.46,2.42,10.65\r\neca_resnext26ts,256,12599.17,162.513,2048,10.3,2.43,10.52\r\nrexnet_150,224,12543.57,81.606,1024,9.73,0.9,11.21\r\nbotnet26t_256,256,12529.35,163.415,2048,12.49,3.32,11.98\r\necaresnext50t_32x4d,224,12504.85,163.737,2048,15.41,2.7,10.09\r\necaresnext26t_32x4d,224,12492.15,163.892,2048,15.41,2.7,10.09\r\nseresnext26t_32x4d,224,12469.52,164.186,2048,16.81,2.7,10.09\r\nvit_relpos_small_patch16_224,224,12439.0,164.597,2048,21.98,4.59,13.05\r\nhalonet26t,256,12371.89,165.494,2048,12.48,3.19,11.69\r\nseresnext26d_32x4d,224,12314.42,166.263,2048,16.81,2.73,10.19\r\nefficientformerv2_s2,224,12309.98,166.328,2048,12.71,1.27,11.77\r\ncs3darknet_l,256,12291.61,166.565,2048,21.16,4.86,8.55\r\nvit_srelpos_small_patch16_224,224,12257.04,167.033,2048,21.97,4.59,12.16\r\nresnet101,176,12251.96,167.132,2048,44.55,4.92,10.08\r\nmobileone_s4,224,12246.47,167.185,2048,14.84,2.98,11.81\r\ncsatv2,512,12197.14,167.854,2048,11.1,1.39,9.17\r\nresnet50,224,12191.9,167.945,2048,25.56,4.11,11.11\r\nefficientnet_b1,224
,12087.46,127.034,1536,7.79,0.59,9.36\r\nflexivit_small,240,12058.97,169.789,2048,22.06,5.35,14.18\r\nresnet34d,288,12030.65,170.186,2048,21.82,6.47,7.51\r\ndpn68,224,12005.31,170.548,2048,12.61,2.35,10.47\r\necaresnetlight,224,12004.5,170.555,2048,30.16,4.11,8.42\r\nefficientvit_b1,288,11984.07,128.135,1536,9.1,0.87,11.96\r\nresnetv2_34d,288,11975.91,170.975,2048,21.82,6.46,7.51\r\nhgnet_tiny,224,11889.31,172.207,2048,14.74,4.54,6.36\r\nmobilenetv3_large_150d,256,11831.74,86.511,1024,14.62,1.03,12.35\r\nmobilenetv4_hybrid_large_075,256,11785.57,173.729,2048,22.75,2.06,11.64\r\nregnetz_005,288,11767.91,174.002,2048,7.12,0.86,9.68\r\ndla46x_c,224,11752.97,174.206,2048,1.07,0.54,5.66\r\nmobilevit_xs,256,11749.95,65.321,768,2.32,1.05,16.33\r\nresnet26,288,11719.14,174.705,2048,16.0,3.9,12.15\r\neca_botnext26ts_256,256,11708.41,174.878,2048,10.59,2.46,11.6\r\nresnet50c,224,11699.57,174.999,2048,25.58,4.35,11.92\r\ndpn68b,224,11669.77,175.439,2048,12.61,2.35,10.47\r\nseresnet34,288,11641.87,175.879,2048,21.96,6.07,6.18\r\nresnet32ts,256,11638.23,175.929,2048,17.96,4.63,11.58\r\neca_nfnet_l0,224,11592.86,176.619,2048,24.14,4.35,10.47\r\ndla60,224,11576.19,176.871,2048,22.04,4.26,10.16\r\ngc_efficientnetv2_rw_t,224,11568.46,176.99,2048,13.68,1.94,9.97\r\nvit_small_resnet26d_224,224,11542.56,177.374,2048,63.61,5.07,11.12\r\nresnet50t,224,11527.04,177.639,2048,25.57,4.32,11.82\r\nvit_relpos_small_patch16_rpn_224,224,11516.68,177.792,2048,21.97,4.59,13.05\r\nresnet33ts,256,11472.36,178.464,2048,19.68,4.76,11.66\r\nnfnet_l0,224,11470.03,178.506,2048,35.07,4.36,10.47\r\nlevit_conv_512,224,11460.57,178.654,2048,95.08,5.62,10.22\r\ndla60x_c,224,11446.19,178.882,2048,1.32,0.59,6.01\r\nresnet50d,224,11434.78,179.049,2048,25.58,4.35,11.92\r\ntresnet_m,224,11418.31,179.327,2048,31.39,5.75,7.31\r\nxcit_nano_12_p16_384,384,11417.56,179.312,2048,3.05,1.64,12.15\r\ncoat_lite_tiny,224,11387.93,179.786,2048,5.72,1.6,11.65\r\neca_halonext26ts,256,11356.12,180.298,2048,10.76,2.44,11.46\r\nxc
it_tiny_24_p16_224,224,11320.25,180.852,2048,12.12,2.34,11.82\r\nefficientnetv2_rw_t,224,11290.98,181.337,2048,13.65,1.93,9.94\r\ntf_efficientnetv2_b2,260,11282.29,181.476,2048,10.1,1.72,9.84\r\nresnetv2_50,224,11278.47,181.551,2048,25.55,4.11,11.11\r\nhgnetv2_b3,288,11240.0,182.16,2048,16.29,2.94,8.38\r\necaresnet26t,256,11210.2,182.634,2048,16.01,3.35,10.53\r\nvit_dwee_patch16_reg1_gap_256,256,11185.41,183.066,2048,13.43,3.83,17.6\r\nresnext26ts,288,11069.31,184.987,2048,10.3,3.07,13.31\r\nbat_resnext26ts,256,11037.11,185.508,2048,10.73,2.53,12.51\r\nwide_resnet50_2,176,10954.87,186.888,2048,68.88,7.29,8.97\r\nconvit_tiny,224,10897.25,187.896,2048,5.71,1.26,7.94\r\nvit_small_patch16_rope_mixed_ape_224,224,10880.57,188.17,2048,22.06,4.61,12.85\r\nvit_small_patch16_rope_mixed_224,224,10879.18,188.201,2048,21.99,4.61,12.85\r\nese_vovnet19b_dw,288,10861.74,141.373,1536,6.54,2.22,13.63\r\nvit_small_r26_s32_224,224,10858.4,188.559,2048,36.43,3.56,9.85\r\nresnetaa50,224,10849.85,188.696,2048,25.56,5.15,11.64\r\nedgenext_small_rw,256,10735.55,190.717,2048,7.83,1.58,9.51\r\nresnet26d,288,10661.77,192.029,2048,16.01,4.29,13.48\r\nresnetv2_50d,224,10656.2,192.145,2048,25.57,4.35,11.92\r\nresnetv2_50t,224,10655.0,192.179,2048,25.57,4.32,11.82\r\nvgg11,224,10650.45,192.237,2048,132.86,7.61,7.44\r\nresnet152,160,10614.85,192.909,2048,60.19,5.9,11.51\r\nvgg11_bn,224,10612.05,192.926,2048,132.87,7.62,7.44\r\nefficientnet_b0_g16_evos,224,10610.36,192.973,2048,8.11,1.01,7.42\r\nefficientvit_b2,224,10582.82,145.1,1536,24.33,1.6,14.62\r\nefficientnet_em,240,10579.31,193.542,2048,6.9,3.04,14.34\r\nlevit_conv_512d,224,10571.77,193.672,2048,92.39,5.84,11.3\r\nresnetaa34d,288,10570.47,193.704,2048,21.82,7.33,8.38\r\nvit_base_patch32_plus_256,256,10565.41,193.788,2048,119.48,7.79,7.76\r\nvovnet39a,224,10522.3,194.581,2048,22.6,7.09,6.73\r\ncoat_lite_mini,224,10502.96,194.95,2048,11.01,2.0,12.25\r\ngcresnet33ts,256,10460.86,195.734,2048,19.88,4.76,11.68\r\nfastvit_t12,256,10448.12,146.951,
1536,7.51,1.39,9.57\r\nvit_dpwee_patch16_reg1_gap_256,256,10443.06,196.083,2048,15.25,4.37,19.05\r\nnf_ecaresnet50,224,10438.79,196.148,2048,25.56,4.21,11.13\r\ntf_efficientnet_em,240,10435.45,196.205,2048,6.9,3.04,14.34\r\nres2net50_48w_2s,224,10426.7,196.375,2048,25.29,4.18,11.72\r\nregnetx_016,224,10396.83,196.947,2048,9.19,1.62,7.93\r\nnf_seresnet50,224,10389.72,197.069,2048,28.09,4.21,11.13\r\nefficientnet_b1,240,10357.68,148.258,1536,7.79,0.71,10.88\r\ncoatnext_nano_rw_224,224,10342.13,197.983,2048,14.7,2.47,12.8\r\ngmlp_s16_224,224,10300.42,198.783,2048,19.42,4.42,15.1\r\nresnetblur50,224,10282.44,199.143,2048,25.56,5.16,12.02\r\nresnetaa50d,224,10268.81,199.374,2048,25.58,5.39,12.44\r\nresnet50_clip_gap,224,10265.18,199.455,2048,23.53,5.39,12.44\r\nvit_tiny_patch16_384,384,10263.66,199.488,2048,5.79,4.7,25.39\r\necaresnet101d_pruned,224,10245.39,199.84,2048,24.88,3.48,7.69\r\nmobilevitv2_100,256,10234.65,75.014,768,4.9,1.84,16.08\r\nefficientnet_b2_pruned,260,10226.72,200.208,2048,8.31,0.73,9.13\r\nseresnet33ts,256,10222.14,200.298,2048,19.78,4.76,11.66\r\neca_resnet33ts,256,10203.17,200.683,2048,19.68,4.76,11.66\r\ntf_efficientnetv2_b3,240,10178.31,201.143,2048,14.36,1.93,9.95\r\nmobilenetv4_hybrid_medium,320,10172.75,150.951,1536,11.07,2.05,14.36\r\nlevit_512,224,10157.41,201.584,2048,95.08,5.62,10.22\r\ngcresnext26ts,288,10064.19,203.447,2048,10.48,3.07,13.33\r\nlegacy_seresnet50,224,10047.51,203.789,2048,28.09,3.88,10.6\r\ncs3sedarknet_l,256,10040.03,203.933,2048,21.91,4.86,8.56\r\nseresnext26ts,288,10007.18,204.603,2048,10.39,3.07,13.32\r\neca_resnext26ts,288,9989.08,204.973,2048,10.3,3.07,13.32\r\nese_vovnet39b,224,9963.52,205.496,2048,24.57,7.09,6.74\r\neca_vovnet39b,224,9961.36,205.554,2048,22.6,7.09,6.74\r\nresnet50s,224,9923.95,206.343,2048,25.68,5.47,13.52\r\ncs3darknet_focus_l,288,9923.04,206.344,2048,21.15,5.9,10.16\r\nregnety_016,224,9920.83,206.377,2048,11.2,1.63,8.04\r\nnf_regnet_b3,288,9914.1,206.524,2048,18.59,1.67,11.84\r\nselecsls84,224,9
913.66,206.549,2048,50.95,5.9,7.57\r\nrexnetr_200,224,9869.14,77.783,768,16.52,1.59,15.11\r\nedgenext_small,320,9833.56,208.221,2048,5.59,1.97,14.16\r\nskresnet50,224,9784.04,209.284,2048,25.8,4.11,12.5\r\nresnetblur50d,224,9752.56,209.965,2048,25.58,5.4,12.82\r\nresnext50_32x4d,224,9738.01,210.279,2048,25.03,4.26,14.4\r\nmambaout_femto,288,9731.7,210.402,2048,7.3,1.91,13.79\r\nseresnet50,224,9690.14,211.304,2048,28.09,4.11,11.13\r\nmobilenetv4_conv_medium,384,9687.14,105.665,1024,9.72,2.46,17.05\r\nmixnet_m,224,9682.39,211.474,2048,5.01,0.36,8.19\r\nrepvgg_b1,224,9667.78,211.777,2048,51.83,11.82,5.32\r\ncs3darknet_l,288,9665.14,211.859,2048,21.16,6.16,10.83\r\ndla60x,224,9652.78,212.117,2048,17.35,3.54,13.8\r\nfastvit_s12,256,9644.0,159.231,1536,9.43,1.8,10.82\r\nvit_relpos_base_patch32_plus_rpn_256,256,9548.12,214.456,2048,119.42,7.68,8.01\r\nresnet50_clip,224,9541.59,214.571,2048,38.32,6.14,12.98\r\nvit_medium_patch16_clip_224,224,9532.18,214.802,2048,38.59,8.0,15.93\r\nresnest26d,224,9530.9,214.833,2048,17.07,3.64,9.97\r\ncrossvit_small_240,240,9511.46,215.26,2048,26.86,5.63,18.17\r\nconvnext_tiny,224,9497.67,215.587,2048,28.59,4.47,13.44\r\ncoatnet_pico_rw_224,224,9497.34,53.887,512,10.85,2.05,14.62\r\ninception_v3,299,9497.22,215.588,2048,23.83,5.73,8.97\r\nvit_pe_core_tiny_patch16_384,384,9490.96,215.749,2048,6.14,4.74,25.62\r\ninception_next_tiny,224,9489.38,215.781,2048,28.06,4.19,11.98\r\ncspresnet50,256,9485.35,215.868,2048,21.62,4.54,11.5\r\nlevit_512d,224,9458.62,216.479,2048,92.39,5.84,11.3\r\nconvnext_tiny_hnf,224,9446.37,216.748,2048,28.59,4.47,13.44\r\nfastvit_sa12,256,9425.04,162.925,1536,11.55,1.94,11.24\r\neva02_tiny_patch14_336,336,9416.92,217.434,2048,5.76,4.68,27.16\r\ndensenet121,224,9388.33,218.098,2048,7.98,2.87,6.9\r\nhgnetv2_b4,288,9363.25,218.686,2048,19.8,4.54,11.08\r\nxcit_nano_12_p8_224,224,9349.11,219.005,2048,3.05,2.16,15.71\r\nskresnet50d,224,9312.31,219.857,2048,25.82,4.36,13.31\r\nefficientnet_b1,256,9304.56,110.02,1024,7.79,0.77
,12.22\r\necaresnet50t,224,9268.64,220.912,2048,25.57,4.32,11.83\r\nseresnet50t,224,9240.11,221.589,2048,28.1,4.32,11.83\r\neva02_small_patch14_224,224,9239.58,221.612,2048,21.62,6.14,18.28\r\nxcit_small_12_p16_224,224,9238.4,221.612,2048,26.25,4.82,12.58\r\nconvnext_nano,288,9227.79,221.896,2048,15.59,4.06,13.84\r\nese_vovnet39b_evos,224,9227.38,221.899,2048,24.58,7.07,6.74\r\nresnext50d_32x4d,224,9211.48,222.299,2048,25.05,4.5,15.2\r\nresnet32ts,288,9205.43,222.421,2048,17.96,5.86,14.65\r\nvit_base_resnet26d_224,224,9199.99,222.55,2048,101.4,6.97,13.16\r\necaresnet50d,224,9197.05,222.638,2048,25.58,4.35,11.93\r\ntf_efficientnet_b1,240,9189.7,111.399,1024,7.79,0.71,10.88\r\nresnet50_gn,224,9174.75,223.165,2048,25.56,4.14,11.11\r\nefficientformer_l3,224,9150.89,223.753,2048,31.41,3.93,12.01\r\nefficientnet_b0_g8_gn,224,9142.19,223.966,2048,6.56,0.66,6.75\r\nrexnet_200,224,9140.97,83.992,768,16.37,1.56,14.91\r\nvit_little_patch16_reg1_gap_256,256,9136.83,224.087,2048,22.52,6.27,18.06\r\ncspresnet50d,256,9133.66,224.185,2048,21.64,4.86,12.55\r\ndeit3_medium_patch16_224,224,9115.13,224.63,2048,38.85,8.0,15.93\r\nresnetrs50,224,9108.43,224.811,2048,35.69,4.48,12.14\r\nvit_little_patch16_reg4_gap_256,256,9092.25,225.191,2048,22.52,6.35,18.33\r\nresnet33ts,288,9076.26,225.59,2048,19.68,6.02,14.75\r\ncaformer_s18,224,9053.29,226.175,2048,26.34,4.13,19.39\r\ntf_mixnet_m,224,9053.08,226.183,2048,5.01,0.36,8.19\r\nfbnetv3_g,240,9019.69,170.248,1536,16.62,1.28,14.87\r\ncspresnet50w,256,8990.24,227.749,2048,28.12,5.04,12.19\r\nmobilevit_s,256,8966.22,85.597,768,5.58,2.03,19.94\r\ncrossvit_15_240,240,8920.72,229.528,2048,27.53,5.81,19.77\r\npoolformerv2_s24,224,8897.65,230.126,2048,21.34,3.42,10.68\r\ntwins_pcpvt_small,224,8848.14,231.409,2048,24.11,3.83,18.08\r\nconvformer_s18,224,8813.11,232.33,2048,26.77,3.96,15.82\r\nhaloregnetz_b,224,8791.06,232.919,2048,11.68,1.97,11.94\r\nefficientnet_b2,256,8772.92,116.666,1024,9.11,0.89,12.81\r\nconvnextv2_tiny,224,8749.09,234.039,2048,
28.64,4.47,13.44\r\nres2net50_26w_4s,224,8747.51,234.092,2048,25.7,4.28,12.61\r\nvovnet57a,224,8713.82,234.973,2048,36.64,8.95,7.52\r\nresnet26t,320,8710.2,235.07,2048,16.01,5.24,16.44\r\ndensenetblur121d,224,8695.78,235.466,2048,8.0,3.11,7.9\r\nregnetz_b16,224,8686.84,235.722,2048,9.72,1.45,9.95\r\nresnet152,176,8680.94,235.874,2048,60.19,7.22,13.99\r\ntiny_vit_21m_224,224,8672.85,177.051,1536,33.21,4.27,20.08\r\nresmlp_24_224,224,8620.73,237.533,2048,30.02,5.96,10.91\r\nhgnetv2_b5,224,8617.29,237.614,2048,39.57,6.56,11.19\r\ngcvit_xxtiny,224,8606.26,237.909,2048,12.0,2.14,15.36\r\nnf_resnet50,256,8605.62,237.933,2048,25.56,5.46,14.52\r\nswiftformer_l3,224,8563.66,239.083,2048,28.49,4.01,15.77\r\nregnetx_032,224,8558.44,239.259,2048,15.3,3.2,11.37\r\nmambaout_kobe,288,8445.64,242.444,2048,9.14,2.5,16.53\r\nres2net50_14w_8s,224,8445.34,242.429,2048,25.06,4.21,13.28\r\nconvnextv2_nano,288,8445.16,242.451,2048,15.62,4.06,13.84\r\ngcresnext50ts,256,8428.74,242.924,2048,15.67,3.75,15.46\r\nseresnetaa50d,224,8426.49,242.989,2048,28.11,5.4,12.46\r\nefficientvit_l1,224,8421.29,121.558,1024,52.65,5.27,15.85\r\nres2next50,224,8418.15,243.247,2048,24.67,4.2,13.71\r\ndla60_res2net,224,8410.03,243.467,2048,20.85,4.15,12.34\r\nsehalonet33ts,256,8403.96,243.67,2048,13.69,3.55,14.7\r\ndla60_res2next,224,8344.32,245.39,2048,17.03,3.49,13.17\r\ncrossvit_15_dagger_240,240,8342.32,245.451,2048,28.21,6.13,20.43\r\ngcresnet50t,256,8342.23,245.45,2048,25.9,5.42,14.67\r\nres2net50d,224,8328.72,245.856,2048,25.72,4.52,13.41\r\nresnetv2_50d_frn,224,8321.56,246.072,2048,25.59,4.33,11.92\r\nvit_relpos_medium_patch16_224,224,8320.39,246.101,2048,38.75,7.97,17.02\r\ngmixer_24_224,224,8304.2,246.569,2048,24.72,5.28,14.45\r\nvit_relpos_medium_patch16_cls_224,224,8295.96,246.814,2048,38.76,8.03,18.24\r\ngcresnet33ts,288,8273.2,247.5,2048,19.88,6.02,14.78\r\nresnetv2_50x1_bit,224,8249.82,248.217,2048,25.55,4.23,11.11\r\nvisformer_small,224,8235.46,248.633,2048,40.22,4.88,11.43\r\nedgenext_base,256,
8198.37,249.756,2048,18.51,3.85,15.58\r\nvit_srelpos_medium_patch16_224,224,8192.56,249.921,2048,38.74,7.96,16.21\r\nefficientvit_b2,256,8183.55,125.091,1024,24.33,2.09,19.03\r\nresnetv2_50d_evos,224,8177.93,250.396,2048,25.59,4.33,11.92\r\ncsatv2_21m,512,8169.54,250.644,2048,20.7,2.94,15.85\r\nregnetz_b16_evos,224,8169.35,250.666,2048,9.74,1.43,9.95\r\nresnetv2_50d_gn,224,8164.27,250.823,2048,25.57,4.38,11.92\r\ncoatnet_nano_rw_224,224,8116.78,252.269,2048,15.14,2.41,15.41\r\neca_resnet33ts,288,8098.75,252.83,2048,19.68,6.02,14.76\r\nseresnet33ts,288,8097.68,252.853,2048,19.78,6.02,14.76\r\nhgnet_small,224,8094.78,252.96,2048,24.36,8.53,8.79\r\ndavit_tiny,224,8089.88,94.898,768,28.36,4.54,18.89\r\npoolformer_s24,224,8080.92,253.391,2048,21.39,3.41,10.68\r\nvit_medium_patch16_gap_240,240,8071.72,253.671,2048,44.4,9.22,18.81\r\nseresnext50_32x4d,224,8066.82,253.837,2048,27.56,4.26,14.42\r\nnf_regnet_b3,320,8023.86,255.189,2048,18.59,2.05,14.61\r\nconvnext_nano_ols,288,8016.18,255.44,2048,15.65,4.38,15.5\r\nlegacy_seresnext50_32x4d,224,8003.32,255.836,2048,27.56,4.26,14.42\r\nresnet51q,256,7984.31,256.442,2048,35.7,6.38,16.55\r\ntwins_svt_small,224,7971.95,256.834,2048,24.06,2.94,13.75\r\ncoatnet_nano_cc_224,224,7971.75,256.867,2048,13.76,2.24,15.02\r\nmobilenetv4_conv_large,320,7963.0,192.838,1536,32.59,4.47,18.97\r\nvit_base_r26_s32_224,224,7960.82,257.208,2048,101.38,6.81,12.36\r\nresnetrs101,192,7956.04,257.377,2048,63.62,6.04,12.7\r\nmaxvit_pico_rw_256,256,7943.06,96.654,768,7.46,1.83,22.3\r\ncs3sedarknet_l,288,7924.07,258.403,2048,21.91,6.16,10.83\r\nsebotnet33ts_256,256,7903.13,259.102,2048,13.7,3.89,17.46\r\nvit_small_patch16_dinov3_qkvb,256,7892.82,259.422,2048,21.6,6.26,17.03\r\nvit_small_patch16_dinov3,256,7883.22,259.718,2048,21.59,6.26,17.03\r\nefficientnet_lite3,300,7876.62,64.968,512,8.2,1.65,21.85\r\nmaxvit_rmlp_pico_rw_256,256,7869.26,97.559,768,7.52,1.85,24.86\r\ncoatnet_0_rw_224,224,7848.89,195.651,1536,27.44,4.43,18.73\r\nlambda_resnet26rpt_256,256
,7844.09,261.03,2048,10.99,3.16,11.87\r\nresnet50_mlp,256,7816.46,261.983,2048,26.65,7.05,16.25\r\ntf_efficientnet_lite3,300,7814.27,65.496,512,8.2,1.65,21.85\r\nfastvit_mci0,256,7811.64,196.575,1536,11.36,2.39,14.72\r\ndla102,224,7807.61,262.253,2048,33.27,7.19,14.18\r\ncspresnext50,256,7770.41,263.512,2048,20.57,4.05,15.86\r\nregnetv_040,224,7769.52,263.553,2048,20.64,4.0,12.29\r\nvit_dlittle_patch16_reg1_gap_256,256,7736.38,264.664,2048,22.52,6.27,22.69\r\nregnety_040,224,7721.24,265.202,2048,20.65,4.0,12.29\r\ndarknet53,256,7705.4,265.742,2048,41.61,9.31,12.39\r\nefficientnet_b3_pruned,300,7680.43,266.605,2048,9.86,1.04,11.86\r\nmobilevitv2_125,256,7672.97,100.058,768,7.48,2.86,20.1\r\ndarknetaa53,256,7645.68,267.805,2048,36.02,7.97,12.39\r\ndensenet169,224,7643.3,267.893,2048,14.15,3.4,7.3\r\nefficientformerv2_l,224,7617.69,268.799,2048,26.32,2.59,18.54\r\nresnet101,224,7602.61,269.319,2048,44.55,7.83,16.23\r\nvit_relpos_medium_patch16_rpn_224,224,7596.96,269.532,2048,38.73,7.97,17.02\r\nfocalnet_tiny_srf,224,7572.82,270.397,2048,28.43,4.42,16.32\r\nnextvit_small,224,7565.97,270.632,2048,31.74,5.8,18.44\r\necaresnet50d_pruned,288,7541.25,271.517,2048,19.94,4.19,10.61\r\nseresnext26t_32x4d,288,7537.32,271.661,2048,16.81,4.46,16.68\r\nmobilenetv3_large_150d,320,7510.61,102.21,768,14.62,1.61,19.29\r\nefficientnet_cc_b0_4e,224,7462.54,274.401,2048,13.31,0.41,9.42\r\nseresnext26d_32x4d,288,7456.69,274.608,2048,16.81,4.51,16.85\r\ncs3darknet_focus_x,256,7440.07,275.205,2048,35.02,8.03,10.69\r\nresnet101c,224,7413.89,276.196,2048,44.57,8.08,17.04\r\nresnest50d_1s4x24d,224,7411.91,276.261,2048,25.68,4.43,13.57\r\nresnet50,288,7359.67,278.229,2048,25.56,6.8,18.37\r\nmambaout_tiny,224,7350.69,278.565,2048,26.55,4.49,16.68\r\nresnet61q,256,7334.97,279.153,2048,36.85,7.8,17.01\r\nefficientnet_b1,288,7302.14,140.186,1024,7.79,0.97,15.46\r\nresnet101d,224,7300.66,280.47,2048,44.57,8.08,17.04\r\ncoatnet_rmlp_nano_rw_224,224,7289.3,70.203,512,15.15,2.62,20.34\r\nhrnet_w18,224,
7277.45,281.369,2048,21.3,4.32,16.31\r\nhrnet_w18_ssld,224,7266.67,281.785,2048,21.3,4.32,16.31\r\nxcit_tiny_12_p16_384,384,7259.01,282.071,2048,6.72,3.64,18.26\r\nvit_base_resnet50d_224,224,7213.29,283.872,2048,110.97,8.73,16.92\r\necaresnetlight,288,7197.14,284.509,2048,30.16,6.79,13.91\r\necaresnet26t,320,7196.29,284.534,2048,16.01,5.24,16.44\r\nhgnet_tiny,288,7188.44,213.636,1536,14.74,7.51,10.51\r\ntf_efficientnet_b2,260,7158.45,143.019,1024,9.11,1.02,13.83\r\npvt_v2_b2,224,7153.82,143.097,1024,25.36,4.05,27.53\r\nvit_medium_patch16_gap_256,256,7142.24,286.688,2048,38.86,10.59,22.15\r\nfocalnet_tiny_lrf,224,7117.24,287.699,2048,28.65,4.49,17.76\r\nmixnet_l,224,7109.56,288.007,2048,7.33,0.58,10.84\r\ncoatnet_bn_0_rw_224,224,7108.26,71.993,512,27.44,4.67,22.04\r\nvgg13,224,7093.38,288.662,2048,133.05,11.31,12.25\r\necaresnet50t,256,7089.56,288.817,2048,25.57,5.64,15.45\r\nnfnet_f0,192,7084.52,289.03,2048,71.49,7.21,10.16\r\nswin_tiny_patch4_window7_224,224,7080.3,289.215,2048,28.29,4.51,17.06\r\nvgg13_bn,224,7056.75,290.161,2048,133.05,11.33,12.25\r\nnf_resnet101,224,7042.57,290.748,2048,44.55,8.01,16.23\r\nskresnext50_32x4d,224,7006.91,292.217,2048,27.48,4.5,17.18\r\ndpn68b,288,7004.47,292.328,2048,12.61,3.89,17.3\r\ngc_efficientnetv2_rw_t,288,7001.28,292.473,2048,13.68,3.2,16.45\r\neca_nfnet_l0,288,6999.41,292.543,2048,24.14,7.12,17.29\r\nnfnet_l0,288,6952.83,294.51,2048,35.07,7.13,17.29\r\nresnet50t,288,6951.94,294.55,2048,25.57,7.14,19.53\r\nrdnet_tiny,224,6934.72,295.286,2048,23.86,5.06,15.98\r\nrepvit_m2_3,224,6923.06,295.758,2048,22.93,4.52,21.32\r\ndm_nfnet_f0,192,6917.45,296.009,2048,71.49,7.21,10.16\r\nresnet50d,288,6911.12,296.272,2048,25.58,7.19,19.7\r\nefficientnet_b2,288,6909.48,148.162,1024,9.11,1.12,16.2\r\ntf_efficientnet_cc_b0_4e,224,6908.68,296.402,2048,13.31,0.41,9.42\r\nwide_resnet50_2,224,6898.62,296.814,2048,68.88,11.43,14.4\r\ncspdarknet53,256,6867.5,298.167,2048,27.64,6.57,16.81\r\nresnetv2_101,224,6831.28,299.766,2048,44.54,7.83,16.23\r\
nresnet101_clip_gap,224,6812.4,300.579,2048,42.52,9.11,17.56\r\nresnetv2_50,288,6811.42,300.64,2048,25.55,6.79,18.37\r\nmobilenetv4_hybrid_medium,384,6809.13,150.348,1024,11.07,3.01,21.18\r\nnf_resnet50,288,6807.51,300.795,2048,25.56,6.88,18.37\r\nefficientnetv2_rw_t,288,6798.88,301.172,2048,13.65,3.19,16.42\r\nresnetaa101d,224,6791.81,301.48,2048,44.57,9.12,17.56\r\nrepvgg_b2g4,224,6773.45,302.298,2048,55.78,11.33,6.45\r\nvit_medium_patch16_reg1_gap_256,256,6753.6,303.188,2048,38.88,10.63,22.26\r\nhiera_tiny_224,224,6751.27,303.298,2048,27.91,4.91,17.13\r\nefficientvit_l2,224,6741.65,151.845,1024,63.71,6.97,19.58\r\nregnetx_040,224,6733.91,304.091,2048,22.12,3.99,12.2\r\nswinv2_cr_tiny_224,224,6721.3,304.673,2048,28.33,4.66,28.45\r\nedgenext_small_rw,320,6716.63,304.866,2048,7.83,2.46,14.85\r\nhalonet50ts,256,6715.93,304.884,2048,22.73,5.3,19.2\r\nswinv2_cr_tiny_ns_224,224,6710.1,305.177,2048,28.33,4.66,28.45\r\ncs3darknet_x,256,6704.49,305.421,2048,35.05,8.38,11.35\r\nvitamin_small_224,224,6689.21,76.498,512,22.17,5.92,26.38\r\nresnetv2_34d,384,6688.68,306.158,2048,21.82,11.49,13.35\r\nvit_medium_patch16_reg4_gap_256,256,6687.9,306.167,2048,38.88,10.76,22.6\r\nresnest50d,224,6685.61,306.284,2048,27.48,5.4,14.36\r\nefficientnet_cc_b0_8e,224,6666.99,307.143,2048,24.01,0.42,9.42\r\ngcresnext50ts,288,6659.02,307.505,2048,15.67,4.75,19.57\r\nvit_small_plus_patch16_dinov3_qkvb,256,6651.07,307.847,2048,28.69,8.11,21.84\r\nvit_small_plus_patch16_dinov3,256,6649.77,307.925,2048,28.68,8.11,21.84\r\ntf_mixnet_l,224,6649.65,307.924,2048,7.33,0.58,10.84\r\nresnet101s,224,6641.93,308.309,2048,44.67,9.19,18.64\r\ngcvit_xtiny,224,6632.07,308.75,2048,19.98,2.93,20.26\r\nresnetv2_101d,224,6599.95,310.27,2048,44.56,8.07,17.04\r\nresnetblur101d,224,6580.28,311.175,2048,44.57,9.12,17.94\r\ngcresnet50t,288,6577.91,311.301,2048,25.9,6.86,18.57\r\nresnetaa50,288,6533.74,313.39,2048,25.56,8.52,19.24\r\nnf_ecaresnet101,224,6530.76,313.534,2048,44.55,8.01,16.27\r\nnf_seresnet101,224,6523.56
,313.888,2048,49.33,8.02,16.27\r\ndla102x,224,6512.11,314.449,2048,26.31,5.89,19.42\r\nrexnetr_300,224,6504.89,78.684,512,34.81,3.39,22.16\r\nresnet101_clip,224,6499.23,315.075,2048,56.26,9.81,18.08\r\nregnety_040_sgn,224,6437.28,318.088,2048,20.65,4.03,12.29\r\nefficientvit_b2,288,6432.19,159.156,1024,24.33,2.64,24.03\r\nregnety_032,224,6420.84,318.91,2048,19.44,3.2,11.26\r\nfbnetv3_g,288,6414.71,159.587,1024,16.62,1.77,21.09\r\nres2net50_26w_6s,224,6413.58,319.28,2048,37.05,6.33,15.28\r\ntf_efficientnet_cc_b0_8e,224,6397.26,320.096,2048,24.01,0.42,9.42\r\ntf_efficientnetv2_b3,300,6394.48,320.208,2048,14.36,3.04,15.74\r\nmobilevitv2_150,256,6394.34,80.034,512,10.59,4.09,24.11\r\nese_vovnet57b,256,6349.69,322.471,2048,38.61,11.69,9.82\r\nresnet51q,288,6347.82,322.577,2048,35.7,8.07,20.94\r\nnf_regnet_b4,320,6334.09,323.272,2048,30.21,3.29,19.88\r\nmvitv2_tiny,224,6295.77,325.252,2048,24.17,4.7,21.16\r\nwide_resnet101_2,176,6275.86,326.267,2048,126.89,14.31,13.18\r\nbotnet50ts_256,256,6271.29,326.519,2048,22.74,5.54,22.23\r\nvit_medium_patch16_rope_reg1_gap_256,256,6257.41,327.236,2048,38.74,10.63,22.26\r\nfasternet_m,224,6252.54,327.491,2048,53.52,8.74,15.34\r\ntresnet_v2_l,224,6222.01,329.085,2048,46.17,8.85,16.34\r\nresnetblur50,288,6205.5,247.487,1536,25.56,8.52,19.87\r\nresnetaa50d,288,6189.75,330.817,2048,25.58,8.92,20.57\r\nlegacy_seresnet101,224,6124.38,334.333,2048,49.33,7.61,15.74\r\ncoatnet_rmlp_0_rw_224,224,6116.11,83.685,512,27.45,4.72,24.89\r\nmaxxvit_rmlp_nano_rw_256,256,6089.11,252.199,1536,16.78,4.37,26.05\r\ncs3sedarknet_xdw,256,6086.82,336.406,2048,21.6,5.97,17.18\r\nfastvit_sa24,256,6080.04,252.589,1536,21.5,3.77,20.35\r\necaresnet101d_pruned,288,6073.12,337.17,2048,24.88,5.75,12.71\r\ncrossvit_18_240,240,6071.05,337.271,2048,43.27,9.05,26.26\r\ntwins_pcpvt_base,224,6059.75,337.915,2048,43.83,6.68,25.25\r\ndarknet53,288,6059.21,337.942,2048,41.61,11.78,15.68\r\nswin_s3_tiny_224,224,6047.07,338.611,2048,28.33,4.64,19.13\r\npoolformerv2_s36,224,6035
.3,339.284,2048,30.79,5.01,15.82\r\nrepvgg_b2,224,6031.59,339.494,2048,80.32,18.38,6.45\r\nseresnet101,224,6028.25,339.674,2048,49.33,7.84,16.27\r\nvit_base_patch32_clip_384,384,6026.13,339.814,2048,88.3,13.06,16.5\r\npvt_v2_b2_li,224,6026.06,169.881,1024,22.55,3.91,27.6\r\ndarknetaa53,288,6024.3,339.914,2048,36.02,10.08,15.68\r\nhieradet_small,256,6021.64,127.502,768,34.73,8.51,27.76\r\nresnext101_32x4d,224,6019.21,340.17,2048,44.18,8.01,21.23\r\nese_vovnet39b,288,6017.64,340.271,2048,24.57,11.71,11.13\r\nrexnetr_200,288,6011.76,85.128,512,16.52,2.62,24.96\r\nresnext101_32x8d,176,5940.79,344.678,2048,88.79,10.33,19.37\r\nresnetblur50d,288,5890.07,260.739,1536,25.58,8.92,21.19\r\nhalo2botnet50ts_256,256,5875.19,348.533,2048,22.64,5.02,21.78\r\necaresnet101d,224,5872.48,348.692,2048,44.57,8.08,17.07\r\nregnetx_080,224,5867.05,349.015,2048,39.57,8.02,14.06\r\nresnext50_32x4d,288,5849.47,350.071,2048,25.03,7.04,23.81\r\nvit_base_patch32_384,384,5844.52,350.36,2048,88.3,13.06,16.5\r\nnextvit_base,224,5828.38,351.328,2048,44.79,8.29,23.71\r\nseresnet50,288,5822.36,351.674,2048,28.09,6.8,18.39\r\nvit_large_patch32_224,224,5818.46,351.917,2048,305.51,15.39,13.3\r\nresmlp_36_224,224,5812.35,352.3,2048,44.69,8.91,16.33\r\nhrnet_w32,224,5811.88,352.328,2048,41.23,8.97,22.02\r\nresnet61q,288,5801.5,352.935,2048,36.85,9.87,21.52\r\ncoat_lite_small,224,5799.39,353.087,2048,19.84,3.96,22.09\r\nconvnext_tiny,288,5767.88,355.019,2048,28.59,7.39,22.21\r\ndensenet201,224,5750.52,356.09,2048,20.01,4.34,7.85\r\ncrossvit_18_dagger_240,240,5744.56,356.456,2048,44.27,9.5,27.03\r\ncs3sedarknet_x,256,5731.57,357.263,2048,35.4,8.38,11.35\r\nmaxvit_nano_rw_256,256,5709.95,67.21,384,15.45,4.46,30.28\r\nxcit_tiny_12_p8_224,224,5706.67,358.807,2048,6.71,4.81,23.6\r\nvgg16,224,5705.69,358.892,2048,138.36,15.47,13.56\r\nconvnext_tiny_hnf,288,5703.18,359.05,2048,28.59,7.39,22.21\r\nefficientvit_b3,224,5702.63,179.523,1024,48.65,3.99,26.9\r\nrexnet_300,224,5697.05,89.844,512,34.71,3.44,22.4\r\nmaxvi
t_rmlp_nano_rw_256,256,5685.95,67.496,384,15.5,4.47,31.92\r\nvgg16_bn,224,5682.52,360.345,2048,138.37,15.5,13.56\r\nconvnext_small,224,5628.79,363.794,2048,50.22,8.71,21.56\r\ndensenet121,288,5627.68,363.863,2048,7.98,4.74,11.41\r\npit_b_224,224,5609.72,365.024,2048,73.76,12.42,32.94\r\npit_b_distilled_224,224,5595.34,365.968,2048,74.79,12.5,33.07\r\necaresnet50t,288,5593.66,366.075,2048,25.57,7.14,19.55\r\nseresnet50t,288,5583.55,366.748,2048,28.1,7.14,19.55\r\nresnet101d,256,5571.45,367.525,2048,44.57,10.55,22.25\r\nmobilenetv4_conv_large,384,5562.57,184.043,1024,32.59,6.43,27.31\r\nresnext50d_32x4d,288,5553.43,368.74,2048,25.05,7.44,25.13\r\ninception_next_small,224,5550.27,368.929,2048,49.37,8.36,19.27\r\necaresnet50d,288,5549.85,368.954,2048,25.58,7.19,19.72\r\nrepvgg_b3g4,224,5549.55,368.976,2048,75.63,16.06,7.55\r\nres2net101_26w_4s,224,5502.34,372.146,2048,45.21,8.1,18.45\r\ncoatnet_0_224,224,5487.18,69.952,384,25.04,4.58,24.01\r\nregnetz_c16,256,5486.65,373.222,2048,13.46,2.51,16.57\r\nhiera_small_224,224,5460.78,374.979,2048,35.01,6.42,20.75\r\nefficientnetv2_s,288,5456.18,375.286,2048,21.46,4.75,20.13\r\npoolformer_s36,224,5425.82,377.392,2048,30.86,5.0,15.82\r\ncs3edgenet_x,256,5384.72,380.279,2048,47.82,11.53,12.92\r\nlambda_resnet26t,256,5382.45,380.453,2048,10.96,3.02,11.87\r\nresnet152,224,5361.91,381.914,2048,60.19,11.56,22.56\r\nswinv2_tiny_window8_256,256,5354.27,382.464,2048,28.35,5.96,24.57\r\nres2net101d,224,5331.96,384.034,2048,45.23,8.35,19.25\r\nconvnextv2_tiny,288,5313.71,385.369,2048,28.64,7.39,22.21\r\nresnet50_gn,288,5295.12,386.705,2048,25.56,6.85,18.37\r\ndla169,224,5288.24,387.22,2048,53.39,11.6,20.2\r\nmobilevitv2_175,256,5286.28,96.807,512,14.25,5.54,28.13\r\nregnetz_c16_evos,256,5264.43,388.99,2048,13.49,2.48,16.57\r\ncs3darknet_x,288,5261.95,389.148,2048,35.05,10.6,14.36\r\ndensenetblur121d,288,5257.17,292.125,1536,8.0,5.14,13.06\r\nresnet152c,224,5247.27,390.255,2048,60.21,11.8,23.36\r\nmixer_b16_224,224,5238.71,390.885,2048,59.8
8,12.62,14.53\r\nregnetz_b16,288,5225.63,391.868,2048,9.72,2.39,16.43\r\nconvnextv2_small,224,5211.02,392.965,2048,50.32,8.71,21.56\r\nresnet152d,224,5201.83,393.674,2048,60.21,11.8,23.36\r\nresnetv2_101x1_bit,224,5189.13,394.633,2048,44.54,8.04,16.23\r\nedgenext_base,320,5189.12,394.621,2048,18.51,6.01,24.32\r\nvolo_d1_224,224,5187.93,394.695,2048,26.63,6.94,24.43\r\nefficientvit_l2,256,5185.58,148.07,768,63.71,9.09,25.49\r\nhgnetv2_b5,288,5183.32,395.056,2048,39.57,10.84,18.5\r\nconvit_small,224,5180.14,395.298,2048,27.78,5.76,17.87\r\ncsatv2_21m,640,5178.99,395.396,2048,20.7,4.72,26.68\r\nres2net50_26w_8s,224,5167.11,396.303,2048,48.4,8.37,17.95\r\ntnt_s_legacy_patch16_224,224,5160.11,396.817,2048,23.76,5.24,24.37\r\nregnetx_064,224,5159.07,396.934,2048,26.21,6.49,16.37\r\nefficientnetv2_rw_s,288,5157.24,397.058,2048,23.94,4.91,21.41\r\nefficientnet_b3,288,5152.4,99.337,512,12.23,1.63,21.49\r\nconvnext_nano,384,5139.35,398.445,2048,15.59,7.22,24.61\r\nvit_small_resnet50d_s16_224,224,5132.64,398.944,2048,57.53,13.48,24.82\r\neca_nfnet_l1,256,5118.26,400.082,2048,41.41,9.62,22.04\r\ncait_xxs24_224,224,5096.95,401.758,2048,11.96,2.53,20.29\r\ninception_v4,299,5081.95,402.931,2048,42.68,12.28,15.09\r\nseresnetaa50d,288,5074.27,403.557,2048,28.11,8.92,20.59\r\ntnt_s_patch16_224,224,5070.97,403.797,2048,23.77,5.25,24.37\r\nseresnext101_32x4d,224,5053.6,405.213,2048,48.96,8.02,21.26\r\nmaxxvitv2_nano_rw_256,256,5051.62,202.663,1024,23.7,6.26,23.05\r\nlegacy_seresnext101_32x4d,224,5029.91,407.098,2048,48.96,8.02,21.26\r\nvit_base_patch16_clip_quickgelu_224,224,5018.16,408.084,2048,86.19,17.58,23.9\r\nvit_base_patch16_clip_224,224,5008.11,408.901,2048,86.57,17.58,23.9\r\nefficientnet_cc_b1_8e,240,5007.55,408.942,2048,39.72,0.75,15.44\r\npvt_v2_b3,224,4997.19,204.874,1024,45.24,6.92,37.7\r\nmaxvit_tiny_rw_224,224,4981.1,102.747,512,29.06,5.11,33.11\r\nmixer_l32_224,224,4977.1,411.419,2048,206.94,11.27,19.86\r\nvit_base_patch16_xp_224,224,4966.09,412.328,2048,86.51,17.56,23
.9\r\nregnetz_b16_evos,288,4954.17,413.345,2048,9.74,2.36,16.43\r\nxcit_small_24_p16_224,224,4928.72,415.448,2048,47.67,9.1,23.64\r\nhgnet_small,288,4914.73,312.469,1536,24.36,14.09,14.53\r\nresnetv2_50d_gn,288,4914.08,416.726,2048,25.57,7.24,19.7\r\ndeit3_base_patch16_224,224,4900.08,417.895,2048,86.59,17.58,23.9\r\nresnetv2_34d,448,4899.67,417.946,2048,21.82,15.64,18.16\r\nresnetv2_50d_evos,288,4895.6,418.293,2048,25.59,7.15,19.7\r\nvit_betwixt_patch16_gap_256,256,4883.08,419.356,2048,60.37,16.25,27.69\r\ndavit_small,224,4869.35,157.68,768,49.75,8.8,30.49\r\nvit_base_patch16_224_miil,224,4868.75,420.599,2048,94.4,17.59,23.91\r\ntresnet_l,224,4868.2,420.63,2048,55.99,10.9,11.9\r\ndeit_base_patch16_224,224,4865.51,420.866,2048,86.57,17.58,23.9\r\nvit_base_patch16_siglip_gap_224,224,4863.83,421.022,2048,85.8,17.49,23.75\r\ndeit_base_distilled_patch16_224,224,4858.13,421.504,2048,87.34,17.68,24.05\r\nvit_base_patch16_224,224,4856.08,421.703,2048,86.57,17.58,23.9\r\nvit_pe_spatial_tiny_patch16_512,512,4853.5,421.901,2048,5.68,10.46,61.64\r\nseresnext50_32x4d,288,4845.15,422.628,2048,27.56,7.04,23.82\r\nvit_base_patch16_siglip_224,224,4840.19,423.086,2048,92.88,17.73,24.06\r\ncoatnet_rmlp_1_rw_224,224,4839.8,317.311,1536,41.69,7.85,35.47\r\nresnet152s,224,4837.27,423.324,2048,60.32,12.92,24.96\r\nefficientnet_x_b3,288,4819.38,424.905,2048,13.3,3.91,15.6\r\nfastvit_mci1,256,4797.1,320.146,1536,21.46,4.67,27.3\r\ncaformer_s36,224,4784.26,428.017,2048,39.3,8.0,37.53\r\nvit_small_patch16_36x1_224,224,4781.1,428.29,2048,64.67,13.71,35.69\r\nvgg19,224,4778.93,428.503,2048,143.67,19.63,14.86\r\nregnetv_040,288,4776.82,428.674,2048,20.64,6.6,20.3\r\nvit_base_mci_224,224,4773.35,429.002,2048,86.35,17.73,24.65\r\nmobilenetv4_hybrid_medium,448,4771.68,160.899,768,11.07,4.2,29.64\r\nese_vovnet99b,224,4763.0,429.925,2048,63.2,16.51,11.27\r\nregnetv_064,224,4761.53,430.063,2048,30.58,6.39,16.41\r\nvit_small_patch16_18x2_224,224,4760.21,430.159,2048,64.67,13.71,35.69\r\nvgg19_bn,224,4
748.04,431.261,2048,143.68,19.66,14.86\r\nresnetv2_152,224,4743.49,431.709,2048,60.19,11.55,22.56\r\nnextvit_large,224,4736.59,432.31,2048,57.83,10.77,28.99\r\nlegacy_xception,299,4720.27,162.655,768,22.86,8.4,35.83\r\nmixnet_xl,224,4715.54,434.251,2048,11.9,0.93,14.57\r\nvit_base_patch16_gap_224,224,4696.88,435.974,2048,86.57,17.49,25.59\r\nregnety_040,288,4695.12,436.129,2048,20.65,6.61,20.3\r\nmambaout_small,224,4686.22,436.98,2048,48.49,8.96,27.72\r\nmobilevitv2_200,256,4677.99,82.052,384,18.45,7.22,32.15\r\nvit_betwixt_patch16_reg1_gap_256,256,4675.61,437.95,2048,60.4,16.32,27.83\r\nconvnextv2_nano,384,4669.27,438.56,2048,15.62,7.22,24.61\r\nconvformer_s36,224,4647.91,440.578,2048,40.01,7.67,30.5\r\nresnetv2_152d,224,4641.94,441.151,2048,60.2,11.8,23.36\r\nrdnet_small,224,4636.62,441.632,2048,50.44,8.74,22.55\r\nvit_pe_core_base_patch16_224,224,4636.04,441.696,2048,93.67,17.82,24.21\r\nmvitv2_small,224,4635.02,441.802,2048,34.87,7.0,28.08\r\nregnety_064,224,4631.7,442.118,2048,30.58,6.39,16.41\r\nvit_betwixt_patch16_reg4_gap_256,256,4621.57,443.076,2048,60.4,16.52,28.24\r\nmobilenetv4_conv_aa_large,384,4618.75,221.667,1024,32.59,7.07,32.29\r\ntf_efficientnetv2_s,300,4616.7,443.561,2048,21.46,5.35,22.73\r\nresnet101,288,4579.47,447.17,2048,44.55,12.95,26.83\r\nrepvgg_b3,224,4572.74,447.829,2048,110.96,26.21,7.55\r\ncoatnet_1_rw_224,224,4571.48,111.962,512,41.72,8.04,34.6\r\nvit_base_patch16_rope_ape_224,224,4565.47,448.535,2048,86.59,17.58,23.9\r\nvit_base_patch16_rope_224,224,4560.09,449.074,2048,86.43,17.58,23.9\r\nmambaout_small_rw,224,4555.42,449.507,2048,48.5,8.96,27.72\r\nbeit_base_patch16_224,224,4550.91,449.963,2048,86.53,17.58,23.9\r\nbeitv2_base_patch16_224,224,4529.87,452.051,2048,86.53,17.58,23.9\r\ncs3sedarknet_x,288,4521.2,452.91,2048,35.4,10.6,14.37\r\ndensenet161,224,4515.56,453.486,2048,28.68,7.79,11.06\r\necaresnet50t,320,4508.02,454.242,2048,25.57,8.82,24.13\r\nvit_base_patch32_clip_448,448,4495.35,455.511,2048,88.34,17.93,23.9\r\nsequencer2d_
s,224,4490.23,456.055,2048,27.65,4.96,11.31\r\nvit_relpos_base_patch16_224,224,4482.14,456.856,2048,86.43,17.51,24.97\r\nfastvit_sa36,256,4478.36,342.93,1536,31.46,5.59,29.46\r\ncs3se_edgenet_x,256,4477.97,457.286,2048,50.72,11.53,12.94\r\nfocalnet_small_srf,224,4477.64,457.326,2048,49.89,8.62,26.26\r\ntf_efficientnet_cc_b1_8e,240,4474.87,457.624,2048,39.72,0.75,15.44\r\neva02_base_patch16_clip_224,224,4465.29,458.599,2048,86.26,17.62,26.32\r\nhrnet_w30,224,4464.78,458.638,2048,37.71,8.15,21.21\r\nfasternet_l,224,4462.02,458.932,2048,93.47,15.52,20.46\r\nvit_relpos_base_patch16_cls_224,224,4454.27,459.712,2048,86.43,17.6,25.12\r\nxception41p,299,4453.26,114.933,512,26.91,9.25,39.86\r\nvit_relpos_base_patch16_clsgap_224,224,4452.88,459.857,2048,86.43,17.6,25.12\r\nefficientnet_el,300,4442.51,460.942,2048,10.59,8.0,30.7\r\nvit_base_patch16_rpn_224,224,4431.48,462.092,2048,86.54,17.49,23.75\r\nbeit3_base_patch16_224,224,4427.7,462.484,2048,86.66,17.63,23.9\r\nefficientnet_el_pruned,300,4423.87,462.887,2048,10.59,8.0,30.7\r\nefficientvit_b3,256,4423.02,173.594,768,48.65,5.2,35.01\r\ntf_efficientnet_el,300,4400.15,465.364,2048,10.59,8.0,30.7\r\nvit_small_patch16_384,384,4362.61,469.379,2048,22.2,15.52,50.78\r\ndeit3_small_patch16_384,384,4348.75,470.882,2048,22.21,15.52,50.78\r\nswin_small_patch4_window7_224,224,4329.58,472.953,2048,49.61,8.77,27.47\r\nhrnet_w18_ssld,288,4326.11,473.339,2048,21.3,7.14,26.96\r\ntwins_pcpvt_large,224,4306.86,475.463,2048,60.99,9.84,35.82\r\nvit_betwixt_patch16_rope_reg4_gap_256,256,4300.18,476.199,2048,60.23,16.52,28.24\r\ndla102x2,224,4286.41,477.731,2048,41.28,9.34,29.91\r\ngcvit_tiny,224,4282.92,478.12,2048,28.22,4.79,29.82\r\ncs3edgenet_x,288,4278.64,478.596,2048,47.82,14.59,16.36\r\nregnety_080,224,4277.88,478.675,2048,39.18,8.0,17.97\r\nflexivit_base,240,4266.1,480.007,2048,86.59,20.29,28.36\r\nlegacy_seresnet152,224,4262.62,480.388,2048,66.82,11.33,22.08\r\nfocalnet_small_lrf,224,4260.62,480.62,2048,50.34,8.74,28.61\r\nmaxxvit_rmlp_
tiny_rw_256,256,4255.76,360.873,1536,29.64,6.66,39.76\r\nregnetz_040,256,4244.09,482.509,2048,27.12,4.06,24.19\r\nnf_regnet_b4,384,4230.81,484.01,2048,30.21,4.7,28.61\r\nregnetz_040_h,256,4214.65,485.87,2048,28.94,4.12,24.29\r\nseresnet152,224,4209.15,486.508,2048,66.82,11.57,22.61\r\nefficientformer_l7,224,4208.02,486.622,2048,82.23,10.17,24.45\r\npoolformerv2_m36,224,4182.43,489.598,2048,56.08,8.81,22.02\r\nresnest50d_4s2x40d,224,4162.56,491.946,2048,30.42,4.4,17.94\r\nefficientnet_b3_gn,288,4151.18,123.3,512,11.73,1.74,23.35\r\nwide_resnet50_2,288,4144.97,494.036,2048,68.88,18.89,23.81\r\nefficientnet_b3,320,4139.74,123.643,512,12.23,2.01,26.52\r\nresnetv2_101,288,4127.73,496.103,2048,44.54,12.94,26.83\r\nnfnet_f0,256,4102.15,499.2,2048,71.49,12.62,18.05\r\nvit_mediumd_patch16_reg4_gap_256,256,4093.68,500.221,2048,64.11,17.87,37.57\r\nresnetaa101d,288,4089.86,500.684,2048,44.57,15.07,29.03\r\nvit_base_patch16_rope_mixed_ape_224,224,4083.47,501.483,2048,86.59,17.58,25.7\r\nregnetz_d8,256,4080.96,501.792,2048,23.37,3.97,23.74\r\nvit_base_patch16_rope_mixed_224,224,4077.65,502.195,2048,86.44,17.58,25.7\r\ntf_efficientnet_b3,300,4077.03,125.553,512,12.23,1.87,23.83\r\nefficientvit_l2,288,4072.6,188.537,768,63.71,11.51,32.19\r\nmobilenetv4_conv_large,448,4072.54,188.53,768,32.59,8.75,37.17\r\nnest_tiny,224,4067.72,503.413,2048,17.06,5.83,25.48\r\nvit_relpos_base_patch16_rpn_224,224,4065.06,503.755,2048,86.41,17.51,24.97\r\ngmlp_b16_224,224,4063.91,503.899,2048,73.08,15.78,30.21\r\nmambaout_tiny,288,4054.68,505.035,2048,26.55,7.41,27.58\r\nswinv2_cr_small_224,224,4053.16,505.24,2048,49.7,9.07,50.27\r\nswinv2_cr_small_ns_224,224,4049.37,505.713,2048,49.7,9.08,50.27\r\nvit_pe_core_small_patch16_384,384,4048.23,505.85,2048,23.78,15.69,51.23\r\nese_vovnet57b,320,4035.7,507.408,2048,38.61,18.26,15.34\r\nnest_tiny_jx,224,4033.26,507.726,2048,17.06,5.83,25.48\r\neva02_small_patch14_336,336,4002.63,511.61,2048,22.13,15.48,54.33\r\nhiera_small_abswin_256,256,3998.63,512.119,204
8,34.36,8.29,26.38\r\ninception_resnet_v2,299,3997.89,512.208,2048,55.84,13.18,25.06\r\ndm_nfnet_f0,256,3997.17,512.292,2048,71.49,12.62,18.05\r\nrexnetr_300,288,3992.4,64.095,256,34.81,5.59,36.61\r\nlevit_conv_384_s8,224,3984.11,513.979,2048,39.06,9.95,35.86\r\nresnet152d,256,3967.91,516.073,2048,60.21,15.41,30.51\r\nxcit_tiny_24_p16_384,384,3963.42,516.657,2048,12.12,6.87,34.29\r\nresnetblur101d,288,3960.0,387.83,1536,44.57,15.07,29.65\r\nregnety_080_tv,224,3959.24,517.198,2048,39.38,8.51,19.73\r\nregnetz_d8_evos,256,3955.64,517.688,2048,23.46,4.5,24.92\r\ncoatnet_rmlp_1_rw2_224,224,3929.7,130.254,512,41.72,8.11,40.13\r\nwide_resnet101_2,224,3923.26,521.957,2048,126.89,22.8,21.23\r\nregnety_040_sgn,288,3912.74,523.349,2048,20.65,6.67,20.3\r\nmaxvit_tiny_tf_224,224,3909.86,130.906,512,30.92,5.6,35.78\r\ndpn98,224,3906.61,524.187,2048,61.57,11.73,25.2\r\nlevit_384_s8,224,3883.43,527.308,2048,39.06,9.95,35.86\r\nresnet200,224,3879.65,527.811,2048,64.67,15.07,32.19\r\nmaxvit_tiny_rw_256,256,3869.89,99.19,384,29.07,6.74,44.35\r\nmaxvit_rmlp_tiny_rw_256,256,3854.97,99.571,384,29.15,6.77,46.92\r\nconvnext_base,224,3853.03,531.481,2048,88.59,15.38,28.75\r\nmobilenetv4_hybrid_large,384,3819.2,268.059,1024,37.76,7.77,34.52\r\nvit_mediumd_patch16_rope_reg1_gap_256,256,3804.91,538.193,2048,63.95,17.65,37.02\r\nmvitv2_small_cls,224,3769.32,543.275,2048,34.87,7.04,28.17\r\nvit_base_patch16_siglip_gap_256,256,3767.74,543.506,2048,85.84,23.13,33.23\r\nlamhalobotnet50ts_256,256,3760.89,544.504,2048,22.57,5.02,18.44\r\nefficientnet_lite4,380,3741.03,102.612,384,13.01,4.04,45.66\r\nvit_base_patch16_siglip_256,256,3734.93,548.26,2048,92.93,23.44,33.63\r\nhgnetv2_b6,224,3730.69,548.898,2048,75.26,16.88,21.23\r\nresnext101_64x4d,224,3727.59,549.366,2048,83.46,15.52,31.21\r\ntf_efficientnet_lite4,380,3720.43,103.189,384,13.01,4.04,45.66\r\nregnety_032,288,3709.7,552.01,2048,19.44,5.29,18.61\r\nregnetx_120,224,3705.87,552.57,2048,46.11,12.13,21.37\r\ninception_next_base,224,3705.54,552.6
29,2048,86.67,14.85,25.69\r\nhiera_base_224,224,3698.77,553.645,2048,51.52,9.4,30.42\r\nvit_large_r50_s32_224,224,3677.81,556.781,2048,328.99,19.58,24.41\r\nresnext101_32x4d,288,3641.61,562.333,2048,44.18,13.24,35.09\r\nxception41,299,3635.46,140.792,512,26.97,9.28,39.86\r\npoolformer_m36,224,3634.68,563.405,2048,56.17,8.8,22.02\r\nresnext101_32x8d,224,3628.15,564.404,2048,88.79,16.48,31.21\r\nseresnet101,288,3615.84,566.341,2048,49.33,12.95,26.87\r\nswinv2_tiny_window16_256,256,3591.1,570.256,2048,28.35,6.68,39.02\r\nconvnextv2_base,224,3585.16,571.189,2048,88.72,15.38,28.75\r\nvit_base_patch16_reg4_gap_256,256,3578.54,572.244,2048,86.62,23.5,33.89\r\ncrossvit_base_240,240,3563.06,574.732,2048,105.03,21.22,36.33\r\nresnet101d,320,3551.83,576.538,2048,44.57,16.48,34.77\r\necaresnet101d,288,3526.44,580.69,2048,44.57,13.35,28.19\r\npvt_v2_b4,224,3521.78,290.716,1024,62.56,10.14,53.74\r\nsequencer2d_m,224,3518.42,582.032,2048,38.31,6.55,14.26\r\ncoatnet_1_224,224,3509.33,109.38,384,42.23,8.7,39.0\r\nvit_small_r26_s32_384,384,3505.25,584.203,2048,36.47,10.43,29.85\r\nresnetrs101,288,3498.63,585.317,2048,63.62,13.56,28.53\r\nregnety_120,224,3493.12,586.22,2048,51.82,12.14,21.38\r\nefficientvit_b3,288,3465.89,221.538,768,48.65,6.58,44.2\r\nregnetz_d32,256,3449.03,593.742,2048,27.58,5.98,23.74\r\npvt_v2_b5,224,3446.78,594.115,2048,81.96,11.76,50.92\r\ncait_xxs36_224,224,3439.85,595.31,2048,17.3,3.77,30.34\r\nvit_base_patch16_plus_clip_240,240,3416.11,599.448,2048,117.21,27.41,33.08\r\nregnetz_c16,320,3415.31,599.603,2048,13.46,3.92,25.88\r\ndavit_base,224,3408.72,225.254,768,87.95,15.51,40.66\r\ntwins_svt_base,224,3403.19,601.729,2048,56.07,8.59,26.33\r\ncoat_tiny,224,3400.3,602.248,2048,5.5,4.35,27.2\r\nvit_base_patch16_rope_reg1_gap_256,256,3392.17,603.687,2048,86.43,23.22,33.39\r\nconvnext_small,288,3390.13,604.03,2048,50.22,14.39,35.65\r\nmobilenetv4_conv_aa_large,448,3362.38,228.355,768,32.59,9.63,43.94\r\neva02_base_patch14_224,224,3355.79,610.233,2048,85.76,23.22,36
.55\r\nhrnet_w48,224,3352.71,610.78,2048,77.47,17.34,28.56\r\nhrnet_w48_ssld,224,3351.96,610.917,2048,77.47,17.34,28.56\r\nefficientnet_b3_gn,320,3342.41,114.854,384,11.73,2.14,28.83\r\nxcit_medium_24_p16_224,224,3341.93,612.749,2048,84.4,16.13,31.71\r\nvit_base_patch16_plus_240,240,3326.29,615.64,2048,117.56,27.41,33.08\r\ncaformer_m36,224,3306.7,619.297,2048,56.2,13.29,50.48\r\nfastvit_ma36,256,3303.18,464.94,1536,43.98,7.82,34.98\r\nseresnext101_64x4d,224,3292.77,621.915,2048,88.23,15.53,31.25\r\neca_nfnet_l1,320,3277.75,624.76,2048,41.41,14.92,34.42\r\nlambda_resnet50ts,256,3264.67,627.28,2048,21.54,5.07,17.48\r\nregnetz_c16_evos,320,3258.21,628.513,2048,13.49,3.86,25.88\r\nconvformer_m36,224,3244.56,631.156,2048,57.05,12.89,42.05\r\nfastvit_mci2,256,3237.81,474.33,1536,35.7,7.85,36.09\r\ntresnet_xl,224,3235.85,632.856,2048,78.44,15.2,15.34\r\nhgnet_base,224,3232.91,633.422,2048,71.58,25.14,15.47\r\nconvnext_tiny,384,3224.53,635.064,2048,28.59,13.14,39.48\r\nseresnext101_32x8d,224,3223.32,635.327,2048,93.57,16.48,31.25\r\nvit_base_r50_s16_224,224,3222.28,635.517,2048,97.89,21.66,35.28\r\nxception65p,299,3221.01,158.903,512,39.82,13.91,52.48\r\nresnet152,288,3215.32,636.896,2048,60.19,19.11,37.28\r\nswinv2_cr_small_ns_256,256,3212.09,637.545,2048,49.7,12.07,76.21\r\nmvitv2_base,224,3211.54,637.643,2048,51.47,10.16,40.5\r\nregnetx_160,224,3203.09,639.322,2048,54.28,15.99,25.52\r\nswinv2_small_window8_256,256,3200.4,639.871,2048,49.73,11.58,40.14\r\npoolformerv2_m48,224,3165.51,646.903,2048,73.35,11.59,29.17\r\nhrnet_w40,224,3159.83,648.069,2048,57.56,12.75,25.29\r\nxcit_nano_12_p8_384,384,3157.24,648.605,2048,3.05,6.34,46.08\r\nseresnext101d_32x8d,224,3157.1,648.636,2048,93.59,16.72,32.05\r\nefficientnet_b4,320,3151.93,162.4,512,19.34,3.13,34.76\r\nxcit_small_12_p16_384,384,3149.86,650.111,2048,26.25,14.14,36.51\r\nvit_small_patch8_224,224,3149.13,650.284,2048,21.67,22.44,80.84\r\nresnest101e,256,3148.83,650.332,2048,48.28,13.38,28.66\r\nseresnet152d,256,3146.0,65
0.932,2048,66.84,15.42,30.56\r\nresnetrs152,256,3139.99,652.17,2048,86.62,15.59,30.83\r\nmaxvit_tiny_pm_256,256,3137.92,122.333,384,30.09,6.61,47.9\r\nswinv2_base_window12_192,192,3133.68,653.485,2048,109.28,11.9,39.72\r\nmixnet_xxl,224,3094.47,330.865,1024,23.96,2.04,23.43\r\nfocalnet_base_srf,224,3094.18,661.821,2048,88.15,15.28,35.01\r\nregnety_160,224,3093.05,662.064,2048,83.59,15.96,23.04\r\nhrnet_w44,224,3079.96,664.881,2048,67.06,14.94,26.92\r\nvit_relpos_base_patch16_plus_240,240,3065.37,668.047,2048,117.38,27.3,34.33\r\nsamvit_base_patch16_224,224,3061.52,668.883,2048,86.46,17.54,24.54\r\nmaxvit_rmlp_small_rw_224,224,3057.98,167.384,512,64.9,10.75,49.3\r\nefficientnetv2_s,384,3047.66,671.934,2048,21.46,8.44,35.77\r\nswin_base_patch4_window7_224,224,3046.6,672.161,2048,87.77,15.47,36.63\r\nrdnet_base,224,3018.79,678.343,2048,87.45,15.4,31.14\r\nseresnext101_32x4d,288,3012.59,679.762,2048,48.96,13.25,35.12\r\nxcit_tiny_24_p8_224,224,3007.42,680.918,2048,12.11,9.21,45.39\r\nconvformer_s18,384,3003.86,511.288,1536,26.77,11.63,46.49\r\nnfnet_f1,224,3001.03,682.365,2048,132.63,17.87,22.94\r\ncaformer_s18,384,2998.74,512.168,1536,26.34,13.42,77.34\r\nvit_base_patch16_dinov3,256,2986.96,685.601,2048,85.64,23.6,34.06\r\nvit_base_patch16_dinov3_qkvb,256,2983.11,686.475,2048,85.66,23.6,34.06\r\nconvnextv2_tiny,384,2968.85,689.772,2048,28.64,13.14,39.48\r\nseresnextaa101d_32x8d,224,2966.55,690.3,2048,93.59,17.25,34.16\r\nconvnext_base,256,2941.45,696.188,2048,88.59,20.09,37.55\r\ndm_nfnet_f1,224,2938.12,696.979,2048,132.63,17.87,22.94\r\nmobilenetv4_conv_aa_large,480,2934.39,261.672,768,32.59,11.05,50.45\r\ntf_efficientnetv2_s,384,2923.2,700.526,2048,21.46,8.44,35.77\r\nvolo_d2_224,224,2920.82,701.105,2048,58.68,14.34,41.34\r\nvit_medium_patch16_gap_384,384,2913.31,702.915,2048,39.03,26.08,67.54\r\ncait_s24_224,224,2906.47,704.577,2048,46.92,9.35,40.58\r\nswinv2_cr_base_224,224,2896.47,707.006,2048,87.88,15.86,59.66\r\nmambaout_base_tall_rw,224,2894.84,707.399,2048,86.
48,16.15,38.74\r\nresnet200d,256,2888.87,708.855,2048,64.69,20.0,43.09\r\nswinv2_cr_base_ns_224,224,2882.75,710.38,2048,87.88,15.86,59.66\r\nefficientnetv2_rw_s,384,2879.8,711.098,2048,23.94,8.72,38.03\r\ncs3se_edgenet_x,320,2873.32,712.69,2048,50.72,18.01,20.21\r\nfocalnet_base_lrf,224,2868.13,713.98,2048,88.75,15.43,38.13\r\nswin_s3_small_224,224,2858.71,716.333,2048,49.74,9.43,37.84\r\nregnetv_064,288,2841.68,720.633,2048,30.58,10.55,27.11\r\ndpn92,224,2841.13,720.784,2048,37.67,6.54,18.21\r\ntresnet_m,448,2841.12,360.371,1024,31.39,22.99,29.21\r\nresnet50x4_clip_gap,288,2835.75,722.141,2048,65.62,19.57,34.11\r\ndpn131,224,2833.93,722.61,2048,79.25,16.09,32.97\r\nconvit_base,224,2810.29,728.698,2048,86.54,17.52,31.77\r\nmambaout_base_short_rw,224,2803.31,730.502,2048,88.83,16.31,38.08\r\ncoat_lite_medium,224,2779.01,736.888,2048,44.57,9.81,40.06\r\ntnt_b_patch16_224,224,2764.4,740.801,2048,65.43,14.1,39.01\r\nhiera_base_plus_224,224,2757.43,742.656,2048,69.9,12.67,37.98\r\nregnety_064,288,2756.47,742.91,2048,30.58,10.56,27.11\r\ngcvit_small,224,2755.54,557.376,1536,51.09,8.57,41.61\r\nmobilenetv5_base,256,2750.19,186.125,512,82.65,20.05,36.89\r\nnf_regnet_b5,384,2750.03,744.647,2048,49.74,7.95,42.9\r\npoolformer_m48,224,2748.14,745.16,2048,73.47,11.59,29.17\r\ncoat_mini,224,2713.2,754.769,2048,10.34,6.82,33.68\r\nhiera_base_abswin_256,256,2699.74,758.531,2048,51.27,12.46,40.7\r\nresnet50x4_clip,288,2699.18,758.673,2048,87.14,21.35,35.27\r\nvitamin_base_224,224,2698.54,94.827,256,87.72,22.68,52.77\r\nregnetz_040,320,2693.61,760.273,2048,27.12,6.35,37.78\r\nregnetz_040_h,320,2680.59,763.959,2048,28.94,6.43,37.94\r\ntiny_vit_21m_384,384,2670.71,191.651,512,21.22,13.72,77.83\r\nhrnet_w64,224,2663.31,768.91,2048,128.06,28.97,35.09\r\nmobilenetv4_hybrid_large,448,2656.97,288.986,768,37.76,10.74,48.61\r\ncoatnet_2_rw_224,224,2652.32,96.48,256,73.87,15.09,49.22\r\nfastvit_mci3,256,2629.05,389.444,1024,125.07,14.82,44.88\r\nxception65,299,2627.85,194.784,512,39.92,13.96,5
2.48\r\nregnetz_d8,320,2608.54,785.057,2048,23.37,6.19,37.08\r\nmambaout_small,288,2606.7,785.596,2048,48.49,14.81,45.82\r\ncrossvit_15_dagger_408,408,2598.83,787.974,2048,28.5,21.45,95.05\r\ncoatnet_rmlp_2_rw_224,224,2591.65,98.742,256,73.88,15.18,54.78\r\nmvitv2_base_cls,224,2589.08,790.945,2048,65.44,10.23,40.65\r\nmaxxvit_rmlp_small_rw_256,256,2570.74,398.278,1024,66.01,14.67,58.38\r\nefficientnetv2_m,320,2566.57,797.884,2048,54.14,11.01,39.97\r\nefficientvit_l3,224,2541.67,201.398,512,246.04,27.62,39.16\r\nmambaout_small_rw,288,2540.75,805.995,2048,48.5,14.81,45.82\r\nnextvit_small,384,2540.43,806.095,2048,31.74,17.25,57.14\r\nnest_small,224,2538.68,806.665,2048,38.35,10.35,40.04\r\nseresnet152,288,2530.5,809.27,2048,66.82,19.11,37.34\r\nnest_small_jx,224,2522.97,811.678,2048,38.35,10.35,40.04\r\nresnet152d,320,2522.55,811.81,2048,60.21,24.08,47.67\r\nmobilevitv2_150,384,2514.4,101.777,256,10.59,9.2,54.25\r\nregnety_080,288,2499.59,819.266,2048,39.18,13.22,29.69\r\ndpn107,224,2477.94,826.421,2048,86.92,18.38,33.46\r\nmaxvit_small_tf_224,224,2473.2,155.219,384,68.93,11.66,53.17\r\nlevit_conv_512_s8,224,2470.04,829.061,2048,73.97,21.77,52.28\r\nefficientnet_b3_g8_gn,288,2465.72,830.53,2048,14.25,2.59,23.35\r\ndensenet264d,224,2443.4,628.587,1536,72.74,13.57,14.0\r\ncoatnet_2_224,224,2433.23,105.18,256,74.68,16.5,52.67\r\nxcit_small_12_p8_224,224,2427.02,843.77,2048,26.21,18.69,47.21\r\nlevit_512_s8,224,2417.11,847.233,2048,73.97,21.77,52.28\r\nmambaout_base_wide_rw,224,2411.4,849.238,2048,94.45,17.78,42.6\r\nmaxvit_rmlp_small_rw_256,256,2388.67,160.713,384,64.9,14.15,66.09\r\nmambaout_base,224,2333.15,877.712,2048,84.81,15.83,36.95\r\ncaformer_b36,224,2326.48,880.23,2048,98.75,23.22,67.3\r\nresnet200,288,2320.72,882.411,2048,64.67,24.91,53.21\r\ntwins_svt_large,224,2317.52,883.643,2048,99.27,15.15,35.1\r\neca_nfnet_l2,320,2313.33,885.243,2048,56.72,20.95,47.43\r\nefficientvit_l2,384,2312.73,165.991,384,63.71,20.45,57.01\r\nconvnext_base,288,2303.63,888.978,2048,8
8.59,25.43,47.53\r\nvit_so150m_patch16_reg4_gap_256,256,2301.61,889.741,2048,134.13,36.75,53.21\r\nmambaout_base_plus_rw,224,2294.28,892.596,2048,101.66,19.19,45.16\r\nmobilenetv4_conv_aa_large,544,2282.5,224.26,512,32.59,14.19,64.79\r\nvit_so150m_patch16_reg4_map_256,256,2278.55,898.741,2048,141.48,37.17,53.68\r\nconvformer_b36,224,2275.42,899.991,2048,99.88,22.69,56.06\r\nswin_s3_base_224,224,2266.75,903.418,2048,71.13,13.69,48.26\r\nsequencer2d_l,224,2265.03,904.131,2048,54.3,9.74,22.12\r\nseresnet200d,256,2260.01,906.132,2048,71.86,20.01,43.15\r\necaresnet200d,256,2259.63,906.274,2048,64.69,20.0,43.15\r\nefficientnetv2_rw_m,320,2246.55,911.555,2048,53.24,12.72,47.14\r\nresnetrs200,256,2243.75,912.696,2048,93.21,20.18,43.42\r\nregnetz_e8,256,2241.45,913.634,2048,57.7,9.91,40.94\r\nswinv2_base_window8_256,256,2238.95,914.638,2048,87.92,20.37,52.59\r\nhgnetv2_b6,288,2237.55,915.219,2048,75.26,27.9,35.09\r\nresnext101_64x4d,288,2213.7,925.086,2048,83.46,25.66,51.59\r\nvit_so150m2_patch16_reg1_gap_256,256,2208.19,927.396,2048,136.06,37.0,56.93\r\nefficientnet_b4,384,2199.22,174.572,384,19.34,4.51,50.04\r\nregnetz_d32,320,2189.19,935.448,2048,27.58,9.33,37.08\r\nconvnextv2_base,288,2157.28,949.282,2048,88.72,25.43,47.53\r\nswinv2_small_window16_256,256,2128.44,962.157,2048,49.73,12.82,66.29\r\nvolo_d3_224,224,2104.84,972.929,2048,86.33,20.78,60.09\r\nvit_pe_spatial_small_patch16_512,512,2098.11,976.056,2048,21.98,31.8,123.27\r\nmobilevitv2_175,384,2087.15,122.614,256,14.25,12.47,63.29\r\nconvnext_large,224,2081.38,983.895,2048,197.77,34.4,43.13\r\nmaxxvitv2_rmlp_base_rw_224,224,2079.19,492.452,1024,116.09,24.2,62.77\r\ncoat_small,224,2056.48,995.809,2048,21.69,12.61,44.25\r\nresnetv2_50x1_bit,448,2043.74,1002.031,2048,25.55,16.62,44.46\r\nxception71,299,2025.56,252.708,512,42.34,18.09,69.92\r\ntf_efficientnet_b4,380,2014.73,190.563,384,19.34,4.49,49.49\r\nvit_betwixt_patch16_reg4_gap_384,384,2014.21,1016.705,2048,60.6,39.71,85.28\r\ngcvit_base,224,2004.78,510.725,1024
,90.32,14.87,55.48\r\nresnetrs152,320,1999.17,1024.375,2048,86.62,24.34,48.14\r\nhrnet_w48_ssld,288,1998.29,1024.81,2048,77.47,28.66,47.21\r\nseresnet152d,320,1990.21,1028.974,2048,66.84,24.09,47.72\r\nvit_large_patch32_384,384,1989.51,1029.331,2048,306.63,45.31,43.86\r\nregnety_120,288,1977.52,1035.571,2048,51.82,20.06,35.34\r\nconvmixer_768_32,224,1967.19,1041.014,2048,21.11,19.55,25.95\r\nefficientvit_l3,256,1958.29,196.044,384,246.04,36.06,50.98\r\nconvnextv2_large,224,1957.32,1046.258,2048,197.96,34.4,43.13\r\nresnext101_32x16d,224,1956.69,1046.608,2048,194.03,36.27,51.18\r\nnextvit_base,384,1950.67,1049.826,2048,44.79,24.62,73.95\r\nxcit_tiny_12_p8_384,384,1926.85,1062.81,2048,6.71,14.13,69.14\r\nseresnext101_32x8d,288,1924.41,1064.168,2048,93.57,27.24,51.63\r\nhgnet_base,288,1923.58,532.28,1024,71.58,41.55,25.57\r\nefficientnet_b3_g8_gn,320,1915.71,1068.989,2048,14.25,3.2,28.83\r\nconvnext_small,384,1897.96,1078.988,2048,50.22,25.58,63.37\r\nseresnext101d_32x8d,288,1888.75,1084.243,2048,93.59,27.64,52.95\r\nresnetv2_50x3_bit,224,1885.68,1086.016,2048,217.32,37.06,33.34\r\ndavit_large,224,1880.34,408.38,768,196.81,34.6,60.99\r\nnf_regnet_b5,456,1867.53,1096.566,2048,49.74,11.7,61.95\r\nconvnext_base,320,1858.51,1101.891,2048,88.59,31.39,58.68\r\nmobilevitv2_200,384,1846.19,103.964,192,18.45,16.24,72.34\r\nresnet200d,320,1839.59,1113.219,2048,64.69,31.25,67.33\r\nxcit_large_24_p16_224,224,1832.66,1117.427,2048,189.1,35.86,47.27\r\nnest_base,224,1822.52,1123.652,2048,67.72,17.96,53.39\r\nnest_base_jx,224,1811.56,1130.458,2048,67.72,17.96,53.39\r\nregnety_320,224,1810.56,1131.078,2048,145.05,32.34,30.26\r\nswinv2_large_window12_192,192,1804.43,1134.921,2048,228.77,26.17,56.53\r\ncrossvit_18_dagger_408,408,1796.94,1139.648,2048,44.61,32.47,124.87\r\nregnety_160,288,1795.43,1140.605,2048,83.59,26.37,38.07\r\nfastvit_mci4,256,1785.3,430.123,768,321.57,27.78,60.59\r\nswinv2_cr_tiny_384,384,1780.95,862.4,1536,28.33,15.34,161.01\r\necaresnet200d,288,1776.94,1152.468,20
48,64.69,25.31,54.59\r\nseresnet200d,288,1774.5,1154.057,2048,71.86,25.32,54.6\r\nseresnextaa101d_32x8d,288,1771.8,1155.811,2048,93.59,28.51,56.44\r\nvit_mediumd_patch16_reg4_gap_384,384,1770.54,1156.637,2048,64.27,43.67,113.51\r\nseresnet269d,256,1763.76,1161.091,2048,113.67,26.59,53.6\r\nmixer_l16_224,224,1754.14,1167.453,2048,208.2,44.6,41.69\r\nregnetz_d8_evos,320,1733.47,1181.387,2048,23.46,7.03,38.92\r\ntf_efficientnetv2_m,384,1733.07,1181.652,2048,54.14,15.85,57.52\r\nrdnet_large,224,1715.21,895.45,1536,186.27,34.74,46.67\r\nvolo_d1_384,384,1710.19,1197.455,2048,26.78,22.75,108.55\r\nresnetrs270,256,1709.39,1198.035,2048,129.86,27.06,55.84\r\nmambaout_base_short_rw,288,1691.38,1210.781,2048,88.83,26.96,62.94\r\nswin_large_patch4_window7_224,224,1688.73,1212.664,2048,196.53,34.53,54.94\r\nxcit_small_24_p16_384,384,1685.35,1215.11,2048,47.67,26.72,68.58\r\nmaxvit_rmlp_base_rw_224,224,1664.59,307.539,512,116.14,23.15,92.64\r\nnaflexvit_base_patch16_siglip,384,1655.72,1236.857,2048,92.93,56.12,102.2\r\nvit_base_patch16_18x2_224,224,1651.82,1239.788,2048,256.73,52.51,71.38\r\nsenet154,224,1631.27,1255.421,2048,115.09,20.77,38.69\r\nlegacy_senet154,224,1628.22,1257.754,2048,115.09,20.77,38.69\r\nvit_small_patch14_dinov2,518,1626.26,1259.264,2048,22.06,46.76,198.79\r\nnfnet_f2,256,1620.72,1263.564,2048,193.78,33.76,41.85\r\nvit_base_patch16_clip_384,384,1615.72,1267.488,2048,86.86,55.54,101.56\r\nmambaout_base_tall_rw,288,1611.37,1270.906,2048,86.48,26.69,64.04\r\nvit_small_patch14_reg4_dinov2,518,1609.07,1272.717,2048,22.06,46.95,199.77\r\nnaflexvit_base_patch16_gap,384,1608.29,1273.339,2048,86.63,55.86,102.34\r\nnaflexvit_base_patch16_parfac_gap,384,1606.39,1274.845,2048,86.46,55.86,102.34\r\nnaflexvit_base_patch16_par_gap,384,1605.16,1275.817,2048,86.63,55.86,102.34\r\nregnetx_320,224,1601.93,1278.394,2048,107.81,31.81,36.3\r\nswinv2_cr_large_224,224,1600.71,1279.363,2048,196.68,35.1,78.42\r\neca_nfnet_l2,384,1597.35,1282.057,2048,56.72,30.05,68.28\r\nnaflexvit_b
ase_patch16_map,384,1586.95,1290.455,2048,93.72,56.23,102.46\r\nconvformer_s36,384,1585.51,968.71,1536,40.01,22.54,89.62\r\nconvnext_large_mlp,256,1584.08,1292.795,2048,200.13,44.94,56.33\r\nnextvit_large,384,1581.89,1294.58,2048,57.83,32.0,90.76\r\ndm_nfnet_f2,256,1581.77,1294.686,2048,193.78,33.76,41.85\r\ncaformer_s36,384,1581.08,971.426,1536,39.3,26.08,150.33\r\nvit_base_patch16_384,384,1574.64,1300.548,2048,86.86,55.54,101.56\r\ndeit3_base_patch16_384,384,1574.12,1300.981,2048,86.88,55.54,101.56\r\ndeit_base_patch16_384,384,1572.35,1302.444,2048,86.86,55.54,101.56\r\nvit_base_patch16_siglip_gap_384,384,1569.51,1304.792,2048,86.09,55.43,101.3\r\ndeit_base_distilled_patch16_384,384,1569.45,1304.842,2048,87.63,55.65,101.82\r\nvit_base_patch16_siglip_384,384,1557.62,1314.757,2048,93.18,56.12,102.2\r\nvit_large_patch16_224,224,1553.39,1318.334,2048,304.33,61.6,63.52\r\ndeit3_large_patch16_224,224,1552.72,1318.912,2048,304.37,61.6,63.52\r\neva_large_patch14_196,196,1550.45,1320.836,2048,304.14,61.57,63.52\r\nrepvgg_d2se,320,1540.82,1329.09,2048,120.39,66.99,23.42\r\ncoatnet_rmlp_3_rw_224,224,1511.88,126.957,192,165.15,33.56,79.47\r\nefficientnetv2_m,416,1510.11,1356.121,2048,54.14,18.6,67.5\r\ncoatnet_3_rw_224,224,1508.44,127.246,192,181.81,33.44,73.83\r\nswinv2_base_window12to16_192to256,256,1508.44,1357.621,2048,87.92,22.02,84.71\r\nswinv2_base_window16_256,256,1507.9,1358.101,2048,87.92,22.02,84.71\r\nbeit_large_patch16_224,224,1469.02,1394.051,2048,304.43,61.6,63.52\r\nbeit3_large_patch16_224,224,1468.72,1394.349,2048,304.57,61.72,63.52\r\nnfnet_f1,320,1468.38,1394.66,2048,132.63,35.97,46.77\r\neca_nfnet_l3,352,1468.34,1394.706,2048,72.04,32.57,73.12\r\nvit_large_patch16_rope_ape_224,224,1468.18,1394.864,2048,304.37,61.6,63.52\r\nvit_large_patch16_rope_224,224,1466.17,1396.773,2048,304.17,61.6,63.52\r\nbeitv2_large_patch16_224,224,1465.07,1397.827,2048,304.43,61.6,63.52\r\nmaxvit_base_tf_224,224,1454.14,264.035,384,119.47,24.04,95.01\r\nmambaout_base_wide_rw,288,
1452.81,1409.618,2048,94.45,29.39,70.41\r\nresnetrs200,320,1431.76,1430.346,2048,93.21,31.51,67.81\r\ndm_nfnet_f1,320,1427.34,1434.769,2048,132.63,35.97,46.77\r\nregnetz_e8,320,1424.12,1438.011,2048,57.7,15.46,63.94\r\nseresnextaa101d_32x8d,320,1421.52,1440.637,2048,93.59,35.19,69.67\r\nresnetv2_152x2_bit,224,1421.29,1440.88,2048,236.34,46.95,45.11\r\ncoatnet_3_224,224,1409.48,136.184,192,166.97,36.56,79.01\r\nmambaout_base,288,1407.89,1454.587,2048,84.81,26.16,61.08\r\nseresnet269d,288,1388.21,1475.206,2048,113.67,33.65,67.81\r\nmambaout_base_plus_rw,288,1381.12,1482.789,2048,101.66,31.72,74.64\r\nbeit_base_patch16_384,384,1369.52,1495.355,2048,86.74,55.54,101.56\r\nflexivit_large,240,1365.9,1499.305,2048,304.36,70.99,75.39\r\ntiny_vit_21m_512,512,1352.4,189.238,256,21.26,26.93,177.93\r\nconvnext_xlarge,224,1343.26,1524.576,2048,350.2,60.98,57.5\r\nefficientnet_b5,416,1338.31,191.243,256,30.39,8.27,80.68\r\nvit_large_patch16_rope_mixed_ape_224,224,1331.71,1537.807,2048,304.4,61.6,68.34\r\nvit_large_patch16_rope_mixed_224,224,1325.54,1544.965,2048,304.2,61.6,68.34\r\nefficientnetv2_rw_m,416,1320.86,1550.435,2048,53.24,21.49,79.62\r\nconvnext_base,384,1299.41,1182.0,1536,88.59,45.21,84.49\r\nmaxxvitv2_rmlp_large_rw_224,224,1286.54,795.876,1024,215.42,44.14,87.15\r\nresnetv2_101x1_bit,448,1275.85,1605.14,2048,44.54,31.65,64.93\r\nxcit_small_24_p8_224,224,1268.9,1613.922,2048,47.63,35.81,90.78\r\nnasnetalarge,331,1263.16,405.283,512,88.75,23.89,90.56\r\nconvnext_large,288,1247.94,1641.034,2048,197.77,56.87,71.29\r\nefficientvit_l3,320,1247.58,205.152,256,246.04,56.32,79.34\r\npnasnet5large,331,1241.67,618.449,768,86.06,25.04,92.89\r\nmaxvit_tiny_tf_384,384,1237.99,155.046,192,30.98,17.53,123.42\r\ndavit_huge,224,1235.72,621.443,768,348.92,61.23,81.32\r\nvit_large_r50_s32_384,384,1222.0,1675.865,2048,329.09,57.43,76.52\r\nvolo_d4_224,224,1220.86,1677.436,2048,192.96,44.34,80.22\r\nconvnextv2_base,384,1217.54,1261.495,1536,88.72,45.21,84.49\r\ninception_next_base,384,121
0.89,1691.255,2048,86.67,43.64,75.48\r\nvit_large_patch16_siglip_gap_256,256,1194.12,1715.011,2048,303.36,80.8,88.34\r\nvit_large_patch16_siglip_256,256,1194.02,1715.154,2048,315.96,81.34,88.88\r\nvit_large_patch14_clip_224,224,1189.11,1722.242,2048,304.2,81.08,88.79\r\nvit_large_patch14_clip_quickgelu_224,224,1188.79,1722.692,2048,303.97,81.08,88.79\r\ntresnet_l,448,1180.19,867.6,1024,55.99,43.59,47.56\r\nconvnextv2_large,288,1178.08,1738.351,2048,197.96,56.87,71.29\r\nvit_large_patch14_xp_224,224,1174.78,1743.24,2048,304.06,81.01,88.79\r\nxcit_medium_24_p16_384,384,1154.9,1773.241,2048,84.4,47.39,91.64\r\nvit_large_patch14_224,224,1154.38,1774.049,2048,304.2,81.08,88.79\r\naimv2_large_patch14_224,224,1153.43,1775.505,2048,309.2,82.3,85.2\r\nefficientnet_b5,448,1150.02,222.559,256,30.39,9.59,93.56\r\nefficientnet_x_b5,448,1143.87,671.353,768,33.44,23.35,68.87\r\nvit_base_patch8_224,224,1138.02,1799.543,2048,86.58,78.22,161.69\r\nresnest200e,320,1129.48,1813.149,2048,70.2,35.69,82.78\r\necaresnet269d,320,1123.53,1822.752,2048,102.09,41.53,83.69\r\nconvformer_m36,384,1113.51,919.544,1024,57.05,37.87,123.56\r\ntf_efficientnetv2_m,480,1106.63,1387.939,1536,54.14,24.76,89.84\r\ncaformer_m36,384,1104.71,926.867,1024,56.2,42.11,196.35\r\nhiera_large_224,224,1102.84,1856.946,2048,213.74,40.34,83.37\r\nconvmixer_1024_20_ks9_p14,224,1098.23,1864.751,2048,24.38,5.55,5.51\r\nresnetv2_101x3_bit,224,1094.04,1871.898,2048,387.93,71.23,48.7\r\nregnety_640,224,1092.32,1874.849,2048,281.38,64.16,42.5\r\nresnetrs350,288,1066.35,1920.516,2048,163.96,43.67,87.09\r\nvitamin_large_224,224,1060.27,241.392,256,333.32,75.05,112.83\r\nvitamin_large2_224,224,1057.42,242.032,256,333.58,75.05,112.83\r\nswinv2_cr_small_384,384,1054.27,1456.864,1536,49.7,29.7,298.03\r\nefficientnet_h_b5,448,1034.2,494.999,512,45.88,27.16,73.9\r\nvit_base_r50_s16_384,384,1032.95,1982.6,2048,98.95,67.43,135.03\r\nmaxvit_large_tf_224,224,1020.78,250.74,256,211.79,43.68,127.35\r\nregnety_160,384,1018.4,2010.924,2048,
83.59,46.87,67.67\r\nxcit_tiny_24_p8_384,384,1010.42,2026.813,2048,12.11,27.05,132.95\r\nconvnext_large_mlp,320,1009.95,1520.801,1536,200.13,70.21,88.02\r\ntf_efficientnet_b5,456,1009.26,253.614,256,30.39,10.46,98.86\r\nvit_so150m_patch16_reg4_gap_384,384,1006.77,2034.163,2048,134.42,87.97,165.47\r\nefficientnetv2_l,384,1006.18,2035.368,2048,118.52,36.1,101.16\r\ntf_efficientnetv2_l,384,987.82,2073.174,2048,118.52,36.1,101.16\r\nresnet50x16_clip_gap,384,976.15,2097.962,2048,136.2,70.32,100.64\r\nvit_large_patch16_dinov3_qkvb,256,974.24,2102.086,2048,303.13,82.43,90.56\r\nvit_large_patch16_dinov3,256,972.75,2105.301,2048,303.08,82.43,90.56\r\neva02_large_patch14_224,224,971.12,2108.842,2048,303.27,81.15,97.2\r\neva02_large_patch14_clip_224,224,968.24,2115.107,2048,304.11,81.18,97.2\r\nnaflexvit_so150m2_patch16_reg1_gap,384,967.83,2116.002,2048,136.06,89.53,178.22\r\nvolo_d2_384,384,963.94,2124.557,2048,58.87,46.17,184.51\r\nnaflexvit_so150m2_patch16_reg1_map,384,954.08,2146.505,2048,142.46,90.33,179.2\r\nvit_so150m2_patch16_reg1_gap_384,384,949.76,2156.26,2048,136.33,89.53,178.22\r\nresnet50x16_clip,384,939.62,2179.519,2048,167.33,74.9,103.54\r\nmvitv2_large,224,932.98,2195.043,2048,217.99,43.87,112.02\r\necaresnet269d,352,923.36,2217.922,2048,102.09,50.25,101.25\r\nresnetrs270,352,895.34,2287.329,2048,129.86,51.13,105.48\r\neca_nfnet_l3,448,893.44,2292.201,2048,72.04,52.55,118.4\r\nswinv2_large_window12to16_192to256,256,882.7,2320.084,2048,196.74,47.81,121.53\r\ncoat_lite_medium_384,384,874.56,1756.24,1536,44.57,28.73,116.7\r\nefficientvit_l3,384,872.58,219.998,192,246.04,81.08,114.02\r\ncoatnet_4_224,224,871.7,220.215,192,275.43,62.48,129.26\r\nxcit_medium_24_p8_224,224,865.37,2366.546,2048,84.32,63.53,121.23\r\nvit_so400m_patch14_siglip_gap_224,224,843.93,2426.679,2048,412.44,109.57,106.13\r\nvit_so400m_patch14_siglip_224,224,843.85,2426.898,2048,427.68,110.26,106.73\r\nnfnet_f2,352,842.85,2429.789,2048,193.78,63.22,79.06\r\nvit_so400m_patch16_siglip_gap_256,256,8
42.7,2430.211,2048,412.65,109.62,106.13\r\nvit_so400m_patch16_siglip_256,256,838.97,2441.011,2048,427.89,110.31,106.73\r\nvit_base_patch16_siglip_gap_512,512,835.3,2451.743,2048,86.43,107.0,246.15\r\nvit_base_patch16_siglip_512,512,828.66,2471.4,2048,93.52,108.22,247.74\r\ncoatnet_rmlp_2_rw_384,384,822.79,116.629,96,73.88,47.69,209.43\r\ndm_nfnet_f2,352,821.28,2493.59,2048,193.78,63.22,79.06\r\nxcit_small_12_p8_384,384,819.16,2500.044,2048,26.21,54.92,138.29\r\nvitamin_large_256,256,816.63,235.053,192,333.38,99.0,154.99\r\nvitamin_large2_256,256,814.64,235.639,192,333.64,99.0,154.99\r\nconvnext_xlarge,288,810.53,1894.99,1536,350.2,100.8,95.05\r\ncait_xxs24_384,384,809.02,2531.395,2048,12.03,9.63,122.66\r\nmvitv2_large_cls,224,798.28,2565.462,2048,234.58,42.17,111.69\r\nmaxvit_small_tf_384,384,792.07,161.566,128,69.02,35.87,183.65\r\nmambaout_base_plus_rw,384,785.58,1303.418,1024,101.66,56.39,132.7\r\nvit_pe_spatial_base_patch16_512,512,784.18,2611.569,2048,86.43,107.13,246.54\r\nresmlp_big_24_224,224,782.9,2615.852,2048,129.14,100.23,87.31\r\nnfnet_f3,320,782.35,2617.686,2048,254.92,68.77,83.93\r\ntresnet_xl,448,781.7,982.407,768,78.44,60.77,61.31\r\nconvformer_b36,384,780.57,983.839,768,99.88,66.67,164.75\r\nvolo_d5_224,224,780.53,2623.777,2048,295.46,72.4,118.11\r\ncaformer_b36,384,776.73,988.696,768,98.75,72.33,261.79\r\nswin_base_patch4_window12_384,384,770.73,2657.153,2048,87.9,47.19,134.78\r\ndm_nfnet_f3,320,763.21,2683.342,2048,254.92,68.77,83.93\r\neva02_base_patch14_448,448,761.61,2688.976,2048,87.12,107.11,259.14\r\nconvnextv2_huge,224,748.68,2051.555,1536,660.29,115.0,79.07\r\nswinv2_cr_base_384,384,742.5,2068.618,1536,87.88,50.57,333.68\r\nresnetrs420,320,728.37,2811.714,2048,191.89,64.2,126.56\r\nefficientnetv2_xl,384,713.05,2872.092,2048,208.12,52.81,139.2\r\nseresnextaa201d_32x8d,320,711.62,2877.885,2048,149.39,70.22,138.71\r\nconvnext_large,384,707.42,1447.448,1024,197.77,101.1,126.74\r\nconvnext_large_mlp,384,707.02,1448.273,1024,200.13,101.11,126.7
4\r\ntf_efficientnetv2_xl,384,701.79,2918.159,2048,208.12,52.81,139.2\r\nefficientnet_x_b5,576,693.27,738.467,512,33.44,38.59,113.83\r\nresnext101_32x32d,224,688.79,2973.264,2048,468.53,87.29,91.12\r\nmaxvit_tiny_tf_512,512,685.68,139.961,96,31.05,33.49,257.59\r\nconvnextv2_large,384,669.38,1529.71,1024,197.96,101.1,126.74\r\nvit_so150m2_patch16_reg1_gap_448,448,661.4,3096.399,2048,136.5,127.51,287.05\r\nmaxxvitv2_rmlp_base_rw_384,384,653.61,587.442,384,116.09,72.98,213.74\r\nefficientnetv2_l,480,643.08,2388.451,1536,118.52,56.4,157.99\r\nvitamin_xlarge_256,256,641.17,199.585,128,436.06,130.13,177.37\r\ntf_efficientnetv2_l,480,633.41,1616.567,1024,118.52,56.4,157.99\r\nxcit_large_24_p16_384,384,632.14,3239.703,2048,189.1,105.35,137.17\r\nefficientnet_b6,528,626.37,204.307,128,43.04,19.4,167.39\r\nefficientnet_h_b5,576,623.52,615.792,384,45.88,44.9,122.13\r\nswinv2_cr_huge_224,224,620.18,3302.177,2048,657.83,115.97,121.08\r\nfocalnet_huge_fl3,224,610.04,2517.784,1536,745.28,118.26,104.8\r\ncait_xs24_384,384,608.94,3363.17,2048,26.67,19.28,183.98\r\nregnety_320,384,601.84,2552.1,1536,145.05,95.0,88.87\r\nvit_huge_patch14_clip_quickgelu_224,224,598.67,3420.813,2048,632.08,167.4,139.41\r\nvit_huge_patch14_clip_224,224,597.71,3426.322,2048,632.05,167.4,139.41\r\nresnetrs350,384,596.94,3430.767,2048,163.96,77.59,154.74\r\nvit_base_patch14_dinov2,518,596.24,3434.811,2048,86.58,151.71,397.58\r\nvit_huge_patch14_gap_224,224,595.16,3441.041,2048,630.76,166.73,138.74\r\nvit_huge_patch14_xp_224,224,594.45,3445.124,2048,631.8,167.3,139.41\r\nrdnet_large,384,593.06,863.234,512,186.27,102.09,137.13\r\nvit_base_patch14_reg4_dinov2,518,592.31,3457.564,2048,86.58,152.25,399.53\r\ndeit3_huge_patch14_224,224,587.06,3488.52,2048,632.13,167.4,139.41\r\nvit_huge_patch14_224,224,583.89,3507.445,2048,630.76,167.4,139.41\r\nmaxvit_xlarge_tf_224,224,582.53,329.55,192,506.99,97.52,191.04\r\ntf_efficientnet_b6,528,582.32,219.771,128,43.04,19.4,167.39\r\naimv2_huge_patch14_224,224,578.93,3537.50
2,2048,680.85,179.01,126.22\r\nsam2_hiera_tiny,896,563.05,113.621,64,26.85,99.86,384.63\r\nregnety_1280,224,544.77,3759.342,2048,644.81,127.66,71.58\r\ncait_xxs36_384,384,539.01,3799.513,2048,17.37,14.35,183.7\r\nmaxvit_rmlp_base_rw_384,384,520.78,368.635,192,116.14,70.97,318.95\r\nhiera_huge_224,224,518.29,3951.421,2048,672.78,124.85,150.95\r\nvit_large_patch14_clip_quickgelu_336,336,516.18,3967.518,2048,304.29,191.11,270.24\r\nvit_large_patch14_clip_336,336,515.15,3975.472,2048,304.53,191.11,270.24\r\neva_large_patch14_336,336,501.11,4086.825,2048,304.53,191.1,270.24\r\nvit_large_patch16_384,384,500.82,4089.194,2048,304.72,191.21,270.24\r\ndeit3_large_patch16_384,384,500.44,4092.314,2048,304.76,191.21,270.24\r\nvit_large_patch16_siglip_gap_384,384,500.38,4092.811,2048,303.69,190.85,269.55\r\nvit_large_patch16_siglip_384,384,498.84,4105.472,2048,316.28,192.07,270.75\r\ncait_s24_384,384,497.86,4113.509,2048,47.06,32.17,245.31\r\nvit_giant_patch16_gap_224,224,492.87,4155.195,2048,1011.37,202.46,139.26\r\nvolo_d3_448,448,490.21,4177.764,2048,86.63,96.33,446.83\r\naimv2_large_patch14_336,336,488.43,4192.943,2048,309.53,194.22,227.08\r\nseresnextaa201d_32x8d,384,488.29,4194.157,2048,149.39,101.11,199.72\r\nresnest269e,416,486.86,4206.483,2048,110.93,77.69,171.98\r\nvit_pe_core_large_patch14_336,336,483.38,4236.797,2048,317.15,192.33,271.43\r\ncoatnet_5_224,224,480.28,266.466,128,687.47,145.49,194.24\r\nresnetv2_152x4_bit,224,479.82,4268.219,2048,936.53,186.9,90.22\r\nresnetv2_152x2_bit,384,473.45,4325.596,2048,236.34,136.16,132.56\r\nxcit_large_24_p8_224,224,472.71,4332.373,2048,188.93,141.23,181.56\r\ndavit_giant,224,471.52,1628.693,768,1406.47,192.92,153.06\r\nsam2_hiera_small,896,469.01,136.417,64,33.95,123.99,442.63\r\nconvnext_xxlarge,256,467.39,2190.831,1024,846.47,198.09,124.45\r\nresnetv2_50x3_bit,448,463.32,2210.079,1024,217.32,145.7,133.37\r\nconvnext_xlarge,384,460.52,1667.619,768,350.2,179.2,168.99\r\nmaxvit_base_tf_384,384,458.07,279.391,128,119.65,73.8,332
.9\r\nvitamin_large_336,336,455.91,210.51,96,333.57,175.72,307.47\r\nvitamin_large2_336,336,453.83,211.481,96,333.83,175.72,307.47\r\nconvnextv2_huge,288,449.45,2278.256,1024,660.29,190.1,130.7\r\nbeit_large_patch16_384,384,444.15,4610.994,2048,305.0,191.21,270.24\r\nswin_large_patch4_window12_384,384,440.96,4644.307,2048,196.74,104.08,202.16\r\nmaxvit_small_tf_512,512,438.94,145.758,64,69.13,67.26,383.77\r\nnfnet_f3,416,436.1,4696.096,2048,254.92,115.58,141.78\r\neva02_large_patch14_clip_336,336,433.24,4727.147,2048,304.43,191.34,289.13\r\nswinv2_cr_large_384,384,428.52,2389.559,1024,196.68,108.96,404.96\r\nxcit_small_24_p8_384,384,428.28,4781.842,2048,47.63,105.24,265.91\r\nresnetrs420,416,427.03,4795.839,2048,191.89,108.45,213.79\r\nswinv2_base_window12to24_192to384,384,420.69,1825.502,768,87.92,55.25,280.36\r\nnfnet_f4,384,419.63,4880.461,2048,316.07,122.14,147.57\r\ndm_nfnet_f3,416,419.13,4886.264,2048,254.92,115.58,141.78\r\ndm_nfnet_f4,384,409.77,4997.839,2048,316.07,122.14,147.57\r\nvit_huge_plus_patch16_dinov3,256,402.56,5087.395,2048,840.51,224.88,193.59\r\nvit_huge_plus_patch16_dinov3_qkvb,256,402.37,5089.724,2048,840.59,224.88,193.59\r\nefficientnetv2_xl,512,401.62,2549.578,1024,208.12,93.85,247.32\r\ntf_efficientnetv2_xl,512,395.76,2587.357,1024,208.12,93.85,247.32\r\nvit_giant_patch14_clip_224,224,383.99,5333.456,2048,1012.65,267.18,192.64\r\nmvitv2_huge_cls,224,381.37,4027.556,1536,694.8,120.67,243.63\r\neva_giant_patch14_224,224,377.92,5419.013,2048,1012.56,267.18,192.64\r\nvit_giant_patch14_224,224,377.77,5421.19,2048,1012.61,267.18,192.64\r\neva_giant_patch14_clip_224,224,375.98,5447.091,2048,1012.59,267.18,192.64\r\nnaflexvit_so400m_patch16_siglip,384,369.64,5540.407,2048,427.89,259.65,319.77\r\nefficientnet_b7,600,365.07,262.916,96,66.35,38.33,289.94\r\nvitamin_xlarge_336,336,360.87,265.965,96,436.06,230.18,347.33\r\nbeit3_giant_patch14_224,224,358.85,5707.101,2048,1013.22,267.56,192.64\r\nvit_so400m_patch16_siglip_gap_384,384,358.82,5707.515,204
8,413.02,258.11,318.42\r\naimv2_1b_patch14_224,224,356.77,5740.341,2048,1234.96,322.43,170.39\r\nvit_so400m_patch16_siglip_384,384,356.57,5743.616,2048,428.26,259.65,319.77\r\nvit_giantopt_patch16_siglip_gap_256,256,354.83,5771.78,2048,1134.84,298.42,199.62\r\nvit_giantopt_patch16_siglip_256,256,353.32,5796.306,2048,1163.17,299.66,200.43\r\nregnety_640,384,351.85,2910.287,1024,281.38,188.47,124.83\r\nresnetv2_152x2_bit,448,349.4,2930.689,1024,236.34,184.99,180.43\r\nvitamin_large_384,384,346.77,184.511,64,333.71,234.44,440.16\r\nvitamin_large2_384,384,345.4,185.237,64,333.97,234.44,440.16\r\ntf_efficientnet_b7,600,344.77,278.412,96,66.35,38.33,289.94\r\nresnet50x64_clip_gap,448,338.72,6046.154,2048,365.03,253.96,233.22\r\nfocalnet_huge_fl4,224,337.63,4549.258,1536,686.46,118.9,113.34\r\ncait_s36_384,384,330.92,6188.696,2048,68.37,47.99,367.4\r\nresnet50x64_clip,448,328.24,6239.183,2048,420.38,265.02,239.13\r\nmaxvit_large_tf_384,384,323.14,297.037,96,212.03,132.55,445.84\r\nnfnet_f5,416,297.64,6880.785,2048,377.21,170.71,204.56\r\nxcit_medium_24_p8_384,384,291.74,5264.895,1536,84.32,186.67,354.73\r\ndm_nfnet_f5,416,286.04,7159.746,2048,377.21,170.71,204.56\r\ndavit_base_fl,768,285.29,672.949,192,90.37,190.32,530.15\r\nvolo_d4_448,448,284.81,7190.736,2048,193.41,197.13,527.35\r\nvit_so400m_patch14_siglip_gap_378,378,276.52,7406.331,2048,412.99,333.46,451.19\r\nvit_so400m_patch14_siglip_gap_384,384,276.25,7413.607,2048,412.99,333.46,451.19\r\nvit_so400m_patch14_siglip_378,378,275.26,7440.287,2048,428.23,335.4,452.89\r\nvit_so400m_patch14_siglip_384,384,275.04,7446.241,2048,428.23,335.4,452.89\r\nvitamin_xlarge_384,384,274.12,233.427,64,436.06,306.38,493.46\r\nvit_large_patch16_siglip_gap_512,512,269.13,7609.645,2048,304.15,361.84,655.36\r\nvit_large_patch16_siglip_512,512,267.86,7645.797,2048,316.74,364.0,657.48\r\nresnetv2_101x3_bit,448,266.3,3845.148,1024,387.93,280.33,194.78\r\nvit_intern300m_patch14_448,448,263.56,7770.461,2048,304.01,362.05,656.39\r\naimv2_large_
patch14_448,448,260.86,7850.883,2048,309.98,367.84,491.78\r\nvit_pe_lang_large_patch14_448,448,260.5,7861.754,2048,291.42,346.99,629.09\r\nswinv2_large_window12to24_192to384,384,260.06,1968.699,512,196.74,116.15,407.83\r\nconvnextv2_huge,384,256.6,1995.27,512,660.29,337.96,232.35\r\nvit_huge_patch14_clip_336,336,256.52,7983.717,2048,632.46,390.97,407.54\r\nvit_pe_spatial_large_patch14_448,448,256.44,7986.064,2048,303.96,362.05,656.39\r\nmaxvit_base_tf_512,512,253.84,252.088,64,119.88,138.02,703.99\r\naimv2_huge_patch14_336,336,251.84,8132.024,2048,681.34,416.36,337.08\r\nsam2_hiera_base_plus,896,242.76,263.578,64,68.68,227.48,828.88\r\neva02_large_patch14_448,448,230.26,8894.042,2048,305.08,362.33,689.95\r\nefficientnet_b8,672,229.2,418.804,96,87.41,63.48,442.89\r\nnfnet_f4,512,223.78,9151.978,2048,316.07,216.26,262.26\r\nbeit_large_patch16_512,512,223.22,9174.863,2048,305.67,362.24,656.39\r\nvit_gigantic_patch14_clip_quickgelu_224,224,219.7,9321.819,2048,1844.91,483.96,275.37\r\nvit_gigantic_patch14_clip_224,224,219.46,9332.008,2048,1844.91,483.96,275.37\r\ndm_nfnet_f4,512,218.83,9358.641,2048,316.07,216.26,262.26\r\ntf_efficientnet_b8,672,218.61,439.098,96,87.41,63.48,442.89\r\nfocalnet_large_fl3,384,216.7,4725.456,1024,239.13,105.06,168.04\r\nfocalnet_large_fl4,384,216.08,4738.832,1024,239.32,105.2,181.78\r\nvit_gigantic_patch14_224,224,215.65,9496.879,2048,1844.44,483.95,275.37\r\nnfnet_f6,448,215.6,9499.156,2048,438.36,229.7,273.62\r\ndm_nfnet_f6,448,207.46,9871.54,2048,438.36,229.7,273.62\r\nvit_huge_patch14_clip_quickgelu_378,378,200.74,10201.927,2048,632.68,503.79,572.79\r\nvit_huge_patch14_clip_378,378,200.53,10213.002,2048,632.68,503.79,572.79\r\nregnety_1280,384,195.71,3924.051,768,644.81,374.99,210.2\r\nvit_large_patch14_dinov2,518,193.74,10571.042,2048,304.37,507.15,1058.82\r\nvit_large_patch14_reg4_dinov2,518,193.07,10607.289,2048,304.37,508.9,1064.02\r\nvit_so400m_patch14_siglip_gap_448,448,190.94,10726.066,2048,413.33,487.18,764.26\r\nvit_so400m_patc
h16_siglip_gap_512,512,190.54,10748.053,2048,413.53,487.4,764.26\r\nvit_so400m_patch16_siglip_512,512,189.66,10798.358,2048,428.77,490.13,766.65\r\nmaxvit_xlarge_tf_384,384,186.14,343.775,64,475.32,292.78,668.76\r\nswinv2_cr_giant_224,224,185.84,11020.053,2048,2598.76,483.85,309.15\r\nvolo_d5_448,448,184.74,11086.039,2048,295.91,315.06,737.92\r\nvit_huge_patch16_gap_448,448,179.2,11428.198,2048,631.67,544.7,636.83\r\nmaxvit_large_tf_512,512,178.79,268.419,48,212.33,244.75,942.15\r\nswinv2_cr_huge_384,384,176.57,2899.696,512,657.94,352.04,583.18\r\ncait_m36_384,384,175.9,11642.718,2048,271.22,173.11,734.81\r\naimv2_3b_patch14_224,224,167.54,12224.0,2048,2720.66,705.91,252.44\r\nnfnet_f5,544,165.52,12373.368,2048,377.21,290.97,349.71\r\ndm_nfnet_f5,544,165.0,9308.986,1536,377.21,290.97,349.71\r\nnfnet_f7,480,164.79,12427.667,2048,499.5,300.08,355.86\r\nfocalnet_xlarge_fl3,384,163.52,4696.559,768,408.79,185.61,223.99\r\neva_giant_patch14_336,336,163.13,12554.123,2048,1013.01,620.64,550.67\r\nxcit_large_24_p8_384,384,160.92,6363.148,1024,188.93,415.0,531.82\r\nfocalnet_xlarge_fl4,384,160.18,4794.423,768,409.03,185.79,242.31\r\nbeit3_giant_patch14_336,336,155.65,13157.247,2048,1013.67,621.52,550.67\r\nvit_giantopt_patch16_siglip_gap_384,384,152.96,13389.065,2048,1135.33,694.1,567.12\r\nvit_giantopt_patch16_siglip_384,384,152.43,13435.936,2048,1163.66,696.85,568.91\r\naimv2_1b_patch14_336,336,150.65,13593.918,2048,1235.61,743.59,454.16\r\nconvmixer_1536_20,224,148.45,6897.964,1024,51.63,48.68,33.03\r\ntf_efficientnet_l2,475,147.31,434.396,64,480.31,172.11,609.89\r\nconvnextv2_huge,512,145.09,1764.338,256,660.29,600.81,413.07\r\nvolo_d5_512,512,141.63,10845.427,1536,296.09,425.09,1105.37\r\naimv2_huge_patch14_448,448,136.7,14982.187,2048,682.03,774.02,731.38\r\nnfnet_f6,576,128.19,15975.882,2048,438.36,378.69,452.2\r\ndm_nfnet_f6,576,125.3,16344.966,2048,438.36,378.69,452.2\r\nregnety_2560,384,105.15,4869.312,512,1282.6,747.83,296.49\r\nresnetv2_152x4_bit,480,104.37,4905.5
46,512,936.53,844.84,414.26\r\ndavit_huge_fl,768,103.9,1847.808,192,360.64,744.84,1060.3\r\nmobilenetv5_300m,768,103.27,619.68,64,294.13,435.74,842.16\r\nmaxvit_xlarge_tf_512,512,102.93,310.833,32,475.77,534.14,1413.22\r\nmobilenetv5_300m_enc,768,101.62,629.71,64,294.13,435.74,842.16\r\nnfnet_f7,608,100.73,15248.389,1536,499.5,480.39,570.85\r\neva02_enormous_patch14_clip_224,224,97.44,21018.442,2048,4350.56,1132.46,497.58\r\naimv2_1b_patch14_448,448,82.19,18687.474,1536,1236.53,1367.03,983.56\r\ncait_m48_448,448,80.08,19180.239,1536,356.46,329.41,1708.23\r\nvit_gigantic_patch14_clip_378,378,75.09,27273.721,2048,1845.7,1429.82,1047.37\r\nsam2_hiera_large,1024,73.69,651.282,48,212.15,907.48,2190.34\r\naimv2_3b_patch14_336,336,73.25,20968.814,1536,2721.64,1615.48,674.17\r\nvit_7b_patch16_dinov3,256,64.24,31878.179,2048,6716.03,1775.1,515.87\r\nsamvit_base_patch16,1024,62.19,1029.054,64,89.67,486.43,1343.27\r\nvit_giant_patch14_dinov2,518,60.05,25577.994,1536,1136.48,1784.2,2757.89\r\nvit_giant_patch14_reg4_dinov2,518,59.89,25647.745,1536,1136.48,1790.08,2771.21\r\nvit_pe_lang_gigantic_patch14_448,448,55.54,27655.265,1536,1740.92,1931.99,1664.88\r\nefficientnet_l2,800,54.48,440.494,24,480.31,479.12,1707.39\r\neva_giant_patch14_560,560,53.55,19122.655,1024,1014.45,1906.76,2577.17\r\nswinv2_cr_giant_384,384,53.18,4813.585,256,2598.76,1450.71,1394.86\r\nvit_pe_core_gigantic_patch14_448,448,52.94,29012.481,1536,1882.03,2060.12,1774.21\r\ntf_efficientnet_l2,800,52.54,456.714,24,480.31,479.12,1707.39\r\nvit_pe_spatial_gigantic_patch14_448,448,52.25,29396.895,1536,1851.89,2055.25,1771.04\r\naimv2_3b_patch14_448,448,40.32,25397.062,1024,2723.02,2939.61,1462.76\r\nvit_so400m_patch14_siglip_gap_896,896,34.21,22451.103,768,416.87,2731.49,8492.88\r\nsamvit_large_patch16,1024,29.24,2189.034,64,308.28,1493.86,2553.78\r\nsamvit_huge_patch16,1024,18.79,2554.979,48,637.03,2982.23,3428.16\r\n"
  },
  {
    "path": "results/benchmark-infer-bf16-nchw-pt291-cu130-pro6000maxq-dynamo.csv",
    "content": "model,infer_img_size,infer_samples_per_sec,infer_step_time,infer_batch_size,param_count,infer_gmacs,infer_macts\r\ntest_vit,160,662619.72,3.073,2048,0.37,0.04,0.48\r\ntest_vit2,160,521634.79,3.889,2048,0.46,0.05,0.64\r\ntest_convnext,160,269062.6,7.579,2048,0.27,0.03,0.58\r\ntest_vit3,160,267252.2,7.646,2048,0.93,0.09,1.0\r\ntest_mambaout,160,266884.81,7.654,2048,0.45,0.03,0.53\r\ntest_byobnet,160,262429.17,7.77,2048,0.46,0.03,0.43\r\ntest_convnext3,160,244053.32,8.376,2048,0.47,0.05,0.63\r\ntest_convnext2,160,237774.53,8.596,2048,0.48,0.05,0.63\r\ntest_efficientnet,160,208670.93,9.8,2048,0.36,0.06,0.55\r\ntest_efficientnet_ln,160,208371.79,9.807,2048,0.36,0.06,0.55\r\ntest_vit4,160,202739.56,10.085,2048,1.02,0.11,1.07\r\ntest_efficientnet_evos,160,188116.33,10.87,2048,0.36,0.06,0.55\r\ntinynet_e,106,185800.52,11.003,2048,2.04,0.03,0.69\r\ntest_mambaout,192,183387.18,11.146,2048,0.45,0.04,0.77\r\ntest_efficientnet_gn,160,182171.75,11.226,2048,0.36,0.06,0.55\r\ntest_resnet,160,162814.92,12.553,2048,0.47,0.1,0.64\r\nmobilenetv4_conv_small_035,224,153849.32,13.278,2048,1.91,0.05,0.98\r\nefficientvit_m0,224,150157.87,13.602,2048,2.33,0.08,0.91\r\nmobilenetv4_conv_small_050,224,134304.06,15.228,2048,2.24,0.07,1.18\r\nmobilenetv3_small_050,224,129551.62,15.782,2048,1.59,0.03,0.92\r\nlcnet_035,224,125947.3,16.235,2048,1.64,0.03,1.04\r\nmobilenetv4_conv_small_035,256,114678.27,17.817,2048,1.91,0.06,1.28\r\nefficientvit_m1,224,111710.27,18.317,2048,2.96,0.17,1.33\r\nshvit_s1,224,108061.34,18.931,2048,6.31,0.24,1.39\r\nlcnet_050,224,107759.5,18.972,2048,1.88,0.05,1.26\r\ntf_mobilenetv3_small_minimal_100,224,106059.5,19.286,2048,2.04,0.06,1.41\r\nefficientvit_m2,224,103182.67,19.825,2048,4.17,0.2,1.47\r\ntest_nfnet,160,100146.26,20.423,2048,0.38,0.29,1.2\r\nstarnet_s050,224,99810.01,20.479,2048,0.54,0.09,1.57\r\nmobilenetv4_conv_small_050,256,98839.74,20.691,2048,2.24,0.09,1.55\r\nmobilenetv3_small_075,224,95986.79,21.302,2048,2.04,0.05,1.3\r\nefficientvit_m3,2
24,93919.19,21.787,2048,6.88,0.26,1.62\r\nefficientvit_m4,224,90608.57,22.582,2048,8.78,0.3,1.7\r\nmobilenetv3_small_100,224,87301.9,23.439,2048,2.54,0.06,1.42\r\nshvit_s2,224,85765.59,23.846,2048,11.45,0.37,1.6\r\ntinynet_d,152,84074.74,24.335,2048,2.34,0.05,1.42\r\nmobilenetv4_conv_small,224,82273.51,24.873,2048,3.77,0.19,1.97\r\ntf_mobilenetv3_small_075,224,79702.91,25.658,2048,2.04,0.05,1.3\r\nlevit_conv_128s,224,76101.5,26.889,2048,7.76,0.3,1.88\r\nlcnet_075,224,76066.11,26.904,2048,2.36,0.1,1.99\r\nlevit_128s,224,76031.39,26.912,2048,7.76,0.3,1.88\r\ntf_mobilenetv3_small_100,224,73078.85,27.987,2048,2.54,0.06,1.42\r\nrepghostnet_050,224,72675.64,28.152,2048,2.31,0.05,2.02\r\nresnet10t,176,69892.6,29.262,2048,5.44,0.7,1.51\r\nmnasnet_small,224,66380.57,30.831,2048,2.03,0.07,2.16\r\nregnetx_002,224,63906.2,32.015,2048,2.68,0.2,2.16\r\nshvit_s3,224,61983.32,33.019,2048,14.21,0.6,2.33\r\nlcnet_100,224,61696.36,33.175,2048,2.95,0.16,2.52\r\nefficientvit_m5,224,61146.6,33.465,2048,12.44,0.52,2.41\r\nmobilenetv4_conv_small,256,60855.28,33.63,2048,3.77,0.25,2.57\r\nstarnet_s100,224,60506.19,33.808,2048,1.04,0.19,2.68\r\nstarnet_s150,224,60294.62,33.94,2048,1.56,0.23,2.75\r\nrepghostnet_058,224,60255.97,33.967,2048,2.54,0.06,2.59\r\nghostnet_050,224,58968.23,34.686,2048,2.59,0.05,1.77\r\nresnet18,160,58882.18,34.742,2048,11.69,0.93,1.27\r\nfasternet_t0,224,58760.42,34.828,2048,3.91,0.34,1.97\r\nregnety_002,224,58638.14,34.881,2048,3.16,0.2,2.17\r\nmobilenetv2_035,224,58593.43,34.922,2048,1.68,0.07,2.86\r\nconvnext_zepto_rms,224,56314.72,36.346,2048,2.16,0.3,2.75\r\nlevit_conv_128,224,54272.11,37.704,2048,9.19,0.41,2.71\r\nlevit_128,224,52833.21,38.735,2048,9.19,0.41,2.71\r\nmnasnet_050,224,52317.47,39.122,2048,2.22,0.11,3.07\r\nhgnetv2_b0,224,52163.58,39.23,2048,6.0,0.33,2.12\r\nrepghostnet_080,224,51008.72,40.131,2048,3.27,0.1,3.22\r\nvit_small_patch32_224,224,49747.28,41.149,2048,22.88,1.15,2.5\r\nefficientvit_b0,224,49493.69,41.356,2048,3.41,0.1,2.87\r\nconvnext_zep
to_rms_ols,224,48698.75,42.028,2048,2.16,0.34,3.15\r\npit_ti_224,224,47580.95,43.007,2048,4.85,0.7,6.19\r\npit_ti_distilled_224,224,47431.28,43.145,2048,5.1,0.71,6.23\r\nmobilenetv2_050,224,46483.43,44.035,2048,1.97,0.1,3.64\r\nsemnasnet_050,224,46157.5,44.346,2048,2.08,0.11,3.44\r\nrepvgg_a0,224,45466.82,45.02,2048,8.31,1.36,1.79\r\nregnetx_004,224,44673.32,45.82,2048,5.16,0.4,3.14\r\nedgenext_xx_small,256,44652.83,45.847,2048,1.33,0.26,3.33\r\nlevit_conv_192,224,44260.15,46.25,2048,10.92,0.66,3.2\r\ngernet_s,224,43633.19,46.907,2048,8.17,0.75,2.65\r\nresnet10t,224,43565.46,46.989,2048,5.44,1.1,2.43\r\nvit_tiny_r_s16_p8_224,224,43041.26,47.563,2048,6.34,0.44,2.06\r\nregnetx_004_tv,224,42983.26,47.614,2048,5.5,0.42,3.17\r\nlevit_192,224,42268.66,48.42,2048,10.92,0.66,3.2\r\nmixer_s32_224,224,41710.52,49.065,2048,19.1,1.0,2.28\r\nmobileone_s0,224,41548.87,49.271,2048,2.08,0.28,3.79\r\nlcnet_150,224,41326.19,49.53,2048,4.5,0.34,3.79\r\nrepghostnet_100,224,41243.62,49.628,2048,4.06,0.15,3.98\r\nghostnetv3_050,224,41010.08,49.908,2048,2.85,0.05,2.28\r\nfasternet_t1,224,40795.97,50.176,2048,7.6,0.85,3.15\r\ntinynet_c,184,39949.82,51.228,2048,2.46,0.11,2.87\r\nresnet14t,176,39738.43,51.512,2048,10.08,1.07,3.61\r\nhgnetv2_b1,224,38801.95,52.752,2048,6.34,0.49,2.73\r\ncs3darknet_focus_s,256,38437.25,53.259,2048,3.27,0.69,2.7\r\nresnet34,160,38337.63,53.397,2048,21.8,1.87,1.91\r\nstarnet_s2,224,38129.03,53.667,2048,3.68,0.55,4.73\r\nnf_regnet_b0,192,37927.56,53.975,2048,8.76,0.37,3.15\r\nconvnext_atto_rms,224,37874.78,54.05,2048,3.69,0.55,3.81\r\nmobilenetv4_conv_small,320,37811.77,54.138,2048,3.77,0.39,4.01\r\nconvnext_atto,224,37294.73,54.885,2048,3.7,0.55,3.81\r\ncs3darknet_s,256,37279.02,54.898,2048,3.28,0.72,2.97\r\ntf_mobilenetv3_large_minimal_100,224,37016.99,55.307,2048,3.92,0.22,4.4\r\nrepghostnet_111,224,36806.38,55.62,2048,4.52,0.18,4.38\r\nxcit_nano_12_p16_224,224,36544.84,56.009,2048,3.05,0.56,4.17\r\nmobilenetv3_large_075,224,36472.1,56.124,2048,3.99,0.16,4.0\r
\nstarnet_s1,224,36344.48,56.308,2048,2.87,0.42,4.99\r\nvit_tiny_patch16_224,224,35416.58,57.784,2048,5.72,1.26,5.97\r\ndeit_tiny_patch16_224,224,35323.68,57.941,2048,5.72,1.26,5.97\r\nconvnext_atto_ols,224,34910.26,58.63,2048,3.7,0.58,4.11\r\ndeit_tiny_distilled_patch16_224,224,34812.15,58.794,2048,5.91,1.27,6.01\r\nrepvgg_a1,224,34755.4,58.884,2048,12.79,2.36,2.37\r\nmnasnet_075,224,34538.81,59.255,2048,3.17,0.23,4.77\r\ntf_mobilenetv3_large_075,224,34172.24,59.909,2048,3.99,0.16,4.0\r\nedgenext_xx_small,288,34049.74,60.113,2048,1.33,0.33,4.21\r\nregnety_004,224,33904.26,60.375,2048,4.34,0.41,3.89\r\ninception_next_atto,224,33708.79,60.726,2048,4.16,0.5,3.63\r\nlevit_conv_256,224,33647.17,60.834,2048,18.86,1.13,4.23\r\nvit_medium_patch32_clip_224,224,33246.66,61.564,2048,39.69,2.0,3.34\r\nmobilenetv3_rw,224,32849.14,62.323,2048,5.48,0.23,4.41\r\nconvnextv2_atto,224,32813.25,62.383,2048,3.71,0.55,3.81\r\nghostnet_100,224,32766.04,62.472,2048,5.18,0.15,3.55\r\nmobilenetv1_100,224,32583.92,62.831,2048,4.23,0.58,5.04\r\nmobilenetv3_large_100,224,32569.93,62.85,2048,5.48,0.23,4.41\r\nshvit_s4,256,32350.07,63.272,2048,16.55,0.99,3.73\r\nconvnext_femto,224,32016.73,63.937,2048,5.22,0.79,4.57\r\nmobilenetv1_100h,224,31830.09,64.32,2048,5.28,0.63,5.09\r\nresnetv2_18d,224,31724.05,64.518,2048,11.71,2.06,3.29\r\nlevit_256,224,31608.92,64.762,2048,18.86,1.13,4.23\r\nregnety_006,224,31427.14,65.142,2048,6.06,0.61,4.33\r\nhardcorenas_a,224,31394.87,65.201,2048,5.26,0.23,4.38\r\nhgnetv2_b0,288,31365.38,65.263,2048,6.0,0.54,3.51\r\ntf_efficientnetv2_b0,192,31214.16,65.585,2048,7.14,0.54,3.51\r\nrepghostnet_130,224,31005.17,66.014,2048,5.46,0.24,5.24\r\nhardcorenas_b,224,30765.22,66.539,2048,5.18,0.26,5.09\r\nmnasnet_100,224,30695.22,66.688,2048,4.38,0.33,5.46\r\nresnet18d,224,30595.05,66.911,2048,11.71,2.06,3.29\r\nvit_xsmall_patch16_clip_224,224,30369.96,67.405,2048,8.28,1.79,6.65\r\ntf_mobilenetv3_large_100,224,30288.02,67.584,2048,5.48,0.23,4.41\r\nresnetv2_18,224,30090.88,68.
033,2048,11.69,1.82,2.48\r\nconvnext_femto_ols,224,30049.69,68.127,2048,5.23,0.82,4.87\r\nese_vovnet19b_slim_dw,224,29873.59,68.534,2048,1.9,0.4,5.28\r\nhardcorenas_c,224,29873.21,68.524,2048,5.52,0.28,5.01\r\nmobilenet_edgetpu_v2_xs,224,29601.15,69.16,2048,4.46,0.7,4.8\r\nresnet18,224,29404.83,69.613,2048,11.69,1.82,2.48\r\nlevit_conv_256d,224,29240.98,70.008,2048,26.16,1.39,4.93\r\nsemnasnet_075,224,29225.62,52.528,1536,2.91,0.23,5.54\r\nmobilenetv2_075,224,29207.68,52.567,1536,2.64,0.22,5.86\r\nconvnext_atto_rms,256,29007.06,70.568,2048,3.69,0.71,4.98\r\nregnetx_008,224,28950.82,70.712,2048,7.26,0.81,5.15\r\nefficientformerv2_s0,224,28712.48,71.286,2048,3.6,0.41,5.3\r\npit_xs_224,224,28410.75,72.054,2048,10.62,1.4,7.71\r\npit_xs_distilled_224,224,28148.3,72.734,2048,11.0,1.41,7.76\r\nmobilenetv4_conv_medium,224,28096.27,72.863,2048,9.72,0.84,5.8\r\nspnasnet_100,224,28049.1,72.988,2048,4.42,0.35,6.03\r\nconvnextv2_femto,224,27785.31,73.675,2048,5.23,0.79,4.57\r\nstarnet_s3,224,27755.43,73.749,2048,5.75,0.76,6.66\r\nlevit_256d,224,27733.03,73.805,2048,26.16,1.39,4.93\r\nrepvgg_b0,224,27670.72,73.975,2048,14.34,3.06,3.07\r\npvt_v2_b0,224,27167.61,75.349,2048,3.67,0.57,7.99\r\nlegacy_seresnet18,224,27079.59,75.59,2048,11.78,1.82,2.49\r\nmobilenet_edgetpu_100,224,27050.38,75.68,2048,4.09,1.0,5.75\r\nseresnet18,224,27037.44,75.709,2048,11.78,1.82,2.49\r\nhardcorenas_d,224,26845.27,76.255,2048,7.5,0.3,4.93\r\nmobilenetv4_hybrid_medium_075,224,26606.75,76.933,2048,7.31,0.66,5.65\r\ntinynet_b,188,26394.81,77.565,2048,3.73,0.21,4.44\r\nrepghostnet_150,224,26233.77,78.035,2048,6.55,0.31,6.0\r\nregnety_008,224,26041.65,78.593,2048,6.26,0.81,5.25\r\nsemnasnet_100,224,26026.99,58.989,1536,3.89,0.32,6.23\r\nese_vovnet19b_slim,224,26018.84,78.686,2048,3.17,1.69,3.52\r\nhgnetv2_b2,224,26007.29,78.719,2048,11.22,1.15,4.12\r\nrepvit_m0_9,224,25863.4,79.154,2048,5.07,0.82,6.17\r\nmobilenetv2_100,224,25826.25,59.434,1536,3.5,0.31,6.68\r\nrepvit_m1,224,25817.51,79.28,2048,5.07,0.82,6.
17\r\nmobileone_s1,224,25750.93,79.502,2048,4.76,0.83,6.27\r\nfbnetc_100,224,25643.78,59.869,1536,5.57,0.4,6.51\r\nefficientnet_lite0,224,25055.02,61.286,1536,4.65,0.4,6.74\r\nmobilenetv3_large_100,256,25036.94,61.322,1536,5.48,0.29,5.75\r\nghostnet_130,224,24934.28,82.1,2048,7.36,0.24,4.6\r\nmobilenetv1_100,256,24910.5,61.615,1536,4.23,0.76,6.59\r\nfasternet_t2,224,24877.69,82.293,2048,14.98,1.91,4.73\r\nresnet14t,224,24850.94,82.38,2048,10.08,1.69,5.8\r\ntf_efficientnet_lite0,224,24755.32,62.017,1536,4.65,0.4,6.74\r\nregnety_008_tv,224,24720.31,82.804,2048,6.43,0.84,5.42\r\nmobilevit_xxs,256,24688.26,82.923,2048,1.27,0.42,8.34\r\nedgenext_x_small,256,24645.66,83.075,2048,2.34,0.54,5.93\r\nmobilenetv1_100h,256,24484.54,62.697,1536,5.28,0.82,6.65\r\nefficientformer_l1,224,24384.11,83.948,2048,12.29,1.3,5.53\r\ncs3darknet_focus_s,320,24377.94,83.976,2048,3.27,1.08,4.22\r\nswiftformer_xs,224,24373.99,83.984,2048,3.48,0.61,6.45\r\ntf_efficientnetv2_b1,192,24181.33,84.664,2048,8.14,0.76,4.59\r\nconvnext_pico,224,24021.51,85.22,2048,9.05,1.37,6.1\r\nmobilenetv1_125,224,23936.31,85.516,2048,6.27,0.89,6.3\r\ncrossvit_9_240,240,23773.06,86.118,2048,8.55,1.85,9.52\r\nregnetx_006,224,23675.37,86.469,2048,6.2,0.61,3.98\r\ncrossvit_tiny_240,240,23640.77,86.583,2048,7.01,1.57,9.08\r\neva02_tiny_patch14_224,224,23378.56,87.569,2048,5.5,1.7,9.14\r\nhardcorenas_f,224,23354.76,87.651,2048,8.2,0.35,5.57\r\nvit_betwixt_patch32_clip_224,224,23305.08,87.838,2048,61.41,3.09,4.17\r\nhardcorenas_e,224,23211.99,88.192,2048,8.07,0.35,5.65\r\nhgnetv2_b1,288,22874.3,89.491,2048,6.34,0.82,4.51\r\nconvnext_pico_ols,224,22795.42,89.807,2048,9.06,1.43,6.5\r\nconvnext_atto,288,22695.42,90.204,2048,3.7,0.91,6.3\r\ntf_efficientnetv2_b0,224,22649.2,90.374,2048,7.14,0.73,4.77\r\nresnet50,160,22588.94,90.632,2048,25.56,2.1,5.67\r\nresnet50d,160,22448.78,91.194,2048,25.58,2.22,6.08\r\nskresnet18,224,22199.28,92.231,2048,11.96,1.82,3.24\r\nhrnet_w18_small,224,22036.73,92.897,2048,13.19,1.61,5.72\r\nmobile
netv4_hybrid_medium,224,21899.51,93.486,2048,11.07,0.98,6.84\r\ndla46_c,224,21883.09,93.557,2048,1.3,0.58,4.5\r\nxcit_tiny_12_p16_224,224,21857.57,93.662,2048,6.72,1.24,6.29\r\nresnetblur18,224,21852.71,93.685,2048,11.69,2.34,3.39\r\ngernet_m,224,21575.65,94.891,2048,21.14,3.02,5.24\r\nmobilenetv4_conv_medium,256,21515.01,95.159,2048,9.72,1.1,7.58\r\ntinynet_a,192,21262.07,96.281,2048,6.19,0.35,5.41\r\ncrossvit_9_dagger_240,240,21214.6,96.495,2048,8.78,1.99,9.97\r\nnf_regnet_b0,256,21200.26,96.559,2048,8.76,0.64,5.58\r\nconvnext_atto_ols,288,21185.36,96.639,2048,3.7,0.96,6.8\r\nmobileone_s2,224,20923.15,97.845,2048,7.81,1.3,7.56\r\nconvnextv2_pico,224,20846.12,98.209,2048,9.07,1.37,6.1\r\nghostnetv3_100,224,20824.94,98.309,2048,6.15,0.17,4.55\r\nmnasnet_140,224,20775.58,98.545,2048,7.12,0.6,7.71\r\nrepghostnet_200,224,20744.86,98.692,2048,9.77,0.53,7.96\r\nhgnetv2_b3,224,20662.17,99.08,2048,16.29,1.78,5.07\r\nmobilevitv2_050,256,20629.24,74.436,1536,1.37,0.48,8.04\r\nmambaout_femto,224,20521.24,99.763,2048,7.3,1.16,8.34\r\nrepvit_m2,224,20491.74,99.914,2048,8.24,1.34,7.82\r\nrepvit_m1_1,224,20484.05,99.945,2048,8.24,1.34,7.82\r\nefficientformerv2_s1,224,20436.0,100.181,2048,6.19,0.67,7.66\r\nmobilenetv4_conv_blur_medium,224,20376.3,50.227,1024,9.72,1.22,8.58\r\nrepvit_m1_0,224,20232.43,101.189,2048,6.81,1.11,7.19\r\nlevit_conv_384,224,20072.0,101.998,2048,39.07,2.35,6.26\r\nconvnextv2_atto,288,19961.69,102.565,2048,3.71,0.91,6.3\r\nmobilenetv2_110d,224,19945.82,76.974,1536,4.52,0.45,8.71\r\ncs3darknet_m,256,19934.89,102.7,2048,9.31,2.08,5.28\r\nefficientvit_b1,224,19903.99,102.866,2048,9.1,0.53,7.25\r\nstarnet_s4,224,19857.85,103.09,2048,7.48,1.05,9.56\r\nrexnetr_100,224,19751.45,77.735,1536,4.88,0.43,7.72\r\nghostnetv2_100,224,19734.87,103.74,2048,6.16,0.18,4.55\r\nresnet34d,224,19676.66,104.044,2048,21.82,3.91,4.54\r\nswiftformer_s,224,19664.92,104.111,2048,6.09,0.99,7.81\r\nmobilenet_edgetpu_v2_s,224,19534.09,104.807,2048,5.99,1.21,6.6\r\nresnetv2_34d,224,19482.3
6,105.082,2048,21.82,3.91,4.54\r\nrexnet_100,224,19458.4,78.911,1536,4.8,0.41,7.44\r\nconvnext_femto,288,19384.13,105.608,2048,5.22,1.3,7.56\r\nfbnetv3_b,224,19375.78,105.662,2048,8.6,0.42,6.97\r\nedgenext_x_small,288,19177.88,106.759,2048,2.34,0.68,7.5\r\nresnet34,224,19163.53,106.828,2048,21.8,3.67,3.74\r\nresnetv2_18d,288,19157.14,106.869,2048,11.71,3.4,5.43\r\nrepvgg_a2,224,19121.65,107.063,2048,25.5,5.12,3.13\r\nresnetv2_34,224,18961.76,107.959,2048,21.8,3.67,3.74\r\nlevit_384,224,18700.14,109.483,2048,39.07,2.35,6.26\r\nresmlp_12_224,224,18626.92,109.92,2048,15.35,3.01,5.5\r\ncs3darknet_focus_m,256,18578.72,110.196,2048,9.3,1.98,4.89\r\ngmlp_ti16_224,224,18506.28,110.619,2048,5.87,1.34,7.55\r\nselecsls42,224,18473.14,110.833,2048,30.35,2.94,4.62\r\nresnet18d,288,18394.56,111.311,2048,11.71,3.41,5.43\r\nselecsls42b,224,18388.4,111.344,2048,32.46,2.98,4.62\r\nmobilenetv1_125,256,18338.03,83.737,1536,6.27,1.16,8.23\r\nresnetv2_18,288,18237.62,112.254,2048,11.69,3.0,4.11\r\nconvnext_femto_ols,288,18208.24,112.446,2048,5.23,1.35,8.06\r\nresnet50,176,18200.54,112.488,2048,25.56,2.62,6.92\r\nseresnet50,160,18193.5,112.53,2048,28.09,2.1,5.69\r\nvit_base_patch32_clip_quickgelu_224,224,18041.7,113.483,2048,87.85,4.41,5.01\r\necaresnet50t,160,18033.31,113.52,2048,25.57,2.21,6.04\r\nvit_base_patch32_clip_224,224,17992.04,113.798,2048,88.22,4.41,5.01\r\nvit_base_patch32_224,224,17967.63,113.949,2048,88.22,4.41,5.01\r\ntf_efficientnetv2_b2,208,17934.59,114.161,2048,10.1,1.06,6.0\r\nsemnasnet_140,224,17850.53,57.33,1024,6.11,0.6,8.87\r\nresnetrs50,160,17766.69,115.227,2048,35.69,2.29,6.2\r\npoolformerv2_s12,224,17751.74,115.34,2048,11.89,1.83,5.53\r\nresnet18,288,17742.68,115.394,2048,11.69,3.01,4.11\r\nresnext50_32x4d,160,17654.91,115.964,2048,25.03,2.17,7.35\r\nmobilenetv2_140,224,17569.64,58.258,1024,6.11,0.6,9.57\r\nseresnet34,224,17493.7,117.029,2048,21.96,3.67,3.74\r\nese_vovnet19b_dw,224,17440.91,117.392,2048,6.54,1.34,8.25\r\npoolformer_s12,224,17411.57,117.594,2048,
11.92,1.82,5.53\r\nlegacy_seresnet34,224,17410.14,117.598,2048,21.96,3.67,3.74\r\nresnet26,224,17393.37,117.714,2048,16.0,2.36,7.35\r\nresnet26d,224,17370.43,117.858,2048,16.01,2.6,8.15\r\nresnetaa34d,224,17302.19,118.339,2048,21.82,4.43,5.07\r\nefficientnet_es_pruned,224,17280.49,118.481,2048,5.44,1.81,8.73\r\nefficientnet_es,224,17210.53,118.965,2048,5.44,1.81,8.73\r\nefficientnet_b0,224,17194.21,89.302,1536,5.29,0.4,6.75\r\nmambaout_kobe,224,17183.3,119.14,2048,9.14,1.52,10.0\r\ntiny_vit_5m_224,224,17091.83,119.788,2048,12.08,1.27,11.25\r\ntf_efficientnet_es,224,16977.27,120.602,2048,5.44,1.81,8.73\r\nconvnextv2_femto,288,16939.0,120.875,2048,5.23,1.3,7.56\r\ntf_efficientnet_lite1,240,16821.0,60.842,1024,5.42,0.62,10.14\r\nmobilenetv4_hybrid_medium,256,16788.22,121.954,2048,11.07,1.29,9.01\r\nmobileone_s3,224,16756.19,122.19,2048,10.08,1.9,9.13\r\nefficientnet_lite1,240,16749.0,91.679,1536,5.42,0.62,10.14\r\nvit_small_patch32_384,384,16699.5,122.596,2048,22.92,3.45,8.25\r\npit_s_distilled_224,224,16489.98,124.153,2048,24.04,2.9,11.64\r\ngmixer_12_224,224,16360.11,125.15,2048,12.7,2.67,7.26\r\nseresnet18,288,16343.8,125.272,2048,11.78,3.01,4.11\r\nnf_resnet26,224,16332.26,125.362,2048,16.0,2.41,7.35\r\nedgenext_small,256,16225.5,126.191,2048,5.59,1.26,9.07\r\npit_s_224,224,16191.31,126.449,2048,23.46,2.88,11.56\r\nregnetz_005,224,16172.87,126.598,2048,7.12,0.52,5.86\r\nfbnetv3_d,224,16149.38,126.777,2048,10.31,0.52,8.5\r\nselecsls60,224,16089.79,127.245,2048,30.67,3.59,5.52\r\nselecsls60b,224,16075.67,127.357,2048,32.77,3.63,5.52\r\nnf_regnet_b2,240,15997.99,127.981,2048,14.31,0.97,7.23\r\nghostnetv3_130,224,15948.2,128.384,2048,8.95,0.28,5.9\r\nnf_regnet_b1,256,15916.35,128.63,2048,10.22,0.82,7.27\r\nconvnext_nano,224,15854.61,129.143,2048,15.59,2.46,8.37\r\nmobilenetv4_conv_blur_medium,256,15725.91,48.81,768,9.72,1.59,11.2\r\ncs3darknet_m,288,15705.75,130.362,2048,9.31,2.63,6.69\r\nmobilenet_edgetpu_v2_m,224,15627.73,131.009,2048,8.46,1.85,8.15\r\nefficientvit_b
1,256,15554.4,98.716,1536,9.1,0.69,9.46\r\ndarknet17,256,15503.58,132.061,2048,14.3,3.26,7.18\r\nmixer_b32_224,224,15501.98,132.081,2048,60.29,3.24,6.29\r\ndla34,224,15486.79,132.202,2048,15.74,3.07,5.02\r\nhgnetv2_b2,288,15369.85,133.215,2048,11.22,1.89,6.8\r\nswiftformer_l1,224,15347.3,133.403,2048,12.06,1.6,10.07\r\nnf_seresnet26,224,15317.55,133.656,2048,17.4,2.41,7.36\r\nnf_ecaresnet26,224,15317.17,133.662,2048,16.0,2.41,7.36\r\nhgnetv2_b4,224,15305.62,133.767,2048,19.8,2.75,6.7\r\nmobilenetv4_conv_aa_medium,256,15142.38,135.218,2048,9.72,1.58,10.3\r\nrexnetr_130,224,15102.6,67.78,1024,7.61,0.68,9.81\r\nghostnetv2_130,224,15079.3,135.779,2048,8.96,0.28,5.9\r\nvisformer_tiny,224,14999.26,136.503,2048,10.32,1.27,5.72\r\nmixnet_s,224,14982.78,102.49,1536,4.13,0.25,6.25\r\nfbnetv3_b,256,14936.79,102.784,1536,8.6,0.55,9.1\r\ntf_efficientnetv2_b1,240,14862.79,137.755,2048,8.14,1.21,7.34\r\nmobilenetv2_120d,224,14837.88,68.983,1024,5.83,0.69,11.97\r\nefficientnet_b0_gn,224,14827.17,103.568,1536,5.29,0.42,6.75\r\nmixer_s16_224,224,14822.52,138.125,2048,18.53,3.79,5.97\r\nefficientnet_b1_pruned,240,14723.31,139.053,2048,6.33,0.4,6.21\r\nvit_wee_patch16_reg1_gap_256,256,14714.35,139.143,2048,13.42,3.83,13.9\r\ndeit_small_patch16_224,224,14627.76,139.97,2048,22.05,4.61,11.95\r\nvit_small_patch16_224,224,14622.38,140.028,2048,22.05,4.61,11.95\r\ncs3darknet_focus_m,288,14609.39,140.144,2048,9.3,2.51,6.19\r\nconvnext_pico,288,14560.08,140.612,2048,9.05,2.27,10.08\r\ndeit3_small_patch16_224,224,14556.34,140.663,2048,22.06,4.61,11.95\r\nresnet101,160,14513.1,141.07,2048,44.55,4.0,8.28\r\ndeit_small_distilled_patch16_224,224,14484.95,141.348,2048,22.44,4.63,12.02\r\nresnext50_32x4d,176,14438.14,141.796,2048,25.03,2.71,8.97\r\nvit_base_patch32_siglip_gap_256,256,14366.41,142.521,2048,87.47,5.67,6.54\r\ntf_efficientnet_b0,224,14325.67,107.185,1536,5.29,0.4,6.75\r\ntiny_vit_11m_224,224,14281.14,143.363,2048,20.35,2.03,13.49\r\nrepvit_m3,224,14224.6,143.94,2048,10.12,1.86,11.43\r\n
gernet_l,256,14188.89,144.303,2048,31.08,4.57,8.0\r\ndarknet21,256,14182.44,144.365,2048,20.86,3.93,7.47\r\nfastvit_t8,256,14181.69,144.378,2048,4.0,0.69,6.59\r\nvit_base_patch32_siglip_256,256,14169.48,144.497,2048,94.55,5.75,6.64\r\nvit_tiny_r_s16_p8_384,384,14161.02,144.587,2048,6.36,1.34,6.49\r\nrexnet_130,224,14120.45,72.485,1024,7.56,0.68,9.71\r\nvit_base_patch32_clip_256,256,14056.92,145.652,2048,87.86,5.76,6.65\r\nskresnet34,224,13954.99,146.72,2048,22.28,3.67,5.13\r\nconvnextv2_nano,224,13938.45,146.903,2048,15.62,2.46,8.37\r\nconvnext_pico_ols,288,13758.88,148.818,2048,9.06,2.37,10.74\r\nconvnext_nano_ols,224,13702.26,149.415,2048,15.65,2.65,9.38\r\nvit_pwee_patch16_reg1_gap_256,256,13669.72,149.78,2048,15.25,4.37,15.87\r\nresnext26ts,256,13661.66,149.87,2048,10.3,2.43,10.52\r\nmobilenetv4_conv_medium,320,13660.4,112.404,1536,9.72,1.71,11.84\r\nmobilevitv2_075,256,13643.1,75.022,1024,2.87,1.05,12.06\r\ndpn48b,224,13542.4,151.197,2048,9.13,1.69,8.92\r\nfasternet_s,224,13524.67,151.391,2048,31.18,4.56,7.93\r\nmobilenet_edgetpu_v2_l,224,13428.5,152.477,2048,10.92,2.55,9.05\r\nvit_relpos_small_patch16_224,224,13405.58,152.726,2048,21.98,4.59,13.05\r\nvit_relpos_small_patch16_rpn_224,224,13357.9,153.281,2048,21.97,4.59,13.05\r\nvit_small_patch16_rope_ape_224,224,13313.62,153.781,2048,22.06,4.61,11.95\r\nvit_small_patch16_rope_224,224,13310.83,153.819,2048,21.98,4.61,11.95\r\nrexnetr_150,224,13300.63,76.962,1024,9.78,0.89,11.13\r\nresnet26t,256,13279.49,154.187,2048,16.01,3.35,10.52\r\nvit_srelpos_small_patch16_224,224,13265.04,154.357,2048,21.97,4.59,12.16\r\nefficientnet_b0,256,13251.6,77.245,1024,5.29,0.52,8.81\r\nresnetblur18,288,13190.64,116.411,1536,11.69,3.87,5.6\r\npvt_v2_b1,224,13190.49,77.599,1024,14.01,2.12,15.39\r\nresnest14d,224,13148.46,155.721,2048,10.61,2.76,7.33\r\nefficientnet_lite2,260,13140.15,77.901,1024,6.09,0.89,12.9\r\nrepvgg_b1g4,224,13139.15,155.824,2048,36.13,7.31,5.32\r\ntf_mixnet_s,224,13015.08,117.984,1536,4.13,0.25,6.25\r\nefficien
tformerv2_s2,224,13007.43,157.402,2048,12.71,1.27,11.77\r\nefficientnet_blur_b0,224,12929.9,118.756,1536,5.29,0.43,8.72\r\ntf_efficientnet_lite2,260,12921.65,79.211,1024,6.09,0.89,12.9\r\nflexivit_small,240,12844.36,159.411,2048,22.06,5.35,14.18\r\ngcresnext26ts,256,12820.8,159.699,2048,10.48,2.43,10.53\r\nconvnextv2_pico,288,12710.74,161.088,2048,9.07,2.27,10.08\r\nghostnetv3_160,224,12682.85,161.435,2048,12.38,0.41,7.23\r\nrepvit_m1_5,224,12646.7,161.906,2048,14.05,2.27,12.84\r\nsedarknet21,256,12606.64,162.416,2048,20.95,3.93,7.47\r\ncoat_lite_tiny,224,12537.57,163.31,2048,5.72,1.6,11.65\r\necaresnet50d_pruned,224,12520.8,163.533,2048,19.94,2.53,6.43\r\nnf_regnet_b1,288,12513.51,163.619,2048,10.22,1.02,9.2\r\nfbnetv3_d,256,12450.1,123.337,1536,10.31,0.68,11.1\r\nnf_regnet_b2,272,12446.57,164.5,2048,14.31,1.22,9.27\r\nseresnext26ts,256,12433.55,164.666,2048,10.39,2.43,10.52\r\neca_resnext26ts,256,12431.61,164.693,2048,10.3,2.43,10.52\r\nbotnet26t_256,256,12319.93,166.203,2048,12.49,3.32,11.98\r\ncs3darknet_l,256,12298.09,166.485,2048,21.16,4.86,8.55\r\nrexnet_150,224,12273.45,83.399,1024,9.73,0.9,11.21\r\nmobilenetv4_conv_large,256,12256.75,167.059,2048,32.59,2.86,12.14\r\nhgnetv2_b3,288,12237.7,167.302,2048,16.29,2.94,8.38\r\nhalonet26t,256,12206.06,167.742,2048,12.48,3.19,11.69\r\nxcit_nano_12_p16_384,384,12142.92,168.624,2048,3.05,1.64,12.15\r\necaresnext50t_32x4d,224,12057.53,169.8,2048,15.41,2.7,10.09\r\nxcit_tiny_24_p16_224,224,12051.61,169.894,2048,12.12,2.34,11.82\r\nghostnetv2_160,224,12049.73,169.925,2048,12.39,0.42,7.23\r\nefficientvit_b1,288,12046.79,127.473,1536,9.1,0.87,11.96\r\nmobileone_s4,224,12041.89,170.035,2048,14.84,2.98,11.81\r\necaresnext26t_32x4d,224,12038.3,170.081,2048,15.41,2.7,10.09\r\nlegacy_seresnext26_32x4d,224,12025.98,170.261,2048,16.79,2.49,9.39\r\nseresnext26t_32x4d,224,12016.39,170.389,2048,16.81,2.7,10.09\r\nseresnext26d_32x4d,224,11988.96,170.782,2048,16.81,2.73,10.19\r\nefficientnet_b1,224,11951.22,128.478,1536,7.79,0.59,9.36
\r\nmobilenet_edgetpu_v2_m,256,11897.3,172.102,2048,8.46,2.42,10.65\r\ncsatv2,512,11864.52,172.575,2048,11.1,1.39,9.17\r\nhgnet_tiny,224,11856.29,172.688,2048,14.74,4.54,6.36\r\ncs3darknet_focus_l,256,11826.85,173.124,2048,21.15,4.66,8.03\r\nresnet34d,288,11823.52,173.179,2048,21.82,6.47,7.51\r\nresnet101,176,11806.42,173.415,2048,44.55,4.92,10.08\r\nvit_dwee_patch16_reg1_gap_256,256,11784.33,173.74,2048,13.43,3.83,17.6\r\nconvit_tiny,224,11763.91,174.052,2048,5.71,1.26,7.94\r\nresnetv2_34d,288,11707.48,174.889,2048,21.82,6.46,7.51\r\nvit_small_resnet26d_224,224,11689.87,175.147,2048,63.61,5.07,11.12\r\nmobilevit_xs,256,11676.12,65.751,768,2.32,1.05,16.33\r\ngc_efficientnetv2_rw_t,224,11638.73,175.916,2048,13.68,1.94,9.97\r\nmobilenetv4_hybrid_large_075,256,11636.74,175.957,2048,22.75,2.06,11.64\r\nresnet50c,224,11627.97,176.095,2048,25.58,4.35,11.92\r\neca_nfnet_l0,224,11597.38,176.554,2048,24.14,4.35,10.47\r\ntresnet_m,224,11593.69,176.603,2048,31.39,5.75,7.31\r\nmobilenetv3_large_150d,256,11562.23,88.53,1024,14.62,1.03,12.35\r\ncoat_lite_mini,224,11542.78,177.382,2048,11.01,2.0,12.25\r\nresnet34,288,11537.58,177.461,2048,21.8,6.07,6.18\r\nnfnet_l0,224,11530.2,177.58,2048,35.07,4.36,10.47\r\ndpn68,224,11525.63,177.649,2048,12.61,2.35,10.47\r\nresnet32ts,256,11506.9,177.942,2048,17.96,4.63,11.58\r\neca_botnext26ts_256,256,11502.36,178.012,2048,10.59,2.46,11.6\r\nresnetv2_34,288,11421.59,179.266,2048,21.8,6.07,6.18\r\nresnet50,224,11413.45,179.39,2048,25.56,4.11,11.11\r\nresnet50d,224,11393.5,179.718,2048,25.58,4.35,11.92\r\nresnet33ts,256,11359.62,180.249,2048,19.68,4.76,11.66\r\nresnet50t,224,11334.64,180.642,2048,25.57,4.32,11.82\r\nlevit_conv_512,224,11324.34,180.811,2048,95.08,5.62,10.22\r\neca_halonext26ts,256,11280.82,181.508,2048,10.76,2.44,11.46\r\necaresnetlight,224,11237.05,182.214,2048,30.16,4.11,8.42\r\nlevit_512,224,11201.3,182.797,2048,95.08,5.62,10.22\r\ndpn68b,224,11176.66,183.194,2048,12.61,2.35,10.47\r\nedgenext_small_rw,256,11073.45,184.91,2048,7
.83,1.58,9.51\r\nvit_tiny_patch16_384,384,11069.22,184.971,2048,5.79,4.7,25.39\r\nvit_small_r26_s32_224,224,11019.77,185.805,2048,36.43,3.56,9.85\r\nvit_base_patch32_plus_256,256,10998.03,186.175,2048,119.48,7.79,7.76\r\necaresnet26t,256,10995.09,186.224,2048,16.01,3.35,10.53\r\nbat_resnext26ts,256,10921.97,187.473,2048,10.73,2.53,12.51\r\nefficientformer_l3,224,10878.42,188.224,2048,31.41,3.93,12.01\r\ntf_efficientnetv2_b2,260,10870.06,188.362,2048,10.1,1.72,9.84\r\nresnext26ts,288,10818.34,189.269,2048,10.3,3.07,13.31\r\nefficientnetv2_rw_t,224,10809.22,189.428,2048,13.65,1.93,9.94\r\ncoatnext_nano_rw_224,224,10802.65,189.542,2048,14.7,2.47,12.8\r\nvit_dpwee_patch16_reg1_gap_256,256,10786.66,189.818,2048,15.25,4.37,19.05\r\nefficientnet_b0_g16_evos,224,10671.54,191.875,2048,8.11,1.01,7.42\r\nwide_resnet50_2,176,10650.44,192.242,2048,68.88,7.29,8.97\r\nefficientvit_b2,224,10610.91,144.717,1536,24.33,1.6,14.62\r\nresnetv2_50d,224,10554.63,193.994,2048,25.57,4.35,11.92\r\nresnetv2_50,224,10551.78,194.053,2048,25.55,4.11,11.11\r\nvit_relpos_base_patch32_plus_rpn_256,256,10549.51,194.1,2048,119.42,7.68,8.01\r\nefficientnet_em,240,10540.23,194.264,2048,6.9,3.04,14.34\r\nese_vovnet19b_dw,288,10534.32,145.773,1536,6.54,2.22,13.63\r\nlevit_conv_512d,224,10533.48,194.39,2048,92.39,5.84,11.3\r\nresnet26,288,10530.22,194.444,2048,16.0,3.9,12.15\r\nresnetv2_50t,224,10527.61,194.498,2048,25.57,4.32,11.82\r\nseresnet34,288,10519.68,194.64,2048,21.96,6.07,6.18\r\ngcresnet33ts,256,10506.7,194.883,2048,19.88,4.76,11.68\r\ncrossvit_small_240,240,10485.0,195.288,2048,26.86,5.63,18.17\r\nresnet26d,288,10478.48,195.413,2048,16.01,4.29,13.48\r\nresnet152,160,10432.03,196.268,2048,60.19,5.9,11.51\r\nvovnet39a,224,10415.44,196.595,2048,22.6,7.09,6.73\r\ntf_efficientnet_em,240,10399.32,196.904,2048,6.9,3.04,14.34\r\nresnetaa34d,288,10398.42,196.899,2048,21.82,7.33,8.38\r\nfastvit_t12,256,10336.22,148.568,1536,7.51,1.39,9.57\r\ndla46x_c,224,10319.85,198.41,2048,1.07,0.54,5.66\r\nregnetx_016
,224,10315.66,198.491,2048,9.19,1.62,7.93\r\nedgenext_small,320,10314.44,198.508,2048,5.59,1.97,14.16\r\nmobilenetv4_hybrid_medium,320,10280.07,149.366,1536,11.07,2.05,14.36\r\nefficientnet_b1,240,10269.0,149.54,1536,7.79,0.71,10.88\r\nmobilevitv2_100,256,10259.98,74.821,768,4.9,1.84,16.08\r\nresnet50_clip_gap,224,10242.09,199.918,2048,23.53,5.39,12.44\r\nresnetaa50,224,10211.61,200.496,2048,25.56,5.15,11.64\r\ndla60,224,10199.63,200.745,2048,22.04,4.26,10.16\r\nresnetaa50d,224,10194.5,200.855,2048,25.58,5.39,12.44\r\nvgg11,224,10174.9,201.232,2048,132.86,7.61,7.44\r\nseresnet33ts,256,10165.77,201.415,2048,19.78,4.76,11.66\r\neca_resnet33ts,256,10145.42,201.826,2048,19.68,4.76,11.66\r\nvgg11_bn,224,10143.67,201.858,2048,132.87,7.62,7.44\r\ngcresnext26ts,288,10121.1,202.304,2048,10.48,3.07,13.33\r\ntwins_svt_small,224,10119.99,202.33,2048,24.06,2.94,13.75\r\ncs3sedarknet_l,256,10107.55,202.577,2048,21.91,4.86,8.56\r\neva02_tiny_patch14_336,336,10086.8,202.995,2048,5.76,4.68,27.16\r\necaresnet101d_pruned,224,10048.95,203.757,2048,24.88,3.48,7.69\r\ndla60x_c,224,10016.19,204.428,2048,1.32,0.59,6.01\r\nconvnext_tiny,224,9969.3,205.391,2048,28.59,4.47,13.44\r\nese_vovnet39b,224,9943.27,205.924,2048,24.57,7.09,6.74\r\nselecsls84,224,9940.87,205.973,2048,50.95,5.9,7.57\r\nxcit_nano_12_p8_224,224,9923.18,206.343,2048,3.05,2.16,15.71\r\neca_vovnet39b,224,9913.17,206.546,2048,22.6,7.09,6.74\r\nefficientnet_b2_pruned,260,9904.86,206.718,2048,8.31,0.73,9.13\r\ninception_next_tiny,224,9894.57,206.94,2048,28.06,4.19,11.98\r\ncrossvit_15_240,240,9888.3,207.072,2048,27.53,5.81,19.77\r\nregnety_016,224,9881.17,207.209,2048,11.2,1.63,8.04\r\nres2net50_48w_2s,224,9855.74,207.754,2048,25.29,4.18,11.72\r\nconvnext_tiny_hnf,224,9850.29,207.873,2048,28.59,4.47,13.44\r\nrexnetr_200,224,9834.23,78.068,768,16.52,1.59,15.11\r\nlevit_512d,224,9831.52,208.268,2048,92.39,5.84,11.3\r\nnf_ecaresnet50,224,9828.99,208.314,2048,25.56,4.21,11.13\r\nseresnext26ts,288,9803.81,208.851,2048,10.39,3.07,13.
32\r\nresnet50s,224,9799.5,208.935,2048,25.68,5.47,13.52\r\nnf_seresnet50,224,9798.47,208.974,2048,28.09,4.21,11.13\r\neca_resnext26ts,288,9797.17,208.991,2048,10.3,3.07,13.32\r\nresnest26d,224,9787.25,209.221,2048,17.07,3.64,9.97\r\nmambaout_femto,288,9756.57,209.863,2048,7.3,1.91,13.79\r\ntf_efficientnetv2_b3,240,9748.17,210.039,2048,14.36,1.93,9.95\r\neva02_small_patch14_224,224,9747.3,210.064,2048,21.62,6.14,18.28\r\nxcit_small_12_p16_224,224,9731.86,210.397,2048,26.25,4.82,12.58\r\nresnetblur50,224,9708.03,210.92,2048,25.56,5.16,12.02\r\nrepvgg_b1,224,9701.06,211.07,2048,51.83,11.82,5.32\r\nresnetblur50d,224,9680.32,211.516,2048,25.58,5.4,12.82\r\nvit_pe_core_tiny_patch16_384,384,9672.14,211.692,2048,6.14,4.74,25.62\r\ncs3darknet_l,288,9655.12,212.066,2048,21.16,6.16,10.83\r\nnf_regnet_b3,288,9642.67,212.348,2048,18.59,1.67,11.84\r\nresmlp_24_224,224,9642.0,212.353,2048,30.02,5.96,10.91\r\nvit_little_patch16_reg1_gap_256,256,9613.37,212.984,2048,22.52,6.27,18.06\r\nconvnext_nano,288,9609.23,213.09,2048,15.59,4.06,13.84\r\nvit_medium_patch16_clip_224,224,9605.78,213.156,2048,38.59,8.0,15.93\r\ndeit3_medium_patch16_224,224,9578.47,213.771,2048,38.85,8.0,15.93\r\nvit_little_patch16_reg4_gap_256,256,9554.37,214.313,2048,22.52,6.35,18.33\r\nmobilenetv4_conv_medium,384,9547.38,107.221,1024,9.72,2.46,17.05\r\nfastvit_s12,256,9546.76,160.855,1536,9.43,1.8,10.82\r\nresnet50_clip,224,9531.8,214.812,2048,38.32,6.14,12.98\r\ncoatnet_pico_rw_224,224,9517.43,53.761,512,10.85,2.05,14.62\r\nlegacy_seresnet50,224,9509.67,215.322,2048,28.09,3.88,10.6\r\ntwins_pcpvt_small,224,9412.74,217.528,2048,24.11,3.83,18.08\r\nvit_base_resnet26d_224,224,9380.26,218.285,2048,101.4,6.97,13.16\r\ninception_v3,299,9373.56,218.442,2048,23.83,5.73,8.97\r\nfastvit_sa12,256,9356.5,164.128,1536,11.55,1.94,11.24\r\npoolformerv2_s24,224,9313.99,219.839,2048,21.34,3.42,10.68\r\nskresnet50,224,9278.84,220.677,2048,25.8,4.11,12.5\r\ncs3darknet_focus_l,288,9272.89,220.822,2048,21.15,5.9,10.16\r\nese_vovne
t39b_evos,224,9265.63,220.994,2048,24.58,7.07,6.74\r\nskresnet50d,224,9256.24,221.213,2048,25.82,4.36,13.31\r\ncaformer_s18,224,9250.52,221.352,2048,26.34,4.13,19.39\r\nefficientnet_b1,256,9238.81,110.802,1024,7.79,0.77,12.22\r\nefficientnet_b0_g8_gn,224,9206.87,222.391,2048,6.56,0.66,6.75\r\nswiftformer_l3,224,9206.15,222.408,2048,28.49,4.01,15.77\r\nresnet50_gn,224,9198.33,222.606,2048,25.56,4.14,11.11\r\nseresnet50,224,9197.37,222.623,2048,28.09,4.11,11.13\r\nhgnetv2_b4,288,9190.85,222.781,2048,19.8,4.54,11.08\r\ncrossvit_15_dagger_240,240,9181.4,223.009,2048,28.21,6.13,20.43\r\necaresnet50t,224,9144.68,223.917,2048,25.57,4.32,11.83\r\nseresnet50t,224,9139.71,224.024,2048,28.1,4.32,11.83\r\necaresnet50d,224,9122.47,224.455,2048,25.58,4.35,11.93\r\ntf_efficientnet_b1,240,9113.49,112.326,1024,7.79,0.71,10.88\r\nregnetz_005,288,9109.43,224.781,2048,7.12,0.86,9.68\r\nresnet32ts,288,9087.51,225.319,2048,17.96,5.86,14.65\r\nrexnet_200,224,9077.4,84.575,768,16.37,1.56,14.91\r\ncspresnet50d,256,9061.97,225.954,2048,21.64,4.86,12.55\r\nresnetrs50,224,9054.28,226.145,2048,35.69,4.48,12.14\r\nfbnetv3_g,240,9042.47,169.832,1536,16.62,1.28,14.87\r\nconvformer_s18,224,9010.38,227.245,2048,26.77,3.96,15.82\r\nresnext50_32x4d,224,8986.56,227.85,2048,25.03,4.26,14.4\r\nresnet33ts,288,8976.12,228.118,2048,19.68,6.02,14.75\r\nresnext50d_32x4d,224,8926.36,229.389,2048,25.05,4.5,15.2\r\nmobilevit_s,256,8917.34,86.101,768,5.58,2.03,19.94\r\nvit_relpos_medium_patch16_224,224,8917.23,229.629,2048,38.75,7.97,17.02\r\nvit_relpos_medium_patch16_rpn_224,224,8915.14,229.68,2048,38.73,7.97,17.02\r\npoolformer_s24,224,8889.9,230.324,2048,21.39,3.41,10.68\r\ncspresnet50w,256,8887.28,230.397,2048,28.12,5.04,12.19\r\nvit_relpos_medium_patch16_cls_224,224,8887.05,230.4,2048,38.76,8.03,18.24\r\ncspresnet50,256,8868.26,230.894,2048,21.62,4.54,11.5\r\ndensenet121,224,8831.38,231.859,2048,7.98,2.87,6.9\r\nvit_srelpos_medium_patch16_224,224,8807.99,232.475,2048,38.74,7.96,16.21\r\nconvnextv2_tiny,224,8
761.73,233.7,2048,28.64,4.47,13.44\r\nefficientnet_b2,256,8753.2,116.947,1024,9.11,0.89,12.81\r\ngcvit_xxtiny,224,8749.49,234.027,2048,12.0,2.14,15.36\r\ntiny_vit_21m_224,224,8696.6,176.577,1536,33.21,4.27,20.08\r\nvovnet57a,224,8689.52,235.649,2048,36.64,8.95,7.52\r\nhaloregnetz_b,224,8661.08,236.417,2048,11.68,1.97,11.94\r\nregnetx_032,224,8650.43,236.709,2048,15.3,3.2,11.37\r\ndensenetblur121d,224,8628.65,237.308,2048,8.0,3.11,7.9\r\nhrnet_w18_small_v2,224,8603.05,238.009,2048,15.6,2.62,9.65\r\nregnetz_b16,224,8597.61,238.167,2048,9.72,1.45,9.95\r\nvit_small_patch16_dinov3_qkvb,256,8577.68,238.711,2048,21.6,6.26,17.03\r\nhgnetv2_b5,224,8577.44,238.723,2048,39.57,6.56,11.19\r\nedgenext_base,256,8573.02,238.844,2048,18.51,3.85,15.58\r\nvit_small_patch16_dinov3,256,8571.45,238.879,2048,21.59,6.26,17.03\r\ntf_mixnet_m,224,8517.96,240.378,2048,5.01,0.36,8.19\r\ndla60x,224,8517.23,240.401,2048,17.35,3.54,13.8\r\ngmixer_24_224,224,8511.62,240.555,2048,24.72,5.28,14.45\r\nvit_medium_patch16_gap_240,240,8510.54,240.597,2048,44.4,9.22,18.81\r\ngcresnext50ts,256,8500.57,240.887,2048,15.67,3.75,15.46\r\nsehalonet33ts,256,8490.7,241.157,2048,13.69,3.55,14.7\r\ngmlp_s16_224,224,8487.46,241.255,2048,19.42,4.42,15.1\r\nconvnextv2_nano,288,8483.36,241.356,2048,15.62,4.06,13.84\r\nmambaout_kobe,288,8475.82,241.583,2048,9.14,2.5,16.53\r\nefficientvit_l1,224,8470.61,120.854,1024,52.65,5.27,15.85\r\nresnet152,176,8470.42,241.732,2048,60.19,7.22,13.99\r\nresnet26t,320,8469.58,241.768,2048,16.01,5.24,16.44\r\nmixnet_m,224,8431.06,242.867,2048,5.01,0.36,8.19\r\ngcresnet50t,256,8390.49,244.041,2048,25.9,5.42,14.67\r\nseresnetaa50d,224,8367.33,244.718,2048,28.11,5.4,12.46\r\nresnetv2_50d_frn,224,8351.03,245.191,2048,25.59,4.33,11.92\r\nvisformer_small,224,8338.33,245.567,2048,40.22,4.88,11.43\r\ngcresnet33ts,288,8316.71,246.212,2048,19.88,6.02,14.78\r\nconvnext_nano_ols,288,8289.98,246.999,2048,15.65,4.38,15.5\r\nefficientvit_b2,256,8257.59,123.969,1024,24.33,2.09,19.03\r\nresnetv2_50x1_b
it,224,8239.09,248.531,2048,25.55,4.23,11.11\r\nregnetz_b16_evos,224,8231.96,248.743,2048,9.74,1.43,9.95\r\nresnetv2_50d_evos,224,8182.22,250.252,2048,25.59,4.33,11.92\r\nresnetv2_50d_gn,224,8171.03,250.604,2048,25.57,4.38,11.92\r\ncoatnet_nano_rw_224,224,8139.47,251.57,2048,15.14,2.41,15.41\r\ndavit_tiny,224,8133.04,94.401,768,28.36,4.54,18.89\r\nvit_base_r26_s32_224,224,8107.01,252.576,2048,101.38,6.81,12.36\r\ncsatv2_21m,512,8106.87,252.586,2048,20.7,2.94,15.85\r\nhgnet_small,224,8106.47,252.585,2048,24.36,8.53,8.79\r\nvit_dlittle_patch16_reg1_gap_256,256,8081.56,253.366,2048,22.52,6.27,22.69\r\nnf_resnet50,256,8066.89,253.84,2048,25.56,5.46,14.52\r\nseresnet33ts,288,8052.06,254.3,2048,19.78,6.02,14.76\r\nresnet51q,256,8039.5,254.706,2048,35.7,6.38,16.55\r\neca_resnet33ts,288,8022.26,255.251,2048,19.68,6.02,14.76\r\ncoatnet_nano_cc_224,224,7988.08,256.343,2048,13.76,2.24,15.02\r\nefficientformerv2_l,224,7986.98,256.373,2048,26.32,2.59,18.54\r\nmaxvit_pico_rw_256,256,7973.67,96.281,768,7.46,1.83,22.3\r\nresnetrs101,192,7961.92,257.167,2048,63.62,6.04,12.7\r\ncs3sedarknet_l,288,7952.03,257.505,2048,21.91,6.16,10.83\r\nmaxvit_rmlp_pico_rw_256,256,7951.5,96.552,768,7.52,1.85,24.86\r\nregnetv_040,224,7887.35,259.618,2048,20.64,4.0,12.29\r\ncoatnet_0_rw_224,224,7879.38,194.89,1536,27.44,4.43,18.73\r\nsebotnet33ts_256,256,7829.21,261.543,2048,13.7,3.89,17.46\r\nefficientnet_lite3,300,7824.88,65.403,512,8.2,1.65,21.85\r\nlambda_resnet26rpt_256,256,7817.63,261.927,2048,10.99,3.16,11.87\r\nregnety_040,224,7808.64,262.22,2048,20.65,4.0,12.29\r\nnf_regnet_b3,320,7777.71,263.271,2048,18.59,2.05,14.61\r\nmobilenetv4_conv_large,320,7775.16,197.511,1536,32.59,4.47,18.97\r\ntf_efficientnet_lite3,300,7774.93,65.825,512,8.2,1.65,21.85\r\nfastvit_mci0,256,7761.93,197.849,1536,11.36,2.39,14.72\r\nresnet50_mlp,256,7754.56,264.059,2048,26.65,7.05,16.25\r\nres2next50,224,7714.95,265.415,2048,24.67,4.2,13.71\r\nmobilevitv2_125,256,7693.06,99.797,768,7.48,2.86,20.1\r\nxcit_tiny_12_p16_384
,384,7628.02,268.438,2048,6.72,3.64,18.26\r\nfocalnet_tiny_srf,224,7604.14,269.275,2048,28.43,4.42,16.32\r\ndarknet53,256,7587.46,269.873,2048,41.61,9.31,12.39\r\nseresnext50_32x4d,224,7551.0,271.174,2048,27.56,4.26,14.42\r\nvit_medium_patch16_gap_256,256,7531.6,271.87,2048,38.86,10.59,22.15\r\nnextvit_small,224,7522.93,272.18,2048,31.74,5.8,18.44\r\nlegacy_seresnext50_32x4d,224,7492.53,273.281,2048,27.56,4.26,14.42\r\npvt_v2_b2,224,7489.12,136.691,1024,25.36,4.05,27.53\r\nswin_tiny_patch4_window7_224,224,7487.82,273.462,2048,28.29,4.51,17.06\r\ndarknetaa53,256,7470.62,274.095,2048,36.02,7.97,12.39\r\necaresnet50d_pruned,288,7444.43,275.055,2048,19.94,4.19,10.61\r\ndla60_res2next,224,7442.4,275.133,2048,17.03,3.49,13.17\r\nefficientnet_b3_pruned,300,7406.6,276.469,2048,9.86,1.04,11.86\r\nresnet101c,224,7405.15,276.509,2048,44.57,8.08,17.04\r\nres2net50_26w_4s,224,7386.48,277.215,2048,25.7,4.28,12.61\r\nres2net50d,224,7381.44,277.41,2048,25.72,4.52,13.41\r\nmobilenetv3_large_150d,320,7379.26,104.044,768,14.62,1.61,19.29\r\nmambaout_tiny,224,7378.68,277.509,2048,26.55,4.49,16.68\r\nresnest50d_1s4x24d,224,7370.08,277.828,2048,25.68,4.43,13.57\r\nresnet61q,256,7365.07,278.025,2048,36.85,7.8,17.01\r\nswinv2_cr_tiny_224,224,7351.92,278.523,2048,28.33,4.66,28.45\r\ncoatnet_rmlp_nano_rw_224,224,7333.61,69.785,512,15.15,2.62,20.34\r\nvit_base_resnet50d_224,224,7328.18,279.424,2048,110.97,8.73,16.92\r\ndensenet169,224,7321.86,279.663,2048,14.15,3.4,7.3\r\nresnet101,224,7305.04,280.31,2048,44.55,7.83,16.23\r\nresnet101d,224,7295.39,280.68,2048,44.57,8.08,17.04\r\nswinv2_cr_tiny_ns_224,224,7251.68,282.367,2048,28.33,4.66,28.45\r\nseresnext26d_32x4d,288,7248.4,282.498,2048,16.81,4.51,16.85\r\nrdnet_tiny,224,7245.28,282.622,2048,23.86,5.06,15.98\r\nseresnext26t_32x4d,288,7244.6,282.646,2048,16.81,4.46,16.68\r\nefficientnet_b1,288,7243.06,141.336,1024,7.79,0.97,15.46\r\ncoatnet_bn_0_rw_224,224,7197.93,71.106,512,27.44,4.67,22.04\r\nnfnet_f0,192,7165.09,285.785,2048,71.49,7.21,10.1
6\r\nfocalnet_tiny_lrf,224,7156.89,286.114,2048,28.65,4.49,17.76\r\nhgnet_tiny,288,7156.6,214.586,1536,14.74,7.51,10.51\r\ncspresnext50,256,7148.57,286.441,2048,20.57,4.05,15.86\r\nvit_small_plus_patch16_dinov3,256,7145.75,286.559,2048,28.68,8.11,21.84\r\nvit_small_plus_patch16_dinov3_qkvb,256,7135.24,286.964,2048,28.69,8.11,21.84\r\ndla102,224,7096.27,288.558,2048,33.27,7.19,14.18\r\ntf_efficientnet_b2,260,7095.81,144.271,1024,9.11,1.02,13.83\r\nvit_medium_patch16_reg1_gap_256,256,7095.25,288.599,2048,38.88,10.63,22.26\r\nefficientnet_cc_b0_8e,224,7078.54,289.287,2048,24.01,0.42,9.42\r\nefficientnet_cc_b0_4e,224,7070.5,289.616,2048,13.31,0.41,9.42\r\ncs3darknet_focus_x,256,7070.04,289.625,2048,35.02,8.03,10.69\r\ngc_efficientnetv2_rw_t,288,7050.64,290.425,2048,13.68,3.2,16.45\r\nvit_medium_patch16_reg4_gap_256,256,7031.81,291.204,2048,38.88,10.76,22.6\r\necaresnet26t,320,7029.33,291.311,2048,16.01,5.24,16.44\r\ndm_nfnet_f0,192,7017.7,291.787,2048,71.49,7.21,10.16\r\necaresnet50t,256,7003.85,292.356,2048,25.57,5.64,15.45\r\nrepvit_m2_3,224,6989.58,292.96,2048,22.93,4.52,21.32\r\neca_nfnet_l0,288,6985.98,293.115,2048,24.14,7.12,17.29\r\nnfnet_l0,288,6974.13,293.617,2048,35.07,7.13,17.29\r\ndla60_res2net,224,6968.26,293.848,2048,20.85,4.15,12.34\r\nedgenext_small_rw,320,6936.45,295.211,2048,7.83,2.46,14.85\r\nmobilenetv4_hybrid_medium,384,6905.26,148.249,1024,11.07,3.01,21.18\r\nvgg13,224,6897.41,296.877,2048,133.05,11.31,12.25\r\nresnet50,288,6884.45,297.435,2048,25.56,6.8,18.37\r\nvitamin_small_224,224,6874.57,74.438,512,22.17,5.92,26.38\r\nefficientnet_b2,288,6872.48,148.964,1024,9.11,1.12,16.2\r\nvgg13_bn,224,6865.56,298.246,2048,133.05,11.33,12.25\r\nresnet50d,288,6859.83,298.502,2048,25.58,7.19,19.7\r\nres2net50_14w_8s,224,6831.87,299.717,2048,25.06,4.21,13.28\r\nhiera_tiny_224,224,6830.84,299.77,2048,27.91,4.91,17.13\r\ncs3darknet_x,256,6827.47,299.915,2048,35.05,8.38,11.35\r\nresnest50d,224,6816.67,300.371,2048,27.48,5.4,14.36\r\nresnet50t,288,6811.3,300.632,2
048,25.57,7.14,19.53\r\nresnet101_clip_gap,224,6811.27,300.635,2048,42.52,9.11,17.56\r\nnf_resnet101,224,6795.91,301.295,2048,44.55,8.01,16.23\r\nefficientvit_l2,224,6795.89,150.633,1024,63.71,6.97,19.58\r\nresnetaa101d,224,6787.7,301.672,2048,44.57,9.12,17.56\r\nrepvgg_b2g4,224,6763.13,302.766,2048,55.78,11.33,6.45\r\necaresnetlight,288,6760.3,302.891,2048,30.16,6.79,13.91\r\nhalonet50ts,256,6751.22,303.306,2048,22.73,5.3,19.2\r\ncspdarknet53,256,6740.45,303.784,2048,27.64,6.57,16.81\r\ndpn68b,288,6734.92,304.036,2048,12.61,3.89,17.3\r\nregnetx_040,224,6727.08,304.388,2048,22.12,3.99,12.2\r\ngcvit_xtiny,224,6704.01,305.441,2048,19.98,2.93,20.26\r\nwide_resnet50_2,224,6699.53,305.642,2048,68.88,11.43,14.4\r\ngcresnext50ts,288,6683.84,306.363,2048,15.67,4.75,19.57\r\ncrossvit_18_240,240,6647.85,308.016,2048,43.27,9.05,26.26\r\nresnet101s,224,6619.45,309.342,2048,44.67,9.19,18.64\r\ngcresnet50t,288,6608.76,309.837,2048,25.9,6.86,18.57\r\nresnetv2_101,224,6602.36,310.13,2048,44.54,7.83,16.23\r\nresnetv2_101d,224,6589.18,310.762,2048,44.56,8.07,17.04\r\nresnetblur101d,224,6583.1,311.053,2048,44.57,9.12,17.94\r\ntf_efficientnet_cc_b0_4e,224,6564.43,311.947,2048,13.31,0.41,9.42\r\nvit_medium_patch16_rope_reg1_gap_256,256,6551.71,312.549,2048,38.74,10.63,22.26\r\ntf_efficientnet_cc_b0_8e,224,6550.46,312.613,2048,24.01,0.42,9.42\r\nresnetv2_34d,384,6533.59,313.403,2048,21.82,11.49,13.35\r\nefficientnetv2_rw_t,288,6519.35,314.088,2048,13.65,3.19,16.42\r\nresnet101_clip,224,6513.85,314.353,2048,56.26,9.81,18.08\r\nresmlp_36_224,224,6507.55,314.665,2048,44.69,8.91,16.33\r\nskresnext50_32x4d,224,6487.85,315.613,2048,27.48,4.5,17.18\r\nregnety_040_sgn,224,6481.35,315.939,2048,20.65,4.03,12.29\r\nmaxxvit_rmlp_nano_rw_256,256,6455.49,237.888,1536,16.78,4.37,26.05\r\nefficientvit_b2,288,6447.71,158.775,1024,24.33,2.64,24.03\r\nmobilevitv2_150,256,6435.0,79.531,512,10.59,4.09,24.11\r\nfbnetv3_g,288,6427.36,159.287,1024,16.62,1.77,21.09\r\nregnety_032,224,6415.21,319.197,2048,19.44,3
.2,11.26\r\ntwins_pcpvt_base,224,6413.33,319.285,2048,43.83,6.68,25.25\r\nrexnetr_300,224,6398.05,79.993,512,34.81,3.39,22.16\r\nresnetv2_50,288,6395.26,320.186,2048,25.55,6.79,18.37\r\nswin_s3_tiny_224,224,6389.07,320.501,2048,28.33,4.64,19.13\r\nhieradet_small,256,6386.79,120.215,768,34.73,8.51,27.76\r\nresnet51q,288,6381.39,320.889,2048,35.7,8.07,20.94\r\ncoat_lite_small,224,6367.25,321.587,2048,19.84,3.96,22.09\r\nnf_resnet50,288,6363.81,321.773,2048,25.56,6.88,18.37\r\nese_vovnet57b,256,6329.91,323.504,2048,38.61,11.69,9.82\r\npoolformerv2_s36,224,6326.58,323.666,2048,30.79,5.01,15.82\r\ntresnet_v2_l,224,6319.64,324.011,2048,46.17,8.85,16.34\r\ntf_mixnet_l,224,6315.95,324.215,2048,7.33,0.58,10.84\r\nnf_ecaresnet101,224,6303.31,324.857,2048,44.55,8.01,16.27\r\nmvitv2_tiny,224,6303.29,324.862,2048,24.17,4.7,21.16\r\nnf_seresnet101,224,6292.28,325.435,2048,49.33,8.02,16.27\r\npvt_v2_b2_li,224,6282.18,162.958,1024,22.55,3.91,27.6\r\ncrossvit_18_dagger_240,240,6272.54,326.451,2048,44.27,9.5,27.03\r\nbotnet50ts_256,256,6235.29,328.387,2048,22.74,5.54,22.23\r\nwide_resnet101_2,176,6223.7,329.016,2048,126.89,14.31,13.18\r\nmixnet_l,224,6193.89,330.606,2048,7.33,0.58,10.84\r\nfasternet_m,224,6191.12,330.744,2048,53.52,8.74,15.34\r\ntf_efficientnetv2_b3,300,6169.33,331.926,2048,14.36,3.04,15.74\r\nresnetaa50d,288,6150.32,332.926,2048,25.58,8.92,20.57\r\nresnetaa50,288,6142.73,333.341,2048,25.56,8.52,19.24\r\ncoatnet_rmlp_0_rw_224,224,6141.91,83.33,512,27.45,4.72,24.89\r\nfastvit_sa24,256,6108.41,251.413,1536,21.5,3.77,20.35\r\nvit_base_patch32_384,384,6107.47,335.275,2048,88.3,13.06,16.5\r\nvit_base_patch32_clip_384,384,6104.97,335.416,2048,88.3,13.06,16.5\r\nconvnext_tiny,288,6056.82,338.073,2048,28.59,7.39,22.21\r\nxcit_tiny_12_p8_224,224,6055.57,338.156,2048,6.71,4.81,23.6\r\ncs3sedarknet_xdw,256,6051.17,338.395,2048,21.6,5.97,17.18\r\nrepvgg_b2,224,6048.08,338.566,2048,80.32,18.38,6.45\r\nvit_large_patch32_224,224,6032.37,339.449,2048,305.51,15.39,13.3\r\nswinv2_tiny
_window8_256,256,6029.86,339.602,2048,28.35,5.96,24.57\r\nconvnext_tiny_hnf,288,6018.44,340.234,2048,28.59,7.39,22.21\r\nhalo2botnet50ts_256,256,5995.48,341.54,2048,22.64,5.02,21.78\r\nrexnetr_200,288,5977.29,85.624,512,16.52,2.62,24.96\r\nnf_regnet_b4,320,5977.28,342.577,2048,30.21,3.29,19.88\r\npoolformer_s36,224,5974.83,342.727,2048,30.86,5.0,15.82\r\nese_vovnet39b,288,5968.69,343.068,2048,24.57,11.71,11.13\r\ndarknet53,288,5960.15,343.556,2048,41.61,11.78,15.68\r\nregnetx_080,224,5949.77,344.162,2048,39.57,8.02,14.06\r\necaresnet101d_pruned,288,5945.62,344.413,2048,24.88,5.75,12.71\r\nlegacy_seresnet101,224,5940.81,344.675,2048,49.33,7.61,15.74\r\nconvnext_small,224,5939.85,344.748,2048,50.22,8.71,21.56\r\ndla102x,224,5935.73,344.973,2048,26.31,5.89,19.42\r\ndarknetaa53,288,5885.29,347.939,2048,36.02,10.08,15.68\r\npit_b_distilled_224,224,5875.63,348.515,2048,74.79,12.5,33.07\r\nseresnet101,224,5865.01,349.132,2048,49.33,7.84,16.27\r\nresnetblur50,288,5863.97,261.897,1536,25.56,8.52,19.87\r\necaresnet101d,224,5860.51,349.407,2048,44.57,8.08,17.07\r\nresnetblur50d,288,5858.02,262.16,1536,25.58,8.92,21.19\r\npit_b_224,224,5855.69,349.687,2048,73.76,12.42,32.94\r\ncs3sedarknet_x,256,5848.43,350.137,2048,35.4,8.38,11.35\r\ninception_next_small,224,5833.2,351.036,2048,49.37,8.36,19.27\r\nresnet61q,288,5812.43,352.285,2048,36.85,9.87,21.52\r\nresnext101_32x8d,176,5790.79,353.608,2048,88.79,10.33,19.37\r\nmaxvit_nano_rw_256,256,5764.95,66.586,384,15.45,4.46,30.28\r\nnextvit_base,224,5763.8,355.27,2048,44.79,8.29,23.71\r\nmaxvit_rmlp_nano_rw_256,256,5745.56,66.804,384,15.5,4.47,31.92\r\nefficientvit_b3,224,5732.97,178.578,1024,48.65,3.99,26.9\r\nresnext101_32x4d,224,5702.15,359.114,2048,44.18,8.01,21.23\r\ntnt_s_legacy_patch16_224,224,5698.15,359.363,2048,23.76,5.24,24.37\r\ncs3edgenet_x,256,5667.67,361.291,2048,47.82,11.53,12.92\r\nrexnet_300,224,5618.72,91.093,512,34.71,3.44,22.4\r\nvgg16,224,5605.97,365.263,2048,138.36,15.47,13.56\r\nrepvgg_b3g4,224,5601.57,365.569,2
048,75.63,16.06,7.55\r\nvgg16_bn,224,5580.41,366.944,2048,138.37,15.5,13.56\r\nresnet101d,256,5573.17,367.428,2048,44.57,10.55,22.25\r\ndensenet201,224,5553.37,368.746,2048,20.01,4.34,7.85\r\nconvit_small,224,5540.82,369.556,2048,27.78,5.76,17.87\r\nseresnet50,288,5537.68,369.779,2048,28.09,6.8,18.39\r\ncoatnet_0_224,224,5529.34,69.42,384,25.04,4.58,24.01\r\necaresnet50d,288,5523.67,370.71,2048,25.58,7.19,19.72\r\necaresnet50t,288,5509.13,371.701,2048,25.57,7.14,19.55\r\nhiera_small_224,224,5505.75,371.914,2048,35.01,6.42,20.75\r\ntnt_s_patch16_224,224,5504.1,372.024,2048,23.77,5.25,24.37\r\nseresnet50t,288,5499.59,372.344,2048,28.1,7.14,19.55\r\nedgenext_base,320,5436.26,376.683,2048,18.51,6.01,24.32\r\nregnetz_c16,256,5420.85,377.748,2048,13.46,2.51,16.57\r\nvolo_d1_224,224,5417.32,377.996,2048,26.63,6.94,24.43\r\nmobilenetv4_conv_large,384,5416.65,189.003,1024,32.59,6.43,27.31\r\nhrnet_w32,224,5411.07,378.439,2048,41.23,8.97,22.02\r\nresnext50_32x4d,288,5397.26,379.406,2048,25.03,7.04,23.81\r\nmixer_b16_224,224,5387.37,380.105,2048,59.88,12.62,14.53\r\ncs3darknet_x,288,5380.03,380.614,2048,35.05,10.6,14.36\r\nresnext50d_32x4d,288,5378.14,380.754,2048,25.05,7.44,25.13\r\nefficientnetv2_s,288,5357.44,382.218,2048,21.46,4.75,20.13\r\nres2net50_26w_6s,224,5344.82,383.12,2048,37.05,6.33,15.28\r\nconvnextv2_tiny,288,5333.4,383.945,2048,28.64,7.39,22.21\r\nmobilevitv2_175,256,5327.66,96.069,512,14.25,5.54,28.13\r\nlambda_resnet26t,256,5322.67,384.73,2048,10.96,3.02,11.87\r\nresnet50_gn,288,5315.38,385.242,2048,25.56,6.85,18.37\r\nmaxxvitv2_nano_rw_256,256,5313.46,192.681,1024,23.7,6.26,23.05\r\nconvnext_nano,384,5299.0,386.438,2048,15.59,7.22,24.61\r\nregnetz_c16_evos,256,5291.35,386.995,2048,13.49,2.48,16.57\r\ndensenet121,288,5277.58,388.008,2048,7.98,4.74,11.41\r\nresnet152c,224,5273.55,388.306,2048,60.21,11.8,23.36\r\nvit_small_resnet50d_s16_224,224,5263.45,389.041,2048,57.53,13.48,24.82\r\nxcit_small_24_p16_224,224,5253.75,389.769,2048,47.67,9.1,23.64\r\npvt_v2_b3,
224,5244.69,195.204,1024,45.24,6.92,37.7\r\nresnet152,224,5242.3,390.613,2048,60.19,11.56,22.56\r\nefficientvit_l2,256,5231.78,146.752,768,63.71,9.09,25.49\r\nconvnextv2_small,224,5228.41,391.654,2048,50.32,8.71,21.56\r\nresnet152d,224,5213.98,392.734,2048,60.21,11.8,23.36\r\ndensenetblur121d,288,5202.65,295.195,1536,8.0,5.14,13.06\r\nregnetx_064,224,5175.95,395.621,2048,26.21,6.49,16.37\r\nefficientnet_b3,288,5174.11,98.925,512,12.23,1.63,21.49\r\nregnetz_b16,288,5168.08,396.223,2048,9.72,2.39,16.43\r\nresnetv2_101x1_bit,224,5166.26,396.365,2048,44.54,8.04,16.23\r\neca_nfnet_l1,256,5156.78,397.099,2048,41.41,9.62,22.04\r\nhgnetv2_b5,288,5133.36,398.905,2048,39.57,10.84,18.5\r\nvit_base_patch16_rpn_224,224,5124.75,399.57,2048,86.54,17.49,23.75\r\ncsatv2_21m,640,5123.51,399.679,2048,20.7,4.72,26.68\r\nvit_betwixt_patch16_gap_256,256,5113.75,400.427,2048,60.37,16.25,27.69\r\ndeit3_base_patch16_224,224,5105.0,401.127,2048,86.59,17.58,23.9\r\nmixer_l32_224,224,5076.32,403.392,2048,206.94,11.27,19.86\r\nvit_base_patch16_siglip_gap_224,224,5074.09,403.568,2048,85.8,17.49,23.75\r\ndeit_base_patch16_224,224,5070.02,403.884,2048,86.57,17.58,23.9\r\nvit_small_patch16_18x2_224,224,5069.18,403.957,2048,64.67,13.71,35.69\r\nvit_base_patch16_224_miil,224,5066.22,404.198,2048,94.4,17.59,23.91\r\ndeit_base_distilled_patch16_224,224,5064.15,404.361,2048,87.34,17.68,24.05\r\nvit_base_patch16_224,224,5059.16,404.76,2048,86.57,17.58,23.9\r\nvit_base_patch16_siglip_224,224,5051.68,405.349,2048,92.88,17.73,24.06\r\nvit_small_patch16_36x1_224,224,5049.34,405.539,2048,64.67,13.71,35.69\r\nseresnetaa50d,288,5041.27,406.193,2048,28.11,8.92,20.59\r\nefficientnetv2_rw_s,288,5033.0,406.864,2048,23.94,4.91,21.41\r\nvit_base_patch16_clip_quickgelu_224,224,5031.65,406.962,2048,86.19,17.58,23.9\r\nvit_base_patch16_clip_224,224,5029.9,407.122,2048,86.57,17.58,23.9\r\nefficientformer_l7,224,5027.31,407.31,2048,82.23,10.17,24.45\r\nmaxvit_tiny_rw_224,224,5022.43,101.907,512,29.06,5.11,33.11\r\nvit_bas
e_patch16_xp_224,224,4992.84,410.128,2048,86.51,17.56,23.9\r\nregnetz_b16_evos,288,4990.62,410.317,2048,9.74,2.36,16.43\r\ninception_v4,299,4969.81,412.037,2048,42.68,12.28,15.09\r\nvit_base_mci_224,224,4964.45,412.489,2048,86.35,17.73,24.65\r\nres2net101_26w_4s,224,4954.13,413.339,2048,45.21,8.1,18.45\r\nefficientnet_x_b3,288,4947.92,413.862,2048,13.3,3.91,15.6\r\nvit_pe_spatial_tiny_patch16_512,512,4942.2,414.336,2048,5.68,10.46,61.64\r\nres2net101d,224,4939.39,414.575,2048,45.23,8.35,19.25\r\ntresnet_l,224,4932.34,415.166,2048,55.99,10.9,11.9\r\ndavit_small,224,4924.65,155.916,768,49.75,8.8,30.49\r\nresnetv2_50d_gn,288,4914.17,416.705,2048,25.57,7.24,19.7\r\nresnetv2_50d_evos,288,4910.73,416.994,2048,25.59,7.15,19.7\r\ncaformer_s36,224,4890.07,418.752,2048,39.3,8.0,37.53\r\nvit_base_patch16_gap_224,224,4890.01,418.749,2048,86.57,17.49,25.59\r\ndla169,224,4884.66,419.22,2048,53.39,11.6,20.2\r\nhgnet_small,288,4883.44,314.474,1536,24.36,14.09,14.53\r\nvit_betwixt_patch16_reg1_gap_256,256,4881.58,419.474,2048,60.4,16.32,27.83\r\ncoatnet_rmlp_1_rw_224,224,4872.38,315.19,1536,41.69,7.85,35.47\r\nrdnet_small,224,4851.75,422.074,2048,50.44,8.74,22.55\r\nresnet152s,224,4851.5,422.071,2048,60.32,12.92,24.96\r\ncait_xxs24_224,224,4844.27,422.723,2048,11.96,2.53,20.29\r\nmobilenetv4_hybrid_medium,448,4842.67,158.554,768,11.07,4.2,29.64\r\nregnetv_040,288,4828.03,424.139,2048,20.64,6.6,20.3\r\nfastvit_mci1,256,4825.92,318.237,1536,21.46,4.67,27.3\r\nseresnext101_32x4d,224,4819.49,424.893,2048,48.96,8.02,21.26\r\nvit_betwixt_patch16_reg4_gap_256,256,4818.39,424.983,2048,60.4,16.52,28.24\r\nlegacy_seresnext101_32x4d,224,4801.69,426.468,2048,48.96,8.02,21.26\r\nresnetv2_34d,448,4786.42,427.829,2048,21.82,15.64,18.16\r\nese_vovnet99b,224,4786.12,427.842,2048,63.2,16.51,11.27\r\nregnetv_064,224,4767.79,429.504,2048,30.58,6.39,16.41\r\ntwins_svt_base,224,4767.47,429.527,2048,56.07,8.59,26.33\r\nvit_relpos_base_patch16_224,224,4766.24,429.634,2048,86.43,17.51,24.97\r\nconvformer_s3
6,224,4760.74,430.134,2048,40.01,7.67,30.5\r\nregnety_040,288,4750.27,431.085,2048,20.65,6.61,20.3\r\nvit_relpos_base_patch16_rpn_224,224,4743.1,431.724,2048,86.41,17.51,24.97\r\nlegacy_xception,299,4742.22,161.917,768,22.86,8.4,35.83\r\nvit_relpos_base_patch16_cls_224,224,4741.29,431.894,2048,86.43,17.6,25.12\r\nvit_base_patch16_rope_ape_224,224,4740.8,431.943,2048,86.59,17.58,23.9\r\nvit_relpos_base_patch16_clsgap_224,224,4738.56,432.153,2048,86.43,17.6,25.12\r\nvit_base_patch16_rope_224,224,4734.84,432.476,2048,86.43,17.58,23.9\r\nvgg19,224,4720.02,433.843,2048,143.67,19.63,14.86\r\nbeit_base_patch16_224,224,4717.08,434.12,2048,86.53,17.58,23.9\r\nmambaout_small,224,4698.39,435.836,2048,48.49,8.96,27.72\r\nvgg19_bn,224,4697.4,435.925,2048,143.68,19.66,14.86\r\nmambaout_small_rw,224,4691.66,436.465,2048,48.5,8.96,27.72\r\nbeitv2_base_patch16_224,224,4690.46,436.58,2048,86.53,17.58,23.9\r\nmobilevitv2_200,256,4688.24,81.872,384,18.45,7.22,32.15\r\nconvnextv2_nano,384,4682.86,437.282,2048,15.62,7.22,24.61\r\nnextvit_large,224,4681.98,437.362,2048,57.83,10.77,28.99\r\ncs3se_edgenet_x,256,4679.57,437.6,2048,50.72,11.53,12.94\r\nregnety_064,224,4679.25,437.623,2048,30.58,6.39,16.41\r\nvit_pe_core_base_patch16_224,224,4662.06,439.23,2048,93.67,17.82,24.21\r\nmvitv2_small,224,4647.2,440.639,2048,34.87,7.0,28.08\r\neva02_base_patch16_clip_224,224,4637.75,441.535,2048,86.26,17.62,26.32\r\nrepvgg_b3,224,4613.6,443.845,2048,110.96,26.21,7.55\r\nefficientnet_cc_b1_8e,240,4613.47,443.879,2048,39.72,0.75,15.44\r\nvit_small_patch16_384,384,4607.63,444.424,2048,22.2,15.52,50.78\r\ncoatnet_1_rw_224,224,4606.33,111.115,512,41.72,8.04,34.6\r\ncs3sedarknet_x,288,4603.7,444.797,2048,35.4,10.6,14.37\r\nresnetv2_152d,224,4603.53,444.825,2048,60.2,11.8,23.36\r\nresnetv2_152,224,4596.28,445.517,2048,60.19,11.55,22.56\r\ndeit3_small_patch16_384,384,4596.12,445.541,2048,22.21,15.52,50.78\r\nbeit3_base_patch16_224,224,4582.96,446.818,2048,86.66,17.63,23.9\r\nsequencer2d_s,224,4548.67,450.198
,2048,27.65,4.96,11.31\r\ntwins_pcpvt_large,224,4541.86,450.854,2048,60.99,9.84,35.82\r\nvit_base_patch32_clip_448,448,4541.57,450.892,2048,88.34,17.93,23.9\r\nseresnext50_32x4d,288,4534.89,451.547,2048,27.56,7.04,23.82\r\nmaxxvit_rmlp_tiny_rw_256,256,4534.82,338.67,1536,29.64,6.66,39.76\r\ntf_efficientnetv2_s,300,4531.23,451.927,2048,21.46,5.35,22.73\r\nswin_small_patch4_window7_224,224,4530.81,451.96,2048,49.61,8.77,27.47\r\nfastvit_sa36,256,4525.27,339.379,1536,31.46,5.59,29.46\r\nfasternet_l,224,4519.48,453.1,2048,93.47,15.52,20.46\r\nfocalnet_small_srf,224,4501.65,454.898,2048,49.89,8.62,26.26\r\nxception41p,299,4482.39,114.193,512,26.91,9.25,39.86\r\nmobilenetv4_conv_aa_large,384,4479.0,228.583,1024,32.59,7.07,32.29\r\nvit_betwixt_patch16_rope_reg4_gap_256,256,4475.08,457.576,2048,60.23,16.52,28.24\r\necaresnet50t,320,4464.74,458.65,2048,25.57,8.82,24.13\r\ncs3edgenet_x,288,4462.75,458.856,2048,47.82,14.59,16.36\r\nefficientvit_b3,256,4440.16,172.93,768,48.65,5.2,35.01\r\nflexivit_base,240,4439.69,461.235,2048,86.59,20.29,28.36\r\nefficientnet_el,300,4431.88,462.063,2048,10.59,8.0,30.7\r\nefficientnet_el_pruned,300,4429.41,462.325,2048,10.59,8.0,30.7\r\nswinv2_cr_small_224,224,4423.84,462.897,2048,49.7,9.07,50.27\r\nhrnet_w18_ssld,224,4397.67,465.655,2048,21.3,4.32,16.31\r\ntf_efficientnet_el,300,4395.2,465.909,2048,10.59,8.0,30.7\r\nswinv2_cr_small_ns_224,224,4394.08,466.021,2048,49.7,9.08,50.27\r\nresnet101,288,4388.02,466.674,2048,44.55,12.95,26.83\r\nhrnet_w18,224,4381.87,467.334,2048,21.3,4.32,16.31\r\ntf_efficientnet_cc_b1_8e,240,4380.92,467.445,2048,39.72,0.75,15.44\r\ngcvit_tiny,224,4377.04,467.839,2048,28.22,4.79,29.82\r\npoolformerv2_m36,224,4361.63,469.5,2048,56.08,8.81,22.02\r\ndensenet161,224,4340.55,471.771,2048,28.68,7.79,11.06\r\nvit_mediumd_patch16_reg4_gap_256,256,4300.97,476.114,2048,64.11,17.87,37.57\r\nregnety_080,224,4291.43,477.183,2048,39.18,8.0,17.97\r\nfocalnet_small_lrf,224,4275.18,478.992,2048,50.34,8.74,28.61\r\nmixnet_xl,224,4264.
26,480.221,2048,11.9,0.93,14.57\r\nres2net50_26w_8s,224,4245.19,482.36,2048,48.4,8.37,17.95\r\neva02_small_patch14_336,336,4213.4,486.02,2048,22.13,15.48,54.33\r\nlegacy_seresnet152,224,4180.87,489.778,2048,66.82,11.33,22.08\r\nxcit_tiny_24_p16_384,384,4176.93,490.243,2048,12.12,6.87,34.29\r\nefficientnet_b3_gn,288,4172.04,122.686,512,11.73,1.74,23.35\r\nhiera_small_abswin_256,256,4170.37,491.021,2048,34.36,8.29,26.38\r\nnest_tiny,224,4161.68,492.051,2048,17.06,5.83,25.48\r\nefficientnet_b3,320,4153.18,123.246,512,12.23,2.01,26.52\r\nresnest50d_4s2x40d,224,4146.3,493.881,2048,30.42,4.4,17.94\r\nseresnet152,224,4142.59,494.319,2048,66.82,11.57,22.61\r\nnest_tiny_jx,224,4128.56,496.009,2048,17.06,5.83,25.48\r\nlevit_conv_384_s8,224,4124.4,496.494,2048,39.06,9.95,35.86\r\nnfnet_f0,256,4106.25,498.699,2048,71.49,12.62,18.05\r\nefficientvit_l2,288,4094.87,187.507,768,63.71,11.51,32.19\r\nresnetaa101d,288,4091.56,500.496,2048,44.57,15.07,29.03\r\nvit_pe_core_small_patch16_384,384,4088.15,500.895,2048,23.78,15.69,51.23\r\ntf_efficientnet_b3,300,4066.36,125.871,512,12.23,1.87,23.83\r\nmambaout_tiny,288,4065.69,503.669,2048,26.55,7.41,27.58\r\nnf_regnet_b4,384,4064.55,503.822,2048,30.21,4.7,28.61\r\nconvnext_base,224,4048.0,505.88,2048,88.59,15.38,28.75\r\ninception_resnet_v2,299,4036.48,507.32,2048,55.84,13.18,25.06\r\nwide_resnet50_2,288,4027.82,508.405,2048,68.88,18.89,23.81\r\nese_vovnet57b,320,4018.5,509.588,2048,38.61,18.26,15.34\r\ndm_nfnet_f0,256,4009.79,510.693,2048,71.49,12.62,18.05\r\nmobilenetv4_conv_large,448,3988.84,192.497,768,32.59,8.75,37.17\r\nresnet152d,256,3987.29,513.577,2048,60.21,15.41,30.51\r\nregnety_080_tv,224,3985.18,513.837,2048,39.38,8.51,19.73\r\ndla102x2,224,3981.47,514.331,2048,41.28,9.34,29.91\r\npoolformer_m36,224,3979.9,514.54,2048,56.17,8.8,22.02\r\nvit_mediumd_patch16_rope_reg1_gap_256,256,3979.71,514.555,2048,63.95,17.65,37.02\r\nregnetz_d8,256,3978.98,514.642,2048,23.37,3.97,23.74\r\nregnetz_d8_evos,256,3978.02,514.779,2048,23.46,4.5,24
.92\r\nresnetv2_101,288,3976.22,515.009,2048,44.54,12.94,26.83\r\ncoatnet_rmlp_1_rw2_224,224,3973.16,128.833,512,41.72,8.11,40.13\r\nresnetblur101d,288,3965.63,387.282,1536,44.57,15.07,29.65\r\nmaxvit_tiny_tf_224,224,3949.52,129.596,512,30.92,5.6,35.78\r\nregnety_040_sgn,288,3932.24,520.764,2048,20.65,6.67,20.3\r\nrexnetr_300,288,3930.54,65.097,256,34.81,5.59,36.61\r\nvit_base_patch16_siglip_gap_256,256,3921.84,522.136,2048,85.84,23.13,33.23\r\nmaxvit_tiny_rw_256,256,3898.13,98.474,384,29.07,6.74,44.35\r\nvit_base_patch16_siglip_256,256,3895.47,525.672,2048,92.93,23.44,33.63\r\nlevit_384_s8,224,3893.81,525.915,2048,39.06,9.95,35.86\r\nswinv2_tiny_window16_256,256,3886.25,526.941,2048,28.35,6.68,39.02\r\nwide_resnet101_2,224,3881.55,527.569,2048,126.89,22.8,21.23\r\nmaxvit_rmlp_tiny_rw_256,256,3881.35,98.898,384,29.15,6.77,46.92\r\nregnetz_040,256,3880.6,527.703,2048,27.12,4.06,24.19\r\ninception_next_base,224,3863.58,530.017,2048,86.67,14.85,25.69\r\nregnetz_040_h,256,3863.52,530.023,2048,28.94,4.12,24.29\r\ncrossvit_base_240,240,3858.54,530.724,2048,105.03,21.22,36.33\r\nresnet200,224,3810.25,537.435,2048,64.67,15.07,32.19\r\nmvitv2_small_cls,224,3788.4,540.543,2048,34.87,7.04,28.17\r\nmobilenetv4_hybrid_large,384,3779.68,270.882,1024,37.76,7.77,34.52\r\nregnety_032,288,3756.39,545.141,2048,19.44,5.29,18.61\r\nvit_large_r50_s32_224,224,3755.87,545.224,2048,328.99,19.58,24.41\r\nregnetx_120,224,3744.69,546.847,2048,46.11,12.13,21.37\r\nhiera_base_224,224,3740.08,547.537,2048,51.52,9.4,30.42\r\nefficientnet_lite4,380,3738.4,102.683,384,13.01,4.04,45.66\r\nhgnetv2_b6,224,3726.09,549.588,2048,75.26,16.88,21.23\r\nvit_base_patch16_reg4_gap_256,256,3719.16,550.607,2048,86.62,23.5,33.89\r\ntf_efficientnet_lite4,380,3711.06,103.432,384,13.01,4.04,45.66\r\ndpn98,224,3701.93,553.174,2048,61.57,11.73,25.2\r\nlamhalobotnet50ts_256,256,3701.37,553.257,2048,22.57,5.02,18.44\r\npvt_v2_b4,224,3700.97,276.639,1024,62.56,10.14,53.74\r\nxception41,299,3656.51,139.986,512,26.97,9.28,3
9.86\r\npvt_v2_b5,224,3650.57,560.942,2048,81.96,11.76,50.92\r\ncoat_tiny,224,3632.8,563.695,2048,5.5,4.35,27.2\r\nconvnextv2_base,224,3602.26,568.475,2048,88.72,15.38,28.75\r\nswinv2_small_window8_256,256,3591.5,570.188,2048,49.73,11.58,40.14\r\nconvnext_small,288,3590.59,570.329,2048,50.22,14.39,35.65\r\nxcit_medium_24_p16_224,224,3580.2,571.967,2048,84.4,16.13,31.71\r\nswinv2_cr_small_ns_256,256,3567.3,574.056,2048,49.7,12.07,76.21\r\nresnext101_32x8d,224,3565.32,574.372,2048,88.79,16.48,31.21\r\nresnext101_64x4d,224,3554.08,576.175,2048,83.46,15.52,31.21\r\nvit_base_patch16_rope_reg1_gap_256,256,3547.85,577.208,2048,86.43,23.22,33.39\r\nsequencer2d_m,224,3545.48,577.588,2048,38.31,6.55,14.26\r\nresnet101d,320,3543.73,577.862,2048,44.57,16.48,34.77\r\nvit_small_r26_s32_384,384,3541.64,578.208,2048,36.47,10.43,29.85\r\nregnety_120,224,3537.9,578.815,2048,51.82,12.14,21.38\r\ncoatnet_1_224,224,3531.79,108.691,384,42.23,8.7,39.0\r\necaresnet101d,288,3524.02,581.097,2048,44.57,13.35,28.19\r\nseresnet101,288,3523.3,581.218,2048,49.33,12.95,26.87\r\nfastvit_ma36,256,3521.47,436.129,1536,43.98,7.82,34.98\r\nresnetrs101,288,3504.92,584.25,2048,63.62,13.56,28.53\r\neva02_base_patch14_224,224,3489.44,586.85,2048,85.76,23.22,36.55\r\nregnetz_d32,256,3486.22,587.396,2048,27.58,5.98,23.74\r\nefficientvit_b3,288,3481.01,220.579,768,48.65,6.58,44.2\r\ngmlp_b16_224,224,3466.84,590.683,2048,73.08,15.78,30.21\r\nvit_base_patch16_plus_240,240,3447.56,593.982,2048,117.56,27.41,33.08\r\nresnext101_32x4d,288,3442.19,594.905,2048,44.18,13.24,35.09\r\nfastvit_mci2,256,3435.2,447.074,1536,35.7,7.85,36.09\r\ndavit_base,224,3432.75,223.682,768,87.95,15.51,40.66\r\nvit_base_patch16_plus_clip_240,240,3429.78,597.068,2048,117.21,27.41,33.08\r\nhrnet_w40,224,3428.15,597.339,2048,57.56,12.75,25.29\r\nconvnext_tiny,384,3378.91,606.061,2048,28.59,13.14,39.48\r\ncaformer_m36,224,3378.49,606.131,2048,56.2,13.29,50.48\r\nregnetz_c16,320,3376.43,606.501,2048,13.46,3.92,25.88\r\nefficientnet_b3_gn,320
,3359.34,114.273,384,11.73,2.14,28.83\r\nhrnet_w48_ssld,224,3345.9,612.035,2048,77.47,17.34,28.56\r\nswinv2_base_window12_192,192,3343.32,612.51,2048,109.28,11.9,39.72\r\ntwins_svt_large,224,3341.43,612.853,2048,99.27,15.15,35.1\r\nxcit_nano_12_p8_384,384,3337.9,613.486,2048,3.05,6.34,46.08\r\nhrnet_w48,224,3335.33,613.961,2048,77.47,17.34,28.56\r\nconvformer_m36,224,3310.92,618.501,2048,57.05,12.89,42.05\r\nmobilenetv4_conv_aa_large,448,3307.86,232.13,768,32.59,9.63,43.94\r\nvit_small_patch8_224,224,3307.05,619.218,2048,21.67,22.44,80.84\r\nvit_base_r50_s16_224,224,3303.18,619.947,2048,97.89,21.66,35.28\r\npoolformerv2_m48,224,3298.86,620.761,2048,73.35,11.59,29.17\r\nxcit_small_12_p16_384,384,3294.26,621.624,2048,26.25,14.14,36.51\r\nhrnet_w30,224,3292.85,621.907,2048,37.71,8.15,21.21\r\neca_nfnet_l1,320,3286.16,623.157,2048,41.41,14.92,34.42\r\ntresnet_xl,224,3273.29,625.615,2048,78.44,15.2,15.34\r\nregnetz_c16_evos,320,3270.12,626.221,2048,13.49,3.86,25.88\r\nxception65p,299,3256.75,157.17,512,39.82,13.91,52.48\r\nvit_relpos_base_patch16_plus_240,240,3250.2,630.058,2048,117.38,27.3,34.33\r\ncait_xxs36_224,224,3250.14,630.081,2048,17.3,3.77,30.34\r\nlambda_resnet50ts,256,3249.83,630.144,2048,21.54,5.07,17.48\r\nregnetx_160,224,3245.66,630.943,2048,54.28,15.99,25.52\r\nhgnet_base,224,3243.77,631.302,2048,71.58,25.14,15.47\r\nmaxvit_rmlp_small_rw_224,224,3234.85,158.236,512,64.9,10.75,49.3\r\nmvitv2_base,224,3217.84,636.39,2048,51.47,10.16,40.5\r\nresnest101e,256,3201.71,639.6,2048,48.28,13.38,28.66\r\nxcit_tiny_24_p8_224,224,3196.46,640.645,2048,12.11,9.21,45.39\r\nswin_base_patch4_window7_224,224,3183.57,643.244,2048,87.77,15.47,36.63\r\nvit_base_patch16_dinov3,256,3183.11,643.332,2048,85.64,23.6,34.06\r\nvit_base_patch16_dinov3_qkvb,256,3178.86,644.189,2048,85.66,23.6,34.06\r\nrdnet_base,224,3175.61,644.864,2048,87.45,15.4,31.14\r\nseresnext101d_32x8d,224,3171.68,645.649,2048,93.59,16.72,32.05\r\nseresnext101_32x8d,224,3171.46,645.69,2048,93.57,16.48,31.25\r\nse
resnext101_64x4d,224,3161.82,647.667,2048,88.23,15.53,31.25\r\nseresnet152d,256,3158.8,648.283,2048,66.84,15.42,30.56\r\nmaxvit_tiny_pm_256,256,3152.21,121.779,384,30.09,6.61,47.9\r\nresnetrs152,256,3145.36,651.062,2048,86.62,15.59,30.83\r\nefficientnet_b4,320,3144.17,162.809,512,19.34,3.13,34.76\r\nresnet152,288,3142.04,651.746,2048,60.19,19.11,37.28\r\nregnety_160,224,3140.88,651.994,2048,83.59,15.96,23.04\r\nswinv2_cr_base_224,224,3124.62,655.387,2048,87.88,15.86,59.66\r\nfocalnet_base_srf,224,3112.79,657.874,2048,88.15,15.28,35.01\r\nswinv2_cr_base_ns_224,224,3099.81,660.628,2048,87.88,15.86,59.66\r\nconvnext_base,256,3096.0,661.439,2048,88.59,20.09,37.55\r\nvolo_d2_224,224,3085.97,663.598,2048,58.68,14.34,41.34\r\nsamvit_base_patch16_224,224,3084.99,663.805,2048,86.46,17.54,24.54\r\nconvformer_s18,384,3067.93,500.603,1536,26.77,11.63,46.49\r\nvit_medium_patch16_gap_384,384,3065.27,668.071,2048,39.03,26.08,67.54\r\ncaformer_s18,384,3060.73,501.782,1536,26.34,13.42,77.34\r\ncoat_lite_medium,224,3035.64,674.588,2048,44.57,9.81,40.06\r\nnfnet_f1,224,3032.76,675.237,2048,132.63,17.87,22.94\r\nconvit_base,224,3019.79,678.131,2048,86.54,17.52,31.77\r\npoolformer_m48,224,3000.18,682.574,2048,73.47,11.59,29.17\r\nefficientnetv2_s,384,2996.09,683.491,2048,21.46,8.44,35.77\r\ndm_nfnet_f1,224,2988.72,685.189,2048,132.63,17.87,22.94\r\ntnt_b_patch16_224,224,2985.8,685.856,2048,65.43,14.1,39.01\r\nmambaout_base_tall_rw,224,2979.62,687.271,2048,86.48,16.15,38.74\r\nswin_s3_small_224,224,2979.29,687.345,2048,49.74,9.43,37.84\r\nconvnextv2_tiny,384,2978.7,687.495,2048,28.64,13.14,39.48\r\nseresnextaa101d_32x8d,224,2968.82,689.779,2048,93.59,17.25,34.16\r\ncs3se_edgenet_x,320,2967.95,689.984,2048,50.72,18.01,20.21\r\nresnet200d,256,2909.53,703.837,2048,64.69,20.0,43.09\r\ncoat_mini,224,2908.19,704.162,2048,10.34,6.82,33.68\r\nresnet50x4_clip_gap,288,2905.33,704.847,2048,65.62,19.57,34.11\r\ntresnet_m,448,2896.32,353.508,1024,31.39,22.99,29.21\r\nseresnext101_32x4d,288,2883.61,71
0.158,2048,48.96,13.25,35.12\r\nmobilenetv4_conv_aa_large,480,2883.06,266.331,768,32.59,11.05,50.45\r\nfocalnet_base_lrf,224,2882.7,710.39,2048,88.75,15.43,38.13\r\ntf_efficientnetv2_s,384,2880.49,710.931,2048,21.46,8.44,35.77\r\ngcvit_small,224,2870.93,534.958,1536,51.09,8.57,41.61\r\nmambaout_base_short_rw,224,2867.72,714.102,2048,88.83,16.31,38.08\r\nmobilenetv5_base,256,2851.52,179.517,512,82.65,20.05,36.89\r\nregnetv_064,288,2841.89,720.59,2048,30.58,10.55,27.11\r\ncrossvit_15_dagger_408,408,2835.41,722.237,2048,28.5,21.45,95.05\r\nmixnet_xxl,224,2827.62,362.098,1024,23.96,2.04,23.43\r\nefficientnetv2_rw_s,384,2812.33,728.154,2048,23.94,8.72,38.03\r\nhiera_base_abswin_256,256,2801.33,731.022,2048,51.27,12.46,40.7\r\ndpn92,224,2792.55,733.332,2048,37.67,6.54,18.21\r\nhiera_base_plus_224,224,2788.68,734.338,2048,69.9,12.67,37.98\r\nregnety_064,288,2779.92,736.648,2048,30.58,10.56,27.11\r\nvitamin_base_224,224,2768.36,92.437,256,87.72,22.68,52.77\r\nresnet50x4_clip,288,2764.55,740.752,2048,87.14,21.35,35.27\r\nmaxxvit_rmlp_small_rw_256,256,2725.25,375.705,1024,66.01,14.67,58.38\r\ndpn131,224,2718.84,753.216,2048,79.25,16.09,32.97\r\ntiny_vit_21m_384,384,2678.27,191.124,512,21.22,13.72,77.83\r\ncoatnet_2_rw_224,224,2665.07,96.027,256,73.87,15.09,49.22\r\ncoatnet_rmlp_2_rw_224,224,2660.0,96.212,256,73.88,15.18,54.78\r\nxception65,299,2655.46,192.773,512,39.92,13.96,52.48\r\nfastvit_mci3,256,2652.57,385.996,1024,125.07,14.82,44.88\r\nmobilenetv4_hybrid_large,448,2638.11,291.076,768,37.76,10.74,48.61\r\nhrnet_w64,224,2621.12,781.277,2048,128.06,28.97,35.09\r\nmambaout_small_rw,288,2617.07,782.481,2048,48.5,14.81,45.82\r\nmambaout_small,288,2613.44,783.581,2048,48.49,14.81,45.82\r\nmvitv2_base_cls,224,2611.36,784.203,2048,65.44,10.23,40.65\r\nnest_small,224,2603.03,786.717,2048,38.35,10.35,40.04\r\nxcit_small_12_p8_224,224,2594.56,789.273,2048,26.21,18.69,47.21\r\nnest_small_jx,224,2587.46,791.444,2048,38.35,10.35,40.04\r\nefficientvit_l3,224,2559.51,199.99,512,246.04,
27.62,39.16\r\nregnety_080,288,2558.96,800.262,2048,39.18,13.22,29.69\r\nhrnet_w18_ssld,288,2548.99,803.388,2048,21.3,7.14,26.96\r\nresnet152d,320,2532.65,808.568,2048,60.21,24.08,47.67\r\nlevit_conv_512_s8,224,2532.31,808.692,2048,73.97,21.77,52.28\r\nmaxvit_rmlp_small_rw_256,256,2522.76,152.173,384,64.9,14.15,66.09\r\nmobilevitv2_150,384,2522.0,101.474,256,10.59,9.2,54.25\r\nefficientnetv2_m,320,2505.78,817.252,2048,54.14,11.01,39.97\r\nmaxvit_small_tf_224,224,2499.04,153.615,384,68.93,11.66,53.17\r\nnextvit_small,384,2498.6,819.595,2048,31.74,17.25,57.14\r\nswinv2_base_window8_256,256,2489.06,822.74,2048,87.92,20.37,52.59\r\nseresnet152,288,2481.36,825.285,2048,66.82,19.11,37.34\r\nefficientnet_b3_g8_gn,288,2476.79,826.815,2048,14.25,2.59,23.35\r\nmambaout_base_wide_rw,224,2461.61,831.907,2048,94.45,17.78,42.6\r\ncait_s24_224,224,2453.31,834.745,2048,46.92,9.35,40.58\r\ndensenet264d,224,2451.24,626.577,1536,72.74,13.57,14.0\r\ncoatnet_2_224,224,2447.23,104.577,256,74.68,16.5,52.67\r\nlevit_512_s8,224,2443.42,838.095,2048,73.97,21.77,52.28\r\nconvnext_base,288,2426.22,844.047,2048,88.59,25.43,47.53\r\ndpn107,224,2403.2,852.147,2048,86.92,18.38,33.46\r\nvit_so150m_patch16_reg4_gap_256,256,2397.51,854.162,2048,134.13,36.75,53.21\r\nvit_so150m_patch16_reg4_map_256,256,2369.01,864.423,2048,141.48,37.17,53.68\r\ncaformer_b36,224,2362.07,866.967,2048,98.75,23.22,67.3\r\nswin_s3_base_224,224,2358.25,868.38,2048,71.13,13.69,48.26\r\nmambaout_base_plus_rw,224,2347.65,872.292,2048,101.66,19.19,45.16\r\nhrnet_w44,224,2339.76,875.237,2048,67.06,14.94,26.92\r\nmambaout_base,224,2336.75,876.355,2048,84.81,15.83,36.95\r\nefficientvit_l2,384,2332.7,164.576,384,63.71,20.45,57.01\r\neca_nfnet_l2,320,2328.53,879.463,2048,56.72,20.95,47.43\r\nconvformer_b36,224,2311.24,886.032,2048,99.88,22.69,56.06\r\nsequencer2d_l,224,2302.07,889.59,2048,54.3,9.74,22.12\r\nvit_so150m2_patch16_reg1_gap_256,256,2302.03,889.585,2048,136.06,37.0,56.93\r\nresnet200,288,2291.93,893.501,2048,64.67,24.91,5
3.21\r\nswinv2_small_window16_256,256,2282.75,897.101,2048,49.73,12.82,66.29\r\nseresnet200d,256,2268.17,902.87,2048,71.86,20.01,43.15\r\necaresnet200d,256,2261.52,905.519,2048,64.69,20.0,43.15\r\nresnetrs200,256,2257.0,907.34,2048,93.21,20.18,43.42\r\nvolo_d3_224,224,2237.86,915.087,2048,86.33,20.78,60.09\r\nhgnetv2_b6,288,2234.26,916.579,2048,75.26,27.9,35.09\r\ncoat_small,224,2228.46,918.959,2048,21.69,12.61,44.25\r\nefficientnetv2_rw_m,320,2218.61,923.04,2048,53.24,12.72,47.14\r\nmobilenetv4_conv_aa_large,544,2215.49,231.059,512,32.59,14.19,64.79\r\nregnetz_d32,320,2207.69,927.604,2048,27.58,9.33,37.08\r\nmaxxvitv2_rmlp_base_rw_224,224,2196.85,466.075,1024,116.09,24.2,62.77\r\nefficientnet_b4,384,2185.51,175.672,384,19.34,4.51,50.04\r\nconvnext_large,224,2177.19,940.605,2048,197.77,34.4,43.13\r\nconvnextv2_base,288,2168.18,944.509,2048,88.72,25.43,47.53\r\nvit_pe_spatial_small_patch16_512,512,2116.03,967.787,2048,21.98,31.8,123.27\r\nresnext101_64x4d,288,2115.13,968.189,2048,83.46,25.66,51.59\r\ngcvit_base,224,2101.42,487.23,1024,90.32,14.87,55.48\r\nmobilevitv2_175,384,2099.02,121.927,256,14.25,12.47,63.29\r\nvit_betwixt_patch16_reg4_gap_384,384,2098.44,975.896,2048,60.6,39.71,85.28\r\nconvmixer_768_32,224,2085.59,981.913,2048,21.11,19.55,25.95\r\nvit_large_patch32_384,384,2058.99,994.599,2048,306.63,45.31,43.86\r\nxception71,299,2051.15,249.573,512,42.34,18.09,69.92\r\nxcit_tiny_12_p8_384,384,2042.87,1002.448,2048,6.71,14.13,69.14\r\nresnetv2_50x1_bit,448,2041.2,1003.265,2048,25.55,16.62,44.46\r\nconvnext_small,384,2007.96,1019.885,2048,50.22,25.58,63.37\r\nseresnet152d,320,2005.88,1020.932,2048,66.84,24.09,47.72\r\nregnety_120,288,2001.79,1023.019,2048,51.82,20.06,35.34\r\nresnetrs152,320,2000.92,1023.466,2048,86.62,24.34,48.14\r\nhrnet_w48_ssld,288,1999.28,1024.301,2048,77.47,28.66,47.21\r\ntf_efficientnet_b4,380,1997.18,192.232,384,19.34,4.49,49.49\r\nefficientvit_l3,256,1973.63,194.528,384,246.04,36.06,50.98\r\nconvnextv2_large,224,1967.67,1040.763,2048,19
7.96,34.4,43.13\r\nconvnext_base,320,1956.1,1046.921,2048,88.59,31.39,58.68\r\ncrossvit_18_dagger_408,408,1947.62,1051.475,2048,44.61,32.47,124.87\r\nswinv2_large_window12_192,192,1946.33,1052.182,2048,228.77,26.17,56.53\r\nxcit_large_24_p16_224,224,1943.11,1053.906,2048,189.1,35.86,47.27\r\nresnext101_32x16d,224,1934.65,1058.528,2048,194.03,36.27,51.18\r\nhgnet_base,288,1933.05,529.686,1024,71.58,41.55,25.57\r\nswinv2_cr_tiny_384,384,1930.68,1060.702,2048,28.33,15.34,161.01\r\nefficientnet_b3_g8_gn,320,1923.06,1064.905,2048,14.25,3.2,28.83\r\nnextvit_base,384,1903.94,1075.593,2048,44.79,24.62,73.95\r\nseresnext101d_32x8d,288,1899.64,1078.033,2048,93.59,27.64,52.95\r\nseresnext101_32x8d,288,1895.95,1080.12,2048,93.57,27.24,51.63\r\ndavit_large,224,1893.47,405.55,768,196.81,34.6,60.99\r\nresnetv2_50x3_bit,224,1886.62,1085.47,2048,217.32,37.06,33.34\r\nnest_base,224,1869.61,1095.346,2048,67.72,17.96,53.39\r\nnest_base_jx,224,1856.7,1102.962,2048,67.72,17.96,53.39\r\nvit_mediumd_patch16_reg4_gap_384,384,1854.58,1104.222,2048,64.27,43.67,113.51\r\nmobilevitv2_200,384,1849.57,103.77,192,18.45,16.24,72.34\r\nresnet200d,320,1848.56,1107.824,2048,64.69,31.25,67.33\r\nregnety_320,224,1831.46,1118.176,2048,145.05,32.34,30.26\r\nregnety_160,288,1819.43,1125.56,2048,83.59,26.37,38.07\r\nfastvit_mci4,256,1804.91,425.454,768,321.57,27.78,60.59\r\nmixer_l16_224,224,1804.38,1134.945,2048,208.2,44.6,41.69\r\nrdnet_large,224,1791.63,857.242,1536,186.27,34.74,46.67\r\necaresnet200d,288,1787.02,1145.978,2048,64.69,25.31,54.59\r\nseresnet200d,288,1780.21,1150.351,2048,71.86,25.32,54.6\r\nxcit_small_24_p16_384,384,1779.82,1150.615,2048,47.67,26.72,68.58\r\nseresnet269d,256,1777.61,1152.035,2048,113.67,26.59,53.6\r\nswin_large_patch4_window7_224,224,1776.35,1152.868,2048,196.53,34.53,54.94\r\nseresnextaa101d_32x8d,288,1774.3,1154.186,2048,93.59,28.51,56.44\r\nvolo_d1_384,384,1771.47,1156.043,2048,26.78,22.75,108.55\r\nregnetz_d8,320,1744.27,1174.066,2048,23.37,6.19,37.08\r\nregnetz_d8_evo
s,320,1741.6,1175.858,2048,23.46,7.03,38.92\r\nvit_base_patch16_18x2_224,224,1729.54,1184.057,2048,256.73,52.51,71.38\r\nswinv2_cr_large_224,224,1727.59,1185.409,2048,196.68,35.1,78.42\r\nmambaout_base_short_rw,288,1727.56,1185.422,2048,88.83,26.96,62.94\r\nnf_regnet_b5,384,1723.65,1188.119,2048,49.74,7.95,42.9\r\nresnetrs270,256,1722.09,1189.19,2048,129.86,27.06,55.84\r\nvit_small_patch14_dinov2,518,1701.74,1203.415,2048,22.06,46.76,198.79\r\ntf_efficientnetv2_m,384,1693.49,1209.263,2048,54.14,15.85,57.52\r\nvit_small_patch14_reg4_dinov2,518,1683.64,1216.356,2048,22.06,46.95,199.77\r\nmaxvit_rmlp_base_rw_224,224,1674.58,305.695,512,116.14,23.15,92.64\r\nnaflexvit_base_patch16_gap,384,1667.21,1228.336,2048,86.63,55.86,102.34\r\nnaflexvit_base_patch16_parfac_gap,384,1667.16,1228.366,2048,86.46,55.86,102.34\r\nnaflexvit_base_patch16_par_gap,384,1665.26,1229.773,2048,86.63,55.86,102.34\r\nnaflexvit_base_patch16_siglip,384,1656.33,1236.401,2048,92.93,56.12,102.2\r\nconvnext_large_mlp,256,1654.55,1237.734,2048,200.13,44.94,56.33\r\nmambaout_base_tall_rw,288,1653.07,1238.837,2048,86.48,26.69,64.04\r\nnaflexvit_base_patch16_map,384,1643.12,1246.34,2048,93.72,56.23,102.46\r\ndeit3_base_patch16_384,384,1636.52,1251.375,2048,86.88,55.54,101.56\r\nregnetz_040,320,1635.19,1252.398,2048,27.12,6.35,37.78\r\nvit_base_patch16_384,384,1635.02,1252.517,2048,86.86,55.54,101.56\r\ndeit_base_patch16_384,384,1633.53,1253.667,2048,86.86,55.54,101.56\r\nvit_base_patch16_siglip_gap_384,384,1632.13,1254.731,2048,86.09,55.43,101.3\r\ndeit_base_distilled_patch16_384,384,1630.44,1256.034,2048,87.63,55.65,101.82\r\nregnetz_040_h,320,1629.42,1256.834,2048,28.94,6.43,37.94\r\nlegacy_senet154,224,1623.66,1261.283,2048,115.09,20.77,38.69\r\nvit_base_patch16_clip_384,384,1623.06,1261.743,2048,86.86,55.54,101.56\r\nregnetx_320,224,1620.87,1263.446,2048,107.81,31.81,36.3\r\nsenet154,224,1620.61,1263.669,2048,115.09,20.77,38.69\r\nconvformer_s36,384,1620.5,947.784,1536,40.01,22.54,89.62\r\nswinv2_base_w
indow16_256,256,1619.76,1264.311,2048,87.92,22.02,84.71\r\nnfnet_f2,256,1619.2,1264.76,2048,193.78,33.76,41.85\r\nswinv2_base_window12to16_192to256,256,1617.29,1266.242,2048,87.92,22.02,84.71\r\nvit_base_patch16_siglip_384,384,1616.71,1266.702,2048,93.18,56.12,102.2\r\ncaformer_s36,384,1615.49,950.735,1536,39.3,26.08,150.33\r\ndeit3_large_patch16_224,224,1608.05,1273.525,2048,304.37,61.6,63.52\r\neva_large_patch14_196,196,1607.65,1273.841,2048,304.14,61.57,63.52\r\nvit_large_patch16_224,224,1603.56,1277.097,2048,304.33,61.6,63.52\r\neca_nfnet_l2,384,1593.71,1284.989,2048,56.72,30.05,68.28\r\ndm_nfnet_f2,256,1589.06,1288.75,2048,193.78,33.76,41.85\r\nrepvgg_d2se,320,1552.88,1318.768,2048,120.39,66.99,23.42\r\ncoatnet_rmlp_3_rw_224,224,1544.69,124.263,192,165.15,33.56,79.47\r\nnextvit_large,384,1543.88,1326.462,2048,57.83,32.0,90.76\r\ncoatnet_3_rw_224,224,1540.45,124.607,192,181.81,33.44,73.83\r\nbeit_large_patch16_224,224,1516.81,1350.133,2048,304.43,61.6,63.52\r\nvit_large_patch16_rope_224,224,1514.71,1352.01,2048,304.17,61.6,63.52\r\nvit_large_patch16_rope_ape_224,224,1514.67,1352.047,2048,304.37,61.6,63.52\r\nbeit3_large_patch16_224,224,1514.35,1352.33,2048,304.57,61.72,63.52\r\nbeitv2_large_patch16_224,224,1513.53,1353.061,2048,304.43,61.6,63.52\r\nmambaout_base_wide_rw,288,1482.8,1381.104,2048,94.45,29.39,70.41\r\neca_nfnet_l3,352,1477.33,1386.217,2048,72.04,32.57,73.12\r\nefficientnetv2_m,416,1474.83,1388.569,2048,54.14,18.6,67.5\r\nnfnet_f1,320,1473.27,1390.043,2048,132.63,35.97,46.77\r\nmaxvit_base_tf_224,224,1462.81,262.464,384,119.47,24.04,95.01\r\ndm_nfnet_f1,320,1450.42,1411.936,2048,132.63,35.97,46.77\r\nresnetrs200,320,1436.73,1425.393,2048,93.21,31.51,67.81\r\ncoatnet_3_224,224,1423.31,134.86,192,166.97,36.56,79.01\r\nseresnextaa101d_32x8d,320,1421.73,1440.431,2048,93.59,35.19,69.67\r\nbeit_base_patch16_384,384,1417.13,1445.114,2048,86.74,55.54,101.56\r\nflexivit_large,240,1415.84,1446.423,2048,304.36,70.99,75.39\r\nresnetv2_152x2_bit,224,1413.88,1448
.425,2048,236.34,46.95,45.11\r\nmambaout_base_plus_rw,288,1413.01,1449.316,2048,101.66,31.72,74.64\r\nmambaout_base,288,1409.41,1453.029,2048,84.81,26.16,61.08\r\nseresnet269d,288,1398.82,1464.019,2048,113.67,33.65,67.81\r\nconvnext_xlarge,224,1397.63,1465.275,2048,350.2,60.98,57.5\r\nconvnext_base,384,1369.14,1121.792,1536,88.59,45.21,84.49\r\ntiny_vit_21m_512,512,1362.34,187.871,256,21.26,26.93,177.93\r\nxcit_small_24_p8_224,224,1358.68,1507.264,2048,47.63,35.81,90.78\r\nmaxxvitv2_rmlp_large_rw_224,224,1352.56,757.019,1024,215.42,44.14,87.15\r\nefficientnet_b5,416,1332.7,192.051,256,30.39,8.27,80.68\r\nregnetz_e8,256,1325.35,1545.191,2048,57.7,9.91,40.94\r\nvolo_d4_224,224,1304.74,1569.598,2048,192.96,44.34,80.22\r\nconvnext_large,288,1302.28,1572.557,2048,197.77,56.87,71.29\r\nefficientnetv2_rw_m,416,1301.56,1573.427,2048,53.24,21.49,79.62\r\nnasnetalarge,331,1292.89,395.951,512,88.75,23.89,90.56\r\nresnetv2_101x1_bit,448,1275.57,1605.486,2048,44.54,31.65,64.93\r\nefficientvit_l3,320,1259.77,203.16,256,246.04,56.32,79.34\r\ninception_next_base,384,1255.98,1630.527,2048,86.67,43.64,75.48\r\nvit_large_r50_s32_384,384,1249.96,1638.389,2048,329.09,57.43,76.52\r\nmaxvit_tiny_tf_384,384,1248.83,153.705,192,30.98,17.53,123.42\r\nvit_large_patch16_siglip_256,256,1244.56,1645.501,2048,315.96,81.34,88.88\r\ndavit_huge,224,1242.92,617.834,768,348.92,61.23,81.32\r\nvit_large_patch16_siglip_gap_256,256,1234.45,1658.977,2048,303.36,80.8,88.34\r\npnasnet5large,331,1225.51,626.623,768,86.06,25.04,92.89\r\nconvnextv2_base,384,1222.32,1256.563,1536,88.72,45.21,84.49\r\nxcit_medium_24_p16_384,384,1219.57,1679.21,2048,84.4,47.39,91.64\r\ntresnet_l,448,1194.69,857.058,1024,55.99,43.59,47.56\r\nvit_large_patch14_clip_224,224,1191.18,1719.234,2048,304.2,81.08,88.79\r\nvit_large_patch14_224,224,1190.88,1719.667,2048,304.2,81.08,88.79\r\nvit_large_patch14_clip_quickgelu_224,224,1189.16,1722.153,2048,303.97,81.08,88.79\r\naimv2_large_patch14_224,224,1184.49,1728.942,2048,309.2,82.3,85.2\r
\nconvnextv2_large,288,1184.21,1729.359,2048,197.96,56.87,71.29\r\nvit_base_patch8_224,224,1178.26,1738.083,2048,86.58,78.22,161.69\r\nvit_large_patch14_xp_224,224,1175.83,1741.681,2048,304.06,81.01,88.79\r\nefficientnet_x_b5,448,1150.52,667.467,768,33.44,23.35,68.87\r\nefficientnet_b5,448,1147.25,223.096,256,30.39,9.59,93.56\r\nnf_regnet_b5,456,1144.88,1788.774,2048,49.74,11.7,61.95\r\nresnest200e,320,1143.47,1790.958,2048,70.2,35.69,82.78\r\nconvformer_m36,384,1135.19,901.989,1024,57.05,37.87,123.56\r\necaresnet269d,320,1130.3,1811.838,2048,102.09,41.53,83.69\r\nswinv2_cr_small_384,384,1128.09,1815.401,2048,49.7,29.7,298.03\r\ncaformer_m36,384,1126.36,909.053,1024,56.2,42.11,196.35\r\nhiera_large_224,224,1111.65,1842.241,2048,213.74,40.34,83.37\r\nregnety_640,224,1107.63,1848.926,2048,281.38,64.16,42.5\r\nconvmixer_1024_20_ks9_p14,224,1096.03,1868.482,2048,24.38,5.55,5.51\r\nvitamin_large_224,224,1091.87,234.421,256,333.32,75.05,112.83\r\nvitamin_large2_224,224,1089.41,234.951,256,333.58,75.05,112.83\r\nresnetv2_101x3_bit,224,1088.69,1881.091,2048,387.93,71.23,48.7\r\ntf_efficientnetv2_m,480,1083.51,1417.542,1536,54.14,24.76,89.84\r\nresnetrs350,288,1077.61,1900.426,2048,163.96,43.67,87.09\r\nxcit_tiny_24_p8_384,384,1074.65,1905.659,2048,12.11,27.05,132.95\r\nvit_base_r50_s16_384,384,1057.79,1936.041,2048,98.95,67.43,135.03\r\nconvnext_large_mlp,320,1054.54,1456.486,1536,200.13,70.21,88.02\r\nvit_so150m_patch16_reg4_gap_384,384,1048.89,1952.477,2048,134.42,87.97,165.47\r\nefficientnet_h_b5,448,1038.81,492.815,512,45.88,27.16,73.9\r\nregnety_160,384,1029.57,1989.118,2048,83.59,46.87,67.67\r\nvit_large_patch16_dinov3_qkvb,256,1029.26,1989.718,2048,303.13,82.43,90.56\r\nmaxvit_large_tf_224,224,1028.78,248.793,256,211.79,43.68,127.35\r\nvit_large_patch16_dinov3,256,1026.71,1994.65,2048,303.08,82.43,90.56\r\nnaflexvit_so150m2_patch16_reg1_gap,384,1010.65,2026.358,2048,136.06,89.53,178.22\r\nvolo_d2_384,384,1008.13,2031.405,2048,58.87,46.17,184.51\r\neva02_large_patch14
_224,224,1005.85,2036.015,2048,303.27,81.15,97.2\r\ntf_efficientnet_b5,456,1004.58,254.791,256,30.39,10.46,98.86\r\neva02_large_patch14_clip_224,224,1002.05,2043.748,2048,304.11,81.18,97.2\r\nnaflexvit_so150m2_patch16_reg1_map,384,995.19,2057.826,2048,142.46,90.33,179.2\r\nvit_so150m2_patch16_reg1_gap_384,384,991.12,2066.277,2048,136.33,89.53,178.22\r\nefficientnetv2_l,384,990.83,2066.885,2048,118.52,36.1,101.16\r\nresnet50x16_clip_gap,384,982.19,2085.058,2048,136.2,70.32,100.64\r\ntf_efficientnetv2_l,384,974.57,2101.364,2048,118.52,36.1,101.16\r\nswinv2_large_window12to16_192to256,256,950.58,2154.409,2048,196.74,47.81,121.53\r\nresnet50x16_clip,384,945.35,2166.321,2048,167.33,74.9,103.54\r\ncoat_lite_medium_384,384,945.29,1624.823,1536,44.57,28.73,116.7\r\nmvitv2_large,224,932.8,2195.479,2048,217.99,43.87,112.02\r\necaresnet269d,352,927.13,2208.901,2048,102.09,50.25,101.25\r\nxcit_medium_24_p8_224,224,920.1,2225.785,2048,84.32,63.53,121.23\r\nresnetrs270,352,899.75,2276.12,2048,129.86,51.13,105.48\r\neca_nfnet_l3,448,898.88,2278.324,2048,72.04,52.55,118.4\r\nefficientvit_l3,384,879.99,218.145,192,246.04,81.08,114.02\r\ncoatnet_4_224,224,878.36,218.547,192,275.43,62.48,129.26\r\nxcit_small_12_p8_384,384,870.17,2353.509,2048,26.21,54.92,138.29\r\nvit_base_patch16_siglip_gap_512,512,867.37,2361.099,2048,86.43,107.0,246.15\r\nvit_so400m_patch14_siglip_224,224,867.06,2361.926,2048,427.68,110.26,106.73\r\nvit_so400m_patch14_siglip_gap_224,224,866.73,2362.831,2048,412.44,109.57,106.13\r\nvit_so400m_patch16_siglip_gap_256,256,865.6,2365.932,2048,412.65,109.62,106.13\r\nvit_so400m_patch16_siglip_256,256,861.63,2376.831,2048,427.89,110.31,106.73\r\nvit_base_patch16_siglip_512,512,860.89,2378.87,2048,93.52,108.22,247.74\r\nnfnet_f2,352,854.97,2395.33,2048,193.78,63.22,79.06\r\ncoatnet_rmlp_2_rw_384,384,841.99,113.98,96,73.88,47.69,209.43\r\nconvnext_xlarge,288,841.95,1824.263,1536,350.2,100.8,95.05\r\nvitamin_large_256,256,841.65,228.08,192,333.38,99.0,154.99\r\nvitamin_large
2_256,256,839.97,228.527,192,333.64,99.0,154.99\r\ndm_nfnet_f2,352,836.91,2447.022,2048,193.78,63.22,79.06\r\nvolo_d5_224,224,834.53,2454.007,2048,295.46,72.4,118.11\r\nresmlp_big_24_224,224,828.37,2472.255,2048,129.14,100.23,87.31\r\nswin_base_patch4_window12_384,384,806.13,2540.471,2048,87.9,47.19,134.78\r\nmambaout_base_plus_rw,384,803.09,1274.999,1024,101.66,56.39,132.7\r\nmvitv2_large_cls,224,801.74,2554.373,2048,234.58,42.17,111.69\r\nmaxvit_small_tf_384,384,799.48,160.064,128,69.02,35.87,183.65\r\nconvformer_b36,384,795.91,964.869,768,99.88,66.67,164.75\r\nnfnet_f3,320,794.27,2578.388,2048,254.92,68.77,83.93\r\ntresnet_xl,448,793.15,968.229,768,78.44,60.77,61.31\r\ncaformer_b36,384,792.95,968.477,768,98.75,72.33,261.79\r\nswinv2_cr_base_384,384,791.45,1940.661,1536,87.88,50.57,333.68\r\neva02_base_patch14_448,448,790.53,2590.579,2048,87.12,107.11,259.14\r\nvit_pe_spatial_base_patch16_512,512,787.32,2601.163,2048,86.43,107.13,246.54\r\ndm_nfnet_f3,320,777.21,2634.988,2048,254.92,68.77,83.93\r\nconvnextv2_huge,224,752.48,2041.176,1536,660.29,115.0,79.07\r\nconvnext_large,384,738.87,1385.83,1024,197.77,101.1,126.74\r\nconvnext_large_mlp,384,738.17,1387.157,1024,200.13,101.11,126.74\r\nresnetrs420,320,736.86,2779.296,2048,191.89,64.2,126.56\r\ncait_xxs24_384,384,711.74,2877.419,2048,12.03,9.63,122.66\r\nseresnextaa201d_32x8d,320,708.74,2889.553,2048,149.39,70.22,138.71\r\nefficientnetv2_xl,384,701.17,2920.757,2048,208.12,52.81,139.2\r\nefficientnet_x_b5,576,696.46,735.091,512,33.44,38.59,113.83\r\nresnext101_32x32d,224,695.05,2946.462,2048,468.53,87.29,91.12\r\ntf_efficientnetv2_xl,384,691.33,2962.322,2048,208.12,52.81,139.2\r\nmaxvit_tiny_tf_512,512,690.77,138.941,96,31.05,33.49,257.59\r\nmaxxvitv2_rmlp_base_rw_384,384,688.65,557.552,384,116.09,72.98,213.74\r\nvit_so150m2_patch16_reg1_gap_448,448,686.54,2983.0,2048,136.5,127.51,287.05\r\nconvnextv2_large,384,672.4,1522.841,1024,197.96,101.1,126.74\r\nxcit_large_24_p16_384,384,666.43,3073.031,2048,189.1,105.35,13
7.17\r\nregnetz_e8,320,649.61,3152.611,2048,57.7,15.46,63.94\r\nswinv2_cr_huge_224,224,649.41,3153.561,2048,657.83,115.97,121.08\r\nvitamin_xlarge_256,256,646.87,197.833,128,436.06,130.13,177.37\r\nfocalnet_huge_fl3,224,637.44,2409.583,1536,745.28,118.26,104.8\r\nefficientnet_b6,528,634.75,201.615,128,43.04,19.4,167.39\r\nefficientnetv2_l,480,633.31,2425.286,1536,118.52,56.4,157.99\r\nefficientnet_h_b5,576,627.52,611.877,384,45.88,44.9,122.13\r\ntf_efficientnetv2_l,480,623.21,1643.053,1024,118.52,56.4,157.99\r\nvit_base_patch14_dinov2,518,618.57,3310.779,2048,86.58,151.71,397.58\r\nrdnet_large,384,615.4,831.928,512,186.27,102.09,137.13\r\nvit_base_patch14_reg4_dinov2,518,615.27,3328.54,2048,86.58,152.25,399.53\r\nvit_huge_patch14_gap_224,224,610.92,3352.266,2048,630.76,166.73,138.74\r\nregnety_320,384,608.54,2523.988,1536,145.05,95.0,88.87\r\ndeit3_huge_patch14_224,224,603.31,3394.556,2048,632.13,167.4,139.41\r\nresnetrs350,384,601.95,3402.231,2048,163.96,77.59,154.74\r\nvit_huge_patch14_clip_quickgelu_224,224,599.05,3418.673,2048,632.08,167.4,139.41\r\nvit_huge_patch14_224,224,598.33,3422.759,2048,630.76,167.4,139.41\r\nvit_huge_patch14_clip_224,224,597.68,3426.517,2048,632.05,167.4,139.41\r\nvit_huge_patch14_xp_224,224,595.63,3438.285,2048,631.8,167.3,139.41\r\ntf_efficientnet_b6,528,590.92,216.562,128,43.04,19.4,167.39\r\naimv2_huge_patch14_224,224,590.22,3469.815,2048,680.85,179.01,126.22\r\nmaxvit_xlarge_tf_224,224,586.25,327.461,192,506.99,97.52,191.04\r\nsam2_hiera_tiny,896,578.03,110.686,64,26.85,99.86,384.63\r\ncait_xs24_384,384,556.25,3681.757,2048,26.67,19.28,183.98\r\nregnety_1280,224,551.81,3711.383,2048,644.81,127.66,71.58\r\nmaxvit_rmlp_base_rw_384,384,525.11,365.588,192,116.14,70.97,318.95\r\nhiera_huge_224,224,521.69,3925.661,2048,672.78,124.85,150.95\r\neva_large_patch14_336,336,517.86,3954.693,2048,304.53,191.1,270.24\r\nvit_large_patch14_clip_quickgelu_336,336,516.69,3963.611,2048,304.29,191.11,270.24\r\ndeit3_large_patch16_384,384,516.61,3964.23
3,2048,304.76,191.21,270.24\r\nvit_large_patch16_siglip_gap_384,384,516.18,3967.529,2048,303.69,190.85,269.55\r\nvit_large_patch16_siglip_384,384,516.04,3968.609,2048,316.28,192.07,270.75\r\nvit_large_patch14_clip_336,336,515.77,3970.696,2048,304.53,191.11,270.24\r\nvit_large_patch16_384,384,515.73,3971.026,2048,304.72,191.21,270.24\r\nvolo_d3_448,448,514.21,3982.72,2048,86.63,96.33,446.83\r\nvit_giant_patch16_gap_224,224,504.37,4060.415,2048,1011.37,202.46,139.26\r\nxcit_large_24_p8_224,224,501.33,4085.041,2048,188.93,141.23,181.56\r\naimv2_large_patch14_336,336,500.38,4092.84,2048,309.53,194.22,227.08\r\nresnest269e,416,492.95,4154.55,2048,110.93,77.69,171.98\r\nseresnextaa201d_32x8d,384,486.19,4212.25,2048,149.39,101.11,199.72\r\ncoatnet_5_224,224,485.8,263.438,128,687.47,145.49,194.24\r\nvit_pe_core_large_patch14_336,336,483.64,4234.505,2048,317.15,192.33,271.43\r\nconvnext_xxlarge,256,482.44,2122.474,1024,846.47,198.09,124.45\r\nsam2_hiera_small,896,479.72,133.369,64,33.95,123.99,442.63\r\nresnetv2_152x4_bit,224,479.03,4275.207,2048,936.53,186.9,90.22\r\nconvnext_xlarge,384,478.88,1603.679,768,350.2,179.2,168.99\r\ndavit_giant,224,475.04,1616.646,768,1406.47,192.92,153.06\r\ncait_xxs36_384,384,474.87,4312.709,2048,17.37,14.35,183.7\r\nswin_large_patch4_window12_384,384,469.35,4363.393,2048,196.74,104.08,202.16\r\nvitamin_large_336,336,469.13,204.597,96,333.57,175.72,307.47\r\nvitamin_large2_336,336,468.21,204.993,96,333.83,175.72,307.47\r\nresnetv2_152x2_bit,384,468.02,4375.804,2048,236.34,136.16,132.56\r\nresnetv2_50x3_bit,448,463.84,2207.583,1024,217.32,145.7,133.37\r\nmaxvit_base_tf_384,384,461.58,277.268,128,119.65,73.8,332.9\r\nswinv2_cr_large_384,384,457.08,2240.233,1024,196.68,108.96,404.96\r\nbeit_large_patch16_384,384,456.77,4483.57,2048,305.0,191.21,270.24\r\nxcit_small_24_p8_384,384,453.97,4511.288,2048,47.63,105.24,265.91\r\nconvnextv2_huge,288,451.81,2266.379,1024,660.29,190.1,130.7\r\neva02_large_patch14_clip_336,336,451.31,4537.863,2048,304.43,19
1.34,289.13\r\nmaxvit_small_tf_512,512,444.35,143.994,64,69.13,67.26,383.77\r\nnfnet_f3,416,437.54,4680.64,2048,254.92,115.58,141.78\r\nswinv2_base_window12to24_192to384,384,435.28,1764.309,768,87.92,55.25,280.36\r\ndm_nfnet_f3,416,429.7,4765.999,2048,254.92,115.58,141.78\r\nresnetrs420,416,429.27,4770.792,2048,191.89,108.45,213.79\r\ncait_s24_384,384,428.53,4779.127,2048,47.06,32.17,245.31\r\nnfnet_f4,384,421.63,4857.219,2048,316.07,122.14,147.57\r\nvit_huge_plus_patch16_dinov3,256,417.56,4904.612,2048,840.51,224.88,193.59\r\nvit_huge_plus_patch16_dinov3_qkvb,256,417.18,4909.128,2048,840.59,224.88,193.59\r\ndm_nfnet_f4,384,411.72,4974.158,2048,316.07,122.14,147.57\r\nefficientnetv2_xl,512,393.98,2599.033,1024,208.12,93.85,247.32\r\ntf_efficientnetv2_xl,512,388.81,2633.596,1024,208.12,93.85,247.32\r\neva_giant_patch14_224,224,387.88,5279.844,2048,1012.56,267.18,192.64\r\nvit_giant_patch14_224,224,387.5,5285.042,2048,1012.61,267.18,192.64\r\neva_giant_patch14_clip_224,224,385.8,5308.339,2048,1012.59,267.18,192.64\r\nvit_giant_patch14_clip_224,224,385.37,5314.299,2048,1012.65,267.18,192.64\r\nmvitv2_huge_cls,224,382.53,4015.337,1536,694.8,120.67,243.63\r\nnaflexvit_so400m_patch16_siglip,384,370.9,5521.627,2048,427.89,259.65,319.77\r\nvit_so400m_patch16_siglip_gap_384,384,367.67,5570.08,2048,413.02,258.11,318.42\r\nefficientnet_b7,600,366.96,261.567,96,66.35,38.33,289.94\r\nbeit3_giant_patch14_224,224,366.58,5586.646,2048,1013.22,267.56,192.64\r\nvit_so400m_patch16_siglip_384,384,365.98,5595.863,2048,428.26,259.65,319.77\r\nvit_giantopt_patch16_siglip_gap_256,256,363.26,5637.818,2048,1134.84,298.42,199.62\r\nvitamin_xlarge_336,336,362.49,264.797,96,436.06,230.18,347.33\r\nvit_giantopt_patch16_siglip_256,256,362.01,5657.273,2048,1163.17,299.66,200.43\r\naimv2_1b_patch14_224,224,358.15,5718.28,2048,1234.96,322.43,170.39\r\nvitamin_large_384,384,357.56,178.943,64,333.71,234.44,440.16\r\nvitamin_large2_384,384,357.38,179.029,64,333.97,234.44,440.16\r\nregnety_640,384,356.0
,2876.358,1024,281.38,188.47,124.83\r\ntf_efficientnet_b7,600,346.01,277.402,96,66.35,38.33,289.94\r\nfocalnet_huge_fl4,224,345.52,4445.443,1536,686.46,118.9,113.34\r\nresnet50x64_clip_gap,448,343.12,5968.712,2048,365.03,253.96,233.22\r\nresnetv2_152x2_bit,448,342.89,4479.558,1536,236.34,184.99,180.43\r\nresnet50x64_clip,448,326.47,6273.078,2048,420.38,265.02,239.13\r\nmaxvit_large_tf_384,384,325.27,295.099,96,212.03,132.55,445.84\r\nxcit_medium_24_p8_384,384,308.04,4986.337,1536,84.32,186.67,354.73\r\nvolo_d4_448,448,300.39,6817.636,2048,193.41,197.13,527.35\r\nnfnet_f5,416,297.56,6882.656,2048,377.21,170.71,204.56\r\ndm_nfnet_f5,416,293.57,6976.07,2048,377.21,170.71,204.56\r\ncait_s36_384,384,285.22,7180.277,2048,68.37,47.99,367.4\r\nvit_so400m_patch14_siglip_gap_378,378,283.48,7224.443,2048,412.99,333.46,451.19\r\nvit_so400m_patch14_siglip_gap_384,384,283.41,7226.085,2048,412.99,333.46,451.19\r\ndavit_base_fl,768,282.71,679.091,192,90.37,190.32,530.15\r\nvit_so400m_patch14_siglip_378,378,282.67,7245.2,2048,428.23,335.4,452.89\r\nvit_so400m_patch14_siglip_384,384,281.91,7264.56,2048,428.23,335.4,452.89\r\nvit_large_patch16_siglip_gap_512,512,277.28,7385.837,2048,304.15,361.84,655.36\r\nvit_large_patch16_siglip_512,512,276.06,7418.722,2048,316.74,364.0,657.48\r\nvitamin_xlarge_384,384,275.82,231.98,64,436.06,306.38,493.46\r\nvit_intern300m_patch14_448,448,270.87,7560.752,2048,304.01,362.05,656.39\r\nswinv2_large_window12to24_192to384,384,269.92,1896.804,512,196.74,116.15,407.83\r\nresnetv2_101x3_bit,448,268.95,3807.272,1024,387.93,280.33,194.78\r\nvit_pe_lang_large_patch14_448,448,267.44,7657.652,2048,291.42,346.99,629.09\r\naimv2_large_patch14_448,448,267.31,7661.558,2048,309.98,367.84,491.78\r\nconvnextv2_huge,384,257.98,1984.629,512,660.29,337.96,232.35\r\nmaxvit_base_tf_512,512,256.85,249.128,64,119.88,138.02,703.99\r\naimv2_huge_patch14_336,336,256.78,7975.736,2048,681.34,416.36,337.08\r\nvit_huge_patch14_clip_336,336,256.65,7979.738,2048,632.46,390.97,407.54\
r\nvit_pe_spatial_large_patch14_448,448,256.62,7980.632,2048,303.96,362.05,656.39\r\nsam2_hiera_base_plus,896,248.23,257.786,64,68.68,227.48,828.88\r\neva02_large_patch14_448,448,238.71,8579.373,2048,305.08,362.33,689.95\r\nefficientnet_b8,672,232.32,413.172,96,87.41,63.48,442.89\r\nbeit_large_patch16_512,512,229.05,8941.301,2048,305.67,362.24,656.39\r\nnfnet_f4,512,224.77,9111.54,2048,316.07,216.26,262.26\r\nfocalnet_large_fl3,384,223.64,4578.686,1024,239.13,105.06,168.04\r\ndm_nfnet_f4,512,223.11,9179.394,2048,316.07,216.26,262.26\r\nfocalnet_large_fl4,384,222.21,4608.099,1024,239.32,105.2,181.78\r\ntf_efficientnet_b8,672,221.15,434.054,96,87.41,63.48,442.89\r\nvit_gigantic_patch14_clip_quickgelu_224,224,219.93,9312.164,2048,1844.91,483.96,275.37\r\nvit_gigantic_patch14_224,224,219.76,9319.187,2048,1844.44,483.95,275.37\r\nvit_gigantic_patch14_clip_224,224,219.75,9319.563,2048,1844.91,483.96,275.37\r\nnfnet_f6,448,217.83,9401.891,2048,438.36,229.7,273.62\r\ndm_nfnet_f6,448,215.31,9511.835,2048,438.36,229.7,273.62\r\nvit_huge_patch14_clip_quickgelu_378,378,200.73,10202.817,2048,632.68,503.79,572.79\r\nvit_huge_patch14_clip_378,378,200.52,10213.121,2048,632.68,503.79,572.79\r\nvit_large_patch14_dinov2,518,198.95,10293.824,2048,304.37,507.15,1058.82\r\nvit_large_patch14_reg4_dinov2,518,198.22,10331.809,2048,304.37,508.9,1064.02\r\nregnety_1280,384,197.57,3887.255,768,644.81,374.99,210.2\r\nvit_so400m_patch14_siglip_gap_448,448,195.65,10467.589,2048,413.33,487.18,764.26\r\nvit_so400m_patch16_siglip_gap_512,512,195.26,10488.528,2048,413.53,487.4,764.26\r\nvolo_d5_448,448,194.59,10524.69,2048,295.91,315.06,737.92\r\nvit_so400m_patch16_siglip_512,512,194.24,10543.442,2048,428.77,490.13,766.65\r\nswinv2_cr_giant_224,224,193.51,10583.453,2048,2598.76,483.85,309.15\r\nmaxvit_xlarge_tf_384,384,188.3,339.83,64,475.32,292.78,668.76\r\nswinv2_cr_huge_384,384,186.12,2750.843,512,657.94,352.04,583.18\r\nvit_huge_patch16_gap_448,448,183.41,11166.087,2048,631.67,544.7,636.83\r\nmax
vit_large_tf_512,512,180.62,265.69,48,212.33,244.75,942.15\r\nxcit_large_24_p8_384,384,169.9,6026.887,1024,188.93,415.0,531.82\r\naimv2_3b_patch14_224,224,169.45,12086.215,2048,2720.66,705.91,252.44\r\nfocalnet_xlarge_fl3,384,168.22,4565.275,768,408.79,185.61,223.99\r\ndm_nfnet_f5,544,167.19,9187.321,1536,377.21,290.97,349.71\r\neva_giant_patch14_336,336,167.08,12257.226,2048,1013.01,620.64,550.67\r\nnfnet_f7,480,166.28,12316.844,2048,499.5,300.08,355.86\r\nnfnet_f5,544,165.88,12346.205,2048,377.21,290.97,349.71\r\nfocalnet_xlarge_fl4,384,163.78,4689.124,768,409.03,185.79,242.31\r\ncait_m36_384,384,163.47,12528.127,2048,271.22,173.11,734.81\r\nbeit3_giant_patch14_336,336,158.64,12909.888,2048,1013.67,621.52,550.67\r\nvit_giantopt_patch16_siglip_gap_384,384,156.29,13103.913,2048,1135.33,694.1,567.12\r\nvit_giantopt_patch16_siglip_384,384,155.79,13146.226,2048,1163.66,696.85,568.91\r\naimv2_1b_patch14_336,336,152.63,13418.427,2048,1235.61,743.59,454.16\r\ntf_efficientnet_l2,475,149.53,427.959,64,480.31,172.11,609.89\r\nvolo_d5_512,512,148.68,10330.771,1536,296.09,425.09,1105.37\r\nconvmixer_1536_20,224,148.35,6902.39,1024,51.63,48.68,33.03\r\nconvnextv2_huge,512,145.78,1756.018,256,660.29,600.81,413.07\r\naimv2_huge_patch14_448,448,139.0,14733.363,2048,682.03,774.02,731.38\r\nnfnet_f6,576,131.16,11710.769,1536,438.36,378.69,452.2\r\ndm_nfnet_f6,576,129.1,11897.433,1536,438.36,378.69,452.2\r\nmobilenetv5_300m,768,106.75,599.494,64,294.13,435.74,842.16\r\nregnety_2560,384,106.68,4799.267,512,1282.6,747.83,296.49\r\nmobilenetv5_300m_enc,768,104.56,612.002,64,294.13,435.74,842.16\r\nresnetv2_152x4_bit,480,104.15,4916.058,512,936.53,844.84,414.26\r\nmaxvit_xlarge_tf_512,512,104.02,307.587,32,475.77,534.14,1413.22\r\ndavit_huge_fl,768,103.08,1862.509,192,360.64,744.84,1060.3\r\nnfnet_f7,608,101.03,15202.759,1536,499.5,480.39,570.85\r\neva02_enormous_patch14_clip_224,224,99.84,20512.832,2048,4350.56,1132.46,497.58\r\naimv2_1b_patch14_448,448,83.2,18461.913,1536,1236.53,1367.
03,983.56\r\nvit_gigantic_patch14_clip_378,378,75.08,27277.037,2048,1845.7,1429.82,1047.37\r\nsam2_hiera_large,1024,74.52,644.039,48,212.15,907.48,2190.34\r\naimv2_3b_patch14_336,336,74.01,27671.258,2048,2721.64,1615.48,674.17\r\ncait_m48_448,448,73.11,21009.941,1536,356.46,329.41,1708.23\r\nvit_7b_patch16_dinov3,256,66.08,30994.328,2048,6716.03,1775.1,515.87\r\nsamvit_base_patch16,1024,62.58,1021.415,64,89.67,486.43,1343.27\r\nvit_giant_patch14_dinov2,518,61.37,25029.018,1536,1136.48,1784.2,2757.89\r\nvit_giant_patch14_reg4_dinov2,518,61.14,25121.871,1536,1136.48,1790.08,2771.21\r\nvit_pe_lang_gigantic_patch14_448,448,56.4,27234.124,1536,1740.92,1931.99,1664.88\r\nefficientnet_l2,800,55.4,433.172,24,480.31,479.12,1707.39\r\nswinv2_cr_giant_384,384,54.97,4657.035,256,2598.76,1450.71,1394.86\r\neva_giant_patch14_560,560,54.68,28089.079,1536,1014.45,1906.76,2577.17\r\ntf_efficientnet_l2,800,53.53,448.292,24,480.31,479.12,1707.39\r\nvit_pe_spatial_gigantic_patch14_448,448,53.05,28954.801,1536,1851.89,2055.25,1771.04\r\nvit_pe_core_gigantic_patch14_448,448,52.95,29010.38,1536,1882.03,2060.12,1774.21\r\naimv2_3b_patch14_448,448,40.73,25143.814,1024,2723.02,2939.61,1462.76\r\nvit_so400m_patch14_siglip_gap_896,896,34.79,22077.289,768,416.87,2731.49,8492.88\r\nsamvit_large_patch16,1024,29.56,2164.943,64,308.28,1493.86,2553.78\r\nsamvit_huge_patch16,1024,18.93,2535.149,48,637.03,2982.23,3428.16\r\n"
  },
  {
    "path": "results/benchmark-infer-fp32-nchw-pt221-cpu-i9_10940x-dynamo.csv",
    "content": "model,infer_samples_per_sec,infer_step_time,infer_batch_size,infer_img_size,param_count\r\ntf_mobilenetv3_small_minimal_100,309.77,3.161,1,224,2.04\r\nlcnet_035,308.98,3.161,1,224,1.64\r\nresnet10t,300.12,3.231,1,176,5.44\r\nlcnet_050,295.81,3.311,1,224,1.88\r\ntinynet_e,273.01,3.592,1,106,2.04\r\nmobilenetv3_small_050,270.37,3.63,1,224,1.59\r\nlcnet_075,266.53,3.674,1,224,2.36\r\nmobilenetv2_035,249.23,3.942,1,224,1.68\r\nlcnet_100,244.53,3.999,1,224,2.95\r\nmobilenetv3_small_100,225.97,4.345,1,224,2.54\r\nmobilenetv3_small_075,220.29,4.46,1,224,2.04\r\ntf_mobilenetv3_small_100,219.41,4.475,1,224,2.54\r\nlcnet_150,219.1,4.466,1,224,4.5\r\nmobilenetv2_050,214.82,4.575,1,224,1.97\r\ntf_mobilenetv3_small_075,208.46,4.716,1,224,2.04\r\nmnasnet_050,205.77,4.783,1,224,2.22\r\nregnetx_002,198.21,4.962,1,224,2.68\r\nese_vovnet19b_slim,192.31,5.101,1,224,3.17\r\ntinynet_d,191.92,5.128,1,152,2.34\r\nresnet18,187.86,5.216,1,160,11.69\r\nmobilenetv2_075,187.52,5.242,1,224,2.64\r\nmnasnet_small,187.12,5.265,1,224,2.03\r\nefficientvit_b0,184.09,5.346,1,224,3.41\r\nese_vovnet19b_slim_dw,184.05,5.341,1,224,1.9\r\nmobilenetv2_100,182.7,5.378,1,224,3.5\r\nhgnetv2_b0,180.83,5.43,1,224,6.0\r\nmnasnet_075,180.52,5.443,1,224,3.17\r\nhardcorenas_a,179.9,5.461,1,224,5.26\r\nmobilenetv3_large_075,176.23,5.58,1,224,3.99\r\nefficientnet_lite0,174.03,5.647,1,224,4.65\r\nlevit_128s,173.36,5.665,1,224,7.78\r\nmnasnet_100,173.17,5.679,1,224,4.38\r\nsemnasnet_050,171.62,5.745,1,224,2.08\r\ngernet_s,167.59,5.865,1,224,8.17\r\nmobilenetv3_large_100,165.26,5.952,1,224,5.48\r\ncs3darknet_s,164.21,5.995,1,256,3.28\r\nmobilenetv3_rw,162.02,6.071,1,224,5.48\r\ncs3darknet_focus_s,161.22,6.111,1,256,3.27\r\nmobilenetv2_140,159.18,6.179,1,224,6.11\r\ntf_mobilenetv3_large_minimal_100,159.13,6.186,1,224,3.92\r\nresnet10t,155.59,6.322,1,224,5.44\r\nregnety_002,154.79,6.375,1,224,3.16\r\ntf_mobilenetv3_large_075,150.25,6.551,1,224,3.99\r\nresnet14t,149.95,6.563,1,176,10.08\r\nhardcorenas_b,146.
58,6.72,1,224,5.18\r\nhardcorenas_c,146.24,6.738,1,224,5.52\r\nfbnetc_100,143.95,6.847,1,224,5.57\r\nregnetx_004_tv,143.93,6.85,1,224,5.5\r\nlevit_conv_128s,142.89,6.888,1,224,7.78\r\ntf_mobilenetv3_large_100,142.19,6.933,1,224,5.48\r\nsemnasnet_075,141.97,6.949,1,224,2.91\r\nsemnasnet_100,141.36,6.977,1,224,3.89\r\nhgnetv2_b0,141.33,6.974,1,288,6.0\r\nmobilenetv2_110d,139.05,7.092,1,224,4.52\r\nspnasnet_100,138.75,7.11,1,224,4.42\r\ndla46_c,138.38,7.137,1,224,1.3\r\nhgnetv2_b1,138.06,7.135,1,224,6.34\r\nefficientnet_es,137.5,7.174,1,224,5.44\r\nefficientnet_es_pruned,137.05,7.197,1,224,5.44\r\ntf_efficientnet_es,135.86,7.256,1,224,5.44\r\nresnet14t,135.07,7.296,1,224,10.08\r\ndla46x_c,134.66,7.334,1,224,1.07\r\nefficientnet_lite1,134.08,7.36,1,240,5.42\r\nregnetx_004,134.07,7.361,1,224,5.16\r\nregnetx_006,133.51,7.39,1,224,6.2\r\nsemnasnet_140,133.03,7.417,1,224,6.11\r\nvit_tiny_r_s16_p8_224,132.94,7.419,1,224,6.34\r\nmnasnet_140,132.56,7.439,1,224,7.12\r\nregnetx_008,131.77,7.489,1,224,7.26\r\nmixer_s32_224,130.18,7.575,1,224,19.1\r\ntf_efficientnet_lite0,129.71,7.61,1,224,4.65\r\ntinynet_c,129.52,7.631,1,184,2.46\r\nlevit_192,128.69,7.669,1,224,10.95\r\nmobilevitv2_050,127.38,7.758,1,256,1.37\r\nresnext26ts,126.34,7.809,1,256,10.3\r\nrepghostnet_058,125.0,7.916,1,224,2.55\r\nefficientvit_m1,124.63,7.943,1,224,2.98\r\nmobileone_s1,123.39,8.002,1,224,4.83\r\nlevit_conv_128,122.84,8.041,1,224,9.21\r\nvit_tiny_patch16_224,122.27,8.075,1,224,5.72\r\nrepghostnet_080,121.61,8.134,1,224,3.28\r\nefficientvit_m2,120.55,8.204,1,224,4.19\r\nrepghostnet_050,120.14,8.244,1,224,2.31\r\ngmixer_12_224,119.61,8.256,1,224,12.7\r\nefficientnet_lite2,118.78,8.318,1,260,6.09\r\nedgenext_xx_small,118.74,8.34,1,288,1.33\r\nconvnext_atto,118.59,8.331,1,224,3.7\r\nlevit_conv_192,117.23,8.428,1,224,10.95\r\nresnet18,117.15,8.431,1,224,11.69\r\nese_vovnet19b_dw,116.09,8.507,1,224,6.54\r\npoolformer_s12,116.09,8.504,1,224,11.92\r\nhgnetv2_b1,115.23,8.574,1,288,6.34\r\nrepghostnet_100,115.12,
8.593,1,224,4.07\r\nmobileone_s2,115.04,8.591,1,224,7.88\r\nefficientformer_l1,114.17,8.656,1,224,12.29\r\nregnety_008,114.08,8.665,1,224,6.26\r\npit_ti_distilled_224,113.8,8.689,1,224,5.1\r\neca_resnext26ts,113.67,8.691,1,256,10.3\r\nresnext26ts,113.57,8.698,1,288,10.3\r\nconvnextv2_atto,113.43,8.717,1,224,3.71\r\nhardcorenas_e,112.85,8.76,1,224,8.07\r\npit_ti_224,112.61,8.783,1,224,4.85\r\nregnety_008_tv,112.43,8.792,1,224,6.43\r\nhardcorenas_f,111.54,8.864,1,224,8.2\r\nmobilenetv2_120d,111.47,8.87,1,224,5.83\r\ntf_efficientnet_lite1,110.97,8.904,1,240,5.42\r\nrexnetr_100,110.89,8.92,1,224,4.88\r\nrepghostnet_111,110.87,8.924,1,224,4.54\r\nresnet34,110.55,8.941,1,160,21.8\r\nregnetx_016,110.01,8.986,1,224,9.19\r\ntinynet_b,109.25,9.056,1,188,3.73\r\nvit_small_patch32_224,109.13,9.057,1,224,22.88\r\nconvnext_atto_ols,109.08,9.061,1,224,3.7\r\nresnet18d,108.71,9.091,1,224,11.71\r\nrepghostnet_130,108.47,9.122,1,224,5.48\r\npoolformerv2_s12,107.65,9.183,1,224,11.89\r\nseresnext26ts,107.11,9.232,1,256,10.39\r\nconvnext_femto_ols,106.07,9.323,1,224,5.23\r\nmobilevitv2_075,106.07,9.326,1,256,2.87\r\nhardcorenas_d,105.79,9.356,1,224,7.5\r\ndla60x_c,105.71,9.368,1,224,1.32\r\necaresnext26t_32x4d,105.69,9.351,1,224,15.41\r\nregnety_004,105.63,9.369,1,224,4.34\r\nlegacy_seresnext26_32x4d,104.98,9.42,1,224,16.79\r\nrexnetr_130,104.91,9.431,1,224,7.61\r\nresnest14d,104.32,9.477,1,224,10.61\r\ngernet_m,104.16,9.496,1,224,21.14\r\ndeit_tiny_distilled_patch16_224,103.75,9.535,1,224,5.91\r\necaresnext50t_32x4d,103.7,9.532,1,224,15.41\r\ngmlp_ti16_224,103.63,9.549,1,224,5.87\r\nmobileone_s3,103.43,9.566,1,224,10.17\r\nregnety_006,103.41,9.567,1,224,6.06\r\nfastvit_t8,102.84,9.625,1,256,4.03\r\nedgenext_x_small,102.14,9.7,1,256,2.34\r\nlegacy_seresnet18,101.61,9.742,1,224,11.78\r\nvisformer_tiny,101.0,9.794,1,224,10.32\r\nconvnext_femto,100.98,9.799,1,224,5.22\r\neca_resnext26ts,100.84,9.809,1,288,10.3\r\nrepghostnet_150,100.56,9.847,1,224,6.58\r\nresnet26,100.24,9.873,1,224,16.0\r
\nrexnet_130,100.02,9.897,1,224,7.56\r\nghostnet_050,99.95,9.921,1,224,2.59\r\nrepvgg_a0,99.88,9.911,1,224,9.11\r\ntf_efficientnet_lite2,99.83,9.913,1,260,6.09\r\nresmlp_12_224,99.81,9.912,1,224,15.35\r\nefficientnet_b0,99.79,9.92,1,224,5.29\r\nlevit_128,99.57,9.926,1,224,9.21\r\nseresnet18,99.43,9.958,1,224,11.78\r\nedgenext_xx_small,99.11,10.004,1,256,1.33\r\nhgnetv2_b2,99.01,9.995,1,224,11.22\r\npit_xs_224,98.69,10.031,1,224,10.62\r\nefficientvit_m3,98.64,10.045,1,224,6.9\r\nrexnet_100,98.61,10.039,1,224,4.8\r\nseresnet18,98.27,10.075,1,288,11.78\r\nseresnext26d_32x4d,98.23,10.069,1,224,16.81\r\nrexnetr_150,98.17,10.081,1,224,9.78\r\nbotnet26t_256,97.98,10.099,1,256,12.49\r\nseresnext26t_32x4d,97.89,10.103,1,224,16.81\r\neca_botnext26ts_256,97.8,10.118,1,256,10.59\r\nresnetblur18,97.7,10.13,1,224,11.69\r\nresnet18d,96.95,10.212,1,288,11.71\r\ntf_efficientnetv2_b0,96.59,10.256,1,192,7.14\r\nlevit_256,96.35,10.273,1,224,18.89\r\nefficientnet_em,96.25,10.288,1,240,6.9\r\nrexnet_150,95.9,10.322,1,224,9.73\r\nhalonet26t,95.84,10.328,1,256,12.48\r\nmixer_s16_224,95.44,10.369,1,224,18.53\r\nseresnext26ts,94.94,10.43,1,288,10.39\r\nlevit_conv_256,94.65,10.461,1,224,18.89\r\nefficientvit_b1,94.33,10.5,1,256,9.1\r\ncs3darknet_focus_m,94.26,10.506,1,288,9.3\r\neca_halonext26ts,94.18,10.514,1,256,10.76\r\ncs3darknet_focus_m,93.99,10.537,1,256,9.3\r\nhgnet_tiny,93.7,10.567,1,224,14.74\r\ngcresnext26ts,93.68,10.57,1,256,10.48\r\nresnet26d,93.37,10.604,1,224,16.01\r\nrepghostnet_200,93.05,10.645,1,224,9.8\r\nedgenext_x_small,92.43,10.725,1,288,2.34\r\ncs3darknet_m,92.2,10.746,1,288,9.31\r\nmobilevitv2_100,91.89,10.775,1,256,4.9\r\nefficientvit_b1,91.28,10.851,1,224,9.1\r\nese_vovnet19b_dw,91.06,10.875,1,288,6.54\r\nefficientnet_lite3,90.69,10.918,1,300,8.2\r\nefficientvit_m4,90.48,10.956,1,224,8.8\r\ncs3darknet_m,90.33,10.969,1,256,9.31\r\nconvnext_pico_ols,90.01,11.004,1,224,9.06\r\ntf_efficientnetv2_b0,89.68,11.051,1,224,7.14\r\nconvnext_atto,89.44,11.077,1,288,3.7\r\nresnetb
lur18,89.14,11.115,1,288,11.69\r\nlambda_resnet26t,89.02,11.126,1,256,10.96\r\nconvnextv2_femto,88.99,11.135,1,288,5.23\r\nresnext50_32x4d,88.42,11.202,1,160,25.03\r\nfastvit_t12,88.28,11.221,1,256,7.55\r\nefficientnet_b0_g16_evos,88.06,11.258,1,224,8.11\r\nghostnet_100,87.95,11.268,1,224,5.18\r\nconvnextv2_atto,87.77,11.29,1,288,3.71\r\nlambda_resnet26rpt_256,87.55,11.317,1,256,10.99\r\nresnet26t,87.18,11.365,1,256,16.01\r\nresnext50_32x4d,86.94,11.395,1,176,25.03\r\ntf_efficientnet_em,86.66,11.438,1,240,6.9\r\ntf_efficientnet_lite3,86.35,11.474,1,300,8.2\r\nefficientvit_b1,86.24,11.493,1,288,9.1\r\nefficientvit_m0,85.82,11.565,1,224,2.35\r\nrepvgg_a1,85.61,11.575,1,224,14.09\r\npit_xs_distilled_224,85.6,11.564,1,224,11.0\r\ntf_efficientnet_b0,85.5,11.591,1,224,5.29\r\nconvnextv2_femto,84.79,11.69,1,224,5.23\r\nmobilevitv2_125,84.78,11.688,1,256,7.48\r\ngcresnext26ts,84.57,11.723,1,288,10.48\r\nghostnet_130,84.43,11.743,1,224,7.36\r\nresnet26,84.24,11.764,1,288,16.0\r\nefficientnet_cc_b0_4e,84.1,11.789,1,224,13.31\r\nmobilevit_xxs,83.93,11.822,1,256,1.27\r\nedgenext_small,83.4,11.884,1,256,5.59\r\nconvnext_atto_ols,82.48,12.023,1,288,3.7\r\nresnet18,82.07,12.078,1,288,11.69\r\nrexnet_200,81.94,12.099,1,224,16.37\r\nresnet33ts,81.85,12.111,1,256,19.68\r\nrexnetr_200,81.41,12.177,1,224,16.52\r\nseresnext26d_32x4d,81.39,12.178,1,288,16.81\r\nhgnetv2_b2,81.32,12.191,1,288,11.22\r\ndeit_tiny_patch16_224,81.06,12.197,1,224,5.72\r\ndla34,80.92,12.257,1,224,15.74\r\nvit_tiny_r_s16_p8_384,80.91,12.256,1,384,6.36\r\nresnet34,80.89,12.257,1,224,21.8\r\nefficientformerv2_s0,80.24,12.365,1,224,3.6\r\nhgnetv2_b3,80.24,12.357,1,224,16.29\r\nregnetx_032,80.0,12.395,1,224,15.3\r\necaresnet26t,79.04,12.546,1,256,16.01\r\nresnet26d,78.81,12.583,1,288,16.01\r\ntf_efficientnetv2_b1,78.59,12.626,1,192,8.14\r\nmobileone_s4,78.54,12.626,1,224,14.95\r\ndarknet17,78.53,12.631,1,256,14.3\r\nresnet32ts,78.49,12.634,1,288,17.96\r\nefficientnet_cc_b0_8e,78.27,12.671,1,224,24.01\r\nfastvit_sa12,
78.11,12.697,1,256,11.58\r\nregnetz_005,78.08,12.705,1,224,7.12\r\ntf_efficientnetv2_b2,77.95,12.727,1,208,10.1\r\nseresnext26t_32x4d,77.84,12.74,1,288,16.81\r\nxcit_nano_12_p16_224,76.77,12.935,1,224,3.05\r\nconvnext_pico,76.48,12.968,1,224,9.05\r\nresnext50_32x4d,76.13,13.026,1,224,25.03\r\nedgenext_small_rw,76.06,13.038,1,256,7.83\r\ntf_efficientnetv2_b1,75.93,13.072,1,240,8.14\r\nefficientnet_b1_pruned,75.54,13.138,1,240,6.33\r\ntf_efficientnet_cc_b0_4e,75.53,13.139,1,224,13.31\r\nresnet34d,75.17,13.196,1,224,21.82\r\nskresnet18,74.63,13.294,1,224,11.96\r\ntinynet_a,74.63,13.286,1,192,6.19\r\nresnet33ts,74.49,13.318,1,288,19.68\r\nefficientvit_m5,73.98,13.416,1,224,12.47\r\nfbnetv3_b,73.94,13.426,1,224,8.6\r\nresnest26d,73.62,13.478,1,224,17.07\r\neca_resnet33ts,73.59,13.48,1,256,19.68\r\nresnext50d_32x4d,73.4,13.513,1,224,25.05\r\nregnetz_b16,73.29,13.544,1,224,9.72\r\nconvnext_pico_ols,73.18,13.56,1,288,9.06\r\nfastvit_s12,73.13,13.565,1,256,9.47\r\nnf_regnet_b0,73.1,13.577,1,192,8.76\r\nrepvgg_b0,73.06,13.582,1,224,15.82\r\nresnet50d,72.99,13.592,1,160,25.58\r\nresnet26t,72.97,13.595,1,320,16.01\r\ngernet_l,72.73,13.644,1,256,31.08\r\nresnet50,72.62,13.661,1,160,25.56\r\nresnetaa34d,72.41,13.706,1,224,21.82\r\nregnetz_005,71.79,13.827,1,288,7.12\r\ndla60x,71.78,13.825,1,224,17.35\r\nresnet32ts,71.73,13.835,1,256,17.96\r\nvit_tiny_patch16_384,71.63,13.85,1,384,5.79\r\ntf_efficientnet_cc_b0_8e,71.62,13.862,1,224,24.01\r\nedgenext_small,71.48,13.881,1,320,5.59\r\nlevit_256d,71.24,13.93,1,224,26.21\r\npvt_v2_b0,71.09,13.963,1,224,3.67\r\nlevit_384,70.96,13.987,1,224,39.13\r\nconvnext_femto,70.73,14.033,1,288,5.22\r\ncspresnext50,70.51,14.072,1,256,20.57\r\nresnet50,70.29,14.117,1,176,25.56\r\nefficientnet_b1,70.28,14.123,1,224,7.79\r\nnf_regnet_b0,70.25,14.133,1,256,8.76\r\nefficientnet_b1,70.19,14.139,1,256,7.79\r\nrexnetr_200,70.18,14.142,1,288,16.52\r\nefficientnet_b2_pruned,70.05,14.176,1,260,8.31\r\nregnetx_040,69.8,14.216,1,224,22.12\r\nlevit_conv_256d,69.6
6,14.25,1,224,26.21\r\nrepvit_m1,69.25,14.34,1,224,5.49\r\npit_s_224,68.96,14.396,1,224,23.46\r\ntf_efficientnetv2_b2,68.65,14.465,1,260,10.1\r\nconvit_tiny,68.61,14.477,1,224,5.71\r\nhgnetv2_b4,68.28,14.537,1,224,19.8\r\nfbnetv3_b,68.27,14.548,1,256,8.6\r\nrepvit_m1_1,68.27,14.547,1,224,8.8\r\npit_s_distilled_224,68.06,14.584,1,224,24.04\r\nconvnext_nano_ols,68.05,14.592,1,224,15.65\r\nmobilevitv2_150,67.96,14.608,1,256,10.59\r\nhgnet_tiny,67.68,14.668,1,288,14.74\r\nconvnextv2_nano,67.59,14.691,1,224,15.62\r\nfbnetv3_d,67.58,14.695,1,224,10.31\r\nrepvit_m2,67.43,14.729,1,224,8.8\r\nmobilevit_xs,67.34,14.746,1,256,2.32\r\nefficientnet_b2,67.32,14.752,1,256,9.11\r\neca_resnet33ts,67.3,14.753,1,288,19.68\r\ndpn68,67.16,14.781,1,224,12.61\r\nrepvit_m1_0,67.13,14.798,1,224,7.3\r\nseresnet34,67.11,14.799,1,224,21.96\r\ndarknet21,67.05,14.81,1,256,20.86\r\ncrossvit_tiny_240,66.95,14.831,1,240,7.01\r\nresnet34,66.86,14.855,1,288,21.8\r\nres2net50_48w_2s,66.42,14.951,1,224,25.29\r\ndeit_small_patch16_224,66.3,14.97,1,224,22.05\r\nedgenext_small_rw,66.23,14.99,1,320,7.83\r\nconvnext_femto_ols,66.17,15.011,1,288,5.23\r\nxcit_nano_12_p16_384,66.05,15.042,1,384,3.05\r\nxcit_tiny_12_p16_224,65.97,15.061,1,224,6.72\r\neca_vovnet39b,65.95,15.057,1,224,22.6\r\nhgnetv2_b3,65.91,15.062,1,288,16.29\r\nnf_ecaresnet26,65.82,15.089,1,224,16.0\r\ndpn48b,65.5,15.164,1,224,9.13\r\nnf_resnet26,65.5,15.158,1,224,16.0\r\ntiny_vit_5m_224,65.5,15.16,1,224,12.08\r\nrepvit_m0_9,65.43,15.189,1,224,5.49\r\nresnet34d,65.35,15.193,1,288,21.82\r\ncrossvit_9_240,65.34,15.205,1,240,8.55\r\nvit_small_patch16_224,65.24,15.219,1,224,22.05\r\nvit_small_patch32_384,65.21,15.227,1,384,22.92\r\nlegacy_seresnet34,65.08,15.266,1,224,21.96\r\necaresnet26t,64.74,15.341,1,320,16.01\r\nefficientformerv2_s1,64.64,15.371,1,224,6.19\r\nlevit_conv_384,64.63,15.366,1,224,39.13\r\ndeit_small_distilled_patch16_224,64.61,15.372,1,224,22.44\r\nresnet50,64.51,15.39,1,224,25.56\r\nfbnetv3_d,64.45,15.413,1,256,10.31\r\ngcresnet
33ts,64.41,15.421,1,256,19.88\r\nxcit_nano_12_p8_224,64.37,15.435,1,224,3.05\r\nmixer_b32_224,64.25,15.457,1,224,60.29\r\necaresnet50t,64.06,15.502,1,160,25.57\r\ndeit3_small_patch16_224,64.01,15.52,1,224,22.06\r\nrexnetr_300,63.86,15.548,1,224,34.81\r\nresnetaa50,63.85,15.555,1,224,25.56\r\nresnet50c,63.76,15.578,1,224,25.58\r\ndla60,63.69,15.594,1,224,22.04\r\neva02_tiny_patch14_336,63.67,15.6,1,336,5.76\r\nresnet50d,63.52,15.634,1,224,25.58\r\nresnetaa34d,63.47,15.648,1,288,21.82\r\nsehalonet33ts,63.29,15.693,1,256,13.69\r\ntf_efficientnet_b1,63.15,15.73,1,240,7.79\r\neva02_tiny_patch14_224,63.08,15.756,1,224,5.5\r\nconvnext_nano,63.02,15.761,1,224,15.59\r\ncrossvit_9_dagger_240,63.01,15.768,1,240,8.78\r\nresnet50t,62.97,15.772,1,224,25.57\r\nregnetz_b16,62.94,15.784,1,288,9.72\r\nmobilevitv2_175,62.88,15.796,1,256,14.25\r\ngmixer_24_224,62.87,15.797,1,224,24.72\r\nese_vovnet39b,62.86,15.8,1,224,24.57\r\nresnetv2_50,62.7,15.843,1,224,25.55\r\nconvnextv2_pico,62.49,15.9,1,224,9.07\r\nregnety_016,62.47,15.905,1,224,11.2\r\nconvmixer_1024_20_ks9_p14,62.44,15.912,1,224,24.38\r\nregnety_032,62.22,15.964,1,224,19.44\r\npoolformerv2_s24,62.2,15.972,1,224,21.34\r\nsebotnet33ts_256,62.0,16.018,1,256,13.7\r\nresnext50_32x4d,61.88,16.049,1,288,25.03\r\nresnetv2_50t,61.87,16.056,1,224,25.57\r\nsedarknet21,61.82,16.069,1,256,20.95\r\npoolformer_s24,61.78,16.078,1,224,21.39\r\nlegacy_seresnext50_32x4d,61.71,16.1,1,224,27.56\r\nseresnet50,61.4,16.177,1,160,28.09\r\necaresnet50d_pruned,61.19,16.232,1,288,19.94\r\nseresnet33ts,61.19,16.237,1,288,19.78\r\nregnetx_064,61.04,16.277,1,224,26.21\r\nregnetz_c16,60.87,16.326,1,256,13.46\r\ncs3darknet_focus_l,60.81,16.338,1,256,21.15\r\nresnetv2_50d,60.71,16.364,1,224,25.57\r\nrexnet_300,60.64,16.383,1,224,34.71\r\nhgnetv2_b4,60.48,16.423,1,288,19.8\r\ndpn68b,60.42,16.449,1,224,12.61\r\nresnetaa50d,60.42,16.443,1,224,25.58\r\ntf_efficientnetv2_b3,60.41,16.451,1,240,14.36\r\nefficientnet_b2,60.08,16.541,1,288,9.11\r\nresnext50d_32x4d,60.0
2,16.55,1,288,25.05\r\nflexivit_small,59.88,16.587,1,240,22.06\r\ngmlp_s16_224,59.87,16.6,1,224,19.42\r\nresnet50s,59.72,16.639,1,224,25.68\r\ncspresnet50,59.68,16.646,1,256,21.62\r\ntf_efficientnet_b2,59.68,16.655,1,260,9.11\r\nresnet51q,59.45,16.712,1,256,35.7\r\ntf_mixnet_s,59.43,16.728,1,224,4.13\r\nefficientformer_l3,59.35,16.753,1,224,31.41\r\nhgnet_small,59.1,16.812,1,224,24.36\r\nbat_resnext26ts,59.07,16.823,1,256,10.73\r\necaresnet50d_pruned,59.0,16.833,1,224,19.94\r\nconvnext_tiny,58.9,16.87,1,224,28.59\r\nefficientnet_lite4,58.86,16.881,1,380,13.01\r\ngcresnet33ts,58.84,16.884,1,288,19.88\r\nseresnet33ts,58.51,16.988,1,256,19.78\r\nefficientvit_b2,58.48,16.994,1,224,24.33\r\nseresnet34,58.46,17.003,1,288,21.96\r\nvisformer_small,58.42,17.01,1,224,40.22\r\ncspresnet50d,58.36,17.027,1,256,21.64\r\nresnetrs50,58.34,17.031,1,160,35.69\r\ncoat_lite_tiny,58.24,17.063,1,224,5.72\r\nnf_regnet_b1,58.24,17.068,1,256,10.22\r\nresnetblur50,58.22,17.07,1,224,25.56\r\nefficientvit_b2,58.16,17.085,1,288,24.33\r\nefficientnet_el_pruned,57.87,17.175,1,300,10.59\r\ntf_efficientnet_el,57.67,17.232,1,300,10.59\r\nnf_seresnet26,57.65,17.239,1,224,17.4\r\ncs3darknet_l,57.64,17.242,1,256,21.16\r\nrepvgg_a2,57.52,17.278,1,224,28.21\r\ninception_next_tiny,57.48,17.288,1,224,28.06\r\ncoatnet_pico_rw_224,57.46,17.295,1,224,10.85\r\nefficientnet_el,57.41,17.313,1,300,10.59\r\nnf_regnet_b1,57.28,17.357,1,288,10.22\r\necaresnetlight,56.96,17.449,1,224,30.16\r\nhrnet_w18_small,56.78,17.506,1,224,13.19\r\nxcit_tiny_12_p16_384,56.22,17.686,1,384,6.72\r\nresnetblur50d,56.13,17.711,1,224,25.58\r\necaresnet50t,56.01,17.75,1,224,25.57\r\nconvnext_pico,55.92,17.778,1,288,9.05\r\nefficientnet_cc_b1_8e,55.92,17.777,1,240,39.72\r\nghostnetv2_100,55.86,17.804,1,224,6.16\r\nregnetv_040,55.71,17.841,1,224,20.64\r\ntf_efficientnet_lite4,55.69,17.848,1,380,13.01\r\ncs3darknet_focus_l,55.54,17.901,1,288,21.15\r\nresnet50,55.49,17.917,1,288,25.56\r\nefficientnet_b3_pruned,55.48,17.922,1,300,9.86\r\ngho
stnetv2_160,55.39,17.953,1,224,12.39\r\nhaloregnetz_b,55.34,17.964,1,224,11.68\r\necaresnet50d,55.26,17.988,1,224,25.58\r\nnf_regnet_b2,55.2,18.012,1,272,14.31\r\ntf_efficientnet_cc_b1_8e,55.14,18.033,1,240,39.72\r\nghostnetv2_130,55.06,18.063,1,224,8.96\r\ntiny_vit_11m_224,54.89,18.114,1,224,20.35\r\nresnest50d_1s4x24d,54.69,18.175,1,224,25.68\r\nvit_srelpos_small_patch16_224,54.55,18.224,1,224,21.97\r\ncs3darknet_l,54.51,18.243,1,288,21.16\r\nmixnet_s,54.48,18.254,1,224,4.13\r\nconvnextv2_pico,54.46,18.255,1,288,9.07\r\ncoat_lite_mini,54.28,18.316,1,224,11.01\r\nregnetz_b16_evos,54.27,18.323,1,224,9.74\r\nseresnet50,54.11,18.374,1,224,28.09\r\nlegacy_seresnet50,54.04,18.401,1,224,28.09\r\nresnetv2_50d_frn,53.81,18.476,1,224,25.59\r\ngcresnext50ts,53.8,18.481,1,256,15.67\r\nconvnext_tiny_hnf,53.73,18.504,1,224,28.59\r\nefficientvit_b2,53.38,18.629,1,256,24.33\r\nresnet50t,53.07,18.736,1,288,25.57\r\nconvnextv2_nano,52.9,18.794,1,288,15.62\r\nseresnext50_32x4d,52.77,18.838,1,224,27.56\r\nresnet50d,52.69,18.872,1,288,25.58\r\nresnet51q,52.69,18.872,1,288,35.7\r\nregnety_040,52.62,18.895,1,224,20.65\r\ntf_efficientnetv2_b3,52.42,18.971,1,300,14.36\r\neva02_small_patch14_224,52.38,18.988,1,224,21.62\r\nresmlp_24_224,52.29,19.017,1,224,30.02\r\nnf_regnet_b2,52.27,19.021,1,240,14.31\r\nefficientnet_b3,52.2,19.054,1,288,12.23\r\necaresnet50t,52.11,19.079,1,288,25.57\r\nconvnextv2_tiny,52.1,19.083,1,224,28.64\r\nmobilevitv2_200,51.97,19.13,1,256,18.45\r\nregnetz_c16,51.96,19.141,1,320,13.46\r\nfbnetv3_g,51.93,19.15,1,240,16.62\r\nseresnet50t,51.84,19.184,1,224,28.1\r\nresnest50d_4s2x40d,51.75,19.211,1,224,30.42\r\nvit_relpos_small_patch16_rpn_224,51.59,19.279,1,224,21.97\r\ntwins_pcpvt_small,51.53,19.299,1,224,24.11\r\necaresnet50t,51.5,19.31,1,256,25.57\r\nnest_tiny,51.49,19.315,1,224,17.06\r\nresnet61q,51.38,19.356,1,256,36.85\r\nxcit_tiny_12_p8_224,51.12,19.456,1,224,6.71\r\nresnetaa50,51.04,19.488,1,288,25.56\r\nefficientnetv2_rw_t,50.79,19.588,1,224,13.65\r\nvit_small
_resnet26d_224,50.39,19.736,1,224,63.61\r\nfbnetv3_g,50.35,19.756,1,288,16.62\r\nvit_relpos_small_patch16_224,50.18,19.818,1,224,21.98\r\nnest_tiny_jx,50.12,19.842,1,224,17.06\r\nseresnext50_32x4d,50.07,19.866,1,288,27.56\r\nresnetrs50,50.06,19.868,1,224,35.69\r\nseresnetaa50d,49.92,19.926,1,224,28.11\r\ngcresnext50ts,49.84,19.958,1,288,15.67\r\nregnety_032,49.4,20.136,1,288,19.44\r\nresnest50d,49.3,20.178,1,224,27.48\r\ncoatnet_bn_0_rw_224,49.25,20.193,1,224,27.44\r\nregnetx_080,49.22,20.207,1,224,39.57\r\nregnety_040,49.22,20.213,1,288,20.65\r\nvovnet39a,49.21,20.212,1,224,22.6\r\necaresnetlight,49.2,20.22,1,288,30.16\r\nrepvit_m3,48.97,20.319,1,224,10.68\r\ndeit3_medium_patch16_224,48.76,20.407,1,224,38.85\r\nvovnet57a,48.68,20.434,1,224,36.64\r\nese_vovnet39b,48.64,20.447,1,288,24.57\r\nedgenext_base,48.62,20.459,1,320,18.51\r\nrexnetr_300,48.55,20.49,1,288,34.81\r\nresnetaa50d,48.42,20.545,1,288,25.58\r\nregnetz_b16_evos,48.25,20.622,1,288,9.74\r\ntwins_svt_small,48.23,20.629,1,224,24.06\r\ncspdarknet53,48.09,20.684,1,256,27.64\r\nlambda_resnet50ts,47.99,20.727,1,256,21.54\r\ntf_efficientnet_b3,47.96,20.744,1,300,12.23\r\ndla102x,47.88,20.779,1,224,26.31\r\nresnetblur50,47.82,20.806,1,288,25.56\r\nefficientnet_b3,47.81,20.812,1,320,12.23\r\nnf_regnet_b3,47.8,20.816,1,288,18.59\r\ndpn68b,47.67,20.87,1,288,12.61\r\nnextvit_small,47.64,20.884,1,224,31.76\r\nregnetv_040,47.57,20.914,1,288,20.64\r\necaresnet50d,47.14,21.106,1,288,25.58\r\nres2net50_26w_4s,47.12,21.115,1,224,25.7\r\nresnetv2_50,47.07,21.139,1,288,25.55\r\nvit_base_patch32_224,47.01,21.161,1,224,88.22\r\nresnetblur50d,46.99,21.17,1,288,25.58\r\ncs3sedarknet_l,46.91,21.21,1,256,21.91\r\nbotnet50ts_256,46.78,21.266,1,256,22.74\r\nres2next50,46.65,21.326,1,224,24.67\r\nresnet61q,46.55,21.374,1,288,36.85\r\nconvnext_nano,46.5,21.397,1,288,15.59\r\nmobilevit_s,46.41,21.44,1,256,5.58\r\ntiny_vit_21m_224,46.36,21.465,1,224,33.22\r\nese_vovnet57b,46.33,21.472,1,224,38.61\r\ncoatnet_0_rw_224,46.3,21.486,1,224,
27.44\r\nres2net50d,46.29,21.499,1,224,25.72\r\nvit_base_patch32_clip_quickgelu_224,46.24,21.52,1,224,87.85\r\nconvnext_nano_ols,46.22,21.535,1,288,15.65\r\nregnety_080,46.14,21.566,1,224,39.18\r\nvit_base_patch32_clip_224,46.06,21.604,1,224,88.22\r\ncoatnet_0_224,46.05,21.609,1,224,25.04\r\ncoatnet_nano_cc_224,46.01,21.631,1,224,13.76\r\neca_nfnet_l0,45.97,21.644,1,224,24.14\r\nfastvit_sa24,45.96,21.652,1,256,21.55\r\nregnety_080_tv,45.96,21.647,1,224,39.38\r\nmobileone_s0,45.88,21.695,1,224,5.29\r\npvt_v2_b1,45.54,21.85,1,224,14.01\r\npvt_v2_b2_li,45.54,21.854,1,224,22.55\r\nlegacy_xception,45.5,21.868,1,299,22.86\r\ndla60_res2next,45.49,21.875,1,224,17.03\r\nefficientnetv2_rw_t,45.15,22.048,1,288,13.65\r\nconvnext_tiny,45.14,22.046,1,288,28.59\r\ncrossvit_15_240,45.08,22.079,1,240,27.53\r\nregnetz_c16_evos,45.08,22.077,1,256,13.49\r\nvit_base_patch32_clip_256,44.73,22.25,1,256,87.86\r\ndla60_res2net,44.68,22.274,1,224,20.85\r\nregnety_064,44.51,22.357,1,224,30.58\r\nresnet101,44.47,22.382,1,160,44.55\r\ntresnet_m,44.45,22.384,1,224,31.39\r\nconvit_small,44.44,22.401,1,224,27.78\r\nhgnet_small,44.41,22.409,1,288,24.36\r\nswin_s3_tiny_224,44.37,22.427,1,224,28.33\r\npoolformerv2_s36,44.28,22.475,1,224,30.79\r\nedgenext_base,44.14,22.539,1,256,18.51\r\nskresnet34,44.12,22.561,1,224,22.28\r\nskresnet50d,44.09,22.573,1,224,25.82\r\nseresnet50t,44.06,22.589,1,288,28.1\r\nvit_medium_patch16_gap_240,44.06,22.584,1,240,44.4\r\nmvitv2_tiny,44.03,22.604,1,224,24.17\r\nseresnet50,43.99,22.623,1,288,28.09\r\ndla102,43.92,22.659,1,224,33.27\r\nvolo_d1_224,43.89,22.678,1,224,26.63\r\ncrossvit_small_240,43.82,22.717,1,240,26.86\r\nnf_regnet_b3,43.8,22.729,1,320,18.59\r\nregnetv_064,43.75,22.747,1,224,30.58\r\ngc_efficientnetv2_rw_t,43.67,22.802,1,224,13.68\r\nskresnet50,43.6,22.83,1,224,25.8\r\ndavit_tiny,43.54,22.855,1,224,28.36\r\ncoatnet_nano_rw_224,43.41,22.927,1,224,15.14\r\nhalonet50ts,43.38,22.944,1,256,22.73\r\ngcvit_xxtiny,43.37,22.961,1,224,12.0\r\nnfnet_l0,43.29,22.99
,1,224,35.07\r\ntnt_s_patch16_224,43.16,23.057,1,224,23.76\r\npoolformer_s36,43.12,23.089,1,224,30.86\r\ncs3sedarknet_l,42.91,23.198,1,288,21.91\r\nfocalnet_tiny_srf,42.82,23.243,1,224,28.43\r\nvit_srelpos_medium_patch16_224,42.76,23.275,1,224,38.74\r\ndarknetaa53,42.55,23.393,1,256,36.02\r\nseresnetaa50d,42.5,23.422,1,288,28.11\r\nvit_medium_patch16_gap_256,42.42,23.467,1,256,38.86\r\nresnet101,42.27,23.552,1,176,44.55\r\ncoatnet_rmlp_nano_rw_224,42.23,23.574,1,224,15.15\r\nresnext101_32x4d,42.11,23.639,1,224,44.18\r\nmixnet_m,42.06,23.673,1,224,5.01\r\ntf_mixnet_m,41.96,23.726,1,224,5.01\r\ncs3darknet_focus_x,41.88,23.77,1,256,35.02\r\nmobilevitv2_150,41.85,23.785,1,384,10.59\r\ncs3sedarknet_xdw,41.8,23.817,1,256,21.6\r\nregnetx_120,41.78,23.827,1,224,46.11\r\nnf_seresnet50,41.51,23.981,1,224,28.09\r\necaresnet50t,41.38,24.05,1,320,25.57\r\nmixer_b16_224,41.38,24.054,1,224,59.88\r\ncrossvit_15_dagger_240,41.36,24.075,1,240,28.21\r\nvit_relpos_medium_patch16_rpn_224,41.3,24.105,1,224,38.73\r\nswinv2_cr_tiny_ns_224,41.2,24.165,1,224,28.33\r\nmaxvit_pico_rw_256,41.14,24.206,1,256,7.46\r\nregnetz_040,41.13,24.202,1,256,27.12\r\nswinv2_cr_tiny_224,41.08,24.228,1,224,28.33\r\nregnety_040_sgn,41.06,24.249,1,224,20.65\r\ncaformer_s18,41.01,24.277,1,224,26.34\r\nxception41p,40.98,24.291,1,299,26.91\r\ncoatnet_rmlp_0_rw_224,40.92,24.326,1,224,27.45\r\nnf_resnet50,40.88,24.358,1,256,25.56\r\nvit_base_patch32_plus_256,40.85,24.373,1,256,119.48\r\ncs3darknet_x,40.8,24.4,1,256,35.05\r\nxception41,40.78,24.416,1,299,26.97\r\nefficientvit_l1,40.76,24.423,1,224,52.65\r\nlamhalobotnet50ts_256,40.65,24.492,1,256,22.57\r\nnf_ecaresnet50,40.52,24.572,1,224,25.56\r\ncait_xxs24_224,40.5,24.591,1,224,11.96\r\nregnetz_040_h,40.36,24.669,1,256,28.94\r\nefficientformerv2_s2,40.22,24.759,1,224,12.71\r\ndarknetaa53,40.2,24.77,1,288,36.02\r\ndensenetblur121d,40.15,24.804,1,224,8.0\r\ngcresnet50t,40.12,24.822,1,256,25.9\r\nswin_tiny_patch4_window7_224,40.06,24.855,1,224,28.29\r\ninception_v3,39
.82,25.009,1,299,23.83\r\neca_nfnet_l0,39.61,25.137,1,288,24.14\r\nconvnextv2_tiny,39.59,25.153,1,288,28.64\r\nefficientnet_b0_gn,39.48,25.225,1,224,5.29\r\nrepvit_m1_5,39.48,25.224,1,224,14.64\r\ncspresnet50w,39.38,25.285,1,256,28.12\r\nregnetz_d8,38.99,25.538,1,256,23.37\r\necaresnet101d_pruned,38.92,25.592,1,224,24.88\r\ndarknet53,38.87,25.617,1,256,41.61\r\nhalo2botnet50ts_256,38.72,25.719,1,256,22.64\r\nnf_resnet50,38.46,25.896,1,288,25.56\r\nefficientnet_b4,38.45,25.9,1,320,19.34\r\nefficientnet_b0_g8_gn,38.42,25.922,1,224,6.56\r\nvit_relpos_medium_patch16_cls_224,38.33,25.983,1,224,38.76\r\ntf_mixnet_l,38.28,26.022,1,224,7.33\r\nregnetz_c16_evos,38.24,26.043,1,320,13.49\r\nskresnext50_32x4d,38.13,26.115,1,224,27.48\r\nresnet101,38.06,26.169,1,224,44.55\r\nresnet101c,37.88,26.292,1,224,44.57\r\nnfnet_l0,37.8,26.352,1,288,35.07\r\nresnet101d,37.73,26.399,1,224,44.57\r\nmixnet_l,37.53,26.547,1,224,7.33\r\nregnetz_d32,37.49,26.567,1,256,27.58\r\ncrossvit_18_240,37.48,26.576,1,240,43.27\r\nefficientnetv2_s,37.46,26.592,1,288,21.46\r\nmaxxvitv2_nano_rw_256,37.26,26.731,1,256,23.7\r\ncs3darknet_x,37.22,26.755,1,288,35.05\r\necaresnet101d_pruned,37.16,26.807,1,288,24.88\r\ncoatnext_nano_rw_224,37.09,26.854,1,224,14.7\r\nese_vovnet39b_evos,37.08,26.863,1,224,24.58\r\nxcit_small_12_p16_224,36.97,26.945,1,224,26.25\r\nregnety_120,36.94,26.957,1,224,51.82\r\ngcresnet50t,36.9,26.992,1,288,25.9\r\nresnetv2_101,36.81,27.061,1,224,44.54\r\nlevit_512,36.8,27.068,1,224,95.17\r\nresnetv2_101d,36.79,27.075,1,224,44.56\r\nregnety_080,36.56,27.243,1,288,39.18\r\nregnetz_d8_evos,36.49,27.305,1,256,23.46\r\nmaxvit_rmlp_pico_rw_256,36.46,27.332,1,256,7.52\r\ndla102x2,36.45,27.33,1,224,41.28\r\nresnetaa101d,36.43,27.345,1,224,44.57\r\ndensenet121,36.42,27.35,1,224,7.98\r\nresnet101s,36.35,27.401,1,224,44.67\r\nlevit_conv_512,36.19,27.525,1,224,95.17\r\nregnety_064,36.1,27.593,1,288,30.58\r\nvit_relpos_medium_patch16_224,36.08,27.607,1,224,38.75\r\nselecsls60,35.99,27.68,1,224,30.67\r\
nconvnext_tiny_hnf,35.91,27.736,1,288,28.59\r\nxcit_nano_12_p8_384,35.86,27.782,1,384,3.05\r\nxcit_tiny_24_p16_224,35.79,27.846,1,224,12.12\r\ntf_efficientnetv2_s,35.77,27.849,1,300,21.46\r\nresnet50_gn,35.65,27.945,1,224,25.56\r\nvit_base_resnet26d_224,35.65,27.939,1,224,101.4\r\nresnetv2_50d_evos,35.51,28.052,1,224,25.59\r\nresnetv2_50d_gn,35.48,28.077,1,224,25.57\r\nconvnext_tiny,35.4,28.138,1,384,28.59\r\nlevit_conv_512d,35.36,28.174,1,224,92.5\r\nefficientnetv2_rw_s,35.35,28.184,1,288,23.94\r\nseresnext101_32x4d,35.28,28.24,1,224,48.96\r\nregnetx_160,35.27,28.238,1,224,54.28\r\ncrossvit_18_dagger_240,35.24,28.275,1,240,44.27\r\nresmlp_36_224,35.24,28.273,1,224,44.69\r\nlevit_512d,35.22,28.283,1,224,92.5\r\nlegacy_seresnext101_32x4d,35.18,28.313,1,224,48.96\r\nresnetblur101d,35.12,28.366,1,224,44.57\r\nmaxvit_nano_rw_256,35.11,28.365,1,256,15.45\r\nnf_regnet_b4,35.02,28.45,1,320,30.21\r\nhgnetv2_b5,34.92,28.527,1,224,39.57\r\ndarknet53,34.91,28.539,1,288,41.61\r\nvit_relpos_base_patch32_plus_rpn_256,34.86,28.579,1,256,119.42\r\nresnet101d,34.76,28.665,1,256,44.57\r\nmobilevitv2_175,34.66,28.741,1,384,14.25\r\nregnetv_064,34.6,28.79,1,288,30.58\r\nvit_medium_patch16_reg4_256,34.54,28.846,1,256,38.87\r\nvit_medium_patch16_reg4_gap_256,34.51,28.871,1,256,38.87\r\nefficientvit_b3,34.48,28.893,1,256,48.65\r\nvit_small_r26_s32_224,34.26,29.081,1,224,36.43\r\nhrnet_w18_small_v2,34.09,29.227,1,224,15.6\r\ngcvit_xtiny,34.04,29.274,1,224,19.98\r\nrepvgg_b1g4,33.94,29.354,1,224,39.97\r\nnextvit_base,33.92,29.369,1,224,44.82\r\nconvformer_s18,33.87,29.421,1,224,26.77\r\ntwins_pcpvt_base,33.77,29.509,1,224,43.83\r\nregnetz_040,33.51,29.736,1,320,27.12\r\nres2net50_26w_6s,33.51,29.737,1,224,37.05\r\nregnety_040_sgn,33.5,29.739,1,288,20.65\r\ndpn92,33.38,29.853,1,224,37.67\r\nregnetz_040_h,33.37,29.86,1,320,28.94\r\nefficientvit_l2,33.35,29.874,1,256,63.71\r\necaresnet101d,33.18,30.033,1,224,44.57\r\nresnetrs101,33.16,30.049,1,192,63.62\r\nvit_small_resnet50d_s16_224,33.15,30.
054,1,224,57.53\r\nselecsls42,32.94,30.251,1,224,30.35\r\nvit_small_patch16_384,32.89,30.299,1,384,22.2\r\nefficientvit_b3,32.71,30.465,1,288,48.65\r\nseresnet101,32.53,30.638,1,224,49.33\r\nlegacy_seresnet101,32.5,30.668,1,224,49.33\r\nresnet101,32.39,30.768,1,288,44.55\r\nresnet152,32.27,30.879,1,160,60.19\r\ninception_next_small,32.25,30.901,1,224,49.37\r\nefficientvit_b3,32.15,30.999,1,224,48.65\r\ncoat_lite_small,32.05,31.097,1,224,19.84\r\ncs3sedarknet_x,32.04,31.1,1,256,35.4\r\nefficientnet_b3_gn,32.01,31.134,1,288,11.73\r\npoolformerv2_m36,32.0,31.141,1,224,56.08\r\nvit_base_patch32_384,31.93,31.21,1,384,88.3\r\nnf_regnet_b4,31.88,31.263,1,384,30.21\r\nfastvit_sa36,31.82,31.317,1,256,31.53\r\nefficientnet_b4,31.76,31.379,1,384,19.34\r\ndensenetblur121d,31.68,31.462,1,288,8.0\r\ncs3edgenet_x,31.66,31.478,1,256,47.82\r\nresnet152,31.62,31.519,1,176,60.19\r\nrepvit_m2_3,31.61,31.53,1,224,23.69\r\npoolformer_m36,31.49,31.647,1,224,56.17\r\nselecsls60b,31.42,31.719,1,224,32.77\r\nvit_base_patch16_rpn_224,31.41,31.734,1,224,86.54\r\ndeit3_small_patch16_384,31.29,31.856,1,384,22.21\r\nfocalnet_tiny_lrf,31.28,31.863,1,224,28.65\r\ntf_efficientnet_b4,31.26,31.884,1,380,19.34\r\nres2net50_14w_8s,31.21,31.935,1,224,25.06\r\nconvnext_small,31.19,31.95,1,224,50.22\r\nhgnetv2_b5,31.15,31.995,1,288,39.57\r\nmaxvit_rmlp_nano_rw_256,31.14,31.993,1,256,15.5\r\nxcit_tiny_24_p16_384,31.09,32.069,1,384,12.12\r\nefficientvit_l2,31.04,32.103,1,224,63.71\r\nresnet50_gn,30.96,32.194,1,288,25.56\r\nefficientvit_l2,30.9,32.252,1,288,63.71\r\nselecsls42b,30.8,32.368,1,224,32.46\r\nvit_base_patch32_clip_384,30.63,32.537,1,384,88.3\r\nresnetv2_50x1_bit,30.6,32.565,1,224,25.55\r\nnest_small,30.32,32.867,1,224,38.35\r\nvit_base_patch16_gap_224,30.08,33.14,1,224,86.57\r\ndeit3_base_patch16_224,30.04,33.181,1,224,86.59\r\nvit_base_patch16_clip_quickgelu_224,30.04,33.179,1,224,86.19\r\nmixnet_xl,30.03,33.189,1,224,11.9\r\nefficientnetv2_s,29.98,33.254,1,384,21.46\r\nvit_base_patch16_224_miil,
29.95,33.282,1,224,94.4\r\nefficientformer_l7,29.93,33.307,1,224,82.23\r\nnfnet_f0,29.92,33.319,1,192,71.49\r\nvit_base_patch16_224,29.92,33.31,1,224,86.57\r\ndm_nfnet_f0,29.79,33.469,1,192,71.49\r\ndeit_base_patch16_224,29.77,33.481,1,224,86.57\r\ngc_efficientnetv2_rw_t,29.75,33.501,1,288,13.68\r\neva02_small_patch14_336,29.68,33.591,1,336,22.13\r\nresnetaa101d,29.64,33.631,1,288,44.57\r\nnest_small_jx,29.62,33.655,1,224,38.35\r\ncs3sedarknet_x,29.53,33.76,1,288,35.4\r\nconvnextv2_tiny,29.51,33.772,1,384,28.64\r\ntf_efficientnetv2_s,29.51,33.78,1,384,21.46\r\nmvitv2_small,29.48,33.811,1,224,34.87\r\nresnext101_32x8d,29.47,33.824,1,176,88.79\r\npit_b_224,29.44,33.855,1,224,73.76\r\nrepvgg_b1,29.33,33.984,1,224,57.42\r\nmaxxvit_rmlp_nano_rw_256,29.32,33.998,1,256,16.78\r\nefficientnet_b3_g8_gn,29.31,34.011,1,288,14.25\r\npit_b_distilled_224,29.26,34.069,1,224,74.79\r\ndeit_base_distilled_patch16_224,29.13,34.224,1,224,87.34\r\nxcit_small_12_p16_384,29.13,34.224,1,384,26.25\r\nconvnextv2_small,29.09,34.269,1,224,50.32\r\ntresnet_v2_l,29.06,34.305,1,224,46.17\r\nresnetblur101d,29.04,34.324,1,288,44.57\r\nregnetz_d8,29.03,34.34,1,320,23.37\r\nvit_base_patch16_clip_224,29.03,34.345,1,224,86.57\r\nnextvit_small,28.84,34.564,1,384,31.76\r\nvit_base_patch16_siglip_224,28.82,34.585,1,224,92.88\r\nswinv2_tiny_window8_256,28.77,34.644,1,256,28.35\r\nlevit_conv_384_s8,28.66,34.782,1,224,39.12\r\ntresnet_l,28.61,34.843,1,224,55.99\r\nefficientformerv2_l,28.56,34.904,1,224,26.32\r\nxception65,28.52,34.957,1,299,39.92\r\nresnetv2_50d_evos,28.46,35.027,1,288,25.59\r\nresnetv2_50d_gn,28.46,35.032,1,288,25.57\r\nresnetv2_101,28.42,35.084,1,288,44.54\r\nresnet152c,28.31,35.214,1,224,60.21\r\nresnet101d,28.25,35.287,1,320,44.57\r\nregnetz_d32,28.23,35.315,1,320,27.58\r\nfastvit_ma36,28.2,35.351,1,256,44.07\r\nvit_base_resnet50d_224,28.2,35.357,1,224,110.97\r\nxcit_tiny_24_p8_224,28.17,35.389,1,224,12.11\r\ndensenet169,28.15,35.419,1,224,14.15\r\nefficientnet_b3_gn,28.07,35.515,1,320,11
.73\r\nvit_base_patch16_xp_224,28.0,35.611,1,224,86.51\r\necaresnet101d,27.96,35.651,1,288,44.57\r\nvit_base_patch32_clip_448,27.84,35.815,1,448,88.34\r\nefficientnetv2_rw_s,27.78,35.893,1,384,23.94\r\nresnet152s,27.77,35.899,1,224,60.32\r\ncs3edgenet_x,27.67,36.025,1,288,47.82\r\nxception65p,27.58,36.151,1,299,39.82\r\nresnest101e,27.47,36.301,1,256,48.28\r\ndensenet121,27.43,36.356,1,288,7.98\r\nconvnextv2_nano,27.42,36.364,1,384,15.62\r\nregnety_120,27.36,36.442,1,288,51.82\r\nregnetz_d8_evos,27.33,36.481,1,320,23.46\r\ncait_xxs36_224,27.32,36.503,1,224,17.3\r\ntnt_b_patch16_224,27.31,36.501,1,224,65.41\r\ndla169,27.28,36.548,1,224,53.39\r\ncoatnet_1_rw_224,27.21,36.649,1,224,41.72\r\nflexivit_base,27.18,36.682,1,240,86.59\r\nese_vovnet99b,27.13,36.754,1,224,63.2\r\nresnet152,26.95,36.992,1,224,60.19\r\nresnet152d,26.94,37.016,1,224,60.21\r\nseresnet101,26.8,37.206,1,288,49.33\r\nbeitv2_base_patch16_224,26.72,37.316,1,224,86.53\r\nnf_resnet101,26.64,37.432,1,224,44.55\r\nbeit_base_patch16_224,26.63,37.447,1,224,86.53\r\ntwins_svt_base,26.61,37.465,1,224,56.07\r\nmaxvit_tiny_rw_224,26.58,37.51,1,224,29.06\r\nresnetv2_152,26.55,37.553,1,224,60.19\r\nregnety_160,26.51,37.61,1,224,83.59\r\nvit_relpos_base_patch16_rpn_224,26.45,37.7,1,224,86.41\r\nresnetv2_152d,26.15,38.137,1,224,60.2\r\ncoatnet_rmlp_1_rw_224,26.13,38.166,1,224,41.69\r\ncs3se_edgenet_x,26.07,38.25,1,256,50.72\r\nregnetz_e8,26.06,38.268,1,256,57.7\r\nnextvit_large,26.03,38.313,1,224,57.87\r\nsamvit_base_patch16_224,25.96,38.411,1,224,86.46\r\nresnext101_64x4d,25.92,38.47,1,224,83.46\r\nvit_relpos_base_patch16_224,25.89,38.52,1,224,86.43\r\nnf_ecaresnet101,25.87,38.542,1,224,44.55\r\nresnetrs101,25.82,38.619,1,288,63.62\r\npvt_v2_b2,25.81,38.645,1,224,25.36\r\nres2net50_26w_8s,25.76,38.713,1,224,48.4\r\neca_nfnet_l1,25.7,38.803,1,256,41.41\r\ncaformer_s36,25.66,38.862,1,224,39.3\r\nres2net101d,25.65,38.872,1,224,45.23\r\nefficientnet_b3_g8_gn,25.52,39.078,1,320,14.25\r\nres2net101_26w_4s,25.5,39.108,1,2
24,45.21\r\ninception_v4,25.41,39.25,1,299,42.68\r\ncoatnet_1_224,25.35,39.33,1,224,42.23\r\nconvmixer_768_32,25.35,39.337,1,224,21.11\r\nxcit_small_24_p16_224,25.29,39.432,1,224,47.67\r\ncoatnet_rmlp_1_rw2_224,25.19,39.588,1,224,41.72\r\nresnet152d,25.12,39.699,1,256,60.21\r\nnfnet_f0,25.1,39.741,1,256,71.49\r\npoolformerv2_m48,25.05,39.803,1,224,73.35\r\nvit_base_r26_s32_224,25.05,39.807,1,224,101.38\r\nmaxvit_tiny_rw_256,25.04,39.834,1,256,29.07\r\ntresnet_m,24.96,39.959,1,448,31.39\r\nvit_relpos_base_patch16_cls_224,24.94,39.997,1,224,86.43\r\nresnext101_32x8d,24.91,40.03,1,224,88.79\r\nvit_relpos_base_patch16_clsgap_224,24.86,40.115,1,224,86.43\r\nxcit_tiny_12_p8_384,24.76,40.28,1,384,6.71\r\ndm_nfnet_f0,24.74,40.318,1,256,71.49\r\nvit_base_patch16_reg4_gap_256,24.62,40.507,1,256,86.62\r\ndpn98,24.48,40.74,1,224,61.57\r\nwide_resnet50_2,24.46,40.774,1,176,68.88\r\ngmlp_b16_224,24.07,41.431,1,224,73.08\r\nvit_base_patch16_siglip_256,24.05,41.476,1,256,92.93\r\ngcvit_tiny,23.98,41.603,1,224,28.22\r\nlevit_384_s8,23.87,41.794,1,224,39.12\r\ndavit_small,23.68,42.124,1,224,49.75\r\nmixnet_xxl,23.56,42.341,1,224,23.96\r\npoolformer_m48,23.53,42.396,1,224,73.47\r\ntwins_pcpvt_large,23.52,42.407,1,224,60.99\r\neva02_base_patch16_clip_224,23.47,42.505,1,224,86.26\r\nresnet200,23.46,42.51,1,224,64.67\r\nnf_seresnet101,23.38,42.658,1,224,49.33\r\nmaxvit_tiny_pm_256,23.24,42.933,1,256,30.09\r\nefficientvit_l2,23.17,43.043,1,384,63.71\r\nxcit_small_12_p8_224,23.13,43.134,1,224,26.21\r\nresnet152,23.07,43.239,1,288,60.19\r\nseresnext101_64x4d,23.04,43.291,1,224,88.23\r\nconvformer_s36,22.99,43.386,1,224,40.01\r\ninception_next_base,22.95,43.455,1,224,86.67\r\nrepvgg_b2g4,22.93,43.506,1,224,61.76\r\nvolo_d2_224,22.88,43.599,1,224,58.68\r\ntresnet_xl,22.85,43.645,1,224,78.44\r\nmaxxvit_rmlp_tiny_rw_256,22.79,43.777,1,256,29.64\r\nhgnet_base,22.77,43.808,1,224,71.58\r\nfocalnet_small_srf,22.7,43.939,1,224,49.89\r\nseresnet152,22.65,44.045,1,224,66.82\r\nconvnext_base,22.56,44.2
24,1,224,88.59\r\nconvnext_small,22.45,44.442,1,288,50.22\r\nmaxvit_tiny_tf_224,22.41,44.515,1,224,30.92\r\nxception71,22.39,44.561,1,299,42.34\r\nmobilevitv2_200,22.37,44.598,1,384,18.45\r\nmvitv2_small_cls,22.22,44.891,1,224,34.87\r\ncait_s24_224,22.13,45.067,1,224,46.92\r\nlegacy_seresnet152,22.1,45.142,1,224,66.82\r\nswinv2_cr_small_224,22.07,45.196,1,224,49.7\r\nselecsls84,22.06,45.23,1,224,50.95\r\ncs3se_edgenet_x,21.96,45.423,1,320,50.72\r\nseresnext101_32x8d,21.85,45.669,1,224,93.57\r\nseresnext101d_32x8d,21.84,45.683,1,224,93.59\r\nswinv2_cr_small_ns_224,21.8,45.762,1,224,49.7\r\nvit_small_patch8_224,21.78,45.8,1,224,21.67\r\ncaformer_s18,21.72,45.93,1,384,26.34\r\nnf_regnet_b5,21.71,45.963,1,384,49.74\r\nvit_small_r26_s32_384,21.54,46.309,1,384,36.47\r\ncrossvit_base_240,21.5,46.397,1,240,105.03\r\ninception_resnet_v2,21.5,46.408,1,299,55.84\r\nswin_small_patch4_window7_224,21.43,46.55,1,224,49.61\r\ndensenet201,21.3,46.854,1,224,20.01\r\nswinv2_tiny_window16_256,21.3,46.85,1,256,28.35\r\nwide_resnet50_2,21.3,46.84,1,224,68.88\r\nvolo_d1_384,21.29,46.867,1,384,26.78\r\nvit_medium_patch16_gap_384,21.25,46.957,1,384,39.03\r\nconvit_base,21.22,47.014,1,224,86.54\r\npvt_v2_b3,21.2,47.06,1,224,45.24\r\nseresnextaa101d_32x8d,21.16,47.142,1,224,93.59\r\nhgnetv2_b6,21.15,47.171,1,224,75.26\r\nvit_small_patch16_18x2_224,21.11,47.258,1,224,64.67\r\nnest_base_jx,21.09,47.301,1,224,67.72\r\nresnet200d,21.09,47.302,1,256,64.69\r\neca_nfnet_l1,21.05,47.388,1,320,41.41\r\ndensenet161,21.02,47.463,1,224,28.68\r\nnest_base,20.8,47.968,1,224,67.72\r\nfocalnet_small_lrf,20.74,48.098,1,224,50.34\r\ngcvit_small,20.74,48.118,1,224,51.09\r\nmaxvit_rmlp_tiny_rw_256,20.72,48.148,1,256,29.15\r\nseresnet152d,20.72,48.153,1,256,66.84\r\nresnet152d,20.71,48.179,1,320,60.21\r\nnextvit_base,20.66,48.299,1,384,44.82\r\nvit_small_patch16_36x1_224,20.65,48.32,1,224,64.67\r\nefficientnet_b5,20.64,48.335,1,416,30.39\r\nmaxvit_rmlp_small_rw_224,20.49,48.705,1,224,64.9\r\nresnetrs152,20.48,48.
724,1,256,86.62\r\nresnext101_64x4d,20.46,48.756,1,288,83.46\r\nmvitv2_base,20.41,48.893,1,224,51.47\r\nconvnextv2_base,20.33,49.071,1,224,88.72\r\nregnety_160,20.32,49.106,1,288,83.59\r\nhrnet_w18_ssld,20.05,49.77,1,224,21.3\r\nhrnet_w18,19.54,51.074,1,224,21.3\r\ncrossvit_15_dagger_408,19.4,51.437,1,408,28.5\r\nmixer_l32_224,19.37,51.507,1,224,206.94\r\nmaxvit_small_tf_224,19.35,51.563,1,224,68.93\r\nregnetz_e8,19.29,51.743,1,320,57.7\r\nefficientnetv2_m,19.18,52.035,1,320,54.14\r\ntiny_vit_21m_384,19.1,52.268,1,384,21.23\r\nconvformer_s18,19.06,52.354,1,384,26.77\r\nresnet200,19.04,52.414,1,288,64.67\r\neva02_base_patch14_224,18.97,52.61,1,224,85.76\r\nconvnext_base,18.94,52.694,1,256,88.59\r\ncaformer_m36,18.93,52.724,1,224,56.2\r\nhalonet_h1,18.91,52.787,1,256,8.1\r\nefficientnet_b5,18.89,52.829,1,448,30.39\r\nseresnet152,18.79,53.123,1,288,66.82\r\ntwins_svt_large,18.76,53.204,1,224,99.27\r\nwide_resnet50_2,18.74,53.249,1,288,68.88\r\ndpn131,18.71,53.352,1,224,79.25\r\nswin_s3_small_224,18.71,53.329,1,224,49.74\r\nvit_base_patch16_plus_240,18.67,53.459,1,240,117.56\r\nconvnext_small,18.55,53.795,1,384,50.22\r\nnf_regnet_b5,18.54,53.841,1,456,49.74\r\necaresnet200d,18.27,54.62,1,256,64.69\r\nxcit_medium_24_p16_224,18.21,54.821,1,224,84.4\r\nhrnet_w32,18.13,55.048,1,224,41.23\r\nregnetx_320,18.12,55.073,1,224,107.81\r\nrepvgg_b2,17.92,55.687,1,224,89.02\r\nefficientnetv2_rw_m,17.89,55.798,1,320,53.24\r\nfocalnet_base_srf,17.67,56.484,1,224,88.15\r\nswinv2_cr_small_ns_256,17.64,56.598,1,256,49.7\r\ncoat_tiny,17.45,57.214,1,224,5.5\r\nlevit_conv_512_s8,17.38,57.446,1,224,74.05\r\nlevit_512_s8,17.36,57.485,1,224,74.05\r\ndpn107,17.34,57.575,1,224,86.92\r\nseresnext101_32x8d,17.3,57.703,1,288,93.57\r\ncoatnet_2_rw_224,17.27,57.778,1,224,73.87\r\nconvformer_m36,17.23,57.93,1,224,57.05\r\nswin_s3_base_224,17.18,58.095,1,224,71.13\r\ndavit_base,17.13,58.254,1,224,87.95\r\nswinv2_cr_base_224,16.91,59.034,1,224,87.88\r\nseresnet152d,16.86,59.213,1,320,66.84\r\nhgnetv2_b6
,16.82,59.354,1,288,75.26\r\nseresnext101d_32x8d,16.8,59.399,1,288,93.59\r\nresnet200d,16.63,60.036,1,320,64.69\r\nswinv2_cr_base_ns_224,16.59,60.178,1,224,87.88\r\ncoat_lite_medium,16.58,60.196,1,224,44.57\r\nresnetrs200,16.57,60.229,1,256,93.21\r\nhgnet_base,16.53,60.378,1,288,71.58\r\ncait_xxs24_384,16.5,60.52,1,384,12.03\r\nseresnextaa101d_32x8d,16.44,60.712,1,288,93.59\r\nfocalnet_base_lrf,16.42,60.799,1,224,88.75\r\nvolo_d3_224,16.39,60.905,1,224,86.33\r\nvit_relpos_base_patch16_plus_240,16.38,60.949,1,240,117.38\r\nwide_resnet101_2,16.37,60.994,1,176,126.89\r\nrepvgg_b3g4,16.31,61.206,1,224,83.83\r\nhrnet_w40,16.28,61.301,1,224,57.56\r\ntf_efficientnet_b5,16.28,61.33,1,456,30.39\r\necaresnet200d,16.21,61.594,1,288,64.69\r\nvit_base_r50_s16_224,16.2,61.605,1,224,97.89\r\nhrnet_w18_ssld,16.19,61.65,1,288,21.3\r\nseresnet200d,16.19,61.664,1,256,71.86\r\ndm_nfnet_f1,16.17,61.746,1,224,132.63\r\nmaxvit_rmlp_small_rw_256,16.12,61.934,1,256,64.9\r\ncoatnet_2_224,16.05,62.19,1,224,74.68\r\ncoatnet_rmlp_2_rw_224,16.05,62.216,1,224,73.88\r\nhrnet_w30,15.98,62.476,1,224,37.71\r\nswin_base_patch4_window7_224,15.92,62.688,1,224,87.77\r\nhrnet_w48,15.8,63.193,1,224,77.47\r\nxcit_small_24_p16_384,15.73,63.481,1,384,47.67\r\nmvitv2_base_cls,15.67,63.721,1,224,65.44\r\nhrnet_w48_ssld,15.62,63.92,1,224,77.47\r\nresnetv2_101x1_bit,15.5,64.422,1,224,44.54\r\nvit_large_patch32_224,15.38,64.898,1,224,305.51\r\nnfnet_f1,15.31,65.221,1,224,132.63\r\ntf_efficientnetv2_m,15.27,65.378,1,384,54.14\r\npvt_v2_b4,15.22,65.611,1,224,62.56\r\neca_nfnet_l2,15.2,65.695,1,320,56.72\r\ncrossvit_18_dagger_408,15.18,65.761,1,408,44.61\r\nresnetrs152,15.16,65.876,1,320,86.62\r\nsequencer2d_s,15.13,65.982,1,224,27.65\r\nswinv2_small_window8_256,15.12,66.049,1,256,49.73\r\ncaformer_b36,14.81,67.42,1,224,98.75\r\nvit_so150m_patch16_reg4_gap_256,14.79,67.533,1,256,134.13\r\nhrnet_w44,14.76,67.633,1,224,67.06\r\npvt_v2_b5,14.76,67.666,1,224,81.96\r\nswinv2_base_window12_192,14.64,68.185,1,192,109.28\r\n
seresnet200d,14.49,68.9,1,288,71.86\r\nnextvit_large,14.45,69.101,1,384,57.87\r\nseresnextaa101d_32x8d,14.44,69.13,1,320,93.59\r\ncoat_mini,14.39,69.372,1,224,10.34\r\nefficientnetv2_m,14.33,69.663,1,416,54.14\r\nwide_resnet101_2,14.31,69.771,1,224,126.89\r\ntresnet_l,14.3,69.826,1,448,55.99\r\nregnety_160,14.2,70.322,1,384,83.59\r\nvit_so150m_patch16_reg4_map_256,14.19,70.354,1,256,141.48\r\nconvnext_base,14.1,70.818,1,288,88.59\r\nresnetv2_50x1_bit,13.95,71.57,1,448,25.55\r\nconvnext_large,13.92,71.749,1,224,197.77\r\nregnety_320,13.91,71.795,1,224,145.05\r\nresnetrs200,13.77,72.49,1,320,93.21\r\ncoat_small,13.71,72.826,1,224,21.69\r\nmaxxvit_rmlp_small_rw_256,13.61,73.351,1,256,66.01\r\nxcit_tiny_24_p8_384,13.54,73.776,1,384,12.11\r\ngcvit_base,13.34,74.838,1,224,90.32\r\nhrnet_w48_ssld,13.24,75.442,1,288,77.47\r\nswinv2_base_window8_256,13.0,76.791,1,256,87.92\r\ninception_next_base,12.99,76.866,1,384,86.67\r\nxcit_small_24_p8_224,12.99,76.888,1,224,47.63\r\nconvformer_s36,12.84,77.75,1,384,40.01\r\nconvnextv2_base,12.83,77.812,1,288,88.72\r\nefficientnetv2_rw_m,12.83,77.808,1,416,53.24\r\nlegacy_senet154,12.83,77.84,1,224,115.09\r\nconvnextv2_large,12.71,78.563,1,224,197.96\r\nconvformer_b36,12.64,79.028,1,224,99.88\r\nresnetrs270,12.54,79.637,1,256,129.86\r\nsenet154,12.54,79.615,1,224,115.09\r\nresnest200e,12.5,79.914,1,320,70.2\r\nrepvgg_b3,12.24,81.603,1,224,123.09\r\nswinv2_small_window16_256,12.22,81.758,1,256,49.73\r\ndm_nfnet_f1,12.19,81.897,1,320,132.63\r\nvit_small_patch14_reg4_dinov2,12.18,81.993,1,518,22.06\r\neca_nfnet_l2,12.14,82.304,1,384,56.72\r\nvit_small_patch14_dinov2,12.0,83.198,1,518,22.06\r\nxcit_large_24_p16_224,12.0,83.236,1,224,189.1\r\nefficientvit_l3,11.82,84.485,1,224,246.04\r\ncaformer_s36,11.81,84.544,1,384,39.3\r\ntf_efficientnetv2_m,11.71,85.301,1,480,54.14\r\nconvnext_base,11.68,85.493,1,320,88.59\r\nseresnet269d,11.66,85.618,1,256,113.67\r\nnfnet_f1,11.65,85.748,1,320,132.63\r\nconvnext_large_mlp,11.64,85.831,1,256,200.13\r\nhr
net_w64,11.56,86.429,1,224,128.06\r\nconvnext_base,11.52,86.708,1,384,88.59\r\nconvmixer_1536_20,11.44,87.282,1,224,51.63\r\nsequencer2d_m,11.44,87.314,1,224,38.31\r\ncait_xxs36_384,11.3,88.39,1,384,17.37\r\nvit_base_patch16_clip_384,11.28,88.553,1,384,86.86\r\ndeit_base_patch16_384,11.24,88.868,1,384,86.86\r\ndeit3_base_patch16_384,11.21,89.062,1,384,86.88\r\ndeit_base_distilled_patch16_384,11.2,89.193,1,384,87.63\r\ndavit_large,11.12,89.816,1,224,196.81\r\nvit_base_patch16_siglip_384,11.11,89.91,1,384,93.18\r\nvit_base_patch16_384,11.1,89.979,1,384,86.86\r\ntresnet_xl,10.86,91.955,1,448,78.44\r\ndensenet264d,10.85,92.074,1,224,72.74\r\neca_nfnet_l3,10.82,92.336,1,352,72.04\r\nmaxxvitv2_rmlp_base_rw_224,10.75,92.904,1,224,116.09\r\npnasnet5large,10.75,92.913,1,331,86.06\r\nnasnetalarge,10.68,93.528,1,331,88.75\r\nefficientvit_l3,10.66,93.703,1,256,246.04\r\nswinv2_cr_tiny_384,10.62,94.018,1,384,28.33\r\nswinv2_cr_large_224,10.6,94.24,1,224,196.68\r\necaresnet269d,10.55,94.713,1,320,102.09\r\nswin_large_patch4_window7_224,10.5,95.17,1,224,196.53\r\ncait_xs24_384,10.48,95.27,1,384,26.67\r\nresnext101_32x16d,10.42,95.861,1,224,194.03\r\nvolo_d4_224,10.3,96.948,1,224,192.96\r\nvit_large_r50_s32_224,10.24,97.519,1,224,328.99\r\nmixer_l16_224,10.19,98.039,1,224,208.2\r\nseresnet269d,10.18,98.097,1,288,113.67\r\nswinv2_large_window12_192,10.15,98.416,1,192,228.77\r\ntf_efficientnet_b6,10.13,98.587,1,528,43.04\r\ndm_nfnet_f2,10.06,99.26,1,256,193.78\r\nmaxvit_tiny_tf_384,10.04,99.454,1,384,30.98\r\nnfnet_f2,10.03,99.603,1,256,193.78\r\nvolo_d2_384,10.03,99.551,1,384,58.87\r\nconvnextv2_base,10.01,99.781,1,384,88.72\r\nconvnext_large,9.99,99.979,1,288,197.77\r\nxcit_small_12_p8_384,9.95,100.392,1,384,26.21\r\nxcit_medium_24_p16_384,9.8,101.966,1,384,84.4\r\nmaxvit_base_tf_224,9.72,102.786,1,224,119.47\r\necaresnet269d,9.69,103.106,1,352,102.09\r\nvgg11,9.64,103.628,1,224,132.86\r\nvit_base_patch16_18x2_224,9.64,103.632,1,224,256.73\r\nvgg11_bn,9.5,105.194,1,224,132.87\r\nef
ficientnetv2_l,9.39,106.364,1,384,118.52\r\ntf_efficientnetv2_l,9.31,107.306,1,384,118.52\r\nswinv2_base_window12to16_192to256,9.3,107.471,1,256,87.92\r\nconvformer_m36,9.27,107.717,1,384,57.05\r\nvgg13_bn,9.26,107.879,1,224,133.05\r\nconvnextv2_large,9.22,108.292,1,288,197.96\r\nmaxvit_rmlp_base_rw_224,9.22,108.408,1,224,116.14\r\nswinv2_base_window16_256,9.21,108.436,1,256,87.92\r\nefficientnet_b6,9.2,108.582,1,528,43.04\r\nresnetrs270,9.19,108.694,1,352,129.86\r\nvgg13,9.01,110.918,1,224,133.05\r\ncaformer_m36,8.96,111.546,1,384,56.2\r\nresnetrs350,8.94,111.782,1,288,163.96\r\ncoatnet_rmlp_3_rw_224,8.77,113.984,1,224,165.15\r\ncoatnet_3_rw_224,8.74,114.329,1,224,181.81\r\ncoatnet_3_224,8.71,114.748,1,224,166.97\r\nbeit_base_patch16_384,8.65,115.469,1,384,86.74\r\nvgg16,8.61,116.0,1,224,138.36\r\nefficientvit_l3,8.58,116.47,1,320,246.04\r\ncoat_lite_medium_384,8.54,116.926,1,384,44.57\r\nvgg19_bn,8.53,117.153,1,224,143.68\r\nvgg16_bn,8.52,117.29,1,224,138.37\r\nresnetv2_101x1_bit,8.46,118.13,1,448,44.54\r\nvgg19,8.39,119.033,1,224,143.67\r\nvit_base_patch8_224,8.39,119.029,1,224,86.58\r\neca_nfnet_l3,7.98,125.225,1,448,72.04\r\nxcit_medium_24_p8_224,7.98,125.271,1,224,84.32\r\nmaxxvitv2_rmlp_large_rw_224,7.72,129.399,1,224,215.42\r\nconvnext_large_mlp,7.67,130.275,1,320,200.13\r\nnfnet_f2,7.66,130.475,1,352,193.78\r\ndm_nfnet_f2,7.6,131.419,1,352,193.78\r\nvit_large_patch32_384,7.57,131.935,1,384,306.63\r\nrepvgg_d2se,7.54,132.439,1,320,133.33\r\nseresnextaa201d_32x8d,7.52,132.81,1,320,149.39\r\nvit_base_r50_s16_384,7.46,133.951,1,384,98.95\r\nsequencer2d_l,7.43,134.481,1,224,54.3\r\nmaxvit_small_tf_384,7.23,138.115,1,384,69.02\r\nresmlp_big_24_224,7.21,138.531,1,224,129.14\r\nefficientvit_l3,7.1,140.815,1,384,246.04\r\nefficientnetv2_l,7.02,142.385,1,480,118.52\r\ncait_s24_384,6.98,143.242,1,384,47.06\r\nvit_large_patch16_224,6.96,143.598,1,224,304.33\r\ndeit3_large_patch16_224,6.92,144.377,1,224,304.37\r\nmvitv2_large,6.92,144.482,1,224,217.99\r\nresnest269e,6.8
9,145.117,1,416,110.93\r\nvolo_d5_224,6.88,145.294,1,224,295.46\r\nconvnext_xlarge,6.87,145.419,1,224,350.2\r\neva_large_patch14_196,6.87,145.357,1,196,304.14\r\nresnetrs420,6.84,146.047,1,320,191.89\r\ntf_efficientnetv2_l,6.79,147.238,1,480,118.52\r\nregnety_640,6.77,147.685,1,224,281.38\r\nefficientnetv2_xl,6.76,147.902,1,384,208.12\r\ntf_efficientnetv2_xl,6.73,148.547,1,384,208.12\r\nresnetrs350,6.65,150.31,1,384,163.96\r\nflexivit_large,6.63,150.711,1,240,304.36\r\nnfnet_f3,6.59,151.709,1,320,254.92\r\ntiny_vit_21m_512,6.55,152.682,1,512,21.27\r\nregnety_320,6.4,156.191,1,384,145.05\r\ndm_nfnet_f3,6.39,156.391,1,320,254.92\r\nvit_base_patch16_siglip_512,6.25,159.877,1,512,93.52\r\nbeit_large_patch16_224,6.22,160.576,1,224,304.43\r\nbeitv2_large_patch16_224,6.22,160.717,1,224,304.43\r\nmaxvit_large_tf_224,6.2,161.197,1,224,211.79\r\nswinv2_large_window12to16_192to256,6.12,163.37,1,256,196.74\r\nconvnext_large,6.04,165.574,1,384,197.77\r\nconvnext_large_mlp,6.01,166.316,1,384,200.13\r\nvit_so400m_patch14_siglip_224,5.97,167.337,1,224,427.68\r\ntf_efficientnet_b7,5.96,167.661,1,600,66.35\r\nswinv2_cr_small_384,5.93,168.406,1,384,49.7\r\nvit_large_patch16_siglip_256,5.86,170.441,1,256,315.96\r\nvit_large_patch14_224,5.84,171.067,1,224,304.2\r\nvit_large_patch14_clip_quickgelu_224,5.84,171.235,1,224,303.97\r\nvit_large_patch14_clip_224,5.82,171.768,1,224,304.2\r\ncoatnet_rmlp_2_rw_384,5.75,173.867,1,384,73.88\r\nmvitv2_large_cls,5.73,174.501,1,224,234.58\r\nconvformer_b36,5.71,174.868,1,384,99.88\r\nseresnextaa201d_32x8d,5.71,174.871,1,384,149.39\r\neva02_base_patch14_448,5.68,175.869,1,448,87.12\r\ndavit_huge,5.66,176.53,1,224,348.92\r\nvit_large_patch14_xp_224,5.58,179.001,1,224,304.06\r\nmaxvit_tiny_tf_512,5.54,180.385,1,512,31.05\r\nefficientnet_b7,5.48,182.298,1,600,66.35\r\nxcit_large_24_p16_384,5.46,183.021,1,384,189.1\r\ncoatnet_4_224,5.41,184.721,1,224,275.43\r\nvolo_d3_448,5.38,185.659,1,448,86.63\r\ncaformer_b36,5.37,186.081,1,384,98.75\r\nconvnextv2_large
,5.36,186.287,1,384,197.96\r\neva02_large_patch14_clip_224,5.35,186.928,1,224,304.11\r\neva02_large_patch14_224,5.34,187.155,1,224,303.27\r\nconvnext_xlarge,5.27,189.818,1,288,350.2\r\nxcit_small_24_p8_384,5.25,190.508,1,384,47.63\r\nfocalnet_large_fl3,5.2,192.302,1,384,239.13\r\nresnetrs420,5.16,193.791,1,416,191.89\r\nresnetv2_152x2_bit,5.06,197.516,1,224,236.34\r\nvit_large_r50_s32_384,4.97,201.254,1,384,329.09\r\nfocalnet_large_fl4,4.95,201.96,1,384,239.32\r\nnfnet_f3,4.77,209.555,1,416,254.92\r\ncait_s36_384,4.73,211.462,1,384,68.37\r\ndm_nfnet_f3,4.73,211.21,1,416,254.92\r\nvit_base_patch14_dinov2,4.67,214.237,1,518,86.58\r\nswinv2_cr_base_384,4.62,216.189,1,384,87.88\r\nvit_base_patch14_reg4_dinov2,4.61,216.991,1,518,86.58\r\nefficientnetv2_xl,4.49,222.467,1,512,208.12\r\ntf_efficientnetv2_xl,4.48,223.158,1,512,208.12\r\nresnetv2_50x3_bit,4.45,224.393,1,224,217.32\r\nxcit_large_24_p8_224,4.41,226.888,1,224,188.93\r\ndm_nfnet_f4,4.4,227.093,1,384,316.07\r\nnfnet_f4,4.32,231.615,1,384,316.07\r\nmaxxvitv2_rmlp_base_rw_384,4.28,233.473,1,384,116.09\r\nswin_base_patch4_window12_384,4.25,235.251,1,384,87.9\r\nmaxvit_base_tf_384,4.01,249.254,1,384,119.65\r\nmaxvit_rmlp_base_rw_384,3.97,252.089,1,384,116.14\r\nmaxvit_xlarge_tf_224,3.84,260.169,1,224,506.99\r\nswinv2_cr_huge_224,3.73,267.645,1,224,657.83\r\nresnext101_32x32d,3.7,269.902,1,224,468.53\r\nconvnextv2_huge,3.65,274.084,1,224,660.29\r\nregnety_640,3.47,288.16,1,384,281.38\r\nefficientnet_b8,3.46,289.292,1,672,87.41\r\nnfnet_f5,3.32,300.922,1,416,377.21\r\nmaxvit_small_tf_512,3.3,303.006,1,512,69.13\r\ntf_efficientnet_b8,3.3,302.574,1,672,87.41\r\nxcit_medium_24_p8_384,3.3,303.237,1,384,84.32\r\nconvnext_xlarge,3.28,305.125,1,384,350.2\r\ndm_nfnet_f5,3.28,304.623,1,416,377.21\r\nvit_large_patch16_siglip_384,3.09,323.863,1,384,316.28\r\nvolo_d4_448,3.08,324.261,1,448,193.41\r\nvit_large_patch14_clip_quickgelu_336,3.06,326.699,1,336,304.29\r\ndeit3_large_patch16_384,3.05,327.468,1,384,304.76\r\neva_large_patch
14_336,3.03,330.35,1,336,304.53\r\nswin_large_patch4_window12_384,3.02,330.733,1,384,196.74\r\nvit_large_patch14_clip_336,3.02,330.571,1,336,304.53\r\nvit_large_patch16_384,3.02,330.83,1,384,304.72\r\nswinv2_cr_large_384,3.0,333.347,1,384,196.68\r\nresnetv2_152x2_bit,2.88,346.891,1,384,236.34\r\nvit_huge_patch14_gap_224,2.88,347.103,1,224,630.76\r\nresnetv2_101x3_bit,2.86,349.835,1,224,387.93\r\nnfnet_f4,2.83,353.869,1,512,316.07\r\nconvnextv2_huge,2.8,357.503,1,288,660.29\r\neva02_large_patch14_clip_336,2.8,357.279,1,336,304.43\r\nvit_huge_patch14_224,2.8,356.629,1,224,630.76\r\nvit_huge_patch14_clip_224,2.8,356.916,1,224,632.05\r\ndeit3_huge_patch14_224,2.79,358.708,1,224,632.13\r\nvit_huge_patch14_xp_224,2.79,358.77,1,224,631.8\r\nvit_huge_patch14_clip_quickgelu_224,2.78,360.058,1,224,632.08\r\nswinv2_base_window12to24_192to384,2.77,361.199,1,384,87.92\r\nmvitv2_huge_cls,2.76,362.599,1,224,694.8\r\ndm_nfnet_f4,2.75,363.412,1,512,316.07\r\nfocalnet_huge_fl3,2.74,365.417,1,224,745.28\r\nfocalnet_xlarge_fl3,2.68,372.855,1,384,408.79\r\nmaxvit_large_tf_384,2.66,375.595,1,384,212.03\r\nregnety_1280,2.66,376.44,1,224,644.81\r\nfocalnet_huge_fl4,2.65,376.997,1,224,686.46\r\nfocalnet_xlarge_fl4,2.63,379.808,1,384,409.03\r\nnfnet_f6,2.53,394.377,1,448,438.36\r\nvit_giant_patch16_gap_224,2.52,397.009,1,224,1011.37\r\ndm_nfnet_f6,2.5,399.61,1,448,438.36\r\nconvnext_xxlarge,2.49,402.224,1,256,846.47\r\ncoatnet_5_224,2.46,406.549,1,224,687.47\r\nbeit_large_patch16_384,2.43,411.343,1,384,305.0\r\nresnetv2_152x2_bit,2.36,422.825,1,448,236.34\r\nresnetv2_50x3_bit,2.34,426.363,1,448,217.32\r\ndm_nfnet_f5,2.14,466.965,1,544,377.21\r\neva_giant_patch14_224,2.12,471.488,1,224,1012.56\r\nnfnet_f5,2.11,474.766,1,544,377.21\r\neva_giant_patch14_clip_224,2.1,476.74,1,224,1012.59\r\nvit_giant_patch14_224,2.1,476.373,1,224,1012.61\r\nvit_giant_patch14_clip_224,2.07,482.288,1,224,1012.65\r\nnfnet_f7,2.01,496.295,1,480,499.5\r\nvit_so400m_patch14_siglip_384,2.01,496.268,1,384,428.23\r\nvolo
_d5_448,1.97,507.454,1,448,295.91\r\nmaxvit_base_tf_512,1.88,531.931,1,512,119.88\r\ndavit_giant,1.86,536.86,1,224,1406.47\r\nconvnextv2_huge,1.75,572.127,1,384,660.29\r\neva02_large_patch14_448,1.71,586.058,1,448,305.08\r\ntf_efficientnet_l2,1.71,584.697,1,475,480.31\r\ncait_m36_384,1.7,588.838,1,384,271.22\r\ndm_nfnet_f6,1.7,587.555,1,576,438.36\r\nnfnet_f6,1.7,589.297,1,576,438.36\r\nswinv2_large_window12to24_192to384,1.7,587.644,1,384,196.74\r\nxcit_large_24_p8_384,1.68,596.178,1,384,188.93\r\nvit_huge_patch14_clip_336,1.64,609.779,1,336,632.46\r\nregnety_1280,1.59,628.931,1,384,644.81\r\nmaxvit_xlarge_tf_384,1.53,652.708,1,384,475.32\r\nvit_large_patch14_dinov2,1.5,664.399,1,518,304.37\r\nvit_large_patch14_reg4_dinov2,1.49,673.215,1,518,304.37\r\nvolo_d5_512,1.47,680.639,1,512,296.09\r\nresnetv2_101x3_bit,1.45,690.923,1,448,387.93\r\nvit_huge_patch14_clip_378,1.34,746.759,1,378,632.68\r\nvit_huge_patch14_clip_quickgelu_378,1.33,751.148,1,378,632.68\r\nnfnet_f7,1.32,756.231,1,608,499.5\r\nmaxvit_large_tf_512,1.28,779.969,1,512,212.33\r\nswinv2_cr_huge_384,1.26,792.156,1,384,657.94\r\nvit_huge_patch16_gap_448,1.24,806.627,1,448,631.67\r\nswinv2_cr_giant_224,1.22,817.72,1,224,2598.76\r\nvit_gigantic_patch14_224,1.2,831.652,1,224,1844.44\r\nvit_gigantic_patch14_clip_224,1.2,830.217,1,224,1844.91\r\nbeit_large_patch16_512,1.15,869.779,1,512,305.67\r\nconvnextv2_huge,1.11,904.605,1,512,660.29\r\nresnetv2_152x4_bit,1.07,931.333,1,224,936.53\r\neva_giant_patch14_336,1.06,945.007,1,336,1013.01\r\nmaxvit_xlarge_tf_512,0.74,1353.032,1,512,475.77\r\nregnety_2560,0.71,1404.762,1,384,1282.6\r\ncait_m48_448,0.64,1574.651,1,448,356.46\r\nsamvit_base_patch16,0.61,1639.865,1,1024,89.67\r\ntf_efficientnet_l2,0.6,1664.515,1,800,480.31\r\nefficientnet_l2,0.58,1732.697,1,800,480.31\r\nresnetv2_152x4_bit,0.54,1835.495,1,480,936.53\r\neva_giant_patch14_560,0.46,2166.883,1,560,1014.45\r\nvit_giant_patch14_dinov2,0.46,2154.504,1,518,1136.48\r\nvit_giant_patch14_reg4_dinov2,0.46,2171.619
,1,518,1136.48\r\neva02_enormous_patch14_clip_224,0.44,2252.224,1,224,4350.56\r\nswinv2_cr_giant_384,0.37,2697.442,1,384,2598.76\r\nsamvit_large_patch16,0.28,3628.054,1,1024,308.28\r\nsamvit_huge_patch16,0.19,5153.132,1,1024,637.03\r\n"
  },
  {
    "path": "results/benchmark-infer-fp32-nchw-pt240-cpu-i7_12700h-dynamo.csv",
    "content": "model,infer_img_size,infer_samples_per_sec,infer_step_time,infer_batch_size,param_count,infer_gmacs,infer_macts\ntest_byobnet,160,930.48,1.062,1,0.46,0.03,0.43\ntest_vit,160,752.11,1.315,1,0.37,0.04,0.48\ntest_efficientnet,160,647.07,1.53,1,0.36,0.06,0.55\nmobilenetv3_small_075,224,393.5,2.524,1,2.04,0.05,1.3\nmobilenetv3_small_050,224,384.04,2.586,1,1.59,0.03,0.92\ntf_mobilenetv3_small_minimal_100,224,379.32,2.616,1,2.04,0.06,1.41\nlcnet_035,224,378.45,2.618,1,1.64,0.03,1.04\ntinynet_e,106,376.63,2.636,1,2.04,0.03,0.69\nmobilenetv3_small_100,224,374.07,2.654,1,2.54,0.06,1.42\nlcnet_075,224,373.7,2.656,1,2.36,0.1,1.99\nlcnet_050,224,364.66,2.725,1,1.88,0.05,1.26\nmobilenetv2_035,224,323.01,3.076,1,1.68,0.07,2.86\ntf_mobilenetv3_small_100,224,322.73,3.079,1,2.54,0.06,1.42\ntf_mobilenetv3_small_075,224,298.55,3.327,1,2.04,0.05,1.3\nlcnet_100,224,291.87,3.406,1,2.95,0.16,2.52\nmnasnet_small,224,265.95,3.739,1,2.03,0.07,2.16\nmnasnet_050,224,252.54,3.939,1,2.22,0.11,3.07\nmobilenetv4_conv_small,224,248.97,3.996,1,3.77,0.19,1.97\nregnetx_002,224,237.37,4.191,1,2.68,0.2,2.16\ntinynet_d,152,232.83,4.272,1,2.34,0.05,1.42\nsemnasnet_050,224,224.05,4.443,1,2.08,0.11,3.44\nmobilenetv2_050,224,221.33,4.499,1,1.97,0.1,3.64\nefficientvit_b0,224,210.09,4.735,1,3.41,0.1,2.87\nregnety_002,224,209.64,4.749,1,3.16,0.2,2.17\nmobilenetv4_conv_small,256,199.91,4.979,1,3.77,0.25,2.57\nmobilenetv2_075,224,196.78,5.061,1,2.64,0.22,5.86\nlcnet_150,224,194.96,5.108,1,4.5,0.34,3.79\nghostnet_050,224,185.37,5.373,1,2.59,0.05,1.77\nefficientvit_m0,224,184.92,5.39,1,2.35,0.08,0.91\nrepghostnet_050,224,182.79,5.45,1,2.31,0.05,2.02\nrepghostnet_058,224,180.93,5.504,1,2.55,0.07,2.59\ntinynet_c,184,177.3,5.621,1,2.46,0.11,2.87\nlevit_128s,224,173.25,5.748,1,7.78,0.31,1.88\nmobilenetv3_large_075,224,172.84,5.763,1,3.99,0.16,4.0\nmnasnet_075,224,169.28,5.884,1,3.17,0.23,4.77\nhardcorenas_a,224,159.47,6.247,1,5.26,0.23,4.38\nresnet10t,176,158.58,6.28,1,5.44,0.7,1.51\nefficientvit_m2,224,
156.16,6.382,1,4.19,0.2,1.47\nmobilenetv1_100,224,155.99,6.388,1,4.23,0.58,5.04\nhgnetv2_b0,224,154.02,6.468,1,6.0,0.33,2.12\nsemnasnet_075,224,153.91,6.473,1,2.91,0.23,5.54\nese_vovnet19b_slim_dw,224,153.26,6.504,1,1.9,0.4,5.28\nlevit_conv_128s,224,151.31,6.583,1,7.78,0.31,1.88\nmobilenetv2_100,224,151.16,6.591,1,3.5,0.31,6.68\nrepghostnet_080,224,150.68,6.615,1,3.28,0.1,3.22\nefficientvit_m1,224,149.13,6.685,1,2.98,0.17,1.33\nmobilenetv3_rw,224,148.89,6.692,1,5.48,0.23,4.41\nsemnasnet_100,224,147.79,6.744,1,3.89,0.32,6.23\nmnasnet_100,224,147.07,6.774,1,4.38,0.33,5.46\nedgenext_xx_small,288,145.77,6.838,1,1.33,0.33,4.21\nedgenext_xx_small,256,142.76,6.983,1,1.33,0.26,3.33\nmobilenetv3_large_100,256,141.04,7.067,1,5.48,0.29,5.75\nhardcorenas_b,224,139.46,7.146,1,5.18,0.26,5.09\nmobilenetv1_100h,224,137.02,7.269,1,5.28,0.63,5.09\nrepghostnet_100,224,136.7,7.291,1,4.07,0.15,3.98\ntf_mobilenetv3_large_minimal_100,224,135.13,7.374,1,3.92,0.22,4.4\nmobilenetv3_large_100,224,135.11,7.377,1,5.48,0.23,4.41\ntf_mobilenetv3_large_075,224,133.04,7.493,1,3.99,0.16,4.0\nlevit_128,224,128.82,7.736,1,9.21,0.41,2.71\ntinynet_b,188,128.04,7.786,1,3.73,0.21,4.44\nregnetx_004,224,128.0,7.786,1,5.16,0.4,3.14\nhardcorenas_c,224,126.13,7.902,1,5.52,0.28,5.01\nefficientnet_lite0,224,124.77,7.99,1,4.65,0.4,6.74\nregnetx_006,224,124.69,7.995,1,6.2,0.61,3.98\nregnetx_004_tv,224,123.65,8.061,1,5.5,0.42,3.17\nspnasnet_100,224,121.97,8.171,1,4.42,0.35,6.03\nmobilenet_edgetpu_v2_xs,224,119.74,8.329,1,4.46,0.7,4.8\ntf_mobilenetv3_large_100,224,118.57,8.408,1,5.48,0.23,4.41\nhardcorenas_d,224,117.16,8.511,1,7.5,0.3,4.93\nmobilenetv1_125,224,116.05,8.591,1,6.27,0.89,6.3\nfbnetc_100,224,115.63,8.626,1,5.57,0.4,6.51\nhardcorenas_f,224,115.25,8.654,1,8.2,0.35,5.57\nmobilenetv2_110d,224,114.18,8.735,1,4.52,0.45,8.71\nlevit_conv_128,224,113.39,8.793,1,9.21,0.41,2.71\ndla46_c,224,113.23,8.806,1,1.3,0.58,4.5\nregnety_004,224,111.71,8.931,1,4.34,0.41,3.89\nvit_tiny_r_s16_p8_224,224,110.52,9.023,1,6.34,0.4
4,2.06\nmobilevitv2_050,256,110.49,9.03,1,1.37,0.48,8.04\nmobilenetv2_140,224,109.44,9.115,1,6.11,0.6,9.57\nrepghostnet_111,224,109.01,9.149,1,4.54,0.18,4.38\nmobilenetv1_100,256,108.9,9.161,1,4.23,0.76,6.59\ndla46x_c,224,108.69,9.178,1,1.07,0.54,5.66\nefficientvit_m4,224,108.18,9.222,1,8.8,0.3,1.7\nhardcorenas_e,224,108.15,9.221,1,8.07,0.35,5.65\nconvnext_atto,224,107.95,9.243,1,3.7,0.55,3.81\ncs3darknet_s,256,107.58,9.272,1,3.28,0.72,2.97\nefficientvit_m3,224,107.18,9.305,1,6.9,0.27,1.62\nedgenext_x_small,256,107.01,9.325,1,2.34,0.54,5.93\nregnety_006,224,105.83,9.425,1,6.06,0.61,4.33\ngernet_s,224,104.32,9.558,1,8.17,0.75,2.65\nmnasnet_140,224,103.97,9.594,1,7.12,0.6,7.71\nresnet18,160,103.24,9.66,1,11.69,0.93,1.27\nmobilenetv1_100h,256,103.01,9.684,1,5.28,0.82,6.65\ntf_efficientnetv2_b0,192,102.23,9.758,1,7.14,0.54,3.51\nhgnetv2_b1,224,101.5,9.823,1,6.34,0.49,2.73\nghostnet_100,224,101.36,9.84,1,5.18,0.15,3.55\nresnet10t,224,101.08,9.869,1,5.44,1.1,2.43\nlevit_conv_192,224,100.88,9.888,1,10.95,0.66,3.2\nlevit_192,224,100.87,9.887,1,10.95,0.66,3.2\nghostnet_130,224,100.75,9.898,1,7.36,0.24,4.6\nregnetx_008,224,100.17,9.958,1,7.26,0.81,5.15\ncs3darknet_focus_s,256,99.25,10.051,1,3.27,0.69,2.7\ntinynet_a,192,99.06,10.071,1,6.19,0.35,5.41\nrepghostnet_130,224,99.05,10.068,1,5.48,0.25,5.24\npit_ti_distilled_224,224,98.66,10.111,1,5.1,0.71,6.23\nrexnetr_100,224,98.26,10.151,1,4.88,0.43,7.72\ntf_efficientnet_lite0,224,97.3,10.249,1,4.65,0.4,6.74\npit_ti_224,224,97.19,10.264,1,4.85,0.7,6.19\nregnety_008,224,97.13,10.269,1,6.26,0.81,5.25\nconvnext_atto_ols,224,96.79,10.31,1,3.7,0.58,4.11\nhgnetv2_b0,288,96.75,10.308,1,6.0,0.54,3.51\nmobilenet_edgetpu_100,224,95.97,10.397,1,4.09,1.0,5.75\nregnety_008_tv,224,94.42,10.567,1,6.43,0.84,5.42\nrepghostnet_150,224,93.96,10.617,1,6.58,0.32,6.0\nefficientnet_lite1,240,93.82,10.637,1,5.42,0.62,10.14\nefficientnet_b0,224,93.58,10.662,1,5.29,0.4,6.75\nefficientvit_b1,224,91.69,10.879,1,9.1,0.53,7.25\nsemnasnet_140,224,91.48,10.907,1,
6.11,0.6,8.87\nrexnet_100,224,91.29,10.929,1,4.8,0.41,7.44\nmobilevit_xxs,256,90.65,11.011,1,1.27,0.42,8.34\nedgenext_x_small,288,90.43,11.036,1,2.34,0.68,7.5\nconvnext_femto_ols,224,89.2,11.187,1,5.23,0.82,4.87\nmobilenetv1_125,256,88.85,11.231,1,6.27,1.16,8.23\nmixer_s32_224,224,88.28,11.299,1,19.1,1.0,2.28\nxcit_nano_12_p16_224,224,87.78,11.371,1,3.05,0.56,4.17\nese_vovnet19b_slim,224,87.17,11.445,1,3.17,1.69,3.52\nefficientformerv2_s0,224,86.81,11.497,1,3.6,0.41,5.3\nconvnextv2_atto,224,86.78,11.501,1,3.71,0.55,3.81\nmobilenet_edgetpu_v2_s,224,85.54,11.666,1,5.99,1.21,6.6\nmobilenetv4_conv_medium,224,84.5,11.808,1,9.72,0.84,5.8\ndla60x_c,224,84.24,11.846,1,1.32,0.59,6.01\ntf_efficientnetv2_b1,192,83.74,11.917,1,8.14,0.76,4.59\nefficientnet_b0,256,82.19,12.142,1,5.29,0.52,8.81\nresnet14t,176,81.88,12.186,1,10.08,1.07,3.61\nhgnetv2_b1,288,81.46,12.251,1,6.34,0.82,4.51\nmobileone_s1,224,81.13,12.302,1,4.83,0.86,9.67\nefficientvit_b1,256,80.92,12.336,1,9.1,0.69,9.46\nmobilenetv2_120d,224,80.63,12.378,1,5.83,0.69,11.97\nese_vovnet19b_dw,224,80.55,12.39,1,6.54,1.34,8.25\nfbnetv3_b,224,80.54,12.391,1,8.6,0.42,6.97\nrepghostnet_200,224,80.43,12.406,1,9.8,0.54,7.96\ndeit_tiny_distilled_patch16_224,224,80.18,12.448,1,5.91,1.27,6.01\nmobilenetv4_hybrid_medium_075,224,80.03,12.47,1,7.31,0.66,5.65\ntf_efficientnetv2_b0,224,79.79,12.509,1,7.14,0.73,4.77\nnf_regnet_b0,192,79.28,12.59,1,8.76,0.37,3.15\nlevit_256,224,77.98,12.799,1,18.89,1.13,4.23\nghostnetv2_100,224,77.73,12.839,1,6.16,0.18,4.55\nvit_small_patch32_224,224,77.57,12.865,1,22.88,1.15,2.5\nvit_tiny_patch16_224,224,77.57,12.868,1,5.72,1.26,5.97\ndeit_tiny_patch16_224,224,77.2,12.931,1,5.72,1.26,5.97\nefficientnet_b1_pruned,240,77.19,12.932,1,6.33,0.4,6.21\ntf_efficientnet_b0,224,77.17,12.933,1,5.29,0.4,6.75\nconvnext_femto,224,77.08,12.948,1,5.22,0.79,4.57\nfbnetv3_b,256,77.03,12.958,1,8.6,0.55,9.1\nfbnetv3_d,224,76.95,12.97,1,10.31,0.52,8.5\nrexnetr_130,224,76.89,12.979,1,7.61,0.68,9.81\nconvnext_atto_ols,288,75.39
,13.242,1,3.7,0.96,6.8\ntf_efficientnet_lite1,240,74.97,13.315,1,5.42,0.62,10.14\nefficientnet_cc_b0_4e,224,74.51,13.396,1,13.31,0.41,9.42\nefficientvit_b1,288,73.91,13.508,1,9.1,0.87,11.96\nefficientnet_lite2,260,73.69,13.546,1,6.09,0.89,12.9\nefficientvit_m5,224,72.44,13.781,1,12.47,0.53,2.41\nefficientnet_b0_gn,224,72.25,13.817,1,5.29,0.42,6.75\nhgnetv2_b2,224,72.08,13.849,1,11.22,1.15,4.12\nmobilenetv4_conv_medium,256,71.94,13.876,1,9.72,1.1,7.58\nconvnextv2_femto,224,71.93,13.879,1,5.23,0.79,4.57\nefficientnet_b0_g16_evos,224,71.28,14.004,1,8.11,1.01,7.42\nrexnet_130,224,71.2,14.022,1,7.56,0.68,9.71\ngmlp_ti16_224,224,70.31,14.2,1,5.87,1.34,7.55\nefficientnet_blur_b0,224,68.95,14.477,1,5.29,0.43,8.72\nfbnetv3_d,256,68.8,14.512,1,10.31,0.68,11.1\nmobilevitv2_075,256,68.69,14.534,1,2.87,1.05,12.06\ntf_mixnet_s,224,68.57,14.56,1,4.13,0.25,6.25\npit_xs_224,224,68.23,14.63,1,10.62,1.4,7.71\nlevit_conv_256,224,67.71,14.745,1,18.89,1.13,4.23\nedgenext_small,256,67.66,14.754,1,5.59,1.26,9.07\nghostnetv2_130,224,67.59,14.768,1,8.96,0.28,5.9\nvisformer_tiny,224,67.48,14.795,1,10.32,1.27,5.72\nrexnetr_150,224,67.41,14.81,1,9.78,0.89,11.13\nrepvit_m1,224,67.3,14.835,1,5.49,0.83,7.45\npit_xs_distilled_224,224,66.69,14.963,1,11.0,1.41,7.76\nefficientnet_es_pruned,224,66.45,15.026,1,5.44,1.81,8.73\ntf_efficientnet_es,224,66.43,15.029,1,5.44,1.81,8.73\nefficientnet_cc_b0_8e,224,66.29,15.061,1,24.01,0.42,9.42\nconvnext_atto,288,66.24,15.073,1,3.7,0.91,6.3\nefficientnet_b0_g8_gn,224,66.06,15.111,1,6.56,0.66,6.75\nefficientnet_es,224,65.44,15.255,1,5.44,1.81,8.73\nmobilenet_edgetpu_v2_m,224,65.39,15.267,1,8.46,1.85,8.15\nvit_tiny_r_s16_p8_384,384,65.32,15.284,1,6.36,1.34,6.49\nrepvit_m0_9,224,64.74,15.423,1,5.49,0.83,7.45\ntf_efficientnet_cc_b0_4e,224,64.52,15.451,1,13.31,0.41,9.42\nregnetz_005,224,64.47,15.487,1,7.12,0.52,5.86\nfastvit_t8,256,64.37,15.515,1,4.03,0.7,8.63\nseresnet18,224,64.3,15.526,1,11.78,1.82,2.49\nrexnet_150,224,64.23,15.544,1,9.73,0.9,11.21\ntf_efficientnetv
2_b2,208,64.06,15.584,1,10.1,1.06,6.0\nmobilenetv3_large_150d,256,63.96,15.61,1,14.62,1.03,12.35\npvt_v2_b0,224,63.86,15.634,1,3.67,0.57,7.99\nregnetx_016,224,63.8,15.648,1,9.19,1.62,7.93\nefficientnet_b1,224,63.79,15.651,1,7.79,0.59,9.36\nmixnet_s,224,63.75,15.663,1,4.13,0.25,6.25\nefficientformerv2_s1,224,63.46,15.736,1,6.19,0.67,7.66\nmobileone_s2,224,63.39,15.751,1,7.88,1.34,11.55\nmobilenetv4_hybrid_medium,224,62.29,16.028,1,11.07,0.98,6.84\nresnet18,224,62.22,16.046,1,11.69,1.82,2.48\nresnet14t,224,62.13,16.072,1,10.08,1.69,5.8\nvit_xsmall_patch16_clip_224,224,62.07,16.087,1,8.28,1.79,6.65\nrepvgg_a0,224,62.04,16.092,1,9.11,1.52,3.59\ntf_efficientnet_cc_b0_8e,224,61.37,16.269,1,24.01,0.42,9.42\nnf_regnet_b0,256,61.3,16.286,1,8.76,0.64,5.58\nefficientformer_l1,224,61.29,16.291,1,12.29,1.3,5.53\nrepvit_m1_0,224,60.89,16.399,1,7.3,1.13,8.69\nlegacy_seresnet18,224,60.78,16.425,1,11.78,1.82,2.49\nconvnextv2_atto,288,60.59,16.476,1,3.71,0.91,6.3\npoolformerv2_s12,224,60.47,16.512,1,11.89,1.83,5.53\ntf_efficientnet_lite2,260,59.99,16.647,1,6.09,0.89,12.9\nresnet34,160,59.58,16.758,1,21.8,1.87,1.91\nefficientnet_b1,240,59.08,16.902,1,7.79,0.71,10.88\neva02_tiny_patch14_224,224,58.77,16.992,1,5.5,1.7,9.14\nxcit_tiny_12_p16_224,224,58.69,17.015,1,6.72,1.24,6.29\nconvnext_pico,224,58.47,17.082,1,9.05,1.37,6.1\nconvnext_femto_ols,288,58.37,17.107,1,5.23,1.35,8.06\nskresnet18,224,57.26,17.438,1,11.96,1.82,3.24\nresnetblur18,224,56.99,17.521,1,11.69,2.34,3.39\nrepvit_m2,224,56.93,17.54,1,8.8,1.36,9.43\nconvnext_femto,288,56.87,17.561,1,5.22,1.3,7.56\nedgenext_small_rw,256,56.39,17.709,1,7.83,1.58,9.51\nmobilenetv4_conv_blur_medium,224,56.36,17.718,1,9.72,1.22,8.58\nlevit_256d,224,56.23,17.762,1,26.21,1.4,4.93\nrepvit_m1_1,224,56.14,17.789,1,8.8,1.36,9.43\nefficientnet_b2_pruned,260,56.09,17.802,1,8.31,0.73,9.13\nresnet18d,224,55.94,17.85,1,11.71,2.06,3.29\nmobilenetv4_conv_medium,320,55.63,17.951,1,9.72,1.71,11.84\nese_vovnet19b_dw,288,55.61,17.955,1,6.54,2.22,13.63\ntf_eff
icientnetv2_b1,240,55.54,17.979,1,8.14,1.21,7.34\nmobilenetv4_hybrid_medium,256,55.47,18.002,1,11.07,1.29,9.01\ncrossvit_tiny_240,240,55.35,18.041,1,7.01,1.57,9.08\nhgnetv2_b2,288,55.22,18.084,1,11.22,1.89,6.8\npoolformer_s12,224,55.05,18.138,1,11.92,1.82,5.53\nmobilenetv4_conv_aa_medium,256,54.57,18.299,1,9.72,1.58,10.3\nconvnext_pico_ols,224,54.47,18.335,1,9.06,1.43,6.5\ntf_efficientnet_b1,240,54.39,18.361,1,7.79,0.71,10.88\nefficientnet_b1,256,53.72,18.59,1,7.79,0.77,12.22\nmobilevit_xs,256,53.72,18.59,1,2.32,1.05,16.33\nxcit_nano_12_p16_384,384,53.61,18.629,1,3.05,1.64,12.15\nghostnetv2_160,224,53.14,18.791,1,12.39,0.42,7.23\nresnet26,224,52.94,18.865,1,16.0,2.36,7.35\nhgnetv2_b3,224,52.84,18.9,1,16.29,1.78,5.07\ncrossvit_9_240,240,52.78,18.922,1,8.55,1.85,9.52\nlevit_conv_256d,224,52.46,19.035,1,26.21,1.4,4.93\ncs3darknet_m,256,51.91,19.236,1,9.31,2.08,5.28\ncs3darknet_focus_m,256,51.44,19.417,1,9.3,1.98,4.89\nmixnet_m,224,51.43,19.421,1,5.01,0.36,8.19\nregnetz_b16,224,51.32,19.464,1,9.72,1.45,9.95\ncrossvit_9_dagger_240,240,51.29,19.474,1,8.78,1.99,9.97\ndpn48b,224,51.25,19.489,1,9.13,1.69,8.92\nmobilenet_edgetpu_v2_m,256,51.21,19.504,1,8.46,2.42,10.65\nregnety_016,224,51.17,19.52,1,11.2,1.63,8.04\nconvnextv2_pico,224,50.82,19.657,1,9.07,1.37,6.1\nefficientnet_b2,256,50.61,19.735,1,9.11,0.89,12.81\nmobilenetv3_large_150d,320,50.08,19.941,1,14.62,1.61,19.29\nnf_regnet_b1,256,49.7,20.096,1,10.22,0.82,7.27\nlegacy_seresnext26_32x4d,224,49.5,20.177,1,16.79,2.49,9.39\nmobilenetv4_conv_blur_medium,256,49.26,20.275,1,9.72,1.59,11.2\nmobileone_s3,224,49.26,20.277,1,10.17,1.94,13.85\nmobileone_s0,224,49.2,20.304,1,5.29,1.09,15.48\ntf_mixnet_m,224,49.19,20.305,1,5.01,0.36,8.19\nvit_medium_patch32_clip_224,224,49.16,20.313,1,39.69,2.0,3.34\nconvnextv2_femto,288,48.87,20.44,1,5.23,1.3,7.56\nmobilevitv2_100,256,48.56,20.567,1,4.9,1.84,16.08\nrepvgg_a1,224,48.34,20.655,1,14.09,2.64,4.74\ngmixer_12_224,224,48.05,20.786,1,12.7,2.67,7.26\nregnetz_005,288,47.79,20.902,1,7.12,0.
86,9.68\nedgenext_small,320,47.75,20.917,1,5.59,1.97,14.16\nefficientnet_lite3,300,47.75,20.918,1,8.2,1.65,21.85\nefficientnet_b1,288,47.52,21.022,1,7.79,0.97,15.46\ntiny_vit_5m_224,224,47.52,21.012,1,12.08,1.28,11.25\nregnetz_b16_evos,224,47.13,21.193,1,9.74,1.43,9.95\necaresnext50t_32x4d,224,46.88,21.304,1,15.41,2.7,10.09\nresnext50_32x4d,160,46.79,21.347,1,25.03,2.17,7.35\ncoat_lite_tiny,224,46.76,21.36,1,5.72,1.6,11.65\nmobilenet_edgetpu_v2_l,224,46.43,21.512,1,10.92,2.55,9.05\ntf_efficientnetv2_b2,260,46.31,21.567,1,10.1,1.72,9.84\ncs3darknet_focus_m,288,46.19,21.624,1,9.3,2.51,6.19\nfbnetv3_g,240,46.17,21.632,1,16.62,1.28,14.87\nxcit_nano_12_p8_224,224,45.93,21.749,1,3.05,2.16,15.71\necaresnext26t_32x4d,224,45.83,21.795,1,15.41,2.7,10.09\nefficientnet_b3_pruned,300,45.83,21.793,1,9.86,1.04,11.86\nseresnext26t_32x4d,224,45.82,21.799,1,16.81,2.7,10.09\nresnest14d,224,45.77,21.821,1,10.61,2.76,7.33\ndla34,224,45.69,21.857,1,15.74,3.07,5.02\nrexnet_200,224,45.02,22.186,1,16.37,1.56,14.91\nresnet18,288,44.98,22.209,1,11.69,3.01,4.11\nseresnext26d_32x4d,224,44.67,22.357,1,16.81,2.73,10.19\neca_resnext26ts,256,44.62,22.384,1,10.3,2.43,10.52\nseresnext26ts,256,44.62,22.387,1,10.39,2.43,10.52\nnf_regnet_b1,288,44.25,22.573,1,10.22,1.02,9.2\ncs3darknet_m,288,44.05,22.676,1,9.31,2.63,6.69\nresnext26ts,256,43.89,22.755,1,10.3,2.43,10.52\nhrnet_w18_small,224,43.65,22.885,1,13.19,1.61,5.72\nresnet50,160,43.63,22.892,1,25.56,2.1,5.67\ngernet_m,224,43.54,22.945,1,21.14,3.02,5.24\nresmlp_12_224,224,43.38,23.027,1,15.35,3.01,5.5\nmobilenetv4_hybrid_medium,320,43.17,23.141,1,11.07,2.05,14.36\nefficientvit_b2,224,43.16,23.139,1,24.33,1.6,14.62\nresnext50_32x4d,176,42.92,23.271,1,25.03,2.71,8.97\nefficientnet_b2,288,42.86,23.307,1,9.11,1.12,16.2\nlambda_resnet26rpt_256,256,42.84,23.317,1,10.99,3.16,11.87\nlambda_resnet26t,256,42.48,23.514,1,10.96,3.02,11.87\ngcresnext26ts,256,42.38,23.57,1,10.48,2.43,10.53\nlevit_384,224,42.25,23.643,1,39.13,2.36,6.26\nrexnetr_200,224,42.22,23.653
,1,16.52,1.59,15.11\nseresnet18,288,42.19,23.675,1,11.78,3.01,4.11\nefficientnet_cc_b1_8e,240,42.18,23.68,1,39.72,0.75,15.44\ncoat_lite_mini,224,42.12,23.714,1,11.01,2.0,12.25\ndpn68,224,42.01,23.778,1,12.61,2.35,10.47\ntf_mixnet_l,224,41.91,23.833,1,7.33,0.58,10.84\nefficientformerv2_s2,224,41.63,23.997,1,12.71,1.27,11.77\ntf_efficientnet_b2,260,41.61,24.005,1,9.11,1.02,13.83\nfastvit_t12,256,41.56,24.039,1,7.55,1.42,12.42\nresnet26d,224,41.27,24.203,1,16.01,2.6,8.15\nnf_regnet_b2,240,41.2,24.245,1,14.31,0.97,7.23\nresnet50d,160,40.99,24.37,1,25.58,2.22,6.08\neca_botnext26ts_256,256,40.97,24.379,1,10.59,2.46,11.6\ntf_efficientnet_em,240,40.71,24.537,1,6.9,3.04,14.34\nresnet18d,288,40.62,24.592,1,11.71,3.41,5.43\ntf_efficientnetv2_b3,240,40.22,24.835,1,14.36,1.93,9.95\nefficientnetv2_rw_t,224,40.18,24.866,1,13.65,1.93,9.94\nefficientnet_em,240,40.1,24.91,1,6.9,3.04,14.34\ntf_efficientnet_cc_b1_8e,240,40.1,24.913,1,39.72,0.75,15.44\nmixnet_l,224,39.9,25.04,1,7.33,0.58,10.84\nseresnet50,160,39.88,25.05,1,28.09,2.1,5.69\nresnet50,176,39.81,25.095,1,25.56,2.62,6.92\ntf_efficientnet_lite3,300,39.76,25.126,1,8.2,1.65,21.85\nrepvgg_b0,224,39.75,25.127,1,15.82,3.41,6.15\nbotnet26t_256,256,39.7,25.167,1,12.49,3.32,11.98\neca_halonext26ts,256,39.59,25.233,1,10.76,2.44,11.46\nlevit_conv_384,224,39.52,25.276,1,39.13,2.36,6.26\nmobilevit_s,256,39.42,25.343,1,5.58,2.03,19.94\nhgnetv2_b3,288,39.28,25.432,1,16.29,2.94,8.38\nresnet26t,256,39.22,25.471,1,16.01,3.35,10.52\ndpn68b,224,39.08,25.566,1,12.61,2.35,10.47\nregnetx_032,224,39.08,25.564,1,15.3,3.2,11.37\necaresnet50t,160,39.07,25.569,1,25.57,2.21,6.04\nhaloregnetz_b,224,38.92,25.668,1,11.68,1.97,11.94\nrepvit_m3,224,38.91,25.671,1,10.68,1.89,13.94\nhalonet26t,256,38.81,25.741,1,12.48,3.19,11.69\necaresnet50d_pruned,224,38.75,25.779,1,19.94,2.53,6.43\nedgenext_small_rw,320,38.68,25.829,1,7.83,2.46,14.85\nfbnetv3_g,288,38.42,26.003,1,16.62,1.77,21.09\nhgnetv2_b4,224,38.42,26.001,1,19.8,2.75,6.7\ncoatnet_pico_rw_224,224,38.15,26.
193,1,10.85,2.05,14.62\nconvit_tiny,224,38.05,26.261,1,5.71,1.26,7.94\nefficientvit_b2,256,38.01,26.282,1,24.33,2.09,19.03\nnf_regnet_b2,272,37.81,26.422,1,14.31,1.22,9.27\npit_s_224,224,37.65,26.533,1,23.46,2.88,11.56\nseresnext26ts,288,37.55,26.602,1,10.39,3.07,13.32\nresnext26ts,288,37.5,26.641,1,10.3,3.07,13.31\nmobilevitv2_125,256,37.19,26.863,1,7.48,2.86,20.1\nresnet34,224,37.02,26.985,1,21.8,3.67,3.74\nvit_small_patch32_384,384,36.96,27.025,1,22.92,3.45,8.25\nconvnext_pico_ols,288,36.86,27.111,1,9.06,2.37,10.74\npit_s_distilled_224,224,36.78,27.158,1,24.04,2.9,11.64\ntiny_vit_11m_224,224,36.53,27.347,1,20.35,2.04,13.49\nresnetblur18,288,36.4,27.445,1,11.69,3.87,5.6\nresnetrs50,160,36.37,27.467,1,35.69,2.29,6.2\nregnetz_b16,288,36.11,27.668,1,9.72,2.39,16.43\ndarknet17,256,36.08,27.687,1,14.3,3.26,7.18\nregnety_032,224,36.05,27.711,1,19.44,3.2,11.26\ngc_efficientnetv2_rw_t,224,35.95,27.794,1,13.68,1.94,9.97\nfastvit_s12,256,35.89,27.845,1,9.47,1.82,13.67\nmaxvit_pico_rw_256,256,35.82,27.894,1,7.46,1.83,22.3\necaresnet26t,256,35.72,27.972,1,16.01,3.35,10.53\nlegacy_seresnet34,224,35.62,28.047,1,21.96,3.67,3.74\nmixer_s16_224,224,35.4,28.223,1,18.53,3.79,5.97\ndla60x,224,35.39,28.232,1,17.35,3.54,13.8\neca_resnext26ts,288,35.16,28.416,1,10.3,3.07,13.32\nmobileone_s4,224,34.97,28.571,1,14.95,3.04,17.74\nresnet34d,224,34.93,28.602,1,21.82,3.91,4.54\nmaxvit_rmlp_pico_rw_256,256,34.8,28.705,1,7.52,1.85,24.86\nconvnext_nano,224,34.79,28.717,1,15.59,2.46,8.37\nseresnet34,224,34.75,28.749,1,21.96,3.67,3.74\nregnetz_c16,256,34.57,28.903,1,13.46,2.51,16.57\nresnest26d,224,34.46,28.994,1,17.07,3.64,9.97\nvit_betwixt_patch32_clip_224,224,34.42,29.026,1,61.41,3.09,4.17\nconvnext_nano_ols,224,34.3,29.131,1,15.65,2.65,9.38\ngcresnext26ts,288,34.18,29.228,1,10.48,3.07,13.33\nconvnextv2_nano,224,33.96,29.419,1,15.62,2.46,8.37\nvit_wee_patch16_reg1_gap_256,256,33.86,29.51,1,13.42,3.83,13.9\nrepvit_m1_5,224,33.68,29.667,1,14.64,2.31,15.7\nfastvit_sa12,256,33.64,29.7,1,11.58,1.96,
14.03\nregnetz_b16_evos,288,33.34,29.968,1,9.74,2.36,16.43\nxcit_tiny_12_p16_384,384,33.27,30.033,1,6.72,3.64,18.26\npoolformerv2_s24,224,33.25,30.052,1,21.34,3.42,10.68\nhgnet_tiny,224,33.19,30.106,1,14.74,4.54,6.36\ntwins_svt_small,224,33.19,30.101,1,24.06,2.94,13.75\nmobilenetv4_hybrid_large_075,256,33.17,30.117,1,22.75,2.06,11.64\nxcit_tiny_24_p16_224,224,33.16,30.134,1,12.12,2.34,11.82\nefficientnet_b3,288,33.12,30.163,1,12.23,1.63,21.49\nresnet26,288,32.71,30.542,1,16.0,3.9,12.15\nnf_regnet_b3,288,32.68,30.576,1,18.59,1.67,11.84\nefficientvit_b2,288,32.57,30.68,1,24.33,2.64,24.03\ngcvit_xxtiny,224,32.5,30.743,1,12.0,2.14,15.36\ncait_xxs24_224,224,32.44,30.799,1,11.96,2.53,20.29\nconvnextv2_pico,288,32.09,31.137,1,9.07,2.27,10.08\nskresnet34,224,31.94,31.278,1,22.28,3.67,5.13\ncoatnet_nano_cc_224,224,31.9,31.328,1,13.76,2.24,15.02\nmobilenetv4_hybrid_medium,384,31.84,31.377,1,11.07,3.01,21.18\nmixer_b32_224,224,31.83,31.392,1,60.29,3.24,6.29\nhrnet_w18_small_v2,224,31.76,31.451,1,15.6,2.62,9.65\nregnetz_c16_evos,256,31.48,31.744,1,13.49,2.48,16.57\nnf_seresnet26,224,31.29,31.929,1,17.4,2.41,7.36\nrexnetr_200,288,31.29,31.935,1,16.52,2.62,24.96\nregnetx_040,224,31.24,31.982,1,22.12,3.99,12.2\ncoatnet_nano_rw_224,224,31.2,32.033,1,15.14,2.41,15.41\ncspresnext50,256,31.19,32.036,1,20.57,4.05,15.86\ndla60_res2next,224,31.19,32.04,1,17.03,3.49,13.17\nnf_resnet26,224,31.16,32.071,1,16.0,2.41,7.35\ncoatnext_nano_rw_224,224,31.15,32.082,1,14.7,2.47,12.8\nresnext50_32x4d,224,30.91,32.332,1,25.03,4.26,14.4\nconvnext_pico,288,30.83,32.41,1,9.05,2.27,10.08\nvit_pwee_patch16_reg1_gap_256,256,30.75,32.497,1,15.25,4.37,15.87\nregnety_040,224,30.56,32.694,1,20.65,4.0,12.29\nmixnet_xl,224,30.5,32.766,1,11.9,0.93,14.57\nresnet50,224,30.48,32.78,1,25.56,4.11,11.11\npoolformer_s24,224,30.46,32.804,1,21.39,3.41,10.68\ndla60,224,30.28,32.993,1,22.04,4.26,10.16\nseresnext26t_32x4d,288,30.14,33.149,1,16.81,4.46,16.68\nresnetaa34d,224,29.99,33.316,1,21.82,4.43,5.07\ndarknet21,256,29.98
,33.327,1,20.86,3.93,7.47\ngernet_l,256,29.97,33.343,1,31.08,4.57,8.0\ntf_efficientnetv2_b3,300,29.89,33.434,1,14.36,3.04,15.74\nresnet50t,224,29.81,33.516,1,25.57,4.32,11.82\nresnet26d,288,29.49,33.889,1,16.01,4.29,13.48\nnf_ecaresnet26,224,29.45,33.93,1,16.0,2.41,7.36\ntf_efficientnet_b3,300,29.39,33.995,1,12.23,1.87,23.83\ndeit_small_patch16_224,224,29.36,34.032,1,22.05,4.61,11.95\nresnext50d_32x4d,224,29.31,34.096,1,25.05,4.5,15.2\nresnet50d,224,29.3,34.102,1,25.58,4.35,11.92\nresnet50c,224,28.94,34.531,1,25.58,4.35,11.92\nedgenext_base,256,28.92,34.545,1,18.51,3.85,15.58\nskresnet50,224,28.92,34.553,1,25.8,4.11,12.5\ndeit_small_distilled_patch16_224,224,28.75,34.757,1,22.44,4.63,12.02\ndensenet121,224,28.75,34.76,1,7.98,2.87,6.9\necaresnetlight,224,28.74,34.769,1,30.16,4.11,8.42\nnf_regnet_b3,320,28.72,34.794,1,18.59,2.05,14.61\nseresnext50_32x4d,224,28.7,34.812,1,27.56,4.26,14.42\necaresnet50d_pruned,288,28.61,34.922,1,19.94,4.19,10.61\ntwins_pcpvt_small,224,28.57,34.969,1,24.11,3.83,18.08\nefficientnetv2_rw_t,288,28.56,34.992,1,13.65,3.19,16.42\nlegacy_seresnet50,224,28.54,35.008,1,28.09,3.88,10.6\ncoatnet_rmlp_nano_rw_224,224,28.35,35.248,1,15.15,2.62,20.34\ndeit3_small_patch16_224,224,28.3,35.317,1,22.06,4.61,11.95\nresnet32ts,256,28.3,35.316,1,17.96,4.63,11.58\nseresnext26d_32x4d,288,28.24,35.385,1,16.81,4.51,16.85\nseresnet50,224,28.23,35.388,1,28.09,4.11,11.13\nmobilevitv2_150,256,28.2,35.435,1,10.59,4.09,24.11\nlegacy_seresnext50_32x4d,224,28.16,35.481,1,27.56,4.26,14.42\ncs3darknet_l,256,28.08,35.587,1,21.16,4.86,8.55\nresnet50_gn,224,27.93,35.772,1,25.56,4.14,11.11\necaresnet101d_pruned,224,27.91,35.796,1,24.88,3.48,7.69\ngcresnext50ts,256,27.84,35.895,1,15.67,3.75,15.46\nhgnetv2_b4,288,27.83,35.909,1,19.8,4.54,11.08\nefficientnet_b3,320,27.78,35.977,1,12.23,2.01,26.52\ncaformer_s18,224,27.71,36.057,1,26.34,4.13,19.39\npvt_v2_b1,224,27.63,36.165,1,14.01,2.12,15.39\ncs3darknet_focus_l,256,27.57,36.242,1,21.15,4.66,8.03\ncspresnet50,256,27.54,36.281,1,2
1.62,4.54,11.5\nresnet26t,320,27.51,36.326,1,16.01,5.24,16.44\ndensenetblur121d,224,27.46,36.385,1,8.0,3.11,7.9\nskresnet50d,224,27.46,36.393,1,25.82,4.36,13.31\nresnetv2_50,224,27.39,36.486,1,25.55,4.11,11.11\ncspresnet50d,256,27.36,36.525,1,21.64,4.86,12.55\necaresnet50t,224,27.34,36.544,1,25.57,4.32,11.83\ndpn68b,288,27.22,36.717,1,12.61,3.89,17.3\nefficientnet_b3_gn,288,27.22,36.708,1,11.73,1.74,23.35\nresnetv2_50d,224,27.21,36.721,1,25.57,4.35,11.92\nefficientformer_l3,224,27.2,36.738,1,31.41,3.93,12.01\ngc_efficientnetv2_rw_t,288,27.05,36.95,1,13.68,3.2,16.45\nres2next50,224,27.0,37.006,1,24.67,4.2,13.71\necaresnet50d,224,26.97,37.048,1,25.58,4.35,11.93\nfastvit_mci0,256,26.96,37.067,1,11.41,2.42,18.29\ngmlp_s16_224,224,26.92,37.128,1,19.42,4.42,15.1\nres2net50_26w_4s,224,26.87,37.194,1,25.7,4.28,12.61\nvit_tiny_patch16_384,384,26.83,37.249,1,5.79,4.7,25.39\nvit_small_patch16_224,224,26.71,37.412,1,22.05,4.61,11.95\nconvnext_tiny,224,26.62,37.534,1,28.59,4.47,13.44\neca_resnet33ts,256,26.56,37.626,1,19.68,4.76,11.66\nregnetv_040,224,26.53,37.665,1,20.64,4.0,12.29\nskresnext50_32x4d,224,26.51,37.688,1,27.48,4.5,17.18\nvit_relpos_small_patch16_rpn_224,224,26.5,37.708,1,21.97,4.59,13.05\nseresnet50t,224,26.45,37.776,1,28.1,4.32,11.83\neva02_tiny_patch14_336,336,26.39,37.866,1,5.76,4.68,27.16\nmobilenetv4_hybrid_medium,448,26.36,37.913,1,11.07,4.2,29.64\nvit_small_r26_s32_224,224,26.29,38.004,1,36.43,3.56,9.85\ngcvit_xtiny,224,26.28,38.018,1,19.98,2.93,20.26\nvit_relpos_small_patch16_224,224,26.23,38.099,1,21.98,4.59,13.05\nrexnetr_300,224,26.17,38.18,1,34.81,3.39,22.16\nregnety_040_sgn,224,26.08,38.315,1,20.65,4.03,12.29\nmobilenetv4_conv_large,256,26.05,38.363,1,32.59,2.86,12.14\nres2net50_48w_2s,224,26.05,38.36,1,25.29,4.18,11.72\nresnet33ts,256,26.05,38.358,1,19.68,4.76,11.66\ninception_next_tiny,224,25.94,38.523,1,28.06,4.19,11.98\nresnetblur50,224,25.93,38.542,1,25.56,5.16,12.02\ndla60_res2net,224,25.89,38.592,1,20.85,4.15,12.34\nsedarknet21,256,25.87,38.621
,1,20.95,3.93,7.47\nxcit_tiny_12_p8_224,224,25.82,38.703,1,6.71,4.81,23.6\ngcresnet33ts,256,25.8,38.728,1,19.88,4.76,11.68\nresnetv2_50t,224,25.78,38.759,1,25.57,4.32,11.82\nmobilenetv4_conv_large,320,25.68,38.912,1,32.59,4.47,18.97\nresnetrs50,224,25.6,39.031,1,35.69,4.48,12.14\nresnetaa50,224,25.59,39.051,1,25.56,5.15,11.64\ncs3sedarknet_l,256,25.57,39.083,1,21.91,4.86,8.56\nseresnet33ts,256,25.55,39.113,1,19.78,4.76,11.66\nvit_base_patch32_clip_quickgelu_224,224,25.52,39.162,1,87.85,4.41,5.01\nres2net50d,224,25.41,39.319,1,25.72,4.52,13.41\nresnet50_clip_gap,224,25.33,39.444,1,23.53,5.39,12.44\ngmixer_24_224,224,25.32,39.467,1,24.72,5.28,14.45\nresnet101,160,25.18,39.69,1,44.55,4.0,8.28\nrexnet_300,224,25.16,39.723,1,34.71,3.44,22.4\nresnetaa50d,224,25.13,39.772,1,25.58,5.39,12.44\nresnetblur50d,224,25.03,39.922,1,25.58,5.4,12.82\nefficientformerv2_l,224,24.99,39.986,1,26.32,2.59,18.54\nresnet34,288,24.98,40.004,1,21.8,6.07,6.18\nvit_srelpos_small_patch16_224,224,24.97,40.028,1,21.97,4.59,12.16\nhiera_tiny_224,224,24.89,40.14,1,27.91,4.91,17.13\nres2net50_14w_8s,224,24.78,40.33,1,25.06,4.21,13.28\nregnety_032,288,24.68,40.5,1,19.44,5.29,18.61\nresnetv2_50d_frn,224,24.63,40.576,1,25.59,4.33,11.92\ndensenet169,224,24.61,40.608,1,14.15,3.4,7.3\nresnet33ts,288,24.57,40.672,1,19.68,6.02,14.75\nefficientnet_b3_g8_gn,288,24.55,40.716,1,14.25,2.59,23.35\nconvnextv2_tiny,224,24.54,40.715,1,28.64,4.47,13.44\nsehalonet33ts,256,24.53,40.735,1,13.69,3.55,14.7\nresnetv2_50d_gn,224,24.52,40.755,1,25.57,4.38,11.92\nvit_base_patch32_clip_224,224,24.37,41.007,1,88.22,4.41,5.01\ndavit_tiny,224,24.36,41.026,1,28.36,4.54,18.89\nconvnext_nano,288,24.35,41.038,1,15.59,4.06,13.84\ncoat_lite_small,224,24.34,41.057,1,19.84,3.96,22.09\nconvnextv2_nano,288,24.34,41.052,1,15.62,4.06,13.84\necaresnet26t,320,24.25,41.216,1,16.01,5.24,16.44\npvt_v2_b2_li,224,24.06,41.533,1,22.55,3.91,27.6\nregnetz_c16,320,24.0,41.635,1,13.46,3.92,25.88\nresnest50d_4s2x40d,224,23.98,41.674,1,30.42,4.4,17.94\nres
net34d,288,23.92,41.785,1,21.82,6.47,7.51\nvit_base_patch32_224,224,23.74,42.092,1,88.22,4.41,5.01\ngcresnext50ts,288,23.72,42.132,1,15.67,4.75,19.57\nefficientnet_b3_gn,320,23.7,42.166,1,11.73,2.14,28.83\nseresnet34,288,23.69,42.184,1,21.96,6.07,6.18\nresnet50s,224,23.56,42.42,1,25.68,5.47,13.52\nrepvgg_a2,224,23.52,42.486,1,28.21,5.7,6.26\nseresnetaa50d,224,23.38,42.748,1,28.11,5.4,12.46\nconvformer_s18,224,23.35,42.802,1,26.77,3.96,15.82\nvit_small_resnet26d_224,224,23.28,42.926,1,63.61,5.07,11.12\nsam2_hiera_tiny,224,23.24,42.996,1,26.85,4.91,17.12\nflexivit_small,240,23.15,43.162,1,22.06,5.35,14.18\npoolformerv2_s36,224,23.1,43.266,1,30.79,5.01,15.82\nresnetv2_50d_evos,224,23.02,43.404,1,25.59,4.33,11.92\nxcit_small_12_p16_224,224,23.02,43.419,1,26.25,4.82,12.58\ncait_xxs36_224,224,22.89,43.667,1,17.3,3.77,30.34\nconvnext_tiny_hnf,224,22.84,43.758,1,28.59,4.47,13.44\ntiny_vit_21m_224,224,22.73,43.971,1,33.22,4.29,20.08\nresnet101,176,22.71,44.012,1,44.55,4.92,10.08\ntf_efficientnet_lite4,380,22.62,44.185,1,13.01,4.04,45.66\ncoatnet_bn_0_rw_224,224,22.61,44.203,1,27.44,4.67,22.04\nresnet32ts,288,22.61,44.196,1,17.96,5.86,14.65\nmaxvit_nano_rw_256,256,22.59,44.245,1,15.45,4.46,30.28\nconvnext_nano_ols,288,22.45,44.517,1,15.65,4.38,15.5\nresnest50d,224,22.45,44.512,1,27.48,5.4,14.36\nregnetz_c16_evos,320,22.44,44.548,1,13.49,3.86,25.88\nvisformer_small,224,22.38,44.663,1,40.22,4.88,11.43\nese_vovnet39b,224,22.3,44.813,1,24.57,7.09,6.74\nmaxvit_rmlp_nano_rw_256,256,22.28,44.85,1,15.5,4.47,31.92\ncs3darknet_focus_l,288,22.27,44.874,1,21.15,5.9,10.16\nresmlp_24_224,224,22.22,44.978,1,30.02,5.96,10.91\nregnetx_064,224,22.15,45.129,1,26.21,6.49,16.37\nconvmixer_1024_20_ks9_p14,224,22.09,45.242,1,24.38,5.55,5.51\nselecsls60,224,22.09,45.246,1,30.67,3.59,5.52\ntresnet_m,224,22.09,45.236,1,31.39,5.75,7.31\nnextvit_small,224,22.04,45.35,1,31.76,5.81,18.44\necaresnet50t,256,21.83,45.776,1,25.57,5.64,15.45\nlambda_resnet50ts,256,21.8,45.839,1,21.54,5.07,17.48\nresnet50_clip,
224,21.77,45.907,1,38.32,6.14,12.98\nmobilevitv2_175,256,21.73,45.985,1,14.25,5.54,28.13\nseresnet33ts,288,21.65,46.163,1,19.78,6.02,14.76\nvit_little_patch16_reg1_gap_256,256,21.63,46.198,1,22.52,6.27,18.06\ncs3darknet_l,288,21.62,46.225,1,21.16,6.16,10.83\npoolformer_s36,224,21.62,46.237,1,30.86,5.0,15.82\nselecsls42,224,21.54,46.391,1,30.35,2.94,4.62\ngcresnet50t,256,21.51,46.467,1,25.9,5.42,14.67\nvit_little_patch16_reg4_gap_256,256,21.51,46.462,1,22.52,6.35,18.33\ninception_v3,299,21.48,46.52,1,23.83,5.73,8.97\nefficientnetv2_s,288,21.43,46.639,1,21.46,4.75,20.13\ndla102x,224,21.31,46.899,1,26.31,5.89,19.42\nhgnet_tiny,288,21.24,47.063,1,14.74,7.51,10.51\nregnetz_040_h,256,21.23,47.076,1,28.94,4.12,24.29\nselecsls60b,224,21.23,47.084,1,32.77,3.63,5.52\nsebotnet33ts_256,256,21.22,47.09,1,13.7,3.89,17.46\ncoatnet_0_rw_224,224,21.11,47.346,1,27.44,4.43,18.73\ngcresnet33ts,288,21.11,47.353,1,19.88,6.02,14.78\nedgenext_base,320,21.02,47.535,1,18.51,6.01,24.32\nresnetv2_50x1_bit,224,20.97,47.659,1,25.55,4.23,11.11\nresnest50d_1s4x24d,224,20.96,47.676,1,25.68,4.43,13.57\ndensenet121,288,20.94,47.724,1,7.98,4.74,11.41\nmvitv2_tiny,224,20.94,47.716,1,24.17,4.7,21.16\nlevit_512,224,20.93,47.75,1,95.17,5.64,10.22\nvit_base_patch32_clip_256,256,20.88,47.863,1,87.86,5.76,6.65\neca_resnet33ts,288,20.85,47.947,1,19.68,6.02,14.76\nefficientvit_b3,224,20.85,47.936,1,48.65,3.99,26.9\nregnetz_040,256,20.81,48.025,1,27.12,4.06,24.19\nregnetz_d8,256,20.72,48.241,1,23.37,3.97,23.74\nresnetaa34d,288,20.68,48.319,1,21.82,7.33,8.38\nregnety_064,224,20.66,48.367,1,30.58,6.39,16.41\neca_vovnet39b,224,20.63,48.456,1,22.6,7.09,6.74\nfastvit_sa24,256,20.6,48.531,1,21.55,3.8,24.32\nregnety_040,288,20.57,48.586,1,20.65,6.61,20.3\neva02_small_patch14_224,224,20.51,48.731,1,21.62,6.14,18.28\ncoatnet_rmlp_0_rw_224,224,20.41,48.965,1,27.45,4.72,24.89\nregnetz_d8_evos,256,20.41,48.979,1,23.46,4.5,24.92\nresnet50_mlp,256,20.4,48.996,1,26.65,7.05,16.25\ncspresnet50w,256,20.38,49.046,1,28.12,5.04,12.
19\ncs3sedarknet_l,288,20.36,49.078,1,21.91,6.16,10.83\nresnet50,288,20.27,49.302,1,25.56,6.8,18.37\ntnt_s_patch16_224,224,20.22,49.434,1,23.76,5.24,24.37\nselecsls42b,224,20.21,49.453,1,32.46,2.98,4.62\ncrossvit_15_240,240,20.18,49.54,1,27.53,5.81,19.77\nefficientnetv2_rw_s,288,20.17,49.545,1,23.94,4.91,21.41\nnf_regnet_b4,320,20.15,49.614,1,30.21,3.29,19.88\nlevit_512d,224,20.1,49.719,1,92.5,5.85,11.3\ndensenetblur121d,288,20.05,49.842,1,8.0,5.14,13.06\ncoatnet_0_224,224,20.03,49.905,1,25.04,4.58,24.01\nmaxxvit_rmlp_nano_rw_256,256,20.03,49.896,1,16.78,4.37,26.05\nefficientnet_b3_g8_gn,320,19.99,49.99,1,14.25,3.2,28.83\nlamhalobotnet50ts_256,256,19.98,50.023,1,22.57,5.02,18.44\nnest_tiny,224,19.98,50.035,1,17.06,5.83,25.48\nrepvit_m2_3,224,19.97,50.049,1,23.69,4.57,26.21\nmobilenetv4_conv_large,384,19.92,50.188,1,32.59,6.43,27.31\nmixnet_xxl,224,19.9,50.236,1,23.96,2.04,23.43\nefficientnet_b4,320,19.87,50.313,1,19.34,3.13,34.76\ncs3sedarknet_xdw,256,19.74,50.637,1,21.6,5.97,17.18\nfocalnet_tiny_srf,224,19.63,50.908,1,28.43,4.42,16.32\nvovnet39a,224,19.45,51.376,1,22.6,7.09,6.73\nnest_tiny_jx,224,19.42,51.473,1,17.06,5.83,25.48\ntf_efficientnetv2_s,300,19.41,51.505,1,21.46,5.35,22.73\ncspdarknet53,256,19.38,51.573,1,27.64,6.57,16.81\ncrossvit_15_dagger_240,240,19.31,51.749,1,28.21,6.13,20.43\ncrossvit_small_240,240,19.31,51.758,1,26.86,5.63,18.17\nese_vovnet39b_evos,224,19.31,51.751,1,24.58,7.07,6.74\nhalonet50ts,256,19.29,51.811,1,22.73,5.3,19.2\nresnet50t,288,19.24,51.951,1,25.57,7.14,19.53\nefficientvit_l1,224,19.21,52.041,1,52.65,5.27,15.85\nresnext50_32x4d,288,19.14,52.223,1,25.03,7.04,23.81\necaresnet101d_pruned,288,19.12,52.28,1,24.88,5.75,12.71\nresnet50d,288,19.02,52.56,1,25.58,7.19,19.7\ndla102,224,19.0,52.604,1,33.27,7.19,14.18\nhrnet_w18,224,18.91,52.842,1,21.3,4.32,16.31\neca_nfnet_l0,224,18.9,52.886,1,24.14,4.35,10.47\ndeit3_medium_patch16_224,224,18.81,53.125,1,38.85,8.0,15.93\nregnetv_064,224,18.81,53.15,1,30.58,6.39,16.41\nresnet51q,256,18.74,53.32
5,1,35.7,6.38,16.55\nlevit_conv_512,224,18.66,53.577,1,95.17,5.64,10.22\nseresnet50,288,18.63,53.661,1,28.09,6.8,18.39\nnf_ecaresnet50,224,18.58,53.793,1,25.56,4.21,11.13\nresnext50d_32x4d,288,18.58,53.804,1,25.05,7.44,25.13\nregnetx_080,224,18.54,53.896,1,39.57,8.02,14.06\nxcit_tiny_24_p16_384,384,18.53,53.945,1,12.12,6.87,34.29\necaresnetlight,288,18.49,54.067,1,30.16,6.79,13.91\nefficientnet_lite4,380,18.46,54.139,1,13.01,4.04,45.66\nres2net50_26w_6s,224,18.45,54.179,1,37.05,6.33,15.28\nhiera_small_224,224,18.4,54.311,1,35.01,6.42,20.75\nnf_seresnet50,224,18.35,54.466,1,28.09,4.21,11.13\nefficientnet_el_pruned,300,18.29,54.65,1,10.59,8.0,30.7\nmobilenetv4_conv_aa_large,384,18.24,54.793,1,32.59,7.07,32.29\nhgnet_small,224,18.18,54.971,1,24.36,8.53,8.79\nseresnext50_32x4d,288,18.17,55.012,1,27.56,7.04,23.82\nregnetv_040,288,18.16,55.041,1,20.64,6.6,20.3\nnfnet_l0,224,18.13,55.121,1,35.07,4.36,10.47\nresnet152,160,18.13,55.135,1,60.19,5.9,11.51\ntf_efficientnet_el,300,18.09,55.246,1,10.59,8.0,30.7\nmaxvit_tiny_rw_224,224,18.08,55.285,1,29.06,5.11,33.11\nlevit_conv_512d,224,18.04,55.4,1,92.5,5.85,11.3\nregnetz_d32,256,18.03,55.442,1,27.58,5.98,23.74\nefficientnet_el,300,18.01,55.489,1,10.59,8.0,30.7\nbotnet50ts_256,256,17.92,55.786,1,22.74,5.54,22.23\ncoat_tiny,224,17.9,55.83,1,5.5,4.35,27.2\nconvit_small,224,17.82,56.107,1,27.78,5.76,17.87\nhgnetv2_b5,224,17.81,56.119,1,39.57,6.56,11.19\nfocalnet_tiny_lrf,224,17.77,56.251,1,28.65,4.49,17.76\ndensenet201,224,17.74,56.353,1,20.01,4.34,7.85\ngcresnet50t,288,17.53,57.015,1,25.9,6.86,18.57\necaresnet50d,288,17.4,57.445,1,25.58,7.19,19.72\necaresnet50t,288,17.4,57.452,1,25.57,7.14,19.55\nrexnetr_300,288,17.39,57.482,1,34.81,5.59,36.61\nseresnet50t,288,17.39,57.487,1,28.1,7.14,19.55\nmaxxvitv2_nano_rw_256,256,17.38,57.518,1,23.7,6.26,23.05\nrdnet_tiny,224,17.34,57.657,1,23.86,5.06,15.98\nresnet101,224,17.34,57.657,1,44.55,7.83,16.23\nvit_relpos_medium_patch16_rpn_224,224,17.33,57.668,1,38.73,7.97,17.02\nresnet50_gn,288,17.
29,57.794,1,25.56,6.85,18.37\nresnetrs101,192,17.26,57.911,1,63.62,6.04,12.7\nmobilevitv2_200,256,17.22,58.054,1,18.45,7.22,32.15\ndpn92,224,17.19,58.161,1,37.67,6.54,18.21\nregnety_040_sgn,288,17.19,58.137,1,20.65,6.67,20.3\nregnety_080,224,17.19,58.131,1,39.18,8.0,17.97\nvit_relpos_medium_patch16_224,224,17.17,58.219,1,38.75,7.97,17.02\nvit_medium_patch16_clip_224,224,17.15,58.278,1,38.59,8.0,15.93\nresnetv2_101,224,17.14,58.329,1,44.54,7.83,16.23\nresnetv2_50,288,17.09,58.478,1,25.55,6.79,18.37\ncs3darknet_focus_x,256,17.05,58.614,1,35.02,8.03,10.69\nxcit_nano_12_p8_384,384,16.99,58.831,1,3.05,6.34,46.08\nfastvit_mci1,256,16.98,58.858,1,21.54,4.72,32.84\nconvnext_tiny,288,16.97,58.884,1,28.59,7.39,22.21\nresnet101c,224,16.95,58.961,1,44.57,8.08,17.04\nvit_relpos_medium_patch16_cls_224,224,16.95,58.979,1,38.76,8.03,18.24\nresnetv2_101d,224,16.89,59.16,1,44.56,8.07,17.04\ndarknetaa53,256,16.8,59.481,1,36.02,7.97,12.39\nhalo2botnet50ts_256,256,16.78,59.557,1,22.64,5.02,21.78\nvolo_d1_224,224,16.72,59.796,1,26.63,6.94,24.43\nresnetaa50,288,16.69,59.883,1,25.56,8.52,19.24\nefficientvit_b3,256,16.66,60.01,1,48.65,5.2,35.01\ntwins_pcpvt_base,224,16.65,60.03,1,43.83,6.68,25.25\nresnet61q,256,16.62,60.14,1,36.85,7.8,17.01\nresnetblur50,288,16.61,60.191,1,25.56,8.52,19.87\nvit_base_resnet26d_224,224,16.58,60.287,1,101.4,6.97,13.16\ngcvit_tiny,224,16.52,60.518,1,28.22,4.79,29.82\nresnet101d,224,16.51,60.551,1,44.57,8.08,17.04\npvt_v2_b2,224,16.5,60.588,1,25.36,4.05,27.53\ncs3darknet_x,256,16.43,60.827,1,35.05,8.38,11.35\nese_vovnet57b,224,16.42,60.873,1,38.61,8.95,7.52\nlegacy_xception,299,16.38,61.034,1,22.86,8.4,35.83\ncaformer_s36,224,16.37,61.082,1,39.3,8.0,37.53\nresnet152,176,16.36,61.094,1,60.19,7.22,13.99\nvit_base_patch32_plus_256,256,16.32,61.244,1,119.48,7.79,7.76\nregnety_080_tv,224,16.31,61.303,1,39.38,8.51,19.73\nnf_regnet_b4,384,16.3,61.318,1,30.21,4.7,28.61\nresnetblur50d,288,16.16,61.857,1,25.58,8.92,21.19\nnf_resnet50,256,16.12,62.022,1,25.56,5.46,14.52\nr
esnetaa50d,288,16.12,62.011,1,25.58,8.92,20.57\nlegacy_seresnet101,224,16.09,62.122,1,49.33,7.61,15.74\nvit_relpos_base_patch32_plus_rpn_256,256,16.08,62.159,1,119.42,7.68,8.01\nvitamin_small_224,224,16.02,62.397,1,22.03,5.92,26.38\nconvnextv2_tiny,288,16.0,62.486,1,28.64,7.39,22.21\nswin_s3_tiny_224,224,15.94,62.698,1,28.33,4.64,19.13\nresnet101_clip_gap,224,15.8,63.277,1,42.52,9.11,17.56\nresnext101_32x4d,224,15.77,63.375,1,44.18,8.01,21.23\nlegacy_seresnext101_32x4d,224,15.62,63.985,1,48.96,8.02,21.26\necaresnet50t,320,15.61,64.017,1,25.57,8.82,24.13\nseresnext101_32x4d,224,15.61,64.046,1,48.96,8.02,21.26\ncs3sedarknet_x,256,15.58,64.167,1,35.4,8.38,11.35\nseresnetaa50d,288,15.52,64.403,1,28.11,8.92,20.59\nresnet51q,288,15.5,64.477,1,35.7,8.07,20.94\nnextvit_base,224,15.45,64.681,1,44.82,8.29,23.71\nvit_srelpos_medium_patch16_224,224,15.43,64.774,1,38.74,7.96,16.21\nvovnet57a,224,15.4,64.912,1,36.64,8.95,7.52\nresnet101s,224,15.39,64.967,1,44.67,9.19,18.64\nseresnet101,224,15.38,64.992,1,49.33,7.84,16.27\nresnetaa101d,224,15.21,65.73,1,44.57,9.12,17.56\necaresnet101d,224,15.14,66.034,1,44.57,8.08,17.07\nmvitv2_small,224,15.14,66.027,1,34.87,7.0,28.08\nrepvgg_b1g4,224,15.08,66.265,1,39.97,8.15,10.64\nresnetblur101d,224,15.07,66.319,1,44.57,9.12,17.94\nresmlp_36_224,224,15.04,66.459,1,44.69,8.91,16.33\nefficientvit_l2,224,15.01,66.578,1,63.71,6.97,19.58\nhrnet_w18_ssld,224,15.01,66.595,1,21.3,4.32,16.31\nhieradet_small,256,15.0,66.639,1,34.72,8.51,27.76\nregnetz_040_h,320,15.0,66.65,1,28.94,6.43,37.94\nhiera_small_abswin_256,256,14.93,66.937,1,34.36,8.29,26.38\nregnetz_d8,320,14.89,67.145,1,23.37,6.19,37.08\nvit_medium_patch16_gap_240,240,14.88,67.192,1,44.4,9.22,18.81\nmaxvit_tiny_tf_224,224,14.78,67.612,1,30.92,5.6,35.78\ntresnet_v2_l,224,14.74,67.805,1,46.17,8.85,16.34\nvit_base_r26_s32_224,224,14.74,67.808,1,101.38,6.81,12.36\nfastvit_sa36,256,14.73,67.874,1,31.53,5.64,34.61\nres2net101_26w_4s,224,14.73,67.84,1,45.21,8.1,18.45\nese_vovnet39b,288,14.65,68.251,1,
24.57,11.71,11.13\ntf_efficientnet_b4,380,14.63,68.312,1,19.34,4.49,49.49\nefficientnet_b4,384,14.42,69.335,1,19.34,4.51,50.04\nres2net101d,224,14.35,69.65,1,45.23,8.35,19.25\ndla102x2,224,14.29,69.96,1,41.28,9.34,29.91\nregnetz_040,320,14.25,70.144,1,27.12,6.35,37.78\ndarknet53,256,14.22,70.293,1,41.61,9.31,12.39\nresnet101_clip,224,14.2,70.412,1,56.26,9.81,18.08\nxcit_tiny_24_p8_224,224,14.19,70.43,1,12.11,9.21,45.39\nregnetz_d8_evos,320,14.17,70.558,1,23.46,7.03,38.92\nresnetv2_50d_gn,288,14.15,70.641,1,25.57,7.24,19.7\nmobilenetv4_hybrid_large,384,14.13,70.77,1,37.76,7.77,34.52\nefficientvit_b3,288,14.11,70.869,1,48.65,6.58,44.2\nconvnextv2_nano,384,14.05,71.163,1,15.62,7.22,24.61\nres2net50_26w_8s,224,14.04,71.2,1,48.4,8.37,17.95\ndarknetaa53,288,14.0,71.413,1,36.02,10.08,15.68\nxception41p,299,14.0,71.38,1,26.91,9.25,39.86\nselecsls84,224,13.98,71.492,1,50.95,5.9,7.57\ncrossvit_18_240,240,13.96,71.63,1,43.27,9.05,26.26\nnf_resnet50,288,13.95,71.667,1,25.56,6.88,18.37\nhrnet_w18_ssld,288,13.94,71.685,1,21.3,7.14,26.96\nvit_base_resnet50d_224,224,13.9,71.926,1,110.97,8.73,16.92\nresnet101d,256,13.83,72.262,1,44.57,10.55,22.25\nconvformer_s36,224,13.8,72.438,1,40.01,7.67,30.5\neca_nfnet_l0,288,13.77,72.613,1,24.14,7.12,17.29\ncrossvit_18_dagger_240,240,13.74,72.779,1,44.27,9.5,27.03\nregnety_064,288,13.72,72.855,1,30.58,10.56,27.11\nconvnext_small,224,13.67,73.118,1,50.22,8.71,21.56\ninception_next_small,224,13.67,73.105,1,49.37,8.36,19.27\nmvitv2_small_cls,224,13.66,73.202,1,34.87,7.04,28.17\npoolformerv2_m36,224,13.66,73.166,1,56.08,8.81,22.02\ndensenet161,224,13.58,73.622,1,28.68,7.79,11.06\nefficientnetv2_s,384,13.56,73.714,1,21.46,8.44,35.77\ntf_efficientnetv2_s,384,13.52,73.932,1,21.46,8.44,35.77\nnfnet_l0,288,13.49,74.084,1,35.07,7.13,17.29\nmobilevitv2_150,384,13.48,74.141,1,10.59,9.2,54.25\ncoat_mini,224,13.45,74.346,1,10.34,6.82,33.68\nconvnext_tiny_hnf,288,13.42,74.494,1,28.59,7.39,22.21\nvit_medium_patch16_gap_256,256,13.41,74.536,1,38.86,10.59,22.15\
nswinv2_cr_tiny_224,224,13.37,74.759,1,28.33,4.66,28.45\nresnet61q,288,13.36,74.793,1,36.85,9.87,21.52\nresnetv2_50d_evos,288,13.35,74.854,1,25.59,7.15,19.7\nvit_medium_patch16_reg1_gap_256,256,13.34,74.917,1,38.88,10.63,22.26\nhiera_base_224,224,13.32,75.074,1,51.52,9.4,30.42\nmaxxvit_rmlp_tiny_rw_256,256,13.31,75.078,1,29.64,6.66,39.76\nvit_medium_patch16_rope_reg1_gap_256,256,13.29,75.192,1,38.74,10.63,22.26\nxception41,299,13.25,75.445,1,26.97,9.28,39.86\nefficientnetv2_rw_s,384,13.21,75.671,1,23.94,8.72,38.03\nswinv2_cr_tiny_ns_224,224,13.19,75.808,1,28.33,4.66,28.45\ncs3darknet_x,288,13.16,75.972,1,35.05,10.6,14.36\nvit_medium_patch16_reg4_gap_256,256,13.11,76.222,1,38.88,10.76,22.6\nresnetv2_101x1_bit,224,13.02,76.755,1,44.54,8.04,16.23\ntwins_svt_base,224,12.99,76.97,1,56.07,8.59,26.33\ncoatnet_rmlp_1_rw_224,224,12.93,77.316,1,41.69,7.85,35.47\nconvnextv2_small,224,12.84,77.862,1,50.32,8.71,21.56\nmobilenetv4_conv_large,448,12.81,78.014,1,32.59,8.75,37.17\ncs3sedarknet_x,288,12.68,78.865,1,35.4,10.6,14.37\nregnetx_120,224,12.63,79.151,1,46.11,12.13,21.37\nxcit_small_24_p16_224,224,12.59,79.418,1,47.67,9.1,23.64\ndavit_small,224,12.56,79.582,1,49.75,8.8,30.49\npoolformer_m36,224,12.5,79.957,1,56.17,8.8,22.02\ncoatnet_1_rw_224,224,12.42,80.466,1,41.72,8.04,34.6\nswin_tiny_patch4_window7_224,224,12.42,80.508,1,28.29,4.51,17.06\ncoatnet_rmlp_1_rw2_224,224,12.39,80.67,1,41.72,8.11,40.13\nhrnet_w30,224,12.38,80.735,1,37.71,8.15,21.21\nhrnet_w32,224,12.34,81.003,1,41.23,8.97,22.02\ntresnet_l,224,12.28,81.411,1,55.99,10.9,11.9\nwide_resnet50_2,176,12.22,81.798,1,68.88,7.29,8.97\nresnet152,224,12.12,82.493,1,60.19,11.56,22.56\nresnext101_32x8d,176,12.11,82.522,1,88.79,10.33,19.37\nregnetv_064,288,12.09,82.675,1,30.58,10.55,27.11\nresnet152d,224,12.09,82.714,1,60.21,11.8,23.36\nresnet152c,224,12.08,82.742,1,60.21,11.8,23.36\nregnety_120,224,12.07,82.847,1,51.82,12.14,21.38\npvt_v2_b3,224,12.06,82.917,1,45.24,6.92,37.7\nvit_small_r26_s32_384,384,12.05,82.953,1,36.47,10
.43,29.85\ndarknet53,288,12.01,83.223,1,41.61,11.78,15.68\ndla169,224,12.0,83.33,1,53.39,11.6,20.2\nnextvit_large,224,11.87,84.205,1,57.87,10.78,28.99\ncait_s24_224,224,11.85,84.345,1,46.92,9.35,40.58\nefficientvit_l2,256,11.85,84.387,1,63.71,9.09,25.49\nhgnet_small,288,11.83,84.498,1,24.36,14.09,14.53\ncs3edgenet_x,256,11.82,84.601,1,47.82,11.53,12.92\nfastvit_mci2,256,11.77,84.931,1,35.82,7.91,43.34\ntwins_pcpvt_large,224,11.75,85.04,1,60.99,9.84,35.82\nmobilenetv4_conv_aa_large,448,11.69,85.545,1,32.59,9.63,43.94\nvit_small_resnet50d_s16_224,224,11.61,86.102,1,57.53,13.48,24.82\nfastvit_ma36,256,11.6,86.188,1,44.07,7.88,41.09\nefficientformer_l7,224,11.59,86.272,1,82.23,10.17,24.45\nregnetz_d32,320,11.54,86.648,1,27.58,9.33,37.08\nrdnet_small,224,11.51,86.884,1,50.44,8.74,22.55\nlegacy_seresnet152,224,11.4,87.709,1,66.82,11.33,22.08\nhgnetv2_b5,288,11.33,88.251,1,39.57,10.84,18.5\ncoatnet_1_224,224,11.32,88.289,1,42.23,8.7,39.0\nresnetv2_152d,224,11.31,88.39,1,60.2,11.8,23.36\nregnety_080,288,11.26,88.798,1,39.18,13.22,29.69\nnest_small_jx,224,11.24,88.941,1,38.35,10.35,40.04\nmixer_b16_224,224,11.22,89.103,1,59.88,12.62,14.53\nlevit_384_s8,224,11.2,89.297,1,39.12,9.98,35.86\nresnetv2_152,224,11.2,89.225,1,60.19,11.55,22.56\nnf_resnet101,224,11.16,89.569,1,44.55,8.01,16.23\ninception_v4,299,11.15,89.619,1,42.68,12.28,15.09\nresnet152s,224,11.13,89.808,1,60.32,12.92,24.96\nnest_small,224,11.12,89.891,1,38.35,10.35,40.04\nresnet101,288,11.12,89.884,1,44.55,12.95,26.83\ncs3se_edgenet_x,256,11.11,89.994,1,50.72,11.53,12.94\ndpn98,224,11.06,90.407,1,61.57,11.73,25.2\ncoat_lite_medium,224,11.05,90.497,1,44.57,9.81,40.06\ndeit3_small_patch16_384,384,11.04,90.541,1,22.21,15.52,50.78\nnf_ecaresnet101,224,10.88,91.856,1,44.55,8.01,16.27\nnf_seresnet101,224,10.88,91.896,1,49.33,8.02,16.27\nseresnet152,224,10.79,92.672,1,66.82,11.57,22.61\nconvnext_tiny,384,10.74,93.109,1,28.59,13.14,39.48\nrepvgg_b1,224,10.73,93.194,1,57.42,13.16,10.64\ndm_nfnet_f0,192,10.69,93.498,1,71.49,
7.21,10.16\nsequencer2d_s,224,10.66,93.751,1,27.65,4.96,11.31\nnfnet_f0,192,10.56,94.636,1,71.49,7.21,10.16\npoolformerv2_m48,224,10.56,94.635,1,73.35,11.59,29.17\nlevit_conv_384_s8,224,10.52,95.033,1,39.12,9.98,35.86\nresnetv2_101,288,10.51,95.101,1,44.54,12.94,26.83\nregnetz_e8,256,10.45,95.666,1,57.7,9.91,40.94\nnf_regnet_b5,384,10.42,95.969,1,49.74,7.95,42.9\nresnest101e,256,10.42,95.966,1,48.28,13.38,28.66\npit_b_distilled_224,224,10.41,95.993,1,74.79,12.5,33.07\nmobilevitv2_175,384,10.35,96.588,1,14.25,12.47,63.29\nxcit_small_12_p16_384,384,10.35,96.636,1,26.25,14.14,36.51\nfocalnet_small_srf,224,10.34,96.692,1,49.89,8.62,26.26\ninception_resnet_v2,299,10.33,96.814,1,55.84,13.18,25.06\npit_b_224,224,10.33,96.761,1,73.76,12.42,32.94\nswinv2_tiny_window8_256,256,10.3,97.01,1,28.35,5.96,24.57\nresnext101_32x4d,288,10.25,97.558,1,44.18,13.24,35.09\nmaxvit_rmlp_small_rw_224,224,10.24,97.618,1,64.9,10.75,49.3\nseresnext101_32x4d,288,10.23,97.701,1,48.96,13.25,35.12\ngcvit_small,224,10.19,98.115,1,51.09,8.57,41.61\nvit_base_patch32_384,384,10.18,98.198,1,88.3,13.06,16.5\nmvitv2_base,224,10.16,98.419,1,51.47,10.16,40.5\nmobilenetv4_conv_aa_large,480,10.13,98.667,1,32.59,11.05,50.45\nhiera_base_plus_224,224,10.1,99.002,1,69.9,12.67,37.98\nhiera_base_abswin_256,256,10.09,99.038,1,51.27,12.46,40.7\nseresnet101,288,10.09,99.045,1,49.33,12.95,26.87\ntnt_b_patch16_224,224,10.09,99.034,1,65.41,14.09,39.01\ncs3edgenet_x,288,10.08,99.208,1,47.82,14.59,16.36\nvit_base_patch32_clip_384,384,10.07,99.249,1,88.3,13.06,16.5\nfocalnet_small_lrf,224,9.99,100.025,1,50.34,8.74,28.61\nvit_small_patch16_384,384,9.98,100.186,1,22.2,15.52,50.78\nmixer_l32_224,224,9.95,100.463,1,206.94,11.27,19.86\nconvformer_s18,384,9.85,101.499,1,26.77,11.63,46.49\nese_vovnet99b,224,9.85,101.454,1,63.2,16.51,11.27\nresnetrs101,288,9.84,101.582,1,63.62,13.56,28.53\nconvnextv2_tiny,384,9.83,101.749,1,28.64,13.14,39.48\necaresnet101d,288,9.83,101.717,1,44.57,13.35,28.19\nresnet152d,256,9.83,101.661,1,60.21,15
.41,30.51\nvit_betwixt_patch16_reg1_gap_256,256,9.83,101.705,1,60.4,16.32,27.83\nresnet101d,320,9.82,101.832,1,44.57,16.48,34.77\nefficientvit_l2,288,9.81,101.948,1,63.71,11.51,32.19\nefficientnetv2_m,320,9.79,102.161,1,54.14,11.01,39.97\npoolformer_m48,224,9.78,102.197,1,73.47,11.59,29.17\nregnetx_160,224,9.78,102.176,1,54.28,15.99,25.52\nhrnet_w40,224,9.75,102.563,1,57.56,12.75,25.29\nvit_betwixt_patch16_reg4_gap_256,256,9.75,102.537,1,60.4,16.52,28.24\nresnetblur101d,288,9.74,102.665,1,44.57,15.07,29.65\nvit_betwixt_patch16_rope_reg4_gap_256,256,9.68,103.258,1,60.23,16.52,28.24\nresnetaa101d,288,9.65,103.608,1,44.57,15.07,29.03\nxception65p,299,9.65,103.549,1,39.82,13.91,52.48\nresnet200,224,9.56,104.56,1,64.67,15.07,32.19\ndeit_base_patch16_224,224,9.46,105.633,1,86.57,17.58,23.9\ncaformer_s18,384,9.44,105.951,1,26.34,13.42,77.34\nbeit_base_patch16_224,224,9.41,106.266,1,86.53,17.58,23.9\neca_nfnet_l1,256,9.35,106.973,1,41.41,9.62,22.04\nwide_resnet50_2,224,9.35,106.889,1,68.88,11.43,14.4\ndeit3_base_patch16_224,224,9.3,107.552,1,86.59,17.58,23.9\nrepvgg_b2g4,224,9.25,108.051,1,61.76,12.63,12.9\nbeitv2_base_patch16_224,224,9.24,108.256,1,86.53,17.58,23.9\ndeit_base_distilled_patch16_224,224,9.21,108.526,1,87.34,17.68,24.05\nxception65,299,9.21,108.536,1,39.92,13.96,52.48\neva02_small_patch14_336,336,9.11,109.777,1,22.13,15.48,54.33\nxcit_tiny_12_p8_384,384,9.1,109.816,1,6.71,14.13,69.14\nconvnext_base,224,9.09,110.021,1,88.59,15.38,28.75\nvit_small_patch16_36x1_224,224,9.09,110.009,1,64.67,13.71,35.69\nresnext101_64x4d,224,9.01,110.983,1,83.46,15.52,31.21\nvit_small_patch16_18x2_224,224,9.0,111.03,1,64.67,13.71,35.69\ncoat_small,224,8.98,111.393,1,21.69,12.61,44.25\nseresnext101_64x4d,224,8.97,111.422,1,88.23,15.53,31.25\nconvformer_m36,224,8.95,111.669,1,57.05,12.89,42.05\nmobilenetv4_hybrid_large,448,8.95,111.682,1,37.76,10.74,48.61\nvolo_d2_224,224,8.92,112.137,1,58.68,14.34,41.34\nmvitv2_base_cls,224,8.91,112.248,1,65.44,10.23,40.65\ntresnet_xl,224,8.91,112.
222,1,78.44,15.2,15.34\nseresnet152d,256,8.88,112.573,1,66.84,15.42,30.56\npvt_v2_b5,224,8.87,112.77,1,81.96,11.76,50.92\ngmlp_b16_224,224,8.85,113.004,1,73.08,15.78,30.21\nefficientnetv2_rw_m,320,8.83,113.284,1,53.24,12.72,47.14\nmaxvit_small_tf_224,224,8.75,114.28,1,68.93,11.66,53.17\nconvnext_small,288,8.71,114.788,1,50.22,14.39,35.65\nmobilenetv4_conv_aa_large,544,8.63,115.877,1,32.59,14.19,64.79\nnextvit_small,384,8.63,115.898,1,31.76,17.26,57.14\ncait_xxs24_384,384,8.62,115.993,1,12.03,9.63,122.66\nseresnext101_32x8d,224,8.62,116.043,1,93.57,16.48,31.25\neva02_base_patch16_clip_224,224,8.6,116.214,1,86.26,17.62,26.32\nvit_base_patch16_rpn_224,224,8.56,116.754,1,86.54,17.49,23.75\nresnetrs152,256,8.55,116.946,1,86.62,15.59,30.83\nvit_base_patch16_xp_224,224,8.52,117.358,1,86.51,17.56,23.9\nseresnext101d_32x8d,224,8.51,117.483,1,93.59,16.72,32.05\nresnext101_32x8d,224,8.5,117.633,1,88.79,16.48,31.21\nefficientnet_b5,416,8.49,117.752,1,30.39,8.27,80.68\nvit_base_mci_224,224,8.47,118.081,1,86.35,17.73,24.65\ncaformer_m36,224,8.45,118.319,1,56.2,13.29,50.48\nseresnextaa101d_32x8d,224,8.35,119.799,1,93.59,17.25,34.16\nvit_relpos_base_patch16_224,224,8.34,119.824,1,86.43,17.51,24.97\nvit_base_patch16_224,224,8.33,120.013,1,86.57,17.58,23.9\nvit_base_patch16_siglip_224,224,8.33,120.05,1,92.88,17.73,24.06\nvit_base_patch16_siglip_gap_224,224,8.33,120.095,1,85.8,17.49,23.75\nvit_base_patch16_clip_quickgelu_224,224,8.32,120.215,1,86.19,17.58,23.9\nvit_base_patch16_224_miil,224,8.31,120.296,1,94.4,17.59,23.91\nvit_base_patch16_gap_224,224,8.29,120.54,1,86.57,17.49,25.59\nregnety_160,224,8.28,120.713,1,83.59,15.96,23.04\nvit_base_patch16_clip_224,224,8.28,120.79,1,86.57,17.58,23.9\nvit_relpos_base_patch16_rpn_224,224,8.28,120.749,1,86.41,17.51,24.97\nvit_mediumd_patch16_reg4_gap_256,256,8.27,120.819,1,64.11,17.87,37.57\nhalonet_h1,256,8.25,121.167,1,8.1,3.0,51.17\nvit_mediumd_patch16_rope_reg1_gap_256,256,8.24,121.316,1,63.95,17.65,37.02\nmaxvit_rmlp_small_rw_256,256,8.16,
122.576,1,64.9,14.15,66.09\nvit_relpos_base_patch16_cls_224,224,8.14,122.798,1,86.43,17.6,25.12\nvit_base_patch32_clip_448,448,8.12,123.199,1,88.34,17.93,23.9\npvt_v2_b4,224,8.09,123.551,1,62.56,10.14,53.74\nvit_relpos_base_patch16_clsgap_224,224,8.08,123.687,1,86.43,17.6,25.12\ncs3se_edgenet_x,320,8.05,124.251,1,50.72,18.01,20.21\ndavit_base,224,8.04,124.352,1,87.95,15.51,40.66\nsequencer2d_m,224,8.04,124.384,1,38.31,6.55,14.26\nswinv2_tiny_window16_256,256,8.03,124.542,1,28.35,6.68,39.02\nhrnet_w44,224,8.02,124.58,1,67.06,14.94,26.92\nresnet152,288,8.01,124.879,1,60.19,19.11,37.28\ntwins_svt_large,224,7.98,125.271,1,99.27,15.15,35.1\ndm_nfnet_f0,256,7.95,125.785,1,71.49,12.62,18.05\nxcit_small_12_p8_224,224,7.94,125.872,1,26.21,18.69,47.21\nconvmixer_768_32,224,7.92,126.272,1,21.11,19.55,25.95\nhrnet_w48_ssld,224,7.9,126.53,1,77.47,17.34,28.56\nnfnet_f0,256,7.86,127.261,1,71.49,12.62,18.05\nmobilevitv2_200,384,7.84,127.58,1,18.45,16.24,72.34\nregnety_120,288,7.81,128.044,1,51.82,20.06,35.34\nhgnetv2_b6,224,7.79,128.275,1,75.26,16.88,21.23\ndpn131,224,7.77,128.613,1,79.25,16.09,32.97\nnf_regnet_b5,456,7.75,129.064,1,49.74,11.7,61.95\nhrnet_w48,224,7.7,129.858,1,77.47,17.34,28.56\nxception71,299,7.68,130.193,1,42.34,18.09,69.92\nresnet200d,256,7.67,130.432,1,64.69,20.0,43.09\nflexivit_base,240,7.58,131.82,1,86.59,20.29,28.36\nefficientnet_b5,448,7.54,132.62,1,30.39,9.59,93.56\nxcit_medium_24_p16_224,224,7.5,133.298,1,84.4,16.13,31.71\nresnet50x4_clip_gap,288,7.49,133.398,1,65.62,19.57,34.11\ndensenet264d,224,7.46,134.027,1,72.74,13.57,14.0\nconvnextv2_base,224,7.45,134.283,1,88.72,15.38,28.75\nnest_base,224,7.42,134.753,1,67.72,17.96,53.39\nmaxxvit_rmlp_small_rw_256,256,7.37,135.578,1,66.01,14.67,58.38\nconvnext_base,256,7.35,135.957,1,88.59,20.09,37.55\ngcvit_base,224,7.34,136.128,1,90.32,14.87,55.48\nnest_base_jx,224,7.31,136.815,1,67.72,17.96,53.39\ntf_efficientnetv2_m,384,7.3,136.891,1,54.14,15.85,57.52\nregnetz_e8,320,7.24,138.164,1,57.7,15.46,63.94\nrdnet_base
,224,7.19,139.147,1,87.45,15.4,31.14\neva02_base_patch14_224,224,7.12,140.454,1,85.76,23.22,36.55\nswin_s3_small_224,224,7.11,140.533,1,49.74,9.43,37.84\nvit_large_patch32_224,224,7.1,140.777,1,305.51,15.39,13.3\neca_nfnet_l1,320,7.09,141.072,1,41.41,14.92,34.42\nseresnet152,288,7.09,140.934,1,66.82,19.11,37.34\nsamvit_base_patch16_224,224,7.04,141.973,1,86.46,17.54,24.54\nconvit_base,224,7.0,142.918,1,86.54,17.52,31.77\ninception_next_base,224,6.95,143.896,1,86.67,14.85,25.69\ntf_efficientnet_b5,456,6.95,143.755,1,30.39,10.46,98.86\ntiny_vit_21m_384,384,6.93,144.336,1,21.23,13.77,77.83\ncoatnet_2_rw_224,224,6.91,144.724,1,73.87,15.09,49.22\nresnetv2_50x1_bit,448,6.91,144.669,1,25.55,16.62,44.46\nseresnet200d,256,6.88,145.349,1,71.86,20.01,43.15\ncrossvit_15_dagger_408,408,6.87,145.643,1,28.5,21.45,95.05\nresnet152d,320,6.83,146.477,1,60.21,24.08,47.67\nswin_s3_base_224,224,6.79,147.184,1,71.13,13.69,48.26\ntresnet_m,448,6.77,147.771,1,31.39,22.99,29.21\ndpn107,224,6.76,147.81,1,86.92,18.38,33.46\nresnet50x4_clip,288,6.73,148.587,1,87.14,21.35,35.27\nresnetrs200,256,6.73,148.585,1,93.21,20.18,43.42\nswinv2_base_window12_192,192,6.73,148.54,1,109.28,11.9,39.72\nvit_base_r50_s16_224,224,6.73,148.472,1,97.89,21.66,35.28\necaresnet200d,256,6.71,148.953,1,64.69,20.0,43.15\nwide_resnet101_2,176,6.66,150.227,1,126.89,14.31,13.18\ncoatnet_rmlp_2_rw_224,224,6.64,150.497,1,73.88,15.18,54.78\nrepvgg_b3g4,224,6.64,150.669,1,83.83,17.89,15.1\nswinv2_cr_small_ns_224,224,6.64,150.577,1,49.7,9.08,50.27\nhgnet_base,224,6.62,150.974,1,71.58,25.14,15.47\nvit_base_patch16_siglip_256,256,6.61,151.197,1,92.93,23.44,33.63\nwide_resnet50_2,288,6.61,151.365,1,68.88,18.89,23.81\nvgg11,224,6.59,151.695,1,132.86,7.61,7.44\nvit_base_patch16_rope_reg1_gap_256,256,6.59,151.642,1,86.43,23.22,33.39\nvgg11_bn,224,6.56,152.446,1,132.87,7.62,7.44\nswinv2_cr_small_224,224,6.53,153.224,1,49.7,9.07,50.27\nvit_base_patch16_siglip_gap_256,256,6.52,153.278,1,85.84,23.13,33.23\nvit_base_patch16_reg4_gap_256,
256,6.5,153.776,1,86.62,23.5,33.89\nvit_small_patch8_224,224,6.49,153.965,1,21.67,22.44,80.84\nrepvgg_b2,224,6.48,154.38,1,89.02,20.45,12.9\nfocalnet_base_srf,224,6.46,154.81,1,88.15,15.28,35.01\nefficientvit_l2,384,6.45,155.065,1,63.71,20.45,57.01\nresnet200,288,6.45,155.123,1,64.67,24.91,53.21\ncrossvit_base_240,240,6.4,156.314,1,105.03,21.22,36.33\nvolo_d3_224,224,6.31,158.554,1,86.33,20.78,60.09\nlevit_512_s8,224,6.23,160.379,1,74.05,21.82,52.28\nseresnet152d,320,6.23,160.608,1,66.84,24.09,47.72\ncoatnet_2_224,224,6.2,161.346,1,74.68,16.5,52.67\nconvnext_base,288,6.15,162.453,1,88.59,25.43,47.53\nefficientnetv2_m,416,6.15,162.448,1,54.14,18.6,67.5\nvolo_d1_384,384,6.12,163.334,1,26.78,22.75,108.55\nresnetrs152,320,6.1,163.792,1,86.62,24.34,48.14\nswin_small_patch4_window7_224,224,6.07,164.677,1,49.61,8.77,27.47\nnextvit_base,384,6.06,164.953,1,44.82,24.64,73.95\nfocalnet_base_lrf,224,5.99,167.024,1,88.75,15.43,38.13\ncaformer_b36,224,5.93,168.58,1,98.75,23.22,67.3\nresnext101_64x4d,288,5.86,170.685,1,83.46,25.66,51.59\ncait_xxs36_384,384,5.85,171.01,1,17.37,14.35,183.7\nvit_medium_patch16_gap_384,384,5.79,172.541,1,39.03,26.08,67.54\nlevit_conv_512_s8,224,5.76,173.7,1,74.05,21.82,52.28\nvgg13_bn,224,5.76,173.557,1,133.05,11.33,12.25\nvitamin_base_224,224,5.76,173.46,1,87.72,22.68,52.77\nseresnext101_32x8d,288,5.75,173.901,1,93.57,27.24,51.63\nvgg13,224,5.75,173.979,1,133.05,11.31,12.25\nconvnext_small,384,5.72,174.665,1,50.22,25.58,63.37\nlegacy_senet154,224,5.72,174.756,1,115.09,20.77,38.69\nconvformer_s36,384,5.69,175.734,1,40.01,22.54,89.62\nefficientnetv2_rw_m,416,5.66,176.792,1,53.24,21.49,79.62\nvit_base_patch16_plus_240,240,5.64,177.413,1,117.56,27.41,33.08\nxcit_small_24_p16_384,384,5.63,177.511,1,47.67,26.72,68.58\nseresnext101d_32x8d,288,5.6,178.577,1,93.59,27.64,52.95\nseresnet200d,288,5.59,178.979,1,71.86,25.32,54.6\nvit_large_r50_s32_224,224,5.59,178.873,1,328.99,19.58,24.41\nsenet154,224,5.58,179.08,1,115.09,20.77,38.69\nseresnextaa101d_32x8d,288,5
.58,179.137,1,93.59,28.51,56.44\nvit_relpos_base_patch16_plus_240,240,5.55,180.097,1,117.38,27.3,34.33\necaresnet200d,288,5.53,180.897,1,64.69,25.31,54.59\nregnety_160,288,5.52,180.975,1,83.59,26.37,38.07\nsequencer2d_l,224,5.49,182.132,1,54.3,9.74,22.12\ncaformer_s36,384,5.43,184.228,1,39.3,26.08,150.33\nhrnet_w48_ssld,288,5.38,186.017,1,77.47,28.66,47.21\nhgnetv2_b6,288,5.37,186.229,1,75.26,27.9,35.09\nresnet200d,320,5.33,187.619,1,64.69,31.25,67.33\nwide_resnet101_2,224,5.22,191.557,1,126.89,22.8,21.23\nxcit_tiny_24_p8_384,384,5.16,193.796,1,12.11,27.05,132.95\neca_nfnet_l2,320,5.15,194.064,1,56.72,20.95,47.43\nmaxvit_tiny_tf_384,384,5.1,196.11,1,30.98,17.53,123.42\nswinv2_small_window16_256,256,5.1,196.241,1,49.73,12.82,66.29\nvgg16,224,5.08,196.881,1,138.36,15.47,13.56\nvgg16_bn,224,5.05,198.104,1,138.37,15.5,13.56\nswinv2_small_window8_256,256,5.04,198.246,1,49.73,11.58,40.14\nnfnet_f1,224,5.02,199.032,1,132.63,17.87,22.94\nseresnet269d,256,5.02,199.306,1,113.67,26.59,53.6\nregnetx_320,224,5.0,199.835,1,107.81,31.81,36.3\nresnetrs270,256,4.99,200.478,1,129.86,27.06,55.84\nconvnextv2_base,288,4.98,200.977,1,88.72,25.43,47.53\nswinv2_cr_small_ns_256,256,4.98,200.886,1,49.7,12.07,76.21\ndm_nfnet_f1,224,4.96,201.546,1,132.63,17.87,22.94\nconvformer_b36,224,4.95,201.817,1,99.88,22.69,56.06\nconvnext_base,320,4.95,201.819,1,88.59,31.39,58.68\ntf_efficientnetv2_m,480,4.89,204.622,1,54.14,24.76,89.84\nhrnet_w64,224,4.87,205.177,1,128.06,28.97,35.09\ncait_xs24_384,384,4.86,205.554,1,26.67,19.28,183.98\nnextvit_large,384,4.84,206.508,1,57.87,32.03,90.76\nresnetrs200,320,4.81,207.813,1,93.21,31.51,67.81\nmaxvit_rmlp_base_rw_224,224,4.76,210.09,1,116.14,23.15,92.64\ncrossvit_18_dagger_408,408,4.66,214.577,1,44.61,32.47,124.87\nnasnetalarge,331,4.66,214.708,1,88.75,23.89,90.56\nswinv2_cr_base_224,224,4.64,215.595,1,87.88,15.86,59.66\nswinv2_cr_base_ns_224,224,4.62,216.593,1,87.88,15.86,59.66\nmaxxvitv2_rmlp_base_rw_224,224,4.57,218.631,1,116.09,24.2,62.77\nvgg19,224,4.57,2
18.781,1,143.67,19.63,14.86\nvit_so150m_patch16_reg4_gap_256,256,4.54,220.434,1,134.13,36.75,53.21\nmaxvit_base_tf_224,224,4.51,221.617,1,119.47,24.04,95.01\nvgg19_bn,224,4.51,221.568,1,143.68,19.66,14.86\nseresnextaa101d_32x8d,320,4.48,222.957,1,93.59,35.19,69.67\nresnest200e,320,4.45,224.812,1,70.2,35.69,82.78\nvit_so150m_patch16_reg4_map_256,256,4.44,225.038,1,141.48,37.18,53.68\nconvnext_large,224,4.34,230.221,1,197.77,34.4,43.13\npnasnet5large,331,4.33,231.035,1,86.06,25.04,92.89\nrepvgg_b3,224,4.33,230.866,1,123.09,29.16,15.1\nxcit_small_24_p8_224,224,4.32,231.329,1,47.63,35.81,90.78\nresnetv2_101x1_bit,448,4.3,232.409,1,44.54,31.65,64.93\ncoat_lite_medium_384,384,4.23,236.111,1,44.57,28.73,116.7\nconvnextv2_large,224,4.19,238.786,1,197.96,34.4,43.13\nvit_betwixt_patch16_reg4_gap_384,384,4.19,238.533,1,60.6,39.71,85.28\nseresnet269d,288,4.18,239.28,1,113.67,33.65,67.81\nhgnet_base,288,4.16,240.305,1,71.58,41.55,25.57\nswin_base_patch4_window7_224,224,4.15,240.831,1,87.77,15.47,36.63\ndavit_large,224,4.13,241.977,1,196.81,34.6,60.99\nefficientvit_l3,224,3.98,251.253,1,246.04,27.62,39.16\nregnety_320,224,3.93,254.497,1,145.05,32.34,30.26\nxcit_large_24_p16_224,224,3.92,254.84,1,189.1,35.86,47.27\ntf_efficientnet_b6,528,3.91,255.971,1,43.04,19.4,167.39\nefficientnet_b6,528,3.87,258.445,1,43.04,19.4,167.39\neca_nfnet_l2,384,3.83,260.863,1,56.72,30.05,68.28\nswinv2_base_window12to16_192to256,256,3.83,260.995,1,87.92,22.02,84.71\ntresnet_l,448,3.79,263.846,1,55.99,43.59,47.56\nswinv2_large_window12_192,192,3.75,266.366,1,228.77,26.17,56.53\nswinv2_base_window16_256,256,3.74,267.172,1,87.92,22.02,84.71\ntf_efficientnetv2_l,384,3.57,279.821,1,118.52,36.1,101.16\nconvnext_base,384,3.56,280.744,1,88.59,45.21,84.49\necaresnet269d,320,3.55,281.653,1,102.09,41.53,83.69\nswinv2_cr_tiny_384,384,3.54,282.629,1,28.33,15.34,161.01\ninception_next_base,384,3.52,284.303,1,86.67,43.64,75.48\nvit_mediumd_patch16_reg4_gap_384,384,3.51,284.717,1,64.27,43.67,113.51\nswinv2_base_window
8_256,256,3.5,285.642,1,87.92,20.37,52.59\nefficientnetv2_l,384,3.49,286.334,1,118.52,36.1,101.16\nhiera_large_224,224,3.47,288.334,1,213.74,40.34,83.37\nconvformer_m36,384,3.46,289.231,1,57.05,37.87,123.56\neca_nfnet_l3,352,3.46,289.136,1,72.04,32.57,73.12\ndeit3_base_patch16_384,384,3.43,291.108,1,86.88,55.54,101.56\ndeit_base_patch16_384,384,3.41,293.52,1,86.86,55.54,101.56\ndeit_base_distilled_patch16_384,384,3.38,295.425,1,87.63,55.65,101.82\ncait_s24_384,384,3.35,298.48,1,47.06,32.17,245.31\nmixer_l16_224,224,3.34,299.523,1,208.2,44.6,41.69\ncoatnet_rmlp_3_rw_224,224,3.31,302.243,1,165.15,33.56,79.47\nconvnext_large_mlp,256,3.29,304.086,1,200.13,44.94,56.33\nregnety_160,384,3.29,303.803,1,83.59,46.87,67.67\ncoatnet_3_rw_224,224,3.27,305.713,1,181.81,33.44,73.83\nresnetrs350,288,3.27,306.236,1,163.96,43.67,87.09\ndm_nfnet_f1,320,3.26,307.082,1,132.63,35.97,46.77\nefficientvit_l3,256,3.26,306.891,1,246.04,36.06,50.98\nconvmixer_1536_20,224,3.25,307.72,1,51.63,48.68,33.03\nvolo_d4_224,224,3.25,307.319,1,192.96,44.34,80.22\nrdnet_large,224,3.22,310.919,1,186.27,34.74,46.67\nvit_small_patch14_dinov2,518,3.21,311.039,1,22.06,46.76,198.79\nnfnet_f1,320,3.19,313.353,1,132.63,35.97,46.77\nbeit_base_patch16_384,384,3.18,314.371,1,86.74,55.54,101.56\nvit_small_patch14_reg4_dinov2,518,3.15,316.948,1,22.06,46.95,199.77\ncoatnet_3_224,224,3.12,320.7,1,166.97,36.56,79.01\nxcit_medium_24_p16_384,384,3.09,324.036,1,84.4,47.39,91.64\ncaformer_m36,384,3.08,324.221,1,56.2,42.11,196.35\nresnetrs270,352,3.03,329.649,1,129.86,51.13,105.48\nmaxvit_small_tf_384,384,3.0,332.88,1,69.02,35.87,183.65\nvolo_d2_384,384,3.0,333.546,1,58.87,46.17,184.51\nresnext101_32x16d,224,2.99,334.318,1,194.03,36.27,51.18\ndm_nfnet_f2,256,2.97,336.324,1,193.78,33.76,41.85\nvit_base_patch16_384,384,2.97,337.009,1,86.86,55.54,101.56\nmvitv2_large,224,2.95,338.418,1,217.99,43.87,112.02\nvit_base_patch16_siglip_gap_384,384,2.95,339.098,1,86.09,55.43,101.3\nnfnet_f2,256,2.94,340.191,1,193.78,33.76,41.85\nvit_b
ase_patch16_siglip_384,384,2.94,339.662,1,93.18,56.12,102.2\nvit_base_patch16_clip_384,384,2.92,342.185,1,86.86,55.54,101.56\nmaxxvitv2_rmlp_large_rw_224,224,2.91,344.02,1,215.42,44.14,87.15\ntiny_vit_21m_512,512,2.91,343.403,1,21.27,27.02,177.93\necaresnet269d,352,2.87,347.846,1,102.09,50.25,101.25\nconvnextv2_base,384,2.86,349.626,1,88.72,45.21,84.49\nxcit_small_12_p8_384,384,2.81,356.188,1,26.21,54.92,138.29\nconvnext_large,288,2.8,357.134,1,197.77,56.87,71.29\nconvnextv2_large,288,2.79,358.749,1,197.96,56.87,71.29\nvit_base_patch16_18x2_224,224,2.78,359.751,1,256.73,52.51,71.38\nmvitv2_large_cls,224,2.76,362.694,1,234.58,42.17,111.69\nbeit_large_patch16_224,224,2.75,364.218,1,304.43,61.6,63.52\ntresnet_xl,448,2.73,366.436,1,78.44,60.77,61.31\ndeit3_large_patch16_224,224,2.7,370.995,1,304.37,61.6,63.52\nvit_large_patch32_384,384,2.7,369.742,1,306.63,45.31,43.86\nbeitv2_large_patch16_224,224,2.69,372.287,1,304.43,61.6,63.52\ncoatnet_rmlp_2_rw_384,384,2.64,379.078,1,73.88,47.69,209.43\nmaxvit_tiny_tf_512,512,2.59,386.591,1,31.05,33.49,257.59\nefficientvit_l3,320,2.57,389.302,1,246.04,56.32,79.34\nmaxvit_large_tf_224,224,2.56,390.951,1,211.79,43.68,127.35\nswinv2_cr_large_224,224,2.52,397.562,1,196.68,35.1,78.42\nswin_large_patch4_window7_224,224,2.47,404.245,1,196.53,34.53,54.94\nresnetrs420,320,2.41,415.274,1,191.89,64.2,126.56\nxcit_medium_24_p8_224,224,2.4,417.212,1,84.32,63.53,121.23\nvit_base_r50_s16_384,384,2.39,417.549,1,98.95,67.43,135.03\nresnet50x16_clip_gap,384,2.37,422.331,1,136.2,70.32,100.64\ntf_efficientnetv2_l,480,2.37,421.363,1,118.52,56.4,157.99\nconvnext_xlarge,224,2.35,425.759,1,350.2,60.98,57.5\nefficientnetv2_xl,384,2.35,425.352,1,208.12,52.81,139.2\ndavit_huge,224,2.34,426.69,1,348.92,61.23,81.32\ntf_efficientnetv2_xl,384,2.34,427.212,1,208.12,52.81,139.2\nefficientnetv2_l,480,2.31,432.762,1,118.52,56.4,157.99\nresnetv2_50x3_bit,224,2.31,433.391,1,217.32,37.06,33.34\ncait_s36_384,384,2.3,433.947,1,68.37,47.99,367.4\neca_nfnet_l3,448,2.29,436.
231,1,72.04,52.55,118.4\nseresnextaa201d_32x8d,320,2.28,437.688,1,149.39,70.22,138.71\nvit_large_patch16_224,224,2.26,441.632,1,304.33,61.6,63.52\neva_large_patch14_196,196,2.23,448.812,1,304.14,61.57,63.52\nconvnext_large_mlp,320,2.19,456.263,1,200.13,70.21,88.02\nrepvgg_d2se,320,2.17,461.059,1,133.33,74.57,46.82\nresnet50x16_clip,384,2.17,461.201,1,167.33,74.9,103.54\nvit_large_r50_s32_384,384,2.17,461.7,1,329.09,57.43,76.52\nconvformer_b36,384,2.16,462.452,1,99.88,66.67,164.75\nflexivit_large,240,2.15,465.028,1,304.36,70.99,75.39\ntf_efficientnet_b7,600,2.15,465.8,1,66.35,38.33,289.94\nswinv2_large_window12to16_192to256,256,2.12,471.827,1,196.74,47.81,121.53\nvit_base_patch8_224,224,2.11,475.013,1,86.58,78.22,161.69\nefficientnet_b7,600,2.09,478.281,1,66.35,38.33,289.94\ncaformer_b36,384,2.05,488.404,1,98.75,72.33,261.79\nresnest269e,416,2.03,492.91,1,110.93,77.69,171.98\nresnetrs350,384,2.03,492.902,1,163.96,77.59,154.74\nresnetv2_152x2_bit,224,2.01,496.395,1,236.34,46.95,45.11\nregnety_640,224,2.0,500.583,1,281.38,64.16,42.5\nvolo_d5_224,224,1.96,508.876,1,295.46,72.4,118.11\nnfnet_f2,352,1.94,514.863,1,193.78,63.22,79.06\ndm_nfnet_f2,352,1.92,521.607,1,193.78,63.22,79.06\nefficientvit_l3,384,1.9,526.613,1,246.04,81.08,114.02\ncoatnet_4_224,224,1.84,544.382,1,275.43,62.48,129.26\nresmlp_big_24_224,224,1.83,545.885,1,129.14,100.23,87.31\nvit_large_patch14_xp_224,224,1.82,549.68,1,304.06,81.01,88.79\nvit_large_patch14_clip_224,224,1.8,556.186,1,304.2,81.08,88.79\nvit_large_patch14_clip_quickgelu_224,224,1.8,555.306,1,303.97,81.08,88.79\nvit_large_patch16_siglip_gap_256,256,1.78,560.426,1,303.36,80.8,88.34\nvit_large_patch14_224,224,1.77,564.22,1,304.2,81.08,88.79\nvit_large_patch16_siglip_256,256,1.76,569.531,1,315.96,81.34,88.88\nmaxxvitv2_rmlp_base_rw_384,384,1.75,571.868,1,116.09,72.98,213.74\nswinv2_cr_small_384,384,1.74,576.224,1,49.7,29.7,298.03\neva02_large_patch14_clip_224,224,1.73,579.213,1,304.11,81.18,97.2\nvitamin_large2_224,224,1.72,580.708,1,333.58,
75.05,112.83\neva02_large_patch14_224,224,1.71,585.819,1,303.27,81.15,97.2\nnfnet_f3,320,1.71,584.444,1,254.92,68.77,83.93\ndm_nfnet_f3,320,1.69,592.094,1,254.92,68.77,83.93\nvitamin_large_224,224,1.67,597.716,1,333.32,75.05,112.83\nconvnext_large,384,1.64,610.931,1,197.77,101.1,126.74\nregnety_320,384,1.62,615.677,1,145.05,95.0,88.87\nconvnextv2_large,384,1.61,621.538,1,197.96,101.1,126.74\neva02_base_patch14_448,448,1.61,622.76,1,87.12,107.11,259.14\nseresnextaa201d_32x8d,384,1.59,626.967,1,149.39,101.11,199.72\nconvnext_large_mlp,384,1.56,642.339,1,200.13,101.11,126.74\nxcit_large_24_p16_384,384,1.55,645.297,1,189.1,105.35,137.17\nmaxvit_base_tf_384,384,1.52,656.799,1,119.65,73.8,332.9\nmaxvit_small_tf_512,512,1.52,658.029,1,69.13,67.26,383.77\nvit_so400m_patch14_siglip_224,224,1.52,656.824,1,427.68,110.26,106.73\nconvnext_xlarge,288,1.51,662.437,1,350.2,100.8,95.05\nswin_base_patch4_window12_384,384,1.5,667.292,1,87.9,47.19,134.78\nvit_base_patch16_siglip_512,512,1.5,667.76,1,93.52,108.22,247.74\nvit_base_patch16_siglip_gap_512,512,1.48,674.572,1,86.43,107.0,246.15\nxcit_small_24_p8_384,384,1.48,673.402,1,47.63,105.24,265.91\nresnetrs420,416,1.45,689.205,1,191.89,108.45,213.79\nvit_so400m_patch14_siglip_gap_224,224,1.45,688.541,1,412.44,109.57,106.13\nvolo_d3_448,448,1.43,697.962,1,86.63,96.33,446.83\nsam2_hiera_tiny,896,1.37,731.987,1,26.85,99.86,384.63\ntf_efficientnetv2_xl,512,1.37,730.895,1,208.12,93.85,247.32\nefficientnetv2_xl,512,1.35,739.886,1,208.12,93.85,247.32\nswinv2_cr_base_384,384,1.35,738.658,1,87.88,50.57,333.68\nvitamin_large2_256,256,1.34,744.299,1,333.64,99.0,154.99\nmaxvit_rmlp_base_rw_384,384,1.33,749.061,1,116.14,70.97,318.95\nvitamin_large_256,256,1.33,751.941,1,333.38,99.0,154.99\nswinv2_base_window12to24_192to384,384,1.3,766.277,1,87.92,55.25,280.36\nefficientnet_b8,672,1.27,785.04,1,87.41,63.48,442.89\ntf_efficientnet_b8,672,1.27,790.305,1,87.41,63.48,442.89\nconvnextv2_huge,224,1.26,791.531,1,660.29,115.0,79.07\nrdnet_large,384,1.25,80
0.743,1,186.27,102.09,137.13\nresnetv2_101x3_bit,224,1.25,800.082,1,387.93,71.23,48.7\nmaxvit_xlarge_tf_224,224,1.24,808.144,1,506.99,97.52,191.04\nresnext101_32x32d,224,1.19,839.906,1,468.53,87.29,91.12\nxcit_large_24_p8_224,224,1.16,863.559,1,188.93,141.23,181.56\nfocalnet_large_fl3,384,1.15,865.874,1,239.13,105.06,168.04\nfocalnet_large_fl4,384,1.14,875.284,1,239.32,105.2,181.78\ndeit3_huge_patch14_224,224,1.13,882.197,1,632.13,167.4,139.41\nhiera_huge_224,224,1.12,894.408,1,672.78,124.85,150.95\nvitamin_xlarge_256,256,1.11,900.056,1,436.06,130.13,177.37\ndm_nfnet_f3,416,1.1,905.365,1,254.92,115.58,141.78\nnfnet_f3,416,1.1,906.772,1,254.92,115.58,141.78\nsam2_hiera_small,896,1.09,917.068,1,33.95,123.99,442.63\nvit_base_patch14_dinov2,518,1.05,950.436,1,86.58,151.71,397.58\nmvitv2_huge_cls,224,1.04,961.324,1,694.8,120.67,243.63\nswinv2_cr_huge_224,224,1.03,974.869,1,657.83,115.97,121.08\nvit_base_patch14_reg4_dinov2,518,1.03,974.729,1,86.58,152.25,399.53\ndm_nfnet_f4,384,1.02,983.149,1,316.07,122.14,147.57\nnfnet_f4,384,1.02,980.681,1,316.07,122.14,147.57\ndeit3_large_patch16_384,384,0.99,1013.815,1,304.76,191.21,270.24\nregnety_1280,224,0.99,1007.166,1,644.81,127.66,71.58\nresnetv2_152x2_bit,384,0.97,1026.846,1,236.34,136.16,132.56\nbeit_large_patch16_384,384,0.96,1039.846,1,305.0,191.21,270.24\nfocalnet_huge_fl3,224,0.95,1048.054,1,745.28,118.26,104.8\nfocalnet_huge_fl4,224,0.93,1073.843,1,686.46,118.9,113.34\nvit_huge_patch14_clip_quickgelu_224,224,0.93,1078.292,1,632.08,167.4,139.41\nvit_huge_patch14_gap_224,224,0.93,1077.981,1,630.76,166.73,138.74\nvit_huge_patch14_xp_224,224,0.93,1077.577,1,631.8,167.3,139.41\nvit_huge_patch14_224,224,0.92,1088.516,1,630.76,167.4,139.41\nvit_huge_patch14_clip_224,224,0.92,1086.013,1,632.05,167.4,139.41\nmaxvit_large_tf_384,384,0.88,1132.022,1,212.03,132.55,445.84\nresnetv2_50x3_bit,448,0.87,1144.567,1,217.32,145.7,133.37\nswin_large_patch4_window12_384,384,0.87,1149.611,1,196.74,104.08,202.16\ndavit_giant,224,0.85,1172.447,1
,1406.47,192.92,153.06\nconvnext_xlarge,384,0.83,1200.268,1,350.2,179.2,168.99\nregnety_640,384,0.82,1218.504,1,281.38,188.47,124.83\ndavit_base_fl,768,0.81,1231.237,1,90.37,190.32,530.15\neva02_large_patch14_clip_336,336,0.81,1234.141,1,304.43,191.34,289.13\nswinv2_cr_large_384,384,0.81,1234.754,1,196.68,108.96,404.96\nvit_large_patch14_clip_quickgelu_336,336,0.81,1231.07,1,304.29,191.11,270.24\nvit_large_patch16_384,384,0.81,1238.91,1,304.72,191.21,270.24\nvitamin_large2_336,336,0.81,1231.074,1,333.83,175.72,307.47\nxcit_medium_24_p8_384,384,0.81,1235.266,1,84.32,186.67,354.73\nvit_large_patch16_siglip_gap_384,384,0.8,1247.934,1,303.69,190.85,269.55\nvitamin_large_336,336,0.8,1247.895,1,333.57,175.72,307.47\ncoatnet_5_224,224,0.79,1268.134,1,687.47,145.49,194.24\neva_large_patch14_336,336,0.79,1259.34,1,304.53,191.1,270.24\nvit_large_patch14_clip_336,336,0.79,1262.838,1,304.53,191.11,270.24\nvit_large_patch16_siglip_384,384,0.79,1261.685,1,316.28,192.07,270.75\nconvnextv2_huge,288,0.78,1279.004,1,660.29,190.1,130.7\nmaxvit_base_tf_512,512,0.78,1281.906,1,119.88,138.02,703.99\ncait_m36_384,384,0.77,1299.355,1,271.22,173.11,734.81\nnfnet_f5,416,0.77,1305.878,1,377.21,170.71,204.56\nvit_giant_patch16_gap_224,224,0.76,1314.372,1,1011.37,202.46,139.26\nvolo_d4_448,448,0.76,1308.242,1,193.41,197.13,527.35\nconvnext_xxlarge,256,0.75,1335.83,1,846.47,198.09,124.45\ndm_nfnet_f5,416,0.75,1338.347,1,377.21,170.71,204.56\nswinv2_large_window12to24_192to384,384,0.72,1379.956,1,196.74,116.15,407.83\nresnetv2_152x2_bit,448,0.71,1412.665,1,236.34,184.99,180.43\nfocalnet_xlarge_fl3,384,0.67,1482.114,1,408.79,185.61,223.99\nfocalnet_xlarge_fl4,384,0.65,1538.866,1,409.03,185.79,242.31\nvitamin_xlarge_336,336,0.65,1535.951,1,436.06,230.18,347.33\ndm_nfnet_f4,512,0.63,1582.68,1,316.07,216.26,262.26\nnfnet_f4,512,0.63,1585.327,1,316.07,216.26,262.26\nsam2_hiera_base_plus,896,0.63,1575.739,1,68.68,227.48,828.88\nvit_giant_patch14_224,224,0.62,1617.497,1,1012.61,267.18,192.64\nvitamin_la
rge_384,384,0.61,1650.577,1,333.71,234.44,440.16\neva_giant_patch14_224,224,0.6,1653.439,1,1012.56,267.18,192.64\ntf_efficientnet_l2,475,0.6,1656.514,1,480.31,172.11,609.89\nvit_giant_patch14_clip_224,224,0.6,1656.244,1,1012.65,267.18,192.64\nvitamin_large2_384,384,0.6,1661.817,1,333.97,234.44,440.16\nresnet50x64_clip_gap,448,0.59,1697.2,1,365.03,253.96,233.22\neva_giant_patch14_clip_224,224,0.58,1723.985,1,1012.59,267.18,192.64\nresnet50x64_clip,448,0.58,1738.21,1,420.38,265.02,239.13\nnfnet_f6,448,0.56,1772.805,1,438.36,229.7,273.62\ndm_nfnet_f6,448,0.55,1813.744,1,438.36,229.7,273.62\nvit_so400m_patch14_siglip_384,384,0.5,2008.52,1,428.23,335.4,452.89\nvit_so400m_patch14_siglip_gap_384,384,0.49,2032.811,1,412.99,333.46,451.19\nbeit_large_patch16_512,512,0.48,2069.069,1,305.67,362.24,656.39\ndm_nfnet_f5,544,0.48,2090.56,1,377.21,290.97,349.71\nnfnet_f5,544,0.48,2077.094,1,377.21,290.97,349.71\nvolo_d5_448,448,0.48,2089.076,1,295.91,315.06,737.92\nresnetv2_101x3_bit,448,0.47,2137.522,1,387.93,280.33,194.78\nconvnextv2_huge,384,0.45,2200.78,1,660.29,337.96,232.35\nnfnet_f7,480,0.45,2235.058,1,499.5,300.08,355.86\nmaxvit_xlarge_tf_384,384,0.44,2276.68,1,475.32,292.78,668.76\nresnetv2_152x4_bit,224,0.44,2264.656,1,936.53,186.9,90.22\neva02_large_patch14_448,448,0.42,2395.984,1,305.08,362.33,689.95\nregnety_1280,384,0.41,2419.045,1,644.81,374.99,210.2\nvit_huge_patch14_clip_336,336,0.41,2463.126,1,632.46,390.97,407.54\nxcit_large_24_p8_384,384,0.38,2625.871,1,188.93,415.0,531.82\nnfnet_f6,576,0.37,2705.399,1,438.36,378.69,452.2\ndm_nfnet_f6,576,0.36,2764.673,1,438.36,378.69,452.2\nvolo_d5_512,512,0.35,2888.125,1,296.09,425.09,1105.37\nvit_gigantic_patch14_224,224,0.33,3043.914,1,1844.44,483.95,275.37\nvit_gigantic_patch14_clip_224,224,0.33,3054.145,1,1844.91,483.96,275.37\nvit_so400m_patch14_siglip_gap_448,448,0.33,3055.238,1,413.33,487.18,764.26\nswinv2_cr_huge_384,384,0.32,3112.935,1,657.94,352.04,583.18\nvit_huge_patch14_clip_quickgelu_378,378,0.32,3153.715,1,632.68
,503.79,572.79\ncait_m48_448,448,0.31,3237.951,1,356.46,329.41,1708.23\nconvnextv2_huge,512,0.31,3235.039,1,660.29,600.81,413.07\nvit_huge_patch14_clip_378,378,0.31,3192.651,1,632.68,503.79,572.79\nvit_large_patch14_dinov2,518,0.3,3283.515,1,304.37,507.15,1058.82\nvit_large_patch14_reg4_dinov2,518,0.3,3302.339,1,304.37,508.9,1064.02\nnfnet_f7,608,0.29,3432.385,1,499.5,480.39,570.85\nvit_huge_patch16_gap_448,448,0.29,3444.702,1,631.67,544.7,636.83\nswinv2_cr_giant_224,224,0.28,3558.238,1,2598.76,483.85,309.15\neva_giant_patch14_336,336,0.25,3940.881,1,1013.01,620.64,550.67\nsamvit_base_patch16,1024,0.24,4123.097,1,89.67,486.43,1343.27\ndavit_huge_fl,768,0.23,4324.634,1,360.64,744.84,1060.3\nmaxvit_xlarge_tf_512,512,0.22,4520.247,1,475.77,534.14,1413.22\nefficientnet_l2,800,0.21,4705.895,1,480.31,479.12,1707.39\nregnety_2560,384,0.21,4775.125,1,1282.6,747.83,296.49\ntf_efficientnet_l2,800,0.21,4750.597,1,480.31,479.12,1707.39\nresnetv2_152x4_bit,480,0.17,5875.601,1,936.53,844.84,414.26\nsam2_hiera_large,1024,0.16,6214.186,1,212.15,907.48,2190.34\neva02_enormous_patch14_clip_224,224,0.14,6939.876,1,4350.56,1132.46,497.58\nvit_giant_patch14_dinov2,518,0.1,10207.402,1,1136.48,1784.2,2757.89\nvit_giant_patch14_reg4_dinov2,518,0.1,10361.576,1,1136.48,1790.08,2771.21\neva_giant_patch14_560,560,0.09,10775.568,1,1014.45,1906.76,2577.17\nsamvit_large_patch16,1024,0.09,11652.888,1,308.28,1493.86,2553.78\nswinv2_cr_giant_384,384,0.09,11052.423,1,2598.76,1450.71,1394.86\nvit_so400m_patch14_siglip_gap_896,896,0.06,16814.851,1,416.87,2731.49,8492.88\nsamvit_huge_patch16,1024,0.05,21270.331,1,637.03,2982.23,3428.16\n"
  },
  {
    "path": "results/benchmark-infer-fp32-nchw-pt240-cpu-i9_10940x-dynamo.csv",
    "content": "model,infer_img_size,infer_samples_per_sec,infer_step_time,infer_batch_size,param_count,infer_gmacs,infer_macts\ntest_vit,160,707.29,1.405,1,0.37,0.04,0.48\ntest_byobnet,160,682.63,1.456,1,0.46,0.03,0.43\ntest_efficientnet,160,642.73,1.547,1,0.36,0.06,0.55\nlcnet_035,224,382.51,2.604,1,1.64,0.03,1.04\nresnet10t,176,353.93,2.811,1,5.44,0.7,1.51\ntf_mobilenetv3_small_minimal_100,224,353.74,2.817,1,2.04,0.06,1.41\nlcnet_050,224,342.17,2.912,1,1.88,0.05,1.26\nmobilenetv3_small_050,224,310.68,3.207,1,1.59,0.03,0.92\ntinynet_e,106,298.2,3.341,1,2.04,0.03,0.69\nlcnet_075,224,288.24,3.457,1,2.36,0.1,1.99\nlcnet_150,224,281.14,3.544,1,4.5,0.34,3.79\nlcnet_100,224,263.86,3.777,1,2.95,0.16,2.52\nmobilenetv2_035,224,263.63,3.783,1,1.68,0.07,2.86\nmobilenetv3_small_075,224,253.25,3.937,1,2.04,0.05,1.3\nlevit_128s,224,252.75,3.943,1,7.78,0.31,1.88\nmobilenetv3_small_100,224,249.8,3.991,1,2.54,0.06,1.42\nmobilenetv1_100,224,248.33,4.013,1,4.23,0.58,5.04\nmobilenetv4_conv_small,224,246.85,4.038,1,3.77,0.19,1.97\nregnetx_002,224,246.41,4.047,1,2.68,0.2,2.16\ntf_mobilenetv3_small_100,224,242.79,4.106,1,2.54,0.06,1.42\ntf_mobilenetv3_small_075,224,239.83,4.157,1,2.04,0.05,1.3\nmobilenetv2_050,224,237.22,4.205,1,1.97,0.1,3.64\nmobilenetv1_125,224,236.41,4.215,1,6.27,0.89,6.3\nmobilenetv1_100h,224,232.23,4.292,1,5.28,0.63,5.09\nmnasnet_050,224,220.94,4.515,1,2.22,0.11,3.07\nresnet18,160,218.15,4.568,1,11.69,0.93,1.27\nese_vovnet19b_slim,224,217.47,4.584,1,3.17,1.69,3.52\nmobilenetv4_conv_small,256,215.85,4.62,1,3.77,0.25,2.57\nefficientvit_b0,224,210.08,4.748,1,3.41,0.1,2.87\nmobilenetv1_125,256,206.54,4.827,1,6.27,1.16,8.23\nmnasnet_small,224,204.46,4.879,1,2.03,0.07,2.16\ntinynet_d,152,200.46,4.976,1,2.34,0.05,1.42\nmobilenetv2_075,224,198.9,5.015,1,2.64,0.22,5.86\nmobilenetv1_100h,256,196.41,5.077,1,5.28,0.82,6.65\nmnasnet_075,224,196.4,5.078,1,3.17,0.23,4.77\nlevit_conv_128s,224,195.14,5.111,1,7.78,0.31,1.88\nmobilenetv1_100,256,193.77,5.146,1,4.23,0.76,6.59\nhardcor
enas_a,224,192.83,5.172,1,5.26,0.23,4.38\nmobilenetv3_large_075,224,190.56,5.234,1,3.99,0.16,4.0\nlevit_192,224,190.44,5.237,1,10.95,0.66,3.2\nese_vovnet19b_slim_dw,224,190.32,5.24,1,1.9,0.4,5.28\nlevit_128,224,188.71,5.285,1,9.21,0.41,2.71\nmobilenetv2_100,224,188.22,5.299,1,3.5,0.31,6.68\nmnasnet_100,224,184.21,5.415,1,4.38,0.33,5.46\npit_ti_distilled_224,224,182.36,5.47,1,5.1,0.71,6.23\ndeit_tiny_patch16_224,224,181.9,5.484,1,5.72,1.26,5.97\nvit_tiny_patch16_224,224,181.68,5.49,1,5.72,1.26,5.97\nsemnasnet_050,224,181.4,5.5,1,2.08,0.11,3.44\nefficientnet_lite0,224,181.39,5.499,1,4.65,0.4,6.74\ngernet_s,224,180.68,5.52,1,8.17,0.75,2.65\nvit_tiny_r_s16_p8_224,224,180.34,5.53,1,6.34,0.44,2.06\nresnet10t,224,179.52,5.555,1,5.44,1.1,2.43\ndeit_tiny_distilled_patch16_224,224,178.78,5.579,1,5.91,1.27,6.01\nhgnetv2_b0,224,178.19,5.598,1,6.0,0.33,2.12\nmobilenet_edgetpu_v2_xs,224,176.26,5.66,1,4.46,0.7,4.8\nmobilenetv3_large_100,224,176.25,5.659,1,5.48,0.23,4.41\nmobilenetv3_rw,224,175.76,5.676,1,5.48,0.23,4.41\ntf_mobilenetv3_large_minimal_100,224,174.07,5.73,1,3.92,0.22,4.4\nmixer_s32_224,224,170.9,5.836,1,19.1,1.0,2.28\ncs3darknet_focus_s,256,167.84,5.944,1,3.27,0.69,2.7\npit_ti_224,224,167.47,5.957,1,4.85,0.7,6.19\nmobilenetv2_140,224,163.23,6.112,1,6.11,0.6,9.57\nmobilenet_edgetpu_100,224,160.55,6.215,1,4.09,1.0,5.75\ntf_mobilenetv3_large_075,224,159.49,6.256,1,3.99,0.16,4.0\ncs3darknet_s,256,158.87,6.28,1,3.28,0.72,2.97\nhardcorenas_b,224,158.23,6.306,1,5.18,0.26,5.09\nvit_small_patch32_224,224,158.12,6.31,1,22.88,1.15,2.5\nese_vovnet19b_dw,224,157.95,6.316,1,6.54,1.34,8.25\nregnety_002,224,157.24,6.345,1,3.16,0.2,2.17\nedgenext_xx_small,256,156.86,6.364,1,1.33,0.26,3.33\ndla46_c,224,155.31,6.425,1,1.3,0.58,4.5\nresnet14t,176,155.29,6.425,1,10.08,1.07,3.61\nhardcorenas_c,224,153.28,6.51,1,5.52,0.28,5.01\nregnetx_004_tv,224,150.43,6.634,1,5.5,0.42,3.17\ndla46x_c,224,150.23,6.643,1,1.07,0.54,5.66\nspnasnet_100,224,150.13,6.647,1,4.42,0.35,6.03\ntf_mobilenetv3_large_100
,224,149.59,6.67,1,5.48,0.23,4.41\nmobilenet_edgetpu_v2_s,224,147.34,6.773,1,5.99,1.21,6.6\nfbnetc_100,224,147.19,6.78,1,5.57,0.4,6.51\nsemnasnet_075,224,147.11,6.784,1,2.91,0.23,5.54\nmnasnet_140,224,146.87,6.794,1,7.12,0.6,7.71\nsemnasnet_100,224,146.31,6.821,1,3.89,0.32,6.23\ngmixer_12_224,224,146.18,6.826,1,12.7,2.67,7.26\ngmlp_ti16_224,224,146.06,6.832,1,5.87,1.34,7.55\nefficientvit_m1,224,144.48,6.91,1,2.98,0.17,1.33\ntf_efficientnet_lite0,224,143.01,6.978,1,4.65,0.4,6.74\nefficientnet_es,224,143.0,6.979,1,5.44,1.81,8.73\npit_xs_224,224,141.88,7.033,1,10.62,1.4,7.71\ntf_efficientnet_es,224,141.88,7.034,1,5.44,1.81,8.73\nefficientnet_es_pruned,224,141.73,7.041,1,5.44,1.81,8.73\nresnet14t,224,141.53,7.051,1,10.08,1.69,5.8\nmobilenetv2_110d,224,141.41,7.057,1,4.52,0.45,8.71\nlevit_conv_128,224,141.12,7.072,1,9.21,0.41,2.71\npit_xs_distilled_224,224,141.01,7.077,1,11.0,1.41,7.76\nmobilevitv2_050,256,140.89,7.084,1,1.37,0.48,8.04\nefficientnet_lite1,240,140.13,7.122,1,5.42,0.62,10.14\nregnetx_004,224,139.23,7.169,1,5.16,0.4,3.14\nlevit_conv_192,224,138.83,7.189,1,10.95,0.66,3.2\nlevit_256,224,137.56,7.255,1,18.89,1.13,4.23\nefficientvit_m2,224,137.23,7.274,1,4.19,0.2,1.47\nregnetx_006,224,135.97,7.341,1,6.2,0.61,3.98\nrepghostnet_058,224,135.13,7.387,1,2.55,0.07,2.59\nefficientvit_m0,224,135.04,7.393,1,2.35,0.08,0.91\nsemnasnet_140,224,134.8,7.404,1,6.11,0.6,8.87\nghostnet_050,224,134.36,7.43,1,2.59,0.05,1.77\nregnetx_008,224,133.61,7.47,1,7.26,0.81,5.15\ntinynet_c,184,131.97,7.564,1,2.46,0.11,2.87\nese_vovnet19b_dw,288,131.11,7.612,1,6.54,2.22,13.63\nhgnetv2_b1,224,130.75,7.633,1,6.34,0.49,2.73\nedgenext_xx_small,288,130.47,7.652,1,1.33,0.33,4.21\nrepghostnet_050,224,130.45,7.653,1,2.31,0.05,2.02\nmobilenet_edgetpu_v2_m,224,129.27,7.721,1,8.46,1.85,8.15\nconvnext_atto,224,129.03,7.736,1,3.7,0.55,3.81\nrepghostnet_080,224,128.98,7.74,1,3.28,0.1,3.22\nmobileone_s1,224,128.61,7.761,1,4.83,0.86,9.67\nedgenext_x_small,256,126.77,7.875,1,2.34,0.54,5.93\nresnet18,224,126
.23,7.907,1,11.69,1.82,2.48\nresnext26ts,256,126.06,7.918,1,10.3,2.43,10.52\ndla60x_c,224,123.16,8.107,1,1.32,0.59,6.01\nghostnet_100,224,122.98,8.117,1,5.18,0.15,3.55\nconvnextv2_atto,224,122.08,8.178,1,3.71,0.55,3.81\nrepghostnet_100,224,122.06,8.18,1,4.07,0.15,3.98\nhgnetv2_b0,288,121.94,8.186,1,6.0,0.54,3.51\nefficientnet_lite2,260,120.94,8.254,1,6.09,0.89,12.9\nresnet34,160,120.62,8.275,1,21.8,1.87,1.91\nhardcorenas_f,224,120.04,8.317,1,8.2,0.35,5.57\nconvnext_atto_ols,224,119.07,8.384,1,3.7,0.58,4.11\nconvnext_femto_ols,224,118.75,8.407,1,5.23,0.82,4.87\nresnet18d,224,118.43,8.429,1,11.71,2.06,3.29\nedgenext_x_small,288,118.24,8.444,1,2.34,0.68,7.5\nrepghostnet_111,224,118.2,8.447,1,4.54,0.18,4.38\nresnetblur18,224,117.55,8.492,1,11.69,2.34,3.39\nmobilenet_edgetpu_v2_m,256,117.43,8.501,1,8.46,2.42,10.65\nhardcorenas_e,224,117.2,8.518,1,8.07,0.35,5.65\nvit_xsmall_patch16_clip_224,224,116.75,8.551,1,8.28,1.79,6.65\neca_resnext26ts,256,115.44,8.647,1,10.3,2.43,10.52\nmobilenet_edgetpu_v2_l,224,115.23,8.663,1,10.92,2.55,9.05\nregnety_008,224,115.15,8.67,1,6.26,0.81,5.25\nmobilenetv2_120d,224,115.14,8.67,1,5.83,0.69,11.97\nmobilenetv4_conv_medium,224,115.13,8.671,1,9.72,0.84,5.8\nvisformer_tiny,224,115.0,8.681,1,10.32,1.27,5.72\nefficientvit_m3,224,114.86,8.693,1,6.9,0.27,1.62\nmobileone_s2,224,113.76,8.776,1,7.88,1.34,11.55\ntf_efficientnet_lite1,240,112.67,8.86,1,5.42,0.62,10.14\nconvnext_femto,224,112.57,8.869,1,5.22,0.79,4.57\nmobilevitv2_075,256,112.53,8.872,1,2.87,1.05,12.06\necaresnext50t_32x4d,224,112.38,8.883,1,15.41,2.7,10.09\nresmlp_12_224,224,112.17,8.9,1,15.35,3.01,5.5\nrepghostnet_130,224,111.65,8.942,1,5.48,0.25,5.24\nlambda_resnet26t,256,111.48,8.955,1,10.96,3.02,11.87\necaresnext26t_32x4d,224,111.45,8.957,1,15.41,2.7,10.09\nregnety_008_tv,224,111.42,8.961,1,6.43,0.84,5.42\nresnext26ts,288,111.36,8.965,1,10.3,3.07,13.31\nresnet18,288,111.26,8.973,1,11.69,3.01,4.11\nrexnetr_100,224,110.23,9.058,1,4.88,0.43,7.72\npoolformerv2_s12,224,110.08,9.069,1,11
.89,1.83,5.53\nhardcorenas_d,224,109.61,9.11,1,7.5,0.3,4.93\nmobilenetv4_conv_medium,256,109.52,9.116,1,9.72,1.1,7.58\nedgenext_small,256,109.41,9.125,1,5.59,1.26,9.07\nlevit_conv_256,224,109.21,9.143,1,18.89,1.13,4.23\nseresnet18,224,109.21,9.142,1,11.78,1.82,2.49\ntinynet_b,188,108.98,9.162,1,3.73,0.21,4.44\nregnety_004,224,108.81,9.177,1,4.34,0.41,3.89\nseresnext26ts,256,108.64,9.19,1,10.39,2.43,10.52\npoolformer_s12,224,108.39,9.211,1,11.92,1.82,5.53\nrexnet_100,224,108.28,9.221,1,4.8,0.41,7.44\nresnest14d,224,107.97,9.247,1,10.61,2.76,7.33\nbotnet26t_256,256,107.88,9.254,1,12.49,3.32,11.98\nlegacy_seresnet18,224,107.78,9.264,1,11.78,1.82,2.49\nghostnet_130,224,107.61,9.278,1,7.36,0.24,4.6\neva02_tiny_patch14_224,224,107.45,9.293,1,5.5,1.7,9.14\nlegacy_seresnext26_32x4d,224,107.29,9.305,1,16.79,2.49,9.39\ngernet_m,224,106.38,9.385,1,21.14,3.02,5.24\nefficientvit_b1,256,106.12,9.409,1,9.1,0.69,9.46\nconvnext_atto,288,106.05,9.416,1,3.7,0.91,6.3\neca_botnext26ts_256,256,105.99,9.42,1,10.59,2.46,11.6\nefficientvit_m4,224,105.66,9.45,1,8.8,0.3,1.7\nrexnetr_130,224,105.52,9.462,1,7.61,0.68,9.81\nhgnetv2_b2,224,105.37,9.475,1,11.22,1.15,4.12\nefficientvit_b1,224,104.82,9.526,1,9.1,0.53,7.25\nrepghostnet_150,224,104.8,9.528,1,6.58,0.32,6.0\nmixer_s16_224,224,104.38,9.565,1,18.53,3.79,5.97\nfastvit_t8,256,104.04,9.598,1,4.03,0.7,8.63\nresnet18d,288,104.01,9.6,1,11.71,3.41,5.43\nefficientformer_l1,224,103.55,9.642,1,12.29,1.3,5.53\nmobileone_s3,224,103.38,9.659,1,10.17,1.94,13.85\nregnetx_016,224,103.31,9.664,1,9.19,1.62,7.93\nmobilenetv4_conv_aa_medium,256,102.95,9.698,1,9.72,1.58,10.3\nregnety_006,224,102.78,9.716,1,6.06,0.61,4.33\nseresnext26d_32x4d,224,102.22,9.767,1,16.81,2.73,10.19\nmobilenetv4_conv_blur_medium,224,101.88,9.801,1,9.72,1.22,8.58\neca_resnext26ts,288,101.54,9.834,1,10.3,3.07,13.32\nefficientnet_b0,256,101.51,9.837,1,5.29,0.52,8.81\nhgnet_tiny,224,101.32,9.854,1,14.74,4.54,6.36\nseresnext26t_32x4d,224,100.84,9.901,1,16.81,2.7,10.09\npit_s_224,224,100.
54,9.931,1,23.46,2.88,11.56\nresnet26,224,100.47,9.938,1,16.0,2.36,7.35\ntf_efficientnet_lite2,260,100.42,9.943,1,6.09,0.89,12.9\nefficientnet_b0,224,100.41,9.945,1,5.29,0.4,6.75\nhalonet26t,256,100.03,9.982,1,12.48,3.19,11.69\nresnetblur18,288,99.96,9.989,1,11.69,3.87,5.6\nrepvgg_a0,224,99.9,9.995,1,9.11,1.52,3.59\nvit_medium_patch32_clip_224,224,99.8,10.005,1,39.69,2.0,3.34\nmobilevit_xxs,256,99.69,10.017,1,1.27,0.42,8.34\nconvnext_femto,288,99.64,10.021,1,5.22,1.3,7.56\nconvnext_femto_ols,288,99.29,10.057,1,5.23,1.35,8.06\nefficientnet_em,240,99.13,10.073,1,6.9,3.04,14.34\nrexnetr_150,224,98.92,10.095,1,9.78,0.89,11.13\nmobilenetv4_conv_medium,320,98.53,10.135,1,9.72,1.71,11.84\nseresnet18,288,98.53,10.135,1,11.78,3.01,4.11\nhgnetv2_b1,288,98.34,10.154,1,6.34,0.82,4.51\nedgenext_small_rw,256,98.33,10.155,1,7.83,1.58,9.51\neca_halonext26ts,256,98.17,10.172,1,10.76,2.44,11.46\ntf_efficientnetv2_b0,192,97.93,10.197,1,7.14,0.54,3.51\nrexnet_130,224,97.92,10.198,1,7.56,0.68,9.71\nlambda_resnet26rpt_256,256,97.62,10.229,1,10.99,3.16,11.87\nlevit_256d,224,97.59,10.232,1,26.21,1.4,4.93\ntf_efficientnet_em,240,97.41,10.252,1,6.9,3.04,14.34\nvit_tiny_r_s16_p8_384,384,96.81,10.314,1,6.36,1.34,6.49\nefficientvit_b1,288,96.74,10.323,1,9.1,0.87,11.96\nrexnet_150,224,96.5,10.348,1,9.73,0.9,11.21\ngcresnext26ts,256,96.09,10.392,1,10.48,2.43,10.53\nresnet26d,224,96.02,10.399,1,16.01,2.6,8.15\nconvnext_pico,224,95.9,10.413,1,9.05,1.37,6.1\nmobilenetv4_conv_blur_medium,256,95.82,10.421,1,9.72,1.59,11.2\nrepghostnet_200,224,95.57,10.449,1,9.8,0.54,7.96\nvit_wee_patch16_reg1_gap_256,256,95.45,10.462,1,13.42,3.83,13.9\nconvit_tiny,224,95.05,10.506,1,5.71,1.26,7.94\nxcit_nano_12_p16_224,224,94.66,10.551,1,3.05,0.56,4.17\nseresnext26ts,288,94.65,10.55,1,10.39,3.07,13.32\ntinynet_a,192,94.03,10.62,1,6.19,0.35,5.41\nmobilevitv2_100,256,93.64,10.665,1,4.9,1.84,16.08\npit_s_distilled_224,224,93.54,10.676,1,24.04,2.9,11.64\npvt_v2_b0,224,93.24,10.71,1,3.67,0.57,7.99\nmobilenetv3_large_150d,2
24,92.84,10.756,1,14.62,,\nconvnextv2_atto,288,92.51,10.795,1,3.71,0.91,6.3\nefficientnet_lite3,300,92.29,10.82,1,8.2,1.65,21.85\nconvnext_pico_ols,224,91.85,10.873,1,9.06,1.43,6.5\nefficientnet_b0_g16_evos,224,91.32,10.937,1,8.11,1.01,7.42\ncs3darknet_focus_m,288,90.7,11.01,1,9.3,2.51,6.19\nresnext50_32x4d,160,90.34,11.055,1,25.03,2.17,7.35\nconvnext_atto_ols,288,89.86,11.114,1,3.7,0.96,6.8\ncs3darknet_m,256,89.72,11.13,1,9.31,2.08,5.28\nlevit_384,224,89.69,11.134,1,39.13,2.36,6.26\nmobilenetv4_hybrid_medium_075,224,89.67,11.138,1,7.31,0.66,5.65\ncs3darknet_focus_m,256,89.19,11.197,1,9.3,1.98,4.89\nefficientformerv2_s0,224,89.05,11.216,1,3.6,0.41,5.3\nconvnextv2_femto,224,88.91,11.232,1,5.23,0.79,4.57\nresnext50_32x4d,176,88.9,11.233,1,25.03,2.71,8.97\ntf_efficientnetv2_b0,224,88.9,11.234,1,7.14,0.73,4.77\nefficientnet_cc_b0_4e,224,88.85,11.24,1,13.31,0.41,9.42\nresnet26t,256,88.38,11.3,1,16.01,3.35,10.52\ntf_efficientnet_b0,224,88.14,11.33,1,5.29,0.4,6.75\nfastvit_t12,256,88.12,11.333,1,7.55,1.42,12.42\nedgenext_small,320,88.04,11.343,1,5.59,1.97,14.16\ndla34,224,87.87,11.366,1,15.74,3.07,5.02\ncs3darknet_m,288,87.77,11.378,1,9.31,2.63,6.69\nresnet26,288,87.72,11.384,1,16.0,3.9,12.15\nefficientvit_m5,224,87.3,11.441,1,12.47,0.53,2.41\nvit_tiny_patch16_384,384,87.07,11.47,1,5.79,4.7,25.39\nskresnet18,224,87.06,11.471,1,11.96,1.82,3.24\ngcresnext26ts,288,86.9,11.493,1,10.48,3.07,13.33\nhgnetv2_b2,288,86.5,11.546,1,11.22,1.89,6.8\nmobilevitv2_125,256,85.3,11.709,1,7.48,2.86,20.1\nrepvgg_a1,224,85.15,11.728,1,14.09,2.64,4.74\nrexnetr_200,224,84.71,11.79,1,16.52,1.59,15.11\nhgnetv2_b3,224,84.48,11.822,1,16.29,1.78,5.07\ndarknet17,256,84.06,11.881,1,14.3,3.26,7.18\nvit_betwixt_patch32_clip_224,224,83.79,11.919,1,61.41,3.09,4.17\nefficientnet_blur_b0,224,83.71,11.932,1,5.29,0.43,8.72\nresnet33ts,256,83.15,12.01,1,19.68,4.76,11.66\nresnet26d,288,82.94,12.042,1,16.01,4.29,13.48\nefficientnet_cc_b0_8e,224,82.88,12.051,1,24.01,0.42,9.42\nrexnet_200,224,82.7,12.077,1,16.37,1.
56,14.91\ncrossvit_tiny_240,240,82.5,12.107,1,7.01,1.57,9.08\nseresnext26t_32x4d,288,82.5,12.105,1,16.81,4.46,16.68\nvit_pwee_patch16_reg1_gap_256,256,82.09,12.167,1,15.25,4.37,15.87\nfastvit_s12,256,81.63,12.236,1,9.47,1.82,13.67\nvit_small_patch32_384,384,81.63,12.236,1,22.92,3.45,8.25\nedgenext_small_rw,320,81.46,12.261,1,7.83,2.46,14.85\necaresnet26t,256,81.44,12.263,1,16.01,3.35,10.53\ncrossvit_9_240,240,81.43,12.267,1,8.55,1.85,9.52\nmobilevit_xs,256,80.96,12.337,1,2.32,1.05,16.33\nmobilenetv4_hybrid_medium,224,80.75,12.369,1,11.07,0.98,6.84\nresnet34,224,80.61,12.391,1,21.8,3.67,3.74\nregnetx_032,224,80.6,12.392,1,15.3,3.2,11.37\nxcit_tiny_12_p16_224,224,80.35,12.432,1,6.72,1.24,6.29\nmobilenetv4_hybrid_medium,256,79.87,12.506,1,11.07,1.29,9.01\ntf_efficientnetv2_b2,208,79.66,12.539,1,10.1,1.06,6.0\ntf_efficientnet_cc_b0_4e,224,79.65,12.541,1,13.31,0.41,9.42\ngmixer_24_224,224,79.39,12.581,1,24.72,5.28,14.45\neca_resnet33ts,256,79.24,12.604,1,19.68,4.76,11.66\nxcit_nano_12_p16_384,384,79.2,12.612,1,3.05,1.64,12.15\nmobileone_s4,224,79.16,12.618,1,14.95,3.04,17.74\nlevit_conv_256d,224,79.12,12.625,1,26.21,1.4,4.93\ndla60x,224,78.71,12.691,1,17.35,3.54,13.8\nresnet32ts,288,78.66,12.697,1,17.96,5.86,14.65\nfastvit_sa12,256,78.18,12.776,1,11.58,1.96,14.03\nresnet34d,224,78.17,12.777,1,21.82,3.91,4.54\nregnetz_005,224,78.07,12.795,1,7.12,0.52,5.86\ntf_efficientnetv2_b1,192,77.79,12.842,1,8.14,0.76,4.59\ncrossvit_9_dagger_240,240,77.65,12.864,1,8.78,1.99,9.97\nresnest26d,224,77.61,12.869,1,17.07,3.64,9.97\nvit_srelpos_small_patch16_224,224,77.26,12.928,1,21.97,4.59,12.16\nresnet50,160,77.2,12.938,1,25.56,2.1,5.67\nseresnext26d_32x4d,288,77.0,12.973,1,16.81,4.51,16.85\nresnext50_32x4d,224,76.91,12.987,1,25.03,4.26,14.4\necaresnet50d_pruned,224,76.62,13.036,1,19.94,2.53,6.43\nvit_small_patch16_224,224,76.49,13.058,1,22.05,4.61,11.95\ndeit3_small_patch16_224,224,76.48,13.06,1,22.06,4.61,11.95\ndeit_small_patch16_224,224,76.41,13.072,1,22.05,4.61,11.95\ntf_efficientnet
v2_b1,240,76.41,13.073,1,8.14,1.21,7.34\ndeit_small_distilled_patch16_224,224,76.14,13.119,1,22.44,4.63,12.02\nconvnextv2_pico,224,76.02,13.14,1,9.07,1.37,6.1\nresnet26t,320,75.79,13.179,1,16.01,5.24,16.44\ncoat_lite_mini,224,75.62,13.208,1,11.01,2.0,12.25\nnf_regnet_b0,192,75.34,13.258,1,8.76,0.37,3.15\nresnetaa34d,224,75.18,13.287,1,21.82,4.43,5.07\nxcit_nano_12_p8_224,224,75.15,13.293,1,3.05,2.16,15.71\nresnet50d,160,75.14,13.293,1,25.58,2.22,6.08\ntf_efficientnet_cc_b0_8e,224,74.92,13.333,1,24.01,0.42,9.42\nresnext50d_32x4d,224,74.78,13.358,1,25.05,4.5,15.2\nresnet50,176,74.75,13.362,1,25.56,2.62,6.92\nfbnetv3_b,224,74.72,13.368,1,8.6,0.42,6.97\nresnet33ts,288,74.42,13.421,1,19.68,6.02,14.75\ntiny_vit_5m_224,224,74.28,13.448,1,12.08,1.28,11.25\nmixer_b32_224,224,74.14,13.472,1,60.29,3.24,6.29\nvit_little_patch16_reg1_gap_256,256,74.1,13.481,1,22.52,6.27,18.06\ngernet_l,256,74.0,13.498,1,31.08,4.57,8.0\ndpn48b,224,73.68,13.557,1,9.13,1.69,8.92\nhgnet_tiny,288,73.65,13.563,1,14.74,7.51,10.51\nefficientnet_b1_pruned,240,73.55,13.583,1,6.33,0.4,6.21\nvit_little_patch16_reg4_gap_256,256,73.51,13.589,1,22.52,6.35,18.33\nefficientnet_b1,240,73.32,13.625,1,7.79,0.71,10.88\ncspresnext50,256,73.13,13.66,1,20.57,4.05,15.86\nvovnet39a,224,73.1,13.665,1,22.6,7.09,6.73\ngmlp_s16_224,224,73.08,13.668,1,19.42,4.42,15.1\nmobilevitv2_150,256,72.89,13.705,1,10.59,4.09,24.11\nregnetz_b16,224,72.69,13.743,1,9.72,1.45,9.95\nedgenext_base,256,72.61,13.756,1,18.51,3.85,15.58\nresnet32ts,256,72.54,13.77,1,17.96,4.63,11.58\ncoat_lite_tiny,224,72.5,13.777,1,5.72,1.6,11.65\nregnetz_005,288,72.4,13.799,1,7.12,0.86,9.68\nrepvgg_b0,224,72.16,13.843,1,15.82,3.41,6.15\nhgnetv2_b4,224,71.76,13.92,1,19.8,2.75,6.7\ndla60,224,71.69,13.933,1,22.04,4.26,10.16\nefficientnet_b1,224,71.49,13.974,1,7.79,0.59,9.36\nlevit_conv_384,224,70.82,14.105,1,39.13,2.36,6.26\nefficientnet_b1,256,70.76,14.119,1,7.79,0.77,12.22\nregnetx_040,224,70.59,14.151,1,22.12,3.99,12.2\ntf_efficientnet_lite3,300,70.56,14.156,1,8
.2,1.65,21.85\neca_vovnet39b,224,70.47,14.174,1,22.6,7.09,6.74\nefficientnet_b2_pruned,260,70.35,14.199,1,8.31,0.73,9.13\ntf_efficientnetv2_b2,260,70.25,14.221,1,10.1,1.72,9.84\nhgnetv2_b3,288,70.06,14.259,1,16.29,2.94,8.38\nfbnetv3_d,224,70.0,14.271,1,10.31,0.52,8.5\nconvnext_nano_ols,224,69.98,14.274,1,15.65,2.65,9.38\nfbnetv3_b,256,69.83,14.307,1,8.6,0.55,9.1\nrepvit_m0_9,224,69.8,14.313,1,5.49,0.83,7.45\nefficientformerv2_s1,224,69.57,14.36,1,6.19,0.67,7.66\ndarknet21,256,69.5,14.374,1,20.86,3.93,7.47\nese_vovnet39b,224,69.4,14.394,1,24.57,7.09,6.74\neca_resnet33ts,288,69.39,14.395,1,19.68,6.02,14.76\nnf_regnet_b0,256,69.34,14.407,1,8.76,0.64,5.58\necaresnet50t,160,69.28,14.419,1,25.57,2.21,6.04\nrepvit_m1,224,69.18,14.442,1,5.49,0.83,7.45\nrexnetr_200,288,69.08,14.46,1,16.52,2.62,24.96\nres2net50_48w_2s,224,69.0,14.478,1,25.29,4.18,11.72\nresnet34,288,68.86,14.507,1,21.8,6.07,6.18\nflexivit_small,240,68.81,14.517,1,22.06,5.35,14.18\nlegacy_seresnet34,224,68.78,14.524,1,21.96,3.67,3.74\nconvnext_tiny,224,68.68,14.544,1,28.59,4.47,13.44\necaresnet26t,320,68.52,14.579,1,16.01,5.24,16.44\nregnetz_b16_evos,224,68.42,14.6,1,9.74,1.43,9.95\nrepvit_m1_1,224,68.38,14.61,1,8.8,1.36,9.43\nefficientnet_b2,256,68.17,14.655,1,9.11,0.89,12.81\nefficientvit_b2,224,68.17,14.655,1,24.33,1.6,14.62\nrepvit_m2,224,67.69,14.76,1,8.8,1.36,9.43\nseresnet34,224,67.32,14.839,1,21.96,3.67,3.74\nresnet50,224,67.2,14.865,1,25.56,4.11,11.11\nvit_relpos_small_patch16_rpn_224,224,67.18,14.869,1,21.97,4.59,13.05\nconvnextv2_femto,288,66.86,14.941,1,5.23,1.3,7.56\nresnet34d,288,66.79,14.958,1,21.82,6.47,7.51\ngcresnet33ts,256,66.39,15.047,1,19.88,4.76,11.68\ntf_efficientnet_b1,240,66.1,15.114,1,7.79,0.71,10.88\nfbnetv3_d,256,66.05,15.125,1,10.31,0.68,11.1\nxcit_tiny_12_p16_384,384,65.58,15.233,1,6.72,3.64,18.26\nvit_relpos_small_patch16_224,224,65.51,15.25,1,21.98,4.59,13.05\nresnet50c,224,65.48,15.257,1,25.58,4.35,11.92\ndpn68,224,65.46,15.261,1,12.61,2.35,10.47\necaresnet50d_pruned,288,65.44,
15.265,1,19.94,4.19,10.61\nefficientnet_b1,288,65.21,15.322,1,7.79,0.97,15.46\nrepvit_m1_0,224,65.07,15.356,1,7.3,1.13,8.69\ntwins_pcpvt_small,224,65.06,15.355,1,24.11,3.83,18.08\nmobilenetv4_hybrid_medium,320,65.04,15.36,1,11.07,2.05,14.36\nsedarknet21,256,64.98,15.373,1,20.95,3.93,7.47\nresnet50d,224,64.83,15.41,1,25.58,4.35,11.92\nresnetaa34d,288,64.72,15.435,1,21.82,7.33,8.38\nhiera_tiny_224,224,64.52,15.483,1,27.91,4.91,17.13\npoolformerv2_s24,224,64.33,15.529,1,21.34,3.42,10.68\ndpn68b,224,64.26,15.545,1,12.61,2.35,10.47\nghostnetv2_100,224,64.11,15.585,1,6.16,0.18,4.55\nresnet50t,224,63.72,15.678,1,25.57,4.32,11.82\nresnetv2_50,224,63.72,15.68,1,25.55,4.11,11.11\nghostnetv2_130,224,63.65,15.697,1,8.96,0.28,5.9\nresnetaa50,224,63.65,15.694,1,25.56,5.15,11.64\nseresnet50,160,63.58,15.713,1,28.09,2.1,5.69\nregnetz_b16,288,63.55,15.72,1,9.72,2.39,16.43\nconvmixer_1024_20_ks9_p14,224,63.5,15.734,1,24.38,5.55,5.51\nefficientnet_b2,288,63.48,15.739,1,9.11,1.12,16.2\nresnext50_32x4d,288,63.36,15.767,1,25.03,7.04,23.81\ncoatnet_pico_rw_224,224,63.31,15.781,1,10.85,2.05,14.62\nregnetx_064,224,63.21,15.805,1,26.21,6.49,16.37\nregnety_016,224,62.98,15.864,1,11.2,1.63,8.04\npoolformer_s24,224,62.88,15.889,1,21.39,3.41,10.68\nrexnetr_300,224,62.86,15.891,1,34.81,3.39,22.16\nnf_resnet26,224,62.76,15.919,1,16.0,2.41,7.35\nsam2_hiera_tiny,224,62.71,15.931,1,26.85,4.91,17.12\nhgnet_small,224,62.6,15.958,1,24.36,8.53,8.79\nmobilevitv2_175,256,62.59,15.963,1,14.25,5.54,28.13\nefficientvit_b2,288,62.24,16.05,1,24.33,2.64,24.03\nseresnext50_32x4d,224,62.23,16.053,1,27.56,4.26,14.42\nresnext50d_32x4d,288,62.18,16.068,1,25.05,7.44,25.13\nconvnext_nano,224,62.15,16.076,1,15.59,2.46,8.37\nregnety_032,224,61.92,16.135,1,19.44,3.2,11.26\nresnet50_clip_gap,224,61.9,16.138,1,23.53,5.39,12.44\nresnetaa50d,224,61.9,16.139,1,25.58,5.39,12.44\nresnetv2_50d,224,61.83,16.157,1,25.57,4.35,11.92\nresnetblur50,224,61.61,16.216,1,25.56,5.16,12.02\ntiny_vit_11m_224,224,61.55,16.233,1,20.35,2.04,13.4
9\nresnet50s,224,61.42,16.265,1,25.68,5.47,13.52\nvisformer_small,224,61.4,16.271,1,40.22,4.88,11.43\ngcresnet33ts,288,61.33,16.29,1,19.88,6.02,14.78\nconvnextv2_tiny,224,61.3,16.298,1,28.64,4.47,13.44\nregnetz_c16,256,61.07,16.36,1,13.46,2.51,16.57\ncs3darknet_l,256,61.04,16.366,1,21.16,4.86,8.55\ntf_efficientnetv2_b3,240,61.03,16.372,1,14.36,1.93,9.95\nresnet51q,256,61.0,16.378,1,35.7,6.38,16.55\nlegacy_seresnext50_32x4d,224,60.86,16.415,1,27.56,4.26,14.42\nvit_base_patch32_224,224,60.58,16.492,1,88.22,4.41,5.01\nresnetv2_50t,224,60.55,16.501,1,25.57,4.32,11.82\nmobilevit_s,256,60.52,16.509,1,5.58,2.03,19.94\nxcit_tiny_12_p8_224,224,60.28,16.576,1,6.71,4.81,23.6\nefficientvit_b2,256,60.14,16.612,1,24.33,2.09,19.03\nresnetrs50,160,60.13,16.615,1,35.69,2.29,6.2\ntf_efficientnet_b2,260,60.06,16.636,1,9.11,1.02,13.83\nhgnetv2_b4,288,59.96,16.662,1,19.8,4.54,11.08\ntwins_svt_small,224,59.94,16.668,1,24.06,2.94,13.75\nefficientnet_el_pruned,300,59.77,16.716,1,10.59,8.0,30.7\nseresnet34,288,59.66,16.747,1,21.96,6.07,6.18\nvit_base_patch32_clip_quickgelu_224,224,59.58,16.77,1,87.85,4.41,5.01\nefficientnet_cc_b1_8e,240,59.51,16.79,1,39.72,0.75,15.44\nhrnet_w18_small,224,59.47,16.799,1,13.19,1.61,5.72\nefficientformer_l3,224,59.4,16.82,1,31.41,3.93,12.01\ntf_mixnet_s,224,59.38,16.826,1,4.13,0.25,6.25\nefficientnet_el,300,59.35,16.834,1,10.59,8.0,30.7\necaresnetlight,224,59.32,16.843,1,30.16,4.11,8.42\nrexnet_300,224,59.29,16.85,1,34.71,3.44,22.4\nvolo_d1_224,224,59.28,16.853,1,26.63,6.94,24.43\nresnetblur50d,224,59.19,16.88,1,25.58,5.4,12.82\nvit_base_patch32_clip_224,224,59.16,16.887,1,88.22,4.41,5.01\ntf_efficientnet_el,300,59.06,16.916,1,10.59,8.0,30.7\nseresnet33ts,256,59.02,16.928,1,19.78,4.76,11.66\nefficientnet_lite4,380,59.0,16.933,1,13.01,4.04,45.66\necaresnet50t,224,58.9,16.963,1,25.57,4.32,11.83\nresmlp_24_224,224,58.64,17.039,1,30.02,5.96,10.91\nghostnetv2_160,224,58.59,17.053,1,12.39,0.42,7.23\nnf_ecaresnet26,224,58.55,17.063,1,16.0,2.41,7.36\necaresnet50d,224,
58.5,17.08,1,25.58,4.35,11.93\nmobilenetv4_conv_large,256,58.28,17.144,1,32.59,2.86,12.14\nmixnet_s,224,58.26,17.15,1,4.13,0.25,6.25\nmobilenetv4_hybrid_medium,384,57.89,17.259,1,11.07,3.01,21.18\nmaxvit_pico_rw_256,256,57.53,17.368,1,7.46,1.83,22.3\neva02_tiny_patch14_336,336,57.45,17.391,1,5.76,4.68,27.16\nresnetv2_50d_frn,224,57.35,17.421,1,25.59,4.33,11.92\ncs3darknet_focus_l,256,57.29,17.438,1,21.15,4.66,8.03\nrepvgg_a2,224,57.04,17.514,1,28.21,5.7,6.26\nregnety_040,224,56.92,17.552,1,20.65,4.0,12.29\nedgenext_base,320,56.81,17.588,1,18.51,6.01,24.32\nconvit_small,224,56.77,17.601,1,27.78,5.76,17.87\ncrossvit_15_240,240,56.47,17.694,1,27.53,5.81,19.77\ngcresnext50ts,256,56.33,17.737,1,15.67,3.75,15.46\nnf_seresnet26,224,56.21,17.774,1,17.4,2.41,7.36\nnf_regnet_b1,256,56.15,17.796,1,10.22,0.82,7.27\nlegacy_seresnet50,224,55.97,17.851,1,28.09,3.88,10.6\ndpn68b,288,55.95,17.857,1,12.61,3.89,17.3\ninception_next_tiny,224,55.95,17.858,1,28.06,4.19,11.98\nresnet50,288,55.91,17.869,1,25.56,6.8,18.37\nefficientnet_b3_pruned,300,55.81,17.904,1,9.86,1.04,11.86\ncait_xxs24_224,224,55.71,17.936,1,11.96,2.53,20.29\nhiera_small_224,224,55.7,17.938,1,35.01,6.42,20.75\nlambda_resnet50ts,256,55.57,17.981,1,21.54,5.07,17.48\ntf_efficientnet_lite4,380,55.5,18.002,1,13.01,4.04,45.66\nresnet50d,288,55.43,18.027,1,25.58,7.19,19.7\nhaloregnetz_b,224,55.38,18.043,1,11.68,1.97,11.94\nresnet50_mlp,256,55.38,18.042,1,26.65,7.05,16.25\nfbnetv3_g,240,55.16,18.115,1,16.62,1.28,14.87\nvit_base_patch32_clip_256,256,54.94,18.186,1,87.86,5.76,6.65\nseresnet33ts,288,54.78,18.24,1,19.78,6.02,14.76\nresnet50t,288,54.68,18.272,1,25.57,7.14,19.53\nresnet51q,288,54.6,18.301,1,35.7,8.07,20.94\nsehalonet33ts,256,54.54,18.322,1,13.69,3.55,14.7\ncaformer_s18,224,54.34,18.388,1,26.34,4.13,19.39\necaresnet50t,256,54.3,18.402,1,25.57,5.64,15.45\nregnetv_040,224,54.21,18.43,1,20.64,4.0,12.29\nseresnet50,224,54.16,18.45,1,28.09,4.11,11.13\nswin_s3_tiny_224,224,54.08,18.477,1,28.33,4.64,19.13\ntnt_s_patch16_22
4,224,53.96,18.516,1,23.76,5.24,24.37\nnf_regnet_b1,288,53.92,18.531,1,10.22,1.02,9.2\ntf_efficientnet_cc_b1_8e,240,53.91,18.536,1,39.72,0.75,15.44\nefficientnetv2_rw_t,224,53.9,18.539,1,13.65,1.93,9.94\nvit_srelpos_medium_patch16_224,224,53.78,18.578,1,38.74,7.96,16.21\ndeit3_medium_patch16_224,224,53.65,18.623,1,38.85,8.0,15.93\nvit_small_resnet26d_224,224,53.52,18.668,1,63.61,5.07,11.12\nnest_tiny,224,53.5,18.675,1,17.06,5.83,25.48\nxcit_small_12_p16_224,224,53.49,18.68,1,26.25,4.82,12.58\ncs3darknet_l,288,53.41,18.707,1,21.16,6.16,10.83\ncrossvit_15_dagger_240,240,53.29,18.75,1,28.21,6.13,20.43\nseresnet50t,224,53.29,18.749,1,28.1,4.32,11.83\nregnetz_b16_evos,288,53.13,18.805,1,9.74,2.36,16.43\nmobilenetv4_conv_large,320,52.96,18.867,1,32.59,4.47,18.97\npvt_v2_b2_li,224,52.85,18.908,1,22.55,3.91,27.6\nskresnet50,224,52.83,18.915,1,25.8,4.11,12.5\nmobilevitv2_200,256,52.49,19.037,1,18.45,7.22,32.15\nresnetaa50,288,52.3,19.104,1,25.56,8.52,19.24\nese_vovnet39b,288,52.13,19.169,1,24.57,11.71,11.13\nefficientnet_b3,288,51.98,19.224,1,12.23,1.63,21.49\nresnet50_clip,224,51.98,19.223,1,38.32,6.14,12.98\ntf_efficientnetv2_b3,300,51.94,19.237,1,14.36,3.04,15.74\nresnet61q,256,51.91,19.248,1,36.85,7.8,17.01\ncoatnet_bn_0_rw_224,224,51.84,19.274,1,27.44,4.67,22.04\ncrossvit_small_240,240,51.78,19.299,1,26.86,5.63,18.17\ncs3darknet_focus_l,288,51.73,19.315,1,21.15,5.9,10.16\nregnety_032,288,51.72,19.319,1,19.44,5.29,18.61\nskresnet50d,224,51.66,19.343,1,25.82,4.36,13.31\nresnetrs50,224,51.65,19.346,1,35.69,4.48,12.14\nnest_tiny_jx,224,51.64,19.349,1,17.06,5.83,25.48\nregnetz_c16,320,51.62,19.359,1,13.46,3.92,25.88\nmobilenetv4_hybrid_medium,448,51.57,19.376,1,11.07,4.2,29.64\nvovnet57a,224,51.53,19.39,1,36.64,8.95,7.52\nseresnetaa50d,224,51.35,19.459,1,28.11,5.4,12.46\ngcresnext50ts,288,51.14,19.538,1,15.67,4.75,19.57\nresnetblur50,288,51.1,19.555,1,25.56,8.52,19.87\nconvnext_pico,288,51.06,19.57,1,9.05,2.27,10.08\nregnetz_c16_evos,256,51.06,19.57,1,13.49,2.48,16.57\nres2n
et50_26w_4s,224,50.93,19.621,1,25.7,4.28,12.61\ndla102x,224,50.88,19.637,1,26.31,5.89,19.42\nnf_regnet_b2,240,50.85,19.653,1,14.31,0.97,7.23\nconvnextv2_nano,224,50.84,19.655,1,15.62,2.46,8.37\ntiny_vit_21m_224,224,50.8,19.67,1,33.22,4.29,20.08\nresnetaa50d,288,50.77,19.683,1,25.58,8.92,20.57\nnf_regnet_b2,272,50.75,19.691,1,14.31,1.22,9.27\nskresnet34,224,50.69,19.712,1,22.28,3.67,5.13\neva02_small_patch14_224,224,50.62,19.74,1,21.62,6.14,18.28\npvt_v2_b1,224,50.59,19.752,1,14.01,2.12,15.39\nmobilenetv4_hybrid_large_075,256,50.54,19.772,1,22.75,2.06,11.64\nvit_relpos_base_patch32_plus_rpn_256,256,50.44,19.809,1,119.42,7.68,8.01\nfbnetv3_g,288,50.35,19.845,1,16.62,1.77,21.09\ndla60_res2next,224,50.32,19.857,1,17.03,3.49,13.17\nconvnext_pico_ols,288,50.27,19.879,1,9.06,2.37,10.74\nsebotnet33ts_256,256,50.06,19.961,1,13.7,3.89,17.46\nres2net50d,224,50.05,19.963,1,25.72,4.52,13.41\nese_vovnet57b,224,49.97,19.996,1,38.61,8.95,7.52\necaresnetlight,288,49.96,20.0,1,30.16,6.79,13.91\nfastvit_mci0,256,49.92,20.017,1,11.41,2.42,18.29\nvit_relpos_medium_patch16_rpn_224,224,49.84,20.05,1,38.73,7.97,17.02\nregnetx_080,224,49.83,20.051,1,39.57,8.02,14.06\nmaxvit_rmlp_pico_rw_256,256,49.77,20.08,1,7.52,1.85,24.86\nseresnext50_32x4d,288,49.63,20.133,1,27.56,7.04,23.82\nresnest50d,224,49.59,20.15,1,27.48,5.4,14.36\ndla60_res2net,224,49.54,20.171,1,20.85,4.15,12.34\nskresnext50_32x4d,224,49.52,20.179,1,27.48,4.5,17.18\nregnety_040,288,49.44,20.21,1,20.65,6.61,20.3\nconvnext_tiny,288,49.43,20.215,1,28.59,7.39,22.21\nnextvit_small,224,49.39,20.232,1,31.76,5.81,18.44\nrepvit_m3,224,49.37,20.243,1,10.68,1.89,13.94\nvit_medium_patch16_gap_240,240,49.3,20.268,1,44.4,9.22,18.81\ncoatnet_nano_cc_224,224,49.22,20.303,1,13.76,2.24,15.02\nres2next50,224,49.07,20.363,1,24.67,4.2,13.71\nvit_base_patch32_plus_256,256,49.04,20.376,1,119.48,7.79,7.76\nmobilenetv4_conv_large,384,48.88,20.443,1,32.59,6.43,27.31\nresnetblur50d,288,48.87,20.447,1,25.58,8.92,21.19\necaresnet50d,288,48.86,20.451,1,25.58,
7.19,19.72\nresnest50d_1s4x24d,224,48.8,20.474,1,25.68,4.43,13.57\npoolformerv2_s36,224,48.68,20.527,1,30.79,5.01,15.82\nvit_medium_patch16_clip_224,224,48.65,20.539,1,38.59,8.0,15.93\nvit_relpos_medium_patch16_224,224,48.6,20.561,1,38.75,7.97,17.02\nrexnetr_300,288,48.34,20.671,1,34.81,5.59,36.61\ncs3sedarknet_l,256,48.22,20.723,1,21.91,4.86,8.56\ngcvit_xxtiny,224,48.16,20.748,1,12.0,2.14,15.36\ndavit_tiny,224,48.09,20.778,1,28.36,4.54,18.89\nlamhalobotnet50ts_256,256,48.01,20.812,1,22.57,5.02,18.44\nconvnext_nano_ols,288,47.75,20.929,1,15.65,4.38,15.5\nconvnextv2_pico,288,47.57,21.008,1,9.07,2.27,10.08\ngc_efficientnetv2_rw_t,224,47.43,21.07,1,13.68,1.94,9.97\ncoatnet_0_rw_224,224,47.41,21.079,1,27.44,4.43,18.73\nhgnet_small,288,47.4,21.08,1,24.36,14.09,14.53\nregnety_040_sgn,224,47.27,21.142,1,20.65,4.03,12.29\ncoatnet_0_224,224,47.25,21.147,1,25.04,4.58,24.01\nefficientnet_b3,320,47.25,21.147,1,12.23,2.01,26.52\nregnetv_040,288,47.25,21.15,1,20.64,6.6,20.3\nswinv2_cr_tiny_ns_224,224,47.24,21.152,1,28.33,4.66,28.45\nresnest50d_4s2x40d,224,47.16,21.189,1,30.42,4.4,17.94\nmaxvit_nano_rw_256,256,47.1,21.214,1,15.45,4.46,30.28\nresnetv2_50,288,47.07,21.228,1,25.55,6.79,18.37\ncspresnet50d,256,46.79,21.355,1,21.64,4.86,12.55\nresnet61q,288,46.75,21.373,1,36.85,9.87,21.52\nswinv2_cr_tiny_224,224,46.75,21.377,1,28.33,4.66,28.45\ncoatnet_nano_rw_224,224,46.73,21.383,1,15.14,2.41,15.41\nregnety_080_tv,224,46.7,21.397,1,39.38,8.51,19.73\ncspresnet50,256,46.68,21.408,1,21.62,4.54,11.5\nregnety_080,224,46.66,21.415,1,39.18,8.0,17.97\ncrossvit_18_240,240,46.59,21.449,1,43.27,9.05,26.26\nefficientnetv2_rw_t,288,46.49,21.496,1,13.65,3.19,16.42\necaresnet50t,288,46.37,21.552,1,25.57,7.14,19.55\nresnet101,160,46.35,21.558,1,44.55,4.0,8.28\ntf_efficientnet_b3,300,46.18,21.641,1,12.23,1.87,23.83\nlegacy_xception,299,46.16,21.647,1,22.86,8.4,35.83\nmobilenetv4_conv_aa_large,384,46.14,21.66,1,32.59,7.07,32.29\nswin_tiny_patch4_window7_224,224,46.12,21.667,1,28.29,4.51,17.06\ncoatnet_
rmlp_nano_rw_224,224,45.97,21.738,1,15.15,2.62,20.34\nvit_medium_patch16_gap_256,256,45.76,21.84,1,38.86,10.59,22.15\nseresnet50,288,45.63,21.898,1,28.09,6.8,18.39\nregnety_064,224,45.62,21.905,1,30.58,6.39,16.41\nfastvit_sa24,256,45.57,21.931,1,21.55,3.8,24.32\nmobileone_s0,224,45.48,21.974,1,5.29,1.09,15.48\nseresnet50t,288,45.48,21.971,1,28.1,7.14,19.55\nefficientvit_l1,224,45.46,21.98,1,52.65,5.27,15.85\ndla102,224,45.32,22.05,1,33.27,7.19,14.18\nmixer_b16_224,224,44.95,22.231,1,59.88,12.62,14.53\ntresnet_m,224,44.93,22.24,1,31.39,5.75,7.31\nconvnext_nano,288,44.91,22.252,1,15.59,4.06,13.84\nlevit_512d,224,44.82,22.297,1,92.5,5.85,11.3\ntf_mixnet_m,224,44.67,22.372,1,5.01,0.36,8.19\npoolformer_s36,224,44.56,22.427,1,30.86,5.0,15.82\nresnet101,176,44.51,22.453,1,44.55,4.92,10.08\nhiera_small_abswin_256,256,44.37,22.521,1,34.36,8.29,26.38\nefficientformerv2_s2,224,44.2,22.612,1,12.71,1.27,11.77\nlevit_512,224,44.15,22.633,1,95.17,5.64,10.22\necaresnet50t,320,44.14,22.639,1,25.57,8.82,24.13\ncoatnet_rmlp_0_rw_224,224,44.13,22.643,1,27.45,4.72,24.89\nregnetv_064,224,44.1,22.658,1,30.58,6.39,16.41\nnf_regnet_b3,288,44.02,22.703,1,18.59,1.67,11.84\nese_vovnet39b_evos,224,43.96,22.734,1,24.58,7.07,6.74\nregnetz_d8,256,43.94,22.743,1,23.37,3.97,23.74\nmixnet_m,224,43.88,22.776,1,5.01,0.36,8.19\nmaxxvitv2_nano_rw_256,256,43.77,22.829,1,23.7,6.26,23.05\nxcit_tiny_24_p16_224,224,43.51,22.97,1,12.12,2.34,11.82\ninception_v3,299,43.48,22.986,1,23.83,5.73,8.97\nmobilenetv4_conv_large,448,43.42,23.016,1,32.59,8.75,37.17\ncs3sedarknet_l,288,43.36,23.047,1,21.91,6.16,10.83\nmvitv2_tiny,224,43.15,23.162,1,24.17,4.7,21.16\nmobilevitv2_150,384,43.14,23.163,1,10.59,9.2,54.25\ngcresnet50t,256,43.13,23.17,1,25.9,5.42,14.67\nhalonet50ts,256,43.09,23.191,1,22.73,5.3,19.2\nconvnextv2_tiny,288,42.95,23.27,1,28.64,7.39,22.21\nhieradet_small,256,42.83,23.331,1,34.72,8.51,27.76\nresnext101_32x4d,224,42.71,23.4,1,44.18,8.01,21.23\ndarknetaa53,256,42.69,23.407,1,36.02,7.97,12.39\nregnetx_120,2
24,42.68,23.416,1,46.11,12.13,21.37\nxception41p,299,42.58,23.471,1,26.91,9.25,39.86\ncrossvit_18_dagger_240,240,42.54,23.491,1,44.27,9.5,27.03\ndensenetblur121d,224,42.38,23.584,1,8.0,3.11,7.9\ncs3sedarknet_xdw,256,42.26,23.645,1,21.6,5.97,17.18\nregnetz_c16_evos,320,41.93,23.833,1,13.49,3.86,25.88\nmaxxvit_rmlp_nano_rw_256,256,41.91,23.848,1,16.78,4.37,26.05\nvit_small_r26_s32_224,224,41.84,23.884,1,36.43,3.56,9.85\nnf_regnet_b3,320,41.83,23.893,1,18.59,2.05,14.61\ntf_mixnet_l,224,41.77,23.924,1,7.33,0.58,10.84\nxcit_nano_12_p8_384,384,41.57,24.039,1,3.05,6.34,46.08\ndensenet121,224,41.48,24.095,1,7.98,2.87,6.9\ngc_efficientnetv2_rw_t,288,41.42,24.127,1,13.68,3.2,16.45\ndpn92,224,41.37,24.158,1,37.67,6.54,18.21\ncoat_lite_small,224,41.08,24.326,1,19.84,3.96,22.09\nefficientnetv2_s,288,41.07,24.333,1,21.46,4.75,20.13\nmobilenetv4_conv_aa_large,448,40.96,24.396,1,32.59,9.63,43.94\nregnetz_040,256,40.92,24.423,1,27.12,4.06,24.19\necaresnet101d_pruned,224,40.84,24.468,1,24.88,3.48,7.69\nmaxvit_rmlp_nano_rw_256,256,40.78,24.507,1,15.5,4.47,31.92\nregnetz_d8_evos,256,40.78,24.507,1,23.46,4.5,24.92\nefficientnet_b0_gn,224,40.77,24.511,1,5.29,0.42,6.75\nbotnet50ts_256,256,40.7,24.554,1,22.74,5.54,22.23\nseresnetaa50d,288,40.59,24.622,1,28.11,8.92,20.59\nregnetz_040_h,256,40.56,24.64,1,28.94,4.12,24.29\nconvformer_s18,224,40.53,24.655,1,26.77,3.96,15.82\ncspresnet50w,256,40.48,24.69,1,28.12,5.04,12.19\nconvnext_tiny_hnf,224,40.44,24.711,1,28.59,4.47,13.44\nxception41,299,40.35,24.765,1,26.97,9.28,39.86\ngcresnet50t,288,40.34,24.775,1,25.9,6.86,18.57\nfocalnet_tiny_srf,224,40.29,24.806,1,28.43,4.42,16.32\ncoatnext_nano_rw_224,224,40.21,24.856,1,14.7,2.47,12.8\ngcvit_xtiny,224,40.09,24.927,1,19.98,2.93,20.26\nefficientnet_b3_gn,288,39.94,25.025,1,11.73,1.74,23.35\nmixnet_l,224,39.77,25.13,1,7.33,0.58,10.84\ntwins_pcpvt_base,224,39.74,25.15,1,43.83,6.68,25.25\ncs3darknet_x,256,39.65,25.205,1,35.05,8.38,11.35\ndarknetaa53,288,39.58,25.247,1,36.02,10.08,15.68\nresmlp_36_224,224
,39.56,25.262,1,44.69,8.91,16.33\nefficientnet_b0_g8_gn,224,39.45,25.332,1,6.56,0.66,6.75\nres2net50_14w_8s,224,39.38,25.381,1,25.06,4.21,13.28\ncs3darknet_focus_x,256,39.3,25.428,1,35.02,8.03,10.69\nvit_base_resnet26d_224,224,39.28,25.441,1,101.4,6.97,13.16\ntf_efficientnetv2_s,300,39.24,25.472,1,21.46,5.35,22.73\nresnet50_gn,224,39.23,25.474,1,25.56,4.14,11.11\nlevit_conv_512,224,39.13,25.542,1,95.17,5.64,10.22\nhrnet_w18_small_v2,224,39.05,25.592,1,15.6,2.62,9.65\nhiera_base_224,224,38.8,25.755,1,51.52,9.4,30.42\nregnetz_d32,256,38.77,25.776,1,27.58,5.98,23.74\nrepvit_m1_5,224,38.77,25.781,1,14.64,2.31,15.7\necaresnet101d_pruned,288,38.73,25.803,1,24.88,5.75,12.71\nefficientnetv2_rw_s,288,38.61,25.887,1,23.94,4.91,21.41\neca_nfnet_l0,224,38.59,25.896,1,24.14,4.35,10.47\nregnety_040_sgn,288,38.56,25.92,1,20.65,6.67,20.3\nconvnext_tiny,384,38.48,25.972,1,28.59,13.14,39.48\ndarknet53,256,38.45,25.996,1,41.61,9.31,12.39\nresnet101c,224,38.43,26.009,1,44.57,8.08,17.04\nresnet101d,224,38.39,26.031,1,44.57,8.08,17.04\nlevit_conv_512d,224,38.29,26.101,1,92.5,5.85,11.3\nefficientnet_b4,320,38.25,26.128,1,19.34,3.13,34.76\nvit_medium_patch16_reg1_gap_256,256,38.21,26.156,1,38.88,10.63,22.26\ncait_xxs36_224,224,38.09,26.236,1,17.3,3.77,30.34\nresnetv2_101,224,37.98,26.313,1,44.54,7.83,16.23\nresnet101,224,37.96,26.325,1,44.55,7.83,16.23\nvit_medium_patch16_reg4_gap_256,256,37.91,26.362,1,38.88,10.76,22.6\nmobilenetv4_conv_aa_large,480,37.86,26.401,1,32.59,11.05,50.45\nvit_base_patch32_384,384,37.85,26.406,1,88.3,13.06,16.5\nresnetv2_50d_gn,224,37.84,26.412,1,25.57,4.38,11.92\ndla102x2,224,37.76,26.469,1,41.28,9.34,29.91\nnf_ecaresnet50,224,37.64,26.55,1,25.56,4.21,11.13\nresnetv2_101d,224,37.5,26.654,1,44.56,8.07,17.04\nregnety_080,288,37.29,26.802,1,39.18,13.22,29.69\nvit_betwixt_patch16_reg4_gap_256,256,37.28,26.811,1,60.4,16.52,28.24\nxcit_tiny_24_p16_384,384,37.27,26.819,1,12.12,6.87,34.29\nresnet101s,224,37.22,26.85,1,44.67,9.19,18.64\nresnet101_clip_gap,224,37.2,26.86
6,1,42.52,9.11,17.56\nvit_betwixt_patch16_reg1_gap_256,256,37.17,26.887,1,60.4,16.32,27.83\nregnety_120,224,37.07,26.958,1,51.82,12.14,21.38\nresnetaa101d,224,37.03,26.988,1,44.57,9.12,17.56\nvit_base_patch32_clip_384,384,36.96,27.038,1,88.3,13.06,16.5\nfocalnet_tiny_lrf,224,36.82,27.146,1,28.65,4.49,17.76\nregnety_064,288,36.7,27.234,1,30.58,10.56,27.11\nresnetblur101d,224,36.7,27.233,1,44.57,9.12,17.94\nswinv2_tiny_window8_256,256,36.67,27.255,1,28.35,5.96,24.57\nefficientvit_b3,256,36.65,27.272,1,48.65,5.2,35.01\nvit_small_resnet50d_s16_224,224,36.62,27.293,1,57.53,13.48,24.82\nconvnextv2_nano,288,36.54,27.35,1,15.62,4.06,13.84\nhgnetv2_b5,224,36.31,27.528,1,39.57,6.56,11.19\nnfnet_l0,224,36.3,27.529,1,35.07,4.36,10.47\nefficientnet_b3_g8_gn,288,36.2,27.606,1,14.25,2.59,23.35\nres2net50_26w_6s,224,36.18,27.623,1,37.05,6.33,15.28\nefficientvit_b3,224,36.11,27.679,1,48.65,3.99,26.9\nresnet50_gn,288,36.07,27.705,1,25.56,6.85,18.37\nmobilevitv2_175,384,36.06,27.718,1,14.25,12.47,63.29\nresnetv2_50d_evos,224,36.01,27.757,1,25.59,4.33,11.92\nvit_medium_patch16_rope_reg1_gap_256,256,35.85,27.875,1,38.74,10.63,22.26\nselecsls60,224,35.65,28.033,1,30.67,3.59,5.52\ndarknet53,288,35.59,28.085,1,41.61,11.78,15.68\nvit_betwixt_patch16_rope_reg4_gap_256,256,35.51,28.145,1,60.23,16.52,28.24\nnf_seresnet50,224,35.24,28.358,1,28.09,4.21,11.13\nresnet101d,256,35.24,28.36,1,44.57,10.55,22.25\nregnetx_160,224,35.17,28.416,1,54.28,15.99,25.52\nefficientnet_b3_gn,320,35.01,28.544,1,11.73,2.14,28.83\nvit_base_patch16_xp_224,224,34.85,28.679,1,86.51,17.56,23.9\ncs3darknet_x,288,34.83,28.698,1,35.05,10.6,14.36\nefficientvit_l2,224,34.79,28.728,1,63.71,6.97,19.58\nresnext101_32x4d,288,34.76,28.749,1,44.18,13.24,35.09\nefficientvit_l2,256,34.74,28.768,1,63.71,9.09,25.49\nconvnext_small,224,34.73,28.78,1,50.22,8.71,21.56\necaresnet101d,224,34.73,28.782,1,44.57,8.08,17.07\nnextvit_base,224,34.7,28.806,1,44.82,8.29,23.71\ndensenet121,288,34.67,28.829,1,7.98,4.74,11.41\nregnetv_064,288,34.58,2
8.905,1,30.58,10.55,27.11\nseresnext101_32x4d,224,34.52,28.956,1,48.96,8.02,21.26\nnf_resnet50,256,34.49,28.98,1,25.56,5.46,14.52\nresnetrs101,192,34.47,28.999,1,63.62,6.04,12.7\nlegacy_seresnext101_32x4d,224,34.44,29.022,1,48.96,8.02,21.26\nmaxvit_tiny_rw_224,224,34.35,29.1,1,29.06,5.11,33.11\nrepvgg_b1g4,224,34.32,29.124,1,39.97,8.15,10.64\nvit_base_patch16_gap_224,224,34.16,29.258,1,86.57,17.49,25.59\nefficientvit_b3,288,34.1,29.309,1,48.65,6.58,44.2\nhalo2botnet50ts_256,256,34.05,29.351,1,22.64,5.02,21.78\nselecsls42,224,33.89,29.496,1,30.35,2.94,4.62\neca_nfnet_l0,288,33.8,29.569,1,24.14,7.12,17.29\nvit_base_patch16_siglip_gap_224,224,33.75,29.618,1,85.8,17.49,23.75\nresnet101_clip,224,33.7,29.656,1,56.26,9.81,18.08\nresnet152,160,33.49,29.847,1,60.19,5.9,11.51\ndeit3_small_patch16_384,384,33.42,29.906,1,22.21,15.52,50.78\nmobilenetv4_hybrid_large,384,33.38,29.94,1,37.76,7.77,34.52\nregnetz_040_h,320,33.3,30.017,1,28.94,6.43,37.94\nxcit_tiny_24_p8_224,224,33.26,30.048,1,12.11,9.21,45.39\ndensenetblur121d,288,33.25,30.063,1,8.0,5.14,13.06\ndensenet169,224,33.23,30.075,1,14.15,3.4,7.3\nresnet101,288,33.13,30.169,1,44.55,12.95,26.83\ntwins_svt_base,224,33.13,30.173,1,56.07,8.59,26.33\ndeit3_base_patch16_224,224,33.11,30.189,1,86.59,17.58,23.9\nlegacy_seresnet101,224,33.08,30.217,1,49.33,7.61,15.74\nregnetz_040,320,32.88,30.4,1,27.12,6.35,37.78\nresnetv2_50x1_bit,224,32.87,30.406,1,25.55,4.23,11.11\nvit_base_resnet50d_224,224,32.87,30.408,1,110.97,8.73,16.92\nvit_base_patch16_rpn_224,224,32.84,30.438,1,86.54,17.49,23.75\npoolformerv2_m36,224,32.8,30.47,1,56.08,8.81,22.02\ntf_efficientnetv2_s,384,32.72,30.552,1,21.46,8.44,35.77\nmobilenetv4_conv_aa_large,544,32.67,30.598,1,32.59,14.19,64.79\nvit_base_patch16_siglip_224,224,32.65,30.616,1,92.88,17.73,24.06\nmixnet_xl,224,32.6,30.656,1,11.9,0.93,14.57\nselecsls60b,224,32.6,30.659,1,32.77,3.63,5.52\nmvitv2_small,224,32.58,30.68,1,34.87,7.0,28.08\nvit_small_patch16_384,384,32.55,30.711,1,22.2,15.52,50.78\ncaformer_s36,2
24,32.53,30.726,1,39.3,8.0,37.53\ntnt_b_patch16_224,224,32.53,30.728,1,65.41,14.09,39.01\nvitamin_small_224,224,32.51,30.744,1,22.03,5.92,26.38\nnf_resnet50,288,32.5,30.752,1,25.56,6.88,18.37\nhgnetv2_b5,288,32.49,30.76,1,39.57,10.84,18.5\ndeit_base_distilled_patch16_224,224,32.46,30.788,1,87.34,17.68,24.05\nvit_base_patch16_clip_quickgelu_224,224,32.46,30.79,1,86.19,17.58,23.9\nvit_base_patch16_224,224,32.37,30.879,1,86.57,17.58,23.9\nconvnextv2_tiny,384,32.27,30.975,1,28.64,13.14,39.48\ncs3sedarknet_x,256,32.26,30.981,1,35.4,8.38,11.35\nhiera_base_plus_224,224,32.24,31.003,1,69.9,12.67,37.98\nseresnet101,224,32.23,31.014,1,49.33,7.84,16.27\npoolformer_m36,224,32.19,31.046,1,56.17,8.8,22.02\nnest_small,224,32.07,31.17,1,38.35,10.35,40.04\nvit_base_patch16_clip_224,224,32.07,31.165,1,86.57,17.58,23.9\nconvnextv2_small,224,32.06,31.176,1,50.32,8.71,21.56\nefficientnetv2_s,384,32.05,31.19,1,21.46,8.44,35.77\nbeitv2_base_patch16_224,224,31.94,31.292,1,86.53,17.58,23.9\npit_b_224,224,31.91,31.326,1,73.76,12.42,32.94\nresnet152,176,31.85,31.385,1,60.19,7.22,13.99\ndeit_base_patch16_224,224,31.84,31.396,1,86.57,17.58,23.9\nxcit_small_12_p16_384,384,31.81,31.426,1,26.25,14.14,36.51\nvit_base_patch16_224_miil,224,31.79,31.443,1,94.4,17.59,23.91\nefficientvit_l2,288,31.76,31.475,1,63.71,11.51,32.19\ncspdarknet53,256,31.74,31.496,1,27.64,6.57,16.81\nfastvit_sa36,256,31.72,31.513,1,31.53,5.64,34.61\nefficientformerv2_l,224,31.66,31.575,1,26.32,2.59,18.54\ncs3edgenet_x,256,31.65,31.583,1,47.82,11.53,12.92\nvit_base_mci_224,224,31.6,31.632,1,86.35,17.73,24.65\nefficientnet_b4,384,31.56,31.668,1,19.34,4.51,50.04\nnest_small_jx,224,31.45,31.779,1,38.35,10.35,40.04\nvit_base_patch32_clip_448,448,31.42,31.814,1,88.34,17.93,23.9\nbeit_base_patch16_224,224,31.4,31.831,1,86.53,17.58,23.9\nnf_regnet_b4,320,31.39,31.841,1,30.21,3.29,19.88\nefficientnet_b3_g8_gn,320,31.38,31.853,1,14.25,3.2,28.83\ndpn98,224,31.36,31.872,1,61.57,11.73,25.2\nnfnet_l0,288,31.34,31.893,1,35.07,7.13,17.29\nsel
ecsls42b,224,31.3,31.933,1,32.46,2.98,4.62\nvit_relpos_base_patch16_224,224,31.26,31.976,1,86.43,17.51,24.97\nefficientformer_l7,224,31.23,32.009,1,82.23,10.17,24.45\nvit_relpos_base_patch16_rpn_224,224,31.19,32.049,1,86.41,17.51,24.97\nefficientnetv2_rw_s,384,31.17,32.062,1,23.94,8.72,38.03\nregnetz_d8,320,31.14,32.1,1,23.37,6.19,37.08\nresnetaa101d,288,31.04,32.202,1,44.57,15.07,29.03\nrepvit_m2_3,224,30.84,32.413,1,23.69,4.57,26.21\nhiera_base_abswin_256,256,30.8,32.449,1,51.27,12.46,40.7\npit_b_distilled_224,224,30.65,32.611,1,74.79,12.5,33.07\nregnetz_d8_evos,320,30.64,32.621,1,23.46,7.03,38.92\nresnetv2_50d_gn,288,30.59,32.672,1,25.57,7.24,19.7\ntf_efficientnet_b4,380,30.51,32.765,1,19.34,4.49,49.49\ninception_next_small,224,30.42,32.862,1,49.37,8.36,19.27\npvt_v2_b2,224,30.36,32.925,1,25.36,4.05,27.53\neva02_small_patch14_336,336,30.29,33.0,1,22.13,15.48,54.33\nresnetblur101d,288,30.04,33.271,1,44.57,15.07,29.65\nvit_base_r26_s32_224,224,30.0,33.321,1,101.38,6.81,12.36\nresnext101_32x8d,176,29.83,33.513,1,88.79,10.33,19.37\nrepvgg_b1,224,29.67,33.688,1,57.42,13.16,10.64\nresnetv2_50d_evos,288,29.65,33.713,1,25.59,7.15,19.7\ntresnet_v2_l,224,29.6,33.773,1,46.17,8.85,16.34\ngcvit_tiny,224,29.49,33.891,1,28.22,4.79,29.82\nresnet101d,320,29.39,34.011,1,44.57,16.48,34.77\nflexivit_base,240,29.33,34.08,1,86.59,20.29,28.36\nregnetz_d32,320,29.32,34.088,1,27.58,9.33,37.08\nresnetv2_101,288,29.31,34.106,1,44.54,12.94,26.83\nmobilenetv4_hybrid_large,448,29.27,34.151,1,37.76,10.74,48.61\necaresnet101d,288,29.15,34.285,1,44.57,13.35,28.19\nsamvit_base_patch16_224,224,29.14,34.296,1,86.46,17.54,24.54\nres2net50_26w_8s,224,29.07,34.384,1,48.4,8.37,17.95\nseresnext101_32x4d,288,29.02,34.449,1,48.96,13.25,35.12\nresnet152c,224,29.01,34.452,1,60.21,11.8,23.36\ncoatnet_1_rw_224,224,28.9,34.585,1,41.72,8.04,34.6\nfastvit_ma36,256,28.73,34.79,1,44.07,7.88,41.09\nres2net101_26w_4s,224,28.72,34.8,1,45.21,8.1,18.45\nxcit_small_24_p16_224,224,28.72,34.804,1,47.67,9.1,23.64\ntresnet_
l,224,28.63,34.917,1,55.99,10.9,11.9\nfastvit_mci1,256,28.61,34.943,1,21.54,4.72,32.84\nnf_regnet_b4,384,28.57,34.992,1,30.21,4.7,28.61\nmvitv2_small_cls,224,28.51,35.066,1,34.87,7.04,28.17\ndla169,224,28.48,35.101,1,53.39,11.6,20.2\nese_vovnet99b,224,28.47,35.103,1,63.2,16.51,11.27\ncs3sedarknet_x,288,28.29,35.337,1,35.4,10.6,14.37\nxception65p,299,28.23,35.414,1,39.82,13.91,52.48\ncs3edgenet_x,288,28.14,35.519,1,47.82,14.59,16.36\nres2net101d,224,28.14,35.527,1,45.23,8.35,19.25\nxception65,299,28.1,35.573,1,39.92,13.96,52.48\ntwins_pcpvt_large,224,28.07,35.609,1,60.99,9.84,35.82\nresnet152,224,27.96,35.749,1,60.19,11.56,22.56\nvit_base_patch16_siglip_gap_256,256,27.94,35.78,1,85.84,23.13,33.23\nregnety_120,288,27.92,35.806,1,51.82,20.06,35.34\nvolo_d2_224,224,27.78,35.98,1,58.68,14.34,41.34\ncait_s24_224,224,27.64,36.167,1,46.92,9.35,40.58\nregnetz_e8,256,27.54,36.301,1,57.7,9.91,40.94\ncoatnet_rmlp_1_rw_224,224,27.53,36.312,1,41.69,7.85,35.47\nresnet152d,224,27.31,36.603,1,60.21,11.8,23.36\nresnetv2_152,224,27.25,36.685,1,60.19,11.55,22.56\ncs3se_edgenet_x,256,27.22,36.716,1,50.72,11.53,12.94\nregnety_160,224,27.12,36.857,1,83.59,15.96,23.04\nresnet152d,256,27.11,36.874,1,60.21,15.41,30.51\nseresnet101,288,27.02,36.999,1,49.33,12.95,26.87\nresnet152s,224,26.88,37.185,1,60.32,12.92,24.96\nvit_small_patch16_36x1_224,224,26.68,37.464,1,64.67,13.71,35.69\nresnetv2_152d,224,26.66,37.494,1,60.2,11.8,23.36\nresnest101e,256,26.65,37.508,1,48.28,13.38,28.66\nvit_base_patch16_siglip_256,256,26.57,37.619,1,92.93,23.44,33.63\ncoatnet_rmlp_1_rw2_224,224,26.54,37.661,1,41.72,8.11,40.13\nresnext101_64x4d,224,26.53,37.681,1,83.46,15.52,31.21\ndavit_small,224,26.46,37.776,1,49.75,8.8,30.49\neva02_base_patch16_clip_224,224,26.43,37.819,1,86.26,17.62,26.32\nconvnext_tiny_hnf,288,26.37,37.908,1,28.59,7.39,22.21\nvit_small_patch16_18x2_224,224,26.34,37.946,1,64.67,13.71,35.69\nxcit_tiny_12_p8_384,384,26.34,37.947,1,6.71,14.13,69.14\nswinv2_cr_small_224,224,26.28,38.042,1,49.7,9.07,50
.27\nlevit_384_s8,224,26.26,38.061,1,39.12,9.98,35.86\nconvmixer_768_32,224,26.25,38.075,1,21.11,19.55,25.95\nnextvit_large,224,26.22,38.123,1,57.87,10.78,28.99\ncoatnet_1_224,224,26.15,38.229,1,42.23,8.7,39.0\nfastvit_mci2,256,26.12,38.263,1,35.82,7.91,43.34\ntresnet_m,448,26.11,38.285,1,31.39,22.99,29.21\nresnetrs101,288,25.92,38.57,1,63.62,13.56,28.53\ngmlp_b16_224,224,25.89,38.602,1,73.08,15.78,30.21\nvit_base_patch16_rope_reg1_gap_256,256,25.73,38.851,1,86.43,23.22,33.39\nvit_small_r26_s32_384,384,25.73,38.852,1,36.47,10.43,29.85\nmaxxvit_rmlp_tiny_rw_256,256,25.67,38.943,1,29.64,6.66,39.76\npoolformerv2_m48,224,25.65,38.971,1,73.35,11.59,29.17\nswinv2_tiny_window16_256,256,25.64,38.993,1,28.35,6.68,39.02\nmixnet_xxl,224,25.63,38.998,1,23.96,2.04,23.43\nvit_base_patch16_reg4_gap_256,256,25.52,39.177,1,86.62,23.5,33.89\ndensenet201,224,25.51,39.186,1,20.01,4.34,7.85\ninception_v4,299,25.47,39.242,1,42.68,12.28,15.09\nswinv2_cr_small_ns_224,224,25.44,39.297,1,49.7,9.08,50.27\nxception71,299,25.35,39.425,1,42.34,18.09,69.92\nmaxvit_tiny_tf_224,224,25.19,39.689,1,30.92,5.6,35.78\nlevit_conv_384_s8,224,25.15,39.74,1,39.12,9.98,35.86\nresnext101_32x8d,224,25.15,39.747,1,88.79,16.48,31.21\nnextvit_small,384,25.13,39.772,1,31.76,17.26,57.14\nfocalnet_small_srf,224,25.12,39.79,1,49.89,8.62,26.26\npoolformer_m48,224,25.05,39.907,1,73.47,11.59,29.17\nxcit_small_12_p8_224,224,24.99,40.001,1,26.21,18.69,47.21\nwide_resnet50_2,176,24.68,40.508,1,68.88,7.29,8.97\nrdnet_tiny,224,24.6,40.632,1,23.86,5.06,15.98\nswin_small_patch4_window7_224,224,24.27,41.189,1,49.61,8.77,27.47\nconvnext_small,288,24.23,41.261,1,50.22,14.39,35.65\ngcvit_small,224,24.2,41.299,1,51.09,8.57,41.61\nconvnext_base,224,24.1,41.474,1,88.59,15.38,28.75\nconvit_base,224,24.07,41.536,1,86.54,17.52,31.77\nhrnet_w18,224,24.04,41.579,1,21.3,4.32,16.31\nhrnet_w18_ssld,224,24.02,41.615,1,21.3,4.32,16.31\ndensenet161,224,23.86,41.894,1,28.68,7.79,11.06\ncrossvit_base_240,240,23.75,42.099,1,105.03,21.22,36.33\nhgn
et_base,224,23.7,42.176,1,71.58,25.14,15.47\ncoat_tiny,224,23.67,42.24,1,5.5,4.35,27.2\nresnet200,224,23.64,42.283,1,64.67,15.07,32.19\nresnet50x4_clip_gap,288,23.59,42.37,1,65.62,19.57,34.11\nefficientvit_l2,384,23.51,42.516,1,63.71,20.45,57.01\nmobilevitv2_200,384,23.51,42.518,1,18.45,16.24,72.34\ndpn131,224,23.43,42.662,1,79.25,16.09,32.97\nresnet152,288,23.31,42.887,1,60.19,19.11,37.28\nrepvgg_b2g4,224,23.16,43.162,1,61.76,12.63,12.9\nlegacy_seresnet152,224,23.14,43.193,1,66.82,11.33,22.08\nconvnextv2_nano,384,23.13,43.219,1,15.62,7.22,24.61\nseresnext101_64x4d,224,23.11,43.257,1,88.23,15.53,31.25\ninception_next_base,224,23.05,43.362,1,86.67,14.85,25.69\ntresnet_xl,224,23.05,43.371,1,78.44,15.2,15.34\nvit_mediumd_patch16_reg4_gap_256,256,23.05,43.371,1,64.11,17.87,37.57\npvt_v2_b3,224,22.89,43.665,1,45.24,6.92,37.7\nefficientnetv2_m,320,22.82,43.798,1,54.14,11.01,39.97\nmixer_l32_224,224,22.8,43.842,1,206.94,11.27,19.86\nmvitv2_base,224,22.79,43.856,1,51.47,10.16,40.5\nseresnet152,224,22.72,43.995,1,66.82,11.57,22.61\nvit_mediumd_patch16_rope_reg1_gap_256,256,22.72,43.992,1,63.95,17.65,37.02\nrdnet_small,224,22.58,44.268,1,50.44,8.74,22.55\ncoat_lite_medium,224,22.54,44.36,1,44.57,9.81,40.06\nselecsls84,224,22.48,44.46,1,50.95,5.9,7.57\nmaxvit_rmlp_small_rw_224,224,22.47,44.485,1,64.9,10.75,49.3\nvit_small_patch8_224,224,22.43,44.574,1,21.67,22.44,80.84\nnest_base,224,22.41,44.608,1,67.72,17.96,53.39\nresnet50x4_clip,288,22.35,44.73,1,87.14,21.35,35.27\nhgnetv2_b6,224,22.21,45.009,1,75.26,16.88,21.23\nseresnext101d_32x8d,224,22.21,45.016,1,93.59,16.72,32.05\ndm_nfnet_f0,192,22.11,45.211,1,71.49,7.21,10.16\nconvformer_s18,384,22.05,45.339,1,26.77,11.63,46.49\nseresnext101_32x8d,224,21.96,45.513,1,93.57,16.48,31.25\nnest_base_jx,224,21.93,45.587,1,67.72,17.96,53.39\nefficientnetv2_rw_m,320,21.92,45.609,1,53.24,12.72,47.14\nvolo_d1_384,384,21.86,45.726,1,26.78,22.75,108.55\nnfnet_f0,192,21.82,45.824,1,71.49,7.21,10.16\nconvnextv2_base,224,21.75,45.952,1,88.72,15.3
8,28.75\nseresnextaa101d_32x8d,224,21.72,46.028,1,93.59,17.25,34.16\ncrossvit_15_dagger_408,408,21.61,46.27,1,28.5,21.45,95.05\ncs3se_edgenet_x,320,21.55,46.387,1,50.72,18.01,20.21\nconvformer_s36,224,21.53,46.433,1,40.01,7.67,30.5\nresnet200d,256,21.53,46.442,1,64.69,20.0,43.09\neva02_base_patch14_224,224,21.38,46.75,1,85.76,23.22,36.55\nwide_resnet50_2,224,21.37,46.783,1,68.88,11.43,14.4\nnextvit_base,384,21.3,46.935,1,44.82,24.64,73.95\nvit_medium_patch16_gap_384,384,21.14,47.294,1,39.03,26.08,67.54\ninception_resnet_v2,299,21.05,47.499,1,55.84,13.18,25.06\nresnet152d,320,21.02,47.565,1,60.21,24.08,47.67\ncoat_mini,224,20.92,47.779,1,10.34,6.82,33.68\nmaxvit_small_tf_224,224,20.92,47.786,1,68.93,11.66,53.17\ndpn107,224,20.79,48.078,1,86.92,18.38,33.46\nseresnet152d,256,20.73,48.221,1,66.84,15.42,30.56\nnf_resnet101,224,20.65,48.421,1,44.55,8.01,16.23\nresnext101_64x4d,288,20.65,48.413,1,83.46,25.66,51.59\nefficientnet_b5,416,20.64,48.433,1,30.39,8.27,80.68\nregnety_160,288,20.58,48.586,1,83.59,26.37,38.07\nswin_s3_base_224,224,20.58,48.567,1,71.13,13.69,48.26\ntwins_svt_large,224,20.54,48.661,1,99.27,15.15,35.1\nresnetrs152,256,20.5,48.776,1,86.62,15.59,30.83\nconvnext_small,384,20.38,49.064,1,50.22,25.58,63.37\nmaxvit_rmlp_small_rw_256,256,20.27,49.32,1,64.9,14.15,66.09\ncaformer_m36,224,20.18,49.536,1,56.2,13.29,50.48\nconvnext_base,256,20.18,49.55,1,88.59,20.09,37.55\nvit_base_patch16_plus_240,240,20.07,49.813,1,117.56,27.41,33.08\neca_nfnet_l1,256,20.0,49.994,1,41.41,9.62,22.04\nswin_s3_small_224,224,19.89,50.262,1,49.74,9.43,37.84\nnf_ecaresnet101,224,19.82,50.439,1,44.55,8.01,16.27\ncaformer_s18,384,19.77,50.571,1,26.34,13.42,77.34\nfocalnet_small_lrf,224,19.76,50.604,1,50.34,8.74,28.61\nmaxxvit_rmlp_small_rw_256,256,19.75,50.614,1,66.01,14.67,58.38\nxcit_medium_24_p16_224,224,19.72,50.699,1,84.4,16.13,31.71\nswinv2_cr_small_ns_256,256,19.71,50.722,1,49.7,12.07,76.21\nmvitv2_base_cls,224,19.64,50.895,1,65.44,10.23,40.65\nhrnet_w30,224,19.56,51.098,1,37.71,8
.15,21.21\ncoatnet_2_rw_224,224,19.47,51.344,1,73.87,15.09,49.22\nhrnet_w18_ssld,288,19.41,51.495,1,21.3,7.14,26.96\ntf_efficientnetv2_m,384,19.31,51.774,1,54.14,15.85,57.52\nregnetz_e8,320,19.3,51.793,1,57.7,15.46,63.94\nnf_regnet_b5,384,19.23,51.987,1,49.74,7.95,42.9\nswinv2_base_window12_192,192,19.21,52.03,1,109.28,11.9,39.72\nnf_seresnet101,224,19.19,52.086,1,49.33,8.02,16.27\nregnetx_320,224,19.18,52.125,1,107.81,31.81,36.3\nnfnet_f0,256,19.11,52.324,1,71.49,12.62,18.05\nseresnet152,288,19.11,52.306,1,66.82,19.11,37.34\ndavit_base,224,19.02,52.571,1,87.95,15.51,40.66\nswinv2_small_window8_256,256,18.89,52.909,1,49.73,11.58,40.14\ndm_nfnet_f0,256,18.8,53.19,1,71.49,12.62,18.05\nefficientnet_b5,448,18.77,53.255,1,30.39,9.59,93.56\ncoatnet_rmlp_2_rw_224,224,18.74,53.333,1,73.88,15.18,54.78\nvolo_d3_224,224,18.72,53.399,1,86.33,20.78,60.09\nresnet200,288,18.7,53.473,1,64.67,24.91,53.21\nwide_resnet50_2,288,18.69,53.49,1,68.88,18.89,23.81\nvit_base_r50_s16_224,224,18.49,54.07,1,97.89,21.66,35.28\ncoatnet_2_224,224,18.28,54.691,1,74.68,16.5,52.67\nhrnet_w32,224,18.26,54.761,1,41.23,8.97,22.02\nswinv2_cr_base_ns_224,224,18.25,54.78,1,87.88,15.86,59.66\nvit_relpos_base_patch16_plus_240,240,18.21,54.9,1,117.38,27.3,34.33\nswinv2_cr_base_224,224,18.2,54.931,1,87.88,15.86,59.66\nfocalnet_base_srf,224,18.18,54.982,1,88.15,15.28,35.01\necaresnet200d,256,18.17,55.035,1,64.69,20.0,43.15\nrepvgg_b2,224,18.08,55.29,1,89.02,20.45,12.9\ntiny_vit_21m_384,384,17.98,55.611,1,21.23,13.77,77.83\nefficientnetv2_m,416,17.86,55.989,1,54.14,18.6,67.5\nhrnet_w40,224,17.79,56.196,1,57.56,12.75,25.29\ncoat_small,224,17.62,56.75,1,21.69,12.61,44.25\nvit_large_patch32_224,224,17.61,56.779,1,305.51,15.39,13.3\nhgnetv2_b6,288,17.56,56.947,1,75.26,27.9,35.09\nseresnext101_32x8d,288,17.5,57.125,1,93.57,27.24,51.63\ntf_efficientnet_b5,456,17.4,57.469,1,30.39,10.46,98.86\nseresnet152d,320,17.32,57.723,1,66.84,24.09,47.72\nvitamin_base_224,224,17.29,57.813,1,87.72,22.68,52.77\nconvformer_m36,224,17.
26,57.916,1,57.05,12.89,42.05\nhgnet_base,288,17.24,58.0,1,71.58,41.55,25.57\nseresnext101d_32x8d,288,17.24,57.996,1,93.59,27.64,52.95\nswin_base_patch4_window7_224,224,17.24,57.978,1,87.77,15.47,36.63\nresnet200d,320,17.21,58.078,1,64.69,31.25,67.33\nhrnet_w44,224,17.11,58.441,1,67.06,14.94,26.92\nfocalnet_base_lrf,224,17.04,58.67,1,88.75,15.43,38.13\nresnetrs200,256,16.99,58.853,1,93.21,20.18,43.42\neca_nfnet_l1,320,16.96,58.934,1,41.41,14.92,34.42\nhrnet_w48,224,16.96,58.95,1,77.47,17.34,28.56\nxcit_small_24_p16_384,384,16.95,58.995,1,47.67,26.72,68.58\nresnetv2_101x1_bit,224,16.93,59.048,1,44.54,8.04,16.23\nresnetrs152,320,16.89,59.196,1,86.62,24.34,48.14\nefficientnetv2_rw_m,416,16.83,59.388,1,53.24,21.49,79.62\nnf_regnet_b5,456,16.76,59.666,1,49.74,11.7,61.95\nseresnextaa101d_32x8d,288,16.71,59.837,1,93.59,28.51,56.44\nrdnet_base,224,16.64,60.076,1,87.45,15.4,31.14\nrepvgg_b3g4,224,16.63,60.126,1,83.83,17.89,15.1\ncaformer_b36,224,16.58,60.287,1,98.75,23.22,67.3\npvt_v2_b5,224,16.49,60.633,1,81.96,11.76,50.92\ncait_xxs24_384,384,16.46,60.75,1,12.03,9.63,122.66\nlevit_512_s8,224,16.29,61.358,1,74.05,21.82,52.28\nseresnet200d,256,16.26,61.47,1,71.86,20.01,43.15\nwide_resnet101_2,176,16.11,62.057,1,126.89,14.31,13.18\necaresnet200d,288,16.1,62.085,1,64.69,25.31,54.59\ncrossvit_18_dagger_408,408,16.07,62.198,1,44.61,32.47,124.87\nhrnet_w48_ssld,224,15.98,62.552,1,77.47,17.34,28.56\nlevit_conv_512_s8,224,15.95,62.68,1,74.05,21.82,52.28\npvt_v2_b4,224,15.87,63.01,1,62.56,10.14,53.74\nsequencer2d_s,224,15.87,63.013,1,27.65,4.96,11.31\nswinv2_small_window16_256,256,15.65,63.865,1,49.73,12.82,66.29\ntf_efficientnetv2_m,480,15.22,65.703,1,54.14,24.76,89.84\nvit_so150m_patch16_reg4_gap_256,256,15.19,65.83,1,134.13,36.75,53.21\nxcit_tiny_24_p8_384,384,15.06,66.387,1,12.11,27.05,132.95\ngcvit_base,224,15.03,66.505,1,90.32,14.87,55.48\nnextvit_large,384,14.94,66.911,1,57.87,32.03,90.76\nresnetv2_50x1_bit,448,14.94,66.918,1,25.55,16.62,44.46\nconvnext_base,288,14.89,67.143,1
,88.59,25.43,47.53\nconvnext_large,224,14.84,67.358,1,197.77,34.4,43.13\nvit_so150m_patch16_reg4_map_256,256,14.81,67.502,1,141.48,37.18,53.68\ntresnet_l,448,14.68,68.089,1,55.99,43.59,47.56\nswinv2_base_window8_256,256,14.67,68.146,1,87.92,20.37,52.59\nseresnet200d,288,14.54,68.758,1,71.86,25.32,54.6\nwide_resnet101_2,224,14.36,69.613,1,126.89,22.8,21.23\nregnety_160,384,14.21,70.348,1,83.59,46.87,67.67\nresnetrs200,320,13.94,71.74,1,93.21,31.51,67.81\nxcit_small_24_p8_224,224,13.94,71.707,1,47.63,35.81,90.78\nseresnextaa101d_32x8d,320,13.92,71.842,1,93.59,35.19,69.67\nhalonet_h1,256,13.84,72.256,1,8.1,3.0,51.17\nmaxxvitv2_rmlp_base_rw_224,224,13.8,72.456,1,116.09,24.2,62.77\nconvnextv2_large,224,13.73,72.793,1,197.96,34.4,43.13\nconvmixer_1536_20,224,13.52,73.944,1,51.63,48.68,33.03\nconvnextv2_base,288,13.48,74.189,1,88.72,25.43,47.53\nxcit_large_24_p16_224,224,13.4,74.599,1,189.1,35.86,47.27\nregnety_320,224,13.37,74.754,1,145.05,32.34,30.26\nconvformer_s36,384,13.27,75.368,1,40.01,22.54,89.62\nhrnet_w48_ssld,288,13.15,76.002,1,77.47,28.66,47.21\nvit_mediumd_patch16_reg4_gap_384,384,13.02,76.816,1,64.27,43.67,113.51\ndensenet264d,224,12.85,77.785,1,72.74,13.57,14.0\nconvformer_b36,224,12.72,78.579,1,99.88,22.69,56.06\nresnetrs270,256,12.68,78.863,1,129.86,27.06,55.84\nswinv2_large_window12_192,192,12.58,79.45,1,228.77,26.17,56.53\nsenet154,224,12.54,79.75,1,115.09,20.77,38.69\nconvnext_large_mlp,256,12.5,80.005,1,200.13,44.94,56.33\nlegacy_senet154,224,12.46,80.273,1,115.09,20.77,38.69\nsequencer2d_m,224,12.45,80.32,1,38.31,6.55,14.26\ncaformer_s36,384,12.33,81.111,1,39.3,26.08,150.33\nrepvgg_b3,224,12.3,81.31,1,123.09,29.16,15.1\nconvnext_base,320,12.28,81.433,1,88.59,31.39,58.68\nvit_betwixt_patch16_reg4_gap_384,384,12.26,81.549,1,60.6,39.71,85.28\nresnest200e,320,12.23,81.771,1,70.2,35.69,82.78\nvit_large_r50_s32_224,224,12.2,81.984,1,328.99,19.58,24.41\ndavit_large,224,12.17,82.125,1,196.81,34.6,60.99\neca_nfnet_l2,320,12.15,82.312,1,56.72,20.95,47.43\nhiera
_large_224,224,12.15,82.309,1,213.74,40.34,83.37\ninception_next_base,384,12.09,82.668,1,86.67,43.64,75.48\nefficientvit_l3,224,12.04,83.006,1,246.04,27.62,39.16\nhrnet_w64,224,11.98,83.453,1,128.06,28.97,35.09\nswinv2_cr_large_224,224,11.95,83.698,1,196.68,35.1,78.42\nconvnext_base,384,11.93,83.813,1,88.59,45.21,84.49\nvit_small_patch14_dinov2,518,11.91,83.943,1,22.06,46.76,198.79\nseresnet269d,256,11.85,84.4,1,113.67,26.59,53.6\nvit_small_patch14_reg4_dinov2,518,11.8,84.751,1,22.06,46.95,199.77\nvolo_d4_224,224,11.77,84.966,1,192.96,44.34,80.22\nswin_large_patch4_window7_224,224,11.62,86.078,1,196.53,34.53,54.94\ndm_nfnet_f1,224,11.52,86.756,1,132.63,17.87,22.94\nmaxvit_rmlp_base_rw_224,224,11.49,87.018,1,116.14,23.15,92.64\nvit_base_patch16_18x2_224,224,11.46,87.257,1,256.73,52.51,71.38\nnfnet_f1,224,11.39,87.745,1,132.63,17.87,22.94\nvit_base_patch16_siglip_gap_384,384,11.37,87.936,1,86.09,55.43,101.3\ndeit_base_distilled_patch16_384,384,11.35,88.072,1,87.63,55.65,101.82\nswinv2_base_window12to16_192to256,256,11.33,88.242,1,87.92,22.02,84.71\ntf_efficientnetv2_l,384,11.33,88.232,1,118.52,36.1,101.16\nefficientnetv2_l,384,11.31,88.404,1,118.52,36.1,101.16\nswinv2_base_window16_256,256,11.31,88.41,1,87.92,22.02,84.71\ncait_xxs36_384,384,11.29,88.568,1,17.37,14.35,183.7\nefficientvit_l3,256,11.28,88.634,1,246.04,36.06,50.98\ndeit3_base_patch16_384,384,11.27,88.711,1,86.88,55.54,101.56\npnasnet5large,331,11.25,88.888,1,86.06,25.04,92.89\nvit_base_patch16_siglip_384,384,11.24,88.971,1,93.18,56.12,102.2\nswinv2_cr_tiny_384,384,11.16,89.625,1,28.33,15.34,161.01\ndeit_base_patch16_384,384,11.12,89.907,1,86.86,55.54,101.56\nmaxvit_base_tf_224,224,11.03,90.659,1,119.47,24.04,95.01\nvit_base_patch16_384,384,11.0,90.869,1,86.86,55.54,101.56\nvit_base_patch16_clip_384,384,10.99,90.939,1,86.86,55.54,101.56\necaresnet269d,320,10.97,91.108,1,102.09,41.53,83.69\ncait_xs24_384,384,10.89,91.815,1,26.67,19.28,183.98\nseresnet269d,288,10.88,91.91,1,113.67,33.65,67.81\ncoat_lite_medi
um_384,384,10.81,92.453,1,44.57,28.73,116.7\nconvnext_large,288,10.6,94.326,1,197.77,56.87,71.29\nmixer_l16_224,224,10.5,95.198,1,208.2,44.6,41.69\nmaxvit_tiny_tf_384,384,10.45,95.712,1,30.98,17.53,123.42\nvolo_d2_384,384,10.43,95.885,1,58.87,46.17,184.51\ntresnet_xl,448,10.39,96.26,1,78.44,60.77,61.31\nconvnextv2_base,384,10.38,96.284,1,88.72,45.21,84.49\nresnext101_32x16d,224,10.37,96.373,1,194.03,36.27,51.18\nxcit_medium_24_p16_384,384,10.37,96.422,1,84.4,47.39,91.64\neca_nfnet_l2,384,10.31,96.967,1,56.72,30.05,68.28\nxcit_small_12_p8_384,384,10.12,98.757,1,26.21,54.92,138.29\ntf_efficientnet_b6,528,10.1,99.042,1,43.04,19.4,167.39\necaresnet269d,352,10.06,99.39,1,102.09,50.25,101.25\ncoatnet_3_rw_224,224,10.0,100.017,1,181.81,33.44,73.83\nresnetv2_101x1_bit,448,9.96,100.363,1,44.54,31.65,64.93\nbeit_base_patch16_384,384,9.75,102.524,1,86.74,55.54,101.56\nconvnextv2_large,288,9.74,102.618,1,197.96,56.87,71.29\ncoatnet_3_224,224,9.56,104.565,1,166.97,36.56,79.01\ncoatnet_rmlp_3_rw_224,224,9.42,106.158,1,165.15,33.56,79.47\ndm_nfnet_f1,320,9.37,106.668,1,132.63,35.97,46.77\nresnetrs270,352,9.37,106.68,1,129.86,51.13,105.48\nefficientnet_b6,528,9.3,107.544,1,43.04,19.4,167.39\nnfnet_f1,320,9.3,107.465,1,132.63,35.97,46.77\nvgg11,224,9.29,107.646,1,132.86,7.61,7.44\nvgg11_bn,224,9.23,108.31,1,132.87,7.62,7.44\nresnetrs350,288,9.0,111.133,1,163.96,43.67,87.09\nmaxxvitv2_rmlp_large_rw_224,224,8.97,111.495,1,215.42,44.14,87.15\neca_nfnet_l3,352,8.9,112.389,1,72.04,32.57,73.12\nvgg13_bn,224,8.86,112.806,1,133.05,11.33,12.25\nvgg13,224,8.83,113.264,1,133.05,11.31,12.25\nefficientvit_l3,320,8.79,113.701,1,246.04,56.32,79.34\nresnet50x16_clip_gap,384,8.68,115.129,1,136.2,70.32,100.64\ntf_efficientnetv2_l,480,8.62,115.963,1,118.52,56.4,157.99\nefficientnetv2_l,480,8.57,116.617,1,118.52,56.4,157.99\ncaformer_m36,384,8.52,117.372,1,56.2,42.11,196.35\nvgg16,224,8.48,117.85,1,138.36,15.47,13.56\nvgg16_bn,224,8.46,118.228,1,138.37,15.5,13.56\nxcit_medium_24_p8_224,224,8.45,118.342
,1,84.32,63.53,121.23\nrdnet_large,224,8.33,120.029,1,186.27,34.74,46.67\nresnet50x16_clip,384,8.33,120.08,1,167.33,74.9,103.54\nnasnetalarge,331,8.32,120.143,1,88.75,23.89,90.56\nconvformer_m36,384,8.26,121.005,1,57.05,37.87,123.56\nvgg19,224,8.22,121.627,1,143.67,19.63,14.86\nvgg19_bn,224,8.17,122.35,1,143.68,19.66,14.86\nvit_base_patch8_224,224,8.12,123.15,1,86.58,78.22,161.69\nconvnext_large_mlp,320,7.99,125.216,1,200.13,70.21,88.02\nvit_base_r50_s16_384,384,7.91,126.378,1,98.95,67.43,135.03\nvolo_d5_224,224,7.87,127.129,1,295.46,72.4,118.11\nvit_large_patch32_384,384,7.85,127.363,1,306.63,45.31,43.86\nsequencer2d_l,224,7.84,127.533,1,54.3,9.74,22.12\nmvitv2_large,224,7.81,128.04,1,217.99,43.87,112.02\ntf_efficientnetv2_xl,384,7.8,128.223,1,208.12,52.81,139.2\nefficientnetv2_xl,384,7.76,128.885,1,208.12,52.81,139.2\nseresnextaa201d_32x8d,320,7.61,131.424,1,149.39,70.22,138.71\nrepvgg_d2se,320,7.53,132.811,1,133.33,74.57,46.82\nresmlp_big_24_224,224,7.43,134.501,1,129.14,100.23,87.31\nmaxvit_small_tf_384,384,7.42,134.741,1,69.02,35.87,183.65\nregnety_320,384,7.4,135.151,1,145.05,95.0,88.87\nnfnet_f2,256,7.39,135.253,1,193.78,33.76,41.85\ndm_nfnet_f2,256,7.38,135.503,1,193.78,33.76,41.85\nswinv2_large_window12to16_192to256,256,7.38,135.529,1,196.74,47.81,121.53\ncait_s24_384,384,7.33,136.424,1,47.06,32.17,245.31\nmvitv2_large_cls,224,7.22,138.516,1,234.58,42.17,111.69\nmaxvit_large_tf_224,224,7.17,139.391,1,211.79,43.68,127.35\nresnest269e,416,7.14,140.099,1,110.93,77.69,171.98\ndeit3_large_patch16_224,224,7.1,140.766,1,304.37,61.6,63.52\nconvnext_xlarge,224,7.09,140.941,1,350.2,60.98,57.5\neca_nfnet_l3,448,7.06,141.587,1,72.04,52.55,118.4\neva_large_patch14_196,196,7.05,141.766,1,304.14,61.57,63.52\nvit_large_patch16_224,224,7.04,141.995,1,304.33,61.6,63.52\nvit_so400m_patch14_siglip_gap_224,224,7.02,142.493,1,412.44,109.57,106.13\nefficientvit_l3,384,7.0,142.789,1,246.04,81.08,114.02\nvit_so400m_patch14_siglip_224,224,6.92,144.563,1,427.68,110.26,106.73\nbeit_la
rge_patch16_224,224,6.89,145.132,1,304.43,61.6,63.52\nresnetrs420,320,6.89,145.155,1,191.89,64.2,126.56\ncoatnet_rmlp_2_rw_384,384,6.88,145.305,1,73.88,47.69,209.43\nbeitv2_large_patch16_224,224,6.81,146.829,1,304.43,61.6,63.52\nflexivit_large,240,6.75,148.061,1,304.36,70.99,75.39\nresnetrs350,384,6.72,148.693,1,163.96,77.59,154.74\ntiny_vit_21m_512,512,6.69,149.476,1,21.27,27.02,177.93\nregnety_640,224,6.62,151.153,1,281.38,64.16,42.5\nconvnext_large_mlp,384,6.22,160.853,1,200.13,101.11,126.74\nconvnext_large,384,6.18,161.678,1,197.77,101.1,126.74\nmaxvit_tiny_tf_512,512,6.18,161.718,1,31.05,33.49,257.59\nvit_base_patch16_siglip_gap_512,512,6.16,162.429,1,86.43,107.0,246.15\nvit_base_patch16_siglip_512,512,6.07,164.62,1,93.52,108.22,247.74\nswinv2_cr_small_384,384,6.06,165.106,1,49.7,29.7,298.03\nvit_large_patch16_siglip_256,256,6.05,165.378,1,315.96,81.34,88.88\nvit_large_patch16_siglip_gap_256,256,6.05,165.228,1,303.36,80.8,88.34\ncaformer_b36,384,6.03,165.848,1,98.75,72.33,261.79\nconvformer_b36,384,6.02,166.077,1,99.88,66.67,164.75\ntf_efficientnet_b7,600,5.99,166.816,1,66.35,38.33,289.94\nnfnet_f2,352,5.93,168.761,1,193.78,63.22,79.06\nvit_large_patch14_224,224,5.93,168.491,1,304.2,81.08,88.79\nvit_large_patch14_clip_224,224,5.91,169.122,1,304.2,81.08,88.79\nvit_large_patch14_clip_quickgelu_224,224,5.9,169.496,1,303.97,81.08,88.79\ndm_nfnet_f2,352,5.88,170.031,1,193.78,63.22,79.06\ndavit_huge,224,5.84,171.147,1,348.92,61.23,81.32\neva02_base_patch14_448,448,5.73,174.651,1,87.12,107.11,259.14\nvolo_d3_448,448,5.72,174.72,1,86.63,96.33,446.83\nvit_large_patch14_xp_224,224,5.68,175.996,1,304.06,81.01,88.79\nresnetv2_152x2_bit,224,5.66,176.636,1,236.34,46.95,45.11\nmaxxvitv2_rmlp_base_rw_384,384,5.61,178.328,1,116.09,72.98,213.74\neva02_large_patch14_224,224,5.59,178.843,1,303.27,81.15,97.2\nxcit_large_24_p16_384,384,5.58,179.303,1,189.1,105.35,137.17\neva02_large_patch14_clip_224,224,5.57,179.623,1,304.11,81.18,97.2\nconvnextv2_large,384,5.56,179.883,1,197.96,101
.1,126.74\nefficientnet_b7,600,5.52,181.092,1,66.35,38.33,289.94\nseresnextaa201d_32x8d,384,5.43,183.998,1,149.39,101.11,199.72\nxcit_small_24_p8_384,384,5.41,184.801,1,47.63,105.24,265.91\ncoatnet_4_224,224,5.4,185.083,1,275.43,62.48,129.26\nconvnext_xlarge,288,5.4,185.103,1,350.2,100.8,95.05\nfocalnet_large_fl3,384,5.39,185.552,1,239.13,105.06,168.04\nefficientnetv2_xl,512,5.36,186.546,1,208.12,93.85,247.32\ntf_efficientnetv2_xl,512,5.33,187.548,1,208.12,93.85,247.32\nvit_large_r50_s32_384,384,5.33,187.657,1,329.09,57.43,76.52\nresnetrs420,416,5.1,195.875,1,191.89,108.45,213.79\nmaxvit_rmlp_base_rw_384,384,5.06,197.69,1,116.14,70.97,318.95\ncait_s36_384,384,4.97,201.009,1,68.37,47.99,367.4\ndm_nfnet_f3,320,4.95,201.925,1,254.92,68.77,83.93\nnfnet_f3,320,4.91,203.587,1,254.92,68.77,83.93\nsam2_hiera_tiny,896,4.84,206.772,1,26.85,99.86,384.63\nfocalnet_large_fl4,384,4.83,206.841,1,239.32,105.2,181.78\nresnetv2_50x3_bit,224,4.67,214.048,1,217.32,37.06,33.34\nswinv2_cr_base_384,384,4.65,215.074,1,87.88,50.57,333.68\nswin_base_patch4_window12_384,384,4.64,215.585,1,87.9,47.19,134.78\nvit_base_patch14_dinov2,518,4.52,221.304,1,86.58,151.71,397.58\nxcit_large_24_p8_224,224,4.51,221.497,1,188.93,141.23,181.56\nvit_base_patch14_reg4_dinov2,518,4.47,223.826,1,86.58,152.25,399.53\nvitamin_large2_224,224,4.35,230.086,1,333.58,75.05,112.83\nvitamin_large_224,224,4.31,232.245,1,333.32,75.05,112.83\nmaxvit_base_tf_384,384,4.15,240.709,1,119.65,73.8,332.9\nmaxvit_xlarge_tf_224,224,4.13,242.143,1,506.99,97.52,191.04\nsam2_hiera_small,896,4.05,246.841,1,33.95,123.99,442.63\nswinv2_cr_huge_224,224,4.04,247.411,1,657.83,115.97,121.08\ndm_nfnet_f3,416,3.93,254.65,1,254.92,115.58,141.78\nnfnet_f3,416,3.9,256.503,1,254.92,115.58,141.78\nresnext101_32x32d,224,3.88,257.499,1,468.53,87.29,91.12\nconvnextv2_huge,224,3.79,263.535,1,660.29,115.0,79.07\nmaxvit_small_tf_512,512,3.77,265.289,1,69.13,67.26,383.77\nregnety_640,384,3.5,285.87,1,281.38,188.47,124.83\nhiera_huge_224,224,3.46,288.977,
1,672.78,124.85,150.95\ndm_nfnet_f4,384,3.43,291.492,1,316.07,122.14,147.57\nnfnet_f4,384,3.41,293.188,1,316.07,122.14,147.57\nxcit_medium_24_p8_384,384,3.41,293.093,1,84.32,186.67,354.73\nefficientnet_b8,672,3.4,294.249,1,87.41,63.48,442.89\nconvnext_xlarge,384,3.35,298.31,1,350.2,179.2,168.99\ntf_efficientnet_b8,672,3.25,307.835,1,87.41,63.48,442.89\nvolo_d4_448,448,3.24,308.298,1,193.41,197.13,527.35\nmvitv2_huge_cls,224,3.22,310.451,1,694.8,120.67,243.63\nswinv2_base_window12to24_192to384,384,3.2,312.151,1,87.92,55.25,280.36\nvit_large_patch16_siglip_gap_384,384,3.12,320.663,1,303.69,190.85,269.55\nresnetv2_152x2_bit,384,3.09,323.865,1,236.34,136.16,132.56\nvit_large_patch16_siglip_384,384,3.09,323.531,1,316.28,192.07,270.75\nswinv2_cr_large_384,384,3.08,324.559,1,196.68,108.96,404.96\nvit_large_patch14_clip_quickgelu_336,336,3.08,324.978,1,304.29,191.11,270.24\ndeit3_large_patch16_384,384,3.07,325.832,1,304.76,191.21,270.24\nswin_large_patch4_window12_384,384,3.07,326.135,1,196.74,104.08,202.16\nvit_large_patch14_clip_336,336,3.06,326.627,1,304.53,191.11,270.24\nvit_large_patch16_384,384,3.06,327.075,1,304.72,191.21,270.24\neva_large_patch14_336,336,3.05,327.44,1,304.53,191.1,270.24\nresnetv2_101x3_bit,224,3.05,327.405,1,387.93,71.23,48.7\nvitamin_large_256,256,2.97,336.409,1,333.38,99.0,154.99\neva02_large_patch14_clip_336,336,2.96,338.131,1,304.43,191.34,289.13\nvitamin_large2_256,256,2.95,339.37,1,333.64,99.0,154.99\nvit_huge_patch14_gap_224,224,2.91,344.196,1,630.76,166.73,138.74\nvitamin_xlarge_256,256,2.89,346.39,1,436.06,130.13,177.37\ndeit3_huge_patch14_224,224,2.86,350.235,1,632.13,167.4,139.41\nvit_huge_patch14_clip_quickgelu_224,224,2.86,349.77,1,632.08,167.4,139.41\nvit_huge_patch14_xp_224,224,2.85,350.634,1,631.8,167.3,139.41\nconvnextv2_huge,288,2.84,351.627,1,660.29,190.1,130.7\nvit_huge_patch14_224,224,2.82,354.377,1,630.76,167.4,139.41\nvit_huge_patch14_clip_224,224,2.82,354.323,1,632.05,167.4,139.41\nfocalnet_xlarge_fl3,384,2.74,365.312,1,408.
79,185.61,223.99\nrdnet_large,384,2.73,366.431,1,186.27,102.09,137.13\nfocalnet_xlarge_fl4,384,2.71,368.544,1,409.03,185.79,242.31\nbeit_large_patch16_384,384,2.68,372.56,1,305.0,191.21,270.24\nvit_giant_patch16_gap_224,224,2.67,374.898,1,1011.37,202.46,139.26\nfocalnet_huge_fl3,224,2.66,376.461,1,745.28,118.26,104.8\nfocalnet_huge_fl4,224,2.66,375.445,1,686.46,118.9,113.34\nmaxvit_large_tf_384,384,2.66,375.382,1,212.03,132.55,445.84\nnfnet_f5,416,2.65,376.958,1,377.21,170.71,204.56\ndm_nfnet_f5,416,2.61,382.719,1,377.21,170.71,204.56\nregnety_1280,224,2.59,386.67,1,644.81,127.66,71.58\nresnetv2_152x2_bit,448,2.59,386.55,1,236.34,184.99,180.43\ndavit_base_fl,768,2.54,394.288,1,90.37,190.32,530.15\nresnet50x64_clip_gap,448,2.54,393.711,1,365.03,253.96,233.22\ncoatnet_5_224,224,2.53,395.688,1,687.47,145.49,194.24\nresnetv2_50x3_bit,448,2.52,397.155,1,217.32,145.7,133.37\nconvnext_xxlarge,256,2.51,397.633,1,846.47,198.09,124.45\ndm_nfnet_f4,512,2.47,405.436,1,316.07,216.26,262.26\nnfnet_f4,512,2.46,405.746,1,316.07,216.26,262.26\nresnet50x64_clip,448,2.45,407.97,1,420.38,265.02,239.13\nsam2_hiera_base_plus,896,2.28,437.683,1,68.68,227.48,828.88\nvit_giant_patch14_224,224,2.23,448.178,1,1012.61,267.18,192.64\nvit_giant_patch14_clip_224,224,2.23,448.488,1,1012.65,267.18,192.64\neva_giant_patch14_224,224,2.22,450.784,1,1012.56,267.18,192.64\neva_giant_patch14_clip_224,224,2.22,449.564,1,1012.59,267.18,192.64\nmaxvit_base_tf_512,512,2.22,449.687,1,119.88,138.02,703.99\nswinv2_large_window12to24_192to384,384,2.09,478.133,1,196.74,116.15,407.83\nnfnet_f6,448,2.08,481.295,1,438.36,229.7,273.62\ndm_nfnet_f6,448,2.07,484.134,1,438.36,229.7,273.62\nvolo_d5_448,448,2.05,486.854,1,295.91,315.06,737.92\nvit_so400m_patch14_siglip_gap_384,384,2.04,490.133,1,412.99,333.46,451.19\nvit_so400m_patch14_siglip_384,384,2.03,492.881,1,428.23,335.4,452.89\nvitamin_large2_336,336,1.99,502.882,1,333.83,175.72,307.47\nvitamin_large_336,336,1.97,507.694,1,333.57,175.72,307.47\ndavit_giant,224,1.9
3,517.662,1,1406.47,192.92,153.06\ndm_nfnet_f5,544,1.87,535.5,1,377.21,290.97,349.71\nnfnet_f5,544,1.84,542.033,1,377.21,290.97,349.71\neva02_large_patch14_448,448,1.79,559.708,1,305.08,362.33,689.95\ncait_m36_384,384,1.77,566.081,1,271.22,173.11,734.81\nconvnextv2_huge,384,1.77,565.32,1,660.29,337.96,232.35\nnfnet_f7,480,1.73,577.613,1,499.5,300.08,355.86\nxcit_large_24_p8_384,384,1.73,579.168,1,188.93,415.0,531.82\ntf_efficientnet_l2,475,1.68,594.956,1,480.31,172.11,609.89\nvitamin_xlarge_336,336,1.67,597.981,1,436.06,230.18,347.33\nvit_huge_patch14_clip_336,336,1.65,604.321,1,632.46,390.97,407.54\nvitamin_large2_384,384,1.6,626.786,1,333.97,234.44,440.16\nmaxvit_xlarge_tf_384,384,1.59,627.883,1,475.32,292.78,668.76\nvitamin_large_384,384,1.59,629.157,1,333.71,234.44,440.16\nregnety_1280,384,1.58,633.28,1,644.81,374.99,210.2\nresnetv2_101x3_bit,448,1.57,635.087,1,387.93,280.33,194.78\nvit_so400m_patch14_siglip_gap_448,448,1.52,659.739,1,413.33,487.18,764.26\nvolo_d5_512,512,1.52,658.657,1,296.09,425.09,1105.37\nvit_large_patch14_dinov2,518,1.5,668.176,1,304.37,507.15,1058.82\ndm_nfnet_f6,576,1.49,670.137,1,438.36,378.69,452.2\nnfnet_f6,576,1.48,676.503,1,438.36,378.69,452.2\nvit_large_patch14_reg4_dinov2,518,1.48,674.657,1,304.37,508.9,1064.02\nbeit_large_patch16_512,512,1.43,700.682,1,305.67,362.24,656.39\nvit_huge_patch14_clip_378,378,1.34,745.367,1,632.68,503.79,572.79\nvit_huge_patch14_clip_quickgelu_378,378,1.34,744.209,1,632.68,503.79,572.79\nvit_gigantic_patch14_clip_224,224,1.26,792.869,1,1844.91,483.96,275.37\nvit_huge_patch16_gap_448,448,1.26,792.181,1,631.67,544.7,636.83\nswinv2_cr_giant_224,224,1.25,799.001,1,2598.76,483.85,309.15\nswinv2_cr_huge_384,384,1.25,802.044,1,657.94,352.04,583.18\nvit_gigantic_patch14_224,224,1.25,797.556,1,1844.44,483.95,275.37\nnfnet_f7,608,1.21,825.686,1,499.5,480.39,570.85\nresnetv2_152x4_bit,224,1.15,870.095,1,936.53,186.9,90.22\nconvnextv2_huge,512,1.11,898.321,1,660.29,600.81,413.07\neva_giant_patch14_336,336,1.07,931.
905,1,1013.01,620.64,550.67\ndavit_huge_fl,768,0.82,1222.73,1,360.64,744.84,1060.3\nmaxvit_xlarge_tf_512,512,0.82,1224.526,1,475.77,534.14,1413.22\nsamvit_base_patch16,1024,0.8,1252.765,1,89.67,486.43,1343.27\nregnety_2560,384,0.72,1397.042,1,1282.6,747.83,296.49\ncait_m48_448,448,0.7,1431.241,1,356.46,329.41,1708.23\nsam2_hiera_large,1024,0.7,1429.016,1,212.15,907.48,2190.34\nefficientnet_l2,800,0.59,1701.073,1,480.31,479.12,1707.39\ntf_efficientnet_l2,800,0.59,1687.246,1,480.31,479.12,1707.39\nresnetv2_152x4_bit,480,0.56,1780.62,1,936.53,844.84,414.26\neva_giant_patch14_560,560,0.47,2137.135,1,1014.45,1906.76,2577.17\nvit_giant_patch14_dinov2,518,0.46,2152.578,1,1136.48,1784.2,2757.89\nvit_giant_patch14_reg4_dinov2,518,0.46,2172.897,1,1136.48,1790.08,2771.21\neva02_enormous_patch14_clip_224,224,0.45,2211.904,1,4350.56,1132.46,497.58\nswinv2_cr_giant_384,384,0.37,2667.378,1,2598.76,1450.71,1394.86\nsamvit_large_patch16,1024,0.33,3039.952,1,308.28,1493.86,2553.78\nvit_so400m_patch14_siglip_gap_896,896,0.28,3633.38,1,416.87,2731.49,8492.88\nsamvit_huge_patch16,1024,0.22,4556.317,1,637.03,2982.23,3428.16\n"
  },
  {
    "path": "results/benchmark-train-amp-nchw-pt112-cu113-rtx3090.csv",
    "content": "model,train_samples_per_sec,train_step_time,train_batch_size,train_img_size,param_count\ntinynet_e,10001.12,50.423,512,106,2.04\nmobilenetv3_small_050,7406.47,68.392,512,224,1.59\ntf_mobilenetv3_small_minimal_100,6438.14,78.983,512,224,2.04\nmobilenetv3_small_075,6186.83,82.006,512,224,2.04\ntf_mobilenetv3_small_075,5783.46,87.782,512,224,2.04\nmobilenetv3_small_100,5749.13,88.315,512,224,2.54\nlcnet_035,5673.53,89.75,512,224,1.64\ntf_mobilenetv3_small_100,5383.9,94.36,512,224,2.54\nlevit_128s,5298.88,95.701,512,224,7.78\nlcnet_050,5280.37,96.452,512,224,1.88\ntinynet_d,5161.83,98.416,512,152,2.34\nmixer_s32_224,4696.33,108.475,512,224,19.1\nresnet10t,4669.46,109.393,512,176,5.44\nvit_small_patch32_224,4447.28,114.289,512,224,22.88\nlcnet_075,4278.23,119.175,512,224,2.36\nvit_tiny_r_s16_p8_224,4137.87,122.895,512,224,6.34\nlevit_128,3895.0,130.318,512,224,9.21\nregnetx_002,3718.05,137.026,512,224,2.68\nlcnet_100,3569.0,142.969,512,224,2.95\nmnasnet_small,3450.28,147.453,512,224,2.03\nregnety_002,3414.18,149.006,512,224,3.16\ncs3darknet_focus_s,3251.91,156.949,512,256,3.27\nmobilenetv2_035,3160.04,161.202,512,224,1.68\nlevit_192,3046.5,166.9,512,224,10.95\ngernet_s,3034.31,168.028,512,224,8.17\ntinynet_c,2919.98,174.314,512,184,2.46\nmnasnet_050,2847.14,179.025,512,224,2.22\ncs3darknet_s,2821.27,180.951,512,256,3.28\nresnet18,2764.22,184.877,512,224,11.69\nssl_resnet18,2760.71,185.109,512,224,11.69\nmobilenetv2_050,2751.58,185.257,512,224,1.97\nswsl_resnet18,2742.47,186.338,512,224,11.69\nsemnasnet_050,2741.67,185.816,512,224,2.08\ngluon_resnet18_v1b,2741.53,186.395,512,224,11.69\nlcnet_150,2713.5,188.193,512,224,4.5\nregnetx_004,2695.23,188.875,512,224,5.16\nese_vovnet19b_slim_dw,2588.37,197.313,512,224,1.9\nseresnet18,2562.51,199.293,512,224,11.78\nnf_regnet_b0,2561.76,198.646,512,192,8.76\nlegacy_seresnet18,2500.8,204.207,512,224,11.78\ntf_efficientnetv2_b0,2483.22,204.949,512,192,7.14\nlevit_256,2482.39,205.091,512,224,18.89\nmobilenetv3_large_075
,2392.41,213.119,512,224,3.99\nresnet14t,2385.69,214.281,512,176,10.08\ntf_mobilenetv3_large_minimal_100,2347.68,217.368,512,224,3.92\nvit_tiny_patch16_224,2293.54,222.408,512,224,5.72\nregnetx_006,2293.09,222.433,512,224,6.2\ndeit_tiny_patch16_224,2290.53,222.68,512,224,5.72\ntf_mobilenetv3_large_075,2259.6,225.688,512,224,3.99\ndeit_tiny_distilled_patch16_224,2253.36,226.358,512,224,5.91\nedgenext_xx_small,2231.33,228.598,512,256,1.33\nghostnet_050,2189.91,232.414,512,224,2.59\nmobilenetv3_rw,2184.31,233.512,512,224,5.48\nmnasnet_075,2176.02,234.492,512,224,3.17\nmobilenetv3_large_100,2167.29,235.344,512,224,5.48\nmobilenetv3_large_100_miil,2165.63,235.504,512,224,5.48\nlevit_256d,2159.5,235.516,512,224,26.21\nresnet18d,2129.12,240.084,512,224,11.71\nhardcorenas_a,2118.32,240.968,512,224,5.26\nregnety_004,2100.99,242.536,512,224,4.34\npit_ti_distilled_224,2086.5,244.504,512,224,5.1\npit_ti_224,2079.54,245.311,512,224,4.85\nese_vovnet19b_slim,2066.1,247.446,512,224,3.17\nmnasnet_100,2053.84,248.477,512,224,4.38\ntf_mobilenetv3_large_100,2053.63,248.437,512,224,5.48\nmnasnet_b1,2053.54,248.485,512,224,4.38\nsemnasnet_075,2008.51,253.986,512,224,2.91\nhardcorenas_b,2008.46,253.96,512,224,5.18\nmobilenetv2_075,1983.69,257.32,512,224,2.64\nhardcorenas_c,1977.37,257.94,512,224,5.52\nxcit_nano_12_p16_224_dist,1970.62,258.036,512,224,3.05\nxcit_nano_12_p16_224,1969.78,258.084,512,224,3.05\ntinynet_b,1965.95,259.368,512,188,3.73\nhardcorenas_d,1880.3,271.085,512,224,7.5\ntf_efficientnetv2_b1,1876.23,271.395,512,192,8.14\nresnetblur18,1872.21,273.11,512,224,11.69\nspnasnet_100,1862.13,273.955,512,224,4.42\nmnasnet_a1,1859.21,274.476,512,224,3.89\nsemnasnet_100,1857.75,274.693,512,224,3.89\nmobilenetv2_100,1832.14,278.633,512,224,3.5\nregnety_006,1809.24,281.912,512,224,6.06\nvisformer_tiny,1802.41,283.384,512,224,10.32\nmixer_b32_224,1784.58,286.101,512,224,60.29\nskresnet18,1730.13,295.275,512,224,11.96\ntinynet_a,1710.13,298.117,512,192,6.19\nvit_base_patch32_224_sam,1703
.64,299.668,512,224,88.22\nvit_base_patch32_224,1703.57,299.695,512,224,88.22\nefficientnet_lite0,1674.68,304.971,512,224,4.65\ncs3darknet_focus_m,1668.48,306.209,512,256,9.3\nhardcorenas_e,1650.74,309.021,512,224,8.07\nhardcorenas_f,1646.88,309.777,512,224,8.2\ngluon_resnet34_v1b,1634.03,312.731,512,224,21.8\nregnetx_008,1632.2,312.851,512,224,7.26\ntv_resnet34,1630.02,313.513,512,224,21.8\nresnet34,1622.41,314.992,512,224,21.8\nghostnet_100,1601.5,318.319,512,224,5.18\ntf_efficientnet_lite0,1591.79,320.884,512,224,4.65\nfbnetc_100,1567.77,325.605,512,224,5.57\npit_xs_distilled_224,1551.83,329.02,512,224,11.0\npit_xs_224,1549.02,329.642,512,224,10.62\nmixer_s16_224,1543.23,331.197,512,224,18.53\ndla46_c,1532.94,333.18,512,224,1.3\nmnasnet_140,1525.17,334.879,512,224,7.12\nseresnet34,1505.77,339.147,512,224,21.96\ncs3darknet_m,1499.82,340.716,512,256,9.31\nregnety_008,1498.63,340.596,512,224,6.26\nlevit_384,1491.26,342.207,512,224,39.13\nedgenext_x_small,1481.71,344.446,512,256,2.34\nese_vovnet19b_dw,1466.46,348.623,512,224,6.54\nlegacy_seresnet34,1465.81,348.38,512,224,21.96\nefficientnet_b0,1459.11,262.1,384,224,5.29\ngernet_m,1456.76,350.74,512,224,21.14\nvit_small_patch32_384,1448.56,352.604,512,384,22.92\nregnetz_005,1448.06,352.165,512,224,7.12\nrexnet_100,1447.81,264.049,384,224,4.8\nrexnetr_100,1441.71,265.216,384,224,4.88\nnf_resnet26,1422.76,359.346,512,224,16.0\nhrnet_w18_small,1410.43,361.614,512,224,13.19\nselecsls42,1405.04,363.736,512,224,30.35\nselecsls42b,1401.22,364.735,512,224,32.46\nmobilenetv2_110d,1400.15,273.199,384,224,4.52\ntf_efficientnet_b0_ap,1398.67,273.43,384,224,5.29\nmobilevitv2_050,1396.45,365.664,512,256,1.37\ntf_efficientnet_b0_ns,1395.54,274.064,384,224,5.29\ntf_efficientnet_b0,1395.32,274.114,384,224,5.29\ntf_efficientnetv2_b2,1392.9,365.948,512,208,10.1\nvit_tiny_r_s16_p8_384,1392.75,274.873,384,384,6.36\nresnet34d,1379.64,370.514,512,224,21.82\nghostnet_130,1364.55,373.824,512,224,7.36\ngmixer_12_224,1352.72,377.701,512,224,12.
7\ncrossvit_tiny_240,1349.19,377.902,512,240,7.01\ngmlp_ti16_224,1340.6,284.894,384,224,5.87\nsemnasnet_140,1340.57,380.992,512,224,6.11\ndla46x_c,1338.33,381.81,512,224,1.07\nxcit_tiny_12_p16_224,1323.84,384.926,512,224,6.72\nxcit_tiny_12_p16_224_dist,1317.19,386.895,512,224,6.72\nresnetrs50,1317.01,387.565,512,160,35.69\nmobilevit_xxs,1316.84,290.489,384,256,1.27\nresnet26,1312.7,389.566,512,224,16.0\nefficientnet_b1_pruned,1301.95,391.798,512,240,6.33\nmobilenetv2_140,1267.4,302.189,384,224,6.11\ndla60x_c,1262.98,404.404,512,224,1.32\ncrossvit_9_240,1260.08,303.33,384,240,8.55\nconvnext_nano_hnf,1235.34,413.703,512,224,15.59\nconvnext_nano_ols,1234.94,413.902,512,224,15.6\npoolformer_s12,1234.11,414.201,512,224,11.92\nconvnext_nano,1233.61,414.261,512,224,15.59\nresmlp_12_distilled_224,1232.37,414.645,512,224,15.35\nresmlp_12_224,1232.04,414.762,512,224,15.35\nfbnetv3_b,1226.89,415.617,512,224,8.6\nnf_regnet_b2,1219.45,418.235,512,240,14.31\nrepvgg_b0,1217.24,419.512,512,224,15.82\nselecsls60b,1214.07,420.825,512,224,32.77\nselecsls60,1211.7,421.663,512,224,30.67\nnf_regnet_b1,1209.03,421.975,512,256,10.22\ncrossvit_9_dagger_240,1206.16,316.906,384,240,8.78\nnf_seresnet26,1198.39,426.558,512,224,17.4\nmixnet_s,1181.75,431.958,512,224,4.13\nnf_ecaresnet26,1174.85,435.233,512,224,16.0\nefficientnet_lite1,1171.46,217.556,256,240,5.42\ndarknet17,1164.06,439.537,512,256,14.3\nefficientnet_es_pruned,1160.76,440.317,512,224,5.44\nefficientnet_es,1160.37,440.47,512,224,5.44\nregnetx_016,1139.3,448.473,512,224,9.19\nfbnetv3_d,1138.14,335.598,384,224,10.31\ntf_efficientnet_es,1136.29,449.83,512,224,5.44\nrexnetr_130,1133.04,224.76,256,224,7.61\ndla34,1132.96,451.315,512,224,15.74\nresnet26d,1119.56,456.822,512,224,16.01\ntf_mixnet_s,1118.11,456.605,512,224,4.13\ntf_efficientnet_lite1,1110.94,229.444,256,240,5.42\nedgenext_small,1109.37,460.388,512,256,5.59\nconvit_tiny,1095.04,466.531,512,224,5.71\nrexnet_130,1094.78,232.699,256,224,7.56\nmobilenetv2_120d,1078.49,236.158,2
56,224,5.83\ndarknet21,1073.87,476.43,512,256,20.86\necaresnet50d_pruned,1067.01,478.899,512,224,19.94\ndeit_small_patch16_224,1053.64,363.563,384,224,22.05\nvit_small_patch16_224,1052.92,363.872,384,224,22.05\ndeit_small_distilled_patch16_224,1032.61,370.971,384,224,22.44\nsedarknet21,1031.46,495.893,512,256,20.95\ngernet_l,1030.31,496.058,512,256,31.08\nefficientnet_b1,1030.3,246.963,256,224,7.79\nrexnetr_150,1022.06,249.288,256,224,9.78\nrepvgg_a2,1010.18,506.008,512,224,28.21\nedgenext_small_rw,1009.52,506.183,512,256,7.83\nskresnet34,1008.96,506.323,512,224,22.28\nresnest14d,979.06,522.497,512,224,10.61\ncs3darknet_focus_l,977.57,391.957,384,256,21.15\ndeit3_small_patch16_224,977.26,391.961,384,224,22.06\ndeit3_small_patch16_224_in21ft1k,976.5,392.276,384,224,22.06\nrexnet_150,965.2,264.04,256,224,9.73\nregnety_016,954.26,534.657,512,224,11.2\nvit_base_patch32_plus_256,951.64,537.091,512,256,119.48\nmobilevitv2_075,947.54,269.157,256,256,2.87\nlegacy_seresnext26_32x4d,946.21,405.17,384,224,16.79\npit_s_224,942.8,270.615,256,224,23.46\npit_s_distilled_224,939.97,271.455,256,224,24.04\nvit_srelpos_small_patch16_224,922.29,415.451,384,224,21.97\nvit_relpos_small_patch16_224,921.7,415.439,384,224,21.98\nefficientnet_b0_g16_evos,909.42,421.149,384,224,8.11\nresnext26ts,905.09,423.733,384,256,10.3\ncs3darknet_l,902.18,282.891,256,256,21.16\ncoat_lite_tiny,893.97,428.624,384,224,5.72\nresnet26t,890.89,574.188,512,256,16.01\nefficientnet_b0_gn,881.54,289.263,256,224,5.29\nresnetv2_50,880.1,580.976,512,224,25.55\nefficientnet_b2_pruned,874.13,291.317,256,260,8.31\nseresnext26ts,867.62,294.407,256,256,10.39\neca_resnext26ts,867.54,294.527,256,256,10.3\ntf_efficientnet_b1,863.78,294.816,256,240,7.79\ntf_efficientnet_b1_ap,863.54,294.906,256,240,7.79\ntf_efficientnet_b1_ns,863.39,294.941,256,240,7.79\ncs3sedarknet_l,861.38,444.523,384,256,21.91\ntf_efficientnetv2_b3,855.0,297.539,256,240,14.36\nefficientnet_lite2,852.1,299.402,256,260,6.09\ntwins_svt_small,851.73,449.18,38
4,224,24.06\ngcresnext26ts,850.58,300.113,256,256,10.48\nefficientnetv2_rw_t,850.16,298.981,256,224,13.65\nbotnet26t_256,849.43,451.465,384,256,12.49\necaresnetlight,846.78,603.683,512,224,30.16\nseresnext26t_32x4d,845.6,453.458,384,224,16.81\nseresnext26tn_32x4d,845.31,453.612,384,224,16.81\nseresnext26d_32x4d,844.96,453.775,384,224,16.81\ncoat_lite_mini,842.06,455.115,384,224,11.01\ntf_efficientnet_cc_b0_8e,837.03,457.594,384,224,24.01\necaresnet101d_pruned,837.02,609.921,512,224,24.88\necaresnext26t_32x4d,835.25,459.196,384,224,15.41\necaresnext50t_32x4d,834.39,459.653,384,224,15.41\ncspresnet50,830.57,461.498,384,256,21.62\nswsl_resnet50,829.79,616.192,512,224,25.56\nssl_resnet50,829.64,616.294,512,224,25.56\ngluon_resnet50_v1b,829.63,616.32,512,224,25.56\nvisformer_small,828.8,462.625,384,224,40.22\ntv_resnet50,826.55,618.618,512,224,25.56\nresnet50,826.06,618.983,512,224,25.56\nvgg11,825.98,619.706,512,224,132.86\nhalonet26t,824.96,464.902,384,256,12.48\nvovnet39a,817.65,625.544,512,224,22.6\ntf_efficientnet_lite2,816.76,312.458,256,260,6.09\nconvnext_tiny_hnf,815.48,312.97,256,224,28.59\nconvnext_tiny_hnfd,815.19,313.078,256,224,28.59\nvit_small_resnet26d_224,813.66,470.891,384,224,63.61\nconvnext_tiny,813.16,313.859,256,224,28.59\nefficientnet_cc_b0_8e,812.96,471.165,384,224,24.01\nvit_relpos_base_patch32_plus_rpn_256,811.26,630.0,512,256,119.42\nmixnet_m,810.2,630.361,512,224,5.01\nefficientnet_cc_b0_4e,808.8,473.577,384,224,13.31\nconvnext_tiny_in22ft1k,808.5,315.666,256,224,28.59\nefficientnet_b2a,800.27,318.401,256,256,9.11\nefficientnet_b2,799.96,318.544,256,256,9.11\nregnetz_b16,796.72,319.811,256,224,9.72\ntresnet_m,792.8,643.233,512,224,31.39\nmobilevit_xs,792.23,321.979,256,256,2.32\necaresnet26t,791.55,484.557,384,256,16.01\ngc_efficientnetv2_rw_t,791.43,320.691,256,224,13.68\nresnetv2_50t,790.83,646.602,512,224,25.57\nresnetv2_50d,790.11,647.216,512,224,25.57\nregnetx_032,787.5,486.368,384,224,15.3\nese_vovnet39b,786.37,650.429,512,224,24.57\ntf_e
fficientnet_cc_b0_4e,784.54,488.254,384,224,13.31\neca_botnext26ts_256,781.43,326.976,256,256,10.59\nresnet32ts,781.02,327.191,256,256,17.96\ntf_mixnet_m,777.93,492.059,384,224,5.01\nresnet33ts,767.78,332.812,256,256,19.68\ngluon_resnet50_v1c,763.32,502.196,384,224,25.58\neca_halonext26ts,763.09,334.836,256,256,10.76\nrexnetr_200,762.4,250.686,192,224,16.52\ndpn68b,756.57,506.252,384,224,12.61\nlambda_resnet26t,751.39,510.429,384,256,10.96\nvit_relpos_small_patch16_rpn_224,751.37,510.029,384,224,21.97\nresnet50t,748.03,512.487,384,224,25.57\ncspresnet50d,746.91,341.842,256,256,21.64\ngluon_resnet50_v1d,746.5,513.543,384,224,25.58\nresnet50d,744.99,514.575,384,224,25.58\nlegacy_seresnet50,744.88,514.356,384,224,28.09\ncspresnet50w,744.04,343.166,256,256,28.12\neca_resnet33ts,743.27,343.735,256,256,19.68\nefficientnet_b0_g8_gn,743.27,343.315,256,224,6.56\nseresnet33ts,742.4,344.003,256,256,19.78\nresnetaa50,741.95,516.711,384,224,25.56\nselecsls84,740.99,689.736,512,224,50.95\ndpn68,739.97,517.747,384,224,12.61\nres2net50_48w_2s,738.06,519.427,384,224,25.29\nvit_small_r26_s32_224,737.22,345.982,256,224,36.43\neca_vovnet39b,735.59,695.354,512,224,22.6\nlambda_resnet26rpt_256,735.09,260.579,192,256,10.99\nnf_regnet_b3,732.33,522.471,384,288,18.59\nrexnet_200,731.68,261.239,192,224,16.37\ndensenet121,730.11,348.758,256,224,7.98\nresnest26d,728.94,526.039,384,224,17.07\nbat_resnext26ts,728.42,350.197,256,256,10.73\nmobilevitv2_100,727.72,262.852,192,256,4.9\ntv_densenet121,727.58,350.07,256,224,7.98\nnf_seresnet50,727.17,526.884,384,224,28.09\ngcresnet33ts,725.89,351.666,256,256,19.88\neca_nfnet_l0,723.69,706.434,512,224,24.14\nnfnet_l0,719.96,532.162,384,224,35.07\nseresnet50,714.65,536.208,384,224,28.09\ntwins_pcpvt_small,714.45,356.63,256,224,24.11\nnf_ecaresnet50,713.69,537.063,384,224,25.56\ndla60,709.61,540.13,384,224,22.04\nefficientnet_em,708.33,360.423,256,240,6.9\nhrnet_w18_small_v2,705.02,723.712,512,224,15.6\nresnetblur50d,704.94,362.275,256,224,25.58\nvgg11_b
n,703.05,545.962,384,224,132.87\nresnetblur50,698.61,548.824,384,224,25.56\nregnety_032,696.58,549.77,384,224,19.44\nnf_resnet50,696.17,550.716,384,256,25.56\nefficientnet_b3_pruned,694.05,367.106,256,300,9.86\ntf_efficientnet_em,690.66,369.697,256,240,6.9\nskresnet50,685.67,371.92,256,224,25.8\nxcit_tiny_24_p16_224,683.44,371.201,256,224,12.12\npoolformer_s24,681.93,374.176,256,224,21.39\nxcit_tiny_24_p16_224_dist,681.85,371.937,256,224,12.12\nvit_base_resnet26d_224,680.96,562.594,384,224,101.4\nvovnet57a,678.75,564.837,384,224,36.64\ndensenet121d,678.22,375.614,256,224,8.0\nresnetaa50d,673.73,569.117,384,224,25.58\ngluon_resnet50_v1s,669.16,573.001,384,224,25.68\ngmixer_24_224,666.22,382.715,256,224,24.72\nswsl_resnext50_32x4d,663.66,577.766,384,224,25.03\nresnext50_32x4d,663.39,577.966,384,224,25.03\nssl_resnext50_32x4d,663.18,578.185,384,224,25.03\ntv_resnext50_32x4d,662.37,578.888,384,224,25.03\ngluon_resnext50_32x4d,662.06,579.185,384,224,25.03\nhaloregnetz_b,660.09,386.296,256,224,11.68\nese_vovnet57b,656.27,584.17,384,224,38.61\ncspresnext50,656.07,389.365,256,256,20.57\nseresnet50t,655.71,584.407,384,224,28.1\nvit_relpos_medium_patch16_cls_224,654.69,389.857,256,224,38.76\nseresnetaa50d,654.11,390.147,256,224,28.11\ndensenetblur121d,649.47,392.249,256,224,8.0\nres2net50_26w_4s,648.76,590.62,384,224,25.7\nfbnetv3_g,647.3,294.603,192,240,16.62\nswin_tiny_patch4_window7_224,646.76,394.841,256,224,28.29\necaresnet50d,643.9,595.437,384,224,25.58\nregnety_040,640.15,598.298,384,224,20.65\ngmlp_s16_224,638.67,299.017,192,224,19.42\ncrossvit_small_240,637.47,399.952,256,240,26.86\nresnext50d_32x4d,635.21,402.121,256,224,25.05\nnfnet_f0,634.03,806.334,512,192,71.49\nvit_srelpos_medium_patch16_224,629.85,405.54,256,224,38.74\nmobilevit_s,629.67,303.779,192,256,5.58\nskresnet50d,628.92,405.574,256,224,25.82\nvit_relpos_medium_patch16_224,628.2,406.369,256,224,38.75\nresnest50d_1s4x24d,628.12,406.263,256,224,25.68\nmixnet_l,627.47,406.445,256,224,7.33\ntf_efficientnet_
b2_ns,627.11,304.591,192,260,9.11\ntf_efficientnet_b2_ap,626.79,304.757,192,260,9.11\ntf_efficientnet_b2,626.11,305.153,192,260,9.11\nregnetx_040,624.89,613.356,384,224,22.12\nregnetv_040,622.71,409.581,256,224,20.64\ndarknetaa53,614.47,415.819,256,256,36.02\nseresnext50_32x4d,613.62,416.021,256,224,27.56\ngluon_seresnext50_32x4d,613.35,416.206,256,224,27.56\nsehalonet33ts,613.13,416.664,256,256,13.69\nlegacy_seresnext50_32x4d,612.89,416.52,256,224,27.56\ndla60x,612.79,416.731,256,224,17.35\ngcresnet50t,611.79,626.12,384,256,25.9\nxcit_nano_12_p16_384_dist,611.55,416.81,256,384,3.05\nresmlp_24_224,609.69,418.351,256,224,30.02\nresmlp_24_distilled_224,609.51,418.474,256,224,30.02\ngcresnext50ts,606.82,314.923,192,256,15.67\ntf_inception_v3,603.29,635.057,384,299,23.83\ngluon_inception_v3,603.22,635.143,384,299,23.83\nadv_inception_v3,603.01,635.347,384,299,23.83\ninception_v3,602.27,636.205,384,299,23.83\ntf_mixnet_l,600.24,424.956,256,224,7.33\ndm_nfnet_f0,600.1,638.573,384,192,71.49\nxcit_small_12_p16_224,598.44,425.955,256,224,26.25\nxcit_small_12_p16_224_dist,598.22,426.013,256,224,26.25\nsemobilevit_s,597.07,320.258,192,256,5.74\ndensenet169,592.78,429.221,256,224,14.15\nres2next50,591.98,431.144,256,224,24.67\nresnetv2_101,590.74,431.806,256,224,44.54\ndarknet53,590.64,432.606,256,256,41.61\nresnetv2_50x1_bit_distilled,587.0,326.262,192,224,25.55\nres2net50_14w_8s,586.94,433.992,256,224,25.06\nswin_s3_tiny_224,586.39,435.576,256,224,28.33\nrepvgg_b1g4,584.26,875.234,512,224,39.97\ndla60_res2net,583.07,437.618,256,224,20.85\ncrossvit_15_240,576.62,331.16,192,240,27.53\ncait_xxs24_224,576.52,441.46,256,224,11.96\ncs3darknet_focus_x,569.86,448.292,256,256,35.02\nresnet101,568.98,448.321,256,224,44.55\ngluon_resnet101_v1b,568.4,448.834,256,224,44.55\ntv_resnet101,566.24,450.547,256,224,44.55\nresnetrs101,564.72,451.1,256,192,63.62\nefficientnet_cc_b1_8e,564.18,452.061,256,240,39.72\ncrossvit_15_dagger_240,558.23,342.033,192,240,28.21\nvit_base_resnet50d_224,557.61,
457.501,256,224,110.97\nmobilevitv2_125,557.38,343.473,192,256,7.48\nxcit_nano_12_p8_224_dist,555.43,459.069,256,224,3.05\nxcit_nano_12_p8_224,555.18,459.311,256,224,3.05\nsebotnet33ts_256,554.49,230.012,128,256,13.7\nresnet51q,551.31,463.504,256,256,35.7\nresnetv2_101d,548.02,465.6,256,224,44.56\ntf_efficientnet_cc_b1_8e,547.16,466.173,256,240,39.72\nresnetv2_50d_gn,546.54,350.469,192,224,25.57\nnf_resnet101,543.9,704.337,384,224,44.55\nvit_base_patch32_384,542.76,470.804,256,384,88.3\ngluon_resnet101_v1c,537.15,475.0,256,224,44.57\ncspdarknet53,537.1,475.617,256,256,27.64\ncs3darknet_x,534.86,477.64,256,256,35.05\nvit_base_r26_s32_224,534.67,357.767,192,224,101.38\nresnest50d,534.66,477.434,256,224,27.48\nresnet50_gn,531.78,360.235,192,224,25.56\nregnetz_c16,530.35,360.552,192,256,13.46\ngluon_resnet101_v1d,528.59,482.76,256,224,44.57\nmixer_b16_224,528.02,484.004,256,224,59.88\nmixer_l32_224,527.33,362.504,192,224,206.94\nmixer_b16_224_miil,526.58,485.347,256,224,59.88\nvit_large_patch32_224,521.73,489.021,256,224,306.54\ndla60_res2next,520.31,490.572,256,224,17.03\necaresnet50t,516.29,494.896,256,256,25.57\ncs3sedarknet_xdw,516.24,246.008,128,256,21.6\nlambda_resnet50ts,515.16,371.658,192,256,21.54\nvit_tiny_patch16_384,512.2,249.072,128,384,5.79\nresnet61q,510.55,375.027,192,256,36.85\nswinv2_cr_tiny_224,505.83,504.823,256,224,28.33\nhalonet50ts,503.76,380.122,192,256,22.73\nrepvgg_b1,503.57,1015.623,512,224,57.42\nswinv2_cr_tiny_ns_224,502.5,508.144,256,224,28.33\ncs3sedarknet_x,501.96,508.547,256,256,35.4\ndla102,497.28,513.14,256,224,33.27\nwide_resnet50_2,495.68,773.85,384,224,68.88\nres2net50_26w_6s,493.57,516.914,256,224,37.05\nresnetaa101d,490.51,520.338,256,224,44.57\nconvnext_small,489.81,390.224,192,224,50.22\nconvnext_small_in22ft1k,489.45,390.576,192,224,50.22\nlegacy_seresnet101,487.73,522.616,256,224,49.33\nvit_relpos_medium_patch16_rpn_224,485.5,526.221,256,224,38.73\nefficientnet_lite3,484.47,263.098,128,300,8.2\ngluon_resnet101_v1s,483.47,527.8
91,256,224,44.67\nseresnet101,480.65,530.38,256,224,49.33\ncs3edgenet_x,477.59,535.019,256,256,47.82\nnest_tiny,476.68,267.593,128,224,17.06\nnf_seresnet101,474.46,537.213,256,224,49.33\nmobilevitv2_150_in22ft1k,473.86,269.132,128,256,10.59\nmobilevitv2_150,473.84,269.144,128,256,10.59\nresnetblur101d,472.23,540.497,256,224,44.57\njx_nest_tiny,472.22,270.163,128,224,17.06\nnf_ecaresnet101,469.53,543.375,256,224,44.55\nvgg13_bn,468.37,546.3,256,224,133.05\ntwins_pcpvt_base,466.47,408.848,192,224,43.83\ntf_efficientnet_lite3,465.4,273.895,128,300,8.2\nvgg16,465.36,824.954,384,224,138.36\nsequencer2d_s,462.43,412.819,192,224,27.65\nmixnet_xl,460.21,554.308,256,224,11.9\ncoat_lite_small,457.06,418.568,192,224,19.84\nefficientnet_b3a,456.95,278.403,128,288,12.23\nefficientnet_b3,456.81,278.448,128,288,12.23\nregnetx_080,454.56,843.629,384,224,39.57\nregnetx_064,452.43,564.953,256,224,26.21\nhalo2botnet50ts_256,451.35,424.392,192,256,22.64\necaresnet101d,447.44,570.33,256,224,44.57\ndensenet201,447.44,425.987,192,224,20.01\nnf_regnet_b4,445.8,428.533,192,320,30.21\nconvit_small,443.63,431.737,192,224,27.78\nefficientnetv2_s,433.27,293.157,128,288,21.46\nskresnext50_32x4d,432.3,590.802,256,224,27.48\ncs3se_edgenet_x,431.68,443.324,192,256,50.72\nbotnet50ts_256,428.8,297.529,128,256,22.74\nssl_resnext101_32x4d,427.28,447.74,192,224,44.18\nresnext101_32x4d,427.18,447.921,192,224,44.18\nswsl_resnext101_32x4d,427.16,447.915,192,224,44.18\ngluon_resnext101_32x4d,427.13,447.97,192,224,44.18\npoolformer_s36,425.0,449.906,192,224,30.86\nese_vovnet39b_evos,421.31,302.862,128,224,24.58\nresnet101d,418.0,457.739,192,256,44.57\ndla102x,417.16,458.658,192,224,26.31\nres2net101_26w_4s,416.51,612.121,256,224,45.21\nlamhalobotnet50ts_256,413.09,463.774,192,256,22.57\ntwins_svt_base,411.8,464.138,192,224,56.07\ncrossvit_18_240,406.79,312.611,128,240,43.27\ntresnet_l,404.84,1261.389,512,224,55.99\nefficientnetv2_rw_s,402.34,315.806,128,288,23.94\nvolo_d1_224,401.47,476.8,192,224,26.63\nresm
lp_36_224,401.06,476.505,192,224,44.69\nres2net50_26w_8s,400.52,636.999,256,224,48.4\nresmlp_36_distilled_224,400.14,477.557,192,224,44.69\nswin_small_patch4_window7_224,399.67,478.499,192,224,49.61\nresnest50d_4s2x40d,396.0,645.092,256,224,30.42\nvit_base_patch16_224_miil,395.78,484.311,192,224,86.54\ncrossvit_18_dagger_240,394.08,322.72,128,240,44.27\ndeit_base_patch16_224,390.88,490.307,192,224,86.57\nvit_base_patch16_224,390.86,490.391,192,224,86.57\nvit_base_patch16_224_sam,390.67,490.608,192,224,86.57\nmobilevitv2_175_in22ft1k,389.97,327.241,128,256,14.25\nmobilevitv2_175,389.95,327.23,128,256,14.25\ntf_efficientnetv2_s_in21ft1k,389.66,326.288,128,300,21.46\ntf_efficientnetv2_s,389.1,326.713,128,300,21.46\nvgg16_bn,388.69,658.276,256,224,138.37\nregnety_064,388.46,657.256,256,224,30.58\nregnety_080,385.84,662.273,256,224,39.18\ndeit_base_distilled_patch16_224,385.62,497.03,192,224,87.34\nxception,385.56,331.194,128,299,22.86\nregnety_040s_gn,384.97,330.927,128,224,20.65\nrepvgg_b2g4,379.42,1348.329,512,224,61.76\nresnetv2_152,379.4,503.868,192,224,60.19\nregnetz_d8,378.76,336.282,128,256,23.37\nhrnet_w18,378.26,671.883,256,224,21.3\nese_vovnet99b,377.27,677.036,256,224,63.2\nvit_small_resnet50d_s16_224,376.52,508.654,192,224,57.53\ncait_xxs36_224,375.8,507.032,192,224,17.3\ngluon_seresnext101_32x4d,375.02,509.78,192,224,48.96\nregnetz_040,374.91,339.544,128,256,27.12\nseresnext101_32x4d,374.73,510.176,192,224,48.96\nregnetv_064,372.69,513.522,192,224,30.58\nregnetz_040h,372.64,341.593,128,256,28.94\nlegacy_seresnext101_32x4d,372.06,513.705,192,224,48.96\ndeit3_base_patch16_224_in21ft1k,371.79,515.431,192,224,86.59\ndeit3_base_patch16_224,371.73,515.464,192,224,86.59\ntf_efficientnet_b3,370.15,344.089,128,300,12.23\ntf_efficientnet_b3_ap,370.14,344.111,128,300,12.23\ntf_efficientnet_b3_ns,370.1,344.134,128,300,12.23\nresnet152,370.08,516.516,192,224,60.19\nvit_relpos_base_patch16_clsgap_224,369.76,518.105,192,224,86.43\nvit_relpos_base_patch16_cls_224,369.34,51
8.67,192,224,86.43\nresnetv2_50d_frn,369.16,345.594,128,224,25.59\ngluon_resnet152_v1b,369.02,517.998,192,224,60.19\ntv_resnet152,369.0,518.088,192,224,60.19\nregnetz_b16_evos,365.3,348.518,128,224,9.74\nsequencer2d_m,363.12,525.505,192,224,38.31\nese_vovnet99b_iabn,362.9,1055.043,384,224,63.2\nresnetv2_152d,360.99,529.48,192,224,60.2\nbeit_base_patch16_224,358.29,534.776,192,224,86.53\nxcit_tiny_12_p16_384_dist,357.91,534.55,192,384,6.72\nvit_relpos_base_patch16_224,355.33,539.194,192,224,86.43\ngluon_resnet152_v1c,354.77,538.797,192,224,60.21\nregnetz_d32,354.52,359.397,128,256,27.58\nswinv2_tiny_window8_256,354.35,540.569,192,256,28.35\nresnetv2_50d_evos,353.36,270.506,96,224,25.59\ndpn92,353.0,723.617,256,224,37.67\nvgg19,352.0,1090.655,384,224,143.67\ngluon_resnet152_v1d,351.06,544.563,192,224,60.21\ndensenet161,346.02,367.416,128,224,28.68\nxception41p,344.85,370.318,128,299,26.91\ngluon_resnet152_v1s,344.7,368.96,128,224,60.32\nmobilevitv2_200,342.4,372.843,128,256,18.45\ntnt_s_patch16_224,342.25,559.037,192,224,23.76\nmobilevitv2_200_in22ft1k,342.08,373.147,128,256,18.45\neca_nfnet_l1,341.07,561.084,192,256,41.41\nhrnet_w32,340.54,747.043,256,224,41.23\ndla169,338.11,565.259,192,224,53.39\nconvnext_base_in22ft1k,337.76,377.102,128,224,88.59\nconvnext_base,336.98,378.091,128,224,88.59\nrepvgg_b2,335.01,1527.215,512,224,89.02\nrepvgg_b3g4,334.01,1148.557,384,224,83.83\nvgg13,331.37,1544.923,512,224,133.05\npit_b_224,331.17,385.577,128,224,73.76\nvgg19_bn,330.96,773.109,256,224,143.68\npit_b_distilled_224,329.46,387.568,128,224,74.79\nregnetx_120,327.41,780.952,256,224,46.11\ntwins_pcpvt_large,322.96,392.17,128,224,60.99\nhrnet_w30,321.86,790.607,256,224,37.71\nlegacy_seresnet152,319.56,397.245,128,224,66.82\ninception_v4,316.87,603.734,192,299,42.68\nseresnet152,313.75,608.677,192,224,66.82\nvit_small_patch16_36x1_224,310.56,409.448,128,224,64.67\ndla102x2,309.09,412.537,128,224,41.28\nxcit_small_24_p16_224_dist,307.83,412.466,128,224,47.67\nconvmixer_1024_20_
ks9_p14,307.81,830.813,256,224,24.38\nvit_small_patch16_18x2_224,307.61,413.3,128,224,64.67\nxcit_small_24_p16_224,307.46,412.867,128,224,47.67\nregnety_120,307.05,623.971,192,224,51.82\npoolformer_m36,303.34,420.132,128,224,56.17\nefficientnet_el_pruned,301.49,423.464,128,300,10.59\nefficientnet_el,301.45,423.5,128,300,10.59\nswinv2_cr_small_ns_224,300.41,423.619,128,224,49.7\nswinv2_cr_small_224,297.65,427.521,128,224,49.7\nmixnet_xxl,297.33,428.503,128,224,23.96\ncait_s24_224,296.96,428.341,128,224,46.92\nnest_small,296.72,321.888,96,224,38.35\ncoat_tiny,296.44,429.708,128,224,5.5\ntf_efficientnet_el,295.51,432.07,128,300,10.59\njx_nest_small,294.83,323.932,96,224,38.35\nefficientnet_b4,293.1,325.442,96,320,19.34\nxception41,293.07,435.505,128,299,26.97\nxcit_tiny_12_p8_224_dist,291.52,437.287,128,224,6.71\ntresnet_xl,291.4,875.028,256,224,78.44\nresnext101_64x4d,291.33,437.816,128,224,83.46\ngluon_resnext101_64x4d,291.25,437.881,128,224,83.46\nswin_s3_small_224,289.76,439.818,128,224,49.74\nwide_resnet101_2,289.62,661.356,192,224,126.89\nxcit_tiny_12_p8_224,289.33,440.549,128,224,6.71\ntwins_svt_large,289.11,440.647,128,224,99.27\nresnet152d,281.47,452.46,128,256,60.21\nswin_base_patch4_window7_224,279.66,455.817,128,224,87.77\nconvnext_tiny_384_in22ft1k,278.8,343.389,96,384,28.59\nresnet200,276.62,459.688,128,224,64.67\nssl_resnext101_32x8d,276.52,461.341,128,224,88.79\nig_resnext101_32x8d,276.39,461.582,128,224,88.79\nresnext101_32x8d,276.22,461.854,128,224,88.79\nswsl_resnext101_32x8d,276.22,461.764,128,224,88.79\nrepvgg_b3,271.93,1411.039,384,224,123.09\nnfnet_f1,271.43,705.161,192,224,132.63\nresnetv2_50d_evob,268.92,355.729,96,224,25.59\ngmlp_b16_224,268.22,356.298,96,224,73.08\ndpn98,267.62,476.602,128,224,61.57\nregnetx_160,266.55,719.244,192,224,54.28\nregnety_160,264.44,724.758,192,224,83.59\ngluon_seresnext101_64x4d,264.12,482.344,128,224,88.23\nens_adv_inception_resnet_v2,261.47,730.952,192,299,55.84\ninception_resnet_v2,261.44,730.919,192,299,55.84\
nxception65p,259.32,492.32,128,299,39.82\nefficientnet_lite4,255.23,249.374,64,380,13.01\nvit_base_patch16_rpn_224,254.51,753.593,192,224,86.54\nresnest101e,253.9,501.575,128,256,48.28\ncrossvit_base_240,253.73,376.737,96,240,105.03\nseresnext101_32x8d,251.44,506.792,128,224,93.57\nvit_relpos_base_patch16_rpn_224,250.62,765.002,192,224,86.41\nvit_base_patch16_plus_240,248.4,514.352,128,240,117.56\ntf_efficientnet_lite4,247.32,257.437,64,380,13.01\nefficientnet_b3_gn,245.44,258.998,64,288,11.73\ndm_nfnet_f1,244.51,521.138,128,224,132.63\nseresnext101d_32x8d,242.49,525.503,128,224,93.59\nseresnet152d,242.25,392.75,96,256,66.84\nxcit_tiny_24_p16_384_dist,241.62,526.318,128,384,12.12\nvit_small_patch16_384,239.05,266.865,64,384,22.2\nvit_relpos_base_patch16_plus_240,238.57,535.322,128,240,117.38\nvit_large_r50_s32_224,237.94,401.033,96,224,328.99\nresnetrs152,237.63,535.199,128,256,86.62\nswinv2_tiny_window16_256,237.41,403.072,96,256,28.35\nseresnextaa101d_32x8d,228.0,559.15,128,224,93.59\nxcit_medium_24_p16_224_dist,227.91,558.239,128,224,84.4\ndeit3_small_patch16_384_in21ft1k,227.77,280.008,64,384,22.21\ndeit3_small_patch16_384,227.76,280.015,64,384,22.21\nxcit_medium_24_p16_224,227.75,558.491,128,224,84.4\nvit_small_r26_s32_384,227.25,280.302,64,384,36.47\nconvit_base,224.86,568.198,128,224,86.54\ngluon_xception65,224.1,426.474,96,299,39.92\nswin_s3_base_224,223.36,426.944,96,224,71.13\ntnt_b_patch16_224,222.94,572.213,128,224,65.41\nxception65,222.93,428.728,96,299,39.92\ncoat_mini,222.88,572.209,128,224,10.34\nvolo_d2_224,222.28,430.111,96,224,58.68\nxcit_small_12_p16_384_dist,221.79,430.984,96,384,26.25\npoolformer_m48,220.53,432.741,96,224,73.47\nhrnet_w40,219.45,869.959,192,224,57.56\nvit_base_r50_s16_224,215.41,443.988,96,224,98.66\nswinv2_cr_base_ns_224,213.88,446.428,96,224,87.88\nsequencer2d_l,213.86,444.098,96,224,54.3\nswinv2_small_window8_256,212.59,449.01,96,256,49.73\nswinv2_cr_base_224,211.39,451.682,96,224,87.88\nmobilevitv2_150_384_in22ft1k,210.38,3
03.23,64,384,10.59\nnest_base,210.2,302.774,64,224,67.72\ntresnet_m_448,209.55,913.447,192,448,31.39\nefficientnetv2_m,207.96,304.545,64,320,54.14\njx_nest_base,207.78,306.371,64,224,67.72\nregnetz_c16_evos,207.35,306.824,64,256,13.49\nhrnet_w44,206.45,925.026,192,224,67.06\nresnet200d,204.47,623.017,128,256,64.69\nefficientnet_b3_g8_gn,203.44,312.836,64,288,14.25\nhrnet_w48,202.15,628.427,128,224,77.47\ndensenet264,202.1,470.789,96,224,72.69\ndpn131,198.25,643.486,128,224,79.25\ntf_efficientnet_b4,194.83,326.399,64,380,19.34\ntf_efficientnet_b4_ap,194.65,326.738,64,380,19.34\ntf_efficientnet_b4_ns,194.23,327.375,64,380,19.34\nxcit_nano_12_p8_384_dist,187.76,338.965,64,384,3.05\nefficientnetv2_rw_m,187.31,338.14,64,320,53.24\ndpn107,187.14,682.151,128,224,86.92\nconvnext_large_in22ft1k,187.05,511.402,96,224,197.77\nconvnext_large,187.01,511.523,96,224,197.77\nnf_regnet_b5,186.49,512.09,96,384,49.74\nxcit_tiny_24_p8_224_dist,183.21,520.533,96,224,12.11\nxcit_tiny_24_p8_224,183.21,520.609,96,224,12.11\nhalonet_h1,177.48,359.151,64,256,8.1\nhrnet_w64,176.04,722.362,128,224,128.06\nmobilevitv2_175_384_in22ft1k,175.76,363.135,64,384,14.25\nsenet154,174.83,545.792,96,224,115.09\nregnety_320,174.41,732.528,128,224,145.05\ngluon_senet154,174.03,548.162,96,224,115.09\nregnetz_e8,173.89,365.999,64,256,57.7\nlegacy_senet154,170.27,560.493,96,224,115.09\nxception71,168.81,376.911,64,299,42.34\nxcit_small_12_p8_224,168.52,377.961,64,224,26.21\nxcit_small_12_p8_224_dist,168.32,378.375,64,224,26.21\nvit_large_patch32_384,168.05,569.595,96,384,306.63\nconvnext_small_384_in22ft1k,164.94,386.292,64,384,50.22\nmixer_l16_224,164.43,582.335,96,224,208.2\necaresnet200d,161.74,392.334,64,256,64.69\nseresnet200d,161.56,391.892,64,256,71.86\nresnetrs200,160.52,394.222,64,256,93.21\ndensenet264d_iabn,158.12,804.924,128,224,72.74\nregnetx_320,155.94,819.702,128,224,107.81\nswin_large_patch4_window7_224,153.79,414.255,64,224,196.53\nvolo_d3_224,152.73,416.584,64,224,86.33\nmobilevitv2_200_384_
in22ft1k,150.68,317.559,48,384,18.45\nswinv2_base_window8_256,150.28,423.39,64,256,87.92\nresnetv2_50x1_bitm,149.04,321.21,48,448,25.55\nnfnet_f2,148.92,641.446,96,256,193.78\nswinv2_small_window16_256,142.83,445.591,64,256,49.73\ntf_efficientnetv2_m,142.41,333.833,48,384,54.14\neca_nfnet_l2,142.2,672.291,96,320,56.72\ntf_efficientnetv2_m_in21ft1k,141.35,336.246,48,384,54.14\nregnetz_d8_evos,132.2,360.995,48,256,23.46\nswinv2_cr_tiny_384,131.5,485.388,64,384,28.33\nig_resnext101_32x16d,130.47,734.203,96,224,194.03\nssl_resnext101_32x16d,130.4,734.63,96,224,194.03\nswsl_resnext101_32x16d,130.37,734.771,96,224,194.03\nxcit_large_24_p16_224,126.98,500.577,64,224,189.1\nxcit_large_24_p16_224_dist,126.97,500.662,64,224,189.1\nseresnet269d,125.7,503.318,64,256,113.67\ndm_nfnet_f2,125.2,507.681,64,256,193.78\nswinv2_cr_large_224,124.7,510.736,64,224,196.68\nxcit_tiny_12_p8_384_dist,122.38,390.412,48,384,6.71\nresnetrs270,121.5,520.761,64,256,129.86\ncrossvit_15_dagger_408,117.57,270.29,32,408,28.5\nvit_large_patch16_224,117.08,544.981,64,224,304.33\nvit_base_patch16_18x2_224,116.55,546.378,64,224,256.73\nconvnext_base_384_in22ft1k,115.97,412.084,48,384,88.59\nconvnext_xlarge_in22ft1k,115.91,550.445,64,224,350.2\ndeit3_large_patch16_224_in21ft1k,113.19,563.501,64,224,304.37\ndeit3_large_patch16_224,113.17,563.634,64,224,304.37\nxcit_small_24_p16_384_dist,112.88,421.839,48,384,47.67\nbeit_large_patch16_224,107.8,591.544,64,224,304.43\nswinv2_base_window16_256,103.68,460.461,48,256,87.92\nswinv2_base_window12to16_192to256_22kft1k,103.56,461.021,48,256,87.92\ntresnet_l_448,103.2,1236.839,128,448,55.99\nvolo_d1_384,99.39,320.613,32,384,26.78\ncait_xxs24_384,97.83,488.033,48,384,12.03\nvit_base_patch16_384,96.96,329.192,32,384,86.86\ndeit_base_patch16_384,96.37,331.171,32,384,86.86\nvolo_d4_224,95.75,498.748,48,224,192.96\ndeit_base_distilled_patch16_384,94.83,336.556,32,384,87.63\nefficientnet_b5,93.71,338.901,32,456,30.39\ndeit3_base_patch16_384,93.22,342.328,32,384,86.88\ndei
t3_base_patch16_384_in21ft1k,92.68,344.327,32,384,86.88\ntf_efficientnet_b5,92.16,344.785,32,456,30.39\ntf_efficientnet_b5_ns,92.11,344.939,32,456,30.39\ntf_efficientnet_b5_ap,91.98,345.405,32,456,30.39\nresnetv2_152x2_bit_teacher,89.37,355.711,32,224,236.34\ncrossvit_18_dagger_408,88.76,358.492,32,408,44.61\nxcit_small_24_p8_224,87.25,546.829,48,224,47.63\nxcit_small_24_p8_224_dist,86.87,549.24,48,224,47.63\nconvmixer_768_32,85.53,1121.025,96,224,21.11\nvit_large_patch14_224,85.27,561.239,48,224,304.2\neca_nfnet_l3,84.61,563.702,48,352,72.04\nresnetv2_101x1_bitm,84.61,187.399,16,448,44.54\nbeit_base_patch16_384,83.67,381.291,32,384,86.74\nresnest200e,83.33,570.802,48,320,70.2\ntf_efficientnetv2_l_in21ft1k,83.27,379.678,32,384,118.52\nefficientnetv2_l,83.27,379.867,32,384,118.52\ntf_efficientnetv2_l,82.74,382.367,32,384,118.52\necaresnet269d,82.15,579.477,48,320,102.09\ntresnet_xl_448,78.31,1222.487,96,448,78.44\nxcit_medium_24_p16_384_dist,77.9,407.346,32,384,84.4\nvit_large_r50_s32_384,77.49,410.426,32,384,329.09\nswinv2_cr_small_384,76.31,416.793,32,384,49.7\nswin_base_patch4_window12_384,74.03,430.327,32,384,87.9\npnasnet5large,68.77,461.392,32,331,86.06\nresnetrs350,68.24,460.967,32,288,163.96\nnfnet_f3,67.87,703.087,48,320,254.92\nnasnetalarge,67.38,469.785,32,331,88.75\nresmlp_big_24_distilled_224,67.03,475.867,32,224,129.14\nresmlp_big_24_224_in22ft1k,67.03,475.857,32,224,129.14\nresmlp_big_24_224,67.02,475.97,32,224,129.14\ncait_xs24_384,65.59,485.229,32,384,26.67\nconvnext_large_384_in22ft1k,63.62,501.159,32,384,197.77\nvit_base_patch8_224,63.42,377.591,24,224,86.58\ncait_xxs36_384,63.23,502.345,32,384,17.37\nig_resnext101_32x32d,62.72,508.666,32,224,468.53\nxcit_tiny_24_p8_384_dist,62.14,511.676,32,384,12.11\nvolo_d5_224,61.58,516.467,32,224,295.46\nvit_base_resnet50_384,61.06,391.43,24,384,98.95\nvit_base_r50_s16_384,61.0,391.773,24,384,98.95\nswinv2_large_window12to16_192to256_22kft1k,60.93,391.352,24,256,196.74\nxcit_medium_24_p8_224,60.43,526.111,32,2
24,84.32\nxcit_medium_24_p8_224_dist,60.03,529.652,32,224,84.32\nxcit_small_12_p8_384_dist,57.75,413.763,24,384,26.21\ndm_nfnet_f3,57.26,554.375,32,320,254.92\nvolo_d2_384,55.56,286.247,16,384,58.87\nefficientnet_b6,54.98,288.003,16,528,43.04\nswinv2_cr_base_384,54.71,436.265,24,384,87.88\ntf_efficientnet_b6,54.38,291.338,16,528,43.04\ntf_efficientnet_b6_ns,54.21,292.241,16,528,43.04\ntf_efficientnet_b6_ap,54.17,292.479,16,528,43.04\nefficientnetv2_xl,53.77,291.666,16,384,208.12\ntf_efficientnetv2_xl_in21ft1k,53.07,295.611,16,384,208.12\nconvmixer_1536_20,50.1,957.271,48,224,51.63\nswinv2_cr_huge_224,49.51,482.114,24,224,657.83\ncait_s24_384,49.37,483.331,24,384,47.06\nresnetrs420,48.12,489.4,24,320,191.89\nxcit_large_24_p16_384_dist,45.6,522.909,24,384,189.1\nswin_large_patch4_window12_384,41.65,382.145,16,384,196.74\nconvnext_xlarge_384_in22ft1k,40.18,595.51,24,384,350.2\nvit_huge_patch14_224,39.94,398.436,16,224,632.05\ndeit3_huge_patch14_224_in21ft1k,38.36,414.578,16,224,632.13\ndeit3_huge_patch14_224,38.33,414.855,16,224,632.13\nnfnet_f4,36.85,646.047,24,384,316.07\nresnest269e,35.75,664.499,24,416,110.93\nresnetv2_50x3_bitm,34.88,457.822,16,448,217.32\nxcit_large_24_p8_224_dist,33.7,471.344,16,224,188.93\nxcit_large_24_p8_224,33.68,471.512,16,224,188.93\nresnetv2_152x2_bit_teacher_384,32.69,487.138,16,384,236.34\nig_resnext101_32x48d,32.39,492.417,16,224,828.41\nswinv2_cr_large_384,32.36,491.839,16,384,196.68\ncait_s36_384,31.92,497.404,16,384,68.37\nefficientnet_b7,31.74,248.492,8,600,66.35\ndm_nfnet_f4,31.4,758.349,24,384,316.07\ntf_efficientnet_b7,31.4,251.12,8,600,66.35\ntf_efficientnet_b7_ns,31.37,251.394,8,600,66.35\ntf_efficientnet_b7_ap,31.35,251.548,8,600,66.35\nxcit_small_24_p8_384_dist,29.2,544.65,16,384,47.63\nvit_large_patch16_384,29.07,411.127,12,384,304.72\ndeit3_large_patch16_384,28.22,423.365,12,384,304.76\ndeit3_large_patch16_384_in21ft1k,28.19,423.825,12,384,304.76\nswinv2_base_window12to24_192to384_22kft1k,28.14,423.938,12,384,87.92\nbeit_l
arge_patch16_384,25.12,475.56,12,384,305.0\nvolo_d3_448,23.79,333.686,8,448,86.63\nnfnet_f5,22.84,694.067,16,416,377.21\nresnetv2_152x2_bitm,22.69,350.236,8,448,236.34\nvit_giant_patch14_224,22.29,356.113,8,224,1012.61\ndm_nfnet_f5,20.95,756.72,16,416,377.21\nxcit_medium_24_p8_384_dist,19.97,397.272,8,384,84.32\nefficientnet_b8,19.9,297.659,6,672,87.41\ntf_efficientnet_b8_ap,19.66,301.198,6,672,87.41\ntf_efficientnet_b8,19.65,301.246,6,672,87.41\nnfnet_f6,18.5,641.193,12,448,438.36\nresnetv2_101x3_bitm,18.0,442.742,8,448,387.93\nvolo_d4_448,16.87,353.154,6,448,193.41\nswinv2_large_window12to24_192to384_22kft1k,16.59,359.187,6,384,196.74\ndm_nfnet_f6,15.07,522.261,8,448,438.36\nswinv2_cr_huge_384,12.92,461.964,6,384,657.94\nnfnet_f7,12.67,622.861,8,480,499.5\ncait_m36_384,11.76,506.439,6,384,271.22\nxcit_large_24_p8_384_dist,11.53,516.755,6,384,188.93\nvolo_d5_448,11.08,357.783,4,448,295.91\ntf_efficientnet_l2_ns_475,10.91,360.832,4,475,480.31\nbeit_large_patch16_512,9.42,422.333,4,512,305.67\nvolo_d5_512,7.72,385.462,3,512,296.09\nresnetv2_152x4_bitm,4.91,404.529,2,480,936.53\ncait_m48_448,4.71,419.69,2,448,356.46\nefficientnet_l2,3.43,285.826,1,800,480.31\ntf_efficientnet_l2_ns,3.42,287.247,1,800,480.31\n"
  },
  {
    "path": "results/benchmark-train-amp-nhwc-pt112-cu113-rtx3090.csv",
    "content": "model,train_samples_per_sec,train_step_time,train_batch_size,train_img_size,param_count\ntinynet_e,11915.85,41.681,512,106,2.04\nmobilenetv3_small_050,11290.99,44.293,512,224,1.59\nlcnet_035,10015.98,50.125,512,224,1.64\nlcnet_050,9286.37,54.37,512,224,1.88\ntf_mobilenetv3_small_minimal_100,9042.22,55.986,512,224,2.04\nmobilenetv3_small_075,8679.98,58.254,512,224,2.04\nmobilenetv3_small_100,8035.08,62.981,512,224,2.54\ntinynet_d,7990.69,63.223,512,152,2.34\ntf_mobilenetv3_small_075,7930.1,63.8,512,224,2.04\ntf_mobilenetv3_small_100,7330.24,69.047,512,224,2.54\nlcnet_075,6950.91,73.156,512,224,2.36\nlevit_128s,6539.16,77.346,512,224,7.78\nresnet10t,6318.63,80.774,512,176,5.44\nmnasnet_small,5607.09,90.422,512,224,2.03\nlcnet_100,5354.67,95.126,512,224,2.95\nmixer_s32_224,4943.04,103.013,512,224,19.1\nmobilenetv2_035,4789.43,106.101,512,224,1.68\nmnasnet_050,4680.08,108.62,512,224,2.22\nlevit_128,4558.28,111.213,512,224,9.21\ncs3darknet_focus_s,4469.48,114.041,512,256,3.27\nvit_small_patch32_224,4445.76,114.324,512,224,22.88\ntinynet_c,4167.16,121.826,512,184,2.46\ngernet_s,4165.03,122.198,512,224,8.17\ncs3darknet_s,4110.51,124.007,512,256,3.28\nregnetx_002,4105.04,124.027,512,224,2.68\nmobilenetv2_050,4051.14,125.606,512,224,1.97\nvit_tiny_r_s16_p8_224,4025.23,126.328,512,224,6.34\nsemnasnet_050,3904.91,130.185,512,224,2.08\nregnety_002,3777.81,134.562,512,224,3.16\nlevit_192,3727.29,136.213,512,224,10.95\nghostnet_050,3670.99,138.144,512,224,2.59\nese_vovnet19b_slim_dw,3629.92,140.575,512,224,1.9\nlcnet_150,3576.28,142.665,512,224,4.5\ngluon_resnet18_v1b,3482.17,146.691,512,224,11.69\nresnet18,3481.78,146.713,512,224,11.69\nswsl_resnet18,3480.5,146.765,512,224,11.69\nssl_resnet18,3477.04,146.904,512,224,11.69\nresnet14t,3472.37,147.102,512,176,10.08\ntf_efficientnetv2_b0,3428.08,148.143,512,192,7.14\ntf_mobilenetv3_large_minimal_100,3366.45,151.356,512,224,3.92\nmnasnet_075,3238.88,157.273,512,224,3.17\ntf_mobilenetv3_large_075,3189.08,159.67,512,224
,3.99\nseresnet18,3138.91,162.608,512,224,11.78\nmobilenetv3_large_075,3095.0,164.56,512,224,3.99\nlegacy_seresnet18,3076.04,165.928,512,224,11.78\nhardcorenas_a,2971.63,171.576,512,224,5.26\nlevit_256,2956.43,172.043,512,224,18.89\nmnasnet_b1,2930.02,173.933,512,224,4.38\nmnasnet_100,2929.31,173.976,512,224,4.38\ntf_mobilenetv3_large_100,2907.93,175.204,512,224,5.48\nresnet18d,2875.3,177.69,512,224,11.71\ntinynet_b,2851.82,178.435,512,188,3.73\nhardcorenas_b,2772.42,183.73,512,224,5.18\nhardcorenas_c,2763.94,184.272,512,224,5.52\nmobilenetv3_rw,2754.46,184.981,512,224,5.48\nnf_regnet_b0,2740.89,185.595,512,192,8.76\nmobilenetv3_large_100_miil,2733.62,186.4,512,224,5.48\nmobilenetv3_large_100,2732.43,186.472,512,224,5.48\nese_vovnet19b_slim,2684.58,190.344,512,224,3.17\nspnasnet_100,2610.47,195.171,512,224,4.42\nmobilenetv2_075,2609.91,195.379,512,224,2.64\nsemnasnet_075,2603.1,195.762,512,224,2.91\nhardcorenas_d,2566.48,198.271,512,224,7.5\ntf_efficientnetv2_b1,2548.95,199.349,512,192,8.14\nlevit_256d,2522.09,201.424,512,224,26.21\nfbnetc_100,2397.58,212.548,512,224,5.57\ntinynet_a,2334.41,218.035,512,192,6.19\nmobilenetv2_100,2313.1,220.563,512,224,3.5\nvit_tiny_patch16_224,2299.56,221.804,512,224,5.72\nmnasnet_a1,2291.94,222.453,512,224,3.89\ndeit_tiny_patch16_224,2290.33,222.697,512,224,5.72\nsemnasnet_100,2279.15,223.737,512,224,3.89\nedgenext_xx_small,2271.04,224.572,512,256,1.33\ndla46_c,2266.89,225.115,512,224,1.3\nhardcorenas_f,2252.64,226.141,512,224,8.2\ndeit_tiny_distilled_patch16_224,2248.67,226.799,512,224,5.91\nhardcorenas_e,2245.94,226.861,512,224,8.07\nxcit_nano_12_p16_224_dist,2177.52,233.052,512,224,3.05\nxcit_nano_12_p16_224,2170.17,234.054,512,224,3.05\ntf_efficientnet_lite0,2134.89,239.057,512,224,4.65\nghostnet_100,2129.82,239.0,512,224,5.18\nhrnet_w18_small,2121.96,239.906,512,224,13.19\nregnety_004,2085.76,244.311,512,224,4.34\nefficientnet_lite0,2079.28,245.485,512,224,4.65\ncs3darknet_focus_m,2062.98,247.547,512,256,9.3\npit_ti_distilled_2
24,2061.94,247.414,512,224,5.1\nmnasnet_140,2060.59,247.645,512,224,7.12\npit_ti_224,2057.02,247.989,512,224,4.85\ngluon_resnet34_v1b,2039.68,250.446,512,224,21.8\ntv_resnet34,2038.39,250.573,512,224,21.8\nresnet34,2036.51,250.813,512,224,21.8\nese_vovnet19b_dw,1999.58,255.562,512,224,6.54\nresnet26,1962.0,260.488,512,224,16.0\ntf_efficientnetv2_b2,1951.52,260.748,512,208,10.1\nskresnet18,1943.81,262.753,512,224,11.96\ncs3darknet_m,1940.79,263.122,512,256,9.31\nregnetz_005,1916.17,265.765,512,224,7.12\nresnetblur18,1897.99,269.406,512,224,11.69\nrexnetr_100,1893.12,201.724,384,224,4.88\nnf_resnet26,1869.64,273.344,512,224,16.0\nmobilenetv2_110d,1868.27,204.505,384,224,4.52\nvisformer_tiny,1861.63,274.356,512,224,10.32\nmixer_b32_224,1856.75,274.965,512,224,60.29\nseresnet34,1837.21,277.783,512,224,21.96\nfbnetv3_b,1825.5,278.744,512,224,8.6\nmobilevitv2_050,1824.87,279.552,512,256,1.37\ngernet_m,1822.12,280.293,512,224,21.14\nresnet34d,1813.16,281.758,512,224,21.82\nlevit_384,1801.0,283.153,512,224,39.13\nlegacy_seresnet34,1781.32,286.529,512,224,21.96\nregnetx_004,1780.61,286.47,512,224,5.16\ntf_efficientnet_b0_ns,1779.24,214.77,384,224,5.29\ntf_efficientnet_b0,1779.02,214.765,384,224,5.29\ntf_efficientnet_b0_ap,1777.73,214.898,384,224,5.29\nefficientnet_b0,1751.72,291.183,512,224,5.29\nselecsls42,1718.76,297.231,512,224,30.35\nselecsls42b,1710.18,298.726,512,224,32.46\nvit_base_patch32_224,1708.63,298.818,512,224,88.22\nvit_base_patch32_224_sam,1707.5,298.997,512,224,88.22\nefficientnet_es_pruned,1687.32,302.688,512,224,5.44\nresnetrs50,1686.45,302.42,512,160,35.69\nefficientnet_es,1686.11,302.906,512,224,5.44\nmixer_s16_224,1660.76,307.737,512,224,18.53\ndarknet17,1654.01,309.253,512,256,14.3\nmobilenetv2_140,1637.57,233.691,384,224,6.11\nfbnetv3_d,1634.54,233.136,384,224,10.31\ntf_efficientnet_es,1623.9,314.542,512,224,5.44\nresnet26d,1623.31,314.899,512,224,16.01\nmobilevit_xxs,1602.81,238.427,384,256,1.27\nresmlp_12_distilled_224,1577.54,323.769,512,224,15.35\
nresmlp_12_224,1577.31,323.803,512,224,15.35\npit_xs_224,1555.66,328.198,512,224,10.62\npit_xs_distilled_224,1555.48,328.255,512,224,11.0\nsemnasnet_140,1546.19,330.184,512,224,6.11\nghostnet_130,1542.47,330.535,512,224,7.36\nrepvgg_b0,1538.07,331.828,512,224,15.82\nefficientnet_lite1,1530.99,166.26,256,240,5.42\ndla34,1524.02,335.337,512,224,15.74\nedgenext_x_small,1512.48,337.399,512,256,2.34\ndarknet21,1486.14,344.159,512,256,20.86\nselecsls60,1482.76,344.397,512,224,30.67\nselecsls60b,1478.62,345.378,512,224,32.77\nnf_seresnet26,1473.71,346.754,512,224,17.4\nvit_small_patch32_384,1455.89,350.818,512,384,22.92\ngmixer_12_224,1448.32,352.721,512,224,12.7\nefficientnet_b1_pruned,1446.82,352.35,512,240,6.33\ntf_efficientnet_lite1,1443.47,176.394,256,240,5.42\nnf_ecaresnet26,1440.41,354.896,512,224,16.0\nxcit_tiny_12_p16_224_dist,1426.36,357.157,512,224,6.72\nxcit_tiny_12_p16_224,1426.18,357.168,512,224,6.72\nsedarknet21,1401.98,364.696,512,256,20.95\nrexnetr_130,1388.84,183.199,256,224,7.61\ndla46x_c,1388.59,367.953,512,224,1.07\ngmlp_ti16_224,1381.11,276.449,384,224,5.87\nmixnet_s,1365.54,373.667,512,224,4.13\nrexnet_100,1364.31,280.319,384,224,4.8\nregnety_006,1361.43,374.963,512,224,6.06\nmobilenetv2_120d,1352.9,188.013,256,224,5.83\nlegacy_seresnext26_32x4d,1349.26,378.798,512,224,16.79\ncrossvit_tiny_240,1348.01,378.219,512,240,7.01\nvit_tiny_r_s16_p8_384,1345.31,284.562,384,384,6.36\npoolformer_s12,1342.54,380.659,512,224,11.92\ndla60x_c,1341.77,380.621,512,224,1.32\nefficientnet_b1,1325.85,191.544,256,224,7.79\nresnetv2_50,1288.61,396.553,512,224,25.55\nregnetx_006,1286.44,397.176,512,224,6.2\ncrossvit_9_240,1258.73,303.637,384,240,8.55\nconvnext_nano_ols,1252.33,408.151,512,224,15.6\nconvnext_nano,1249.89,408.864,512,224,15.59\nconvnext_nano_hnf,1249.05,409.138,512,224,15.59\nresnet26t,1237.34,413.275,512,256,16.01\ntf_mixnet_s,1236.15,412.905,512,224,4.13\nnf_regnet_b2,1229.56,414.759,512,240,14.31\nrexnetr_150,1224.57,207.878,256,224,9.78\ngluon_resnet50_v
1b,1219.23,419.12,512,224,25.56\ntv_resnet50,1218.99,419.17,512,224,25.56\ncrossvit_9_dagger_240,1218.38,313.701,384,240,8.78\nresnet50,1218.01,419.528,512,224,25.56\nswsl_resnet50,1217.39,419.737,512,224,25.56\nssl_resnet50,1217.38,419.757,512,224,25.56\ncs3darknet_focus_l,1216.61,314.788,384,256,21.15\nrepvgg_a2,1214.87,420.579,512,224,28.21\ncs3darknet_l,1203.14,318.267,384,256,21.16\ngernet_l,1201.09,425.379,512,256,31.08\nefficientnet_lite2,1191.67,213.855,256,260,6.09\nnf_regnet_b1,1181.15,431.966,512,256,10.22\nseresnext26d_32x4d,1178.86,325.051,384,224,16.81\nbotnet26t_256,1178.34,325.281,384,256,12.49\nseresnext26tn_32x4d,1177.85,325.355,384,224,16.81\nseresnext26t_32x4d,1176.65,325.669,384,224,16.81\nmobilevitv2_075,1174.29,217.001,256,256,2.87\necaresnext50t_32x4d,1159.52,330.605,384,224,15.41\necaresnext26t_32x4d,1158.26,330.961,384,224,15.41\ngluon_resnet50_v1c,1147.86,333.697,384,224,25.58\nhalonet26t,1136.15,337.402,384,256,12.48\nresnetv2_50d,1134.86,450.316,512,224,25.57\nresnetv2_50t,1132.89,451.133,512,224,25.57\nedgenext_small,1127.71,452.849,512,256,5.59\ntf_efficientnet_lite2,1121.02,227.403,256,260,6.09\nconvit_tiny,1118.98,456.53,512,224,5.71\nskresnet34,1113.08,458.799,512,224,22.28\ntf_efficientnet_b1,1099.77,231.299,256,240,7.79\ntf_efficientnet_b1_ap,1099.37,231.402,256,240,7.79\nefficientnetv2_rw_t,1098.86,230.78,256,224,13.65\ntf_efficientnet_b1_ns,1098.29,231.567,256,240,7.79\necaresnetlight,1091.16,468.275,512,224,30.16\ngluon_resnet50_v1d,1084.38,353.226,384,224,25.58\ndpn68b,1083.77,353.123,384,224,12.61\ncs3sedarknet_l,1083.42,353.12,384,256,21.91\nresnet50d,1078.0,355.348,384,224,25.58\nresnet50t,1076.81,355.721,384,224,25.57\nresnet32ts,1075.86,237.337,256,256,17.96\nresnet33ts,1061.36,240.599,256,256,19.68\nvit_small_patch16_224,1057.92,362.157,384,224,22.05\nresnetaa50,1057.73,362.204,384,224,25.56\nvit_small_resnet26d_224,1057.57,362.04,384,224,63.61\ndeit_small_patch16_224,1050.7,364.638,384,224,22.05\ncspresnet50,1042.19,367
.617,384,256,21.62\ntf_efficientnetv2_b3,1041.71,243.94,256,240,14.36\nregnetx_008,1034.73,493.971,512,224,7.26\necaresnet26t,1033.34,371.048,384,256,16.01\ndeit_small_distilled_patch16_224,1028.8,372.398,384,224,22.44\nvit_relpos_base_patch32_plus_rpn_256,1021.86,499.989,512,256,119.42\ndla60,1020.05,375.488,384,224,22.04\nres2net50_48w_2s,1018.83,376.079,384,224,25.29\ngc_efficientnetv2_rw_t,1014.65,249.524,256,224,13.68\nvit_relpos_small_patch16_rpn_224,1013.69,377.786,384,224,21.97\nedgenext_small_rw,1011.18,505.339,512,256,7.83\npit_s_224,1010.83,378.943,384,224,23.46\nseresnet33ts,1007.26,253.362,256,256,19.78\nefficientnet_em,1007.19,253.179,256,240,6.9\nvovnet39a,1006.62,507.995,512,224,22.6\nlegacy_seresnet50,1003.5,381.52,384,224,28.09\ngluon_resnext50_32x4d,1001.3,382.689,384,224,25.03\ntv_resnext50_32x4d,1001.18,382.711,384,224,25.03\nresnext50_32x4d,1001.03,382.776,384,224,25.03\nssl_resnext50_32x4d,1000.68,382.908,384,224,25.03\neca_resnet33ts,999.77,255.368,256,256,19.68\nswsl_resnext50_32x4d,997.37,384.186,384,224,25.03\nregnety_008,993.3,514.408,512,224,6.26\ndpn68,992.27,385.859,384,224,12.61\ndeit3_small_patch16_224,987.86,387.777,384,224,22.06\ndeit3_small_patch16_224_in21ft1k,987.15,388.058,384,224,22.06\ngcresnet33ts,985.12,258.855,256,256,19.88\nefficientnet_b2a,980.29,259.63,256,256,9.11\ntf_efficientnet_em,980.0,260.253,256,240,6.9\nefficientnet_b2,978.68,260.092,256,256,9.11\nseresnet50,971.79,394.011,384,224,28.09\ngluon_resnet50_v1s,970.71,394.714,384,224,25.68\nvit_srelpos_small_patch16_224,969.18,395.281,384,224,21.97\nvit_relpos_small_patch16_224,965.13,396.742,384,224,21.98\necaresnet50d_pruned,964.18,530.07,512,224,19.94\ncspresnet50d,956.82,266.672,256,256,21.64\nvgg11,954.03,536.508,512,224,132.86\ncspresnet50w,952.27,267.927,256,256,28.12\nese_vovnet39b,951.93,537.173,512,224,24.57\nvit_base_patch32_plus_256,951.5,537.138,512,256,119.48\nresnetaa50d,950.79,403.026,384,224,25.58\neca_vovnet39b,948.4,539.184,512,224,22.6\nlambda_res
net26rpt_256,942.15,203.17,192,256,10.99\npit_s_distilled_224,934.29,273.079,256,224,24.04\nmobilevit_xs,924.5,275.792,256,256,2.32\ntv_densenet121,917.93,277.067,256,224,7.98\ndensenet121,913.65,278.353,256,224,7.98\nresnetblur50,911.91,420.254,384,224,25.56\nhrnet_w18_small_v2,910.26,559.998,512,224,15.6\ncoat_lite_tiny,909.29,421.406,384,224,5.72\nmobilevitv2_100,907.45,281.094,256,256,4.9\nnf_resnet50,900.11,425.722,384,256,25.56\nresnext50d_32x4d,894.57,285.293,256,224,25.05\nnf_seresnet50,892.73,428.967,384,224,28.09\nrexnetr_200,890.57,214.407,192,224,16.52\nefficientnet_cc_b0_4e,890.34,430.073,384,224,13.31\nefficientnet_cc_b0_8e,889.37,430.553,384,224,24.01\ndla60x,886.5,287.775,256,224,17.35\ntwins_svt_small,885.48,432.048,384,224,24.06\nseresnet50t,879.71,435.29,384,224,28.1\nmixnet_m,878.04,581.529,512,224,5.01\nnf_ecaresnet50,875.38,437.674,384,224,25.56\nefficientnet_b2_pruned,873.9,291.355,256,260,8.31\ndensenet121d,873.44,291.238,256,224,8.0\ncspresnext50,868.23,294.006,256,256,20.57\nrexnet_150,866.26,294.391,256,224,9.73\necaresnet50d,862.65,444.205,384,224,25.58\nfbnetv3_g,862.32,220.642,192,240,16.62\nregnetz_b16,862.05,295.457,256,224,9.72\ntf_efficientnet_cc_b0_4e,861.1,444.691,384,224,13.31\ntf_efficientnet_cc_b0_8e,857.16,446.822,384,224,24.01\ngcresnet50t,854.99,447.633,384,256,25.9\nres2net50_26w_4s,851.03,449.921,384,224,25.7\ncoat_lite_mini,849.82,450.985,384,224,11.01\ntf_efficientnet_b2_ap,849.52,224.466,192,260,9.11\ntf_efficientnet_b2,848.58,224.736,192,260,9.11\ntf_efficientnet_b2_ns,847.86,224.983,192,260,9.11\nvit_base_resnet26d_224,844.62,453.315,384,224,101.4\nvgg11_bn,832.74,460.889,384,224,132.87\nvovnet57a,832.06,614.449,512,224,36.64\nselecsls84,830.17,615.492,512,224,50.95\nresnetblur50d,826.31,308.964,256,224,25.58\nconvnext_tiny_hnfd,820.9,310.941,256,224,28.59\nconvnext_tiny_hnf,819.46,311.471,256,224,28.59\nconvnext_tiny,819.24,311.536,256,224,28.59\nconvnext_tiny_in22ft1k,818.81,311.724,256,224,28.59\nrexnet_130,816.78,
312.226,256,224,7.56\nseresnext50_32x4d,814.69,313.102,256,224,27.56\nlegacy_seresnext50_32x4d,813.61,313.477,256,224,27.56\ngluon_seresnext50_32x4d,813.13,313.678,256,224,27.56\nskresnet50,808.8,473.357,384,224,25.8\nvisformer_small,806.27,475.588,384,224,40.22\nres2net50_14w_8s,794.56,319.93,256,224,25.06\ndensenetblur121d,789.33,322.521,256,224,8.0\nseresnetaa50d,785.32,324.779,256,224,28.11\ngluon_inception_v3,782.59,489.263,384,299,23.83\ninception_v3,782.35,489.427,384,299,23.83\nadv_inception_v3,778.18,491.976,384,299,23.83\nresmlp_24_distilled_224,777.24,327.895,256,224,30.02\nresmlp_24_224,776.95,327.972,256,224,30.02\ntf_inception_v3,775.41,493.776,384,299,23.83\nese_vovnet57b,774.18,495.058,384,224,38.61\ntf_mixnet_m,773.08,495.127,384,224,5.01\nresnetv2_101,772.45,329.834,256,224,44.54\ndla60_res2net,767.35,332.099,256,224,20.85\nnf_regnet_b3,766.23,499.321,384,288,18.59\nsehalonet33ts,763.66,334.4,256,256,13.69\necaresnet101d_pruned,754.9,676.449,512,224,24.88\ndarknet53,753.16,339.081,256,256,41.61\ndensenet169,752.52,337.551,256,224,14.15\nresnet101,747.89,340.74,256,224,44.55\ngluon_resnet101_v1b,747.04,341.055,256,224,44.55\ntv_resnet101,746.84,341.219,256,224,44.55\nskresnet50d,739.17,344.891,256,224,25.82\ntwins_pcpvt_small,738.11,345.194,256,224,24.11\nvit_small_r26_s32_224,733.9,347.477,256,224,36.43\nmobilevit_s,733.0,260.821,192,256,5.58\ndarknetaa53,732.7,348.577,256,256,36.02\nxcit_tiny_24_p16_224_dist,727.98,348.335,256,224,12.12\nxcit_tiny_24_p16_224,727.1,348.63,256,224,12.12\nefficientnet_b0_gn,724.56,352.174,256,224,5.29\nefficientnet_b3_pruned,722.23,352.701,256,300,9.86\ngluon_resnet101_v1c,717.66,355.143,256,224,44.57\nresnext26ts,717.15,534.946,384,256,10.3\nresnetv2_101d,715.45,356.238,256,224,44.56\ngmixer_24_224,714.67,356.582,256,224,24.72\nresnetrs101,714.37,356.071,256,192,63.62\nnf_resnet101,712.1,537.603,384,224,44.55\nefficientnet_lite3,702.44,181.104,128,300,8.2\nmixnet_l,702.18,545.327,384,224,7.33\neca_resnext26ts,694.05
,368.289,256,256,10.3\nsemobilevit_s,692.92,368.16,256,256,5.74\nseresnext26ts,691.18,369.699,256,256,10.39\npoolformer_s24,689.84,369.792,256,224,21.39\ngluon_resnet101_v1d,688.26,370.323,256,224,44.57\ndla102,688.03,370.524,256,224,33.27\nvit_relpos_medium_patch16_rpn_224,687.13,371.514,256,224,38.73\nsebotnet33ts_256,686.07,279.058,192,256,13.7\ngcresnext26ts,683.09,373.929,256,256,10.48\nregnetx_016,682.73,749.012,512,224,9.19\nhaloregnetz_b,680.78,374.495,256,224,11.68\ncspdarknet53,679.01,375.961,256,256,27.64\nvgg13,677.45,566.653,384,224,133.05\nxcit_nano_12_p16_384_dist,671.72,379.231,256,384,3.05\nwide_resnet50_2,668.78,573.358,384,224,68.88\ntf_efficientnet_lite3,665.78,191.165,128,300,8.2\nvit_relpos_medium_patch16_cls_224,661.77,385.665,256,224,38.76\nvit_srelpos_medium_patch16_224,659.88,386.996,256,224,38.74\nrexnet_200,659.06,290.146,192,224,16.37\nvit_base_resnet50d_224,658.84,386.945,256,224,110.97\necaresnet50t,657.78,388.237,256,256,25.57\ngmlp_s16_224,657.63,290.408,192,224,19.42\nvit_relpos_medium_patch16_224,657.05,388.484,256,224,38.75\ntf_efficientnet_cc_b1_8e,654.82,389.25,256,240,39.72\nregnety_016,650.07,785.757,512,224,11.2\nswin_tiny_patch4_window7_224,648.69,393.641,256,224,28.29\nxcit_small_12_p16_224,640.82,397.688,256,224,26.25\ngluon_resnet101_v1s,640.79,397.908,256,224,44.67\nxcit_small_12_p16_224_dist,639.99,398.193,256,224,26.25\ncrossvit_small_240,638.8,399.076,256,240,26.86\nefficientnet_cc_b1_8e,637.42,399.94,256,240,39.72\nresnetaa101d,634.86,401.619,256,224,44.57\ncs3sedarknet_xdw,630.82,302.41,192,256,21.6\nrepvgg_b1,623.55,820.034,512,224,57.42\nmobilevitv2_125,620.51,308.406,192,256,7.48\nbat_resnext26ts,613.61,415.954,256,256,10.73\ngluon_resnext101_32x4d,609.67,418.333,256,224,44.18\nswsl_resnext101_32x4d,609.02,418.731,256,224,44.18\nresnext101_32x4d,609.01,418.74,256,224,44.18\ntf_mixnet_l,606.88,420.297,256,224,7.33\nssl_resnext101_32x4d,606.28,420.718,256,224,44.18\nlegacy_seresnet101,601.55,423.316,256,224,49.33\n
cs3darknet_focus_x,600.02,425.715,256,256,35.02\ndla102x,598.42,319.231,192,224,26.31\nhalonet50ts,597.8,320.205,192,256,22.73\nxcit_nano_12_p8_224,595.07,428.358,256,224,3.05\nxcit_nano_12_p8_224_dist,593.27,429.695,256,224,3.05\ncait_xxs24_224,593.22,428.92,256,224,11.96\nseresnet101,590.42,431.41,256,224,49.33\nswin_s3_tiny_224,588.57,433.98,256,224,28.33\nresnetv2_50x1_bit_distilled,585.83,326.889,192,224,25.55\nefficientnet_b0_g8_gn,582.67,438.264,256,224,6.56\ncrossvit_15_240,580.46,328.975,192,240,27.53\nresnetblur101d,576.87,442.155,256,224,44.57\nres2net50_26w_6s,573.8,444.339,256,224,37.05\nvgg13_bn,573.47,446.125,256,224,133.05\nefficientnet_b3a,572.29,221.925,128,288,12.23\nefficientnet_b3,572.18,221.941,128,288,12.23\ncs3darknet_x,571.12,447.259,256,256,35.05\ndensenet201,562.52,338.221,192,224,20.01\ncrossvit_15_dagger_240,562.49,339.489,192,240,28.21\nefficientnetv2_s,559.15,226.702,128,288,21.46\neca_botnext26ts_256,558.6,457.666,256,256,10.59\nmixer_b16_224,556.6,459.152,256,224,59.88\nmixer_b16_224_miil,556.5,459.202,256,224,59.88\neca_halonext26ts,547.96,466.574,256,256,10.76\necaresnet101d,546.58,466.555,256,224,44.57\nvgg16,546.06,702.994,384,224,138.36\nmixer_l32_224,543.38,351.819,192,224,206.94\nvit_base_patch32_384,543.37,470.294,256,384,88.3\nnf_seresnet101,540.53,471.014,256,224,49.33\nresnetv2_152,536.63,474.697,256,224,60.19\nbotnet50ts_256,534.71,238.412,128,256,22.74\nmobilevitv2_150,533.38,238.97,128,256,10.59\nvit_base_r26_s32_224,533.28,358.697,192,224,101.38\nmobilevitv2_150_in22ft1k,532.99,239.183,128,256,10.59\ncs3sedarknet_x,531.85,479.872,256,256,35.4\nnf_ecaresnet101,531.3,479.947,256,224,44.55\ncs3edgenet_x,529.37,482.632,256,256,47.82\nres2next50,528.59,483.023,256,224,24.67\nres2net101_26w_4s,527.01,483.179,256,224,45.21\nvit_large_patch32_224,524.74,486.172,256,224,306.54\nresnet101d,523.85,364.964,192,256,44.57\nefficientnetv2_rw_s,520.77,243.564,128,288,23.94\nhalo2botnet50ts_256,517.51,369.975,192,256,22.64\nresmlp_36_d
istilled_224,513.23,371.84,192,224,44.69\nvit_tiny_patch16_384,510.99,249.657,128,384,5.79\nresmlp_36_224,509.53,374.55,192,224,44.69\nswinv2_cr_tiny_224,506.82,503.861,256,224,28.33\nmixnet_xl,505.67,504.387,256,224,11.9\nresnetv2_50d_gn,505.25,379.149,192,224,25.57\nswinv2_cr_tiny_ns_224,504.1,506.527,256,224,28.33\ngluon_resnet152_v1b,502.02,380.204,192,224,60.19\nregnetz_d8,501.8,253.463,128,256,23.37\nresnet152,501.44,380.547,192,224,60.19\ntv_resnet152,501.12,380.811,192,224,60.19\nxception,497.64,256.367,128,299,22.86\nregnety_032,496.85,771.405,384,224,19.44\ntf_efficientnet_b3_ap,496.0,256.348,128,300,12.23\ntf_efficientnet_b3,494.58,257.101,128,300,12.23\ntf_efficientnet_b3_ns,492.45,258.213,128,300,12.23\nconvnext_small_in22ft1k,490.79,389.411,192,224,50.22\nres2net50_26w_8s,489.22,520.921,256,224,48.4\ntf_efficientnetv2_s_in21ft1k,488.77,259.622,128,300,21.46\ntf_efficientnetv2_s,488.25,259.918,128,300,21.46\ngluon_resnet152_v1c,488.2,390.894,192,224,60.21\nconvnext_small,487.15,392.394,192,224,50.22\ntwins_pcpvt_base,487.13,391.314,192,224,43.83\nresnetv2_152d,486.82,392.059,192,224,60.2\nlegacy_seresnext101_32x4d,484.7,393.829,192,224,48.96\nresnet50_gn,482.45,397.141,192,224,25.56\ngluon_seresnext101_32x4d,480.63,397.197,192,224,48.96\nhrnet_w32,480.24,528.215,256,224,41.23\nsequencer2d_s,480.16,264.213,128,224,27.65\nseresnext101_32x4d,479.03,398.526,192,224,48.96\nnest_tiny,477.97,266.889,128,224,17.06\ndla60_res2next,477.74,534.408,256,224,17.03\ngluon_resnet152_v1d,476.32,400.788,192,224,60.21\nregnetz_c16,475.79,402.061,192,256,13.46\nhrnet_w18,473.42,535.867,256,224,21.3\njx_nest_tiny,472.84,269.807,128,224,17.06\nregnetz_d32,471.85,269.596,128,256,27.58\nregnetz_040,471.81,269.412,128,256,27.12\nxception41p,471.79,270.424,128,299,26.91\nvgg16_bn,470.3,543.999,256,224,138.37\nregnetz_040h,469.27,270.846,128,256,28.94\npoolformer_s36,467.39,408.832,192,224,30.86\nresnet51q,463.81,551.102,256,256,35.7\nefficientnet_el_pruned,461.98,275.957,128,300
,10.59\nefficientnet_el,461.97,275.94,128,300,10.59\ncoat_lite_small,461.36,414.606,192,224,19.84\nnf_regnet_b4,457.97,417.049,192,320,30.21\nvgg19,457.37,839.347,384,224,143.67\ncs3se_edgenet_x,457.05,418.615,192,256,50.72\ndla169,455.51,419.044,192,224,53.39\nconvit_small,454.72,421.197,192,224,27.78\ngluon_resnet152_v1s,452.53,421.917,192,224,60.32\ntf_efficientnet_el,449.8,283.446,128,300,10.59\ngcresnext50ts,445.12,429.834,192,256,15.67\nregnetx_040,442.35,866.937,384,224,22.12\nvit_small_resnet50d_s16_224,437.26,437.826,192,224,57.53\nvolo_d1_224,437.04,437.842,192,224,26.63\nmobilevitv2_175_in22ft1k,434.9,293.339,128,256,14.25\nmobilevitv2_175,434.88,293.341,128,256,14.25\nresnet61q,433.95,441.405,192,256,36.85\nese_vovnet99b,433.24,589.371,256,224,63.2\nese_vovnet39b_evos,430.54,296.328,128,224,24.58\ntwins_svt_base,425.02,449.64,192,224,56.07\nresnest14d,411.87,1242.634,512,224,10.61\ndla102x2,405.87,313.766,128,224,41.28\nmobilevitv2_200_in22ft1k,405.83,314.414,128,256,18.45\nmobilevitv2_200,405.76,314.447,128,256,18.45\ninception_v4,405.43,471.399,192,299,42.68\ncrossvit_18_240,404.58,314.332,128,240,43.27\nswin_small_patch4_window7_224,400.37,477.632,192,224,49.61\ndensenet161,399.17,318.189,128,224,28.68\nvgg19_bn,398.5,642.012,256,224,143.68\nlegacy_seresnet152,398.4,478.588,192,224,66.82\nvit_base_patch16_224_miil,397.86,481.79,192,224,86.54\nsequencer2d_m,396.83,480.626,192,224,38.31\ncrossvit_18_dagger_240,396.31,320.926,128,240,44.27\nresnetv2_50d_frn,394.18,323.553,128,224,25.59\nvit_base_patch16_224,392.99,487.729,192,224,86.57\nvit_base_patch16_224_sam,392.92,487.774,192,224,86.57\nvit_base_patch16_rpn_224,391.32,489.846,192,224,86.54\nxception41,391.05,326.045,128,299,26.97\ndeit_base_patch16_224,390.04,491.437,192,224,86.57\ncait_xxs36_224,387.24,492.066,192,224,17.3\nefficientnet_b0_g16_evos,386.23,993.13,384,224,8.11\ndeit_base_distilled_patch16_224,384.06,499.086,192,224,87.34\nxcit_tiny_12_p16_384_dist,383.09,499.371,192,384,6.72\nvit_relp
os_base_patch16_rpn_224,382.95,500.328,192,224,86.41\nseresnet152,379.38,334.127,128,224,66.82\nresnetv2_50d_evos,374.27,340.812,128,224,25.59\ndeit3_base_patch16_224,374.05,512.341,192,224,86.59\ndeit3_base_patch16_224_in21ft1k,373.84,512.639,192,224,86.59\nvit_relpos_base_patch16_clsgap_224,370.76,516.704,192,224,86.43\nvit_relpos_base_patch16_cls_224,370.22,517.437,192,224,86.43\nhrnet_w30,369.35,688.202,256,224,37.71\nvit_relpos_base_patch16_224,368.93,519.279,192,224,86.43\ngluon_resnext101_64x4d,363.93,350.084,128,224,83.46\nresnext101_64x4d,363.79,350.21,128,224,83.46\nbeit_base_patch16_224,358.77,534.02,192,224,86.53\nens_adv_inception_resnet_v2,358.6,532.08,192,299,55.84\nwide_resnet101_2,358.56,533.868,192,224,126.89\ninception_resnet_v2,358.54,532.143,192,299,55.84\nresnet200,357.55,355.04,128,224,64.67\nresnet152d,357.54,355.729,128,256,60.21\nswinv2_tiny_window8_256,357.0,536.56,192,256,28.35\nefficientnet_b4,354.99,268.381,96,320,19.34\ndpn92,353.0,723.721,256,224,37.67\nrepvgg_b2,352.05,1453.222,512,224,89.02\nresnest50d_1s4x24d,349.97,730.142,256,224,25.68\nregnetz_b16_evos,347.19,366.83,128,224,9.74\ntnt_s_patch16_224,342.71,558.25,192,224,23.76\nxception65p,341.43,373.588,128,299,39.82\nconvnext_base_in22ft1k,339.67,374.996,128,224,88.59\nconvnext_base,338.68,376.119,128,224,88.59\nefficientnet_lite4,338.39,187.783,64,380,13.01\ntwins_pcpvt_large,333.52,379.633,128,224,60.99\npit_b_224,331.08,385.636,128,224,73.76\npit_b_distilled_224,328.87,388.208,128,224,74.79\nxcit_small_24_p16_224_dist,326.41,388.817,128,224,47.67\nxcit_small_24_p16_224,326.38,388.806,128,224,47.67\ntf_efficientnet_lite4,324.6,195.748,64,380,13.01\neca_nfnet_l0,319.84,1599.745,512,224,24.14\nnfnet_l0,319.69,1600.317,512,224,35.07\ngluon_seresnext101_64x4d,319.51,398.398,128,224,88.23\nrepvgg_b3,316.57,1211.922,384,224,123.09\nskresnext50_32x4d,315.84,809.121,256,224,27.48\npoolformer_m36,315.69,403.496,128,224,56.17\nssl_resnext101_32x8d,313.35,406.924,128,224,88.79\nresnext10
1_32x8d,312.8,407.622,128,224,88.79\nswsl_resnext101_32x8d,312.76,407.724,128,224,88.79\nig_resnext101_32x8d,311.13,409.865,128,224,88.79\nvit_small_patch16_36x1_224,309.04,411.365,128,224,64.67\nregnetx_032,308.88,1241.936,384,224,15.3\nvit_small_patch16_18x2_224,306.57,414.654,128,224,64.67\nxcit_tiny_12_p8_224,306.37,415.93,128,224,6.71\ncait_s24_224,305.74,415.965,128,224,46.92\nxcit_tiny_12_p8_224_dist,304.23,418.886,128,224,6.71\nswinv2_cr_small_ns_224,300.99,422.86,128,224,49.7\ntwins_svt_large,300.69,423.548,128,224,99.27\nswinv2_cr_small_224,299.81,424.482,128,224,49.7\ncoat_tiny,298.83,426.275,128,224,5.5\nresnest26d,298.23,1286.829,384,224,17.07\nnest_small,296.79,321.765,96,224,38.35\njx_nest_small,293.75,325.094,96,224,38.35\nswin_s3_small_224,290.56,438.612,128,224,49.74\ndpn98,290.11,439.591,128,224,61.57\nresnetv2_50d_evob,289.65,330.197,96,224,25.59\nseresnet152d,283.9,447.414,128,256,66.84\ngluon_xception65,283.18,337.068,96,299,39.92\nconvnext_tiny_384_in22ft1k,282.39,338.982,96,384,28.59\nresnetrs152,282.26,450.046,128,256,86.62\nxception65,281.11,339.548,96,299,39.92\nswin_base_patch4_window7_224,281.0,453.662,128,224,87.77\nhrnet_w48,279.44,682.135,192,224,77.47\nmixnet_xxl,278.4,457.833,128,224,23.96\nseresnext101_32x8d,278.13,458.033,128,224,93.57\ngmlp_b16_224,275.97,346.253,96,224,73.08\nseresnext101d_32x8d,270.35,471.144,128,224,93.59\nresnet200d,267.1,476.272,128,256,64.69\nnfnet_f0,265.61,1926.394,512,192,71.49\nregnetz_e8,256.51,247.489,64,256,57.7\nxcit_tiny_24_p16_384_dist,255.73,371.975,96,384,12.12\ncrossvit_base_240,254.6,375.374,96,240,105.03\ndm_nfnet_f0,251.38,1526.301,384,192,71.49\nhrnet_w40,249.23,765.525,192,224,57.56\nvit_base_patch16_plus_240,246.96,517.368,128,240,117.56\nefficientnetv2_m,246.55,256.379,64,320,54.14\nvit_relpos_base_patch16_plus_240,244.88,521.493,128,240,117.38\nseresnextaa101d_32x8d,243.89,522.629,128,224,93.59\ntf_efficientnet_b4_ap,242.14,262.218,64,380,19.34\ntf_efficientnet_b4,241.83,262.52,64,380,1
9.34\ntf_efficientnet_b4_ns,241.46,263.01,64,380,19.34\nxcit_medium_24_p16_224,241.39,526.926,128,224,84.4\nxcit_medium_24_p16_224_dist,241.08,527.466,128,224,84.4\nxcit_small_12_p16_384_dist,240.61,397.192,96,384,26.25\nvit_small_patch16_384,239.06,266.856,64,384,22.2\nvolo_d2_224,238.89,400.019,96,224,58.68\nswinv2_tiny_window16_256,238.79,400.76,96,256,28.35\nmobilevitv2_150_384_in22ft1k,238.1,267.77,64,384,10.59\nvit_large_r50_s32_224,236.27,403.797,96,224,328.99\ntresnet_m,233.24,2192.365,512,224,31.39\nhrnet_w44,232.48,820.975,192,224,67.06\npoolformer_m48,232.43,410.471,96,224,73.47\ndensenet264,231.27,411.12,96,224,72.69\nconvit_base,231.06,552.947,128,224,86.54\nnf_regnet_b5,228.54,417.354,96,384,49.74\ndeit3_small_patch16_384,226.74,281.318,64,384,22.21\ndeit3_small_patch16_384_in21ft1k,226.44,281.652,64,384,22.21\nvit_small_r26_s32_384,226.15,281.726,64,384,36.47\ncoat_mini,225.14,566.497,128,224,10.34\nefficientnetv2_rw_m,224.46,281.565,64,320,53.24\nswin_s3_base_224,224.0,425.728,96,224,71.13\ntnt_b_patch16_224,223.52,570.669,128,224,65.41\nhrnet_w64,223.29,568.417,128,224,128.06\nsequencer2d_l,220.03,286.022,64,224,54.3\ndpn131,216.53,588.962,128,224,79.25\nvit_base_r50_s16_224,215.49,443.851,96,224,98.66\nswinv2_cr_base_ns_224,214.73,444.647,96,224,87.88\nxception71,214.09,296.77,64,299,42.34\nswinv2_cr_base_224,213.21,447.81,96,224,87.88\nswinv2_small_window8_256,213.06,448.048,96,256,49.73\nnest_base,210.25,302.717,64,224,67.72\njx_nest_base,209.06,304.441,64,224,67.72\nseresnet200d,203.53,467.209,96,256,71.86\nresnetrs200,201.84,471.293,96,256,93.21\nresnest50d,201.6,1268.493,256,224,27.48\necaresnet200d,201.55,472.938,96,256,64.69\nxcit_nano_12_p8_384_dist,201.45,315.854,64,384,3.05\nefficientnet_b3_gn,197.65,322.123,64,288,11.73\nxcit_tiny_24_p8_224_dist,195.37,488.075,96,224,12.11\nxcit_tiny_24_p8_224,195.11,488.622,96,224,12.11\ndpn107,194.08,492.913,96,224,86.92\nregnetz_c16_evos,193.89,328.188,64,256,13.49\nregnety_040,190.14,2017.916,384,224
,20.65\nmobilevitv2_175_384_in22ft1k,189.6,336.534,64,384,14.25\nregnetv_040,188.29,1358.084,256,224,20.64\nconvnext_large,187.93,509.087,96,224,197.77\nconvnext_large_in22ft1k,187.83,509.365,96,224,197.77\nconvmixer_768_32,187.17,511.603,96,224,21.11\nregnetx_080,181.41,1409.979,256,224,39.57\nresnest50d_4s2x40d,180.44,1417.38,256,224,30.42\nxcit_small_12_p8_224,179.5,354.768,64,224,26.21\nxcit_small_12_p8_224_dist,179.34,355.047,64,224,26.21\nhalonet_h1,176.7,360.706,64,256,8.1\ntf_efficientnetv2_m_in21ft1k,175.14,270.794,48,384,54.14\nmobilevitv2_200_384_in22ft1k,175.13,273.08,48,384,18.45\ntf_efficientnetv2_m,173.37,273.617,48,384,54.14\nmixer_l16_224,171.41,558.471,96,224,208.2\nefficientnet_b3_g8_gn,168.79,377.376,64,288,14.25\nrepvgg_b1g4,167.59,3053.943,512,224,39.97\nvit_large_patch32_384,167.04,573.058,96,384,306.63\nconvnext_small_384_in22ft1k,165.65,384.557,64,384,50.22\nvolo_d3_224,162.19,392.021,64,224,86.33\nregnetz_d8_evos,155.31,307.002,48,256,23.46\nswin_large_patch4_window7_224,153.79,414.289,64,224,196.53\nswinv2_base_window8_256,151.21,420.663,64,256,87.92\nconvmixer_1024_20_ks9_p14,149.3,1713.726,256,224,24.38\nresnetv2_50x1_bitm,147.75,215.764,32,448,25.55\nseresnet269d,145.59,433.61,64,256,113.67\nresnetrs270,144.14,437.83,64,256,129.86\nswinv2_small_window16_256,143.52,443.487,64,256,49.73\nregnety_040s_gn,142.58,896.132,128,224,20.65\nrepvgg_b2g4,133.72,3827.892,512,224,61.76\neca_nfnet_l1,133.59,1435.413,192,256,41.41\nxcit_large_24_p16_224,132.6,479.222,64,224,189.1\nswinv2_cr_tiny_384,131.94,483.86,64,384,28.33\nxcit_large_24_p16_224_dist,131.66,482.65,64,224,189.1\nxcit_tiny_12_p8_384_dist,131.64,362.75,48,384,6.71\nregnetx_064,129.82,1970.916,256,224,26.21\nswinv2_cr_large_224,124.15,513.018,64,224,196.68\nxcit_small_24_p16_384_dist,120.64,394.328,48,384,47.67\nregnety_064,119.44,2141.523,256,224,30.58\nregnety_080,117.88,2170.37,256,224,39.18\ncrossvit_15_dagger_408,117.86,269.618,32,408,28.5\nvit_large_patch16_224,117.2,544.512,64,22
4,304.33\nregnetv_064,117.03,1638.944,192,224,30.58\nese_vovnet99b_iabn,117.02,3278.167,384,224,63.2\nconvnext_xlarge_in22ft1k,116.37,548.167,64,224,350.2\nvit_base_patch16_18x2_224,116.0,548.972,64,224,256.73\nconvnext_base_384_in22ft1k,115.58,413.454,48,384,88.59\nefficientnet_b5,113.63,279.129,32,456,30.39\ndeit3_large_patch16_224_in21ft1k,112.51,567.041,64,224,304.37\ndeit3_large_patch16_224,112.48,567.139,64,224,304.37\ntf_efficientnet_b5,111.42,284.665,32,456,30.39\ntf_efficientnet_b5_ap,111.14,285.451,32,456,30.39\ntf_efficientnet_b5_ns,111.14,285.33,32,456,30.39\nlegacy_senet154,110.98,861.567,96,224,115.09\nsenet154,110.82,862.828,96,224,115.09\ngluon_senet154,110.77,863.12,96,224,115.09\nbeit_large_patch16_224,109.02,584.818,64,224,304.43\nrepvgg_b3g4,108.77,3529.239,384,224,83.83\nregnetx_160,107.6,1783.261,192,224,54.28\nnfnet_f1,107.01,1791.907,192,224,132.63\nvolo_d1_384,105.69,301.347,32,384,26.78\nswinv2_base_window16_256,103.88,459.56,48,256,87.92\nswinv2_base_window12to16_192to256_22kft1k,103.79,460.002,48,256,87.92\ntresnet_l,102.82,4975.916,512,224,55.99\ndm_nfnet_f1,101.59,1257.525,128,224,132.63\nvolo_d4_224,101.08,472.359,48,224,192.96\ncait_xxs24_384,99.39,480.268,48,384,12.03\necaresnet269d,99.06,479.988,48,320,102.09\nefficientnetv2_l,98.76,319.521,32,384,118.52\ntf_efficientnetv2_l_in21ft1k,98.35,320.759,32,384,118.52\ntf_efficientnetv2_l,97.56,323.47,32,384,118.52\ndeit_base_patch16_384,97.3,328.042,32,384,86.86\nvit_base_patch16_384,97.1,328.712,32,384,86.86\nresnest101e,96.09,1329.413,128,256,48.28\ndeit_base_distilled_patch16_384,94.63,337.315,32,384,87.63\nregnetx_120,94.03,2721.558,256,224,46.11\ndeit3_base_patch16_384,93.5,341.294,32,384,86.88\ndeit3_base_patch16_384_in21ft1k,93.49,341.309,32,384,86.88\nxcit_small_24_p8_224_dist,92.61,514.968,48,224,47.63\nxcit_small_24_p8_224,92.51,515.466,48,224,47.63\nregnety_120,92.07,2083.952,192,224,51.82\ntresnet_xl,91.15,4209.119,384,224,78.44\ncrossvit_18_dagger_408,89.17,356.787,32,408,44.
61\nresnetv2_152x2_bit_teacher,89.16,356.538,32,224,236.34\nvit_large_patch14_224,85.06,562.673,48,224,304.2\nresnetv2_101x1_bitm,84.72,187.286,16,448,44.54\nresnetrs350,84.14,372.211,32,288,163.96\nbeit_base_patch16_384,83.87,380.424,32,384,86.74\nregnety_160,83.24,2305.144,192,224,83.59\npnasnet5large,83.16,380.801,32,331,86.06\nxcit_medium_24_p16_384_dist,82.74,383.266,32,384,84.4\nvit_large_r50_s32_384,77.34,411.186,32,384,329.09\nnasnetalarge,77.32,408.633,32,331,88.75\nswinv2_cr_small_384,76.42,416.277,32,384,49.7\nswin_base_patch4_window12_384,74.73,426.327,32,384,87.9\nresmlp_big_24_distilled_224,70.88,449.95,32,224,129.14\nresmlp_big_24_224_in22ft1k,70.88,449.933,32,224,129.14\nresmlp_big_24_224,70.41,452.99,32,224,129.14\nregnety_320,66.33,1928.357,128,224,145.05\nxcit_tiny_24_p8_384_dist,66.29,479.361,32,384,12.11\ncait_xs24_384,66.24,480.525,32,384,26.67\nig_resnext101_32x16d,65.9,1455.165,96,224,194.03\nssl_resnext101_32x16d,65.74,1458.688,96,224,194.03\nswsl_resnext101_32x16d,65.74,1458.738,96,224,194.03\nvolo_d5_224,64.41,493.535,32,224,295.46\ncait_xxs36_384,64.34,493.602,32,384,17.37\nefficientnet_b6,64.08,246.77,16,528,43.04\nxcit_medium_24_p8_224,63.96,496.86,32,224,84.32\nxcit_medium_24_p8_224_dist,63.93,497.194,32,224,84.32\nconvnext_large_384_in22ft1k,63.85,499.388,32,384,197.77\nvit_base_patch8_224,63.45,377.425,24,224,86.58\ntf_efficientnet_b6_ns,63.1,250.577,16,528,43.04\ntf_efficientnet_b6,62.84,251.669,16,528,43.04\ntf_efficientnet_b6_ap,62.76,252.073,16,528,43.04\nefficientnetv2_xl,62.18,251.438,16,384,208.12\ntf_efficientnetv2_xl_in21ft1k,62.14,251.721,16,384,208.12\nxcit_small_12_p8_384_dist,61.84,386.224,24,384,26.21\nvit_base_r50_s16_384,61.01,391.67,24,384,98.95\nvit_base_resnet50_384,60.98,391.903,24,384,98.95\nswinv2_large_window12to16_192to256_22kft1k,60.98,391.098,24,256,196.74\neca_nfnet_l2,58.72,1632.112,96,320,56.72\nvolo_d2_384,58.5,271.766,16,384,58.87\nresnetrs420,56.49,415.629,24,320,191.89\nswinv2_cr_base_384,55.08,433.26
9,24,384,87.88\nnfnet_f2,54.73,1750.573,96,256,193.78\ntresnet_m_448,53.52,3584.333,192,448,31.39\ndm_nfnet_f2,51.26,1245.084,64,256,193.78\ncait_s24_384,50.14,476.064,24,384,47.06\nswinv2_cr_huge_224,49.63,481.092,24,224,657.83\nregnetx_320,48.06,2662.024,128,224,107.81\nxcit_large_24_p16_384_dist,48.02,496.416,24,384,189.1\nswin_large_patch4_window12_384,41.75,381.31,16,384,196.74\nconvnext_xlarge_384_in22ft1k,40.45,591.485,24,384,350.2\ndeit3_huge_patch14_224_in21ft1k,38.41,414.036,16,224,632.13\ndeit3_huge_patch14_224,38.4,414.103,16,224,632.13\nefficientnet_b7,37.97,207.232,8,600,66.35\ntf_efficientnet_b7_ap,37.27,210.986,8,600,66.35\ntf_efficientnet_b7_ns,37.25,211.249,8,600,66.35\ntf_efficientnet_b7,37.22,211.329,8,600,66.35\neca_nfnet_l3,35.61,1344.526,48,352,72.04\nxcit_large_24_p8_224_dist,35.32,449.68,16,224,188.93\nxcit_large_24_p8_224,35.06,452.952,16,224,188.93\nresnetv2_50x3_bitm,34.68,460.605,16,448,217.32\nswinv2_cr_large_384,32.56,488.949,16,384,196.68\ncait_s36_384,32.3,491.641,16,384,68.37\ndensenet264d_iabn,32.11,3982.48,128,224,72.74\nresnetv2_152x2_bit_teacher_384,31.17,382.508,12,384,236.34\nxcit_small_24_p8_384_dist,31.12,510.761,16,384,47.63\nresnest200e,30.3,1579.149,48,320,70.2\nvit_large_patch16_384,29.14,410.147,12,384,304.72\ndeit3_large_patch16_384,28.26,422.768,12,384,304.76\ndeit3_large_patch16_384_in21ft1k,28.25,422.94,12,384,304.76\nswinv2_base_window12to24_192to384_22kft1k,28.19,423.281,12,384,87.92\nnfnet_f3,26.1,1834.868,48,320,254.92\nbeit_large_patch16_384,25.3,472.219,12,384,305.0\ntresnet_l_448,25.28,5060.321,128,448,55.99\nvolo_d3_448,24.7,321.403,8,448,86.63\ndm_nfnet_f3,24.56,1297.967,32,320,254.92\ntresnet_xl_448,23.27,4122.323,96,448,78.44\nefficientnet_b8,22.93,257.589,6,672,87.41\ntf_efficientnet_b8,22.71,260.18,6,672,87.41\ntf_efficientnet_b8_ap,22.69,260.555,6,672,87.41\nresnetv2_152x2_bitm,22.58,351.774,8,448,236.34\nvit_giant_patch14_224,22.32,355.766,8,224,1012.61\nig_resnext101_32x32d,21.03,1519.993,32,224,468.
53\nxcit_medium_24_p8_384_dist,21.03,376.997,8,384,84.32\nconvmixer_1536_20,20.83,2303.059,48,224,51.63\nresnetv2_101x3_bitm,18.05,441.706,8,448,387.93\nvolo_d4_448,17.57,338.789,6,448,193.41\nswinv2_large_window12to24_192to384_22kft1k,16.62,358.548,6,384,196.74\nresnest269e,16.0,1493.085,24,416,110.93\nnfnet_f4,14.17,1687.74,24,384,316.07\nswinv2_cr_huge_384,13.13,454.627,6,384,657.94\ndm_nfnet_f4,12.96,1229.019,16,384,316.07\nxcit_large_24_p8_384_dist,12.19,488.758,6,384,188.93\ncait_m36_384,11.91,500.148,6,384,271.22\nvolo_d5_448,11.43,346.654,4,448,295.91\nig_resnext101_32x48d,11.2,1427.437,16,224,828.41\ntf_efficientnet_l2_ns_475,10.96,267.912,3,475,480.31\ndm_nfnet_f5,9.76,1222.345,12,416,377.21\nbeit_large_patch16_512,9.42,422.548,4,512,305.67\nvolo_d5_512,8.0,371.847,3,512,296.09\nnfnet_f5,8.0,1992.337,16,416,377.21\ndm_nfnet_f6,7.45,1065.231,8,448,438.36\nnfnet_f6,5.82,2052.248,12,448,438.36\nnfnet_f7,5.73,1387.07,8,480,499.5\nresnetv2_152x4_bitm,4.89,406.668,2,480,936.53\ncait_m48_448,4.76,414.936,2,448,356.46\nefficientnet_l2,3.95,247.515,1,800,480.31\ntf_efficientnet_l2_ns,3.93,248.975,1,800,480.31\n"
  },
  {
    "path": "results/generate_csv_results.py",
    "content": "import numpy as np\nimport pandas as pd\n\n\nresults = {\n    'results-imagenet.csv': [\n        'results-imagenet-real.csv',\n        'results-imagenetv2-matched-frequency.csv',\n        'results-sketch.csv'\n    ],\n    'results-imagenet-a-clean.csv': [\n        'results-imagenet-a.csv',\n    ],\n    'results-imagenet-r-clean.csv': [\n        'results-imagenet-r.csv',\n    ],\n}\n\n\ndef diff(base_df, test_csv):\n    base_df['mi'] = base_df.model + '-' + base_df.img_size.astype('str')\n    base_models = base_df['mi'].values\n    test_df = pd.read_csv(test_csv)\n    test_df['mi'] = test_df.model + '-' + test_df.img_size.astype('str')\n    test_models = test_df['mi'].values\n\n    rank_diff = np.zeros_like(test_models, dtype='object')\n    top1_diff = np.zeros_like(test_models, dtype='object')\n    top5_diff = np.zeros_like(test_models, dtype='object')\n    \n    for rank, model in enumerate(test_models):\n        if model in base_models:            \n            base_rank = int(np.where(base_models == model)[0])\n            top1_d = test_df['top1'][rank] - base_df['top1'][base_rank]\n            top5_d = test_df['top5'][rank] - base_df['top5'][base_rank]\n            \n            # rank_diff\n            if rank == base_rank:\n                rank_diff[rank] = f'0'\n            elif rank > base_rank:\n                rank_diff[rank] = f'-{rank - base_rank}'\n            else:\n                rank_diff[rank] = f'+{base_rank - rank}'\n                \n            # top1_diff\n            if top1_d >= .0:\n                top1_diff[rank] = f'+{top1_d:.3f}'\n            else:\n                top1_diff[rank] = f'-{abs(top1_d):.3f}'\n            \n            # top5_diff\n            if top5_d >= .0:\n                top5_diff[rank] = f'+{top5_d:.3f}'\n            else:\n                top5_diff[rank] = f'-{abs(top5_d):.3f}'\n                \n        else: \n            rank_diff[rank] = ''\n            top1_diff[rank] = ''\n            
top5_diff[rank] = ''\n\n    test_df['top1_diff'] = top1_diff\n    test_df['top5_diff'] = top5_diff\n    test_df['rank_diff'] = rank_diff\n\n    test_df.drop('mi', axis=1, inplace=True)\n    base_df.drop('mi', axis=1, inplace=True)\n    test_df['param_count'] = test_df['param_count'].map('{:,.2f}'.format)\n    test_df.sort_values(['top1', 'top5', 'model'], ascending=[False, False, True], inplace=True)\n    test_df.to_csv(test_csv, index=False, float_format='%.3f')\n\n\nfor base_results, test_results in results.items():\n    base_df = pd.read_csv(base_results)\n    base_df.sort_values(['top1', 'top5', 'model'], ascending=[False, False, True], inplace=True)\n    for test_csv in test_results:\n        diff(base_df, test_csv)\n    base_df['param_count'] = base_df['param_count'].map('{:,.2f}'.format)\n    base_df.to_csv(base_results, index=False, float_format='%.3f')\n"
  },
  {
    "path": "results/model_metadata-in1k.csv",
    "content": "model,pretrain\nadv_inception_v3,in1k-adv\nbat_resnext26ts,in1k\nbeit_base_patch16_224,in21k-selfsl\nbeit_base_patch16_384,in21k-selfsl\nbeit_large_patch16_224,in21k-selfsl\nbeit_large_patch16_384,in21k-selfsl\nbeit_large_patch16_512,in21k-selfsl\nbotnet26t_256,in1k\ncait_m36_384,in1k-dist\ncait_m48_448,in1k-dist\ncait_s24_224,in1k-dist\ncait_s24_384,in1k-dist\ncait_s36_384,in1k-dist\ncait_xs24_384,in1k-dist\ncait_xxs24_224,in1k-dist\ncait_xxs24_384,in1k-dist\ncait_xxs36_224,in1k-dist\ncait_xxs36_384,in1k-dist\ncoat_lite_mini,in1k\ncoat_lite_small,in1k\ncoat_lite_tiny,in1k\ncoat_mini,in1k\ncoat_tiny,in1k\nconvit_base,in1k\nconvit_small,in1k\nconvit_tiny,in1k\nconvmixer_1024_20_ks9_p14,in1k\nconvmixer_1536_20,in1k\nconvmixer_768_32,in1k\ncrossvit_15_240,in1k\ncrossvit_15_dagger_240,in1k\ncrossvit_15_dagger_408,in1k\ncrossvit_18_240,in1k\ncrossvit_18_dagger_240,in1k\ncrossvit_18_dagger_408,in1k\ncrossvit_9_240,in1k\ncrossvit_9_dagger_240,in1k\ncrossvit_base_240,in1k\ncrossvit_small_240,in1k\ncrossvit_tiny_240,in1k\ncspdarknet53,in1k\ncspresnet50,in1k\ncspresnext50,in1k\ndeit_base_distilled_patch16_224,in1k-dist\ndeit_base_distilled_patch16_384,in1k-dist\ndeit_base_patch16_224,in1k\ndeit_base_patch16_384,in1k\ndeit_small_distilled_patch16_224,in1k-dist\ndeit_small_patch16_224,in1k\ndeit_tiny_distilled_patch16_224,in1k-dist\ndeit_tiny_patch16_224,in1k\ndensenet121,in1k\ndensenet161,in1k\ndensenet169,in1k\ndensenet201,in1k\ndensenetblur121d,in1k\ndla102,in1k\ndla102x,in1k\ndla102x2,in1k\ndla169,in1k\ndla34,in1k\ndla46_c,in1k\ndla46x_c,in1k\ndla60,in1k\ndla60_res2net,in1k\ndla60_res2next,in1k\ndla60x,in1k\ndla60x_c,in1k\ndm_nfnet_f0,in1k\ndm_nfnet_f1,in1k\ndm_nfnet_f2,in1k\ndm_nfnet_f3,in1k\ndm_nfnet_f4,in1k\ndm_nfnet_f5,in1k\ndm_nfnet_f6,in1k\ndpn107,in1k\ndpn131,in1k\ndpn68,in1k\ndpn68b,in1k\ndpn92,in1k\ndpn98,in1k\neca_botnext26ts_256,in1k\neca_halonext26ts,in1k\neca_nfnet_l0,in1k\neca_nfnet_l1,in1k\neca_nfnet_l2,in1k\neca_resnet33ts,in1k\neca_resnext26
ts,in1k\necaresnet101d,in1k\necaresnet101d_pruned,in1k\necaresnet269d,in1k\necaresnet26t,in1k\necaresnet50d,in1k\necaresnet50d_pruned,in1k\necaresnet50t,in1k\necaresnetlight,in1k\nefficientnet_b0,in1k\nefficientnet_b1,in1k\nefficientnet_b1_pruned,in1k\nefficientnet_b2,in1k\nefficientnet_b2_pruned,in1k\nefficientnet_b3,in1k\nefficientnet_b3_pruned,in1k\nefficientnet_b4,in1k\nefficientnet_el,in1k\nefficientnet_el_pruned,in1k\nefficientnet_em,in1k\nefficientnet_es,in1k\nefficientnet_es_pruned,in1k\nefficientnet_lite0,in1k\nefficientnetv2_rw_m,in1k\nefficientnetv2_rw_s,in1k\nefficientnetv2_rw_t,in1k\nens_adv_inception_resnet_v2,in1k-adv\nese_vovnet19b_dw,in1k\nese_vovnet39b,in1k\nfbnetc_100,in1k\ngc_efficientnetv2_rw_t,in1k\ngcresnet33ts,in1k\ngcresnet50t,in1k\ngcresnext26ts,in1k\ngcresnext50ts,in1k\ngernet_l,in1k\ngernet_m,in1k\ngernet_s,in1k\nghostnet_100,in1k\ngluon_inception_v3,in1k\ngluon_resnet101_v1b,in1k\ngluon_resnet101_v1c,in1k\ngluon_resnet101_v1d,in1k\ngluon_resnet101_v1s,in1k\ngluon_resnet152_v1b,in1k\ngluon_resnet152_v1c,in1k\ngluon_resnet152_v1d,in1k\ngluon_resnet152_v1s,in1k\ngluon_resnet18_v1b,in1k\ngluon_resnet34_v1b,in1k\ngluon_resnet50_v1b,in1k\ngluon_resnet50_v1c,in1k\ngluon_resnet50_v1d,in1k\ngluon_resnet50_v1s,in1k\ngluon_resnext101_32x4d,in1k\ngluon_resnext101_64x4d,in1k\ngluon_resnext50_32x4d,in1k\ngluon_senet154,in1k\ngluon_seresnext101_32x4d,in1k\ngluon_seresnext101_64x4d,in1k\ngluon_seresnext50_32x4d,in1k\ngluon_xception65,in1k\ngmixer_24_224,in1k\ngmlp_s16_224,in1k\nhalo2botnet50ts_256,in1k\nhalonet26t,in1k\nhalonet50ts,in1k\nhaloregnetz_b,in1k\nhardcorenas_a,in1k\nhardcorenas_b,in1k\nhardcorenas_c,in1k\nhardcorenas_d,in1k\nhardcorenas_e,in1k\nhardcorenas_f,in1k\nhrnet_w18,in1k\nhrnet_w18_small,in1k\nhrnet_w18_small_v2,in1k\nhrnet_w30,in1k\nhrnet_w32,in1k\nhrnet_w40,in1k\nhrnet_w44,in1k\nhrnet_w48,in1k\nhrnet_w64,in1k\nig_resnext101_32x16d,ig1b-wsl\nig_resnext101_32x32d,ig1b-wsl\nig_resnext101_32x48d,ig1b-wsl\nig_resnext101_32x8d,ig1b-wsl\ni
nception_resnet_v2,in1k\ninception_v3,in1k\ninception_v4,in1k\njx_nest_base,in1k\njx_nest_small,in1k\njx_nest_tiny,in1k\nlambda_resnet26rpt_256,in1k\nlambda_resnet26t,in1k\nlambda_resnet50ts,in1k\nlamhalobotnet50ts_256,in1k\nlegacy_senet154,in1k\nlegacy_seresnet101,in1k\nlegacy_seresnet152,in1k\nlegacy_seresnet18,in1k\nlegacy_seresnet34,in1k\nlegacy_seresnet50,in1k\nlegacy_seresnext101_32x4d,in1k\nlegacy_seresnext26_32x4d,in1k\nlegacy_seresnext50_32x4d,in1k\nlevit_128,in1k-dist\nlevit_128s,in1k-dist\nlevit_192,in1k-dist\nlevit_256,in1k-dist\nlevit_384,in1k-dist\nmixer_b16_224,in1k\nmixer_b16_224_miil,in21k\nmixer_l16_224,in1k\nmixnet_l,in1k\nmixnet_m,in1k\nmixnet_s,in1k\nmixnet_xl,in1k\nmnasnet_100,in1k\nmobilenetv2_100,in1k\nmobilenetv2_110d,in1k\nmobilenetv2_120d,in1k\nmobilenetv2_140,in1k\nmobilenetv3_large_100,in1k\nmobilenetv3_large_100_miil,in21k\nmobilenetv3_rw,in1k\nnasnetalarge,in1k\nnf_regnet_b1,in1k\nnf_resnet50,in1k\nnfnet_l0,in1k\npit_b_224,in1k\npit_b_distilled_224,in1k-dist\npit_s_224,in1k\npit_s_distilled_224,in1k-dist\npit_ti_224,in1k\npit_ti_distilled_224,in1k-dist\npit_xs_224,in1k\npit_xs_distilled_224,in1k-dist\npnasnet5large,in1k\nregnetx_002,in1k\nregnetx_004,in1k\nregnetx_006,in1k\nregnetx_008,in1k\nregnetx_016,in1k\nregnetx_032,in1k\nregnetx_040,in1k\nregnetx_064,in1k\nregnetx_080,in1k\nregnetx_120,in1k\nregnetx_160,in1k\nregnetx_320,in1k\nregnety_002,in1k\nregnety_004,in1k\nregnety_006,in1k\nregnety_008,in1k\nregnety_016,in1k\nregnety_032,in1k\nregnety_040,in1k\nregnety_064,in1k\nregnety_080,in1k\nregnety_120,in1k\nregnety_160,in1k\nregnety_320,in1k\nregnetz_b,in1k\nregnetz_c,in1k\nregnetz_d,in1k\nrepvgg_a2,in1k\nrepvgg_b0,in1k\nrepvgg_b1,in1k\nrepvgg_b1g4,in1k\nrepvgg_b2,in1k\nrepvgg_b2g4,in1k\nrepvgg_b3,in1k\nrepvgg_b3g4,in1k\nres2net101_26w_4s,in1k\nres2net50_14w_8s,in1k\nres2net50_26w_4s,in1k\nres2net50_26w_6s,in1k\nres2net50_26w_8s,in1k\nres2net50_48w_2s,in1k\nres2next50,in1k\nresmlp_12_224,in1k\nresmlp_12_distilled_224,in1k-dist\nresml
p_24_224,in1k\nresmlp_24_distilled_224,in1k-dist\nresmlp_36_224,in1k\nresmlp_36_distilled_224,in1k-dist\nresmlp_big_24_224,in1k\nresmlp_big_24_224_in22ft1k,in21k\nresmlp_big_24_distilled_224,in1k-dist\nresnest101e,in1k\nresnest14d,in1k\nresnest200e,in1k\nresnest269e,in1k\nresnest26d,in1k\nresnest50d,in1k\nresnest50d_1s4x24d,in1k\nresnest50d_4s2x40d,in1k\nresnet101d,in1k\nresnet152d,in1k\nresnet18,in1k\nresnet18d,in1k\nresnet200d,in1k\nresnet26,in1k\nresnet26d,in1k\nresnet26t,in1k\nresnet32ts,in1k\nresnet33ts,in1k\nresnet34,in1k\nresnet34d,in1k\nresnet50,in1k\nresnet50d,in1k\nresnet51q,in1k\nresnet61q,in1k\nresnetblur50,in1k\nresnetrs101,in1k\nresnetrs152,in1k\nresnetrs200,in1k\nresnetrs270,in1k\nresnetrs350,in1k\nresnetrs420,in1k\nresnetrs50,in1k\nresnetv2_101,in1k\nresnetv2_101x1_bitm,in21k\nresnetv2_101x3_bitm,in21k\nresnetv2_152x2_bit_teacher,in21k\nresnetv2_152x2_bit_teacher_384,in21k\nresnetv2_152x2_bitm,in21k\nresnetv2_152x4_bitm,in21k\nresnetv2_50,in1k\nresnetv2_50x1_bit_distilled,in1k-dist\nresnetv2_50x1_bitm,in21k\nresnetv2_50x3_bitm,in21k\nresnext101_32x8d,in1k\nresnext26ts,in1k\nresnext50_32x4d,in1k\nresnext50d_32x4d,in1k\nrexnet_100,in1k\nrexnet_130,in1k\nrexnet_150,in1k\nrexnet_200,in1k\nsehalonet33ts,in1k\nselecsls42b,in1k\nselecsls60,in1k\nselecsls60b,in1k\nsemnasnet_100,in1k\nseresnet152d,in1k\nseresnet33ts,in1k\nseresnet50,in1k\nseresnext26d_32x4d,in1k\nseresnext26t_32x4d,in1k\nseresnext26ts,in1k\nseresnext50_32x4d,in1k\nskresnet18,in1k\nskresnet34,in1k\nskresnext50_32x4d,in1k\nspnasnet_100,in1k\nssl_resnet18,yfc-semisl\nssl_resnet50,yfc-semisl\nssl_resnext101_32x16d,yfc-semisl\nssl_resnext101_32x4d,yfc-semisl\nssl_resnext101_32x8d,yfc-semisl\nssl_resnext50_32x4d,yfc-semisl\nswin_base_patch4_window12_384,in21k\nswin_base_patch4_window7_224,in21k\nswin_large_patch4_window12_384,in21k\nswin_large_patch4_window7_224,in21k\nswin_small_patch4_window7_224,in1k\nswin_tiny_patch4_window7_224,in1k\nswsl_resnet18,ig1b-swsl\nswsl_resnet50,ig1b-swsl\nswsl_resne
xt101_32x16d,ig1b-swsl\nswsl_resnext101_32x4d,ig1b-swsl\nswsl_resnext101_32x8d,ig1b-swsl\nswsl_resnext50_32x4d,ig1b-swsl\ntf_efficientnet_b0,in1k\ntf_efficientnet_b0_ap,in1k-ap\ntf_efficientnet_b0_ns,jft300m-ns\ntf_efficientnet_b1,in1k\ntf_efficientnet_b1_ap,in1k-ap\ntf_efficientnet_b1_ns,jft300m-ns\ntf_efficientnet_b2,in1k\ntf_efficientnet_b2_ap,in1k-ap\ntf_efficientnet_b2_ns,jft300m-ns\ntf_efficientnet_b3,in1k\ntf_efficientnet_b3_ap,in1k-ap\ntf_efficientnet_b3_ns,jft300m-ns\ntf_efficientnet_b4,in1k\ntf_efficientnet_b4_ap,in1k-ap\ntf_efficientnet_b4_ns,jft300m-ns\ntf_efficientnet_b5,in1k\ntf_efficientnet_b5_ap,in1k-ap\ntf_efficientnet_b5_ns,jft300m-ns\ntf_efficientnet_b6,in1k\ntf_efficientnet_b6_ap,in1k-ap\ntf_efficientnet_b6_ns,jft300m-ns\ntf_efficientnet_b7,in1k\ntf_efficientnet_b7_ap,in1k-ap\ntf_efficientnet_b7_ns,jft300m-ns\ntf_efficientnet_b8,in1k\ntf_efficientnet_b8_ap,in1k-ap\ntf_efficientnet_cc_b0_4e,in1k\ntf_efficientnet_cc_b0_8e,in1k\ntf_efficientnet_cc_b1_8e,in1k\ntf_efficientnet_el,in1k\ntf_efficientnet_em,in1k\ntf_efficientnet_es,in1k\ntf_efficientnet_l2_ns,jft300m-ns\ntf_efficientnet_l2_ns_475,jft300m-ns\ntf_efficientnet_lite0,in1k\ntf_efficientnet_lite1,in1k\ntf_efficientnet_lite2,in1k\ntf_efficientnet_lite3,in1k\ntf_efficientnet_lite4,in1k\ntf_efficientnetv2_b0,in1k\ntf_efficientnetv2_b1,in1k\ntf_efficientnetv2_b2,in1k\ntf_efficientnetv2_b3,in1k\ntf_efficientnetv2_l,in1k\ntf_efficientnetv2_l_in21ft1k,in21k\ntf_efficientnetv2_m,in1k\ntf_efficientnetv2_m_in21ft1k,in21k\ntf_efficientnetv2_s,in1k\ntf_efficientnetv2_s_in21ft1k,in21k\ntf_efficientnetv2_xl_in21ft1k,in21k\ntf_inception_v3,in1k\ntf_mixnet_l,in1k\ntf_mixnet_m,in1k\ntf_mixnet_s,in1k\ntf_mobilenetv3_large_075,in1k\ntf_mobilenetv3_large_100,in1k\ntf_mobilenetv3_large_minimal_100,in1k\ntf_mobilenetv3_small_075,in1k\ntf_mobilenetv3_small_100,in1k\ntf_mobilenetv3_small_minimal_100,in1k\ntnt_s_patch16_224,in1k\ntresnet_l,in1k\ntresnet_l_448,in1k\ntresnet_m,in21k\ntresnet_m_448,in1k\ntresnet_xl,in1k\
ntresnet_xl_448,in1k\ntv_densenet121,in1k\ntv_resnet101,in1k\ntv_resnet152,in1k\ntv_resnet34,in1k\ntv_resnet50,in1k\ntv_resnext50_32x4d,in1k\ntwins_pcpvt_base,in1k\ntwins_pcpvt_large,in1k\ntwins_pcpvt_small,in1k\ntwins_svt_base,in1k\ntwins_svt_large,in1k\ntwins_svt_small,in1k\nvgg11,in1k\nvgg11_bn,in1k\nvgg13,in1k\nvgg13_bn,in1k\nvgg16,in1k\nvgg16_bn,in1k\nvgg19,in1k\nvgg19_bn,in1k\nvisformer_small,in1k\nvit_base_patch16_224,in21k\nvit_base_patch16_224_miil,in21k\nvit_base_patch16_384,in21k\nvit_base_patch16_224_sam,in1k\nvit_base_patch32_224,in21k\nvit_base_patch32_384,in21k\nvit_base_patch32_224_sam,in1k\nvit_base_r50_s16_384,in21k\nvit_large_patch16_224,in21k\nvit_large_patch16_384,in21k\nvit_large_patch32_384,in21k\nvit_large_r50_s32_224,in21k\nvit_large_r50_s32_384,in21k\nvit_small_patch16_224,in21k\nvit_small_patch16_384,in21k\nvit_small_patch32_224,in21k\nvit_small_patch32_384,in21k\nvit_small_r26_s32_224,in21k\nvit_small_r26_s32_384,in21k\nvit_tiny_patch16_224,in21k\nvit_tiny_patch16_384,in21k\nvit_tiny_r_s16_p8_224,in21k\nvit_tiny_r_s16_p8_384,in21k\nwide_resnet101_2,in1k\nwide_resnet50_2,in1k\nxception,in1k\nxception41,in1k\nxception65,in1k\nxception71,in1k\nxcit_large_24_p16_224,in1k\nxcit_large_24_p16_224_dist,in1k-dist\nxcit_large_24_p16_384_dist,in1k-dist\nxcit_large_24_p8_224,in1k\nxcit_large_24_p8_224_dist,in1k-dist\nxcit_large_24_p8_384_dist,in1k-dist\nxcit_medium_24_p16_224,in1k\nxcit_medium_24_p16_224_dist,in1k-dist\nxcit_medium_24_p16_384_dist,in1k-dist\nxcit_medium_24_p8_224,in1k\nxcit_medium_24_p8_224_dist,in1k-dist\nxcit_medium_24_p8_384_dist,in1k-dist\nxcit_nano_12_p16_224,in1k\nxcit_nano_12_p16_224_dist,in1k-dist\nxcit_nano_12_p16_384_dist,in1k-dist\nxcit_nano_12_p8_224,in1k\nxcit_nano_12_p8_224_dist,in1k-dist\nxcit_nano_12_p8_384_dist,in1k-dist\nxcit_small_12_p16_224,in1k\nxcit_small_12_p16_224_dist,in1k-dist\nxcit_small_12_p16_384_dist,in1k-dist\nxcit_small_12_p8_224,in1k\nxcit_small_12_p8_224_dist,in1k-dist\nxcit_small_12_p8_384_dist,in1k
-dist\nxcit_small_24_p16_224,in1k\nxcit_small_24_p16_224_dist,in1k-dist\nxcit_small_24_p16_384_dist,in1k-dist\nxcit_small_24_p8_224,in1k\nxcit_small_24_p8_224_dist,in1k-dist\nxcit_small_24_p8_384_dist,in1k-dist\nxcit_tiny_12_p16_224,in1k\nxcit_tiny_12_p16_224_dist,in1k-dist\nxcit_tiny_12_p16_384_dist,in1k-dist\nxcit_tiny_12_p8_224,in1k\nxcit_tiny_12_p8_224_dist,in1k-dist\nxcit_tiny_12_p8_384_dist,in1k-dist\nxcit_tiny_24_p16_224,in1k\nxcit_tiny_24_p16_224_dist,in1k-dist\nxcit_tiny_24_p16_384_dist,in1k-dist\nxcit_tiny_24_p8_224,in1k\nxcit_tiny_24_p8_224_dist,in1k-dist\nxcit_tiny_24_p8_384_dist,in1k-dist\n"
  },
  {
    "path": "results/results-imagenet-a-clean.csv",
    "content": "model,img_size,top1,top1_err,top5,top5_err,param_count,crop_pct,interpolation\neva02_large_patch14_448.mim_in22k_ft_in22k_in1k,448,98.940,1.060,99.910,0.090,305.08,1.000,bicubic\neva02_large_patch14_448.mim_in22k_ft_in1k,448,98.850,1.150,99.840,0.160,305.08,1.000,bicubic\neva02_large_patch14_448.mim_m38m_ft_in22k_in1k,448,98.830,1.170,99.870,0.130,305.08,1.000,bicubic\neva_giant_patch14_560.m30m_ft_in22k_in1k,560,98.820,1.180,99.900,0.100,\"1,014.45\",1.000,bicubic\neva_giant_patch14_336.m30m_ft_in22k_in1k,336,98.810,1.190,99.900,0.100,\"1,013.01\",1.000,bicubic\neva_giant_patch14_336.clip_ft_in1k,336,98.810,1.190,99.820,0.180,\"1,013.01\",1.000,bicubic\nvit_so400m_patch14_siglip_gap_378.webli_ft_in1k,378,98.780,1.220,99.830,0.170,414.14,1.000,bicubic\neva_large_patch14_336.in22k_ft_in22k_in1k,336,98.750,1.250,99.810,0.190,304.53,1.000,bicubic\neva02_large_patch14_448.mim_m38m_ft_in1k,448,98.730,1.270,99.790,0.210,305.08,1.000,bicubic\neva_large_patch14_336.in22k_ft_in1k,336,98.720,1.280,99.870,0.130,304.53,1.000,bicubic\nconvnextv2_huge.fcmae_ft_in22k_in1k_384,384,98.670,1.330,99.860,0.140,660.29,1.000,bicubic\nmaxvit_base_tf_512.in21k_ft_in1k,512,98.640,1.360,99.800,0.200,119.88,1.000,bicubic\nvit_so400m_patch14_siglip_378.webli_ft_in1k,378,98.620,1.380,99.830,0.170,429.38,1.000,bicubic\neva02_base_patch14_448.mim_in22k_ft_in22k_in1k,448,98.620,1.380,99.800,0.200,87.12,1.000,bicubic\nmaxvit_xlarge_tf_512.in21k_ft_in1k,512,98.620,1.380,99.800,0.200,475.77,1.000,bicubic\nmaxvit_large_tf_512.in21k_ft_in1k,512,98.610,1.390,99.790,0.210,212.33,1.000,bicubic\nconvnextv2_huge.fcmae_ft_in22k_in1k_512,512,98.590,1.410,99.870,0.130,660.29,1.000,bicubic\nbeit_large_patch16_512.in22k_ft_in22k_in1k,512,98.560,1.440,99.830,0.170,305.67,1.000,bicubic\nbeitv2_large_patch16_224.in1k_ft_in22k_in1k,224,98.550,1.450,99.760,0.240,304.43,0.950,bicubic\ntf_efficientnet_l2.ns_jft_in1k,800,98.540,1.460,99.820,0.180,480.31,0.960,bicubic\nmaxvit_base_tf_384.in21k_ft_in1k,384
,98.520,1.480,99.750,0.250,119.65,1.000,bicubic\ntf_efficientnet_l2.ns_jft_in1k_475,475,98.500,1.500,99.840,0.160,480.31,0.936,bicubic\nbeit_large_patch16_384.in22k_ft_in22k_in1k,384,98.500,1.500,99.820,0.180,305.00,1.000,bicubic\nmaxvit_xlarge_tf_384.in21k_ft_in1k,384,98.500,1.500,99.780,0.220,475.32,1.000,bicubic\nmaxvit_large_tf_384.in21k_ft_in1k,384,98.490,1.510,99.750,0.250,212.03,1.000,bicubic\nconvnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_384,384,98.480,1.520,99.750,0.250,200.13,1.000,bicubic\ndeit3_large_patch16_384.fb_in22k_ft_in1k,384,98.460,1.540,99.750,0.250,304.76,1.000,bicubic\neva_giant_patch14_224.clip_ft_in1k,224,98.460,1.540,99.750,0.250,\"1,012.56\",0.900,bicubic\nregnety_1280.swag_ft_in1k,384,98.450,1.550,99.870,0.130,644.81,1.000,bicubic\nconvnext_xxlarge.clip_laion2b_soup_ft_in1k,256,98.450,1.550,99.800,0.200,846.47,1.000,bicubic\ncaformer_b36.sail_in22k_ft_in1k_384,384,98.440,1.560,99.820,0.180,98.75,1.000,bicubic\neva02_base_patch14_448.mim_in22k_ft_in1k,448,98.440,1.560,99.820,0.180,87.12,1.000,bicubic\nvit_huge_patch14_clip_336.laion2b_ft_in12k_in1k,336,98.430,1.570,99.820,0.180,632.46,1.000,bicubic\nconvnext_xlarge.fb_in22k_ft_in1k_384,384,98.420,1.580,99.810,0.190,350.20,1.000,bicubic\neva_large_patch14_196.in22k_ft_in22k_in1k,196,98.420,1.580,99.770,0.230,304.14,1.000,bicubic\nvit_so150m2_patch16_reg1_gap_448.sbb_e200_in12k_ft_in1k,448,98.400,1.600,99.810,0.190,136.50,1.000,bicubic\nconvnextv2_large.fcmae_ft_in22k_in1k_384,384,98.380,1.620,99.760,0.240,197.96,1.000,bicubic\neva_large_patch14_196.in22k_ft_in1k,196,98.360,1.640,99.830,0.170,304.14,1.000,bicubic\nconvnextv2_base.fcmae_ft_in22k_in1k_384,384,98.360,1.640,99.780,0.220,88.72,1.000,bicubic\nmambaout_base_plus_rw.sw_e150_r384_in12k_ft_in1k,384,98.340,1.660,99.760,0.240,101.66,1.000,bicubic\nvit_large_patch14_clip_336.laion2b_ft_in12k_in1k,336,98.330,1.670,99.760,0.240,304.53,1.000,bicubic\nvit_huge_patch14_clip_224.laion2b_ft_in12k_in1k,224,98.290,1.710,99.780,0.220,632.05,1
.000,bicubic\nconvnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_320,320,98.280,1.720,99.770,0.230,200.13,1.000,bicubic\nvit_large_patch14_clip_336.openai_ft_in12k_in1k,336,98.270,1.730,99.770,0.230,304.53,1.000,bicubic\nconvformer_b36.sail_in22k_ft_in1k_384,384,98.260,1.740,99.830,0.170,99.88,1.000,bicubic\nmaxxvitv2_rmlp_base_rw_384.sw_in12k_ft_in1k,384,98.260,1.740,99.780,0.220,116.09,1.000,bicubic\nconvnext_large_mlp.clip_laion2b_augreg_ft_in1k_384,384,98.260,1.740,99.760,0.240,200.13,1.000,bicubic\nconvnext_large.fb_in22k_ft_in1k_384,384,98.260,1.740,99.750,0.250,197.77,1.000,bicubic\nvit_large_patch16_384.augreg_in21k_ft_in1k,384,98.250,1.750,99.800,0.200,304.72,1.000,bicubic\nvit_so150m2_patch16_reg1_gap_384.sbb_e200_in12k_ft_in1k,384,98.240,1.760,99.810,0.190,136.33,1.000,bicubic\nvit_large_patch14_clip_224.openai_ft_in12k_in1k,224,98.230,1.770,99.720,0.280,304.20,1.000,bicubic\nvit_large_patch14_clip_336.laion2b_ft_in1k,336,98.230,1.770,99.720,0.280,304.53,1.000,bicubic\nvit_so150m2_patch16_reg1_gap_256.sbb_e200_in12k_ft_in1k,256,98.220,1.780,99.730,0.270,136.06,1.000,bicubic\nseresnextaa201d_32x8d.sw_in12k_ft_in1k_384,384,98.200,1.800,99.780,0.220,149.39,1.000,bicubic\nvit_so150m_patch16_reg4_gap_384.sbb_e250_in12k_ft_in1k,384,98.200,1.800,99.770,0.230,134.42,1.000,bicubic\nvit_base_patch16_clip_384.openai_ft_in12k_in1k,384,98.190,1.810,99.670,0.330,86.86,0.950,bicubic\nmaxvit_rmlp_base_rw_384.sw_in12k_ft_in1k,384,98.180,1.820,99.780,0.220,116.14,1.000,bicubic\ncaformer_b36.sail_in22k_ft_in1k,224,98.170,1.830,99.770,0.230,98.75,1.000,bicubic\nbeit_large_patch16_224.in22k_ft_in22k_in1k,224,98.170,1.830,99.760,0.240,304.43,0.900,bicubic\ndeit3_large_patch16_224.fb_in22k_ft_in1k,224,98.170,1.830,99.750,0.250,304.37,1.000,bicubic\ndeit3_huge_patch14_224.fb_in22k_ft_in1k,224,98.170,1.830,99.730,0.270,632.13,1.000,bicubic\nvit_mediumd_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k,384,98.170,1.830,99.730,0.270,64.27,1.000,bicubic\nswinv2_large_window12to24_192to38
4.ms_in22k_ft_in1k,384,98.160,1.840,99.710,0.290,196.74,1.000,bicubic\nvit_large_patch14_clip_224.openai_ft_in1k,224,98.160,1.840,99.660,0.340,304.20,1.000,bicubic\nswinv2_base_window12to24_192to384.ms_in22k_ft_in1k,384,98.150,1.850,99.780,0.220,87.92,1.000,bicubic\ncaformer_m36.sail_in22k_ft_in1k_384,384,98.150,1.850,99.750,0.250,56.20,1.000,bicubic\nconvnext_large.fb_in22k_ft_in1k,288,98.130,1.870,99.750,0.250,197.77,1.000,bicubic\nconvnextv2_large.fcmae_ft_in22k_in1k,288,98.120,1.880,99.780,0.220,197.96,1.000,bicubic\nconvnext_xlarge.fb_in22k_ft_in1k,288,98.120,1.880,99.770,0.230,350.20,1.000,bicubic\nmambaout_base_plus_rw.sw_e150_in12k_ft_in1k,288,98.120,1.880,99.670,0.330,101.66,1.000,bicubic\nconvnext_base.fb_in22k_ft_in1k_384,384,98.100,1.900,99.650,0.350,88.59,1.000,bicubic\nvit_large_patch14_clip_224.laion2b_ft_in12k_in1k,224,98.090,1.910,99.760,0.240,304.20,1.000,bicubic\nregnety_320.swag_ft_in1k,384,98.080,1.920,99.860,0.140,145.05,1.000,bicubic\nbeit3_large_patch16_224.indomain_in22k_ft_in1k,224,98.070,1.930,99.770,0.230,304.57,1.000,bicubic\ncoatnet_rmlp_2_rw_384.sw_in12k_ft_in1k,384,98.070,1.930,99.700,0.300,73.88,1.000,bicubic\nconvnextv2_base.fcmae_ft_in22k_in1k,288,98.060,1.940,99.750,0.250,88.72,1.000,bicubic\nconvnext_base.clip_laion2b_augreg_ft_in12k_in1k_384,384,98.050,1.950,99.750,0.250,88.59,1.000,bicubic\nbeit3_large_patch16_224.in22k_ft_in1k,224,98.050,1.950,99.740,0.260,304.57,1.000,bicubic\nvit_huge_patch14_clip_224.laion2b_ft_in1k,224,98.030,1.970,99.730,0.270,632.05,1.000,bicubic\nswin_large_patch4_window12_384.ms_in22k_ft_in1k,384,98.030,1.970,99.690,0.310,196.74,1.000,bicubic\nconvformer_m36.sail_in22k_ft_in1k_384,384,98.020,1.980,99.690,0.310,57.05,1.000,bicubic\nvit_base_patch16_clip_384.laion2b_ft_in12k_in1k,384,98.010,1.990,99.660,0.340,86.86,1.000,bicubic\ncaformer_s36.sail_in22k_ft_in1k_384,384,98.000,2.000,99.720,0.280,39.30,1.000,bicubic\nconvnext_large_mlp.clip_laion2b_augreg_ft_in1k,256,97.980,2.020,99.700,0.300,200.13,1.000,
bicubic\nhiera_huge_224.mae_in1k_ft_in1k,224,97.970,2.030,99.610,0.390,672.78,0.900,bicubic\nseresnextaa101d_32x8d.sw_in12k_ft_in1k_288,320,97.960,2.040,99.700,0.300,93.59,1.000,bicubic\nconvformer_b36.sail_in22k_ft_in1k,224,97.950,2.050,99.760,0.240,99.88,1.000,bicubic\nseresnextaa101d_32x8d.sw_in12k_ft_in1k_288,288,97.950,2.050,99.740,0.260,93.59,0.950,bicubic\nnextvit_large.bd_ssld_6m_in1k_384,384,97.940,2.060,99.790,0.210,57.87,1.000,bicubic\nconvnextv2_large.fcmae_ft_in22k_in1k,224,97.930,2.070,99.770,0.230,197.96,0.875,bicubic\nseresnextaa101d_32x8d.sw_in12k_ft_in1k,288,97.930,2.070,99.680,0.320,93.59,1.000,bicubic\nconvnext_xlarge.fb_in22k_ft_in1k,224,97.920,2.080,99.680,0.320,350.20,0.875,bicubic\nbeitv2_large_patch16_224.in1k_ft_in1k,224,97.920,2.080,99.660,0.340,304.43,0.950,bicubic\nconvnextv2_huge.fcmae_ft_in1k,288,97.910,2.090,99.680,0.320,660.29,1.000,bicubic\nvit_large_patch14_clip_224.laion2b_ft_in1k,224,97.910,2.090,99.650,0.350,304.20,1.000,bicubic\ntf_efficientnet_b7.ns_jft_in1k,600,97.900,2.100,99.720,0.280,66.35,0.949,bicubic\nconvnextv2_base.fcmae_ft_in22k_in1k,224,97.900,2.100,99.690,0.310,88.72,0.875,bicubic\ntiny_vit_21m_512.dist_in22k_ft_in1k,512,97.900,2.100,99.630,0.370,21.27,1.000,bicubic\ntf_efficientnetv2_xl.in21k_ft_in1k,512,97.900,2.100,99.590,0.410,208.12,1.000,bicubic\nmambaout_base_plus_rw.sw_e150_in12k_ft_in1k,224,97.900,2.100,99.560,0.440,101.66,1.000,bicubic\nnextvit_base.bd_ssld_6m_in1k_384,384,97.870,2.130,99.760,0.240,44.82,1.000,bicubic\nswin_base_patch4_window12_384.ms_in22k_ft_in1k,384,97.870,2.130,99.710,0.290,87.90,1.000,bicubic\nconvnext_large.fb_in22k_ft_in1k,224,97.850,2.150,99.690,0.310,197.77,0.875,bicubic\nconvnext_base.fb_in22k_ft_in1k,288,97.850,2.150,99.670,0.330,88.59,1.000,bicubic\nvit_large_r50_s32_384.augreg_in21k_ft_in1k,384,97.850,2.150,99.670,0.330,329.09,1.000,bicubic\nconvformer_s36.sail_in22k_ft_in1k_384,384,97.850,2.150,99.640,0.360,40.01,1.000,bicubic\ndeit3_base_patch16_384.fb_in22k_ft_in1k,384,97.
840,2.160,99.680,0.320,86.88,1.000,bicubic\ncaformer_m36.sail_in22k_ft_in1k,224,97.840,2.160,99.670,0.330,56.20,1.000,bicubic\nswinv2_large_window12to16_192to256.ms_in22k_ft_in1k,256,97.840,2.160,99.670,0.330,196.74,0.900,bicubic\ntf_efficientnetv2_l.in21k_ft_in1k,480,97.830,2.170,99.760,0.240,118.52,1.000,bicubic\nbeit_base_patch16_384.in22k_ft_in22k_in1k,384,97.830,2.170,99.700,0.300,86.74,1.000,bicubic\nmaxvit_large_tf_512.in1k,512,97.830,2.170,99.560,0.440,212.33,1.000,bicubic\nvit_base_patch16_384.augreg_in21k_ft_in1k,384,97.820,2.180,99.670,0.330,86.86,1.000,bicubic\nhgnetv2_b6.ssld_stage1_in22k_in1k,288,97.810,2.190,99.690,0.310,75.26,1.000,bicubic\ntf_efficientnetv2_m.in21k_ft_in1k,480,97.810,2.190,99.600,0.400,54.14,1.000,bicubic\nhgnetv2_b6.ssld_stage2_ft_in1k,288,97.800,2.200,99.690,0.310,75.26,1.000,bicubic\nvolo_d5_512.sail_in1k,512,97.800,2.200,99.670,0.330,296.09,1.150,bicubic\nconvnext_small.in12k_ft_in1k_384,384,97.800,2.200,99.650,0.350,50.22,1.000,bicubic\nmaxvit_rmlp_base_rw_224.sw_in12k_ft_in1k,224,97.800,2.200,99.650,0.350,116.14,0.950,bicubic\nmaxvit_base_tf_512.in1k,512,97.800,2.200,99.610,0.390,119.88,1.000,bicubic\nregnety_160.swag_ft_in1k,384,97.790,2.210,99.760,0.240,83.59,1.000,bicubic\ndm_nfnet_f6.dm_in1k,576,97.780,2.220,99.650,0.350,438.36,0.956,bicubic\ndm_nfnet_f5.dm_in1k,544,97.780,2.220,99.590,0.410,377.21,0.954,bicubic\nnextvit_small.bd_ssld_6m_in1k_384,384,97.770,2.230,99.710,0.290,31.76,1.000,bicubic\nvit_betwixt_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k,384,97.770,2.230,99.590,0.410,60.60,1.000,bicubic\nvit_so150m_patch16_reg4_gap_256.sbb_e250_in12k_ft_in1k,256,97.760,2.240,99.660,0.340,134.13,0.950,bicubic\nmaxvit_small_tf_512.in1k,512,97.760,2.240,99.550,0.450,69.13,1.000,bicubic\nmaxxvitv2_rmlp_base_rw_224.sw_in12k_ft_in1k,224,97.750,2.250,99.710,0.290,116.09,0.950,bicubic\nvit_base_patch16_clip_384.laion2b_ft_in1k,384,97.750,2.250,99.630,0.370,86.86,1.000,bicubic\nvolo_d5_448.sail_in1k,448,97.750,2.250,99.620,0.380,295.
91,1.150,bicubic\nvit_base_patch8_224.augreg2_in21k_ft_in1k,224,97.740,2.260,99.640,0.360,86.58,0.900,bicubic\nvit_mediumd_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k,256,97.710,2.290,99.640,0.360,64.11,0.950,bicubic\ndm_nfnet_f6.dm_in1k,448,97.710,2.290,99.610,0.390,438.36,0.956,bicubic\nbeitv2_base_patch16_224.in1k_ft_in22k_in1k,224,97.700,2.300,99.680,0.320,86.53,0.900,bicubic\nswinv2_base_window12to16_192to256.ms_in22k_ft_in1k,256,97.680,2.320,99.720,0.280,87.92,0.900,bicubic\nregnety_1280.swag_lc_in1k,224,97.680,2.320,99.640,0.360,644.81,0.965,bicubic\nvolo_d4_448.sail_in1k,448,97.680,2.320,99.610,0.390,193.41,1.150,bicubic\nrdnet_large.nv_in1k_ft_in1k_384,384,97.670,2.330,99.550,0.450,186.27,1.000,bicubic\nconvnextv2_large.fcmae_ft_in1k,288,97.660,2.340,99.590,0.410,197.96,1.000,bicubic\nseresnextaa101d_32x8d.sw_in12k_ft_in1k,224,97.650,2.350,99.690,0.310,93.59,0.875,bicubic\nconvnextv2_huge.fcmae_ft_in1k,224,97.650,2.350,99.620,0.380,660.29,0.875,bicubic\nvit_large_patch16_224.augreg_in21k_ft_in1k,224,97.640,2.360,99.590,0.410,304.33,0.900,bicubic\ncoatnet_rmlp_2_rw_224.sw_in12k_ft_in1k,224,97.640,2.360,99.570,0.430,73.88,0.950,bicubic\nswin_large_patch4_window7_224.ms_in22k_ft_in1k,224,97.640,2.360,99.560,0.440,196.53,0.900,bicubic\nvit_mediumd_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,97.630,2.370,99.620,0.380,64.11,0.950,bicubic\ndm_nfnet_f4.dm_in1k,512,97.630,2.370,99.540,0.460,316.07,0.951,bicubic\nconvnext_small.fb_in22k_ft_in1k_384,384,97.620,2.380,99.600,0.400,50.22,1.000,bicubic\ntf_efficientnet_b6.ns_jft_in1k,528,97.620,2.380,99.580,0.420,43.04,0.942,bicubic\nhgnetv2_b6.ssld_stage1_in22k_in1k,224,97.610,2.390,99.620,0.380,75.26,0.965,bicubic\ncaformer_s36.sail_in22k_ft_in1k,224,97.610,2.390,99.610,0.390,39.30,1.000,bicubic\ntiny_vit_21m_384.dist_in22k_ft_in1k,384,97.610,2.390,99.590,0.410,21.23,1.000,bicubic\nconvnext_base.clip_laiona_augreg_ft_in1k_384,384,97.610,2.390,99.540,0.460,88.59,1.000,bicubic\nconvnext_base.clip_laion2b_augreg_ft_in12k_i
n1k,256,97.600,2.400,99.720,0.280,88.59,1.000,bicubic\nconvformer_m36.sail_in22k_ft_in1k,224,97.590,2.410,99.620,0.380,57.05,1.000,bicubic\nmaxvit_base_tf_384.in1k,384,97.590,2.410,99.590,0.410,119.65,1.000,bicubic\nmaxvit_large_tf_384.in1k,384,97.590,2.410,99.530,0.470,212.03,1.000,bicubic\nefficientvit_l3.r384_in1k,384,97.590,2.410,99.520,0.480,246.04,1.000,bicubic\nvit_base_patch8_224.augreg_in21k_ft_in1k,224,97.580,2.420,99.670,0.330,86.58,0.900,bicubic\nmaxvit_tiny_tf_512.in1k,512,97.570,2.430,99.560,0.440,31.05,1.000,bicubic\nvit_base_patch16_clip_384.openai_ft_in1k,384,97.550,2.450,99.660,0.340,86.86,1.000,bicubic\nvolo_d3_448.sail_in1k,448,97.550,2.450,99.550,0.450,86.63,1.000,bicubic\nconvformer_b36.sail_in1k_384,384,97.540,2.460,99.520,0.480,99.88,1.000,bicubic\nvit_base_patch16_clip_224.openai_ft_in12k_in1k,224,97.540,2.460,99.510,0.490,86.57,0.950,bicubic\nxcit_large_24_p16_384.fb_dist_in1k,384,97.540,2.460,99.480,0.520,189.10,1.000,bicubic\ncoatnet_2_rw_224.sw_in12k_ft_in1k,224,97.530,2.470,99.610,0.390,73.87,0.950,bicubic\nxcit_large_24_p8_384.fb_dist_in1k,384,97.530,2.470,99.540,0.460,188.93,1.000,bicubic\ntf_efficientnetv2_l.in1k,480,97.520,2.480,99.550,0.450,118.52,1.000,bicubic\nregnety_160.sw_in12k_ft_in1k,288,97.510,2.490,99.590,0.410,83.59,1.000,bicubic\ncaformer_b36.sail_in1k_384,384,97.510,2.490,99.580,0.420,98.75,1.000,bicubic\nconvnext_base.fb_in22k_ft_in1k,224,97.500,2.500,99.610,0.390,88.59,0.875,bicubic\ndeit3_base_patch16_224.fb_in22k_ft_in1k,224,97.490,2.510,99.620,0.380,86.59,1.000,bicubic\nresnetv2_152x4_bit.goog_in21k_ft_in1k,480,97.490,2.510,99.620,0.380,936.53,1.000,bilinear\ncait_m48_448.fb_dist_in1k,448,97.490,2.510,99.600,0.400,356.46,1.000,bicubic\ndm_nfnet_f3.dm_in1k,416,97.480,2.520,99.560,0.440,254.92,0.940,bicubic\ndm_nfnet_f5.dm_in1k,416,97.480,2.520,99.520,0.480,377.21,0.954,bicubic\ntf_efficientnet_b5.ns_jft_in1k,456,97.470,2.530,99.640,0.360,30.39,0.934,bicubic\ntf_efficientnetv2_l.in21k_ft_in1k,384,97.460,2.540,99.640,
0.360,118.52,1.000,bicubic\nhgnetv2_b6.ssld_stage2_ft_in1k,224,97.460,2.540,99.600,0.400,75.26,0.965,bicubic\nvit_medium_patch16_gap_384.sw_in12k_ft_in1k,384,97.450,2.550,99.640,0.360,39.03,0.950,bicubic\ncaformer_m36.sail_in1k_384,384,97.450,2.550,99.600,0.400,56.20,1.000,bicubic\nregnety_160.lion_in12k_ft_in1k,288,97.450,2.550,99.600,0.400,83.59,1.000,bicubic\nvit_base_patch16_clip_224.laion2b_ft_in12k_in1k,224,97.450,2.550,99.540,0.460,86.57,0.950,bicubic\ndeit3_large_patch16_384.fb_in1k,384,97.440,2.560,99.630,0.370,304.76,1.000,bicubic\ncaformer_s18.sail_in22k_ft_in1k_384,384,97.440,2.560,99.590,0.410,26.34,1.000,bicubic\nhiera_large_224.mae_in1k_ft_in1k,224,97.440,2.560,99.460,0.540,213.74,0.900,bicubic\nhgnet_base.ssld_in1k,288,97.430,2.570,99.620,0.380,71.58,1.000,bicubic\nflexivit_large.1200ep_in1k,240,97.420,2.580,99.600,0.400,304.36,0.950,bicubic\nmaxvit_small_tf_384.in1k,384,97.410,2.590,99.500,0.500,69.02,1.000,bicubic\nefficientvit_l3.r320_in1k,320,97.410,2.590,99.390,0.610,246.04,1.000,bicubic\ncaformer_s36.sail_in1k_384,384,97.400,2.600,99.540,0.460,39.30,1.000,bicubic\ncait_m36_384.fb_dist_in1k,384,97.400,2.600,99.510,0.490,271.22,1.000,bicubic\nconvformer_m36.sail_in1k_384,384,97.400,2.600,99.470,0.530,57.05,1.000,bicubic\nefficientvit_l2.r384_in1k,384,97.400,2.600,99.420,0.580,63.71,1.000,bicubic\nnextvit_large.bd_ssld_6m_in1k,224,97.390,2.610,99.670,0.330,57.87,0.950,bicubic\nefficientnet_b5.sw_in12k_ft_in1k,448,97.390,2.610,99.550,0.450,30.39,1.000,bicubic\nvolo_d5_224.sail_in1k,224,97.380,2.620,99.570,0.430,295.46,0.960,bicubic\nvit_base_patch32_clip_384.laion2b_ft_in12k_in1k,384,97.380,2.620,99.510,0.490,88.30,1.000,bicubic\ntf_efficientnetv2_xl.in21k_ft_in1k,384,97.380,2.620,99.370,0.630,208.12,1.000,bicubic\nresnext101_32x32d.fb_wsl_ig1b_ft_in1k,224,97.370,2.630,99.680,0.320,468.53,0.875,bilinear\nhgnetv2_b5.ssld_stage2_ft_in1k,288,97.370,2.630,99.670,0.330,39.57,1.000,bicubic\nconvnextv2_large.fcmae_ft_in1k,224,97.350,2.650,99.560,0.440,197
.96,0.875,bicubic\ncait_s36_384.fb_dist_in1k,384,97.350,2.650,99.530,0.470,68.37,1.000,bicubic\nconvnext_small.fb_in22k_ft_in1k,288,97.350,2.650,99.530,0.470,50.22,1.000,bicubic\ndm_nfnet_f4.dm_in1k,384,97.350,2.650,99.480,0.520,316.07,0.951,bicubic\nconvnext_small.in12k_ft_in1k,288,97.340,2.660,99.580,0.420,50.22,1.000,bicubic\nconvnext_tiny.in12k_ft_in1k_384,384,97.330,2.670,99.600,0.400,28.59,1.000,bicubic\nvit_base_patch32_clip_448.laion2b_ft_in12k_in1k,448,97.330,2.670,99.480,0.520,88.34,1.000,bicubic\nvit_betwixt_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,97.330,2.670,99.480,0.520,60.40,0.950,bicubic\nvolo_d2_384.sail_in1k,384,97.320,2.680,99.590,0.410,58.87,1.000,bicubic\nmaxvit_tiny_tf_384.in1k,384,97.310,2.690,99.500,0.500,30.98,1.000,bicubic\nxcit_medium_24_p16_384.fb_dist_in1k,384,97.310,2.690,99.460,0.540,84.40,1.000,bicubic\nvolo_d4_224.sail_in1k,224,97.300,2.700,99.530,0.470,192.96,0.960,bicubic\nxcit_medium_24_p8_384.fb_dist_in1k,384,97.300,2.700,99.510,0.490,84.32,1.000,bicubic\nxcit_small_24_p8_384.fb_dist_in1k,384,97.280,2.720,99.600,0.400,47.63,1.000,bicubic\nflexivit_large.600ep_in1k,240,97.280,2.720,99.590,0.410,304.36,0.950,bicubic\nconvformer_s18.sail_in22k_ft_in1k_384,384,97.280,2.720,99.560,0.440,26.77,1.000,bicubic\nswin_base_patch4_window7_224.ms_in22k_ft_in1k,224,97.280,2.720,99.520,0.480,87.77,0.900,bicubic\ntf_efficientnetv2_l.in1k,384,97.280,2.720,99.500,0.500,118.52,1.000,bicubic\nconvformer_s36.sail_in1k_384,384,97.280,2.720,99.430,0.570,40.01,1.000,bicubic\nhgnetv2_b5.ssld_stage1_in22k_in1k,288,97.270,2.730,99.660,0.340,39.57,1.000,bicubic\nnextvit_base.bd_ssld_6m_in1k,224,97.270,2.730,99.660,0.340,44.82,0.950,bicubic\nconvnext_base.clip_laion2b_augreg_ft_in1k,256,97.270,2.730,99.560,0.440,88.59,1.000,bicubic\nregnety_120.sw_in12k_ft_in1k,288,97.270,2.730,99.540,0.460,51.82,1.000,bicubic\ninception_next_base.sail_in1k_384,384,97.270,2.730,99.520,0.480,86.67,1.000,bicubic\nflexivit_large.300ep_in1k,240,97.250,2.750,99.490,0.510,304.3
6,0.950,bicubic\nconvnextv2_base.fcmae_ft_in1k,288,97.240,2.760,99.550,0.450,88.72,1.000,bicubic\nconvnextv2_tiny.fcmae_ft_in22k_in1k_384,384,97.240,2.760,99.540,0.460,28.64,1.000,bicubic\nregnety_160.lion_in12k_ft_in1k,224,97.240,2.760,99.540,0.460,83.59,0.950,bicubic\nregnety_2560.seer_ft_in1k,384,97.240,2.760,99.510,0.490,\"1,282.60\",1.000,bicubic\ntf_efficientnetv2_m.in1k,480,97.230,2.770,99.530,0.470,54.14,1.000,bicubic\nxcit_small_12_p8_384.fb_dist_in1k,384,97.230,2.770,99.480,0.520,26.21,1.000,bicubic\nresnext101_32x8d.fb_swsl_ig1b_ft_in1k,224,97.220,2.780,99.590,0.410,88.79,0.875,bilinear\nvit_betwixt_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k,256,97.220,2.780,99.590,0.410,60.40,0.950,bicubic\nregnety_160.sw_in12k_ft_in1k,224,97.220,2.780,99.580,0.420,83.59,0.950,bicubic\nbeit3_base_patch16_224.indomain_in22k_ft_in1k,224,97.220,2.780,99.490,0.510,86.66,1.000,bicubic\nconvnext_small.in12k_ft_in1k,224,97.210,2.790,99.600,0.400,50.22,0.950,bicubic\ntf_efficientnet_b7.ap_in1k,600,97.210,2.790,99.540,0.460,66.35,0.949,bicubic\nregnetz_e8.ra3_in1k,320,97.210,2.790,99.500,0.500,57.70,1.000,bicubic\ntf_efficientnet_b8.ra_in1k,672,97.210,2.790,99.500,0.500,87.41,0.954,bicubic\ntf_efficientnetv2_m.in21k_ft_in1k,384,97.210,2.790,99.470,0.530,54.14,1.000,bicubic\ntiny_vit_21m_224.dist_in22k_ft_in1k,224,97.200,2.800,99.490,0.510,21.20,0.950,bicubic\ndm_nfnet_f3.dm_in1k,320,97.190,2.810,99.540,0.460,254.92,0.940,bicubic\nmobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k,480,97.180,2.820,99.560,0.440,32.59,1.000,bicubic\nvit_base_r50_s16_384.orig_in21k_ft_in1k,384,97.180,2.820,99.560,0.440,98.95,1.000,bicubic\nmobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k,544,97.170,2.830,99.580,0.420,32.59,1.000,bicubic\nregnety_120.sw_in12k_ft_in1k,224,97.170,2.830,99.520,0.480,51.82,0.950,bicubic\nbeitv2_base_patch16_224.in1k_ft_in1k,224,97.170,2.830,99.460,0.540,86.53,0.900,bicubic\nhiera_small_abswin_256.sbb2_e200_in12k_ft_in1k,256,97.170,2.830,99.430,0.570,34.36,0.950,bicubic\
nvit_medium_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,97.170,2.830,99.390,0.610,38.88,0.950,bicubic\nvit_base_patch16_224.augreg2_in21k_ft_in1k,224,97.160,2.840,99.540,0.460,86.57,0.900,bicubic\nefficientnet_h_b5.sw_r448_e450_in1k,576,97.160,2.840,99.500,0.500,45.88,1.000,bicubic\nregnety_320.swag_lc_in1k,224,97.150,2.850,99.670,0.330,145.05,0.965,bicubic\neva02_small_patch14_336.mim_in22k_ft_in1k,336,97.150,2.850,99.460,0.540,22.13,1.000,bicubic\ncoat_lite_medium_384.in1k,384,97.150,2.850,99.450,0.550,44.57,1.000,bicubic\ndeit3_small_patch16_384.fb_in22k_ft_in1k,384,97.130,2.870,99.500,0.500,22.21,1.000,bicubic\nxcit_small_24_p16_384.fb_dist_in1k,384,97.130,2.870,99.440,0.560,47.67,1.000,bicubic\ntf_efficientnet_b8.ap_in1k,672,97.120,2.880,99.660,0.340,87.41,0.954,bicubic\nhgnetv2_b5.ssld_stage2_ft_in1k,224,97.120,2.880,99.610,0.390,39.57,0.965,bicubic\nvit_base_patch16_clip_224.laion2b_ft_in1k,224,97.120,2.880,99.460,0.540,86.57,1.000,bicubic\nmambaout_base_tall_rw.sw_e500_in1k,288,97.120,2.880,99.420,0.580,86.48,1.000,bicubic\ntf_efficientnet_b6.ap_in1k,528,97.110,2.890,99.610,0.390,43.04,0.942,bicubic\nconvnext_tiny.fb_in22k_ft_in1k_384,384,97.110,2.890,99.500,0.500,28.59,1.000,bicubic\ndm_nfnet_f2.dm_in1k,352,97.110,2.890,99.500,0.500,193.78,0.920,bicubic\nvolo_d3_224.sail_in1k,224,97.110,2.890,99.460,0.540,86.33,0.960,bicubic\necaresnet269d.ra2_in1k,320,97.110,2.890,99.400,0.600,102.09,0.950,bicubic\nconvnext_tiny.in12k_ft_in1k,288,97.100,2.900,99.520,0.480,28.59,1.000,bicubic\nvit_base_patch32_clip_384.openai_ft_in12k_in1k,384,97.100,2.900,99.520,0.480,88.30,0.950,bicubic\nmambaout_base_wide_rw.sw_e500_in1k,288,97.100,2.900,99.450,0.550,94.45,1.000,bicubic\nvit_base_patch16_clip_224.openai_ft_in1k,224,97.090,2.910,99.480,0.520,86.57,0.900,bicubic\necaresnet269d.ra2_in1k,352,97.090,2.910,99.470,0.530,102.09,1.000,bicubic\nconvnext_large.fb_in1k,288,97.090,2.910,99.450,0.550,197.77,1.000,bicubic\nhgnet_base.ssld_in1k,224,97.080,2.920,99.610,0.390,71.58,0.965,b
icubic\nconvformer_s36.sail_in22k_ft_in1k,224,97.080,2.920,99.550,0.450,40.01,1.000,bicubic\nbeit_base_patch16_224.in22k_ft_in22k_in1k,224,97.070,2.930,99.610,0.390,86.53,0.900,bicubic\neca_nfnet_l2.ra3_in1k,384,97.070,2.930,99.510,0.490,56.72,1.000,bicubic\ncaformer_s18.sail_in1k_384,384,97.070,2.930,99.430,0.570,26.34,1.000,bicubic\ncait_s24_384.fb_dist_in1k,384,97.070,2.930,99.410,0.590,47.06,1.000,bicubic\nxcit_large_24_p8_224.fb_dist_in1k,224,97.060,2.940,99.420,0.580,188.93,1.000,bicubic\ndm_nfnet_f1.dm_in1k,320,97.060,2.940,99.390,0.610,132.63,0.910,bicubic\nefficientvit_l2.r288_in1k,288,97.060,2.940,99.380,0.620,63.71,1.000,bicubic\nefficientnet_x_b5.sw_r448_e450_in1k,576,97.050,2.950,99.470,0.530,33.44,1.000,bicubic\nhrnet_w48_ssld.paddle_in1k,288,97.040,2.960,99.640,0.360,77.47,1.000,bilinear\nconvformer_s18.sail_in1k_384,384,97.040,2.960,99.370,0.630,26.77,1.000,bicubic\nhgnetv2_b5.ssld_stage1_in22k_in1k,224,97.030,2.970,99.570,0.430,39.57,0.965,bicubic\nefficientvit_l3.r256_in1k,256,97.030,2.970,99.240,0.760,246.04,1.000,bicubic\nresnetv2_152x2_bit.goog_in21k_ft_in1k,448,97.020,2.980,99.590,0.410,236.34,1.000,bilinear\nmaxvit_large_tf_224.in1k,224,97.020,2.980,99.240,0.760,211.79,0.950,bicubic\nnextvit_large.bd_in1k_384,384,97.010,2.990,99.510,0.490,57.87,1.000,bicubic\nresnetv2_101x3_bit.goog_in21k_ft_in1k,448,97.010,2.990,99.490,0.510,387.93,1.000,bilinear\nmambaout_base_short_rw.sw_e500_in1k,288,97.010,2.990,99.380,0.620,88.83,1.000,bicubic\ndeit3_base_patch16_384.fb_in1k,384,97.010,2.990,99.360,0.640,86.88,1.000,bicubic\nmobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k,448,97.010,2.990,99.310,0.690,32.59,0.950,bicubic\nefficientnetv2_rw_m.agc_in1k,416,97.000,3.000,99.530,0.470,53.24,1.000,bicubic\ntf_efficientnet_b7.ra_in1k,600,97.000,3.000,99.520,0.480,66.35,0.949,bicubic\ndeit_base_distilled_patch16_384.fb_in1k,384,97.000,3.000,99.490,0.510,87.63,1.000,bicubic\ncaformer_b36.sail_in1k,224,97.000,3.000,99.350,0.650,98.75,1.000,bicubic\nefficientvit_
l3.r224_in1k,224,97.000,3.000,99.220,0.780,246.04,1.000,bicubic\ndeit3_medium_patch16_224.fb_in22k_ft_in1k,224,96.990,3.010,99.450,0.550,38.85,1.000,bicubic\nvolo_d2_224.sail_in1k,224,96.990,3.010,99.390,0.610,58.68,0.960,bicubic\ntf_efficientnet_b4.ns_jft_in1k,380,96.980,3.020,99.580,0.420,19.34,0.922,bicubic\nefficientnet_h_b5.sw_r448_e450_in1k,448,96.980,3.020,99.420,0.580,45.88,1.000,bicubic\ndeit3_large_patch16_224.fb_in1k,224,96.980,3.020,99.330,0.670,304.37,0.900,bicubic\nmaxvit_base_tf_224.in1k,224,96.980,3.020,99.270,0.730,119.47,0.950,bicubic\nconvnext_small.fb_in22k_ft_in1k,224,96.970,3.030,99.410,0.590,50.22,0.875,bicubic\nnextvit_small.bd_ssld_6m_in1k,224,96.960,3.040,99.480,0.520,31.76,0.950,bicubic\nxcit_small_12_p16_384.fb_dist_in1k,384,96.950,3.050,99.400,0.600,26.25,1.000,bicubic\nmambaout_small_rw.sw_e450_in1k,288,96.950,3.050,99.390,0.610,48.50,1.000,bicubic\nhiera_base_plus_224.mae_in1k_ft_in1k,224,96.950,3.050,99.340,0.660,69.90,0.900,bicubic\nseresnextaa101d_32x8d.ah_in1k,288,96.940,3.060,99.400,0.600,93.59,1.000,bicubic\nhgnet_small.ssld_in1k,288,96.930,3.070,99.560,0.440,24.36,1.000,bicubic\nvolo_d1_384.sail_in1k,384,96.930,3.070,99.520,0.480,26.78,1.000,bicubic\nmvitv2_large.fb_in1k,224,96.930,3.070,99.400,0.600,217.99,0.900,bicubic\nxcit_medium_24_p8_224.fb_dist_in1k,224,96.930,3.070,99.380,0.620,84.32,1.000,bicubic\ndavit_base.msft_in1k,224,96.930,3.070,99.250,0.750,87.95,0.950,bicubic\nresnext101_32x16d.fb_wsl_ig1b_ft_in1k,224,96.920,3.080,99.590,0.410,194.03,0.875,bilinear\nbeit3_base_patch16_224.in22k_ft_in1k,224,96.920,3.080,99.580,0.420,86.66,1.000,bicubic\nnextvit_base.bd_in1k_384,384,96.910,3.090,99.400,0.600,44.82,1.000,bicubic\nrdnet_large.nv_in1k,224,96.910,3.090,99.400,0.600,186.27,0.900,bicubic\nconvformer_b36.sail_in1k,224,96.910,3.090,99.230,0.770,99.88,1.000,bicubic\nresnetrs420.tf_in1k,416,96.900,3.100,99.450,0.550,191.89,1.000,bicubic\nconvnextv2_base.fcmae_ft_in1k,224,96.900,3.100,99.420,0.580,88.72,0.875,bicubic\nxcit_s
mall_24_p8_224.fb_dist_in1k,224,96.890,3.110,99.480,0.520,47.63,1.000,bicubic\ncaformer_m36.sail_in1k,224,96.890,3.110,99.440,0.560,56.20,1.000,bicubic\nvit_large_patch16_rope_mixed_224.naver_in1k,224,96.890,3.110,99.320,0.680,304.20,0.900,bicubic\ndeit3_huge_patch14_224.fb_in1k,224,96.880,3.120,99.480,0.520,632.13,0.900,bicubic\nvit_base_patch16_224.augreg_in21k_ft_in1k,224,96.870,3.130,99.530,0.470,86.57,0.900,bicubic\nregnety_1280.seer_ft_in1k,384,96.870,3.130,99.400,0.600,644.81,1.000,bicubic\nmobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k,384,96.870,3.130,99.360,0.640,32.59,0.950,bicubic\nmambaout_base.in1k,288,96.870,3.130,99.170,0.830,84.81,1.000,bicubic\nrexnetr_300.sw_in12k_ft_in1k,288,96.850,3.150,99.520,0.480,34.81,1.000,bicubic\nefficientvit_l2.r256_in1k,256,96.850,3.150,99.300,0.700,63.71,1.000,bicubic\nconvnextv2_tiny.fcmae_ft_in22k_in1k,288,96.840,3.160,99.450,0.550,28.64,1.000,bicubic\nefficientvit_l2.r224_in1k,224,96.840,3.160,99.250,0.750,63.71,1.000,bicubic\nregnety_160.swag_lc_in1k,224,96.830,3.170,99.660,0.340,83.59,0.965,bicubic\nhgnetv2_b4.ssld_stage2_ft_in1k,288,96.830,3.170,99.530,0.470,19.80,1.000,bicubic\nresnetv2_152x2_bit.goog_teacher_in21k_ft_in1k_384,384,96.830,3.170,99.450,0.550,236.34,1.000,bicubic\nregnety_640.seer_ft_in1k,384,96.830,3.170,99.400,0.600,281.38,1.000,bicubic\nefficientnet_x_b5.sw_r448_e450_in1k,448,96.830,3.170,99.370,0.630,33.44,1.000,bicubic\nvit_large_patch16_rope_mixed_ape_224.naver_in1k,224,96.820,3.180,99.440,0.560,304.40,0.900,bicubic\nvit_large_patch16_rope_224.naver_in1k,224,96.820,3.180,99.430,0.570,304.17,0.900,bicubic\nconvnext_base.fb_in1k,288,96.820,3.180,99.420,0.580,88.59,1.000,bicubic\nxcit_large_24_p16_224.fb_dist_in1k,224,96.820,3.180,99.370,0.630,189.10,1.000,bicubic\nnextvit_small.bd_in1k_384,384,96.820,3.180,99.300,0.700,31.76,1.000,bicubic\nseresnet152d.ra2_in1k,320,96.800,3.200,99.450,0.550,66.84,1.000,bicubic\nfastvit_ma36.apple_dist_in1k,256,96.800,3.200,99.330,0.670,44.07,0.950,bicubic\nt
f_efficientnetv2_m.in1k,384,96.790,3.210,99.420,0.580,54.14,1.000,bicubic\nmaxxvit_rmlp_small_rw_256.sw_in1k,256,96.790,3.210,99.370,0.630,66.01,0.950,bicubic\nseresnext101_32x8d.ah_in1k,288,96.790,3.210,99.340,0.660,93.57,1.000,bicubic\nvit_large_r50_s32_224.augreg_in21k_ft_in1k,224,96.790,3.210,99.330,0.670,328.99,0.900,bicubic\nvit_large_patch16_rope_ape_224.naver_in1k,224,96.770,3.230,99.400,0.600,304.37,0.900,bicubic\nresnetrs350.tf_in1k,384,96.770,3.230,99.340,0.660,163.96,1.000,bicubic\nswinv2_base_window16_256.ms_in1k,256,96.770,3.230,99.330,0.670,87.92,0.900,bicubic\nmvitv2_base.fb_in1k,224,96.770,3.230,99.250,0.750,51.47,0.900,bicubic\nconvnext_large.fb_in1k,224,96.760,3.240,99.300,0.700,197.77,0.875,bicubic\ntf_efficientnetv2_s.in21k_ft_in1k,384,96.750,3.250,99.420,0.580,21.46,1.000,bicubic\nxcit_small_12_p8_224.fb_dist_in1k,224,96.750,3.250,99.370,0.630,26.21,1.000,bicubic\nflexivit_base.1200ep_in1k,240,96.750,3.250,99.360,0.640,86.59,0.950,bicubic\neca_nfnet_l1.ra2_in1k,320,96.750,3.250,99.290,0.710,41.41,1.000,bicubic\nconvnext_tiny.in12k_ft_in1k,224,96.740,3.260,99.460,0.540,28.59,0.950,bicubic\nedgenext_base.in21k_ft_in1k,320,96.740,3.260,99.420,0.580,18.51,1.000,bicubic\nresnetv2_50x3_bit.goog_in21k_ft_in1k,448,96.730,3.270,99.530,0.470,217.32,1.000,bilinear\nseresnext101d_32x8d.ah_in1k,288,96.730,3.270,99.360,0.640,93.59,1.000,bicubic\nresnet200d.ra2_in1k,320,96.730,3.270,99.340,0.660,64.69,1.000,bicubic\ndm_nfnet_f2.dm_in1k,256,96.730,3.270,99.300,0.700,193.78,0.920,bicubic\nhgnetv2_b4.ssld_stage1_in22k_in1k,288,96.720,3.280,99.520,0.480,19.80,1.000,bicubic\nedgenext_base.usi_in1k,320,96.720,3.280,99.430,0.570,18.51,1.000,bicubic\ncaformer_s36.sail_in1k,224,96.720,3.280,99.360,0.640,39.30,1.000,bicubic\nresnetrs200.tf_in1k,320,96.720,3.280,99.360,0.640,93.21,1.000,bicubic\nmambaout_small.in1k,288,96.720,3.280,99.260,0.740,48.49,1.000,bicubic\nvit_base_patch16_384.orig_in21k_ft_in1k,384,96.710,3.290,99.510,0.490,86.86,1.000,bicubic\ncaformer_s18.sa
il_in22k_ft_in1k,224,96.710,3.290,99.480,0.520,26.34,1.000,bicubic\nregnetz_040.ra3_in1k,320,96.710,3.290,99.480,0.520,27.12,1.000,bicubic\ntf_efficientnet_b5.ap_in1k,456,96.710,3.290,99.450,0.550,30.39,0.934,bicubic\nregnetz_040_h.ra3_in1k,320,96.700,3.300,99.500,0.500,28.94,1.000,bicubic\nvit_small_patch16_384.augreg_in21k_ft_in1k,384,96.700,3.300,99.480,0.520,22.20,1.000,bicubic\nmobilenetv4_hybrid_large.e600_r384_in1k,448,96.700,3.300,99.390,0.610,37.76,1.000,bicubic\nresnetrs270.tf_in1k,352,96.700,3.300,99.350,0.650,129.86,1.000,bicubic\nhiera_small_abswin_256.sbb2_pd_e200_in12k_ft_in1k,256,96.700,3.300,99.230,0.770,34.36,0.950,bicubic\nvit_small_r26_s32_384.augreg_in21k_ft_in1k,384,96.690,3.310,99.580,0.420,36.47,1.000,bicubic\nmaxvit_small_tf_224.in1k,224,96.690,3.310,99.340,0.660,68.93,0.950,bicubic\nregnetz_e8.ra3_in1k,256,96.690,3.310,99.340,0.660,57.70,0.940,bicubic\nmambaout_base_tall_rw.sw_e500_in1k,224,96.690,3.310,99.280,0.720,86.48,0.950,bicubic\nrdnet_base.nv_in1k,224,96.690,3.310,99.240,0.760,87.45,0.900,bicubic\nconvformer_m36.sail_in1k,224,96.690,3.310,99.070,0.930,57.05,1.000,bicubic\ntf_efficientnet_b6.aa_in1k,528,96.680,3.320,99.370,0.630,43.04,0.942,bicubic\nvit_medium_patch16_gap_256.sw_in12k_ft_in1k,256,96.670,3.330,99.500,0.500,38.86,0.950,bicubic\nrepvgg_d2se.rvgg_in1k,320,96.670,3.330,99.370,0.630,133.33,1.000,bilinear\nresnetaa101d.sw_in12k_ft_in1k,288,96.670,3.330,99.360,0.640,44.57,1.000,bicubic\ndeit3_small_patch16_224.fb_in22k_ft_in1k,224,96.670,3.330,99.330,0.670,22.06,1.000,bicubic\nresnetrs350.tf_in1k,288,96.670,3.330,99.290,0.710,163.96,1.000,bicubic\ncoatnet_rmlp_1_rw2_224.sw_in12k_ft_in1k,224,96.670,3.330,99.160,0.840,41.72,0.950,bicubic\nflexivit_base.600ep_in1k,240,96.650,3.350,99.330,0.670,86.59,0.950,bicubic\nefficientvit_b3.r288_in1k,288,96.650,3.350,99.220,0.780,48.65,1.000,bicubic\ndavit_small.msft_in1k,224,96.640,3.360,99.370,0.630,49.75,0.950,bicubic\nrexnetr_300.sw_in12k_ft_in1k,224,96.640,3.360,99.370,0.630,34.81,0.
950,bicubic\nmambaout_base_wide_rw.sw_e500_in1k,224,96.640,3.360,99.220,0.780,94.45,0.950,bicubic\nregnetz_d8.ra3_in1k,320,96.630,3.370,99.450,0.550,23.37,1.000,bicubic\nhgnet_small.ssld_in1k,224,96.630,3.370,99.420,0.580,24.36,0.965,bicubic\nresmlp_big_24_224.fb_in22k_ft_in1k,224,96.620,3.380,99.510,0.490,129.14,0.875,bicubic\nresnetrs420.tf_in1k,320,96.620,3.380,99.300,0.700,191.89,1.000,bicubic\nxcit_medium_24_p16_224.fb_dist_in1k,224,96.620,3.380,99.280,0.720,84.40,1.000,bicubic\nvit_mediumd_patch16_rope_reg1_gap_256.sbb_in1k,256,96.620,3.380,99.220,0.780,63.95,0.950,bicubic\nresnest200e.in1k,320,96.610,3.390,99.350,0.650,70.20,0.909,bicubic\nconvformer_s18.sail_in22k_ft_in1k,224,96.610,3.390,99.330,0.670,26.77,1.000,bicubic\nflexivit_base.300ep_in1k,240,96.610,3.390,99.270,0.730,86.59,0.950,bicubic\nresnext101_32x16d.fb_swsl_ig1b_ft_in1k,224,96.600,3.400,99.540,0.460,194.03,0.875,bilinear\nregnetz_d32.ra3_in1k,320,96.600,3.400,99.380,0.620,27.58,0.950,bicubic\nmambaout_base_short_rw.sw_e500_in1k,224,96.600,3.400,99.230,0.770,88.83,0.950,bicubic\nefficientnetv2_rw_m.agc_in1k,320,96.590,3.410,99.430,0.570,53.24,1.000,bicubic\neca_nfnet_l2.ra3_in1k,320,96.590,3.410,99.410,0.590,56.72,0.900,bicubic\nhiera_base_224.mae_in1k_ft_in1k,224,96.590,3.410,99.310,0.690,51.52,0.900,bicubic\nfocalnet_base_srf.ms_in1k,224,96.590,3.410,99.160,0.840,88.15,0.900,bicubic\nxcit_tiny_24_p8_384.fb_dist_in1k,384,96.580,3.420,99.320,0.680,12.11,1.000,bicubic\nmobilenetv4_hybrid_large.ix_e600_r384_in1k,448,96.580,3.420,99.310,0.690,37.76,1.000,bicubic\nvit_base_patch16_rope_mixed_224.naver_in1k,224,96.580,3.420,99.310,0.690,86.44,0.900,bicubic\nswin_base_patch4_window12_384.ms_in1k,384,96.580,3.420,99.240,0.760,87.90,1.000,bicubic\nconvformer_s36.sail_in1k,224,96.580,3.420,99.160,0.840,40.01,1.000,bicubic\nregnetz_d8_evos.ch_in1k,320,96.570,3.430,99.460,0.540,23.46,1.000,bicubic\ncait_xs24_384.fb_dist_in1k,384,96.570,3.430,99.420,0.580,26.67,1.000,bicubic\nresnetrs152.tf_in1k,320,96.570
,3.430,99.240,0.760,86.62,1.000,bicubic\ngcvit_base.in1k,224,96.570,3.430,99.230,0.770,90.32,0.875,bicubic\nmaxvit_rmlp_small_rw_224.sw_in1k,224,96.570,3.430,99.130,0.870,64.90,0.900,bicubic\nconvnext_small.fb_in1k,288,96.560,3.440,99.340,0.660,50.22,1.000,bicubic\nswin_small_patch4_window7_224.ms_in22k_ft_in1k,224,96.550,3.450,99.410,0.590,49.61,0.900,bicubic\nswinv2_base_window8_256.ms_in1k,256,96.550,3.450,99.280,0.720,87.92,0.900,bicubic\ninception_next_base.sail_in1k,224,96.550,3.450,99.090,0.910,86.67,0.950,bicubic\nefficientnetv2_rw_s.ra2_in1k,384,96.540,3.460,99.360,0.640,23.94,1.000,bicubic\ntf_efficientnet_b7.aa_in1k,600,96.540,3.460,99.310,0.690,66.35,0.949,bicubic\nregnety_080.ra3_in1k,288,96.530,3.470,99.330,0.670,39.18,1.000,bicubic\ncrossvit_18_dagger_408.in1k,408,96.530,3.470,99.250,0.750,44.61,1.000,bicubic\ncoatnet_rmlp_2_rw_224.sw_in1k,224,96.530,3.470,99.080,0.920,73.88,0.950,bicubic\nhgnetv2_b4.ssld_stage2_ft_in1k,224,96.520,3.480,99.380,0.620,19.80,0.965,bicubic\nresnest269e.in1k,416,96.520,3.480,99.370,0.630,110.93,0.928,bicubic\nmambaout_small_rw.sw_e450_in1k,224,96.520,3.480,99.200,0.800,48.50,1.000,bicubic\nmambaout_tiny.in1k,288,96.510,3.490,99.130,0.870,26.55,1.000,bicubic\nvit_base_patch32_384.augreg_in21k_ft_in1k,384,96.500,3.500,99.410,0.590,88.30,1.000,bicubic\nfastvit_ma36.apple_in1k,256,96.490,3.510,99.300,0.700,44.07,0.950,bicubic\nconvnextv2_tiny.fcmae_ft_in22k_in1k,224,96.480,3.520,99.400,0.600,28.64,0.875,bicubic\ntf_efficientnet_b5.aa_in1k,456,96.480,3.520,99.230,0.770,30.39,0.934,bicubic\nfocalnet_base_lrf.ms_in1k,224,96.480,3.520,99.130,0.870,88.75,0.900,bicubic\nregnetz_040.ra3_in1k,256,96.470,3.530,99.390,0.610,27.12,1.000,bicubic\nhgnetv2_b4.ssld_stage1_in22k_in1k,224,96.470,3.530,99.360,0.640,19.80,0.965,bicubic\nresmlp_big_24_224.fb_distilled_in1k,224,96.470,3.530,99.310,0.690,129.14,0.875,bicubic\nvit_base_patch16_224_miil.in21k_ft_in1k,224,96.470,3.530,99.300,0.700,86.54,0.875,bilinear\nswinv2_small_window16_256.ms_in1
k,256,96.470,3.530,99.210,0.790,49.73,0.900,bicubic\nseresnext101d_32x8d.ah_in1k,224,96.470,3.530,99.200,0.800,93.59,0.950,bicubic\ncoat_lite_medium.in1k,224,96.470,3.530,99.160,0.840,44.57,0.900,bicubic\nmobilenetv4_conv_aa_large.e600_r384_in1k,480,96.460,3.540,99.300,0.700,32.59,1.000,bicubic\nmobilenetv4_hybrid_large.ix_e600_r384_in1k,384,96.460,3.540,99.180,0.820,37.76,0.950,bicubic\nregnetv_064.ra3_in1k,288,96.450,3.550,99.360,0.640,30.58,1.000,bicubic\nmambaout_small.in1k,224,96.450,3.550,99.200,0.800,48.49,1.000,bicubic\nresnext101_32x4d.fb_swsl_ig1b_ft_in1k,224,96.440,3.560,99.480,0.520,44.18,0.875,bilinear\ncs3se_edgenet_x.c2ns_in1k,320,96.440,3.560,99.390,0.610,50.72,1.000,bicubic\nvit_base_patch16_rope_mixed_ape_224.naver_in1k,224,96.440,3.560,99.310,0.690,86.59,0.900,bicubic\nconvnext_base.fb_in1k,224,96.440,3.560,99.230,0.770,88.59,0.875,bicubic\nvit_base_patch16_rope_reg1_gap_256.sbb_in1k,256,96.440,3.560,99.130,0.870,86.43,0.950,bicubic\nseresnet152d.ra2_in1k,256,96.430,3.570,99.390,0.610,66.84,0.950,bicubic\nmobilenetv4_hybrid_large.e600_r384_in1k,384,96.430,3.570,99.180,0.820,37.76,0.950,bicubic\nxcit_small_24_p8_224.fb_in1k,224,96.430,3.570,99.130,0.870,47.63,1.000,bicubic\nmambaout_base.in1k,224,96.430,3.570,99.000,1.000,84.81,1.000,bicubic\nmaxvit_rmlp_tiny_rw_256.sw_in1k,256,96.420,3.580,99.390,0.610,29.15,0.950,bicubic\nvit_medium_patch16_rope_reg1_gap_256.sbb_in1k,256,96.420,3.580,99.240,0.760,38.74,0.950,bicubic\ncrossvit_15_dagger_408.in1k,408,96.420,3.580,99.160,0.840,28.50,1.000,bicubic\nedgenext_base.usi_in1k,256,96.400,3.600,99.380,0.620,18.51,0.950,bicubic\nregnetz_d8.ra3_in1k,256,96.400,3.600,99.240,0.760,23.37,0.940,bicubic\ncait_s24_224.fb_dist_in1k,224,96.400,3.600,99.150,0.850,46.92,1.000,bicubic\nxcit_large_24_p8_224.fb_in1k,224,96.400,3.600,98.980,1.020,188.93,1.000,bicubic\ntf_efficientnet_b5.ra_in1k,456,96.390,3.610,99.330,0.670,30.39,0.934,bicubic\nresnetrs270.tf_in1k,256,96.390,3.610,99.250,0.750,129.86,1.000,bicubic\nseresne
xt101_32x8d.ah_in1k,224,96.390,3.610,99.230,0.770,93.57,0.950,bicubic\nvit_medium_patch16_reg4_gap_256.sbb_in1k,256,96.390,3.610,99.210,0.790,38.88,0.950,bicubic\nhrnet_w48_ssld.paddle_in1k,224,96.380,3.620,99.410,0.590,77.47,0.950,bilinear\nresnet152d.ra2_in1k,320,96.380,3.620,99.390,0.610,60.21,1.000,bicubic\nvit_base_patch16_rope_ape_224.naver_in1k,224,96.380,3.620,99.380,0.620,86.59,0.900,bicubic\ntf_efficientnet_b3.ns_jft_in1k,300,96.380,3.620,99.350,0.650,12.23,0.904,bicubic\nregnety_160.deit_in1k,288,96.380,3.620,99.330,0.670,83.59,1.000,bicubic\nvit_base_patch16_rope_224.naver_in1k,224,96.380,3.620,99.300,0.700,86.43,0.900,bicubic\npvt_v2_b4.in1k,224,96.380,3.620,99.180,0.820,62.56,0.900,bicubic\nnextvit_large.bd_in1k,224,96.370,3.630,99.340,0.660,57.87,0.950,bicubic\nedgenext_base.in21k_ft_in1k,256,96.360,3.640,99.380,0.620,18.51,0.950,bicubic\nxception65.ra3_in1k,299,96.360,3.640,99.240,0.760,39.92,0.940,bicubic\nregnety_064.ra3_in1k,288,96.360,3.640,99.230,0.770,30.58,1.000,bicubic\nvit_betwixt_patch16_rope_reg4_gap_256.sbb_in1k,256,96.360,3.640,99.180,0.820,60.23,0.950,bicubic\nhgnetv2_b3.ssld_stage2_ft_in1k,288,96.350,3.650,99.470,0.530,16.29,1.000,bicubic\nconvnextv2_nano.fcmae_ft_in22k_in1k_384,384,96.350,3.650,99.390,0.610,15.62,1.000,bicubic\ndm_nfnet_f0.dm_in1k,256,96.350,3.650,99.340,0.660,71.49,0.900,bicubic\nfastvit_sa36.apple_dist_in1k,256,96.350,3.650,99.250,0.750,31.53,0.900,bicubic\ndm_nfnet_f1.dm_in1k,224,96.350,3.650,99.220,0.780,132.63,0.910,bicubic\nmvitv2_small.fb_in1k,224,96.350,3.650,99.220,0.780,34.87,0.900,bicubic\nvit_medium_patch16_reg1_gap_256.sbb_in1k,256,96.350,3.650,99.220,0.780,38.88,0.950,bicubic\ntf_efficientnetv2_s.in1k,384,96.350,3.650,99.200,0.800,21.46,1.000,bicubic\npvt_v2_b5.in1k,224,96.350,3.650,99.170,0.830,81.96,0.900,bicubic\nefficientvit_b3.r256_in1k,256,96.350,3.650,99.110,0.890,48.65,1.000,bicubic\nrepvit_m2_3.dist_450e_in1k,224,96.340,3.660,99.400,0.600,23.69,0.950,bicubic\nmobilenetv4_hybrid_medium.ix_e550_r3
84_in1k,448,96.340,3.660,99.340,0.660,11.07,1.000,bicubic\nregnety_320.seer_ft_in1k,384,96.340,3.660,99.340,0.660,145.05,1.000,bicubic\nvit_little_patch16_reg1_gap_256.sbb_in12k_ft_in1k,256,96.340,3.660,99.320,0.680,22.52,0.950,bicubic\nresnetaa101d.sw_in12k_ft_in1k,224,96.340,3.660,99.260,0.740,44.57,0.950,bicubic\nnaflexvit_base_patch16_gap.e300_s576_in1k,384,96.340,3.660,99.050,0.950,86.63,1.000,bicubic\ntf_efficientnetv2_s.in21k_ft_in1k,300,96.330,3.670,99.240,0.760,21.46,1.000,bicubic\nseresnextaa101d_32x8d.ah_in1k,224,96.330,3.670,99.230,0.770,93.59,0.950,bicubic\nrdnet_small.nv_in1k,224,96.330,3.670,99.200,0.800,50.44,0.900,bicubic\nhiera_small_224.mae_in1k_ft_in1k,224,96.330,3.670,99.170,0.830,35.01,0.900,bicubic\nefficientvit_l1.r224_in1k,224,96.330,3.670,99.050,0.950,52.65,1.000,bicubic\nresnext101_32x8d.fb_wsl_ig1b_ft_in1k,224,96.320,3.680,99.420,0.580,88.79,0.875,bilinear\nvolo_d1_224.sail_in1k,224,96.320,3.680,99.320,0.680,26.63,0.960,bicubic\nswinv2_small_window8_256.ms_in1k,256,96.320,3.680,99.200,0.800,49.73,0.900,bicubic\nregnetz_040_h.ra3_in1k,256,96.310,3.690,99.300,0.700,28.94,1.000,bicubic\nresnet200d.ra2_in1k,256,96.310,3.690,99.250,0.750,64.69,0.950,bicubic\nfasternet_l.in1k,224,96.300,3.700,99.130,0.870,93.47,1.000,bicubic\nresnet101d.ra2_in1k,320,96.290,3.710,99.260,0.740,44.57,1.000,bicubic\ndeit3_base_patch16_224.fb_in1k,224,96.290,3.710,99.190,0.810,86.59,0.900,bicubic\ntiny_vit_11m_224.dist_in22k_ft_in1k,224,96.280,3.720,99.190,0.810,11.00,0.950,bicubic\ngcvit_small.in1k,224,96.280,3.720,99.150,0.850,51.09,0.875,bicubic\nhgnetv2_b3.ssld_stage1_in22k_in1k,288,96.270,3.730,99.480,0.520,16.29,1.000,bicubic\nese_vovnet57b.ra4_e3600_r256_in1k,320,96.270,3.730,99.320,0.680,38.61,1.000,bicubic\nregnetz_d8_evos.ch_in1k,256,96.270,3.730,99.320,0.680,23.46,0.950,bicubic\nswin_s3_base_224.ms_in1k,224,96.270,3.730,99.160,0.840,71.13,0.900,bicubic\nmobilenetv4_conv_large.e600_r384_in1k,448,96.250,3.750,99.310,0.690,32.59,1.000,bicubic\ninception_next
_small.sail_in1k,224,96.250,3.750,99.230,0.770,49.37,0.875,bicubic\nnextvit_base.bd_in1k,224,96.250,3.750,99.230,0.770,44.82,0.950,bicubic\neca_nfnet_l1.ra2_in1k,256,96.250,3.750,99.220,0.780,41.41,0.900,bicubic\nnest_base_jx.goog_in1k,224,96.250,3.750,99.220,0.780,67.72,0.875,bicubic\npit_b_distilled_224.in1k,224,96.250,3.750,99.130,0.870,74.79,0.900,bicubic\nconvnext_tiny.fb_in22k_ft_in1k,224,96.240,3.760,99.340,0.660,28.59,0.875,bicubic\nfastvit_sa36.apple_in1k,256,96.240,3.760,99.180,0.820,31.53,0.900,bicubic\ntwins_svt_large.in1k,224,96.240,3.760,99.150,0.850,99.27,0.900,bicubic\nmaxvit_tiny_rw_224.sw_in1k,224,96.240,3.760,99.120,0.880,29.06,0.950,bicubic\nswin_s3_small_224.ms_in1k,224,96.240,3.760,99.070,0.930,49.74,0.900,bicubic\nhgnet_tiny.ssld_in1k,288,96.230,3.770,99.420,0.580,14.74,1.000,bicubic\necaresnet101d.miil_in1k,288,96.230,3.770,99.310,0.690,44.57,0.950,bicubic\nregnetv_040.ra3_in1k,288,96.230,3.770,99.300,0.700,20.64,1.000,bicubic\nrexnetr_200.sw_in12k_ft_in1k,288,96.230,3.770,99.250,0.750,16.52,1.000,bicubic\ntf_efficientnetv2_b3.in21k_ft_in1k,300,96.230,3.770,99.240,0.760,14.36,0.900,bicubic\nvit_betwixt_patch16_reg1_gap_256.sbb_in1k,256,96.230,3.770,99.110,0.890,60.40,0.950,bicubic\nnaflexvit_base_patch16_parfac_gap.e300_s576_in1k,384,96.230,3.770,98.930,1.070,86.46,1.000,bicubic\ndeit3_small_patch16_384.fb_in1k,384,96.220,3.780,99.280,0.720,22.21,1.000,bicubic\nresnet152.a1h_in1k,288,96.220,3.780,99.230,0.770,60.19,1.000,bicubic\nxcit_small_24_p16_224.fb_dist_in1k,224,96.220,3.780,99.210,0.790,47.67,1.000,bicubic\nswinv2_cr_small_ns_224.sw_in1k,224,96.220,3.780,99.130,0.870,49.70,0.900,bicubic\nhgnetv2_b2.ssld_stage2_ft_in1k,288,96.210,3.790,99.420,0.580,11.22,1.000,bicubic\nxception65p.ra3_in1k,299,96.210,3.790,99.180,0.820,39.82,0.940,bicubic\nregnetz_d32.ra3_in1k,256,96.200,3.800,99.290,0.710,27.58,0.950,bicubic\nconvnextv2_tiny.fcmae_ft_in1k,288,96.200,3.800,99.240,0.760,28.64,1.000,bicubic\ntiny_vit_21m_224.in1k,224,96.200,3.800,99.130,0
.870,21.20,0.950,bicubic\nconvnext_small.fb_in1k,224,96.200,3.800,99.110,0.890,50.22,0.875,bicubic\nresnetrs200.tf_in1k,256,96.190,3.810,99.270,0.730,93.21,1.000,bicubic\ngcvit_tiny.in1k,224,96.190,3.810,99.230,0.770,28.22,0.875,bicubic\nfocalnet_small_lrf.ms_in1k,224,96.190,3.810,99.190,0.810,50.34,0.900,bicubic\nmobilevitv2_175.cvnets_in22k_ft_in1k_384,384,96.190,3.810,99.130,0.870,14.25,1.000,bicubic\nrepvit_m2_3.dist_300e_in1k,224,96.180,3.820,99.340,0.660,23.69,0.950,bicubic\ntf_efficientnet_b4.ap_in1k,380,96.170,3.830,99.280,0.720,19.34,0.922,bicubic\ntresnet_v2_l.miil_in21k_ft_in1k,224,96.170,3.830,99.250,0.750,46.17,0.875,bilinear\nmobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k,320,96.170,3.830,99.200,0.800,11.07,1.000,bicubic\nefficientnet_b4.ra2_in1k,384,96.170,3.830,99.190,0.810,19.34,1.000,bicubic\ndeit_base_patch16_384.fb_in1k,384,96.170,3.830,99.150,0.850,86.86,1.000,bicubic\ntwins_svt_base.in1k,224,96.170,3.830,99.070,0.930,56.07,0.900,bicubic\ncaformer_s18.sail_in1k,224,96.170,3.830,98.990,1.010,26.34,1.000,bicubic\nfastvit_sa24.apple_dist_in1k,256,96.160,3.840,99.210,0.790,21.55,0.900,bicubic\ntwins_pcpvt_large.in1k,224,96.160,3.840,99.190,0.810,60.99,0.900,bicubic\nefficientnetv2_rw_s.ra2_in1k,288,96.160,3.840,99.150,0.850,23.94,1.000,bicubic\nregnetz_c16_evos.ch_in1k,320,96.150,3.850,99.380,0.620,13.49,0.950,bicubic\nresnetv2_101.a1h_in1k,288,96.150,3.850,99.160,0.840,44.54,1.000,bicubic\nmobilenetv4_conv_aa_large.e600_r384_in1k,384,96.150,3.850,99.110,0.890,32.59,0.950,bicubic\ntf_efficientnetv2_s.in1k,300,96.150,3.850,99.090,0.910,21.46,1.000,bicubic\nmaxvit_tiny_tf_224.in1k,224,96.140,3.860,99.250,0.750,30.92,0.950,bicubic\nsequencer2d_l.in1k,224,96.140,3.860,99.160,0.840,54.30,0.875,bicubic\nnfnet_l0.ra2_in1k,288,96.130,3.870,99.250,0.750,35.07,1.000,bicubic\nresnext101_64x4d.c1_in1k,288,96.130,3.870,99.230,0.770,83.46,1.000,bicubic\nvit_base_patch32_clip_224.laion2b_ft_in12k_in1k,224,96.130,3.870,99.210,0.790,88.22,0.900,bicubic\nxcit_medi
um_24_p8_224.fb_in1k,224,96.130,3.870,98.910,1.090,84.32,1.000,bicubic\nefficientformer_l7.snap_dist_in1k,224,96.110,3.890,99.270,0.730,82.23,0.950,bicubic\nxcit_small_12_p8_224.fb_in1k,224,96.110,3.890,99.170,0.830,26.21,1.000,bicubic\nresnetv2_101x1_bit.goog_in21k_ft_in1k,448,96.100,3.900,99.280,0.720,44.54,1.000,bilinear\nresnetv2_50x1_bit.goog_distilled_in1k,224,96.100,3.900,99.270,0.730,25.55,0.875,bicubic\ndeit_base_distilled_patch16_224.fb_in1k,224,96.100,3.900,99.190,0.810,87.34,0.900,bicubic\nswin_base_patch4_window7_224.ms_in1k,224,96.100,3.900,99.050,0.950,87.77,0.900,bicubic\nregnetv_064.ra3_in1k,224,96.090,3.910,99.310,0.690,30.58,0.950,bicubic\nresnetv2_152x2_bit.goog_teacher_in21k_ft_in1k,224,96.090,3.910,99.310,0.690,236.34,0.875,bicubic\ndeit3_medium_patch16_224.fb_in1k,224,96.090,3.910,99.190,0.810,38.85,0.900,bicubic\nfocalnet_small_srf.ms_in1k,224,96.090,3.910,99.160,0.840,49.89,0.900,bicubic\nvit_betwixt_patch16_reg4_gap_256.sbb_in1k,256,96.090,3.910,99.150,0.850,60.40,0.950,bicubic\nswinv2_cr_small_224.sw_in1k,224,96.090,3.910,98.870,1.130,49.70,0.900,bicubic\ntf_efficientnet_b5.in1k,456,96.080,3.920,99.300,0.700,30.39,0.934,bicubic\nregnety_320.tv2_in1k,224,96.080,3.920,99.240,0.760,145.05,0.965,bicubic\nconvnextv2_nano.fcmae_ft_in22k_in1k,288,96.070,3.930,99.220,0.780,15.62,1.000,bicubic\nresnet152d.ra2_in1k,256,96.060,3.940,99.300,0.700,60.21,0.950,bicubic\nmobilenetv4_hybrid_medium.ix_e550_r384_in1k,384,96.060,3.940,99.240,0.760,11.07,0.950,bicubic\nefficientformerv2_l.snap_dist_in1k,224,96.060,3.940,99.210,0.790,26.32,0.950,bicubic\ncs3edgenet_x.c2_in1k,288,96.060,3.940,99.130,0.870,47.82,1.000,bicubic\nxcit_tiny_12_p8_384.fb_dist_in1k,384,96.050,3.950,99.130,0.870,6.71,1.000,bicubic\nregnety_160.deit_in1k,224,96.040,3.960,99.210,0.790,83.59,0.950,bicubic\nresnet101.a1h_in1k,288,96.040,3.960,99.140,0.860,44.55,1.000,bicubic\ncs3sedarknet_x.c2ns_in1k,288,96.040,3.960,99.110,0.890,35.40,1.000,bicubic\nresnetrs152.tf_in1k,256,96.040,3.960,99.
110,0.890,86.62,1.000,bicubic\nmambaout_tiny.in1k,224,96.040,3.960,98.900,1.100,26.55,1.000,bicubic\nmaxxvit_rmlp_nano_rw_256.sw_in1k,256,96.030,3.970,99.260,0.740,16.78,0.950,bicubic\nfasternet_m.in1k,224,96.030,3.970,99.170,0.830,53.52,1.000,bicubic\nresnext101_64x4d.tv_in1k,224,96.030,3.970,99.160,0.840,83.46,0.875,bilinear\nmobilevitv2_200.cvnets_in22k_ft_in1k_384,384,96.030,3.970,99.140,0.860,18.45,1.000,bicubic\nregnety_080.ra3_in1k,224,96.030,3.970,99.110,0.890,39.18,0.950,bicubic\ncoatnet_1_rw_224.sw_in1k,224,96.030,3.970,99.050,0.950,41.72,0.950,bicubic\nxcit_small_12_p16_224.fb_dist_in1k,224,96.020,3.980,99.140,0.860,26.25,1.000,bicubic\nefficientvit_b3.r224_in1k,224,96.020,3.980,98.920,1.080,48.65,0.950,bicubic\nconvnext_tiny_hnf.a2h_in1k,288,96.010,3.990,99.080,0.920,28.59,1.000,bicubic\nregnety_040.ra3_in1k,288,96.000,4.000,99.200,0.800,20.65,1.000,bicubic\npvt_v2_b3.in1k,224,96.000,4.000,99.190,0.810,45.24,0.900,bicubic\nmaxvit_rmlp_nano_rw_256.sw_in1k,256,96.000,4.000,98.960,1.040,15.50,0.950,bicubic\nconvnext_nano.in12k_ft_in1k,288,95.990,4.010,99.310,0.690,15.59,1.000,bicubic\nconvnext_nano.r384_in12k_ft_in1k,384,95.990,4.010,99.290,0.710,15.59,1.000,bicubic\nxcit_tiny_24_p16_384.fb_dist_in1k,384,95.990,4.010,99.230,0.770,12.12,1.000,bicubic\nregnety_032.ra_in1k,288,95.990,4.010,99.190,0.810,19.44,1.000,bicubic\nregnetx_320.tv2_in1k,224,95.990,4.010,99.100,0.900,107.81,0.965,bicubic\nresnext101_32x8d.tv2_in1k,224,95.990,4.010,99.090,0.910,88.79,0.965,bilinear\nsequencer2d_s.in1k,224,95.990,4.010,99.050,0.950,27.65,0.875,bicubic\nhrnet_w18_ssld.paddle_in1k,288,95.980,4.020,99.290,0.710,21.30,1.000,bilinear\nhgnetv2_b3.ssld_stage1_in22k_in1k,224,95.980,4.020,99.280,0.720,16.29,0.965,bicubic\nregnety_160.tv2_in1k,224,95.980,4.020,99.170,0.830,83.59,0.965,bicubic\nswinv2_tiny_window16_256.ms_in1k,256,95.980,4.020,99.150,0.850,28.35,0.900,bicubic\nregnetz_c16.ra3_in1k,320,95.980,4.020,99.100,0.900,13.46,1.000,bicubic\neca_nfnet_l0.ra2_in1k,288,95.970,4.0
30,99.210,0.790,24.14,1.000,bicubic\nefficientvit_b2.r288_in1k,288,95.970,4.030,99.210,0.790,24.33,1.000,bicubic\ntf_efficientnet_b4.aa_in1k,380,95.970,4.030,99.170,0.830,19.34,0.922,bicubic\ncs3se_edgenet_x.c2ns_in1k,256,95.970,4.030,99.160,0.840,50.72,0.950,bicubic\ntresnet_xl.miil_in1k_448,448,95.970,4.030,99.120,0.880,78.44,0.875,bilinear\nnest_small_jx.goog_in1k,224,95.970,4.030,99.040,0.960,38.35,0.875,bicubic\nmobilenetv4_conv_medium.e250_r384_in12k_ft_in1k,384,95.960,4.040,99.170,0.830,9.72,0.950,bicubic\nswiftformer_l3.dist_in1k,224,95.950,4.050,99.260,0.740,28.49,0.950,bicubic\ncoatnet_rmlp_1_rw_224.sw_in1k,224,95.950,4.050,99.180,0.820,41.69,0.950,bicubic\nnaflexvit_base_patch16_par_gap.e300_s576_in1k,384,95.950,4.050,99.000,1.000,86.63,1.000,bicubic\nconvformer_s18.sail_in1k,224,95.940,4.060,98.900,1.100,26.77,1.000,bicubic\nhgnetv2_b3.ssld_stage2_ft_in1k,224,95.930,4.070,99.280,0.720,16.29,0.965,bicubic\nmaxvit_nano_rw_256.sw_in1k,256,95.930,4.070,99.000,1.000,15.45,0.950,bicubic\ncoat_small.in1k,224,95.920,4.080,99.160,0.840,21.69,0.900,bicubic\nrepvit_m1_5.dist_450e_in1k,224,95.920,4.080,99.140,0.860,14.64,0.950,bicubic\nfastvit_sa24.apple_in1k,256,95.910,4.090,99.160,0.840,21.55,0.900,bicubic\nresnet101d.ra2_in1k,256,95.900,4.100,99.150,0.850,44.57,0.950,bicubic\nconvnextv2_nano.fcmae_ft_in1k,288,95.900,4.100,99.110,0.890,15.62,1.000,bicubic\nregnetx_160.tv2_in1k,224,95.900,4.100,99.090,0.910,54.28,0.965,bicubic\nmobilenetv4_conv_large.e500_r256_in1k,320,95.890,4.110,99.120,0.880,32.59,1.000,bicubic\ntresnet_l.miil_in1k_448,448,95.890,4.110,99.110,0.890,55.99,0.875,bilinear\nresnet152.a1h_in1k,224,95.890,4.110,99.080,0.920,60.19,0.950,bicubic\nmaxxvitv2_nano_rw_256.sw_in1k,256,95.890,4.110,99.040,0.960,23.70,0.950,bicubic\nresnext50_32x4d.fb_swsl_ig1b_ft_in1k,224,95.880,4.120,99.250,0.750,25.03,0.875,bilinear\nresnet51q.ra2_in1k,288,95.880,4.120,99.120,0.880,35.70,1.000,bilinear\nmvitv2_tiny.fb_in1k,224,95.880,4.120,99.090,0.910,24.17,0.900,bicubic\n
nextvit_small.bd_in1k,224,95.880,4.120,99.080,0.920,31.76,0.950,bicubic\nswin_small_patch4_window7_224.ms_in1k,224,95.880,4.120,99.020,0.980,49.61,0.900,bicubic\ncs3darknet_x.c2ns_in1k,288,95.870,4.130,99.190,0.810,35.05,1.000,bicubic\nregnety_064.ra3_in1k,224,95.870,4.130,99.150,0.850,30.58,0.950,bicubic\nregnety_080_tv.tv2_in1k,224,95.870,4.130,99.100,0.900,39.38,0.965,bicubic\ncait_xxs36_384.fb_dist_in1k,384,95.870,4.130,99.090,0.910,17.37,1.000,bicubic\nresnest101e.in1k,256,95.850,4.150,99.210,0.790,48.28,0.875,bilinear\nresnext101_32x16d.fb_ssl_yfcc100m_ft_in1k,224,95.850,4.150,99.170,0.830,194.03,0.875,bilinear\nvit_large_patch32_384.orig_in21k_ft_in1k,384,95.840,4.160,99.160,0.840,306.63,1.000,bicubic\nrexnet_300.nav_in1k,224,95.840,4.160,99.130,0.870,34.71,0.875,bicubic\nmobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k,256,95.830,4.170,99.090,0.910,11.07,0.950,bicubic\nsequencer2d_m.in1k,224,95.820,4.180,99.110,0.890,38.31,0.875,bicubic\ntf_efficientnet_b4.in1k,380,95.820,4.180,99.060,0.940,19.34,0.922,bicubic\nhgnetv2_b2.ssld_stage1_in22k_in1k,288,95.810,4.190,99.270,0.730,11.22,1.000,bicubic\nresnetaa50d.sw_in12k_ft_in1k,288,95.810,4.190,99.190,0.810,25.58,1.000,bicubic\nmobilenetv4_conv_large.e600_r384_in1k,384,95.810,4.190,99.040,0.960,32.59,0.950,bicubic\ntnt_b_patch16_224.in1k,224,95.810,4.190,98.940,1.060,65.43,0.900,bicubic\nxcit_tiny_24_p8_224.fb_dist_in1k,224,95.800,4.200,99.210,0.790,12.11,1.000,bicubic\nrexnetr_200.sw_in12k_ft_in1k,224,95.800,4.200,99.170,0.830,16.52,0.950,bicubic\nmobilenetv4_hybrid_medium.ix_e550_r256_in1k,320,95.800,4.200,99.150,0.850,11.07,1.000,bicubic\nese_vovnet57b.ra4_e3600_r256_in1k,256,95.800,4.200,99.030,0.970,38.61,0.950,bicubic\nresnet61q.ra2_in1k,288,95.800,4.200,99.000,1.000,36.85,1.000,bicubic\nconvnext_tiny.fb_in1k,288,95.780,4.220,99.180,0.820,28.59,1.000,bicubic\ntwins_pcpvt_base.in1k,224,95.780,4.220,99.130,0.870,43.83,0.900,bicubic\nregnetv_040.ra3_in1k,224,95.760,4.240,99.210,0.790,20.64,0.950,bicubic\necar
esnet101d_pruned.miil_in1k,288,95.760,4.240,99.180,0.820,24.88,0.950,bicubic\nvit_relpos_base_patch16_clsgap_224.sw_in1k,224,95.760,4.240,99.040,0.960,86.43,0.900,bicubic\npoolformerv2_m48.sail_in1k,224,95.750,4.250,98.980,1.020,73.35,1.000,bicubic\nefficientnet_b4.ra2_in1k,320,95.740,4.260,99.150,0.850,19.34,0.875,bicubic\ntf_efficientnet_b2.ns_jft_in1k,260,95.740,4.260,99.120,0.880,9.11,0.890,bicubic\nvit_little_patch16_reg4_gap_256.sbb_in1k,256,95.740,4.260,99.120,0.880,22.52,0.950,bicubic\ngc_efficientnetv2_rw_t.agc_in1k,288,95.740,4.260,99.020,0.980,13.68,1.000,bicubic\ntresnet_m.miil_in21k_ft_in1k,224,95.730,4.270,99.030,0.970,31.39,0.875,bilinear\nefficientnet_b3.ra2_in1k,320,95.720,4.280,99.060,0.940,12.23,1.000,bicubic\npnasnet5large.tf_in1k,331,95.720,4.280,98.920,1.080,86.06,0.911,bicubic\nhgnet_tiny.ssld_in1k,224,95.710,4.290,99.260,0.740,14.74,0.965,bicubic\nconvnext_nano.in12k_ft_in1k,224,95.710,4.290,99.250,0.750,15.59,0.950,bicubic\nseresnext50_32x4d.racm_in1k,288,95.710,4.290,99.190,0.810,27.56,0.950,bicubic\nmobilevitv2_150.cvnets_in22k_ft_in1k_384,384,95.710,4.290,99.150,0.850,10.59,1.000,bicubic\nresnext101_64x4d.c1_in1k,224,95.710,4.290,99.030,0.970,83.46,0.950,bicubic\nregnetz_c16_evos.ch_in1k,256,95.700,4.300,99.230,0.770,13.49,0.950,bicubic\nflexivit_small.600ep_in1k,240,95.700,4.300,99.080,0.920,22.06,0.950,bicubic\ncoatnet_bn_0_rw_224.sw_in1k,224,95.700,4.300,99.060,0.940,27.44,0.950,bicubic\nrepvit_m1_5.dist_300e_in1k,224,95.700,4.300,98.980,1.020,14.64,0.950,bicubic\nnasnetalarge.tf_in1k,331,95.700,4.300,98.910,1.090,88.75,0.911,bicubic\nconvnextv2_tiny.fcmae_ft_in1k,224,95.680,4.320,99.150,0.850,28.64,0.875,bicubic\nxcit_tiny_24_p8_224.fb_in1k,224,95.680,4.320,99.060,0.940,12.11,1.000,bicubic\nresnetv2_101.a1h_in1k,224,95.680,4.320,99.010,0.990,44.54,0.950,bicubic\nefficientvit_b2.r256_in1k,256,95.670,4.330,99.060,0.940,24.33,1.000,bicubic\ncrossvit_15_dagger_240.in1k,240,95.670,4.330,98.820,1.180,28.21,0.875,bicubic\nwide_resnet50_2.rac
m_in1k,288,95.660,4.340,99.220,0.780,68.88,0.950,bicubic\ndavit_tiny.msft_in1k,224,95.660,4.340,99.040,0.960,28.36,0.950,bicubic\nhgnet_small.paddle_in1k,288,95.650,4.350,99.150,0.850,24.36,1.000,bicubic\nresnetv2_50d_evos.ah_in1k,288,95.650,4.350,99.110,0.890,25.59,1.000,bicubic\nvit_small_r26_s32_224.augreg_in21k_ft_in1k,224,95.640,4.360,99.190,0.810,36.43,0.900,bicubic\nrdnet_tiny.nv_in1k,224,95.640,4.360,99.030,0.970,23.86,0.900,bicubic\npoolformer_m48.sail_in1k,224,95.640,4.360,98.950,1.050,73.47,0.950,bicubic\ncs3sedarknet_x.c2ns_in1k,256,95.630,4.370,98.980,1.020,35.40,0.887,bicubic\ncrossvit_18_dagger_240.in1k,240,95.620,4.380,99.050,0.950,44.27,0.875,bicubic\npit_b_224.in1k,224,95.620,4.380,98.680,1.320,73.76,0.900,bicubic\nefficientformer_l3.snap_dist_in1k,224,95.610,4.390,99.190,0.810,31.41,0.950,bicubic\nefficientnetv2_rw_t.ra2_in1k,288,95.610,4.390,99.040,0.960,13.65,1.000,bicubic\nhiera_tiny_224.mae_in1k_ft_in1k,224,95.600,4.400,98.880,1.120,27.91,0.900,bicubic\ngcvit_xtiny.in1k,224,95.590,4.410,99.030,0.970,19.98,0.875,bicubic\nvit_relpos_base_patch16_224.sw_in1k,224,95.590,4.410,99.030,0.970,86.43,0.900,bicubic\npvt_v2_b2_li.in1k,224,95.590,4.410,98.990,1.010,22.55,0.900,bicubic\nregnety_040.ra3_in1k,224,95.570,4.430,99.050,0.950,20.65,0.950,bicubic\nflexivit_small.1200ep_in1k,240,95.560,4.440,99.110,0.890,22.06,0.950,bicubic\nxception41p.ra3_in1k,299,95.560,4.440,98.930,1.070,26.91,0.940,bicubic\ncoat_lite_small.in1k,224,95.560,4.440,98.880,1.120,19.84,0.900,bicubic\nwide_resnet101_2.tv2_in1k,224,95.550,4.450,99.090,0.910,126.89,0.965,bilinear\ntiny_vit_11m_224.in1k,224,95.550,4.450,98.990,1.010,11.00,0.950,bicubic\nconvit_base.fb_in1k,224,95.550,4.450,98.890,1.110,86.54,0.875,bicubic\nvit_base_patch32_clip_224.laion2b_ft_in1k,224,95.550,4.450,98.870,1.130,88.22,0.900,bicubic\nhgnetv2_b2.ssld_stage2_ft_in1k,224,95.540,4.460,99.180,0.820,11.22,0.965,bicubic\necaresnet101d.miil_in1k,224,95.540,4.460,99.140,0.860,44.57,0.875,bicubic\nlevit_384.fb_dist_
in1k,224,95.540,4.460,99.050,0.950,39.13,0.900,bicubic\nlevit_conv_384.fb_dist_in1k,224,95.540,4.460,99.050,0.950,39.13,0.900,bicubic\nconvnext_tiny.fb_in1k,224,95.540,4.460,99.000,1.000,28.59,0.875,bicubic\nxcit_small_24_p16_224.fb_in1k,224,95.540,4.460,98.760,1.240,47.67,1.000,bicubic\nconvnextv2_nano.fcmae_ft_in22k_in1k,224,95.530,4.470,99.120,0.880,15.62,0.875,bicubic\nregnetz_c16.ra3_in1k,256,95.530,4.470,98.960,1.040,13.46,0.940,bicubic\nresnet152.tv2_in1k,224,95.530,4.470,98.960,1.040,60.19,0.965,bilinear\ncrossvit_base_240.in1k,240,95.530,4.470,98.820,1.180,105.03,0.875,bicubic\necaresnet50t.ra2_in1k,320,95.520,4.480,99.120,0.880,25.57,0.950,bicubic\nvit_relpos_medium_patch16_rpn_224.sw_in1k,224,95.520,4.480,99.080,0.920,38.73,0.900,bicubic\nconvnext_tiny_hnf.a2h_in1k,224,95.520,4.480,99.010,0.990,28.59,0.950,bicubic\nflexivit_small.300ep_in1k,240,95.520,4.480,98.960,1.040,22.06,0.950,bicubic\ncs3edgenet_x.c2_in1k,256,95.510,4.490,99.030,0.970,47.82,0.887,bicubic\nxcit_medium_24_p16_224.fb_in1k,224,95.510,4.490,98.760,1.240,84.40,1.000,bicubic\nswinv2_tiny_window8_256.ms_in1k,256,95.500,4.500,99.120,0.880,28.35,0.900,bicubic\nfbnetv3_g.ra2_in1k,288,95.500,4.500,98.990,1.010,16.62,0.950,bilinear\npvt_v2_b2.in1k,224,95.500,4.500,98.990,1.010,25.36,0.900,bicubic\nresnetv2_50d_gn.ah_in1k,288,95.500,4.500,98.980,1.020,25.57,1.000,bicubic\nswin_tiny_patch4_window7_224.ms_in22k_ft_in1k,224,95.490,4.510,99.220,0.780,28.29,0.900,bicubic\nfocalnet_tiny_srf.ms_in1k,224,95.490,4.510,99.140,0.860,28.43,0.900,bicubic\nresnext101_32x8d.fb_ssl_yfcc100m_ft_in1k,224,95.490,4.510,99.110,0.890,88.79,0.875,bilinear\necaresnet50d.miil_in1k,288,95.490,4.510,99.090,0.910,25.58,0.950,bicubic\nresnet51q.ra2_in1k,256,95.490,4.510,98.990,1.010,35.70,0.875,bilinear\nvit_relpos_medium_patch16_cls_224.sw_in1k,224,95.490,4.510,98.950,1.050,38.76,0.900,bicubic\nvisformer_small.in1k,224,95.490,4.510,98.900,1.100,40.22,0.900,bicubic\nresnet101.a1_in1k,288,95.490,4.510,98.850,1.150,44.55,1.000
,bicubic\ncrossvit_18_240.in1k,240,95.490,4.510,98.800,1.200,43.27,0.875,bicubic\nresnet152.a2_in1k,288,95.490,4.510,98.780,1.220,60.19,1.000,bicubic\nresnet152.a1_in1k,288,95.490,4.510,98.770,1.230,60.19,1.000,bicubic\nresnext101_32x4d.fb_ssl_yfcc100m_ft_in1k,224,95.480,4.520,99.100,0.900,44.18,0.875,bilinear\nfocalnet_tiny_lrf.ms_in1k,224,95.480,4.520,98.940,1.060,28.65,0.900,bicubic\nmobilenetv4_hybrid_medium.e500_r224_in1k,256,95.480,4.520,98.900,1.100,11.07,1.000,bicubic\nresnet50.fb_swsl_ig1b_ft_in1k,224,95.470,4.530,99.330,0.670,25.56,0.875,bilinear\ninception_next_tiny.sail_in1k,224,95.470,4.530,99.010,0.990,28.06,0.875,bicubic\nvit_relpos_medium_patch16_224.sw_in1k,224,95.470,4.530,98.960,1.040,38.75,0.900,bicubic\necaresnet50t.a1_in1k,288,95.460,4.540,99.020,0.980,25.57,1.000,bicubic\ncoatnet_0_rw_224.sw_in1k,224,95.460,4.540,98.720,1.280,27.44,0.950,bicubic\nxcit_large_24_p16_224.fb_in1k,224,95.460,4.540,98.630,1.370,189.10,1.000,bicubic\ntresnet_xl.miil_in1k,224,95.440,4.560,99.060,0.940,78.44,0.875,bilinear\ncoatnet_rmlp_nano_rw_224.sw_in1k,224,95.440,4.560,98.990,1.010,15.15,0.900,bicubic\ndeit_base_patch16_224.fb_in1k,224,95.440,4.560,98.840,1.160,86.57,0.900,bicubic\nresnext50_32x4d.a1h_in1k,288,95.440,4.560,98.840,1.160,25.03,1.000,bicubic\nnfnet_l0.ra2_in1k,224,95.430,4.570,99.090,0.910,35.07,0.900,bicubic\nconvnextv2_nano.fcmae_ft_in1k,224,95.430,4.570,98.980,1.020,15.62,0.875,bicubic\nresnet101.a2_in1k,288,95.430,4.570,98.940,1.060,44.55,1.000,bicubic\npoolformerv2_m36.sail_in1k,224,95.430,4.570,98.880,1.120,56.08,1.000,bicubic\nxcit_small_12_p16_224.fb_in1k,224,95.430,4.570,98.840,1.160,26.25,1.000,bicubic\nedgenext_small.usi_in1k,320,95.420,4.580,99.100,0.900,5.59,1.000,bicubic\nresnetrs101.tf_in1k,288,95.420,4.580,99.030,0.970,63.62,0.940,bicubic\nhalo2botnet50ts_256.a1h_in1k,256,95.420,4.580,99.020,0.980,22.64,0.950,bicubic\nvit_small_patch16_224.augreg_in21k_ft_in1k,224,95.410,4.590,99.130,0.870,22.05,0.900,bicubic\ncs3darknet_x.c2ns_in1k,25
6,95.410,4.590,99.020,0.980,35.05,0.950,bicubic\ncoatnext_nano_rw_224.sw_in1k,224,95.410,4.590,99.000,1.000,14.70,0.900,bicubic\npoolformer_m36.sail_in1k,224,95.410,4.590,98.840,1.160,56.17,0.950,bicubic\nregnety_032.ra_in1k,224,95.400,4.600,99.080,0.920,19.44,0.950,bicubic\nvit_base_patch16_rpn_224.sw_in1k,224,95.390,4.610,98.950,1.050,86.54,0.900,bicubic\nefficientformerv2_s2.snap_dist_in1k,224,95.380,4.620,98.950,1.050,12.71,0.950,bicubic\ndm_nfnet_f0.dm_in1k,192,95.380,4.620,98.920,1.080,71.49,0.900,bicubic\nresnext101_32x8d.tv2_in1k,176,95.370,4.630,99.050,0.950,88.79,0.875,bilinear\nresnetaa50d.sw_in12k_ft_in1k,224,95.370,4.630,98.980,1.020,25.58,0.950,bicubic\nresnet101.a1h_in1k,224,95.370,4.630,98.860,1.140,44.55,0.950,bicubic\nhrnet_w18_ssld.paddle_in1k,224,95.360,4.640,99.070,0.930,21.30,0.950,bilinear\ntf_efficientnet_b3.ap_in1k,300,95.360,4.640,98.900,1.100,12.23,0.904,bicubic\nmixer_b16_224.miil_in21k_ft_in1k,224,95.360,4.640,98.870,1.130,59.88,0.875,bilinear\necaresnet50t.a2_in1k,288,95.350,4.650,98.930,1.070,25.57,1.000,bicubic\nconvnext_nano.d1h_in1k,288,95.350,4.650,98.850,1.150,15.59,1.000,bicubic\nmobilenetv4_hybrid_medium.ix_e550_r256_in1k,256,95.350,4.650,98.810,1.190,11.07,0.950,bicubic\nvit_base_patch16_224.orig_in21k_ft_in1k,224,95.340,4.660,99.010,0.990,86.57,0.900,bicubic\ntf_efficientnetv2_b3.in21k_ft_in1k,240,95.340,4.660,98.970,1.030,14.36,0.900,bicubic\nresnet61q.ra2_in1k,256,95.340,4.660,98.900,1.100,36.85,0.900,bicubic\nseresnet50.ra2_in1k,288,95.330,4.670,99.010,0.990,28.09,0.950,bicubic\nswinv2_cr_tiny_ns_224.sw_in1k,224,95.330,4.670,98.940,1.060,28.33,0.900,bicubic\npoolformerv2_s36.sail_in1k,224,95.330,4.670,98.900,1.100,30.79,1.000,bicubic\ncs3sedarknet_l.c2ns_in1k,288,95.320,4.680,99.140,0.860,21.91,0.950,bicubic\nresnet50d.ra4_e3600_r224_in1k,288,95.320,4.680,98.980,1.020,25.58,1.000,bicubic\nefficientnet_b1.ra4_e3600_r240_in1k,288,95.320,4.680,98.820,1.180,7.79,1.000,bicubic\nmobilenetv3_large_150d.ra4_e3600_r256_in1k,320,95.3
10,4.690,99.000,1.000,14.62,1.000,bilinear\nefficientvit_b2.r224_in1k,224,95.310,4.690,98.790,1.210,24.33,0.950,bicubic\necaresnetlight.miil_in1k,288,95.290,4.710,99.040,0.960,30.16,0.950,bicubic\nvit_small_patch16_384.augreg_in1k,384,95.290,4.710,99.020,0.980,22.20,1.000,bicubic\nregnety_032.tv2_in1k,224,95.290,4.710,98.930,1.070,19.44,0.965,bicubic\nresnet101.tv2_in1k,224,95.290,4.710,98.920,1.080,44.55,0.965,bilinear\nresnet50_gn.a1h_in1k,288,95.280,4.720,98.990,1.010,25.56,0.950,bicubic\ngcresnet50t.ra2_in1k,288,95.280,4.720,98.900,1.100,25.90,1.000,bicubic\neca_nfnet_l0.ra2_in1k,224,95.270,4.730,99.100,0.900,24.14,0.900,bicubic\ntresnet_l.miil_in1k,224,95.270,4.730,99.010,0.990,55.99,0.875,bilinear\ncait_xxs24_384.fb_dist_in1k,384,95.270,4.730,98.960,1.040,12.03,1.000,bicubic\nnest_tiny_jx.goog_in1k,224,95.250,4.750,98.970,1.030,17.06,0.875,bicubic\nconvnextv2_pico.fcmae_ft_in1k,288,95.250,4.750,98.940,1.060,9.07,0.950,bicubic\nmobilevitv2_175.cvnets_in22k_ft_in1k,256,95.250,4.750,98.800,1.200,14.25,0.888,bicubic\nrepvit_m3.dist_in1k,224,95.240,4.760,99.060,0.940,10.68,0.950,bicubic\nvit_srelpos_medium_patch16_224.sw_in1k,224,95.240,4.760,98.980,1.020,38.74,0.900,bicubic\nresnetaa50.a1h_in1k,288,95.240,4.760,98.910,1.090,25.56,1.000,bicubic\ncoatnet_nano_rw_224.sw_in1k,224,95.240,4.760,98.870,1.130,15.14,0.900,bicubic\ntwins_pcpvt_small.in1k,224,95.230,4.770,98.880,1.120,24.11,0.900,bicubic\nhgnetv2_b2.ssld_stage1_in22k_in1k,224,95.220,4.780,99.080,0.920,11.22,0.965,bicubic\ntwins_svt_small.in1k,224,95.210,4.790,98.860,1.140,24.06,0.900,bicubic\nefficientnet_b3.ra2_in1k,288,95.190,4.810,99.040,0.960,12.23,0.875,bicubic\nswin_s3_tiny_224.ms_in1k,224,95.190,4.810,98.960,1.040,28.33,0.900,bicubic\nvit_relpos_small_patch16_224.sw_in1k,224,95.190,4.810,98.950,1.050,21.98,0.900,bicubic\nconvit_small.fb_in1k,224,95.190,4.810,98.930,1.070,27.78,0.875,bicubic\npit_s_distilled_224.in1k,224,95.180,4.820,98.880,1.120,24.04,0.900,bicubic\ntf_efficientnetv2_b3.in1k,300,95.18
0,4.820,98.820,1.180,14.36,0.904,bicubic\nregnetz_b16.ra3_in1k,288,95.170,4.830,99.080,0.920,9.72,1.000,bicubic\nmobilevitv2_200.cvnets_in22k_ft_in1k,256,95.170,4.830,98.920,1.080,18.45,0.888,bicubic\nswin_tiny_patch4_window7_224.ms_in1k,224,95.170,4.830,98.860,1.140,28.29,0.900,bicubic\nxcit_tiny_12_p16_384.fb_dist_in1k,384,95.160,4.840,99.000,1.000,6.72,1.000,bicubic\ncs3darknet_focus_l.c2ns_in1k,288,95.160,4.840,98.960,1.040,21.15,0.950,bicubic\nlamhalobotnet50ts_256.a1h_in1k,256,95.160,4.840,98.880,1.120,22.57,0.950,bicubic\nmobilevitv2_150.cvnets_in22k_ft_in1k,256,95.150,4.850,98.830,1.170,10.59,0.888,bicubic\ntf_efficientnet_b1.ns_jft_in1k,240,95.140,4.860,99.090,0.910,7.79,0.882,bicubic\ncs3darknet_l.c2ns_in1k,288,95.140,4.860,98.990,1.010,21.16,0.950,bicubic\nhalonet50ts.a1h_in1k,256,95.140,4.860,98.780,1.220,22.73,0.940,bicubic\nresnet152.a1_in1k,224,95.140,4.860,98.410,1.590,60.19,0.950,bicubic\nfastvit_sa12.apple_dist_in1k,256,95.130,4.870,98.810,1.190,11.58,0.900,bicubic\nefficientnet_el.ra_in1k,300,95.120,4.880,98.990,1.010,10.59,0.904,bicubic\nvit_base_patch32_clip_224.openai_ft_in1k,224,95.120,4.880,98.990,1.010,88.22,0.900,bicubic\ncrossvit_15_240.in1k,240,95.120,4.880,98.940,1.060,27.53,0.875,bicubic\nhgnet_small.paddle_in1k,224,95.110,4.890,99.030,0.970,24.36,0.965,bicubic\necaresnet50d_pruned.miil_in1k,288,95.110,4.890,98.930,1.070,19.94,0.950,bicubic\npoolformer_s36.sail_in1k,224,95.110,4.890,98.900,1.100,30.86,0.900,bicubic\ntnt_s_patch16_224.in1k,224,95.110,4.890,98.870,1.130,23.77,0.900,bicubic\nregnetx_080.tv2_in1k,224,95.110,4.890,98.800,1.200,39.57,0.965,bicubic\nconvnext_nano_ols.d1h_in1k,288,95.110,4.890,98.710,1.290,15.65,1.000,bicubic\ngernet_l.idstcv_in1k,256,95.100,4.900,98.900,1.100,31.08,0.875,bilinear\nmobilenetv4_conv_large.e500_r256_in1k,256,95.090,4.910,98.950,1.050,32.59,0.950,bicubic\nxcit_tiny_12_p8_224.fb_dist_in1k,224,95.090,4.910,98.930,1.070,6.71,1.000,bicubic\nlegacy_senet154.in1k,224,95.090,4.910,98.830,1.170,115.09,0.8
75,bilinear\nmambaout_kobe.in1k,288,95.090,4.910,98.830,1.170,9.14,1.000,bicubic\nfasternet_s.in1k,224,95.090,4.910,98.780,1.220,31.18,1.000,bicubic\nconvmixer_1536_20.in1k,224,95.080,4.920,99.030,0.970,51.63,0.960,bicubic\necaresnet101d_pruned.miil_in1k,224,95.080,4.920,98.980,1.020,24.88,0.875,bicubic\nvit_small_patch32_384.augreg_in21k_ft_in1k,384,95.070,4.930,98.980,1.020,22.92,1.000,bicubic\nwide_resnet50_2.racm_in1k,224,95.070,4.930,98.960,1.040,68.88,0.875,bicubic\nvit_srelpos_small_patch16_224.sw_in1k,224,95.070,4.930,98.950,1.050,21.97,0.900,bicubic\nresnet152s.gluon_in1k,224,95.060,4.940,98.930,1.070,60.32,0.875,bicubic\ntnt_s_legacy_patch16_224.in1k,224,95.060,4.940,98.840,1.160,23.76,0.900,bicubic\ntiny_vit_5m_224.dist_in22k_ft_in1k,224,95.040,4.960,98.990,1.010,5.39,0.950,bicubic\nseresnet33ts.ra2_in1k,288,95.040,4.960,98.900,1.100,19.78,1.000,bicubic\ngc_efficientnetv2_rw_t.agc_in1k,224,95.040,4.960,98.840,1.160,13.68,1.000,bicubic\nresnetv2_50x1_bit.goog_in21k_ft_in1k,448,95.030,4.970,99.060,0.940,25.55,1.000,bilinear\nseresnext50_32x4d.racm_in1k,224,95.030,4.970,98.890,1.110,27.56,0.875,bicubic\nlevit_256.fb_dist_in1k,224,95.030,4.970,98.870,1.130,18.89,0.900,bicubic\nlevit_conv_256.fb_dist_in1k,224,95.030,4.970,98.870,1.130,18.89,0.900,bicubic\nfbnetv3_g.ra2_in1k,240,95.030,4.970,98.800,1.200,16.62,0.950,bilinear\nvit_base_patch32_224.augreg_in21k_ft_in1k,224,95.010,4.990,99.020,0.980,88.22,0.900,bicubic\nresnet50d.ra2_in1k,288,95.010,4.990,98.970,1.030,25.58,0.950,bicubic\ntf_efficientnet_b3.aa_in1k,300,95.010,4.990,98.910,1.090,12.23,0.904,bicubic\nmobilenetv4_conv_medium.e500_r256_in1k,320,95.000,5.000,98.900,1.100,9.72,1.000,bicubic\ncoat_mini.in1k,224,94.990,5.010,98.780,1.220,10.34,0.900,bicubic\ndeit3_small_patch16_224.fb_in1k,224,94.990,5.010,98.470,1.530,22.06,0.900,bicubic\nrexnet_200.nav_in1k,224,94.980,5.020,98.990,1.010,16.37,0.875,bicubic\ntresnet_m.miil_in1k_448,448,94.980,5.020,98.980,1.020,31.39,0.875,bilinear\nresnet50.d_in1k,288,9
4.980,5.020,98.830,1.170,25.56,1.000,bicubic\nefficientnetv2_rw_t.ra2_in1k,224,94.980,5.020,98.750,1.250,13.65,1.000,bicubic\nresnetv2_50d_evos.ah_in1k,224,94.970,5.030,98.750,1.250,25.59,0.950,bicubic\nresnest50d_4s2x40d.in1k,224,94.960,5.040,99.070,0.930,30.42,0.875,bicubic\nhgnet_tiny.paddle_in1k,288,94.960,5.040,99.060,0.940,14.74,1.000,bicubic\nvit_base_patch16_384.augreg_in1k,384,94.960,5.040,98.910,1.090,86.86,1.000,bicubic\neva02_tiny_patch14_336.mim_in22k_ft_in1k,336,94.960,5.040,98.870,1.130,5.76,1.000,bicubic\nedgenext_small.usi_in1k,256,94.950,5.050,98.960,1.040,5.59,0.950,bicubic\nmobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k,256,94.950,5.050,98.800,1.200,8.46,0.950,bicubic\nresnet152.a2_in1k,224,94.950,5.050,98.570,1.430,60.19,0.950,bicubic\nseresnext101_64x4d.gluon_in1k,224,94.940,5.060,98.800,1.200,88.23,0.875,bicubic\nsenet154.gluon_in1k,224,94.930,5.070,98.770,1.230,115.09,0.875,bicubic\nconvnext_nano.d1h_in1k,224,94.930,5.070,98.670,1.330,15.59,0.950,bicubic\ngcresnet33ts.ra2_in1k,288,94.920,5.080,98.880,1.120,19.88,1.000,bicubic\nseresnext101_32x4d.gluon_in1k,224,94.920,5.080,98.780,1.220,48.96,0.875,bicubic\ntf_efficientnet_lite4.in1k,380,94.890,5.110,99.020,0.980,13.01,0.920,bilinear\nrepvit_m1_1.dist_450e_in1k,224,94.890,5.110,98.920,1.080,8.80,0.950,bicubic\nese_vovnet39b.ra_in1k,288,94.890,5.110,98.910,1.090,24.57,0.950,bicubic\nfastvit_sa12.apple_in1k,256,94.890,5.110,98.860,1.140,11.58,0.900,bicubic\nresnet50.c2_in1k,288,94.890,5.110,98.810,1.190,25.56,1.000,bicubic\nresmlp_36_224.fb_distilled_in1k,224,94.880,5.120,98.870,1.130,44.69,0.875,bicubic\nwide_resnet50_2.tv2_in1k,224,94.870,5.130,98.950,1.050,68.88,0.965,bilinear\ncs3sedarknet_l.c2ns_in1k,256,94.870,5.130,98.900,1.100,21.91,0.887,bicubic\nmobilevitv2_175.cvnets_in1k,256,94.870,5.130,98.860,1.140,14.25,0.888,bicubic\ngcresnext50ts.ch_in1k,288,94.870,5.130,98.850,1.150,15.67,1.000,bicubic\nmobilenetv4_conv_blur_medium.e500_r224_in1k,256,94.870,5.130,98.810,1.190,9.72,1.000,bicubic\nre
snet50.b1k_in1k,288,94.870,5.130,98.800,1.200,25.56,1.000,bicubic\nfastvit_s12.apple_dist_in1k,256,94.870,5.130,98.770,1.230,9.47,0.900,bicubic\nmobilenetv4_hybrid_medium.e500_r224_in1k,224,94.870,5.130,98.680,1.320,11.07,0.950,bicubic\nconvnext_tiny.fb_in22k_ft_in1k,288,94.870,5.130,98.550,1.450,28.59,1.000,bicubic\nmobilenetv3_large_150d.ra4_e3600_r256_in1k,256,94.860,5.140,98.940,1.060,14.62,0.950,bilinear\nresnetv2_50.a1h_in1k,288,94.860,5.140,98.870,1.130,25.55,1.000,bicubic\nseresnet33ts.ra2_in1k,256,94.860,5.140,98.810,1.190,19.78,0.900,bicubic\ngcresnet50t.ra2_in1k,256,94.860,5.140,98.790,1.210,25.90,0.900,bicubic\nmobilevitv2_200.cvnets_in1k,256,94.860,5.140,98.720,1.280,18.45,0.888,bicubic\ncrossvit_small_240.in1k,240,94.850,5.150,99.020,0.980,26.86,0.875,bicubic\nresnext50_32x4d.fb_ssl_yfcc100m_ft_in1k,224,94.850,5.150,98.880,1.120,25.03,0.875,bilinear\nefficientnet_b1.ra4_e3600_r240_in1k,240,94.850,5.150,98.710,1.290,7.79,0.900,bicubic\nlambda_resnet50ts.a1h_in1k,256,94.850,5.150,98.470,1.530,21.54,0.950,bicubic\necaresnet50t.ra2_in1k,256,94.840,5.160,98.910,1.090,25.57,0.875,bicubic\nres2net101d.in1k,224,94.840,5.160,98.780,1.220,45.23,0.875,bilinear\nresnext50_32x4d.a1_in1k,288,94.840,5.160,98.620,1.380,25.03,1.000,bicubic\nresnest50d.in1k,224,94.830,5.170,98.870,1.130,27.48,0.875,bilinear\ncspresnext50.ra_in1k,256,94.830,5.170,98.730,1.270,20.57,0.887,bilinear\nresnet152d.gluon_in1k,224,94.820,5.180,98.760,1.240,60.21,0.875,bicubic\nswiftformer_l1.dist_in1k,224,94.800,5.200,98.930,1.070,12.06,0.950,bicubic\nvit_wee_patch16_reg1_gap_256.sbb_in1k,256,94.790,5.210,98.950,1.050,13.42,0.950,bicubic\nresnet50.c1_in1k,288,94.790,5.210,98.920,1.080,25.56,1.000,bicubic\nsehalonet33ts.ra2_in1k,256,94.790,5.210,98.570,1.430,13.69,0.940,bicubic\nresnet50.a1h_in1k,224,94.780,5.220,98.690,1.310,25.56,1.000,bicubic\nresnet50.a1_in1k,288,94.770,5.230,98.760,1.240,25.56,1.000,bicubic\nresnest50d_1s4x24d.in1k,224,94.760,5.240,98.980,1.020,25.68,0.875,bicubic\nrepvit_m1
_1.dist_300e_in1k,224,94.760,5.240,98.930,1.070,8.80,0.950,bicubic\nmobilenetv4_conv_medium.e500_r224_in1k,256,94.760,5.240,98.860,1.140,9.72,1.000,bicubic\necaresnetlight.miil_in1k,224,94.760,5.240,98.800,1.200,30.16,0.875,bicubic\nresnetv2_50d_gn.ah_in1k,224,94.760,5.240,98.690,1.310,25.57,0.950,bicubic\nresnet50d.a1_in1k,288,94.760,5.240,98.470,1.530,25.58,1.000,bicubic\nconvnext_pico.d1_in1k,288,94.750,5.250,98.700,1.300,9.05,0.950,bicubic\nresnet101s.gluon_in1k,224,94.740,5.260,98.820,1.180,44.67,0.875,bicubic\nhaloregnetz_b.ra3_in1k,224,94.740,5.260,98.680,1.320,11.68,0.940,bicubic\nrepvit_m2.dist_in1k,224,94.740,5.260,98.670,1.330,8.80,0.950,bicubic\nconvnext_nano_ols.d1h_in1k,224,94.740,5.260,98.620,1.380,15.65,0.950,bicubic\nvit_small_patch16_rope_224.naver_in1k,224,94.740,5.260,98.220,1.780,21.98,0.900,bicubic\nresnet50.b2k_in1k,288,94.720,5.280,98.820,1.180,25.56,1.000,bicubic\nresnet152.a3_in1k,224,94.720,5.280,98.690,1.310,60.19,0.950,bicubic\ndeit_small_distilled_patch16_224.fb_in1k,224,94.710,5.290,99.020,0.980,22.44,0.900,bicubic\nresnetrs101.tf_in1k,192,94.700,5.300,98.710,1.290,63.62,0.940,bicubic\nresnet101.a2_in1k,224,94.700,5.300,98.640,1.360,44.55,0.950,bicubic\nvit_small_patch16_rope_mixed_224.naver_in1k,224,94.700,5.300,98.580,1.420,21.99,0.900,bicubic\nmaxvit_rmlp_pico_rw_256.sw_in1k,256,94.690,5.310,98.800,1.200,7.52,0.950,bicubic\nresnext50_32x4d.ra_in1k,288,94.690,5.310,98.760,1.240,25.03,0.950,bicubic\nxcit_tiny_12_p8_224.fb_in1k,224,94.680,5.320,98.830,1.170,6.71,1.000,bicubic\nedgenext_small_rw.sw_in1k,320,94.680,5.320,98.820,1.180,7.83,1.000,bicubic\nseresnet50.a2_in1k,288,94.680,5.320,98.760,1.240,28.09,1.000,bicubic\necaresnet50t.a1_in1k,224,94.680,5.320,98.580,1.420,25.57,0.950,bicubic\ncspdarknet53.ra_in1k,256,94.670,5.330,98.800,1.200,27.64,0.887,bilinear\nseresnet50.a1_in1k,288,94.670,5.330,98.730,1.270,28.09,1.000,bicubic\nresnext101_64x4d.gluon_in1k,224,94.670,5.330,98.650,1.350,83.46,0.875,bicubic\nsebotnet33ts_256.a1h_in1k,2
56,94.670,5.330,98.500,1.500,13.70,0.940,bicubic\nregnetx_032.tv2_in1k,224,94.660,5.340,98.840,1.160,15.30,0.965,bicubic\ncs3darknet_focus_l.c2ns_in1k,256,94.660,5.340,98.810,1.190,21.15,0.887,bicubic\nresnet50.tv2_in1k,224,94.660,5.340,98.780,1.220,25.56,0.965,bilinear\nefficientnet_b3_pruned.in1k,300,94.660,5.340,98.740,1.260,9.86,0.904,bicubic\npit_s_224.in1k,224,94.660,5.340,98.720,1.280,23.46,0.900,bicubic\neca_resnet33ts.ra2_in1k,288,94.650,5.350,98.890,1.110,19.68,1.000,bicubic\ngernet_m.idstcv_in1k,224,94.650,5.350,98.860,1.140,21.14,0.875,bilinear\npoolformerv2_s24.sail_in1k,224,94.650,5.350,98.830,1.170,21.34,1.000,bicubic\nconvnext_pico_ols.d1_in1k,288,94.650,5.350,98.770,1.230,9.06,1.000,bicubic\nresmlp_big_24_224.fb_in1k,224,94.650,5.350,98.510,1.490,129.14,0.875,bicubic\nvit_pwee_patch16_reg1_gap_256.sbb_in1k,256,94.640,5.360,98.780,1.220,15.25,0.950,bicubic\nefficientnet_b2.ra_in1k,288,94.640,5.360,98.710,1.290,9.11,1.000,bicubic\nresnet50.a2_in1k,288,94.640,5.360,98.660,1.340,25.56,1.000,bicubic\nresnetaa50.a1h_in1k,224,94.640,5.360,98.590,1.410,25.56,0.950,bicubic\nvit_small_patch16_rope_ape_224.naver_in1k,224,94.640,5.360,98.310,1.690,22.06,0.900,bicubic\necaresnet50d.miil_in1k,224,94.630,5.370,98.880,1.120,25.58,0.875,bicubic\nresnext50_32x4d.tv2_in1k,224,94.630,5.370,98.780,1.220,25.03,0.965,bilinear\ntf_efficientnet_b3.in1k,300,94.630,5.370,98.770,1.230,12.23,0.904,bicubic\nresnet50d.ra4_e3600_r224_in1k,224,94.630,5.370,98.700,1.300,25.58,0.950,bicubic\ndarknet53.c2ns_in1k,288,94.620,5.380,98.900,1.100,41.61,1.000,bicubic\nmambaout_femto.in1k,288,94.620,5.380,98.850,1.150,7.30,1.000,bicubic\nfastvit_t12.apple_dist_in1k,256,94.620,5.380,98.800,1.200,7.55,0.900,bicubic\nnf_resnet50.ra2_in1k,288,94.620,5.380,98.800,1.200,25.56,0.940,bicubic\ntresnet_m.miil_in1k,224,94.620,5.380,98.560,1.440,31.39,0.875,bilinear\nresnet50d.a2_in1k,288,94.590,5.410,98.670,1.330,25.58,1.000,bicubic\ntf_efficientnetv2_b3.in1k,240,94.590,5.410,98.660,1.340,14.36,0.904,b
icubic\nhgnetv2_b1.ssld_stage2_ft_in1k,288,94.580,5.420,98.960,1.040,6.34,1.000,bicubic\npoolformer_s24.sail_in1k,224,94.580,5.420,98.900,1.100,21.39,0.900,bicubic\nhgnetv2_b1.ssld_stage1_in22k_in1k,288,94.580,5.420,98.880,1.120,6.34,1.000,bicubic\nregnety_016.tv2_in1k,224,94.580,5.420,98.840,1.160,11.20,0.965,bicubic\nregnety_320.pycls_in1k,224,94.580,5.420,98.840,1.160,145.05,0.875,bicubic\nvit_small_patch16_rope_mixed_ape_224.naver_in1k,224,94.580,5.420,98.360,1.640,22.06,0.900,bicubic\ninception_resnet_v2.tf_in1k,299,94.570,5.430,98.800,1.200,55.84,0.897,bicubic\nseresnet50.ra2_in1k,224,94.570,5.430,98.760,1.240,28.09,0.875,bicubic\nrepvit_m1_0.dist_450e_in1k,224,94.560,5.440,98.890,1.110,7.30,0.950,bicubic\nregnetz_b16.ra3_in1k,224,94.560,5.440,98.880,1.120,9.72,0.940,bicubic\nresnext50_32x4d.a2_in1k,288,94.560,5.440,98.640,1.360,25.03,1.000,bicubic\ncs3darknet_l.c2ns_in1k,256,94.550,5.450,98.910,1.090,21.16,0.887,bicubic\nrepvgg_b3.rvgg_in1k,224,94.550,5.450,98.780,1.220,123.09,0.875,bilinear\nresnext50d_32x4d.bt_in1k,288,94.550,5.450,98.690,1.310,25.05,0.950,bicubic\ngcresnext50ts.ch_in1k,256,94.540,5.460,98.710,1.290,15.67,0.900,bicubic\nresnext50_32x4d.a1h_in1k,224,94.540,5.460,98.600,1.400,25.03,0.950,bicubic\nxcit_tiny_24_p16_224.fb_dist_in1k,224,94.530,5.470,98.800,1.200,12.12,1.000,bicubic\nmobilevitv2_150.cvnets_in1k,256,94.530,5.470,98.720,1.280,10.59,0.888,bicubic\nresnext101_32x4d.gluon_in1k,224,94.530,5.470,98.630,1.370,44.18,0.875,bicubic\nresnet101.a1_in1k,224,94.530,5.470,98.520,1.480,44.55,0.950,bicubic\nrepvgg_b3g4.rvgg_in1k,224,94.520,5.480,98.960,1.040,83.83,0.875,bilinear\nregnety_120.pycls_in1k,224,94.520,5.480,98.820,1.180,51.82,0.875,bicubic\ndarknetaa53.c2ns_in1k,288,94.520,5.480,98.760,1.240,36.02,1.000,bilinear\nresnet50.ram_in1k,288,94.520,5.480,98.670,1.330,25.56,0.950,bicubic\ntf_efficientnet_b2.ap_in1k,260,94.510,5.490,98.620,1.380,9.11,0.890,bicubic\nconvmixer_768_32.in1k,224,94.500,5.500,98.850,1.150,21.11,0.960,bicubic\nefficie
ntformer_l1.snap_dist_in1k,224,94.500,5.500,98.820,1.180,12.29,0.950,bicubic\ngcresnet33ts.ra2_in1k,256,94.500,5.500,98.780,1.220,19.88,0.900,bicubic\necaresnet50t.a2_in1k,224,94.500,5.500,98.640,1.360,25.57,0.950,bicubic\nresnet50.fb_ssl_yfcc100m_ft_in1k,224,94.490,5.510,98.920,1.080,25.56,0.875,bilinear\nresnetv2_34d.ra4_e3600_r384_in1k,448,94.490,5.510,98.780,1.220,21.82,1.000,bicubic\nefficientvit_b1.r288_in1k,288,94.490,5.510,98.540,1.460,9.10,1.000,bicubic\nrexnet_150.nav_in1k,224,94.480,5.520,98.790,1.210,9.73,0.875,bicubic\nresnetblur50.bt_in1k,288,94.470,5.530,98.780,1.220,25.56,0.950,bicubic\nwide_resnet101_2.tv2_in1k,176,94.470,5.530,98.680,1.320,126.89,0.875,bilinear\nconvnextv2_pico.fcmae_ft_in1k,224,94.460,5.540,98.620,1.380,9.07,0.875,bicubic\nresmlp_24_224.fb_distilled_in1k,224,94.450,5.550,98.770,1.230,30.02,0.875,bicubic\nresnetv2_50.a1h_in1k,224,94.450,5.550,98.700,1.300,25.55,0.950,bicubic\nregnetx_320.pycls_in1k,224,94.440,5.560,98.730,1.270,107.81,0.875,bicubic\ntf_efficientnetv2_b2.in1k,260,94.440,5.560,98.600,1.400,10.10,0.890,bicubic\ndeit_small_patch16_224.fb_in1k,224,94.430,5.570,98.700,1.300,22.05,0.900,bicubic\nresnet50.a1_in1k,224,94.410,5.590,98.430,1.570,25.56,0.950,bicubic\ntf_efficientnet_el.in1k,300,94.400,5.600,98.710,1.290,10.59,0.904,bicubic\necaresnet50t.a3_in1k,224,94.400,5.600,98.670,1.330,25.57,0.950,bicubic\ntf_efficientnet_b2.aa_in1k,260,94.400,5.600,98.600,1.400,9.11,0.890,bicubic\nresnetv2_34d.ra4_e3600_r224_in1k,288,94.400,5.600,98.490,1.510,21.82,1.000,bicubic\nefficientnet_el_pruned.in1k,300,94.390,5.610,98.750,1.250,10.59,0.904,bicubic\ngcvit_xxtiny.in1k,224,94.380,5.620,98.890,1.110,12.00,0.875,bicubic\ninception_v4.tf_in1k,299,94.380,5.620,98.560,1.440,42.68,0.875,bicubic\nregnety_160.pycls_in1k,224,94.370,5.630,98.830,1.170,83.59,0.875,bicubic\nlegacy_seresnext101_32x4d.in1k,224,94.370,5.630,98.640,1.360,48.96,0.875,bilinear\ndpn107.mx_in1k,224,94.370,5.630,98.480,1.520,86.92,0.875,bicubic\nresnet50_gn.a1h_in1k,22
4,94.360,5.640,98.690,1.310,25.56,0.940,bicubic\nseresnext50_32x4d.gluon_in1k,224,94.360,5.640,98.620,1.380,27.56,0.875,bicubic\nrepvit_m1_0.dist_300e_in1k,224,94.330,5.670,98.850,1.150,7.30,0.950,bicubic\nxception71.tf_in1k,299,94.330,5.670,98.650,1.350,42.34,0.903,bicubic\nresnet50.bt_in1k,288,94.320,5.680,98.640,1.360,25.56,0.950,bicubic\nmobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k,224,94.320,5.680,98.580,1.420,8.46,0.900,bicubic\ndarknet53.c2ns_in1k,256,94.310,5.690,98.860,1.140,41.61,0.887,bicubic\necaresnet26t.ra2_in1k,320,94.310,5.690,98.720,1.280,16.01,0.950,bicubic\nres2net50d.in1k,224,94.310,5.690,98.550,1.450,25.72,0.875,bilinear\nmobilenetv4_conv_medium.e500_r256_in1k,256,94.300,5.700,98.780,1.220,9.72,0.950,bicubic\nresnet50d.ra2_in1k,224,94.300,5.700,98.740,1.260,25.58,0.875,bicubic\nresnetrs50.tf_in1k,224,94.300,5.700,98.640,1.360,35.69,0.910,bicubic\nresnet50d.a1_in1k,224,94.300,5.700,98.360,1.640,25.58,0.950,bicubic\nhgnet_tiny.paddle_in1k,224,94.290,5.710,98.800,1.200,14.74,0.965,bicubic\nmobilenetv4_conv_blur_medium.e500_r224_in1k,224,94.290,5.710,98.750,1.250,9.72,0.950,bicubic\ndpn92.mx_in1k,224,94.280,5.720,98.750,1.250,37.67,0.875,bicubic\ntiny_vit_5m_224.in1k,224,94.280,5.720,98.660,1.340,5.39,0.950,bicubic\nresnet50.c2_in1k,224,94.280,5.720,98.540,1.460,25.56,0.950,bicubic\ncait_xxs36_224.fb_dist_in1k,224,94.260,5.740,98.710,1.290,17.30,1.000,bicubic\necaresnet50d_pruned.miil_in1k,224,94.260,5.740,98.700,1.300,19.94,0.875,bicubic\nskresnext50_32x4d.ra_in1k,224,94.260,5.740,98.470,1.530,27.48,0.875,bicubic\nresnet101d.gluon_in1k,224,94.250,5.750,98.550,1.450,44.57,0.875,bicubic\nregnetx_120.pycls_in1k,224,94.240,5.760,98.650,1.350,46.11,0.875,bicubic\nresnet50.ra_in1k,288,94.220,5.780,98.640,1.360,25.56,0.950,bicubic\nmambaout_kobe.in1k,224,94.220,5.780,98.600,1.400,9.14,1.000,bicubic\nresnext50d_32x4d.bt_in1k,224,94.220,5.780,98.600,1.400,25.05,0.875,bicubic\ninception_resnet_v2.tf_ens_adv_in1k,299,94.220,5.780,98.590,1.410,55.84,0.897,bicubi
c\nfastvit_s12.apple_in1k,256,94.220,5.780,98.570,1.430,9.47,0.900,bicubic\nmobilenetv4_conv_medium.e500_r224_in1k,224,94.210,5.790,98.700,1.300,9.72,0.950,bicubic\nresnetv2_34.ra4_e3600_r224_in1k,288,94.210,5.790,98.560,1.440,21.80,1.000,bicubic\neca_resnet33ts.ra2_in1k,256,94.200,5.800,98.770,1.230,19.68,0.900,bicubic\nwide_resnet50_2.tv2_in1k,176,94.200,5.800,98.680,1.320,68.88,0.875,bilinear\nresmlp_36_224.fb_in1k,224,94.200,5.800,98.660,1.340,44.69,0.875,bicubic\ntf_efficientnet_lite3.in1k,300,94.200,5.800,98.640,1.360,8.20,0.904,bilinear\nefficientformerv2_s1.snap_dist_in1k,224,94.200,5.800,98.630,1.370,6.19,0.950,bicubic\nmixnet_xl.ra_in1k,224,94.200,5.800,98.330,1.670,11.90,0.875,bicubic\nregnety_080.pycls_in1k,224,94.180,5.820,98.670,1.330,39.18,0.875,bicubic\nlevit_192.fb_dist_in1k,224,94.180,5.820,98.540,1.460,10.95,0.900,bicubic\nlevit_conv_192.fb_dist_in1k,224,94.180,5.820,98.540,1.460,10.95,0.900,bicubic\nregnetx_016.tv2_in1k,224,94.170,5.830,98.740,1.260,9.19,0.965,bicubic\nconvnextv2_femto.fcmae_ft_in1k,288,94.170,5.830,98.630,1.370,5.23,0.950,bicubic\nregnetx_160.pycls_in1k,224,94.160,5.840,98.740,1.260,54.28,0.875,bicubic\nregnety_064.pycls_in1k,224,94.160,5.840,98.720,1.280,30.58,0.875,bicubic\nresnet152c.gluon_in1k,224,94.160,5.840,98.630,1.370,60.21,0.875,bicubic\nresnet33ts.ra2_in1k,288,94.160,5.840,98.610,1.390,19.68,1.000,bicubic\ndpn98.mx_in1k,224,94.160,5.840,98.600,1.400,61.57,0.875,bicubic\nresnet152.gluon_in1k,224,94.160,5.840,98.460,1.540,60.19,0.875,bicubic\nnf_resnet50.ra2_in1k,256,94.150,5.850,98.740,1.260,25.56,0.940,bicubic\nvit_base_patch16_224.sam_in1k,224,94.150,5.850,98.690,1.310,86.57,0.900,bicubic\nnf_regnet_b1.ra2_in1k,288,94.150,5.850,98.630,1.370,10.22,0.900,bicubic\ngmlp_s16_224.ra3_in1k,224,94.150,5.850,98.490,1.510,19.42,0.875,bicubic\nefficientnet_b2_pruned.in1k,260,94.140,5.860,98.520,1.480,8.31,0.890,bicubic\ntf_efficientnet_b2.in1k,260,94.130,5.870,98.450,1.550,9.11,0.890,bicubic\nresnet50.b2k_in1k,224,94.120,5.880,
98.450,1.550,25.56,0.950,bicubic\nese_vovnet39b.ra_in1k,224,94.110,5.890,98.660,1.340,24.57,0.875,bicubic\nresnext50_32x4d.ra_in1k,224,94.110,5.890,98.330,1.670,25.03,0.875,bicubic\nresnet152.tv2_in1k,176,94.100,5.900,98.550,1.450,60.19,0.875,bilinear\nresnet50d.a2_in1k,224,94.100,5.900,98.480,1.520,25.58,0.950,bicubic\nxcit_tiny_24_p16_224.fb_in1k,224,94.090,5.910,98.510,1.490,12.12,1.000,bicubic\nresnetv2_34d.ra4_e3600_r384_in1k,384,94.070,5.930,98.630,1.370,21.82,1.000,bicubic\neca_halonext26ts.c1_in1k,256,94.070,5.930,98.500,1.500,10.76,0.940,bicubic\nresnext50_32x4d.a1_in1k,224,94.070,5.930,98.200,1.800,25.03,0.950,bicubic\nconvnext_pico_ols.d1_in1k,224,94.060,5.940,98.540,1.460,9.06,0.950,bicubic\nconvnext_pico.d1_in1k,224,94.060,5.940,98.490,1.510,9.05,0.875,bicubic\ndla102x2.in1k,224,94.060,5.940,98.480,1.520,41.28,0.875,bilinear\nresnet50.d_in1k,224,94.060,5.940,98.430,1.570,25.56,0.950,bicubic\nresnet101.a3_in1k,224,94.050,5.950,98.650,1.350,44.55,0.950,bicubic\ncoat_lite_mini.in1k,224,94.050,5.950,98.530,1.470,11.01,0.900,bicubic\nhalonet26t.a1h_in1k,256,94.050,5.950,98.470,1.530,12.48,0.950,bicubic\nseresnet50.a1_in1k,224,94.050,5.950,98.410,1.590,28.09,0.950,bicubic\nresnext50_32x4d.a2_in1k,224,94.050,5.950,98.240,1.760,25.03,0.950,bicubic\ndpn131.mx_in1k,224,94.030,5.970,98.710,1.290,79.25,0.875,bicubic\nhrnet_w64.ms_in1k,224,94.030,5.970,98.620,1.380,128.06,0.875,bilinear\nresnet50.c1_in1k,224,94.030,5.970,98.620,1.380,25.56,0.950,bicubic\nresnet50.b1k_in1k,224,94.030,5.970,98.520,1.480,25.56,0.950,bicubic\nefficientvit_b1.r256_in1k,256,94.030,5.970,98.370,1.630,9.10,1.000,bicubic\nefficientnet_b2.ra_in1k,256,94.020,5.980,98.460,1.540,9.11,0.875,bicubic\nresmlp_24_224.fb_in1k,224,94.020,5.980,98.350,1.650,30.02,0.875,bicubic\nfbnetv3_b.ra2_in1k,256,93.990,6.010,98.630,1.370,8.60,0.950,bilinear\nfastvit_t12.apple_in1k,256,93.990,6.010,98.600,1.400,7.55,0.900,bicubic\nmobilevitv2_125.cvnets_in1k,256,93.990,6.010,98.560,1.440,7.48,0.888,bicubic\nresnet50
.am_in1k,224,93.990,6.010,98.520,1.480,25.56,0.875,bicubic\nresnetblur50.bt_in1k,224,93.980,6.020,98.540,1.460,25.56,0.875,bicubic\ndpn68b.ra_in1k,288,93.980,6.020,98.320,1.680,12.61,1.000,bicubic\nshvit_s4.in1k,256,93.970,6.030,98.460,1.540,16.59,0.875,bicubic\ntf_efficientnetv2_b1.in1k,240,93.960,6.040,98.630,1.370,8.14,0.882,bicubic\nseresnet50.a2_in1k,224,93.960,6.040,98.390,1.610,28.09,0.950,bicubic\nhrnet_w48.ms_in1k,224,93.930,6.070,98.600,1.400,77.47,0.875,bilinear\nedgenext_small_rw.sw_in1k,256,93.930,6.070,98.550,1.450,7.83,0.900,bicubic\nconvnext_femto.d1_in1k,288,93.930,6.070,98.530,1.470,5.22,0.950,bicubic\nfbnetv3_d.ra2_in1k,256,93.920,6.080,98.740,1.260,10.31,0.950,bilinear\ntf_efficientnet_cc_b1_8e.in1k,240,93.920,6.080,98.270,1.730,39.72,0.882,bicubic\ndarknetaa53.c2ns_in1k,256,93.900,6.100,98.630,1.370,36.02,0.887,bilinear\nregnetx_064.pycls_in1k,224,93.900,6.100,98.630,1.370,26.21,0.875,bicubic\nvit_small_patch16_224.augreg_in1k,224,93.900,6.100,98.460,1.540,22.05,0.900,bicubic\nconvnext_femto_ols.d1_in1k,288,93.890,6.110,98.620,1.380,5.23,0.950,bicubic\nrexnet_130.nav_in1k,224,93.890,6.110,98.420,1.580,7.56,0.875,bicubic\nhgnetv2_b1.ssld_stage2_ft_in1k,224,93.880,6.120,98.710,1.290,6.34,0.965,bicubic\nregnety_040.pycls_in1k,224,93.880,6.120,98.650,1.350,20.65,0.875,bicubic\nrepvgg_b2g4.rvgg_in1k,224,93.870,6.130,98.610,1.390,61.76,0.875,bilinear\nlambda_resnet26t.c1_in1k,256,93.870,6.130,98.600,1.400,10.96,0.940,bicubic\nefficientnet_b0.ra4_e3600_r224_in1k,256,93.870,6.130,98.390,1.610,5.29,1.000,bicubic\nregnetx_080.pycls_in1k,224,93.850,6.150,98.520,1.480,39.57,0.875,bicubic\nresnet50.a2_in1k,224,93.850,6.150,98.360,1.640,25.56,0.950,bicubic\nefficientnet_em.ra2_in1k,240,93.840,6.160,98.810,1.190,6.90,0.882,bicubic\necaresnet26t.ra2_in1k,256,93.840,6.160,98.650,1.350,16.01,0.875,bicubic\npvt_v2_b1.in1k,224,93.830,6.170,98.660,1.340,14.01,0.900,bicubic\nresnet32ts.ra2_in1k,288,93.830,6.170,98.660,1.340,17.96,1.000,bicubic\nresnext50_32x4d.gluon_
in1k,224,93.830,6.170,98.410,1.590,25.03,0.875,bicubic\nhgnetv2_b0.ssld_stage2_ft_in1k,288,93.820,6.180,98.760,1.240,6.00,1.000,bicubic\nresnet101.tv2_in1k,176,93.820,6.180,98.450,1.550,44.55,0.875,bilinear\ntf_efficientnetv2_b2.in1k,208,93.820,6.180,98.360,1.640,10.10,0.890,bicubic\nresnext101_32x8d.tv_in1k,224,93.810,6.190,98.580,1.420,88.79,0.875,bilinear\nresnet50.ram_in1k,224,93.810,6.190,98.400,1.600,25.56,0.875,bicubic\ncspresnet50.ra_in1k,256,93.800,6.200,98.630,1.370,21.62,0.887,bilinear\neca_botnext26ts_256.c1_in1k,256,93.800,6.200,98.500,1.500,10.59,0.950,bicubic\nstarnet_s4.in1k,224,93.800,6.200,98.430,1.570,7.48,0.875,bicubic\nresnet50.tv2_in1k,176,93.790,6.210,98.500,1.500,25.56,0.875,bilinear\nresnet50d.gluon_in1k,224,93.790,6.210,98.390,1.610,25.58,0.875,bicubic\npit_xs_distilled_224.in1k,224,93.780,6.220,98.610,1.390,11.00,0.900,bicubic\nxception65.tf_in1k,299,93.770,6.230,98.420,1.580,39.92,0.903,bicubic\nmobileone_s4.apple_in1k,224,93.770,6.230,98.250,1.750,14.95,0.900,bilinear\nwide_resnet101_2.tv_in1k,224,93.760,6.240,98.520,1.480,126.89,0.875,bilinear\nres2net101_26w_4s.in1k,224,93.750,6.250,98.320,1.680,45.21,0.875,bilinear\nvit_relpos_base_patch32_plus_rpn_256.sw_in1k,256,93.750,6.250,98.100,1.900,119.42,0.900,bicubic\nlegacy_seresnext50_32x4d.in1k,224,93.730,6.270,98.590,1.410,27.56,0.875,bilinear\nlambda_resnet26rpt_256.c1_in1k,256,93.730,6.270,98.490,1.510,10.99,0.940,bicubic\nresnet101.gluon_in1k,224,93.730,6.270,98.400,1.600,44.55,0.875,bicubic\ntf_efficientnet_b1.ap_in1k,240,93.720,6.280,98.370,1.630,7.79,0.882,bicubic\nmambaout_femto.in1k,224,93.700,6.300,98.500,1.500,7.30,1.000,bicubic\ntf_efficientnet_b0.ns_jft_in1k,224,93.690,6.310,98.630,1.370,5.29,0.875,bicubic\nregnety_008_tv.tv2_in1k,224,93.690,6.310,98.490,1.510,6.43,0.965,bicubic\nresnet50s.gluon_in1k,224,93.680,6.320,98.460,1.540,25.68,0.875,bicubic\nresnet101c.gluon_in1k,224,93.680,6.320,98.440,1.560,44.57,0.875,bicubic\nhgnetv2_b1.ssld_stage1_in22k_in1k,224,93.670,6.330,98.
670,1.330,6.34,0.965,bicubic\nresnet152.a3_in1k,160,93.670,6.330,98.230,1.770,60.19,0.950,bicubic\nresnet34d.ra2_in1k,288,93.650,6.350,98.560,1.440,21.82,0.950,bicubic\nresnext50_32x4d.a3_in1k,224,93.650,6.350,98.510,1.490,25.03,0.950,bicubic\nresnet34.ra4_e3600_r224_in1k,288,93.640,6.360,98.470,1.530,21.80,1.000,bicubic\nvit_base_patch32_384.augreg_in1k,384,93.640,6.360,98.390,1.610,88.30,1.000,bicubic\nvit_tiny_patch16_384.augreg_in21k_ft_in1k,384,93.630,6.370,98.600,1.400,5.79,1.000,bicubic\nvit_base_patch16_224.augreg_in1k,224,93.630,6.370,98.240,1.760,86.57,0.900,bicubic\nresnext50_32x4d.tv2_in1k,176,93.620,6.380,98.300,1.700,25.03,0.875,bilinear\nresnet33ts.ra2_in1k,256,93.610,6.390,98.520,1.480,19.68,0.900,bicubic\nrepvit_m0_9.dist_450e_in1k,224,93.610,6.390,98.510,1.490,5.49,0.950,bicubic\nhgnetv2_b0.ssld_stage1_in22k_in1k,288,93.590,6.410,98.700,1.300,6.00,1.000,bicubic\nvisformer_tiny.in1k,224,93.590,6.410,98.510,1.490,10.32,0.900,bicubic\ncait_xxs24_224.fb_dist_in1k,224,93.590,6.410,98.450,1.550,11.96,1.000,bicubic\nhrnet_w44.ms_in1k,224,93.580,6.420,98.680,1.320,67.06,0.875,bilinear\ncoat_tiny.in1k,224,93.580,6.420,98.430,1.570,5.50,0.900,bicubic\nresnet50.a1h_in1k,176,93.570,6.430,98.510,1.490,25.56,0.900,bicubic\nseresnext26t_32x4d.bt_in1k,288,93.570,6.430,98.400,1.600,16.81,0.950,bicubic\nfasternet_t2.in1k,224,93.560,6.440,98.380,1.620,14.98,1.000,bicubic\nregnetx_040.pycls_in1k,224,93.540,6.460,98.570,1.430,22.12,0.875,bicubic\nefficientvit_b1.r224_in1k,224,93.540,6.460,98.330,1.670,9.10,0.950,bicubic\nresnet50d.a3_in1k,224,93.520,6.480,98.460,1.540,25.58,0.950,bicubic\nbotnet26t_256.c1_in1k,256,93.520,6.480,98.320,1.680,12.49,0.950,bicubic\nhrnet_w18.ms_aug_in1k,224,93.510,6.490,98.610,1.390,21.30,0.950,bilinear\nhrnet_w40.ms_in1k,224,93.510,6.490,98.560,1.440,57.56,0.875,bilinear\nhrnet_w32.ms_in1k,224,93.510,6.490,98.440,1.560,41.23,0.875,bilinear\ntf_efficientnet_b1.aa_in1k,240,93.510,6.490,98.360,1.640,7.79,0.882,bicubic\nrepvgg_b2.rvgg_in1k,224
,93.500,6.500,98.740,1.260,89.02,0.875,bilinear\ninception_v3.gluon_in1k,299,93.500,6.500,98.540,1.460,23.83,0.875,bicubic\nrepghostnet_200.in1k,224,93.500,6.500,98.540,1.460,9.80,0.875,bicubic\nresnet32ts.ra2_in1k,256,93.500,6.500,98.500,1.500,17.96,0.900,bicubic\ndla102x.in1k,224,93.490,6.510,98.490,1.510,26.31,0.875,bilinear\nlegacy_xception.tf_in1k,299,93.470,6.530,98.530,1.470,22.86,0.897,bicubic\nxcit_nano_12_p8_384.fb_dist_in1k,384,93.460,6.540,98.520,1.480,3.05,1.000,bicubic\nregnety_032.pycls_in1k,224,93.440,6.560,98.630,1.370,19.44,0.875,bicubic\nfbnetv3_d.ra2_in1k,224,93.440,6.560,98.440,1.560,10.31,0.950,bilinear\nlegacy_seresnet152.in1k,224,93.440,6.560,98.360,1.640,66.82,0.875,bilinear\nseresnext26d_32x4d.bt_in1k,288,93.440,6.560,98.340,1.660,16.81,0.950,bicubic\nrepvit_m0_9.dist_300e_in1k,224,93.430,6.570,98.730,1.270,5.49,0.950,bicubic\nnf_regnet_b1.ra2_in1k,256,93.420,6.580,98.600,1.400,10.22,0.900,bicubic\nxception41.tf_in1k,299,93.420,6.580,98.410,1.590,26.97,0.903,bicubic\nres2net50_26w_6s.in1k,224,93.420,6.580,98.260,1.740,37.05,0.875,bilinear\nmixnet_l.ft_in1k,224,93.420,6.580,98.230,1.770,7.33,0.875,bicubic\nres2net50_26w_8s.in1k,224,93.410,6.590,98.180,1.820,48.40,0.875,bilinear\nxcit_tiny_12_p16_224.fb_dist_in1k,224,93.400,6.600,98.550,1.450,6.72,1.000,bicubic\ndpn68b.ra_in1k,224,93.400,6.600,98.220,1.780,12.61,0.950,bicubic\nconvnextv2_femto.fcmae_ft_in1k,224,93.390,6.610,98.480,1.520,5.23,0.875,bicubic\nresnetv2_34d.ra4_e3600_r224_in1k,224,93.390,6.610,98.380,1.620,21.82,0.900,bicubic\nlevit_128.fb_dist_in1k,224,93.380,6.620,98.380,1.620,9.21,0.900,bicubic\nlevit_conv_128.fb_dist_in1k,224,93.380,6.620,98.370,1.630,9.21,0.900,bicubic\nresnest26d.gluon_in1k,224,93.360,6.640,98.630,1.370,17.07,0.875,bilinear\ncs3darknet_m.c2ns_in1k,288,93.350,6.650,98.620,1.380,9.31,0.950,bicubic\nresnet50.ra_in1k,224,93.350,6.650,98.550,1.450,25.56,0.875,bicubic\ninception_v3.tf_in1k,299,93.350,6.650,98.060,1.940,23.83,0.875,bicubic\ndla169.in1k,224,93.330,6
.670,98.600,1.400,53.39,0.875,bilinear\nrepvgg_b1.rvgg_in1k,224,93.330,6.670,98.510,1.490,57.42,0.875,bilinear\nresnet152.tv_in1k,224,93.330,6.670,98.380,1.620,60.19,0.875,bilinear\nselecsls60b.in1k,224,93.330,6.670,98.270,1.730,32.77,0.875,bicubic\nrepvit_m1.dist_in1k,224,93.320,6.680,98.420,1.580,5.49,0.950,bicubic\nbat_resnext26ts.ch_in1k,256,93.320,6.680,98.350,1.650,10.73,0.900,bicubic\nlegacy_seresnet101.in1k,224,93.310,6.690,98.480,1.520,49.33,0.875,bilinear\nfbnetv3_b.ra2_in1k,224,93.310,6.690,98.440,1.560,8.60,0.950,bilinear\nmobilevitv2_100.cvnets_in1k,256,93.290,6.710,98.250,1.750,4.90,0.888,bicubic\ntf_mixnet_l.in1k,224,93.290,6.710,98.030,1.970,7.33,0.875,bicubic\nefficientnet_b1.ft_in1k,256,93.280,6.720,98.240,1.760,7.79,1.000,bicubic\nresnet50.bt_in1k,224,93.260,6.740,98.390,1.610,25.56,0.875,bicubic\ncoat_lite_tiny.in1k,224,93.260,6.740,98.270,1.730,5.72,0.900,bicubic\nefficientnet_b0.ra4_e3600_r224_in1k,224,93.230,6.770,98.370,1.630,5.29,0.900,bicubic\nmobilevit_s.cvnets_in1k,256,93.210,6.790,98.470,1.530,5.58,0.900,bicubic\nwide_resnet50_2.tv_in1k,224,93.210,6.790,98.320,1.680,68.88,0.875,bilinear\ndla60_res2next.in1k,224,93.200,6.800,98.410,1.590,17.03,0.875,bilinear\nhrnet_w30.ms_in1k,224,93.200,6.800,98.410,1.590,37.71,0.875,bilinear\nefficientnet_es.ra_in1k,224,93.200,6.800,98.400,1.600,5.44,0.875,bicubic\nresnet26t.ra2_in1k,320,93.180,6.820,98.500,1.500,16.01,1.000,bicubic\ngcresnext26ts.ch_in1k,288,93.180,6.820,98.300,1.700,10.48,1.000,bicubic\nese_vovnet19b_dw.ra_in1k,288,93.170,6.830,98.270,1.730,6.54,0.950,bicubic\ndla60_res2net.in1k,224,93.150,6.850,98.410,1.590,20.85,0.875,bilinear\ntf_efficientnetv2_b1.in1k,192,93.150,6.850,98.100,1.900,8.14,0.882,bicubic\nregnetx_032.pycls_in1k,224,93.140,6.860,98.390,1.610,15.30,0.875,bicubic\nregnety_016.pycls_in1k,224,93.140,6.860,98.340,1.660,11.20,0.875,bicubic\ntf_efficientnetv2_b0.in1k,224,93.130,6.870,98.360,1.640,7.14,0.875,bicubic\npit_xs_224.in1k,224,93.130,6.870,98.320,1.680,10.62,0.900,bic
ubic\nresnet34.a1_in1k,288,93.110,6.890,98.300,1.700,21.80,1.000,bicubic\ndla60x.in1k,224,93.100,6.900,98.490,1.510,17.35,0.875,bilinear\neca_resnext26ts.ch_in1k,288,93.100,6.900,98.430,1.570,10.30,1.000,bicubic\necaresnet50t.a3_in1k,160,93.100,6.900,98.270,1.730,25.57,0.950,bicubic\nconvnext_femto_ols.d1_in1k,224,93.090,6.910,98.400,1.600,5.23,0.875,bicubic\nconvnext_atto_ols.a2_in1k,288,93.080,6.920,98.470,1.530,3.70,0.950,bicubic\ntf_efficientnet_b1.in1k,240,93.080,6.920,98.290,1.710,7.79,0.882,bicubic\ndla102.in1k,224,93.070,6.930,98.550,1.450,33.27,0.875,bilinear\nresnet101.a3_in1k,160,93.040,6.960,98.150,1.850,44.55,0.950,bicubic\nswiftformer_s.dist_in1k,224,93.030,6.970,98.330,1.670,6.09,0.950,bicubic\nrexnet_100.nav_in1k,224,93.030,6.970,98.190,1.810,4.80,0.875,bicubic\nresnet50c.gluon_in1k,224,93.020,6.980,98.360,1.640,25.58,0.875,bicubic\nghostnetv2_160.in1k,224,93.020,6.980,98.230,1.770,12.39,0.875,bicubic\nrepvgg_b1g4.rvgg_in1k,224,93.010,6.990,98.410,1.590,39.97,0.875,bilinear\nselecsls60.in1k,224,93.010,6.990,98.310,1.690,30.67,0.875,bicubic\ncs3darknet_focus_m.c2ns_in1k,288,93.000,7.000,98.370,1.630,9.30,0.950,bicubic\nseresnext26ts.ch_in1k,288,92.980,7.020,98.450,1.550,10.39,1.000,bicubic\nlegacy_seresnet50.in1k,224,92.980,7.020,98.200,1.800,28.09,0.875,bilinear\nhardcorenas_f.miil_green_in1k,224,92.980,7.020,98.140,1.860,8.20,0.875,bilinear\nconvnextv2_atto.fcmae_ft_in1k,288,92.980,7.020,98.060,1.940,3.71,0.950,bicubic\nresnetv2_34.ra4_e3600_r224_in1k,224,92.970,7.030,98.250,1.750,21.80,0.900,bicubic\npoolformerv2_s12.sail_in1k,224,92.960,7.040,98.360,1.640,11.89,1.000,bicubic\ntf_efficientnet_em.in1k,240,92.950,7.050,98.210,1.790,6.90,0.882,bicubic\nconvnext_femto.d1_in1k,224,92.940,7.060,98.260,1.740,5.22,0.875,bicubic\ncrossvit_9_dagger_240.in1k,240,92.940,7.060,98.210,1.790,8.78,0.875,bicubic\nresnetrs50.tf_in1k,160,92.930,7.070,98.250,1.750,35.69,0.910,bicubic\nmobileone_s3.apple_in1k,224,92.910,7.090,98.180,1.820,10.17,0.900,bilinear\nres2next
50.in1k,224,92.900,7.100,98.210,1.790,24.67,0.875,bilinear\ninception_v3.tf_adv_in1k,299,92.900,7.100,98.140,1.860,23.83,0.875,bicubic\ngmixer_24_224.ra3_in1k,224,92.870,7.130,97.890,2.110,24.72,0.875,bicubic\nresnet34.ra4_e3600_r224_in1k,224,92.860,7.140,98.080,1.920,21.80,0.900,bicubic\nseresnext26t_32x4d.bt_in1k,224,92.840,7.160,98.350,1.650,16.81,0.875,bicubic\nhgnetv2_b0.ssld_stage2_ft_in1k,224,92.830,7.170,98.430,1.570,6.00,0.965,bicubic\nmobileone_s2.apple_in1k,224,92.830,7.170,98.250,1.750,7.88,0.900,bilinear\ntf_efficientnet_cc_b0_8e.in1k,224,92.830,7.170,98.160,1.840,24.01,0.875,bicubic\ndpn68b.mx_in1k,224,92.820,7.180,98.150,1.850,12.61,0.875,bicubic\nresmlp_12_224.fb_distilled_in1k,224,92.820,7.180,98.130,1.870,15.35,0.875,bicubic\nresnet101.tv_in1k,224,92.810,7.190,98.230,1.770,44.55,0.875,bilinear\nefficientnet_b1_pruned.in1k,240,92.800,7.200,98.050,1.950,6.33,0.882,bicubic\nmobilenetv1_125.ra4_e3600_r224_in1k,256,92.790,7.210,98.180,1.820,6.27,1.000,bicubic\ninception_v3.tv_in1k,299,92.780,7.220,97.950,2.050,23.83,0.875,bicubic\nhrnet_w18_small_v2.gluon_in1k,224,92.770,7.230,98.420,1.580,15.60,0.875,bicubic\ngcresnext26ts.ch_in1k,256,92.770,7.230,98.280,1.720,10.48,0.900,bicubic\nresnet50.a3_in1k,224,92.770,7.230,98.170,1.830,25.56,0.950,bicubic\nconvnext_atto.d2_in1k,288,92.770,7.230,98.070,1.930,3.70,0.950,bicubic\ndensenet201.tv_in1k,224,92.760,7.240,98.230,1.770,20.01,0.875,bicubic\nresnext50_32x4d.tv_in1k,224,92.750,7.250,98.290,1.710,25.03,0.875,bilinear\nresnet26t.ra2_in1k,256,92.750,7.250,98.250,1.750,16.01,0.940,bicubic\nresnet34.a2_in1k,288,92.750,7.250,97.990,2.010,21.80,1.000,bicubic\nseresnext26d_32x4d.bt_in1k,224,92.740,7.260,98.140,1.860,16.81,0.875,bicubic\nresnet34d.ra2_in1k,224,92.730,7.270,98.280,1.720,21.82,0.875,bicubic\nseresnext26ts.ch_in1k,256,92.720,7.280,98.300,1.700,10.39,0.900,bicubic\nres2net50_14w_8s.in1k,224,92.710,7.290,98.180,1.820,25.06,0.875,bilinear\nresnext50_32x4d.a3_in1k,160,92.690,7.310,97.990,2.010,25.03,0.950,
bicubic\ntf_efficientnet_lite2.in1k,260,92.680,7.320,98.240,1.760,6.09,0.890,bicubic\ncs3darknet_m.c2ns_in1k,256,92.670,7.330,98.480,1.520,9.31,0.887,bicubic\nefficientnet_b0.ra_in1k,224,92.670,7.330,98.080,1.920,5.29,0.875,bicubic\npoolformer_s12.sail_in1k,224,92.650,7.350,98.180,1.820,11.92,0.900,bicubic\nlegacy_seresnext26_32x4d.in1k,224,92.640,7.360,98.120,1.880,16.79,0.875,bicubic\neca_resnext26ts.ch_in1k,256,92.630,7.370,98.260,1.740,10.30,0.900,bicubic\ndensenetblur121d.ra_in1k,288,92.630,7.370,98.240,1.760,8.00,0.950,bicubic\nshvit_s3.in1k,224,92.630,7.370,98.150,1.850,14.25,0.875,bicubic\ntf_efficientnet_cc_b0_4e.in1k,224,92.630,7.370,98.090,1.910,13.31,0.875,bicubic\nstarnet_s3.in1k,224,92.620,7.380,98.250,1.750,5.75,0.875,bicubic\ntf_efficientnet_lite1.in1k,240,92.590,7.410,98.040,1.960,5.42,0.882,bicubic\nxcit_tiny_12_p16_224.fb_in1k,224,92.580,7.420,98.250,1.750,6.72,1.000,bicubic\nregnetx_008.tv2_in1k,224,92.580,7.420,98.180,1.820,7.26,0.965,bicubic\nhardcorenas_e.miil_green_in1k,224,92.570,7.430,98.120,1.880,8.07,0.875,bilinear\nres2net50_48w_2s.in1k,224,92.560,7.440,98.070,1.930,25.29,0.875,bilinear\ndensenet161.tv_in1k,224,92.550,7.450,98.300,1.700,28.68,0.875,bicubic\ndensenet121.ra_in1k,288,92.550,7.450,98.210,1.790,7.98,0.950,bicubic\nresnet50.gluon_in1k,224,92.550,7.450,98.190,1.810,25.56,0.875,bicubic\nfastvit_t8.apple_dist_in1k,256,92.540,7.460,98.030,1.970,4.03,0.900,bicubic\nres2net50_26w_4s.in1k,224,92.510,7.490,98.050,1.950,25.70,0.875,bilinear\nresnet26d.bt_in1k,288,92.480,7.520,98.220,1.780,16.01,0.950,bicubic\nmobilenetv2_120d.ra_in1k,224,92.480,7.520,98.040,1.960,5.83,0.875,bicubic\nhgnetv2_b0.ssld_stage1_in22k_in1k,224,92.470,7.530,98.420,1.580,6.00,0.965,bicubic\ntinynet_a.in1k,192,92.470,7.530,98.080,1.920,6.19,0.875,bicubic\nhardcorenas_d.miil_green_in1k,224,92.460,7.540,98.030,1.970,7.50,0.875,bilinear\nresnet50d.a3_in1k,160,92.450,7.550,98.040,1.960,25.58,0.950,bicubic\nefficientvit_m5.r224_in1k,224,92.440,7.560,97.990,2.010,12.4
7,0.875,bicubic\nmixnet_m.ft_in1k,224,92.430,7.570,97.860,2.140,5.01,0.875,bicubic\nefficientnet_b1.ft_in1k,224,92.430,7.570,97.820,2.180,7.79,0.875,bicubic\nconvmixer_1024_20_ks9_p14.in1k,224,92.420,7.580,98.280,1.720,24.38,0.960,bicubic\nresnet34.bt_in1k,288,92.410,7.590,98.150,1.850,21.80,0.950,bicubic\nskresnet34.ra_in1k,224,92.380,7.620,98.140,1.860,22.28,0.875,bicubic\ncs3darknet_focus_m.c2ns_in1k,256,92.360,7.640,98.380,1.620,9.30,0.887,bicubic\nhrnet_w18.ms_in1k,224,92.360,7.640,98.310,1.690,21.30,0.875,bilinear\nrepghostnet_150.in1k,224,92.360,7.640,98.060,1.940,6.58,0.875,bicubic\ntf_mixnet_m.in1k,224,92.340,7.660,97.880,2.120,5.01,0.875,bicubic\nghostnetv2_130.in1k,224,92.320,7.680,98.070,1.930,8.96,0.875,bicubic\nmobilenetv3_large_100.ra4_e3600_r224_in1k,256,92.310,7.690,98.020,1.980,5.48,1.000,bicubic\nselecsls42b.in1k,224,92.300,7.700,98.140,1.860,32.46,0.875,bicubic\ntf_efficientnetv2_b0.in1k,192,92.290,7.710,98.210,1.790,7.14,0.875,bicubic\nese_vovnet19b_dw.ra_in1k,224,92.280,7.720,98.110,1.890,6.54,0.875,bicubic\ntf_efficientnet_b0.aa_in1k,224,92.280,7.720,97.990,2.010,5.29,0.875,bicubic\ntf_efficientnet_b0.ap_in1k,224,92.260,7.740,98.020,1.980,5.29,0.875,bicubic\nmobilenetv3_large_100.miil_in21k_ft_in1k,224,92.260,7.740,97.650,2.350,5.48,0.875,bilinear\nmobilenetv1_100h.ra4_e3600_r224_in1k,256,92.230,7.770,98.000,2.000,5.28,0.950,bicubic\nresmlp_12_224.fb_in1k,224,92.210,7.790,98.150,1.850,15.35,0.875,bicubic\ndla60.in1k,224,92.210,7.790,98.100,1.900,22.04,0.875,bilinear\nregnetx_016.pycls_in1k,224,92.180,7.820,98.190,1.810,9.19,0.875,bicubic\nfastvit_t8.apple_in1k,256,92.160,7.840,97.890,2.110,4.03,0.900,bicubic\ngernet_s.idstcv_in1k,224,92.150,7.850,98.210,1.790,8.17,0.875,bilinear\nresnext26ts.ra2_in1k,288,92.150,7.850,98.010,1.990,10.30,1.000,bicubic\nconvnextv2_atto.fcmae_ft_in1k,224,92.140,7.860,97.740,2.260,3.71,0.875,bicubic\nxcit_nano_12_p8_224.fb_dist_in1k,224,92.110,7.890,98.170,1.830,3.05,1.000,bicubic\nresnet34.a1_in1k,224,92.110,7.890
,97.790,2.210,21.80,0.950,bicubic\nseresnet50.a3_in1k,224,92.100,7.900,98.040,1.960,28.09,0.950,bicubic\nmobilenetv1_100.ra4_e3600_r224_in1k,256,92.100,7.900,97.820,2.180,4.23,0.950,bicubic\nvit_small_patch32_224.augreg_in21k_ft_in1k,224,92.080,7.920,98.250,1.750,22.88,0.900,bicubic\ntf_efficientnet_b0.in1k,224,92.080,7.920,97.920,2.080,5.29,0.875,bicubic\nresnet18d.ra4_e3600_r224_in1k,288,92.050,7.950,98.060,1.940,11.71,1.000,bicubic\nresnet26d.bt_in1k,224,92.050,7.950,97.960,2.040,16.01,0.875,bicubic\nvit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k,384,92.040,7.960,98.290,1.710,6.36,1.000,bicubic\nresnet26.bt_in1k,288,92.030,7.970,98.200,1.800,16.00,0.950,bicubic\nhardcorenas_c.miil_green_in1k,224,92.030,7.970,97.840,2.160,5.52,0.875,bilinear\nmobilenetv1_125.ra4_e3600_r224_in1k,224,91.990,8.010,98.030,1.970,6.27,0.900,bicubic\ndpn68.mx_in1k,224,91.990,8.010,98.010,1.990,12.61,0.875,bicubic\ntf_efficientnet_es.in1k,224,91.980,8.020,97.870,2.130,5.44,0.875,bicubic\ndensenet169.tv_in1k,224,91.960,8.040,98.090,1.910,14.15,0.875,bicubic\nefficientformerv2_s0.snap_dist_in1k,224,91.960,8.040,97.890,2.110,3.60,0.950,bicubic\nmixnet_s.ft_in1k,224,91.950,8.050,97.680,2.320,4.13,0.875,bicubic\nlevit_128s.fb_dist_in1k,224,91.940,8.060,98.070,1.930,7.78,0.900,bicubic\nrepvgg_a2.rvgg_in1k,224,91.930,8.070,98.150,1.850,28.21,0.875,bilinear\nlevit_conv_128s.fb_dist_in1k,224,91.930,8.070,98.070,1.930,7.78,0.900,bicubic\nmobilenetv3_large_100.ra4_e3600_r224_in1k,224,91.930,8.070,97.920,2.080,5.48,0.950,bicubic\ndensenetblur121d.ra_in1k,224,91.920,8.080,98.100,1.900,8.00,0.875,bicubic\nconvnext_atto_ols.a2_in1k,224,91.920,8.080,97.960,2.040,3.70,0.875,bicubic\nresnetv2_18d.ra4_e3600_r224_in1k,288,91.900,8.100,98.130,1.870,11.71,1.000,bicubic\nresnet50.tv_in1k,224,91.890,8.110,98.050,1.950,25.56,0.875,bilinear\nxcit_nano_12_p16_384.fb_dist_in1k,384,91.890,8.110,98.040,1.960,3.05,1.000,bicubic\nrepghostnet_130.in1k,224,91.880,8.120,97.930,2.070,5.48,0.875,bicubic\nghostnetv3_100.in1k,224,
91.880,8.120,97.920,2.080,8.13,0.875,bicubic\nresnext26ts.ra2_in1k,256,91.870,8.130,97.900,2.100,10.30,0.900,bicubic\nhardcorenas_b.miil_green_in1k,224,91.840,8.160,97.770,2.230,5.18,0.875,bilinear\nmobilenetv2_140.ra_in1k,224,91.830,8.170,97.880,2.120,6.11,0.875,bicubic\nmixer_b16_224.goog_in21k_ft_in1k,224,91.830,8.170,97.210,2.790,59.88,0.875,bicubic\nmobilenetv4_conv_small.e3600_r256_in1k,320,91.810,8.190,97.640,2.360,3.77,1.000,bicubic\nconvnext_atto.d2_in1k,224,91.780,8.220,97.940,2.060,3.70,0.875,bicubic\nvit_tiny_patch16_224.augreg_in21k_ft_in1k,224,91.760,8.240,98.030,1.970,5.72,0.900,bicubic\nmobilevitv2_075.cvnets_in1k,256,91.750,8.250,97.860,2.140,2.87,0.888,bicubic\nedgenext_x_small.in1k,288,91.740,8.260,97.590,2.410,2.34,1.000,bicubic\nregnety_008.pycls_in1k,224,91.730,8.270,98.180,1.820,6.26,0.875,bicubic\nresnet34.a2_in1k,224,91.730,8.270,97.770,2.230,21.80,0.950,bicubic\nresnest14d.gluon_in1k,224,91.720,8.280,97.870,2.130,10.61,0.875,bilinear\ndensenet121.ra_in1k,224,91.620,8.380,98.080,1.920,7.98,0.875,bicubic\nregnety_004.tv2_in1k,224,91.620,8.380,97.910,2.090,4.34,0.965,bicubic\nfasternet_t1.in1k,224,91.620,8.380,97.730,2.270,7.60,1.000,bicubic\ntf_mixnet_s.in1k,224,91.520,8.480,97.590,2.410,4.13,0.875,bicubic\nshvit_s2.in1k,224,91.450,8.550,97.830,2.170,11.48,0.875,bicubic\nresnet50.a3_in1k,160,91.450,8.550,97.630,2.370,25.56,0.950,bicubic\nregnety_006.pycls_in1k,224,91.440,8.560,97.760,2.240,6.06,0.875,bicubic\nrepvgg_b0.rvgg_in1k,224,91.400,8.600,97.980,2.020,15.82,0.875,bilinear\nmobilenetv1_100h.ra4_e3600_r224_in1k,224,91.380,8.620,97.800,2.200,5.28,0.875,bicubic\ninception_next_atto.sail_in1k,224,91.380,8.620,97.680,2.320,4.16,0.875,bicubic\nhardcorenas_a.miil_green_in1k,224,91.360,8.640,97.840,2.160,5.26,0.875,bilinear\nmobileone_s1.apple_in1k,224,91.360,8.640,97.770,2.230,4.83,0.900,bilinear\nsemnasnet_100.rmsp_in1k,224,91.360,8.640,97.540,2.460,3.89,0.875,bicubic\nmobilenetv3_large_100.ra_in1k,224,91.340,8.660,97.700,2.300,5.48,0.875,bic
ubic\nresnetv2_18.ra4_e3600_r224_in1k,288,91.330,8.670,97.860,2.140,11.69,1.000,bicubic\nmobilenetv3_rw.rmsp_in1k,224,91.320,8.680,97.630,2.370,5.48,0.875,bicubic\ntf_mobilenetv3_large_100.in1k,224,91.210,8.790,97.660,2.340,5.48,0.875,bilinear\nefficientnet_es_pruned.in1k,224,91.200,8.800,97.750,2.250,5.44,0.875,bicubic\nvit_base_patch32_224.augreg_in1k,224,91.200,8.800,97.380,2.620,88.22,0.900,bicubic\nhrnet_w18_small_v2.ms_in1k,224,91.190,8.810,97.910,2.090,15.60,0.875,bilinear\nmobilenetv1_100.ra4_e3600_r224_in1k,224,91.190,8.810,97.780,2.220,4.23,0.875,bicubic\nresnet26.bt_in1k,224,91.180,8.820,97.770,2.230,16.00,0.875,bicubic\nresnet34.bt_in1k,224,91.170,8.830,97.550,2.450,21.80,0.875,bicubic\nefficientnet_lite0.ra_in1k,224,91.140,8.860,97.640,2.360,4.65,0.875,bicubic\nedgenext_x_small.in1k,256,91.140,8.860,97.550,2.450,2.34,0.900,bicubic\nregnetx_008.pycls_in1k,224,91.100,8.900,97.710,2.290,7.26,0.875,bicubic\ntf_efficientnet_lite0.in1k,224,91.100,8.900,97.540,2.460,4.65,0.875,bicubic\nswiftformer_xs.dist_in1k,224,91.060,8.940,97.650,2.350,3.48,0.950,bicubic\nmobilenetv2_110d.ra_in1k,224,91.000,9.000,97.550,2.450,4.52,0.875,bicubic\nseresnet50.a3_in1k,160,91.000,9.000,97.410,2.590,28.09,0.950,bicubic\nmobilenetv4_conv_small.e2400_r224_in1k,256,90.990,9.010,97.660,2.340,3.77,0.950,bicubic\nxcit_nano_12_p8_224.fb_in1k,224,90.970,9.030,97.790,2.210,3.05,1.000,bicubic\nresnet34.gluon_in1k,224,90.970,9.030,97.650,2.350,21.80,0.875,bicubic\nlegacy_seresnet34.in1k,224,90.920,9.080,97.570,2.430,21.96,0.875,bilinear\ntinynet_b.in1k,188,90.910,9.090,97.670,2.330,3.73,0.875,bicubic\ndensenet121.tv_in1k,224,90.900,9.100,97.730,2.270,7.98,0.875,bicubic\nstarnet_s2.in1k,224,90.900,9.100,97.600,2.400,3.68,0.875,bicubic\nghostnetv2_100.in1k,224,90.890,9.110,97.700,2.300,6.16,0.875,bicubic\nresnetv2_18d.ra4_e3600_r224_in1k,224,90.870,9.130,97.540,2.460,11.71,0.900,bicubic\nmobilevit_xs.cvnets_in1k,256,90.850,9.150,97.940,2.060,2.32,0.900,bicubic\nfbnetc_100.rmsp_in1k,224,90.78
0,9.220,97.220,2.780,5.57,0.875,bilinear\ndla34.in1k,224,90.770,9.230,97.650,2.350,15.74,0.875,bilinear\npit_ti_distilled_224.in1k,224,90.770,9.230,97.650,2.350,5.10,0.900,bicubic\ndeit_tiny_distilled_patch16_224.fb_in1k,224,90.760,9.240,97.570,2.430,5.91,0.900,bicubic\nmobilenetv4_conv_small.e3600_r256_in1k,256,90.760,9.240,97.390,2.610,3.77,0.950,bicubic\nrepghostnet_111.in1k,224,90.750,9.250,97.460,2.540,4.54,0.875,bicubic\nresnet18d.ra4_e3600_r224_in1k,224,90.690,9.310,97.570,2.430,11.71,0.900,bicubic\ncrossvit_9_240.in1k,240,90.660,9.340,97.740,2.260,8.55,0.875,bicubic\nresnet18.fb_swsl_ig1b_ft_in1k,224,90.660,9.340,97.700,2.300,11.69,0.875,bilinear\nmnasnet_100.rmsp_in1k,224,90.650,9.350,97.510,2.490,4.38,0.875,bicubic\nmobilenetv4_conv_small.e1200_r224_in1k,256,90.650,9.350,97.510,2.490,3.77,0.950,bicubic\nconvit_tiny.fb_in1k,224,90.620,9.380,97.740,2.260,5.71,0.875,bicubic\nefficientvit_m4.r224_in1k,224,90.600,9.400,97.590,2.410,8.80,0.875,bicubic\nregnetx_004_tv.tv2_in1k,224,90.600,9.400,97.580,2.420,5.50,0.965,bicubic\nrepvgg_a1.rvgg_in1k,224,90.590,9.410,97.640,2.360,14.09,0.875,bilinear\nregnety_004.pycls_in1k,224,90.510,9.490,97.580,2.420,4.34,0.875,bicubic\nregnetx_006.pycls_in1k,224,90.370,9.630,97.420,2.580,6.20,0.875,bicubic\nrepghostnet_100.in1k,224,90.320,9.680,97.490,2.510,4.07,0.875,bicubic\nstarnet_s1.in1k,224,90.320,9.680,97.420,2.580,2.87,0.875,bicubic\nresnet18d.ra2_in1k,288,90.310,9.690,97.560,2.440,11.71,0.950,bicubic\nspnasnet_100.rmsp_in1k,224,90.310,9.690,97.190,2.810,4.42,0.875,bilinear\nresnet18.fb_ssl_yfcc100m_ft_in1k,224,90.250,9.750,97.560,2.440,11.69,0.875,bilinear\ncrossvit_tiny_240.in1k,240,90.240,9.760,97.610,2.390,7.01,0.875,bicubic\nghostnet_100.in1k,224,90.200,9.800,97.250,2.750,5.18,0.875,bicubic\nresnetv2_18.ra4_e3600_r224_in1k,224,90.140,9.860,97.310,2.690,11.69,0.900,bicubic\nconvnext_zepto_rms_ols.ra4_e3600_r224_in1k,224,90.130,9.870,97.270,2.730,2.16,0.900,bicubic\nvgg19_bn.tv_in1k,224,90.100,9.900,97.580,2.420,143.68,
0.875,bilinear\nsemnasnet_075.rmsp_in1k,224,90.090,9.910,97.430,2.570,2.91,0.875,bicubic\nvgg16_bn.tv_in1k,224,90.090,9.910,97.370,2.630,138.37,0.875,bilinear\nmobilenetv4_conv_small.e2400_r224_in1k,224,90.070,9.930,97.280,2.720,3.77,0.875,bicubic\nconvnext_zepto_rms.ra4_e3600_r224_in1k,224,90.020,9.980,97.300,2.700,2.16,0.875,bicubic\ncs3darknet_focus_s.ra4_e3600_r256_in1k,320,89.990,10.010,97.470,2.530,3.27,1.000,bicubic\npit_ti_224.in1k,224,89.950,10.050,97.440,2.560,4.85,0.900,bicubic\nresnet34.a3_in1k,224,89.940,10.060,97.170,2.830,21.80,0.950,bicubic\nresnet34.tv_in1k,224,89.930,10.070,97.340,2.660,21.80,0.875,bilinear\nmobilenetv4_conv_small.e1200_r224_in1k,224,89.900,10.100,97.230,2.770,3.77,0.875,bicubic\nefficientvit_m3.r224_in1k,224,89.880,10.120,97.530,2.470,6.90,0.875,bicubic\nvit_base_patch32_224.sam_in1k,224,89.750,10.250,96.980,3.020,88.22,0.900,bicubic\nresnet18.a1_in1k,288,89.720,10.280,97.100,2.900,11.69,1.000,bicubic\ndeit_tiny_patch16_224.fb_in1k,224,89.690,10.310,97.500,2.500,5.72,0.900,bicubic\nshvit_s1.in1k,224,89.690,10.310,97.230,2.770,6.33,0.875,bicubic\ntf_mobilenetv3_large_075.in1k,224,89.670,10.330,97.220,2.780,3.99,0.875,bilinear\nskresnet18.ra_in1k,224,89.660,10.340,97.240,2.760,11.96,0.875,bicubic\nxcit_nano_12_p16_224.fb_dist_in1k,224,89.660,10.340,97.090,2.910,3.05,1.000,bicubic\nmobilenetv2_100.ra_in1k,224,89.610,10.390,97.160,2.840,3.50,0.875,bicubic\nresnet18.a2_in1k,288,89.580,10.420,96.960,3.040,11.69,1.000,bicubic\nhrnet_w18_small.gluon_in1k,224,89.470,10.530,97.030,2.970,13.19,0.875,bicubic\nresnet18d.ra2_in1k,224,89.330,10.670,97.180,2.820,11.71,0.875,bicubic\nrepvgg_a0.rvgg_in1k,224,89.260,10.740,96.930,3.070,9.11,0.875,bilinear\nvit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k,224,89.230,10.770,97.220,2.780,6.34,0.900,bicubic\ncs3darknet_focus_s.ra4_e3600_r256_in1k,256,89.180,10.820,96.900,3.100,3.27,0.887,bicubic\nhrnet_w18_small.ms_in1k,224,89.050,10.950,97.080,2.920,13.19,0.875,bilinear\nvgg19.tv_in1k,224,89.050,10.950,96.88
0,3.120,143.67,0.875,bilinear\nresnet14t.c3_in1k,224,89.000,11.000,96.700,3.300,10.08,0.950,bicubic\nrepghostnet_080.in1k,224,88.970,11.030,96.770,3.230,3.28,0.875,bicubic\ntf_mobilenetv3_large_minimal_100.in1k,224,88.950,11.050,96.870,3.130,3.92,0.875,bilinear\nregnetx_004.pycls_in1k,224,88.920,11.080,97.100,2.900,5.16,0.875,bicubic\nlegacy_seresnet18.in1k,224,88.890,11.110,96.950,3.050,11.78,0.875,bicubic\nedgenext_xx_small.in1k,288,88.870,11.130,96.680,3.320,1.33,1.000,bicubic\nvgg13_bn.tv_in1k,224,88.780,11.220,96.970,3.030,133.05,0.875,bilinear\npvt_v2_b0.in1k,224,88.780,11.220,96.870,3.130,3.67,0.900,bicubic\nlcnet_100.ra2_in1k,224,88.760,11.240,96.710,3.290,2.95,0.875,bicubic\nxcit_nano_12_p16_224.fb_in1k,224,88.600,11.400,96.830,3.170,3.05,1.000,bicubic\nvgg16.tv_in1k,224,88.540,11.460,96.800,3.200,138.36,0.875,bilinear\nefficientvit_m2.r224_in1k,224,88.450,11.550,96.910,3.090,4.19,0.875,bicubic\nresnet18.a1_in1k,224,88.440,11.560,96.650,3.350,11.69,0.950,bicubic\nedgenext_xx_small.in1k,256,88.400,11.600,96.530,3.470,1.33,0.900,bicubic\nresnet18.gluon_in1k,224,88.370,11.630,96.700,3.300,11.69,0.875,bicubic\nmobileone_s0.apple_in1k,224,88.320,11.680,96.430,3.570,5.29,0.875,bilinear\nresnet14t.c3_in1k,176,88.290,11.710,96.370,3.630,10.08,0.875,bicubic\nfasternet_t0.in1k,224,88.270,11.730,96.520,3.480,3.91,1.000,bicubic\nmobilevitv2_050.cvnets_in1k,256,88.230,11.770,96.960,3.040,1.37,0.888,bicubic\nresnet18.a2_in1k,224,88.030,11.970,96.420,3.580,11.69,0.950,bicubic\nefficientvit_b0.r224_in1k,224,88.000,12.000,96.080,3.920,3.41,0.950,bicubic\nresnet34.a3_in1k,160,87.900,12.100,96.470,3.530,21.80,0.950,bicubic\ntinynet_c.in1k,184,87.800,12.200,96.380,3.620,2.46,0.875,bicubic\nvgg11_bn.tv_in1k,224,87.500,12.500,96.810,3.190,132.87,0.875,bilinear\nresnet18.tv_in1k,224,87.380,12.620,96.270,3.730,11.69,0.875,bilinear\nregnety_002.pycls_in1k,224,87.350,12.650,96.600,3.400,3.16,0.875,bicubic\nmobilevit_xxs.cvnets_in1k,256,87.150,12.850,96.100,3.900,1.27,0.900,bicubic\n
mixer_l16_224.goog_in21k_ft_in1k,224,87.150,12.850,93.530,6.470,208.20,0.875,bicubic\nvgg13.tv_in1k,224,87.020,12.980,96.310,3.690,133.05,0.875,bilinear\nefficientvit_m1.r224_in1k,224,86.800,13.200,96.010,3.990,2.98,0.875,bicubic\nvgg11.tv_in1k,224,86.570,13.430,96.310,3.690,132.86,0.875,bilinear\nrepghostnet_058.in1k,224,86.500,13.500,95.900,4.100,2.55,0.875,bicubic\nresnet18.a3_in1k,224,86.460,13.540,95.890,4.110,11.69,0.950,bicubic\ndla60x_c.in1k,224,86.330,13.670,96.090,3.910,1.32,0.875,bilinear\nresnet10t.c3_in1k,224,86.250,13.750,95.760,4.240,5.44,0.950,bicubic\nregnetx_002.pycls_in1k,224,86.200,13.800,95.970,4.030,2.68,0.875,bicubic\nlcnet_075.ra2_in1k,224,86.030,13.970,95.680,4.320,2.36,0.875,bicubic\ntf_mobilenetv3_small_100.in1k,224,85.220,14.780,95.770,4.230,2.54,0.875,bilinear\nmobilenetv3_small_100.lamb_in1k,224,85.190,14.810,95.620,4.380,2.54,0.875,bicubic\nrepghostnet_050.in1k,224,85.040,14.960,95.220,4.780,2.31,0.875,bicubic\ntinynet_d.in1k,152,84.830,15.170,95.220,4.780,2.34,0.875,bicubic\nresnet10t.c3_in1k,176,84.790,15.210,95.010,4.990,5.44,0.875,bicubic\nmnasnet_small.lamb_in1k,224,84.360,15.640,95.160,4.840,2.03,0.875,bicubic\nresnet18.a3_in1k,160,84.280,15.720,94.620,5.380,11.69,0.950,bicubic\ndla46x_c.in1k,224,84.270,15.730,95.240,4.760,1.07,0.875,bilinear\nmobilenetv4_conv_small_050.e3000_r224_in1k,256,84.160,15.840,95.140,4.860,2.24,0.950,bicubic\nmobilenetv2_050.lamb_in1k,224,83.940,16.060,94.590,5.410,1.97,0.875,bicubic\ndla46_c.in1k,224,83.640,16.360,94.930,5.070,1.30,0.875,bilinear\ntf_mobilenetv3_small_075.in1k,224,83.550,16.450,94.860,5.140,2.04,0.875,bilinear\nmobilenetv4_conv_small_050.e3000_r224_in1k,224,83.420,16.580,94.580,5.420,2.24,0.875,bicubic\nmobilenetv3_small_075.lamb_in1k,224,83.110,16.890,94.150,5.850,2.04,0.875,bicubic\nefficientvit_m0.r224_in1k,224,82.410,17.590,94.440,5.560,2.35,0.875,bicubic\nlcnet_050.ra2_in1k,224,81.730,18.270,93.730,6.270,1.88,0.875,bicubic\ntf_mobilenetv3_small_minimal_100.in1k,224,81.340,18.660,9
3.690,6.310,2.04,0.875,bilinear\ntinynet_e.in1k,106,78.980,21.020,92.560,7.440,2.04,0.875,bicubic\ntest_vit3.r160_in1k,160,77.580,22.420,92.350,7.650,0.93,0.950,bicubic\nmobilenetv3_small_050.lamb_in1k,224,77.030,22.970,91.300,8.700,1.59,0.875,bicubic\ntest_convnext2.r160_in1k,160,74.920,25.080,91.120,8.880,0.48,0.950,bicubic\ntest_convnext3.r160_in1k,160,74.280,25.720,90.610,9.390,0.47,0.950,bicubic\ntest_convnext.r160_in1k,160,69.420,30.580,88.650,11.350,0.27,0.950,bicubic\ntest_nfnet.r160_in1k,160,68.110,31.890,88.030,11.970,0.38,0.950,bicubic\ntest_efficientnet_evos.r160_in1k,160,66.810,33.190,86.300,13.700,0.36,0.950,bicubic\ntest_efficientnet.r160_in1k,160,66.400,33.600,86.290,13.710,0.36,0.950,bicubic\ntest_byobnet.r160_in1k,160,65.890,34.110,86.180,13.820,0.46,0.950,bicubic\ntest_efficientnet_ln.r160_in1k,160,64.800,35.200,85.330,14.670,0.36,0.950,bicubic\ntest_vit2.r160_in1k,160,64.640,35.360,85.450,14.550,0.46,0.950,bicubic\ntest_efficientnet_gn.r160_in1k,160,64.510,35.490,85.570,14.430,0.36,0.950,bicubic\ntest_vit.r160_in1k,160,62.490,37.510,84.380,15.620,0.37,0.950,bicubic\ntest_resnet.r160_in1k,160,62.160,37.840,84.210,15.790,0.47,0.950,bilinear\n"
  },
  {
    "path": "results/results-imagenet-a.csv",
    "content": "model,img_size,top1,top1_err,top5,top5_err,param_count,crop_pct,interpolation,top1_diff,top5_diff,rank_diff\neva02_large_patch14_448.mim_m38m_ft_in22k_in1k,448,88.627,11.373,97.360,2.640,305.08,1.000,bicubic,-10.203,-2.510,+2\neva02_large_patch14_448.mim_in22k_ft_in22k_in1k,448,88.413,11.587,97.040,2.960,305.08,1.000,bicubic,-10.527,-2.870,-1\neva_giant_patch14_560.m30m_ft_in22k_in1k,560,87.320,12.680,96.800,3.200,\"1,014.45\",1.000,bicubic,-11.500,-3.100,+1\neva02_large_patch14_448.mim_m38m_ft_in1k,448,87.040,12.960,96.347,3.653,305.08,1.000,bicubic,-11.690,-3.443,+5\neva02_large_patch14_448.mim_in22k_ft_in1k,448,86.227,13.773,95.880,4.120,305.08,1.000,bicubic,-12.623,-3.960,-3\neva_giant_patch14_336.clip_ft_in1k,336,85.107,14.893,95.667,4.333,\"1,013.01\",1.000,bicubic,-13.703,-4.233,0\neva_giant_patch14_336.m30m_ft_in22k_in1k,336,85.013,14.987,96.253,3.747,\"1,013.01\",1.000,bicubic,-13.797,-3.567,-2\ntf_efficientnet_l2.ns_jft_in1k,800,84.680,15.320,96.053,3.947,480.31,0.960,bicubic,-13.860,-3.767,+12\nregnety_1280.swag_ft_in1k,384,83.800,16.200,96.187,3.813,644.81,1.000,bicubic,-14.650,-3.613,+20\neva_large_patch14_336.in22k_ft_in22k_in1k,336,83.520,16.480,95.200,4.800,304.53,1.000,bicubic,-15.230,-4.610,-2\ntf_efficientnet_l2.ns_jft_in1k_475,475,83.373,16.627,95.480,4.520,480.31,0.936,bicubic,-15.127,-4.360,+11\nconvnextv2_huge.fcmae_ft_in22k_in1k_512,512,83.347,16.653,95.893,4.107,660.29,1.000,bicubic,-15.243,-3.977,+5\nmaxvit_xlarge_tf_512.in21k_ft_in1k,512,83.120,16.880,95.347,4.653,475.77,1.000,bicubic,-15.500,-4.453,+2\nvit_so400m_patch14_siglip_378.webli_ft_in1k,378,82.973,17.027,95.440,4.560,429.38,1.000,bicubic,-15.647,-4.360,-1\nbeit_large_patch16_512.in22k_ft_in22k_in1k,512,82.960,17.040,95.720,4.280,305.67,1.000,bicubic,-15.600,-4.110,+3\neva_large_patch14_336.in22k_ft_in1k,336,82.680,17.320,95.533,4.467,304.53,1.000,bicubic,-16.040,-4.337,-6\nvit_so400m_patch14_siglip_gap_378.webli_ft_in1k,378,81.773,18.227,94.613,5.387,414.14,1.000,
bicubic,-17.007,-5.217,-10\nmaxvit_large_tf_512.in21k_ft_in1k,512,81.693,18.307,94.987,5.013,212.33,1.000,bicubic,-16.917,-4.803,-2\neva_giant_patch14_224.clip_ft_in1k,224,81.107,18.893,94.187,5.813,\"1,012.56\",0.900,bicubic,-17.353,-5.563,+9\nmaxvit_base_tf_512.in21k_ft_in1k,512,81.053,18.947,94.320,5.680,119.88,1.000,bicubic,-17.587,-5.480,-8\nmaxvit_xlarge_tf_384.in21k_ft_in1k,384,80.947,19.053,94.440,5.560,475.32,1.000,bicubic,-17.553,-5.340,+3\nbeit_large_patch16_384.in22k_ft_in22k_in1k,384,80.173,19.827,94.733,5.267,305.00,1.000,bicubic,-18.327,-5.087,+1\ncaformer_b36.sail_in22k_ft_in1k_384,384,79.587,20.413,94.320,5.680,98.75,1.000,bicubic,-18.853,-5.500,+8\nconvnextv2_huge.fcmae_ft_in22k_in1k_384,384,79.373,20.627,94.547,5.453,660.29,1.000,bicubic,-19.297,-5.313,-13\ndeit3_large_patch16_384.fb_in22k_ft_in1k,384,79.320,20.680,93.627,6.373,304.76,1.000,bicubic,-19.140,-6.123,+2\nmaxvit_large_tf_384.in21k_ft_in1k,384,78.120,21.880,93.213,6.787,212.03,1.000,bicubic,-20.370,-6.537,-1\neva02_base_patch14_448.mim_in22k_ft_in1k,448,77.840,22.160,93.200,6.800,87.12,1.000,bicubic,-20.600,-6.620,+5\neva02_base_patch14_448.mim_in22k_ft_in22k_in1k,448,77.547,22.453,93.053,6.947,87.12,1.000,bicubic,-21.073,-6.777,-14\nvit_so150m2_patch16_reg1_gap_448.sbb_e200_in12k_ft_in1k,448,77.240,22.760,93.760,6.240,136.50,1.000,bicubic,-21.160,-6.050,+7\nvit_so150m2_patch16_reg1_gap_384.sbb_e200_in12k_ft_in1k,384,77.240,22.760,93.613,6.387,136.33,1.000,bicubic,-21.000,-6.197,+20\nvit_large_patch14_clip_336.openai_ft_in12k_in1k,336,77.133,22.867,93.667,6.333,304.53,1.000,bicubic,-21.137,-6.103,+13\nconvnext_xxlarge.clip_laion2b_soup_ft_in1k,256,76.880,23.120,94.213,5.787,846.47,1.000,bicubic,-21.570,-5.657,-2\nmaxvit_base_tf_384.in21k_ft_in1k,384,76.707,23.293,92.387,7.613,119.65,1.000,bicubic,-21.813,-7.363,-12\nbeitv2_large_patch16_224.in1k_ft_in22k_in1k,224,76.600,23.400,93.293,6.707,304.43,0.950,bicubic,-21.950,-6.467,-15\neva_large_patch14_196.in22k_ft_in22k_in1k,196,75.400,24.6
00,91.627,8.373,304.14,1.000,bicubic,-23.020,-8.183,0\nmaxvit_rmlp_base_rw_384.sw_in12k_ft_in1k,384,74.827,25.173,91.480,8.520,116.14,1.000,bicubic,-23.353,-8.300,+21\nregnety_1280.swag_lc_in1k,224,74.507,25.493,91.627,8.373,644.81,0.965,bicubic,-23.173,-8.093,+99\nswinv2_large_window12to24_192to384.ms_in22k_ft_in1k,384,74.347,25.653,92.360,7.640,196.74,1.000,bicubic,-23.813,-7.350,+25\nvit_huge_patch14_clip_336.laion2b_ft_in12k_in1k,336,74.240,25.760,92.307,7.693,632.46,1.000,bicubic,-24.190,-7.513,-6\nregnety_320.swag_ft_in1k,384,74.000,26.000,92.880,7.120,145.05,1.000,bicubic,-24.080,-6.980,+33\ncaformer_m36.sail_in22k_ft_in1k_384,384,73.920,26.080,91.347,8.653,56.20,1.000,bicubic,-24.230,-8.433,+25\nconvformer_b36.sail_in22k_ft_in1k_384,384,73.653,26.347,91.640,8.360,99.88,1.000,bicubic,-24.607,-8.190,+3\neva_large_patch14_196.in22k_ft_in1k,196,73.040,26.960,91.347,8.653,304.14,1.000,bicubic,-25.320,-8.483,-5\nvit_so150m_patch16_reg4_gap_384.sbb_e250_in12k_ft_in1k,384,72.533,27.467,91.360,8.640,134.42,1.000,bicubic,-25.667,-8.420,+11\nvit_large_patch14_clip_224.openai_ft_in12k_in1k,224,72.173,27.827,90.813,9.187,304.20,1.000,bicubic,-26.057,-8.907,+6\nswinv2_base_window12to24_192to384.ms_in22k_ft_in1k,384,71.840,28.160,91.840,8.160,87.92,1.000,bicubic,-26.310,-7.910,+19\nvit_large_patch14_clip_224.openai_ft_in1k,224,71.827,28.173,91.360,8.640,304.20,1.000,bicubic,-26.333,-8.300,+17\nvit_large_patch14_clip_336.laion2b_ft_in12k_in1k,336,71.453,28.547,89.813,10.187,304.53,1.000,bicubic,-26.877,-9.947,-7\ndeit3_base_patch16_384.fb_in22k_ft_in1k,384,71.293,28.707,89.880,10.120,86.88,1.000,bicubic,-26.547,-9.790,+58\nconvnext_xlarge.fb_in22k_ft_in1k_384,384,71.187,28.813,90.813,9.187,350.20,1.000,bicubic,-27.233,-8.957,-17\ncoatnet_rmlp_2_rw_384.sw_in12k_ft_in1k,384,71.187,28.813,89.707,10.293,73.88,1.000,bicubic,-26.883,-10.063,+25\nvit_large_patch16_384.augreg_in21k_ft_in1k,384,71.053,28.947,89.680,10.320,304.72,1.000,bicubic,-27.197,-10.120,-3\nconvnext_large_mlp.c
lip_laion2b_soup_ft_in12k_in1k_384,384,70.920,29.080,90.867,9.133,200.13,1.000,bicubic,-27.560,-8.883,-27\nvit_huge_patch14_clip_224.laion2b_ft_in12k_in1k,224,70.893,29.107,90.547,9.453,632.05,1.000,bicubic,-27.397,-9.233,-12\nswin_large_patch4_window12_384.ms_in22k_ft_in1k,384,70.800,29.200,90.507,9.493,196.74,1.000,bicubic,-27.230,-9.183,+25\ncaformer_s36.sail_in22k_ft_in1k_384,384,70.747,29.253,90.427,9.573,39.30,1.000,bicubic,-27.253,-9.293,+27\nvit_mediumd_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k,384,70.627,29.373,90.560,9.440,64.27,1.000,bicubic,-27.543,-9.190,+5\nconvnextv2_large.fcmae_ft_in22k_in1k_384,384,70.213,29.787,89.600,10.400,197.96,1.000,bicubic,-28.167,-10.160,-21\ndeit3_huge_patch14_224.fb_in22k_ft_in1k,224,70.187,29.813,90.800,9.200,632.13,1.000,bicubic,-27.983,-8.970,+2\nbeit3_large_patch16_224.in22k_ft_in1k,224,70.160,29.840,90.680,9.320,304.57,1.000,bicubic,-27.890,-9.070,+18\nbeit3_large_patch16_224.indomain_in22k_ft_in1k,224,70.080,29.920,90.267,9.733,304.57,1.000,bicubic,-27.990,-9.433,+13\nmaxxvitv2_rmlp_base_rw_384.sw_in12k_ft_in1k,384,70.027,29.973,90.440,9.560,116.09,1.000,bicubic,-28.233,-9.310,-16\nconvnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_320,320,69.853,30.147,90.707,9.293,200.13,1.000,bicubic,-28.427,-9.063,-20\nvolo_d5_512.sail_in1k,512,69.467,30.533,90.587,9.413,296.09,1.150,bicubic,-28.333,-9.023,+53\ncaformer_b36.sail_in22k_ft_in1k,224,69.453,30.547,90.213,9.787,98.75,1.000,bicubic,-28.717,-9.517,-7\nbeit_large_patch16_224.in22k_ft_in22k_in1k,224,69.053,30.947,89.973,10.027,304.43,0.900,bicubic,-29.117,-9.757,-7\nseresnextaa201d_32x8d.sw_in12k_ft_in1k_384,384,68.627,31.373,88.573,11.427,149.39,1.000,bicubic,-29.573,-11.197,-13\ndeit3_large_patch16_224.fb_in22k_ft_in1k,224,68.573,31.427,89.960,10.040,304.37,1.000,bicubic,-29.597,-9.800,-8\nconvformer_m36.sail_in22k_ft_in1k_384,384,68.520,31.480,89.160,10.840,57.05,1.000,bicubic,-29.500,-10.530,+12\nmambaout_base_plus_rw.sw_e150_r384_in12k_ft_in1k,384,68.493,31.507,89
.693,10.307,101.66,1.000,bicubic,-29.847,-10.067,-30\nconvnext_xlarge.fb_in22k_ft_in1k,288,68.240,31.760,90.120,9.880,350.20,1.000,bicubic,-29.880,-9.660,-2\nregnety_160.swag_ft_in1k,384,68.093,31.907,90.680,9.320,83.59,1.000,bicubic,-29.697,-9.080,+48\nmaxvit_base_tf_512.in1k,512,68.093,31.907,88.547,11.453,119.88,1.000,bicubic,-29.707,-11.103,+48\nvolo_d5_448.sail_in1k,448,67.960,32.040,89.733,10.267,295.91,1.150,bicubic,-29.790,-9.897,+56\nmaxvit_large_tf_512.in1k,512,67.667,32.333,87.693,12.307,212.33,1.000,bicubic,-30.163,-12.067,+37\nbeitv2_large_patch16_224.in1k_ft_in1k,224,67.627,32.373,88.733,11.267,304.43,0.950,bicubic,-30.293,-10.927,+17\nconvnext_large.fb_in22k_ft_in1k_384,384,67.387,32.613,88.880,11.120,197.77,1.000,bicubic,-30.873,-10.880,-29\nswinv2_large_window12to16_192to256.ms_in22k_ft_in1k,256,67.373,32.627,88.507,11.493,196.74,0.900,bicubic,-30.467,-11.163,+31\nconvnext_large_mlp.clip_laion2b_augreg_ft_in1k_384,384,67.267,32.733,89.960,10.040,200.13,1.000,bicubic,-30.993,-9.820,-32\ntf_efficientnet_b7.ns_jft_in1k,600,67.187,32.813,88.533,11.467,66.35,0.949,bicubic,-30.713,-11.157,+16\ntf_efficientnetv2_xl.in21k_ft_in1k,512,67.160,32.840,87.147,12.853,208.12,1.000,bicubic,-30.740,-12.483,+18\nvit_large_patch14_clip_336.laion2b_ft_in1k,336,67.053,32.947,88.907,11.093,304.53,1.000,bicubic,-31.177,-10.813,-30\nbeit_base_patch16_384.in22k_ft_in22k_in1k,384,67.013,32.987,89.093,10.907,86.74,1.000,bicubic,-30.817,-10.607,+28\nconvnextv2_large.fcmae_ft_in22k_in1k,288,66.880,33.120,88.880,11.120,197.96,1.000,bicubic,-31.240,-10.790,-16\nvit_large_patch14_clip_224.laion2b_ft_in12k_in1k,224,66.853,33.147,88.147,11.853,304.20,1.000,bicubic,-31.237,-11.613,-13\nconvnext_large.fb_in22k_ft_in1k,288,66.333,33.667,88.987,11.013,197.77,1.000,bicubic,-31.797,-10.763,-19\nvit_so150m2_patch16_reg1_gap_256.sbb_e200_in12k_ft_in1k,256,66.307,33.693,88.680,11.320,136.06,1.000,bicubic,-31.913,-11.050,-34\nvolo_d4_448.sail_in1k,448,66.293,33.707,89.013,10.987,193.41,1.150,
bicubic,-31.387,-10.597,+49\nhiera_huge_224.mae_in1k_ft_in1k,224,66.253,33.747,86.573,13.427,672.78,0.900,bicubic,-31.717,-13.037,-4\nseresnextaa101d_32x8d.sw_in12k_ft_in1k_288,320,66.013,33.987,87.813,12.187,93.59,1.000,bicubic,-31.947,-11.887,-4\nconvnextv2_large.fcmae_ft_in22k_in1k,224,65.933,34.067,87.280,12.720,197.96,0.875,bicubic,-31.997,-12.400,-1\nconvnextv2_base.fcmae_ft_in22k_in1k_384,384,65.813,34.187,87.840,12.160,88.72,1.000,bicubic,-32.547,-11.940,-53\nvit_huge_patch14_clip_224.laion2b_ft_in1k,224,65.667,34.333,87.840,12.160,632.05,1.000,bicubic,-32.363,-11.890,-14\nregnety_320.swag_lc_in1k,224,65.547,34.453,88.040,11.960,145.05,0.965,bicubic,-31.603,-11.420,+158\nvolo_d3_448.sail_in1k,448,65.520,34.480,87.373,12.627,86.63,1.000,bicubic,-32.030,-12.287,+66\nvit_betwixt_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k,384,65.387,34.613,88.333,11.667,60.60,1.000,bicubic,-32.383,-11.377,+29\nnextvit_large.bd_ssld_6m_in1k_384,384,65.373,34.627,88.587,11.413,57.87,1.000,bicubic,-32.567,-11.203,-8\nswin_base_patch4_window12_384.ms_in22k_ft_in1k,384,65.293,34.707,88.587,11.413,87.90,1.000,bicubic,-32.577,-11.123,+4\nmambaout_base_plus_rw.sw_e150_in12k_ft_in1k,288,64.960,35.040,87.920,12.080,101.66,1.000,bicubic,-33.160,-11.850,-29\nefficientvit_l3.r384_in1k,384,64.853,35.147,87.040,12.960,246.04,1.000,bicubic,-32.737,-12.580,+57\nconvnextv2_huge.fcmae_ft_in1k,288,64.533,35.467,87.187,12.813,660.29,1.000,bicubic,-33.377,-12.493,-7\ntf_efficientnetv2_l.in21k_ft_in1k,480,64.520,35.480,87.667,12.333,118.52,1.000,bicubic,-33.310,-11.893,+8\nseresnextaa101d_32x8d.sw_in12k_ft_in1k_288,288,64.520,35.480,87.067,12.933,93.59,0.950,bicubic,-33.430,-12.693,-16\nmaxvit_rmlp_base_rw_224.sw_in12k_ft_in1k,224,64.520,35.480,85.867,14.133,116.14,0.950,bicubic,-33.280,-13.783,+16\nconvnext_xlarge.fb_in22k_ft_in1k,224,63.787,36.213,87.387,12.613,350.20,0.875,bicubic,-34.133,-12.293,-13\nswinv2_base_window12to16_192to256.ms_in22k_ft_in1k,256,63.467,36.533,87.667,12.333,87.92,0.900,b
icubic,-34.213,-11.973,+29\nmaxvit_large_tf_384.in1k,384,63.400,36.600,85.187,14.813,212.03,1.000,bicubic,-34.190,-14.403,+49\nconvformer_b36.sail_in22k_ft_in1k,224,63.373,36.627,86.480,13.520,99.88,1.000,bicubic,-34.577,-13.260,-21\nconvnext_base.fb_in22k_ft_in1k_384,384,63.307,36.693,86.827,13.173,88.59,1.000,bicubic,-34.793,-12.823,-38\nconvformer_s36.sail_in22k_ft_in1k_384,384,63.027,36.973,86.773,13.227,40.01,1.000,bicubic,-34.823,-12.917,-5\nvit_base_patch16_384.augreg_in21k_ft_in1k,384,63.027,36.973,86.080,13.920,86.86,1.000,bicubic,-34.793,-13.590,+3\nmaxvit_small_tf_512.in1k,512,62.840,37.160,86.467,13.533,69.13,1.000,bicubic,-34.920,-13.193,+15\nnextvit_base.bd_ssld_6m_in1k_384,384,62.773,37.227,87.893,12.107,44.82,1.000,bicubic,-35.097,-11.867,-12\nmaxvit_base_tf_384.in1k,384,62.467,37.533,85.133,14.867,119.65,1.000,bicubic,-35.123,-14.387,+41\nvit_so150m_patch16_reg4_gap_256.sbb_e250_in12k_ft_in1k,256,62.307,37.693,86.560,13.440,134.13,0.950,bicubic,-35.453,-12.990,+11\nvit_base_patch8_224.augreg2_in21k_ft_in1k,224,62.240,37.760,85.893,14.107,86.58,0.900,bicubic,-35.500,-13.747,+15\ncaformer_b36.sail_in1k_384,384,62.093,37.907,84.480,15.520,98.75,1.000,bicubic,-35.417,-15.110,+52\nconvnextv2_base.fcmae_ft_in22k_in1k,288,62.067,37.933,87.053,12.947,88.72,1.000,bicubic,-35.993,-12.697,-42\ncait_m48_448.fb_dist_in1k,448,62.067,37.933,86.467,13.533,356.46,1.000,bicubic,-35.423,-13.153,+54\ntf_efficientnet_b6.ns_jft_in1k,528,61.853,38.147,85.027,14.973,43.04,0.942,bicubic,-35.767,-14.553,+27\nconvnextv2_huge.fcmae_ft_in1k,224,61.853,38.147,84.640,15.360,660.29,0.875,bicubic,-35.797,-14.980,+21\nbeitv2_base_patch16_224.in1k_ft_in22k_in1k,224,61.480,38.520,85.573,14.427,86.53,0.900,bicubic,-36.220,-14.107,+12\nswin_large_patch4_window7_224.ms_in22k_ft_in1k,224,61.333,38.667,86.600,13.400,196.53,0.900,bicubic,-36.307,-12.960,+21\nconvnext_base.fb_in22k_ft_in1k,288,61.253,38.747,86.480,13.520,88.59,1.000,bicubic,-36.597,-13.160,-20\nvit_large_r50_s32_384.augreg_i
n21k_ft_in1k,384,61.187,38.813,83.787,16.213,329.09,1.000,bicubic,-36.663,-15.883,-20\nseresnextaa101d_32x8d.sw_in12k_ft_in1k,288,61.147,38.853,85.840,14.160,93.59,1.000,bicubic,-36.783,-13.930,-35\ncaformer_m36.sail_in22k_ft_in1k,224,60.973,39.027,84.973,15.027,56.20,1.000,bicubic,-36.867,-14.707,-19\nvit_mediumd_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k,256,60.893,39.107,85.293,14.707,64.11,0.950,bicubic,-36.817,-14.347,+4\nconvnext_large_mlp.clip_laion2b_augreg_ft_in1k,256,60.733,39.267,86.693,13.307,200.13,1.000,bicubic,-37.247,-13.007,-45\ncoatnet_rmlp_2_rw_224.sw_in12k_ft_in1k,224,60.693,39.307,84.413,15.587,73.88,0.950,bicubic,-36.947,-15.157,+13\nregnety_160.swag_lc_in1k,224,60.533,39.467,85.853,14.147,83.59,0.965,bicubic,-36.297,-13.807,+203\nconvnextv2_base.fcmae_ft_in22k_in1k,224,60.520,39.480,85.053,14.947,88.72,0.875,bicubic,-37.380,-14.507,-35\nconvnext_small.in12k_ft_in1k_384,384,60.507,39.493,84.813,15.187,50.22,1.000,bicubic,-37.293,-14.877,-15\nconvnext_large.fb_in22k_ft_in1k,224,60.480,39.520,85.147,14.853,197.77,0.875,bicubic,-37.370,-14.523,-31\ncaformer_m36.sail_in1k_384,384,60.347,39.653,84.813,15.187,56.20,1.000,bicubic,-37.103,-14.787,+45\ndeit3_large_patch16_384.fb_in1k,384,60.227,39.773,85.533,14.467,304.76,1.000,bicubic,-37.213,-14.057,+47\ncoatnet_2_rw_224.sw_in12k_ft_in1k,224,60.160,39.840,84.613,15.387,73.87,0.950,bicubic,-37.370,-14.997,+28\nvit_large_patch14_clip_224.laion2b_ft_in1k,224,60.053,39.947,85.853,14.147,304.20,1.000,bicubic,-37.857,-13.797,-43\ntf_efficientnet_b5.ns_jft_in1k,456,59.920,40.080,84.280,15.720,30.39,0.934,bicubic,-37.550,-15.360,+37\nxcit_large_24_p8_384.fb_dist_in1k,384,59.893,40.107,85.360,14.640,188.93,1.000,bicubic,-37.637,-14.180,+26\nmaxxvitv2_rmlp_base_rw_224.sw_in12k_ft_in1k,224,59.840,40.160,84.960,15.040,116.09,0.950,bicubic,-37.910,-14.660,-14\nefficientvit_l3.r320_in1k,320,59.840,40.160,84.933,15.067,246.04,1.000,bicubic,-37.570,-14.567,+48\nmambaout_base_plus_rw.sw_e150_in12k_ft_in1k,224,59.720
,40.280,84.493,15.507,101.66,1.000,bicubic,-38.180,-15.097,-43\nvit_base_patch16_clip_384.openai_ft_in12k_in1k,384,59.627,40.373,84.200,15.800,86.86,0.950,bicubic,-38.563,-15.470,-88\nresnetv2_152x4_bit.goog_in21k_ft_in1k,480,59.560,40.440,83.160,16.840,936.53,1.000,bilinear,-37.930,-16.440,+27\ntf_efficientnetv2_m.in21k_ft_in1k,480,59.080,40.920,84.480,15.520,54.14,1.000,bicubic,-38.730,-15.210,-31\nconvnext_base.clip_laion2b_augreg_ft_in12k_in1k_384,384,58.840,41.160,85.787,14.213,88.59,1.000,bicubic,-39.210,-13.953,-70\nmaxvit_tiny_tf_512.in1k,512,58.627,41.373,84.653,15.347,31.05,1.000,bicubic,-38.943,-14.907,+11\nvit_base_patch8_224.augreg_in21k_ft_in1k,224,58.480,41.520,82.573,17.427,86.58,0.900,bicubic,-39.100,-17.097,+9\nvolo_d2_384.sail_in1k,384,58.427,41.573,84.280,15.720,58.87,1.000,bicubic,-38.893,-15.310,+59\ncaformer_s18.sail_in22k_ft_in1k_384,384,58.413,41.587,85.440,14.560,26.34,1.000,bicubic,-39.027,-14.020,+33\ntf_efficientnetv2_xl.in21k_ft_in1k,384,58.200,41.800,80.840,19.160,208.12,1.000,bicubic,-39.180,-18.730,+46\nnextvit_small.bd_ssld_6m_in1k_384,384,58.080,41.920,85.373,14.627,31.76,1.000,bicubic,-39.690,-14.217,-29\nvit_mediumd_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,58.027,41.973,83.200,16.800,64.11,0.950,bicubic,-39.603,-16.340,-9\nhiera_large_224.mae_in1k_ft_in1k,224,58.013,41.987,81.667,18.333,213.74,0.900,bicubic,-39.427,-17.963,+30\nresnext101_32x32d.fb_wsl_ig1b_ft_in1k,224,57.920,42.080,80.707,19.293,468.53,0.875,bilinear,-39.450,-18.963,+43\ntiny_vit_21m_512.dist_in22k_ft_in1k,512,57.787,42.213,83.147,16.853,21.27,1.000,bicubic,-40.113,-16.573,-59\nvit_base_patch16_clip_384.laion2b_ft_in12k_in1k,384,57.773,42.227,81.867,18.133,86.86,1.000,bicubic,-40.237,-17.793,-76\nefficientvit_l2.r384_in1k,384,57.747,42.253,83.747,16.253,63.71,1.000,bicubic,-39.653,-15.673,+34\nseresnextaa101d_32x8d.sw_in12k_ft_in1k,224,57.680,42.320,82.813,17.187,93.59,0.875,bicubic,-39.970,-16.877,-20\ncait_m36_384.fb_dist_in1k,384,57.667,42.333,84.707,15.293
,271.22,1.000,bicubic,-39.733,-14.763,+30\nhgnetv2_b6.ssld_stage2_ft_in1k,288,57.547,42.453,84.173,15.827,75.26,1.000,bicubic,-40.253,-15.497,-46\ntiny_vit_21m_384.dist_in22k_ft_in1k,384,57.480,42.520,83.107,16.893,21.23,1.000,bicubic,-40.130,-16.513,-12\ndm_nfnet_f5.dm_in1k,544,57.347,42.653,82.080,17.920,377.21,0.954,bicubic,-40.433,-17.570,-41\ntf_efficientnetv2_l.in21k_ft_in1k,384,57.293,42.707,82.293,17.707,118.52,1.000,bicubic,-40.167,-17.307,+12\nconvnext_base.clip_laiona_augreg_ft_in1k_384,384,57.173,42.827,84.720,15.280,88.59,1.000,bicubic,-40.437,-14.820,-14\ndeit3_base_patch16_224.fb_in22k_ft_in1k,224,57.147,42.853,83.507,16.493,86.59,1.000,bicubic,-40.343,-16.113,+4\ndeit3_small_patch16_384.fb_in22k_ft_in1k,384,57.133,42.867,83.013,16.987,22.21,1.000,bicubic,-39.997,-16.487,+87\ncaformer_s36.sail_in1k_384,384,57.120,42.880,82.640,17.360,39.30,1.000,bicubic,-40.280,-16.900,+21\nhgnetv2_b6.ssld_stage1_in22k_in1k,288,57.093,42.907,84.240,15.760,75.26,1.000,bicubic,-40.717,-15.360,-56\nconvnextv2_large.fcmae_ft_in1k,288,57.080,42.920,83.587,16.413,197.96,1.000,bicubic,-40.580,-16.003,-32\nvolo_d5_224.sail_in1k,224,56.827,43.173,82.760,17.240,295.46,0.960,bicubic,-40.553,-16.610,+24\nrdnet_large.nv_in1k_ft_in1k_384,384,56.747,43.253,83.013,16.987,186.27,1.000,bicubic,-40.923,-16.537,-36\ndm_nfnet_f6.dm_in1k,576,56.747,43.253,81.533,18.467,438.36,0.956,bicubic,-41.033,-18.057,-51\nmaxvit_small_tf_384.in1k,384,56.720,43.280,82.280,17.720,69.02,1.000,bicubic,-40.690,-17.110,+13\nregnety_160.lion_in12k_ft_in1k,288,56.680,43.320,83.893,16.107,83.59,1.000,bicubic,-40.770,-15.647,+5\nxcit_medium_24_p8_384.fb_dist_in1k,384,56.640,43.360,83.640,16.360,84.32,1.000,bicubic,-40.660,-15.890,+36\nconvformer_m36.sail_in22k_ft_in1k,224,56.253,43.747,82.227,17.773,57.05,1.000,bicubic,-41.337,-17.303,-25\ndm_nfnet_f4.dm_in1k,512,56.253,43.747,81.373,18.627,316.07,0.951,bicubic,-41.377,-18.247,-32\nregnety_160.sw_in12k_ft_in1k,288,56.213,43.787,82.813,17.187,83.59,1.000,bicubic
,-41.297,-16.767,-12\nefficientvit_l3.r256_in1k,256,56.147,43.853,81.333,18.667,246.04,1.000,bicubic,-40.883,-17.907,+104\ncaformer_s36.sail_in22k_ft_in1k,224,55.560,44.440,81.893,18.107,39.30,1.000,bicubic,-42.050,-17.697,-32\nconvformer_b36.sail_in1k_384,384,55.467,44.533,81.253,18.747,99.88,1.000,bicubic,-42.073,-18.227,-21\nvit_large_patch16_224.augreg_in21k_ft_in1k,224,55.293,44.707,79.680,20.320,304.33,0.900,bicubic,-42.347,-19.910,-42\nconvnext_base.fb_in22k_ft_in1k,224,54.773,45.227,82.573,17.427,88.59,0.875,bicubic,-42.727,-17.037,-15\nconvnextv2_large.fcmae_ft_in1k,224,54.680,45.320,81.160,18.840,197.96,0.875,bicubic,-42.670,-18.370,+15\nvit_base_patch16_clip_384.openai_ft_in1k,384,54.453,45.547,82.307,17.693,86.86,1.000,bicubic,-43.097,-17.243,-27\nxcit_small_24_p8_384.fb_dist_in1k,384,54.387,45.613,81.707,18.293,47.63,1.000,bicubic,-42.893,-17.793,+26\nvit_base_r50_s16_384.orig_in21k_ft_in1k,384,54.347,45.653,80.813,19.187,98.95,1.000,bicubic,-42.833,-18.747,+55\ncait_s36_384.fb_dist_in1k,384,54.213,45.787,81.253,18.747,68.37,1.000,bicubic,-43.137,-18.277,+12\ndeit3_huge_patch14_224.fb_in1k,224,54.147,45.853,81.920,18.080,632.13,0.900,bicubic,-42.733,-17.560,+134\nvit_medium_patch16_gap_384.sw_in12k_ft_in1k,384,53.840,46.160,81.520,18.480,39.03,0.950,bicubic,-43.610,-18.080,-14\nvolo_d1_384.sail_in1k,384,53.840,46.160,80.907,19.093,26.78,1.000,bicubic,-43.090,-18.343,+119\nbeitv2_base_patch16_224.in1k_ft_in1k,224,53.640,46.360,81.827,18.173,86.53,0.900,bicubic,-43.530,-17.633,+53\nconvformer_m36.sail_in1k_384,384,53.613,46.387,80.867,19.133,57.05,1.000,bicubic,-43.787,-18.643,-3\ndm_nfnet_f3.dm_in1k,416,53.560,46.440,79.293,20.707,254.92,0.940,bicubic,-43.920,-20.227,-22\nvit_base_patch16_clip_384.laion2b_ft_in1k,384,53.373,46.627,80.413,19.587,86.86,1.000,bicubic,-44.377,-19.297,-68\ndeit3_base_patch16_384.fb_in1k,384,53.293,46.707,80.507,19.493,86.88,1.000,bicubic,-43.717,-18.983,+93\nconvnext_small.fb_in22k_ft_in1k_384,384,53.160,46.840,81.467,18.533,
50.22,1.000,bicubic,-44.460,-18.133,-52\nresnext101_32x16d.fb_wsl_ig1b_ft_in1k,224,53.040,46.960,76.880,23.120,194.03,0.875,bilinear,-43.880,-22.700,+115\nconvnext_small.in12k_ft_in1k,288,52.987,47.013,81.307,18.693,50.22,1.000,bicubic,-44.353,-18.273,+4\nvolo_d4_224.sail_in1k,224,52.867,47.133,80.560,19.440,192.96,0.960,bicubic,-44.433,-18.950,+10\nxcit_large_24_p16_384.fb_dist_in1k,384,52.827,47.173,81.733,18.267,189.10,1.000,bicubic,-44.713,-17.787,-39\nresnetv2_152x2_bit.goog_in21k_ft_in1k,448,52.800,47.200,81.133,18.867,236.34,1.000,bilinear,-44.220,-18.107,+82\nconvnext_base.clip_laion2b_augreg_ft_in12k_in1k,256,52.507,47.493,82.760,17.240,88.59,1.000,bicubic,-45.093,-16.960,-52\nresnetv2_101x3_bit.goog_in21k_ft_in1k,448,52.400,47.600,79.840,20.160,387.93,1.000,bilinear,-44.610,-19.670,+83\nvit_base_patch32_clip_448.laion2b_ft_in12k_in1k,448,52.307,47.693,79.973,20.027,88.34,1.000,bicubic,-45.023,-19.627,0\nmaxvit_tiny_tf_384.in1k,384,52.040,47.960,79.893,20.107,30.98,1.000,bicubic,-45.270,-19.607,+2\nvit_betwixt_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k,256,52.027,47.973,80.467,19.533,60.40,0.950,bicubic,-45.193,-19.023,+24\ndm_nfnet_f5.dm_in1k,416,51.960,48.040,77.467,22.533,377.21,0.954,bicubic,-45.520,-22.093,-35\nswin_base_patch4_window7_224.ms_in22k_ft_in1k,224,51.813,48.187,80.413,19.587,87.77,0.900,bicubic,-45.467,-19.177,+6\nregnety_120.sw_in12k_ft_in1k,288,51.800,48.200,80.653,19.347,51.82,1.000,bicubic,-45.470,-19.007,+11\nefficientvit_l3.r224_in1k,224,51.720,48.280,79.080,20.920,246.04,1.000,bicubic,-45.280,-20.450,+84\nconvnext_small.fb_in22k_ft_in1k,288,51.547,48.453,81.333,18.667,50.22,1.000,bicubic,-45.803,-18.227,-11\ndm_nfnet_f6.dm_in1k,448,51.533,48.467,76.533,23.467,438.36,0.956,bicubic,-46.177,-23.077,-82\nefficientvit_l2.r288_in1k,288,51.360,48.640,79.973,20.027,63.71,1.000,bicubic,-45.700,-19.447,+64\nhgnetv2_b6.ssld_stage1_in22k_in1k,224,51.360,48.640,79.920,20.080,75.26,0.965,bicubic,-46.250,-19.690,-68\nresnext101_32x8d.fb_swsl_ig1
b_ft_in1k,224,51.173,48.827,78.253,21.747,88.79,0.875,bilinear,-46.047,-21.337,+14\neva02_small_patch14_336.mim_in22k_ft_in1k,336,51.067,48.933,79.013,20.987,22.13,1.000,bicubic,-46.083,-20.657,+34\ntf_efficientnet_b4.ns_jft_in1k,380,51.040,48.960,79.067,20.933,19.34,0.922,bicubic,-45.940,-20.263,+80\nefficientnet_b5.sw_in12k_ft_in1k,448,51.013,48.987,78.667,21.333,30.39,1.000,bicubic,-46.377,-20.883,-26\nhgnetv2_b6.ssld_stage2_ft_in1k,224,50.920,49.080,79.280,20.720,75.26,0.965,bicubic,-46.540,-20.360,-44\ntf_efficientnetv2_m.in21k_ft_in1k,384,50.920,49.080,78.520,21.480,54.14,1.000,bicubic,-46.290,-20.980,+17\nflexivit_large.1200ep_in1k,240,50.840,49.160,80.560,19.440,304.36,0.950,bicubic,-46.580,-19.040,-37\nbeit_base_patch16_224.in22k_ft_in22k_in1k,224,50.747,49.253,79.893,20.107,86.53,0.900,bicubic,-46.323,-19.537,+49\nmvitv2_large.fb_in1k,224,50.640,49.360,78.067,21.933,217.99,0.900,bicubic,-46.290,-21.313,+86\nxcit_small_12_p8_384.fb_dist_in1k,384,50.547,49.453,79.640,20.360,26.21,1.000,bicubic,-46.683,-19.890,+4\nbeit3_base_patch16_224.indomain_in22k_ft_in1k,224,50.387,49.613,79.747,20.253,86.66,1.000,bicubic,-46.833,-19.833,+7\nefficientnet_x_b5.sw_r448_e450_in1k,576,50.333,49.667,78.627,21.373,33.44,1.000,bicubic,-46.717,-20.843,+52\ndm_nfnet_f4.dm_in1k,384,50.293,49.707,76.067,23.933,316.07,0.951,bicubic,-47.057,-23.413,-26\nvit_base_patch16_clip_224.laion2b_ft_in12k_in1k,224,50.240,49.760,78.200,21.800,86.57,0.950,bicubic,-47.210,-21.440,-49\nvit_base_patch16_clip_224.openai_ft_in12k_in1k,224,50.200,49.800,77.440,22.560,86.57,0.950,bicubic,-47.340,-22.070,-69\ntf_efficientnetv2_l.in1k,480,50.173,49.827,77.560,22.440,118.52,1.000,bicubic,-47.347,-21.990,-66\nconvformer_s18.sail_in22k_ft_in1k_384,384,50.160,49.840,81.053,18.947,26.77,1.000,bicubic,-47.120,-18.377,-18\nconvnext_tiny.in12k_ft_in1k_384,384,50.120,49.880,79.733,20.267,28.59,1.000,bicubic,-47.210,-19.747,-29\nconvformer_s36.sail_in1k_384,384,50.080,49.920,78.907,21.093,40.01,1.000,bicubic,-47.2
00,-20.693,-18\nresnetv2_152x2_bit.goog_teacher_in21k_ft_in1k_384,384,50.080,49.920,77.707,22.293,236.34,1.000,bicubic,-46.750,-21.823,+100\nvit_base_patch16_384.orig_in21k_ft_in1k,384,50.053,49.947,77.600,22.400,86.86,1.000,bicubic,-46.657,-21.880,+132\nvolo_d3_224.sail_in1k,224,50.000,50.000,78.320,21.680,86.33,0.960,bicubic,-47.110,-21.180,+25\nflexivit_large.600ep_in1k,240,49.973,50.027,79.973,20.027,304.36,0.950,bicubic,-47.307,-19.587,-25\nefficientnet_h_b5.sw_r448_e450_in1k,576,49.853,50.147,79.093,20.907,45.88,1.000,bicubic,-47.307,-20.447,+9\nmobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k,544,49.853,50.147,78.560,21.440,32.59,1.000,bicubic,-47.317,-20.960,+4\nvit_base_patch16_224.augreg2_in21k_ft_in1k,224,49.733,50.267,78.733,21.267,86.57,0.900,bicubic,-47.427,-20.767,+7\nvit_betwixt_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,49.680,50.320,78.493,21.507,60.40,0.950,bicubic,-47.650,-20.987,-36\nbeit3_base_patch16_224.in22k_ft_in1k,224,49.600,50.400,79.787,20.213,86.66,1.000,bicubic,-47.320,-19.803,+71\nnextvit_large.bd_ssld_6m_in1k,224,49.520,50.480,80.787,19.213,57.87,0.950,bicubic,-47.870,-18.883,-52\ninception_next_base.sail_in1k_384,384,49.493,50.507,78.560,21.440,86.67,1.000,bicubic,-47.777,-21.000,-23\nregnety_160.lion_in12k_ft_in1k,224,49.467,50.533,78.453,21.547,83.59,0.950,bicubic,-47.773,-21.097,-20\ncait_s24_384.fb_dist_in1k,384,49.413,50.587,78.653,21.347,47.06,1.000,bicubic,-47.657,-20.757,+28\nnextvit_large.bd_in1k_384,384,49.320,50.680,78.413,21.587,57.87,1.000,bicubic,-47.690,-20.897,+38\nxcit_medium_24_p16_384.fb_dist_in1k,384,49.267,50.733,79.747,20.253,84.40,1.000,bicubic,-48.043,-19.713,-40\nhiera_base_plus_224.mae_in1k_ft_in1k,224,49.240,50.760,76.533,23.467,69.90,0.900,bicubic,-47.710,-22.867,+56\ndeit_base_distilled_patch16_384.fb_in1k,384,49.040,50.960,79.000,21.000,87.63,1.000,bicubic,-47.960,-20.220,+42\nregnety_160.sw_in12k_ft_in1k,224,49.013,50.987,77.733,22.267,83.59,0.950,bicubic,-48.207,-21.857,-20\ncaformer_s18.sail_in1k_38
4,384,48.893,51.107,78.720,21.280,26.34,1.000,bicubic,-48.177,-20.790,+21\ncoat_lite_medium_384.in1k,384,48.840,51.160,78.547,21.453,44.57,1.000,bicubic,-48.310,-20.903,-2\nvit_large_patch16_rope_ape_224.naver_in1k,224,48.747,51.253,78.280,21.720,304.37,0.900,bicubic,-48.023,-21.050,+93\nconvnext_base.clip_laion2b_augreg_ft_in1k,256,48.693,51.307,79.933,20.067,88.59,1.000,bicubic,-48.577,-19.587,-36\nvit_large_patch16_rope_mixed_224.naver_in1k,224,48.680,51.320,77.987,22.013,304.20,0.900,bicubic,-48.210,-21.453,+65\ncaformer_b36.sail_in1k,224,48.627,51.373,75.707,24.293,98.75,1.000,bicubic,-48.373,-23.643,+36\nconvnextv2_base.fcmae_ft_in1k,288,48.600,51.400,78.840,21.160,88.72,1.000,bicubic,-48.640,-20.700,-35\nefficientvit_l2.r256_in1k,256,48.587,51.413,77.907,22.093,63.71,1.000,bicubic,-48.263,-21.393,+69\ndeit3_large_patch16_224.fb_in1k,224,48.573,51.427,78.027,21.973,304.37,0.900,bicubic,-48.407,-21.553,+39\nvit_base_patch32_clip_384.laion2b_ft_in12k_in1k,384,48.493,51.507,77.400,22.600,88.30,1.000,bicubic,-48.887,-22.110,-67\nflexivit_large.300ep_in1k,240,48.467,51.533,78.627,21.373,304.36,0.950,bicubic,-48.783,-20.863,-40\ndeit3_medium_patch16_224.fb_in22k_ft_in1k,224,48.267,51.733,77.053,22.947,38.85,1.000,bicubic,-48.723,-22.397,+32\nvit_large_patch16_rope_mixed_ape_224.naver_in1k,224,48.107,51.893,78.107,21.893,304.40,0.900,bicubic,-48.713,-21.193,+72\ntf_efficientnetv2_s.in21k_ft_in1k,384,48.040,51.960,77.773,22.227,21.46,1.000,bicubic,-48.710,-21.597,+87\ntf_efficientnet_b8.ra_in1k,672,47.987,52.013,76.453,23.547,87.41,0.954,bicubic,-49.223,-23.087,-30\nxcit_large_24_p8_224.fb_dist_in1k,224,47.907,52.093,79.120,20.880,188.93,1.000,bicubic,-49.153,-20.270,+8\nconvnext_small.in12k_ft_in1k,224,47.907,52.093,77.253,22.747,50.22,0.950,bicubic,-49.303,-22.247,-35\nregnety_2560.seer_ft_in1k,384,47.787,52.213,76.387,23.613,\"1,282.60\",1.000,bicubic,-49.453,-23.153,-43\nhiera_small_abswin_256.sbb2_e200_in12k_ft_in1k,256,47.747,52.253,76.907,23.093,34.36,0.950,bic
ubic,-49.423,-22.483,-25\nresnest269e.in1k,416,47.627,52.373,74.027,25.973,110.93,0.928,bicubic,-48.893,-25.343,+159\nconvnext_tiny.in12k_ft_in1k,288,47.533,52.467,78.973,21.027,28.59,1.000,bicubic,-49.567,-20.477,-9\nvit_base_patch16_clip_224.openai_ft_in1k,224,47.453,52.547,78.147,21.853,86.57,0.900,bicubic,-49.637,-21.303,-7\nconvformer_s36.sail_in22k_ft_in1k,224,47.440,52.560,77.253,22.747,40.01,1.000,bicubic,-49.640,-22.357,-4\nregnetz_e8.ra3_in1k,320,47.333,52.667,75.853,24.147,57.70,1.000,bicubic,-49.877,-23.617,-40\nnextvit_base.bd_ssld_6m_in1k,224,47.307,52.693,78.600,21.400,44.82,0.950,bicubic,-49.963,-20.940,-58\nvit_large_patch16_rope_224.naver_in1k,224,47.107,52.893,77.587,22.413,304.17,0.900,bicubic,-49.713,-21.843,+60\nxcit_large_24_p8_224.fb_in1k,224,46.867,53.133,74.413,25.587,188.93,1.000,bicubic,-49.533,-24.827,+186\nnaflexvit_base_patch16_parfac_gap.e300_s576_in1k,384,46.827,53.173,75.840,24.160,86.46,1.000,bicubic,-49.403,-23.270,+254\nxcit_small_24_p16_384.fb_dist_in1k,384,46.773,53.227,76.973,23.027,47.67,1.000,bicubic,-50.357,-22.467,-27\nmobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k,480,46.720,53.280,77.387,22.613,32.59,1.000,bicubic,-50.460,-22.173,-41\nconvnext_large.fb_in1k,288,46.667,53.333,76.733,23.267,197.77,1.000,bicubic,-50.423,-22.737,-14\ntf_efficientnet_b8.ap_in1k,672,46.533,53.467,76.133,23.867,87.41,0.954,bicubic,-50.587,-23.477,-29\nmambaout_base_tall_rw.sw_e500_in1k,288,46.507,53.493,76.533,23.467,86.48,1.000,bicubic,-50.613,-23.127,-27\nhgnet_base.ssld_in1k,288,46.480,53.520,76.173,23.827,71.58,1.000,bicubic,-50.950,-23.447,-102\nnextvit_base.bd_in1k_384,384,46.413,53.587,76.800,23.200,44.82,1.000,bicubic,-50.497,-22.600,+28\nresnetv2_50x3_bit.goog_in21k_ft_in1k,448,46.387,53.613,76.680,23.320,217.32,1.000,bilinear,-50.343,-22.620,+71\nnaflexvit_base_patch16_par_gap.e300_s576_in1k,384,46.360,53.640,74.947,25.053,86.63,1.000,bicubic,-49.590,-24.053,+339\nefficientnetv2_rw_m.agc_in1k,416,46.333,53.667,75.733,24.267,53.24,1
.000,bicubic,-50.667,-23.787,+1\ndm_nfnet_f2.dm_in1k,352,46.173,53.827,74.693,25.307,193.78,0.920,bicubic,-50.937,-24.767,-30\nvit_medium_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,46.133,53.867,77.000,23.000,38.88,0.950,bicubic,-51.037,-22.430,-45\nmambaout_base_short_rw.sw_e500_in1k,288,46.107,53.893,76.040,23.960,88.83,1.000,bicubic,-50.903,-23.340,-5\nhgnetv2_b5.ssld_stage2_ft_in1k,288,45.947,54.053,77.773,22.227,39.57,1.000,bicubic,-51.423,-21.907,-96\nresnext101_32x16d.fb_swsl_ig1b_ft_in1k,224,45.920,54.080,72.067,27.933,194.03,0.875,bilinear,-50.680,-27.163,+109\nvolo_d2_224.sail_in1k,224,45.907,54.093,75.133,24.867,58.68,0.960,bicubic,-51.083,-24.257,+1\nswinv2_base_window16_256.ms_in1k,256,45.907,54.093,75.107,24.893,87.92,0.900,bicubic,-50.863,-24.143,+53\necaresnet269d.ra2_in1k,352,45.773,54.227,74.933,25.067,102.09,1.000,bicubic,-51.317,-24.547,-30\ncaformer_m36.sail_in1k,224,45.773,54.227,74.533,25.467,56.20,1.000,bicubic,-51.117,-24.787,+22\nconvnextv2_base.fcmae_ft_in1k,224,45.720,54.280,76.040,23.960,88.72,0.875,bicubic,-51.180,-23.410,+19\ndm_nfnet_f3.dm_in1k,320,45.440,54.560,72.360,27.640,254.92,0.940,bicubic,-51.750,-27.180,-61\nswin_base_patch4_window12_384.ms_in1k,384,45.413,54.587,74.613,25.387,87.90,1.000,bicubic,-51.167,-24.547,+112\nnaflexvit_base_patch16_gap.e300_s576_in1k,384,45.373,54.627,74.973,25.027,86.63,1.000,bicubic,-50.967,-24.367,+194\nvit_small_patch16_384.augreg_in21k_ft_in1k,384,45.360,54.640,76.160,23.840,22.20,1.000,bicubic,-51.340,-23.320,+69\ntf_efficientnet_b7.ap_in1k,600,45.267,54.733,74.013,25.987,66.35,0.949,bicubic,-51.943,-25.587,-70\nresnext101_32x8d.fb_wsl_ig1b_ft_in1k,224,45.253,54.747,70.840,29.160,88.79,0.875,bilinear,-51.067,-28.580,+197\nxcit_medium_24_p8_224.fb_dist_in1k,224,45.227,54.773,76.880,23.120,84.32,1.000,bicubic,-51.703,-22.520,+4\nvit_small_r26_s32_384.augreg_in21k_ft_in1k,384,45.227,54.773,75.480,24.520,36.47,1.000,bicubic,-51.463,-23.860,+69\nconvnext_small.fb_in22k_ft_in1k,224,45.147,54.853,77.4
93,22.507,50.22,0.875,bicubic,-51.823,-21.917,-7\ncoatnet_rmlp_1_rw2_224.sw_in12k_ft_in1k,224,45.133,54.867,73.960,26.040,41.72,0.950,bicubic,-51.537,-25.370,+79\nconvnextv2_tiny.fcmae_ft_in22k_in1k_384,384,45.080,54.920,76.573,23.427,28.64,1.000,bicubic,-52.160,-22.937,-86\nmobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k,448,44.880,55.120,74.573,25.427,32.59,0.950,bicubic,-52.130,-24.787,-22\nhiera_small_abswin_256.sbb2_pd_e200_in12k_ft_in1k,256,44.787,55.213,74.547,25.453,34.36,0.950,bicubic,-51.913,-24.683,+63\neca_nfnet_l2.ra3_in1k,384,44.760,55.240,75.653,24.347,56.72,1.000,bicubic,-52.310,-23.957,-41\nmaxxvit_rmlp_small_rw_256.sw_in1k,256,44.493,55.507,75.133,24.867,66.01,0.950,bicubic,-52.297,-24.237,+30\nregnety_120.sw_in12k_ft_in1k,224,44.467,55.533,74.640,25.360,51.82,0.950,bicubic,-52.703,-24.940,-72\necaresnet269d.ra2_in1k,320,44.320,55.680,73.947,26.053,102.09,0.950,bicubic,-52.790,-25.663,-54\ntiny_vit_21m_224.dist_in22k_ft_in1k,224,44.307,55.693,74.947,25.053,21.20,0.950,bicubic,-52.893,-24.543,-79\nrexnetr_300.sw_in12k_ft_in1k,288,44.080,55.920,76.707,23.293,34.81,1.000,bicubic,-52.770,-22.813,+9\ntf_efficientnetv2_l.in1k,384,44.080,55.920,71.987,28.013,118.52,1.000,bicubic,-53.200,-27.533,-104\nresnest200e.in1k,320,43.987,56.013,73.400,26.600,70.20,0.909,bicubic,-52.623,-25.870,+80\nmambaout_base_wide_rw.sw_e500_in1k,288,43.893,56.107,73.973,26.027,94.45,1.000,bicubic,-53.207,-25.547,-56\ncrossvit_18_dagger_408.in1k,408,43.840,56.160,73.613,26.387,44.61,1.000,bicubic,-52.690,-25.717,+104\nmambaout_base.in1k,288,43.840,56.160,72.573,27.427,84.81,1.000,bicubic,-53.030,-26.597,+4\nseresnextaa101d_32x8d.ah_in1k,288,43.640,56.360,73.040,26.960,93.59,1.000,bicubic,-53.300,-26.360,-18\ncait_xs24_384.fb_dist_in1k,384,43.627,56.373,74.893,25.107,26.67,1.000,bicubic,-52.943,-24.527,+91\nresnetrs200.tf_in1k,320,43.493,56.507,72.547,27.453,93.21,1.000,bicubic,-53.227,-26.883,+39\nswin_small_patch4_window7_224.ms_in22k_ft_in1k,224,43.453,56.547,76.267,23.733,4
9.61,0.900,bicubic,-53.097,-23.013,+94\nhiera_base_224.mae_in1k_ft_in1k,224,43.400,56.600,73.227,26.773,51.52,0.900,bicubic,-53.190,-26.203,+80\ncaformer_s18.sail_in22k_ft_in1k,224,43.320,56.680,74.907,25.093,26.34,1.000,bicubic,-53.390,-24.603,+39\nmvitv2_base.fb_in1k,224,43.293,56.707,73.920,26.080,51.47,0.900,bicubic,-53.477,-25.480,+20\nhgnetv2_b5.ssld_stage1_in22k_in1k,288,43.280,56.720,75.253,24.747,39.57,1.000,bicubic,-53.990,-24.407,-114\nxcit_small_12_p16_384.fb_dist_in1k,384,43.227,56.773,74.000,26.000,26.25,1.000,bicubic,-53.723,-25.390,-29\ntresnet_xl.miil_in1k_448,448,43.213,56.787,72.147,27.853,78.44,0.875,bilinear,-52.757,-27.063,+289\nvit_base_patch16_224.augreg_in21k_ft_in1k,224,43.133,56.867,72.547,27.453,86.57,0.900,bicubic,-53.737,-26.813,-11\nxcit_medium_24_p8_224.fb_in1k,224,43.120,56.880,70.080,29.920,84.32,1.000,bicubic,-53.010,-28.830,+233\nregnetz_e8.ra3_in1k,256,43.107,56.893,72.200,27.800,57.70,0.940,bicubic,-53.583,-27.140,+42\nrdnet_large.nv_in1k,224,42.787,57.213,73.613,26.387,186.27,0.900,bicubic,-54.123,-25.617,-22\nedgenext_base.in21k_ft_in1k,320,42.773,57.227,75.427,24.573,18.51,1.000,bicubic,-53.967,-24.033,+19\nhrnet_w48_ssld.paddle_in1k,288,42.693,57.307,72.067,27.933,77.47,1.000,bilinear,-54.347,-27.573,-60\nefficientvit_l2.r224_in1k,224,42.653,57.347,74.320,25.680,63.71,1.000,bicubic,-54.187,-25.130,-10\nvit_base_patch32_clip_384.openai_ft_in12k_in1k,384,42.560,57.440,72.587,27.413,88.30,0.950,bicubic,-54.540,-26.933,-77\nnextvit_small.bd_ssld_6m_in1k,224,42.533,57.467,75.467,24.533,31.76,0.950,bicubic,-54.427,-24.013,-40\ncoatnet_rmlp_2_rw_224.sw_in1k,224,42.533,57.467,71.040,28.960,73.88,0.950,bicubic,-53.997,-28.040,+85\ngcvit_base.in1k,224,42.520,57.480,73.440,26.560,90.32,0.875,bicubic,-54.050,-25.800,+74\nresnetrs420.tf_in1k,416,42.440,57.560,70.147,29.853,191.89,1.000,bicubic,-54.460,-29.273,-28\nvit_medium_patch16_gap_256.sw_in12k_ft_in1k,256,42.427,57.573,74.373,25.627,38.86,0.950,bicubic,-54.243,-24.787,+37\ntf_effic
ientnetv2_m.in1k,480,42.387,57.613,72.373,27.627,54.14,1.000,bicubic,-54.843,-27.107,-120\nmaxvit_large_tf_224.in1k,224,42.360,57.640,69.213,30.787,211.79,0.950,bicubic,-54.660,-30.377,-64\nxcit_tiny_24_p8_384.fb_dist_in1k,384,42.347,57.653,72.787,27.213,12.11,1.000,bicubic,-54.233,-26.523,+61\nefficientnet_h_b5.sw_r448_e450_in1k,448,42.293,57.707,72.827,27.173,45.88,1.000,bicubic,-54.687,-26.443,-52\nswinv2_small_window16_256.ms_in1k,256,42.267,57.733,72.747,27.253,49.73,0.900,bicubic,-54.203,-26.643,+90\ntf_efficientnet_b7.ra_in1k,600,42.267,57.733,72.520,27.480,66.35,0.949,bicubic,-54.733,-26.970,-60\ndm_nfnet_f1.dm_in1k,320,42.213,57.787,71.227,28.773,132.63,0.910,bicubic,-54.847,-28.153,-77\nconvformer_s18.sail_in1k_384,384,42.187,57.813,74.040,25.960,26.77,1.000,bicubic,-54.853,-25.330,-75\nmaxvit_rmlp_small_rw_224.sw_in1k,224,42.187,57.813,72.373,27.627,64.90,0.900,bicubic,-54.383,-26.857,+65\nmaxvit_base_tf_224.in1k,224,42.027,57.973,70.067,29.933,119.47,0.950,bicubic,-54.953,-29.353,-56\nconvnext_tiny.fb_in22k_ft_in1k_384,384,42.013,57.987,73.533,26.467,28.59,1.000,bicubic,-55.097,-25.867,-98\nvit_base_patch16_clip_224.laion2b_ft_in1k,224,41.960,58.040,73.987,26.013,86.57,1.000,bicubic,-55.160,-25.473,-102\nconvnext_base.fb_in1k,288,41.907,58.093,73.840,26.160,88.59,1.000,bicubic,-54.913,-25.530,-21\neca_nfnet_l2.ra3_in1k,320,41.907,58.093,72.933,27.067,56.72,0.900,bicubic,-54.683,-26.227,+47\nmambaout_small.in1k,288,41.853,58.147,72.200,27.800,48.49,1.000,bicubic,-54.867,-27.160,+5\ncrossvit_15_dagger_408.in1k,408,41.787,58.213,72.013,27.987,28.50,1.000,bicubic,-54.633,-27.377,+98\nconvnextv2_tiny.fcmae_ft_in22k_in1k,288,41.760,58.240,75.147,24.853,28.64,1.000,bicubic,-55.080,-24.103,-34\nnextvit_small.bd_in1k_384,384,41.747,58.253,72.747,27.253,31.76,1.000,bicubic,-55.073,-26.693,-24\nxcit_small_24_p8_224.fb_in1k,224,41.653,58.347,70.867,29.133,47.63,1.000,bicubic,-54.777,-28.133,+91\nvit_large_r50_s32_224.augreg_in21k_ft_in1k,224,41.613,58.387,70.280,29.
720,328.99,0.900,bicubic,-55.177,-29.140,-20\nresnext101_32x4d.fb_swsl_ig1b_ft_in1k,224,41.600,58.400,71.853,28.147,44.18,0.875,bilinear,-54.840,-27.537,+82\nefficientnet_x_b5.sw_r448_e450_in1k,448,41.587,58.413,71.187,28.813,33.44,1.000,bicubic,-55.243,-28.263,-33\nxcit_small_24_p8_224.fb_dist_in1k,224,41.533,58.467,73.653,26.347,47.63,1.000,bicubic,-55.357,-25.827,-50\nresnetaa101d.sw_in12k_ft_in1k,288,41.520,58.480,72.493,27.507,44.57,1.000,bicubic,-55.150,-27.007,+15\nvit_mediumd_patch16_rope_reg1_gap_256.sbb_in1k,256,41.427,58.573,71.627,28.373,63.95,0.950,bicubic,-55.193,-27.883,+28\nconvnext_large.fb_in1k,224,41.213,58.787,73.280,26.720,197.77,0.875,bicubic,-55.547,-26.020,-21\nswinv2_base_window8_256.ms_in1k,256,41.160,58.840,72.560,27.440,87.92,0.900,bicubic,-55.390,-26.530,+49\ndeit3_small_patch16_224.fb_in22k_ft_in1k,224,41.080,58.920,72.000,28.000,22.06,1.000,bicubic,-55.590,-27.370,+12\ncaformer_s36.sail_in1k,224,41.027,58.973,70.920,29.080,39.30,1.000,bicubic,-55.693,-28.340,-11\nseresnext101d_32x8d.ah_in1k,288,40.933,59.067,70.720,29.280,93.59,1.000,bicubic,-55.797,-28.810,-17\ndavit_base.msft_in1k,224,40.760,59.240,72.533,27.467,87.95,0.950,bicubic,-56.170,-26.987,-66\nmaxvit_rmlp_tiny_rw_256.sw_in1k,256,40.720,59.280,70.867,29.133,29.15,0.950,bicubic,-55.700,-28.293,+80\nhgnet_base.ssld_in1k,224,40.693,59.307,70.667,29.333,71.58,0.965,bicubic,-56.387,-28.883,-110\nflexivit_base.1200ep_in1k,240,40.600,59.400,72.187,27.813,86.59,0.950,bicubic,-56.150,-27.173,-26\nregnety_1280.seer_ft_in1k,384,40.573,59.427,70.587,29.413,644.81,1.000,bicubic,-56.297,-28.943,-57\nresmlp_big_24_224.fb_in22k_ft_in1k,224,40.467,59.533,74.653,25.347,129.14,0.875,bicubic,-56.153,-24.627,+14\ntf_efficientnet_b6.ap_in1k,528,40.373,59.627,71.373,28.627,43.04,0.942,bicubic,-56.737,-28.127,-125\nconvformer_b36.sail_in1k,224,40.280,59.720,69.107,30.893,99.88,1.000,bicubic,-56.630,-30.293,-68\ndeit3_small_patch16_384.fb_in1k,384,40.227,59.773,69.960,30.040,22.21,1.000,bicubic,-55.9
93,-29.250,+149\nmobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k,384,40.133,59.867,72.453,27.547,32.59,0.950,bicubic,-56.737,-26.947,-62\ntresnet_l.miil_in1k_448,448,40.133,59.867,69.720,30.280,55.99,0.875,bilinear,-55.757,-29.360,+252\nhgnetv2_b5.ssld_stage2_ft_in1k,224,39.907,60.093,72.333,27.667,39.57,0.965,bicubic,-57.213,-27.087,-133\ndeit_base_patch16_384.fb_in1k,384,39.893,60.107,70.427,29.573,86.86,1.000,bicubic,-56.277,-28.823,+164\nflexivit_base.600ep_in1k,240,39.800,60.200,71.680,28.320,86.59,0.950,bicubic,-56.850,-27.650,-1\nmobilenetv4_hybrid_large.ix_e600_r384_in1k,448,39.747,60.253,72.520,27.480,37.76,1.000,bicubic,-56.833,-26.790,+20\nconvnextv2_tiny.fcmae_ft_in22k_in1k,224,39.720,60.280,72.587,27.413,28.64,0.875,bicubic,-56.760,-26.813,+43\nregnetz_040_h.ra3_in1k,320,39.707,60.293,70.960,29.040,28.94,1.000,bicubic,-56.993,-28.390,-22\nvit_little_patch16_reg1_gap_256.sbb_in12k_ft_in1k,256,39.693,60.307,71.133,28.867,22.52,0.950,bicubic,-56.647,-28.207,+100\nmobilenetv4_hybrid_large.e600_r384_in1k,448,39.680,60.320,72.853,27.147,37.76,1.000,bicubic,-57.020,-26.647,-22\nswin_s3_base_224.ms_in1k,224,39.627,60.373,70.613,29.387,71.13,0.900,bicubic,-56.643,-28.707,+119\nvit_base_patch16_rope_reg1_gap_256.sbb_in1k,256,39.613,60.387,70.467,29.533,86.43,0.950,bicubic,-56.827,-29.013,+56\nregnetz_d8.ra3_in1k,320,39.507,60.493,71.227,28.773,23.37,1.000,bicubic,-57.123,-28.223,-4\nvit_base_patch16_rope_mixed_ape_224.naver_in1k,224,39.467,60.533,71.587,28.413,86.59,0.900,bicubic,-56.973,-27.643,+52\nresnetrs350.tf_in1k,384,39.387,60.613,68.333,31.667,163.96,1.000,bicubic,-57.383,-31.007,-52\nflexivit_base.300ep_in1k,240,39.320,60.680,70.707,29.293,86.59,0.950,bicubic,-57.290,-28.623,+1\ngcvit_small.in1k,224,39.253,60.747,70.520,29.480,51.09,0.875,bicubic,-57.027,-28.630,+109\nvit_base_patch16_rope_mixed_224.naver_in1k,224,39.147,60.853,71.747,28.253,86.44,0.900,bicubic,-57.433,-27.493,+9\nvit_base_patch16_rope_ape_224.naver_in1k,224,39.067,60.933,70.747,29.253
,86.59,0.900,bicubic,-57.313,-28.553,+67\nmambaout_base_tall_rw.sw_e500_in1k,224,39.040,60.960,70.627,29.373,86.48,0.950,bicubic,-57.650,-28.653,-26\nseresnext101_32x8d.ah_in1k,288,39.040,60.960,68.987,31.013,93.57,1.000,bicubic,-57.750,-30.353,-61\ndeit3_base_patch16_224.fb_in1k,224,38.920,61.080,70.760,29.240,86.59,0.900,bicubic,-57.370,-28.500,+102\nmobilenetv4_conv_aa_large.e600_r384_in1k,480,38.907,61.093,70.707,29.293,32.59,1.000,bicubic,-57.553,-28.593,+37\nregnetz_d8_evos.ch_in1k,320,38.840,61.160,71.307,28.693,23.46,1.000,bicubic,-57.730,-27.823,+6\nvit_large_patch32_384.orig_in21k_ft_in1k,384,38.840,61.160,68.613,31.387,306.63,1.000,bicubic,-57.000,-30.517,+242\nvolo_d1_224.sail_in1k,224,38.680,61.320,70.120,29.880,26.63,0.960,bicubic,-57.640,-29.200,+92\nmvitv2_small.fb_in1k,224,38.560,61.440,69.960,30.040,34.87,0.900,bicubic,-57.790,-29.150,+74\nmambaout_base_short_rw.sw_e500_in1k,224,38.533,61.467,70.133,29.867,88.83,0.950,bicubic,-58.067,-29.407,-8\nvit_base_patch16_rope_224.naver_in1k,224,38.480,61.520,70.493,29.507,86.43,0.900,bicubic,-57.900,-28.687,+60\ncoat_lite_medium.in1k,224,38.440,61.560,71.093,28.907,44.57,0.900,bicubic,-58.030,-28.207,+29\nregnetz_040.ra3_in1k,320,38.440,61.560,70.080,29.920,27.12,1.000,bicubic,-58.270,-29.400,-47\nefficientnetv2_rw_m.agc_in1k,320,38.427,61.573,70.133,29.867,53.24,1.000,bicubic,-58.163,-29.277,-11\nresnetv2_101x1_bit.goog_in21k_ft_in1k,448,38.280,61.720,70.253,29.747,44.54,1.000,bilinear,-57.820,-28.937,+153\nxcit_small_12_p8_224.fb_dist_in1k,224,38.160,61.840,71.333,28.667,26.21,1.000,bicubic,-58.590,-28.087,-66\ndavit_small.msft_in1k,224,38.107,61.893,70.613,29.387,49.75,0.950,bicubic,-58.533,-28.607,-29\nmambaout_small_rw.sw_e450_in1k,288,38.093,61.907,70.000,30.000,48.50,1.000,bicubic,-58.857,-29.340,-117\nconvnext_tiny.in12k_ft_in1k,224,38.080,61.920,71.840,28.160,28.59,0.950,bicubic,-58.660,-27.580,-66\nrdnet_base.nv_in1k,224,38.013,61.987,70.000,30.000,87.45,0.900,bicubic,-58.677,-29.240,-43\nconvform
er_m36.sail_in1k,224,37.920,62.080,66.933,33.067,57.05,1.000,bicubic,-58.770,-32.137,-43\nresnet200d.ra2_in1k,320,37.893,62.107,68.333,31.667,64.69,1.000,bicubic,-58.837,-31.027,-65\ntf_efficientnet_b7.aa_in1k,600,37.787,62.213,69.427,30.573,66.35,0.949,bicubic,-58.753,-29.883,-1\nfocalnet_base_srf.ms_in1k,224,37.760,62.240,69.720,30.280,88.15,0.900,bicubic,-58.830,-29.590,-18\nmaxvit_small_tf_224.in1k,224,37.707,62.293,68.373,31.627,68.93,0.950,bicubic,-58.983,-31.207,-51\nswinv2_small_window8_256.ms_in1k,256,37.667,62.333,69.960,30.040,49.73,0.900,bicubic,-58.653,-29.240,+75\nvit_betwixt_patch16_rope_reg4_gap_256.sbb_in1k,256,37.627,62.373,68.867,31.133,60.23,0.950,bicubic,-58.733,-30.373,+50\ntf_efficientnetv2_s.in21k_ft_in1k,300,37.613,62.387,69.667,30.333,21.46,1.000,bicubic,-58.717,-29.573,+66\nfastvit_ma36.apple_dist_in1k,256,37.533,62.467,70.973,29.027,44.07,0.950,bicubic,-59.267,-28.477,-90\nfocalnet_base_lrf.ms_in1k,224,37.520,62.480,68.560,31.440,88.75,0.900,bicubic,-58.960,-30.670,+4\nseresnet152d.ra2_in1k,320,37.507,62.493,69.053,30.947,66.84,1.000,bicubic,-59.293,-30.277,-93\nhgnetv2_b5.ssld_stage1_in22k_in1k,224,37.480,62.520,69.827,30.173,39.57,0.965,bicubic,-59.550,-29.743,-154\ntwins_svt_large.in1k,224,37.347,62.653,69.200,30.800,99.27,0.900,bicubic,-58.893,-29.950,+88\nxcit_large_24_p16_224.fb_dist_in1k,224,37.307,62.693,71.413,28.587,189.10,1.000,bicubic,-59.513,-28.007,-98\nxcit_small_12_p8_224.fb_in1k,224,37.293,62.707,68.133,31.867,26.21,1.000,bicubic,-58.817,-31.037,+132\nefficientvit_b3.r288_in1k,288,37.267,62.733,69.600,30.400,48.65,1.000,bicubic,-59.383,-29.620,-49\nswin_s3_small_224.ms_in1k,224,37.053,62.947,68.147,31.853,49.74,0.900,bicubic,-59.187,-30.923,+86\neca_nfnet_l1.ra2_in1k,320,37.040,62.960,70.560,29.440,41.41,1.000,bicubic,-59.710,-28.730,-86\nmambaout_base.in1k,224,36.987,63.013,66.413,33.587,84.81,1.000,bicubic,-59.443,-32.767,+15\nvit_medium_patch16_rope_reg1_gap_256.sbb_in1k,256,36.960,63.040,68.893,31.107,38.74,0.950,bicu
bic,-59.460,-30.347,+16\nregnetz_d32.ra3_in1k,320,36.813,63.187,70.067,29.933,27.58,0.950,bicubic,-59.787,-29.313,-40\nconvnext_small.fb_in1k,288,36.787,63.213,70.947,29.053,50.22,1.000,bicubic,-59.773,-28.393,-25\nseresnextaa101d_32x8d.ah_in1k,224,36.773,63.227,66.347,33.653,93.59,0.950,bicubic,-59.557,-32.853,+52\npvt_v2_b4.in1k,224,36.747,63.253,68.680,31.320,62.56,0.900,bicubic,-59.633,-30.730,+28\nconvnext_base.fb_in1k,224,36.693,63.307,69.947,30.053,88.59,0.875,bicubic,-59.747,-29.183,+4\npit_b_distilled_224.in1k,224,36.653,63.347,67.813,32.187,74.79,0.900,bicubic,-59.597,-31.407,+72\nconvnext_tiny.fb_in22k_ft_in1k,224,36.640,63.360,69.973,30.027,28.59,0.875,bicubic,-59.600,-29.207,+72\nregnety_064.ra3_in1k,288,36.640,63.360,67.573,32.427,30.58,1.000,bicubic,-59.720,-31.657,+28\nefficientnetv2_rw_s.ra2_in1k,384,36.573,63.427,68.147,31.853,23.94,1.000,bicubic,-59.967,-31.213,-28\nvit_betwixt_patch16_reg4_gap_256.sbb_in1k,256,36.560,63.440,68.040,31.960,60.40,0.950,bicubic,-59.530,-31.120,+126\nvit_base_patch32_384.augreg_in21k_ft_in1k,384,36.520,63.480,69.147,30.853,88.30,1.000,bicubic,-59.980,-30.263,-21\nregnety_160.deit_in1k,288,36.480,63.520,68.813,31.187,83.59,1.000,bicubic,-59.900,-30.577,+18\nhiera_small_224.mae_in1k_ft_in1k,224,36.453,63.547,67.960,32.040,35.01,0.900,bicubic,-59.877,-31.210,+44\nfastvit_sa36.apple_dist_in1k,256,36.413,63.587,69.573,30.427,31.53,0.900,bicubic,-59.937,-29.767,+27\nmobilenetv4_hybrid_large.ix_e600_r384_in1k,384,36.373,63.627,69.067,30.933,37.76,0.950,bicubic,-60.087,-30.113,-12\nresnext101_64x4d.c1_in1k,288,36.347,63.653,65.960,34.040,83.46,1.000,bicubic,-59.783,-33.250,+107\nhgnet_small.ssld_in1k,288,36.320,63.680,70.787,29.213,24.36,1.000,bicubic,-60.610,-28.773,-153\npvt_v2_b5.in1k,224,36.280,63.720,68.400,31.600,81.96,0.900,bicubic,-60.070,-30.800,+28\nrexnetr_300.sw_in12k_ft_in1k,224,36.267,63.733,69.867,30.133,34.81,0.950,bicubic,-60.373,-29.503,-70\nmambaout_base_wide_rw.sw_e500_in1k,224,36.240,63.760,68.240,31.760,
94.45,0.950,bicubic,-60.400,-31.130,-70\ncait_xxs36_384.fb_dist_in1k,384,36.093,63.907,67.413,32.587,17.37,1.000,bicubic,-59.777,-31.777,+185\nnest_base_jx.goog_in1k,224,35.973,64.027,66.493,33.507,67.72,0.875,bicubic,-60.277,-32.817,+55\nefficientvit_l1.r224_in1k,224,35.947,64.053,68.000,32.000,52.65,1.000,bicubic,-60.383,-31.050,+35\nregnety_640.seer_ft_in1k,384,35.787,64.213,68.000,32.000,281.38,1.000,bicubic,-61.043,-31.370,-133\nswin_base_patch4_window7_224.ms_in1k,224,35.773,64.227,68.240,31.760,87.77,0.900,bicubic,-60.327,-31.040,+106\nmaxvit_tiny_rw_224.sw_in1k,224,35.733,64.267,65.693,34.307,29.06,0.950,bicubic,-60.507,-33.427,+56\nrepvgg_d2se.rvgg_in1k,320,35.720,64.280,66.680,33.320,133.33,1.000,bilinear,-60.950,-32.680,-86\ncoatnet_1_rw_224.sw_in1k,224,35.707,64.293,66.987,33.013,41.72,0.950,bicubic,-60.323,-32.063,+128\nmobilenetv4_hybrid_large.e600_r384_in1k,384,35.667,64.333,69.533,30.467,37.76,0.950,bicubic,-60.763,-29.857,-18\ntf_efficientnet_b3.ns_jft_in1k,300,35.667,64.333,67.760,32.240,12.23,0.904,bicubic,-60.713,-31.590,0\nvit_betwixt_patch16_reg1_gap_256.sbb_in1k,256,35.640,64.360,67.067,32.933,60.40,0.950,bicubic,-60.590,-32.183,+58\nedgenext_base.in21k_ft_in1k,256,35.627,64.373,69.467,30.533,18.51,0.950,bicubic,-60.733,-29.913,+2\ncs3se_edgenet_x.c2ns_in1k,320,35.547,64.453,67.640,32.360,50.72,1.000,bicubic,-60.893,-31.670,-26\nresnetrs200.tf_in1k,256,35.453,64.547,64.827,35.173,93.21,1.000,bicubic,-60.737,-34.403,+67\nhgnetv2_b4.ssld_stage2_ft_in1k,288,35.360,64.640,70.000,30.000,19.80,1.000,bicubic,-61.470,-29.400,-146\ntf_efficientnetv2_m.in1k,384,35.253,64.747,65.467,34.533,54.14,1.000,bicubic,-61.537,-33.863,-136\nresnetrs420.tf_in1k,320,35.187,64.813,63.453,36.547,191.89,1.000,bicubic,-61.433,-35.767,-83\nregnety_080.ra3_in1k,288,35.147,64.853,66.747,33.253,39.18,1.000,bicubic,-61.383,-32.503,-55\nsequencer2d_l.in1k,224,35.080,64.920,67.107,32.893,54.30,0.875,bicubic,-61.060,-32.053,+82\ntf_efficientnet_b6.aa_in1k,528,34.973,65.027,67.3
60,32.640,43.04,0.942,bicubic,-61.707,-32.010,-101\nmobilenetv4_hybrid_medium.ix_e550_r384_in1k,448,34.947,65.053,69.213,30.787,11.07,1.000,bicubic,-61.393,-30.047,+8\ngcvit_tiny.in1k,224,34.907,65.093,66.293,33.707,28.22,0.875,bicubic,-61.283,-32.897,+60\ninception_next_base.sail_in1k,224,34.853,65.147,66.227,33.773,86.67,0.950,bicubic,-61.697,-33.183,-63\ndm_nfnet_f2.dm_in1k,256,34.600,65.400,63.933,36.067,193.78,0.920,bicubic,-62.130,-35.407,-126\nvit_medium_patch16_reg1_gap_256.sbb_in1k,256,34.573,65.427,67.653,32.347,38.88,0.950,bicubic,-61.777,-31.817,-1\nfastvit_ma36.apple_in1k,256,34.533,65.467,67.120,32.880,44.07,0.950,bicubic,-61.957,-32.180,-55\nresnetrs270.tf_in1k,352,34.533,65.467,65.120,34.880,129.86,1.000,bicubic,-62.167,-34.270,-116\ntf_efficientnet_b5.ap_in1k,456,34.400,65.600,67.147,32.853,30.39,0.934,bicubic,-62.310,-32.303,-121\nxcit_tiny_12_p8_384.fb_dist_in1k,384,34.347,65.653,66.320,33.680,6.71,1.000,bicubic,-61.703,-32.810,+96\nmobilenetv4_conv_large.e600_r384_in1k,448,34.333,65.667,67.747,32.253,32.59,1.000,bicubic,-61.917,-31.473,+23\nmambaout_small.in1k,224,34.307,65.693,65.773,34.227,48.49,1.000,bicubic,-62.143,-33.587,-46\ndeit3_medium_patch16_224.fb_in1k,224,34.160,65.840,65.880,34.120,38.85,0.900,bicubic,-61.930,-32.990,+82\ncoat_small.in1k,224,34.147,65.853,66.013,33.987,21.69,0.900,bicubic,-61.773,-33.127,+135\nvit_base_patch16_224_miil.in21k_ft_in1k,224,34.120,65.880,64.547,35.453,86.54,0.875,bilinear,-62.350,-34.653,-56\nvit_medium_patch16_reg4_gap_256.sbb_in1k,256,34.027,65.973,67.293,32.707,38.88,0.950,bicubic,-62.363,-31.937,-30\nxcit_medium_24_p16_224.fb_dist_in1k,224,34.013,65.987,67.627,32.373,84.40,1.000,bicubic,-62.607,-31.673,-101\nseresnext101d_32x8d.ah_in1k,224,34.000,66.000,64.320,35.680,93.59,0.950,bicubic,-62.470,-35.040,-57\nresnet152d.ra2_in1k,320,33.840,66.160,65.507,34.493,60.21,1.000,bicubic,-62.540,-33.823,-31\ntresnet_m.miil_in1k_448,448,33.827,66.173,64.307,35.693,31.39,0.875,bilinear,-61.153,-34.523,+370\nres
mlp_big_24_224.fb_distilled_in1k,224,33.787,66.213,69.360,30.640,129.14,0.875,bicubic,-62.683,-29.950,-64\nregnetv_064.ra3_in1k,288,33.787,66.213,67.667,32.333,30.58,1.000,bicubic,-62.663,-31.533,-56\ncaformer_s18.sail_in1k,224,33.667,66.333,65.187,34.813,26.34,1.000,bicubic,-62.503,-33.883,+50\ncoatnet_rmlp_1_rw_224.sw_in1k,224,33.653,66.347,65.280,34.720,41.69,0.950,bicubic,-62.297,-33.980,+120\npvt_v2_b3.in1k,224,33.587,66.413,67.427,32.573,45.24,0.900,bicubic,-62.413,-31.533,+97\nfocalnet_small_srf.ms_in1k,224,33.547,66.453,65.853,34.147,49.89,0.900,bicubic,-62.543,-33.337,+69\nxcit_tiny_24_p16_384.fb_dist_in1k,384,33.547,66.453,65.267,34.733,12.12,1.000,bicubic,-62.443,-33.833,+100\nrdnet_small.nv_in1k,224,33.493,66.507,66.933,33.067,50.44,0.900,bicubic,-62.837,-32.297,-11\nconvformer_s18.sail_in22k_ft_in1k,224,33.480,66.520,68.240,31.760,26.77,1.000,bicubic,-63.130,-31.110,-110\ninception_next_small.sail_in1k,224,33.427,66.573,65.893,34.107,49.37,0.875,bicubic,-62.823,-33.337,+5\nregnetz_d8.ra3_in1k,256,33.387,66.613,66.560,33.440,23.37,0.940,bicubic,-63.013,-32.420,-51\ntiny_vit_11m_224.dist_in22k_ft_in1k,224,33.320,66.680,65.707,34.293,11.00,0.950,bicubic,-62.960,-33.483,-4\nresnetrs350.tf_in1k,288,33.280,66.720,62.040,37.960,163.96,1.000,bicubic,-63.390,-37.250,-128\nmobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k,320,33.240,66.760,67.840,32.160,11.07,1.000,bicubic,-62.930,-31.150,+35\nconvformer_s36.sail_in1k,224,33.200,66.800,63.560,36.440,40.01,1.000,bicubic,-63.380,-35.760,-103\ntwins_pcpvt_large.in1k,224,33.133,66.867,67.933,32.067,60.99,0.900,bicubic,-63.027,-31.257,+39\ntwins_svt_base.in1k,224,33.080,66.920,65.720,34.280,56.07,0.900,bicubic,-63.090,-33.480,+35\nresnext50_32x4d.fb_swsl_ig1b_ft_in1k,224,33.067,66.933,65.213,34.787,25.03,0.875,bilinear,-62.813,-33.907,+121\nresnet152.a1h_in1k,288,33.053,66.947,63.160,36.840,60.19,1.000,bicubic,-63.167,-36.120,+14\nnextvit_large.bd_in1k,224,33.027,66.973,67.000,33.000,57.87,0.950,bicubic,-63.343,-32.34
0,-46\nhrnet_w48_ssld.paddle_in1k,224,33.013,66.987,63.493,36.507,77.47,0.950,bilinear,-63.367,-35.887,-54\nmambaout_tiny.in1k,288,33.000,67.000,66.387,33.613,26.55,1.000,bicubic,-63.510,-32.743,-93\nfastvit_sa36.apple_in1k,256,33.000,67.000,65.960,34.040,31.53,0.900,bicubic,-63.240,-33.380,-1\nfocalnet_small_lrf.ms_in1k,224,32.947,67.053,67.160,32.840,50.34,0.900,bicubic,-63.243,-32.110,+20\ntiny_vit_21m_224.in1k,224,32.880,67.120,66.933,33.067,21.20,0.950,bicubic,-63.320,-32.197,+15\nregnetz_d8_evos.ch_in1k,256,32.853,67.147,65.373,34.627,23.46,0.950,bicubic,-63.417,-33.787,-14\npit_b_224.in1k,224,32.773,67.227,62.280,37.720,73.76,0.900,bicubic,-62.847,-36.770,+174\nswinv2_cr_small_ns_224.sw_in1k,224,32.747,67.253,65.720,34.280,49.70,0.900,bicubic,-63.473,-33.410,+7\nxcit_large_24_p16_224.fb_in1k,224,32.733,67.267,61.773,38.227,189.10,1.000,bicubic,-62.727,-36.857,+226\nregnety_320.seer_ft_in1k,384,32.720,67.280,65.947,34.053,145.05,1.000,bicubic,-63.620,-33.373,-39\nmobilevitv2_200.cvnets_in22k_ft_in1k_384,384,32.480,67.520,64.773,35.227,18.45,1.000,bicubic,-63.550,-34.397,+64\neca_nfnet_l1.ra2_in1k,256,32.440,67.560,65.333,34.667,41.41,0.900,bicubic,-63.810,-33.897,-15\necaresnet101d.miil_in1k,288,32.400,67.600,65.813,34.187,44.57,0.950,bicubic,-63.830,-33.497,-8\nresnext101_32x16d.fb_ssl_yfcc100m_ft_in1k,224,32.400,67.600,63.693,36.307,194.03,0.875,bilinear,-63.450,-35.517,+116\nswin_small_patch4_window7_224.ms_in1k,224,32.373,67.627,65.240,34.760,49.61,0.900,bicubic,-63.507,-33.840,+108\nconvnextv2_nano.fcmae_ft_in22k_in1k_384,384,32.333,67.667,66.293,33.707,15.62,1.000,bicubic,-64.017,-32.927,-56\nresnetaa101d.sw_in12k_ft_in1k,224,32.307,67.693,65.173,34.827,44.57,0.950,bicubic,-64.033,-34.227,-44\nnest_small_jx.goog_in1k,224,32.307,67.693,63.827,36.173,38.35,0.875,bicubic,-63.663,-35.333,+83\nefficientvit_b3.r256_in1k,256,32.293,67.707,65.720,34.280,48.65,1.000,bicubic,-64.057,-33.500,-51\nxception65.ra3_in1k,299,32.267,67.733,62.480,37.520,39.92,0.940,bicub
ic,-64.093,-36.700,-64\ntnt_b_patch16_224.in1k,224,32.200,67.800,65.240,34.760,65.43,0.900,bicubic,-63.610,-33.700,+117\ntf_efficientnetv2_b3.in21k_ft_in1k,300,32.107,67.893,65.747,34.253,14.36,0.900,bicubic,-64.123,-33.183,-13\nmobilevitv2_175.cvnets_in22k_ft_in1k_384,384,32.080,67.920,64.200,35.800,14.25,1.000,bicubic,-64.110,-34.930,+2\nresnetv2_152x2_bit.goog_teacher_in21k_ft_in1k,224,32.040,67.960,63.547,36.453,236.34,0.875,bicubic,-64.050,-35.763,+30\nconvnextv2_tiny.fcmae_ft_in1k,288,31.973,68.027,67.120,32.880,28.64,1.000,bicubic,-64.227,-32.120,-6\nregnetz_040_h.ra3_in1k,256,31.960,68.040,63.120,36.880,28.94,1.000,bicubic,-64.350,-36.130,-44\nconvnext_tiny_hnf.a2h_in1k,288,31.960,68.040,62.480,37.520,28.59,1.000,bicubic,-64.050,-36.600,+54\nseresnext101_32x8d.ah_in1k,224,31.907,68.093,61.253,38.747,93.57,0.950,bicubic,-64.483,-37.997,-83\nrexnetr_200.sw_in12k_ft_in1k,288,31.880,68.120,67.547,32.453,16.52,1.000,bicubic,-64.350,-31.693,-22\nefficientvit_b3.r224_in1k,224,31.880,68.120,64.493,35.507,48.65,0.950,bicubic,-64.140,-34.427,+50\nmobilenetv4_conv_medium.e250_r384_in12k_ft_in1k,384,31.800,68.200,67.120,32.880,9.72,0.950,bicubic,-64.160,-32.050,+71\nswinv2_tiny_window16_256.ms_in1k,256,31.787,68.213,65.587,34.413,28.35,0.900,bicubic,-64.193,-33.513,+62\nconvnext_nano.r384_in12k_ft_in1k,384,31.707,68.293,67.760,32.240,15.59,1.000,bicubic,-64.283,-31.430,+52\nresnext101_64x4d.tv_in1k,224,31.640,68.360,63.747,36.253,83.46,0.875,bilinear,-64.390,-35.363,+40\nmaxvit_nano_rw_256.sw_in1k,256,31.613,68.387,63.693,36.307,15.45,0.950,bicubic,-64.317,-35.587,+73\nresnest101e.in1k,256,31.587,68.413,64.067,35.933,48.28,0.875,bilinear,-64.263,-35.103,+92\nvit_base_patch16_224.orig_in21k_ft_in1k,224,31.587,68.413,61.147,38.853,86.57,0.900,bicubic,-63.753,-37.753,+228\nregnetz_d32.ra3_in1k,256,31.520,68.480,64.973,35.027,27.58,0.950,bicubic,-64.680,-34.137,-20\nhgnetv2_b4.ssld_stage1_in22k_in1k,288,31.480,68.520,66.840,33.160,19.80,1.000,bicubic,-65.240,-32.680,-199\nm
obilenetv4_hybrid_medium.ix_e550_r384_in1k,384,31.427,68.573,65.840,34.160,11.07,0.950,bicubic,-64.633,-33.400,+23\nconvnext_small.fb_in1k,224,31.373,68.627,65.987,34.013,50.22,0.875,bicubic,-64.827,-33.303,-20\nmaxxvit_rmlp_nano_rw_256.sw_in1k,256,31.360,68.640,64.493,35.507,16.78,0.950,bicubic,-64.670,-34.647,+30\nfastvit_sa24.apple_dist_in1k,256,31.347,68.653,64.720,35.280,21.55,0.900,bicubic,-64.813,-34.430,-9\nconvnext_nano.in12k_ft_in1k,288,31.293,68.707,67.333,32.667,15.59,1.000,bicubic,-64.697,-31.977,+40\nnextvit_base.bd_in1k,224,31.253,68.747,65.173,34.827,44.82,0.950,bicubic,-64.997,-33.957,-49\nregnety_320.tv2_in1k,224,31.213,68.787,64.520,35.480,145.05,0.965,bicubic,-64.867,-34.720,+14\ncait_s24_224.fb_dist_in1k,224,31.187,68.813,64.253,35.747,46.92,1.000,bicubic,-65.213,-34.897,-106\nmobilenetv4_conv_aa_large.e600_r384_in1k,384,31.160,68.840,65.307,34.693,32.59,0.950,bicubic,-64.990,-34.073,-9\ntf_efficientnet_b5.ra_in1k,456,31.120,68.880,64.520,35.480,30.39,0.934,bicubic,-65.270,-34.690,-106\nregnetz_c16_evos.ch_in1k,320,31.080,68.920,65.933,34.067,13.49,0.950,bicubic,-65.070,-33.227,-13\nregnetv_040.ra3_in1k,288,31.013,68.987,64.080,35.920,20.64,1.000,bicubic,-65.217,-35.220,-44\ncrossvit_base_240.in1k,240,31.013,68.987,60.960,39.040,105.03,0.875,bicubic,-64.517,-37.860,+153\nmaxvit_rmlp_nano_rw_256.sw_in1k,256,30.987,69.013,63.120,36.880,15.50,0.950,bicubic,-65.013,-36.070,+30\nmaxvit_tiny_tf_224.in1k,224,30.920,69.080,63.027,36.973,30.92,0.950,bicubic,-65.220,-36.223,-13\nseresnet152d.ra2_in1k,256,30.907,69.093,63.173,36.827,66.84,0.950,bicubic,-65.523,-35.957,-123\nregnetz_040.ra3_in1k,256,30.893,69.107,62.653,37.347,27.12,1.000,bicubic,-65.577,-36.507,-140\nswinv2_cr_small_224.sw_in1k,224,30.773,69.227,61.853,38.147,49.70,0.900,bicubic,-65.317,-37.457,+1\nvit_base_patch32_clip_224.laion2b_ft_in12k_in1k,224,30.720,69.280,62.320,37.680,88.22,0.900,bicubic,-65.410,-36.910,-13\nxcit_small_24_p16_224.fb_dist_in1k,224,30.600,69.400,64.533,35.467,47.67,
1.000,bicubic,-65.620,-34.697,-45\nefficientnet_b4.ra2_in1k,384,30.587,69.413,64.347,35.653,19.34,1.000,bicubic,-65.583,-34.933,-30\nhiera_tiny_224.mae_in1k_ft_in1k,224,30.560,69.440,62.253,37.747,27.91,0.900,bicubic,-65.040,-36.627,+123\ncrossvit_18_240.in1k,240,30.413,69.587,61.533,38.467,43.27,0.875,bicubic,-65.077,-37.577,+162\nsequencer2d_m.in1k,224,30.360,69.640,62.933,37.067,38.31,0.875,bicubic,-65.460,-36.177,+70\nrepvit_m2_3.dist_450e_in1k,224,30.320,69.680,63.627,36.373,23.69,0.950,bicubic,-66.020,-35.423,-95\nmaxxvitv2_nano_rw_256.sw_in1k,256,30.227,69.773,63.773,36.227,23.70,0.950,bicubic,-65.663,-35.347,+53\ncrossvit_18_dagger_240.in1k,240,30.200,69.800,61.613,38.387,44.27,0.875,bicubic,-65.420,-37.067,+114\nregnety_040.ra3_in1k,288,30.173,69.827,63.387,36.613,20.65,1.000,bicubic,-65.827,-35.813,+14\nhgnet_small.ssld_in1k,224,30.133,69.867,64.467,35.533,24.36,0.965,bicubic,-66.497,-34.953,-195\nmambaout_small_rw.sw_e450_in1k,224,30.107,69.893,62.493,37.507,48.50,1.000,bicubic,-66.413,-36.887,-160\nrexnet_300.nav_in1k,224,30.053,69.947,64.187,35.813,34.71,0.875,bicubic,-65.787,-34.973,+61\nxcit_medium_24_p16_224.fb_in1k,224,30.027,69.973,59.187,40.813,84.40,1.000,bicubic,-65.483,-39.843,+140\ndm_nfnet_f1.dm_in1k,224,30.000,70.000,57.947,42.053,132.63,0.910,bicubic,-66.350,-41.443,-109\nmvitv2_tiny.fb_in1k,224,29.867,70.133,63.747,36.253,24.17,0.900,bicubic,-66.013,-35.273,+48\ntwins_pcpvt_base.in1k,224,29.840,70.160,64.587,35.413,43.83,0.900,bicubic,-65.940,-34.593,+71\nconvnext_tiny.fb_in1k,288,29.787,70.213,64.920,35.080,28.59,1.000,bicubic,-65.993,-34.210,+69\nresnet50.fb_swsl_ig1b_ft_in1k,224,29.760,70.240,63.840,36.160,25.56,0.875,bilinear,-65.710,-35.170,+154\nconvnextv2_nano.fcmae_ft_in22k_in1k,288,29.733,70.267,65.653,34.347,15.62,1.000,bicubic,-66.337,-33.567,-16\ntf_efficientnet_b5.aa_in1k,456,29.707,70.293,62.733,37.267,30.39,0.934,bicubic,-66.773,-36.397,-164\ncait_xxs24_384.fb_dist_in1k,384,29.653,70.347,63.613,36.387,12.03,1.000,bicubic,-65
.617,-35.397,+205\nhgnetv2_b3.ssld_stage1_in22k_in1k,288,29.640,70.360,65.920,34.080,16.29,1.000,bicubic,-66.630,-33.400,-90\nresnet200d.ra2_in1k,256,29.640,70.360,61.387,38.613,64.69,0.950,bicubic,-66.670,-37.913,-97\ncs3sedarknet_x.c2ns_in1k,288,29.627,70.373,61.587,38.413,35.40,1.000,bicubic,-66.413,-37.553,-13\nswin_tiny_patch4_window7_224.ms_in22k_ft_in1k,224,29.547,70.453,66.493,33.507,28.29,0.900,bicubic,-65.943,-32.457,+133\nmobilevitv2_150.cvnets_in22k_ft_in1k_384,384,29.547,70.453,61.560,38.440,10.59,1.000,bicubic,-66.163,-37.590,+76\nresnet152.a1_in1k,288,29.493,70.507,57.027,42.973,60.19,1.000,bicubic,-65.997,-42.113,+141\ndeit_base_distilled_patch16_224.fb_in1k,224,29.480,70.520,64.093,35.907,87.34,0.900,bicubic,-66.620,-35.177,-35\nfasternet_l.in1k,224,29.480,70.520,62.187,37.813,93.47,1.000,bicubic,-66.820,-36.943,-102\nconvnextv2_tiny.fcmae_ft_in1k,224,29.467,70.533,63.893,36.107,28.64,0.875,bicubic,-66.213,-35.257,+79\nconvit_base.fb_in1k,224,29.387,70.613,61.493,38.507,86.54,0.875,bicubic,-66.163,-37.597,+105\nvit_relpos_base_patch16_clsgap_224.sw_in1k,224,29.360,70.640,62.160,37.840,86.43,0.900,bicubic,-66.400,-37.050,+58\nconvnext_tiny.fb_in22k_ft_in1k,288,29.307,70.693,56.560,43.440,28.59,1.000,bicubic,-65.563,-42.340,+289\nmobilenetv4_conv_large.e600_r384_in1k,384,29.147,70.853,63.680,36.320,32.59,0.950,bicubic,-66.663,-35.510,+45\ncs3se_edgenet_x.c2ns_in1k,256,29.080,70.920,60.933,39.067,50.72,0.950,bicubic,-66.890,-38.107,+6\ntf_efficientnetv2_s.in1k,384,29.013,70.987,61.160,38.840,21.46,1.000,bicubic,-67.337,-38.010,-128\nfastvit_sa24.apple_in1k,256,29.000,71.000,62.320,37.680,21.55,0.900,bicubic,-66.910,-36.840,+16\nregnety_160.deit_in1k,224,28.987,71.013,61.480,38.520,83.59,0.950,bicubic,-67.053,-37.630,-29\nresnext101_32x8d.fb_ssl_yfcc100m_ft_in1k,224,28.960,71.040,60.787,39.213,88.79,0.875,bilinear,-66.530,-38.303,+121\ndavit_tiny.msft_in1k,224,28.893,71.107,63.627,36.373,28.36,0.950,bicubic,-66.767,-35.593,+75\nhgnetv2_b4.ssld_stage2_ft
_in1k,224,28.827,71.173,63.507,36.493,19.80,0.965,bicubic,-67.693,-35.693,-192\nresnet101d.ra2_in1k,320,28.813,71.187,61.920,38.080,44.57,1.000,bicubic,-67.477,-37.270,-114\nvit_little_patch16_reg4_gap_256.sbb_in1k,256,28.800,71.200,62.400,37.600,22.52,0.950,bicubic,-66.940,-36.620,+51\nedgenext_base.usi_in1k,320,28.787,71.213,64.720,35.280,18.51,1.000,bicubic,-67.933,-34.640,-261\nvit_relpos_medium_patch16_cls_224.sw_in1k,224,28.733,71.267,60.040,39.960,38.76,0.900,bicubic,-66.757,-38.740,+118\nresnetrs152.tf_in1k,320,28.720,71.280,59.973,40.027,86.62,1.000,bicubic,-67.850,-39.487,-209\nregnety_160.tv2_in1k,224,28.693,71.307,61.280,38.720,83.59,0.965,bicubic,-67.287,-38.000,-12\nxcit_tiny_24_p8_224.fb_in1k,224,28.653,71.347,60.160,39.840,12.11,1.000,bicubic,-67.027,-38.900,+62\nresnetv2_101.a1h_in1k,288,28.547,71.453,59.613,40.387,44.54,1.000,bicubic,-67.603,-39.477,-69\nxcit_tiny_24_p8_224.fb_dist_in1k,224,28.520,71.480,61.400,38.600,12.11,1.000,bicubic,-67.280,-37.750,+31\nvit_relpos_medium_patch16_224.sw_in1k,224,28.480,71.520,61.520,38.480,38.75,0.900,bicubic,-66.990,-37.440,+123\ncrossvit_15_dagger_240.in1k,240,28.453,71.547,60.093,39.907,28.21,0.875,bicubic,-67.217,-38.967,+61\nhgnetv2_b3.ssld_stage2_ft_in1k,288,28.400,71.600,65.013,34.987,16.29,1.000,bicubic,-67.950,-34.237,-153\nefficientvit_b2.r288_in1k,288,28.400,71.600,63.907,36.093,24.33,1.000,bicubic,-67.570,-35.303,-14\ncs3edgenet_x.c2_in1k,288,28.360,71.640,61.040,38.960,47.82,1.000,bicubic,-67.700,-38.260,-48\nxcit_small_24_p16_224.fb_in1k,224,28.320,71.680,59.000,41.000,47.67,1.000,bicubic,-67.220,-39.760,+87\nxception65p.ra3_in1k,299,28.307,71.693,59.480,40.520,39.82,0.940,bicubic,-67.903,-39.700,-98\npvt_v2_b2_li.in1k,224,28.253,71.747,61.520,38.480,22.55,0.900,bicubic,-67.337,-37.510,+71\nefficientnet_b4.ra2_in1k,320,28.240,71.760,61.920,38.080,19.34,0.875,bicubic,-67.500,-37.230,+33\nregnety_080.ra3_in1k,224,28.227,71.773,59.693,40.307,39.18,0.950,bicubic,-67.803,-39.567,-42\nregnety_064.ra3_in
1k,224,28.213,71.787,59.507,40.493,30.58,0.950,bicubic,-67.657,-39.583,+6\nrepvit_m2_3.dist_300e_in1k,224,28.200,71.800,61.387,38.613,23.69,0.950,bicubic,-67.980,-37.953,-94\nresnet101.a1h_in1k,288,28.120,71.880,59.640,40.360,44.55,1.000,bicubic,-67.920,-39.570,-53\nese_vovnet57b.ra4_e3600_r256_in1k,320,28.080,71.920,59.893,40.107,38.61,1.000,bicubic,-68.190,-39.587,-131\ncs3sedarknet_x.c2ns_in1k,256,28.040,71.960,60.333,39.667,35.40,0.887,bicubic,-67.590,-38.647,+56\nefficientnetv2_rw_s.ra2_in1k,288,27.973,72.027,60.347,39.653,23.94,1.000,bicubic,-68.187,-38.863,-88\nmobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k,256,27.733,72.267,62.693,37.307,11.07,0.950,bicubic,-68.097,-36.397,+5\nrdnet_tiny.nv_in1k,224,27.733,72.267,62.027,37.973,23.86,0.900,bicubic,-67.907,-37.003,+51\nefficientformerv2_l.snap_dist_in1k,224,27.733,72.267,61.347,38.653,26.32,0.950,bicubic,-68.327,-37.783,-61\ncoat_lite_small.in1k,224,27.667,72.333,58.547,41.453,19.84,0.900,bicubic,-67.893,-40.333,+63\nefficientformer_l7.snap_dist_in1k,224,27.653,72.347,62.640,37.360,82.23,0.950,bicubic,-68.457,-36.630,-82\nflexivit_small.1200ep_in1k,240,27.560,72.440,58.280,41.720,22.06,0.950,bicubic,-68.000,-40.650,+59\nresnetaa50d.sw_in12k_ft_in1k,288,27.520,72.480,62.067,37.933,25.58,1.000,bicubic,-68.290,-37.203,+5\nregnetv_064.ra3_in1k,224,27.347,72.653,61.120,38.880,30.58,0.950,bicubic,-68.743,-38.030,-79\npvt_v2_b2.in1k,224,27.307,72.693,60.587,39.413,25.36,0.900,bicubic,-68.193,-38.533,+81\nvit_base_patch16_384.augreg_in1k,384,27.293,72.707,56.507,43.493,86.86,1.000,bicubic,-67.667,-42.563,+222\nresnext101_32x8d.tv2_in1k,224,27.240,72.760,59.347,40.653,88.79,0.965,bilinear,-68.750,-39.703,-46\ndeit_base_patch16_224.fb_in1k,224,27.173,72.827,58.640,41.360,86.57,0.900,bicubic,-68.267,-40.200,+102\nregnetz_c16.ra3_in1k,320,27.160,72.840,62.373,37.627,13.46,1.000,bicubic,-68.820,-36.917,-42\nconvnextv2_nano.fcmae_ft_in22k_in1k,224,27.147,72.853,62.293,37.707,15.62,0.875,bicubic,-68.383,-36.827,+63\nregne
ty_080_tv.tv2_in1k,224,27.147,72.853,61.360,38.640,39.38,0.965,bicubic,-68.723,-37.790,-12\nresnetv2_50x1_bit.goog_in21k_ft_in1k,448,27.120,72.880,62.613,37.387,25.55,1.000,bilinear,-67.910,-36.447,+198\ncoatnet_bn_0_rw_224.sw_in1k,224,27.053,72.947,60.840,39.160,27.44,0.950,bicubic,-68.647,-38.140,+23\nxcit_small_12_p16_224.fb_dist_in1k,224,27.000,73.000,59.813,40.187,26.25,1.000,bicubic,-69.020,-39.327,-64\ncoatnet_0_rw_224.sw_in1k,224,26.973,73.027,59.107,40.893,27.44,0.950,bicubic,-68.487,-39.613,+91\nflexivit_small.600ep_in1k,240,26.840,73.160,56.960,43.040,22.06,0.950,bicubic,-68.860,-42.100,+19\nvit_relpos_base_patch16_224.sw_in1k,224,26.787,73.213,60.467,39.533,86.43,0.900,bicubic,-68.803,-38.523,+41\nvit_small_patch16_224.augreg_in21k_ft_in1k,224,26.787,73.213,58.880,41.120,22.05,0.900,bicubic,-68.623,-40.140,+102\ndm_nfnet_f0.dm_in1k,256,26.773,73.227,58.067,41.933,71.49,0.900,bicubic,-69.577,-41.153,-186\ngcvit_xtiny.in1k,224,26.707,73.293,60.600,39.400,19.98,0.875,bicubic,-68.883,-38.430,+37\nswin_s3_tiny_224.ms_in1k,224,26.680,73.320,60.293,39.707,28.33,0.900,bicubic,-68.510,-38.657,+147\nnextvit_small.bd_in1k,224,26.640,73.360,61.227,38.773,31.76,0.950,bicubic,-69.240,-37.863,-28\nmobilenetv4_conv_large.e500_r256_in1k,320,26.587,73.413,61.080,38.920,32.59,1.000,bicubic,-69.303,-37.960,-36\nsequencer2d_s.in1k,224,26.373,73.627,60.253,39.747,27.65,0.875,bicubic,-69.617,-38.837,-62\ntresnet_v2_l.miil_in21k_ft_in1k,224,26.320,73.680,59.507,40.493,46.17,0.875,bilinear,-69.850,-39.643,-125\ncs3edgenet_x.c2_in1k,256,26.267,73.733,59.067,40.933,47.82,0.887,bicubic,-69.243,-39.693,+56\nmobilevitv2_200.cvnets_in22k_ft_in1k,256,26.227,73.773,58.707,41.293,18.45,0.888,bicubic,-68.943,-40.213,+147\nhgnet_small.paddle_in1k,288,26.187,73.813,60.040,39.960,24.36,1.000,bicubic,-69.463,-39.070,+18\nswinv2_tiny_window8_256.ms_in1k,256,26.173,73.827,60.480,39.520,28.35,0.900,bicubic,-69.327,-38.510,+55\nregnetx_320.tv2_in1k,224,26.160,73.840,57.733,42.267,107.81,0.965,bic
ubic,-69.830,-41.557,-70\ncoatnet_rmlp_nano_rw_224.sw_in1k,224,26.133,73.867,60.387,39.613,15.15,0.900,bicubic,-69.307,-38.453,+78\nnfnet_l0.ra2_in1k,288,26.080,73.920,61.547,38.453,35.07,1.000,bicubic,-70.050,-37.703,-117\nresnext101_64x4d.c1_in1k,224,26.067,73.933,56.227,43.773,83.46,0.950,bicubic,-69.643,-43.023,0\nregnety_032.ra_in1k,288,26.053,73.947,60.853,39.147,19.44,1.000,bicubic,-69.937,-38.377,-75\ndeit3_small_patch16_224.fb_in1k,224,26.013,73.987,54.320,45.680,22.06,0.900,bicubic,-68.977,-44.150,+185\nresnet152d.ra2_in1k,256,26.000,74.000,58.040,41.960,60.21,0.950,bicubic,-70.060,-41.170,-102\nfbnetv3_g.ra2_in1k,288,25.920,74.080,60.907,39.093,16.62,0.950,bilinear,-69.580,-38.073,+47\ntf_efficientnet_b4.aa_in1k,380,25.920,74.080,59.973,40.027,19.34,0.922,bicubic,-70.050,-39.197,-67\ncoatnext_nano_rw_224.sw_in1k,224,25.893,74.107,59.147,40.853,14.70,0.900,bicubic,-69.517,-39.853,+83\ntf_efficientnet_b4.ap_in1k,380,25.813,74.187,59.573,40.427,19.34,0.922,bicubic,-70.357,-39.617,-141\nflexivit_small.300ep_in1k,240,25.773,74.227,56.813,43.187,22.06,0.950,bicubic,-69.747,-42.147,+40\necaresnet101d.miil_in1k,224,25.760,74.240,58.680,41.320,44.57,0.875,bicubic,-69.780,-40.370,+27\ncoat_mini.in1k,224,25.720,74.280,57.680,42.320,10.34,0.900,bicubic,-69.270,-41.100,+176\ninception_next_tiny.sail_in1k,224,25.693,74.307,59.480,40.520,28.06,0.875,bicubic,-69.777,-39.850,+59\nmobilevitv2_175.cvnets_in22k_ft_in1k,256,25.680,74.320,58.000,42.000,14.25,0.888,bicubic,-69.570,-40.940,+114\nmobilenetv4_hybrid_medium.ix_e550_r256_in1k,320,25.667,74.333,60.067,39.933,11.07,1.000,bicubic,-70.133,-39.143,-35\necaresnet50t.ra2_in1k,320,25.667,74.333,59.760,40.240,25.57,0.950,bicubic,-69.853,-39.320,+32\nfasternet_m.in1k,224,25.667,74.333,58.840,41.160,53.52,1.000,bicubic,-70.363,-40.320,-102\nvisformer_small.in1k,224,25.640,74.360,58.573,41.427,40.22,0.900,bicubic,-69.850,-40.327,+45\nmambaout_tiny.in1k,224,25.613,74.387,58.800,41.200,26.55,1.000,bicubic,-70.427,-40.310,-107\nre
xnetr_200.sw_in12k_ft_in1k,224,25.507,74.493,60.373,39.627,16.52,0.950,bicubic,-70.293,-38.797,-39\nvit_small_patch16_384.augreg_in1k,384,25.413,74.587,57.147,42.853,22.20,1.000,bicubic,-69.877,-41.773,+97\ncrossvit_15_240.in1k,240,25.400,74.600,57.320,42.680,27.53,0.875,bicubic,-69.720,-41.670,+134\nconvformer_s18.sail_in1k,224,25.387,74.613,57.600,42.400,26.77,1.000,bicubic,-70.553,-41.300,-76\nvit_relpos_medium_patch16_rpn_224.sw_in1k,224,25.267,74.733,58.267,41.733,38.73,0.900,bicubic,-70.253,-40.743,+24\nvit_small_patch16_rope_mixed_224.naver_in1k,224,25.227,74.773,52.453,47.547,21.99,0.900,bicubic,-69.473,-46.127,+235\nresnetv2_50x1_bit.goog_distilled_in1k,224,25.133,74.867,59.387,40.613,25.55,0.875,bicubic,-70.967,-39.663,-136\nhalo2botnet50ts_256.a1h_in1k,256,25.133,74.867,56.307,43.693,22.64,0.950,bicubic,-70.287,-42.723,+62\nresnetrs270.tf_in1k,256,25.040,74.960,54.307,45.693,129.86,1.000,bicubic,-71.350,-45.023,-243\nedgenext_base.usi_in1k,256,25.027,74.973,60.680,39.320,18.51,0.950,bicubic,-71.373,-38.700,-249\nxcit_small_12_p16_224.fb_in1k,224,25.027,74.973,55.707,44.293,26.25,1.000,bicubic,-70.403,-43.273,+55\nvit_small_patch16_rope_224.naver_in1k,224,25.000,75.000,52.760,47.240,21.98,0.900,bicubic,-69.740,-46.060,+223\nvit_srelpos_medium_patch16_224.sw_in1k,224,24.987,75.013,58.147,41.853,38.74,0.900,bicubic,-70.253,-40.913,+98\nresnet101.a1_in1k,288,24.947,75.053,54.973,45.027,44.55,1.000,bicubic,-70.543,-43.827,+31\nhgnet_tiny.ssld_in1k,288,24.920,75.080,61.427,38.573,14.74,1.000,bicubic,-71.310,-37.993,-188\nconvit_small.fb_in1k,224,24.813,75.187,56.880,43.120,27.78,0.875,bicubic,-70.377,-42.050,+104\nresnet152.a2_in1k,288,24.800,75.200,53.893,46.107,60.19,1.000,bicubic,-70.690,-44.957,+30\nregnetz_c16_evos.ch_in1k,256,24.733,75.267,59.653,40.347,13.49,0.950,bicubic,-70.967,-39.427,-34\nvit_base_patch16_rpn_224.sw_in1k,224,24.720,75.280,57.987,42.013,86.54,0.900,bicubic,-70.670,-40.963,+56\ntnt_s_patch16_224.in1k,224,24.627,75.373,57.600,42.400,23.
77,0.900,bicubic,-70.483,-41.200,+121\ngc_efficientnetv2_rw_t.agc_in1k,288,24.587,75.413,57.507,42.493,13.68,1.000,bicubic,-71.153,-41.613,-46\ntnt_s_legacy_patch16_224.in1k,224,24.573,75.427,57.933,42.067,23.76,0.900,bicubic,-70.487,-40.997,+133\nvit_small_patch16_rope_mixed_ape_224.naver_in1k,224,24.573,75.427,51.787,48.213,22.06,0.900,bicubic,-70.007,-47.113,+261\nhgnetv2_b4.ssld_stage1_in22k_in1k,224,24.547,75.453,60.387,39.613,19.80,0.965,bicubic,-71.923,-38.823,-285\nefficientvit_b2.r256_in1k,256,24.533,75.467,59.120,40.880,24.33,1.000,bicubic,-71.137,-39.700,-33\neca_nfnet_l0.ra2_in1k,288,24.507,75.493,59.600,40.400,24.14,1.000,bicubic,-71.463,-39.520,-108\nregnetv_040.ra3_in1k,224,24.373,75.627,57.587,42.413,20.64,0.950,bicubic,-71.387,-41.453,-59\nswinv2_cr_tiny_ns_224.sw_in1k,224,24.280,75.720,58.347,41.653,28.33,0.900,bicubic,-71.050,-40.553,+62\nswiftformer_l3.dist_in1k,224,24.280,75.720,57.600,42.400,28.49,0.950,bicubic,-71.670,-41.580,-103\ntf_efficientnetv2_b3.in21k_ft_in1k,240,24.240,75.760,55.840,44.160,14.36,0.900,bicubic,-71.100,-43.130,+58\nxception41p.ra3_in1k,299,24.240,75.760,54.560,45.440,26.91,0.940,bicubic,-71.320,-44.550,-19\nefficientnetv2_rw_t.ra2_in1k,288,24.227,75.773,57.160,42.840,13.65,1.000,bicubic,-71.383,-42.030,-27\ntf_efficientnet_b2.ns_jft_in1k,260,24.213,75.787,57.547,42.453,9.11,0.890,bicubic,-71.527,-41.573,-60\nxcit_tiny_12_p16_384.fb_dist_in1k,384,24.120,75.880,56.920,43.080,6.72,1.000,bicubic,-71.040,-42.080,+92\nconvnext_tiny.fb_in1k,224,24.107,75.893,59.107,40.893,28.59,0.875,bicubic,-71.433,-39.943,-13\nconvnext_nano.in12k_ft_in1k,224,24.067,75.933,60.373,39.627,15.59,0.950,bicubic,-71.643,-38.887,-56\ntwins_svt_small.in1k,224,24.013,75.987,57.320,42.680,24.06,0.900,bicubic,-71.197,-41.540,+79\ncs3darknet_x.c2ns_in1k,288,23.987,76.013,57.573,42.427,35.05,1.000,bicubic,-71.883,-41.527,-93\neva02_tiny_patch14_336.mim_in22k_ft_in1k,336,23.987,76.013,55.507,44.493,5.76,1.000,bicubic,-70.973,-43.363,+141\nconvnext_nano_ols.
d1h_in1k,288,23.973,76.027,56.493,43.507,15.65,1.000,bicubic,-71.137,-42.407,+103\nvit_small_r26_s32_224.augreg_in21k_ft_in1k,224,23.933,76.067,55.947,44.053,36.43,0.900,bicubic,-71.707,-43.003,-43\nvit_relpos_small_patch16_224.sw_in1k,224,23.907,76.093,57.733,42.267,21.98,0.900,bicubic,-71.283,-41.307,+77\necaresnet50d.miil_in1k,288,23.800,76.200,58.547,41.453,25.58,0.950,bicubic,-71.690,-40.223,-2\nresnext101_32x4d.fb_ssl_yfcc100m_ft_in1k,224,23.800,76.200,57.227,42.773,44.18,0.875,bilinear,-71.680,-41.673,+5\ncs3sedarknet_l.c2ns_in1k,288,23.747,76.253,58.280,41.720,21.91,0.950,bicubic,-71.573,-40.540,+48\ncoatnet_nano_rw_224.sw_in1k,224,23.720,76.280,56.693,43.307,15.14,0.900,bicubic,-71.520,-42.287,+67\npoolformer_m48.sail_in1k,224,23.680,76.320,56.853,43.147,73.47,0.950,bicubic,-71.960,-42.337,-47\nconvnext_nano.d1h_in1k,288,23.653,76.347,55.707,44.293,15.59,1.000,bicubic,-71.697,-43.223,+37\nvit_small_patch32_384.augreg_in21k_ft_in1k,384,23.547,76.453,56.840,43.160,22.92,1.000,bicubic,-71.523,-42.110,+103\nresnet152.a1h_in1k,224,23.533,76.467,53.093,46.907,60.19,0.950,bicubic,-72.357,-46.017,-111\ntiny_vit_11m_224.in1k,224,23.507,76.493,58.640,41.360,11.00,0.950,bicubic,-72.043,-40.230,-36\nmobilevitv2_150.cvnets_in22k_ft_in1k,256,23.507,76.493,55.187,44.813,10.59,0.888,bicubic,-71.643,-43.643,+77\ntf_efficientnet_b5.in1k,456,23.440,76.560,57.480,42.520,30.39,0.934,bicubic,-72.640,-41.820,-173\ncrossvit_small_240.in1k,240,23.440,76.560,56.400,43.600,26.86,0.875,bicubic,-71.410,-42.620,+155\nnest_tiny_jx.goog_in1k,224,23.293,76.707,56.120,43.880,17.06,0.875,bicubic,-71.957,-42.680,+52\nseresnext50_32x4d.racm_in1k,288,23.267,76.733,57.360,42.640,27.56,0.950,bicubic,-72.443,-41.670,-75\nhrnet_w18_ssld.paddle_in1k,288,23.240,76.760,55.067,44.933,21.30,1.000,bilinear,-72.740,-44.083,-144\nfocalnet_tiny_srf.ms_in1k,224,23.227,76.773,58.160,41.840,28.43,0.900,bicubic,-72.263,-40.830,-19\nefficientnet_b3.ra2_in1k,320,23.213,76.787,55.920,44.080,12.23,1.000,bicubic,-72
.507,-43.140,-82\nregnety_040.ra3_in1k,224,23.187,76.813,55.333,44.667,20.65,0.950,bicubic,-72.383,-43.717,-50\nconvnext_tiny_hnf.a2h_in1k,224,23.187,76.813,54.960,45.040,28.59,0.950,bicubic,-72.333,-44.160,-31\nlevit_384.fb_dist_in1k,224,23.173,76.827,55.747,44.253,39.13,0.900,bicubic,-72.367,-43.433,-43\nlevit_conv_384.fb_dist_in1k,224,23.173,76.827,55.747,44.253,39.13,0.900,bicubic,-72.367,-43.393,-41\nlamhalobotnet50ts_256.a1h_in1k,256,23.147,76.853,54.853,45.147,22.57,0.950,bicubic,-72.013,-44.027,+64\nregnetz_c16.ra3_in1k,256,23.107,76.893,57.733,42.267,13.46,0.940,bicubic,-72.423,-41.227,-40\nnasnetalarge.tf_in1k,331,23.093,76.907,54.520,45.480,88.75,0.911,bicubic,-72.607,-44.710,-78\npnasnet5large.tf_in1k,331,23.093,76.907,53.293,46.707,86.06,0.911,bicubic,-72.627,-45.627,-89\nvit_base_patch32_clip_224.laion2b_ft_in1k,224,23.067,76.933,55.347,44.653,88.22,0.900,bicubic,-72.483,-43.543,-51\nefficientformer_l3.snap_dist_in1k,224,23.040,76.960,56.787,43.213,31.41,0.950,bicubic,-72.570,-42.253,-65\nwide_resnet50_2.racm_in1k,288,23.013,76.987,55.547,44.453,68.88,0.950,bicubic,-72.647,-43.493,-76\npit_s_distilled_224.in1k,224,22.987,77.013,56.880,43.120,24.04,0.900,bicubic,-72.193,-42.000,+50\nfocalnet_tiny_lrf.ms_in1k,224,22.920,77.080,58.133,41.867,28.65,0.900,bicubic,-72.560,-40.967,-22\nvit_small_patch16_rope_ape_224.naver_in1k,224,22.853,77.147,50.613,49.387,22.06,0.900,bicubic,-71.787,-48.047,+193\nhgnetv2_b3.ssld_stage1_in22k_in1k,224,22.827,77.173,58.760,41.240,16.29,0.965,bicubic,-73.153,-40.410,-160\nconvnextv2_nano.fcmae_ft_in1k,288,22.813,77.187,58.747,41.253,15.62,1.000,bicubic,-73.087,-40.363,-140\nregnetx_160.tv2_in1k,224,22.800,77.200,56.213,43.787,54.28,0.965,bicubic,-73.100,-42.937,-140\nresnet61q.ra2_in1k,288,22.773,77.227,55.480,44.520,36.85,1.000,bicubic,-73.027,-43.520,-112\ntiny_vit_5m_224.dist_in22k_ft_in1k,224,22.760,77.240,55.907,44.093,5.39,0.950,bicubic,-72.280,-43.083,+79\nvit_base_patch32_clip_224.openai_ft_in1k,224,22.747,77.253,55.8
40,44.160,88.22,0.900,bicubic,-72.373,-43.100,+57\nresnetv2_50d_evos.ah_in1k,288,22.747,77.253,54.947,45.053,25.59,1.000,bicubic,-72.903,-44.203,-84\nhalonet50ts.a1h_in1k,256,22.747,77.253,54.360,45.640,22.73,0.940,bicubic,-72.393,-44.420,+52\ntwins_pcpvt_small.in1k,224,22.680,77.320,56.600,43.400,24.11,0.900,bicubic,-72.550,-42.280,+32\ntf_efficientnet_b4.in1k,380,22.680,77.320,56.267,43.733,19.34,0.922,bicubic,-73.140,-42.793,-127\nefficientnet_b3.ra2_in1k,288,22.667,77.333,55.080,44.920,12.23,0.875,bicubic,-72.523,-43.880,+33\nresmlp_big_24_224.fb_in1k,224,22.627,77.373,54.013,45.987,129.14,0.875,bicubic,-72.023,-44.817,+176\nhgnetv2_b2.ssld_stage2_ft_in1k,288,22.547,77.453,59.240,40.760,11.22,1.000,bicubic,-73.663,-40.180,-248\npoolformerv2_m48.sail_in1k,224,22.520,77.480,55.533,44.467,73.35,1.000,bicubic,-73.230,-43.447,-116\nrepvit_m1_5.dist_450e_in1k,224,22.387,77.613,55.547,44.453,14.64,0.950,bicubic,-73.533,-43.613,-156\nvit_srelpos_small_patch16_224.sw_in1k,224,22.360,77.640,55.293,44.707,21.97,0.900,bicubic,-72.710,-43.667,+65\necaresnet101d_pruned.miil_in1k,288,22.333,77.667,56.453,43.547,24.88,0.950,bicubic,-73.427,-42.727,-121\npoolformer_m36.sail_in1k,224,22.267,77.733,55.147,44.853,56.17,0.950,bicubic,-73.143,-43.693,-18\nhgnetv2_b3.ssld_stage2_ft_in1k,224,22.253,77.747,57.893,42.107,16.29,0.965,bicubic,-73.677,-41.107,-163\nvit_base_patch32_224.augreg_in21k_ft_in1k,224,22.227,77.773,53.733,46.267,88.22,0.900,bicubic,-72.783,-45.237,+72\nxcit_tiny_12_p8_224.fb_dist_in1k,224,22.067,77.933,53.880,46.120,6.71,1.000,bicubic,-73.023,-44.950,+52\nresnetv2_50d_gn.ah_in1k,288,22.040,77.960,54.880,45.120,25.57,1.000,bicubic,-73.460,-44.110,-58\nwide_resnet101_2.tv2_in1k,224,22.013,77.987,54.627,45.373,126.89,0.965,bilinear,-73.537,-44.363,-82\necaresnet50t.a1_in1k,288,21.840,78.160,53.560,46.440,25.57,1.000,bicubic,-73.620,-45.460,-42\nresnext50_32x4d.a1h_in1k,288,21.813,78.187,54.200,45.800,25.03,1.000,bicubic,-73.627,-44.860,-37\ntf_efficientnetv2_s.in1k,30
0,21.773,78.227,53.293,46.707,21.46,1.000,bicubic,-74.377,-45.817,-237\nresnet101d.ra2_in1k,256,21.693,78.307,54.360,45.640,44.57,0.950,bicubic,-74.207,-44.730,-166\nres2net101d.in1k,224,21.613,78.387,51.147,48.853,45.23,0.875,bilinear,-73.227,-47.763,+112\nresnetrs152.tf_in1k,256,21.587,78.413,51.360,48.640,86.62,1.000,bicubic,-74.453,-47.540,-210\nefficientvit_b2.r224_in1k,224,21.573,78.427,55.253,44.747,24.33,0.950,bicubic,-73.737,-43.537,-6\nconvnextv2_nano.fcmae_ft_in1k,224,21.547,78.453,55.427,44.573,15.62,0.875,bicubic,-73.883,-43.453,-41\nese_vovnet57b.ra4_e3600_r256_in1k,256,21.547,78.453,53.627,46.373,38.61,0.950,bicubic,-74.253,-45.403,-141\nefficientformerv2_s2.snap_dist_in1k,224,21.533,78.467,53.880,46.120,12.71,0.950,bicubic,-73.847,-45.040,-30\ncs3sedarknet_l.c2ns_in1k,256,21.467,78.533,55.640,44.360,21.91,0.887,bicubic,-73.403,-43.170,+88\nvit_wee_patch16_reg1_gap_256.sbb_in1k,256,21.413,78.587,54.507,45.493,13.42,0.950,bicubic,-73.377,-44.413,+111\ntresnet_m.miil_in21k_ft_in1k,224,21.387,78.613,53.520,46.480,31.39,0.875,bilinear,-74.343,-45.510,-133\nfastvit_sa12.apple_dist_in1k,256,21.307,78.693,54.600,45.400,11.58,0.900,bicubic,-73.823,-44.210,+24\necaresnet50t.ra2_in1k,256,21.240,78.760,53.000,47.000,25.57,0.875,bicubic,-73.600,-45.620,+101\nmambaout_kobe.in1k,288,21.227,78.773,55.000,45.000,9.14,1.000,bicubic,-73.863,-43.830,+36\nresnet50_gn.a1h_in1k,288,21.160,78.840,54.200,45.800,25.56,0.950,bicubic,-74.120,-44.790,-12\nmaxvit_rmlp_pico_rw_256.sw_in1k,256,21.160,78.840,51.787,48.213,7.52,0.950,bicubic,-73.530,-46.973,+129\nswin_tiny_patch4_window7_224.ms_in1k,224,21.120,78.880,55.640,44.360,28.29,0.900,bicubic,-74.050,-43.440,+10\nrepvit_m1_5.dist_300e_in1k,224,21.107,78.893,54.307,45.693,14.64,0.950,bicubic,-74.593,-44.603,-129\npit_s_224.in1k,224,21.080,78.920,53.547,46.453,23.46,0.900,bicubic,-73.580,-45.193,+139\nconvmixer_1536_20.in1k,224,20.973,79.027,55.307,44.693,51.63,0.960,bicubic,-74.107,-43.673,+32\ncs3darknet_x.c2ns_in1k,256,20.90
7,79.093,53.453,46.547,35.05,0.950,bicubic,-74.503,-45.677,-48\nresnet101.a2_in1k,288,20.880,79.120,51.853,48.147,44.55,1.000,bicubic,-74.550,-47.237,-56\nxcit_tiny_12_p8_224.fb_in1k,224,20.827,79.173,51.987,48.013,6.71,1.000,bicubic,-73.853,-46.593,+123\nnfnet_l0.ra2_in1k,224,20.760,79.240,55.507,44.493,35.07,0.900,bicubic,-74.670,-43.433,-60\ndeit_small_distilled_patch16_224.fb_in1k,224,20.733,79.267,54.853,45.147,22.44,0.900,bicubic,-73.977,-44.167,+115\npoolformerv2_m36.sail_in1k,224,20.667,79.333,52.987,47.013,56.08,1.000,bicubic,-74.763,-45.853,-59\nresnet61q.ra2_in1k,256,20.653,79.347,52.947,47.053,36.85,0.900,bicubic,-74.687,-46.063,-36\nresnet51q.ra2_in1k,288,20.600,79.400,55.173,44.827,35.70,1.000,bilinear,-75.280,-44.077,-184\nvit_pwee_patch16_reg1_gap_256.sbb_in1k,256,20.560,79.440,53.173,46.827,15.25,0.950,bicubic,-74.080,-45.537,+135\nregnety_032.tv2_in1k,224,20.547,79.453,54.347,45.653,19.44,0.965,bicubic,-74.743,-44.583,-28\nresnet152.a1_in1k,224,20.493,79.507,48.853,51.147,60.19,0.950,bicubic,-74.647,-50.137,+4\necaresnetlight.miil_in1k,288,20.467,79.533,53.387,46.613,30.16,0.950,bicubic,-74.823,-45.633,-32\nresnet152.tv2_in1k,224,20.453,79.547,52.427,47.573,60.19,0.965,bilinear,-75.077,-46.533,-105\nsebotnet33ts_256.a1h_in1k,256,20.333,79.667,48.653,51.347,13.70,0.940,bicubic,-74.337,-50.077,+119\nresnetrs101.tf_in1k,288,20.320,79.680,52.600,47.400,63.62,0.940,bicubic,-75.100,-46.420,-65\nregnety_032.ra_in1k,224,20.173,79.827,52.933,47.067,19.44,0.950,bicubic,-75.227,-46.147,-60\nresnest50d_4s2x40d.in1k,224,20.093,79.907,52.907,47.093,30.42,0.875,bicubic,-74.867,-46.153,+41\nhgnet_small.paddle_in1k,224,20.040,79.960,52.627,47.373,24.36,0.965,bicubic,-75.070,-46.243,+2\nresnetaa50d.sw_in12k_ft_in1k,224,20.027,79.973,54.720,45.280,25.58,0.950,bicubic,-75.343,-44.140,-58\nresnetaa50.a1h_in1k,288,20.013,79.987,51.733,48.267,25.56,1.000,bicubic,-75.227,-47.177,-26\nhgnet_tiny.ssld_in1k,224,19.987,80.013,54.773,45.227,14.74,0.965,bicubic,-75.723,-44.417,
-160\nfbnetv3_g.ra2_in1k,240,19.947,80.053,53.720,46.280,16.62,0.950,bilinear,-75.083,-45.150,+24\nmobilenetv4_conv_large.e500_r256_in1k,256,19.920,80.080,53.493,46.507,32.59,0.950,bicubic,-75.170,-45.287,+4\nxcit_nano_12_p8_384.fb_dist_in1k,384,19.760,80.240,50.560,49.440,3.05,1.000,bicubic,-73.700,-47.960,+355\nhgnetv2_b2.ssld_stage1_in22k_in1k,288,19.693,80.307,55.360,44.640,11.22,1.000,bicubic,-76.117,-43.680,-187\nresnext50_32x4d.fb_ssl_yfcc100m_ft_in1k,224,19.693,80.307,53.253,46.747,25.03,0.875,bilinear,-75.157,-45.217,+66\nmobilenetv4_hybrid_medium.ix_e550_r256_in1k,256,19.680,80.320,53.120,46.880,11.07,0.950,bicubic,-75.670,-45.730,-59\nresnext50_32x4d.a1_in1k,288,19.680,80.320,50.147,49.853,25.03,1.000,bicubic,-75.160,-48.633,+68\nregnetz_b16.ra3_in1k,288,19.587,80.413,52.427,47.573,9.72,1.000,bicubic,-75.583,-46.433,-25\nhaloregnetz_b.ra3_in1k,224,19.587,80.413,49.613,50.387,11.68,0.940,bicubic,-75.153,-48.607,+85\nresnet51q.ra2_in1k,256,19.560,80.440,53.533,46.467,35.70,0.875,bilinear,-75.930,-45.687,-107\nmobilenetv4_conv_medium.e500_r256_in1k,320,19.453,80.547,53.067,46.933,9.72,1.000,bicubic,-75.547,-45.833,+18\nresnext101_32x8d.tv2_in1k,176,19.440,80.560,51.893,48.107,88.79,0.875,bilinear,-75.930,-47.157,-73\ntresnet_xl.miil_in1k,224,19.347,80.653,52.827,47.173,78.44,0.875,bilinear,-76.093,-46.163,-94\nresnetv2_101.a1h_in1k,224,19.320,80.680,48.853,51.147,44.54,0.950,bicubic,-76.360,-50.157,-162\nresnet101.a1h_in1k,224,19.280,80.720,49.787,50.213,44.55,0.950,bicubic,-76.090,-49.193,-74\nsenet154.gluon_in1k,224,19.133,80.867,47.280,52.720,115.09,0.875,bicubic,-75.797,-51.490,+29\nrexnet_200.nav_in1k,224,19.040,80.960,52.573,47.427,16.37,0.875,bicubic,-75.940,-46.407,+13\nlambda_resnet50ts.a1h_in1k,256,19.040,80.960,48.893,51.107,21.54,0.950,bicubic,-75.810,-49.987,+54\nresnet50d.a1_in1k,288,19.040,80.960,48.707,51.293,25.58,1.000,bicubic,-75.720,-49.983,+73\nseresnext101_64x4d.gluon_in1k,224,19.013,80.987,48.920,51.080,88.23,0.875,bicubic,-75.927,-49.
880,+24\nlevit_256.fb_dist_in1k,224,19.000,81.000,49.707,50.293,18.89,0.900,bicubic,-76.030,-49.093,+1\nlevit_conv_256.fb_dist_in1k,224,19.000,81.000,49.707,50.293,18.89,0.900,bicubic,-76.030,-49.163,+3\nlegacy_senet154.in1k,224,18.933,81.067,47.627,52.373,115.09,0.875,bilinear,-76.157,-51.303,-15\ntf_efficientnet_b1.ns_jft_in1k,240,18.907,81.093,51.680,48.320,7.79,0.882,bicubic,-76.233,-46.730,-33\nrepvgg_b3.rvgg_in1k,224,18.867,81.133,49.827,50.173,123.09,0.875,bilinear,-75.683,-48.953,+126\ngcvit_xxtiny.in1k,224,18.827,81.173,53.107,46.893,12.00,0.875,bicubic,-75.553,-45.453,+160\neca_nfnet_l0.ra2_in1k,224,18.747,81.253,53.333,46.667,24.14,0.900,bicubic,-76.523,-45.767,-62\ndeit_small_patch16_224.fb_in1k,224,18.720,81.280,51.093,48.907,22.05,0.900,bicubic,-75.710,-47.607,+151\nmobilevitv2_200.cvnets_in1k,256,18.613,81.387,50.000,50.000,18.45,0.888,bicubic,-76.247,-48.810,+39\nmixer_b16_224.miil_in21k_ft_in1k,224,18.547,81.453,51.213,48.787,59.88,0.875,bilinear,-76.813,-47.657,-86\nedgenext_small.usi_in1k,320,18.520,81.480,53.573,46.427,5.59,1.000,bicubic,-76.900,-45.527,-103\necaresnet50t.a2_in1k,288,18.427,81.573,48.760,51.240,25.57,1.000,bicubic,-76.923,-50.050,-87\nregnetx_080.tv2_in1k,224,18.413,81.587,50.133,49.867,39.57,0.965,bicubic,-76.697,-48.577,-30\npoolformer_s36.sail_in1k,224,18.293,81.707,52.027,47.973,30.86,0.900,bicubic,-76.817,-47.003,-34\nrepvit_m3.dist_in1k,224,18.293,81.707,51.680,48.320,10.68,0.950,bicubic,-76.947,-47.190,-63\nseresnet50.ra2_in1k,288,18.280,81.720,51.027,48.973,28.09,0.950,bicubic,-77.050,-47.913,-85\nseresnext50_32x4d.racm_in1k,224,18.240,81.760,50.893,49.107,27.56,0.875,bicubic,-76.790,-47.997,-15\nmobilenetv3_large_150d.ra4_e3600_r256_in1k,320,18.133,81.867,54.427,45.573,14.62,1.000,bilinear,-77.177,-44.573,-82\nwide_resnet50_2.tv2_in1k,224,18.133,81.867,52.267,47.733,68.88,0.965,bilinear,-76.737,-46.533,+17\nese_vovnet39b.ra_in1k,288,18.107,81.893,49.613,50.387,24.57,0.950,bicubic,-76.783,-49.197,+11\ncait_xxs36_224.fb_di
st_in1k,224,18.080,81.920,49.267,50.733,17.30,1.000,bicubic,-76.180,-49.203,+168\nsehalonet33ts.ra2_in1k,256,18.040,81.960,47.613,52.387,13.69,0.940,bicubic,-76.750,-51.337,+40\nmobilenetv4_hybrid_medium.e500_r224_in1k,256,18.000,82.000,51.613,48.387,11.07,1.000,bicubic,-77.480,-47.327,-132\necaresnet50d.miil_in1k,224,18.000,82.000,51.507,48.493,25.58,0.875,bicubic,-76.630,-47.193,+85\ntf_efficientnet_lite4.in1k,380,17.960,82.040,50.293,49.707,13.01,0.920,bilinear,-76.930,-48.567,+4\ngcresnet50t.ra2_in1k,288,17.947,82.053,49.373,50.627,25.90,1.000,bicubic,-77.333,-49.527,-82\ncs3darknet_l.c2ns_in1k,288,17.880,82.120,51.453,48.547,21.16,0.950,bicubic,-77.260,-47.637,-55\nresnet101.a1_in1k,224,17.853,82.147,45.573,54.427,44.55,0.950,bicubic,-76.677,-53.147,+110\nresnest50d_1s4x24d.in1k,224,17.640,82.360,49.627,50.373,25.68,0.875,bicubic,-77.120,-48.843,+36\nmobilevitv2_175.cvnets_in1k,256,17.587,82.413,49.280,50.720,14.25,0.888,bicubic,-77.283,-49.670,+7\nresnet50d.ra2_in1k,288,17.547,82.453,49.907,50.093,25.58,0.950,bicubic,-77.463,-49.113,-25\nresnetv2_50.a1h_in1k,288,17.547,82.453,49.853,50.147,25.55,1.000,bicubic,-77.313,-48.937,+14\nvit_tiny_patch16_384.augreg_in21k_ft_in1k,384,17.547,82.453,49.747,50.253,5.79,1.000,bicubic,-76.083,-48.493,+278\ntiny_vit_5m_224.in1k,224,17.533,82.467,49.973,50.027,5.39,0.950,bicubic,-76.747,-48.777,+153\nhgnet_tiny.paddle_in1k,288,17.480,82.520,50.827,49.173,14.74,1.000,bicubic,-77.480,-48.083,-17\nseresnext101_32x4d.gluon_in1k,224,17.400,82.600,46.493,53.507,48.96,0.875,bicubic,-77.520,-52.387,-8\nresnest50d.in1k,224,17.347,82.653,50.547,49.453,27.48,0.875,bilinear,-77.483,-48.183,+19\nresnet152.a2_in1k,224,17.347,82.653,45.600,54.400,60.19,0.950,bicubic,-77.603,-53.360,-15\ninception_v4.tf_in1k,299,17.320,82.680,45.373,54.627,42.68,0.875,bicubic,-77.060,-53.517,+128\nresnet50.c2_in1k,288,17.307,82.693,49.333,50.667,25.56,1.000,bicubic,-77.583,-49.687,-7\nhgnetv2_b2.ssld_stage2_ft_in1k,224,17.293,82.707,51.787,48.213,11.22,0.965
,bicubic,-78.247,-47.213,-182\ndm_nfnet_f0.dm_in1k,192,17.200,82.800,45.027,54.973,71.49,0.900,bicubic,-78.180,-53.923,-124\nconvnext_pico.d1_in1k,288,17.160,82.840,49.720,50.280,9.05,0.950,bicubic,-77.590,-48.980,+28\ntf_efficientnet_b3.ap_in1k,300,17.133,82.867,49.573,50.427,12.23,0.904,bicubic,-78.227,-49.327,-121\nxcit_tiny_24_p16_224.fb_dist_in1k,224,17.093,82.907,47.427,52.573,12.12,1.000,bicubic,-77.437,-51.203,+90\nefficientnet_el.ra_in1k,300,17.067,82.933,49.800,50.200,10.59,0.904,bicubic,-78.053,-49.190,-70\nresnetv2_34d.ra4_e3600_r384_in1k,448,17.027,82.973,47.573,52.427,21.82,1.000,bicubic,-77.463,-51.207,+102\nregnetx_032.tv2_in1k,224,17.000,83.000,48.120,51.880,15.30,0.965,bicubic,-77.660,-50.720,+45\nfastvit_s12.apple_dist_in1k,256,16.987,83.013,49.333,50.667,9.47,0.900,bicubic,-77.883,-49.527,-8\ngcresnext50ts.ch_in1k,288,16.973,83.027,47.920,52.080,15.67,1.000,bicubic,-77.897,-50.630,-12\nxception71.tf_in1k,299,16.893,83.107,45.613,54.387,42.34,0.903,bicubic,-77.437,-53.037,+123\ncs3darknet_focus_l.c2ns_in1k,288,16.853,83.147,50.053,49.947,21.15,0.950,bicubic,-78.307,-48.907,-84\nmambaout_femto.in1k,288,16.827,83.173,49.040,50.960,7.30,1.000,bicubic,-77.793,-49.520,+60\nhrnet_w18_ssld.paddle_in1k,224,16.827,83.173,46.867,53.133,21.30,0.950,bilinear,-78.533,-52.203,-132\nregnety_016.tv2_in1k,224,16.773,83.227,49.840,50.160,11.20,0.965,bicubic,-77.807,-49.040,+67\ntf_efficientnet_b3.aa_in1k,300,16.760,83.240,49.240,50.760,12.23,0.904,bicubic,-78.250,-49.670,-48\ngc_efficientnetv2_rw_t.agc_in1k,224,16.747,83.253,46.667,53.333,13.68,1.000,bicubic,-78.293,-52.173,-57\nconvnextv2_pico.fcmae_ft_in1k,288,16.720,83.280,49.987,50.013,9.07,0.950,bicubic,-78.530,-48.983,-110\nefficientnetv2_rw_t.ra2_in1k,224,16.720,83.280,47.093,52.907,13.65,1.000,bicubic,-78.260,-51.897,-45\nresnext101_64x4d.gluon_in1k,224,16.720,83.280,44.027,55.973,83.46,0.875,bicubic,-77.950,-54.473,+33\nresmlp_36_224.fb_distilled_in1k,224,16.707,83.293,51.093,48.907,44.69,0.875,bicubic,-78
.173,-47.777,-27\npoolformerv2_s36.sail_in1k,224,16.680,83.320,49.253,50.747,30.79,1.000,bicubic,-78.650,-49.757,-129\nresnet50d.ra4_e3600_r224_in1k,288,16.587,83.413,49.173,50.827,25.58,1.000,bicubic,-78.733,-49.807,-128\ntf_efficientnetv2_b3.in1k,300,16.587,83.413,48.480,51.520,14.36,0.904,bicubic,-78.593,-50.340,-101\necaresnet50t.a1_in1k,224,16.520,83.480,47.160,52.840,25.57,0.950,bicubic,-78.160,-51.600,+23\ninception_resnet_v2.tf_in1k,299,16.467,83.533,44.680,55.320,55.84,0.897,bicubic,-78.103,-54.120,+59\ntresnet_l.miil_in1k,224,16.440,83.560,49.573,50.427,55.99,0.875,bilinear,-78.830,-49.387,-121\nresnet152s.gluon_in1k,224,16.440,83.560,44.347,55.653,60.32,0.875,bicubic,-78.620,-54.493,-72\nconvnext_pico_ols.d1_in1k,288,16.347,83.653,49.267,50.733,9.06,1.000,bicubic,-78.303,-49.243,+32\nresnet101.tv2_in1k,224,16.333,83.667,48.333,51.667,44.55,0.965,bilinear,-78.957,-50.707,-128\ngcresnet50t.ra2_in1k,256,16.320,83.680,48.320,51.680,25.90,0.900,bicubic,-78.540,-50.620,-25\nresnet152d.gluon_in1k,224,16.320,83.680,44.080,55.920,60.21,0.875,bicubic,-78.500,-54.680,-13\nfastvit_sa12.apple_in1k,256,16.307,83.693,49.387,50.613,11.58,0.900,bicubic,-78.583,-49.523,-41\nconvnext_nano.d1h_in1k,224,16.293,83.707,47.440,52.560,15.59,0.950,bicubic,-78.637,-51.230,-48\nmobilevitv2_150.cvnets_in1k,256,16.253,83.747,47.960,52.040,10.59,0.888,bicubic,-78.277,-50.840,+60\ngernet_l.idstcv_in1k,256,16.253,83.747,46.773,53.227,31.08,0.875,bilinear,-78.847,-52.127,-90\nresmlp_24_224.fb_distilled_in1k,224,16.227,83.773,50.093,49.907,30.02,0.875,bicubic,-78.223,-48.607,+78\nseresnet50.a1_in1k,288,16.213,83.787,46.840,53.160,28.09,1.000,bicubic,-78.457,-51.960,+12\nconvnext_nano_ols.d1h_in1k,224,16.200,83.800,47.893,52.107,15.65,0.950,bicubic,-78.540,-50.727,-4\nwide_resnet50_2.racm_in1k,224,16.133,83.867,48.093,51.907,68.88,0.875,bicubic,-78.937,-50.887,-86\ninception_resnet_v2.tf_ens_adv_in1k,299,16.120,83.880,43.333,56.667,55.84,0.897,bicubic,-78.100,-55.267,+116\nresnetv2_50d_evos
.ah_in1k,224,16.107,83.893,46.467,53.533,25.59,0.950,bicubic,-78.863,-52.283,-66\nrepvgg_b3g4.rvgg_in1k,224,16.067,83.933,47.480,52.520,83.83,0.875,bilinear,-78.453,-51.340,+56\ngcresnext50ts.ch_in1k,256,16.067,83.933,46.120,53.880,15.67,0.900,bicubic,-78.473,-52.590,+49\nresnetv2_50d_gn.ah_in1k,224,16.053,83.947,46.133,53.867,25.57,0.950,bicubic,-78.707,-52.667,-17\nxcit_tiny_24_p16_224.fb_in1k,224,16.053,83.947,45.267,54.733,12.12,1.000,bicubic,-78.037,-53.243,+144\ngmlp_s16_224.ra3_in1k,224,16.053,83.947,44.707,55.293,19.42,0.875,bicubic,-78.097,-53.923,+134\nrepvit_m1_1.dist_450e_in1k,224,16.040,83.960,49.120,50.880,8.80,0.950,bicubic,-78.850,-49.800,-58\necaresnet50d_pruned.miil_in1k,288,15.920,84.080,49.627,50.373,19.94,0.950,bicubic,-79.190,-49.303,-109\nefficientnet_b1.ra4_e3600_r240_in1k,288,15.907,84.093,48.667,51.333,7.79,1.000,bicubic,-79.413,-50.473,-154\necaresnet101d_pruned.miil_in1k,224,15.907,84.093,48.573,51.427,24.88,0.875,bicubic,-79.173,-50.457,-99\nswiftformer_l1.dist_in1k,224,15.893,84.107,48.280,51.720,12.06,0.950,bicubic,-78.907,-50.650,-34\nmobilenetv4_hybrid_medium.e500_r224_in1k,224,15.893,84.107,47.920,52.080,11.07,0.950,bicubic,-78.977,-50.930,-52\nxception65.tf_in1k,299,15.893,84.107,43.507,56.493,39.92,0.903,bicubic,-77.877,-54.743,+199\nresnet50.fb_ssl_yfcc100m_ft_in1k,224,15.840,84.160,49.333,50.667,25.56,0.875,bilinear,-78.650,-49.207,+53\nresnet50.a1_in1k,288,15.840,84.160,45.560,54.440,25.56,1.000,bicubic,-78.930,-53.200,-32\nedgenext_small_rw.sw_in1k,320,15.800,84.200,49.520,50.480,7.83,1.000,bicubic,-78.880,-49.300,-11\nresnext50_32x4d.ra_in1k,288,15.760,84.240,46.947,53.053,25.03,0.950,bicubic,-78.930,-51.853,-14\nresnet50.c1_in1k,288,15.653,84.347,47.107,52.893,25.56,1.000,bicubic,-79.137,-51.463,-38\nfastvit_t12.apple_dist_in1k,256,15.613,84.387,47.653,52.347,7.55,0.900,bicubic,-79.007,-51.197,+14\necaresnet26t.ra2_in1k,320,15.507,84.493,47.867,52.133,16.01,0.950,bicubic,-78.803,-50.993,+77\nvit_base_patch32_384.augreg_in1k,
384,15.493,84.507,43.733,56.267,88.30,1.000,bicubic,-78.147,-54.737,+208\ncoat_tiny.in1k,224,15.467,84.533,45.520,54.480,5.50,0.900,bicubic,-78.113,-53.160,+217\nregnety_320.pycls_in1k,224,15.453,84.547,44.680,55.320,145.05,0.875,bicubic,-79.127,-53.680,+19\nmobilenetv4_conv_blur_medium.e500_r224_in1k,256,15.440,84.560,47.960,52.040,9.72,1.000,bicubic,-79.430,-50.720,-66\nconvmixer_768_32.in1k,224,15.387,84.613,47.613,52.387,21.11,0.960,bicubic,-79.113,-51.207,+38\nresnet50.d_in1k,288,15.347,84.653,44.493,55.507,25.56,1.000,bicubic,-79.633,-54.257,-93\nedgenext_small.usi_in1k,256,15.320,84.680,48.360,51.640,5.59,0.950,bicubic,-79.630,-50.440,-87\nresnet50d.a2_in1k,288,15.320,84.680,44.547,55.453,25.58,1.000,bicubic,-79.270,-54.123,+8\necaresnetlight.miil_in1k,224,15.227,84.773,45.600,54.400,30.16,0.875,bicubic,-79.533,-53.380,-42\nresnext50d_32x4d.bt_in1k,288,15.213,84.787,45.933,54.067,25.05,0.950,bicubic,-79.337,-52.757,+21\ncs3darknet_l.c2ns_in1k,256,15.187,84.813,48.853,51.147,21.16,0.887,bicubic,-79.363,-50.057,+18\nskresnext50_32x4d.ra_in1k,224,15.173,84.827,44.520,55.480,27.48,0.875,bicubic,-79.087,-54.190,+78\nrepvit_m1_0.dist_450e_in1k,224,15.027,84.973,46.653,53.347,7.30,0.950,bicubic,-79.533,-51.987,+13\nfastvit_s12.apple_in1k,256,15.027,84.973,45.027,54.973,9.47,0.900,bicubic,-79.193,-53.563,+82\nvit_relpos_base_patch32_plus_rpn_256.sw_in1k,256,15.027,84.973,42.333,57.667,119.42,0.900,bicubic,-78.723,-55.767,+180\nseresnet33ts.ra2_in1k,288,15.013,84.987,47.067,52.933,19.78,1.000,bicubic,-80.027,-51.833,-118\nresnext50_32x4d.a2_in1k,288,15.013,84.987,45.093,54.907,25.03,1.000,bicubic,-79.547,-53.797,+11\nefficientvit_b1.r288_in1k,288,15.000,85.000,46.667,53.333,9.10,1.000,bicubic,-79.490,-52.253,+31\neca_resnet33ts.ra2_in1k,288,14.987,85.013,48.627,51.373,19.68,1.000,bicubic,-79.663,-50.143,-22\nhgnetv2_b2.ssld_stage1_in22k_in1k,224,14.947,85.053,48.347,51.653,11.22,0.965,bicubic,-80.273,-50.733,-165\nvit_base_patch16_224.augreg_in1k,224,14.907,85.093,41.
893,58.107,86.57,0.900,bicubic,-78.723,-56.707,+190\ncait_xxs24_224.fb_dist_in1k,224,14.880,85.120,44.627,55.373,11.96,1.000,bicubic,-78.710,-54.073,+195\nfasternet_s.in1k,224,14.867,85.133,47.427,52.573,31.18,1.000,bicubic,-80.223,-51.523,-134\nregnetz_b16.ra3_in1k,224,14.827,85.173,45.800,54.200,9.72,0.940,bicubic,-79.733,-53.080,+3\nmambaout_kobe.in1k,224,14.800,85.200,45.867,54.133,9.14,1.000,bicubic,-79.420,-52.733,+69\nlevit_conv_192.fb_dist_in1k,224,14.680,85.320,44.653,55.347,10.95,0.900,bicubic,-79.500,-53.887,+82\nlevit_192.fb_dist_in1k,224,14.667,85.333,44.640,55.360,10.95,0.900,bicubic,-79.513,-53.900,+80\nseresnet50.a2_in1k,288,14.653,85.347,44.160,55.840,28.09,1.000,bicubic,-80.027,-54.670,-42\npoolformerv2_s24.sail_in1k,224,14.640,85.360,45.973,54.027,21.34,1.000,bicubic,-80.010,-52.917,-30\nconvnextv2_pico.fcmae_ft_in1k,224,14.627,85.373,45.853,54.147,9.07,0.875,bicubic,-79.833,-52.767,+23\nres2net50d.in1k,224,14.613,85.387,44.507,55.493,25.72,0.875,bilinear,-79.697,-54.213,+47\ngcresnet33ts.ra2_in1k,288,14.573,85.427,46.013,53.987,19.88,1.000,bicubic,-80.347,-52.767,-106\nrepvit_m1_1.dist_300e_in1k,224,14.547,85.453,46.800,53.200,8.80,0.950,bicubic,-80.213,-52.060,-68\ncs3darknet_focus_l.c2ns_in1k,256,14.480,85.520,46.507,53.493,21.15,0.887,bicubic,-80.180,-52.273,-41\ndarknet53.c2ns_in1k,288,14.440,85.560,46.827,53.173,41.61,1.000,bicubic,-80.180,-52.073,-24\ncoat_lite_mini.in1k,224,14.400,85.600,44.533,55.467,11.01,0.900,bicubic,-79.650,-54.117,+101\nresnet50.tv2_in1k,224,14.387,85.613,46.733,53.267,25.56,0.965,bilinear,-80.273,-51.987,-43\nefficientnet_el_pruned.in1k,300,14.387,85.613,45.840,54.160,10.59,0.904,bicubic,-80.003,-52.910,+26\nrexnet_150.nav_in1k,224,14.373,85.627,46.400,53.600,9.73,0.875,bicubic,-80.107,-52.390,+11\nefficientnet_b2.ra_in1k,288,14.320,85.680,45.827,54.173,9.11,1.000,bicubic,-80.320,-52.763,-38\nresnext50_32x4d.a1h_in1k,224,14.320,85.680,44.000,56.000,25.03,0.950,bicubic,-80.220,-54.600,-7\nseresnet33ts.ra2_in1k,256,14
.280,85.720,45.840,54.160,19.78,0.900,bicubic,-80.580,-52.880,-97\ndarknetaa53.c2ns_in1k,288,14.120,85.880,44.880,55.120,36.02,1.000,bilinear,-80.400,-53.880,-3\npoolformer_s24.sail_in1k,224,14.107,85.893,47.040,52.960,21.39,0.900,bicubic,-80.473,-51.800,-25\nseresnet50.ra2_in1k,224,14.080,85.920,45.293,54.707,28.09,0.875,bicubic,-80.490,-53.467,-20\nrepvit_m2.dist_in1k,224,14.053,85.947,46.320,53.680,8.80,0.950,bicubic,-80.687,-52.350,-73\npvt_v2_b1.in1k,224,14.027,85.973,47.360,52.640,14.01,0.900,bicubic,-79.803,-51.050,+129\nmobilevitv2_125.cvnets_in1k,256,14.000,86.000,44.280,55.720,7.48,0.888,bicubic,-79.990,-54.350,+102\neca_resnet33ts.ra2_in1k,256,13.987,86.013,47.027,52.973,19.68,0.900,bicubic,-80.213,-51.633,+51\ngernet_m.idstcv_in1k,224,13.987,86.013,45.867,54.133,21.14,0.875,bilinear,-80.663,-52.993,-52\nfbnetv3_d.ra2_in1k,256,13.947,86.053,45.987,54.013,10.31,0.950,bilinear,-79.973,-52.753,+109\nresnet101.a2_in1k,224,13.907,86.093,42.960,57.040,44.55,0.950,bicubic,-80.793,-55.680,-72\nlegacy_seresnext101_32x4d.in1k,224,13.853,86.147,42.600,57.400,48.96,0.875,bilinear,-80.517,-55.880,+15\nresnext101_32x4d.gluon_in1k,224,13.680,86.320,41.347,58.653,44.18,0.875,bicubic,-80.850,-57.173,-18\nmobilenetv3_large_150d.ra4_e3600_r256_in1k,256,13.653,86.347,47.400,52.600,14.62,0.950,bilinear,-81.207,-51.470,-112\nmobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k,256,13.653,86.347,45.400,54.600,8.46,0.950,bicubic,-81.297,-53.170,-135\ngcresnet33ts.ra2_in1k,256,13.640,86.360,44.813,55.187,19.88,0.900,bicubic,-80.860,-54.037,-12\nedgenext_small_rw.sw_in1k,256,13.600,86.400,45.907,54.093,7.83,0.900,bicubic,-80.330,-52.693,+97\nmobilenetv4_conv_medium.e500_r256_in1k,256,13.600,86.400,44.893,55.107,9.72,0.950,bicubic,-80.700,-53.467,+21\nefficientnet_b2.ra_in1k,256,13.600,86.400,44.213,55.787,9.11,0.875,bicubic,-80.420,-54.137,+86\ndpn68b.ra_in1k,288,13.600,86.400,39.453,60.547,12.61,1.000,bicubic,-80.380,-59.087,+94\nfastvit_t12.apple_in1k,256,13.587,86.413,43.253,56.747,7.55,
0.900,bicubic,-80.403,-55.267,+87\nhgnet_tiny.paddle_in1k,224,13.573,86.427,44.000,56.000,14.74,0.965,bicubic,-80.717,-54.800,+20\npit_xs_distilled_224.in1k,224,13.507,86.493,45.107,54.893,11.00,0.900,bicubic,-80.273,-53.503,+125\nwide_resnet101_2.tv2_in1k,176,13.453,86.547,40.893,59.107,126.89,0.875,bilinear,-81.017,-57.787,-13\nseresnext50_32x4d.gluon_in1k,224,13.440,86.560,43.440,56.560,27.56,0.875,bicubic,-80.920,-55.180,+5\nresmlp_36_224.fb_in1k,224,13.427,86.573,46.253,53.747,44.69,0.875,bicubic,-80.773,-52.077,+35\ndarknet53.c2ns_in1k,256,13.427,86.573,44.720,55.280,41.61,0.887,bicubic,-80.883,-53.830,+8\necaresnet50t.a2_in1k,224,13.400,86.600,42.493,57.507,25.57,0.950,bicubic,-81.100,-56.287,-23\nmobilenetv4_conv_medium.e500_r224_in1k,256,13.373,86.627,45.067,54.933,9.72,1.000,bicubic,-81.387,-53.863,-104\neca_botnext26ts_256.c1_in1k,256,13.360,86.640,42.267,57.733,10.59,0.950,bicubic,-80.440,-56.163,+114\nresnet50_gn.a1h_in1k,224,13.333,86.667,42.773,57.227,25.56,0.940,bicubic,-81.027,-55.917,-2\nresnet152.a3_in1k,224,13.267,86.733,42.787,57.213,60.19,0.950,bicubic,-81.453,-56.033,-96\nrepvgg_b2g4.rvgg_in1k,224,13.227,86.773,43.427,56.573,61.76,0.875,bilinear,-80.643,-55.183,+95\nregnetx_320.pycls_in1k,224,13.173,86.827,40.227,59.773,107.81,0.875,bicubic,-81.267,-58.503,-19\nvisformer_tiny.in1k,224,13.160,86.840,43.560,56.440,10.32,0.900,bicubic,-80.430,-54.890,+140\nresnet50.b1k_in1k,288,13.147,86.853,43.773,56.227,25.56,1.000,bicubic,-81.723,-54.997,-138\nresnet50d.a1_in1k,224,13.133,86.867,41.227,58.773,25.58,0.950,bicubic,-81.167,-57.513,+4\nese_vovnet39b.ra_in1k,224,13.120,86.880,43.707,56.293,24.57,0.875,bicubic,-80.990,-54.623,+45\nefficientformerv2_s1.snap_dist_in1k,224,13.080,86.920,42.627,57.373,6.19,0.950,bicubic,-81.120,-56.143,+23\nvit_small_patch16_224.augreg_in1k,224,13.080,86.920,40.987,59.013,22.05,0.900,bicubic,-80.820,-57.643,+84\ncspresnext50.ra_in1k,256,13.040,86.960,45.053,54.947,20.57,0.887,bilinear,-81.790,-53.817,-126\nefficientnet_
b3_pruned.in1k,300,13.027,86.973,44.893,55.107,9.86,0.904,bicubic,-81.633,-53.917,-88\nmobilevit_s.cvnets_in1k,256,13.013,86.987,40.827,59.173,5.58,0.900,bicubic,-80.197,-57.493,+188\nmixnet_xl.ra_in1k,224,12.960,87.040,43.160,56.840,11.90,0.875,bicubic,-81.240,-55.480,+20\nnf_regnet_b1.ra2_in1k,288,12.907,87.093,43.907,56.093,10.22,0.900,bicubic,-81.243,-54.783,+33\nefficientformer_l1.snap_dist_in1k,224,12.893,87.107,45.147,54.853,12.29,0.950,bicubic,-81.607,-53.493,-45\neca_halonext26ts.c1_in1k,256,12.893,87.107,42.200,57.800,10.76,0.940,bicubic,-81.177,-56.300,+43\nresnext50_32x4d.a1_in1k,224,12.893,87.107,40.813,59.187,25.03,0.950,bicubic,-81.177,-57.817,+43\nresnet101d.gluon_in1k,224,12.773,87.227,41.333,58.667,44.57,0.875,bicubic,-81.477,-57.217,+1\nregnetx_016.tv2_in1k,224,12.760,87.240,45.160,54.840,9.19,0.965,bicubic,-81.410,-53.580,+18\nrepvit_m1_0.dist_300e_in1k,224,12.720,87.280,43.920,56.080,7.30,0.950,bicubic,-81.610,-54.930,-21\npit_xs_224.in1k,224,12.720,87.280,42.613,57.387,10.62,0.900,bicubic,-80.410,-55.707,+193\nresnetv2_34d.ra4_e3600_r384_in1k,384,12.627,87.373,41.547,58.453,21.82,1.000,bicubic,-81.443,-56.653,+35\nhgnetv2_b1.ssld_stage2_ft_in1k,288,12.600,87.400,46.720,53.280,6.34,1.000,bicubic,-81.980,-52.240,-77\nresnetblur50.bt_in1k,288,12.573,87.427,44.280,55.720,25.56,0.950,bicubic,-81.897,-54.500,-45\ntf_efficientnet_b3.in1k,300,12.560,87.440,43.240,56.760,12.23,0.904,bicubic,-82.070,-55.540,-89\ncrossvit_9_dagger_240.in1k,240,12.560,87.440,41.387,58.613,8.78,0.875,bicubic,-80.380,-56.873,+213\nresnet50.b2k_in1k,288,12.547,87.453,43.787,56.213,25.56,1.000,bicubic,-82.173,-54.903,-123\nresnext50_32x4d.tv2_in1k,224,12.533,87.467,42.853,57.147,25.03,0.965,bilinear,-82.097,-55.917,-92\ninception_v3.gluon_in1k,299,12.507,87.493,40.200,59.800,23.83,0.875,bicubic,-80.993,-58.340,+131\nresmlp_24_224.fb_in1k,224,12.467,87.533,43.413,56.587,30.02,0.875,bicubic,-81.553,-55.047,+45\ntresnet_m.miil_in1k,224,12.467,87.533,41.707,58.293,31.39,0.875,bili
near,-82.153,-57.093,-88\nefficientvit_b1.r256_in1k,256,12.440,87.560,41.947,58.053,9.10,1.000,bicubic,-81.590,-56.423,+41\nefficientnet_b1.ra4_e3600_r240_in1k,240,12.427,87.573,43.973,56.027,7.79,0.900,bicubic,-82.423,-54.737,-156\nwide_resnet50_2.tv2_in1k,176,12.427,87.573,43.467,56.533,68.88,0.875,bilinear,-81.773,-55.213,-4\ndarknetaa53.c2ns_in1k,256,12.413,87.587,42.760,57.240,36.02,0.887,bilinear,-81.487,-55.700,+55\nresnetv2_34d.ra4_e3600_r224_in1k,288,12.373,87.627,40.120,59.880,21.82,1.000,bicubic,-82.027,-58.370,-45\nregnety_120.pycls_in1k,224,12.347,87.653,41.773,58.227,51.82,0.875,bicubic,-82.173,-56.897,-70\ncoat_lite_tiny.in1k,224,12.280,87.720,41.107,58.893,5.72,0.900,bicubic,-80.980,-57.163,+160\necaresnet26t.ra2_in1k,256,12.267,87.733,42.640,57.360,16.01,0.875,bicubic,-81.573,-56.010,+64\nconvnext_femto_ols.d1_in1k,288,12.240,87.760,43.440,56.560,5.23,0.950,bicubic,-81.650,-54.980,+53\nmobilenetv4_conv_blur_medium.e500_r224_in1k,224,12.227,87.773,42.973,57.027,9.72,0.950,bicubic,-82.063,-55.777,-29\nresnet50.a1h_in1k,224,12.213,87.787,43.853,56.147,25.56,1.000,bicubic,-82.567,-54.837,-152\nresnetaa50.a1h_in1k,224,12.213,87.787,42.360,57.640,25.56,0.950,bicubic,-82.427,-56.420,-110\nregnety_160.pycls_in1k,224,12.200,87.800,41.133,58.867,83.59,0.875,bicubic,-82.170,-57.697,-49\nresnet50.a2_in1k,288,12.080,87.920,40.107,59.893,25.56,1.000,bicubic,-82.560,-58.203,-113\nefficientnet_em.ra2_in1k,240,12.013,87.987,43.667,56.333,6.90,0.882,bicubic,-81.827,-55.143,+56\nresnet101s.gluon_in1k,224,11.987,88.013,40.800,59.200,44.67,0.875,bicubic,-82.753,-57.880,-148\nhgnetv2_b1.ssld_stage1_in22k_in1k,288,11.973,88.027,45.333,54.667,6.34,1.000,bicubic,-82.607,-53.507,-100\nconvnext_pico.d1_in1k,224,11.947,88.053,43.440,56.560,9.05,0.875,bicubic,-82.113,-55.040,+12\nshvit_s4.in1k,256,11.933,88.067,41.267,58.733,16.59,0.875,bicubic,-82.037,-57.193,+32\nhrnet_w64.ms_in1k,224,11.920,88.080,40.773,59.227,128.06,0.875,bilinear,-82.110,-57.847,+19\nresnetrs101.tf_in1k,1
92,11.920,88.080,38.693,61.307,63.62,0.940,bicubic,-82.780,-60.017,-145\nresnet101.a3_in1k,224,11.787,88.213,40.280,59.720,44.55,0.950,bicubic,-82.263,-58.190,+11\nxcit_tiny_12_p16_224.fb_dist_in1k,224,11.747,88.253,39.853,60.147,6.72,1.000,bicubic,-81.653,-58.367,+122\nresnet50d.ra2_in1k,224,11.733,88.267,42.200,57.800,25.58,0.875,bicubic,-82.567,-56.580,-47\necaresnet50t.a3_in1k,224,11.720,88.280,40.907,59.093,25.57,0.950,bicubic,-82.680,-57.693,-67\nnf_resnet50.ra2_in1k,288,11.707,88.293,45.333,54.667,25.56,0.940,bicubic,-82.913,-53.467,-116\nxception41.tf_in1k,299,11.707,88.293,38.707,61.293,26.97,0.903,bicubic,-81.713,-59.893,+115\ncspdarknet53.ra_in1k,256,11.693,88.307,43.240,56.760,27.64,0.887,bilinear,-82.977,-55.410,-143\nfbnetv3_b.ra2_in1k,256,11.680,88.320,44.133,55.867,8.60,0.950,bilinear,-82.310,-54.427,+16\nefficientnet_b0.ra4_e3600_r224_in1k,256,11.640,88.360,40.893,59.107,5.29,1.000,bicubic,-82.230,-57.497,+38\ngmixer_24_224.ra3_in1k,224,11.627,88.373,37.267,62.733,24.72,0.875,bicubic,-81.243,-60.623,+181\nconvnextv2_femto.fcmae_ft_in1k,288,11.600,88.400,40.600,59.400,5.23,0.950,bicubic,-82.570,-58.030,-25\nvit_small_patch32_224.augreg_in21k_ft_in1k,224,11.587,88.413,39.533,60.467,22.88,0.900,bicubic,-80.493,-58.717,+260\nbotnet26t_256.c1_in1k,256,11.507,88.493,39.760,60.240,12.49,0.950,bicubic,-82.013,-58.700,+89\nresnet50.ra_in1k,288,11.493,88.507,41.160,58.840,25.56,0.950,bicubic,-82.727,-57.480,-45\ndla102x2.in1k,224,11.453,88.547,41.133,58.867,41.28,0.875,bilinear,-82.607,-57.357,-5\nmambaout_femto.in1k,224,11.400,88.600,41.080,58.920,7.30,1.000,bicubic,-82.300,-57.420,+58\nseresnet50.a1_in1k,224,11.400,88.600,39.107,60.893,28.09,0.950,bicubic,-82.650,-59.303,-1\nregnety_080.pycls_in1k,224,11.387,88.613,40.733,59.267,39.18,0.875,bicubic,-82.793,-57.937,-37\nxcit_nano_12_p16_384.fb_dist_in1k,384,11.387,88.613,39.587,60.413,3.05,1.000,bicubic,-80.503,-58.453,+275\nefficientvit_b1.r224_in1k,224,11.373,88.627,39.920,60.080,9.10,0.950,bicubic,-82.167
,-58.650,+80\nlevit_128.fb_dist_in1k,224,11.360,88.640,39.747,60.253,9.21,0.900,bicubic,-82.020,-58.633,+107\nresnet152.tv2_in1k,176,11.347,88.653,38.053,61.947,60.19,0.875,bilinear,-82.753,-60.427,-20\nfbnetv3_d.ra2_in1k,224,11.307,88.693,41.053,58.947,10.31,0.950,bilinear,-82.133,-57.307,+92\nmobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k,224,11.293,88.707,42.067,57.933,8.46,0.900,bicubic,-83.027,-56.513,-74\nefficientnet_b2_pruned.in1k,260,11.293,88.707,41.813,58.187,8.31,0.890,bicubic,-82.847,-56.707,-27\nlambda_resnet26t.c1_in1k,256,11.293,88.707,39.960,60.040,10.96,0.940,bicubic,-82.577,-58.640,+20\nlevit_conv_128.fb_dist_in1k,224,11.293,88.707,39.720,60.280,9.21,0.900,bicubic,-82.087,-58.650,+102\ntf_efficientnet_el.in1k,300,11.253,88.747,41.440,58.560,10.59,0.904,bicubic,-83.147,-57.270,-92\ntf_efficientnetv2_b3.in1k,240,11.227,88.773,39.173,60.827,14.36,0.904,bicubic,-83.363,-59.487,-136\nseresnext26t_32x4d.bt_in1k,288,11.187,88.813,40.747,59.253,16.81,0.950,bicubic,-82.383,-57.763,+67\ndpn92.mx_in1k,224,11.120,88.880,39.440,60.560,37.67,0.875,bicubic,-83.160,-59.220,-70\nconvnext_femto.d1_in1k,288,11.107,88.893,42.573,57.427,5.22,0.950,bicubic,-82.823,-55.977,+3\nresnetv2_34.ra4_e3600_r224_in1k,288,11.107,88.893,37.973,62.027,21.80,1.000,bicubic,-83.103,-60.727,-58\ntf_efficientnet_b0.ns_jft_in1k,224,11.093,88.907,39.907,60.093,5.29,0.875,bicubic,-82.597,-58.723,+42\nmobilenetv4_conv_medium.e500_r224_in1k,224,11.080,88.920,40.453,59.547,9.72,0.950,bicubic,-83.130,-58.107,-61\necaresnet50d_pruned.miil_in1k,224,11.053,88.947,42.027,57.973,19.94,0.875,bicubic,-83.207,-56.673,-71\nresnet152c.gluon_in1k,224,11.053,88.947,36.787,63.213,60.21,0.875,bicubic,-83.107,-61.933,-48\nmobilevitv2_100.cvnets_in1k,256,11.040,88.960,40.133,59.867,4.90,0.888,bicubic,-82.250,-57.897,+104\nresnet50d.ra4_e3600_r224_in1k,224,10.973,89.027,40.613,59.387,25.58,0.950,bicubic,-83.657,-58.267,-153\nxcit_tiny_12_p16_224.fb_in1k,224,10.933,89.067,36.880,63.120,6.72,1.000,bicubic,-81.647,-6
1.370,+187\nvit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k,384,10.920,89.080,39.453,60.547,6.36,1.000,bicubic,-81.120,-58.837,+235\nhrnet_w48.ms_in1k,224,10.893,89.107,39.973,60.027,77.47,0.875,bilinear,-83.037,-58.557,-9\ninception_v3.tf_adv_in1k,299,10.880,89.120,36.760,63.240,23.83,0.875,bicubic,-82.020,-61.380,+147\nregnety_008_tv.tv2_in1k,224,10.853,89.147,40.573,59.427,6.43,0.965,bicubic,-82.837,-57.917,+32\ninception_v3.tf_in1k,299,10.853,89.147,36.693,63.307,23.83,0.875,bicubic,-82.497,-61.857,+89\ntf_efficientnetv2_b2.in1k,260,10.840,89.160,39.480,60.520,10.10,0.890,bicubic,-83.600,-59.120,-114\nhalonet26t.a1h_in1k,256,10.840,89.160,38.627,61.373,12.48,0.950,bicubic,-83.210,-59.903,-32\nnf_regnet_b1.ra2_in1k,256,10.813,89.187,40.413,59.587,10.22,0.900,bicubic,-82.607,-57.997,+71\nmobileone_s4.apple_in1k,224,10.787,89.213,38.453,61.547,14.95,0.900,bilinear,-82.983,-59.967,+18\ndpn107.mx_in1k,224,10.747,89.253,38.040,61.960,86.92,0.875,bicubic,-83.623,-60.600,-105\nresnet50.c2_in1k,224,10.733,89.267,39.000,61.000,25.56,0.950,bicubic,-83.547,-59.540,-88\nconvnext_pico_ols.d1_in1k,224,10.720,89.280,40.627,59.373,9.06,0.950,bicubic,-83.340,-57.803,-44\nresnetv2_50.a1h_in1k,224,10.720,89.280,39.267,60.733,25.55,0.950,bicubic,-83.730,-59.503,-122\nresnet34d.ra2_in1k,288,10.707,89.293,38.987,61.013,21.82,0.950,bicubic,-82.943,-59.523,+28\nresnext50_32x4d.ra_in1k,224,10.667,89.333,40.227,59.773,25.03,0.875,bicubic,-83.443,-58.433,-54\nseresnext26d_32x4d.bt_in1k,288,10.613,89.387,40.947,59.053,16.81,0.950,bicubic,-82.827,-57.493,+61\nxcit_nano_12_p8_224.fb_dist_in1k,224,10.600,89.400,37.947,62.053,3.05,1.000,bicubic,-81.510,-59.843,+211\nresnext50d_32x4d.bt_in1k,224,10.533,89.467,39.373,60.627,25.05,0.875,bicubic,-83.687,-59.197,-87\ntf_efficientnet_b2.ap_in1k,260,10.453,89.547,39.707,60.293,9.11,0.890,bicubic,-84.057,-58.913,-141\nstarnet_s4.in1k,224,10.373,89.627,39.427,60.573,7.48,0.875,bicubic,-83.427,-59.073,+2\nrepvit_m0_9.dist_450e_in1k,224,10.347,89.653,40.027,59
.973,5.49,0.950,bicubic,-83.263,-58.493,+29\ndpn131.mx_in1k,224,10.347,89.653,36.653,63.347,79.25,0.875,bicubic,-83.683,-61.967,-45\nhrnet_w44.ms_in1k,224,10.333,89.667,39.187,60.813,67.06,0.875,bilinear,-83.247,-59.243,+31\ndensenetblur121d.ra_in1k,288,10.307,89.693,39.520,60.480,8.00,0.950,bicubic,-82.323,-58.630,+158\nrexnet_130.nav_in1k,224,10.293,89.707,41.400,58.600,7.56,0.875,bicubic,-83.597,-57.220,-23\nconvnextv2_femto.fcmae_ft_in1k,224,10.280,89.720,38.560,61.440,5.23,0.875,bicubic,-83.110,-59.820,+60\nxcit_nano_12_p8_224.fb_in1k,224,10.200,89.800,36.920,63.080,3.05,1.000,bicubic,-80.770,-60.730,+272\nresnext101_32x8d.tv_in1k,224,10.133,89.867,37.733,62.267,88.79,0.875,bilinear,-83.677,-60.847,-10\nseresnet50.a2_in1k,224,10.107,89.893,38.413,61.587,28.09,0.950,bicubic,-83.853,-60.217,-37\nresnet101.tv2_in1k,176,10.093,89.907,37.253,62.747,44.55,0.875,bilinear,-83.727,-61.107,-14\nresnetrs50.tf_in1k,224,10.080,89.920,37.267,62.733,35.69,0.910,bicubic,-84.220,-61.373,-114\nregnetx_160.pycls_in1k,224,10.040,89.960,37.773,62.227,54.28,0.875,bicubic,-84.120,-60.967,-85\nregnety_064.pycls_in1k,224,10.027,89.973,39.120,60.880,30.58,0.875,bicubic,-84.133,-59.510,-85\nlambda_resnet26rpt_256.c1_in1k,256,10.027,89.973,37.560,62.440,10.99,0.940,bicubic,-83.703,-60.930,-2\ndpn98.mx_in1k,224,9.987,90.013,35.973,64.027,61.57,0.875,bicubic,-84.173,-62.487,-84\nresnet50.a1_in1k,224,9.973,90.027,38.000,62.000,25.56,0.950,bicubic,-84.437,-60.430,-142\nresnext50_32x4d.a3_in1k,224,9.973,90.027,37.587,62.413,25.03,0.950,bicubic,-83.677,-60.973,+7\nresnet33ts.ra2_in1k,288,9.960,90.040,39.293,60.707,19.68,1.000,bicubic,-84.200,-59.317,-88\nefficientnet_b1.ft_in1k,256,9.947,90.053,37.307,62.693,7.79,1.000,bicubic,-83.333,-60.933,+65\nlegacy_xception.tf_in1k,299,9.933,90.067,37.880,62.120,22.86,0.897,bicubic,-83.537,-60.650,+32\nlegacy_seresnext50_32x4d.in1k,224,9.893,90.107,38.960,61.040,27.56,0.875,bilinear,-83.837,-59.440,-11\ninception_v3.tv_in1k,299,9.893,90.107,35.040,64.960,
23.83,0.875,bicubic,-82.887,-62.910,+121\nefficientnet_b0.ra4_e3600_r224_in1k,224,9.880,90.120,38.120,61.880,5.29,0.900,bicubic,-83.350,-60.250,+64\nresnext50_32x4d.a2_in1k,224,9.840,90.160,37.067,62.933,25.03,0.950,bicubic,-84.210,-61.173,-68\nresnet152.gluon_in1k,224,9.747,90.253,35.920,64.080,60.19,0.875,bicubic,-84.413,-62.680,-93\ntf_efficientnet_b2.aa_in1k,260,9.680,90.320,38.880,61.120,9.11,0.890,bicubic,-84.720,-59.790,-148\nresnet50.c1_in1k,224,9.640,90.360,37.547,62.453,25.56,0.950,bicubic,-84.390,-61.163,-68\ntf_efficientnet_cc_b1_8e.in1k,240,9.613,90.387,36.973,63.027,39.72,0.882,bicubic,-84.307,-61.297,-51\ntf_efficientnet_lite3.in1k,300,9.587,90.413,39.147,60.853,8.20,0.904,bilinear,-84.613,-59.483,-110\nresnet50d.a2_in1k,224,9.560,90.440,37.680,62.320,25.58,0.950,bicubic,-84.540,-60.870,-87\nhgnetv2_b1.ssld_stage2_ft_in1k,224,9.547,90.453,39.613,60.387,6.34,0.965,bicubic,-84.333,-59.037,-48\nfbnetv3_b.ra2_in1k,224,9.493,90.507,38.840,61.160,8.60,0.950,bilinear,-83.817,-59.640,+49\nresnet50.ram_in1k,288,9.427,90.573,35.373,64.627,25.56,0.950,bicubic,-85.093,-63.587,-176\nres2net101_26w_4s.in1k,224,9.413,90.587,34.440,65.560,45.21,0.875,bilinear,-84.337,-63.880,-25\nresnet34.ra4_e3600_r224_in1k,288,9.400,90.600,37.560,62.440,21.80,1.000,bicubic,-84.240,-60.830,-11\ncspresnet50.ra_in1k,256,9.360,90.640,39.667,60.333,21.62,0.887,bilinear,-84.440,-58.963,-36\nnf_resnet50.ra2_in1k,256,9.347,90.653,40.347,59.653,25.56,0.940,bicubic,-84.803,-58.143,-104\nresnet50.tv2_in1k,176,9.320,90.680,38.787,61.213,25.56,0.875,bilinear,-84.470,-59.713,-35\nresnet50.d_in1k,224,9.307,90.693,35.667,64.333,25.56,0.950,bicubic,-84.753,-62.873,-88\nlegacy_seresnet152.in1k,224,9.253,90.747,37.040,62.960,66.82,0.875,bilinear,-84.187,-61.590,+15\nresnet34.a1_in1k,288,9.253,90.747,34.547,65.453,21.80,1.000,bicubic,-83.857,-63.753,+62\nhrnet_w40.ms_in1k,224,9.187,90.813,36.600,63.400,57.56,0.875,bilinear,-84.323,-61.840,+2\nrepvit_m0_9.dist_300e_in1k,224,9.173,90.827,38.347,61.653,5
.49,0.950,bicubic,-84.257,-60.383,+15\nregnetx_120.pycls_in1k,224,9.093,90.907,36.907,63.093,46.11,0.875,bicubic,-85.147,-61.743,-136\ndpn68b.ra_in1k,224,9.080,90.920,33.707,66.293,12.61,0.950,bicubic,-84.320,-64.843,+20\nvit_tiny_patch16_224.augreg_in21k_ft_in1k,224,9.053,90.947,34.160,65.840,5.72,0.900,bicubic,-82.707,-63.870,+196\nresnet32ts.ra2_in1k,288,9.040,90.960,38.107,61.893,17.96,1.000,bicubic,-84.790,-60.553,-54\nresnext50_32x4d.gluon_in1k,224,9.040,90.960,36.213,63.787,25.03,0.875,bicubic,-84.790,-62.447,-54\nregnety_040.pycls_in1k,224,9.027,90.973,37.147,62.853,20.65,0.875,bicubic,-84.853,-61.563,-65\nvit_base_patch16_224.sam_in1k,224,9.013,90.987,36.053,63.947,86.57,0.900,bicubic,-85.137,-62.687,-116\nresnest26d.gluon_in1k,224,9.000,91.000,37.547,62.453,17.07,0.875,bilinear,-84.360,-61.083,+19\nresnet33ts.ra2_in1k,256,8.987,91.013,38.200,61.800,19.68,0.900,bicubic,-84.623,-60.310,-23\nswiftformer_s.dist_in1k,224,8.987,91.013,35.880,64.120,6.09,0.950,bicubic,-84.043,-62.310,+58\ncrossvit_tiny_240.in1k,240,8.960,91.040,34.333,65.667,7.01,0.875,bicubic,-81.280,-63.277,+256\nbat_resnext26ts.ch_in1k,256,8.920,91.080,36.000,64.000,10.73,0.900,bicubic,-84.400,-62.420,+24\nmixnet_l.ft_in1k,224,8.867,91.133,36.253,63.747,7.33,0.875,bicubic,-84.553,-62.007,+6\nseresnext26d_32x4d.bt_in1k,224,8.840,91.160,36.427,63.573,16.81,0.875,bicubic,-83.900,-61.713,+94\nhgnetv2_b0.ssld_stage1_in22k_in1k,288,8.800,91.200,38.987,61.013,6.00,1.000,bicubic,-84.790,-59.523,-27\nrexnet_100.nav_in1k,224,8.800,91.200,36.187,63.813,4.80,0.875,bicubic,-84.230,-62.143,+53\nseresnext26t_32x4d.bt_in1k,224,8.787,91.213,36.560,63.440,16.81,0.875,bicubic,-84.053,-61.790,+72\nefficientvit_m5.r224_in1k,224,8.787,91.213,34.427,65.573,12.47,0.875,bicubic,-83.653,-63.563,+122\nhgnetv2_b1.ssld_stage1_in22k_in1k,224,8.773,91.227,38.307,61.693,6.34,0.965,bicubic,-84.897,-59.923,-43\nresnet50d.a3_in1k,224,8.773,91.227,36.600,63.400,25.58,0.950,bicubic,-84.747,-61.720,-21\ngcresnext26ts.ch_in1k,288,8
.747,91.253,36.693,63.307,10.48,1.000,bicubic,-84.433,-61.807,+30\nconvit_tiny.fb_in1k,224,8.707,91.293,34.013,65.987,5.71,0.875,bicubic,-81.913,-63.727,+234\nhrnet_w18.ms_aug_in1k,224,8.693,91.307,38.987,61.013,21.30,0.950,bilinear,-84.817,-59.573,-23\nmobilenetv3_large_100.miil_in21k_ft_in1k,224,8.693,91.307,32.467,67.533,5.48,0.875,bilinear,-83.567,-65.183,+132\nhrnet_w30.ms_in1k,224,8.667,91.333,37.027,62.973,37.71,0.875,bilinear,-84.533,-61.373,+23\nresnet50.bt_in1k,288,8.640,91.360,38.427,61.573,25.56,0.950,bicubic,-85.680,-60.213,-179\nlevit_128s.fb_dist_in1k,224,8.613,91.387,32.853,67.147,7.78,0.900,bicubic,-83.327,-65.217,+154\nlevit_conv_128s.fb_dist_in1k,224,8.613,91.387,32.840,67.160,7.78,0.900,bicubic,-83.317,-65.080,+157\nresnet50.a1h_in1k,176,8.600,91.400,35.533,64.467,25.56,0.900,bicubic,-84.970,-62.867,-36\nghostnetv2_160.in1k,224,8.587,91.413,36.533,63.467,12.39,0.875,bicubic,-84.433,-61.827,+41\nhgnetv2_b0.ssld_stage2_ft_in1k,288,8.573,91.427,39.160,60.840,6.00,1.000,bicubic,-85.247,-59.600,-80\ngcresnext26ts.ch_in1k,256,8.573,91.427,35.547,64.453,10.48,0.900,bicubic,-84.197,-62.623,+70\nrepvit_m1.dist_in1k,224,8.547,91.453,36.800,63.200,5.49,0.950,bicubic,-84.773,-61.550,+1\nefficientnet_b1.ft_in1k,224,8.547,91.453,34.147,65.853,7.79,0.875,bicubic,-83.883,-63.713,+108\neca_resnext26ts.ch_in1k,288,8.533,91.467,36.453,63.547,10.30,1.000,bicubic,-84.567,-62.037,+26\nstarnet_s3.in1k,224,8.520,91.480,35.240,64.760,5.75,0.875,bicubic,-84.100,-63.010,+86\nresnet32ts.ra2_in1k,256,8.480,91.520,36.947,63.053,17.96,0.900,bicubic,-85.020,-61.593,-31\nshvit_s3.in1k,224,8.480,91.520,34.440,65.560,14.25,0.875,bicubic,-84.150,-63.800,+83\ndla169.in1k,224,8.453,91.547,35.960,64.040,53.39,0.875,bilinear,-84.877,-62.640,-8\ntf_efficientnet_b2.in1k,260,8.413,91.587,36.320,63.680,9.11,0.890,bicubic,-85.717,-62.130,-145\ntf_efficientnet_b1.ap_in1k,240,8.413,91.587,35.147,64.853,7.79,0.882,bicubic,-85.307,-63.223,-70\nmixer_b16_224.goog_in21k_ft_in1k,224,8.400,91.600,2
9.253,70.747,59.88,0.875,bicubic,-83.430,-68.627,+154\nconvnext_atto_ols.a2_in1k,288,8.387,91.613,34.787,65.213,3.70,0.950,bicubic,-84.693,-63.503,+21\nresnetblur50.bt_in1k,224,8.360,91.640,37.440,62.560,25.56,0.875,bicubic,-85.620,-60.880,-119\nrepvgg_b2.rvgg_in1k,224,8.280,91.720,36.027,63.973,89.02,0.875,bilinear,-85.220,-62.713,-41\nlegacy_seresnet101.in1k,224,8.240,91.760,35.707,64.293,49.33,0.875,bilinear,-85.070,-62.733,-10\ncrossvit_9_240.in1k,240,8.240,91.760,34.120,65.880,8.55,0.875,bicubic,-82.420,-63.620,+206\nese_vovnet19b_dw.ra_in1k,288,8.227,91.773,37.173,62.827,6.54,0.950,bicubic,-84.943,-61.097,+4\nresnet50.b1k_in1k,224,8.213,91.787,35.133,64.867,25.56,0.950,bicubic,-85.817,-63.387,-132\ndla102x.in1k,224,8.173,91.827,36.840,63.160,26.31,0.875,bilinear,-85.317,-61.650,-42\neca_resnext26ts.ch_in1k,256,8.120,91.880,35.747,64.253,10.30,0.900,bicubic,-84.510,-62.343,+67\nresmlp_12_224.fb_distilled_in1k,224,8.107,91.893,36.627,63.373,15.35,0.875,bicubic,-84.713,-61.503,+43\nresnetv2_34d.ra4_e3600_r224_in1k,224,8.040,91.960,33.107,66.893,21.82,0.900,bicubic,-85.350,-65.373,-29\ncs3darknet_m.c2ns_in1k,288,7.960,92.040,36.360,63.640,9.31,0.950,bicubic,-85.390,-61.700,-26\nhrnet_w32.ms_in1k,224,7.947,92.053,37.267,62.733,41.23,0.875,bilinear,-85.563,-61.343,-53\nseresnext26ts.ch_in1k,288,7.920,92.080,35.973,64.027,10.39,1.000,bicubic,-85.060,-62.477,+19\nresnet50.b2k_in1k,224,7.907,92.093,35.320,64.680,25.56,0.950,bicubic,-86.213,-63.130,-161\npoolformerv2_s12.sail_in1k,224,7.893,92.107,34.467,65.533,11.89,1.000,bicubic,-85.067,-63.893,+22\nresnet101c.gluon_in1k,224,7.880,92.120,32.920,67.080,44.57,0.875,bicubic,-85.800,-65.520,-83\nresnet26t.ra2_in1k,320,7.867,92.133,36.293,63.707,16.01,1.000,bicubic,-85.313,-62.007,-10\nrepghostnet_200.in1k,224,7.853,92.147,36.747,63.253,9.80,0.875,bicubic,-85.647,-61.753,-55\nvit_base_patch32_224.augreg_in1k,224,7.827,92.173,30.387,69.613,88.22,0.900,bicubic,-83.373,-67.363,+161\nresnet50d.gluon_in1k,224,7.813,92.187,34.76
0,65.240,25.58,0.875,bicubic,-85.977,-63.630,-103\nres2net50_26w_8s.in1k,224,7.813,92.187,33.053,66.947,48.40,0.875,bilinear,-85.597,-65.127,-45\nefficientformerv2_s0.snap_dist_in1k,224,7.813,92.187,32.947,67.053,3.60,0.950,bicubic,-84.147,-64.943,+117\nconvnext_femto_ols.d1_in1k,224,7.787,92.213,36.467,63.533,5.23,0.875,bicubic,-85.303,-61.933,-3\ndla60_res2next.in1k,224,7.773,92.227,34.667,65.333,17.03,0.875,bilinear,-85.427,-63.743,-20\nmobilevitv2_075.cvnets_in1k,256,7.760,92.240,33.373,66.627,2.87,0.888,bicubic,-83.990,-64.487,+132\nfastvit_t8.apple_dist_in1k,256,7.747,92.253,34.427,65.573,4.03,0.900,bicubic,-84.793,-63.603,+62\ntf_efficientnetv2_b1.in1k,240,7.720,92.280,34.320,65.680,8.14,0.882,bicubic,-86.240,-64.070,-143\ndeit_tiny_distilled_patch16_224.fb_in1k,224,7.720,92.280,33.533,66.467,5.91,0.900,bicubic,-83.040,-64.037,+178\nregnety_032.pycls_in1k,224,7.693,92.307,34.040,65.960,19.44,0.875,bicubic,-85.747,-64.300,-61\nmobilevit_xs.cvnets_in1k,256,7.693,92.307,32.547,67.453,2.32,0.900,bicubic,-83.157,-65.393,+171\nfasternet_t2.in1k,224,7.653,92.347,34.520,65.480,14.98,1.000,bicubic,-85.907,-63.860,-80\ndensenetblur121d.ra_in1k,224,7.653,92.347,34.347,65.653,8.00,0.875,bicubic,-84.267,-63.613,+110\nconvnextv2_atto.fcmae_ft_in1k,288,7.653,92.347,32.693,67.307,3.71,0.950,bicubic,-85.327,-65.367,+4\nconvnext_atto.d2_in1k,288,7.560,92.440,34.747,65.253,3.70,0.950,bicubic,-85.210,-63.673,+25\nresnext50_32x4d.tv2_in1k,176,7.560,92.440,33.667,66.333,25.03,0.875,bilinear,-86.060,-64.633,-92\ntf_efficientnetv2_b2.in1k,208,7.533,92.467,32.960,67.040,10.10,0.890,bicubic,-86.287,-65.490,-125\ndla60_res2net.in1k,224,7.507,92.493,34.587,65.413,20.85,0.875,bilinear,-85.643,-63.513,-27\nresnet50.a2_in1k,224,7.507,92.493,33.240,66.760,25.56,0.950,bicubic,-86.343,-65.280,-135\nresnet152.a3_in1k,160,7.413,92.587,31.560,68.440,60.19,0.950,bicubic,-86.257,-67.110,-104\nhardcorenas_e.miil_green_in1k,224,7.373,92.627,33.347,66.653,8.07,0.875,bilinear,-85.197,-64.773,+43\nresn
et50.ra_in1k,224,7.360,92.640,34.880,65.120,25.56,0.875,bicubic,-85.990,-63.740,-55\nregnetx_064.pycls_in1k,224,7.360,92.640,34.373,65.627,26.21,0.875,bicubic,-86.540,-64.257,-149\nresnet34.a2_in1k,288,7.320,92.680,31.787,68.213,21.80,1.000,bicubic,-85.430,-66.463,+21\nefficientnet_b1_pruned.in1k,240,7.307,92.693,34.400,65.600,6.33,0.882,bicubic,-85.493,-63.650,+10\nwide_resnet101_2.tv_in1k,224,7.307,92.693,33.720,66.280,126.89,0.875,bilinear,-86.453,-64.800,-123\nregnetx_008.tv2_in1k,224,7.227,92.773,34.173,65.827,7.26,0.965,bicubic,-85.353,-64.007,+36\nefficientnet_b0.ra_in1k,224,7.227,92.773,33.960,66.040,5.29,0.875,bicubic,-85.443,-64.120,+25\nresnet101.gluon_in1k,224,7.213,92.787,32.720,67.280,44.55,0.875,bicubic,-86.517,-65.870,-122\ndeit_tiny_patch16_224.fb_in1k,224,7.213,92.787,30.627,69.373,5.72,0.900,bicubic,-82.477,-66.603,+193\ndensenet121.ra_in1k,288,7.200,92.800,35.307,64.693,7.98,0.950,bicubic,-85.350,-62.993,+36\ntf_efficientnet_cc_b0_8e.in1k,224,7.187,92.813,31.973,68.027,24.01,0.875,bicubic,-85.643,-66.457,-1\ntf_efficientnet_b1.aa_in1k,240,7.160,92.840,33.027,66.973,7.79,0.882,bicubic,-86.350,-65.333,-92\nresnet50s.gluon_in1k,224,7.120,92.880,33.133,66.867,25.68,0.875,bicubic,-86.560,-65.327,-121\nedgenext_x_small.in1k,288,7.107,92.893,30.587,69.413,2.34,1.000,bicubic,-84.633,-67.003,+104\ntf_mixnet_l.in1k,224,7.093,92.907,31.573,68.427,7.33,0.875,bicubic,-86.197,-66.677,-58\nresmlp_12_224.fb_in1k,224,7.067,92.933,33.867,66.133,15.35,0.875,bicubic,-85.143,-64.283,+59\nconvmixer_1024_20_ks9_p14.in1k,224,7.000,93.000,32.640,67.360,24.38,0.960,bicubic,-85.420,-65.640,+42\nseresnext26ts.ch_in1k,256,6.987,93.013,34.693,65.307,10.39,0.900,bicubic,-85.733,-63.607,+9\nhardcorenas_f.miil_green_in1k,224,6.973,93.027,33.947,66.053,8.20,0.875,bilinear,-86.007,-64.193,-25\ncs3darknet_focus_m.c2ns_in1k,288,6.960,93.040,34.400,65.600,9.30,0.950,bicubic,-86.040,-63.970,-29\npit_ti_distilled_224.in1k,224,6.933,93.067,30.800,69.200,5.10,0.900,bicubic,-83.837,-66.85
0,+143\nhgnetv2_b0.ssld_stage1_in22k_in1k,224,6.827,93.173,33.587,66.413,6.00,0.965,bicubic,-85.643,-64.833,+30\nfastvit_t8.apple_in1k,256,6.800,93.200,33.453,66.547,4.03,0.900,bicubic,-85.360,-64.437,+55\nconvnext_femto.d1_in1k,224,6.773,93.227,35.067,64.933,5.22,0.875,bicubic,-86.167,-63.143,-25\nconvnextv2_atto.fcmae_ft_in1k,224,6.760,93.240,30.867,69.133,3.71,0.875,bicubic,-85.380,-66.873,+56\nmixnet_m.ft_in1k,224,6.707,93.293,32.053,67.947,5.01,0.875,bicubic,-85.723,-65.767,+31\nmobilenetv3_large_100.ra4_e3600_r224_in1k,256,6.680,93.320,31.827,68.173,5.48,1.000,bicubic,-85.630,-66.193,+40\ntinynet_a.in1k,192,6.653,93.347,32.707,67.293,6.19,0.875,bicubic,-85.817,-65.373,+23\ndpn68b.mx_in1k,224,6.653,93.347,32.600,67.400,12.61,0.875,bicubic,-86.167,-65.550,-17\nghostnetv2_130.in1k,224,6.653,93.347,32.600,67.400,8.96,0.875,bicubic,-85.667,-65.470,+37\nselecsls60b.in1k,224,6.640,93.360,33.067,66.933,32.77,0.875,bicubic,-86.690,-65.443,-80\nefficientnet_es.ra_in1k,224,6.627,93.373,34.333,65.667,5.44,0.875,bicubic,-86.573,-64.077,-66\nese_vovnet19b_dw.ra_in1k,224,6.613,93.387,33.160,66.840,6.54,0.875,bicubic,-85.667,-64.830,+37\nmobileone_s3.apple_in1k,224,6.600,93.400,32.000,68.000,10.17,0.900,bilinear,-86.310,-66.180,-32\npoolformer_s12.sail_in1k,224,6.560,93.440,34.227,65.773,11.92,0.900,bicubic,-86.090,-63.953,-2\nhardcorenas_d.miil_green_in1k,224,6.533,93.467,32.133,67.867,7.50,0.875,bilinear,-85.927,-65.897,+17\nres2net50_26w_6s.in1k,224,6.533,93.467,31.453,68.547,37.05,0.875,bilinear,-86.887,-66.777,-101\nlegacy_seresnext26_32x4d.in1k,224,6.520,93.480,33.027,66.973,16.79,0.875,bicubic,-86.120,-65.093,-4\ndla60x.in1k,224,6.453,93.547,33.973,66.027,17.35,0.875,bilinear,-86.647,-64.457,-62\ntf_efficientnet_b1.in1k,240,6.440,93.560,32.133,67.867,7.79,0.882,bicubic,-86.640,-66.337,-58\nrepghostnet_150.in1k,224,6.427,93.573,32.133,67.867,6.58,0.875,bicubic,-85.933,-66.247,+23\nghostnetv3_100.in1k,224,6.413,93.587,31.867,68.133,8.13,0.875,bicubic,-85.467,-66.053,+67\
nskresnet34.ra_in1k,224,6.413,93.587,31.493,68.507,22.28,0.875,bicubic,-85.967,-66.647,+18\ncs3darknet_m.c2ns_in1k,256,6.400,93.600,33.413,66.587,9.31,0.887,bicubic,-86.270,-65.067,-13\nregnetx_080.pycls_in1k,224,6.400,93.600,32.307,67.693,39.57,0.875,bicubic,-87.450,-66.053,-183\nresnet34d.ra2_in1k,224,6.373,93.627,31.640,68.360,21.82,0.875,bicubic,-86.357,-66.640,-20\nhgnetv2_b0.ssld_stage2_ft_in1k,224,6.360,93.640,33.227,66.773,6.00,0.965,bicubic,-86.470,-64.933,-39\nresnetv2_34.ra4_e3600_r224_in1k,224,6.347,93.653,30.813,69.187,21.80,0.900,bicubic,-86.623,-67.437,-53\nregnety_004.tv2_in1k,224,6.347,93.653,30.413,69.587,4.34,0.965,bicubic,-85.273,-67.317,+75\nresnet50.a3_in1k,224,6.333,93.667,31.547,68.453,25.56,0.950,bicubic,-86.437,-66.523,-31\nrepvgg_b1.rvgg_in1k,224,6.320,93.680,33.613,66.387,57.42,0.875,bilinear,-87.010,-64.767,-102\necaresnet50t.a3_in1k,160,6.293,93.707,30.387,69.613,25.57,0.950,bicubic,-86.807,-67.883,-73\nresnet18.fb_swsl_ig1b_ft_in1k,224,6.227,93.773,31.280,68.720,11.69,0.875,bilinear,-84.433,-66.420,+117\nresnet101.a3_in1k,160,6.200,93.800,28.880,71.120,44.55,0.950,bicubic,-86.840,-69.270,-70\nedgenext_x_small.in1k,256,6.160,93.840,29.627,70.373,2.34,0.900,bicubic,-84.980,-67.923,+91\nresnet26d.bt_in1k,288,6.147,93.853,32.853,67.147,16.01,0.950,bicubic,-86.333,-65.367,-6\nlegacy_seresnet50.in1k,224,6.067,93.933,32.387,67.613,28.09,0.875,bilinear,-86.913,-65.813,-65\npit_ti_224.in1k,224,6.067,93.933,30.067,69.933,4.85,0.900,bicubic,-83.883,-67.373,+137\nresnet152.tv_in1k,224,6.053,93.947,31.467,68.533,60.19,0.875,bilinear,-87.277,-66.803,-109\ntf_efficientnet_cc_b0_4e.in1k,224,6.000,94.000,29.507,70.493,13.31,0.875,bicubic,-86.630,-68.753,-22\nmobilenetv1_125.ra4_e3600_r224_in1k,256,5.987,94.013,30.453,69.547,6.27,1.000,bicubic,-86.803,-67.727,-46\nresnet26t.ra2_in1k,256,5.960,94.040,31.893,68.107,16.01,0.940,bicubic,-86.790,-66.397,-39\ntf_efficientnetv2_b0.in1k,224,5.920,94.080,30.613,69.387,7.14,0.875,bicubic,-87.210,-67.747,-89\nmixe
r_l16_224.goog_in21k_ft_in1k,224,5.907,94.093,18.227,81.773,208.20,0.875,bicubic,-81.243,-75.303,+178\nwide_resnet50_2.tv_in1k,224,5.880,94.120,31.867,68.133,68.88,0.875,bilinear,-87.330,-66.603,-102\ndla102.in1k,224,5.853,94.147,32.733,67.267,33.27,0.875,bilinear,-87.217,-65.817,-83\nresnetv2_18d.ra4_e3600_r224_in1k,288,5.800,94.200,29.800,70.200,11.71,1.000,bicubic,-86.100,-68.330,+38\nresnet50.ram_in1k,224,5.800,94.200,29.080,70.920,25.56,0.875,bicubic,-88.010,-69.320,-195\nresnetrs50.tf_in1k,160,5.787,94.213,27.653,72.347,35.69,0.910,bicubic,-87.143,-70.597,-68\nregnetx_040.pycls_in1k,224,5.773,94.227,31.213,68.787,22.12,0.875,bicubic,-87.767,-67.117,-157\nconvnext_atto_ols.a2_in1k,224,5.760,94.240,29.453,70.547,3.70,0.875,bicubic,-86.160,-68.647,+33\nselecsls60.in1k,224,5.747,94.253,32.347,67.653,30.67,0.875,bicubic,-87.263,-66.063,-82\nresnet50.bt_in1k,224,5.680,94.320,31.893,68.107,25.56,0.875,bicubic,-87.580,-66.497,-114\nmobilenetv3_large_100.ra4_e3600_r224_in1k,224,5.640,94.360,29.093,70.907,5.48,0.950,bicubic,-86.290,-69.057,+28\nregnety_016.pycls_in1k,224,5.613,94.387,30.493,69.507,11.20,0.875,bicubic,-87.527,-67.897,-102\nhardcorenas_c.miil_green_in1k,224,5.600,94.400,30.147,69.853,5.52,0.875,bilinear,-86.430,-67.693,+16\nhrnet_w18_small_v2.gluon_in1k,224,5.587,94.413,31.853,68.147,15.60,0.875,bicubic,-87.183,-66.427,-61\nresnet34.ra4_e3600_r224_in1k,224,5.587,94.413,31.013,68.987,21.80,0.900,bicubic,-87.273,-67.067,-71\nres2next50.in1k,224,5.533,94.467,30.653,69.347,24.67,0.875,bilinear,-87.367,-67.557,-77\nresnet18d.ra4_e3600_r224_in1k,288,5.533,94.467,29.240,70.760,11.71,1.000,bicubic,-86.517,-68.820,+9\nresnext50_32x4d.a3_in1k,160,5.520,94.480,27.853,72.147,25.03,0.950,bicubic,-87.170,-70.137,-52\nseresnet50.a3_in1k,224,5.507,94.493,30.213,69.787,28.09,0.950,bicubic,-86.593,-67.607,+2\nresnet34.a1_in1k,224,5.480,94.520,27.600,72.400,21.80,0.950,bicubic,-86.630,-70.570,0\ncs3darknet_focus_m.c2ns_in1k,256,5.467,94.533,31.867,68.133,9.30,0.887,bicubic,
-86.893,-66.443,-22\nhrnet_w18.ms_in1k,224,5.467,94.533,30.840,69.160,21.30,0.875,bilinear,-86.893,-67.220,-22\nghostnetv2_100.in1k,224,5.453,94.547,28.827,71.173,6.16,0.875,bicubic,-85.437,-68.873,+73\ntf_efficientnet_lite2.in1k,260,5.360,94.640,31.000,69.000,6.09,0.890,bicubic,-87.320,-67.240,-57\nresnest14d.gluon_in1k,224,5.333,94.667,28.560,71.440,10.61,0.875,bilinear,-86.387,-69.310,+33\ntf_efficientnet_b0.ap_in1k,224,5.307,94.693,28.853,71.147,5.29,0.875,bicubic,-86.953,-69.167,-17\nresnext26ts.ra2_in1k,288,5.293,94.707,29.400,70.600,10.30,1.000,bicubic,-86.857,-68.610,-11\ntf_efficientnetv2_b1.in1k,192,5.293,94.707,28.493,71.507,8.14,0.882,bicubic,-87.857,-69.917,-119\ngernet_s.idstcv_in1k,224,5.267,94.733,30.013,69.987,8.17,0.875,bilinear,-86.883,-68.197,-13\nefficientvit_m4.r224_in1k,224,5.253,94.747,27.773,72.227,8.80,0.875,bicubic,-85.347,-69.817,+81\ntf_efficientnet_em.in1k,240,5.213,94.787,30.693,69.307,6.90,0.882,bicubic,-87.737,-67.517,-96\nresnet34.bt_in1k,288,5.200,94.800,29.307,70.693,21.80,0.950,bicubic,-87.210,-68.843,-36\nefficientvit_m3.r224_in1k,224,5.200,94.800,27.413,72.587,6.90,0.875,bicubic,-84.680,-70.117,+103\nxcit_nano_12_p16_224.fb_dist_in1k,224,5.200,94.800,26.693,73.307,3.05,1.000,bicubic,-84.460,-70.397,+108\ntf_efficientnet_b0.aa_in1k,224,5.187,94.813,29.053,70.947,5.29,0.875,bicubic,-87.093,-69.057,-27\ndensenet121.ra_in1k,224,5.173,94.827,29.533,70.467,7.98,0.875,bicubic,-86.447,-68.547,+23\nshvit_s2.in1k,224,5.173,94.827,27.387,72.613,11.48,0.875,bicubic,-86.277,-70.443,+26\nconvnext_atto.d2_in1k,224,5.160,94.840,28.853,71.147,3.70,0.875,bicubic,-86.620,-69.087,+14\nrepvgg_b1g4.rvgg_in1k,224,5.107,94.893,30.467,69.533,39.97,0.875,bilinear,-87.903,-67.843,-113\nmobilenetv4_conv_small.e3600_r256_in1k,320,5.093,94.907,29.987,70.013,3.77,1.000,bicubic,-86.717,-67.653,+10\nmobilenetv1_100h.ra4_e3600_r224_in1k,256,5.093,94.907,28.480,71.520,5.28,0.950,bicubic,-87.137,-69.520,-29\nmobilenetv3_large_100.ra_in1k,224,5.080,94.920,28.053,7
1.947,5.48,0.875,bicubic,-86.260,-69.647,+30\nres2net50_26w_4s.in1k,224,5.067,94.933,29.053,70.947,25.70,0.875,bilinear,-87.443,-68.997,-57\ntf_mixnet_m.in1k,224,5.040,94.960,28.320,71.680,5.01,0.875,bicubic,-87.300,-69.560,-42\nvit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k,224,4.973,95.027,26.813,73.187,6.34,0.900,bicubic,-84.257,-70.407,+103\nhardcorenas_a.miil_green_in1k,224,4.960,95.040,27.947,72.053,5.26,0.875,bilinear,-86.400,-69.823,+23\nmobilenetv1_125.ra4_e3600_r224_in1k,224,4.947,95.053,28.280,71.720,6.27,0.900,bicubic,-87.043,-69.730,-17\nregnetx_004_tv.tv2_in1k,224,4.933,95.067,27.453,72.547,5.50,0.965,bicubic,-85.667,-70.127,+64\nmixnet_s.ft_in1k,224,4.920,95.080,28.840,71.160,4.13,0.875,bicubic,-87.030,-68.840,-14\nregnetx_032.pycls_in1k,224,4.880,95.120,29.880,70.120,15.30,0.875,bicubic,-88.260,-68.460,-141\nres2net50_14w_8s.in1k,224,4.880,95.120,28.440,71.560,25.06,0.875,bilinear,-87.830,-69.740,-86\ntf_efficientnetv2_b0.in1k,192,4.853,95.147,26.573,73.427,7.14,0.875,bicubic,-87.437,-71.637,-46\nrepghostnet_130.in1k,224,4.840,95.160,29.480,70.520,5.48,0.875,bicubic,-87.040,-68.450,-8\nhardcorenas_b.miil_green_in1k,224,4.827,95.173,27.773,72.227,5.18,0.875,bilinear,-87.013,-69.997,-6\nmobilenetv3_rw.rmsp_in1k,224,4.813,95.187,29.853,70.147,5.48,0.875,bicubic,-86.507,-67.777,+19\nxcit_nano_12_p16_224.fb_in1k,224,4.813,95.187,25.253,74.747,3.05,1.000,bicubic,-83.787,-71.577,+105\nresnet50c.gluon_in1k,224,4.787,95.213,27.760,72.240,25.58,0.875,bicubic,-88.233,-70.470,-133\nstarnet_s2.in1k,224,4.747,95.253,27.680,72.320,3.68,0.875,bicubic,-86.153,-70.050,+37\nresnext26ts.ra2_in1k,256,4.693,95.307,28.800,71.200,10.30,0.900,bicubic,-87.177,-69.100,-14\nmobilenetv4_conv_small.e2400_r224_in1k,256,4.693,95.307,26.987,73.013,3.77,0.950,bicubic,-86.297,-70.673,+30\nfasternet_t1.in1k,224,4.693,95.307,26.413,73.587,7.60,1.000,bicubic,-86.927,-71.497,+1\nresnext50_32x4d.tv_in1k,224,4.680,95.320,29.840,70.160,25.03,0.875,bilinear,-88.070,-68.150,-103\nselecsls42b.in1k,
224,4.653,95.347,28.307,71.693,32.46,0.875,bicubic,-87.647,-69.833,-58\ndensenet161.tv_in1k,224,4.627,95.373,29.453,70.547,28.68,0.875,bicubic,-87.923,-68.737,-82\nmobilenetv1_100.ra4_e3600_r224_in1k,256,4.613,95.387,27.107,72.893,4.23,0.950,bicubic,-87.487,-70.933,-45\nresnetv2_18.ra4_e3600_r224_in1k,288,4.613,95.387,27.040,72.960,11.69,1.000,bicubic,-86.717,-70.820,+8\nswiftformer_xs.dist_in1k,224,4.613,95.387,26.360,73.640,3.48,0.950,bicubic,-86.447,-71.290,+20\nresnet101.tv_in1k,224,4.560,95.440,29.013,70.987,44.55,0.875,bilinear,-88.250,-69.217,-119\nstarnet_s1.in1k,224,4.560,95.440,25.840,74.160,2.87,0.875,bicubic,-85.760,-71.650,+49\nmobileone_s2.apple_in1k,224,4.480,95.520,28.867,71.133,7.88,0.900,bilinear,-88.350,-69.383,-124\ntf_efficientnet_lite1.in1k,240,4.480,95.520,28.320,71.680,5.42,0.882,bicubic,-88.110,-69.720,-94\nmobilenetv2_120d.ra_in1k,224,4.467,95.533,29.120,70.880,5.83,0.875,bicubic,-88.013,-68.920,-84\nresnet34.a2_in1k,224,4.427,95.573,24.413,75.587,21.80,0.950,bicubic,-87.303,-73.357,-16\ntf_efficientnet_b0.in1k,224,4.413,95.587,26.760,73.240,5.29,0.875,bicubic,-87.667,-71.160,-50\nresnet50d.a3_in1k,160,4.373,95.627,26.147,73.853,25.58,0.950,bicubic,-88.077,-71.893,-83\npvt_v2_b0.in1k,224,4.347,95.653,25.707,74.293,3.67,0.900,bicubic,-84.433,-71.163,+83\ninception_next_atto.sail_in1k,224,4.333,95.667,27.427,72.573,4.16,0.875,bicubic,-87.047,-70.373,-10\nshvit_s1.in1k,224,4.333,95.667,24.733,75.267,6.33,0.875,bicubic,-85.357,-72.767,+62\nvit_base_patch32_224.sam_in1k,224,4.307,95.693,24.307,75.693,88.22,0.900,bicubic,-85.443,-72.673,+57\ntinynet_b.in1k,188,4.253,95.747,26.800,73.200,3.73,0.875,bicubic,-86.657,-70.870,+13\nrepghostnet_111.in1k,224,4.240,95.760,26.307,73.693,4.54,0.875,bicubic,-86.510,-71.153,+23\nmobilenetv4_conv_small.e1200_r224_in1k,256,4.227,95.773,26.147,73.853,3.77,0.950,bicubic,-86.423,-71.363,+26\nmobilenetv1_100h.ra4_e3600_r224_in1k,224,4.227,95.773,25.787,74.213,5.28,0.875,bicubic,-87.153,-71.893,-15\nfbnetc_100.rmsp_
in1k,224,4.213,95.787,25.880,74.120,5.57,0.875,bilinear,-86.567,-71.340,+15\nmobilenetv1_100.ra4_e3600_r224_in1k,224,4.173,95.827,25.280,74.720,4.23,0.875,bicubic,-87.017,-72.500,-6\nedgenext_xx_small.in1k,288,4.173,95.827,24.000,76.000,1.33,1.000,bicubic,-84.697,-72.680,+71\nresnet50.am_in1k,224,4.133,95.867,28.547,71.453,25.56,0.875,bicubic,-89.857,-70.053,-309\nefficientnet_es_pruned.in1k,224,4.120,95.880,26.800,73.200,5.44,0.875,bicubic,-87.080,-70.580,-12\nmobilenetv4_conv_small.e3600_r256_in1k,256,4.067,95.933,25.707,74.293,3.77,0.950,bicubic,-86.693,-71.683,+14\nresnet50.gluon_in1k,224,4.053,95.947,26.800,73.200,25.56,0.875,bicubic,-88.497,-71.410,-106\nresnet26d.bt_in1k,224,4.040,95.960,28.360,71.640,16.01,0.875,bicubic,-88.010,-69.600,-65\ndensenet201.tv_in1k,224,3.987,96.013,27.267,72.733,20.01,0.875,bicubic,-88.773,-70.963,-134\nconvnext_zepto_rms_ols.ra4_e3600_r224_in1k,224,3.973,96.027,24.160,75.840,2.16,0.900,bicubic,-86.157,-73.110,+31\nsemnasnet_100.rmsp_in1k,224,3.947,96.053,26.920,73.080,3.89,0.875,bicubic,-87.413,-70.620,-24\ntf_mixnet_s.in1k,224,3.947,96.053,25.253,74.747,4.13,0.875,bicubic,-87.573,-72.337,-32\nmobilevitv2_050.cvnets_in1k,256,3.933,96.067,23.667,76.333,1.37,0.888,bicubic,-84.297,-73.293,+74\nresnet26.bt_in1k,288,3.907,96.093,28.133,71.867,16.00,0.950,bicubic,-88.123,-70.067,-70\ndpn68.mx_in1k,224,3.907,96.093,25.600,74.400,12.61,0.875,bicubic,-88.083,-72.430,-66\nresnetv2_18d.ra4_e3600_r224_in1k,224,3.893,96.107,24.627,75.373,11.71,0.900,bicubic,-86.977,-72.913,-2\nrepvgg_a2.rvgg_in1k,224,3.853,96.147,27.307,72.693,28.21,0.875,bilinear,-88.077,-70.763,-63\ntf_efficientnet_es.in1k,224,3.840,96.160,26.093,73.907,5.44,0.875,bicubic,-88.140,-71.777,-69\nresnet18.a1_in1k,288,3.813,96.187,23.000,77.000,11.69,1.000,bicubic,-85.907,-74.100,+35\nsemnasnet_075.rmsp_in1k,224,3.800,96.200,26.600,73.400,2.91,0.875,bicubic,-86.290,-70.830,+23\nmobilevit_xxs.cvnets_in1k,256,3.800,96.200,22.133,77.867,1.27,0.900,bicubic,-83.350,-73.967,+74\nresn
et18d.ra2_in1k,288,3.760,96.240,25.720,74.280,11.71,0.950,bicubic,-86.550,-71.470,+13\nedgenext_xx_small.in1k,256,3.760,96.240,23.453,76.547,1.33,0.900,bicubic,-84.640,-73.077,+59\nregnety_008.pycls_in1k,224,3.747,96.253,26.867,73.133,6.26,0.875,bicubic,-87.983,-71.313,-52\nresnet18.fb_ssl_yfcc100m_ft_in1k,224,3.747,96.253,25.293,74.707,11.69,0.875,bilinear,-86.503,-72.267,+13\nmobilenetv4_conv_small.e1200_r224_in1k,224,3.720,96.280,24.467,75.533,3.77,0.875,bicubic,-86.180,-72.763,+25\ndensenet169.tv_in1k,224,3.707,96.293,25.573,74.427,14.15,0.875,bicubic,-88.253,-72.517,-77\nmobilenetv2_140.ra_in1k,224,3.667,96.333,26.453,73.547,6.11,0.875,bicubic,-88.163,-70.757,-63\nconvnext_zepto_rms.ra4_e3600_r224_in1k,224,3.667,96.333,23.400,76.600,2.16,0.875,bicubic,-86.353,-73.900,+18\ntf_mobilenetv3_large_100.in1k,224,3.653,96.347,24.960,75.040,5.48,0.875,bilinear,-87.557,-72.700,-38\nmobilenetv4_conv_small.e2400_r224_in1k,224,3.653,96.347,24.413,75.587,3.77,0.875,bicubic,-86.417,-72.867,+14\ndla60.in1k,224,3.640,96.360,28.013,71.987,22.04,0.875,bilinear,-88.570,-70.087,-102\nres2net50_48w_2s.in1k,224,3.600,96.400,26.187,73.813,25.29,0.875,bilinear,-88.960,-71.883,-135\nrepghostnet_100.in1k,224,3.560,96.440,24.320,75.680,4.07,0.875,bicubic,-86.760,-73.100,-1\nefficientnet_lite0.ra_in1k,224,3.547,96.453,26.320,73.680,4.65,0.875,bicubic,-87.593,-71.320,-36\nefficientvit_m2.r224_in1k,224,3.547,96.453,21.600,78.400,4.19,0.875,bicubic,-84.903,-75.310,+44\nregnetx_016.pycls_in1k,224,3.533,96.467,26.347,73.653,9.19,0.875,bicubic,-88.647,-71.843,-107\nregnety_006.pycls_in1k,224,3.533,96.467,25.107,74.893,6.06,0.875,bicubic,-87.907,-72.653,-57\nspnasnet_100.rmsp_in1k,224,3.533,96.467,24.387,75.613,4.42,0.875,bilinear,-86.777,-73.173,-4\nseresnet50.a3_in1k,160,3.533,96.467,22.627,77.373,28.09,0.950,bicubic,-87.467,-74.783,-32\nresnet18d.ra4_e3600_r224_in1k,224,3.493,96.507,24.560,75.440,11.71,0.900,bicubic,-87.197,-73.010,-19\nghostnet_100.in1k,224,3.400,96.600,25.187,74.813,5.18,0.8
75,bicubic,-86.800,-72.063,-4\nresnet34.a3_in1k,224,3.400,96.600,23.240,76.760,21.80,0.950,bicubic,-86.540,-73.930,+7\nresnet50.a3_in1k,160,3.373,96.627,23.600,76.400,25.56,0.950,bicubic,-88.077,-74.030,-63\ncs3darknet_focus_s.ra4_e3600_r256_in1k,320,3.360,96.640,22.840,77.160,3.27,1.000,bicubic,-86.630,-74.630,+2\nlegacy_seresnet34.in1k,224,3.320,96.680,23.707,76.293,21.96,0.875,bilinear,-87.600,-73.863,-37\nresnet18.a2_in1k,288,3.307,96.693,22.253,77.747,11.69,1.000,bicubic,-86.273,-74.707,+14\nresnet34.bt_in1k,224,3.253,96.747,24.413,75.587,21.80,0.875,bicubic,-87.917,-73.137,-50\nefficientvit_b0.r224_in1k,224,3.253,96.747,19.520,80.480,3.41,0.950,bicubic,-84.747,-76.560,+40\ndla34.in1k,224,3.240,96.760,23.667,76.333,15.74,0.875,bilinear,-87.530,-73.983,-34\ntest_vit3.r160_in1k,160,3.240,96.760,17.133,82.867,0.93,0.950,bicubic,-74.340,-75.217,+74\ntinynet_c.in1k,184,3.200,96.800,21.360,78.640,2.46,0.875,bicubic,-84.600,-75.020,+39\nmobilenetv2_110d.ra_in1k,224,3.133,96.867,24.787,75.213,4.52,0.875,bicubic,-87.867,-72.763,-50\nmnasnet_100.rmsp_in1k,224,3.133,96.867,24.307,75.693,4.38,0.875,bicubic,-87.517,-73.203,-28\nregnety_004.pycls_in1k,224,3.080,96.920,22.613,77.387,4.34,0.875,bicubic,-87.430,-74.967,-24\ntf_efficientnet_lite0.in1k,224,3.053,96.947,22.827,77.173,4.65,0.875,bicubic,-88.047,-74.713,-54\nmobileone_s1.apple_in1k,224,3.040,96.960,25.027,74.973,4.83,0.900,bilinear,-88.320,-72.813,-71\nresnetv2_18.ra4_e3600_r224_in1k,224,3.040,96.960,22.173,77.827,11.69,0.900,bicubic,-87.100,-75.137,-17\nskresnet18.ra_in1k,224,3.027,96.973,22.680,77.320,11.96,0.875,bicubic,-86.633,-74.560,-2\nrepghostnet_080.in1k,224,3.027,96.973,22.627,77.373,3.28,0.875,bicubic,-85.943,-74.143,+11\nefficientvit_m1.r224_in1k,224,2.907,97.093,19.573,80.427,2.98,0.875,bicubic,-83.893,-76.437,+37\ntf_mobilenetv3_large_075.in1k,224,2.893,97.107,21.707,78.293,3.99,0.875,bilinear,-86.777,-75.513,-5\nvgg19_bn.tv_in1k,224,2.853,97.147,23.547,76.453,143.68,0.875,bilinear,-87.247,-74.033,-21\
ntinynet_d.in1k,152,2.773,97.227,17.733,82.267,2.34,0.875,bicubic,-82.057,-77.487,+45\nregnetx_008.pycls_in1k,224,2.720,97.280,22.360,77.640,7.26,0.875,bicubic,-88.380,-75.350,-64\nresnet14t.c3_in1k,224,2.720,97.280,19.827,80.173,10.08,0.950,bicubic,-86.280,-76.873,+3\ncs3darknet_focus_s.ra4_e3600_r256_in1k,256,2.667,97.333,20.600,79.400,3.27,0.887,bicubic,-86.513,-76.300,-1\nlcnet_100.ra2_in1k,224,2.653,97.347,20.507,79.493,2.95,0.875,bicubic,-86.107,-76.203,+9\nresnet34.gluon_in1k,224,2.640,97.360,21.653,78.347,21.80,0.875,bicubic,-88.330,-76.137,-61\nhrnet_w18_small_v2.ms_in1k,224,2.627,97.373,23.600,76.400,15.60,0.875,bilinear,-88.563,-74.310,-75\nfasternet_t0.in1k,224,2.627,97.373,19.173,80.827,3.91,1.000,bicubic,-85.643,-77.347,+15\ntest_vit2.r160_in1k,160,2.613,97.387,13.360,86.640,0.46,0.950,bicubic,-62.027,-72.090,+62\nrepvgg_b0.rvgg_in1k,224,2.600,97.400,23.907,76.093,15.82,0.875,bilinear,-88.800,-74.073,-91\nhrnet_w18_small.gluon_in1k,224,2.600,97.400,20.587,79.413,13.19,0.875,bicubic,-86.870,-76.443,-11\ntest_convnext.r160_in1k,160,2.573,97.427,13.587,86.413,0.27,0.950,bicubic,-66.847,-75.063,+53\nvgg16_bn.tv_in1k,224,2.560,97.440,23.587,76.413,138.37,0.875,bilinear,-87.530,-73.783,-32\nvgg16.tv_in1k,224,2.560,97.440,19.973,80.027,138.36,0.875,bilinear,-85.980,-76.827,+2\nmobilenetv4_conv_small_050.e3000_r224_in1k,256,2.547,97.453,16.853,83.147,2.24,0.950,bicubic,-81.613,-78.287,+36\nresnet18d.ra2_in1k,224,2.520,97.480,21.533,78.467,11.71,0.875,bicubic,-86.810,-75.647,-16\ndensenet121.tv_in1k,224,2.467,97.533,22.533,77.467,7.98,0.875,bicubic,-88.433,-75.067,-71\nregnetx_006.pycls_in1k,224,2.467,97.533,20.720,79.280,6.20,0.875,bicubic,-87.903,-76.700,-49\nlegacy_seresnet18.in1k,224,2.467,97.533,19.880,80.120,11.78,0.875,bicubic,-86.423,-77.070,-7\ntest_nfnet.r160_in1k,160,2.453,97.547,11.813,88.187,0.38,0.950,bicubic,-65.657,-76.217,+46\nresnet18.a1_in1k,224,2.440,97.560,18.467,81.533,11.69,0.950,bicubic,-86.000,-78.183,-3\nresnet26.bt_in1k,224,2.400,97.6
00,22.840,77.160,16.00,0.875,bicubic,-88.780,-74.930,-88\ntest_efficientnet_evos.r160_in1k,160,2.320,97.680,11.840,88.160,0.36,0.950,bicubic,-64.490,-74.460,+44\nlcnet_075.ra2_in1k,224,2.280,97.720,17.133,82.867,2.36,0.875,bicubic,-83.750,-78.547,+16\ntest_vit.r160_in1k,160,2.280,97.720,12.667,87.333,0.37,0.950,bicubic,-60.210,-71.713,+48\ntest_efficientnet_gn.r160_in1k,160,2.280,97.720,11.707,88.293,0.36,0.950,bicubic,-62.230,-73.863,+48\nefficientvit_m0.r224_in1k,224,2.267,97.733,16.413,83.587,2.35,0.875,bicubic,-80.143,-78.027,+30\nrepghostnet_058.in1k,224,2.253,97.747,18.427,81.573,2.55,0.875,bicubic,-84.247,-77.473,+9\nresnet34.a3_in1k,160,2.253,97.747,18.280,81.720,21.80,0.950,bicubic,-85.647,-78.190,-2\nresnet18.a3_in1k,224,2.253,97.747,17.520,82.480,11.69,0.950,bicubic,-84.207,-78.370,+8\nmobilenetv3_small_075.lamb_in1k,224,2.227,97.773,15.733,84.267,2.04,0.875,bicubic,-80.883,-78.417,+25\nmobilenetv2_100.ra_in1k,224,2.173,97.827,19.733,80.267,3.50,0.875,bicubic,-87.437,-77.427,-35\nrepvgg_a1.rvgg_in1k,224,2.147,97.853,21.227,78.773,14.09,0.875,bilinear,-88.443,-76.413,-68\ntf_mobilenetv3_small_100.in1k,224,2.147,97.853,16.773,83.227,2.54,0.875,bilinear,-83.073,-78.997,+9\ntest_resnet.r160_in1k,160,2.147,97.853,11.213,88.787,0.47,0.950,bilinear,-60.013,-72.997,+41\nregnety_002.pycls_in1k,224,2.133,97.867,18.640,81.360,3.16,0.875,bicubic,-85.217,-77.960,-6\nresnet14t.c3_in1k,176,2.133,97.867,16.960,83.040,10.08,0.875,bicubic,-86.157,-79.410,-16\ntest_efficientnet_ln.r160_in1k,160,2.133,97.867,10.493,89.507,0.36,0.950,bicubic,-62.667,-74.837,+34\nvgg19.tv_in1k,224,2.107,97.893,20.480,79.520,143.67,0.875,bilinear,-86.943,-76.400,-35\nmobileone_s0.apple_in1k,224,2.107,97.893,17.680,82.320,5.29,0.875,bilinear,-86.213,-78.750,-18\nvgg13_bn.tv_in1k,224,2.080,97.920,20.027,79.973,133.05,0.875,bilinear,-86.700,-76.943,-29\ntest_byobnet.r160_in1k,160,2.053,97.947,11.333,88.667,0.46,0.950,bicubic,-63.837,-74.847,+27\ntest_convnext3.r160_in1k,160,2.013,97.987,13.160,86.
840,0.47,0.950,bicubic,-72.267,-77.450,+20\ntest_efficientnet.r160_in1k,160,2.013,97.987,10.440,89.560,0.36,0.950,bicubic,-64.387,-75.850,+25\nregnetx_004.pycls_in1k,224,1.973,98.027,19.080,80.920,5.16,0.875,bicubic,-86.947,-78.020,-36\nresnet18.a2_in1k,224,1.973,98.027,17.467,82.533,11.69,0.950,bicubic,-86.057,-78.953,-21\ntf_mobilenetv3_small_075.in1k,224,1.960,98.040,15.013,84.987,2.04,0.875,bilinear,-81.590,-79.847,+7\nmobilenetv3_small_100.lamb_in1k,224,1.947,98.053,16.920,83.080,2.54,0.875,bicubic,-83.243,-78.700,-4\ntinynet_e.in1k,106,1.933,98.067,13.987,86.013,2.04,0.875,bicubic,-77.047,-78.573,+11\nrepghostnet_050.in1k,224,1.920,98.080,16.347,83.653,2.31,0.875,bicubic,-83.120,-78.873,-5\nmobilenetv4_conv_small_050.e3000_r224_in1k,224,1.893,98.107,15.440,84.560,2.24,0.875,bicubic,-81.527,-79.140,+4\ntest_convnext2.r160_in1k,160,1.867,98.133,13.213,86.787,0.48,0.950,bicubic,-73.053,-77.907,+11\nresnet34.tv_in1k,224,1.840,98.160,19.813,80.187,21.80,0.875,bilinear,-88.090,-77.527,-67\nvgg13.tv_in1k,224,1.840,98.160,17.933,82.067,133.05,0.875,bilinear,-85.180,-78.377,-19\nlcnet_050.ra2_in1k,224,1.787,98.213,13.107,86.893,1.88,0.875,bicubic,-79.943,-80.623,+3\nmobilenetv3_small_050.lamb_in1k,224,1.773,98.227,12.440,87.560,1.59,0.875,bicubic,-75.257,-78.860,+6\nmnasnet_small.lamb_in1k,224,1.760,98.240,15.027,84.973,2.03,0.875,bicubic,-82.600,-80.133,-9\nvgg11_bn.tv_in1k,224,1.733,98.267,18.107,81.893,132.87,0.875,bilinear,-85.767,-78.703,-29\ndla46x_c.in1k,224,1.720,98.280,16.373,83.627,1.07,0.875,bilinear,-82.550,-78.867,-9\ntf_mobilenetv3_large_minimal_100.in1k,224,1.653,98.347,17.347,82.653,3.92,0.875,bilinear,-87.297,-79.523,-52\nresnet10t.c3_in1k,224,1.640,98.360,15.907,84.093,5.44,0.950,bicubic,-84.610,-79.853,-22\nmobilenetv2_050.lamb_in1k,224,1.640,98.360,14.053,85.947,1.97,0.875,bicubic,-82.300,-80.537,-9\nvgg11.tv_in1k,224,1.613,98.387,15.907,84.093,132.86,0.875,bilinear,-84.957,-80.403,-27\ndla60x_c.in1k,224,1.600,98.400,17.760,82.240,1.32,0.875,bilinea
r,-84.730,-78.330,-26\nresnet18.gluon_in1k,224,1.600,98.400,16.773,83.227,11.69,0.875,bicubic,-86.770,-79.927,-44\nhrnet_w18_small.ms_in1k,224,1.507,98.493,18.120,81.880,13.19,0.875,bilinear,-87.543,-78.960,-62\nrepvgg_a0.rvgg_in1k,224,1.493,98.507,17.467,82.533,9.11,0.875,bilinear,-87.767,-79.463,-66\ndla46_c.in1k,224,1.493,98.507,15.120,84.880,1.30,0.875,bilinear,-82.147,-79.810,-15\nresnet10t.c3_in1k,176,1.427,98.573,14.133,85.867,5.44,0.875,bicubic,-83.363,-80.877,-22\nresnet18.a3_in1k,160,1.387,98.613,13.707,86.293,11.69,0.950,bicubic,-82.893,-80.913,-21\nregnetx_002.pycls_in1k,224,1.293,98.707,14.907,85.093,2.68,0.875,bicubic,-84.907,-81.063,-30\ntf_mobilenetv3_small_minimal_100.in1k,224,1.133,98.867,11.413,88.587,2.04,0.875,bilinear,-80.207,-82.277,-13\nresnet18.tv_in1k,224,1.120,98.880,16.200,83.800,11.69,0.875,bilinear,-86.260,-80.070,-43\nresmlp_24_224.fb_dino,224,0.480,99.520,2.320,97.680,30.02,0.875,bicubic,,,\n"
  },
  {
    "path": "results/results-imagenet-r-clean.csv",
    "content": "model,img_size,top1,top1_err,top5,top5_err,param_count,crop_pct,interpolation\neva02_large_patch14_448.mim_in22k_ft_in22k_in1k,448,98.160,1.840,99.880,0.120,305.08,1.000,bicubic\neva02_large_patch14_448.mim_m38m_ft_in22k_in1k,448,98.030,1.970,99.890,0.110,305.08,1.000,bicubic\neva_giant_patch14_336.m30m_ft_in22k_in1k,336,98.020,1.980,99.900,0.100,\"1,013.01\",1.000,bicubic\neva_giant_patch14_560.m30m_ft_in22k_in1k,560,97.990,2.010,99.860,0.140,\"1,014.45\",1.000,bicubic\neva_large_patch14_336.in22k_ft_in22k_in1k,336,97.900,2.100,99.880,0.120,304.53,1.000,bicubic\nconvnextv2_huge.fcmae_ft_in22k_in1k_384,384,97.860,2.140,99.900,0.100,660.29,1.000,bicubic\neva_giant_patch14_336.clip_ft_in1k,336,97.860,2.140,99.790,0.210,\"1,013.01\",1.000,bicubic\neva02_large_patch14_448.mim_in22k_ft_in1k,448,97.860,2.140,99.770,0.230,305.08,1.000,bicubic\neva_large_patch14_336.in22k_ft_in1k,336,97.820,2.180,99.850,0.150,304.53,1.000,bicubic\neva02_large_patch14_448.mim_m38m_ft_in1k,448,97.820,2.180,99.830,0.170,305.08,1.000,bicubic\nvit_so400m_patch14_siglip_378.webli_ft_in1k,378,97.820,2.180,99.760,0.240,429.38,1.000,bicubic\nconvnextv2_huge.fcmae_ft_in22k_in1k_512,512,97.810,2.190,99.860,0.140,660.29,1.000,bicubic\nbeit_large_patch16_384.in22k_ft_in22k_in1k,384,97.810,2.190,99.790,0.210,305.00,1.000,bicubic\ntf_efficientnet_l2.ns_jft_in1k,800,97.770,2.230,99.890,0.110,480.31,0.960,bicubic\nregnety_1280.swag_ft_in1k,384,97.770,2.230,99.860,0.140,644.81,1.000,bicubic\nmaxvit_base_tf_512.in21k_ft_in1k,512,97.770,2.230,99.850,0.150,119.88,1.000,bicubic\nmaxvit_xlarge_tf_384.in21k_ft_in1k,384,97.770,2.230,99.850,0.150,475.32,1.000,bicubic\nbeit_large_patch16_512.in22k_ft_in22k_in1k,512,97.770,2.230,99.810,0.190,305.67,1.000,bicubic\nconvnext_xxlarge.clip_laion2b_soup_ft_in1k,256,97.770,2.230,99.810,0.190,846.47,1.000,bicubic\ntf_efficientnet_l2.ns_jft_in1k_475,475,97.770,2.230,99.810,0.190,480.31,0.936,bicubic\nmaxvit_xlarge_tf_512.in21k_ft_in1k,512,97.760,2.240,99.820,0.
180,475.77,1.000,bicubic\nbeitv2_large_patch16_224.in1k_ft_in22k_in1k,224,97.760,2.240,99.790,0.210,304.43,0.950,bicubic\nvit_so400m_patch14_siglip_gap_378.webli_ft_in1k,378,97.720,2.280,99.770,0.230,414.14,1.000,bicubic\neva02_base_patch14_448.mim_in22k_ft_in1k,448,97.710,2.290,99.760,0.240,87.12,1.000,bicubic\nmaxvit_large_tf_384.in21k_ft_in1k,384,97.670,2.330,99.820,0.180,212.03,1.000,bicubic\nmaxvit_large_tf_512.in21k_ft_in1k,512,97.660,2.340,99.730,0.270,212.33,1.000,bicubic\ncaformer_b36.sail_in22k_ft_in1k_384,384,97.650,2.350,99.860,0.140,98.75,1.000,bicubic\nconvnextv2_large.fcmae_ft_in22k_in1k_384,384,97.650,2.350,99.800,0.200,197.96,1.000,bicubic\nvit_large_patch14_clip_224.openai_ft_in12k_in1k,224,97.640,2.360,99.730,0.270,304.20,1.000,bicubic\nvit_huge_patch14_clip_336.laion2b_ft_in12k_in1k,336,97.620,2.380,99.760,0.240,632.46,1.000,bicubic\nvit_large_patch14_clip_336.openai_ft_in12k_in1k,336,97.620,2.380,99.730,0.270,304.53,1.000,bicubic\neva_large_patch14_196.in22k_ft_in22k_in1k,196,97.610,2.390,99.810,0.190,304.14,1.000,bicubic\neva02_base_patch14_448.mim_in22k_ft_in22k_in1k,448,97.600,2.400,99.820,0.180,87.12,1.000,bicubic\nconvnext_xlarge.fb_in22k_ft_in1k_384,384,97.580,2.420,99.780,0.220,350.20,1.000,bicubic\ndeit3_large_patch16_384.fb_in22k_ft_in1k,384,97.580,2.420,99.710,0.290,304.76,1.000,bicubic\neva_giant_patch14_224.clip_ft_in1k,224,97.580,2.420,99.710,0.290,\"1,012.56\",0.900,bicubic\nmaxvit_base_tf_384.in21k_ft_in1k,384,97.560,2.440,99.760,0.240,119.65,1.000,bicubic\nvit_so150m2_patch16_reg1_gap_448.sbb_e200_in12k_ft_in1k,448,97.550,2.450,99.780,0.220,136.50,1.000,bicubic\nvit_so150m2_patch16_reg1_gap_384.sbb_e200_in12k_ft_in1k,384,97.550,2.450,99.750,0.250,136.33,1.000,bicubic\neva_large_patch14_196.in22k_ft_in1k,196,97.530,2.470,99.790,0.210,304.14,1.000,bicubic\nbeit_large_patch16_224.in22k_ft_in22k_in1k,224,97.500,2.500,99.680,0.320,304.43,0.900,bicubic\nconvnext_xlarge.fb_in22k_ft_in1k,288,97.480,2.520,99.820,0.180,350.20,1.000,bicubic
\nconvnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_384,384,97.470,2.530,99.760,0.240,200.13,1.000,bicubic\nmaxxvitv2_rmlp_base_rw_384.sw_in12k_ft_in1k,384,97.470,2.530,99.760,0.240,116.09,1.000,bicubic\nvit_large_patch14_clip_336.laion2b_ft_in12k_in1k,336,97.460,2.540,99.780,0.220,304.53,1.000,bicubic\nconvformer_b36.sail_in22k_ft_in1k_384,384,97.450,2.550,99.750,0.250,99.88,1.000,bicubic\nbeit3_large_patch16_224.in22k_ft_in1k,224,97.450,2.550,99.720,0.280,304.57,1.000,bicubic\nvit_large_patch14_clip_224.openai_ft_in1k,224,97.450,2.550,99.690,0.310,304.20,1.000,bicubic\nvit_large_patch16_384.augreg_in21k_ft_in1k,384,97.430,2.570,99.780,0.220,304.72,1.000,bicubic\nvit_so150m_patch16_reg4_gap_384.sbb_e250_in12k_ft_in1k,384,97.430,2.570,99.780,0.220,134.42,1.000,bicubic\nmambaout_base_plus_rw.sw_e150_r384_in12k_ft_in1k,384,97.430,2.570,99.760,0.240,101.66,1.000,bicubic\nregnety_1280.swag_lc_in1k,224,97.420,2.580,99.730,0.270,644.81,0.965,bicubic\nvit_mediumd_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k,384,97.410,2.590,99.770,0.230,64.27,1.000,bicubic\nregnety_320.swag_ft_in1k,384,97.390,2.610,99.760,0.240,145.05,1.000,bicubic\nvit_large_patch14_clip_224.laion2b_ft_in12k_in1k,224,97.390,2.610,99.740,0.260,304.20,1.000,bicubic\nconvnext_large_mlp.clip_laion2b_augreg_ft_in1k_384,384,97.390,2.610,99.730,0.270,200.13,1.000,bicubic\nbeit3_large_patch16_224.indomain_in22k_ft_in1k,224,97.390,2.610,99.690,0.310,304.57,1.000,bicubic\ncaformer_b36.sail_in22k_ft_in1k,224,97.380,2.620,99.830,0.170,98.75,1.000,bicubic\nconvnextv2_base.fcmae_ft_in22k_in1k_384,384,97.370,2.630,99.710,0.290,88.72,1.000,bicubic\ncaformer_m36.sail_in22k_ft_in1k_384,384,97.360,2.640,99.780,0.220,56.20,1.000,bicubic\nseresnextaa201d_32x8d.sw_in12k_ft_in1k_384,384,97.360,2.640,99.730,0.270,149.39,1.000,bicubic\ncoatnet_rmlp_2_rw_384.sw_in12k_ft_in1k,384,97.360,2.640,99.710,0.290,73.88,1.000,bicubic\nconvformer_m36.sail_in22k_ft_in1k_384,384,97.360,2.640,99.680,0.320,57.05,1.000,bicubic\nbeit_base_patch16
_384.in22k_ft_in22k_in1k,384,97.350,2.650,99.710,0.290,86.74,1.000,bicubic\nvit_huge_patch14_clip_224.laion2b_ft_in12k_in1k,224,97.340,2.660,99.800,0.200,632.05,1.000,bicubic\nconvnextv2_large.fcmae_ft_in22k_in1k,288,97.340,2.660,99.760,0.240,197.96,1.000,bicubic\nseresnextaa101d_32x8d.sw_in12k_ft_in1k_288,288,97.330,2.670,99.760,0.240,93.59,0.950,bicubic\nconvnext_large.fb_in22k_ft_in1k_384,384,97.330,2.670,99.750,0.250,197.77,1.000,bicubic\nmaxvit_rmlp_base_rw_384.sw_in12k_ft_in1k,384,97.320,2.680,99.720,0.280,116.14,1.000,bicubic\ndeit3_large_patch16_224.fb_in22k_ft_in1k,224,97.320,2.680,99.680,0.320,304.37,1.000,bicubic\ntf_efficientnetv2_l.in21k_ft_in1k,480,97.320,2.680,99.640,0.360,118.52,1.000,bicubic\ntf_efficientnetv2_xl.in21k_ft_in1k,512,97.320,2.680,99.600,0.400,208.12,1.000,bicubic\nseresnextaa101d_32x8d.sw_in12k_ft_in1k_288,320,97.310,2.690,99.780,0.220,93.59,1.000,bicubic\nswinv2_large_window12to24_192to384.ms_in22k_ft_in1k,384,97.300,2.700,99.780,0.220,196.74,1.000,bicubic\nvolo_d5_512.sail_in1k,512,97.300,2.700,99.770,0.230,296.09,1.150,bicubic\nbeitv2_large_patch16_224.in1k_ft_in1k,224,97.290,2.710,99.740,0.260,304.43,0.950,bicubic\nconvnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_320,320,97.290,2.710,99.740,0.260,200.13,1.000,bicubic\ncaformer_s36.sail_in22k_ft_in1k_384,384,97.280,2.720,99.720,0.280,39.30,1.000,bicubic\nconvnextv2_large.fcmae_ft_in22k_in1k,224,97.270,2.730,99.720,0.280,197.96,0.875,bicubic\nconvnextv2_huge.fcmae_ft_in1k,288,97.270,2.730,99.710,0.290,660.29,1.000,bicubic\nswinv2_base_window12to24_192to384.ms_in22k_ft_in1k,384,97.260,2.740,99.800,0.200,87.92,1.000,bicubic\nconvnext_xlarge.fb_in22k_ft_in1k,224,97.260,2.740,99.740,0.260,350.20,0.875,bicubic\nvit_so150m2_patch16_reg1_gap_256.sbb_e200_in12k_ft_in1k,256,97.250,2.750,99.760,0.240,136.06,1.000,bicubic\ndeit3_huge_patch14_224.fb_in22k_ft_in1k,224,97.250,2.750,99.720,0.280,632.13,1.000,bicubic\nvolo_d5_448.sail_in1k,448,97.240,2.760,99.760,0.240,295.91,1.150,bicubic\nconvne
xt_base.fb_in22k_ft_in1k_384,384,97.240,2.760,99.720,0.280,88.59,1.000,bicubic\nconvnext_large.fb_in22k_ft_in1k,288,97.240,2.760,99.720,0.280,197.77,1.000,bicubic\nvit_large_patch14_clip_336.laion2b_ft_in1k,336,97.240,2.760,99.720,0.280,304.53,1.000,bicubic\ndeit3_base_patch16_384.fb_in22k_ft_in1k,384,97.240,2.760,99.680,0.320,86.88,1.000,bicubic\nconvformer_b36.sail_in22k_ft_in1k,224,97.230,2.770,99.760,0.240,99.88,1.000,bicubic\nmambaout_base_plus_rw.sw_e150_in12k_ft_in1k,288,97.230,2.770,99.750,0.250,101.66,1.000,bicubic\nvit_base_patch16_clip_384.laion2b_ft_in12k_in1k,384,97.230,2.770,99.700,0.300,86.86,1.000,bicubic\nconvnext_large.fb_in22k_ft_in1k,224,97.230,2.770,99.660,0.340,197.77,0.875,bicubic\nconvnext_base.fb_in22k_ft_in1k,288,97.220,2.780,99.760,0.240,88.59,1.000,bicubic\nnextvit_base.bd_ssld_6m_in1k_384,384,97.220,2.780,99.730,0.270,44.82,1.000,bicubic\nswinv2_large_window12to16_192to256.ms_in22k_ft_in1k,256,97.220,2.780,99.710,0.290,196.74,0.900,bicubic\nmambaout_base_plus_rw.sw_e150_in12k_ft_in1k,224,97.220,2.780,99.680,0.320,101.66,1.000,bicubic\nconvnextv2_base.fcmae_ft_in22k_in1k,288,97.210,2.790,99.760,0.240,88.72,1.000,bicubic\nvit_betwixt_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k,384,97.210,2.790,99.720,0.280,60.60,1.000,bicubic\nhiera_huge_224.mae_in1k_ft_in1k,224,97.210,2.790,99.630,0.370,672.78,0.900,bicubic\ntf_efficientnet_b7.ns_jft_in1k,600,97.200,2.800,99.700,0.300,66.35,0.949,bicubic\ncoatnet_rmlp_2_rw_224.sw_in12k_ft_in1k,224,97.190,2.810,99.650,0.350,73.88,0.950,bicubic\nmaxvit_base_tf_512.in1k,512,97.190,2.810,99.640,0.360,119.88,1.000,bicubic\nregnety_160.swag_ft_in1k,384,97.180,2.820,99.780,0.220,83.59,1.000,bicubic\nseresnextaa101d_32x8d.sw_in12k_ft_in1k,288,97.180,2.820,99.760,0.240,93.59,1.000,bicubic\nnextvit_large.bd_ssld_6m_in1k_384,384,97.180,2.820,99.750,0.250,57.87,1.000,bicubic\nswin_large_patch4_window12_384.ms_in22k_ft_in1k,384,97.180,2.820,99.690,0.310,196.74,1.000,bicubic\nmaxvit_small_tf_512.in1k,512,97.180,2.820,
99.620,0.380,69.13,1.000,bicubic\nconvnextv2_base.fcmae_ft_in22k_in1k,224,97.160,2.840,99.660,0.340,88.72,0.875,bicubic\nvit_mediumd_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k,256,97.150,2.850,99.680,0.320,64.11,0.950,bicubic\ncaformer_b36.sail_in1k_384,384,97.150,2.850,99.610,0.390,98.75,1.000,bicubic\nconvnext_small.fb_in22k_ft_in1k_384,384,97.140,2.860,99.640,0.360,50.22,1.000,bicubic\nmaxvit_base_tf_384.in1k,384,97.140,2.860,99.580,0.420,119.65,1.000,bicubic\nconvnext_large_mlp.clip_laion2b_augreg_ft_in1k,256,97.130,2.870,99.720,0.280,200.13,1.000,bicubic\nvit_base_patch16_clip_384.openai_ft_in12k_in1k,384,97.130,2.870,99.640,0.360,86.86,0.950,bicubic\nvit_huge_patch14_clip_224.laion2b_ft_in1k,224,97.110,2.890,99.700,0.300,632.05,1.000,bicubic\nhgnetv2_b6.ssld_stage2_ft_in1k,288,97.100,2.900,99.740,0.260,75.26,1.000,bicubic\nmaxvit_rmlp_base_rw_224.sw_in12k_ft_in1k,224,97.090,2.910,99.610,0.390,116.14,0.950,bicubic\nvit_base_patch8_224.augreg_in21k_ft_in1k,224,97.090,2.910,99.610,0.390,86.58,0.900,bicubic\nhgnetv2_b6.ssld_stage1_in22k_in1k,288,97.080,2.920,99.710,0.290,75.26,1.000,bicubic\nmaxxvitv2_rmlp_base_rw_224.sw_in12k_ft_in1k,224,97.080,2.920,99.680,0.320,116.09,0.950,bicubic\nswin_base_patch4_window12_384.ms_in22k_ft_in1k,384,97.070,2.930,99.770,0.230,87.90,1.000,bicubic\nvolo_d4_448.sail_in1k,448,97.070,2.930,99.760,0.240,193.41,1.150,bicubic\nnextvit_small.bd_ssld_6m_in1k_384,384,97.070,2.930,99.710,0.290,31.76,1.000,bicubic\nconvformer_m36.sail_in22k_ft_in1k,224,97.070,2.930,99.630,0.370,57.05,1.000,bicubic\nconvnext_base.clip_laion2b_augreg_ft_in12k_in1k_384,384,97.060,2.940,99.690,0.310,88.59,1.000,bicubic\nswinv2_base_window12to16_192to256.ms_in22k_ft_in1k,256,97.060,2.940,99.670,0.330,87.92,0.900,bicubic\nmaxvit_large_tf_512.in1k,512,97.060,2.940,99.590,0.410,212.33,1.000,bicubic\nconvformer_s36.sail_in22k_ft_in1k_384,384,97.050,2.950,99.710,0.290,40.01,1.000,bicubic\nvolo_d3_448.sail_in1k,448,97.040,2.960,99.680,0.320,86.63,1.000,bicubic\ncafor
mer_m36.sail_in22k_ft_in1k,224,97.030,2.970,99.730,0.270,56.20,1.000,bicubic\nrdnet_large.nv_in1k_ft_in1k_384,384,97.030,2.970,99.680,0.320,186.27,1.000,bicubic\nvit_large_patch14_clip_224.laion2b_ft_in1k,224,97.030,2.970,99.670,0.330,304.20,1.000,bicubic\nhgnetv2_b6.ssld_stage1_in22k_in1k,224,97.030,2.970,99.610,0.390,75.26,0.965,bicubic\ncaformer_m36.sail_in1k_384,384,97.020,2.980,99.710,0.290,56.20,1.000,bicubic\ntf_efficientnet_b6.ns_jft_in1k,528,97.020,2.980,99.710,0.290,43.04,0.942,bicubic\nvit_base_patch16_384.augreg_in21k_ft_in1k,384,97.020,2.980,99.700,0.300,86.86,1.000,bicubic\ndm_nfnet_f5.dm_in1k,544,97.020,2.980,99.670,0.330,377.21,0.954,bicubic\nvit_so150m_patch16_reg4_gap_256.sbb_e250_in12k_ft_in1k,256,97.020,2.980,99.670,0.330,134.13,0.950,bicubic\ntf_efficientnetv2_m.in21k_ft_in1k,480,97.020,2.980,99.620,0.380,54.14,1.000,bicubic\ncoatnet_2_rw_224.sw_in12k_ft_in1k,224,97.000,3.000,99.640,0.360,73.87,0.950,bicubic\nconvnext_small.in12k_ft_in1k_384,384,96.990,3.010,99.660,0.340,50.22,1.000,bicubic\ndm_nfnet_f6.dm_in1k,576,96.980,3.020,99.760,0.240,438.36,0.956,bicubic\nmaxvit_tiny_tf_512.in1k,512,96.970,3.030,99.670,0.330,31.05,1.000,bicubic\nbeitv2_base_patch16_224.in1k_ft_in22k_in1k,224,96.960,3.040,99.720,0.280,86.53,0.900,bicubic\nvit_large_r50_s32_384.augreg_in21k_ft_in1k,384,96.960,3.040,99.720,0.280,329.09,1.000,bicubic\nseresnextaa101d_32x8d.sw_in12k_ft_in1k,224,96.960,3.040,99.640,0.360,93.59,0.875,bicubic\nswin_large_patch4_window7_224.ms_in22k_ft_in1k,224,96.950,3.050,99.660,0.340,196.53,0.900,bicubic\ndm_nfnet_f4.dm_in1k,512,96.950,3.050,99.630,0.370,316.07,0.951,bicubic\ntiny_vit_21m_384.dist_in22k_ft_in1k,384,96.950,3.050,99.620,0.380,21.23,1.000,bicubic\nmaxvit_large_tf_384.in1k,384,96.950,3.050,99.560,0.440,212.03,1.000,bicubic\nvit_base_patch8_224.augreg2_in21k_ft_in1k,224,96.940,3.060,99.650,0.350,86.58,0.900,bicubic\nconvnextv2_huge.fcmae_ft_in1k,224,96.940,3.060,99.640,0.360,660.29,0.875,bicubic\ndm_nfnet_f6.dm_in1k,448,96.930,3.070
,99.720,0.280,438.36,0.956,bicubic\nxcit_large_24_p16_384.fb_dist_in1k,384,96.930,3.070,99.510,0.490,189.10,1.000,bicubic\nvit_base_patch16_clip_384.laion2b_ft_in1k,384,96.920,3.080,99.670,0.330,86.86,1.000,bicubic\nvolo_d5_224.sail_in1k,224,96.910,3.090,99.660,0.340,295.46,0.960,bicubic\nnextvit_large.bd_ssld_6m_in1k,224,96.910,3.090,99.640,0.360,57.87,0.950,bicubic\ntf_efficientnet_b5.ns_jft_in1k,456,96.900,3.100,99.640,0.360,30.39,0.934,bicubic\ntf_efficientnetv2_xl.in21k_ft_in1k,384,96.900,3.100,99.460,0.540,208.12,1.000,bicubic\ntiny_vit_21m_512.dist_in22k_ft_in1k,512,96.890,3.110,99.710,0.290,21.27,1.000,bicubic\nresnetv2_152x4_bit.goog_in21k_ft_in1k,480,96.890,3.110,99.680,0.320,936.53,1.000,bilinear\ncait_m48_448.fb_dist_in1k,448,96.890,3.110,99.620,0.380,356.46,1.000,bicubic\ndeit3_base_patch16_224.fb_in22k_ft_in1k,224,96.890,3.110,99.620,0.380,86.59,1.000,bicubic\nnextvit_base.bd_ssld_6m_in1k,224,96.880,3.120,99.680,0.320,44.82,0.950,bicubic\ncaformer_s36.sail_in1k_384,384,96.880,3.120,99.660,0.340,39.30,1.000,bicubic\nconvformer_b36.sail_in1k_384,384,96.870,3.130,99.650,0.350,99.88,1.000,bicubic\nhgnetv2_b6.ssld_stage2_ft_in1k,224,96.870,3.130,99.650,0.350,75.26,0.965,bicubic\ndeit3_large_patch16_384.fb_in1k,384,96.870,3.130,99.620,0.380,304.76,1.000,bicubic\nconvnext_small.fb_in22k_ft_in1k,288,96.870,3.130,99.510,0.490,50.22,1.000,bicubic\nconvnext_base.clip_laiona_augreg_ft_in1k_384,384,96.860,3.140,99.690,0.310,88.59,1.000,bicubic\ntf_efficientnetv2_l.in21k_ft_in1k,384,96.860,3.140,99.590,0.410,118.52,1.000,bicubic\nconvnextv2_large.fcmae_ft_in1k,288,96.850,3.150,99.770,0.230,197.96,1.000,bicubic\nconvnextv2_large.fcmae_ft_in1k,224,96.850,3.150,99.680,0.320,197.96,0.875,bicubic\nconvnext_base.fb_in22k_ft_in1k,224,96.850,3.150,99.660,0.340,88.59,0.875,bicubic\nregnety_160.lion_in12k_ft_in1k,288,96.840,3.160,99.730,0.270,83.59,1.000,bicubic\ncaformer_s36.sail_in22k_ft_in1k,224,96.840,3.160,99.620,0.380,39.30,1.000,bicubic\nregnety_320.swag_lc_in1k,224,96
.830,3.170,99.730,0.270,145.05,0.965,bicubic\ncait_m36_384.fb_dist_in1k,384,96.820,3.180,99.660,0.340,271.22,1.000,bicubic\nvit_base_patch16_clip_384.openai_ft_in1k,384,96.820,3.180,99.660,0.340,86.86,1.000,bicubic\nxcit_small_24_p8_384.fb_dist_in1k,384,96.820,3.180,99.630,0.370,47.63,1.000,bicubic\nefficientnet_b5.sw_in12k_ft_in1k,448,96.820,3.180,99.590,0.410,30.39,1.000,bicubic\nregnety_160.sw_in12k_ft_in1k,288,96.810,3.190,99.700,0.300,83.59,1.000,bicubic\ndm_nfnet_f5.dm_in1k,416,96.810,3.190,99.600,0.400,377.21,0.954,bicubic\nconvnext_base.clip_laion2b_augreg_ft_in12k_in1k,256,96.800,3.200,99.680,0.320,88.59,1.000,bicubic\nhgnet_base.ssld_in1k,288,96.800,3.200,99.680,0.320,71.58,1.000,bicubic\nvit_mediumd_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,96.800,3.200,99.640,0.360,64.11,0.950,bicubic\nflexivit_large.1200ep_in1k,240,96.800,3.200,99.610,0.390,304.36,0.950,bicubic\nconvformer_s18.sail_in22k_ft_in1k_384,384,96.790,3.210,99.710,0.290,26.77,1.000,bicubic\nxcit_large_24_p8_384.fb_dist_in1k,384,96.790,3.210,99.550,0.450,188.93,1.000,bicubic\nresnext101_32x32d.fb_wsl_ig1b_ft_in1k,224,96.790,3.210,99.530,0.470,468.53,0.875,bilinear\nvolo_d4_224.sail_in1k,224,96.780,3.220,99.670,0.330,192.96,0.960,bicubic\nxcit_medium_24_p8_384.fb_dist_in1k,384,96.780,3.220,99.620,0.380,84.32,1.000,bicubic\nbeitv2_base_patch16_224.in1k_ft_in1k,224,96.780,3.220,99.550,0.450,86.53,0.900,bicubic\nconvformer_m36.sail_in1k_384,384,96.770,3.230,99.610,0.390,57.05,1.000,bicubic\nhiera_large_224.mae_in1k_ft_in1k,224,96.750,3.250,99.590,0.410,213.74,0.900,bicubic\nefficientvit_l3.r320_in1k,320,96.750,3.250,99.410,0.590,246.04,1.000,bicubic\nhgnetv2_b5.ssld_stage2_ft_in1k,288,96.740,3.260,99.710,0.290,39.57,1.000,bicubic\ndm_nfnet_f4.dm_in1k,384,96.740,3.260,99.620,0.380,316.07,0.951,bicubic\nvolo_d2_384.sail_in1k,384,96.740,3.260,99.600,0.400,58.87,1.000,bicubic\nflexivit_large.600ep_in1k,240,96.740,3.260,99.550,0.450,304.36,0.950,bicubic\nefficientvit_l3.r384_in1k,384,96.740,3.260,99.450
,0.550,246.04,1.000,bicubic\ninception_next_base.sail_in1k_384,384,96.730,3.270,99.620,0.380,86.67,1.000,bicubic\nmaxvit_small_tf_384.in1k,384,96.730,3.270,99.590,0.410,69.02,1.000,bicubic\ntf_efficientnetv2_l.in1k,480,96.730,3.270,99.550,0.450,118.52,1.000,bicubic\nvit_large_patch16_224.augreg_in21k_ft_in1k,224,96.720,3.280,99.640,0.360,304.33,0.900,bicubic\ntf_efficientnet_b4.ns_jft_in1k,380,96.710,3.290,99.640,0.360,19.34,0.922,bicubic\nconvformer_s36.sail_in1k_384,384,96.700,3.300,99.580,0.420,40.01,1.000,bicubic\nbeit3_base_patch16_224.indomain_in22k_ft_in1k,224,96.700,3.300,99.570,0.430,86.66,1.000,bicubic\ntf_efficientnet_b8.ra_in1k,672,96.700,3.300,99.550,0.450,87.41,0.954,bicubic\neva02_small_patch14_336.mim_in22k_ft_in1k,336,96.690,3.310,99.610,0.390,22.13,1.000,bicubic\nxcit_medium_24_p16_384.fb_dist_in1k,384,96.690,3.310,99.600,0.400,84.40,1.000,bicubic\nflexivit_large.300ep_in1k,240,96.690,3.310,99.580,0.420,304.36,0.950,bicubic\nefficientvit_l2.r384_in1k,384,96.690,3.310,99.420,0.580,63.71,1.000,bicubic\ndeit3_small_patch16_384.fb_in22k_ft_in1k,384,96.680,3.320,99.640,0.360,22.21,1.000,bicubic\nmobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k,544,96.680,3.320,99.580,0.420,32.59,1.000,bicubic\nefficientvit_l2.r288_in1k,288,96.680,3.320,99.530,0.470,63.71,1.000,bicubic\nswin_base_patch4_window7_224.ms_in22k_ft_in1k,224,96.670,3.330,99.660,0.340,87.77,0.900,bicubic\nbeit_base_patch16_224.in22k_ft_in22k_in1k,224,96.660,3.340,99.660,0.340,86.53,0.900,bicubic\nhgnet_base.ssld_in1k,224,96.650,3.350,99.560,0.440,71.58,0.965,bicubic\ncait_s36_384.fb_dist_in1k,384,96.640,3.360,99.600,0.400,68.37,1.000,bicubic\nefficientvit_l3.r256_in1k,256,96.640,3.360,99.290,0.710,246.04,1.000,bicubic\nconvnextv2_tiny.fcmae_ft_in22k_in1k_384,384,96.630,3.370,99.590,0.410,28.64,1.000,bicubic\nvit_base_patch16_clip_224.laion2b_ft_in12k_in1k,224,96.630,3.370,99.560,0.440,86.57,0.950,bicubic\nxcit_large_24_p8_224.fb_dist_in1k,224,96.630,3.370,99.480,0.520,188.93,1.000,bicubic\nreg
netz_e8.ra3_in1k,320,96.620,3.380,99.610,0.390,57.70,1.000,bicubic\ncoat_lite_medium_384.in1k,384,96.620,3.380,99.450,0.550,44.57,1.000,bicubic\nhgnetv2_b5.ssld_stage1_in22k_in1k,288,96.610,3.390,99.700,0.300,39.57,1.000,bicubic\ndm_nfnet_f3.dm_in1k,416,96.610,3.390,99.630,0.370,254.92,0.940,bicubic\nnextvit_small.bd_ssld_6m_in1k,224,96.610,3.390,99.580,0.420,31.76,0.950,bicubic\ntf_efficientnet_b7.ra_in1k,600,96.610,3.390,99.530,0.470,66.35,0.949,bicubic\nvit_base_patch32_clip_384.laion2b_ft_in12k_in1k,384,96.610,3.390,99.470,0.530,88.30,1.000,bicubic\nvit_betwixt_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k,256,96.600,3.400,99.590,0.410,60.40,0.950,bicubic\nefficientnet_h_b5.sw_r448_e450_in1k,576,96.600,3.400,99.580,0.420,45.88,1.000,bicubic\nregnety_160.sw_in12k_ft_in1k,224,96.590,3.410,99.690,0.310,83.59,0.950,bicubic\nconvnext_small.in12k_ft_in1k,288,96.590,3.410,99.580,0.420,50.22,1.000,bicubic\ndeit3_huge_patch14_224.fb_in1k,224,96.590,3.410,99.520,0.480,632.13,0.900,bicubic\nconvnext_tiny.in12k_ft_in1k_384,384,96.580,3.420,99.660,0.340,28.59,1.000,bicubic\nconvnext_base.clip_laion2b_augreg_ft_in1k,256,96.580,3.420,99.640,0.360,88.59,1.000,bicubic\ncait_s24_384.fb_dist_in1k,384,96.580,3.420,99.550,0.450,47.06,1.000,bicubic\nregnety_120.sw_in12k_ft_in1k,288,96.570,3.430,99.680,0.320,51.82,1.000,bicubic\nconvnext_small.in12k_ft_in1k,224,96.570,3.430,99.580,0.420,50.22,0.950,bicubic\nmaxvit_tiny_tf_384.in1k,384,96.570,3.430,99.570,0.430,30.98,1.000,bicubic\nvit_base_patch32_clip_448.laion2b_ft_in12k_in1k,448,96.570,3.430,99.530,0.470,88.34,1.000,bicubic\nregnety_2560.seer_ft_in1k,384,96.570,3.430,99.510,0.490,\"1,282.60\",1.000,bicubic\nhrnet_w48_ssld.paddle_in1k,288,96.560,3.440,99.640,0.360,77.47,1.000,bilinear\ntf_efficientnet_b8.ap_in1k,672,96.560,3.440,99.550,0.450,87.41,0.954,bicubic\nconvformer_s36.sail_in22k_ft_in1k,224,96.550,3.450,99.600,0.400,40.01,1.000,bicubic\nxcit_small_24_p8_224.fb_dist_in1k,224,96.550,3.450,99.560,0.440,47.63,1.000,bicubic\ntf_e
fficientnetv2_l.in1k,384,96.550,3.450,99.480,0.520,118.52,1.000,bicubic\nvit_betwixt_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,96.550,3.450,99.470,0.530,60.40,0.950,bicubic\nhgnetv2_b5.ssld_stage2_ft_in1k,224,96.540,3.460,99.660,0.340,39.57,0.965,bicubic\ncaformer_b36.sail_in1k,224,96.540,3.460,99.460,0.540,98.75,1.000,bicubic\nhgnetv2_b5.ssld_stage1_in22k_in1k,224,96.530,3.470,99.620,0.380,39.57,0.965,bicubic\nresnetv2_152x2_bit.goog_in21k_ft_in1k,448,96.530,3.470,99.590,0.410,236.34,1.000,bilinear\nmobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k,480,96.530,3.470,99.540,0.460,32.59,1.000,bicubic\nvit_base_patch16_clip_224.openai_ft_in12k_in1k,224,96.530,3.470,99.540,0.460,86.57,0.950,bicubic\ncaformer_s18.sail_in22k_ft_in1k_384,384,96.520,3.480,99.590,0.410,26.34,1.000,bicubic\ndm_nfnet_f2.dm_in1k,352,96.520,3.480,99.570,0.430,193.78,0.920,bicubic\ntf_efficientnetv2_m.in21k_ft_in1k,384,96.520,3.480,99.510,0.490,54.14,1.000,bicubic\nxcit_medium_24_p8_224.fb_dist_in1k,224,96.520,3.480,99.470,0.530,84.32,1.000,bicubic\nregnety_160.lion_in12k_ft_in1k,224,96.510,3.490,99.690,0.310,83.59,0.950,bicubic\nconvnext_small.fb_in22k_ft_in1k,224,96.510,3.490,99.490,0.510,50.22,0.875,bicubic\nvit_medium_patch16_gap_384.sw_in12k_ft_in1k,384,96.500,3.500,99.630,0.370,39.03,0.950,bicubic\ndeit_base_distilled_patch16_384.fb_in1k,384,96.500,3.500,99.590,0.410,87.63,1.000,bicubic\nvit_base_patch16_224.augreg2_in21k_ft_in1k,224,96.500,3.500,99.590,0.410,86.57,0.900,bicubic\nconvnextv2_base.fcmae_ft_in1k,288,96.500,3.500,99.520,0.480,88.72,1.000,bicubic\nvolo_d1_384.sail_in1k,384,96.490,3.510,99.550,0.450,26.78,1.000,bicubic\nvit_medium_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,96.490,3.510,99.540,0.460,38.88,0.950,bicubic\nefficientvit_l3.r224_in1k,224,96.490,3.510,99.310,0.690,246.04,1.000,bicubic\nvit_base_r50_s16_384.orig_in21k_ft_in1k,384,96.480,3.520,99.660,0.340,98.95,1.000,bicubic\nbeit3_base_patch16_224.in22k_ft_in1k,224,96.480,3.520,99.600,0.400,86.66,1.000,bicubic\ntf_effi
cientnetv2_m.in1k,480,96.480,3.520,99.600,0.400,54.14,1.000,bicubic\nresnext101_32x16d.fb_wsl_ig1b_ft_in1k,224,96.480,3.520,99.540,0.460,194.03,0.875,bilinear\nmambaout_base_wide_rw.sw_e500_in1k,288,96.480,3.520,99.520,0.480,94.45,1.000,bicubic\nxcit_small_12_p8_384.fb_dist_in1k,384,96.480,3.520,99.480,0.520,26.21,1.000,bicubic\nefficientvit_l2.r256_in1k,256,96.480,3.520,99.390,0.610,63.71,1.000,bicubic\nefficientnet_x_b5.sw_r448_e450_in1k,576,96.470,3.530,99.580,0.420,33.44,1.000,bicubic\nnextvit_large.bd_in1k_384,384,96.470,3.530,99.550,0.450,57.87,1.000,bicubic\nconvnextv2_base.fcmae_ft_in1k,224,96.470,3.530,99.430,0.570,88.72,0.875,bicubic\neca_nfnet_l2.ra3_in1k,384,96.460,3.540,99.610,0.390,56.72,1.000,bicubic\nvolo_d3_224.sail_in1k,224,96.460,3.540,99.610,0.390,86.33,0.960,bicubic\nhiera_small_abswin_256.sbb2_e200_in12k_ft_in1k,256,96.460,3.540,99.570,0.430,34.36,0.950,bicubic\ntf_efficientnetv2_s.in21k_ft_in1k,384,96.460,3.540,99.570,0.430,21.46,1.000,bicubic\necaresnet269d.ra2_in1k,352,96.450,3.550,99.610,0.390,102.09,1.000,bicubic\ncaformer_s18.sail_in1k_384,384,96.450,3.550,99.570,0.430,26.34,1.000,bicubic\nmambaout_base_short_rw.sw_e500_in1k,288,96.450,3.550,99.530,0.470,88.83,1.000,bicubic\nresnetrs420.tf_in1k,416,96.440,3.560,99.540,0.460,191.89,1.000,bicubic\nregnety_160.swag_lc_in1k,224,96.430,3.570,99.760,0.240,83.59,0.965,bicubic\nregnetz_e8.ra3_in1k,256,96.430,3.570,99.470,0.530,57.70,0.940,bicubic\nmvitv2_large.fb_in1k,224,96.430,3.570,99.440,0.560,217.99,0.900,bicubic\necaresnet269d.ra2_in1k,320,96.420,3.580,99.520,0.480,102.09,0.950,bicubic\nseresnextaa101d_32x8d.ah_in1k,288,96.420,3.580,99.520,0.480,93.59,1.000,bicubic\nvolo_d2_224.sail_in1k,224,96.420,3.580,99.510,0.490,58.68,0.960,bicubic\ncaformer_m36.sail_in1k,224,96.410,3.590,99.530,0.470,56.20,1.000,bicubic\nresnetaa101d.sw_in12k_ft_in1k,288,96.410,3.590,99.460,0.540,44.57,1.000,bicubic\nhgnet_small.ssld_in1k,288,96.400,3.600,99.620,0.380,24.36,1.000,bicubic\nmambaout_base_tall_rw.sw_e500
_in1k,288,96.400,3.600,99.530,0.470,86.48,1.000,bicubic\ntiny_vit_21m_224.dist_in22k_ft_in1k,224,96.400,3.600,99.500,0.500,21.20,0.950,bicubic\nvit_base_patch32_clip_384.openai_ft_in12k_in1k,384,96.400,3.600,99.460,0.540,88.30,0.950,bicubic\nconvnext_large.fb_in1k,288,96.390,3.610,99.530,0.470,197.77,1.000,bicubic\nvit_large_patch16_rope_224.naver_in1k,224,96.390,3.610,99.440,0.560,304.17,0.900,bicubic\nswin_base_patch4_window12_384.ms_in1k,384,96.390,3.610,99.420,0.580,87.90,1.000,bicubic\nxcit_small_24_p16_384.fb_dist_in1k,384,96.380,3.620,99.580,0.420,47.67,1.000,bicubic\nrdnet_large.nv_in1k,224,96.380,3.620,99.520,0.480,186.27,0.900,bicubic\nseresnext101d_32x8d.ah_in1k,288,96.380,3.620,99.470,0.530,93.59,1.000,bicubic\nxcit_small_12_p16_384.fb_dist_in1k,384,96.380,3.620,99.470,0.530,26.25,1.000,bicubic\ndm_nfnet_f3.dm_in1k,320,96.370,3.630,99.590,0.410,254.92,0.940,bicubic\nhiera_base_plus_224.mae_in1k_ft_in1k,224,96.370,3.630,99.420,0.580,69.90,0.900,bicubic\nvit_base_patch16_clip_224.openai_ft_in1k,224,96.360,3.640,99.570,0.430,86.57,0.900,bicubic\nresmlp_big_24_224.fb_in22k_ft_in1k,224,96.360,3.640,99.510,0.490,129.14,0.875,bicubic\ntf_efficientnet_b7.ap_in1k,600,96.350,3.650,99.600,0.400,66.35,0.949,bicubic\ntf_efficientnet_b6.ap_in1k,528,96.350,3.650,99.540,0.460,43.04,0.942,bicubic\nmaxvit_large_tf_224.in1k,224,96.350,3.650,99.400,0.600,211.79,0.950,bicubic\nmaxvit_base_tf_224.in1k,224,96.350,3.650,99.380,0.620,119.47,0.950,bicubic\nnextvit_base.bd_in1k_384,384,96.340,3.660,99.580,0.420,44.82,1.000,bicubic\nconvnextv2_tiny.fcmae_ft_in22k_in1k,288,96.340,3.660,99.550,0.450,28.64,1.000,bicubic\nmambaout_small_rw.sw_e450_in1k,288,96.340,3.660,99.550,0.450,48.50,1.000,bicubic\nefficientnet_h_b5.sw_r448_e450_in1k,448,96.340,3.660,99.520,0.480,45.88,1.000,bicubic\nregnetz_040_h.ra3_in1k,320,96.340,3.660,99.510,0.490,28.94,1.000,bicubic\nhiera_base_224.mae_in1k_ft_in1k,224,96.340,3.660,99.350,0.650,51.52,0.900,bicubic\nvit_base_patch16_clip_224.laion2b_ft_in1k,22
4,96.330,3.670,99.560,0.440,86.57,1.000,bicubic\ndm_nfnet_f1.dm_in1k,320,96.330,3.670,99.530,0.470,132.63,0.910,bicubic\nseresnet152d.ra2_in1k,320,96.330,3.670,99.510,0.490,66.84,1.000,bicubic\nconvnext_base.fb_in1k,288,96.330,3.670,99.480,0.520,88.59,1.000,bicubic\nregnety_1280.seer_ft_in1k,384,96.330,3.670,99.410,0.590,644.81,1.000,bicubic\nresnetrs200.tf_in1k,320,96.320,3.680,99.530,0.470,93.21,1.000,bicubic\nmobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k,448,96.320,3.680,99.380,0.620,32.59,0.950,bicubic\nxcit_large_24_p16_224.fb_dist_in1k,224,96.310,3.690,99.480,0.520,189.10,1.000,bicubic\nmobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k,384,96.310,3.690,99.410,0.590,32.59,0.950,bicubic\nhgnetv2_b4.ssld_stage2_ft_in1k,288,96.300,3.700,99.620,0.380,19.80,1.000,bicubic\nresnetv2_101x3_bit.goog_in21k_ft_in1k,448,96.300,3.700,99.590,0.410,387.93,1.000,bilinear\nvit_base_patch16_224.augreg_in21k_ft_in1k,224,96.300,3.700,99.560,0.440,86.57,0.900,bicubic\nfastvit_ma36.apple_dist_in1k,256,96.300,3.700,99.490,0.510,44.07,0.950,bicubic\nresnetv2_50x3_bit.goog_in21k_ft_in1k,448,96.290,3.710,99.630,0.370,217.32,1.000,bilinear\ntf_efficientnet_b6.aa_in1k,528,96.290,3.710,99.540,0.460,43.04,0.942,bicubic\nresnext101_32x8d.fb_swsl_ig1b_ft_in1k,224,96.270,3.730,99.590,0.410,88.79,0.875,bilinear\nefficientnetv2_rw_m.agc_in1k,416,96.270,3.730,99.560,0.440,53.24,1.000,bicubic\nmaxvit_small_tf_224.in1k,224,96.270,3.730,99.490,0.510,68.93,0.950,bicubic\nhgnetv2_b4.ssld_stage1_in22k_in1k,288,96.260,3.740,99.600,0.400,19.80,1.000,bicubic\nresnext101_32x16d.fb_swsl_ig1b_ft_in1k,224,96.260,3.740,99.500,0.500,194.03,0.875,bilinear\nresnetrs350.tf_in1k,384,96.260,3.740,99.480,0.520,163.96,1.000,bicubic\nxcit_medium_24_p16_224.fb_dist_in1k,224,96.260,3.740,99.400,0.600,84.40,1.000,bicubic\nvit_large_patch16_rope_mixed_ape_224.naver_in1k,224,96.260,3.740,99.370,0.630,304.40,0.900,bicubic\nefficientvit_l2.r224_in1k,224,96.260,3.740,99.330,0.670,63.71,1.000,bicubic\nconvformer_s18.sail_in
1k_384,384,96.250,3.750,99.540,0.460,26.77,1.000,bicubic\nxcit_tiny_24_p8_384.fb_dist_in1k,384,96.250,3.750,99.440,0.560,12.11,1.000,bicubic\nconvformer_b36.sail_in1k,224,96.250,3.750,99.310,0.690,99.88,1.000,bicubic\nconvnext_tiny.in12k_ft_in1k,288,96.240,3.760,99.650,0.350,28.59,1.000,bicubic\ndavit_base.msft_in1k,224,96.240,3.760,99.390,0.610,87.95,0.950,bicubic\ncoatnet_rmlp_2_rw_224.sw_in1k,224,96.240,3.760,99.280,0.720,73.88,0.950,bicubic\nmaxxvit_rmlp_small_rw_256.sw_in1k,256,96.230,3.770,99.490,0.510,66.01,0.950,bicubic\ndeit3_base_patch16_384.fb_in1k,384,96.230,3.770,99.410,0.590,86.88,1.000,bicubic\nvit_large_r50_s32_224.augreg_in21k_ft_in1k,224,96.220,3.780,99.530,0.470,328.99,0.900,bicubic\nhrnet_w48_ssld.paddle_in1k,224,96.220,3.780,99.510,0.490,77.47,0.950,bilinear\nregnetz_d8_evos.ch_in1k,320,96.220,3.780,99.490,0.510,23.46,1.000,bicubic\nmambaout_small.in1k,288,96.220,3.780,99.430,0.570,48.49,1.000,bicubic\nresnetrs350.tf_in1k,288,96.220,3.780,99.390,0.610,163.96,1.000,bicubic\ndeit3_large_patch16_224.fb_in1k,224,96.220,3.780,99.300,0.700,304.37,0.900,bicubic\nregnety_120.sw_in12k_ft_in1k,224,96.200,3.800,99.530,0.470,51.82,0.950,bicubic\nregnetz_040.ra3_in1k,320,96.200,3.800,99.510,0.490,27.12,1.000,bicubic\nconvnext_tiny.in12k_ft_in1k,224,96.190,3.810,99.580,0.420,28.59,0.950,bicubic\nedgenext_base.in21k_ft_in1k,320,96.190,3.810,99.470,0.530,18.51,1.000,bicubic\nefficientnetv2_rw_m.agc_in1k,320,96.190,3.810,99.460,0.540,53.24,1.000,bicubic\nvit_base_patch16_384.orig_in21k_ft_in1k,384,96.180,3.820,99.520,0.480,86.86,1.000,bicubic\nmambaout_base_wide_rw.sw_e500_in1k,224,96.180,3.820,99.350,0.650,94.45,0.950,bicubic\nresnetv2_152x2_bit.goog_teacher_in21k_ft_in1k_384,384,96.170,3.830,99.500,0.500,236.34,1.000,bicubic\nswinv2_base_window16_256.ms_in1k,256,96.170,3.830,99.400,0.600,87.92,0.900,bicubic\nhgnet_small.ssld_in1k,224,96.160,3.840,99.550,0.450,24.36,0.965,bicubic\ndeit3_medium_patch16_224.fb_in22k_ft_in1k,224,96.160,3.840,99.490,0.510,38.85,1.0
00,bicubic\nconvnext_tiny.fb_in22k_ft_in1k_384,384,96.160,3.840,99.480,0.520,28.59,1.000,bicubic\nvit_large_patch16_rope_ape_224.naver_in1k,224,96.160,3.840,99.480,0.520,304.37,0.900,bicubic\nvit_large_patch16_rope_mixed_224.naver_in1k,224,96.160,3.840,99.450,0.550,304.20,0.900,bicubic\ncoatnet_rmlp_1_rw2_224.sw_in12k_ft_in1k,224,96.160,3.840,99.320,0.680,41.72,0.950,bicubic\nmambaout_base.in1k,288,96.160,3.840,99.240,0.760,84.81,1.000,bicubic\nnextvit_small.bd_in1k_384,384,96.140,3.860,99.380,0.620,31.76,1.000,bicubic\nhiera_small_abswin_256.sbb2_pd_e200_in12k_ft_in1k,256,96.140,3.860,99.310,0.690,34.36,0.950,bicubic\ncrossvit_18_dagger_408.in1k,408,96.130,3.870,99.460,0.540,44.61,1.000,bicubic\nflexivit_base.1200ep_in1k,240,96.130,3.870,99.400,0.600,86.59,0.950,bicubic\nvit_mediumd_patch16_rope_reg1_gap_256.sbb_in1k,256,96.130,3.870,99.380,0.620,63.95,0.950,bicubic\nseresnext101_32x8d.ah_in1k,288,96.130,3.870,99.360,0.640,93.57,1.000,bicubic\nrexnetr_300.sw_in12k_ft_in1k,288,96.120,3.880,99.530,0.470,34.81,1.000,bicubic\nresnest269e.in1k,416,96.120,3.880,99.520,0.480,110.93,0.928,bicubic\nresnet200d.ra2_in1k,320,96.120,3.880,99.460,0.540,64.69,1.000,bicubic\nmambaout_base_tall_rw.sw_e500_in1k,224,96.120,3.880,99.390,0.610,86.48,0.950,bicubic\nefficientvit_b3.r288_in1k,288,96.120,3.880,99.310,0.690,48.65,1.000,bicubic\nconvformer_s36.sail_in1k,224,96.120,3.880,99.280,0.720,40.01,1.000,bicubic\nresnest200e.in1k,320,96.110,3.890,99.480,0.520,70.20,0.909,bicubic\nhgnetv2_b4.ssld_stage2_ft_in1k,224,96.100,3.900,99.520,0.480,19.80,0.965,bicubic\nresnetrs420.tf_in1k,320,96.100,3.900,99.470,0.530,191.89,1.000,bicubic\ntf_efficientnet_b3.ns_jft_in1k,300,96.100,3.900,99.470,0.530,12.23,0.904,bicubic\nseresnext101d_32x8d.ah_in1k,224,96.100,3.900,99.300,0.700,93.59,0.950,bicubic\ntf_efficientnet_b5.ap_in1k,456,96.090,3.910,99.540,0.460,30.39,0.934,bicubic\ncaformer_s36.sail_in1k,224,96.090,3.910,99.500,0.500,39.30,1.000,bicubic\nconvformer_s18.sail_in22k_ft_in1k,224,96.090,3.
910,99.490,0.510,26.77,1.000,bicubic\nrdnet_base.nv_in1k,224,96.090,3.910,99.320,0.680,87.45,0.900,bicubic\nxcit_large_24_p8_224.fb_in1k,224,96.090,3.910,99.150,0.850,188.93,1.000,bicubic\nresnext101_32x4d.fb_swsl_ig1b_ft_in1k,224,96.080,3.920,99.520,0.480,44.18,0.875,bilinear\nconvformer_m36.sail_in1k,224,96.080,3.920,99.240,0.760,57.05,1.000,bicubic\nvit_small_r26_s32_384.augreg_in21k_ft_in1k,384,96.070,3.930,99.560,0.440,36.47,1.000,bicubic\ntf_efficientnet_b7.aa_in1k,600,96.070,3.930,99.460,0.540,66.35,0.949,bicubic\nseresnet152d.ra2_in1k,256,96.070,3.930,99.410,0.590,66.84,0.950,bicubic\nswin_s3_base_224.ms_in1k,224,96.070,3.930,99.350,0.650,71.13,0.900,bicubic\nresnetrs270.tf_in1k,352,96.060,3.940,99.500,0.500,129.86,1.000,bicubic\nmobilenetv4_hybrid_large.ix_e600_r384_in1k,448,96.060,3.940,99.410,0.590,37.76,1.000,bicubic\nvit_base_patch16_rope_mixed_ape_224.naver_in1k,224,96.060,3.940,99.410,0.590,86.59,0.900,bicubic\nconvnextv2_tiny.fcmae_ft_in22k_in1k,224,96.060,3.940,99.380,0.620,28.64,0.875,bicubic\nswinv2_small_window16_256.ms_in1k,256,96.060,3.940,99.340,0.660,49.73,0.900,bicubic\ncs3se_edgenet_x.c2ns_in1k,320,96.050,3.950,99.450,0.550,50.72,1.000,bicubic\nswinv2_base_window8_256.ms_in1k,256,96.050,3.950,99.410,0.590,87.92,0.900,bicubic\nmaxvit_rmlp_tiny_rw_256.sw_in1k,256,96.050,3.950,99.400,0.600,29.15,0.950,bicubic\ngcvit_base.in1k,224,96.050,3.950,99.390,0.610,90.32,0.875,bicubic\nmambaout_base_short_rw.sw_e500_in1k,224,96.050,3.950,99.350,0.650,88.83,0.950,bicubic\nvit_base_patch16_224_miil.in21k_ft_in1k,224,96.050,3.950,99.350,0.650,86.54,0.875,bilinear\nvit_base_patch16_rope_reg1_gap_256.sbb_in1k,256,96.050,3.950,99.260,0.740,86.43,0.950,bicubic\ntf_efficientnetv2_m.in1k,384,96.040,3.960,99.500,0.500,54.14,1.000,bicubic\nconvnext_large.fb_in1k,224,96.040,3.960,99.470,0.530,197.77,0.875,bicubic\ndavit_small.msft_in1k,224,96.040,3.960,99.390,0.610,49.75,0.950,bicubic\nhgnetv2_b3.ssld_stage2_ft_in1k,288,96.030,3.970,99.590,0.410,16.29,1.000,bicubic
\neca_nfnet_l2.ra3_in1k,320,96.030,3.970,99.560,0.440,56.72,0.900,bicubic\ncaformer_s18.sail_in22k_ft_in1k,224,96.030,3.970,99.550,0.450,26.34,1.000,bicubic\nmobilenetv4_conv_aa_large.e600_r384_in1k,480,96.030,3.970,99.530,0.470,32.59,1.000,bicubic\nregnetz_d8.ra3_in1k,320,96.030,3.970,99.520,0.480,23.37,1.000,bicubic\nregnety_640.seer_ft_in1k,384,96.030,3.970,99.500,0.500,281.38,1.000,bicubic\nswin_small_patch4_window7_224.ms_in22k_ft_in1k,224,96.030,3.970,99.490,0.510,49.61,0.900,bicubic\nvolo_d1_224.sail_in1k,224,96.030,3.970,99.400,0.600,26.63,0.960,bicubic\ndm_nfnet_f2.dm_in1k,256,96.030,3.970,99.340,0.660,193.78,0.920,bicubic\nhgnetv2_b4.ssld_stage1_in22k_in1k,224,96.020,3.980,99.490,0.510,19.80,0.965,bicubic\nefficientnet_x_b5.sw_r448_e450_in1k,448,96.020,3.980,99.440,0.560,33.44,1.000,bicubic\nmvitv2_base.fb_in1k,224,96.020,3.980,99.350,0.650,51.47,0.900,bicubic\ncait_xs24_384.fb_dist_in1k,384,96.010,3.990,99.420,0.580,26.67,1.000,bicubic\nfastvit_ma36.apple_in1k,256,96.010,3.990,99.360,0.640,44.07,0.950,bicubic\nmambaout_small_rw.sw_e450_in1k,224,96.010,3.990,99.350,0.650,48.50,1.000,bicubic\nvit_small_patch16_384.augreg_in21k_ft_in1k,384,96.000,4.000,99.600,0.400,22.20,1.000,bicubic\nvit_medium_patch16_gap_256.sw_in12k_ft_in1k,256,96.000,4.000,99.490,0.510,38.86,0.950,bicubic\nrepvit_m2_3.dist_450e_in1k,224,96.000,4.000,99.400,0.600,23.69,0.950,bicubic\ncoat_lite_medium.in1k,224,96.000,4.000,99.350,0.650,44.57,0.900,bicubic\nseresnextaa101d_32x8d.ah_in1k,224,96.000,4.000,99.350,0.650,93.59,0.950,bicubic\nrexnetr_300.sw_in12k_ft_in1k,224,95.990,4.010,99.470,0.530,34.81,0.950,bicubic\nconvnext_small.fb_in1k,288,95.990,4.010,99.430,0.570,50.22,1.000,bicubic\nmobilenetv4_hybrid_large.e600_r384_in1k,384,95.990,4.010,99.340,0.660,37.76,0.950,bicubic\nvit_base_patch16_rope_mixed_224.naver_in1k,224,95.990,4.010,99.320,0.680,86.44,0.900,bicubic\nhgnetv2_b3.ssld_stage1_in22k_in1k,288,95.980,4.020,99.600,0.400,16.29,1.000,bicubic\ntf_efficientnet_b5.ra_in1k,456,95.98
0,4.020,99.460,0.540,30.39,0.934,bicubic\nflexivit_base.600ep_in1k,240,95.980,4.020,99.400,0.600,86.59,0.950,bicubic\nregnetz_040_h.ra3_in1k,256,95.980,4.020,99.390,0.610,28.94,1.000,bicubic\nmaxvit_rmlp_small_rw_224.sw_in1k,224,95.970,4.030,99.340,0.660,64.90,0.900,bicubic\nefficientvit_l1.r224_in1k,224,95.970,4.030,99.120,0.880,52.65,1.000,bicubic\nxcit_small_12_p8_224.fb_dist_in1k,224,95.960,4.040,99.420,0.580,26.21,1.000,bicubic\npvt_v2_b5.in1k,224,95.960,4.040,99.390,0.610,81.96,0.900,bicubic\nresnetrs152.tf_in1k,320,95.960,4.040,99.380,0.620,86.62,1.000,bicubic\nxcit_small_24_p8_224.fb_in1k,224,95.960,4.040,99.180,0.820,47.63,1.000,bicubic\nresnext101_32x8d.fb_wsl_ig1b_ft_in1k,224,95.950,4.050,99.400,0.600,88.79,0.875,bilinear\nfastvit_sa36.apple_dist_in1k,256,95.950,4.050,99.370,0.630,31.53,0.900,bicubic\nflexivit_base.300ep_in1k,240,95.950,4.050,99.360,0.640,86.59,0.950,bicubic\npvt_v2_b4.in1k,224,95.950,4.050,99.350,0.650,62.56,0.900,bicubic\neca_nfnet_l1.ra2_in1k,320,95.940,4.060,99.490,0.510,41.41,1.000,bicubic\nconvnext_base.fb_in1k,224,95.940,4.060,99.380,0.620,88.59,0.875,bicubic\nrepvgg_d2se.rvgg_in1k,320,95.920,4.080,99.490,0.510,133.33,1.000,bilinear\ngcvit_small.in1k,224,95.920,4.080,99.270,0.730,51.09,0.875,bicubic\nvit_base_patch32_384.augreg_in21k_ft_in1k,384,95.910,4.090,99.440,0.560,88.30,1.000,bicubic\ninception_next_base.sail_in1k,224,95.910,4.090,99.230,0.770,86.67,0.950,bicubic\nregnety_160.deit_in1k,288,95.900,4.100,99.560,0.440,83.59,1.000,bicubic\ntf_efficientnetv2_s.in21k_ft_in1k,300,95.900,4.100,99.360,0.640,21.46,1.000,bicubic\nvit_medium_patch16_reg1_gap_256.sbb_in1k,256,95.900,4.100,99.360,0.640,38.88,0.950,bicubic\nswin_base_patch4_window7_224.ms_in1k,224,95.900,4.100,99.300,0.700,87.77,0.900,bicubic\nmobilenetv4_hybrid_large.ix_e600_r384_in1k,384,95.900,4.100,99.260,0.740,37.76,0.950,bicubic\nmvitv2_small.fb_in1k,224,95.890,4.110,99.420,0.580,34.87,0.900,bicubic\nfocalnet_base_srf.ms_in1k,224,95.890,4.110,99.360,0.640,88.15,0.900
,bicubic\ntf_efficientnet_b5.aa_in1k,456,95.890,4.110,99.360,0.640,30.39,0.934,bicubic\nvit_medium_patch16_rope_reg1_gap_256.sbb_in1k,256,95.890,4.110,99.340,0.660,38.74,0.950,bicubic\nxcit_medium_24_p8_224.fb_in1k,224,95.890,4.110,99.090,0.910,84.32,1.000,bicubic\nmobilenetv4_hybrid_large.e600_r384_in1k,448,95.880,4.120,99.450,0.550,37.76,1.000,bicubic\nedgenext_base.in21k_ft_in1k,256,95.880,4.120,99.410,0.590,18.51,0.950,bicubic\nnextvit_large.bd_in1k,224,95.880,4.120,99.410,0.590,57.87,0.950,bicubic\nregnety_080.ra3_in1k,288,95.870,4.130,99.450,0.550,39.18,1.000,bicubic\nregnetz_d32.ra3_in1k,320,95.870,4.130,99.440,0.560,27.58,0.950,bicubic\nresmlp_big_24_224.fb_distilled_in1k,224,95.870,4.130,99.440,0.560,129.14,0.875,bicubic\nfasternet_l.in1k,224,95.870,4.130,99.350,0.650,93.47,1.000,bicubic\nnaflexvit_base_patch16_parfac_gap.e300_s576_in1k,384,95.870,4.130,99.160,0.840,86.46,1.000,bicubic\nsequencer2d_l.in1k,224,95.860,4.140,99.470,0.530,54.30,0.875,bicubic\npit_b_distilled_224.in1k,224,95.860,4.140,99.200,0.800,74.79,0.900,bicubic\nresnet152d.ra2_in1k,320,95.850,4.150,99.430,0.570,60.21,1.000,bicubic\nvit_little_patch16_reg1_gap_256.sbb_in12k_ft_in1k,256,95.850,4.150,99.410,0.590,22.52,0.950,bicubic\nswin_s3_small_224.ms_in1k,224,95.850,4.150,99.190,0.810,49.74,0.900,bicubic\ntf_efficientnet_b5.in1k,456,95.840,4.160,99.400,0.600,30.39,0.934,bicubic\nresnext101_64x4d.tv_in1k,224,95.840,4.160,99.320,0.680,83.46,0.875,bilinear\ntresnet_v2_l.miil_in21k_ft_in1k,224,95.840,4.160,99.290,0.710,46.17,0.875,bilinear\nfocalnet_base_lrf.ms_in1k,224,95.840,4.160,99.170,0.830,88.75,0.900,bicubic\nmambaout_base.in1k,224,95.840,4.160,99.100,0.900,84.81,1.000,bicubic\nresnetrs270.tf_in1k,256,95.830,4.170,99.410,0.590,129.86,1.000,bicubic\nregnetz_d8.ra3_in1k,256,95.830,4.170,99.310,0.690,23.37,0.940,bicubic\ncrossvit_15_dagger_408.in1k,408,95.830,4.170,99.300,0.700,28.50,1.000,bicubic\nregnety_064.ra3_in1k,288,95.830,4.170,99.300,0.700,30.58,1.000,bicubic\nresnetaa101d.sw_in1
2k_ft_in1k,224,95.830,4.170,99.290,0.710,44.57,0.950,bicubic\nmaxvit_tiny_tf_224.in1k,224,95.830,4.170,99.250,0.750,30.92,0.950,bicubic\nhiera_small_224.mae_in1k_ft_in1k,224,95.830,4.170,99.220,0.780,35.01,0.900,bicubic\nefficientvit_b3.r256_in1k,256,95.830,4.170,99.190,0.810,48.65,1.000,bicubic\nrdnet_small.nv_in1k,224,95.820,4.180,99.420,0.580,50.44,0.900,bicubic\ndeit3_small_patch16_224.fb_in22k_ft_in1k,224,95.820,4.180,99.410,0.590,22.06,1.000,bicubic\nresnetrs200.tf_in1k,256,95.820,4.180,99.350,0.650,93.21,1.000,bicubic\nconvnextv2_tiny.fcmae_ft_in1k,288,95.810,4.190,99.340,0.660,28.64,1.000,bicubic\nregnetz_d8_evos.ch_in1k,256,95.800,4.200,99.400,0.600,23.46,0.950,bicubic\nregnety_320.seer_ft_in1k,384,95.800,4.200,99.390,0.610,145.05,1.000,bicubic\nxcit_small_24_p16_224.fb_dist_in1k,224,95.800,4.200,99.350,0.650,47.67,1.000,bicubic\nvit_betwixt_patch16_rope_reg4_gap_256.sbb_in1k,256,95.800,4.200,99.270,0.730,60.23,0.950,bicubic\nseresnext101_32x8d.ah_in1k,224,95.800,4.200,99.260,0.740,93.57,0.950,bicubic\nedgenext_base.usi_in1k,320,95.790,4.210,99.580,0.420,18.51,1.000,bicubic\nregnetv_064.ra3_in1k,288,95.790,4.210,99.420,0.580,30.58,1.000,bicubic\nhgnetv2_b3.ssld_stage2_ft_in1k,224,95.790,4.210,99.390,0.610,16.29,0.965,bicubic\nvit_medium_patch16_reg4_gap_256.sbb_in1k,256,95.790,4.210,99.280,0.720,38.88,0.950,bicubic\ndeit3_base_patch16_224.fb_in1k,224,95.790,4.210,99.260,0.740,86.59,0.900,bicubic\nresnet152.a1h_in1k,288,95.780,4.220,99.440,0.560,60.19,1.000,bicubic\nconvnextv2_nano.fcmae_ft_in22k_in1k_384,384,95.780,4.220,99.300,0.700,15.62,1.000,bicubic\nedgenext_base.usi_in1k,256,95.770,4.230,99.420,0.580,18.51,0.950,bicubic\nregnetv_040.ra3_in1k,288,95.770,4.230,99.390,0.610,20.64,1.000,bicubic\nefficientnetv2_rw_s.ra2_in1k,384,95.770,4.230,99.370,0.630,23.94,1.000,bicubic\nswinv2_small_window8_256.ms_in1k,256,95.770,4.230,99.360,0.640,49.73,0.900,bicubic\nvit_base_patch16_rope_ape_224.naver_in1k,224,95.770,4.230,99.340,0.660,86.59,0.900,bicubic\ndeit_bas
e_distilled_patch16_224.fb_in1k,224,95.770,4.230,99.280,0.720,87.34,0.900,bicubic\nregnetz_040.ra3_in1k,256,95.760,4.240,99.410,0.590,27.12,1.000,bicubic\ntf_efficientnetv2_s.in1k,384,95.760,4.240,99.410,0.590,21.46,1.000,bicubic\nhrnet_w18_ssld.paddle_in1k,288,95.760,4.240,99.330,0.670,21.30,1.000,bilinear\nxcit_small_12_p16_224.fb_dist_in1k,224,95.760,4.240,99.300,0.700,26.25,1.000,bicubic\nnaflexvit_base_patch16_gap.e300_s576_in1k,384,95.760,4.240,99.130,0.870,86.63,1.000,bicubic\nhgnetv2_b2.ssld_stage2_ft_in1k,288,95.750,4.250,99.470,0.530,11.22,1.000,bicubic\nhgnetv2_b3.ssld_stage1_in22k_in1k,224,95.750,4.250,99.450,0.550,16.29,0.965,bicubic\nresnet101d.ra2_in1k,320,95.750,4.250,99.440,0.560,44.57,1.000,bicubic\nmambaout_small.in1k,224,95.750,4.250,99.260,0.740,48.49,1.000,bicubic\ntwins_pcpvt_large.in1k,224,95.740,4.260,99.480,0.520,60.99,0.900,bicubic\nresnetv2_152x2_bit.goog_teacher_in21k_ft_in1k,224,95.740,4.260,99.430,0.570,236.34,0.875,bicubic\nefficientformerv2_l.snap_dist_in1k,224,95.740,4.260,99.370,0.630,26.32,0.950,bicubic\nconvnext_tiny.fb_in22k_ft_in1k,224,95.740,4.260,99.360,0.640,28.59,0.875,bicubic\nmaxvit_tiny_rw_224.sw_in1k,224,95.740,4.260,99.170,0.830,29.06,0.950,bicubic\ndm_nfnet_f0.dm_in1k,256,95.730,4.270,99.370,0.630,71.49,0.900,bicubic\nswin_small_patch4_window7_224.ms_in1k,224,95.730,4.270,99.290,0.710,49.61,0.900,bicubic\nfocalnet_small_lrf.ms_in1k,224,95.730,4.270,99.210,0.790,50.34,0.900,bicubic\nvit_base_patch16_rope_224.naver_in1k,224,95.720,4.280,99.330,0.670,86.43,0.900,bicubic\nswinv2_cr_small_ns_224.sw_in1k,224,95.720,4.280,99.300,0.700,49.70,0.900,bicubic\ntiny_vit_11m_224.dist_in22k_ft_in1k,224,95.720,4.280,99.280,0.720,11.00,0.950,bicubic\ntwins_svt_large.in1k,224,95.710,4.290,99.360,0.640,99.27,0.900,bicubic\nnextvit_base.bd_in1k,224,95.700,4.300,99.380,0.620,44.82,0.950,bicubic\ngcvit_tiny.in1k,224,95.700,4.300,99.340,0.660,28.22,0.875,bicubic\nmobilenetv4_conv_aa_large.e600_r384_in1k,384,95.700,4.300,99.330,0.670,32.59,0
.950,bicubic\ncaformer_s18.sail_in1k,224,95.700,4.300,99.290,0.710,26.34,1.000,bicubic\nconvnext_nano.r384_in12k_ft_in1k,384,95.690,4.310,99.400,0.600,15.59,1.000,bicubic\ninception_next_small.sail_in1k,224,95.690,4.310,99.270,0.730,49.37,0.875,bicubic\ntiny_vit_21m_224.in1k,224,95.690,4.310,99.240,0.760,21.20,0.950,bicubic\nmobilenetv4_conv_large.e600_r384_in1k,448,95.680,4.320,99.450,0.550,32.59,1.000,bicubic\nmobilenetv4_hybrid_medium.ix_e550_r384_in1k,448,95.680,4.320,99.430,0.570,11.07,1.000,bicubic\nxception65.ra3_in1k,299,95.680,4.320,99.330,0.670,39.92,0.940,bicubic\nrepvit_m2_3.dist_300e_in1k,224,95.670,4.330,99.400,0.600,23.69,0.950,bicubic\nvit_betwixt_patch16_reg1_gap_256.sbb_in1k,256,95.670,4.330,99.280,0.720,60.40,0.950,bicubic\nresnext50_32x4d.fb_swsl_ig1b_ft_in1k,224,95.660,4.340,99.450,0.550,25.03,0.875,bilinear\nregnetz_c16_evos.ch_in1k,320,95.660,4.340,99.420,0.580,13.49,0.950,bicubic\ndeit3_small_patch16_384.fb_in1k,384,95.660,4.340,99.390,0.610,22.21,1.000,bicubic\nxception65p.ra3_in1k,299,95.660,4.340,99.280,0.720,39.82,0.940,bicubic\ndeit_base_patch16_384.fb_in1k,384,95.660,4.340,99.240,0.760,86.86,1.000,bicubic\ncait_s24_224.fb_dist_in1k,224,95.650,4.350,99.380,0.620,46.92,1.000,bicubic\nresnet200d.ra2_in1k,256,95.650,4.350,99.310,0.690,64.69,0.950,bicubic\nnaflexvit_base_patch16_par_gap.e300_s576_in1k,384,95.650,4.350,99.190,0.810,86.63,1.000,bicubic\necaresnet101d.miil_in1k,288,95.640,4.360,99.360,0.640,44.57,0.950,bicubic\nmambaout_tiny.in1k,288,95.640,4.360,99.250,0.750,26.55,1.000,bicubic\nese_vovnet57b.ra4_e3600_r256_in1k,320,95.630,4.370,99.450,0.550,38.61,1.000,bicubic\nresnetv2_101.a1h_in1k,288,95.630,4.370,99.370,0.630,44.54,1.000,bicubic\nfocalnet_small_srf.ms_in1k,224,95.630,4.370,99.300,0.700,49.89,0.900,bicubic\nefficientformer_l7.snap_dist_in1k,224,95.620,4.380,99.450,0.550,82.23,0.950,bicubic\nmobilenetv4_hybrid_medium.ix_e550_r384_in1k,384,95.620,4.380,99.400,0.600,11.07,0.950,bicubic\nfastvit_sa36.apple_in1k,256,95.620,4.380
,99.320,0.680,31.53,0.900,bicubic\ncoatnet_1_rw_224.sw_in1k,224,95.620,4.380,99.220,0.780,41.72,0.950,bicubic\nhgnet_tiny.ssld_in1k,288,95.610,4.390,99.470,0.530,14.74,1.000,bicubic\ndm_nfnet_f1.dm_in1k,224,95.610,4.390,99.370,0.630,132.63,0.910,bicubic\neca_nfnet_l1.ra2_in1k,256,95.610,4.390,99.300,0.700,41.41,0.900,bicubic\nresnest101e.in1k,256,95.610,4.390,99.280,0.720,48.28,0.875,bilinear\nsequencer2d_m.in1k,224,95.610,4.390,99.280,0.720,38.31,0.875,bicubic\nconvnext_small.fb_in1k,224,95.610,4.390,99.260,0.740,50.22,0.875,bicubic\nregnety_320.tv2_in1k,224,95.600,4.400,99.400,0.600,145.05,0.965,bicubic\ncs3se_edgenet_x.c2ns_in1k,256,95.600,4.400,99.310,0.690,50.72,0.950,bicubic\ntf_efficientnetv2_b3.in21k_ft_in1k,300,95.600,4.400,99.280,0.720,14.36,0.900,bicubic\ntf_efficientnet_b4.aa_in1k,380,95.590,4.410,99.320,0.680,19.34,0.922,bicubic\nefficientvit_b2.r288_in1k,288,95.590,4.410,99.240,0.760,24.33,1.000,bicubic\nregnety_064.ra3_in1k,224,95.590,4.410,99.160,0.840,30.58,0.950,bicubic\nfastvit_sa24.apple_dist_in1k,256,95.580,4.420,99.310,0.690,21.55,0.900,bicubic\nresnet101.a1h_in1k,288,95.580,4.420,99.260,0.740,44.55,1.000,bicubic\nresnet152.a1h_in1k,224,95.580,4.420,99.260,0.740,60.19,0.950,bicubic\nnest_base_jx.goog_in1k,224,95.570,4.430,99.300,0.700,67.72,0.875,bicubic\nresnext101_64x4d.c1_in1k,288,95.570,4.430,99.290,0.710,83.46,1.000,bicubic\nregnety_080.ra3_in1k,224,95.570,4.430,99.260,0.740,39.18,0.950,bicubic\nvit_betwixt_patch16_reg4_gap_256.sbb_in1k,256,95.570,4.430,99.260,0.740,60.40,0.950,bicubic\nhgnetv2_b2.ssld_stage1_in22k_in1k,288,95.560,4.440,99.440,0.560,11.22,1.000,bicubic\ntwins_svt_base.in1k,224,95.560,4.440,99.230,0.770,56.07,0.900,bicubic\nnest_small_jx.goog_in1k,224,95.560,4.440,99.220,0.780,38.35,0.875,bicubic\ntresnet_xl.miil_in1k_448,448,95.550,4.450,99.330,0.670,78.44,0.875,bilinear\nrexnet_300.nav_in1k,224,95.550,4.450,99.290,0.710,34.71,0.875,bicubic\nefficientvit_b3.r224_in1k,224,95.540,4.460,99.190,0.810,48.65,0.950,bicubic\neffic
ientnet_b4.ra2_in1k,384,95.530,4.470,99.410,0.590,19.34,1.000,bicubic\nregnety_160.deit_in1k,224,95.520,4.480,99.390,0.610,83.59,0.950,bicubic\ntf_efficientnet_b2.ns_jft_in1k,260,95.510,4.490,99.340,0.660,9.11,0.890,bicubic\ncoatnet_rmlp_1_rw_224.sw_in1k,224,95.510,4.490,99.260,0.740,41.69,0.950,bicubic\ntf_efficientnet_b4.ap_in1k,380,95.500,4.500,99.380,0.620,19.34,0.922,bicubic\ntf_efficientnet_b4.in1k,380,95.500,4.500,99.280,0.720,19.34,0.922,bicubic\ntwins_pcpvt_base.in1k,224,95.490,4.510,99.370,0.630,43.83,0.900,bicubic\nregnetv_064.ra3_in1k,224,95.490,4.510,99.330,0.670,30.58,0.950,bicubic\npvt_v2_b3.in1k,224,95.480,4.520,99.330,0.670,45.24,0.900,bicubic\nxcit_tiny_24_p8_224.fb_dist_in1k,224,95.480,4.520,99.310,0.690,12.11,1.000,bicubic\ncs3edgenet_x.c2_in1k,288,95.480,4.520,99.290,0.710,47.82,1.000,bicubic\nmaxvit_nano_rw_256.sw_in1k,256,95.480,4.520,99.130,0.870,15.45,0.950,bicubic\nxcit_tiny_24_p16_384.fb_dist_in1k,384,95.470,4.530,99.350,0.650,12.12,1.000,bicubic\nregnety_032.ra_in1k,288,95.470,4.530,99.320,0.680,19.44,1.000,bicubic\nregnety_040.ra3_in1k,288,95.450,4.550,99.410,0.590,20.65,1.000,bicubic\nxcit_tiny_12_p8_384.fb_dist_in1k,384,95.450,4.550,99.340,0.660,6.71,1.000,bicubic\nxcit_small_12_p8_224.fb_in1k,224,95.450,4.550,99.190,0.810,26.21,1.000,bicubic\neca_nfnet_l0.ra2_in1k,288,95.440,4.560,99.390,0.610,24.14,1.000,bicubic\nmaxxvitv2_nano_rw_256.sw_in1k,256,95.440,4.560,99.210,0.790,23.70,0.950,bicubic\nefficientnetv2_rw_s.ra2_in1k,288,95.440,4.560,99.190,0.810,23.94,1.000,bicubic\nnfnet_l0.ra2_in1k,288,95.430,4.570,99.430,0.570,35.07,1.000,bicubic\ncs3sedarknet_x.c2ns_in1k,288,95.430,4.570,99.320,0.680,35.40,1.000,bicubic\nsequencer2d_s.in1k,224,95.430,4.570,99.260,0.740,27.65,0.875,bicubic\nresnet101d.ra2_in1k,256,95.430,4.570,99.200,0.800,44.57,0.950,bicubic\nmaxvit_rmlp_nano_rw_256.sw_in1k,256,95.430,4.570,99.080,0.920,15.50,0.950,bicubic\nresnet152d.ra2_in1k,256,95.420,4.580,99.310,0.690,60.21,0.950,bicubic\nmobilevitv2_200.cvnets_in22k_ft
_in1k_384,384,95.420,4.580,99.280,0.720,18.45,1.000,bicubic\ntresnet_m.miil_in21k_ft_in1k,224,95.420,4.580,99.160,0.840,31.39,0.875,bilinear\nswinv2_cr_small_224.sw_in1k,224,95.420,4.580,99.060,0.940,49.70,0.900,bicubic\nefficientnet_b4.ra2_in1k,320,95.410,4.590,99.280,0.720,19.34,0.875,bicubic\ntresnet_l.miil_in1k_448,448,95.410,4.590,99.280,0.720,55.99,0.875,bilinear\nresnext101_32x16d.fb_ssl_yfcc100m_ft_in1k,224,95.400,4.600,99.400,0.600,194.03,0.875,bilinear\nswiftformer_l3.dist_in1k,224,95.400,4.600,99.320,0.680,28.49,0.950,bicubic\ntf_efficientnetv2_s.in1k,300,95.400,4.600,99.290,0.710,21.46,1.000,bicubic\nmvitv2_tiny.fb_in1k,224,95.400,4.600,99.160,0.840,24.17,0.900,bicubic\nconvnext_nano.in12k_ft_in1k,288,95.390,4.610,99.480,0.520,15.59,1.000,bicubic\nresnetv2_50x1_bit.goog_distilled_in1k,224,95.390,4.610,99.420,0.580,25.55,0.875,bicubic\nregnetz_c16.ra3_in1k,320,95.390,4.610,99.340,0.660,13.46,1.000,bicubic\nregnetz_d32.ra3_in1k,256,95.390,4.610,99.340,0.660,27.58,0.950,bicubic\nconvnextv2_nano.fcmae_ft_in22k_in1k,288,95.390,4.610,99.320,0.680,15.62,1.000,bicubic\nmobilenetv4_conv_large.e600_r384_in1k,384,95.390,4.610,99.250,0.750,32.59,0.950,bicubic\ndeit3_medium_patch16_224.fb_in1k,224,95.390,4.610,99.170,0.830,38.85,0.900,bicubic\nfasternet_m.in1k,224,95.380,4.620,99.320,0.680,53.52,1.000,bicubic\nconvnextv2_nano.fcmae_ft_in22k_in1k,224,95.380,4.620,99.180,0.820,15.62,0.875,bicubic\nhgnet_tiny.ssld_in1k,224,95.370,4.630,99.390,0.610,14.74,0.965,bicubic\nswinv2_tiny_window16_256.ms_in1k,256,95.370,4.630,99.300,0.700,28.35,0.900,bicubic\nresnetrs152.tf_in1k,256,95.370,4.630,99.240,0.760,86.62,1.000,bicubic\npnasnet5large.tf_in1k,331,95.370,4.630,99.130,0.870,86.06,0.911,bicubic\nmobilenetv4_conv_medium.e250_r384_in12k_ft_in1k,384,95.360,4.640,99.340,0.660,9.72,0.950,bicubic\nmaxxvit_rmlp_nano_rw_256.sw_in1k,256,95.360,4.640,99.330,0.670,16.78,0.950,bicubic\nregnetz_c16_evos.ch_in1k,256,95.350,4.650,99.290,0.710,13.49,0.950,bicubic\nregnetv_040.ra3_in1k,224
,95.350,4.650,99.250,0.750,20.64,0.950,bicubic\nresnext101_32x8d.fb_ssl_yfcc100m_ft_in1k,224,95.340,4.660,99.330,0.670,88.79,0.875,bilinear\nmobilevitv2_150.cvnets_in22k_ft_in1k_384,384,95.340,4.660,99.130,0.870,10.59,1.000,bicubic\nresnetv2_101x1_bit.goog_in21k_ft_in1k,448,95.330,4.670,99.380,0.620,44.54,1.000,bilinear\nvit_little_patch16_reg4_gap_256.sbb_in1k,256,95.330,4.670,99.290,0.710,22.52,0.950,bicubic\nrdnet_tiny.nv_in1k,224,95.330,4.670,99.260,0.740,23.86,0.900,bicubic\nconvformer_s18.sail_in1k,224,95.330,4.670,99.150,0.850,26.77,1.000,bicubic\nmobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k,320,95.320,4.680,99.430,0.570,11.07,1.000,bicubic\nrexnetr_200.sw_in12k_ft_in1k,288,95.320,4.680,99.420,0.580,16.52,1.000,bicubic\nregnetx_320.tv2_in1k,224,95.320,4.680,99.290,0.710,107.81,0.965,bicubic\nrepvit_m1_5.dist_450e_in1k,224,95.320,4.680,99.260,0.740,14.64,0.950,bicubic\nresnext101_32x8d.tv2_in1k,224,95.310,4.690,99.350,0.650,88.79,0.965,bilinear\nfastvit_sa24.apple_in1k,256,95.310,4.690,99.310,0.690,21.55,0.900,bicubic\nregnety_080_tv.tv2_in1k,224,95.310,4.690,99.230,0.770,39.38,0.965,bicubic\nconvnextv2_tiny.fcmae_ft_in1k,224,95.300,4.700,99.240,0.760,28.64,0.875,bicubic\ngc_efficientnetv2_rw_t.agc_in1k,288,95.300,4.700,99.220,0.780,13.68,1.000,bicubic\nresnet50d.ra4_e3600_r224_in1k,288,95.290,4.710,99.250,0.750,25.58,1.000,bicubic\nrexnetr_200.sw_in12k_ft_in1k,224,95.290,4.710,99.210,0.790,16.52,0.950,bicubic\nflexivit_small.600ep_in1k,240,95.290,4.710,99.190,0.810,22.06,0.950,bicubic\nefficientvit_b2.r256_in1k,256,95.290,4.710,99.110,0.890,24.33,1.000,bicubic\nvit_relpos_medium_patch16_cls_224.sw_in1k,224,95.280,4.720,99.120,0.880,38.76,0.900,bicubic\nresnetaa50d.sw_in12k_ft_in1k,288,95.270,4.730,99.390,0.610,25.58,1.000,bicubic\ncs3darknet_x.c2ns_in1k,288,95.270,4.730,99.290,0.710,35.05,1.000,bicubic\npvt_v2_b2_li.in1k,224,95.270,4.730,99.280,0.720,22.55,0.900,bicubic\nese_vovnet57b.ra4_e3600_r256_in1k,256,95.270,4.730,99.230,0.770,38.61,0.950,bicubic\
ntnt_b_patch16_224.in1k,224,95.270,4.730,99.120,0.880,65.43,0.900,bicubic\nconvnext_tiny_hnf.a2h_in1k,288,95.270,4.730,98.970,1.030,28.59,1.000,bicubic\nregnetx_160.tv2_in1k,224,95.260,4.740,99.260,0.740,54.28,0.965,bicubic\nvit_relpos_base_patch16_clsgap_224.sw_in1k,224,95.260,4.740,99.190,0.810,86.43,0.900,bicubic\nflexivit_small.1200ep_in1k,240,95.260,4.740,99.170,0.830,22.06,0.950,bicubic\nresnet50.fb_swsl_ig1b_ft_in1k,224,95.250,4.750,99.400,0.600,25.56,0.875,bilinear\nvit_large_patch32_384.orig_in21k_ft_in1k,384,95.250,4.750,99.320,0.680,306.63,1.000,bicubic\ncait_xxs36_384.fb_dist_in1k,384,95.250,4.750,99.310,0.690,17.37,1.000,bicubic\nwide_resnet101_2.tv2_in1k,224,95.250,4.750,99.200,0.800,126.89,0.965,bilinear\nhiera_tiny_224.mae_in1k_ft_in1k,224,95.250,4.750,99.090,0.910,27.91,0.900,bicubic\nvit_base_patch32_clip_224.laion2b_ft_in12k_in1k,224,95.240,4.760,99.250,0.750,88.22,0.900,bicubic\nresnetv2_50d_gn.ah_in1k,288,95.240,4.760,99.030,0.970,25.57,1.000,bicubic\nmobilenetv4_hybrid_medium.ix_e550_r256_in1k,320,95.230,4.770,99.410,0.590,11.07,1.000,bicubic\nmobilevitv2_175.cvnets_in22k_ft_in1k_384,384,95.230,4.770,99.380,0.620,14.25,1.000,bicubic\nvit_base_patch16_224.orig_in21k_ft_in1k,224,95.230,4.770,99.240,0.760,86.57,0.900,bicubic\nnextvit_small.bd_in1k,224,95.230,4.770,99.220,0.780,31.76,0.950,bicubic\nresnetrs101.tf_in1k,288,95.230,4.770,99.210,0.790,63.62,0.940,bicubic\ntiny_vit_11m_224.in1k,224,95.230,4.770,99.210,0.790,11.00,0.950,bicubic\nresnext101_64x4d.c1_in1k,224,95.230,4.770,99.130,0.870,83.46,0.950,bicubic\nefficientformer_l3.snap_dist_in1k,224,95.220,4.780,99.320,0.680,31.41,0.950,bicubic\nconvnext_nano.in12k_ft_in1k,224,95.220,4.780,99.260,0.740,15.59,0.950,bicubic\nfocalnet_tiny_lrf.ms_in1k,224,95.220,4.780,99.220,0.780,28.65,0.900,bicubic\nlevit_384.fb_dist_in1k,224,95.220,4.780,99.170,0.830,39.13,0.900,bicubic\nlevit_conv_384.fb_dist_in1k,224,95.220,4.780,99.170,0.830,39.13,0.900,bicubic\nconvnext_tiny.fb_in1k,288,95.210,4.790,99.320,0.
680,28.59,1.000,bicubic\ncoat_small.in1k,224,95.210,4.790,99.290,0.710,21.69,0.900,bicubic\nresnet51q.ra2_in1k,288,95.210,4.790,99.260,0.740,35.70,1.000,bilinear\nhgnetv2_b2.ssld_stage2_ft_in1k,224,95.210,4.790,99.240,0.760,11.22,0.965,bicubic\necaresnet101d.miil_in1k,224,95.200,4.800,99.250,0.750,44.57,0.875,bicubic\npoolformerv2_m48.sail_in1k,224,95.200,4.800,99.150,0.850,73.35,1.000,bicubic\nvit_relpos_medium_patch16_224.sw_in1k,224,95.190,4.810,99.230,0.770,38.75,0.900,bicubic\nvit_relpos_base_patch16_224.sw_in1k,224,95.180,4.820,99.280,0.720,86.43,0.900,bicubic\nnasnetalarge.tf_in1k,331,95.180,4.820,99.130,0.870,88.75,0.911,bicubic\ncrossvit_18_dagger_240.in1k,240,95.180,4.820,99.120,0.880,44.27,0.875,bicubic\nmobilenetv4_conv_large.e500_r256_in1k,320,95.160,4.840,99.340,0.660,32.59,1.000,bicubic\nresnext101_32x4d.fb_ssl_yfcc100m_ft_in1k,224,95.160,4.840,99.330,0.670,44.18,0.875,bilinear\nconvnextv2_nano.fcmae_ft_in1k,288,95.160,4.840,99.250,0.750,15.62,1.000,bicubic\nregnety_160.tv2_in1k,224,95.160,4.840,99.250,0.750,83.59,0.965,bicubic\nefficientnet_b3.ra2_in1k,320,95.160,4.840,99.210,0.790,12.23,1.000,bicubic\nregnetz_c16.ra3_in1k,256,95.160,4.840,99.190,0.810,13.46,0.940,bicubic\nhrnet_w18_ssld.paddle_in1k,224,95.150,4.850,99.210,0.790,21.30,0.950,bilinear\nflexivit_small.300ep_in1k,240,95.150,4.850,99.140,0.860,22.06,0.950,bicubic\ncs3edgenet_x.c2_in1k,256,95.140,4.860,99.240,0.760,47.82,0.887,bicubic\nrepvit_m1_5.dist_300e_in1k,224,95.140,4.860,99.240,0.760,14.64,0.950,bicubic\nvit_small_r26_s32_224.augreg_in21k_ft_in1k,224,95.140,4.860,99.220,0.780,36.43,0.900,bicubic\ntf_efficientnetv2_b3.in1k,300,95.140,4.860,99.210,0.790,14.36,0.904,bicubic\nresnet61q.ra2_in1k,288,95.140,4.860,99.070,0.930,36.85,1.000,bicubic\nresnet152.a1_in1k,288,95.140,4.860,99.010,0.990,60.19,1.000,bicubic\nxcit_medium_24_p16_224.fb_in1k,224,95.140,4.860,98.930,1.070,84.40,1.000,bicubic\nwide_resnet50_2.racm_in1k,288,95.130,4.870,99.260,0.740,68.88,0.950,bicubic\nfbnetv3_g.ra2_in1
k,288,95.130,4.870,99.200,0.800,16.62,0.950,bilinear\ninception_next_tiny.sail_in1k,224,95.120,4.880,99.140,0.860,28.06,0.875,bicubic\nconvit_base.fb_in1k,224,95.120,4.880,99.120,0.880,86.54,0.875,bicubic\nefficientformerv2_s2.snap_dist_in1k,224,95.120,4.880,99.120,0.880,12.71,0.950,bicubic\necaresnet50t.ra2_in1k,320,95.110,4.890,99.290,0.710,25.57,0.950,bicubic\nmambaout_tiny.in1k,224,95.110,4.890,99.110,0.890,26.55,1.000,bicubic\ncoat_lite_small.in1k,224,95.110,4.890,99.020,0.980,19.84,0.900,bicubic\ndavit_tiny.msft_in1k,224,95.100,4.900,99.140,0.860,28.36,0.950,bicubic\nmobilevitv2_200.cvnets_in22k_ft_in1k,256,95.100,4.900,99.090,0.910,18.45,0.888,bicubic\nhgnet_small.paddle_in1k,288,95.090,4.910,99.320,0.680,24.36,1.000,bicubic\ncs3sedarknet_l.c2ns_in1k,288,95.090,4.910,99.220,0.780,21.91,0.950,bicubic\nefficientnetv2_rw_t.ra2_in1k,288,95.090,4.910,99.210,0.790,13.65,1.000,bicubic\nvit_relpos_medium_patch16_rpn_224.sw_in1k,224,95.090,4.910,99.210,0.790,38.73,0.900,bicubic\npoolformer_m48.sail_in1k,224,95.090,4.910,99.110,0.890,73.47,0.950,bicubic\nmobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k,256,95.080,4.920,99.270,0.730,11.07,0.950,bicubic\ntresnet_xl.miil_in1k,224,95.080,4.920,99.250,0.750,78.44,0.875,bilinear\nxception41p.ra3_in1k,299,95.080,4.920,99.190,0.810,26.91,0.940,bicubic\nresnet152.tv2_in1k,224,95.080,4.920,99.180,0.820,60.19,0.965,bilinear\nxcit_small_24_p16_224.fb_in1k,224,95.080,4.920,99.070,0.930,47.67,1.000,bicubic\necaresnet101d_pruned.miil_in1k,288,95.060,4.940,99.240,0.760,24.88,0.950,bicubic\ncs3sedarknet_x.c2ns_in1k,256,95.060,4.940,99.200,0.800,35.40,0.887,bicubic\nswinv2_tiny_window8_256.ms_in1k,256,95.060,4.940,99.180,0.820,28.35,0.900,bicubic\ncoatnet_rmlp_nano_rw_224.sw_in1k,224,95.060,4.940,99.160,0.840,15.15,0.900,bicubic\ncrossvit_18_240.in1k,240,95.060,4.940,99.120,0.880,43.27,0.875,bicubic\npoolformer_m36.sail_in1k,224,95.060,4.940,99.100,0.900,56.17,0.950,bicubic\ngcvit_xtiny.in1k,224,95.050,4.950,99.190,0.810,19.98,0.875,bi
cubic\ncs3sedarknet_l.c2ns_in1k,256,95.050,4.950,99.140,0.860,21.91,0.887,bicubic\ncrossvit_base_240.in1k,240,95.050,4.950,98.980,1.020,105.03,0.875,bicubic\npoolformerv2_m36.sail_in1k,224,95.040,4.960,99.150,0.850,56.08,1.000,bicubic\ndeit_base_patch16_224.fb_in1k,224,95.040,4.960,98.960,1.040,86.57,0.900,bicubic\nresnet61q.ra2_in1k,256,95.040,4.960,98.940,1.060,36.85,0.900,bicubic\ncs3darknet_x.c2ns_in1k,256,95.030,4.970,99.190,0.810,35.05,0.950,bicubic\nresnet51q.ra2_in1k,256,95.030,4.970,99.190,0.810,35.70,0.875,bilinear\nhgnetv2_b2.ssld_stage1_in22k_in1k,224,95.030,4.970,99.180,0.820,11.22,0.965,bicubic\ncoatnet_nano_rw_224.sw_in1k,224,95.030,4.970,99.150,0.850,15.14,0.900,bicubic\nfocalnet_tiny_srf.ms_in1k,224,95.020,4.980,99.280,0.720,28.43,0.900,bicubic\nhalo2botnet50ts_256.a1h_in1k,256,95.020,4.980,99.050,0.950,22.64,0.950,bicubic\nseresnext50_32x4d.racm_in1k,288,95.010,4.990,99.200,0.800,27.56,0.950,bicubic\ntf_efficientnet_b3.ap_in1k,300,95.000,5.000,99.080,0.920,12.23,0.904,bicubic\nresnet152.a2_in1k,288,95.000,5.000,99.060,0.940,60.19,1.000,bicubic\nregnety_040.ra3_in1k,224,94.990,5.010,99.220,0.780,20.65,0.950,bicubic\nresnext50_32x4d.a1h_in1k,288,94.990,5.010,99.190,0.810,25.03,1.000,bicubic\npvt_v2_b2.in1k,224,94.990,5.010,99.140,0.860,25.36,0.900,bicubic\nresnet101.a1h_in1k,224,94.990,5.010,99.090,0.910,44.55,0.950,bicubic\ncoatnet_bn_0_rw_224.sw_in1k,224,94.980,5.020,99.230,0.770,27.44,0.950,bicubic\ncait_xxs24_384.fb_dist_in1k,384,94.980,5.020,99.150,0.850,12.03,1.000,bicubic\nconvnext_tiny.fb_in1k,224,94.970,5.030,99.210,0.790,28.59,0.875,bicubic\nvisformer_small.in1k,224,94.970,5.030,99.210,0.790,40.22,0.900,bicubic\nconvmixer_1536_20.in1k,224,94.960,5.040,99.170,0.830,51.63,0.960,bicubic\nswin_s3_tiny_224.ms_in1k,224,94.960,5.040,99.160,0.840,28.33,0.900,bicubic\neca_nfnet_l0.ra2_in1k,224,94.960,5.040,99.150,0.850,24.14,0.900,bicubic\ncrossvit_15_dagger_240.in1k,240,94.960,5.040,99.140,0.860,28.21,0.875,bicubic\nresnetaa50d.sw_in12k_ft_in1k,224
,94.950,5.050,99.260,0.740,25.58,0.950,bicubic\nvit_srelpos_medium_patch16_224.sw_in1k,224,94.950,5.050,99.200,0.800,38.74,0.900,bicubic\nxcit_large_24_p16_224.fb_in1k,224,94.950,5.050,98.830,1.170,189.10,1.000,bicubic\nresnetv2_101.a1h_in1k,224,94.940,5.060,99.150,0.850,44.54,0.950,bicubic\nnest_tiny_jx.goog_in1k,224,94.940,5.060,99.100,0.900,17.06,0.875,bicubic\ngernet_l.idstcv_in1k,256,94.930,5.070,99.200,0.800,31.08,0.875,bilinear\nefficientnet_b3.ra2_in1k,288,94.930,5.070,99.170,0.830,12.23,0.875,bicubic\ncoatnet_0_rw_224.sw_in1k,224,94.930,5.070,99.020,0.980,27.44,0.950,bicubic\nregnety_032.ra_in1k,224,94.920,5.080,99.180,0.820,19.44,0.950,bicubic\ntf_efficientnet_b3.aa_in1k,300,94.920,5.080,99.090,0.910,12.23,0.904,bicubic\nvit_small_patch16_224.augreg_in21k_ft_in1k,224,94.910,5.090,99.270,0.730,22.05,0.900,bicubic\nresnext101_32x8d.tv2_in1k,176,94.910,5.090,99.220,0.780,88.79,0.875,bilinear\nconvit_small.fb_in1k,224,94.910,5.090,99.130,0.870,27.78,0.875,bicubic\nconvnextv2_nano.fcmae_ft_in1k,224,94.910,5.090,99.000,1.000,15.62,0.875,bicubic\nnfnet_l0.ra2_in1k,224,94.900,5.100,99.260,0.740,35.07,0.900,bicubic\nresnetv2_50d_evos.ah_in1k,288,94.900,5.100,99.190,0.810,25.59,1.000,bicubic\ntf_efficientnet_lite4.in1k,380,94.900,5.100,99.120,0.880,13.01,0.920,bilinear\nregnety_032.tv2_in1k,224,94.890,5.110,99.250,0.750,19.44,0.965,bicubic\nconvnext_nano.d1h_in1k,288,94.890,5.110,99.130,0.870,15.59,1.000,bicubic\nresnet101.a2_in1k,288,94.890,5.110,99.090,0.910,44.55,1.000,bicubic\necaresnet50t.a1_in1k,288,94.890,5.110,99.080,0.920,25.57,1.000,bicubic\nmixer_b16_224.miil_in21k_ft_in1k,224,94.890,5.110,99.080,0.920,59.88,0.875,bilinear\nresnet101.tv2_in1k,224,94.890,5.110,99.030,0.970,44.55,0.965,bilinear\nefficientvit_b2.r224_in1k,224,94.890,5.110,99.000,1.000,24.33,0.950,bicubic\nresnet101.a1_in1k,288,94.890,5.110,99.000,1.000,44.55,1.000,bicubic\nseresnext50_32x4d.racm_in1k,224,94.880,5.120,99.130,0.870,27.56,0.875,bicubic\ncoatnext_nano_rw_224.sw_in1k,224,94.870,5
.130,99.210,0.790,14.70,0.900,bicubic\nxcit_tiny_24_p8_224.fb_in1k,224,94.870,5.130,99.190,0.810,12.11,1.000,bicubic\nxcit_small_12_p16_224.fb_in1k,224,94.870,5.130,99.040,0.960,26.25,1.000,bicubic\ntf_efficientnetv2_b3.in21k_ft_in1k,240,94.870,5.130,99.020,0.980,14.36,0.900,bicubic\ntf_efficientnet_b1.ns_jft_in1k,240,94.860,5.140,99.250,0.750,7.79,0.882,bicubic\nmobilenetv4_hybrid_medium.e500_r224_in1k,256,94.860,5.140,99.210,0.790,11.07,1.000,bicubic\nhgnet_small.paddle_in1k,224,94.860,5.140,99.200,0.800,24.36,0.965,bicubic\nresnetaa50.a1h_in1k,288,94.860,5.140,99.120,0.880,25.56,1.000,bicubic\nresnet50d.ra4_e3600_r224_in1k,224,94.860,5.140,99.050,0.950,25.58,0.950,bicubic\ntresnet_l.miil_in1k,224,94.860,5.140,99.030,0.970,55.99,0.875,bilinear\nedgenext_small.usi_in1k,320,94.850,5.150,99.410,0.590,5.59,1.000,bicubic\nswin_tiny_patch4_window7_224.ms_in22k_ft_in1k,224,94.840,5.160,99.280,0.720,28.29,0.900,bicubic\nresnet50d.ra2_in1k,288,94.840,5.160,99.230,0.770,25.58,0.950,bicubic\nvit_base_patch16_rpn_224.sw_in1k,224,94.830,5.170,99.090,0.910,86.54,0.900,bicubic\ncs3darknet_focus_l.c2ns_in1k,288,94.820,5.180,99.180,0.820,21.15,0.950,bicubic\nmobilenetv4_hybrid_medium.ix_e550_r256_in1k,256,94.820,5.180,99.090,0.910,11.07,0.950,bicubic\npit_b_224.in1k,224,94.820,5.180,98.820,1.180,73.76,0.900,bicubic\nwide_resnet50_2.tv2_in1k,224,94.800,5.200,99.260,0.740,68.88,0.965,bilinear\nmobilenetv3_large_150d.ra4_e3600_r256_in1k,320,94.790,5.210,99.200,0.800,14.62,1.000,bilinear\nmambaout_kobe.in1k,288,94.790,5.210,99.150,0.850,9.14,1.000,bicubic\ngcresnet50t.ra2_in1k,288,94.780,5.220,99.130,0.870,25.90,1.000,bicubic\nswinv2_cr_tiny_ns_224.sw_in1k,224,94.780,5.220,99.130,0.870,28.33,0.900,bicubic\nmobilevitv2_175.cvnets_in22k_ft_in1k,256,94.780,5.220,99.100,0.900,14.25,0.888,bicubic\nvit_base_patch32_clip_224.laion2b_ft_in1k,224,94.780,5.220,99.050,0.950,88.22,0.900,bicubic\nseresnet50.ra2_in1k,288,94.770,5.230,99.130,0.870,28.09,0.950,bicubic\ntwins_svt_small.in1k,224,94.770
,5.230,99.090,0.910,24.06,0.900,bicubic\nresnetv2_50x1_bit.goog_in21k_ft_in1k,448,94.760,5.240,99.180,0.820,25.55,1.000,bilinear\nconvnext_tiny_hnf.a2h_in1k,224,94.760,5.240,99.170,0.830,28.59,0.950,bicubic\nresnet152s.gluon_in1k,224,94.750,5.250,99.050,0.950,60.32,0.875,bicubic\nlamhalobotnet50ts_256.a1h_in1k,256,94.750,5.250,98.990,1.010,22.57,0.950,bicubic\ncoat_mini.in1k,224,94.750,5.250,98.950,1.050,10.34,0.900,bicubic\nresnet152.a2_in1k,224,94.750,5.250,98.810,1.190,60.19,0.950,bicubic\nlegacy_senet154.in1k,224,94.740,5.260,99.110,0.890,115.09,0.875,bilinear\nregnetx_080.tv2_in1k,224,94.740,5.260,99.050,0.950,39.57,0.965,bicubic\nmobilevitv2_150.cvnets_in22k_ft_in1k,256,94.740,5.260,98.920,1.080,10.59,0.888,bicubic\necaresnet50t.ra2_in1k,256,94.730,5.270,99.080,0.920,25.57,0.875,bicubic\nrepvit_m3.dist_in1k,224,94.730,5.270,99.070,0.930,10.68,0.950,bicubic\nxcit_tiny_12_p8_224.fb_dist_in1k,224,94.720,5.280,99.170,0.830,6.71,1.000,bicubic\npoolformerv2_s36.sail_in1k,224,94.710,5.290,99.240,0.760,30.79,1.000,bicubic\nresnext50_32x4d.fb_ssl_yfcc100m_ft_in1k,224,94.710,5.290,99.240,0.760,25.03,0.875,bilinear\nresnest50d_4s2x40d.in1k,224,94.710,5.290,99.140,0.860,30.42,0.875,bicubic\nresnetv2_50.a1h_in1k,288,94.710,5.290,99.110,0.890,25.55,1.000,bicubic\nsenet154.gluon_in1k,224,94.710,5.290,98.970,1.030,115.09,0.875,bicubic\ndeit3_small_patch16_224.fb_in1k,224,94.710,5.290,98.730,1.270,22.06,0.900,bicubic\ncs3darknet_l.c2ns_in1k,288,94.700,5.300,99.220,0.780,21.16,0.950,bicubic\npit_s_distilled_224.in1k,224,94.700,5.300,99.130,0.870,24.04,0.900,bicubic\nregnetz_b16.ra3_in1k,288,94.700,5.300,99.120,0.880,9.72,1.000,bicubic\nfastvit_sa12.apple_dist_in1k,256,94.700,5.300,99.070,0.930,11.58,0.900,bicubic\nhalonet50ts.a1h_in1k,256,94.700,5.300,98.840,1.160,22.73,0.940,bicubic\nswin_tiny_patch4_window7_224.ms_in1k,224,94.690,5.310,99.100,0.900,28.29,0.900,bicubic\ncrossvit_15_240.in1k,240,94.690,5.310,99.080,0.920,27.53,0.875,bicubic\nresnet152.a1_in1k,224,94.690,5.310,9
8.790,1.210,60.19,0.950,bicubic\necaresnet50d.miil_in1k,288,94.680,5.320,99.230,0.770,25.58,0.950,bicubic\nvit_relpos_small_patch16_224.sw_in1k,224,94.680,5.320,99.100,0.900,21.98,0.900,bicubic\nrexnet_200.nav_in1k,224,94.680,5.320,99.090,0.910,16.37,0.875,bicubic\nefficientnet_el.ra_in1k,300,94.670,5.330,99.130,0.870,10.59,0.904,bicubic\ndm_nfnet_f0.dm_in1k,192,94.670,5.330,99.060,0.940,71.49,0.900,bicubic\ntresnet_m.miil_in1k_448,448,94.660,5.340,99.170,0.830,31.39,0.875,bilinear\nefficientnetv2_rw_t.ra2_in1k,224,94.660,5.340,99.110,0.890,13.65,1.000,bicubic\nwide_resnet50_2.racm_in1k,224,94.660,5.340,99.060,0.940,68.88,0.875,bicubic\nseresnext101_64x4d.gluon_in1k,224,94.660,5.340,98.980,1.020,88.23,0.875,bicubic\nvit_small_patch16_384.augreg_in1k,384,94.650,5.350,99.140,0.860,22.20,1.000,bicubic\ntiny_vit_5m_224.dist_in22k_ft_in1k,224,94.630,5.370,99.140,0.860,5.39,0.950,bicubic\nefficientnet_b3_pruned.in1k,300,94.630,5.370,99.040,0.960,9.86,0.904,bicubic\npoolformer_s36.sail_in1k,224,94.630,5.370,99.030,0.970,30.86,0.900,bicubic\nresnest50d.in1k,224,94.630,5.370,99.030,0.970,27.48,0.875,bilinear\nresnet50_gn.a1h_in1k,288,94.620,5.380,99.150,0.850,25.56,0.950,bicubic\ntwins_pcpvt_small.in1k,224,94.610,5.390,99.140,0.860,24.11,0.900,bicubic\ngcresnet50t.ra2_in1k,256,94.610,5.390,98.990,1.010,25.90,0.900,bicubic\nrepvgg_b3.rvgg_in1k,224,94.610,5.390,98.920,1.080,123.09,0.875,bilinear\nfbnetv3_g.ra2_in1k,240,94.600,5.400,99.100,0.900,16.62,0.950,bilinear\npit_s_224.in1k,224,94.600,5.400,98.930,1.070,23.46,0.900,bicubic\ncrossvit_small_240.in1k,240,94.590,5.410,99.110,0.890,26.86,0.875,bicubic\nresnet50.tv2_in1k,224,94.590,5.410,99.090,0.910,25.56,0.965,bilinear\nconvnext_nano_ols.d1h_in1k,288,94.590,5.410,99.040,0.960,15.65,1.000,bicubic\nfasternet_s.in1k,224,94.590,5.410,99.020,0.980,31.18,1.000,bicubic\nvit_small_patch32_384.augreg_in21k_ft_in1k,384,94.580,5.420,99.140,0.860,22.92,1.000,bicubic\nvit_srelpos_small_patch16_224.sw_in1k,224,94.580,5.420,99.140,0.860,2
1.97,0.900,bicubic\ndeit_small_distilled_patch16_224.fb_in1k,224,94.580,5.420,99.090,0.910,22.44,0.900,bicubic\necaresnet50t.a2_in1k,288,94.580,5.420,99.050,0.950,25.57,1.000,bicubic\ntnt_s_legacy_patch16_224.in1k,224,94.570,5.430,99.180,0.820,23.76,0.900,bicubic\nresmlp_36_224.fb_distilled_in1k,224,94.570,5.430,99.170,0.830,44.69,0.875,bicubic\nlambda_resnet50ts.a1h_in1k,256,94.570,5.430,98.660,1.340,21.54,0.950,bicubic\nxcit_tiny_12_p16_384.fb_dist_in1k,384,94.560,5.440,99.170,0.830,6.72,1.000,bicubic\nconvnextv2_pico.fcmae_ft_in1k,288,94.560,5.440,99.150,0.850,9.07,0.950,bicubic\ngc_efficientnetv2_rw_t.agc_in1k,224,94.560,5.440,99.060,0.940,13.68,1.000,bicubic\nmobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k,256,94.560,5.440,99.010,0.990,8.46,0.950,bicubic\nmobilevitv2_200.cvnets_in1k,256,94.560,5.440,98.980,1.020,18.45,0.888,bicubic\nregnetx_032.tv2_in1k,224,94.560,5.440,98.890,1.110,15.30,0.965,bicubic\nresnext50_32x4d.a1h_in1k,224,94.560,5.440,98.820,1.180,25.03,0.950,bicubic\necaresnetlight.miil_in1k,288,94.550,5.450,99.190,0.810,30.16,0.950,bicubic\nrepvit_m1_1.dist_450e_in1k,224,94.550,5.450,99.080,0.920,8.80,0.950,bicubic\nres2net101d.in1k,224,94.550,5.450,98.990,1.010,45.23,0.875,bilinear\nresnet101.a2_in1k,224,94.550,5.450,98.900,1.100,44.55,0.950,bicubic\ngernet_m.idstcv_in1k,224,94.550,5.450,98.890,1.110,21.14,0.875,bilinear\nefficientnet_b1.ra4_e3600_r240_in1k,288,94.540,5.460,99.220,0.780,7.79,1.000,bicubic\nresnet50.c1_in1k,288,94.530,5.470,99.070,0.930,25.56,1.000,bicubic\nhaloregnetz_b.ra3_in1k,224,94.520,5.480,98.960,1.040,11.68,0.940,bicubic\nsehalonet33ts.ra2_in1k,256,94.520,5.480,98.780,1.220,13.69,0.940,bicubic\nrepvgg_b3g4.rvgg_in1k,224,94.510,5.490,99.020,0.980,83.83,0.875,bilinear\nresnet50.b1k_in1k,288,94.510,5.490,99.000,1.000,25.56,1.000,bicubic\nregnety_320.pycls_in1k,224,94.500,5.500,99.170,0.830,145.05,0.875,bicubic\nese_vovnet39b.ra_in1k,288,94.500,5.500,99.090,0.910,24.57,0.950,bicubic\nseresnext101_32x4d.gluon_in1k,224,94.500,5.500,99.
090,0.910,48.96,0.875,bicubic\nresnetv2_34d.ra4_e3600_r384_in1k,448,94.500,5.500,99.040,0.960,21.82,1.000,bicubic\ngcresnext50ts.ch_in1k,288,94.500,5.500,99.000,1.000,15.67,1.000,bicubic\nresnext50_32x4d.tv2_in1k,224,94.490,5.510,99.020,0.980,25.03,0.965,bilinear\nresnet50.d_in1k,288,94.490,5.510,98.990,1.010,25.56,1.000,bicubic\nresnet50d.a2_in1k,288,94.480,5.520,98.910,1.090,25.58,1.000,bicubic\nresnet101.a1_in1k,224,94.480,5.520,98.760,1.240,44.55,0.950,bicubic\nvit_base_patch16_384.augreg_in1k,384,94.470,5.530,99.020,0.980,86.86,1.000,bicubic\nvit_wee_patch16_reg1_gap_256.sbb_in1k,256,94.460,5.540,99.130,0.870,13.42,0.950,bicubic\neva02_tiny_patch14_336.mim_in22k_ft_in1k,336,94.460,5.540,99.100,0.900,5.76,1.000,bicubic\nfastvit_sa12.apple_in1k,256,94.460,5.540,99.080,0.920,11.58,0.900,bicubic\nrepvit_m2.dist_in1k,224,94.460,5.540,99.070,0.930,8.80,0.950,bicubic\nresnetaa50.a1h_in1k,224,94.460,5.540,98.900,1.100,25.56,0.950,bicubic\nresnet152.a3_in1k,224,94.460,5.540,98.880,1.120,60.19,0.950,bicubic\necaresnet101d_pruned.miil_in1k,224,94.450,5.550,99.100,0.900,24.88,0.875,bicubic\npoolformerv2_s24.sail_in1k,224,94.450,5.550,99.010,0.990,21.34,1.000,bicubic\nresnet152d.gluon_in1k,224,94.450,5.550,98.980,1.020,60.21,0.875,bicubic\nseresnet50.a2_in1k,288,94.450,5.550,98.910,1.090,28.09,1.000,bicubic\nvit_small_patch16_rope_mixed_224.naver_in1k,224,94.450,5.550,98.680,1.320,21.99,0.900,bicubic\nswiftformer_l1.dist_in1k,224,94.440,5.560,99.060,0.940,12.06,0.950,bicubic\nregnety_016.tv2_in1k,224,94.440,5.560,99.040,0.960,11.20,0.965,bicubic\nresnext50d_32x4d.bt_in1k,288,94.440,5.560,99.040,0.960,25.05,0.950,bicubic\nvit_base_patch32_clip_224.openai_ft_in1k,224,94.430,5.570,99.180,0.820,88.22,0.900,bicubic\npoolformer_s24.sail_in1k,224,94.430,5.570,99.060,0.940,21.39,0.900,bicubic\nvit_small_patch16_rope_224.naver_in1k,224,94.430,5.570,98.370,1.630,21.98,0.900,bicubic\nlevit_256.fb_dist_in1k,224,94.420,5.580,99.060,0.940,18.89,0.900,bicubic\ngcresnext50ts.ch_in1k,256,94
.420,5.580,98.980,1.020,15.67,0.900,bicubic\nresnetv2_50d_gn.ah_in1k,224,94.420,5.580,98.880,1.120,25.57,0.950,bicubic\nconvmixer_768_32.in1k,224,94.410,5.590,99.110,0.890,21.11,0.960,bicubic\nlevit_conv_256.fb_dist_in1k,224,94.410,5.590,99.060,0.940,18.89,0.900,bicubic\nedgenext_small.usi_in1k,256,94.400,5.600,99.180,0.820,5.59,0.950,bicubic\nmobilenetv4_conv_large.e500_r256_in1k,256,94.400,5.600,99.120,0.880,32.59,0.950,bicubic\nresnetrs101.tf_in1k,192,94.400,5.600,98.920,1.080,63.62,0.940,bicubic\nvit_pwee_patch16_reg1_gap_256.sbb_in1k,256,94.390,5.610,99.040,0.960,15.25,0.950,bicubic\nnf_resnet50.ra2_in1k,288,94.380,5.620,99.060,0.940,25.56,0.940,bicubic\nvit_base_patch32_224.augreg_in21k_ft_in1k,224,94.380,5.620,99.060,0.940,88.22,0.900,bicubic\nresnest50d_1s4x24d.in1k,224,94.380,5.620,99.030,0.970,25.68,0.875,bicubic\ntnt_s_patch16_224.in1k,224,94.370,5.630,99.170,0.830,23.77,0.900,bicubic\ndarknet53.c2ns_in1k,288,94.370,5.630,99.060,0.940,41.61,1.000,bicubic\nefficientnet_b2.ra_in1k,288,94.370,5.630,99.050,0.950,9.11,1.000,bicubic\ninception_v4.tf_in1k,299,94.370,5.630,98.830,1.170,42.68,0.875,bicubic\nresnext50_32x4d.a1_in1k,288,94.370,5.630,98.770,1.230,25.03,1.000,bicubic\nmobilenetv4_conv_medium.e500_r256_in1k,320,94.360,5.640,99.160,0.840,9.72,1.000,bicubic\ntf_efficientnet_el.in1k,300,94.360,5.640,99.090,0.910,10.59,0.904,bicubic\nedgenext_small_rw.sw_in1k,320,94.360,5.640,99.040,0.960,7.83,1.000,bicubic\nresnet50d.a1_in1k,288,94.360,5.640,98.790,1.210,25.58,1.000,bicubic\nresmlp_24_224.fb_distilled_in1k,224,94.350,5.650,99.090,0.910,30.02,0.875,bicubic\nxcit_tiny_12_p8_224.fb_in1k,224,94.350,5.650,99.050,0.950,6.71,1.000,bicubic\nresnext101_64x4d.gluon_in1k,224,94.350,5.650,98.910,1.090,83.46,0.875,bicubic\ngcresnet33ts.ra2_in1k,288,94.340,5.660,98.970,1.030,19.88,1.000,bicubic\nsebotnet33ts_256.a1h_in1k,256,94.340,5.660,98.540,1.460,13.70,0.940,bicubic\nresnet50.fb_ssl_yfcc100m_ft_in1k,224,94.330,5.670,99.140,0.860,25.56,0.875,bilinear\ninception_resn
et_v2.tf_in1k,299,94.330,5.670,98.820,1.180,55.84,0.897,bicubic\nvit_small_patch16_rope_ape_224.naver_in1k,224,94.330,5.670,98.510,1.490,22.06,0.900,bicubic\nresnet50.b2k_in1k,288,94.320,5.680,98.920,1.080,25.56,1.000,bicubic\necaresnet50d_pruned.miil_in1k,288,94.310,5.690,99.200,0.800,19.94,0.950,bicubic\nhgnet_tiny.paddle_in1k,288,94.310,5.690,99.180,0.820,14.74,1.000,bicubic\nresnext50_32x4d.ra_in1k,288,94.310,5.690,99.040,0.960,25.03,0.950,bicubic\nconvnext_nano.d1h_in1k,224,94.310,5.690,98.960,1.040,15.59,0.950,bicubic\nresnetv2_50.a1h_in1k,224,94.310,5.690,98.930,1.070,25.55,0.950,bicubic\nresnet50.a1_in1k,288,94.310,5.690,98.920,1.080,25.56,1.000,bicubic\nregnetx_120.pycls_in1k,224,94.300,5.700,99.190,0.810,46.11,0.875,bicubic\ncs3darknet_l.c2ns_in1k,256,94.300,5.700,99.110,0.890,21.16,0.887,bicubic\nresnetv2_50d_evos.ah_in1k,224,94.300,5.700,98.970,1.030,25.59,0.950,bicubic\nmobilevitv2_175.cvnets_in1k,256,94.300,5.700,98.900,1.100,14.25,0.888,bicubic\nrexnet_150.nav_in1k,224,94.290,5.710,99.090,0.910,9.73,0.875,bicubic\ntf_efficientnet_b2.ap_in1k,260,94.290,5.710,98.950,1.050,9.11,0.890,bicubic\ntf_efficientnet_b3.in1k,300,94.280,5.720,99.120,0.880,12.23,0.904,bicubic\nrepvit_m1_0.dist_450e_in1k,224,94.280,5.720,99.030,0.970,7.30,0.950,bicubic\nfastvit_s12.apple_dist_in1k,256,94.280,5.720,98.990,1.010,9.47,0.900,bicubic\neca_resnet33ts.ra2_in1k,288,94.270,5.730,99.040,0.960,19.68,1.000,bicubic\nresnet50.c2_in1k,288,94.270,5.730,99.040,0.960,25.56,1.000,bicubic\nres2net50d.in1k,224,94.270,5.730,98.880,1.120,25.72,0.875,bilinear\nresmlp_big_24_224.fb_in1k,224,94.270,5.730,98.820,1.180,129.14,0.875,bicubic\nseresnet33ts.ra2_in1k,288,94.260,5.740,99.010,0.990,19.78,1.000,bicubic\nmaxvit_rmlp_pico_rw_256.sw_in1k,256,94.260,5.740,99.000,1.000,7.52,0.950,bicubic\necaresnet50t.a2_in1k,224,94.260,5.740,98.950,1.050,25.57,0.950,bicubic\nlegacy_seresnext101_32x4d.in1k,224,94.250,5.750,98.970,1.030,48.96,0.875,bilinear\ndarknetaa53.c2ns_in1k,288,94.250,5.750,98.940,1.0
60,36.02,1.000,bilinear\nmixnet_xl.ra_in1k,224,94.250,5.750,98.840,1.160,11.90,0.875,bicubic\nseresnet33ts.ra2_in1k,256,94.250,5.750,98.770,1.230,19.78,0.900,bicubic\ncs3darknet_focus_l.c2ns_in1k,256,94.240,5.760,99.070,0.930,21.15,0.887,bicubic\ncspresnext50.ra_in1k,256,94.240,5.760,99.060,0.940,20.57,0.887,bilinear\nxcit_tiny_24_p16_224.fb_dist_in1k,224,94.240,5.760,98.950,1.050,12.12,1.000,bicubic\necaresnet50d.miil_in1k,224,94.230,5.770,98.990,1.010,25.58,0.875,bicubic\ntf_efficientnetv2_b3.in1k,240,94.230,5.770,98.860,1.140,14.36,0.904,bicubic\nefficientnet_b1.ra4_e3600_r240_in1k,240,94.220,5.780,99.090,0.910,7.79,0.900,bicubic\nmambaout_femto.in1k,288,94.210,5.790,99.050,0.950,7.30,1.000,bicubic\nregnetx_320.pycls_in1k,224,94.210,5.790,99.050,0.950,107.81,0.875,bicubic\ntf_efficientnet_b2.aa_in1k,260,94.210,5.790,99.020,0.980,9.11,0.890,bicubic\nresnet50.a1h_in1k,224,94.210,5.790,98.910,1.090,25.56,1.000,bicubic\nseresnet50.a1_in1k,288,94.210,5.790,98.840,1.160,28.09,1.000,bicubic\nmobilenetv3_large_150d.ra4_e3600_r256_in1k,256,94.200,5.800,99.030,0.970,14.62,0.950,bilinear\nresnet101s.gluon_in1k,224,94.200,5.800,99.020,0.980,44.67,0.875,bicubic\necaresnet50t.a1_in1k,224,94.200,5.800,98.840,1.160,25.57,0.950,bicubic\nmobilenetv4_hybrid_medium.e500_r224_in1k,224,94.200,5.800,98.840,1.160,11.07,0.950,bicubic\nresnext50_32x4d.a2_in1k,288,94.200,5.800,98.740,1.260,25.03,1.000,bicubic\nrepvit_m1_1.dist_300e_in1k,224,94.190,5.810,99.110,0.890,8.80,0.950,bicubic\nhgnetv2_b1.ssld_stage2_ft_in1k,288,94.180,5.820,99.030,0.970,6.34,1.000,bicubic\nresnet101d.gluon_in1k,224,94.180,5.820,98.970,1.030,44.57,0.875,bicubic\nefficientvit_b1.r288_in1k,288,94.180,5.820,98.940,1.060,9.10,1.000,bicubic\nresnet50_gn.a1h_in1k,224,94.180,5.820,98.930,1.070,25.56,0.940,bicubic\nvit_small_patch16_rope_mixed_ape_224.naver_in1k,224,94.180,5.820,98.520,1.480,22.06,0.900,bicubic\nseresnext50_32x4d.gluon_in1k,224,94.170,5.830,98.920,1.080,27.56,0.875,bicubic\nresnet50.ra_in1k,288,94.170,5.83
0,98.860,1.140,25.56,0.950,bicubic\nresnetblur50.bt_in1k,288,94.160,5.840,99.020,0.980,25.56,0.950,bicubic\ntf_efficientnet_lite3.in1k,300,94.160,5.840,98.960,1.040,8.20,0.904,bilinear\necaresnetlight.miil_in1k,224,94.150,5.850,98.950,1.050,30.16,0.875,bicubic\ndpn92.mx_in1k,224,94.150,5.850,98.940,1.060,37.67,0.875,bicubic\nwide_resnet101_2.tv2_in1k,176,94.150,5.850,98.850,1.150,126.89,0.875,bilinear\nconvnext_nano_ols.d1h_in1k,224,94.150,5.850,98.830,1.170,15.65,0.950,bicubic\nregnetz_b16.ra3_in1k,224,94.140,5.860,99.000,1.000,9.72,0.940,bicubic\ncspdarknet53.ra_in1k,256,94.140,5.860,98.990,1.010,27.64,0.887,bilinear\nnf_resnet50.ra2_in1k,256,94.140,5.860,98.950,1.050,25.56,0.940,bicubic\nmobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k,224,94.140,5.860,98.860,1.140,8.46,0.900,bicubic\nmobilenetv4_conv_blur_medium.e500_r224_in1k,256,94.130,5.870,99.150,0.850,9.72,1.000,bicubic\nhgnetv2_b1.ssld_stage1_in22k_in1k,288,94.130,5.870,99.020,0.980,6.34,1.000,bicubic\nregnety_160.pycls_in1k,224,94.130,5.870,99.000,1.000,83.59,0.875,bicubic\nresnet50.a2_in1k,288,94.130,5.870,98.850,1.150,25.56,1.000,bicubic\ninception_resnet_v2.tf_ens_adv_in1k,299,94.130,5.870,98.790,1.210,55.84,0.897,bicubic\nregnety_064.pycls_in1k,224,94.120,5.880,99.030,0.970,30.58,0.875,bicubic\nseresnet50.ra2_in1k,224,94.120,5.880,98.960,1.040,28.09,0.875,bicubic\nmambaout_kobe.in1k,224,94.120,5.880,98.920,1.080,9.14,1.000,bicubic\nresnext101_32x4d.gluon_in1k,224,94.120,5.880,98.910,1.090,44.18,0.875,bicubic\nfastvit_t12.apple_dist_in1k,256,94.100,5.900,99.030,0.970,7.55,0.900,bicubic\nconvnextv2_pico.fcmae_ft_in1k,224,94.100,5.900,99.020,0.980,9.07,0.875,bicubic\nefficientnet_b2.ra_in1k,256,94.100,5.900,99.010,0.990,9.11,0.875,bicubic\ntresnet_m.miil_in1k,224,94.100,5.900,98.830,1.170,31.39,0.875,bilinear\ngcvit_xxtiny.in1k,224,94.090,5.910,99.090,0.910,12.00,0.875,bicubic\nmobilenetv4_conv_medium.e500_r224_in1k,256,94.090,5.910,98.980,1.020,9.72,1.000,bicubic\nmobilevitv2_150.cvnets_in1k,256,94.080,5.920,
98.910,1.090,10.59,0.888,bicubic\nregnety_120.pycls_in1k,224,94.070,5.930,99.020,0.980,51.82,0.875,bicubic\nefficientnet_el_pruned.in1k,300,94.070,5.930,99.010,0.990,10.59,0.904,bicubic\ndarknet53.c2ns_in1k,256,94.060,5.940,98.980,1.020,41.61,0.887,bicubic\nresnet50d.ra2_in1k,224,94.060,5.940,98.940,1.060,25.58,0.875,bicubic\ntf_efficientnetv2_b2.in1k,260,94.060,5.940,98.940,1.060,10.10,0.890,bicubic\nresnetrs50.tf_in1k,224,94.050,5.950,98.840,1.160,35.69,0.910,bicubic\nresnet152.tv2_in1k,176,94.050,5.950,98.710,1.290,60.19,0.875,bilinear\nhrnet_w48.ms_in1k,224,94.040,5.960,99.020,0.980,77.47,0.875,bilinear\nconvnext_pico.d1_in1k,288,94.040,5.960,99.010,0.990,9.05,0.950,bicubic\nresnet152.gluon_in1k,224,94.040,5.960,98.730,1.270,60.19,0.875,bicubic\ndla102x2.in1k,224,94.030,5.970,99.010,0.990,41.28,0.875,bilinear\nconvnext_pico_ols.d1_in1k,288,94.030,5.970,98.930,1.070,9.06,1.000,bicubic\nregnetx_016.tv2_in1k,224,94.030,5.970,98.920,1.080,9.19,0.965,bicubic\nwide_resnet50_2.tv2_in1k,176,94.020,5.980,99.110,0.890,68.88,0.875,bilinear\nresnetv2_34d.ra4_e3600_r384_in1k,384,94.010,5.990,98.930,1.070,21.82,1.000,bicubic\nresnet50.bt_in1k,288,94.010,5.990,98.890,1.110,25.56,0.950,bicubic\ndpn107.mx_in1k,224,94.010,5.990,98.820,1.180,86.92,0.875,bicubic\ndeit_small_patch16_224.fb_in1k,224,94.000,6.000,98.950,1.050,22.05,0.900,bicubic\nresnet50.ram_in1k,288,94.000,6.000,98.880,1.120,25.56,0.950,bicubic\nresnet50d.a2_in1k,224,94.000,6.000,98.680,1.320,25.58,0.950,bicubic\nefficientformer_l1.snap_dist_in1k,224,93.990,6.010,99.060,0.940,12.29,0.950,bicubic\nskresnext50_32x4d.ra_in1k,224,93.980,6.020,98.820,1.180,27.48,0.875,bicubic\necaresnet26t.ra2_in1k,320,93.970,6.030,98.950,1.050,16.01,0.950,bicubic\nresnet50.c1_in1k,224,93.960,6.040,98.840,1.160,25.56,0.950,bicubic\nresnet50.a1_in1k,224,93.960,6.040,98.520,1.480,25.56,0.950,bicubic\ndpn98.mx_in1k,224,93.950,6.050,98.920,1.080,61.57,0.875,bicubic\nresnet33ts.ra2_in1k,288,93.950,6.050,98.870,1.130,19.68,1.000,bicubic\ncait_
xxs36_224.fb_dist_in1k,224,93.940,6.060,98.890,1.110,17.30,1.000,bicubic\nresnet50d.a1_in1k,224,93.940,6.060,98.630,1.370,25.58,0.950,bicubic\nresnet101.tv2_in1k,176,93.930,6.070,98.740,1.260,44.55,0.875,bilinear\nregnetx_160.pycls_in1k,224,93.920,6.080,99.080,0.920,54.28,0.875,bicubic\nregnety_080.pycls_in1k,224,93.920,6.080,98.990,1.010,39.18,0.875,bicubic\nxception71.tf_in1k,299,93.920,6.080,98.920,1.080,42.34,0.903,bicubic\ncspresnet50.ra_in1k,256,93.910,6.090,98.850,1.150,21.62,0.887,bilinear\nnf_regnet_b1.ra2_in1k,288,93.910,6.090,98.780,1.220,10.22,0.900,bicubic\nresnet50.c2_in1k,224,93.900,6.100,98.790,1.210,25.56,0.950,bicubic\ngcresnet33ts.ra2_in1k,256,93.890,6.110,98.930,1.070,19.88,0.900,bicubic\nvit_base_patch16_224.sam_in1k,224,93.890,6.110,98.890,1.110,86.57,0.900,bicubic\nresnet152c.gluon_in1k,224,93.890,6.110,98.800,1.200,60.21,0.875,bicubic\nhgnet_tiny.paddle_in1k,224,93.880,6.120,99.020,0.980,14.74,0.965,bicubic\neca_resnet33ts.ra2_in1k,256,93.880,6.120,98.870,1.130,19.68,0.900,bicubic\nresnet101.a3_in1k,224,93.870,6.130,98.760,1.240,44.55,0.950,bicubic\nmobilenetv4_conv_blur_medium.e500_r224_in1k,224,93.860,6.140,99.110,0.890,9.72,0.950,bicubic\necaresnet50t.a3_in1k,224,93.860,6.140,98.860,1.140,25.57,0.950,bicubic\necaresnet50d_pruned.miil_in1k,224,93.850,6.150,98.960,1.040,19.94,0.875,bicubic\nese_vovnet39b.ra_in1k,224,93.850,6.150,98.900,1.100,24.57,0.875,bicubic\nmobilenetv4_conv_medium.e500_r256_in1k,256,93.850,6.150,98.870,1.130,9.72,0.950,bicubic\nxcit_tiny_24_p16_224.fb_in1k,224,93.850,6.150,98.750,1.250,12.12,1.000,bicubic\nhgnetv2_b0.ssld_stage2_ft_in1k,288,93.840,6.160,99.030,0.970,6.00,1.000,bicubic\nefficientformerv2_s1.snap_dist_in1k,224,93.840,6.160,98.890,1.110,6.19,0.950,bicubic\nresnext50_32x4d.ra_in1k,224,93.840,6.160,98.820,1.180,25.03,0.875,bicubic\nresnext50d_32x4d.bt_in1k,224,93.840,6.160,98.730,1.270,25.05,0.875,bicubic\nrepvgg_b2g4.rvgg_in1k,224,93.830,6.170,98.920,1.080,61.76,0.875,bilinear\nedgenext_small_rw.sw_in1k,256
,93.830,6.170,98.720,1.280,7.83,0.900,bicubic\nhrnet_w64.ms_in1k,224,93.820,6.180,98.930,1.070,128.06,0.875,bilinear\nfbnetv3_d.ra2_in1k,256,93.820,6.180,98.880,1.120,10.31,0.950,bilinear\nresnet101.gluon_in1k,224,93.820,6.180,98.710,1.290,44.55,0.875,bicubic\nresnet50.b2k_in1k,224,93.820,6.180,98.680,1.320,25.56,0.950,bicubic\nefficientnet_b2_pruned.in1k,260,93.810,6.190,98.910,1.090,8.31,0.890,bicubic\ntiny_vit_5m_224.in1k,224,93.800,6.200,98.940,1.060,5.39,0.950,bicubic\nxception65.tf_in1k,299,93.800,6.200,98.910,1.090,39.92,0.903,bicubic\nresnet50.d_in1k,224,93.800,6.200,98.630,1.370,25.56,0.950,bicubic\nresnext50_32x4d.a1_in1k,224,93.800,6.200,98.440,1.560,25.03,0.950,bicubic\ndarknetaa53.c2ns_in1k,256,93.790,6.210,98.940,1.060,36.02,0.887,bilinear\nregnetx_080.pycls_in1k,224,93.790,6.210,98.900,1.100,39.57,0.875,bicubic\ndpn131.mx_in1k,224,93.790,6.210,98.830,1.170,79.25,0.875,bicubic\ntf_efficientnet_b0.ns_jft_in1k,224,93.780,6.220,98.970,1.030,5.29,0.875,bicubic\nresnext101_32x8d.tv_in1k,224,93.780,6.220,98.950,1.050,88.79,0.875,bilinear\nrepvit_m1_0.dist_300e_in1k,224,93.770,6.230,98.920,1.080,7.30,0.950,bicubic\ndla169.in1k,224,93.770,6.230,98.860,1.140,53.39,0.875,bilinear\nwide_resnet101_2.tv_in1k,224,93.770,6.230,98.830,1.170,126.89,0.875,bilinear\nresnext50_32x4d.a2_in1k,224,93.770,6.230,98.670,1.330,25.03,0.950,bicubic\nconvnextv2_femto.fcmae_ft_in1k,288,93.760,6.240,98.940,1.060,5.23,0.950,bicubic\nresnetv2_34d.ra4_e3600_r224_in1k,288,93.760,6.240,98.920,1.080,21.82,1.000,bicubic\nefficientnet_em.ra2_in1k,240,93.760,6.240,98.910,1.090,6.90,0.882,bicubic\nresnet50.tv2_in1k,176,93.750,6.250,98.750,1.250,25.56,0.875,bilinear\ndpn68b.ra_in1k,288,93.750,6.250,98.520,1.480,12.61,1.000,bicubic\ntf_efficientnet_b2.in1k,260,93.740,6.260,98.930,1.070,9.11,0.890,bicubic\nmobileone_s4.apple_in1k,224,93.740,6.260,98.670,1.330,14.95,0.900,bilinear\ntf_efficientnetv2_b1.in1k,240,93.730,6.270,98.850,1.150,8.14,0.882,bicubic\nhrnet_w40.ms_in1k,224,93.730,6.270,98.820
,1.180,57.56,0.875,bilinear\nresnet101c.gluon_in1k,224,93.730,6.270,98.760,1.240,44.57,0.875,bicubic\nresnet50.a2_in1k,224,93.730,6.270,98.720,1.280,25.56,0.950,bicubic\ntf_efficientnet_b1.aa_in1k,240,93.720,6.280,98.800,1.200,7.79,0.882,bicubic\nconvnext_pico_ols.d1_in1k,224,93.710,6.290,98.860,1.140,9.06,0.950,bicubic\nresnetblur50.bt_in1k,224,93.710,6.290,98.800,1.200,25.56,0.875,bicubic\nlevit_192.fb_dist_in1k,224,93.710,6.290,98.780,1.220,10.95,0.900,bicubic\nlevit_conv_192.fb_dist_in1k,224,93.710,6.290,98.780,1.220,10.95,0.900,bicubic\nresnetv2_34.ra4_e3600_r224_in1k,288,93.710,6.290,98.760,1.240,21.80,1.000,bicubic\nefficientvit_b1.r256_in1k,256,93.700,6.300,98.830,1.170,9.10,1.000,bicubic\nresnet50.b1k_in1k,224,93.700,6.300,98.800,1.200,25.56,0.950,bicubic\nresnext50_32x4d.gluon_in1k,224,93.700,6.300,98.670,1.330,25.03,0.875,bicubic\nregnetx_040.pycls_in1k,224,93.690,6.310,98.940,1.060,22.12,0.875,bicubic\nseresnet50.a2_in1k,224,93.690,6.310,98.650,1.350,28.09,0.950,bicubic\nregnetx_064.pycls_in1k,224,93.670,6.330,99.050,0.950,26.21,0.875,bicubic\nresmlp_36_224.fb_in1k,224,93.670,6.330,98.920,1.080,44.69,0.875,bicubic\nmobilenetv4_conv_medium.e500_r224_in1k,224,93.670,6.330,98.800,1.200,9.72,0.950,bicubic\nrexnet_130.nav_in1k,224,93.670,6.330,98.710,1.290,7.56,0.875,bicubic\nfastvit_s12.apple_in1k,256,93.660,6.340,98.730,1.270,9.47,0.900,bicubic\nhrnet_w44.ms_in1k,224,93.650,6.350,98.960,1.040,67.06,0.875,bilinear\nfbnetv3_b.ra2_in1k,256,93.650,6.350,98.900,1.100,8.60,0.950,bilinear\nlegacy_xception.tf_in1k,299,93.650,6.350,98.760,1.240,22.86,0.897,bicubic\ntf_efficientnet_b1.ap_in1k,240,93.640,6.360,98.780,1.220,7.79,0.882,bicubic\nresnet33ts.ra2_in1k,256,93.640,6.360,98.760,1.240,19.68,0.900,bicubic\nresnet34d.ra2_in1k,288,93.640,6.360,98.750,1.250,21.82,0.950,bicubic\nresnet50.am_in1k,224,93.630,6.370,98.860,1.140,25.56,0.875,bicubic\nhgnetv2_b1.ssld_stage1_in22k_in1k,224,93.620,6.380,98.770,1.230,6.34,0.965,bicubic\nhgnetv2_b1.ssld_stage2_ft_in1k,224,93.
620,6.380,98.750,1.250,6.34,0.965,bicubic\nhalonet26t.a1h_in1k,256,93.620,6.380,98.640,1.360,12.48,0.950,bicubic\nhgnetv2_b0.ssld_stage1_in22k_in1k,288,93.610,6.390,99.000,1.000,6.00,1.000,bicubic\nregnety_040.pycls_in1k,224,93.610,6.390,98.940,1.060,20.65,0.875,bicubic\ndla60_res2next.in1k,224,93.610,6.390,98.810,1.190,17.03,0.875,bilinear\ntf_efficientnetv2_b2.in1k,208,93.610,6.390,98.690,1.310,10.10,0.890,bicubic\ninception_v3.gluon_in1k,299,93.600,6.400,98.840,1.160,23.83,0.875,bicubic\nresnet32ts.ra2_in1k,288,93.600,6.400,98.740,1.260,17.96,1.000,bicubic\nresnet50s.gluon_in1k,224,93.580,6.420,98.850,1.150,25.68,0.875,bicubic\nresnet32ts.ra2_in1k,256,93.580,6.420,98.730,1.270,17.96,0.900,bicubic\ntf_efficientnet_cc_b1_8e.in1k,240,93.580,6.420,98.710,1.290,39.72,0.882,bicubic\nrepvgg_b2.rvgg_in1k,224,93.570,6.430,99.070,0.930,89.02,0.875,bilinear\nres2net50_26w_6s.in1k,224,93.570,6.430,98.760,1.240,37.05,0.875,bilinear\nresnet50d.gluon_in1k,224,93.570,6.430,98.710,1.290,25.58,0.875,bicubic\nshvit_s4.in1k,256,93.560,6.440,98.780,1.220,16.59,0.875,bicubic\neca_halonext26ts.c1_in1k,256,93.560,6.440,98.690,1.310,10.76,0.940,bicubic\nresnext50_32x4d.a3_in1k,224,93.550,6.450,98.770,1.230,25.03,0.950,bicubic\nconvnext_tiny.fb_in22k_ft_in1k,288,93.550,6.450,98.620,1.380,28.59,1.000,bicubic\nseresnet50.a1_in1k,224,93.550,6.450,98.570,1.430,28.09,0.950,bicubic\ngmlp_s16_224.ra3_in1k,224,93.540,6.460,98.780,1.220,19.42,0.875,bicubic\nrepghostnet_200.in1k,224,93.540,6.460,98.590,1.410,9.80,0.875,bicubic\ndla102x.in1k,224,93.530,6.470,98.860,1.140,26.31,0.875,bilinear\nres2net101_26w_4s.in1k,224,93.530,6.470,98.620,1.380,45.21,0.875,bilinear\npvt_v2_b1.in1k,224,93.520,6.480,98.880,1.120,14.01,0.900,bicubic\nmambaout_femto.in1k,224,93.520,6.480,98.810,1.190,7.30,1.000,bicubic\ncoat_lite_mini.in1k,224,93.520,6.480,98.800,1.200,11.01,0.900,bicubic\nresnet50.ram_in1k,224,93.520,6.480,98.600,1.400,25.56,0.875,bicubic\nconvnext_pico.d1_in1k,224,93.510,6.490,98.840,1.160,9.05,0.875,
bicubic\ncait_xxs24_224.fb_dist_in1k,224,93.510,6.490,98.780,1.220,11.96,1.000,bicubic\ncoat_tiny.in1k,224,93.510,6.490,98.680,1.320,5.50,0.900,bicubic\nvit_tiny_patch16_384.augreg_in21k_ft_in1k,384,93.500,6.500,98.840,1.160,5.79,1.000,bicubic\nefficientnet_b0.ra4_e3600_r224_in1k,256,93.500,6.500,98.830,1.170,5.29,1.000,bicubic\nfastvit_t12.apple_in1k,256,93.500,6.500,98.670,1.330,7.55,0.900,bicubic\nstarnet_s4.in1k,224,93.500,6.500,98.610,1.390,7.48,0.875,bicubic\nregnety_032.pycls_in1k,224,93.490,6.510,98.960,1.040,19.44,0.875,bicubic\nmobilevitv2_125.cvnets_in1k,256,93.480,6.520,98.840,1.160,7.48,0.888,bicubic\nselecsls60b.in1k,224,93.480,6.520,98.840,1.160,32.77,0.875,bicubic\nwide_resnet50_2.tv_in1k,224,93.470,6.530,98.970,1.030,68.88,0.875,bilinear\nrepvit_m0_9.dist_450e_in1k,224,93.470,6.530,98.880,1.120,5.49,0.950,bicubic\nxception41.tf_in1k,299,93.470,6.530,98.740,1.260,26.97,0.903,bicubic\nresnext50_32x4d.tv2_in1k,176,93.470,6.530,98.670,1.330,25.03,0.875,bilinear\nhrnet_w18.ms_aug_in1k,224,93.460,6.540,99.000,1.000,21.30,0.950,bilinear\nconvnext_femto.d1_in1k,288,93.460,6.540,98.830,1.170,5.22,0.950,bicubic\nvit_small_patch16_224.augreg_in1k,224,93.460,6.540,98.770,1.230,22.05,0.900,bicubic\nresnet34.ra4_e3600_r224_in1k,288,93.460,6.540,98.680,1.320,21.80,1.000,bicubic\nnf_regnet_b1.ra2_in1k,256,93.450,6.550,98.850,1.150,10.22,0.900,bicubic\nrepvit_m0_9.dist_300e_in1k,224,93.450,6.550,98.830,1.170,5.49,0.950,bicubic\nlambda_resnet26rpt_256.c1_in1k,256,93.440,6.560,98.880,1.120,10.99,0.940,bicubic\nlambda_resnet26t.c1_in1k,256,93.440,6.560,98.760,1.240,10.96,0.940,bicubic\nresnet50.ra_in1k,224,93.430,6.570,98.830,1.170,25.56,0.875,bicubic\nresmlp_24_224.fb_in1k,224,93.430,6.570,98.820,1.180,30.02,0.875,bicubic\nlegacy_seresnet152.in1k,224,93.420,6.580,98.850,1.150,66.82,0.875,bilinear\ndla60_res2net.in1k,224,93.420,6.580,98.840,1.160,20.85,0.875,bilinear\nres2net50_26w_8s.in1k,224,93.420,6.580,98.680,1.320,48.40,0.875,bilinear\necaresnet26t.ra2_in1k,256,93
.420,6.580,98.670,1.330,16.01,0.875,bicubic\nconvnext_femto_ols.d1_in1k,288,93.410,6.590,98.900,1.100,5.23,0.950,bicubic\nlegacy_seresnext50_32x4d.in1k,224,93.410,6.590,98.800,1.200,27.56,0.875,bilinear\nrepvgg_b1.rvgg_in1k,224,93.410,6.590,98.770,1.230,57.42,0.875,bilinear\nbotnet26t_256.c1_in1k,256,93.410,6.590,98.660,1.340,12.49,0.950,bicubic\nhrnet_w30.ms_in1k,224,93.400,6.600,98.850,1.150,37.71,0.875,bilinear\nresnet50d.a3_in1k,224,93.400,6.600,98.750,1.250,25.58,0.950,bicubic\nresnet50.a1h_in1k,176,93.390,6.610,98.760,1.240,25.56,0.900,bicubic\neca_botnext26ts_256.c1_in1k,256,93.390,6.610,98.700,1.300,10.59,0.950,bicubic\nxcit_nano_12_p8_384.fb_dist_in1k,384,93.380,6.620,98.880,1.120,3.05,1.000,bicubic\nresnet152.a3_in1k,160,93.370,6.630,98.580,1.420,60.19,0.950,bicubic\nseresnext26t_32x4d.bt_in1k,288,93.340,6.660,98.700,1.300,16.81,0.950,bicubic\nrepvit_m1.dist_in1k,224,93.340,6.660,98.660,1.340,5.49,0.950,bicubic\nvit_base_patch16_224.augreg_in1k,224,93.340,6.660,98.640,1.360,86.57,0.900,bicubic\nxcit_tiny_12_p16_224.fb_dist_in1k,224,93.330,6.670,98.760,1.240,6.72,1.000,bicubic\nefficientvit_b1.r224_in1k,224,93.320,6.680,98.620,1.380,9.10,0.950,bicubic\nregnetx_032.pycls_in1k,224,93.300,6.700,98.700,1.300,15.30,0.875,bicubic\npit_xs_distilled_224.in1k,224,93.290,6.710,98.790,1.210,11.00,0.900,bicubic\nlegacy_seresnet101.in1k,224,93.290,6.710,98.750,1.250,49.33,0.875,bilinear\ndpn68b.ra_in1k,224,93.290,6.710,98.530,1.470,12.61,0.950,bicubic\nresnest26d.gluon_in1k,224,93.280,6.720,98.870,1.130,17.07,0.875,bilinear\ndla102.in1k,224,93.280,6.720,98.790,1.210,33.27,0.875,bilinear\nresnet152.tv_in1k,224,93.270,6.730,98.750,1.250,60.19,0.875,bilinear\nmixnet_l.ft_in1k,224,93.270,6.730,98.680,1.320,7.33,0.875,bicubic\ncs3darknet_m.c2ns_in1k,288,93.250,6.750,98.720,1.280,9.31,0.950,bicubic\nfasternet_t2.in1k,224,93.250,6.750,98.640,1.360,14.98,1.000,bicubic\nfbnetv3_d.ra2_in1k,224,93.230,6.770,98.670,1.330,10.31,0.950,bilinear\ntf_efficientnetv2_b1.in1k,192,93.230,6.
770,98.540,1.460,8.14,0.882,bicubic\ndla60x.in1k,224,93.220,6.780,98.720,1.280,17.35,0.875,bilinear\ninception_v3.tf_in1k,299,93.210,6.790,98.480,1.520,23.83,0.875,bicubic\ntf_efficientnet_em.in1k,240,93.200,6.800,98.680,1.320,6.90,0.882,bicubic\nconvnextv2_femto.fcmae_ft_in1k,224,93.190,6.810,98.870,1.130,5.23,0.875,bicubic\nres2net50_26w_4s.in1k,224,93.190,6.810,98.660,1.340,25.70,0.875,bilinear\nmobilevit_s.cvnets_in1k,256,93.180,6.820,98.800,1.200,5.58,0.900,bicubic\nvit_base_patch32_384.augreg_in1k,384,93.180,6.820,98.620,1.380,88.30,1.000,bicubic\nvit_relpos_base_patch32_plus_rpn_256.sw_in1k,256,93.170,6.830,98.320,1.680,119.42,0.900,bicubic\nregnety_008_tv.tv2_in1k,224,93.160,6.840,98.680,1.320,6.43,0.965,bicubic\nres2next50.in1k,224,93.140,6.860,98.650,1.350,24.67,0.875,bilinear\nstarnet_s3.in1k,224,93.140,6.860,98.640,1.360,5.75,0.875,bicubic\nghostnetv2_160.in1k,224,93.130,6.870,98.710,1.290,12.39,0.875,bicubic\nefficientnet_b0.ra4_e3600_r224_in1k,224,93.130,6.870,98.660,1.340,5.29,0.900,bicubic\ncs3darknet_focus_m.c2ns_in1k,288,93.120,6.880,98.750,1.250,9.30,0.950,bicubic\nbat_resnext26ts.ch_in1k,256,93.120,6.880,98.740,1.260,10.73,0.900,bicubic\nmobilevitv2_100.cvnets_in1k,256,93.110,6.890,98.760,1.240,4.90,0.888,bicubic\nresnet50.bt_in1k,224,93.110,6.890,98.600,1.400,25.56,0.875,bicubic\nseresnext26d_32x4d.bt_in1k,288,93.060,6.940,98.710,1.290,16.81,0.950,bicubic\ntf_efficientnetv2_b0.in1k,224,93.060,6.940,98.700,1.300,7.14,0.875,bicubic\nlevit_conv_128.fb_dist_in1k,224,93.060,6.940,98.690,1.310,9.21,0.900,bicubic\nregnety_016.pycls_in1k,224,93.060,6.940,98.660,1.340,11.20,0.875,bicubic\nefficientnet_b1_pruned.in1k,240,93.060,6.940,98.560,1.440,6.33,0.882,bicubic\nrepvgg_b1g4.rvgg_in1k,224,93.050,6.950,98.810,1.190,39.97,0.875,bilinear\nlevit_128.fb_dist_in1k,224,93.050,6.950,98.690,1.310,9.21,0.900,bicubic\ntf_mixnet_l.in1k,224,93.050,6.950,98.550,1.450,7.33,0.875,bicubic\nresnetv2_34d.ra4_e3600_r224_in1k,224,93.040,6.960,98.700,1.300,21.82,0.900,bicub
ic\nswiftformer_s.dist_in1k,224,93.040,6.960,98.550,1.450,6.09,0.950,bicubic\nhardcorenas_f.miil_green_in1k,224,93.030,6.970,98.650,1.350,8.20,0.875,bilinear\nefficientnet_b1.ft_in1k,256,93.020,6.980,98.710,1.290,7.79,1.000,bicubic\nres2net50_14w_8s.in1k,224,93.020,6.980,98.700,1.300,25.06,0.875,bilinear\nresnet34.a1_in1k,288,93.020,6.980,98.590,1.410,21.80,1.000,bicubic\ndensenetblur121d.ra_in1k,288,93.010,6.990,98.600,1.400,8.00,0.950,bicubic\nmobileone_s3.apple_in1k,224,93.000,7.000,98.610,1.390,10.17,0.900,bilinear\nselecsls60.in1k,224,92.990,7.010,98.830,1.170,30.67,0.875,bicubic\nhrnet_w18_small_v2.gluon_in1k,224,92.980,7.020,98.780,1.220,15.60,0.875,bicubic\nhrnet_w32.ms_in1k,224,92.970,7.030,98.830,1.170,41.23,0.875,bilinear\nvisformer_tiny.in1k,224,92.970,7.030,98.740,1.260,10.32,0.900,bicubic\nseresnext26ts.ch_in1k,288,92.970,7.030,98.710,1.290,10.39,1.000,bicubic\nresnet50.a3_in1k,224,92.970,7.030,98.550,1.450,25.56,0.950,bicubic\ninception_v3.tf_adv_in1k,299,92.970,7.030,98.490,1.510,23.83,0.875,bicubic\nresnet26t.ra2_in1k,320,92.960,7.040,98.680,1.320,16.01,1.000,bicubic\ndpn68b.mx_in1k,224,92.960,7.040,98.490,1.510,12.61,0.875,bicubic\ndensenet161.tv_in1k,224,92.950,7.050,98.800,1.200,28.68,0.875,bicubic\nefficientnet_es.ra_in1k,224,92.950,7.050,98.690,1.310,5.44,0.875,bicubic\nconvnext_atto_ols.a2_in1k,288,92.950,7.050,98.680,1.320,3.70,0.950,bicubic\nhardcorenas_e.miil_green_in1k,224,92.950,7.050,98.590,1.410,8.07,0.875,bilinear\ntf_efficientnet_b1.in1k,240,92.940,7.060,98.660,1.340,7.79,0.882,bicubic\nresnet50c.gluon_in1k,224,92.930,7.070,98.710,1.290,25.58,0.875,bicubic\nfbnetv3_b.ra2_in1k,224,92.920,7.080,98.760,1.240,8.60,0.950,bilinear\nresnetrs50.tf_in1k,160,92.920,7.080,98.600,1.400,35.69,0.910,bicubic\nconvnextv2_atto.fcmae_ft_in1k,288,92.920,7.080,98.560,1.440,3.71,0.950,bicubic\nconvnext_femto_ols.d1_in1k,224,92.910,7.090,98.680,1.320,5.23,0.875,bicubic\npoolformerv2_s12.sail_in1k,224,92.910,7.090,98.530,1.470,11.89,1.000,bicubic\npit_xs_22
4.in1k,224,92.900,7.100,98.790,1.210,10.62,0.900,bicubic\nresnext50_32x4d.tv_in1k,224,92.900,7.100,98.690,1.310,25.03,0.875,bilinear\ninception_v3.tv_in1k,299,92.900,7.100,98.320,1.680,23.83,0.875,bicubic\nhgnetv2_b0.ssld_stage2_ft_in1k,224,92.890,7.110,98.670,1.330,6.00,0.965,bicubic\nresnet101.tv_in1k,224,92.890,7.110,98.650,1.350,44.55,0.875,bilinear\nresnetv2_34.ra4_e3600_r224_in1k,224,92.890,7.110,98.450,1.550,21.80,0.900,bicubic\nresmlp_12_224.fb_distilled_in1k,224,92.880,7.120,98.660,1.340,15.35,0.875,bicubic\ncoat_lite_tiny.in1k,224,92.880,7.120,98.650,1.350,5.72,0.900,bicubic\nconvnext_femto.d1_in1k,224,92.880,7.120,98.600,1.400,5.22,0.875,bicubic\nresnet101.a3_in1k,160,92.880,7.120,98.520,1.480,44.55,0.950,bicubic\ntf_efficientnet_cc_b0_8e.in1k,224,92.880,7.120,98.510,1.490,24.01,0.875,bicubic\ntf_efficientnet_cc_b0_4e.in1k,224,92.860,7.140,98.430,1.570,13.31,0.875,bicubic\nrexnet_100.nav_in1k,224,92.840,7.160,98.640,1.360,4.80,0.875,bicubic\ncs3darknet_focus_m.c2ns_in1k,256,92.820,7.180,98.600,1.400,9.30,0.887,bicubic\ntinynet_a.in1k,192,92.820,7.180,98.570,1.430,6.19,0.875,bicubic\ngcresnext26ts.ch_in1k,288,92.810,7.190,98.610,1.390,10.48,1.000,bicubic\nseresnext26t_32x4d.bt_in1k,224,92.800,7.200,98.540,1.460,16.81,0.875,bicubic\nres2net50_48w_2s.in1k,224,92.800,7.200,98.460,1.540,25.29,0.875,bilinear\nseresnext26ts.ch_in1k,256,92.790,7.210,98.600,1.400,10.39,0.900,bicubic\nconvnext_atto.d2_in1k,288,92.780,7.220,98.620,1.380,3.70,0.950,bicubic\neca_resnext26ts.ch_in1k,288,92.770,7.230,98.710,1.290,10.30,1.000,bicubic\nshvit_s3.in1k,224,92.760,7.240,98.550,1.450,14.25,0.875,bicubic\ncrossvit_9_dagger_240.in1k,240,92.760,7.240,98.500,1.500,8.78,0.875,bicubic\nhrnet_w18.ms_in1k,224,92.750,7.250,98.650,1.350,21.30,0.875,bilinear\ndensenet121.ra_in1k,288,92.750,7.250,98.630,1.370,7.98,0.950,bicubic\ndensenet201.tv_in1k,224,92.740,7.260,98.680,1.320,20.01,0.875,bicubic\nese_vovnet19b_dw.ra_in1k,288,92.740,7.260,98.660,1.340,6.54,0.950,bicubic\ncs3darknet_m.c2n
s_in1k,256,92.720,7.280,98.670,1.330,9.31,0.887,bicubic\ndla60.in1k,224,92.710,7.290,98.620,1.380,22.04,0.875,bilinear\ntf_efficientnet_lite2.in1k,260,92.710,7.290,98.550,1.450,6.09,0.890,bicubic\nresnet34.ra4_e3600_r224_in1k,224,92.700,7.300,98.570,1.430,21.80,0.900,bicubic\nresnet26t.ra2_in1k,256,92.680,7.320,98.600,1.400,16.01,0.940,bicubic\nresnet34d.ra2_in1k,224,92.680,7.320,98.450,1.550,21.82,0.875,bicubic\nhgnetv2_b0.ssld_stage1_in22k_in1k,224,92.670,7.330,98.680,1.320,6.00,0.965,bicubic\nlegacy_seresnet50.in1k,224,92.670,7.330,98.660,1.340,28.09,0.875,bilinear\ngmixer_24_224.ra3_in1k,224,92.670,7.330,98.240,1.760,24.72,0.875,bicubic\nmobilenetv2_120d.ra_in1k,224,92.660,7.340,98.560,1.440,5.83,0.875,bicubic\nrepvgg_a2.rvgg_in1k,224,92.660,7.340,98.530,1.470,28.21,0.875,bilinear\nmobileone_s2.apple_in1k,224,92.630,7.370,98.680,1.320,7.88,0.900,bilinear\nhardcorenas_d.miil_green_in1k,224,92.620,7.380,98.480,1.520,7.50,0.875,bilinear\ntf_efficientnet_b0.ap_in1k,224,92.620,7.380,98.410,1.590,5.29,0.875,bicubic\necaresnet50t.a3_in1k,160,92.610,7.390,98.630,1.370,25.57,0.950,bicubic\nregnetx_016.pycls_in1k,224,92.600,7.400,98.570,1.430,9.19,0.875,bicubic\nresnet34.a2_in1k,288,92.590,7.410,98.580,1.420,21.80,1.000,bicubic\nlegacy_seresnext26_32x4d.in1k,224,92.590,7.410,98.450,1.550,16.79,0.875,bicubic\nfastvit_t8.apple_dist_in1k,256,92.590,7.410,98.430,1.570,4.03,0.900,bicubic\nresnet26d.bt_in1k,288,92.580,7.420,98.640,1.360,16.01,0.950,bicubic\nresnet50.gluon_in1k,224,92.570,7.430,98.540,1.460,25.56,0.875,bicubic\nskresnet34.ra_in1k,224,92.570,7.430,98.510,1.490,22.28,0.875,bicubic\nmobilenetv1_125.ra4_e3600_r224_in1k,256,92.550,7.450,98.600,1.400,6.27,1.000,bicubic\nxcit_tiny_12_p16_224.fb_in1k,224,92.540,7.460,98.650,1.350,6.72,1.000,bicubic\npoolformer_s12.sail_in1k,224,92.520,7.480,98.390,1.610,11.92,0.900,bicubic\nselecsls42b.in1k,224,92.480,7.520,98.440,1.560,32.46,0.875,bicubic\nefficientnet_b0.ra_in1k,224,92.470,7.530,98.680,1.320,5.29,0.875,bicubic\nregnet
x_008.tv2_in1k,224,92.470,7.530,98.440,1.560,7.26,0.965,bicubic\nseresnext26d_32x4d.bt_in1k,224,92.460,7.540,98.540,1.460,16.81,0.875,bicubic\ngcresnext26ts.ch_in1k,256,92.460,7.540,98.500,1.500,10.48,0.900,bicubic\ngernet_s.idstcv_in1k,224,92.460,7.540,98.500,1.500,8.17,0.875,bilinear\neca_resnext26ts.ch_in1k,256,92.440,7.560,98.610,1.390,10.30,0.900,bicubic\nresnext26ts.ra2_in1k,288,92.430,7.570,98.390,1.610,10.30,1.000,bicubic\nrepghostnet_150.in1k,224,92.400,7.600,98.540,1.460,6.58,0.875,bicubic\nxcit_nano_12_p8_224.fb_dist_in1k,224,92.400,7.600,98.520,1.480,3.05,1.000,bicubic\ndensenetblur121d.ra_in1k,224,92.400,7.600,98.410,1.590,8.00,0.875,bicubic\ntf_efficientnet_b0.aa_in1k,224,92.390,7.610,98.480,1.520,5.29,0.875,bicubic\nseresnet50.a3_in1k,224,92.380,7.620,98.330,1.670,28.09,0.950,bicubic\nefficientnet_b1.ft_in1k,224,92.360,7.640,98.360,1.640,7.79,0.875,bicubic\nresnext50_32x4d.a3_in1k,160,92.350,7.650,98.370,1.630,25.03,0.950,bicubic\nmobilenetv1_100h.ra4_e3600_r224_in1k,256,92.350,7.650,98.260,1.740,5.28,0.950,bicubic\ndensenet169.tv_in1k,224,92.340,7.660,98.570,1.430,14.15,0.875,bicubic\nconvmixer_1024_20_ks9_p14.in1k,224,92.330,7.670,98.410,1.590,24.38,0.960,bicubic\nresnet34.bt_in1k,288,92.320,7.680,98.640,1.360,21.80,0.950,bicubic\nhardcorenas_c.miil_green_in1k,224,92.320,7.680,98.330,1.670,5.52,0.875,bilinear\ntf_efficientnet_b0.in1k,224,92.300,7.700,98.550,1.450,5.29,0.875,bicubic\ntf_efficientnet_lite1.in1k,240,92.300,7.700,98.520,1.480,5.42,0.882,bicubic\nresnetv2_18d.ra4_e3600_r224_in1k,288,92.290,7.710,98.410,1.590,11.71,1.000,bicubic\ndpn68.mx_in1k,224,92.280,7.720,98.630,1.370,12.61,0.875,bicubic\nresnet50d.a3_in1k,160,92.280,7.720,98.330,1.670,25.58,0.950,bicubic\nghostnetv2_130.in1k,224,92.270,7.730,98.370,1.630,8.96,0.875,bicubic\nmixnet_m.ft_in1k,224,92.270,7.730,98.370,1.630,5.01,0.875,bicubic\nghostnetv3_100.in1k,224,92.260,7.740,98.460,1.540,8.13,0.875,bicubic\ntf_efficientnetv2_b0.in1k,192,92.260,7.740,98.370,1.630,7.14,0.875,bicubic\
nresnext26ts.ra2_in1k,256,92.260,7.740,98.270,1.730,10.30,0.900,bicubic\nvit_small_patch32_224.augreg_in21k_ft_in1k,224,92.250,7.750,98.480,1.520,22.88,0.900,bicubic\nmobilenetv3_large_100.miil_in21k_ft_in1k,224,92.230,7.770,98.240,1.760,5.48,0.875,bilinear\nresnet26d.bt_in1k,224,92.220,7.780,98.470,1.530,16.01,0.875,bicubic\nconvnext_atto.d2_in1k,224,92.200,7.800,98.310,1.690,3.70,0.875,bicubic\ntf_mixnet_m.in1k,224,92.180,7.820,98.460,1.540,5.01,0.875,bicubic\nresnet18d.ra4_e3600_r224_in1k,288,92.180,7.820,98.350,1.650,11.71,1.000,bicubic\nconvnextv2_atto.fcmae_ft_in1k,224,92.180,7.820,98.340,1.660,3.71,0.875,bicubic\nresnet26.bt_in1k,288,92.160,7.840,98.550,1.450,16.00,0.950,bicubic\nresnet50.tv_in1k,224,92.150,7.850,98.390,1.610,25.56,0.875,bilinear\nxcit_nano_12_p16_384.fb_dist_in1k,384,92.140,7.860,98.490,1.510,3.05,1.000,bicubic\nresmlp_12_224.fb_in1k,224,92.130,7.870,98.570,1.430,15.35,0.875,bicubic\nefficientvit_m5.r224_in1k,224,92.130,7.870,98.500,1.500,12.47,0.875,bicubic\ntf_efficientnet_es.in1k,224,92.110,7.890,98.430,1.570,5.44,0.875,bicubic\nconvnext_atto_ols.a2_in1k,224,92.110,7.890,98.380,1.620,3.70,0.875,bicubic\nmobilenetv1_100.ra4_e3600_r224_in1k,256,92.080,7.920,98.350,1.650,4.23,0.950,bicubic\nmobilenetv1_125.ra4_e3600_r224_in1k,224,92.080,7.920,98.300,1.700,6.27,0.900,bicubic\nmobilenetv2_140.ra_in1k,224,92.080,7.920,98.270,1.730,6.11,0.875,bicubic\nese_vovnet19b_dw.ra_in1k,224,92.010,7.990,98.520,1.480,6.54,0.875,bicubic\nmobilevitv2_075.cvnets_in1k,256,92.010,7.990,98.320,1.680,2.87,0.888,bicubic\ndensenet121.ra_in1k,224,91.950,8.050,98.280,1.720,7.98,0.875,bicubic\nfastvit_t8.apple_in1k,256,91.940,8.060,98.410,1.590,4.03,0.900,bicubic\nhardcorenas_b.miil_green_in1k,224,91.940,8.060,98.340,1.660,5.18,0.875,bilinear\nvit_tiny_patch16_224.augreg_in21k_ft_in1k,224,91.940,8.060,98.330,1.670,5.72,0.900,bicubic\nresnet34.a1_in1k,224,91.930,8.070,98.390,1.610,21.80,0.950,bicubic\nrepghostnet_130.in1k,224,91.930,8.070,98.370,1.630,5.48,0.875,bicubic
\nmobilenetv4_conv_small.e3600_r256_in1k,320,91.920,8.080,98.240,1.760,3.77,1.000,bicubic\nregnety_008.pycls_in1k,224,91.890,8.110,98.420,1.580,6.26,0.875,bicubic\nefficientformerv2_s0.snap_dist_in1k,224,91.850,8.150,98.370,1.630,3.60,0.950,bicubic\nmixnet_s.ft_in1k,224,91.820,8.180,98.340,1.660,4.13,0.875,bicubic\nmobilenetv3_large_100.ra4_e3600_r224_in1k,256,91.810,8.190,98.450,1.550,5.48,1.000,bicubic\nmobileone_s1.apple_in1k,224,91.770,8.230,98.470,1.530,4.83,0.900,bilinear\nresnet34.a2_in1k,224,91.770,8.230,98.260,1.740,21.80,0.950,bicubic\nrepvgg_b0.rvgg_in1k,224,91.710,8.290,98.460,1.540,15.82,0.875,bilinear\nvit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k,384,91.710,8.290,98.430,1.570,6.36,1.000,bicubic\nefficientnet_es_pruned.in1k,224,91.710,8.290,98.400,1.600,5.44,0.875,bicubic\ntf_mixnet_s.in1k,224,91.690,8.310,98.220,1.780,4.13,0.875,bicubic\nresnetv2_18.ra4_e3600_r224_in1k,288,91.680,8.320,98.320,1.680,11.69,1.000,bicubic\nsemnasnet_100.rmsp_in1k,224,91.660,8.340,98.270,1.730,3.89,0.875,bicubic\nhardcorenas_a.miil_green_in1k,224,91.660,8.340,98.150,1.850,5.26,0.875,bilinear\nmobilenetv1_100h.ra4_e3600_r224_in1k,224,91.660,8.340,98.060,1.940,5.28,0.875,bicubic\nresnet50.a3_in1k,160,91.660,8.340,98.060,1.940,25.56,0.950,bicubic\nghostnetv2_100.in1k,224,91.630,8.370,98.280,1.720,6.16,0.875,bicubic\nfasternet_t1.in1k,224,91.620,8.380,98.120,1.880,7.60,1.000,bicubic\nregnety_006.pycls_in1k,224,91.600,8.400,98.420,1.580,6.06,0.875,bicubic\nregnety_004.tv2_in1k,224,91.590,8.410,98.290,1.710,4.34,0.965,bicubic\nedgenext_x_small.in1k,288,91.590,8.410,98.190,1.810,2.34,1.000,bicubic\nlevit_128s.fb_dist_in1k,224,91.580,8.420,98.420,1.580,7.78,0.900,bicubic\ninception_next_atto.sail_in1k,224,91.560,8.440,98.340,1.660,4.16,0.875,bicubic\nmobilenetv3_rw.rmsp_in1k,224,91.560,8.440,98.270,1.730,5.48,0.875,bicubic\nlevit_conv_128s.fb_dist_in1k,224,91.540,8.460,98.430,1.570,7.78,0.900,bicubic\nlegacy_seresnet34.in1k,224,91.480,8.520,98.220,1.780,21.96,0.875,bilinear\nmobilene
tv3_large_100.ra_in1k,224,91.470,8.530,98.320,1.680,5.48,0.875,bicubic\nresnet26.bt_in1k,224,91.460,8.540,98.270,1.730,16.00,0.875,bicubic\ndensenet121.tv_in1k,224,91.440,8.560,98.240,1.760,7.98,0.875,bicubic\nmobilenetv3_large_100.ra4_e3600_r224_in1k,224,91.430,8.570,98.410,1.590,5.48,0.950,bicubic\nedgenext_x_small.in1k,256,91.430,8.570,98.160,1.840,2.34,0.900,bicubic\ntf_mobilenetv3_large_100.in1k,224,91.410,8.590,98.230,1.770,5.48,0.875,bilinear\nmobilenetv1_100.ra4_e3600_r224_in1k,224,91.360,8.640,98.130,1.870,4.23,0.875,bicubic\ntf_efficientnet_lite0.in1k,224,91.360,8.640,98.090,1.910,4.65,0.875,bicubic\nmobilenetv2_110d.ra_in1k,224,91.320,8.680,98.150,1.850,4.52,0.875,bicubic\nefficientnet_lite0.ra_in1k,224,91.270,8.730,98.230,1.770,4.65,0.875,bicubic\nmobilevit_xs.cvnets_in1k,256,91.270,8.730,98.220,1.780,2.32,0.900,bicubic\nresnetv2_18d.ra4_e3600_r224_in1k,224,91.270,8.730,98.140,1.860,11.71,0.900,bicubic\nseresnet50.a3_in1k,160,91.270,8.730,97.950,2.050,28.09,0.950,bicubic\ndla34.in1k,224,91.260,8.740,98.160,1.840,15.74,0.875,bilinear\nmnasnet_100.rmsp_in1k,224,91.260,8.740,98.030,1.970,4.38,0.875,bicubic\nfbnetc_100.rmsp_in1k,224,91.250,8.750,97.830,2.170,5.57,0.875,bilinear\nresnet34.bt_in1k,224,91.230,8.770,98.210,1.790,21.80,0.875,bicubic\nregnetx_008.pycls_in1k,224,91.220,8.780,98.380,1.620,7.26,0.875,bicubic\nshvit_s2.in1k,224,91.210,8.790,98.240,1.760,11.48,0.875,bicubic\nhrnet_w18_small_v2.ms_in1k,224,91.190,8.810,98.350,1.650,15.60,0.875,bilinear\nxcit_nano_12_p8_224.fb_in1k,224,91.190,8.810,98.280,1.720,3.05,1.000,bicubic\nrepvgg_a1.rvgg_in1k,224,91.160,8.840,98.190,1.810,14.09,0.875,bilinear\nstarnet_s2.in1k,224,91.150,8.850,98.320,1.680,3.68,0.875,bicubic\nresnest14d.gluon_in1k,224,91.140,8.860,98.350,1.650,10.61,0.875,bilinear\ndeit_tiny_distilled_patch16_224.fb_in1k,224,91.130,8.870,98.270,1.730,5.91,0.900,bicubic\ntinynet_b.in1k,188,91.120,8.880,98.080,1.920,3.73,0.875,bicubic\nregnetx_004_tv.tv2_in1k,224,91.120,8.880,98.070,1.930,5.50,0.965
,bicubic\nmixer_b16_224.goog_in21k_ft_in1k,224,91.120,8.880,97.410,2.590,59.88,0.875,bicubic\nresnet34.gluon_in1k,224,91.110,8.890,98.180,1.820,21.80,0.875,bicubic\nrepghostnet_111.in1k,224,91.110,8.890,98.050,1.950,4.54,0.875,bicubic\nmobilenetv4_conv_small.e2400_r224_in1k,256,91.100,8.900,98.080,1.920,3.77,0.950,bicubic\ncrossvit_9_240.in1k,240,91.080,8.920,98.310,1.690,8.55,0.875,bicubic\nresnet18.fb_swsl_ig1b_ft_in1k,224,91.060,8.940,98.200,1.800,11.69,0.875,bilinear\nvgg19_bn.tv_in1k,224,90.980,9.020,98.110,1.890,143.68,0.875,bilinear\nresnet18d.ra4_e3600_r224_in1k,224,90.910,9.090,98.190,1.810,11.71,0.900,bicubic\nswiftformer_xs.dist_in1k,224,90.840,9.160,98.000,2.000,3.48,0.950,bicubic\nresnet18d.ra2_in1k,288,90.770,9.230,98.170,1.830,11.71,0.950,bicubic\nregnetx_006.pycls_in1k,224,90.770,9.230,98.090,1.910,6.20,0.875,bicubic\nefficientvit_m4.r224_in1k,224,90.750,9.250,98.090,1.910,8.80,0.875,bicubic\nregnety_004.pycls_in1k,224,90.750,9.250,98.060,1.940,4.34,0.875,bicubic\npit_ti_distilled_224.in1k,224,90.740,9.260,98.240,1.760,5.10,0.900,bicubic\nstarnet_s1.in1k,224,90.730,9.270,97.830,2.170,2.87,0.875,bicubic\nresnet18.fb_ssl_yfcc100m_ft_in1k,224,90.690,9.310,98.040,1.960,11.69,0.875,bilinear\nrepghostnet_100.in1k,224,90.680,9.320,98.110,1.890,4.07,0.875,bicubic\nspnasnet_100.rmsp_in1k,224,90.670,9.330,97.960,2.040,4.42,0.875,bilinear\nconvit_tiny.fb_in1k,224,90.580,9.420,98.200,1.800,5.71,0.875,bicubic\nvit_base_patch32_224.augreg_in1k,224,90.580,9.420,97.740,2.260,88.22,0.900,bicubic\nmobilenetv4_conv_small.e3600_r256_in1k,256,90.570,9.430,98.060,1.940,3.77,0.950,bicubic\ncs3darknet_focus_s.ra4_e3600_r256_in1k,320,90.570,9.430,97.980,2.020,3.27,1.000,bicubic\ncrossvit_tiny_240.in1k,240,90.540,9.460,97.960,2.040,7.01,0.875,bicubic\nmobilenetv4_conv_small.e2400_r224_in1k,224,90.540,9.460,97.820,2.180,3.77,0.875,bicubic\nvgg16_bn.tv_in1k,224,90.520,9.480,98.000,2.000,138.37,0.875,bilinear\nghostnet_100.in1k,224,90.470,9.530,97.940,2.060,5.18,0.875,bicubic\np
it_ti_224.in1k,224,90.440,9.560,98.000,2.000,4.85,0.900,bicubic\nconvnext_zepto_rms_ols.ra4_e3600_r224_in1k,224,90.440,9.560,97.680,2.320,2.16,0.900,bicubic\ntf_mobilenetv3_large_075.in1k,224,90.340,9.660,97.850,2.150,3.99,0.875,bilinear\nhrnet_w18_small.gluon_in1k,224,90.310,9.690,97.750,2.250,13.19,0.875,bicubic\nresnet34.a3_in1k,224,90.300,9.700,97.890,2.110,21.80,0.950,bicubic\nresnet34.tv_in1k,224,90.290,9.710,97.970,2.030,21.80,0.875,bilinear\nsemnasnet_075.rmsp_in1k,224,90.280,9.720,97.930,2.070,2.91,0.875,bicubic\nmobilenetv4_conv_small.e1200_r224_in1k,256,90.270,9.730,98.100,1.900,3.77,0.950,bicubic\nxcit_nano_12_p16_224.fb_dist_in1k,224,90.200,9.800,97.760,2.240,3.05,1.000,bicubic\nresnet18.a1_in1k,288,90.190,9.810,97.780,2.220,11.69,1.000,bicubic\nresnetv2_18.ra4_e3600_r224_in1k,224,90.180,9.820,97.730,2.270,11.69,0.900,bicubic\nskresnet18.ra_in1k,224,90.170,9.830,97.780,2.220,11.96,0.875,bicubic\nmobilenetv4_conv_small.e1200_r224_in1k,224,90.120,9.880,97.980,2.020,3.77,0.875,bicubic\nefficientvit_m3.r224_in1k,224,90.050,9.950,97.870,2.130,6.90,0.875,bicubic\nconvnext_zepto_rms.ra4_e3600_r224_in1k,224,90.040,9.960,97.930,2.070,2.16,0.875,bicubic\nresnet18d.ra2_in1k,224,90.020,9.980,97.840,2.160,11.71,0.875,bicubic\ncs3darknet_focus_s.ra4_e3600_r256_in1k,256,89.950,10.050,97.630,2.370,3.27,0.887,bicubic\nvit_base_patch32_224.sam_in1k,224,89.890,10.110,97.600,2.400,88.22,0.900,bicubic\nhrnet_w18_small.ms_in1k,224,89.880,10.120,97.890,2.110,13.19,0.875,bilinear\nmobilenetv2_100.ra_in1k,224,89.860,10.140,97.840,2.160,3.50,0.875,bicubic\nrepvgg_a0.rvgg_in1k,224,89.810,10.190,97.790,2.210,9.11,0.875,bilinear\nedgenext_xx_small.in1k,288,89.810,10.190,97.500,2.500,1.33,1.000,bicubic\nshvit_s1.in1k,224,89.770,10.230,97.730,2.270,6.33,0.875,bicubic\nvgg19.tv_in1k,224,89.700,10.300,97.530,2.470,143.67,0.875,bilinear\ndeit_tiny_patch16_224.fb_in1k,224,89.640,10.360,97.960,2.040,5.72,0.900,bicubic\nresnet18.a2_in1k,288,89.540,10.460,97.660,2.340,11.69,1.000,bicubic\nr
egnetx_004.pycls_in1k,224,89.490,10.510,97.820,2.180,5.16,0.875,bicubic\nrepghostnet_080.in1k,224,89.450,10.550,97.470,2.530,3.28,0.875,bicubic\nvit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k,224,89.410,10.590,97.700,2.300,6.34,0.900,bicubic\nvgg16.tv_in1k,224,89.400,10.600,97.500,2.500,138.36,0.875,bilinear\nlegacy_seresnet18.in1k,224,89.290,10.710,97.710,2.290,11.78,0.875,bicubic\nedgenext_xx_small.in1k,256,89.250,10.750,97.250,2.750,1.33,0.900,bicubic\nresnet14t.c3_in1k,224,89.240,10.760,97.450,2.550,10.08,0.950,bicubic\nvgg13_bn.tv_in1k,224,89.230,10.770,97.520,2.480,133.05,0.875,bilinear\ntf_mobilenetv3_large_minimal_100.in1k,224,89.200,10.800,97.290,2.710,3.92,0.875,bilinear\nresnet18.a1_in1k,224,89.150,10.850,97.330,2.670,11.69,0.950,bicubic\nmobilevitv2_050.cvnets_in1k,256,89.060,10.940,97.590,2.410,1.37,0.888,bicubic\npvt_v2_b0.in1k,224,88.980,11.020,97.680,2.320,3.67,0.900,bicubic\nefficientvit_m2.r224_in1k,224,88.940,11.060,97.360,2.640,4.19,0.875,bicubic\nlcnet_100.ra2_in1k,224,88.930,11.070,97.370,2.630,2.95,0.875,bicubic\nxcit_nano_12_p16_224.fb_in1k,224,88.900,11.100,97.440,2.560,3.05,1.000,bicubic\nmobileone_s0.apple_in1k,224,88.770,11.230,97.230,2.770,5.29,0.875,bilinear\nfasternet_t0.in1k,224,88.710,11.290,97.080,2.920,3.91,1.000,bicubic\nresnet18.gluon_in1k,224,88.690,11.310,97.110,2.890,11.69,0.875,bicubic\nresnet34.a3_in1k,160,88.650,11.350,97.380,2.620,21.80,0.950,bicubic\nresnet14t.c3_in1k,176,88.470,11.530,96.950,3.050,10.08,0.875,bicubic\ntinynet_c.in1k,184,88.450,11.550,97.270,2.730,2.46,0.875,bicubic\nvgg11_bn.tv_in1k,224,88.410,11.590,97.280,2.720,132.87,0.875,bilinear\nefficientvit_b0.r224_in1k,224,88.250,11.750,96.880,3.120,3.41,0.950,bicubic\nresnet18.a2_in1k,224,88.210,11.790,97.180,2.820,11.69,0.950,bicubic\nregnety_002.pycls_in1k,224,88.200,11.800,97.340,2.660,3.16,0.875,bicubic\nresnet18.tv_in1k,224,88.200,11.800,97.110,2.890,11.69,0.875,bilinear\nmobilevit_xxs.cvnets_in1k,256,87.970,12.030,97.200,2.800,1.27,0.900,bicubic\nvgg13.tv_in1
k,224,87.560,12.440,97.120,2.880,133.05,0.875,bilinear\nregnetx_002.pycls_in1k,224,87.350,12.650,96.990,3.010,2.68,0.875,bicubic\nvgg11.tv_in1k,224,87.340,12.660,97.130,2.870,132.86,0.875,bilinear\nefficientvit_m1.r224_in1k,224,87.190,12.810,97.000,3.000,2.98,0.875,bicubic\nrepghostnet_058.in1k,224,87.190,12.810,96.710,3.290,2.55,0.875,bicubic\ndla60x_c.in1k,224,87.110,12.890,97.150,2.850,1.32,0.875,bilinear\nresnet18.a3_in1k,224,87.050,12.950,96.670,3.330,11.69,0.950,bicubic\nlcnet_075.ra2_in1k,224,87.010,12.990,96.550,3.450,2.36,0.875,bicubic\nmixer_l16_224.goog_in21k_ft_in1k,224,87.000,13.000,94.020,5.980,208.20,0.875,bicubic\nresnet10t.c3_in1k,224,86.710,13.290,96.730,3.270,5.44,0.950,bicubic\nmobilenetv3_small_100.lamb_in1k,224,86.190,13.810,96.450,3.550,2.54,0.875,bicubic\ntf_mobilenetv3_small_100.in1k,224,86.010,13.990,96.390,3.610,2.54,0.875,bilinear\nrepghostnet_050.in1k,224,85.560,14.440,96.110,3.890,2.31,0.875,bicubic\nresnet10t.c3_in1k,176,85.490,14.510,96.290,3.710,5.44,0.875,bicubic\nmnasnet_small.lamb_in1k,224,85.490,14.510,95.980,4.020,2.03,0.875,bicubic\ndla46x_c.in1k,224,85.480,14.520,96.460,3.540,1.07,0.875,bilinear\ntinynet_d.in1k,152,85.480,14.520,96.020,3.980,2.34,0.875,bicubic\nresnet18.a3_in1k,160,85.040,14.960,95.940,4.060,11.69,0.950,bicubic\nmobilenetv2_050.lamb_in1k,224,84.940,15.060,95.670,4.330,1.97,0.875,bicubic\ndla46_c.in1k,224,84.740,15.260,96.210,3.790,1.30,0.875,bilinear\nmobilenetv4_conv_small_050.e3000_r224_in1k,256,84.670,15.330,96.140,3.860,2.24,0.950,bicubic\ntf_mobilenetv3_small_075.in1k,224,84.520,15.480,95.790,4.210,2.04,0.875,bilinear\nmobilenetv4_conv_small_050.e3000_r224_in1k,224,84.230,15.770,95.720,4.280,2.24,0.875,bicubic\nmobilenetv3_small_075.lamb_in1k,224,84.140,15.860,95.500,4.500,2.04,0.875,bicubic\nefficientvit_m0.r224_in1k,224,83.340,16.660,95.630,4.370,2.35,0.875,bicubic\nlcnet_050.ra2_in1k,224,83.130,16.870,94.950,5.050,1.88,0.875,bicubic\ntf_mobilenetv3_small_minimal_100.in1k,224,82.640,17.360,94.990,5.010,
2.04,0.875,bilinear\ntinynet_e.in1k,106,79.840,20.160,94.000,6.000,2.04,0.875,bicubic\ntest_vit3.r160_in1k,160,79.050,20.950,93.770,6.230,0.93,0.950,bicubic\nmobilenetv3_small_050.lamb_in1k,224,78.090,21.910,93.010,6.990,1.59,0.875,bicubic\ntest_convnext2.r160_in1k,160,75.860,24.140,92.810,7.190,0.48,0.950,bicubic\ntest_convnext3.r160_in1k,160,75.640,24.360,92.860,7.140,0.47,0.950,bicubic\ntest_convnext.r160_in1k,160,70.430,29.570,90.960,9.040,0.27,0.950,bicubic\ntest_nfnet.r160_in1k,160,70.180,29.820,89.870,10.130,0.38,0.950,bicubic\ntest_efficientnet_evos.r160_in1k,160,68.060,31.940,88.250,11.750,0.36,0.950,bicubic\ntest_efficientnet.r160_in1k,160,67.620,32.380,88.500,11.500,0.36,0.950,bicubic\ntest_byobnet.r160_in1k,160,67.520,32.480,88.710,11.290,0.46,0.950,bicubic\ntest_efficientnet_ln.r160_in1k,160,66.030,33.970,87.680,12.320,0.36,0.950,bicubic\ntest_vit2.r160_in1k,160,65.760,34.240,88.210,11.790,0.46,0.950,bicubic\ntest_efficientnet_gn.r160_in1k,160,65.760,34.240,87.860,12.140,0.36,0.950,bicubic\ntest_resnet.r160_in1k,160,64.600,35.400,86.960,13.040,0.47,0.950,bilinear\ntest_vit.r160_in1k,160,63.980,36.020,87.170,12.830,0.37,0.950,bicubic\n"
  },
  {
    "path": "results/results-imagenet-r.csv",
    "content": "model,img_size,top1,top1_err,top5,top5_err,param_count,crop_pct,interpolation,top1_diff,top5_diff,rank_diff\nvit_so400m_patch14_siglip_378.webli_ft_in1k,378,90.847,9.153,97.213,2.787,429.38,1.000,bicubic,-6.973,-2.547,+10\nconvnext_xxlarge.clip_laion2b_soup_ft_in1k,256,90.617,9.383,97.913,2.087,846.47,1.000,bicubic,-7.153,-1.897,+17\neva_giant_patch14_336.clip_ft_in1k,336,90.517,9.483,97.213,2.787,\"1,013.01\",1.000,bicubic,-7.343,-2.687,+4\neva02_large_patch14_448.mim_m38m_ft_in1k,448,90.477,9.523,97.277,2.723,305.08,1.000,bicubic,-7.343,-2.573,+6\neva02_large_patch14_448.mim_m38m_ft_in22k_in1k,448,90.243,9.757,97.177,2.823,305.08,1.000,bicubic,-7.787,-2.713,-3\nvit_so400m_patch14_siglip_gap_378.webli_ft_in1k,378,90.240,9.760,96.977,3.023,414.14,1.000,bicubic,-7.480,-2.793,+17\neva_giant_patch14_224.clip_ft_in1k,224,89.837,10.163,97.023,2.977,\"1,012.56\",0.900,bicubic,-7.743,-2.687,+29\neva_giant_patch14_336.m30m_ft_in22k_in1k,336,88.620,11.380,95.947,4.053,\"1,013.01\",1.000,bicubic,-9.400,-3.953,-5\neva_giant_patch14_560.m30m_ft_in22k_in1k,560,88.413,11.587,95.633,4.367,\"1,014.45\",1.000,bicubic,-9.577,-4.227,-5\nregnety_1280.swag_ft_in1k,384,88.313,11.687,96.490,3.510,644.81,1.000,bicubic,-9.457,-3.320,+5\neva02_large_patch14_448.mim_in22k_ft_in22k_in1k,448,87.833,12.167,95.503,4.497,305.08,1.000,bicubic,-10.327,-4.377,-10\neva02_large_patch14_448.mim_in22k_ft_in1k,448,87.530,12.470,95.757,4.243,305.08,1.000,bicubic,-10.330,-4.033,-4\nregnety_1280.swag_lc_in1k,224,86.943,13.057,95.737,4.263,644.81,0.965,bicubic,-10.477,-3.993,+39\nconvnext_large_mlp.clip_laion2b_augreg_ft_in1k_384,384,84.573,15.427,94.407,5.593,200.13,1.000,bicubic,-12.817,-5.333,+42\nvit_large_patch14_clip_336.openai_ft_in12k_in1k,336,83.963,16.037,93.907,6.093,304.53,1.000,bicubic,-13.657,-5.823,+16\nvit_large_patch14_clip_336.laion2b_ft_in1k,336,83.650,16.350,93.487,6.513,304.53,1.000,bicubic,-13.590,-6.273,+72\neva_large_patch14_336.in22k_ft_in1k,336,83.523,16.477,93.153,6.
847,304.53,1.000,bicubic,-14.297,-6.677,-8\nvit_huge_patch14_clip_224.laion2b_ft_in1k,224,83.273,16.727,93.123,6.877,632.05,1.000,bicubic,-13.837,-6.577,+98\nvit_huge_patch14_clip_336.laion2b_ft_in12k_in1k,336,83.070,16.930,92.860,7.140,632.46,1.000,bicubic,-14.550,-6.900,+11\nvit_huge_patch14_clip_224.laion2b_ft_in12k_in1k,224,82.963,17.037,92.657,7.343,632.05,1.000,bicubic,-14.377,-7.103,+45\nbeit3_large_patch16_224.in22k_ft_in1k,224,82.740,17.260,92.880,7.120,304.57,1.000,bicubic,-14.710,-6.840,+26\nvit_large_patch14_clip_224.openai_ft_in1k,224,82.397,17.603,92.967,7.033,304.20,1.000,bicubic,-15.053,-6.783,+26\nconvnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_384,384,82.280,17.720,92.437,7.563,200.13,1.000,bicubic,-15.190,-7.323,+20\nvit_large_patch14_clip_224.laion2b_ft_in1k,224,81.697,18.303,92.240,7.760,304.20,1.000,bicubic,-15.333,-7.440,+109\nconvnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_320,320,81.467,18.533,92.057,7.943,200.13,1.000,bicubic,-15.823,-7.683,+52\nregnety_320.swag_lc_in1k,224,81.460,18.540,93.313,6.687,145.05,0.965,bicubic,-15.370,-6.417,+152\neva_large_patch14_196.in22k_ft_in1k,196,81.300,18.700,91.537,8.463,304.14,1.000,bicubic,-16.230,-8.253,+13\nregnety_320.swag_ft_in1k,384,81.290,18.710,92.727,7.273,145.05,1.000,bicubic,-16.100,-6.963,+26\nbeit3_large_patch16_224.indomain_in22k_ft_in1k,224,81.220,18.780,91.577,8.423,304.57,1.000,bicubic,-16.170,-8.183,+28\nconvnext_large_mlp.clip_laion2b_augreg_ft_in1k,256,80.990,19.010,91.943,8.057,200.13,1.000,bicubic,-16.140,-7.697,+84\neva_large_patch14_336.in22k_ft_in22k_in1k,336,80.093,19.907,89.333,10.667,304.53,1.000,bicubic,-17.807,-10.547,-26\nresnext101_32x32d.fb_wsl_ig1b_ft_in1k,224,79.517,20.483,89.240,10.760,468.53,0.875,bilinear,-17.273,-10.470,+159\nresnext101_32x16d.fb_wsl_ig1b_ft_in1k,224,78.897,21.103,88.460,11.540,194.03,0.875,bilinear,-17.583,-10.930,+241\nvit_large_patch14_clip_224.openai_ft_in12k_in1k,224,78.727,21.273,89.000,11.000,304.20,1.000,bicubic,-18.913,-10.730,-5\nev
a_large_patch14_196.in22k_ft_in22k_in1k,196,78.510,21.490,88.323,11.677,304.14,1.000,bicubic,-19.100,-11.487,-3\nvit_large_patch14_clip_336.laion2b_ft_in12k_in1k,336,78.503,21.497,88.540,11.460,304.53,1.000,bicubic,-18.957,-11.240,+9\nvit_large_patch14_clip_224.laion2b_ft_in12k_in1k,224,78.293,21.707,88.787,11.213,304.20,1.000,bicubic,-19.097,-10.943,+18\nregnety_160.swag_lc_in1k,224,78.233,21.767,91.703,8.297,83.59,0.965,bicubic,-18.197,-7.737,+251\nregnety_160.swag_ft_in1k,384,77.717,22.283,90.713,9.287,83.59,1.000,bicubic,-19.463,-9.037,+65\neva02_base_patch14_448.mim_in22k_ft_in1k,448,77.533,22.467,89.323,10.677,87.12,1.000,bicubic,-20.177,-10.437,-16\neva02_base_patch14_448.mim_in22k_ft_in22k_in1k,448,77.423,22.577,88.423,11.577,87.12,1.000,bicubic,-20.177,-11.397,-8\ntf_efficientnet_l2.ns_jft_in1k_475,475,76.350,23.650,88.517,11.483,480.31,0.936,bicubic,-21.420,-11.333,-22\nbeitv2_large_patch16_224.in1k_ft_in22k_in1k,224,76.310,23.690,87.050,12.950,304.43,0.950,bicubic,-21.450,-12.770,-21\nresnext101_32x16d.fb_swsl_ig1b_ft_in1k,224,76.217,23.783,87.730,12.270,194.03,0.875,bilinear,-20.043,-11.670,+297\nconvnextv2_huge.fcmae_ft_in22k_in1k_384,384,75.970,24.030,86.617,13.383,660.29,1.000,bicubic,-21.890,-13.153,-39\nresnext101_32x8d.fb_wsl_ig1b_ft_in1k,224,75.883,24.117,86.240,13.760,88.79,0.875,bilinear,-20.067,-13.160,+407\nconvnextv2_huge.fcmae_ft_in22k_in1k_512,512,75.840,24.160,86.933,13.067,660.29,1.000,bicubic,-21.970,-12.857,-35\nresnext101_32x8d.fb_swsl_ig1b_ft_in1k,224,75.573,24.427,87.047,12.953,88.79,0.875,bilinear,-20.697,-12.443,+289\nconvnext_base.clip_laion2b_augreg_ft_in12k_in1k_384,384,75.203,24.797,88.480,11.520,88.59,1.000,bicubic,-21.857,-11.190,+77\nconvnext_base.clip_laiona_augreg_ft_in1k_384,384,75.163,24.837,88.520,11.480,88.59,1.000,bicubic,-21.697,-11.070,+121\ntf_efficientnet_l2.ns_jft_in1k,800,74.590,25.410,87.483,12.517,480.31,0.960,bicubic,-23.180,-12.407,-37\nconvnext_base.clip_laion2b_augreg_ft_in12k_in1k,256,73.703,26.297,87.303
,12.697,88.59,1.000,bicubic,-23.097,-12.337,+133\nconvnext_base.clip_laion2b_augreg_ft_in1k,256,73.447,26.553,87.200,12.800,88.59,1.000,bicubic,-23.133,-12.440,+186\nbeit_large_patch16_384.in22k_ft_in22k_in1k,384,73.187,26.813,84.807,15.193,305.00,1.000,bicubic,-24.623,-15.053,-41\nbeit_large_patch16_512.in22k_ft_in22k_in1k,512,73.047,26.953,84.937,15.063,305.67,1.000,bicubic,-24.723,-14.923,-37\nresnext101_32x4d.fb_swsl_ig1b_ft_in1k,224,72.703,27.297,85.193,14.807,44.18,0.875,bilinear,-23.377,-14.327,+342\nmaxvit_xlarge_tf_512.in21k_ft_in1k,512,71.903,28.097,83.030,16.970,475.77,1.000,bicubic,-25.857,-16.760,-36\nmaxvit_xlarge_tf_384.in21k_ft_in1k,384,71.777,28.223,82.813,17.187,475.32,1.000,bicubic,-25.993,-16.997,-41\nvit_so150m2_patch16_reg1_gap_448.sbb_e200_in12k_ft_in1k,448,71.170,28.830,83.153,16.847,136.50,1.000,bicubic,-26.380,-16.627,-21\nbeit_large_patch16_224.in22k_ft_in22k_in1k,224,71.027,28.973,83.347,16.653,304.43,0.900,bicubic,-26.473,-16.333,-19\ndeit3_huge_patch14_224.fb_in22k_ft_in1k,224,70.843,29.157,82.207,17.793,632.13,1.000,bicubic,-26.407,-17.513,+23\nvit_so150m2_patch16_reg1_gap_384.sbb_e200_in12k_ft_in1k,384,70.830,29.170,82.657,17.343,136.33,1.000,bicubic,-26.720,-17.093,-23\ndeit3_large_patch16_384.fb_in22k_ft_in1k,384,70.620,29.380,82.403,17.597,304.76,1.000,bicubic,-26.960,-17.307,-28\nvit_base_patch16_clip_384.laion2b_ft_in1k,384,70.597,29.403,83.703,16.297,86.86,1.000,bicubic,-26.323,-15.967,+92\nbeitv2_large_patch16_224.in1k_ft_in1k,224,70.440,29.560,83.413,16.587,304.43,0.950,bicubic,-26.850,-16.327,+11\ncaformer_b36.sail_in22k_ft_in1k_384,384,70.433,29.567,82.413,17.587,98.75,1.000,bicubic,-27.217,-17.447,-40\nmaxvit_base_tf_512.in21k_ft_in1k,512,70.433,29.567,81.620,18.380,119.88,1.000,bicubic,-27.337,-18.230,-50\nmaxvit_large_tf_512.in21k_ft_in1k,512,70.357,29.643,81.660,18.340,212.33,1.000,bicubic,-27.303,-18.070,-42\nmaxvit_large_tf_384.in21k_ft_in1k,384,70.007,29.993,80.997,19.003,212.03,1.000,bicubic,-27.663,-18.823,-44\ndeit
3_large_patch16_224.fb_in22k_ft_in1k,224,69.727,30.273,81.200,18.800,304.37,1.000,bicubic,-27.593,-18.400,0\nmaxvit_base_tf_384.in21k_ft_in1k,384,69.597,30.403,80.717,19.283,119.65,1.000,bicubic,-27.963,-19.043,-34\nresnext50_32x4d.fb_swsl_ig1b_ft_in1k,224,69.107,30.893,82.930,17.070,25.03,0.875,bilinear,-26.553,-16.350,+482\nmaxvit_rmlp_base_rw_384.sw_in12k_ft_in1k,384,68.967,31.033,79.877,20.123,116.14,1.000,bicubic,-28.353,-19.803,-4\nvit_so150m2_patch16_reg1_gap_256.sbb_e200_in12k_ft_in1k,256,68.920,31.080,81.023,18.977,136.06,1.000,bicubic,-28.330,-18.737,+9\nvit_base_patch16_clip_224.laion2b_ft_in1k,224,68.703,31.297,82.397,17.603,86.57,1.000,bicubic,-27.627,-17.083,+247\nconvnextv2_large.fcmae_ft_in22k_in1k_384,384,68.530,31.470,80.987,19.013,197.96,1.000,bicubic,-29.120,-18.813,-48\nresnet50.fb_swsl_ig1b_ft_in1k,224,68.380,31.620,83.387,16.613,25.56,0.875,bilinear,-26.870,-15.923,+600\ncaformer_b36.sail_in22k_ft_in1k,224,68.327,31.673,80.650,19.350,98.75,1.000,bicubic,-29.053,-19.180,-20\nhgnetv2_b6.ssld_stage1_in22k_in1k,224,68.070,31.930,81.840,18.160,75.26,0.965,bicubic,-28.960,-17.770,+55\nmaxxvitv2_rmlp_base_rw_384.sw_in12k_ft_in1k,384,68.010,31.990,80.620,19.380,116.09,1.000,bicubic,-29.460,-19.140,-36\nvit_so150m_patch16_reg4_gap_384.sbb_e250_in12k_ft_in1k,384,67.980,32.020,79.923,20.077,134.42,1.000,bicubic,-29.450,-19.857,-31\nconvnext_xlarge.fb_in22k_ft_in1k_384,384,67.780,32.220,80.127,19.873,350.20,1.000,bicubic,-29.800,-19.653,-48\nhgnetv2_b6.ssld_stage2_ft_in1k,224,67.727,32.273,81.730,18.270,75.26,0.965,bicubic,-29.143,-17.780,+85\nhgnetv2_b5.ssld_stage2_ft_in1k,224,67.587,32.413,81.003,18.997,39.57,0.965,bicubic,-28.953,-18.657,+168\nswinv2_large_window12to24_192to384.ms_in22k_ft_in1k,384,67.527,32.473,80.147,19.853,196.74,1.000,bicubic,-29.773,-19.623,-11\nvit_base_patch16_clip_384.openai_ft_in1k,384,67.263,32.737,81.787,18.213,86.86,1.000,bicubic,-29.557,-17.843,+94\nhgnetv2_b5.ssld_stage1_in22k_in1k,224,67.210,32.790,80.533,19.467,39.57,0.
965,bicubic,-29.320,-19.087,+167\nvit_large_patch16_384.augreg_in21k_ft_in1k,384,67.123,32.877,78.720,21.280,304.72,1.000,bicubic,-30.307,-21.040,-39\ntf_efficientnet_b7.ns_jft_in1k,600,67.023,32.977,81.087,18.913,66.35,0.949,bicubic,-30.177,-18.613,+12\nconvnextv2_base.fcmae_ft_in22k_in1k_384,384,67.003,32.997,79.720,20.280,88.72,1.000,bicubic,-30.367,-19.990,-31\nmambaout_base_plus_rw.sw_e150_r384_in12k_ft_in1k,384,66.997,33.003,80.237,19.763,101.66,1.000,bicubic,-30.433,-19.543,-40\ncoatnet_rmlp_2_rw_384.sw_in12k_ft_in1k,384,66.517,33.483,78.273,21.727,73.88,1.000,bicubic,-30.843,-21.507,-30\nconvnext_large.fb_in22k_ft_in1k_384,384,66.510,33.490,79.427,20.573,197.77,1.000,bicubic,-30.820,-20.333,-25\nbeit3_base_patch16_224.in22k_ft_in1k,224,66.493,33.507,80.937,19.063,86.66,1.000,bicubic,-29.987,-18.663,+178\nconvformer_b36.sail_in22k_ft_in1k_384,384,66.463,33.537,78.993,21.007,99.88,1.000,bicubic,-30.987,-20.697,-49\nmaxvit_rmlp_base_rw_224.sw_in12k_ft_in1k,224,66.340,33.660,77.857,22.143,116.14,0.950,bicubic,-30.750,-21.753,+22\nvit_so150m_patch16_reg4_gap_256.sbb_e250_in12k_ft_in1k,256,66.327,33.673,78.963,21.037,134.13,0.950,bicubic,-30.693,-20.747,+42\nhgnetv2_b6.ssld_stage2_ft_in1k,288,66.313,33.687,80.460,19.540,75.26,1.000,bicubic,-30.787,-19.280,+19\nhgnetv2_b6.ssld_stage1_in22k_in1k,288,66.253,33.747,80.503,19.497,75.26,1.000,bicubic,-30.827,-19.207,+21\nconvnextv2_large.fcmae_ft_in22k_in1k,224,66.023,33.977,78.963,21.037,197.96,0.875,bicubic,-31.247,-20.747,-21\nswin_large_patch4_window12_384.ms_in22k_ft_in1k,384,65.960,34.040,79.243,20.757,196.74,1.000,bicubic,-31.220,-20.537,+6\nvit_base_patch16_clip_384.laion2b_ft_in12k_in1k,384,65.927,34.073,78.683,21.317,86.86,1.000,bicubic,-31.303,-21.077,-10\nvit_base_patch16_clip_224.openai_ft_in1k,224,65.883,34.117,80.870,19.130,86.57,0.900,bicubic,-30.477,-18.700,+207\nbeitv2_base_patch16_224.in1k_ft_in22k_in1k,224,65.747,34.253,78.837,21.163,86.53,0.900,bicubic,-31.213,-20.803,+41\nbeit3_base_patch16_224.ind
omain_in22k_ft_in1k,224,65.570,34.430,79.977,20.023,86.66,1.000,bicubic,-31.130,-19.593,+104\nswinv2_base_window12to24_192to384.ms_in22k_ft_in1k,384,65.523,34.477,78.863,21.137,87.92,1.000,bicubic,-31.737,-20.937,-25\nswinv2_large_window12to16_192to256.ms_in22k_ft_in1k,256,65.507,34.493,78.440,21.560,196.74,0.900,bicubic,-31.713,-21.270,-11\ntf_efficientnet_b6.ns_jft_in1k,528,65.463,34.537,79.460,20.540,43.04,0.942,bicubic,-31.557,-20.210,+28\ncaformer_m36.sail_in22k_ft_in1k_384,384,65.340,34.660,78.410,21.590,56.20,1.000,bicubic,-32.020,-21.320,-49\nmambaout_base_plus_rw.sw_e150_in12k_ft_in1k,224,65.307,34.693,78.533,21.467,101.66,1.000,bicubic,-31.913,-21.147,-13\nconvformer_b36.sail_in22k_ft_in1k,224,65.273,34.727,77.950,22.050,99.88,1.000,bicubic,-31.957,-21.750,-21\nvit_mediumd_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k,384,65.247,34.753,78.410,21.590,64.27,1.000,bicubic,-32.163,-21.360,-59\nvit_base_patch16_clip_384.openai_ft_in12k_in1k,384,65.220,34.780,78.883,21.117,86.86,0.950,bicubic,-31.910,-20.837,+2\nconvnext_xlarge.fb_in22k_ft_in1k,224,65.073,34.927,77.690,22.310,350.20,0.875,bicubic,-32.187,-22.050,-32\nhiera_huge_224.mae_in1k_ft_in1k,224,64.967,35.033,77.690,22.310,672.78,0.900,bicubic,-32.243,-22.070,-15\nnextvit_large.bd_ssld_6m_in1k_384,384,64.827,35.173,79.210,20.790,57.87,1.000,bicubic,-32.353,-20.480,-10\nmambaout_base_plus_rw.sw_e150_in12k_ft_in1k,288,64.800,35.200,78.583,21.417,101.66,1.000,bicubic,-32.430,-21.167,-26\nconvnext_xlarge.fb_in22k_ft_in1k,288,64.760,35.240,77.847,22.153,350.20,1.000,bicubic,-32.720,-21.973,-76\nconvnext_base.fb_in22k_ft_in1k_384,384,64.743,35.257,78.177,21.823,88.59,1.000,bicubic,-32.497,-21.503,-33\nvit_base_patch16_clip_224.laion2b_ft_in12k_in1k,224,64.733,35.267,77.697,22.303,86.57,0.950,bicubic,-31.897,-21.863,+104\nconvnextv2_large.fcmae_ft_in22k_in1k,288,64.663,35.337,78.173,21.827,197.96,1.000,bicubic,-32.677,-21.627,-55\nconvnextv2_base.fcmae_ft_in22k_in1k,224,64.610,35.390,78.253,21.747,88.72,0.875,bic
ubic,-32.550,-21.407,-13\nhgnetv2_b5.ssld_stage2_ft_in1k,288,64.570,35.430,79.017,20.983,39.57,1.000,bicubic,-32.170,-20.693,+75\nvit_large_patch16_224.augreg_in21k_ft_in1k,224,64.367,35.633,76.273,23.727,304.33,0.900,bicubic,-32.353,-23.367,+82\nhgnetv2_b5.ssld_stage1_in22k_in1k,288,64.263,35.737,78.343,21.657,39.57,1.000,bicubic,-32.347,-21.237,+103\nvit_large_r50_s32_384.augreg_in21k_ft_in1k,384,64.167,35.833,75.917,24.083,329.09,1.000,bicubic,-32.793,-23.803,+20\nmaxxvitv2_rmlp_base_rw_224.sw_in12k_ft_in1k,224,63.947,36.053,77.453,22.547,116.09,0.950,bicubic,-33.133,-22.227,-6\nhgnet_base.ssld_in1k,224,63.943,36.057,76.790,23.210,71.58,0.965,bicubic,-32.707,-22.770,+92\nconvnext_large.fb_in22k_ft_in1k,288,63.940,36.060,77.323,22.677,197.77,1.000,bicubic,-33.300,-22.397,-42\nconvnext_large.fb_in22k_ft_in1k,224,63.843,36.157,77.267,22.733,197.77,0.875,bicubic,-33.387,-22.393,-37\nswin_large_patch4_window7_224.ms_in22k_ft_in1k,224,63.733,36.267,78.060,21.940,196.53,0.900,bicubic,-33.217,-21.570,+17\nnextvit_base.bd_ssld_6m_in1k_384,384,63.600,36.400,78.487,21.513,44.82,1.000,bicubic,-33.620,-21.243,-37\nseresnextaa201d_32x8d.sw_in12k_ft_in1k_384,384,63.560,36.440,76.537,23.463,149.39,1.000,bicubic,-33.800,-23.143,-72\nhrnet_w48_ssld.paddle_in1k,224,63.483,36.517,76.577,23.423,77.47,0.950,bilinear,-32.737,-22.913,+221\nbeit_base_patch16_384.in22k_ft_in22k_in1k,384,63.410,36.590,77.843,22.157,86.74,1.000,bicubic,-33.940,-21.867,-71\ncaformer_m36.sail_in22k_ft_in1k,224,63.367,36.633,76.737,23.263,56.20,1.000,bicubic,-33.663,-22.993,-5\nvit_mediumd_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k,256,63.290,36.710,76.797,23.203,64.11,0.950,bicubic,-33.860,-22.813,-27\nconvnextv2_base.fcmae_ft_in22k_in1k,288,63.167,36.833,77.073,22.927,88.72,1.000,bicubic,-34.043,-22.557,-40\nconvnextv2_huge.fcmae_ft_in1k,224,63.083,36.917,76.243,23.757,660.29,0.875,bicubic,-33.857,-23.407,+14\ncaformer_s36.sail_in22k_ft_in1k_384,384,63.047,36.953,77.223,22.777,39.30,1.000,bicubic,-34.233,-
22.497,-62\nswin_base_patch4_window12_384.ms_in22k_ft_in1k,384,63.023,36.977,77.733,22.267,87.90,1.000,bicubic,-34.047,-22.037,-19\nswinv2_base_window12to16_192to256.ms_in22k_ft_in1k,256,63.020,36.980,76.777,23.223,87.92,0.900,bicubic,-34.040,-22.813,-15\nvit_base_patch16_clip_224.openai_ft_in12k_in1k,224,63.007,36.993,76.697,23.303,86.57,0.950,bicubic,-33.523,-22.843,+114\ntf_efficientnet_b5.ns_jft_in1k,456,62.883,37.117,77.710,22.290,30.39,0.934,bicubic,-34.017,-21.750,+15\ndeit3_base_patch16_384.fb_in22k_ft_in1k,384,62.677,37.323,75.603,24.397,86.88,1.000,bicubic,-34.563,-24.117,-56\ntf_efficientnetv2_l.in21k_ft_in1k,480,62.413,37.587,76.730,23.270,118.52,1.000,bicubic,-34.907,-22.990,-75\nvit_base_patch8_224.augreg2_in21k_ft_in1k,224,62.410,37.590,76.613,23.387,86.58,0.900,bicubic,-34.530,-23.027,+5\ntf_efficientnetv2_l.in21k_ft_in1k,384,62.393,37.607,76.607,23.393,118.52,1.000,bicubic,-34.467,-23.083,+24\ncoatnet_rmlp_2_rw_224.sw_in12k_ft_in1k,224,62.350,37.650,75.000,25.000,73.88,0.950,bicubic,-34.840,-24.640,-47\nconvnext_base.fb_in22k_ft_in1k,288,62.243,37.757,76.177,23.823,88.59,1.000,bicubic,-34.977,-23.583,-56\nvit_base_patch8_224.augreg_in21k_ft_in1k,224,62.233,37.767,75.677,24.323,86.58,0.900,bicubic,-34.857,-23.933,-32\ntf_efficientnetv2_xl.in21k_ft_in1k,384,62.193,37.807,75.413,24.587,208.12,1.000,bicubic,-34.707,-24.227,+8\nhgnet_base.ssld_in1k,288,62.140,37.860,75.250,24.750,71.58,1.000,bicubic,-34.660,-24.360,+33\ntf_efficientnetv2_xl.in21k_ft_in1k,512,62.127,37.873,75.607,24.393,208.12,1.000,bicubic,-35.193,-24.033,-82\nhrnet_w48_ssld.paddle_in1k,288,62.037,37.963,75.273,24.727,77.47,1.000,bilinear,-34.523,-24.367,+91\nnextvit_large.bd_ssld_6m_in1k,224,61.927,38.073,76.807,23.193,57.87,0.950,bicubic,-34.983,-22.833,+2\ndeit3_base_patch16_224.fb_in22k_ft_in1k,224,61.917,38.083,74.717,25.283,86.59,1.000,bicubic,-34.973,-24.993,+7\nconvnext_base.fb_in22k_ft_in1k,224,61.783,38.217,75.820,24.180,88.59,0.875,bicubic,-35.067,-23.950,+17\nconvformer_m36.s
ail_in22k_ft_in1k_384,384,61.773,38.227,75.250,24.750,57.05,1.000,bicubic,-35.587,-24.460,-96\nbeitv2_base_patch16_224.in1k_ft_in1k,224,61.487,38.513,76.000,24.000,86.53,0.900,bicubic,-35.293,-23.550,+34\nvit_mediumd_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,61.473,38.527,74.573,25.427,64.11,0.950,bicubic,-35.327,-25.107,+26\nconvnextv2_huge.fcmae_ft_in1k,288,61.260,38.740,74.420,25.580,660.29,1.000,bicubic,-36.010,-25.300,-82\nvit_betwixt_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k,384,61.243,38.757,74.673,25.327,60.60,1.000,bicubic,-35.967,-25.047,-64\ncoatnet_2_rw_224.sw_in12k_ft_in1k,224,61.200,38.800,73.927,26.073,73.87,0.950,bicubic,-35.800,-25.713,-23\ntf_efficientnet_b4.ns_jft_in1k,380,61.107,38.893,76.063,23.937,19.34,0.922,bicubic,-35.603,-23.577,+42\nmaxvit_base_tf_512.in1k,512,61.027,38.973,73.977,26.023,119.88,1.000,bicubic,-36.163,-25.673,-63\nconvformer_m36.sail_in22k_ft_in1k,224,60.910,39.090,74.460,25.540,57.05,1.000,bicubic,-36.160,-25.170,-42\nnextvit_base.bd_ssld_6m_in1k,224,60.790,39.210,76.340,23.660,44.82,0.950,bicubic,-36.090,-23.320,-3\ncaformer_s36.sail_in22k_ft_in1k,224,60.690,39.310,75.063,24.937,39.30,1.000,bicubic,-36.150,-24.667,+8\nnextvit_small.bd_ssld_6m_in1k_384,384,60.393,39.607,75.703,24.297,31.76,1.000,bicubic,-36.677,-24.057,-46\nvit_base_patch32_clip_384.laion2b_ft_in12k_in1k,384,60.303,39.697,73.733,26.267,88.30,1.000,bicubic,-36.307,-25.967,+61\nhiera_large_224.mae_in1k_ft_in1k,224,60.290,39.710,73.187,26.813,213.74,0.900,bicubic,-36.460,-26.223,+24\ntf_efficientnetv2_m.in21k_ft_in1k,480,60.263,39.737,75.107,24.893,54.14,1.000,bicubic,-36.757,-24.563,-33\ntf_efficientnetv2_m.in21k_ft_in1k,384,60.200,39.800,75.003,24.997,54.14,1.000,bicubic,-36.320,-24.467,+86\nvit_base_patch16_384.augreg_in21k_ft_in1k,384,60.187,39.813,73.847,26.153,86.86,1.000,bicubic,-36.833,-25.863,-38\nbeit_base_patch16_224.in22k_ft_in22k_in1k,224,60.163,39.837,75.380,24.620,86.53,0.900,bicubic,-36.497,-24.280,+43\nvit_base_patch32_clip_448.laion2b_ft_
in12k_in1k,448,60.120,39.880,73.317,26.683,88.34,1.000,bicubic,-36.450,-26.363,+67\nhgnet_small.ssld_in1k,224,60.010,39.990,75.160,24.840,24.36,0.965,bicubic,-36.150,-24.160,+191\nconvnext_small.fb_in22k_ft_in1k_384,384,59.927,40.073,74.267,25.733,50.22,1.000,bicubic,-37.213,-25.313,-67\nmaxvit_large_tf_512.in1k,512,59.880,40.120,72.797,27.203,212.33,1.000,bicubic,-37.180,-26.893,-52\nconvformer_s36.sail_in22k_ft_in1k_384,384,59.867,40.133,73.857,26.143,40.01,1.000,bicubic,-37.183,-25.853,-52\ntiny_vit_21m_512.dist_in22k_ft_in1k,512,59.560,40.440,74.657,25.343,21.27,1.000,bicubic,-37.330,-24.963,-21\nhgnetv2_b4.ssld_stage2_ft_in1k,224,59.517,40.483,74.907,25.093,19.80,0.965,bicubic,-36.583,-24.613,+206\nconvnextv2_large.fcmae_ft_in1k,224,59.460,40.540,74.023,25.977,197.96,0.875,bicubic,-37.390,-25.657,-10\nswin_base_patch4_window7_224.ms_in22k_ft_in1k,224,59.313,40.687,73.927,26.073,87.77,0.900,bicubic,-37.357,-25.733,+33\nvit_base_patch16_224.augreg2_in21k_ft_in1k,224,59.223,40.777,73.677,26.323,86.57,0.900,bicubic,-37.277,-25.913,+80\nseresnextaa101d_32x8d.sw_in12k_ft_in1k_288,288,59.180,40.820,72.970,27.030,93.59,0.950,bicubic,-38.150,-26.780,-120\nmaxvit_base_tf_384.in1k,384,59.167,40.833,71.707,28.293,119.65,1.000,bicubic,-37.973,-27.933,-75\nvit_base_patch32_clip_224.laion2b_ft_in1k,224,59.010,40.990,73.870,26.130,88.22,0.900,bicubic,-35.770,-25.260,+634\nseresnextaa101d_32x8d.sw_in12k_ft_in1k_288,320,58.967,41.033,72.713,27.287,93.59,1.000,bicubic,-38.343,-27.067,-117\nvit_betwixt_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k,256,58.960,41.040,72.720,27.280,60.40,0.950,bicubic,-37.640,-26.860,+42\ntiny_vit_21m_384.dist_in22k_ft_in1k,384,58.933,41.067,73.963,26.037,21.23,1.000,bicubic,-38.017,-25.697,-42\nconvformer_s36.sail_in22k_ft_in1k,224,58.887,41.113,72.843,27.157,40.01,1.000,bicubic,-37.663,-26.757,+55\nvolo_d5_512.sail_in1k,512,58.773,41.227,73.127,26.873,296.09,1.150,bicubic,-38.527,-26.653,-119\nvolo_d5_448.sail_in1k,448,58.693,41.307,72.987,27.013,29
5.91,1.150,bicubic,-38.547,-26.733,-110\nvit_large_r50_s32_224.augreg_in21k_ft_in1k,224,58.693,41.307,71.860,28.140,328.99,0.900,bicubic,-37.527,-27.530,+158\nconvnext_small.in12k_ft_in1k_384,384,58.637,41.363,72.643,27.357,50.22,1.000,bicubic,-38.353,-27.017,-55\nnextvit_small.bd_ssld_6m_in1k,224,58.587,41.413,74.227,25.773,31.76,0.950,bicubic,-38.023,-25.403,+32\nvit_base_patch32_clip_384.openai_ft_in12k_in1k,384,58.560,41.440,73.267,26.733,88.30,0.950,bicubic,-37.840,-26.263,+101\neva02_small_patch14_336.mim_in22k_ft_in1k,336,58.443,41.557,73.027,26.973,22.13,1.000,bicubic,-38.247,-26.573,+11\nmaxvit_large_tf_384.in1k,384,58.417,41.583,71.113,28.887,212.03,1.000,bicubic,-38.533,-28.447,-50\nefficientvit_l3.r384_in1k,384,58.343,41.657,72.703,27.297,246.04,1.000,bicubic,-38.397,-26.897,0\ndeit3_large_patch16_384.fb_in1k,384,58.323,41.677,72.953,27.047,304.76,1.000,bicubic,-38.547,-26.697,-34\nvit_large_patch16_rope_mixed_224.naver_in1k,224,58.283,41.717,71.973,28.027,304.20,0.900,bicubic,-37.877,-27.507,+169\nhgnet_small.ssld_in1k,288,58.230,41.770,73.753,26.247,24.36,1.000,bicubic,-38.170,-25.707,+92\nvit_large_patch16_rope_224.naver_in1k,224,58.213,41.787,71.950,28.050,304.17,0.900,bicubic,-38.177,-27.490,+96\nvit_large_patch16_rope_mixed_ape_224.naver_in1k,224,58.167,41.833,72.237,27.763,304.40,0.900,bicubic,-38.093,-27.363,+137\ndeit3_huge_patch14_224.fb_in1k,224,58.110,41.890,72.063,27.937,632.13,0.900,bicubic,-38.480,-27.627,+29\nconvnextv2_large.fcmae_ft_in1k,288,58.063,41.937,72.560,27.440,197.96,1.000,bicubic,-38.787,-27.100,-36\nseresnextaa101d_32x8d.sw_in12k_ft_in1k,224,57.970,42.030,71.617,28.383,93.59,0.875,bicubic,-38.990,-28.103,-63\ntf_efficientnet_b8.ap_in1k,672,57.873,42.127,72.937,27.063,87.41,0.954,bicubic,-38.687,-26.613,+36\nrdnet_large.nv_in1k_ft_in1k_384,384,57.853,42.147,71.733,28.267,186.27,1.000,bicubic,-39.177,-27.937,-80\nvit_large_patch16_rope_ape_224.naver_in1k,224,57.820,42.180,72.120,27.880,304.37,0.900,bicubic,-38.340,-27.120,+159\
nseresnextaa101d_32x8d.sw_in12k_ft_in1k,288,57.677,42.323,71.377,28.623,93.59,1.000,bicubic,-39.503,-28.383,-109\nconvnext_small.fb_in22k_ft_in1k,288,57.603,42.397,72.497,27.503,50.22,1.000,bicubic,-39.267,-27.153,-45\nefficientvit_l3.r320_in1k,320,57.597,42.403,71.557,28.443,246.04,1.000,bicubic,-39.153,-28.033,-19\nhgnetv2_b4.ssld_stage1_in22k_in1k,224,57.550,42.450,72.913,27.087,19.80,0.965,bicubic,-38.470,-26.527,+211\nefficientvit_l3.r224_in1k,224,57.530,42.470,71.363,28.637,246.04,1.000,bicubic,-38.960,-27.947,+52\nvit_betwixt_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,57.500,42.500,71.597,28.403,60.40,0.950,bicubic,-39.050,-27.963,+32\nmvitv2_large.fb_in1k,224,57.497,42.503,70.760,29.240,217.99,0.900,bicubic,-38.933,-29.000,+71\ncait_m48_448.fb_dist_in1k,448,57.433,42.567,71.787,28.213,356.46,1.000,bicubic,-39.457,-27.893,-58\ncait_m36_384.fb_dist_in1k,384,57.417,42.583,72.313,27.687,271.22,1.000,bicubic,-39.403,-27.347,-43\ntf_efficientnet_b3.ns_jft_in1k,300,57.400,42.600,72.427,27.573,12.23,0.904,bicubic,-38.700,-27.043,+168\nhgnetv2_b3.ssld_stage2_ft_in1k,224,57.390,42.610,73.290,26.710,16.29,0.965,bicubic,-38.400,-26.100,+286\nhgnetv2_b4.ssld_stage2_ft_in1k,288,57.340,42.660,72.960,27.040,19.80,1.000,bicubic,-38.960,-26.600,+106\nconvnext_small.fb_in22k_ft_in1k,224,57.303,42.697,72.497,27.503,50.22,0.875,bicubic,-39.207,-26.993,+37\nvolo_d4_448.sail_in1k,448,57.277,42.723,71.423,28.577,193.41,1.150,bicubic,-39.793,-28.287,-104\ntiny_vit_21m_224.dist_in22k_ft_in1k,224,57.160,42.840,72.553,27.447,21.20,0.950,bicubic,-39.240,-26.947,+71\nvit_base_patch32_clip_224.laion2b_ft_in12k_in1k,224,57.037,42.963,71.337,28.663,88.22,0.900,bicubic,-38.203,-27.913,+453\nefficientvit_l3.r256_in1k,256,57.003,42.997,70.747,29.253,246.04,1.000,bicubic,-39.637,-28.543,-8\nhgnetv2_b3.ssld_stage1_in22k_in1k,224,56.967,43.033,72.927,27.073,16.29,0.965,bicubic,-38.783,-26.513,+296\nmaxvit_small_tf_512.in1k,512,56.947,43.053,70.953,29.047,69.13,1.000,bicubic,-40.233,-28.667,-124\n
vit_base_patch16_224.augreg_in21k_ft_in1k,224,56.860,43.140,70.680,29.320,86.57,0.900,bicubic,-39.440,-28.810,+100\nconvnext_small.in12k_ft_in1k,224,56.750,43.250,71.060,28.940,50.22,0.950,bicubic,-39.820,-28.450,+8\ndeit3_medium_patch16_224.fb_in22k_ft_in1k,224,56.687,43.313,69.760,30.240,38.85,1.000,bicubic,-39.473,-29.720,+135\nefficientvit_l2.r288_in1k,288,56.543,43.457,70.110,29.890,63.71,1.000,bicubic,-40.137,-29.530,-19\ndeit3_large_patch16_224.fb_in1k,224,56.483,43.517,70.463,29.537,304.37,0.900,bicubic,-39.737,-28.967,+122\nrdnet_large.nv_in1k,224,56.393,43.607,70.000,30.000,186.27,0.900,bicubic,-39.987,-29.470,+67\nvolo_d5_224.sail_in1k,224,56.383,43.617,70.687,29.313,295.46,0.960,bicubic,-40.527,-28.973,-82\nxcit_large_24_p8_384.fb_dist_in1k,384,56.373,43.627,71.247,28.753,188.93,1.000,bicubic,-40.417,-28.283,-50\nhgnet_tiny.ssld_in1k,224,56.340,43.660,71.980,28.020,14.74,0.965,bicubic,-39.030,-27.410,+399\nflexivit_large.1200ep_in1k,240,56.260,43.740,71.593,28.407,304.36,0.950,bicubic,-40.540,-28.087,-54\ndm_nfnet_f5.dm_in1k,416,56.257,43.743,70.100,29.900,377.21,0.954,bicubic,-40.553,-29.600,-59\nconvnextv2_tiny.fcmae_ft_in22k_in1k_384,384,56.070,43.930,71.793,28.207,28.64,1.000,bicubic,-40.560,-27.687,-21\nflexivit_large.600ep_in1k,240,56.060,43.940,71.213,28.787,304.36,0.950,bicubic,-40.680,-28.407,-44\nxcit_large_24_p8_224.fb_dist_in1k,224,55.997,44.003,70.687,29.313,188.93,1.000,bicubic,-40.633,-28.903,-21\nvit_medium_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,55.963,44.037,70.347,29.653,38.88,0.950,bicubic,-40.527,-29.203,+22\ndm_nfnet_f6.dm_in1k,448,55.907,44.093,69.860,30.140,438.36,0.956,bicubic,-41.023,-29.860,-94\ncaformer_s18.sail_in22k_ft_in1k_384,384,55.867,44.133,71.277,28.723,26.34,1.000,bicubic,-40.653,-28.313,+9\nvit_base_patch32_clip_224.openai_ft_in1k,224,55.847,44.153,72.203,27.797,88.22,0.900,bicubic,-38.583,-26.857,+675\nvit_medium_patch16_gap_384.sw_in12k_ft_in1k,384,55.727,44.273,70.997,29.003,39.03,0.950,bicubic,-40.773,-28.523,
+13\nefficientvit_l2.r384_in1k,384,55.720,44.280,69.903,30.097,63.71,1.000,bicubic,-40.970,-29.517,-38\nflexivit_large.300ep_in1k,240,55.697,44.303,70.700,29.300,304.36,0.950,bicubic,-40.993,-28.880,-40\nregnety_160.sw_in12k_ft_in1k,224,55.660,44.340,69.997,30.003,83.59,0.950,bicubic,-40.930,-29.523,-19\nconvnext_small.in12k_ft_in1k,288,55.643,44.357,70.597,29.403,50.22,1.000,bicubic,-40.947,-28.983,-19\nregnety_160.lion_in12k_ft_in1k,224,55.620,44.380,69.737,30.263,83.59,0.950,bicubic,-40.890,-29.953,+6\nhiera_base_plus_224.mae_in1k_ft_in1k,224,55.570,44.430,68.973,31.027,69.90,0.900,bicubic,-40.800,-30.447,+52\nhgnetv2_b4.ssld_stage1_in22k_in1k,288,55.380,44.620,71.047,28.953,19.80,1.000,bicubic,-40.880,-28.453,+82\nhrnet_w18_ssld.paddle_in1k,224,55.287,44.713,69.797,30.203,21.30,0.950,bilinear,-39.863,-29.413,+453\nhgnetv2_b3.ssld_stage2_ft_in1k,288,55.090,44.910,71.253,28.747,16.29,1.000,bicubic,-40.940,-28.147,+159\ncaformer_b36.sail_in1k_384,384,55.040,44.960,67.980,32.020,98.75,1.000,bicubic,-42.110,-31.700,-150\nconvformer_s18.sail_in22k_ft_in1k_384,384,54.987,45.013,70.000,30.000,26.77,1.000,bicubic,-41.803,-29.550,-73\ncaformer_m36.sail_in1k_384,384,54.947,45.053,68.367,31.633,56.20,1.000,bicubic,-42.073,-31.333,-128\nxcit_large_24_p16_384.fb_dist_in1k,384,54.870,45.130,69.923,30.077,189.10,1.000,bicubic,-42.060,-29.587,-109\nswin_small_patch4_window7_224.ms_in22k_ft_in1k,224,54.843,45.157,70.830,29.170,49.61,0.900,bicubic,-41.187,-28.690,+160\nvolo_d4_224.sail_in1k,224,54.710,45.290,68.827,31.173,192.96,0.960,bicubic,-42.070,-30.793,-74\ndm_nfnet_f4.dm_in1k,384,54.693,45.307,68.940,31.060,316.07,0.951,bicubic,-42.047,-30.610,-68\nmaxvit_tiny_tf_512.in1k,512,54.647,45.353,68.887,31.113,31.05,1.000,bicubic,-42.323,-30.783,-124\ndeit3_small_patch16_384.fb_in22k_ft_in1k,384,54.590,45.410,68.377,31.623,22.21,1.000,bicubic,-42.090,-31.203,-54\nefficientvit_l2.r224_in1k,224,54.580,45.420,67.947,32.053,63.71,1.000,bicubic,-41.680,-31.383,+75\nhgnetv2_b2.ssld_stag
e1_in22k_in1k,224,54.577,45.423,70.590,29.410,11.22,0.965,bicubic,-40.453,-28.590,+484\nhgnet_tiny.ssld_in1k,288,54.557,45.443,70.517,29.483,14.74,1.000,bicubic,-41.053,-28.853,+299\nconvnextv2_base.fcmae_ft_in1k,224,54.537,45.463,68.917,31.083,88.72,0.875,bicubic,-41.933,-30.663,+7\ncaformer_s36.sail_in1k_384,384,54.530,45.470,68.647,31.353,39.30,1.000,bicubic,-42.350,-31.033,-108\nregnetz_e8.ra3_in1k,256,54.527,45.473,68.447,31.553,57.70,0.940,bicubic,-41.903,-31.023,+15\nhgnetv2_b3.ssld_stage1_in22k_in1k,288,54.480,45.520,70.923,29.077,16.29,1.000,bicubic,-41.500,-28.537,+167\ntf_efficientnetv2_l.in1k,384,54.437,45.563,68.540,31.460,118.52,1.000,bicubic,-42.113,-30.930,-27\ndm_nfnet_f5.dm_in1k,544,54.390,45.610,68.570,31.430,377.21,0.954,bicubic,-42.630,-31.050,-140\nvit_base_r50_s16_384.orig_in21k_ft_in1k,384,54.387,45.613,69.503,30.497,98.95,1.000,bicubic,-42.093,-30.017,-8\nvit_base_patch16_rope_mixed_224.naver_in1k,224,54.380,45.620,68.740,31.260,86.44,0.900,bicubic,-41.610,-30.580,+162\ndm_nfnet_f3.dm_in1k,320,54.363,45.637,68.473,31.527,254.92,0.940,bicubic,-42.007,-31.117,+27\nmaxvit_small_tf_384.in1k,384,54.340,45.660,68.167,31.833,69.02,1.000,bicubic,-42.390,-31.383,-78\nxcit_large_24_p16_224.fb_dist_in1k,224,54.297,45.703,68.963,31.037,189.10,1.000,bicubic,-42.013,-30.517,+46\nefficientvit_l2.r256_in1k,256,54.290,45.710,67.873,32.127,63.71,1.000,bicubic,-42.190,-31.727,-7\nefficientnet_b5.sw_in12k_ft_in1k,448,54.277,45.723,69.823,30.177,30.39,1.000,bicubic,-42.543,-29.837,-103\nvit_base_patch16_rope_mixed_ape_224.naver_in1k,224,54.253,45.747,68.277,31.723,86.59,0.900,bicubic,-41.807,-31.133,+120\ninception_next_base.sail_in1k_384,384,54.237,45.763,68.487,31.513,86.67,1.000,bicubic,-42.493,-31.103,-84\nresnetv2_152x4_bit.goog_in21k_ft_in1k,480,54.217,45.783,70.273,29.727,936.53,1.000,bilinear,-42.673,-29.347,-126\nefficientnetv2_rw_m.agc_in1k,320,54.197,45.803,69.037,30.963,53.24,1.000,bicubic,-41.993,-30.543,+75\nregnety_160.sw_in12k_ft_in1k,288,54.160,
45.840,68.860,31.140,83.59,1.000,bicubic,-42.650,-30.740,-107\ncaformer_s18.sail_in22k_ft_in1k,224,54.117,45.883,69.673,30.327,26.34,1.000,bicubic,-41.913,-29.917,+130\nvit_small_r26_s32_384.augreg_in21k_ft_in1k,384,54.093,45.907,68.787,31.213,36.47,1.000,bicubic,-41.977,-30.673,+108\nregnety_160.lion_in12k_ft_in1k,288,54.087,45.913,68.873,31.127,83.59,1.000,bicubic,-42.753,-30.747,-117\nmambaout_base_wide_rw.sw_e500_in1k,224,54.027,45.973,68.060,31.940,94.45,0.950,bicubic,-42.153,-31.460,+72\ntf_efficientnet_b5.ap_in1k,456,53.943,46.057,69.230,30.770,30.39,0.934,bicubic,-42.147,-30.090,+98\nregnety_120.sw_in12k_ft_in1k,224,53.937,46.063,68.873,31.127,51.82,0.950,bicubic,-42.263,-30.657,+64\nvolo_d3_448.sail_in1k,448,53.933,46.067,67.950,32.050,86.63,1.000,bicubic,-43.107,-31.730,-167\ncaformer_b36.sail_in1k,224,53.927,46.073,66.607,33.393,98.75,1.000,bicubic,-42.613,-32.853,-45\nefficientnet_h_b5.sw_r448_e450_in1k,448,53.763,46.237,69.273,30.727,45.88,1.000,bicubic,-42.577,-30.237,+20\nxcit_medium_24_p8_224.fb_dist_in1k,224,53.660,46.340,68.303,31.697,84.32,1.000,bicubic,-42.860,-31.267,-40\ndm_nfnet_f6.dm_in1k,576,53.660,46.340,68.183,31.817,438.36,0.956,bicubic,-43.320,-31.577,-157\ntf_efficientnet_b6.ap_in1k,528,53.607,46.393,68.570,31.430,43.04,0.942,bicubic,-42.743,-30.830,+11\nvit_medium_patch16_gap_256.sw_in12k_ft_in1k,256,53.587,46.413,69.177,30.823,38.86,0.950,bicubic,-42.413,-30.313,+132\nhgnetv2_b2.ssld_stage2_ft_in1k,224,53.530,46.470,69.820,30.180,11.22,0.965,bicubic,-41.680,-29.420,+395\ncait_s36_384.fb_dist_in1k,384,53.520,46.480,68.010,31.990,68.37,1.000,bicubic,-43.120,-31.590,-84\ntf_efficientnet_b2.ns_jft_in1k,260,53.497,46.503,70.363,29.637,9.11,0.890,bicubic,-42.013,-28.897,+292\nhiera_small_abswin_256.sbb2_e200_in12k_ft_in1k,256,53.483,46.517,68.303,31.697,34.36,0.950,bicubic,-42.977,-31.307,-24\nmambaout_base_wide_rw.sw_e500_in1k,288,53.447,46.553,67.563,32.437,94.45,1.000,bicubic,-43.033,-32.097,-33\ndeit3_base_patch16_384.fb_in1k,384,53.440
,46.560,67.590,32.410,86.88,1.000,bicubic,-42.790,-31.820,+44\ndeit3_base_patch16_224.fb_in1k,224,53.433,46.567,67.573,32.427,86.59,0.900,bicubic,-42.357,-31.847,+202\ndm_nfnet_f3.dm_in1k,416,53.430,46.570,67.583,32.417,254.92,0.940,bicubic,-43.180,-31.887,-82\nmobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k,384,53.427,46.573,68.587,31.413,32.59,0.950,bicubic,-42.883,-30.823,+18\nconvnext_large.fb_in1k,224,53.413,46.587,68.067,31.933,197.77,0.875,bicubic,-42.627,-31.433,+104\nvit_base_patch32_384.augreg_in21k_ft_in1k,384,53.403,46.597,68.150,31.850,88.30,1.000,bicubic,-42.507,-31.080,+147\nconvformer_s18.sail_in22k_ft_in1k,224,53.390,46.610,68.623,31.377,26.77,1.000,bicubic,-42.700,-30.867,+79\nvit_base_patch16_rope_ape_224.naver_in1k,224,53.390,46.610,68.107,31.893,86.59,0.900,bicubic,-42.380,-31.173,+202\nconvnextv2_base.fcmae_ft_in1k,288,53.390,46.610,67.780,32.220,88.72,1.000,bicubic,-43.110,-31.850,-48\nvit_base_patch16_rope_224.naver_in1k,224,53.377,46.623,67.653,32.347,86.43,0.900,bicubic,-42.343,-31.647,+220\nxcit_medium_24_p8_384.fb_dist_in1k,384,53.337,46.663,68.097,31.903,84.32,1.000,bicubic,-43.443,-31.573,-126\ntf_efficientnet_b7.ap_in1k,600,53.313,46.687,68.907,31.093,66.35,0.949,bicubic,-43.037,-30.473,-8\nconvnextv2_tiny.fcmae_ft_in22k_in1k,224,53.273,46.727,69.313,30.687,28.64,0.875,bicubic,-42.787,-30.097,+86\ntf_efficientnetv2_s.in21k_ft_in1k,300,53.273,46.727,69.130,30.870,21.46,1.000,bicubic,-42.627,-30.170,+142\nhrnet_w18_ssld.paddle_in1k,288,53.260,46.740,68.147,31.853,21.30,1.000,bilinear,-42.500,-31.263,+200\ntf_efficientnetv2_s.in21k_ft_in1k,384,53.257,46.743,69.070,30.930,21.46,1.000,bicubic,-43.203,-30.500,-41\nmaxvit_base_tf_224.in1k,224,53.257,46.743,66.213,33.787,119.47,0.950,bicubic,-43.093,-33.327,-9\nxcit_medium_24_p16_384.fb_dist_in1k,384,53.237,46.763,68.103,31.897,84.40,1.000,bicubic,-43.453,-31.507,-114\ntf_efficientnetv2_l.in1k,480,53.193,46.807,67.867,32.133,118.52,1.000,bicubic,-43.537,-31.753,-122\ntf_efficientnet_b8.ra_in
1k,672,53.163,46.837,68.953,31.047,87.41,0.954,bicubic,-43.537,-30.627,-118\nconvnext_large.fb_in1k,288,53.140,46.860,67.820,32.180,197.77,1.000,bicubic,-43.250,-31.710,-28\nhiera_base_224.mae_in1k_ft_in1k,224,53.140,46.860,67.507,32.493,51.52,0.900,bicubic,-43.200,-32.013,-9\ntf_efficientnet_b4.ap_in1k,380,53.113,46.887,68.300,31.700,19.34,0.922,bicubic,-42.387,-31.080,+269\nregnetz_e8.ra3_in1k,320,53.060,46.940,67.183,32.817,57.70,1.000,bicubic,-43.560,-32.267,-106\nmaxvit_large_tf_224.in1k,224,53.010,46.990,65.433,34.567,211.79,0.950,bicubic,-43.340,-34.167,-19\nefficientnet_x_b5.sw_r448_e450_in1k,448,52.953,47.047,68.000,32.000,33.44,1.000,bicubic,-43.067,-31.350,+95\nmobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k,480,52.943,47.057,68.250,31.750,32.59,1.000,bicubic,-43.587,-31.290,-79\nconvnext_tiny.in12k_ft_in1k_384,384,52.890,47.110,68.400,31.600,28.59,1.000,bicubic,-43.690,-31.260,-98\nrexnetr_300.sw_in12k_ft_in1k,224,52.873,47.127,69.183,30.817,34.81,0.950,bicubic,-43.117,-30.157,+102\nefficientnet_h_b5.sw_r448_e450_in1k,576,52.813,47.187,68.280,31.720,45.88,1.000,bicubic,-43.787,-31.310,-104\ncoatnet_rmlp_1_rw2_224.sw_in12k_ft_in1k,224,52.787,47.213,66.423,33.577,41.72,0.950,bicubic,-43.373,-33.067,+35\ndeit3_small_patch16_224.fb_in22k_ft_in1k,224,52.770,47.230,66.850,33.150,22.06,1.000,bicubic,-43.050,-32.570,+160\nrdnet_small.nv_in1k,224,52.753,47.247,67.423,32.577,50.44,0.900,bicubic,-43.067,-31.987,+158\nvolo_d3_224.sail_in1k,224,52.677,47.323,66.293,33.707,86.33,0.960,bicubic,-43.783,-33.317,-60\nmobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k,448,52.673,47.327,67.763,32.237,32.59,0.950,bicubic,-43.647,-31.617,-15\nmambaout_base_tall_rw.sw_e500_in1k,224,52.653,47.347,66.680,33.320,86.48,0.950,bicubic,-43.467,-32.850,+41\nrdnet_base.nv_in1k,224,52.647,47.353,66.860,33.140,87.45,0.900,bicubic,-43.443,-32.640,+51\nmambaout_base_short_rw.sw_e500_in1k,224,52.623,47.377,66.713,33.287,88.83,0.950,bicubic,-43.427,-32.697,+67\nmaxvit_tiny_tf_384.in1k,384,5
2.607,47.393,66.737,33.263,30.98,1.000,bicubic,-43.963,-32.843,-104\nefficientvit_l1.r224_in1k,224,52.547,47.453,65.690,34.310,52.65,1.000,bicubic,-43.423,-33.650,+100\nregnety_120.sw_in12k_ft_in1k,288,52.483,47.517,67.520,32.480,51.82,1.000,bicubic,-44.087,-32.050,-108\ndm_nfnet_f4.dm_in1k,512,52.397,47.603,66.913,33.087,316.07,0.951,bicubic,-44.553,-32.707,-201\nefficientnetv2_rw_m.agc_in1k,416,52.360,47.640,67.237,32.763,53.24,1.000,bicubic,-43.910,-32.323,-13\ntf_efficientnet_b7.ra_in1k,600,52.347,47.653,68.200,31.800,66.35,0.949,bicubic,-44.263,-31.330,-121\ntf_efficientnetv2_m.in1k,384,52.330,47.670,67.087,32.913,54.14,1.000,bicubic,-43.710,-32.383,+63\nvit_mediumd_patch16_rope_reg1_gap_256.sbb_in1k,256,52.330,47.670,66.197,33.803,63.95,0.950,bicubic,-43.800,-33.263,+26\nxcit_small_24_p8_384.fb_dist_in1k,384,52.313,47.687,66.847,33.153,47.63,1.000,bicubic,-44.507,-32.743,-174\ninception_next_base.sail_in1k,224,52.293,47.707,65.843,34.157,86.67,0.950,bicubic,-43.617,-33.597,+106\nresnet18.fb_swsl_ig1b_ft_in1k,224,52.260,47.740,70.470,29.530,11.69,0.875,bilinear,-38.800,-27.730,+1084\nxcit_medium_24_p16_224.fb_dist_in1k,224,52.233,47.767,66.887,33.113,84.40,1.000,bicubic,-44.027,-32.593,-15\nmobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k,544,52.230,47.770,67.123,32.877,32.59,1.000,bicubic,-44.450,-32.407,-143\nconvformer_b36.sail_in1k_384,384,52.207,47.793,66.353,33.647,99.88,1.000,bicubic,-44.663,-33.267,-193\ndeit_base_distilled_patch16_384.fb_in1k,384,52.200,47.800,67.720,32.280,87.63,1.000,bicubic,-44.300,-31.870,-96\nhgnetv2_b2.ssld_stage1_in22k_in1k,288,52.197,47.803,68.803,31.197,11.22,1.000,bicubic,-43.363,-30.417,+228\nconvformer_m36.sail_in1k_384,384,52.167,47.833,65.930,34.070,57.05,1.000,bicubic,-44.603,-33.680,-168\nxcit_small_24_p8_224.fb_dist_in1k,224,52.163,47.837,66.730,33.270,47.63,1.000,bicubic,-44.387,-32.750,-115\nnextvit_large.bd_in1k_384,384,52.140,47.860,67.447,32.553,57.87,1.000,bicubic,-44.330,-32.103,-86\nconvnext_tiny.fb_in22k_ft_i
n1k_384,384,52.073,47.927,66.873,33.127,28.59,1.000,bicubic,-44.087,-32.677,+5\nresnetv2_152x2_bit.goog_teacher_in21k_ft_in1k_384,384,52.070,47.930,68.833,31.167,236.34,1.000,bicubic,-44.100,-30.567,0\necaresnet269d.ra2_in1k,320,52.050,47.950,66.523,33.477,102.09,0.950,bicubic,-44.370,-32.987,-76\nmambaout_base_tall_rw.sw_e500_in1k,288,52.037,47.963,66.073,33.927,86.48,1.000,bicubic,-44.363,-33.547,-71\nresmlp_big_24_224.fb_in22k_ft_in1k,224,51.913,48.087,68.427,31.573,129.14,0.875,bicubic,-44.447,-31.083,-59\nconvformer_s36.sail_in1k_384,384,51.890,48.110,66.103,33.897,40.01,1.000,bicubic,-44.810,-33.447,-163\nfastvit_ma36.apple_dist_in1k,256,51.880,48.120,67.027,32.973,44.07,0.950,bicubic,-44.420,-32.593,-38\nxcit_small_24_p16_384.fb_dist_in1k,384,51.870,48.130,66.317,33.683,47.67,1.000,bicubic,-44.510,-33.263,-69\nresnetv2_152x2_bit.goog_in21k_ft_in1k,448,51.820,48.180,69.360,30.640,236.34,1.000,bilinear,-44.710,-30.230,-119\nmambaout_base_short_rw.sw_e500_in1k,288,51.763,48.237,65.840,34.160,88.83,1.000,bicubic,-44.687,-33.690,-88\nconvnextv2_tiny.fcmae_ft_in22k_in1k,288,51.750,48.250,67.727,32.273,28.64,1.000,bicubic,-44.590,-31.823,-59\nvit_base_patch16_rope_reg1_gap_256.sbb_in1k,256,51.740,48.260,65.840,34.160,86.43,0.950,bicubic,-44.310,-33.560,+38\ncait_s24_384.fb_dist_in1k,384,51.730,48.270,66.317,33.683,47.06,1.000,bicubic,-44.850,-33.233,-138\ncaformer_m36.sail_in1k,224,51.673,48.327,64.473,35.527,56.20,1.000,bicubic,-44.737,-35.057,-84\ncaformer_s36.sail_in1k,224,51.653,48.347,64.880,35.120,39.30,1.000,bicubic,-44.437,-34.660,+14\nconvnext_tiny.in12k_ft_in1k,224,51.640,48.360,67.717,32.283,28.59,0.950,bicubic,-44.550,-31.743,-19\nregnety_2560.seer_ft_in1k,384,51.613,48.387,68.117,31.883,\"1,282.60\",1.000,bicubic,-44.957,-31.413,-137\nrexnetr_300.sw_in12k_ft_in1k,288,51.593,48.407,67.840,32.160,34.81,1.000,bicubic,-44.527,-31.440,-2\necaresnet269d.ra2_in1k,352,51.593,48.407,66.073,33.927,102.09,1.000,bicubic,-44.857,-33.497,-98\nregnetz_040_h.ra3_in1k,2
56,51.570,48.430,66.560,33.440,28.94,1.000,bicubic,-44.410,-33.040,+61\nvit_base_patch16_224_miil.in21k_ft_in1k,224,51.557,48.443,65.230,34.770,86.54,0.875,bilinear,-44.493,-34.120,+28\ntf_efficientnetv2_m.in1k,480,51.507,48.493,66.630,33.370,54.14,1.000,bicubic,-44.973,-32.910,-114\nmvitv2_base.fb_in1k,224,51.473,48.527,65.623,34.377,51.47,0.900,bicubic,-44.547,-33.867,+42\ndavit_small.msft_in1k,224,51.437,48.563,66.430,33.570,49.75,0.950,bicubic,-44.603,-32.960,+29\nrepvit_m2_3.dist_450e_in1k,224,51.433,48.567,66.803,33.197,23.69,0.950,bicubic,-44.567,-32.597,+46\nefficientnet_x_b5.sw_r448_e450_in1k,576,51.430,48.570,66.837,33.163,33.44,1.000,bicubic,-45.040,-32.593,-113\ncoat_lite_medium_384.in1k,384,51.413,48.587,65.680,34.320,44.57,1.000,bicubic,-45.207,-33.930,-165\nmobilenetv4_hybrid_large.ix_e600_r384_in1k,384,51.403,48.597,65.677,34.323,37.76,0.950,bicubic,-44.497,-33.583,+74\nmobilenetv4_hybrid_large.e600_r384_in1k,384,51.393,48.607,65.903,34.097,37.76,0.950,bicubic,-44.597,-33.567,+47\nvit_medium_patch16_rope_reg1_gap_256.sbb_in1k,256,51.357,48.643,65.727,34.273,38.74,0.950,bicubic,-44.533,-33.633,+76\nhgnetv2_b2.ssld_stage2_ft_in1k,288,51.353,48.647,68.047,31.953,11.22,1.000,bicubic,-44.397,-31.213,+130\nedgenext_base.in21k_ft_in1k,320,51.340,48.660,65.760,34.240,18.51,1.000,bicubic,-44.850,-33.710,-34\nmambaout_small_rw.sw_e450_in1k,224,51.340,48.660,65.653,34.347,48.50,1.000,bicubic,-44.670,-33.707,+35\ncaformer_s18.sail_in1k_384,384,51.303,48.697,65.680,34.320,26.34,1.000,bicubic,-45.147,-33.930,-113\nmaxvit_rmlp_small_rw_224.sw_in1k,224,51.290,48.710,65.017,34.983,64.90,0.900,bicubic,-44.680,-34.103,+47\nmaxvit_small_tf_224.in1k,224,51.283,48.717,65.393,34.607,68.93,0.950,bicubic,-44.987,-34.197,-62\ndavit_base.msft_in1k,224,51.270,48.730,65.207,34.793,87.95,0.950,bicubic,-44.970,-34.183,-52\nconvnext_tiny.in12k_ft_in1k,288,51.263,48.737,66.947,33.053,28.59,1.000,bicubic,-44.977,-32.703,-54\nconvnext_base.fb_in1k,224,51.250,48.750,66.120,33.880,88.59
,0.875,bicubic,-44.690,-33.260,+54\nxcit_small_12_p8_384.fb_dist_in1k,384,51.147,48.853,65.887,34.113,26.21,1.000,bicubic,-45.333,-33.593,-129\nconvformer_b36.sail_in1k,224,51.110,48.890,64.293,35.707,99.88,1.000,bicubic,-45.140,-35.017,-58\nvit_base_patch16_384.orig_in21k_ft_in1k,384,51.050,48.950,65.387,34.613,86.86,1.000,bicubic,-45.130,-33.963,-42\nfasternet_l.in1k,224,51.047,48.953,66.420,33.580,93.47,1.000,bicubic,-44.823,-32.930,+71\nconvnext_base.fb_in1k,288,51.030,48.970,65.817,34.183,88.59,1.000,bicubic,-45.300,-33.743,-84\nvit_betwixt_patch16_rope_reg4_gap_256.sbb_in1k,256,51.013,48.987,64.750,35.250,60.23,0.950,bicubic,-44.787,-34.510,+96\nconvformer_m36.sail_in1k,224,51.003,48.997,63.637,36.363,57.05,1.000,bicubic,-45.077,-35.603,-12\nconvnext_tiny.fb_in22k_ft_in1k,224,50.983,49.017,66.420,33.580,28.59,0.875,bicubic,-44.757,-32.940,+121\nseresnextaa101d_32x8d.ah_in1k,224,50.937,49.063,65.167,34.833,93.59,0.950,bicubic,-45.063,-34.433,+25\nnextvit_large.bd_in1k,224,50.910,49.090,66.033,33.967,57.87,0.950,bicubic,-44.970,-33.377,+61\nmobilenetv4_hybrid_large.e600_r384_in1k,448,50.823,49.177,65.677,34.323,37.76,1.000,bicubic,-45.057,-33.773,+58\nvolo_d2_384.sail_in1k,384,50.807,49.193,65.567,34.433,58.87,1.000,bicubic,-45.933,-33.883,-216\ntf_efficientnet_b1.ns_jft_in1k,240,50.797,49.203,67.887,32.113,7.79,0.882,bicubic,-44.063,-31.323,+387\nconvformer_s36.sail_in1k,224,50.793,49.207,64.053,35.947,40.01,1.000,bicubic,-45.327,-35.407,-31\nxcit_small_24_p16_224.fb_dist_in1k,224,50.743,49.257,65.047,34.953,47.67,1.000,bicubic,-45.057,-34.223,+86\nflexivit_base.1200ep_in1k,240,50.730,49.270,65.120,34.880,86.59,0.950,bicubic,-45.400,-34.280,-41\neca_nfnet_l2.ra3_in1k,320,50.727,49.273,65.833,34.167,56.72,0.900,bicubic,-45.303,-33.657,-1\nmobilenetv4_hybrid_large.ix_e600_r384_in1k,448,50.717,49.283,65.417,34.583,37.76,1.000,bicubic,-45.343,-34.083,-17\nefficientnet_b4.ra2_in1k,320,50.710,49.290,65.990,34.010,19.34,0.875,bicubic,-44.700,-33.290,+202\nvit_little_p
atch16_reg1_gap_256.sbb_in12k_ft_in1k,256,50.700,49.300,65.217,34.783,22.52,0.950,bicubic,-45.150,-34.213,+60\ntiny_vit_11m_224.dist_in22k_ft_in1k,224,50.693,49.307,66.897,33.103,11.00,0.950,bicubic,-45.027,-32.383,+115\nconvformer_s18.sail_in1k_384,384,50.670,49.330,65.640,34.360,26.77,1.000,bicubic,-45.580,-33.900,-80\nrepvit_m2_3.dist_300e_in1k,224,50.657,49.343,66.677,33.323,23.69,0.950,bicubic,-45.013,-32.603,+125\nregnetz_040.ra3_in1k,256,50.627,49.373,65.233,34.767,27.12,1.000,bicubic,-45.133,-34.067,+93\nedgenext_base.in21k_ft_in1k,256,50.610,49.390,65.000,35.000,18.51,0.950,bicubic,-45.270,-34.410,+45\ndm_nfnet_f2.dm_in1k,256,50.597,49.403,65.017,34.983,193.78,0.920,bicubic,-45.433,-34.513,-3\nefficientnetv2_rw_s.ra2_in1k,288,50.590,49.410,65.560,34.440,23.94,1.000,bicubic,-44.850,-33.830,+184\nresnetrs420.tf_in1k,320,50.590,49.410,64.873,35.127,191.89,1.000,bicubic,-45.510,-34.427,-42\nefficientvit_b3.r256_in1k,256,50.567,49.433,64.200,35.800,48.65,1.000,bicubic,-45.263,-35.210,+65\ncoatnet_rmlp_2_rw_224.sw_in1k,224,50.550,49.450,63.383,36.617,73.88,0.950,bicubic,-45.690,-35.897,-83\nresnetv2_101x3_bit.goog_in21k_ft_in1k,448,50.523,49.477,67.967,32.033,387.93,1.000,bilinear,-45.777,-31.623,-103\nefficientnet_b4.ra2_in1k,384,50.523,49.477,65.800,34.200,19.34,1.000,bicubic,-45.007,-33.610,+160\nedgenext_base.usi_in1k,256,50.507,49.493,66.190,33.810,18.51,0.950,bicubic,-45.263,-33.230,+78\nhiera_small_abswin_256.sbb2_pd_e200_in12k_ft_in1k,256,50.473,49.527,65.447,34.553,34.36,0.950,bicubic,-45.667,-33.863,-61\nxcit_small_12_p8_224.fb_dist_in1k,224,50.463,49.537,65.507,34.493,26.21,1.000,bicubic,-45.497,-33.873,+10\nxcit_small_12_p16_384.fb_dist_in1k,384,50.440,49.560,65.290,34.710,26.25,1.000,bicubic,-45.940,-34.180,-133\nregnetz_d32.ra3_in1k,256,50.437,49.563,66.807,33.193,27.58,0.950,bicubic,-44.953,-32.533,+192\nvolo_d1_384.sail_in1k,384,50.437,49.563,64.823,35.177,26.78,1.000,bicubic,-46.053,-34.717,-173\nvit_betwixt_patch16_reg4_gap_256.sbb_in1k,256,50.3
97,49.603,64.967,35.033,60.40,0.950,bicubic,-45.173,-34.323,+146\nefficientvit_b3.r288_in1k,288,50.387,49.613,64.077,35.923,48.65,1.000,bicubic,-45.733,-35.233,-58\nflexivit_base.600ep_in1k,240,50.377,49.623,64.670,35.330,86.59,0.950,bicubic,-45.603,-34.730,0\nmambaout_small_rw.sw_e450_in1k,288,50.367,49.633,65.050,34.950,48.50,1.000,bicubic,-45.973,-34.500,-128\nseresnet152d.ra2_in1k,256,50.340,49.660,65.043,34.957,66.84,0.950,bicubic,-45.730,-34.367,-45\nresnetrs350.tf_in1k,288,50.337,49.663,64.547,35.453,163.96,1.000,bicubic,-45.883,-34.963,-90\nfastvit_sa36.apple_dist_in1k,256,50.327,49.673,65.713,34.287,31.53,0.900,bicubic,-45.623,-33.637,+5\nregnetz_040_h.ra3_in1k,320,50.313,49.687,65.610,34.390,28.94,1.000,bicubic,-46.027,-33.740,-130\ninception_next_small.sail_in1k,224,50.293,49.707,65.130,34.870,49.37,0.875,bicubic,-45.397,-34.110,+96\nresnext101_32x16d.fb_ssl_yfcc100m_ft_in1k,224,50.287,49.713,66.150,33.850,194.03,0.875,bilinear,-45.113,-33.170,+175\npit_b_distilled_224.in1k,224,50.253,49.747,64.953,35.047,74.79,0.900,bicubic,-45.607,-34.517,+29\ncait_s24_224.fb_dist_in1k,224,50.243,49.757,65.003,34.997,46.92,1.000,bicubic,-45.407,-34.377,+105\neca_nfnet_l2.ra3_in1k,384,50.217,49.783,65.463,34.537,56.72,1.000,bicubic,-46.243,-34.107,-174\npvt_v2_b5.in1k,224,50.180,49.820,65.063,34.937,81.96,0.900,bicubic,-45.780,-34.357,-6\nvit_small_patch16_384.augreg_in21k_ft_in1k,384,50.170,49.830,65.893,34.107,22.20,1.000,bicubic,-45.830,-33.457,-23\nmvitv2_small.fb_in1k,224,50.163,49.837,64.893,35.107,34.87,0.900,bicubic,-45.727,-34.527,+10\nfasternet_m.in1k,224,50.157,49.843,65.693,34.307,53.52,1.000,bicubic,-45.223,-33.627,+179\nmambaout_small.in1k,224,50.140,49.860,63.770,36.230,48.49,1.000,bicubic,-45.610,-35.680,+69\nresnest269e.in1k,416,50.133,49.867,64.660,35.340,110.93,0.928,bicubic,-45.987,-34.730,-78\ntresnet_v2_l.miil_in21k_ft_in1k,224,50.120,49.880,65.180,34.820,46.17,0.875,bilinear,-45.720,-33.920,+26\ntf_efficientnet_b3.ap_in1k,300,50.110,49.890,65.313,3
4.687,12.23,0.904,bicubic,-44.890,-33.767,+297\ndeit3_medium_patch16_224.fb_in1k,224,50.100,49.900,64.687,35.313,38.85,0.900,bicubic,-45.290,-34.483,+173\ndeit_base_distilled_patch16_224.fb_in1k,224,50.070,49.930,66.257,33.743,87.34,0.900,bicubic,-45.700,-33.133,+55\nvit_medium_patch16_reg4_gap_256.sbb_in1k,256,50.057,49.943,64.943,35.057,38.88,0.950,bicubic,-45.733,-34.637,+45\npvt_v2_b4.in1k,224,50.050,49.950,65.100,34.900,62.56,0.900,bicubic,-45.900,-34.270,-11\nvit_betwixt_patch16_reg1_gap_256.sbb_in1k,256,50.017,49.983,64.617,35.383,60.40,0.950,bicubic,-45.653,-34.783,+85\nflexivit_base.300ep_in1k,240,49.993,50.007,64.143,35.857,86.59,0.950,bicubic,-45.957,-35.217,-14\ncoat_lite_medium.in1k,224,49.987,50.013,64.807,35.193,44.57,0.900,bicubic,-46.013,-34.543,-33\nnextvit_base.bd_in1k_384,384,49.963,50.037,65.600,34.400,44.82,1.000,bicubic,-46.377,-33.980,-156\nseresnext101d_32x8d.ah_in1k,224,49.963,50.037,63.560,36.440,93.59,0.950,bicubic,-46.137,-35.910,-79\nefficientformer_l7.snap_dist_in1k,224,49.850,50.150,66.030,33.970,82.23,0.950,bicubic,-45.770,-33.420,+94\nresnet152d.ra2_in1k,256,49.843,50.157,64.857,35.143,60.21,0.950,bicubic,-45.577,-34.203,+147\nconvnext_nano.r384_in12k_ft_in1k,384,49.840,50.160,66.537,33.463,15.59,1.000,bicubic,-45.850,-32.733,+71\nmambaout_base.in1k,224,49.833,50.167,62.943,37.057,84.81,1.000,bicubic,-46.007,-36.457,+14\nresnest200e.in1k,320,49.807,50.193,64.763,35.237,70.20,0.909,bicubic,-46.303,-34.717,-89\nvolo_d2_224.sail_in1k,224,49.773,50.227,64.560,35.440,58.68,0.960,bicubic,-46.647,-34.960,-184\nmobilenetv4_conv_large.e600_r384_in1k,448,49.767,50.233,64.667,35.333,32.59,1.000,bicubic,-45.913,-34.763,+70\nseresnextaa101d_32x8d.ah_in1k,288,49.763,50.237,64.487,35.513,93.59,1.000,bicubic,-46.657,-35.033,-187\nresnet200d.ra2_in1k,256,49.757,50.243,64.910,35.090,64.69,0.950,bicubic,-45.893,-34.400,+79\nxception65.ra3_in1k,299,49.740,50.260,63.470,36.530,39.92,0.940,bicubic,-45.940,-35.980,+69\nresnetaa101d.sw_in12k_ft_in1k,224,49
.723,50.277,64.583,35.417,44.57,0.950,bicubic,-46.107,-34.707,+12\nmobilenetv4_conv_large.e600_r384_in1k,384,49.673,50.327,65.140,34.860,32.59,0.950,bicubic,-45.717,-34.340,+152\nswinv2_base_window16_256.ms_in1k,256,49.603,50.397,63.713,36.287,87.92,0.900,bicubic,-46.567,-35.787,-117\nconvnextv2_nano.fcmae_ft_in22k_in1k_384,384,49.567,50.433,65.567,34.433,15.62,1.000,bicubic,-46.213,-33.733,+28\nnextvit_base.bd_in1k,224,49.553,50.447,64.883,35.117,44.82,0.950,bicubic,-46.147,-34.407,+55\npvt_v2_b3.in1k,224,49.553,50.447,64.833,35.167,45.24,0.900,bicubic,-45.927,-34.297,+115\nmaxvit_rmlp_tiny_rw_256.sw_in1k,256,49.553,50.447,63.720,36.280,29.15,0.950,bicubic,-46.497,-35.670,-77\nefficientformerv2_l.snap_dist_in1k,224,49.550,50.450,65.107,34.893,26.32,0.950,bicubic,-46.190,-34.323,+42\nconvnext_small.fb_in1k,224,49.540,50.460,64.930,35.070,50.22,0.875,bicubic,-46.070,-34.350,+85\nfastvit_ma36.apple_in1k,256,49.520,50.480,63.460,36.540,44.07,0.950,bicubic,-46.490,-35.960,-60\ncait_xs24_384.fb_dist_in1k,384,49.507,50.493,64.837,35.163,26.67,1.000,bicubic,-46.503,-34.513,-62\ntf_efficientnet_b5.ra_in1k,456,49.487,50.513,65.597,34.403,30.39,0.934,bicubic,-46.493,-33.793,-50\nresnet200d.ra2_in1k,320,49.440,50.560,64.300,35.700,64.69,1.000,bicubic,-46.680,-35.220,-111\nxcit_small_12_p16_224.fb_dist_in1k,224,49.413,50.587,63.910,36.090,26.25,1.000,bicubic,-46.347,-35.220,+28\nmobilenetv4_conv_aa_large.e600_r384_in1k,384,49.390,50.610,64.317,35.683,32.59,0.950,bicubic,-46.310,-35.013,+47\nvit_base_patch32_224.augreg_in21k_ft_in1k,224,49.380,50.620,64.450,35.550,88.22,0.900,bicubic,-45.000,-34.580,+440\nresnest101e.in1k,256,49.373,50.627,65.513,34.487,48.28,0.875,bilinear,-46.237,-33.747,+74\nhiera_small_224.mae_in1k_ft_in1k,224,49.373,50.627,64.437,35.563,35.01,0.900,bicubic,-46.457,-34.863,-2\nrdnet_tiny.nv_in1k,224,49.323,50.677,63.570,36.430,23.86,0.900,bicubic,-46.007,-35.720,+151\nregnetz_d8.ra3_in1k,256,49.320,50.680,65.430,34.570,23.37,0.940,bicubic,-46.510,-33.760,-10
\ndm_nfnet_f2.dm_in1k,352,49.300,50.700,63.923,36.077,193.78,0.920,bicubic,-47.220,-35.587,-244\nresnet152d.ra2_in1k,320,49.273,50.727,64.433,35.567,60.21,1.000,bicubic,-46.577,-34.757,-21\ntiny_vit_21m_224.in1k,224,49.247,50.753,64.283,35.717,21.20,0.950,bicubic,-46.443,-35.117,+43\nseresnet152d.ra2_in1k,320,49.217,50.783,64.273,35.727,66.84,1.000,bicubic,-47.113,-35.237,-182\ndm_nfnet_f1.dm_in1k,224,49.183,50.817,63.513,36.487,132.63,0.910,bicubic,-46.427,-35.957,+65\nregnetz_040.ra3_in1k,320,49.180,50.820,64.040,35.960,27.12,1.000,bicubic,-47.020,-35.470,-147\nresnetv2_152x2_bit.goog_teacher_in21k_ft_in1k,224,49.170,50.830,65.447,34.553,236.34,0.875,bicubic,-46.570,-33.723,+22\nresnext101_32x8d.fb_ssl_yfcc100m_ft_in1k,224,49.157,50.843,65.490,34.510,88.79,0.875,bilinear,-46.183,-33.640,+138\nxcit_large_24_p8_224.fb_in1k,224,49.143,50.857,62.717,37.283,188.93,1.000,bicubic,-46.947,-36.433,-114\nresnetrs270.tf_in1k,256,49.140,50.860,63.827,36.173,129.86,1.000,bicubic,-46.690,-35.473,-21\nconvnext_small.fb_in1k,288,49.043,50.957,64.770,35.230,50.22,1.000,bicubic,-46.947,-34.660,-73\nresmlp_big_24_224.fb_distilled_in1k,224,49.033,50.967,65.467,34.533,129.14,0.875,bicubic,-46.837,-33.973,-36\nrepvgg_b3.rvgg_in1k,224,49.013,50.987,64.887,35.113,123.09,0.875,bilinear,-45.597,-34.033,+354\ngcvit_base.in1k,224,49.000,51.000,63.963,36.037,90.32,0.875,bicubic,-47.050,-35.387,-104\nresnetaa101d.sw_in12k_ft_in1k,288,48.997,51.003,64.107,35.893,44.57,1.000,bicubic,-47.413,-35.353,-222\nmaxxvit_rmlp_small_rw_256.sw_in1k,256,48.997,51.003,63.100,36.900,66.01,0.950,bicubic,-47.233,-36.390,-165\nvolo_d1_224.sail_in1k,224,48.983,51.017,63.170,36.830,26.63,0.960,bicubic,-47.047,-36.390,-93\nresnext101_64x4d.tv_in1k,224,48.970,51.030,63.533,36.467,83.46,0.875,bilinear,-46.870,-35.637,-33\nvit_medium_patch16_reg1_gap_256.sbb_in1k,256,48.953,51.047,64.033,35.967,38.88,0.950,bicubic,-46.947,-35.327,-56\nefficientvit_b3.r224_in1k,224,48.937,51.063,62.963,37.037,48.65,0.950,bicubic,-46.60
3,-36.227,+73\nmobilenetv4_conv_aa_large.e600_r384_in1k,480,48.880,51.120,63.520,36.480,32.59,1.000,bicubic,-47.150,-35.820,-101\nmambaout_small.in1k,288,48.863,51.137,63.293,36.707,48.49,1.000,bicubic,-47.357,-36.237,-167\nresnetrs420.tf_in1k,416,48.813,51.187,63.483,36.517,191.89,1.000,bicubic,-47.627,-36.057,-237\nmaxvit_tiny_tf_224.in1k,224,48.803,51.197,62.953,37.047,30.92,0.950,bicubic,-47.027,-36.357,-30\nseresnext101_32x8d.ah_in1k,224,48.750,51.250,62.243,37.757,93.57,0.950,bicubic,-47.050,-37.107,-20\ncaformer_s18.sail_in1k,224,48.743,51.257,62.943,37.057,26.34,1.000,bicubic,-46.957,-36.397,+17\nregnetz_d8_evos.ch_in1k,256,48.737,51.263,64.343,35.657,23.46,0.950,bicubic,-47.063,-35.047,-26\nconvformer_s18.sail_in1k,224,48.727,51.273,62.937,37.063,26.77,1.000,bicubic,-46.603,-36.323,+123\nhgnetv2_b1.ssld_stage1_in22k_in1k,224,48.710,51.290,66.063,33.937,6.34,0.965,bicubic,-44.910,-32.687,+609\nefficientnetv2_rw_s.ra2_in1k,384,48.683,51.317,63.877,36.123,23.94,1.000,bicubic,-47.087,-35.463,-15\nregnetz_d32.ra3_in1k,320,48.627,51.373,65.207,34.793,27.58,0.950,bicubic,-47.243,-34.243,-56\ndeit3_small_patch16_384.fb_in1k,384,48.607,51.393,62.750,37.250,22.21,1.000,bicubic,-47.053,-36.490,+22\nefficientnet_b3.ra2_in1k,320,48.593,51.407,64.340,35.660,12.23,1.000,bicubic,-46.567,-34.910,+175\nedgenext_base.usi_in1k,320,48.580,51.420,64.407,35.593,18.51,1.000,bicubic,-47.210,-34.873,-28\nmambaout_base.in1k,288,48.573,51.427,62.157,37.843,84.81,1.000,bicubic,-47.587,-37.293,-162\nswinv2_small_window16_256.ms_in1k,256,48.550,51.450,62.653,37.347,49.73,0.900,bicubic,-47.510,-36.727,-130\nseresnext101d_32x8d.ah_in1k,288,48.513,51.487,62.953,37.047,93.59,1.000,bicubic,-47.867,-36.567,-233\necaresnet101d.miil_in1k,224,48.510,51.490,64.117,35.883,44.57,0.875,bicubic,-46.690,-35.133,+160\nfocalnet_base_lrf.ms_in1k,224,48.433,51.567,62.997,37.003,88.75,0.900,bicubic,-47.407,-36.323,-52\nvit_small_r26_s32_224.augreg_in21k_ft_in1k,224,48.410,51.590,63.940,36.060,36.43,0.900,bi
cubic,-46.730,-35.270,+174\nvit_large_patch32_384.orig_in21k_ft_in1k,384,48.357,51.643,61.910,38.090,306.63,1.000,bicubic,-46.893,-37.410,+135\nfocalnet_base_srf.ms_in1k,224,48.323,51.677,63.020,36.980,88.15,0.900,bicubic,-47.567,-36.320,-75\nswinv2_base_window8_256.ms_in1k,256,48.290,51.710,63.410,36.590,87.92,0.900,bicubic,-47.760,-35.850,-135\nrexnetr_200.sw_in12k_ft_in1k,224,48.273,51.727,65.297,34.703,16.52,0.950,bicubic,-47.017,-33.893,+118\nmobilenetv4_conv_medium.e250_r384_in12k_ft_in1k,384,48.270,51.730,64.900,35.100,9.72,0.950,bicubic,-47.090,-34.430,+97\nrepvgg_b3g4.rvgg_in1k,224,48.263,51.737,64.827,35.173,83.83,0.875,bilinear,-46.247,-34.173,+351\nconvit_base.fb_in1k,224,48.247,51.753,62.987,37.013,86.54,0.875,bicubic,-46.873,-36.153,+175\nfastvit_sa36.apple_in1k,256,48.203,51.797,62.753,37.247,31.53,0.900,bicubic,-47.417,-36.467,+19\nswin_s3_base_224.ms_in1k,224,48.163,51.837,62.290,37.710,71.13,0.900,bicubic,-47.907,-37.270,-148\nresnext101_32x8d.tv2_in1k,224,48.143,51.857,62.703,37.297,88.79,0.965,bilinear,-47.167,-36.527,+106\nnextvit_small.bd_in1k_384,384,48.130,51.870,64.117,35.883,31.76,1.000,bicubic,-48.010,-35.263,-178\nsequencer2d_l.in1k,224,48.130,51.870,62.283,37.717,54.30,0.875,bicubic,-47.730,-36.917,-72\ngcvit_small.in1k,224,48.100,51.900,62.777,37.223,51.09,0.875,bicubic,-47.820,-36.713,-95\nregnety_160.deit_in1k,224,48.057,51.943,63.593,36.407,83.59,0.950,bicubic,-47.463,-35.797,+41\ntf_efficientnetv2_b3.in21k_ft_in1k,300,48.053,51.947,64.827,35.173,14.36,0.900,bicubic,-47.547,-34.453,+22\nregnetz_d8.ra3_in1k,320,48.027,51.973,64.323,35.677,23.37,1.000,bicubic,-48.003,-35.227,-135\nfocalnet_small_lrf.ms_in1k,224,48.017,51.983,63.093,36.907,50.34,0.900,bicubic,-47.713,-36.197,-22\nresnetrs350.tf_in1k,384,47.983,52.017,62.600,37.400,163.96,1.000,bicubic,-48.277,-36.770,-218\nrepvit_m1_5.dist_450e_in1k,224,47.973,52.027,63.670,36.330,14.64,0.950,bicubic,-47.347,-35.590,+95\nfastvit_sa24.apple_dist_in1k,256,47.973,52.027,62.840,37.160,21.55
,0.900,bicubic,-47.607,-36.470,+22\nefficientnet_b3.ra2_in1k,288,47.967,52.033,63.930,36.070,12.23,0.875,bicubic,-46.963,-35.240,+216\neca_nfnet_l1.ra2_in1k,256,47.967,52.033,63.090,36.910,41.41,0.900,bicubic,-47.643,-36.190,+10\nconvnextv2_tiny.fcmae_ft_in1k,224,47.927,52.073,63.297,36.703,28.64,0.875,bicubic,-47.373,-35.943,+95\ntwins_svt_large.in1k,224,47.927,52.073,62.760,37.240,99.27,0.900,bicubic,-47.783,-36.600,-24\nswiftformer_l3.dist_in1k,224,47.923,52.077,63.207,36.793,28.49,0.950,bicubic,-47.477,-35.953,+61\nresnext101_32x8d.tv2_in1k,176,47.920,52.080,62.837,37.163,88.79,0.875,bilinear,-46.990,-36.383,+217\nvit_relpos_base_patch16_224.sw_in1k,224,47.890,52.110,62.843,37.157,86.43,0.900,bicubic,-47.290,-36.437,+134\nresnetrs200.tf_in1k,256,47.890,52.110,62.100,37.900,93.21,1.000,bicubic,-47.930,-37.250,-69\nconvnextv2_nano.fcmae_ft_in22k_in1k,224,47.873,52.127,63.920,36.080,15.62,0.875,bicubic,-47.507,-35.260,+68\nmixer_b16_224.miil_in21k_ft_in1k,224,47.847,52.153,63.343,36.657,59.88,0.875,bilinear,-47.043,-35.787,+223\ntf_efficientnet_b5.aa_in1k,456,47.843,52.157,63.953,36.047,30.39,0.934,bicubic,-48.047,-35.137,-103\nregnetv_064.ra3_in1k,224,47.793,52.207,63.407,36.593,30.58,0.950,bicubic,-47.697,-35.963,+29\nrepvgg_b2g4.rvgg_in1k,224,47.783,52.217,64.423,35.577,61.76,0.875,bilinear,-46.047,-34.297,+511\nrepvit_m1_5.dist_300e_in1k,224,47.783,52.217,63.713,36.287,14.64,0.950,bicubic,-47.357,-35.357,+139\nregnety_1280.seer_ft_in1k,384,47.767,52.233,64.133,35.867,644.81,1.000,bicubic,-48.563,-35.277,-251\nresnetv2_50x3_bit.goog_in21k_ft_in1k,448,47.760,52.240,65.667,34.333,217.32,1.000,bilinear,-48.530,-33.963,-243\nrepvgg_d2se.rvgg_in1k,320,47.740,52.260,62.797,37.203,133.33,1.000,bilinear,-48.180,-36.473,-120\nvit_little_patch16_reg4_gap_256.sbb_in1k,256,47.740,52.260,62.380,37.620,22.52,0.950,bicubic,-47.590,-37.000,+71\nvit_relpos_base_patch16_clsgap_224.sw_in1k,224,47.737,52.263,62.410,37.590,86.43,0.900,bicubic,-47.523,-36.760,+94\nmvitv2_tiny.fb_in1k
,224,47.703,52.297,62.697,37.303,24.17,0.900,bicubic,-47.697,-36.593,+48\ntf_efficientnetv2_b3.in21k_ft_in1k,240,47.683,52.317,64.117,35.883,14.36,0.900,bicubic,-47.187,-34.923,+220\nvit_relpos_medium_patch16_cls_224.sw_in1k,224,47.627,52.373,61.787,38.213,38.76,0.900,bicubic,-47.653,-37.333,+83\nmobilenetv4_hybrid_medium.ix_e550_r384_in1k,384,47.617,52.383,63.327,36.673,11.07,0.950,bicubic,-48.003,-36.073,-18\neca_nfnet_l1.ra2_in1k,320,47.617,52.383,62.643,37.357,41.41,1.000,bicubic,-48.323,-36.847,-128\necaresnet101d.miil_in1k,288,47.610,52.390,63.517,36.483,44.57,0.950,bicubic,-48.030,-35.733,-25\nseresnext101_32x8d.ah_in1k,288,47.577,52.423,61.323,38.677,93.57,1.000,bicubic,-48.553,-38.037,-207\nregnetz_d8_evos.ch_in1k,320,47.557,52.443,63.757,36.243,23.46,1.000,bicubic,-48.663,-35.543,-233\npit_s_distilled_224.in1k,224,47.557,52.443,63.187,36.813,24.04,0.900,bicubic,-47.143,-35.883,+255\nmobilenetv4_conv_large.e500_r256_in1k,256,47.537,52.463,62.863,37.137,32.59,0.950,bicubic,-46.863,-36.317,+343\nfocalnet_small_srf.ms_in1k,224,47.517,52.483,62.327,37.673,49.89,0.900,bicubic,-48.113,-37.043,-26\nresnest50d_4s2x40d.in1k,224,47.487,52.513,63.863,36.137,30.42,0.875,bicubic,-47.223,-35.377,+247\ndm_nfnet_f1.dm_in1k,320,47.460,52.540,62.140,37.860,132.63,0.910,bicubic,-48.870,-37.390,-271\nefficientnet_b3_pruned.in1k,300,47.453,52.547,62.863,37.137,9.86,0.904,bicubic,-47.177,-36.167,+268\ndavit_tiny.msft_in1k,224,47.433,52.567,63.277,36.723,28.36,0.950,bicubic,-47.667,-35.813,+133\ncoatnet_rmlp_1_rw_224.sw_in1k,224,47.410,52.590,61.367,38.633,41.69,0.950,bicubic,-48.100,-37.973,+2\nvit_base_patch16_224.orig_in21k_ft_in1k,224,47.400,52.600,61.697,38.303,86.57,0.900,bicubic,-47.830,-37.683,+88\nmobilenetv4_conv_large.e500_r256_in1k,320,47.347,52.653,62.400,37.600,32.59,1.000,bicubic,-47.813,-36.940,+107\nxcit_small_24_p8_224.fb_in1k,224,47.347,52.653,61.113,38.887,47.63,1.000,bicubic,-48.613,-38.277,-148\ncrossvit_18_dagger_408.in1k,408,47.340,52.660,60.883,39.117,44.
61,1.000,bicubic,-48.790,-38.497,-223\nhgnetv2_b1.ssld_stage1_in22k_in1k,288,47.330,52.670,64.483,35.517,6.34,1.000,bicubic,-46.800,-34.517,+417\ntresnet_m.miil_in21k_ft_in1k,224,47.303,52.697,62.080,37.920,31.39,0.875,bilinear,-48.117,-37.230,+20\nresnet101d.ra2_in1k,256,47.300,52.700,62.960,37.040,44.57,0.950,bicubic,-48.130,-36.470,+15\npoolformerv2_m48.sail_in1k,224,47.297,52.703,63.887,36.113,73.35,1.000,bicubic,-47.903,-35.263,+96\nwide_resnet101_2.tv2_in1k,224,47.247,52.753,61.967,38.033,126.89,0.965,bilinear,-48.003,-37.433,+74\nefficientformer_l3.snap_dist_in1k,224,47.240,52.760,63.453,36.547,31.41,0.950,bicubic,-47.980,-35.717,+84\nrepvit_m3.dist_in1k,224,47.203,52.797,63.497,36.503,10.68,0.950,bicubic,-47.527,-35.573,+228\nconvnext_nano.in12k_ft_in1k,224,47.193,52.807,63.967,36.033,15.59,0.950,bicubic,-48.027,-35.353,+83\nregnety_032.ra_in1k,224,47.193,52.807,62.530,37.470,19.44,0.950,bicubic,-47.727,-36.650,+172\nresnext101_32x4d.fb_ssl_yfcc100m_ft_in1k,224,47.183,52.817,63.400,36.600,44.18,0.875,bilinear,-47.977,-35.790,+96\nefficientvit_b2.r256_in1k,256,47.183,52.817,61.670,38.330,24.33,1.000,bicubic,-48.107,-37.580,+54\nregnety_080.ra3_in1k,224,47.160,52.840,62.110,37.890,39.18,0.950,bicubic,-48.410,-37.150,-25\ntf_efficientnet_b6.aa_in1k,528,47.157,52.843,63.053,36.947,43.04,0.942,bicubic,-49.133,-36.487,-278\nresnetrs270.tf_in1k,352,47.140,52.860,62.047,37.953,129.86,1.000,bicubic,-48.920,-37.293,-211\nconvnext_tiny.fb_in1k,224,47.133,52.867,63.180,36.820,28.59,0.875,bicubic,-47.837,-36.030,+152\nregnety_320.tv2_in1k,224,47.080,52.920,62.060,37.940,145.05,0.965,bicubic,-48.520,-37.340,-40\nxcit_small_12_p8_224.fb_in1k,224,47.080,52.920,60.657,39.343,26.21,1.000,bicubic,-48.370,-38.683,-6\nefficientvit_b2.r288_in1k,288,47.073,52.927,61.607,38.393,24.33,1.000,bicubic,-48.517,-37.633,-38\ntf_efficientnet_b4.aa_in1k,380,47.057,52.943,62.877,37.123,19.34,0.922,bicubic,-48.533,-36.283,-40\ninception_next_tiny.sail_in1k,224,47.037,52.963,62.957,37.043,28.0
6,0.875,bicubic,-48.083,-36.163,+102\nvit_base_patch16_rpn_224.sw_in1k,224,47.027,52.973,62.427,37.573,86.54,0.900,bicubic,-47.803,-36.663,+191\nmobilenetv4_hybrid_medium.ix_e550_r384_in1k,448,47.023,52.977,63.003,36.997,11.07,1.000,bicubic,-48.657,-36.327,-73\nregnetz_c16.ra3_in1k,256,47.010,52.990,63.670,36.330,13.46,0.940,bicubic,-48.150,-35.660,+87\nswin_base_patch4_window12_384.ms_in1k,384,46.993,53.007,61.857,38.143,87.90,1.000,bicubic,-49.397,-37.563,-322\nswinv2_small_window8_256.ms_in1k,256,46.990,53.010,62.160,37.840,49.73,0.900,bicubic,-48.780,-37.210,-108\nhgnetv2_b1.ssld_stage2_ft_in1k,224,46.977,53.023,64.360,35.640,6.34,0.965,bicubic,-46.643,-34.280,+514\nmambaout_tiny.in1k,224,46.963,53.037,61.840,38.160,26.55,1.000,bicubic,-48.147,-37.450,+99\nconvnext_tiny_hnf.a2h_in1k,224,46.957,53.043,61.143,38.857,28.59,0.950,bicubic,-47.803,-38.037,+198\nconvnextv2_tiny.fcmae_ft_in1k,288,46.953,53.047,62.470,37.530,28.64,1.000,bicubic,-48.857,-36.870,-128\nrexnetr_200.sw_in12k_ft_in1k,288,46.937,53.063,63.830,36.170,16.52,1.000,bicubic,-48.383,-35.590,+24\nresnet101d.ra2_in1k,320,46.930,53.070,62.447,37.553,44.57,1.000,bicubic,-48.820,-37.023,-104\ntnt_b_patch16_224.in1k,224,46.930,53.070,62.360,37.640,65.43,0.900,bicubic,-48.340,-36.870,+39\ncoat_small.in1k,224,46.927,53.073,61.363,38.637,21.69,0.900,bicubic,-48.283,-37.927,+63\nxception65p.ra3_in1k,299,46.907,53.093,61.063,38.937,39.82,0.940,bicubic,-48.753,-38.327,-78\nxcit_large_24_p16_224.fb_in1k,224,46.890,53.110,60.757,39.243,189.10,1.000,bicubic,-48.060,-38.073,+140\nmobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k,256,46.883,53.117,62.580,37.420,11.07,0.950,bicubic,-48.197,-36.490,+99\nmaxvit_tiny_rw_224.sw_in1k,224,46.847,53.153,60.767,39.233,29.06,0.950,bicubic,-48.893,-38.603,-104\nresnet152.a1h_in1k,224,46.847,53.153,60.590,39.410,60.19,0.950,bicubic,-48.733,-38.670,-54\nnextvit_small.bd_in1k,224,46.833,53.167,62.813,37.187,31.76,0.950,bicubic,-48.397,-36.427,+47\npvt_v2_b2_li.in1k,224,46.800,53.2
00,62.380,37.620,22.55,0.900,bicubic,-48.470,-36.910,+29\ngc_efficientnetv2_rw_t.agc_in1k,224,46.787,53.213,61.320,38.680,13.68,1.000,bicubic,-47.773,-37.850,+243\nresnet152.tv2_in1k,224,46.777,53.223,61.003,38.997,60.19,0.965,bilinear,-48.303,-38.267,+96\nefficientformerv2_s2.snap_dist_in1k,224,46.753,53.247,61.823,38.177,12.71,0.950,bicubic,-48.367,-37.297,+81\nseresnext101_64x4d.gluon_in1k,224,46.677,53.323,61.363,38.637,88.23,0.875,bicubic,-47.983,-37.697,+215\nfastvit_sa24.apple_in1k,256,46.653,53.347,61.633,38.367,21.55,0.900,bicubic,-48.657,-37.677,+13\nregnety_640.seer_ft_in1k,384,46.613,53.387,63.083,36.917,281.38,1.000,bicubic,-49.417,-36.417,-223\nfbnetv3_g.ra2_in1k,240,46.583,53.417,62.600,37.400,16.62,0.950,bilinear,-48.017,-36.500,+222\nswin_tiny_patch4_window7_224.ms_in22k_ft_in1k,224,46.577,53.423,63.923,36.077,28.29,0.900,bicubic,-48.263,-35.357,+162\nfasternet_s.in1k,224,46.563,53.437,62.600,37.400,31.18,1.000,bicubic,-48.027,-36.420,+225\ntwins_pcpvt_large.in1k,224,46.553,53.447,62.203,37.797,60.99,0.900,bicubic,-49.187,-37.277,-121\nconvnext_tiny.fb_in1k,288,46.530,53.470,63.247,36.753,28.59,1.000,bicubic,-48.680,-36.013,+44\nregnetv_064.ra3_in1k,288,46.527,53.473,62.290,37.710,30.58,1.000,bicubic,-49.263,-36.970,-144\nresnet152.a1h_in1k,288,46.520,53.480,60.517,39.483,60.19,1.000,bicubic,-49.260,-38.923,-141\nswin_base_patch4_window7_224.ms_in1k,224,46.503,53.497,61.467,38.533,87.77,0.900,bicubic,-49.397,-37.893,-189\nconvnextv2_nano.fcmae_ft_in22k_in1k,288,46.460,53.540,62.877,37.123,15.62,1.000,bicubic,-48.930,-36.463,-21\nefficientnetv2_rw_t.ra2_in1k,224,46.447,53.553,61.403,38.597,13.65,1.000,bicubic,-48.213,-37.767,+201\nxcit_medium_24_p8_224.fb_in1k,224,46.443,53.557,59.573,40.427,84.32,1.000,bicubic,-49.447,-39.787,-186\ncoatnet_1_rw_224.sw_in1k,224,46.420,53.580,60.103,39.897,41.72,0.950,bicubic,-49.200,-39.217,-89\ngcvit_tiny.in1k,224,46.397,53.603,61.663,38.337,28.22,0.875,bicubic,-49.303,-37.717,-117\nswin_s3_small_224.ms_in1k,224,46.
383,53.617,60.840,39.160,49.74,0.900,bicubic,-49.467,-38.570,-176\ncrossvit_15_dagger_408.in1k,408,46.380,53.620,60.527,39.473,28.50,1.000,bicubic,-49.450,-38.723,-169\nfbnetv3_g.ra2_in1k,288,46.377,53.623,62.477,37.523,16.62,0.950,bilinear,-48.753,-36.723,+58\nresnetrs200.tf_in1k,320,46.377,53.623,60.963,39.037,93.21,1.000,bicubic,-49.943,-38.567,-336\nese_vovnet57b.ra4_e3600_r256_in1k,320,46.353,53.647,60.487,39.513,38.61,1.000,bicubic,-49.277,-38.963,-101\nhgnetv2_b0.ssld_stage1_in22k_in1k,224,46.310,53.690,63.407,36.593,6.00,0.965,bicubic,-46.360,-35.273,+643\nhgnetv2_b0.ssld_stage2_ft_in1k,224,46.310,53.690,63.273,36.727,6.00,0.965,bicubic,-46.580,-35.177,+612\ntresnet_xl.miil_in1k,224,46.307,53.693,61.890,38.110,78.44,0.875,bilinear,-48.773,-37.360,+68\ncs3se_edgenet_x.c2ns_in1k,256,46.307,53.693,61.277,38.723,50.72,0.950,bicubic,-49.293,-38.033,-90\nese_vovnet57b.ra4_e3600_r256_in1k,256,46.303,53.697,60.890,39.110,38.61,0.950,bicubic,-48.967,-38.500,+1\nnaflexvit_base_patch16_gap.e300_s576_in1k,384,46.297,53.703,60.703,39.297,86.63,1.000,bicubic,-49.463,-38.707,-146\nxcit_tiny_24_p8_224.fb_dist_in1k,224,46.280,53.720,60.613,39.387,12.11,1.000,bicubic,-49.200,-38.697,-67\nnaflexvit_base_patch16_parfac_gap.e300_s576_in1k,384,46.260,53.740,60.567,39.433,86.46,1.000,bicubic,-49.610,-38.593,-193\nsequencer2d_m.in1k,224,46.247,53.753,60.817,39.183,38.31,0.875,bicubic,-49.363,-38.483,-99\nrexnet_300.nav_in1k,224,46.243,53.757,62.440,37.560,34.71,0.875,bicubic,-49.307,-36.850,-81\nxcit_tiny_24_p8_384.fb_dist_in1k,384,46.243,53.757,60.640,39.360,12.11,1.000,bicubic,-50.007,-38.800,-329\ndm_nfnet_f0.dm_in1k,192,46.213,53.787,61.497,38.503,71.49,0.900,bicubic,-48.457,-37.633,+179\nresnetrs152.tf_in1k,256,46.210,53.790,60.763,39.237,86.62,1.000,bicubic,-49.160,-38.477,-36\nresnest50d_1s4x24d.in1k,224,46.203,53.797,62.513,37.487,25.68,0.875,bicubic,-48.177,-36.547,+260\nmobilenetv4_hybrid_medium.e500_r224_in1k,224,46.200,53.800,61.833,38.167,11.07,0.950,bicubic,-48.000,-3
7.187,+318\nrepvit_m1_1.dist_450e_in1k,224,46.187,53.813,63.347,36.653,8.80,0.950,bicubic,-48.363,-35.643,+210\ndeit_small_distilled_patch16_224.fb_in1k,224,46.150,53.850,62.450,37.550,22.44,0.900,bicubic,-48.430,-36.640,+196\nnfnet_l0.ra2_in1k,224,46.147,53.853,62.783,37.217,35.07,0.900,bicubic,-48.753,-36.337,+105\ngernet_m.idstcv_in1k,224,46.140,53.860,62.727,37.273,21.14,0.875,bilinear,-48.410,-36.163,+210\nregnety_160.deit_in1k,288,46.123,53.877,61.803,38.197,83.59,1.000,bicubic,-49.777,-37.757,-222\ncrossvit_base_240.in1k,240,46.087,53.913,60.220,39.780,105.03,0.875,bicubic,-48.963,-38.920,+63\ntf_efficientnet_b0.ns_jft_in1k,224,46.067,53.933,63.283,36.717,5.29,0.875,bicubic,-47.713,-35.667,+413\nnaflexvit_base_patch16_par_gap.e300_s576_in1k,384,46.067,53.933,60.220,39.780,86.63,1.000,bicubic,-49.583,-38.970,-127\npoolformerv2_m36.sail_in1k,224,46.050,53.950,62.200,37.800,56.08,1.000,bicubic,-48.990,-36.760,+61\npoolformerv2_s36.sail_in1k,224,46.033,53.967,62.307,37.693,30.79,1.000,bicubic,-48.677,-36.423,+148\nvit_small_patch16_224.augreg_in21k_ft_in1k,224,46.033,53.967,61.847,38.153,22.05,0.900,bicubic,-48.877,-37.283,+93\nresnet51q.ra2_in1k,288,46.030,53.970,60.920,39.080,35.70,1.000,bilinear,-49.180,-38.400,+6\nswinv2_cr_small_ns_224.sw_in1k,224,46.027,53.973,60.603,39.397,49.70,0.900,bicubic,-49.693,-38.727,-154\nnest_base_jx.goog_in1k,224,45.960,54.040,60.107,39.893,67.72,0.875,bicubic,-49.610,-39.153,-108\ndeit3_small_patch16_224.fb_in1k,224,45.960,54.040,58.953,41.047,22.06,0.900,bicubic,-48.750,-40.287,+148\ncrossvit_18_240.in1k,240,45.957,54.043,60.350,39.650,43.27,0.875,bicubic,-49.103,-38.750,+49\nmobilenetv4_hybrid_medium.e500_r224_in1k,256,45.937,54.063,61.580,38.420,11.07,1.000,bicubic,-48.923,-37.540,+108\nvit_relpos_medium_patch16_224.sw_in1k,224,45.937,54.063,61.080,38.920,38.75,0.900,bicubic,-49.253,-38.150,+4\nregnetz_c16.ra3_in1k,320,45.933,54.067,62.743,37.257,13.46,1.000,bicubic,-49.457,-36.577,-66\nregnety_080.ra3_in1k,288,45.933,54.067
,60.837,39.163,39.18,1.000,bicubic,-49.937,-38.603,-224\ntwins_pcpvt_base.in1k,224,45.900,54.100,61.373,38.627,43.83,0.900,bicubic,-49.590,-37.957,-99\nresnest50d.in1k,224,45.890,54.110,62.677,37.323,27.48,0.875,bilinear,-48.740,-36.353,+163\ntwins_svt_base.in1k,224,45.887,54.113,60.903,39.097,56.07,0.900,bicubic,-49.673,-38.537,-112\nresnext101_64x4d.c1_in1k,224,45.887,54.113,59.310,40.690,83.46,0.950,bicubic,-49.343,-39.910,-14\nregnety_032.ra_in1k,288,45.883,54.117,61.537,38.463,19.44,1.000,bicubic,-49.587,-37.813,-96\nregnetz_c16_evos.ch_in1k,256,45.867,54.133,62.960,37.040,13.49,0.950,bicubic,-49.483,-36.330,-61\nconvnext_nano.in12k_ft_in1k,288,45.867,54.133,62.610,37.390,15.59,1.000,bicubic,-49.523,-36.640,-75\nconvnext_tiny_hnf.a2h_in1k,288,45.840,54.160,60.143,39.857,28.59,1.000,bicubic,-49.430,-38.977,-35\ncrossvit_18_dagger_240.in1k,240,45.823,54.177,59.967,40.033,44.27,0.875,bicubic,-49.357,-39.163,-4\nmobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k,320,45.813,54.187,61.603,38.397,11.07,1.000,bicubic,-49.507,-37.687,-56\nhiera_tiny_224.mae_in1k_ft_in1k,224,45.780,54.220,61.017,38.983,27.91,0.900,bicubic,-49.470,-38.183,-30\nlevit_conv_384.fb_dist_in1k,224,45.777,54.223,61.717,38.283,39.13,0.900,bicubic,-49.443,-37.453,-18\nlevit_384.fb_dist_in1k,224,45.777,54.223,61.713,38.287,39.13,0.900,bicubic,-49.443,-37.507,-18\ngc_efficientnetv2_rw_t.agc_in1k,288,45.730,54.270,60.300,39.700,13.68,1.000,bicubic,-49.570,-38.920,-52\nvit_relpos_medium_patch16_rpn_224.sw_in1k,224,45.720,54.280,60.940,39.060,38.73,0.900,bicubic,-49.370,-38.280,+19\nxcit_tiny_24_p16_384.fb_dist_in1k,384,45.700,54.300,60.527,39.473,12.12,1.000,bicubic,-49.770,-38.793,-108\nhgnetv2_b1.ssld_stage2_ft_in1k,288,45.690,54.310,62.707,37.293,6.34,1.000,bicubic,-48.490,-36.263,+284\nconvmixer_1536_20.in1k,224,45.683,54.317,61.690,38.310,51.63,0.960,bicubic,-49.277,-37.480,+52\ndm_nfnet_f0.dm_in1k,256,45.680,54.320,61.333,38.667,71.49,0.900,bicubic,-50.050,-38.037,-185\nvit_srelpos_medium_patch16
_224.sw_in1k,224,45.680,54.320,61.060,38.940,38.74,0.900,bicubic,-49.270,-38.140,+56\nregnetx_320.tv2_in1k,224,45.640,54.360,60.220,39.780,107.81,0.965,bicubic,-49.680,-39.210,-65\ncrossvit_15_dagger_240.in1k,240,45.613,54.387,60.060,39.940,28.21,0.875,bicubic,-49.347,-39.100,+51\nflexivit_small.1200ep_in1k,240,45.607,54.393,59.917,40.083,22.06,0.950,bicubic,-49.653,-39.273,-47\nefficientnetv2_rw_t.ra2_in1k,288,45.600,54.400,60.173,39.827,13.65,1.000,bicubic,-49.490,-39.147,+9\nxcit_small_12_p16_224.fb_in1k,224,45.597,54.403,59.453,40.547,26.25,1.000,bicubic,-49.273,-39.567,+77\nxcit_medium_24_p16_224.fb_in1k,224,45.590,54.410,59.000,41.000,84.40,1.000,bicubic,-49.550,-39.930,-6\nxcit_small_24_p16_224.fb_in1k,224,45.577,54.423,58.967,41.033,47.67,1.000,bicubic,-49.503,-40.223,+13\nseresnext101_32x4d.gluon_in1k,224,45.547,54.453,61.107,38.893,48.96,0.875,bicubic,-48.953,-37.983,+175\nrepvit_m1_1.dist_300e_in1k,224,45.540,54.460,62.747,37.253,8.80,0.950,bicubic,-48.650,-36.363,+271\nmambaout_tiny.in1k,288,45.530,54.470,60.697,39.303,26.55,1.000,bicubic,-50.110,-38.663,-167\nwide_resnet101_2.tv2_in1k,176,45.530,54.470,59.580,40.420,126.89,0.875,bilinear,-48.620,-39.250,+281\ntf_efficientnet_b7.aa_in1k,600,45.460,54.540,61.763,38.237,66.35,0.949,bicubic,-50.610,-37.587,-331\nresnet152d.gluon_in1k,224,45.443,54.557,60.067,39.933,60.21,0.875,bicubic,-49.007,-38.943,+186\nflexivit_small.600ep_in1k,240,45.433,54.567,59.627,40.373,22.06,0.950,bicubic,-49.857,-39.483,-69\ncs3se_edgenet_x.c2ns_in1k,320,45.430,54.570,60.430,39.570,50.72,1.000,bicubic,-50.620,-39.020,-326\npvt_v2_b2.in1k,224,45.403,54.597,60.687,39.313,25.36,0.900,bicubic,-49.587,-38.503,+28\ntf_efficientnetv2_s.in1k,300,45.390,54.610,60.243,39.757,21.46,1.000,bicubic,-50.010,-39.157,-108\nregnety_320.seer_ft_in1k,384,45.387,54.613,62.230,37.770,145.05,1.000,bicubic,-50.413,-37.170,-234\nresnext101_64x4d.c1_in1k,288,45.383,54.617,59.047,40.953,83.46,1.000,bicubic,-50.187,-40.253,-152\nnfnet_l0.ra2_in1k,288,45.38
0,54.620,62.087,37.913,35.07,1.000,bicubic,-50.050,-37.173,-125\nmobilenetv4_conv_blur_medium.e500_r224_in1k,256,45.380,54.620,61.710,38.290,9.72,1.000,bicubic,-48.750,-37.080,+278\nswiftformer_l1.dist_in1k,224,45.377,54.623,61.503,38.497,12.06,0.950,bicubic,-49.063,-37.537,+180\nconvnextv2_nano.fcmae_ft_in1k,224,45.377,54.623,61.180,38.820,15.62,0.875,bicubic,-49.533,-37.820,+44\nresnext50_32x4d.fb_ssl_yfcc100m_ft_in1k,224,45.370,54.630,61.997,38.003,25.03,0.875,bilinear,-49.340,-37.143,+95\nresnetv2_50x1_bit.goog_distilled_in1k,224,45.357,54.643,62.270,37.730,25.55,0.875,bicubic,-50.033,-37.150,-113\nresnet51q.ra2_in1k,256,45.303,54.697,60.573,39.427,35.70,0.875,bilinear,-49.727,-38.577,+7\nnest_small_jx.goog_in1k,224,45.303,54.697,58.993,41.007,38.35,0.875,bicubic,-50.257,-40.237,-154\nresnet101.tv2_in1k,224,45.300,54.700,60.043,39.957,44.55,0.965,bilinear,-49.590,-38.957,+48\ncs3edgenet_x.c2_in1k,288,45.280,54.720,60.243,39.757,47.82,1.000,bicubic,-50.200,-39.087,-143\nresnet101.a1h_in1k,224,45.263,54.737,59.807,40.193,44.55,0.950,bicubic,-49.727,-39.413,+15\nresnet61q.ra2_in1k,288,45.257,54.743,59.340,40.660,36.85,1.000,bicubic,-49.883,-39.880,-33\nconvit_small.fb_in1k,224,45.250,54.750,60.480,39.520,27.78,0.875,bicubic,-49.660,-38.790,+34\nmobilenetv4_conv_medium.e500_r256_in1k,256,45.233,54.767,61.863,38.137,9.72,0.950,bicubic,-48.617,-37.007,+327\nefficientvit_b2.r224_in1k,224,45.230,54.770,59.187,40.813,24.33,0.950,bicubic,-49.660,-39.893,+43\nnasnetalarge.tf_in1k,331,45.193,54.807,57.963,42.037,88.75,0.911,bicubic,-49.987,-41.157,-51\ntresnet_xl.miil_in1k_448,448,45.177,54.823,61.437,38.563,78.44,0.875,bilinear,-50.373,-37.893,-163\nfocalnet_tiny_lrf.ms_in1k,224,45.163,54.837,61.253,38.747,28.65,0.900,bicubic,-50.057,-38.007,-64\nmobilenetv4_conv_blur_medium.e500_r224_in1k,224,45.150,54.850,61.673,38.327,9.72,0.950,bicubic,-48.710,-37.187,+318\nrexnet_200.nav_in1k,224,45.143,54.857,62.333,37.667,16.37,0.875,bicubic,-49.537,-36.757,+95\nhgnetv2_b0.ssld_stag
e1_in22k_in1k,288,45.133,54.867,62.313,37.687,6.00,1.000,bicubic,-48.477,-36.627,+383\nswin_small_patch4_window7_224.ms_in1k,224,45.113,54.887,60.280,39.720,49.61,0.900,bicubic,-50.617,-38.930,-226\nvit_small_patch16_rope_mixed_ape_224.naver_in1k,224,45.113,54.887,57.360,42.640,22.06,0.900,bicubic,-49.067,-41.580,+244\nmobilenetv4_conv_medium.e500_r224_in1k,256,45.090,54.910,61.497,38.503,9.72,1.000,bicubic,-49.000,-37.593,+269\ntf_efficientnet_b3.aa_in1k,300,45.080,54.920,60.707,39.293,12.23,0.904,bicubic,-49.840,-38.383,+19\nmobilenetv4_conv_medium.e500_r256_in1k,320,45.070,54.930,61.477,38.523,9.72,1.000,bicubic,-49.290,-37.613,+180\nsequencer2d_s.in1k,224,45.030,54.970,60.033,39.967,27.65,0.875,bicubic,-50.400,-39.287,-148\nresnetrs152.tf_in1k,320,44.980,55.020,59.670,40.330,86.62,1.000,bicubic,-50.980,-39.510,-316\nmaxxvit_rmlp_nano_rw_256.sw_in1k,256,44.957,55.043,59.587,40.413,16.78,0.950,bicubic,-50.403,-39.753,-124\nresnet101.a1h_in1k,288,44.957,55.043,59.337,40.663,44.55,1.000,bicubic,-50.623,-39.923,-184\nregnetv_040.ra3_in1k,224,44.943,55.057,59.957,40.043,20.64,0.950,bicubic,-50.407,-39.293,-123\nhgnetv2_b0.ssld_stage2_ft_in1k,288,44.933,55.067,62.330,37.670,6.00,1.000,bicubic,-48.907,-36.400,+311\ndeit_base_patch16_224.fb_in1k,224,44.913,55.087,59.183,40.817,86.57,0.900,bicubic,-50.127,-39.757,-21\necaresnetlight.miil_in1k,224,44.910,55.090,60.777,39.223,30.16,0.875,bicubic,-49.240,-38.173,+237\nflexivit_small.300ep_in1k,240,44.907,55.093,59.457,40.543,22.06,0.950,bicubic,-50.243,-39.683,-61\nmobilenetv4_conv_medium.e500_r224_in1k,224,44.900,55.100,61.387,38.613,9.72,0.950,bicubic,-48.770,-37.533,+355\nresnetv2_101.a1h_in1k,224,44.837,55.163,58.867,41.133,44.54,0.950,bicubic,-50.103,-40.283,+1\ntf_efficientnet_b2.ap_in1k,260,44.813,55.187,60.767,39.233,9.11,0.890,bicubic,-49.477,-38.323,+192\ncoatnet_bn_0_rw_224.sw_in1k,224,44.807,55.193,60.903,39.097,27.44,0.950,bicubic,-50.173,-38.247,-12\nmobilenetv4_hybrid_medium.ix_e550_r256_in1k,256,44.803,55.197
,60.563,39.437,11.07,0.950,bicubic,-50.017,-38.617,+36\nresmlp_24_224.fb_distilled_in1k,224,44.797,55.203,61.507,38.493,30.02,0.875,bicubic,-49.553,-37.403,+169\nfocalnet_tiny_srf.ms_in1k,224,44.787,55.213,60.997,39.003,28.43,0.900,bicubic,-50.233,-38.283,-24\nrepvit_m2.dist_in1k,224,44.777,55.223,61.873,38.127,8.80,0.950,bicubic,-49.683,-37.197,+132\nedgenext_small.usi_in1k,256,44.757,55.243,60.847,39.153,5.59,0.950,bicubic,-49.643,-38.073,+150\ncait_xxs36_384.fb_dist_in1k,384,44.757,55.243,59.427,40.573,17.37,1.000,bicubic,-50.493,-39.663,-105\ntiny_vit_11m_224.in1k,224,44.747,55.253,60.180,39.820,11.00,0.950,bicubic,-50.483,-39.030,-96\nresmlp_36_224.fb_distilled_in1k,224,44.730,55.270,61.157,38.843,44.69,0.875,bicubic,-49.840,-38.023,+95\ngernet_l.idstcv_in1k,256,44.717,55.283,58.993,41.007,31.08,0.875,bilinear,-50.213,-40.207,-8\nregnety_032.tv2_in1k,224,44.697,55.303,60.990,39.010,19.44,0.965,bicubic,-50.193,-38.090,+3\nxcit_tiny_24_p16_224.fb_dist_in1k,224,44.693,55.307,59.423,40.577,12.12,1.000,bicubic,-49.547,-39.527,+197\ndeit_base_patch16_384.fb_in1k,384,44.657,55.343,59.573,40.427,86.86,1.000,bicubic,-51.003,-39.877,-232\nresnet61q.ra2_in1k,256,44.627,55.373,58.910,41.090,36.85,0.900,bicubic,-50.413,-40.240,-39\necaresnetlight.miil_in1k,288,44.593,55.407,60.423,39.577,30.16,0.950,bicubic,-49.957,-38.767,+98\nregnety_064.ra3_in1k,224,44.570,55.430,58.493,41.507,30.58,0.950,bicubic,-51.020,-40.827,-211\nvit_relpos_small_patch16_224.sw_in1k,224,44.540,55.460,60.200,39.800,21.98,0.900,bicubic,-50.140,-38.900,+59\nswinv2_tiny_window16_256.ms_in1k,256,44.503,55.497,59.467,40.533,28.35,0.900,bicubic,-50.867,-39.833,-154\nmobilenetv3_large_150d.ra4_e3600_r256_in1k,256,44.493,55.507,61.330,38.670,14.62,0.950,bilinear,-49.707,-37.510,+199\nresnetv2_101.a1h_in1k,288,44.490,55.510,58.610,41.390,44.54,1.000,bicubic,-51.140,-40.690,-233\nvit_small_patch16_rope_224.naver_in1k,224,44.490,55.510,56.983,43.017,21.98,0.900,bicubic,-49.940,-41.387,+130\ninception_resnet_v2.
tf_ens_adv_in1k,299,44.487,55.513,58.170,41.830,55.84,0.897,bicubic,-49.643,-40.680,+223\ngmlp_s16_224.ra3_in1k,224,44.480,55.520,58.657,41.343,19.42,0.875,bicubic,-49.060,-40.123,+360\ntiny_vit_5m_224.dist_in22k_ft_in1k,224,44.470,55.530,60.903,39.097,5.39,0.950,bicubic,-50.160,-38.237,+61\nmobilenetv4_hybrid_medium.ix_e550_r256_in1k,320,44.450,55.550,60.270,39.730,11.07,1.000,bicubic,-50.780,-38.940,-119\nresnetrs101.tf_in1k,192,44.450,55.550,59.387,40.613,63.62,0.940,bicubic,-49.950,-39.733,+133\ncs3edgenet_x.c2_in1k,256,44.437,55.563,59.710,40.290,47.82,0.887,bicubic,-50.703,-39.530,-90\nrepvit_m1_0.dist_300e_in1k,224,44.417,55.583,61.430,38.570,7.30,0.950,bicubic,-49.353,-37.430,+297\nregnety_160.tv2_in1k,224,44.387,55.613,59.147,40.853,83.59,0.965,bicubic,-50.773,-40.063,-97\nrepvit_m1_0.dist_450e_in1k,224,44.353,55.647,61.450,38.550,7.30,0.950,bicubic,-49.927,-37.580,+164\ntresnet_l.miil_in1k,224,44.353,55.647,59.950,40.050,55.99,0.875,bilinear,-50.507,-39.300,+1\nvit_small_patch16_rope_mixed_224.naver_in1k,224,44.347,55.653,56.393,43.607,21.99,0.900,bicubic,-50.103,-42.287,+112\nmaxxvitv2_nano_rw_256.sw_in1k,256,44.337,55.663,58.723,41.277,23.70,0.950,bicubic,-51.103,-40.487,-196\nresnext101_32x4d.gluon_in1k,224,44.330,55.670,59.110,40.890,44.18,0.875,bicubic,-49.790,-39.810,+215\ngcvit_xtiny.in1k,224,44.247,55.753,60.023,39.977,19.98,0.875,bicubic,-50.803,-38.957,-65\neca_nfnet_l0.ra2_in1k,224,44.217,55.783,60.807,39.193,24.14,0.900,bicubic,-50.743,-38.333,-41\npoolformer_m48.sail_in1k,224,44.190,55.810,59.163,40.837,73.47,0.950,bicubic,-50.900,-39.947,-79\nvit_small_patch16_rope_ape_224.naver_in1k,224,44.163,55.837,56.490,43.510,22.06,0.900,bicubic,-50.167,-42.650,+141\nwide_resnet50_2.racm_in1k,224,44.157,55.843,59.667,40.333,68.88,0.875,bicubic,-50.503,-39.443,+43\nregnety_080_tv.tv2_in1k,224,44.147,55.853,58.823,41.177,39.38,0.965,bicubic,-51.163,-40.527,-157\nvit_srelpos_small_patch16_224.sw_in1k,224,44.133,55.867,59.680,40.320,21.97,0.900,bicubic,-50.
447,-39.460,+59\nregnetz_c16_evos.ch_in1k,320,44.130,55.870,61.003,38.997,13.49,0.950,bicubic,-51.530,-38.417,-264\ncrossvit_15_240.in1k,240,44.127,55.873,59.153,40.847,27.53,0.875,bicubic,-50.563,-39.927,+30\nresnet152s.gluon_in1k,224,44.117,55.883,58.760,41.240,60.32,0.875,bicubic,-50.633,-40.190,+7\nfastvit_sa12.apple_dist_in1k,256,44.110,55.890,59.067,40.933,11.58,0.900,bicubic,-50.590,-40.063,+25\nhgnet_small.paddle_in1k,224,44.107,55.893,60.517,39.483,24.36,0.965,bicubic,-50.753,-38.533,-17\nresnetv2_101x1_bit.goog_in21k_ft_in1k,448,44.100,55.900,62.050,37.950,44.54,1.000,bilinear,-51.230,-37.100,-174\nmaxvit_rmlp_nano_rw_256.sw_in1k,256,44.100,55.900,58.157,41.843,15.50,0.950,bicubic,-51.330,-40.923,-205\nseresnext50_32x4d.racm_in1k,224,44.097,55.903,59.373,40.627,27.56,0.875,bicubic,-50.783,-39.757,-27\npit_b_224.in1k,224,44.083,55.917,58.000,42.000,73.76,0.900,bicubic,-50.737,-40.820,-11\nconvnextv2_nano.fcmae_ft_in1k,288,44.077,55.923,60.177,39.823,15.62,1.000,bicubic,-51.083,-39.073,-120\ninception_resnet_v2.tf_in1k,299,44.067,55.933,57.987,42.013,55.84,0.897,bicubic,-50.263,-40.833,+126\nwide_resnet50_2.tv2_in1k,176,44.017,55.983,59.673,40.327,68.88,0.875,bilinear,-50.003,-39.437,+217\npnasnet5large.tf_in1k,331,43.983,56.017,56.817,43.183,86.06,0.911,bicubic,-51.387,-42.313,-188\necaresnet50t.ra2_in1k,256,43.970,56.030,60.030,39.970,25.57,0.875,bicubic,-50.760,-39.050,+3\npoolformer_m36.sail_in1k,224,43.933,56.067,59.110,40.890,56.17,0.950,bicubic,-51.127,-40.050,-87\nresnetaa50d.sw_in12k_ft_in1k,224,43.920,56.080,59.530,40.470,25.58,0.950,bicubic,-51.030,-39.730,-60\nresnet50.fb_ssl_yfcc100m_ft_in1k,224,43.903,56.097,61.923,38.077,25.56,0.875,bilinear,-50.427,-36.587,+118\nvit_wee_patch16_reg1_gap_256.sbb_in1k,256,43.903,56.097,59.383,40.617,13.42,0.950,bicubic,-50.557,-39.747,+76\npit_s_224.in1k,224,43.893,56.107,58.680,41.320,23.46,0.900,bicubic,-50.707,-40.250,+34\nresnext101_64x4d.gluon_in1k,224,43.890,56.110,58.713,41.287,83.46,0.875,bicubic,-50.46
0,-40.377,+113\ntf_efficientnetv2_s.in1k,384,43.873,56.127,58.727,41.273,21.46,1.000,bicubic,-51.887,-40.603,-317\ncoatnext_nano_rw_224.sw_in1k,224,43.847,56.153,58.583,41.417,14.70,0.900,bicubic,-51.023,-40.607,-40\nregnety_040.ra3_in1k,224,43.843,56.157,57.870,42.130,20.65,0.950,bicubic,-51.147,-41.270,-79\nvit_pwee_patch16_reg1_gap_256.sbb_in1k,256,43.840,56.160,58.800,41.200,15.25,0.950,bicubic,-50.550,-40.240,+94\nwide_resnet50_2.tv2_in1k,224,43.820,56.180,59.657,40.343,68.88,0.965,bilinear,-50.980,-39.603,-26\ncoat_lite_small.in1k,224,43.803,56.197,57.020,42.980,19.84,0.900,bicubic,-51.307,-42.000,-116\ncait_xxs36_224.fb_dist_in1k,224,43.800,56.200,58.800,41.200,17.30,1.000,bicubic,-50.140,-40.090,+216\necaresnet50d.miil_in1k,224,43.780,56.220,60.343,39.657,25.58,0.875,bicubic,-50.450,-38.647,+141\necaresnet101d_pruned.miil_in1k,224,43.780,56.220,59.660,40.340,24.88,0.875,bicubic,-50.670,-39.440,+70\nconvnext_nano.d1h_in1k,224,43.770,56.230,58.773,41.227,15.59,0.950,bicubic,-50.540,-40.407,+113\ntnt_s_legacy_patch16_224.in1k,224,43.767,56.233,59.240,40.760,23.76,0.900,bicubic,-50.803,-39.930,+31\npit_xs_distilled_224.in1k,224,43.757,56.243,60.650,39.350,11.00,0.900,bicubic,-49.533,-38.100,+362\nresnet152.tv2_in1k,176,43.757,56.243,57.643,42.357,60.19,0.875,bilinear,-50.293,-41.197,+189\nmobilenetv3_large_150d.ra4_e3600_r256_in1k,320,43.753,56.247,60.377,39.623,14.62,1.000,bilinear,-51.037,-38.773,-34\nregnetv_040.ra3_in1k,288,43.750,56.250,58.520,41.480,20.64,1.000,bicubic,-52.020,-40.840,-337\ncspresnext50.ra_in1k,256,43.727,56.273,60.240,39.760,20.57,0.887,bilinear,-50.513,-38.820,+131\nmobilevitv2_200.cvnets_in22k_ft_in1k,256,43.723,56.277,59.467,40.533,18.45,0.888,bicubic,-51.377,-39.673,-125\nedgenext_small.usi_in1k,320,43.687,56.313,59.933,40.067,5.59,1.000,bicubic,-51.163,-39.477,-46\ntf_efficientnet_b5.in1k,456,43.660,56.340,60.177,39.823,30.39,0.934,bicubic,-52.180,-39.113,-371\nswinv2_cr_small_224.sw_in1k,224,43.650,56.350,57.513,42.487,49.70,0.900,b
icubic,-51.770,-41.647,-234\nrexnet_150.nav_in1k,224,43.610,56.390,60.867,39.133,9.73,0.875,bicubic,-50.680,-38.083,+109\nswin_s3_tiny_224.ms_in1k,224,43.587,56.413,59.353,40.647,28.33,0.900,bicubic,-51.373,-39.797,-89\nresnet101.tv2_in1k,176,43.570,56.430,57.807,42.193,44.55,0.875,bilinear,-50.360,-40.933,+202\nxcit_tiny_12_p8_224.fb_dist_in1k,224,43.553,56.447,58.437,41.563,6.71,1.000,bicubic,-51.167,-40.733,-25\nefficientformer_l1.snap_dist_in1k,224,43.533,56.467,59.930,40.070,12.29,0.950,bicubic,-50.457,-39.130,+191\ncrossvit_small_240.in1k,240,43.530,56.470,58.970,41.030,26.86,0.875,bicubic,-51.060,-40.070,+8\nmaxvit_nano_rw_256.sw_in1k,256,43.527,56.473,57.507,42.493,15.45,0.950,bicubic,-51.953,-41.783,-258\nconvnextv2_pico.fcmae_ft_in1k,224,43.490,56.510,60.050,39.950,9.07,0.875,bicubic,-50.610,-38.780,+162\ntf_efficientnet_b4.in1k,380,43.487,56.513,59.590,40.410,19.34,0.922,bicubic,-52.013,-39.690,-266\nregnety_016.tv2_in1k,224,43.473,56.527,59.603,40.397,11.20,0.965,bicubic,-50.967,-39.437,+55\ncs3sedarknet_x.c2ns_in1k,288,43.467,56.533,58.810,41.190,35.40,1.000,bicubic,-51.963,-40.390,-252\nwide_resnet50_2.racm_in1k,288,43.460,56.540,59.043,40.957,68.88,0.950,bicubic,-51.670,-40.217,-149\nresnet101d.gluon_in1k,224,43.443,56.557,58.643,41.357,44.57,0.875,bicubic,-50.737,-40.287,+131\nefficientvit_b1.r288_in1k,288,43.440,56.560,57.847,42.153,9.10,1.000,bicubic,-50.740,-40.673,+131\nresnet101s.gluon_in1k,224,43.417,56.583,58.703,41.297,44.67,0.875,bicubic,-50.783,-40.327,+123\necaresnet50t.ra2_in1k,320,43.403,56.597,59.303,40.697,25.57,0.950,bicubic,-51.707,-39.807,-148\nnf_resnet50.ra2_in1k,256,43.360,56.640,60.593,39.407,25.56,0.940,bicubic,-50.780,-38.407,+141\ncspdarknet53.ra_in1k,256,43.353,56.647,59.477,40.523,27.64,0.887,bilinear,-50.787,-39.473,+139\ncoatnet_rmlp_nano_rw_224.sw_in1k,224,43.353,56.647,58.633,41.367,15.15,0.900,bicubic,-51.707,-40.567,-133\nresnet152.a1_in1k,224,43.347,56.653,56.347,43.653,60.19,0.950,bicubic,-51.343,-42.443,-27\neffici
entnet_b1.ra4_e3600_r240_in1k,288,43.313,56.687,58.987,41.013,7.79,1.000,bicubic,-51.227,-40.233,+16\neca_nfnet_l0.ra2_in1k,288,43.297,56.703,59.907,40.093,24.14,1.000,bicubic,-52.143,-39.283,-267\nxcit_tiny_12_p8_384.fb_dist_in1k,384,43.263,56.737,58.147,41.853,6.71,1.000,bicubic,-52.187,-41.043,-270\nconvmixer_768_32.in1k,224,43.260,56.740,59.363,40.637,21.11,0.960,bicubic,-51.150,-39.697,+49\necaresnet101d_pruned.miil_in1k,288,43.240,56.760,58.977,41.023,24.88,0.950,bicubic,-51.820,-40.143,-142\nxcit_tiny_24_p8_224.fb_in1k,224,43.233,56.767,57.297,42.703,12.11,1.000,bicubic,-51.637,-41.913,-83\ncs3sedarknet_x.c2ns_in1k,256,43.227,56.773,58.727,41.273,35.40,0.887,bicubic,-51.833,-40.453,-143\npoolformerv2_s24.sail_in1k,224,43.217,56.783,60.407,39.593,21.34,1.000,bicubic,-51.233,-38.573,+32\nrepvit_m0_9.dist_300e_in1k,224,43.207,56.793,60.447,39.553,5.49,0.950,bicubic,-50.243,-38.403,+300\nvisformer_small.in1k,224,43.200,56.800,57.970,42.030,40.22,0.900,bicubic,-51.770,-41.240,-119\nfbnetv3_d.ra2_in1k,224,43.193,56.807,59.537,40.463,10.31,0.950,bilinear,-50.037,-39.003,+332\nvit_small_patch32_384.augreg_in21k_ft_in1k,384,43.187,56.813,59.287,40.713,22.92,1.000,bicubic,-51.393,-39.853,-14\ntf_efficientnetv2_b3.in1k,240,43.167,56.833,58.910,41.090,14.36,0.904,bicubic,-51.063,-39.950,+96\nregnetx_160.tv2_in1k,224,43.167,56.833,57.450,42.550,54.28,0.965,bicubic,-52.093,-41.810,-217\nregnety_064.ra3_in1k,288,43.157,56.843,57.293,42.707,30.58,1.000,bicubic,-52.673,-41.927,-399\nvit_relpos_base_patch32_plus_rpn_256.sw_in1k,256,43.153,56.847,58.430,41.570,119.42,0.900,bicubic,-50.017,-39.890,+336\ncspresnet50.ra_in1k,256,43.140,56.860,59.250,40.750,21.62,0.887,bilinear,-50.770,-39.600,+172\nregnetx_080.tv2_in1k,224,43.133,56.867,57.973,42.027,39.57,0.965,bicubic,-51.607,-40.947,-63\nresnest26d.gluon_in1k,224,43.130,56.870,60.590,39.410,17.07,0.875,bilinear,-50.150,-38.200,+318\nrepvit_m0_9.dist_450e_in1k,224,43.130,56.870,60.437,39.563,5.49,0.950,bicubic,-50.340,-38.443,+2
81\nmobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k,224,43.123,56.877,58.270,41.730,8.46,0.900,bicubic,-51.017,-40.590,+118\ntwins_pcpvt_small.in1k,224,43.117,56.883,58.940,41.060,24.11,0.900,bicubic,-51.493,-40.200,-33\ncoatnet_nano_rw_224.sw_in1k,224,43.070,56.930,57.877,42.123,15.14,0.900,bicubic,-51.960,-41.313,-145\necaresnet50d.miil_in1k,288,43.060,56.940,59.483,40.517,25.58,0.950,bicubic,-51.620,-39.747,-50\nefficientnet_b1.ra4_e3600_r240_in1k,240,43.040,56.960,59.257,40.743,7.79,0.900,bicubic,-51.180,-39.833,+86\nresmlp_36_224.fb_in1k,224,43.010,56.990,59.413,40.587,44.69,0.875,bicubic,-50.660,-39.637,+225\nmobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k,256,42.993,57.007,58.033,41.967,8.46,0.950,bicubic,-51.567,-41.117,-19\nresnet152.gluon_in1k,224,42.973,57.027,57.773,42.227,60.19,0.875,bicubic,-51.067,-41.237,+137\nresnet50.b1k_in1k,224,42.957,57.043,58.923,41.077,25.56,0.950,bicubic,-50.743,-39.907,+217\ntwins_svt_small.in1k,224,42.940,57.060,58.457,41.543,24.06,0.900,bicubic,-51.830,-40.673,-83\nfastvit_sa12.apple_in1k,256,42.920,57.080,58.870,41.130,11.58,0.900,bicubic,-51.540,-40.230,+4\nresnet50d.ra4_e3600_r224_in1k,224,42.900,57.100,58.170,41.830,25.58,0.950,bicubic,-51.960,-41.030,-102\nfbnetv3_d.ra2_in1k,256,42.890,57.110,59.657,40.343,10.31,0.950,bilinear,-50.930,-39.023,+178\ntf_efficientnet_b1.ap_in1k,240,42.883,57.117,58.907,41.093,7.79,0.882,bicubic,-50.757,-39.853,+223\ndpn131.mx_in1k,224,42.883,57.117,57.167,42.833,79.25,0.875,bicubic,-50.907,-41.733,+187\ntf_efficientnet_lite4.in1k,380,42.873,57.127,57.723,42.277,13.01,0.920,bilinear,-52.027,-41.537,-124\ngcresnet50t.ra2_in1k,256,42.870,57.130,59.230,40.770,25.90,0.900,bicubic,-51.740,-39.760,-47\nresnet152c.gluon_in1k,224,42.850,57.150,57.793,42.207,60.21,0.875,bicubic,-51.040,-41.007,+156\nresnet50.tv2_in1k,224,42.840,57.160,58.543,41.457,25.56,0.965,bilinear,-51.750,-40.567,-44\ncs3darknet_x.c2ns_in1k,256,42.823,57.177,58.363,41.637,35.05,0.950,bicubic,-52.207,-40.827,-165\ncoatnet_0_rw_224.sw_in
1k,224,42.807,57.193,56.297,43.703,27.44,0.950,bicubic,-52.123,-42.723,-138\ntresnet_l.miil_in1k_448,448,42.803,57.197,58.943,41.057,55.99,0.875,bilinear,-52.607,-40.337,-294\ngcresnet50t.ra2_in1k,288,42.797,57.203,59.077,40.923,25.90,1.000,bicubic,-51.983,-40.053,-101\nlevit_256.fb_dist_in1k,224,42.787,57.213,57.913,42.087,18.89,0.900,bicubic,-51.633,-40.967,+6\nlevit_conv_256.fb_dist_in1k,224,42.783,57.217,57.913,42.087,18.89,0.900,bicubic,-51.627,-41.197,+9\nmobilevitv2_200.cvnets_in22k_ft_in1k_384,384,42.780,57.220,58.853,41.147,18.45,1.000,bicubic,-52.640,-40.427,-302\ncs3darknet_x.c2ns_in1k,288,42.770,57.230,58.157,41.843,35.05,1.000,bicubic,-52.500,-41.123,-256\nresnext50_32x4d.tv2_in1k,224,42.753,57.247,57.563,42.437,25.03,0.965,bilinear,-51.737,-41.427,-20\ndpn107.mx_in1k,224,42.740,57.260,57.277,42.723,86.92,0.875,bicubic,-51.270,-41.543,+123\nresnet152.a3_in1k,160,42.707,57.293,56.937,43.063,60.19,0.950,bicubic,-50.663,-41.643,+277\nseresnext50_32x4d.gluon_in1k,224,42.700,57.300,58.657,41.343,27.56,0.875,bicubic,-51.470,-40.263,+77\ntresnet_m.miil_in1k,224,42.700,57.300,58.200,41.800,31.39,0.875,bilinear,-51.400,-40.830,+100\ntnt_s_patch16_224.in1k,224,42.697,57.303,58.147,41.853,23.77,0.900,bicubic,-51.673,-40.903,+9\nresnet50d.ra2_in1k,224,42.693,57.307,58.750,41.250,25.58,0.875,bicubic,-51.367,-40.190,+104\nconvnext_nano_ols.d1h_in1k,224,42.693,57.307,57.663,42.337,15.65,0.950,bicubic,-51.457,-41.277,+81\nresnext101_32x8d.tv_in1k,224,42.600,57.400,58.370,41.630,88.79,0.875,bilinear,-51.180,-40.600,+166\nregnety_040.ra3_in1k,288,42.600,57.400,57.010,42.990,20.65,1.000,bicubic,-52.850,-42.400,-324\nresnetv2_50d_gn.ah_in1k,224,42.587,57.413,57.733,42.267,25.57,0.950,bicubic,-51.833,-41.247,-6\nconvnext_nano.d1h_in1k,288,42.583,57.417,57.520,42.480,15.59,1.000,bicubic,-52.307,-41.570,-145\nxcit_tiny_12_p16_384.fb_dist_in1k,384,42.573,57.427,58.057,41.943,6.72,1.000,bicubic,-51.987,-40.923,-55\nresnetaa50d.sw_in12k_ft_in1k,288,42.567,57.433,58.423,41.577,25
.58,1.000,bicubic,-52.703,-40.547,-271\nfastvit_s12.apple_dist_in1k,256,42.550,57.450,58.160,41.840,9.47,0.900,bicubic,-51.730,-40.830,+32\nefficientvit_b1.r256_in1k,256,42.540,57.460,57.497,42.503,9.10,1.000,bicubic,-51.160,-41.173,+182\nmobilevitv2_175.cvnets_in22k_ft_in1k,256,42.527,57.473,58.297,41.703,14.25,0.888,bicubic,-52.253,-40.753,-120\nresnetv2_50d_evos.ah_in1k,224,42.517,57.483,57.130,42.870,25.59,0.950,bicubic,-51.783,-41.770,+23\nseresnet50.ra2_in1k,224,42.510,57.490,58.747,41.253,28.09,0.875,bicubic,-51.610,-40.163,+80\nnf_resnet50.ra2_in1k,288,42.480,57.520,59.603,40.397,25.56,0.940,bicubic,-51.900,-39.457,-8\nseresnext50_32x4d.racm_in1k,288,42.473,57.527,58.123,41.877,27.56,0.950,bicubic,-52.537,-41.077,-187\nresnet50.b2k_in1k,224,42.467,57.533,58.137,41.863,25.56,0.950,bicubic,-51.353,-40.573,+144\nresnetrs101.tf_in1k,288,42.450,57.550,57.290,42.710,63.62,0.940,bicubic,-52.780,-41.840,-260\nresnet50.tv2_in1k,176,42.430,57.570,58.053,41.947,25.56,0.875,bilinear,-51.320,-40.697,+160\npoolformer_s36.sail_in1k,224,42.380,57.620,58.807,41.193,30.86,0.900,bicubic,-52.250,-40.233,-86\nrepvit_m1.dist_in1k,224,42.370,57.630,59.640,40.360,5.49,0.950,bicubic,-50.970,-39.000,+255\nmobileone_s4.apple_in1k,224,42.370,57.630,58.053,41.947,14.95,0.900,bilinear,-51.370,-40.877,+161\ntf_efficientnetv2_b3.in1k,300,42.327,57.673,58.013,41.987,14.36,0.904,bicubic,-52.813,-40.997,-236\nnest_tiny_jx.goog_in1k,224,42.327,57.673,57.013,42.987,17.06,0.875,bicubic,-52.613,-42.087,-176\nlegacy_senet154.in1k,224,42.323,57.677,56.693,43.307,115.09,0.875,bilinear,-52.417,-42.417,-123\nmobileone_s3.apple_in1k,224,42.300,57.700,59.327,40.673,10.17,0.900,bilinear,-50.700,-39.283,+299\nxcit_tiny_24_p16_224.fb_in1k,224,42.290,57.710,56.803,43.197,12.12,1.000,bicubic,-51.560,-42.097,+124\nhgnet_small.paddle_in1k,288,42.287,57.713,58.703,41.297,24.36,1.000,bicubic,-52.803,-40.507,-228\ntf_efficientnet_cc_b1_8e.in1k,240,42.287,57.713,58.540,41.460,39.72,0.882,bicubic,-51.293,-40.190,+1
93\ndeit_small_patch16_224.fb_in1k,224,42.270,57.730,58.007,41.993,22.05,0.900,bicubic,-51.730,-40.943,+91\nconvmixer_1024_20_ks9_p14.in1k,224,42.260,57.740,59.697,40.303,24.38,0.960,bicubic,-50.070,-38.713,+384\ntf_efficientnet_b2.aa_in1k,260,42.243,57.757,58.223,41.777,9.11,0.890,bicubic,-51.967,-40.797,+30\nresnet152.a1_in1k,288,42.240,57.760,55.483,44.517,60.19,1.000,bicubic,-52.900,-43.757,-244\nmambaout_kobe.in1k,224,42.233,57.767,57.787,42.213,9.14,1.000,bicubic,-51.887,-41.243,+61\nresnet101.a1_in1k,224,42.233,57.767,55.470,44.530,44.55,0.950,bicubic,-52.247,-43.290,-56\ndpn68b.ra_in1k,224,42.190,57.810,56.617,43.383,12.61,0.950,bicubic,-51.100,-41.913,+248\ndpn98.mx_in1k,224,42.170,57.830,56.677,43.323,61.57,0.875,bicubic,-51.780,-42.243,+92\nresnet50d.ra4_e3600_r224_in1k,288,42.157,57.843,57.640,42.360,25.58,1.000,bicubic,-53.133,-41.570,-305\ncait_xxs24_384.fb_dist_in1k,384,42.153,57.847,57.503,42.497,12.03,1.000,bicubic,-52.827,-41.727,-202\nresnext50_32x4d.gluon_in1k,224,42.140,57.860,57.693,42.307,25.03,0.875,bicubic,-51.560,-41.107,+155\nxception41p.ra3_in1k,299,42.067,57.933,56.817,43.183,26.91,0.940,bicubic,-53.013,-42.363,-233\nhgnet_tiny.paddle_in1k,224,42.053,57.947,59.000,41.000,14.74,0.965,bicubic,-51.827,-39.870,+101\nresnet50.ram_in1k,224,42.050,57.950,56.097,43.903,25.56,0.875,bicubic,-51.470,-42.713,+194\nconvnext_nano_ols.d1h_in1k,288,42.037,57.963,56.863,43.137,15.65,1.000,bicubic,-52.553,-42.227,-100\necaresnet50d_pruned.miil_in1k,224,42.023,57.977,58.340,41.660,19.94,0.875,bicubic,-51.827,-40.410,+103\nresnet50.b1k_in1k,288,42.000,58.000,58.170,41.830,25.56,1.000,bicubic,-52.510,-40.850,-76\nedgenext_small_rw.sw_in1k,256,41.990,58.010,58.693,41.307,7.83,0.900,bicubic,-51.840,-40.227,+110\nresnet50.b2k_in1k,288,41.980,58.020,57.680,42.320,25.56,1.000,bicubic,-52.340,-41.240,-21\npvt_v2_b1.in1k,224,41.967,58.033,59.580,40.420,14.01,0.900,bicubic,-51.553,-39.300,+184\nefficientnet_b2.ra_in1k,288,41.967,58.033,58.303,41.697,9.11,1.000,bicub
ic,-52.403,-40.467,-37\ntf_efficientnet_b3.in1k,300,41.907,58.093,58.123,41.877,12.23,0.904,bicubic,-52.373,-40.997,-11\nresnext50_32x4d.a1h_in1k,224,41.880,58.120,56.757,43.243,25.03,0.950,bicubic,-52.680,-42.253,-93\nmobilevitv2_150.cvnets_in22k_ft_in1k,256,41.877,58.123,57.890,42.110,10.59,0.888,bicubic,-52.863,-41.160,-149\nefficientformerv2_s1.snap_dist_in1k,224,41.857,58.143,57.970,42.030,6.19,0.950,bicubic,-51.983,-40.850,+99\nxcit_tiny_12_p16_224.fb_dist_in1k,224,41.843,58.157,57.303,42.697,6.72,1.000,bicubic,-51.487,-41.457,+224\nresnet152.a2_in1k,224,41.837,58.163,55.673,44.327,60.19,0.950,bicubic,-52.913,-43.377,-155\nfastvit_t12.apple_dist_in1k,256,41.830,58.170,57.600,42.400,7.55,0.900,bicubic,-52.270,-41.410,+40\nresnext50_32x4d.tv2_in1k,176,41.813,58.187,56.577,43.423,25.03,0.875,bilinear,-51.657,-42.163,+193\nresnet50d.ra2_in1k,288,41.797,58.203,58.090,41.910,25.58,0.950,bicubic,-53.043,-41.140,-177\npoolformer_s24.sail_in1k,224,41.763,58.237,58.520,41.480,21.39,0.900,bicubic,-52.667,-40.660,-64\ngcvit_xxtiny.in1k,224,41.750,58.250,58.333,41.667,12.00,0.875,bicubic,-52.340,-40.647,+40\nresnext50_32x4d.a1h_in1k,288,41.710,58.290,56.400,43.600,25.03,1.000,bicubic,-53.280,-42.690,-229\nedgenext_small_rw.sw_in1k,320,41.693,58.307,58.520,41.480,7.83,1.000,bicubic,-52.667,-40.640,-46\nsenet154.gluon_in1k,224,41.693,58.307,56.493,43.507,115.09,0.875,bicubic,-53.017,-42.477,-152\ndla102x2.in1k,224,41.680,58.320,58.030,41.970,41.28,0.875,bilinear,-52.350,-40.900,+49\nmobilevitv2_150.cvnets_in22k_ft_in1k_384,384,41.680,58.320,57.813,42.187,10.59,1.000,bicubic,-53.660,-41.517,-347\ninception_v4.tf_in1k,299,41.660,58.340,55.367,44.633,42.68,0.875,bicubic,-52.710,-43.463,-54\ngcresnext50ts.ch_in1k,288,41.657,58.343,57.340,42.660,15.67,1.000,bicubic,-52.843,-41.700,-93\nefficientvit_b1.r224_in1k,224,41.640,58.360,56.603,43.397,9.10,0.950,bicubic,-51.680,-42.017,+211\ncs3sedarknet_l.c2ns_in1k,256,41.633,58.367,57.360,42.640,21.91,0.887,bicubic,-53.417,-41.830,-252\
nhrnet_w64.ms_in1k,224,41.627,58.373,57.197,42.803,128.06,0.875,bilinear,-52.193,-41.733,+87\nseresnet50.ra2_in1k,288,41.603,58.397,57.953,42.047,28.09,0.950,bicubic,-53.167,-41.137,-178\nhaloregnetz_b.ra3_in1k,224,41.577,58.423,57.080,42.920,11.68,0.940,bicubic,-52.943,-41.880,-106\nmobilevitv2_175.cvnets_in22k_ft_in1k_384,384,41.557,58.443,57.857,42.143,14.25,1.000,bicubic,-53.673,-41.553,-319\nresnet50.c1_in1k,224,41.553,58.447,56.733,43.267,25.56,0.950,bicubic,-52.407,-42.107,+52\ncs3sedarknet_l.c2ns_in1k,288,41.543,58.457,57.483,42.517,21.91,0.950,bicubic,-53.547,-41.727,-274\ntf_efficientnet_cc_b0_8e.in1k,224,41.523,58.477,57.393,42.607,24.01,0.875,bicubic,-51.357,-41.207,+279\ntf_efficientnetv2_b2.in1k,208,41.517,58.483,57.370,42.630,10.10,0.890,bicubic,-52.093,-41.630,+138\nconvnextv2_pico.fcmae_ft_in1k,288,41.500,58.500,58.053,41.947,9.07,0.950,bicubic,-53.060,-40.837,-125\nnf_regnet_b1.ra2_in1k,256,41.490,58.510,58.893,41.107,10.22,0.900,bicubic,-51.960,-39.937,+176\nefficientnet_em.ra2_in1k,240,41.490,58.510,58.863,41.137,6.90,0.882,bicubic,-52.270,-40.057,+96\nefficientnet_el.ra_in1k,300,41.490,58.510,58.343,41.657,10.59,0.904,bicubic,-53.180,-40.717,-158\nresnetv2_50.a1h_in1k,224,41.490,58.510,56.807,43.193,25.55,0.950,bicubic,-52.820,-42.233,-49\nresnet50.a1h_in1k,224,41.463,58.537,56.793,43.207,25.56,1.000,bicubic,-52.747,-42.257,-21\nresnetaa50.a1h_in1k,224,41.450,58.550,56.423,43.577,25.56,0.950,bicubic,-53.010,-42.457,-100\nhalo2botnet50ts_256.a1h_in1k,256,41.450,58.550,56.263,43.737,22.64,0.950,bicubic,-53.570,-42.787,-258\ncait_xxs24_224.fb_dist_in1k,224,41.430,58.570,57.580,42.420,11.96,1.000,bicubic,-52.080,-41.260,+152\nswinv2_cr_tiny_ns_224.sw_in1k,224,41.413,58.587,57.133,42.867,28.33,0.900,bicubic,-53.367,-41.967,-197\nresnetv2_50d_evos.ah_in1k,288,41.400,58.600,56.547,43.453,25.59,1.000,bicubic,-53.500,-42.643,-230\nswin_tiny_patch4_window7_224.ms_in1k,224,41.397,58.603,57.333,42.667,28.29,0.900,bicubic,-53.293,-41.767,-171\nconvnext_tiny.
fb_in22k_ft_in1k,288,41.370,58.630,55.357,44.643,28.59,1.000,bicubic,-52.180,-43.263,+137\nresnext50_32x4d.ra_in1k,224,41.363,58.637,56.940,43.060,25.03,0.875,bicubic,-52.477,-42.090,+62\nefficientnet_b2.ra_in1k,256,41.360,58.640,57.993,42.007,9.11,0.875,bicubic,-52.740,-41.027,+5\nresnet101.a2_in1k,224,41.360,58.640,55.430,44.570,44.55,0.950,bicubic,-53.190,-43.470,-130\nfasternet_t2.in1k,224,41.350,58.650,58.470,41.530,14.98,1.000,bicubic,-51.900,-40.170,+195\nresnet152.tv_in1k,224,41.340,58.660,57.593,42.407,60.19,0.875,bilinear,-51.930,-41.087,+191\nresnet50.ra_in1k,224,41.330,58.670,57.467,42.533,25.56,0.875,bicubic,-52.100,-41.353,+162\nresnet50.ram_in1k,288,41.330,58.670,55.077,44.923,25.56,0.950,bicubic,-52.670,-43.803,+25\nxception71.tf_in1k,299,41.327,58.673,55.977,44.023,42.34,0.903,bicubic,-52.593,-43.013,+37\ncs3darknet_l.c2ns_in1k,288,41.320,58.680,57.387,42.613,21.16,0.950,bicubic,-53.380,-41.833,-187\ngcresnext50ts.ch_in1k,256,41.320,58.680,57.143,42.857,15.67,0.900,bicubic,-53.100,-41.917,-101\ngernet_s.idstcv_in1k,224,41.307,58.693,58.893,41.107,8.17,0.875,bilinear,-51.153,-39.607,+301\nswinv2_tiny_window8_256.ms_in1k,256,41.280,58.720,56.960,43.040,28.35,0.900,bicubic,-53.780,-42.280,-290\nresnet101.a1_in1k,288,41.273,58.727,54.253,45.747,44.55,1.000,bicubic,-53.617,-44.747,-236\ndpn68b.ra_in1k,288,41.257,58.743,55.047,44.953,12.61,1.000,bicubic,-52.493,-43.473,+75\nregnetz_b16.ra3_in1k,224,41.253,58.747,57.217,42.783,9.72,0.940,bicubic,-52.887,-41.773,-22\ncs3darknet_l.c2ns_in1k,256,41.250,58.750,57.170,42.830,21.16,0.887,bicubic,-53.050,-41.800,-72\ninception_v3.tf_adv_in1k,299,41.237,58.763,56.333,43.667,23.83,0.875,bicubic,-51.733,-42.157,+224\nresnet50.a1h_in1k,176,41.210,58.790,56.637,43.363,25.56,0.900,bicubic,-52.180,-42.123,+162\ndpn92.mx_in1k,224,41.210,58.790,56.207,43.793,37.67,0.875,bicubic,-52.940,-42.643,-28\nshvit_s4.in1k,256,41.197,58.803,56.707,43.293,16.59,0.875,bicubic,-52.363,-41.983,+114\nresnet101.a3_in1k,160,41.150,58.850,5
5.957,44.043,44.55,0.950,bicubic,-51.730,-42.703,+243\nresnetv2_50d_gn.ah_in1k,288,41.117,58.883,56.620,43.380,25.57,1.000,bicubic,-54.123,-42.410,-360\nresnetblur50.bt_in1k,224,41.113,58.887,57.097,42.903,25.56,0.875,bicubic,-52.597,-41.683,+75\nmambaout_femto.in1k,224,41.093,58.907,56.857,43.143,7.30,1.000,bicubic,-52.427,-41.943,+120\nresnet50.c1_in1k,288,41.087,58.913,56.543,43.457,25.56,1.000,bicubic,-53.443,-42.527,-150\nresnet50d.gluon_in1k,224,41.040,58.960,57.170,42.830,25.58,0.875,bicubic,-52.530,-41.900,+107\nnf_regnet_b1.ra2_in1k,288,41.023,58.977,58.190,41.810,10.22,0.900,bicubic,-52.887,-40.590,+20\necaresnet50d_pruned.miil_in1k,288,40.993,59.007,57.740,42.260,19.94,0.950,bicubic,-53.317,-41.190,-91\nresnetv2_50.a1h_in1k,288,40.973,59.027,56.463,43.537,25.55,1.000,bicubic,-53.737,-42.647,-209\nfbnetv3_b.ra2_in1k,224,40.937,59.063,58.760,41.240,8.60,0.950,bilinear,-51.983,-39.800,+220\nfbnetv3_b.ra2_in1k,256,40.923,59.077,58.673,41.327,8.60,0.950,bilinear,-52.727,-40.227,+82\ntf_efficientnetv2_b1.in1k,192,40.893,59.107,56.740,43.260,8.14,0.882,bicubic,-52.337,-41.930,+169\ninception_v3.gluon_in1k,299,40.887,59.113,55.693,44.307,23.83,0.875,bicubic,-52.713,-43.047,+93\nresnet34.ra4_e3600_r224_in1k,288,40.883,59.117,54.983,45.017,21.80,1.000,bicubic,-52.577,-43.787,+130\nese_vovnet39b.ra_in1k,224,40.877,59.123,57.013,42.987,24.57,0.875,bicubic,-52.973,-41.947,+23\nresnet152.a3_in1k,224,40.870,59.130,55.033,44.967,60.19,0.950,bicubic,-53.590,-44.047,-141\ncs3darknet_focus_l.c2ns_in1k,288,40.867,59.133,56.663,43.337,21.15,0.950,bicubic,-53.953,-42.427,-244\nlegacy_xception.tf_in1k,299,40.863,59.137,56.573,43.427,22.86,0.897,bicubic,-52.787,-42.387,+76\nresnet34d.ra2_in1k,224,40.860,59.140,56.507,43.493,21.82,0.875,bicubic,-51.820,-41.943,+248\ncs3darknet_focus_l.c2ns_in1k,256,40.850,59.150,56.567,43.433,21.15,0.887,bicubic,-53.390,-42.503,-77\nregnety_320.pycls_in1k,224,40.840,59.160,56.193,43.807,145.05,0.875,bicubic,-53.660,-42.977,-161\nmaxvit_rmlp_pico_
rw_256.sw_in1k,256,40.837,59.163,55.277,44.723,7.52,0.950,bicubic,-53.423,-43.723,-85\nresnetv2_34.ra4_e3600_r224_in1k,224,40.820,59.180,55.197,44.803,21.80,0.900,bicubic,-52.070,-43.473,+217\nresnet152.a2_in1k,288,40.810,59.190,54.317,45.683,60.19,1.000,bicubic,-54.190,-44.743,-304\nlevit_192.fb_dist_in1k,224,40.807,59.193,56.637,43.363,10.95,0.900,bicubic,-52.903,-42.123,+54\nregnetx_032.tv2_in1k,224,40.803,59.197,56.780,43.220,15.30,0.965,bicubic,-53.757,-42.040,-179\nresnet50_gn.a1h_in1k,224,40.797,59.203,55.703,44.297,25.56,0.940,bicubic,-53.383,-43.327,-64\nlevit_conv_192.fb_dist_in1k,224,40.790,59.210,56.687,43.313,10.95,0.900,bicubic,-52.920,-42.173,+52\nresnet101.gluon_in1k,224,40.787,59.213,56.230,43.770,44.55,0.875,bicubic,-53.033,-42.650,+20\ntiny_vit_5m_224.in1k,224,40.783,59.217,57.267,42.733,5.39,0.950,bicubic,-53.017,-41.173,+22\nhalonet50ts.a1h_in1k,256,40.777,59.223,55.370,44.630,22.73,0.940,bicubic,-53.923,-43.470,-224\neva02_tiny_patch14_336.mim_in22k_ft_in1k,336,40.767,59.233,56.167,43.833,5.76,1.000,bicubic,-53.693,-42.733,-161\nresnetv2_34d.ra4_e3600_r384_in1k,384,40.727,59.273,55.683,44.317,21.82,1.000,bicubic,-53.283,-43.247,-26\nhrnet_w40.ms_in1k,224,40.720,59.280,56.843,43.157,57.56,0.875,bilinear,-53.010,-41.877,+39\nvit_base_patch32_384.augreg_in1k,384,40.707,59.293,55.243,44.757,88.30,1.000,bicubic,-52.473,-43.557,+153\nresnet50.c2_in1k,224,40.697,59.303,55.720,44.280,25.56,0.950,bicubic,-53.203,-43.070,-8\nrepvgg_b1.rvgg_in1k,224,40.693,59.307,57.800,42.200,57.42,0.875,bilinear,-52.717,-41.000,+120\nskresnext50_32x4d.ra_in1k,224,40.687,59.313,56.000,44.000,27.48,0.875,bicubic,-53.293,-42.820,-24\nresmlp_24_224.fb_in1k,224,40.677,59.323,56.617,43.383,30.02,0.875,bicubic,-52.753,-42.213,+111\nresnet50.d_in1k,224,40.673,59.327,55.207,44.793,25.56,0.950,bicubic,-53.127,-43.733,+14\nlamhalobotnet50ts_256.a1h_in1k,256,40.670,59.330,56.137,43.863,22.57,0.950,bicubic,-54.080,-42.673,-253\nresnetaa50.a1h_in1k,288,40.663,59.337,56.157,43.843,25.
56,1.000,bicubic,-54.197,-42.873,-276\nresnetv2_34.ra4_e3600_r224_in1k,288,40.647,59.353,54.650,45.350,21.80,1.000,bicubic,-53.063,-44.150,+38\ntf_efficientnet_lite3.in1k,300,40.640,59.360,56.750,43.250,8.20,0.904,bilinear,-53.520,-42.270,-76\nese_vovnet39b.ra_in1k,288,40.637,59.363,56.693,43.307,24.57,0.950,bicubic,-53.863,-42.397,-184\nmambaout_kobe.in1k,288,40.630,59.370,56.370,43.630,9.14,1.000,bicubic,-54.160,-42.830,-268\necaresnet50t.a1_in1k,224,40.613,59.387,55.100,44.900,25.57,0.950,bicubic,-53.587,-43.640,-91\nconvnext_pico_ols.d1_in1k,224,40.610,59.390,56.753,43.247,9.06,0.950,bicubic,-53.100,-42.027,+29\nresnet50.am_in1k,224,40.593,59.407,57.333,42.667,25.56,0.875,bicubic,-53.037,-41.527,+49\nmobilevitv2_175.cvnets_in1k,256,40.573,59.427,56.217,43.783,14.25,0.888,bicubic,-53.727,-42.893,-124\nresnext50_32x4d.ra_in1k,288,40.570,59.430,56.180,43.820,25.03,0.950,bicubic,-53.740,-42.740,-132\ndla169.in1k,224,40.547,59.453,57.387,42.613,53.39,0.875,bilinear,-53.223,-41.443,+10\nresnet34.ra4_e3600_r224_in1k,224,40.537,59.463,55.100,44.900,21.80,0.900,bicubic,-52.163,-43.470,+212\nrepvgg_b2.rvgg_in1k,224,40.533,59.467,57.787,42.213,89.02,0.875,bilinear,-53.037,-40.923,+56\nregnetx_320.pycls_in1k,224,40.533,59.467,55.660,44.340,107.81,0.875,bicubic,-53.677,-43.180,-104\nxcit_tiny_12_p8_224.fb_in1k,224,40.527,59.473,55.563,44.437,6.71,1.000,bicubic,-53.823,-43.487,-147\npit_xs_224.in1k,224,40.520,59.480,56.520,43.480,10.62,0.900,bicubic,-52.380,-42.270,+178\nwide_resnet101_2.tv_in1k,224,40.510,59.490,55.987,44.013,126.89,0.875,bilinear,-53.260,-42.933,+5\ntresnet_m.miil_in1k_448,448,40.507,59.493,56.670,43.330,31.39,0.875,bilinear,-54.153,-42.310,-243\nefficientnet_b2_pruned.in1k,260,40.450,59.550,56.613,43.387,8.31,0.890,bicubic,-53.360,-42.297,-9\nregnetz_b16.ra3_in1k,288,40.443,59.557,56.037,43.963,9.72,1.000,bicubic,-54.257,-43.083,-256\nvit_base_patch16_384.augreg_in1k,384,40.423,59.577,53.270,46.730,86.86,1.000,bicubic,-54.047,-45.750,-193\nresnet50.a1_in1k
,224,40.417,59.583,54.670,45.330,25.56,0.950,bicubic,-53.543,-43.850,-46\ntf_efficientnet_b0.ap_in1k,224,40.397,59.603,56.837,43.163,5.29,0.875,bicubic,-52.223,-41.643,+211\nskresnet34.ra_in1k,224,40.390,59.610,56.763,43.237,22.28,0.875,bicubic,-52.180,-41.747,+218\neca_resnet33ts.ra2_in1k,288,40.380,59.620,57.340,42.660,19.68,1.000,bicubic,-53.890,-41.480,-134\nhgnet_tiny.paddle_in1k,288,40.377,59.623,56.563,43.437,14.74,1.000,bicubic,-53.933,-42.397,-149\nregnetx_160.pycls_in1k,224,40.337,59.663,56.107,43.893,54.28,0.875,bicubic,-53.583,-42.813,-45\nlegacy_seresnext101_32x4d.in1k,224,40.330,59.670,54.863,45.137,48.96,0.875,bilinear,-53.920,-44.077,-130\ncoat_mini.in1k,224,40.313,59.687,55.150,44.850,10.34,0.900,bicubic,-54.437,-43.840,-281\nresnet34d.ra2_in1k,288,40.310,59.690,56.160,43.840,21.82,0.950,bicubic,-53.330,-42.590,+26\ndensenet201.tv_in1k,224,40.300,59.700,56.757,43.243,20.01,0.875,bicubic,-52.440,-41.903,+188\ncoat_lite_mini.in1k,224,40.300,59.700,55.657,44.343,11.01,0.900,bicubic,-53.220,-42.943,+52\nxception65.tf_in1k,299,40.300,59.700,55.250,44.750,39.92,0.903,bicubic,-53.500,-43.380,-21\nsebotnet33ts_256.a1h_in1k,256,40.297,59.703,53.147,46.853,13.70,0.940,bicubic,-54.043,-45.393,-163\nefficientnet_el_pruned.in1k,300,40.290,59.710,56.830,43.170,10.59,0.904,bicubic,-53.780,-42.180,-82\nmobileone_s2.apple_in1k,224,40.287,59.713,57.993,42.007,7.88,0.900,bilinear,-52.343,-40.687,+196\nresnet101.a2_in1k,288,40.260,59.740,54.237,45.763,44.55,1.000,bicubic,-54.630,-45.013,-326\nresnet50.ra_in1k,288,40.250,59.750,56.253,43.747,25.56,0.950,bicubic,-53.920,-42.607,-113\nresnext50_32x4d.a1_in1k,224,40.230,59.770,54.167,45.833,25.03,0.950,bicubic,-53.570,-44.743,-25\neca_resnet33ts.ra2_in1k,256,40.190,59.810,56.980,43.020,19.68,0.900,bicubic,-53.690,-42.040,-48\nresnet50.c2_in1k,288,40.180,59.820,55.280,44.720,25.56,1.000,bicubic,-54.090,-43.760,-149\nconvnext_pico.d1_in1k,224,40.177,59.823,56.040,43.960,9.05,0.875,bicubic,-53.333,-42.740,+44\nhrnet_w48.ms_in
1k,224,40.163,59.837,56.613,43.387,77.47,0.875,bilinear,-53.877,-42.407,-84\nresnext50d_32x4d.bt_in1k,224,40.160,59.840,55.673,44.327,25.05,0.875,bicubic,-53.680,-43.217,-41\npoolformerv2_s12.sail_in1k,224,40.147,59.853,57.460,42.540,11.89,1.000,bicubic,-52.763,-41.070,+148\nresnetblur50.bt_in1k,288,40.147,59.853,56.230,43.770,25.56,0.950,bicubic,-54.013,-42.730,-120\nmobilevitv2_200.cvnets_in1k,256,40.137,59.863,55.583,44.417,18.45,0.888,bicubic,-54.423,-43.477,-242\ndarknetaa53.c2ns_in1k,288,40.123,59.877,55.767,44.233,36.02,1.000,bilinear,-54.127,-43.203,-149\nhrnet_w30.ms_in1k,224,40.120,59.880,57.083,42.917,37.71,0.875,bilinear,-53.280,-41.767,+69\nefficientnet_b0.ra4_e3600_r224_in1k,256,40.117,59.883,56.137,43.863,5.29,1.000,bicubic,-53.383,-42.473,+40\nvit_base_patch16_224.sam_in1k,224,40.113,59.887,55.553,44.447,86.57,0.900,bicubic,-53.777,-43.337,-62\nresnetv2_34d.ra4_e3600_r384_in1k,448,40.110,59.890,55.497,44.503,21.82,1.000,bicubic,-54.390,-43.503,-230\nfastvit_t12.apple_in1k,256,40.080,59.920,55.247,44.753,7.55,0.900,bicubic,-53.420,-43.593,+38\nlegacy_seresnet152.in1k,224,40.070,59.930,55.883,44.117,66.82,0.875,bilinear,-53.350,-42.967,+56\nresnet50_gn.a1h_in1k,288,40.047,59.953,54.953,45.047,25.56,0.950,bicubic,-54.573,-44.197,-271\ntf_efficientnet_b1.aa_in1k,240,40.043,59.957,56.180,43.820,7.79,0.882,bicubic,-53.677,-42.620,-21\nefficientnet_b0.ra4_e3600_r224_in1k,224,40.027,59.973,55.783,44.217,5.29,0.900,bicubic,-53.103,-42.927,+96\nresnet50.d_in1k,288,40.000,60.000,54.670,45.330,25.56,1.000,bicubic,-54.490,-44.350,-233\nregnetx_080.pycls_in1k,224,39.987,60.013,56.077,43.923,39.57,0.875,bicubic,-53.803,-42.753,-43\nresnet101c.gluon_in1k,224,39.983,60.017,55.233,44.767,44.57,0.875,bicubic,-53.747,-43.587,-27\nseresnet50.a1_in1k,224,39.973,60.027,55.193,44.807,28.09,0.950,bicubic,-53.577,-43.377,+16\nshvit_s3.in1k,224,39.950,60.050,56.107,43.893,14.25,0.875,bicubic,-52.810,-42.393,+153\nseresnet33ts.ra2_in1k,288,39.930,60.070,56.350,43.650,19.78,1.00
0,bicubic,-54.330,-42.600,-168\nfastvit_s12.apple_in1k,256,39.917,60.083,54.777,45.223,9.47,0.900,bicubic,-53.743,-43.953,-14\nresmlp_12_224.fb_distilled_in1k,224,39.883,60.117,57.423,42.577,15.35,0.875,bicubic,-52.997,-41.227,+135\nconvnext_pico_ols.d1_in1k,288,39.840,60.160,55.583,44.417,9.06,1.000,bicubic,-54.190,-43.427,-103\nresnet34.a1_in1k,224,39.820,60.180,54.677,45.323,21.80,0.950,bicubic,-52.110,-43.693,+232\nseresnet33ts.ra2_in1k,256,39.810,60.190,56.517,43.483,19.78,0.900,bicubic,-54.440,-42.253,-167\ntf_efficientnetv2_b0.in1k,224,39.803,60.197,56.283,43.717,7.14,0.875,bicubic,-53.257,-42.427,+90\ntf_efficientnetv2_b0.in1k,192,39.800,60.200,56.257,43.743,7.14,0.875,bicubic,-52.460,-42.013,+204\necaresnet50t.a3_in1k,160,39.793,60.207,55.553,44.447,25.57,0.950,bicubic,-52.817,-43.077,+164\nres2net50_26w_8s.in1k,224,39.767,60.233,54.937,45.063,48.40,0.875,bilinear,-53.653,-43.733,+40\nres2net101_26w_4s.in1k,224,39.763,60.237,54.583,45.417,45.21,0.875,bilinear,-53.767,-44.277,+8\ndarknetaa53.c2ns_in1k,256,39.760,60.240,55.430,44.570,36.02,0.887,bilinear,-54.030,-43.510,-59\nvit_small_patch32_224.augreg_in21k_ft_in1k,224,39.757,60.243,55.290,44.710,22.88,0.900,bicubic,-52.493,-43.190,+201\ndarknet53.c2ns_in1k,288,39.743,60.257,55.367,44.633,41.61,1.000,bicubic,-54.627,-43.693,-217\nregnetx_120.pycls_in1k,224,39.737,60.263,55.663,44.337,46.11,0.875,bicubic,-54.563,-43.527,-196\nresnet50d.a1_in1k,224,39.737,60.263,54.137,45.863,25.58,0.950,bicubic,-54.203,-44.493,-97\nmambaout_femto.in1k,288,39.710,60.290,55.657,44.343,7.30,1.000,bicubic,-54.500,-43.393,-171\nhrnet_w44.ms_in1k,224,39.690,60.310,55.327,44.673,67.06,0.875,bilinear,-53.960,-43.433,-29\nresnetv2_34d.ra4_e3600_r224_in1k,224,39.680,60.320,54.177,45.823,21.82,0.900,bicubic,-53.360,-44.523,+85\nlambda_resnet50ts.a1h_in1k,256,39.667,60.333,54.307,45.693,21.54,0.950,bicubic,-54.903,-44.353,-282\ndla102x.in1k,224,39.653,60.347,56.490,43.510,26.31,0.875,bilinear,-53.877,-42.130,-4\nxception41.tf_in1k,299,3
9.653,60.347,54.997,45.003,26.97,0.903,bicubic,-53.817,-43.673,+15\ntf_efficientnetv2_b1.in1k,240,39.640,60.360,55.470,44.530,8.14,0.882,bicubic,-54.090,-43.380,-54\ntf_efficientnet_b2.in1k,260,39.630,60.370,56.187,43.813,9.11,0.890,bicubic,-54.110,-42.483,-57\ndensenet161.tv_in1k,224,39.630,60.370,56.163,43.837,28.68,0.875,bicubic,-53.320,-42.527,+96\nsehalonet33ts.ra2_in1k,256,39.623,60.377,53.980,46.020,13.69,0.940,bicubic,-54.897,-44.800,-272\nresmlp_big_24_224.fb_in1k,224,39.593,60.407,54.773,45.227,129.14,0.875,bicubic,-54.677,-44.267,-195\nmixnet_xl.ra_in1k,224,39.563,60.437,55.840,44.160,11.90,0.875,bicubic,-54.687,-43.000,-190\nhrnet_w32.ms_in1k,224,39.550,60.450,56.247,43.753,41.23,0.875,bilinear,-53.420,-42.493,+85\nvit_small_patch16_384.augreg_in1k,384,39.530,60.470,54.210,45.790,22.20,1.000,bicubic,-55.120,-44.930,-313\ngcresnet33ts.ra2_in1k,256,39.510,60.490,55.847,44.153,19.88,0.900,bicubic,-54.380,-43.083,-105\nxcit_tiny_12_p16_224.fb_in1k,224,39.497,60.503,55.030,44.970,6.72,1.000,bicubic,-53.043,-43.620,+150\nresnetv2_34d.ra4_e3600_r224_in1k,288,39.480,60.520,53.560,46.440,21.82,1.000,bicubic,-54.280,-45.350,-70\nlevit_128.fb_dist_in1k,224,39.470,60.530,55.337,44.663,9.21,0.900,bicubic,-53.580,-43.353,+68\nlevit_conv_128.fb_dist_in1k,224,39.460,60.540,55.447,44.553,9.21,0.900,bicubic,-53.600,-43.253,+63\nrexnet_130.nav_in1k,224,39.447,60.553,56.620,43.380,7.56,0.875,bicubic,-54.223,-42.180,-49\necaresnet50t.a1_in1k,288,39.437,60.563,53.630,46.370,25.57,1.000,bicubic,-55.453,-45.400,-387\nconvnext_pico.d1_in1k,288,39.433,60.567,55.370,44.630,9.05,0.950,bicubic,-54.607,-43.360,-140\ndarknet53.c2ns_in1k,256,39.423,60.577,55.083,44.917,41.61,0.887,bicubic,-54.637,-43.857,-147\nconvnextv2_femto.fcmae_ft_in1k,224,39.400,60.600,55.597,44.403,5.23,0.875,bicubic,-53.790,-43.063,+42\nresnetv2_50x1_bit.goog_in21k_ft_in1k,448,39.397,60.603,57.883,42.117,25.55,1.000,bilinear,-55.363,-41.287,-359\nregnety_120.pycls_in1k,224,39.397,60.603,55.347,44.653,51.82,0.87
5,bicubic,-54.673,-43.673,-152\nregnety_064.pycls_in1k,224,39.390,60.610,55.823,44.177,30.58,0.875,bicubic,-54.730,-43.137,-164\ninception_v3.tf_in1k,299,39.377,60.623,54.350,45.650,23.83,0.875,bicubic,-53.833,-44.130,+36\nstarnet_s4.in1k,224,39.370,60.630,55.610,44.390,7.48,0.875,bicubic,-54.130,-43.220,-15\ntf_efficientnet_el.in1k,300,39.363,60.637,55.473,44.527,10.59,0.904,bicubic,-54.997,-43.567,-244\ngcresnet33ts.ra2_in1k,288,39.343,60.657,55.917,44.083,19.88,1.000,bicubic,-54.997,-43.053,-239\ndensenetblur121d.ra_in1k,224,39.330,60.670,56.667,43.333,8.00,0.875,bicubic,-53.070,-41.743,+146\nrepghostnet_200.in1k,224,39.300,60.700,56.513,43.487,9.80,0.875,bicubic,-54.240,-42.077,-32\nresnet101.tv_in1k,224,39.297,60.703,55.857,44.143,44.55,0.875,bilinear,-53.593,-42.793,+86\nregnety_160.pycls_in1k,224,39.297,60.703,55.393,44.607,83.59,0.875,bicubic,-54.833,-43.757,-175\nresnext50_32x4d.a2_in1k,224,39.293,60.707,53.733,46.267,25.03,0.950,bicubic,-54.477,-44.937,-91\nresnet101.a3_in1k,224,39.280,60.720,53.703,46.297,44.55,0.950,bicubic,-54.590,-45.057,-122\nresnet50s.gluon_in1k,224,39.267,60.733,55.093,44.907,25.68,0.875,bicubic,-54.313,-43.617,-50\nresnext50_32x4d.a1_in1k,288,39.267,60.733,53.370,46.630,25.03,1.000,bicubic,-55.103,-45.800,-254\nmobilevitv2_150.cvnets_in1k,256,39.260,60.740,55.193,44.807,10.59,0.888,bicubic,-54.820,-43.717,-167\ndensenet169.tv_in1k,224,39.193,60.807,55.850,44.150,14.15,0.875,bicubic,-53.147,-42.720,+143\ntf_efficientnetv2_b2.in1k,260,39.187,60.813,54.617,45.383,10.10,0.890,bicubic,-54.873,-44.363,-164\nswiftformer_s.dist_in1k,224,39.163,60.837,55.483,44.517,6.09,0.950,bicubic,-53.877,-43.067,+46\nresnext50d_32x4d.bt_in1k,288,39.153,60.847,54.417,45.583,25.05,0.950,bicubic,-55.287,-44.643,-280\nlegacy_seresnet101.in1k,224,39.120,60.880,55.113,44.887,49.33,0.875,bilinear,-54.170,-43.677,+7\necaresnet50t.a2_in1k,224,39.120,60.880,54.257,45.743,25.57,0.950,bicubic,-55.140,-44.753,-226\nstarnet_s3.in1k,224,39.080,60.920,55.720,44.280,5.7
5,0.875,bicubic,-54.060,-42.920,+26\nefficientnet_b1_pruned.in1k,240,39.067,60.933,55.793,44.207,6.33,0.882,bicubic,-53.993,-42.897,+36\nrepvgg_b1g4.rvgg_in1k,224,39.043,60.957,56.500,43.500,39.97,0.875,bilinear,-54.007,-42.050,+36\nresnet50.a1_in1k,288,38.993,61.007,53.267,46.733,25.56,1.000,bicubic,-55.317,-45.933,-247\nresnext50_32x4d.a3_in1k,160,38.987,61.013,54.183,45.817,25.03,0.950,bicubic,-53.363,-44.077,+131\nregnety_080.pycls_in1k,224,38.967,61.033,55.203,44.797,39.18,0.875,bicubic,-54.953,-43.877,-147\ninception_v3.tv_in1k,299,38.957,61.043,53.827,46.173,23.83,0.875,bicubic,-53.943,-44.493,+65\ncrossvit_9_dagger_240.in1k,240,38.933,61.067,54.920,45.080,8.78,0.875,bicubic,-53.827,-43.630,+84\nseresnet50.a2_in1k,224,38.917,61.083,53.810,46.190,28.09,0.950,bicubic,-54.773,-44.840,-88\ndla102.in1k,224,38.913,61.087,55.377,44.623,33.27,0.875,bilinear,-54.367,-43.493,-1\nlegacy_seresnext50_32x4d.in1k,224,38.913,61.087,54.627,45.373,27.56,0.875,bilinear,-54.497,-44.273,-19\nresnet33ts.ra2_in1k,256,38.903,61.097,55.543,44.457,19.68,0.900,bicubic,-54.737,-43.237,-81\nvisformer_tiny.in1k,224,38.870,61.130,55.047,44.953,10.32,0.900,bicubic,-54.100,-43.503,+40\nres2net101d.in1k,224,38.843,61.157,53.020,46.980,45.23,0.875,bilinear,-55.707,-46.060,-328\nregnety_040.pycls_in1k,224,38.837,61.163,55.613,44.387,20.65,0.875,bicubic,-54.773,-43.197,-77\nregnetx_040.pycls_in1k,224,38.800,61.200,55.507,44.493,22.12,0.875,bicubic,-54.890,-43.433,-96\nefficientvit_m5.r224_in1k,224,38.793,61.207,55.000,45.000,12.47,0.875,bicubic,-53.337,-43.500,+146\nregnetx_032.pycls_in1k,224,38.770,61.230,55.153,44.847,15.30,0.875,bicubic,-54.530,-43.547,-13\nresnet50d.a2_in1k,224,38.770,61.230,53.583,46.417,25.58,0.950,bicubic,-55.230,-45.097,-172\ndensenet121.ra_in1k,224,38.767,61.233,56.233,43.767,7.98,0.875,bicubic,-53.183,-42.047,+151\nresnet32ts.ra2_in1k,256,38.763,61.237,55.847,44.153,17.96,0.900,bicubic,-54.817,-43.003,-77\nres2net50_14w_8s.in1k,224,38.710,61.290,54.177,45.823,25.06,0.8
75,bilinear,-54.310,-44.523,+24\ndla60_res2net.in1k,224,38.700,61.300,54.617,45.383,20.85,0.875,bilinear,-54.720,-44.223,-37\nres2net50_26w_6s.in1k,224,38.700,61.300,53.780,46.220,37.05,0.875,bilinear,-54.870,-44.980,-76\ndpn68.mx_in1k,224,38.693,61.307,54.733,45.267,12.61,0.875,bicubic,-53.587,-43.897,+120\ndensenetblur121d.ra_in1k,288,38.670,61.330,55.627,44.373,8.00,0.950,bicubic,-54.340,-42.973,+22\nwide_resnet50_2.tv_in1k,224,38.667,61.333,54.613,45.387,68.88,0.875,bilinear,-54.803,-44.357,-55\nresnet33ts.ra2_in1k,288,38.660,61.340,55.223,44.777,19.68,1.000,bicubic,-55.290,-43.647,-174\nselecsls60.in1k,224,38.653,61.347,55.710,44.290,30.67,0.875,bicubic,-54.337,-43.120,+21\nresnet50.bt_in1k,224,38.650,61.350,55.470,44.530,25.56,0.875,bicubic,-54.460,-43.290,+3\nregnetx_016.tv2_in1k,224,38.623,61.377,54.783,45.217,9.19,0.965,bicubic,-55.407,-44.137,-191\ndpn68b.mx_in1k,224,38.620,61.380,55.183,44.817,12.61,0.875,bicubic,-54.340,-43.307,+26\ndla60x.in1k,224,38.613,61.387,55.503,44.497,17.35,0.875,bilinear,-54.607,-43.217,-16\nresnet50.a2_in1k,224,38.603,61.397,53.107,46.893,25.56,0.950,bicubic,-55.127,-45.653,-124\nselecsls60b.in1k,224,38.587,61.413,55.330,44.670,32.77,0.875,bicubic,-54.893,-43.510,-65\nresnetrs50.tf_in1k,160,38.587,61.413,54.083,45.917,35.69,0.910,bicubic,-54.333,-44.517,+31\ntf_efficientnet_b0.aa_in1k,224,38.557,61.443,56.133,43.867,5.29,0.875,bicubic,-53.833,-42.347,+96\necaresnet26t.ra2_in1k,256,38.533,61.467,55.383,44.617,16.01,0.875,bicubic,-54.887,-43.297,-49\nresnet32ts.ra2_in1k,288,38.520,61.480,55.550,44.450,17.96,1.000,bicubic,-55.080,-43.290,-97\nseresnet50.a1_in1k,288,38.513,61.487,53.403,46.597,28.09,1.000,bicubic,-55.697,-45.507,-252\nhardcorenas_f.miil_green_in1k,224,38.510,61.490,55.720,44.280,8.20,0.875,bilinear,-54.520,-42.930,+3\ndla60_res2next.in1k,224,38.503,61.497,54.987,45.013,17.03,0.875,bilinear,-55.107,-43.703,-103\nhrnet_w18_small_v2.gluon_in1k,224,38.500,61.500,56.247,43.753,15.60,0.875,bicubic,-54.480,-42.533,+8\nrep
vgg_a2.rvgg_in1k,224,38.497,61.503,55.820,44.180,28.21,0.875,bilinear,-54.163,-42.740,+63\nregnetx_064.pycls_in1k,224,38.490,61.510,55.030,44.970,26.21,0.875,bicubic,-55.180,-43.680,-123\nresnet50.gluon_in1k,224,38.460,61.540,54.893,45.107,25.56,0.875,bicubic,-54.110,-43.647,+71\nresmlp_12_224.fb_in1k,224,38.450,61.550,56.363,43.637,15.35,0.875,bicubic,-53.680,-42.207,+115\ntf_efficientnet_cc_b0_4e.in1k,224,38.440,61.560,55.180,44.820,13.31,0.875,bicubic,-54.420,-43.250,+33\nresnet50d.a1_in1k,288,38.417,61.583,52.867,47.133,25.58,1.000,bicubic,-55.943,-45.923,-307\nhrnet_w18.ms_in1k,224,38.390,61.610,55.790,44.210,21.30,0.875,bilinear,-54.360,-42.860,+43\nghostnetv2_160.in1k,224,38.367,61.633,55.647,44.353,12.39,0.875,bicubic,-54.763,-43.013,-23\ntinynet_a.in1k,192,38.347,61.653,55.497,44.503,6.19,0.875,bicubic,-54.473,-43.103,+32\nregnety_008_tv.tv2_in1k,224,38.330,61.670,54.383,45.617,6.43,0.965,bicubic,-54.830,-44.297,-28\nresnet34.a1_in1k,288,38.323,61.677,52.350,47.650,21.80,1.000,bicubic,-54.697,-46.240,-7\ndensenet121.ra_in1k,288,38.253,61.747,55.217,44.783,7.98,0.950,bicubic,-54.497,-43.413,+39\nmixnet_l.ft_in1k,224,38.237,61.763,54.870,45.130,7.33,0.875,bicubic,-55.033,-43.880,-44\nconvnext_femto.d1_in1k,224,38.217,61.783,54.710,45.290,5.22,0.875,bicubic,-54.663,-43.800,+21\necaresnet50t.a3_in1k,224,38.203,61.797,53.593,46.407,25.57,0.950,bicubic,-55.657,-45.517,-187\nresnet50d.a3_in1k,160,38.187,61.813,53.827,46.173,25.58,0.950,bicubic,-54.093,-44.503,+87\npoolformer_s12.sail_in1k,224,38.180,61.820,56.390,43.610,11.92,0.900,bicubic,-54.340,-42.000,+61\nregnety_032.pycls_in1k,224,38.177,61.823,54.473,45.527,19.44,0.875,bicubic,-55.313,-44.487,-92\nefficientnet_b1.ft_in1k,256,38.170,61.830,54.033,45.967,7.79,1.000,bicubic,-54.850,-44.677,-17\nhardcorenas_e.miil_green_in1k,224,38.147,61.853,55.217,44.783,8.07,0.875,bilinear,-54.803,-43.463,-1\necaresnet50t.a2_in1k,288,38.147,61.853,53.023,46.977,25.57,1.000,bicubic,-56.433,-46.027,-391\nvit_base_patch16_224.a
ugreg_in1k,224,38.140,61.860,50.707,49.293,86.57,0.900,bicubic,-55.200,-47.993,-63\ncoat_lite_tiny.in1k,224,38.077,61.923,53.463,46.537,5.72,0.900,bicubic,-54.803,-45.057,+11\ngmixer_24_224.ra3_in1k,224,37.983,62.017,52.147,47.853,24.72,0.875,bicubic,-54.687,-46.093,+38\nhardcorenas_c.miil_green_in1k,224,37.980,62.020,55.700,44.300,5.52,0.875,bilinear,-54.340,-42.630,+73\nresnext50_32x4d.a2_in1k,288,37.967,62.033,52.377,47.623,25.03,1.000,bicubic,-56.233,-46.463,-276\nresnet50.a3_in1k,160,37.960,62.040,53.320,46.680,25.56,0.950,bicubic,-53.700,-44.740,+122\nresnetrs50.tf_in1k,224,37.950,62.050,53.300,46.700,35.69,0.910,bicubic,-56.100,-45.410,-238\nmobilevitv2_125.cvnets_in1k,256,37.927,62.073,54.103,45.897,7.48,0.888,bicubic,-55.553,-44.737,-102\nres2net50_26w_4s.in1k,224,37.897,62.103,53.133,46.867,25.70,0.875,bilinear,-55.293,-45.737,-52\nshvit_s2.in1k,224,37.890,62.110,54.397,45.603,11.48,0.875,bicubic,-53.320,-43.843,+147\nresnet50c.gluon_in1k,224,37.887,62.113,54.203,45.797,25.58,0.875,bicubic,-55.043,-44.507,-11\nefficientnet_es.ra_in1k,224,37.853,62.147,54.987,45.013,5.44,0.875,bicubic,-55.097,-43.813,-16\nresnest14d.gluon_in1k,224,37.827,62.173,56.613,43.387,10.61,0.875,bilinear,-53.313,-41.737,+149\nefficientformerv2_s0.snap_dist_in1k,224,37.813,62.187,54.017,45.983,3.60,0.950,bicubic,-54.037,-44.353,+101\nconvnext_femto.d1_in1k,288,37.810,62.190,54.287,45.713,5.22,0.950,bicubic,-55.650,-44.393,-102\nmobilenetv1_125.ra4_e3600_r224_in1k,256,37.803,62.197,53.850,46.150,6.27,1.000,bicubic,-54.747,-44.750,+39\nmobilenetv1_125.ra4_e3600_r224_in1k,224,37.787,62.213,54.193,45.807,6.27,0.900,bicubic,-54.293,-44.107,+85\nresnext50_32x4d.tv_in1k,224,37.787,62.213,54.143,45.857,25.03,0.875,bilinear,-55.113,-44.547,-10\npit_ti_distilled_224.in1k,224,37.783,62.217,55.690,44.310,5.10,0.900,bicubic,-52.957,-42.550,+160\nseresnet50.a2_in1k,288,37.740,62.260,52.313,47.687,28.09,1.000,bicubic,-56.710,-46.597,-370\nresnet26t.ra2_in1k,256,37.733,62.267,55.313,44.687,16.01,0.9
40,bicubic,-54.947,-43.287,+16\nresnet34.a2_in1k,224,37.707,62.293,52.573,47.427,21.80,0.950,bicubic,-54.063,-45.687,+97\nefficientnet_b1.ft_in1k,224,37.687,62.313,53.870,46.130,7.79,0.875,bicubic,-54.673,-44.490,+48\nseresnet50.a3_in1k,160,37.623,62.377,53.267,46.733,28.09,0.950,bicubic,-53.647,-44.963,+127\nhardcorenas_d.miil_green_in1k,224,37.593,62.407,54.723,45.277,7.50,0.875,bilinear,-55.027,-43.687,+20\necaresnet26t.ra2_in1k,320,37.593,62.407,54.380,45.620,16.01,0.950,bicubic,-56.377,-44.570,-240\nvit_base_patch32_224.augreg_in1k,224,37.590,62.410,51.867,48.133,88.22,0.900,bicubic,-52.990,-46.333,+158\nresnet50.bt_in1k,288,37.577,62.423,53.763,46.237,25.56,0.950,bicubic,-56.433,-45.127,-249\nconvnext_femto_ols.d1_in1k,224,37.533,62.467,53.413,46.587,5.23,0.875,bicubic,-55.377,-45.267,-25\nres2next50.in1k,224,37.530,62.470,52.870,47.130,24.67,0.875,bilinear,-55.610,-45.780,-68\nfastvit_t8.apple_dist_in1k,256,37.513,62.487,53.863,46.137,4.03,0.900,bicubic,-55.077,-44.587,+20\nconvnextv2_femto.fcmae_ft_in1k,288,37.463,62.537,53.653,46.347,5.23,0.950,bicubic,-56.297,-45.287,-196\nlambda_resnet26t.c1_in1k,256,37.393,62.607,53.540,46.460,10.96,0.940,bicubic,-56.047,-45.340,-114\nhrnet_w18.ms_aug_in1k,224,37.373,62.627,54.213,45.787,21.30,0.950,bilinear,-56.087,-44.787,-122\ncs3darknet_focus_m.c2ns_in1k,256,37.370,62.630,54.113,45.887,9.30,0.887,bicubic,-55.450,-44.457,-16\nconvnext_femto_ols.d1_in1k,288,37.283,62.717,53.160,46.840,5.23,0.950,bicubic,-56.127,-45.610,-110\nhardcorenas_b.miil_green_in1k,224,37.273,62.727,55.010,44.990,5.18,0.875,bilinear,-54.667,-43.330,+72\nmobilenetv3_large_100.miil_in21k_ft_in1k,224,37.247,62.753,53.550,46.450,5.48,0.875,bilinear,-54.983,-44.690,+51\nresnet50d.a2_in1k,288,37.243,62.757,51.823,48.177,25.58,1.000,bicubic,-57.237,-47.087,-401\nres2net50d.in1k,224,37.223,62.777,51.303,48.697,25.72,0.875,bilinear,-57.047,-47.577,-335\nresnet50.a2_in1k,288,37.203,62.797,51.450,48.550,25.56,1.000,bicubic,-56.927,-47.570,-290\ndla60.in1k,2
24,37.183,62.817,54.293,45.707,22.04,0.875,bilinear,-55.527,-44.257,-8\nres2net50_48w_2s.in1k,224,37.180,62.820,53.413,46.587,25.29,0.875,bilinear,-55.620,-45.047,-20\ncs3darknet_focus_m.c2ns_in1k,288,37.173,62.827,53.947,46.053,9.30,0.950,bicubic,-55.947,-44.793,-78\nfastvit_t8.apple_in1k,256,37.160,62.840,53.147,46.853,4.03,0.900,bicubic,-54.780,-45.183,+63\nregnety_016.pycls_in1k,224,37.127,62.873,54.150,45.850,11.20,0.875,bicubic,-55.933,-44.510,-74\nlambda_resnet26rpt_256.c1_in1k,256,37.127,62.873,53.843,46.157,10.99,0.940,bicubic,-56.313,-44.917,-128\nvit_small_patch16_224.augreg_in1k,224,37.123,62.877,51.610,48.390,22.05,0.900,bicubic,-56.337,-47.220,-134\neca_halonext26ts.c1_in1k,256,37.117,62.883,53.090,46.910,10.76,0.940,bicubic,-56.443,-45.690,-163\nbat_resnext26ts.ch_in1k,256,37.107,62.893,53.737,46.263,10.73,0.900,bicubic,-56.013,-45.013,-83\nconvnextv2_atto.fcmae_ft_in1k,224,37.097,62.903,53.090,46.910,3.71,0.875,bicubic,-55.083,-45.370,+43\nrexnet_100.nav_in1k,224,37.063,62.937,54.103,45.897,4.80,0.875,bicubic,-55.777,-44.537,-34\ntf_mixnet_l.in1k,224,37.057,62.943,52.760,47.240,7.33,0.875,bicubic,-55.993,-46.050,-76\nmobileone_s1.apple_in1k,224,37.033,62.967,54.657,45.343,4.83,0.900,bilinear,-54.737,-43.813,+64\ntf_efficientnet_b1.in1k,240,37.030,62.970,53.457,46.543,7.79,0.882,bicubic,-55.910,-45.203,-56\nfasternet_t1.in1k,224,37.003,62.997,54.910,45.090,7.60,1.000,bicubic,-54.617,-43.210,+74\nbotnet26t_256.c1_in1k,256,36.980,63.020,53.090,46.910,12.49,0.950,bicubic,-56.430,-45.570,-128\nresnet34.a2_in1k,288,36.967,63.033,51.520,48.480,21.80,1.000,bicubic,-55.623,-47.060,-9\nresnet34.bt_in1k,224,36.927,63.073,53.617,46.383,21.80,0.875,bicubic,-54.303,-44.593,+96\ntf_efficientnet_lite2.in1k,260,36.920,63.080,53.390,46.610,6.09,0.890,bicubic,-55.790,-45.230,-25\nlegacy_seresnet50.in1k,224,36.913,63.087,53.477,46.523,28.09,0.875,bilinear,-55.757,-45.183,-21\nghostnetv2_130.in1k,224,36.877,63.123,54.213,45.787,8.96,0.875,bicubic,-55.393,-44.157,+21\nreg
netx_016.pycls_in1k,224,36.863,63.137,53.423,46.577,9.19,0.875,bicubic,-55.737,-45.147,-15\nmobilenetv1_100h.ra4_e3600_r224_in1k,256,36.787,63.213,53.133,46.867,5.28,0.950,bicubic,-55.563,-45.237,+9\nhalonet26t.a1h_in1k,256,36.783,63.217,52.233,47.767,12.48,0.950,bicubic,-56.837,-46.537,-193\ndensenet121.tv_in1k,224,36.767,63.233,53.997,46.003,7.98,0.875,bicubic,-54.673,-44.243,+75\nmobilenetv2_120d.ra_in1k,224,36.723,63.277,53.950,46.050,5.83,0.875,bicubic,-55.937,-44.580,-25\nhardcorenas_a.miil_green_in1k,224,36.713,63.287,54.863,45.137,5.26,0.875,bilinear,-54.947,-43.287,+58\neca_botnext26ts_256.c1_in1k,256,36.700,63.300,52.607,47.393,10.59,0.950,bicubic,-56.690,-46.093,-136\nrepghostnet_150.in1k,224,36.693,63.307,54.193,45.807,6.58,0.875,bicubic,-55.707,-44.327,-4\ntf_efficientnet_lite1.in1k,240,36.687,63.313,53.733,46.267,5.42,0.882,bicubic,-55.613,-44.787,+8\nefficientnet_b0.ra_in1k,224,36.673,63.327,53.597,46.403,5.29,0.875,bicubic,-55.797,-44.843,-13\nresnext50_32x4d.a3_in1k,224,36.620,63.380,51.210,48.790,25.03,0.950,bicubic,-56.930,-47.560,-186\nvit_base_patch32_224.sam_in1k,224,36.590,63.410,53.087,46.913,88.22,0.900,bicubic,-53.300,-44.513,+135\nlevit_conv_128s.fb_dist_in1k,224,36.583,63.417,53.077,46.923,7.78,0.900,bicubic,-54.957,-45.353,+62\ncs3darknet_m.c2ns_in1k,256,36.580,63.420,53.610,46.390,9.31,0.887,bicubic,-56.140,-45.060,-43\nlevit_128s.fb_dist_in1k,224,36.577,63.423,53.083,46.917,7.78,0.900,bicubic,-55.003,-45.337,+57\nghostnetv3_100.in1k,224,36.560,63.440,54.147,45.853,8.13,0.875,bicubic,-55.700,-44.223,+6\nefficientvit_m4.r224_in1k,224,36.560,63.440,53.307,46.693,8.80,0.875,bicubic,-54.190,-44.783,+99\nxcit_nano_12_p8_224.fb_dist_in1k,224,36.530,63.470,52.887,47.113,3.05,1.000,bicubic,-55.870,-45.653,-13\nrepvgg_a1.rvgg_in1k,224,36.520,63.480,53.820,46.180,14.09,0.875,bilinear,-54.640,-44.370,+79\nresnet34.a3_in1k,160,36.517,63.483,52.573,47.427,21.80,0.950,bicubic,-52.133,-44.807,+154\ntf_efficientnet_em.in1k,240,36.477,63.523,52.907,47.0
93,6.90,0.882,bicubic,-56.723,-45.773,-128\nmobilenetv1_100h.ra4_e3600_r224_in1k,224,36.460,63.540,52.797,47.203,5.28,0.875,bicubic,-55.200,-45.263,+43\ncs3darknet_m.c2ns_in1k,288,36.443,63.557,53.237,46.763,9.31,0.950,bicubic,-56.807,-45.483,-137\nmobilevitv2_100.cvnets_in1k,256,36.443,63.557,53.073,46.927,4.90,0.888,bicubic,-56.667,-45.527,-117\nskresnet18.ra_in1k,224,36.330,63.670,54.253,45.747,11.96,0.875,bicubic,-53.840,-43.527,+116\nrepvgg_b0.rvgg_in1k,224,36.323,63.677,54.083,45.917,15.82,0.875,bilinear,-55.387,-44.317,+32\nresnet34.bt_in1k,288,36.313,63.687,52.847,47.153,21.80,0.950,bicubic,-56.007,-45.793,-13\nresnet50d.a3_in1k,224,36.310,63.690,51.330,48.670,25.58,0.950,bicubic,-57.090,-47.420,-159\nmobilenetv3_large_100.ra4_e3600_r224_in1k,224,36.190,63.810,53.997,46.003,5.48,0.950,bicubic,-55.240,-44.163,+51\nxcit_nano_12_p16_384.fb_dist_in1k,384,36.157,63.843,53.203,46.797,3.05,1.000,bicubic,-55.983,-45.287,+3\nresnet50.tv_in1k,224,36.157,63.843,52.870,47.130,25.56,0.875,bilinear,-55.993,-45.520,+3\nlegacy_seresnet34.in1k,224,36.157,63.843,52.563,47.437,21.96,0.875,bilinear,-55.323,-45.657,+46\nmobilenetv4_conv_small.e3600_r256_in1k,256,36.143,63.857,54.283,45.717,3.77,0.950,bicubic,-54.427,-43.697,+91\nresnet34.tv_in1k,224,36.140,63.860,53.553,46.447,21.80,0.875,bilinear,-54.150,-44.417,+101\nefficientvit_m3.r224_in1k,224,36.137,63.863,52.550,47.450,6.90,0.875,bicubic,-53.913,-45.320,+108\ndeit_tiny_distilled_patch16_224.fb_in1k,224,36.120,63.880,54.293,45.707,5.91,0.900,bicubic,-55.010,-43.977,+65\ncoat_tiny.in1k,224,36.103,63.897,51.083,48.917,5.50,0.900,bicubic,-57.407,-47.597,-199\nstarnet_s2.in1k,224,36.050,63.950,53.670,46.330,3.68,0.875,bicubic,-55.100,-44.650,+61\nresnet50.a3_in1k,224,36.050,63.950,50.647,49.353,25.56,0.950,bicubic,-56.920,-48.183,-110\nconvnext_atto.d2_in1k,224,36.040,63.960,52.927,47.073,3.70,0.875,bicubic,-56.160,-45.383,-11\ntf_efficientnet_lite0.in1k,224,36.033,63.967,53.487,46.513,4.65,0.875,bicubic,-55.327,-44.643,+43\nc
onvnextv2_atto.fcmae_ft_in1k,288,36.013,63.987,51.160,48.840,3.71,0.950,bicubic,-56.907,-47.600,-101\nmobilenetv2_140.ra_in1k,224,36.010,63.990,53.977,46.023,6.11,0.875,bicubic,-56.070,-44.373,-1\nresnetv2_18.ra4_e3600_r224_in1k,224,35.997,64.003,52.447,47.553,11.69,0.900,bicubic,-54.183,-45.283,+96\nresnetv2_18d.ra4_e3600_r224_in1k,224,35.980,64.020,52.537,47.463,11.71,0.900,bicubic,-55.290,-45.683,+43\nresnetv2_18d.ra4_e3600_r224_in1k,288,35.927,64.073,52.037,47.963,11.71,1.000,bicubic,-56.363,-46.373,-28\nmobilenetv3_large_100.ra4_e3600_r224_in1k,256,35.857,64.143,53.770,46.230,5.48,1.000,bicubic,-55.953,-44.680,+8\nselecsls42b.in1k,224,35.837,64.163,52.557,47.443,32.46,0.875,bicubic,-56.643,-45.883,-52\nseresnext26ts.ch_in1k,256,35.830,64.170,54.000,46.000,10.39,0.900,bicubic,-56.960,-44.600,-87\nresnetv2_18.ra4_e3600_r224_in1k,288,35.827,64.173,51.927,48.073,11.69,1.000,bicubic,-55.853,-46.393,+12\nresnet34.gluon_in1k,224,35.823,64.177,52.363,47.637,21.80,0.875,bicubic,-55.287,-45.687,+54\nresnet18d.ra4_e3600_r224_in1k,224,35.817,64.183,52.417,47.583,11.71,0.900,bicubic,-55.093,-45.773,+59\nmobilenetv1_100.ra4_e3600_r224_in1k,256,35.787,64.213,52.597,47.403,4.23,0.950,bicubic,-56.293,-45.673,-13\nconvnext_atto.d2_in1k,288,35.783,64.217,52.347,47.653,3.70,0.950,bicubic,-56.997,-46.273,-91\nxcit_nano_12_p8_384.fb_dist_in1k,384,35.757,64.243,52.313,47.687,3.05,1.000,bicubic,-57.623,-46.567,-183\nresnet26t.ra2_in1k,320,35.753,64.247,53.583,46.417,16.01,1.000,bicubic,-57.207,-45.097,-125\nmobilenetv4_conv_small.e3600_r256_in1k,320,35.737,64.263,53.453,46.547,3.77,1.000,bicubic,-56.183,-44.787,-6\nseresnet50.a3_in1k,224,35.727,64.273,51.273,48.727,28.09,0.950,bicubic,-56.653,-47.057,-50\nseresnext26ts.ch_in1k,288,35.723,64.277,53.470,46.530,10.39,1.000,bicubic,-57.247,-45.240,-131\nmobilenetv1_100.ra4_e3600_r224_in1k,224,35.713,64.287,52.810,47.190,4.23,0.875,bicubic,-55.647,-45.280,+23\nefficientnet_lite0.ra_in1k,224,35.690,64.310,53.720,46.280,4.65,0.875,bicubic,-5
5.580,-44.230,+25\nresnet18.fb_ssl_yfcc100m_ft_in1k,224,35.647,64.353,53.820,46.180,11.69,0.875,bilinear,-55.043,-44.220,+57\ndla34.in1k,224,35.647,64.353,52.837,47.163,15.74,0.875,bilinear,-55.613,-45.323,+27\nmixnet_m.ft_in1k,224,35.643,64.357,52.460,47.540,5.01,0.875,bicubic,-56.627,-45.910,-42\nmobilenetv3_rw.rmsp_in1k,224,35.640,64.360,53.767,46.233,5.48,0.875,bicubic,-55.920,-44.573,+9\nconvnext_atto_ols.a2_in1k,288,35.560,64.440,51.457,48.543,3.70,0.950,bicubic,-57.390,-47.133,-131\nregnetx_008.tv2_in1k,224,35.557,64.443,51.503,48.497,7.26,0.965,bicubic,-56.913,-47.177,-69\nconvnext_atto_ols.a2_in1k,224,35.507,64.493,52.077,47.923,3.70,0.875,bicubic,-56.603,-46.303,-29\ninception_next_atto.sail_in1k,224,35.497,64.503,52.780,47.220,4.16,0.875,bicubic,-56.063,-45.490,+4\nefficientnet_es_pruned.in1k,224,35.397,64.603,52.883,47.117,5.44,0.875,bicubic,-56.313,-45.547,-10\nresnet18d.ra4_e3600_r224_in1k,288,35.337,64.663,51.757,48.243,11.71,1.000,bicubic,-56.843,-46.593,-40\nrepghostnet_130.in1k,224,35.323,64.677,52.740,47.260,5.48,0.875,bicubic,-56.607,-45.650,-22\nmobilenetv2_110d.ra_in1k,224,35.287,64.713,52.810,47.190,4.52,0.875,bicubic,-56.033,-45.340,+12\ntf_mixnet_m.in1k,224,35.250,64.750,51.003,48.997,5.01,0.875,bicubic,-56.930,-47.337,-44\nhrnet_w18_small_v2.ms_in1k,224,35.237,64.763,52.543,47.457,15.60,0.875,bilinear,-55.953,-45.807,+21\nxcit_nano_12_p16_224.fb_dist_in1k,224,35.210,64.790,52.607,47.393,3.05,1.000,bicubic,-54.990,-45.153,+62\nmobilenetv4_conv_small.e2400_r224_in1k,256,35.207,64.793,53.010,46.990,3.77,0.950,bicubic,-55.893,-45.070,+30\nresnet18d.ra2_in1k,224,35.143,64.857,52.900,47.100,11.71,0.875,bicubic,-54.877,-44.940,+67\nresnext26ts.ra2_in1k,256,35.110,64.890,53.437,46.563,10.30,0.900,bicubic,-57.150,-45.023,-54\nconvit_tiny.fb_in1k,224,35.100,64.900,51.830,48.170,5.71,0.875,bicubic,-55.480,-45.910,+42\nresnet34.a3_in1k,224,35.090,64.910,50.520,49.480,21.80,0.950,bicubic,-55.210,-47.370,+53\nresnet18.a1_in1k,224,35.033,64.967,50.983,49.
017,11.69,0.950,bicubic,-54.117,-46.347,+83\nmobilenetv4_conv_small.e2400_r224_in1k,224,35.013,64.987,52.897,47.103,3.77,0.875,bicubic,-55.527,-45.063,+44\ntinynet_b.in1k,188,35.013,64.987,52.120,47.880,3.73,0.875,bicubic,-56.107,-45.950,+18\ngcresnext26ts.ch_in1k,288,35.013,64.987,51.460,48.540,10.48,1.000,bicubic,-57.797,-47.150,-126\ngcresnext26ts.ch_in1k,256,34.990,65.010,51.660,48.340,10.48,0.900,bicubic,-57.470,-46.840,-86\nshvit_s1.in1k,224,34.963,65.037,52.047,47.953,6.33,0.875,bicubic,-54.807,-45.683,+65\neca_resnext26ts.ch_in1k,256,34.953,65.047,52.340,47.660,10.30,0.900,bicubic,-57.487,-46.270,-86\nregnety_004.tv2_in1k,224,34.937,65.063,51.397,48.603,4.34,0.965,bicubic,-56.653,-46.893,-19\neca_resnext26ts.ch_in1k,288,34.927,65.073,52.357,47.643,10.30,1.000,bicubic,-57.843,-46.353,-126\nregnety_008.pycls_in1k,224,34.883,65.117,51.887,48.113,6.26,0.875,bicubic,-57.007,-46.533,-39\nese_vovnet19b_dw.ra_in1k,224,34.843,65.157,51.987,48.013,6.54,0.875,bicubic,-57.167,-46.533,-49\nresnext26ts.ra2_in1k,288,34.790,65.210,52.820,47.180,10.30,1.000,bicubic,-57.640,-45.570,-90\nmobilenetv4_conv_small.e1200_r224_in1k,256,34.767,65.233,52.690,47.310,3.77,0.950,bicubic,-55.503,-45.410,+43\nmobilenetv3_large_100.ra_in1k,224,34.673,65.327,52.933,47.067,5.48,0.875,bicubic,-56.797,-45.387,-18\ncrossvit_9_240.in1k,240,34.670,65.330,51.797,48.203,8.55,0.875,bicubic,-56.410,-46.513,+12\ntf_efficientnet_b0.in1k,224,34.660,65.340,51.163,48.837,5.29,0.875,bicubic,-57.640,-47.387,-81\npit_ti_224.in1k,224,34.630,65.370,52.133,47.867,4.85,0.900,bicubic,-55.810,-45.547,+32\nseresnext26t_32x4d.bt_in1k,224,34.563,65.437,51.487,48.513,16.81,0.875,bicubic,-58.237,-47.053,-139\nseresnext26d_32x4d.bt_in1k,224,34.560,65.440,51.477,48.523,16.81,0.875,bicubic,-57.900,-47.063,-101\npvt_v2_b0.in1k,224,34.410,65.590,53.093,46.907,3.67,0.900,bicubic,-54.570,-44.587,+66\nresnet26d.bt_in1k,224,34.340,65.660,51.830,48.170,16.01,0.875,bicubic,-57.880,-46.640,-74\nfbnetc_100.rmsp_in1k,224,34.337,65.66
3,51.490,48.510,5.57,0.875,bilinear,-56.913,-46.340,-11\nmixer_b16_224.goog_in21k_ft_in1k,224,34.323,65.677,48.070,51.930,59.88,0.875,bicubic,-56.797,-50.010,0\nmobilenetv4_conv_small.e1200_r224_in1k,224,34.303,65.697,52.343,47.657,3.77,0.875,bicubic,-55.817,-45.637,+37\ntf_efficientnet_es.in1k,224,34.300,65.700,51.507,48.493,5.44,0.875,bicubic,-57.810,-46.923,-68\nregnety_006.pycls_in1k,224,34.283,65.717,51.423,48.577,6.06,0.875,bicubic,-57.317,-46.997,-38\nresnet18d.ra2_in1k,288,34.240,65.760,51.690,48.310,11.71,0.950,bicubic,-56.530,-46.480,+5\nstarnet_s1.in1k,224,34.200,65.800,50.773,49.227,2.87,0.875,bicubic,-56.530,-47.057,+9\nghostnetv2_100.in1k,224,34.167,65.833,52.050,47.950,6.16,0.875,bicubic,-57.463,-46.230,-43\nswiftformer_xs.dist_in1k,224,34.160,65.840,50.777,49.223,3.48,0.950,bicubic,-56.680,-47.223,+1\nconvnext_zepto_rms.ra4_e3600_r224_in1k,224,34.100,65.900,48.240,51.760,2.16,0.875,bicubic,-55.940,-49.690,+32\nrepvgg_a0.rvgg_in1k,224,34.090,65.910,52.000,48.000,9.11,0.875,bilinear,-55.720,-45.500,+37\nresnet18.a1_in1k,288,34.050,65.950,49.517,50.483,11.69,1.000,bicubic,-56.140,-48.263,+25\ntf_mobilenetv3_large_100.in1k,224,33.993,66.007,51.553,48.447,5.48,0.875,bilinear,-57.417,-46.677,-33\nmnasnet_100.rmsp_in1k,224,33.873,66.127,51.217,48.783,4.38,0.875,bicubic,-57.387,-46.813,-25\nconvnext_zepto_rms_ols.ra4_e3600_r224_in1k,224,33.850,66.150,48.160,51.840,2.16,0.900,bicubic,-56.590,-49.840,+14\nregnetx_004_tv.tv2_in1k,224,33.837,66.163,49.890,50.110,5.50,0.965,bicubic,-57.283,-47.520,-15\nrepghostnet_111.in1k,224,33.807,66.193,51.550,48.450,4.54,0.875,bicubic,-57.303,-46.630,-13\nmobilevit_s.cvnets_in1k,256,33.790,66.210,49.480,50.520,5.58,0.900,bicubic,-59.390,-49.140,-224\nese_vovnet19b_dw.ra_in1k,288,33.767,66.233,50.937,49.063,6.54,0.950,bicubic,-58.973,-47.743,-151\nregnetx_008.pycls_in1k,224,33.733,66.267,50.580,49.420,7.26,0.875,bicubic,-57.487,-47.800,-28\nlcnet_100.ra2_in1k,224,33.720,66.280,52.103,47.897,2.95,0.875,bicubic,-55.210,-45.267,
+46\nsemnasnet_075.rmsp_in1k,224,33.710,66.290,52.430,47.570,2.91,0.875,bicubic,-56.570,-45.500,+12\nvit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k,384,33.667,66.333,50.760,49.240,6.36,1.000,bicubic,-58.043,-47.700,-66\nxcit_nano_12_p8_224.fb_in1k,224,33.623,66.377,50.430,49.570,3.05,1.000,bicubic,-57.567,-47.850,-29\nmixnet_s.ft_in1k,224,33.620,66.380,51.023,48.977,4.13,0.875,bicubic,-58.200,-47.317,-73\nresnet26.bt_in1k,224,33.597,66.403,51.010,48.990,16.00,0.875,bicubic,-57.863,-47.260,-50\nvit_tiny_patch16_384.augreg_in21k_ft_in1k,384,33.557,66.443,50.963,49.037,5.79,1.000,bicubic,-59.943,-47.707,-291\nsemnasnet_100.rmsp_in1k,224,33.557,66.443,50.830,49.170,3.89,0.875,bicubic,-58.103,-47.440,-66\nspnasnet_100.rmsp_in1k,224,33.503,66.497,51.310,48.690,4.42,0.875,bilinear,-57.167,-46.650,-10\ncrossvit_tiny_240.in1k,240,33.443,66.557,49.960,50.040,7.01,0.875,bicubic,-57.097,-47.860,-6\nmobilevitv2_075.cvnets_in1k,256,33.420,66.580,50.143,49.857,2.87,0.888,bicubic,-58.590,-48.177,-89\nvgg19_bn.tv_in1k,224,33.310,66.690,50.883,49.117,143.68,0.875,bilinear,-57.670,-47.227,-24\nregnetx_006.pycls_in1k,224,33.277,66.723,50.327,49.673,6.20,0.875,bicubic,-57.493,-47.763,-21\nresnet18.a2_in1k,224,33.267,66.733,49.520,50.480,11.69,0.950,bicubic,-54.943,-47.660,+43\ncs3darknet_focus_s.ra4_e3600_r256_in1k,256,33.233,66.767,51.107,48.893,3.27,0.887,bicubic,-56.717,-46.523,+9\nefficientvit_m2.r224_in1k,224,33.190,66.810,49.780,50.220,4.19,0.875,bicubic,-55.750,-47.580,+30\nedgenext_x_small.in1k,288,33.140,66.860,49.043,50.957,2.34,1.000,bicubic,-58.450,-49.147,-68\nrepghostnet_100.in1k,224,33.133,66.867,50.753,49.247,4.07,0.875,bicubic,-57.547,-47.357,-20\nseresnext26t_32x4d.bt_in1k,288,33.073,66.927,50.247,49.753,16.81,0.950,bicubic,-60.267,-48.413,-267\nxcit_nano_12_p16_224.fb_in1k,224,33.037,66.963,50.067,49.933,3.05,1.000,bicubic,-55.863,-47.373,+28\nresnet18.tv_in1k,224,33.023,66.977,51.143,48.857,11.69,0.875,bilinear,-55.177,-46.197,+38\nseresnext26d_32x4d.bt_in1k,288,32.997,6
7.003,49.863,50.137,16.81,0.950,bicubic,-60.063,-48.697,-236\nmobileone_s0.apple_in1k,224,32.980,67.020,51.090,48.910,5.29,0.875,bilinear,-55.790,-46.140,+26\nhrnet_w18_small.gluon_in1k,224,32.847,67.153,50.497,49.503,13.19,0.875,bicubic,-57.463,-47.253,-13\nlegacy_seresnext26_32x4d.in1k,224,32.763,67.237,49.220,50.780,16.79,0.875,bicubic,-59.827,-49.210,-159\nhrnet_w18_small.ms_in1k,224,32.730,67.270,50.643,49.357,13.19,0.875,bilinear,-57.150,-47.247,0\nedgenext_x_small.in1k,256,32.720,67.280,48.670,51.330,2.34,0.900,bicubic,-58.710,-49.740,-68\ndeit_tiny_patch16_224.fb_in1k,224,32.713,67.287,50.327,49.673,5.72,0.900,bicubic,-56.927,-47.633,+4\nlegacy_seresnet18.in1k,224,32.677,67.323,50.327,49.673,11.78,0.875,bicubic,-56.613,-47.383,+9\nregnetx_004.pycls_in1k,224,32.603,67.397,49.387,50.613,5.16,0.875,bicubic,-56.887,-48.433,+4\nghostnet_100.in1k,224,32.597,67.403,50.417,49.583,5.18,0.875,bicubic,-57.873,-47.523,-24\nmobilenetv2_100.ra_in1k,224,32.593,67.407,50.870,49.130,3.50,0.875,bicubic,-57.267,-46.970,-5\nresnet26d.bt_in1k,288,32.477,67.523,50.093,49.907,16.01,0.950,bicubic,-60.103,-48.547,-165\nresnet18.gluon_in1k,224,32.423,67.577,49.730,50.270,11.69,0.875,bicubic,-56.267,-47.380,+17\nregnety_004.pycls_in1k,224,32.397,67.603,49.497,50.503,4.34,0.875,bicubic,-58.353,-48.563,-41\ncs3darknet_focus_s.ra4_e3600_r256_in1k,320,32.323,67.677,49.887,50.113,3.27,1.000,bicubic,-58.247,-48.173,-33\ntf_mixnet_s.in1k,224,32.220,67.780,48.593,51.407,4.13,0.875,bicubic,-59.470,-49.627,-98\nresnet18.a2_in1k,288,32.183,67.817,47.907,52.093,11.69,1.000,bicubic,-57.357,-49.753,-5\nvit_tiny_patch16_224.augreg_in21k_ft_in1k,224,32.150,67.850,49.103,50.897,5.72,0.900,bicubic,-59.790,-49.307,-113\nresnet26.bt_in1k,288,32.147,67.853,49.510,50.490,16.00,0.950,bicubic,-60.013,-49.040,-129\ntf_mobilenetv3_large_075.in1k,224,31.970,68.030,49.150,50.850,3.99,0.875,bilinear,-58.370,-48.700,-31\ntf_mobilenetv3_large_minimal_100.in1k,224,31.680,68.320,49.443,50.557,3.92,0.875,bilinear,-57.
520,-47.847,0\nresnet18.a3_in1k,160,31.587,68.413,48.370,51.630,11.69,0.950,bicubic,-53.453,-47.570,+35\nefficientvit_m1.r224_in1k,224,31.260,68.740,48.470,51.530,2.98,0.875,bicubic,-55.930,-48.240,+20\nrepghostnet_080.in1k,224,30.973,69.027,48.837,51.163,3.28,0.875,bicubic,-58.477,-48.633,-10\nvit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k,224,30.870,69.130,47.680,52.320,6.34,0.900,bicubic,-58.540,-50.020,-10\ntinynet_c.in1k,184,30.530,69.470,48.537,51.463,2.46,0.875,bicubic,-57.920,-48.733,+7\nlcnet_075.ra2_in1k,224,30.380,69.620,48.760,51.240,2.36,0.875,bicubic,-56.630,-47.790,+19\nvgg16_bn.tv_in1k,224,30.380,69.620,47.317,52.683,138.37,0.875,bilinear,-60.140,-50.683,-42\nefficientvit_b0.r224_in1k,224,30.113,69.887,46.713,53.287,3.41,0.950,bicubic,-58.137,-50.167,+6\nresnet18.a3_in1k,224,30.093,69.907,46.313,53.687,11.69,0.950,bicubic,-56.957,-50.357,+16\nresnet10t.c3_in1k,176,29.790,70.210,47.817,52.183,5.44,0.875,bicubic,-55.700,-48.473,+22\nedgenext_xx_small.in1k,288,29.747,70.253,46.507,53.493,1.33,1.000,bicubic,-60.063,-51.283,-24\nmobilevit_xs.cvnets_in1k,256,29.693,70.307,46.090,53.910,2.32,0.900,bicubic,-61.577,-52.050,-89\nregnety_002.pycls_in1k,224,29.683,70.317,46.823,53.177,3.16,0.875,bicubic,-58.517,-50.287,+3\nedgenext_xx_small.in1k,256,29.447,70.553,46.373,53.627,1.33,0.900,bicubic,-59.803,-50.877,-17\nfasternet_t0.in1k,224,29.347,70.653,47.040,52.960,3.91,1.000,bicubic,-59.363,-50.040,-7\nmobilenetv3_small_100.lamb_in1k,224,29.063,70.937,47.267,52.733,2.54,0.875,bicubic,-57.127,-49.183,+13\nmnasnet_small.lamb_in1k,224,28.983,71.017,47.380,52.620,2.03,0.875,bicubic,-56.507,-48.600,+16\nvgg13_bn.tv_in1k,224,28.957,71.043,46.823,53.177,133.05,0.875,bilinear,-60.273,-50.697,-19\nresnet10t.c3_in1k,224,28.907,71.093,46.967,53.033,5.44,0.950,bicubic,-57.803,-49.763,+9\nregnetx_002.pycls_in1k,224,28.857,71.143,45.603,54.397,2.68,0.875,bicubic,-58.493,-51.387,0\nefficientvit_m0.r224_in1k,224,28.827,71.173,45.800,54.200,2.35,0.875,bicubic,-54.513,-49.830,+22\nr
esnet14t.c3_in1k,176,28.733,71.267,46.040,53.960,10.08,0.875,bicubic,-59.737,-50.910,-11\nvgg19.tv_in1k,224,28.700,71.300,45.280,54.720,143.67,0.875,bilinear,-61.000,-52.250,-34\nmobilenetv2_050.lamb_in1k,224,28.680,71.320,46.597,53.403,1.97,0.875,bicubic,-56.260,-49.073,+13\nmobilevitv2_050.cvnets_in1k,256,28.630,71.370,45.287,54.713,1.37,0.888,bicubic,-60.430,-52.303,-23\nmobilenetv4_conv_small_050.e3000_r224_in1k,256,28.570,71.430,46.667,53.333,2.24,0.950,bicubic,-56.100,-49.473,+13\nrepghostnet_058.in1k,224,28.497,71.503,46.607,53.393,2.55,0.875,bicubic,-58.693,-50.393,-4\ndla60x_c.in1k,224,28.490,71.510,46.247,53.753,1.32,0.875,bilinear,-58.620,-50.903,-4\nvgg11_bn.tv_in1k,224,28.440,71.560,46.477,53.523,132.87,0.875,bilinear,-59.970,-50.803,-16\ntinynet_d.in1k,152,27.993,72.007,45.940,54.060,2.34,0.875,bicubic,-57.487,-50.520,+5\nvgg16.tv_in1k,224,27.960,72.040,44.757,55.243,138.36,0.875,bilinear,-61.440,-52.743,-36\nmobilenetv4_conv_small_050.e3000_r224_in1k,224,27.833,72.167,46.427,53.573,2.24,0.875,bicubic,-56.397,-49.293,+9\nresnet14t.c3_in1k,224,27.647,72.353,44.713,55.287,10.08,0.950,bicubic,-61.593,-52.737,-35\ntf_mobilenetv3_small_100.in1k,224,27.570,72.430,44.913,55.087,2.54,0.875,bilinear,-58.440,-51.477,-4\nrepghostnet_050.in1k,224,27.133,72.867,45.050,54.950,2.31,0.875,bicubic,-58.427,-51.060,-4\nmixer_l16_224.goog_in21k_ft_in1k,224,26.803,73.197,37.863,62.137,208.20,0.875,bicubic,-60.197,-56.157,-9\nvgg11.tv_in1k,224,26.620,73.380,43.527,56.473,132.86,0.875,bilinear,-60.720,-53.603,-16\nmobilenetv3_small_075.lamb_in1k,224,26.577,73.423,43.923,56.077,2.04,0.875,bicubic,-57.563,-51.577,+4\ndla46x_c.in1k,224,26.403,73.597,43.907,56.093,1.07,0.875,bilinear,-59.077,-52.113,-6\nmobilevit_xxs.cvnets_in1k,256,26.403,73.597,43.137,56.863,1.27,0.900,bicubic,-61.567,-54.063,-21\nvgg13.tv_in1k,224,26.307,73.693,43.453,56.547,133.05,0.875,bilinear,-61.253,-53.667,-22\ntf_mobilenetv3_small_075.in1k,224,26.267,73.733,43.780,56.220,2.04,0.875,bilinear,-58.253,-52
.010,-2\nlcnet_050.ra2_in1k,224,26.223,73.777,44.590,55.410,1.88,0.875,bicubic,-56.907,-50.360,+1\ntf_mobilenetv3_small_minimal_100.in1k,224,25.813,74.187,43.647,56.353,2.04,0.875,bilinear,-56.827,-51.343,+1\ndla46_c.in1k,224,25.513,74.487,43.903,56.097,1.30,0.875,bilinear,-59.227,-52.307,-7\ntest_vit3.r160_in1k,160,24.623,75.377,41.737,58.263,0.93,0.950,bicubic,-54.427,-52.033,+1\ntinynet_e.in1k,106,23.473,76.527,41.177,58.823,2.04,0.875,bicubic,-56.367,-52.823,-1\ntest_convnext2.r160_in1k,160,22.407,77.593,40.410,59.590,0.48,0.950,bicubic,-53.453,-52.400,+1\ntest_convnext3.r160_in1k,160,22.390,77.610,39.963,60.037,0.47,0.950,bicubic,-53.250,-52.897,+1\nmobilenetv3_small_050.lamb_in1k,224,21.797,78.203,38.783,61.217,1.59,0.875,bicubic,-56.293,-54.227,-2\ntest_convnext.r160_in1k,160,19.687,80.313,36.787,63.213,0.27,0.950,bicubic,-50.743,-54.173,0\ntest_resnet.r160_in1k,160,18.243,81.757,35.140,64.860,0.47,0.950,bilinear,-46.357,-51.820,+7\ntest_efficientnet_evos.r160_in1k,160,17.897,82.103,34.573,65.427,0.36,0.950,bicubic,-50.163,-53.677,0\ntest_nfnet.r160_in1k,160,17.597,82.403,34.283,65.717,0.38,0.950,bicubic,-52.583,-55.587,-2\ntest_byobnet.r160_in1k,160,17.260,82.740,33.740,66.260,0.46,0.950,bicubic,-50.260,-54.970,0\ntest_efficientnet.r160_in1k,160,16.663,83.337,33.230,66.770,0.36,0.950,bicubic,-50.957,-55.270,-2\ntest_efficientnet_gn.r160_in1k,160,16.430,83.570,32.650,67.350,0.36,0.950,bicubic,-49.330,-55.560,+1\ntest_efficientnet_ln.r160_in1k,160,16.123,83.877,32.907,67.093,0.36,0.950,bicubic,-49.907,-54.773,-2\ntest_vit2.r160_in1k,160,14.633,85.367,29.693,70.307,0.46,0.950,bicubic,-51.127,-58.167,-2\ntest_vit.r160_in1k,160,13.413,86.587,27.980,72.020,0.37,0.950,bicubic,-50.567,-59.190,0\n"
  },
  {
    "path": "results/results-imagenet-real.csv",
    "content": "model,img_size,top1,top1_err,top5,top5_err,param_count,crop_pct,interpolation,top1_diff,top5_diff,rank_diff\neva02_large_patch14_448.mim_m38m_ft_in22k_in1k,448,91.144,8.856,98.725,1.275,305.08,1.000,bicubic,+1.088,-0.329,0\neva_giant_patch14_336.clip_ft_in1k,336,91.078,8.922,98.604,1.396,\"1,013.01\",1.000,bicubic,+1.616,-0.224,+5\neva02_large_patch14_448.mim_in22k_ft_in22k_in1k,448,91.050,8.950,98.685,1.315,305.08,1.000,bicubic,+1.094,-0.329,-1\neva_giant_patch14_560.m30m_ft_in22k_in1k,560,90.950,9.050,98.674,1.326,\"1,014.45\",1.000,bicubic,+1.159,-0.316,-1\nvit_so400m_patch14_siglip_gap_378.webli_ft_in1k,378,90.939,9.061,98.648,1.351,414.14,1.000,bicubic,+1.883,-0.103,+5\neva02_large_patch14_448.mim_in22k_ft_in1k,448,90.930,9.070,98.678,1.322,305.08,1.000,bicubic,+1.296,-0.276,-2\neva02_base_patch14_448.mim_in22k_ft_in22k_in1k,448,90.911,9.089,98.811,1.189,87.12,1.000,bicubic,+2.233,+0.085,+7\neva_large_patch14_336.in22k_ft_in1k,336,90.907,9.093,98.781,1.219,304.53,1.000,bicubic,+2.227,+0.067,+5\neva_giant_patch14_224.clip_ft_in1k,224,90.896,9.104,98.683,1.317,\"1,012.56\",0.900,bicubic,+2.000,+0.011,+2\neva_giant_patch14_336.m30m_ft_in22k_in1k,336,90.896,9.104,98.661,1.339,\"1,013.01\",1.000,bicubic,+1.330,-0.291,-5\neva02_large_patch14_448.mim_m38m_ft_in1k,448,90.885,9.115,98.659,1.341,305.08,1.000,bicubic,+1.335,-0.265,-5\neva_large_patch14_336.in22k_ft_in22k_in1k,336,90.877,9.123,98.721,1.279,304.53,1.000,bicubic,+1.639,-0.127,-3\ncaformer_b36.sail_in22k_ft_in1k_384,384,90.804,9.196,98.862,1.138,98.75,1.000,bicubic,+2.746,+0.272,+24\nvit_so400m_patch14_siglip_378.webli_ft_in1k,378,90.798,9.202,98.687,1.313,429.38,1.000,bicubic,+1.388,-0.167,-6\neva02_base_patch14_448.mim_in22k_ft_in1k,448,90.789,9.211,98.738,1.262,87.12,1.000,bicubic,+2.527,+0.168,+12\nvit_so150m2_patch16_reg1_gap_448.sbb_e200_in12k_ft_in1k,448,90.717,9.283,98.742,1.258,136.50,1.000,bicubic,+2.643,+0.184,+20\nbeit_large_patch16_512.in22k_ft_in22k_in1k,512,90.715,9.285,98.770,1
.230,305.67,1.000,bicubic,+2.139,+0.114,+2\nconvnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_384,384,90.702,9.298,98.821,1.179,200.13,1.000,bicubic,+2.368,+0.247,+6\nconvnext_large_mlp.clip_laion2b_augreg_ft_in1k_384,384,90.670,9.330,98.762,1.238,200.13,1.000,bicubic,+2.780,+0.314,+25\nconvnext_xxlarge.clip_laion2b_soup_ft_in1k,256,90.668,9.332,98.806,1.194,846.47,1.000,bicubic,+2.046,+0.088,-3\nregnety_1280.swag_ft_in1k,384,90.661,9.339,98.819,1.181,644.81,1.000,bicubic,+2.433,+0.135,+10\nvolo_d5_512.sail_in1k,512,90.633,9.367,98.702,1.298,296.09,1.150,bicubic,+3.566,+0.732,+57\nvit_so150m2_patch16_reg1_gap_384.sbb_e200_in12k_ft_in1k,384,90.614,9.386,98.772,1.228,136.33,1.000,bicubic,+2.682,+0.272,+18\nbeit_large_patch16_384.in22k_ft_in22k_in1k,384,90.599,9.401,98.772,1.228,305.00,1.000,bicubic,+2.219,+0.168,-2\nvolo_d5_448.sail_in1k,448,90.587,9.414,98.691,1.309,295.91,1.150,bicubic,+3.612,+0.763,+60\nmaxvit_rmlp_base_rw_384.sw_in12k_ft_in1k,384,90.576,9.424,98.623,1.377,116.14,1.000,bicubic,+2.766,+0.257,+21\ntf_efficientnet_l2.ns_jft_in1k,800,90.569,9.431,98.777,1.223,480.31,0.960,bicubic,+2.209,+0.121,-4\nmaxvit_base_tf_512.in21k_ft_in1k,512,90.567,9.433,98.702,1.298,119.88,1.000,bicubic,+2.353,+0.164,+4\neva_large_patch14_196.in22k_ft_in22k_in1k,196,90.561,9.439,98.700,1.300,304.14,1.000,bicubic,+1.971,+0.038,-11\nconvnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_320,320,90.557,9.443,98.800,1.200,200.13,1.000,bicubic,+2.587,+0.326,+9\nvit_large_patch14_clip_336.openai_ft_in12k_in1k,336,90.548,9.452,98.674,1.326,304.53,1.000,bicubic,+2.288,+0.150,-3\nconvnext_xlarge.fb_in22k_ft_in1k_384,384,90.540,9.460,98.770,1.230,350.20,1.000,bicubic,+2.776,+0.218,+16\neva_large_patch14_196.in22k_ft_in1k,196,90.527,9.473,98.768,1.232,304.14,1.000,bicubic,+2.575,+0.278,+6\ntf_efficientnet_l2.ns_jft_in1k_475,475,90.527,9.473,98.708,1.292,480.31,0.936,bicubic,+2.281,+0.154,-4\nconvnextv2_huge.fcmae_ft_in22k_in1k_512,512,90.525,9.475,98.704,1.296,660.29,1.000,bicubic,+1.665,-
0.036,-23\nmambaout_base_plus_rw.sw_e150_r384_in12k_ft_in1k,384,90.520,9.480,98.796,1.204,101.66,1.000,bicubic,+3.012,+0.368,+19\nvolo_d4_448.sail_in1k,448,90.510,9.490,98.595,1.405,193.41,1.150,bicubic,+3.722,+0.715,+61\nmaxvit_xlarge_tf_512.in21k_ft_in1k,512,90.505,9.495,98.597,1.403,475.77,1.000,bicubic,+1.965,-0.053,-18\nvit_huge_patch14_clip_336.laion2b_ft_in12k_in1k,336,90.503,9.497,98.627,1.373,632.46,1.000,bicubic,+1.869,-0.041,-23\nbeit3_large_patch16_224.in22k_ft_in1k,224,90.493,9.507,98.653,1.347,304.57,1.000,bicubic,+2.865,+0.319,+11\nconvformer_b36.sail_in22k_ft_in1k_384,384,90.486,9.514,98.772,1.228,99.88,1.000,bicubic,+2.880,+0.348,+12\nbeit3_large_patch16_224.indomain_in22k_ft_in1k,224,90.482,9.518,98.702,1.298,304.57,1.000,bicubic,+2.948,+0.338,+12\nconvnextv2_huge.fcmae_ft_in22k_in1k_384,384,90.475,9.524,98.683,1.317,660.29,1.000,bicubic,+1.809,-0.051,-28\ncaformer_m36.sail_in22k_ft_in1k_384,384,90.473,9.527,98.670,1.330,56.20,1.000,bicubic,+3.003,+0.362,+16\nswinv2_base_window12to24_192to384.ms_in22k_ft_in1k,384,90.444,9.556,98.749,1.251,87.92,1.000,bicubic,+3.302,+0.521,+32\ncaformer_b36.sail_in22k_ft_in1k,224,90.441,9.559,98.766,1.234,98.75,1.000,bicubic,+2.991,+0.430,+16\nmaxxvitv2_rmlp_base_rw_384.sw_in12k_ft_in1k,384,90.441,9.559,98.747,1.253,116.09,1.000,bicubic,+2.961,+0.371,+11\nvit_huge_patch14_clip_224.laion2b_ft_in12k_in1k,224,90.439,9.561,98.640,1.360,632.05,1.000,bicubic,+2.151,+0.090,-22\nvit_mediumd_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k,384,90.435,9.565,98.689,1.311,64.27,1.000,bicubic,+2.997,+0.433,+14\nvit_large_patch14_clip_336.laion2b_ft_in12k_in1k,336,90.428,9.572,98.644,1.356,304.53,1.000,bicubic,+2.239,+0.078,-17\nbeit_base_patch16_384.in22k_ft_in22k_in1k,384,90.407,9.593,98.738,1.262,86.74,1.000,bicubic,+3.593,+0.602,+45\ncoatnet_rmlp_2_rw_384.sw_in12k_ft_in1k,384,90.394,9.606,98.646,1.354,73.88,1.000,bicubic,+3.006,+0.334,+13\nconvnextv2_base.fcmae_ft_in22k_in1k_384,384,90.375,9.625,98.666,1.334,88.72,1.000,bicubic,+
2.737,+0.250,-4\nmaxvit_large_tf_512.in21k_ft_in1k,512,90.375,9.625,98.640,1.360,212.33,1.000,bicubic,+2.139,+0.032,-23\nmaxvit_xlarge_tf_384.in21k_ft_in1k,384,90.373,9.627,98.582,1.418,475.32,1.000,bicubic,+2.071,+0.044,-30\nmaxvit_base_tf_384.in21k_ft_in1k,384,90.369,9.631,98.680,1.319,119.65,1.000,bicubic,+2.439,+0.134,-14\nvit_large_patch14_clip_224.openai_ft_in12k_in1k,224,90.362,9.638,98.666,1.334,304.20,1.000,bicubic,+2.200,+0.128,-22\nbeitv2_large_patch16_224.in1k_ft_in22k_in1k,224,90.362,9.638,98.582,1.418,304.43,0.950,bicubic,+1.956,-0.020,-37\nconvnext_base.clip_laion2b_augreg_ft_in12k_in1k_384,384,90.358,9.642,98.770,1.230,88.59,1.000,bicubic,+3.214,+0.542,+16\nseresnextaa201d_32x8d.sw_in12k_ft_in1k_384,384,90.354,9.646,98.736,1.264,149.39,1.000,bicubic,+3.050,+0.402,+9\nconvnextv2_large.fcmae_ft_in22k_in1k_384,384,90.354,9.646,98.646,1.354,197.96,1.000,bicubic,+2.174,+0.126,-26\nconvnext_large_mlp.clip_laion2b_augreg_ft_in1k,256,90.350,9.650,98.648,1.351,200.13,1.000,bicubic,+3.006,+0.427,+6\nvit_so150m2_patch16_reg1_gap_256.sbb_e200_in12k_ft_in1k,256,90.345,9.655,98.775,1.226,136.06,1.000,bicubic,+3.027,+0.447,+6\nconvnext_base.fb_in22k_ft_in1k_384,384,90.343,9.657,98.796,1.204,88.59,1.000,bicubic,+3.517,+0.552,+31\nvit_large_patch14_clip_336.laion2b_ft_in1k,336,90.341,9.659,98.595,1.405,304.53,1.000,bicubic,+2.471,+0.235,-20\nvit_large_patch14_clip_224.laion2b_ft_in12k_in1k,224,90.326,9.674,98.670,1.330,304.20,1.000,bicubic,+2.426,+0.262,-23\nmaxvit_large_tf_384.in21k_ft_in1k,384,90.317,9.682,98.670,1.330,212.03,1.000,bicubic,+2.323,+0.094,-29\ncaformer_s36.sail_in22k_ft_in1k_384,384,90.315,9.685,98.806,1.194,39.30,1.000,bicubic,+3.455,+0.587,+23\nvit_large_patch14_clip_224.openai_ft_in1k,224,90.311,9.689,98.642,1.358,304.20,1.000,bicubic,+2.453,+0.212,-23\nconvnext_large.fb_in22k_ft_in1k_384,384,90.307,9.693,98.666,1.334,197.77,1.000,bicubic,+2.851,+0.286,-9\nvit_so150m_patch16_reg4_gap_384.sbb_e250_in12k_ft_in1k,384,90.279,9.721,98.764,1.236,134.42,
1.000,bicubic,+2.909,+0.450,-4\nconvformer_b36.sail_in22k_ft_in1k,224,90.251,9.749,98.702,1.298,99.88,1.000,bicubic,+3.259,+0.538,+11\nconvformer_m36.sail_in22k_ft_in1k_384,384,90.251,9.749,98.657,1.343,57.05,1.000,bicubic,+3.383,+0.535,+17\ndeit3_large_patch16_384.fb_in22k_ft_in1k,384,90.247,9.753,98.629,1.371,304.76,1.000,bicubic,+2.511,+0.121,-25\nseresnextaa101d_32x8d.sw_in12k_ft_in1k_288,320,90.241,9.759,98.783,1.217,93.59,1.000,bicubic,+3.519,+0.599,+27\ndm_nfnet_f6.dm_in1k,576,90.241,9.759,98.629,1.371,438.36,0.956,bicubic,+3.877,+0.729,+56\nconvnextv2_huge.fcmae_ft_in1k,288,90.236,9.764,98.550,1.450,660.29,1.000,bicubic,+3.618,+0.540,+31\nvit_large_patch16_384.augreg_in21k_ft_in1k,384,90.219,9.781,98.653,1.347,304.72,1.000,bicubic,+3.123,+0.351,0\ndeit3_huge_patch14_224.fb_in22k_ft_in1k,224,90.215,9.785,98.636,1.364,632.13,1.000,bicubic,+3.035,+0.372,-5\ntf_efficientnetv2_l.in21k_ft_in1k,480,90.213,9.787,98.725,1.275,118.52,1.000,bicubic,+3.411,+0.601,+17\nnextvit_large.bd_ssld_6m_in1k_384,384,90.211,9.789,98.794,1.206,57.87,1.000,bicubic,+3.667,+0.668,+32\nregnety_320.swag_ft_in1k,384,90.211,9.789,98.772,1.228,145.05,1.000,bicubic,+3.381,+0.410,+13\nvit_base_patch16_clip_384.openai_ft_in12k_in1k,384,90.207,9.793,98.651,1.349,86.86,0.950,bicubic,+3.177,+0.465,-3\nvit_base_patch16_clip_384.laion2b_ft_in1k,384,90.192,9.809,98.698,1.302,86.86,1.000,bicubic,+3.553,+0.690,+22\nswinv2_large_window12to24_192to384.ms_in22k_ft_in1k,384,90.192,9.809,98.610,1.390,196.74,1.000,bicubic,+2.718,+0.346,-26\nnextvit_base.bd_ssld_6m_in1k_384,384,90.187,9.813,98.813,1.187,44.82,1.000,bicubic,+3.815,+0.773,+45\ncait_m48_448.fb_dist_in1k,448,90.181,9.819,98.480,1.520,356.46,1.000,bicubic,+3.701,+0.732,+35\nvit_huge_patch14_clip_224.laion2b_ft_in1k,224,90.179,9.821,98.555,1.445,632.05,1.000,bicubic,+2.567,+0.329,-37\nvolo_d3_448.sail_in1k,448,90.179,9.821,98.542,1.458,86.63,1.000,bicubic,+3.665,+0.832,+31\nvit_base_patch16_clip_384.laion2b_ft_in12k_in1k,384,90.174,9.826,98.567,1.
433,86.86,1.000,bicubic,+2.964,+0.535,-17\nbeit_large_patch16_224.in22k_ft_in22k_in1k,224,90.159,9.841,98.725,1.275,304.43,0.900,bicubic,+2.673,+0.407,-35\ntf_efficientnet_b7.ns_jft_in1k,600,90.159,9.841,98.606,1.394,66.35,0.949,bicubic,+3.309,+0.526,+1\nmambaout_base_plus_rw.sw_e150_in12k_ft_in1k,288,90.149,9.851,98.713,1.287,101.66,1.000,bicubic,+3.233,+0.475,-5\ndm_nfnet_f6.dm_in1k,448,90.123,9.877,98.578,1.422,438.36,0.956,bicubic,+3.967,+0.816,+56\nbeitv2_large_patch16_224.in1k_ft_in1k,224,90.112,9.887,98.437,1.563,304.43,0.950,bicubic,+2.698,+0.221,-31\nvit_large_patch14_clip_224.laion2b_ft_in1k,224,90.104,9.896,98.555,1.445,304.20,1.000,bicubic,+2.816,+0.319,-25\nconvnext_xlarge.fb_in22k_ft_in1k,288,90.095,9.905,98.629,1.371,350.20,1.000,bicubic,+2.717,+0.299,-31\ncaformer_b36.sail_in1k_384,384,90.068,9.932,98.516,1.484,98.75,1.000,bicubic,+3.668,+0.692,+28\nswin_large_patch4_window12_384.ms_in22k_ft_in1k,384,90.066,9.934,98.663,1.337,196.74,1.000,bicubic,+2.924,+0.435,-23\nswin_base_patch4_window12_384.ms_in22k_ft_in1k,384,90.063,9.937,98.710,1.290,87.90,1.000,bicubic,+3.621,+0.642,+24\nconvnextv2_base.fcmae_ft_in22k_in1k,288,90.063,9.937,98.670,1.330,88.72,1.000,bicubic,+3.069,+0.502,-18\nconvformer_s36.sail_in22k_ft_in1k_384,384,90.049,9.951,98.627,1.373,40.01,1.000,bicubic,+3.675,+0.647,+28\ncait_m36_384.fb_dist_in1k,384,90.049,9.951,98.484,1.516,271.22,1.000,bicubic,+3.989,+0.752,+55\nseresnextaa101d_32x8d.sw_in12k_ft_in1k,288,90.034,9.966,98.698,1.302,93.59,1.000,bicubic,+3.510,+0.664,+14\ntiny_vit_21m_512.dist_in22k_ft_in1k,512,90.031,9.969,98.482,1.518,21.27,1.000,bicubic,+3.587,+0.598,+19\ntf_efficientnetv2_m.in21k_ft_in1k,480,90.016,9.984,98.661,1.339,54.14,1.000,bicubic,+4.002,+0.705,+59\nhgnetv2_b6.ssld_stage1_in22k_in1k,288,90.008,9.992,98.689,1.311,75.26,1.000,bicubic,+3.708,+0.747,+27\ndeit3_large_patch16_224.fb_in22k_ft_in1k,224,90.008,9.992,98.661,1.339,304.37,1.000,bicubic,+3.018,+0.415,-23\nseresnextaa101d_32x8d.sw_in12k_ft_in1k_288,288,90.
006,9.994,98.713,1.287,93.59,0.950,bicubic,+3.476,+0.619,+8\nconvnextv2_large.fcmae_ft_in22k_in1k,288,90.001,9.998,98.631,1.369,197.96,1.000,bicubic,+2.507,+0.271,-54\nvit_mediumd_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k,256,90.001,9.998,98.602,1.399,64.11,0.950,bicubic,+3.393,+0.668,-1\nconvnext_base.clip_laiona_augreg_ft_in1k_384,384,89.999,10.001,98.548,1.452,88.59,1.000,bicubic,+3.509,+0.588,+9\nvit_so150m_patch16_reg4_gap_256.sbb_e250_in12k_ft_in1k,256,89.993,10.007,98.631,1.369,134.13,0.950,bicubic,+3.273,+0.607,-10\nnextvit_small.bd_ssld_6m_in1k_384,384,89.984,10.016,98.713,1.287,31.76,1.000,bicubic,+4.018,+0.809,+55\nconvnextv2_large.fcmae_ft_in22k_in1k,224,89.982,10.018,98.544,1.456,197.96,0.875,bicubic,+2.716,+0.304,-43\nconvnext_xlarge.fb_in22k_ft_in1k,224,89.978,10.022,98.578,1.422,350.20,0.875,bicubic,+3.008,+0.374,-30\nhiera_huge_224.mae_in1k_ft_in1k,224,89.978,10.022,98.367,1.633,672.78,0.900,bicubic,+3.138,+0.359,-24\nmaxvit_base_tf_512.in1k,512,89.972,10.028,98.444,1.556,119.88,1.000,bicubic,+3.374,+0.519,-6\nvit_base_patch16_384.augreg_in21k_ft_in1k,384,89.969,10.030,98.678,1.322,86.86,1.000,bicubic,+3.959,+0.680,+47\nconvnextv2_large.fcmae_ft_in1k,288,89.961,10.039,98.548,1.452,197.96,1.000,bicubic,+3.829,+0.728,+32\nmaxvit_rmlp_base_rw_224.sw_in12k_ft_in1k,224,89.950,10.050,98.427,1.573,116.14,0.950,bicubic,+3.042,+0.422,-32\ncaformer_m36.sail_in1k_384,384,89.942,10.058,98.448,1.552,56.20,1.000,bicubic,+3.778,+0.636,+27\nconvnext_small.fb_in22k_ft_in1k_384,384,89.927,10.073,98.691,1.309,50.22,1.000,bicubic,+4.175,+0.813,+69\nregnety_160.swag_ft_in1k,384,89.918,10.082,98.640,1.360,83.59,1.000,bicubic,+3.898,+0.594,+39\nswinv2_large_window12to16_192to256.ms_in22k_ft_in1k,256,89.918,10.082,98.505,1.494,196.74,0.900,bicubic,+2.984,+0.400,-37\nmambaout_base_plus_rw.sw_e150_in12k_ft_in1k,224,89.908,10.092,98.655,1.345,101.66,1.000,bicubic,+3.266,+0.505,-22\nefficientnet_b5.sw_in12k_ft_in1k,448,89.901,10.099,98.572,1.428,30.39,1.000,bicubic,+4.007,+
0.830,+47\nhgnet_base.ssld_in1k,288,89.899,10.101,98.535,1.465,71.58,1.000,bicubic,+4.411,+0.905,+86\nxcit_large_24_p8_384.fb_dist_in1k,384,89.897,10.103,98.377,1.623,188.93,1.000,bicubic,+3.873,+0.687,+34\nswinv2_base_window12to16_192to256.ms_in22k_ft_in1k,256,89.893,10.107,98.659,1.341,87.92,0.900,bicubic,+3.623,+0.753,+7\nconvnext_large.fb_in22k_ft_in1k,288,89.891,10.110,98.608,1.392,197.77,1.000,bicubic,+2.879,+0.396,-50\ndeit3_base_patch16_384.fb_in22k_ft_in1k,384,89.888,10.112,98.602,1.399,86.88,1.000,bicubic,+3.146,+0.492,-31\nvit_betwixt_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k,384,89.882,10.118,98.484,1.516,60.60,1.000,bicubic,+3.264,+0.516,-26\nvolo_d5_224.sail_in1k,224,89.876,10.124,98.490,1.510,295.46,0.960,bicubic,+3.800,+0.916,+23\nconvnextv2_huge.fcmae_ft_in1k,224,89.876,10.124,98.412,1.589,660.29,0.875,bicubic,+3.615,+0.660,+3\nconvnext_base.fb_in22k_ft_in1k,288,89.873,10.127,98.698,1.302,88.59,1.000,bicubic,+3.571,+0.610,-2\nhgnetv2_b6.ssld_stage2_ft_in1k,288,89.858,10.142,98.627,1.373,75.26,1.000,bicubic,+3.478,+0.685,-9\ncaformer_m36.sail_in22k_ft_in1k,224,89.854,10.146,98.585,1.415,56.20,1.000,bicubic,+3.248,+0.547,-27\ntiny_vit_21m_384.dist_in22k_ft_in1k,384,89.848,10.152,98.497,1.503,21.23,1.000,bicubic,+3.760,+0.781,+17\nconvnextv2_base.fcmae_ft_in22k_in1k,224,89.839,10.161,98.546,1.454,88.72,0.875,bicubic,+3.087,+0.526,-40\nefficientvit_l2.r384_in1k,384,89.835,10.165,98.185,1.815,63.71,1.000,bicubic,+3.851,+0.675,+27\nconvnext_large.fb_in22k_ft_in1k,224,89.831,10.169,98.512,1.488,197.77,0.875,bicubic,+3.219,+0.476,-33\ncait_s36_384.fb_dist_in1k,384,89.829,10.171,98.431,1.569,68.37,1.000,bicubic,+4.371,+0.951,+73\nconvformer_m36.sail_in22k_ft_in1k,224,89.826,10.174,98.561,1.439,57.05,1.000,bicubic,+3.674,+0.709,+7\ncoatnet_2_rw_224.sw_in12k_ft_in1k,224,89.824,10.176,98.531,1.469,73.87,0.950,bicubic,+3.242,+0.635,-32\nxcit_medium_24_p8_384.fb_dist_in1k,384,89.809,10.191,98.367,1.633,84.32,1.000,bicubic,+3.967,+0.765,+33\nconvnext_base.clip_
laion2b_augreg_ft_in12k_in1k,256,89.805,10.195,98.655,1.345,88.59,1.000,bicubic,+3.427,+0.679,-18\nmaxvit_large_tf_512.in1k,512,89.805,10.195,98.345,1.655,212.33,1.000,bicubic,+3.271,+0.461,-32\nhgnetv2_b6.ssld_stage1_in22k_in1k,224,89.799,10.201,98.599,1.401,75.26,0.965,bicubic,+3.747,+0.793,+10\nvolo_d4_224.sail_in1k,224,89.799,10.201,98.422,1.578,192.96,0.960,bicubic,+3.931,+0.958,+26\nvit_mediumd_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,89.796,10.204,98.520,1.480,64.11,0.950,bicubic,+3.599,+0.632,-6\nvit_large_r50_s32_384.augreg_in21k_ft_in1k,384,89.790,10.210,98.516,1.484,329.09,1.000,bicubic,+3.602,+0.588,-6\nswin_large_patch4_window7_224.ms_in22k_ft_in1k,224,89.786,10.214,98.648,1.351,196.53,0.900,bicubic,+3.466,+0.758,-20\nvolo_d2_384.sail_in1k,384,89.784,10.216,98.407,1.593,58.87,1.000,bicubic,+3.730,+0.825,+5\ntf_efficientnetv2_xl.in21k_ft_in1k,512,89.784,10.216,98.294,1.706,208.12,1.000,bicubic,+3.030,+0.280,-56\ncoatnet_rmlp_2_rw_224.sw_in12k_ft_in1k,224,89.777,10.223,98.473,1.527,73.88,0.950,bicubic,+3.239,+0.579,-41\ntf_efficientnet_b6.ns_jft_in1k,528,89.771,10.229,98.527,1.473,43.04,0.942,bicubic,+3.295,+0.663,-34\ncaformer_s18.sail_in22k_ft_in1k_384,384,89.762,10.238,98.578,1.422,26.34,1.000,bicubic,+4.320,+0.860,+60\nbeitv2_base_patch16_224.in1k_ft_in22k_in1k,224,89.758,10.242,98.593,1.407,86.53,0.900,bicubic,+3.264,+0.537,-39\nvit_base_patch8_224.augreg2_in21k_ft_in1k,224,89.752,10.248,98.516,1.484,86.58,0.900,bicubic,+3.500,+0.676,-22\nxcit_small_24_p8_384.fb_dist_in1k,384,89.752,10.248,98.420,1.580,47.63,1.000,bicubic,+4.172,+0.872,+44\ncaformer_s36.sail_in22k_ft_in1k,224,89.747,10.253,98.653,1.347,39.30,1.000,bicubic,+3.967,+0.821,+24\nmaxxvitv2_rmlp_base_rw_224.sw_in12k_ft_in1k,224,89.741,10.259,98.484,1.516,116.09,0.950,bicubic,+3.101,+0.470,-58\ndm_nfnet_f4.dm_in1k,512,89.741,10.259,98.405,1.595,316.07,0.951,bicubic,+3.903,+0.629,+17\nregnety_160.lion_in12k_ft_in1k,288,89.728,10.272,98.610,1.390,83.59,1.000,bicubic,+3.724,+0.776,+2\ndm_nfne
t_f5.dm_in1k,544,89.728,10.272,98.435,1.565,377.21,0.954,bicubic,+3.626,+0.677,-12\nconvformer_m36.sail_in1k_384,384,89.720,10.280,98.444,1.556,57.05,1.000,bicubic,+4.140,+0.871,+38\nefficientvit_l3.r384_in1k,384,89.720,10.280,98.014,1.986,246.04,1.000,bicubic,+3.324,+0.374,-41\nvolo_d1_384.sail_in1k,384,89.711,10.289,98.290,1.710,26.78,1.000,bicubic,+4.449,+1.072,+69\ncaformer_s36.sail_in1k_384,384,89.698,10.302,98.313,1.687,39.30,1.000,bicubic,+3.936,+0.647,+20\ndeit3_large_patch16_384.fb_in1k,384,89.692,10.308,98.401,1.599,304.76,1.000,bicubic,+3.872,+0.799,+11\nvit_base_patch16_clip_384.openai_ft_in1k,384,89.690,10.310,98.516,1.484,86.86,1.000,bicubic,+3.480,+0.622,-30\nhgnetv2_b6.ssld_stage2_ft_in1k,224,89.686,10.315,98.557,1.443,75.26,0.965,bicubic,+3.480,+0.747,-30\nconvnext_base.clip_laion2b_augreg_ft_in1k,256,89.677,10.323,98.476,1.524,88.59,1.000,bicubic,+3.493,+0.802,-26\nconvnext_base.fb_in22k_ft_in1k,224,89.675,10.325,98.542,1.458,88.59,0.875,bicubic,+3.861,+0.686,+6\nxcit_large_24_p16_384.fb_dist_in1k,384,89.675,10.325,98.407,1.593,189.10,1.000,bicubic,+3.897,+0.877,+11\nconvformer_b36.sail_in1k_384,384,89.675,10.325,98.384,1.616,99.88,1.000,bicubic,+3.905,+0.862,+14\nnextvit_large.bd_ssld_6m_in1k,224,89.668,10.332,98.634,1.366,57.87,0.950,bicubic,+4.154,+1.146,+32\ntf_efficientnet_b5.ns_jft_in1k,456,89.662,10.338,98.488,1.512,30.39,0.934,bicubic,+3.566,+0.744,-24\nhgnetv2_b5.ssld_stage2_ft_in1k,288,89.649,10.351,98.490,1.510,39.57,1.000,bicubic,+4.491,+0.903,+70\nregnety_2560.seer_ft_in1k,384,89.630,10.370,98.403,1.597,\"1,282.60\",1.000,bicubic,+4.486,+0.959,+72\ndm_nfnet_f3.dm_in1k,416,89.628,10.372,98.461,1.539,254.92,0.940,bicubic,+3.952,+0.885,+14\nconvnext_small.in12k_ft_in1k_384,384,89.621,10.379,98.450,1.550,50.22,1.000,bicubic,+3.435,+0.530,-36\nmaxvit_base_tf_384.in1k,384,89.606,10.393,98.322,1.678,119.65,1.000,bicubic,+3.316,+0.520,-48\nregnety_160.sw_in12k_ft_in1k,288,89.598,10.402,98.557,1.443,83.59,1.000,bicubic,+3.570,+0.721,-23\nvolo_d
3_224.sail_in1k,224,89.589,10.411,98.371,1.629,86.33,0.960,bicubic,+4.149,+1.097,+33\ntf_efficientnetv2_l.in21k_ft_in1k,384,89.587,10.413,98.503,1.497,118.52,1.000,bicubic,+3.741,+0.737,-10\nconvformer_s18.sail_in22k_ft_in1k_384,384,89.585,10.415,98.531,1.469,26.77,1.000,bicubic,+4.581,+0.959,+83\nvit_base_patch16_clip_224.laion2b_ft_in12k_in1k,224,89.583,10.417,98.409,1.591,86.57,0.950,bicubic,+3.379,+0.647,-45\nmaxvit_large_tf_384.in1k,384,89.583,10.417,98.185,1.815,212.03,1.000,bicubic,+3.341,+0.501,-50\ntf_efficientnet_b8.ap_in1k,672,89.581,10.419,98.303,1.697,87.41,0.954,bicubic,+4.223,+1.005,+39\nseresnextaa101d_32x8d.sw_in12k_ft_in1k,224,89.579,10.421,98.486,1.514,93.59,0.875,bicubic,+3.623,+0.662,-21\nefficientnet_h_b5.sw_r448_e450_in1k,576,89.577,10.423,98.420,1.580,45.88,1.000,bicubic,+4.635,+1.030,+86\nefficientvit_l2.r288_in1k,288,89.564,10.436,98.061,1.939,63.71,1.000,bicubic,+3.950,+0.697,+6\nmaxvit_tiny_tf_512.in1k,512,89.562,10.438,98.335,1.665,31.05,1.000,bicubic,+3.904,+0.743,+3\ntf_efficientnetv2_l.in1k,480,89.553,10.447,98.339,1.661,118.52,1.000,bicubic,+3.893,+0.867,+1\ndm_nfnet_f4.dm_in1k,384,89.549,10.451,98.271,1.729,316.07,0.951,bicubic,+4.035,+0.593,+13\nefficientvit_l3.r320_in1k,320,89.549,10.451,97.861,2.139,246.04,1.000,bicubic,+3.319,+0.385,-56\nconvnextv2_large.fcmae_ft_in1k,224,89.547,10.453,98.294,1.706,197.96,0.875,bicubic,+3.785,+0.720,-8\nflexivit_large.1200ep_in1k,240,89.540,10.460,98.394,1.606,304.36,0.950,bicubic,+3.894,+0.848,-1\nhiera_large_224.mae_in1k_ft_in1k,224,89.538,10.462,98.226,1.774,213.74,0.900,bicubic,+3.492,+0.580,-40\nxcit_small_12_p8_384.fb_dist_in1k,384,89.521,10.479,98.296,1.704,26.21,1.000,bicubic,+4.453,+1.032,+63\nxcit_large_24_p8_224.fb_dist_in1k,224,89.519,10.481,98.222,1.778,188.93,1.000,bicubic,+4.123,+0.803,+23\ncait_s24_384.fb_dist_in1k,384,89.510,10.490,98.362,1.638,47.06,1.000,bicubic,+4.462,+1.012,+62\nnextvit_base.bd_ssld_6m_in1k,224,89.506,10.494,98.591,1.409,44.82,0.950,bicubic,+4.320,+0.991,+42
\nflexivit_large.600ep_in1k,240,89.506,10.494,98.386,1.614,304.36,0.950,bicubic,+3.976,+0.892,+3\nconvformer_s36.sail_in22k_ft_in1k,224,89.504,10.496,98.465,1.535,40.01,1.000,bicubic,+4.084,+0.891,+17\nhgnet_base.ssld_in1k,224,89.502,10.498,98.424,1.576,71.58,0.965,bicubic,+4.594,+1.082,+80\nregnety_160.lion_in12k_ft_in1k,224,89.502,10.498,98.424,1.576,83.59,0.950,bicubic,+3.900,+0.756,-7\nhgnetv2_b5.ssld_stage1_in22k_in1k,288,89.498,10.502,98.522,1.478,39.57,1.000,bicubic,+4.568,+1.232,+73\nefficientvit_l3.r256_in1k,256,89.498,10.502,97.910,2.090,246.04,1.000,bicubic,+3.546,+0.570,-37\nconvnextv2_tiny.fcmae_ft_in22k_in1k_384,384,89.493,10.507,98.484,1.516,28.64,1.000,bicubic,+4.395,+0.852,+47\nconvnext_tiny.in12k_ft_in1k_384,384,89.491,10.509,98.510,1.490,28.59,1.000,bicubic,+4.337,+0.876,+39\nbeit3_base_patch16_224.in22k_ft_in1k,224,89.489,10.511,98.399,1.601,86.66,1.000,bicubic,+4.099,+0.759,+13\nrdnet_large.nv_in1k_ft_in1k_384,384,89.483,10.517,98.343,1.657,186.27,1.000,bicubic,+3.639,+0.663,-37\nconvformer_s36.sail_in1k_384,384,89.476,10.524,98.367,1.633,40.01,1.000,bicubic,+4.096,+0.899,+12\ndeit3_base_patch16_224.fb_in22k_ft_in1k,224,89.472,10.528,98.557,1.443,86.59,1.000,bicubic,+3.744,+0.811,-24\nxcit_medium_24_p16_384.fb_dist_in1k,384,89.472,10.528,98.298,1.702,84.40,1.000,bicubic,+4.038,+0.890,+4\nvit_base_patch16_clip_224.laion2b_ft_in1k,224,89.466,10.534,98.478,1.522,86.57,1.000,bicubic,+3.978,+0.892,-4\nvit_base_patch8_224.augreg_in21k_ft_in1k,224,89.457,10.543,98.471,1.529,86.58,0.900,bicubic,+3.619,+0.819,-40\nvit_base_patch16_224.augreg2_in21k_ft_in1k,224,89.457,10.543,98.467,1.533,86.57,0.900,bicubic,+4.349,+0.937,+34\nvit_base_patch32_clip_448.laion2b_ft_in12k_in1k,448,89.457,10.543,98.414,1.586,88.34,1.000,bicubic,+3.665,+0.780,-36\nefficientnet_x_b5.sw_r448_e450_in1k,576,89.453,10.547,98.384,1.616,33.44,1.000,bicubic,+4.525,+1.072,+63\ndm_nfnet_f5.dm_in1k,416,89.446,10.554,98.305,1.695,377.21,0.954,bicubic,+3.734,+0.755,-29\nmaxvit_small_tf_512.
in1k,512,89.442,10.558,98.360,1.640,69.13,1.000,bicubic,+3.340,+0.672,-72\ninception_next_base.sail_in1k_384,384,89.440,10.560,98.352,1.648,86.67,1.000,bicubic,+4.238,+1.078,+19\nregnety_120.sw_in12k_ft_in1k,288,89.434,10.566,98.540,1.460,51.82,1.000,bicubic,+4.006,+0.964,-5\ndeit_base_distilled_patch16_384.fb_in1k,384,89.434,10.566,98.441,1.559,87.63,1.000,bicubic,+3.996,+1.109,-8\nvit_base_patch16_clip_224.openai_ft_in12k_in1k,224,89.431,10.569,98.405,1.595,86.57,0.950,bicubic,+3.477,+0.681,-58\nconvnextv2_base.fcmae_ft_in1k,288,89.431,10.569,98.367,1.633,88.72,1.000,bicubic,+3.941,+0.987,-16\ntf_efficientnet_b7.ap_in1k,600,89.429,10.571,98.335,1.665,66.35,0.949,bicubic,+4.297,+1.079,+24\nbeit_base_patch16_224.in22k_ft_in22k_in1k,224,89.427,10.573,98.520,1.480,86.53,0.900,bicubic,+4.189,+0.866,+8\nefficientvit_l3.r224_in1k,224,89.416,10.584,97.886,2.114,246.04,1.000,bicubic,+3.606,+0.676,-49\ncaformer_b36.sail_in1k,224,89.408,10.592,98.224,1.776,98.75,1.000,bicubic,+3.906,+0.908,-22\nhrnet_w48_ssld.paddle_in1k,288,89.401,10.598,98.386,1.614,77.47,1.000,bilinear,+4.918,+1.162,+106\nefficientvit_l2.r256_in1k,256,89.399,10.601,98.053,1.947,63.71,1.000,bicubic,+4.019,+0.797,-7\nregnety_160.sw_in12k_ft_in1k,224,89.391,10.609,98.394,1.606,83.59,0.950,bicubic,+3.795,+0.726,-35\nregnetz_e8.ra3_in1k,320,89.387,10.613,98.454,1.546,57.70,1.000,bicubic,+4.371,+1.176,+31\ntf_efficientnet_b4.ns_jft_in1k,380,89.382,10.618,98.367,1.633,19.34,0.922,bicubic,+4.226,+0.885,+12\nbeit3_base_patch16_224.indomain_in22k_ft_in1k,224,89.380,10.620,98.373,1.627,86.66,1.000,bicubic,+3.926,+0.753,-23\ndeit3_small_patch16_384.fb_in22k_ft_in1k,384,89.372,10.628,98.388,1.612,22.21,1.000,bicubic,+4.526,+1.270,+54\neva02_small_patch14_336.mim_in22k_ft_in1k,336,89.372,10.628,98.358,1.642,22.13,1.000,bicubic,+3.602,+0.742,-53\nvit_medium_patch16_gap_384.sw_in12k_ft_in1k,384,89.365,10.635,98.495,1.505,39.03,0.950,bicubic,+3.809,+0.857,-36\ntf_efficientnetv2_m.in1k,480,89.350,10.650,98.326,1.674,54.14,
1.000,bicubic,+4.142,+0.968,-2\nmobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k,480,89.346,10.654,98.367,1.633,32.59,1.000,bicubic,+4.574,+1.319,+60\nconvnext_small.fb_in22k_ft_in1k,288,89.344,10.656,98.358,1.642,50.22,1.000,bicubic,+4.068,+0.792,-11\ncaformer_s18.sail_in1k_384,384,89.338,10.662,98.290,1.710,26.34,1.000,bicubic,+4.302,+0.936,+20\ntf_efficientnet_b6.ap_in1k,528,89.338,10.662,98.283,1.717,43.04,0.942,bicubic,+4.544,+1.145,+57\nhgnetv2_b5.ssld_stage2_ft_in1k,224,89.329,10.671,98.328,1.672,39.57,0.965,bicubic,+4.505,+1.034,+52\ntf_efficientnet_b8.ra_in1k,672,89.329,10.671,98.305,1.695,87.41,0.954,bicubic,+3.985,+0.913,-20\nvolo_d2_224.sail_in1k,224,89.329,10.671,98.207,1.794,58.68,0.960,bicubic,+4.125,+0.843,-5\nnextvit_small.bd_ssld_6m_in1k,224,89.323,10.678,98.424,1.576,31.76,0.950,bicubic,+4.451,+1.052,+42\nvit_large_patch16_224.augreg_in21k_ft_in1k,224,89.318,10.682,98.405,1.595,304.33,0.900,bicubic,+3.462,+0.583,-77\nxcit_small_24_p16_384.fb_dist_in1k,384,89.312,10.688,98.352,1.648,47.67,1.000,bicubic,+4.210,+1.034,+4\nflexivit_large.300ep_in1k,240,89.310,10.690,98.322,1.678,304.36,0.950,bicubic,+4.026,+0.918,-22\nvit_betwixt_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k,256,89.299,10.701,98.313,1.687,60.40,0.950,bicubic,+3.565,+0.703,-63\nxcit_medium_24_p8_224.fb_dist_in1k,224,89.297,10.703,98.196,1.804,84.32,1.000,bicubic,+4.219,+0.922,+6\nmambaout_base_short_rw.sw_e500_in1k,288,89.297,10.703,98.096,1.905,88.83,1.000,bicubic,+4.455,+0.809,+41\nnextvit_large.bd_in1k_384,384,89.293,10.707,98.307,1.693,57.87,1.000,bicubic,+4.363,+0.813,+26\nmambaout_base_wide_rw.sw_e500_in1k,288,89.286,10.714,98.179,1.821,94.45,1.000,bicubic,+4.320,+0.957,+17\nnextvit_base.bd_in1k_384,384,89.284,10.716,98.307,1.693,44.82,1.000,bicubic,+4.578,+1.087,+52\ndm_nfnet_f3.dm_in1k,320,89.280,10.720,98.281,1.719,254.92,0.940,bicubic,+4.188,+0.895,-2\nmobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k,544,89.276,10.724,98.262,1.738,32.59,1.000,bicubic,+4.286,+0.968,+10\nmamba
out_base_tall_rw.sw_e500_in1k,288,89.276,10.724,98.189,1.810,86.48,1.000,bicubic,+4.288,+0.855,+10\nbeitv2_base_patch16_224.in1k_ft_in1k,224,89.271,10.729,98.249,1.751,86.53,0.900,bicubic,+3.675,+0.735,-62\nhgnet_small.ssld_in1k,288,89.246,10.754,98.369,1.631,24.36,1.000,bicubic,+4.878,+1.239,+102\nxcit_small_12_p16_384.fb_dist_in1k,384,89.243,10.757,98.258,1.742,26.25,1.000,bicubic,+4.510,+1.122,+41\nxcit_small_24_p8_224.fb_dist_in1k,224,89.224,10.776,98.236,1.764,47.63,1.000,bicubic,+4.346,+1.036,+25\nvit_base_patch32_clip_384.laion2b_ft_in12k_in1k,384,89.218,10.782,98.350,1.650,88.30,1.000,bicubic,+3.820,+0.686,-44\ncoat_lite_medium_384.in1k,384,89.216,10.784,98.226,1.774,44.57,1.000,bicubic,+4.326,+0.850,+20\ndeit3_huge_patch14_224.fb_in1k,224,89.212,10.789,98.172,1.828,632.13,0.900,bicubic,+4.008,+0.986,-28\nswin_base_patch4_window7_224.ms_in22k_ft_in1k,224,89.197,10.803,98.422,1.578,87.77,0.900,bicubic,+3.921,+0.744,-36\ndm_nfnet_f2.dm_in1k,352,89.186,10.814,98.228,1.772,193.78,0.920,bicubic,+4.002,+0.888,-25\nregnety_120.sw_in12k_ft_in1k,224,89.175,10.825,98.350,1.650,51.82,0.950,bicubic,+4.181,+0.938,-2\ncait_xs24_384.fb_dist_in1k,384,89.169,10.831,98.285,1.714,26.67,1.000,bicubic,+5.105,+1.397,+137\ntf_efficientnetv2_m.in21k_ft_in1k,384,89.160,10.840,98.392,1.608,54.14,1.000,bicubic,+4.392,+0.958,+31\neca_nfnet_l2.ra3_in1k,384,89.158,10.842,98.320,1.680,56.72,1.000,bicubic,+4.448,+1.328,+34\nvit_base_patch16_clip_224.openai_ft_in1k,224,89.156,10.844,98.281,1.719,86.57,0.900,bicubic,+3.874,+0.833,-44\nconvnext_small.fb_in22k_ft_in1k,224,89.150,10.850,98.322,1.678,50.22,0.875,bicubic,+4.568,+1.044,+47\nefficientnet_h_b5.sw_r448_e450_in1k,448,89.120,10.880,98.232,1.768,45.88,1.000,bicubic,+4.702,+1.092,+79\nresnext101_32x32d.fb_wsl_ig1b_ft_in1k,224,89.120,10.880,98.185,1.815,468.53,0.875,bilinear,+4.030,+0.743,-20\nfastvit_ma36.apple_dist_in1k,256,89.120,10.880,98.149,1.851,44.07,0.950,bicubic,+4.512,+1.153,+43\nmaxvit_small_tf_384.in1k,384,89.118,10.882,98.16
0,1.840,69.02,1.000,bicubic,+3.584,+0.694,-76\nconvformer_s18.sail_in1k_384,384,89.118,10.882,98.142,1.857,26.77,1.000,bicubic,+4.726,+1.032,+82\nhgnetv2_b5.ssld_stage1_in22k_in1k,224,89.103,10.897,98.296,1.704,39.57,0.965,bicubic,+4.647,+1.448,+61\nmaxvit_tiny_tf_384.in1k,384,89.100,10.899,98.215,1.785,30.98,1.000,bicubic,+3.995,+0.833,-29\ntiny_vit_21m_224.dist_in22k_ft_in1k,224,89.098,10.902,98.239,1.761,21.20,0.950,bicubic,+4.010,+0.987,-25\ntf_efficientnet_b7.ra_in1k,600,89.090,10.910,98.192,1.808,66.35,0.949,bicubic,+4.164,+0.972,-2\nvit_large_patch16_rope_mixed_ape_224.naver_in1k,224,89.090,10.910,98.130,1.870,304.40,0.900,bicubic,+4.244,+0.644,+9\necaresnet269d.ra2_in1k,352,89.088,10.912,98.243,1.757,102.09,1.000,bicubic,+4.120,+1.019,-14\ntf_efficientnetv2_l.in1k,384,89.079,10.921,98.228,1.772,118.52,1.000,bicubic,+3.877,+0.804,-45\nefficientvit_l2.r224_in1k,224,89.073,10.927,98.031,1.968,63.71,1.000,bicubic,+4.027,+0.927,-25\nvit_betwixt_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,89.066,10.934,98.170,1.830,60.40,0.950,bicubic,+3.640,+0.692,-70\nvit_medium_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,89.062,10.938,98.256,1.744,38.88,0.950,bicubic,+4.106,+0.872,-16\nvit_base_patch32_clip_384.openai_ft_in12k_in1k,384,89.060,10.940,98.292,1.708,88.30,0.950,bicubic,+3.842,+0.894,-54\nxcit_large_24_p16_224.fb_dist_in1k,224,89.053,10.947,98.064,1.937,189.10,1.000,bicubic,+4.117,+0.936,-13\nregnety_1280.seer_ft_in1k,384,89.047,10.953,98.151,1.849,644.81,1.000,bicubic,+4.621,+1.065,+61\nconvnext_small.in12k_ft_in1k,288,89.026,10.974,98.230,1.770,50.22,1.000,bicubic,+3.702,+0.676,-66\nresmlp_big_24_224.fb_in22k_ft_in1k,224,89.024,10.976,98.215,1.785,129.14,0.875,bicubic,+4.610,+1.107,+63\nmobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k,448,89.024,10.976,98.081,1.919,32.59,0.950,bicubic,+4.356,+0.973,+17\ndm_nfnet_f1.dm_in1k,320,89.022,10.979,98.254,1.746,132.63,0.910,bicubic,+4.322,+1.076,+13\nxcit_small_12_p8_224.fb_dist_in1k,224,89.019,10.981,98.078,1.922,26.21,1.
000,bicubic,+4.767,+1.204,+83\nmobilenetv4_hybrid_large.e600_r384_in1k,448,89.015,10.985,98.055,1.945,37.76,1.000,bicubic,+4.751,+1.099,+79\nvit_large_patch16_rope_mixed_224.naver_in1k,224,89.013,10.987,98.160,1.840,304.20,0.900,bicubic,+4.173,+1.032,-5\ncaformer_m36.sail_in1k,224,89.000,11.000,98.038,1.962,56.20,1.000,bicubic,+3.758,+0.812,-66\nefficientnetv2_rw_m.agc_in1k,416,88.992,11.008,98.213,1.787,53.24,1.000,bicubic,+4.162,+1.065,-7\nhgnetv2_b3.ssld_stage2_ft_in1k,288,88.992,11.008,98.179,1.821,16.29,1.000,bicubic,+5.404,+1.367,+193\nconvnext_large.fb_in1k,288,88.990,11.011,98.044,1.956,197.77,1.000,bicubic,+4.128,+0.834,-13\ncaformer_s18.sail_in22k_ft_in1k,224,88.975,11.025,98.307,1.693,26.34,1.000,bicubic,+4.883,+1.125,+99\nregnetz_040_h.ra3_in1k,320,88.972,11.028,98.194,1.806,28.94,1.000,bicubic,+4.474,+1.188,+27\nvit_large_patch16_rope_224.naver_in1k,224,88.972,11.028,98.100,1.900,304.17,0.900,bicubic,+4.326,+0.980,+10\nedgenext_base.in21k_ft_in1k,320,88.970,11.030,98.269,1.732,18.51,1.000,bicubic,+4.894,+1.061,+97\nvit_large_patch16_rope_ape_224.naver_in1k,224,88.968,11.032,98.059,1.941,304.37,0.900,bicubic,+4.308,+0.913,+4\nhgnetv2_b4.ssld_stage2_ft_in1k,288,88.962,11.038,98.307,1.693,19.80,1.000,bicubic,+5.010,+1.241,+110\nmambaout_small_rw.sw_e450_in1k,288,88.962,11.038,98.070,1.930,48.50,1.000,bicubic,+4.358,+0.974,+11\nregnetz_e8.ra3_in1k,256,88.957,11.043,98.249,1.751,57.70,0.940,bicubic,+4.539,+1.261,+45\nregnetz_d8_evos.ch_in1k,320,88.957,11.043,98.179,1.821,23.46,1.000,bicubic,+4.819,+1.161,+85\ntf_efficientnet_b5.ap_in1k,456,88.949,11.051,98.185,1.815,30.39,0.934,bicubic,+4.691,+1.201,+65\nregnety_1280.swag_lc_in1k,224,88.947,11.053,98.234,1.766,644.81,0.965,bicubic,+2.983,+0.382,-150\nefficientnet_x_b5.sw_r448_e450_in1k,448,88.947,11.053,98.222,1.778,33.44,1.000,bicubic,+4.745,+1.250,+73\ndeit3_base_patch16_384.fb_in1k,384,88.940,11.060,98.036,1.964,86.88,1.000,bicubic,+3.852,+0.682,-60\nmvitv2_large.fb_in1k,224,88.940,11.060,97.961,2.039,217
.99,0.900,bicubic,+3.676,+0.773,-84\nhiera_base_plus_224.mae_in1k_ft_in1k,224,88.940,11.060,97.914,2.086,69.90,0.900,bicubic,+3.804,+0.752,-68\nconvnext_small.in12k_ft_in1k,224,88.938,11.062,98.269,1.732,50.22,0.950,bicubic,+3.774,+0.767,-75\ndeit3_medium_patch16_224.fb_in22k_ft_in1k,224,88.921,11.079,98.296,1.704,38.85,1.000,bicubic,+4.371,+1.116,+7\nmobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k,384,88.917,11.083,98.192,1.808,32.59,0.950,bicubic,+4.603,+1.090,+49\ntf_efficientnetv2_s.in21k_ft_in1k,384,88.913,11.087,98.273,1.727,21.46,1.000,bicubic,+4.581,+0.999,+46\nconvnext_tiny.in12k_ft_in1k,288,88.908,11.092,98.303,1.697,28.59,1.000,bicubic,+4.460,+1.229,+21\nnextvit_small.bd_in1k_384,384,88.906,11.094,98.049,1.951,31.76,1.000,bicubic,+4.870,+1.055,+89\nhgnetv2_b4.ssld_stage1_in22k_in1k,288,88.902,11.098,98.283,1.717,19.80,1.000,bicubic,+5.452,+1.363,+193\nvolo_d1_224.sail_in1k,224,88.898,11.102,98.023,1.977,26.63,0.960,bicubic,+4.750,+1.251,+68\ntf_efficientnetv2_xl.in21k_ft_in1k,384,88.898,11.102,97.978,2.022,208.12,1.000,bicubic,+3.336,+0.522,-125\ntf_efficientnetv2_m.in1k,384,88.895,11.104,98.222,1.778,54.14,1.000,bicubic,+4.341,+1.163,-2\necaresnet269d.ra2_in1k,320,88.893,11.107,98.215,1.785,102.09,0.950,bicubic,+4.169,+1.035,-24\nvit_base_patch16_224.augreg_in21k_ft_in1k,224,88.887,11.113,98.219,1.781,86.57,0.900,bicubic,+4.353,+0.919,-1\nconvnextv2_base.fcmae_ft_in1k,224,88.866,11.134,98.068,1.932,88.72,0.875,bicubic,+3.976,+0.980,-45\nresnetrs420.tf_in1k,416,88.861,11.139,98.019,1.981,191.89,1.000,bicubic,+3.851,+0.909,-67\nconvnext_tiny.fb_in22k_ft_in1k_384,384,88.859,11.141,98.290,1.710,28.59,1.000,bicubic,+4.761,+1.508,+67\nconvformer_b36.sail_in1k,224,88.857,11.143,97.882,2.118,99.88,1.000,bicubic,+4.031,+0.934,-38\ncoatnet_rmlp_1_rw2_224.sw_in12k_ft_in1k,224,88.853,11.147,97.865,2.135,41.72,0.950,bicubic,+3.913,+0.897,-60\nregnetz_d8.ra3_in1k,320,88.846,11.154,98.187,1.813,23.37,1.000,bicubic,+4.802,+1.183,+74\nresnetrs270.tf_in1k,352,88.840,11.
160,98.134,1.866,129.86,1.000,bicubic,+4.402,+1.162,+11\nresnext101_32x16d.fb_wsl_ig1b_ft_in1k,224,88.836,11.164,98.044,1.956,194.03,0.875,bilinear,+4.670,+0.852,+55\nhiera_small_abswin_256.sbb2_e200_in12k_ft_in1k,256,88.832,11.168,98.113,1.887,34.36,0.950,bicubic,+3.919,+0.853,-56\nmobilenetv4_hybrid_large.ix_e600_r384_in1k,448,88.821,11.179,97.957,2.043,37.76,1.000,bicubic,+4.469,+1.069,+26\nmambaout_base.in1k,288,88.821,11.179,97.850,2.150,84.81,1.000,bicubic,+4.091,+0.916,-37\nvit_small_r26_s32_384.augreg_in21k_ft_in1k,384,88.817,11.184,98.341,1.659,36.47,1.000,bicubic,+4.751,+1.031,+64\nswin_small_patch4_window7_224.ms_in22k_ft_in1k,224,88.817,11.184,98.335,1.665,49.61,0.900,bicubic,+5.487,+1.339,+201\nmaxxvit_rmlp_small_rw_256.sw_in1k,256,88.812,11.188,98.072,1.928,66.01,0.950,bicubic,+4.178,+1.010,-28\nxcit_tiny_24_p8_384.fb_dist_in1k,384,88.804,11.196,98.183,1.817,12.11,1.000,bicubic,+5.020,+1.467,+110\nvit_base_r50_s16_384.orig_in21k_ft_in1k,384,88.802,11.198,98.245,1.755,98.95,1.000,bicubic,+3.814,+0.949,-76\nxcit_medium_24_p16_224.fb_dist_in1k,224,88.802,11.198,98.036,1.964,84.40,1.000,bicubic,+4.510,+1.100,+27\nseresnet152d.ra2_in1k,320,88.799,11.200,98.168,1.832,66.84,1.000,bicubic,+4.456,+1.124,+19\nhrnet_w48_ssld.paddle_in1k,224,88.793,11.207,98.108,1.892,77.47,0.950,bilinear,+5.137,+1.260,+133\ntf_efficientnet_b7.aa_in1k,600,88.793,11.207,98.055,1.945,66.35,0.949,bicubic,+4.369,+1.191,+5\nrexnetr_300.sw_in12k_ft_in1k,288,88.789,11.211,98.341,1.659,34.81,1.000,bicubic,+4.207,+0.927,-29\nmambaout_base_wide_rw.sw_e500_in1k,224,88.787,11.213,97.908,2.092,94.45,0.950,bicubic,+4.339,+0.580,-6\nfastvit_sa36.apple_dist_in1k,256,88.784,11.216,98.093,1.907,31.53,0.900,bicubic,+4.756,+1.221,+62\nhgnetv2_b3.ssld_stage1_in22k_in1k,288,88.782,11.218,98.200,1.800,16.29,1.000,bicubic,+5.664,+1.746,+218\nmambaout_base_short_rw.sw_e500_in1k,224,88.778,11.222,97.861,2.139,88.83,0.950,bicubic,+4.414,+0.917,+10\nconvnext_base.fb_in1k,288,88.776,11.224,97.923,2.077,88.59,
1.000,bicubic,+4.310,+0.941,-17\ntf_efficientnet_b6.aa_in1k,528,88.770,11.230,98.068,1.932,43.04,0.942,bicubic,+4.646,+1.178,+40\nconvformer_m36.sail_in1k,224,88.770,11.230,97.765,2.235,57.05,1.000,bicubic,+4.284,+0.899,-23\ndeit3_large_patch16_224.fb_in1k,224,88.763,11.237,97.908,2.092,304.37,0.900,bicubic,+3.991,+0.564,-58\nedgenext_base.usi_in1k,320,88.761,11.239,98.147,1.853,18.51,1.000,bicubic,+4.791,+1.377,+59\nhiera_base_224.mae_in1k_ft_in1k,224,88.759,11.241,97.989,2.011,51.52,0.900,bicubic,+4.231,+0.979,-31\neca_nfnet_l2.ra3_in1k,320,88.757,11.243,98.170,1.830,56.72,0.900,bicubic,+4.587,+1.154,+30\nresnext101_32x8d.fb_swsl_ig1b_ft_in1k,224,88.757,11.243,98.142,1.857,88.79,0.875,bilinear,+4.481,+0.954,+13\nhgnet_small.ssld_in1k,224,88.755,11.245,98.149,1.851,24.36,0.965,bicubic,+4.937,+1.303,+82\nresnetrs200.tf_in1k,320,88.755,11.245,98.104,1.896,93.21,1.000,bicubic,+4.307,+1.232,-19\nconvnextv2_tiny.fcmae_ft_in22k_in1k,288,88.750,11.250,98.200,1.800,28.64,1.000,bicubic,+4.322,+0.940,-14\ncaformer_s36.sail_in1k,224,88.748,11.252,98.014,1.986,39.30,1.000,bicubic,+4.220,+0.992,-36\nconvformer_s18.sail_in22k_ft_in1k,224,88.746,11.254,98.209,1.791,26.77,1.000,bicubic,+4.978,+1.163,+93\nresnetrs350.tf_in1k,384,88.746,11.254,98.034,1.966,163.96,1.000,bicubic,+4.036,+0.778,-62\nvit_base_patch16_224_miil.in21k_ft_in1k,224,88.744,11.256,98.025,1.975,86.54,0.875,bilinear,+4.466,+1.227,+5\nefficientvit_l1.r224_in1k,224,88.744,11.256,97.792,2.208,52.65,1.000,bicubic,+4.280,+0.932,-31\nmambaout_small.in1k,288,88.740,11.260,97.940,2.060,48.49,1.000,bicubic,+4.228,+0.970,-40\nrdnet_large.nv_in1k,224,88.733,11.267,97.970,2.030,186.27,0.900,bicubic,+3.939,+0.814,-76\nmambaout_base_tall_rw.sw_e500_in1k,224,88.733,11.267,97.948,2.052,86.48,0.950,bicubic,+4.297,+0.992,-24\nregnetz_040.ra3_in1k,320,88.727,11.273,98.089,1.911,27.12,1.000,bicubic,+4.485,+1.153,+7\nconvnext_tiny.in12k_ft_in1k,224,88.712,11.288,98.198,1.802,28.59,0.950,bicubic,+4.520,+1.338,+14\nresnetv2_152x2_bit.g
oog_in21k_ft_in1k,448,88.710,11.290,98.305,1.695,236.34,1.000,bilinear,+4.242,+0.873,-39\nefficientnetv2_rw_m.agc_in1k,320,88.708,11.292,98.038,1.962,53.24,1.000,bicubic,+4.412,+1.166,-5\ndavit_base.msft_in1k,224,88.703,11.297,97.874,2.127,87.95,0.950,bicubic,+4.051,+0.854,-65\nregnety_160.deit_in1k,288,88.701,11.299,98.068,1.932,83.59,1.000,bicubic,+4.997,+1.252,+94\nhgnetv2_b4.ssld_stage2_ft_in1k,224,88.695,11.305,98.142,1.857,19.80,0.965,bicubic,+4.995,+1.355,+96\nmobilenetv4_conv_aa_large.e600_r384_in1k,480,88.676,11.324,98.023,1.977,32.59,1.000,bicubic,+4.850,+1.303,+64\nflexivit_base.1200ep_in1k,240,88.661,11.339,97.961,2.039,86.59,0.950,bicubic,+3.983,+0.981,-73\nregnety_640.seer_ft_in1k,384,88.652,11.348,98.168,1.832,281.38,1.000,bicubic,+4.728,+1.242,+37\nregnetz_d32.ra3_in1k,320,88.652,11.348,98.085,1.915,27.58,0.950,bicubic,+4.624,+1.235,+30\nvit_small_patch16_384.augreg_in21k_ft_in1k,384,88.650,11.350,98.236,1.764,22.20,1.000,bicubic,+4.836,+1.130,+62\ndavit_small.msft_in1k,224,88.639,11.361,97.953,2.047,49.75,0.950,bicubic,+4.387,+1.011,-8\nvit_medium_patch16_gap_256.sw_in12k_ft_in1k,256,88.637,11.363,98.185,1.815,38.86,0.950,bicubic,+4.195,+0.971,-41\neca_nfnet_l1.ra2_in1k,320,88.633,11.367,98.136,1.864,41.41,1.000,bicubic,+4.571,+1.110,+19\nregnety_080.ra3_in1k,288,88.626,11.373,97.972,2.028,39.18,1.000,bicubic,+4.686,+1.088,+30\nmvitv2_base.fb_in1k,224,88.626,11.373,97.816,2.184,51.47,0.900,bicubic,+4.202,+0.908,-35\nmobilenetv4_hybrid_large.e600_r384_in1k,384,88.609,11.391,97.933,2.067,37.76,0.950,bicubic,+4.783,+1.185,+53\nswinv2_base_window16_256.ms_in1k,256,88.609,11.391,97.901,2.099,87.92,0.900,bicubic,+4.001,+0.837,-76\nrepvgg_d2se.rvgg_in1k,320,88.607,11.393,98.010,1.990,133.33,1.000,bilinear,+5.057,+1.348,+103\nrdnet_base.nv_in1k,224,88.605,11.395,97.888,2.112,87.45,0.900,bicubic,+4.239,+1.010,-31\nmaxvit_base_tf_224.in1k,224,88.599,11.401,97.859,2.142,119.47,0.950,bicubic,+3.715,+0.857,-109\nconvnext_large.fb_in1k,224,88.594,11.405,97.841,2.
159,197.77,0.875,bicubic,+4.290,+0.900,-25\nflexivit_base.600ep_in1k,240,88.582,11.418,97.918,2.082,86.59,0.950,bicubic,+4.018,+1.008,-73\nregnety_320.seer_ft_in1k,384,88.579,11.421,98.111,1.889,145.05,1.000,bicubic,+5.255,+1.398,+146\nresnetv2_152x4_bit.goog_in21k_ft_in1k,480,88.567,11.433,98.192,1.808,936.53,1.000,bilinear,+3.631,+0.736,-123\nefficientvit_b3.r288_in1k,288,88.567,11.433,97.716,2.284,48.65,1.000,bicubic,+4.405,+0.984,-7\nseresnextaa101d_32x8d.ah_in1k,288,88.556,11.444,97.989,2.011,93.59,1.000,bicubic,+3.976,+0.927,-79\nresnetaa101d.sw_in12k_ft_in1k,288,88.554,11.446,98.074,1.926,44.57,1.000,bicubic,+4.440,+0.960,-5\nxcit_small_24_p16_224.fb_dist_in1k,224,88.539,11.461,98.002,1.998,47.67,1.000,bicubic,+4.659,+1.268,+31\nswinv2_base_window8_256.ms_in1k,256,88.537,11.463,97.897,2.103,87.92,0.900,bicubic,+4.291,+0.995,-25\nmaxvit_rmlp_small_rw_224.sw_in1k,224,88.537,11.463,97.779,2.220,64.90,0.900,bicubic,+4.049,+1.007,-71\nresnest269e.in1k,416,88.533,11.467,98.021,1.979,110.93,0.928,bicubic,+3.991,+1.033,-79\ncoatnet_rmlp_2_rw_224.sw_in1k,224,88.520,11.480,97.581,2.419,73.88,0.950,bicubic,+3.912,+0.837,-89\nregnetz_d8.ra3_in1k,256,88.516,11.485,97.970,2.030,23.37,0.940,bicubic,+4.969,+1.224,+89\nresnet200d.ra2_in1k,320,88.516,11.485,97.965,2.035,64.69,1.000,bicubic,+4.540,+1.145,+9\nfastvit_sa24.apple_dist_in1k,256,88.509,11.491,97.944,2.056,21.55,0.900,bicubic,+5.141,+1.370,+123\nrepvit_m2_3.dist_300e_in1k,224,88.507,11.493,97.927,2.073,23.69,0.950,bicubic,+4.983,+1.395,+89\nrexnetr_300.sw_in12k_ft_in1k,224,88.505,11.495,98.076,1.924,34.81,0.950,bicubic,+4.469,+1.084,-1\nedgenext_base.in21k_ft_in1k,256,88.503,11.497,98.014,1.986,18.51,0.950,bicubic,+5.087,+1.214,+113\nhiera_small_abswin_256.sbb2_pd_e200_in12k_ft_in1k,256,88.501,11.499,98.051,1.949,34.36,0.950,bicubic,+3.937,+0.943,-91\nconvformer_s36.sail_in1k,224,88.498,11.502,97.771,2.229,40.01,1.000,bicubic,+4.430,+1.027,-11\nmobilenetv4_conv_large.e600_r384_in1k,448,88.494,11.506,97.950,2.050,32.5
9,1.000,bicubic,+5.076,+1.338,+109\nfasternet_l.in1k,224,88.492,11.508,97.963,2.037,93.47,1.000,bicubic,+4.976,+1.305,+85\nseresnext101_32x8d.ah_in1k,288,88.492,11.508,97.876,2.124,93.57,1.000,bicubic,+4.300,+0.744,-28\nefficientnetv2_rw_s.ra2_in1k,384,88.490,11.510,97.974,2.026,23.94,1.000,bicubic,+4.660,+1.262,+23\ncrossvit_18_dagger_408.in1k,408,88.488,11.512,97.886,2.114,44.61,1.000,bicubic,+4.290,+1.052,-34\nmobilenetv4_hybrid_large.ix_e600_r384_in1k,384,88.488,11.512,97.765,2.235,37.76,0.950,bicubic,+4.492,+1.051,-5\nrepvit_m2_3.dist_450e_in1k,224,88.486,11.514,98.053,1.947,23.69,0.950,bicubic,+4.740,+1.403,+39\ndm_nfnet_f2.dm_in1k,256,88.486,11.514,97.959,2.041,193.78,0.920,bicubic,+4.386,+1.049,-25\nflexivit_base.300ep_in1k,240,88.486,11.514,97.839,2.161,86.59,0.950,bicubic,+4.084,+0.955,-64\nmambaout_tiny.in1k,288,88.486,11.514,97.784,2.216,26.55,1.000,bicubic,+5.034,+1.244,+92\ngcvit_base.in1k,224,88.484,11.517,97.775,2.225,90.32,0.875,bicubic,+4.028,+0.557,-85\nefficientformerv2_l.snap_dist_in1k,224,88.481,11.519,97.933,2.067,26.32,0.950,bicubic,+4.839,+1.387,+59\nnextvit_base.bd_in1k,224,88.477,11.523,97.918,2.082,44.82,0.950,bicubic,+4.979,+1.270,+78\nvit_base_patch16_rope_mixed_224.naver_in1k,224,88.475,11.525,97.931,2.069,86.44,0.900,bicubic,+4.669,+1.209,+22\nmaxvit_large_tf_224.in1k,224,88.475,11.525,97.831,2.169,211.79,0.950,bicubic,+3.537,+0.851,-155\nhgnet_tiny.ssld_in1k,288,88.471,11.529,98.096,1.905,14.74,1.000,bicubic,+5.943,+1.576,+235\ntf_efficientnet_b3.ns_jft_in1k,300,88.469,11.531,98.027,1.973,12.23,0.904,bicubic,+4.415,+1.101,-24\nregnetv_064.ra3_in1k,288,88.466,11.534,98.061,1.939,30.58,1.000,bicubic,+4.720,+1.315,+30\nmaxvit_small_tf_224.in1k,224,88.464,11.536,97.876,2.124,68.93,0.950,bicubic,+4.012,+1.052,-92\nmambaout_small_rw.sw_e450_in1k,224,88.462,11.538,97.856,2.144,48.50,1.000,bicubic,+4.422,+1.114,-25\nmobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k,320,88.456,11.544,98.070,1.930,11.07,1.000,bicubic,+5.458,+1.394,+152\nregnet
y_320.swag_lc_in1k,224,88.449,11.551,98.106,1.894,145.05,0.965,bicubic,+3.795,+0.614,-125\ncait_s24_224.fb_dist_in1k,224,88.447,11.553,97.950,2.050,46.92,1.000,bicubic,+4.963,+1.364,+71\nresnetv2_101x3_bit.goog_in21k_ft_in1k,448,88.445,11.555,98.155,1.845,387.93,1.000,bilinear,+4.015,+0.775,-89\nresmlp_big_24_224.fb_distilled_in1k,224,88.441,11.559,97.938,2.062,129.14,0.875,bicubic,+4.847,+1.279,+52\ninception_next_base.sail_in1k,224,88.439,11.561,97.779,2.220,86.67,0.950,bicubic,+4.341,+0.641,-41\nvit_little_patch16_reg1_gap_256.sbb_in12k_ft_in1k,256,88.434,11.566,98.127,1.873,22.52,0.950,bicubic,+4.634,+1.543,+8\nresnest200e.in1k,320,88.434,11.566,98.040,1.960,70.20,0.909,bicubic,+4.554,+1.160,-9\nresnetrs350.tf_in1k,288,88.434,11.566,97.888,2.112,163.96,1.000,bicubic,+4.130,+0.996,-73\nvit_base_patch16_rope_mixed_ape_224.naver_in1k,224,88.430,11.570,97.876,2.124,86.59,0.900,bicubic,+4.532,+1.124,-16\nswin_base_patch4_window12_384.ms_in1k,384,88.428,11.572,97.814,2.186,87.90,1.000,bicubic,+3.950,+0.928,-110\nseresnext101d_32x8d.ah_in1k,288,88.426,11.574,97.944,2.056,93.59,1.000,bicubic,+4.046,+1.280,-87\nvit_large_r50_s32_224.augreg_in21k_ft_in1k,224,88.424,11.576,98.072,1.928,328.99,0.900,bicubic,+3.990,+0.920,-99\nregnetz_c16_evos.ch_in1k,320,88.413,11.587,98.040,1.960,13.49,0.950,bicubic,+5.753,+1.560,+188\nconvnext_small.fb_in1k,288,88.413,11.587,98.010,1.990,50.22,1.000,bicubic,+4.709,+1.220,+21\ntf_efficientnetv2_s.in1k,384,88.413,11.587,97.927,2.073,21.46,1.000,bicubic,+4.513,+1.225,-23\nnextvit_large.bd_in1k,224,88.404,11.595,97.965,2.035,57.87,0.950,bicubic,+4.751,+1.283,+31\ntf_efficientnet_b5.aa_in1k,456,88.402,11.598,97.933,2.067,30.39,0.934,bicubic,+4.706,+1.221,+24\nresnetv2_50x3_bit.goog_in21k_ft_in1k,448,88.400,11.600,98.185,1.815,217.32,1.000,bilinear,+4.418,+1.055,-38\nvit_base_patch16_384.orig_in21k_ft_in1k,384,88.398,11.602,98.147,1.853,86.86,1.000,bicubic,+4.186,+0.937,-71\nmobilenetv4_hybrid_medium.ix_e550_r384_in1k,448,88.398,11.602,98.066,1
.934,11.07,1.000,bicubic,+5.008,+1.300,+76\ntresnet_v2_l.miil_in21k_ft_in1k,224,88.387,11.613,97.908,2.092,46.17,0.875,bilinear,+4.477,+1.416,-34\nedgenext_base.usi_in1k,256,88.385,11.615,98.064,1.937,18.51,0.950,bicubic,+4.701,+1.314,+21\nefficientnet_b4.ra2_in1k,384,88.381,11.619,97.944,2.056,19.34,1.000,bicubic,+4.913,+1.370,+52\nconvnext_base.fb_in1k,224,88.377,11.623,97.777,2.223,88.59,0.875,bicubic,+4.539,+1.031,-20\nhgnetv2_b4.ssld_stage1_in22k_in1k,224,88.368,11.632,98.083,1.917,19.80,0.965,bicubic,+5.476,+1.451,+142\nswinv2_small_window16_256.ms_in1k,256,88.368,11.632,97.848,2.152,49.73,0.900,bicubic,+4.152,+0.974,-78\nvit_base_patch16_rope_224.naver_in1k,224,88.366,11.634,97.786,2.214,86.43,0.900,bicubic,+4.652,+1.112,+5\nseresnet152d.ra2_in1k,256,88.364,11.636,98.061,1.939,66.84,0.950,bicubic,+4.662,+1.441,+10\nresnet152d.ra2_in1k,320,88.360,11.640,97.946,2.054,60.21,1.000,bicubic,+4.676,+1.248,+13\nconvnextv2_tiny.fcmae_ft_in22k_in1k,224,88.358,11.643,97.948,2.052,28.64,0.875,bicubic,+4.454,+1.286,-41\nfastvit_ma36.apple_in1k,256,88.355,11.645,97.927,2.073,44.07,0.950,bicubic,+4.471,+1.171,-35\ntf_efficientnet_b4.ap_in1k,380,88.353,11.647,97.897,2.103,19.34,0.922,bicubic,+5.107,+1.505,+84\nresnetrs420.tf_in1k,320,88.349,11.651,97.859,2.142,191.89,1.000,bicubic,+4.109,+0.997,-87\nmaxvit_rmlp_tiny_rw_256.sw_in1k,256,88.347,11.653,97.824,2.176,29.15,0.950,bicubic,+4.121,+1.042,-87\ndeit3_small_patch16_224.fb_in22k_ft_in1k,224,88.343,11.658,98.140,1.860,22.06,1.000,bicubic,+5.261,+1.366,+105\ncrossvit_15_dagger_408.in1k,408,88.343,11.658,97.869,2.131,28.50,1.000,bicubic,+4.501,+1.083,-32\nvit_mediumd_patch16_rope_reg1_gap_256.sbb_in1k,256,88.338,11.662,97.773,2.227,63.95,0.950,bicubic,+4.022,+0.963,-105\nconvnextv2_nano.fcmae_ft_in22k_in1k_384,384,88.321,11.679,97.940,2.060,15.62,1.000,bicubic,+4.955,+1.200,+60\nefficientvit_b3.r256_in1k,256,88.321,11.679,97.570,2.430,48.65,1.000,bicubic,+4.539,+1.072,-20\nregnety_064.ra3_in1k,288,88.317,11.683,97.865,2.135,
30.58,1.000,bicubic,+4.601,+1.141,-9\ndeit3_small_patch16_384.fb_in1k,384,88.308,11.692,97.884,2.116,22.21,1.000,bicubic,+4.876,+1.210,+43\nresnetrs200.tf_in1k,256,88.304,11.696,97.929,2.071,93.21,1.000,bicubic,+4.420,+1.167,-47\ntf_efficientnet_b5.ra_in1k,456,88.304,11.696,97.914,2.086,30.39,0.934,bicubic,+4.470,+1.150,-38\ncs3se_edgenet_x.c2ns_in1k,320,88.302,11.698,97.929,2.071,50.72,1.000,bicubic,+4.752,+1.263,+17\nvit_base_patch16_rope_ape_224.naver_in1k,224,88.300,11.700,97.820,2.180,86.59,0.900,bicubic,+4.520,+1.210,-25\npvt_v2_b4.in1k,224,88.291,11.709,97.814,2.186,62.56,0.900,bicubic,+4.581,+1.200,-13\nmvitv2_small.fb_in1k,224,88.289,11.711,97.707,2.293,34.87,0.900,bicubic,+4.517,+1.149,-24\nese_vovnet57b.ra4_e3600_r256_in1k,320,88.283,11.717,97.912,2.088,38.61,1.000,bicubic,+4.985,+1.390,+61\nxcit_small_12_p16_224.fb_dist_in1k,224,88.283,11.717,97.854,2.146,26.25,1.000,bicubic,+4.929,+1.382,+53\nefficientformer_l7.snap_dist_in1k,224,88.278,11.722,97.886,2.114,82.23,0.950,bicubic,+4.882,+1.354,+44\nhgnetv2_b3.ssld_stage2_ft_in1k,224,88.276,11.724,97.923,2.077,16.29,0.965,bicubic,+5.362,+1.563,+113\nregnetz_d8_evos.ch_in1k,256,88.274,11.726,97.959,2.041,23.46,0.950,bicubic,+4.870,+1.297,+39\nregnetz_040_h.ra3_in1k,256,88.274,11.726,97.910,2.090,28.94,1.000,bicubic,+4.510,+1.210,-27\nmambaout_base.in1k,224,88.270,11.730,97.664,2.336,84.81,1.000,bicubic,+4.076,+0.996,-102\nresnetrs270.tf_in1k,256,88.268,11.732,97.861,2.139,129.86,1.000,bicubic,+4.672,+1.245,0\ninception_next_small.sail_in1k,224,88.268,11.732,97.799,2.201,49.37,0.875,bicubic,+4.684,+1.201,+2\nmambaout_small.in1k,224,88.266,11.734,97.730,2.270,48.49,1.000,bicubic,+4.172,+1.104,-92\nrdnet_small.nv_in1k,224,88.264,11.736,97.906,2.095,50.44,0.900,bicubic,+4.606,+1.202,-13\nregnetz_040.ra3_in1k,256,88.264,11.736,97.878,2.122,27.12,1.000,bicubic,+4.620,+1.196,-10\ndeit3_base_patch16_224.fb_in1k,224,88.261,11.739,97.805,2.195,86.59,0.900,bicubic,+4.461,+0.833,-43\ngcvit_small.in1k,224,88.255,11.745,97
.807,2.193,51.09,0.875,bicubic,+4.351,+0.843,-71\nresnetrs152.tf_in1k,320,88.255,11.745,97.733,2.267,86.62,1.000,bicubic,+4.545,+1.061,-28\nvit_betwixt_patch16_rope_reg4_gap_256.sbb_in1k,256,88.255,11.745,97.664,2.336,60.23,0.950,bicubic,+4.349,+0.986,-74\nregnetz_d32.ra3_in1k,256,88.253,11.747,97.880,2.120,27.58,0.950,bicubic,+4.821,+1.246,+21\nconvnext_nano.r384_in12k_ft_in1k,384,88.238,11.762,97.989,2.011,15.59,1.000,bicubic,+4.904,+1.365,+41\nregnetv_040.ra3_in1k,288,88.227,11.773,97.970,2.030,20.64,1.000,bicubic,+5.023,+1.288,+53\nresnetaa101d.sw_in12k_ft_in1k,224,88.217,11.783,97.933,2.067,44.57,0.950,bicubic,+4.673,+1.105,-4\ndeit_base_distilled_patch16_224.fb_in1k,224,88.212,11.788,97.912,2.088,87.34,0.900,bicubic,+4.828,+1.420,+28\nxcit_tiny_24_p16_384.fb_dist_in1k,384,88.195,11.805,97.940,2.060,12.12,1.000,bicubic,+5.615,+1.680,+154\nswinv2_small_window8_256.ms_in1k,256,88.189,11.811,97.777,2.223,49.73,0.900,bicubic,+4.345,+1.127,-68\nvit_medium_patch16_rope_reg1_gap_256.sbb_in1k,256,88.187,11.813,97.871,2.129,38.74,0.950,bicubic,+4.377,+1.047,-59\nxception65p.ra3_in1k,299,88.187,11.813,97.794,2.205,39.82,0.940,bicubic,+5.027,+1.424,+54\nhiera_small_224.mae_in1k_ft_in1k,224,88.182,11.818,97.654,2.346,35.01,0.900,bicubic,+4.294,+0.986,-79\nmobilenetv4_conv_medium.e250_r384_in12k_ft_in1k,384,88.176,11.824,97.972,2.028,9.72,0.950,bicubic,+5.400,+1.524,+110\nresnext101_32x8d.fb_wsl_ig1b_ft_in1k,224,88.170,11.830,97.865,2.135,88.79,0.875,bilinear,+5.450,+1.241,+118\nregnety_160.deit_in1k,224,88.168,11.832,97.818,2.182,83.59,0.950,bicubic,+5.168,+1.316,+75\ncaformer_s18.sail_in1k,224,88.168,11.832,97.741,2.259,26.34,1.000,bicubic,+4.504,+1.215,-33\ntiny_vit_21m_224.in1k,224,88.163,11.837,97.841,2.159,21.20,0.950,bicubic,+4.895,+1.239,+33\nxcit_large_24_p8_224.fb_in1k,224,88.163,11.837,97.387,2.613,188.93,1.000,bicubic,+3.783,+0.469,-153\ncait_xxs36_384.fb_dist_in1k,384,88.161,11.839,97.899,2.101,17.37,1.000,bicubic,+5.957,+1.751,+205\nvit_medium_patch16_reg4_gap
_256.sbb_in1k,256,88.157,11.843,97.880,2.120,38.88,0.950,bicubic,+4.691,+1.258,-3\ntf_efficientnetv2_s.in21k_ft_in1k,300,88.153,11.848,97.935,2.065,21.46,1.000,bicubic,+4.805,+1.225,+21\nmobilenetv4_conv_aa_large.e600_r384_in1k,384,88.153,11.848,97.726,2.274,32.59,0.950,bicubic,+4.889,+1.336,+31\ncoat_lite_medium.in1k,224,88.150,11.850,97.884,2.116,44.57,0.900,bicubic,+4.548,+1.170,-30\nhgnetv2_b2.ssld_stage2_ft_in1k,288,88.148,11.852,97.984,2.015,11.22,1.000,bicubic,+5.786,+1.572,+171\nresnetv2_152x2_bit.goog_teacher_in21k_ft_in1k_384,384,88.144,11.856,98.046,1.954,236.34,1.000,bicubic,+4.286,+0.916,-86\npvt_v2_b3.in1k,224,88.135,11.865,97.771,2.229,45.24,0.900,bicubic,+5.015,+1.211,+47\nhgnetv2_b3.ssld_stage1_in22k_in1k,224,88.131,11.869,97.972,2.028,16.29,0.965,bicubic,+5.537,+1.608,+131\nswiftformer_l3.dist_in1k,224,88.129,11.871,97.812,2.188,28.49,0.950,bicubic,+5.135,+1.576,+64\ndm_nfnet_f0.dm_in1k,256,88.123,11.877,97.903,2.097,71.49,0.900,bicubic,+4.655,+1.481,-14\ntiny_vit_11m_224.dist_in22k_ft_in1k,224,88.118,11.882,97.801,2.199,11.00,0.950,bicubic,+4.886,+1.173,+26\npvt_v2_b5.in1k,224,88.114,11.886,97.696,2.304,81.96,0.900,bicubic,+4.354,+1.062,-66\nvit_betwixt_patch16_reg4_gap_256.sbb_in1k,256,88.112,11.888,97.677,2.323,60.40,0.950,bicubic,+4.402,+1.061,-59\nresnext101_32x4d.fb_swsl_ig1b_ft_in1k,224,88.108,11.892,97.963,2.037,44.18,0.875,bilinear,+4.840,+1.203,+17\npit_b_distilled_224.in1k,224,88.106,11.894,97.645,2.355,74.79,0.900,bicubic,+4.332,+1.177,-73\nxcit_tiny_12_p8_384.fb_dist_in1k,384,88.103,11.897,97.925,2.075,6.71,1.000,bicubic,+5.709,+1.731,+153\nrexnetr_200.sw_in12k_ft_in1k,288,88.101,11.899,98.021,1.979,16.52,1.000,bicubic,+4.941,+1.515,+27\nfastvit_sa36.apple_in1k,256,88.101,11.899,97.794,2.205,31.53,0.900,bicubic,+4.599,+1.162,-28\nfasternet_m.in1k,224,88.088,11.912,97.884,2.116,53.52,1.000,bicubic,+5.240,+1.496,+73\nhrnet_w18_ssld.paddle_in1k,288,88.078,11.922,97.829,2.171,21.30,1.000,bilinear,+6.026,+1.579,+206\ndm_nfnet_f1.dm_in1k,224
,88.067,11.933,97.844,2.156,132.63,0.910,bicubic,+4.623,+1.210,-19\nxception65.ra3_in1k,299,88.065,11.935,97.752,2.248,39.92,0.940,bicubic,+4.879,+1.156,+18\nregnety_160.swag_lc_in1k,224,88.063,11.937,98.038,1.962,83.59,0.965,bicubic,+4.257,+0.754,-89\nseresnextaa101d_32x8d.ah_in1k,224,88.059,11.941,97.724,2.276,93.59,0.950,bicubic,+4.159,+1.076,-112\nresnet152.a1h_in1k,288,88.056,11.944,97.709,2.291,60.19,1.000,bicubic,+4.562,+1.187,-34\nmobilenetv4_conv_large.e600_r384_in1k,384,88.056,11.944,97.707,2.293,32.59,0.950,bicubic,+5.082,+1.463,+50\nxcit_tiny_24_p8_224.fb_dist_in1k,224,88.054,11.946,97.850,2.150,12.11,1.000,bicubic,+5.476,+1.664,+117\nefficientvit_b3.r224_in1k,224,88.054,11.946,97.575,2.425,48.65,0.950,bicubic,+4.610,+1.237,-25\nconvnextv2_tiny.fcmae_ft_in1k,288,88.052,11.948,97.854,2.146,28.64,1.000,bicubic,+4.574,+1.136,-36\nconvnext_small.fb_in1k,224,88.052,11.948,97.784,2.216,50.22,0.875,bicubic,+4.908,+1.352,+21\nswin_s3_base_224.ms_in1k,224,88.052,11.948,97.649,2.351,71.13,0.900,bicubic,+4.132,+0.999,-125\nmobilenetv4_hybrid_medium.ix_e550_r384_in1k,384,88.039,11.961,97.794,2.205,11.07,0.950,bicubic,+5.087,+1.325,+46\nfocalnet_base_srf.ms_in1k,224,88.033,11.967,97.639,2.361,88.15,0.900,bicubic,+4.225,+0.979,-100\nregnety_080.ra3_in1k,224,88.031,11.969,97.707,2.293,39.18,0.950,bicubic,+4.865,+1.227,+9\nvit_base_patch16_rope_reg1_gap_256.sbb_in1k,256,88.027,11.973,97.619,2.381,86.43,0.950,bicubic,+4.155,+0.943,-116\nmaxvit_tiny_tf_224.in1k,224,88.024,11.976,97.824,2.176,30.92,0.950,bicubic,+4.598,+1.240,-28\nvit_medium_patch16_reg1_gap_256.sbb_in1k,256,88.022,11.978,97.730,2.270,38.88,0.950,bicubic,+4.560,+1.182,-38\nconvnext_tiny.fb_in22k_ft_in1k,224,88.018,11.982,97.920,2.080,28.59,0.875,bicubic,+5.118,+1.630,+47\neca_nfnet_l1.ra2_in1k,256,88.016,11.984,97.918,2.082,41.41,0.900,bicubic,+4.752,+1.234,-6\nfocalnet_base_lrf.ms_in1k,224,88.012,11.988,97.621,2.378,88.75,0.900,bicubic,+4.184,+1.011,-113\ngcvit_tiny.in1k,224,88.003,11.997,97.728,2.272,28.
22,0.875,bicubic,+4.603,+1.334,-28\nmobilenetv4_conv_large.e500_r256_in1k,320,88.001,11.999,97.675,2.325,32.59,1.000,bicubic,+5.329,+1.365,+76\ncs3sedarknet_x.c2ns_in1k,288,87.992,12.008,97.792,2.208,35.40,1.000,bicubic,+5.338,+1.444,+81\nresnet152d.ra2_in1k,256,87.986,12.014,97.728,2.272,60.21,0.950,bicubic,+4.826,+1.082,+3\nxcit_small_24_p8_224.fb_in1k,224,87.984,12.016,97.570,2.430,47.63,1.000,bicubic,+4.130,+0.934,-124\ntf_efficientnet_b5.in1k,456,87.978,12.023,97.931,2.069,30.39,0.934,bicubic,+4.808,+1.397,-5\nregnetz_c16.ra3_in1k,320,87.975,12.025,97.779,2.220,13.46,1.000,bicubic,+5.313,+1.448,+72\nefficientformer_l3.snap_dist_in1k,224,87.975,12.025,97.713,2.287,31.41,0.950,bicubic,+5.413,+1.463,+100\nnfnet_l0.ra2_in1k,288,87.973,12.027,97.861,2.139,35.07,1.000,bicubic,+5.207,+1.367,+57\ntf_efficientnet_b4.aa_in1k,380,87.969,12.031,97.737,2.263,19.34,0.922,bicubic,+4.929,+1.439,+15\neca_nfnet_l0.ra2_in1k,288,87.967,12.033,97.876,2.124,24.14,1.000,bicubic,+5.377,+1.390,+89\nresnet200d.ra2_in1k,256,87.960,12.040,97.820,2.180,64.69,0.950,bicubic,+4.710,+1.268,-18\nmobilevitv2_200.cvnets_in22k_ft_in1k_384,384,87.960,12.040,97.801,2.199,18.45,1.000,bicubic,+4.530,+1.225,-46\ncoatnet_1_rw_224.sw_in1k,224,87.950,12.050,97.461,2.539,41.72,0.950,bicubic,+4.330,+1.079,-83\nregnety_032.ra_in1k,288,87.948,12.052,97.888,2.112,19.44,1.000,bicubic,+5.224,+1.462,+56\nresnet101d.ra2_in1k,320,87.943,12.057,97.903,2.097,44.57,1.000,bicubic,+4.923,+1.447,+11\nfocalnet_small_lrf.ms_in1k,224,87.943,12.057,97.698,2.302,50.34,0.900,bicubic,+4.435,+1.112,-70\nswinv2_cr_small_ns_224.sw_in1k,224,87.930,12.069,97.664,2.336,49.70,0.900,bicubic,+4.441,+1.178,-66\ntwins_svt_large.in1k,224,87.928,12.072,97.572,2.428,99.27,0.900,bicubic,+4.210,+0.972,-109\nhgnet_tiny.ssld_in1k,224,87.922,12.078,97.865,2.135,14.74,0.965,bicubic,+5.980,+1.747,+187\nrepvit_m1_5.dist_450e_in1k,224,87.915,12.085,97.707,2.293,14.64,0.950,bicubic,+5.395,+1.449,+91\nregnetv_064.ra3_in1k,224,87.911,12.089,97.754,2.246
,30.58,0.950,bicubic,+4.793,+1.042,-6\nresnetv2_101.a1h_in1k,288,87.911,12.089,97.660,2.340,44.54,1.000,bicubic,+4.909,+1.216,+6\nvit_betwixt_patch16_reg1_gap_256.sbb_in1k,256,87.911,12.089,97.594,2.406,60.40,0.950,bicubic,+4.259,+1.048,-95\nflexivit_small.1200ep_in1k,240,87.909,12.091,97.613,2.387,22.06,0.950,bicubic,+5.333,+1.461,+80\nsequencer2d_l.in1k,224,87.907,12.093,97.711,2.289,54.30,0.875,bicubic,+4.501,+1.207,-55\nefficientvit_b2.r288_in1k,288,87.907,12.093,97.592,2.408,24.33,1.000,bicubic,+4.829,+1.306,-5\nvit_base_patch32_384.augreg_in21k_ft_in1k,384,87.903,12.097,98.021,1.979,88.30,1.000,bicubic,+4.545,+1.193,-49\nregnety_040.ra3_in1k,288,87.896,12.104,97.880,2.120,20.65,1.000,bicubic,+4.848,+1.378,-6\ntwins_pcpvt_large.in1k,224,87.896,12.104,97.865,2.135,60.99,0.900,bicubic,+4.738,+1.495,-21\nhgnetv2_b2.ssld_stage1_in22k_in1k,288,87.892,12.108,97.893,2.107,11.22,1.000,bicubic,+6.436,+1.877,+238\ntf_efficientnetv2_s.in1k,300,87.890,12.110,97.745,2.255,21.46,1.000,bicubic,+4.728,+1.411,-28\ncoatnet_rmlp_1_rw_224.sw_in1k,224,87.875,12.125,97.630,2.370,41.69,0.950,bicubic,+4.525,+1.178,-51\nmaxvit_tiny_rw_224.sw_in1k,224,87.871,12.129,97.649,2.351,29.06,0.950,bicubic,+4.349,+1.161,-89\nswin_base_patch4_window7_224.ms_in1k,224,87.869,12.131,97.558,2.442,87.77,0.900,bicubic,+4.265,+1.108,-102\necaresnet101d.miil_in1k,288,87.866,12.134,97.899,2.101,44.57,0.950,bicubic,+4.860,+1.603,-8\nconvnext_nano.in12k_ft_in1k,288,87.864,12.136,97.895,2.105,15.59,1.000,bicubic,+4.982,+1.333,+13\ndeit_base_patch16_384.fb_in1k,384,87.862,12.138,97.510,2.490,86.86,1.000,bicubic,+4.754,+1.134,-21\nmobilevitv2_175.cvnets_in22k_ft_in1k_384,384,87.858,12.142,97.722,2.278,14.25,1.000,bicubic,+4.938,+1.272,+2\nswin_s3_small_224.ms_in1k,224,87.854,12.146,97.442,2.558,49.74,0.900,bicubic,+4.076,+0.982,-139\nefficientnetv2_rw_s.ra2_in1k,288,87.852,12.149,97.741,2.259,23.94,1.000,bicubic,+4.963,+1.413,+7\nvit_little_patch16_reg4_gap_256.sbb_in1k,256,87.849,12.151,97.711,2.289,22.52,0.9
50,bicubic,+5.329,+1.599,+69\nconvformer_s18.sail_in1k,224,87.841,12.159,97.545,2.455,26.77,1.000,bicubic,+4.873,+1.299,-6\nfocalnet_small_srf.ms_in1k,224,87.832,12.168,97.551,2.449,49.89,0.900,bicubic,+4.404,+1.119,-77\nxcit_small_12_p8_224.fb_in1k,224,87.830,12.170,97.562,2.438,26.21,1.000,bicubic,+4.476,+1.142,-65\ndeit3_medium_patch16_224.fb_in1k,224,87.822,12.178,97.643,2.357,38.85,0.900,bicubic,+4.732,+1.531,-27\nseresnext101_32x8d.ah_in1k,224,87.822,12.178,97.545,2.455,93.57,0.950,bicubic,+4.204,+1.133,-115\nmaxxvit_rmlp_nano_rw_256.sw_in1k,256,87.815,12.185,97.752,2.248,16.78,0.950,bicubic,+4.779,+1.408,-22\nflexivit_small.600ep_in1k,240,87.815,12.185,97.566,2.434,22.06,0.950,bicubic,+5.427,+1.524,+83\nvit_base_patch32_clip_224.laion2b_ft_in12k_in1k,224,87.807,12.193,97.765,2.235,88.22,0.900,bicubic,+4.503,+1.237,-62\ntresnet_xl.miil_in1k_448,448,87.807,12.193,97.466,2.534,78.44,0.875,bilinear,+4.741,+1.286,-27\nresnetv2_50x1_bit.goog_distilled_in1k,224,87.798,12.202,97.903,2.097,25.55,0.875,bicubic,+4.978,+1.377,+6\nregnetz_c16_evos.ch_in1k,256,87.794,12.206,97.822,2.178,13.49,0.950,bicubic,+5.854,+1.672,+154\nnextvit_small.bd_in1k,224,87.794,12.206,97.677,2.323,31.76,0.950,bicubic,+5.168,+1.453,+36\ntf_efficientnetv2_b3.in21k_ft_in1k,300,87.785,12.215,97.893,2.107,14.36,0.900,bicubic,+5.151,+1.265,+31\nrepvit_m1_5.dist_300e_in1k,224,87.785,12.215,97.645,2.355,14.64,0.950,bicubic,+5.403,+1.619,+78\nnaflexvit_base_patch16_par_gap.e300_s576_in1k,384,87.766,12.234,97.335,2.665,86.63,1.000,bicubic,+4.258,+0.965,-109\nconvnext_tiny.fb_in1k,288,87.762,12.238,97.587,2.413,28.59,1.000,bicubic,+5.068,+1.443,+19\nefficientnet_b4.ra2_in1k,320,87.755,12.245,97.630,2.370,19.34,0.875,bicubic,+5.097,+1.496,+23\nregnety_064.ra3_in1k,224,87.751,12.249,97.643,2.357,30.58,0.950,bicubic,+4.745,+1.119,-31\nrdnet_tiny.nv_in1k,224,87.743,12.257,97.705,2.295,23.86,0.900,bicubic,+4.929,+1.673,-2\nregnety_320.tv2_in1k,224,87.743,12.257,97.675,2.325,145.05,0.965,bicubic,+4.573,+1.259
,-59\nconvnextv2_nano.fcmae_ft_in22k_in1k,288,87.738,12.262,97.891,2.109,15.62,1.000,bicubic,+5.072,+1.345,+16\ntresnet_m.miil_in21k_ft_in1k,224,87.736,12.264,97.525,2.474,31.39,0.875,bilinear,+4.646,+1.241,-44\nhgnet_small.paddle_in1k,288,87.728,12.272,97.656,2.344,24.36,1.000,bicubic,+5.506,+1.432,+95\nmobilenetv4_hybrid_medium.ix_e550_r256_in1k,320,87.725,12.274,97.686,2.314,11.07,1.000,bicubic,+5.225,+1.400,+49\ngc_efficientnetv2_rw_t.agc_in1k,288,87.723,12.277,97.809,2.191,13.68,1.000,bicubic,+5.237,+1.507,+51\ntwins_pcpvt_base.in1k,224,87.723,12.277,97.722,2.278,43.83,0.900,bicubic,+4.999,+1.372,+4\nmvitv2_tiny.fb_in1k,224,87.723,12.277,97.562,2.438,24.17,0.900,bicubic,+5.315,+1.412,+58\nresnext101_64x4d.tv_in1k,224,87.717,12.283,97.585,2.415,83.46,0.875,bilinear,+4.739,+1.311,-35\nmaxvit_rmlp_nano_rw_256.sw_in1k,256,87.715,12.285,97.570,2.430,15.50,0.950,bicubic,+4.737,+1.318,-37\nseresnext101d_32x8d.ah_in1k,224,87.713,12.287,97.707,2.293,93.59,0.950,bicubic,+4.011,+0.913,-153\nresnetrs152.tf_in1k,256,87.706,12.294,97.528,2.472,86.62,1.000,bicubic,+4.806,+0.912,-27\nmobilevitv2_150.cvnets_in22k_ft_in1k_384,384,87.691,12.309,97.679,2.321,10.59,1.000,bicubic,+5.085,+1.365,+20\nmambaout_tiny.in1k,224,87.689,12.311,97.491,2.509,26.55,1.000,bicubic,+4.947,+1.757,-7\nswin_small_patch4_window7_224.ms_in1k,224,87.685,12.315,97.568,2.432,49.61,0.900,bicubic,+4.457,+1.242,-78\nrexnet_300.nav_in1k,224,87.683,12.317,97.609,2.391,34.71,0.875,bicubic,+4.903,+1.379,-14\nresnetv2_101x1_bit.goog_in21k_ft_in1k,448,87.678,12.322,97.946,2.054,44.54,1.000,bilinear,+5.347,+1.422,+63\nmobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k,256,87.662,12.338,97.792,2.208,11.07,0.950,bicubic,+5.302,+1.536,+58\ncoat_small.in1k,224,87.662,12.338,97.538,2.462,21.69,0.900,bicubic,+5.284,+1.324,+54\npnasnet5large.tf_in1k,331,87.662,12.338,97.489,2.511,86.06,0.911,bicubic,+4.848,+1.205,-20\nefficientnetv2_rw_t.ra2_in1k,288,87.657,12.343,97.679,2.321,13.65,1.000,bicubic,+5.303,+1.501,+57\ncs3edge
net_x.c2_in1k,288,87.655,12.345,97.647,2.353,47.82,1.000,bicubic,+4.921,+1.273,-13\nresnetaa50d.sw_in12k_ft_in1k,288,87.653,12.347,97.805,2.195,25.58,1.000,bicubic,+4.997,+1.313,-1\nfastvit_sa24.apple_in1k,256,87.649,12.351,97.703,2.297,21.55,0.900,bicubic,+4.943,+1.425,-10\nefficientvit_b2.r256_in1k,256,87.644,12.356,97.459,2.541,24.33,1.000,bicubic,+4.932,+1.367,-12\nmaxxvitv2_nano_rw_256.sw_in1k,256,87.642,12.358,97.534,2.466,23.70,0.950,bicubic,+4.534,+1.202,-70\nmaxvit_nano_rw_256.sw_in1k,256,87.642,12.358,97.521,2.479,15.45,0.950,bicubic,+4.708,+1.297,-47\nresnet101.a1h_in1k,288,87.636,12.364,97.560,2.440,44.55,1.000,bicubic,+4.840,+1.236,-27\nnaflexvit_base_patch16_parfac_gap.e300_s576_in1k,384,87.636,12.364,97.447,2.554,86.46,1.000,bicubic,+4.168,+0.877,-134\nxcit_medium_24_p8_224.fb_in1k,224,87.636,12.364,97.201,2.799,84.32,1.000,bicubic,+3.892,+0.807,-181\ntwins_svt_base.in1k,224,87.632,12.368,97.525,2.474,56.07,0.900,bicubic,+4.510,+1.095,-79\nswinv2_tiny_window16_256.ms_in1k,256,87.625,12.375,97.549,2.451,28.35,0.900,bicubic,+4.797,+1.327,-38\nresnext101_32x16d.fb_swsl_ig1b_ft_in1k,224,87.612,12.388,97.826,2.174,194.03,0.875,bilinear,+4.272,+0.983,-111\ntf_efficientnet_b4.in1k,380,87.612,12.388,97.592,2.408,19.34,0.922,bicubic,+5.014,+1.460,+4\nflexivit_small.300ep_in1k,240,87.610,12.390,97.613,2.387,22.06,0.950,bicubic,+5.440,+1.581,+75\nconvnext_nano.in12k_ft_in1k,224,87.604,12.396,97.797,2.203,15.59,0.950,bicubic,+5.298,+1.459,+50\ncs3se_edgenet_x.c2ns_in1k,256,87.600,12.400,97.645,2.355,50.72,0.950,bicubic,+4.832,+1.339,-33\nconvnextv2_tiny.fcmae_ft_in1k,224,87.600,12.400,97.600,2.400,28.64,0.875,bicubic,+4.652,+1.314,-61\nnest_base_jx.goog_in1k,224,87.600,12.400,97.515,2.485,67.72,0.875,bicubic,+4.047,+1.151,-160\nconvnextv2_nano.fcmae_ft_in1k,288,87.591,12.409,97.694,2.306,15.62,1.000,bicubic,+5.101,+1.468,+16\nregnetv_040.ra3_in1k,224,87.589,12.411,97.671,2.329,20.64,0.950,bicubic,+5.149,+1.469,+23\ntnt_b_patch16_224.in1k,224,87.585,12.415,97.481,
2.519,65.43,0.900,bicubic,+4.721,+1.251,-52\nresnext50_32x4d.fb_swsl_ig1b_ft_in1k,224,87.574,12.426,97.643,2.357,25.03,0.875,bilinear,+5.412,+1.587,+68\nregnetz_c16.ra3_in1k,256,87.574,12.426,97.621,2.378,13.46,0.940,bicubic,+5.412,+1.397,+68\nsequencer2d_m.in1k,224,87.572,12.428,97.583,2.417,38.31,0.875,bicubic,+4.736,+1.307,-53\ntf_efficientnet_b2.ns_jft_in1k,260,87.570,12.430,97.651,2.349,9.11,0.890,bicubic,+5.158,+1.413,+19\nnaflexvit_base_patch16_gap.e300_s576_in1k,384,87.570,12.430,97.372,2.628,86.63,1.000,bicubic,+4.230,+1.006,-123\nvit_base_patch32_clip_224.laion2b_ft_in1k,224,87.565,12.435,97.577,2.423,88.22,0.900,bicubic,+4.951,+1.371,-21\nlevit_384.fb_dist_in1k,224,87.565,12.435,97.549,2.451,39.13,0.900,bicubic,+4.965,+1.529,-13\nlevit_conv_384.fb_dist_in1k,224,87.565,12.435,97.549,2.451,39.13,0.900,bicubic,+4.965,+1.529,-13\ndavit_tiny.msft_in1k,224,87.561,12.439,97.579,2.421,28.36,0.950,bicubic,+4.867,+1.315,-36\nese_vovnet57b.ra4_e3600_r256_in1k,256,87.555,12.445,97.519,2.481,38.61,0.950,bicubic,+5.095,+1.513,+9\necaresnet50t.ra2_in1k,320,87.550,12.450,97.666,2.334,25.57,0.950,bicubic,+5.180,+1.546,+21\nregnetx_320.tv2_in1k,224,87.546,12.454,97.568,2.432,107.81,0.965,bicubic,+4.722,+1.368,-58\ninception_next_tiny.sail_in1k,224,87.527,12.473,97.528,2.472,28.06,0.875,bicubic,+5.019,+1.528,-2\nnest_small_jx.goog_in1k,224,87.510,12.490,97.519,2.481,38.35,0.875,bicubic,+4.378,+1.189,-106\npvt_v2_b2_li.in1k,224,87.510,12.490,97.474,2.526,22.55,0.900,bicubic,+5.310,+1.366,+48\nefficientformerv2_s2.snap_dist_in1k,224,87.508,12.492,97.615,2.385,12.71,0.950,bicubic,+5.352,+1.703,+55\nvit_base_patch16_rpn_224.sw_in1k,224,87.508,12.492,97.489,2.511,86.54,0.900,bicubic,+5.290,+1.493,+42\nedgenext_small.usi_in1k,320,87.506,12.494,97.587,2.413,5.59,1.000,bicubic,+5.932,+1.875,+124\nresnetv2_152x2_bit.goog_teacher_in21k_ft_in1k,224,87.491,12.509,97.801,2.199,236.34,0.875,bicubic,+4.573,+1.233,-80\nrexnetr_200.sw_in12k_ft_in1k,224,87.484,12.516,97.765,2.235,16.52,0.950
,bicubic,+4.882,+1.369,-29\ncoatnet_bn_0_rw_224.sw_in1k,224,87.484,12.516,97.553,2.447,27.44,0.950,bicubic,+5.090,+1.331,+7\nefficientnet_b3.ra2_in1k,320,87.480,12.520,97.698,2.302,12.23,1.000,bicubic,+5.184,+1.762,+25\nregnety_080_tv.tv2_in1k,224,87.471,12.529,97.634,2.366,39.38,0.965,bicubic,+4.891,+1.380,-25\nvit_relpos_base_patch16_224.sw_in1k,224,87.471,12.529,97.558,2.442,86.43,0.900,bicubic,+4.989,+1.413,-9\nvit_relpos_base_patch16_clsgap_224.sw_in1k,224,87.471,12.529,97.528,2.472,86.43,0.900,bicubic,+4.713,+1.350,-59\nresnet152.a1h_in1k,224,87.459,12.541,97.400,2.600,60.19,0.950,bicubic,+4.641,+1.276,-70\nregnetx_160.tv2_in1k,224,87.452,12.548,97.431,2.568,54.28,0.965,bicubic,+4.878,+1.373,-24\nfbnetv3_g.ra2_in1k,288,87.448,12.552,97.547,2.453,16.62,0.950,bilinear,+5.414,+1.483,+58\nwide_resnet50_2.racm_in1k,288,87.446,12.554,97.562,2.438,68.88,0.950,bicubic,+5.186,+1.500,+23\nresnext101_64x4d.c1_in1k,288,87.437,12.563,97.438,2.562,83.46,1.000,bicubic,+4.279,+0.812,-124\nresnet61q.ra2_in1k,288,87.435,12.565,97.607,2.393,36.85,1.000,bicubic,+4.903,+1.475,-25\nconvnextv2_nano.fcmae_ft_in22k_in1k,224,87.424,12.575,97.598,2.402,15.62,0.875,bicubic,+5.386,+1.424,+53\npoolformerv2_m48.sail_in1k,224,87.424,12.575,97.406,2.594,73.35,1.000,bicubic,+4.812,+1.338,-44\nhgnetv2_b2.ssld_stage2_ft_in1k,224,87.420,12.580,97.649,2.351,11.22,0.965,bicubic,+5.856,+1.755,+109\ncs3sedarknet_l.c2ns_in1k,288,87.418,12.582,97.566,2.434,21.91,0.950,bicubic,+5.648,+1.604,+91\ncait_xxs24_384.fb_dist_in1k,384,87.414,12.586,97.619,2.381,12.03,1.000,bicubic,+6.460,+2.189,+206\ntresnet_l.miil_in1k_448,448,87.403,12.597,97.487,2.513,55.99,0.875,bilinear,+5.125,+1.499,+14\ncs3darknet_x.c2ns_in1k,288,87.397,12.603,97.600,2.400,35.05,1.000,bicubic,+5.163,+1.366,+17\nresnet51q.ra2_in1k,288,87.386,12.614,97.585,2.415,35.70,1.000,bilinear,+5.030,+1.403,-4\nxcit_tiny_24_p8_224.fb_in1k,224,87.384,12.616,97.639,2.361,12.11,1.000,bicubic,+5.496,+1.661,+68\nnasnetalarge.tf_in1k,331,87.371,12.629,97.4
29,2.571,88.75,0.911,bicubic,+4.733,+1.363,-58\nsequencer2d_s.in1k,224,87.371,12.629,97.395,2.605,27.65,0.875,bicubic,+5.035,+1.363,-5\nswinv2_cr_small_224.sw_in1k,224,87.369,12.631,97.338,2.662,49.70,0.900,bicubic,+4.243,+1.228,-134\ncoat_lite_small.in1k,224,87.367,12.633,97.368,2.632,19.84,0.900,bicubic,+5.063,+1.514,+1\npvt_v2_b2.in1k,224,87.365,12.635,97.528,2.472,25.36,0.900,bicubic,+5.303,+1.562,+36\nnfnet_l0.ra2_in1k,224,87.363,12.637,97.517,2.483,35.07,0.900,bicubic,+5.493,+1.483,+63\nseresnext50_32x4d.racm_in1k,288,87.360,12.640,97.602,2.398,27.56,0.950,bicubic,+5.174,+1.456,+18\nresnet101d.ra2_in1k,256,87.358,12.642,97.619,2.381,44.57,0.950,bicubic,+5.104,+1.549,+5\nresnetv2_50d_gn.ah_in1k,288,87.358,12.642,97.536,2.464,25.57,1.000,bicubic,+5.398,+1.756,+50\nvit_relpos_medium_patch16_cls_224.sw_in1k,224,87.358,12.642,97.455,2.545,38.76,0.900,bicubic,+4.800,+1.379,-45\nfastvit_sa12.apple_dist_in1k,256,87.354,12.646,97.491,2.509,11.58,0.900,bicubic,+5.516,+1.791,+66\nhiera_tiny_224.mae_in1k_ft_in1k,224,87.343,12.657,97.376,2.624,27.91,0.900,bicubic,+4.517,+1.188,-99\ncrossvit_18_dagger_240.in1k,240,87.335,12.665,97.453,2.547,44.27,0.875,bicubic,+4.841,+1.383,-40\nwide_resnet101_2.tv2_in1k,224,87.328,12.672,97.397,2.603,126.89,0.965,bilinear,+4.814,+1.385,-44\nrepvit_m3.dist_in1k,224,87.324,12.676,97.474,2.526,10.68,0.950,bicubic,+5.830,+1.912,+100\nregnety_040.ra3_in1k,224,87.318,12.682,97.607,2.393,20.65,0.950,bicubic,+5.010,+1.529,-13\nresnext101_32x8d.tv2_in1k,224,87.311,12.689,97.558,2.442,88.79,0.965,bilinear,+4.479,+1.324,-106\ncrossvit_18_240.in1k,240,87.309,12.691,97.485,2.515,43.27,0.875,bicubic,+4.919,+1.421,-30\nconvnext_tiny.fb_in1k,224,87.309,12.691,97.444,2.556,28.59,0.875,bicubic,+5.243,+1.590,+20\nfocalnet_tiny_srf.ms_in1k,224,87.307,12.693,97.412,2.588,28.43,0.900,bicubic,+5.161,+1.440,+13\nresnetv2_101.a1h_in1k,224,87.301,12.699,97.329,2.671,44.54,0.950,bicubic,+5.259,+1.463,+23\necaresnet50d.miil_in1k,288,87.299,12.701,97.664,2.336,25.58,0
.950,bicubic,+5.651,+1.780,+71\nresnest101e.in1k,256,87.299,12.701,97.558,2.442,48.28,0.875,bilinear,+4.383,+1.248,-123\nswin_tiny_patch4_window7_224.ms_in22k_ft_in1k,224,87.296,12.704,97.801,2.199,28.29,0.900,bicubic,+6.330,+1.781,+175\nmobilenetv3_large_150d.ra4_e3600_r256_in1k,320,87.294,12.706,97.564,2.436,14.62,1.000,bilinear,+5.462,+1.686,+53\necaresnet101d.miil_in1k,224,87.292,12.708,97.560,2.440,44.57,0.875,bicubic,+5.112,+1.498,0\ncs3sedarknet_x.c2ns_in1k,256,87.286,12.714,97.534,2.466,35.40,0.887,bicubic,+5.360,+1.512,+39\ntiny_vit_11m_224.in1k,224,87.286,12.714,97.500,2.500,11.00,0.950,bicubic,+5.748,+1.630,+79\ngcvit_xtiny.in1k,224,87.284,12.716,97.481,2.519,19.98,0.875,bicubic,+5.320,+1.515,+27\nhrnet_w18_ssld.paddle_in1k,224,87.284,12.716,97.481,2.519,21.30,0.950,bilinear,+6.170,+2.359,+149\neca_nfnet_l0.ra2_in1k,224,87.281,12.719,97.551,2.449,24.14,0.900,bicubic,+5.519,+1.557,+57\ncoatnet_rmlp_nano_rw_224.sw_in1k,224,87.277,12.723,97.447,2.554,15.15,0.900,bicubic,+5.213,+1.573,+8\ncoatnext_nano_rw_224.sw_in1k,224,87.273,12.727,97.521,2.479,14.70,0.900,bicubic,+5.275,+2.227,+17\nmobilenetv4_conv_medium.e500_r256_in1k,320,87.269,12.731,97.523,2.477,9.72,1.000,bicubic,+6.379,+1.773,+177\necaresnet101d_pruned.miil_in1k,288,87.258,12.742,97.713,2.287,24.88,0.950,bicubic,+5.242,+1.533,+12\nvit_relpos_medium_patch16_rpn_224.sw_in1k,224,87.258,12.742,97.449,2.551,38.73,0.900,bicubic,+4.944,+1.739,-34\nregnety_032.ra_in1k,224,87.243,12.757,97.549,2.451,19.44,0.950,bicubic,+5.295,+1.577,+24\nregnety_160.tv2_in1k,224,87.243,12.757,97.472,2.528,83.59,0.965,bicubic,+4.597,+1.242,-95\ntresnet_xl.miil_in1k,224,87.237,12.763,97.395,2.605,78.44,0.875,bilinear,+5.177,+1.463,+3\nresnetrs101.tf_in1k,288,87.232,12.768,97.457,2.543,63.62,0.940,bicubic,+4.944,+1.457,-30\npoolformer_m48.sail_in1k,224,87.232,12.768,97.308,2.692,73.47,0.950,bicubic,+4.770,+1.222,-63\nmixer_b16_224.miil_in21k_ft_in1k,224,87.230,12.770,97.417,2.583,59.88,0.875,bilinear,+4.916,+1.431,-40\nxcit_ti
ny_12_p8_224.fb_dist_in1k,224,87.226,12.774,97.438,2.562,6.71,1.000,bicubic,+6.012,+1.824,+121\nresnetv2_50d_evos.ah_in1k,288,87.215,12.785,97.423,2.577,25.59,1.000,bicubic,+5.191,+1.507,+3\nxcit_tiny_12_p16_384.fb_dist_in1k,384,87.209,12.791,97.468,2.532,6.72,1.000,bicubic,+6.255,+1.830,+158\ntf_efficientnet_b3.ap_in1k,300,87.209,12.791,97.393,2.607,12.23,0.904,bicubic,+5.367,+1.779,+29\nconvit_base.fb_in1k,224,87.200,12.800,97.297,2.703,86.54,0.875,bicubic,+4.904,+1.173,-39\nresnet152.tv2_in1k,224,87.194,12.806,97.410,2.590,60.19,0.965,bilinear,+4.900,+1.410,-39\nvit_base_patch32_clip_224.openai_ft_in1k,224,87.185,12.815,97.461,2.539,88.22,0.900,bicubic,+5.241,+1.489,+14\nvisformer_small.in1k,224,87.179,12.821,97.316,2.684,40.22,0.900,bicubic,+5.087,+1.440,-13\ncs3edgenet_x.c2_in1k,256,87.175,12.825,97.521,2.479,47.82,0.887,bicubic,+4.977,+1.565,-27\nfocalnet_tiny_lrf.ms_in1k,224,87.172,12.828,97.363,2.637,28.65,0.900,bicubic,+4.998,+1.415,-25\nvit_srelpos_medium_patch16_224.sw_in1k,224,87.172,12.828,97.303,2.697,38.74,0.900,bicubic,+4.916,+1.371,-40\nconvnext_tiny_hnf.a2h_in1k,288,87.162,12.838,97.282,2.718,28.59,1.000,bicubic,+4.554,+1.258,-104\ncrossvit_15_dagger_240.in1k,240,87.158,12.842,97.442,2.558,28.21,0.875,bicubic,+4.856,+1.482,-49\ncoatnet_0_rw_224.sw_in1k,224,87.153,12.847,97.214,2.786,27.44,0.950,bicubic,+4.757,+1.370,-71\nmobilenetv4_conv_large.e500_r256_in1k,256,87.151,12.849,97.301,2.699,32.59,0.950,bicubic,+5.287,+1.605,+12\nxcit_small_24_p16_224.fb_in1k,224,87.151,12.849,97.259,2.741,47.67,1.000,bicubic,+4.563,+1.263,-99\nswin_s3_tiny_224.ms_in1k,224,87.149,12.851,97.308,2.692,28.33,0.900,bicubic,+5.021,+1.350,-25\nvit_relpos_medium_patch16_224.sw_in1k,224,87.141,12.860,97.504,2.496,38.75,0.900,bicubic,+4.678,+1.536,-83\nresnetaa50d.sw_in12k_ft_in1k,224,87.130,12.870,97.594,2.406,25.58,0.950,bicubic,+5.348,+1.472,+23\ncs3darknet_x.c2ns_in1k,256,87.119,12.881,97.455,2.545,35.05,0.950,bicubic,+5.263,+1.715,+11\nrepvit_m1_1.dist_450e_in1k,224,87.10
9,12.892,97.412,2.588,8.80,0.950,bicubic,+5.795,+1.844,+84\nefficientvit_b2.r224_in1k,224,87.109,12.892,97.184,2.816,24.33,0.950,bicubic,+4.947,+1.474,-32\nswinv2_tiny_window8_256.ms_in1k,256,87.100,12.900,97.517,2.483,28.35,0.900,bicubic,+5.276,+1.529,+15\nmambaout_kobe.in1k,288,87.091,12.909,97.440,2.560,9.14,1.000,bicubic,+6.027,+1.758,+120\npit_s_distilled_224.in1k,224,87.091,12.909,97.357,2.643,24.04,0.900,bicubic,+5.265,+1.637,+13\nresnet101.a1h_in1k,224,87.083,12.917,97.263,2.737,44.55,0.950,bicubic,+5.133,+1.467,-8\necaresnet50t.a1_in1k,288,87.083,12.917,97.133,2.867,25.57,1.000,bicubic,+4.973,+1.479,-33\nxception41p.ra3_in1k,299,87.074,12.926,97.203,2.797,26.91,0.940,bicubic,+5.096,+1.419,-15\nhgnetv2_b2.ssld_stage1_in22k_in1k,224,87.068,12.932,97.528,2.472,11.22,0.965,bicubic,+6.326,+2.028,+164\nresnet101.a1_in1k,288,87.066,12.934,96.932,3.068,44.55,1.000,bicubic,+4.740,+1.302,-73\nresnext50_32x4d.a1h_in1k,288,87.059,12.941,97.333,2.667,25.03,1.000,bicubic,+5.043,+1.399,-24\nregnetz_b16.ra3_in1k,288,87.055,12.945,97.410,2.590,9.72,1.000,bicubic,+6.315,+1.882,+162\nmobilevitv2_200.cvnets_in22k_ft_in1k,256,87.053,12.947,97.442,2.558,18.45,0.888,bicubic,+4.733,+1.500,-75\nfasternet_s.in1k,224,87.049,12.951,97.387,2.613,31.18,1.000,bicubic,+5.721,+1.709,+69\npoolformerv2_m36.sail_in1k,224,87.044,12.956,97.265,2.735,56.08,1.000,bicubic,+4.830,+1.369,-57\nnest_tiny_jx.goog_in1k,224,87.042,12.958,97.389,2.611,17.06,0.875,bicubic,+5.614,+1.947,+53\nconvit_small.fb_in1k,224,87.038,12.962,97.340,2.660,27.78,0.875,bicubic,+5.626,+1.600,+54\ntf_efficientnetv2_b3.in1k,300,87.038,12.962,97.297,2.703,14.36,0.904,bicubic,+5.078,+1.365,-20\ncrossvit_15_240.in1k,240,87.036,12.964,97.427,2.573,27.53,0.875,bicubic,+5.514,+1.733,+27\nswinv2_cr_tiny_ns_224.sw_in1k,224,87.036,12.964,97.286,2.714,28.33,0.900,bicubic,+5.240,+1.462,-1\ngcresnet50t.ra2_in1k,288,87.027,12.973,97.380,2.620,25.90,1.000,bicubic,+5.569,+1.656,+42\nresnet152.a2_in1k,288,87.017,12.983,96.913,3.087,60.19,1.
000,bicubic,+4.403,+1.165,-136\nefficientnet_b1.ra4_e3600_r240_in1k,288,87.008,12.992,97.325,2.675,7.79,1.000,bicubic,+5.564,+1.627,+43\ncoatnet_nano_rw_224.sw_in1k,224,87.006,12.994,97.261,2.739,15.14,0.900,bicubic,+5.302,+1.611,+4\ndeit3_small_patch16_224.fb_in1k,224,87.002,12.998,97.165,2.835,22.06,0.900,bicubic,+5.620,+1.547,+53\nresnet61q.ra2_in1k,256,87.000,13.000,97.400,2.600,36.85,0.900,bicubic,+5.030,+1.552,-32\nresnet50d.ra4_e3600_r224_in1k,288,86.998,13.002,97.421,2.579,25.58,1.000,bicubic,+5.157,+1.495,-13\nxcit_small_12_p16_224.fb_in1k,224,86.995,13.005,97.250,2.750,26.25,1.000,bicubic,+5.013,+1.436,-37\nmobilevitv2_175.cvnets_in22k_ft_in1k,256,86.987,13.013,97.361,2.639,14.25,0.888,bicubic,+5.037,+1.601,-31\ndeit_small_distilled_patch16_224.fb_in1k,224,86.985,13.015,97.314,2.686,22.44,0.900,bicubic,+5.781,+1.936,+74\nresmlp_36_224.fb_distilled_in1k,224,86.980,13.020,97.271,2.729,44.69,0.875,bicubic,+5.830,+1.763,+78\nhgnet_small.paddle_in1k,224,86.976,13.024,97.412,2.588,24.36,0.965,bicubic,+5.550,+1.566,+39\nregnety_032.tv2_in1k,224,86.972,13.028,97.400,2.600,19.44,0.965,bicubic,+5.184,+1.574,-13\npoolformer_m36.sail_in1k,224,86.965,13.035,97.139,2.861,56.17,0.950,bicubic,+4.861,+1.441,-60\nxcit_large_24_p16_224.fb_in1k,224,86.963,13.037,96.921,3.079,189.10,1.000,bicubic,+4.077,+1.041,-191\nmobilenetv4_hybrid_medium.e500_r224_in1k,256,86.953,13.047,97.472,2.528,11.07,1.000,bicubic,+5.669,+1.726,+54\nedgenext_small.usi_in1k,256,86.946,13.054,97.291,2.709,5.59,0.950,bicubic,+5.878,+1.961,+87\nresnet101.a2_in1k,288,86.946,13.054,96.981,3.019,44.55,1.000,bicubic,+4.728,+1.241,-81\nxcit_medium_24_p16_224.fb_in1k,224,86.942,13.058,97.094,2.906,84.40,1.000,bicubic,+4.312,+1.116,-156\nresnet51q.ra2_in1k,256,86.938,13.062,97.468,2.532,35.70,0.875,bilinear,+5.164,+1.536,-17\nresnet101.tv2_in1k,224,86.929,13.071,97.252,2.748,44.55,0.965,bilinear,+5.023,+1.484,-36\npoolformerv2_s36.sail_in1k,224,86.927,13.073,97.359,2.641,30.79,1.000,bicubic,+5.351,+1.679,-3\nres
netv2_50.a1h_in1k,288,86.923,13.077,97.327,2.673,25.55,1.000,bicubic,+5.517,+1.615,+30\ncs3sedarknet_l.c2ns_in1k,256,86.921,13.079,97.419,2.581,21.91,0.887,bicubic,+5.713,+1.621,+59\ntnt_s_legacy_patch16_224.in1k,224,86.908,13.092,97.365,2.635,23.76,0.900,bicubic,+5.396,+1.623,+2\nconvnext_tiny_hnf.a2h_in1k,224,86.908,13.092,97.271,2.729,28.59,0.950,bicubic,+4.678,+1.411,-92\nvit_relpos_small_patch16_224.sw_in1k,224,86.903,13.097,97.489,2.511,21.98,0.900,bicubic,+5.427,+1.665,+10\nconvmixer_1536_20.in1k,224,86.895,13.105,97.353,2.647,51.63,0.960,bicubic,+5.513,+1.899,+29\nconvnextv2_nano.fcmae_ft_in1k,224,86.891,13.109,97.267,2.733,15.62,0.875,bicubic,+5.035,+1.311,-37\nresnet152.a1_in1k,288,86.886,13.114,96.802,3.198,60.19,1.000,bicubic,+4.144,+0.704,-186\nvit_small_r26_s32_224.augreg_in21k_ft_in1k,224,86.876,13.124,97.534,2.466,36.43,0.900,bicubic,+5.020,+1.450,-41\necaresnetlight.miil_in1k,288,86.874,13.126,97.493,2.507,30.16,0.950,bicubic,+5.444,+1.703,+16\nrexnet_200.nav_in1k,224,86.863,13.137,97.278,2.722,16.37,0.875,bicubic,+5.227,+1.606,-19\nvit_small_patch16_224.augreg_in21k_ft_in1k,224,86.854,13.146,97.596,2.404,22.05,0.900,bicubic,+5.452,+1.446,+20\nresnext101_32x16d.fb_ssl_yfcc100m_ft_in1k,224,86.846,13.154,97.519,2.481,194.03,0.875,bilinear,+4.990,+1.481,-47\ntf_efficientnet_b3.aa_in1k,300,86.846,13.154,97.297,2.703,12.23,0.904,bicubic,+5.198,+1.573,-23\nvit_small_patch16_rope_mixed_ape_224.naver_in1k,224,86.842,13.158,96.883,3.117,22.06,0.900,bicubic,+5.868,+1.907,+79\nmobilenetv4_hybrid_medium.ix_e550_r256_in1k,256,86.840,13.161,97.329,2.671,11.07,0.950,bicubic,+5.362,+1.637,-2\ncoat_mini.in1k,224,86.835,13.165,97.158,2.842,10.34,0.900,bicubic,+5.515,+1.770,+26\nswiftformer_l1.dist_in1k,224,86.833,13.167,97.308,2.692,12.06,0.950,bicubic,+5.927,+2.054,+86\ndeit_base_patch16_224.fb_in1k,224,86.833,13.167,97.062,2.938,86.57,0.900,bicubic,+4.853,+1.322,-69\nvit_small_patch16_rope_mixed_224.naver_in1k,224,86.831,13.169,96.740,3.260,21.99,0.900,bicubic,+5.5
79,+1.684,+33\neva02_tiny_patch14_336.mim_in22k_ft_in1k,336,86.829,13.171,97.278,2.722,5.76,1.000,bicubic,+6.181,+1.758,+125\nefficientnet_b3.ra2_in1k,288,86.827,13.173,97.323,2.677,12.23,0.875,bicubic,+5.355,+1.627,-5\nvit_small_patch16_rope_224.naver_in1k,224,86.822,13.178,96.855,3.145,21.98,0.900,bicubic,+5.600,+1.835,+35\ntresnet_m.miil_in1k_448,448,86.818,13.182,97.203,2.797,31.39,0.875,bilinear,+5.108,+1.641,-39\nrepvit_m1_1.dist_300e_in1k,224,86.807,13.193,97.314,2.686,8.80,0.950,bicubic,+5.981,+2.140,+95\nresnet50.fb_swsl_ig1b_ft_in1k,224,86.805,13.195,97.496,2.504,25.56,0.875,bilinear,+5.659,+1.516,+43\nresnext101_32x8d.fb_ssl_yfcc100m_ft_in1k,224,86.803,13.197,97.474,2.526,88.79,0.875,bilinear,+5.183,+1.438,-35\ntf_efficientnet_lite4.in1k,380,86.803,13.197,97.254,2.746,13.01,0.920,bilinear,+5.263,+1.586,-25\nresnetaa50.a1h_in1k,288,86.799,13.201,97.359,2.641,25.56,1.000,bicubic,+5.181,+1.565,-35\nresnext101_64x4d.c1_in1k,224,86.784,13.216,97.152,2.848,83.46,0.950,bicubic,+4.338,+1.232,-153\ngc_efficientnetv2_rw_t.agc_in1k,224,86.782,13.218,97.278,2.722,13.68,1.000,bicubic,+5.506,+1.698,+19\nese_vovnet39b.ra_in1k,288,86.780,13.220,97.374,2.626,24.57,0.950,bicubic,+6.402,+2.014,+156\nseresnet50.ra2_in1k,288,86.775,13.225,97.357,2.643,28.09,0.950,bicubic,+5.487,+1.705,+14\ntresnet_l.miil_in1k,224,86.773,13.227,97.273,2.727,55.99,0.875,bilinear,+5.275,+1.621,-24\nregnetx_080.tv2_in1k,224,86.771,13.229,97.182,2.818,39.57,0.965,bicubic,+5.219,+1.642,-35\nfbnetv3_g.ra2_in1k,240,86.765,13.235,97.242,2.759,16.62,0.950,bilinear,+5.519,+1.555,+18\ntnt_s_patch16_224.in1k,224,86.763,13.237,97.320,2.679,23.77,0.900,bicubic,+5.257,+1.554,-30\ntwins_svt_small.in1k,224,86.763,13.237,97.169,2.831,24.06,0.900,bicubic,+5.069,+1.499,-51\ncs3darknet_l.c2ns_in1k,288,86.760,13.239,97.472,2.528,21.16,0.950,bicubic,+5.878,+1.810,+72\necaresnet50t.a2_in1k,288,86.760,13.239,97.069,2.931,25.57,1.000,bicubic,+5.078,+1.545,-52\nvit_base_patch16_224.orig_in21k_ft_in1k,224,86.754,13.246,9
7.447,2.554,86.57,0.900,bicubic,+4.972,+1.410,-63\nvit_wee_patch16_reg1_gap_256.sbb_in1k,256,86.754,13.246,97.246,2.754,13.42,0.950,bicubic,+6.492,+1.886,+163\nresnet50d.ra2_in1k,288,86.752,13.248,97.372,2.628,25.58,0.950,bicubic,+5.398,+1.814,-5\nconvnextv2_pico.fcmae_ft_in1k,288,86.752,13.248,97.329,2.671,9.07,0.950,bicubic,+5.690,+1.853,+40\nresnet50_gn.a1h_in1k,288,86.750,13.250,97.444,2.556,25.56,0.950,bicubic,+5.534,+1.810,+13\ncs3darknet_focus_l.c2ns_in1k,288,86.750,13.250,97.370,2.630,21.15,0.950,bicubic,+5.856,+1.696,+61\nseresnet50.a1_in1k,288,86.748,13.252,96.955,3.045,28.09,1.000,bicubic,+5.634,+1.619,+29\nefficientnetv2_rw_t.ra2_in1k,224,86.746,13.255,97.224,2.776,13.65,1.000,bicubic,+5.376,+1.674,-11\ntiny_vit_5m_224.dist_in22k_ft_in1k,224,86.743,13.257,97.318,2.682,5.39,0.950,bicubic,+5.869,+1.650,+64\nfastvit_s12.apple_dist_in1k,256,86.735,13.265,97.203,2.797,9.47,0.900,bicubic,+5.661,+1.925,+30\ncrossvit_base_240.in1k,240,86.735,13.265,97.118,2.882,105.03,0.875,bicubic,+4.523,+1.292,-133\nmobilevitv2_150.cvnets_in22k_ft_in1k,256,86.731,13.269,97.227,2.773,10.59,0.888,bicubic,+5.265,+1.683,-35\nconvnext_nano_ols.d1h_in1k,288,86.731,13.269,97.028,2.972,15.65,1.000,bicubic,+5.119,+1.402,-56\nlevit_conv_256.fb_dist_in1k,224,86.726,13.274,97.254,2.746,18.89,0.900,bicubic,+5.220,+1.788,-45\nlevit_256.fb_dist_in1k,224,86.724,13.276,97.254,2.746,18.89,0.900,bicubic,+5.218,+1.788,-47\nvit_srelpos_small_patch16_224.sw_in1k,224,86.711,13.289,97.244,2.756,21.97,0.900,bicubic,+5.603,+1.664,+21\npit_b_224.in1k,224,86.707,13.293,96.896,3.104,73.76,0.900,bicubic,+4.249,+1.192,-181\nhgnet_tiny.paddle_in1k,288,86.705,13.295,97.310,2.690,14.74,1.000,bicubic,+6.061,+1.754,+88\nseresnext50_32x4d.racm_in1k,224,86.705,13.295,97.203,2.797,27.56,0.875,bicubic,+5.437,+1.579,-7\nhalo2botnet50ts_256.a1h_in1k,256,86.703,13.297,97.090,2.910,22.64,0.950,bicubic,+4.619,+1.432,-126\nresnet50.ram_in1k,288,86.696,13.304,97.195,2.805,25.56,0.950,bicubic,+6.714,+2.157,+173\ncrossvit_sm
all_240.in1k,240,86.694,13.306,97.282,2.718,26.86,0.875,bicubic,+5.658,+1.816,+25\nvit_small_patch16_rope_ape_224.naver_in1k,224,86.694,13.306,96.853,3.147,22.06,0.900,bicubic,+5.678,+1.865,+27\necaresnet50t.ra2_in1k,256,86.690,13.310,97.340,2.660,25.57,0.875,bicubic,+5.240,+1.662,-41\ntf_efficientnet_b1.ns_jft_in1k,240,86.688,13.312,97.393,2.607,7.79,0.882,bicubic,+5.300,+1.853,-32\nwide_resnet50_2.racm_in1k,224,86.677,13.323,97.212,2.788,68.88,0.875,bicubic,+5.211,+1.994,-47\nswin_tiny_patch4_window7_224.ms_in1k,224,86.677,13.323,97.201,2.799,28.29,0.900,bicubic,+5.289,+1.445,-33\nresnet50d.a1_in1k,288,86.675,13.325,96.684,3.316,25.58,1.000,bicubic,+5.209,+1.006,-48\necaresnet50d_pruned.miil_in1k,288,86.669,13.331,97.431,2.568,19.94,0.950,bicubic,+5.861,+1.865,+55\ngernet_l.idstcv_in1k,256,86.666,13.334,97.205,2.795,31.08,0.875,bilinear,+5.312,+1.463,-31\ntwins_pcpvt_small.in1k,224,86.652,13.348,97.320,2.679,24.11,0.900,bicubic,+5.536,+1.697,+1\nresnext101_32x8d.tv2_in1k,176,86.652,13.348,97.203,2.797,88.79,0.875,bilinear,+4.662,+1.487,-123\npoolformer_s36.sail_in1k,224,86.643,13.357,97.145,2.855,30.86,0.900,bicubic,+5.215,+1.515,-45\nresmlp_24_224.fb_distilled_in1k,224,86.635,13.366,97.139,2.861,30.02,0.875,bicubic,+5.868,+1.919,+54\nresnet50.c2_in1k,288,86.630,13.370,97.346,2.654,25.56,1.000,bicubic,+5.768,+1.816,+41\nrepvit_m2.dist_in1k,224,86.628,13.372,97.212,2.788,8.80,0.950,bicubic,+6.160,+2.040,+91\nefficientnet_b3_pruned.in1k,300,86.620,13.380,97.182,2.818,9.86,0.904,bicubic,+5.756,+1.916,+38\nresnest50d_4s2x40d.in1k,224,86.615,13.385,97.284,2.716,30.42,0.875,bicubic,+5.469,+1.718,-9\nefficientnet_el.ra_in1k,300,86.615,13.385,97.177,2.823,10.59,0.904,bicubic,+5.307,+1.639,-33\ngcresnext50ts.ch_in1k,288,86.605,13.395,97.182,2.818,15.67,1.000,bicubic,+5.363,+1.646,-25\nsehalonet33ts.ra2_in1k,256,86.596,13.404,97.019,2.981,13.69,0.940,bicubic,+5.620,+1.747,+12\nrepvgg_b3.rvgg_in1k,224,86.594,13.406,97.130,2.869,123.09,0.875,bilinear,+6.066,+1.802,+78\nnf_res
net50.ra2_in1k,288,86.590,13.410,97.297,2.703,25.56,0.940,bicubic,+5.932,+1.947,+61\nsebotnet33ts_256.a1h_in1k,256,86.581,13.419,96.780,3.220,13.70,0.940,bicubic,+5.425,+1.620,-18\nresnet50.a1_in1k,288,86.579,13.421,96.806,3.194,25.56,1.000,bicubic,+5.339,+1.694,-29\nfastvit_sa12.apple_in1k,256,86.577,13.423,97.210,2.791,11.58,0.900,bicubic,+5.731,+1.864,+32\nwide_resnet50_2.tv2_in1k,224,86.568,13.432,97.250,2.750,68.88,0.965,bilinear,+4.956,+1.484,-92\nmambaout_femto.in1k,288,86.558,13.442,97.269,2.731,7.30,1.000,bicubic,+6.646,+2.135,+152\nxcit_tiny_24_p16_224.fb_dist_in1k,224,86.538,13.462,97.214,2.786,12.12,1.000,bicubic,+6.076,+2.012,+81\nseresnet50.a2_in1k,288,86.532,13.468,96.934,3.066,28.09,1.000,bicubic,+5.438,+1.704,-11\nrepvit_m1_0.dist_450e_in1k,224,86.530,13.470,97.098,2.902,7.30,0.950,bicubic,+6.100,+2.176,+88\nresnet50.c1_in1k,288,86.528,13.472,97.235,2.765,25.56,1.000,bicubic,+5.600,+1.681,+10\nconvnext_nano.d1h_in1k,288,86.526,13.474,97.177,2.823,15.59,1.000,bicubic,+5.052,+1.507,-77\nmobilenetv3_large_150d.ra4_e3600_r256_in1k,256,86.515,13.485,97.190,2.810,14.62,0.950,bilinear,+5.521,+1.716,-2\nresnext101_32x4d.fb_ssl_yfcc100m_ft_in1k,224,86.511,13.489,97.466,2.534,44.18,0.875,bilinear,+5.575,+1.734,+6\nmaxvit_rmlp_pico_rw_256.sw_in1k,256,86.508,13.492,97.214,2.786,7.52,0.950,bicubic,+5.959,+2.028,+58\ndm_nfnet_f0.dm_in1k,192,86.506,13.494,97.293,2.707,71.49,0.900,bicubic,+5.070,+1.621,-72\nmobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k,256,86.504,13.496,97.203,2.797,8.46,0.950,bicubic,+5.820,+1.761,+43\nseresnet33ts.ra2_in1k,288,86.504,13.496,97.180,2.820,19.78,1.000,bicubic,+5.690,+1.834,+23\nvit_small_patch16_384.augreg_in1k,384,86.502,13.498,97.186,2.814,22.20,1.000,bicubic,+5.372,+1.596,-28\nhaloregnetz_b.ra3_in1k,224,86.502,13.498,96.949,3.051,11.68,0.940,bicubic,+5.444,+1.753,-16\ngcresnet50t.ra2_in1k,256,86.491,13.509,97.130,2.869,25.90,0.900,bicubic,+5.547,+1.672,-2\nmobilenetv4_conv_blur_medium.e500_r224_in1k,256,86.487,13.513,97.273,2.727,9.
72,1.000,bicubic,+6.335,+1.975,+115\necaresnet50d.miil_in1k,224,86.485,13.515,97.186,2.814,25.58,0.875,bicubic,+5.851,+1.876,+45\nresnet152.a2_in1k,224,86.483,13.517,96.595,3.405,60.19,0.950,bicubic,+4.719,+1.325,-122\nhalonet50ts.a1h_in1k,256,86.481,13.519,97.156,2.844,22.73,0.940,bicubic,+4.841,+1.544,-115\nresnet50.d_in1k,288,86.472,13.528,97.056,2.944,25.56,1.000,bicubic,+5.460,+1.600,-16\nresnet152s.gluon_in1k,224,86.466,13.534,97.139,2.861,60.32,0.875,bicubic,+5.418,+1.697,-22\nresnetv2_50x1_bit.goog_in21k_ft_in1k,448,86.464,13.536,97.613,2.387,25.55,1.000,bilinear,+6.078,+1.919,+74\nrepvit_m1_0.dist_300e_in1k,224,86.464,13.536,97.049,2.951,7.30,0.950,bicubic,+6.312,+2.279,+109\nmobilevitv2_200.cvnets_in1k,256,86.459,13.541,96.981,3.019,18.45,0.888,bicubic,+5.311,+1.601,-43\ntf_efficientnetv2_b3.in21k_ft_in1k,240,86.455,13.545,97.417,2.583,14.36,0.900,bicubic,+5.377,+1.497,-32\nresnet50.b1k_in1k,288,86.453,13.547,97.235,2.765,25.56,1.000,bicubic,+5.745,+2.057,+23\nresnext50d_32x4d.bt_in1k,288,86.453,13.547,97.152,2.848,25.05,0.950,bicubic,+5.777,+1.726,+29\nresnet50.tv2_in1k,224,86.451,13.549,97.150,2.850,25.56,0.965,bilinear,+5.605,+1.720,+2\nresnetv2_34d.ra4_e3600_r384_in1k,448,86.440,13.560,97.218,2.782,21.82,1.000,bicubic,+6.010,+1.936,+62\nregnety_016.tv2_in1k,224,86.432,13.568,97.214,2.786,11.20,0.965,bicubic,+5.764,+1.878,+26\nresnest50d_1s4x24d.in1k,224,86.430,13.570,97.150,2.850,25.68,0.875,bicubic,+5.406,+1.812,-29\nhgnetv2_b1.ssld_stage2_ft_in1k,288,86.421,13.579,97.184,2.816,6.34,1.000,bicubic,+6.505,+1.996,+119\nmobilenetv4_conv_medium.e500_r224_in1k,256,86.410,13.590,97.216,2.784,9.72,1.000,bicubic,+6.574,+2.024,+136\nresnet50d.ra4_e3600_r224_in1k,224,86.410,13.590,97.133,2.867,25.58,0.950,bicubic,+5.454,+1.753,-24\npoolformerv2_s24.sail_in1k,224,86.406,13.594,97.120,2.880,21.34,1.000,bicubic,+5.660,+1.802,+8\nmobilenetv4_hybrid_medium.e500_r224_in1k,224,86.404,13.596,97.162,2.837,11.07,0.950,bicubic,+5.972,+1.784,+54\nresnet152.a1_in1k,224,86.38
0,13.620,96.513,3.487,60.19,0.950,bicubic,+4.382,+0.625,-177\nfastvit_t12.apple_dist_in1k,256,86.365,13.634,97.086,2.914,7.55,0.900,bicubic,+6.001,+2.046,+63\nlamhalobotnet50ts_256.a1h_in1k,256,86.365,13.634,97.058,2.942,22.57,0.950,bicubic,+4.813,+1.548,-124\ndarknet53.c2ns_in1k,288,86.363,13.637,97.120,2.880,41.61,1.000,bicubic,+5.821,+1.692,+31\ndarknetaa53.c2ns_in1k,288,86.361,13.639,97.145,2.855,36.02,1.000,bilinear,+5.833,+1.891,+31\nrepvgg_b3g4.rvgg_in1k,224,86.361,13.639,97.045,2.955,83.83,0.875,bilinear,+6.137,+1.945,+84\nresnetv2_50d_gn.ah_in1k,224,86.359,13.641,97.139,2.861,25.57,0.950,bicubic,+5.559,+1.783,-5\ntf_efficientnet_b3.in1k,300,86.346,13.654,96.923,3.077,12.23,0.904,bicubic,+5.468,+1.615,-19\nefficientformer_l1.snap_dist_in1k,224,86.344,13.656,97.017,2.983,12.29,0.950,bicubic,+5.842,+2.025,+30\nnf_resnet50.ra2_in1k,256,86.340,13.660,97.064,2.936,25.56,0.940,bicubic,+6.104,+1.960,+78\nresnet101.a1_in1k,224,86.340,13.660,96.543,3.457,44.55,0.950,bicubic,+4.846,+1.379,-122\nresnext50_32x4d.a1h_in1k,224,86.338,13.662,96.970,3.030,25.03,0.950,bicubic,+5.198,+1.650,-65\nlegacy_senet154.in1k,224,86.338,13.662,96.934,3.066,115.09,0.875,bilinear,+5.018,+1.432,-93\nresnet50.a1h_in1k,224,86.336,13.664,97.058,2.942,25.56,1.000,bicubic,+5.682,+1.746,+8\nmobilevitv2_175.cvnets_in1k,256,86.331,13.669,96.975,3.025,14.25,0.888,bicubic,+5.461,+1.695,-24\nresnet50d.a2_in1k,288,86.327,13.673,96.684,3.316,25.58,1.000,bicubic,+5.163,+1.584,-74\ncait_xxs36_224.fb_dist_in1k,224,86.323,13.677,97.113,2.887,17.30,1.000,bicubic,+6.573,+2.247,+127\nmobilenetv4_conv_medium.e500_r256_in1k,256,86.321,13.679,97.094,2.906,9.72,0.950,bicubic,+6.405,+1.930,+96\npit_s_224.in1k,224,86.319,13.681,97.041,2.959,23.46,0.900,bicubic,+5.204,+1.403,-67\ncs3darknet_l.c2ns_in1k,256,86.316,13.684,97.214,2.786,21.16,0.887,bicubic,+5.966,+1.910,+47\nvit_small_patch32_384.augreg_in21k_ft_in1k,384,86.310,13.690,97.412,2.588,22.92,1.000,bicubic,+5.834,+2.218,+21\nefficientnet_b2.ra_in1k,288,86.31
0,13.690,96.990,3.010,9.11,1.000,bicubic,+5.698,+1.670,+6\nresnet50.b2k_in1k,288,86.308,13.692,97.066,2.934,25.56,1.000,bicubic,+5.842,+2.032,+22\ngernet_m.idstcv_in1k,224,86.304,13.696,97.094,2.906,21.14,0.875,bilinear,+5.602,+1.908,-7\nsenet154.gluon_in1k,224,86.304,13.696,96.938,3.062,115.09,0.875,bicubic,+5.046,+1.580,-96\nresnext50_32x4d.a1_in1k,288,86.299,13.701,96.701,3.299,25.03,1.000,bicubic,+4.819,+1.549,-136\nvit_pwee_patch16_reg1_gap_256.sbb_in1k,256,86.297,13.703,97.143,2.857,15.25,0.950,bicubic,+6.215,+2.005,+74\ngcresnet33ts.ra2_in1k,288,86.295,13.705,97.043,2.957,19.88,1.000,bicubic,+5.707,+1.725,+2\nhgnetv2_b1.ssld_stage1_in22k_in1k,288,86.276,13.724,97.141,2.859,6.34,1.000,bicubic,+7.226,+2.251,+198\nresnext50_32x4d.tv2_in1k,224,86.276,13.724,97.054,2.946,25.03,0.965,bilinear,+5.086,+1.710,-90\nresnext50_32x4d.a2_in1k,288,86.274,13.726,96.665,3.335,25.03,1.000,bicubic,+4.970,+1.551,-108\nregnetz_b16.ra3_in1k,224,86.263,13.737,96.998,3.002,9.72,0.940,bicubic,+6.399,+2.016,+95\neca_resnet33ts.ra2_in1k,288,86.257,13.743,97.165,2.835,19.68,1.000,bicubic,+5.541,+1.783,-20\ngcvit_xxtiny.in1k,224,86.252,13.748,97.107,2.893,12.00,0.875,bicubic,+6.496,+2.049,+108\nresnetv2_50d_evos.ah_in1k,224,86.248,13.752,97.017,2.983,25.59,0.950,bicubic,+5.420,+1.747,-39\nregnetx_032.tv2_in1k,224,86.235,13.765,97.058,2.942,15.30,0.965,bicubic,+5.319,+1.810,-56\nresnest50d.in1k,224,86.220,13.780,97.062,2.938,27.48,0.875,bilinear,+5.262,+1.690,-65\nconvmixer_768_32.in1k,224,86.220,13.780,97.024,2.976,21.11,0.960,bicubic,+6.062,+1.954,+57\nvit_base_patch16_384.augreg_in1k,384,86.214,13.786,96.960,3.040,86.86,1.000,bicubic,+5.112,+1.626,-84\ncs3darknet_focus_l.c2ns_in1k,256,86.212,13.788,97.126,2.874,21.15,0.887,bicubic,+5.958,+1.832,+44\nefficientnet_b1.ra4_e3600_r240_in1k,240,86.205,13.795,96.968,3.032,7.79,0.900,bicubic,+5.801,+1.816,+18\ntresnet_m.miil_in1k,224,86.197,13.803,96.676,3.324,31.39,0.875,bilinear,+5.381,+1.814,-44\nefficientnet_el_pruned.in1k,300,86.195,13.80
5,97.024,2.976,10.59,0.904,bicubic,+5.911,+1.806,+32\nmambaout_kobe.in1k,224,86.192,13.807,96.990,3.010,9.14,1.000,bicubic,+6.206,+2.008,+66\necaresnet101d_pruned.miil_in1k,224,86.186,13.814,97.325,2.675,24.88,0.875,bicubic,+5.376,+1.683,-45\ncspdarknet53.ra_in1k,256,86.182,13.818,96.979,3.021,27.64,0.887,bilinear,+6.104,+1.919,+56\nrexnet_150.nav_in1k,224,86.178,13.822,97.064,2.936,9.73,0.875,bicubic,+5.856,+2.084,+22\nresnet101.a2_in1k,224,86.171,13.829,96.548,3.452,44.55,0.950,bicubic,+4.827,+1.350,-131\nxcit_tiny_12_p8_224.fb_in1k,224,86.160,13.839,97.030,2.970,6.71,1.000,bicubic,+6.451,+2.212,+100\nefficientvit_b1.r288_in1k,288,86.158,13.842,96.928,3.072,9.10,1.000,bicubic,+5.836,+1.764,+20\nresnetrs101.tf_in1k,192,86.154,13.846,96.891,3.109,63.62,0.940,bicubic,+5.448,+1.651,-35\necaresnet50t.a1_in1k,224,86.154,13.846,96.778,3.222,25.57,0.950,bicubic,+4.872,+1.628,-126\ninception_v4.tf_in1k,299,86.150,13.850,96.925,3.075,42.68,0.875,bicubic,+6.006,+1.944,+44\nres2net101d.in1k,224,86.143,13.857,96.844,3.156,45.23,0.875,bilinear,+4.907,+1.498,-120\nresnet50.a2_in1k,288,86.135,13.865,96.676,3.324,25.56,1.000,bicubic,+5.361,+1.698,-51\ninception_resnet_v2.tf_in1k,299,86.129,13.871,97.034,2.966,55.84,0.897,bicubic,+5.695,+1.722,-3\nconvnext_nano_ols.d1h_in1k,224,86.118,13.882,96.834,3.166,15.65,0.950,bicubic,+5.212,+1.460,-75\nresnetaa50.a1h_in1k,224,86.109,13.891,96.960,3.040,25.56,0.950,bicubic,+5.495,+1.748,-31\nmobilevitv2_150.cvnets_in1k,256,86.086,13.914,96.847,3.154,10.59,0.888,bicubic,+5.706,+1.741,+5\ncspresnext50.ra_in1k,256,86.079,13.921,97.101,2.899,20.57,0.887,bilinear,+5.531,+1.771,-28\nresnext50_32x4d.fb_ssl_yfcc100m_ft_in1k,224,86.071,13.929,97.205,2.795,25.03,0.875,bilinear,+5.755,+1.819,+10\nresnet101s.gluon_in1k,224,86.067,13.933,97.032,2.968,44.67,0.875,bicubic,+5.789,+1.872,+15\ntf_efficientnet_el.in1k,300,86.067,13.933,96.964,3.036,10.59,0.904,bicubic,+5.821,+2.200,+24\necaresnet50t.a2_in1k,224,86.064,13.936,96.725,3.275,25.57,0.950,bicubic,+5.
180,+1.703,-77\nlambda_resnet50ts.a1h_in1k,256,86.056,13.944,96.740,3.260,21.54,0.950,bicubic,+4.872,+1.646,-124\nconvnext_pico_ols.d1_in1k,288,86.054,13.946,97.022,2.978,9.06,1.000,bicubic,+5.596,+1.774,-18\nedgenext_small_rw.sw_in1k,320,86.052,13.948,96.943,3.057,7.83,1.000,bicubic,+5.600,+1.737,-17\necaresnetlight.miil_in1k,224,86.043,13.957,97.079,2.921,30.16,0.875,bicubic,+5.589,+1.831,-19\nresnetv2_34d.ra4_e3600_r384_in1k,384,86.041,13.959,96.985,3.015,21.82,1.000,bicubic,+6.249,+2.083,+70\npoolformer_s24.sail_in1k,224,86.026,13.974,97.028,2.972,21.39,0.900,bicubic,+5.732,+1.968,+4\nconvnext_pico.d1_in1k,288,86.020,13.980,96.938,3.062,9.05,0.950,bicubic,+5.594,+1.876,-15\nefficientformerv2_s1.snap_dist_in1k,224,86.020,13.980,96.836,3.164,6.19,0.950,bicubic,+6.336,+2.124,+80\nresnetv2_50.a1h_in1k,224,86.017,13.982,96.904,3.096,25.55,0.950,bicubic,+5.608,+1.822,-15\nseresnext101_32x4d.gluon_in1k,224,86.009,13.991,96.981,3.019,48.96,0.875,bicubic,+5.123,+1.713,-89\ngcresnext50ts.ch_in1k,256,86.009,13.991,96.960,3.040,15.67,0.900,bicubic,+5.401,+1.780,-46\nseresnet33ts.ra2_in1k,256,86.007,13.993,97.007,2.993,19.78,0.900,bicubic,+5.627,+1.957,-13\nresnet50d.ra2_in1k,224,86.005,13.995,96.987,3.013,25.58,0.875,bicubic,+5.459,+1.827,-44\nconvnextv2_pico.fcmae_ft_in1k,224,86.005,13.995,96.983,3.017,9.07,0.875,bicubic,+5.701,+1.901,-5\necaresnet26t.ra2_in1k,320,86.000,14.000,97.015,2.985,16.01,0.950,bicubic,+6.100,+1.945,+43\nmobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k,224,85.994,14.006,96.964,3.036,8.46,0.900,bicubic,+5.878,+1.970,+18\nresnet152.a3_in1k,224,85.981,14.019,96.851,3.149,60.19,0.950,bicubic,+5.433,+1.847,-48\nvit_base_patch32_224.augreg_in21k_ft_in1k,224,85.979,14.021,97.137,2.863,88.22,0.900,bicubic,+5.247,+1.563,-73\ntf_efficientnet_b2.ap_in1k,260,85.979,14.021,96.821,3.179,9.11,0.890,bicubic,+5.679,+1.789,-9\nresnetblur50.bt_in1k,288,85.977,14.023,96.968,3.032,25.56,0.950,bicubic,+5.729,+1.768,+1\ntf_efficientnetv2_b3.in1k,240,85.968,14.032,96.832,3.168,
14.36,0.904,bicubic,+5.260,+1.402,-70\nresnext50_32x4d.ra_in1k,288,85.956,14.044,97.032,2.968,25.03,0.950,bicubic,+5.236,+1.688,-75\nseresnext101_64x4d.gluon_in1k,224,85.956,14.044,96.987,3.013,88.23,0.875,bicubic,+5.072,+1.679,-100\nresnet50d.a1_in1k,224,85.956,14.044,96.366,3.634,25.58,0.950,bicubic,+5.225,+1.688,-77\nconvnext_nano.d1h_in1k,224,85.926,14.074,96.855,3.145,15.59,0.950,bicubic,+5.164,+1.523,-83\nfbnetv3_d.ra2_in1k,256,85.924,14.076,97.028,2.972,10.31,0.950,bilinear,+6.242,+2.076,+62\ntf_efficientnetv2_b2.in1k,260,85.921,14.079,96.868,3.132,10.10,0.890,bicubic,+5.695,+1.858,-2\nvit_large_patch32_384.orig_in21k_ft_in1k,384,85.913,14.087,97.368,2.632,306.63,1.000,bicubic,+4.405,+1.282,-209\ndarknet53.c2ns_in1k,256,85.902,14.098,96.862,3.139,41.61,0.887,bicubic,+5.894,+1.816,+15\nresnet152d.gluon_in1k,224,85.902,14.098,96.802,3.198,60.21,0.875,bicubic,+5.426,+1.208,-51\nresnet50_gn.a1h_in1k,224,85.883,14.117,96.862,3.139,25.56,0.940,bicubic,+5.809,+1.956,+8\ntf_efficientnet_b2.aa_in1k,260,85.881,14.119,96.855,3.145,9.11,0.890,bicubic,+5.807,+1.897,+8\nvit_base_patch16_224.sam_in1k,224,85.881,14.119,96.701,3.299,86.57,0.900,bicubic,+5.635,+1.579,-10\nresnet101d.gluon_in1k,224,85.877,14.123,96.684,3.316,44.57,0.875,bicubic,+5.411,+1.378,-52\nrepvgg_b2g4.rvgg_in1k,224,85.872,14.128,96.814,3.186,61.76,0.875,bilinear,+6.476,+2.134,+81\nefficientvit_b1.r256_in1k,256,85.853,14.147,96.780,3.220,9.10,1.000,bicubic,+6.119,+1.994,+45\nhgnet_tiny.paddle_in1k,224,85.847,14.153,97.030,2.970,14.74,0.965,bicubic,+5.955,+1.976,+22\nmobilenetv4_conv_blur_medium.e500_r224_in1k,224,85.832,14.168,96.947,3.053,9.72,0.950,bicubic,+6.386,+2.457,+74\nseresnet50.ra2_in1k,224,85.830,14.170,97.000,3.000,28.09,0.875,bicubic,+5.570,+1.932,-21\ninception_resnet_v2.tf_ens_adv_in1k,299,85.817,14.183,96.755,3.245,55.84,0.897,bicubic,+5.819,+1.815,+5\nmixnet_xl.ra_in1k,224,85.812,14.188,96.714,3.286,11.90,0.875,bicubic,+5.319,+1.736,-64\nresnet50.c2_in1k,224,85.810,14.190,96.900,3.100,25.
56,0.950,bicubic,+5.942,+2.038,+20\nresnetv2_34d.ra4_e3600_r224_in1k,288,85.806,14.194,96.718,3.282,21.82,1.000,bicubic,+6.192,+1.958,+51\nresnet50.ram_in1k,224,85.806,14.194,96.716,3.284,25.56,0.875,bicubic,+6.772,+2.330,+120\ngcresnet33ts.ra2_in1k,256,85.802,14.198,96.887,3.113,19.88,0.900,bicubic,+5.730,+1.901,-4\nseresnet50.a1_in1k,224,85.791,14.209,96.552,3.448,28.09,0.950,bicubic,+5.779,+1.848,-3\nlegacy_seresnext101_32x4d.in1k,224,85.766,14.235,96.761,3.239,48.96,0.875,bilinear,+5.492,+1.729,-32\nres2net50d.in1k,224,85.763,14.237,96.772,3.228,25.72,0.875,bilinear,+5.483,+1.744,-35\nresnet50.c1_in1k,224,85.755,14.245,96.934,3.066,25.56,0.950,bicubic,+5.989,+1.980,+28\nxcit_tiny_24_p16_224.fb_in1k,224,85.751,14.249,96.938,3.062,12.12,1.000,bicubic,+6.277,+2.060,+59\nresnet101.a3_in1k,224,85.748,14.252,96.499,3.502,44.55,0.950,bicubic,+5.912,+1.791,+20\ntf_efficientnet_lite3.in1k,300,85.744,14.256,96.889,3.111,8.20,0.904,bilinear,+5.936,+1.981,+21\nese_vovnet39b.ra_in1k,224,85.740,14.260,96.900,3.100,24.57,0.875,bicubic,+6.428,+2.178,+77\nresnext101_32x4d.gluon_in1k,224,85.740,14.260,96.622,3.378,44.18,0.875,bicubic,+5.404,+1.714,-50\ncspresnet50.ra_in1k,256,85.731,14.269,96.797,3.203,21.62,0.887,bilinear,+6.153,+2.099,+43\nrepvit_m0_9.dist_450e_in1k,224,85.729,14.271,96.823,3.177,5.49,0.950,bicubic,+6.671,+2.435,+105\neca_resnet33ts.ra2_in1k,256,85.727,14.273,96.896,3.104,19.68,0.900,bicubic,+5.643,+1.916,-21\nregnety_320.pycls_in1k,224,85.719,14.281,96.725,3.275,145.05,0.875,bicubic,+4.923,+1.479,-119\nresnet50.a1_in1k,224,85.712,14.288,96.486,3.514,25.56,0.950,bicubic,+5.330,+1.888,-61\nxception71.tf_in1k,299,85.710,14.290,96.767,3.232,42.34,0.903,bicubic,+5.790,+1.859,-11\ndarknetaa53.c2ns_in1k,256,85.706,14.294,96.883,3.117,36.02,0.887,bilinear,+5.952,+1.979,+18\nresnext101_64x4d.gluon_in1k,224,85.701,14.299,96.644,3.356,83.46,0.875,bicubic,+5.061,+1.644,-101\nresnet50.ra_in1k,288,85.697,14.303,96.894,3.107,25.56,0.950,bicubic,+5.833,+1.924,0\nresmlp_big_24
_224.fb_in1k,224,85.697,14.303,96.419,3.580,129.14,0.875,bicubic,+4.663,+1.394,-162\nresnet33ts.ra2_in1k,288,85.682,14.318,96.757,3.243,19.68,1.000,bicubic,+5.972,+1.699,+21\nhgnetv2_b0.ssld_stage2_ft_in1k,288,85.680,14.320,96.823,3.177,6.00,1.000,bicubic,+7.090,+2.435,+138\nefficientnet_em.ra2_in1k,240,85.678,14.322,96.941,3.059,6.90,0.882,bicubic,+6.422,+2.412,+72\ndeit_small_patch16_224.fb_in1k,224,85.672,14.328,96.913,3.087,22.05,0.900,bicubic,+5.816,+1.857,-1\ndpn107.mx_in1k,224,85.669,14.331,96.757,3.243,86.92,0.875,bicubic,+5.503,+1.825,-41\necaresnet50t.a3_in1k,224,85.669,14.331,96.738,3.262,25.57,0.950,bicubic,+6.121,+2.028,+32\nwide_resnet50_2.tv2_in1k,176,85.644,14.356,96.834,3.166,68.88,0.875,bilinear,+5.202,+1.758,-83\nefficientnet_b2_pruned.in1k,260,85.640,14.360,96.738,3.262,8.31,0.890,bicubic,+5.736,+2.072,-16\nresnet50d.a2_in1k,224,85.625,14.375,96.407,3.593,25.58,0.950,bicubic,+5.357,+1.765,-57\nresmlp_36_224.fb_in1k,224,85.623,14.377,96.795,3.205,44.69,0.875,bicubic,+5.850,+1.907,+2\ntiny_vit_5m_224.in1k,224,85.614,14.386,96.947,3.053,5.39,0.950,bicubic,+6.418,+2.155,+72\nseresnet50.a2_in1k,224,85.603,14.397,96.575,3.425,28.09,0.950,bicubic,+5.501,+1.855,-41\nresnet50.bt_in1k,288,85.601,14.399,96.812,3.188,25.56,0.950,bicubic,+5.949,+1.904,+13\nlevit_192.fb_dist_in1k,224,85.601,14.399,96.742,3.258,10.95,0.900,bicubic,+5.743,+1.934,-11\nmobilevitv2_125.cvnets_in1k,256,85.601,14.399,96.661,3.339,7.48,0.888,bicubic,+5.919,+1.817,+11\nlevit_conv_192.fb_dist_in1k,224,85.595,14.405,96.744,3.256,10.95,0.900,bicubic,+5.735,+1.942,-15\necaresnet50d_pruned.miil_in1k,224,85.584,14.416,96.934,3.066,19.94,0.875,bicubic,+5.870,+2.072,+2\nmambaout_femto.in1k,224,85.580,14.420,96.738,3.262,7.30,1.000,bicubic,+6.700,+2.330,+96\nresnet152c.gluon_in1k,224,85.573,14.427,96.671,3.329,60.21,0.875,bicubic,+5.669,+1.817,-28\nresnext50d_32x4d.bt_in1k,224,85.569,14.431,96.750,3.250,25.05,0.875,bicubic,+5.897,+1.872,+6\ntf_efficientnetv2_b1.in1k,240,85.565,14.435,96.710,3.2
90,8.14,0.882,bicubic,+6.047,+2.128,+19\nresnext50_32x4d.a2_in1k,224,85.563,14.437,96.298,3.702,25.03,0.950,bicubic,+5.119,+1.668,-99\nregnety_120.pycls_in1k,224,85.561,14.439,96.757,3.243,51.82,0.875,bicubic,+5.179,+1.653,-90\nresnet32ts.ra2_in1k,288,85.558,14.442,96.868,3.132,17.96,1.000,bicubic,+6.170,+2.288,+29\npit_xs_distilled_224.in1k,224,85.543,14.457,96.689,3.312,11.00,0.900,bicubic,+6.371,+2.339,+60\nwide_resnet101_2.tv2_in1k,176,85.541,14.459,96.569,3.431,126.89,0.875,bilinear,+5.047,+1.631,-114\nnf_regnet_b1.ra2_in1k,288,85.539,14.461,96.797,3.203,10.22,0.900,bicubic,+6.171,+2.071,+30\nregnetx_320.pycls_in1k,224,85.524,14.476,96.665,3.335,107.81,0.875,bicubic,+5.266,+1.649,-73\nfbnetv3_b.ra2_in1k,256,85.516,14.484,96.868,3.132,8.60,0.950,bilinear,+6.362,+2.760,+57\nmobilenetv4_conv_medium.e500_r224_in1k,224,85.496,14.504,96.932,3.068,9.72,0.950,bicubic,+6.404,+2.144,+60\nresnet152.gluon_in1k,224,85.496,14.504,96.565,3.435,60.19,0.875,bicubic,+5.784,+1.845,-10\nconvnextv2_femto.fcmae_ft_in1k,288,85.494,14.506,96.787,3.213,5.23,0.950,bicubic,+6.152,+2.197,+29\nregnety_160.pycls_in1k,224,85.492,14.508,96.624,3.376,83.59,0.875,bicubic,+5.208,+1.642,-85\nresnetrs50.tf_in1k,224,85.488,14.512,96.753,3.247,35.69,0.910,bicubic,+5.554,+1.781,-50\ndpn92.mx_in1k,224,85.484,14.516,96.659,3.341,37.67,0.875,bicubic,+5.452,+1.783,-58\nresnet152.tv2_in1k,176,85.484,14.516,96.496,3.504,60.19,0.875,bilinear,+5.262,+1.846,-72\nresnet50.b1k_in1k,224,85.479,14.521,96.721,3.280,25.56,0.950,bicubic,+5.915,+2.099,-2\nresnet50.d_in1k,224,85.479,14.521,96.560,3.440,25.56,0.950,bicubic,+5.575,+1.722,-45\nrexnet_130.nav_in1k,224,85.469,14.531,96.693,3.307,7.56,0.875,bicubic,+5.977,+2.013,+5\nfastvit_s12.apple_in1k,256,85.450,14.550,96.731,3.269,9.47,0.900,bicubic,+5.567,+1.933,-44\nresnet50.a2_in1k,224,85.447,14.553,96.492,3.508,25.56,0.950,bicubic,+5.619,+1.932,-33\nresnext50_32x4d.a1_in1k,224,85.443,14.557,96.144,3.856,25.03,0.950,bicubic,+4.917,+1.686,-132\nefficientnet_b0.ra4_e3
600_r224_in1k,256,85.439,14.561,96.753,3.247,5.29,1.000,bicubic,+6.075,+1.999,+15\nconvnext_tiny.fb_in22k_ft_in1k,288,85.437,14.563,96.814,3.186,28.59,1.000,bicubic,+6.487,+2.534,+61\ndpn131.mx_in1k,224,85.435,14.565,96.616,3.384,79.25,0.875,bicubic,+5.599,+2.008,-39\nresnet101.tv2_in1k,176,85.415,14.585,96.409,3.591,44.55,0.875,bilinear,+5.507,+1.803,-56\ntf_efficientnet_b2.in1k,260,85.390,14.610,96.592,3.408,9.11,0.890,bicubic,+5.778,+1.878,-16\nregnetx_160.pycls_in1k,224,85.383,14.617,96.639,3.361,54.28,0.875,bicubic,+5.539,+1.787,-45\nresnet50.tv2_in1k,176,85.383,14.617,96.558,3.442,25.56,0.875,bilinear,+5.949,+1.918,+3\ndla102x2.in1k,224,85.381,14.619,96.633,3.367,41.28,0.875,bilinear,+5.933,+1.979,-3\ngmlp_s16_224.ra3_in1k,224,85.362,14.638,96.639,3.361,19.42,0.875,bicubic,+5.710,+2.015,-22\nregnetx_016.tv2_in1k,224,85.360,14.640,96.829,3.171,9.19,0.965,bicubic,+5.920,+2.061,-2\nrepvit_m0_9.dist_300e_in1k,224,85.356,14.644,96.627,3.373,5.49,0.950,bicubic,+6.704,+2.513,+86\nefficientnet_b2.ra_in1k,256,85.353,14.646,96.539,3.461,9.11,0.875,bicubic,+6.029,+1.957,+10\nresnet34.ra4_e3600_r224_in1k,288,85.347,14.653,96.695,3.305,21.80,1.000,bicubic,+6.375,+2.241,+46\nresnetv2_34.ra4_e3600_r224_in1k,288,85.347,14.653,96.633,3.367,21.80,1.000,bicubic,+6.277,+2.073,+39\nskresnext50_32x4d.ra_in1k,224,85.345,14.655,96.390,3.610,27.48,0.875,bicubic,+5.177,+1.746,-93\ndpn98.mx_in1k,224,85.338,14.662,96.507,3.493,61.57,0.875,bicubic,+5.674,+1.863,-31\nlambda_resnet26t.c1_in1k,256,85.334,14.666,96.706,3.294,10.96,0.940,bicubic,+6.208,+2.152,+29\nseresnext50_32x4d.gluon_in1k,224,85.334,14.666,96.684,3.316,27.56,0.875,bicubic,+5.418,+1.868,-71\nxception65.tf_in1k,299,85.321,14.679,96.629,3.371,39.92,0.903,bicubic,+5.763,+1.973,-25\nbotnet26t_256.c1_in1k,256,85.319,14.681,96.609,3.390,12.49,0.950,bicubic,+6.063,+1.816,+14\nresnet101c.gluon_in1k,224,85.313,14.687,96.422,3.578,44.57,0.875,bicubic,+5.771,+1.716,-25\nresnext50_32x4d.a3_in1k,224,85.313,14.687,96.336,3.664,25.03,0.95
0,bicubic,+6.055,+2.028,+11\nhgnetv2_b0.ssld_stage1_in22k_in1k,288,85.302,14.698,96.787,3.213,6.00,1.000,bicubic,+7.264,+2.543,+128\nconvnext_pico_ols.d1_in1k,224,85.294,14.706,96.659,3.341,9.06,0.950,bicubic,+5.752,+2.071,-28\nresnetblur50.bt_in1k,224,85.289,14.711,96.528,3.472,25.56,0.875,bicubic,+5.983,+1.990,+1\nregnety_064.pycls_in1k,224,85.287,14.713,96.633,3.367,30.58,0.875,bicubic,+5.553,+1.865,-50\nresnet34d.ra2_in1k,288,85.279,14.721,96.691,3.309,21.82,0.950,bicubic,+6.839,+2.343,+92\nresmlp_24_224.fb_in1k,224,85.277,14.723,96.505,3.495,30.02,0.875,bicubic,+5.897,+1.955,-14\ncoat_lite_mini.in1k,224,85.266,14.734,96.671,3.329,11.01,0.900,bicubic,+6.162,+2.065,+19\nconvnext_pico.d1_in1k,224,85.257,14.743,96.622,3.378,9.05,0.875,bicubic,+5.741,+2.068,-30\nregnety_080.pycls_in1k,224,85.251,14.749,96.614,3.386,39.18,0.875,bicubic,+5.377,+1.780,-76\nconvnext_femto_ols.d1_in1k,288,85.249,14.751,96.774,3.226,5.23,0.950,bicubic,+6.333,+2.240,+34\ncait_xxs24_224.fb_dist_in1k,224,85.240,14.760,96.714,3.286,11.96,1.000,bicubic,+6.840,+2.388,+90\nresnet33ts.ra2_in1k,256,85.234,14.766,96.605,3.395,19.68,0.900,bicubic,+6.018,+2.029,+4\nresnext50_32x4d.ra_in1k,224,85.230,14.770,96.528,3.472,25.03,0.875,bicubic,+5.430,+1.920,-67\nresnet50.b2k_in1k,224,85.225,14.775,96.644,3.356,25.56,0.950,bicubic,+5.837,+1.968,-25\nefficientvit_b1.r224_in1k,224,85.225,14.775,96.439,3.561,9.10,0.950,bicubic,+5.971,+2.139,-1\nxcit_tiny_12_p16_224.fb_dist_in1k,224,85.219,14.781,96.614,3.386,6.72,1.000,bicubic,+6.647,+2.404,+62\nhalonet26t.a1h_in1k,256,85.217,14.783,96.475,3.525,12.48,0.950,bicubic,+6.083,+2.135,+7\nresnext101_32x8d.tv_in1k,224,85.217,14.783,96.441,3.559,88.79,0.875,bilinear,+5.909,+1.911,-15\npvt_v2_b1.in1k,224,85.213,14.787,96.627,3.373,14.01,0.900,bicubic,+6.515,+2.137,+51\nfastvit_t12.apple_in1k,256,85.198,14.802,96.595,3.405,7.55,0.900,bicubic,+5.922,+2.025,-13\nresnet32ts.ra2_in1k,256,85.195,14.805,96.629,3.371,17.96,0.900,bicubic,+6.141,+2.261,+13\ninception_v3.gluon_i
n1k,299,85.187,14.813,96.548,3.452,23.83,0.875,bicubic,+6.381,+2.176,+35\nresnet50.a1h_in1k,176,85.178,14.822,96.584,3.416,25.56,0.900,bicubic,+5.914,+2.086,-15\nedgenext_small_rw.sw_in1k,256,85.172,14.828,96.432,3.568,7.83,0.900,bicubic,+5.576,+1.914,-55\nshvit_s4.in1k,256,85.170,14.830,96.475,3.525,16.59,0.875,bicubic,+5.808,+2.105,-30\nconvnext_femto.d1_in1k,288,85.166,14.834,96.712,3.288,5.22,0.950,bicubic,+6.454,+2.276,+40\nhrnet_w48.ms_in1k,224,85.166,14.834,96.473,3.527,77.47,0.875,bilinear,+5.838,+1.957,-28\nstarnet_s4.in1k,224,85.157,14.843,96.601,3.399,7.48,0.875,bicubic,+6.333,+2.305,+26\nlegacy_xception.tf_in1k,299,85.155,14.845,96.479,3.521,22.86,0.897,bicubic,+6.097,+2.083,+3\nhgnetv2_b1.ssld_stage1_in22k_in1k,224,85.148,14.851,96.652,3.348,6.34,0.965,bicubic,+7.069,+2.480,+95\nresnet101.gluon_in1k,224,85.148,14.851,96.381,3.619,44.55,0.875,bicubic,+5.843,+1.749,-25\nresnet50.fb_ssl_yfcc100m_ft_in1k,224,85.138,14.862,96.821,3.179,25.56,0.875,bilinear,+5.886,+1.995,-19\neca_halonext26ts.c1_in1k,256,85.138,14.862,96.584,3.416,10.76,0.940,bicubic,+5.620,+1.874,-55\nregnetx_120.pycls_in1k,224,85.136,14.864,96.479,3.521,46.11,0.875,bicubic,+5.528,+1.745,-66\ntf_efficientnet_b1.ap_in1k,240,85.133,14.867,96.411,3.589,7.79,0.882,bicubic,+5.846,+2.095,-29\nresnet50.am_in1k,224,85.131,14.869,96.562,3.438,25.56,0.875,bicubic,+6.127,+2.171,+1\neca_botnext26ts_256.c1_in1k,256,85.129,14.871,96.509,3.491,10.59,0.950,bicubic,+5.867,+1.909,-29\ntf_efficientnetv2_b2.in1k,208,85.129,14.871,96.494,3.506,10.10,0.890,bicubic,+5.921,+1.892,-19\nrepvit_m1.dist_in1k,224,85.112,14.888,96.605,3.395,5.49,0.950,bicubic,+6.570,+2.523,+44\nmobileone_s4.apple_in1k,224,85.102,14.899,96.437,3.563,14.95,0.900,bilinear,+5.656,+1.517,-55\nhrnet_w64.ms_in1k,224,85.099,14.901,96.740,3.260,128.06,0.875,bilinear,+5.641,+2.094,-59\nfbnetv3_d.ra2_in1k,224,85.097,14.903,96.676,3.324,10.31,0.950,bilinear,+6.439,+2.222,+32\nres2net101_26w_4s.in1k,224,85.091,14.909,96.377,3.623,45.21,0.875,bilinear
,+5.899,+1.911,-23\nlambda_resnet26rpt_256.c1_in1k,256,85.076,14.924,96.535,3.465,10.99,0.940,bicubic,+6.116,+2.115,-4\necaresnet26t.ra2_in1k,256,85.074,14.926,96.635,3.365,16.01,0.875,bicubic,+6.180,+2.087,+2\ntf_efficientnet_cc_b1_8e.in1k,240,85.067,14.933,96.430,3.570,39.72,0.882,bicubic,+5.749,+2.046,-45\ndpn68b.ra_in1k,288,85.059,14.941,96.439,3.561,12.61,1.000,bicubic,+5.717,+2.009,-49\nnf_regnet_b1.ra2_in1k,256,85.052,14.948,96.548,3.452,10.22,0.900,bicubic,+6.342,+2.168,+19\nhgnetv2_b1.ssld_stage2_ft_in1k,224,85.044,14.956,96.678,3.322,6.34,0.965,bicubic,+6.160,+2.194,0\ntf_efficientnet_b0.ns_jft_in1k,224,85.020,14.980,96.492,3.508,5.29,0.875,bicubic,+6.340,+2.120,+21\nxcit_nano_12_p8_384.fb_dist_in1k,384,85.008,14.992,96.624,3.376,3.05,1.000,bicubic,+7.198,+2.808,+101\nresnext50_32x4d.gluon_in1k,224,85.003,14.997,96.434,3.566,25.03,0.875,bicubic,+5.649,+2.010,-56\nresnest26d.gluon_in1k,224,84.999,15.001,96.635,3.365,17.07,0.875,bilinear,+6.523,+2.335,+38\ncoat_tiny.in1k,224,84.965,15.035,96.415,3.585,5.50,0.900,bicubic,+6.523,+2.371,+42\nregnety_040.pycls_in1k,224,84.950,15.050,96.601,3.399,20.65,0.875,bicubic,+5.704,+1.935,-40\nmobilevitv2_100.cvnets_in1k,256,84.931,15.069,96.392,3.608,4.90,0.888,bicubic,+6.843,+2.222,+69\nresnet50d.a3_in1k,224,84.931,15.069,96.296,3.704,25.58,0.950,bicubic,+6.189,+2.060,+6\ntf_efficientnet_b1.aa_in1k,240,84.916,15.084,96.364,3.636,7.79,0.882,bicubic,+6.076,+2.162,-5\ndla169.in1k,224,84.914,15.086,96.522,3.478,53.39,0.875,bilinear,+6.212,+2.188,+10\nlegacy_seresnext50_32x4d.in1k,224,84.909,15.091,96.437,3.563,27.56,0.875,bilinear,+5.829,+2.009,-31\nfasternet_t2.in1k,224,84.907,15.093,96.499,3.502,14.98,1.000,bicubic,+6.169,+2.167,+3\nhrnet_w44.ms_in1k,224,84.892,15.108,96.432,3.568,67.06,0.875,bilinear,+6.002,+2.056,-14\nresnet50.ra_in1k,224,84.882,15.118,96.473,3.527,25.56,0.875,bicubic,+6.062,+2.159,-8\nregnety_008_tv.tv2_in1k,224,84.879,15.120,96.601,3.399,6.43,0.965,bicubic,+6.207,+2.221,+8\nregnetx_080.pycls_in1k,224,
84.877,15.123,96.441,3.559,39.57,0.875,bicubic,+5.663,+1.899,-48\nresnet50s.gluon_in1k,224,84.867,15.133,96.441,3.559,25.68,0.875,bicubic,+6.157,+2.193,+2\nvisformer_tiny.in1k,224,84.860,15.140,96.516,3.484,10.32,0.900,bicubic,+6.700,+2.428,+51\ndla60_res2net.in1k,224,84.837,15.163,96.466,3.534,20.85,0.875,bilinear,+6.357,+2.263,+22\ndla60_res2next.in1k,224,84.835,15.165,96.394,3.606,17.03,0.875,bilinear,+6.347,+2.394,+16\nres2net50_26w_8s.in1k,224,84.828,15.172,96.349,3.651,48.40,0.875,bilinear,+5.878,+1.641,-28\ndla102x.in1k,224,84.826,15.174,96.541,3.459,26.31,0.875,bilinear,+6.310,+2.315,+11\nresnet50d.gluon_in1k,224,84.826,15.174,96.388,3.612,25.58,0.875,bicubic,+5.746,+1.930,-44\nlevit_conv_128.fb_dist_in1k,224,84.826,15.174,96.349,3.651,9.21,0.900,bicubic,+6.338,+2.205,+15\nlevit_128.fb_dist_in1k,224,84.824,15.176,96.349,3.651,9.21,0.900,bicubic,+6.338,+2.351,+15\nvit_tiny_patch16_384.augreg_in21k_ft_in1k,384,84.822,15.178,96.710,3.290,5.79,1.000,bicubic,+6.384,+2.168,+23\nresnet26t.ra2_in1k,320,84.820,15.180,96.422,3.578,16.01,1.000,bicubic,+6.490,+2.376,+29\nxception41.tf_in1k,299,84.818,15.182,96.398,3.602,26.97,0.903,bicubic,+6.268,+2.116,+2\nmixnet_l.ft_in1k,224,84.818,15.182,96.330,3.670,7.33,0.875,bicubic,+5.848,+2.150,-38\nrepghostnet_200.in1k,224,84.815,15.185,96.411,3.589,9.80,0.875,bicubic,+6.013,+2.079,-20\nresnet152.tv_in1k,224,84.815,15.185,96.219,3.781,60.19,0.875,bilinear,+6.485,+2.083,+26\nrepvgg_b2.rvgg_in1k,224,84.794,15.206,96.499,3.502,89.02,0.875,bilinear,+6.006,+2.085,-22\nresnet152.a3_in1k,160,84.794,15.206,96.135,3.865,60.19,0.950,bicubic,+5.898,+1.995,-35\nhrnet_w18.ms_aug_in1k,224,84.788,15.212,96.471,3.529,21.30,0.950,bilinear,+6.678,+2.409,+39\nefficientnet_b0.ra4_e3600_r224_in1k,224,84.783,15.217,96.451,3.549,5.29,0.900,bicubic,+6.215,+2.111,-5\nregnetx_064.pycls_in1k,224,84.779,15.221,96.477,3.523,26.21,0.875,bicubic,+5.717,+2.009,-54\npit_xs_224.in1k,224,84.777,15.223,96.509,3.491,10.62,0.900,bicubic,+6.581,+2.353,+30\nresnext5
0_32x4d.tv2_in1k,176,84.775,15.225,96.332,3.668,25.03,0.875,bilinear,+5.391,+2.030,-96\ngcresnext26ts.ch_in1k,288,84.766,15.234,96.298,3.702,10.48,1.000,bicubic,+6.330,+2.270,+11\nhrnet_w40.ms_in1k,224,84.749,15.251,96.556,3.444,57.56,0.875,bilinear,+5.821,+2.076,-46\nres2net50_26w_6s.in1k,224,84.747,15.253,96.285,3.715,37.05,0.875,bilinear,+6.161,+2.165,-13\npoolformerv2_s12.sail_in1k,224,84.743,15.257,96.383,3.617,11.89,1.000,bicubic,+6.735,+2.519,+41\nresnet34.a1_in1k,288,84.743,15.257,96.236,3.764,21.80,1.000,bicubic,+6.813,+2.478,+50\nresmlp_12_224.fb_distilled_in1k,224,84.724,15.276,96.227,3.773,15.35,0.875,bicubic,+6.776,+2.657,+46\nvit_base_patch32_384.augreg_in1k,384,84.717,15.283,96.347,3.653,88.30,1.000,bicubic,+5.967,+2.109,-32\ncs3darknet_m.c2ns_in1k,288,84.711,15.289,96.488,3.512,9.31,0.950,bicubic,+7.075,+2.472,+67\nfbnetv3_b.ra2_in1k,224,84.709,15.291,96.475,3.525,8.60,0.950,bilinear,+6.525,+2.237,+21\nswiftformer_s.dist_in1k,224,84.709,15.291,96.242,3.758,6.09,0.950,bicubic,+6.247,+2.262,-4\nlegacy_seresnet152.in1k,224,84.696,15.304,96.415,3.585,66.82,0.875,bilinear,+6.034,+2.045,-26\nbat_resnext26ts.ch_in1k,256,84.696,15.304,96.287,3.713,10.73,0.900,bicubic,+6.416,+2.173,+11\nhrnet_w32.ms_in1k,224,84.670,15.330,96.407,3.593,41.23,0.875,bilinear,+6.218,+2.223,-6\nconvnextv2_femto.fcmae_ft_in1k,224,84.664,15.336,96.430,3.570,5.23,0.875,bicubic,+6.188,+2.446,-9\nselecsls60b.in1k,224,84.655,15.345,96.296,3.704,32.77,0.875,bicubic,+6.245,+2.130,-2\nseresnext26d_32x4d.bt_in1k,288,84.634,15.366,96.268,3.732,16.81,0.950,bicubic,+5.818,+2.002,-48\nresnetv2_34d.ra4_e3600_r224_in1k,224,84.634,15.366,96.234,3.766,21.82,0.900,bicubic,+6.364,+2.284,+7\ntf_efficientnetv2_b0.in1k,224,84.632,15.368,96.281,3.719,7.14,0.875,bicubic,+6.246,+2.247,-3\nefficientnet_b1.ft_in1k,256,84.617,15.383,96.341,3.659,7.79,1.000,bicubic,+5.811,+1.995,-48\nregnetx_040.pycls_in1k,224,84.613,15.387,96.392,3.608,22.12,0.875,bicubic,+6.127,+2.308,-20\nefficientnet_es.ra_in1k,224,84.602,
15.398,96.311,3.689,5.44,0.875,bicubic,+6.502,+2.383,+15\nvit_relpos_base_patch32_plus_rpn_256.sw_in1k,256,84.593,15.407,96.020,3.980,119.42,0.900,bicubic,+5.085,+1.910,-130\nhrnet_w30.ms_in1k,224,84.589,15.411,96.392,3.608,37.71,0.875,bilinear,+6.387,+2.172,+4\nresnet50.bt_in1k,224,84.585,15.415,96.360,3.640,25.56,0.875,bicubic,+6.141,+2.086,-16\nregnety_032.pycls_in1k,224,84.578,15.422,96.432,3.568,19.44,0.875,bicubic,+5.676,+2.006,-67\nseresnext26t_32x4d.bt_in1k,288,84.564,15.437,96.383,3.617,16.81,0.950,bicubic,+5.828,+2.065,-48\nwide_resnet101_2.tv_in1k,224,84.561,15.439,96.347,3.653,126.89,0.875,bilinear,+5.699,+2.035,-63\ntf_mixnet_l.in1k,224,84.555,15.445,96.247,3.753,7.33,0.875,bicubic,+5.781,+2.247,-54\nhrnet_w18_small_v2.gluon_in1k,224,84.544,15.456,96.287,3.713,15.60,0.875,bicubic,+6.342,+2.381,-1\ndla60x.in1k,224,84.531,15.469,96.293,3.707,17.35,0.875,bilinear,+6.297,+2.271,-4\nseresnext26ts.ch_in1k,288,84.523,15.477,96.328,3.672,10.39,1.000,bicubic,+6.231,+2.252,-10\nvit_small_patch16_224.augreg_in1k,224,84.510,15.490,96.285,3.715,22.05,0.900,bicubic,+5.668,+2.005,-67\nlegacy_seresnet101.in1k,224,84.497,15.503,96.345,3.655,49.33,0.875,bilinear,+6.127,+2.239,-17\ncs3darknet_focus_m.c2ns_in1k,288,84.491,15.509,96.445,3.555,9.30,0.950,bicubic,+7.205,+2.479,+71\nresnet50.a3_in1k,224,84.480,15.520,95.986,4.014,25.56,0.950,bicubic,+6.428,+2.206,+5\nresnet26t.ra2_in1k,256,84.469,15.530,96.208,3.792,16.01,0.940,bicubic,+6.591,+2.370,+22\ncoat_lite_tiny.in1k,224,84.465,15.535,96.366,3.634,5.72,0.900,bicubic,+6.917,+2.442,+47\ntf_efficientnet_b1.in1k,240,84.461,15.539,96.086,3.914,7.79,0.882,bicubic,+5.915,+2.104,-44\ntf_efficientnet_em.in1k,240,84.448,15.552,96.178,3.822,6.90,0.882,bicubic,+6.320,+2.126,-5\ndpn68b.ra_in1k,224,84.446,15.554,96.159,3.841,12.61,0.950,bicubic,+5.900,+2.063,-45\nconvnext_femto_ols.d1_in1k,224,84.440,15.560,96.208,3.792,5.23,0.875,bicubic,+6.588,+2.382,+22\nrepvgg_b1.rvgg_in1k,224,84.420,15.580,96.212,3.788,57.42,0.875,bilinear,+6.05
0,+1.948,-25\nwide_resnet50_2.tv_in1k,224,84.414,15.586,96.249,3.751,68.88,0.875,bilinear,+5.928,+1.997,-42\nefficientnet_b1_pruned.in1k,240,84.399,15.601,96.125,3.875,6.33,0.882,bicubic,+6.147,+2.305,-20\nvit_base_patch16_224.augreg_in1k,224,84.388,15.612,96.042,3.958,86.57,0.900,bicubic,+5.234,+1.310,-109\nres2net50_26w_4s.in1k,224,84.380,15.620,96.082,3.918,25.70,0.875,bilinear,+6.386,+2.222,+2\nres2net50_14w_8s.in1k,224,84.326,15.674,96.061,3.939,25.06,0.875,bilinear,+6.184,+2.209,-14\nhardcorenas_f.miil_green_in1k,224,84.326,15.674,96.016,3.984,8.20,0.875,bilinear,+6.227,+2.216,-11\nhgnetv2_b0.ssld_stage2_ft_in1k,224,84.279,15.720,96.289,3.711,6.00,0.965,bicubic,+6.918,+2.481,+50\nselecsls60.in1k,224,84.275,15.725,96.082,3.918,30.67,0.875,bicubic,+6.301,+2.260,0\nmobilevit_s.cvnets_in1k,256,84.265,15.735,96.283,3.717,5.58,0.900,bicubic,+5.967,+2.115,-31\nresnet101.a3_in1k,160,84.260,15.740,95.960,4.040,44.55,0.950,bicubic,+6.332,+2.274,+3\nregnetx_032.pycls_in1k,224,84.243,15.757,96.251,3.749,15.30,0.875,bicubic,+6.083,+2.087,-21\nese_vovnet19b_dw.ra_in1k,288,84.241,15.759,96.253,3.747,6.54,0.950,bicubic,+6.453,+2.561,+14\neca_resnext26ts.ch_in1k,288,84.235,15.765,96.189,3.811,10.30,1.000,bicubic,+6.215,+2.257,-12\nmobileone_s3.apple_in1k,224,84.233,15.767,96.118,3.881,10.17,0.900,bilinear,+6.231,+2.254,-9\nconvnextv2_atto.fcmae_ft_in1k,288,84.230,15.770,96.056,3.943,3.71,0.950,bicubic,+6.436,+2.329,+10\nres2next50.in1k,224,84.220,15.780,96.007,3.993,24.67,0.875,bilinear,+5.982,+2.103,-33\nconvnext_atto_ols.a2_in1k,288,84.209,15.791,96.236,3.764,3.70,0.950,bicubic,+6.989,+2.536,+51\nresnet50c.gluon_in1k,224,84.201,15.800,96.170,3.830,25.58,0.875,bicubic,+6.184,+2.218,-16\ndla102.in1k,224,84.192,15.808,96.202,3.798,33.27,0.875,bilinear,+6.176,+2.208,-17\nmobileone_s2.apple_in1k,224,84.192,15.808,96.063,3.937,7.88,0.900,bilinear,+6.688,+2.399,+25\ngcresnext26ts.ch_in1k,256,84.190,15.810,96.067,3.933,10.48,0.900,bicubic,+6.380,+2.031,+3\ncs3darknet_m.c2ns_in1k,256
,84.171,15.829,96.247,3.753,9.31,0.887,bicubic,+7.197,+2.665,+62\necaresnet50t.a3_in1k,160,84.168,15.832,95.935,4.065,25.57,0.950,bicubic,+6.382,+2.315,+5\nrexnet_100.nav_in1k,224,84.154,15.846,96.268,3.732,4.80,0.875,bicubic,+6.292,+2.378,-6\ndensenetblur121d.ra_in1k,288,84.149,15.851,96.244,3.756,8.00,0.950,bicubic,+6.829,+2.448,+35\nfastvit_t8.apple_dist_in1k,256,84.149,15.851,96.059,3.941,4.03,0.900,bicubic,+6.979,+2.785,+46\nconvnext_atto.d2_in1k,288,84.147,15.853,96.198,3.803,3.70,0.950,bicubic,+7.131,+2.494,+55\nseresnext26ts.ch_in1k,256,84.139,15.861,96.065,3.935,10.39,0.900,bicubic,+6.279,+2.275,-9\ninception_v3.tf_in1k,299,84.136,15.864,95.916,4.084,23.83,0.875,bicubic,+6.270,+2.272,-12\ntf_efficientnetv2_b1.in1k,192,84.130,15.870,96.035,3.965,8.14,0.882,bicubic,+6.196,+2.213,-19\nresnet34.ra4_e3600_r224_in1k,224,84.126,15.874,95.856,4.144,21.80,0.900,bicubic,+6.634,+2.358,+16\nghostnetv2_160.in1k,224,84.113,15.887,96.208,3.792,12.39,0.875,bicubic,+6.277,+2.260,-11\nres2net50_48w_2s.in1k,224,84.113,15.887,95.969,4.031,25.29,0.875,bilinear,+6.585,+2.407,+11\ntf_efficientnet_lite2.in1k,260,84.111,15.889,96.069,3.931,6.09,0.890,bicubic,+6.641,+2.313,+16\nconvnext_femto.d1_in1k,224,84.107,15.893,96.191,3.809,5.22,0.875,bicubic,+6.609,+2.513,+10\nxcit_tiny_12_p16_224.fb_in1k,224,84.104,15.896,96.193,3.807,6.72,1.000,bicubic,+6.982,+2.465,+38\nresnet34d.ra2_in1k,224,84.096,15.904,95.971,4.029,21.82,0.875,bicubic,+6.988,+2.599,+38\nhgnetv2_b0.ssld_stage1_in22k_in1k,224,84.083,15.917,96.240,3.760,6.00,0.965,bicubic,+7.215,+2.620,+56\npoolformer_s12.sail_in1k,224,84.053,15.947,96.174,3.826,11.92,0.900,bicubic,+6.815,+2.636,+27\nresnetv2_34.ra4_e3600_r224_in1k,224,84.051,15.949,95.916,4.084,21.80,0.900,bicubic,+6.425,+2.398,-5\nefficientnet_b0.ra_in1k,224,84.040,15.960,95.967,4.033,5.29,0.875,bicubic,+6.342,+2.429,-11\nresnet34.a2_in1k,288,84.030,15.970,95.933,4.067,21.80,1.000,bicubic,+6.880,+2.645,+31\ncrossvit_9_dagger_240.in1k,240,84.025,15.975,96.076,3.924,8.78
,0.875,bicubic,+7.035,+2.474,+40\ntf_efficientnet_cc_b0_8e.in1k,224,84.010,15.989,96.040,3.961,24.01,0.875,bicubic,+6.054,+2.401,-35\nstarnet_s3.in1k,224,84.006,15.994,96.125,3.875,5.75,0.875,bicubic,+6.628,+2.509,+11\nresnext50_32x4d.tv_in1k,224,83.989,16.011,95.960,4.040,25.03,0.875,bilinear,+6.357,+2.286,-12\nresnet50.gluon_in1k,224,83.981,16.019,96.027,3.973,25.56,0.875,bicubic,+6.401,+2.323,-6\ncs3darknet_focus_m.c2ns_in1k,256,83.976,16.024,96.061,3.939,9.30,0.887,bicubic,+7.222,+2.511,+50\nregnety_016.pycls_in1k,224,83.972,16.028,95.999,4.001,11.20,0.875,bicubic,+6.092,+2.273,-35\ngmixer_24_224.ra3_in1k,224,83.972,16.028,95.852,4.148,24.72,0.875,bicubic,+5.924,+2.198,-50\nhardcorenas_e.miil_green_in1k,224,83.970,16.030,95.913,4.087,8.07,0.875,bilinear,+6.182,+2.141,-24\ndensenet161.tv_in1k,224,83.944,16.056,96.014,3.986,28.68,0.875,bicubic,+6.560,+2.358,+3\nmobilenetv1_125.ra4_e3600_r224_in1k,256,83.923,16.077,96.099,3.901,6.27,1.000,bicubic,+6.311,+2.345,-17\nmobilenetv2_120d.ra_in1k,224,83.904,16.096,95.909,4.091,5.83,0.875,bicubic,+6.604,+2.393,+7\nseresnext26t_32x4d.bt_in1k,224,83.889,16.111,95.948,4.052,16.81,0.875,bicubic,+5.909,+2.198,-48\ninception_v3.tf_adv_in1k,299,83.878,16.122,95.935,4.065,23.83,0.875,bicubic,+6.280,+2.209,-18\nresnext50_32x4d.a3_in1k,160,83.878,16.122,95.625,4.375,25.03,0.950,bicubic,+6.146,+2.311,-28\nresnet101.tv_in1k,224,83.863,16.137,95.892,4.108,44.55,0.875,bilinear,+6.459,+2.340,-4\ntinynet_a.in1k,192,83.835,16.165,95.817,4.183,6.19,0.875,bicubic,+6.169,+2.285,-28\nresnet26d.bt_in1k,288,83.808,16.192,95.973,4.027,16.01,0.950,bicubic,+6.386,+2.339,-7\nhardcorenas_d.miil_green_in1k,224,83.795,16.205,95.717,4.283,7.50,0.875,bilinear,+6.345,+2.237,-10\ninception_v3.tv_in1k,299,83.791,16.209,95.894,4.106,23.83,0.875,bicubic,+6.319,+2.426,-13\ndpn68b.mx_in1k,224,83.761,16.239,95.975,4.025,12.61,0.875,bicubic,+6.267,+2.141,-17\nxcit_nano_12_p8_224.fb_dist_in1k,224,83.748,16.252,95.941,4.059,3.05,1.000,bicubic,+7.418,+2.871,+56\nser
esnext26d_32x4d.bt_in1k,224,83.742,16.259,95.839,4.161,16.81,0.875,bicubic,+6.150,+2.225,-25\ndla60.in1k,224,83.724,16.276,95.922,4.078,22.04,0.875,bilinear,+6.688,+2.850,+14\nshvit_s3.in1k,224,83.722,16.278,95.884,4.116,14.25,0.875,bicubic,+6.366,+2.574,-9\nresnext26ts.ra2_in1k,288,83.703,16.297,95.975,4.025,10.30,1.000,bicubic,+6.517,+2.505,+1\nrepvgg_b1g4.rvgg_in1k,224,83.699,16.301,96.025,3.975,39.97,0.875,bilinear,+6.091,+2.178,-32\neca_resnext26ts.ch_in1k,256,83.695,16.305,95.924,4.076,10.30,0.900,bicubic,+6.249,+2.350,-18\nconvmixer_1024_20_ks9_p14.in1k,224,83.684,16.316,95.894,4.106,24.38,0.960,bicubic,+6.740,+2.530,+16\nskresnet34.ra_in1k,224,83.680,16.320,95.918,4.082,22.28,0.875,bicubic,+6.724,+2.598,+13\nregnetx_008.tv2_in1k,224,83.675,16.325,95.986,4.014,7.26,0.965,bicubic,+6.381,+2.322,-11\nlegacy_seresnet50.in1k,224,83.669,16.331,95.971,4.029,28.09,0.875,bilinear,+6.019,+2.239,-42\ntf_efficientnet_b0.ap_in1k,224,83.658,16.342,95.794,4.206,5.29,0.875,bicubic,+6.558,+2.462,+2\nseresnet50.a3_in1k,224,83.645,16.355,95.679,4.321,28.09,0.950,bicubic,+6.609,+2.363,+5\nrepghostnet_150.in1k,224,83.643,16.357,95.911,4.089,6.58,0.875,bicubic,+6.169,+2.415,-29\nresnetrs50.tf_in1k,160,83.641,16.359,95.832,4.168,35.69,0.910,bicubic,+5.783,+2.022,-58\ntf_efficientnet_cc_b0_4e.in1k,224,83.639,16.361,95.732,4.268,13.31,0.875,bicubic,+6.315,+2.398,-20\nresnet50d.a3_in1k,160,83.633,16.367,95.638,4.362,25.58,0.950,bicubic,+6.411,+2.382,-13\ndensenet121.ra_in1k,288,83.609,16.391,96.054,3.946,7.98,0.950,bicubic,+7.123,+2.676,+34\nefficientnet_b1.ft_in1k,224,83.590,16.410,95.770,4.230,7.79,0.875,bicubic,+5.998,+2.134,-43\nmobilenetv3_large_100.ra4_e3600_r224_in1k,256,83.571,16.429,95.755,4.245,5.48,1.000,bicubic,+6.389,+2.427,-12\nresmlp_12_224.fb_in1k,224,83.566,16.434,95.760,4.240,15.35,0.875,bicubic,+6.906,+2.590,+18\ntf_efficientnet_b0.aa_in1k,224,83.552,16.448,95.758,4.242,5.29,0.875,bicubic,+6.669,+2.488,+6\ndensenet201.tv_in1k,224,83.549,16.451,95.805,4.195,20.01,0.8
75,bicubic,+6.265,+2.325,-22\nmobilenetv3_large_100.miil_in21k_ft_in1k,224,83.543,16.457,95.435,4.565,5.48,0.875,bilinear,+5.623,+2.541,-73\nmixnet_m.ft_in1k,224,83.519,16.481,95.698,4.302,5.01,0.875,bicubic,+6.251,+2.280,-23\nhrnet_w18.ms_in1k,224,83.517,16.483,95.894,4.106,21.30,0.875,bilinear,+6.747,+2.446,+7\nlegacy_seresnext26_32x4d.in1k,224,83.513,16.487,95.738,4.262,16.79,0.875,bicubic,+6.413,+2.480,-14\ngernet_s.idstcv_in1k,224,83.502,16.498,95.792,4.208,8.17,0.875,bilinear,+6.628,+2.650,+2\nresnet34.bt_in1k,288,83.485,16.515,95.960,4.040,21.80,0.950,bicubic,+6.979,+2.618,+21\ndensenetblur121d.ra_in1k,224,83.485,16.515,95.813,4.187,8.00,0.875,bicubic,+6.911,+2.625,+14\nresnext26ts.ra2_in1k,256,83.466,16.534,95.734,4.266,10.30,0.900,bicubic,+6.698,+2.596,+3\nselecsls42b.in1k,224,83.464,16.536,95.743,4.257,32.46,0.875,bicubic,+6.280,+2.353,-25\nefficientvit_m5.r224_in1k,224,83.453,16.547,95.807,4.193,12.47,0.875,bicubic,+6.361,+2.639,-17\nresnet34.a1_in1k,224,83.449,16.551,95.510,4.490,21.80,0.950,bicubic,+7.029,+2.614,+19\nefficientformerv2_s0.snap_dist_in1k,224,83.398,16.602,95.811,4.189,3.60,0.950,bicubic,+7.296,+2.961,+27\nhardcorenas_c.miil_green_in1k,224,83.379,16.622,95.702,4.298,5.52,0.875,bilinear,+6.287,+2.510,-21\nghostnetv2_130.in1k,224,83.359,16.641,95.832,4.168,8.96,0.875,bicubic,+6.613,+2.468,-1\ntf_efficientnet_lite1.in1k,240,83.344,16.656,95.649,4.351,5.42,0.882,bicubic,+6.670,+2.417,0\ntf_efficientnetv2_b0.in1k,192,83.276,16.724,95.636,4.364,7.14,0.875,bicubic,+6.400,+2.452,-10\nmobilenetv1_125.ra4_e3600_r224_in1k,224,83.270,16.730,95.662,4.338,6.27,0.900,bicubic,+6.350,+2.430,-13\nfastvit_t8.apple_in1k,256,83.240,16.760,95.828,4.172,4.03,0.900,bicubic,+7.056,+2.786,+19\nghostnetv3_100.in1k,224,83.227,16.773,95.514,4.486,8.13,0.875,bicubic,+6.291,+2.390,-16\nresnetv2_18d.ra4_e3600_r224_in1k,288,83.193,16.807,95.745,4.255,11.71,1.000,bicubic,+7.135,+2.735,+20\ntf_efficientnet_es.in1k,224,83.193,16.807,95.583,4.417,5.44,0.875,bicubic,+6.585,+2.
407,-1\nmobilenetv2_140.ra_in1k,224,83.191,16.809,95.668,4.332,6.11,0.875,bicubic,+6.663,+2.660,+2\ntf_mixnet_m.in1k,224,83.191,16.809,95.482,4.518,5.01,0.875,bicubic,+6.239,+2.318,-22\nconvnextv2_atto.fcmae_ft_in1k,224,83.189,16.811,95.521,4.479,3.71,0.875,bicubic,+6.544,+2.479,-7\nregnetx_016.pycls_in1k,224,83.182,16.818,95.751,4.249,9.19,0.875,bicubic,+6.246,+2.319,-24\ndpn68.mx_in1k,224,83.182,16.818,95.632,4.368,12.61,0.875,bicubic,+6.878,+2.638,+11\nmobilenetv1_100h.ra4_e3600_r224_in1k,256,83.159,16.841,95.715,4.285,5.28,0.950,bicubic,+6.561,+2.443,-7\nxcit_nano_12_p16_384.fb_dist_in1k,384,83.139,16.861,95.732,4.268,3.05,1.000,bicubic,+7.679,+3.060,+39\nlevit_conv_128s.fb_dist_in1k,224,83.118,16.882,95.585,4.415,7.78,0.900,bicubic,+6.584,+2.703,-5\nresnet18d.ra4_e3600_r224_in1k,288,83.109,16.890,95.698,4.302,11.71,1.000,bicubic,+7.097,+2.914,+12\nese_vovnet19b_dw.ra_in1k,224,83.101,16.899,95.794,4.206,6.54,0.875,bicubic,+6.273,+2.518,-22\nlevit_128s.fb_dist_in1k,224,83.078,16.922,95.574,4.426,7.78,0.900,bicubic,+6.558,+2.704,-6\nresnet26d.bt_in1k,224,83.052,16.948,95.606,4.394,16.01,0.875,bicubic,+6.354,+2.450,-19\nconvnext_atto_ols.a2_in1k,224,83.005,16.995,95.536,4.464,3.70,0.875,bicubic,+7.103,+2.694,+16\nrepvgg_a2.rvgg_in1k,224,82.996,17.004,95.602,4.398,28.21,0.875,bilinear,+6.508,+2.586,-7\nconvnext_atto.d2_in1k,224,82.979,17.021,95.706,4.294,3.70,0.875,bicubic,+7.307,+2.792,+18\nresnet50.tv_in1k,224,82.969,17.031,95.474,4.526,25.56,0.875,bilinear,+6.813,+2.608,+1\nresnet26.bt_in1k,288,82.941,17.059,95.730,4.270,16.00,0.950,bicubic,+6.557,+2.552,-7\nmobilenetv1_100.ra4_e3600_r224_in1k,256,82.926,17.074,95.632,4.368,4.23,0.950,bicubic,+6.838,+2.626,+1\nmobilenetv3_large_100.ra4_e3600_r224_in1k,224,82.919,17.081,95.401,4.599,5.48,0.950,bicubic,+6.609,+2.559,-5\nrepghostnet_130.in1k,224,82.898,17.102,95.446,4.554,5.48,0.875,bicubic,+6.520,+2.548,-9\nhardcorenas_b.miil_green_in1k,224,82.866,17.134,95.388,4.612,5.18,0.875,bilinear,+6.316,+2.622,-20\nmobileone
_s1.apple_in1k,224,82.845,17.155,95.523,4.477,4.83,0.900,bilinear,+7.085,+2.735,+9\ndensenet121.ra_in1k,224,82.815,17.185,95.583,4.417,7.98,0.875,bicubic,+7.241,+2.935,+18\nmobilevitv2_075.cvnets_in1k,256,82.793,17.206,95.568,4.432,2.87,0.888,bicubic,+7.181,+2.814,+14\nresnetv2_18.ra4_e3600_r224_in1k,288,82.787,17.213,95.504,4.496,11.69,1.000,bicubic,+7.423,+2.812,+25\nresnet50.a3_in1k,160,82.766,17.234,95.085,4.915,25.56,0.950,bicubic,+6.772,+2.591,-2\nvit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k,384,82.687,17.313,95.860,4.140,6.36,1.000,bicubic,+6.725,+2.588,-2\nmobilenetv4_conv_small.e3600_r256_in1k,320,82.687,17.313,95.546,4.454,3.77,1.000,bicubic,+7.071,+2.778,+8\ndensenet169.tv_in1k,224,82.674,17.326,95.568,4.432,14.15,0.875,bicubic,+6.752,+2.760,-2\nregnety_004.tv2_in1k,224,82.618,17.382,95.508,4.492,4.34,0.965,bicubic,+7.024,+2.802,+10\nfasternet_t1.in1k,224,82.593,17.407,95.497,4.503,7.60,1.000,bicubic,+6.671,+2.521,-3\ntf_efficientnet_b0.in1k,224,82.591,17.409,95.420,4.580,5.29,0.875,bicubic,+6.049,+2.412,-30\nresnet34.a2_in1k,224,82.574,17.426,95.249,4.750,21.80,0.950,bicubic,+7.054,+2.796,+9\nedgenext_x_small.in1k,288,82.569,17.431,95.427,4.573,2.34,1.000,bicubic,+6.863,+2.667,-2\ninception_next_atto.sail_in1k,224,82.554,17.446,95.350,4.650,4.16,0.875,bicubic,+7.206,+2.792,+16\nmixnet_s.ft_in1k,224,82.529,17.471,95.363,4.637,4.13,0.875,bicubic,+6.529,+2.567,-13\nregnety_008.pycls_in1k,224,82.507,17.493,95.489,4.511,6.26,0.875,bicubic,+6.173,+2.429,-25\nvit_small_patch32_224.augreg_in21k_ft_in1k,224,82.503,17.497,95.674,4.326,22.88,0.900,bicubic,+6.493,+2.408,-16\nmobilenetv1_100h.ra4_e3600_r224_in1k,224,82.403,17.597,95.181,4.819,5.28,0.875,bicubic,+6.739,+2.659,-5\nresnest14d.gluon_in1k,224,82.377,17.623,95.333,4.667,10.61,0.875,bilinear,+6.861,+2.821,+3\nefficientnet_lite0.ra_in1k,224,82.377,17.623,95.279,4.721,4.65,0.875,bicubic,+6.889,+2.771,+4\nhardcorenas_a.miil_green_in1k,224,82.335,17.666,95.271,4.729,5.26,0.875,bilinear,+6.409,+2.785,-16\nmobilene
tv1_100.ra4_e3600_r224_in1k,224,82.292,17.708,95.053,4.947,4.23,0.875,bicubic,+6.904,+2.743,+6\nmobilenetv3_rw.rmsp_in1k,224,82.270,17.730,95.177,4.823,5.48,0.875,bicubic,+6.654,+2.481,-7\nefficientnet_es_pruned.in1k,224,82.268,17.732,95.301,4.699,5.44,0.875,bicubic,+7.258,+2.863,+19\nsemnasnet_100.rmsp_in1k,224,82.264,17.736,95.230,4.770,3.89,0.875,bicubic,+6.812,+2.372,+2\nmobilenetv3_large_100.ra_in1k,224,82.168,17.832,95.190,4.810,5.48,0.875,bicubic,+6.384,+2.662,-17\nswiftformer_xs.dist_in1k,224,82.166,17.834,95.179,4.821,3.48,0.950,bicubic,+6.566,+2.865,-9\nresnet34.bt_in1k,224,82.164,17.836,95.055,4.945,21.80,0.875,bicubic,+6.988,+2.889,+7\nmobilenetv2_110d.ra_in1k,224,82.080,17.920,95.064,4.936,4.52,0.875,bicubic,+7.024,+2.878,+12\nvit_tiny_patch16_224.augreg_in21k_ft_in1k,224,82.061,17.939,95.493,4.507,5.72,0.900,bicubic,+6.609,+2.885,-4\nshvit_s2.in1k,224,82.027,17.973,95.190,4.810,11.48,0.875,bicubic,+6.843,+2.874,+3\ntf_mixnet_s.in1k,224,82.027,17.973,95.119,4.881,4.13,0.875,bicubic,+6.371,+2.487,-18\nrepvgg_b0.rvgg_in1k,224,82.019,17.982,95.117,4.883,15.82,0.875,bilinear,+6.861,+2.707,+3\ndeit_tiny_distilled_patch16_224.fb_in1k,224,82.012,17.988,95.145,4.855,5.91,0.900,bicubic,+7.474,+3.247,+22\nmixer_b16_224.goog_in21k_ft_in1k,224,82.012,17.988,94.455,5.545,59.88,0.875,bicubic,+5.396,+2.205,-58\nhrnet_w18_small_v2.ms_in1k,224,81.969,18.031,95.166,4.834,15.60,0.875,bilinear,+6.875,+2.752,+3\nstarnet_s2.in1k,224,81.967,18.033,95.341,4.659,3.68,0.875,bicubic,+7.301,+3.197,+14\ntf_efficientnet_lite0.in1k,224,81.957,18.043,95.162,4.838,4.65,0.875,bicubic,+7.111,+2.982,+9\nresnet26.bt_in1k,224,81.937,18.063,95.249,4.750,16.00,0.875,bicubic,+6.639,+2.671,-8\nghostnetv2_100.in1k,224,81.916,18.084,95.106,4.894,6.16,0.875,bicubic,+6.730,+2.772,-7\nseresnet50.a3_in1k,160,81.892,18.108,94.961,5.039,28.09,0.950,bicubic,+6.788,+2.865,-3\nedgenext_x_small.in1k,256,81.890,18.110,95.030,4.970,2.34,0.900,bicubic,+7.016,+2.726,+4\ntf_mobilenetv3_large_100.in1k,224,81.854
,18.146,95.068,4.932,5.48,0.875,bilinear,+6.344,+2.468,-20\ntinynet_b.in1k,188,81.835,18.165,94.880,5.120,3.73,0.875,bicubic,+6.889,+2.686,0\npit_ti_distilled_224.in1k,224,81.790,18.210,95.083,4.917,5.10,0.900,bicubic,+7.514,+3.163,+19\nrepghostnet_111.in1k,224,81.756,18.244,94.831,5.169,4.54,0.875,bicubic,+6.696,+2.635,-6\ndensenet121.tv_in1k,224,81.730,18.270,95.032,4.968,7.98,0.875,bicubic,+6.980,+2.874,+2\nregnety_006.pycls_in1k,224,81.711,18.289,95.109,4.891,6.06,0.875,bicubic,+6.437,+2.577,-16\nregnetx_004_tv.tv2_in1k,224,81.692,18.308,95.025,4.975,5.50,0.965,bicubic,+7.096,+2.885,+6\nxcit_nano_12_p8_224.fb_in1k,224,81.675,18.325,95.258,4.742,3.05,1.000,bicubic,+7.775,+3.104,+23\nresnet18d.ra2_in1k,288,81.673,18.327,95.044,4.955,11.71,0.950,bicubic,+7.883,+3.210,+23\nmobilenetv4_conv_small.e2400_r224_in1k,256,81.668,18.332,94.991,5.009,3.77,0.950,bicubic,+7.024,+2.969,0\nresnet18d.ra4_e3600_r224_in1k,224,81.660,18.340,94.869,5.131,11.71,0.900,bicubic,+7.304,+3.043,+8\ndla34.in1k,224,81.653,18.347,94.863,5.137,15.74,0.875,bilinear,+7.021,+2.811,-1\ncrossvit_9_240.in1k,240,81.641,18.359,94.985,5.015,8.55,0.875,bicubic,+7.647,+3.009,+14\nresnetv2_18d.ra4_e3600_r224_in1k,224,81.638,18.362,94.993,5.007,11.71,0.900,bicubic,+7.216,+3.065,+4\nfbnetc_100.rmsp_in1k,224,81.555,18.445,94.951,5.049,5.57,0.875,bilinear,+6.435,+2.567,-20\nlegacy_seresnet34.in1k,224,81.544,18.456,94.901,5.098,21.96,0.875,bilinear,+6.757,+2.767,-10\nmobilevit_xs.cvnets_in1k,256,81.542,18.458,95.042,4.958,2.32,0.900,bicubic,+6.924,+2.692,-5\nmobilenetv4_conv_small.e3600_r256_in1k,256,81.540,18.460,94.886,5.114,3.77,0.950,bicubic,+7.006,+2.947,-2\nregnetx_008.pycls_in1k,224,81.513,18.488,95.057,4.943,7.26,0.875,bicubic,+6.459,+2.719,-19\nmnasnet_100.rmsp_in1k,224,81.472,18.528,94.895,5.105,4.38,0.875,bicubic,+6.800,+2.787,-12\nresnet34.gluon_in1k,224,81.472,18.528,94.797,5.203,21.80,0.875,bicubic,+6.898,+2.809,-7\nefficientvit_m4.r224_in1k,224,81.455,18.545,94.981,5.019,8.80,0.875,bicubic,+7.111
,+3.021,-2\nvgg19_bn.tv_in1k,224,81.438,18.562,94.775,5.224,143.68,0.875,bilinear,+7.208,+2.927,0\nmobilenetv4_conv_small.e1200_r224_in1k,256,81.416,18.584,95.059,4.941,3.77,0.950,bicubic,+7.134,+2.938,-3\nrepvgg_a1.rvgg_in1k,224,81.275,18.724,94.705,5.295,14.09,0.875,bilinear,+6.801,+2.853,-8\nvit_base_patch32_224.augreg_in1k,224,81.145,18.855,94.436,5.564,88.22,0.900,bicubic,+6.237,+2.666,-23\nconvit_tiny.fb_in1k,224,81.132,18.868,95.057,4.943,5.71,0.875,bicubic,+7.992,+3.353,+20\nstarnet_s1.in1k,224,81.118,18.883,94.780,5.220,2.87,0.875,bicubic,+7.586,+3.276,+8\nresnetv2_18.ra4_e3600_r224_in1k,224,81.118,18.883,94.547,5.453,11.69,0.900,bicubic,+7.546,+3.191,+6\ncrossvit_tiny_240.in1k,240,81.107,18.893,94.976,5.024,7.01,0.875,bicubic,+7.755,+3.070,+11\nresnet18.a1_in1k,288,81.047,18.953,94.372,5.628,11.69,1.000,bicubic,+7.883,+3.318,+14\nrepghostnet_100.in1k,224,80.938,19.062,94.530,5.470,4.07,0.875,bicubic,+6.730,+2.980,-8\ncs3darknet_focus_s.ra4_e3600_r256_in1k,320,80.925,19.075,94.778,5.222,3.27,1.000,bicubic,+7.767,+3.320,+13\nspnasnet_100.rmsp_in1k,224,80.863,19.137,94.519,5.481,4.42,0.875,bilinear,+6.777,+2.693,-9\nresnet34.a3_in1k,224,80.842,19.158,94.370,5.630,21.80,0.950,bicubic,+7.846,+3.254,+15\nmobilenetv4_conv_small.e2400_r224_in1k,224,80.834,19.166,94.598,5.402,3.77,0.875,bicubic,+7.078,+3.168,-3\nefficientvit_m3.r224_in1k,224,80.703,19.297,94.543,5.457,6.90,0.875,bicubic,+7.309,+3.207,+2\nghostnet_100.in1k,224,80.673,19.327,94.368,5.632,5.18,0.875,bicubic,+6.715,+2.826,-10\nregnetx_006.pycls_in1k,224,80.637,19.363,94.530,5.470,6.20,0.875,bicubic,+6.733,+2.890,-9\nskresnet18.ra_in1k,224,80.629,19.371,94.383,5.617,11.96,0.875,bicubic,+7.609,+3.211,+8\nregnety_004.pycls_in1k,224,80.622,19.378,94.694,5.306,4.34,0.875,bicubic,+6.618,+2.938,-15\npit_ti_224.in1k,224,80.603,19.397,94.598,5.402,4.85,0.900,bicubic,+7.677,+3.186,+9\nresnet18.fb_swsl_ig1b_ft_in1k,224,80.564,19.436,94.758,5.242,11.69,0.875,bilinear,+7.278,+3.004,0\nconvnext_zepto_rms_ols.ra4_e36
00_r224_in1k,224,80.562,19.438,94.380,5.620,2.16,0.900,bicubic,+7.328,+3.111,0\nmobilenetv4_conv_small.e1200_r224_in1k,224,80.543,19.457,94.383,5.617,3.77,0.875,bicubic,+7.093,+3.043,-8\nvgg16_bn.tv_in1k,224,80.532,19.468,94.577,5.423,138.37,0.875,bilinear,+7.178,+3.087,-6\nsemnasnet_075.rmsp_in1k,224,80.492,19.508,94.329,5.671,2.91,0.875,bicubic,+7.492,+3.205,+2\nhrnet_w18_small.gluon_in1k,224,80.406,19.593,94.024,5.976,13.19,0.875,bicubic,+6.472,+2.846,-19\nresnet18d.ra2_in1k,224,80.400,19.600,94.257,5.743,11.71,0.875,bicubic,+8.104,+3.575,+13\nresnet34.tv_in1k,224,80.396,19.604,94.408,5.592,21.80,0.875,bilinear,+7.098,+2.986,-8\nconvnext_zepto_rms.ra4_e3600_r224_in1k,224,80.321,19.679,94.212,5.788,2.16,0.875,bicubic,+7.501,+3.106,+2\nresnet18.a2_in1k,288,80.289,19.711,94.099,5.901,11.69,1.000,bicubic,+7.925,+3.479,+7\nxcit_nano_12_p16_224.fb_dist_in1k,224,80.246,19.754,94.361,5.639,3.05,1.000,bicubic,+7.928,+3.535,+8\nmobilenetv2_100.ra_in1k,224,80.225,19.775,94.216,5.784,3.50,0.875,bicubic,+7.315,+3.216,-2\nvit_base_patch32_224.sam_in1k,224,80.216,19.784,93.823,6.177,88.22,0.900,bicubic,+6.513,+2.811,-21\nshvit_s1.in1k,224,80.169,19.831,94.240,5.760,6.33,0.875,bicubic,+7.389,+3.216,-2\nresnet18.fb_ssl_yfcc100m_ft_in1k,224,80.112,19.888,94.590,5.410,11.69,0.875,bilinear,+7.476,+3.174,-2\ntf_mobilenetv3_large_075.in1k,224,80.078,19.922,94.182,5.818,3.99,0.875,bilinear,+6.636,+2.850,-20\ncs3darknet_focus_s.ra4_e3600_r256_in1k,256,80.046,19.954,94.340,5.660,3.27,0.887,bicubic,+7.868,+3.510,+8\ndeit_tiny_patch16_224.fb_in1k,224,80.026,19.974,94.464,5.536,5.72,0.900,bicubic,+7.837,+3.364,+6\nrepvgg_a0.rvgg_in1k,224,79.542,20.458,93.781,6.219,9.11,0.875,bilinear,+7.116,+3.275,-4\nhrnet_w18_small.ms_in1k,224,79.533,20.467,93.911,6.089,13.19,0.875,bilinear,+7.193,+3.223,-2\nvgg19.tv_in1k,224,79.486,20.514,93.868,6.132,143.67,0.875,bilinear,+7.086,+2.990,-5\nregnetx_004.pycls_in1k,224,79.465,20.535,93.879,6.121,5.16,0.875,bicubic,+6.983,+3.039,-8\nresnet18.a1_in1k,224,79.
450,20.550,93.655,6.345,11.69,0.950,bicubic,+7.946,+3.569,+11\ntf_mobilenetv3_large_minimal_100.in1k,224,79.251,20.749,93.738,6.262,3.92,0.875,bilinear,+6.981,+3.074,-3\nedgenext_xx_small.in1k,288,79.211,20.789,93.823,6.177,1.33,1.000,bicubic,+7.325,+3.289,+3\nlegacy_seresnet18.in1k,224,79.155,20.845,93.774,6.226,11.78,0.875,bicubic,+7.413,+3.434,+4\nresnet14t.c3_in1k,224,79.126,20.875,93.599,6.401,10.08,0.950,bicubic,+6.883,+3.281,-5\nrepghostnet_080.in1k,224,79.113,20.887,93.682,6.318,3.28,0.875,bicubic,+6.873,+3.190,-5\nvgg16.tv_in1k,224,79.029,20.971,93.653,6.348,138.36,0.875,bilinear,+7.435,+3.255,+3\nvit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k,224,79.014,20.985,93.866,6.134,6.34,0.900,bicubic,+7.215,+3.056,-1\nvgg13_bn.tv_in1k,224,78.985,21.015,93.640,6.360,133.05,0.875,bilinear,+7.425,+3.268,+2\nlcnet_100.ra2_in1k,224,78.914,21.086,93.544,6.456,2.95,0.875,bicubic,+6.784,+3.168,-6\npvt_v2_b0.in1k,224,78.741,21.259,93.847,6.153,3.67,0.900,bicubic,+8.087,+3.641,+9\nedgenext_xx_small.in1k,256,78.698,21.302,93.503,6.497,1.33,0.900,bicubic,+7.566,+3.461,+5\nefficientvit_m2.r224_in1k,224,78.615,21.385,93.554,6.446,4.19,0.875,bicubic,+7.819,+3.402,+6\nfasternet_t0.in1k,224,78.553,21.447,93.465,6.535,3.91,1.000,bicubic,+6.821,+3.391,-5\nmobileone_s0.apple_in1k,224,78.508,21.492,93.334,6.666,5.29,0.875,bilinear,+7.110,+3.470,-1\nresnet18.a2_in1k,224,78.496,21.504,93.304,6.696,11.69,0.950,bicubic,+7.862,+3.818,+5\nefficientvit_b0.r224_in1k,224,78.434,21.566,92.807,7.193,3.41,0.950,bicubic,+7.026,+3.373,-4\ntinynet_c.in1k,184,78.421,21.579,93.136,6.864,2.46,0.875,bicubic,+7.207,+3.390,-2\nresnet18.gluon_in1k,224,78.387,21.613,93.132,6.869,11.69,0.875,bicubic,+7.543,+3.382,-1\nresnet14t.c3_in1k,176,78.329,21.671,93.140,6.860,10.08,0.875,bicubic,+7.011,+3.460,-5\nresnet34.a3_in1k,160,78.306,21.694,93.221,6.779,21.80,0.950,bicubic,+7.726,+3.681,+1\nmobilevitv2_050.cvnets_in1k,256,78.128,21.872,93.584,6.416,1.37,0.888,bicubic,+7.972,+3.654,+3\nvgg11_bn.tv_in1k,224,77.945,22.0
55,93.225,6.775,132.87,0.875,bilinear,+7.571,+3.423,0\nxcit_nano_12_p16_224.fb_in1k,224,77.906,22.094,93.435,6.565,3.05,1.000,bicubic,+7.924,+3.655,+2\nregnety_002.pycls_in1k,224,77.413,22.587,92.916,7.084,3.16,0.875,bicubic,+7.125,+3.372,-1\nmixer_l16_224.goog_in21k_ft_in1k,224,77.306,22.694,90.544,9.456,208.20,0.875,bicubic,+5.222,+2.912,-21\nresnet18.tv_in1k,224,77.291,22.709,92.760,7.240,11.69,0.875,bilinear,+7.540,+3.678,+1\nvgg13.tv_in1k,224,77.238,22.762,92.707,7.293,133.05,0.875,bilinear,+7.288,+3.445,-1\nmobilevit_xxs.cvnets_in1k,256,76.604,23.396,92.677,7.323,1.27,0.900,bicubic,+7.668,+3.735,+2\nresnet18.a3_in1k,224,76.446,23.554,92.237,7.763,11.69,0.950,bicubic,+8.196,+4.065,+6\nefficientvit_m1.r224_in1k,224,76.397,23.603,92.551,7.449,2.98,0.875,bicubic,+8.057,+3.859,+3\nvgg11.tv_in1k,224,76.397,23.603,92.160,7.840,132.86,0.875,bilinear,+7.347,+3.530,-2\nrepghostnet_058.in1k,224,76.262,23.738,92.062,7.938,2.55,0.875,bicubic,+7.324,+3.676,-3\nresnet10t.c3_in1k,224,76.168,23.832,92.228,7.772,5.44,0.950,bicubic,+7.814,+4.194,0\nregnetx_002.pycls_in1k,224,76.119,23.881,92.205,7.795,2.68,0.875,bicubic,+7.361,+3.637,-2\nlcnet_075.ra2_in1k,224,76.053,23.947,92.083,7.917,2.36,0.875,bicubic,+7.267,+3.699,-4\ndla60x_c.in1k,224,75.671,24.329,92.164,7.836,1.32,0.875,bilinear,+7.743,+3.738,0\nmobilenetv3_small_100.lamb_in1k,224,74.887,25.113,91.470,8.530,2.54,0.875,bicubic,+7.251,+3.832,+1\ntf_mobilenetv3_small_100.in1k,224,74.727,25.273,91.274,8.726,2.54,0.875,bilinear,+6.805,+3.588,-1\nresnet10t.c3_in1k,176,74.541,25.459,91.176,8.824,5.44,0.875,bicubic,+7.817,+4.200,+2\ntinynet_d.in1k,152,74.264,25.736,90.877,9.123,2.34,0.875,bicubic,+7.328,+3.799,0\nrepghostnet_050.in1k,224,74.257,25.742,90.826,9.174,2.31,0.875,bicubic,+7.279,+3.890,-2\nmnasnet_small.lamb_in1k,224,73.809,26.191,90.706,9.294,2.03,0.875,bicubic,+7.605,+4.236,0\ndla46x_c.in1k,224,73.645,26.355,91.122,8.878,1.07,0.875,bilinear,+7.637,+4.176,0\nresnet18.a3_in1k,160,73.634,26.366,90.618,9.382,11.69,0.950
,bicubic,+7.964,+4.350,+3\nmobilenetv2_050.lamb_in1k,224,73.446,26.554,90.322,9.678,1.97,0.875,bicubic,+7.518,+4.208,-1\nmobilenetv4_conv_small_050.e3000_r224_in1k,256,73.418,26.581,90.687,9.313,2.24,0.950,bicubic,+7.593,+4.271,-1\ntf_mobilenetv3_small_075.in1k,224,72.810,27.190,90.012,9.988,2.04,0.875,bilinear,+7.104,+3.882,-1\ndla46_c.in1k,224,72.611,27.389,90.503,9.497,1.30,0.875,bilinear,+7.731,+4.173,+1\nmobilenetv3_small_075.lamb_in1k,224,72.336,27.664,89.694,10.306,2.04,0.875,bicubic,+7.052,+4.232,-1\nmobilenetv4_conv_small_050.e3000_r224_in1k,224,72.169,27.831,89.831,10.169,2.24,0.875,bicubic,+7.391,+4.331,0\nefficientvit_m0.r224_in1k,224,71.117,28.883,89.579,10.421,2.35,0.875,bicubic,+7.813,+4.439,0\nlcnet_050.ra2_in1k,224,70.446,29.554,88.827,11.173,1.88,0.875,bicubic,+7.308,+4.425,0\ntf_mobilenetv3_small_minimal_100.in1k,224,70.069,29.931,88.518,11.482,2.04,0.875,bilinear,+7.176,+4.248,0\ntinynet_e.in1k,106,66.832,33.168,86.259,13.741,2.04,0.875,bicubic,+6.958,+4.485,0\nmobilenetv3_small_050.lamb_in1k,224,64.699,35.301,84.839,15.161,1.59,0.875,bicubic,+6.781,+4.695,0\ntest_vit3.r160_in1k,160,64.421,35.579,85.576,14.425,0.93,0.950,bicubic,+7.513,+4.828,0\ntest_convnext2.r160_in1k,160,60.962,39.038,83.865,16.135,0.48,0.950,bicubic,+7.442,+5.307,0\ntest_convnext3.r160_in1k,160,60.843,39.157,83.780,16.220,0.47,0.950,bicubic,+7.541,+5.454,0\ntest_convnext.r160_in1k,160,54.782,45.218,79.710,20.290,0.27,0.950,bicubic,+7.008,+5.544,0\ntest_nfnet.r160_in1k,160,54.572,45.428,78.778,21.223,0.38,0.950,bicubic,+6.938,+5.898,0\ntest_efficientnet_evos.r160_in1k,160,52.982,47.018,76.809,23.191,0.36,0.950,bicubic,+6.478,+5.787,+1\ntest_efficientnet.r160_in1k,160,52.943,47.057,76.903,23.097,0.36,0.950,bicubic,+6.437,+5.867,-1\ntest_byobnet.r160_in1k,160,52.388,47.612,76.785,23.215,0.46,0.950,bicubic,+6.530,+5.781,0\ntest_efficientnet_gn.r160_in1k,160,50.238,49.762,74.960,25.040,0.36,0.950,bicubic,+6.350,+5.804,+1\ntest_efficientnet_ln.r160_in1k,160,50.195,49.805,75.287,24.
713,0.36,0.950,bicubic,+6.237,+5.961,-1\ntest_vit2.r160_in1k,160,48.434,51.566,74.469,25.531,0.46,0.950,bicubic,+6.184,+5.487,0\ntest_resnet.r160_in1k,160,47.911,52.089,73.848,26.152,0.47,0.950,bilinear,+6.309,+5.864,0\ntest_vit.r160_in1k,160,47.087,52.913,72.804,27.196,0.37,0.950,bicubic,+6.105,+5.422,0\n"
  },
  {
    "path": "results/results-imagenet.csv",
    "content": "model,img_size,top1,top1_err,top5,top5_err,param_count,crop_pct,interpolation\neva02_large_patch14_448.mim_m38m_ft_in22k_in1k,448,90.056,9.944,99.054,0.946,305.08,1.000,bicubic\neva02_large_patch14_448.mim_in22k_ft_in22k_in1k,448,89.956,10.044,99.014,0.986,305.08,1.000,bicubic\neva_giant_patch14_560.m30m_ft_in22k_in1k,560,89.790,10.210,98.990,1.010,\"1,014.45\",1.000,bicubic\neva02_large_patch14_448.mim_in22k_ft_in1k,448,89.634,10.366,98.954,1.046,305.08,1.000,bicubic\neva_giant_patch14_336.m30m_ft_in22k_in1k,336,89.566,10.434,98.952,1.048,\"1,013.01\",1.000,bicubic\neva02_large_patch14_448.mim_m38m_ft_in1k,448,89.550,10.450,98.924,1.076,305.08,1.000,bicubic\neva_giant_patch14_336.clip_ft_in1k,336,89.462,10.538,98.828,1.172,\"1,013.01\",1.000,bicubic\nvit_so400m_patch14_siglip_378.webli_ft_in1k,378,89.410,10.590,98.854,1.146,429.38,1.000,bicubic\neva_large_patch14_336.in22k_ft_in22k_in1k,336,89.238,10.762,98.848,1.152,304.53,1.000,bicubic\nvit_so400m_patch14_siglip_gap_378.webli_ft_in1k,378,89.056,10.944,98.752,1.248,414.14,1.000,bicubic\neva_giant_patch14_224.clip_ft_in1k,224,88.896,11.104,98.672,1.328,\"1,012.56\",0.900,bicubic\nconvnextv2_huge.fcmae_ft_in22k_in1k_512,512,88.860,11.140,98.740,1.260,660.29,1.000,bicubic\neva_large_patch14_336.in22k_ft_in1k,336,88.680,11.320,98.714,1.286,304.53,1.000,bicubic\neva02_base_patch14_448.mim_in22k_ft_in22k_in1k,448,88.678,11.322,98.726,1.274,87.12,1.000,bicubic\nconvnextv2_huge.fcmae_ft_in22k_in1k_384,384,88.666,11.334,98.734,1.266,660.29,1.000,bicubic\nvit_huge_patch14_clip_336.laion2b_ft_in12k_in1k,336,88.634,11.366,98.668,1.332,632.46,1.000,bicubic\nconvnext_xxlarge.clip_laion2b_soup_ft_in1k,256,88.622,11.378,98.718,1.282,846.47,1.000,bicubic\neva_large_patch14_196.in22k_ft_in22k_in1k,196,88.590,11.410,98.662,1.338,304.14,1.000,bicubic\nbeit_large_patch16_512.in22k_ft_in22k_in1k,512,88.576,11.424,98.656,1.344,305.67,1.000,bicubic\nmaxvit_xlarge_tf_512.in21k_ft_in1k,512,88.540,11.460,98.650,1.350,475.77,
1.000,bicubic\nbeitv2_large_patch16_224.in1k_ft_in22k_in1k,224,88.406,11.594,98.602,1.398,304.43,0.950,bicubic\nbeit_large_patch16_384.in22k_ft_in22k_in1k,384,88.380,11.620,98.604,1.396,305.00,1.000,bicubic\ntf_efficientnet_l2.ns_jft_in1k,800,88.360,11.640,98.656,1.344,480.31,0.960,bicubic\nconvnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_384,384,88.334,11.666,98.574,1.426,200.13,1.000,bicubic\nmaxvit_xlarge_tf_384.in21k_ft_in1k,384,88.302,11.698,98.538,1.462,475.32,1.000,bicubic\nvit_huge_patch14_clip_224.laion2b_ft_in12k_in1k,224,88.288,11.712,98.550,1.450,632.05,1.000,bicubic\neva02_base_patch14_448.mim_in22k_ft_in1k,448,88.262,11.738,98.570,1.430,87.12,1.000,bicubic\nvit_large_patch14_clip_336.openai_ft_in12k_in1k,336,88.260,11.740,98.524,1.476,304.53,1.000,bicubic\ntf_efficientnet_l2.ns_jft_in1k_475,475,88.246,11.754,98.554,1.446,480.31,0.936,bicubic\nmaxvit_large_tf_512.in21k_ft_in1k,512,88.236,11.764,98.608,1.392,212.33,1.000,bicubic\nregnety_1280.swag_ft_in1k,384,88.228,11.772,98.684,1.316,644.81,1.000,bicubic\nmaxvit_base_tf_512.in21k_ft_in1k,512,88.214,11.786,98.538,1.462,119.88,1.000,bicubic\nvit_large_patch14_clip_336.laion2b_ft_in12k_in1k,336,88.190,11.810,98.566,1.434,304.53,1.000,bicubic\nconvnextv2_large.fcmae_ft_in22k_in1k_384,384,88.180,11.820,98.520,1.480,197.96,1.000,bicubic\nvit_large_patch14_clip_224.openai_ft_in12k_in1k,224,88.162,11.838,98.538,1.462,304.20,1.000,bicubic\nvit_so150m2_patch16_reg1_gap_448.sbb_e200_in12k_ft_in1k,448,88.074,11.926,98.558,1.442,136.50,1.000,bicubic\ncaformer_b36.sail_in22k_ft_in1k_384,384,88.058,11.942,98.590,1.410,98.75,1.000,bicubic\nmaxvit_large_tf_384.in21k_ft_in1k,384,87.994,12.006,98.576,1.424,212.03,1.000,bicubic\nconvnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_320,320,87.970,12.030,98.474,1.526,200.13,1.000,bicubic\neva_large_patch14_196.in22k_ft_in1k,196,87.952,12.048,98.490,1.510,304.14,1.000,bicubic\nvit_so150m2_patch16_reg1_gap_384.sbb_e200_in12k_ft_in1k,384,87.932,12.068,98.500,1.500,136.33,1.000
,bicubic\nmaxvit_base_tf_384.in21k_ft_in1k,384,87.930,12.070,98.546,1.454,119.65,1.000,bicubic\nvit_large_patch14_clip_224.laion2b_ft_in12k_in1k,224,87.900,12.100,98.408,1.592,304.20,1.000,bicubic\nconvnext_large_mlp.clip_laion2b_augreg_ft_in1k_384,384,87.890,12.110,98.448,1.552,200.13,1.000,bicubic\nvit_large_patch14_clip_336.laion2b_ft_in1k,336,87.870,12.130,98.360,1.640,304.53,1.000,bicubic\nvit_large_patch14_clip_224.openai_ft_in1k,224,87.858,12.142,98.430,1.570,304.20,1.000,bicubic\nmaxvit_rmlp_base_rw_384.sw_in12k_ft_in1k,384,87.810,12.190,98.366,1.634,116.14,1.000,bicubic\nconvnext_xlarge.fb_in22k_ft_in1k_384,384,87.764,12.236,98.552,1.448,350.20,1.000,bicubic\ndeit3_large_patch16_384.fb_in22k_ft_in1k,384,87.736,12.264,98.508,1.492,304.76,1.000,bicubic\nconvnextv2_base.fcmae_ft_in22k_in1k_384,384,87.638,12.362,98.416,1.584,88.72,1.000,bicubic\nbeit3_large_patch16_224.in22k_ft_in1k,224,87.628,12.372,98.334,1.666,304.57,1.000,bicubic\nvit_huge_patch14_clip_224.laion2b_ft_in1k,224,87.612,12.388,98.226,1.774,632.05,1.000,bicubic\nconvformer_b36.sail_in22k_ft_in1k_384,384,87.606,12.394,98.424,1.576,99.88,1.000,bicubic\nbeit3_large_patch16_224.indomain_in22k_ft_in1k,224,87.534,12.466,98.364,1.636,304.57,1.000,bicubic\nmambaout_base_plus_rw.sw_e150_r384_in12k_ft_in1k,384,87.508,12.492,98.428,1.572,101.66,1.000,bicubic\nconvnextv2_large.fcmae_ft_in22k_in1k,288,87.494,12.506,98.360,1.640,197.96,1.000,bicubic\nbeit_large_patch16_224.in22k_ft_in22k_in1k,224,87.486,12.514,98.318,1.682,304.43,0.900,bicubic\nmaxxvitv2_rmlp_base_rw_384.sw_in12k_ft_in1k,384,87.480,12.520,98.376,1.624,116.09,1.000,bicubic\nswinv2_large_window12to24_192to384.ms_in22k_ft_in1k,384,87.474,12.526,98.264,1.736,196.74,1.000,bicubic\ncaformer_m36.sail_in22k_ft_in1k_384,384,87.470,12.530,98.308,1.692,56.20,1.000,bicubic\nconvnext_large.fb_in22k_ft_in1k_384,384,87.456,12.544,98.380,1.620,197.77,1.000,bicubic\ncaformer_b36.sail_in22k_ft_in1k,224,87.450,12.550,98.336,1.664,98.75,1.000,bicubic\nvit_medium
d_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k,384,87.438,12.562,98.256,1.744,64.27,1.000,bicubic\nbeitv2_large_patch16_224.in1k_ft_in1k,224,87.414,12.586,98.216,1.784,304.43,0.950,bicubic\ncoatnet_rmlp_2_rw_384.sw_in12k_ft_in1k,384,87.388,12.612,98.312,1.688,73.88,1.000,bicubic\nconvnext_xlarge.fb_in22k_ft_in1k,288,87.378,12.622,98.330,1.670,350.20,1.000,bicubic\nvit_so150m_patch16_reg4_gap_384.sbb_e250_in12k_ft_in1k,384,87.370,12.630,98.314,1.686,134.42,1.000,bicubic\nconvnext_large_mlp.clip_laion2b_augreg_ft_in1k,256,87.344,12.656,98.222,1.778,200.13,1.000,bicubic\nvit_so150m2_patch16_reg1_gap_256.sbb_e200_in12k_ft_in1k,256,87.318,12.682,98.328,1.672,136.06,1.000,bicubic\nseresnextaa201d_32x8d.sw_in12k_ft_in1k_384,384,87.304,12.696,98.334,1.666,149.39,1.000,bicubic\nvit_large_patch14_clip_224.laion2b_ft_in1k,224,87.288,12.712,98.236,1.764,304.20,1.000,bicubic\nconvnextv2_large.fcmae_ft_in22k_in1k,224,87.266,12.734,98.240,1.760,197.96,0.875,bicubic\nvit_base_patch16_clip_384.laion2b_ft_in12k_in1k,384,87.210,12.790,98.032,1.968,86.86,1.000,bicubic\ndeit3_huge_patch14_224.fb_in22k_ft_in1k,224,87.180,12.820,98.264,1.736,632.13,1.000,bicubic\nconvnext_base.clip_laion2b_augreg_ft_in12k_in1k_384,384,87.144,12.856,98.228,1.772,88.59,1.000,bicubic\nswin_large_patch4_window12_384.ms_in22k_ft_in1k,384,87.142,12.858,98.228,1.772,196.74,1.000,bicubic\nswinv2_base_window12to24_192to384.ms_in22k_ft_in1k,384,87.142,12.858,98.228,1.772,87.92,1.000,bicubic\nvit_large_patch16_384.augreg_in21k_ft_in1k,384,87.096,12.904,98.302,1.698,304.72,1.000,bicubic\nvolo_d5_512.sail_in1k,512,87.068,12.932,97.970,2.030,296.09,1.150,bicubic\nvit_base_patch16_clip_384.openai_ft_in12k_in1k,384,87.030,12.970,98.186,1.814,86.86,0.950,bicubic\nconvnext_large.fb_in22k_ft_in1k,288,87.012,12.988,98.212,1.788,197.77,1.000,bicubic\nconvnextv2_base.fcmae_ft_in22k_in1k,288,86.994,13.006,98.168,1.832,88.72,1.000,bicubic\nconvformer_b36.sail_in22k_ft_in1k,224,86.992,13.008,98.164,1.836,99.88,1.000,bicubic\ndeit
3_large_patch16_224.fb_in22k_ft_in1k,224,86.990,13.010,98.246,1.754,304.37,1.000,bicubic\nvolo_d5_448.sail_in1k,448,86.974,13.026,97.928,2.072,295.91,1.150,bicubic\nconvnext_xlarge.fb_in22k_ft_in1k,224,86.970,13.030,98.204,1.796,350.20,0.875,bicubic\nswinv2_large_window12to16_192to256.ms_in22k_ft_in1k,256,86.934,13.066,98.106,1.894,196.74,0.900,bicubic\nmambaout_base_plus_rw.sw_e150_in12k_ft_in1k,288,86.916,13.084,98.238,1.762,101.66,1.000,bicubic\nmaxvit_rmlp_base_rw_224.sw_in12k_ft_in1k,224,86.908,13.092,98.004,1.996,116.14,0.950,bicubic\nconvformer_m36.sail_in22k_ft_in1k_384,384,86.868,13.132,98.122,1.878,57.05,1.000,bicubic\ncaformer_s36.sail_in22k_ft_in1k_384,384,86.860,13.140,98.220,1.780,39.30,1.000,bicubic\ntf_efficientnet_b7.ns_jft_in1k,600,86.850,13.150,98.080,1.920,66.35,0.949,bicubic\nhiera_huge_224.mae_in1k_ft_in1k,224,86.840,13.160,98.008,1.992,672.78,0.900,bicubic\nregnety_320.swag_ft_in1k,384,86.830,13.170,98.362,1.638,145.05,1.000,bicubic\nconvnext_base.fb_in22k_ft_in1k_384,384,86.826,13.174,98.244,1.756,88.59,1.000,bicubic\nbeit_base_patch16_384.in22k_ft_in22k_in1k,384,86.814,13.186,98.136,1.864,86.74,1.000,bicubic\ntf_efficientnetv2_l.in21k_ft_in1k,480,86.802,13.198,98.124,1.876,118.52,1.000,bicubic\nvolo_d4_448.sail_in1k,448,86.788,13.212,97.880,2.120,193.41,1.150,bicubic\ntf_efficientnetv2_xl.in21k_ft_in1k,512,86.754,13.246,98.014,1.986,208.12,1.000,bicubic\nconvnextv2_base.fcmae_ft_in22k_in1k,224,86.752,13.248,98.020,1.980,88.72,0.875,bicubic\ndeit3_base_patch16_384.fb_in22k_ft_in1k,384,86.742,13.258,98.110,1.890,86.88,1.000,bicubic\nseresnextaa101d_32x8d.sw_in12k_ft_in1k_288,320,86.722,13.278,98.184,1.816,93.59,1.000,bicubic\nvit_so150m_patch16_reg4_gap_256.sbb_e250_in12k_ft_in1k,256,86.720,13.280,98.024,1.976,134.13,0.950,bicubic\nmambaout_base_plus_rw.sw_e150_in12k_ft_in1k,224,86.642,13.358,98.150,1.850,101.66,1.000,bicubic\nmaxxvitv2_rmlp_base_rw_224.sw_in12k_ft_in1k,224,86.640,13.360,98.014,1.986,116.09,0.950,bicubic\nvit_base_patch16_clip
_384.laion2b_ft_in1k,384,86.638,13.362,98.008,1.992,86.86,1.000,bicubic\nvit_betwixt_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k,384,86.618,13.382,98.010,1.990,60.60,1.000,bicubic\nconvnextv2_huge.fcmae_ft_in1k,288,86.618,13.382,97.968,2.032,660.29,1.000,bicubic\nconvnext_large.fb_in22k_ft_in1k,224,86.612,13.388,98.036,1.964,197.77,0.875,bicubic\nvit_mediumd_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k,256,86.608,13.392,97.934,2.066,64.11,0.950,bicubic\ncaformer_m36.sail_in22k_ft_in1k,224,86.606,13.394,98.038,1.962,56.20,1.000,bicubic\nmaxvit_base_tf_512.in1k,512,86.598,13.402,97.924,2.076,119.88,1.000,bicubic\ncoatnet_2_rw_224.sw_in12k_ft_in1k,224,86.582,13.418,97.896,2.104,73.87,0.950,bicubic\nnextvit_large.bd_ssld_6m_in1k_384,384,86.544,13.456,98.126,1.874,57.87,1.000,bicubic\ncoatnet_rmlp_2_rw_224.sw_in12k_ft_in1k,224,86.538,13.462,97.894,2.106,73.88,0.950,bicubic\nmaxvit_large_tf_512.in1k,512,86.534,13.466,97.884,2.116,212.33,1.000,bicubic\nseresnextaa101d_32x8d.sw_in12k_ft_in1k_288,288,86.530,13.470,98.094,1.906,93.59,0.950,bicubic\nseresnextaa101d_32x8d.sw_in12k_ft_in1k,288,86.524,13.476,98.034,1.966,93.59,1.000,bicubic\nvolo_d3_448.sail_in1k,448,86.514,13.486,97.710,2.290,86.63,1.000,bicubic\nbeitv2_base_patch16_224.in1k_ft_in22k_in1k,224,86.494,13.506,98.056,1.944,86.53,0.900,bicubic\nconvnext_base.clip_laiona_augreg_ft_in1k_384,384,86.490,13.510,97.960,2.040,88.59,1.000,bicubic\ncait_m48_448.fb_dist_in1k,448,86.480,13.520,97.748,2.252,356.46,1.000,bicubic\ntf_efficientnet_b6.ns_jft_in1k,528,86.476,13.524,97.864,2.136,43.04,0.942,bicubic\ntiny_vit_21m_512.dist_in22k_ft_in1k,512,86.444,13.556,97.884,2.116,21.27,1.000,bicubic\nswin_base_patch4_window12_384.ms_in22k_ft_in1k,384,86.442,13.558,98.068,1.932,87.90,1.000,bicubic\ncaformer_b36.sail_in1k_384,384,86.400,13.600,97.824,2.176,98.75,1.000,bicubic\nefficientvit_l3.r384_in1k,384,86.396,13.604,97.640,2.360,246.04,1.000,bicubic\nhgnetv2_b6.ssld_stage2_ft_in1k,288,86.380,13.620,97.942,2.058,75.26,1.000,bicubi
c\nconvnext_base.clip_laion2b_augreg_ft_in12k_in1k,256,86.378,13.622,97.976,2.024,88.59,1.000,bicubic\nconvformer_s36.sail_in22k_ft_in1k_384,384,86.374,13.626,97.980,2.020,40.01,1.000,bicubic\nnextvit_base.bd_ssld_6m_in1k_384,384,86.372,13.628,98.040,1.960,44.82,1.000,bicubic\ndm_nfnet_f6.dm_in1k,576,86.364,13.636,97.900,2.100,438.36,0.956,bicubic\nswin_large_patch4_window7_224.ms_in22k_ft_in1k,224,86.320,13.680,97.890,2.110,196.53,0.900,bicubic\nconvnext_base.fb_in22k_ft_in1k,288,86.302,13.698,98.088,1.912,88.59,1.000,bicubic\nhgnetv2_b6.ssld_stage1_in22k_in1k,288,86.300,13.700,97.942,2.058,75.26,1.000,bicubic\nmaxvit_base_tf_384.in1k,384,86.290,13.710,97.802,2.198,119.65,1.000,bicubic\nswinv2_base_window12to16_192to256.ms_in22k_ft_in1k,256,86.270,13.730,97.906,2.094,87.92,0.900,bicubic\nconvnextv2_huge.fcmae_ft_in1k,224,86.260,13.740,97.752,2.248,660.29,0.875,bicubic\nvit_base_patch8_224.augreg2_in21k_ft_in1k,224,86.252,13.748,97.840,2.160,86.58,0.900,bicubic\nmaxvit_large_tf_384.in1k,384,86.242,13.758,97.684,2.316,212.03,1.000,bicubic\nefficientvit_l3.r320_in1k,320,86.230,13.770,97.476,2.524,246.04,1.000,bicubic\nvit_base_patch16_clip_384.openai_ft_in1k,384,86.210,13.790,97.894,2.106,86.86,1.000,bicubic\nhgnetv2_b6.ssld_stage2_ft_in1k,224,86.206,13.794,97.810,2.190,75.26,0.965,bicubic\nvit_base_patch16_clip_224.laion2b_ft_in12k_in1k,224,86.204,13.796,97.762,2.238,86.57,0.950,bicubic\nvit_mediumd_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,86.198,13.802,97.888,2.112,64.11,0.950,bicubic\nvit_large_r50_s32_384.augreg_in21k_ft_in1k,384,86.188,13.812,97.928,2.072,329.09,1.000,bicubic\nconvnext_small.in12k_ft_in1k_384,384,86.186,13.814,97.920,2.080,50.22,1.000,bicubic\nconvnext_base.clip_laion2b_augreg_ft_in1k,256,86.184,13.816,97.674,2.326,88.59,1.000,bicubic\ncaformer_m36.sail_in1k_384,384,86.164,13.836,97.812,2.188,56.20,1.000,bicubic\ndm_nfnet_f6.dm_in1k,448,86.156,13.844,97.762,2.238,438.36,0.956,bicubic\nconvformer_m36.sail_in22k_ft_in1k,224,86.152,13.848,97.852,2
.148,57.05,1.000,bicubic\nconvnextv2_large.fcmae_ft_in1k,288,86.132,13.868,97.820,2.180,197.96,1.000,bicubic\nmaxvit_small_tf_512.in1k,512,86.102,13.898,97.758,2.242,69.13,1.000,bicubic\ndm_nfnet_f5.dm_in1k,544,86.102,13.898,97.688,2.312,377.21,0.954,bicubic\ntf_efficientnet_b5.ns_jft_in1k,456,86.096,13.904,97.744,2.256,30.39,0.934,bicubic\ntiny_vit_21m_384.dist_in22k_ft_in1k,384,86.088,13.912,97.716,2.284,21.23,1.000,bicubic\nvolo_d5_224.sail_in1k,224,86.076,13.924,97.574,2.426,295.46,0.960,bicubic\ncait_m36_384.fb_dist_in1k,384,86.060,13.940,97.732,2.268,271.22,1.000,bicubic\nvolo_d2_384.sail_in1k,384,86.054,13.946,97.582,2.418,58.87,1.000,bicubic\nhgnetv2_b6.ssld_stage1_in22k_in1k,224,86.052,13.948,97.806,2.194,75.26,0.965,bicubic\nhiera_large_224.mae_in1k_ft_in1k,224,86.046,13.954,97.646,2.354,213.74,0.900,bicubic\nregnety_160.sw_in12k_ft_in1k,288,86.028,13.972,97.836,2.164,83.59,1.000,bicubic\nxcit_large_24_p8_384.fb_dist_in1k,384,86.024,13.976,97.690,2.310,188.93,1.000,bicubic\nregnety_160.swag_ft_in1k,384,86.020,13.980,98.046,1.954,83.59,1.000,bicubic\ntf_efficientnetv2_m.in21k_ft_in1k,480,86.014,13.986,97.956,2.044,54.14,1.000,bicubic\nvit_base_patch16_384.augreg_in21k_ft_in1k,384,86.010,13.990,97.998,2.002,86.86,1.000,bicubic\nregnety_160.lion_in12k_ft_in1k,288,86.004,13.996,97.834,2.166,83.59,1.000,bicubic\nefficientvit_l2.r384_in1k,384,85.984,14.016,97.510,2.490,63.71,1.000,bicubic\nnextvit_small.bd_ssld_6m_in1k_384,384,85.966,14.034,97.904,2.096,31.76,1.000,bicubic\nregnety_1280.swag_lc_in1k,224,85.964,14.036,97.852,2.148,644.81,0.965,bicubic\nseresnextaa101d_32x8d.sw_in12k_ft_in1k,224,85.956,14.044,97.824,2.176,93.59,0.875,bicubic\nvit_base_patch16_clip_224.openai_ft_in12k_in1k,224,85.954,14.046,97.724,2.276,86.57,0.950,bicubic\nefficientvit_l3.r256_in1k,256,85.952,14.048,97.340,2.660,246.04,1.000,bicubic\nefficientnet_b5.sw_in12k_ft_in1k,448,85.894,14.106,97.742,2.258,30.39,1.000,bicubic\nvolo_d4_224.sail_in1k,224,85.868,14.132,97.464,2.536,192.96,0.96
0,bicubic\nvit_large_patch16_224.augreg_in21k_ft_in1k,224,85.856,14.144,97.822,2.178,304.33,0.900,bicubic\ntf_efficientnetv2_l.in21k_ft_in1k,384,85.846,14.154,97.766,2.234,118.52,1.000,bicubic\nrdnet_large.nv_in1k_ft_in1k_384,384,85.844,14.156,97.680,2.320,186.27,1.000,bicubic\nxcit_medium_24_p8_384.fb_dist_in1k,384,85.842,14.158,97.602,2.398,84.32,1.000,bicubic\nvit_base_patch8_224.augreg_in21k_ft_in1k,224,85.838,14.162,97.776,2.224,86.58,0.900,bicubic\ndm_nfnet_f4.dm_in1k,512,85.838,14.162,97.652,2.348,316.07,0.951,bicubic\ndeit3_large_patch16_384.fb_in1k,384,85.820,14.180,97.602,2.398,304.76,1.000,bicubic\nconvnext_base.fb_in22k_ft_in1k,224,85.814,14.186,97.856,2.144,88.59,0.875,bicubic\nefficientvit_l3.r224_in1k,224,85.810,14.190,97.210,2.790,246.04,1.000,bicubic\nvit_base_patch32_clip_448.laion2b_ft_in12k_in1k,448,85.792,14.208,97.634,2.366,88.34,1.000,bicubic\ncaformer_s36.sail_in22k_ft_in1k,224,85.780,14.220,97.832,2.168,39.30,1.000,bicubic\nxcit_large_24_p16_384.fb_dist_in1k,384,85.778,14.222,97.530,2.470,189.10,1.000,bicubic\neva02_small_patch14_336.mim_in22k_ft_in1k,336,85.770,14.230,97.616,2.384,22.13,1.000,bicubic\nconvformer_b36.sail_in1k_384,384,85.770,14.230,97.522,2.478,99.88,1.000,bicubic\ncaformer_s36.sail_in1k_384,384,85.762,14.238,97.666,2.334,39.30,1.000,bicubic\nconvnextv2_large.fcmae_ft_in1k,224,85.762,14.238,97.574,2.426,197.96,0.875,bicubic\nconvnext_small.fb_in22k_ft_in1k_384,384,85.752,14.248,97.878,2.122,50.22,1.000,bicubic\nvit_betwixt_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k,256,85.734,14.266,97.610,2.390,60.40,0.950,bicubic\ndeit3_base_patch16_224.fb_in22k_ft_in1k,224,85.728,14.272,97.746,2.254,86.59,1.000,bicubic\ndm_nfnet_f5.dm_in1k,416,85.712,14.288,97.550,2.450,377.21,0.954,bicubic\ndm_nfnet_f3.dm_in1k,416,85.676,14.324,97.576,2.424,254.92,0.940,bicubic\ntf_efficientnetv2_l.in1k,480,85.660,14.340,97.472,2.528,118.52,1.000,bicubic\nmaxvit_tiny_tf_512.in1k,512,85.658,14.342,97.592,2.408,31.05,1.000,bicubic\nflexivit_large.1200ep_
in1k,240,85.646,14.354,97.546,2.454,304.36,0.950,bicubic\nefficientvit_l2.r288_in1k,288,85.614,14.386,97.364,2.636,63.71,1.000,bicubic\nregnety_160.lion_in12k_ft_in1k,224,85.602,14.398,97.668,2.332,83.59,0.950,bicubic\nregnety_160.sw_in12k_ft_in1k,224,85.596,14.404,97.668,2.332,83.59,0.950,bicubic\nbeitv2_base_patch16_224.in1k_ft_in1k,224,85.596,14.404,97.514,2.486,86.53,0.900,bicubic\nxcit_small_24_p8_384.fb_dist_in1k,384,85.580,14.420,97.572,2.428,47.63,1.000,bicubic\nconvformer_m36.sail_in1k_384,384,85.580,14.420,97.548,2.452,57.05,1.000,bicubic\ntf_efficientnetv2_xl.in21k_ft_in1k,384,85.562,14.438,97.456,2.544,208.12,1.000,bicubic\nvit_medium_patch16_gap_384.sw_in12k_ft_in1k,384,85.556,14.444,97.638,2.362,39.03,0.950,bicubic\nmaxvit_small_tf_384.in1k,384,85.534,14.466,97.466,2.534,69.02,1.000,bicubic\nflexivit_large.600ep_in1k,240,85.530,14.470,97.494,2.506,304.36,0.950,bicubic\nnextvit_large.bd_ssld_6m_in1k,224,85.514,14.486,97.678,2.322,57.87,0.950,bicubic\ndm_nfnet_f4.dm_in1k,384,85.514,14.486,97.488,2.512,316.07,0.951,bicubic\ncaformer_b36.sail_in1k,224,85.502,14.498,97.316,2.684,98.75,1.000,bicubic\nconvnextv2_base.fcmae_ft_in1k,288,85.490,14.510,97.380,2.620,88.72,1.000,bicubic\nhgnet_base.ssld_in1k,288,85.488,14.512,97.630,2.370,71.58,1.000,bicubic\nvit_base_patch16_clip_224.laion2b_ft_in1k,224,85.488,14.512,97.586,2.414,86.57,1.000,bicubic\ncait_s36_384.fb_dist_in1k,384,85.458,14.542,97.480,2.520,68.37,1.000,bicubic\nbeit3_base_patch16_224.indomain_in22k_ft_in1k,224,85.454,14.546,97.620,2.380,86.66,1.000,bicubic\ncaformer_s18.sail_in22k_ft_in1k_384,384,85.442,14.558,97.718,2.282,26.34,1.000,bicubic\nvolo_d3_224.sail_in1k,224,85.440,14.560,97.274,2.726,86.33,0.960,bicubic\ndeit_base_distilled_patch16_384.fb_in1k,384,85.438,14.562,97.332,2.668,87.63,1.000,bicubic\nxcit_medium_24_p16_384.fb_dist_in1k,384,85.434,14.566,97.408,2.592,84.40,1.000,bicubic\nregnety_120.sw_in12k_ft_in1k,288,85.428,14.572,97.576,2.424,51.82,1.000,bicubic\nvit_betwixt_patch16_reg4_g
ap_256.sbb_in12k_ft_in1k,256,85.426,14.574,97.478,2.522,60.40,0.950,bicubic\nconvformer_s36.sail_in22k_ft_in1k,224,85.420,14.580,97.574,2.426,40.01,1.000,bicubic\nvit_base_patch32_clip_384.laion2b_ft_in12k_in1k,384,85.398,14.602,97.664,2.336,88.30,1.000,bicubic\nxcit_large_24_p8_224.fb_dist_in1k,224,85.396,14.604,97.418,2.582,188.93,1.000,bicubic\nbeit3_base_patch16_224.in22k_ft_in1k,224,85.390,14.610,97.640,2.360,86.66,1.000,bicubic\nconvformer_s36.sail_in1k_384,384,85.380,14.620,97.468,2.532,40.01,1.000,bicubic\nefficientvit_l2.r256_in1k,256,85.380,14.620,97.256,2.744,63.71,1.000,bicubic\ntf_efficientnet_b8.ap_in1k,672,85.358,14.642,97.298,2.702,87.41,0.954,bicubic\ntf_efficientnet_b8.ra_in1k,672,85.344,14.656,97.392,2.608,87.41,0.954,bicubic\nconvnext_small.in12k_ft_in1k,288,85.324,14.676,97.554,2.446,50.22,1.000,bicubic\nflexivit_large.300ep_in1k,240,85.284,14.716,97.404,2.596,304.36,0.950,bicubic\nvit_base_patch16_clip_224.openai_ft_in1k,224,85.282,14.718,97.448,2.552,86.57,0.900,bicubic\nconvnext_small.fb_in22k_ft_in1k,288,85.276,14.724,97.678,2.322,50.22,1.000,bicubic\nswin_base_patch4_window7_224.ms_in22k_ft_in1k,224,85.276,14.724,97.566,2.434,87.77,0.900,bicubic\nmvitv2_large.fb_in1k,224,85.264,14.736,97.188,2.812,217.99,0.900,bicubic\nvolo_d1_384.sail_in1k,384,85.262,14.738,97.218,2.782,26.78,1.000,bicubic\ncaformer_m36.sail_in1k,224,85.242,14.758,97.226,2.774,56.20,1.000,bicubic\nbeit_base_patch16_224.in22k_ft_in22k_in1k,224,85.238,14.762,97.654,2.346,86.53,0.900,bicubic\nvit_base_patch32_clip_384.openai_ft_in12k_in1k,384,85.218,14.782,97.398,2.602,88.30,0.950,bicubic\ntf_efficientnetv2_m.in1k,480,85.208,14.792,97.358,2.642,54.14,1.000,bicubic\ndeit3_huge_patch14_224.fb_in1k,224,85.204,14.796,97.364,2.636,632.13,0.900,bicubic\nvolo_d2_224.sail_in1k,224,85.204,14.796,97.186,2.814,58.68,0.960,bicubic\ninception_next_base.sail_in1k_384,384,85.202,14.798,97.424,2.576,86.67,1.000,bicubic\ntf_efficientnetv2_l.in1k,384,85.202,14.798,97.274,2.726,118.52,1.000,bic
ubic\nnextvit_base.bd_ssld_6m_in1k,224,85.186,14.814,97.600,2.400,44.82,0.950,bicubic\ndm_nfnet_f2.dm_in1k,352,85.184,14.816,97.340,2.660,193.78,0.920,bicubic\nconvnext_small.in12k_ft_in1k,224,85.164,14.836,97.502,2.498,50.22,0.950,bicubic\nhgnetv2_b5.ssld_stage2_ft_in1k,288,85.158,14.842,97.588,2.412,39.57,1.000,bicubic\ntf_efficientnet_b4.ns_jft_in1k,380,85.156,14.844,97.482,2.518,19.34,0.922,bicubic\nconvnext_tiny.in12k_ft_in1k_384,384,85.154,14.846,97.634,2.366,28.59,1.000,bicubic\nregnety_2560.seer_ft_in1k,384,85.144,14.856,97.444,2.556,\"1,282.60\",1.000,bicubic\nhiera_base_plus_224.mae_in1k_ft_in1k,224,85.136,14.864,97.162,2.838,69.90,0.900,bicubic\ntf_efficientnet_b7.ap_in1k,600,85.132,14.868,97.256,2.744,66.35,0.949,bicubic\nvit_base_patch16_224.augreg2_in21k_ft_in1k,224,85.108,14.892,97.530,2.470,86.57,0.900,bicubic\nmaxvit_tiny_tf_384.in1k,384,85.106,14.894,97.382,2.618,30.98,1.000,bicubic\nxcit_small_24_p16_384.fb_dist_in1k,384,85.102,14.898,97.318,2.682,47.67,1.000,bicubic\nconvnextv2_tiny.fcmae_ft_in22k_in1k_384,384,85.098,14.902,97.632,2.368,28.64,1.000,bicubic\ndm_nfnet_f3.dm_in1k,320,85.092,14.908,97.386,2.614,254.92,0.940,bicubic\nresnext101_32x32d.fb_wsl_ig1b_ft_in1k,224,85.090,14.910,97.442,2.558,468.53,0.875,bilinear\ntiny_vit_21m_224.dist_in22k_ft_in1k,224,85.088,14.912,97.354,2.646,21.20,0.950,bicubic\ndeit3_base_patch16_384.fb_in1k,384,85.088,14.912,97.252,2.748,86.88,1.000,bicubic\nxcit_medium_24_p8_224.fb_dist_in1k,224,85.078,14.922,97.274,2.726,84.32,1.000,bicubic\nxcit_small_12_p8_384.fb_dist_in1k,384,85.068,14.932,97.264,2.736,26.21,1.000,bicubic\ncait_s24_384.fb_dist_in1k,384,85.048,14.952,97.350,2.650,47.06,1.000,bicubic\nefficientvit_l2.r224_in1k,224,85.046,14.954,97.104,2.896,63.71,1.000,bicubic\ncaformer_s18.sail_in1k_384,384,85.036,14.964,97.354,2.646,26.34,1.000,bicubic\nregnetz_e8.ra3_in1k,320,85.016,14.984,97.278,2.722,57.70,1.000,bicubic\nresnetrs420.tf_in1k,416,85.010,14.990,97.110,2.890,191.89,1.000,bicubic\nconvformer_s18.sa
il_in22k_ft_in1k_384,384,85.004,14.996,97.572,2.428,26.77,1.000,bicubic\nregnety_120.sw_in12k_ft_in1k,224,84.994,15.006,97.412,2.588,51.82,0.950,bicubic\nmobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k,544,84.990,15.010,97.294,2.706,32.59,1.000,bicubic\nmambaout_base_tall_rw.sw_e500_in1k,288,84.988,15.012,97.334,2.666,86.48,1.000,bicubic\nvit_base_r50_s16_384.orig_in21k_ft_in1k,384,84.988,15.012,97.296,2.704,98.95,1.000,bicubic\necaresnet269d.ra2_in1k,352,84.968,15.032,97.224,2.776,102.09,1.000,bicubic\nmambaout_base_wide_rw.sw_e500_in1k,288,84.966,15.034,97.222,2.778,94.45,1.000,bicubic\nvit_medium_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,84.956,15.044,97.384,2.616,38.88,0.950,bicubic\nefficientnet_h_b5.sw_r448_e450_in1k,576,84.942,15.058,97.390,2.610,45.88,1.000,bicubic\ncoatnet_rmlp_1_rw2_224.sw_in12k_ft_in1k,224,84.940,15.060,96.968,3.032,41.72,0.950,bicubic\nmaxvit_large_tf_224.in1k,224,84.938,15.062,96.980,3.020,211.79,0.950,bicubic\nresnetv2_152x4_bit.goog_in21k_ft_in1k,480,84.936,15.064,97.456,2.544,936.53,1.000,bilinear\nxcit_large_24_p16_224.fb_dist_in1k,224,84.936,15.064,97.128,2.872,189.10,1.000,bicubic\nhgnetv2_b5.ssld_stage1_in22k_in1k,288,84.930,15.070,97.494,2.506,39.57,1.000,bicubic\nnextvit_large.bd_in1k_384,384,84.930,15.070,97.290,2.710,57.87,1.000,bicubic\nefficientnet_x_b5.sw_r448_e450_in1k,576,84.928,15.072,97.312,2.688,33.44,1.000,bicubic\ntf_efficientnet_b7.ra_in1k,600,84.926,15.074,97.220,2.780,66.35,0.949,bicubic\nhiera_small_abswin_256.sbb2_e200_in12k_ft_in1k,256,84.912,15.088,97.260,2.740,34.36,0.950,bicubic\nhgnet_base.ssld_in1k,224,84.908,15.092,97.342,2.658,71.58,0.965,bicubic\ncoat_lite_medium_384.in1k,384,84.890,15.110,97.376,2.624,44.57,1.000,bicubic\nconvnextv2_base.fcmae_ft_in1k,224,84.890,15.110,97.088,2.912,88.72,0.875,bicubic\nmaxvit_base_tf_224.in1k,224,84.884,15.116,97.002,2.998,119.47,0.950,bicubic\nxcit_small_24_p8_224.fb_dist_in1k,224,84.878,15.122,97.200,2.800,47.63,1.000,bicubic\nnextvit_small.bd_ssld_6m_in1k,224,84
.872,15.128,97.372,2.628,31.76,0.950,bicubic\nconvnext_large.fb_in1k,288,84.862,15.138,97.210,2.790,197.77,1.000,bicubic\ndeit3_small_patch16_384.fb_in22k_ft_in1k,384,84.846,15.154,97.486,2.514,22.21,1.000,bicubic\nvit_large_patch16_rope_mixed_ape_224.naver_in1k,224,84.846,15.154,97.118,2.882,304.40,0.900,bicubic\nmambaout_base_short_rw.sw_e500_in1k,288,84.842,15.158,97.286,2.714,88.83,1.000,bicubic\nvit_large_patch16_rope_mixed_224.naver_in1k,224,84.840,15.160,97.128,2.872,304.20,0.900,bicubic\nefficientnetv2_rw_m.agc_in1k,416,84.830,15.170,97.148,2.852,53.24,1.000,bicubic\nconvformer_b36.sail_in1k,224,84.826,15.174,96.948,3.052,99.88,1.000,bicubic\nhgnetv2_b5.ssld_stage2_ft_in1k,224,84.824,15.176,97.294,2.706,39.57,0.965,bicubic\nrdnet_large.nv_in1k,224,84.794,15.206,97.156,2.844,186.27,0.900,bicubic\ntf_efficientnet_b6.ap_in1k,528,84.794,15.206,97.138,2.862,43.04,0.942,bicubic\nmobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k,480,84.772,15.228,97.344,2.656,32.59,1.000,bicubic\ndeit3_large_patch16_224.fb_in1k,224,84.772,15.228,97.048,2.952,304.37,0.900,bicubic\ntf_efficientnetv2_m.in21k_ft_in1k,384,84.768,15.232,97.434,2.566,54.14,1.000,bicubic\nxcit_small_12_p16_384.fb_dist_in1k,384,84.734,15.266,97.136,2.864,26.25,1.000,bicubic\nmambaout_base.in1k,288,84.730,15.270,96.934,3.066,84.81,1.000,bicubic\necaresnet269d.ra2_in1k,320,84.724,15.276,97.180,2.820,102.09,0.950,bicubic\neca_nfnet_l2.ra3_in1k,384,84.710,15.290,97.256,2.744,56.72,1.000,bicubic\nresnetrs350.tf_in1k,384,84.710,15.290,96.992,3.008,163.96,1.000,bicubic\nnextvit_base.bd_in1k_384,384,84.706,15.294,97.220,2.780,44.82,1.000,bicubic\ndm_nfnet_f1.dm_in1k,320,84.700,15.300,97.178,2.822,132.63,0.910,bicubic\nflexivit_base.1200ep_in1k,240,84.678,15.322,96.980,3.020,86.59,0.950,bicubic\nmobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k,448,84.668,15.332,97.108,2.892,32.59,0.950,bicubic\nvit_large_patch16_rope_ape_224.naver_in1k,224,84.660,15.340,97.146,2.854,304.37,0.900,bicubic\nregnety_320.swag_lc_in1k,22
4,84.654,15.346,97.492,2.508,145.05,0.965,bicubic\ndavit_base.msft_in1k,224,84.652,15.348,97.020,2.980,87.95,0.950,bicubic\nvit_large_patch16_rope_224.naver_in1k,224,84.646,15.354,97.120,2.880,304.17,0.900,bicubic\nmaxxvit_rmlp_small_rw_256.sw_in1k,256,84.634,15.366,97.062,2.938,66.01,0.950,bicubic\nswinv2_base_window16_256.ms_in1k,256,84.608,15.392,97.064,2.936,87.92,0.900,bicubic\nfastvit_ma36.apple_dist_in1k,256,84.608,15.392,96.996,3.004,44.07,0.950,bicubic\ncoatnet_rmlp_2_rw_224.sw_in1k,224,84.608,15.392,96.744,3.256,73.88,0.950,bicubic\nmambaout_small_rw.sw_e450_in1k,288,84.604,15.396,97.096,2.904,48.50,1.000,bicubic\nconvnext_small.fb_in22k_ft_in1k,224,84.582,15.418,97.414,2.586,50.22,0.875,bicubic\nrexnetr_300.sw_in12k_ft_in1k,288,84.582,15.418,97.278,2.722,34.81,1.000,bicubic\nseresnextaa101d_32x8d.ah_in1k,288,84.580,15.420,97.062,2.938,93.59,1.000,bicubic\nhiera_small_abswin_256.sbb2_pd_e200_in12k_ft_in1k,256,84.564,15.436,97.108,2.892,34.36,0.950,bicubic\nflexivit_base.600ep_in1k,240,84.564,15.436,96.910,3.090,86.59,0.950,bicubic\ntf_efficientnetv2_m.in1k,384,84.554,15.446,97.058,2.942,54.14,1.000,bicubic\ndeit3_medium_patch16_224.fb_in22k_ft_in1k,224,84.550,15.450,97.180,2.820,38.85,1.000,bicubic\nresnest269e.in1k,416,84.542,15.458,96.988,3.012,110.93,0.928,bicubic\nvit_base_patch16_224.augreg_in21k_ft_in1k,224,84.534,15.466,97.300,2.700,86.57,0.900,bicubic\nhiera_base_224.mae_in1k_ft_in1k,224,84.528,15.472,97.022,2.978,51.52,0.900,bicubic\ncaformer_s36.sail_in1k,224,84.528,15.472,97.010,2.990,39.30,1.000,bicubic\nmambaout_small.in1k,288,84.512,15.488,96.970,3.030,48.49,1.000,bicubic\nregnetz_040_h.ra3_in1k,320,84.498,15.502,97.006,2.994,28.94,1.000,bicubic\nmaxvit_rmlp_small_rw_224.sw_in1k,224,84.488,15.512,96.772,3.228,64.90,0.900,bicubic\nconvformer_m36.sail_in1k,224,84.486,15.514,96.866,3.134,57.05,1.000,bicubic\nhrnet_w48_ssld.paddle_in1k,288,84.484,15.516,97.224,2.776,77.47,1.000,bilinear\nswin_base_patch4_window12_384.ms_in1k,384,84.478,15.522,96.
886,3.114,87.90,1.000,bicubic\nresnetv2_152x2_bit.goog_in21k_ft_in1k,448,84.468,15.532,97.432,2.568,236.34,1.000,bilinear\nconvnext_base.fb_in1k,288,84.466,15.534,96.982,3.018,88.59,1.000,bicubic\nefficientvit_l1.r224_in1k,224,84.464,15.536,96.860,3.140,52.65,1.000,bicubic\nhgnetv2_b5.ssld_stage1_in22k_in1k,224,84.456,15.544,97.218,2.782,39.57,0.965,bicubic\ngcvit_base.in1k,224,84.456,15.544,96.848,3.152,90.32,0.875,bicubic\nmaxvit_small_tf_224.in1k,224,84.452,15.548,96.824,3.176,68.93,0.950,bicubic\nconvnext_tiny.in12k_ft_in1k,288,84.448,15.552,97.328,2.672,28.59,1.000,bicubic\nresnetrs200.tf_in1k,320,84.448,15.552,97.074,2.926,93.21,1.000,bicubic\nmambaout_base_wide_rw.sw_e500_in1k,224,84.448,15.552,96.872,3.128,94.45,0.950,bicubic\nvit_medium_patch16_gap_256.sw_in12k_ft_in1k,256,84.442,15.558,97.214,2.786,38.86,0.950,bicubic\nresnetrs270.tf_in1k,352,84.438,15.562,96.972,3.028,129.86,1.000,bicubic\nmambaout_base_tall_rw.sw_e500_in1k,224,84.436,15.564,96.956,3.044,86.48,0.950,bicubic\nvit_large_r50_s32_224.augreg_in21k_ft_in1k,224,84.434,15.566,97.152,2.848,328.99,0.900,bicubic\nresnetv2_101x3_bit.goog_in21k_ft_in1k,448,84.430,15.570,97.380,2.620,387.93,1.000,bilinear\nconvnextv2_tiny.fcmae_ft_in22k_in1k,288,84.428,15.572,97.260,2.740,28.64,1.000,bicubic\nregnety_1280.seer_ft_in1k,384,84.426,15.574,97.086,2.914,644.81,1.000,bicubic\ntf_efficientnet_b7.aa_in1k,600,84.424,15.576,96.908,3.092,66.35,0.949,bicubic\nmvitv2_base.fb_in1k,224,84.424,15.576,96.864,3.136,51.47,0.900,bicubic\nefficientnet_h_b5.sw_r448_e450_in1k,448,84.418,15.582,97.140,2.860,45.88,1.000,bicubic\nregnetz_e8.ra3_in1k,256,84.418,15.582,96.988,3.012,57.70,0.940,bicubic\nresmlp_big_24_224.fb_in22k_ft_in1k,224,84.414,15.586,97.108,2.892,129.14,0.875,bicubic\nflexivit_base.300ep_in1k,240,84.402,15.598,96.884,3.116,86.59,0.950,bicubic\nconvformer_s18.sail_in1k_384,384,84.392,15.608,97.110,2.890,26.77,1.000,bicubic\nseresnext101d_32x8d.ah_in1k,288,84.380,15.620,96.918,3.082,93.59,1.000,bicubic\nxcit_la
rge_24_p8_224.fb_in1k,224,84.380,15.620,96.664,3.336,188.93,1.000,bicubic\nhgnet_small.ssld_in1k,288,84.368,15.632,97.130,2.870,24.36,1.000,bicubic\nrdnet_base.nv_in1k,224,84.366,15.634,96.878,3.122,87.45,0.900,bicubic\nmambaout_base_short_rw.sw_e500_in1k,224,84.364,15.636,96.944,3.056,88.83,0.950,bicubic\nmobilenetv4_hybrid_large.ix_e600_r384_in1k,448,84.352,15.648,96.888,3.112,37.76,1.000,bicubic\nseresnet152d.ra2_in1k,320,84.344,15.656,97.044,2.956,66.84,1.000,bicubic\ntf_efficientnetv2_s.in21k_ft_in1k,384,84.332,15.668,97.274,2.726,21.46,1.000,bicubic\nvit_mediumd_patch16_rope_reg1_gap_256.sbb_in1k,256,84.316,15.684,96.810,3.190,63.95,0.950,bicubic\nmobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k,384,84.314,15.686,97.102,2.898,32.59,0.950,bicubic\nresnetrs350.tf_in1k,288,84.304,15.696,96.942,3.058,163.96,1.000,bicubic\nconvnext_large.fb_in1k,224,84.304,15.696,96.892,3.108,197.77,0.875,bicubic\nefficientnetv2_rw_m.agc_in1k,320,84.296,15.704,96.872,3.128,53.24,1.000,bicubic\nxcit_medium_24_p16_224.fb_dist_in1k,224,84.292,15.708,96.936,3.064,84.40,1.000,bicubic\nvit_base_patch16_224_miil.in21k_ft_in1k,224,84.278,15.722,96.798,3.202,86.54,0.875,bilinear\nresnext101_32x8d.fb_swsl_ig1b_ft_in1k,224,84.276,15.724,97.188,2.812,88.79,0.875,bilinear\nmobilenetv4_hybrid_large.e600_r384_in1k,448,84.264,15.736,96.956,3.044,37.76,1.000,bicubic\ntf_efficientnet_b5.ap_in1k,456,84.258,15.742,96.984,3.016,30.39,0.934,bicubic\ndavit_small.msft_in1k,224,84.252,15.748,96.942,3.058,49.75,0.950,bicubic\nxcit_small_12_p8_224.fb_dist_in1k,224,84.252,15.748,96.874,3.126,26.21,1.000,bicubic\nswinv2_base_window8_256.ms_in1k,256,84.246,15.754,96.902,3.098,87.92,0.900,bicubic\nregnetz_040.ra3_in1k,320,84.242,15.758,96.936,3.064,27.12,1.000,bicubic\nresnetrs420.tf_in1k,320,84.240,15.760,96.862,3.138,191.89,1.000,bicubic\nmaxvit_rmlp_tiny_rw_256.sw_in1k,256,84.226,15.774,96.782,3.218,29.15,0.950,bicubic\nswinv2_small_window16_256.ms_in1k,256,84.216,15.784,96.874,3.126,49.73,0.900,bicubic\nvit
_base_patch16_384.orig_in21k_ft_in1k,384,84.212,15.788,97.210,2.790,86.86,1.000,bicubic\nefficientnet_x_b5.sw_r448_e450_in1k,448,84.202,15.798,96.972,3.028,33.44,1.000,bicubic\ncrossvit_18_dagger_408.in1k,408,84.198,15.802,96.834,3.166,44.61,1.000,bicubic\nmambaout_base.in1k,224,84.194,15.806,96.668,3.332,84.81,1.000,bicubic\nconvnext_tiny.in12k_ft_in1k,224,84.192,15.808,97.132,2.868,28.59,0.950,bicubic\nseresnext101_32x8d.ah_in1k,288,84.192,15.808,96.860,3.140,93.57,1.000,bicubic\neca_nfnet_l2.ra3_in1k,320,84.170,15.830,97.016,2.984,56.72,0.900,bicubic\nresnext101_32x16d.fb_wsl_ig1b_ft_in1k,224,84.166,15.834,97.192,2.808,194.03,0.875,bilinear\nefficientvit_b3.r288_in1k,288,84.162,15.838,96.732,3.268,48.65,1.000,bicubic\nvolo_d1_224.sail_in1k,224,84.148,15.852,96.772,3.228,26.63,0.960,bicubic\nregnetz_d8_evos.ch_in1k,320,84.138,15.862,97.018,2.982,23.46,1.000,bicubic\ntf_efficientnet_b6.aa_in1k,528,84.124,15.876,96.890,3.110,43.04,0.942,bicubic\nresnetaa101d.sw_in12k_ft_in1k,288,84.114,15.886,97.114,2.886,44.57,1.000,bicubic\ndm_nfnet_f2.dm_in1k,256,84.100,15.900,96.910,3.090,193.78,0.920,bicubic\nconvnext_tiny.fb_in22k_ft_in1k_384,384,84.098,15.902,97.138,2.862,28.59,1.000,bicubic\ninception_next_base.sail_in1k,224,84.098,15.902,96.782,3.218,86.67,0.950,bicubic\nmambaout_small.in1k,224,84.094,15.906,96.626,3.374,48.49,1.000,bicubic\ncaformer_s18.sail_in22k_ft_in1k,224,84.092,15.908,97.182,2.818,26.34,1.000,bicubic\nedgenext_base.in21k_ft_in1k,320,84.076,15.924,97.208,2.792,18.51,1.000,bicubic\nconvformer_s36.sail_in1k,224,84.068,15.932,96.744,3.256,40.01,1.000,bicubic\nvit_small_r26_s32_384.augreg_in21k_ft_in1k,384,84.066,15.934,97.310,2.690,36.47,1.000,bicubic\ncait_xs24_384.fb_dist_in1k,384,84.064,15.936,96.888,3.112,26.67,1.000,bicubic\neca_nfnet_l1.ra2_in1k,320,84.062,15.938,97.026,2.974,41.41,1.000,bicubic\ntf_efficientnet_b3.ns_jft_in1k,300,84.054,15.946,96.926,3.074,12.23,0.904,bicubic\nregnetz_d8.ra3_in1k,320,84.044,15.956,97.004,2.996,23.37,1.000,bicubic\n
mambaout_small_rw.sw_e450_in1k,224,84.040,15.960,96.742,3.258,48.50,1.000,bicubic\nrexnetr_300.sw_in12k_ft_in1k,224,84.036,15.964,96.994,3.006,34.81,0.950,bicubic\nnextvit_small.bd_in1k_384,384,84.036,15.964,96.992,3.008,31.76,1.000,bicubic\nregnetz_d32.ra3_in1k,320,84.028,15.972,96.872,3.128,27.58,0.950,bicubic\nfastvit_sa36.apple_dist_in1k,256,84.028,15.972,96.850,3.150,31.53,0.900,bicubic\nmobilenetv4_hybrid_large.ix_e600_r384_in1k,384,83.996,16.004,96.714,3.286,37.76,0.950,bicubic\nresnetv2_50x3_bit.goog_in21k_ft_in1k,448,83.982,16.018,97.130,2.870,217.32,1.000,bilinear\nresnet200d.ra2_in1k,320,83.976,16.024,96.820,3.180,64.69,1.000,bicubic\nedgenext_base.usi_in1k,320,83.970,16.030,96.770,3.230,18.51,1.000,bicubic\nhgnetv2_b4.ssld_stage2_ft_in1k,288,83.952,16.048,97.066,2.934,19.80,1.000,bicubic\nregnety_080.ra3_in1k,288,83.940,16.060,96.884,3.116,39.18,1.000,bicubic\nregnety_640.seer_ft_in1k,384,83.924,16.076,96.926,3.074,281.38,1.000,bicubic\nswin_s3_base_224.ms_in1k,224,83.920,16.080,96.650,3.350,71.13,0.900,bicubic\ntresnet_v2_l.miil_in21k_ft_in1k,224,83.910,16.090,96.492,3.508,46.17,0.875,bilinear\nvit_betwixt_patch16_rope_reg4_gap_256.sbb_in1k,256,83.906,16.094,96.678,3.322,60.23,0.950,bicubic\nconvnextv2_tiny.fcmae_ft_in22k_in1k,224,83.904,16.096,96.964,3.036,28.64,0.875,bicubic\ngcvit_small.in1k,224,83.904,16.096,96.662,3.338,51.09,0.875,bicubic\ntf_efficientnetv2_s.in1k,384,83.900,16.100,96.702,3.298,21.46,1.000,bicubic\nseresnextaa101d_32x8d.ah_in1k,224,83.900,16.100,96.648,3.352,93.59,0.950,bicubic\nvit_base_patch16_rope_mixed_ape_224.naver_in1k,224,83.898,16.102,96.752,3.248,86.59,0.900,bicubic\nhiera_small_224.mae_in1k_ft_in1k,224,83.888,16.112,96.668,3.332,35.01,0.900,bicubic\nresnetrs200.tf_in1k,256,83.884,16.116,96.762,3.238,93.21,1.000,bicubic\nfastvit_ma36.apple_in1k,256,83.884,16.116,96.756,3.244,44.07,0.950,bicubic\nresnest200e.in1k,320,83.880,16.120,96.880,3.120,70.20,0.909,bicubic\nxcit_small_24_p16_224.fb_dist_in1k,224,83.880,16.120,96.734
,3.266,47.67,1.000,bicubic\nvit_base_patch16_rope_reg1_gap_256.sbb_in1k,256,83.872,16.128,96.676,3.324,86.43,0.950,bicubic\nresnetv2_152x2_bit.goog_teacher_in21k_ft_in1k_384,384,83.858,16.142,97.130,2.870,236.34,1.000,bicubic\nxcit_small_24_p8_224.fb_in1k,224,83.854,16.146,96.636,3.364,47.63,1.000,bicubic\nswinv2_small_window8_256.ms_in1k,256,83.844,16.156,96.650,3.350,49.73,0.900,bicubic\ncrossvit_15_dagger_408.in1k,408,83.842,16.158,96.786,3.214,28.50,1.000,bicubic\nconvnext_base.fb_in1k,224,83.838,16.162,96.746,3.254,88.59,0.875,bicubic\ntf_efficientnet_b5.ra_in1k,456,83.834,16.166,96.764,3.236,30.39,0.934,bicubic\nefficientnetv2_rw_s.ra2_in1k,384,83.830,16.170,96.712,3.288,23.94,1.000,bicubic\nfocalnet_base_lrf.ms_in1k,224,83.828,16.172,96.610,3.390,88.75,0.900,bicubic\nmobilenetv4_hybrid_large.e600_r384_in1k,384,83.826,16.174,96.748,3.252,37.76,0.950,bicubic\nmobilenetv4_conv_aa_large.e600_r384_in1k,480,83.826,16.174,96.720,3.280,32.59,1.000,bicubic\nhgnet_small.ssld_in1k,224,83.818,16.182,96.846,3.154,24.36,0.965,bicubic\nvit_small_patch16_384.augreg_in21k_ft_in1k,384,83.814,16.186,97.106,2.894,22.20,1.000,bicubic\nvit_medium_patch16_rope_reg1_gap_256.sbb_in1k,256,83.810,16.190,96.824,3.176,38.74,0.950,bicubic\nfocalnet_base_srf.ms_in1k,224,83.808,16.192,96.660,3.340,88.15,0.900,bicubic\nregnety_160.swag_lc_in1k,224,83.806,16.194,97.284,2.716,83.59,0.965,bicubic\nvit_base_patch16_rope_mixed_224.naver_in1k,224,83.806,16.194,96.722,3.278,86.44,0.900,bicubic\nvit_little_patch16_reg1_gap_256.sbb_in12k_ft_in1k,256,83.800,16.200,96.972,3.028,22.52,0.950,bicubic\ndeit3_base_patch16_224.fb_in1k,224,83.800,16.200,96.584,3.416,86.59,0.900,bicubic\nxcit_tiny_24_p8_384.fb_dist_in1k,384,83.784,16.216,96.716,3.284,12.11,1.000,bicubic\nefficientvit_b3.r256_in1k,256,83.782,16.218,96.498,3.502,48.65,1.000,bicubic\nvit_base_patch16_rope_ape_224.naver_in1k,224,83.780,16.220,96.610,3.390,86.59,0.900,bicubic\nswin_s3_small_224.ms_in1k,224,83.778,16.222,96.460,3.540,49.74,0.900,bic
ubic\npit_b_distilled_224.in1k,224,83.774,16.226,96.468,3.532,74.79,0.900,bicubic\nmvitv2_small.fb_in1k,224,83.772,16.228,96.558,3.442,34.87,0.900,bicubic\nconvformer_s18.sail_in22k_ft_in1k,224,83.768,16.232,97.046,2.954,26.77,1.000,bicubic\nregnetz_040_h.ra3_in1k,256,83.764,16.236,96.700,3.300,28.94,1.000,bicubic\npvt_v2_b5.in1k,224,83.760,16.240,96.634,3.366,81.96,0.900,bicubic\nregnetv_064.ra3_in1k,288,83.746,16.254,96.746,3.254,30.58,1.000,bicubic\nrepvit_m2_3.dist_450e_in1k,224,83.746,16.254,96.650,3.350,23.69,0.950,bicubic\nxcit_medium_24_p8_224.fb_in1k,224,83.744,16.256,96.394,3.606,84.32,1.000,bicubic\ntwins_svt_large.in1k,224,83.718,16.282,96.600,3.400,99.27,0.900,bicubic\nregnety_064.ra3_in1k,288,83.716,16.284,96.724,3.276,30.58,1.000,bicubic\nvit_base_patch16_rope_224.naver_in1k,224,83.714,16.286,96.674,3.326,86.43,0.900,bicubic\npvt_v2_b4.in1k,224,83.710,16.290,96.672,3.328,62.56,0.900,bicubic\nvit_betwixt_patch16_reg4_gap_256.sbb_in1k,256,83.710,16.290,96.616,3.384,60.40,0.950,bicubic\nresnetrs152.tf_in1k,320,83.710,16.290,96.614,3.386,86.62,1.000,bicubic\nconvnext_small.fb_in1k,288,83.704,16.296,96.816,3.184,50.22,1.000,bicubic\nregnety_160.deit_in1k,288,83.704,16.296,96.790,3.210,83.59,1.000,bicubic\nseresnet152d.ra2_in1k,256,83.702,16.298,96.794,3.206,66.84,0.950,bicubic\nseresnext101d_32x8d.ah_in1k,224,83.702,16.298,96.620,3.380,93.59,0.950,bicubic\nhgnetv2_b4.ssld_stage2_ft_in1k,224,83.700,16.300,96.788,3.212,19.80,0.965,bicubic\ntf_efficientnet_b5.aa_in1k,456,83.696,16.304,96.712,3.288,30.39,0.934,bicubic\nresnet152d.ra2_in1k,320,83.684,16.316,96.750,3.250,60.21,1.000,bicubic\nedgenext_base.usi_in1k,256,83.684,16.316,96.698,3.302,18.51,0.950,bicubic\ncaformer_s18.sail_in1k,224,83.664,16.336,96.526,3.474,26.34,1.000,bicubic\nrdnet_small.nv_in1k,224,83.658,16.342,96.704,3.296,50.44,0.900,bicubic\nhrnet_w48_ssld.paddle_in1k,224,83.656,16.344,96.848,3.152,77.47,0.950,bilinear\nnextvit_large.bd_in1k,224,83.654,16.346,96.682,3.318,57.87,0.950,bicubic\nv
it_betwixt_patch16_reg1_gap_256.sbb_in1k,256,83.652,16.348,96.546,3.454,60.40,0.950,bicubic\nregnetz_040.ra3_in1k,256,83.644,16.356,96.682,3.318,27.12,1.000,bicubic\nefficientformerv2_l.snap_dist_in1k,224,83.642,16.358,96.546,3.454,26.32,0.950,bicubic\ncoatnet_1_rw_224.sw_in1k,224,83.620,16.380,96.382,3.618,41.72,0.950,bicubic\nseresnext101_32x8d.ah_in1k,224,83.618,16.382,96.412,3.588,93.57,0.950,bicubic\nswin_base_patch4_window7_224.ms_in1k,224,83.604,16.396,96.450,3.550,87.77,0.900,bicubic\ncoat_lite_medium.in1k,224,83.602,16.398,96.714,3.286,44.57,0.900,bicubic\nresnetrs270.tf_in1k,256,83.596,16.404,96.616,3.384,129.86,1.000,bicubic\nresmlp_big_24_224.fb_distilled_in1k,224,83.594,16.406,96.658,3.342,129.14,0.875,bicubic\nhgnetv2_b3.ssld_stage2_ft_in1k,288,83.588,16.412,96.812,3.188,16.29,1.000,bicubic\ninception_next_small.sail_in1k,224,83.584,16.416,96.598,3.402,49.37,0.875,bicubic\nnest_base_jx.goog_in1k,224,83.552,16.448,96.364,3.636,67.72,0.875,bicubic\nrepvgg_d2se.rvgg_in1k,320,83.550,16.450,96.666,3.334,133.33,1.000,bilinear\ncs3se_edgenet_x.c2ns_in1k,320,83.550,16.450,96.662,3.338,50.72,1.000,bicubic\nregnetz_d8.ra3_in1k,256,83.546,16.454,96.746,3.254,23.37,0.940,bicubic\nresnetaa101d.sw_in12k_ft_in1k,224,83.544,16.456,96.828,3.172,44.57,0.950,bicubic\nrepvit_m2_3.dist_300e_in1k,224,83.524,16.476,96.532,3.468,23.69,0.950,bicubic\nmaxvit_tiny_rw_224.sw_in1k,224,83.522,16.478,96.488,3.512,29.06,0.950,bicubic\nfasternet_l.in1k,224,83.516,16.484,96.658,3.342,93.47,1.000,bicubic\nfocalnet_small_lrf.ms_in1k,224,83.508,16.492,96.586,3.414,50.34,0.900,bicubic\nnaflexvit_base_patch16_par_gap.e300_s576_in1k,384,83.508,16.492,96.370,3.630,86.63,1.000,bicubic\nfastvit_sa36.apple_in1k,256,83.502,16.498,96.632,3.368,31.53,0.900,bicubic\nnextvit_base.bd_in1k,224,83.498,16.502,96.648,3.352,44.82,0.950,bicubic\nresnet152.a1h_in1k,288,83.494,16.506,96.522,3.478,60.19,1.000,bicubic\nswinv2_cr_small_ns_224.sw_in1k,224,83.490,16.510,96.486,3.514,49.70,0.900,bicubic\ncait_s24_2
24.fb_dist_in1k,224,83.484,16.516,96.586,3.414,46.92,1.000,bicubic\nconvnextv2_tiny.fcmae_ft_in1k,288,83.478,16.522,96.718,3.282,28.64,1.000,bicubic\nefficientnet_b4.ra2_in1k,384,83.468,16.532,96.574,3.426,19.34,1.000,bicubic\ndm_nfnet_f0.dm_in1k,256,83.468,16.532,96.570,3.430,71.49,0.900,bicubic\nnaflexvit_base_patch16_parfac_gap.e300_s576_in1k,384,83.468,16.532,96.422,3.578,86.46,1.000,bicubic\nvit_medium_patch16_reg4_gap_256.sbb_in1k,256,83.466,16.534,96.622,3.378,38.88,0.950,bicubic\nvit_medium_patch16_reg1_gap_256.sbb_in1k,256,83.462,16.538,96.548,3.452,38.88,0.950,bicubic\nmambaout_tiny.in1k,288,83.452,16.548,96.540,3.460,26.55,1.000,bicubic\nhgnetv2_b4.ssld_stage1_in22k_in1k,288,83.450,16.550,96.920,3.080,19.80,1.000,bicubic\ndm_nfnet_f1.dm_in1k,224,83.444,16.556,96.634,3.366,132.63,0.910,bicubic\nefficientvit_b3.r224_in1k,224,83.444,16.556,96.338,3.662,48.65,0.950,bicubic\ndeit3_small_patch16_384.fb_in1k,384,83.432,16.568,96.674,3.326,22.21,1.000,bicubic\nregnetz_d32.ra3_in1k,256,83.432,16.568,96.634,3.366,27.58,0.950,bicubic\nmobilevitv2_200.cvnets_in22k_ft_in1k_384,384,83.430,16.570,96.576,3.424,18.45,1.000,bicubic\nfocalnet_small_srf.ms_in1k,224,83.428,16.572,96.432,3.568,49.89,0.900,bicubic\nmaxvit_tiny_tf_224.in1k,224,83.426,16.574,96.584,3.416,30.92,0.950,bicubic\nmobilenetv4_conv_large.e600_r384_in1k,448,83.418,16.582,96.612,3.388,32.59,1.000,bicubic\nedgenext_base.in21k_ft_in1k,256,83.416,16.584,96.800,3.200,18.51,0.950,bicubic\nsequencer2d_l.in1k,224,83.406,16.594,96.504,3.496,54.30,0.875,bicubic\nregnetz_d8_evos.ch_in1k,256,83.404,16.596,96.662,3.338,23.46,0.950,bicubic\ngcvit_tiny.in1k,224,83.400,16.600,96.394,3.606,28.22,0.875,bicubic\nefficientformer_l7.snap_dist_in1k,224,83.396,16.604,96.532,3.468,82.23,0.950,bicubic\nmobilenetv4_hybrid_medium.ix_e550_r384_in1k,448,83.390,16.610,96.766,3.234,11.07,1.000,bicubic\ndeit_base_distilled_patch16_224.fb_in1k,224,83.384,16.616,96.492,3.508,87.34,0.900,bicubic\nfastvit_sa24.apple_dist_in1k,256,83.368,16
.632,96.574,3.426,21.55,0.900,bicubic\nconvnextv2_nano.fcmae_ft_in22k_in1k_384,384,83.366,16.634,96.740,3.260,15.62,1.000,bicubic\nvit_base_patch32_384.augreg_in21k_ft_in1k,384,83.358,16.642,96.828,3.172,88.30,1.000,bicubic\nxcit_small_12_p8_224.fb_in1k,224,83.354,16.646,96.472,3.528,26.21,1.000,bicubic\nxcit_small_12_p16_224.fb_dist_in1k,224,83.354,16.646,96.420,3.580,26.25,1.000,bicubic\ncoatnet_rmlp_1_rw_224.sw_in1k,224,83.350,16.650,96.452,3.548,41.69,0.950,bicubic\ntf_efficientnetv2_s.in21k_ft_in1k,300,83.348,16.652,96.710,3.290,21.46,1.000,bicubic\nresnext101_32x16d.fb_swsl_ig1b_ft_in1k,224,83.340,16.660,96.844,3.156,194.03,0.875,bilinear\nnaflexvit_base_patch16_gap.e300_s576_in1k,384,83.340,16.660,96.366,3.634,86.63,1.000,bicubic\nconvnext_nano.r384_in12k_ft_in1k,384,83.334,16.666,96.624,3.376,15.59,1.000,bicubic\nswin_small_patch4_window7_224.ms_in22k_ft_in1k,224,83.330,16.670,96.996,3.004,49.61,0.900,bicubic\nregnety_320.seer_ft_in1k,384,83.324,16.676,96.712,3.288,145.05,1.000,bicubic\nvit_base_patch32_clip_224.laion2b_ft_in12k_in1k,224,83.304,16.696,96.528,3.472,88.22,0.900,bicubic\nese_vovnet57b.ra4_e3600_r256_in1k,320,83.298,16.702,96.522,3.478,38.61,1.000,bicubic\nresnext101_32x4d.fb_swsl_ig1b_ft_in1k,224,83.268,16.732,96.760,3.240,44.18,0.875,bilinear\ntiny_vit_21m_224.in1k,224,83.268,16.732,96.602,3.398,21.20,0.950,bicubic\neca_nfnet_l1.ra2_in1k,256,83.264,16.736,96.684,3.316,41.41,0.900,bicubic\nmobilenetv4_conv_aa_large.e600_r384_in1k,384,83.264,16.736,96.390,3.610,32.59,0.950,bicubic\nresnet200d.ra2_in1k,256,83.250,16.750,96.552,3.448,64.69,0.950,bicubic\ntf_efficientnet_b4.ap_in1k,380,83.246,16.754,96.392,3.608,19.34,0.922,bicubic\ntiny_vit_11m_224.dist_in22k_ft_in1k,224,83.232,16.768,96.628,3.372,11.00,0.950,bicubic\nswin_small_patch4_window7_224.ms_in1k,224,83.228,16.772,96.326,3.674,49.61,0.900,bicubic\nregnetv_040.ra3_in1k,288,83.204,16.796,96.682,3.318,20.64,1.000,bicubic\nxception65.ra3_in1k,299,83.186,16.814,96.596,3.404,39.92,0.940,bicubic
\ntf_efficientnet_b5.in1k,456,83.170,16.830,96.534,3.466,30.39,0.934,bicubic\nregnety_320.tv2_in1k,224,83.170,16.830,96.416,3.584,145.05,0.965,bicubic\nregnety_080.ra3_in1k,224,83.166,16.834,96.480,3.520,39.18,0.950,bicubic\ntf_efficientnetv2_s.in1k,300,83.162,16.838,96.334,3.666,21.46,1.000,bicubic\nrexnetr_200.sw_in12k_ft_in1k,288,83.160,16.840,96.646,3.354,16.52,1.000,bicubic\nxception65p.ra3_in1k,299,83.160,16.840,96.506,3.494,39.82,0.940,bicubic\nresnet152d.ra2_in1k,256,83.160,16.840,96.370,3.630,60.21,0.950,bicubic\ntwins_pcpvt_large.in1k,224,83.158,16.842,96.626,3.374,60.99,0.900,bicubic\nresnext101_64x4d.c1_in1k,288,83.158,16.842,96.370,3.630,83.46,1.000,bicubic\nconvnext_small.fb_in1k,224,83.144,16.856,96.432,3.568,50.22,0.875,bicubic\nnest_small_jx.goog_in1k,224,83.132,16.868,96.330,3.670,38.35,0.875,bicubic\nswinv2_cr_small_224.sw_in1k,224,83.126,16.874,96.110,3.890,49.70,0.900,bicubic\ntwins_svt_base.in1k,224,83.122,16.878,96.430,3.570,56.07,0.900,bicubic\npvt_v2_b3.in1k,224,83.120,16.880,96.560,3.440,45.24,0.900,bicubic\nhgnetv2_b3.ssld_stage1_in22k_in1k,288,83.118,16.882,96.712,3.288,16.29,1.000,bicubic\nregnetv_064.ra3_in1k,224,83.118,16.882,96.454,3.546,30.58,0.950,bicubic\ndeit_base_patch16_384.fb_in1k,384,83.108,16.892,96.376,3.624,86.86,1.000,bicubic\nmaxxvitv2_nano_rw_256.sw_in1k,256,83.108,16.892,96.332,3.668,23.70,0.950,bicubic\ndeit3_medium_patch16_224.fb_in1k,224,83.090,16.910,96.284,3.716,38.85,0.900,bicubic\ntresnet_m.miil_in21k_ft_in1k,224,83.090,16.910,96.112,3.888,31.39,0.875,bilinear\ndeit3_small_patch16_224.fb_in22k_ft_in1k,224,83.082,16.918,96.774,3.226,22.06,1.000,bicubic\nefficientvit_b2.r288_in1k,288,83.078,16.922,96.286,3.714,24.33,1.000,bicubic\ntresnet_xl.miil_in1k_448,448,83.066,16.934,96.180,3.820,78.44,0.875,bilinear\nregnety_040.ra3_in1k,288,83.048,16.952,96.502,3.498,20.65,1.000,bicubic\ntf_efficientnet_b4.aa_in1k,380,83.040,16.960,96.298,3.702,19.34,0.922,bicubic\nmaxxvit_rmlp_nano_rw_256.sw_in1k,256,83.036,16.964,96.344,3
.656,16.78,0.950,bicubic\nresnet101d.ra2_in1k,320,83.020,16.980,96.456,3.544,44.57,1.000,bicubic\necaresnet101d.miil_in1k,288,83.006,16.994,96.524,3.476,44.57,0.950,bicubic\nregnety_064.ra3_in1k,224,83.006,16.994,96.296,3.704,30.58,0.950,bicubic\nresnetv2_101.a1h_in1k,288,83.002,16.998,96.444,3.556,44.54,1.000,bicubic\nregnety_160.deit_in1k,224,83.000,17.000,96.502,3.498,83.59,0.950,bicubic\nmobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k,320,82.998,17.002,96.676,3.324,11.07,1.000,bicubic\nswiftformer_l3.dist_in1k,224,82.994,17.006,96.236,3.764,28.49,0.950,bicubic\nmaxvit_rmlp_nano_rw_256.sw_in1k,256,82.978,17.022,96.274,3.726,15.50,0.950,bicubic\nresnext101_64x4d.tv_in1k,224,82.978,17.022,96.252,3.748,83.46,0.875,bilinear\nmobilenetv4_conv_large.e600_r384_in1k,384,82.974,17.026,96.244,3.756,32.59,0.950,bicubic\nconvformer_s18.sail_in1k,224,82.968,17.032,96.246,3.754,26.77,1.000,bicubic\nmobilenetv4_hybrid_medium.ix_e550_r384_in1k,384,82.952,17.048,96.470,3.530,11.07,0.950,bicubic\nconvnextv2_tiny.fcmae_ft_in1k,224,82.948,17.052,96.286,3.714,28.64,0.875,bicubic\nmaxvit_nano_rw_256.sw_in1k,256,82.934,17.066,96.224,3.776,15.45,0.950,bicubic\nmobilevitv2_175.cvnets_in22k_ft_in1k_384,384,82.920,17.080,96.450,3.550,14.25,1.000,bicubic\nresnetv2_152x2_bit.goog_teacher_in21k_ft_in1k,224,82.918,17.082,96.568,3.432,236.34,0.875,bicubic\nresnest101e.in1k,256,82.916,17.084,96.310,3.690,48.28,0.875,bilinear\nhgnetv2_b3.ssld_stage2_ft_in1k,224,82.914,17.086,96.360,3.640,16.29,0.965,bicubic\nconvnext_tiny.fb_in22k_ft_in1k,224,82.900,17.100,96.616,3.384,28.59,0.875,bicubic\nresnetrs152.tf_in1k,256,82.900,17.100,96.290,3.710,86.62,1.000,bicubic\nhgnetv2_b4.ssld_stage1_in22k_in1k,224,82.892,17.108,96.632,3.368,19.80,0.965,bicubic\nefficientnetv2_rw_s.ra2_in1k,288,82.888,17.112,96.328,3.672,23.94,1.000,bicubic\nxcit_large_24_p16_224.fb_in1k,224,82.886,17.114,95.880,4.120,189.10,1.000,bicubic\nconvnext_nano.in12k_ft_in1k,288,82.882,17.118,96.562,3.438,15.59,1.000,bicubic\ntnt_b_patc
h16_224.in1k,224,82.864,17.136,96.230,3.770,65.43,0.900,bicubic\nfasternet_m.in1k,224,82.848,17.152,96.388,3.612,53.52,1.000,bicubic\nsequencer2d_m.in1k,224,82.836,17.164,96.276,3.724,38.31,0.875,bicubic\nresnext101_32x8d.tv2_in1k,224,82.832,17.168,96.234,3.766,88.79,0.965,bilinear\nswinv2_tiny_window16_256.ms_in1k,256,82.828,17.172,96.222,3.778,28.35,0.900,bicubic\nhiera_tiny_224.mae_in1k_ft_in1k,224,82.826,17.174,96.188,3.812,27.91,0.900,bicubic\nregnetx_320.tv2_in1k,224,82.824,17.176,96.200,3.800,107.81,0.965,bicubic\nresnetv2_50x1_bit.goog_distilled_in1k,224,82.820,17.180,96.526,3.474,25.55,0.875,bicubic\nresnet152.a1h_in1k,224,82.818,17.182,96.124,3.876,60.19,0.950,bicubic\nrdnet_tiny.nv_in1k,224,82.814,17.186,96.284,3.716,23.86,0.900,bicubic\npnasnet5large.tf_in1k,331,82.814,17.186,96.032,3.968,86.06,0.911,bicubic\nresnet101.a1h_in1k,288,82.796,17.204,96.324,3.676,44.55,1.000,bicubic\nrexnet_300.nav_in1k,224,82.780,17.220,96.230,3.770,34.71,0.875,bicubic\nmobilenetv4_conv_medium.e250_r384_in12k_ft_in1k,384,82.776,17.224,96.448,3.552,9.72,0.950,bicubic\ncs3se_edgenet_x.c2ns_in1k,256,82.768,17.232,96.306,3.694,50.72,0.950,bicubic\nnfnet_l0.ra2_in1k,288,82.766,17.234,96.494,3.506,35.07,1.000,bicubic\nvit_relpos_base_patch16_clsgap_224.sw_in1k,224,82.758,17.242,96.178,3.822,86.43,0.900,bicubic\nmambaout_tiny.in1k,224,82.742,17.258,96.098,3.902,26.55,1.000,bicubic\nresnet152.a1_in1k,288,82.742,17.258,95.734,4.266,60.19,1.000,bicubic\ncs3edgenet_x.c2_in1k,288,82.734,17.266,96.374,3.626,47.82,1.000,bicubic\nregnety_032.ra_in1k,288,82.724,17.276,96.426,3.574,19.44,1.000,bicubic\ntwins_pcpvt_base.in1k,224,82.724,17.276,96.350,3.650,43.83,0.900,bicubic\nresnext101_32x8d.fb_wsl_ig1b_ft_in1k,224,82.720,17.280,96.624,3.376,88.79,0.875,bilinear\nefficientvit_b2.r256_in1k,256,82.712,17.288,96.092,3.908,24.33,1.000,bicubic\nfastvit_sa24.apple_in1k,256,82.706,17.294,96.278,3.722,21.55,0.900,bicubic\ndavit_tiny.msft_in1k,224,82.694,17.306,96.264,3.736,28.36,0.950,bicubic\nconvn
ext_tiny.fb_in1k,288,82.694,17.306,96.144,3.856,28.59,1.000,bicubic\nmobilenetv4_conv_large.e500_r256_in1k,320,82.672,17.328,96.310,3.690,32.59,1.000,bicubic\nconvnextv2_nano.fcmae_ft_in22k_in1k,288,82.666,17.334,96.546,3.454,15.62,1.000,bicubic\nregnetz_c16.ra3_in1k,320,82.662,17.338,96.332,3.668,13.46,1.000,bicubic\nregnetz_c16_evos.ch_in1k,320,82.660,17.340,96.480,3.520,13.49,0.950,bicubic\nefficientnet_b4.ra2_in1k,320,82.658,17.342,96.134,3.866,19.34,0.875,bicubic\nresnetaa50d.sw_in12k_ft_in1k,288,82.656,17.344,96.492,3.508,25.58,1.000,bicubic\ncs3sedarknet_x.c2ns_in1k,288,82.654,17.346,96.348,3.652,35.40,1.000,bicubic\nregnety_160.tv2_in1k,224,82.646,17.354,96.230,3.770,83.59,0.965,bicubic\nnasnetalarge.tf_in1k,331,82.638,17.362,96.066,3.934,88.75,0.911,bicubic\ntf_efficientnetv2_b3.in21k_ft_in1k,300,82.634,17.366,96.628,3.372,14.36,0.900,bicubic\nxcit_medium_24_p16_224.fb_in1k,224,82.630,17.370,95.978,4.022,84.40,1.000,bicubic\nnextvit_small.bd_in1k,224,82.626,17.374,96.224,3.776,31.76,0.950,bicubic\nvit_base_patch32_clip_224.laion2b_ft_in1k,224,82.614,17.386,96.206,3.794,88.22,0.900,bicubic\nresnet152.a2_in1k,288,82.614,17.386,95.748,4.252,60.19,1.000,bicubic\npoolformerv2_m48.sail_in1k,224,82.612,17.388,96.068,3.932,73.35,1.000,bicubic\nconvnext_tiny_hnf.a2h_in1k,288,82.608,17.392,96.024,3.976,28.59,1.000,bicubic\nmobilevitv2_150.cvnets_in22k_ft_in1k_384,384,82.606,17.394,96.314,3.686,10.59,1.000,bicubic\nrexnetr_200.sw_in12k_ft_in1k,224,82.602,17.398,96.396,3.604,16.52,0.950,bicubic\nlevit_384.fb_dist_in1k,224,82.600,17.400,96.020,3.980,39.13,0.900,bicubic\nlevit_conv_384.fb_dist_in1k,224,82.600,17.400,96.020,3.980,39.13,0.900,bicubic\ntf_efficientnet_b4.in1k,380,82.598,17.402,96.132,3.868,19.34,0.922,bicubic\nhgnetv2_b3.ssld_stage1_in22k_in1k,224,82.594,17.406,96.364,3.636,16.29,0.965,bicubic\neca_nfnet_l0.ra2_in1k,288,82.590,17.410,96.486,3.514,24.14,1.000,bicubic\nxcit_small_24_p16_224.fb_in1k,224,82.588,17.412,95.996,4.004,47.67,1.000,bicubic\nregnety_0
80_tv.tv2_in1k,224,82.580,17.420,96.260,3.740,39.38,0.965,bicubic\nxcit_tiny_24_p16_384.fb_dist_in1k,384,82.580,17.420,96.254,3.746,12.12,1.000,bicubic\nxcit_tiny_24_p8_224.fb_dist_in1k,224,82.578,17.422,96.186,3.814,12.11,1.000,bicubic\nflexivit_small.1200ep_in1k,240,82.576,17.424,96.152,3.848,22.06,0.950,bicubic\nregnetx_160.tv2_in1k,224,82.574,17.426,96.058,3.942,54.28,0.965,bicubic\nefficientformer_l3.snap_dist_in1k,224,82.562,17.438,96.250,3.750,31.41,0.950,bicubic\nvit_relpos_medium_patch16_cls_224.sw_in1k,224,82.558,17.442,96.076,3.924,38.76,0.900,bicubic\nresnet61q.ra2_in1k,288,82.532,17.468,96.132,3.868,36.85,1.000,bicubic\nhgnet_tiny.ssld_in1k,288,82.528,17.472,96.520,3.480,14.74,1.000,bicubic\nvit_little_patch16_reg4_gap_256.sbb_in1k,256,82.520,17.480,96.258,3.742,22.52,0.950,bicubic\nrepvit_m1_5.dist_450e_in1k,224,82.520,17.480,96.112,3.888,14.64,0.950,bicubic\nwide_resnet101_2.tv2_in1k,224,82.514,17.486,96.012,3.988,126.89,0.965,bilinear\ninception_next_tiny.sail_in1k,224,82.508,17.492,96.000,4.000,28.06,0.875,bicubic\nmobilenetv4_hybrid_medium.ix_e550_r256_in1k,320,82.500,17.500,96.286,3.714,11.07,1.000,bicubic\ncrossvit_18_dagger_240.in1k,240,82.494,17.506,96.070,3.930,44.27,0.875,bicubic\nconvnextv2_nano.fcmae_ft_in1k,288,82.490,17.510,96.226,3.774,15.62,1.000,bicubic\ngc_efficientnetv2_rw_t.agc_in1k,288,82.486,17.514,96.302,3.698,13.68,1.000,bicubic\nvit_relpos_base_patch16_224.sw_in1k,224,82.482,17.518,96.144,3.856,86.43,0.900,bicubic\nvit_relpos_medium_patch16_224.sw_in1k,224,82.462,17.538,96.086,3.914,38.75,0.900,bicubic\npoolformer_m48.sail_in1k,224,82.462,17.538,95.968,4.032,73.47,0.950,bicubic\nese_vovnet57b.ra4_e3600_r256_in1k,256,82.460,17.540,96.006,3.994,38.61,0.950,bicubic\npit_b_224.in1k,224,82.458,17.542,95.704,4.296,73.76,0.900,bicubic\nresnext101_64x4d.c1_in1k,224,82.446,17.554,95.920,4.080,83.46,0.950,bicubic\nregnetv_040.ra3_in1k,224,82.440,17.560,96.202,3.798,20.64,0.950,bicubic\ntf_efficientnet_b2.ns_jft_in1k,260,82.412,17.588,96.
238,3.762,9.11,0.890,bicubic\nmvitv2_tiny.fb_in1k,224,82.408,17.592,96.150,3.850,24.17,0.900,bicubic\ncoatnet_0_rw_224.sw_in1k,224,82.396,17.604,95.844,4.156,27.44,0.950,bicubic\nxcit_tiny_12_p8_384.fb_dist_in1k,384,82.394,17.606,96.222,3.778,6.71,1.000,bicubic\ncoatnet_bn_0_rw_224.sw_in1k,224,82.394,17.606,96.194,3.806,27.44,0.950,bicubic\ncrossvit_18_240.in1k,240,82.390,17.610,96.064,3.936,43.27,0.875,bicubic\nflexivit_small.600ep_in1k,240,82.388,17.612,96.042,3.958,22.06,0.950,bicubic\nrepvit_m1_5.dist_300e_in1k,224,82.382,17.618,96.026,3.974,14.64,0.950,bicubic\ncoat_small.in1k,224,82.378,17.622,96.214,3.786,21.69,0.900,bicubic\necaresnet50t.ra2_in1k,320,82.370,17.630,96.120,3.880,25.57,0.950,bicubic\nhgnetv2_b2.ssld_stage2_ft_in1k,288,82.362,17.638,96.412,3.588,11.22,1.000,bicubic\nmobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k,256,82.360,17.640,96.256,3.744,11.07,0.950,bicubic\nresnet51q.ra2_in1k,288,82.356,17.644,96.182,3.818,35.70,1.000,bilinear\nefficientnetv2_rw_t.ra2_in1k,288,82.354,17.646,96.178,3.822,13.65,1.000,bicubic\nsequencer2d_s.in1k,224,82.336,17.664,96.032,3.968,27.65,0.875,bicubic\nresnetv2_101x1_bit.goog_in21k_ft_in1k,448,82.332,17.668,96.524,3.476,44.54,1.000,bilinear\nresnet101.a1_in1k,288,82.326,17.674,95.630,4.370,44.55,1.000,bicubic\nmobilevitv2_200.cvnets_in22k_ft_in1k,256,82.320,17.680,95.942,4.058,18.45,0.888,bicubic\nvit_relpos_medium_patch16_rpn_224.sw_in1k,224,82.314,17.686,95.986,4.014,38.73,0.900,bicubic\nmixer_b16_224.miil_in21k_ft_in1k,224,82.314,17.686,95.710,4.290,59.88,0.875,bilinear\nregnety_040.ra3_in1k,224,82.308,17.692,96.078,3.922,20.65,0.950,bicubic\nconvnext_nano.in12k_ft_in1k,224,82.306,17.694,96.338,3.662,15.59,0.950,bicubic\ncoat_lite_small.in1k,224,82.304,17.696,95.854,4.146,19.84,0.900,bicubic\ncrossvit_15_dagger_240.in1k,240,82.302,17.698,95.960,4.040,28.21,0.875,bicubic\nefficientnet_b3.ra2_in1k,320,82.296,17.704,96.124,3.876,12.23,1.000,bicubic\nconvit_base.fb_in1k,224,82.296,17.704,95.936,4.064,86.54,0.875,
bicubic\nresnet152.tv2_in1k,224,82.294,17.706,96.000,4.000,60.19,0.965,bilinear\nresnetrs101.tf_in1k,288,82.288,17.712,96.000,4.000,63.62,0.940,bicubic\ntresnet_l.miil_in1k_448,448,82.278,17.722,95.988,4.012,55.99,0.875,bilinear\nwide_resnet50_2.racm_in1k,288,82.260,17.740,96.062,3.938,68.88,0.950,bicubic\nvit_srelpos_medium_patch16_224.sw_in1k,224,82.256,17.744,95.932,4.068,38.74,0.900,bicubic\nresnet101d.ra2_in1k,256,82.254,17.746,96.070,3.930,44.57,0.950,bicubic\ncs3darknet_x.c2ns_in1k,288,82.234,17.766,96.234,3.766,35.05,1.000,bicubic\nconvnext_tiny_hnf.a2h_in1k,224,82.230,17.770,95.860,4.140,28.59,0.950,bicubic\nhgnet_small.paddle_in1k,288,82.222,17.778,96.224,3.776,24.36,1.000,bicubic\nvit_base_patch16_rpn_224.sw_in1k,224,82.218,17.782,95.996,4.004,86.54,0.900,bicubic\nresnet101.a2_in1k,288,82.218,17.782,95.740,4.260,44.55,1.000,bicubic\npoolformerv2_m36.sail_in1k,224,82.214,17.786,95.896,4.104,56.08,1.000,bicubic\ncrossvit_base_240.in1k,240,82.212,17.788,95.826,4.174,105.03,0.875,bicubic\ncait_xxs36_384.fb_dist_in1k,384,82.204,17.796,96.148,3.852,17.37,1.000,bicubic\npvt_v2_b2_li.in1k,224,82.200,17.800,96.108,3.892,22.55,0.900,bicubic\ncs3edgenet_x.c2_in1k,256,82.198,17.802,95.956,4.044,47.82,0.887,bicubic\nseresnext50_32x4d.racm_in1k,288,82.186,17.814,96.146,3.854,27.56,0.950,bicubic\necaresnet101d.miil_in1k,224,82.180,17.820,96.062,3.938,44.57,0.875,bicubic\nfocalnet_tiny_lrf.ms_in1k,224,82.174,17.826,95.948,4.052,28.65,0.900,bicubic\nflexivit_small.300ep_in1k,240,82.170,17.830,96.032,3.968,22.06,0.950,bicubic\nresnext50_32x4d.fb_swsl_ig1b_ft_in1k,224,82.162,17.838,96.224,3.776,25.03,0.875,bilinear\nregnetz_c16.ra3_in1k,256,82.162,17.838,96.056,3.944,13.46,0.940,bicubic\nefficientvit_b2.r224_in1k,224,82.162,17.838,95.710,4.290,24.33,0.950,bicubic\nefficientformerv2_s2.snap_dist_in1k,224,82.156,17.844,95.912,4.088,12.71,0.950,bicubic\nfocalnet_tiny_srf.ms_in1k,224,82.146,17.854,95.972,4.028,28.43,0.900,bicubic\nswin_s3_tiny_224.ms_in1k,224,82.128,17.872,95.9
58,4.042,28.33,0.900,bicubic\necaresnet50t.a1_in1k,288,82.110,17.890,95.654,4.346,25.57,1.000,bicubic\npoolformer_m36.sail_in1k,224,82.104,17.896,95.698,4.302,56.17,0.950,bicubic\nvisformer_small.in1k,224,82.092,17.908,95.876,4.124,40.22,0.900,bicubic\nhalo2botnet50ts_256.a1h_in1k,256,82.084,17.916,95.658,4.342,22.64,0.950,bicubic\nconvnext_tiny.fb_in1k,224,82.066,17.934,95.854,4.146,28.59,0.875,bicubic\ncoatnet_rmlp_nano_rw_224.sw_in1k,224,82.064,17.936,95.874,4.126,15.15,0.900,bicubic\npvt_v2_b2.in1k,224,82.062,17.938,95.966,4.034,25.36,0.900,bicubic\ntresnet_xl.miil_in1k,224,82.060,17.940,95.932,4.068,78.44,0.875,bilinear\nhrnet_w18_ssld.paddle_in1k,288,82.052,17.948,96.250,3.750,21.30,1.000,bilinear\nresnetv2_101.a1h_in1k,224,82.042,17.958,95.866,4.134,44.54,0.950,bicubic\nconvnextv2_nano.fcmae_ft_in22k_in1k,224,82.038,17.962,96.174,3.826,15.62,0.875,bicubic\nfbnetv3_g.ra2_in1k,288,82.034,17.966,96.064,3.936,16.62,0.950,bilinear\nresnetv2_50d_evos.ah_in1k,288,82.024,17.976,95.916,4.084,25.59,1.000,bicubic\necaresnet101d_pruned.miil_in1k,288,82.016,17.984,96.180,3.820,24.88,0.950,bicubic\nresnext50_32x4d.a1h_in1k,288,82.016,17.984,95.934,4.066,25.03,1.000,bicubic\ncoatnext_nano_rw_224.sw_in1k,224,81.998,18.002,95.888,4.112,14.70,0.900,bicubic\nresnet152.a1_in1k,224,81.998,18.002,95.294,4.706,60.19,0.950,bicubic\nresnext101_32x8d.tv2_in1k,176,81.990,18.010,95.716,4.284,88.79,0.875,bilinear\nxcit_small_12_p16_224.fb_in1k,224,81.982,18.018,95.814,4.186,26.25,1.000,bicubic\ndeit_base_patch16_224.fb_in1k,224,81.980,18.020,95.740,4.260,86.57,0.900,bicubic\nxception41p.ra3_in1k,299,81.978,18.022,95.784,4.216,26.91,0.940,bicubic\nresnet61q.ra2_in1k,256,81.970,18.030,95.848,4.152,36.85,0.900,bicubic\ngcvit_xtiny.in1k,224,81.964,18.036,95.966,4.034,19.98,0.875,bicubic\nresnetv2_50d_gn.ah_in1k,288,81.960,18.040,95.932,4.068,25.57,1.000,bicubic\ntf_efficientnetv2_b3.in1k,300,81.960,18.040,95.780,4.220,14.36,0.904,bicubic\nmobilevitv2_175.cvnets_in22k_ft_in1k,256,81.950,18.05
0,95.796,4.204,14.25,0.888,bicubic\nresnet101.a1h_in1k,224,81.950,18.050,95.760,4.240,44.55,0.950,bicubic\nregnety_032.ra_in1k,224,81.948,18.052,95.972,4.028,19.44,0.950,bicubic\nvit_base_patch32_clip_224.openai_ft_in1k,224,81.944,18.056,95.972,4.028,88.22,0.900,bicubic\nhgnet_tiny.ssld_in1k,224,81.942,18.058,96.118,3.882,14.74,0.965,bicubic\nregnetz_c16_evos.ch_in1k,256,81.940,18.060,96.150,3.850,13.49,0.950,bicubic\ncs3sedarknet_x.c2ns_in1k,256,81.926,18.074,96.022,3.978,35.40,0.887,bicubic\nresnet101.tv2_in1k,224,81.906,18.094,95.768,4.232,44.55,0.965,bilinear\nxcit_tiny_24_p8_224.fb_in1k,224,81.888,18.112,95.978,4.022,12.11,1.000,bicubic\nnfnet_l0.ra2_in1k,224,81.870,18.130,96.034,3.966,35.07,0.900,bicubic\nmobilenetv4_conv_large.e500_r256_in1k,256,81.864,18.136,95.696,4.304,32.59,0.950,bicubic\nresnext101_32x16d.fb_ssl_yfcc100m_ft_in1k,224,81.856,18.144,96.084,3.916,194.03,0.875,bilinear\nvit_small_r26_s32_224.augreg_in21k_ft_in1k,224,81.856,18.144,96.038,3.962,36.43,0.900,bicubic\ncs3darknet_x.c2ns_in1k,256,81.856,18.144,95.956,4.044,35.05,0.950,bicubic\nconvnextv2_nano.fcmae_ft_in1k,224,81.856,18.144,95.740,4.260,15.62,0.875,bicubic\ntf_efficientnet_b3.ap_in1k,300,81.842,18.158,95.614,4.386,12.23,0.904,bicubic\nresnet50d.ra4_e3600_r224_in1k,288,81.840,18.160,95.926,4.074,25.58,1.000,bicubic\nfastvit_sa12.apple_dist_in1k,256,81.838,18.162,95.700,4.300,11.58,0.900,bicubic\nmobilenetv3_large_150d.ra4_e3600_r256_in1k,320,81.832,18.168,95.878,4.122,14.62,1.000,bilinear\npit_s_distilled_224.in1k,224,81.826,18.174,95.720,4.280,24.04,0.900,bicubic\nswinv2_tiny_window8_256.ms_in1k,256,81.824,18.176,95.988,4.012,28.35,0.900,bicubic\nswinv2_cr_tiny_ns_224.sw_in1k,224,81.796,18.204,95.824,4.176,28.33,0.900,bicubic\nregnety_032.tv2_in1k,224,81.788,18.212,95.826,4.174,19.44,0.965,bicubic\nvit_base_patch16_224.orig_in21k_ft_in1k,224,81.782,18.218,96.122,3.878,86.57,0.900,bicubic\nresnetaa50d.sw_in12k_ft_in1k,224,81.782,18.218,96.036,3.964,25.58,0.950,bicubic\nresnet51q.ra2_
in1k,256,81.774,18.226,95.932,4.068,35.70,0.875,bilinear\ncs3sedarknet_l.c2ns_in1k,288,81.770,18.230,95.962,4.038,21.91,0.950,bicubic\nresnet152.a2_in1k,224,81.764,18.236,95.270,4.730,60.19,0.950,bicubic\neca_nfnet_l0.ra2_in1k,224,81.762,18.238,95.994,4.006,24.14,0.900,bicubic\ntresnet_m.miil_in1k_448,448,81.710,18.290,95.562,4.438,31.39,0.875,bilinear\ncoatnet_nano_rw_224.sw_in1k,224,81.704,18.296,95.650,4.350,15.14,0.900,bicubic\ntwins_svt_small.in1k,224,81.694,18.306,95.670,4.330,24.06,0.900,bicubic\necaresnet50t.a2_in1k,288,81.682,18.318,95.524,4.476,25.57,1.000,bicubic\necaresnet50d.miil_in1k,288,81.648,18.352,95.884,4.116,25.58,0.950,bicubic\ntf_efficientnet_b3.aa_in1k,300,81.648,18.352,95.724,4.276,12.23,0.904,bicubic\nhalonet50ts.a1h_in1k,256,81.640,18.360,95.612,4.388,22.73,0.940,bicubic\nrexnet_200.nav_in1k,224,81.636,18.364,95.672,4.328,16.37,0.875,bicubic\nresnext101_32x8d.fb_ssl_yfcc100m_ft_in1k,224,81.620,18.380,96.036,3.964,88.79,0.875,bilinear\nresnetaa50.a1h_in1k,288,81.618,18.382,95.794,4.206,25.56,1.000,bicubic\nwide_resnet50_2.tv2_in1k,224,81.612,18.388,95.766,4.234,68.88,0.965,bilinear\nconvnext_nano_ols.d1h_in1k,288,81.612,18.388,95.626,4.374,15.65,1.000,bicubic\npoolformerv2_s36.sail_in1k,224,81.576,18.424,95.680,4.320,30.79,1.000,bicubic\nedgenext_small.usi_in1k,320,81.574,18.426,95.712,4.288,5.59,1.000,bicubic\nhgnetv2_b2.ssld_stage2_ft_in1k,224,81.564,18.436,95.894,4.106,11.22,0.965,bicubic\nregnetx_080.tv2_in1k,224,81.552,18.448,95.540,4.460,39.57,0.965,bicubic\nlamhalobotnet50ts_256.a1h_in1k,256,81.552,18.448,95.510,4.490,22.57,0.950,bicubic\ntf_efficientnet_lite4.in1k,380,81.540,18.460,95.668,4.332,13.01,0.920,bilinear\ntiny_vit_11m_224.in1k,224,81.538,18.462,95.870,4.130,11.00,0.950,bicubic\ncrossvit_15_240.in1k,240,81.522,18.478,95.694,4.306,27.53,0.875,bicubic\ntnt_s_legacy_patch16_224.in1k,224,81.512,18.488,95.742,4.258,23.76,0.900,bicubic\nvit_large_patch32_384.orig_in21k_ft_in1k,384,81.508,18.492,96.086,3.914,306.63,1.000,bicubic\n
tnt_s_patch16_224.in1k,224,81.506,18.494,95.766,4.234,23.77,0.900,bicubic\nlevit_256.fb_dist_in1k,224,81.506,18.494,95.466,4.534,18.89,0.900,bicubic\nlevit_conv_256.fb_dist_in1k,224,81.506,18.494,95.466,4.534,18.89,0.900,bicubic\ntresnet_l.miil_in1k,224,81.498,18.502,95.652,4.348,55.99,0.875,bilinear\nrepvit_m3.dist_in1k,224,81.494,18.506,95.562,4.438,10.68,0.950,bicubic\nresnet101.a1_in1k,224,81.494,18.506,95.164,4.836,44.55,0.950,bicubic\nresnext50_32x4d.a1_in1k,288,81.480,18.520,95.152,4.848,25.03,1.000,bicubic\nmobilenetv4_hybrid_medium.ix_e550_r256_in1k,256,81.478,18.522,95.692,4.308,11.07,0.950,bicubic\nvit_relpos_small_patch16_224.sw_in1k,224,81.476,18.524,95.824,4.176,21.98,0.900,bicubic\nconvnext_nano.d1h_in1k,288,81.474,18.526,95.670,4.330,15.59,1.000,bicubic\nefficientnet_b3.ra2_in1k,288,81.472,18.528,95.696,4.304,12.23,0.875,bicubic\nmobilevitv2_150.cvnets_in22k_ft_in1k,256,81.466,18.534,95.678,4.322,10.59,0.888,bicubic\nwide_resnet50_2.racm_in1k,224,81.466,18.534,95.544,4.456,68.88,0.875,bicubic\nresnet50d.a1_in1k,288,81.466,18.534,95.218,4.782,25.58,1.000,bicubic\ngcresnet50t.ra2_in1k,288,81.458,18.542,95.724,4.276,25.90,1.000,bicubic\nhgnetv2_b2.ssld_stage1_in22k_in1k,288,81.456,18.544,96.016,3.984,11.22,1.000,bicubic\necaresnet50t.ra2_in1k,256,81.450,18.550,95.678,4.322,25.57,0.875,bicubic\nefficientnet_b1.ra4_e3600_r240_in1k,288,81.444,18.556,95.698,4.302,7.79,1.000,bicubic\ndm_nfnet_f0.dm_in1k,192,81.436,18.564,95.672,4.328,71.49,0.900,bicubic\necaresnetlight.miil_in1k,288,81.430,18.570,95.790,4.210,30.16,0.950,bicubic\nnest_tiny_jx.goog_in1k,224,81.428,18.572,95.630,4.370,17.06,0.875,bicubic\npoolformer_s36.sail_in1k,224,81.428,18.572,95.442,4.558,30.86,0.900,bicubic\nhgnet_small.paddle_in1k,224,81.426,18.574,95.846,4.154,24.36,0.965,bicubic\nconvit_small.fb_in1k,224,81.412,18.588,95.740,4.260,27.78,0.875,bicubic\nresnetv2_50.a1h_in1k,288,81.406,18.594,95.712,4.288,25.55,1.000,bicubic\nvit_small_patch16_224.augreg_in21k_ft_in1k,224,81.402,18.598,9
6.150,3.850,22.05,0.900,bicubic\ntf_efficientnet_b1.ns_jft_in1k,240,81.388,18.612,95.756,4.244,7.79,0.882,bicubic\nswin_tiny_patch4_window7_224.ms_in1k,224,81.388,18.612,95.540,4.460,28.29,0.900,bicubic\nconvmixer_1536_20.in1k,224,81.382,18.618,95.618,4.382,51.63,0.960,bicubic\ndeit3_small_patch16_224.fb_in1k,224,81.382,18.618,95.454,4.546,22.06,0.900,bicubic\nefficientnetv2_rw_t.ra2_in1k,224,81.370,18.630,95.550,4.450,13.65,1.000,bicubic\nresnet50d.ra2_in1k,288,81.354,18.646,95.742,4.258,25.58,0.950,bicubic\ngernet_l.idstcv_in1k,256,81.354,18.646,95.558,4.442,31.08,0.875,bilinear\nresnet101.a2_in1k,224,81.344,18.656,95.198,4.802,44.55,0.950,bicubic\nfasternet_s.in1k,224,81.328,18.672,95.678,4.322,31.18,1.000,bicubic\nlegacy_senet154.in1k,224,81.320,18.680,95.502,4.498,115.09,0.875,bilinear\ncoat_mini.in1k,224,81.320,18.680,95.388,4.612,10.34,0.900,bicubic\nrepvit_m1_1.dist_450e_in1k,224,81.314,18.686,95.568,4.432,8.80,0.950,bicubic\nefficientnet_el.ra_in1k,300,81.308,18.692,95.538,4.462,10.59,0.904,bicubic\nresnext50_32x4d.a2_in1k,288,81.304,18.696,95.114,4.886,25.03,1.000,bicubic\nseresnet50.ra2_in1k,288,81.288,18.712,95.652,4.348,28.09,0.950,bicubic\nmobilenetv4_hybrid_medium.e500_r224_in1k,256,81.284,18.716,95.746,4.254,11.07,1.000,bicubic\necaresnet50t.a1_in1k,224,81.282,18.718,95.150,4.850,25.57,0.950,bicubic\ngc_efficientnetv2_rw_t.agc_in1k,224,81.276,18.724,95.580,4.420,13.68,1.000,bicubic\nseresnext50_32x4d.racm_in1k,224,81.268,18.732,95.624,4.376,27.56,0.875,bicubic\nsenet154.gluon_in1k,224,81.258,18.742,95.358,4.642,115.09,0.875,bicubic\nvit_small_patch16_rope_mixed_224.naver_in1k,224,81.252,18.748,95.056,4.944,21.99,0.900,bicubic\nfbnetv3_g.ra2_in1k,240,81.246,18.754,95.686,4.314,16.62,0.950,bilinear\ngcresnext50ts.ch_in1k,288,81.242,18.758,95.536,4.464,15.67,1.000,bicubic\nresnet50.a1_in1k,288,81.240,18.760,95.112,4.888,25.56,1.000,bicubic\nres2net101d.in1k,224,81.236,18.764,95.346,4.654,45.23,0.875,bilinear\nvit_small_patch16_rope_224.naver_in1k,224,81
.222,18.778,95.020,4.980,21.98,0.900,bicubic\nresnet50_gn.a1h_in1k,288,81.216,18.784,95.634,4.366,25.56,0.950,bicubic\nxcit_tiny_12_p8_224.fb_dist_in1k,224,81.214,18.786,95.614,4.386,6.71,1.000,bicubic\ncs3sedarknet_l.c2ns_in1k,256,81.208,18.792,95.798,4.202,21.91,0.887,bicubic\ndeit_small_distilled_patch16_224.fb_in1k,224,81.204,18.796,95.378,4.622,22.44,0.900,bicubic\nresnext50_32x4d.tv2_in1k,224,81.190,18.810,95.344,4.656,25.03,0.965,bilinear\nlambda_resnet50ts.a1h_in1k,256,81.184,18.816,95.094,4.906,21.54,0.950,bicubic\nresnet50d.a2_in1k,288,81.164,18.836,95.100,4.900,25.58,1.000,bicubic\nsebotnet33ts_256.a1h_in1k,256,81.156,18.844,95.160,4.840,13.70,0.940,bicubic\nresmlp_36_224.fb_distilled_in1k,224,81.150,18.850,95.508,4.492,44.69,0.875,bicubic\nmobilevitv2_200.cvnets_in1k,256,81.148,18.852,95.380,4.620,18.45,0.888,bicubic\nresnet50.fb_swsl_ig1b_ft_in1k,224,81.146,18.854,95.980,4.020,25.56,0.875,bilinear\nresnest50d_4s2x40d.in1k,224,81.146,18.854,95.566,4.434,30.42,0.875,bicubic\nresnext50_32x4d.a1h_in1k,224,81.140,18.860,95.320,4.680,25.03,0.950,bicubic\nvit_small_patch16_384.augreg_in1k,384,81.130,18.870,95.590,4.410,22.20,1.000,bicubic\ntwins_pcpvt_small.in1k,224,81.116,18.884,95.624,4.376,24.11,0.900,bicubic\nhrnet_w18_ssld.paddle_in1k,224,81.114,18.886,95.638,4.362,21.30,0.950,bilinear\npit_s_224.in1k,224,81.114,18.886,95.336,4.664,23.46,0.900,bicubic\nseresnet50.a1_in1k,288,81.114,18.886,95.122,4.878,28.09,1.000,bicubic\nvit_srelpos_small_patch16_224.sw_in1k,224,81.108,18.892,95.580,4.420,21.97,0.900,bicubic\nvit_base_patch16_384.augreg_in1k,384,81.102,18.898,95.334,4.666,86.86,1.000,bicubic\nseresnet50.a2_in1k,288,81.094,18.906,95.230,4.770,28.09,1.000,bicubic\ntf_efficientnetv2_b3.in21k_ft_in1k,240,81.078,18.922,95.920,4.080,14.36,0.900,bicubic\nfastvit_s12.apple_dist_in1k,256,81.074,18.926,95.278,4.722,9.47,0.900,bicubic\nedgenext_small.usi_in1k,256,81.068,18.932,95.330,4.670,5.59,0.950,bicubic\nmambaout_kobe.in1k,288,81.064,18.936,95.682,4.318,9.14,1
.000,bicubic\nconvnextv2_pico.fcmae_ft_in1k,288,81.062,18.938,95.476,4.524,9.07,0.950,bicubic\nhaloregnetz_b.ra3_in1k,224,81.058,18.942,95.196,4.804,11.68,0.940,bicubic\nresnet152s.gluon_in1k,224,81.048,18.952,95.442,4.558,60.32,0.875,bicubic\ncrossvit_small_240.in1k,240,81.036,18.964,95.466,4.534,26.86,0.875,bicubic\nresmlp_big_24_224.fb_in1k,224,81.034,18.966,95.026,4.974,129.14,0.875,bicubic\nresnest50d_1s4x24d.in1k,224,81.024,18.976,95.338,4.662,25.68,0.875,bicubic\nvit_small_patch16_rope_ape_224.naver_in1k,224,81.016,18.984,94.988,5.012,22.06,0.900,bicubic\nresnet50.d_in1k,288,81.012,18.988,95.456,4.544,25.56,1.000,bicubic\nmobilenetv3_large_150d.ra4_e3600_r256_in1k,256,80.994,19.006,95.474,4.526,14.62,0.950,bilinear\nsehalonet33ts.ra2_in1k,256,80.976,19.024,95.272,4.728,13.69,0.940,bicubic\nvit_small_patch16_rope_mixed_ape_224.naver_in1k,224,80.974,19.026,94.976,5.024,22.06,0.900,bicubic\nswin_tiny_patch4_window7_224.ms_in22k_ft_in1k,224,80.966,19.034,96.020,3.980,28.29,0.900,bicubic\nresnest50d.in1k,224,80.958,19.042,95.372,4.628,27.48,0.875,bilinear\nresnet50d.ra4_e3600_r224_in1k,224,80.956,19.044,95.380,4.620,25.58,0.950,bicubic\ncait_xxs24_384.fb_dist_in1k,384,80.954,19.046,95.638,4.362,12.03,1.000,bicubic\nxcit_tiny_12_p16_384.fb_dist_in1k,384,80.954,19.046,95.430,4.570,6.72,1.000,bicubic\ngcresnet50t.ra2_in1k,256,80.944,19.056,95.458,4.542,25.90,0.900,bicubic\nresnext101_32x4d.fb_ssl_yfcc100m_ft_in1k,224,80.936,19.064,95.732,4.268,44.18,0.875,bilinear\nresnet50.c1_in1k,288,80.928,19.072,95.554,4.446,25.56,1.000,bicubic\nregnetx_032.tv2_in1k,224,80.916,19.084,95.248,4.752,15.30,0.965,bicubic\nswiftformer_l1.dist_in1k,224,80.906,19.094,95.374,4.626,12.06,0.950,bicubic\nconvnext_nano_ols.d1h_in1k,224,80.906,19.094,95.254,4.746,15.65,0.950,bicubic\ncs3darknet_focus_l.c2ns_in1k,288,80.894,19.106,95.674,4.326,21.15,0.950,bicubic\nmobilenetv4_conv_medium.e500_r256_in1k,320,80.890,19.110,95.750,4.250,9.72,1.000,bicubic\nseresnext101_32x4d.gluon_in1k,224,80.886,1
9.114,95.268,4.732,48.96,0.875,bicubic\nseresnext101_64x4d.gluon_in1k,224,80.884,19.116,95.308,4.692,88.23,0.875,bicubic\necaresnet50t.a2_in1k,224,80.884,19.116,95.022,4.978,25.57,0.950,bicubic\ncs3darknet_l.c2ns_in1k,288,80.882,19.118,95.662,4.338,21.16,0.950,bicubic\ntf_efficientnet_b3.in1k,300,80.878,19.122,95.308,4.692,12.23,0.904,bicubic\ntiny_vit_5m_224.dist_in22k_ft_in1k,224,80.874,19.126,95.668,4.332,5.39,0.950,bicubic\nmobilevitv2_175.cvnets_in1k,256,80.870,19.130,95.280,4.720,14.25,0.888,bicubic\nefficientnet_b3_pruned.in1k,300,80.864,19.136,95.266,4.734,9.86,0.904,bicubic\nresnet50.c2_in1k,288,80.862,19.138,95.530,4.470,25.56,1.000,bicubic\nresnet50.tv2_in1k,224,80.846,19.154,95.430,4.570,25.56,0.965,bilinear\nfastvit_sa12.apple_in1k,256,80.846,19.154,95.346,4.654,11.58,0.900,bicubic\nresnetv2_50d_evos.ah_in1k,224,80.828,19.172,95.270,4.730,25.59,0.950,bicubic\nrepvit_m1_1.dist_300e_in1k,224,80.826,19.174,95.174,4.826,8.80,0.950,bicubic\ntresnet_m.miil_in1k,224,80.816,19.184,94.862,5.138,31.39,0.875,bilinear\nseresnet33ts.ra2_in1k,288,80.814,19.186,95.346,4.654,19.78,1.000,bicubic\necaresnet101d_pruned.miil_in1k,224,80.810,19.190,95.642,4.358,24.88,0.875,bicubic\necaresnet50d_pruned.miil_in1k,288,80.808,19.192,95.566,4.434,19.94,0.950,bicubic\nresnetv2_50d_gn.ah_in1k,224,80.800,19.200,95.356,4.644,25.57,0.950,bicubic\nregnety_320.pycls_in1k,224,80.796,19.204,95.246,4.754,145.05,0.875,bicubic\nresnet50.a2_in1k,288,80.774,19.226,94.978,5.022,25.56,1.000,bicubic\nresmlp_24_224.fb_distilled_in1k,224,80.766,19.234,95.220,4.780,30.02,0.875,bicubic\nconvnext_nano.d1h_in1k,224,80.762,19.238,95.332,4.668,15.59,0.950,bicubic\npoolformerv2_s24.sail_in1k,224,80.746,19.254,95.318,4.682,21.34,1.000,bicubic\nhgnetv2_b2.ssld_stage1_in22k_in1k,224,80.742,19.258,95.500,4.500,11.22,0.965,bicubic\nregnetz_b16.ra3_in1k,288,80.740,19.260,95.528,4.472,9.72,1.000,bicubic\nvit_base_patch32_224.augreg_in21k_ft_in1k,224,80.732,19.268,95.574,4.426,88.22,0.900,bicubic\nresnet50d.a1_i
n1k,224,80.730,19.270,94.678,5.322,25.58,0.950,bicubic\nresnext50_32x4d.ra_in1k,288,80.720,19.280,95.344,4.656,25.03,0.950,bicubic\neca_resnet33ts.ra2_in1k,288,80.716,19.284,95.382,4.618,19.68,1.000,bicubic\nresnet50.b1k_in1k,288,80.708,19.292,95.430,4.570,25.56,1.000,bicubic\ntf_efficientnetv2_b3.in1k,240,80.708,19.292,95.178,4.822,14.36,0.904,bicubic\nresnetrs101.tf_in1k,192,80.706,19.294,95.240,4.760,63.62,0.940,bicubic\ngernet_m.idstcv_in1k,224,80.702,19.298,95.186,4.814,21.14,0.875,bilinear\nmobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k,256,80.684,19.316,95.442,4.558,8.46,0.950,bicubic\nresnext50d_32x4d.bt_in1k,288,80.676,19.324,95.426,4.574,25.05,0.950,bicubic\nregnety_016.tv2_in1k,224,80.668,19.332,95.336,4.664,11.20,0.965,bicubic\nnf_resnet50.ra2_in1k,288,80.658,19.342,95.350,4.650,25.56,0.940,bicubic\nresnet50.a1h_in1k,224,80.654,19.346,95.312,4.688,25.56,1.000,bicubic\neva02_tiny_patch14_336.mim_in22k_ft_in1k,336,80.648,19.352,95.520,4.480,5.76,1.000,bicubic\nhgnet_tiny.paddle_in1k,288,80.644,19.356,95.556,4.444,14.74,1.000,bicubic\nresnext101_64x4d.gluon_in1k,224,80.640,19.360,95.000,5.000,83.46,0.875,bicubic\necaresnet50d.miil_in1k,224,80.634,19.366,95.310,4.690,25.58,0.875,bicubic\nresnetaa50.a1h_in1k,224,80.614,19.386,95.212,4.788,25.56,0.950,bicubic\nefficientnet_b2.ra_in1k,288,80.612,19.388,95.320,4.680,9.11,1.000,bicubic\ngcresnext50ts.ch_in1k,256,80.608,19.392,95.180,4.820,15.67,0.900,bicubic\ngcresnet33ts.ra2_in1k,288,80.588,19.412,95.318,4.682,19.88,1.000,bicubic\nmaxvit_rmlp_pico_rw_256.sw_in1k,256,80.550,19.450,95.186,4.814,7.52,0.950,bicubic\ncspresnext50.ra_in1k,256,80.548,19.452,95.330,4.670,20.57,0.887,bilinear\nresnet152.a3_in1k,224,80.548,19.452,95.004,4.996,60.19,0.950,bicubic\nresnet50d.ra2_in1k,224,80.546,19.454,95.160,4.840,25.58,0.875,bicubic\ndarknet53.c2ns_in1k,288,80.542,19.458,95.428,4.572,41.61,1.000,bicubic\ndarknetaa53.c2ns_in1k,288,80.528,19.472,95.328,4.672,36.02,1.000,bilinear\nrepvgg_b3.rvgg_in1k,224,80.528,19.472,95.254,4.74
6,123.09,0.875,bilinear\nresnext50_32x4d.a1_in1k,224,80.526,19.474,94.458,5.542,25.03,0.950,bicubic\nefficientformer_l1.snap_dist_in1k,224,80.502,19.498,94.992,5.008,12.29,0.950,bicubic\nwide_resnet101_2.tv2_in1k,176,80.494,19.506,94.978,5.022,126.89,0.875,bilinear\nmixnet_xl.ra_in1k,224,80.494,19.506,94.938,5.062,11.90,0.875,bicubic\nvit_small_patch32_384.augreg_in21k_ft_in1k,384,80.476,19.524,95.594,4.406,22.92,1.000,bicubic\nresnet152d.gluon_in1k,224,80.476,19.524,95.194,4.806,60.21,0.875,bicubic\nrepvit_m2.dist_in1k,224,80.468,19.532,95.172,4.828,8.80,0.950,bicubic\nresnet50.b2k_in1k,288,80.466,19.534,95.306,4.694,25.56,1.000,bicubic\nresnet101d.gluon_in1k,224,80.466,19.534,95.034,4.966,44.57,0.875,bicubic\nxcit_tiny_24_p16_224.fb_dist_in1k,224,80.462,19.538,95.202,4.798,12.12,1.000,bicubic\nconvnext_pico_ols.d1_in1k,288,80.458,19.542,95.248,4.752,9.06,1.000,bicubic\necaresnetlight.miil_in1k,224,80.454,19.546,95.248,4.752,30.16,0.875,bicubic\nedgenext_small_rw.sw_in1k,320,80.452,19.548,95.206,4.794,7.83,1.000,bicubic\nresnext50_32x4d.a2_in1k,224,80.444,19.556,94.630,5.370,25.03,0.950,bicubic\nwide_resnet50_2.tv2_in1k,176,80.442,19.558,95.076,4.924,68.88,0.875,bilinear\ninception_resnet_v2.tf_in1k,299,80.434,19.566,95.312,4.688,55.84,0.897,bicubic\nmobilenetv4_hybrid_medium.e500_r224_in1k,224,80.432,19.568,95.378,4.622,11.07,0.950,bicubic\nresnetv2_34d.ra4_e3600_r384_in1k,448,80.430,19.570,95.282,4.718,21.82,1.000,bicubic\nrepvit_m1_0.dist_450e_in1k,224,80.430,19.570,94.922,5.078,7.30,0.950,bicubic\nconvnext_pico.d1_in1k,288,80.426,19.574,95.062,4.938,9.05,0.950,bicubic\nresnetv2_50.a1h_in1k,224,80.410,19.590,95.082,4.918,25.55,0.950,bicubic\nefficientnet_b1.ra4_e3600_r240_in1k,240,80.404,19.596,95.152,4.848,7.79,0.900,bicubic\nresnetv2_50x1_bit.goog_in21k_ft_in1k,448,80.386,19.614,95.694,4.306,25.55,1.000,bilinear\nregnety_120.pycls_in1k,224,80.382,19.618,95.104,4.896,51.82,0.875,bicubic\nresnet50.a1_in1k,224,80.382,19.618,94.598,5.402,25.56,0.950,bicubic\nseres
net33ts.ra2_in1k,256,80.380,19.620,95.106,4.894,19.78,0.900,bicubic\nmobilevitv2_150.cvnets_in1k,256,80.380,19.620,95.050,4.950,10.59,0.888,bicubic\nese_vovnet39b.ra_in1k,288,80.378,19.622,95.360,4.640,24.57,0.950,bicubic\nfastvit_t12.apple_dist_in1k,256,80.364,19.636,95.040,4.960,7.55,0.900,bicubic\ncs3darknet_l.c2ns_in1k,256,80.350,19.650,95.304,4.696,21.16,0.887,bicubic\nresnext101_32x4d.gluon_in1k,224,80.336,19.664,94.908,5.092,44.18,0.875,bicubic\nrexnet_150.nav_in1k,224,80.322,19.678,95.164,4.836,9.73,0.875,bicubic\nefficientvit_b1.r288_in1k,288,80.322,19.678,94.980,5.020,9.10,1.000,bicubic\nresnext50_32x4d.fb_ssl_yfcc100m_ft_in1k,224,80.316,19.684,95.386,4.614,25.03,0.875,bilinear\nconvnextv2_pico.fcmae_ft_in1k,224,80.304,19.696,95.082,4.918,9.07,0.875,bicubic\ntf_efficientnet_b2.ap_in1k,260,80.300,19.700,95.032,4.968,9.11,0.890,bicubic\npoolformer_s24.sail_in1k,224,80.294,19.706,95.060,4.940,21.39,0.900,bicubic\nefficientnet_el_pruned.in1k,300,80.284,19.716,95.218,4.782,10.59,0.904,bicubic\nregnety_160.pycls_in1k,224,80.284,19.716,94.982,5.018,83.59,0.875,bicubic\nres2net50d.in1k,224,80.280,19.720,95.028,4.972,25.72,0.875,bilinear\nresnet101s.gluon_in1k,224,80.278,19.722,95.160,4.840,44.67,0.875,bicubic\nlegacy_seresnext101_32x4d.in1k,224,80.274,19.726,95.032,4.968,48.96,0.875,bilinear\nresnet50d.a2_in1k,224,80.268,19.732,94.642,5.358,25.58,0.950,bicubic\nvit_wee_patch16_reg1_gap_256.sbb_in1k,256,80.262,19.738,95.360,4.640,13.42,0.950,bicubic\nseresnet50.ra2_in1k,224,80.260,19.740,95.068,4.932,28.09,0.875,bicubic\nregnetx_320.pycls_in1k,224,80.258,19.742,95.016,4.984,107.81,0.875,bicubic\ncs3darknet_focus_l.c2ns_in1k,256,80.254,19.746,95.294,4.706,21.15,0.887,bicubic\nresnetblur50.bt_in1k,288,80.248,19.752,95.200,4.800,25.56,0.950,bicubic\ntf_efficientnet_el.in1k,300,80.246,19.754,95.122,4.878,10.59,0.904,bicubic\nvit_base_patch16_224.sam_in1k,224,80.246,19.754,94.764,5.236,86.57,0.900,bicubic\nnf_resnet50.ra2_in1k,256,80.236,19.764,95.104,4.896,25.56,0.940,
bicubic\ntf_efficientnetv2_b2.in1k,260,80.226,19.774,95.010,4.990,10.10,0.890,bicubic\nrepvgg_b3g4.rvgg_in1k,224,80.224,19.776,95.100,4.900,83.83,0.875,bilinear\nresnet152.tv2_in1k,176,80.222,19.778,94.650,5.350,60.19,0.875,bilinear\nskresnext50_32x4d.ra_in1k,224,80.168,19.832,94.644,5.356,27.48,0.875,bicubic\ndpn107.mx_in1k,224,80.166,19.834,94.932,5.068,86.92,0.875,bicubic\nconvmixer_768_32.in1k,224,80.158,19.842,95.070,4.930,21.11,0.960,bicubic\nmobilenetv4_conv_blur_medium.e500_r224_in1k,256,80.152,19.848,95.298,4.702,9.72,1.000,bicubic\nrepvit_m1_0.dist_300e_in1k,224,80.152,19.848,94.770,5.230,7.30,0.950,bicubic\ninception_v4.tf_in1k,299,80.144,19.856,94.982,5.018,42.68,0.875,bicubic\nmobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k,224,80.116,19.884,94.994,5.006,8.46,0.900,bicubic\nseresnet50.a2_in1k,224,80.102,19.898,94.720,5.280,28.09,0.950,bicubic\neca_resnet33ts.ra2_in1k,256,80.084,19.916,94.980,5.020,19.68,0.900,bicubic\nvit_pwee_patch16_reg1_gap_256.sbb_in1k,256,80.082,19.918,95.138,4.862,15.25,0.950,bicubic\ncspdarknet53.ra_in1k,256,80.078,19.922,95.060,4.940,27.64,0.887,bilinear\nresnet50_gn.a1h_in1k,224,80.074,19.926,94.958,5.042,25.56,0.940,bicubic\ntf_efficientnet_b2.aa_in1k,260,80.074,19.926,94.906,5.094,9.11,0.890,bicubic\ngcresnet33ts.ra2_in1k,256,80.072,19.928,94.986,5.014,19.88,0.900,bicubic\ndpn92.mx_in1k,224,80.032,19.968,94.876,5.124,37.67,0.875,bicubic\nseresnet50.a1_in1k,224,80.012,19.988,94.704,5.296,28.09,0.950,bicubic\ndarknet53.c2ns_in1k,256,80.008,19.992,95.046,4.954,41.61,0.887,bicubic\ninception_resnet_v2.tf_ens_adv_in1k,299,79.998,20.002,94.940,5.060,55.84,0.897,bicubic\nmambaout_kobe.in1k,224,79.986,20.014,94.982,5.018,9.14,1.000,bicubic\nresnet50.ram_in1k,288,79.982,20.018,95.038,4.962,25.56,0.950,bicubic\nresnetrs50.tf_in1k,224,79.934,20.066,94.972,5.028,35.69,0.910,bicubic\nxception71.tf_in1k,299,79.920,20.080,94.908,5.092,42.34,0.903,bicubic\nmobilenetv4_conv_medium.e500_r256_in1k,256,79.916,20.084,95.188,4.812,9.72,0.950,bicubic\nh
gnetv2_b1.ssld_stage2_ft_in1k,288,79.916,20.084,95.164,4.836,6.34,1.000,bicubic\nseresnext50_32x4d.gluon_in1k,224,79.916,20.084,94.816,5.184,27.56,0.875,bicubic\nmambaout_femto.in1k,288,79.912,20.088,95.134,4.866,7.30,1.000,bicubic\nresnet101.tv2_in1k,176,79.908,20.092,94.606,5.394,44.55,0.875,bilinear\nresnet152c.gluon_in1k,224,79.904,20.096,94.854,5.146,60.21,0.875,bicubic\nefficientnet_b2_pruned.in1k,260,79.904,20.096,94.838,5.162,8.31,0.890,bicubic\nresnet50.d_in1k,224,79.904,20.096,94.666,5.334,25.56,0.950,bicubic\necaresnet26t.ra2_in1k,320,79.900,20.100,95.070,4.930,16.01,0.950,bicubic\nhgnet_tiny.paddle_in1k,224,79.892,20.108,95.054,4.946,14.74,0.965,bicubic\nfastvit_s12.apple_in1k,256,79.882,20.118,94.798,5.202,9.47,0.900,bicubic\nregnety_080.pycls_in1k,224,79.874,20.126,94.834,5.166,39.18,0.875,bicubic\nresnet50.c2_in1k,224,79.868,20.132,94.862,5.138,25.56,0.950,bicubic\nregnetz_b16.ra3_in1k,224,79.864,20.136,94.982,5.018,9.72,0.940,bicubic\nresnet50.ra_in1k,288,79.864,20.136,94.970,5.030,25.56,0.950,bicubic\nlevit_conv_192.fb_dist_in1k,224,79.860,20.140,94.802,5.198,10.95,0.900,bicubic\nlevit_192.fb_dist_in1k,224,79.858,20.142,94.808,5.192,10.95,0.900,bicubic\ndeit_small_patch16_224.fb_in1k,224,79.856,20.144,95.056,4.944,22.05,0.900,bicubic\nregnetx_160.pycls_in1k,224,79.844,20.156,94.852,5.148,54.28,0.875,bicubic\nmobilenetv4_conv_medium.e500_r224_in1k,256,79.836,20.164,95.192,4.808,9.72,1.000,bicubic\ndpn131.mx_in1k,224,79.836,20.164,94.708,5.292,79.25,0.875,bicubic\nresnet101.a3_in1k,224,79.836,20.164,94.608,5.392,44.55,0.950,bicubic\nresnet50.a2_in1k,224,79.828,20.172,94.560,5.440,25.56,0.950,bicubic\ntf_efficientnet_lite3.in1k,300,79.808,20.192,94.908,5.092,8.20,0.904,bilinear\nresnext50_32x4d.ra_in1k,224,79.800,20.200,94.608,5.392,25.03,0.875,bicubic\nresnetv2_34d.ra4_e3600_r384_in1k,384,79.792,20.208,94.902,5.098,21.82,1.000,bicubic\nresmlp_36_224.fb_in1k,224,79.772,20.228,94.888,5.112,44.69,0.875,bicubic\nresnet50.c1_in1k,224,79.766,20.234,94.954,5
.046,25.56,0.950,bicubic\ngcvit_xxtiny.in1k,224,79.756,20.244,95.058,4.942,12.00,0.875,bicubic\ndarknetaa53.c2ns_in1k,256,79.754,20.246,94.904,5.096,36.02,0.887,bilinear\ncait_xxs36_224.fb_dist_in1k,224,79.750,20.250,94.866,5.134,17.30,1.000,bicubic\nefficientvit_b1.r256_in1k,256,79.734,20.266,94.786,5.214,9.10,1.000,bicubic\nregnety_064.pycls_in1k,224,79.734,20.266,94.768,5.232,30.58,0.875,bicubic\necaresnet50d_pruned.miil_in1k,224,79.714,20.286,94.862,5.138,19.94,0.875,bicubic\nresnet152.gluon_in1k,224,79.712,20.288,94.720,5.280,60.19,0.875,bicubic\nxcit_tiny_12_p8_224.fb_in1k,224,79.710,20.290,95.058,4.942,6.71,1.000,bicubic\nresnet33ts.ra2_in1k,288,79.710,20.290,94.818,5.182,19.68,1.000,bicubic\nefficientformerv2_s1.snap_dist_in1k,224,79.684,20.316,94.712,5.288,6.19,0.950,bicubic\nfbnetv3_d.ra2_in1k,256,79.682,20.318,94.952,5.048,10.31,0.950,bilinear\nmobilevitv2_125.cvnets_in1k,256,79.682,20.318,94.844,5.156,7.48,0.888,bicubic\nresnext50d_32x4d.bt_in1k,224,79.672,20.328,94.878,5.122,25.05,0.875,bicubic\ndpn98.mx_in1k,224,79.664,20.336,94.644,5.356,61.57,0.875,bicubic\nresnet50.bt_in1k,288,79.652,20.348,94.908,5.092,25.56,0.950,bicubic\ngmlp_s16_224.ra3_in1k,224,79.652,20.348,94.624,5.376,19.42,0.875,bicubic\nresnetv2_34d.ra4_e3600_r224_in1k,288,79.614,20.386,94.760,5.240,21.82,1.000,bicubic\ntf_efficientnet_b2.in1k,260,79.612,20.388,94.714,5.286,9.11,0.890,bicubic\nregnetx_120.pycls_in1k,224,79.608,20.392,94.734,5.266,46.11,0.875,bicubic\nedgenext_small_rw.sw_in1k,256,79.596,20.404,94.518,5.482,7.83,0.900,bicubic\ncspresnet50.ra_in1k,256,79.578,20.422,94.698,5.302,21.62,0.887,bilinear\nresnet50.b1k_in1k,224,79.564,20.436,94.622,5.378,25.56,0.950,bicubic\nxception65.tf_in1k,299,79.558,20.442,94.656,5.344,39.92,0.903,bicubic\necaresnet50t.a3_in1k,224,79.548,20.452,94.710,5.290,25.57,0.950,bicubic\nconvnext_pico_ols.d1_in1k,224,79.542,20.458,94.706,5.294,9.06,0.950,bicubic\nresnet101c.gluon_in1k,224,79.542,20.458,94.588,5.412,44.57,0.875,bicubic\ntf_efficientnetv2
_b1.in1k,240,79.518,20.482,94.710,5.290,8.14,0.882,bicubic\neca_halonext26ts.c1_in1k,256,79.518,20.482,94.582,5.418,10.76,0.940,bicubic\nconvnext_pico.d1_in1k,224,79.516,20.484,94.554,5.446,9.05,0.875,bicubic\nvit_relpos_base_patch32_plus_rpn_256.sw_in1k,256,79.508,20.492,94.110,5.890,119.42,0.900,bicubic\nrexnet_130.nav_in1k,224,79.492,20.508,94.680,5.320,7.56,0.875,bicubic\nxcit_tiny_24_p16_224.fb_in1k,224,79.474,20.526,94.878,5.122,12.12,1.000,bicubic\nhrnet_w64.ms_in1k,224,79.458,20.542,94.646,5.354,128.06,0.875,bilinear\ndla102x2.in1k,224,79.448,20.552,94.654,5.346,41.28,0.875,bilinear\nmobilenetv4_conv_blur_medium.e500_r224_in1k,224,79.446,20.554,94.920,5.080,9.72,0.950,bicubic\nmobileone_s4.apple_in1k,224,79.446,20.554,94.490,5.510,14.95,0.900,bilinear\nregnetx_016.tv2_in1k,224,79.440,20.560,94.768,5.232,9.19,0.965,bicubic\nresnet50.tv2_in1k,176,79.434,20.566,94.640,5.360,25.56,0.875,bilinear\nrepvgg_b2g4.rvgg_in1k,224,79.396,20.604,94.680,5.320,61.76,0.875,bilinear\nresnet32ts.ra2_in1k,288,79.388,20.612,94.676,5.324,17.96,1.000,bicubic\nresnet50.b2k_in1k,224,79.388,20.612,94.580,5.420,25.56,0.950,bicubic\nresnext50_32x4d.tv2_in1k,176,79.384,20.616,94.302,5.698,25.03,0.875,bilinear\nresmlp_24_224.fb_in1k,224,79.380,20.620,94.550,5.450,30.02,0.875,bicubic\nnf_regnet_b1.ra2_in1k,288,79.368,20.632,94.726,5.274,10.22,0.900,bicubic\nefficientnet_b0.ra4_e3600_r224_in1k,256,79.364,20.636,94.754,5.246,5.29,1.000,bicubic\nshvit_s4.in1k,256,79.362,20.638,94.370,5.630,16.59,0.875,bicubic\nresnext50_32x4d.gluon_in1k,224,79.354,20.646,94.424,5.576,25.03,0.875,bicubic\nconvnextv2_femto.fcmae_ft_in1k,288,79.342,20.658,94.590,5.410,5.23,0.950,bicubic\ndpn68b.ra_in1k,288,79.342,20.658,94.430,5.570,12.61,1.000,bicubic\nhrnet_w48.ms_in1k,224,79.328,20.672,94.516,5.484,77.47,0.875,bilinear\nefficientnet_b2.ra_in1k,256,79.324,20.676,94.582,5.418,9.11,0.875,bicubic\ntf_efficientnet_cc_b1_8e.in1k,240,79.318,20.682,94.384,5.616,39.72,0.882,bicubic\nese_vovnet39b.ra_in1k,224,79.312,2
0.688,94.722,5.278,24.57,0.875,bicubic\nresnext101_32x8d.tv_in1k,224,79.308,20.692,94.530,5.470,88.79,0.875,bilinear\nresnetblur50.bt_in1k,224,79.306,20.694,94.632,5.368,25.56,0.875,bicubic\nresnet101.gluon_in1k,224,79.306,20.694,94.538,5.462,44.55,0.875,bicubic\ntf_efficientnet_b1.ap_in1k,240,79.288,20.712,94.316,5.684,7.79,0.882,bicubic\nfastvit_t12.apple_in1k,256,79.276,20.724,94.570,5.430,7.55,0.900,bicubic\nresnet50.a1h_in1k,176,79.264,20.736,94.498,5.502,25.56,0.900,bicubic\neca_botnext26ts_256.c1_in1k,256,79.262,20.738,94.600,5.400,10.59,0.950,bicubic\nresnext50_32x4d.a3_in1k,224,79.258,20.742,94.308,5.692,25.03,0.950,bicubic\nefficientnet_em.ra2_in1k,240,79.256,20.744,94.794,5.206,6.90,0.882,bicubic\nbotnet26t_256.c1_in1k,256,79.256,20.744,94.528,5.472,12.49,0.950,bicubic\nefficientvit_b1.r224_in1k,224,79.254,20.746,94.300,5.700,9.10,0.950,bicubic\nresnet50.fb_ssl_yfcc100m_ft_in1k,224,79.252,20.748,94.826,5.174,25.56,0.875,bilinear\nregnety_040.pycls_in1k,224,79.246,20.754,94.666,5.334,20.65,0.875,bicubic\nresnet33ts.ra2_in1k,256,79.216,20.784,94.576,5.424,19.68,0.900,bicubic\nregnetx_080.pycls_in1k,224,79.214,20.786,94.542,5.458,39.57,0.875,bicubic\ntf_efficientnetv2_b2.in1k,208,79.208,20.792,94.602,5.398,10.10,0.890,bicubic\ntiny_vit_5m_224.in1k,224,79.196,20.804,94.792,5.208,5.39,0.950,bicubic\nres2net101_26w_4s.in1k,224,79.192,20.808,94.466,5.534,45.21,0.875,bilinear\npit_xs_distilled_224.in1k,224,79.172,20.828,94.350,5.650,11.00,0.900,bicubic\nfbnetv3_b.ra2_in1k,256,79.154,20.846,94.732,5.268,8.60,0.950,bilinear\nvit_base_patch16_224.augreg_in1k,224,79.154,20.846,94.108,5.892,86.57,0.900,bicubic\nhalonet26t.a1h_in1k,256,79.134,20.866,94.340,5.660,12.48,0.950,bicubic\nlambda_resnet26t.c1_in1k,256,79.126,20.874,94.554,5.446,10.96,0.940,bicubic\ncoat_lite_mini.in1k,224,79.104,20.896,94.606,5.394,11.01,0.900,bicubic\nmobilenetv4_conv_medium.e500_r224_in1k,224,79.092,20.908,94.788,5.212,9.72,0.950,bicubic\nresnet50d.gluon_in1k,224,79.080,20.920,94.458,5.542,
25.58,0.875,bicubic\nlegacy_seresnext50_32x4d.in1k,224,79.080,20.920,94.428,5.572,27.56,0.875,bilinear\nresnetv2_34.ra4_e3600_r224_in1k,288,79.070,20.930,94.560,5.440,21.80,1.000,bicubic\nregnetx_064.pycls_in1k,224,79.062,20.938,94.468,5.532,26.21,0.875,bicubic\nlegacy_xception.tf_in1k,299,79.058,20.942,94.396,5.604,22.86,0.897,bicubic\nrepvit_m0_9.dist_450e_in1k,224,79.058,20.942,94.388,5.612,5.49,0.950,bicubic\nresnet32ts.ra2_in1k,256,79.054,20.946,94.368,5.632,17.96,0.900,bicubic\nhgnetv2_b1.ssld_stage1_in22k_in1k,288,79.050,20.950,94.890,5.110,6.34,1.000,bicubic\nresnet50.ram_in1k,224,79.034,20.966,94.386,5.614,25.56,0.875,bicubic\nresnet50.am_in1k,224,79.004,20.996,94.392,5.608,25.56,0.875,bicubic\nresnet34.ra4_e3600_r224_in1k,288,78.972,21.028,94.454,5.546,21.80,1.000,bicubic\nmixnet_l.ft_in1k,224,78.970,21.030,94.180,5.820,7.33,0.875,bicubic\nlambda_resnet26rpt_256.c1_in1k,256,78.960,21.040,94.420,5.580,10.99,0.940,bicubic\nconvnext_tiny.fb_in22k_ft_in1k,288,78.950,21.050,94.708,5.292,28.59,1.000,bicubic\nres2net50_26w_8s.in1k,224,78.950,21.050,94.280,5.720,48.40,0.875,bilinear\nhrnet_w40.ms_in1k,224,78.928,21.072,94.480,5.520,57.56,0.875,bilinear\nconvnext_femto_ols.d1_in1k,288,78.916,21.084,94.534,5.466,5.23,0.950,bicubic\nregnety_032.pycls_in1k,224,78.902,21.098,94.426,5.574,19.44,0.875,bicubic\nresnet152.a3_in1k,160,78.896,21.104,94.140,5.860,60.19,0.950,bicubic\necaresnet26t.ra2_in1k,256,78.894,21.106,94.548,5.452,16.01,0.875,bicubic\nhrnet_w44.ms_in1k,224,78.890,21.110,94.376,5.624,67.06,0.875,bilinear\nhgnetv2_b1.ssld_stage2_ft_in1k,224,78.884,21.116,94.484,5.516,6.34,0.965,bicubic\nmambaout_femto.in1k,224,78.880,21.120,94.408,5.592,7.30,1.000,bicubic\nwide_resnet101_2.tv_in1k,224,78.862,21.138,94.312,5.688,126.89,0.875,bilinear\nvit_small_patch16_224.augreg_in1k,224,78.842,21.158,94.280,5.720,22.05,0.900,bicubic\ntf_efficientnet_b1.aa_in1k,240,78.840,21.160,94.202,5.798,7.79,0.882,bicubic\nstarnet_s4.in1k,224,78.824,21.176,94.296,5.704,7.48,0.875,bicu
bic\nresnet50.ra_in1k,224,78.820,21.180,94.314,5.686,25.56,0.875,bicubic\nseresnext26d_32x4d.bt_in1k,288,78.816,21.184,94.266,5.734,16.81,0.950,bicubic\ninception_v3.gluon_in1k,299,78.806,21.194,94.372,5.628,23.83,0.875,bicubic\nefficientnet_b1.ft_in1k,256,78.806,21.194,94.346,5.654,7.79,1.000,bicubic\nrepghostnet_200.in1k,224,78.802,21.198,94.332,5.668,9.80,0.875,bicubic\nrepvgg_b2.rvgg_in1k,224,78.788,21.212,94.414,5.586,89.02,0.875,bilinear\ntf_mixnet_l.in1k,224,78.774,21.226,94.000,6.000,7.33,0.875,bicubic\nvit_base_patch32_384.augreg_in1k,384,78.750,21.250,94.238,5.762,88.30,1.000,bicubic\nresnet50d.a3_in1k,224,78.742,21.258,94.236,5.764,25.58,0.950,bicubic\nfasternet_t2.in1k,224,78.738,21.262,94.332,5.668,14.98,1.000,bicubic\nseresnext26t_32x4d.bt_in1k,288,78.736,21.264,94.318,5.682,16.81,0.950,bicubic\nconvnext_femto.d1_in1k,288,78.712,21.288,94.436,5.564,5.22,0.950,bicubic\nnf_regnet_b1.ra2_in1k,256,78.710,21.290,94.380,5.620,10.22,0.900,bicubic\nresnet50s.gluon_in1k,224,78.710,21.290,94.248,5.752,25.68,0.875,bicubic\ndla169.in1k,224,78.702,21.298,94.334,5.666,53.39,0.875,bilinear\npvt_v2_b1.in1k,224,78.698,21.302,94.490,5.510,14.01,0.900,bicubic\ntf_efficientnet_b0.ns_jft_in1k,224,78.680,21.320,94.372,5.628,5.29,0.875,bicubic\nregnety_008_tv.tv2_in1k,224,78.672,21.328,94.380,5.620,6.43,0.965,bicubic\nlegacy_seresnet152.in1k,224,78.662,21.338,94.370,5.630,66.82,0.875,bilinear\nfbnetv3_d.ra2_in1k,224,78.658,21.342,94.454,5.546,10.31,0.950,bilinear\nrepvit_m0_9.dist_300e_in1k,224,78.652,21.348,94.114,5.886,5.49,0.950,bicubic\nhgnetv2_b0.ssld_stage2_ft_in1k,288,78.590,21.410,94.388,5.612,6.00,1.000,bicubic\nres2net50_26w_6s.in1k,224,78.586,21.414,94.120,5.880,37.05,0.875,bilinear\nxcit_tiny_12_p16_224.fb_dist_in1k,224,78.572,21.428,94.210,5.790,6.72,1.000,bicubic\nefficientnet_b0.ra4_e3600_r224_in1k,224,78.568,21.432,94.340,5.660,5.29,0.900,bicubic\nxception41.tf_in1k,299,78.550,21.450,94.282,5.718,26.97,0.903,bicubic\ntf_efficientnet_b1.in1k,240,78.546,21.454,
94.096,5.904,7.79,0.882,bicubic\ndpn68b.ra_in1k,224,78.546,21.454,93.982,6.018,12.61,0.950,bicubic\nrepvit_m1.dist_in1k,224,78.542,21.458,94.082,5.918,5.49,0.950,bicubic\ndla102x.in1k,224,78.516,21.484,94.226,5.774,26.31,0.875,bilinear\ndla60_res2next.in1k,224,78.488,21.512,94.144,5.856,17.03,0.875,bilinear\nlevit_conv_128.fb_dist_in1k,224,78.488,21.512,94.000,6.000,9.21,0.900,bicubic\nregnetx_040.pycls_in1k,224,78.486,21.514,94.252,5.748,22.12,0.875,bicubic\nwide_resnet50_2.tv_in1k,224,78.486,21.514,94.084,5.916,68.88,0.875,bilinear\nlevit_128.fb_dist_in1k,224,78.486,21.514,93.998,6.002,9.21,0.900,bicubic\ndla60_res2net.in1k,224,78.480,21.520,94.204,5.796,20.85,0.875,bilinear\nresnest26d.gluon_in1k,224,78.476,21.524,94.300,5.700,17.07,0.875,bilinear\nconvnextv2_femto.fcmae_ft_in1k,224,78.476,21.524,93.984,6.016,5.23,0.875,bicubic\nswiftformer_s.dist_in1k,224,78.462,21.538,93.980,6.020,6.09,0.950,bicubic\nhrnet_w32.ms_in1k,224,78.452,21.548,94.184,5.816,41.23,0.875,bilinear\nresnet50.bt_in1k,224,78.444,21.556,94.274,5.726,25.56,0.875,bicubic\ncoat_tiny.in1k,224,78.442,21.558,94.044,5.956,5.50,0.900,bicubic\nresnet34d.ra2_in1k,288,78.440,21.560,94.348,5.652,21.82,0.950,bicubic\nvit_tiny_patch16_384.augreg_in21k_ft_in1k,384,78.438,21.562,94.542,5.458,5.79,1.000,bicubic\ngcresnext26ts.ch_in1k,288,78.436,21.564,94.028,5.972,10.48,1.000,bicubic\nselecsls60b.in1k,224,78.410,21.590,94.166,5.834,32.77,0.875,bicubic\ncait_xxs24_224.fb_dist_in1k,224,78.400,21.600,94.326,5.674,11.96,1.000,bicubic\ntf_efficientnetv2_b0.in1k,224,78.386,21.614,94.034,5.966,7.14,0.875,bicubic\nlegacy_seresnet101.in1k,224,78.370,21.630,94.264,5.736,49.33,0.875,bilinear\nrepvgg_b1.rvgg_in1k,224,78.370,21.630,94.106,5.894,57.42,0.875,bilinear\nresnet26t.ra2_in1k,320,78.330,21.670,94.136,5.864,16.01,1.000,bicubic\nresnet152.tv_in1k,224,78.330,21.670,94.046,5.954,60.19,0.875,bilinear\nmobilevit_s.cvnets_in1k,256,78.298,21.702,94.168,5.832,5.58,0.900,bicubic\nseresnext26ts.ch_in1k,288,78.292,21.708,94.0
76,5.924,10.39,1.000,bicubic\nbat_resnext26ts.ch_in1k,256,78.280,21.720,94.114,5.886,10.73,0.900,bicubic\nresnetv2_34d.ra4_e3600_r224_in1k,224,78.270,21.730,93.950,6.050,21.82,0.900,bicubic\nefficientnet_b1_pruned.in1k,240,78.252,21.748,93.820,6.180,6.33,0.882,bicubic\nres2next50.in1k,224,78.238,21.762,93.904,6.096,24.67,0.875,bilinear\ndla60x.in1k,224,78.234,21.766,94.022,5.978,17.35,0.875,bilinear\nhrnet_w30.ms_in1k,224,78.202,21.798,94.220,5.780,37.71,0.875,bilinear\nhrnet_w18_small_v2.gluon_in1k,224,78.202,21.798,93.906,6.094,15.60,0.875,bicubic\npit_xs_224.in1k,224,78.196,21.804,94.156,5.844,10.62,0.900,bicubic\nfbnetv3_b.ra2_in1k,224,78.184,21.816,94.238,5.762,8.60,0.950,bilinear\nvisformer_tiny.in1k,224,78.160,21.840,94.164,5.836,10.32,0.900,bicubic\nregnetx_032.pycls_in1k,224,78.160,21.840,94.088,5.912,15.30,0.875,bicubic\nres2net50_14w_8s.in1k,224,78.142,21.858,93.852,6.148,25.06,0.875,bilinear\ntf_efficientnet_em.in1k,240,78.128,21.872,94.052,5.948,6.90,0.882,bicubic\nhrnet_w18.ms_aug_in1k,224,78.110,21.890,94.062,5.938,21.30,0.950,bilinear\nefficientnet_es.ra_in1k,224,78.100,21.900,93.928,6.072,5.44,0.875,bicubic\nhardcorenas_f.miil_green_in1k,224,78.100,21.900,93.800,6.200,8.20,0.875,bilinear\nmobilevitv2_100.cvnets_in1k,256,78.088,21.912,94.170,5.830,4.90,0.888,bicubic\nhgnetv2_b1.ssld_stage1_in22k_in1k,224,78.080,21.920,94.172,5.828,6.34,0.965,bicubic\nresnet50.a3_in1k,224,78.052,21.948,93.780,6.220,25.56,0.950,bicubic\ngmixer_24_224.ra3_in1k,224,78.048,21.952,93.654,6.346,24.72,0.875,bicubic\nhgnetv2_b0.ssld_stage1_in22k_in1k,288,78.038,21.962,94.244,5.756,6.00,1.000,bicubic\neca_resnext26ts.ch_in1k,288,78.020,21.980,93.932,6.068,10.30,1.000,bicubic\nresnet50c.gluon_in1k,224,78.016,21.984,93.994,6.006,25.58,0.875,bicubic\ndla102.in1k,224,78.016,21.984,93.952,6.048,33.27,0.875,bilinear\npoolformerv2_s12.sail_in1k,224,78.008,21.992,93.864,6.136,11.89,1.000,bicubic\nmobileone_s3.apple_in1k,224,78.002,21.998,93.864,6.136,10.17,0.900,bilinear\nres2net50_26
w_4s.in1k,224,77.994,22.006,93.860,6.140,25.70,0.875,bilinear\nseresnext26t_32x4d.bt_in1k,224,77.980,22.020,93.750,6.250,16.81,0.875,bicubic\nselecsls60.in1k,224,77.974,22.026,93.822,6.178,30.67,0.875,bicubic\ntf_efficientnet_cc_b0_8e.in1k,224,77.956,22.044,93.638,6.362,24.01,0.875,bicubic\nresmlp_12_224.fb_distilled_in1k,224,77.948,22.052,93.570,6.430,15.35,0.875,bicubic\ntf_efficientnetv2_b1.in1k,192,77.934,22.066,93.822,6.178,8.14,0.882,bicubic\nresnet34.a1_in1k,288,77.930,22.070,93.758,6.242,21.80,1.000,bicubic\nresnet101.a3_in1k,160,77.928,22.072,93.686,6.314,44.55,0.950,bicubic\nmobilenetv3_large_100.miil_in21k_ft_in1k,224,77.920,22.080,92.894,7.106,5.48,0.875,bilinear\nregnety_016.pycls_in1k,224,77.880,22.120,93.726,6.274,11.20,0.875,bicubic\nresnet26t.ra2_in1k,256,77.878,22.122,93.838,6.162,16.01,0.940,bicubic\ninception_v3.tf_in1k,299,77.866,22.134,93.644,6.356,23.83,0.875,bicubic\nrexnet_100.nav_in1k,224,77.862,22.138,93.890,6.110,4.80,0.875,bicubic\nseresnext26ts.ch_in1k,256,77.860,22.140,93.790,6.210,10.39,0.900,bicubic\nresnetrs50.tf_in1k,160,77.858,22.142,93.810,6.190,35.69,0.910,bicubic\nconvnext_femto_ols.d1_in1k,224,77.852,22.148,93.826,6.174,5.23,0.875,bicubic\nghostnetv2_160.in1k,224,77.836,22.164,93.948,6.052,12.39,0.875,bicubic\nxcit_nano_12_p8_384.fb_dist_in1k,384,77.810,22.190,94.036,5.964,3.05,1.000,bicubic\ngcresnext26ts.ch_in1k,256,77.810,22.190,93.816,6.184,10.48,0.900,bicubic\nconvnextv2_atto.fcmae_ft_in1k,288,77.794,22.206,93.728,6.272,3.71,0.950,bicubic\nese_vovnet19b_dw.ra_in1k,288,77.788,22.212,93.772,6.228,6.54,0.950,bicubic\nhardcorenas_e.miil_green_in1k,224,77.788,22.212,93.692,6.308,8.07,0.875,bilinear\necaresnet50t.a3_in1k,160,77.786,22.214,93.620,6.380,25.57,0.950,bicubic\nresnext50_32x4d.a3_in1k,160,77.732,22.268,93.314,6.686,25.03,0.950,bicubic\nefficientnet_b0.ra_in1k,224,77.698,22.302,93.538,6.462,5.29,0.875,bicubic\ntinynet_a.in1k,192,77.666,22.334,93.532,6.468,6.19,0.875,bicubic\nlegacy_seresnet50.in1k,224,77.650,22.350,93
.732,6.268,28.09,0.875,bilinear\ncs3darknet_m.c2ns_in1k,288,77.636,22.364,94.016,5.984,9.31,0.950,bicubic\nresnext50_32x4d.tv_in1k,224,77.632,22.368,93.674,6.326,25.03,0.875,bilinear\nresnetv2_34.ra4_e3600_r224_in1k,224,77.626,22.374,93.518,6.482,21.80,0.900,bicubic\nmobilenetv1_125.ra4_e3600_r224_in1k,256,77.612,22.388,93.754,6.246,6.27,1.000,bicubic\nrepvgg_b1g4.rvgg_in1k,224,77.608,22.392,93.846,6.154,39.97,0.875,bilinear\ninception_v3.tf_adv_in1k,299,77.598,22.402,93.726,6.274,23.83,0.875,bicubic\nefficientnet_b1.ft_in1k,224,77.592,22.408,93.636,6.364,7.79,0.875,bicubic\nseresnext26d_32x4d.bt_in1k,224,77.592,22.408,93.614,6.386,16.81,0.875,bicubic\nresnet50.gluon_in1k,224,77.580,22.420,93.704,6.296,25.56,0.875,bicubic\ncoat_lite_tiny.in1k,224,77.548,22.452,93.924,6.076,5.72,0.900,bicubic\nres2net50_48w_2s.in1k,224,77.528,22.472,93.562,6.438,25.29,0.875,bilinear\nmobileone_s2.apple_in1k,224,77.504,22.496,93.664,6.336,7.88,0.900,bilinear\nconvnext_femto.d1_in1k,224,77.498,22.502,93.678,6.322,5.22,0.875,bicubic\ndpn68b.mx_in1k,224,77.494,22.506,93.834,6.166,12.61,0.875,bicubic\nresnet34.ra4_e3600_r224_in1k,224,77.492,22.508,93.498,6.502,21.80,0.900,bicubic\nrepghostnet_150.in1k,224,77.474,22.526,93.496,6.504,6.58,0.875,bicubic\ninception_v3.tv_in1k,299,77.472,22.528,93.468,6.532,23.83,0.875,bicubic\ntf_efficientnet_lite2.in1k,260,77.470,22.530,93.756,6.244,6.09,0.890,bicubic\nhardcorenas_d.miil_green_in1k,224,77.450,22.550,93.480,6.520,7.50,0.875,bilinear\neca_resnext26ts.ch_in1k,256,77.446,22.554,93.574,6.426,10.30,0.900,bicubic\nresnet26d.bt_in1k,288,77.422,22.578,93.634,6.366,16.01,0.950,bicubic\nresnet101.tv_in1k,224,77.404,22.596,93.552,6.448,44.55,0.875,bilinear\ndensenet161.tv_in1k,224,77.384,22.616,93.656,6.344,28.68,0.875,bicubic\nstarnet_s3.in1k,224,77.378,22.622,93.616,6.384,5.75,0.875,bicubic\nhgnetv2_b0.ssld_stage2_ft_in1k,224,77.362,22.638,93.808,6.192,6.00,0.965,bicubic\nshvit_s3.in1k,224,77.356,22.644,93.310,6.690,14.25,0.875,bicubic\ntf_efficientne
t_cc_b0_4e.in1k,224,77.324,22.676,93.334,6.666,13.31,0.875,bicubic\ndensenetblur121d.ra_in1k,288,77.320,22.680,93.796,6.204,8.00,0.950,bicubic\nmobilenetv2_120d.ra_in1k,224,77.300,22.700,93.516,6.484,5.83,0.875,bicubic\nregnetx_008.tv2_in1k,224,77.294,22.706,93.664,6.336,7.26,0.965,bicubic\ncs3darknet_focus_m.c2ns_in1k,288,77.286,22.714,93.966,6.034,9.30,0.950,bicubic\ndensenet201.tv_in1k,224,77.284,22.716,93.480,6.520,20.01,0.875,bicubic\nmixnet_m.ft_in1k,224,77.268,22.732,93.418,6.582,5.01,0.875,bicubic\npoolformer_s12.sail_in1k,224,77.238,22.762,93.538,6.462,11.92,0.900,bicubic\nresnet50d.a3_in1k,160,77.222,22.778,93.256,6.744,25.58,0.950,bicubic\nconvnext_atto_ols.a2_in1k,288,77.220,22.780,93.700,6.300,3.70,0.950,bicubic\nresnext26ts.ra2_in1k,288,77.186,22.814,93.470,6.530,10.30,1.000,bicubic\nselecsls42b.in1k,224,77.184,22.816,93.390,6.610,32.46,0.875,bicubic\nmobilenetv3_large_100.ra4_e3600_r224_in1k,256,77.182,22.818,93.328,6.672,5.48,1.000,bicubic\nfastvit_t8.apple_dist_in1k,256,77.170,22.830,93.274,6.726,4.03,0.900,bicubic\nresnet34.a2_in1k,288,77.150,22.850,93.288,6.712,21.80,1.000,bicubic\nxcit_tiny_12_p16_224.fb_in1k,224,77.122,22.878,93.728,6.272,6.72,1.000,bicubic\nresnet34d.ra2_in1k,224,77.108,22.892,93.372,6.628,21.82,0.875,bicubic\nlegacy_seresnext26_32x4d.in1k,224,77.100,22.900,93.332,6.668,16.79,0.875,bicubic\ntf_efficientnet_b0.ap_in1k,224,77.100,22.900,93.258,6.742,5.29,0.875,bicubic\nhardcorenas_c.miil_green_in1k,224,77.092,22.908,93.192,6.808,5.52,0.875,bilinear\nefficientvit_m5.r224_in1k,224,77.092,22.908,93.168,6.832,12.47,0.875,bicubic\ndla60.in1k,224,77.036,22.964,93.316,6.684,22.04,0.875,bilinear\nseresnet50.a3_in1k,224,77.036,22.964,93.072,6.928,28.09,0.950,bicubic\nconvnext_atto.d2_in1k,288,77.016,22.984,93.704,6.296,3.70,0.950,bicubic\ncrossvit_9_dagger_240.in1k,240,76.990,23.010,93.602,6.398,8.78,0.875,bicubic\ncs3darknet_m.c2ns_in1k,256,76.974,23.026,93.582,6.418,9.31,0.887,bicubic\nskresnet34.ra_in1k,224,76.956,23.044,93.320,6.680,2
2.28,0.875,bicubic\ntf_mixnet_m.in1k,224,76.952,23.048,93.164,6.836,5.01,0.875,bicubic\nconvmixer_1024_20_ks9_p14.in1k,224,76.944,23.056,93.364,6.636,24.38,0.960,bicubic\nregnetx_016.pycls_in1k,224,76.936,23.064,93.432,6.568,9.19,0.875,bicubic\nghostnetv3_100.in1k,224,76.936,23.064,93.124,6.876,8.13,0.875,bicubic\nmobilenetv1_125.ra4_e3600_r224_in1k,224,76.920,23.080,93.232,6.768,6.27,0.900,bicubic\ntf_efficientnet_b0.aa_in1k,224,76.882,23.118,93.270,6.730,5.29,0.875,bicubic\ntf_efficientnetv2_b0.in1k,192,76.876,23.124,93.184,6.816,7.14,0.875,bicubic\ngernet_s.idstcv_in1k,224,76.874,23.126,93.142,6.858,8.17,0.875,bilinear\nhgnetv2_b0.ssld_stage1_in22k_in1k,224,76.868,23.132,93.620,6.380,6.00,0.965,bicubic\nese_vovnet19b_dw.ra_in1k,224,76.828,23.172,93.276,6.724,6.54,0.875,bicubic\nhrnet_w18.ms_in1k,224,76.770,23.230,93.448,6.552,21.30,0.875,bilinear\nresnext26ts.ra2_in1k,256,76.768,23.232,93.138,6.862,10.30,0.900,bicubic\ncs3darknet_focus_m.c2ns_in1k,256,76.754,23.246,93.550,6.450,9.30,0.887,bicubic\nghostnetv2_130.in1k,224,76.746,23.254,93.364,6.636,8.96,0.875,bicubic\nresnet26d.bt_in1k,224,76.698,23.302,93.156,6.844,16.01,0.875,bicubic\ntf_efficientnet_lite1.in1k,240,76.674,23.326,93.232,6.768,5.42,0.882,bicubic\nresmlp_12_224.fb_in1k,224,76.660,23.340,93.170,6.830,15.35,0.875,bicubic\nconvnextv2_atto.fcmae_ft_in1k,224,76.644,23.356,93.042,6.958,3.71,0.875,bicubic\nmixer_b16_224.goog_in21k_ft_in1k,224,76.616,23.384,92.250,7.750,59.88,0.875,bicubic\ntf_efficientnet_es.in1k,224,76.608,23.392,93.176,6.824,5.44,0.875,bicubic\nmobilenetv1_100h.ra4_e3600_r224_in1k,256,76.598,23.402,93.272,6.728,5.28,0.950,bicubic\ndensenetblur121d.ra_in1k,224,76.574,23.426,93.188,6.812,8.00,0.875,bicubic\nhardcorenas_b.miil_green_in1k,224,76.550,23.450,92.766,7.234,5.18,0.875,bilinear\ntf_efficientnet_b0.in1k,224,76.542,23.458,93.008,6.992,5.29,0.875,bicubic\nlevit_conv_128s.fb_dist_in1k,224,76.534,23.466,92.882,7.118,7.78,0.900,bicubic\nmobilenetv2_140.ra_in1k,224,76.528,23.472,93.008,
6.992,6.11,0.875,bicubic\nlevit_128s.fb_dist_in1k,224,76.520,23.480,92.870,7.130,7.78,0.900,bicubic\nresnet34.bt_in1k,288,76.506,23.494,93.342,6.658,21.80,0.950,bicubic\nrepvgg_a2.rvgg_in1k,224,76.488,23.512,93.016,6.984,28.21,0.875,bilinear\ndensenet121.ra_in1k,288,76.486,23.514,93.378,6.622,7.98,0.950,bicubic\nresnet34.a1_in1k,224,76.420,23.580,92.896,7.104,21.80,0.950,bicubic\nresnet26.bt_in1k,288,76.384,23.616,93.178,6.822,16.00,0.950,bicubic\nrepghostnet_130.in1k,224,76.378,23.622,92.898,7.102,5.48,0.875,bicubic\nregnety_008.pycls_in1k,224,76.334,23.666,93.060,6.940,6.26,0.875,bicubic\nxcit_nano_12_p8_224.fb_dist_in1k,224,76.330,23.670,93.070,6.930,3.05,1.000,bicubic\nmobilenetv3_large_100.ra4_e3600_r224_in1k,224,76.310,23.690,92.842,7.158,5.48,0.950,bicubic\ndpn68.mx_in1k,224,76.304,23.696,92.994,7.006,12.61,0.875,bicubic\nfastvit_t8.apple_in1k,256,76.184,23.816,93.042,6.958,4.03,0.900,bicubic\nresnet50.tv_in1k,224,76.156,23.844,92.866,7.134,25.56,0.875,bilinear\nefficientformerv2_s0.snap_dist_in1k,224,76.102,23.898,92.850,7.150,3.60,0.950,bicubic\nmobilenetv1_100.ra4_e3600_r224_in1k,256,76.088,23.912,93.006,6.994,4.23,0.950,bicubic\nresnetv2_18d.ra4_e3600_r224_in1k,288,76.058,23.942,93.010,6.990,11.71,1.000,bicubic\nresnet18d.ra4_e3600_r224_in1k,288,76.012,23.988,92.784,7.216,11.71,1.000,bicubic\nvit_small_patch32_224.augreg_in21k_ft_in1k,224,76.010,23.990,93.266,6.734,22.88,0.900,bicubic\nmixnet_s.ft_in1k,224,76.000,24.000,92.796,7.204,4.13,0.875,bicubic\nresnet50.a3_in1k,160,75.994,24.006,92.494,7.506,25.56,0.950,bicubic\nvit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k,384,75.962,24.038,93.272,6.728,6.36,1.000,bicubic\nhardcorenas_a.miil_green_in1k,224,75.926,24.074,92.486,7.514,5.26,0.875,bilinear\ndensenet169.tv_in1k,224,75.922,24.078,92.976,7.024,14.15,0.875,bicubic\nfasternet_t1.in1k,224,75.922,24.078,92.808,7.192,7.60,1.000,bicubic\nconvnext_atto_ols.a2_in1k,224,75.902,24.098,92.842,7.158,3.70,0.875,bicubic\nmobilenetv3_large_100.ra_in1k,224,75.784,24.216,9
2.528,7.472,5.48,0.875,bicubic\nmobileone_s1.apple_in1k,224,75.760,24.240,92.788,7.212,4.83,0.900,bilinear\nedgenext_x_small.in1k,288,75.706,24.294,92.760,7.240,2.34,1.000,bicubic\nconvnext_atto.d2_in1k,224,75.672,24.328,92.914,7.086,3.70,0.875,bicubic\nmobilenetv1_100h.ra4_e3600_r224_in1k,224,75.664,24.336,92.522,7.478,5.28,0.875,bicubic\ntf_mixnet_s.in1k,224,75.656,24.344,92.632,7.368,4.13,0.875,bicubic\nmobilenetv4_conv_small.e3600_r256_in1k,320,75.616,24.384,92.768,7.232,3.77,1.000,bicubic\nmobilenetv3_rw.rmsp_in1k,224,75.616,24.384,92.696,7.304,5.48,0.875,bicubic\nmobilevitv2_075.cvnets_in1k,256,75.612,24.388,92.754,7.246,2.87,0.888,bicubic\nswiftformer_xs.dist_in1k,224,75.600,24.400,92.314,7.686,3.48,0.950,bicubic\nregnety_004.tv2_in1k,224,75.594,24.406,92.706,7.294,4.34,0.965,bicubic\ndensenet121.ra_in1k,224,75.574,24.426,92.648,7.352,7.98,0.875,bicubic\nresnet34.a2_in1k,224,75.520,24.480,92.454,7.546,21.80,0.950,bicubic\nresnest14d.gluon_in1k,224,75.516,24.484,92.512,7.488,10.61,0.875,bilinear\ntf_mobilenetv3_large_100.in1k,224,75.510,24.490,92.600,7.400,5.48,0.875,bilinear\nefficientnet_lite0.ra_in1k,224,75.488,24.512,92.508,7.492,4.65,0.875,bicubic\nxcit_nano_12_p16_384.fb_dist_in1k,384,75.460,24.540,92.672,7.328,3.05,1.000,bicubic\nvit_tiny_patch16_224.augreg_in21k_ft_in1k,224,75.452,24.548,92.858,7.142,5.72,0.900,bicubic\nsemnasnet_100.rmsp_in1k,224,75.452,24.548,92.608,7.392,3.89,0.875,bicubic\nmobilenetv1_100.ra4_e3600_r224_in1k,224,75.388,24.612,92.310,7.690,4.23,0.875,bicubic\nresnetv2_18.ra4_e3600_r224_in1k,288,75.364,24.636,92.692,7.308,11.69,1.000,bicubic\ninception_next_atto.sail_in1k,224,75.348,24.652,92.558,7.442,4.16,0.875,bicubic\nresnet26.bt_in1k,224,75.298,24.702,92.578,7.422,16.00,0.875,bicubic\nregnety_006.pycls_in1k,224,75.274,24.726,92.532,7.468,6.06,0.875,bicubic\nghostnetv2_100.in1k,224,75.186,24.814,92.334,7.666,6.16,0.875,bicubic\nshvit_s2.in1k,224,75.184,24.816,92.316,7.684,11.48,0.875,bicubic\nresnet34.bt_in1k,224,75.176,24.824,92
.166,7.834,21.80,0.875,bicubic\nrepvgg_b0.rvgg_in1k,224,75.158,24.842,92.410,7.590,15.82,0.875,bilinear\nfbnetc_100.rmsp_in1k,224,75.120,24.880,92.384,7.616,5.57,0.875,bilinear\nseresnet50.a3_in1k,160,75.104,24.896,92.096,7.904,28.09,0.950,bicubic\nhrnet_w18_small_v2.ms_in1k,224,75.094,24.906,92.414,7.586,15.60,0.875,bilinear\nrepghostnet_111.in1k,224,75.060,24.940,92.196,7.804,4.54,0.875,bicubic\nmobilenetv2_110d.ra_in1k,224,75.056,24.944,92.186,7.814,4.52,0.875,bicubic\nregnetx_008.pycls_in1k,224,75.054,24.946,92.338,7.662,7.26,0.875,bicubic\nefficientnet_es_pruned.in1k,224,75.010,24.990,92.438,7.562,5.44,0.875,bicubic\ntinynet_b.in1k,188,74.946,25.054,92.194,7.806,3.73,0.875,bicubic\nvit_base_patch32_224.augreg_in1k,224,74.908,25.092,91.770,8.230,88.22,0.900,bicubic\nedgenext_x_small.in1k,256,74.874,25.126,92.304,7.696,2.34,0.900,bicubic\ntf_efficientnet_lite0.in1k,224,74.846,25.154,92.180,7.820,4.65,0.875,bicubic\nlegacy_seresnet34.in1k,224,74.788,25.212,92.134,7.866,21.96,0.875,bilinear\ndensenet121.tv_in1k,224,74.750,25.250,92.158,7.842,7.98,0.875,bicubic\nmnasnet_100.rmsp_in1k,224,74.672,25.328,92.108,7.892,4.38,0.875,bicubic\nstarnet_s2.in1k,224,74.666,25.334,92.144,7.856,3.68,0.875,bicubic\nmobilenetv4_conv_small.e2400_r224_in1k,256,74.644,25.356,92.022,7.978,3.77,0.950,bicubic\ndla34.in1k,224,74.632,25.368,92.052,7.948,15.74,0.875,bilinear\nmobilevit_xs.cvnets_in1k,256,74.618,25.382,92.350,7.650,2.32,0.900,bicubic\nregnetx_004_tv.tv2_in1k,224,74.596,25.404,92.140,7.860,5.50,0.965,bicubic\nresnet34.gluon_in1k,224,74.574,25.426,91.988,8.012,21.80,0.875,bicubic\ndeit_tiny_distilled_patch16_224.fb_in1k,224,74.538,25.462,91.898,8.102,5.91,0.900,bicubic\nmobilenetv4_conv_small.e3600_r256_in1k,256,74.534,25.466,91.940,8.060,3.77,0.950,bicubic\nrepvgg_a1.rvgg_in1k,224,74.474,25.526,91.852,8.148,14.09,0.875,bilinear\nresnetv2_18d.ra4_e3600_r224_in1k,224,74.422,25.578,91.928,8.072,11.71,0.900,bicubic\nresnet18d.ra4_e3600_r224_in1k,224,74.356,25.644,91.826,8.174,11.7
1,0.900,bicubic\nefficientvit_m4.r224_in1k,224,74.344,25.656,91.960,8.040,8.80,0.875,bicubic\nmobilenetv4_conv_small.e1200_r224_in1k,256,74.282,25.718,92.122,7.878,3.77,0.950,bicubic\npit_ti_distilled_224.in1k,224,74.276,25.724,91.920,8.080,5.10,0.900,bicubic\nvgg19_bn.tv_in1k,224,74.230,25.770,91.848,8.152,143.68,0.875,bilinear\nrepghostnet_100.in1k,224,74.208,25.792,91.550,8.450,4.07,0.875,bicubic\nspnasnet_100.rmsp_in1k,224,74.086,25.914,91.826,8.174,4.42,0.875,bilinear\nregnety_004.pycls_in1k,224,74.004,25.996,91.756,8.244,4.34,0.875,bicubic\ncrossvit_9_240.in1k,240,73.994,26.006,91.976,8.024,8.55,0.875,bicubic\nghostnet_100.in1k,224,73.958,26.042,91.542,8.458,5.18,0.875,bicubic\nhrnet_w18_small.gluon_in1k,224,73.934,26.066,91.178,8.822,13.19,0.875,bicubic\nregnetx_006.pycls_in1k,224,73.904,26.096,91.640,8.360,6.20,0.875,bicubic\nxcit_nano_12_p8_224.fb_in1k,224,73.900,26.100,92.154,7.846,3.05,1.000,bicubic\nresnet18d.ra2_in1k,288,73.790,26.210,91.834,8.166,11.71,0.950,bicubic\nmobilenetv4_conv_small.e2400_r224_in1k,224,73.756,26.244,91.430,8.570,3.77,0.875,bicubic\nvit_base_patch32_224.sam_in1k,224,73.704,26.296,91.012,8.988,88.22,0.900,bicubic\nresnetv2_18.ra4_e3600_r224_in1k,224,73.572,26.428,91.356,8.644,11.69,0.900,bicubic\nstarnet_s1.in1k,224,73.532,26.468,91.504,8.496,2.87,0.875,bicubic\nmobilenetv4_conv_small.e1200_r224_in1k,224,73.450,26.550,91.340,8.660,3.77,0.875,bicubic\ntf_mobilenetv3_large_075.in1k,224,73.442,26.558,91.332,8.668,3.99,0.875,bilinear\nefficientvit_m3.r224_in1k,224,73.394,26.606,91.336,8.664,6.90,0.875,bicubic\nvgg16_bn.tv_in1k,224,73.354,26.646,91.490,8.510,138.37,0.875,bilinear\ncrossvit_tiny_240.in1k,240,73.352,26.648,91.906,8.094,7.01,0.875,bicubic\nresnet34.tv_in1k,224,73.298,26.702,91.422,8.578,21.80,0.875,bilinear\nresnet18.fb_swsl_ig1b_ft_in1k,224,73.286,26.714,91.754,8.246,11.69,0.875,bilinear\nconvnext_zepto_rms_ols.ra4_e3600_r224_in1k,224,73.234,26.766,91.270,8.730,2.16,0.900,bicubic\nresnet18.a1_in1k,288,73.164,26.836,91.05
4,8.946,11.69,1.000,bicubic\ncs3darknet_focus_s.ra4_e3600_r256_in1k,320,73.158,26.842,91.458,8.542,3.27,1.000,bicubic\nconvit_tiny.fb_in1k,224,73.140,26.860,91.704,8.296,5.71,0.875,bicubic\nskresnet18.ra_in1k,224,73.020,26.980,91.172,8.828,11.96,0.875,bicubic\nsemnasnet_075.rmsp_in1k,224,73.000,27.000,91.124,8.876,2.91,0.875,bicubic\nresnet34.a3_in1k,224,72.996,27.004,91.116,8.884,21.80,0.950,bicubic\npit_ti_224.in1k,224,72.926,27.074,91.412,8.588,4.85,0.900,bicubic\nmobilenetv2_100.ra_in1k,224,72.910,27.090,91.000,9.000,3.50,0.875,bicubic\nconvnext_zepto_rms.ra4_e3600_r224_in1k,224,72.820,27.180,91.106,8.894,2.16,0.875,bicubic\nshvit_s1.in1k,224,72.780,27.220,91.024,8.976,6.33,0.875,bicubic\nresnet18.fb_ssl_yfcc100m_ft_in1k,224,72.636,27.364,91.416,8.584,11.69,0.875,bilinear\nregnetx_004.pycls_in1k,224,72.482,27.518,90.840,9.160,5.16,0.875,bicubic\nrepvgg_a0.rvgg_in1k,224,72.426,27.574,90.506,9.494,9.11,0.875,bilinear\nvgg19.tv_in1k,224,72.400,27.600,90.878,9.122,143.67,0.875,bilinear\nresnet18.a2_in1k,288,72.364,27.636,90.620,9.380,11.69,1.000,bicubic\nhrnet_w18_small.ms_in1k,224,72.340,27.660,90.688,9.312,13.19,0.875,bilinear\nxcit_nano_12_p16_224.fb_dist_in1k,224,72.318,27.682,90.826,9.174,3.05,1.000,bicubic\nresnet18d.ra2_in1k,224,72.296,27.704,90.682,9.318,11.71,0.875,bicubic\ntf_mobilenetv3_large_minimal_100.in1k,224,72.270,27.730,90.664,9.336,3.92,0.875,bilinear\nresnet14t.c3_in1k,224,72.242,27.758,90.318,9.682,10.08,0.950,bicubic\nrepghostnet_080.in1k,224,72.240,27.760,90.492,9.508,3.28,0.875,bicubic\ndeit_tiny_patch16_224.fb_in1k,224,72.190,27.810,91.100,8.900,5.72,0.900,bicubic\ncs3darknet_focus_s.ra4_e3600_r256_in1k,256,72.178,27.822,90.830,9.170,3.27,0.887,bicubic\nlcnet_100.ra2_in1k,224,72.130,27.870,90.376,9.624,2.95,0.875,bicubic\nmixer_l16_224.goog_in21k_ft_in1k,224,72.084,27.916,87.632,12.368,208.20,0.875,bicubic\nedgenext_xx_small.in1k,288,71.886,28.114,90.534,9.466,1.33,1.000,bicubic\nvit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k,224,71.800,28.200,9
0.810,9.190,6.34,0.900,bicubic\nlegacy_seresnet18.in1k,224,71.742,28.258,90.340,9.660,11.78,0.875,bicubic\nfasternet_t0.in1k,224,71.732,28.268,90.074,9.926,3.91,1.000,bicubic\nvgg16.tv_in1k,224,71.594,28.406,90.398,9.602,138.36,0.875,bilinear\nvgg13_bn.tv_in1k,224,71.560,28.440,90.372,9.628,133.05,0.875,bilinear\nresnet18.a1_in1k,224,71.504,28.496,90.086,9.914,11.69,0.950,bicubic\nefficientvit_b0.r224_in1k,224,71.408,28.592,89.434,10.566,3.41,0.950,bicubic\nmobileone_s0.apple_in1k,224,71.398,28.602,89.864,10.136,5.29,0.875,bilinear\nresnet14t.c3_in1k,176,71.318,28.682,89.680,10.320,10.08,0.875,bicubic\ntinynet_c.in1k,184,71.214,28.786,89.746,10.254,2.46,0.875,bicubic\nedgenext_xx_small.in1k,256,71.132,28.868,90.042,9.958,1.33,0.900,bicubic\nresnet18.gluon_in1k,224,70.844,29.156,89.750,10.250,11.69,0.875,bicubic\nefficientvit_m2.r224_in1k,224,70.796,29.204,90.152,9.848,4.19,0.875,bicubic\npvt_v2_b0.in1k,224,70.654,29.346,90.206,9.794,3.67,0.900,bicubic\nresnet18.a2_in1k,224,70.634,29.366,89.486,10.514,11.69,0.950,bicubic\nresnet34.a3_in1k,160,70.580,29.420,89.540,10.460,21.80,0.950,bicubic\nvgg11_bn.tv_in1k,224,70.374,29.626,89.802,10.198,132.87,0.875,bilinear\nregnety_002.pycls_in1k,224,70.288,29.712,89.544,10.456,3.16,0.875,bicubic\nmobilevitv2_050.cvnets_in1k,256,70.156,29.844,89.930,10.070,1.37,0.888,bicubic\nxcit_nano_12_p16_224.fb_in1k,224,69.982,30.018,89.780,10.220,3.05,1.000,bicubic\nvgg13.tv_in1k,224,69.950,30.050,89.262,10.738,133.05,0.875,bilinear\nresnet18.tv_in1k,224,69.752,30.248,89.082,10.918,11.69,0.875,bilinear\nvgg11.tv_in1k,224,69.050,30.950,88.630,11.370,132.86,0.875,bilinear\nrepghostnet_058.in1k,224,68.938,31.062,88.386,11.614,2.55,0.875,bicubic\nmobilevit_xxs.cvnets_in1k,256,68.936,31.064,88.942,11.058,1.27,0.900,bicubic\nlcnet_075.ra2_in1k,224,68.786,31.214,88.384,11.616,2.36,0.875,bicubic\nregnetx_002.pycls_in1k,224,68.758,31.242,88.568,11.432,2.68,0.875,bicubic\nresnet10t.c3_in1k,224,68.354,31.646,88.034,11.966,5.44,0.950,bicubic\nefficient
vit_m1.r224_in1k,224,68.340,31.660,88.692,11.308,2.98,0.875,bicubic\nresnet18.a3_in1k,224,68.250,31.750,88.172,11.828,11.69,0.950,bicubic\ndla60x_c.in1k,224,67.928,32.072,88.426,11.574,1.32,0.875,bilinear\ntf_mobilenetv3_small_100.in1k,224,67.922,32.078,87.686,12.314,2.54,0.875,bilinear\nmobilenetv3_small_100.lamb_in1k,224,67.636,32.364,87.638,12.362,2.54,0.875,bicubic\nrepghostnet_050.in1k,224,66.978,33.022,86.936,13.064,2.31,0.875,bicubic\ntinynet_d.in1k,152,66.936,33.064,87.078,12.922,2.34,0.875,bicubic\nresnet10t.c3_in1k,176,66.724,33.276,86.976,13.024,5.44,0.875,bicubic\nmnasnet_small.lamb_in1k,224,66.204,33.796,86.470,13.530,2.03,0.875,bicubic\ndla46x_c.in1k,224,66.008,33.992,86.946,13.054,1.07,0.875,bilinear\nmobilenetv2_050.lamb_in1k,224,65.928,34.072,86.114,13.886,1.97,0.875,bicubic\nmobilenetv4_conv_small_050.e3000_r224_in1k,256,65.826,34.174,86.416,13.584,2.24,0.950,bicubic\ntf_mobilenetv3_small_075.in1k,224,65.706,34.294,86.130,13.870,2.04,0.875,bilinear\nresnet18.a3_in1k,160,65.670,34.330,86.268,13.732,11.69,0.950,bicubic\nmobilenetv3_small_075.lamb_in1k,224,65.284,34.716,85.462,14.538,2.04,0.875,bicubic\ndla46_c.in1k,224,64.880,35.120,86.330,13.670,1.30,0.875,bilinear\nmobilenetv4_conv_small_050.e3000_r224_in1k,224,64.778,35.222,85.500,14.500,2.24,0.875,bicubic\nefficientvit_m0.r224_in1k,224,63.304,36.696,85.140,14.860,2.35,0.875,bicubic\nlcnet_050.ra2_in1k,224,63.138,36.862,84.402,15.598,1.88,0.875,bicubic\ntf_mobilenetv3_small_minimal_100.in1k,224,62.892,37.108,84.270,15.730,2.04,0.875,bilinear\ntinynet_e.in1k,106,59.874,40.126,81.774,18.226,2.04,0.875,bicubic\nmobilenetv3_small_050.lamb_in1k,224,57.918,42.082,80.144,19.856,1.59,0.875,bicubic\ntest_vit3.r160_in1k,160,56.908,43.092,80.748,19.252,0.93,0.950,bicubic\ntest_convnext2.r160_in1k,160,53.520,46.480,78.558,21.442,0.48,0.950,bicubic\ntest_convnext3.r160_in1k,160,53.302,46.698,78.326,21.674,0.47,0.950,bicubic\ntest_convnext.r160_in1k,160,47.774,52.226,74.166,25.834,0.27,0.950,bicubic\ntest_nfnet
.r160_in1k,160,47.634,52.366,72.880,27.120,0.38,0.950,bicubic\ntest_efficientnet.r160_in1k,160,46.506,53.494,71.036,28.964,0.36,0.950,bicubic\ntest_efficientnet_evos.r160_in1k,160,46.504,53.496,71.022,28.978,0.36,0.950,bicubic\ntest_byobnet.r160_in1k,160,45.858,54.142,71.004,28.996,0.46,0.950,bicubic\ntest_efficientnet_ln.r160_in1k,160,43.958,56.042,69.326,30.674,0.36,0.950,bicubic\ntest_efficientnet_gn.r160_in1k,160,43.888,56.112,69.156,30.844,0.36,0.950,bicubic\ntest_vit2.r160_in1k,160,42.250,57.750,68.982,31.018,0.46,0.950,bicubic\ntest_resnet.r160_in1k,160,41.602,58.398,67.984,32.016,0.47,0.950,bilinear\ntest_vit.r160_in1k,160,40.982,59.018,67.382,32.618,0.37,0.950,bicubic\n"
  },
  {
    "path": "results/results-imagenetv2-matched-frequency.csv",
    "content": "model,img_size,top1,top1_err,top5,top5_err,param_count,crop_pct,interpolation,top1_diff,top5_diff,rank_diff\neva02_large_patch14_448.mim_in22k_ft_in22k_in1k,448,82.770,17.230,96.450,3.550,305.08,1.000,bicubic,-7.186,-2.564,+1\neva02_large_patch14_448.mim_m38m_ft_in22k_in1k,448,82.710,17.290,96.550,3.450,305.08,1.000,bicubic,-7.346,-2.504,-1\neva02_large_patch14_448.mim_m38m_ft_in1k,448,82.400,17.600,96.200,3.800,305.08,1.000,bicubic,-7.150,-2.724,+3\neva02_large_patch14_448.mim_in22k_ft_in1k,448,82.220,17.780,96.320,3.680,305.08,1.000,bicubic,-7.414,-2.634,0\neva_giant_patch14_336.clip_ft_in1k,336,82.180,17.820,96.250,3.750,\"1,013.01\",1.000,bicubic,-7.282,-2.578,+2\nvit_so400m_patch14_siglip_378.webli_ft_in1k,378,81.870,18.130,96.230,3.770,429.38,1.000,bicubic,-7.540,-2.624,+2\neva_giant_patch14_560.m30m_ft_in22k_in1k,560,81.800,18.200,96.340,3.660,\"1,014.45\",1.000,bicubic,-7.990,-2.650,-4\neva_giant_patch14_336.m30m_ft_in22k_in1k,336,81.690,18.310,96.180,3.820,\"1,013.01\",1.000,bicubic,-7.876,-2.772,-3\neva_giant_patch14_224.clip_ft_in1k,224,81.680,18.320,96.090,3.910,\"1,012.56\",0.900,bicubic,-7.216,-2.582,+2\nvit_so400m_patch14_siglip_gap_378.webli_ft_in1k,378,81.620,18.380,96.270,3.730,414.14,1.000,bicubic,-7.436,-2.482,0\neva_large_patch14_336.in22k_ft_in1k,336,81.070,18.930,95.810,4.190,304.53,1.000,bicubic,-7.610,-2.904,+2\nbeit_large_patch16_512.in22k_ft_in22k_in1k,512,80.820,19.180,95.860,4.140,305.67,1.000,bicubic,-7.756,-2.796,+7\neva_large_patch14_336.in22k_ft_in22k_in1k,336,80.780,19.220,95.990,4.010,304.53,1.000,bicubic,-8.458,-2.858,-4\nconvnext_xxlarge.clip_laion2b_soup_ft_in1k,256,80.610,19.390,95.900,4.100,846.47,1.000,bicubic,-8.012,-2.818,+3\ntf_efficientnet_l2.ns_jft_in1k_475,475,80.520,19.480,95.630,4.370,480.31,0.936,bicubic,-7.726,-2.924,+14\nregnety_1280.swag_ft_in1k,384,80.500,19.500,96.110,3.890,644.81,1.000,bicubic,-7.728,-2.574,+15\nvit_large_patch14_clip_336.openai_ft_in12k_in1k,336,80.480,19.520,95.520,4.480,304.5
3,1.000,bicubic,-7.780,-3.004,+11\nbeitv2_large_patch16_224.in1k_ft_in22k_in1k,224,80.410,19.590,95.210,4.790,304.43,0.950,bicubic,-7.996,-3.392,+3\nbeit_large_patch16_384.in22k_ft_in22k_in1k,384,80.270,19.730,95.600,4.400,305.00,1.000,bicubic,-8.110,-3.004,+3\ncaformer_b36.sail_in22k_ft_in1k_384,384,80.190,19.810,95.470,4.530,98.75,1.000,bicubic,-7.868,-3.120,+16\neva_large_patch14_196.in22k_ft_in22k_in1k,196,80.190,19.810,95.420,4.580,304.14,1.000,bicubic,-8.400,-3.242,-2\neva_large_patch14_196.in22k_ft_in1k,196,80.160,19.840,95.380,4.620,304.14,1.000,bicubic,-7.792,-3.110,+18\ntf_efficientnet_l2.ns_jft_in1k,800,80.130,19.870,95.850,4.150,480.31,0.960,bicubic,-8.230,-2.806,0\neva02_base_patch14_448.mim_in22k_ft_in22k_in1k,448,80.050,19.950,95.520,4.480,87.12,1.000,bicubic,-8.628,-3.206,-10\nmaxvit_large_tf_512.in21k_ft_in1k,512,80.040,19.960,95.160,4.840,212.33,1.000,bicubic,-8.196,-3.448,+5\nmaxvit_base_tf_512.in21k_ft_in1k,512,79.960,20.040,95.390,4.610,119.88,1.000,bicubic,-8.254,-3.148,+6\nmaxvit_xlarge_tf_512.in21k_ft_in1k,512,79.820,20.180,95.410,4.590,475.77,1.000,bicubic,-8.720,-3.240,-7\nconvnextv2_huge.fcmae_ft_in22k_in1k_384,384,79.730,20.270,95.580,4.420,660.29,1.000,bicubic,-8.936,-3.154,-13\nvit_large_patch14_clip_224.openai_ft_in1k,224,79.670,20.330,95.000,5.000,304.20,1.000,bicubic,-8.188,-3.430,+17\nmaxvit_xlarge_tf_384.in21k_ft_in1k,384,79.620,20.380,95.060,4.940,475.32,1.000,bicubic,-8.682,-3.478,-5\nconvnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_384,384,79.610,20.390,95.260,4.740,200.13,1.000,bicubic,-8.724,-3.314,-7\nconvnext_large_mlp.clip_laion2b_augreg_ft_in1k_384,384,79.580,20.420,95.250,4.750,200.13,1.000,bicubic,-8.310,-3.198,+12\nmaxvit_large_tf_384.in21k_ft_in1k,384,79.550,20.450,95.050,4.950,212.03,1.000,bicubic,-8.444,-3.526,+5\nvit_huge_patch14_clip_336.laion2b_ft_in12k_in1k,336,79.520,20.480,94.980,5.020,632.46,1.000,bicubic,-9.114,-3.688,-18\neva02_base_patch14_448.mim_in22k_ft_in1k,448,79.500,20.500,95.180,4.820,87.12,1.000,b
icubic,-8.762,-3.390,-9\nbeit3_large_patch16_224.indomain_in22k_ft_in1k,224,79.500,20.500,95.010,4.990,304.57,1.000,bicubic,-8.034,-3.354,+19\nconvnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_320,320,79.500,20.500,94.990,5.010,200.13,1.000,bicubic,-8.470,-3.484,+2\nvit_huge_patch14_clip_224.laion2b_ft_in1k,224,79.500,20.500,94.910,5.090,632.05,1.000,bicubic,-8.112,-3.316,+14\nconvnextv2_huge.fcmae_ft_in22k_in1k_512,512,79.420,20.580,95.690,4.310,660.29,1.000,bicubic,-9.440,-3.050,-27\nbeit3_large_patch16_224.in22k_ft_in1k,224,79.400,20.600,95.010,4.990,304.57,1.000,bicubic,-8.228,-3.324,+11\nbeit_large_patch16_224.in22k_ft_in22k_in1k,224,79.360,20.640,94.930,5.070,304.43,0.900,bicubic,-8.126,-3.388,+16\nmaxvit_base_tf_384.in21k_ft_in1k,384,79.330,20.670,95.070,4.930,119.65,1.000,bicubic,-8.600,-3.476,0\nvit_huge_patch14_clip_224.laion2b_ft_in12k_in1k,224,79.270,20.730,95.090,4.910,632.05,1.000,bicubic,-9.018,-3.460,-17\nconvformer_b36.sail_in22k_ft_in1k_384,384,79.260,20.740,94.870,5.130,99.88,1.000,bicubic,-8.346,-3.554,+9\nvit_large_patch14_clip_336.laion2b_ft_in1k,336,79.250,20.750,95.010,4.990,304.53,1.000,bicubic,-8.620,-3.350,0\ndeit3_large_patch16_384.fb_in22k_ft_in1k,384,79.220,20.780,95.060,4.940,304.76,1.000,bicubic,-8.516,-3.448,+3\ndeit3_huge_patch14_224.fb_in22k_ft_in1k,224,79.210,20.790,94.880,5.120,632.13,1.000,bicubic,-7.970,-3.384,+26\nmaxvit_rmlp_base_rw_384.sw_in12k_ft_in1k,384,79.210,20.790,94.680,5.320,116.14,1.000,bicubic,-8.600,-3.686,0\nvit_so150m2_patch16_reg1_gap_448.sbb_e200_in12k_ft_in1k,448,79.180,20.820,95.190,4.810,136.50,1.000,bicubic,-8.894,-3.368,-13\nvit_large_patch14_clip_224.openai_ft_in12k_in1k,224,79.160,20.840,94.910,5.090,304.20,1.000,bicubic,-9.002,-3.628,-15\ncaformer_m36.sail_in22k_ft_in1k_384,384,79.110,20.890,94.780,5.220,56.20,1.000,bicubic,-8.360,-3.528,+9\nvit_so150m2_patch16_reg1_gap_384.sbb_e200_in12k_ft_in1k,384,79.070,20.930,95.170,4.830,136.33,1.000,bicubic,-8.862,-3.330,-11\ncaformer_b36.sail_in22k_ft_in1k,224
,78.920,21.080,94.680,5.320,98.75,1.000,bicubic,-8.530,-3.656,+9\nvit_large_patch14_clip_336.laion2b_ft_in12k_in1k,336,78.830,21.170,94.860,5.140,304.53,1.000,bicubic,-9.360,-3.706,-21\nconvnext_xlarge.fb_in22k_ft_in1k_384,384,78.810,21.190,95.110,4.890,350.20,1.000,bicubic,-8.954,-3.442,-7\nconvnext_large.fb_in22k_ft_in1k_384,384,78.700,21.300,94.870,5.130,197.77,1.000,bicubic,-8.756,-3.510,+5\nhiera_huge_224.mae_in1k_ft_in1k,224,78.690,21.310,93.850,6.150,672.78,0.900,bicubic,-8.150,-4.158,+36\nconvnext_large_mlp.clip_laion2b_augreg_ft_in1k,256,78.670,21.330,94.530,5.470,200.13,1.000,bicubic,-8.674,-3.692,+10\ndeit3_large_patch16_224.fb_in22k_ft_in1k,224,78.640,21.360,94.740,5.260,304.37,1.000,bicubic,-8.350,-3.506,+25\nbeitv2_large_patch16_224.in1k_ft_in1k,224,78.600,21.400,94.190,5.810,304.43,0.950,bicubic,-8.814,-4.026,+4\nvit_large_patch14_clip_224.laion2b_ft_in12k_in1k,224,78.510,21.490,94.680,5.320,304.20,1.000,bicubic,-9.390,-3.728,-18\ncoatnet_rmlp_2_rw_384.sw_in12k_ft_in1k,384,78.490,21.510,94.340,5.660,73.88,1.000,bicubic,-8.898,-3.972,+3\nvit_so150m2_patch16_reg1_gap_256.sbb_e200_in12k_ft_in1k,256,78.480,21.520,94.510,5.490,136.06,1.000,bicubic,-8.838,-3.818,+6\nvit_large_patch14_clip_224.laion2b_ft_in1k,224,78.410,21.590,94.590,5.410,304.20,1.000,bicubic,-8.878,-3.646,+7\nswinv2_base_window12to24_192to384.ms_in22k_ft_in1k,384,78.370,21.630,94.720,5.280,87.92,1.000,bicubic,-8.772,-3.508,+11\ncaformer_s36.sail_in22k_ft_in1k_384,384,78.370,21.630,94.280,5.720,39.30,1.000,bicubic,-8.490,-3.940,+26\nswin_large_patch4_window12_384.ms_in22k_ft_in1k,384,78.340,21.660,94.360,5.640,196.74,1.000,bicubic,-8.802,-3.868,+9\nswinv2_large_window12to24_192to384.ms_in22k_ft_in1k,384,78.310,21.690,94.640,5.360,196.74,1.000,bicubic,-9.164,-3.624,-9\nconvnext_xlarge.fb_in22k_ft_in1k,288,78.280,21.720,94.530,5.470,350.20,1.000,bicubic,-9.098,-3.800,-3\nregnety_320.swag_ft_in1k,384,78.250,21.750,95.130,4.870,145.05,1.000,bicubic,-8.580,-3.232,+24\nconvformer_m36.sail_in22k_f
t_in1k_384,384,78.240,21.760,94.310,5.690,57.05,1.000,bicubic,-8.628,-3.812,+19\nvit_mediumd_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k,384,78.190,21.810,94.230,5.770,64.27,1.000,bicubic,-9.248,-4.026,-9\nvit_so150m_patch16_reg4_gap_384.sbb_e250_in12k_ft_in1k,384,78.180,21.820,94.780,5.220,134.42,1.000,bicubic,-9.190,-3.534,-6\nmambaout_base_plus_rw.sw_e150_r384_in12k_ft_in1k,384,78.150,21.850,94.690,5.310,101.66,1.000,bicubic,-9.358,-3.738,-20\nmaxxvitv2_rmlp_base_rw_384.sw_in12k_ft_in1k,384,78.150,21.850,94.410,5.590,116.09,1.000,bicubic,-9.330,-3.966,-16\nconvformer_b36.sail_in22k_ft_in1k,224,78.150,21.850,94.190,5.810,99.88,1.000,bicubic,-8.842,-3.974,+7\nvolo_d5_512.sail_in1k,512,78.120,21.880,94.220,5.780,296.09,1.150,bicubic,-8.948,-3.750,+2\nconvnext_large.fb_in22k_ft_in1k,288,78.070,21.930,94.380,5.620,197.77,1.000,bicubic,-8.942,-3.832,+3\nconvnextv2_huge.fcmae_ft_in1k,288,78.050,21.950,94.040,5.960,660.29,1.000,bicubic,-8.568,-3.970,+29\ndeit3_base_patch16_384.fb_in22k_ft_in1k,384,77.980,22.020,94.200,5.800,86.88,1.000,bicubic,-8.762,-3.910,+21\ntf_efficientnet_b7.ns_jft_in1k,600,77.940,22.060,94.330,5.670,66.35,0.949,bicubic,-8.910,-3.750,+11\nmaxvit_rmlp_base_rw_224.sw_in12k_ft_in1k,224,77.930,22.070,94.040,5.960,116.14,0.950,bicubic,-8.978,-3.964,+7\nvolo_d5_448.sail_in1k,448,77.910,22.090,94.140,5.860,295.91,1.150,bicubic,-9.064,-3.788,+1\nswinv2_large_window12to16_192to256.ms_in22k_ft_in1k,256,77.910,22.090,93.990,6.010,196.74,0.900,bicubic,-9.024,-4.116,+4\nconvnext_xlarge.fb_in22k_ft_in1k,224,77.890,22.110,94.000,6.000,350.20,0.875,bicubic,-9.080,-4.204,+1\nconvnextv2_large.fcmae_ft_in22k_in1k_384,384,77.880,22.120,94.600,5.400,197.96,1.000,bicubic,-10.300,-3.920,-52\ncaformer_m36.sail_in22k_ft_in1k,224,77.860,22.140,93.960,6.040,56.20,1.000,bicubic,-8.746,-4.078,+24\nbeit_base_patch16_384.in22k_ft_in22k_in1k,384,77.790,22.210,94.320,5.680,86.74,1.000,bicubic,-9.024,-3.816,+7\nvit_large_patch16_384.augreg_in21k_ft_in1k,384,77.790,22.210,94.310,5.
690,304.72,1.000,bicubic,-9.306,-3.992,-10\nconvnext_base.fb_in22k_ft_in1k_384,384,77.790,22.210,94.170,5.830,88.59,1.000,bicubic,-9.036,-4.074,+5\nnextvit_base.bd_ssld_6m_in1k_384,384,77.770,22.230,94.170,5.830,44.82,1.000,bicubic,-8.602,-3.870,+40\nvolo_d4_448.sail_in1k,448,77.750,22.250,93.920,6.080,193.41,1.150,bicubic,-9.038,-3.960,+6\nnextvit_large.bd_ssld_6m_in1k_384,384,77.730,22.270,94.360,5.640,57.87,1.000,bicubic,-8.814,-3.766,+21\nmambaout_base_plus_rw.sw_e150_in12k_ft_in1k,224,77.670,22.330,94.020,5.980,101.66,1.000,bicubic,-8.972,-4.130,+10\nseresnextaa201d_32x8d.sw_in12k_ft_in1k_384,384,77.650,22.350,94.070,5.930,149.39,1.000,bicubic,-9.654,-4.264,-25\nconvnext_base.clip_laion2b_augreg_ft_in12k_in1k_384,384,77.630,22.370,94.330,5.670,88.59,1.000,bicubic,-9.514,-3.898,-21\nmambaout_base_plus_rw.sw_e150_in12k_ft_in1k,288,77.610,22.390,94.250,5.750,101.66,1.000,bicubic,-9.306,-3.988,-10\ntf_efficientnetv2_xl.in21k_ft_in1k,512,77.610,22.390,93.790,6.210,208.12,1.000,bicubic,-9.144,-4.224,+2\nseresnextaa101d_32x8d.sw_in12k_ft_in1k_288,320,77.590,22.410,94.050,5.950,93.59,1.000,bicubic,-9.132,-4.134,+3\nregnety_1280.swag_lc_in1k,224,77.560,22.440,94.460,5.540,644.81,0.965,bicubic,-8.404,-3.392,+69\nconvnext_large.fb_in22k_ft_in1k,224,77.560,22.440,93.970,6.030,197.77,0.875,bicubic,-9.052,-4.066,+9\nconvnextv2_base.fcmae_ft_in22k_in1k_384,384,77.510,22.490,94.350,5.650,88.72,1.000,bicubic,-10.128,-4.066,-52\nvit_so150m_patch16_reg4_gap_256.sbb_e250_in12k_ft_in1k,256,77.500,22.500,93.980,6.020,134.13,0.950,bicubic,-9.220,-4.044,0\ncaformer_b36.sail_in1k_384,384,77.480,22.520,93.540,6.460,98.75,1.000,bicubic,-8.920,-4.284,+22\nconvformer_m36.sail_in22k_ft_in1k,224,77.470,22.530,93.810,6.190,57.05,1.000,bicubic,-8.682,-4.042,+46\nmaxvit_base_tf_512.in1k,512,77.460,22.540,93.900,6.100,119.88,1.000,bicubic,-9.138,-4.024,+6\nseresnextaa101d_32x8d.sw_in12k_ft_in1k_288,288,77.460,22.540,93.820,6.180,93.59,0.950,bicubic,-9.070,-4.274,+10\nconvnext_base.clip_laiona_au
greg_ft_in1k_384,384,77.450,22.550,93.920,6.080,88.59,1.000,bicubic,-9.040,-4.040,+13\ncoatnet_rmlp_2_rw_224.sw_in12k_ft_in1k,224,77.410,22.590,93.540,6.460,73.88,0.950,bicubic,-9.128,-4.354,+6\nefficientvit_l3.r384_in1k,384,77.410,22.590,93.360,6.640,246.04,1.000,bicubic,-8.986,-4.280,+17\nswin_base_patch4_window12_384.ms_in22k_ft_in1k,384,77.380,22.620,94.320,5.680,87.90,1.000,bicubic,-9.062,-3.748,+12\ntf_efficientnetv2_l.in21k_ft_in1k,480,77.380,22.620,94.220,5.780,118.52,1.000,bicubic,-9.422,-3.904,-14\nvit_betwixt_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k,384,77.380,22.620,93.690,6.310,60.60,1.000,bicubic,-9.238,-4.278,-5\nseresnextaa101d_32x8d.sw_in12k_ft_in1k,288,77.330,22.670,93.810,6.190,93.59,1.000,bicubic,-9.194,-4.224,+4\nhgnetv2_b6.ssld_stage1_in22k_in1k,288,77.320,22.680,94.230,5.770,75.26,1.000,bicubic,-8.980,-3.712,+20\nhgnetv2_b6.ssld_stage2_ft_in1k,288,77.310,22.690,94.250,5.750,75.26,1.000,bicubic,-9.070,-3.692,+12\nconvnextv2_large.fcmae_ft_in22k_in1k,288,77.280,22.720,94.230,5.770,197.96,1.000,bicubic,-10.214,-4.130,-61\nbeitv2_base_patch16_224.in1k_ft_in22k_in1k,224,77.270,22.730,94.030,5.970,86.53,0.900,bicubic,-9.224,-4.026,+1\nswinv2_base_window12to16_192to256.ms_in22k_ft_in1k,256,77.270,22.730,94.020,5.980,87.92,0.900,bicubic,-9.000,-3.886,+19\nefficientvit_l3.r320_in1k,320,77.270,22.730,93.180,6.820,246.04,1.000,bicubic,-8.960,-4.296,+21\ncaformer_m36.sail_in1k_384,384,77.250,22.750,93.630,6.370,56.20,1.000,bicubic,-8.914,-4.182,+27\nconvnextv2_huge.fcmae_ft_in1k,224,77.250,22.750,93.580,6.420,660.29,0.875,bicubic,-9.010,-4.172,+17\nmaxvit_large_tf_512.in1k,512,77.230,22.770,93.700,6.300,212.33,1.000,bicubic,-9.304,-4.184,-8\nvit_base_patch16_clip_384.laion2b_ft_in12k_in1k,384,77.230,22.770,93.690,6.310,86.86,1.000,bicubic,-9.980,-4.342,-50\nconvnextv2_large.fcmae_ft_in22k_in1k,224,77.220,22.780,93.960,6.040,197.96,0.875,bicubic,-10.046,-4.280,-53\nvolo_d3_448.sail_in1k,448,77.190,22.810,94.040,5.960,86.63,1.000,bicubic,-9.324,-3.670,-
7\nswin_large_patch4_window7_224.ms_in22k_ft_in1k,224,77.190,22.810,93.770,6.230,196.53,0.900,bicubic,-9.130,-4.120,+6\nvit_mediumd_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k,256,77.180,22.820,93.760,6.240,64.11,0.950,bicubic,-9.428,-4.174,-18\nregnety_160.swag_ft_in1k,384,77.170,22.830,94.570,5.430,83.59,1.000,bicubic,-8.850,-3.476,+34\nvit_mediumd_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,77.170,22.830,93.600,6.400,64.11,0.950,bicubic,-9.028,-4.288,+16\ntf_efficientnet_b6.ns_jft_in1k,528,77.160,22.840,93.840,6.160,43.04,0.942,bicubic,-9.316,-4.024,-8\nconvnextv2_base.fcmae_ft_in22k_in1k,288,77.150,22.850,94.080,5.920,88.72,1.000,bicubic,-9.844,-4.088,-50\ncoatnet_2_rw_224.sw_in12k_ft_in1k,224,77.100,22.900,93.610,6.390,73.87,0.950,bicubic,-9.482,-4.286,-20\nvit_base_patch16_clip_384.laion2b_ft_in1k,384,77.080,22.920,93.870,6.130,86.86,1.000,bicubic,-9.558,-4.138,-28\nconvnextv2_base.fcmae_ft_in22k_in1k,224,77.030,22.970,93.590,6.410,88.72,0.875,bicubic,-9.722,-4.430,-35\nvit_base_patch16_clip_384.openai_ft_in12k_in1k,384,77.000,23.000,93.760,6.240,86.86,0.950,bicubic,-10.030,-4.426,-56\nhgnetv2_b6.ssld_stage2_ft_in1k,224,76.980,23.020,93.890,6.110,75.26,0.965,bicubic,-9.226,-3.920,+6\nconvnext_base.fb_in22k_ft_in1k,288,76.960,23.040,93.960,6.040,88.59,1.000,bicubic,-9.342,-4.128,-4\nvit_base_patch16_clip_384.openai_ft_in1k,384,76.930,23.070,93.680,6.320,86.86,1.000,bicubic,-9.280,-4.214,+3\nvit_large_r50_s32_384.augreg_in21k_ft_in1k,384,76.890,23.110,93.680,6.320,329.09,1.000,bicubic,-9.298,-4.248,+5\nmaxxvitv2_rmlp_base_rw_224.sw_in12k_ft_in1k,224,76.890,23.110,93.560,6.440,116.09,0.950,bicubic,-9.750,-4.454,-35\nhiera_large_224.mae_in1k_ft_in1k,224,76.880,23.120,93.190,6.810,213.74,0.900,bicubic,-9.166,-4.456,+19\nmaxvit_large_tf_384.in1k,384,76.840,23.160,93.420,6.580,212.03,1.000,bicubic,-9.402,-4.264,-4\ncait_m48_448.fb_dist_in1k,448,76.840,23.160,93.350,6.650,356.46,1.000,bicubic,-9.640,-4.398,-21\nvit_base_patch8_224.augreg2_in21k_ft_in1k,224,76.830,23.1
70,93.640,6.360,86.58,0.900,bicubic,-9.422,-4.200,-6\nconvformer_s36.sail_in22k_ft_in1k_384,384,76.810,23.190,94.110,5.890,40.01,1.000,bicubic,-9.564,-3.870,-16\nhgnetv2_b6.ssld_stage1_in22k_in1k,224,76.780,23.220,93.860,6.140,75.26,0.965,bicubic,-9.272,-3.946,+13\nhgnet_base.ssld_in1k,288,76.780,23.220,93.500,6.500,71.58,1.000,bicubic,-8.708,-4.130,+66\ntf_efficientnetv2_m.in21k_ft_in1k,480,76.760,23.240,93.590,6.410,54.14,1.000,bicubic,-9.254,-4.366,+16\nregnety_160.sw_in12k_ft_in1k,288,76.750,23.250,93.760,6.240,83.59,1.000,bicubic,-9.278,-4.076,+12\nxcit_large_24_p8_384.fb_dist_in1k,384,76.750,23.250,93.100,6.900,188.93,1.000,bicubic,-9.274,-4.590,+12\ncaformer_s36.sail_in22k_ft_in1k,224,76.740,23.260,93.560,6.440,39.30,1.000,bicubic,-9.040,-4.272,+34\ndeit3_large_patch16_384.fb_in1k,384,76.730,23.270,93.340,6.660,304.76,1.000,bicubic,-9.090,-4.262,+29\nresnext101_32x32d.fb_wsl_ig1b_ft_in1k,224,76.700,23.300,93.200,6.800,468.53,0.875,bilinear,-8.390,-4.242,+107\ntf_efficientnet_b5.ns_jft_in1k,456,76.670,23.330,93.500,6.500,30.39,0.934,bicubic,-9.426,-4.244,0\nconvnext_base.clip_laion2b_augreg_ft_in12k_in1k,256,76.660,23.340,94.010,5.990,88.59,1.000,bicubic,-9.718,-3.966,-27\nconvnextv2_large.fcmae_ft_in1k,288,76.650,23.350,93.600,6.400,197.96,1.000,bicubic,-9.482,-4.220,-5\nefficientvit_l2.r384_in1k,384,76.630,23.370,93.000,7.000,63.71,1.000,bicubic,-9.354,-4.510,+10\nvolo_d5_224.sail_in1k,224,76.600,23.400,93.360,6.640,295.46,0.960,bicubic,-9.476,-4.214,-2\nmaxvit_base_tf_384.in1k,384,76.580,23.420,93.480,6.520,119.65,1.000,bicubic,-9.710,-4.322,-24\nregnety_160.lion_in12k_ft_in1k,288,76.570,23.430,93.750,6.250,83.59,1.000,bicubic,-9.434,-4.084,+6\ntiny_vit_21m_512.dist_in22k_ft_in1k,512,76.560,23.440,93.460,6.540,21.27,1.000,bicubic,-9.884,-4.424,-38\ndeit3_base_patch16_224.fb_in22k_ft_in1k,224,76.530,23.470,93.590,6.410,86.59,1.000,bicubic,-9.198,-4.156,+30\nrdnet_large.nv_in1k_ft_in1k_384,384,76.530,23.470,93.080,6.920,186.27,1.000,bicubic,-9.314,-4.600,+15\
nmaxvit_small_tf_512.in1k,512,76.520,23.480,93.210,6.790,69.13,1.000,bicubic,-9.582,-4.478,-12\nvit_betwixt_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k,256,76.490,23.510,92.890,7.110,60.40,0.950,bicubic,-9.244,-4.720,+27\nbeitv2_base_patch16_224.in1k_ft_in1k,224,76.470,23.530,92.900,7.100,86.53,0.900,bicubic,-9.126,-4.614,+36\nnextvit_small.bd_ssld_6m_in1k_384,384,76.450,23.550,93.870,6.130,31.76,1.000,bicubic,-9.516,-4.034,-1\nseresnextaa101d_32x8d.sw_in12k_ft_in1k,224,76.450,23.550,93.410,6.590,93.59,0.875,bicubic,-9.506,-4.414,+2\nefficientvit_l3.r256_in1k,256,76.450,23.550,92.620,7.380,246.04,1.000,bicubic,-9.502,-4.720,+5\nconvnext_base.clip_laion2b_augreg_ft_in1k,256,76.420,23.580,93.580,6.420,88.59,1.000,bicubic,-9.764,-4.094,-23\nconvformer_m36.sail_in1k_384,384,76.410,23.590,93.100,6.900,57.05,1.000,bicubic,-9.170,-4.472,+33\ncaformer_s36.sail_in1k_384,384,76.380,23.620,93.420,6.580,39.30,1.000,bicubic,-9.382,-4.246,+17\ncait_m36_384.fb_dist_in1k,384,76.370,23.630,93.040,6.960,271.22,1.000,bicubic,-9.690,-4.692,-16\nconvnext_small.in12k_ft_in1k_384,384,76.350,23.650,93.360,6.640,50.22,1.000,bicubic,-9.836,-4.560,-28\nxcit_medium_24_p8_384.fb_dist_in1k,384,76.300,23.700,92.990,7.010,84.32,1.000,bicubic,-9.542,-4.612,+3\ntiny_vit_21m_384.dist_in22k_ft_in1k,384,76.290,23.710,93.020,6.980,21.23,1.000,bicubic,-9.798,-4.696,-22\nefficientvit_l2.r288_in1k,288,76.290,23.710,92.810,7.190,63.71,1.000,bicubic,-9.324,-4.554,+23\nnextvit_large.bd_ssld_6m_in1k,224,76.270,23.730,93.340,6.660,57.87,0.950,bicubic,-9.244,-4.148,+31\nbeit3_base_patch16_224.indomain_in22k_ft_in1k,224,76.250,23.750,93.020,6.980,86.66,1.000,bicubic,-9.204,-4.600,+37\nvolo_d2_384.sail_in1k,384,76.240,23.760,93.170,6.830,58.87,1.000,bicubic,-9.814,-4.412,-22\nconvformer_b36.sail_in1k_384,384,76.240,23.760,92.750,7.250,99.88,1.000,bicubic,-9.530,-4.772,+7\nhgnetv2_b5.ssld_stage1_in22k_in1k,288,76.210,23.790,93.360,6.640,39.57,1.000,bicubic,-8.720,-3.930,+101\nvit_large_patch16_224.augreg_in21k_ft_
in1k,224,76.180,23.820,93.480,6.520,304.33,0.900,bicubic,-9.676,-4.342,-9\nmaxvit_tiny_tf_512.in1k,512,76.180,23.820,93.060,6.940,31.05,1.000,bicubic,-9.478,-4.532,+14\nconvnext_small.fb_in22k_ft_in1k_384,384,76.170,23.830,93.590,6.410,50.22,1.000,bicubic,-9.582,-4.288,+6\nvit_base_patch16_384.augreg_in21k_ft_in1k,384,76.160,23.840,93.610,6.390,86.86,1.000,bicubic,-9.850,-4.388,-24\nconvnext_base.fb_in22k_ft_in1k,224,76.160,23.840,93.350,6.650,88.59,0.875,bicubic,-9.654,-4.506,-4\ntf_efficientnetv2_l.in21k_ft_in1k,384,76.160,23.840,93.350,6.650,118.52,1.000,bicubic,-9.686,-4.416,-12\ndm_nfnet_f6.dm_in1k,576,76.160,23.840,93.170,6.830,438.36,0.956,bicubic,-10.204,-4.730,-56\nregnety_160.lion_in12k_ft_in1k,224,76.130,23.870,93.300,6.700,83.59,0.950,bicubic,-9.472,-4.368,+10\nvolo_d4_224.sail_in1k,224,76.130,23.870,93.080,6.920,192.96,0.960,bicubic,-9.738,-4.384,-17\nconvnextv2_large.fcmae_ft_in1k,224,76.090,23.910,93.140,6.860,197.96,0.875,bicubic,-9.672,-4.434,-3\nvit_base_patch16_clip_224.openai_ft_in1k,224,76.090,23.910,93.000,7.000,86.57,0.900,bicubic,-9.192,-4.448,+39\nvit_base_patch16_clip_224.laion2b_ft_in12k_in1k,224,76.090,23.910,92.930,7.070,86.57,0.950,bicubic,-10.114,-4.832,-49\ncaformer_s18.sail_in22k_ft_in1k_384,384,76.070,23.930,93.660,6.340,26.34,1.000,bicubic,-9.372,-4.058,+22\nxcit_large_24_p8_224.fb_dist_in1k,224,76.060,23.940,92.710,7.290,188.93,1.000,bicubic,-9.336,-4.708,+29\nregnety_160.sw_in12k_ft_in1k,224,76.050,23.950,93.050,6.950,83.59,0.950,bicubic,-9.546,-4.618,+4\nmaxvit_small_tf_384.in1k,384,76.050,23.950,92.570,7.430,69.02,1.000,bicubic,-9.484,-4.896,+9\nflexivit_large.1200ep_in1k,240,76.040,23.960,93.010,6.990,304.36,0.950,bicubic,-9.606,-4.536,-1\nflexivit_large.600ep_in1k,240,76.020,23.980,92.790,7.210,304.36,0.950,bicubic,-9.510,-4.704,+8\ncait_s36_384.fb_dist_in1k,384,76.010,23.990,92.920,7.080,68.37,1.000,bicubic,-9.448,-4.560,+14\ntf_efficientnetv2_l.in1k,480,76.000,24.000,93.040,6.960,118.52,1.000,bicubic,-9.660,-4.432,-6\ntf_ef
ficientnet_b7.ap_in1k,600,75.970,24.030,92.910,7.090,66.35,0.949,bicubic,-9.162,-4.346,+51\ntf_efficientnet_b8.ap_in1k,672,75.970,24.030,92.640,7.360,87.41,0.954,bicubic,-9.388,-4.658,+25\nefficientvit_l3.r224_in1k,224,75.960,24.040,92.470,7.530,246.04,1.000,bicubic,-9.850,-4.740,-22\nconvformer_s36.sail_in1k_384,384,75.940,24.060,93.070,6.930,40.01,1.000,bicubic,-9.440,-4.398,+21\ndm_nfnet_f6.dm_in1k,448,75.880,24.120,92.980,7.020,438.36,0.956,bicubic,-10.276,-4.782,-59\ntf_efficientnetv2_xl.in21k_ft_in1k,384,75.880,24.120,92.820,7.180,208.12,1.000,bicubic,-9.682,-4.636,-2\nhgnet_base.ssld_in1k,224,75.880,24.120,92.780,7.220,71.58,0.965,bicubic,-9.028,-4.562,+79\nhgnetv2_b5.ssld_stage2_ft_in1k,288,75.870,24.130,93.530,6.470,39.57,1.000,bicubic,-9.288,-4.058,+39\nefficientvit_l2.r256_in1k,256,75.870,24.130,92.480,7.520,63.71,1.000,bicubic,-9.510,-4.776,+17\ndm_nfnet_f4.dm_in1k,512,75.850,24.150,92.990,7.010,316.07,0.951,bicubic,-9.988,-4.786,-32\nvit_base_patch8_224.augreg_in21k_ft_in1k,224,75.830,24.170,93.370,6.630,86.58,0.900,bicubic,-10.008,-4.282,-35\nxcit_large_24_p16_384.fb_dist_in1k,384,75.830,24.170,92.780,7.220,189.10,1.000,bicubic,-9.948,-4.750,-29\neva02_small_patch14_336.mim_in22k_ft_in1k,336,75.830,24.170,92.760,7.240,22.13,1.000,bicubic,-9.940,-4.856,-26\nflexivit_large.300ep_in1k,240,75.820,24.180,92.660,7.340,304.36,0.950,bicubic,-9.464,-4.744,+16\nxcit_small_24_p8_384.fb_dist_in1k,384,75.810,24.190,92.970,7.030,47.63,1.000,bicubic,-9.770,-4.578,-14\ndeit3_huge_patch14_224.fb_in1k,224,75.800,24.200,92.730,7.270,632.13,0.900,bicubic,-9.404,-4.456,+24\nvit_base_patch16_clip_224.laion2b_ft_in1k,224,75.790,24.210,92.860,7.140,86.57,1.000,bicubic,-9.698,-4.726,-5\nconvformer_s36.sail_in22k_ft_in1k,224,75.770,24.230,93.330,6.670,40.01,1.000,bicubic,-9.650,-4.244,+3\nefficientnet_b5.sw_in12k_ft_in1k,448,75.760,24.240,93.130,6.870,30.39,1.000,bicubic,-10.134,-4.612,-48\nvit_base_patch16_clip_224.openai_ft_in12k_in1k,224,75.730,24.270,92.950,7.050,86.57,0.95
0,bicubic,-10.224,-4.774,-51\nvit_medium_patch16_gap_384.sw_in12k_ft_in1k,384,75.710,24.290,92.950,7.050,39.03,0.950,bicubic,-9.846,-4.688,-17\nswin_base_patch4_window7_224.ms_in22k_ft_in1k,224,75.700,24.300,93.210,6.790,87.77,0.900,bicubic,-9.576,-4.468,+11\ncaformer_b36.sail_in1k,224,75.700,24.300,92.680,7.320,98.75,1.000,bicubic,-9.802,-4.636,-14\nbeit_base_patch16_224.in22k_ft_in22k_in1k,224,75.690,24.310,93.430,6.570,86.53,0.900,bicubic,-9.548,-4.224,+13\nvolo_d1_384.sail_in1k,384,75.690,24.310,92.980,7.020,26.78,1.000,bicubic,-9.572,-4.238,+9\nvolo_d3_224.sail_in1k,224,75.690,24.310,92.950,7.050,86.33,0.960,bicubic,-9.750,-4.324,-9\nxcit_medium_24_p8_224.fb_dist_in1k,224,75.660,24.340,92.780,7.220,84.32,1.000,bicubic,-9.418,-4.494,+34\nresnext101_32x16d.fb_wsl_ig1b_ft_in1k,224,75.650,24.350,92.950,7.050,194.03,0.875,bilinear,-8.516,-4.242,+167\nhgnetv2_b5.ssld_stage1_in22k_in1k,224,75.650,24.350,92.700,7.300,39.57,0.965,bicubic,-8.806,-4.148,+114\nregnety_120.sw_in12k_ft_in1k,288,75.630,24.370,92.940,7.060,51.82,1.000,bicubic,-9.798,-4.636,-11\nhgnetv2_b5.ssld_stage2_ft_in1k,224,75.620,24.380,92.900,7.100,39.57,0.965,bicubic,-9.204,-4.394,+68\nvit_betwixt_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,75.620,24.380,92.500,7.500,60.40,0.950,bicubic,-9.806,-4.978,-12\nhiera_base_plus_224.mae_in1k_ft_in1k,224,75.580,24.420,92.490,7.510,69.90,0.900,bicubic,-9.556,-4.672,+18\nbeit3_base_patch16_224.in22k_ft_in1k,224,75.570,24.430,93.110,6.890,86.66,1.000,bicubic,-9.820,-4.530,-10\ncaformer_m36.sail_in1k,224,75.570,24.430,92.350,7.650,56.20,1.000,bicubic,-9.672,-4.876,+1\nconvnextv2_base.fcmae_ft_in1k,288,75.560,24.440,92.830,7.170,88.72,1.000,bicubic,-9.930,-4.550,-26\nvit_base_patch32_clip_448.laion2b_ft_in12k_in1k,448,75.550,24.450,92.830,7.170,88.34,1.000,bicubic,-10.242,-4.804,-55\nconvnext_small.fb_in22k_ft_in1k,288,75.540,24.460,93.200,6.800,50.22,1.000,bicubic,-9.736,-4.366,-6\nvit_large_patch16_rope_224.naver_in1k,224,75.540,24.460,92.230,7.770,304.17,0.900,bic
ubic,-9.106,-4.890,+78\ntf_efficientnet_b4.ns_jft_in1k,380,75.530,24.470,92.850,7.150,19.34,0.922,bicubic,-9.626,-4.632,+8\nregnety_320.swag_lc_in1k,224,75.520,24.480,93.790,6.210,145.05,0.965,bicubic,-9.134,-3.702,+73\ninception_next_base.sail_in1k_384,384,75.520,24.480,92.530,7.470,86.67,1.000,bicubic,-9.682,-4.744,+1\nhrnet_w48_ssld.paddle_in1k,288,75.510,24.490,92.850,7.150,77.47,1.000,bilinear,-8.974,-4.374,+95\nnextvit_base.bd_ssld_6m_in1k,224,75.500,24.500,93.170,6.830,44.82,0.950,bicubic,-9.686,-4.430,0\nresnext101_32x8d.fb_swsl_ig1b_ft_in1k,224,75.470,24.530,92.690,7.310,88.79,0.875,bilinear,-8.806,-4.498,+133\nvolo_d2_224.sail_in1k,224,75.470,24.530,92.470,7.530,58.68,0.960,bicubic,-9.734,-4.894,-5\nregnetz_e8.ra3_in1k,320,75.460,24.540,92.550,7.450,57.70,1.000,bicubic,-9.556,-4.728,+19\ndm_nfnet_f3.dm_in1k,416,75.430,24.570,92.900,7.100,254.92,0.940,bicubic,-10.246,-4.676,-56\ndm_nfnet_f5.dm_in1k,544,75.430,24.570,92.770,7.230,377.21,0.954,bicubic,-10.672,-4.988,-97\nvit_base_r50_s16_384.orig_in21k_ft_in1k,384,75.420,24.580,92.750,7.250,98.95,1.000,bicubic,-9.568,-4.546,+22\ndeit_base_distilled_patch16_384.fb_in1k,384,75.400,24.600,92.520,7.480,87.63,1.000,bicubic,-10.038,-4.812,-34\ncait_s24_384.fb_dist_in1k,384,75.380,24.620,92.520,7.480,47.06,1.000,bicubic,-9.668,-4.830,+10\ntf_efficientnet_b6.ap_in1k,528,75.380,24.620,92.300,7.700,43.04,0.942,bicubic,-9.414,-4.838,+49\ntf_efficientnetv2_l.in1k,384,75.350,24.650,92.360,7.640,118.52,1.000,bicubic,-9.852,-5.064,-11\ntf_efficientnetv2_m.in21k_ft_in1k,384,75.340,24.660,92.660,7.340,54.14,1.000,bicubic,-9.428,-4.774,+49\nvit_base_patch32_clip_384.laion2b_ft_in12k_in1k,384,75.330,24.670,92.690,7.310,88.30,1.000,bicubic,-10.068,-4.974,-34\nvit_base_patch16_224.augreg2_in21k_ft_in1k,224,75.320,24.680,93.280,6.720,86.57,0.900,bicubic,-9.788,-4.250,-6\nconvnext_small.in12k_ft_in1k,224,75.320,24.680,92.800,7.200,50.22,0.950,bicubic,-9.844,-4.702,-12\nhgnet_small.ssld_in1k,288,75.320,24.680,92.660,7.340,24.36,1.00
0,bicubic,-9.048,-4.470,+108\ndm_nfnet_f5.dm_in1k,416,75.300,24.700,92.470,7.530,377.21,0.954,bicubic,-10.412,-5.080,-69\ndm_nfnet_f4.dm_in1k,384,75.300,24.700,92.450,7.550,316.07,0.951,bicubic,-10.214,-5.228,-52\nefficientnet_x_b5.sw_r448_e450_in1k,576,75.290,24.710,92.760,7.240,33.44,1.000,bicubic,-9.638,-4.552,+20\nvit_medium_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,75.290,24.710,92.440,7.560,38.88,0.950,bicubic,-9.666,-4.944,+13\nvit_large_patch16_rope_mixed_ape_224.naver_in1k,224,75.290,24.710,92.230,7.770,304.40,0.900,bicubic,-9.556,-5.256,+30\nefficientvit_l2.r224_in1k,224,75.270,24.730,92.030,7.970,63.71,1.000,bicubic,-9.776,-5.074,-1\ncaformer_s18.sail_in1k_384,384,75.240,24.760,92.650,7.350,26.34,1.000,bicubic,-9.796,-4.704,-1\nmambaout_base_short_rw.sw_e500_in1k,288,75.240,24.760,92.500,7.500,88.83,1.000,bicubic,-9.602,-4.786,+28\ncoat_lite_medium_384.in1k,384,75.210,24.790,92.180,7.820,44.57,1.000,bicubic,-9.680,-5.196,+19\nxcit_medium_24_p16_384.fb_dist_in1k,384,75.190,24.810,92.320,7.680,84.40,1.000,bicubic,-10.244,-5.088,-51\nnextvit_small.bd_ssld_6m_in1k,224,75.170,24.830,92.810,7.190,31.76,0.950,bicubic,-9.702,-4.562,+20\nefficientnet_h_b5.sw_r448_e450_in1k,576,75.170,24.830,92.650,7.350,45.88,1.000,bicubic,-9.772,-4.740,+6\nmambaout_base_wide_rw.sw_e500_in1k,288,75.170,24.830,92.360,7.640,94.45,1.000,bicubic,-9.796,-4.862,+1\nvit_large_patch16_rope_ape_224.naver_in1k,224,75.170,24.830,92.340,7.660,304.37,0.900,bicubic,-9.490,-4.806,+42\nvit_large_patch16_rope_mixed_224.naver_in1k,224,75.150,24.850,92.270,7.730,304.20,0.900,bicubic,-9.690,-4.858,+22\ntf_efficientnetv2_m.in1k,480,75.140,24.860,92.630,7.370,54.14,1.000,bicubic,-10.068,-4.728,-36\ndeit3_small_patch16_384.fb_in22k_ft_in1k,384,75.130,24.870,92.800,7.200,22.21,1.000,bicubic,-9.716,-4.318,+17\nefficientnetv2_rw_m.agc_in1k,416,75.130,24.870,92.540,7.460,53.24,1.000,bicubic,-9.700,-4.608,+20\nregnety_2560.seer_ft_in1k,384,75.110,24.890,92.710,7.290,\"1,282.60\",1.000,bicubic,-10.034,-4.734
,-28\nrdnet_large.nv_in1k,224,75.090,24.910,92.140,7.860,186.27,0.900,bicubic,-9.704,-5.016,+21\ndeit3_large_patch16_224.fb_in1k,224,75.080,24.920,92.220,7.780,304.37,0.900,bicubic,-9.692,-5.124,+23\ndm_nfnet_f2.dm_in1k,352,75.060,24.940,92.270,7.730,193.78,0.920,bicubic,-10.124,-5.070,-36\nxcit_small_12_p8_384.fb_dist_in1k,384,75.040,24.960,92.440,7.560,26.21,1.000,bicubic,-10.028,-4.824,-20\nmvitv2_large.fb_in1k,224,75.040,24.960,92.270,7.730,217.99,0.900,bicubic,-10.224,-4.918,-49\nconvformer_s18.sail_in22k_ft_in1k_384,384,75.030,24.970,93.000,7.000,26.77,1.000,bicubic,-9.974,-4.572,-16\nhiera_small_abswin_256.sbb2_e200_in12k_ft_in1k,256,75.020,24.980,92.410,7.590,34.36,0.950,bicubic,-9.892,-4.850,0\ntiny_vit_21m_224.dist_in22k_ft_in1k,224,75.010,24.990,92.560,7.440,21.20,0.950,bicubic,-10.078,-4.692,-27\nconvnextv2_base.fcmae_ft_in1k,224,74.990,25.010,92.290,7.710,88.72,0.875,bicubic,-9.900,-4.798,+1\nxcit_small_24_p8_224.fb_dist_in1k,224,74.980,25.020,92.400,7.600,47.63,1.000,bicubic,-9.898,-4.800,+2\nconvnext_small.in12k_ft_in1k,288,74.970,25.030,92.610,7.390,50.22,1.000,bicubic,-10.354,-4.944,-61\nmambaout_small.in1k,288,74.970,25.030,91.700,8.300,48.49,1.000,bicubic,-9.542,-5.270,+45\nmambaout_base_tall_rw.sw_e500_in1k,288,74.950,25.050,92.270,7.730,86.48,1.000,bicubic,-10.038,-5.064,-20\nregnety_120.sw_in12k_ft_in1k,224,74.910,25.090,92.590,7.410,51.82,0.950,bicubic,-10.084,-4.822,-24\nefficientnetv2_rw_m.agc_in1k,320,74.910,25.090,92.070,7.930,53.24,1.000,bicubic,-9.386,-4.802,+83\nconvnext_large.fb_in1k,288,74.900,25.100,92.080,7.920,197.77,1.000,bicubic,-9.962,-5.130,-2\ncoatnet_rmlp_1_rw2_224.sw_in12k_ft_in1k,224,74.900,25.100,91.940,8.060,41.72,0.950,bicubic,-10.040,-5.028,-18\necaresnet269d.ra2_in1k,352,74.870,25.130,92.770,7.230,102.09,1.000,bicubic,-10.098,-4.454,-23\nmaxvit_tiny_tf_384.in1k,384,74.860,25.140,92.400,7.600,30.98,1.000,bicubic,-10.246,-4.982,-43\nconvformer_b36.sail_in1k,224,74.840,25.160,91.640,8.360,99.88,1.000,bicubic,-9.986,-5.308
,0\nmambaout_small_rw.sw_e450_in1k,288,74.820,25.180,91.790,8.210,48.50,1.000,bicubic,-9.784,-5.306,+23\nconvnext_small.fb_in22k_ft_in1k,224,74.800,25.200,92.900,7.100,50.22,0.875,bicubic,-9.782,-4.378,+23\nconvnext_tiny.in12k_ft_in1k_384,384,74.790,25.210,92.830,7.170,28.59,1.000,bicubic,-10.364,-4.804,-53\necaresnet269d.ra2_in1k,320,74.790,25.210,92.490,7.510,102.09,0.950,bicubic,-9.934,-4.690,+6\nconvformer_s18.sail_in1k_384,384,74.760,25.240,92.540,7.460,26.77,1.000,bicubic,-9.632,-4.570,+58\ndeit3_base_patch16_384.fb_in1k,384,74.760,25.240,92.190,7.810,86.88,1.000,bicubic,-10.328,-5.164,-43\nmaxxvit_rmlp_small_rw_256.sw_in1k,256,74.750,25.250,91.910,8.090,66.01,0.950,bicubic,-9.884,-5.152,+13\ndm_nfnet_f3.dm_in1k,320,74.710,25.290,91.920,8.080,254.92,0.940,bicubic,-10.382,-5.466,-49\nhrnet_w48_ssld.paddle_in1k,224,74.690,25.310,92.090,7.910,77.47,0.950,bilinear,-8.966,-4.758,+178\ndeit3_medium_patch16_224.fb_in22k_ft_in1k,224,74.680,25.320,92.570,7.430,38.85,1.000,bicubic,-9.870,-4.610,+21\nmaxvit_large_tf_224.in1k,224,74.670,25.330,91.800,8.200,211.79,0.950,bicubic,-10.268,-5.180,-31\nmobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k,544,74.660,25.340,91.910,8.090,32.59,1.000,bicubic,-10.330,-5.384,-40\nxcit_small_24_p16_384.fb_dist_in1k,384,74.650,25.350,92.530,7.470,47.67,1.000,bicubic,-10.452,-4.788,-56\nmambaout_base_wide_rw.sw_e500_in1k,224,74.640,25.360,91.930,8.070,94.45,0.950,bicubic,-9.808,-5.398,+36\nregnetz_e8.ra3_in1k,256,74.630,25.370,91.830,8.170,57.70,0.940,bicubic,-9.788,-5.158,+46\neca_nfnet_l2.ra3_in1k,384,74.590,25.410,92.700,7.300,56.72,1.000,bicubic,-10.120,-4.292,-6\ncaformer_s36.sail_in1k,224,74.580,25.420,92.020,7.980,39.30,1.000,bicubic,-9.948,-5.002,+18\nxcit_large_24_p16_224.fb_dist_in1k,224,74.560,25.440,91.990,8.010,189.10,1.000,bicubic,-10.376,-5.138,-37\nmambaout_base.in1k,288,74.560,25.440,91.410,8.590,84.81,1.000,bicubic,-10.170,-5.524,-10\ntf_efficientnet_b8.ra_in1k,672,74.550,25.450,92.180,7.820,87.41,0.954,bicubic,-10.794,-5
.212,-93\nvit_medium_patch16_gap_256.sw_in12k_ft_in1k,256,74.550,25.450,91.910,8.090,38.86,0.950,bicubic,-9.892,-5.304,+31\nhiera_base_224.mae_in1k_ft_in1k,224,74.550,25.450,91.840,8.160,51.52,0.900,bicubic,-9.978,-5.170,+12\nvit_mediumd_patch16_rope_reg1_gap_256.sbb_in1k,256,74.550,25.450,91.590,8.410,63.95,0.950,bicubic,-9.766,-5.220,+52\nregnety_160.swag_lc_in1k,224,74.500,25.500,93.040,6.960,83.59,0.965,bicubic,-9.306,-4.244,+131\ngcvit_base.in1k,224,74.490,25.510,91.770,8.230,90.32,0.875,bicubic,-9.966,-5.448,+21\nmaxvit_rmlp_small_rw_224.sw_in1k,224,74.470,25.530,91.480,8.520,64.90,0.900,bicubic,-10.018,-5.292,+12\nconvnext_tiny.in12k_ft_in1k,288,74.460,25.540,92.600,7.400,28.59,1.000,bicubic,-9.988,-4.474,+21\ntf_efficientnet_b7.ra_in1k,600,74.460,25.540,92.130,7.870,66.35,0.949,bicubic,-10.466,-5.090,-42\nnextvit_large.bd_in1k_384,384,74.440,25.560,92.550,7.450,57.87,1.000,bicubic,-10.490,-4.944,-45\ncaformer_s18.sail_in22k_ft_in1k,224,74.440,25.560,92.520,7.480,26.34,1.000,bicubic,-9.652,-4.662,+77\nfastvit_ma36.apple_dist_in1k,256,74.440,25.560,91.930,8.070,44.07,0.950,bicubic,-10.168,-5.066,-9\nmambaout_base_short_rw.sw_e500_in1k,224,74.430,25.570,91.850,8.150,88.83,0.950,bicubic,-9.934,-5.094,+37\nmobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k,480,74.400,25.600,92.090,7.910,32.59,1.000,bicubic,-10.372,-4.958,-29\nmambaout_base_tall_rw.sw_e500_in1k,224,74.400,25.600,91.710,8.290,86.48,0.950,bicubic,-10.036,-5.246,+19\ndavit_base.msft_in1k,224,74.400,25.600,91.650,8.350,87.95,0.950,bicubic,-10.252,-5.370,-17\nrexnetr_300.sw_in12k_ft_in1k,288,74.390,25.610,92.470,7.530,34.81,1.000,bicubic,-10.192,-4.944,-13\nxcit_small_12_p16_384.fb_dist_in1k,384,74.390,25.610,92.140,7.860,26.25,1.000,bicubic,-10.344,-4.996,-29\ntf_efficientnet_b5.ap_in1k,456,74.390,25.610,91.920,8.080,30.39,0.934,bicubic,-9.868,-5.064,+44\nconvformer_m36.sail_in1k,224,74.390,25.610,91.480,8.520,57.05,1.000,bicubic,-10.096,-5.386,+2\nregnetz_040_h.ra3_in1k,320,74.360,25.640,92.210,7.790,
28.94,1.000,bicubic,-10.138,-4.796,-4\ndm_nfnet_f1.dm_in1k,320,74.360,25.640,92.040,7.960,132.63,0.910,bicubic,-10.340,-5.138,-27\nrdnet_base.nv_in1k,224,74.330,25.670,91.820,8.180,87.45,0.900,bicubic,-10.036,-5.058,+26\nmaxvit_base_tf_224.in1k,224,74.310,25.690,91.880,8.120,119.47,0.950,bicubic,-10.574,-5.122,-52\nvit_base_patch32_clip_384.openai_ft_in12k_in1k,384,74.300,25.700,92.450,7.550,88.30,0.950,bicubic,-10.918,-4.948,-105\ntf_efficientnetv2_s.in21k_ft_in1k,384,74.300,25.700,92.410,7.590,21.46,1.000,bicubic,-10.032,-4.864,+26\nhgnet_small.ssld_in1k,224,74.300,25.700,92.090,7.910,24.36,0.965,bicubic,-9.518,-4.756,+107\neca_nfnet_l2.ra3_in1k,320,74.290,25.710,92.320,7.680,56.72,0.900,bicubic,-9.880,-4.696,+47\nregnetz_040.ra3_in1k,320,74.290,25.710,91.850,8.150,27.12,1.000,bicubic,-9.952,-5.086,+38\nmobilenetv4_hybrid_large.ix_e600_r384_in1k,448,74.290,25.710,91.630,8.370,37.76,1.000,bicubic,-10.062,-5.258,+23\nresnest200e.in1k,320,74.250,25.750,91.860,8.140,70.20,0.909,bicubic,-9.630,-5.020,+88\nhgnetv2_b4.ssld_stage2_ft_in1k,288,74.240,25.760,92.540,7.460,19.80,1.000,bicubic,-9.712,-4.526,+72\nmobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k,448,74.240,25.760,91.600,8.400,32.59,0.950,bicubic,-10.428,-5.508,-36\nnextvit_base.bd_in1k_384,384,74.220,25.780,92.190,7.810,44.82,1.000,bicubic,-10.486,-5.030,-41\nvit_large_r50_s32_224.augreg_in21k_ft_in1k,224,74.200,25.800,92.310,7.690,328.99,0.900,bicubic,-10.234,-4.842,-2\nresmlp_big_24_224.fb_in22k_ft_in1k,224,74.200,25.800,92.080,7.920,129.14,0.875,bicubic,-10.214,-5.028,+5\nresnetrs200.tf_in1k,320,74.200,25.800,91.860,8.140,93.21,1.000,bicubic,-10.248,-5.012,-5\ncoatnet_rmlp_2_rw_224.sw_in1k,224,74.200,25.800,91.240,8.760,73.88,0.950,bicubic,-10.408,-5.504,-32\nseresnet152d.ra2_in1k,320,74.190,25.810,91.990,8.010,66.84,1.000,bicubic,-10.154,-5.054,+13\nefficientnet_h_b5.sw_r448_e450_in1k,448,74.190,25.810,91.940,8.060,45.88,1.000,bicubic,-10.228,-5.200,+1\nconvnext_large.fb_in1k,224,74.190,25.810,91.530,8.470,
197.77,0.875,bicubic,-10.114,-5.412,+16\nmvitv2_base.fb_in1k,224,74.190,25.810,91.510,8.490,51.47,0.900,bicubic,-10.234,-5.398,-2\nswinv2_base_window16_256.ms_in1k,256,74.180,25.820,91.870,8.130,87.92,0.900,bicubic,-10.428,-5.194,-41\nconvnext_base.fb_in1k,288,74.180,25.820,91.670,8.330,88.59,1.000,bicubic,-10.286,-5.312,-20\nxcit_large_24_p8_224.fb_in1k,224,74.170,25.830,90.850,9.150,188.93,1.000,bicubic,-10.210,-6.068,+2\nflexivit_base.1200ep_in1k,240,74.160,25.840,91.810,8.190,86.59,0.950,bicubic,-10.518,-5.170,-52\nvit_little_patch16_reg1_gap_256.sbb_in12k_ft_in1k,256,74.160,25.840,91.680,8.320,22.52,0.950,bicubic,-9.640,-4.904,+92\nconvnextv2_tiny.fcmae_ft_in22k_in1k_384,384,74.130,25.870,92.450,7.550,28.64,1.000,bicubic,-10.968,-5.182,-111\ntf_efficientnetv2_m.in1k,384,74.130,25.870,91.940,8.060,54.14,1.000,bicubic,-10.424,-5.118,-37\nconvformer_s36.sail_in1k,224,74.130,25.870,91.200,8.800,40.01,1.000,bicubic,-9.938,-5.544,+41\nconvnext_tiny.in12k_ft_in1k,224,74.120,25.880,92.170,7.830,28.59,0.950,bicubic,-10.072,-4.690,+24\ncait_xs24_384.fb_dist_in1k,384,74.110,25.890,91.920,8.080,26.67,1.000,bicubic,-9.954,-4.968,+38\nefficientvit_l1.r224_in1k,224,74.110,25.890,91.680,8.320,52.65,1.000,bicubic,-10.354,-5.180,-28\nvit_base_patch16_rope_reg1_gap_256.sbb_in1k,256,74.110,25.890,91.060,8.940,86.43,0.950,bicubic,-9.762,-5.616,+69\nvit_base_patch16_224.augreg_in21k_ft_in1k,224,74.100,25.900,92.370,7.630,86.57,0.900,bicubic,-10.434,-4.930,-41\nvolo_d1_224.sail_in1k,224,74.100,25.900,92.050,7.950,26.63,0.960,bicubic,-10.048,-4.722,+24\nmobilenetv4_hybrid_large.e600_r384_in1k,448,74.100,25.900,92.000,8.000,37.76,1.000,bicubic,-10.164,-4.956,+5\nefficientnetv2_rw_s.ra2_in1k,384,74.090,25.910,91.790,8.210,23.94,1.000,bicubic,-9.740,-4.922,+70\nvit_base_patch16_224_miil.in21k_ft_in1k,224,74.060,25.940,91.630,8.370,86.54,0.875,bilinear,-10.218,-5.168,+1\nflexivit_base.600ep_in1k,240,74.050,25.950,91.610,8.390,86.59,0.950,bicubic,-10.514,-5.300,-50\nresnext101_32x4d.fb_sws
l_ig1b_ft_in1k,224,74.040,25.960,91.940,8.060,44.18,0.875,bilinear,-9.228,-4.820,+168\nhiera_small_abswin_256.sbb2_pd_e200_in12k_ft_in1k,256,74.040,25.960,91.730,8.270,34.36,0.950,bicubic,-10.524,-5.378,-53\nseresnext101d_32x8d.ah_in1k,288,74.040,25.960,91.660,8.340,93.59,1.000,bicubic,-10.340,-5.004,-18\nseresnextaa101d_32x8d.ah_in1k,288,74.040,25.960,91.640,8.360,93.59,1.000,bicubic,-10.540,-5.422,-53\ntf_efficientnetv2_s.in1k,384,74.040,25.960,91.420,8.580,21.46,1.000,bicubic,-9.860,-5.282,+50\nmobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k,384,74.020,25.980,91.210,8.790,32.59,0.950,bicubic,-10.294,-5.892,-11\nflexivit_base.300ep_in1k,240,74.010,25.990,91.350,8.650,86.59,0.950,bicubic,-10.392,-5.534,-23\ncrossvit_18_dagger_408.in1k,408,74.010,25.990,91.300,8.700,44.61,1.000,bicubic,-10.188,-5.534,+5\ninception_next_base.sail_in1k,224,74.000,26.000,91.370,8.630,86.67,0.950,bicubic,-10.098,-5.768,+17\nhgnetv2_b4.ssld_stage2_ft_in1k,224,73.970,26.030,91.950,8.050,19.80,0.965,bicubic,-9.730,-4.838,+92\nmambaout_small.in1k,224,73.970,26.030,91.140,8.860,48.49,1.000,bicubic,-10.124,-5.486,+16\ndavit_small.msft_in1k,224,73.960,26.040,91.570,8.430,49.75,0.950,bicubic,-10.292,-5.372,-8\nresnest269e.in1k,416,73.940,26.060,92.040,7.960,110.93,0.928,bicubic,-10.602,-4.948,-60\nefficientnet_x_b5.sw_r448_e450_in1k,448,73.940,26.060,91.660,8.340,33.44,1.000,bicubic,-10.262,-5.312,-2\nfastvit_sa36.apple_dist_in1k,256,73.920,26.080,91.660,8.340,31.53,0.900,bicubic,-10.108,-5.212,+25\nxcit_small_12_p8_224.fb_dist_in1k,224,73.910,26.090,91.690,8.310,26.21,1.000,bicubic,-10.342,-5.184,-11\neca_nfnet_l1.ra2_in1k,320,73.880,26.120,92.030,7.970,41.41,1.000,bicubic,-10.182,-4.996,+16\ntf_efficientnet_b7.aa_in1k,600,73.880,26.120,91.720,8.280,66.35,0.949,bicubic,-10.544,-5.144,-39\nvit_base_patch16_rope_ape_224.naver_in1k,224,73.880,26.120,91.540,8.460,86.59,0.900,bicubic,-9.900,-5.070,+63\nvit_small_r26_s32_384.augreg_in21k_ft_in1k,384,73.870,26.130,92.230,7.770,36.47,1.000,bicubic,-
10.196,-5.080,+11\ntf_efficientnet_b6.aa_in1k,528,73.860,26.140,91.670,8.330,43.04,0.942,bicubic,-10.264,-5.220,+1\ndeit3_small_patch16_224.fb_in22k_ft_in1k,224,73.850,26.150,91.970,8.030,22.06,1.000,bicubic,-9.232,-4.804,+180\nrepvgg_d2se.rvgg_in1k,320,73.850,26.150,91.390,8.610,133.33,1.000,bilinear,-9.700,-5.272,+100\nresnetrs420.tf_in1k,416,73.830,26.170,91.690,8.310,191.89,1.000,bicubic,-11.180,-5.420,-134\npit_b_distilled_224.in1k,224,73.820,26.180,90.740,9.260,74.79,0.900,bicubic,-9.954,-5.728,+59\ntf_efficientnet_b3.ns_jft_in1k,300,73.810,26.190,91.790,8.210,12.23,0.904,bicubic,-10.244,-5.136,+8\nregnety_1280.seer_ft_in1k,384,73.800,26.200,91.760,8.240,644.81,1.000,bicubic,-10.626,-5.326,-50\nvit_betwixt_patch16_rope_reg4_gap_256.sbb_in1k,256,73.800,26.200,91.160,8.840,60.23,0.950,bicubic,-10.106,-5.518,+21\nfocalnet_base_lrf.ms_in1k,224,73.800,26.200,91.070,8.930,88.75,0.900,bicubic,-10.028,-5.540,+42\nvit_base_patch16_384.orig_in21k_ft_in1k,384,73.780,26.220,92.090,7.910,86.86,1.000,bicubic,-10.432,-5.120,-19\nconvnext_base.fb_in1k,224,73.780,26.220,91.250,8.750,88.59,0.875,bicubic,-10.058,-5.496,+35\nvit_medium_patch16_rope_reg1_gap_256.sbb_in1k,256,73.770,26.230,91.500,8.500,38.74,0.950,bicubic,-10.040,-5.324,+40\nseresnext101_32x8d.ah_in1k,288,73.770,26.230,91.380,8.620,93.57,1.000,bicubic,-10.422,-5.752,-16\nrdnet_small.nv_in1k,224,73.770,26.230,91.340,8.660,50.44,0.900,bicubic,-9.888,-5.364,+74\nmambaout_base.in1k,224,73.760,26.240,91.000,9.000,84.81,1.000,bicubic,-10.434,-5.668,-21\nvit_base_patch16_rope_mixed_ape_224.naver_in1k,224,73.750,26.250,91.390,8.610,86.59,0.900,bicubic,-10.148,-5.362,+19\nregnetz_d32.ra3_in1k,320,73.740,26.260,91.860,8.140,27.58,0.950,bicubic,-10.288,-4.990,-1\nfastvit_ma36.apple_in1k,256,73.740,26.260,91.580,8.420,44.07,0.950,bicubic,-10.144,-5.176,+21\nefficientvit_b3.r288_in1k,288,73.740,26.260,91.410,8.590,48.65,1.000,bicubic,-10.422,-5.322,-19\nresnetrs420.tf_in1k,320,73.740,26.260,91.080,8.920,191.89,1.000,bicubic,-10
.500,-5.782,-31\nregnetz_d8.ra3_in1k,320,73.730,26.270,91.960,8.040,23.37,1.000,bicubic,-10.314,-5.044,-6\nresnext101_32x16d.fb_swsl_ig1b_ft_in1k,224,73.720,26.280,92.090,7.910,194.03,0.875,bilinear,-9.620,-4.754,+124\nmaxvit_small_tf_224.in1k,224,73.700,26.300,91.400,8.600,68.93,0.950,bicubic,-10.752,-5.424,-75\nregnetz_040.ra3_in1k,256,73.700,26.300,91.240,8.760,27.12,1.000,bicubic,-9.944,-5.442,+67\nswin_small_patch4_window7_224.ms_in22k_ft_in1k,224,73.690,26.310,92.330,7.670,49.61,0.900,bicubic,-9.640,-4.666,+124\nresnetaa101d.sw_in12k_ft_in1k,288,73.690,26.310,91.780,8.220,44.57,1.000,bicubic,-10.424,-5.334,-24\nmambaout_small_rw.sw_e450_in1k,224,73.690,26.310,91.110,8.890,48.50,1.000,bicubic,-10.350,-5.632,-10\nresnext101_32x8d.fb_wsl_ig1b_ft_in1k,224,73.680,26.320,92.180,7.820,88.79,0.875,bilinear,-9.040,-4.444,+206\ngcvit_small.in1k,224,73.680,26.320,91.320,8.680,51.09,0.875,bicubic,-10.224,-5.644,+4\nswinv2_base_window8_256.ms_in1k,256,73.670,26.330,91.610,8.390,87.92,0.900,bicubic,-10.576,-5.292,-45\nxcit_medium_24_p16_224.fb_dist_in1k,224,73.670,26.330,91.490,8.510,84.40,1.000,bicubic,-10.622,-5.446,-51\nmaxvit_rmlp_tiny_rw_256.sw_in1k,256,73.640,26.360,91.410,8.590,29.15,0.950,bicubic,-10.586,-5.372,-43\nedgenext_base.usi_in1k,320,73.630,26.370,91.770,8.230,18.51,1.000,bicubic,-10.340,-5.000,-10\nmobilenetv4_hybrid_large.ix_e600_r384_in1k,384,73.630,26.370,91.080,8.920,37.76,0.950,bicubic,-10.366,-5.634,-12\nhgnetv2_b4.ssld_stage1_in22k_in1k,288,73.600,26.400,92.340,7.660,19.80,1.000,bicubic,-9.850,-4.580,+86\nedgenext_base.in21k_ft_in1k,320,73.600,26.400,91.840,8.160,18.51,1.000,bicubic,-10.476,-5.368,-27\nvit_base_patch16_rope_mixed_224.naver_in1k,224,73.600,26.400,91.340,8.660,86.44,0.900,bicubic,-10.206,-5.382,+22\nseresnet152d.ra2_in1k,256,73.590,26.410,91.500,8.500,66.84,0.950,bicubic,-10.112,-5.120,+41\nhiera_small_224.mae_in1k_ft_in1k,224,73.590,26.410,91.380,8.620,35.01,0.900,bicubic,-10.298,-5.288,-4\nmaxvit_tiny_rw_224.sw_in1k,224,73.590,26.41
0,90.860,9.140,29.06,0.950,bicubic,-9.932,-5.628,+69\nconvnext_small.fb_in1k,288,73.570,26.430,91.390,8.610,50.22,1.000,bicubic,-10.134,-5.400,+36\nsequencer2d_l.in1k,224,73.570,26.430,91.100,8.900,54.30,0.875,bicubic,-9.836,-5.404,+92\nseresnextaa101d_32x8d.ah_in1k,224,73.570,26.430,90.940,9.060,93.59,0.950,bicubic,-10.330,-5.708,-9\ndm_nfnet_f2.dm_in1k,256,73.560,26.440,91.420,8.580,193.78,0.920,bicubic,-10.540,-5.490,-40\npvt_v2_b4.in1k,224,73.560,26.440,91.120,8.880,62.56,0.900,bicubic,-10.150,-5.494,+30\nconvnextv2_tiny.fcmae_ft_in22k_in1k,288,73.540,26.460,91.950,8.050,28.64,1.000,bicubic,-10.888,-5.310,-89\nregnety_080.ra3_in1k,288,73.480,26.520,91.700,8.300,39.18,1.000,bicubic,-10.460,-5.184,-25\nmobilenetv4_hybrid_large.e600_r384_in1k,384,73.480,26.520,91.520,8.480,37.76,0.950,bicubic,-10.346,-5.228,+3\nedgenext_base.in21k_ft_in1k,256,73.480,26.520,91.240,8.760,18.51,0.950,bicubic,-9.936,-5.560,+82\ncoat_lite_medium.in1k,224,73.480,26.520,91.230,8.770,44.57,0.900,bicubic,-10.122,-5.484,+48\ninception_next_small.sail_in1k,224,73.480,26.520,91.140,8.860,49.37,0.875,bicubic,-10.104,-5.458,+51\nresnetrs270.tf_in1k,352,73.460,26.540,91.450,8.550,129.86,1.000,bicubic,-10.978,-5.522,-99\nxcit_tiny_24_p8_384.fb_dist_in1k,384,73.450,26.550,91.660,8.340,12.11,1.000,bicubic,-10.334,-5.056,+5\nrepvit_m2_3.dist_450e_in1k,224,73.450,26.550,91.080,8.920,23.69,0.950,bicubic,-10.296,-5.570,+14\ntresnet_v2_l.miil_in21k_ft_in1k,224,73.450,26.550,91.030,8.970,46.17,0.875,bilinear,-10.460,-5.462,-25\ntwins_svt_large.in1k,224,73.450,26.550,90.870,9.130,99.27,0.900,bicubic,-10.268,-5.730,+19\nconvformer_s18.sail_in22k_ft_in1k,224,73.440,26.560,91.930,8.070,26.77,1.000,bicubic,-10.328,-5.116,+8\nregnetz_040_h.ra3_in1k,256,73.440,26.560,91.540,8.460,28.94,1.000,bicubic,-10.324,-5.160,+10\nresnetrs350.tf_in1k,288,73.440,26.560,91.110,8.890,163.96,1.000,bicubic,-10.864,-5.782,-83\nvit_betwixt_patch16_reg4_gap_256.sbb_in1k,256,73.430,26.570,91.240,8.760,60.40,0.950,bicubic,-10.280,-5.
376,+16\ndeit3_base_patch16_224.fb_in1k,224,73.420,26.580,91.260,8.740,86.59,0.900,bicubic,-10.380,-5.712,-2\nswinv2_small_window16_256.ms_in1k,256,73.400,26.600,91.280,8.720,49.73,0.900,bicubic,-10.816,-5.594,-72\nedgenext_base.usi_in1k,256,73.400,26.600,91.170,8.830,18.51,0.950,bicubic,-10.284,-5.580,+21\nresnetaa101d.sw_in12k_ft_in1k,224,73.400,26.600,91.100,8.900,44.57,0.950,bicubic,-10.144,-5.728,+42\nrexnetr_300.sw_in12k_ft_in1k,224,73.390,26.610,91.800,8.200,34.81,0.950,bicubic,-10.646,-5.192,-48\nregnety_064.ra3_in1k,288,73.380,26.620,91.260,8.740,30.58,1.000,bicubic,-10.336,-5.464,+6\nresmlp_big_24_224.fb_distilled_in1k,224,73.380,26.620,91.090,8.910,129.14,0.875,bicubic,-10.214,-5.568,+32\nresnetrs200.tf_in1k,256,73.370,26.630,91.220,8.780,93.21,1.000,bicubic,-10.514,-5.542,-31\nconvnext_tiny.fb_in22k_ft_in1k_384,384,73.360,26.640,91.860,8.140,28.59,1.000,bicubic,-10.738,-4.922,-65\nvit_base_patch16_rope_224.naver_in1k,224,73.360,26.640,91.320,8.680,86.43,0.900,bicubic,-10.354,-5.354,+5\nswin_s3_base_224.ms_in1k,224,73.360,26.640,91.270,8.730,71.13,0.900,bicubic,-10.560,-5.380,-43\nresnet200d.ra2_in1k,320,73.350,26.650,91.460,8.540,64.69,1.000,bicubic,-10.626,-5.360,-49\ntf_efficientnet_b5.ra_in1k,456,73.340,26.660,91.370,8.630,30.39,0.934,bicubic,-10.494,-5.394,-26\nregnety_160.deit_in1k,288,73.320,26.680,91.630,8.370,83.59,1.000,bicubic,-10.384,-5.186,+5\nswin_base_patch4_window12_384.ms_in1k,384,73.320,26.680,91.350,8.650,87.90,1.000,bicubic,-11.158,-5.536,-133\nhgnetv2_b3.ssld_stage2_ft_in1k,288,73.300,26.700,91.840,8.160,16.29,1.000,bicubic,-10.288,-4.972,+23\nhgnetv2_b4.ssld_stage1_in22k_in1k,224,73.290,26.710,91.800,8.200,19.80,0.965,bicubic,-9.602,-4.832,+132\nhgnetv2_b3.ssld_stage1_in22k_in1k,288,73.280,26.720,91.960,8.040,16.29,1.000,bicubic,-9.838,-4.494,+98\nregnetv_064.ra3_in1k,288,73.280,26.720,91.600,8.400,30.58,1.000,bicubic,-10.466,-5.146,-11\ntf_efficientnet_b5.aa_in1k,456,73.280,26.720,91.070,8.930,30.39,0.934,bicubic,-10.416,-5.642,+5\n
efficientnet_b4.ra2_in1k,384,73.270,26.730,91.320,8.680,19.34,1.000,bicubic,-10.198,-5.254,+36\nfocalnet_base_srf.ms_in1k,224,73.270,26.730,91.300,8.700,88.15,0.900,bicubic,-10.538,-5.360,-27\nvit_betwixt_patch16_reg1_gap_256.sbb_in1k,256,73.270,26.730,91.150,8.850,60.40,0.950,bicubic,-10.382,-5.396,+7\ngcvit_tiny.in1k,224,73.270,26.730,90.860,9.140,28.22,0.875,bicubic,-10.130,-5.534,+51\nfocalnet_small_srf.ms_in1k,224,73.270,26.730,90.630,9.370,49.89,0.900,bicubic,-10.158,-5.802,+44\nregnetz_d8_evos.ch_in1k,256,73.260,26.740,90.760,9.240,23.46,0.950,bicubic,-10.144,-5.902,+48\ntiny_vit_21m_224.in1k,224,73.250,26.750,91.410,8.590,21.20,0.950,bicubic,-10.018,-5.192,+64\nregnetz_d8.ra3_in1k,256,73.250,26.750,91.300,8.700,23.37,0.940,bicubic,-10.296,-5.446,+17\nresnet152d.ra2_in1k,320,73.250,26.750,91.200,8.800,60.21,1.000,bicubic,-10.434,-5.498,-4\nmvitv2_small.fb_in1k,224,73.250,26.750,91.120,8.880,34.87,0.900,bicubic,-10.522,-5.438,-24\nmobilenetv4_conv_aa_large.e600_r384_in1k,480,73.200,26.800,91.170,8.830,32.59,1.000,bicubic,-10.626,-5.550,-41\nswinv2_small_window8_256.ms_in1k,256,73.200,26.800,91.040,8.960,49.73,0.900,bicubic,-10.644,-5.610,-47\npvt_v2_b5.in1k,224,73.190,26.810,91.300,8.700,81.96,0.900,bicubic,-10.570,-5.334,-25\nseresnext101d_32x8d.ah_in1k,224,73.180,26.820,91.200,8.800,93.59,0.950,bicubic,-10.522,-5.594,-13\nvit_small_patch16_384.augreg_in21k_ft_in1k,384,73.170,26.830,91.890,8.110,22.20,1.000,bicubic,-10.644,-5.216,-45\nregnetz_d8_evos.ch_in1k,320,73.170,26.830,91.630,8.370,23.46,1.000,bicubic,-10.968,-5.388,-94\nvit_medium_patch16_reg4_gap_256.sbb_in1k,256,73.170,26.830,91.210,8.790,38.88,0.950,bicubic,-10.296,-5.412,+23\nnest_base_jx.goog_in1k,224,73.170,26.830,90.990,9.010,67.72,0.875,bicubic,-10.382,-5.374,+5\nfastvit_sa24.apple_dist_in1k,256,73.160,26.840,91.390,8.610,21.55,0.900,bicubic,-10.208,-5.184,+40\nxcit_small_24_p16_224.fb_dist_in1k,224,73.150,26.850,91.460,8.540,47.67,1.000,bicubic,-10.730,-5.274,-61\nfastvit_sa36.apple_in1k,256,
73.150,26.850,90.960,9.040,31.53,0.900,bicubic,-10.352,-5.672,+11\nresnetv2_152x4_bit.goog_in21k_ft_in1k,480,73.140,26.860,91.780,8.220,936.53,1.000,bilinear,-11.796,-5.676,-220\nregnetz_d32.ra3_in1k,256,73.140,26.860,91.450,8.550,27.58,0.950,bicubic,-10.292,-5.184,+23\ncs3se_edgenet_x.c2ns_in1k,320,73.140,26.860,91.280,8.720,50.72,1.000,bicubic,-10.410,-5.386,-3\nxcit_small_24_p8_224.fb_in1k,224,73.140,26.860,91.230,8.770,47.63,1.000,bicubic,-10.714,-5.406,-64\nese_vovnet57b.ra4_e3600_r256_in1k,320,73.140,26.860,91.150,8.850,38.61,1.000,bicubic,-10.158,-5.372,+49\nxcit_medium_24_p8_224.fb_in1k,224,73.140,26.860,90.440,9.560,84.32,1.000,bicubic,-10.604,-5.954,-34\nresnetrs350.tf_in1k,384,73.130,26.870,91.180,8.820,163.96,1.000,bicubic,-11.580,-6.076,-196\nseresnext101_32x8d.ah_in1k,224,73.120,26.880,90.490,9.510,93.57,0.950,bicubic,-10.498,-5.922,-15\nnextvit_large.bd_in1k,224,73.100,26.900,91.550,8.450,57.87,0.950,bicubic,-10.554,-5.132,-21\neca_nfnet_l1.ra2_in1k,256,73.100,26.900,91.370,8.630,41.41,0.900,bicubic,-10.164,-5.314,+43\nfasternet_l.in1k,224,73.100,26.900,90.810,9.190,93.47,1.000,bicubic,-10.416,-5.848,-3\nmobilenetv4_hybrid_medium.ix_e550_r384_in1k,448,73.090,26.910,91.370,8.630,11.07,1.000,bicubic,-10.300,-5.396,+24\nefficientvit_b3.r256_in1k,256,73.080,26.920,90.900,9.100,48.65,1.000,bicubic,-10.702,-5.598,-53\nnextvit_small.bd_in1k_384,384,73.070,26.930,91.480,8.520,31.76,1.000,bicubic,-10.966,-5.514,-98\npvt_v2_b3.in1k,224,73.070,26.930,90.990,9.010,45.24,0.900,bicubic,-10.050,-5.570,+59\nresnet200d.ra2_in1k,256,73.070,26.930,90.850,9.150,64.69,0.950,bicubic,-10.180,-5.702,+42\nresnet152.a1h_in1k,288,73.060,26.940,91.130,8.870,60.19,1.000,bicubic,-10.434,-5.392,-6\nnaflexvit_base_patch16_gap.e300_s576_in1k,384,73.060,26.940,90.590,9.410,86.63,1.000,bicubic,-10.280,-5.776,+29\nvit_medium_patch16_reg1_gap_256.sbb_in1k,256,73.050,26.950,91.050,8.950,38.88,0.950,bicubic,-10.412,-5.498,+1\nresnetv2_152x2_bit.goog_in21k_ft_in1k,448,73.040,26.960,91.990,8
.010,236.34,1.000,bilinear,-11.428,-5.442,-178\nswin_s3_small_224.ms_in1k,224,73.030,26.970,90.720,9.280,49.74,0.900,bicubic,-10.748,-5.740,-59\ndeit3_small_patch16_384.fb_in1k,384,73.010,26.990,91.250,8.750,22.21,1.000,bicubic,-10.422,-5.424,0\ncait_s24_224.fb_dist_in1k,224,73.010,26.990,91.200,8.800,46.92,1.000,bicubic,-10.474,-5.386,-10\nrepvit_m2_3.dist_300e_in1k,224,73.010,26.990,90.860,9.140,23.69,0.950,bicubic,-10.514,-5.672,-18\nefficientvit_b3.r224_in1k,224,73.010,26.990,90.540,9.460,48.65,0.950,bicubic,-10.434,-5.798,+2\nconvnextv2_tiny.fcmae_ft_in22k_in1k,224,72.990,27.010,91.580,8.420,28.64,0.875,bicubic,-10.914,-5.082,-96\nmobilenetv4_conv_large.e600_r384_in1k,448,72.980,27.020,91.240,8.760,32.59,1.000,bicubic,-10.438,-5.372,+3\nregnety_080.ra3_in1k,224,72.980,27.020,90.750,9.250,39.18,0.950,bicubic,-10.186,-5.730,+36\ndeit_base_distilled_patch16_224.fb_in1k,224,72.970,27.030,90.960,9.040,87.34,0.900,bicubic,-10.414,-5.532,+8\nmaxvit_tiny_tf_224.in1k,224,72.960,27.040,90.880,9.120,30.92,0.950,bicubic,-10.466,-5.704,-1\nresnetrs270.tf_in1k,256,72.930,27.070,90.620,9.380,129.86,1.000,bicubic,-10.666,-5.996,-35\nefficientnetv2_rw_s.ra2_in1k,288,72.900,27.100,91.290,8.710,23.94,1.000,bicubic,-9.988,-5.038,+77\ncaformer_s18.sail_in1k,224,72.900,27.100,90.960,9.040,26.34,1.000,bicubic,-10.764,-5.566,-48\nhgnet_tiny.ssld_in1k,288,72.890,27.110,91.590,8.410,14.74,1.000,bicubic,-9.638,-4.930,+136\nregnety_640.seer_ft_in1k,384,72.890,27.110,91.270,8.730,281.38,1.000,bicubic,-11.034,-5.656,-111\nresnetrs152.tf_in1k,320,72.890,27.110,91.200,8.800,86.62,1.000,bicubic,-10.820,-5.472,-58\nregnety_320.tv2_in1k,224,72.890,27.110,90.780,9.220,145.05,0.965,bicubic,-10.280,-5.636,+27\nefficientformerv2_l.snap_dist_in1k,224,72.860,27.140,90.820,9.180,26.32,0.950,bicubic,-10.782,-5.726,-47\nrexnet_300.nav_in1k,224,72.850,27.150,90.700,9.300,34.71,0.875,bicubic,-9.930,-5.530,+85\ncoatnet_rmlp_1_rw_224.sw_in1k,224,72.840,27.160,90.800,9.200,41.69,0.950,bicubic,-10.510,-5.652,+
3\ntf_efficientnetv2_s.in21k_ft_in1k,300,72.820,27.180,91.370,8.630,21.46,1.000,bicubic,-10.528,-5.340,+3\nconvnextv2_tiny.fcmae_ft_in1k,288,72.810,27.190,91.080,8.920,28.64,1.000,bicubic,-10.668,-5.638,-27\nregnety_160.deit_in1k,224,72.800,27.200,90.930,9.070,83.59,0.950,bicubic,-10.200,-5.572,+49\nregnetv_064.ra3_in1k,224,72.800,27.200,90.710,9.290,30.58,0.950,bicubic,-10.318,-6.002,+33\nswinv2_cr_small_ns_224.sw_in1k,224,72.770,27.230,90.590,9.410,49.70,0.900,bicubic,-10.720,-5.896,-32\ncrossvit_15_dagger_408.in1k,408,72.760,27.240,91.050,8.950,28.50,1.000,bicubic,-11.082,-5.736,-102\ntf_efficientnet_b4.ap_in1k,380,72.760,27.240,91.040,8.960,19.34,0.922,bicubic,-10.486,-5.352,+10\ntf_efficientnetv2_s.in1k,300,72.760,27.240,90.770,9.230,21.46,1.000,bicubic,-10.402,-5.564,+17\nnextvit_base.bd_in1k,224,72.750,27.250,91.230,8.770,44.82,0.950,bicubic,-10.748,-5.418,-38\nfocalnet_small_lrf.ms_in1k,224,72.740,27.260,90.800,9.200,50.34,0.900,bicubic,-10.768,-5.786,-42\nresnetv2_152x2_bit.goog_teacher_in21k_ft_in1k_384,384,72.720,27.280,90.940,9.060,236.34,1.000,bicubic,-11.138,-6.190,-110\ntwins_svt_base.in1k,224,72.720,27.280,90.420,9.580,56.07,0.900,bicubic,-10.402,-6.010,+22\nregnetv_040.ra3_in1k,288,72.700,27.300,91.120,8.880,20.64,1.000,bicubic,-10.504,-5.562,+7\nnaflexvit_base_patch16_par_gap.e300_s576_in1k,384,72.700,27.300,90.650,9.350,86.63,1.000,bicubic,-10.808,-5.720,-46\nresnetv2_101.a1h_in1k,288,72.700,27.300,90.580,9.420,44.54,1.000,bicubic,-10.302,-5.864,+37\nefficientformer_l7.snap_dist_in1k,224,72.690,27.310,90.690,9.310,82.23,0.950,bicubic,-10.706,-5.842,-22\nnaflexvit_base_patch16_parfac_gap.e300_s576_in1k,384,72.690,27.310,90.660,9.340,86.46,1.000,bicubic,-10.778,-5.910,-38\nvit_base_patch32_384.augreg_in21k_ft_in1k,384,72.680,27.320,91.160,8.840,88.30,1.000,bicubic,-10.678,-5.668,-19\ncoatnet_1_rw_224.sw_in1k,224,72.680,27.320,90.790,9.210,41.72,0.950,bicubic,-10.940,-5.592,-66\nxcit_small_12_p8_224.fb_in1k,224,72.670,27.330,90.660,9.340,26.21,1.000,
bicubic,-10.684,-5.760,-19\nregnety_064.ra3_in1k,224,72.650,27.350,90.630,9.370,30.58,0.950,bicubic,-10.356,-5.894,+29\necaresnet101d.miil_in1k,288,72.640,27.360,91.010,8.990,44.57,0.950,bicubic,-10.366,-5.286,+27\nregnety_032.ra_in1k,288,72.610,27.390,90.930,9.070,19.44,1.000,bicubic,-10.114,-5.496,+69\nxception65p.ra3_in1k,299,72.590,27.410,90.750,9.250,39.82,0.940,bicubic,-10.570,-5.620,+3\nregnety_040.ra3_in1k,288,72.590,27.410,90.740,9.260,20.65,1.000,bicubic,-10.458,-5.762,+20\nconvnext_tiny.fb_in22k_ft_in1k,224,72.580,27.420,91.340,8.660,28.59,0.875,bicubic,-10.320,-4.950,+39\ntwins_pcpvt_large.in1k,224,72.580,27.420,90.880,9.120,60.99,0.900,bicubic,-10.578,-5.490,+3\nxception65.ra3_in1k,299,72.570,27.430,90.710,9.290,39.92,0.940,bicubic,-10.616,-5.886,-7\npnasnet5large.tf_in1k,331,72.560,27.440,90.530,9.470,86.06,0.911,bicubic,-10.254,-5.754,+53\nxcit_small_12_p16_224.fb_dist_in1k,224,72.540,27.460,91.080,8.920,26.25,1.000,bicubic,-10.814,-5.392,-28\nmambaout_tiny.in1k,288,72.540,27.460,90.650,9.350,26.55,1.000,bicubic,-10.912,-5.890,-50\nhgnetv2_b3.ssld_stage2_ft_in1k,224,72.530,27.470,91.290,8.710,16.29,0.965,bicubic,-10.384,-5.070,+33\nmobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k,320,72.530,27.470,91.290,8.710,11.07,1.000,bicubic,-10.468,-5.386,+20\nvit_base_patch32_clip_224.laion2b_ft_in12k_in1k,224,72.530,27.470,90.930,9.070,88.22,0.900,bicubic,-10.774,-5.598,-24\ndeit_base_patch16_384.fb_in1k,384,72.520,27.480,90.200,9.800,86.86,1.000,bicubic,-10.588,-6.176,+3\nnfnet_l0.ra2_in1k,288,72.500,27.500,90.880,9.120,35.07,1.000,bicubic,-10.266,-5.614,+50\ndeit3_medium_patch16_224.fb_in1k,224,72.500,27.500,90.810,9.190,38.85,0.900,bicubic,-10.590,-5.302,+4\nhgnetv2_b3.ssld_stage1_in22k_in1k,224,72.490,27.510,91.150,8.850,16.29,0.965,bicubic,-10.104,-5.214,+82\nmobilenetv4_conv_aa_large.e600_r384_in1k,384,72.480,27.520,90.480,9.520,32.59,0.950,bicubic,-10.784,-5.910,-24\ntf_efficientnet_b5.in1k,456,72.470,27.530,90.900,9.100,30.39,0.934,bicubic,-10.700,-5.
634,-19\nresnext101_64x4d.c1_in1k,288,72.470,27.530,90.660,9.340,83.46,1.000,bicubic,-10.688,-5.966,-10\nresnext50_32x4d.fb_swsl_ig1b_ft_in1k,224,72.460,27.540,90.850,9.150,25.03,0.875,bilinear,-9.702,-5.206,+151\nregnety_080_tv.tv2_in1k,224,72.460,27.540,90.510,9.490,39.38,0.965,bicubic,-10.120,-5.744,+80\ngc_efficientnetv2_rw_t.agc_in1k,288,72.450,27.550,90.820,9.180,13.68,1.000,bicubic,-10.036,-5.482,+95\nresnetv2_101x3_bit.goog_in21k_ft_in1k,448,72.430,27.570,92.070,7.930,387.93,1.000,bilinear,-12.000,-5.310,-230\nconvformer_s18.sail_in1k,224,72.430,27.570,90.560,9.440,26.77,1.000,bicubic,-10.538,-5.686,+12\nxcit_tiny_24_p8_224.fb_dist_in1k,224,72.410,27.590,90.790,9.210,12.11,1.000,bicubic,-10.168,-5.396,+78\nswin_base_patch4_window7_224.ms_in1k,224,72.400,27.600,90.840,9.160,87.77,0.900,bicubic,-11.204,-5.610,-94\nmobilenetv4_conv_large.e600_r384_in1k,384,72.400,27.600,90.800,9.200,32.59,0.950,bicubic,-10.574,-5.444,+8\nmobilenetv4_hybrid_medium.ix_e550_r384_in1k,384,72.400,27.600,90.730,9.270,11.07,0.950,bicubic,-10.552,-5.740,+9\ncs3sedarknet_x.c2ns_in1k,288,72.390,27.610,91.040,8.960,35.40,1.000,bicubic,-10.264,-5.308,+54\nsequencer2d_m.in1k,224,72.390,27.610,90.670,9.330,38.31,0.875,bicubic,-10.446,-5.606,+22\nrdnet_tiny.nv_in1k,224,72.380,27.620,90.440,9.560,23.86,0.900,bicubic,-10.434,-5.592,+28\ntnt_b_patch16_224.in1k,224,72.370,27.630,90.590,9.410,65.43,0.900,bicubic,-10.494,-5.640,+18\nresnet101d.ra2_in1k,320,72.370,27.630,90.580,9.420,44.57,1.000,bicubic,-10.650,-5.876,-7\nnest_small_jx.goog_in1k,224,72.360,27.640,90.730,9.270,38.35,0.875,bicubic,-10.772,-5.600,-24\nhrnet_w18_ssld.paddle_in1k,288,72.340,27.660,90.770,9.230,21.30,1.000,bilinear,-9.712,-5.480,+150\nmaxxvit_rmlp_nano_rw_256.sw_in1k,256,72.330,27.670,90.770,9.230,16.78,0.950,bicubic,-10.706,-5.574,-11\nresnest101e.in1k,256,72.330,27.670,90.770,9.230,48.28,0.875,bilinear,-10.586,-5.540,+4\nnasnetalarge.tf_in1k,331,72.330,27.670,90.520,9.480,88.75,0.911,bicubic,-10.308,-5.546,+48\nefficien
tnetv2_rw_t.ra2_in1k,288,72.320,27.680,90.460,9.540,13.65,1.000,bicubic,-10.034,-5.718,+99\ntf_efficientnet_b2.ns_jft_in1k,260,72.310,27.690,91.010,8.990,9.11,0.890,bicubic,-10.102,-5.228,+85\nconvnext_small.fb_in1k,224,72.300,27.700,90.860,9.140,50.22,0.875,bicubic,-10.844,-5.572,-33\nefficientvit_b2.r288_in1k,288,72.300,27.700,90.860,9.140,24.33,1.000,bicubic,-10.778,-5.426,-20\nswiftformer_l3.dist_in1k,224,72.300,27.700,90.570,9.430,28.49,0.950,bicubic,-10.694,-5.666,-11\nconvnext_nano.r384_in12k_ft_in1k,384,72.260,27.740,91.040,8.960,15.59,1.000,bicubic,-11.074,-5.584,-59\nresnet152d.ra2_in1k,256,72.250,27.750,90.600,9.400,60.21,0.950,bicubic,-10.910,-6.046,-39\ntwins_pcpvt_base.in1k,224,72.250,27.750,90.550,9.450,43.83,0.900,bicubic,-10.474,-5.800,+25\nhgnetv2_b2.ssld_stage2_ft_in1k,288,72.240,27.760,90.820,9.180,11.22,1.000,bicubic,-10.122,-5.592,+87\nresnext101_32x8d.tv2_in1k,224,72.240,27.760,90.160,9.840,88.79,0.965,bilinear,-10.592,-6.074,+6\nefficientnet_b4.ra2_in1k,320,72.230,27.770,90.620,9.380,19.34,0.875,bicubic,-10.428,-5.514,+32\ncrossvit_18_240.in1k,240,72.220,27.780,90.280,9.720,43.27,0.875,bicubic,-10.170,-5.784,+80\nresnext101_64x4d.tv_in1k,224,72.200,27.800,90.610,9.390,83.46,0.875,bilinear,-10.778,-5.664,-17\ntresnet_xl.miil_in1k_448,448,72.200,27.800,90.110,9.890,78.44,0.875,bilinear,-10.866,-6.070,-30\nresnetv2_50x1_bit.goog_distilled_in1k,224,72.180,27.820,91.080,8.920,25.55,0.875,bicubic,-10.640,-5.446,+3\ntf_efficientnet_b4.aa_in1k,380,72.180,27.820,90.510,9.490,19.34,0.922,bicubic,-10.860,-5.788,-29\ncs3se_edgenet_x.c2ns_in1k,256,72.170,27.830,90.750,9.250,50.72,0.950,bicubic,-10.598,-5.556,+9\nmaxvit_nano_rw_256.sw_in1k,256,72.160,27.840,90.460,9.540,15.45,0.950,bicubic,-10.774,-5.764,-17\ndm_nfnet_f1.dm_in1k,224,72.150,27.850,90.410,9.590,132.63,0.910,bicubic,-11.294,-6.224,-98\nregnetx_320.tv2_in1k,224,72.150,27.850,90.180,9.820,107.81,0.965,bicubic,-10.674,-6.020,-1\ntresnet_m.miil_in21k_ft_in1k,224,72.150,27.850,90.180,9.820,31.39,0
.875,bilinear,-10.940,-6.104,-39\ntiny_vit_11m_224.dist_in22k_ft_in1k,224,72.140,27.860,91.380,8.620,11.00,0.950,bicubic,-11.092,-5.248,-64\ncait_xxs36_384.fb_dist_in1k,384,72.140,27.860,90.780,9.220,17.37,1.000,bicubic,-10.064,-5.368,+103\nrexnetr_200.sw_in12k_ft_in1k,288,72.130,27.870,91.270,8.730,16.52,1.000,bicubic,-11.030,-5.236,-58\nmaxxvitv2_nano_rw_256.sw_in1k,256,72.120,27.880,90.510,9.490,23.70,0.950,bicubic,-10.988,-5.822,-47\nmaxvit_rmlp_nano_rw_256.sw_in1k,256,72.120,27.880,90.330,9.670,15.50,0.950,bicubic,-10.858,-5.922,-30\nrepvit_m1_5.dist_450e_in1k,224,72.110,27.890,90.400,9.600,14.64,0.950,bicubic,-10.410,-5.858,+46\nvit_relpos_medium_patch16_cls_224.sw_in1k,224,72.090,27.910,90.290,9.710,38.76,0.900,bicubic,-10.468,-5.786,+41\nxcit_tiny_24_p16_384.fb_dist_in1k,384,72.080,27.920,90.520,9.480,12.12,1.000,bicubic,-10.500,-5.740,+35\nfastvit_sa24.apple_in1k,256,72.070,27.930,90.750,9.250,21.55,0.900,bicubic,-10.636,-5.528,+6\nregnetz_c16_evos.ch_in1k,320,72.060,27.940,91.070,8.930,13.49,0.950,bicubic,-10.600,-5.410,+11\nregnetz_c16.ra3_in1k,320,72.050,27.950,90.940,9.060,13.46,1.000,bicubic,-10.612,-5.392,+9\nregnety_320.seer_ft_in1k,384,72.040,27.960,90.860,9.140,145.05,1.000,bicubic,-11.284,-5.852,-84\nconvnext_nano.in12k_ft_in1k,288,72.030,27.970,90.870,9.130,15.59,1.000,bicubic,-10.852,-5.692,-24\nconvnextv2_tiny.fcmae_ft_in1k,224,72.030,27.970,90.350,9.650,28.64,0.875,bicubic,-10.918,-5.936,-34\ncrossvit_18_dagger_240.in1k,240,72.010,27.990,90.110,9.890,44.27,0.875,bicubic,-10.484,-5.960,+41\nresnext101_64x4d.c1_in1k,224,71.980,28.020,89.710,10.290,83.46,0.950,bicubic,-10.466,-6.210,+48\ncoat_small.in1k,224,71.970,28.030,90.450,9.550,21.69,0.900,bicubic,-10.408,-5.764,+57\nregnety_040.ra3_in1k,224,71.970,28.030,90.350,9.650,20.65,0.950,bicubic,-10.338,-5.728,+68\nresnet101.a1h_in1k,288,71.950,28.050,90.770,9.230,44.55,1.000,bicubic,-10.846,-5.554,-17\nfasternet_m.in1k,224,71.940,28.060,90.560,9.440,53.52,1.000,bicubic,-10.908,-5.828,-29\ncs3edgen
et_x.c2_in1k,288,71.940,28.060,90.390,9.610,47.82,1.000,bicubic,-10.794,-5.984,-10\nmobilenetv4_hybrid_medium.ix_e550_r256_in1k,320,71.930,28.070,90.760,9.240,11.07,1.000,bicubic,-10.570,-5.526,+32\nregnetv_040.ra3_in1k,224,71.930,28.070,90.440,9.560,20.64,0.950,bicubic,-10.510,-5.762,+41\nmobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k,256,71.930,28.070,90.410,9.590,11.07,0.950,bicubic,-10.430,-5.846,+55\nregnety_160.tv2_in1k,224,71.920,28.080,90.220,9.780,83.59,0.965,bicubic,-10.726,-6.010,-1\nconvnext_tiny_hnf.a2h_in1k,288,71.920,28.080,89.620,10.380,28.59,1.000,bicubic,-10.688,-6.404,+8\nswinv2_tiny_window16_256.ms_in1k,256,71.910,28.090,90.490,9.510,28.35,0.900,bicubic,-10.918,-5.732,-32\nmobilenetv4_conv_large.e500_r256_in1k,320,71.900,28.100,90.230,9.770,32.59,1.000,bicubic,-10.772,-6.080,-10\nregnetz_c16_evos.ch_in1k,256,71.890,28.110,90.750,9.250,13.49,0.950,bicubic,-10.050,-5.400,+118\ninception_next_tiny.sail_in1k,224,71.890,28.110,90.020,9.980,28.06,0.875,bicubic,-10.618,-5.980,+25\ndm_nfnet_f0.dm_in1k,256,71.880,28.120,90.720,9.280,71.49,0.900,bicubic,-11.588,-5.702,-136\ndavit_tiny.msft_in1k,224,71.880,28.120,90.240,9.760,28.36,0.950,bicubic,-10.814,-6.024,-15\nvit_little_patch16_reg4_gap_256.sbb_in1k,256,71.870,28.130,90.750,9.250,22.52,0.950,bicubic,-10.650,-5.362,+18\nconvnext_tiny.fb_in1k,288,71.870,28.130,90.180,9.820,28.59,1.000,bicubic,-10.824,-5.964,-17\nresnet101.a1_in1k,288,71.840,28.160,89.100,10.900,44.55,1.000,bicubic,-10.486,-6.530,+47\nmambaout_tiny.in1k,224,71.830,28.170,89.980,10.020,26.55,1.000,bicubic,-10.912,-5.754,-28\nresnetv2_50x3_bit.goog_in21k_ft_in1k,448,71.820,28.180,91.260,8.740,217.32,1.000,bilinear,-12.162,-5.870,-238\nconvnextv2_nano.fcmae_ft_in22k_in1k,288,71.820,28.180,90.760,9.240,15.62,1.000,bicubic,-10.846,-5.786,-18\nresnetrs152.tf_in1k,256,71.810,28.190,90.250,9.750,86.62,1.000,bicubic,-11.090,-6.366,-53\nxcit_tiny_12_p8_384.fb_dist_in1k,384,71.800,28.200,90.710,9.290,6.71,1.000,bicubic,-10.594,-5.484,+29\nseresn
ext50_32x4d.racm_in1k,288,71.800,28.200,89.940,10.060,27.56,0.950,bicubic,-10.386,-6.206,+67\nefficientformer_l3.snap_dist_in1k,224,71.780,28.220,90.150,9.850,31.41,0.950,bicubic,-10.782,-6.100,+5\nmobilevitv2_200.cvnets_in22k_ft_in1k_384,384,71.770,28.230,90.490,9.510,18.45,1.000,bicubic,-11.660,-6.086,-137\nresnet152.a1h_in1k,224,71.760,28.240,90.270,9.730,60.19,0.950,bicubic,-11.058,-5.854,-45\nswin_small_patch4_window7_224.ms_in1k,224,71.750,28.250,90.220,9.780,49.61,0.900,bicubic,-11.478,-6.106,-107\nhgnet_tiny.ssld_in1k,224,71.740,28.260,90.530,9.470,14.74,0.965,bicubic,-10.202,-5.588,+101\nsequencer2d_s.in1k,224,71.740,28.260,90.520,9.480,27.65,0.875,bicubic,-10.596,-5.512,+33\nefficientvit_b2.r256_in1k,256,71.730,28.270,90.250,9.750,24.33,1.000,bicubic,-10.982,-5.842,-34\nregnetx_160.tv2_in1k,224,71.730,28.270,90.110,9.890,54.28,0.965,bicubic,-10.844,-5.948,-4\npvt_v2_b2_li.in1k,224,71.730,28.270,90.100,9.900,22.55,0.900,bicubic,-10.470,-6.008,+57\nese_vovnet57b.ra4_e3600_r256_in1k,256,71.720,28.280,90.460,9.540,38.61,0.950,bicubic,-10.740,-5.546,+11\nresnet152.a2_in1k,288,71.710,28.290,89.310,10.690,60.19,1.000,bicubic,-10.904,-6.438,-21\nflexivit_small.1200ep_in1k,240,71.690,28.310,90.260,9.740,22.06,0.950,bicubic,-10.886,-5.892,-8\nconvnext_nano.in12k_ft_in1k,224,71.670,28.330,90.600,9.400,15.59,0.950,bicubic,-10.636,-5.738,+33\nresnet50.fb_swsl_ig1b_ft_in1k,224,71.670,28.330,90.580,9.420,25.56,0.875,bilinear,-9.476,-5.400,+201\nvit_relpos_base_patch16_clsgap_224.sw_in1k,224,71.660,28.340,90.260,9.740,86.43,0.900,bicubic,-11.098,-5.918,-49\npit_b_224.in1k,224,71.650,28.350,89.280,10.720,73.76,0.900,bicubic,-10.808,-6.424,+6\nrepvit_m1_5.dist_300e_in1k,224,71.630,28.370,90.210,9.790,14.64,0.950,bicubic,-10.752,-5.816,+15\ncrossvit_15_dagger_240.in1k,240,71.630,28.370,89.810,10.190,28.21,0.875,bicubic,-10.672,-6.150,+30\nmobilevitv2_175.cvnets_in22k_ft_in1k_384,384,71.620,28.380,90.530,9.470,14.25,1.000,bicubic,-11.300,-5.920,-79\nvit_relpos_base_patch16_22
4.sw_in1k,224,71.610,28.390,90.230,9.770,86.43,0.900,bicubic,-10.872,-5.914,-4\ngcvit_xtiny.in1k,224,71.610,28.390,90.150,9.850,19.98,0.875,bicubic,-10.354,-5.816,+79\nflexivit_small.600ep_in1k,240,71.610,28.390,90.120,9.880,22.06,0.950,bicubic,-10.778,-5.922,+10\nhgnetv2_b2.ssld_stage1_in22k_in1k,288,71.600,28.400,90.580,9.420,11.22,1.000,bicubic,-9.856,-5.436,+143\ncoatnet_bn_0_rw_224.sw_in1k,224,71.600,28.400,90.330,9.670,27.44,0.950,bicubic,-10.794,-5.892,+2\nswinv2_cr_small_224.sw_in1k,224,71.600,28.400,90.140,9.860,49.70,0.900,bicubic,-11.526,-5.970,-112\nresnet61q.ra2_in1k,288,71.600,28.400,90.130,9.870,36.85,1.000,bicubic,-10.932,-6.002,-18\nresnetaa50d.sw_in12k_ft_in1k,224,71.600,28.400,89.710,10.290,25.58,0.950,bicubic,-10.182,-6.412,+102\neca_nfnet_l0.ra2_in1k,288,71.590,28.410,91.010,8.990,24.14,1.000,bicubic,-11.000,-5.476,-29\nhiera_tiny_224.mae_in1k_ft_in1k,224,71.590,28.410,89.960,10.040,27.91,0.900,bicubic,-11.236,-6.228,-74\nflexivit_small.300ep_in1k,240,71.590,28.410,89.860,10.140,22.06,0.950,bicubic,-10.580,-6.172,+40\nresnet152.tv2_in1k,224,71.580,28.420,89.880,10.120,60.19,0.965,bilinear,-10.714,-6.120,+20\nmvitv2_tiny.fb_in1k,224,71.560,28.440,90.220,9.780,24.17,0.900,bicubic,-10.848,-5.930,-6\nefficientnet_b3.ra2_in1k,320,71.530,28.470,90.020,9.980,12.23,1.000,bicubic,-10.766,-5.916,+15\nxcit_large_24_p16_224.fb_in1k,224,71.530,28.470,89.190,10.810,189.10,1.000,bicubic,-11.356,-6.690,-85\nconvnextv2_nano.fcmae_ft_in22k_in1k_384,384,71.520,28.480,90.640,9.360,15.62,1.000,bicubic,-11.846,-6.100,-159\nconvit_base.fb_in1k,224,71.520,28.480,90.080,9.920,86.54,0.875,bicubic,-10.776,-6.044,+15\npoolformer_m48.sail_in1k,224,71.510,28.490,89.720,10.280,73.47,0.950,bicubic,-10.952,-6.366,-17\necaresnet101d_pruned.miil_in1k,288,71.500,28.500,90.380,9.620,24.88,0.950,bicubic,-10.516,-5.800,+50\nresnetv2_50d_evos.ah_in1k,288,71.500,28.500,90.040,9.960,25.59,1.000,bicubic,-10.524,-5.876,+51\nresnet101d.ra2_in1k,256,71.500,28.500,89.850,10.150,44.57,0.950,b
icubic,-10.754,-6.220,+17\ntresnet_l.miil_in1k_448,448,71.480,28.520,89.960,10.040,55.99,0.875,bilinear,-10.798,-6.028,+12\ntresnet_xl.miil_in1k,224,71.460,28.540,89.470,10.530,78.44,0.875,bilinear,-10.600,-6.462,+42\nresnetaa50d.sw_in12k_ft_in1k,288,71.450,28.550,90.360,9.640,25.58,1.000,bicubic,-11.206,-6.132,-61\nregnety_032.ra_in1k,224,71.450,28.550,90.190,9.810,19.44,0.950,bicubic,-10.498,-5.782,+60\npoolformerv2_m48.sail_in1k,224,71.440,28.560,89.820,10.180,73.35,1.000,bicubic,-11.172,-6.248,-54\nnextvit_small.bd_in1k,224,71.430,28.570,90.390,9.610,31.76,0.950,bicubic,-11.196,-5.834,-59\nfbnetv3_g.ra2_in1k,288,71.430,28.570,90.340,9.660,16.62,0.950,bilinear,-10.604,-5.724,+42\nresnext101_32x8d.fb_ssl_yfcc100m_ft_in1k,224,71.430,28.570,90.340,9.660,88.79,0.875,bilinear,-10.190,-5.696,+90\nresnetv2_152x2_bit.goog_teacher_in21k_ft_in1k,224,71.430,28.570,90.280,9.720,236.34,0.875,bicubic,-11.488,-6.288,-107\nmobilenetv4_conv_medium.e250_r384_in12k_ft_in1k,384,71.410,28.590,90.630,9.370,9.72,0.950,bicubic,-11.366,-5.818,-87\nefficientvit_b2.r224_in1k,224,71.390,28.610,89.670,10.330,24.33,0.950,bicubic,-10.772,-6.040,+22\nvit_base_patch32_clip_224.laion2b_ft_in1k,224,71.380,28.620,90.470,9.530,88.22,0.900,bicubic,-11.234,-5.736,-63\ncoatnet_0_rw_224.sw_in1k,224,71.350,28.650,89.370,10.630,27.44,0.950,bicubic,-11.046,-6.474,-26\npvt_v2_b2.in1k,224,71.340,28.660,89.990,10.010,25.36,0.900,bicubic,-10.722,-5.976,+29\nxcit_tiny_24_p8_224.fb_in1k,224,71.330,28.670,90.300,9.700,12.11,1.000,bicubic,-10.558,-5.678,+53\nwide_resnet50_2.racm_in1k,288,71.330,28.670,90.180,9.820,68.88,0.950,bicubic,-10.930,-5.882,-2\nresnet152.a1_in1k,288,71.330,28.670,89.290,10.710,60.19,1.000,bicubic,-11.412,-6.808,-87\nwide_resnet101_2.tv2_in1k,224,71.320,28.680,89.740,10.260,126.89,0.965,bilinear,-11.194,-6.272,-46\necaresnet101d.miil_in1k,224,71.300,28.700,90.190,9.810,44.57,0.875,bicubic,-10.880,-5.872,+9\nnfnet_l0.ra2_in1k,224,71.300,28.700,89.510,10.490,35.07,0.900,bicubic,-10.570,-6.524
,+51\nresnetv2_101.a1h_in1k,224,71.290,28.710,89.770,10.230,44.54,0.950,bicubic,-10.752,-6.096,+25\nresnext101_32x8d.tv2_in1k,176,71.290,28.710,89.310,10.690,88.79,0.875,bilinear,-10.700,-6.406,+32\nregnetz_c16.ra3_in1k,256,71.250,28.750,90.270,9.730,13.46,0.940,bicubic,-10.912,-5.954,+9\nhgnet_small.paddle_in1k,288,71.240,28.760,90.170,9.830,24.36,1.000,bicubic,-10.982,-6.054,-5\ncs3sedarknet_x.c2ns_in1k,256,71.230,28.770,90.400,9.600,35.40,0.887,bicubic,-10.696,-5.622,+43\nhgnetv2_b2.ssld_stage2_ft_in1k,224,71.230,28.770,90.370,9.630,11.22,0.965,bicubic,-10.334,-5.524,+79\nvit_relpos_medium_patch16_224.sw_in1k,224,71.220,28.780,89.960,10.040,38.75,0.900,bicubic,-11.242,-6.008,-48\ntf_efficientnet_b4.in1k,380,71.210,28.790,90.020,9.980,19.34,0.922,bicubic,-11.388,-6.112,-71\nmixer_b16_224.miil_in21k_ft_in1k,224,71.200,28.800,89.540,10.460,59.88,0.875,bilinear,-11.114,-6.446,-25\nresnext101_32x16d.fb_ssl_yfcc100m_ft_in1k,224,71.190,28.810,90.430,9.570,194.03,0.875,bilinear,-10.666,-5.608,+42\nmobilevitv2_150.cvnets_in22k_ft_in1k_384,384,71.190,28.810,90.260,9.740,10.59,1.000,bicubic,-11.416,-6.054,-77\nrexnetr_200.sw_in12k_ft_in1k,224,71.180,28.820,90.500,9.500,16.52,0.950,bicubic,-11.422,-5.896,-78\npit_s_distilled_224.in1k,224,71.170,28.830,89.590,10.410,24.04,0.900,bicubic,-10.656,-6.130,+47\necaresnet50t.a1_in1k,288,71.170,28.830,89.550,10.450,25.57,1.000,bicubic,-10.940,-6.104,+4\nconvmixer_1536_20.in1k,224,71.160,28.840,89.440,10.560,51.63,0.960,bicubic,-10.222,-6.014,+106\nconvnext_tiny_hnf.a2h_in1k,224,71.150,28.850,89.320,10.680,28.59,0.950,bicubic,-11.080,-6.540,-18\necaresnet50t.ra2_in1k,320,71.140,28.860,90.400,9.600,25.57,0.950,bicubic,-11.230,-5.720,-43\nxcit_small_24_p16_224.fb_in1k,224,71.130,28.870,89.520,10.480,47.67,1.000,bicubic,-11.458,-6.476,-78\nconvnextv2_nano.fcmae_ft_in22k_in1k,224,71.120,28.880,90.300,9.700,15.62,0.875,bicubic,-10.918,-5.874,+8\ncrossvit_base_240.in1k,240,71.120,28.880,89.760,10.240,105.03,0.875,bicubic,-11.092,-6.066,-17\
nresnet51q.ra2_in1k,288,71.110,28.890,90.040,9.960,35.70,1.000,bilinear,-11.246,-6.142,-44\nfocalnet_tiny_srf.ms_in1k,224,71.100,28.900,89.580,10.420,28.43,0.900,bicubic,-11.046,-6.392,-7\nefficientformerv2_s2.snap_dist_in1k,224,71.090,28.910,90.070,9.930,12.71,0.950,bicubic,-11.066,-5.842,-10\nxcit_small_12_p16_224.fb_in1k,224,71.090,28.910,89.660,10.340,26.25,1.000,bicubic,-10.892,-6.154,+12\nresnext50_32x4d.a1h_in1k,288,71.080,28.920,89.650,10.350,25.03,1.000,bicubic,-10.936,-6.284,+6\nvit_base_patch16_224.orig_in21k_ft_in1k,224,71.070,28.930,90.430,9.570,86.57,0.900,bicubic,-10.712,-5.606,+38\nhrnet_w18_ssld.paddle_in1k,224,71.070,28.930,89.910,10.090,21.30,0.950,bilinear,-10.044,-5.212,+134\ntf_efficientnetv2_b3.in21k_ft_in1k,300,71.060,28.940,90.550,9.450,14.36,0.900,bicubic,-11.574,-6.078,-105\ncs3edgenet_x.c2_in1k,256,71.060,28.940,89.880,10.120,47.82,0.887,bicubic,-11.138,-6.076,-23\nfocalnet_tiny_lrf.ms_in1k,224,71.060,28.940,89.520,10.480,28.65,0.900,bicubic,-11.114,-6.428,-19\ntf_efficientnet_b3.ap_in1k,300,71.060,28.940,89.350,10.650,12.23,0.904,bicubic,-10.782,-6.264,+27\nvit_relpos_medium_patch16_rpn_224.sw_in1k,224,71.050,28.950,89.950,10.050,38.73,0.900,bicubic,-11.264,-5.760,-49\ncoat_lite_small.in1k,224,71.030,28.970,89.580,10.420,19.84,0.900,bicubic,-11.274,-6.274,-46\nconvnextv2_nano.fcmae_ft_in1k,288,71.020,28.980,90.120,9.880,15.62,1.000,bicubic,-11.470,-6.106,-78\nmobilenetv4_conv_large.e500_r256_in1k,256,71.000,29.000,89.410,10.590,32.59,0.950,bicubic,-10.864,-6.286,+17\necaresnet50d.miil_in1k,288,70.990,29.010,90.210,9.790,25.58,0.950,bicubic,-10.658,-5.674,+37\nedgenext_small.usi_in1k,320,70.990,29.010,89.790,10.210,5.59,1.000,bicubic,-10.584,-5.922,+47\nrepvit_m3.dist_in1k,224,70.990,29.010,89.500,10.500,10.68,0.950,bicubic,-10.504,-6.062,+61\nresnetv2_50d_gn.ah_in1k,288,70.980,29.020,89.820,10.180,25.57,1.000,bicubic,-10.980,-5.960,+1\nfastvit_sa12.apple_dist_in1k,256,70.980,29.020,89.220,10.780,11.58,0.900,bicubic,-10.858,-6.480,+19\nsw
in_s3_tiny_224.ms_in1k,224,70.970,29.030,89.870,10.130,28.33,0.900,bicubic,-11.158,-6.088,-25\ndeit_base_patch16_224.fb_in1k,224,70.970,29.030,89.180,10.820,86.57,0.900,bicubic,-11.010,-6.560,-6\ntiny_vit_11m_224.in1k,224,70.960,29.040,89.840,10.160,11.00,0.950,bicubic,-10.578,-6.030,+46\nxcit_tiny_12_p8_224.fb_dist_in1k,224,70.960,29.040,89.720,10.280,6.71,1.000,bicubic,-10.254,-5.894,+102\nmobilevitv2_200.cvnets_in22k_ft_in1k,256,70.950,29.050,89.560,10.440,18.45,0.888,bicubic,-11.370,-6.382,-63\nxcit_medium_24_p16_224.fb_in1k,224,70.940,29.060,89.420,10.580,84.40,1.000,bicubic,-11.690,-6.558,-119\nconvnext_tiny.fb_in1k,224,70.930,29.070,89.820,10.180,28.59,0.875,bicubic,-11.136,-6.034,-26\nhalo2botnet50ts_256.a1h_in1k,256,70.920,29.080,89.540,10.460,22.64,0.950,bicubic,-11.164,-6.118,-30\nvisformer_small.in1k,224,70.920,29.080,89.330,10.670,40.22,0.900,bicubic,-11.172,-6.546,-30\nvit_small_patch16_rope_mixed_224.naver_in1k,224,70.920,29.080,88.820,11.180,21.99,0.900,bicubic,-10.332,-6.236,+91\ncs3sedarknet_l.c2ns_in1k,288,70.900,29.100,90.290,9.710,21.91,0.950,bicubic,-10.870,-5.672,+17\nvit_large_patch32_384.orig_in21k_ft_in1k,384,70.890,29.110,90.460,9.540,306.63,1.000,bicubic,-10.618,-5.626,+38\ncs3darknet_x.c2ns_in1k,288,70.890,29.110,90.040,9.960,35.05,1.000,bicubic,-11.344,-6.194,-55\nvit_srelpos_medium_patch16_224.sw_in1k,224,70.890,29.110,89.900,10.100,38.74,0.900,bicubic,-11.366,-6.032,-58\nrexnet_200.nav_in1k,224,70.880,29.120,89.860,10.140,16.37,0.875,bicubic,-10.756,-5.812,+23\ncoatnext_nano_rw_224.sw_in1k,224,70.870,29.130,90.190,9.810,14.70,0.900,bicubic,-11.128,-5.104,-24\ncoatnet_rmlp_nano_rw_224.sw_in1k,224,70.870,29.130,89.910,10.090,15.15,0.900,bicubic,-11.194,-5.964,-35\nresnet101.a1h_in1k,224,70.870,29.130,89.590,10.410,44.55,0.950,bicubic,-11.080,-6.206,-15\nresnest50d_4s2x40d.in1k,224,70.870,29.130,89.520,10.480,30.42,0.875,bicubic,-10.276,-6.046,+97\ntnt_s_legacy_patch16_224.in1k,224,70.860,29.140,89.510,10.490,23.76,0.900,bicubic,-10.652,
-6.232,+31\npoolformer_m36.sail_in1k,224,70.860,29.140,89.430,10.570,56.17,0.950,bicubic,-11.244,-6.268,-43\neca_nfnet_l0.ra2_in1k,224,70.840,29.160,89.850,10.150,24.14,0.900,bicubic,-10.922,-6.144,+8\nwide_resnet50_2.tv2_in1k,224,70.840,29.160,89.170,10.830,68.88,0.965,bilinear,-10.772,-6.596,+18\nnest_tiny_jx.goog_in1k,224,70.830,29.170,89.770,10.230,17.06,0.875,bicubic,-10.598,-5.672,+48\nvit_base_patch16_rpn_224.sw_in1k,224,70.830,29.170,89.760,10.240,86.54,0.900,bicubic,-11.388,-6.236,-64\nresnet101.a2_in1k,288,70.820,29.180,89.250,10.750,44.55,1.000,bicubic,-11.398,-6.490,-65\nlamhalobotnet50ts_256.a1h_in1k,256,70.790,29.210,89.000,11.000,22.57,0.950,bicubic,-10.762,-6.510,+20\ntresnet_m.miil_in1k_448,448,70.780,29.220,88.690,11.310,31.39,0.875,bilinear,-10.930,-6.872,+3\nresnetrs101.tf_in1k,288,70.770,29.230,89.690,10.310,63.62,0.940,bicubic,-11.518,-6.310,-79\ndeit3_small_patch16_224.fb_in1k,224,70.770,29.230,89.520,10.480,22.06,0.900,bicubic,-10.612,-6.098,+53\nregnetx_080.tv2_in1k,224,70.770,29.230,89.210,10.790,39.57,0.965,bicubic,-10.782,-6.330,+16\nconvnext_nano.d1h_in1k,288,70.760,29.240,89.390,10.610,15.59,1.000,bicubic,-10.714,-6.280,+30\nefficientnet_b3.ra2_in1k,288,70.760,29.240,89.380,10.620,12.23,0.875,bicubic,-10.712,-6.316,+30\nresnet152.a2_in1k,224,70.760,29.240,88.540,11.460,60.19,0.950,bicubic,-11.004,-6.730,-5\npoolformerv2_m36.sail_in1k,224,70.750,29.250,89.460,10.540,56.08,1.000,bicubic,-11.464,-6.436,-73\nseresnet50.ra2_in1k,288,70.740,29.260,89.810,10.190,28.09,0.950,bicubic,-10.548,-5.842,+56\ncoatnet_nano_rw_224.sw_in1k,224,70.740,29.260,89.610,10.390,15.14,0.900,bicubic,-10.964,-6.040,-4\nvit_base_patch32_clip_224.openai_ft_in1k,224,70.730,29.270,89.920,10.080,88.22,0.900,bicubic,-11.214,-6.052,-33\ntf_efficientnet_b1.ns_jft_in1k,240,70.720,29.280,89.970,10.030,7.79,0.882,bicubic,-10.668,-5.570,+40\nswinv2_cr_tiny_ns_224.sw_in1k,224,70.720,29.280,89.380,10.620,28.33,0.900,bicubic,-11.076,-6.444,-17\nwide_resnet50_2.racm_in1k,224,70.7
20,29.280,89.080,10.920,68.88,0.875,bicubic,-10.746,-6.138,+24\nconvit_small.fb_in1k,224,70.710,29.290,89.500,10.500,27.78,0.875,bicubic,-10.702,-6.240,+34\nresnetv2_101x1_bit.goog_in21k_ft_in1k,448,70.700,29.300,90.960,9.040,44.54,1.000,bilinear,-11.632,-5.564,-103\nregnety_032.tv2_in1k,224,70.690,29.310,89.850,10.150,19.44,0.965,bicubic,-11.098,-5.976,-21\nmobilenetv3_large_150d.ra4_e3600_r256_in1k,320,70.690,29.310,89.750,10.250,14.62,1.000,bilinear,-11.142,-6.128,-24\nswin_tiny_patch4_window7_224.ms_in22k_ft_in1k,224,70.680,29.320,90.490,9.510,28.29,0.900,bicubic,-10.286,-5.530,+92\necaresnetlight.miil_in1k,288,70.680,29.320,89.850,10.150,30.16,0.950,bicubic,-10.750,-5.940,+25\ncs3darknet_x.c2ns_in1k,256,70.680,29.320,89.780,10.220,35.05,0.950,bicubic,-11.176,-5.960,-33\ntf_efficientnetv2_b3.in1k,300,70.680,29.320,89.420,10.580,14.36,0.904,bicubic,-11.280,-6.512,-49\nvit_small_patch16_rope_ape_224.naver_in1k,224,70.680,29.320,88.470,11.530,22.06,0.900,bicubic,-10.336,-6.518,+90\ntresnet_l.miil_in1k,224,70.670,29.330,89.630,10.370,55.99,0.875,bilinear,-10.828,-6.022,+3\nlevit_384.fb_dist_in1k,224,70.670,29.330,89.290,10.710,39.13,0.900,bicubic,-11.930,-6.730,-157\nlevit_conv_384.fb_dist_in1k,224,70.670,29.330,89.290,10.710,39.13,0.900,bicubic,-11.930,-6.730,-155\ncrossvit_small_240.in1k,240,70.660,29.340,89.390,10.610,26.86,0.875,bicubic,-10.376,-6.076,+79\nresnet50d.ra4_e3600_r224_in1k,288,70.640,29.360,89.500,10.500,25.58,1.000,bicubic,-11.200,-6.426,-37\nvit_small_patch16_224.augreg_in21k_ft_in1k,224,70.630,29.370,90.080,9.920,22.05,0.900,bicubic,-10.772,-6.070,+22\nresnet61q.ra2_in1k,256,70.600,29.400,89.810,10.190,36.85,0.900,bicubic,-11.370,-6.038,-59\nresnet50_gn.a1h_in1k,288,70.590,29.410,89.470,10.530,25.56,0.950,bicubic,-10.626,-6.164,+47\nmobilenetv4_hybrid_medium.ix_e550_r256_in1k,256,70.570,29.430,89.520,10.480,11.07,0.950,bicubic,-10.908,-6.172,+1\nvit_small_patch16_rope_mixed_ape_224.naver_in1k,224,70.570,29.430,89.050,10.950,22.06,0.900,bicubic,-1
0.404,-5.926,+80\nresnet152.a1_in1k,224,70.570,29.430,88.420,11.580,60.19,0.950,bicubic,-11.428,-7.468,-68\nvit_small_r26_s32_224.augreg_in21k_ft_in1k,224,70.550,29.450,90.030,9.970,36.43,0.900,bicubic,-11.306,-6.054,-51\ncait_xxs24_384.fb_dist_in1k,384,70.550,29.450,89.770,10.230,12.03,1.000,bicubic,-10.404,-5.660,+80\nmobilevitv2_175.cvnets_in22k_ft_in1k,256,70.550,29.450,89.640,10.360,14.25,0.888,bicubic,-11.400,-6.120,-60\nconvnextv2_nano.fcmae_ft_in1k,224,70.550,29.450,89.360,10.640,15.62,0.875,bicubic,-11.306,-6.596,-47\nresnet51q.ra2_in1k,256,70.540,29.460,89.470,10.530,35.70,0.875,bilinear,-11.234,-6.462,-39\ncoat_mini.in1k,224,70.530,29.470,89.560,10.440,10.34,0.900,bicubic,-10.790,-5.828,+21\nresnet101.tv2_in1k,224,70.530,29.470,89.350,10.650,44.55,0.965,bilinear,-11.376,-6.418,-58\nefficientnet_b1.ra4_e3600_r240_in1k,288,70.520,29.480,89.610,10.390,7.79,1.000,bicubic,-10.924,-6.088,+1\ngc_efficientnetv2_rw_t.agc_in1k,224,70.510,29.490,89.660,10.340,13.68,1.000,bicubic,-10.766,-5.920,+26\ndeit_small_distilled_patch16_224.fb_in1k,224,70.510,29.490,89.380,10.620,22.44,0.900,bicubic,-10.694,-5.998,+36\nhalonet50ts.a1h_in1k,256,70.510,29.490,89.190,10.810,22.73,0.940,bicubic,-11.130,-6.422,-34\nvit_relpos_small_patch16_224.sw_in1k,224,70.490,29.510,89.970,10.030,21.98,0.900,bicubic,-10.986,-5.854,-13\nlegacy_senet154.in1k,224,70.490,29.510,88.960,11.040,115.09,0.875,bilinear,-10.830,-6.542,+15\nseresnext101_64x4d.gluon_in1k,224,70.480,29.520,89.180,10.820,88.23,0.875,bicubic,-10.404,-6.128,+80\ntwins_pcpvt_small.in1k,224,70.470,29.530,89.120,10.880,24.11,0.900,bicubic,-10.646,-6.504,+43\nswinv2_tiny_window8_256.ms_in1k,256,70.450,29.550,89.450,10.550,28.35,0.900,bicubic,-11.374,-6.538,-56\nresnetv2_50.a1h_in1k,288,70.450,29.550,89.240,10.760,25.55,1.000,bicubic,-10.956,-6.472,0\nhgnetv2_b2.ssld_stage1_in22k_in1k,224,70.430,29.570,89.640,10.360,11.22,0.965,bicubic,-10.312,-5.860,+97\ntwins_svt_small.in1k,224,70.430,29.570,89.390,10.610,24.06,0.900,bicubic,-11.2
64,-6.280,-46\nhgnet_small.paddle_in1k,224,70.420,29.580,89.440,10.560,24.36,0.965,bicubic,-11.006,-6.406,-7\nseresnext50_32x4d.racm_in1k,224,70.420,29.580,89.120,10.880,27.56,0.875,bicubic,-10.848,-6.504,+16\npoolformerv2_s36.sail_in1k,224,70.410,29.590,89.590,10.410,30.79,1.000,bicubic,-11.166,-6.090,-40\ntf_efficientnet_b3.aa_in1k,300,70.410,29.590,89.290,10.710,12.23,0.904,bicubic,-11.238,-6.434,-48\nresnext101_32x4d.fb_ssl_yfcc100m_ft_in1k,224,70.400,29.600,89.810,10.190,44.18,0.875,bilinear,-10.536,-5.922,+62\nregnetz_b16.ra3_in1k,288,70.400,29.600,89.210,10.790,9.72,1.000,bicubic,-10.340,-6.318,+91\nresnest50d_1s4x24d.in1k,224,70.400,29.600,89.210,10.790,25.68,0.875,bicubic,-10.624,-6.128,+49\nresnet50d.ra2_in1k,288,70.390,29.610,89.300,10.700,25.58,0.950,bicubic,-10.964,-6.258,-4\ncrossvit_15_240.in1k,240,70.380,29.620,89.500,10.500,27.53,0.875,bicubic,-11.142,-6.194,-40\nresnet50.c2_in1k,288,70.380,29.620,89.200,10.800,25.56,1.000,bicubic,-10.482,-6.330,+73\nrepvit_m1_1.dist_450e_in1k,224,70.370,29.630,89.240,10.760,8.80,0.950,bicubic,-10.944,-6.328,-1\necaresnet50t.a2_in1k,288,70.360,29.640,89.070,10.930,25.57,1.000,bicubic,-11.322,-6.454,-58\ngernet_l.idstcv_in1k,256,70.360,29.640,88.960,11.040,31.08,0.875,bilinear,-10.994,-6.782,-9\nsenet154.gluon_in1k,224,70.360,29.640,88.900,11.100,115.09,0.875,bicubic,-10.898,-6.458,+5\nvit_small_patch16_rope_224.naver_in1k,224,70.350,29.650,88.680,11.320,21.98,0.900,bicubic,-10.872,-6.340,+9\nfbnetv3_g.ra2_in1k,240,70.330,29.670,89.600,10.400,16.62,0.950,bilinear,-10.916,-6.086,+3\nconvnext_nano_ols.d1h_in1k,288,70.330,29.670,88.980,11.020,15.65,1.000,bicubic,-11.282,-6.646,-54\ntf_efficientnet_lite4.in1k,380,70.320,29.680,89.200,10.800,13.01,0.920,bilinear,-11.220,-6.468,-50\ngcresnext50ts.ch_in1k,288,70.280,29.720,89.240,10.760,15.67,1.000,bicubic,-10.962,-6.296,+2\ncs3sedarknet_l.c2ns_in1k,256,70.270,29.730,89.800,10.200,21.91,0.887,bicubic,-10.938,-5.998,+7\nresnest50d.in1k,224,70.260,29.740,88.630,11.370,27.48,0
.875,bilinear,-10.698,-6.742,+41\nconvnext_nano.d1h_in1k,224,70.260,29.740,88.450,11.550,15.59,0.950,bicubic,-10.502,-6.882,+73\nefficientnetv2_rw_t.ra2_in1k,224,70.250,29.750,89.670,10.330,13.65,1.000,bicubic,-11.120,-5.880,-21\nsebotnet33ts_256.a1h_in1k,256,70.250,29.750,88.660,11.340,13.70,0.940,bicubic,-10.906,-6.500,+9\nresnetaa50.a1h_in1k,288,70.200,29.800,89.870,10.130,25.56,1.000,bicubic,-11.418,-5.924,-66\ngcresnet50t.ra2_in1k,288,70.200,29.800,89.520,10.480,25.90,1.000,bicubic,-11.258,-6.204,-38\nmambaout_kobe.in1k,288,70.190,29.810,89.380,10.620,9.14,1.000,bicubic,-10.874,-6.302,+22\ntnt_s_patch16_224.in1k,224,70.170,29.830,89.510,10.490,23.77,0.900,bicubic,-11.336,-6.256,-55\nresnet152s.gluon_in1k,224,70.170,29.830,88.860,11.140,60.32,0.875,bicubic,-10.878,-6.582,+23\nvit_srelpos_small_patch16_224.sw_in1k,224,70.160,29.840,89.520,10.480,21.97,0.900,bicubic,-10.948,-6.060,+13\ncs3darknet_l.c2ns_in1k,288,70.150,29.850,89.600,10.400,21.16,0.950,bicubic,-10.732,-6.062,+43\nmobilenetv4_hybrid_medium.e500_r224_in1k,256,70.150,29.850,89.440,10.560,11.07,1.000,bicubic,-11.134,-6.306,-18\nxception41p.ra3_in1k,299,70.150,29.850,89.150,10.850,26.91,0.940,bicubic,-11.828,-6.634,-114\nswiftformer_l1.dist_in1k,224,70.150,29.850,88.920,11.080,12.06,0.950,bicubic,-10.756,-6.334,+37\nefficientnet_el.ra_in1k,300,70.100,29.900,89.180,10.820,10.59,0.904,bicubic,-11.208,-6.358,-24\nresmlp_36_224.fb_distilled_in1k,224,70.100,29.900,89.060,10.940,44.69,0.875,bicubic,-11.050,-6.448,-3\nresnet50.d_in1k,288,70.090,29.910,88.580,11.420,25.56,1.000,bicubic,-10.922,-6.876,+20\nseresnet50.a2_in1k,288,70.080,29.920,88.700,11.300,28.09,1.000,bicubic,-11.014,-6.530,+7\nresnet101.a1_in1k,224,70.080,29.920,88.570,11.430,44.55,0.950,bicubic,-11.414,-6.594,-61\nresnetv2_50d_evos.ah_in1k,224,70.070,29.930,89.070,10.930,25.59,0.950,bicubic,-10.758,-6.200,+45\npoolformer_s36.sail_in1k,224,70.060,29.940,89.170,10.830,30.86,0.900,bicubic,-11.368,-6.460,-47\nmobilenetv4_conv_medium.e500_r256_in1k
,320,70.050,29.950,89.480,10.520,9.72,1.000,bicubic,-10.840,-6.270,+31\nrepvgg_b3.rvgg_in1k,224,70.020,29.980,88.640,11.360,123.09,0.875,bilinear,-10.508,-6.688,+83\nfasternet_s.in1k,224,70.010,29.990,89.070,10.930,31.18,1.000,bicubic,-11.318,-6.608,-38\ninception_resnet_v2.tf_in1k,299,70.010,29.990,88.640,11.360,55.84,0.897,bicubic,-10.424,-6.672,+98\nedgenext_small.usi_in1k,256,70.000,30.000,89.230,10.770,5.59,0.950,bicubic,-11.068,-6.100,+1\nhaloregnetz_b.ra3_in1k,224,70.000,30.000,88.830,11.170,11.68,0.940,bicubic,-11.058,-6.366,+5\nlevit_256.fb_dist_in1k,224,69.980,30.020,89.170,10.830,18.89,0.900,bicubic,-11.526,-6.296,-75\nlevit_conv_256.fb_dist_in1k,224,69.980,30.020,89.170,10.830,18.89,0.900,bicubic,-11.526,-6.296,-73\nresnet50.c1_in1k,288,69.950,30.050,88.920,11.080,25.56,1.000,bicubic,-10.978,-6.634,+18\nregnety_320.pycls_in1k,224,69.950,30.050,88.750,11.250,145.05,0.875,bicubic,-10.846,-6.496,+41\necaresnet50t.a1_in1k,224,69.920,30.080,88.650,11.350,25.57,0.950,bicubic,-11.362,-6.500,-37\nresnet50d.a1_in1k,288,69.920,30.080,88.330,11.670,25.58,1.000,bicubic,-11.546,-7.348,-67\nseresnext101_32x4d.gluon_in1k,224,69.910,30.090,88.880,11.120,48.96,0.875,bicubic,-10.976,-6.388,+20\nresnext50_32x4d.ra_in1k,288,69.890,30.110,88.410,11.590,25.03,0.950,bicubic,-10.830,-6.934,+46\nmobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k,256,69.880,30.120,88.920,11.080,8.46,0.950,bicubic,-10.804,-6.522,+51\nfastvit_s12.apple_dist_in1k,256,69.860,30.140,88.700,11.300,9.47,0.900,bicubic,-11.214,-6.578,-10\nresnet101.a2_in1k,224,69.860,30.140,88.270,11.730,44.55,0.950,bicubic,-11.484,-6.928,-52\necaresnet50d_pruned.miil_in1k,288,69.850,30.150,89.590,10.410,19.94,0.950,bicubic,-10.958,-5.976,+31\nese_vovnet39b.ra_in1k,288,69.850,30.150,89.540,10.460,24.57,0.950,bicubic,-10.528,-5.820,+94\necaresnet50t.ra2_in1k,256,69.820,30.180,89.390,10.610,25.57,0.875,bicubic,-11.630,-6.288,-72\nresnet152d.gluon_in1k,224,69.810,30.190,88.480,11.520,60.21,0.875,bicubic,-10.666,-7.114,+69\nseresnet5
0.a1_in1k,288,69.810,30.190,88.450,11.550,28.09,1.000,bicubic,-11.304,-6.886,-20\npit_s_224.in1k,224,69.800,30.200,88.850,11.150,23.46,0.900,bicubic,-11.314,-6.788,-23\nregnety_016.tv2_in1k,224,69.790,30.210,89.350,10.650,11.20,0.965,bicubic,-10.878,-5.986,+44\nresnetaa50.a1h_in1k,224,69.780,30.220,88.770,11.230,25.56,0.950,bicubic,-10.834,-6.442,+50\nsehalonet33ts.ra2_in1k,256,69.770,30.230,88.580,11.420,13.69,0.940,bicubic,-11.206,-6.692,-8\necaresnet101d_pruned.miil_in1k,224,69.760,30.240,89.390,10.610,24.88,0.875,bicubic,-11.050,-6.252,+21\nmobilevitv2_150.cvnets_in22k_ft_in1k,256,69.760,30.240,89.060,10.940,10.59,0.888,bicubic,-11.706,-6.484,-85\nmaxvit_rmlp_pico_rw_256.sw_in1k,256,69.740,30.260,89.130,10.870,7.52,0.950,bicubic,-10.810,-6.056,+49\nxcit_tiny_24_p16_224.fb_dist_in1k,224,69.740,30.260,88.700,11.300,12.12,1.000,bicubic,-10.722,-6.502,+66\nresnext50_32x4d.a1_in1k,288,69.730,30.270,88.360,11.640,25.03,1.000,bicubic,-11.750,-6.792,-93\nresnet50d.ra4_e3600_r224_in1k,224,69.720,30.280,88.860,11.140,25.58,0.950,bicubic,-11.236,-6.520,-11\nresnext50_32x4d.a2_in1k,288,69.720,30.280,88.260,11.740,25.03,1.000,bicubic,-11.584,-6.854,-61\nmobilevitv2_200.cvnets_in1k,256,69.700,30.300,88.450,11.550,18.45,0.888,bicubic,-11.448,-6.930,-41\nresnet50.a1_in1k,288,69.690,30.310,88.270,11.730,25.56,1.000,bicubic,-11.550,-6.842,-54\ntresnet_m.miil_in1k,224,69.680,30.320,87.940,12.060,31.39,0.875,bilinear,-11.136,-6.922,+10\nfastvit_sa12.apple_in1k,256,69.670,30.330,88.920,11.080,11.58,0.900,bicubic,-11.176,-6.426,+6\ntiny_vit_5m_224.dist_in22k_ft_in1k,224,69.660,30.340,89.380,10.620,5.39,0.950,bicubic,-11.214,-6.288,0\nfastvit_t12.apple_dist_in1k,256,69.650,30.350,88.290,11.710,7.55,0.900,bicubic,-10.714,-6.750,+75\necaresnet50d.miil_in1k,224,69.640,30.360,89.350,10.650,25.58,0.875,bicubic,-10.994,-5.960,+33\nconvnextv2_pico.fcmae_ft_in1k,288,69.640,30.360,89.260,10.740,9.07,0.950,bicubic,-11.422,-6.216,-31\nmobilenetv3_large_150d.ra4_e3600_r256_in1k,256,69.620,30.380,
89.020,10.980,14.62,0.950,bilinear,-11.374,-6.454,-25\nresmlp_24_224.fb_distilled_in1k,224,69.620,30.380,89.000,11.000,30.02,0.875,bicubic,-11.146,-6.220,+10\nresnext50d_32x4d.bt_in1k,288,69.610,30.390,89.250,10.750,25.05,0.950,bicubic,-11.066,-6.176,+23\nresnext50_32x4d.fb_ssl_yfcc100m_ft_in1k,224,69.600,30.400,89.290,10.710,25.03,0.875,bilinear,-10.716,-6.096,+74\nresnetv2_50d_gn.ah_in1k,224,69.600,30.400,88.890,11.110,25.57,0.950,bicubic,-11.200,-6.466,+4\nlambda_resnet50ts.a1h_in1k,256,69.590,30.410,88.820,11.180,21.54,0.950,bicubic,-11.594,-6.274,-58\ninception_resnet_v2.tf_ens_adv_in1k,299,69.590,30.410,88.330,11.670,55.84,0.897,bicubic,-10.408,-6.610,+109\ngcresnet50t.ra2_in1k,256,69.570,30.430,88.940,11.060,25.90,0.900,bicubic,-11.374,-6.518,-25\nresnext50_32x4d.a1h_in1k,224,69.570,30.430,88.580,11.420,25.03,0.950,bicubic,-11.570,-6.740,-53\nefficientnet_b3_pruned.in1k,300,69.550,30.450,88.800,11.200,9.86,0.904,bicubic,-11.314,-6.466,-12\ngernet_m.idstcv_in1k,224,69.550,30.450,88.590,11.410,21.14,0.875,bilinear,-11.152,-6.596,+14\nvit_wee_patch16_reg1_gap_256.sbb_in1k,256,69.520,30.480,89.370,10.630,13.42,0.950,bicubic,-10.742,-5.990,+74\nefficientnet_el_pruned.in1k,300,69.520,30.480,88.900,11.100,10.59,0.904,bicubic,-10.764,-6.318,+69\ncspresnext50.ra_in1k,256,69.520,30.480,88.710,11.290,20.57,0.887,bilinear,-11.028,-6.620,+27\nefficientnet_b1.ra4_e3600_r240_in1k,240,69.510,30.490,89.020,10.980,7.79,0.900,bicubic,-10.894,-6.132,+50\nresnet50.tv2_in1k,224,69.510,30.490,88.630,11.370,25.56,0.965,bilinear,-11.336,-6.800,-14\nrepvgg_b3g4.rvgg_in1k,224,69.510,30.490,88.540,11.460,83.83,0.875,bilinear,-10.714,-6.560,+80\nconvnext_nano_ols.d1h_in1k,224,69.490,30.510,88.430,11.570,15.65,0.950,bicubic,-11.416,-6.944,-30\nresnext101_64x4d.gluon_in1k,224,69.490,30.510,88.270,11.730,83.46,0.875,bicubic,-11.150,-6.730,+14\nxcit_tiny_12_p16_384.fb_dist_in1k,384,69.480,30.520,88.980,11.020,6.72,1.000,bicubic,-11.474,-6.658,-39\nconvmixer_768_32.in1k,224,69.480,30.520,88.9
40,11.060,21.11,0.960,bicubic,-10.678,-6.130,+81\nrepvit_m1_1.dist_300e_in1k,224,69.480,30.520,88.700,11.300,8.80,0.950,bicubic,-11.346,-6.474,-17\nhgnet_tiny.paddle_in1k,288,69.470,30.530,89.210,10.790,14.74,1.000,bicubic,-11.174,-6.346,+8\ngcresnext50ts.ch_in1k,256,69.470,30.530,88.740,11.260,15.67,0.900,bicubic,-11.138,-6.440,+12\nrexnet_150.nav_in1k,224,69.460,30.540,88.990,11.010,9.73,0.875,bicubic,-10.862,-5.990,+51\nefficientvit_b1.r288_in1k,288,69.460,30.540,88.590,11.410,9.10,1.000,bicubic,-10.862,-6.574,+51\nswin_tiny_patch4_window7_224.ms_in1k,224,69.450,30.550,89.020,10.980,28.29,0.900,bicubic,-11.938,-6.736,-109\ngcresnet33ts.ra2_in1k,288,69.450,30.550,88.910,11.090,19.88,1.000,bicubic,-11.138,-6.408,+9\nres2net101d.in1k,224,69.450,30.550,88.620,11.380,45.23,0.875,bilinear,-11.786,-6.726,-88\nregnetx_032.tv2_in1k,224,69.440,30.560,89.130,10.870,15.30,0.965,bicubic,-11.476,-6.118,-43\nvit_pwee_patch16_reg1_gap_256.sbb_in1k,256,69.440,30.560,89.080,10.920,15.25,0.950,bicubic,-10.642,-6.058,+76\nregnetx_320.pycls_in1k,224,69.440,30.560,88.150,11.850,107.81,0.875,bicubic,-10.818,-6.866,+59\nvit_base_patch32_224.augreg_in21k_ft_in1k,224,69.430,30.570,89.270,10.730,88.22,0.900,bicubic,-11.302,-6.304,-16\ncs3darknet_l.c2ns_in1k,256,69.420,30.580,88.970,11.030,21.16,0.887,bicubic,-10.930,-6.334,+40\ntf_efficientnetv2_b3.in21k_ft_in1k,240,69.390,30.610,89.090,10.910,14.36,0.900,bicubic,-11.688,-6.830,-73\ninception_v4.tf_in1k,299,69.390,30.610,88.650,11.350,42.68,0.875,bicubic,-10.754,-6.332,+68\nefficientnet_b2.ra_in1k,288,69.390,30.610,88.640,11.360,9.11,1.000,bicubic,-11.222,-6.680,0\neva02_tiny_patch14_336.mim_in22k_ft_in1k,336,69.370,30.630,89.300,10.700,5.76,1.000,bicubic,-11.278,-6.220,-8\nnf_resnet50.ra2_in1k,288,69.350,30.650,88.690,11.310,25.56,0.940,bicubic,-11.308,-6.660,-11\nxception71.tf_in1k,299,69.350,30.650,88.190,11.810,42.34,0.903,bicubic,-10.570,-6.718,+80\ntf_efficientnetv2_b3.in1k,240,69.330,30.670,88.290,11.710,14.36,0.904,bicubic,-11.378,
-7.140,-19\necaresnetlight.miil_in1k,224,69.320,30.680,89.200,10.800,30.16,0.875,bicubic,-11.134,-6.048,+13\ndm_nfnet_f0.dm_in1k,192,69.320,30.680,88.740,11.260,71.49,0.900,bicubic,-12.116,-6.932,-133\ndarknet53.c2ns_in1k,288,69.310,30.690,88.770,11.230,41.61,1.000,bicubic,-11.232,-6.658,-2\ngcvit_xxtiny.in1k,224,69.280,30.720,88.840,11.160,12.00,0.875,bicubic,-10.476,-6.218,+104\nresnext50_32x4d.tv2_in1k,224,69.280,30.720,88.410,11.590,25.03,0.965,bilinear,-11.910,-6.934,-99\nseresnet33ts.ra2_in1k,288,69.260,30.740,89.020,10.980,19.78,1.000,bicubic,-11.554,-6.326,-43\nrepvit_m1_0.dist_450e_in1k,224,69.260,30.740,88.720,11.280,7.30,0.950,bicubic,-11.170,-6.202,+15\nregnetz_b16.ra3_in1k,224,69.260,30.740,88.380,11.620,9.72,0.940,bicubic,-10.604,-6.602,+87\neca_resnet33ts.ra2_in1k,288,69.230,30.770,89.130,10.870,19.68,1.000,bicubic,-11.486,-6.252,-30\nresnet50_gn.a1h_in1k,224,69.200,30.800,88.410,11.590,25.56,0.940,bicubic,-10.874,-6.496,+59\ncs3darknet_focus_l.c2ns_in1k,288,69.190,30.810,89.330,10.670,21.15,0.950,bicubic,-11.704,-6.344,-64\nvit_small_patch16_384.augreg_in1k,384,69.190,30.810,88.840,11.160,22.20,1.000,bicubic,-11.940,-6.750,-99\nefficientformer_l1.snap_dist_in1k,224,69.190,30.810,88.480,11.520,12.29,0.950,bicubic,-11.312,-6.512,-7\nmobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k,224,69.190,30.810,88.090,11.910,8.46,0.900,bicubic,-10.926,-6.904,+53\nresnet50.b2k_in1k,288,69.170,30.830,88.490,11.510,25.56,1.000,bicubic,-11.296,-6.544,-5\nresnet50d.ra2_in1k,224,69.170,30.830,88.130,11.870,25.58,0.875,bicubic,-11.376,-7.030,-15\nedgenext_small_rw.sw_in1k,320,69.140,30.860,88.790,11.210,7.83,1.000,bicubic,-11.312,-6.416,-1\nmobilenetv4_hybrid_medium.e500_r224_in1k,224,69.130,30.870,88.820,11.180,11.07,0.950,bicubic,-11.302,-6.558,0\nrepvit_m2.dist_in1k,224,69.130,30.870,88.570,11.430,8.80,0.950,bicubic,-11.338,-6.602,-9\nwide_resnet50_2.tv2_in1k,176,69.130,30.870,88.070,11.930,68.88,0.875,bilinear,-11.312,-7.006,0\nmobilenetv4_conv_blur_medium.e500_r224_in1k,25
6,69.120,30.880,89.140,10.860,9.72,1.000,bicubic,-11.032,-6.158,+40\nmobilevitv2_175.cvnets_in1k,256,69.110,30.890,88.890,11.110,14.25,0.888,bicubic,-11.760,-6.390,-65\nmambaout_femto.in1k,288,69.090,30.910,88.650,11.350,7.30,1.000,bicubic,-10.822,-6.484,+60\ntf_efficientnetv2_b2.in1k,260,69.090,30.910,88.140,11.860,10.10,0.890,bicubic,-11.136,-6.870,+31\nmixnet_xl.ra_in1k,224,69.080,30.920,88.360,11.640,11.90,0.875,bicubic,-11.414,-6.618,-18\npoolformerv2_s24.sail_in1k,224,69.070,30.930,88.540,11.460,21.34,1.000,bicubic,-11.676,-6.778,-53\nlegacy_seresnext101_32x4d.in1k,224,69.060,30.940,88.100,11.900,48.96,0.875,bilinear,-11.214,-6.932,+18\nresnet50d.a2_in1k,288,69.030,30.970,87.960,12.040,25.58,1.000,bicubic,-12.134,-7.140,-120\nresnet50.c2_in1k,224,69.010,30.990,88.120,11.880,25.56,0.950,bicubic,-10.858,-6.742,+63\nmobilenetv4_conv_medium.e500_r256_in1k,256,69.000,31.000,88.780,11.220,9.72,0.950,bicubic,-10.916,-6.384,+49\nmobilevitv2_150.cvnets_in1k,256,69.000,31.000,88.020,11.980,10.59,0.888,bicubic,-11.380,-7.086,0\nresnetblur50.bt_in1k,288,68.990,31.010,88.400,11.600,25.56,0.950,bicubic,-11.258,-6.800,+19\nresnetrs101.tf_in1k,192,68.980,31.020,88.270,11.730,63.62,0.940,bicubic,-11.726,-6.970,-51\nvit_base_patch16_384.augreg_in1k,384,68.970,31.030,88.350,11.650,86.86,1.000,bicubic,-12.132,-6.984,-114\nregnety_120.pycls_in1k,224,68.970,31.030,88.180,11.820,51.82,0.875,bicubic,-11.412,-6.924,-7\nresnet152c.gluon_in1k,224,68.970,31.030,87.640,12.360,60.21,0.875,bicubic,-10.934,-7.214,+49\nvit_small_patch32_384.augreg_in21k_ft_in1k,384,68.950,31.050,89.650,10.350,22.92,1.000,bicubic,-11.526,-5.544,-29\nseresnet33ts.ra2_in1k,256,68.950,31.050,88.350,11.650,19.78,0.900,bicubic,-11.430,-6.700,-9\ncs3darknet_focus_l.c2ns_in1k,256,68.930,31.070,88.830,11.170,21.15,0.887,bicubic,-11.324,-6.464,+11\nresnet50.b1k_in1k,288,68.910,31.090,88.490,11.510,25.56,1.000,bicubic,-11.798,-6.688,-60\nresnet50d.a1_in1k,224,68.910,31.090,87.980,12.020,25.58,0.950,bicubic,-11.820,-6.69
8,-64\nconvnextv2_pico.fcmae_ft_in1k,224,68.900,31.100,88.450,11.550,9.07,0.875,bicubic,-11.404,-6.632,-4\nresnetv2_34d.ra4_e3600_r384_in1k,448,68.890,31.110,88.800,11.200,21.82,1.000,bicubic,-11.540,-6.482,-22\nconvnext_pico_ols.d1_in1k,288,68.890,31.110,88.700,11.300,9.06,1.000,bicubic,-11.568,-6.548,-30\nresnet50.a1h_in1k,224,68.880,31.120,88.310,11.690,25.56,1.000,bicubic,-11.774,-7.002,-58\nresnext101_32x4d.gluon_in1k,224,68.880,31.120,88.280,11.720,44.18,0.875,bicubic,-11.456,-6.628,-11\nhgnetv2_b1.ssld_stage2_ft_in1k,288,68.870,31.130,89.080,10.920,6.34,1.000,bicubic,-11.046,-6.108,+34\nresnetv2_50.a1h_in1k,224,68.870,31.130,88.430,11.570,25.55,0.950,bicubic,-11.540,-6.652,-25\nhrnet_w64.ms_in1k,224,68.870,31.130,88.120,11.880,128.06,0.875,bilinear,-10.588,-6.526,+92\necaresnet50t.a2_in1k,224,68.870,31.130,88.090,11.910,25.57,0.950,bicubic,-12.014,-6.932,-97\ncspdarknet53.ra_in1k,256,68.860,31.140,88.590,11.410,27.64,0.887,bilinear,-11.218,-6.470,+17\ngcresnet33ts.ra2_in1k,256,68.850,31.150,88.520,11.480,19.88,0.900,bicubic,-11.222,-6.466,+19\nseresnet50.ra2_in1k,224,68.830,31.170,88.600,11.400,28.09,0.875,bicubic,-11.430,-6.468,-5\npoolformer_s24.sail_in1k,224,68.800,31.200,88.250,11.750,21.39,0.900,bicubic,-11.494,-6.810,-14\neca_resnet33ts.ra2_in1k,256,68.790,31.210,88.510,11.490,19.68,0.900,bicubic,-11.294,-6.470,+11\nresnet101d.gluon_in1k,224,68.790,31.210,88.100,11.900,44.57,0.875,bicubic,-11.676,-7.206,-45\nresnet152.gluon_in1k,224,68.790,31.210,87.590,12.410,60.19,0.875,bicubic,-10.922,-7.130,+58\nmobilenetv4_conv_medium.e500_r224_in1k,256,68.780,31.220,88.870,11.130,9.72,1.000,bicubic,-11.056,-6.322,+41\nconvnext_pico.d1_in1k,288,68.770,31.230,88.370,11.630,9.05,0.950,bicubic,-11.656,-6.692,-37\ntf_efficientnet_b2.ap_in1k,260,68.770,31.230,88.320,11.680,9.11,0.890,bicubic,-11.530,-6.712,-20\nxception65.tf_in1k,299,68.770,31.230,88.250,11.750,39.92,0.903,bicubic,-10.788,-6.406,+69\nresnet50.a2_in1k,288,68.760,31.240,87.950,12.050,25.56,1.000,bicubic,-
12.014,-7.028,-92\nrepvgg_b2g4.rvgg_in1k,224,68.750,31.250,88.250,11.750,61.76,0.875,bilinear,-10.646,-6.430,+83\nwide_resnet101_2.tv2_in1k,176,68.750,31.250,87.600,12.400,126.89,0.875,bilinear,-11.744,-7.338,-58\nseresnet50.a1_in1k,224,68.730,31.270,87.500,12.500,28.09,0.950,bicubic,-11.282,-7.204,+8\nresnext50_32x4d.a2_in1k,224,68.730,31.270,87.380,12.620,25.03,0.950,bicubic,-11.714,-7.250,-49\nresnet101s.gluon_in1k,224,68.720,31.280,87.720,12.280,44.67,0.875,bicubic,-11.558,-7.440,-23\nresnext50d_32x4d.bt_in1k,224,68.710,31.290,88.220,11.780,25.05,0.875,bicubic,-10.962,-6.658,+52\nregnety_160.pycls_in1k,224,68.700,31.300,88.190,11.810,83.59,0.875,bicubic,-11.584,-6.792,-27\nnf_resnet50.ra2_in1k,256,68.690,31.310,88.430,11.570,25.56,0.940,bicubic,-11.546,-6.674,-17\nseresnet50.a2_in1k,224,68.690,31.310,87.490,12.510,28.09,0.950,bicubic,-11.412,-7.230,-5\nresnetv2_34d.ra4_e3600_r224_in1k,288,68.680,31.320,88.240,11.760,21.82,1.000,bicubic,-10.934,-6.520,+52\ndpn131.mx_in1k,224,68.670,31.330,87.390,12.610,79.25,0.875,bicubic,-11.166,-7.218,+27\nresnet152.a3_in1k,224,68.650,31.350,87.690,12.310,60.19,0.950,bicubic,-11.898,-7.314,-75\nmambaout_kobe.in1k,224,68.640,31.360,88.300,11.700,9.14,1.000,bicubic,-11.346,-6.682,+1\nhgnet_tiny.paddle_in1k,224,68.640,31.360,88.220,11.780,14.74,0.965,bicubic,-11.252,-6.834,+13\nresnet50.c1_in1k,224,68.640,31.360,87.770,12.230,25.56,0.950,bicubic,-11.126,-7.184,+30\nrepvit_m1_0.dist_300e_in1k,224,68.630,31.370,88.020,11.980,7.30,0.950,bicubic,-11.522,-6.750,-16\ngmlp_s16_224.ra3_in1k,224,68.600,31.400,87.950,12.050,19.42,0.875,bicubic,-11.052,-6.674,+44\nxcit_tiny_12_p8_224.fb_in1k,224,68.590,31.410,88.690,11.310,6.71,1.000,bicubic,-11.120,-6.128,+35\nefficientnet_b0.ra4_e3600_r224_in1k,256,68.590,31.410,87.890,12.110,5.29,1.000,bicubic,-10.774,-6.864,+71\nseresnext50_32x4d.gluon_in1k,224,68.560,31.440,88.150,11.850,27.56,0.875,bicubic,-11.356,-6.666,0\ndpn107.mx_in1k,224,68.560,31.440,88.090,11.910,86.92,0.875,bicubic,-11.606,-6.8
42,-24\nregnetx_160.pycls_in1k,224,68.550,31.450,88.250,11.750,54.28,0.875,bicubic,-11.294,-6.602,+14\ndeit_small_patch16_224.fb_in1k,224,68.530,31.470,88.170,11.830,22.05,0.900,bicubic,-11.326,-6.886,+12\nresnet50.d_in1k,224,68.530,31.470,87.650,12.350,25.56,0.950,bicubic,-11.374,-7.188,+1\nresnet152.tv2_in1k,176,68.520,31.480,87.720,12.280,60.19,0.875,bilinear,-11.702,-6.930,-30\nresnet50.ram_in1k,288,68.510,31.490,88.430,11.570,25.56,0.950,bicubic,-11.472,-6.608,-12\nregnety_080.pycls_in1k,224,68.510,31.490,87.970,12.030,39.18,0.875,bicubic,-11.364,-6.864,+3\nskresnext50_32x4d.ra_in1k,224,68.470,31.530,87.620,12.380,27.48,0.875,bicubic,-11.698,-7.024,-32\nresnet50.a1_in1k,224,68.470,31.530,87.590,12.410,25.56,0.950,bicubic,-11.912,-7.008,-64\nresnext50_32x4d.a1_in1k,224,68.470,31.530,87.380,12.620,25.03,0.950,bicubic,-12.056,-7.078,-87\nresnext50_32x4d.ra_in1k,224,68.450,31.550,87.560,12.440,25.03,0.875,bicubic,-11.350,-7.048,+11\ntf_efficientnet_b2.aa_in1k,260,68.440,31.560,88.040,11.960,9.11,0.890,bicubic,-11.634,-6.918,-24\nresnet101.a3_in1k,224,68.440,31.560,87.430,12.570,44.55,0.950,bicubic,-11.396,-7.278,+6\ndarknetaa53.c2ns_in1k,288,68.430,31.570,88.680,11.320,36.02,1.000,bilinear,-12.098,-6.574,-94\ndla102x2.in1k,224,68.410,31.590,87.800,12.200,41.28,0.875,bilinear,-11.038,-6.854,+44\ncspresnet50.ra_in1k,256,68.400,31.600,87.940,12.060,21.62,0.887,bilinear,-11.178,-6.758,+30\nfbnetv3_d.ra2_in1k,256,68.390,31.610,88.420,11.580,10.31,0.950,bilinear,-11.292,-6.532,+18\nres2net50d.in1k,224,68.390,31.610,88.020,11.980,25.72,0.875,bilinear,-11.890,-7.008,-56\ndarknet53.c2ns_in1k,256,68.390,31.610,88.000,12.000,41.61,0.887,bicubic,-11.618,-7.046,-27\ntf_efficientnet_b3.in1k,300,68.380,31.620,88.600,11.400,12.23,0.904,bicubic,-12.498,-6.708,-149\nxcit_tiny_24_p16_224.fb_in1k,224,68.380,31.620,88.100,11.900,12.12,1.000,bicubic,-11.094,-6.778,+37\nrexnet_130.nav_in1k,224,68.380,31.620,87.970,12.030,7.56,0.875,bicubic,-11.112,-6.710,+35\ncait_xxs36_224.fb_dist_in1k,
224,68.370,31.630,88.600,11.400,17.30,1.000,bicubic,-11.380,-6.266,+5\nvit_base_patch16_224.sam_in1k,224,68.370,31.630,87.670,12.330,86.57,0.900,bicubic,-11.876,-7.452,-53\nefficientvit_b1.r256_in1k,256,68.360,31.640,87.940,12.060,9.10,1.000,bicubic,-11.374,-6.846,+4\nregnety_064.pycls_in1k,224,68.350,31.650,88.020,11.980,30.58,0.875,bicubic,-11.384,-6.748,+4\nresnet50.fb_ssl_yfcc100m_ft_in1k,224,68.330,31.670,88.560,11.440,25.56,0.875,bilinear,-10.922,-6.266,+63\nhgnetv2_b1.ssld_stage1_in22k_in1k,288,68.310,31.690,88.840,11.160,6.34,1.000,bicubic,-10.740,-6.050,+83\nresnet50d.a2_in1k,224,68.310,31.690,87.290,12.710,25.58,0.950,bicubic,-11.958,-7.352,-65\ndpn98.mx_in1k,224,68.300,31.700,87.560,12.440,61.57,0.875,bicubic,-11.364,-7.084,+9\nmobilenetv4_conv_blur_medium.e500_r224_in1k,224,68.290,31.710,88.340,11.660,9.72,0.950,bicubic,-11.156,-6.150,+29\nresmlp_big_24_224.fb_in1k,224,68.280,31.720,87.690,12.310,129.14,0.875,bicubic,-12.754,-7.336,-183\nfastvit_s12.apple_in1k,256,68.260,31.740,87.840,12.160,9.47,0.900,bicubic,-11.622,-6.958,-25\necaresnet26t.ra2_in1k,320,68.250,31.750,88.640,11.360,16.01,0.950,bicubic,-11.650,-6.430,-28\nese_vovnet39b.ra_in1k,224,68.250,31.750,88.210,11.790,24.57,0.875,bicubic,-11.062,-6.512,+43\nresnext50_32x4d.gluon_in1k,224,68.240,31.760,87.400,12.600,25.03,0.875,bicubic,-11.114,-7.024,+36\nregnetx_120.pycls_in1k,224,68.230,31.770,87.560,12.440,46.11,0.875,bicubic,-11.378,-7.174,+7\nefficientnet_b2_pruned.in1k,260,68.220,31.780,88.100,11.900,8.31,0.890,bicubic,-11.684,-6.566,-35\nresnet50.ra_in1k,288,68.220,31.780,87.940,12.060,25.56,0.950,bicubic,-11.644,-7.030,-26\nefficientformerv2_s1.snap_dist_in1k,224,68.180,31.820,88.200,11.800,6.19,0.950,bicubic,-11.504,-6.512,-5\necaresnet50d_pruned.miil_in1k,224,68.170,31.830,88.350,11.650,19.94,0.875,bicubic,-11.544,-6.512,-10\nefficientnet_b2.ra_in1k,256,68.160,31.840,87.750,12.250,9.11,0.875,bicubic,-11.164,-6.832,+34\nmobileone_s4.apple_in1k,224,68.150,31.850,87.100,12.900,14.95,0.900,bi
linear,-11.296,-7.820,+18\nresnetv2_34.ra4_e3600_r224_in1k,288,68.140,31.860,87.920,12.080,21.80,1.000,bicubic,-10.930,-6.640,+62\npit_xs_distilled_224.in1k,224,68.130,31.870,87.820,12.180,11.00,0.900,bicubic,-11.042,-6.530,+52\ntf_efficientnet_el.in1k,300,68.110,31.890,88.000,12.000,10.59,0.904,bicubic,-12.136,-6.764,-76\ntiny_vit_5m_224.in1k,224,68.060,31.940,87.660,12.340,5.39,0.950,bicubic,-11.136,-7.132,+48\nfbnetv3_b.ra2_in1k,256,68.050,31.950,87.900,12.100,8.60,0.950,bilinear,-11.104,-6.208,+50\nresmlp_36_224.fb_in1k,224,68.040,31.960,88.170,11.830,44.69,0.875,bicubic,-11.732,-6.718,-25\nresnet50.tv2_in1k,176,68.030,31.970,87.360,12.640,25.56,0.875,bilinear,-11.404,-7.280,+13\nfastvit_t12.apple_in1k,256,68.020,31.980,87.700,12.300,7.55,0.900,bicubic,-11.256,-6.870,+32\nconvnext_pico_ols.d1_in1k,224,68.000,32.000,87.890,12.110,9.06,0.950,bicubic,-11.542,-6.698,-2\nconvnext_pico.d1_in1k,224,67.990,32.010,87.730,12.270,9.05,0.875,bicubic,-11.526,-6.824,+1\ndpn68b.ra_in1k,288,67.990,32.010,87.280,12.720,12.61,1.000,bicubic,-11.352,-7.150,+20\nregnetx_080.pycls_in1k,224,67.980,32.020,87.060,12.940,39.57,0.875,bicubic,-11.234,-7.482,+38\nresnetv2_34d.ra4_e3600_r384_in1k,384,67.970,32.030,88.200,11.800,21.82,1.000,bicubic,-11.822,-6.702,-33\nedgenext_small_rw.sw_in1k,256,67.960,32.040,88.310,11.690,7.83,0.900,bicubic,-11.636,-6.208,-12\nresnet50d.gluon_in1k,224,67.950,32.050,86.930,13.070,25.58,0.875,bicubic,-11.130,-7.528,+46\nregnetx_016.tv2_in1k,224,67.930,32.070,88.130,11.870,9.19,0.965,bicubic,-11.510,-6.638,+3\ntf_efficientnet_lite3.in1k,300,67.930,32.070,87.700,12.300,8.20,0.904,bilinear,-11.878,-7.208,-39\nresnetrs50.tf_in1k,224,67.920,32.080,87.640,12.360,35.69,0.910,bicubic,-12.014,-7.332,-65\ntf_efficientnetv2_b2.in1k,208,67.910,32.090,87.010,12.990,10.10,0.890,bicubic,-11.298,-7.592,+32\nlegacy_seresnext50_32x4d.in1k,224,67.900,32.100,87.620,12.380,27.56,0.875,bilinear,-11.180,-6.808,+42\nresnetv2_50x1_bit.goog_in21k_ft_in1k,448,67.890,32.110,89.100,10.9
00,25.55,1.000,bilinear,-12.496,-6.594,-121\nresnet34.ra4_e3600_r224_in1k,288,67.890,32.110,87.780,12.220,21.80,1.000,bicubic,-11.082,-6.674,+50\nhgnetv2_b1.ssld_stage2_ft_in1k,224,67.870,32.130,88.010,11.990,6.34,0.965,bicubic,-11.014,-6.474,+59\nlevit_192.fb_dist_in1k,224,67.870,32.130,87.900,12.100,10.95,0.900,bicubic,-11.988,-6.908,-53\nnf_regnet_b1.ra2_in1k,288,67.860,32.140,87.990,12.010,10.22,0.900,bicubic,-11.508,-6.736,+1\ntf_efficientnetv2_b1.in1k,240,67.860,32.140,87.670,12.330,8.14,0.882,bicubic,-11.658,-6.912,-17\nresnet50.b2k_in1k,224,67.860,32.140,87.320,12.680,25.56,0.950,bicubic,-11.528,-7.356,-4\nlevit_conv_192.fb_dist_in1k,224,67.850,32.150,87.890,12.110,10.95,0.900,bicubic,-12.010,-6.912,-58\nefficientvit_b1.r224_in1k,224,67.850,32.150,87.260,12.740,9.10,0.950,bicubic,-11.404,-7.040,+17\nrepvit_m0_9.dist_450e_in1k,224,67.830,32.170,87.670,12.330,5.49,0.950,bicubic,-11.228,-6.718,+36\neca_botnext26ts_256.c1_in1k,256,67.830,32.170,86.970,13.030,10.59,0.950,bicubic,-11.432,-7.630,+11\nresnet101.tv2_in1k,176,67.820,32.180,87.350,12.650,44.55,0.875,bilinear,-12.088,-7.256,-73\nresnet50.a2_in1k,224,67.810,32.190,86.870,13.130,25.56,0.950,bicubic,-12.018,-7.690,-56\nhrnet_w48.ms_in1k,224,67.800,32.200,87.340,12.660,77.47,0.875,bilinear,-11.528,-7.176,-3\nresnet50.b1k_in1k,224,67.800,32.200,87.340,12.660,25.56,0.950,bicubic,-11.764,-7.282,-30\nresnet50.bt_in1k,288,67.750,32.250,87.890,12.110,25.56,0.950,bicubic,-11.902,-7.018,-40\nresnext101_32x8d.tv_in1k,224,67.750,32.250,87.350,12.650,88.79,0.875,bilinear,-11.558,-7.180,0\ntf_efficientnet_b0.ns_jft_in1k,224,67.740,32.260,87.950,12.050,5.29,0.875,bicubic,-10.940,-6.422,+65\ncoat_lite_mini.in1k,224,67.740,32.260,87.790,12.210,11.01,0.900,bicubic,-11.364,-6.816,+21\nhrnet_w44.ms_in1k,224,67.710,32.290,87.510,12.490,67.06,0.875,bilinear,-11.180,-6.866,+41\ndarknetaa53.c2ns_in1k,256,67.700,32.300,88.030,11.970,36.02,0.887,bilinear,-12.054,-6.874,-58\nlambda_resnet26t.c1_in1k,256,67.700,32.300,87.730,12.270,
10.96,0.940,bicubic,-11.426,-6.824,+15\nresmlp_24_224.fb_in1k,224,67.700,32.300,87.570,12.430,30.02,0.875,bicubic,-11.680,-6.980,-16\necaresnet50t.a3_in1k,224,67.680,32.320,87.520,12.480,25.57,0.950,bicubic,-11.868,-7.190,-38\nhalonet26t.a1h_in1k,256,67.680,32.320,87.320,12.680,12.48,0.950,bicubic,-11.454,-7.020,+12\nefficientnet_em.ra2_in1k,240,67.660,32.340,88.210,11.790,6.90,0.882,bicubic,-11.596,-6.318,-2\nregnetx_064.pycls_in1k,224,67.650,32.350,87.470,12.530,26.21,0.875,bicubic,-11.412,-6.998,+17\nregnety_040.pycls_in1k,224,67.650,32.350,87.370,12.630,20.65,0.875,bicubic,-11.596,-7.296,0\neca_halonext26ts.c1_in1k,256,67.640,32.360,87.260,12.740,10.76,0.940,bicubic,-11.878,-7.450,-39\ndla169.in1k,224,67.620,32.380,87.450,12.550,53.39,0.875,bilinear,-11.082,-6.884,+52\nefficientnet_b1.ft_in1k,256,67.610,32.390,87.470,12.530,7.79,1.000,bicubic,-11.196,-6.876,+40\ndpn92.mx_in1k,224,67.580,32.420,87.280,12.720,37.67,0.875,bicubic,-12.452,-7.596,-105\nresnext50_32x4d.a3_in1k,224,67.580,32.420,86.890,13.110,25.03,0.950,bicubic,-11.678,-7.418,-10\nmambaout_femto.in1k,224,67.570,32.430,87.630,12.370,7.30,1.000,bicubic,-11.310,-6.778,+29\nconvnext_tiny.fb_in22k_ft_in1k,288,67.570,32.430,87.470,12.530,28.59,1.000,bicubic,-11.380,-6.810,+19\nresnext50_32x4d.tv2_in1k,176,67.560,32.440,86.720,13.280,25.03,0.875,bilinear,-11.824,-7.582,-32\ninception_v3.gluon_in1k,299,67.550,32.450,87.260,12.740,23.83,0.875,bicubic,-11.256,-7.112,+33\nlegacy_xception.tf_in1k,299,67.540,32.460,87.530,12.470,22.86,0.897,bicubic,-11.518,-6.866,+7\nmobilevitv2_125.cvnets_in1k,256,67.540,32.460,87.470,12.530,7.48,0.888,bicubic,-12.142,-7.374,-65\nregnety_008_tv.tv2_in1k,224,67.520,32.480,87.890,12.110,6.43,0.965,bicubic,-11.152,-6.490,+45\nresnet33ts.ra2_in1k,288,67.510,32.490,87.920,12.080,19.68,1.000,bicubic,-12.200,-7.138,-72\nresnetblur50.bt_in1k,224,67.510,32.490,87.480,12.520,25.56,0.875,bicubic,-11.796,-7.058,-25\nhrnet_w40.ms_in1k,224,67.510,32.490,87.050,12.950,57.56,0.875,bilinear,-11.4
18,-7.430,+15\nhgnetv2_b1.ssld_stage1_in22k_in1k,224,67.500,32.500,87.870,12.130,6.34,0.965,bicubic,-10.580,-6.302,+95\nregnety_032.pycls_in1k,224,67.500,32.500,87.230,12.770,19.44,0.875,bicubic,-11.402,-7.196,+13\ntf_efficientnet_b1.ap_in1k,240,67.490,32.510,87.520,12.480,7.79,0.882,bicubic,-11.798,-6.796,-27\nlegacy_seresnet152.in1k,224,67.490,32.510,87.490,12.510,66.82,0.875,bilinear,-11.172,-6.880,+39\nresnet34d.ra2_in1k,288,67.480,32.520,87.880,12.120,21.82,0.950,bicubic,-10.960,-6.468,+62\nresnet50.ram_in1k,224,67.460,32.540,87.370,12.630,25.56,0.875,bicubic,-11.574,-7.016,0\nresnet32ts.ra2_in1k,288,67.450,32.550,87.490,12.510,17.96,1.000,bicubic,-11.938,-7.090,-48\nres2net101_26w_4s.in1k,224,67.420,32.580,87.230,12.770,45.21,0.875,bilinear,-11.772,-7.236,-18\nconvnext_femto_ols.d1_in1k,288,67.400,32.600,87.320,12.680,5.23,0.950,bicubic,-11.516,-7.214,+5\ntf_efficientnet_cc_b1_8e.in1k,240,67.380,32.620,87.290,12.710,39.72,0.882,bicubic,-11.938,-7.094,-40\nres2net50_26w_8s.in1k,224,67.380,32.620,87.170,12.830,48.40,0.875,bilinear,-11.570,-7.538,+2\nresnet101.gluon_in1k,224,67.360,32.640,87.050,12.950,44.55,0.875,bicubic,-11.946,-7.582,-37\necaresnet26t.ra2_in1k,256,67.350,32.650,87.730,12.270,16.01,0.875,bicubic,-11.544,-6.818,+4\nefficientnet_b0.ra4_e3600_r224_in1k,224,67.340,32.660,87.140,12.860,5.29,0.900,bicubic,-11.228,-7.200,+35\nmobilenetv4_conv_medium.e500_r224_in1k,224,67.320,32.680,87.940,12.060,9.72,0.950,bicubic,-11.772,-6.848,-18\nresnet101c.gluon_in1k,224,67.310,32.690,87.070,12.930,44.57,0.875,bicubic,-12.232,-7.636,-71\nhgnetv2_b0.ssld_stage2_ft_in1k,288,67.290,32.710,87.700,12.300,6.00,1.000,bicubic,-11.300,-6.688,+29\ncait_xxs24_224.fb_dist_in1k,224,67.290,32.710,87.510,12.490,11.96,1.000,bicubic,-11.110,-6.816,+52\nxception41.tf_in1k,299,67.290,32.710,87.120,12.880,26.97,0.903,bicubic,-11.260,-7.162,+32\nvit_relpos_base_patch32_plus_rpn_256.sw_in1k,256,67.270,32.730,86.510,13.490,119.42,0.900,bicubic,-12.238,-7.600,-71\nrepvit_m0_9.dist_300e_
in1k,224,67.260,32.740,87.160,12.840,5.49,0.950,bicubic,-11.392,-6.954,+24\nresnet50.a1h_in1k,176,67.250,32.750,87.130,12.870,25.56,0.900,bicubic,-12.014,-7.368,-44\nresnet50.ra_in1k,224,67.250,32.750,86.940,13.060,25.56,0.875,bicubic,-11.570,-7.374,+2\nnf_regnet_b1.ra2_in1k,256,67.210,32.790,87.220,12.780,10.22,0.900,bicubic,-11.500,-7.160,+13\nrepghostnet_200.in1k,224,67.200,32.800,87.220,12.780,9.80,0.875,bicubic,-11.602,-7.112,+4\nbotnet26t_256.c1_in1k,256,67.190,32.810,87.480,12.520,12.49,0.950,bicubic,-12.066,-7.314,-44\nregnetx_032.pycls_in1k,224,67.190,32.810,86.930,13.070,15.30,0.875,bicubic,-10.970,-7.234,+62\ntf_efficientnet_b2.in1k,260,67.180,32.820,87.510,12.490,9.11,0.890,bicubic,-12.432,-7.204,-91\ncoat_tiny.in1k,224,67.160,32.840,87.390,12.610,5.50,0.900,bicubic,-11.282,-6.654,+37\nconvnext_femto.d1_in1k,288,67.150,32.850,87.500,12.500,5.22,0.950,bicubic,-11.562,-6.936,+6\nresnet33ts.ra2_in1k,256,67.150,32.850,87.390,12.610,19.68,0.900,bicubic,-12.066,-7.186,-45\nlegacy_seresnet101.in1k,224,67.150,32.850,87.110,12.890,49.33,0.875,bilinear,-11.220,-6.996,+41\ndla60_res2net.in1k,224,67.140,32.860,87.060,12.940,20.85,0.875,bilinear,-11.340,-7.144,+27\nstarnet_s4.in1k,224,67.140,32.860,86.980,13.020,7.48,0.875,bicubic,-11.684,-7.316,-10\nconvnextv2_femto.fcmae_ft_in1k,288,67.120,32.880,87.420,12.580,5.23,0.950,bicubic,-12.222,-7.170,-68\nresnest26d.gluon_in1k,224,67.120,32.880,87.120,12.880,17.07,0.875,bilinear,-11.356,-7.180,+24\nresnet152.a3_in1k,160,67.120,32.880,86.510,13.490,60.19,0.950,bicubic,-11.776,-7.630,-20\nshvit_s4.in1k,256,67.090,32.910,87.700,12.300,16.59,0.875,bicubic,-12.272,-6.670,-74\nfbnetv3_d.ra2_in1k,224,67.090,32.910,87.650,12.350,10.31,0.950,bilinear,-11.568,-6.804,+6\ndla60x.in1k,224,67.060,32.940,87.230,12.770,17.35,0.875,bilinear,-11.174,-6.792,+42\nmixnet_l.ft_in1k,224,67.060,32.940,86.770,13.230,7.33,0.875,bicubic,-11.910,-7.410,-31\nresnet152.tv_in1k,224,67.040,32.960,87.460,12.540,60.19,0.875,bilinear,-11.290,-6.676,+34\nre
s2net50_26w_6s.in1k,224,67.030,32.970,86.860,13.140,37.05,0.875,bilinear,-11.556,-7.260,+4\ndla102x.in1k,224,67.010,32.990,86.730,13.270,26.31,0.875,bilinear,-11.506,-7.496,+10\nvisformer_tiny.in1k,224,66.980,33.020,86.910,13.090,10.32,0.900,bicubic,-11.180,-7.178,+43\nxcit_tiny_12_p16_224.fb_dist_in1k,224,66.970,33.030,87.390,12.610,6.72,1.000,bicubic,-11.602,-6.820,+2\nrepvgg_b2.rvgg_in1k,224,66.960,33.040,87.270,12.730,89.02,0.875,bilinear,-11.828,-7.144,-17\nresnet50s.gluon_in1k,224,66.940,33.060,86.820,13.180,25.68,0.875,bicubic,-11.770,-7.428,-10\npit_xs_224.in1k,224,66.920,33.080,87.200,12.800,10.62,0.900,bicubic,-11.276,-6.956,+37\nresnet34.a1_in1k,288,66.920,33.080,86.220,13.780,21.80,1.000,bicubic,-11.010,-7.538,+61\npvt_v2_b1.in1k,224,66.900,33.100,87.370,12.630,14.01,0.900,bicubic,-11.798,-7.120,-11\nrepvit_m1.dist_in1k,224,66.890,33.110,87.150,12.850,5.49,0.950,bicubic,-11.652,-6.932,+1\nfasternet_t2.in1k,224,66.890,33.110,86.860,13.140,14.98,1.000,bicubic,-11.848,-7.472,-19\nlambda_resnet26rpt_256.c1_in1k,256,66.840,33.160,87.050,12.950,10.99,0.940,bicubic,-12.120,-7.370,-44\ntf_efficientnet_b1.aa_in1k,240,66.830,33.170,86.930,13.070,7.79,0.882,bicubic,-12.010,-7.272,-32\nrepvgg_b1.rvgg_in1k,224,66.820,33.180,86.720,13.280,57.42,0.875,bilinear,-11.550,-7.544,+17\ndpn68b.ra_in1k,224,66.820,33.180,86.540,13.460,12.61,0.950,bicubic,-11.726,-7.556,-4\nxcit_nano_12_p8_384.fb_dist_in1k,384,66.800,33.200,87.020,12.980,3.05,1.000,bicubic,-11.010,-6.796,+64\nmobilevit_s.cvnets_in1k,256,66.790,33.210,86.920,13.080,5.58,0.900,bicubic,-11.508,-7.248,+18\nresnetv2_34d.ra4_e3600_r224_in1k,224,66.780,33.220,86.550,13.450,21.82,0.900,bicubic,-11.490,-7.400,+20\nhrnet_w18.ms_aug_in1k,224,66.770,33.230,87.450,12.550,21.30,0.950,bilinear,-11.340,-6.612,+31\nresnet50d.a3_in1k,224,66.760,33.240,86.520,13.480,25.58,0.950,bicubic,-11.982,-7.716,-29\nfbnetv3_b.ra2_in1k,224,66.740,33.260,86.970,13.030,8.60,0.950,bilinear,-11.444,-7.268,+24\nresnet32ts.ra2_in1k,256,66.730,33.27
0,87.050,12.950,17.96,0.900,bicubic,-12.324,-7.318,-61\ndla60_res2next.in1k,224,66.730,33.270,86.900,13.100,17.03,0.875,bilinear,-11.758,-7.100,-9\nresnet50.am_in1k,224,66.720,33.280,86.700,13.300,25.56,0.875,bicubic,-12.284,-7.692,-60\nefficientnet_es.ra_in1k,224,66.720,33.280,86.650,13.350,5.44,0.875,bicubic,-11.380,-7.278,+27\nhrnet_w30.ms_in1k,224,66.710,33.290,86.730,13.270,37.71,0.875,bilinear,-11.492,-7.490,+16\nresnet50c.gluon_in1k,224,66.700,33.300,86.090,13.910,25.58,0.875,bicubic,-11.316,-7.862,+32\nhrnet_w32.ms_in1k,224,66.690,33.310,87.250,12.750,41.23,0.875,bilinear,-11.762,-6.934,-6\nswiftformer_s.dist_in1k,224,66.680,33.320,86.870,13.130,6.09,0.950,bicubic,-11.782,-7.110,-9\ntf_mixnet_l.in1k,224,66.680,33.320,86.390,13.610,7.33,0.875,bicubic,-12.094,-7.610,-40\nresnet50.bt_in1k,224,66.670,33.330,86.940,13.060,25.56,0.875,bicubic,-11.774,-7.334,-9\nseresnext26d_32x4d.bt_in1k,288,66.670,33.330,86.700,13.300,16.81,0.950,bicubic,-12.146,-7.566,-47\nregnetx_040.pycls_in1k,224,66.660,33.340,86.540,13.460,22.12,0.875,bicubic,-11.826,-7.544,-18\nseresnext26t_32x4d.bt_in1k,288,66.640,33.360,86.790,13.210,16.81,0.950,bicubic,-12.096,-7.528,-41\nselecsls60b.in1k,224,66.640,33.360,86.570,13.430,32.77,0.875,bicubic,-11.770,-7.596,-7\nhgnetv2_b0.ssld_stage1_in22k_in1k,288,66.630,33.370,87.590,12.410,6.00,1.000,bicubic,-11.408,-6.654,+21\ndla102.in1k,224,66.590,33.410,86.940,13.060,33.27,0.875,bilinear,-11.426,-7.054,+22\nvit_small_patch16_224.augreg_in1k,224,66.590,33.410,86.840,13.160,22.05,0.900,bicubic,-12.252,-7.440,-57\nwide_resnet101_2.tv_in1k,224,66.550,33.450,86.970,13.030,126.89,0.875,bilinear,-12.312,-7.342,-60\ntf_efficientnet_b1.in1k,240,66.530,33.470,86.690,13.310,7.79,0.882,bicubic,-12.016,-7.292,-31\ntf_efficientnetv2_b0.in1k,224,66.510,33.490,86.610,13.390,7.14,0.875,bicubic,-11.876,-7.424,-11\nlevit_128.fb_dist_in1k,224,66.480,33.520,86.720,13.280,9.21,0.900,bicubic,-12.006,-7.278,-25\ncs3darknet_m.c2ns_in1k,288,66.470,33.530,87.010,12.990,9.31,0.
950,bicubic,-11.166,-7.006,+47\nlevit_conv_128.fb_dist_in1k,224,66.470,33.530,86.720,13.280,9.21,0.900,bicubic,-12.018,-7.424,-30\nseresnext26ts.ch_in1k,288,66.420,33.580,86.640,13.360,10.39,1.000,bicubic,-11.872,-7.436,-9\nvit_tiny_patch16_384.augreg_in21k_ft_in1k,384,66.410,33.590,87.140,12.860,5.79,1.000,bicubic,-12.028,-7.402,-21\nvit_base_patch32_384.augreg_in1k,384,66.410,33.590,86.910,13.090,88.30,1.000,bicubic,-12.340,-7.328,-56\ninception_v3.tf_adv_in1k,299,66.400,33.600,86.410,13.590,23.83,0.875,bicubic,-11.198,-7.316,+47\nwide_resnet50_2.tv_in1k,224,66.380,33.620,86.750,13.250,68.88,0.875,bilinear,-12.106,-7.502,-33\nhrnet_w18_small_v2.gluon_in1k,224,66.340,33.660,86.440,13.560,15.60,0.875,bicubic,-11.862,-7.466,-8\nselecsls60.in1k,224,66.340,33.660,86.330,13.670,30.67,0.875,bicubic,-11.634,-7.492,+13\nvit_base_patch16_224.augreg_in1k,224,66.340,33.660,86.130,13.870,86.57,0.900,bicubic,-12.814,-8.602,-99\neca_resnext26ts.ch_in1k,288,66.330,33.670,86.430,13.570,10.30,1.000,bicubic,-11.690,-7.502,+5\nmobileone_s3.apple_in1k,224,66.330,33.670,86.360,13.640,10.17,0.900,bilinear,-11.672,-7.504,+8\nhgnetv2_b0.ssld_stage2_ft_in1k,224,66.310,33.690,86.650,13.350,6.00,0.965,bicubic,-11.052,-7.158,+59\ninception_v3.tf_in1k,299,66.300,33.700,86.580,13.420,23.83,0.875,bicubic,-11.566,-7.064,+17\nresnetv2_34.ra4_e3600_r224_in1k,224,66.300,33.700,86.550,13.450,21.80,0.900,bicubic,-11.326,-6.968,+36\nmobilevitv2_100.cvnets_in1k,256,66.290,33.710,86.830,13.170,4.90,0.888,bicubic,-11.798,-7.340,-5\nresmlp_12_224.fb_distilled_in1k,224,66.290,33.710,86.470,13.530,15.35,0.875,bicubic,-11.658,-7.100,+8\nhardcorenas_f.miil_green_in1k,224,66.280,33.720,86.240,13.760,8.20,0.875,bilinear,-11.820,-7.560,-8\nconvnext_femto_ols.d1_in1k,224,66.260,33.740,86.560,13.440,5.23,0.875,bicubic,-11.592,-7.266,+17\nbat_resnext26ts.ch_in1k,256,66.250,33.750,86.680,13.320,10.73,0.900,bicubic,-12.030,-7.434,-25\ncoat_lite_tiny.in1k,224,66.230,33.770,86.960,13.040,5.72,0.900,bicubic,-11.318,-6.96
4,+36\nresnet101.a3_in1k,160,66.160,33.840,85.930,14.070,44.55,0.950,bicubic,-11.768,-7.756,+6\ntf_efficientnet_cc_b0_8e.in1k,224,66.150,33.850,86.130,13.870,24.01,0.875,bicubic,-11.806,-7.508,+1\nefficientnet_b0.ra_in1k,224,66.150,33.850,86.120,13.880,5.29,0.875,bicubic,-11.548,-7.418,+21\ngmixer_24_224.ra3_in1k,224,66.140,33.860,86.030,13.970,24.72,0.875,bicubic,-11.908,-7.624,-11\ncs3darknet_focus_m.c2ns_in1k,288,66.130,33.870,86.930,13.070,9.30,0.950,bicubic,-11.156,-7.036,+52\nlegacy_seresnet50.in1k,224,66.120,33.880,86.280,13.720,28.09,0.875,bilinear,-11.530,-7.452,+20\nghostnetv2_160.in1k,224,66.100,33.900,86.660,13.340,12.39,0.875,bicubic,-11.736,-7.288,+9\ndensenetblur121d.ra_in1k,288,66.100,33.900,86.580,13.420,8.00,0.950,bicubic,-11.220,-7.216,+46\ninception_v3.tv_in1k,299,66.100,33.900,86.140,13.860,23.83,0.875,bicubic,-11.372,-7.328,+34\nefficientnet_b1.ft_in1k,224,66.060,33.940,86.530,13.470,7.79,0.875,bicubic,-11.532,-7.106,+22\ntf_efficientnet_em.in1k,240,66.060,33.940,86.160,13.840,6.90,0.882,bicubic,-12.068,-7.892,-24\nhardcorenas_e.miil_green_in1k,224,66.040,33.960,85.980,14.020,8.07,0.875,bilinear,-11.748,-7.792,+9\nres2net50_14w_8s.in1k,224,66.030,33.970,86.120,13.880,25.06,0.875,bilinear,-12.112,-7.732,-28\ntinynet_a.in1k,192,66.020,33.980,85.930,14.070,6.19,0.875,bicubic,-11.646,-7.602,+11\nresnet26t.ra2_in1k,320,66.010,33.990,86.550,13.450,16.01,1.000,bicubic,-12.320,-7.496,-45\nefficientnet_b1_pruned.in1k,240,66.000,34.000,86.600,13.400,6.33,0.882,bicubic,-12.252,-7.220,-40\nconvnextv2_femto.fcmae_ft_in1k,224,65.980,34.020,86.700,13.300,5.23,0.875,bicubic,-12.496,-7.284,-61\nres2net50_26w_4s.in1k,224,65.980,34.020,86.590,13.410,25.70,0.875,bilinear,-12.014,-7.270,-17\nese_vovnet19b_dw.ra_in1k,288,65.960,34.040,86.460,13.540,6.54,0.950,bicubic,-11.828,-7.232,+1\npoolformerv2_s12.sail_in1k,224,65.950,34.050,86.450,13.550,11.89,1.000,bicubic,-12.058,-7.414,-22\nresnext50_32x4d.tv_in1k,224,65.950,34.050,86.030,13.970,25.03,0.875,bilinear,-11.682
,-7.644,+7\nregnety_016.pycls_in1k,224,65.940,34.060,86.260,13.740,11.20,0.875,bicubic,-11.940,-7.466,-14\nresnet101.tv_in1k,224,65.940,34.060,86.020,13.980,44.55,0.875,bilinear,-11.464,-7.532,+26\ngcresnext26ts.ch_in1k,288,65.920,34.080,86.650,13.350,10.48,1.000,bicubic,-12.516,-7.378,-60\nresnet50.gluon_in1k,224,65.890,34.110,86.260,13.740,25.56,0.875,bicubic,-11.690,-7.444,+10\nresnet34d.ra2_in1k,224,65.850,34.150,86.580,13.420,21.82,0.875,bicubic,-11.258,-6.792,+43\nstarnet_s3.in1k,224,65.830,34.170,86.110,13.890,5.75,0.875,bicubic,-11.548,-7.506,+23\nconvnext_femto.d1_in1k,224,65.820,34.180,86.570,13.430,5.22,0.875,bicubic,-11.678,-7.108,+11\nrexnet_100.nav_in1k,224,65.820,34.180,86.530,13.470,4.80,0.875,bicubic,-12.042,-7.360,-18\ngcresnext26ts.ch_in1k,256,65.820,34.180,85.860,14.140,10.48,0.900,bicubic,-11.990,-8.176,-11\nrepvgg_b1g4.rvgg_in1k,224,65.800,34.200,86.010,13.990,39.97,0.875,bilinear,-11.808,-7.836,0\nseresnext26t_32x4d.bt_in1k,224,65.790,34.210,85.630,14.370,16.81,0.875,bicubic,-12.190,-8.120,-31\nresnet34.ra4_e3600_r224_in1k,224,65.770,34.230,85.670,14.330,21.80,0.900,bicubic,-11.722,-7.828,+8\nresnet34.a2_in1k,288,65.760,34.240,86.070,13.930,21.80,1.000,bicubic,-11.390,-7.218,+33\ndensenet161.tv_in1k,224,65.720,34.280,86.550,13.450,28.68,0.875,bicubic,-11.664,-7.106,+14\nregnetx_008.tv2_in1k,224,65.700,34.300,86.270,13.730,7.26,0.965,bicubic,-11.594,-7.394,+20\neca_resnext26ts.ch_in1k,256,65.690,34.310,85.840,14.160,10.30,0.900,bicubic,-11.756,-7.734,+9\nskresnet34.ra_in1k,224,65.680,34.320,85.980,14.020,22.28,0.875,bicubic,-11.276,-7.340,+41\ncs3darknet_m.c2ns_in1k,256,65.670,34.330,86.620,13.380,9.31,0.887,bicubic,-11.304,-6.962,+38\nresnet50.a3_in1k,224,65.670,34.330,85.680,14.320,25.56,0.950,bicubic,-12.382,-8.100,-49\nresnext50_32x4d.a3_in1k,160,65.670,34.330,85.270,14.730,25.03,0.950,bicubic,-12.062,-8.044,-16\nmobilenetv3_large_100.miil_in21k_ft_in1k,224,65.660,34.340,85.210,14.790,5.48,0.875,bilinear,-12.260,-7.684,-34\nhgnetv2_b0.ssld_
stage1_in22k_in1k,224,65.630,34.370,86.390,13.610,6.00,0.965,bicubic,-11.238,-7.230,+45\nxcit_tiny_12_p16_224.fb_in1k,224,65.630,34.370,86.250,13.750,6.72,1.000,bicubic,-11.492,-7.478,+24\nres2next50.in1k,224,65.630,34.370,85.800,14.200,24.67,0.875,bilinear,-12.608,-8.104,-68\nmobilenetv1_125.ra4_e3600_r224_in1k,256,65.610,34.390,85.920,14.080,6.27,1.000,bicubic,-12.002,-7.834,-16\nconvnextv2_atto.fcmae_ft_in1k,288,65.600,34.400,86.090,13.910,3.71,0.950,bicubic,-12.194,-7.638,-28\nseresnext26ts.ch_in1k,256,65.590,34.410,86.060,13.940,10.39,0.900,bicubic,-12.270,-7.730,-35\nselecsls42b.in1k,224,65.580,34.420,85.870,14.130,32.46,0.875,bicubic,-11.604,-7.520,+15\nhardcorenas_d.miil_green_in1k,224,65.570,34.430,85.490,14.510,7.50,0.875,bilinear,-11.880,-7.990,-5\nconvnext_atto_ols.a2_in1k,288,65.550,34.450,86.270,13.730,3.70,0.950,bicubic,-11.670,-7.430,+11\necaresnet50t.a3_in1k,160,65.550,34.450,85.710,14.290,25.57,0.950,bicubic,-12.236,-7.910,-30\ntf_efficientnetv2_b1.in1k,192,65.540,34.460,85.930,14.070,8.14,0.882,bicubic,-12.394,-7.892,-48\npoolformer_s12.sail_in1k,224,65.530,34.470,86.230,13.770,11.92,0.900,bicubic,-11.708,-7.308,+6\nresnet26t.ra2_in1k,256,65.470,34.530,86.210,13.790,16.01,0.940,bicubic,-12.408,-7.628,-45\nconvmixer_1024_20_ks9_p14.in1k,224,65.440,34.560,85.510,14.490,24.38,0.960,bicubic,-11.504,-7.854,+25\nfastvit_t8.apple_dist_in1k,256,65.380,34.620,86.060,13.940,4.03,0.900,bicubic,-11.790,-7.214,+8\nmobileone_s2.apple_in1k,224,65.380,34.620,85.930,14.070,7.88,0.900,bilinear,-12.124,-7.734,-21\ndensenetblur121d.ra_in1k,224,65.380,34.620,85.740,14.260,8.00,0.875,bicubic,-11.194,-7.448,+44\ntf_efficientnet_b0.ap_in1k,224,65.350,34.650,85.510,14.490,5.29,0.875,bicubic,-11.750,-7.822,+11\ntf_efficientnet_lite2.in1k,260,65.330,34.670,86.020,13.980,6.09,0.890,bicubic,-12.140,-7.736,-17\ndpn68b.mx_in1k,224,65.290,34.710,85.890,14.110,12.61,0.875,bicubic,-12.204,-7.944,-22\ndensenet201.tv_in1k,224,65.280,34.720,85.650,14.350,20.01,0.875,bicubic,-12.004,-
7.830,-5\nseresnext26d_32x4d.bt_in1k,224,65.260,34.740,85.800,14.200,16.81,0.875,bicubic,-12.332,-7.814,-30\ncrossvit_9_dagger_240.in1k,240,65.250,34.750,86.440,13.560,8.78,0.875,bicubic,-11.740,-7.162,+12\nres2net50_48w_2s.in1k,224,65.240,34.760,85.920,14.080,25.29,0.875,bilinear,-12.288,-7.642,-29\nresnetrs50.tf_in1k,160,65.200,34.800,85.690,14.310,35.69,0.910,bicubic,-12.658,-8.120,-53\nhrnet_w18.ms_in1k,224,65.190,34.810,85.690,14.310,21.30,0.875,bilinear,-11.580,-7.758,+22\ndla60.in1k,224,65.150,34.850,85.820,14.180,22.04,0.875,bilinear,-11.886,-7.252,+5\ncs3darknet_focus_m.c2ns_in1k,256,65.130,34.870,86.020,13.980,9.30,0.887,bicubic,-11.624,-7.530,+22\nshvit_s3.in1k,224,65.090,34.910,85.850,14.150,14.25,0.875,bicubic,-12.266,-7.460,-19\nese_vovnet19b_dw.ra_in1k,224,65.090,34.910,85.440,14.560,6.54,0.875,bicubic,-11.738,-7.836,+17\nresnet26d.bt_in1k,288,65.070,34.930,85.800,14.200,16.01,0.950,bicubic,-12.352,-7.834,-26\ngernet_s.idstcv_in1k,224,65.060,34.940,85.360,14.640,8.17,0.875,bilinear,-11.814,-7.782,+13\nresnet34.a1_in1k,224,65.040,34.960,85.170,14.830,21.80,0.950,bicubic,-11.380,-7.726,+35\nconvnext_atto.d2_in1k,288,64.980,35.020,86.150,13.850,3.70,0.950,bicubic,-12.036,-7.554,0\nseresnet50.a3_in1k,224,64.980,35.020,85.120,14.880,28.09,0.950,bicubic,-12.056,-8.196,-2\ntf_efficientnet_cc_b0_4e.in1k,224,64.960,35.040,84.990,15.010,13.31,0.875,bicubic,-12.364,-8.344,-25\nmobilenetv2_120d.ra_in1k,224,64.940,35.060,85.840,14.160,5.83,0.875,bicubic,-12.360,-7.676,-24\nhardcorenas_c.miil_green_in1k,224,64.920,35.080,85.130,14.870,5.52,0.875,bilinear,-12.172,-8.062,-8\nghostnetv2_130.in1k,224,64.880,35.120,85.420,14.580,8.96,0.875,bicubic,-11.866,-7.944,+12\nresnet34.bt_in1k,288,64.840,35.160,86.150,13.850,21.80,0.950,bicubic,-11.666,-7.192,+25\nlegacy_seresnext26_32x4d.in1k,224,64.840,35.160,85.540,14.460,16.79,0.875,bicubic,-12.260,-7.718,-13\nrepghostnet_150.in1k,224,64.820,35.180,85.940,14.060,6.58,0.875,bicubic,-12.654,-7.556,-42\nresnet50d.a3_in1k,160,64.
820,35.180,84.940,15.060,25.58,0.950,bicubic,-12.402,-8.316,-24\nefficientformerv2_s0.snap_dist_in1k,224,64.740,35.260,85.560,14.440,3.60,0.950,bicubic,-11.362,-7.290,+33\nmixnet_m.ft_in1k,224,64.730,35.270,85.490,14.510,5.01,0.875,bicubic,-12.538,-7.928,-28\nfastvit_t8.apple_in1k,256,64.690,35.310,85.580,14.420,4.03,0.900,bicubic,-11.494,-7.462,+29\nresnet26d.bt_in1k,224,64.660,35.340,85.150,14.850,16.01,0.875,bicubic,-12.038,-8.006,+5\ntf_efficientnetv2_b0.in1k,192,64.630,35.370,85.410,14.590,7.14,0.875,bicubic,-12.246,-7.774,-4\nefficientvit_m5.r224_in1k,224,64.630,35.370,85.320,14.680,12.47,0.875,bicubic,-12.462,-7.848,-18\nresnext26ts.ra2_in1k,288,64.590,35.410,85.540,14.460,10.30,1.000,bicubic,-12.596,-7.930,-29\nmobilenetv3_large_100.ra4_e3600_r224_in1k,256,64.580,35.420,85.570,14.430,5.48,1.000,bicubic,-12.602,-7.758,-28\ntf_efficientnet_lite1.in1k,240,64.580,35.420,85.390,14.610,5.42,0.882,bicubic,-12.094,-7.842,+1\nxcit_nano_12_p8_224.fb_dist_in1k,224,64.510,35.490,85.900,14.100,3.05,1.000,bicubic,-11.820,-7.170,+19\ndensenet169.tv_in1k,224,64.490,35.510,85.310,14.690,14.15,0.875,bicubic,-11.432,-7.498,+32\nmobilenetv1_125.ra4_e3600_r224_in1k,224,64.470,35.530,85.100,14.900,6.27,0.900,bicubic,-12.450,-8.132,-13\ntf_mixnet_m.in1k,224,64.470,35.530,84.920,15.080,5.01,0.875,bicubic,-12.482,-8.244,-18\nlevit_conv_128s.fb_dist_in1k,224,64.470,35.530,84.690,15.310,7.78,0.900,bicubic,-12.064,-8.192,+5\nlevit_128s.fb_dist_in1k,224,64.450,35.550,84.690,15.310,7.78,0.900,bicubic,-12.070,-8.180,+6\nresnext26ts.ra2_in1k,256,64.440,35.560,85.010,14.990,10.30,0.900,bicubic,-12.328,-8.128,-10\nhardcorenas_b.miil_green_in1k,224,64.380,35.620,84.750,15.250,5.18,0.875,bilinear,-12.170,-8.016,0\nghostnetv3_100.in1k,224,64.350,35.650,85.120,14.880,8.13,0.875,bicubic,-12.586,-8.004,-20\ntf_efficientnet_b0.aa_in1k,224,64.340,35.660,85.190,14.810,5.29,0.875,bicubic,-12.542,-8.080,-19\nresmlp_12_224.fb_in1k,224,64.330,35.670,85.600,14.400,15.35,0.875,bicubic,-12.330,-7.570,-9\nmo
bilenetv1_100h.ra4_e3600_r224_in1k,256,64.330,35.670,85.510,14.490,5.28,0.950,bicubic,-12.268,-7.762,-6\nrepvgg_a2.rvgg_in1k,224,64.290,35.710,85.070,14.930,28.21,0.875,bilinear,-12.198,-7.946,+1\nregnetx_016.pycls_in1k,224,64.270,35.730,85.430,14.570,9.19,0.875,bicubic,-12.666,-8.002,-26\nconvnextv2_atto.fcmae_ft_in1k,224,64.260,35.740,85.050,14.950,3.71,0.875,bicubic,-12.384,-7.992,-12\nxcit_nano_12_p16_384.fb_dist_in1k,384,64.250,35.750,85.260,14.740,3.05,1.000,bicubic,-11.210,-7.412,+37\ndensenet121.ra_in1k,288,64.170,35.830,85.700,14.300,7.98,0.950,bicubic,-12.316,-7.678,-2\nresnet26.bt_in1k,288,64.090,35.910,85.140,14.860,16.00,0.950,bicubic,-12.294,-8.038,-2\nconvnext_atto_ols.a2_in1k,224,64.090,35.910,84.810,15.190,3.70,0.875,bicubic,-11.812,-8.032,+18\nresnet34.a2_in1k,224,64.020,35.980,84.320,15.680,21.80,0.950,bicubic,-11.500,-8.134,+29\nvit_small_patch32_224.augreg_in21k_ft_in1k,224,63.920,36.080,85.590,14.410,22.88,0.900,bicubic,-12.090,-7.676,+8\nresnetv2_18d.ra4_e3600_r224_in1k,288,63.910,36.090,85.450,14.550,11.71,1.000,bicubic,-12.148,-7.560,+5\nresnet18d.ra4_e3600_r224_in1k,288,63.890,36.110,85.270,14.730,11.71,1.000,bicubic,-12.122,-7.514,+5\nmobilenetv1_100.ra4_e3600_r224_in1k,256,63.890,36.110,85.110,14.890,4.23,0.950,bicubic,-12.198,-7.896,+2\ndpn68.mx_in1k,224,63.880,36.120,85.070,14.930,12.61,0.875,bicubic,-12.424,-7.924,-3\nmobilenetv2_140.ra_in1k,224,63.860,36.140,84.960,15.040,6.11,0.875,bicubic,-12.668,-8.048,-15\ntf_efficientnet_es.in1k,224,63.860,36.140,84.590,15.410,5.44,0.875,bicubic,-12.748,-8.586,-22\nregnety_008.pycls_in1k,224,63.800,36.200,85.240,14.760,6.26,0.875,bicubic,-12.534,-7.820,-9\nrepghostnet_130.in1k,224,63.770,36.230,84.780,15.220,5.48,0.875,bicubic,-12.608,-8.118,-11\ndensenet121.ra_in1k,224,63.740,36.260,84.580,15.420,7.98,0.875,bicubic,-11.834,-8.068,+17\nhardcorenas_a.miil_green_in1k,224,63.740,36.260,84.490,15.510,5.26,0.875,bilinear,-12.186,-7.996,+3\nmobilenetv3_large_100.ra4_e3600_r224_in1k,224,63.710,36.290,84
.800,15.200,5.48,0.950,bicubic,-12.600,-8.042,-11\nresnest14d.gluon_in1k,224,63.680,36.320,84.050,15.950,10.61,0.875,bilinear,-11.836,-8.462,+17\nconvnext_atto.d2_in1k,224,63.670,36.330,84.950,15.050,3.70,0.875,bicubic,-12.002,-7.964,+6\ntf_efficientnet_b0.in1k,224,63.540,36.460,84.680,15.320,5.29,0.875,bicubic,-13.002,-8.328,-26\nregnety_004.tv2_in1k,224,63.520,36.480,84.820,15.180,4.34,0.965,bicubic,-12.074,-7.886,+11\nmobilevitv2_075.cvnets_in1k,256,63.500,36.500,84.880,15.120,2.87,0.888,bicubic,-12.112,-7.874,+8\nmixnet_s.ft_in1k,224,63.490,36.510,84.730,15.270,4.13,0.875,bicubic,-12.510,-8.066,-8\nresnet26.bt_in1k,224,63.450,36.550,84.200,15.800,16.00,0.875,bicubic,-11.848,-8.378,+20\ntf_mixnet_s.in1k,224,63.440,36.560,84.100,15.900,4.13,0.875,bicubic,-12.216,-8.532,+2\nmobilenetv3_rw.rmsp_in1k,224,63.320,36.680,84.620,15.380,5.48,0.875,bicubic,-12.296,-8.076,+1\nresnet34.bt_in1k,224,63.320,36.680,84.230,15.770,21.80,0.875,bicubic,-11.856,-7.936,+21\nmobilenetv3_large_100.ra_in1k,224,63.320,36.680,84.080,15.920,5.48,0.875,bicubic,-12.464,-8.448,-4\nefficientnet_es_pruned.in1k,224,63.280,36.720,84.840,15.160,5.44,0.875,bicubic,-11.730,-7.598,+27\nresnetv2_18.ra4_e3600_r224_in1k,288,63.280,36.720,84.720,15.280,11.69,1.000,bicubic,-12.084,-7.972,+12\nsemnasnet_100.rmsp_in1k,224,63.230,36.770,84.480,15.520,3.89,0.875,bicubic,-12.222,-8.378,+9\nmixer_b16_224.goog_in21k_ft_in1k,224,63.220,36.780,83.090,16.910,59.88,0.875,bicubic,-13.396,-9.160,-43\nvit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k,384,63.200,36.800,85.130,14.870,6.36,1.000,bicubic,-12.762,-8.142,-17\nefficientnet_lite0.ra_in1k,224,63.200,36.800,84.350,15.650,4.65,0.875,bicubic,-12.288,-8.158,+4\nresnet50.tv_in1k,224,63.190,36.810,84.640,15.360,25.56,0.875,bilinear,-12.966,-8.226,-26\nswiftformer_xs.dist_in1k,224,63.150,36.850,84.250,15.750,3.48,0.950,bicubic,-12.450,-8.064,-5\nvit_tiny_patch16_224.augreg_in21k_ft_in1k,224,63.110,36.890,84.900,15.100,5.72,0.900,bicubic,-12.342,-7.708,+2\nmobileone_s1.apple_i
n1k,224,63.060,36.940,84.260,15.740,4.83,0.900,bilinear,-12.700,-8.528,-15\npit_ti_distilled_224.in1k,224,63.060,36.940,83.810,16.190,5.10,0.900,bicubic,-11.216,-8.110,+38\nghostnetv2_100.in1k,224,63.040,36.960,84.190,15.810,6.16,0.875,bicubic,-12.146,-8.144,+6\ninception_next_atto.sail_in1k,224,62.930,37.070,84.600,15.400,4.16,0.875,bicubic,-12.418,-7.958,+2\nmobilenetv1_100.ra4_e3600_r224_in1k,224,62.880,37.120,84.370,15.630,4.23,0.875,bicubic,-12.508,-7.940,-1\nresnet50.a3_in1k,160,62.870,37.130,84.020,15.980,25.56,0.950,bicubic,-13.124,-8.474,-27\nfasternet_t1.in1k,224,62.860,37.140,84.020,15.980,7.60,1.000,bicubic,-13.062,-8.956,-24\nmobilevit_xs.cvnets_in1k,256,62.850,37.150,84.690,15.310,2.32,0.900,bicubic,-11.768,-7.660,+22\nlegacy_seresnet34.in1k,224,62.820,37.180,84.180,15.820,21.96,0.875,bilinear,-11.968,-7.954,+15\nmobilenetv1_100h.ra4_e3600_r224_in1k,224,62.760,37.240,84.420,15.580,5.28,0.875,bicubic,-12.904,-8.102,-24\ndensenet121.tv_in1k,224,62.760,37.240,84.110,15.890,7.98,0.875,bicubic,-11.990,-8.048,+13\nregnety_006.pycls_in1k,224,62.760,37.240,84.100,15.900,6.06,0.875,bicubic,-12.514,-8.432,-3\ndeit_tiny_distilled_patch16_224.fb_in1k,224,62.760,37.240,83.850,16.150,5.91,0.900,bicubic,-11.778,-8.048,+23\nedgenext_x_small.in1k,288,62.750,37.250,84.550,15.450,2.34,1.000,bicubic,-12.956,-8.210,-27\ntinynet_b.in1k,188,62.710,37.290,84.310,15.690,3.73,0.875,bicubic,-12.236,-7.884,+5\nseresnet50.a3_in1k,160,62.700,37.300,83.820,16.180,28.09,0.950,bicubic,-12.404,-8.276,-2\nhrnet_w18_small_v2.ms_in1k,224,62.690,37.310,83.910,16.090,15.60,0.875,bilinear,-12.404,-8.504,-2\nresnet18.fb_swsl_ig1b_ft_in1k,224,62.680,37.320,84.300,15.700,11.69,0.875,bilinear,-10.606,-7.454,+43\nmobilenetv2_110d.ra_in1k,224,62.670,37.330,84.460,15.540,4.52,0.875,bicubic,-12.386,-7.726,-3\nmobilenetv4_conv_small.e3600_r256_in1k,320,62.670,37.330,84.460,15.540,3.77,1.000,bicubic,-12.946,-8.308,-28\ntf_efficientnet_lite0.in1k,224,62.650,37.350,84.180,15.820,4.65,0.875,bicubic,-12.1
96,-8.000,+2\ndla34.in1k,224,62.610,37.390,83.860,16.140,15.74,0.875,bilinear,-12.022,-8.192,+7\nstarnet_s2.in1k,224,62.580,37.420,84.170,15.830,3.68,0.875,bicubic,-12.086,-7.974,+4\nrepvgg_b0.rvgg_in1k,224,62.570,37.430,83.680,16.320,15.82,0.875,bilinear,-12.588,-8.730,-12\nresnet34.gluon_in1k,224,62.540,37.460,83.970,16.030,21.80,0.875,bicubic,-12.034,-8.018,+7\nregnetx_008.pycls_in1k,224,62.510,37.490,83.980,16.020,7.26,0.875,bicubic,-12.544,-8.358,-8\nfbnetc_100.rmsp_in1k,224,62.460,37.540,83.490,16.510,5.57,0.875,bilinear,-12.660,-8.894,-14\nxcit_nano_12_p8_224.fb_in1k,224,62.380,37.620,84.130,15.870,3.05,1.000,bicubic,-11.520,-8.024,+21\ntf_mobilenetv3_large_100.in1k,224,62.380,37.620,83.800,16.200,5.48,0.875,bilinear,-13.130,-8.800,-30\nresnetv2_18d.ra4_e3600_r224_in1k,224,62.250,37.750,83.810,16.190,11.71,0.900,bicubic,-12.172,-8.118,+6\nedgenext_x_small.in1k,256,62.210,37.790,83.850,16.150,2.34,0.900,bicubic,-12.664,-8.454,-9\nrepghostnet_111.in1k,224,62.200,37.800,83.750,16.250,4.54,0.875,bicubic,-12.860,-8.446,-16\ncrossvit_9_240.in1k,240,62.170,37.830,84.210,15.790,8.55,0.875,bicubic,-11.824,-7.766,+12\nmnasnet_100.rmsp_in1k,224,62.080,37.920,83.710,16.290,4.38,0.875,bicubic,-12.592,-8.398,-8\ncrossvit_tiny_240.in1k,240,62.060,37.940,83.780,16.220,7.01,0.875,bicubic,-11.292,-8.126,+24\nshvit_s2.in1k,224,62.050,37.950,83.980,16.020,11.48,0.875,bicubic,-13.134,-8.336,-26\nresnet18d.ra4_e3600_r224_in1k,224,62.000,38.000,83.900,16.100,11.71,0.900,bicubic,-12.356,-7.926,0\nefficientvit_m4.r224_in1k,224,61.930,38.070,83.440,16.560,8.80,0.875,bicubic,-12.414,-8.520,0\nregnety_004.pycls_in1k,224,61.920,38.080,83.450,16.550,4.34,0.875,bicubic,-12.084,-8.306,+5\nmobilenetv4_conv_small.e2400_r224_in1k,256,61.880,38.120,83.870,16.130,3.77,0.950,bicubic,-12.764,-8.152,-12\nvgg19_bn.tv_in1k,224,61.880,38.120,83.380,16.620,143.68,0.875,bilinear,-12.350,-8.468,-1\nrepvgg_a1.rvgg_in1k,224,61.880,38.120,82.990,17.010,14.09,0.875,bilinear,-12.594,-8.862,-6\nregnetx_004_tv.
tv2_in1k,224,61.810,38.190,83.720,16.280,5.50,0.965,bicubic,-12.786,-8.420,-12\nresnet18d.ra2_in1k,288,61.780,38.220,83.780,16.220,11.71,0.950,bicubic,-12.010,-8.054,+6\nmobilenetv4_conv_small.e1200_r224_in1k,256,61.630,38.370,83.080,16.920,3.77,0.950,bicubic,-12.652,-9.042,-6\nresnet18.a1_in1k,288,61.600,38.400,82.380,17.620,11.69,1.000,bicubic,-11.564,-8.674,+17\nmobilenetv4_conv_small.e3600_r256_in1k,256,61.590,38.410,83.780,16.220,3.77,0.950,bicubic,-12.944,-8.160,-13\nresnet18.fb_ssl_yfcc100m_ft_in1k,224,61.490,38.510,83.210,16.790,11.69,0.875,bilinear,-11.146,-8.206,+25\nstarnet_s1.in1k,224,61.480,38.520,83.710,16.290,2.87,0.875,bicubic,-12.052,-7.794,+5\nrepghostnet_100.in1k,224,61.460,38.540,82.550,17.450,4.07,0.875,bicubic,-12.748,-9.000,-8\nconvit_tiny.fb_in1k,224,61.430,38.570,83.990,16.010,5.71,0.875,bicubic,-11.710,-7.714,+14\nresnet34.a3_in1k,224,61.360,38.640,82.510,17.490,21.80,0.950,bicubic,-11.636,-8.606,+16\ncs3darknet_focus_s.ra4_e3600_r256_in1k,320,61.350,38.650,83.740,16.260,3.27,1.000,bicubic,-11.808,-7.718,+11\nconvnext_zepto_rms_ols.ra4_e3600_r224_in1k,224,61.220,38.780,82.840,17.160,2.16,0.900,bicubic,-12.014,-8.430,+8\nregnetx_006.pycls_in1k,224,61.210,38.790,83.300,16.700,6.20,0.875,bicubic,-12.694,-8.340,-8\nresnet34.tv_in1k,224,61.210,38.790,82.670,17.330,21.80,0.875,bilinear,-12.088,-8.752,+5\nhrnet_w18_small.gluon_in1k,224,61.210,38.790,81.900,18.100,13.19,0.875,bicubic,-12.724,-9.278,-10\nghostnet_100.in1k,224,61.160,38.840,82.390,17.610,5.18,0.875,bicubic,-12.798,-9.152,-12\nspnasnet_100.rmsp_in1k,224,61.110,38.890,82.770,17.230,4.42,0.875,bilinear,-12.976,-9.056,-16\nefficientvit_m3.r224_in1k,224,61.050,38.950,83.040,16.960,6.90,0.875,bicubic,-12.344,-8.296,-3\nvit_base_patch32_224.augreg_in1k,224,61.010,38.990,82.740,17.260,88.22,0.900,bicubic,-13.898,-9.030,-40\npit_ti_224.in1k,224,60.940,39.060,83.830,16.170,4.85,0.900,bicubic,-11.986,-7.582,+7\nmobilenetv4_conv_small.e2400_r224_in1k,224,60.900,39.100,82.710,17.290,3.77,0.875,bi
cubic,-12.856,-8.720,-12\nskresnet18.ra_in1k,224,60.880,39.120,82.870,17.130,11.96,0.875,bicubic,-12.140,-8.302,+2\nresnetv2_18.ra4_e3600_r224_in1k,224,60.830,39.170,82.870,17.130,11.69,0.900,bicubic,-12.742,-8.486,-12\nvgg16_bn.tv_in1k,224,60.820,39.180,83.000,17.000,138.37,0.875,bilinear,-12.534,-8.490,-8\nsemnasnet_075.rmsp_in1k,224,60.630,39.370,82.510,17.490,2.91,0.875,bicubic,-12.370,-8.614,0\nconvnext_zepto_rms.ra4_e3600_r224_in1k,224,60.610,39.390,82.740,17.260,2.16,0.875,bicubic,-12.210,-8.366,+3\nmobilenetv4_conv_small.e1200_r224_in1k,224,60.600,39.400,82.490,17.510,3.77,0.875,bicubic,-12.850,-8.850,-14\nxcit_nano_12_p16_224.fb_dist_in1k,224,60.350,39.650,82.500,17.500,3.05,1.000,bicubic,-11.968,-8.326,+9\ntf_mobilenetv3_large_075.in1k,224,60.320,39.680,81.800,18.200,3.99,0.875,bilinear,-13.122,-9.532,-15\nmobilenetv2_100.ra_in1k,224,60.220,39.780,82.120,17.880,3.50,0.875,bicubic,-12.690,-8.880,-2\ncs3darknet_focus_s.ra4_e3600_r256_in1k,256,60.180,39.820,82.720,17.280,3.27,0.887,bicubic,-11.998,-8.110,+12\nresnet18.a2_in1k,288,60.120,39.880,81.810,18.190,11.69,1.000,bicubic,-12.244,-8.810,+3\nvit_base_patch32_224.sam_in1k,224,59.980,40.020,81.270,18.730,88.22,0.900,bicubic,-13.724,-9.742,-23\ndeit_tiny_patch16_224.fb_in1k,224,59.920,40.080,82.750,17.250,5.72,0.900,bicubic,-12.270,-8.350,+8\nresnet18d.ra2_in1k,224,59.910,40.090,82.330,17.670,11.71,0.875,bicubic,-12.386,-8.352,+3\nshvit_s1.in1k,224,59.800,40.200,82.140,17.860,6.33,0.875,bicubic,-12.980,-8.884,-6\nlegacy_seresnet18.in1k,224,59.760,40.240,81.620,18.380,11.78,0.875,bicubic,-11.982,-8.720,+11\nvgg19.tv_in1k,224,59.720,40.280,81.440,18.560,143.67,0.875,bilinear,-12.680,-9.438,-4\nrepvgg_a0.rvgg_in1k,224,59.500,40.500,81.280,18.720,9.11,0.875,bilinear,-12.926,-9.226,-6\nedgenext_xx_small.in1k,288,59.430,40.570,81.850,18.150,1.33,1.000,bicubic,-12.456,-8.684,+6\nresnet18.a1_in1k,224,59.320,40.680,81.110,18.890,11.69,0.950,bicubic,-12.184,-8.976,+11\nregnetx_004.pycls_in1k,224,59.280,40.720,81.840,1
8.160,5.16,0.875,bicubic,-13.202,-9.000,-10\nrepghostnet_080.in1k,224,59.090,40.910,81.250,18.750,3.28,0.875,bicubic,-13.150,-9.242,-2\ntf_mobilenetv3_large_minimal_100.in1k,224,58.990,41.010,80.910,19.090,3.92,0.875,bilinear,-13.280,-9.754,-5\nhrnet_w18_small.ms_in1k,224,58.910,41.090,81.320,18.680,13.19,0.875,bilinear,-13.430,-9.368,-9\nvit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k,224,58.890,41.110,81.520,18.480,6.34,0.900,bicubic,-12.910,-9.290,+1\nvgg13_bn.tv_in1k,224,58.870,41.130,81.030,18.970,133.05,0.875,bilinear,-12.690,-9.342,+4\nlcnet_100.ra2_in1k,224,58.750,41.250,80.920,19.080,2.95,0.875,bicubic,-13.380,-9.456,-4\npvt_v2_b0.in1k,224,58.710,41.290,82.030,17.970,3.67,0.900,bicubic,-11.944,-8.176,+11\nvgg16.tv_in1k,224,58.640,41.360,81.610,18.390,138.36,0.875,bilinear,-12.954,-8.788,0\nmobileone_s0.apple_in1k,224,58.420,41.580,80.070,19.930,5.29,0.875,bilinear,-12.978,-9.794,+3\nxcit_nano_12_p16_224.fb_in1k,224,58.410,41.590,80.750,19.250,3.05,1.000,bicubic,-11.572,-9.030,+14\nefficientvit_m2.r224_in1k,224,58.340,41.660,81.170,18.830,4.19,0.875,bicubic,-12.456,-8.982,+6\nresnet18.gluon_in1k,224,58.330,41.670,80.980,19.020,11.69,0.875,bicubic,-12.514,-8.770,+4\nresnet18.a2_in1k,224,58.190,41.810,80.200,19.800,11.69,0.950,bicubic,-12.444,-9.286,+6\ntinynet_c.in1k,184,58.170,41.830,80.260,19.740,2.46,0.875,bicubic,-13.044,-9.486,0\nresnet14t.c3_in1k,224,58.150,41.850,80.070,19.930,10.08,0.950,bicubic,-14.092,-10.248,-18\nresnet34.a3_in1k,160,58.150,41.850,80.070,19.930,21.80,0.950,bicubic,-12.430,-9.470,+5\nefficientvit_b0.r224_in1k,224,58.150,41.850,79.890,20.110,3.41,0.950,bicubic,-13.258,-9.544,-6\nedgenext_xx_small.in1k,256,58.100,41.900,81.230,18.770,1.33,0.900,bicubic,-13.032,-8.812,-3\nfasternet_t0.in1k,224,58.050,41.950,80.400,19.600,3.91,1.000,bicubic,-13.682,-9.674,-12\nmobilevitv2_050.cvnets_in1k,256,57.690,42.310,80.980,19.020,1.37,0.888,bicubic,-12.466,-8.950,+3\nresnet18.tv_in1k,224,57.350,42.650,80.080,19.920,11.69,0.875,bilinear,-12.402,-9.002,+
5\nmobilevit_xxs.cvnets_in1k,256,57.250,42.750,79.730,20.270,1.27,0.900,bicubic,-11.686,-9.212,+7\nvgg13.tv_in1k,224,57.090,42.910,79.620,20.380,133.05,0.875,bilinear,-12.860,-9.642,+2\nvgg11_bn.tv_in1k,224,57.010,42.990,79.770,20.230,132.87,0.875,bilinear,-13.364,-10.032,-4\nresnet14t.c3_in1k,176,57.010,42.990,79.610,20.390,10.08,0.875,bicubic,-14.308,-10.070,-11\nregnety_002.pycls_in1k,224,56.970,43.030,79.780,20.220,3.16,0.875,bicubic,-13.318,-9.764,-4\ndla60x_c.in1k,224,56.230,43.770,78.840,21.160,1.32,0.875,bilinear,-11.698,-9.586,+8\nmixer_l16_224.goog_in21k_ft_in1k,224,56.210,43.790,75.710,24.290,208.20,0.875,bicubic,-15.874,-11.922,-25\nresnet18.a3_in1k,224,55.990,44.010,78.830,21.170,11.69,0.950,bicubic,-12.260,-9.342,+5\nrepghostnet_058.in1k,224,55.970,44.030,78.350,21.650,2.55,0.875,bicubic,-12.968,-10.036,-2\nvgg11.tv_in1k,224,55.850,44.150,78.770,21.230,132.86,0.875,bilinear,-13.200,-9.860,-4\nregnetx_002.pycls_in1k,224,55.830,44.170,79.260,20.740,2.68,0.875,bicubic,-12.928,-9.308,-1\nefficientvit_m1.r224_in1k,224,55.570,44.430,78.950,21.050,2.98,0.875,bicubic,-12.770,-9.742,0\nresnet10t.c3_in1k,224,55.440,44.560,78.300,21.700,5.44,0.950,bicubic,-12.914,-9.734,-2\nlcnet_075.ra2_in1k,224,55.290,44.710,78.230,21.770,2.36,0.875,bicubic,-13.496,-10.154,-5\nmobilenetv3_small_100.lamb_in1k,224,54.630,45.370,77.670,22.330,2.54,0.875,bicubic,-13.006,-9.968,+1\ntf_mobilenetv3_small_100.in1k,224,54.580,45.420,76.970,23.030,2.54,0.875,bilinear,-13.342,-10.716,-1\nresnet10t.c3_in1k,176,54.030,45.970,76.500,23.500,5.44,0.875,bicubic,-12.694,-10.476,+2\nrepghostnet_050.in1k,224,53.560,46.440,76.630,23.370,2.31,0.875,bicubic,-13.418,-10.306,-1\ntinynet_d.in1k,152,53.270,46.730,76.350,23.650,2.34,0.875,bicubic,-13.666,-10.728,-1\ndla46x_c.in1k,224,53.120,46.880,76.780,23.220,1.07,0.875,bilinear,-12.888,-10.166,+1\nmnasnet_small.lamb_in1k,224,53.120,46.880,75.850,24.150,2.03,0.875,bicubic,-13.084,-10.620,-1\nmobilenetv2_050.lamb_in1k,224,53.000,47.000,75.510,24.490,1.97
,0.875,bicubic,-12.928,-10.604,0\nresnet18.a3_in1k,160,52.870,47.130,75.840,24.160,11.69,0.950,bicubic,-12.800,-10.428,+2\nmobilenetv4_conv_small_050.e3000_r224_in1k,256,52.460,47.540,75.930,24.070,2.24,0.950,bicubic,-13.366,-10.486,-1\ntf_mobilenetv3_small_075.in1k,224,52.320,47.680,75.520,24.480,2.04,0.875,bilinear,-13.386,-10.610,-1\ndla46_c.in1k,224,52.030,47.970,75.670,24.330,1.30,0.875,bilinear,-12.850,-10.660,+1\nmobilenetv3_small_075.lamb_in1k,224,51.820,48.180,74.710,25.290,2.04,0.875,bicubic,-13.464,-10.752,-1\nmobilenetv4_conv_small_050.e3000_r224_in1k,224,51.430,48.570,75.160,24.840,2.24,0.875,bicubic,-13.348,-10.340,0\nefficientvit_m0.r224_in1k,224,50.610,49.390,74.450,25.550,2.35,0.875,bicubic,-12.694,-10.690,0\nlcnet_050.ra2_in1k,224,50.050,49.950,73.430,26.570,1.88,0.875,bicubic,-13.088,-10.972,0\ntf_mobilenetv3_small_minimal_100.in1k,224,49.570,50.430,72.810,27.190,2.04,0.875,bilinear,-13.322,-11.460,0\ntinynet_e.in1k,106,46.760,53.240,70.230,29.770,2.04,0.875,bicubic,-13.114,-11.544,0\ntest_vit3.r160_in1k,160,45.390,54.610,70.390,29.610,0.93,0.950,bicubic,-11.518,-10.358,+1\nmobilenetv3_small_050.lamb_in1k,224,44.840,55.160,67.760,32.240,1.59,0.875,bicubic,-13.078,-12.384,-1\ntest_convnext2.r160_in1k,160,43.080,56.920,67.910,32.090,0.48,0.950,bicubic,-10.440,-10.648,0\ntest_convnext3.r160_in1k,160,42.430,57.570,67.710,32.290,0.47,0.950,bicubic,-10.872,-10.616,0\ntest_convnext.r160_in1k,160,37.780,62.220,63.470,36.530,0.27,0.950,bicubic,-9.994,-10.696,0\ntest_nfnet.r160_in1k,160,36.860,63.140,61.770,38.230,0.38,0.950,bicubic,-10.774,-11.110,0\ntest_efficientnet_evos.r160_in1k,160,35.770,64.230,59.730,40.270,0.36,0.950,bicubic,-10.734,-11.292,+1\ntest_efficientnet.r160_in1k,160,35.180,64.820,59.400,40.600,0.36,0.950,bicubic,-11.326,-11.636,-1\ntest_byobnet.r160_in1k,160,35.090,64.910,59.560,40.440,0.46,0.950,bicubic,-10.768,-11.444,0\ntest_efficientnet_ln.r160_in1k,160,33.760,66.240,58.380,41.620,0.36,0.950,bicubic,-10.198,-10.946,0\ntest_efficientne
t_gn.r160_in1k,160,33.490,66.510,57.850,42.150,0.36,0.950,bicubic,-10.398,-11.306,0\ntest_vit2.r160_in1k,160,32.830,67.170,58.110,41.890,0.46,0.950,bicubic,-9.420,-10.872,0\ntest_resnet.r160_in1k,160,31.680,68.320,56.830,43.170,0.47,0.950,bilinear,-9.922,-11.154,0\ntest_vit.r160_in1k,160,31.620,68.380,56.600,43.400,0.37,0.950,bicubic,-9.362,-10.782,0\n"
  },
  {
    "path": "results/results-sketch.csv",
    "content": "model,img_size,top1,top1_err,top5,top5_err,param_count,crop_pct,interpolation,top1_diff,top5_diff,rank_diff\nvit_so400m_patch14_siglip_gap_378.webli_ft_in1k,378,71.717,28.283,90.666,9.334,414.14,1.000,bicubic,-17.339,-8.086,+9\nvit_so400m_patch14_siglip_378.webli_ft_in1k,378,71.489,28.511,90.619,9.381,429.38,1.000,bicubic,-17.921,-8.235,+6\neva_giant_patch14_336.clip_ft_in1k,336,71.200,28.800,90.336,9.664,\"1,013.01\",1.000,bicubic,-18.262,-8.492,+4\neva02_large_patch14_448.mim_m38m_ft_in22k_in1k,448,70.668,29.332,89.835,10.165,305.08,1.000,bicubic,-19.388,-9.219,-3\neva02_large_patch14_448.mim_m38m_ft_in1k,448,70.532,29.468,89.847,10.153,305.08,1.000,bicubic,-19.018,-9.078,+1\neva_giant_patch14_224.clip_ft_in1k,224,70.076,29.924,89.766,10.234,\"1,012.56\",0.900,bicubic,-18.820,-8.906,+5\nconvnext_xxlarge.clip_laion2b_soup_ft_in1k,256,70.062,29.938,90.344,9.656,846.47,1.000,bicubic,-18.560,-8.374,+10\neva_giant_patch14_336.m30m_ft_in22k_in1k,336,68.048,31.952,87.826,12.174,\"1,013.01\",1.000,bicubic,-21.518,-11.126,-3\neva02_large_patch14_448.mim_in22k_ft_in22k_in1k,448,67.561,32.439,87.496,12.504,305.08,1.000,bicubic,-22.395,-11.518,-7\neva_giant_patch14_560.m30m_ft_in22k_in1k,560,67.480,32.520,87.471,12.529,\"1,014.45\",1.000,bicubic,-22.310,-11.519,-7\nvit_huge_patch14_clip_224.laion2b_ft_in1k,224,67.425,32.575,87.870,12.130,632.05,1.000,bicubic,-20.187,-10.356,+41\neva02_large_patch14_448.mim_in22k_ft_in1k,448,66.987,33.013,87.455,12.545,305.08,1.000,bicubic,-22.647,-11.499,-8\nconvnext_large_mlp.clip_laion2b_augreg_ft_in1k_384,384,66.466,33.534,87.406,12.594,200.13,1.000,bicubic,-21.424,-11.042,+31\nconvnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_384,384,65.747,34.253,87.023,12.977,200.13,1.000,bicubic,-22.587,-11.551,+10\nvit_large_patch14_clip_336.laion2b_ft_in1k,336,65.747,34.253,86.948,13.052,304.53,1.000,bicubic,-22.123,-11.412,+30\nvit_huge_patch14_clip_224.laion2b_ft_in12k_in1k,224,65.323,34.677,86.816,13.184,632.05,1.000,bicubic,-22.9
65,-11.734,+10\nvit_huge_patch14_clip_336.laion2b_ft_in12k_in1k,336,65.289,34.711,86.765,13.235,632.46,1.000,bicubic,-23.345,-11.903,-1\nconvnext_large_mlp.clip_laion2b_augreg_ft_in1k,256,65.071,34.929,86.288,13.712,200.13,1.000,bicubic,-22.273,-11.934,+50\nconvnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_320,320,64.918,35.082,86.628,13.372,200.13,1.000,bicubic,-23.052,-11.846,+20\nvit_large_patch14_clip_224.laion2b_ft_in1k,224,64.827,35.173,86.581,13.419,304.20,1.000,bicubic,-22.461,-11.655,+51\nregnety_1280.swag_ft_in1k,384,64.106,35.894,86.050,13.950,644.81,1.000,bicubic,-24.122,-12.634,+10\nvit_large_patch14_clip_336.openai_ft_in12k_in1k,336,64.047,35.953,85.912,14.088,304.53,1.000,bicubic,-24.213,-12.612,+6\neva_large_patch14_336.in22k_ft_in1k,336,63.088,36.912,84.399,15.601,304.53,1.000,bicubic,-25.592,-14.315,-10\nvit_large_patch14_clip_224.openai_ft_in1k,224,62.644,37.356,85.126,14.873,304.20,1.000,bicubic,-25.214,-13.304,+22\nbeit3_large_patch16_224.in22k_ft_in1k,224,62.082,37.918,83.236,16.764,304.57,1.000,bicubic,-25.546,-15.098,+26\nvit_large_patch14_clip_224.laion2b_ft_in12k_in1k,224,62.062,37.938,84.348,15.652,304.20,1.000,bicubic,-25.838,-14.060,+17\nvit_large_patch14_clip_336.laion2b_ft_in12k_in1k,336,61.660,38.340,83.674,16.326,304.53,1.000,bicubic,-26.530,-14.892,+6\nvit_large_patch14_clip_224.openai_ft_in12k_in1k,224,61.394,38.606,83.389,16.611,304.20,1.000,bicubic,-26.768,-15.149,+7\neva_large_patch14_196.in22k_ft_in1k,196,61.149,38.851,82.759,17.241,304.14,1.000,bicubic,-26.803,-15.731,+11\neva_large_patch14_336.in22k_ft_in22k_in1k,336,60.935,39.065,82.153,17.847,304.53,1.000,bicubic,-28.303,-16.695,-21\nconvnext_base.clip_laion2b_augreg_ft_in12k_in1k_384,384,60.815,39.185,83.213,16.788,88.59,1.000,bicubic,-26.329,-15.015,+44\nconvnext_base.clip_laion2b_augreg_ft_in1k,256,60.318,39.682,82.719,17.281,88.59,1.000,bicubic,-25.866,-14.955,+116\nregnety_1280.swag_lc_in1k,224,59.938,40.062,83.153,16.846,644.81,0.965,bicubic,-26.026,-14.699,+137\neva
_large_patch14_196.in22k_ft_in22k_in1k,196,59.864,40.136,81.122,18.878,304.14,1.000,bicubic,-28.726,-17.540,-16\nconvnext_base.clip_laion2b_augreg_ft_in12k_in1k,256,59.848,40.152,82.810,17.190,88.59,1.000,bicubic,-26.530,-15.166,+94\nbeit3_large_patch16_224.indomain_in22k_ft_in1k,224,59.636,40.364,81.242,18.759,304.57,1.000,bicubic,-27.898,-17.123,+18\nconvnext_base.clip_laiona_augreg_ft_in1k_384,384,59.410,40.590,82.252,17.748,88.59,1.000,bicubic,-27.080,-15.708,+84\neva02_base_patch14_448.mim_in22k_ft_in22k_in1k,448,58.518,41.482,80.791,19.209,87.12,1.000,bicubic,-30.160,-17.934,-24\nresnext101_32x32d.fb_wsl_ig1b_ft_in1k,224,58.417,41.583,80.402,19.598,468.53,0.875,bilinear,-26.673,-17.040,+222\nbeitv2_large_patch16_224.in1k_ft_in22k_in1k,224,58.378,41.622,80.224,19.776,304.43,0.950,bicubic,-30.028,-18.378,-19\neva02_base_patch14_448.mim_in22k_ft_in1k,448,58.032,41.968,80.733,19.267,87.12,1.000,bicubic,-30.230,-17.837,-14\nregnety_320.swag_ft_in1k,384,57.920,42.080,81.497,18.503,145.05,1.000,bicubic,-28.910,-16.865,+52\nconvnextv2_huge.fcmae_ft_in22k_in1k_384,384,57.887,42.113,79.656,20.344,660.29,1.000,bicubic,-30.779,-19.078,-28\nconvnextv2_huge.fcmae_ft_in22k_in1k_512,512,57.859,42.141,79.428,20.572,660.29,1.000,bicubic,-31.001,-19.312,-32\nresnext101_32x16d.fb_wsl_ig1b_ft_in1k,224,57.698,42.302,79.913,20.087,194.03,0.875,bilinear,-26.468,-17.279,+353\nresnext101_32x16d.fb_swsl_ig1b_ft_in1k,224,57.460,42.540,80.402,19.598,194.03,0.875,bilinear,-25.880,-16.442,+500\nbeit_large_patch16_384.in22k_ft_in22k_in1k,384,56.979,43.021,79.166,20.834,305.00,1.000,bicubic,-31.401,-19.438,-25\nvit_base_patch16_clip_384.laion2b_ft_in1k,384,56.869,43.131,79.990,20.010,86.86,1.000,bicubic,-29.769,-18.018,+58\nbeit_large_patch16_512.in22k_ft_in22k_in1k,512,56.796,43.204,78.897,21.103,305.67,1.000,bicubic,-31.780,-19.759,-30\nresnext101_32x8d.fb_swsl_ig1b_ft_in1k,224,56.448,43.552,78.974,21.026,88.79,0.875,bilinear,-27.828,-18.214,+331\nmaxvit_rmlp_base_rw_384.sw_in12k_ft_in1k,38
4,56.279,43.721,77.303,22.697,116.14,1.000,bicubic,-31.531,-21.062,-4\nmaxvit_xlarge_tf_384.in21k_ft_in1k,384,56.230,43.770,78.726,21.274,475.32,1.000,bicubic,-32.072,-19.812,-27\nmaxvit_xlarge_tf_512.in21k_ft_in1k,512,56.161,43.839,78.681,21.319,475.77,1.000,bicubic,-32.379,-19.969,-33\nmaxvit_base_tf_512.in21k_ft_in1k,512,56.108,43.892,78.661,21.339,119.88,1.000,bicubic,-32.106,-19.877,-22\ndeit3_huge_patch14_224.fb_in22k_ft_in1k,224,55.784,44.216,77.636,22.364,632.13,1.000,bicubic,-31.396,-20.628,+19\nmaxvit_base_tf_384.in21k_ft_in1k,384,55.702,44.298,78.027,21.973,119.65,1.000,bicubic,-32.228,-20.519,-14\nregnety_320.swag_lc_in1k,224,55.454,44.546,79.762,20.238,145.05,0.965,bicubic,-29.200,-17.730,+261\nvit_base_patch16_clip_224.laion2b_ft_in1k,224,55.426,44.574,79.068,20.932,86.57,1.000,bicubic,-30.062,-18.518,+157\nregnety_160.swag_ft_in1k,384,55.179,44.821,79.337,20.663,83.59,1.000,bicubic,-30.841,-18.709,+105\nmaxvit_large_tf_512.in21k_ft_in1k,512,55.151,44.849,77.258,22.742,212.33,1.000,bicubic,-33.085,-21.350,-30\nmaxvit_large_tf_384.in21k_ft_in1k,384,55.071,44.929,77.146,22.854,212.03,1.000,bicubic,-32.923,-21.430,-23\nconvnext_xlarge.fb_in22k_ft_in1k_384,384,55.006,44.994,76.773,23.227,350.20,1.000,bicubic,-32.758,-21.779,-14\nbeit_large_patch16_224.in22k_ft_in22k_in1k,224,54.973,45.027,77.616,22.384,304.43,0.900,bicubic,-32.513,-20.702,-6\nresnext101_32x8d.fb_wsl_ig1b_ft_in1k,224,54.961,45.039,77.526,22.474,88.79,0.875,bilinear,-27.759,-19.098,+571\ndeit3_large_patch16_384.fb_in22k_ft_in1k,384,54.912,45.088,77.376,22.624,304.76,1.000,bicubic,-32.824,-21.132,-16\nvit_so150m2_patch16_reg1_gap_448.sbb_e200_in12k_ft_in1k,448,54.864,45.136,76.726,23.274,136.50,1.000,bicubic,-33.209,-21.832,-30\nmaxxvitv2_rmlp_base_rw_384.sw_in12k_ft_in1k,384,54.790,45.210,76.861,23.139,116.09,1.000,bicubic,-32.690,-21.515,-9\ncaformer_b36.sail_in22k_ft_in1k_384,384,54.442,45.558,76.828,23.172,98.75,1.000,bicubic,-33.616,-21.762,-31\ndeit3_large_patch16_224.fb_in22k_ft_in1k,2
24,54.385,45.615,76.555,23.445,304.37,1.000,bicubic,-32.605,-21.691,+15\nvit_so150m2_patch16_reg1_gap_384.sbb_e200_in12k_ft_in1k,384,54.295,45.705,76.506,23.494,136.33,1.000,bicubic,-33.637,-21.994,-29\nhgnetv2_b6.ssld_stage1_in22k_in1k,224,54.287,45.713,77.292,22.708,75.26,0.965,bicubic,-31.765,-20.514,+89\nbeitv2_large_patch16_224.in1k_ft_in1k,224,54.165,45.835,75.564,24.436,304.43,0.950,bicubic,-33.249,-22.651,-8\nconvnextv2_large.fcmae_ft_in22k_in1k_384,384,53.917,46.083,76.034,23.966,197.96,1.000,bicubic,-34.263,-22.486,-39\nhgnetv2_b6.ssld_stage2_ft_in1k,224,53.855,46.145,76.995,23.005,75.26,0.965,bicubic,-32.352,-20.815,+69\nmaxvit_rmlp_base_rw_224.sw_in12k_ft_in1k,224,53.784,46.216,75.160,24.840,116.14,0.950,bicubic,-33.124,-22.844,+14\nresnext101_32x4d.fb_swsl_ig1b_ft_in1k,224,53.646,46.354,76.407,23.593,44.18,0.875,bilinear,-29.622,-20.353,+477\nvit_base_patch16_clip_384.laion2b_ft_in12k_in1k,384,53.491,46.509,75.700,24.300,86.86,1.000,bicubic,-33.719,-22.332,-4\nhgnetv2_b6.ssld_stage1_in22k_in1k,288,53.169,46.831,76.178,23.822,75.26,1.000,bicubic,-33.131,-21.764,+57\nvit_base_patch16_clip_384.openai_ft_in1k,384,53.096,46.904,76.683,23.317,86.86,1.000,bicubic,-33.114,-21.211,+63\nregnety_160.swag_lc_in1k,224,53.090,46.910,78.162,21.838,83.59,0.965,bicubic,-30.716,-19.122,+376\nhgnetv2_b6.ssld_stage2_ft_in1k,288,53.011,46.989,76.050,23.950,75.26,1.000,bicubic,-33.368,-21.892,+47\nvit_so150m2_patch16_reg1_gap_256.sbb_e200_in12k_ft_in1k,256,52.990,47.010,74.997,25.003,136.06,1.000,bicubic,-34.328,-23.331,-13\nconvnextv2_base.fcmae_ft_in22k_in1k_384,384,52.919,47.081,75.083,24.917,88.72,1.000,bicubic,-34.719,-23.333,-33\nconvformer_b36.sail_in22k_ft_in1k_384,384,52.895,47.105,74.961,25.039,99.88,1.000,bicubic,-34.710,-23.463,-31\nvit_large_patch16_384.augreg_in21k_ft_in1k,384,52.799,47.201,74.735,25.265,304.72,1.000,bicubic,-34.297,-23.567,-7\ncaformer_b36.sail_in22k_ft_in1k,224,52.787,47.213,75.281,24.718,98.75,1.000,bicubic,-34.663,-23.055,-24\nvit_so150m_pa
tch16_reg4_gap_384.sbb_e250_in12k_ft_in1k,384,52.760,47.240,74.834,25.166,134.42,1.000,bicubic,-34.610,-23.480,-20\nconvnext_large.fb_in22k_ft_in1k_384,384,52.732,47.268,74.716,25.284,197.77,1.000,bicubic,-34.724,-23.664,-27\nconvformer_b36.sail_in22k_ft_in1k,224,52.727,47.273,74.908,25.092,99.88,1.000,bicubic,-34.266,-23.256,-6\nconvnext_xlarge.fb_in22k_ft_in1k,224,52.567,47.433,74.389,25.611,350.20,0.875,bicubic,-34.403,-23.815,-4\ncoatnet_rmlp_2_rw_384.sw_in12k_ft_in1k,384,52.406,47.594,73.806,26.194,73.88,1.000,bicubic,-34.982,-24.506,-26\nswinv2_large_window12to24_192to384.ms_in22k_ft_in1k,384,52.316,47.684,74.413,25.587,196.74,1.000,bicubic,-35.158,-23.851,-33\nconvnext_xlarge.fb_in22k_ft_in1k,288,52.208,47.792,73.914,26.086,350.20,1.000,bicubic,-35.170,-24.416,-27\nmambaout_base_plus_rw.sw_e150_r384_in12k_ft_in1k,384,52.111,47.889,74.269,25.730,101.66,1.000,bicubic,-35.396,-24.159,-39\nvit_large_r50_s32_384.augreg_in21k_ft_in1k,384,52.035,47.965,73.548,26.452,329.09,1.000,bicubic,-34.153,-24.380,+51\nvit_large_patch16_224.augreg_in21k_ft_in1k,224,51.832,48.168,73.704,26.296,304.33,0.900,bicubic,-34.024,-24.118,+80\nvit_base_patch16_clip_224.laion2b_ft_in12k_in1k,224,51.815,48.185,74.668,25.332,86.57,0.950,bicubic,-34.389,-23.094,+47\nvit_so150m_patch16_reg4_gap_256.sbb_e250_in12k_ft_in1k,256,51.652,48.348,73.635,26.365,134.13,0.950,bicubic,-35.068,-24.389,+5\nconvnextv2_large.fcmae_ft_in22k_in1k,224,51.650,48.350,73.680,26.320,197.96,0.875,bicubic,-35.616,-24.560,-27\nconvnext_base.fb_in22k_ft_in1k_384,384,51.551,48.449,74.539,25.461,88.59,1.000,bicubic,-35.275,-23.705,-5\ntf_efficientnet_l2.ns_jft_in1k_475,475,51.412,48.588,73.814,26.186,480.31,0.936,bicubic,-36.834,-24.740,-72\nmaxxvitv2_rmlp_base_rw_224.sw_in12k_ft_in1k,224,51.188,48.812,73.136,26.864,116.09,0.950,bicubic,-35.452,-24.878,+3\nvit_base_patch16_clip_384.openai_ft_in12k_in1k,384,51.160,48.840,74.346,25.654,86.86,0.950,bicubic,-35.870,-23.840,-23\nmambaout_base_plus_rw.sw_e150_in12k_ft_in1k,224
,51.056,48.944,73.322,26.678,101.66,1.000,bicubic,-35.586,-24.828,0\ncaformer_m36.sail_in22k_ft_in1k_384,384,51.041,48.959,73.413,26.587,56.20,1.000,bicubic,-36.429,-24.895,-45\nswinv2_base_window12to24_192to384.ms_in22k_ft_in1k,384,51.007,48.993,73.283,26.717,87.92,1.000,bicubic,-36.135,-24.945,-29\nvit_base_patch16_clip_224.openai_ft_in1k,224,50.993,49.007,74.824,25.176,86.57,0.900,bicubic,-34.289,-22.624,+127\ntf_efficientnetv2_xl.in21k_ft_in1k,384,50.801,49.199,72.788,27.212,208.12,1.000,bicubic,-34.761,-24.668,+98\nhgnetv2_b5.ssld_stage1_in22k_in1k,224,50.779,49.221,73.648,26.352,39.57,0.965,bicubic,-33.677,-23.200,+237\nseresnextaa201d_32x8d.sw_in12k_ft_in1k_384,384,50.771,49.229,73.645,26.355,149.39,1.000,bicubic,-36.533,-24.689,-40\nhgnetv2_b5.ssld_stage2_ft_in1k,224,50.665,49.335,73.637,26.363,39.57,0.965,bicubic,-34.159,-23.657,+191\nconvnextv2_base.fcmae_ft_in22k_in1k,224,50.555,49.445,72.747,27.253,88.72,0.875,bicubic,-36.197,-25.273,-12\nresnext50_32x4d.fb_swsl_ig1b_ft_in1k,224,50.532,49.468,73.326,26.674,25.03,0.875,bilinear,-31.631,-22.730,+621\nswinv2_large_window12to16_192to256.ms_in22k_ft_in1k,256,50.473,49.527,72.780,27.220,196.74,0.900,bicubic,-36.461,-25.326,-27\nswin_large_patch4_window12_384.ms_in22k_ft_in1k,384,50.343,49.657,72.534,27.466,196.74,1.000,bicubic,-36.799,-25.694,-39\nmambaout_base_plus_rw.sw_e150_in12k_ft_in1k,288,50.294,49.706,72.910,27.090,101.66,1.000,bicubic,-36.622,-25.328,-28\nconvnextv2_large.fcmae_ft_in22k_in1k,288,50.158,49.842,72.389,27.611,197.96,1.000,bicubic,-37.336,-25.971,-61\nvit_mediumd_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k,384,50.082,49.918,72.552,27.448,64.27,1.000,bicubic,-37.356,-25.704,-55\nhiera_huge_224.mae_in1k_ft_in1k,224,50.078,49.922,71.237,28.763,672.78,0.900,bicubic,-36.762,-26.771,-26\nconvnext_large.fb_in22k_ft_in1k,288,49.940,50.060,72.208,27.792,197.77,1.000,bicubic,-37.072,-26.004,-39\nconvnext_large.fb_in22k_ft_in1k,224,49.924,50.076,72.202,27.798,197.77,0.875,bicubic,-36.688,-25.834,-12
\nnextvit_large.bd_ssld_6m_in1k_384,384,49.795,50.205,72.835,27.165,57.87,1.000,bicubic,-36.749,-25.291,-8\ntf_efficientnetv2_xl.in21k_ft_in1k,512,49.757,50.243,72.116,27.884,208.12,1.000,bicubic,-36.997,-25.898,-24\ncaformer_m36.sail_in22k_ft_in1k,224,49.708,50.292,72.137,27.863,56.20,1.000,bicubic,-36.898,-25.901,-13\ntf_efficientnetv2_l.in21k_ft_in1k,384,49.706,50.294,72.739,27.261,118.52,1.000,bicubic,-36.140,-25.027,+52\nvit_base_patch16_clip_224.openai_ft_in12k_in1k,224,49.698,50.302,72.874,27.126,86.57,0.950,bicubic,-36.256,-24.850,+46\nbeit3_base_patch16_224.in22k_ft_in1k,224,49.659,50.341,72.719,27.281,86.66,1.000,bicubic,-35.731,-24.921,+100\nnextvit_base.bd_ssld_6m_in1k_384,384,49.645,50.355,72.621,27.379,44.82,1.000,bicubic,-36.727,-25.419,+3\nvit_mediumd_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k,256,49.567,50.433,71.542,28.458,64.11,0.950,bicubic,-37.041,-26.392,-19\nbeitv2_base_patch16_224.in1k_ft_in22k_in1k,224,49.523,50.477,72.397,27.603,86.53,0.900,bicubic,-36.971,-25.659,-10\nresnet50.fb_swsl_ig1b_ft_in1k,224,49.516,50.484,72.340,27.660,25.56,0.875,bilinear,-31.630,-23.640,+747\nhgnet_base.ssld_in1k,224,49.378,50.622,71.528,28.472,71.58,0.965,bicubic,-35.530,-25.814,+157\ncoatnet_rmlp_2_rw_224.sw_in12k_ft_in1k,224,49.349,50.651,70.837,29.163,73.88,0.950,bicubic,-37.189,-27.058,-18\nconvnextv2_base.fcmae_ft_in22k_in1k,288,49.162,50.838,71.234,28.767,88.72,1.000,bicubic,-37.832,-26.934,-52\nconvformer_m36.sail_in22k_ft_in1k_384,384,49.129,50.871,71.385,28.615,57.05,1.000,bicubic,-37.739,-26.737,-45\nvit_base_patch32_clip_224.laion2b_ft_in1k,224,49.074,50.926,72.574,27.426,88.22,0.900,bicubic,-33.541,-23.632,+516\nconvformer_m36.sail_in22k_ft_in1k,224,49.062,50.938,71.475,28.525,57.05,1.000,bicubic,-37.090,-26.377,+14\nswin_large_patch4_window7_224.ms_in22k_ft_in1k,224,48.985,51.015,71.379,28.621,196.53,0.900,bicubic,-37.335,-26.511,-5\nconvnext_base.fb_in22k_ft_in1k,288,48.950,51.050,71.705,28.295,88.59,1.000,bicubic,-37.352,-26.383,-5\nbeit3_base
_patch16_224.indomain_in22k_ft_in1k,224,48.846,51.154,71.522,28.478,86.66,1.000,bicubic,-36.608,-26.098,+77\nconvnext_base.fb_in22k_ft_in1k,224,48.804,51.196,71.913,28.087,88.59,0.875,bicubic,-37.010,-25.943,+42\nhgnetv2_b5.ssld_stage1_in22k_in1k,288,48.800,51.200,71.536,28.464,39.57,1.000,bicubic,-36.130,-25.754,+142\ntf_efficientnetv2_l.in21k_ft_in1k,480,48.788,51.212,72.053,27.947,118.52,1.000,bicubic,-38.014,-26.071,-46\nswinv2_base_window12to16_192to256.ms_in22k_ft_in1k,256,48.783,51.217,71.377,28.623,87.92,0.900,bicubic,-37.487,-26.529,-7\ncoatnet_2_rw_224.sw_in12k_ft_in1k,224,48.761,51.239,70.119,29.881,73.87,0.950,bicubic,-37.821,-27.777,-32\nbeit_base_patch16_384.in22k_ft_in22k_in1k,384,48.696,51.304,72.094,27.906,86.74,1.000,bicubic,-38.118,-26.042,-50\nhgnetv2_b5.ssld_stage2_ft_in1k,288,48.657,51.343,71.379,28.621,39.57,1.000,bicubic,-36.501,-26.209,+103\nconvnextv2_huge.fcmae_ft_in1k,224,48.582,51.418,69.542,30.458,660.29,0.875,bicubic,-37.678,-28.210,-10\nswin_base_patch4_window12_384.ms_in22k_ft_in1k,384,48.523,51.477,71.829,28.171,87.90,1.000,bicubic,-37.919,-26.239,-24\ncaformer_s36.sail_in22k_ft_in1k_384,384,48.478,51.522,71.509,28.491,39.30,1.000,bicubic,-38.382,-26.711,-59\ntf_efficientnetv2_m.in21k_ft_in1k,384,48.445,51.555,71.898,28.102,54.14,1.000,bicubic,-36.323,-25.536,+156\nmaxvit_base_tf_512.in1k,512,48.246,51.754,70.844,29.156,119.88,1.000,bicubic,-38.352,-27.080,-40\nvit_large_r50_s32_224.augreg_in21k_ft_in1k,224,48.213,51.787,70.878,29.122,328.99,0.900,bicubic,-36.221,-26.274,+202\nnextvit_large.bd_ssld_6m_in1k,224,48.073,51.927,71.145,28.855,57.87,0.950,bicubic,-37.441,-26.343,+56\nnextvit_base.bd_ssld_6m_in1k,224,48.046,51.954,70.715,29.285,44.82,0.950,bicubic,-37.140,-26.885,+92\nvit_mediumd_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,48.014,51.986,69.671,30.329,64.11,0.950,bicubic,-38.184,-28.217,-11\nvit_base_patch32_clip_384.laion2b_ft_in12k_in1k,384,47.940,52.060,70.907,29.093,88.30,1.000,bicubic,-37.458,-26.757,+68\nvit_base_patch
8_224.augreg_in21k_ft_in1k,224,47.773,52.227,70.984,29.016,86.58,0.900,bicubic,-38.065,-26.668,+22\ntf_efficientnet_b7.ns_jft_in1k,600,47.724,52.276,69.603,30.398,66.35,0.949,bicubic,-39.126,-28.477,-67\ntf_efficientnet_b6.ns_jft_in1k,528,47.708,52.292,69.897,30.103,43.04,0.942,bicubic,-38.768,-27.967,-37\ndeit3_base_patch16_384.fb_in22k_ft_in1k,384,47.684,52.316,69.738,30.262,86.88,1.000,bicubic,-39.058,-28.372,-60\nvit_base_patch32_clip_448.laion2b_ft_in12k_in1k,448,47.584,52.416,70.058,29.942,88.34,1.000,bicubic,-38.208,-27.576,+23\nhrnet_w48_ssld.paddle_in1k,224,47.537,52.463,69.398,30.602,77.47,0.950,bilinear,-36.119,-27.450,+325\nvit_base_patch8_224.augreg2_in21k_ft_in1k,224,47.499,52.501,70.334,29.666,86.58,0.900,bicubic,-38.752,-27.507,-25\ntf_efficientnetv2_m.in21k_ft_in1k,480,47.458,52.542,70.923,29.077,54.14,1.000,bicubic,-38.556,-27.033,0\nhgnet_base.ssld_in1k,288,47.421,52.579,69.322,30.678,71.58,1.000,bicubic,-38.067,-28.308,+48\ntf_efficientnet_l2.ns_jft_in1k,800,47.401,52.599,69.844,30.156,480.31,0.960,bicubic,-40.959,-28.812,-144\ndeit3_base_patch16_224.fb_in22k_ft_in1k,224,47.370,52.630,69.817,30.183,86.59,1.000,bicubic,-38.358,-27.929,+26\ntiny_vit_21m_512.dist_in22k_ft_in1k,512,47.238,52.762,70.041,29.959,21.27,1.000,bicubic,-39.206,-27.843,-45\nconvformer_s36.sail_in22k_ft_in1k_384,384,47.146,52.854,69.496,30.504,40.01,1.000,bicubic,-39.228,-28.484,-40\nmaxvit_large_tf_512.in1k,512,47.057,52.943,69.575,30.425,212.33,1.000,bicubic,-39.477,-28.309,-55\nconvnextv2_huge.fcmae_ft_in1k,288,46.884,53.116,67.779,32.221,660.29,1.000,bicubic,-39.734,-30.231,-64\nconvformer_s36.sail_in22k_ft_in1k,224,46.877,53.123,69.551,30.449,40.01,1.000,bicubic,-38.544,-28.023,+51\nconvnext_small.fb_in22k_ft_in1k_384,384,46.873,53.127,69.530,30.470,50.22,1.000,bicubic,-38.879,-28.348,+18\ncaformer_s36.sail_in22k_ft_in1k,224,46.708,53.292,69.773,30.227,39.30,1.000,bicubic,-39.072,-28.059,+11\nseresnextaa101d_32x8d.sw_in12k_ft_in1k_288,288,46.517,53.483,69.587,30.413,93.5
9,0.950,bicubic,-40.013,-28.507,-59\nnextvit_small.bd_ssld_6m_in1k_384,384,46.434,53.566,69.153,30.848,31.76,1.000,bicubic,-39.532,-28.751,-8\ntiny_vit_21m_384.dist_in22k_ft_in1k,384,46.287,53.713,69.204,30.796,21.23,1.000,bicubic,-39.801,-28.512,-22\nmaxvit_base_tf_384.in1k,384,46.275,53.725,68.529,31.471,119.65,1.000,bicubic,-40.015,-29.273,-43\nbeit_base_patch16_224.in22k_ft_in22k_in1k,224,46.260,53.740,69.909,30.091,86.53,0.900,bicubic,-38.978,-27.745,+60\nhrnet_w48_ssld.paddle_in1k,288,46.226,53.774,68.136,31.863,77.47,1.000,bilinear,-38.258,-29.088,+160\nvit_base_patch32_clip_384.openai_ft_in12k_in1k,384,46.224,53.776,69.302,30.698,88.30,0.950,bicubic,-38.994,-28.096,+59\nbeitv2_base_patch16_224.in1k_ft_in1k,224,45.998,54.002,67.871,32.129,86.53,0.900,bicubic,-39.598,-29.643,+20\nconvnextv2_large.fcmae_ft_in1k,224,45.933,54.067,67.014,32.986,197.96,0.875,bicubic,-39.829,-30.559,+7\nvit_base_patch16_384.augreg_in21k_ft_in1k,384,45.896,54.104,68.531,31.468,86.86,1.000,bicubic,-40.114,-29.467,-19\nseresnextaa101d_32x8d.sw_in12k_ft_in1k_288,320,45.892,54.108,68.571,31.429,93.59,1.000,bicubic,-40.830,-29.613,-84\nhiera_large_224.mae_in1k_ft_in1k,224,45.837,54.163,66.260,33.740,213.74,0.900,bicubic,-40.209,-31.386,-26\nmaxvit_large_tf_384.in1k,384,45.798,54.202,68.123,31.877,212.03,1.000,bicubic,-40.444,-29.561,-48\ntf_efficientnet_b8.ap_in1k,672,45.762,54.238,67.905,32.095,87.41,0.954,bicubic,-39.596,-29.393,+41\nvit_base_patch32_clip_224.laion2b_ft_in12k_in1k,224,45.755,54.245,68.883,31.117,88.22,0.900,bicubic,-37.550,-27.645,+361\nvit_betwixt_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k,384,45.727,54.273,67.679,32.321,60.60,1.000,bicubic,-40.891,-30.289,-84\nconvnext_small.in12k_ft_in1k_384,384,45.717,54.283,67.830,32.170,50.22,1.000,bicubic,-40.469,-30.090,-45\nseresnextaa101d_32x8d.sw_in12k_ft_in1k,224,45.646,54.354,68.856,31.144,93.59,0.875,bicubic,-40.310,-28.968,-22\ntf_efficientnet_b5.ns_jft_in1k,456,45.615,54.385,67.854,32.146,30.39,0.934,bicubic,-40.481,-
29.890,-39\nswin_base_patch4_window7_224.ms_in22k_ft_in1k,224,45.566,54.434,68.506,31.494,87.77,0.900,bicubic,-39.710,-29.172,+41\nhgnet_small.ssld_in1k,224,45.354,54.646,67.802,32.197,24.36,0.965,bicubic,-38.464,-29.044,+256\nrdnet_large.nv_in1k_ft_in1k_384,384,45.342,54.658,66.655,33.345,186.27,1.000,bicubic,-40.502,-31.025,-19\nmvitv2_large.fb_in1k,224,45.310,54.690,65.216,34.783,217.99,0.900,bicubic,-39.954,-31.972,+39\nhgnetv2_b4.ssld_stage2_ft_in1k,224,45.267,54.733,67.600,32.400,19.80,0.965,bicubic,-38.433,-29.188,+283\nvit_base_patch16_224.augreg2_in21k_ft_in1k,224,45.106,54.894,67.455,32.545,86.57,0.900,bicubic,-40.002,-30.075,+56\nvit_base_patch32_clip_224.openai_ft_in1k,224,45.033,54.967,68.469,31.531,88.22,0.900,bicubic,-36.911,-27.503,+567\nnextvit_small.bd_ssld_6m_in1k,224,45.020,54.980,67.712,32.288,31.76,0.950,bicubic,-39.852,-29.660,+92\nvit_large_patch16_rope_mixed_224.naver_in1k,224,44.953,55.047,65.175,34.825,304.20,0.900,bicubic,-39.887,-31.953,+96\nvit_large_patch16_rope_mixed_ape_224.naver_in1k,224,44.939,55.061,65.395,34.605,304.40,0.900,bicubic,-39.907,-32.091,+93\nseresnextaa101d_32x8d.sw_in12k_ft_in1k,288,44.904,55.096,67.549,32.451,93.59,1.000,bicubic,-41.620,-30.485,-87\ntiny_vit_21m_224.dist_in22k_ft_in1k,224,44.823,55.177,67.555,32.445,21.20,0.950,bicubic,-40.265,-29.697,+56\nconvnextv2_large.fcmae_ft_in1k,288,44.801,55.199,65.867,34.133,197.96,1.000,bicubic,-41.331,-31.953,-55\nvit_large_patch16_rope_ape_224.naver_in1k,224,44.638,55.362,65.156,34.844,304.37,0.900,bicubic,-40.022,-31.990,+109\nregnety_160.lion_in12k_ft_in1k,224,44.570,55.430,66.344,33.656,83.59,0.950,bicubic,-41.032,-31.324,-8\nvolo_d5_512.sail_in1k,512,44.570,55.430,65.753,34.247,296.09,1.150,bicubic,-42.498,-32.217,-131\nvit_large_patch16_rope_224.naver_in1k,224,44.546,55.454,65.725,34.275,304.17,0.900,bicubic,-40.100,-31.395,+109\nregnety_160.sw_in12k_ft_in1k,224,44.530,55.470,66.510,33.490,83.59,0.950,bicubic,-41.066,-31.159,-10\nrdnet_large.nv_in1k,224,44.522,55.4
78,65.012,34.988,186.27,0.900,bicubic,-40.272,-32.144,+90\nvit_betwixt_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k,256,44.442,55.558,65.513,34.487,60.40,0.950,bicubic,-41.292,-32.097,-21\nconvnext_small.in12k_ft_in1k,224,44.326,55.674,66.309,33.691,50.22,0.950,bicubic,-40.838,-31.193,+34\nconvnextv2_tiny.fcmae_ft_in22k_in1k_384,384,44.312,55.688,66.620,33.380,28.64,1.000,bicubic,-40.786,-31.013,+43\ncait_m48_448.fb_dist_in1k,448,44.251,55.749,64.704,35.296,356.46,1.000,bicubic,-42.229,-33.044,-95\ndeit3_large_patch16_384.fb_in1k,384,44.181,55.819,64.829,35.171,304.76,1.000,bicubic,-41.639,-32.773,-36\ndm_nfnet_f6.dm_in1k,448,44.125,55.875,65.883,34.117,438.36,0.956,bicubic,-42.031,-31.879,-69\nvolo_d5_448.sail_in1k,448,44.100,55.900,65.069,34.931,295.91,1.150,bicubic,-42.874,-32.859,-135\neva02_small_patch14_336.mim_in22k_ft_in1k,336,44.013,55.987,65.932,34.068,22.13,1.000,bicubic,-41.757,-31.684,-33\ndm_nfnet_f5.dm_in1k,416,43.929,56.071,64.861,35.139,377.21,0.954,bicubic,-41.783,-32.689,-27\ndeit3_huge_patch14_224.fb_in1k,224,43.811,56.189,64.328,35.672,632.13,0.900,bicubic,-41.393,-32.858,+20\nhgnetv2_b4.ssld_stage2_ft_in1k,288,43.793,56.207,66.394,33.606,19.80,1.000,bicubic,-40.159,-30.672,+201\nvit_betwixt_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,43.617,56.383,64.106,35.894,60.40,0.950,bicubic,-41.809,-33.372,-2\nconvnext_small.fb_in22k_ft_in1k,224,43.611,56.389,66.564,33.435,50.22,0.875,bicubic,-40.971,-30.714,+100\nconvnext_small.fb_in22k_ft_in1k,288,43.593,56.407,66.447,33.553,50.22,1.000,bicubic,-41.683,-31.119,+8\ndeit3_large_patch16_224.fb_in1k,224,43.557,56.443,63.481,36.519,304.37,0.900,bicubic,-41.215,-33.863,+78\nvit_base_r50_s16_384.orig_in21k_ft_in1k,384,43.532,56.468,66.800,33.200,98.95,1.000,bicubic,-41.456,-30.496,+46\nhgnet_small.ssld_in1k,288,43.512,56.488,65.722,34.279,24.36,1.000,bicubic,-40.856,-31.408,+138\ntf_efficientnet_b4.ns_jft_in1k,380,43.444,56.556,65.464,34.536,19.34,0.922,bicubic,-41.712,-32.018,+20\nregnety_120.sw_in12k_ft_in1k
,224,43.400,56.600,65.596,34.404,51.82,0.950,bicubic,-41.594,-31.816,+40\ndm_nfnet_f3.dm_in1k,320,43.304,56.696,64.228,35.772,254.92,0.940,bicubic,-41.788,-33.158,+27\nefficientvit_l3.r384_in1k,384,43.288,56.712,63.173,36.827,246.04,1.000,bicubic,-43.108,-34.467,-107\nvolo_d5_224.sail_in1k,224,43.282,56.718,64.104,35.896,295.46,0.960,bicubic,-42.794,-33.470,-78\ndeit3_medium_patch16_224.fb_in22k_ft_in1k,224,43.267,56.733,64.912,35.088,38.85,1.000,bicubic,-41.283,-32.268,+96\nvit_base_patch16_224.augreg_in21k_ft_in1k,224,43.261,56.739,65.712,34.288,86.57,0.900,bicubic,-41.273,-31.588,+97\nvolo_d4_448.sail_in1k,448,43.129,56.871,64.128,35.872,193.41,1.150,bicubic,-43.659,-33.752,-140\nhgnetv2_b4.ssld_stage1_in22k_in1k,224,43.086,56.914,65.112,34.888,19.80,0.965,bicubic,-39.806,-31.520,+370\nhgnetv2_b3.ssld_stage2_ft_in1k,224,42.968,57.032,65.093,34.907,16.29,0.965,bicubic,-39.946,-31.267,+366\nefficientvit_l3.r320_in1k,320,42.956,57.044,62.383,37.617,246.04,1.000,bicubic,-43.274,-35.093,-100\ntf_efficientnetv2_l.in1k,384,42.886,57.114,64.071,35.929,118.52,1.000,bicubic,-42.316,-33.353,+4\nefficientnet_b5.sw_in12k_ft_in1k,448,42.870,57.130,65.405,34.595,30.39,1.000,bicubic,-43.024,-32.337,-70\ndm_nfnet_f4.dm_in1k,384,42.870,57.130,63.471,36.529,316.07,0.951,bicubic,-42.644,-34.206,-32\nxcit_large_24_p8_384.fb_dist_in1k,384,42.864,57.136,63.371,36.629,188.93,1.000,bicubic,-43.160,-34.319,-82\nregnety_160.lion_in12k_ft_in1k,288,42.764,57.236,64.228,35.772,83.59,1.000,bicubic,-43.240,-33.606,-79\nvit_medium_patch16_reg4_gap_256.sbb_in12k_ft_in1k,256,42.728,57.272,64.012,35.988,38.88,0.950,bicubic,-42.228,-33.372,+31\nregnety_160.sw_in12k_ft_in1k,288,42.718,57.282,64.322,35.678,83.59,1.000,bicubic,-43.310,-33.514,-86\nconvnext_small.in12k_ft_in1k,288,42.681,57.319,64.314,35.685,50.22,1.000,bicubic,-42.643,-33.240,-17\nmaxvit_small_tf_512.in1k,512,42.665,57.335,64.562,35.438,69.13,1.000,bicubic,-43.437,-33.126,-97\nefficientvit_l3.r224_in1k,224,42.642,57.358,62.306,37.694,2
46.04,1.000,bicubic,-43.168,-34.904,-67\nxcit_large_24_p8_224.fb_dist_in1k,224,42.573,57.427,63.104,36.896,188.93,1.000,bicubic,-42.823,-34.314,-26\ncaformer_b36.sail_in1k,224,42.471,57.529,62.870,37.130,98.75,1.000,bicubic,-43.031,-34.446,-41\ncaformer_b36.sail_in1k_384,384,42.447,57.553,62.815,37.185,98.75,1.000,bicubic,-43.953,-35.009,-128\nmaxvit_large_tf_224.in1k,224,42.430,57.570,63.393,36.607,211.79,0.950,bicubic,-42.508,-33.587,+26\ntf_efficientnet_b8.ra_in1k,672,42.422,57.578,64.823,35.177,87.41,0.954,bicubic,-42.922,-32.569,-25\ncait_m36_384.fb_dist_in1k,384,42.392,57.608,63.350,36.650,271.22,1.000,bicubic,-43.668,-34.382,-99\nmambaout_base_wide_rw.sw_e500_in1k,224,42.312,57.688,62.300,37.700,94.45,0.950,bicubic,-42.136,-35.028,+93\nvolo_d4_224.sail_in1k,224,42.310,57.690,62.994,37.006,192.96,0.960,bicubic,-43.558,-34.470,-84\nhgnetv2_b3.ssld_stage1_in22k_in1k,224,42.115,57.885,64.059,35.941,16.29,0.965,bicubic,-40.479,-32.305,+401\nefficientvit_l2.r288_in1k,288,42.084,57.916,61.404,38.596,63.71,1.000,bicubic,-43.530,-35.960,-61\nefficientvit_l3.r256_in1k,256,42.072,57.928,61.542,38.458,246.04,1.000,bicubic,-43.880,-35.798,-89\nhiera_base_plus_224.mae_in1k_ft_in1k,224,42.068,57.932,61.736,38.264,69.90,0.900,bicubic,-43.068,-35.426,-9\ncaformer_s18.sail_in22k_ft_in1k_384,384,42.023,57.977,64.770,35.230,26.34,1.000,bicubic,-43.419,-32.948,-46\ndeit3_small_patch16_384.fb_in22k_ft_in1k,384,41.964,58.036,64.550,35.450,22.21,1.000,bicubic,-42.882,-32.568,+31\nvit_medium_patch16_gap_384.sw_in12k_ft_in1k,384,41.915,58.085,63.721,36.279,39.03,0.950,bicubic,-43.641,-33.917,-59\nmaxvit_tiny_tf_512.in1k,512,41.868,58.132,63.578,36.422,31.05,1.000,bicubic,-43.790,-34.014,-69\nefficientnetv2_rw_m.agc_in1k,320,41.848,58.152,62.638,37.362,53.24,1.000,bicubic,-42.448,-34.234,+110\ncaformer_s36.sail_in1k_384,384,41.773,58.227,62.768,37.232,39.30,1.000,bicubic,-43.989,-34.898,-79\nhgnetv2_b4.ssld_stage1_in22k_in1k,288,41.706,58.294,63.727,36.273,19.80,1.000,bicubic,-41.744,-
33.193,+253\nhgnet_tiny.ssld_in1k,224,41.675,58.325,63.578,36.422,14.74,0.965,bicubic,-40.267,-32.540,+498\nmambaout_base_wide_rw.sw_e500_in1k,288,41.669,58.331,61.990,38.010,94.45,1.000,bicubic,-43.297,-35.232,+5\nregnetz_e8.ra3_in1k,256,41.653,58.347,63.293,36.707,57.70,0.940,bicubic,-42.765,-33.695,+89\nconvnextv2_tiny.fcmae_ft_in22k_in1k,224,41.640,58.360,63.841,36.159,28.64,0.875,bicubic,-42.264,-32.821,+157\nefficientnet_h_b5.sw_r448_e450_in1k,448,41.636,58.364,62.955,37.045,45.88,1.000,bicubic,-42.782,-34.185,+86\nswin_small_patch4_window7_224.ms_in22k_ft_in1k,224,41.602,58.398,64.554,35.446,49.61,0.900,bicubic,-41.728,-32.442,+273\nconvformer_s18.sail_in22k_ft_in1k_384,384,41.563,58.437,63.358,36.642,26.77,1.000,bicubic,-43.441,-34.215,-6\nregnety_2560.seer_ft_in1k,384,41.553,58.447,64.906,35.094,\"1,282.60\",1.000,bicubic,-43.591,-32.538,-25\ntf_efficientnetv2_s.in21k_ft_in1k,300,41.530,58.470,64.556,35.444,21.46,1.000,bicubic,-41.818,-32.154,+266\ncoatnet_rmlp_1_rw2_224.sw_in12k_ft_in1k,224,41.494,58.506,61.485,38.515,41.72,0.950,bicubic,-43.446,-35.483,0\ncaformer_m36.sail_in1k_384,384,41.488,58.512,61.532,38.468,56.20,1.000,bicubic,-44.676,-36.280,-132\ntf_efficientnet_b7.ap_in1k,600,41.459,58.541,62.872,37.128,66.35,0.949,bicubic,-43.673,-34.384,-27\ntf_efficientnet_b7.ra_in1k,600,41.439,58.561,63.059,36.941,66.35,0.949,bicubic,-43.487,-34.161,+4\nhgnetv2_b3.ssld_stage2_ft_in1k,288,41.422,58.578,63.688,36.312,16.29,1.000,bicubic,-42.166,-33.124,+215\nvit_base_patch16_rope_mixed_224.naver_in1k,224,41.414,58.586,61.212,38.788,86.44,0.900,bicubic,-42.392,-35.510,+172\ntf_efficientnet_b5.ap_in1k,456,41.392,58.608,62.120,37.880,30.39,0.934,bicubic,-42.866,-34.864,+97\ndm_nfnet_f5.dm_in1k,544,41.335,58.665,62.019,37.981,377.21,0.954,bicubic,-44.767,-35.739,-133\nregnety_120.sw_in12k_ft_in1k,288,41.323,58.677,63.200,36.800,51.82,1.000,bicubic,-44.105,-34.376,-66\ndm_nfnet_f3.dm_in1k,416,41.308,58.692,62.135,37.865,254.92,0.940,bicubic,-44.368,-35.441,-93\nresn
etv2_152x4_bit.goog_in21k_ft_in1k,480,41.304,58.696,64.360,35.640,936.53,1.000,bilinear,-43.632,-33.096,-8\nconvnextv2_base.fcmae_ft_in1k,224,41.266,58.734,61.430,38.570,88.72,0.875,bicubic,-43.624,-35.658,0\ncaformer_s18.sail_in22k_ft_in1k,224,41.192,58.808,63.906,36.094,26.34,1.000,bicubic,-42.900,-33.276,+116\ndm_nfnet_f6.dm_in1k,576,41.184,58.816,62.821,37.179,438.36,0.956,bicubic,-45.180,-35.079,-161\nvit_base_patch16_rope_mixed_ape_224.naver_in1k,224,41.180,58.820,61.229,38.771,86.59,0.900,bicubic,-42.718,-35.523,+141\ntf_efficientnet_b6.ap_in1k,528,41.117,58.883,62.377,37.623,43.04,0.942,bicubic,-43.677,-34.761,+9\nconvnext_tiny.in12k_ft_in1k_384,384,41.097,58.903,62.825,37.175,28.59,1.000,bicubic,-44.057,-34.809,-44\nefficientvit_l2.r224_in1k,224,41.074,58.926,60.726,39.274,63.71,1.000,bicubic,-43.972,-36.378,-30\nxcit_large_24_p16_384.fb_dist_in1k,384,41.042,58.958,61.265,38.735,189.10,1.000,bicubic,-44.736,-36.265,-111\ntf_efficientnetv2_s.in21k_ft_in1k,384,40.997,59.003,63.851,36.149,21.46,1.000,bicubic,-43.335,-33.423,+73\ntf_efficientnetv2_l.in1k,480,40.997,59.003,62.027,37.973,118.52,1.000,bicubic,-44.663,-35.445,-102\nxcit_large_24_p16_224.fb_dist_in1k,224,40.979,59.021,61.334,38.666,189.10,1.000,bicubic,-43.957,-35.794,-18\nhiera_small_abswin_256.sbb2_e200_in12k_ft_in1k,256,40.924,59.076,62.558,37.442,34.36,0.950,bicubic,-43.988,-34.702,-14\nmaxvit_base_tf_224.in1k,224,40.883,59.117,61.182,38.818,119.47,0.950,bicubic,-44.001,-35.820,-11\nmaxvit_small_tf_384.in1k,384,40.858,59.142,62.080,37.920,69.02,1.000,bicubic,-44.676,-35.386,-96\nedgenext_base.in21k_ft_in1k,320,40.838,59.162,61.758,38.242,18.51,1.000,bicubic,-43.238,-35.450,+104\nmambaout_base_short_rw.sw_e500_in1k,224,40.810,59.190,60.805,39.195,88.83,0.950,bicubic,-43.554,-36.139,+64\nresnetrs350.tf_in1k,288,40.730,59.270,61.587,38.413,163.96,1.000,bicubic,-43.574,-35.305,+69\nefficientvit_l2.r256_in1k,256,40.675,59.325,60.599,39.401,63.71,1.000,bicubic,-44.705,-36.657,-79\nhrnet_w18_ssld.paddl
e_in1k,224,40.643,59.357,62.304,37.696,21.30,0.950,bilinear,-40.471,-32.818,+574\nxcit_medium_24_p8_224.fb_dist_in1k,224,40.508,59.492,60.514,39.486,84.32,1.000,bicubic,-44.570,-36.760,-46\ntf_efficientnetv2_m.in1k,384,40.494,59.506,61.799,38.201,54.14,1.000,bicubic,-44.060,-35.259,+20\nrexnetr_300.sw_in12k_ft_in1k,224,40.490,59.510,63.159,36.841,34.81,0.950,bicubic,-43.546,-33.833,+105\nvit_small_r26_s32_384.augreg_in21k_ft_in1k,384,40.480,59.520,62.774,37.226,36.47,1.000,bicubic,-43.586,-34.536,+98\ntf_efficientnet_b4.ap_in1k,380,40.476,59.524,61.746,38.254,19.34,0.922,bicubic,-42.770,-34.646,+244\nvit_base_patch16_rope_224.naver_in1k,224,40.433,59.567,60.547,39.453,86.43,0.900,bicubic,-43.281,-36.127,+159\nmambaout_base_tall_rw.sw_e500_in1k,224,40.431,59.569,60.396,39.604,86.48,0.950,bicubic,-44.005,-36.560,+38\nhgnetv2_b3.ssld_stage1_in22k_in1k,288,40.398,59.602,62.495,37.505,16.29,1.000,bicubic,-42.720,-33.959,+260\nmobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k,384,40.386,59.614,61.960,38.040,32.59,0.950,bicubic,-43.928,-35.142,+57\ndeit3_base_patch16_224.fb_in1k,224,40.378,59.622,60.166,39.834,86.59,0.900,bicubic,-43.422,-36.806,+140\ninception_next_base.sail_in1k_384,384,40.339,59.661,60.787,39.213,86.67,1.000,bicubic,-44.863,-36.487,-75\nconvformer_s18.sail_in22k_ft_in1k,224,40.307,59.693,61.723,38.277,26.77,1.000,bicubic,-43.461,-35.323,+145\necaresnet269d.ra2_in1k,320,40.295,59.705,61.105,38.895,102.09,0.950,bicubic,-44.429,-36.075,-12\nvit_medium_patch16_gap_256.sw_in12k_ft_in1k,256,40.290,59.710,61.666,38.334,38.86,0.950,bicubic,-44.152,-35.548,+29\nflexivit_large.600ep_in1k,240,40.270,59.730,60.357,39.643,304.36,0.950,bicubic,-45.260,-37.137,-115\nhgnet_tiny.ssld_in1k,288,40.231,59.769,61.829,38.171,14.74,1.000,bicubic,-42.297,-34.691,+347\nefficientnet_x_b5.sw_r448_e450_in1k,448,40.191,59.809,60.911,39.089,33.44,1.000,bicubic,-44.011,-36.061,+66\ndeit3_small_patch16_224.fb_in22k_ft_in1k,224,40.184,59.816,61.860,38.140,22.06,1.000,bicubic,-42.898,-3
4.914,+256\nvit_base_patch16_224_miil.in21k_ft_in1k,224,40.184,59.816,60.891,39.109,86.54,0.875,bilinear,-44.095,-35.907,+52\nconvnext_tiny.in12k_ft_in1k,224,40.170,59.830,61.575,38.425,28.59,0.950,bicubic,-44.022,-35.285,+66\nregnetz_e8.ra3_in1k,320,40.132,59.868,61.312,38.688,57.70,1.000,bicubic,-44.884,-35.966,-61\nxcit_medium_24_p8_384.fb_dist_in1k,384,40.123,59.877,60.522,39.478,84.32,1.000,bicubic,-45.719,-37.080,-152\nmaxvit_rmlp_small_rw_224.sw_in1k,224,40.115,59.885,59.478,40.522,64.90,0.900,bicubic,-44.373,-37.294,+7\nrdnet_base.nv_in1k,224,40.105,59.895,60.200,39.800,87.45,0.900,bicubic,-44.261,-36.678,+36\nconvnext_large.fb_in1k,224,40.105,59.895,60.109,39.891,197.77,0.875,bicubic,-44.199,-36.833,+43\nefficientvit_l1.r224_in1k,224,40.101,59.899,59.510,40.490,52.65,1.000,bicubic,-44.363,-37.350,+10\nvit_base_patch16_rope_ape_224.naver_in1k,224,40.097,59.903,60.042,39.958,86.59,0.900,bicubic,-43.683,-36.568,+126\nflexivit_large.1200ep_in1k,240,40.083,59.917,60.638,39.362,304.36,0.950,bicubic,-45.563,-36.908,-138\nmaxvit_tiny_tf_384.in1k,384,40.054,59.946,60.964,39.036,30.98,1.000,bicubic,-45.052,-36.418,-81\nhiera_base_224.mae_in1k_ft_in1k,224,40.048,59.952,60.762,39.238,51.52,0.900,bicubic,-44.480,-36.248,-4\nflexivit_large.300ep_in1k,240,40.014,59.986,59.989,40.011,304.36,0.950,bicubic,-45.270,-37.415,-107\nmobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k,480,39.999,60.001,61.723,38.277,32.59,1.000,bicubic,-44.773,-35.325,-36\nedgenext_base.in21k_ft_in1k,256,39.981,60.019,61.212,38.788,18.51,0.950,bicubic,-43.435,-35.588,+190\nconvnextv2_tiny.fcmae_ft_in22k_in1k,288,39.959,60.041,61.817,38.183,28.64,1.000,bicubic,-44.468,-35.443,+14\nxcit_medium_24_p16_384.fb_dist_in1k,384,39.938,60.062,60.211,39.789,84.40,1.000,bicubic,-45.496,-37.197,-123\ndm_nfnet_f4.dm_in1k,512,39.922,60.078,60.414,39.586,316.07,0.951,bicubic,-45.916,-37.362,-164\nhgnetv2_b2.ssld_stage1_in22k_in1k,224,39.916,60.084,61.677,38.323,11.22,0.965,bicubic,-40.826,-33.823,+594\nmambaout_bas
e_short_rw.sw_e500_in1k,288,39.851,60.149,59.901,40.099,88.83,1.000,bicubic,-44.991,-37.385,-49\nrdnet_small.nv_in1k,224,39.820,60.180,60.318,39.682,50.44,0.900,bicubic,-43.838,-36.386,+139\nconvnext_tiny.fb_in22k_ft_in1k_384,384,39.798,60.202,61.530,38.470,28.59,1.000,bicubic,-44.300,-35.252,+55\nvit_mediumd_patch16_rope_reg1_gap_256.sbb_in1k,256,39.798,60.202,59.807,40.193,63.95,0.950,bicubic,-44.518,-37.003,+25\ncait_s36_384.fb_dist_in1k,384,39.767,60.233,60.469,39.531,68.37,1.000,bicubic,-45.691,-37.011,-135\nconvnextv2_base.fcmae_ft_in1k,288,39.761,60.239,59.850,40.150,88.72,1.000,bicubic,-45.729,-37.530,-139\nvolo_d3_448.sail_in1k,448,39.704,60.296,59.769,40.231,86.63,1.000,bicubic,-46.810,-37.941,-234\nefficientnetv2_rw_m.agc_in1k,416,39.673,60.327,59.691,40.309,53.24,1.000,bicubic,-45.157,-37.457,-54\nxception65.ra3_in1k,299,39.649,60.351,60.931,39.069,39.92,0.940,bicubic,-43.537,-35.665,+207\nefficientvit_l2.r384_in1k,384,39.643,60.357,59.465,40.535,63.71,1.000,bicubic,-46.341,-38.045,-188\ncaformer_m36.sail_in1k,224,39.618,60.382,58.698,41.302,56.20,1.000,bicubic,-45.624,-38.528,-118\necaresnet269d.ra2_in1k,352,39.604,60.396,60.355,39.645,102.09,1.000,bicubic,-45.364,-36.869,-82\nmobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k,448,39.592,60.408,60.742,39.258,32.59,0.950,bicubic,-45.076,-36.366,-43\neca_nfnet_l2.ra3_in1k,320,39.588,60.412,60.540,39.460,56.72,0.900,bicubic,-44.582,-36.476,+37\nresnetrs420.tf_in1k,320,39.557,60.443,60.306,39.694,191.89,1.000,bicubic,-44.683,-36.556,+27\ncaformer_s36.sail_in1k,224,39.547,60.453,59.779,40.221,39.30,1.000,bicubic,-44.981,-37.243,-26\nconvformer_b36.sail_in1k,224,39.545,60.455,58.085,41.915,99.88,1.000,bicubic,-45.281,-38.863,-62\nefficientnet_h_b5.sw_r448_e450_in1k,576,39.504,60.496,60.386,39.614,45.88,1.000,bicubic,-45.438,-37.004,-85\ntf_efficientnet_b3.ns_jft_in1k,300,39.500,60.500,61.426,38.574,12.23,0.904,bicubic,-44.554,-35.500,+49\nvolo_d3_224.sail_in1k,224,39.486,60.514,59.873,40.127,86.33,0.960,bicubi
c,-45.954,-37.401,-147\nhgnetv2_b2.ssld_stage2_ft_in1k,224,39.468,60.532,61.239,38.761,11.22,0.965,bicubic,-42.096,-34.655,+440\nconvnext_large.fb_in1k,288,39.468,60.532,59.233,40.767,197.77,1.000,bicubic,-45.394,-37.977,-72\ndeit3_base_patch16_384.fb_in1k,384,39.417,60.583,58.950,41.050,86.88,1.000,bicubic,-45.671,-38.404,-106\nmambaout_base_tall_rw.sw_e500_in1k,288,39.337,60.663,59.671,40.329,86.48,1.000,bicubic,-45.651,-37.663,-96\nxcit_small_24_p8_224.fb_dist_in1k,224,39.321,60.679,59.419,40.581,47.63,1.000,bicubic,-45.557,-37.781,-78\nxcit_medium_24_p16_224.fb_dist_in1k,224,39.283,60.717,59.482,40.518,84.40,1.000,bicubic,-45.009,-37.454,+7\ninception_next_base.sail_in1k,224,39.272,60.728,59.264,40.736,86.67,0.950,bicubic,-44.826,-37.874,+33\nconvformer_m36.sail_in1k,224,39.242,60.758,57.645,42.355,57.05,1.000,bicubic,-45.244,-39.221,-34\nefficientnet_b4.ra2_in1k,320,39.201,60.799,59.942,40.058,19.34,0.875,bicubic,-43.457,-36.192,+269\ncoat_lite_medium_384.in1k,384,39.185,60.815,59.290,40.710,44.57,1.000,bicubic,-45.705,-38.086,-86\nefficientnetv2_rw_s.ra2_in1k,288,39.154,60.846,59.592,40.408,23.94,1.000,bicubic,-43.734,-36.736,+233\nefficientnet_b4.ra2_in1k,384,39.134,60.866,59.659,40.341,19.34,1.000,bicubic,-44.334,-36.915,+139\ntiny_vit_11m_224.dist_in22k_ft_in1k,224,39.120,60.880,61.033,38.967,11.00,0.950,bicubic,-44.112,-35.595,+180\nvit_betwixt_patch16_rope_reg4_gap_256.sbb_in1k,256,39.081,60.919,58.822,41.178,60.23,0.950,bicubic,-44.825,-37.856,+50\nhrnet_w18_ssld.paddle_in1k,288,39.079,60.921,60.638,39.362,21.30,1.000,bilinear,-42.973,-35.612,+367\nmobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k,544,39.032,60.968,60.545,39.455,32.59,1.000,bicubic,-45.958,-36.748,-109\nxcit_small_24_p8_384.fb_dist_in1k,384,39.030,60.970,59.205,40.795,47.63,1.000,bicubic,-46.550,-38.343,-179\nvit_base_patch16_rope_reg1_gap_256.sbb_in1k,256,39.022,60.978,58.633,41.367,86.43,0.950,bicubic,-44.850,-38.043,+57\ntresnet_v2_l.miil_in21k_ft_in1k,224,39.008,60.992,59.478,40.522,
46.17,0.875,bilinear,-44.902,-37.014,+44\ndm_nfnet_f2.dm_in1k,256,39.008,60.992,59.141,40.859,193.78,0.920,bicubic,-45.092,-37.769,+18\nmaxvit_small_tf_224.in1k,224,39.005,60.995,59.302,40.698,68.93,0.950,bicubic,-45.447,-37.522,-39\nresnetv2_152x2_bit.goog_teacher_in21k_ft_in1k_384,384,38.989,61.011,62.487,37.513,236.34,1.000,bicubic,-44.869,-34.643,+54\nseresnext101d_32x8d.ah_in1k,224,38.946,61.054,58.942,41.058,93.59,0.950,bicubic,-44.756,-37.852,+92\nconvformer_b36.sail_in1k_384,384,38.944,61.056,58.398,41.602,99.88,1.000,bicubic,-46.826,-39.124,-201\nmambaout_small_rw.sw_e450_in1k,224,38.896,61.104,58.820,41.180,48.50,1.000,bicubic,-45.144,-37.922,+25\nconvnext_tiny.in12k_ft_in1k,288,38.883,61.117,59.875,40.125,28.59,1.000,bicubic,-45.565,-37.199,-43\ncoatnet_rmlp_2_rw_224.sw_in1k,224,38.867,61.133,58.081,41.919,73.88,0.950,bicubic,-45.741,-38.663,-69\nvit_base_patch32_384.augreg_in21k_ft_in1k,384,38.828,61.172,60.337,39.663,88.30,1.000,bicubic,-44.530,-36.491,+147\nnextvit_large.bd_in1k_384,384,38.824,61.176,59.818,40.182,57.87,1.000,bicubic,-46.106,-37.676,-110\nregnetz_d32.ra3_in1k,256,38.775,61.225,60.037,39.963,27.58,0.950,bicubic,-44.657,-36.597,+131\ntf_efficientnetv2_m.in1k,480,38.757,61.243,59.854,40.146,54.14,1.000,bicubic,-46.451,-37.504,-155\nhgnetv2_b2.ssld_stage1_in22k_in1k,288,38.743,61.257,60.300,39.700,11.22,1.000,bicubic,-42.713,-35.716,+433\neca_nfnet_l2.ra3_in1k,384,38.731,61.269,59.504,40.496,56.72,1.000,bicubic,-45.979,-37.488,-88\nefficientvit_b3.r288_in1k,288,38.694,61.306,58.410,41.590,48.65,1.000,bicubic,-45.468,-38.322,-1\nresnet200d.ra2_in1k,256,38.690,61.310,59.351,40.649,64.69,0.950,bicubic,-44.560,-37.201,+156\nefficientvit_b3.r256_in1k,256,38.682,61.318,58.651,41.349,48.65,1.000,bicubic,-45.100,-37.847,+59\nresnet152d.ra2_in1k,256,38.672,61.328,59.309,40.691,60.21,0.950,bicubic,-44.488,-37.337,+166\nxcit_small_12_p8_384.fb_dist_in1k,384,38.669,61.331,58.999,41.001,26.21,1.000,bicubic,-46.399,-38.265,-139\ndavit_small.msft_in1k,22
4,38.651,61.349,58.205,41.795,49.75,0.950,bicubic,-45.601,-38.737,-21\nresnetrs270.tf_in1k,256,38.609,61.391,59.300,40.700,129.86,1.000,bicubic,-44.987,-37.316,+91\nregnetz_040_h.ra3_in1k,256,38.592,61.408,59.260,40.740,28.94,1.000,bicubic,-45.172,-37.440,+60\nconvformer_m36.sail_in1k_384,384,38.553,61.447,57.728,42.272,57.05,1.000,bicubic,-47.027,-39.844,-203\nmvitv2_small.fb_in1k,224,38.545,61.455,58.119,41.881,34.87,0.900,bicubic,-45.227,-38.439,+56\nvit_medium_patch16_rope_reg1_gap_256.sbb_in1k,256,38.541,61.459,58.785,41.215,38.74,0.950,bicubic,-45.269,-38.039,+44\ndavit_base.msft_in1k,224,38.501,61.499,57.513,42.487,87.95,0.950,bicubic,-46.151,-39.507,-92\nxcit_small_24_p16_384.fb_dist_in1k,384,38.498,61.502,58.404,41.596,47.67,1.000,bicubic,-46.605,-38.914,-154\nresnetaa101d.sw_in12k_ft_in1k,224,38.492,61.508,59.288,40.712,44.57,0.950,bicubic,-45.052,-37.540,+92\nvit_little_patch16_reg1_gap_256.sbb_in12k_ft_in1k,256,38.462,61.538,59.382,40.618,22.52,0.950,bicubic,-45.338,-37.202,+43\nmvitv2_base.fb_in1k,224,38.462,61.538,57.920,42.080,51.47,0.900,bicubic,-45.962,-38.988,-54\nconvnext_tiny.fb_in22k_ft_in1k,224,38.458,61.542,60.439,39.561,28.59,0.875,bicubic,-44.442,-35.851,+191\nxcit_small_12_p8_224.fb_dist_in1k,224,38.454,61.546,58.836,41.164,26.21,1.000,bicubic,-45.798,-38.038,-32\nrexnetr_300.sw_in12k_ft_in1k,288,38.411,61.589,60.569,39.431,34.81,1.000,bicubic,-46.171,-36.845,-91\nconvformer_s36.sail_in1k,224,38.403,61.597,57.720,42.280,40.01,1.000,bicubic,-45.665,-39.024,-9\nfastvit_ma36.apple_dist_in1k,256,38.386,61.614,58.486,41.514,44.07,0.950,bicubic,-46.223,-38.510,-97\nhgnetv2_b2.ssld_stage2_ft_in1k,288,38.378,61.622,59.822,40.178,11.22,1.000,bicubic,-43.984,-36.590,+277\nseresnextaa101d_32x8d.ah_in1k,224,38.366,61.634,58.356,41.644,93.59,0.950,bicubic,-45.534,-38.292,+12\ntf_efficientnet_b5.ra_in1k,456,38.354,61.646,59.930,40.070,30.39,0.934,bicubic,-45.480,-36.834,+24\ndm_nfnet_f1.dm_in1k,224,38.283,61.717,58.132,41.868,132.63,0.910,bicubic,-45.161
,-38.502,+100\ndeit_base_distilled_patch16_384.fb_in1k,384,38.250,61.750,57.796,42.204,87.63,1.000,bicubic,-47.188,-39.536,-205\nconvnext_base.fb_in1k,224,38.236,61.764,58.219,41.781,88.59,0.875,bicubic,-45.602,-38.527,+20\nseresnet152d.ra2_in1k,256,38.144,61.856,58.423,41.577,66.84,0.950,bicubic,-45.558,-38.197,+53\nxcit_large_24_p8_224.fb_in1k,224,38.138,61.862,57.871,42.129,188.93,1.000,bicubic,-46.242,-39.047,-61\nvit_base_patch16_384.orig_in21k_ft_in1k,384,38.126,61.874,60.434,39.566,86.86,1.000,bicubic,-46.086,-36.776,-38\nhiera_small_abswin_256.sbb2_pd_e200_in12k_ft_in1k,256,38.085,61.915,58.286,41.714,34.36,0.950,bicubic,-46.479,-38.822,-101\nmobilenetv4_hybrid_large.ix_e600_r384_in1k,384,38.055,61.945,58.671,41.329,37.76,0.950,bicubic,-45.941,-38.043,-10\nresnetv2_152x2_bit.goog_in21k_ft_in1k,448,38.006,61.994,61.172,38.828,236.34,1.000,bilinear,-46.462,-36.260,-89\nrepvit_m2_3.dist_450e_in1k,224,37.989,62.011,58.174,41.826,23.69,0.950,bicubic,-45.757,-38.476,+37\npvt_v2_b4.in1k,224,37.953,62.047,58.213,41.787,62.56,0.900,bicubic,-45.757,-38.401,+41\nefficientnet_x_b5.sw_r448_e450_in1k,576,37.943,62.057,58.606,41.394,33.44,1.000,bicubic,-46.985,-38.706,-149\nmambaout_small_rw.sw_e450_in1k,288,37.918,62.082,57.861,42.139,48.50,1.000,bicubic,-46.686,-39.235,-111\nregnetz_040.ra3_in1k,256,37.912,62.088,57.944,42.056,27.12,1.000,bicubic,-45.732,-38.738,+54\ncait_s24_384.fb_dist_in1k,384,37.908,62.092,58.068,41.932,47.06,1.000,bicubic,-47.140,-39.282,-172\ncoat_lite_medium.in1k,224,37.896,62.104,57.824,42.176,44.57,0.900,bicubic,-45.706,-38.890,+57\nconvnextv2_nano.fcmae_ft_in22k_in1k_384,384,37.886,62.114,59.443,40.557,15.62,1.000,bicubic,-45.480,-37.297,+100\nresnet152d.ra2_in1k,320,37.875,62.125,58.384,41.616,60.21,1.000,bicubic,-45.809,-38.314,+43\nnextvit_large.bd_in1k,224,37.867,62.133,58.213,41.787,57.87,0.950,bicubic,-45.787,-38.469,+47\nconvformer_s36.sail_in1k_384,384,37.814,62.186,57.478,42.522,40.01,1.000,bicubic,-47.566,-39.990,-215\nfasternet_l.in1
k,224,37.764,62.236,57.458,42.542,93.47,1.000,bicubic,-45.752,-39.200,+64\nhiera_small_224.mae_in1k_ft_in1k,224,37.763,62.237,57.496,42.504,35.01,0.900,bicubic,-46.125,-39.172,-9\nresnetrs420.tf_in1k,416,37.757,62.243,58.221,41.779,191.89,1.000,bicubic,-47.253,-38.889,-176\ndeit3_medium_patch16_224.fb_in1k,224,37.741,62.259,57.105,42.895,38.85,0.900,bicubic,-45.349,-39.007,+134\nresnetrs350.tf_in1k,384,37.739,62.261,58.136,41.864,163.96,1.000,bicubic,-46.971,-39.120,-136\nxcit_small_24_p16_224.fb_dist_in1k,224,37.721,62.279,57.356,42.644,47.67,1.000,bicubic,-46.159,-39.378,-9\ncaformer_s18.sail_in1k_384,384,37.682,62.318,57.602,42.398,26.34,1.000,bicubic,-47.354,-39.752,-182\nmambaout_small.in1k,224,37.674,62.326,57.525,42.475,48.49,1.000,bicubic,-46.420,-39.101,-44\nvit_betwixt_patch16_reg1_gap_256.sbb_in1k,256,37.658,62.342,56.782,43.218,60.40,0.950,bicubic,-45.994,-39.764,+38\nconvnext_nano.r384_in12k_ft_in1k,384,37.635,62.365,58.893,41.107,15.59,1.000,bicubic,-45.699,-37.731,+95\nregnety_640.seer_ft_in1k,384,37.631,62.369,59.842,40.158,281.38,1.000,bicubic,-46.293,-37.084,-27\nxcit_small_12_p16_384.fb_dist_in1k,384,37.609,62.391,57.785,42.215,26.25,1.000,bicubic,-47.125,-39.351,-147\npvt_v2_b5.in1k,224,37.546,62.454,57.256,42.744,81.96,0.900,bicubic,-46.214,-39.378,+12\nmambaout_base.in1k,224,37.539,62.461,56.645,43.355,84.81,1.000,bicubic,-46.655,-40.023,-63\nresnet200d.ra2_in1k,320,37.527,62.473,58.315,41.685,64.69,1.000,bicubic,-46.449,-38.505,-35\nvit_betwixt_patch16_reg4_gap_256.sbb_in1k,256,37.476,62.524,57.071,42.929,60.40,0.950,bicubic,-46.234,-39.545,+17\nmaxvit_rmlp_tiny_rw_256.sw_in1k,256,37.407,62.593,57.187,42.813,29.15,0.950,bicubic,-46.819,-39.595,-71\nefficientvit_b3.r224_in1k,224,37.370,62.630,57.120,42.880,48.65,0.950,bicubic,-46.074,-39.218,+64\nresnetv2_152x2_bit.goog_teacher_in21k_ft_in1k,224,37.356,62.644,59.461,40.539,236.34,0.875,bicubic,-45.562,-37.107,+142\nregnety_1280.seer_ft_in1k,384,37.350,62.650,59.154,40.846,644.81,1.000,bicubic,-
47.076,-37.932,-105\nregnetz_d8_evos.ch_in1k,256,37.330,62.670,58.011,41.989,23.46,0.950,bicubic,-46.074,-38.651,+70\nmobilenetv4_hybrid_large.e600_r384_in1k,384,37.320,62.680,57.651,42.349,37.76,0.950,bicubic,-46.506,-39.097,-15\nconvnext_base.fb_in1k,288,37.313,62.687,57.325,42.675,88.59,1.000,bicubic,-47.153,-39.657,-122\nresnest269e.in1k,416,37.303,62.697,57.450,42.550,110.93,0.928,bicubic,-47.239,-39.538,-134\nvit_small_r26_s32_224.augreg_in21k_ft_in1k,224,37.250,62.750,59.064,40.936,36.43,0.900,bicubic,-44.606,-37.020,+309\nresmlp_big_24_224.fb_in22k_ft_in1k,224,37.240,62.760,58.172,41.828,129.14,0.875,bicubic,-47.174,-38.936,-106\nedgenext_base.usi_in1k,256,37.220,62.780,57.574,42.426,18.51,0.950,bicubic,-46.464,-39.176,+15\npit_b_distilled_224.in1k,224,37.208,62.792,56.505,43.495,74.79,0.900,bicubic,-46.566,-39.963,-7\nrdnet_tiny.nv_in1k,224,37.206,62.794,57.382,42.618,23.86,0.900,bicubic,-45.607,-38.650,+150\nrepvit_m2_3.dist_300e_in1k,224,37.203,62.797,57.203,42.797,23.69,0.950,bicubic,-46.322,-39.329,+33\ncait_s24_224.fb_dist_in1k,224,37.153,62.847,56.716,43.284,46.92,1.000,bicubic,-46.331,-39.870,+41\nvit_medium_patch16_reg4_gap_256.sbb_in1k,256,37.148,62.852,57.305,42.695,38.88,0.950,bicubic,-46.318,-39.317,+45\ndm_nfnet_f2.dm_in1k,352,37.136,62.864,56.993,43.007,193.78,0.920,bicubic,-48.048,-40.347,-228\npvt_v2_b3.in1k,224,37.128,62.872,57.313,42.687,45.24,0.900,bicubic,-45.992,-39.247,+99\ntiny_vit_21m_224.in1k,224,37.120,62.880,57.435,42.565,21.20,0.950,bicubic,-46.148,-39.167,+76\nmobilenetv4_hybrid_large.ix_e600_r384_in1k,448,37.116,62.884,57.667,42.333,37.76,1.000,bicubic,-47.236,-39.221,-108\nefficientformer_l7.snap_dist_in1k,224,37.114,62.886,56.908,43.092,82.23,0.950,bicubic,-46.282,-39.624,+56\nvolo_d1_384.sail_in1k,384,37.110,62.890,57.132,42.868,26.78,1.000,bicubic,-48.152,-40.086,-243\nfastvit_sa36.apple_dist_in1k,256,37.104,62.896,57.120,42.880,31.53,0.900,bicubic,-46.924,-39.752,-62\nvit_base_patch32_224.augreg_in21k_ft_in1k,224,37.098,62
.902,59.282,40.718,88.22,0.900,bicubic,-43.634,-36.292,+459\nresnetaa101d.sw_in12k_ft_in1k,288,37.090,62.910,57.887,42.113,44.57,1.000,bicubic,-47.024,-39.227,-81\ntf_efficientnet_b3.ap_in1k,300,37.085,62.915,57.260,42.740,12.23,0.904,bicubic,-44.757,-38.354,+295\nefficientnetv2_rw_s.ra2_in1k,384,37.075,62.925,56.824,43.176,23.94,1.000,bicubic,-46.755,-39.888,-38\nmaxvit_tiny_tf_224.in1k,224,37.067,62.933,56.928,43.072,30.92,0.950,bicubic,-46.359,-39.656,+43\nconvnext_small.fb_in1k,224,37.063,62.937,57.128,42.872,50.22,0.875,bicubic,-46.081,-39.304,+84\nswinv2_base_window16_256.ms_in1k,256,37.000,63.000,56.138,43.862,87.92,0.900,bicubic,-47.608,-40.926,-167\nregnetz_040_h.ra3_in1k,320,36.990,63.010,57.260,42.740,28.94,1.000,bicubic,-47.508,-39.746,-152\nxcit_small_12_p16_224.fb_dist_in1k,224,36.949,63.051,56.733,43.267,26.25,1.000,bicubic,-46.405,-39.739,+52\nrexnetr_200.sw_in12k_ft_in1k,224,36.935,63.065,58.806,41.194,16.52,0.950,bicubic,-45.667,-37.590,+165\nfasternet_m.in1k,224,36.906,63.094,56.741,43.259,53.52,1.000,bicubic,-45.942,-39.647,+121\nvolo_d1_224.sail_in1k,224,36.906,63.094,56.661,43.339,26.63,0.960,bicubic,-47.242,-40.111,-94\ninception_next_small.sail_in1k,224,36.884,63.116,56.765,43.235,49.37,0.875,bicubic,-46.700,-39.833,+5\nseresnext101_32x8d.ah_in1k,224,36.798,63.202,55.708,44.292,93.57,0.950,bicubic,-46.820,-40.705,-2\nseresnet152d.ra2_in1k,320,36.796,63.204,56.714,43.286,66.84,1.000,bicubic,-47.548,-40.330,-125\nefficientformerv2_l.snap_dist_in1k,224,36.784,63.216,56.639,43.361,26.32,0.950,bicubic,-46.858,-39.907,-6\neca_nfnet_l1.ra2_in1k,256,36.715,63.285,57.122,42.878,41.41,0.900,bicubic,-46.549,-39.562,+56\nseresnext101d_32x8d.ah_in1k,288,36.697,63.303,56.423,43.577,93.59,1.000,bicubic,-47.682,-40.241,-134\nmaxxvit_rmlp_small_rw_256.sw_in1k,256,36.692,63.308,56.012,43.988,66.01,0.950,bicubic,-47.942,-41.050,-180\nregnetz_d8.ra3_in1k,256,36.686,63.314,58.032,41.968,23.37,0.940,bicubic,-46.860,-38.714,+2\nresnetrs200.tf_in1k,256,36.656,63.344
,56.672,43.328,93.21,1.000,bicubic,-47.228,-40.090,-66\nvolo_d2_224.sail_in1k,224,36.619,63.381,56.480,43.520,58.68,0.960,bicubic,-48.585,-40.884,-260\ncaformer_s18.sail_in1k,224,36.617,63.383,55.845,44.155,26.34,1.000,bicubic,-47.047,-40.681,-19\nfastvit_ma36.apple_in1k,256,36.572,63.428,56.521,43.479,44.07,0.950,bicubic,-47.312,-40.235,-68\nxception65p.ra3_in1k,299,36.568,63.432,56.435,43.565,39.82,0.940,bicubic,-46.592,-39.935,+61\nmobilenetv4_hybrid_large.e600_r384_in1k,448,36.558,63.442,56.719,43.281,37.76,1.000,bicubic,-47.706,-40.237,-126\nseresnextaa101d_32x8d.ah_in1k,288,36.548,63.452,56.374,43.626,93.59,1.000,bicubic,-48.032,-40.688,-181\nfastvit_sa36.apple_in1k,256,36.540,63.460,55.994,44.006,31.53,0.900,bicubic,-46.962,-40.638,+1\nnextvit_base.bd_in1k,224,36.538,63.462,56.484,43.516,44.82,0.950,bicubic,-46.960,-40.164,+1\nese_vovnet57b.ra4_e3600_r256_in1k,256,36.521,63.479,56.409,43.591,38.61,0.950,bicubic,-45.939,-39.597,+172\nfocalnet_base_srf.ms_in1k,224,36.466,63.534,56.199,43.801,88.15,0.900,bicubic,-47.342,-40.461,-58\nresnetv2_101x3_bit.goog_in21k_ft_in1k,448,36.444,63.556,59.034,40.966,387.93,1.000,bilinear,-47.986,-38.346,-158\nregnetz_d32.ra3_in1k,320,36.428,63.572,57.372,42.628,27.58,0.950,bicubic,-47.600,-39.478,-96\nvolo_d2_384.sail_in1k,384,36.426,63.574,56.295,43.705,58.87,1.000,bicubic,-49.628,-41.287,-357\ndeit_base_distilled_patch16_224.fb_in1k,224,36.416,63.584,56.670,43.330,87.34,0.900,bicubic,-46.968,-39.822,+21\nefficientnet_b3.ra2_in1k,320,36.414,63.586,56.831,43.169,12.23,1.000,bicubic,-45.882,-39.105,+194\ncait_xs24_384.fb_dist_in1k,384,36.409,63.591,56.920,43.080,26.67,1.000,bicubic,-47.655,-39.968,-107\ngcvit_base.in1k,224,36.379,63.621,55.900,44.100,90.32,0.875,bicubic,-48.077,-41.318,-173\nvit_little_patch16_reg4_gap_256.sbb_in1k,256,36.375,63.625,56.727,43.273,22.52,0.950,bicubic,-46.145,-39.385,+152\ndm_nfnet_f1.dm_in1k,320,36.358,63.642,55.717,44.283,132.63,0.910,bicubic,-48.342,-41.461,-208\nresnetrs270.tf_in1k,352,36.356
,63.644,56.560,43.440,129.86,1.000,bicubic,-48.082,-40.412,-170\nvit_medium_patch16_reg1_gap_256.sbb_in1k,256,36.318,63.682,56.283,43.717,38.88,0.950,bicubic,-47.144,-40.265,-3\ntresnet_m.miil_in21k_ft_in1k,224,36.269,63.731,55.790,44.210,31.39,0.875,bilinear,-46.821,-40.494,+57\nmixer_b16_224.miil_in21k_ft_in1k,224,36.251,63.749,55.965,44.035,59.88,0.875,bilinear,-46.063,-40.021,+180\nconvnext_small.fb_in1k,288,36.251,63.749,55.934,44.066,50.22,1.000,bicubic,-47.453,-40.857,-48\nconvformer_s18.sail_in1k_384,384,36.224,63.776,56.051,43.949,26.77,1.000,bicubic,-48.168,-41.059,-163\nmambaout_small.in1k,288,36.193,63.807,55.849,44.151,48.49,1.000,bicubic,-48.319,-41.121,-192\nregnety_160.deit_in1k,224,36.181,63.819,56.417,43.583,83.59,0.950,bicubic,-46.819,-40.085,+63\ndeit3_small_patch16_384.fb_in1k,384,36.163,63.837,55.593,44.407,22.21,1.000,bicubic,-47.269,-41.081,-5\nmobilenetv4_conv_large.e600_r384_in1k,384,36.130,63.870,56.232,43.768,32.59,0.950,bicubic,-46.844,-40.012,+66\nmvitv2_tiny.fb_in1k,224,36.120,63.880,55.134,44.866,24.17,0.900,bicubic,-46.288,-41.016,+156\nfocalnet_base_lrf.ms_in1k,224,36.114,63.886,55.597,44.403,88.75,0.900,bicubic,-47.714,-41.013,-85\ntf_efficientnet_b2.ns_jft_in1k,260,36.110,63.890,57.496,42.504,9.11,0.890,bicubic,-46.302,-38.742,+153\nconvnextv2_tiny.fcmae_ft_in1k,224,36.077,63.923,55.564,44.436,28.64,0.875,bicubic,-46.871,-40.722,+65\nresnet152.a1h_in1k,224,36.067,63.933,55.554,44.446,60.19,0.950,bicubic,-46.751,-40.570,+84\nregnetz_040.ra3_in1k,320,36.059,63.941,55.749,44.251,27.12,1.000,bicubic,-48.183,-41.187,-151\nregnety_320.seer_ft_in1k,384,36.043,63.957,58.467,41.533,145.05,1.000,bicubic,-47.281,-38.245,+11\necaresnet101d.miil_in1k,224,36.027,63.973,56.195,43.805,44.57,0.875,bicubic,-46.153,-39.867,+191\nmobilenetv4_conv_large.e600_r384_in1k,448,35.955,64.045,56.014,43.986,32.59,1.000,bicubic,-47.463,-40.598,-10\nmobilenetv4_conv_aa_large.e600_r384_in1k,384,35.945,64.055,55.442,44.558,32.59,0.950,bicubic,-47.319,-40.948,+14\
nmambaout_base.in1k,288,35.943,64.057,54.829,45.171,84.81,1.000,bicubic,-48.787,-42.105,-234\nresnest200e.in1k,320,35.939,64.061,55.865,44.135,70.20,0.909,bicubic,-47.941,-41.015,-105\nnextvit_base.bd_in1k_384,384,35.927,64.073,56.226,43.774,44.82,1.000,bicubic,-48.779,-40.994,-232\nresnetrs152.tf_in1k,256,35.876,64.124,55.432,44.568,86.62,1.000,bicubic,-47.024,-41.184,+62\ndm_nfnet_f0.dm_in1k,192,35.864,64.136,56.537,43.463,71.49,0.900,bicubic,-45.572,-39.135,+287\nresnet18.fb_swsl_ig1b_ft_in1k,224,35.862,64.138,58.461,41.539,11.69,0.875,bilinear,-37.424,-33.293,+918\neca_nfnet_l1.ra2_in1k,320,35.849,64.151,55.939,44.061,41.41,1.000,bicubic,-48.213,-41.087,-136\nsequencer2d_l.in1k,224,35.823,64.177,55.747,44.253,54.30,0.875,bicubic,-47.583,-40.757,-17\ngcvit_small.in1k,224,35.780,64.220,54.908,45.092,51.09,0.875,bicubic,-48.124,-42.056,-119\nvit_base_patch16_224.orig_in21k_ft_in1k,224,35.776,64.224,57.399,42.601,86.57,0.900,bicubic,-46.006,-38.637,+236\nvit_relpos_medium_patch16_cls_224.sw_in1k,224,35.727,64.273,54.939,45.061,38.76,0.900,bicubic,-46.831,-41.137,+117\nese_vovnet57b.ra4_e3600_r256_in1k,320,35.723,64.277,55.358,44.642,38.61,1.000,bicubic,-47.575,-41.164,-2\nresnet101d.ra2_in1k,256,35.721,64.279,55.399,44.601,44.57,0.950,bicubic,-46.533,-40.671,+164\nxcit_small_12_p8_224.fb_in1k,224,35.587,64.413,55.619,44.381,26.21,1.000,bicubic,-47.767,-40.801,-14\nefficientnet_b3.ra2_in1k,288,35.536,64.464,55.920,44.080,12.23,0.875,bicubic,-45.936,-39.776,+269\nflexivit_base.1200ep_in1k,240,35.519,64.481,53.843,46.157,86.59,0.950,bicubic,-49.159,-43.137,-243\nvit_small_patch16_384.augreg_in21k_ft_in1k,384,35.517,64.484,57.553,42.447,22.20,1.000,bicubic,-48.297,-39.553,-107\ncoat_small.in1k,224,35.517,64.484,55.177,44.823,21.69,0.900,bicubic,-46.861,-41.037,+137\nxcit_small_24_p8_224.fb_in1k,224,35.515,64.485,54.778,45.222,47.63,1.000,bicubic,-48.339,-41.858,-118\nxcit_large_24_p16_224.fb_in1k,224,35.499,64.501,54.758,45.242,189.10,1.000,bicubic,-47.387,-41.122,+49\n
convnextv2_nano.fcmae_ft_in22k_in1k,224,35.497,64.503,56.378,43.622,15.62,0.875,bicubic,-46.541,-39.796,+187\ntf_efficientnet_b5.aa_in1k,456,35.475,64.525,56.199,43.801,30.39,0.934,bicubic,-48.221,-40.513,-81\nswinv2_base_window8_256.ms_in1k,256,35.464,64.537,54.635,45.365,87.92,0.900,bicubic,-48.782,-42.267,-179\nregnetz_d8_evos.ch_in1k,320,35.462,64.538,55.764,44.236,23.46,1.000,bicubic,-48.677,-41.254,-165\nxcit_medium_24_p8_224.fb_in1k,224,35.442,64.558,54.837,45.163,84.32,1.000,bicubic,-48.302,-41.557,-96\ngc_efficientnetv2_rw_t.agc_in1k,224,35.436,64.564,55.212,44.788,13.68,1.000,bicubic,-45.840,-40.368,+291\nswinv2_small_window16_256.ms_in1k,256,35.422,64.578,54.642,45.358,49.73,0.900,bicubic,-48.794,-42.231,-179\ndm_nfnet_f0.dm_in1k,256,35.407,64.594,55.525,44.475,71.49,0.900,bicubic,-48.062,-40.897,-52\nefficientnetv2_rw_t.ra2_in1k,224,35.393,64.607,54.982,45.018,13.65,1.000,bicubic,-45.977,-40.568,+275\nresnest101e.in1k,256,35.383,64.617,55.786,44.214,48.28,0.875,bilinear,-47.533,-40.524,+33\nresnet152.a1h_in1k,288,35.363,64.637,54.650,45.350,60.19,1.000,bicubic,-48.131,-41.872,-60\nconvit_base.fb_in1k,224,35.318,64.682,54.939,45.061,86.54,0.875,bicubic,-46.978,-41.185,+139\nconvnext_tiny_hnf.a2h_in1k,224,35.302,64.698,53.860,46.140,28.59,0.950,bicubic,-46.928,-42.000,+146\nefficientformer_l3.snap_dist_in1k,224,35.298,64.702,54.493,45.507,31.41,0.950,bicubic,-47.264,-41.757,+93\nregnety_080.ra3_in1k,224,35.279,64.721,55.425,44.575,39.18,0.950,bicubic,-47.887,-41.055,-12\nfocalnet_small_lrf.ms_in1k,224,35.279,64.721,54.922,45.078,50.34,0.900,bicubic,-48.229,-41.664,-69\nconvformer_s18.sail_in1k,224,35.259,64.741,54.611,45.389,26.77,1.000,bicubic,-47.709,-41.635,+20\nxcit_tiny_24_p8_224.fb_dist_in1k,224,35.243,64.757,55.269,44.731,12.11,1.000,bicubic,-47.335,-40.917,+86\nedgenext_base.usi_in1k,320,35.241,64.759,55.136,44.864,18.51,1.000,bicubic,-48.729,-41.634,-157\nnfnet_l0.ra2_in1k,224,35.220,64.780,55.961,44.039,35.07,0.900,bicubic,-46.650,-40.073,+192\nf
astvit_sa24.apple_dist_in1k,256,35.216,64.784,54.650,45.350,21.55,0.900,bicubic,-48.152,-41.924,-44\nflexivit_base.600ep_in1k,240,35.151,64.849,53.672,46.328,86.59,0.950,bicubic,-49.413,-43.238,-254\nswiftformer_l3.dist_in1k,224,35.118,64.882,54.530,45.470,28.49,0.950,bicubic,-47.876,-41.706,+10\nregnetz_c16.ra3_in1k,256,35.114,64.886,56.303,43.697,13.46,0.940,bicubic,-47.048,-39.921,+149\ntwins_svt_large.in1k,224,35.106,64.894,54.709,45.291,99.27,0.900,bicubic,-48.612,-41.891,-115\nrepvgg_b3g4.rvgg_in1k,224,35.078,64.922,54.756,45.244,83.83,0.875,bilinear,-45.146,-40.344,+434\nconvnextv2_tiny.fcmae_ft_in1k,288,35.055,64.945,54.206,45.794,28.64,1.000,bicubic,-48.423,-42.512,-73\nrepvgg_b3.rvgg_in1k,224,35.027,64.973,54.523,45.477,123.09,0.875,bilinear,-45.501,-40.805,+379\nregnety_032.ra_in1k,224,35.021,64.979,54.961,45.039,19.44,0.950,bicubic,-46.927,-41.011,+176\nregnetz_d8.ra3_in1k,320,35.019,64.981,55.965,44.035,23.37,1.000,bicubic,-49.025,-41.039,-177\nrepvit_m1_5.dist_450e_in1k,224,35.002,64.998,54.297,45.703,14.64,0.950,bicubic,-47.518,-41.961,+81\nrexnetr_200.sw_in12k_ft_in1k,288,34.937,65.063,55.908,44.092,16.52,1.000,bicubic,-48.223,-40.598,-27\nrepvgg_d2se.rvgg_in1k,320,34.925,65.075,53.524,46.476,133.33,1.000,bilinear,-48.625,-43.138,-93\nxcit_tiny_24_p8_384.fb_dist_in1k,384,34.903,65.097,55.110,44.890,12.11,1.000,bicubic,-48.881,-41.606,-136\nflexivit_base.300ep_in1k,240,34.876,65.124,53.202,46.798,86.59,0.950,bicubic,-49.526,-43.682,-233\nresnet101d.ra2_in1k,320,34.874,65.126,54.181,45.819,44.57,1.000,bicubic,-48.146,-42.275,-9\nrepvit_m1_5.dist_300e_in1k,224,34.872,65.128,54.385,45.615,14.64,0.950,bicubic,-47.510,-41.641,+96\ncoatnet_1_rw_224.sw_in1k,224,34.856,65.144,53.438,46.562,41.72,0.950,bicubic,-48.764,-42.944,-107\ncoatnet_rmlp_1_rw_224.sw_in1k,224,34.809,65.191,53.962,46.038,41.69,0.950,bicubic,-48.541,-42.489,-57\nswin_s3_base_224.ms_in1k,224,34.805,65.195,53.731,46.269,71.13,0.900,bicubic,-49.115,-42.919,-174\nregnetz_c16_evos.ch_in1k,256,3
4.803,65.197,56.000,44.000,13.49,0.950,bicubic,-47.137,-40.150,+167\nseresnext101_32x8d.ah_in1k,288,34.793,65.207,53.528,46.472,93.57,1.000,bicubic,-49.399,-43.604,-208\nresmlp_big_24_224.fb_distilled_in1k,224,34.788,65.213,54.642,45.358,129.14,0.875,bicubic,-48.806,-42.016,-107\nmaxvit_tiny_rw_224.sw_in1k,224,34.785,65.215,53.357,46.643,29.06,0.950,bicubic,-48.737,-43.131,-99\nmobilenetv4_hybrid_medium.ix_e550_r384_in1k,384,34.746,65.254,55.348,44.652,11.07,0.950,bicubic,-48.206,-41.122,-7\nvit_base_patch16_rpn_224.sw_in1k,224,34.727,65.273,54.650,45.350,86.54,0.900,bicubic,-47.491,-41.346,+115\nvit_relpos_base_patch16_clsgap_224.sw_in1k,224,34.719,65.281,54.234,45.766,86.43,0.900,bicubic,-48.039,-41.944,+20\nsequencer2d_m.in1k,224,34.711,65.289,54.010,45.990,38.31,0.875,bicubic,-48.125,-42.266,+5\nvit_large_patch32_384.orig_in21k_ft_in1k,384,34.709,65.291,55.719,44.281,306.63,1.000,bicubic,-46.799,-40.367,+204\nhgnetv2_b1.ssld_stage1_in22k_in1k,224,34.703,65.297,55.733,44.267,6.34,0.965,bicubic,-43.377,-38.439,+638\nmobilenetv4_conv_medium.e250_r384_in12k_ft_in1k,384,34.691,65.309,55.696,44.304,9.72,0.950,bicubic,-48.085,-40.752,+13\nmambaout_tiny.in1k,224,34.685,65.315,54.564,45.436,26.55,1.000,bicubic,-48.057,-41.170,+16\ndeit3_small_patch16_224.fb_in1k,224,34.685,65.315,53.141,46.859,22.06,0.900,bicubic,-46.697,-42.477,+230\nfocalnet_small_srf.ms_in1k,224,34.673,65.326,54.416,45.584,49.89,0.900,bicubic,-48.755,-42.016,-88\nresnet101.a1h_in1k,224,34.673,65.326,54.114,45.886,44.55,0.950,bicubic,-47.277,-41.682,+150\nconvnext_nano.in12k_ft_in1k,224,34.668,65.332,56.102,43.898,15.59,0.950,bicubic,-47.638,-40.235,+91\ndavit_tiny.msft_in1k,224,34.666,65.334,54.332,45.668,28.36,0.950,bicubic,-48.028,-41.932,+19\necaresnet101d.miil_in1k,288,34.664,65.336,54.511,45.489,44.57,0.950,bicubic,-48.342,-41.785,-30\nvit_relpos_base_patch16_224.sw_in1k,224,34.624,65.376,54.271,45.729,86.43,0.900,bicubic,-47.858,-41.873,+60\nresnext101_32x16d.fb_ssl_yfcc100m_ft_in1k,224,34.620,6
5.380,55.951,44.049,194.03,0.875,bilinear,-47.236,-40.087,+154\nrepvgg_b2g4.rvgg_in1k,224,34.609,65.391,54.798,45.202,61.76,0.875,bilinear,-44.787,-39.882,+489\nresnetv2_101.a1h_in1k,224,34.593,65.407,53.173,46.827,44.54,0.950,bicubic,-47.449,-42.693,+125\nmobilenetv4_conv_aa_large.e600_r384_in1k,480,34.587,65.413,54.027,45.973,32.59,1.000,bicubic,-49.239,-42.693,-174\ngcvit_tiny.in1k,224,34.542,65.458,53.220,46.780,28.22,0.875,bicubic,-48.858,-43.174,-91\nresnetrs200.tf_in1k,320,34.536,65.464,54.271,45.729,93.21,1.000,bicubic,-49.912,-42.601,-277\nresnext101_64x4d.c1_in1k,224,34.534,65.466,53.336,46.664,83.46,0.950,bicubic,-47.912,-42.584,+58\nmobilenetv4_conv_large.e500_r256_in1k,256,34.528,65.472,54.336,45.664,32.59,0.950,bicubic,-47.336,-41.360,+146\npoolformerv2_m48.sail_in1k,224,34.502,65.498,53.980,46.020,73.35,1.000,bicubic,-48.109,-42.088,+24\nregnetv_064.ra3_in1k,224,34.501,65.499,55.279,44.721,30.58,0.950,bicubic,-48.617,-41.433,-53\nhiera_tiny_224.mae_in1k_ft_in1k,224,34.481,65.519,53.349,46.651,27.91,0.900,bicubic,-48.345,-42.839,-14\ntf_efficientnetv2_s.in1k,300,34.461,65.539,54.255,45.745,21.46,1.000,bicubic,-48.701,-42.079,-67\nconvnextv2_nano.fcmae_ft_in22k_in1k,288,34.424,65.576,55.010,44.990,15.62,1.000,bicubic,-48.242,-41.536,+7\nefficientvit_b2.r256_in1k,256,34.420,65.580,53.591,46.409,24.33,1.000,bicubic,-48.292,-42.501,+1\nresnetrs152.tf_in1k,320,34.406,65.594,53.532,46.468,86.62,1.000,bicubic,-49.304,-43.140,-159\nresnest50d_4s2x40d.in1k,224,34.357,65.643,54.623,45.377,30.42,0.875,bicubic,-46.789,-40.943,+242\npvt_v2_b2_li.in1k,224,34.322,65.678,54.088,45.912,22.55,0.900,bicubic,-47.878,-42.020,+90\nvit_small_patch16_rope_mixed_ape_224.naver_in1k,224,34.275,65.725,52.068,47.932,22.06,0.900,bicubic,-46.699,-42.908,+264\ncrossvit_18_dagger_408.in1k,408,34.239,65.761,53.094,46.906,44.61,1.000,bicubic,-49.959,-43.740,-247\nxcit_medium_24_p16_224.fb_in1k,224,34.235,65.765,53.155,46.845,84.40,1.000,bicubic,-48.395,-42.823,+9\ntf_efficientnetv2_b3.i
n21k_ft_in1k,240,34.229,65.771,55.786,44.214,14.36,0.900,bicubic,-46.849,-40.134,+247\nresnetrs101.tf_in1k,192,34.220,65.780,54.550,45.450,63.62,0.940,bicubic,-46.486,-40.690,+305\nefficientvit_b2.r288_in1k,288,34.190,65.810,53.497,46.503,24.33,1.000,bicubic,-48.888,-42.789,-60\ntf_efficientnet_b1.ns_jft_in1k,240,34.184,65.816,55.513,44.487,7.79,0.882,bicubic,-47.204,-40.027,+197\npit_s_distilled_224.in1k,224,34.166,65.834,53.369,46.631,24.04,0.900,bicubic,-47.659,-42.351,+138\nefficientnetv2_rw_t.ra2_in1k,288,34.165,65.835,53.155,46.845,13.65,1.000,bicubic,-48.189,-43.023,+54\nmobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k,256,34.127,65.873,54.554,45.446,11.07,0.950,bicubic,-48.233,-41.702,+51\ntf_efficientnet_b4.aa_in1k,380,34.115,65.885,54.212,45.788,19.34,0.922,bicubic,-48.925,-42.086,-62\nfastvit_sa24.apple_in1k,256,34.096,65.904,53.760,46.240,21.55,0.900,bicubic,-48.610,-42.518,-13\nvit_small_patch16_rope_mixed_224.naver_in1k,224,34.088,65.912,51.506,48.494,21.99,0.900,bicubic,-47.164,-43.550,+211\ntwins_pcpvt_large.in1k,224,34.080,65.920,54.118,45.882,60.99,0.900,bicubic,-49.078,-42.252,-82\nresnetv2_101.a1h_in1k,288,34.078,65.922,52.312,47.688,44.54,1.000,bicubic,-48.924,-44.132,-61\nmobilenetv4_hybrid_medium.ix_e550_r384_in1k,448,34.070,65.930,54.432,45.568,11.07,1.000,bicubic,-49.320,-42.334,-117\nresnext101_32x8d.fb_ssl_yfcc100m_ft_in1k,224,34.064,65.936,55.590,44.410,88.79,0.875,bilinear,-47.556,-40.446,+147\ntf_efficientnet_b6.aa_in1k,528,34.062,65.938,54.564,45.436,43.04,0.942,bicubic,-50.062,-42.326,-254\nefficientformerv2_s2.snap_dist_in1k,224,34.056,65.944,53.522,46.478,12.71,0.950,bicubic,-48.100,-42.390,+80\nmobilenetv4_conv_large.e500_r256_in1k,320,34.054,65.945,53.721,46.279,32.59,1.000,bicubic,-48.617,-42.589,-18\nxcit_small_24_p16_224.fb_in1k,224,34.039,65.961,53.271,46.729,47.67,1.000,bicubic,-48.549,-42.725,+4\nhgnetv2_b1.ssld_stage2_ft_in1k,224,34.035,65.965,54.855,45.145,6.34,0.965,bicubic,-44.849,-39.629,+513\nefficientnet_b3_pruned.in
1k,300,34.009,65.991,54.122,45.878,9.86,0.904,bicubic,-46.855,-41.144,+263\nnfnet_l0.ra2_in1k,288,34.002,65.999,54.358,45.642,35.07,1.000,bicubic,-48.765,-42.136,-34\nregnety_160.deit_in1k,288,33.996,66.004,53.542,46.458,83.59,1.000,bicubic,-49.708,-43.274,-184\ngc_efficientnetv2_rw_t.agc_in1k,288,33.994,66.006,53.281,46.719,13.68,1.000,bicubic,-48.492,-43.021,+16\nnextvit_small.bd_in1k_384,384,33.990,66.010,53.866,46.134,31.76,1.000,bicubic,-50.046,-43.128,-248\nresnext101_64x4d.tv_in1k,224,33.990,66.010,52.516,47.484,83.46,0.875,bilinear,-48.988,-43.758,-68\nresnext101_64x4d.c1_in1k,288,33.866,66.134,52.172,47.828,83.46,1.000,bicubic,-49.292,-44.454,-96\nswinv2_cr_small_ns_224.sw_in1k,224,33.842,66.158,52.634,47.366,49.70,0.900,bicubic,-49.648,-43.852,-154\npoolformerv2_s36.sail_in1k,224,33.838,66.162,53.666,46.334,30.79,1.000,bicubic,-47.738,-42.014,+137\nvit_small_patch16_rope_224.naver_in1k,224,33.831,66.169,51.656,48.344,21.98,0.900,bicubic,-47.391,-43.364,+197\nxcit_small_12_p16_224.fb_in1k,224,33.827,66.173,53.155,46.845,26.25,1.000,bicubic,-48.155,-42.659,+87\nconvnext_tiny.fb_in1k,224,33.825,66.175,53.654,46.346,28.59,0.875,bicubic,-48.241,-42.200,+72\nnextvit_small.bd_in1k,224,33.815,66.185,53.196,46.804,31.76,0.950,bicubic,-48.811,-43.028,-22\nconvnextv2_nano.fcmae_ft_in1k,224,33.805,66.195,53.967,46.033,15.62,0.875,bicubic,-48.051,-41.989,+105\nrepvit_m3.dist_in1k,224,33.789,66.211,53.116,46.884,10.68,0.950,bicubic,-47.705,-42.446,+145\nresnet101.a1h_in1k,288,33.770,66.230,53.139,46.861,44.55,1.000,bicubic,-49.026,-43.185,-52\nresnetv2_50x3_bit.goog_in21k_ft_in1k,448,33.744,66.256,55.939,44.061,217.32,1.000,bilinear,-50.238,-41.191,-255\nhgnetv2_b1.ssld_stage1_in22k_in1k,288,33.722,66.278,54.336,45.664,6.34,1.000,bicubic,-45.328,-40.554,+481\ntnt_b_patch16_224.in1k,224,33.713,66.287,53.104,46.896,65.43,0.900,bicubic,-49.151,-43.126,-66\nswin_s3_small_224.ms_in1k,224,33.689,66.311,52.408,47.592,49.74,0.900,bicubic,-50.089,-44.052,-217\nswinv2_small_windo
w8_256.ms_in1k,256,33.665,66.335,52.778,47.222,49.73,0.900,bicubic,-50.179,-43.872,-237\ncs3se_edgenet_x.c2ns_in1k,256,33.652,66.348,52.986,47.014,50.72,0.950,bicubic,-49.116,-43.320,-55\nresnet152.tv2_in1k,224,33.634,66.366,51.656,48.344,60.19,0.965,bilinear,-48.660,-44.344,+31\nresnet51q.ra2_in1k,288,33.595,66.405,53.041,46.959,35.70,1.000,bilinear,-48.761,-43.141,+16\ninception_next_tiny.sail_in1k,224,33.593,66.407,53.045,46.955,28.06,0.875,bicubic,-48.915,-42.955,-9\nnaflexvit_base_patch16_parfac_gap.e300_s576_in1k,384,33.557,66.443,51.823,48.177,86.46,1.000,bicubic,-49.911,-44.747,-167\nxcit_tiny_24_p16_384.fb_dist_in1k,384,33.546,66.454,52.789,47.211,12.12,1.000,bicubic,-49.034,-43.471,-22\nmobilenetv4_hybrid_medium.e500_r224_in1k,256,33.522,66.478,52.874,47.126,11.07,1.000,bicubic,-47.762,-42.872,+169\nvit_small_patch16_rope_ape_224.naver_in1k,224,33.520,66.480,50.683,49.317,22.06,0.900,bicubic,-47.496,-44.305,+210\nvit_relpos_medium_patch16_224.sw_in1k,224,33.506,66.494,52.630,47.370,38.75,0.900,bicubic,-48.956,-43.338,-8\nregnety_080.ra3_in1k,288,33.483,66.517,52.899,47.101,39.18,1.000,bicubic,-50.457,-43.985,-265\nmobilenetv4_hybrid_medium.e500_r224_in1k,224,33.471,66.529,53.149,46.851,11.07,0.950,bicubic,-46.961,-42.229,+294\ncs3edgenet_x.c2_in1k,288,33.465,66.535,52.943,47.057,47.82,1.000,bicubic,-49.269,-43.431,-61\nregnety_032.ra_in1k,288,33.455,66.545,52.766,47.234,19.44,1.000,bicubic,-49.269,-43.660,-61\nconvmixer_1536_20.in1k,224,33.449,66.551,53.055,46.945,51.63,0.960,bicubic,-47.933,-42.399,+149\ntf_efficientnetv2_b3.in21k_ft_in1k,300,33.447,66.553,54.986,45.014,14.36,0.900,bicubic,-49.187,-41.642,-47\nregnety_064.ra3_in1k,224,33.439,66.561,52.925,47.075,30.58,0.950,bicubic,-49.567,-43.599,-106\nsequencer2d_s.in1k,224,33.432,66.568,52.392,47.608,27.65,0.875,bicubic,-48.904,-43.639,+4\nregnetz_c16.ra3_in1k,320,33.394,66.606,54.230,45.770,13.46,1.000,bicubic,-49.268,-42.102,-57\ncrossvit_18_240.in1k,240,33.390,66.610,52.263,47.737,43.27,0.875,bicubi
c,-49.000,-43.801,-8\ngernet_l.idstcv_in1k,256,33.390,66.610,51.793,48.207,31.08,0.875,bilinear,-47.964,-43.949,+148\nvit_srelpos_medium_patch16_224.sw_in1k,224,33.380,66.620,52.493,47.507,38.74,0.900,bicubic,-48.876,-43.439,+16\nregnetv_040.ra3_in1k,224,33.373,66.627,52.950,47.050,20.64,0.950,bicubic,-49.067,-43.252,-16\ncrossvit_15_dagger_408.in1k,408,33.351,66.649,52.164,47.836,28.50,1.000,bicubic,-50.491,-44.622,-259\ntiny_vit_5m_224.dist_in22k_ft_in1k,224,33.343,66.657,55.057,44.943,5.39,0.950,bicubic,-47.531,-40.611,+217\nwide_resnet101_2.tv2_in1k,224,33.278,66.722,51.443,48.557,126.89,0.965,bilinear,-49.236,-44.569,-31\ntresnet_xl.miil_in1k,224,33.276,66.724,52.314,47.686,78.44,0.875,bilinear,-48.784,-43.618,+40\ncrossvit_18_dagger_240.in1k,240,33.274,66.726,52.218,47.782,44.27,0.875,bicubic,-49.220,-43.852,-30\nswin_tiny_patch4_window7_224.ms_in22k_ft_in1k,224,33.272,66.728,55.246,44.754,28.29,0.900,bicubic,-47.694,-40.774,+195\nresnext101_32x8d.tv2_in1k,176,33.255,66.745,51.318,48.682,88.79,0.875,bilinear,-48.735,-44.398,+47\nfbnetv3_g.ra2_in1k,240,33.245,66.755,53.729,46.271,16.62,0.950,bilinear,-48.001,-41.957,+152\nnest_base_jx.goog_in1k,224,33.221,66.779,51.831,48.169,67.72,0.875,bicubic,-50.331,-44.534,-211\nconvnext_tiny.fb_in1k,288,33.180,66.820,52.668,47.332,28.59,1.000,bicubic,-49.514,-43.476,-74\nfasternet_s.in1k,224,33.178,66.822,52.852,47.148,31.18,1.000,bicubic,-48.150,-42.826,+136\nresnest50d_1s4x24d.in1k,224,33.172,66.828,52.854,47.146,25.68,0.875,bicubic,-47.852,-42.484,+183\nconvnext_nano.in12k_ft_in1k,288,33.135,66.865,53.982,46.018,15.59,1.000,bicubic,-49.747,-42.580,-104\nresnet61q.ra2_in1k,288,33.121,66.879,51.730,48.270,36.85,1.000,bicubic,-49.411,-44.402,-46\nmambaout_tiny.in1k,288,33.099,66.900,52.980,47.020,26.55,1.000,bicubic,-50.352,-43.560,-196\nvit_relpos_medium_patch16_rpn_224.sw_in1k,224,33.096,66.904,52.394,47.606,38.73,0.900,bicubic,-49.218,-43.316,-13\nmaxxvit_rmlp_nano_rw_256.sw_in1k,256,33.084,66.916,51.903,48.097,16.78,0
.950,bicubic,-49.952,-44.441,-133\ntf_efficientnetv2_s.in1k,384,33.084,66.916,51.901,48.099,21.46,1.000,bicubic,-50.816,-44.801,-287\nrexnet_300.nav_in1k,224,33.078,66.922,52.377,47.623,34.71,0.875,bicubic,-49.702,-43.853,-97\ntwins_pcpvt_base.in1k,224,33.066,66.934,52.618,47.382,43.83,0.900,bicubic,-49.658,-43.732,-89\nnest_small_jx.goog_in1k,224,33.037,66.963,51.048,48.952,38.35,0.875,bicubic,-50.095,-45.282,-151\nhgnetv2_b1.ssld_stage2_ft_in1k,288,33.035,66.965,53.534,46.466,6.34,1.000,bicubic,-46.881,-41.654,+322\npvt_v2_b2.in1k,224,33.035,66.965,52.011,47.989,25.36,0.900,bicubic,-49.027,-43.955,+20\ncrossvit_base_240.in1k,240,33.025,66.975,51.357,48.643,105.03,0.875,bicubic,-49.187,-44.469,-1\npoolformerv2_m36.sail_in1k,224,33.019,66.981,51.854,48.146,56.08,1.000,bicubic,-49.195,-44.042,-3\nxcit_tiny_24_p16_224.fb_dist_in1k,224,33.017,66.983,52.078,47.922,12.12,1.000,bicubic,-47.445,-43.124,+250\nresnet51q.ra2_in1k,256,32.989,67.011,52.373,47.627,35.70,0.875,bilinear,-48.785,-43.559,+60\nrexnet_200.nav_in1k,224,32.972,67.028,52.935,47.065,16.37,0.875,bicubic,-48.664,-42.737,+70\nresnest50d.in1k,224,32.960,67.040,52.719,47.281,27.48,0.875,bilinear,-47.998,-42.653,+173\nconvit_small.fb_in1k,224,32.956,67.044,52.153,47.847,27.78,0.875,bicubic,-48.456,-43.587,+106\nnaflexvit_base_patch16_par_gap.e300_s576_in1k,384,32.948,67.052,51.050,48.950,86.63,1.000,bicubic,-50.560,-45.320,-224\ntf_efficientnet_b3.aa_in1k,300,32.917,67.083,52.933,47.067,12.23,0.904,bicubic,-48.731,-42.791,+64\nswin_base_patch4_window12_384.ms_in1k,384,32.911,67.089,51.768,48.232,87.90,1.000,bicubic,-51.567,-45.118,-394\nconvnext_tiny_hnf.a2h_in1k,288,32.905,67.095,51.198,48.802,28.59,1.000,bicubic,-49.703,-44.826,-82\npnasnet5large.tf_in1k,331,32.899,67.101,50.539,49.461,86.06,0.911,bicubic,-49.915,-45.745,-115\nvit_small_patch16_224.augreg_in21k_ft_in1k,224,32.893,67.107,53.941,46.059,22.05,0.900,bicubic,-48.509,-42.209,+102\ncrossvit_15_dagger_240.in1k,240,32.893,67.107,51.772,48.228,28.21,0.
875,bicubic,-49.409,-44.188,-29\ntwins_svt_base.in1k,224,32.840,67.160,51.487,48.513,56.07,0.900,bicubic,-50.282,-44.943,-166\nregnetv_064.ra3_in1k,288,32.830,67.170,52.886,47.114,30.58,1.000,bicubic,-50.916,-43.860,-273\ncs3edgenet_x.c2_in1k,256,32.822,67.178,52.182,47.818,47.82,0.887,bicubic,-49.376,-43.774,-14\nmobilenetv4_hybrid_medium.ix_e550_r256_in1k,256,32.807,67.193,52.530,47.470,11.07,0.950,bicubic,-48.671,-43.162,+79\nconvnextv2_nano.fcmae_ft_in1k,288,32.805,67.195,52.689,47.311,15.62,1.000,bicubic,-49.685,-43.537,-66\nnasnetalarge.tf_in1k,331,32.803,67.197,50.162,49.838,88.75,0.911,bicubic,-49.835,-45.904,-98\nhgnet_small.paddle_in1k,224,32.791,67.209,52.465,47.535,24.36,0.965,bicubic,-48.635,-43.381,+90\nresnet61q.ra2_in1k,256,32.791,67.209,51.170,48.830,36.85,0.900,bicubic,-49.179,-44.678,+14\nresnet152d.gluon_in1k,224,32.764,67.237,51.076,48.924,60.21,0.875,bicubic,-47.712,-44.518,+226\ninception_resnet_v2.tf_in1k,299,32.750,67.250,50.671,49.329,55.84,0.897,bicubic,-47.684,-44.641,+235\nrepvit_m1_1.dist_450e_in1k,224,32.740,67.260,52.675,47.325,8.80,0.950,bicubic,-48.574,-42.893,+102\ngernet_m.idstcv_in1k,224,32.732,67.268,51.887,48.113,21.14,0.875,bilinear,-47.970,-43.299,+197\neca_nfnet_l0.ra2_in1k,224,32.712,67.288,53.226,46.774,24.14,0.900,bicubic,-49.050,-42.768,+40\npit_b_224.in1k,224,32.697,67.303,49.842,50.158,73.76,0.900,bicubic,-49.761,-45.862,-69\ntf_efficientnet_b2.ap_in1k,260,32.675,67.325,52.253,47.747,9.11,0.890,bicubic,-47.625,-42.779,+250\nswin_base_patch4_window7_224.ms_in1k,224,32.644,67.356,51.567,48.433,87.77,0.900,bicubic,-50.960,-44.883,-261\nfbnetv3_g.ra2_in1k,288,32.628,67.372,52.895,47.105,16.62,0.950,bilinear,-49.406,-43.168,-6\nmobilenetv4_conv_blur_medium.e500_r224_in1k,224,32.628,67.372,52.412,47.588,9.72,0.950,bicubic,-46.818,-42.078,+350\nregnety_320.tv2_in1k,224,32.610,67.390,50.292,49.708,145.05,0.965,bicubic,-50.560,-46.124,-195\nmobilenetv4_conv_blur_medium.e500_r224_in1k,256,32.604,67.396,52.361,47.639,9.72,1.000,b
icubic,-47.548,-42.937,+267\nmobilenetv4_conv_medium.e500_r256_in1k,256,32.590,67.409,52.648,47.352,9.72,0.950,bicubic,-47.325,-42.516,+285\nswiftformer_l1.dist_in1k,224,32.583,67.417,51.919,48.081,12.06,0.950,bicubic,-48.323,-43.335,+151\nresnext101_32x8d.tv2_in1k,224,32.571,67.429,50.154,49.846,88.79,0.965,bilinear,-50.261,-46.080,-147\nregnetz_c16_evos.ch_in1k,320,32.563,67.437,52.931,47.069,13.49,0.950,bicubic,-50.097,-43.549,-121\ncait_xxs36_384.fb_dist_in1k,384,32.549,67.451,52.257,47.743,17.37,1.000,bicubic,-49.655,-43.891,-38\ntresnet_l.miil_in1k,224,32.547,67.453,51.127,48.873,55.99,0.875,bilinear,-48.951,-44.525,+53\nnaflexvit_base_patch16_gap.e300_s576_in1k,384,32.541,67.459,50.885,49.115,86.63,1.000,bicubic,-50.799,-45.481,-220\nwide_resnet50_2.racm_in1k,224,32.486,67.514,51.471,48.529,68.88,0.875,bicubic,-48.980,-43.747,+60\nresnet152.a1_in1k,224,32.478,67.522,49.828,50.172,60.19,0.950,bicubic,-49.520,-46.060,-13\ngmlp_s16_224.ra3_in1k,224,32.418,67.582,51.832,48.168,19.42,0.875,bicubic,-47.234,-42.792,+319\nresnetaa50d.sw_in12k_ft_in1k,224,32.404,67.596,52.516,47.484,25.58,0.950,bicubic,-49.378,-43.606,+18\ninception_resnet_v2.tf_ens_adv_in1k,299,32.396,67.604,50.439,49.561,55.84,0.897,bicubic,-47.602,-44.501,+269\ndeit_base_patch16_224.fb_in1k,224,32.390,67.610,50.986,49.014,86.57,0.900,bicubic,-49.590,-44.754,-14\nmaxvit_nano_rw_256.sw_in1k,256,32.374,67.626,50.606,49.394,15.45,0.950,bicubic,-50.560,-45.618,-172\nmobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k,320,32.368,67.632,51.809,48.191,11.07,1.000,bicubic,-50.630,-44.867,-181\nswin_small_patch4_window7_224.ms_in1k,224,32.349,67.651,50.913,49.087,49.61,0.900,bicubic,-50.879,-45.413,-216\nresnet152s.gluon_in1k,224,32.347,67.653,50.532,49.468,60.32,0.875,bicubic,-48.701,-44.910,+118\nregnety_040.ra3_in1k,224,32.345,67.655,51.504,48.496,20.65,0.950,bicubic,-49.963,-44.574,-70\nrepvit_m1_1.dist_300e_in1k,224,32.290,67.710,51.964,48.036,8.80,0.950,bicubic,-48.536,-43.210,+149\nedgenext_small.usi_in
1k,256,32.290,67.710,51.897,48.103,5.59,0.950,bicubic,-48.778,-43.433,+112\ndeit_small_distilled_patch16_224.fb_in1k,224,32.282,67.718,52.111,47.889,22.44,0.900,bicubic,-48.922,-43.267,+90\npoolformerv2_s24.sail_in1k,224,32.274,67.726,51.489,48.511,21.34,1.000,bicubic,-48.472,-43.829,+157\nmobilenetv3_large_150d.ra4_e3600_r256_in1k,256,32.251,67.749,51.874,48.126,14.62,0.950,bilinear,-48.743,-43.600,+118\nxcit_tiny_24_p8_224.fb_in1k,224,32.245,67.755,51.878,48.122,12.11,1.000,bicubic,-49.643,-44.100,-11\nseresnext101_64x4d.gluon_in1k,224,32.203,67.797,50.333,49.667,88.23,0.875,bicubic,-48.681,-44.975,+133\nregnetx_320.tv2_in1k,224,32.202,67.799,49.364,50.636,107.81,0.965,bicubic,-50.622,-46.836,-167\ngcvit_xtiny.in1k,224,32.133,67.867,51.115,48.885,19.98,0.875,bicubic,-49.831,-44.851,-25\ncoat_lite_small.in1k,224,32.127,67.873,49.983,50.017,19.84,0.900,bicubic,-50.177,-45.871,-78\nseresnext101_32x4d.gluon_in1k,224,32.119,67.881,51.221,48.779,48.96,0.875,bicubic,-48.767,-44.047,+128\nefficientvit_b2.r224_in1k,224,32.099,67.901,50.936,49.064,24.33,0.950,bicubic,-50.063,-44.774,-54\ntiny_vit_11m_224.in1k,224,32.093,67.907,51.306,48.694,11.00,0.950,bicubic,-49.445,-44.564,+21\ncoatnext_nano_rw_224.sw_in1k,224,32.084,67.916,51.031,48.969,14.70,0.900,bicubic,-49.914,-44.263,-37\nflexivit_small.1200ep_in1k,240,32.078,67.922,50.315,49.685,22.06,0.950,bicubic,-50.498,-45.837,-126\ntf_efficientnet_b7.aa_in1k,600,32.068,67.932,52.133,47.867,66.35,0.949,bicubic,-52.356,-44.731,-435\nfocalnet_tiny_lrf.ms_in1k,224,32.060,67.940,51.428,48.572,28.65,0.900,bicubic,-50.114,-44.520,-64\nmaxxvitv2_nano_rw_256.sw_in1k,256,32.060,67.940,50.323,49.677,23.70,0.950,bicubic,-51.048,-46.009,-215\nresnetv2_50d_gn.ah_in1k,224,32.021,67.979,51.213,48.787,25.57,0.950,bicubic,-48.779,-44.143,+137\nseresnext50_32x4d.racm_in1k,224,32.005,67.995,51.255,48.745,27.56,0.875,bicubic,-49.263,-44.369,+62\ndeit_base_patch16_384.fb_in1k,384,31.981,68.019,50.539,49.461,86.86,1.000,bicubic,-51.127,-45.837,-220
\nmaxvit_rmlp_nano_rw_256.sw_in1k,256,31.971,68.028,50.618,49.382,15.50,0.950,bicubic,-51.006,-45.634,-204\nmobilenetv4_hybrid_medium.ix_e550_r256_in1k,320,31.958,68.042,51.463,48.537,11.07,1.000,bicubic,-50.542,-44.823,-124\nxcit_tiny_12_p8_224.fb_dist_in1k,224,31.944,68.056,51.388,48.612,6.71,1.000,bicubic,-49.270,-44.226,+67\nwide_resnet101_2.tv2_in1k,176,31.909,68.091,49.527,50.473,126.89,0.875,bilinear,-48.585,-45.411,+169\nhgnetv2_b0.ssld_stage1_in22k_in1k,224,31.905,68.095,52.308,47.692,6.00,0.965,bicubic,-44.963,-41.312,+543\ncoatnet_bn_0_rw_224.sw_in1k,224,31.873,68.127,51.001,48.999,27.44,0.950,bicubic,-50.521,-45.221,-113\nlevit_conv_384.fb_dist_in1k,224,31.873,68.127,50.624,49.376,39.13,0.900,bicubic,-50.727,-45.396,-147\nlevit_384.fb_dist_in1k,224,31.871,68.129,50.622,49.378,39.13,0.900,bicubic,-50.729,-45.398,-149\necaresnet50t.ra2_in1k,256,31.867,68.133,51.766,48.234,25.57,0.875,bicubic,-49.583,-43.912,+24\nresnetrs101.tf_in1k,288,31.865,68.135,51.019,48.981,63.62,0.940,bicubic,-50.423,-44.981,-94\ncs3se_edgenet_x.c2ns_in1k,320,31.861,68.138,50.822,49.178,50.72,1.000,bicubic,-51.688,-45.844,-307\nmobilenetv4_conv_medium.e500_r256_in1k,320,31.856,68.144,51.781,48.219,9.72,1.000,bicubic,-49.034,-43.969,+105\nvit_wee_patch16_reg1_gap_256.sbb_in1k,256,31.854,68.146,51.540,48.460,13.42,0.950,bicubic,-48.408,-43.820,+201\nmobilenetv4_conv_medium.e500_r224_in1k,224,31.838,68.162,51.540,48.460,9.72,0.950,bicubic,-47.254,-43.248,+338\nmobilenetv4_conv_medium.e500_r224_in1k,256,31.828,68.172,51.583,48.417,9.72,1.000,bicubic,-48.008,-43.609,+251\nvit_relpos_small_patch16_224.sw_in1k,224,31.775,68.225,50.608,49.392,21.98,0.900,bicubic,-49.701,-45.216,+9\nflexivit_small.600ep_in1k,240,31.755,68.245,49.614,50.386,22.06,0.950,bicubic,-50.633,-46.428,-122\ntf_efficientnetv2_b3.in1k,240,31.738,68.262,51.111,48.889,14.36,0.904,bicubic,-48.970,-44.319,+130\nconvnextv2_pico.fcmae_ft_in1k,224,31.720,68.280,51.164,48.836,9.07,0.875,bicubic,-48.584,-43.918,+186\npoolformer_
m48.sail_in1k,224,31.669,68.331,49.842,50.158,73.47,0.950,bicubic,-50.793,-46.244,-136\ntnt_s_legacy_patch16_224.in1k,224,31.663,68.337,51.170,48.830,23.76,0.900,bicubic,-49.849,-44.572,-6\nconvnext_tiny.fb_in22k_ft_in1k,288,31.643,68.357,51.768,48.232,28.59,1.000,bicubic,-47.307,-42.512,+344\nresnetv2_50x1_bit.goog_distilled_in1k,224,31.640,68.361,51.312,48.688,25.55,0.875,bicubic,-51.180,-45.214,-202\nefficientnet_b1.ra4_e3600_r240_in1k,240,31.634,68.366,50.486,49.514,7.79,0.900,bicubic,-48.770,-44.666,+168\neca_nfnet_l0.ra2_in1k,288,31.624,68.376,51.610,48.390,24.14,1.000,bicubic,-50.966,-44.876,-162\nefficientnet_b1.ra4_e3600_r240_in1k,288,31.618,68.382,50.630,49.370,7.79,1.000,bicubic,-49.826,-45.068,+8\nfocalnet_tiny_srf.ms_in1k,224,31.616,68.384,50.868,49.132,28.43,0.900,bicubic,-50.530,-45.104,-88\nresnet50.ram_in1k,224,31.594,68.406,50.205,49.795,25.56,0.875,bicubic,-47.440,-44.181,+333\nxception41p.ra3_in1k,299,31.563,68.437,50.368,49.632,26.91,0.940,bicubic,-50.415,-45.416,-68\nmobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k,224,31.553,68.447,50.893,49.107,8.46,0.900,bicubic,-48.563,-44.101,+200\ncoatnet_rmlp_nano_rw_224.sw_in1k,224,31.553,68.447,50.215,49.785,15.15,0.900,bicubic,-50.511,-45.659,-84\nmobilevitv2_200.cvnets_in22k_ft_in1k,256,31.543,68.457,51.834,48.166,18.45,0.888,bicubic,-50.777,-44.108,-126\nwide_resnet50_2.racm_in1k,288,31.520,68.480,50.382,49.618,68.88,0.950,bicubic,-50.740,-45.680,-115\nnf_resnet50.ra2_in1k,256,31.492,68.508,50.765,49.235,25.56,0.940,bicubic,-48.744,-44.339,+187\nhgnetv2_b0.ssld_stage2_ft_in1k,224,31.486,68.514,51.734,48.266,6.00,0.965,bicubic,-45.876,-42.074,+476\nregnety_064.ra3_in1k,288,31.466,68.534,50.490,49.510,30.58,1.000,bicubic,-52.249,-46.234,-362\npoolformer_m36.sail_in1k,224,31.463,68.537,50.025,49.975,56.17,0.950,bicubic,-50.641,-45.673,-95\nresnext101_32x4d.fb_ssl_yfcc100m_ft_in1k,224,31.455,68.545,52.151,47.849,44.18,0.875,bilinear,-49.481,-43.581,+73\nflexivit_small.300ep_in1k,240,31.455,68.545,49.225,50.77
5,22.06,0.950,bicubic,-50.715,-46.807,-105\nrepvit_m1_0.dist_450e_in1k,224,31.443,68.557,50.677,49.323,7.30,0.950,bicubic,-48.987,-44.245,+149\nrexnet_150.nav_in1k,224,31.431,68.569,51.325,48.675,9.73,0.875,bicubic,-48.891,-43.655,+161\nresnet152.a1_in1k,288,31.431,68.569,48.671,51.329,60.19,1.000,bicubic,-51.311,-47.427,-210\nmobilenetv3_large_150d.ra4_e3600_r256_in1k,320,31.415,68.585,50.844,49.156,14.62,1.000,bilinear,-50.417,-45.034,-59\nresnet50.b1k_in1k,224,31.410,68.591,50.648,49.352,25.56,0.950,bicubic,-48.154,-43.974,+252\ninception_v4.tf_in1k,299,31.388,68.612,49.248,50.752,42.68,0.875,bicubic,-48.756,-45.734,+185\nresnet101.tv2_in1k,224,31.366,68.634,49.578,50.422,44.55,0.965,bilinear,-50.540,-46.190,-73\nefficientformer_l1.snap_dist_in1k,224,31.358,68.642,50.514,49.486,12.29,0.950,bicubic,-49.144,-44.478,+125\nrepvit_m2.dist_in1k,224,31.335,68.665,50.675,49.325,8.80,0.950,bicubic,-49.133,-44.497,+129\npit_s_224.in1k,224,31.325,68.675,49.685,50.315,23.46,0.900,bicubic,-49.789,-45.953,+36\nregnety_032.tv2_in1k,224,31.311,68.689,50.139,49.861,19.44,0.965,bicubic,-50.477,-45.687,-62\nswinv2_tiny_window16_256.ms_in1k,256,31.299,68.701,49.683,50.317,28.35,0.900,bicubic,-51.529,-46.539,-233\ncrossvit_15_240.in1k,240,31.297,68.703,50.197,49.803,27.53,0.875,bicubic,-50.225,-45.497,-38\ncoatnet_0_rw_224.sw_in1k,224,31.284,68.716,48.651,51.349,27.44,0.950,bicubic,-51.112,-47.193,-162\nvit_srelpos_small_patch16_224.sw_in1k,224,31.282,68.718,50.264,49.736,21.97,0.900,bicubic,-49.826,-45.316,+33\ncrossvit_small_240.in1k,240,31.274,68.726,50.194,49.806,26.86,0.875,bicubic,-49.762,-45.272,+42\ncait_xxs36_224.fb_dist_in1k,224,31.272,68.728,50.624,49.376,17.30,1.000,bicubic,-48.478,-44.242,+221\nrepvit_m1_0.dist_300e_in1k,224,31.262,68.738,50.822,49.178,7.30,0.950,bicubic,-48.890,-43.948,+172\ncspresnet50.ra_in1k,256,31.256,68.744,51.241,48.759,21.62,0.887,bilinear,-48.322,-43.457,+237\nswinv2_cr_small_224.sw_in1k,224,31.254,68.746,48.761,51.239,49.70,0.900,bicubic,-51.87
2,-47.349,-284\nhgnetv2_b0.ssld_stage1_in22k_in1k,288,31.239,68.761,51.510,48.490,6.00,1.000,bicubic,-46.799,-42.734,+394\nconvmixer_768_32.in1k,224,31.235,68.765,50.936,49.064,21.11,0.960,bicubic,-48.923,-44.134,+166\ncspresnext50.ra_in1k,256,31.221,68.779,50.911,49.089,20.57,0.887,bilinear,-49.327,-44.419,+103\nvit_pwee_patch16_reg1_gap_256.sbb_in1k,256,31.219,68.781,51.103,48.897,15.25,0.950,bicubic,-48.863,-44.035,+170\nswin_s3_tiny_224.ms_in1k,224,31.219,68.781,49.714,50.286,28.33,0.900,bicubic,-50.909,-46.244,-123\nmobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k,256,31.213,68.787,50.339,49.661,8.46,0.950,bicubic,-49.471,-45.103,+85\nregnetv_040.ra3_in1k,288,31.213,68.787,50.109,49.891,20.64,1.000,bicubic,-51.991,-46.573,-305\ncoat_mini.in1k,224,31.213,68.787,49.787,50.213,10.34,0.900,bicubic,-50.107,-45.601,-12\nxcit_tiny_12_p8_384.fb_dist_in1k,384,31.201,68.799,50.512,49.488,6.71,1.000,bicubic,-51.193,-45.682,-176\nresnet50d.ra4_e3600_r224_in1k,224,31.189,68.811,49.895,50.105,25.58,0.950,bicubic,-49.767,-45.485,+38\nresnet152.tv2_in1k,176,31.176,68.824,48.737,51.263,60.19,0.875,bilinear,-49.046,-45.913,+154\nfastvit_sa12.apple_dist_in1k,256,31.172,68.828,49.989,50.011,11.58,0.900,bicubic,-50.666,-45.711,-88\nresnet101.a1_in1k,224,31.162,68.838,48.052,51.948,44.55,0.950,bicubic,-50.332,-47.112,-50\necaresnetlight.miil_in1k,224,31.152,68.848,50.262,49.738,30.16,0.875,bicubic,-49.302,-44.986,+109\nresnet101s.gluon_in1k,224,31.146,68.854,49.820,50.180,44.67,0.875,bicubic,-49.132,-45.340,+137\nconvnext_nano.d1h_in1k,224,31.140,68.860,50.072,49.928,15.59,0.950,bicubic,-49.622,-45.260,+64\nrepvit_m0_9.dist_300e_in1k,224,31.131,68.870,50.769,49.231,5.49,0.950,bicubic,-47.522,-43.345,+324\nedgenext_small.usi_in1k,320,31.123,68.877,50.160,49.840,5.59,1.000,bicubic,-50.451,-45.552,-69\nresnet152.a3_in1k,160,31.117,68.883,49.260,50.740,60.19,0.950,bicubic,-47.779,-44.880,+293\ncoatnet_nano_rw_224.sw_in1k,224,31.103,68.897,49.575,50.425,15.14,0.900,bicubic,-50.601,-46.075,-83\
nresmlp_36_224.fb_distilled_in1k,224,31.099,68.901,49.693,50.307,44.69,0.875,bicubic,-50.051,-45.815,-3\ntf_efficientnet_cc_b0_8e.in1k,224,31.089,68.911,50.795,49.205,24.01,0.875,bicubic,-46.867,-42.843,+382\necaresnet50d.miil_in1k,224,31.085,68.915,50.870,49.130,25.58,0.875,bicubic,-49.549,-44.441,+77\nconvnext_nano_ols.d1h_in1k,224,31.068,68.932,49.828,50.172,15.65,0.950,bicubic,-49.838,-45.546,+32\necaresnet50t.ra2_in1k,320,31.046,68.954,50.600,49.400,25.57,0.950,bicubic,-51.324,-45.520,-186\nresnet50d.ra2_in1k,224,31.038,68.962,49.844,50.156,25.58,0.875,bicubic,-49.508,-45.316,+82\ncs3sedarknet_x.c2ns_in1k,288,31.026,68.974,50.162,49.838,35.40,1.000,bicubic,-51.628,-46.186,-239\ngcresnet50t.ra2_in1k,256,31.020,68.980,50.125,49.875,25.90,0.900,bicubic,-49.924,-45.333,+23\nresnet152.a2_in1k,224,31.013,68.987,47.798,52.202,60.19,0.950,bicubic,-50.751,-47.472,-95\nregnety_160.tv2_in1k,224,31.011,68.989,49.105,50.895,83.59,0.965,bicubic,-51.635,-47.125,-241\nresnext101_64x4d.gluon_in1k,224,31.011,68.989,48.533,51.467,83.46,0.875,bicubic,-49.629,-46.467,+68\ncspdarknet53.ra_in1k,256,31.007,68.993,50.392,49.608,27.64,0.887,bilinear,-49.071,-44.668,+144\ntresnet_m.miil_in1k,224,31.001,68.999,48.694,51.306,31.39,0.875,bilinear,-49.815,-46.168,+39\nresnet152c.gluon_in1k,224,30.995,69.005,48.950,51.050,60.21,0.875,bicubic,-48.909,-45.904,+159\ntwins_svt_small.in1k,224,30.971,69.029,49.235,50.765,24.06,0.900,bicubic,-50.723,-46.435,-97\ngcresnet50t.ra2_in1k,288,30.958,69.042,50.068,49.932,25.90,1.000,bicubic,-50.500,-45.656,-64\ncs3sedarknet_x.c2ns_in1k,256,30.916,69.084,49.971,50.029,35.40,0.887,bicubic,-51.010,-46.051,-125\nresnext50_32x4d.a1h_in1k,224,30.916,69.084,49.240,50.760,25.03,0.950,bicubic,-50.224,-46.079,-15\nregnety_080_tv.tv2_in1k,224,30.912,69.088,48.706,51.294,39.38,0.965,bicubic,-51.668,-47.548,-233\nhgnetv2_b0.ssld_stage2_ft_in1k,288,30.904,69.096,51.048,48.952,6.00,1.000,bicubic,-47.686,-43.340,+301\necaresnet101d_pruned.miil_in1k,224,30.904,69.096,50.01
5,49.985,24.88,0.875,bicubic,-49.906,-45.627,+34\ntf_efficientnet_cc_b1_8e.in1k,240,30.893,69.107,50.084,49.916,39.72,0.882,bicubic,-48.425,-44.300,+225\nresmlp_24_224.fb_distilled_in1k,224,30.891,69.109,50.144,49.856,30.02,0.875,bicubic,-49.875,-45.076,+36\nresnetaa50.a1h_in1k,224,30.885,69.115,49.514,50.486,25.56,0.950,bicubic,-49.729,-45.698,+57\ntf_efficientnetv2_b3.in1k,300,30.883,69.117,49.808,50.192,14.36,0.904,bicubic,-51.077,-46.124,-140\nresnet50.b2k_in1k,224,30.883,69.117,49.394,50.606,25.56,0.950,bicubic,-48.505,-45.282,+211\nrepvit_m0_9.dist_450e_in1k,224,30.879,69.121,50.144,49.856,5.49,0.950,bicubic,-48.179,-44.244,+252\nresnext101_32x4d.gluon_in1k,224,30.873,69.127,48.543,51.457,44.18,0.875,bicubic,-49.463,-46.365,+94\ntf_efficientnet_lite4.in1k,380,30.838,69.162,50.400,49.600,13.01,0.920,bilinear,-50.702,-45.268,-96\nresnet50.c1_in1k,224,30.832,69.168,49.465,50.535,25.56,0.950,bicubic,-48.934,-45.489,+165\ncs3darknet_x.c2ns_in1k,256,30.826,69.174,49.698,50.302,35.05,0.950,bicubic,-51.030,-46.042,-131\nresnetaa50d.sw_in12k_ft_in1k,288,30.818,69.182,50.583,49.417,25.58,1.000,bicubic,-51.838,-45.909,-265\nefficientvit_b1.r288_in1k,288,30.791,69.210,49.975,50.025,9.10,1.000,bicubic,-49.532,-45.189,+91\nresnetv2_50d_evos.ah_in1k,224,30.757,69.243,48.812,51.188,25.59,0.950,bicubic,-50.071,-46.458,+16\ndpn68b.ra_in1k,224,30.747,69.253,49.506,50.494,12.61,0.950,bicubic,-47.799,-44.590,+293\nwide_resnet50_2.tv2_in1k,176,30.726,69.274,48.889,51.111,68.88,0.875,bilinear,-49.716,-46.187,+70\nnf_resnet50.ra2_in1k,288,30.718,69.282,49.952,50.048,25.56,0.940,bicubic,-49.940,-45.398,+38\nxcit_tiny_24_p16_224.fb_in1k,224,30.716,69.284,50.390,49.610,12.12,1.000,bicubic,-48.758,-44.488,+189\npoolformer_s36.sail_in1k,224,30.708,69.292,49.463,50.537,30.86,0.900,bicubic,-50.720,-46.167,-80\nese_vovnet39b.ra_in1k,224,30.694,69.306,49.885,50.115,24.57,0.875,bicubic,-48.618,-44.837,+208\nresnetv2_34.ra4_e3600_r224_in1k,224,30.675,69.325,49.011,50.989,21.80,0.900,bicubic,-46
.951,-44.507,+369\nhgnet_small.paddle_in1k,288,30.661,69.339,49.962,50.038,24.36,1.000,bicubic,-51.561,-46.262,-198\ndpn107.mx_in1k,224,30.657,69.343,48.747,51.253,86.92,0.875,bicubic,-49.509,-46.185,+104\nresnet152.gluon_in1k,224,30.639,69.361,48.525,51.475,60.19,0.875,bicubic,-49.073,-46.195,+158\ntresnet_xl.miil_in1k_448,448,30.633,69.367,49.117,50.883,78.44,0.875,bilinear,-52.433,-47.063,-338\nresnet50.a1h_in1k,224,30.625,69.374,49.419,50.581,25.56,1.000,bicubic,-50.028,-45.893,+30\nresnext50_32x4d.fb_ssl_yfcc100m_ft_in1k,224,30.616,69.384,50.691,49.309,25.03,0.875,bilinear,-49.700,-44.695,+78\nhaloregnetz_b.ra3_in1k,224,30.590,69.410,48.985,51.015,11.68,0.940,bicubic,-50.468,-46.211,-32\nresnetv2_34.ra4_e3600_r224_in1k,288,30.567,69.433,48.559,51.441,21.80,1.000,bicubic,-48.503,-46.001,+227\nhgnet_tiny.paddle_in1k,224,30.561,69.439,50.109,49.891,14.74,0.965,bicubic,-49.331,-44.945,+127\nregnetx_080.tv2_in1k,224,30.557,69.443,48.093,51.907,39.57,0.965,bicubic,-50.995,-47.447,-120\npit_xs_distilled_224.in1k,224,30.555,69.445,50.172,49.828,11.00,0.900,bicubic,-48.617,-44.178,+215\nregnetz_b16.ra3_in1k,224,30.537,69.463,49.297,50.703,9.72,0.940,bicubic,-49.327,-45.684,+128\nresnet50.a1h_in1k,176,30.535,69.465,49.309,50.691,25.56,0.900,bicubic,-48.729,-45.189,+200\nmobilevitv2_200.cvnets_in22k_ft_in1k_384,384,30.531,69.469,50.569,49.431,18.45,1.000,bicubic,-52.899,-46.007,-405\nresnet101d.gluon_in1k,224,30.515,69.485,47.977,52.023,44.57,0.875,bicubic,-49.951,-47.329,+44\nresnest26d.gluon_in1k,224,30.494,69.506,50.642,49.358,17.07,0.875,bilinear,-47.982,-43.658,+280\nefficientnet_b2.ra_in1k,288,30.492,69.508,49.698,50.302,9.11,1.000,bicubic,-50.120,-45.622,+24\nresnet50.ram_in1k,288,30.447,69.553,49.020,50.980,25.56,0.950,bicubic,-49.535,-46.018,+106\nefficientformerv2_s1.snap_dist_in1k,224,30.445,69.555,49.620,50.380,6.19,0.950,bicubic,-49.239,-45.092,+145\nresnetv2_34d.ra4_e3600_r384_in1k,384,30.437,69.563,48.922,51.078,21.82,1.000,bicubic,-49.355,-45.980,+132\ncs3
darknet_x.c2ns_in1k,288,30.435,69.565,49.180,50.820,35.05,1.000,bicubic,-51.799,-47.054,-220\ntf_efficientnet_b1.ap_in1k,240,30.433,69.567,49.604,50.396,7.79,0.882,bicubic,-48.855,-44.712,+189\nresnetv2_50.a1h_in1k,224,30.429,69.571,48.918,51.082,25.55,0.950,bicubic,-49.981,-46.164,+48\nresnet50d.ra2_in1k,288,30.405,69.595,48.769,51.231,25.58,0.950,bicubic,-50.949,-46.789,-96\ntwins_pcpvt_small.in1k,224,30.401,69.599,49.382,50.618,24.11,0.900,bicubic,-50.715,-46.242,-62\nresnet50.d_in1k,224,30.399,69.600,48.164,51.836,25.56,0.950,bicubic,-49.504,-46.674,+108\nxcit_tiny_12_p16_384.fb_dist_in1k,384,30.394,69.606,50.105,49.895,6.72,1.000,bicubic,-50.560,-45.533,-38\necaresnetlight.miil_in1k,288,30.366,69.634,49.187,50.813,30.16,0.950,bicubic,-51.064,-46.603,-112\necaresnet101d_pruned.miil_in1k,288,30.360,69.640,48.820,51.180,24.88,0.950,bicubic,-51.656,-47.360,-195\ntf_efficientnet_b5.in1k,456,30.350,69.650,49.771,50.229,30.39,0.934,bicubic,-52.820,-46.763,-386\nresnet34.ra4_e3600_r224_in1k,288,30.339,69.661,48.788,51.212,21.80,1.000,bicubic,-48.633,-45.665,+212\nvisformer_small.in1k,224,30.325,69.675,48.315,51.685,40.22,0.900,bicubic,-51.767,-47.561,-209\ntf_efficientnetv2_b2.in1k,208,30.315,69.685,49.040,50.960,10.10,0.890,bicubic,-48.893,-45.562,+190\nresnet101.a1_in1k,288,30.311,69.689,46.554,53.446,44.55,1.000,bicubic,-52.015,-49.076,-249\nregnety_040.ra3_in1k,288,30.305,69.695,48.885,51.115,20.65,1.000,bicubic,-52.743,-47.617,-368\nmobilevitv2_175.cvnets_in22k_ft_in1k,256,30.274,69.726,49.295,50.705,14.25,0.888,bicubic,-51.676,-46.465,-190\nresnet50.c2_in1k,224,30.260,69.740,48.470,51.530,25.56,0.950,bicubic,-49.608,-46.392,+102\nregnetx_160.tv2_in1k,224,30.254,69.746,47.047,52.953,54.28,0.965,bicubic,-52.320,-49.011,-289\nvit_relpos_base_patch32_plus_rpn_256.sw_in1k,256,30.225,69.775,48.716,51.284,119.42,0.900,bicubic,-49.283,-45.394,+145\nresnet50.b1k_in1k,288,30.207,69.793,49.187,50.813,25.56,1.000,bicubic,-50.501,-45.991,-13\nmambaout_kobe.in1k,224,30.191,69.
809,49.382,50.618,9.14,1.000,bicubic,-49.795,-45.600,+82\ntnt_s_patch16_224.in1k,224,30.191,69.809,48.910,51.090,23.77,0.900,bicubic,-51.315,-46.856,-145\nfastvit_s12.apple_dist_in1k,256,30.187,69.813,48.950,51.050,9.47,0.900,bicubic,-50.887,-46.328,-72\ncs3sedarknet_l.c2ns_in1k,256,30.183,69.817,49.553,50.447,21.91,0.887,bicubic,-51.025,-46.245,-93\nseresnext50_32x4d.racm_in1k,288,30.170,69.830,49.091,50.909,27.56,0.950,bicubic,-52.016,-47.055,-234\nresnet50.b2k_in1k,288,30.158,69.842,48.285,51.715,25.56,1.000,bicubic,-50.308,-46.749,+12\nwide_resnet50_2.tv2_in1k,224,30.150,69.850,48.358,51.642,68.88,0.965,bilinear,-51.462,-47.408,-162\nefficientnet_b0.ra4_e3600_r224_in1k,256,30.138,69.862,49.024,50.976,5.29,1.000,bicubic,-49.226,-45.730,+151\nregnety_016.tv2_in1k,224,30.132,69.868,49.278,50.722,11.20,0.965,bicubic,-50.536,-46.058,-16\nresnet34.ra4_e3600_r224_in1k,224,30.126,69.874,48.773,51.227,21.80,0.900,bicubic,-47.366,-44.725,+330\nresnet50d.ra4_e3600_r224_in1k,288,30.126,69.874,48.637,51.363,25.58,1.000,bicubic,-51.714,-47.289,-188\nresnet101.tv2_in1k,176,30.093,69.907,48.091,51.909,44.55,0.875,bilinear,-49.815,-46.515,+79\nnf_regnet_b1.ra2_in1k,256,30.087,69.913,50.170,49.830,10.22,0.900,bicubic,-48.623,-44.210,+219\nseresnet50.ra2_in1k,224,30.087,69.913,49.321,50.679,28.09,0.875,bicubic,-50.173,-45.747,+41\nconvmixer_1024_20_ks9_p14.in1k,224,30.069,69.931,49.926,50.074,24.38,0.960,bicubic,-46.875,-43.438,+366\ndpn68b.ra_in1k,288,30.069,69.931,48.193,51.807,12.61,1.000,bicubic,-49.273,-46.237,+147\nlegacy_senet154.in1k,224,30.061,69.939,48.010,51.990,115.09,0.875,bilinear,-51.258,-47.492,-125\nhalo2botnet50ts_256.a1h_in1k,256,30.042,69.958,48.515,51.485,22.64,0.950,bicubic,-52.042,-47.143,-234\nefficientnet_el.ra_in1k,300,30.040,69.960,48.836,51.164,10.59,0.904,bicubic,-51.268,-46.702,-124\ntf_efficientnet_b2.aa_in1k,260,30.028,69.972,49.608,50.392,9.11,0.890,bicubic,-50.046,-45.350,+57\nxcit_tiny_12_p16_224.fb_dist_in1k,224,30.003,69.997,49.637,50.363,6.72,
1.000,bicubic,-48.569,-44.573,+222\nfbnetv3_d.ra2_in1k,224,29.999,70.001,49.932,50.068,10.31,0.950,bilinear,-48.659,-44.522,+217\ndpn98.mx_in1k,224,29.973,70.027,48.166,51.834,61.57,0.875,bicubic,-49.691,-46.478,+105\nresnet101.a2_in1k,224,29.969,70.031,47.000,53.000,44.55,0.950,bicubic,-51.375,-48.198,-134\nresnext50_32x4d.a1h_in1k,288,29.967,70.033,48.238,51.762,25.03,1.000,bicubic,-52.049,-47.696,-230\nmobilevitv2_150.cvnets_in22k_ft_in1k,256,29.965,70.035,49.262,50.738,10.59,0.888,bicubic,-51.501,-46.282,-158\nresnetv2_50d_gn.ah_in1k,288,29.948,70.052,48.227,51.773,25.57,1.000,bicubic,-52.012,-47.553,-223\ndpn131.mx_in1k,224,29.942,70.058,48.050,51.950,79.25,0.875,bicubic,-49.894,-46.558,+79\ndpn92.mx_in1k,224,29.920,70.080,49.142,50.858,37.67,0.875,bicubic,-50.112,-45.734,+50\nresnetv2_101x1_bit.goog_in21k_ft_in1k,448,29.914,70.086,51.105,48.895,44.54,1.000,bilinear,-52.418,-45.419,-286\ntf_efficientnet_b4.in1k,380,29.885,70.115,49.048,50.952,19.34,0.922,bicubic,-52.713,-47.084,-331\nsenet154.gluon_in1k,224,29.885,70.115,47.883,52.117,115.09,0.875,bicubic,-51.373,-47.475,-129\nconvnextv2_pico.fcmae_ft_in1k,288,29.883,70.117,48.775,51.225,9.07,0.950,bicubic,-51.179,-46.701,-99\nlegacy_xception.tf_in1k,299,29.871,70.129,48.732,51.268,22.86,0.897,bicubic,-49.187,-45.664,+163\nresnet50.tv2_in1k,224,29.869,70.131,48.046,51.954,25.56,0.965,bilinear,-50.977,-47.384,-68\ncs3sedarknet_l.c2ns_in1k,288,29.832,70.168,49.099,50.901,21.91,0.950,bicubic,-51.938,-46.863,-204\ninception_v3.tf_adv_in1k,299,29.828,70.172,47.861,52.139,23.83,0.875,bicubic,-47.770,-45.865,+295\nresnet152.a2_in1k,288,29.810,70.190,45.945,54.055,60.19,1.000,bicubic,-52.804,-49.803,-344\nefficientnet_b0.ra4_e3600_r224_in1k,224,29.804,70.196,48.914,51.086,5.29,0.900,bicubic,-48.764,-45.426,+205\nvit_base_patch16_384.augreg_in1k,384,29.800,70.200,48.356,51.644,86.86,1.000,bicubic,-51.302,-46.978,-112\nresnetaa50.a1h_in1k,288,29.796,70.204,48.014,51.986,25.56,1.000,bicubic,-51.822,-47.780,-197\nefficient
vit_b1.r224_in1k,224,29.784,70.216,48.203,51.797,9.10,0.950,bicubic,-49.470,-46.097,+136\nfbnetv3_d.ra2_in1k,256,29.763,70.237,49.474,50.526,10.31,0.950,bilinear,-49.919,-45.478,+82\nese_vovnet39b.ra_in1k,288,29.743,70.257,49.060,50.940,24.57,0.950,bicubic,-50.635,-46.300,-6\nresmlp_36_224.fb_in1k,224,29.739,70.261,48.916,51.084,44.69,0.875,bicubic,-50.033,-45.972,+68\nlamhalobotnet50ts_256.a1h_in1k,256,29.737,70.263,48.346,51.654,22.57,0.950,bicubic,-51.815,-47.164,-195\nedgenext_small_rw.sw_in1k,256,29.729,70.271,49.193,50.807,7.83,0.900,bicubic,-49.867,-45.325,+87\necaresnet50t.a1_in1k,224,29.724,70.276,47.527,52.473,25.57,0.950,bicubic,-51.558,-47.623,-149\nresnet101.a3_in1k,160,29.718,70.282,47.663,52.337,44.55,0.950,bicubic,-48.210,-46.023,+258\nresnet50.ra_in1k,224,29.696,70.304,48.529,51.471,25.56,0.875,bicubic,-49.124,-45.785,+170\nconvnext_nano.d1h_in1k,288,29.696,70.304,47.926,52.074,15.59,1.000,bicubic,-51.778,-47.744,-185\nresnet50.c1_in1k,288,29.694,70.306,48.464,51.536,25.56,1.000,bicubic,-51.234,-47.090,-100\nresnet50.tv2_in1k,176,29.684,70.316,48.103,51.897,25.56,0.875,bilinear,-49.750,-46.537,+99\nfastvit_sa12.apple_in1k,256,29.680,70.320,48.574,51.426,11.58,0.900,bicubic,-51.166,-46.772,-86\nvit_base_patch32_384.augreg_in1k,384,29.661,70.339,49.005,50.995,88.30,1.000,bicubic,-49.089,-45.233,+171\nefficientvit_b1.r256_in1k,256,29.661,70.339,48.248,51.752,9.10,1.000,bicubic,-50.073,-46.538,+63\nresnetv2_34d.ra4_e3600_r384_in1k,448,29.653,70.347,48.168,51.832,21.82,1.000,bicubic,-50.777,-47.114,-29\nfastvit_t12.apple_dist_in1k,256,29.647,70.353,48.513,51.487,7.55,0.900,bicubic,-50.717,-46.527,-19\nefficientnet_b2.ra_in1k,256,29.643,70.357,48.881,51.119,9.11,0.875,bicubic,-49.681,-45.701,+106\nresnet50.a1_in1k,224,29.641,70.359,46.737,53.263,25.56,0.950,bicubic,-50.741,-47.861,-25\nresnetblur50.bt_in1k,224,29.623,70.377,48.256,51.744,25.56,0.875,bicubic,-49.683,-46.282,+108\nnest_tiny_jx.goog_in1k,224,29.582,70.418,46.992,53.008,17.06,0.875,bicubic,-5
1.846,-48.449,-185\necaresnet50d.miil_in1k,288,29.580,70.420,48.995,51.005,25.58,0.950,bicubic,-52.068,-46.889,-224\nresnet50_gn.a1h_in1k,224,29.570,70.430,48.303,51.697,25.56,0.940,bicubic,-50.504,-46.603,+12\nresnetv2_34d.ra4_e3600_r224_in1k,288,29.555,70.445,47.425,52.575,21.82,1.000,bicubic,-50.059,-47.335,+66\ncs3darknet_l.c2ns_in1k,256,29.541,70.459,48.374,51.626,21.16,0.887,bicubic,-50.809,-46.930,-26\nresnet152.a3_in1k,224,29.531,70.469,47.006,52.994,60.19,0.950,bicubic,-51.017,-47.998,-61\nresnetv2_34d.ra4_e3600_r224_in1k,224,29.517,70.483,47.572,52.428,21.82,0.900,bicubic,-48.753,-46.378,+207\ngcresnext50ts.ch_in1k,288,29.509,70.491,47.851,52.149,15.67,1.000,bicubic,-51.733,-47.685,-164\nefficientnet_em.ra2_in1k,240,29.500,70.501,48.944,51.056,6.90,0.882,bicubic,-49.757,-45.584,+106\nresnext50_32x4d.tv2_in1k,224,29.496,70.504,47.246,52.754,25.03,0.965,bilinear,-51.694,-48.098,-158\ncs3darknet_l.c2ns_in1k,288,29.482,70.518,48.240,51.760,21.16,0.950,bicubic,-51.400,-47.422,-111\nresnetv2_50.a1h_in1k,288,29.448,70.552,47.494,52.506,25.55,1.000,bicubic,-51.958,-48.218,-192\ngcresnext50ts.ch_in1k,256,29.446,70.554,47.883,52.117,15.67,0.900,bicubic,-51.162,-47.297,-72\nresnext101_32x8d.tv_in1k,224,29.445,70.555,48.492,51.508,88.79,0.875,bilinear,-49.864,-46.038,+93\nmobileone_s4.apple_in1k,224,29.439,70.561,47.997,52.003,14.95,0.900,bilinear,-50.007,-46.923,+74\ndeit_small_patch16_224.fb_in1k,224,29.435,70.565,48.268,51.732,22.05,0.900,bicubic,-50.421,-46.788,+27\nsebotnet33ts_256.a1h_in1k,256,29.427,70.573,47.150,52.850,13.70,0.940,bicubic,-51.729,-48.010,-162\nnf_regnet_b1.ra2_in1k,288,29.423,70.577,49.421,50.579,10.22,0.900,bicubic,-49.945,-45.305,+79\nresnet50.fb_ssl_yfcc100m_ft_in1k,224,29.419,70.581,49.783,50.217,25.56,0.875,bilinear,-49.833,-45.043,+99\ncoat_lite_mini.in1k,224,29.397,70.603,47.674,52.326,11.01,0.900,bicubic,-49.707,-46.932,+110\ncait_xxs24_384.fb_dist_in1k,384,29.386,70.615,48.743,51.257,12.03,1.000,bicubic,-51.568,-46.687,-134\nrepvit_m1
.dist_in1k,224,29.384,70.616,48.525,51.475,5.49,0.950,bicubic,-49.158,-45.557,+165\nresnetv2_50d_evos.ah_in1k,288,29.380,70.620,47.273,52.727,25.59,1.000,bicubic,-52.644,-48.642,-291\nresnext50_32x4d.a1_in1k,224,29.370,70.630,46.238,53.762,25.03,0.950,bicubic,-51.156,-48.220,-74\nedgenext_small_rw.sw_in1k,320,29.366,70.634,48.730,51.270,7.83,1.000,bicubic,-51.086,-46.476,-63\nresnext50_32x4d.ra_in1k,224,29.358,70.642,47.405,52.595,25.03,0.875,bicubic,-50.442,-47.203,+24\nswin_tiny_patch4_window7_224.ms_in1k,224,29.350,70.650,47.584,52.416,28.29,0.900,bicubic,-52.038,-48.172,-204\nconvnext_nano_ols.d1h_in1k,288,29.342,70.658,47.411,52.589,15.65,1.000,bicubic,-52.270,-48.215,-243\nresnet34d.ra2_in1k,224,29.328,70.671,48.431,51.569,21.82,0.875,bicubic,-47.780,-44.941,+279\nregnetz_b16.ra3_in1k,288,29.321,70.679,47.904,52.096,9.72,1.000,bicubic,-51.419,-47.624,-109\ncait_xxs24_224.fb_dist_in1k,224,29.309,70.691,48.525,51.475,11.96,1.000,bicubic,-49.091,-45.801,+174\neca_resnet33ts.ra2_in1k,288,29.279,70.721,48.914,51.086,19.68,1.000,bicubic,-51.437,-46.468,-107\nresnet50.d_in1k,288,29.264,70.736,47.215,52.785,25.56,1.000,bicubic,-51.748,-48.242,-153\npvt_v2_b1.in1k,224,29.258,70.742,48.958,51.042,14.01,0.900,bicubic,-49.440,-45.532,+140\nresnet34.a1_in1k,224,29.258,70.742,47.108,52.892,21.80,0.950,bicubic,-47.162,-45.788,+314\nmaxvit_rmlp_pico_rw_256.sw_in1k,256,29.248,70.752,47.739,52.261,7.52,0.950,bicubic,-51.302,-47.447,-93\nresnet50.c2_in1k,288,29.246,70.754,47.161,52.839,25.56,1.000,bicubic,-51.616,-48.369,-132\necaresnet50d_pruned.miil_in1k,224,29.221,70.779,48.468,51.532,19.94,0.875,bicubic,-50.493,-46.394,+21\npoolformer_s24.sail_in1k,224,29.209,70.791,48.122,51.878,21.39,0.900,bicubic,-51.085,-46.938,-53\ngcvit_xxtiny.in1k,224,29.193,70.807,48.376,51.624,12.00,0.875,bicubic,-50.563,-46.682,+14\ntresnet_l.miil_in1k_448,448,29.183,70.817,47.262,52.738,55.99,0.875,bilinear,-53.095,-48.726,-345\nseresnet50.ra2_in1k,288,29.158,70.842,47.743,52.257,28.09,0.950,bicub
ic,-52.130,-47.909,-206\nlambda_resnet50ts.a1h_in1k,256,29.130,70.870,46.973,53.027,21.54,0.950,bicubic,-52.054,-48.121,-190\ninception_v3.gluon_in1k,299,29.130,70.870,46.953,53.047,23.83,0.875,bicubic,-49.676,-47.419,+117\nresnet101.a2_in1k,288,29.101,70.899,45.731,54.269,44.55,1.000,bicubic,-53.117,-50.009,-341\neca_resnet33ts.ra2_in1k,256,29.099,70.901,48.822,51.178,19.68,0.900,bicubic,-50.985,-46.158,-35\nresnet50d.a1_in1k,224,29.099,70.901,46.004,53.996,25.58,0.950,bicubic,-51.631,-48.674,-123\ncs3darknet_focus_l.c2ns_in1k,256,29.079,70.921,47.678,52.322,21.15,0.887,bicubic,-51.175,-47.616,-52\nconvnext_pico_ols.d1_in1k,224,29.055,70.945,47.971,52.029,9.06,0.950,bicubic,-50.487,-46.617,+29\nxception71.tf_in1k,299,29.055,70.945,47.407,52.593,42.34,0.903,bicubic,-50.865,-47.501,-25\nresnext50_32x4d.tv2_in1k,176,29.034,70.966,46.824,53.176,25.03,0.875,bilinear,-50.350,-47.479,+44\nhrnet_w64.ms_in1k,224,29.028,70.972,47.106,52.894,128.06,0.875,bilinear,-50.430,-47.540,+34\nresnet34d.ra2_in1k,288,29.020,70.980,48.069,51.931,21.82,0.950,bicubic,-49.420,-46.279,+148\nregnetx_032.tv2_in1k,224,28.981,71.019,47.053,52.947,15.30,0.965,bicubic,-51.935,-48.195,-162\ncs3darknet_focus_l.c2ns_in1k,288,28.979,71.021,47.608,52.392,21.15,0.950,bicubic,-51.915,-48.067,-160\nvit_small_patch32_384.augreg_in21k_ft_in1k,384,28.926,71.074,48.948,51.052,22.92,1.000,bicubic,-51.550,-46.246,-102\nxcit_tiny_12_p8_224.fb_in1k,224,28.914,71.086,47.495,52.505,6.71,1.000,bicubic,-50.796,-47.322,+4\ntf_efficientnet_b0.ns_jft_in1k,224,28.908,71.092,48.967,51.033,5.29,0.875,bicubic,-49.772,-45.405,+117\nresnet101.gluon_in1k,224,28.900,71.100,46.419,53.581,44.55,0.875,bicubic,-50.406,-48.213,+50\nxception65.tf_in1k,299,28.894,71.106,47.132,52.868,39.92,0.903,bicubic,-50.664,-47.524,+16\ntf_efficientnet_b1.aa_in1k,240,28.884,71.116,47.545,52.455,7.79,0.882,bicubic,-49.956,-46.657,+96\nmobilevitv2_150.cvnets_in22k_ft_in1k_384,384,28.859,71.141,47.930,52.070,10.59,1.000,bicubic,-53.747,-48.384,-426\n
halonet50ts.a1h_in1k,256,28.843,71.157,46.511,53.489,22.73,0.940,bicubic,-52.797,-49.101,-283\nskresnext50_32x4d.ra_in1k,224,28.835,71.165,46.489,53.511,27.48,0.875,bicubic,-51.333,-48.155,-60\nmambaout_kobe.in1k,288,28.798,71.202,47.973,52.027,9.14,1.000,bicubic,-52.266,-47.709,-193\nsehalonet33ts.ra2_in1k,256,28.780,71.220,46.574,53.426,13.69,0.940,bicubic,-52.196,-48.698,-184\nlevit_conv_256.fb_dist_in1k,224,28.770,71.230,46.723,53.277,18.89,0.900,bicubic,-52.736,-48.743,-269\nlevit_256.fb_dist_in1k,224,28.768,71.231,46.729,53.271,18.89,0.900,bicubic,-52.737,-48.737,-271\ntf_efficientnet_lite3.in1k,300,28.765,71.235,47.480,52.520,8.20,0.904,bilinear,-51.043,-47.428,-21\nresnet50.ra_in1k,288,28.765,71.235,47.342,52.658,25.56,0.950,bicubic,-51.099,-47.628,-29\nhgnet_tiny.paddle_in1k,288,28.749,71.251,47.704,52.296,14.74,1.000,bicubic,-51.895,-47.852,-135\nseresnet50.a1_in1k,224,28.747,71.253,45.817,54.183,28.09,0.950,bicubic,-51.265,-48.887,-53\nmambaout_femto.in1k,224,28.704,71.296,47.786,52.214,7.30,1.000,bicubic,-50.176,-46.622,+81\nmobileone_s3.apple_in1k,224,28.698,71.302,47.539,52.461,10.17,0.900,bilinear,-49.304,-46.325,+164\ndarknetaa53.c2ns_in1k,288,28.660,71.340,46.963,53.037,36.02,1.000,bilinear,-51.868,-48.291,-127\nskresnet34.ra_in1k,224,28.659,71.341,47.987,52.013,22.28,0.875,bicubic,-48.297,-45.333,+242\nresnetblur50.bt_in1k,288,28.654,71.346,46.922,53.078,25.56,0.950,bicubic,-51.594,-48.278,-80\ntf_efficientnetv2_b0.in1k,192,28.645,71.355,47.413,52.587,7.14,0.875,bicubic,-48.231,-45.771,+247\nconvnext_pico.d1_in1k,224,28.639,71.361,47.366,52.634,9.05,0.875,bicubic,-50.877,-47.188,+3\nseresnext50_32x4d.gluon_in1k,224,28.635,71.365,46.440,53.560,27.56,0.875,bicubic,-51.281,-48.376,-52\nhrnet_w40.ms_in1k,224,28.631,71.369,47.464,52.536,57.56,0.875,bilinear,-50.297,-47.016,+66\nswinv2_tiny_window8_256.ms_in1k,256,28.615,71.385,46.177,53.823,28.35,0.900,bicubic,-53.209,-49.811,-317\nmobilevitv2_175.cvnets_in22k_ft_in1k_384,384,28.607,71.393,47.144,52.856
,14.25,1.000,bicubic,-54.313,-49.306,-500\ntf_efficientnet_b3.in1k,300,28.574,71.426,48.006,51.994,12.23,0.904,bicubic,-52.304,-47.302,-183\ntf_efficientnetv2_b0.in1k,224,28.568,71.432,47.105,52.895,7.14,0.875,bicubic,-49.818,-46.930,+121\ntf_efficientnetv2_b1.in1k,192,28.564,71.436,47.452,52.548,8.14,0.882,bicubic,-49.370,-46.370,+158\nresnet152.tv_in1k,224,28.550,71.450,47.146,52.854,60.19,0.875,bilinear,-49.780,-46.990,+123\npoolformerv2_s12.sail_in1k,224,28.543,71.457,47.407,52.593,11.89,1.000,bicubic,-49.465,-46.457,+149\neva02_tiny_patch14_336.mim_in22k_ft_in1k,336,28.525,71.475,47.541,52.459,5.76,1.000,bicubic,-52.123,-47.979,-154\nxcit_tiny_12_p16_224.fb_in1k,224,28.511,71.489,47.376,52.624,6.72,1.000,bicubic,-48.611,-46.352,+217\necaresnet50t.a1_in1k,288,28.474,71.526,45.550,54.450,25.57,1.000,bicubic,-53.636,-50.104,-371\nresnet50d.a2_in1k,224,28.458,71.542,45.513,54.487,25.58,0.950,bicubic,-51.810,-49.129,-100\necaresnet50t.a2_in1k,224,28.444,71.556,45.752,54.248,25.57,0.950,bicubic,-52.440,-49.270,-194\nrepvgg_b2.rvgg_in1k,224,28.433,71.567,47.038,52.962,89.02,0.875,bilinear,-50.355,-47.376,+70\ntf_efficientnet_b0.ap_in1k,224,28.427,71.573,47.553,52.447,5.29,0.875,bicubic,-48.673,-45.779,+215\nhrnet_w48.ms_in1k,224,28.415,71.585,47.600,52.400,77.47,0.875,bilinear,-50.913,-46.916,+7\nfasternet_t2.in1k,224,28.413,71.587,47.258,52.742,14.98,1.000,bicubic,-50.325,-47.074,+71\ndla102x2.in1k,224,28.411,71.589,46.755,53.245,41.28,0.875,bilinear,-51.037,-47.899,-11\nresnext50_32x4d.gluon_in1k,224,28.403,71.597,45.350,54.650,25.03,0.875,bicubic,-50.951,-49.074,+1\nshvit_s4.in1k,256,28.393,71.607,46.582,53.418,16.59,0.875,bicubic,-50.969,-47.788,-1\ndla169.in1k,224,28.379,71.621,47.505,52.495,53.39,0.875,bilinear,-50.323,-46.829,+72\nefficientnet_b2_pruned.in1k,260,28.360,71.640,47.061,52.939,8.31,0.890,bicubic,-51.544,-47.605,-70\ntf_efficientnet_cc_b0_4e.in1k,224,28.356,71.644,47.370,52.630,13.31,0.875,bicubic,-48.968,-45.964,+189\nswinv2_cr_tiny_ns_224.sw_in1k,
224,28.348,71.652,45.947,54.053,28.33,0.900,bicubic,-53.448,-49.877,-338\nseresnet33ts.ra2_in1k,256,28.338,71.662,47.773,52.227,19.78,0.900,bicubic,-52.042,-47.277,-130\ndarknet53.c2ns_in1k,288,28.311,71.689,46.886,53.114,41.61,1.000,bicubic,-52.231,-48.542,-159\nresnext50_32x4d.ra_in1k,288,28.305,71.695,46.130,53.870,25.03,0.950,bicubic,-52.415,-49.214,-183\necaresnet50t.a3_in1k,160,28.289,71.711,46.148,53.852,25.57,0.950,bicubic,-49.497,-47.473,+153\nmixnet_xl.ra_in1k,224,28.279,71.721,46.735,53.265,11.90,0.875,bicubic,-52.215,-48.243,-156\nswiftformer_s.dist_in1k,224,28.279,71.721,46.601,53.399,6.09,0.950,bicubic,-50.183,-47.379,+87\nshvit_s3.in1k,224,28.234,71.766,46.698,53.302,14.25,0.875,bicubic,-49.122,-46.612,+179\nresnet50d.gluon_in1k,224,28.234,71.766,45.922,54.078,25.58,0.875,bicubic,-50.846,-48.536,+21\nseresnet33ts.ra2_in1k,288,28.204,71.796,47.574,52.426,19.78,1.000,bicubic,-52.609,-47.772,-202\nresnet50.a1_in1k,288,28.189,71.811,44.919,55.081,25.56,1.000,bicubic,-53.051,-50.193,-269\nfastvit_s12.apple_in1k,256,28.151,71.849,46.670,53.330,9.47,0.900,bicubic,-51.731,-48.128,-79\nwide_resnet101_2.tv_in1k,224,28.142,71.858,46.560,53.440,126.89,0.875,bilinear,-50.720,-47.752,+39\nresnet101c.gluon_in1k,224,28.128,71.872,45.977,54.023,44.57,0.875,bicubic,-51.414,-48.730,-38\ndensenet161.tv_in1k,224,28.116,71.884,46.658,53.342,28.68,0.875,bicubic,-49.268,-46.998,+170\nresnet34.a1_in1k,288,28.104,71.896,45.723,54.277,21.80,1.000,bicubic,-49.826,-48.035,+126\nregnetx_320.pycls_in1k,224,28.081,71.919,45.130,54.870,107.81,0.875,bicubic,-52.177,-49.886,-125\necaresnet50d_pruned.miil_in1k,288,28.077,71.923,47.057,52.943,19.94,0.950,bicubic,-52.731,-48.509,-209\nresnext50_32x4d.a1_in1k,288,28.077,71.923,44.831,55.169,25.03,1.000,bicubic,-53.403,-50.321,-319\nregnety_320.pycls_in1k,224,28.067,71.933,45.464,54.536,145.05,0.875,bicubic,-52.729,-49.782,-208\ndarknetaa53.c2ns_in1k,256,28.057,71.943,46.578,53.422,36.02,0.887,bilinear,-51.697,-48.326,-69\ngernet_s.idstcv_i
n1k,224,28.053,71.947,46.747,53.253,8.17,0.875,bilinear,-48.821,-46.395,+201\nfbnetv3_b.ra2_in1k,224,28.035,71.965,47.743,52.257,8.60,0.950,bilinear,-50.148,-46.495,+94\nmobilevitv2_175.cvnets_in1k,256,28.035,71.965,46.108,53.892,14.25,0.888,bicubic,-52.835,-49.172,-223\nresnext50_32x4d.a3_in1k,160,28.016,71.984,45.589,54.411,25.03,0.950,bicubic,-49.716,-47.725,+134\nlevit_conv_192.fb_dist_in1k,224,28.006,71.994,45.884,54.116,10.95,0.900,bicubic,-51.854,-48.918,-88\nefficientnet_el_pruned.in1k,300,28.000,72.000,46.790,53.210,10.59,0.904,bicubic,-52.284,-48.428,-143\nlevit_192.fb_dist_in1k,224,28.000,72.000,45.886,54.114,10.95,0.900,bicubic,-51.858,-48.922,-89\nvit_base_patch16_224.augreg_in1k,224,27.973,72.027,45.719,54.281,86.57,0.900,bicubic,-51.181,-49.013,-5\nresnext50_32x4d.a2_in1k,224,27.965,72.035,44.446,55.554,25.03,0.950,bicubic,-52.479,-50.184,-170\nresnet50d.a3_in1k,160,27.953,72.047,45.295,54.705,25.58,0.950,bicubic,-49.269,-47.961,+166\nfastvit_t12.apple_in1k,256,27.937,72.063,46.389,53.611,7.55,0.900,bicubic,-51.339,-48.181,-24\nresnet101.a3_in1k,224,27.937,72.063,45.023,54.977,44.55,0.950,bicubic,-51.899,-49.684,-89\nresnet50_gn.a1h_in1k,288,27.935,72.065,46.093,53.907,25.56,0.950,bicubic,-53.281,-49.541,-289\ndarknet53.c2ns_in1k,256,27.914,72.086,46.537,53.463,41.61,0.887,bicubic,-52.094,-48.509,-119\nstarnet_s4.in1k,224,27.914,72.086,46.081,53.919,7.48,0.875,bicubic,-50.910,-48.215,+20\nxception41.tf_in1k,299,27.894,72.106,45.906,54.094,26.97,0.903,bicubic,-50.656,-48.376,+44\nresnetrs50.tf_in1k,160,27.865,72.135,46.686,53.314,35.69,0.910,bicubic,-49.993,-47.124,+112\ndpn68b.mx_in1k,224,27.835,72.165,47.372,52.628,12.61,0.875,bicubic,-49.659,-46.462,+137\nregnetx_160.pycls_in1k,224,27.833,72.167,45.629,54.371,54.28,0.875,bicubic,-52.011,-49.223,-99\nres2net101_26w_4s.in1k,224,27.814,72.186,45.177,54.823,45.21,0.875,bilinear,-51.378,-49.289,-20\nseresnet50.a2_in1k,224,27.784,72.216,44.709,55.291,28.09,0.950,bicubic,-52.318,-50.011,-134\ninception_v3.
tf_in1k,299,27.782,72.218,45.711,54.289,23.83,0.875,bicubic,-50.084,-47.933,+104\nresnet50d.a1_in1k,288,27.768,72.232,44.383,55.617,25.58,1.000,bicubic,-53.698,-51.295,-338\ntf_efficientnetv2_b1.in1k,240,27.760,72.240,46.582,53.418,8.14,0.882,bicubic,-51.758,-48.000,-68\nrepghostnet_200.in1k,224,27.753,72.247,46.309,53.691,9.80,0.875,bicubic,-51.049,-48.023,+14\nmobilevitv2_200.cvnets_in1k,256,27.723,72.277,45.833,54.167,18.45,0.888,bicubic,-53.425,-49.547,-293\nvit_base_patch16_224.sam_in1k,224,27.717,72.283,45.106,54.894,86.57,0.900,bicubic,-52.529,-50.016,-152\nfbnetv3_b.ra2_in1k,256,27.658,72.342,46.990,53.010,8.60,0.950,bilinear,-51.496,-47.118,-26\nrepvgg_b1.rvgg_in1k,224,27.652,72.348,46.511,53.489,57.42,0.875,bilinear,-50.718,-47.753,+55\nmambaout_femto.in1k,288,27.652,72.348,46.401,53.599,7.30,1.000,bicubic,-52.260,-48.733,-125\ninception_v3.tv_in1k,299,27.639,72.361,45.253,54.747,23.83,0.875,bicubic,-49.833,-48.215,+127\nconvnextv2_femto.fcmae_ft_in1k,224,27.637,72.363,46.818,53.182,5.23,0.875,bicubic,-50.839,-47.166,+40\nhrnet_w44.ms_in1k,224,27.627,72.373,45.847,54.153,67.06,0.875,bilinear,-51.263,-48.529,-5\ngcresnet33ts.ra2_in1k,256,27.599,72.401,46.238,53.762,19.88,0.900,bicubic,-52.473,-48.748,-141\nregnety_160.pycls_in1k,224,27.597,72.403,45.546,54.454,83.59,0.875,bicubic,-52.687,-49.436,-171\nresnet50.am_in1k,224,27.580,72.420,45.361,54.639,25.56,0.875,bicubic,-51.424,-49.030,-19\nresmlp_24_224.fb_in1k,224,27.570,72.430,45.700,54.300,30.02,0.875,bicubic,-51.810,-48.850,-65\npit_xs_224.in1k,224,27.489,72.511,45.855,54.145,10.62,0.900,bicubic,-50.707,-48.301,+58\ntiny_vit_5m_224.in1k,224,27.485,72.515,45.864,54.136,5.39,0.950,bicubic,-51.711,-48.928,-40\nhrnet_w30.ms_in1k,224,27.472,72.528,46.546,53.454,37.71,0.875,bilinear,-50.730,-47.674,+54\ngcresnet33ts.ra2_in1k,288,27.401,72.599,46.157,53.843,19.88,1.000,bicubic,-53.187,-49.161,-223\nresnet50.a2_in1k,224,27.393,72.607,44.251,55.749,25.56,0.950,bicubic,-52.435,-50.309,-118\nregnetx_080.pycls_in1k
,224,27.387,72.613,44.994,55.006,39.57,0.875,bicubic,-51.827,-49.548,-46\nconvnext_pico.d1_in1k,288,27.383,72.617,45.666,54.334,9.05,0.950,bicubic,-53.043,-49.396,-199\nhrnet_w32.ms_in1k,224,27.377,72.623,46.016,53.984,41.23,0.875,bilinear,-51.075,-48.168,+29\nvit_small_patch16_384.augreg_in1k,384,27.348,72.652,46.151,53.849,22.20,1.000,bicubic,-53.782,-49.439,-309\nresnet33ts.ra2_in1k,288,27.340,72.660,45.200,54.800,19.68,1.000,bicubic,-52.370,-49.858,-109\nresnet50s.gluon_in1k,224,27.332,72.668,45.253,54.747,25.68,0.875,bicubic,-51.378,-48.995,0\nres2net50_26w_8s.in1k,224,27.314,72.686,44.817,55.183,48.40,0.875,bilinear,-51.636,-49.891,-27\nconvnext_pico_ols.d1_in1k,288,27.295,72.705,45.650,54.350,9.06,1.000,bicubic,-53.163,-49.598,-214\ndensenet201.tv_in1k,224,27.279,72.721,46.212,53.788,20.01,0.875,bicubic,-50.005,-47.268,+122\necaresnet50t.a2_in1k,288,27.271,72.729,44.076,55.924,25.57,1.000,bicubic,-54.411,-51.448,-399\nghostnetv2_160.in1k,224,27.259,72.741,46.393,53.607,12.39,0.875,bicubic,-50.577,-47.555,+78\ndensenetblur121d.ra_in1k,224,27.240,72.760,46.303,53.697,8.00,0.875,bicubic,-49.334,-46.885,+162\nefficientnet_b1_pruned.in1k,240,27.226,72.774,45.961,54.039,6.33,0.882,bicubic,-51.026,-47.859,+36\nregnety_064.pycls_in1k,224,27.222,72.778,44.858,55.142,30.58,0.875,bicubic,-52.512,-49.910,-122\nresnet33ts.ra2_in1k,256,27.187,72.813,45.391,54.609,19.68,0.900,bicubic,-52.029,-49.185,-61\ntf_efficientnetv2_b2.in1k,260,27.177,72.823,44.568,55.432,10.10,0.890,bicubic,-53.049,-50.442,-181\nresnetrs50.tf_in1k,224,27.159,72.841,45.073,54.927,35.69,0.910,bicubic,-52.775,-49.899,-159\nvit_base_patch32_224.augreg_in1k,224,27.141,72.859,45.194,54.806,88.22,0.900,bicubic,-47.767,-46.576,+222\nseresnet50.a1_in1k,288,27.100,72.900,43.593,56.407,28.09,1.000,bicubic,-54.014,-51.743,-320\nresnet50d.a2_in1k,288,27.098,72.902,43.840,56.160,25.58,1.000,bicubic,-54.066,-51.259,-332\nresnet32ts.ra2_in1k,256,27.081,72.919,45.259,54.741,17.96,0.900,bicubic,-51.974,-49.109,-49\nre
xnet_130.nav_in1k,224,27.078,72.921,45.922,54.078,7.56,0.875,bicubic,-52.414,-48.759,-104\ndla102x.in1k,224,27.051,72.949,45.493,54.507,26.31,0.875,bilinear,-51.465,-48.733,-1\ngmixer_24_224.ra3_in1k,224,27.031,72.969,44.373,55.627,24.72,0.875,bicubic,-51.017,-49.281,+42\nresnet101.tv_in1k,224,26.974,73.026,45.240,54.760,44.55,0.875,bilinear,-50.430,-48.312,+96\nresnet32ts.ra2_in1k,288,26.931,73.069,45.065,54.935,17.96,1.000,bicubic,-52.457,-49.515,-99\nresnext50d_32x4d.bt_in1k,224,26.913,73.087,44.393,55.607,25.05,0.875,bicubic,-52.758,-50.485,-127\nrexnet_100.nav_in1k,224,26.874,73.126,45.375,54.625,4.80,0.875,bicubic,-50.988,-48.515,+57\nregnetx_120.pycls_in1k,224,26.870,73.130,44.676,55.324,46.11,0.875,bicubic,-52.738,-50.058,-123\ndensenet169.tv_in1k,224,26.862,73.138,45.420,54.580,14.15,0.875,bicubic,-49.060,-47.388,+171\nregnety_120.pycls_in1k,224,26.811,73.189,44.491,55.509,51.82,0.875,bicubic,-53.571,-50.613,-224\nlegacy_seresnext101_32x4d.in1k,224,26.802,73.198,43.477,56.523,48.96,0.875,bilinear,-53.472,-51.555,-207\nregnetx_064.pycls_in1k,224,26.797,73.203,44.947,55.053,26.21,0.875,bicubic,-52.264,-49.521,-64\ntinynet_a.in1k,192,26.792,73.208,45.094,54.906,6.19,0.875,bicubic,-50.874,-48.438,+64\nres2net101d.in1k,224,26.721,73.279,44.363,55.637,45.23,0.875,bilinear,-54.515,-50.983,-355\nregnetx_032.pycls_in1k,224,26.707,73.293,45.249,54.751,15.30,0.875,bicubic,-51.453,-48.915,+21\nresnext50_32x4d.a2_in1k,288,26.691,73.309,42.750,57.250,25.03,1.000,bicubic,-54.613,-52.364,-368\ndensenet121.ra_in1k,224,26.680,73.320,45.896,54.104,7.98,0.875,bicubic,-48.894,-46.752,+177\nlegacy_seresnet152.in1k,224,26.676,73.324,43.951,56.049,66.82,0.875,bilinear,-51.986,-50.419,-28\nefficientvit_m5.r224_in1k,224,26.666,73.334,44.917,55.083,12.47,0.875,bicubic,-50.426,-48.251,+106\ndla60x.in1k,224,26.634,73.366,45.025,54.975,17.35,0.875,bilinear,-51.600,-48.997,+10\nefficientnet_es.ra_in1k,224,26.631,73.370,45.102,54.898,5.44,0.875,bicubic,-51.469,-48.826,+19\nres2net50_26w_6
s.in1k,224,26.601,73.399,43.996,56.004,37.05,0.875,bilinear,-51.985,-50.124,-28\nseresnet50.a3_in1k,160,26.599,73.401,44.709,55.291,28.09,0.950,bicubic,-48.505,-47.387,+189\nrepvgg_b1g4.rvgg_in1k,224,26.579,73.421,45.092,54.908,39.97,0.875,bilinear,-51.029,-48.754,+59\nresnet50.a3_in1k,160,26.572,73.428,44.224,55.776,25.56,0.950,bicubic,-49.422,-48.270,+152\ncoat_lite_tiny.in1k,224,26.554,73.446,44.719,55.281,5.72,0.900,bicubic,-50.994,-49.205,+62\nregnety_080.pycls_in1k,224,26.542,73.458,44.397,55.603,39.18,0.875,bicubic,-53.332,-50.437,-177\nresnet34.a2_in1k,224,26.538,73.462,43.911,56.089,21.80,0.950,bicubic,-48.982,-48.543,+167\nmobilenetv3_large_100.miil_in21k_ft_in1k,224,26.517,73.484,44.534,55.466,5.48,0.875,bilinear,-51.404,-48.360,+31\necaresnet26t.ra2_in1k,256,26.503,73.497,44.963,55.037,16.01,0.875,bicubic,-52.391,-49.585,-66\nres2net50_14w_8s.in1k,224,26.491,73.509,44.365,55.635,25.06,0.875,bilinear,-51.651,-49.487,+6\ntf_efficientnet_b0.aa_in1k,224,26.479,73.521,45.642,54.358,5.29,0.875,bicubic,-50.403,-47.627,+105\ntf_efficientnet_b2.in1k,260,26.471,73.529,44.778,55.222,9.11,0.890,bicubic,-53.141,-49.936,-149\necaresnet50t.a3_in1k,224,26.452,73.548,43.507,56.493,25.57,0.950,bicubic,-53.096,-51.203,-144\nconvnextv2_atto.fcmae_ft_in1k,224,26.444,73.556,44.296,55.704,3.71,0.875,bicubic,-50.200,-48.746,+114\nresnet50.gluon_in1k,224,26.432,73.568,44.051,55.949,25.56,0.875,bicubic,-51.148,-49.653,+51\nmobileone_s2.apple_in1k,224,26.418,73.582,44.554,55.446,7.88,0.900,bilinear,-51.086,-49.110,+53\nconvnext_femto.d1_in1k,224,26.387,73.613,44.839,55.161,5.22,0.875,bicubic,-51.111,-48.839,+52\nmobilenetv1_125.ra4_e3600_r224_in1k,224,26.387,73.613,44.654,55.346,6.27,0.900,bicubic,-50.533,-48.578,+96\ntf_efficientnet_el.in1k,300,26.387,73.613,44.245,55.755,10.59,0.904,bicubic,-53.859,-50.519,-227\nmobilenetv1_125.ra4_e3600_r224_in1k,256,26.371,73.629,44.554,55.446,6.27,1.000,bicubic,-51.241,-49.200,+41\nlambda_resnet26t.c1_in1k,256,26.361,73.639,44.408,55.592,10.9
6,0.940,bicubic,-52.765,-50.146,-101\nlevit_conv_128.fb_dist_in1k,224,26.361,73.639,44.143,55.857,9.21,0.900,bicubic,-52.127,-50.001,-39\nlevit_128.fb_dist_in1k,224,26.349,73.650,44.121,55.879,9.21,0.900,bicubic,-52.137,-49.877,-38\nresmlp_12_224.fb_distilled_in1k,224,26.322,73.678,44.896,55.104,15.35,0.875,bicubic,-51.626,-48.674,+11\nresmlp_big_24_224.fb_in1k,224,26.316,73.684,43.559,56.441,129.14,0.875,bicubic,-54.718,-51.466,-356\nregnetx_040.pycls_in1k,224,26.263,73.737,44.422,55.578,22.12,0.875,bicubic,-52.223,-49.662,-43\nvisformer_tiny.in1k,224,26.257,73.743,44.202,55.798,10.32,0.900,bicubic,-51.903,-49.886,-13\nmobilevitv2_150.cvnets_in1k,256,26.224,73.776,43.829,56.171,10.59,0.888,bicubic,-54.156,-51.277,-260\nconvnext_femto_ols.d1_in1k,224,26.200,73.800,44.245,55.755,5.23,0.875,bicubic,-51.652,-49.581,+17\nvit_small_patch32_224.augreg_in21k_ft_in1k,224,26.183,73.817,45.132,54.868,22.88,0.900,bicubic,-49.828,-48.134,+124\ncrossvit_9_dagger_240.in1k,240,26.175,73.825,44.562,55.438,8.78,0.875,bicubic,-50.815,-49.040,+77\nshvit_s2.in1k,224,26.173,73.827,44.051,55.949,11.48,0.875,bicubic,-49.011,-48.265,+155\nseresnet50.a2_in1k,288,26.169,73.831,42.679,57.321,28.09,1.000,bicubic,-54.925,-52.551,-373\ndensenetblur121d.ra_in1k,288,26.167,73.833,45.035,54.965,8.00,0.950,bicubic,-51.153,-48.761,+51\nresnet50d.a3_in1k,224,26.133,73.867,42.974,57.026,25.58,0.950,bicubic,-52.609,-51.262,-76\nresnet50.a2_in1k,288,26.125,73.874,42.593,57.407,25.56,1.000,bicubic,-54.648,-52.385,-328\nresnext50_32x4d.a3_in1k,224,26.096,73.904,42.952,57.048,25.03,0.950,bicubic,-53.162,-51.356,-131\nresnet34.a2_in1k,288,26.092,73.908,43.133,56.867,21.80,1.000,bicubic,-51.058,-50.155,+60\nefficientnet_b1.ft_in1k,256,26.084,73.916,44.047,55.953,7.79,1.000,bicubic,-52.722,-50.299,-85\nconvnextv2_femto.fcmae_ft_in1k,288,26.069,73.931,44.351,55.649,5.23,0.950,bicubic,-53.273,-50.239,-147\nmobilevitv2_125.cvnets_in1k,256,26.037,73.963,43.701,56.299,7.48,0.888,bicubic,-53.645,-51.143,-184\nlambda
_resnet26rpt_256.c1_in1k,256,26.019,73.981,44.232,55.768,10.99,0.940,bicubic,-52.941,-50.188,-106\nhardcorenas_c.miil_green_in1k,224,26.006,73.994,44.855,55.145,5.52,0.875,bilinear,-51.086,-48.337,+60\ndpn68.mx_in1k,224,26.004,73.996,44.074,55.926,12.61,0.875,bicubic,-50.300,-48.920,+103\nresnetv2_18d.ra4_e3600_r224_in1k,224,26.002,73.998,44.004,55.996,11.71,0.900,bicubic,-48.420,-47.924,+168\nfastvit_t8.apple_dist_in1k,256,25.992,74.008,44.424,55.576,4.03,0.900,bicubic,-51.178,-48.850,+51\nrepghostnet_150.in1k,224,25.982,74.018,44.369,55.631,6.58,0.875,bicubic,-51.492,-49.127,+26\nhrnet_w18.ms_in1k,224,25.976,74.024,44.803,55.197,21.30,0.875,bilinear,-50.794,-48.645,+73\nhardcorenas_f.miil_green_in1k,224,25.966,74.034,44.236,55.764,8.20,0.875,bilinear,-52.134,-49.564,-29\nvit_small_patch16_224.augreg_in1k,224,25.953,74.047,43.966,56.034,22.05,0.900,bicubic,-52.889,-50.314,-102\nresnetv2_18.ra4_e3600_r224_in1k,224,25.925,74.075,44.540,55.460,11.69,0.900,bicubic,-47.647,-46.816,+179\nregnety_040.pycls_in1k,224,25.923,74.077,43.846,56.154,20.65,0.875,bicubic,-53.323,-50.820,-141\nresnext50d_32x4d.bt_in1k,288,25.921,74.079,42.952,57.048,25.05,0.950,bicubic,-54.755,-52.474,-330\nhrnet_w18_small_v2.gluon_in1k,224,25.894,74.106,43.815,56.185,15.60,0.875,bicubic,-52.308,-50.091,-43\nregnetx_016.tv2_in1k,224,25.894,74.106,43.355,56.645,9.19,0.965,bicubic,-53.546,-51.413,-173\ndensenet121.ra_in1k,288,25.874,74.126,44.904,55.096,7.98,0.950,bicubic,-50.612,-48.474,+83\nresnet26t.ra2_in1k,256,25.874,74.126,43.996,56.004,16.01,0.940,bicubic,-52.004,-49.842,-15\ncoat_tiny.in1k,224,25.872,74.128,43.284,56.716,5.50,0.900,bicubic,-52.570,-50.760,-66\nres2net50_26w_4s.in1k,224,25.864,74.136,43.163,56.837,25.70,0.875,bilinear,-52.130,-50.697,-28\ntresnet_m.miil_in1k_448,448,25.858,74.142,42.891,57.109,31.39,0.875,bilinear,-55.852,-52.671,-494\nfastvit_t8.apple_in1k,256,25.850,74.150,44.110,55.890,4.03,0.900,bicubic,-50.334,-48.932,+87\nghostnetv3_100.in1k,224,25.790,74.210,44.218,55.7
82,8.13,0.875,bicubic,-51.147,-48.906,+52\nresnet50c.gluon_in1k,224,25.790,74.210,43.039,56.961,25.58,0.875,bicubic,-52.227,-50.913,-36\nmobilenetv3_large_100.ra4_e3600_r224_in1k,224,25.788,74.213,44.147,55.853,5.48,0.950,bicubic,-50.523,-48.695,+82\nselecsls60.in1k,224,25.748,74.252,44.065,55.935,30.67,0.875,bicubic,-52.226,-49.758,-32\nhalonet26t.a1h_in1k,256,25.738,74.262,43.235,56.765,12.48,0.950,bicubic,-53.396,-51.105,-146\ncs3darknet_focus_m.c2ns_in1k,256,25.687,74.313,44.151,55.849,9.30,0.887,bicubic,-51.067,-49.399,+56\ndla60_res2net.in1k,224,25.681,74.319,43.589,56.411,20.85,0.875,bilinear,-52.799,-50.615,-82\nhardcorenas_e.miil_green_in1k,224,25.670,74.330,43.420,56.580,8.07,0.875,bilinear,-52.118,-50.352,-17\npoolformer_s12.sail_in1k,224,25.654,74.346,44.192,55.808,11.92,0.900,bicubic,-51.584,-49.346,+21\nstarnet_s3.in1k,224,25.642,74.358,43.617,56.383,5.75,0.875,bicubic,-51.736,-49.999,+10\ndla60_res2next.in1k,224,25.624,74.376,43.697,56.303,17.03,0.875,bilinear,-52.864,-50.303,-91\neca_halonext26ts.c1_in1k,256,25.546,74.454,43.221,56.779,10.76,0.940,bicubic,-53.972,-51.488,-200\necaresnet26t.ra2_in1k,320,25.542,74.458,43.683,56.317,16.01,0.950,bicubic,-54.358,-51.387,-248\nconvnext_femto.d1_in1k,288,25.536,74.464,43.666,56.334,5.22,0.950,bicubic,-53.176,-50.770,-113\nresmlp_12_224.fb_in1k,224,25.526,74.474,44.355,55.645,15.35,0.875,bicubic,-51.134,-48.815,+50\ntf_efficientnet_lite1.in1k,240,25.526,74.474,43.614,56.386,5.42,0.882,bicubic,-51.148,-49.617,+50\nmixnet_l.ft_in1k,224,25.524,74.476,43.487,56.513,7.33,0.875,bicubic,-53.446,-50.693,-143\nresnet50.bt_in1k,224,25.514,74.486,43.141,56.859,25.56,0.875,bicubic,-52.930,-51.133,-88\nrepvgg_a2.rvgg_in1k,224,25.501,74.499,43.890,56.110,28.21,0.875,bilinear,-50.987,-49.126,+59\nbat_resnext26ts.ch_in1k,256,25.501,74.499,43.214,56.786,10.73,0.900,bicubic,-52.779,-50.900,-76\ncs3darknet_focus_m.c2ns_in1k,288,25.499,74.501,43.764,56.236,9.30,0.950,bicubic,-51.787,-50.202,+6\nres2net50d.in1k,224,25.497,74.503
,43.066,56.934,25.72,0.875,bilinear,-54.783,-51.962,-302\nresnext50_32x4d.tv_in1k,224,25.481,74.519,42.819,57.181,25.03,0.875,bilinear,-52.151,-50.855,-25\nmobilenetv1_100h.ra4_e3600_r224_in1k,224,25.473,74.527,43.843,56.157,5.28,0.875,bicubic,-50.191,-48.680,+81\nresnetv2_18.ra4_e3600_r224_in1k,288,25.457,74.543,44.055,55.945,11.69,1.000,bicubic,-49.907,-48.637,+96\nbotnet26t_256.c1_in1k,256,25.440,74.560,42.630,57.370,12.49,0.950,bicubic,-53.816,-52.164,-179\nregnety_008_tv.tv2_in1k,224,25.420,74.580,43.436,56.564,6.43,0.965,bicubic,-53.252,-50.944,-120\ntf_mixnet_l.in1k,224,25.418,74.582,42.536,57.464,7.33,0.875,bicubic,-53.356,-51.464,-132\nresnetv2_18d.ra4_e3600_r224_in1k,288,25.404,74.596,43.652,56.348,11.71,1.000,bicubic,-50.654,-49.358,+62\nres2next50.in1k,224,25.400,74.600,42.530,57.470,24.67,0.875,bilinear,-52.838,-51.374,-83\nconvnext_femto_ols.d1_in1k,288,25.395,74.606,43.149,56.851,5.23,0.950,bicubic,-53.521,-51.385,-152\nhardcorenas_b.miil_green_in1k,224,25.392,74.608,44.155,55.845,5.18,0.875,bilinear,-51.157,-48.611,+40\nresnet34.bt_in1k,224,25.392,74.608,43.080,56.920,21.80,0.875,bicubic,-49.784,-49.086,+94\nresnet18d.ra4_e3600_r224_in1k,224,25.367,74.633,43.921,56.079,11.71,0.900,bicubic,-48.989,-47.905,+119\nefficientformerv2_s0.snap_dist_in1k,224,25.361,74.639,43.931,56.069,3.60,0.950,bicubic,-50.741,-48.919,+54\nresnetv2_50x1_bit.goog_in21k_ft_in1k,448,25.355,74.645,45.361,54.639,25.55,1.000,bilinear,-55.031,-50.333,-333\nmobilenetv3_large_100.ra4_e3600_r224_in1k,256,25.341,74.659,43.923,56.077,5.48,1.000,bicubic,-51.841,-49.405,-2\nwide_resnet50_2.tv_in1k,224,25.340,74.660,42.188,57.812,68.88,0.875,bilinear,-53.147,-52.064,-115\nmobilenetv1_100h.ra4_e3600_r224_in1k,256,25.332,74.668,43.888,56.112,5.28,0.950,bicubic,-51.266,-49.384,+30\nhardcorenas_d.miil_green_in1k,224,25.332,74.668,43.174,56.826,7.50,0.875,bilinear,-52.118,-50.306,-24\ndla102.in1k,224,25.330,74.670,43.837,56.163,33.27,0.875,bilinear,-52.686,-50.157,-74\nlegacy_seresnet101.in1k,
224,25.326,74.674,42.811,57.189,49.33,0.875,bilinear,-53.044,-51.295,-104\nresnest14d.gluon_in1k,224,25.316,74.684,44.088,55.912,10.61,0.875,bilinear,-50.200,-48.424,+71\nselecsls60b.in1k,224,25.314,74.686,43.497,56.503,32.77,0.875,bicubic,-53.096,-50.669,-110\nregnety_032.pycls_in1k,224,25.314,74.686,42.948,57.052,19.44,0.875,bicubic,-53.588,-51.478,-164\nefficientnet_b1.ft_in1k,224,25.265,74.735,42.992,57.008,7.79,0.875,bicubic,-52.327,-50.644,-43\nlegacy_seresnext50_32x4d.in1k,224,25.218,74.782,41.962,58.038,27.56,0.875,bilinear,-53.862,-52.466,-183\nghostnetv2_130.in1k,224,25.171,74.829,43.204,56.796,8.96,0.875,bicubic,-51.575,-50.160,+15\nmixer_b16_224.goog_in21k_ft_in1k,224,25.153,74.847,41.243,58.757,59.88,0.875,bicubic,-51.463,-51.007,+19\nresnet50.a3_in1k,224,25.068,74.932,41.968,58.032,25.56,0.950,bicubic,-52.984,-51.812,-88\neca_botnext26ts_256.c1_in1k,256,25.047,74.953,43.037,56.963,10.59,0.950,bicubic,-54.215,-51.563,-207\nefficientnet_b0.ra_in1k,224,25.017,74.983,42.791,57.209,5.29,0.875,bicubic,-52.681,-50.747,-58\nres2net50_48w_2s.in1k,224,25.015,74.985,42.223,57.777,25.29,0.875,bilinear,-52.513,-51.339,-46\nmobilenetv4_conv_small.e3600_r256_in1k,256,24.972,75.028,43.620,56.380,3.77,0.950,bicubic,-49.562,-48.320,+96\nxcit_nano_12_p8_224.fb_dist_in1k,224,24.958,75.042,43.292,56.708,3.05,1.000,bicubic,-51.372,-49.778,+29\nresnet34.gluon_in1k,224,24.954,75.046,42.294,57.706,21.80,0.875,bicubic,-49.620,-49.694,+92\ndla60.in1k,224,24.927,75.073,43.320,56.680,22.04,0.875,bilinear,-52.109,-49.752,-13\nmobilenetv2_120d.ra_in1k,224,24.919,75.081,43.039,56.961,5.83,0.875,bicubic,-52.381,-50.477,-33\ncs3darknet_m.c2ns_in1k,256,24.905,75.095,43.111,56.889,9.31,0.887,bicubic,-52.069,-50.470,-11\nconvnextv2_atto.fcmae_ft_in1k,288,24.884,75.116,42.487,57.513,3.71,0.950,bicubic,-52.910,-51.241,-71\nresnet34.bt_in1k,288,24.836,75.164,42.101,57.899,21.80,0.950,bicubic,-51.670,-51.241,+16\nresnet18d.ra4_e3600_r224_in1k,288,24.828,75.171,43.147,56.853,11.71,1.000,bicubi
c,-51.184,-49.637,+30\nseresnet50.a3_in1k,224,24.809,75.191,42.101,57.899,28.09,0.950,bicubic,-52.227,-51.215,-18\nmobilenetv1_100.ra4_e3600_r224_in1k,256,24.805,75.195,43.479,56.521,4.23,0.950,bicubic,-51.283,-49.527,+25\nregnety_016.pycls_in1k,224,24.805,75.195,42.618,57.382,11.20,0.875,bicubic,-53.075,-51.108,-85\nmobilenetv1_100.ra4_e3600_r224_in1k,224,24.785,75.215,43.457,56.543,4.23,0.875,bicubic,-50.603,-48.853,+54\nresnet34.a3_in1k,160,24.760,75.240,42.534,57.466,21.80,0.950,bicubic,-45.820,-47.006,+153\npit_ti_distilled_224.in1k,224,24.721,75.279,43.192,56.808,5.10,0.900,bicubic,-49.555,-48.728,+88\nseresnext26ts.ch_in1k,256,24.701,75.299,43.096,56.904,10.39,0.900,bicubic,-53.159,-50.694,-86\neca_resnext26ts.ch_in1k,256,24.669,75.331,42.893,57.107,10.30,0.900,bicubic,-52.777,-50.681,-54\ncs3darknet_m.c2ns_in1k,288,24.622,75.378,43.005,56.995,9.31,0.950,bicubic,-53.014,-51.011,-74\nresnet50.bt_in1k,288,24.595,75.405,41.478,58.522,25.56,0.950,bicubic,-55.057,-53.430,-273\neca_resnext26ts.ch_in1k,288,24.571,75.429,42.542,57.458,10.30,1.000,bicubic,-53.449,-51.390,-108\ntf_efficientnet_lite2.in1k,260,24.569,75.431,42.263,57.737,6.09,0.890,bicubic,-52.901,-51.493,-60\nfasternet_t1.in1k,224,24.567,75.433,42.956,57.044,7.60,1.000,bicubic,-51.355,-50.020,+24\nregnetx_016.pycls_in1k,224,24.563,75.437,42.496,57.504,9.19,0.875,bicubic,-52.373,-50.936,-24\nmobilevitv2_100.cvnets_in1k,256,24.561,75.439,42.943,57.057,4.90,0.888,bicubic,-53.527,-51.227,-117\nseresnext26ts.ch_in1k,288,24.524,75.476,42.656,57.344,10.39,1.000,bicubic,-53.768,-51.420,-135\nskresnet18.ra_in1k,224,24.485,75.515,42.561,57.439,11.96,0.875,bicubic,-48.535,-48.611,+103\nhardcorenas_a.miil_green_in1k,224,24.410,75.590,43.345,56.655,5.26,0.875,bilinear,-51.516,-49.141,+17\ntf_efficientnet_lite0.in1k,224,24.392,75.608,42.489,57.511,4.65,0.875,bicubic,-50.454,-49.691,+58\nconvnext_atto.d2_in1k,224,24.369,75.631,42.359,57.641,3.70,0.875,bicubic,-51.303,-50.555,+22\nhrnet_w18.ms_aug_in1k,224,24.331,75.66
9,42.860,57.140,21.30,0.950,bilinear,-53.779,-51.202,-126\nmobilenetv4_conv_small.e3600_r256_in1k,320,24.298,75.702,42.673,57.327,3.77,1.000,bicubic,-51.318,-50.095,+23\nefficientvit_m4.r224_in1k,224,24.272,75.728,41.799,58.201,8.80,0.875,bicubic,-50.072,-50.161,+69\nmobilenetv4_conv_small.e2400_r224_in1k,256,24.255,75.745,42.551,57.449,3.77,0.950,bicubic,-50.389,-49.471,+58\ngcresnext26ts.ch_in1k,288,24.190,75.810,41.347,58.653,10.48,1.000,bicubic,-54.246,-52.681,-153\nconvnext_zepto_rms.ra4_e3600_r224_in1k,224,24.180,75.820,41.477,58.523,2.16,0.875,bicubic,-48.640,-49.629,+99\nconvnext_atto_ols.a2_in1k,224,24.164,75.836,42.052,57.948,3.70,0.875,bicubic,-51.738,-50.790,+11\ntf_efficientnet_b1.in1k,240,24.119,75.881,41.520,58.480,7.79,0.882,bicubic,-54.427,-52.462,-174\nresnet18.a1_in1k,224,24.072,75.928,40.919,59.081,11.69,0.950,bicubic,-47.432,-49.167,+119\nxcit_nano_12_p8_384.fb_dist_in1k,384,24.064,75.936,42.035,57.965,3.05,1.000,bicubic,-53.746,-51.781,-106\nresnet50.tv_in1k,224,24.064,75.936,41.329,58.671,25.56,0.875,bilinear,-52.092,-51.537,-4\nlevit_conv_128s.fb_dist_in1k,224,24.037,75.963,40.985,59.015,7.78,0.900,bicubic,-52.497,-51.897,-20\nlevit_128s.fb_dist_in1k,224,24.033,75.967,40.975,59.025,7.78,0.900,bicubic,-52.487,-51.895,-19\nlegacy_seresnet34.in1k,224,24.017,75.983,41.909,58.091,21.96,0.875,bilinear,-50.771,-50.225,+42\nconvnext_zepto_rms_ols.ra4_e3600_r224_in1k,224,24.017,75.983,41.569,58.431,2.16,0.900,bicubic,-49.217,-49.701,+82\ngcresnext26ts.ch_in1k,256,24.017,75.983,41.372,58.628,10.48,0.900,bicubic,-53.793,-52.664,-109\nxcit_nano_12_p16_384.fb_dist_in1k,384,23.991,76.009,42.278,57.722,3.05,1.000,bicubic,-51.469,-50.394,+18\nrepghostnet_130.in1k,224,23.966,76.034,41.671,58.329,5.48,0.875,bicubic,-52.412,-51.227,-18\nresnet18d.ra2_in1k,224,23.944,76.056,42.282,57.718,11.71,0.875,bicubic,-48.352,-48.400,+95\nmobilenetv4_conv_small.e2400_r224_in1k,224,23.925,76.075,41.732,58.268,3.77,0.875,bicubic,-49.831,-49.698,+65\nefficientnet_lite0.ra_in1
k,224,23.907,76.093,42.135,57.865,4.65,0.875,bicubic,-51.581,-50.373,+13\nresnext26ts.ra2_in1k,256,23.883,76.117,41.131,58.869,10.30,0.900,bicubic,-52.885,-52.007,-43\nefficientnet_es_pruned.in1k,224,23.870,76.130,41.999,58.001,5.44,0.875,bicubic,-51.140,-50.439,+30\ndensenet121.tv_in1k,224,23.836,76.164,41.934,58.066,7.98,0.875,bicubic,-50.914,-50.224,+35\nregnetx_008.tv2_in1k,224,23.769,76.231,40.708,59.292,7.26,0.965,bicubic,-53.525,-52.956,-81\nresnet26t.ra2_in1k,320,23.742,76.258,41.355,58.645,16.01,1.000,bicubic,-54.588,-52.691,-168\nmobilenetv2_140.ra_in1k,224,23.693,76.307,41.459,58.541,6.11,0.875,bicubic,-52.835,-51.549,-34\ndla34.in1k,224,23.687,76.313,41.555,58.445,15.74,0.875,bilinear,-50.945,-50.497,+35\nmixnet_m.ft_in1k,224,23.685,76.315,41.178,58.822,5.01,0.875,bicubic,-53.583,-52.240,-82\nlegacy_seresnet50.in1k,224,23.638,76.362,40.099,59.901,28.09,0.875,bilinear,-54.012,-53.633,-116\nresnext26ts.ra2_in1k,288,23.616,76.384,40.930,59.070,10.30,1.000,bicubic,-53.570,-52.540,-80\nconvnext_atto.d2_in1k,288,23.612,76.388,41.093,58.907,3.70,0.950,bicubic,-53.404,-52.611,-68\nese_vovnet19b_dw.ra_in1k,224,23.551,76.449,41.280,58.720,6.54,0.875,bicubic,-53.277,-51.996,-56\nresnet34.tv_in1k,224,23.494,76.506,41.380,58.620,21.80,0.875,bilinear,-49.804,-50.042,+60\ntf_mixnet_m.in1k,224,23.480,76.519,40.995,59.005,5.01,0.875,bicubic,-53.471,-52.169,-67\nefficientvit_m3.r224_in1k,224,23.400,76.600,40.516,59.484,6.90,0.875,bicubic,-49.994,-50.820,+55\ntf_efficientnet_em.in1k,240,23.392,76.608,40.402,59.598,6.90,0.882,bicubic,-54.736,-53.650,-163\nselecsls42b.in1k,224,23.386,76.614,40.661,59.339,32.46,0.875,bicubic,-53.798,-52.729,-86\nresnet34.a3_in1k,224,23.355,76.645,40.138,59.862,21.80,0.950,bicubic,-49.641,-50.978,+63\nrepvgg_b0.rvgg_in1k,224,23.339,76.661,41.260,58.740,15.82,0.875,bilinear,-51.819,-51.150,+6\nshvit_s1.in1k,224,23.323,76.677,40.901,59.099,6.33,0.875,bicubic,-49.457,-50.123,+65\nxcit_nano_12_p16_224.fb_dist_in1k,224,23.272,76.728,41.459,58.541,3
.05,1.000,bicubic,-49.046,-49.367,+71\nregnety_004.tv2_in1k,224,23.260,76.740,40.873,59.127,4.34,0.965,bicubic,-52.334,-51.833,-14\nconvnext_atto_ols.a2_in1k,288,23.137,76.863,40.901,59.099,3.70,0.950,bicubic,-54.083,-52.799,-94\nmobilenetv2_110d.ra_in1k,224,23.076,76.924,40.736,59.264,4.52,0.875,bicubic,-51.980,-51.450,+6\nresnet18.a1_in1k,288,23.074,76.926,39.572,60.428,11.69,1.000,bicubic,-50.090,-51.482,+51\nvit_base_patch32_224.sam_in1k,224,23.054,76.946,39.592,60.408,88.22,0.900,bicubic,-50.650,-51.420,+39\ninception_next_atto.sail_in1k,224,23.040,76.960,40.636,59.364,4.16,0.875,bicubic,-52.308,-51.923,-8\ntinynet_b.in1k,188,23.036,76.964,40.958,59.042,3.73,0.875,bicubic,-51.910,-51.236,+5\nresnet18d.ra2_in1k,288,23.025,76.975,41.123,58.877,11.71,0.950,bicubic,-50.765,-50.711,+34\ncs3darknet_focus_s.ra4_e3600_r256_in1k,256,22.985,77.015,40.940,59.060,3.27,0.887,bicubic,-49.193,-49.890,+68\nmobilenetv4_conv_small.e1200_r224_in1k,256,22.952,77.048,41.064,58.936,3.77,0.950,bicubic,-51.330,-51.058,+21\nrepghostnet_111.in1k,224,22.885,77.115,40.502,59.498,4.54,0.875,bicubic,-52.175,-51.694,-3\nmobileone_s1.apple_in1k,224,22.832,77.168,39.887,60.113,4.83,0.900,bilinear,-52.928,-52.901,-34\nstarnet_s2.in1k,224,22.795,77.205,40.268,59.732,3.68,0.875,bicubic,-51.871,-51.876,+6\ndeit_tiny_distilled_patch16_224.fb_in1k,224,22.714,77.286,40.785,59.215,5.91,0.900,bicubic,-51.824,-51.113,+11\nmobilenetv3_large_100.ra_in1k,224,22.691,77.309,40.710,59.290,5.48,0.875,bicubic,-53.093,-51.818,-38\nswiftformer_xs.dist_in1k,224,22.671,77.329,39.751,60.249,3.48,0.950,bicubic,-52.929,-52.563,-30\nrepvgg_a1.rvgg_in1k,224,22.647,77.353,39.877,60.123,14.09,0.875,bilinear,-51.827,-51.975,+10\nmobilenetv3_rw.rmsp_in1k,224,22.636,77.365,40.415,59.585,5.48,0.875,bicubic,-52.980,-52.281,-34\nghostnetv2_100.in1k,224,22.614,77.386,40.058,59.942,6.16,0.875,bicubic,-52.572,-52.276,-18\nmobilevit_s.cvnets_in1k,256,22.606,77.394,38.863,61.137,5.58,0.900,bicubic,-55.692,-55.305,-202\nedgenext_x_sm
all.in1k,288,22.594,77.406,39.511,60.489,2.34,1.000,bicubic,-53.112,-53.249,-42\ntf_mobilenetv3_large_100.in1k,224,22.563,77.437,39.763,60.237,5.48,0.875,bilinear,-52.947,-52.837,-31\ntf_efficientnet_b0.in1k,224,22.555,77.445,39.594,60.406,5.29,0.875,bicubic,-53.987,-53.414,-74\nmobilenetv4_conv_small.e1200_r224_in1k,224,22.539,77.461,40.598,59.402,3.77,0.875,bicubic,-50.911,-50.742,+23\nxcit_nano_12_p8_224.fb_in1k,224,22.504,77.496,40.795,59.205,3.05,1.000,bicubic,-51.396,-51.359,+16\ntf_efficientnet_es.in1k,224,22.421,77.579,39.114,60.886,5.44,0.875,bicubic,-54.187,-54.062,-81\nhrnet_w18_small_v2.ms_in1k,224,22.372,77.628,39.893,60.107,15.60,0.875,bilinear,-52.722,-52.521,-20\nconvit_tiny.fb_in1k,224,22.266,77.734,39.678,60.322,5.71,0.875,bicubic,-50.874,-52.025,+29\nregnetx_004_tv.tv2_in1k,224,22.242,77.757,39.120,60.880,5.50,0.965,bicubic,-52.354,-53.020,-6\nedgenext_x_small.in1k,256,22.188,77.812,39.054,60.946,2.34,0.900,bicubic,-52.686,-53.250,-16\nregnety_008.pycls_in1k,224,22.131,77.870,38.985,61.015,6.26,0.875,bicubic,-54.204,-54.075,-72\nese_vovnet19b_dw.ra_in1k,288,22.095,77.905,39.482,60.518,6.54,0.950,bicubic,-55.693,-54.210,-166\nseresnext26t_32x4d.bt_in1k,224,22.020,77.980,38.505,61.495,16.81,0.875,bicubic,-55.960,-55.245,-186\nregnetx_008.pycls_in1k,224,21.979,78.021,38.924,61.076,7.26,0.875,bicubic,-53.075,-53.414,-24\nregnety_006.pycls_in1k,224,21.967,78.033,38.955,61.045,6.06,0.875,bicubic,-53.307,-53.577,-35\nsemnasnet_100.rmsp_in1k,224,21.956,78.044,38.702,61.298,3.89,0.875,bicubic,-53.496,-54.156,-41\nvit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k,384,21.952,78.048,39.427,60.573,6.36,1.000,bicubic,-54.010,-53.845,-65\nresnet26d.bt_in1k,224,21.938,78.062,38.631,61.369,16.01,0.875,bicubic,-54.760,-54.525,-98\nstarnet_s1.in1k,224,21.891,78.109,38.940,61.060,2.87,0.875,bicubic,-51.641,-52.564,+7\ncs3darknet_focus_s.ra4_e3600_r256_in1k,320,21.889,78.111,39.789,60.211,3.27,1.000,bicubic,-51.269,-51.669,+16\nresnet18.a2_in1k,224,21.873,78.127,38.466,61.53
4,11.69,0.950,bicubic,-48.761,-51.020,+54\npit_ti_224.in1k,224,21.852,78.148,39.527,60.473,4.85,0.900,bicubic,-51.075,-51.885,+19\npvt_v2_b0.in1k,224,21.834,78.166,40.138,59.862,3.67,0.900,bicubic,-48.820,-50.068,+51\nregnetx_006.pycls_in1k,224,21.759,78.241,38.942,61.058,6.20,0.875,bicubic,-52.145,-52.698,-4\nvit_tiny_patch16_384.augreg_in21k_ft_in1k,384,21.702,78.298,39.323,60.677,5.79,1.000,bicubic,-56.736,-55.219,-236\ncrossvit_9_240.in1k,240,21.698,78.302,39.258,60.742,8.55,0.875,bicubic,-52.296,-52.718,-9\nvgg19_bn.tv_in1k,224,21.637,78.363,39.288,60.712,143.68,0.875,bilinear,-52.593,-52.560,-14\nsemnasnet_075.rmsp_in1k,224,21.594,78.406,38.950,61.050,2.91,0.875,bicubic,-51.406,-52.174,+11\nresnet18.gluon_in1k,224,21.559,78.441,38.891,61.109,11.69,0.875,bicubic,-49.285,-50.859,+43\nmobilevitv2_075.cvnets_in1k,256,21.543,78.457,38.614,61.386,2.87,0.888,bicubic,-54.069,-54.141,-65\nfbnetc_100.rmsp_in1k,224,21.529,78.471,38.156,61.844,5.57,0.875,bilinear,-53.591,-54.228,-46\nrepghostnet_100.in1k,224,21.490,78.510,38.700,61.300,4.07,0.875,bicubic,-52.718,-52.850,-18\nxcit_nano_12_p16_224.fb_in1k,224,21.449,78.551,39.787,60.213,3.05,1.000,bicubic,-48.533,-49.993,+47\nmixnet_s.ft_in1k,224,21.417,78.583,38.228,61.772,4.13,0.875,bicubic,-54.583,-54.568,-84\nghostnet_100.in1k,224,21.376,78.624,38.128,61.872,5.18,0.875,bicubic,-52.582,-53.414,-17\nmnasnet_100.rmsp_in1k,224,21.364,78.636,37.729,62.271,4.38,0.875,bicubic,-53.308,-54.379,-38\nseresnext26d_32x4d.bt_in1k,224,21.299,78.701,37.322,62.678,16.81,0.875,bicubic,-56.293,-56.292,-177\nresnet26.bt_in1k,224,21.295,78.705,38.030,61.970,16.00,0.875,bicubic,-54.003,-54.548,-59\nresnet18.fb_ssl_yfcc100m_ft_in1k,224,21.290,78.710,39.126,60.874,11.69,0.875,bilinear,-51.346,-52.290,+6\nlcnet_100.ra2_in1k,224,21.285,78.715,38.889,61.111,2.95,0.875,bicubic,-50.844,-51.487,+18\nlegacy_seresnext26_32x4d.in1k,224,21.085,78.915,37.639,62.361,16.79,0.875,bicubic,-56.015,-55.619,-145\ncrossvit_tiny_240.in1k,240,21.046,78.954,38.051,
61.949,7.01,0.875,bicubic,-52.306,-53.855,-11\nefficientvit_m2.r224_in1k,224,21.046,78.954,37.670,62.330,4.19,0.875,bicubic,-49.750,-52.482,+30\nrepvgg_a0.rvgg_in1k,224,20.993,79.007,37.601,62.399,9.11,0.875,bilinear,-51.433,-52.905,+3\nresnet18.a2_in1k,288,20.951,79.049,36.849,63.151,11.69,1.000,bicubic,-51.413,-53.771,+4\nregnetx_004.pycls_in1k,224,20.900,79.100,37.560,62.440,5.16,0.875,bicubic,-51.582,-53.280,0\nseresnext26t_32x4d.bt_in1k,288,20.865,79.135,36.364,63.636,16.81,0.950,bicubic,-57.871,-57.954,-291\nspnasnet_100.rmsp_in1k,224,20.843,79.157,37.902,62.098,4.42,0.875,bilinear,-53.243,-53.924,-33\nlegacy_seresnet18.in1k,224,20.830,79.170,37.633,62.367,11.78,0.875,bicubic,-50.912,-52.707,+13\nmobilenetv2_100.ra_in1k,224,20.765,79.235,37.776,62.224,3.50,0.875,bicubic,-52.145,-53.224,-8\ntf_mixnet_s.in1k,224,20.466,79.534,36.642,63.358,4.13,0.875,bicubic,-55.190,-55.990,-89\nvit_tiny_patch16_224.augreg_in21k_ft_in1k,224,20.460,79.540,37.574,62.426,5.72,0.900,bicubic,-54.992,-55.034,-78\nregnety_004.pycls_in1k,224,20.409,79.591,37.041,62.959,4.34,0.875,bicubic,-53.595,-54.715,-37\ntf_mobilenetv3_large_075.in1k,224,20.403,79.597,36.794,63.206,3.99,0.875,bilinear,-53.039,-54.538,-26\nhrnet_w18_small.gluon_in1k,224,20.397,79.603,37.018,62.982,13.19,0.875,bicubic,-53.537,-54.160,-36\nhrnet_w18_small.ms_in1k,224,20.360,79.640,37.090,62.910,13.19,0.875,bilinear,-51.980,-53.598,-6\nresnet26d.bt_in1k,288,20.277,79.722,36.358,63.642,16.01,0.950,bicubic,-57.144,-57.276,-184\nresnet18.tv_in1k,224,20.238,79.762,37.258,62.742,11.69,0.875,bilinear,-49.514,-51.824,+24\nmixer_l16_224.goog_in21k_ft_in1k,224,20.181,79.819,32.952,67.048,208.20,0.875,bicubic,-51.903,-54.680,0\ndeit_tiny_patch16_224.fb_in1k,224,20.140,79.860,37.574,62.426,5.72,0.900,bicubic,-52.050,-53.526,-4\ntf_mobilenetv3_large_minimal_100.in1k,224,20.130,79.870,36.912,63.088,3.92,0.875,bilinear,-52.140,-53.752,-8\nseresnext26d_32x4d.bt_in1k,288,20.071,79.929,35.237,64.763,16.81,0.950,bicubic,-58.745,-59.029,-
315\nefficientvit_m1.r224_in1k,224,19.975,80.025,36.481,63.519,2.98,0.875,bicubic,-48.365,-52.211,+26\nvgg16_bn.tv_in1k,224,19.963,80.037,36.324,63.676,138.37,0.875,bilinear,-53.391,-55.166,-34\nresnet26.bt_in1k,288,19.765,80.235,35.845,64.155,16.00,0.950,bicubic,-56.619,-57.333,-128\nresnet18.a3_in1k,160,19.700,80.300,35.416,64.584,11.69,0.950,bicubic,-45.970,-50.852,+36\nrepghostnet_080.in1k,224,19.478,80.522,35.955,64.045,3.28,0.875,bicubic,-52.762,-54.537,-12\nvit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k,224,19.356,80.644,36.102,63.898,6.34,0.900,bicubic,-52.444,-54.708,-7\nmobileone_s0.apple_in1k,224,19.299,80.701,35.336,64.664,5.29,0.875,bilinear,-52.099,-54.528,-1\ntinynet_c.in1k,184,19.289,80.711,36.031,63.969,2.46,0.875,bicubic,-51.925,-53.715,0\nedgenext_xx_small.in1k,288,18.867,81.133,35.178,64.822,1.33,1.000,bicubic,-53.019,-55.356,-11\nedgenext_xx_small.in1k,256,18.611,81.389,34.699,65.301,1.33,0.900,bicubic,-52.521,-55.343,-1\nresnet18.a3_in1k,224,18.440,81.560,33.475,66.525,11.69,0.950,bicubic,-49.810,-54.697,+17\nefficientvit_b0.r224_in1k,224,18.438,81.562,33.243,66.757,3.41,0.950,bicubic,-52.970,-56.191,-7\nmobilevit_xs.cvnets_in1k,256,18.340,81.660,33.235,66.765,2.32,0.900,bicubic,-56.278,-59.115,-73\nlcnet_075.ra2_in1k,224,18.116,81.884,34.363,65.637,2.36,0.875,bicubic,-50.670,-54.021,+10\nmobilenetv4_conv_small_050.e3000_r224_in1k,256,17.978,82.022,34.453,65.547,2.24,0.950,bicubic,-47.848,-51.963,+23\nvgg19.tv_in1k,224,17.935,82.065,33.064,66.936,143.67,0.875,bilinear,-54.465,-57.814,-30\nvgg13_bn.tv_in1k,224,17.805,82.195,34.045,65.955,133.05,0.875,bilinear,-53.755,-56.327,-14\nmobilenetv4_conv_small_050.e3000_r224_in1k,224,17.678,82.322,33.852,66.148,2.24,0.875,bicubic,-47.100,-51.648,+25\nresnet10t.c3_in1k,176,17.650,82.350,33.520,66.480,5.44,0.875,bicubic,-49.074,-53.456,+15\nvgg16.tv_in1k,224,17.534,82.466,32.760,67.240,138.36,0.875,bilinear,-54.060,-57.638,-18\nregnety_002.pycls_in1k,224,17.448,82.552,32.465,67.535,3.16,0.875,bicubic,-52.840,
-57.079,-5\nvgg11_bn.tv_in1k,224,17.397,82.603,32.952,67.048,132.87,0.875,bilinear,-52.977,-56.850,-7\nresnet14t.c3_in1k,176,17.381,82.619,32.115,67.885,10.08,0.875,bicubic,-53.937,-57.565,-16\nmobilevitv2_050.cvnets_in1k,256,17.346,82.654,33.015,66.985,1.37,0.888,bicubic,-52.810,-56.915,-7\nrepghostnet_058.in1k,224,17.198,82.802,32.630,67.370,2.55,0.875,bicubic,-51.740,-55.756,-3\nregnetx_002.pycls_in1k,224,16.980,83.020,32.243,67.757,2.68,0.875,bicubic,-51.778,-56.325,-1\nmobilenetv3_small_100.lamb_in1k,224,16.850,83.150,32.520,67.480,2.54,0.875,bicubic,-50.786,-55.118,+4\nfasternet_t0.in1k,224,16.748,83.252,31.842,68.158,3.91,1.000,bicubic,-54.984,-58.232,-27\nmobilenetv2_050.lamb_in1k,224,16.721,83.279,31.897,68.103,1.97,0.875,bicubic,-49.207,-54.217,+8\nresnet10t.c3_in1k,224,16.709,83.291,32.139,67.861,5.44,0.950,bicubic,-51.645,-55.895,-4\ntinynet_d.in1k,152,16.683,83.317,32.457,67.543,2.34,0.875,bicubic,-50.253,-54.621,+2\nefficientvit_m0.r224_in1k,224,16.662,83.338,31.944,68.056,2.35,0.875,bicubic,-46.642,-53.196,+12\nmnasnet_small.lamb_in1k,224,16.640,83.360,31.877,68.123,2.03,0.875,bicubic,-49.564,-54.593,+2\ndla60x_c.in1k,224,16.316,83.684,31.752,68.249,1.32,0.875,bilinear,-51.612,-56.675,-5\ntf_mobilenetv3_small_100.in1k,224,16.273,83.727,31.119,68.881,2.54,0.875,bilinear,-51.649,-56.567,-5\nvgg13.tv_in1k,224,16.127,83.873,30.991,69.009,133.05,0.875,bilinear,-53.823,-58.271,-17\nresnet14t.c3_in1k,224,15.999,84.001,30.040,69.960,10.08,0.950,bicubic,-56.243,-60.278,-45\nvgg11.tv_in1k,224,15.715,84.285,30.470,69.530,132.86,0.875,bilinear,-53.335,-58.160,-17\nrepghostnet_050.in1k,224,15.601,84.399,30.215,69.785,2.31,0.875,bicubic,-51.377,-56.721,-7\nmobilenetv3_small_075.lamb_in1k,224,14.988,85.013,29.745,70.255,2.04,0.875,bicubic,-50.297,-55.717,+1\ntf_mobilenetv3_small_075.in1k,224,14.974,85.026,29.655,70.345,2.04,0.875,bilinear,-50.732,-56.475,-2\ndla46_c.in1k,224,14.744,85.256,29.354,70.646,1.30,0.875,bilinear,-50.136,-56.976,0\nmobilevit_xxs.cvnets_in1k
,256,14.565,85.435,28.725,71.275,1.27,0.900,bicubic,-54.371,-60.217,-20\ndla46x_c.in1k,224,14.402,85.598,29.236,70.764,1.07,0.875,bilinear,-51.606,-57.710,-8\ntf_mobilenetv3_small_minimal_100.in1k,224,14.257,85.743,28.666,71.334,2.04,0.875,bilinear,-48.636,-55.604,+1\nlcnet_050.ra2_in1k,224,14.239,85.761,28.527,71.473,1.88,0.875,bicubic,-48.899,-55.875,-1\ntest_vit3.r160_in1k,160,13.512,86.488,28.185,71.815,0.93,0.950,bicubic,-43.396,-52.563,+2\ntinynet_e.in1k,106,12.743,87.257,26.495,73.505,2.04,0.875,bicubic,-47.131,-55.279,-1\ntest_convnext3.r160_in1k,160,12.372,87.628,26.214,73.786,0.47,0.950,bicubic,-40.930,-52.112,+2\ntest_convnext2.r160_in1k,160,12.290,87.710,26.462,73.538,0.48,0.950,bicubic,-41.231,-52.097,0\nmobilenetv3_small_050.lamb_in1k,224,11.046,88.954,23.480,76.519,1.59,0.875,bicubic,-46.872,-56.664,-3\ntest_convnext.r160_in1k,160,10.248,89.752,23.797,76.203,0.27,0.950,bicubic,-37.526,-50.369,0\ntest_resnet.r160_in1k,160,9.265,90.735,21.859,78.141,0.47,0.950,bilinear,-32.337,-46.125,+7\ntest_efficientnet_evos.r160_in1k,160,8.861,91.139,19.688,80.312,0.36,0.950,bicubic,-37.643,-51.334,+1\ntest_nfnet.r160_in1k,160,8.589,91.411,19.552,80.448,0.38,0.950,bicubic,-39.045,-53.328,-2\ntest_efficientnet_ln.r160_in1k,160,8.277,91.723,19.081,80.919,0.36,0.950,bicubic,-35.681,-50.245,+1\ntest_efficientnet_gn.r160_in1k,160,7.962,92.038,18.175,81.825,0.36,0.950,bicubic,-35.926,-50.981,+1\ntest_byobnet.r160_in1k,160,7.774,92.226,17.866,82.134,0.46,0.950,bicubic,-38.084,-53.138,-2\ntest_efficientnet.r160_in1k,160,7.361,92.639,17.173,82.827,0.36,0.950,bicubic,-39.145,-53.863,-5\ntest_vit2.r160_in1k,160,5.608,94.392,14.058,85.942,0.46,0.950,bicubic,-36.642,-54.924,-1\ntest_vit.r160_in1k,160,4.862,95.138,11.954,88.046,0.37,0.950,bicubic,-36.120,-55.429,0\n"
  },
  {
    "path": "setup.cfg",
    "content": "[dist_conda]\n\nconda_name_differences = 'torch:pytorch'\nchannels = pytorch\nnoarch = True\n\n[metadata]\n\nurl = \"https://github.com/huggingface/pytorch-image-models\""
  },
  {
    "path": "tests/__init__.py",
    "content": ""
  },
  {
    "path": "tests/test_checkpoint_loading.py",
    "content": "import argparse\nimport inspect\n\nimport pytest\nimport torch\n\nfrom timm.models._helpers import load_state_dict, resume_checkpoint\n\n\n_HAS_WEIGHTS_ONLY = 'weights_only' in inspect.signature(torch.load).parameters\n_HAS_SAFE_GLOBALS = hasattr(torch.serialization, 'safe_globals')\n\n\nclass _CustomPayload:\n    def __init__(self, value: int = 1):\n        self.value = value\n\n\n@pytest.mark.skipif(\n    not (_HAS_WEIGHTS_ONLY and _HAS_SAFE_GLOBALS),\n    reason='requires torch.load(weights_only=...) with safe_globals support',\n)\ndef test_weights_only_allows_argparse_namespace(tmp_path):\n    checkpoint_path = tmp_path / 'namespace_ckpt.pth'\n    checkpoint = {\n        'state_dict': {'layer.weight': torch.randn(2, 2)},\n        'args': argparse.Namespace(model='test-model'),\n    }\n    torch.save(checkpoint, checkpoint_path)\n\n    state_dict = load_state_dict(checkpoint_path)\n    assert 'layer.weight' in state_dict\n\n\n@pytest.mark.skipif(not _HAS_WEIGHTS_ONLY, reason='requires torch.load(weights_only=...) support')\ndef test_weights_only_blocks_non_allowlisted_globals(tmp_path):\n    checkpoint_path = tmp_path / 'custom_ckpt.pth'\n    checkpoint = {\n        'state_dict': {'layer.weight': torch.randn(2, 2)},\n        'args': _CustomPayload(3),\n    }\n    torch.save(checkpoint, checkpoint_path)\n\n    with pytest.raises(RuntimeError, match='No automatic unsafe pickle fallback is performed'):\n        load_state_dict(checkpoint_path)\n\n\n@pytest.mark.skipif(\n    not (_HAS_WEIGHTS_ONLY and _HAS_SAFE_GLOBALS),\n    reason='requires torch.load(weights_only=...) 
with safe_globals support',\n)\ndef test_resume_checkpoint_default_weights_only_namespace(tmp_path):\n    src_model = torch.nn.Linear(4, 2)\n    src_optimizer = torch.optim.SGD(src_model.parameters(), lr=0.123, momentum=0.9)\n    x = torch.randn(3, 4)\n    src_optimizer.zero_grad()\n    src_model(x).sum().backward()\n    src_optimizer.step()\n\n    checkpoint_path = tmp_path / 'resume_namespace_ckpt.pth'\n    checkpoint = {\n        'state_dict': src_model.state_dict(),\n        'optimizer': src_optimizer.state_dict(),\n        'epoch': 7,\n        'version': 2,\n        'args': argparse.Namespace(model='test-model'),\n    }\n    torch.save(checkpoint, checkpoint_path)\n\n    dst_model = torch.nn.Linear(4, 2)\n    dst_optimizer = torch.optim.SGD(dst_model.parameters(), lr=0.5, momentum=0.9)\n    resume_epoch = resume_checkpoint(dst_model, checkpoint_path, optimizer=dst_optimizer, log_info=False)\n\n    assert resume_epoch == 8\n    assert torch.equal(dst_model.weight, src_model.weight)\n    assert torch.equal(dst_model.bias, src_model.bias)\n    assert dst_optimizer.param_groups[0]['lr'] == pytest.approx(0.123)\n    assert len(dst_optimizer.state_dict()['state']) > 0\n\n\n@pytest.mark.skipif(not _HAS_WEIGHTS_ONLY, reason='requires torch.load(weights_only=...) 
support')\ndef test_resume_checkpoint_blocks_non_allowlisted_globals(tmp_path):\n    model = torch.nn.Linear(4, 2)\n    checkpoint_path = tmp_path / 'resume_custom_ckpt.pth'\n    checkpoint = {\n        'state_dict': model.state_dict(),\n        'args': _CustomPayload(11),\n    }\n    torch.save(checkpoint, checkpoint_path)\n\n    with pytest.raises(RuntimeError, match='No automatic unsafe pickle fallback is performed'):\n        resume_checkpoint(model, checkpoint_path, log_info=False)\n\n\ndef test_resume_checkpoint_weights_only_false_allows_custom_globals(tmp_path):\n    src_model = torch.nn.Linear(4, 2)\n    checkpoint_path = tmp_path / 'resume_custom_ckpt_unsafe.pth'\n    checkpoint = {\n        'state_dict': src_model.state_dict(),\n        'epoch': 3,\n        'version': 2,\n        'args': _CustomPayload(11),\n    }\n    torch.save(checkpoint, checkpoint_path)\n\n    dst_model = torch.nn.Linear(4, 2)\n    resume_epoch = resume_checkpoint(dst_model, checkpoint_path, log_info=False, weights_only=False)\n\n    assert resume_epoch == 4\n    assert torch.equal(dst_model.weight, src_model.weight)\n    assert torch.equal(dst_model.bias, src_model.bias)\n"
  },
  {
    "path": "tests/test_layers.py",
    "content": "import pytest\nimport torch\nimport torch.nn as nn\n\nfrom timm.layers import create_act_layer, set_layer_config, get_act_layer, get_act_fn, Attention2d, MultiQueryAttentionV2\n\nimport importlib\nimport os\n\ntorch_backend = os.environ.get('TORCH_BACKEND')\nif torch_backend is not None:\n    importlib.import_module(torch_backend)\ntorch_device = os.environ.get('TORCH_DEVICE', 'cpu')\n\nclass MLP(nn.Module):\n    def __init__(self, act_layer=\"relu\", inplace=True):\n        super(MLP, self).__init__()\n        self.fc1 = nn.Linear(1000, 100)\n        self.act = create_act_layer(act_layer, inplace=inplace)\n        self.fc2 = nn.Linear(100, 10)\n\n    def forward(self, x):\n        x = self.fc1(x)\n        x = self.act(x)\n        x = self.fc2(x)\n        return x\n\n\ndef _run_act_layer_grad(act_type, inplace=True):\n    x = torch.rand(10, 1000) * 10\n    m = MLP(act_layer=act_type, inplace=inplace)\n\n    def _run(x, act_layer=''):\n        if act_layer:\n            # replace act layer if set\n            m.act = create_act_layer(act_layer, inplace=inplace)\n        out = m(x)\n        l = (out - 0).pow(2).sum()\n        return l\n\n    x = x.to(device=torch_device)\n    m.to(device=torch_device)\n\n    out_me = _run(x)\n\n    with set_layer_config(scriptable=True):\n        out_jit = _run(x, act_type)\n\n    assert torch.isclose(out_jit, out_me)\n\n    with set_layer_config(no_jit=True):\n        out_basic = _run(x, act_type)\n\n    assert torch.isclose(out_basic, out_jit)\n\n\ndef test_swish_grad():\n    for _ in range(100):\n        _run_act_layer_grad('swish')\n\n\ndef test_mish_grad():\n    for _ in range(100):\n        _run_act_layer_grad('mish')\n\n\ndef test_hard_sigmoid_grad():\n    for _ in range(100):\n        _run_act_layer_grad('hard_sigmoid', inplace=None)\n\n\ndef test_hard_swish_grad():\n    for _ in range(100):\n        _run_act_layer_grad('hard_swish')\n\n\ndef test_hard_mish_grad():\n    for _ in range(100):\n        
_run_act_layer_grad('hard_mish')\n\ndef test_get_act_layer_empty_string():\n    # Empty string should return None\n    assert get_act_layer('') is None\n\n\ndef test_create_act_layer_inplace_error():\n    class NoInplaceAct(nn.Module):\n        def __init__(self):\n            super().__init__()\n        def forward(self, x):\n            return x\n    \n    # Should recover when inplace arg causes TypeError\n    layer = create_act_layer(NoInplaceAct, inplace=True)\n    assert isinstance(layer, NoInplaceAct)\n\n\ndef test_create_act_layer_edge_cases():\n    # Test None input\n    assert create_act_layer(None) is None\n    \n    # Test TypeError handling for inplace\n    class CustomAct(nn.Module):\n        def __init__(self, **kwargs):\n            super().__init__()\n        def forward(self, x):\n            return x\n            \n    result = create_act_layer(CustomAct, inplace=True)\n    assert isinstance(result, CustomAct)\n\n\ndef test_get_act_fn_callable():\n    def custom_act(x): \n        return x\n    assert get_act_fn(custom_act) is custom_act\n\n\ndef test_get_act_fn_none():\n    assert get_act_fn(None) is None\n    assert get_act_fn('') is None\n\n\n@pytest.mark.parametrize(\"dim\", [128])\n@pytest.mark.parametrize(\"dim_out\", [128, 256])\n@pytest.mark.parametrize(\"use_m\", [True, False])\ndef test_mqa_v2(dim, dim_out, use_m):\n    mqa = MultiQueryAttentionV2(dim, dim_out)\n    \n    x = torch.randn(1, dim, 32, 48)\n    if use_m:\n        m = torch.randn(1, dim, 16, 24)\n    else:\n        m = None\n        \n    y = mqa(x, m=m)\n    \n    assert (y.shape) == (1, dim_out, 32, 48)\n\n\n@pytest.mark.parametrize(\"bias\", [True, False])\n@pytest.mark.parametrize(\"expand_first\", [True, False])\n@pytest.mark.parametrize(\"head_first\", [True, False])\n@pytest.mark.parametrize(\"attn_mask\", [True, False])\ndef test_attn2d(bias, expand_first, head_first, attn_mask):\n    x = torch.randn(1, 128, 32, 48)\n    attn = Attention2d(\n        128, 128, 
num_heads=4, bias=bias, expand_first=expand_first, head_first=head_first\n    )\n    \n    if attn_mask:\n        mask = torch.randint(0, 1, size=(32 * 48, 32 * 48), dtype=torch.float32)\n    else:\n        mask = None\n    \n    o1 = attn(x, mask)\n    attn.fused_attn = False\n    o2 = attn(x, mask)\n    \n    assert torch.allclose(o1, o2, atol=1e-5), f\"{torch.abs(o1 - o2).max()}\"\n"
  },
  {
    "path": "tests/test_layers_drop.py",
    "content": "\"\"\"Tests for timm.layers.drop module (DropBlock, DropPath).\"\"\"\nimport torch\nimport pytest\n\nfrom timm.layers.drop import drop_block_2d, DropBlock2d, drop_path, DropPath\n\n\nclass TestDropBlock2d:\n    \"\"\"Test drop_block_2d function and DropBlock2d module.\"\"\"\n\n    def test_drop_block_2d_output_shape(self):\n        \"\"\"Test that output shape matches input shape.\"\"\"\n        for h, w in [(7, 7), (4, 8), (10, 5), (3, 3)]:\n            x = torch.ones((2, 3, h, w))\n            result = drop_block_2d(x, drop_prob=0.1, block_size=3)\n            assert result.shape == x.shape, f\"Shape mismatch for input ({h}, {w})\"\n\n    def test_drop_block_2d_no_drop_when_prob_zero(self):\n        \"\"\"Test that no dropping occurs when drop_prob=0.\"\"\"\n        x = torch.ones((2, 3, 8, 8))\n        result = drop_block_2d(x, drop_prob=0.0, block_size=3)\n        assert torch.allclose(result, x)\n\n    def test_drop_block_2d_approximate_keep_ratio(self):\n        \"\"\"Test that the drop ratio is approximately correct.\"\"\"\n        torch.manual_seed(123)\n        # Use large batch for statistical stability\n        x = torch.ones((32, 16, 56, 56))\n        drop_prob = 0.1\n\n        # With scale_by_keep=False, kept values stay at 1.0 and dropped are 0.0\n        # so we can directly measure the drop ratio\n        result = drop_block_2d(x, drop_prob=drop_prob, block_size=7, scale_by_keep=False)\n\n        total_elements = result.numel()\n        dropped_elements = (result == 0).sum().item()\n        actual_drop_ratio = dropped_elements / total_elements\n\n        # Allow some tolerance since it's stochastic\n        assert abs(actual_drop_ratio - drop_prob) < 0.03, \\\n            f\"Drop ratio {actual_drop_ratio:.3f} not close to expected {drop_prob}\"\n\n    def test_drop_block_2d_inplace(self):\n        \"\"\"Test inplace operation.\"\"\"\n        x = torch.ones((2, 3, 8, 8))\n        x_clone = x.clone()\n        torch.manual_seed(42)\n    
    result = drop_block_2d(x_clone, drop_prob=0.3, block_size=3, inplace=True)\n        assert result is x_clone, \"Inplace should return the same tensor\"\n\n    def test_drop_block_2d_couple_channels_true(self):\n        \"\"\"Test couple_channels=True uses same mask for all channels.\"\"\"\n        torch.manual_seed(42)\n        x = torch.ones((2, 4, 16, 16))\n        result = drop_block_2d(x, drop_prob=0.3, block_size=5, couple_channels=True)\n\n        # With couple_channels=True, all channels should have same drop pattern\n        for b in range(x.shape[0]):\n            mask_c0 = (result[b, 0] == 0).float()\n            for c in range(1, x.shape[1]):\n                mask_c = (result[b, c] == 0).float()\n                assert torch.allclose(mask_c0, mask_c), f\"Channel {c} has different mask than channel 0\"\n\n    def test_drop_block_2d_couple_channels_false(self):\n        \"\"\"Test couple_channels=False uses independent mask per channel.\"\"\"\n        torch.manual_seed(42)\n        x = torch.ones((2, 4, 16, 16))\n        result = drop_block_2d(x, drop_prob=0.3, block_size=5, couple_channels=False)\n\n        # With couple_channels=False, channels should have different patterns\n        # (with high probability for reasonable drop_prob)\n        mask_c0 = (result[0, 0] == 0).float()\n        mask_c1 = (result[0, 1] == 0).float()\n        # They might occasionally be the same by chance, but very unlikely\n        assert not torch.allclose(mask_c0, mask_c1), \"Channels should have independent masks\"\n\n    def test_drop_block_2d_with_noise(self):\n        \"\"\"Test with_noise option adds gaussian noise to dropped regions.\"\"\"\n        torch.manual_seed(42)\n        x = torch.ones((2, 3, 16, 16))\n        result = drop_block_2d(x, drop_prob=0.3, block_size=5, with_noise=True)\n\n        # With noise, dropped regions should have non-zero values from gaussian noise\n        # The result should contain values other than the scaled kept values\n        
unique_vals = torch.unique(result)\n        assert len(unique_vals) > 2, \"With noise should produce varied values\"\n\n    def test_drop_block_2d_even_block_size(self):\n        \"\"\"Test that even block sizes work correctly.\"\"\"\n        x = torch.ones((2, 3, 16, 16))\n        for block_size in [2, 4, 6]:\n            result = drop_block_2d(x, drop_prob=0.1, block_size=block_size)\n            assert result.shape == x.shape, f\"Shape mismatch for block_size={block_size}\"\n\n    def test_drop_block_2d_asymmetric_input(self):\n        \"\"\"Test with asymmetric H != W inputs.\"\"\"\n        for h, w in [(8, 16), (16, 8), (7, 14), (14, 7)]:\n            x = torch.ones((2, 3, h, w))\n            result = drop_block_2d(x, drop_prob=0.1, block_size=5)\n            assert result.shape == x.shape, f\"Shape mismatch for ({h}, {w})\"\n\n    def test_drop_block_2d_scale_by_keep(self):\n        \"\"\"Test scale_by_keep parameter.\"\"\"\n        torch.manual_seed(42)\n        x = torch.ones((2, 3, 16, 16))\n\n        # With scale_by_keep=True (default), kept values are scaled up\n        result_scaled = drop_block_2d(x.clone(), drop_prob=0.3, block_size=5, scale_by_keep=True)\n        kept_vals_scaled = result_scaled[result_scaled > 0]\n        # Scaled values should be > 1.0 (scaled up to compensate for drops)\n        assert kept_vals_scaled.min() > 1.0, \"Scaled values should be > 1.0\"\n\n        # With scale_by_keep=False, kept values stay at original\n        torch.manual_seed(42)\n        result_unscaled = drop_block_2d(x.clone(), drop_prob=0.3, block_size=5, scale_by_keep=False)\n        kept_vals_unscaled = result_unscaled[result_unscaled > 0]\n        # Unscaled values should be exactly 1.0\n        assert torch.allclose(kept_vals_unscaled, torch.ones_like(kept_vals_unscaled)), \\\n            \"Unscaled values should be 1.0\"\n\n\nclass TestDropBlock2dModule:\n    \"\"\"Test DropBlock2d nn.Module.\"\"\"\n\n    def test_deprecated_args_accepted(self):\n        
\"\"\"Test that deprecated args (batchwise, fast) are silently accepted.\"\"\"\n        # These should not raise\n        module1 = DropBlock2d(drop_prob=0.1, batchwise=True)\n        module2 = DropBlock2d(drop_prob=0.1, fast=False)\n        module3 = DropBlock2d(drop_prob=0.1, batchwise=False, fast=True)\n        assert module1.drop_prob == 0.1\n        assert module2.drop_prob == 0.1\n        assert module3.drop_prob == 0.1\n\n    def test_unknown_args_warned(self):\n        \"\"\"Test that unknown kwargs emit a warning.\"\"\"\n        with pytest.warns(UserWarning, match=\"unexpected keyword argument 'unknown_arg'\"):\n            DropBlock2d(drop_prob=0.1, unknown_arg=True)\n\n    def test_training_mode(self):\n        \"\"\"Test that dropping only occurs in training mode.\"\"\"\n        module = DropBlock2d(drop_prob=0.5, block_size=3)\n        x = torch.ones((2, 3, 8, 8))\n\n        # In eval mode, should return input unchanged\n        module.eval()\n        result = module(x)\n        assert torch.allclose(result, x), \"Should not drop in eval mode\"\n\n        # In train mode, should modify input\n        module.train()\n        torch.manual_seed(42)\n        result = module(x)\n        assert not torch.allclose(result, x), \"Should drop in train mode\"\n\n    def test_couple_channels_parameter(self):\n        \"\"\"Test couple_channels parameter is passed through.\"\"\"\n        x = torch.ones((2, 4, 16, 16))\n\n        # couple_channels=True (default)\n        module_coupled = DropBlock2d(drop_prob=0.3, block_size=5, couple_channels=True)\n        module_coupled.train()\n        torch.manual_seed(42)\n        result_coupled = module_coupled(x)\n\n        # All channels should have same pattern\n        mask_c0 = (result_coupled[0, 0] == 0).float()\n        mask_c1 = (result_coupled[0, 1] == 0).float()\n        assert torch.allclose(mask_c0, mask_c1)\n\n        # couple_channels=False\n        module_uncoupled = DropBlock2d(drop_prob=0.3, block_size=5, 
couple_channels=False)\n        module_uncoupled.train()\n        torch.manual_seed(42)\n        result_uncoupled = module_uncoupled(x)\n\n        # Channels should have different patterns\n        mask_c0 = (result_uncoupled[0, 0] == 0).float()\n        mask_c1 = (result_uncoupled[0, 1] == 0).float()\n        assert not torch.allclose(mask_c0, mask_c1)\n\n\nclass TestDropPath:\n    \"\"\"Test drop_path function and DropPath module.\"\"\"\n\n    def test_no_drop_when_prob_zero(self):\n        \"\"\"Test that no dropping occurs when drop_prob=0.\"\"\"\n        x = torch.ones((4, 8, 16, 16))\n        result = drop_path(x, drop_prob=0.0, training=True)\n        assert torch.allclose(result, x)\n\n    def test_no_drop_when_not_training(self):\n        \"\"\"Test that no dropping occurs when not training.\"\"\"\n        x = torch.ones((4, 8, 16, 16))\n        result = drop_path(x, drop_prob=0.5, training=False)\n        assert torch.allclose(result, x)\n\n    def test_drop_path_scaling(self):\n        \"\"\"Test that scale_by_keep properly scales kept paths.\"\"\"\n        torch.manual_seed(42)\n        x = torch.ones((100, 8, 4, 4))  # Large batch for statistical stability\n        keep_prob = 0.8\n        drop_prob = 1 - keep_prob\n\n        result = drop_path(x, drop_prob=drop_prob, training=True, scale_by_keep=True)\n\n        # Kept samples should be scaled by 1/keep_prob = 1.25\n        kept_mask = (result[:, 0, 0, 0] != 0)\n        if kept_mask.any():\n            kept_vals = result[kept_mask, 0, 0, 0]\n            expected_scale = 1.0 / keep_prob\n            assert torch.allclose(kept_vals, torch.full_like(kept_vals, expected_scale), atol=1e-5)\n\n    def test_drop_path_no_scaling(self):\n        \"\"\"Test that scale_by_keep=False does not scale.\"\"\"\n        torch.manual_seed(42)\n        x = torch.ones((100, 8, 4, 4))\n        result = drop_path(x, drop_prob=0.2, training=True, scale_by_keep=False)\n\n        # Kept samples should remain at 1.0\n        
kept_mask = (result[:, 0, 0, 0] != 0)\n        if kept_mask.any():\n            kept_vals = result[kept_mask, 0, 0, 0]\n            assert torch.allclose(kept_vals, torch.ones_like(kept_vals))\n\n\nclass TestDropPathModule:\n    \"\"\"Test DropPath nn.Module.\"\"\"\n\n    def test_training_mode(self):\n        \"\"\"Test that dropping only occurs in training mode.\"\"\"\n        module = DropPath(drop_prob=0.5)\n        x = torch.ones((32, 8, 4, 4))  # Larger batch for statistical reliability\n\n        module.eval()\n        result = module(x)\n        assert torch.allclose(result, x), \"Should not drop in eval mode\"\n\n        module.train()\n        torch.manual_seed(42)\n        result = module(x)\n        # With 50% drop prob on 32 samples, very unlikely all survive\n        # Check that at least one sample has zeros (was dropped)\n        has_zeros = (result == 0).any()\n        assert has_zeros, \"Should drop some paths in train mode\"\n\n    def test_extra_repr(self):\n        \"\"\"Test extra_repr for nice printing.\"\"\"\n        module = DropPath(drop_prob=0.123)\n        repr_str = module.extra_repr()\n        assert \"0.123\" in repr_str\n"
  },
  {
    "path": "tests/test_layers_pool.py",
    "content": "\"\"\"Tests for timm pooling layers.\"\"\"\nimport pytest\nimport torch\nimport torch.nn as nn\n\nimport importlib\nimport os\n\ntorch_backend = os.environ.get('TORCH_BACKEND')\nif torch_backend is not None:\n    importlib.import_module(torch_backend)\ntorch_device = os.environ.get('TORCH_DEVICE', 'cpu')\n\n\n# Adaptive Avg/Max Pooling Tests\n\nclass TestAdaptiveAvgMaxPool:\n    \"\"\"Test adaptive_avgmax_pool module.\"\"\"\n\n    def test_adaptive_avgmax_pool2d(self):\n        from timm.layers import adaptive_avgmax_pool2d\n        x = torch.randn(2, 64, 7, 7, device=torch_device)\n        out = adaptive_avgmax_pool2d(x, 1)\n        assert out.shape == (2, 64, 1, 1)\n        # Should be average of avg and max\n        expected = 0.5 * (x.mean(dim=(2, 3), keepdim=True) + x.amax(dim=(2, 3), keepdim=True))\n        assert torch.allclose(out, expected)\n\n    def test_select_adaptive_pool2d(self):\n        from timm.layers import select_adaptive_pool2d\n        x = torch.randn(2, 64, 7, 7, device=torch_device)\n\n        out_avg = select_adaptive_pool2d(x, pool_type='avg', output_size=1)\n        assert out_avg.shape == (2, 64, 1, 1)\n        assert torch.allclose(out_avg, x.mean(dim=(2, 3), keepdim=True))\n\n        out_max = select_adaptive_pool2d(x, pool_type='max', output_size=1)\n        assert out_max.shape == (2, 64, 1, 1)\n        assert torch.allclose(out_max, x.amax(dim=(2, 3), keepdim=True))\n\n    def test_adaptive_avgmax_pool2d_module(self):\n        from timm.layers import AdaptiveAvgMaxPool2d\n        x = torch.randn(2, 64, 14, 14, device=torch_device)\n        pool = AdaptiveAvgMaxPool2d(output_size=1).to(torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64, 1, 1)\n\n    def test_select_adaptive_pool2d_module(self):\n        from timm.layers import SelectAdaptivePool2d\n        x = torch.randn(2, 64, 7, 7, device=torch_device)\n\n        for pool_type in ['avg', 'max', 'avgmax', 'catavgmax']:\n            pool = 
SelectAdaptivePool2d(pool_type=pool_type, flatten=True).to(torch_device)\n            out = pool(x)\n            if pool_type == 'catavgmax':\n                assert out.shape == (2, 128)  # concatenated\n            else:\n                assert out.shape == (2, 64)\n\n    def test_select_adaptive_pool2d_fast(self):\n        from timm.layers import SelectAdaptivePool2d\n        x = torch.randn(2, 64, 7, 7, device=torch_device)\n\n        for pool_type in ['fast', 'fastavg', 'fastmax', 'fastavgmax', 'fastcatavgmax']:\n            pool = SelectAdaptivePool2d(pool_type=pool_type, flatten=True).to(torch_device)\n            out = pool(x)\n            if 'cat' in pool_type:\n                assert out.shape == (2, 128)\n            else:\n                assert out.shape == (2, 64)\n\n\n# Attention Pool Tests\n\nclass TestAttentionPool:\n    \"\"\"Test attention-based pooling layers.\"\"\"\n\n    def test_attention_pool_latent_basic(self):\n        from timm.layers import AttentionPoolLatent\n        x = torch.randn(2, 49, 64, device=torch_device)\n        pool = AttentionPoolLatent(in_features=64, num_heads=4).to(torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64)\n\n    def test_attention_pool_latent_multi_latent(self):\n        from timm.layers import AttentionPoolLatent\n        x = torch.randn(2, 49, 64, device=torch_device)\n        pool = AttentionPoolLatent(\n            in_features=64,\n            num_heads=4,\n            latent_len=4,\n            pool_type='avg',\n        ).to(torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64)\n\n    def test_attention_pool2d_basic(self):\n        from timm.layers import AttentionPool2d\n        x = torch.randn(2, 64, 7, 7, device=torch_device)\n        pool = AttentionPool2d(in_features=64, feat_size=7).to(torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64)\n\n    def test_attention_pool2d_different_feat_size(self):\n        from timm.layers import 
AttentionPool2d\n        # Test with different spatial sizes (requires pos_embed interpolation)\n        pool = AttentionPool2d(in_features=64, feat_size=7).to(torch_device)\n        for size in [7, 14]:\n            x = torch.randn(2, 64, size, size, device=torch_device)\n            out = pool(x)\n            assert out.shape == (2, 64)\n\n    def test_rot_attention_pool2d_basic(self):\n        from timm.layers import RotAttentionPool2d\n        x = torch.randn(2, 64, 7, 7, device=torch_device)\n        pool = RotAttentionPool2d(in_features=64, ref_feat_size=7).to(torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64)\n\n    def test_rot_attention_pool2d_different_sizes(self):\n        from timm.layers import RotAttentionPool2d\n        pool = RotAttentionPool2d(in_features=64, ref_feat_size=7).to(torch_device)\n        for size in [7, 14, 10]:\n            x = torch.randn(2, 64, size, size, device=torch_device)\n            out = pool(x)\n            assert out.shape == (2, 64)\n\n    def test_rot_attention_pool2d_rope_types(self):\n        from timm.layers import RotAttentionPool2d\n        x = torch.randn(2, 64, 7, 7, device=torch_device)\n        for rope_type in ['base', 'cat', 'dinov3']:\n            pool = RotAttentionPool2d(\n                in_features=64,\n                ref_feat_size=7,\n                rope_type=rope_type,\n            ).to(torch_device)\n            out = pool(x)\n            assert out.shape == (2, 64)\n\n    @pytest.mark.parametrize('pool_cls,base_kwargs,input_shape', [\n        ('RotAttentionPool2d', {'in_features': 64, 'ref_feat_size': 7}, (2, 64, 7, 7)),\n        ('AttentionPool2d', {'in_features': 64, 'feat_size': 7}, (2, 64, 7, 7)),\n    ])\n    @pytest.mark.parametrize('out_features,embed_dim,expected_out', [\n        (None, None, 64),      # default: out_features = in_features\n        (None, 128, 64),       # default with different embed_dim\n        (32, None, 32),        # explicit out_features\n       
 (32, 128, 32),         # explicit out_features with different embed_dim\n        (0, None, 64),         # disabled projection, out = embed_dim = in_features\n        (0, 128, 128),         # disabled projection, out = embed_dim\n    ])\n    def test_attention_pool2d_out_features(\n            self, pool_cls, base_kwargs, input_shape, out_features, embed_dim, expected_out,\n    ):\n        import timm.layers as layers\n        kwargs = {**base_kwargs, 'out_features': out_features}\n        if embed_dim is not None:\n            kwargs['embed_dim'] = embed_dim\n        pool = getattr(layers, pool_cls)(**kwargs).to(torch_device)\n        assert pool.out_features == expected_out\n        if out_features == 0:\n            assert isinstance(pool.proj, nn.Identity)\n        else:\n            assert isinstance(pool.proj, nn.Linear)\n        x = torch.randn(*input_shape, device=torch_device)\n        out = pool(x)\n        assert out.shape == (2, expected_out)\n\n    @pytest.mark.parametrize('pool_cls,base_kwargs,input_shape', [\n        ('RotAttentionPool2d', {'in_features': 64, 'ref_feat_size': 7, 'embed_dim': 128}, (2, 64, 7, 7)),\n        ('AttentionPool2d', {'in_features': 64, 'feat_size': 7, 'embed_dim': 128}, (2, 64, 7, 7)),\n    ])\n    @pytest.mark.parametrize('num_classes,expected_out', [\n        (10, 10),\n        (0, 128),    # reset to 0 => Identity, out_features = embed_dim\n        (100, 100),\n    ])\n    def test_attention_pool2d_reset(\n            self, pool_cls, base_kwargs, input_shape, num_classes, expected_out,\n    ):\n        import timm.layers as layers\n        pool = getattr(layers, pool_cls)(**base_kwargs).to(torch_device)\n        pool.reset(num_classes=num_classes)\n        assert pool.out_features == expected_out\n        if num_classes > 0:\n            assert isinstance(pool.proj, nn.Linear)\n            assert pool.proj.in_features == 128  # embed_dim, not in_features\n            assert pool.proj.out_features == num_classes\n        
else:\n            assert isinstance(pool.proj, nn.Identity)\n        x = torch.randn(*input_shape, device=torch_device)\n        out = pool(x)\n        assert out.shape == (2, expected_out)\n\n    @pytest.mark.parametrize('pool_cls,base_kwargs,input_shape', [\n        ('RotAttentionPool2d', {'in_features': 64, 'ref_feat_size': 7}, (2, 64, 7, 7)),\n        ('AttentionPool2d', {'in_features': 64, 'feat_size': 7}, (2, 64, 7, 7)),\n    ])\n    def test_attention_pool2d_pre_logits(self, pool_cls, base_kwargs, input_shape):\n        import timm.layers as layers\n        pool = getattr(layers, pool_cls)(**base_kwargs, out_features=32).to(torch_device)\n        x = torch.randn(*input_shape, device=torch_device)\n        out = pool(x, pre_logits=True)\n        # pre_logits skips proj, so output dim = embed_dim (= in_features by default)\n        assert out.shape == (2, 64)\n\n    @pytest.mark.parametrize('pool_cls,base_kwargs,input_shape', [\n        ('RotAttentionPool2d', {'in_features': 64, 'ref_feat_size': 7}, (2, 64, 7, 7)),\n        ('AttentionPool2d', {'in_features': 64, 'feat_size': 7}, (2, 64, 7, 7)),\n    ])\n    def test_attention_pool2d_qkv_separate(self, pool_cls, base_kwargs, input_shape):\n        import timm.layers as layers\n        pool = getattr(layers, pool_cls)(**base_kwargs, qkv_separate=True).to(torch_device)\n        assert pool.qkv is None\n        x = torch.randn(*input_shape, device=torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64)\n\n    @pytest.mark.parametrize('pool_cls,base_kwargs,input_shape', [\n        ('RotAttentionPool2d', {'in_features': 64, 'ref_feat_size': 7}, (2, 64, 7, 7)),\n        ('AttentionPool2d', {'in_features': 64, 'feat_size': 7}, (2, 64, 7, 7)),\n    ])\n    def test_attention_pool2d_class_token(self, pool_cls, base_kwargs, input_shape):\n        import timm.layers as layers\n        pool = getattr(layers, pool_cls)(**base_kwargs, class_token=True).to(torch_device)\n        assert pool.cls_token is 
not None\n        x = torch.randn(*input_shape, device=torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64)\n\n    def test_attention_pool_prr_basic(self):\n        from timm.layers import AttentionPoolPrr\n        x = torch.randn(2, 50, 64, device=torch_device)  # 1 CLS + 49 patches\n        pool = AttentionPoolPrr(dim=64, num_heads=4).to(torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64)\n\n    def test_attention_pool_prr_avg_pool(self):\n        from timm.layers import AttentionPoolPrr\n        x = torch.randn(2, 49, 64, device=torch_device)\n        pool = AttentionPoolPrr(dim=64, num_heads=4, pool_type='avg').to(torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64)\n\n    def test_attention_pool_prr_parameter_free(self):\n        from timm.layers import AttentionPoolPrr\n        pool = AttentionPoolPrr(dim=64, num_heads=4)\n        num_params = sum(p.numel() for p in pool.parameters())\n        assert num_params == 0, f\"Expected 0 parameters, got {num_params}\"\n\n    def test_attention_pool_prr_with_norms(self):\n        from timm.layers import AttentionPoolPrr\n        pool = AttentionPoolPrr(\n            dim=64,\n            num_heads=4,\n            pre_norm=True,\n            post_norm=True,\n        ).to(torch_device)\n        # Should have parameters from the two LayerNorms\n        num_params = sum(p.numel() for p in pool.parameters())\n        assert num_params > 0\n        x = torch.randn(2, 49, 64, device=torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64)\n\n    @pytest.mark.parametrize('out_features,embed_dim,expected_out', [\n        (None, None, 64),      # default: out_features = in_features\n        (None, 128, 64),       # default with different embed_dim\n        (32, None, 32),        # explicit out_features\n        (32, 128, 32),         # explicit out_features with different embed_dim\n        (0, None, 64),         # disabled projection, out = 
embed_dim = in_features\n        (0, 128, 128),         # disabled projection, out = embed_dim\n    ])\n    def test_attention_pool_latent_out_features(self, out_features, embed_dim, expected_out):\n        from timm.layers import AttentionPoolLatent\n        kwargs = {'in_features': 64, 'num_heads': 4}\n        if out_features is not None:\n            kwargs['out_features'] = out_features\n        if embed_dim is not None:\n            kwargs['embed_dim'] = embed_dim\n        pool = AttentionPoolLatent(**kwargs).to(torch_device)\n        assert pool.out_features == expected_out\n        if out_features == 0:\n            assert isinstance(pool.proj, nn.Identity)\n            assert pool.mlp is None\n        else:\n            assert isinstance(pool.proj, nn.Linear)\n            assert pool.mlp is not None\n        in_dim = embed_dim or 64\n        x = torch.randn(2, 49, in_dim, device=torch_device)\n        out = pool(x)\n        assert out.shape == (2, expected_out)\n\n\n# LSE Pool Tests\n\nclass TestLsePool:\n    \"\"\"Test LogSumExp pooling layers.\"\"\"\n\n    def test_lse_plus_2d_basic(self):\n        from timm.layers import LsePlus2d\n        x = torch.randn(2, 64, 7, 7, device=torch_device)\n        pool = LsePlus2d().to(torch_device)\n        out = pool(x)\n        # Default is flatten=True\n        assert out.shape == (2, 64)\n\n    def test_lse_plus_2d_no_flatten(self):\n        from timm.layers import LsePlus2d\n        x = torch.randn(2, 64, 7, 7, device=torch_device)\n        pool = LsePlus2d(flatten=False).to(torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64, 1, 1)\n\n    def test_lse_plus_1d_basic(self):\n        from timm.layers import LsePlus1d\n        x = torch.randn(2, 49, 64, device=torch_device)\n        pool = LsePlus1d().to(torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64)\n\n    def test_lse_high_r_approximates_max(self):\n        from timm.layers import LsePlus2d\n        x = torch.randn(2, 
64, 7, 7, device=torch_device)\n        pool = LsePlus2d(r=100.0, r_learnable=False).to(torch_device)\n        out = pool(x)\n        out_max = x.amax(dim=(2, 3))\n        assert torch.allclose(out, out_max, atol=0.1)\n\n    def test_lse_low_r_approximates_avg(self):\n        from timm.layers import LsePlus2d\n        x = torch.randn(2, 64, 7, 7, device=torch_device)\n        pool = LsePlus2d(r=0.01, r_learnable=False).to(torch_device)\n        out = pool(x)\n        out_avg = x.mean(dim=(2, 3))\n        assert torch.allclose(out, out_avg, atol=0.1)\n\n    def test_lse_learnable_r_gradient(self):\n        from timm.layers import LsePlus2d\n        x = torch.randn(2, 64, 7, 7, device=torch_device)\n        pool = LsePlus2d(r=10.0, r_learnable=True).to(torch_device)\n        out = pool(x).sum()\n        out.backward()\n        assert pool.r.grad is not None\n        assert pool.r.grad.abs() > 0\n\n\n# SimPool Tests\n\nclass TestSimPool:\n    \"\"\"Test SimPool attention-based pooling layers.\"\"\"\n\n    def test_simpool_2d_basic(self):\n        from timm.layers import SimPool2d\n        x = torch.randn(2, 64, 7, 7, device=torch_device)\n        pool = SimPool2d(dim=64).to(torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64)\n\n    def test_simpool_1d_basic(self):\n        from timm.layers import SimPool1d\n        x = torch.randn(2, 49, 64, device=torch_device)\n        pool = SimPool1d(dim=64).to(torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64)\n\n    def test_simpool_multi_head(self):\n        from timm.layers import SimPool2d\n        x = torch.randn(2, 64, 7, 7, device=torch_device)\n        for num_heads in [1, 2, 4, 8]:\n            pool = SimPool2d(dim=64, num_heads=num_heads).to(torch_device)\n            out = pool(x)\n            assert out.shape == (2, 64)\n\n    def test_simpool_with_gamma(self):\n        from timm.layers import SimPool2d\n        x = torch.randn(2, 64, 7, 7, device=torch_device)\n        
pool = SimPool2d(dim=64, gamma=2.0).to(torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64)\n        assert not torch.isnan(out).any()\n\n    def test_simpool_qk_norm(self):\n        from timm.layers import SimPool2d\n        x = torch.randn(2, 64, 7, 7, device=torch_device)\n        pool = SimPool2d(dim=64, qk_norm=True).to(torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64)\n\n\n# Common Tests (Gradient, JIT, dtype)\n\nclass TestPoolingCommon:\n    \"\"\"Common tests across all pooling layers.\"\"\"\n\n    @pytest.mark.parametrize('pool_cls,kwargs,input_shape', [\n        ('LsePlus2d', {}, (2, 64, 7, 7)),\n        ('LsePlus1d', {}, (2, 49, 64)),\n        ('SimPool2d', {'dim': 64}, (2, 64, 7, 7)),\n        ('SimPool1d', {'dim': 64}, (2, 49, 64)),\n        ('SelectAdaptivePool2d', {'pool_type': 'avg', 'flatten': True}, (2, 64, 7, 7)),\n        ('AttentionPoolLatent', {'in_features': 64, 'num_heads': 4}, (2, 49, 64)),\n        ('AttentionPoolPrr', {'dim': 64, 'num_heads': 4}, (2, 49, 64)),\n        ('AttentionPool2d', {'in_features': 64, 'feat_size': 7}, (2, 64, 7, 7)),\n        ('RotAttentionPool2d', {'in_features': 64, 'ref_feat_size': 7}, (2, 64, 7, 7)),\n    ])\n    def test_gradient_flow(self, pool_cls, kwargs, input_shape):\n        import timm.layers as layers\n        x = torch.randn(*input_shape, device=torch_device, requires_grad=True)\n        pool = getattr(layers, pool_cls)(**kwargs).to(torch_device)\n        out = pool(x)\n        loss = out.sum()\n        loss.backward()\n        assert x.grad is not None\n        assert x.grad.abs().sum() > 0\n\n    @pytest.mark.parametrize('pool_cls,kwargs,input_shape', [\n        ('LsePlus2d', {}, (2, 64, 7, 7)),\n        ('LsePlus1d', {}, (2, 49, 64)),\n        ('SimPool2d', {'dim': 64}, (2, 64, 7, 7)),\n        ('SimPool1d', {'dim': 64}, (2, 49, 64)),\n        ('AttentionPoolPrr', {'dim': 64, 'num_heads': 4}, (2, 49, 64)),\n        ('AttentionPool2d', {'in_features': 64, 
'feat_size': 7}, (2, 64, 7, 7)),\n        ('RotAttentionPool2d', {'in_features': 64, 'ref_feat_size': 7}, (2, 64, 7, 7)),\n    ])\n    def test_torchscript(self, pool_cls, kwargs, input_shape):\n        import timm.layers as layers\n        x = torch.randn(*input_shape, device=torch_device)\n        pool = getattr(layers, pool_cls)(**kwargs).to(torch_device)\n        pool.eval()\n        scripted = torch.jit.script(pool)\n        out_orig = pool(x)\n        out_script = scripted(x)\n        assert torch.allclose(out_orig, out_script, atol=1e-5)\n\n    @pytest.mark.parametrize('pool_cls,kwargs,input_shape', [\n        ('LsePlus2d', {}, (2, 64, 7, 7)),\n        ('LsePlus1d', {}, (2, 49, 64)),\n        ('SimPool2d', {'dim': 64}, (2, 64, 7, 7)),\n        ('SimPool1d', {'dim': 64}, (2, 49, 64)),\n        ('AttentionPoolPrr', {'dim': 64, 'num_heads': 4}, (2, 49, 64)),\n        ('AttentionPool2d', {'in_features': 64, 'feat_size': 7}, (2, 64, 7, 7)),\n        ('RotAttentionPool2d', {'in_features': 64, 'ref_feat_size': 7}, (2, 64, 7, 7)),\n    ])\n    def test_eval_deterministic(self, pool_cls, kwargs, input_shape):\n        import timm.layers as layers\n        x = torch.randn(*input_shape, device=torch_device)\n        pool = getattr(layers, pool_cls)(**kwargs).to(torch_device)\n        pool.eval()\n        with torch.no_grad():\n            out1 = pool(x)\n            out2 = pool(x)\n        assert torch.allclose(out1, out2)\n\n    @pytest.mark.parametrize('pool_cls,kwargs,input_shape', [\n        ('LsePlus2d', {}, (2, 64, 7, 7)),\n        ('SimPool2d', {'dim': 64}, (2, 64, 7, 7)),\n        ('RotAttentionPool2d', {'in_features': 64, 'ref_feat_size': 7}, (2, 64, 7, 7)),\n    ])\n    def test_different_spatial_sizes(self, pool_cls, kwargs, input_shape):\n        import timm.layers as layers\n        B, C, _, _ = input_shape\n        pool = getattr(layers, pool_cls)(**kwargs).to(torch_device)\n        for H, W in [(7, 7), (14, 14), (1, 1), (3, 5)]:\n            x = 
torch.randn(B, C, H, W, device=torch_device)\n            out = pool(x)\n            assert out.shape[0] == B\n            assert out.shape[-1] == C\n\n\n# BlurPool Tests\n\nclass TestBlurPool:\n    \"\"\"Test BlurPool anti-aliasing layer.\"\"\"\n\n    def test_blur_pool_2d_basic(self):\n        from timm.layers import BlurPool2d\n        x = torch.randn(2, 64, 14, 14, device=torch_device)\n        pool = BlurPool2d(channels=64).to(torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64, 7, 7)\n\n    def test_blur_pool_2d_stride(self):\n        from timm.layers import BlurPool2d\n        x = torch.randn(2, 64, 28, 28, device=torch_device)\n        pool = BlurPool2d(channels=64, stride=4).to(torch_device)\n        out = pool(x)\n        assert out.shape == (2, 64, 8, 8)\n\n\n# Pool1d Tests\n\nclass TestPool1d:\n    \"\"\"Test 1D pooling utilities.\"\"\"\n\n    def test_global_pool_nlc(self):\n        from timm.layers import global_pool_nlc\n        x = torch.randn(2, 49, 64, device=torch_device)\n\n        # By default, avg/max excludes first token (num_prefix_tokens=1)\n        out_avg = global_pool_nlc(x, pool_type='avg')\n        assert out_avg.shape == (2, 64)\n        assert torch.allclose(out_avg, x[:, 1:].mean(dim=1))\n\n        out_max = global_pool_nlc(x, pool_type='max')\n        assert out_max.shape == (2, 64)\n        assert torch.allclose(out_max, x[:, 1:].amax(dim=1))\n\n        out_first = global_pool_nlc(x, pool_type='token')\n        assert out_first.shape == (2, 64)\n        assert torch.allclose(out_first, x[:, 0])\n\n        # Test with reduce_include_prefix=True\n        out_avg_all = global_pool_nlc(x, pool_type='avg', reduce_include_prefix=True)\n        assert torch.allclose(out_avg_all, x.mean(dim=1))\n"
  },
  {
    "path": "tests/test_models.py",
    "content": "\"\"\"Run tests for all models\n\nTests that run on CI should have a specific marker, e.g. @pytest.mark.base. This\nmarker is used to parallelize the CI runs, with one runner for each marker.\n\nIf new tests are added, ensure that they use one of the existing markers\n(documented in pyproject.toml > pytest > markers) or that a new marker is added\nfor this set of tests. If using a new marker, adjust the test matrix in\n.github/workflows/tests.yml to run tests with this new marker, otherwise the\ntests will be skipped on CI.\n\n\"\"\"\n\nimport pytest\nimport torch\nimport platform\nimport os\nimport fnmatch\n\n_IS_MAC = platform.system() == 'Darwin'\n\ntry:\n    from torchvision.models.feature_extraction import create_feature_extractor, get_graph_node_names, NodePathTracer\n    has_fx_feature_extraction = True\nexcept ImportError:\n    has_fx_feature_extraction = False\n\nimport timm\nfrom timm import list_models, list_pretrained, create_model, set_scriptable, get_pretrained_cfg_value\nfrom timm.layers import Format, get_spatial_dim, get_channel_dim\nfrom timm.models import get_notrace_modules, get_notrace_functions\n\nimport importlib\nimport os\n\ntorch_backend = os.environ.get('TORCH_BACKEND')\nif torch_backend is not None:\n    importlib.import_module(torch_backend)\ntorch_device = os.environ.get('TORCH_DEVICE', 'cpu')\ntimeout = os.environ.get('TIMEOUT')\ntimeout120 = int(timeout) if timeout else 120\ntimeout240 = int(timeout) if timeout else 240\ntimeout360 = int(timeout) if timeout else 360\n\nif hasattr(torch._C, '_jit_set_profiling_executor'):\n    # legacy executor is too slow to compile large models for unit tests\n    # no need for the fusion performance here\n    torch._C._jit_set_profiling_executor(True)\n    torch._C._jit_set_profiling_mode(False)\n\n# models with forward_intermediates() and support for FeatureGetterNet features_only wrapper\nFEAT_INTER_FILTERS = [\n    'vision_transformer', 'vision_transformer_sam', 
'vision_transformer_hybrid', 'vision_transformer_relpos',\n    'beit', 'mvitv2', 'eva', 'cait', 'xcit', 'volo', 'twins', 'deit', 'swin_transformer', 'swin_transformer_v2',\n    'swin_transformer_v2_cr', 'maxxvit', 'efficientnet', 'mobilenetv3', 'levit', 'efficientformer', 'resnet',\n    'regnet', 'byobnet', 'byoanet', 'mlp_mixer', 'hiera', 'fastvit', 'hieradet_sam2', 'aimv2*', 'tnt',\n    'tiny_vit', 'vovnet', 'tresnet', 'rexnet', 'resnetv2', 'repghost', 'repvit', 'pvt_v2', 'nextvit', 'nest',\n    'mambaout', 'inception_next', 'inception_v4', 'hgnet', 'gcvit', 'focalnet', 'efficientformer_v2', 'edgenext',\n    'davit', 'rdnet', 'convnext', 'pit', 'starnet', 'shvit', 'fasternet', 'swiftformer', 'ghostnet', 'naflexvit',\n    'csatv2'\n]\n\n# transformer / hybrid models don't support full set of spatial / feature APIs and/or have spatial output.\nNON_STD_FILTERS = [\n    'vit_*', 'naflexvit*', 'tnt_*', 'pit_*', 'coat_*', 'cait_*', '*mixer_*', 'gmlp_*', 'resmlp_*', 'twins_*',\n    'convit_*', 'levit*', 'visformer*', 'deit*', 'xcit_*', 'crossvit_*', 'beit*', 'aimv2*', 'swiftformer_*',\n    'poolformer_*', 'volo_*', 'sequencer2d_*', 'mvitv2*', 'gcvit*', 'efficientformer*', 'sam_hiera*',\n    'eva_*', 'flexivit*', 'eva02*', 'samvit_*', 'efficientvit_m*', 'tiny_vit_*', 'hiera_*', 'vitamin*', 'test_vit*',\n]\nNUM_NON_STD = len(NON_STD_FILTERS)\n\n# exclude models that cause specific test failures\nif 'GITHUB_ACTIONS' in os.environ:\n    # GitHub Linux runner is slower and hits memory limits sooner than MacOS, exclude bigger models\n    EXCLUDE_FILTERS = [\n        '*efficientnet_l2*', '*resnext101_32x48d', '*in21k', '*152x4_bitm', '*101x3_bitm', '*50x3_bitm',\n        '*nfnet_f3*', '*nfnet_f4*', '*nfnet_f5*', '*nfnet_f6*', '*nfnet_f7*', '*efficientnetv2_xl*',\n        '*resnetrs350*', '*resnetrs420*', 'xcit_large_24_p8*', '*huge*', '*giant*', '*gigantic*',\n        '*enormous*', 'maxvit_xlarge*', 'regnet*1280', 'regnet*2560', '*_1b_*', '*_3b_*', '*_7b_*']\n    
NON_STD_EXCLUDE_FILTERS = ['*huge*', '*giant*',  '*gigantic*', '*enormous*', '*_1b_*', '*_3b_*', '*_7b_*']\nelse:\n    EXCLUDE_FILTERS = ['*enormous*', '*_7b_*']\n    NON_STD_EXCLUDE_FILTERS = ['*gigantic*', '*enormous*', '*_3b_*', '*_7b_*']\n\nEXCLUDE_JIT_FILTERS = ['hiera_*', '*naflex*', '*_7b_*', 'hrnet*', 'dpn*', 'densenet*', 'selecsls*']\n\nTARGET_FWD_SIZE = MAX_FWD_SIZE = 384\nTARGET_BWD_SIZE = 128\nMAX_BWD_SIZE = 320\nMAX_FWD_OUT_SIZE = 448\nTARGET_JIT_SIZE = 128\nMAX_JIT_SIZE = 320\nTARGET_FFEAT_SIZE = 96\nMAX_FFEAT_SIZE = 256\nTARGET_FWD_FX_SIZE = 128\nMAX_FWD_FX_SIZE = 256\nTARGET_BWD_FX_SIZE = 128\nMAX_BWD_FX_SIZE = 224\n\n\ndef _get_input_size(model=None, model_name='', target=None):\n    if model is None:\n        assert model_name, \"One of model or model_name must be provided\"\n        input_size = get_pretrained_cfg_value(model_name, 'input_size')\n        fixed_input_size = get_pretrained_cfg_value(model_name, 'fixed_input_size')\n        min_input_size = get_pretrained_cfg_value(model_name, 'min_input_size')\n    else:\n        default_cfg = model.default_cfg\n        input_size = default_cfg['input_size']\n        fixed_input_size = default_cfg.get('fixed_input_size', None)\n        min_input_size = default_cfg.get('min_input_size', None)\n    assert input_size is not None\n\n    if fixed_input_size:\n        return input_size\n\n    if min_input_size:\n        if target and max(input_size) > target:\n            input_size = min_input_size\n    else:\n        if target and max(input_size) > target:\n            input_size = tuple([min(x, target) for x in input_size])\n    return input_size\n\n\n@pytest.mark.base\n@pytest.mark.timeout(timeout240)\n@pytest.mark.parametrize('model_name', list_pretrained('test_*'))\n@pytest.mark.parametrize('batch_size', [1])\ndef test_model_inference(model_name, batch_size):\n    \"\"\"Run a single forward pass with each model\"\"\"\n    from PIL import Image\n    from huggingface_hub import snapshot_download\n    
import tempfile\n    import safetensors\n\n    model = create_model(model_name, pretrained=True)\n    model.eval()\n    pp = timm.data.create_transform(**timm.data.resolve_data_config(model=model))\n\n    with tempfile.TemporaryDirectory()  as temp_dir:\n        snapshot_download(\n            repo_id='timm/' + model_name, repo_type='model', local_dir=temp_dir, allow_patterns='test/*'\n        )\n        rand_tensors = safetensors.torch.load_file(os.path.join(temp_dir, 'test', 'rand_tensors.safetensors'))\n        owl_tensors = safetensors.torch.load_file(os.path.join(temp_dir, 'test', 'owl_tensors.safetensors'))\n        test_owl = Image.open(os.path.join(temp_dir, 'test', 'test_owl.jpg'))\n\n    with torch.inference_mode():\n        rand_output = model(rand_tensors['input'])\n        rand_features = model.forward_features(rand_tensors['input'])\n        rand_pre_logits = model.forward_head(rand_features, pre_logits=True)\n        assert torch.allclose(rand_output, rand_tensors['output'], rtol=1e-3, atol=1e-4), 'rand output does not match'\n        assert torch.allclose(rand_features, rand_tensors['features'], rtol=1e-3, atol=1e-4), 'rand features do not match'\n        assert torch.allclose(rand_pre_logits, rand_tensors['pre_logits'], rtol=1e-3, atol=1e-4), 'rand pre_logits do not match'\n\n        def _test_owl(owl_input, tol=(1e-3, 1e-4)):\n            owl_output = model(owl_input)\n            owl_features = model.forward_features(owl_input)\n            owl_pre_logits = model.forward_head(owl_features.clone(), pre_logits=True)\n            assert owl_output.softmax(1).argmax(1) == 24  # owl\n            assert torch.allclose(owl_output, owl_tensors['output'], rtol=tol[0], atol=tol[1]), 'owl output does not match'\n            assert torch.allclose(owl_features, owl_tensors['features'], rtol=tol[0], atol=tol[1]), 'owl output does not match'\n            assert torch.allclose(owl_pre_logits, owl_tensors['pre_logits'], rtol=tol[0], atol=tol[1]), 'owl output does 
not match'\n\n        _test_owl(owl_tensors['input'])  # test with original pp owl tensor\n        _test_owl(pp(test_owl).unsqueeze(0), tol=(1e-1, 1e-1))  # re-process from original jpg, Pillow output can change a lot btw ver\n\n\n@pytest.mark.base\n@pytest.mark.timeout(timeout120)\n@pytest.mark.parametrize('model_name', list_models(exclude_filters=EXCLUDE_FILTERS))\n@pytest.mark.parametrize('batch_size', [1])\ndef test_model_forward(model_name, batch_size):\n    \"\"\"Run a single forward pass with each model\"\"\"\n    model = create_model(model_name, pretrained=False)\n    model.eval()\n\n    input_size = _get_input_size(model=model, target=TARGET_FWD_SIZE)\n    if max(input_size) > MAX_FWD_SIZE:\n        pytest.skip(\"Fixed input size model > limit.\")\n    inputs = torch.randn((batch_size, *input_size))\n    inputs = inputs.to(torch_device)\n    model.to(torch_device)\n    outputs = model(inputs)\n\n    assert outputs.shape[0] == batch_size\n    assert not torch.isnan(outputs).any(), 'Output included NaNs'\n\n    # Test that grad-checkpointing, if supported, doesn't cause model failures or change in output\n    try:\n        model.set_grad_checkpointing()\n    except Exception:\n        # throws if not supported, that's fine\n        pass\n    else:\n        outputs2 = model(inputs)\n        if isinstance(outputs, tuple):\n            outputs2 = torch.cat(outputs2)\n        assert torch.allclose(outputs, outputs2, rtol=1e-4, atol=1e-5), 'Output does not match'\n\n\n@pytest.mark.base\n@pytest.mark.timeout(timeout120)\n@pytest.mark.parametrize('model_name', list_models(exclude_filters=EXCLUDE_FILTERS, name_matches_cfg=True))\n@pytest.mark.parametrize('batch_size', [2])\ndef test_model_backward(model_name, batch_size):\n    \"\"\"Run a single forward pass with each model\"\"\"\n    input_size = _get_input_size(model_name=model_name, target=TARGET_BWD_SIZE)\n    if max(input_size) > MAX_BWD_SIZE:\n        pytest.skip(\"Fixed input size model > limit.\")\n\n    
model = create_model(model_name, pretrained=False, num_classes=42)\n    encoder_only = model.num_classes == 0  # FIXME better approach?\n    num_params = sum([x.numel() for x in model.parameters()])\n    model.train()\n\n    inputs = torch.randn((batch_size, *input_size))\n    inputs = inputs.to(torch_device)\n    model.to(torch_device)\n    outputs = model(inputs)\n    if isinstance(outputs, tuple):\n        outputs = torch.cat(outputs)\n    outputs.mean().backward()\n    for n, x in model.named_parameters():\n        assert x.grad is not None, f'No gradient for {n}'\n    num_grad = sum([x.grad.numel() for x in model.parameters() if x.grad is not None])\n\n    if encoder_only:\n        output_fmt = getattr(model, 'output_fmt', 'NCHW')\n        feat_axis = get_channel_dim(output_fmt)\n        assert outputs.shape[feat_axis] == model.num_features, f'unpooled feature dim {outputs.shape[feat_axis]} != model.num_features {model.num_features}'\n    else:\n        assert outputs.shape[-1] == 42\n    assert num_params == num_grad, 'Some parameters are missing gradients'\n    assert not torch.isnan(outputs).any(), 'Output included NaNs'\n\n\n# models with extra conv/linear layers after pooling\nEARLY_POOL_MODELS = (\n    timm.models.EfficientVit,\n    timm.models.EfficientVitLarge,\n    timm.models.FasterNet,\n    timm.models.HighPerfGpuNet,\n    timm.models.GhostNet,\n    timm.models.MetaNeXt, # InceptionNeXt\n    timm.models.MobileNetV3,\n    timm.models.RepGhostNet,\n    timm.models.VGG,\n)\n\n@pytest.mark.cfg\n@pytest.mark.timeout(timeout360)\n@pytest.mark.parametrize('model_name', list_models(\n    exclude_filters=EXCLUDE_FILTERS + NON_STD_FILTERS, include_tags=True))\n@pytest.mark.parametrize('batch_size', [1])\ndef test_model_default_cfgs(model_name, batch_size):\n    \"\"\"Run a single forward pass with each model\"\"\"\n    model = create_model(model_name, pretrained=False)\n    model.eval()\n    model.to(torch_device)\n    assert getattr(model, 'num_classes') >= 
0\n    assert getattr(model, 'num_features') > 0\n    assert getattr(model, 'head_hidden_size') > 0\n    state_dict = model.state_dict()\n    cfg = model.default_cfg\n\n    pool_size = cfg['pool_size']\n    input_size = model.default_cfg['input_size']\n    output_fmt = getattr(model, 'output_fmt', 'NCHW')\n    spatial_axis = get_spatial_dim(output_fmt)\n    assert len(spatial_axis) == 2  # TODO add 1D sequence support\n    feat_axis = get_channel_dim(output_fmt)\n\n    if all([x <= MAX_FWD_OUT_SIZE for x in input_size]) and \\\n            not any([fnmatch.fnmatch(model_name, x) for x in EXCLUDE_FILTERS]):\n        # output sizes only checked if default res <= 448 * 448 to keep resource down\n        input_size = tuple([min(x, MAX_FWD_OUT_SIZE) for x in input_size])\n        input_tensor = torch.randn((batch_size, *input_size), device=torch_device)\n\n        # test forward_features (always unpooled) & forward_head w/ pre_logits\n        outputs = model.forward_features(input_tensor)\n        outputs_pre = model.forward_head(outputs, pre_logits=True)\n        assert outputs.shape[spatial_axis[0]] == pool_size[0], f'unpooled feature shape {outputs.shape} != config'\n        assert outputs.shape[spatial_axis[1]] == pool_size[1], f'unpooled feature shape {outputs.shape} != config'\n        assert outputs.shape[feat_axis] == model.num_features, f'unpooled feature dim {outputs.shape[feat_axis]} != model.num_features {model.num_features}'\n        assert outputs_pre.shape[1] == model.head_hidden_size, f'pre_logits feature dim {outputs_pre.shape[1]} != model.head_hidden_size {model.head_hidden_size}'\n\n        # test forward after deleting the classifier, output should be poooled, size(-1) == model.num_features\n        model.reset_classifier(0)\n        assert model.num_classes == 0, f'Expected num_classes to be 0 after reset_classifier(0), but got {model.num_classes}'\n        model.to(torch_device)\n        outputs = model.forward(input_tensor)\n        assert 
len(outputs.shape) == 2\n        assert outputs.shape[1] == model.head_hidden_size, f'feature dim w/ removed classifier {outputs.shape[1]} != model.head_hidden_size {model.head_hidden_size}'\n        assert outputs.shape == outputs_pre.shape, f'output shape of pre_logits {outputs_pre.shape} does not match reset_head(0) {outputs.shape}'\n\n        # test model forward after removing pooling and classifier\n        if not isinstance(model, EARLY_POOL_MODELS):\n            model.reset_classifier(0, '')  # reset classifier and disable global pooling\n            model.to(torch_device)\n            outputs = model.forward(input_tensor)\n            assert len(outputs.shape) == 4\n            assert outputs.shape[spatial_axis[0]] == pool_size[0] and outputs.shape[spatial_axis[1]] == pool_size[1]\n\n        # test classifier + global pool deletion via __init__\n        if 'pruned' not in model_name and not isinstance(model, EARLY_POOL_MODELS):\n            model = create_model(model_name, pretrained=False, num_classes=0, global_pool='').eval()\n            model.to(torch_device)\n            outputs = model.forward(input_tensor)\n            assert len(outputs.shape) == 4\n            assert outputs.shape[spatial_axis[0]] == pool_size[0] and outputs.shape[spatial_axis[1]] == pool_size[1]\n\n    # check classifier name matches default_cfg\n    if cfg.get('num_classes', None):\n        classifier = cfg['classifier']\n        if not isinstance(classifier, (tuple, list)):\n            classifier = classifier,\n        for c in classifier:\n            assert c + \".weight\" in state_dict.keys(), f'{c} not in model params'\n\n    # check first conv(s) names match default_cfg\n    first_conv = cfg['first_conv']\n    if isinstance(first_conv, str):\n        first_conv = (first_conv,)\n    assert isinstance(first_conv, (tuple, list))\n    for fc in first_conv:\n        assert fc + \".weight\" in state_dict.keys(), f'{fc} not in model 
params'\n\n\n@pytest.mark.cfg\n@pytest.mark.timeout(timeout360)\n@pytest.mark.parametrize('model_name', list_models(filter=NON_STD_FILTERS, exclude_filters=NON_STD_EXCLUDE_FILTERS, include_tags=True))\n@pytest.mark.parametrize('batch_size', [1])\ndef test_model_default_cfgs_non_std(model_name, batch_size):\n    \"\"\"Run a single forward pass with each model\"\"\"\n    model = create_model(model_name, pretrained=False)\n    model.eval()\n    model.to(torch_device)\n    assert getattr(model, 'num_classes') >= 0\n    assert getattr(model, 'num_features') > 0\n    assert getattr(model, 'head_hidden_size') > 0\n    state_dict = model.state_dict()\n    cfg = model.default_cfg\n\n    input_size = _get_input_size(model=model)\n    if max(input_size) > 320:  # FIXME const\n        pytest.skip(\"Fixed input size model > limit.\")\n\n    input_tensor = torch.randn((batch_size, *input_size), device=torch_device)\n    feat_dim = getattr(model, 'feature_dim', None)\n\n    outputs = model.forward_features(input_tensor)\n    outputs_pre = model.forward_head(outputs, pre_logits=True)\n    if isinstance(outputs, (tuple, list)):\n        # cannot currently verify multi-tensor output.\n        pass\n    else:\n        if feat_dim is None:\n            feat_dim = -1 if outputs.ndim == 3 else 1\n        assert outputs.shape[feat_dim] == model.num_features\n        assert outputs_pre.shape[1] == model.head_hidden_size\n\n    # test forward after deleting the classifier, output should be poooled, size(-1) == model.num_features\n    model.reset_classifier(0)\n    assert model.num_classes == 0, f'Expected num_classes to be 0 after reset_classifier(0), but got {model.num_classes}'\n    model.to(torch_device)\n    outputs = model.forward(input_tensor)\n    if isinstance(outputs,  (tuple, list)):\n        outputs = outputs[0]\n    if feat_dim is None:\n        feat_dim = -1 if outputs.ndim == 3 else 1\n    assert outputs.shape[feat_dim] == model.head_hidden_size, 'pooled num_features != 
config'\n    assert outputs.shape == outputs_pre.shape\n\n    model = create_model(model_name, pretrained=False, num_classes=0).eval()\n    model.to(torch_device)\n    outputs = model.forward(input_tensor)\n    if isinstance(outputs, (tuple, list)):\n        outputs = outputs[0]\n    if feat_dim is None:\n        feat_dim = -1 if outputs.ndim == 3 else 1\n    assert outputs.shape[feat_dim] == model.num_features\n\n    # check classifier name matches default_cfg\n    if cfg.get('num_classes', None):\n        classifier = cfg['classifier']\n        if not isinstance(classifier, (tuple, list)):\n            classifier = classifier,\n        for c in classifier:\n            assert c + \".weight\" in state_dict.keys(), f'{c} not in model params'\n\n    # check first conv(s) names match default_cfg\n    first_conv = cfg['first_conv']\n    if isinstance(first_conv, str):\n        first_conv = (first_conv,)\n    assert isinstance(first_conv, (tuple, list))\n    for fc in first_conv:\n        assert fc + \".weight\" in state_dict.keys(), f'{fc} not in model params'\n\n\nif 'GITHUB_ACTIONS' not in os.environ:\n    @pytest.mark.timeout(240)\n    @pytest.mark.parametrize('model_name', list_models(pretrained=True))\n    @pytest.mark.parametrize('batch_size', [1])\n    def test_model_load_pretrained(model_name, batch_size):\n        \"\"\"Create that pretrained weights load, verify support for in_chans != 3 while doing so.\"\"\"\n        in_chans = 3 if 'pruned' in model_name else 1  # pruning not currently supported with in_chans change\n        create_model(model_name, pretrained=True, in_chans=in_chans, num_classes=5)\n        create_model(model_name, pretrained=True, in_chans=in_chans, num_classes=0)\n\n    @pytest.mark.timeout(240)\n    @pytest.mark.parametrize('model_name', list_models(pretrained=True, exclude_filters=NON_STD_FILTERS))\n    @pytest.mark.parametrize('batch_size', [1])\n    def test_model_features_pretrained(model_name, batch_size):\n        \"\"\"Create 
that pretrained weights load when features_only==True.\"\"\"\n        create_model(model_name, pretrained=True, features_only=True)\n\n\n@pytest.mark.torchscript\n@pytest.mark.timeout(timeout120)\n@pytest.mark.parametrize(\n    'model_name', list_models(exclude_filters=EXCLUDE_FILTERS + EXCLUDE_JIT_FILTERS, name_matches_cfg=True))\n@pytest.mark.parametrize('batch_size', [1])\ndef test_model_forward_torchscript(model_name, batch_size):\n    \"\"\"Run a single forward pass with each model\"\"\"\n    input_size = _get_input_size(model_name=model_name, target=TARGET_JIT_SIZE)\n    if max(input_size) > MAX_JIT_SIZE:\n        pytest.skip(\"Fixed input size model > limit.\")\n\n    with set_scriptable(True):\n        model = create_model(model_name, pretrained=False)\n    model.eval()\n\n    model = torch.jit.script(model)\n    model.to(torch_device)\n    outputs = model(torch.randn((batch_size, *input_size)))\n\n    assert outputs.shape[0] == batch_size\n    assert not torch.isnan(outputs).any(), 'Output included NaNs'\n\n\nEXCLUDE_FEAT_FILTERS = [\n    '*pruned*',  # hopefully fix at some point\n] + NON_STD_FILTERS\nif 'GITHUB_ACTIONS' in os.environ:  # and 'Linux' in platform.system():\n    # GitHub Linux runner is slower and hits memory limits sooner than MacOS, exclude bigger models\n    EXCLUDE_FEAT_FILTERS += ['*resnext101_32x32d', '*resnext101_32x16d']\n\n\n@pytest.mark.features\n@pytest.mark.timeout(120)\n@pytest.mark.parametrize('model_name', list_models(exclude_filters=EXCLUDE_FILTERS + EXCLUDE_FEAT_FILTERS))\n@pytest.mark.parametrize('batch_size', [1])\ndef test_model_forward_features(model_name, batch_size):\n    \"\"\"Run a single forward pass with each model in feature extraction mode\"\"\"\n    model = create_model(model_name, pretrained=False, features_only=True)\n    model.eval()\n    expected_channels = model.feature_info.channels()\n    expected_reduction = model.feature_info.reduction()\n    assert len(expected_channels) >= 3  # all models here should 
have at least 3 default feat levels\n\n    input_size = _get_input_size(model=model, target=TARGET_FFEAT_SIZE)\n    if max(input_size) > MAX_FFEAT_SIZE:\n        pytest.skip(\"Fixed input size model > limit.\")\n    output_fmt = getattr(model, 'output_fmt', 'NCHW')\n    feat_axis = get_channel_dim(output_fmt)\n    spatial_axis = get_spatial_dim(output_fmt)\n    import math\n\n    outputs = model(torch.randn((batch_size, *input_size)))\n    assert len(expected_channels) == len(outputs)\n    spatial_size = input_size[-2:]\n    for e, r, o in zip(expected_channels, expected_reduction, outputs):\n        assert e == o.shape[feat_axis]\n        assert o.shape[spatial_axis[0]] <= math.ceil(spatial_size[0] / r) + 1\n        assert o.shape[spatial_axis[1]] <= math.ceil(spatial_size[1] / r) + 1\n        assert o.shape[0] == batch_size\n        assert not torch.isnan(o).any()\n\n\n@pytest.mark.features\n@pytest.mark.timeout(120)\n@pytest.mark.parametrize('model_name', list_models(module=FEAT_INTER_FILTERS, exclude_filters=EXCLUDE_FILTERS + ['*pruned*']))\n@pytest.mark.parametrize('batch_size', [1])\ndef test_model_forward_intermediates_features(model_name, batch_size):\n    \"\"\"Run a single forward pass with each model in feature extraction mode\"\"\"\n    model = create_model(model_name, pretrained=False, features_only=True, feature_cls='getter')\n    model.eval()\n    expected_channels = model.feature_info.channels()\n    expected_reduction = model.feature_info.reduction()\n\n    input_size = _get_input_size(model=model, target=TARGET_FFEAT_SIZE)\n    if max(input_size) > MAX_FFEAT_SIZE:\n        pytest.skip(\"Fixed input size model > limit.\")\n    output_fmt = getattr(model, 'output_fmt', 'NCHW')\n    feat_axis = get_channel_dim(output_fmt)\n    spatial_axis = get_spatial_dim(output_fmt)\n    import math\n\n    outputs = model(torch.randn((batch_size, *input_size)))\n    assert len(expected_channels) == len(outputs)\n    spatial_size = input_size[-2:]\n    for e, r, o 
in zip(expected_channels, expected_reduction, outputs):\n        print(o.shape)\n        assert e == o.shape[feat_axis]\n        assert o.shape[spatial_axis[0]] <= math.ceil(spatial_size[0] / r) + 1\n        assert o.shape[spatial_axis[1]] <= math.ceil(spatial_size[1] / r) + 1\n        assert o.shape[0] == batch_size\n        assert not torch.isnan(o).any()\n\n\n@pytest.mark.features\n@pytest.mark.timeout(120)\n@pytest.mark.parametrize('model_name', list_models(module=FEAT_INTER_FILTERS, exclude_filters=EXCLUDE_FILTERS + ['*pruned*']))\n@pytest.mark.parametrize('batch_size', [1])\ndef test_model_forward_intermediates(model_name, batch_size):\n    \"\"\"Run a single forward pass with each model in feature extraction mode\"\"\"\n    model = create_model(model_name, pretrained=False)\n    model.eval()\n    feature_info = timm.models.FeatureInfo(model.feature_info, len(model.feature_info))\n    expected_channels = feature_info.channels()\n    expected_reduction = feature_info.reduction()\n    assert len(expected_channels) >= 3  # all models here should have at least 3 feature levels\n\n    input_size = _get_input_size(model=model, target=TARGET_FFEAT_SIZE)\n    if max(input_size) > MAX_FFEAT_SIZE:\n        pytest.skip(\"Fixed input size model > limit.\")\n    output_fmt = 'NCHW'  # NOTE output_fmt determined by forward_intermediates() arg, not model attribute\n    feat_axis = get_channel_dim(output_fmt)\n    spatial_axis = get_spatial_dim(output_fmt)\n    import math\n\n    inpt = torch.randn((batch_size, *input_size))\n    output, intermediates = model.forward_intermediates(\n        inpt,\n        output_fmt=output_fmt,\n    )\n    assert len(expected_channels) == len(intermediates)\n    spatial_size = input_size[-2:]\n    for e, r, o in zip(expected_channels, expected_reduction, intermediates):\n        assert e == o.shape[feat_axis]\n        assert o.shape[spatial_axis[0]] <= math.ceil(spatial_size[0] / r) + 1\n        assert o.shape[spatial_axis[1]] <= 
math.ceil(spatial_size[1] / r) + 1\n        assert o.shape[0] == batch_size\n        assert not torch.isnan(o).any()\n\n    output2 = model.forward_features(inpt)\n    assert torch.allclose(output, output2)\n\n    # Test that grad-checkpointing, if supported\n    try:\n        model.set_grad_checkpointing()\n    except Exception:\n        # throws if not supported, that's fine\n        pass\n    else:\n        output3, _ = model.forward_intermediates(\n            inpt,\n            output_fmt=output_fmt,\n        )\n        assert torch.allclose(output, output3, rtol=1e-4, atol=1e-5), 'Output does not match'\n\n\n\ndef _create_fx_model(model, train=False):\n    # This block of code does a bit of juggling to handle any case where there are multiple outputs in train mode\n    # So we trace once and look at the graph, and get the indices of the nodes that lead into the original fx output\n    # node. Then we use those indices to select from train_nodes returned by torchvision get_graph_node_names\n    tracer_kwargs = dict(\n        leaf_modules=get_notrace_modules(),\n        autowrap_functions=get_notrace_functions(),\n        #enable_cpatching=True,\n        param_shapes_constant=True\n    )\n    train_nodes, eval_nodes = get_graph_node_names(model, tracer_kwargs=tracer_kwargs)\n\n    eval_return_nodes = [eval_nodes[-1]]\n    train_return_nodes = [train_nodes[-1]]\n    if train:\n        tracer = NodePathTracer(**tracer_kwargs)\n        graph = tracer.trace(model)\n        graph_nodes = list(reversed(graph.nodes))\n        output_node_names = [n.name for n in graph_nodes[0]._input_nodes.keys()]\n        graph_node_names = [n.name for n in graph_nodes]\n        output_node_indices = [-graph_node_names.index(node_name) for node_name in output_node_names]\n        train_return_nodes = [train_nodes[ix] for ix in output_node_indices]\n\n    fx_model = create_feature_extractor(\n        model,\n        train_return_nodes=train_return_nodes,\n        
eval_return_nodes=eval_return_nodes,\n        tracer_kwargs=tracer_kwargs,\n    )\n    return fx_model\n\n\nEXCLUDE_FX_FILTERS = ['vit_gi*', 'hiera*']\n# not enough memory to run fx on more models than other tests\nif 'GITHUB_ACTIONS' in os.environ:\n    EXCLUDE_FX_FILTERS += [\n        'beit_large*',\n        'mixer_l*',\n        '*nfnet_f2*',\n        '*resnext101_32x32d',\n        'resnetv2_152x2*',\n        'resmlp_big*',\n        'resnetrs270',\n        'swin_large*',\n        'vgg*',\n        'vit_large*',\n        'vit_base_patch8*',\n        'xcit_large*',\n    ]\n\n\n@pytest.mark.fxforward\n@pytest.mark.timeout(120)\n@pytest.mark.parametrize('model_name', list_models(exclude_filters=EXCLUDE_FILTERS + EXCLUDE_FX_FILTERS))\n@pytest.mark.parametrize('batch_size', [1])\ndef test_model_forward_fx(model_name, batch_size):\n    \"\"\"\n    Symbolically trace each model and run single forward pass through the resulting GraphModule\n    Also check that the output of a forward pass through the GraphModule is the same as that from the original Module\n    \"\"\"\n    if not has_fx_feature_extraction:\n        pytest.skip(\"Can't test FX. 
Torch >= 1.10 and Torchvision >= 0.11 are required.\")\n\n    model = create_model(model_name, pretrained=False)\n    model.eval()\n\n    input_size = _get_input_size(model=model, target=TARGET_FWD_FX_SIZE)\n    if max(input_size) > MAX_FWD_FX_SIZE:\n        pytest.skip(\"Fixed input size model > limit.\")\n    with torch.inference_mode():\n        inputs = torch.randn((batch_size, *input_size))\n        outputs = model(inputs)\n        if isinstance(outputs, tuple):\n            outputs = torch.cat(outputs)\n\n        model = _create_fx_model(model)\n        fx_outputs = tuple(model(inputs).values())\n        if isinstance(fx_outputs, tuple):\n            fx_outputs = torch.cat(fx_outputs)\n\n    assert torch.all(fx_outputs == outputs)\n    assert outputs.shape[0] == batch_size\n    assert not torch.isnan(outputs).any(), 'Output included NaNs'\n\n\n@pytest.mark.fxbackward\n@pytest.mark.timeout(120)\n@pytest.mark.parametrize('model_name', list_models(\n    exclude_filters=EXCLUDE_FILTERS + EXCLUDE_FX_FILTERS, name_matches_cfg=True))\n@pytest.mark.parametrize('batch_size', [2])\ndef test_model_backward_fx(model_name, batch_size):\n    \"\"\"Symbolically trace each model and run single backward pass through the resulting GraphModule\"\"\"\n    if not has_fx_feature_extraction:\n        pytest.skip(\"Can't test FX. 
Torch >= 1.10 and Torchvision >= 0.11 are required.\")\n\n    input_size = _get_input_size(model_name=model_name, target=TARGET_BWD_FX_SIZE)\n    if max(input_size) > MAX_BWD_FX_SIZE:\n        pytest.skip(\"Fixed input size model > limit.\")\n\n    model = create_model(model_name, pretrained=False, num_classes=42)\n    model.train()\n    num_params = sum([x.numel() for x in model.parameters()])\n    if 'GITHUB_ACTIONS' in os.environ and num_params > 100e6:\n        pytest.skip(\"Skipping FX backward test on model with more than 100M params.\")\n\n    model = _create_fx_model(model, train=True)\n    outputs = tuple(model(torch.randn((batch_size, *input_size))).values())\n    if isinstance(outputs, tuple):\n        outputs = torch.cat(outputs)\n    outputs.mean().backward()\n    for n, x in model.named_parameters():\n        assert x.grad is not None, f'No gradient for {n}'\n    num_grad = sum([x.grad.numel() for x in model.parameters() if x.grad is not None])\n\n    assert outputs.shape[-1] == 42\n    assert num_params == num_grad, 'Some parameters are missing gradients'\n    assert not torch.isnan(outputs).any(), 'Output included NaNs'\n\n\nif 'GITHUB_ACTIONS' not in os.environ:\n    # FIXME this test is causing GitHub actions to run out of RAM and abruptly kill the test process\n\n    # reason: model is scripted after fx tracing, but beit has torch.jit.is_scripting() control flow\n    EXCLUDE_FX_JIT_FILTERS = [\n        'deit_*_distilled_patch16_224',\n        'levit*',\n        'pit_*_distilled_224',\n    ] + EXCLUDE_FX_FILTERS\n\n\n    @pytest.mark.timeout(120)\n    @pytest.mark.parametrize(\n        'model_name', list_models(\n            exclude_filters=EXCLUDE_FILTERS + EXCLUDE_JIT_FILTERS + EXCLUDE_FX_JIT_FILTERS, name_matches_cfg=True))\n    @pytest.mark.parametrize('batch_size', [1])\n    def test_model_forward_fx_torchscript(model_name, batch_size):\n        \"\"\"Symbolically trace each model, script it, and run single forward pass\"\"\"\n        if not 
has_fx_feature_extraction:\n            pytest.skip(\"Can't test FX. Torch >= 1.10 and Torchvision >= 0.11 are required.\")\n\n        input_size = _get_input_size(model_name=model_name, target=TARGET_JIT_SIZE)\n        if max(input_size) > MAX_JIT_SIZE:\n            pytest.skip(\"Fixed input size model > limit.\")\n\n        with set_scriptable(True):\n            model = create_model(model_name, pretrained=False)\n        model.eval()\n\n        model = torch.jit.script(_create_fx_model(model))\n        with torch.inference_mode():\n            outputs = tuple(model(torch.randn((batch_size, *input_size))).values())\n            if isinstance(outputs, tuple):\n                outputs = torch.cat(outputs)\n\n        assert outputs.shape[0] == batch_size\n        assert not torch.isnan(outputs).any(), 'Output included NaNs'\n\n    @pytest.mark.timeout(120)\n    @pytest.mark.parametrize('model_name', [\"regnetx_002\"])\n    @pytest.mark.parametrize('batch_size', [1])\n    def test_model_forward_torchscript_with_features_fx(model_name, batch_size):\n        \"\"\"Create a model with feature extraction based on fx, script it, and run\n        a single forward pass\"\"\"\n        if not has_fx_feature_extraction:\n            pytest.skip(\"Can't test FX. Torch >= 1.10 and Torchvision >= 0.11 are required.\")\n\n        allowed_models = list_models(\n            exclude_filters=EXCLUDE_FILTERS + EXCLUDE_JIT_FILTERS + EXCLUDE_FX_JIT_FILTERS,\n            name_matches_cfg=True\n        )\n        assert model_name in allowed_models, f\"{model_name=} not supported for this test\"\n\n        input_size = _get_input_size(model_name=model_name, target=TARGET_JIT_SIZE)\n        assert max(input_size) <= MAX_JIT_SIZE, \"Fixed input size model > limit. 
Pick a different model to run this test\"\n\n        with set_scriptable(True):\n            model = create_model(model_name, pretrained=False, features_only=True, feature_cfg={\"feature_cls\": \"fx\"})\n        model.eval()\n\n        model = torch.jit.script(model)\n        with torch.inference_mode():\n            outputs = model(torch.randn((batch_size, *input_size)))\n\n        assert isinstance(outputs, list)\n\n        for tensor in outputs:\n            assert tensor.shape[0] == batch_size\n            assert not torch.isnan(tensor).any(), 'Output included NaNs'\n"
  },
  {
    "path": "tests/test_optim.py",
    "content": "\"\"\" Optimzier Tests\n\nThese tests were adapted from PyTorch' optimizer tests.\n\n\"\"\"\nimport functools\nimport importlib\nimport os\nfrom copy import deepcopy\n\nimport pytest\nimport torch\nfrom torch.nn import Parameter\nfrom torch.testing._internal.common_utils import TestCase\n\nfrom timm.optim import create_optimizer_v2, list_optimizers, get_optimizer_class, get_optimizer_info, OptimInfo\nfrom timm.optim import param_groups_layer_decay, param_groups_weight_decay\nfrom timm.scheduler import PlateauLRScheduler\n\ntorch_backend = os.environ.get('TORCH_BACKEND')\nif torch_backend is not None:\n    importlib.import_module(torch_backend)\ntorch_device = os.environ.get('TORCH_DEVICE', 'cuda')\n\n# HACK relying on internal PyTorch test functionality for comparisons that I don't want to write\ntorch_tc = TestCase()\n\n\ndef _test_basic_cases_template(weight, bias, input, constructor, scheduler_constructors):\n    weight = Parameter(weight)\n    bias = Parameter(bias)\n    input = Parameter(input)\n    optimizer = constructor(weight, bias)\n    schedulers = []\n    for scheduler_constructor in scheduler_constructors:\n        schedulers.append(scheduler_constructor(optimizer))\n\n    # to check if the optimizer can be printed as a string\n    optimizer.__repr__()\n\n    def fn():\n        optimizer.zero_grad()\n        y = weight.mv(input)\n        if y.is_cuda and bias.is_cuda and y.get_device() != bias.get_device():\n            y = y.cuda(bias.get_device())\n        loss = (y + bias).pow(2).sum()\n        loss.backward()\n        return loss\n\n    initial_value = fn().item()\n    for _i in range(200):\n        for scheduler in schedulers:\n            if isinstance(scheduler, PlateauLRScheduler):\n                val_loss = fn()\n                scheduler.step(val_loss)\n            else:\n                scheduler.step()\n        optimizer.step(fn)\n\n    assert fn().item() < initial_value\n\n\ndef _test_state_dict(weight, bias, input, 
constructor):\n    weight = Parameter(weight)\n    bias = Parameter(bias)\n    input = Parameter(input)\n\n    def fn_base(optimizer, weight, bias):\n        optimizer.zero_grad()\n        i = input_device if weight.device.type != 'cpu' else input\n        loss = (weight.mv(i) + bias).pow(2).sum()\n        loss.backward()\n        return loss\n\n    optimizer = constructor(weight, bias)\n    fn = functools.partial(fn_base, optimizer, weight, bias)\n\n    # Prime the optimizer\n    for _i in range(20):\n        optimizer.step(fn)\n    # Clone the weights and construct new optimizer for them\n    with torch.no_grad():\n        weight_c = Parameter(weight.clone().detach())\n        bias_c = Parameter(bias.clone().detach())\n    optimizer_c = constructor(weight_c, bias_c)\n    fn_c = functools.partial(fn_base, optimizer_c, weight_c, bias_c)\n    # Load state dict\n    state_dict = deepcopy(optimizer.state_dict())\n    state_dict_c = deepcopy(optimizer.state_dict())\n    optimizer_c.load_state_dict(state_dict_c)\n\n    # Run both optimizations in parallel\n    for _i in range(20):\n        optimizer.step(fn)\n        optimizer_c.step(fn_c)\n        torch_tc.assertEqual(weight, weight_c)\n        torch_tc.assertEqual(bias, bias_c)\n    # Make sure state dict is deterministic with equal but not identical parameters\n    torch_tc.assertEqual(optimizer.state_dict(), optimizer_c.state_dict())\n    # Make sure repeated parameters have identical representation in state dict\n    optimizer_c.param_groups.extend(optimizer_c.param_groups)\n    torch_tc.assertEqual(optimizer.state_dict()['param_groups'][-1], optimizer_c.state_dict()['param_groups'][-1])\n\n    # Check that state dict can be loaded even when we cast parameters\n    # to a different type and move to a different device.\n    if torch_device == 'cpu':\n        return\n    elif torch_device == 'cuda' and not torch.cuda.is_available():\n        return\n\n    with torch.no_grad():\n        input_device = 
Parameter(input.clone().detach().float().to(torch_device))\n        weight_device = Parameter(weight.clone().detach().to(torch_device))\n        bias_device = Parameter(bias.clone().detach().to(torch_device))\n    optimizer_device = constructor(weight_device, bias_device)\n    fn_device = functools.partial(fn_base, optimizer_device, weight_device, bias_device)\n\n    state_dict = deepcopy(optimizer.state_dict())\n    state_dict_c = deepcopy(optimizer.state_dict())\n    optimizer_device.load_state_dict(state_dict_c)\n\n    # Make sure state dict wasn't modified\n    torch_tc.assertEqual(state_dict, state_dict_c)\n\n    for _i in range(20):\n        optimizer.step(fn)\n        optimizer_device.step(fn_device)\n        torch_tc.assertEqual(weight, weight_device)\n        torch_tc.assertEqual(bias, bias_device)\n\n    # validate deepcopy() copies all public attributes\n    def getPublicAttr(obj):\n        return set(k for k in obj.__dict__ if not k.startswith('_'))\n\n    assert getPublicAttr(optimizer) == getPublicAttr(deepcopy(optimizer))\n\n\ndef _test_basic_cases(constructor, scheduler_constructors=None):\n    if scheduler_constructors is None:\n        scheduler_constructors = []\n    _test_state_dict(\n        torch.randn(10, 5),\n        torch.randn(10),\n        torch.randn(5),\n        constructor\n    )\n    _test_basic_cases_template(\n        torch.randn(10, 5),\n        torch.randn(10),\n        torch.randn(5),\n        constructor,\n        scheduler_constructors\n    )\n    # non-contiguous parameters\n    _test_basic_cases_template(\n        torch.randn(10, 5, 2)[..., 0],\n        torch.randn(10, 2)[..., 0],\n        torch.randn(5),\n        constructor,\n        scheduler_constructors\n    )\n    # CUDA\n    if torch_device == 'cpu':\n        return\n    elif torch_device == 'cuda' and not torch.cuda.is_available():\n        return\n\n    _test_basic_cases_template(\n        torch.randn(10, 5).to(torch_device),\n        
torch.randn(10).to(torch_device),\n        torch.randn(5).to(torch_device),\n        constructor,\n        scheduler_constructors\n    )\n\n\ndef _test_model(optimizer, params, device=torch.device('cpu'), after_step=0):\n    weight = torch.tensor(\n        [[-0.2109, -0.4976], [-0.1413, -0.3420], [-0.2524, 0.6976]],\n        device=device, requires_grad=True)\n    bias = torch.tensor([-0.1085, -0.2979, 0.6892], device=device, requires_grad=True)\n    weight2 = torch.tensor([[-0.0508, -0.3941, -0.2843]], device=device, requires_grad=True)\n    bias2 = torch.tensor([-0.0711], device=device, requires_grad=True)\n    input = torch.tensor([0.1, 0.2, 0.3, 0.4, 0.5, 0.6], device=device).reshape(3, 2)\n\n    model = torch.nn.Sequential(torch.nn.Linear(2, 3),\n                                torch.nn.Sigmoid(),\n                                torch.nn.Linear(3, 1),\n                                torch.nn.Sigmoid())\n    model.to(device)\n\n    pretrained_dict = model.state_dict()\n    pretrained_dict['0.weight'] = weight\n    pretrained_dict['0.bias'] = bias\n    pretrained_dict['2.weight'] = weight2\n    pretrained_dict['2.bias'] = bias2\n    model.load_state_dict(pretrained_dict)\n\n    optimizer = create_optimizer_v2(model, opt=optimizer, **params)\n\n    prev_loss = float('inf')\n    for i in range(20):\n        optimizer.zero_grad()\n        output = model(input)\n        loss = output.sum()\n        loss.backward()\n        loss = loss.item()\n        if i > after_step:\n            assert loss < prev_loss\n        prev_loss = loss\n        optimizer.step()\n\n\ndef rosenbrock(tensor):\n    x, y = tensor\n    return (1 - x) ** 2 + 100 * (y - x ** 2) ** 2\n\n\ndef drosenbrock(tensor):\n    x, y = tensor\n    return torch.tensor((-400 * x * (y - x ** 2) - 2 * (1 - x), 200 * (y - x ** 2)))\n\n\ndef _test_rosenbrock(constructor, scheduler_constructors=None):\n    if scheduler_constructors is None:\n        scheduler_constructors = []\n    params_t = torch.tensor([1.5, 
1.5])\n\n    params = Parameter(params_t)\n    optimizer = constructor([params])\n    schedulers = []\n    for scheduler_constructor in scheduler_constructors:\n        schedulers.append(scheduler_constructor(optimizer))\n\n    solution = torch.tensor([1, 1])\n    initial_dist = params.clone().detach().dist(solution)\n\n\n    def get_grad(_param, _sparse_grad, _w):\n        grad = drosenbrock(params.clone().detach())\n        # Depending on w, provide only the x or y gradient\n        if _sparse_grad:\n            if _w:\n                i = torch.tensor([[0, 0]], dtype=torch.int64)\n                x = grad[0]\n                v = torch.tensor([x / 4.0, x - x / 4.0])\n            else:\n                i = torch.tensor([[1, 1]], dtype=torch.int64)\n                y = grad[1]\n                v = torch.tensor([y - y / 4.0, y / 4.0])\n            grad_out = torch.sparse_coo_tensor(i, v, (2,), dtype=v.dtype)\n        else:\n            if _w:\n                grad_out = torch.tensor([grad[0], 0], dtype=_param.dtype)\n            else:\n                grad_out = torch.tensor([0, grad[1]], dtype=_param.dtype)\n        return grad_out\n\n\n    def eval(_param, _sparse_grad, _w):\n        # Depending on w, provide only the x or y gradient\n        optimizer.zero_grad()\n        loss = rosenbrock(_param)\n        loss.backward()\n\n        grad_out = get_grad(_param, _sparse_grad, _w)\n        with torch.no_grad():\n            _param.grad = grad_out.to_dense()\n\n        return loss\n\n    for i in range(2000):\n        # Do cyclic coordinate descent\n        w = i % 2\n        optimizer.step(functools.partial(eval, params, True, w))\n        for scheduler in schedulers:\n            if isinstance(scheduler, PlateauLRScheduler):\n                scheduler.step(rosenbrock(params))\n            else:\n                scheduler.step()\n\n    torch_tc.assertLessEqual(params.clone().detach().dist(solution), initial_dist)\n\n\ndef _build_params_dict(weight, bias, 
**kwargs):\n    return [{'params': [weight]}, dict(params=[bias], **kwargs)]\n\n\ndef _build_params_dict_single(weight, bias, **kwargs):\n    return [dict(params=bias, **kwargs)]\n\n\n@pytest.mark.parametrize('optimizer', list_optimizers(exclude_filters=('fused*', 'bnb*', 'kron*')))\ndef test_optim_factory(optimizer):\n    assert issubclass(get_optimizer_class(optimizer, bind_defaults=False), torch.optim.Optimizer)\n\n    opt_info = get_optimizer_info(optimizer)\n    assert isinstance(opt_info, OptimInfo)\n\n    lr = (1e-2,) * 4\n    if optimizer in ('mars', 'nadam', 'claprop', 'crmsproptf', 'cadafactorbv', 'csgdw', 'csgdc', 'clamb'):\n        lr = (1e-3,) * 4\n    elif optimizer in ('cmars',):\n        lr = (1e-4,) * 4\n\n    try:\n        if not opt_info.second_order:  # basic tests don't support second order right now\n            # test basic cases that don't need specific tuning via factory test\n            _test_basic_cases(\n                lambda weight, bias: create_optimizer_v2([weight, bias], optimizer, lr=lr[0])\n            )\n            _test_basic_cases(\n                lambda weight, bias: create_optimizer_v2(\n                    _build_params_dict(weight, bias, lr=lr[1]),\n                    optimizer,\n                    lr=lr[1] / 10)\n            )\n            _test_basic_cases(\n                lambda weight, bias: create_optimizer_v2(\n                    _build_params_dict_single(weight, bias, lr=lr[2]),\n                    optimizer,\n                    lr=lr[2] / 10)\n            )\n            _test_basic_cases(\n                lambda weight, bias: create_optimizer_v2(\n                    _build_params_dict_single(weight, bias, lr=lr[3]),\n                    optimizer)\n            )\n    except TypeError as e:\n        if 'radamw' in optimizer:\n            pytest.skip(\"Expected for 'radamw' (decoupled decay) to fail in older PyTorch versions.\")\n        else:\n            raise 
e\n\n\n\n#@pytest.mark.parametrize('optimizer', ['sgd', 'momentum'])\n# FIXME momentum variant frequently fails in GitHub runner, but never local after many attempts\n@pytest.mark.parametrize('optimizer', ['sgd'])\ndef test_sgd(optimizer):\n    # _test_basic_cases(\n    #     lambda weight, bias: create_optimizer_v2([weight, bias], optimizer, lr=1e-3),\n    #     [lambda opt: StepLR(opt, gamma=0.9, step_size=10)]\n    # )\n    # _test_basic_cases(\n    #     lambda weight, bias: create_optimizer_v2([weight, bias], optimizer, lr=1e-3),\n    #     [lambda opt: WarmUpLR(opt, warmup_factor=0.4, warmup_iters=4, warmup_method=\"linear\")]\n    # )\n    # _test_basic_cases(\n    #     lambda weight, bias: optimizer([weight, bias], lr=1e-3),\n    #     [lambda opt: WarmUpLR(opt, warmup_factor=0.4, warmup_iters=4, warmup_method=\"constant\")]\n    # )\n    # _test_basic_cases(\n    #     lambda weight, bias: optimizer([weight, bias], lr=1e-3),\n    #     [lambda opt: StepLR(opt, gamma=0.9, step_size=10),\n    #      lambda opt: WarmUpLR(opt, warmup_factor=0.4, warmup_iters=4)]\n    # )\n    # _test_basic_cases(\n    #     lambda weight, bias: optimizer([weight, bias], lr=1e-3),\n    #     [lambda opt: StepLR(opt, gamma=0.9, step_size=10),\n    #      lambda opt: ReduceLROnPlateau(opt)]\n    # )\n    # _test_basic_cases(\n    #     lambda weight, bias: optimizer([weight, bias], lr=1e-3),\n    #     [lambda opt: StepLR(opt, gamma=0.99, step_size=10),\n    #      lambda opt: ExponentialLR(opt, gamma=0.99),\n    #      lambda opt: ReduceLROnPlateau(opt)]\n    # )\n    _test_basic_cases(\n        lambda weight, bias: create_optimizer_v2([weight, bias], optimizer, lr=3e-3, momentum=1)\n    )\n    _test_basic_cases(\n        lambda weight, bias: create_optimizer_v2([weight, bias], optimizer, lr=3e-3, momentum=1, weight_decay=.1)\n    )\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-3)\n    )\n    _test_model(optimizer, 
dict(lr=1e-3))\n\n\n@pytest.mark.parametrize('optimizer',  ['adamw', 'adam', 'nadam', 'adamax', 'nadamw', 'adamwlegacy', 'adamc'])\ndef test_adam(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=5e-2)\n    )\n    _test_model(optimizer, dict(lr=5e-2))\n\n\n@pytest.mark.parametrize('optimizer',  ['kron'])\ndef test_kron(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-3)\n    )\n    _test_model(optimizer, dict(lr=1e-3))\n\n\n@pytest.mark.parametrize('optimizer',  ['muon', 'nmuon'])\ndef test_muon(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-3)\n    )\n    _test_model(optimizer, dict(lr=1e-3))\n\n\n@pytest.mark.parametrize('optimizer',  ['adamuon', 'nadamuon'])\ndef test_adamuon(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-3)\n    )\n    _test_model(optimizer, dict(lr=1e-3))\n\n\n@pytest.mark.parametrize('optimizer',  ['adopt', 'adoptw'])\ndef test_adopt(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=3e-3)\n    )\n    _test_model(optimizer, dict(lr=5e-2), after_step=1)  # note no convergence in first step for ADOPT\n\n\n@pytest.mark.parametrize('optimizer',  ['adan', 'adanw'])\ndef test_adan(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-3)\n    )\n    _test_model(optimizer, dict(lr=5e-2), after_step=1)  # note no convergence in first step for ADOPT\n\n\n@pytest.mark.parametrize('optimizer',  ['adabelief'])\ndef test_adabelief(optimizer):\n    _test_basic_cases(\n        lambda weight, bias: create_optimizer_v2([weight, bias], optimizer, lr=1e-3, weight_decay=1)\n    )\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=5e-2)\n    )\n    _test_model(optimizer, 
dict(lr=5e-2))\n\n\n@pytest.mark.parametrize('optimizer',  ['radam', 'radabelief'])\ndef test_rectified(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-3)\n    )\n    _test_model(optimizer, dict(lr=1e-3))\n\n\n@pytest.mark.parametrize('optimizer',   ['adadelta', 'adagrad'])\ndef test_adaother(optimizer):\n    _test_basic_cases(\n        lambda weight, bias: create_optimizer_v2([weight, bias], optimizer, lr=1e-3, weight_decay=1)\n    )\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-1)\n    )\n    _test_model(optimizer, dict(lr=5e-2))\n\n\n@pytest.mark.parametrize('optimizer',   ['adafactor', 'adafactorbv'])\ndef test_adafactor(optimizer):\n    _test_basic_cases(\n        lambda weight, bias: create_optimizer_v2([weight, bias], optimizer, lr=1e-3, weight_decay=1)\n    )\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=5e-2)\n    )\n    _test_model(optimizer, dict(lr=5e-2))\n\n\n@pytest.mark.parametrize('optimizer',  ['lamb', 'lambc'])\ndef test_lamb(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-3)\n    )\n    _test_model(optimizer, dict(lr=1e-3))\n\n\n@pytest.mark.parametrize('optimizer', ['laprop'])\ndef test_laprop(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-2)\n    )\n    _test_model(optimizer, dict(lr=1e-2))\n\n\n@pytest.mark.parametrize('optimizer',  ['lars', 'larc', 'nlars', 'nlarc'])\ndef test_lars(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-3)\n    )\n    _test_model(optimizer, dict(lr=1e-3))\n\n\n@pytest.mark.parametrize('optimizer',  ['madgrad', 'madgradw'])\ndef test_madgrad(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-2)\n    )\n    _test_model(optimizer, 
dict(lr=1e-2))\n\n\n@pytest.mark.parametrize('optimizer',  ['mars'])\ndef test_mars(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-3)\n    )\n    _test_model(optimizer, dict(lr=5e-2), after_step=1)  # note no convergence in first step for ADOPT\n\n\n@pytest.mark.parametrize('optimizer',  ['novograd'])\ndef test_novograd(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-3)\n    )\n    _test_model(optimizer, dict(lr=1e-3))\n\n\n@pytest.mark.parametrize('optimizer', ['rmsprop', 'rmsproptf'])\ndef test_rmsprop(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-2)\n    )\n    _test_model(optimizer, dict(lr=1e-2))\n\n\n@pytest.mark.parametrize('optimizer', ['adamp'])\ndef test_adamp(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=5e-2)\n    )\n    _test_model(optimizer, dict(lr=5e-2))\n\n\n@pytest.mark.parametrize('optimizer', ['sgdp'])\ndef test_sgdp(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-3)\n    )\n    _test_model(optimizer, dict(lr=1e-3))\n\n\n@pytest.mark.parametrize('optimizer', ['lookahead_sgd', 'lookahead_momentum'])\ndef test_lookahead_sgd(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-3)\n    )\n\n\n@pytest.mark.parametrize('optimizer', ['lookahead_adamw', 'lookahead_adam'])\ndef test_lookahead_adam(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=5e-2)\n    )\n\n\n@pytest.mark.parametrize('optimizer', ['lookahead_radam'])\ndef test_lookahead_radam(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=1e-4)\n    )\n\n\ndef test_param_groups_layer_decay_with_min():\n    model = torch.nn.Sequential(\n        torch.nn.Linear(10, 5),\n  
      torch.nn.ReLU(),\n        torch.nn.Linear(5, 2)\n    )\n    \n    param_groups = param_groups_layer_decay(\n        model,\n        weight_decay=0.05,\n        layer_decay=0.75,\n        min_scale=0.5,\n        verbose=True\n    )\n    \n    assert len(param_groups) > 0\n    # Verify layer scaling is applied with a min scale\n    for group in param_groups:\n        assert 'lr_scale' in group\n        assert group['lr_scale'] <= 1.0\n        assert group['lr_scale'] >= 0.5\n\n\ndef test_param_groups_layer_decay_with_matcher():\n    class ModelWithMatcher(torch.nn.Module):\n        def __init__(self):\n            super().__init__()\n            self.layer1 = torch.nn.Linear(10, 5)\n            self.layer2 = torch.nn.Linear(5, 2)\n            \n        def group_matcher(self, coarse=False):\n            return lambda name: int(name.split('.')[0][-1])\n            \n    model = ModelWithMatcher()\n    param_groups = param_groups_layer_decay(\n        model,\n        weight_decay=0.05,\n        layer_decay=0.75,\n        verbose=True\n    )\n    \n    assert len(param_groups) > 0\n    # Verify layer scaling is applied\n    for group in param_groups:\n        assert 'lr_scale' in group\n        assert 'weight_decay' in group\n        assert len(group['params']) > 0\n\n\ndef test_param_groups_weight_decay():\n    model = torch.nn.Sequential(\n        torch.nn.Linear(10, 5),\n        torch.nn.ReLU(),\n        torch.nn.Linear(5, 2)\n    )\n    weight_decay = 0.01\n    no_weight_decay_list = ['1.weight']\n    \n    param_groups = param_groups_weight_decay(\n        model, \n        weight_decay=weight_decay,\n        no_weight_decay_list=no_weight_decay_list\n    )\n    \n    assert len(param_groups) == 2\n    assert param_groups[0]['weight_decay'] == 0.0\n    assert param_groups[1]['weight_decay'] == weight_decay\n    \n    # Verify parameters are correctly grouped\n    no_decay_params = set(param_groups[0]['params'])\n    decay_params = 
set(param_groups[1]['params'])\n    \n    for name, param in model.named_parameters():\n        if param.ndim <= 1 or name.endswith(\".bias\") or name in no_weight_decay_list:\n            assert param in no_decay_params\n        else:\n            assert param in decay_params\n\n@pytest.mark.parametrize('optimizer', ['cadamp'])\ndef test_cadamp(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=5e-2)\n    )\n    _test_model(optimizer, dict(lr=5e-2))\n\n@pytest.mark.parametrize('optimizer', ['csgdp'])\ndef test_csgdp(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=5e-4)\n    )\n    _test_model(optimizer, dict(lr=5e-4))\n\n@pytest.mark.parametrize('optimizer', ['csgdw'])\ndef test_csgdw(optimizer):\n    _test_rosenbrock(\n        lambda params: create_optimizer_v2(params, optimizer, lr=5e-4)\n    )\n    _test_model(optimizer, dict(lr=5e-4))\n\n"
  },
  {
    "path": "tests/test_scheduler.py",
    "content": "\"\"\" Scheduler Tests\n\nTests for learning rate schedulers in timm.scheduler.\n\"\"\"\nimport math\nimport pytest\nimport torch\nfrom torch.nn import Parameter\n\nfrom timm.scheduler import (\n    CosineLRScheduler,\n    StepLRScheduler,\n    MultiStepLRScheduler,\n    PlateauLRScheduler,\n    PolyLRScheduler,\n    TanhLRScheduler,\n)\nfrom timm.scheduler.scheduler import Scheduler\n\n\ndef _create_optimizer(lr: float = 0.1, num_groups: int = 1) -> torch.optim.Optimizer:\n    \"\"\"Create a mock optimizer with simple parameters for testing.\"\"\"\n    param_groups = []\n    for _ in range(num_groups):\n        param = Parameter(torch.randn(10, 5))\n        param_groups.append({'params': [param], 'lr': lr})\n    optimizer = torch.optim.SGD(param_groups, lr=lr)\n    return optimizer\n\n\nclass TestSchedulerBasics:\n    \"\"\"Test basic scheduler initialization and stepping.\"\"\"\n\n    @pytest.mark.parametrize('scheduler_cls,kwargs', [\n        (CosineLRScheduler, {'t_initial': 100}),\n        (StepLRScheduler, {'decay_t': 10}),\n        (MultiStepLRScheduler, {'decay_t': [10, 20, 30]}),\n        (PlateauLRScheduler, {}),\n        (PolyLRScheduler, {'t_initial': 100}),\n        (TanhLRScheduler, {'t_initial': 100}),\n    ])\n    def test_scheduler_init(self, scheduler_cls, kwargs):\n        \"\"\"Test that all schedulers can be initialized.\"\"\"\n        optimizer = _create_optimizer()\n        scheduler = scheduler_cls(optimizer, **kwargs)\n        assert scheduler is not None\n        assert scheduler.optimizer is optimizer\n\n    @pytest.mark.parametrize('scheduler_cls,kwargs', [\n        (CosineLRScheduler, {'t_initial': 100}),\n        (StepLRScheduler, {'decay_t': 10}),\n        (MultiStepLRScheduler, {'decay_t': [10, 20, 30]}),\n        (PolyLRScheduler, {'t_initial': 100}),\n        (TanhLRScheduler, {'t_initial': 100}),\n    ])\n    def test_scheduler_step(self, scheduler_cls, kwargs):\n        \"\"\"Test that schedulers can step without 
error.\"\"\"\n        optimizer = _create_optimizer()\n        scheduler = scheduler_cls(optimizer, **kwargs)\n\n        initial_lr = optimizer.param_groups[0]['lr']\n        for epoch in range(10):\n            scheduler.step(epoch)\n\n        # LR should have changed after stepping\n        final_lr = optimizer.param_groups[0]['lr']\n        # For most schedulers, LR should decrease or stay same\n        assert final_lr <= initial_lr\n\n    def test_plateau_scheduler_step(self):\n        \"\"\"Test PlateauLRScheduler with metric.\"\"\"\n        optimizer = _create_optimizer()\n        scheduler = PlateauLRScheduler(optimizer, patience_t=2, decay_rate=0.5)\n\n        # Simulate plateau - same metric for multiple steps\n        for epoch in range(10):\n            scheduler.step(epoch, metric=1.0)\n\n\nclass TestWarmup:\n    \"\"\"Test warmup behavior across schedulers.\"\"\"\n\n    @pytest.mark.parametrize('scheduler_cls,kwargs', [\n        (CosineLRScheduler, {'t_initial': 100}),\n        (StepLRScheduler, {'decay_t': 10}),\n        (MultiStepLRScheduler, {'decay_t': [10, 20, 30]}),\n        (PolyLRScheduler, {'t_initial': 100}),\n        (TanhLRScheduler, {'t_initial': 100}),\n    ])\n    def test_warmup_lr_increases(self, scheduler_cls, kwargs):\n        \"\"\"Test that LR increases during warmup period.\"\"\"\n        base_lr = 0.1\n        warmup_lr_init = 0.001\n        warmup_t = 5\n\n        optimizer = _create_optimizer(lr=base_lr)\n        scheduler = scheduler_cls(\n            optimizer,\n            warmup_t=warmup_t,\n            warmup_lr_init=warmup_lr_init,\n            **kwargs,\n        )\n\n        # Initial LR should be warmup_lr_init\n        assert optimizer.param_groups[0]['lr'] == pytest.approx(warmup_lr_init, rel=1e-5)\n\n        # LR should increase during warmup\n        prev_lr = warmup_lr_init\n        for epoch in range(1, warmup_t):\n            scheduler.step(epoch)\n            current_lr = optimizer.param_groups[0]['lr']\n        
    assert current_lr > prev_lr, f\"LR should increase during warmup at epoch {epoch}\"\n            prev_lr = current_lr\n\n    @pytest.mark.parametrize('scheduler_cls,kwargs', [\n        (CosineLRScheduler, {'t_initial': 100}),\n        (StepLRScheduler, {'decay_t': 10, 'decay_rate': 0.5}),\n        (MultiStepLRScheduler, {'decay_t': [10, 20, 30], 'decay_rate': 0.5}),\n        (PolyLRScheduler, {'t_initial': 100}),\n        (TanhLRScheduler, {'t_initial': 100}),\n    ])\n    def test_warmup_prefix_reaches_target_lr(self, scheduler_cls, kwargs):\n        \"\"\"Test that target LR is reached at first step after warmup when warmup_prefix=True.\"\"\"\n        base_lr = 0.1\n        warmup_lr_init = 0.001\n        warmup_t = 5\n\n        optimizer = _create_optimizer(lr=base_lr)\n        scheduler = scheduler_cls(\n            optimizer,\n            warmup_t=warmup_t,\n            warmup_lr_init=warmup_lr_init,\n            warmup_prefix=True,\n            **kwargs,\n        )\n\n        # Step through warmup\n        for epoch in range(warmup_t):\n            scheduler.step(epoch)\n\n        # At t=warmup_t (first step after warmup), with warmup_prefix=True,\n        # the main schedule starts at t=0, which should be base_lr\n        scheduler.step(warmup_t)\n        lr_after_warmup = optimizer.param_groups[0]['lr']\n        assert lr_after_warmup == pytest.approx(base_lr, rel=1e-5), \\\n            f\"LR should be base_lr ({base_lr}) at first step after warmup, got {lr_after_warmup}\"\n\n\nclass TestCosineScheduler:\n    \"\"\"Test CosineLRScheduler specific behavior.\"\"\"\n\n    def test_cosine_decay(self):\n        \"\"\"Test that cosine scheduler decays LR correctly.\"\"\"\n        base_lr = 0.1\n        lr_min = 0.001\n        t_initial = 100\n\n        optimizer = _create_optimizer(lr=base_lr)\n        scheduler = CosineLRScheduler(\n            optimizer,\n            t_initial=t_initial,\n            lr_min=lr_min,\n        )\n\n        # At t=0, LR should 
be base_lr\n        assert optimizer.param_groups[0]['lr'] == pytest.approx(base_lr, rel=1e-5)\n\n        # At t=t_initial/2, LR should be approximately (base_lr + lr_min) / 2\n        scheduler.step(t_initial // 2)\n        mid_lr = optimizer.param_groups[0]['lr']\n        expected_mid = lr_min + 0.5 * (base_lr - lr_min) * (1 + math.cos(math.pi * 0.5))\n        assert mid_lr == pytest.approx(expected_mid, rel=1e-2)\n\n        # At t=t_initial, LR should be lr_min\n        scheduler.step(t_initial)\n        assert optimizer.param_groups[0]['lr'] == pytest.approx(lr_min, rel=1e-5)\n\n    def test_cosine_cycles(self):\n        \"\"\"Test cosine scheduler with multiple cycles.\"\"\"\n        base_lr = 0.1\n        lr_min = 0.001\n        t_initial = 50\n        cycle_limit = 2\n\n        optimizer = _create_optimizer(lr=base_lr)\n        scheduler = CosineLRScheduler(\n            optimizer,\n            t_initial=t_initial,\n            lr_min=lr_min,\n            cycle_limit=cycle_limit,\n        )\n\n        # Step through the first cycle - at t_initial-1, LR should be near minimum\n        scheduler.step(t_initial - 1)\n        lr_near_end = optimizer.param_groups[0]['lr']\n        assert lr_near_end < base_lr * 0.5, \"LR should be significantly lower near end of cycle\"\n\n        # After cycle limit is exceeded, LR should stay at lr_min\n        for epoch in range(t_initial * cycle_limit, t_initial * cycle_limit + 10):\n            scheduler.step(epoch)\n        lr_after_cycles = optimizer.param_groups[0]['lr']\n        assert lr_after_cycles == pytest.approx(lr_min, rel=1e-5)\n\n    def test_get_cycle_length(self):\n        \"\"\"Test get_cycle_length method.\"\"\"\n        optimizer = _create_optimizer()\n        t_initial = 100\n\n        scheduler = CosineLRScheduler(optimizer, t_initial=t_initial)\n        assert scheduler.get_cycle_length(1) == t_initial\n\n        # With warmup prefix\n        warmup_t = 10\n        scheduler_warmup = CosineLRScheduler(\n 
           optimizer,\n            t_initial=t_initial,\n            warmup_t=warmup_t,\n            warmup_prefix=True,\n        )\n        assert scheduler_warmup.get_cycle_length(1) == t_initial + warmup_t\n\n\nclass TestStepScheduler:\n    \"\"\"Test StepLRScheduler specific behavior.\"\"\"\n\n    def test_step_decay(self):\n        \"\"\"Test that step scheduler decays at correct intervals.\"\"\"\n        base_lr = 0.1\n        decay_t = 10\n        decay_rate = 0.5\n\n        optimizer = _create_optimizer(lr=base_lr)\n        scheduler = StepLRScheduler(\n            optimizer,\n            decay_t=decay_t,\n            decay_rate=decay_rate,\n        )\n\n        # Before first decay\n        scheduler.step(decay_t - 1)\n        assert optimizer.param_groups[0]['lr'] == pytest.approx(base_lr, rel=1e-5)\n\n        # After first decay\n        scheduler.step(decay_t)\n        assert optimizer.param_groups[0]['lr'] == pytest.approx(base_lr * decay_rate, rel=1e-5)\n\n        # After second decay\n        scheduler.step(2 * decay_t)\n        assert optimizer.param_groups[0]['lr'] == pytest.approx(base_lr * decay_rate ** 2, rel=1e-5)\n\n\nclass TestMultiStepScheduler:\n    \"\"\"Test MultiStepLRScheduler specific behavior.\"\"\"\n\n    def test_multistep_decay(self):\n        \"\"\"Test decay at specified milestones.\"\"\"\n        base_lr = 0.1\n        decay_t = [10, 20, 30]\n        decay_rate = 0.5\n\n        optimizer = _create_optimizer(lr=base_lr)\n        scheduler = MultiStepLRScheduler(\n            optimizer,\n            decay_t=decay_t,\n            decay_rate=decay_rate,\n        )\n\n        # Before first milestone\n        scheduler.step(8)\n        assert optimizer.param_groups[0]['lr'] == pytest.approx(base_lr, rel=1e-5)\n\n        # After first milestone (step 10 means we've passed milestone at 10)\n        scheduler.step(11)\n        assert optimizer.param_groups[0]['lr'] == pytest.approx(base_lr * decay_rate, rel=1e-5)\n\n        # After 
second milestone\n        scheduler.step(21)\n        assert optimizer.param_groups[0]['lr'] == pytest.approx(base_lr * decay_rate ** 2, rel=1e-5)\n\n        # After third milestone\n        scheduler.step(31)\n        assert optimizer.param_groups[0]['lr'] == pytest.approx(base_lr * decay_rate ** 3, rel=1e-5)\n\n\nclass TestPolyScheduler:\n    \"\"\"Test PolyLRScheduler specific behavior.\"\"\"\n\n    def test_poly_decay(self):\n        \"\"\"Test polynomial decay behavior.\"\"\"\n        base_lr = 0.1\n        lr_min = 0.001\n        t_initial = 100\n        power = 1.0  # Linear decay\n\n        optimizer = _create_optimizer(lr=base_lr)\n        scheduler = PolyLRScheduler(\n            optimizer,\n            t_initial=t_initial,\n            lr_min=lr_min,\n            power=power,\n        )\n\n        # At t=0, LR should be base_lr\n        assert optimizer.param_groups[0]['lr'] == pytest.approx(base_lr, rel=1e-5)\n\n        # At t=t_initial, LR should be lr_min\n        scheduler.step(t_initial)\n        assert optimizer.param_groups[0]['lr'] == pytest.approx(lr_min, rel=1e-5)\n\n\nclass TestTanhScheduler:\n    \"\"\"Test TanhLRScheduler specific behavior.\"\"\"\n\n    def test_tanh_decay(self):\n        \"\"\"Test tanh decay behavior.\"\"\"\n        base_lr = 0.1\n        lr_min = 0.001\n        t_initial = 100\n\n        optimizer = _create_optimizer(lr=base_lr)\n        scheduler = TanhLRScheduler(\n            optimizer,\n            t_initial=t_initial,\n            lr_min=lr_min,\n        )\n\n        # Collect LR values\n        lrs = [optimizer.param_groups[0]['lr']]\n        for epoch in range(1, t_initial + 1):\n            scheduler.step(epoch)\n            lrs.append(optimizer.param_groups[0]['lr'])\n\n        # LR should generally decrease (with possible non-monotonic behavior due to tanh)\n        assert lrs[-1] < lrs[0]\n\n\nclass TestStateDict:\n    \"\"\"Test state dict save/load functionality.\"\"\"\n\n    
@pytest.mark.parametrize('scheduler_cls,kwargs', [\n        (CosineLRScheduler, {'t_initial': 100}),\n        (StepLRScheduler, {'decay_t': 10}),\n        (MultiStepLRScheduler, {'decay_t': [10, 20, 30]}),\n        (PolyLRScheduler, {'t_initial': 100}),\n        (TanhLRScheduler, {'t_initial': 100}),\n    ])\n    def test_state_dict_save_load(self, scheduler_cls, kwargs):\n        \"\"\"Test that state dict can be saved and loaded.\"\"\"\n        optimizer = _create_optimizer()\n        scheduler = scheduler_cls(optimizer, **kwargs)\n\n        # Step a few times\n        for epoch in range(5):\n            scheduler.step(epoch)\n\n        # Save state\n        state_dict = scheduler.state_dict()\n        assert isinstance(state_dict, dict)\n\n        # Create new scheduler and load state\n        optimizer2 = _create_optimizer()\n        scheduler2 = scheduler_cls(optimizer2, **kwargs)\n        scheduler2.load_state_dict(state_dict)\n\n        # State should be restored\n        assert scheduler2.state_dict() == state_dict\n\n    def test_plateau_state_dict_save_load(self):\n        \"\"\"Test PlateauLRScheduler state dict save/load.\"\"\"\n        optimizer = _create_optimizer()\n        scheduler = PlateauLRScheduler(optimizer)\n\n        # Step a few times\n        for epoch in range(5):\n            scheduler.step(epoch, metric=1.0)\n\n        # Save state\n        state_dict = scheduler.state_dict()\n        assert isinstance(state_dict, dict)\n\n        # Create new scheduler and load state\n        optimizer2 = _create_optimizer()\n        scheduler2 = PlateauLRScheduler(optimizer2)\n        scheduler2.load_state_dict(state_dict)\n\n        # State should be restored\n        assert scheduler2.state_dict() == state_dict\n\n\nclass TestStepUpdate:\n    \"\"\"Test step_update for update-based scheduling.\"\"\"\n\n    @pytest.mark.parametrize('scheduler_cls,kwargs', [\n        (CosineLRScheduler, {'t_initial': 100}),\n        (StepLRScheduler, {'decay_t': 10, 
'decay_rate': 0.5}),\n        (MultiStepLRScheduler, {'decay_t': [10, 20, 30], 'decay_rate': 0.5}),\n        (PolyLRScheduler, {'t_initial': 100}),\n        (TanhLRScheduler, {'t_initial': 100}),\n    ])\n    def test_step_update_with_t_in_epochs_false(self, scheduler_cls, kwargs):\n        \"\"\"Test step_update when t_in_epochs=False.\"\"\"\n        optimizer = _create_optimizer()\n        scheduler = scheduler_cls(\n            optimizer,\n            t_in_epochs=False,\n            **kwargs,\n        )\n\n        initial_lr = optimizer.param_groups[0]['lr']\n\n        # step_update should work when t_in_epochs=False\n        for update in range(50):\n            scheduler.step_update(update)\n\n        # LR should have changed for all these schedulers by step 50\n        final_lr = optimizer.param_groups[0]['lr']\n        assert final_lr != initial_lr, f\"LR should change after 50 updates for {scheduler_cls.__name__}\"\n\n\nclass TestMultipleParamGroups:\n    \"\"\"Test schedulers with multiple parameter groups.\"\"\"\n\n    @pytest.mark.parametrize('scheduler_cls,kwargs', [\n        (CosineLRScheduler, {'t_initial': 100}),\n        (StepLRScheduler, {'decay_t': 10}),\n        (MultiStepLRScheduler, {'decay_t': [10, 20, 30]}),\n        (PolyLRScheduler, {'t_initial': 100}),\n        (TanhLRScheduler, {'t_initial': 100}),\n    ])\n    def test_multiple_param_groups(self, scheduler_cls, kwargs):\n        \"\"\"Test that schedulers handle multiple param groups correctly.\"\"\"\n        optimizer = _create_optimizer(num_groups=3)\n        scheduler = scheduler_cls(optimizer, **kwargs)\n\n        initial_lrs = [pg['lr'] for pg in optimizer.param_groups]\n\n        for epoch in range(20):\n            scheduler.step(epoch)\n\n        final_lrs = [pg['lr'] for pg in optimizer.param_groups]\n\n        # All param groups should be updated\n        for i, (initial, final) in enumerate(zip(initial_lrs, final_lrs)):\n            assert final <= initial, f\"Param group {i} 
LR should decrease or stay same\"\n\n\nclass TestNoise:\n    \"\"\"Test noise application in schedulers.\"\"\"\n\n    @pytest.mark.parametrize('scheduler_cls,kwargs', [\n        (CosineLRScheduler, {'t_initial': 100}),\n        (StepLRScheduler, {'decay_t': 10}),\n        (PolyLRScheduler, {'t_initial': 100}),\n        (TanhLRScheduler, {'t_initial': 100}),\n    ])\n    def test_noise_range(self, scheduler_cls, kwargs):\n        \"\"\"Test that noise is applied within specified range.\"\"\"\n        optimizer = _create_optimizer()\n        noise_range_t = (10, 50)\n\n        scheduler = scheduler_cls(\n            optimizer,\n            noise_range_t=noise_range_t,\n            noise_pct=0.5,\n            noise_seed=42,\n            **kwargs,\n        )\n\n        # Collect LRs with same seed - should be deterministic\n        lrs_run1 = []\n        for epoch in range(60):\n            scheduler.step(epoch)\n            lrs_run1.append(optimizer.param_groups[0]['lr'])\n\n        # Reset and run again with same seed\n        optimizer2 = _create_optimizer()\n        scheduler2 = scheduler_cls(\n            optimizer2,\n            noise_range_t=noise_range_t,\n            noise_pct=0.5,\n            noise_seed=42,\n            **kwargs,\n        )\n\n        lrs_run2 = []\n        for epoch in range(60):\n            scheduler2.step(epoch)\n            lrs_run2.append(optimizer2.param_groups[0]['lr'])\n\n        # With same seed, noise should be deterministic\n        assert lrs_run1 == lrs_run2\n\n\nclass TestKDecay:\n    \"\"\"Test k-decay option in cosine and poly schedulers.\"\"\"\n\n    def test_cosine_k_decay(self):\n        \"\"\"Test k-decay in cosine scheduler.\"\"\"\n        optimizer1 = _create_optimizer()\n        optimizer2 = _create_optimizer()\n\n        scheduler_k1 = CosineLRScheduler(optimizer1, t_initial=100, k_decay=1.0)\n        scheduler_k2 = CosineLRScheduler(optimizer2, t_initial=100, k_decay=2.0)\n\n        # Different k values should 
produce different schedules\n        lrs_k1 = []\n        lrs_k2 = []\n        for epoch in range(100):\n            scheduler_k1.step(epoch)\n            scheduler_k2.step(epoch)\n            lrs_k1.append(optimizer1.param_groups[0]['lr'])\n            lrs_k2.append(optimizer2.param_groups[0]['lr'])\n\n        # The schedules should differ (except at endpoints)\n        assert lrs_k1[50] != lrs_k2[50]\n\n    def test_poly_k_decay(self):\n        \"\"\"Test k-decay in poly scheduler.\"\"\"\n        optimizer1 = _create_optimizer()\n        optimizer2 = _create_optimizer()\n\n        scheduler_k1 = PolyLRScheduler(optimizer1, t_initial=100, k_decay=1.0)\n        scheduler_k2 = PolyLRScheduler(optimizer2, t_initial=100, k_decay=2.0)\n\n        lrs_k1 = []\n        lrs_k2 = []\n        for epoch in range(100):\n            scheduler_k1.step(epoch)\n            scheduler_k2.step(epoch)\n            lrs_k1.append(optimizer1.param_groups[0]['lr'])\n            lrs_k2.append(optimizer2.param_groups[0]['lr'])\n\n        # The schedules should differ\n        assert lrs_k1[50] != lrs_k2[50]\n"
  },
  {
    "path": "tests/test_utils.py",
    "content": "from torch.nn.modules.batchnorm import BatchNorm2d\nfrom torchvision.ops.misc import FrozenBatchNorm2d\n\nimport timm\nimport pytest\nfrom timm.utils.model import freeze, unfreeze\nfrom timm.utils.model import ActivationStatsHook\nfrom timm.utils.model import extract_spp_stats\n\nfrom timm.utils.model import _freeze_unfreeze\nfrom timm.utils.model import avg_sq_ch_mean, avg_ch_var, avg_ch_var_residual\nfrom timm.utils.model import reparameterize_model\nfrom timm.utils.model import get_state_dict\n\ndef test_freeze_unfreeze():\n    model = timm.create_model('resnet18')\n\n    # Freeze all\n    freeze(model)\n    # Check top level module\n    assert model.fc.weight.requires_grad == False\n    # Check submodule\n    assert model.layer1[0].conv1.weight.requires_grad == False\n    # Check BN\n    assert isinstance(model.layer1[0].bn1, FrozenBatchNorm2d)\n\n    # Unfreeze all\n    unfreeze(model)\n    # Check top level module\n    assert model.fc.weight.requires_grad == True\n    # Check submodule\n    assert model.layer1[0].conv1.weight.requires_grad == True\n    # Check BN\n    assert isinstance(model.layer1[0].bn1, BatchNorm2d)\n\n    # Freeze some\n    freeze(model, ['layer1', 'layer2.0'])\n    # Check frozen\n    assert model.layer1[0].conv1.weight.requires_grad == False\n    assert isinstance(model.layer1[0].bn1, FrozenBatchNorm2d)\n    assert model.layer2[0].conv1.weight.requires_grad == False\n    # Check not frozen\n    assert model.layer3[0].conv1.weight.requires_grad == True\n    assert isinstance(model.layer3[0].bn1, BatchNorm2d)\n    assert model.layer2[1].conv1.weight.requires_grad == True\n\n    # Unfreeze some\n    unfreeze(model, ['layer1', 'layer2.0'])\n    # Check not frozen\n    assert model.layer1[0].conv1.weight.requires_grad == True\n    assert isinstance(model.layer1[0].bn1, BatchNorm2d)\n    assert model.layer2[0].conv1.weight.requires_grad == True\n\n    # Freeze/unfreeze BN\n    # From root\n    freeze(model, ['layer1.0.bn1'])\n 
   assert isinstance(model.layer1[0].bn1, FrozenBatchNorm2d)\n    unfreeze(model, ['layer1.0.bn1'])\n    assert isinstance(model.layer1[0].bn1, BatchNorm2d)\n    # From direct parent\n    freeze(model.layer1[0], ['bn1'])\n    assert isinstance(model.layer1[0].bn1, FrozenBatchNorm2d)    \n    unfreeze(model.layer1[0], ['bn1'])\n    assert isinstance(model.layer1[0].bn1, BatchNorm2d)\n\ndef test_activation_stats_hook_validation():\n    model = timm.create_model('resnet18')\n    \n    def test_hook(model, input, output):\n        return output.mean().item()\n    \n    # Test error case with mismatched lengths\n    with pytest.raises(ValueError, match=\"Please provide `hook_fns` for each `hook_fn_locs`\"):\n        ActivationStatsHook(\n            model,\n            hook_fn_locs=['layer1.0.conv1', 'layer1.0.conv2'],\n            hook_fns=[test_hook]\n        )\n\n\ndef test_extract_spp_stats():\n    model = timm.create_model('resnet18')\n    \n    def test_hook(model, input, output):\n        return output.mean().item()\n    \n    stats = extract_spp_stats(\n        model,\n        hook_fn_locs=['layer1.0.conv1'],\n        hook_fns=[test_hook],\n        input_shape=[2, 3, 32, 32]\n    )\n    \n    assert isinstance(stats, dict)\n    assert test_hook.__name__ in stats\n    assert isinstance(stats[test_hook.__name__], list)\n    assert len(stats[test_hook.__name__]) > 0\n\ndef test_freeze_unfreeze_bn_root():\n    import torch.nn as nn\n    from timm.layers import BatchNormAct2d\n    \n    # Create batch norm layers\n    bn = nn.BatchNorm2d(10)\n    bn_act = BatchNormAct2d(10)\n    \n    # Test with BatchNorm2d as root\n    with pytest.raises(AssertionError):\n        _freeze_unfreeze(bn, mode=\"freeze\")\n    \n    # Test with BatchNormAct2d as root\n    with pytest.raises(AssertionError):\n        _freeze_unfreeze(bn_act, mode=\"freeze\")\n\n\ndef test_activation_stats_functions():\n    import torch\n    \n    # Create sample input tensor [batch, channels, height, 
width]\n    x = torch.randn(2, 3, 4, 4)\n    \n    # Test avg_sq_ch_mean\n    result1 = avg_sq_ch_mean(None, None, x)\n    assert isinstance(result1, float)\n    \n    # Test avg_ch_var\n    result2 = avg_ch_var(None, None, x)\n    assert isinstance(result2, float)\n    \n    # Test avg_ch_var_residual\n    result3 = avg_ch_var_residual(None, None, x)\n    assert isinstance(result3, float)\n\n\ndef test_reparameterize_model():\n    import torch.nn as nn\n    \n    class FusableModule(nn.Module):\n        def __init__(self):\n            super().__init__()\n            self.conv = nn.Conv2d(3, 3, 1)\n        \n        def fuse(self):\n            return nn.Identity()\n    \n    class ModelWithFusable(nn.Module):\n        def __init__(self):\n            super().__init__()\n            self.fusable = FusableModule()\n            self.normal = nn.Linear(10, 10)\n    \n    model = ModelWithFusable()\n    \n    # Test with inplace=False (should create a copy)\n    new_model = reparameterize_model(model, inplace=False)\n    assert isinstance(new_model.fusable, nn.Identity)\n    assert isinstance(model.fusable, FusableModule)  # Original unchanged\n    \n    # Test with inplace=True\n    reparameterize_model(model, inplace=True)\n    assert isinstance(model.fusable, nn.Identity)\n\n\ndef test_get_state_dict_custom_unwrap():\n    import torch.nn as nn\n    \n    class CustomModel(nn.Module):\n        def __init__(self):\n            super().__init__()\n            self.linear = nn.Linear(10, 10)\n    \n    model = CustomModel()\n    \n    def custom_unwrap(m):\n        return m\n    \n    state_dict = get_state_dict(model, unwrap_fn=custom_unwrap)\n    assert 'linear.weight' in state_dict\n    assert 'linear.bias' in state_dict\n\n\ndef test_freeze_unfreeze_string_input():\n    model = timm.create_model('resnet18')\n    \n    # Test with string input\n    _freeze_unfreeze(model, 'layer1', mode='freeze')\n    assert model.layer1[0].conv1.weight.requires_grad == False\n    
\n    # Test unfreezing with string input\n    _freeze_unfreeze(model, 'layer1', mode='unfreeze')\n    assert model.layer1[0].conv1.weight.requires_grad == True\n\n"
  },
  {
    "path": "timm/__init__.py",
    "content": "from .version import __version__ as __version__\nfrom .layers import (\n    is_scriptable as is_scriptable,\n    is_exportable as is_exportable,\n    set_scriptable as set_scriptable,\n    set_exportable as set_exportable,\n)\nfrom .models import (\n    create_model as create_model,\n    list_models as list_models,\n    list_pretrained as list_pretrained,\n    is_model as is_model,\n    list_modules as list_modules,\n    model_entrypoint as model_entrypoint,\n    is_model_pretrained as is_model_pretrained,\n    get_pretrained_cfg as get_pretrained_cfg,\n    get_pretrained_cfg_value as get_pretrained_cfg_value,\n)\n"
  },
  {
    "path": "timm/data/__init__.py",
    "content": "from .auto_augment import RandAugment, AutoAugment, rand_augment_ops, auto_augment_policy,\\\n    rand_augment_transform, auto_augment_transform\nfrom .config import resolve_data_config, resolve_model_data_config\nfrom .constants import *\nfrom .dataset import ImageDataset, IterableImageDataset, AugMixDataset\nfrom .dataset_factory import create_dataset\nfrom .dataset_info import DatasetInfo, CustomDatasetInfo\nfrom .imagenet_info import ImageNetInfo, infer_imagenet_subset\nfrom .loader import create_loader\nfrom .mixup import Mixup, FastCollateMixup\nfrom .naflex_dataset import NaFlexMapDatasetWrapper, calculate_naflex_batch_size\nfrom .naflex_loader import create_naflex_loader\nfrom .naflex_mixup import NaFlexMixup, pairwise_mixup_target, mix_batch_variable_size\nfrom .naflex_transforms import (\n    ResizeToSequence,\n    CenterCropToSequence,\n    RandomCropToSequence,\n    RandomResizedCropToSequence,\n    ResizeKeepRatioToSequence,\n    Patchify,\n    patchify_image,\n)\nfrom .readers import create_reader\nfrom .readers import get_img_extensions, is_img_extension, set_img_extensions, add_img_extensions, del_img_extensions\nfrom .real_labels import RealLabelsImagenet\nfrom .transforms import *\nfrom .transforms_factory import create_transform\n"
  },
  {
    "path": "timm/data/_info/imagenet12k_synsets.txt",
    "content": "n00005787\nn00006484\nn00007846\nn00015388\nn00017222\nn00021265\nn00021939\nn00120010\nn00141669\nn00288000\nn00288384\nn00324978\nn00326094\nn00433458\nn00433661\nn00433802\nn00434075\nn00439826\nn00440039\nn00440382\nn00440509\nn00440747\nn00440941\nn00441073\nn00441824\nn00442115\nn00442437\nn00442847\nn00442981\nn00443231\nn00443692\nn00443803\nn00444340\nn00444651\nn00444846\nn00444937\nn00445055\nn00445226\nn00445351\nn00445685\nn00445802\nn00446311\nn00446493\nn00446804\nn00446980\nn00447073\nn00447221\nn00447463\nn00447540\nn00447957\nn00448126\nn00448232\nn00448466\nn00448640\nn00448748\nn00448872\nn00448958\nn00449054\nn00449168\nn00449295\nn00449517\nn00449695\nn00449796\nn00449892\nn00449977\nn00450070\nn00450335\nn00450700\nn00450866\nn00450998\nn00451186\nn00451370\nn00451563\nn00451635\nn00452034\nn00452152\nn00452293\nn00452864\nn00453126\nn00453313\nn00453396\nn00453478\nn00453935\nn00454237\nn00454395\nn00454493\nn00454624\nn00454983\nn00455173\nn00456465\nn00463246\nn00463543\nn00464277\nn00464478\nn00464651\nn00464894\nn00466273\nn00466377\nn00466524\nn00466630\nn00466712\nn00466880\nn00467320\nn00467536\nn00467719\nn00467995\nn00468299\nn00468480\nn00469651\nn00470554\nn00470682\nn00470830\nn00470966\nn00471437\nn00471613\nn00474568\nn00474657\nn00474881\nn00475014\nn00475273\nn00475403\nn00475535\nn00475661\nn00475787\nn00476235\nn00476389\nn00477392\nn00477639\nn00478262\nn00479076\nn00479440\nn00479616\nn00479887\nn00480211\nn00480366\nn00480508\nn00480993\nn00481803\nn00482122\nn00482298\nn00483205\nn00483313\nn00483409\nn00483508\nn00483605\nn00483705\nn00483848\nn00523513\nn00825773\nn00887544\nn01055165\nn01314388\nn01314663\nn01314781\nn01315213\nn01316422\nn01317089\nn01317294\nn01317541\nn01317813\nn01317916\nn01318279\nn01318381\nn01318894\nn01319467\nn01321123\nn01321230\nn01321456\nn01321579\nn01321770\nn01321854\nn01322221\nn01322343\nn01322508\nn01322604\nn01322685\nn01322898\nn01322983\nn01323068\nn01323155\nn013
23261\nn01323355\nn01323493\nn01323599\nn01324431\nn01324610\nn01326291\nn01338685\nn01339083\nn01339336\nn01339471\nn01339801\nn01340014\nn01379389\nn01381044\nn01384164\nn01386354\nn01392275\nn01392380\nn01395254\nn01396048\nn01397114\nn01397871\nn01402600\nn01405007\nn01405616\nn01407798\nn01410457\nn01415626\nn01421807\nn01422335\nn01424420\nn01438581\nn01439121\nn01439514\nn01440160\nn01440764\nn01441117\nn01442972\nn01443243\nn01443537\nn01443831\nn01444339\nn01446760\nn01447331\nn01447658\nn01448291\nn01448594\nn01448951\nn01449374\nn01449712\nn01451426\nn01453087\nn01454545\nn01455778\nn01456756\nn01457852\nn01458842\nn01459791\nn01461315\nn01462042\nn01462544\nn01464844\nn01468238\nn01468712\nn01469103\nn01471682\nn01472303\nn01477525\nn01477875\nn01482071\nn01482330\nn01483830\nn01484097\nn01484285\nn01484850\nn01485479\nn01486838\nn01487506\nn01488038\nn01489501\nn01489709\nn01489920\nn01490112\nn01490360\nn01490670\nn01491006\nn01491361\nn01491874\nn01492569\nn01493146\nn01494475\nn01495006\nn01495493\nn01495701\nn01496331\nn01497118\nn01498041\nn01498989\nn01499396\nn01500091\nn01500476\nn01501160\nn01503061\nn01503976\nn01504179\nn01504344\nn01514668\nn01514752\nn01514859\nn01515303\nn01517565\nn01517966\nn01518878\nn01519563\nn01519873\nn01520576\nn01521399\nn01521756\nn01524359\nn01526521\nn01527194\nn01527347\nn01527617\nn01527917\nn01528396\nn01528654\nn01528845\nn01529672\nn01530439\nn01530575\nn01531178\nn01531344\nn01531512\nn01531811\nn01531971\nn01532325\nn01532511\nn01532829\nn01533000\nn01533339\nn01533481\nn01533651\nn01533893\nn01534155\nn01534433\nn01534582\nn01535140\nn01535469\nn01535690\nn01536035\nn01536186\nn01536334\nn01536644\nn01536780\nn01537134\nn01537544\nn01537895\nn01538059\nn01538200\nn01538630\nn01538955\nn01539573\nn01539925\nn01540090\nn01540233\nn01540566\nn01540832\nn01541102\nn01541386\nn01541760\nn01541922\nn01542786\nn01543175\nn01543632\nn01544389\nn01544704\nn01545574\nn01546039\nn01546506\nn01547832\nn01548301\nn0
1548492\nn01548694\nn01548865\nn01549053\nn01549430\nn01549641\nn01549886\nn01550172\nn01551080\nn01551300\nn01551711\nn01552034\nn01552813\nn01553142\nn01554448\nn01555004\nn01555305\nn01555809\nn01556182\nn01557185\nn01557962\nn01558149\nn01558307\nn01558461\nn01558594\nn01558765\nn01558993\nn01559477\nn01559639\nn01559804\nn01560105\nn01560280\nn01560419\nn01560636\nn01560793\nn01560935\nn01561452\nn01561732\nn01562014\nn01562265\nn01562451\nn01563128\nn01563449\nn01563746\nn01563945\nn01564217\nn01564394\nn01564773\nn01564914\nn01565078\nn01565345\nn01565599\nn01565930\nn01566207\nn01566645\nn01567133\nn01567678\nn01567879\nn01568294\nn01568720\nn01568892\nn01569060\nn01569262\nn01569423\nn01569566\nn01569836\nn01569971\nn01570267\nn01570421\nn01570676\nn01570839\nn01571904\nn01572328\nn01572489\nn01572654\nn01572782\nn01573074\nn01573240\nn01573360\nn01573898\nn01574045\nn01574390\nn01574560\nn01574801\nn01575117\nn01575401\nn01575745\nn01576076\nn01576695\nn01577035\nn01577659\nn01577941\nn01578180\nn01578575\nn01579028\nn01579149\nn01579260\nn01579410\nn01579578\nn01579729\nn01580077\nn01580870\nn01581166\nn01581730\nn01581984\nn01582220\nn01582398\nn01582856\nn01583209\nn01583495\nn01583828\nn01584225\nn01584695\nn01584853\nn01585121\nn01585287\nn01585422\nn01585715\nn01586020\nn01586374\nn01586941\nn01587526\nn01587834\nn01588002\nn01588725\nn01589286\nn01589718\nn01589893\nn01591005\nn01591123\nn01591301\nn01591697\nn01592084\nn01592257\nn01592387\nn01592540\nn01592694\nn01593028\nn01594004\nn01594372\nn01594787\nn01594968\nn01595168\nn01595450\nn01595974\nn01596273\nn01596608\nn01597022\nn01597336\nn01597737\nn01597906\nn01598074\nn01598588\nn01598988\nn01599159\nn01599269\nn01599556\nn01600085\nn01600657\nn01601068\nn01601694\nn01602630\nn01602832\nn01603152\nn01603600\nn01603812\nn01603953\nn01604330\nn01604968\nn01605630\nn01606522\nn01606672\nn01606809\nn01607600\nn01607812\nn01607962\nn01608265\nn01608432\nn01608814\nn01609062\nn01609391\nn01609751\n
n01609956\nn01610100\nn01610226\nn01610552\nn01610955\nn01611472\nn01611800\nn01611969\nn01612122\nn01612275\nn01612476\nn01612628\nn01613177\nn01613294\nn01613615\nn01613807\nn01614038\nn01614343\nn01614556\nn01614925\nn01615121\nn01615303\nn01615458\nn01615703\nn01616086\nn01616318\nn01617095\nn01617443\nn01617766\nn01618082\nn01618503\nn01618922\nn01619310\nn01619536\nn01619835\nn01620135\nn01620414\nn01620735\nn01621127\nn01621635\nn01622120\nn01622352\nn01622483\nn01622779\nn01622959\nn01623110\nn01623425\nn01623615\nn01623706\nn01623880\nn01624115\nn01624212\nn01624305\nn01624537\nn01624833\nn01625562\nn01627424\nn01628770\nn01629276\nn01629819\nn01629962\nn01630284\nn01630670\nn01630901\nn01631354\nn01631512\nn01631663\nn01632458\nn01632601\nn01632777\nn01633406\nn01633781\nn01635027\nn01636127\nn01636352\nn01636829\nn01637615\nn01639765\nn01640846\nn01641206\nn01641391\nn01641577\nn01641739\nn01642257\nn01642539\nn01643507\nn01643896\nn01644373\nn01644900\nn01645776\nn01646292\nn01646388\nn01646555\nn01646648\nn01646802\nn01646902\nn01647303\nn01647640\nn01648139\nn01648620\nn01649170\nn01650167\nn01650690\nn01650901\nn01651059\nn01652026\nn01653223\nn01654637\nn01661091\nn01662622\nn01662784\nn01663401\nn01663782\nn01664065\nn01664369\nn01664492\nn01664674\nn01664990\nn01665541\nn01665932\nn01666228\nn01666585\nn01667114\nn01667432\nn01667778\nn01668091\nn01668436\nn01668665\nn01668892\nn01669191\nn01669372\nn01669654\nn01670092\nn01670535\nn01670802\nn01671125\nn01671479\nn01672032\nn01673282\nn01674464\nn01674990\nn01675722\nn01677366\nn01677747\nn01678043\nn01678343\nn01679307\nn01679626\nn01679962\nn01680264\nn01680478\nn01680655\nn01680813\nn01681328\nn01681653\nn01681940\nn01682172\nn01682435\nn01682714\nn01683558\nn01684133\nn01684578\nn01685808\nn01686044\nn01687665\nn01687978\nn01688243\nn01689081\nn01689811\nn01690149\nn01690466\nn01691217\nn01692333\nn01692523\nn01693175\nn01693334\nn01693783\nn01694178\nn01694709\nn01694955\nn01695060\nn01696633
\nn01697178\nn01697457\nn01697611\nn01698434\nn01698640\nn01698782\nn01699040\nn01699675\nn01701859\nn01704323\nn01713764\nn01726692\nn01727646\nn01728572\nn01728920\nn01729322\nn01729977\nn01730185\nn01730307\nn01730563\nn01730812\nn01730960\nn01731545\nn01731941\nn01732244\nn01732614\nn01732789\nn01733466\nn01733757\nn01733957\nn01734104\nn01734418\nn01734637\nn01734808\nn01735189\nn01735439\nn01735577\nn01737021\nn01737472\nn01737728\nn01737875\nn01738065\nn01738601\nn01739381\nn01740131\nn01740551\nn01741232\nn01741562\nn01741943\nn01742172\nn01742821\nn01743086\nn01743605\nn01743936\nn01744100\nn01744270\nn01744401\nn01745125\nn01745484\nn01745902\nn01746359\nn01747589\nn01747885\nn01748264\nn01748389\nn01748686\nn01748906\nn01749244\nn01749582\nn01749742\nn01749939\nn01750167\nn01750437\nn01751036\nn01751472\nn01751748\nn01752165\nn01752585\nn01752736\nn01753032\nn01753180\nn01753488\nn01753959\nn01754370\nn01754533\nn01754876\nn01755581\nn01755740\nn01756089\nn01756291\nn01756508\nn01756733\nn01757115\nn01757343\nn01757677\nn01757901\nn01758141\nn01758757\nn01767661\nn01768244\nn01769347\nn01770081\nn01770393\nn01770795\nn01771417\nn01772222\nn01772664\nn01773157\nn01773549\nn01773797\nn01774384\nn01774750\nn01775062\nn01775370\nn01776313\nn01777304\nn01778217\nn01779148\nn01779629\nn01782209\nn01782516\nn01784675\nn01785667\nn01786646\nn01787835\nn01789740\nn01790711\nn01791107\nn01791463\nn01791625\nn01791954\nn01792042\nn01792158\nn01792429\nn01792640\nn01792955\nn01793085\nn01793249\nn01793435\nn01793715\nn01794158\nn01794344\nn01794651\nn01795088\nn01795545\nn01795735\nn01796340\nn01796519\nn01796729\nn01797020\nn01797307\nn01797601\nn01797886\nn01798168\nn01798484\nn01798706\nn01798839\nn01799679\nn01800424\nn01801876\nn01803078\nn01803362\nn01804163\nn01804478\nn01804653\nn01805070\nn01805801\nn01806143\nn01806297\nn01806364\nn01806467\nn01806567\nn01806847\nn01807105\nn01807496\nn01807828\nn01808140\nn01809106\nn01809371\nn01809752\nn01810268\nn018119
09\nn01812337\nn01812662\nn01812866\nn01813088\nn01813385\nn01813532\nn01813948\nn01814217\nn01814370\nn01814755\nn01814921\nn01815601\nn01816887\nn01817263\nn01817346\nn01817953\nn01818299\nn01818515\nn01818832\nn01819115\nn01819313\nn01819465\nn01819734\nn01820052\nn01820348\nn01820546\nn01821076\nn01821203\nn01821869\nn01822300\nn01823013\nn01823414\nn01824035\nn01824575\nn01825278\nn01826364\nn01826680\nn01827403\nn01827793\nn01828096\nn01828556\nn01828970\nn01829413\nn01829869\nn01830042\nn01830915\nn01832167\nn01832493\nn01833805\nn01834177\nn01834540\nn01835276\nn01837072\nn01838598\nn01839086\nn01839330\nn01839598\nn01839750\nn01840120\nn01840775\nn01841102\nn01841288\nn01841441\nn01841679\nn01842235\nn01842504\nn01843065\nn01843383\nn01843719\nn01844231\nn01844551\nn01844917\nn01845132\nn01846331\nn01847000\nn01847089\nn01847170\nn01847253\nn01847407\nn01847806\nn01847978\nn01848123\nn01848323\nn01848453\nn01848555\nn01848648\nn01848840\nn01848976\nn01849157\nn01849466\nn01849676\nn01849863\nn01850192\nn01850373\nn01850553\nn01850873\nn01851038\nn01851207\nn01851375\nn01851573\nn01851731\nn01851895\nn01852142\nn01852329\nn01852400\nn01852671\nn01852861\nn01853195\nn01853498\nn01853666\nn01853870\nn01854415\nn01854700\nn01854838\nn01855032\nn01855188\nn01855476\nn01855672\nn01856072\nn01856155\nn01856380\nn01856553\nn01856890\nn01857079\nn01857325\nn01857512\nn01857632\nn01857851\nn01858281\nn01858441\nn01858780\nn01858845\nn01858906\nn01859190\nn01859325\nn01859496\nn01859689\nn01859852\nn01860002\nn01860187\nn01860497\nn01861778\nn01862399\nn01871265\nn01871875\nn01872401\nn01872772\nn01873310\nn01874434\nn01874928\nn01875313\nn01876034\nn01876326\nn01877134\nn01877606\nn01877812\nn01878061\nn01878929\nn01879217\nn01879509\nn01881171\nn01882125\nn01882714\nn01883070\nn01884834\nn01885498\nn01886756\nn01887474\nn01887623\nn01887787\nn01887896\nn01888045\nn01888181\nn01888264\nn01888411\nn01889520\nn01891633\nn01892030\nn01893825\nn01896844\nn01897536\nn0189
9894\nn01900150\nn01903346\nn01904029\nn01904806\nn01904886\nn01905661\nn01906749\nn01909906\nn01910747\nn01913166\nn01914609\nn01914830\nn01915700\nn01915811\nn01916187\nn01916388\nn01916481\nn01916925\nn01917289\nn01917611\nn01917882\nn01918744\nn01922303\nn01923025\nn01924916\nn01930112\nn01934440\nn01935395\nn01937909\nn01938454\nn01940736\nn01942869\nn01943087\nn01943899\nn01944118\nn01944390\nn01944812\nn01944955\nn01945143\nn01945685\nn01946630\nn01947396\nn01947997\nn01948573\nn01949085\nn01950731\nn01951274\nn01951613\nn01953361\nn01953594\nn01953762\nn01955084\nn01955933\nn01956344\nn01956481\nn01956764\nn01957335\nn01958038\nn01958346\nn01958531\nn01959492\nn01959985\nn01960177\nn01960459\nn01961985\nn01963317\nn01963571\nn01964049\nn01964271\nn01964441\nn01965529\nn01965889\nn01968897\nn01970164\nn01970667\nn01971280\nn01972541\nn01974773\nn01976146\nn01976868\nn01976957\nn01978287\nn01978455\nn01979874\nn01980166\nn01981276\nn01982068\nn01982347\nn01982650\nn01983481\nn01984245\nn01984695\nn01985128\nn01985493\nn01986214\nn01986806\nn01987545\nn01990007\nn01990800\nn01991028\nn01991520\nn01992773\nn01994910\nn01998183\nn01998741\nn01999186\nn02000954\nn02002075\nn02002556\nn02002724\nn02003037\nn02003204\nn02003577\nn02003839\nn02004131\nn02004492\nn02004855\nn02005399\nn02005790\nn02006063\nn02006364\nn02006656\nn02006985\nn02007284\nn02007558\nn02008041\nn02008497\nn02008643\nn02008796\nn02009229\nn02009380\nn02009508\nn02009750\nn02009912\nn02010272\nn02010453\nn02010728\nn02011016\nn02011281\nn02011460\nn02011805\nn02011943\nn02012185\nn02012849\nn02013177\nn02013567\nn02013706\nn02014237\nn02014524\nn02014941\nn02015357\nn02015554\nn02016066\nn02016358\nn02016659\nn02016816\nn02016956\nn02017213\nn02017475\nn02017725\nn02018027\nn02018207\nn02018368\nn02018795\nn02019190\nn02019929\nn02021050\nn02021795\nn02022684\nn02023341\nn02023855\nn02023992\nn02024185\nn02024479\nn02024763\nn02025043\nn02025239\nn02025389\nn02026059\nn02026629\nn02026948\nn02
027075\nn02027357\nn02027492\nn02027897\nn02028035\nn02028175\nn02028342\nn02028451\nn02028727\nn02028900\nn02029087\nn02029378\nn02029706\nn02030035\nn02030287\nn02030837\nn02030996\nn02031585\nn02031934\nn02032222\nn02032355\nn02032480\nn02033041\nn02033208\nn02033561\nn02033779\nn02034129\nn02034295\nn02034661\nn02034971\nn02035210\nn02036053\nn02036711\nn02037110\nn02037464\nn02037869\nn02038466\nn02038993\nn02039171\nn02040266\nn02041085\nn02041246\nn02041678\nn02041875\nn02042046\nn02042180\nn02042472\nn02042759\nn02043063\nn02043333\nn02043808\nn02044178\nn02044517\nn02044778\nn02044908\nn02045369\nn02045596\nn02045864\nn02046171\nn02046759\nn02046939\nn02047045\nn02047260\nn02047411\nn02047517\nn02047614\nn02047975\nn02048115\nn02048353\nn02049088\nn02050004\nn02050313\nn02050442\nn02050586\nn02050809\nn02051059\nn02051845\nn02052204\nn02052365\nn02052775\nn02053083\nn02053425\nn02053584\nn02054036\nn02054502\nn02054711\nn02055107\nn02055658\nn02055803\nn02056228\nn02056570\nn02056728\nn02057035\nn02057330\nn02057731\nn02058221\nn02058594\nn02059162\nn02060133\nn02060411\nn02060569\nn02060889\nn02062017\nn02062430\nn02062744\nn02063224\nn02063662\nn02064338\nn02064816\nn02065026\nn02065263\nn02065407\nn02066245\nn02066707\nn02067240\nn02068541\nn02068974\nn02069412\nn02069701\nn02069974\nn02070174\nn02070430\nn02071294\nn02071636\nn02072040\nn02072798\nn02073831\nn02074367\nn02075296\nn02075612\nn02075927\nn02076196\nn02076402\nn02076779\nn02077152\nn02077384\nn02077658\nn02077787\nn02077923\nn02078292\nn02078574\nn02078738\nn02079005\nn02079389\nn02079851\nn02080146\nn02080415\nn02080713\nn02081571\nn02081798\nn02082791\nn02083346\nn02083672\nn02084071\nn02084732\nn02084861\nn02085272\nn02085374\nn02085620\nn02085936\nn02086079\nn02086240\nn02086478\nn02086646\nn02086753\nn02086910\nn02087046\nn02087122\nn02087394\nn02087551\nn02088094\nn02088238\nn02088364\nn02088466\nn02088632\nn02088839\nn02089232\nn02089468\nn02089555\nn02089973\nn02090379\nn02090475\nn
02090622\nn02090721\nn02090827\nn02091032\nn02091134\nn02091244\nn02091467\nn02091831\nn02092002\nn02092339\nn02092468\nn02093056\nn02093256\nn02093428\nn02093647\nn02093754\nn02093859\nn02093991\nn02094114\nn02094258\nn02094433\nn02094562\nn02094721\nn02094931\nn02095050\nn02095314\nn02095412\nn02095570\nn02095727\nn02095889\nn02096051\nn02096177\nn02096294\nn02096437\nn02096585\nn02096756\nn02097047\nn02097130\nn02097209\nn02097298\nn02097474\nn02097658\nn02097786\nn02098105\nn02098286\nn02098413\nn02098550\nn02098806\nn02098906\nn02099029\nn02099267\nn02099429\nn02099601\nn02099712\nn02099849\nn02099997\nn02100236\nn02100399\nn02100583\nn02100735\nn02100877\nn02101006\nn02101108\nn02101388\nn02101556\nn02101861\nn02102040\nn02102177\nn02102318\nn02102480\nn02102605\nn02102973\nn02103406\nn02103841\nn02104029\nn02104280\nn02104365\nn02104523\nn02104882\nn02105056\nn02105162\nn02105251\nn02105412\nn02105505\nn02105641\nn02105855\nn02106030\nn02106166\nn02106382\nn02106550\nn02106662\nn02106854\nn02106966\nn02107142\nn02107312\nn02107420\nn02107574\nn02107683\nn02107908\nn02108000\nn02108089\nn02108254\nn02108422\nn02108551\nn02108672\nn02108915\nn02109047\nn02109525\nn02109811\nn02109961\nn02110063\nn02110185\nn02110341\nn02110627\nn02110806\nn02110958\nn02111129\nn02111277\nn02111500\nn02111626\nn02111889\nn02112018\nn02112137\nn02112350\nn02112497\nn02112826\nn02113023\nn02113186\nn02113335\nn02113624\nn02113712\nn02113799\nn02114100\nn02114367\nn02114548\nn02114712\nn02114855\nn02115096\nn02115335\nn02115641\nn02115913\nn02116738\nn02117135\nn02117512\nn02117900\nn02118333\nn02119022\nn02119477\nn02119634\nn02119789\nn02120079\nn02120505\nn02120997\nn02121620\nn02121808\nn02122298\nn02122430\nn02122510\nn02122580\nn02122725\nn02122878\nn02122948\nn02123045\nn02123159\nn02123242\nn02123394\nn02123478\nn02123597\nn02123785\nn02123917\nn02124075\nn02124313\nn02124484\nn02124623\nn02125010\nn02125081\nn02125311\nn02125494\nn02126028\nn02126139\nn02126640\nn02126787\
nn02127052\nn02127292\nn02127381\nn02127482\nn02127586\nn02127678\nn02127808\nn02128385\nn02128669\nn02128757\nn02128925\nn02129165\nn02129463\nn02129604\nn02129837\nn02129923\nn02129991\nn02130308\nn02131653\nn02132136\nn02132466\nn02132580\nn02132788\nn02133161\nn02133704\nn02134084\nn02134418\nn02135220\nn02136103\nn02136452\nn02137015\nn02137549\nn02138441\nn02138647\nn02138777\nn02139199\nn02139671\nn02140049\nn02146371\nn02146700\nn02147173\nn02147328\nn02147591\nn02147947\nn02149420\nn02150482\nn02152740\nn02152881\nn02153109\nn02156871\nn02157206\nn02159955\nn02160947\nn02161338\nn02161457\nn02162561\nn02163297\nn02164464\nn02165105\nn02165456\nn02165877\nn02166567\nn02166826\nn02167151\nn02167820\nn02168245\nn02168699\nn02169023\nn02169497\nn02169705\nn02169974\nn02172182\nn02172518\nn02172870\nn02173113\nn02173373\nn02174001\nn02174659\nn02175014\nn02175569\nn02175916\nn02176261\nn02176439\nn02176747\nn02177972\nn02180875\nn02181235\nn02181477\nn02181724\nn02183096\nn02183857\nn02184473\nn02188699\nn02190166\nn02190790\nn02191773\nn02191979\nn02192252\nn02192513\nn02195526\nn02195819\nn02196119\nn02196344\nn02197185\nn02197689\nn02198859\nn02200198\nn02200509\nn02200850\nn02201000\nn02201626\nn02202006\nn02203152\nn02204907\nn02205219\nn02205673\nn02206856\nn02207179\nn02207345\nn02207805\nn02208280\nn02208498\nn02208848\nn02209111\nn02209354\nn02209624\nn02210427\nn02211444\nn02211627\nn02212062\nn02212958\nn02213107\nn02213239\nn02213543\nn02213663\nn02213788\nn02214341\nn02214773\nn02215621\nn02215770\nn02216211\nn02216365\nn02217563\nn02218371\nn02219486\nn02220518\nn02220804\nn02221083\nn02221414\nn02222035\nn02223266\nn02226429\nn02226821\nn02226970\nn02227247\nn02227966\nn02228341\nn02229156\nn02229544\nn02229765\nn02230187\nn02231052\nn02231487\nn02231803\nn02233338\nn02233943\nn02234355\nn02234848\nn02236044\nn02236241\nn02236355\nn02236896\nn02237581\nn02239774\nn02240068\nn02240517\nn02241426\nn02242137\nn02243562\nn02244797\nn02246628\nn0224721
6\nn02250822\nn02251775\nn02252226\nn02254697\nn02256656\nn02257284\nn02257985\nn02258198\nn02259212\nn02259708\nn02262449\nn02262803\nn02264232\nn02264363\nn02264885\nn02266050\nn02266864\nn02268148\nn02268443\nn02268853\nn02270623\nn02272871\nn02273392\nn02274024\nn02274259\nn02274822\nn02275560\nn02275773\nn02276078\nn02276258\nn02276355\nn02276749\nn02276902\nn02277094\nn02277268\nn02277742\nn02278024\nn02278210\nn02278839\nn02278980\nn02279257\nn02279637\nn02279972\nn02280649\nn02281015\nn02281136\nn02281406\nn02281787\nn02282257\nn02282385\nn02282553\nn02282903\nn02283077\nn02283201\nn02283951\nn02284611\nn02284884\nn02285801\nn02286089\nn02287004\nn02287799\nn02288268\nn02288789\nn02289610\nn02291748\nn02292692\nn02295064\nn02295390\nn02297442\nn02298218\nn02298541\nn02299157\nn02299505\nn02299846\nn02300797\nn02301935\nn02302244\nn02302459\nn02302620\nn02302969\nn02303284\nn02304036\nn02304432\nn02305085\nn02305929\nn02307325\nn02307681\nn02308139\nn02308471\nn02308735\nn02309242\nn02309337\nn02310334\nn02310585\nn02310717\nn02310941\nn02311060\nn02311617\nn02312006\nn02312427\nn02312640\nn02313008\nn02315487\nn02316707\nn02317335\nn02317781\nn02318167\nn02319095\nn02319308\nn02319555\nn02321170\nn02321529\nn02322047\nn02323449\nn02324045\nn02324431\nn02324514\nn02324587\nn02324850\nn02325366\nn02325722\nn02326432\nn02326862\nn02327028\nn02327656\nn02327842\nn02328150\nn02328429\nn02329401\nn02330245\nn02331046\nn02332156\nn02332755\nn02333190\nn02333546\nn02333909\nn02334201\nn02336641\nn02337001\nn02338145\nn02339376\nn02341475\nn02341974\nn02342885\nn02343320\nn02343772\nn02346627\nn02348173\nn02350105\nn02352591\nn02353861\nn02355227\nn02355477\nn02356381\nn02356612\nn02356798\nn02356977\nn02357111\nn02357401\nn02357585\nn02357911\nn02358091\nn02358390\nn02358584\nn02358890\nn02359047\nn02359324\nn02359556\nn02359915\nn02360282\nn02361337\nn02361587\nn02361706\nn02363005\nn02363245\nn02363351\nn02364520\nn02364673\nn02364840\nn02365108\nn02365480\nn02366
002\nn02366959\nn02367492\nn02369293\nn02370806\nn02372584\nn02372952\nn02373336\nn02374149\nn02374451\nn02375302\nn02376542\nn02376679\nn02376791\nn02376918\nn02377063\nn02377181\nn02377291\nn02377388\nn02377480\nn02377603\nn02377703\nn02378541\nn02378969\nn02379081\nn02379183\nn02379329\nn02379430\nn02379630\nn02379908\nn02380052\nn02380335\nn02380464\nn02380583\nn02380745\nn02380875\nn02381004\nn02381261\nn02381364\nn02381460\nn02381609\nn02381831\nn02382039\nn02382132\nn02382204\nn02382338\nn02382437\nn02382635\nn02382750\nn02382850\nn02382948\nn02383231\nn02385214\nn02386014\nn02386141\nn02386224\nn02386310\nn02386496\nn02386853\nn02386968\nn02387093\nn02387254\nn02387346\nn02387722\nn02387887\nn02388143\nn02388276\nn02388735\nn02388832\nn02388917\nn02389026\nn02389128\nn02389261\nn02389346\nn02389559\nn02389779\nn02390015\nn02390101\nn02390454\nn02390640\nn02391049\nn02391234\nn02391373\nn02391508\nn02391994\nn02392434\nn02392824\nn02393161\nn02393580\nn02393807\nn02393940\nn02394477\nn02395003\nn02395406\nn02395694\nn02396014\nn02396088\nn02396427\nn02397096\nn02397529\nn02397744\nn02398521\nn02399000\nn02402010\nn02402175\nn02402425\nn02403003\nn02403231\nn02403325\nn02403454\nn02403740\nn02403920\nn02404186\nn02404432\nn02404573\nn02404906\nn02405101\nn02405302\nn02405440\nn02405799\nn02405929\nn02406174\nn02406432\nn02406533\nn02406647\nn02406749\nn02406859\nn02407071\nn02407276\nn02407390\nn02407521\nn02407625\nn02407959\nn02408429\nn02408817\nn02409508\nn02410011\nn02410509\nn02410702\nn02410900\nn02411206\nn02411705\nn02411999\nn02412080\nn02412210\nn02412440\nn02412629\nn02413050\nn02413131\nn02413593\nn02414209\nn02414290\nn02414578\nn02414763\nn02415253\nn02415435\nn02415577\nn02415829\nn02416104\nn02416519\nn02416820\nn02416880\nn02416964\nn02417070\nn02417387\nn02417534\nn02417663\nn02417914\nn02418465\nn02419336\nn02419634\nn02419796\nn02420509\nn02420828\nn02421136\nn02421449\nn02421792\nn02422106\nn02422391\nn02422699\nn02423022\nn02423218\nn024
23589\nn02424085\nn02424305\nn02424486\nn02424909\nn02425228\nn02425887\nn02426481\nn02426813\nn02427032\nn02427470\nn02427576\nn02427724\nn02428349\nn02428508\nn02429456\nn02430045\nn02430559\nn02430830\nn02431122\nn02431337\nn02431441\nn02431628\nn02431785\nn02431976\nn02432291\nn02432511\nn02432704\nn02432983\nn02433318\nn02433546\nn02433925\nn02434190\nn02434954\nn02437136\nn02437312\nn02437482\nn02437616\nn02438173\nn02438272\nn02438580\nn02439033\nn02439398\nn02441326\nn02441942\nn02442336\nn02442845\nn02443015\nn02443114\nn02443346\nn02443484\nn02444819\nn02445004\nn02445171\nn02445394\nn02445715\nn02446206\nn02447366\nn02447762\nn02448060\nn02449350\nn02450295\nn02453108\nn02454379\nn02454794\nn02456962\nn02457408\nn02457945\nn02458135\nn02460009\nn02460451\nn02461128\nn02461830\nn02469248\nn02469472\nn02469914\nn02470238\nn02470325\nn02472293\nn02472987\nn02473307\nn02474777\nn02475078\nn02475669\nn02480153\nn02480495\nn02480855\nn02481103\nn02481235\nn02481366\nn02481500\nn02481823\nn02482286\nn02482474\nn02482650\nn02483362\nn02483708\nn02484322\nn02484473\nn02484975\nn02485536\nn02486261\nn02486410\nn02486657\nn02486908\nn02487347\nn02487547\nn02487675\nn02487847\nn02488291\nn02488415\nn02488702\nn02488894\nn02489166\nn02490219\nn02490811\nn02491107\nn02492035\nn02492660\nn02493509\nn02493793\nn02494079\nn02496913\nn02497673\nn02499022\nn02499316\nn02499808\nn02500267\nn02501583\nn02503517\nn02504013\nn02504458\nn02508021\nn02508213\nn02508742\nn02509197\nn02509515\nn02509815\nn02510455\nn02512053\nn02512830\nn02512938\nn02514041\nn02516188\nn02517442\nn02518324\nn02519148\nn02519686\nn02519862\nn02520147\nn02522399\nn02523427\nn02524202\nn02525382\nn02526121\nn02527057\nn02527271\nn02527622\nn02530421\nn02530831\nn02530999\nn02532028\nn02532602\nn02533209\nn02533834\nn02534734\nn02535163\nn02535258\nn02535537\nn02535759\nn02536165\nn02536456\nn02536864\nn02537085\nn02537319\nn02537525\nn02537716\nn02538010\nn02538216\nn02540412\nn02541687\nn02542432\nn0
2543565\nn02548247\nn02549248\nn02549989\nn02555863\nn02556846\nn02557182\nn02557318\nn02557591\nn02557749\nn02557909\nn02560110\nn02561108\nn02561381\nn02561514\nn02561661\nn02562315\nn02562796\nn02563182\nn02563648\nn02563792\nn02564270\nn02564720\nn02565072\nn02565324\nn02565573\nn02568087\nn02568447\nn02568959\nn02569484\nn02570164\nn02570838\nn02572196\nn02572484\nn02573704\nn02574271\nn02576575\nn02576906\nn02577403\nn02578771\nn02578928\nn02579303\nn02579928\nn02580336\nn02580679\nn02580830\nn02581957\nn02583890\nn02584145\nn02584449\nn02585872\nn02586543\nn02587618\nn02588286\nn02589623\nn02590094\nn02590702\nn02592055\nn02593019\nn02595702\nn02596067\nn02596381\nn02597608\nn02598573\nn02598878\nn02599052\nn02599347\nn02599557\nn02601344\nn02603317\nn02603540\nn02605316\nn02605703\nn02605936\nn02606052\nn02606384\nn02607072\nn02607201\nn02607470\nn02607862\nn02610066\nn02610664\nn02611561\nn02613181\nn02616851\nn02618827\nn02619165\nn02619550\nn02620167\nn02624167\nn02624551\nn02624807\nn02624987\nn02625258\nn02625612\nn02625851\nn02626265\nn02626762\nn02627292\nn02627532\nn02627835\nn02628062\nn02629230\nn02630281\nn02630615\nn02630739\nn02631041\nn02631330\nn02631475\nn02639087\nn02639605\nn02640242\nn02640626\nn02640857\nn02641379\nn02643112\nn02643566\nn02643836\nn02644113\nn02649546\nn02650050\nn02652132\nn02653145\nn02653497\nn02654112\nn02654425\nn02654745\nn02655020\nn02655848\nn02656032\nn02656670\nn02657368\nn02657694\nn02658531\nn02660208\nn02660640\nn02663211\nn02666196\nn02666501\nn02666624\nn02666943\nn02667093\nn02667244\nn02667379\nn02667478\nn02667576\nn02669295\nn02669534\nn02669723\nn02670186\nn02670382\nn02670683\nn02670935\nn02672371\nn02672831\nn02675219\nn02676566\nn02676938\nn02677718\nn02678897\nn02679257\nn02680110\nn02680512\nn02680754\nn02681392\nn02682311\nn02682569\nn02682922\nn02683323\nn02683454\nn02683558\nn02683791\nn02685082\nn02685995\nn02686121\nn02686227\nn02686379\nn02686568\nn02687172\nn02687423\nn02687821\nn02687992\n
n02688273\nn02688443\nn02689144\nn02689274\nn02689434\nn02689748\nn02690373\nn02691156\nn02692086\nn02692232\nn02692877\nn02693246\nn02694045\nn02694426\nn02694662\nn02695627\nn02696165\nn02697221\nn02697675\nn02698634\nn02699494\nn02699629\nn02699770\nn02699915\nn02700064\nn02700258\nn02700895\nn02701002\nn02702989\nn02703275\nn02704645\nn02704792\nn02704949\nn02705201\nn02705429\nn02705944\nn02706806\nn02708093\nn02708433\nn02708555\nn02708711\nn02709101\nn02709367\nn02709637\nn02709908\nn02710044\nn02710201\nn02710324\nn02710429\nn02710600\nn02713003\nn02713364\nn02714751\nn02715229\nn02715513\nn02715712\nn02720048\nn02723165\nn02725872\nn02726017\nn02726210\nn02726305\nn02726681\nn02727016\nn02727141\nn02727426\nn02728440\nn02729837\nn02729965\nn02730930\nn02731398\nn02731629\nn02731900\nn02732072\nn02732572\nn02732827\nn02733213\nn02733524\nn02734725\nn02734835\nn02735361\nn02735538\nn02735688\nn02736798\nn02737660\nn02738031\nn02738535\nn02738741\nn02738859\nn02739427\nn02739550\nn02739668\nn02739889\nn02740300\nn02740533\nn02740764\nn02741475\nn02742322\nn02742468\nn02742753\nn02744323\nn02744844\nn02745611\nn02746365\nn02746595\nn02747177\nn02747672\nn02747802\nn02749479\nn02749953\nn02750070\nn02750169\nn02751215\nn02751295\nn02752496\nn02752615\nn02752810\nn02753044\nn02753394\nn02754103\nn02754656\nn02755140\nn02755529\nn02755823\nn02756098\nn02756977\nn02757061\nn02757337\nn02757462\nn02757714\nn02757810\nn02758134\nn02758863\nn02758960\nn02759257\nn02759387\nn02759963\nn02760099\nn02760199\nn02760429\nn02760658\nn02760855\nn02761206\nn02761392\nn02761557\nn02761696\nn02761834\nn02762371\nn02762508\nn02763306\nn02763604\nn02763901\nn02764044\nn02764398\nn02764505\nn02764779\nn02764935\nn02766320\nn02766534\nn02766792\nn02767038\nn02767147\nn02767433\nn02767665\nn02767956\nn02768114\nn02768226\nn02768655\nn02768973\nn02769075\nn02769290\nn02769669\nn02769748\nn02769963\nn02770211\nn02770721\nn02770830\nn02771004\nn02771166\nn02771286\nn02771750\nn02772101
\nn02772435\nn02772700\nn02773037\nn02773838\nn02774152\nn02774630\nn02774921\nn02775039\nn02775178\nn02775483\nn02775897\nn02776007\nn02776205\nn02776631\nn02776825\nn02776978\nn02777100\nn02777292\nn02777734\nn02778294\nn02778456\nn02778669\nn02779435\nn02780704\nn02780815\nn02781121\nn02781338\nn02782093\nn02782602\nn02782681\nn02782778\nn02783161\nn02783324\nn02783459\nn02783900\nn02783994\nn02784124\nn02785648\nn02786058\nn02786198\nn02786331\nn02786736\nn02786837\nn02787435\nn02787622\nn02788021\nn02788148\nn02788572\nn02789487\nn02790669\nn02790823\nn02790996\nn02791124\nn02791270\nn02791665\nn02792409\nn02792552\nn02793089\nn02793199\nn02793495\nn02793842\nn02794008\nn02794156\nn02794664\nn02795169\nn02795528\nn02795670\nn02796207\nn02796318\nn02796995\nn02797295\nn02797535\nn02797692\nn02799071\nn02799175\nn02799323\nn02799897\nn02800213\nn02800497\nn02800675\nn02801184\nn02801450\nn02801525\nn02801823\nn02801938\nn02802215\nn02802426\nn02802544\nn02802721\nn02802990\nn02803349\nn02803539\nn02803666\nn02803934\nn02804123\nn02804252\nn02804414\nn02804515\nn02804610\nn02805983\nn02806088\nn02806379\nn02806530\nn02807133\nn02807523\nn02807616\nn02807731\nn02808185\nn02808304\nn02808440\nn02809105\nn02809241\nn02810270\nn02810471\nn02810782\nn02811059\nn02811204\nn02811350\nn02811468\nn02811618\nn02811719\nn02811936\nn02812201\nn02812949\nn02813252\nn02813399\nn02813544\nn02813645\nn02813752\nn02814116\nn02814428\nn02814533\nn02814774\nn02814860\nn02815749\nn02815834\nn02815950\nn02816656\nn02816768\nn02817031\nn02817516\nn02818135\nn02818832\nn02820210\nn02820556\nn02820675\nn02821202\nn02821415\nn02821627\nn02821943\nn02822064\nn02822220\nn02822579\nn02823124\nn02823335\nn02823428\nn02823510\nn02823586\nn02823750\nn02823848\nn02823964\nn02824058\nn02824319\nn02824448\nn02825153\nn02825442\nn02825657\nn02825961\nn02826068\nn02826589\nn02826886\nn02827606\nn02828299\nn02828427\nn02828884\nn02829596\nn02831237\nn02831335\nn02831595\nn02831724\nn02831894\nn028337
93\nn02834397\nn02834506\nn02834778\nn02835271\nn02835412\nn02835724\nn02835829\nn02835915\nn02836035\nn02836174\nn02836392\nn02837789\nn02837887\nn02838345\nn02838728\nn02839110\nn02839351\nn02839592\nn02839910\nn02840134\nn02840245\nn02840619\nn02841187\nn02841315\nn02841506\nn02842573\nn02842809\nn02843029\nn02843158\nn02843276\nn02843553\nn02843684\nn02844307\nn02846141\nn02846511\nn02846733\nn02847631\nn02847852\nn02848216\nn02848523\nn02848921\nn02849154\nn02849885\nn02850732\nn02850950\nn02851099\nn02851939\nn02852043\nn02852173\nn02852360\nn02853016\nn02854378\nn02854532\nn02854739\nn02854926\nn02855089\nn02855390\nn02855701\nn02855925\nn02856237\nn02857477\nn02857644\nn02858304\nn02859184\nn02859343\nn02859443\nn02859955\nn02860415\nn02860640\nn02860847\nn02861022\nn02861147\nn02861387\nn02861886\nn02862048\nn02862916\nn02863014\nn02863426\nn02863536\nn02863750\nn02864504\nn02864593\nn02865351\nn02865665\nn02865931\nn02866386\nn02866578\nn02867715\nn02867966\nn02868638\nn02868975\nn02869155\nn02869249\nn02869563\nn02869737\nn02869837\nn02870526\nn02870676\nn02870880\nn02871005\nn02871147\nn02871314\nn02871439\nn02871525\nn02871824\nn02871963\nn02872333\nn02872529\nn02872752\nn02873520\nn02873733\nn02873839\nn02874086\nn02874442\nn02874537\nn02874750\nn02876084\nn02876326\nn02876657\nn02877266\nn02877765\nn02877962\nn02878222\nn02878425\nn02878628\nn02879087\nn02879309\nn02879718\nn02880189\nn02880393\nn02880546\nn02880842\nn02880940\nn02881193\nn02881757\nn02881906\nn02882190\nn02882301\nn02882647\nn02882894\nn02883004\nn02883205\nn02883344\nn02884994\nn02885108\nn02885338\nn02885462\nn02885882\nn02886321\nn02886434\nn02887079\nn02887209\nn02887489\nn02887970\nn02888270\nn02889425\nn02889646\nn02890188\nn02890351\nn02890513\nn02890662\nn02890940\nn02891188\nn02891788\nn02892201\nn02892304\nn02892499\nn02892767\nn02892948\nn02893608\nn02893692\nn02893941\nn02894158\nn02894337\nn02894605\nn02895154\nn02895328\nn02895438\nn02896442\nn02897097\nn02897820\nn0289
8269\nn02898369\nn02898585\nn02898711\nn02899439\nn02900160\nn02900705\nn02901114\nn02901259\nn02901377\nn02901793\nn02902079\nn02902687\nn02902916\nn02903126\nn02903204\nn02903852\nn02904233\nn02904640\nn02904803\nn02904927\nn02905036\nn02905152\nn02906734\nn02907082\nn02907391\nn02907656\nn02907873\nn02908217\nn02908773\nn02909285\nn02909870\nn02910145\nn02910353\nn02910542\nn02910864\nn02911332\nn02911485\nn02912065\nn02912319\nn02912557\nn02912894\nn02913152\nn02914991\nn02915904\nn02916179\nn02916350\nn02916936\nn02917067\nn02917377\nn02917521\nn02917607\nn02917964\nn02918112\nn02918330\nn02918595\nn02918831\nn02918964\nn02919148\nn02919414\nn02919792\nn02919890\nn02920083\nn02920259\nn02920369\nn02920658\nn02921029\nn02921195\nn02921756\nn02921884\nn02922292\nn02922578\nn02922798\nn02923682\nn02924116\nn02925009\nn02925107\nn02925519\nn02925666\nn02926426\nn02926591\nn02927161\nn02927764\nn02927887\nn02928049\nn02928299\nn02928608\nn02929289\nn02929582\nn02930080\nn02930214\nn02930645\nn02930766\nn02931148\nn02931294\nn02931417\nn02931836\nn02932019\nn02932400\nn02932523\nn02932693\nn02932891\nn02933112\nn02933340\nn02933462\nn02933649\nn02933990\nn02934168\nn02934451\nn02935017\nn02935387\nn02935658\nn02935891\nn02936176\nn02936281\nn02936402\nn02936570\nn02936714\nn02937958\nn02938886\nn02939185\nn02939866\nn02940385\nn02940570\nn02942349\nn02942460\nn02942699\nn02943241\nn02943871\nn02943964\nn02944075\nn02944146\nn02944459\nn02944579\nn02946127\nn02946270\nn02946348\nn02946509\nn02946824\nn02946921\nn02947660\nn02947818\nn02948072\nn02948403\nn02948557\nn02949202\nn02949542\nn02950256\nn02950632\nn02950826\nn02950943\nn02951358\nn02951585\nn02951703\nn02951843\nn02952109\nn02952237\nn02952374\nn02952485\nn02952585\nn02952674\nn02953197\nn02953455\nn02954163\nn02954340\nn02954938\nn02955065\nn02955247\nn02955540\nn02955767\nn02956699\nn02956795\nn02956883\nn02957008\nn02957135\nn02957755\nn02958343\nn02959942\nn02960352\nn02960690\nn02960903\nn02961035\nn02
961225\nn02961451\nn02961544\nn02962061\nn02962200\nn02962414\nn02962843\nn02963159\nn02963302\nn02963503\nn02963692\nn02963821\nn02963987\nn02964843\nn02965216\nn02965300\nn02965783\nn02966193\nn02966545\nn02966687\nn02967294\nn02967626\nn02967782\nn02968074\nn02968333\nn02968473\nn02969010\nn02969323\nn02970408\nn02970534\nn02970685\nn02970849\nn02971167\nn02971356\nn02971473\nn02971579\nn02971691\nn02972397\nn02973017\nn02973236\nn02973805\nn02973904\nn02974003\nn02974348\nn02974697\nn02975212\nn02976123\nn02976249\nn02976350\nn02976455\nn02976939\nn02977058\nn02977330\nn02977438\nn02977619\nn02977936\nn02978055\nn02978367\nn02978478\nn02978753\nn02978881\nn02979074\nn02979186\nn02979290\nn02979399\nn02979836\nn02980036\nn02980441\nn02981024\nn02981321\nn02981792\nn02981911\nn02982232\nn02982416\nn02982515\nn02982599\nn02983189\nn02983357\nn02984061\nn02984203\nn02984469\nn02985963\nn02986160\nn02987379\nn02987492\nn02988066\nn02988156\nn02988304\nn02988486\nn02988679\nn02988963\nn02989099\nn02990373\nn02991302\nn02991847\nn02992032\nn02992211\nn02992368\nn02992529\nn02992795\nn02993194\nn02993368\nn02994573\nn02995345\nn02995871\nn02995998\nn02997391\nn02997607\nn02997910\nn02998003\nn02998563\nn02998841\nn02999138\nn02999410\nn02999936\nn03000134\nn03000247\nn03000684\nn03001115\nn03001627\nn03002096\nn03002341\nn03002711\nn03002816\nn03002948\nn03003091\nn03004275\nn03004824\nn03005033\nn03005147\nn03005285\nn03006626\nn03007130\nn03007444\nn03007591\nn03008177\nn03008976\nn03009794\nn03010473\nn03010656\nn03010795\nn03010915\nn03011018\nn03011355\nn03011741\nn03012013\nn03012373\nn03012897\nn03013438\nn03013580\nn03013850\nn03014440\nn03014705\nn03015149\nn03015254\nn03015478\nn03015851\nn03016389\nn03016609\nn03016737\nn03016868\nn03016953\nn03017070\nn03017168\nn03018209\nn03018349\nn03018712\nn03019434\nn03019685\nn03019806\nn03019938\nn03020034\nn03020416\nn03020692\nn03021228\nn03024064\nn03025165\nn03025250\nn03026506\nn03026907\nn03027001\nn03027108\nn
03027250\nn03027625\nn03028079\nn03028596\nn03028785\nn03029197\nn03029445\nn03029925\nn03030262\nn03030353\nn03030557\nn03030880\nn03031012\nn03031152\nn03031422\nn03032252\nn03032453\nn03032811\nn03033362\nn03033986\nn03034244\nn03034405\nn03034663\nn03035252\nn03035832\nn03036022\nn03036469\nn03037404\nn03037709\nn03038281\nn03038480\nn03038685\nn03038870\nn03039015\nn03039259\nn03039493\nn03039827\nn03039947\nn03040376\nn03041114\nn03041449\nn03041632\nn03041810\nn03042139\nn03042490\nn03042697\nn03043423\nn03043693\nn03043958\nn03044934\nn03045228\nn03045337\nn03045698\nn03046029\nn03046133\nn03046257\nn03046802\nn03046921\nn03047052\nn03047690\nn03047799\nn03047941\nn03048883\nn03049066\nn03049782\nn03049924\nn03050453\nn03050546\nn03050655\nn03050864\nn03051041\nn03051249\nn03051396\nn03051540\nn03054901\nn03055418\nn03055857\nn03057021\nn03057541\nn03057636\nn03057920\nn03058107\nn03058603\nn03059685\nn03061211\nn03061345\nn03061505\nn03061674\nn03061893\nn03062015\nn03062122\nn03062245\nn03062336\nn03062985\nn03063073\nn03063199\nn03063338\nn03063485\nn03063599\nn03063689\nn03063968\nn03064250\nn03064350\nn03064758\nn03064935\nn03065243\nn03065424\nn03066359\nn03066849\nn03067093\nn03067212\nn03067339\nn03067518\nn03068181\nn03068998\nn03069752\nn03070059\nn03070193\nn03071021\nn03071160\nn03072201\nn03072440\nn03072682\nn03073296\nn03073545\nn03073694\nn03073977\nn03074380\nn03074855\nn03075097\nn03075370\nn03075634\nn03075768\nn03075946\nn03077616\nn03077741\nn03078287\nn03078802\nn03078995\nn03079136\nn03079230\nn03079494\nn03080497\nn03080633\nn03081986\nn03082280\nn03082656\nn03082807\nn03082979\nn03084420\nn03084834\nn03085013\nn03085219\nn03085602\nn03085915\nn03086457\nn03086580\nn03086670\nn03086868\nn03087069\nn03087245\nn03087366\nn03087816\nn03088389\nn03088580\nn03089624\nn03089753\nn03089879\nn03090000\nn03090172\nn03091044\nn03091374\nn03092166\nn03092314\nn03092656\nn03092883\nn03094159\nn03094503\nn03095699\nn03096960\nn03097362\nn03097535\
nn03097673\nn03098140\nn03098688\nn03098959\nn03099147\nn03099274\nn03099454\nn03099945\nn03100240\nn03100346\nn03100490\nn03100897\nn03101156\nn03101517\nn03101664\nn03101796\nn03101986\nn03102371\nn03102654\nn03103396\nn03103563\nn03104512\nn03105088\nn03105306\nn03105467\nn03106898\nn03107046\nn03107488\nn03108455\nn03108853\nn03109150\nn03109253\nn03109693\nn03109881\nn03110669\nn03111041\nn03111177\nn03111296\nn03112719\nn03112869\nn03113152\nn03113657\nn03113835\nn03114236\nn03114379\nn03114504\nn03115180\nn03115400\nn03115762\nn03115897\nn03116530\nn03116767\nn03118969\nn03119203\nn03119396\nn03119510\nn03120491\nn03120778\nn03121298\nn03121431\nn03121897\nn03122073\nn03122202\nn03122295\nn03123553\nn03123809\nn03123917\nn03124043\nn03124170\nn03124474\nn03124590\nn03125057\nn03125729\nn03125870\nn03126385\nn03126580\nn03126707\nn03127203\nn03127408\nn03127747\nn03127925\nn03128085\nn03128248\nn03128427\nn03128519\nn03129001\nn03129471\nn03129753\nn03129848\nn03130761\nn03131574\nn03131669\nn03131967\nn03132076\nn03132261\nn03132666\nn03132776\nn03133050\nn03133415\nn03133878\nn03134739\nn03134853\nn03135030\nn03135532\nn03136369\nn03137473\nn03138344\nn03138669\nn03139464\nn03140126\nn03140292\nn03140431\nn03140652\nn03141065\nn03141327\nn03141455\nn03141702\nn03141823\nn03142679\nn03145147\nn03145522\nn03145719\nn03146219\nn03146687\nn03146777\nn03146846\nn03147280\nn03147509\nn03148324\nn03148727\nn03149135\nn03149686\nn03150232\nn03150511\nn03151077\nn03152303\nn03154073\nn03154895\nn03155178\nn03156279\nn03156767\nn03157348\nn03158186\nn03158885\nn03159535\nn03159640\nn03160309\nn03160740\nn03161450\nn03163222\nn03163381\nn03164344\nn03164605\nn03164722\nn03165096\nn03165466\nn03165616\nn03166514\nn03167978\nn03168107\nn03168217\nn03169176\nn03170635\nn03170872\nn03171228\nn03171356\nn03171635\nn03172038\nn03173270\nn03173387\nn03173929\nn03174450\nn03174731\nn03175081\nn03175189\nn03175457\nn03176386\nn03176594\nn03176763\nn03177059\nn03177165\nn0317800
0\nn03178430\nn03178674\nn03179701\nn03179910\nn03180011\nn03180384\nn03180504\nn03180865\nn03180969\nn03181293\nn03183080\nn03186285\nn03186818\nn03187037\nn03187268\nn03187595\nn03188531\nn03188725\nn03189083\nn03191286\nn03192543\nn03193107\nn03193260\nn03193423\nn03193597\nn03195332\nn03195959\nn03196062\nn03196217\nn03196598\nn03196990\nn03197201\nn03197337\nn03198500\nn03199647\nn03199775\nn03199901\nn03200231\nn03200357\nn03200539\nn03200701\nn03200906\nn03201035\nn03201208\nn03201529\nn03201638\nn03201776\nn03202354\nn03202940\nn03204306\nn03204558\nn03205458\nn03205574\nn03205669\nn03206158\nn03206282\nn03206718\nn03206908\nn03207305\nn03207630\nn03207743\nn03207835\nn03207941\nn03208556\nn03208938\nn03209359\nn03209477\nn03209910\nn03210245\nn03210372\nn03210552\nn03211117\nn03211789\nn03212114\nn03212811\nn03213538\nn03213715\nn03213826\nn03214253\nn03214582\nn03215508\nn03216402\nn03216710\nn03216828\nn03218198\nn03219010\nn03219135\nn03219483\nn03219612\nn03219859\nn03219966\nn03220237\nn03220513\nn03220692\nn03221059\nn03221351\nn03221540\nn03221720\nn03222176\nn03222318\nn03222516\nn03223162\nn03223299\nn03223553\nn03223686\nn03224603\nn03224753\nn03225108\nn03225777\nn03225988\nn03226254\nn03226375\nn03226538\nn03226880\nn03227184\nn03227317\nn03228254\nn03228365\nn03228692\nn03228967\nn03229244\nn03231160\nn03231368\nn03231819\nn03232309\nn03232543\nn03233123\nn03233624\nn03233744\nn03233905\nn03234164\nn03234952\nn03235042\nn03235180\nn03235327\nn03235796\nn03236093\nn03236217\nn03236423\nn03236735\nn03237340\nn03237416\nn03237839\nn03237992\nn03238131\nn03238286\nn03238586\nn03239054\nn03239259\nn03239726\nn03240140\nn03240683\nn03240892\nn03241093\nn03241335\nn03241496\nn03242506\nn03243218\nn03244047\nn03244231\nn03244775\nn03244919\nn03245724\nn03245889\nn03246454\nn03246933\nn03247083\nn03249342\nn03249569\nn03250089\nn03250279\nn03250405\nn03250847\nn03250952\nn03251533\nn03251766\nn03251932\nn03252637\nn03253279\nn03253796\nn03253886\nn03254
046\nn03254189\nn03254374\nn03254862\nn03255030\nn03255899\nn03256032\nn03256166\nn03256788\nn03256928\nn03257210\nn03257586\nn03258330\nn03258577\nn03258905\nn03259009\nn03259280\nn03259401\nn03259505\nn03260849\nn03261019\nn03261603\nn03261776\nn03262072\nn03262248\nn03262519\nn03262717\nn03262809\nn03262932\nn03263076\nn03264906\nn03266371\nn03266749\nn03267113\nn03267468\nn03267821\nn03268142\nn03268311\nn03268645\nn03268790\nn03268918\nn03269203\nn03269401\nn03270165\nn03270854\nn03271030\nn03271574\nn03272010\nn03272125\nn03272239\nn03272383\nn03272562\nn03272810\nn03272940\nn03273061\nn03273551\nn03273740\nn03273913\nn03274265\nn03274435\nn03275681\nn03276696\nn03277459\nn03277771\nn03278248\nn03278914\nn03279508\nn03280644\nn03281145\nn03281673\nn03282295\nn03282401\nn03283221\nn03284308\nn03284743\nn03284886\nn03284981\nn03285578\nn03285912\nn03287351\nn03287733\nn03288003\nn03288500\nn03288886\nn03289660\nn03289985\nn03290096\nn03290195\nn03290653\nn03291413\nn03291741\nn03291819\nn03291963\nn03292475\nn03292603\nn03293741\nn03293863\nn03294048\nn03294604\nn03294833\nn03295012\nn03295246\nn03296081\nn03296328\nn03296478\nn03297103\nn03297226\nn03297495\nn03297644\nn03297735\nn03298089\nn03298716\nn03298858\nn03300216\nn03300443\nn03301291\nn03301568\nn03301833\nn03301940\nn03302671\nn03302938\nn03303217\nn03303831\nn03306385\nn03307037\nn03307792\nn03308152\nn03308481\nn03309110\nn03309356\nn03309465\nn03309687\nn03309808\nn03313333\nn03314227\nn03314608\nn03314780\nn03314884\nn03315644\nn03316105\nn03316406\nn03317788\nn03317889\nn03318136\nn03318294\nn03318865\nn03318983\nn03319457\nn03319745\nn03320046\nn03320262\nn03320421\nn03320519\nn03320959\nn03321103\nn03321563\nn03321954\nn03322570\nn03322704\nn03322836\nn03322940\nn03323096\nn03323703\nn03324928\nn03325088\nn03325403\nn03325584\nn03325691\nn03325941\nn03326660\nn03326795\nn03326948\nn03327133\nn03327234\nn03327553\nn03327691\nn03329302\nn03329536\nn03329663\nn03331077\nn03331599\nn03332005\nn033
32271\nn03332393\nn03332989\nn03333129\nn03333252\nn03333610\nn03333711\nn03334291\nn03334382\nn03334912\nn03335030\nn03336282\nn03336575\nn03337140\nn03337383\nn03338821\nn03339529\nn03339643\nn03340723\nn03341153\nn03341297\nn03341606\nn03342015\nn03342127\nn03342262\nn03342657\nn03343354\nn03343560\nn03343737\nn03343853\nn03344305\nn03344393\nn03344642\nn03345487\nn03345837\nn03346135\nn03346455\nn03347037\nn03347617\nn03348868\nn03349469\nn03349771\nn03349892\nn03350204\nn03350602\nn03351434\nn03351979\nn03352628\nn03353951\nn03354207\nn03354903\nn03355768\nn03355925\nn03356858\nn03356982\nn03357267\nn03357716\nn03358172\nn03358380\nn03358726\nn03359137\nn03359285\nn03359436\nn03359566\nn03360300\nn03360431\nn03360622\nn03361297\nn03361380\nn03361550\nn03362890\nn03363363\nn03363549\nn03363749\nn03364008\nn03364599\nn03365231\nn03365374\nn03365592\nn03365991\nn03366823\nn03366974\nn03367059\nn03367410\nn03367545\nn03368352\nn03369276\nn03370387\nn03371875\nn03372029\nn03372549\nn03373237\nn03373611\nn03373943\nn03374372\nn03374473\nn03374649\nn03374838\nn03375171\nn03375329\nn03375575\nn03376159\nn03376279\nn03376595\nn03376938\nn03378005\nn03378174\nn03379051\nn03379204\nn03379343\nn03379828\nn03380724\nn03380867\nn03381126\nn03382292\nn03382413\nn03382856\nn03383099\nn03384352\nn03384891\nn03385557\nn03386011\nn03386544\nn03386726\nn03386870\nn03387653\nn03388043\nn03388183\nn03388323\nn03388549\nn03389611\nn03389761\nn03389889\nn03390075\nn03390786\nn03390983\nn03391301\nn03391770\nn03392741\nn03393017\nn03393761\nn03393912\nn03394272\nn03394480\nn03394649\nn03394916\nn03395256\nn03395514\nn03395859\nn03396074\nn03396580\nn03396654\nn03397087\nn03397266\nn03397532\nn03397947\nn03398153\nn03398228\nn03399677\nn03399761\nn03399971\nn03400231\nn03400972\nn03401129\nn03401279\nn03402188\nn03402369\nn03402941\nn03403643\nn03404149\nn03404251\nn03404360\nn03404449\nn03405111\nn03405265\nn03405595\nn03405725\nn03406966\nn03407369\nn03407865\nn03408054\nn03408444\nn0
3409297\nn03409393\nn03409591\nn03410423\nn03410571\nn03410740\nn03410938\nn03411079\nn03412058\nn03413684\nn03414029\nn03414162\nn03414676\nn03415252\nn03415486\nn03415749\nn03416094\nn03416489\nn03416640\nn03416775\nn03416900\nn03417042\nn03417202\nn03417345\nn03417749\nn03417970\nn03418158\nn03418242\nn03418402\nn03418618\nn03418915\nn03419014\nn03420345\nn03420801\nn03421117\nn03421324\nn03421485\nn03421669\nn03422072\nn03422771\nn03423306\nn03423479\nn03423568\nn03423719\nn03423877\nn03424325\nn03424489\nn03424630\nn03424862\nn03425241\nn03425325\nn03425413\nn03425595\nn03425769\nn03426134\nn03426285\nn03427202\nn03427296\nn03428090\nn03428226\nn03428349\nn03429003\nn03429137\nn03429288\nn03429682\nn03429914\nn03430091\nn03430313\nn03430418\nn03430551\nn03431243\nn03431745\nn03432061\nn03432129\nn03433877\nn03434188\nn03434285\nn03435593\nn03435743\nn03435991\nn03436075\nn03436182\nn03436417\nn03436549\nn03436891\nn03437430\nn03437741\nn03437829\nn03437941\nn03438071\nn03438257\nn03438661\nn03438863\nn03439348\nn03439814\nn03440216\nn03440682\nn03441112\nn03441345\nn03441582\nn03442597\nn03442756\nn03443005\nn03443149\nn03443371\nn03443912\nn03444034\nn03445326\nn03445617\nn03445777\nn03445924\nn03446070\nn03446268\nn03446832\nn03447075\nn03447358\nn03447447\nn03447721\nn03448590\nn03448956\nn03449309\nn03449451\nn03450230\nn03450516\nn03450734\nn03450974\nn03451120\nn03451711\nn03451798\nn03452267\nn03452449\nn03452594\nn03452741\nn03453231\nn03453443\nn03454110\nn03454211\nn03454442\nn03454536\nn03454707\nn03454885\nn03455488\nn03456024\nn03456186\nn03456299\nn03456447\nn03456548\nn03456665\nn03457008\nn03457686\nn03457902\nn03458271\nn03459328\nn03459775\nn03460040\nn03460147\nn03460297\nn03461288\nn03461385\nn03461988\nn03462110\nn03463381\nn03463666\nn03464053\nn03465151\nn03465426\nn03465500\nn03465718\nn03466493\nn03466600\nn03466839\nn03467068\nn03467517\nn03467796\nn03467984\nn03468696\nn03468821\nn03469175\nn03469493\nn03469903\nn03470629\nn03471190\n
n03472232\nn03473227\nn03474779\nn03474896\nn03475581\nn03475823\nn03476083\nn03476313\nn03476684\nn03476991\nn03477512\nn03478589\nn03478756\nn03478907\nn03479121\nn03479397\nn03479502\nn03480579\nn03480719\nn03481172\nn03482252\nn03482405\nn03482523\nn03482877\nn03483230\nn03483316\nn03483823\nn03483971\nn03484083\nn03484487\nn03484576\nn03484809\nn03484931\nn03485198\nn03485309\nn03485407\nn03485794\nn03487090\nn03487331\nn03487444\nn03487533\nn03487642\nn03487774\nn03487886\nn03488188\nn03488438\nn03488887\nn03489162\nn03490006\nn03490119\nn03490884\nn03491032\nn03491988\nn03492250\nn03492542\nn03492922\nn03494278\nn03494537\nn03494706\nn03495039\nn03495258\nn03495570\nn03496296\nn03496612\nn03496892\nn03497352\nn03497657\nn03498441\nn03498662\nn03498781\nn03498962\nn03499354\nn03499468\nn03499907\nn03500209\nn03500389\nn03500699\nn03501152\nn03501614\nn03502200\nn03502331\nn03502509\nn03503233\nn03503477\nn03503997\nn03504205\nn03504723\nn03505133\nn03505383\nn03505504\nn03505667\nn03506028\nn03506184\nn03506370\nn03506560\nn03506727\nn03506880\nn03507241\nn03507458\nn03507963\nn03508101\nn03508881\nn03509394\nn03509608\nn03510244\nn03511175\nn03511333\nn03512147\nn03512911\nn03513137\nn03513376\nn03514451\nn03514693\nn03514894\nn03516367\nn03516844\nn03516996\nn03517647\nn03517760\nn03517899\nn03518135\nn03518305\nn03518445\nn03518943\nn03519081\nn03519387\nn03520493\nn03521076\nn03521544\nn03521675\nn03521899\nn03522003\nn03522100\nn03522634\nn03523134\nn03523987\nn03524150\nn03524574\nn03524745\nn03525074\nn03525454\nn03527149\nn03527444\nn03527565\nn03528263\nn03528523\nn03528901\nn03529175\nn03529444\nn03529629\nn03529860\nn03530511\nn03530642\nn03530910\nn03531281\nn03532342\nn03532672\nn03532919\nn03533014\nn03534580\nn03534776\nn03535024\nn03535780\nn03536122\nn03536761\nn03537241\nn03537412\nn03538037\nn03538179\nn03538406\nn03538634\nn03539433\nn03539546\nn03539678\nn03540090\nn03540267\nn03540595\nn03540914\nn03541091\nn03541269\nn03541537\nn03541696
\nn03541923\nn03542333\nn03542605\nn03542860\nn03543012\nn03543112\nn03543254\nn03543394\nn03543603\nn03543735\nn03543945\nn03544143\nn03544238\nn03544360\nn03545150\nn03545470\nn03545756\nn03546112\nn03546235\nn03546340\nn03547054\nn03547229\nn03548086\nn03548402\nn03548626\nn03549199\nn03549473\nn03549589\nn03549732\nn03549897\nn03550153\nn03550289\nn03551395\nn03551582\nn03552749\nn03553019\nn03553248\nn03554460\nn03555006\nn03555426\nn03555564\nn03555662\nn03556679\nn03556992\nn03557270\nn03557360\nn03557590\nn03557692\nn03558176\nn03558404\nn03558633\nn03558739\nn03559999\nn03560430\nn03561047\nn03563200\nn03563460\nn03565288\nn03565830\nn03566193\nn03566730\nn03567066\nn03568117\nn03569293\nn03571280\nn03571625\nn03571942\nn03572107\nn03572321\nn03574243\nn03574555\nn03574816\nn03577090\nn03577672\nn03578055\nn03578251\nn03578656\nn03579538\nn03580518\nn03580845\nn03581125\nn03582508\nn03582959\nn03584254\nn03584400\nn03584829\nn03585073\nn03585438\nn03585682\nn03586219\nn03586631\nn03587205\nn03588841\nn03588951\nn03589513\nn03589791\nn03590306\nn03590588\nn03590841\nn03590932\nn03592245\nn03592669\nn03592773\nn03592931\nn03593122\nn03593526\nn03594148\nn03594523\nn03594734\nn03594945\nn03595264\nn03595409\nn03595523\nn03595614\nn03595860\nn03596285\nn03596543\nn03597916\nn03598151\nn03598299\nn03598515\nn03598930\nn03599486\nn03600285\nn03600475\nn03600722\nn03600977\nn03601442\nn03601638\nn03601840\nn03602081\nn03602883\nn03603442\nn03603594\nn03603722\nn03604156\nn03604311\nn03604400\nn03604843\nn03605598\nn03605722\nn03605915\nn03606251\nn03607029\nn03607659\nn03607923\nn03609235\nn03609397\nn03610098\nn03610418\nn03610524\nn03610682\nn03612010\nn03612814\nn03612965\nn03613294\nn03613592\nn03614007\nn03614532\nn03614782\nn03615300\nn03615406\nn03615563\nn03615655\nn03615790\nn03616428\nn03616763\nn03616979\nn03617095\nn03617312\nn03617480\nn03618101\nn03618982\nn03619196\nn03619275\nn03619396\nn03619650\nn03619793\nn03619890\nn03620052\nn03620967\nn036210
49\nn03621377\nn03622058\nn03622526\nn03622839\nn03622931\nn03623198\nn03623338\nn03623556\nn03624134\nn03624400\nn03625355\nn03625539\nn03625646\nn03625943\nn03626115\nn03626760\nn03627232\nn03627954\nn03628215\nn03628511\nn03629100\nn03629231\nn03629520\nn03630262\nn03630383\nn03631177\nn03631922\nn03632577\nn03632729\nn03632852\nn03633091\nn03633886\nn03634034\nn03635032\nn03635108\nn03635330\nn03635668\nn03636248\nn03636649\nn03637181\nn03637318\nn03637898\nn03638883\nn03639077\nn03639497\nn03640850\nn03640988\nn03641569\nn03642444\nn03642806\nn03643149\nn03643253\nn03643491\nn03643737\nn03644378\nn03644858\nn03645011\nn03645577\nn03646020\nn03646148\nn03646296\nn03646916\nn03647520\nn03648431\nn03649161\nn03649674\nn03649797\nn03649909\nn03650551\nn03651388\nn03651843\nn03652100\nn03652729\nn03652932\nn03653110\nn03653220\nn03653583\nn03653740\nn03653833\nn03653975\nn03654576\nn03655072\nn03655720\nn03656484\nn03656957\nn03657121\nn03657511\nn03658185\nn03658858\nn03659292\nn03659686\nn03659809\nn03659950\nn03660124\nn03660909\nn03661043\nn03661340\nn03662601\nn03662719\nn03662887\nn03663531\nn03664943\nn03665366\nn03665924\nn03666362\nn03666591\nn03666917\nn03667235\nn03667552\nn03667664\nn03667829\nn03668067\nn03668279\nn03668488\nn03668803\nn03669534\nn03669886\nn03670208\nn03671914\nn03672827\nn03673027\nn03673450\nn03674270\nn03674440\nn03674731\nn03675235\nn03676087\nn03676483\nn03676623\nn03676759\nn03677115\nn03678558\nn03678729\nn03679384\nn03679712\nn03680355\nn03680512\nn03680734\nn03680858\nn03680942\nn03682487\nn03682877\nn03683079\nn03683457\nn03683606\nn03683708\nn03683995\nn03684143\nn03684224\nn03684611\nn03684823\nn03685307\nn03685820\nn03686130\nn03686924\nn03687137\nn03687928\nn03688192\nn03688405\nn03688605\nn03688943\nn03689157\nn03690279\nn03690473\nn03690938\nn03691459\nn03691817\nn03692379\nn03692522\nn03693293\nn03693474\nn03693707\nn03693860\nn03694639\nn03695452\nn03695753\nn03695857\nn03696065\nn03696301\nn03696568\nn03697007\nn0369
7552\nn03697913\nn03698360\nn03698604\nn03698723\nn03698815\nn03699280\nn03699591\nn03699975\nn03700963\nn03701391\nn03701790\nn03703730\nn03703862\nn03703945\nn03704549\nn03706229\nn03706653\nn03708036\nn03708843\nn03709206\nn03709363\nn03709823\nn03710193\nn03710637\nn03710721\nn03711044\nn03711999\nn03712111\nn03712337\nn03713069\nn03713436\nn03714235\nn03715114\nn03715386\nn03715669\nn03715892\nn03716887\nn03716966\nn03717131\nn03717285\nn03717447\nn03717622\nn03718212\nn03718335\nn03718458\nn03718581\nn03718789\nn03718935\nn03719053\nn03719343\nn03719743\nn03720163\nn03720891\nn03721047\nn03721252\nn03721384\nn03721590\nn03722007\nn03722288\nn03723267\nn03723781\nn03724066\nn03724417\nn03724538\nn03724623\nn03724756\nn03724870\nn03725035\nn03725600\nn03725717\nn03726760\nn03726993\nn03727067\nn03727465\nn03727605\nn03727837\nn03727946\nn03728437\nn03728982\nn03729308\nn03729826\nn03730153\nn03730334\nn03730494\nn03730893\nn03731019\nn03731483\nn03731695\nn03732020\nn03732114\nn03732458\nn03733131\nn03733281\nn03733644\nn03733805\nn03733925\nn03735637\nn03735963\nn03736064\nn03736470\nn03736970\nn03738066\nn03738241\nn03738472\nn03739518\nn03739693\nn03742019\nn03742115\nn03743016\nn03743279\nn03743902\nn03744276\nn03744840\nn03745146\nn03745571\nn03746005\nn03746155\nn03746330\nn03746486\nn03748162\nn03749807\nn03751269\nn03751458\nn03751757\nn03752185\nn03753077\nn03757604\nn03758089\nn03759243\nn03759661\nn03759954\nn03760310\nn03760671\nn03760944\nn03761084\nn03762332\nn03762434\nn03762602\nn03763968\nn03764276\nn03764736\nn03764822\nn03764995\nn03765561\nn03765934\nn03766044\nn03766322\nn03766508\nn03766935\nn03767112\nn03767203\nn03767459\nn03767745\nn03767966\nn03768916\nn03769610\nn03769881\nn03770085\nn03770316\nn03770439\nn03770679\nn03770954\nn03772077\nn03772269\nn03772584\nn03773035\nn03773504\nn03774327\nn03774461\nn03775071\nn03775199\nn03775388\nn03775546\nn03775636\nn03775747\nn03775847\nn03776460\nn03776877\nn03777568\nn03777754\nn03778817\nn03
779128\nn03781244\nn03781683\nn03781787\nn03782006\nn03782190\nn03782794\nn03783430\nn03784270\nn03784896\nn03785016\nn03785237\nn03785721\nn03786194\nn03786313\nn03786621\nn03786715\nn03786901\nn03787032\nn03787523\nn03788047\nn03788195\nn03788365\nn03788498\nn03788601\nn03788914\nn03789171\nn03789946\nn03790230\nn03790512\nn03790755\nn03790953\nn03791053\nn03791235\nn03792048\nn03792334\nn03792526\nn03792782\nn03792972\nn03793489\nn03793850\nn03794056\nn03794136\nn03794798\nn03795123\nn03795269\nn03795758\nn03795976\nn03796401\nn03796522\nn03796605\nn03797182\nn03797264\nn03797390\nn03797896\nn03798061\nn03798442\nn03799876\nn03800933\nn03801353\nn03801533\nn03801671\nn03801760\nn03801880\nn03802007\nn03802393\nn03802643\nn03803284\nn03804744\nn03805180\nn03805280\nn03805725\nn03809312\nn03809603\nn03810952\nn03811295\nn03811444\nn03811847\nn03811965\nn03812924\nn03813078\nn03814639\nn03814817\nn03814906\nn03815149\nn03815482\nn03815615\nn03816005\nn03816136\nn03816530\nn03816849\nn03817191\nn03817647\nn03818343\nn03819336\nn03819448\nn03819595\nn03819994\nn03820318\nn03820728\nn03821518\nn03822171\nn03822504\nn03822656\nn03822767\nn03823111\nn03823216\nn03823312\nn03824381\nn03824713\nn03825080\nn03825271\nn03825788\nn03826039\nn03826186\nn03827536\nn03828020\nn03829954\nn03831382\nn03832144\nn03832673\nn03834040\nn03835197\nn03836062\nn03836451\nn03836906\nn03836976\nn03837422\nn03837606\nn03837698\nn03837869\nn03838298\nn03838899\nn03839424\nn03839671\nn03840681\nn03840823\nn03841143\nn03841290\nn03841666\nn03842012\nn03842156\nn03842377\nn03842986\nn03843316\nn03843438\nn03843555\nn03844045\nn03844233\nn03844673\nn03844815\nn03845190\nn03846100\nn03846234\nn03846431\nn03846677\nn03847471\nn03847823\nn03848168\nn03848348\nn03849679\nn03849814\nn03850053\nn03850245\nn03850492\nn03851341\nn03851787\nn03852280\nn03852688\nn03853924\nn03854065\nn03854421\nn03854506\nn03854722\nn03854815\nn03855214\nn03855333\nn03855604\nn03855756\nn03856012\nn03856465\nn03857291\nn
03857687\nn03857828\nn03858085\nn03858183\nn03858418\nn03859000\nn03859170\nn03859280\nn03859495\nn03859608\nn03859958\nn03860404\nn03861271\nn03861430\nn03861842\nn03862676\nn03862862\nn03863108\nn03863262\nn03863923\nn03864139\nn03864356\nn03864692\nn03865371\nn03865557\nn03865949\nn03866082\nn03868242\nn03868406\nn03868643\nn03868863\nn03870105\nn03870672\nn03870980\nn03871083\nn03871371\nn03871524\nn03871628\nn03871724\nn03873416\nn03873699\nn03874138\nn03874293\nn03874487\nn03874599\nn03874823\nn03875218\nn03875806\nn03875955\nn03876231\nn03877351\nn03877472\nn03877674\nn03877845\nn03878066\nn03878211\nn03878828\nn03878963\nn03879705\nn03880323\nn03880531\nn03882611\nn03882960\nn03883054\nn03883385\nn03883524\nn03884397\nn03884778\nn03884926\nn03885028\nn03885194\nn03885293\nn03885535\nn03885669\nn03885788\nn03885904\nn03886053\nn03886641\nn03886762\nn03887185\nn03887330\nn03887697\nn03888257\nn03888605\nn03889503\nn03889726\nn03889871\nn03890093\nn03890233\nn03890514\nn03891051\nn03891251\nn03891332\nn03891538\nn03892178\nn03892425\nn03892557\nn03892728\nn03894051\nn03894379\nn03894677\nn03895866\nn03896103\nn03896233\nn03896419\nn03896526\nn03897943\nn03898129\nn03898271\nn03898395\nn03898633\nn03899768\nn03899933\nn03900393\nn03900979\nn03901229\nn03901750\nn03901974\nn03902125\nn03902482\nn03902756\nn03903424\nn03903733\nn03903868\nn03904060\nn03904183\nn03904433\nn03904657\nn03904782\nn03904909\nn03905947\nn03906224\nn03906463\nn03906997\nn03908204\nn03908618\nn03908714\nn03909020\nn03909160\nn03909406\nn03911513\nn03911658\nn03911767\nn03911866\nn03912218\nn03913343\nn03914106\nn03914337\nn03914438\nn03914583\nn03914831\nn03915118\nn03915437\nn03915900\nn03916031\nn03916470\nn03916720\nn03917198\nn03917814\nn03918480\nn03918737\nn03919096\nn03919289\nn03919430\nn03920288\nn03920641\nn03920737\nn03920867\nn03923379\nn03923918\nn03924069\nn03924679\nn03926148\nn03927091\nn03927299\nn03927539\nn03928116\nn03928814\nn03929660\nn03929855\nn03930313\nn03930630\
nn03931765\nn03931885\nn03933933\nn03934042\nn03934229\nn03934311\nn03934565\nn03934656\nn03935116\nn03935234\nn03935335\nn03936466\nn03937543\nn03937835\nn03937931\nn03938037\nn03938244\nn03938401\nn03938522\nn03938725\nn03939178\nn03939677\nn03939844\nn03940256\nn03941013\nn03941231\nn03941417\nn03941684\nn03942813\nn03942920\nn03943115\nn03943266\nn03943714\nn03943920\nn03944024\nn03944138\nn03944341\nn03946076\nn03946162\nn03947466\nn03947798\nn03947888\nn03948242\nn03948459\nn03948830\nn03948950\nn03949145\nn03949317\nn03950228\nn03950537\nn03950899\nn03952576\nn03953901\nn03954393\nn03954731\nn03955296\nn03955489\nn03956157\nn03956623\nn03956785\nn03956922\nn03957315\nn03957420\nn03957762\nn03957991\nn03958227\nn03958752\nn03959014\nn03959701\nn03960374\nn03960490\nn03961711\nn03961939\nn03962852\nn03963198\nn03963294\nn03963645\nn03964495\nn03964611\nn03965456\nn03965907\nn03966206\nn03966976\nn03967270\nn03967396\nn03967562\nn03967942\nn03968293\nn03968581\nn03968728\nn03970156\nn03970546\nn03971218\nn03973285\nn03973402\nn03973628\nn03973839\nn03973945\nn03974070\nn03974915\nn03975035\nn03975657\nn03975788\nn03976467\nn03976657\nn03977592\nn03977966\nn03978421\nn03978686\nn03978815\nn03978966\nn03980026\nn03980478\nn03980874\nn03981340\nn03981566\nn03981760\nn03981924\nn03982232\nn03982331\nn03982430\nn03982642\nn03982895\nn03983396\nn03983612\nn03984234\nn03984381\nn03984643\nn03984759\nn03985069\nn03985232\nn03985441\nn03985881\nn03986224\nn03986355\nn03986562\nn03986704\nn03986949\nn03987266\nn03987376\nn03987990\nn03988170\nn03989665\nn03990474\nn03991062\nn03991202\nn03991646\nn03991837\nn03992325\nn03992436\nn03992509\nn03992703\nn03993053\nn03993180\nn03993403\nn03993703\nn03994008\nn03994614\nn03995265\nn03995372\nn03995535\nn03995856\nn03996145\nn03996416\nn03996849\nn03998194\nn03998333\nn03999160\nn03999992\nn04000311\nn04000592\nn04000998\nn04001265\nn04001499\nn04001845\nn04002262\nn04003241\nn04003856\nn04004210\nn04004475\nn04004767\nn0400499
0\nn04005197\nn04005630\nn04008385\nn04008634\nn04009552\nn04009801\nn04011827\nn04012084\nn04012482\nn04013729\nn04015908\nn04016240\nn04016576\nn04016684\nn04016846\nn04018155\nn04018667\nn04019101\nn04019541\nn04019696\nn04020087\nn04020298\nn04020912\nn04021028\nn04021798\nn04022332\nn04023695\nn04023962\nn04024274\nn04024862\nn04024983\nn04025508\nn04026053\nn04026180\nn04026417\nn04026813\nn04026918\nn04027023\nn04027706\nn04028074\nn04028221\nn04028315\nn04028581\nn04028764\nn04029647\nn04029734\nn04030274\nn04030518\nn04032603\nn04033425\nn04033901\nn04033995\nn04034262\nn04035836\nn04035912\nn04036303\nn04037220\nn04037443\nn04037964\nn04038231\nn04038338\nn04038440\nn04038727\nn04039381\nn04039742\nn04039848\nn04040247\nn04040373\nn04040759\nn04041069\nn04041243\nn04041408\nn04041544\nn04041747\nn04042358\nn04043411\nn04043733\nn04044307\nn04044498\nn04044716\nn04045255\nn04045397\nn04045644\nn04046091\nn04046277\nn04046400\nn04046590\nn04046974\nn04047401\nn04048441\nn04049303\nn04049405\nn04049585\nn04049753\nn04050066\nn04050313\nn04050933\nn04051549\nn04051825\nn04052442\nn04052658\nn04052757\nn04053508\nn04053677\nn04054361\nn04054670\nn04055180\nn04056180\nn04056413\nn04056932\nn04057047\nn04057215\nn04057981\nn04058096\nn04058239\nn04058594\nn04059157\nn04059516\nn04059947\nn04060647\nn04061681\nn04061793\nn04061969\nn04062428\nn04063154\nn04063373\nn04063868\nn04064401\nn04064747\nn04064862\nn04065272\nn04065464\nn04065789\nn04066270\nn04067472\nn04067658\nn04067818\nn04067921\nn04068441\nn04068601\nn04069276\nn04069434\nn04070003\nn04070207\nn04070415\nn04070727\nn04071263\nn04071393\nn04072193\nn04072551\nn04072960\nn04073948\nn04074185\nn04074963\nn04075291\nn04075468\nn04075715\nn04075916\nn04076284\nn04076713\nn04077430\nn04078574\nn04079244\nn04079933\nn04080138\nn04080454\nn04080705\nn04080833\nn04081281\nn04081699\nn04082562\nn04082710\nn04082886\nn04083309\nn04083800\nn04084889\nn04086273\nn04086446\nn04087432\nn04087709\nn04087826\nn04089
376\nn04089666\nn04089836\nn04089976\nn04090263\nn04091097\nn04091693\nn04093625\nn04093775\nn04094720\nn04095109\nn04095210\nn04095342\nn04095577\nn04096066\nn04097373\nn04097760\nn04097866\nn04098513\nn04099003\nn04099175\nn04099429\nn04099969\nn04100519\nn04101701\nn04102037\nn04102162\nn04102285\nn04102406\nn04102618\nn04102872\nn04103094\nn04103206\nn04103364\nn04103665\nn04103769\nn04103918\nn04104147\nn04104384\nn04104500\nn04104770\nn04105068\nn04105704\nn04105893\nn04107743\nn04108268\nn04108822\nn04110178\nn04110955\nn04111190\nn04111414\nn04111531\nn04111668\nn04112147\nn04112252\nn04112430\nn04112579\nn04112654\nn04112752\nn04113194\nn04113316\nn04113406\nn04113641\nn04113765\nn04114069\nn04114844\nn04115144\nn04115256\nn04115456\nn04115802\nn04115996\nn04116098\nn04116294\nn04116512\nn04117464\nn04118021\nn04118538\nn04118635\nn04118776\nn04119091\nn04119230\nn04119360\nn04119478\nn04119751\nn04120489\nn04120842\nn04121426\nn04121511\nn04121728\nn04122349\nn04122492\nn04122578\nn04122685\nn04122825\nn04123026\nn04123448\nn04123567\nn04123740\nn04124098\nn04124202\nn04124370\nn04124488\nn04125021\nn04125257\nn04125853\nn04126066\nn04127249\nn04127395\nn04127521\nn04127633\nn04127904\nn04128413\nn04128499\nn04128710\nn04128837\nn04130143\nn04130257\nn04130907\nn04131208\nn04131368\nn04131690\nn04131929\nn04132158\nn04132603\nn04132985\nn04133789\nn04134008\nn04134170\nn04134523\nn04134632\nn04135024\nn04135118\nn04135315\nn04135710\nn04136045\nn04136161\nn04136333\nn04136510\nn04136800\nn04137089\nn04137217\nn04137355\nn04137444\nn04137773\nn04137897\nn04138261\nn04138977\nn04139140\nn04139395\nn04139859\nn04140064\nn04140631\nn04141076\nn04141198\nn04141327\nn04141712\nn04141838\nn04141975\nn04142434\nn04142731\nn04142999\nn04143140\nn04143897\nn04144241\nn04144539\nn04145863\nn04146050\nn04146343\nn04146504\nn04146614\nn04146862\nn04147183\nn04147793\nn04148054\nn04148579\nn04148703\nn04149083\nn04149374\nn04149813\nn04150153\nn04150980\nn04152387\nn041
52593\nn04153025\nn04153751\nn04154152\nn04154340\nn04154565\nn04154938\nn04155068\nn04156040\nn04156140\nn04156946\nn04157099\nn04157320\nn04158807\nn04158956\nn04160372\nn04160586\nn04160847\nn04161358\nn04161981\nn04162433\nn04162706\nn04163530\nn04164002\nn04164406\nn04164757\nn04164868\nn04165409\nn04166281\nn04167346\nn04168199\nn04169437\nn04170037\nn04170933\nn04171208\nn04171459\nn04171629\nn04171831\nn04172107\nn04172230\nn04172342\nn04172776\nn04172904\nn04173046\nn04173511\nn04173907\nn04174101\nn04175039\nn04175147\nn04176068\nn04176190\nn04176295\nn04177041\nn04177755\nn04177820\nn04177931\nn04178190\nn04178329\nn04179126\nn04179712\nn04179824\nn04179913\nn04180063\nn04180229\nn04180888\nn04181228\nn04181561\nn04182152\nn04182322\nn04183217\nn04183329\nn04184316\nn04184435\nn04184880\nn04185071\nn04185529\nn04185804\nn04185946\nn04186051\nn04186268\nn04186455\nn04186848\nn04187061\nn04187233\nn04187547\nn04187970\nn04188179\nn04189282\nn04189651\nn04189816\nn04190052\nn04190376\nn04190997\nn04191595\nn04191943\nn04192238\nn04192698\nn04192858\nn04193377\nn04194127\nn04194289\nn04196502\nn04197110\nn04197391\nn04197781\nn04198355\nn04198453\nn04198562\nn04198722\nn04198797\nn04199027\nn04200000\nn04200258\nn04200537\nn04200800\nn04200908\nn04201064\nn04201297\nn04201733\nn04202417\nn04204081\nn04204238\nn04204347\nn04205318\nn04205505\nn04206225\nn04206356\nn04206570\nn04206790\nn04207151\nn04207343\nn04207596\nn04207763\nn04207903\nn04208065\nn04208210\nn04208427\nn04208760\nn04208936\nn04209133\nn04209239\nn04209509\nn04209613\nn04210120\nn04210288\nn04210390\nn04210591\nn04211219\nn04211356\nn04211528\nn04211857\nn04211970\nn04212165\nn04212282\nn04212467\nn04213353\nn04214046\nn04214282\nn04215153\nn04215402\nn04216634\nn04216860\nn04216963\nn04217546\nn04217882\nn04218564\nn04219185\nn04219424\nn04220250\nn04221823\nn04222210\nn04222307\nn04222470\nn04222723\nn04223170\nn04223299\nn04224543\nn04224842\nn04225031\nn04225729\nn04225987\nn04226464\nn0
4226826\nn04227144\nn04227900\nn04228054\nn04228215\nn04228581\nn04228693\nn04229007\nn04229107\nn04229480\nn04229737\nn04229816\nn04229959\nn04230387\nn04230603\nn04230808\nn04231272\nn04231693\nn04231905\nn04232153\nn04232800\nn04233124\nn04233715\nn04234455\nn04234887\nn04235291\nn04235860\nn04236377\nn04236809\nn04236935\nn04237423\nn04238128\nn04238321\nn04238617\nn04238763\nn04239074\nn04239436\nn04239786\nn04240752\nn04241249\nn04241573\nn04242084\nn04242408\nn04242704\nn04243546\nn04243941\nn04244379\nn04244997\nn04245508\nn04246060\nn04246271\nn04246731\nn04246855\nn04247011\nn04247630\nn04247736\nn04247876\nn04248396\nn04248507\nn04248851\nn04249415\nn04249582\nn04249882\nn04250224\nn04250473\nn04250692\nn04250850\nn04251144\nn04251701\nn04251791\nn04252077\nn04252225\nn04252331\nn04252560\nn04252653\nn04253057\nn04253168\nn04253931\nn04254009\nn04254120\nn04254680\nn04254777\nn04255163\nn04255586\nn04255899\nn04256520\nn04256891\nn04257223\nn04257684\nn04257790\nn04257986\nn04258138\nn04258333\nn04258438\nn04258618\nn04258732\nn04258859\nn04259630\nn04260364\nn04261116\nn04261281\nn04261638\nn04262161\nn04263257\nn04263336\nn04263502\nn04264628\nn04264765\nn04264914\nn04265275\nn04265904\nn04266014\nn04266162\nn04266375\nn04266486\nn04266968\nn04267435\nn04269270\nn04269822\nn04269944\nn04270147\nn04270371\nn04270891\nn04271531\nn04272054\nn04272389\nn04272928\nn04273285\nn04273569\nn04273659\nn04273796\nn04273972\nn04274985\nn04275175\nn04275548\nn04275661\nn04277352\nn04277493\nn04277826\nn04278247\nn04278353\nn04278447\nn04279172\nn04279353\nn04279462\nn04281260\nn04281375\nn04282231\nn04282494\nn04282872\nn04282992\nn04283096\nn04283255\nn04283378\nn04283585\nn04283905\nn04284002\nn04284341\nn04284438\nn04284572\nn04284869\nn04285008\nn04285146\nn04285803\nn04285965\nn04286575\nn04287451\nn04287747\nn04287898\nn04288272\nn04288533\nn04289027\nn04289195\nn04289576\nn04289690\nn04289827\nn04290079\nn04290259\nn04290507\nn04290615\nn04291992\nn04292080\n
n04292414\nn04292572\nn04292921\nn04293119\nn04294426\nn04294614\nn04294879\nn04295081\nn04295571\nn04295881\nn04296562\nn04297098\nn04297750\nn04297847\nn04298053\nn04298661\nn04299215\nn04299370\nn04299963\nn04300643\nn04301000\nn04301760\nn04303357\nn04303497\nn04304215\nn04304375\nn04304680\nn04305210\nn04305323\nn04305572\nn04306080\nn04306592\nn04306847\nn04307767\nn04307986\nn04308084\nn04308273\nn04308397\nn04309049\nn04309348\nn04309548\nn04309833\nn04310018\nn04310157\nn04310721\nn04310904\nn04311004\nn04311174\nn04311595\nn04312154\nn04312432\nn04313220\nn04313503\nn04313628\nn04314522\nn04314914\nn04315342\nn04315713\nn04315948\nn04316498\nn04317063\nn04317175\nn04317325\nn04317420\nn04317833\nn04317976\nn04318787\nn04318892\nn04319937\nn04320973\nn04321453\nn04322026\nn04322801\nn04323819\nn04324297\nn04324387\nn04325041\nn04325704\nn04326547\nn04326676\nn04326799\nn04326896\nn04327204\nn04327682\nn04328186\nn04328329\nn04328946\nn04329834\nn04329958\nn04330267\nn04330340\nn04330746\nn04330998\nn04331277\nn04331639\nn04332074\nn04332243\nn04332580\nn04333129\nn04333869\nn04334105\nn04334365\nn04334599\nn04335209\nn04335435\nn04335693\nn04335886\nn04336792\nn04337287\nn04338517\nn04338963\nn04339879\nn04340521\nn04340750\nn04340935\nn04341133\nn04341686\nn04344003\nn04344734\nn04344873\nn04345028\nn04345201\nn04346003\nn04346157\nn04346328\nn04346428\nn04347119\nn04347519\nn04347754\nn04348359\nn04349306\nn04349401\nn04350458\nn04350581\nn04350769\nn04350905\nn04351699\nn04353573\nn04354026\nn04354182\nn04354487\nn04354589\nn04355115\nn04355267\nn04355338\nn04355511\nn04355933\nn04356056\nn04356595\nn04356925\nn04357121\nn04357314\nn04357531\nn04358117\nn04358491\nn04358707\nn04358874\nn04359335\nn04359500\nn04360798\nn04360914\nn04361095\nn04361260\nn04363082\nn04363777\nn04363991\nn04364160\nn04364545\nn04365328\nn04366033\nn04366116\nn04366367\nn04367011\nn04367371\nn04367480\nn04367746\nn04367950\nn04368496\nn04369025\nn04369282\nn04370048\nn04370288
\nn04370456\nn04370774\nn04371050\nn04371430\nn04371563\nn04371774\nn04372370\nn04373089\nn04373428\nn04373704\nn04373795\nn04373894\nn04374315\nn04374735\nn04375241\nn04375405\nn04375615\nn04376400\nn04376876\nn04377057\nn04378956\nn04379243\nn04379964\nn04380255\nn04380346\nn04380533\nn04380916\nn04381073\nn04381587\nn04381724\nn04381860\nn04381994\nn04382438\nn04382695\nn04382880\nn04383015\nn04383130\nn04383839\nn04384593\nn04384910\nn04385536\nn04385799\nn04386051\nn04386664\nn04386792\nn04387095\nn04387201\nn04387261\nn04387400\nn04387706\nn04387932\nn04388743\nn04389033\nn04389430\nn04389521\nn04389718\nn04389854\nn04390577\nn04390873\nn04390977\nn04391445\nn04391838\nn04392113\nn04392526\nn04392764\nn04392985\nn04393095\nn04393549\nn04393808\nn04393913\nn04394630\nn04395024\nn04395106\nn04395651\nn04396335\nn04396808\nn04396902\nn04397027\nn04397452\nn04397645\nn04397768\nn04398044\nn04398497\nn04398688\nn04398834\nn04398951\nn04399158\nn04399537\nn04399846\nn04400109\nn04400289\nn04400737\nn04401088\nn04401578\nn04401680\nn04401828\nn04401949\nn04402057\nn04402449\nn04402580\nn04402746\nn04402984\nn04403413\nn04403524\nn04403638\nn04403925\nn04404412\nn04404817\nn04404997\nn04405540\nn04405762\nn04405907\nn04406239\nn04406817\nn04407435\nn04407686\nn04408871\nn04409011\nn04409128\nn04409384\nn04409515\nn04409625\nn04409806\nn04410086\nn04411264\nn04412097\nn04412416\nn04413969\nn04414199\nn04414319\nn04414476\nn04414675\nn04414909\nn04415663\nn04416005\nn04417086\nn04417180\nn04417361\nn04417672\nn04417809\nn04418357\nn04419073\nn04419642\nn04419868\nn04421872\nn04422409\nn04422727\nn04422875\nn04423845\nn04424692\nn04425804\nn04426316\nn04426427\nn04427715\nn04428191\nn04428634\nn04429376\nn04430475\nn04430896\nn04431025\nn04431745\nn04432203\nn04432662\nn04433585\nn04434207\nn04434531\nn04434932\nn04435180\nn04435653\nn04436012\nn04436185\nn04436329\nn04437953\nn04438304\nn04438507\nn04438897\nn04439585\nn04439712\nn04440963\nn04441662\nn04441790\nn044423
12\nn04442441\nn04442741\nn04443164\nn04443257\nn04443766\nn04444749\nn04445040\nn04445154\nn04445327\nn04445952\nn04446276\nn04446844\nn04447028\nn04447276\nn04447443\nn04447861\nn04448070\nn04448361\nn04449290\nn04449966\nn04450133\nn04450243\nn04450640\nn04450749\nn04450994\nn04451318\nn04451818\nn04452528\nn04452615\nn04452757\nn04453037\nn04453156\nn04453390\nn04453666\nn04454908\nn04455250\nn04455652\nn04456115\nn04457157\nn04457474\nn04457767\nn04457910\nn04458633\nn04458843\nn04459018\nn04459362\nn04459610\nn04459773\nn04459909\nn04460130\nn04461437\nn04461570\nn04461696\nn04461879\nn04462011\nn04462240\nn04463679\nn04464615\nn04464852\nn04465050\nn04465358\nn04465501\nn04465666\nn04466871\nn04467099\nn04467307\nn04467665\nn04468005\nn04469003\nn04469514\nn04469813\nn04471148\nn04471632\nn04472563\nn04473108\nn04474035\nn04474187\nn04474466\nn04475411\nn04475631\nn04476116\nn04476259\nn04476831\nn04476972\nn04477219\nn04477387\nn04477548\nn04478512\nn04479046\nn04479823\nn04479939\nn04480033\nn04480853\nn04482177\nn04482297\nn04482393\nn04483073\nn04483307\nn04483925\nn04484432\nn04485082\nn04485423\nn04485884\nn04486054\nn04486213\nn04486934\nn04487081\nn04487394\nn04487724\nn04488202\nn04488427\nn04488530\nn04488742\nn04488857\nn04489008\nn04489695\nn04489817\nn04490091\nn04491388\nn04491638\nn04491769\nn04492060\nn04492375\nn04492749\nn04493381\nn04494204\nn04495698\nn04495843\nn04496614\nn04496726\nn04496872\nn04497442\nn04497570\nn04497801\nn04498389\nn04499062\nn04499446\nn04500060\nn04501370\nn04501550\nn04501837\nn04501947\nn04502059\nn04502197\nn04502502\nn04502670\nn04502851\nn04503413\nn04503593\nn04504141\nn04505036\nn04505345\nn04505470\nn04506289\nn04506506\nn04506688\nn04507155\nn04508163\nn04508489\nn04508949\nn04509171\nn04509260\nn04509417\nn04510706\nn04511002\nn04513827\nn04513998\nn04514241\nn04515003\nn04516116\nn04516214\nn04516354\nn04516672\nn04517211\nn04517408\nn04517823\nn04518132\nn04518343\nn04518643\nn04518764\nn04519153\nn0452
0170\nn04520382\nn04520784\nn04521863\nn04522168\nn04523525\nn04523831\nn04524142\nn04524313\nn04524941\nn04525038\nn04525191\nn04525305\nn04525417\nn04525584\nn04525821\nn04526964\nn04527648\nn04528079\nn04528968\nn04529108\nn04529681\nn04529962\nn04530283\nn04530566\nn04531098\nn04531873\nn04532106\nn04532398\nn04532670\nn04532831\nn04533199\nn04533499\nn04533594\nn04533700\nn04533802\nn04533946\nn04534127\nn04534359\nn04534520\nn04534895\nn04535252\nn04535370\nn04535524\nn04536153\nn04536335\nn04536595\nn04536866\nn04538552\nn04539053\nn04539203\nn04539794\nn04540053\nn04540255\nn04541320\nn04541987\nn04542095\nn04542715\nn04542858\nn04542943\nn04543158\nn04543636\nn04543772\nn04543996\nn04544325\nn04544450\nn04545305\nn04545748\nn04545858\nn04546194\nn04546340\nn04547592\nn04548280\nn04548362\nn04549028\nn04549122\nn04549629\nn04549919\nn04550184\nn04551055\nn04552348\nn04552696\nn04553561\nn04553703\nn04554211\nn04554406\nn04554684\nn04554871\nn04555291\nn04555400\nn04555600\nn04555700\nn04555897\nn04556408\nn04556533\nn04556948\nn04557648\nn04557751\nn04558478\nn04559166\nn04559451\nn04559730\nn04559910\nn04560113\nn04560292\nn04560804\nn04560882\nn04561287\nn04561422\nn04561734\nn04562262\nn04562496\nn04562935\nn04563204\nn04563413\nn04564278\nn04564581\nn04565375\nn04566257\nn04566561\nn04566756\nn04568069\nn04568557\nn04568841\nn04569063\nn04569822\nn04570214\nn04570815\nn04570958\nn04571292\nn04571566\nn04571686\nn04571958\nn04573281\nn04573379\nn04573513\nn04573937\nn04574067\nn04574999\nn04575723\nn04575824\nn04576002\nn04576211\nn04577426\nn04577769\nn04578934\nn04579056\nn04579145\nn04579230\nn04579432\nn04579667\nn04579986\nn04580493\nn04581102\nn04581829\nn04582205\nn04582349\nn04582771\nn04582869\nn04583212\nn04583620\nn04584207\nn04584373\nn04585128\nn04585745\nn04585980\nn04586072\nn04586581\nn04586932\nn04587327\nn04587404\nn04587559\nn04587648\nn04588739\nn04589190\nn04589325\nn04589593\nn04589890\nn04590021\nn04590129\nn04590263\nn04590553\nn04
590746\nn04590933\nn04591056\nn04591157\nn04591517\nn04591713\nn04591887\nn04592005\nn04592099\nn04592465\nn04592741\nn04593077\nn04593185\nn04593376\nn04593524\nn04593866\nn04594114\nn04594218\nn04594489\nn04594828\nn04595028\nn04595285\nn04595855\nn04596742\nn04596852\nn04597309\nn04597400\nn04597804\nn04597913\nn04598318\nn04598582\nn04598965\nn04599124\nn04599235\nn04600312\nn04600912\nn04602762\nn04602956\nn04603399\nn04603729\nn04603872\nn04604644\nn04605163\nn04605321\nn04605572\nn04605726\nn04606251\nn04606574\nn04607035\nn04607242\nn04607869\nn04608329\nn04608435\nn04608567\nn04608923\nn04609531\nn04609651\nn04610013\nn04610274\nn04610503\nn04610676\nn04611916\nn04612026\nn04612373\nn04612504\nn04613015\nn04613696\nn04613939\nn04614655\nn04615226\nn04615644\nn04950952\nn04951071\nn04951186\nn04953296\nn04955160\nn04959672\nn04960277\nn04960582\nn04961062\nn04961331\nn04961691\nn04962062\nn04962240\nn04963307\nn04963588\nn04963740\nn04964001\nn04964799\nn04964878\nn04965179\nn04965451\nn04965661\nn04966543\nn04966941\nn04967191\nn04967674\nn04967801\nn04967882\nn04968056\nn04968139\nn04968749\nn04968895\nn04969242\nn04969540\nn04969798\nn04969952\nn04970059\nn04970398\nn04970470\nn04970916\nn04971211\nn04971313\nn04972350\nn04972451\nn04972801\nn04973291\nn04973386\nn04973585\nn04973816\nn04974859\nn04976319\nn04976952\nn04977412\nn04979002\nn04981658\nn05218119\nn05238282\nn05239437\nn05242928\nn05244934\nn05245192\nn05258051\nn05259914\nn05260127\nn05260240\nn05261310\nn05262422\nn05262534\nn05262698\nn05263183\nn05263448\nn05282652\nn05302499\nn05399034\nn05399243\nn05418717\nn05450617\nn05451384\nn05453657\nn05458576\nn05486510\nn05526957\nn05538625\nn05578095\nn05581932\nn05586759\nn05716342\nn06255081\nn06263609\nn06266633\nn06266973\nn06267145\nn06267564\nn06267655\nn06267758\nn06267893\nn06267991\nn06271778\nn06272290\nn06272612\nn06272803\nn06273414\nn06273555\nn06273743\nn06273986\nn06274760\nn06275095\nn06275353\nn06275471\nn06276501\nn06276697\nn
06277135\nn06277280\nn06278338\nn06278475\nn06281040\nn06359193\nn06359467\nn06415688\nn06417096\nn06470073\nn06592281\nn06595351\nn06596364\nn06596474\nn06596607\nn06596727\nn06785654\nn06793231\nn06794110\nn06874185\nn06883725\nn06892775\nn06998748\nn07005523\nn07248320\nn07273802\nn07461050\nn07556406\nn07556637\nn07556970\nn07557434\nn07560193\nn07560331\nn07560542\nn07560652\nn07560903\nn07561112\nn07561590\nn07561848\nn07562495\nn07563207\nn07564008\nn07564796\nn07564971\nn07565083\nn07565161\nn07565259\nn07566340\nn07567707\nn07567980\nn07568502\nn07568818\nn07569106\nn07569644\nn07570720\nn07572616\nn07572957\nn07573347\nn07573696\nn07574176\nn07574426\nn07574504\nn07574602\nn07574780\nn07574923\nn07575076\nn07575392\nn07575510\nn07575726\nn07575984\nn07576182\nn07576438\nn07576781\nn07577144\nn07577374\nn07577538\nn07578093\nn07579575\nn07579688\nn07579787\nn07579917\nn07580053\nn07580253\nn07580359\nn07580470\nn07580592\nn07581249\nn07581346\nn07581775\nn07581931\nn07582152\nn07582277\nn07582609\nn07582892\nn07583066\nn07584110\nn07584332\nn07584423\nn07584593\nn07585107\nn07585208\nn07585557\nn07585758\nn07585906\nn07586099\nn07586318\nn07586604\nn07586718\nn07586894\nn07587023\nn07587111\nn07587331\nn07587441\nn07587618\nn07587700\nn07587962\nn07588111\nn07588193\nn07588299\nn07588419\nn07588574\nn07588817\nn07588947\nn07589458\nn07589543\nn07590320\nn07590502\nn07590611\nn07590752\nn07591049\nn07591473\nn07591586\nn07591961\nn07592094\nn07592400\nn07592481\nn07592656\nn07592768\nn07593004\nn07593199\nn07593471\nn07594066\nn07595180\nn07595649\nn07595914\nn07596684\nn07596967\nn07597145\nn07597365\nn07598256\nn07598734\nn07599161\nn07599911\nn07599998\nn07600177\nn07600285\nn07600696\nn07601290\nn07601572\nn07601686\nn07601809\nn07604956\nn07605040\nn07605380\nn07605474\nn07605597\nn07605804\nn07605944\nn07606538\nn07606669\nn07606764\nn07607138\nn07607605\nn07607967\nn07608098\nn07608339\nn07608429\nn07608866\nn07609215\nn07609407\nn07609632\nn07609840\
nn07610620\nn07611046\nn07611148\nn07611267\nn07611358\nn07611839\nn07611991\nn07612137\nn07612367\nn07612632\nn07612996\nn07613266\nn07613480\nn07613815\nn07614198\nn07614500\nn07614730\nn07614825\nn07615190\nn07615289\nn07615460\nn07615569\nn07615671\nn07615774\nn07616046\nn07616386\nn07616487\nn07616590\nn07616748\nn07617051\nn07617611\nn07617708\nn07617932\nn07618029\nn07618119\nn07618432\nn07619004\nn07619208\nn07619409\nn07620689\nn07621618\nn07623136\nn07624466\nn07624666\nn07624924\nn07625061\nn07627931\nn07628068\nn07631926\nn07639069\nn07641928\nn07642361\nn07642471\nn07642742\nn07642933\nn07643026\nn07643200\nn07643306\nn07643474\nn07643891\nn07643981\nn07648913\nn07648997\nn07650903\nn07651025\nn07654148\nn07654298\nn07655263\nn07665438\nn07666176\nn07678729\nn07679034\nn07679356\nn07680313\nn07680517\nn07680761\nn07680932\nn07681450\nn07681691\nn07681926\nn07682197\nn07682316\nn07682477\nn07682624\nn07682808\nn07682952\nn07683039\nn07683360\nn07683490\nn07683617\nn07683786\nn07684084\nn07684164\nn07684289\nn07684517\nn07684600\nn07684938\nn07685031\nn07685218\nn07685399\nn07685546\nn07685730\nn07685918\nn07686021\nn07686202\nn07686720\nn07686873\nn07687053\nn07687211\nn07687381\nn07687469\nn07687626\nn07687789\nn07688130\nn07688624\nn07688898\nn07689003\nn07689842\nn07690019\nn07690152\nn07690273\nn07690431\nn07690511\nn07690585\nn07690739\nn07690892\nn07691091\nn07691237\nn07691539\nn07691650\nn07691758\nn07691863\nn07691954\nn07692614\nn07693048\nn07693223\nn07693590\nn07693725\nn07693972\nn07694403\nn07694516\nn07694659\nn07694839\nn07695652\nn07695742\nn07695878\nn07695965\nn07696403\nn07696527\nn07696625\nn07696728\nn07696839\nn07696977\nn07697100\nn07697313\nn07697537\nn07697699\nn07697825\nn07698250\nn07698401\nn07698543\nn07698672\nn07698782\nn07700003\nn07704054\nn07704205\nn07705931\nn07707451\nn07708124\nn07708398\nn07708685\nn07709046\nn07709172\nn07709333\nn07709881\nn07710283\nn07710616\nn07710952\nn07711080\nn07711232\nn07711371\nn0771156
9\nn07712063\nn07712267\nn07712382\nn07712559\nn07712748\nn07712856\nn07712959\nn07713074\nn07713267\nn07713395\nn07713763\nn07713895\nn07714078\nn07714188\nn07714287\nn07714448\nn07714571\nn07714802\nn07714895\nn07714990\nn07715103\nn07715221\nn07715407\nn07715561\nn07715721\nn07716034\nn07716203\nn07716358\nn07716504\nn07716906\nn07717070\nn07717410\nn07717556\nn07718472\nn07718747\nn07719213\nn07719616\nn07719839\nn07720277\nn07720442\nn07720615\nn07720875\nn07721018\nn07721195\nn07721325\nn07721456\nn07721678\nn07721942\nn07722052\nn07722217\nn07722485\nn07722763\nn07722888\nn07723039\nn07723177\nn07723330\nn07723559\nn07723968\nn07724269\nn07724492\nn07724654\nn07724943\nn07725255\nn07725376\nn07725531\nn07725789\nn07725888\nn07726095\nn07726525\nn07726672\nn07726796\nn07727048\nn07727458\nn07727578\nn07727868\nn07728053\nn07728181\nn07728391\nn07728585\nn07728708\nn07729384\nn07729485\nn07729828\nn07729926\nn07730033\nn07730207\nn07730320\nn07730406\nn07730708\nn07730855\nn07731006\nn07731284\nn07731587\nn07731767\nn07731952\nn07732168\nn07732636\nn07732747\nn07732904\nn07733394\nn07733567\nn07733712\nn07734017\nn07734183\nn07734292\nn07734417\nn07734555\nn07734744\nn07734879\nn07735404\nn07735510\nn07735687\nn07735803\nn07736087\nn07736256\nn07736371\nn07736692\nn07736813\nn07737745\nn07739125\nn07739344\nn07739506\nn07740033\nn07740220\nn07740342\nn07740461\nn07740597\nn07740954\nn07741138\nn07741461\nn07742012\nn07742313\nn07742704\nn07743224\nn07743544\nn07743902\nn07744057\nn07744246\nn07744430\nn07744682\nn07744811\nn07745046\nn07745466\nn07745940\nn07746186\nn07746334\nn07746551\nn07747055\nn07747607\nn07747951\nn07748157\nn07748276\nn07748416\nn07748574\nn07748753\nn07748912\nn07749192\nn07749312\nn07749446\nn07749582\nn07749731\nn07749870\nn07749969\nn07750146\nn07750449\nn07750736\nn07750872\nn07751004\nn07751148\nn07751280\nn07751451\nn07751858\nn07752109\nn07752377\nn07752514\nn07752664\nn07752966\nn07753113\nn07753275\nn07753592\nn07753743\nn07753
980\nn07754451\nn07754684\nn07754894\nn07755089\nn07755411\nn07755707\nn07755929\nn07756325\nn07756641\nn07756951\nn07757132\nn07757312\nn07757511\nn07757990\nn07758680\nn07759194\nn07759816\nn07760153\nn07760859\nn07761141\nn07761309\nn07761611\nn07762114\nn07762244\nn07762740\nn07762913\nn07763107\nn07763629\nn07763792\nn07763987\nn07764155\nn07764315\nn07764630\nn07764847\nn07765073\nn07765208\nn07765361\nn07765862\nn07765999\nn07766173\nn07766891\nn07767002\nn07767171\nn07767344\nn07767549\nn07767709\nn07767847\nn07768068\nn07768230\nn07768423\nn07768694\nn07768858\nn07769584\nn07769731\nn07770034\nn07770763\nn07771212\nn07771731\nn07772147\nn07772274\nn07772788\nn07772935\nn07774596\nn07774719\nn07774842\nn07775050\nn07775197\nn07800740\nn07801091\nn07801342\nn07801508\nn07801779\nn07801892\nn07802026\nn07802417\nn07802863\nn07802963\nn07803093\nn07803545\nn07804323\nn07804543\nn07804657\nn07804771\nn07804900\nn07805594\nn07805731\nn07805966\nn07806120\nn07806221\nn07806633\nn07806774\nn07807002\nn07807171\nn07807317\nn07807472\nn07807594\nn07807710\nn07807834\nn07807922\nn07808022\nn07808587\nn07808904\nn07809096\nn07810907\nn07812046\nn07812184\nn07814203\nn07814390\nn07814487\nn07814634\nn07814790\nn07815424\nn07815588\nn07815839\nn07816052\nn07816164\nn07816296\nn07816398\nn07816575\nn07816839\nn07817024\nn07817160\nn07817315\nn07817871\nn07818133\nn07818277\nn07818572\nn07818689\nn07818825\nn07818995\nn07819166\nn07819480\nn07819769\nn07819896\nn07820145\nn07820297\nn07820497\nn07820683\nn07820960\nn07821260\nn07821610\nn07821758\nn07821919\nn07822197\nn07822323\nn07822518\nn07822845\nn07823105\nn07823280\nn07823460\nn07823698\nn07823951\nn07824191\nn07824702\nn07825194\nn07825717\nn07825972\nn07826091\nn07826340\nn07826453\nn07826930\nn07827130\nn07827284\nn07827410\nn07827750\nn07827896\nn07828642\nn07829248\nn07829331\nn07829412\nn07830593\nn07831146\nn07831267\nn07832416\nn07832902\nn07834065\nn07834507\nn07834618\nn07834872\nn07835331\nn07835457\nn078
35921\nn07836838\nn07837002\nn07837362\nn07837912\nn07838073\nn07838233\nn07838441\nn07838551\nn07840027\nn07840520\nn07840804\nn07841345\nn07841495\nn07841639\nn07841800\nn07841907\nn07842044\nn07842130\nn07842202\nn07842308\nn07842433\nn07842605\nn07842753\nn07843117\nn07843464\nn07843636\nn07843775\nn07844042\nn07844604\nn07844867\nn07845087\nn07845702\nn07845863\nn07846143\nn07847198\nn07847453\nn07847827\nn07847917\nn07848093\nn07848196\nn07848338\nn07849336\nn07849619\nn07849733\nn07849912\nn07850083\nn07850329\nn07851298\nn07851443\nn07851554\nn07851641\nn07851767\nn07852045\nn07852229\nn07852302\nn07852614\nn07852833\nn07852919\nn07853560\nn07854184\nn07854982\nn07855510\nn07855907\nn07857170\nn07857731\nn07858114\nn07858978\nn07859284\nn07859583\nn07859796\nn07860103\nn07860331\nn07860447\nn07860805\nn07860988\nn07861158\nn07861557\nn07861813\nn07861983\nn07862095\nn07862244\nn07862348\nn07862461\nn07862611\nn07863374\nn07863547\nn07863802\nn07864065\nn07864756\nn07864934\nn07865105\nn07865196\nn07865484\nn07866015\nn07866151\nn07866277\nn07866409\nn07866723\nn07866868\nn07867021\nn07867164\nn07867324\nn07867421\nn07867616\nn07867751\nn07868200\nn07868340\nn07868508\nn07868830\nn07868955\nn07869291\nn07869391\nn07869522\nn07869611\nn07869775\nn07870069\nn07870167\nn07870313\nn07870894\nn07871234\nn07871436\nn07871720\nn07871810\nn07872593\nn07873057\nn07873348\nn07873464\nn07873807\nn07874063\nn07874159\nn07874259\nn07874343\nn07874441\nn07874780\nn07875152\nn07875436\nn07875560\nn07875693\nn07876189\nn07876651\nn07877187\nn07877299\nn07877675\nn07877849\nn07877961\nn07878647\nn07878785\nn07878926\nn07879072\nn07879174\nn07879350\nn07879450\nn07879659\nn07879953\nn07880080\nn07880213\nn07880325\nn07880458\nn07880751\nn07880880\nn07880968\nn07881205\nn07881404\nn07881800\nn07882420\nn07882497\nn07883031\nn07883251\nn07884567\nn07885705\nn07886057\nn07886176\nn07886463\nn07886572\nn07886849\nn07887099\nn07887192\nn07887304\nn07887461\nn07887634\nn07887967\nn0
7888229\nn07888465\nn07888816\nn07889274\nn07889510\nn07889814\nn07890068\nn07890226\nn07890352\nn07890540\nn07890750\nn07891189\nn07891309\nn07891433\nn07891726\nn07892418\nn07892512\nn07892813\nn07893253\nn07893528\nn07893642\nn07893891\nn07894102\nn07894298\nn07894451\nn07894551\nn07894703\nn07894799\nn07894965\nn07895100\nn07895237\nn07895435\nn07895595\nn07895710\nn07895839\nn07895962\nn07896060\nn07896165\nn07896287\nn07896661\nn07896765\nn07896893\nn07896994\nn07897116\nn07897438\nn07897600\nn07897750\nn07897865\nn07897975\nn07898117\nn07898247\nn07898333\nn07898443\nn07898617\nn07898745\nn07899003\nn07899108\nn07899292\nn07899434\nn07899533\nn07899660\nn07899769\nn07899899\nn07900225\nn07900406\nn07900616\nn07900734\nn07900825\nn07900958\nn07901355\nn07901457\nn07901587\nn07902121\nn07902336\nn07902443\nn07902799\nn07902937\nn07903101\nn07903208\nn07903543\nn07903643\nn07903731\nn07903841\nn07903962\nn07904293\nn07904395\nn07904637\nn07904760\nn07904865\nn07904934\nn07905038\nn07905296\nn07905386\nn07905474\nn07905979\nn07906111\nn07906284\nn07906572\nn07906718\nn07906877\nn07907037\nn07907161\nn07907342\nn07907429\nn07907548\nn07907831\nn07907943\nn07908411\nn07908567\nn07908647\nn07908812\nn07909129\nn07909593\nn07910048\nn07910152\nn07910379\nn07910538\nn07910656\nn07911249\nn07911371\nn07911677\nn07912211\nn07913393\nn07913882\nn07914006\nn07914128\nn07914271\nn07914413\nn07914586\nn07914777\nn07914995\nn07915094\nn07915491\nn07915618\nn07915918\nn07916041\nn07916183\nn07916319\nn07916437\nn07917133\nn07917272\nn07917392\nn07917507\nn07917618\nn07918028\nn07918193\nn07918309\nn07918879\nn07919310\nn07919441\nn07919572\nn07920052\nn07920222\nn07920349\nn07920540\nn07920663\nn07920872\nn07920989\nn07921239\nn07921455\nn07921615\nn07921834\nn07922041\nn07922147\nn07922512\nn07922764\nn07923748\nn07924033\nn07924276\nn07924443\nn07924560\nn07924747\nn07924834\nn07924955\nn07925116\nn07925229\nn07925500\nn07925608\nn07925966\nn07926250\nn07926346\nn07926785\n
n07926920\nn07927197\nn07927512\nn07927836\nn07927931\nn07928163\nn07928367\nn07928488\nn07928696\nn07928790\nn07928887\nn07929172\nn07929351\nn07929519\nn07930062\nn07930315\nn07930433\nn07930554\nn07930864\nn07931001\nn07931452\nn07931612\nn07931870\nn07932039\nn07932841\nn07933154\nn07933274\nn07933799\nn07934282\nn07935043\nn07935379\nn07935504\nn07935737\nn07935878\nn07936263\nn07936548\nn07936745\nn07937461\nn07938007\nn07938149\nn07938313\nn07938594\nn07942152\nn07951464\nn07954211\nn07977870\nn08182379\nn08242223\nn08249459\nn08256735\nn08376250\nn08492461\nn08494231\nn08495908\nn08505018\nn08517676\nn08518171\nn08521623\nn08524735\nn08539072\nn08547468\nn08547544\nn08551296\nn08555710\nn08560295\nn08571898\nn08573842\nn08578517\nn08579352\nn08580944\nn08583292\nn08583455\nn08584914\nn08596076\nn08598301\nn08598568\nn08611339\nn08614632\nn08616050\nn08628141\nn08633683\nn08640531\nn08640739\nn08640962\nn08645104\nn08645212\nn08649711\nn08658309\nn08659446\nn08659861\nn08663703\nn08673039\nn08677424\nn08896327\nn09189157\nn09191635\nn09193705\nn09194227\nn09199101\nn09205509\nn09206896\nn09206985\nn09208496\nn09210862\nn09217230\nn09218315\nn09218494\nn09218641\nn09219233\nn09224725\nn09228055\nn09229709\nn09230041\nn09230202\nn09231117\nn09233446\nn09238926\nn09239302\nn09242389\nn09245515\nn09246464\nn09247410\nn09248399\nn09249034\nn09251407\nn09256479\nn09257843\nn09259025\nn09259219\nn09260907\nn09263912\nn09265620\nn09267854\nn09269341\nn09269472\nn09270735\nn09274152\nn09279986\nn09282208\nn09283193\nn09283405\nn09283767\nn09283866\nn09287968\nn09288635\nn09289331\nn09290444\nn09294877\nn09295946\nn09300905\nn09302616\nn09303008\nn09303528\nn09304750\nn09305898\nn09308572\nn09308743\nn09309168\nn09309292\nn09326662\nn09331251\nn09332890\nn09335809\nn09336555\nn09337253\nn09344324\nn09348460\nn09349648\nn09359803\nn09361517\nn09362945\nn09366317\nn09376198\nn09376526\nn09376786\nn09381242\nn09382099\nn09384106\nn09392402\nn09393605\nn09396465\nn09396608
\nn09398076\nn09398677\nn09399592\nn09400987\nn09403211\nn09403427\nn09403734\nn09405078\nn09406793\nn09409512\nn09409752\nn09410224\nn09411189\nn09415584\nn09415671\nn09416076\nn09416890\nn09421799\nn09421951\nn09428293\nn09428628\nn09432283\nn09433442\nn09433839\nn09435739\nn09436444\nn09436708\nn09437454\nn09438844\nn09438940\nn09439213\nn09442595\nn09443281\nn09443641\nn09444783\nn09445008\nn09445289\nn09447666\nn09448690\nn09450163\nn09451237\nn09452395\nn09452760\nn09453008\nn09454153\nn09454412\nn09457979\nn09460046\nn09461069\nn09466678\nn09468604\nn09472413\nn09472597\nn09475044\nn09475179\nn09475925\nn09481120\nn09505153\nn09606527\nn09607630\nn09607903\nn09608709\nn09610405\nn09616922\nn09618760\nn09618880\nn09618957\nn09619168\nn09620078\nn09620794\nn09621232\nn09622049\nn09622302\nn09624168\nn09624559\nn09626238\nn09627906\nn09629752\nn09632518\nn09635534\nn09636339\nn09637339\nn09638454\nn09638875\nn09639919\nn09640715\nn09641002\nn09643799\nn09644152\nn09648743\nn09650729\nn09651123\nn09652149\nn09654518\nn09656077\nn09659039\nn09659188\nn09661873\nn09666883\nn09670521\nn09675922\nn09676021\nn09676247\nn09676884\nn09679170\nn09681234\nn09683757\nn09683924\nn09684901\nn09686401\nn09688804\nn09689435\nn09689958\nn09690621\nn09691729\nn09691858\nn09692915\nn09693982\nn09694664\nn09694771\nn09695514\nn09695620\nn09695979\nn09696456\nn09696585\nn09696763\nn09697401\nn09698644\nn09700964\nn09701148\nn09701833\nn09703485\nn09703708\nn09705124\nn09705784\nn09706255\nn09707289\nn09708750\nn09708889\nn09710041\nn09711435\nn09712324\nn09712448\nn09712696\nn09713108\nn09714694\nn09715427\nn09717233\nn09718217\nn09718811\nn09718936\nn09719309\nn09719794\nn09720033\nn09720256\nn09720595\nn09720842\nn09722658\nn09723067\nn09724533\nn09724656\nn09724785\nn09725000\nn09725653\nn09725772\nn09726621\nn09727440\nn09727826\nn09728137\nn09728285\nn09730077\nn09730204\nn09730824\nn09731343\nn09731436\nn09732170\nn09733793\nn09734185\nn09734450\nn09734535\nn09734639\nn097367
98\nn09736945\nn09738121\nn09740724\nn09741816\nn09742101\nn09742315\nn09743487\nn09743792\nn09744161\nn09744834\nn09747191\nn09747495\nn09749386\nn09750282\nn09750641\nn09750770\nn09750891\nn09751496\nn09751895\nn09752023\nn09752519\nn09753792\nn09754217\nn09755241\nn09756049\nn09757449\nn09758885\nn09759501\nn09760609\nn09761068\nn09763784\nn09764598\nn09764900\nn09765118\nn09767197\nn09770179\nn09770359\nn09772930\nn09774783\nn09776346\nn09779790\nn09782167\nn09782397\nn09785659\nn09785891\nn09787534\nn09787765\nn09789566\nn09791014\nn09791419\nn09791816\nn09792555\nn09792969\nn09793141\nn09796809\nn09797873\nn09800964\nn09801533\nn09805151\nn09805324\nn09809538\nn09809749\nn09809925\nn09811852\nn09813219\nn09814660\nn09816771\nn09818022\nn09820263\nn09822830\nn09823502\nn09823832\nn09824135\nn09824609\nn09827246\nn09827363\nn09828216\nn09830194\nn09830400\nn09830629\nn09832456\nn09833441\nn09833536\nn09834378\nn09834699\nn09835230\nn09835348\nn09835506\nn09836160\nn09836343\nn09836519\nn09836786\nn09838621\nn09839702\nn09840217\nn09840520\nn09841188\nn09841696\nn09842047\nn09842395\nn09842528\nn09843443\nn09843824\nn09844457\nn09845401\nn09846469\nn09846755\nn09846894\nn09847543\nn09850760\nn09851165\nn09851575\nn09854218\nn09854421\nn09855433\nn09856671\nn09858165\nn09859152\nn09861599\nn09861863\nn09861946\nn09862621\nn09863031\nn09866817\nn09871229\nn09871681\nn09871867\nn09872066\nn09873348\nn09873473\nn09873899\nn09874428\nn09874725\nn09874862\nn09877288\nn09877750\nn09877951\nn09881265\nn09881895\nn09886403\nn09889065\nn09889170\nn09889941\nn09890749\nn09893191\nn09893344\nn09893502\nn09894143\nn09894445\nn09894654\nn09895222\nn09895561\nn09896170\nn09896401\nn09896685\nn09899671\nn09899782\nn09899929\nn09901337\nn09901921\nn09902731\nn09902954\nn09903153\nn09903501\nn09904208\nn09904837\nn09905185\nn09906449\nn09911226\nn09913455\nn09913593\nn09915434\nn09915651\nn09916348\nn09917214\nn09917345\nn09917593\nn09918248\nn09918554\nn09919451\nn09920283\nn0992
3186\nn09923418\nn09923561\nn09923673\nn09924106\nn09924195\nn09924996\nn09927451\nn09928136\nn09929298\nn09929577\nn09930257\nn09930876\nn09931165\nn09931640\nn09932098\nn09932336\nn09932508\nn09933098\nn09934337\nn09934774\nn09936825\nn09938449\nn09941089\nn09941787\nn09941964\nn09942970\nn09943239\nn09943811\nn09944022\nn09944430\nn09945745\nn09946814\nn09951274\nn09951616\nn09953350\nn09954639\nn09959142\nn09964202\nn09967967\nn09970822\nn09971273\nn09972010\nn09972458\nn09974648\nn09975425\nn09976283\nn09976429\nn09980985\nn09981278\nn09981540\nn09981939\nn09988063\nn09988493\nn09988703\nn09989502\nn09990415\nn09990690\nn09990777\nn09991867\nn09993252\nn09994673\nn10001217\nn10001481\nn10002760\nn10004718\nn10005934\nn10007684\nn10009276\nn10013811\nn10015485\nn10017272\nn10019072\nn10019406\nn10020670\nn10020890\nn10024362\nn10025635\nn10026976\nn10027246\nn10033412\nn10033663\nn10034201\nn10034614\nn10036692\nn10036929\nn10037385\nn10037922\nn10038409\nn10039271\nn10039946\nn10040945\nn10042845\nn10043491\nn10043643\nn10048612\nn10049363\nn10053439\nn10053808\nn10054657\nn10055410\nn10058962\nn10060352\nn10063635\nn10069296\nn10069981\nn10070108\nn10070711\nn10075693\nn10076224\nn10076604\nn10076957\nn10077593\nn10078131\nn10078719\nn10078806\nn10079399\nn10080869\nn10081204\nn10082043\nn10082687\nn10082997\nn10084295\nn10085869\nn10086383\nn10087434\nn10091450\nn10091564\nn10091651\nn10092488\nn10092643\nn10092794\nn10092978\nn10093475\nn10093818\nn10095769\nn10095869\nn10098245\nn10098517\nn10098624\nn10098710\nn10098862\nn10102800\nn10104064\nn10105733\nn10107303\nn10108018\nn10112129\nn10115430\nn10116702\nn10117739\nn10117851\nn10120330\nn10120671\nn10123122\nn10123844\nn10127689\nn10129825\nn10131151\nn10131815\nn10132035\nn10134178\nn10134982\nn10135129\nn10137825\nn10140597\nn10140929\nn10141364\nn10141732\nn10142391\nn10142747\nn10143172\nn10144338\nn10145239\nn10145340\nn10145480\nn10145590\nn10145774\nn10145902\nn10146002\nn10146104\nn10146416\nn10
146816\nn10147121\nn10147262\nn10147935\nn10148035\nn10150071\nn10150940\nn10151760\nn10152763\nn10153414\nn10153594\nn10155849\nn10157128\nn10159045\nn10159533\nn10160280\nn10164233\nn10164492\nn10165448\nn10167152\nn10167838\nn10168837\nn10169147\nn10173410\nn10173771\nn10174330\nn10174445\nn10175248\nn10178216\nn10182190\nn10183931\nn10185483\nn10185793\nn10186068\nn10186216\nn10187491\nn10187990\nn10188957\nn10189278\nn10191001\nn10192839\nn10194231\nn10195593\nn10198437\nn10200781\nn10202624\nn10203949\nn10205231\nn10205457\nn10207169\nn10208189\nn10208950\nn10209082\nn10209731\nn10210911\nn10212231\nn10212501\nn10215623\nn10216106\nn10221312\nn10222170\nn10223177\nn10225219\nn10225931\nn10226413\nn10227985\nn10229883\nn10233248\nn10235024\nn10235385\nn10236304\nn10237069\nn10237196\nn10237464\nn10237676\nn10241300\nn10242328\nn10243137\nn10243273\nn10243664\nn10247358\nn10247880\nn10249270\nn10249459\nn10252222\nn10253122\nn10253296\nn10253479\nn10253703\nn10258786\nn10259348\nn10259780\nn10259997\nn10260706\nn10260800\nn10261624\nn10262445\nn10262561\nn10262655\nn10263411\nn10263790\nn10267311\nn10267865\nn10274815\nn10275395\nn10276477\nn10277027\nn10279018\nn10280674\nn10282482\nn10282672\nn10283170\nn10288964\nn10289039\nn10289462\nn10290919\nn10291822\nn10293332\nn10296176\nn10296444\nn10297234\nn10297531\nn10297841\nn10298647\nn10298912\nn10299250\nn10300154\nn10300303\nn10300500\nn10303814\nn10304086\nn10304914\nn10305802\nn10308168\nn10308732\nn10313000\nn10313239\nn10313724\nn10314054\nn10314517\nn10314836\nn10315456\nn10315561\nn10316360\nn10317007\nn10317500\nn10318293\nn10318607\nn10320863\nn10321340\nn10323634\nn10324560\nn10325774\nn10327987\nn10328123\nn10328328\nn10331167\nn10332385\nn10332861\nn10333439\nn10333601\nn10333838\nn10334009\nn10339717\nn10340312\nn10341343\nn10341573\nn10342992\nn10343355\nn10345015\nn10346015\nn10347446\nn10348526\nn10353016\nn10355142\nn10355449\nn10355688\nn10356877\nn10357613\nn10359546\nn10360747\nn10362319\nn
10362557\nn10364198\nn10366276\nn10366966\nn10368291\nn10368528\nn10368624\nn10369317\nn10370955\nn10373390\nn10375052\nn10375314\nn10375402\nn10376523\nn10377021\nn10377185\nn10377291\nn10378026\nn10380672\nn10382710\nn10382825\nn10384392\nn10384496\nn10385566\nn10386984\nn10387196\nn10387324\nn10393909\nn10395073\nn10395828\nn10396106\nn10400108\nn10400437\nn10400618\nn10401331\nn10401639\nn10403876\nn10405694\nn10406266\nn10406391\nn10406765\nn10407310\nn10407954\nn10410246\nn10411551\nn10415037\nn10418735\nn10419472\nn10419785\nn10420507\nn10421016\nn10421470\nn10421956\nn10422405\nn10427764\nn10431625\nn10432189\nn10432441\nn10435169\nn10435988\nn10438842\nn10439373\nn10439851\nn10441037\nn10441962\nn10449664\nn10450161\nn10450303\nn10451450\nn10453184\nn10461060\nn10464052\nn10465451\nn10465831\nn10467179\nn10467395\nn10469874\nn10470779\nn10472129\nn10473917\nn10474645\nn10476467\nn10477713\nn10481268\nn10482220\nn10483138\nn10483799\nn10485883\nn10486166\nn10487182\nn10488656\nn10493685\nn10495756\nn10498816\nn10498986\nn10499232\nn10499355\nn10500217\nn10500419\nn10500603\nn10502329\nn10504206\nn10505613\nn10506915\nn10508141\nn10508710\nn10509063\nn10510245\nn10512372\nn10513823\nn10514429\nn10521100\nn10521662\nn10522035\nn10522759\nn10523341\nn10524076\nn10525436\nn10525617\nn10528023\nn10529231\nn10530150\nn10530383\nn10530959\nn10536416\nn10540114\nn10542608\nn10542761\nn10542888\nn10548537\nn10548681\nn10550369\nn10553235\nn10559288\nn10559508\nn10559996\nn10560106\nn10562135\nn10562283\nn10563314\nn10563403\nn10565667\nn10566072\nn10568358\nn10568608\nn10569179\nn10572706\nn10572889\nn10574538\nn10574840\nn10575463\nn10577284\nn10578021\nn10578471\nn10580030\nn10581890\nn10582746\nn10583387\nn10583790\nn10585077\nn10588074\nn10588357\nn10588965\nn10590146\nn10592811\nn10593521\nn10595164\nn10595647\nn10598181\nn10599806\nn10602470\nn10602985\nn10603851\nn10604380\nn10604979\nn10607291\nn10607478\nn10610465\nn10610850\nn10611267\nn10611613\nn10613996\
nn10618342\nn10620586\nn10620758\nn10622053\nn10624074\nn10624310\nn10624437\nn10624540\nn10627252\nn10628644\nn10629939\nn10630188\nn10631309\nn10633450\nn10634849\nn10635788\nn10638922\nn10639359\nn10639637\nn10642596\nn10644598\nn10645017\nn10646140\nn10649197\nn10652605\nn10655594\nn10657835\nn10661563\nn10665587\nn10665698\nn10667477\nn10667863\nn10669991\nn10671613\nn10671736\nn10672371\nn10672662\nn10674713\nn10675010\nn10678937\nn10679174\nn10680609\nn10680796\nn10682953\nn10685398\nn10686073\nn10686885\nn10688356\nn10689306\nn10690648\nn10692482\nn10693824\nn10694258\nn10696508\nn10698368\nn10699981\nn10701180\nn10701644\nn10701962\nn10702167\nn10707134\nn10707233\nn10709529\nn10711766\nn10718131\nn10719132\nn10721321\nn10726031\nn10727171\nn10727458\nn10728624\nn10730728\nn10732010\nn10734394\nn10734891\nn10737103\nn10738111\nn10739391\nn10740868\nn10741367\nn10744164\nn10745006\nn10746931\nn10747119\nn10748620\nn10750031\nn10750640\nn10751152\nn10753442\nn10754189\nn10755080\nn10755648\nn10756148\nn10757050\nn10757492\nn10761190\nn10763075\nn10763383\nn10763620\nn10765679\nn10772092\nn10773665\nn10780284\nn10780632\nn10782471\nn10782791\nn10782940\nn10787470\nn10791115\nn10791221\nn10792335\nn10792856\nn10793570\nn10802507\nn10804287\nn10806113\nn11448153\nn11487732\nn11508382\nn11524451\nn11532682\nn11533212\nn11536673\nn11537327\nn11542137\nn11542640\nn11544015\nn11545524\nn11545714\nn11547855\nn11552133\nn11552806\nn11552976\nn11599324\nn11600372\nn11601177\nn11601333\nn11601918\nn11602873\nn11603246\nn11603835\nn11608250\nn11609475\nn11609862\nn11610047\nn11610215\nn11610437\nn11611087\nn11611233\nn11611356\nn11611561\nn11611758\nn11612018\nn11612349\nn11612575\nn11613219\nn11613459\nn11614039\nn11614250\nn11614420\nn11614713\nn11615026\nn11615387\nn11615607\nn11615967\nn11616486\nn11616662\nn11617090\nn11617272\nn11617631\nn11618290\nn11618525\nn11618861\nn11619227\nn11619455\nn11620389\nn11620673\nn11621029\nn11621281\nn11621547\nn11621727\nn1162195
0\nn11622184\nn11622368\nn11622591\nn11622771\nn11623105\nn11623815\nn11623967\nn11624192\nn11624531\nn11625003\nn11625223\nn11625632\nn11625804\nn11626152\nn11626409\nn11626585\nn11626826\nn11627168\nn11627512\nn11627908\nn11628087\nn11628456\nn11628793\nn11630017\nn11631854\nn11632167\nn11632619\nn11634736\nn11635152\nn11635433\nn11635830\nn11636204\nn11636835\nn11639445\nn11640132\nn11643835\nn11644046\nn11644226\nn11644462\nn11645590\nn11645914\nn11646167\nn11646344\nn11646694\nn11647306\nn11647703\nn11650558\nn11652376\nn11653904\nn11654293\nn11655974\nn11656123\nn11658331\nn11658544\nn11660300\nn11661372\nn11661909\nn11662371\nn11664418\nn11665372\nn11666854\nn11669786\nn11669921\nn11672269\nn11672400\nn11675025\nn11676500\nn11678010\nn11680596\nn11682659\nn11686912\nn11689483\nn11690254\nn11690455\nn11691046\nn11691857\nn11692265\nn11692792\nn11693981\nn11694664\nn11695599\nn11695974\nn11698042\nn11699442\nn11700058\nn11701066\nn11703669\nn11704093\nn11704620\nn11705171\nn11705387\nn11705776\nn11706761\nn11707229\nn11707827\nn11709205\nn11709674\nn11710136\nn11710393\nn11710827\nn11711537\nn11711764\nn11712282\nn11714382\nn11715430\nn11715678\nn11717577\nn11719286\nn11720353\nn11720643\nn11720891\nn11721337\nn11722466\nn11722982\nn11723227\nn11723770\nn11724109\nn11725015\nn11725311\nn11725480\nn11725821\nn11725973\nn11726269\nn11726707\nn11727091\nn11727358\nn11727540\nn11727738\nn11728099\nn11728945\nn11730602\nn11731659\nn11732567\nn11733054\nn11733312\nn11733548\nn11735053\nn11736694\nn11736851\nn11737534\nn11748811\nn11752937\nn11753143\nn11753355\nn11753700\nn11754893\nn11756092\nn11756669\nn11756870\nn11757653\nn11757851\nn11758122\nn11758276\nn11758483\nn11758799\nn11759224\nn11759404\nn11759853\nn11760785\nn11761202\nn11761650\nn11762433\nn11769176\nn11769621\nn11769803\nn11770256\nn11772408\nn11772879\nn11773987\nn11774513\nn11777080\nn11778257\nn11779300\nn11780148\nn11781176\nn11782036\nn11782761\nn11783920\nn11784126\nn11784497\nn11785668\nn11786
131\nn11786539\nn11788727\nn11789066\nn11789589\nn11791341\nn11791569\nn11792029\nn11792341\nn11792742\nn11793779\nn11794024\nn11794519\nn11795049\nn11797321\nn11800236\nn11801891\nn11802586\nn11802800\nn11805544\nn11805956\nn11806219\nn11806679\nn11807108\nn11807525\nn11807979\nn11808299\nn11808468\nn11808721\nn11808932\nn11809094\nn11809271\nn11809437\nn11809594\nn11809754\nn11810358\nn11811473\nn11811706\nn11811921\nn11812094\nn11812910\nn11813077\nn11814584\nn11815491\nn11815721\nn11815918\nn11816121\nn11816336\nn11816649\nn11816829\nn11817914\nn11818069\nn11819509\nn11819912\nn11820965\nn11821184\nn11823436\nn11824146\nn11825351\nn11826198\nn11828577\nn11830906\nn11832214\nn11832480\nn11834654\nn11836722\nn11837970\nn11838916\nn11839568\nn11839823\nn11840067\nn11844371\nn11844892\nn11845557\nn11845793\nn11845913\nn11846765\nn11847169\nn11848479\nn11849467\nn11849871\nn11849983\nn11850521\nn11851258\nn11851578\nn11851839\nn11852028\nn11853356\nn11853813\nn11854479\nn11855274\nn11855553\nn11855842\nn11857875\nn11858077\nn11859275\nn11859472\nn11859737\nn11860555\nn11861641\nn11861853\nn11862835\nn11865874\nn11866248\nn11869689\nn11870418\nn11870747\nn11872146\nn11874081\nn11875523\nn11875691\nn11875938\nn11876204\nn11876432\nn11876634\nn11876803\nn11877193\nn11877283\nn11877646\nn11878101\nn11879054\nn11879722\nn11879895\nn11881189\nn11882074\nn11882426\nn11883328\nn11885856\nn11887119\nn11888800\nn11889619\nn11890150\nn11891175\nn11892029\nn11892637\nn11892817\nn11893640\nn11894327\nn11894558\nn11894770\nn11895092\nn11896722\nn11897116\nn11898775\nn11900569\nn11901294\nn11901597\nn11901759\nn11901977\nn11902200\nn11902389\nn11902709\nn11902982\nn11903671\nn11904109\nn11905392\nn11905749\nn11906917\nn11907100\nn11907689\nn11908549\nn11908846\nn11910271\nn11910460\nn11915214\nn11915658\nn11915899\nn11916467\nn11916696\nn11918286\nn11918473\nn11919447\nn11919975\nn11920133\nn11921395\nn11923174\nn11923397\nn11923637\nn11924445\nn11924849\nn11925303\nn11925898\nn119
26365\nn11926833\nn11927215\nn11928352\nn11928858\nn11929743\nn11931540\nn11931918\nn11933546\nn11933728\nn11934616\nn11934807\nn11935330\nn11935469\nn11939180\nn11939491\nn11939699\nn11940006\nn11940599\nn11941924\nn11943407\nn11943660\nn11943992\nn11944196\nn11944954\nn11945367\nn11945514\nn11945783\nn11946727\nn11946918\nn11947629\nn11947802\nn11948264\nn11948864\nn11949015\nn11949402\nn11950345\nn11950686\nn11950877\nn11951511\nn11952541\nn11953038\nn11953610\nn11953884\nn11954161\nn11954345\nn11954642\nn11955153\nn11955896\nn11956348\nn11956850\nn11957678\nn11958080\nn11959632\nn11959862\nn11960245\nn11961100\nn11961446\nn11961871\nn11962272\nn11962667\nn11963932\nn11965218\nn11965627\nn11966083\nn11966215\nn11966617\nn11966896\nn11968704\nn11968931\nn11969166\nn11969607\nn11970586\nn11971248\nn11971406\nn11971783\nn11971927\nn11972291\nn11972759\nn11973341\nn11977303\nn11978233\nn11978551\nn11978713\nn11978961\nn11979527\nn11979715\nn11979964\nn11980318\nn11980682\nn11981192\nn11982115\nn11984144\nn11984542\nn11986511\nn11987126\nn11988596\nn11989087\nn11989393\nn11989869\nn11990167\nn11990313\nn11991263\nn11992806\nn11995092\nn11998888\nn12001707\nn12002428\nn12003167\nn12003696\nn12004547\nn12005656\nn12006766\nn12006930\nn12007196\nn12007406\nn12008252\nn12008487\nn12008749\nn12009420\nn12011620\nn12012111\nn12014085\nn12015221\nn12015525\nn12015959\nn12016567\nn12018760\nn12019035\nn12019827\nn12020184\nn12020507\nn12020736\nn12020941\nn12022054\nn12023108\nn12023407\nn12023726\nn12024445\nn12024690\nn12026018\nn12026476\nn12026981\nn12027222\nn12027658\nn12029635\nn12030908\nn12031139\nn12031927\nn12033709\nn12034141\nn12034384\nn12035631\nn12036939\nn12037499\nn12037691\nn12038038\nn12038406\nn12038585\nn12038898\nn12039317\nn12041446\nn12043444\nn12043673\nn12043836\nn12044467\nn12046028\nn12046428\nn12046815\nn12047345\nn12047884\nn12048056\nn12048399\nn12049282\nn12049562\nn12050533\nn12050959\nn12051103\nn12052447\nn12052787\nn12053405\nn12053690\nn1
2055516\nn12056217\nn12056601\nn12056758\nn12057211\nn12057447\nn12057660\nn12058192\nn12058630\nn12058822\nn12059314\nn12059625\nn12061380\nn12061614\nn12062468\nn12062626\nn12062781\nn12063639\nn12064389\nn12064591\nn12065316\nn12065777\nn12066018\nn12066261\nn12066630\nn12067193\nn12068432\nn12069217\nn12069679\nn12070016\nn12070381\nn12070583\nn12070712\nn12071744\nn12072722\nn12073554\nn12073991\nn12074408\nn12074867\nn12075010\nn12075151\nn12075299\nn12075830\nn12076223\nn12076577\nn12076852\nn12077944\nn12078172\nn12079120\nn12079963\nn12080395\nn12080820\nn12081215\nn12083113\nn12083591\nn12083847\nn12084158\nn12084555\nn12084890\nn12085267\nn12085664\nn12086012\nn12086192\nn12086539\nn12086778\nn12087961\nn12088223\nn12090890\nn12091213\nn12091377\nn12091550\nn12091953\nn12092262\nn12092417\nn12093329\nn12093600\nn12094612\nn12095020\nn12095647\nn12097396\nn12098403\nn12098524\nn12099342\nn12101870\nn12102133\nn12104238\nn12104501\nn12104734\nn12105125\nn12107710\nn12107970\nn12108871\nn12109365\nn12110085\nn12110778\nn12112008\nn12112609\nn12112918\nn12113195\nn12115180\nn12116429\nn12119238\nn12121610\nn12122725\nn12123741\nn12124627\nn12124818\nn12126084\nn12127460\nn12127768\nn12128071\nn12129134\nn12133462\nn12133682\nn12134025\nn12135049\nn12136392\nn12137120\nn12137569\nn12139575\nn12140903\nn12141167\nn12141385\nn12142085\nn12144313\nn12144580\nn12145477\nn12146311\nn12146654\nn12148757\nn12150722\nn12151615\nn12152532\nn12152722\nn12154773\nn12155009\nn12157056\nn12158031\nn12158443\nn12159055\nn12159388\nn12160303\nn12160490\nn12160857\nn12161056\nn12161969\nn12162181\nn12162425\nn12164363\nn12164656\nn12164881\nn12165170\nn12165758\nn12166128\nn12166424\nn12166793\nn12167075\nn12167436\nn12167602\nn12168565\nn12171098\nn12171316\nn12171966\nn12172364\nn12172481\nn12172906\nn12173069\nn12173664\nn12173912\nn12174311\nn12174521\nn12174926\nn12178896\nn12179122\nn12180168\nn12180885\nn12184912\nn12185859\nn12187247\nn12187891\nn12189429\nn12189987\n
n12190410\nn12190869\nn12194147\nn12195533\nn12196336\nn12196527\nn12196694\nn12198286\nn12199790\nn12200143\nn12201331\nn12201580\nn12202936\nn12203529\nn12203896\nn12204032\nn12204175\nn12205694\nn12214789\nn12215022\nn12215579\nn12217453\nn12223569\nn12223764\nn12224978\nn12225563\nn12227658\nn12228229\nn12228387\nn12230794\nn12237486\nn12237641\nn12238913\nn12240477\nn12242409\nn12243109\nn12244153\nn12244650\nn12244819\nn12245319\nn12246232\nn12249542\nn12252168\nn12256920\nn12257570\nn12258885\nn12260799\nn12261571\nn12261808\nn12262018\nn12262185\nn12263038\nn12263204\nn12263738\nn12263987\nn12264512\nn12265600\nn12266217\nn12266796\nn12267411\nn12267677\nn12268246\nn12269241\nn12269406\nn12270027\nn12270741\nn12270946\nn12271933\nn12272239\nn12272883\nn12273114\nn12273344\nn12273768\nn12273939\nn12274358\nn12274863\nn12275131\nn12275675\nn12275888\nn12276110\nn12276477\nn12276628\nn12276872\nn12277150\nn12277578\nn12277800\nn12278107\nn12278371\nn12278650\nn12278865\nn12279458\nn12279772\nn12280060\nn12281241\nn12281788\nn12281974\nn12282235\nn12282527\nn12282737\nn12282933\nn12283147\nn12283542\nn12284262\nn12284821\nn12285369\nn12285900\nn12286826\nn12286988\nn12287836\nn12288005\nn12288823\nn12289433\nn12290748\nn12291143\nn12291959\nn12293723\nn12294124\nn12294331\nn12294723\nn12294871\nn12295033\nn12295429\nn12295796\nn12296432\nn12300840\nn12301180\nn12301445\nn12302071\nn12302248\nn12302565\nn12303083\nn12303462\nn12304115\nn12304703\nn12304899\nn12305089\nn12305293\nn12305475\nn12305819\nn12305986\nn12306089\nn12306717\nn12307076\nn12307240\nn12307756\nn12309277\nn12311579\nn12312728\nn12315598\nn12315999\nn12316444\nn12316572\nn12317296\nn12318378\nn12318965\nn12319204\nn12319414\nn12320010\nn12320806\nn12321077\nn12322099\nn12322501\nn12322699\nn12325234\nn12328398\nn12328567\nn12329260\nn12329473\nn12330469\nn12330587\nn12330891\nn12331655\nn12332030\nn12332555\nn12333053\nn12333530\nn12333771\nn12334293\nn12334891\nn12336092\nn12336224\nn12336333
\nn12336727\nn12336973\nn12337391\nn12337617\nn12338258\nn12338454\nn12338655\nn12338796\nn12339831\nn12340383\nn12340755\nn12342299\nn12342498\nn12342852\nn12343480\nn12344283\nn12344483\nn12344700\nn12344837\nn12345280\nn12345899\nn12346813\nn12347158\nn12350758\nn12352287\nn12352639\nn12352844\nn12352990\nn12353203\nn12353754\nn12356023\nn12356960\nn12357485\nn12360108\nn12360684\nn12360958\nn12361135\nn12361946\nn12362274\nn12362668\nn12367611\nn12368028\nn12368257\nn12368451\nn12369309\nn12371439\nn12373100\nn12374418\nn12374862\nn12377198\nn12383894\nn12384037\nn12384227\nn12384375\nn12384839\nn12385429\nn12385566\nn12387633\nn12387839\nn12388143\nn12388858\nn12388989\nn12389130\nn12389501\nn12390099\nn12390314\nn12392549\nn12393269\nn12397431\nn12399132\nn12399384\nn12400489\nn12400720\nn12401684\nn12402051\nn12402348\nn12402596\nn12402840\nn12403994\nn12405714\nn12406488\nn12406715\nn12406902\nn12407079\nn12407222\nn12407715\nn12407890\nn12408077\nn12408717\nn12409231\nn12409470\nn12409840\nn12412355\nn12412606\nn12413165\nn12413301\nn12413419\nn12413642\nn12413880\nn12414035\nn12414159\nn12414449\nn12414818\nn12414932\nn12415595\nn12416073\nn12416703\nn12418221\nn12420722\nn12421137\nn12421683\nn12421917\nn12422129\nn12426623\nn12426749\nn12427184\nn12427391\nn12427566\nn12427757\nn12428076\nn12428412\nn12428747\nn12429352\nn12432356\nn12433081\nn12433178\nn12433769\nn12435152\nn12435649\nn12435777\nn12437513\nn12437769\nn12437930\nn12441183\nn12441390\nn12441958\nn12443323\nn12446519\nn12448700\nn12449296\nn12449526\nn12450344\nn12450840\nn12451070\nn12451240\nn12451399\nn12451915\nn12452836\nn12453186\nn12454159\nn12454436\nn12454556\nn12454705\nn12454949\nn12455950\nn12457091\nn12458550\nn12459629\nn12460697\nn12460957\nn12461109\nn12461466\nn12461673\nn12462805\nn12463134\nn12465557\nn12466727\nn12469517\nn12472024\nn12473608\nn12473840\nn12474167\nn12475035\nn12475242\nn12476510\nn12477163\nn12477401\nn12477583\nn12477747\nn12478768\nn12479537\nn124804
56\nn12480895\nn12481458\nn12482437\nn12482668\nn12482893\nn12483427\nn12483625\nn12483841\nn12484784\nn12485653\nn12485981\nn12486574\nn12489815\nn12491017\nn12491826\nn12492106\nn12493208\nn12494794\nn12495146\nn12495895\nn12496427\nn12496949\nn12498055\nn12499979\nn12501202\nn12504570\nn12504783\nn12506341\nn12506991\nn12508309\nn12509476\nn12509665\nn12513172\nn12513613\nn12513933\nn12514138\nn12515711\nn12515925\nn12516828\nn12517445\nn12517642\nn12519089\nn12519563\nn12521394\nn12523475\nn12527738\nn12528549\nn12528974\nn12529220\nn12530629\nn12530818\nn12532564\nn12537253\nn12539306\nn12540250\nn12544539\nn12545635\nn12546183\nn12546617\nn12546962\nn12547215\nn12547503\nn12548280\nn12549192\nn12552309\nn12554911\nn12556656\nn12557064\nn12557438\nn12557556\nn12557681\nn12558230\nn12558425\nn12560282\nn12560621\nn12560775\nn12561169\nn12562785\nn12564083\nn12566954\nn12568186\nn12570394\nn12570703\nn12570972\nn12571781\nn12573474\nn12574320\nn12574866\nn12575322\nn12575812\nn12576323\nn12577895\nn12578626\nn12578916\nn12579038\nn12580654\nn12580896\nn12582231\nn12582665\nn12582846\nn12583126\nn12583401\nn12584191\nn12584715\nn12585629\nn12587132\nn12587803\nn12588320\nn12588780\nn12590232\nn12590499\nn12591017\nn12591351\nn12593994\nn12595699\nn12595964\nn12596148\nn12596345\nn12596709\nn12596849\nn12597134\nn12597466\nn12597798\nn12598027\nn12599435\nn12602262\nn12602980\nn12603449\nn12604228\nn12606438\nn12606545\nn12607456\nn12610328\nn12614477\nn12615232\nn12620196\nn12620546\nn12620969\nn12621410\nn12622297\nn12622875\nn12623077\nn12624381\nn12624568\nn12625383\nn12627119\nn12628986\nn12629305\nn12629666\nn12630763\nn12631331\nn12631932\nn12632335\nn12633638\nn12633994\nn12634211\nn12634429\nn12634734\nn12634986\nn12635532\nn12635744\nn12635955\nn12636224\nn12637123\nn12638218\nn12638753\nn12638964\nn12639584\nn12640839\nn12641007\nn12641413\nn12642090\nn12642200\nn12643313\nn12643473\nn12644902\nn12645174\nn12646072\nn12646397\nn12646605\nn12646740\nn1264
7560\nn12647893\nn12648045\nn12648888\nn12649065\nn12649317\nn12649539\nn12650379\nn12650556\nn12651229\nn12651611\nn12651821\nn12653218\nn12655869\nn12656369\nn12656685\nn12657082\nn12658118\nn12658308\nn12658481\nn12659064\nn12659356\nn12659539\nn12662772\nn12663023\nn12663359\nn12665048\nn12665271\nn12665857\nn12666965\nn12670758\nn12671651\nn12674895\nn12675299\nn12675876\nn12676534\nn12676703\nn12679593\nn12680402\nn12680864\nn12681893\nn12682411\nn12682668\nn12683096\nn12683407\nn12683571\nn12683791\nn12684379\nn12685431\nn12685831\nn12686077\nn12686274\nn12686676\nn12687044\nn12687462\nn12687698\nn12687957\nn12688716\nn12691428\nn12691661\nn12694486\nn12695975\nn12696492\nn12698598\nn12700088\nn12703190\nn12703383\nn12703557\nn12703856\nn12704343\nn12706410\nn12707781\nn12708293\nn12708654\nn12708941\nn12709103\nn12709688\nn12709901\nn12710295\nn12710415\nn12710577\nn12710693\nn12711596\nn12711817\nn12711984\nn12713063\nn12713866\nn12714755\nn12717072\nn12717224\nn12719684\nn12719944\nn12720200\nn12723610\nn12724942\nn12725521\nn12725738\nn12726159\nn12726670\nn12727101\nn12727518\nn12729315\nn12729521\nn12729729\nn12731029\nn12731401\nn12731835\nn12732009\nn12732491\nn12732756\nn12732966\nn12733218\nn12733428\nn12733647\nn12733870\nn12734070\nn12737383\nn12737898\nn12739332\nn12741222\nn12741792\nn12743009\nn12743352\nn12744387\nn12745386\nn12746884\nn12749049\nn12749456\nn12749679\nn12749852\nn12752205\nn12753007\nn12753245\nn12753573\nn12753762\nn12754003\nn12754468\nn12754648\nn12754781\nn12754981\nn12755225\nn12755387\nn12755727\nn12756457\nn12757303\nn12757458\nn12757816\nn12759273\nn12761284\nn12762049\nn12762896\nn12764202\nn12765115\nn12766595\nn12766869\nn12767648\nn12768682\nn12771192\nn12771390\nn12771597\nn12772753\nn12772908\nn12773651\nn12774299\nn12774641\nn12775919\nn12777680\nn12778398\nn12778605\nn12779603\nn12779851\nn12781940\nn12782530\nn12782915\nn12784889\nn12785724\nn12785889\nn12788854\nn12789054\nn12790430\nn12791064\nn12791329\nn12
793015\nn12793284\nn12793494\nn12794135\nn12794367\nn12794985\nn12795352\nn12795555\nn12796022\nn12797860\nn12799776\nn12801520\nn12801781\nn12803754\nn12805146\nn12805561\nn12806015\nn12806732\nn12807251\nn12807409\nn12807773\nn12808007\nn12810595\nn12811027\nn12812478\nn12813189\nn12814643\nn12815198\nn12816508\nn12817464\nn12817694\nn12818346\nn12818966\nn12819728\nn12820853\nn12821505\nn12821895\nn12822115\nn12822769\nn12822955\nn12823717\nn12823859\nn12824053\nn12825497\nn12827270\nn12827537\nn12828220\nn12828379\nn12828791\nn12830222\nn12830568\nn12831932\nn12832315\nn12832538\nn12833149\nn12833985\nn12834798\nn12835331\nn12836212\nn12836337\nn12836508\nn12836862\nn12837803\nn12840362\nn12840749\nn12841007\nn12841193\nn12841354\nn12843557\nn12843970\nn12844939\nn12845413\nn12847008\nn12847374\nn12847927\nn12848499\nn12849061\nn12849279\nn12849416\nn12849952\nn12850168\nn12850336\nn12850906\nn12851469\nn12853482\nn12854048\nn12854600\nn12855494\nn12856091\nn12856287\nn12856479\nn12856680\nn12857779\nn12858150\nn12858397\nn12858618\nn12858871\nn12859986\nn12860365\nn12861345\nn12861541\nn12861892\nn12862512\nn12863624\nn12864160\nn12865037\nn12865562\nn12865708\nn12865824\nn12866002\nn12866162\nn12866459\nn12866635\nn12867826\nn12868019\nn12869061\nn12869478\nn12870535\nn12870682\nn12870891\nn12872458\nn12875269\nn12877838\nn12879527\nn12879963\nn12880244\nn12880462\nn12882779\nn12882945\nn12884100\nn12884260\nn12887293\nn12889219\nn12889713\nn12890265\nn12890490\nn12890685\nn12890928\nn12891093\nn12891305\nn12891469\nn12891643\nn12893463\nn12893993\nn12895811\nn12898774\nn12899537\nn12899752\nn12901724\nn12902662\nn12904314\nn12905412\nn12906214\nn12906498\nn12908093\nn12908645\nn12909421\nn12909917\nn12911079\nn12911440\nn12911673\nn12912670\nn12913791\nn12914923\nn12915568\nn12915811\nn12916179\nn12916511\nn12917901\nn12918609\nn12919403\nn12919646\nn12919847\nn12920204\nn12920955\nn12921868\nn12922763\nn12924623\nn12925179\nn12926480\nn12926689\nn12927013\nn
12927494\nn12928071\nn12929403\nn12931542\nn12932173\nn12932365\nn12932966\nn12934036\nn12934174\nn12934479\nn12934985\nn12935609\nn12937130\nn12938193\nn12939282\nn12939874\nn12940226\nn12940609\nn12942395\nn12942572\nn12946849\nn12947313\nn12947544\nn12947895\nn12948053\nn12948251\nn12948495\nn12950126\nn12950314\nn12951146\nn12951835\nn12953206\nn12953484\nn12954799\nn12956367\nn12957924\nn12961879\nn12963628\nn12964920\nn12965626\nn12966945\nn12969131\nn12969425\nn12973443\nn12974987\nn12975804\nn12979829\nn12980840\nn12981443\nn12982468\nn12983048\nn12985420\nn12985773\nn12985857\nn12986227\nn12987056\nn12988158\nn12989938\nn12991184\nn12991837\nn12992177\nn12992868\nn12995601\nn12997654\nn12997919\nn12998815\nn13000891\nn13001041\nn13001206\nn13001366\nn13001529\nn13001930\nn13002750\nn13002925\nn13003061\nn13003254\nn13003522\nn13003712\nn13004423\nn13005329\nn13005984\nn13006171\nn13006631\nn13006894\nn13007034\nn13007417\nn13008315\nn13009085\nn13009429\nn13011595\nn13012253\nn13012973\nn13013534\nn13013764\nn13014409\nn13014741\nn13017102\nn13017240\nn13019835\nn13020191\nn13020964\nn13021689\nn13022210\nn13022709\nn13023134\nn13024012\nn13025647\nn13028611\nn13029326\nn13029760\nn13032115\nn13032381\nn13032618\nn13032923\nn13033134\nn13033577\nn13034062\nn13035241\nn13035707\nn13035925\nn13037406\nn13038068\nn13038744\nn13039349\nn13040303\nn13040629\nn13041312\nn13043926\nn13044375\nn13044778\nn13046669\nn13049953\nn13050397\nn13052670\nn13052931\nn13053608\nn13054073\nn13054560\nn13055423\nn13055577\nn13055949\nn13060190\nn13061348\nn13062421\nn13065089\nn13066448\nn13068255\nn13072528\nn13074619\nn13077033\nn13077295\nn13079073\nn13083023\nn13084184\nn13084834\nn13085747\nn13090871\nn13091620\nn13094273\nn13099999\nn13100677\nn13102775\nn13103877\nn13104059\nn13107694\nn13107891\nn13108131\nn13108323\nn13108481\nn13108545\nn13108841\nn13111881\nn13121349\nn13122364\nn13123431\nn13125117\nn13126856\nn13127843\nn13128976\nn13130726\nn13131028\nn13131618\
nn13132338\nn13132656\nn13133613\nn13133932\nn13134947\nn13135832\nn13136316\nn13136556\nn13137409\nn13138308\nn13138842\nn13139055\nn13141415\nn13141564\nn13142504\nn13145040\nn13145250\nn13146583\nn13147270\nn13147386\nn13148208\nn13150894\nn13154388\nn13154494\nn13155095\nn13155305\nn13155611\nn13157684\nn13158512\nn13160604\nn13163991\nn13172923\nn13173488\nn13173882\nn13177048\nn13177884\nn13180534\nn13180875\nn13181055\nn13181811\nn13183056\nn13183489\nn13185269\nn13187367\nn13188096\nn13190747\nn13192625\nn13193642\nn13193856\nn13194036\nn13194572\nn13195341\nn13196003\nn13197274\nn13197507\nn13198914\nn13199717\nn13199970\nn13200651\nn13201969\nn13205058\nn13206178\nn13206817\nn13207094\nn13207335\nn13208302\nn13209808\nn13211020\nn13213066\nn13214340\nn13215586\nn13219422\nn13219833\nn13219976\nn13220122\nn13221529\nn13223588\nn13223710\nn13223843\nn13226871\nn13229543\nn13230662\nn13231078\nn13232779\nn13234678\nn13235159\nn13235503\nn13237188\nn13238375\nn13238988\nn13579829\nn13653902\nn13862407\nn13863020\nn13863771\nn13864035\nn13865298\nn13865483\nn13865904\nn13868944\nn13869547\nn13869788\nn13869896\nn13872592\nn13872822\nn13873502\nn13873917\nn13875392\nn13875571\nn13876561\nn13878306\nn13879049\nn13879320\nn13880994\nn13881644\nn13882201\nn13882276\nn13882563\nn13886260\nn13895262\nn13896100\nn13896217\nn13897996\nn13898207\nn13900287\nn13900422\nn13901211\nn13901321\nn13901858\nn13902048\nn13902336\nn13905792\nn13907272\nn13908201\nn13908580\nn13912260\nn13912540\nn13914608\nn13915023\nn13915113\nn13916721\nn13918274\nn13918387\nn13919547\nn13919919\nn13926786\nn14131950\nn14564779\nn14685296\nn14696793\nn14698884\nn14765422\nn14785065\nn14810561\nn14820180\nn14844693\nn14858292\nn14900342\nn14908027\nn14915184\nn14919819\nn14973585\nn14974264\nn14976759\nn14976871\nn14977504\nn15019030\nn15062057\nn15067877\nn15075141\nn15086247\nn15089258\nn15090065\nn15091129\nn15091304\nn15091473\nn15091669\nn15091846\nn15092059\nn15092227\nn15092650\nn1509294
2\nn15093137\nn15093298\nn15102455\nn15102894\n"
  },
  {
    "path": "timm/data/_info/imagenet21k_goog_synsets.txt",
    "content": "n00004475\nn00005787\nn00006024\nn00006484\nn00007846\nn00015388\nn00017222\nn00021265\nn00021939\nn00120010\nn00141669\nn00288000\nn00288190\nn00288384\nn00324978\nn00326094\nn00433458\nn00433661\nn00433802\nn00434075\nn00439826\nn00440039\nn00440218\nn00440382\nn00440509\nn00440643\nn00440747\nn00440941\nn00441073\nn00441824\nn00442115\nn00442437\nn00442847\nn00442981\nn00443231\nn00443375\nn00443517\nn00443692\nn00443803\nn00443917\nn00444142\nn00444340\nn00444490\nn00444651\nn00444846\nn00444937\nn00445055\nn00445226\nn00445351\nn00445685\nn00445802\nn00446311\nn00446411\nn00446493\nn00446632\nn00446804\nn00446980\nn00447073\nn00447221\nn00447361\nn00447463\nn00447540\nn00447957\nn00448126\nn00448232\nn00448466\nn00448640\nn00448748\nn00448872\nn00448958\nn00449054\nn00449168\nn00449295\nn00449517\nn00449695\nn00449796\nn00449892\nn00449977\nn00450070\nn00450335\nn00450700\nn00450866\nn00450998\nn00451186\nn00451370\nn00451563\nn00451635\nn00451768\nn00451866\nn00452034\nn00452152\nn00452293\nn00452734\nn00452864\nn00453126\nn00453313\nn00453396\nn00453478\nn00453631\nn00453935\nn00454237\nn00454395\nn00454493\nn00454624\nn00454855\nn00454983\nn00455076\nn00455173\nn00456465\nn00463246\nn00463543\nn00464277\nn00464478\nn00464651\nn00464894\nn00466273\nn00466377\nn00466524\nn00466630\nn00466712\nn00466880\nn00467320\nn00467536\nn00467719\nn00467995\nn00468299\nn00468480\nn00469651\nn00470554\nn00470682\nn00470830\nn00470966\nn00471437\nn00471613\nn00474568\nn00474657\nn00474769\nn00474881\nn00475014\nn00475142\nn00475273\nn00475403\nn00475535\nn00475661\nn00475787\nn00476140\nn00476235\nn00476389\nn00477392\nn00477639\nn00477827\nn00478262\nn00479076\nn00479440\nn00479616\nn00479734\nn00479887\nn00480211\nn00480366\nn00480508\nn00480885\nn00480993\nn00481803\nn00481938\nn00482122\nn00482298\nn00483205\nn00483313\nn00483409\nn00483508\nn00483605\nn00483705\nn00483848\nn00523513\nn00812526\nn00825773\nn00887544\nn01035504\nn01035667\nn01055165\nn013
14388\nn01314663\nn01314781\nn01314910\nn01315213\nn01315330\nn01315581\nn01315805\nn01316422\nn01316579\nn01316734\nn01316949\nn01317089\nn01317294\nn01317391\nn01317541\nn01317813\nn01317916\nn01318053\nn01318279\nn01318381\nn01318478\nn01318660\nn01318894\nn01319001\nn01319187\nn01319467\nn01319685\nn01320872\nn01321123\nn01321230\nn01321456\nn01321579\nn01321770\nn01321854\nn01322221\nn01322343\nn01322508\nn01322604\nn01322685\nn01322898\nn01322983\nn01323068\nn01323155\nn01323261\nn01323355\nn01323493\nn01323599\nn01323781\nn01324305\nn01324431\nn01324610\nn01324799\nn01324916\nn01325060\nn01326291\nn01327909\nn01329186\nn01330126\nn01330497\nn01332181\nn01333082\nn01333483\nn01333610\nn01334217\nn01334690\nn01335218\nn01337191\nn01337734\nn01338685\nn01339083\nn01339336\nn01339471\nn01339801\nn01340014\nn01340522\nn01340785\nn01340935\nn01341090\nn01342269\nn01347583\nn01349735\nn01350226\nn01350701\nn01351170\nn01351315\nn01357328\nn01357507\nn01358572\nn01359762\nn01362336\nn01363719\nn01365474\nn01365885\nn01366700\nn01367772\nn01368672\nn01369358\nn01369484\nn01374703\nn01374846\nn01375204\nn01376237\nn01376437\nn01376543\nn01377278\nn01377510\nn01377694\nn01378545\nn01379389\nn01380610\nn01380754\nn01381044\nn01382033\nn01384084\nn01384164\nn01384687\nn01385017\nn01385330\nn01386007\nn01386182\nn01386354\nn01387065\nn01389507\nn01390123\nn01390763\nn01392275\nn01392380\nn01393486\nn01394040\nn01394492\nn01394771\nn01395254\nn01396048\nn01396617\nn01397114\nn01397690\nn01397871\nn01400247\nn01400391\nn01402600\nn01403457\nn01404365\nn01404495\nn01405007\nn01405616\nn01407798\nn01410457\nn01411450\nn01412694\nn01413457\nn01414216\nn01415626\nn01415920\nn01416213\nn01418498\nn01418620\nn01419332\nn01419573\nn01419888\nn01421333\nn01421807\nn01422185\nn01422335\nn01422450\nn01423302\nn01423617\nn01424420\nn01425223\nn01427399\nn01429172\nn01438208\nn01438581\nn01439121\nn01439514\nn01439808\nn01440160\nn01440242\nn01440467\nn01440764\nn01441117\nn01441272\nn0
1441425\nn01441910\nn01442450\nn01442710\nn01442972\nn01443243\nn01443537\nn01443831\nn01444339\nn01444783\nn01445429\nn01445593\nn01445857\nn01446152\nn01446589\nn01446760\nn01447139\nn01447331\nn01447658\nn01447946\nn01448291\nn01448594\nn01448951\nn01449374\nn01449712\nn01449980\nn01450661\nn01450950\nn01451115\nn01451295\nn01451426\nn01451863\nn01452345\nn01453087\nn01453475\nn01453742\nn01454545\nn01454856\nn01455317\nn01455461\nn01455778\nn01456137\nn01456454\nn01456756\nn01457082\nn01457407\nn01457852\nn01458746\nn01458842\nn01459791\nn01460303\nn01461315\nn01461646\nn01462042\nn01462544\nn01462803\nn01464844\nn01466257\nn01467336\nn01467804\nn01468238\nn01468712\nn01469103\nn01469723\nn01470145\nn01470479\nn01470733\nn01470895\nn01471682\nn01472303\nn01472502\nn01473806\nn01474283\nn01474864\nn01475232\nn01475940\nn01476418\nn01477080\nn01477525\nn01477875\nn01478511\nn01478969\nn01479213\nn01479820\nn01480106\nn01480516\nn01480880\nn01481331\nn01481498\nn01482071\nn01482330\nn01483021\nn01483522\nn01483830\nn01484097\nn01484285\nn01484447\nn01484562\nn01484850\nn01485479\nn01486010\nn01486540\nn01486838\nn01487506\nn01488038\nn01488918\nn01489501\nn01489709\nn01489920\nn01490112\nn01490360\nn01490670\nn01491006\nn01491361\nn01491661\nn01491874\nn01492357\nn01492569\nn01492708\nn01492860\nn01493146\nn01493541\nn01493829\nn01494041\nn01494475\nn01494757\nn01494882\nn01495006\nn01495493\nn01495701\nn01496331\nn01497118\nn01497413\nn01497738\nn01498041\nn01498406\nn01498699\nn01498989\nn01499396\nn01499732\nn01500091\nn01500476\nn01500854\nn01501160\nn01501641\nn01501777\nn01501948\nn01502101\nn01503061\nn01503976\nn01504179\nn01504344\nn01514668\nn01514752\nn01514859\nn01514926\nn01515078\nn01515217\nn01515303\nn01516212\nn01517389\nn01517565\nn01517966\nn01518878\nn01519563\nn01519873\nn01520576\nn01521399\nn01521756\nn01522450\nn01523105\nn01524359\nn01524761\nn01525720\nn01526521\nn01526766\nn01527194\nn01527347\nn01527617\nn01527917\nn01528396\nn01528654\n
n01528845\nn01529672\nn01530439\nn01530575\nn01531178\nn01531344\nn01531512\nn01531639\nn01531811\nn01531971\nn01532325\nn01532511\nn01532829\nn01533000\nn01533339\nn01533481\nn01533651\nn01533893\nn01534155\nn01534433\nn01534582\nn01534762\nn01535140\nn01535469\nn01535690\nn01536035\nn01536186\nn01536334\nn01536644\nn01536780\nn01537134\nn01537544\nn01537895\nn01538059\nn01538200\nn01538362\nn01538630\nn01538955\nn01539272\nn01539573\nn01539925\nn01540090\nn01540233\nn01540566\nn01540832\nn01541102\nn01541386\nn01541760\nn01541922\nn01542168\nn01542433\nn01542786\nn01543175\nn01543383\nn01543632\nn01543936\nn01544208\nn01544389\nn01544704\nn01545574\nn01546039\nn01546506\nn01546921\nn01547832\nn01548301\nn01548492\nn01548694\nn01548865\nn01549053\nn01549430\nn01549641\nn01549886\nn01550172\nn01550761\nn01551080\nn01551300\nn01551711\nn01552034\nn01552333\nn01552813\nn01553142\nn01553527\nn01553762\nn01554017\nn01554448\nn01555004\nn01555305\nn01555809\nn01556182\nn01556514\nn01557185\nn01557962\nn01558149\nn01558307\nn01558461\nn01558594\nn01558765\nn01558993\nn01559160\nn01559477\nn01559639\nn01559804\nn01560105\nn01560280\nn01560419\nn01560636\nn01560793\nn01560935\nn01561181\nn01561452\nn01561732\nn01562014\nn01562265\nn01562451\nn01563128\nn01563449\nn01563746\nn01563945\nn01564101\nn01564217\nn01564394\nn01564773\nn01564914\nn01565078\nn01565345\nn01565599\nn01565930\nn01566207\nn01566645\nn01567133\nn01567678\nn01567879\nn01568132\nn01568294\nn01568720\nn01568892\nn01569060\nn01569262\nn01569423\nn01569566\nn01569836\nn01569971\nn01570267\nn01570421\nn01570676\nn01570839\nn01571410\nn01571904\nn01572328\nn01572489\nn01572654\nn01572782\nn01573074\nn01573240\nn01573360\nn01573627\nn01573898\nn01574045\nn01574390\nn01574560\nn01574801\nn01575117\nn01575401\nn01575745\nn01576076\nn01576358\nn01576695\nn01577035\nn01577458\nn01577659\nn01577941\nn01578180\nn01578575\nn01579028\nn01579149\nn01579260\nn01579410\nn01579578\nn01579729\nn01580077\nn01580379\nn01580490
\nn01580772\nn01580870\nn01581166\nn01581434\nn01581730\nn01581874\nn01581984\nn01582220\nn01582398\nn01582498\nn01582856\nn01583209\nn01583495\nn01583828\nn01584225\nn01584695\nn01584853\nn01585121\nn01585287\nn01585422\nn01585715\nn01586020\nn01586374\nn01586941\nn01587278\nn01587526\nn01587834\nn01588002\nn01588431\nn01588725\nn01588996\nn01589286\nn01589718\nn01589893\nn01590220\nn01591005\nn01591123\nn01591301\nn01591697\nn01592084\nn01592257\nn01592387\nn01592540\nn01592694\nn01593028\nn01593282\nn01593553\nn01594004\nn01594372\nn01594787\nn01594968\nn01595168\nn01595450\nn01595624\nn01595974\nn01596273\nn01596608\nn01597022\nn01597336\nn01597737\nn01597906\nn01598074\nn01598271\nn01598588\nn01598988\nn01599159\nn01599269\nn01599388\nn01599556\nn01599741\nn01600085\nn01600341\nn01600657\nn01601068\nn01601410\nn01601694\nn01602080\nn01602209\nn01602630\nn01602832\nn01603000\nn01603152\nn01603600\nn01603812\nn01603953\nn01604330\nn01604968\nn01605630\nn01606097\nn01606177\nn01606522\nn01606672\nn01606809\nn01606978\nn01607309\nn01607429\nn01607600\nn01607812\nn01607962\nn01608265\nn01608432\nn01608814\nn01609062\nn01609391\nn01609751\nn01609956\nn01610100\nn01610226\nn01610552\nn01610955\nn01611472\nn01611674\nn01611800\nn01611969\nn01612122\nn01612275\nn01612476\nn01612628\nn01612955\nn01613177\nn01613294\nn01613615\nn01613807\nn01614038\nn01614343\nn01614556\nn01614925\nn01615121\nn01615303\nn01615458\nn01615703\nn01616086\nn01616318\nn01616551\nn01616764\nn01617095\nn01617443\nn01617766\nn01618082\nn01618503\nn01618922\nn01619310\nn01619536\nn01619835\nn01620135\nn01620414\nn01620735\nn01621127\nn01621635\nn01622120\nn01622352\nn01622483\nn01622779\nn01622959\nn01623110\nn01623425\nn01623615\nn01623706\nn01623880\nn01624115\nn01624212\nn01624305\nn01624537\nn01624833\nn01625121\nn01625562\nn01627424\nn01628331\nn01628770\nn01629276\nn01629819\nn01629962\nn01630148\nn01630284\nn01630670\nn01630901\nn01631175\nn01631354\nn01631512\nn01631663\nn01632047\nn016323
08\nn01632458\nn01632601\nn01632777\nn01632952\nn01633406\nn01633781\nn01634227\nn01634522\nn01635027\nn01635176\nn01635480\nn01636127\nn01636352\nn01636510\nn01636829\nn01637112\nn01637338\nn01637615\nn01637932\nn01638194\nn01638329\nn01638722\nn01639187\nn01639765\nn01640846\nn01641206\nn01641391\nn01641577\nn01641739\nn01641930\nn01642097\nn01642257\nn01642391\nn01642539\nn01642943\nn01643255\nn01643507\nn01643896\nn01644373\nn01644900\nn01645466\nn01645776\nn01646292\nn01646388\nn01646555\nn01646648\nn01646802\nn01646902\nn01647033\nn01647180\nn01647303\nn01647466\nn01647640\nn01648139\nn01648356\nn01648620\nn01649170\nn01649412\nn01649556\nn01649726\nn01650167\nn01650690\nn01650901\nn01651059\nn01651285\nn01651487\nn01651641\nn01651778\nn01652026\nn01652297\nn01653026\nn01653223\nn01653509\nn01653773\nn01654083\nn01654637\nn01654863\nn01655344\nn01661091\nn01661592\nn01661818\nn01662060\nn01662622\nn01662784\nn01663401\nn01663782\nn01664065\nn01664369\nn01664492\nn01664674\nn01664990\nn01665541\nn01665932\nn01666228\nn01666585\nn01667114\nn01667432\nn01667778\nn01668091\nn01668436\nn01668665\nn01668892\nn01669191\nn01669372\nn01669654\nn01670092\nn01670535\nn01670802\nn01671125\nn01671479\nn01671705\nn01672032\nn01672432\nn01672611\nn01673282\nn01674216\nn01674464\nn01674990\nn01675352\nn01675722\nn01676755\nn01677366\nn01677747\nn01678043\nn01678343\nn01678657\nn01679005\nn01679307\nn01679626\nn01679962\nn01680264\nn01680478\nn01680655\nn01680813\nn01680983\nn01681328\nn01681653\nn01681940\nn01682172\nn01682435\nn01682714\nn01683201\nn01683558\nn01684133\nn01684578\nn01684741\nn01685439\nn01685808\nn01686044\nn01686220\nn01686403\nn01686609\nn01686808\nn01687128\nn01687290\nn01687665\nn01687978\nn01688243\nn01688961\nn01689081\nn01689411\nn01689811\nn01690149\nn01690466\nn01691217\nn01691652\nn01691951\nn01692333\nn01692523\nn01692864\nn01693175\nn01693334\nn01693783\nn01694178\nn01694311\nn01694709\nn01694955\nn01695060\nn01696633\nn01697178\nn01697457\nn0169
7611\nn01697749\nn01697978\nn01698434\nn01698640\nn01698782\nn01699040\nn01699254\nn01699675\nn01701551\nn01701859\nn01702256\nn01702479\nn01703011\nn01703161\nn01703569\nn01704103\nn01704323\nn01704626\nn01705010\nn01705591\nn01705934\nn01707294\nn01708106\nn01708998\nn01709484\nn01709876\nn01710177\nn01711160\nn01712008\nn01712752\nn01713170\nn01713764\nn01714231\nn01715888\nn01717016\nn01717229\nn01717467\nn01718096\nn01718414\nn01719403\nn01721174\nn01721898\nn01722670\nn01722998\nn01723579\nn01724231\nn01724840\nn01725086\nn01725713\nn01726203\nn01726692\nn01727646\nn01728266\nn01728572\nn01728920\nn01729322\nn01729672\nn01729977\nn01730185\nn01730307\nn01730563\nn01730812\nn01730960\nn01731137\nn01731277\nn01731545\nn01731764\nn01731941\nn01732093\nn01732244\nn01732614\nn01732789\nn01732989\nn01733214\nn01733466\nn01733757\nn01733957\nn01734104\nn01734418\nn01734637\nn01734808\nn01735189\nn01735439\nn01735577\nn01735728\nn01736032\nn01736375\nn01736796\nn01737021\nn01737472\nn01737728\nn01737875\nn01738065\nn01738306\nn01738601\nn01738731\nn01739094\nn01739381\nn01739647\nn01739871\nn01740131\nn01740551\nn01740885\nn01741232\nn01741442\nn01741562\nn01741943\nn01742172\nn01742447\nn01742821\nn01743086\nn01743605\nn01743936\nn01744100\nn01744270\nn01744401\nn01744555\nn01745125\nn01745484\nn01745902\nn01746191\nn01746359\nn01746952\nn01747285\nn01747589\nn01747885\nn01748264\nn01748389\nn01748686\nn01748906\nn01749244\nn01749582\nn01749742\nn01749939\nn01750167\nn01750437\nn01750743\nn01751036\nn01751215\nn01751472\nn01751748\nn01752165\nn01752585\nn01752736\nn01753032\nn01753180\nn01753488\nn01753959\nn01754370\nn01754533\nn01754876\nn01755581\nn01755740\nn01755952\nn01756089\nn01756291\nn01756508\nn01756733\nn01756916\nn01757115\nn01757343\nn01757677\nn01757901\nn01758141\nn01758757\nn01758895\nn01767661\nn01768244\nn01769347\nn01770081\nn01770393\nn01770795\nn01771100\nn01771417\nn01771766\nn01772222\nn01772664\nn01773157\nn01773549\nn01773797\nn01774097\nn01
774384\nn01774750\nn01775062\nn01775370\nn01775730\nn01776192\nn01776313\nn01776705\nn01777304\nn01777467\nn01777649\nn01777909\nn01778217\nn01778487\nn01778621\nn01778801\nn01779148\nn01779463\nn01779629\nn01779939\nn01780142\nn01780426\nn01780696\nn01781071\nn01781570\nn01781698\nn01781875\nn01782209\nn01782516\nn01783017\nn01783706\nn01784293\nn01784675\nn01785667\nn01786646\nn01787006\nn01787191\nn01787835\nn01788291\nn01788579\nn01788864\nn01789386\nn01789740\nn01790171\nn01790304\nn01790398\nn01790557\nn01790711\nn01790812\nn01791107\nn01791314\nn01791388\nn01791463\nn01791625\nn01791954\nn01792042\nn01792158\nn01792429\nn01792530\nn01792640\nn01792808\nn01792955\nn01793085\nn01793159\nn01793249\nn01793340\nn01793435\nn01793565\nn01793715\nn01794158\nn01794344\nn01794651\nn01795088\nn01795545\nn01795735\nn01795900\nn01796019\nn01796105\nn01796340\nn01796519\nn01796729\nn01797020\nn01797307\nn01797601\nn01797886\nn01798168\nn01798484\nn01798706\nn01798839\nn01798979\nn01799302\nn01799679\nn01800195\nn01800424\nn01800633\nn01801088\nn01801479\nn01801672\nn01801876\nn01802159\nn01802721\nn01803078\nn01803362\nn01803641\nn01803893\nn01804163\nn01804478\nn01804653\nn01804921\nn01805070\nn01805321\nn01805801\nn01806061\nn01806143\nn01806297\nn01806364\nn01806467\nn01806567\nn01806847\nn01807105\nn01807496\nn01807828\nn01808140\nn01808291\nn01808596\nn01809106\nn01809371\nn01809752\nn01810268\nn01810700\nn01811243\nn01811909\nn01812187\nn01812337\nn01812662\nn01812866\nn01813088\nn01813385\nn01813532\nn01813658\nn01813948\nn01814217\nn01814370\nn01814549\nn01814620\nn01814755\nn01814921\nn01815036\nn01815270\nn01815601\nn01816017\nn01816140\nn01816474\nn01816887\nn01817263\nn01817346\nn01817953\nn01818299\nn01818515\nn01818832\nn01819115\nn01819313\nn01819465\nn01819734\nn01820052\nn01820348\nn01820546\nn01820801\nn01821076\nn01821203\nn01821554\nn01821869\nn01822300\nn01822602\nn01823013\nn01823414\nn01823740\nn01824035\nn01824344\nn01824575\nn01824749\nn01825278\nn
01825930\nn01826364\nn01826680\nn01826844\nn01827403\nn01827793\nn01828096\nn01828556\nn01828970\nn01829413\nn01829869\nn01830042\nn01830479\nn01830915\nn01831360\nn01831712\nn01832167\nn01832493\nn01832813\nn01833112\nn01833415\nn01833805\nn01834177\nn01834540\nn01835276\nn01835769\nn01835918\nn01836087\nn01836673\nn01837072\nn01837526\nn01838038\nn01838598\nn01839086\nn01839330\nn01839598\nn01839750\nn01839949\nn01840120\nn01840412\nn01840775\nn01841102\nn01841288\nn01841441\nn01841679\nn01841943\nn01842235\nn01842504\nn01842788\nn01843065\nn01843383\nn01843719\nn01844231\nn01844551\nn01844746\nn01844917\nn01845132\nn01845477\nn01846331\nn01847000\nn01847089\nn01847170\nn01847253\nn01847407\nn01847806\nn01847978\nn01848123\nn01848323\nn01848453\nn01848555\nn01848648\nn01848840\nn01848976\nn01849157\nn01849466\nn01849676\nn01849863\nn01850192\nn01850373\nn01850553\nn01850873\nn01851038\nn01851207\nn01851375\nn01851573\nn01851731\nn01851895\nn01852142\nn01852329\nn01852400\nn01852671\nn01852861\nn01853195\nn01853498\nn01853666\nn01853870\nn01854415\nn01854700\nn01854838\nn01855032\nn01855188\nn01855476\nn01855672\nn01856072\nn01856155\nn01856380\nn01856553\nn01856890\nn01857079\nn01857325\nn01857512\nn01857632\nn01857851\nn01858281\nn01858441\nn01858780\nn01858845\nn01858906\nn01859190\nn01859325\nn01859496\nn01859689\nn01859852\nn01860002\nn01860187\nn01860497\nn01860864\nn01861148\nn01861330\nn01861778\nn01862399\nn01871265\nn01871543\nn01871875\nn01872401\nn01872772\nn01873310\nn01874434\nn01874928\nn01875313\nn01875610\nn01876034\nn01876326\nn01876667\nn01877134\nn01877606\nn01877812\nn01878061\nn01878335\nn01878639\nn01878929\nn01879217\nn01879509\nn01879837\nn01880152\nn01880473\nn01880716\nn01880813\nn01881171\nn01881564\nn01881857\nn01882125\nn01882714\nn01883070\nn01883513\nn01883920\nn01884104\nn01884203\nn01884476\nn01884834\nn01885158\nn01885498\nn01886045\nn01886756\nn01887474\nn01887623\nn01887787\nn01887896\nn01888045\nn01888181\nn01888264\nn01888411\
nn01889074\nn01889520\nn01889849\nn01890144\nn01890564\nn01890860\nn01891013\nn01891274\nn01891633\nn01892030\nn01892145\nn01892385\nn01892551\nn01892744\nn01893021\nn01893164\nn01893399\nn01893825\nn01894207\nn01894522\nn01894956\nn01896844\nn01897257\nn01897426\nn01897536\nn01897667\nn01898593\nn01899894\nn01900150\nn01903234\nn01903346\nn01903498\nn01904029\nn01904806\nn01904886\nn01905321\nn01905661\nn01906749\nn01907287\nn01907738\nn01908042\nn01908958\nn01909422\nn01909788\nn01909906\nn01910252\nn01910747\nn01911063\nn01911403\nn01911839\nn01912152\nn01912454\nn01912809\nn01913166\nn01913346\nn01913440\nn01914163\nn01914609\nn01914830\nn01915700\nn01915811\nn01916187\nn01916388\nn01916481\nn01916588\nn01916925\nn01917289\nn01917611\nn01917882\nn01918744\nn01919385\nn01920051\nn01920438\nn01921059\nn01922303\nn01922717\nn01922948\nn01923025\nn01923404\nn01923890\nn01924800\nn01924916\nn01925270\nn01925695\nn01925916\nn01926379\nn01926689\nn01927159\nn01927456\nn01927928\nn01928215\nn01928517\nn01928865\nn01929186\nn01930112\nn01930852\nn01931140\nn01931520\nn01931714\nn01932151\nn01932936\nn01933151\nn01933478\nn01933988\nn01934440\nn01934844\nn01935176\nn01935395\nn01936391\nn01936671\nn01936858\nn01937579\nn01937909\nn01938454\nn01938735\nn01940736\nn01941223\nn01941340\nn01942177\nn01942869\nn01943087\nn01943541\nn01943899\nn01944118\nn01944390\nn01944812\nn01944955\nn01945143\nn01945340\nn01945685\nn01945845\nn01946277\nn01946630\nn01946827\nn01947139\nn01947396\nn01947997\nn01948446\nn01948573\nn01949085\nn01949499\nn01949973\nn01950731\nn01951274\nn01951613\nn01952029\nn01952712\nn01953361\nn01953594\nn01953762\nn01954516\nn01955084\nn01955933\nn01956344\nn01956481\nn01956764\nn01957335\nn01958038\nn01958346\nn01958435\nn01958531\nn01959029\nn01959492\nn01959985\nn01960177\nn01960459\nn01961234\nn01961600\nn01961985\nn01962506\nn01962788\nn01963317\nn01963479\nn01963571\nn01964049\nn01964271\nn01964441\nn01964957\nn01965252\nn01965529\nn01965889\nn0196637
7\nn01966586\nn01967094\nn01967308\nn01967963\nn01968315\nn01968897\nn01969726\nn01970164\nn01970667\nn01971094\nn01971280\nn01971620\nn01971850\nn01972131\nn01972541\nn01973148\nn01974773\nn01975687\nn01976146\nn01976868\nn01976957\nn01977485\nn01978010\nn01978136\nn01978287\nn01978455\nn01978587\nn01978930\nn01979269\nn01979526\nn01979874\nn01980166\nn01980655\nn01981276\nn01981702\nn01982068\nn01982347\nn01982650\nn01983048\nn01983481\nn01983674\nn01983829\nn01984245\nn01984695\nn01985128\nn01985493\nn01985797\nn01986214\nn01986806\nn01987076\nn01987545\nn01987727\nn01988203\nn01988701\nn01988869\nn01989516\nn01989869\nn01990007\nn01990516\nn01990800\nn01991028\nn01991520\nn01992262\nn01992423\nn01992773\nn01993525\nn01993830\nn01994910\nn01995514\nn01995686\nn01996280\nn01996585\nn01997119\nn01997825\nn01998183\nn01998741\nn01999186\nn01999767\nn02000954\nn02002075\nn02002556\nn02002724\nn02003037\nn02003204\nn02003577\nn02003839\nn02004131\nn02004492\nn02004855\nn02005399\nn02005790\nn02006063\nn02006364\nn02006656\nn02006985\nn02007284\nn02007558\nn02008041\nn02008497\nn02008643\nn02008796\nn02009229\nn02009380\nn02009508\nn02009750\nn02009912\nn02010272\nn02010453\nn02010728\nn02011016\nn02011281\nn02011460\nn02011805\nn02011943\nn02012185\nn02012849\nn02013177\nn02013567\nn02013706\nn02014237\nn02014524\nn02014941\nn02015357\nn02015554\nn02015797\nn02016066\nn02016358\nn02016659\nn02016816\nn02016956\nn02017213\nn02017475\nn02017725\nn02018027\nn02018207\nn02018368\nn02018795\nn02019190\nn02019438\nn02019929\nn02020219\nn02020578\nn02021050\nn02021281\nn02021795\nn02022684\nn02023341\nn02023855\nn02023992\nn02024185\nn02024479\nn02024763\nn02025043\nn02025239\nn02025389\nn02026059\nn02026629\nn02026948\nn02027075\nn02027357\nn02027492\nn02027897\nn02028035\nn02028175\nn02028342\nn02028451\nn02028727\nn02028900\nn02029087\nn02029378\nn02029706\nn02030035\nn02030224\nn02030287\nn02030568\nn02030837\nn02030996\nn02031298\nn02031585\nn02031934\nn02032222\nn02032
355\nn02032480\nn02032769\nn02033041\nn02033208\nn02033324\nn02033561\nn02033779\nn02033882\nn02034129\nn02034295\nn02034661\nn02034971\nn02035210\nn02035402\nn02035656\nn02036053\nn02036228\nn02036711\nn02037110\nn02037464\nn02037869\nn02038141\nn02038466\nn02038993\nn02039171\nn02039497\nn02039780\nn02040266\nn02040505\nn02041085\nn02041246\nn02041678\nn02041875\nn02042046\nn02042180\nn02042472\nn02042759\nn02043063\nn02043333\nn02043808\nn02044178\nn02044517\nn02044778\nn02044908\nn02045369\nn02045596\nn02045864\nn02046171\nn02046759\nn02046939\nn02047045\nn02047260\nn02047411\nn02047517\nn02047614\nn02047975\nn02048115\nn02048353\nn02048698\nn02049088\nn02049532\nn02050004\nn02050313\nn02050442\nn02050586\nn02050809\nn02051059\nn02051474\nn02051845\nn02052204\nn02052365\nn02052775\nn02053083\nn02053425\nn02053584\nn02054036\nn02054502\nn02054711\nn02055107\nn02055658\nn02055803\nn02056228\nn02056570\nn02056728\nn02057035\nn02057330\nn02057731\nn02057898\nn02058221\nn02058594\nn02058747\nn02059162\nn02059541\nn02059852\nn02060133\nn02060411\nn02060569\nn02060889\nn02061217\nn02061560\nn02061853\nn02062017\nn02062430\nn02062744\nn02063224\nn02063662\nn02064000\nn02064338\nn02064816\nn02065026\nn02065263\nn02065407\nn02065726\nn02066245\nn02066707\nn02067240\nn02067603\nn02067768\nn02068206\nn02068541\nn02068974\nn02069412\nn02069701\nn02069974\nn02070174\nn02070430\nn02070624\nn02070776\nn02071028\nn02071294\nn02071636\nn02072040\nn02072493\nn02072798\nn02073250\nn02073831\nn02074367\nn02074726\nn02075296\nn02075612\nn02075927\nn02076196\nn02076402\nn02076779\nn02077152\nn02077384\nn02077658\nn02077787\nn02077923\nn02078292\nn02078574\nn02078738\nn02079005\nn02079389\nn02079851\nn02080146\nn02080415\nn02080713\nn02081060\nn02081571\nn02081798\nn02081927\nn02082056\nn02082190\nn02082791\nn02083346\nn02083672\nn02083780\nn02084071\nn02084732\nn02084861\nn02085019\nn02085118\nn02085272\nn02085374\nn02085620\nn02085782\nn02085936\nn02086079\nn02086240\nn02086346\nn020
86478\nn02086646\nn02086753\nn02086910\nn02087046\nn02087122\nn02087314\nn02087394\nn02087551\nn02088094\nn02088238\nn02088364\nn02088466\nn02088632\nn02088745\nn02088839\nn02088992\nn02089078\nn02089232\nn02089468\nn02089555\nn02089725\nn02089867\nn02089973\nn02090129\nn02090253\nn02090379\nn02090475\nn02090622\nn02090721\nn02090827\nn02091032\nn02091134\nn02091244\nn02091467\nn02091635\nn02091831\nn02092002\nn02092173\nn02092339\nn02092468\nn02093056\nn02093256\nn02093428\nn02093647\nn02093754\nn02093859\nn02093991\nn02094114\nn02094258\nn02094433\nn02094562\nn02094721\nn02094931\nn02095050\nn02095212\nn02095314\nn02095412\nn02095570\nn02095727\nn02095889\nn02096051\nn02096177\nn02096294\nn02096437\nn02096585\nn02096756\nn02097047\nn02097130\nn02097209\nn02097298\nn02097474\nn02097658\nn02097786\nn02097967\nn02098105\nn02098286\nn02098413\nn02098550\nn02098806\nn02098906\nn02099029\nn02099267\nn02099429\nn02099601\nn02099712\nn02099849\nn02099997\nn02100236\nn02100399\nn02100583\nn02100735\nn02100877\nn02101006\nn02101108\nn02101388\nn02101556\nn02101670\nn02101861\nn02102040\nn02102177\nn02102318\nn02102480\nn02102605\nn02102806\nn02102973\nn02103181\nn02103406\nn02103841\nn02104029\nn02104184\nn02104280\nn02104365\nn02104523\nn02104882\nn02105056\nn02105162\nn02105251\nn02105412\nn02105505\nn02105641\nn02105855\nn02106030\nn02106166\nn02106382\nn02106550\nn02106662\nn02106854\nn02106966\nn02107142\nn02107312\nn02107420\nn02107574\nn02107683\nn02107908\nn02108000\nn02108089\nn02108254\nn02108422\nn02108551\nn02108672\nn02108915\nn02109047\nn02109150\nn02109256\nn02109391\nn02109525\nn02109687\nn02109811\nn02109961\nn02110063\nn02110185\nn02110341\nn02110532\nn02110627\nn02110806\nn02110958\nn02111129\nn02111277\nn02111500\nn02111626\nn02111889\nn02112018\nn02112137\nn02112350\nn02112497\nn02112706\nn02112826\nn02113023\nn02113186\nn02113335\nn02113624\nn02113712\nn02113799\nn02113892\nn02113978\nn02114100\nn02114367\nn02114548\nn02114712\nn02114855\nn02115012\nn0
2115096\nn02115335\nn02115641\nn02115913\nn02116185\nn02116450\nn02116738\nn02117135\nn02117512\nn02117646\nn02117900\nn02118176\nn02118333\nn02118643\nn02118707\nn02119022\nn02119247\nn02119359\nn02119477\nn02119634\nn02119789\nn02120079\nn02120278\nn02120505\nn02120997\nn02121620\nn02121808\nn02122298\nn02122430\nn02122510\nn02122580\nn02122725\nn02122810\nn02122878\nn02122948\nn02123045\nn02123159\nn02123242\nn02123394\nn02123478\nn02123597\nn02123785\nn02123917\nn02124075\nn02124157\nn02124313\nn02124484\nn02124623\nn02125010\nn02125081\nn02125311\nn02125494\nn02125689\nn02125872\nn02126028\nn02126139\nn02126317\nn02126640\nn02126787\nn02127052\nn02127292\nn02127381\nn02127482\nn02127586\nn02127678\nn02127808\nn02128385\nn02128598\nn02128669\nn02128757\nn02128925\nn02129165\nn02129463\nn02129530\nn02129604\nn02129837\nn02129923\nn02129991\nn02130086\nn02130308\nn02130545\nn02130925\nn02131653\nn02132136\nn02132320\nn02132466\nn02132580\nn02132788\nn02133161\nn02133400\nn02133704\nn02134084\nn02134418\nn02134971\nn02135220\nn02135610\nn02135844\nn02136103\nn02136285\nn02136452\nn02136794\nn02137015\nn02137302\nn02137549\nn02137722\nn02137888\nn02138169\nn02138441\nn02138647\nn02138777\nn02139199\nn02139671\nn02140049\nn02140179\nn02140268\nn02140491\nn02140858\nn02141306\nn02141611\nn02141713\nn02142407\nn02142734\nn02142898\nn02143142\nn02143439\nn02143891\nn02144251\nn02144593\nn02144936\nn02145424\nn02145910\nn02146201\nn02146371\nn02146700\nn02146879\nn02147173\nn02147328\nn02147591\nn02147947\nn02148088\nn02148512\nn02148835\nn02148991\nn02149420\nn02149653\nn02149861\nn02150134\nn02150482\nn02150885\nn02151230\nn02152740\nn02152881\nn02152991\nn02153109\nn02153203\nn02153809\nn02156732\nn02156871\nn02157206\nn02157285\nn02159955\nn02160947\nn02161225\nn02161338\nn02161457\nn02161588\nn02162561\nn02163008\nn02163297\nn02164464\nn02165105\nn02165456\nn02165877\nn02166229\nn02166567\nn02166826\nn02167151\nn02167505\nn02167820\nn02167944\nn02168245\nn02168427\n
n02168699\nn02169023\nn02169218\nn02169497\nn02169705\nn02169974\nn02170400\nn02170599\nn02170738\nn02170993\nn02171164\nn02171453\nn02171869\nn02172182\nn02172518\nn02172678\nn02172761\nn02172870\nn02173113\nn02173373\nn02173784\nn02174001\nn02174355\nn02174659\nn02175014\nn02175569\nn02175916\nn02176261\nn02176439\nn02176747\nn02176916\nn02177196\nn02177506\nn02177775\nn02177972\nn02178411\nn02178717\nn02179012\nn02179192\nn02179340\nn02179891\nn02180233\nn02180427\nn02180875\nn02181235\nn02181477\nn02181724\nn02182045\nn02182355\nn02182642\nn02182930\nn02183096\nn02183507\nn02183857\nn02184473\nn02184589\nn02184720\nn02185167\nn02185481\nn02186153\nn02186717\nn02187150\nn02187279\nn02187554\nn02187900\nn02188699\nn02189363\nn02189670\nn02190166\nn02190790\nn02191273\nn02191773\nn02191979\nn02192252\nn02192513\nn02192814\nn02193009\nn02193163\nn02194249\nn02194750\nn02195091\nn02195526\nn02195819\nn02196119\nn02196344\nn02196896\nn02197185\nn02197689\nn02197877\nn02198129\nn02198532\nn02198859\nn02199170\nn02199502\nn02200198\nn02200509\nn02200630\nn02200850\nn02201000\nn02201497\nn02201626\nn02202006\nn02202124\nn02202287\nn02202678\nn02203152\nn02203592\nn02203978\nn02204249\nn02204722\nn02204907\nn02205219\nn02205673\nn02206270\nn02206856\nn02207179\nn02207345\nn02207449\nn02207647\nn02207805\nn02208280\nn02208498\nn02208848\nn02208979\nn02209111\nn02209354\nn02209624\nn02209964\nn02210427\nn02210921\nn02211444\nn02211627\nn02211896\nn02212062\nn02212602\nn02212958\nn02213107\nn02213239\nn02213543\nn02213663\nn02213788\nn02214096\nn02214341\nn02214499\nn02214660\nn02214773\nn02215161\nn02215621\nn02215770\nn02216211\nn02216365\nn02216740\nn02217563\nn02217839\nn02218134\nn02218371\nn02218713\nn02219015\nn02219486\nn02220055\nn02220225\nn02220518\nn02220804\nn02221083\nn02221414\nn02221571\nn02221715\nn02221820\nn02222035\nn02222321\nn02222582\nn02223266\nn02223520\nn02224023\nn02224713\nn02225081\nn02225798\nn02226183\nn02226429\nn02226821\nn02226970\nn02227247
\nn02227604\nn02227966\nn02228341\nn02228697\nn02229156\nn02229544\nn02229765\nn02230023\nn02230187\nn02230480\nn02230634\nn02231052\nn02231487\nn02231803\nn02232223\nn02233338\nn02233943\nn02234355\nn02234570\nn02234848\nn02235205\nn02236044\nn02236241\nn02236355\nn02236896\nn02237424\nn02237581\nn02237868\nn02238235\nn02238358\nn02238594\nn02238887\nn02239192\nn02239528\nn02239774\nn02240068\nn02240517\nn02241008\nn02241426\nn02241569\nn02241799\nn02242137\nn02242455\nn02243209\nn02243562\nn02243878\nn02244173\nn02244515\nn02244797\nn02245111\nn02245443\nn02246011\nn02246628\nn02246941\nn02247216\nn02247511\nn02247655\nn02248062\nn02248368\nn02248510\nn02248887\nn02249134\nn02249515\nn02249809\nn02250280\nn02250822\nn02251067\nn02251233\nn02251593\nn02251775\nn02252226\nn02252799\nn02252972\nn02253127\nn02253264\nn02253494\nn02253715\nn02253913\nn02254246\nn02254697\nn02254901\nn02255023\nn02255391\nn02256172\nn02256656\nn02257003\nn02257284\nn02257715\nn02257985\nn02258198\nn02258508\nn02258629\nn02259212\nn02259377\nn02259708\nn02259987\nn02260421\nn02260863\nn02261063\nn02261419\nn02261757\nn02262178\nn02262449\nn02262803\nn02263378\nn02264021\nn02264232\nn02264363\nn02264591\nn02264885\nn02265330\nn02266050\nn02266269\nn02266421\nn02266864\nn02267208\nn02267483\nn02268148\nn02268443\nn02268853\nn02269196\nn02269340\nn02269522\nn02269657\nn02270011\nn02270200\nn02270623\nn02270945\nn02271222\nn02271570\nn02271897\nn02272286\nn02272552\nn02272871\nn02273392\nn02274024\nn02274259\nn02274822\nn02275560\nn02275773\nn02276078\nn02276258\nn02276355\nn02276749\nn02276902\nn02277094\nn02277268\nn02277422\nn02277742\nn02278024\nn02278210\nn02278463\nn02278839\nn02278980\nn02279257\nn02279637\nn02279972\nn02280458\nn02280649\nn02281015\nn02281136\nn02281267\nn02281406\nn02281787\nn02282257\nn02282385\nn02282553\nn02282903\nn02283077\nn02283201\nn02283617\nn02283951\nn02284224\nn02284611\nn02284884\nn02285179\nn02285548\nn02285801\nn02286089\nn02286425\nn02286654\nn022870
04\nn02287352\nn02287622\nn02287799\nn02287987\nn02288122\nn02288268\nn02288789\nn02289307\nn02289610\nn02289988\nn02290340\nn02290664\nn02290870\nn02291220\nn02291572\nn02291748\nn02292085\nn02292401\nn02292692\nn02293352\nn02293868\nn02294097\nn02294407\nn02294577\nn02295064\nn02295390\nn02295870\nn02296021\nn02296276\nn02296612\nn02296912\nn02297294\nn02297442\nn02297819\nn02297938\nn02298095\nn02298218\nn02298541\nn02299039\nn02299157\nn02299378\nn02299505\nn02299846\nn02300173\nn02300554\nn02300797\nn02301452\nn02301935\nn02302244\nn02302459\nn02302620\nn02302969\nn02303284\nn02303585\nn02303777\nn02304036\nn02304432\nn02304657\nn02304797\nn02305085\nn02305407\nn02305636\nn02305929\nn02306433\nn02306825\nn02307176\nn02307325\nn02307515\nn02307681\nn02307910\nn02308033\nn02308139\nn02308471\nn02308618\nn02308735\nn02309120\nn02309242\nn02309337\nn02309841\nn02310000\nn02310149\nn02310334\nn02310585\nn02310717\nn02310941\nn02311060\nn02311617\nn02311748\nn02312006\nn02312175\nn02312325\nn02312427\nn02312640\nn02312912\nn02313008\nn02313360\nn02313709\nn02315487\nn02315821\nn02316707\nn02317335\nn02317781\nn02318167\nn02318687\nn02319095\nn02319308\nn02319555\nn02319829\nn02320127\nn02320465\nn02321170\nn02321529\nn02322047\nn02322992\nn02323449\nn02323902\nn02324045\nn02324431\nn02324514\nn02324587\nn02324850\nn02325366\nn02325722\nn02325884\nn02326074\nn02326432\nn02326763\nn02326862\nn02327028\nn02327175\nn02327435\nn02327656\nn02327842\nn02328009\nn02328150\nn02328429\nn02328820\nn02328942\nn02329401\nn02330245\nn02331046\nn02331309\nn02331842\nn02332156\nn02332447\nn02332755\nn02332954\nn02333190\nn02333546\nn02333733\nn02333819\nn02333909\nn02334201\nn02334460\nn02334728\nn02335127\nn02335231\nn02336011\nn02336275\nn02336641\nn02336826\nn02337001\nn02337171\nn02337332\nn02337598\nn02337902\nn02338145\nn02338449\nn02338722\nn02338901\nn02339282\nn02339376\nn02339922\nn02340186\nn02340358\nn02340640\nn02340930\nn02341288\nn02341475\nn02341616\nn02341974\nn0234
2250\nn02342534\nn02342885\nn02343058\nn02343320\nn02343772\nn02344175\nn02344270\nn02344408\nn02344528\nn02344918\nn02345078\nn02345340\nn02345600\nn02345774\nn02345997\nn02346170\nn02346627\nn02346998\nn02347274\nn02347573\nn02347744\nn02348173\nn02348788\nn02349205\nn02349390\nn02349557\nn02349847\nn02350105\nn02350357\nn02350670\nn02350989\nn02351343\nn02351870\nn02352002\nn02352290\nn02352591\nn02352932\nn02353172\nn02353411\nn02353861\nn02354162\nn02354320\nn02354621\nn02354781\nn02355227\nn02355477\nn02356381\nn02356612\nn02356798\nn02356977\nn02357111\nn02357401\nn02357585\nn02357911\nn02358091\nn02358390\nn02358584\nn02358712\nn02358890\nn02359047\nn02359324\nn02359556\nn02359667\nn02359915\nn02360282\nn02360480\nn02360781\nn02360933\nn02361090\nn02361337\nn02361587\nn02361706\nn02361850\nn02362194\nn02363005\nn02363245\nn02363351\nn02363996\nn02364520\nn02364673\nn02364840\nn02365108\nn02365480\nn02366002\nn02366301\nn02366579\nn02366959\nn02367492\nn02367812\nn02368116\nn02368399\nn02368821\nn02369293\nn02369555\nn02369680\nn02369935\nn02370137\nn02370525\nn02370806\nn02371344\nn02372140\nn02372584\nn02372952\nn02373336\nn02374149\nn02374451\nn02375302\nn02375438\nn02375757\nn02375862\nn02376542\nn02376679\nn02376791\nn02376918\nn02377063\nn02377181\nn02377291\nn02377388\nn02377480\nn02377603\nn02377703\nn02378149\nn02378299\nn02378415\nn02378541\nn02378625\nn02378755\nn02378870\nn02378969\nn02379081\nn02379183\nn02379329\nn02379430\nn02379630\nn02379743\nn02379908\nn02380052\nn02380335\nn02380464\nn02380583\nn02380745\nn02380875\nn02381004\nn02381119\nn02381261\nn02381364\nn02381460\nn02381609\nn02381831\nn02382039\nn02382132\nn02382204\nn02382338\nn02382437\nn02382635\nn02382750\nn02382850\nn02382948\nn02383231\nn02384741\nn02384858\nn02385002\nn02385098\nn02385214\nn02385580\nn02385676\nn02385776\nn02385898\nn02386014\nn02386141\nn02386224\nn02386310\nn02386496\nn02386746\nn02386853\nn02386968\nn02387093\nn02387254\nn02387346\nn02387452\nn02387722\nn02
387887\nn02387983\nn02388143\nn02388276\nn02388453\nn02388588\nn02388735\nn02388832\nn02388917\nn02389026\nn02389128\nn02389261\nn02389346\nn02389559\nn02389779\nn02389865\nn02389943\nn02390015\nn02390101\nn02390258\nn02390454\nn02390640\nn02390738\nn02390834\nn02390938\nn02391049\nn02391234\nn02391373\nn02391508\nn02391617\nn02391994\nn02392434\nn02392555\nn02392824\nn02393161\nn02393580\nn02393807\nn02393940\nn02394477\nn02395003\nn02395406\nn02395694\nn02395855\nn02395931\nn02396014\nn02396088\nn02396157\nn02396427\nn02396796\nn02397096\nn02397529\nn02397744\nn02397987\nn02398521\nn02399000\nn02401031\nn02402010\nn02402175\nn02402425\nn02403003\nn02403153\nn02403231\nn02403325\nn02403454\nn02403740\nn02403820\nn02403920\nn02404028\nn02404186\nn02404432\nn02404573\nn02404906\nn02405101\nn02405302\nn02405440\nn02405577\nn02405692\nn02405799\nn02405929\nn02406046\nn02406174\nn02406432\nn02406533\nn02406647\nn02406749\nn02406859\nn02406952\nn02407071\nn02407172\nn02407276\nn02407390\nn02407521\nn02407625\nn02407763\nn02407959\nn02408429\nn02408660\nn02408817\nn02409038\nn02409202\nn02409508\nn02409870\nn02410011\nn02410141\nn02410509\nn02410702\nn02410900\nn02411206\nn02411705\nn02411999\nn02412080\nn02412210\nn02412440\nn02412629\nn02412700\nn02412787\nn02412909\nn02412977\nn02413050\nn02413131\nn02413484\nn02413593\nn02413717\nn02413824\nn02413917\nn02414043\nn02414209\nn02414290\nn02414442\nn02414578\nn02414763\nn02414904\nn02415130\nn02415253\nn02415435\nn02415577\nn02415829\nn02416104\nn02416519\nn02416820\nn02416880\nn02416964\nn02417070\nn02417242\nn02417387\nn02417534\nn02417663\nn02417785\nn02417914\nn02418064\nn02418465\nn02418770\nn02419056\nn02419336\nn02419634\nn02419796\nn02420509\nn02420828\nn02421136\nn02421449\nn02421792\nn02422106\nn02422391\nn02422699\nn02423022\nn02423218\nn02423362\nn02423589\nn02424085\nn02424305\nn02424486\nn02424589\nn02424695\nn02424909\nn02425086\nn02425228\nn02425532\nn02425887\nn02426176\nn02426481\nn02426813\nn02427032\nn
02427183\nn02427470\nn02427576\nn02427724\nn02428089\nn02428349\nn02428508\nn02428842\nn02429456\nn02430045\nn02430559\nn02430643\nn02430748\nn02430830\nn02431122\nn02431337\nn02431441\nn02431542\nn02431628\nn02431785\nn02431976\nn02432291\nn02432511\nn02432704\nn02432983\nn02433318\nn02433546\nn02433729\nn02433925\nn02434190\nn02434415\nn02434712\nn02434954\nn02435216\nn02435517\nn02435853\nn02436224\nn02436353\nn02436645\nn02437136\nn02437312\nn02437482\nn02437616\nn02437971\nn02438173\nn02438272\nn02438580\nn02439033\nn02439398\nn02441326\nn02441942\nn02442172\nn02442336\nn02442446\nn02442572\nn02442668\nn02442845\nn02443015\nn02443114\nn02443346\nn02443484\nn02443808\nn02443959\nn02444251\nn02444819\nn02445004\nn02445171\nn02445394\nn02445715\nn02446206\nn02446352\nn02446645\nn02447021\nn02447366\nn02447762\nn02448060\nn02448318\nn02448633\nn02448885\nn02449183\nn02449350\nn02449699\nn02450034\nn02450295\nn02450426\nn02450561\nn02450677\nn02450829\nn02451125\nn02451415\nn02451575\nn02453108\nn02453611\nn02454379\nn02454794\nn02455135\nn02455428\nn02455720\nn02456008\nn02456275\nn02456962\nn02457408\nn02457945\nn02458135\nn02458517\nn02459190\nn02460009\nn02460451\nn02460817\nn02461128\nn02461830\nn02462213\nn02469248\nn02469472\nn02469914\nn02470238\nn02470325\nn02470709\nn02470899\nn02471300\nn02471762\nn02472293\nn02472987\nn02473307\nn02473554\nn02473720\nn02473857\nn02473983\nn02474110\nn02474282\nn02474605\nn02474777\nn02475078\nn02475358\nn02475669\nn02476219\nn02476567\nn02476870\nn02477028\nn02477187\nn02477329\nn02477516\nn02477782\nn02478239\nn02478875\nn02479332\nn02480153\nn02480495\nn02480855\nn02481103\nn02481235\nn02481366\nn02481500\nn02481823\nn02482060\nn02482286\nn02482474\nn02482650\nn02483092\nn02483362\nn02483708\nn02484322\nn02484473\nn02484975\nn02485225\nn02485371\nn02485536\nn02485688\nn02485988\nn02486261\nn02486410\nn02486657\nn02486908\nn02487079\nn02487347\nn02487547\nn02487675\nn02487847\nn02488003\nn02488291\nn02488415\nn02488702\
nn02488894\nn02489166\nn02489589\nn02490219\nn02490597\nn02490811\nn02491107\nn02491329\nn02491474\nn02492035\nn02492356\nn02492660\nn02492948\nn02493224\nn02493509\nn02493793\nn02494079\nn02494383\nn02495242\nn02496052\nn02496913\nn02497673\nn02498153\nn02498743\nn02499022\nn02499316\nn02499568\nn02499808\nn02500267\nn02500596\nn02501583\nn02501923\nn02502006\nn02502514\nn02502807\nn02503127\nn02503517\nn02503756\nn02504013\nn02504458\nn02504770\nn02505063\nn02505238\nn02505485\nn02505998\nn02506947\nn02507148\nn02507649\nn02508021\nn02508213\nn02508346\nn02508742\nn02509197\nn02509515\nn02509815\nn02510455\nn02511730\nn02512053\nn02512752\nn02512830\nn02512938\nn02513248\nn02513355\nn02513560\nn02513727\nn02513805\nn02513939\nn02514041\nn02515214\nn02515713\nn02516188\nn02516776\nn02517442\nn02517938\nn02518324\nn02518622\nn02519148\nn02519340\nn02519472\nn02519686\nn02519862\nn02520147\nn02520525\nn02520810\nn02521646\nn02522399\nn02522637\nn02522722\nn02522866\nn02523110\nn02523427\nn02523877\nn02524202\nn02524524\nn02524659\nn02524928\nn02525382\nn02525703\nn02526121\nn02526425\nn02526818\nn02527057\nn02527271\nn02527622\nn02528163\nn02529293\nn02529772\nn02530052\nn02530188\nn02530421\nn02530637\nn02530831\nn02530999\nn02531114\nn02531625\nn02532028\nn02532272\nn02532451\nn02532602\nn02532786\nn02532918\nn02533209\nn02533545\nn02533834\nn02534165\nn02534559\nn02534734\nn02535080\nn02535163\nn02535258\nn02535537\nn02535759\nn02536165\nn02536456\nn02536864\nn02537085\nn02537319\nn02537525\nn02537716\nn02538010\nn02538216\nn02538406\nn02538562\nn02538985\nn02539424\nn02539573\nn02539894\nn02540412\nn02540983\nn02541257\nn02541687\nn02542017\nn02542432\nn02542958\nn02543255\nn02543565\nn02544274\nn02545841\nn02546028\nn02546331\nn02546627\nn02547014\nn02547733\nn02548247\nn02548689\nn02548884\nn02549248\nn02549376\nn02549989\nn02550203\nn02550460\nn02550655\nn02551134\nn02551668\nn02552171\nn02553028\nn02554730\nn02555863\nn02556373\nn02556846\nn02557182\nn0255731
8\nn02557591\nn02557749\nn02557909\nn02558206\nn02558860\nn02559144\nn02559383\nn02559862\nn02560110\nn02561108\nn02561381\nn02561514\nn02561661\nn02561803\nn02561937\nn02562315\nn02562796\nn02562971\nn02563079\nn02563182\nn02563648\nn02563792\nn02563949\nn02564270\nn02564403\nn02564720\nn02564935\nn02565072\nn02565324\nn02565573\nn02566109\nn02566489\nn02566665\nn02567334\nn02567633\nn02568087\nn02568447\nn02568959\nn02569484\nn02569631\nn02569905\nn02570164\nn02570484\nn02570838\nn02571167\nn02571652\nn02571810\nn02572196\nn02572484\nn02573249\nn02573704\nn02574271\nn02574910\nn02575325\nn02575590\nn02576223\nn02576575\nn02576906\nn02577041\nn02577164\nn02577403\nn02577662\nn02577952\nn02578233\nn02578454\nn02578771\nn02578928\nn02579303\nn02579557\nn02579762\nn02579928\nn02580336\nn02580679\nn02580830\nn02581108\nn02581482\nn02581642\nn02581957\nn02582220\nn02582349\nn02582721\nn02583567\nn02583890\nn02584145\nn02584449\nn02585872\nn02586238\nn02586543\nn02587051\nn02587300\nn02587479\nn02587618\nn02587877\nn02588286\nn02588794\nn02588945\nn02589062\nn02589196\nn02589316\nn02589623\nn02589796\nn02590094\nn02590495\nn02590702\nn02590987\nn02591330\nn02591613\nn02591911\nn02592055\nn02592371\nn02592734\nn02593019\nn02593191\nn02593453\nn02593679\nn02594250\nn02594942\nn02595056\nn02595339\nn02595702\nn02596067\nn02596252\nn02596381\nn02596720\nn02597004\nn02597367\nn02597608\nn02597818\nn02597972\nn02598134\nn02598573\nn02598878\nn02599052\nn02599347\nn02599557\nn02599958\nn02600298\nn02600503\nn02600798\nn02601344\nn02601767\nn02601921\nn02602059\nn02602405\nn02602760\nn02603317\nn02603540\nn02603862\nn02604157\nn02604480\nn02604954\nn02605316\nn02605703\nn02605936\nn02606052\nn02606384\nn02606751\nn02607072\nn02607201\nn02607470\nn02607862\nn02608284\nn02608547\nn02608860\nn02608996\nn02609302\nn02609823\nn02610066\nn02610373\nn02610664\nn02610980\nn02611561\nn02611898\nn02612167\nn02613181\nn02613572\nn02613820\nn02614140\nn02614482\nn02614653\nn02614978\nn02615
298\nn02616128\nn02616397\nn02616851\nn02617537\nn02618094\nn02618513\nn02618827\nn02619165\nn02619550\nn02619861\nn02620167\nn02620578\nn02621258\nn02621908\nn02622249\nn02622547\nn02622712\nn02622955\nn02623445\nn02624167\nn02624551\nn02624807\nn02624987\nn02625258\nn02625612\nn02625851\nn02626089\nn02626265\nn02626471\nn02626762\nn02627037\nn02627292\nn02627532\nn02627835\nn02628062\nn02628259\nn02628600\nn02629230\nn02629716\nn02630281\nn02630615\nn02630739\nn02631041\nn02631330\nn02631475\nn02631628\nn02631775\nn02632039\nn02632494\nn02633422\nn02633677\nn02633977\nn02634545\nn02635154\nn02635580\nn02636170\nn02636405\nn02636550\nn02636854\nn02637179\nn02637475\nn02637977\nn02638596\nn02639087\nn02639605\nn02639922\nn02640242\nn02640626\nn02640857\nn02641379\nn02642107\nn02642644\nn02643112\nn02643316\nn02643566\nn02643836\nn02644113\nn02644360\nn02644501\nn02644665\nn02644817\nn02645538\nn02645691\nn02645953\nn02646667\nn02646892\nn02648035\nn02648625\nn02648916\nn02649218\nn02649546\nn02650050\nn02650413\nn02650541\nn02651060\nn02652132\nn02652668\nn02653145\nn02653497\nn02653786\nn02654112\nn02654425\nn02654745\nn02655020\nn02655523\nn02655848\nn02656032\nn02656301\nn02656670\nn02656969\nn02657368\nn02657694\nn02658079\nn02658531\nn02658811\nn02659176\nn02659478\nn02659808\nn02660091\nn02660208\nn02660519\nn02660640\nn02661017\nn02661473\nn02661618\nn02662239\nn02662397\nn02662559\nn02662825\nn02662993\nn02663211\nn02663485\nn02663849\nn02664285\nn02664642\nn02665250\nn02665985\nn02666196\nn02666501\nn02666624\nn02666943\nn02667093\nn02667244\nn02667379\nn02667478\nn02667576\nn02667693\nn02668393\nn02668613\nn02669295\nn02669442\nn02669534\nn02669723\nn02670186\nn02670382\nn02670683\nn02670935\nn02671780\nn02672152\nn02672371\nn02672831\nn02675077\nn02675219\nn02675522\nn02676097\nn02676261\nn02676566\nn02676670\nn02676938\nn02677028\nn02677136\nn02677436\nn02677718\nn02678010\nn02678384\nn02678897\nn02679142\nn02679257\nn02679961\nn02680110\nn02680512\nn026
80638\nn02680754\nn02681392\nn02682311\nn02682407\nn02682569\nn02682811\nn02682922\nn02683183\nn02683323\nn02683454\nn02683558\nn02683791\nn02684248\nn02684356\nn02684515\nn02684649\nn02684962\nn02685082\nn02685253\nn02685365\nn02685701\nn02685995\nn02686121\nn02686227\nn02686379\nn02686568\nn02687172\nn02687423\nn02687682\nn02687821\nn02687992\nn02688273\nn02688443\nn02689144\nn02689274\nn02689434\nn02689748\nn02689819\nn02690373\nn02690715\nn02691156\nn02692086\nn02692232\nn02692513\nn02692680\nn02692877\nn02693246\nn02693413\nn02693540\nn02694045\nn02694279\nn02694426\nn02694662\nn02694966\nn02695627\nn02695762\nn02696165\nn02696246\nn02696569\nn02696843\nn02697022\nn02697221\nn02697576\nn02697675\nn02697876\nn02698244\nn02698473\nn02698634\nn02699494\nn02699629\nn02699770\nn02699915\nn02700064\nn02700258\nn02700895\nn02701002\nn02701260\nn02701730\nn02702989\nn02703124\nn02703275\nn02704645\nn02704792\nn02704949\nn02705201\nn02705429\nn02705944\nn02706221\nn02706806\nn02708093\nn02708224\nn02708433\nn02708555\nn02708711\nn02708885\nn02709101\nn02709367\nn02709637\nn02709763\nn02709908\nn02710044\nn02710201\nn02710324\nn02710429\nn02710600\nn02711237\nn02711780\nn02712545\nn02712643\nn02713003\nn02713218\nn02713364\nn02713496\nn02714315\nn02714535\nn02714751\nn02715229\nn02715513\nn02715712\nn02716626\nn02720048\nn02720576\nn02721813\nn02723165\nn02724722\nn02725872\nn02726017\nn02726210\nn02726305\nn02726681\nn02727016\nn02727141\nn02727426\nn02727825\nn02728440\nn02729222\nn02729837\nn02729965\nn02730265\nn02730568\nn02730930\nn02731251\nn02731398\nn02731629\nn02731900\nn02732072\nn02732572\nn02732827\nn02733213\nn02733524\nn02734725\nn02734835\nn02735268\nn02735361\nn02735538\nn02735688\nn02736396\nn02736798\nn02737351\nn02737660\nn02738031\nn02738271\nn02738449\nn02738535\nn02738741\nn02738859\nn02738978\nn02739123\nn02739427\nn02739550\nn02739668\nn02739889\nn02740061\nn02740300\nn02740533\nn02740764\nn02741367\nn02741475\nn02742070\nn02742194\nn02742322\nn0
2742468\nn02742753\nn02743426\nn02744323\nn02744844\nn02744961\nn02745492\nn02745611\nn02745816\nn02746008\nn02746225\nn02746365\nn02746595\nn02746683\nn02746978\nn02747063\nn02747177\nn02747672\nn02747802\nn02748183\nn02748359\nn02748491\nn02749169\nn02749292\nn02749479\nn02749670\nn02749790\nn02749953\nn02750070\nn02750169\nn02750320\nn02750652\nn02751067\nn02751215\nn02751295\nn02751490\nn02752199\nn02752496\nn02752615\nn02752810\nn02752917\nn02753044\nn02753394\nn02753710\nn02754103\nn02754656\nn02755140\nn02755352\nn02755529\nn02755675\nn02755823\nn02755984\nn02756098\nn02756854\nn02756977\nn02757061\nn02757337\nn02757462\nn02757714\nn02757810\nn02757927\nn02758134\nn02758490\nn02758863\nn02758960\nn02759257\nn02759387\nn02759700\nn02759963\nn02760099\nn02760199\nn02760298\nn02760429\nn02760658\nn02760855\nn02761034\nn02761206\nn02761392\nn02761557\nn02761696\nn02761834\nn02762169\nn02762371\nn02762508\nn02762725\nn02762909\nn02763083\nn02763198\nn02763306\nn02763604\nn02763714\nn02763901\nn02764044\nn02764398\nn02764505\nn02764614\nn02764779\nn02764935\nn02765028\nn02766168\nn02766320\nn02766534\nn02766792\nn02767038\nn02767147\nn02767433\nn02767665\nn02767956\nn02768114\nn02768226\nn02768433\nn02768655\nn02768973\nn02769075\nn02769290\nn02769669\nn02769748\nn02769963\nn02770078\nn02770211\nn02770585\nn02770721\nn02770830\nn02771004\nn02771166\nn02771286\nn02771547\nn02771750\nn02772101\nn02772435\nn02772554\nn02772700\nn02773037\nn02773838\nn02774152\nn02774630\nn02774921\nn02775039\nn02775178\nn02775483\nn02775689\nn02775813\nn02775897\nn02776007\nn02776205\nn02776505\nn02776631\nn02776825\nn02776978\nn02777100\nn02777292\nn02777402\nn02777638\nn02777734\nn02777927\nn02778131\nn02778294\nn02778456\nn02778588\nn02778669\nn02779435\nn02779609\nn02779719\nn02779971\nn02780315\nn02780445\nn02780588\nn02780704\nn02780815\nn02781121\nn02781213\nn02781338\nn02781517\nn02781764\nn02782093\nn02782432\nn02782602\nn02782681\nn02782778\nn02783035\nn02783161\nn02783324\n
n02783459\nn02783900\nn02783994\nn02784124\nn02784998\nn02785648\nn02786058\nn02786198\nn02786331\nn02786463\nn02786611\nn02786736\nn02786837\nn02787120\nn02787269\nn02787435\nn02787622\nn02788021\nn02788148\nn02788386\nn02788462\nn02788572\nn02788689\nn02789487\nn02790669\nn02790823\nn02790996\nn02791124\nn02791270\nn02791532\nn02791665\nn02791795\nn02792409\nn02792552\nn02792948\nn02793089\nn02793199\nn02793296\nn02793414\nn02793495\nn02793684\nn02793842\nn02793930\nn02794008\nn02794156\nn02794368\nn02794474\nn02794664\nn02794779\nn02794972\nn02795169\nn02795528\nn02795670\nn02795783\nn02795978\nn02796207\nn02796318\nn02796412\nn02796623\nn02796995\nn02797295\nn02797535\nn02797692\nn02797881\nn02799071\nn02799175\nn02799323\nn02799897\nn02800213\nn02800497\nn02800675\nn02800940\nn02801047\nn02801184\nn02801450\nn02801525\nn02801823\nn02801938\nn02802215\nn02802426\nn02802544\nn02802721\nn02802990\nn02803349\nn02803539\nn02803666\nn02803809\nn02803934\nn02804123\nn02804252\nn02804414\nn02804515\nn02804610\nn02805283\nn02805845\nn02805983\nn02806088\nn02806379\nn02806530\nn02806762\nn02806875\nn02806992\nn02807133\nn02807523\nn02807616\nn02807731\nn02808185\nn02808304\nn02808440\nn02808829\nn02808968\nn02809105\nn02809241\nn02809364\nn02809491\nn02809605\nn02809736\nn02810139\nn02810270\nn02810471\nn02810782\nn02811059\nn02811204\nn02811350\nn02811468\nn02811618\nn02811719\nn02811936\nn02812201\nn02812342\nn02812631\nn02812785\nn02812949\nn02813252\nn02813399\nn02813544\nn02813645\nn02813752\nn02813981\nn02814116\nn02814338\nn02814428\nn02814533\nn02814774\nn02814860\nn02815478\nn02815749\nn02815834\nn02815950\nn02816494\nn02816656\nn02816768\nn02817031\nn02817251\nn02817386\nn02817516\nn02817650\nn02817799\nn02818135\nn02818254\nn02818687\nn02818832\nn02819697\nn02820085\nn02820210\nn02820556\nn02820675\nn02821202\nn02821415\nn02821543\nn02821627\nn02821943\nn02822064\nn02822220\nn02822399\nn02822579\nn02822762\nn02822865\nn02823124\nn02823335\nn02823428\nn02823510
\nn02823586\nn02823750\nn02823848\nn02823964\nn02824058\nn02824152\nn02824319\nn02824448\nn02825153\nn02825240\nn02825442\nn02825657\nn02825872\nn02825961\nn02826068\nn02826259\nn02826459\nn02826589\nn02826683\nn02826812\nn02826886\nn02827148\nn02827606\nn02828115\nn02828299\nn02828427\nn02828884\nn02829246\nn02829353\nn02829510\nn02829596\nn02830157\nn02831237\nn02831335\nn02831595\nn02831724\nn02831894\nn02831998\nn02833040\nn02833140\nn02833275\nn02833403\nn02833793\nn02834027\nn02834397\nn02834506\nn02834642\nn02834778\nn02835271\nn02835412\nn02835551\nn02835724\nn02835829\nn02835915\nn02836035\nn02836174\nn02836268\nn02836392\nn02836513\nn02836607\nn02836900\nn02837134\nn02837567\nn02837789\nn02837887\nn02838014\nn02838178\nn02838345\nn02838577\nn02838728\nn02838958\nn02839110\nn02839351\nn02839592\nn02839910\nn02840134\nn02840245\nn02840515\nn02840619\nn02841063\nn02841187\nn02841315\nn02841506\nn02841641\nn02841847\nn02842133\nn02842573\nn02842809\nn02843029\nn02843158\nn02843276\nn02843465\nn02843553\nn02843684\nn02843777\nn02843909\nn02844056\nn02844214\nn02844307\nn02844714\nn02845130\nn02845293\nn02845985\nn02846141\nn02846260\nn02846511\nn02846619\nn02846733\nn02846874\nn02847461\nn02847631\nn02847852\nn02848118\nn02848216\nn02848523\nn02848806\nn02848921\nn02849154\nn02849885\nn02850060\nn02850358\nn02850732\nn02850950\nn02851099\nn02851795\nn02851939\nn02852043\nn02852173\nn02852360\nn02853016\nn02853218\nn02853336\nn02853745\nn02853870\nn02854378\nn02854532\nn02854630\nn02854739\nn02854926\nn02855089\nn02855390\nn02855701\nn02855793\nn02855925\nn02856013\nn02856237\nn02856362\nn02857365\nn02857477\nn02857644\nn02857907\nn02858304\nn02859184\nn02859343\nn02859443\nn02859557\nn02859729\nn02859955\nn02860415\nn02860640\nn02860847\nn02861022\nn02861147\nn02861286\nn02861387\nn02861509\nn02861658\nn02861777\nn02861886\nn02862048\nn02862916\nn02863014\nn02863176\nn02863340\nn02863426\nn02863536\nn02863638\nn02863750\nn02864122\nn02864504\nn02864593\nn028649
87\nn02865351\nn02865665\nn02865931\nn02866106\nn02866386\nn02866578\nn02867401\nn02867592\nn02867715\nn02867966\nn02868240\nn02868429\nn02868546\nn02868638\nn02868975\nn02869155\nn02869249\nn02869563\nn02869737\nn02869837\nn02870526\nn02870676\nn02870772\nn02870880\nn02871005\nn02871147\nn02871314\nn02871439\nn02871525\nn02871631\nn02871824\nn02871963\nn02872333\nn02872529\nn02872752\nn02873520\nn02873623\nn02873733\nn02873839\nn02874086\nn02874214\nn02874336\nn02874442\nn02874537\nn02874642\nn02874750\nn02875436\nn02875626\nn02875948\nn02876084\nn02876326\nn02876457\nn02876657\nn02877266\nn02877513\nn02877642\nn02877765\nn02877962\nn02878107\nn02878222\nn02878425\nn02878534\nn02878628\nn02878796\nn02879087\nn02879309\nn02879422\nn02879517\nn02879718\nn02880189\nn02880393\nn02880546\nn02880842\nn02880940\nn02881193\nn02881546\nn02881757\nn02881906\nn02882190\nn02882301\nn02882483\nn02882647\nn02882894\nn02883004\nn02883101\nn02883205\nn02883344\nn02884225\nn02884450\nn02884859\nn02884994\nn02885108\nn02885233\nn02885338\nn02885462\nn02885882\nn02886321\nn02886434\nn02886599\nn02887079\nn02887209\nn02887489\nn02887832\nn02887970\nn02888270\nn02888429\nn02888569\nn02888898\nn02889425\nn02889646\nn02889856\nn02889996\nn02890188\nn02890351\nn02890513\nn02890662\nn02890804\nn02890940\nn02891188\nn02891788\nn02892201\nn02892304\nn02892392\nn02892499\nn02892626\nn02892767\nn02892948\nn02893269\nn02893418\nn02893608\nn02893692\nn02893941\nn02894024\nn02894158\nn02894337\nn02894605\nn02894847\nn02895008\nn02895154\nn02895328\nn02895438\nn02896074\nn02896294\nn02896442\nn02896694\nn02896856\nn02896949\nn02897097\nn02897389\nn02897820\nn02898093\nn02898173\nn02898269\nn02898369\nn02898585\nn02898711\nn02899439\nn02900160\nn02900459\nn02900594\nn02900705\nn02900857\nn02900987\nn02901114\nn02901259\nn02901377\nn02901481\nn02901620\nn02901793\nn02901901\nn02902079\nn02902687\nn02902816\nn02902916\nn02903006\nn02903126\nn02903204\nn02903727\nn02903852\nn02904109\nn02904233\nn0290
4505\nn02904640\nn02904803\nn02904927\nn02905036\nn02905152\nn02905886\nn02906734\nn02906963\nn02907082\nn02907296\nn02907391\nn02907656\nn02907873\nn02908123\nn02908217\nn02908773\nn02908951\nn02909053\nn02909165\nn02909285\nn02909706\nn02909870\nn02910145\nn02910241\nn02910353\nn02910542\nn02910701\nn02910864\nn02910964\nn02911332\nn02911485\nn02912065\nn02912319\nn02912557\nn02912894\nn02913152\nn02914991\nn02915904\nn02916065\nn02916179\nn02916350\nn02916936\nn02917067\nn02917377\nn02917521\nn02917607\nn02917742\nn02917964\nn02918112\nn02918330\nn02918455\nn02918595\nn02918831\nn02918964\nn02919148\nn02919308\nn02919414\nn02919648\nn02919792\nn02919890\nn02919976\nn02920083\nn02920164\nn02920259\nn02920369\nn02920503\nn02920658\nn02921029\nn02921195\nn02921292\nn02921406\nn02921592\nn02921756\nn02921884\nn02922159\nn02922292\nn02922461\nn02922578\nn02922798\nn02922877\nn02923129\nn02923535\nn02923682\nn02923915\nn02924116\nn02925009\nn02925107\nn02925385\nn02925519\nn02925666\nn02926426\nn02926591\nn02927053\nn02927161\nn02927764\nn02927887\nn02928049\nn02928299\nn02928413\nn02928608\nn02929184\nn02929289\nn02929462\nn02929582\nn02929923\nn02930080\nn02930214\nn02930339\nn02930645\nn02930766\nn02931013\nn02931148\nn02931294\nn02931417\nn02931836\nn02932019\nn02932400\nn02932523\nn02932693\nn02932891\nn02933112\nn02933340\nn02933462\nn02933649\nn02933750\nn02933990\nn02934168\nn02934451\nn02935017\nn02935387\nn02935490\nn02935658\nn02935891\nn02936176\nn02936281\nn02936402\nn02936570\nn02936714\nn02936921\nn02937010\nn02937336\nn02937958\nn02938218\nn02938321\nn02938886\nn02939185\nn02939763\nn02939866\nn02940289\nn02940385\nn02940570\nn02940706\nn02941095\nn02941228\nn02941845\nn02942015\nn02942147\nn02942349\nn02942460\nn02942699\nn02943241\nn02943465\nn02943686\nn02943871\nn02943964\nn02944075\nn02944146\nn02944256\nn02944459\nn02944579\nn02944826\nn02945161\nn02945813\nn02945964\nn02946127\nn02946270\nn02946348\nn02946509\nn02946753\nn02946824\nn02946921\nn02
947212\nn02947660\nn02947818\nn02947977\nn02948072\nn02948293\nn02948403\nn02948557\nn02948834\nn02948942\nn02949084\nn02949202\nn02949356\nn02949542\nn02950018\nn02950120\nn02950186\nn02950256\nn02950482\nn02950632\nn02950826\nn02950943\nn02951358\nn02951585\nn02951703\nn02951843\nn02952109\nn02952237\nn02952374\nn02952485\nn02952585\nn02952674\nn02952798\nn02952935\nn02953056\nn02953197\nn02953455\nn02953552\nn02953673\nn02953850\nn02954163\nn02954340\nn02954938\nn02955065\nn02955247\nn02955540\nn02955767\nn02956393\nn02956699\nn02956795\nn02956883\nn02957008\nn02957135\nn02957252\nn02957427\nn02957755\nn02957862\nn02958343\nn02959942\nn02960352\nn02960690\nn02960903\nn02961035\nn02961225\nn02961451\nn02961544\nn02961947\nn02962061\nn02962200\nn02962414\nn02962843\nn02962938\nn02963159\nn02963302\nn02963503\nn02963692\nn02963821\nn02963987\nn02964075\nn02964196\nn02964295\nn02964634\nn02964843\nn02964934\nn02965024\nn02965122\nn02965216\nn02965300\nn02965529\nn02965783\nn02966068\nn02966193\nn02966545\nn02966687\nn02966786\nn02966942\nn02967081\nn02967170\nn02967294\nn02967407\nn02967540\nn02967626\nn02967782\nn02967991\nn02968074\nn02968210\nn02968333\nn02968473\nn02969010\nn02969163\nn02969323\nn02969527\nn02969634\nn02969886\nn02970408\nn02970534\nn02970685\nn02970849\nn02971167\nn02971356\nn02971473\nn02971579\nn02971691\nn02971940\nn02972397\nn02972714\nn02972934\nn02973017\nn02973236\nn02973805\nn02973904\nn02974003\nn02974348\nn02974454\nn02974565\nn02974697\nn02975212\nn02975589\nn02975994\nn02976123\nn02976249\nn02976350\nn02976455\nn02976552\nn02976641\nn02976815\nn02976939\nn02977058\nn02977330\nn02977438\nn02977619\nn02977936\nn02978055\nn02978205\nn02978367\nn02978478\nn02978753\nn02978881\nn02979074\nn02979186\nn02979290\nn02979399\nn02979516\nn02979836\nn02980036\nn02980203\nn02980441\nn02980625\nn02981024\nn02981198\nn02981321\nn02981565\nn02981792\nn02981911\nn02982232\nn02982416\nn02982515\nn02982599\nn02983072\nn02983189\nn02983357\nn02983507\nn
02983904\nn02984061\nn02984203\nn02984469\nn02984699\nn02985137\nn02985606\nn02985828\nn02985963\nn02986066\nn02986160\nn02986348\nn02987047\nn02987379\nn02987492\nn02987706\nn02987823\nn02987950\nn02988066\nn02988156\nn02988304\nn02988486\nn02988679\nn02988963\nn02989099\nn02990373\nn02990758\nn02991048\nn02991302\nn02991847\nn02992032\nn02992211\nn02992368\nn02992529\nn02992795\nn02993194\nn02993368\nn02993546\nn02994573\nn02994743\nn02995345\nn02995871\nn02995998\nn02997391\nn02997607\nn02997910\nn02998003\nn02998107\nn02998563\nn02998696\nn02998841\nn02999138\nn02999410\nn02999936\nn03000134\nn03000247\nn03000530\nn03000684\nn03001115\nn03001282\nn03001540\nn03001627\nn03002096\nn03002210\nn03002341\nn03002555\nn03002711\nn03002816\nn03002948\nn03003091\nn03003633\nn03004275\nn03004409\nn03004531\nn03004620\nn03004713\nn03004824\nn03005033\nn03005147\nn03005285\nn03005515\nn03005619\nn03006626\nn03006788\nn03006903\nn03007130\nn03007297\nn03007444\nn03007591\nn03008177\nn03008817\nn03008976\nn03009111\nn03009269\nn03009794\nn03010473\nn03010656\nn03010795\nn03010915\nn03011018\nn03011355\nn03011741\nn03012013\nn03012159\nn03012373\nn03012499\nn03012644\nn03012734\nn03012897\nn03013006\nn03013438\nn03013580\nn03013850\nn03014440\nn03014705\nn03015149\nn03015254\nn03015478\nn03015631\nn03015851\nn03016209\nn03016389\nn03016609\nn03016737\nn03016868\nn03016953\nn03017070\nn03017168\nn03017698\nn03017835\nn03018209\nn03018349\nn03018614\nn03018712\nn03018848\nn03019198\nn03019304\nn03019434\nn03019685\nn03019806\nn03019938\nn03020034\nn03020416\nn03020692\nn03021228\nn03024064\nn03024233\nn03024333\nn03024518\nn03025070\nn03025165\nn03025250\nn03025886\nn03026506\nn03026907\nn03027001\nn03027108\nn03027250\nn03027505\nn03027625\nn03028079\nn03028596\nn03028785\nn03029066\nn03029197\nn03029296\nn03029445\nn03029925\nn03030262\nn03030353\nn03030557\nn03030880\nn03031012\nn03031152\nn03031422\nn03031756\nn03032252\nn03032453\nn03032811\nn03033267\nn03033362\nn03033986\
nn03034244\nn03034405\nn03034516\nn03034663\nn03035252\nn03035510\nn03035715\nn03035832\nn03036022\nn03036149\nn03036244\nn03036341\nn03036469\nn03036701\nn03036866\nn03037108\nn03037228\nn03037404\nn03037590\nn03037709\nn03038041\nn03038281\nn03038480\nn03038685\nn03038870\nn03039015\nn03039259\nn03039353\nn03039493\nn03039827\nn03039947\nn03040229\nn03040376\nn03040836\nn03041114\nn03041265\nn03041449\nn03041632\nn03041810\nn03042139\nn03042384\nn03042490\nn03042697\nn03042829\nn03042984\nn03043173\nn03043274\nn03043423\nn03043693\nn03043798\nn03043958\nn03044671\nn03044801\nn03044934\nn03045074\nn03045228\nn03045337\nn03045698\nn03045800\nn03046029\nn03046133\nn03046257\nn03046802\nn03046921\nn03047052\nn03047171\nn03047690\nn03047799\nn03047941\nn03048883\nn03049066\nn03049326\nn03049457\nn03049782\nn03049924\nn03050026\nn03050453\nn03050546\nn03050655\nn03050864\nn03051041\nn03051249\nn03051396\nn03051540\nn03052464\nn03052917\nn03053047\nn03053976\nn03054491\nn03054605\nn03054901\nn03055159\nn03055418\nn03055670\nn03055857\nn03056097\nn03056215\nn03056288\nn03056493\nn03056583\nn03056873\nn03057021\nn03057541\nn03057636\nn03057724\nn03057841\nn03057920\nn03058107\nn03058603\nn03058949\nn03059103\nn03059236\nn03059366\nn03059685\nn03059934\nn03060728\nn03061050\nn03061211\nn03061345\nn03061505\nn03061674\nn03061819\nn03061893\nn03062015\nn03062122\nn03062245\nn03062336\nn03062651\nn03062798\nn03062985\nn03063073\nn03063199\nn03063338\nn03063485\nn03063599\nn03063689\nn03063834\nn03063968\nn03064250\nn03064350\nn03064562\nn03064758\nn03064935\nn03065243\nn03065424\nn03065708\nn03066232\nn03066359\nn03066464\nn03066849\nn03067093\nn03067212\nn03067339\nn03067518\nn03068181\nn03068998\nn03069752\nn03070059\nn03070193\nn03070396\nn03070587\nn03070854\nn03071021\nn03071160\nn03071288\nn03071552\nn03072056\nn03072201\nn03072440\nn03072682\nn03073296\nn03073384\nn03073545\nn03073694\nn03073977\nn03074380\nn03074855\nn03075097\nn03075248\nn03075370\nn03075500\nn0307563
4\nn03075768\nn03075946\nn03076411\nn03076623\nn03076708\nn03077442\nn03077616\nn03077741\nn03078287\nn03078506\nn03078670\nn03078802\nn03078995\nn03079136\nn03079230\nn03079494\nn03079616\nn03079741\nn03080309\nn03080497\nn03080633\nn03080731\nn03080904\nn03081859\nn03081986\nn03082127\nn03082280\nn03082450\nn03082656\nn03082807\nn03082979\nn03084420\nn03084834\nn03085013\nn03085219\nn03085333\nn03085602\nn03085781\nn03085915\nn03086183\nn03086457\nn03086580\nn03086670\nn03086868\nn03087069\nn03087245\nn03087366\nn03087521\nn03087643\nn03087816\nn03088389\nn03088580\nn03088707\nn03089477\nn03089624\nn03089753\nn03089879\nn03090000\nn03090172\nn03090437\nn03090710\nn03090856\nn03091044\nn03091223\nn03091374\nn03091907\nn03092053\nn03092166\nn03092314\nn03092476\nn03092656\nn03092883\nn03093427\nn03093792\nn03094159\nn03094503\nn03095699\nn03095965\nn03096439\nn03096960\nn03097362\nn03097535\nn03097673\nn03098140\nn03098515\nn03098688\nn03098806\nn03098959\nn03099147\nn03099274\nn03099454\nn03099622\nn03099771\nn03099945\nn03100240\nn03100346\nn03100490\nn03100897\nn03101156\nn03101302\nn03101375\nn03101517\nn03101664\nn03101796\nn03101986\nn03102371\nn03102516\nn03102654\nn03102859\nn03103128\nn03103396\nn03103563\nn03103904\nn03104019\nn03104512\nn03105088\nn03105214\nn03105306\nn03105467\nn03105645\nn03105810\nn03105974\nn03106722\nn03106898\nn03107046\nn03107488\nn03107716\nn03108455\nn03108624\nn03108759\nn03108853\nn03109033\nn03109150\nn03109253\nn03109693\nn03109881\nn03110202\nn03110669\nn03111041\nn03111177\nn03111296\nn03111690\nn03112240\nn03112719\nn03112869\nn03113152\nn03113505\nn03113657\nn03113835\nn03114041\nn03114236\nn03114379\nn03114504\nn03114743\nn03114839\nn03115014\nn03115180\nn03115400\nn03115663\nn03115762\nn03115897\nn03116008\nn03116163\nn03116530\nn03116767\nn03117199\nn03117642\nn03118346\nn03118969\nn03119203\nn03119396\nn03119510\nn03120198\nn03120491\nn03120778\nn03121040\nn03121190\nn03121298\nn03121431\nn03121897\nn03122073\nn03122
202\nn03122295\nn03122748\nn03123553\nn03123666\nn03123809\nn03123917\nn03124043\nn03124170\nn03124313\nn03124474\nn03124590\nn03125057\nn03125588\nn03125729\nn03125870\nn03126090\nn03126385\nn03126580\nn03126707\nn03126927\nn03127024\nn03127203\nn03127408\nn03127531\nn03127747\nn03127925\nn03128085\nn03128248\nn03128427\nn03128519\nn03129001\nn03129471\nn03129636\nn03129753\nn03129848\nn03130066\nn03130233\nn03130563\nn03130761\nn03130866\nn03131193\nn03131574\nn03131669\nn03131967\nn03132076\nn03132261\nn03132438\nn03132666\nn03132776\nn03133050\nn03133415\nn03133878\nn03134118\nn03134232\nn03134394\nn03134739\nn03134853\nn03135030\nn03135532\nn03135656\nn03135788\nn03135917\nn03136051\nn03136254\nn03136369\nn03136504\nn03137473\nn03137579\nn03138128\nn03138217\nn03138344\nn03138669\nn03139089\nn03139464\nn03139640\nn03139998\nn03140126\nn03140292\nn03140431\nn03140546\nn03140652\nn03140771\nn03140900\nn03141065\nn03141327\nn03141455\nn03141612\nn03141702\nn03141823\nn03142099\nn03142205\nn03142325\nn03142431\nn03142679\nn03143400\nn03143572\nn03143754\nn03144156\nn03144873\nn03144982\nn03145147\nn03145277\nn03145384\nn03145522\nn03145719\nn03145843\nn03146219\nn03146342\nn03146449\nn03146560\nn03146687\nn03146777\nn03146846\nn03147084\nn03147156\nn03147280\nn03147509\nn03148324\nn03148518\nn03148727\nn03148808\nn03149135\nn03149401\nn03149686\nn03149810\nn03150232\nn03150511\nn03150661\nn03150795\nn03151077\nn03152303\nn03152951\nn03153246\nn03153585\nn03153948\nn03154073\nn03154316\nn03154446\nn03154616\nn03154745\nn03154895\nn03155178\nn03155502\nn03155915\nn03156071\nn03156279\nn03156405\nn03156767\nn03157348\nn03158186\nn03158414\nn03158668\nn03158796\nn03158885\nn03159535\nn03159640\nn03160001\nn03160186\nn03160309\nn03160740\nn03161016\nn03161450\nn03161893\nn03162297\nn03162460\nn03162556\nn03162714\nn03162818\nn03163222\nn03163381\nn03163488\nn03163798\nn03163973\nn03164192\nn03164344\nn03164605\nn03164722\nn03164929\nn03165096\nn03165211\nn03165466\nn031
65616\nn03165823\nn03165955\nn03166120\nn03166514\nn03166600\nn03166685\nn03166809\nn03166951\nn03167153\nn03167978\nn03168107\nn03168217\nn03168543\nn03168663\nn03168774\nn03168933\nn03169063\nn03169176\nn03170292\nn03170459\nn03170635\nn03170872\nn03171228\nn03171356\nn03171635\nn03171910\nn03172038\nn03172738\nn03172965\nn03173270\nn03173387\nn03173929\nn03174079\nn03174450\nn03174731\nn03175081\nn03175189\nn03175301\nn03175457\nn03175604\nn03175843\nn03175983\nn03176238\nn03176386\nn03176594\nn03176763\nn03177059\nn03177165\nn03177708\nn03178000\nn03178173\nn03178430\nn03178538\nn03178674\nn03179701\nn03179910\nn03180011\nn03180384\nn03180504\nn03180732\nn03180865\nn03180969\nn03181293\nn03181667\nn03182140\nn03182232\nn03182912\nn03183080\nn03185868\nn03186199\nn03186285\nn03186818\nn03187037\nn03187153\nn03187268\nn03187595\nn03187751\nn03188290\nn03188531\nn03188725\nn03188871\nn03189083\nn03189311\nn03189818\nn03190458\nn03191286\nn03191451\nn03191561\nn03191776\nn03192543\nn03192907\nn03193107\nn03193260\nn03193423\nn03193597\nn03193754\nn03194170\nn03194297\nn03194812\nn03194992\nn03195332\nn03195485\nn03195799\nn03195959\nn03196062\nn03196217\nn03196324\nn03196598\nn03196990\nn03197201\nn03197337\nn03197446\nn03198223\nn03198500\nn03199358\nn03199488\nn03199647\nn03199775\nn03199901\nn03200231\nn03200357\nn03200539\nn03200701\nn03200906\nn03201035\nn03201208\nn03201529\nn03201638\nn03201776\nn03201895\nn03201996\nn03202354\nn03202481\nn03202760\nn03202940\nn03203089\nn03203806\nn03204134\nn03204306\nn03204436\nn03204558\nn03204955\nn03205143\nn03205304\nn03205458\nn03205574\nn03205669\nn03205903\nn03206023\nn03206158\nn03206282\nn03206405\nn03206602\nn03206718\nn03206908\nn03207305\nn03207548\nn03207630\nn03207743\nn03207835\nn03207941\nn03208556\nn03208938\nn03209359\nn03209477\nn03209666\nn03209910\nn03210245\nn03210372\nn03210552\nn03210683\nn03211117\nn03211413\nn03211616\nn03211789\nn03212114\nn03212247\nn03212406\nn03212811\nn03213014\nn03213361\nn0
3213538\nn03213715\nn03213826\nn03214253\nn03214450\nn03214582\nn03214966\nn03215076\nn03215191\nn03215337\nn03215508\nn03215749\nn03215930\nn03216199\nn03216402\nn03216562\nn03216710\nn03216828\nn03217653\nn03217739\nn03217889\nn03218198\nn03218446\nn03219010\nn03219135\nn03219483\nn03219612\nn03219859\nn03219966\nn03220095\nn03220237\nn03220513\nn03220692\nn03221059\nn03221351\nn03221540\nn03221720\nn03222176\nn03222318\nn03222516\nn03222722\nn03222857\nn03223162\nn03223299\nn03223441\nn03223553\nn03223686\nn03223923\nn03224490\nn03224603\nn03224753\nn03224893\nn03225108\nn03225458\nn03225616\nn03225777\nn03225988\nn03226090\nn03226254\nn03226375\nn03226538\nn03226880\nn03227010\nn03227184\nn03227317\nn03227721\nn03227856\nn03228016\nn03228254\nn03228365\nn03228533\nn03228692\nn03228796\nn03228967\nn03229115\nn03229244\nn03229526\nn03231160\nn03231368\nn03231819\nn03232309\nn03232417\nn03232543\nn03232815\nn03232923\nn03233123\nn03233624\nn03233744\nn03233905\nn03234164\nn03234952\nn03235042\nn03235180\nn03235327\nn03235796\nn03235979\nn03236093\nn03236217\nn03236423\nn03236580\nn03236735\nn03237212\nn03237340\nn03237416\nn03237639\nn03237839\nn03237992\nn03238131\nn03238286\nn03238586\nn03238762\nn03238879\nn03239054\nn03239259\nn03239607\nn03239726\nn03240140\nn03240683\nn03240892\nn03241093\nn03241335\nn03241496\nn03241903\nn03242120\nn03242264\nn03242390\nn03242506\nn03242995\nn03243218\nn03243625\nn03244047\nn03244231\nn03244388\nn03244775\nn03244919\nn03245271\nn03245421\nn03245724\nn03245889\nn03246197\nn03246312\nn03246454\nn03246653\nn03246933\nn03247083\nn03247351\nn03247495\nn03248835\nn03249342\nn03249569\nn03249956\nn03250089\nn03250279\nn03250405\nn03250588\nn03250847\nn03250952\nn03251100\nn03251280\nn03251533\nn03251766\nn03251932\nn03252231\nn03252324\nn03252422\nn03252637\nn03252787\nn03253071\nn03253187\nn03253279\nn03253714\nn03253796\nn03253886\nn03254046\nn03254189\nn03254374\nn03254625\nn03254737\nn03254862\nn03255030\nn03255167\nn03255322\n
n03255488\nn03255899\nn03256032\nn03256166\nn03256472\nn03256631\nn03256788\nn03256928\nn03257065\nn03257210\nn03257586\nn03258192\nn03258330\nn03258456\nn03258577\nn03258905\nn03259009\nn03259280\nn03259401\nn03259505\nn03260206\nn03260504\nn03260733\nn03260849\nn03261019\nn03261263\nn03261395\nn03261603\nn03261776\nn03262072\nn03262248\nn03262519\nn03262717\nn03262809\nn03262932\nn03263076\nn03263338\nn03263640\nn03263758\nn03264906\nn03265032\nn03265754\nn03266195\nn03266371\nn03266620\nn03266749\nn03267113\nn03267468\nn03267696\nn03267821\nn03268142\nn03268311\nn03268645\nn03268790\nn03268918\nn03269073\nn03269203\nn03269401\nn03270165\nn03270695\nn03270854\nn03271030\nn03271260\nn03271376\nn03271574\nn03271765\nn03271865\nn03272010\nn03272125\nn03272239\nn03272383\nn03272562\nn03272810\nn03272940\nn03273061\nn03273551\nn03273740\nn03273913\nn03274265\nn03274435\nn03274561\nn03274796\nn03275125\nn03275311\nn03275566\nn03275681\nn03275864\nn03276179\nn03276696\nn03276839\nn03277004\nn03277149\nn03277459\nn03277602\nn03277771\nn03278248\nn03278914\nn03279153\nn03279364\nn03279508\nn03279804\nn03279918\nn03280216\nn03280394\nn03280644\nn03281145\nn03281524\nn03281673\nn03282060\nn03282295\nn03282401\nn03283221\nn03283413\nn03283827\nn03284308\nn03284482\nn03284743\nn03284886\nn03284981\nn03285578\nn03285730\nn03285912\nn03286572\nn03287351\nn03287733\nn03288003\nn03288500\nn03288643\nn03288742\nn03288886\nn03289660\nn03289985\nn03290096\nn03290195\nn03290653\nn03291413\nn03291551\nn03291741\nn03291819\nn03291963\nn03292085\nn03292362\nn03292475\nn03292603\nn03292736\nn03292960\nn03293095\nn03293741\nn03293863\nn03294048\nn03294604\nn03294833\nn03295012\nn03295140\nn03295246\nn03295928\nn03296081\nn03296217\nn03296328\nn03296478\nn03296963\nn03297103\nn03297226\nn03297495\nn03297644\nn03297735\nn03298089\nn03298352\nn03298716\nn03298858\nn03299406\nn03300216\nn03300443\nn03301175\nn03301291\nn03301389\nn03301568\nn03301833\nn03301940\nn03302671\nn03302790\nn03302938
\nn03303217\nn03303669\nn03303831\nn03304197\nn03304323\nn03304465\nn03305300\nn03305522\nn03305953\nn03306385\nn03306869\nn03307037\nn03307573\nn03307792\nn03308152\nn03308481\nn03308614\nn03309110\nn03309356\nn03309465\nn03309687\nn03309808\nn03313333\nn03314227\nn03314378\nn03314608\nn03314780\nn03314884\nn03315644\nn03315805\nn03315990\nn03316105\nn03316406\nn03316873\nn03317233\nn03317510\nn03317673\nn03317788\nn03317889\nn03318136\nn03318294\nn03318865\nn03318983\nn03319167\nn03319457\nn03319576\nn03319745\nn03320046\nn03320262\nn03320421\nn03320519\nn03320845\nn03320959\nn03321103\nn03321419\nn03321563\nn03321843\nn03321954\nn03322570\nn03322704\nn03322836\nn03322940\nn03323096\nn03323211\nn03323319\nn03323703\nn03324629\nn03324814\nn03324928\nn03325088\nn03325288\nn03325403\nn03325584\nn03325691\nn03325941\nn03326073\nn03326371\nn03326475\nn03326660\nn03326795\nn03326948\nn03327133\nn03327234\nn03327553\nn03327691\nn03327841\nn03328201\nn03329302\nn03329536\nn03329663\nn03330002\nn03330665\nn03330792\nn03330947\nn03331077\nn03331244\nn03331599\nn03332005\nn03332173\nn03332271\nn03332393\nn03332591\nn03332784\nn03332989\nn03333129\nn03333252\nn03333349\nn03333610\nn03333711\nn03333851\nn03334017\nn03334291\nn03334382\nn03334492\nn03334912\nn03335030\nn03335333\nn03335461\nn03335846\nn03336168\nn03336282\nn03336575\nn03336742\nn03336839\nn03337140\nn03337383\nn03337494\nn03337822\nn03338287\nn03338821\nn03339296\nn03339529\nn03339643\nn03340009\nn03340723\nn03340923\nn03341035\nn03341153\nn03341297\nn03341606\nn03342015\nn03342127\nn03342262\nn03342432\nn03342657\nn03342863\nn03342961\nn03343047\nn03343234\nn03343354\nn03343560\nn03343737\nn03343853\nn03344305\nn03344393\nn03344509\nn03344642\nn03344784\nn03344935\nn03345487\nn03345837\nn03346135\nn03346289\nn03346455\nn03347037\nn03347472\nn03347617\nn03348142\nn03348868\nn03349020\nn03349296\nn03349367\nn03349469\nn03349599\nn03349771\nn03349892\nn03350204\nn03350352\nn03350456\nn03350602\nn03351151\nn033512
62\nn03351434\nn03351979\nn03352232\nn03352366\nn03352628\nn03352961\nn03353281\nn03353951\nn03354207\nn03354903\nn03355468\nn03355768\nn03355925\nn03356038\nn03356279\nn03356446\nn03356559\nn03356858\nn03356982\nn03357081\nn03357267\nn03357716\nn03358172\nn03358380\nn03358726\nn03358841\nn03359137\nn03359285\nn03359436\nn03359566\nn03360133\nn03360300\nn03360431\nn03360622\nn03360731\nn03361109\nn03361297\nn03361380\nn03361550\nn03361683\nn03362639\nn03362771\nn03362890\nn03363363\nn03363549\nn03363749\nn03364008\nn03364156\nn03364599\nn03364937\nn03365231\nn03365374\nn03365592\nn03365991\nn03366464\nn03366721\nn03366823\nn03366974\nn03367059\nn03367321\nn03367410\nn03367545\nn03367875\nn03367969\nn03368048\nn03368352\nn03369276\nn03369407\nn03369512\nn03369866\nn03370387\nn03370646\nn03371875\nn03372029\nn03372549\nn03372822\nn03372933\nn03373237\nn03373611\nn03373943\nn03374102\nn03374282\nn03374372\nn03374473\nn03374570\nn03374649\nn03374838\nn03375171\nn03375329\nn03375575\nn03376159\nn03376279\nn03376595\nn03376771\nn03376938\nn03378005\nn03378174\nn03378342\nn03378442\nn03378593\nn03378765\nn03379051\nn03379204\nn03379343\nn03379719\nn03379828\nn03379989\nn03380301\nn03380647\nn03380724\nn03380867\nn03381126\nn03381231\nn03381450\nn03381565\nn03381776\nn03382104\nn03382292\nn03382413\nn03382533\nn03382708\nn03382856\nn03382969\nn03383099\nn03383211\nn03383378\nn03383468\nn03383562\nn03383821\nn03384167\nn03384352\nn03384891\nn03385295\nn03385557\nn03386011\nn03386343\nn03386544\nn03386726\nn03386870\nn03387323\nn03387653\nn03388043\nn03388183\nn03388323\nn03388549\nn03388711\nn03388990\nn03389611\nn03389761\nn03389889\nn03389983\nn03390075\nn03390327\nn03390673\nn03390786\nn03390983\nn03391301\nn03391613\nn03391770\nn03392648\nn03392741\nn03393017\nn03393199\nn03393324\nn03393761\nn03393912\nn03394149\nn03394272\nn03394480\nn03394649\nn03394916\nn03395256\nn03395401\nn03395514\nn03395859\nn03396074\nn03396580\nn03396654\nn03396997\nn03397087\nn03397266\nn0339
7412\nn03397532\nn03397947\nn03398153\nn03398228\nn03399579\nn03399677\nn03399761\nn03399971\nn03400231\nn03400972\nn03401129\nn03401279\nn03401721\nn03402188\nn03402369\nn03402511\nn03402785\nn03402941\nn03403643\nn03404012\nn03404149\nn03404251\nn03404360\nn03404449\nn03404900\nn03405111\nn03405265\nn03405595\nn03405725\nn03406759\nn03406966\nn03407369\nn03407865\nn03408054\nn03408264\nn03408340\nn03408444\nn03409297\nn03409393\nn03409591\nn03409920\nn03410022\nn03410147\nn03410303\nn03410423\nn03410571\nn03410740\nn03410938\nn03411079\nn03411208\nn03411339\nn03411927\nn03412058\nn03412220\nn03412387\nn03412511\nn03412906\nn03413124\nn03413264\nn03413428\nn03413684\nn03413828\nn03414029\nn03414162\nn03414676\nn03415252\nn03415486\nn03415626\nn03415749\nn03415868\nn03416094\nn03416489\nn03416640\nn03416775\nn03416900\nn03417042\nn03417202\nn03417345\nn03417749\nn03417970\nn03418158\nn03418242\nn03418402\nn03418618\nn03418749\nn03418915\nn03419014\nn03420345\nn03420801\nn03420935\nn03421117\nn03421324\nn03421485\nn03421669\nn03421768\nn03421960\nn03422072\nn03422484\nn03422589\nn03422771\nn03423099\nn03423224\nn03423306\nn03423479\nn03423568\nn03423719\nn03423877\nn03424204\nn03424325\nn03424489\nn03424630\nn03424862\nn03425241\nn03425325\nn03425413\nn03425595\nn03425769\nn03426134\nn03426285\nn03426462\nn03426574\nn03426871\nn03427202\nn03427296\nn03428090\nn03428226\nn03428349\nn03429003\nn03429137\nn03429288\nn03429682\nn03429771\nn03429914\nn03430091\nn03430313\nn03430418\nn03430551\nn03430959\nn03431243\nn03431570\nn03431745\nn03432061\nn03432129\nn03432360\nn03432509\nn03433247\nn03433637\nn03433877\nn03434188\nn03434285\nn03434830\nn03435593\nn03435743\nn03435991\nn03436075\nn03436182\nn03436417\nn03436549\nn03436656\nn03436772\nn03436891\nn03436990\nn03437184\nn03437295\nn03437430\nn03437581\nn03437741\nn03437829\nn03437941\nn03438071\nn03438257\nn03438661\nn03438780\nn03438863\nn03439348\nn03439631\nn03439814\nn03440216\nn03440682\nn03440876\nn03441112\nn03
441345\nn03441465\nn03441582\nn03442288\nn03442487\nn03442597\nn03442756\nn03443005\nn03443149\nn03443371\nn03443543\nn03443912\nn03444034\nn03445326\nn03445617\nn03445777\nn03445924\nn03446070\nn03446268\nn03446832\nn03447075\nn03447358\nn03447447\nn03447721\nn03447894\nn03448031\nn03448590\nn03448696\nn03448956\nn03449217\nn03449309\nn03449451\nn03449564\nn03449858\nn03450230\nn03450516\nn03450734\nn03450881\nn03450974\nn03451120\nn03451253\nn03451365\nn03451711\nn03451798\nn03452267\nn03452449\nn03452594\nn03452741\nn03453231\nn03453320\nn03453443\nn03454110\nn03454211\nn03454442\nn03454536\nn03454707\nn03454885\nn03455355\nn03455488\nn03455642\nn03455802\nn03456024\nn03456186\nn03456299\nn03456447\nn03456548\nn03456665\nn03457008\nn03457451\nn03457686\nn03457902\nn03458271\nn03458422\nn03459328\nn03459591\nn03459775\nn03459914\nn03460040\nn03460147\nn03460297\nn03460455\nn03460899\nn03461288\nn03461385\nn03461651\nn03461882\nn03461988\nn03462110\nn03462315\nn03462747\nn03462972\nn03463185\nn03463381\nn03463666\nn03464053\nn03464467\nn03464628\nn03464952\nn03465040\nn03465151\nn03465320\nn03465426\nn03465500\nn03465605\nn03465718\nn03465818\nn03466162\nn03466493\nn03466600\nn03466839\nn03466947\nn03467068\nn03467254\nn03467380\nn03467517\nn03467796\nn03467887\nn03467984\nn03468570\nn03468696\nn03468821\nn03469031\nn03469175\nn03469493\nn03469832\nn03469903\nn03470005\nn03470222\nn03470387\nn03470629\nn03470948\nn03471030\nn03471190\nn03471347\nn03471779\nn03472232\nn03472535\nn03472672\nn03472796\nn03472937\nn03473078\nn03473227\nn03473465\nn03473817\nn03473966\nn03474167\nn03474352\nn03474779\nn03474896\nn03475581\nn03475674\nn03475823\nn03475961\nn03476083\nn03476313\nn03476542\nn03476684\nn03476991\nn03477143\nn03477303\nn03477410\nn03477512\nn03477773\nn03477902\nn03478589\nn03478756\nn03478907\nn03479121\nn03479266\nn03479397\nn03479502\nn03480579\nn03480719\nn03480973\nn03481172\nn03481521\nn03482001\nn03482128\nn03482252\nn03482405\nn03482523\nn03482877\nn
03483086\nn03483230\nn03483316\nn03483531\nn03483637\nn03483823\nn03483971\nn03484083\nn03484487\nn03484576\nn03484809\nn03484931\nn03485198\nn03485309\nn03485407\nn03485575\nn03485794\nn03487090\nn03487331\nn03487444\nn03487533\nn03487642\nn03487774\nn03487886\nn03488111\nn03488188\nn03488438\nn03488603\nn03488784\nn03488887\nn03489048\nn03489162\nn03490006\nn03490119\nn03490324\nn03490449\nn03490649\nn03490784\nn03490884\nn03491032\nn03491724\nn03491988\nn03492087\nn03492250\nn03492542\nn03492922\nn03493219\nn03493792\nn03493911\nn03494278\nn03494537\nn03494706\nn03495039\nn03495258\nn03495570\nn03495671\nn03495941\nn03496183\nn03496296\nn03496486\nn03496612\nn03496892\nn03497100\nn03497352\nn03497657\nn03498441\nn03498536\nn03498662\nn03498781\nn03498866\nn03498962\nn03499354\nn03499468\nn03499907\nn03500090\nn03500209\nn03500295\nn03500389\nn03500457\nn03500557\nn03500699\nn03500838\nn03500971\nn03501152\nn03501288\nn03501520\nn03501614\nn03502200\nn03502331\nn03502509\nn03502777\nn03502897\nn03503097\nn03503233\nn03503358\nn03503477\nn03503567\nn03503718\nn03503997\nn03504205\nn03504293\nn03504723\nn03505015\nn03505133\nn03505383\nn03505504\nn03505667\nn03505764\nn03506028\nn03506184\nn03506370\nn03506560\nn03506727\nn03506880\nn03507241\nn03507458\nn03507658\nn03507963\nn03508101\nn03508485\nn03508881\nn03509394\nn03509608\nn03509843\nn03510072\nn03510244\nn03510384\nn03510487\nn03510583\nn03510866\nn03510987\nn03511175\nn03511333\nn03512030\nn03512147\nn03512452\nn03512624\nn03512911\nn03513137\nn03513376\nn03514129\nn03514340\nn03514451\nn03514693\nn03514894\nn03515338\nn03515934\nn03516266\nn03516367\nn03516647\nn03516844\nn03516996\nn03517509\nn03517647\nn03517760\nn03517899\nn03517982\nn03518135\nn03518230\nn03518305\nn03518445\nn03518631\nn03518829\nn03518943\nn03519081\nn03519226\nn03519387\nn03519674\nn03519848\nn03520493\nn03521076\nn03521431\nn03521544\nn03521675\nn03521771\nn03521899\nn03522003\nn03522100\nn03522634\nn03522863\nn03522990\nn03523134\
nn03523398\nn03523506\nn03523987\nn03524150\nn03524287\nn03524425\nn03524574\nn03524745\nn03524976\nn03525074\nn03525252\nn03525454\nn03525693\nn03525827\nn03526062\nn03527149\nn03527444\nn03527565\nn03527675\nn03528100\nn03528263\nn03528523\nn03528901\nn03529175\nn03529444\nn03529629\nn03529860\nn03530189\nn03530511\nn03530642\nn03530910\nn03531281\nn03531447\nn03531546\nn03531691\nn03531982\nn03532342\nn03532672\nn03532919\nn03533014\nn03533392\nn03533486\nn03533654\nn03533845\nn03534580\nn03534695\nn03534776\nn03535024\nn03535284\nn03535647\nn03535780\nn03536122\nn03536568\nn03536761\nn03537085\nn03537241\nn03537412\nn03537550\nn03538037\nn03538179\nn03538300\nn03538406\nn03538542\nn03538634\nn03538817\nn03538957\nn03539103\nn03539293\nn03539433\nn03539546\nn03539678\nn03539754\nn03540090\nn03540267\nn03540476\nn03540595\nn03540914\nn03541091\nn03541269\nn03541393\nn03541537\nn03541696\nn03541923\nn03542333\nn03542605\nn03542727\nn03542860\nn03543012\nn03543112\nn03543254\nn03543394\nn03543511\nn03543603\nn03543735\nn03543945\nn03544143\nn03544238\nn03544360\nn03545150\nn03545470\nn03545585\nn03545756\nn03545961\nn03546112\nn03546235\nn03546340\nn03547054\nn03547229\nn03547397\nn03547530\nn03547861\nn03548086\nn03548195\nn03548320\nn03548402\nn03548533\nn03548626\nn03548930\nn03549199\nn03549350\nn03549473\nn03549589\nn03549732\nn03549897\nn03550153\nn03550289\nn03550420\nn03551084\nn03551395\nn03551582\nn03551790\nn03552001\nn03552449\nn03552749\nn03553019\nn03553248\nn03553486\nn03554375\nn03554460\nn03554645\nn03555006\nn03555217\nn03555426\nn03555564\nn03555662\nn03555862\nn03555996\nn03556173\nn03556679\nn03556811\nn03556992\nn03557270\nn03557360\nn03557590\nn03557692\nn03557840\nn03558007\nn03558176\nn03558404\nn03558633\nn03558739\nn03559373\nn03559531\nn03559999\nn03560430\nn03560860\nn03561047\nn03561169\nn03561573\nn03562565\nn03563200\nn03563460\nn03563710\nn03563967\nn03564849\nn03565288\nn03565565\nn03565710\nn03565830\nn03565991\nn03566193\nn0356632
9\nn03566555\nn03566730\nn03566860\nn03567066\nn03567635\nn03567788\nn03567912\nn03568117\nn03568818\nn03569014\nn03569174\nn03569293\nn03569494\nn03571280\nn03571439\nn03571625\nn03571853\nn03571942\nn03572107\nn03572205\nn03572321\nn03572631\nn03573574\nn03573848\nn03574243\nn03574416\nn03574555\nn03574816\nn03575958\nn03576215\nn03576443\nn03576955\nn03577090\nn03577312\nn03577474\nn03577672\nn03577818\nn03578055\nn03578251\nn03578656\nn03578981\nn03579538\nn03579982\nn03580518\nn03580615\nn03580845\nn03580990\nn03581125\nn03581531\nn03581897\nn03582508\nn03582959\nn03583419\nn03583621\nn03584254\nn03584400\nn03584829\nn03585073\nn03585337\nn03585438\nn03585551\nn03585682\nn03585778\nn03585875\nn03586219\nn03586631\nn03586911\nn03587205\nn03588216\nn03588841\nn03588951\nn03589313\nn03589513\nn03589672\nn03589791\nn03590306\nn03590475\nn03590588\nn03590841\nn03590932\nn03591116\nn03591313\nn03591592\nn03591798\nn03591901\nn03592245\nn03592669\nn03592773\nn03592931\nn03593122\nn03593222\nn03593526\nn03593862\nn03594010\nn03594148\nn03594277\nn03594523\nn03594734\nn03594945\nn03595055\nn03595264\nn03595409\nn03595523\nn03595614\nn03595860\nn03596099\nn03596285\nn03596543\nn03597147\nn03597317\nn03597916\nn03598151\nn03598299\nn03598385\nn03598515\nn03598646\nn03598783\nn03598930\nn03599486\nn03599964\nn03600285\nn03600475\nn03600722\nn03600977\nn03601442\nn03601638\nn03601840\nn03602081\nn03602194\nn03602365\nn03602686\nn03602790\nn03602883\nn03603442\nn03603594\nn03603722\nn03604156\nn03604311\nn03604400\nn03604536\nn03604629\nn03604763\nn03604843\nn03605417\nn03605504\nn03605598\nn03605722\nn03605915\nn03606106\nn03606251\nn03606347\nn03606465\nn03607029\nn03607186\nn03607527\nn03607659\nn03607923\nn03608504\nn03609147\nn03609235\nn03609397\nn03609542\nn03609786\nn03609959\nn03610098\nn03610418\nn03610524\nn03610682\nn03610836\nn03610992\nn03612010\nn03612814\nn03612965\nn03613294\nn03613592\nn03614007\nn03614383\nn03614532\nn03614782\nn03614887\nn03615300\nn03615
406\nn03615563\nn03615655\nn03615790\nn03616091\nn03616225\nn03616428\nn03616763\nn03616979\nn03617095\nn03617312\nn03617480\nn03617594\nn03617834\nn03618101\nn03618339\nn03618546\nn03618678\nn03618797\nn03618982\nn03619050\nn03619196\nn03619275\nn03619396\nn03619650\nn03619793\nn03619890\nn03620052\nn03620353\nn03620967\nn03621049\nn03621377\nn03621694\nn03622058\nn03622401\nn03622526\nn03622839\nn03622931\nn03623198\nn03623338\nn03623556\nn03624134\nn03624400\nn03624767\nn03625355\nn03625539\nn03625646\nn03625943\nn03626115\nn03626272\nn03626418\nn03626502\nn03626760\nn03627232\nn03627954\nn03628071\nn03628215\nn03628421\nn03628511\nn03628728\nn03628831\nn03628984\nn03629100\nn03629231\nn03629520\nn03629643\nn03630262\nn03630383\nn03631177\nn03631811\nn03631922\nn03632100\nn03632577\nn03632729\nn03632852\nn03632963\nn03633091\nn03633341\nn03633632\nn03633886\nn03634034\nn03634899\nn03635032\nn03635108\nn03635330\nn03635516\nn03635668\nn03635932\nn03636248\nn03636649\nn03637027\nn03637181\nn03637318\nn03637480\nn03637787\nn03637898\nn03638014\nn03638180\nn03638623\nn03638743\nn03638883\nn03639077\nn03639230\nn03639497\nn03639675\nn03639880\nn03640850\nn03640988\nn03641569\nn03641947\nn03642144\nn03642341\nn03642444\nn03642573\nn03642806\nn03643149\nn03643253\nn03643491\nn03643737\nn03643907\nn03644073\nn03644378\nn03644858\nn03645011\nn03645168\nn03645290\nn03645577\nn03646020\nn03646148\nn03646296\nn03646809\nn03646916\nn03647423\nn03647520\nn03648219\nn03648431\nn03648667\nn03649003\nn03649161\nn03649288\nn03649674\nn03649797\nn03649909\nn03650551\nn03651388\nn03651605\nn03651843\nn03652100\nn03652389\nn03652729\nn03652826\nn03652932\nn03653110\nn03653220\nn03653454\nn03653583\nn03653740\nn03653833\nn03653975\nn03654576\nn03654826\nn03655072\nn03655470\nn03655720\nn03656484\nn03656957\nn03657121\nn03657239\nn03657511\nn03658102\nn03658185\nn03658635\nn03658858\nn03659292\nn03659686\nn03659809\nn03659950\nn03660124\nn03660562\nn03660909\nn03661043\nn03661340\nn036
62301\nn03662452\nn03662601\nn03662719\nn03662887\nn03663433\nn03663531\nn03663910\nn03664159\nn03664675\nn03664840\nn03664943\nn03665232\nn03665366\nn03665851\nn03665924\nn03666238\nn03666362\nn03666591\nn03666917\nn03667060\nn03667235\nn03667552\nn03667664\nn03667829\nn03668067\nn03668279\nn03668488\nn03668803\nn03669245\nn03669534\nn03669886\nn03670208\nn03671914\nn03672521\nn03672827\nn03673027\nn03673270\nn03673450\nn03673767\nn03674270\nn03674440\nn03674731\nn03674842\nn03675076\nn03675235\nn03675445\nn03675558\nn03675907\nn03676087\nn03676483\nn03676623\nn03676759\nn03677115\nn03677682\nn03677766\nn03678558\nn03678729\nn03678879\nn03679384\nn03679712\nn03680248\nn03680355\nn03680512\nn03680734\nn03680858\nn03680942\nn03681477\nn03681813\nn03682380\nn03682487\nn03682877\nn03683079\nn03683341\nn03683457\nn03683606\nn03683708\nn03683995\nn03684143\nn03684224\nn03684489\nn03684611\nn03684740\nn03684823\nn03685307\nn03685486\nn03685640\nn03685820\nn03686130\nn03686363\nn03686470\nn03686924\nn03687137\nn03687928\nn03688066\nn03688192\nn03688405\nn03688504\nn03688605\nn03688707\nn03688832\nn03688943\nn03689157\nn03689570\nn03690168\nn03690279\nn03690473\nn03690851\nn03690938\nn03691459\nn03691817\nn03692004\nn03692136\nn03692272\nn03692379\nn03692522\nn03692842\nn03693293\nn03693474\nn03693707\nn03693860\nn03694196\nn03694356\nn03694639\nn03694761\nn03694949\nn03695122\nn03695452\nn03695616\nn03695753\nn03695857\nn03695957\nn03696065\nn03696301\nn03696445\nn03696568\nn03696746\nn03696909\nn03697007\nn03697366\nn03697552\nn03697812\nn03697913\nn03698123\nn03698226\nn03698360\nn03698604\nn03698723\nn03698815\nn03699280\nn03699591\nn03699754\nn03699975\nn03700963\nn03701191\nn03701391\nn03701640\nn03701790\nn03702248\nn03702440\nn03702582\nn03703075\nn03703203\nn03703463\nn03703590\nn03703730\nn03703862\nn03703945\nn03704549\nn03704834\nn03705379\nn03705808\nn03706229\nn03706415\nn03706653\nn03706939\nn03707171\nn03707372\nn03707597\nn03707766\nn03708036\nn03708425\nn0
3708843\nn03708962\nn03709206\nn03709363\nn03709545\nn03709644\nn03709823\nn03709960\nn03710079\nn03710193\nn03710294\nn03710421\nn03710528\nn03710637\nn03710721\nn03710937\nn03711044\nn03711711\nn03711999\nn03712111\nn03712337\nn03712444\nn03712887\nn03712981\nn03713069\nn03713151\nn03713436\nn03714235\nn03715114\nn03715275\nn03715386\nn03715669\nn03715892\nn03716228\nn03716887\nn03716966\nn03717131\nn03717285\nn03717447\nn03717622\nn03718212\nn03718335\nn03718458\nn03718581\nn03718699\nn03718789\nn03718935\nn03719053\nn03719343\nn03719560\nn03719743\nn03720005\nn03720163\nn03720665\nn03720891\nn03721047\nn03721252\nn03721384\nn03721590\nn03722007\nn03722288\nn03722646\nn03722944\nn03723153\nn03723267\nn03723439\nn03723781\nn03723885\nn03724066\nn03724176\nn03724417\nn03724538\nn03724623\nn03724756\nn03724870\nn03725035\nn03725506\nn03725600\nn03725717\nn03725869\nn03726116\nn03726233\nn03726371\nn03726516\nn03726760\nn03726993\nn03727067\nn03727465\nn03727605\nn03727837\nn03727946\nn03728437\nn03728982\nn03729131\nn03729308\nn03729402\nn03729482\nn03729647\nn03729826\nn03729951\nn03730153\nn03730334\nn03730494\nn03730655\nn03730788\nn03730893\nn03731019\nn03731483\nn03731695\nn03731882\nn03732020\nn03732114\nn03732458\nn03732543\nn03732658\nn03733131\nn03733281\nn03733465\nn03733547\nn03733644\nn03733805\nn03733925\nn03735637\nn03735963\nn03736064\nn03736147\nn03736269\nn03736372\nn03736470\nn03736970\nn03738066\nn03738241\nn03738472\nn03739518\nn03739693\nn03742019\nn03742115\nn03742238\nn03743016\nn03743279\nn03743902\nn03744276\nn03744684\nn03744840\nn03745146\nn03745487\nn03745571\nn03746005\nn03746155\nn03746330\nn03746486\nn03748162\nn03749504\nn03749634\nn03749807\nn03750206\nn03750437\nn03750614\nn03751065\nn03751269\nn03751458\nn03751590\nn03751757\nn03752071\nn03752185\nn03752398\nn03752922\nn03753077\nn03753514\nn03757604\nn03758089\nn03758220\nn03758894\nn03758992\nn03759243\nn03759432\nn03759661\nn03759954\nn03760310\nn03760671\nn03760944\nn03761084\n
n03761588\nn03761731\nn03762238\nn03762332\nn03762434\nn03762602\nn03762982\nn03763727\nn03763968\nn03764276\nn03764606\nn03764736\nn03764822\nn03764995\nn03765128\nn03765467\nn03765561\nn03765934\nn03766044\nn03766218\nn03766322\nn03766508\nn03766600\nn03766697\nn03766935\nn03767112\nn03767203\nn03767459\nn03767745\nn03767966\nn03768132\nn03768683\nn03768823\nn03768916\nn03769610\nn03769722\nn03769881\nn03770085\nn03770224\nn03770316\nn03770439\nn03770520\nn03770679\nn03770834\nn03770954\nn03772077\nn03772269\nn03772584\nn03772674\nn03773035\nn03773504\nn03773835\nn03774327\nn03774461\nn03775071\nn03775199\nn03775388\nn03775546\nn03775636\nn03775747\nn03775847\nn03776167\nn03776460\nn03776877\nn03776997\nn03777126\nn03777568\nn03777754\nn03778459\nn03778817\nn03779000\nn03779128\nn03779246\nn03779370\nn03779884\nn03780047\nn03780799\nn03781055\nn03781244\nn03781467\nn03781594\nn03781683\nn03781787\nn03782006\nn03782190\nn03782794\nn03782929\nn03783304\nn03783430\nn03783575\nn03783873\nn03784139\nn03784270\nn03784793\nn03784896\nn03785016\nn03785142\nn03785237\nn03785499\nn03785721\nn03786096\nn03786194\nn03786313\nn03786621\nn03786715\nn03786901\nn03787032\nn03787523\nn03788047\nn03788195\nn03788365\nn03788498\nn03788601\nn03788914\nn03789171\nn03789400\nn03789603\nn03789794\nn03789946\nn03790230\nn03790512\nn03790755\nn03790953\nn03791053\nn03791235\nn03792048\nn03792334\nn03792526\nn03792782\nn03792972\nn03793489\nn03793850\nn03794056\nn03794136\nn03794798\nn03795123\nn03795269\nn03795758\nn03795976\nn03796181\nn03796401\nn03796522\nn03796605\nn03796848\nn03796974\nn03797062\nn03797182\nn03797264\nn03797390\nn03797896\nn03798061\nn03798442\nn03798610\nn03798982\nn03799113\nn03799240\nn03799375\nn03799610\nn03799876\nn03800371\nn03800485\nn03800563\nn03800772\nn03800933\nn03801353\nn03801533\nn03801671\nn03801760\nn03801880\nn03802007\nn03802228\nn03802393\nn03802643\nn03802800\nn03802973\nn03803116\nn03803284\nn03803780\nn03804211\nn03804744\nn03805180\nn03805280
\nn03805374\nn03805503\nn03805725\nn03805933\nn03807334\nn03809211\nn03809312\nn03809603\nn03809686\nn03809802\nn03810412\nn03810952\nn03811295\nn03811444\nn03811847\nn03811965\nn03812263\nn03812382\nn03812789\nn03812924\nn03813078\nn03813176\nn03813946\nn03814528\nn03814639\nn03814727\nn03814817\nn03814906\nn03815149\nn03815278\nn03815482\nn03815615\nn03816005\nn03816136\nn03816394\nn03816530\nn03816849\nn03817191\nn03817331\nn03817522\nn03817647\nn03818001\nn03818343\nn03819047\nn03819336\nn03819448\nn03819595\nn03819994\nn03820154\nn03820318\nn03820728\nn03820950\nn03821145\nn03821424\nn03821518\nn03822171\nn03822361\nn03822504\nn03822656\nn03822767\nn03823111\nn03823216\nn03823312\nn03823673\nn03823906\nn03824197\nn03824284\nn03824381\nn03824589\nn03824713\nn03824999\nn03825080\nn03825271\nn03825442\nn03825673\nn03825788\nn03825913\nn03826039\nn03826186\nn03827420\nn03827536\nn03828020\nn03829340\nn03829857\nn03829954\nn03831203\nn03831382\nn03831757\nn03832144\nn03832673\nn03833907\nn03834040\nn03834472\nn03834604\nn03835197\nn03835729\nn03835941\nn03836062\nn03836451\nn03836602\nn03836906\nn03836976\nn03837422\nn03837606\nn03837698\nn03837869\nn03838024\nn03838298\nn03838748\nn03838899\nn03839172\nn03839276\nn03839424\nn03839671\nn03839795\nn03840327\nn03840681\nn03840823\nn03841011\nn03841143\nn03841290\nn03841666\nn03842012\nn03842156\nn03842276\nn03842377\nn03842585\nn03842754\nn03842986\nn03843092\nn03843316\nn03843438\nn03843555\nn03843883\nn03844045\nn03844233\nn03844550\nn03844673\nn03844815\nn03844965\nn03845107\nn03845190\nn03845990\nn03846100\nn03846234\nn03846431\nn03846677\nn03846772\nn03846970\nn03847471\nn03847823\nn03848033\nn03848168\nn03848348\nn03848537\nn03849275\nn03849412\nn03849679\nn03849814\nn03849943\nn03850053\nn03850245\nn03850492\nn03850613\nn03851341\nn03851787\nn03852280\nn03852544\nn03852688\nn03853291\nn03853924\nn03854065\nn03854421\nn03854506\nn03854722\nn03854815\nn03855214\nn03855333\nn03855464\nn03855604\nn03855756\nn038559
08\nn03856012\nn03856335\nn03856465\nn03856728\nn03857026\nn03857156\nn03857291\nn03857687\nn03857828\nn03858085\nn03858183\nn03858418\nn03858533\nn03858837\nn03859000\nn03859170\nn03859280\nn03859495\nn03859608\nn03859958\nn03860234\nn03860404\nn03861048\nn03861271\nn03861430\nn03861596\nn03861842\nn03862379\nn03862676\nn03862862\nn03863108\nn03863262\nn03863657\nn03863783\nn03863923\nn03864139\nn03864356\nn03864692\nn03865288\nn03865371\nn03865557\nn03865820\nn03865949\nn03866082\nn03867854\nn03868044\nn03868242\nn03868324\nn03868406\nn03868643\nn03868763\nn03868863\nn03869838\nn03869976\nn03870105\nn03870290\nn03870546\nn03870672\nn03870980\nn03871083\nn03871371\nn03871524\nn03871628\nn03871724\nn03871860\nn03872016\nn03872167\nn03872273\nn03873416\nn03873699\nn03873848\nn03873996\nn03874138\nn03874293\nn03874487\nn03874599\nn03874823\nn03875218\nn03875806\nn03875955\nn03876111\nn03876231\nn03877351\nn03877472\nn03877674\nn03877845\nn03878066\nn03878211\nn03878294\nn03878418\nn03878511\nn03878674\nn03878828\nn03878963\nn03879456\nn03879705\nn03880032\nn03880129\nn03880323\nn03880531\nn03881305\nn03881404\nn03881534\nn03882611\nn03882960\nn03883054\nn03883385\nn03883524\nn03883664\nn03883773\nn03883944\nn03884397\nn03884554\nn03884639\nn03884778\nn03884926\nn03885028\nn03885194\nn03885293\nn03885410\nn03885535\nn03885669\nn03885788\nn03885904\nn03886053\nn03886641\nn03886762\nn03886940\nn03887185\nn03887330\nn03887512\nn03887697\nn03887899\nn03888022\nn03888257\nn03888605\nn03888808\nn03888998\nn03889397\nn03889503\nn03889626\nn03889726\nn03889871\nn03890093\nn03890233\nn03890358\nn03890514\nn03891051\nn03891251\nn03891332\nn03891538\nn03892178\nn03892425\nn03892557\nn03892728\nn03893935\nn03894051\nn03894379\nn03894677\nn03894933\nn03895038\nn03895170\nn03895866\nn03896103\nn03896233\nn03896419\nn03896526\nn03896628\nn03896984\nn03897130\nn03897634\nn03897943\nn03898129\nn03898271\nn03898395\nn03898633\nn03898787\nn03899100\nn03899612\nn03899768\nn03899933\nn0390
0028\nn03900194\nn03900301\nn03900393\nn03900979\nn03901229\nn03901338\nn03901750\nn03901974\nn03902125\nn03902220\nn03902482\nn03902756\nn03903133\nn03903290\nn03903424\nn03903733\nn03903868\nn03904060\nn03904183\nn03904433\nn03904657\nn03904782\nn03904909\nn03905361\nn03905540\nn03905730\nn03905947\nn03906106\nn03906224\nn03906463\nn03906590\nn03906789\nn03906894\nn03906997\nn03907475\nn03907654\nn03907908\nn03908111\nn03908204\nn03908456\nn03908618\nn03908714\nn03909020\nn03909160\nn03909406\nn03909516\nn03909658\nn03911406\nn03911513\nn03911658\nn03911767\nn03911866\nn03912218\nn03912821\nn03913343\nn03913930\nn03914106\nn03914337\nn03914438\nn03914583\nn03914831\nn03915118\nn03915320\nn03915437\nn03915900\nn03916031\nn03916289\nn03916385\nn03916470\nn03916720\nn03917048\nn03917198\nn03917327\nn03917814\nn03918074\nn03918480\nn03918737\nn03919096\nn03919289\nn03919430\nn03919808\nn03920288\nn03920384\nn03920641\nn03920737\nn03920867\nn03923379\nn03923564\nn03923692\nn03923918\nn03924069\nn03924407\nn03924532\nn03924679\nn03926148\nn03926412\nn03926876\nn03927091\nn03927299\nn03927539\nn03927792\nn03928116\nn03928589\nn03928814\nn03928994\nn03929091\nn03929202\nn03929443\nn03929660\nn03929855\nn03930229\nn03930313\nn03930431\nn03930515\nn03930630\nn03931044\nn03931765\nn03931885\nn03931980\nn03932080\nn03932670\nn03933391\nn03933933\nn03934042\nn03934229\nn03934311\nn03934565\nn03934656\nn03934890\nn03935116\nn03935234\nn03935335\nn03935883\nn03936269\nn03936466\nn03937543\nn03937835\nn03937931\nn03938037\nn03938244\nn03938401\nn03938522\nn03938725\nn03939062\nn03939178\nn03939281\nn03939440\nn03939565\nn03939677\nn03939844\nn03940256\nn03940894\nn03941013\nn03941231\nn03941417\nn03941586\nn03941684\nn03941887\nn03942028\nn03942600\nn03942813\nn03942920\nn03943115\nn03943266\nn03943623\nn03943714\nn03943833\nn03943920\nn03944024\nn03944138\nn03944341\nn03945459\nn03945615\nn03945817\nn03945928\nn03946076\nn03946162\nn03947111\nn03947343\nn03947466\nn03947798\nn03
947888\nn03948242\nn03948459\nn03948830\nn03948950\nn03949145\nn03949317\nn03949761\nn03950228\nn03950359\nn03950537\nn03950647\nn03950899\nn03951068\nn03951213\nn03951453\nn03951800\nn03951971\nn03952150\nn03952576\nn03953020\nn03953416\nn03953901\nn03954393\nn03954731\nn03955296\nn03955489\nn03955809\nn03955941\nn03956157\nn03956331\nn03956531\nn03956623\nn03956785\nn03956922\nn03957315\nn03957420\nn03957762\nn03957991\nn03958227\nn03958338\nn03958630\nn03958752\nn03959014\nn03959123\nn03959227\nn03959701\nn03960374\nn03960490\nn03961394\nn03961630\nn03961711\nn03961828\nn03961939\nn03962525\nn03962685\nn03962852\nn03962932\nn03963028\nn03963198\nn03963294\nn03963483\nn03963645\nn03964495\nn03964611\nn03965456\nn03965907\nn03966206\nn03966325\nn03966582\nn03966751\nn03966976\nn03967270\nn03967396\nn03967562\nn03967942\nn03968293\nn03968479\nn03968581\nn03968728\nn03969510\nn03970156\nn03970363\nn03970546\nn03971218\nn03971321\nn03971960\nn03972146\nn03972372\nn03972524\nn03973003\nn03973285\nn03973402\nn03973520\nn03973628\nn03973839\nn03973945\nn03974070\nn03974915\nn03975035\nn03975657\nn03975788\nn03975926\nn03976105\nn03976268\nn03976467\nn03976657\nn03977158\nn03977266\nn03977430\nn03977592\nn03977966\nn03978421\nn03978575\nn03978686\nn03978815\nn03978966\nn03979377\nn03979492\nn03980026\nn03980478\nn03980874\nn03980986\nn03981094\nn03981340\nn03981566\nn03981760\nn03981924\nn03982232\nn03982331\nn03982430\nn03982642\nn03982767\nn03982895\nn03983396\nn03983499\nn03983612\nn03983712\nn03983928\nn03984125\nn03984234\nn03984381\nn03984643\nn03984759\nn03985069\nn03985232\nn03985441\nn03985881\nn03986071\nn03986224\nn03986355\nn03986562\nn03986704\nn03986857\nn03986949\nn03987266\nn03987376\nn03987674\nn03987865\nn03987990\nn03988170\nn03988758\nn03988926\nn03989199\nn03989349\nn03989447\nn03989665\nn03989777\nn03989898\nn03990474\nn03991062\nn03991202\nn03991321\nn03991443\nn03991646\nn03991837\nn03992325\nn03992436\nn03992509\nn03992703\nn03992975\nn03993053\nn
03993180\nn03993403\nn03993703\nn03993878\nn03994008\nn03994297\nn03994417\nn03994614\nn03994757\nn03995018\nn03995265\nn03995372\nn03995535\nn03995661\nn03995856\nn03996004\nn03996145\nn03996416\nn03996849\nn03997274\nn03997484\nn03997875\nn03998194\nn03998333\nn03998673\nn03999064\nn03999160\nn03999621\nn03999992\nn04000311\nn04000480\nn04000592\nn04000716\nn04000998\nn04001132\nn04001265\nn04001397\nn04001499\nn04001661\nn04001845\nn04002262\nn04002371\nn04002629\nn04003241\nn04003359\nn04003856\nn04004099\nn04004210\nn04004475\nn04004767\nn04004990\nn04005197\nn04005630\nn04005912\nn04006067\nn04006227\nn04006330\nn04006411\nn04007415\nn04007664\nn04008385\nn04008634\nn04009552\nn04009801\nn04009923\nn04010057\nn04010779\nn04010927\nn04011827\nn04012084\nn04012482\nn04012665\nn04013060\nn04013176\nn04013600\nn04013729\nn04014297\nn04015204\nn04015786\nn04015908\nn04016240\nn04016479\nn04016576\nn04016684\nn04016846\nn04017571\nn04017807\nn04018155\nn04018399\nn04018667\nn04019101\nn04019335\nn04019541\nn04019696\nn04019881\nn04020087\nn04020298\nn04020744\nn04020912\nn04021028\nn04021164\nn04021362\nn04021503\nn04021704\nn04021798\nn04022332\nn04022434\nn04022708\nn04022866\nn04023021\nn04023119\nn04023249\nn04023422\nn04023695\nn04023962\nn04024137\nn04024274\nn04024862\nn04024983\nn04025508\nn04025633\nn04026053\nn04026180\nn04026417\nn04026813\nn04026918\nn04027023\nn04027367\nn04027706\nn04027820\nn04027935\nn04028074\nn04028221\nn04028315\nn04028581\nn04028764\nn04029416\nn04029647\nn04029734\nn04029913\nn04030054\nn04030161\nn04030274\nn04030414\nn04030518\nn04030846\nn04030965\nn04031884\nn04032509\nn04032603\nn04032936\nn04033287\nn04033425\nn04033557\nn04033801\nn04033901\nn04033995\nn04034262\nn04034367\nn04035231\nn04035634\nn04035748\nn04035836\nn04035912\nn04036155\nn04036303\nn04036776\nn04036963\nn04037076\nn04037220\nn04037298\nn04037443\nn04037873\nn04037964\nn04038231\nn04038338\nn04038440\nn04038727\nn04039041\nn04039209\nn04039381\nn04039742\
nn04039848\nn04040247\nn04040373\nn04040540\nn04040759\nn04041069\nn04041243\nn04041408\nn04041544\nn04041747\nn04042076\nn04042204\nn04042358\nn04042632\nn04042795\nn04042985\nn04043168\nn04043411\nn04043733\nn04044307\nn04044498\nn04044716\nn04044955\nn04045085\nn04045255\nn04045397\nn04045644\nn04045787\nn04045941\nn04046091\nn04046277\nn04046400\nn04046590\nn04046974\nn04047139\nn04047401\nn04047733\nn04047834\nn04048441\nn04049303\nn04049405\nn04049585\nn04049753\nn04050066\nn04050313\nn04050600\nn04050933\nn04051269\nn04051439\nn04051549\nn04051705\nn04051825\nn04052235\nn04052346\nn04052442\nn04052658\nn04052757\nn04053508\nn04053677\nn04053767\nn04054361\nn04054566\nn04054670\nn04055180\nn04055447\nn04055700\nn04055861\nn04056073\nn04056180\nn04056413\nn04056932\nn04057047\nn04057215\nn04057435\nn04057673\nn04057846\nn04057981\nn04058096\nn04058239\nn04058486\nn04058594\nn04058721\nn04059157\nn04059298\nn04059399\nn04059516\nn04059947\nn04060198\nn04060448\nn04060647\nn04060904\nn04061681\nn04061793\nn04061969\nn04062179\nn04062428\nn04062644\nn04062807\nn04063154\nn04063373\nn04063868\nn04064213\nn04064401\nn04064747\nn04064862\nn04065272\nn04065464\nn04065789\nn04065909\nn04066023\nn04066270\nn04066388\nn04066476\nn04066767\nn04067143\nn04067231\nn04067353\nn04067472\nn04067658\nn04067818\nn04067921\nn04068441\nn04068601\nn04069166\nn04069276\nn04069434\nn04069582\nn04069777\nn04070003\nn04070207\nn04070415\nn04070545\nn04070727\nn04070964\nn04071102\nn04071263\nn04071393\nn04072193\nn04072551\nn04072960\nn04073425\nn04073948\nn04074185\nn04074963\nn04075291\nn04075468\nn04075715\nn04075813\nn04075916\nn04076052\nn04076284\nn04076713\nn04077430\nn04077594\nn04077734\nn04077889\nn04078002\nn04078574\nn04078955\nn04079106\nn04079244\nn04079603\nn04079933\nn04080138\nn04080454\nn04080705\nn04080833\nn04081281\nn04081699\nn04081844\nn04082344\nn04082562\nn04082710\nn04082886\nn04083113\nn04083309\nn04083649\nn04083800\nn04084517\nn04084682\nn04084889\nn0408501
7\nn04085574\nn04085873\nn04086066\nn04086273\nn04086446\nn04086663\nn04086794\nn04086937\nn04087126\nn04087432\nn04087709\nn04087826\nn04088229\nn04088343\nn04088441\nn04088696\nn04088797\nn04089152\nn04089376\nn04089666\nn04089836\nn04089976\nn04090263\nn04090548\nn04090781\nn04091097\nn04091466\nn04091584\nn04091693\nn04092168\nn04093157\nn04093223\nn04093625\nn04093775\nn04093915\nn04094060\nn04094250\nn04094438\nn04094608\nn04094720\nn04094859\nn04095109\nn04095210\nn04095342\nn04095577\nn04095938\nn04096066\nn04096733\nn04096848\nn04097085\nn04097373\nn04097622\nn04097760\nn04097866\nn04098169\nn04098260\nn04098399\nn04098513\nn04098795\nn04099003\nn04099175\nn04099429\nn04099969\nn04100174\nn04100519\nn04101375\nn04101497\nn04101701\nn04101860\nn04102037\nn04102162\nn04102285\nn04102406\nn04102618\nn04102760\nn04102872\nn04102962\nn04103094\nn04103206\nn04103364\nn04103665\nn04103769\nn04103918\nn04104147\nn04104384\nn04104500\nn04104770\nn04104925\nn04105068\nn04105438\nn04105704\nn04105893\nn04107598\nn04107743\nn04107984\nn04108268\nn04108822\nn04108999\nn04110068\nn04110178\nn04110281\nn04110439\nn04110654\nn04110841\nn04110955\nn04111190\nn04111414\nn04111531\nn04111668\nn04111962\nn04112147\nn04112252\nn04112430\nn04112579\nn04112654\nn04112752\nn04112921\nn04113038\nn04113194\nn04113316\nn04113406\nn04113641\nn04113765\nn04113968\nn04114069\nn04114301\nn04114428\nn04114719\nn04114844\nn04114996\nn04115144\nn04115256\nn04115456\nn04115542\nn04115802\nn04115996\nn04116098\nn04116294\nn04116389\nn04116512\nn04117216\nn04117464\nn04117639\nn04118021\nn04118538\nn04118635\nn04118776\nn04119091\nn04119230\nn04119360\nn04119478\nn04119630\nn04119751\nn04120489\nn04120695\nn04120842\nn04121228\nn04121342\nn04121426\nn04121511\nn04121728\nn04122262\nn04122349\nn04122492\nn04122578\nn04122685\nn04122825\nn04123026\nn04123123\nn04123228\nn04123317\nn04123448\nn04123567\nn04123740\nn04124098\nn04124202\nn04124370\nn04124488\nn04124573\nn04124887\nn04125021\nn04125
116\nn04125257\nn04125541\nn04125692\nn04125853\nn04126066\nn04126244\nn04126541\nn04126659\nn04126852\nn04126980\nn04127117\nn04127249\nn04127395\nn04127521\nn04127633\nn04127904\nn04128413\nn04128499\nn04128710\nn04128837\nn04129490\nn04129688\nn04129766\nn04130143\nn04130257\nn04130566\nn04130907\nn04131015\nn04131113\nn04131208\nn04131368\nn04131499\nn04131690\nn04131811\nn04131929\nn04132158\nn04132465\nn04132603\nn04132829\nn04132985\nn04133114\nn04133789\nn04134008\nn04134170\nn04134523\nn04134632\nn04135024\nn04135118\nn04135315\nn04135710\nn04135933\nn04136045\nn04136161\nn04136333\nn04136510\nn04136800\nn04137089\nn04137217\nn04137355\nn04137444\nn04137773\nn04137897\nn04138131\nn04138261\nn04138869\nn04138977\nn04139140\nn04139395\nn04139859\nn04140064\nn04140539\nn04140631\nn04140777\nn04140853\nn04141076\nn04141198\nn04141327\nn04141712\nn04141838\nn04141975\nn04142175\nn04142327\nn04142434\nn04142731\nn04142999\nn04143140\nn04143365\nn04143897\nn04144241\nn04144539\nn04144651\nn04145863\nn04146050\nn04146343\nn04146504\nn04146614\nn04146862\nn04146976\nn04147183\nn04147291\nn04147495\nn04147793\nn04147916\nn04148054\nn04148285\nn04148464\nn04148579\nn04148703\nn04149083\nn04149374\nn04149813\nn04150153\nn04150273\nn04150371\nn04150980\nn04151108\nn04151581\nn04151940\nn04152387\nn04152593\nn04153025\nn04153330\nn04153751\nn04154152\nn04154340\nn04154565\nn04154753\nn04154854\nn04154938\nn04155068\nn04155177\nn04155457\nn04155625\nn04155735\nn04155889\nn04156040\nn04156140\nn04156297\nn04156411\nn04156591\nn04156814\nn04156946\nn04157099\nn04157320\nn04158002\nn04158138\nn04158250\nn04158672\nn04158807\nn04158956\nn04160036\nn04160261\nn04160372\nn04160586\nn04160847\nn04161010\nn04161358\nn04161981\nn04162433\nn04162706\nn04163530\nn04164002\nn04164199\nn04164406\nn04164757\nn04164868\nn04165409\nn04165675\nn04165945\nn04166111\nn04166281\nn04166436\nn04167346\nn04167489\nn04167661\nn04168084\nn04168199\nn04168472\nn04168541\nn04168840\nn04169437\nn041
69597\nn04170037\nn04170384\nn04170515\nn04170694\nn04170933\nn04171208\nn04171459\nn04171629\nn04171831\nn04172107\nn04172230\nn04172342\nn04172512\nn04172607\nn04172776\nn04172904\nn04173046\nn04173172\nn04173511\nn04173907\nn04174026\nn04174101\nn04174234\nn04174500\nn04174705\nn04175039\nn04175147\nn04175574\nn04176068\nn04176190\nn04176295\nn04176528\nn04177041\nn04177329\nn04177545\nn04177654\nn04177755\nn04177820\nn04177931\nn04178190\nn04178329\nn04178668\nn04179126\nn04179712\nn04179824\nn04179913\nn04180063\nn04180229\nn04180888\nn04181083\nn04181228\nn04181561\nn04181718\nn04182152\nn04182322\nn04183217\nn04183329\nn04183957\nn04184095\nn04184316\nn04184435\nn04184600\nn04184880\nn04185071\nn04185529\nn04185804\nn04185946\nn04186051\nn04186268\nn04186455\nn04186624\nn04186848\nn04187061\nn04187233\nn04187547\nn04187751\nn04187885\nn04187970\nn04188064\nn04188179\nn04189092\nn04189282\nn04189651\nn04189816\nn04190052\nn04190376\nn04190464\nn04190747\nn04190997\nn04191150\nn04191595\nn04191943\nn04192238\nn04192361\nn04192521\nn04192698\nn04192858\nn04193179\nn04193377\nn04193742\nn04193883\nn04194009\nn04194127\nn04194289\nn04196080\nn04196502\nn04196803\nn04196925\nn04197110\nn04197391\nn04197781\nn04197878\nn04198015\nn04198233\nn04198355\nn04198453\nn04198562\nn04198722\nn04198797\nn04199027\nn04200000\nn04200258\nn04200537\nn04200800\nn04200908\nn04201064\nn04201297\nn04201733\nn04202142\nn04202282\nn04202417\nn04203356\nn04204081\nn04204238\nn04204347\nn04204755\nn04205062\nn04205318\nn04205505\nn04205613\nn04206070\nn04206225\nn04206356\nn04206570\nn04206790\nn04207151\nn04207343\nn04207596\nn04207763\nn04207903\nn04208065\nn04208210\nn04208427\nn04208582\nn04208760\nn04208936\nn04209133\nn04209239\nn04209509\nn04209613\nn04209811\nn04210012\nn04210120\nn04210288\nn04210390\nn04210591\nn04210858\nn04211001\nn04211219\nn04211356\nn04211528\nn04211857\nn04211970\nn04212165\nn04212282\nn04212467\nn04212810\nn04213105\nn04213264\nn04213353\nn04213530\nn0
4214046\nn04214282\nn04214413\nn04214649\nn04215153\nn04215402\nn04215588\nn04215800\nn04215910\nn04216634\nn04216860\nn04216963\nn04217387\nn04217546\nn04217718\nn04217882\nn04218564\nn04218921\nn04219185\nn04219424\nn04219580\nn04220250\nn04220805\nn04221076\nn04221673\nn04221823\nn04222210\nn04222307\nn04222470\nn04222723\nn04222847\nn04223066\nn04223170\nn04223299\nn04224395\nn04224543\nn04224842\nn04225031\nn04225222\nn04225729\nn04225987\nn04226322\nn04226464\nn04226537\nn04226826\nn04226962\nn04227050\nn04227144\nn04227519\nn04227787\nn04227900\nn04228054\nn04228215\nn04228422\nn04228581\nn04228693\nn04229007\nn04229107\nn04229480\nn04229620\nn04229737\nn04229816\nn04229959\nn04230387\nn04230487\nn04230603\nn04230707\nn04230808\nn04231272\nn04231693\nn04231905\nn04232153\nn04232312\nn04232437\nn04232800\nn04233027\nn04233124\nn04233295\nn04233715\nn04233832\nn04234160\nn04234260\nn04234455\nn04234670\nn04234763\nn04234887\nn04235291\nn04235646\nn04235771\nn04235860\nn04236001\nn04236377\nn04236702\nn04236809\nn04236935\nn04237174\nn04237287\nn04237423\nn04238128\nn04238321\nn04238617\nn04238763\nn04238953\nn04239074\nn04239218\nn04239333\nn04239436\nn04239639\nn04239786\nn04239900\nn04240434\nn04240752\nn04240867\nn04241042\nn04241249\nn04241394\nn04241573\nn04242084\nn04242315\nn04242408\nn04242587\nn04242704\nn04243003\nn04243142\nn04243251\nn04243546\nn04243941\nn04244379\nn04244847\nn04244997\nn04245218\nn04245412\nn04245508\nn04245847\nn04246060\nn04246271\nn04246459\nn04246731\nn04246855\nn04247011\nn04247440\nn04247544\nn04247630\nn04247736\nn04247876\nn04248209\nn04248396\nn04248507\nn04248851\nn04249415\nn04249582\nn04249882\nn04250224\nn04250473\nn04250599\nn04250692\nn04250850\nn04251144\nn04251701\nn04251791\nn04252077\nn04252225\nn04252331\nn04252560\nn04252653\nn04253057\nn04253168\nn04253304\nn04253931\nn04254009\nn04254120\nn04254450\nn04254680\nn04254777\nn04255163\nn04255346\nn04255499\nn04255586\nn04255670\nn04255768\nn04255899\nn04256318\n
n04256520\nn04256758\nn04256891\nn04257223\nn04257684\nn04257790\nn04257986\nn04258138\nn04258333\nn04258438\nn04258618\nn04258732\nn04258859\nn04259202\nn04259468\nn04259630\nn04260192\nn04260364\nn04260589\nn04261116\nn04261281\nn04261369\nn04261506\nn04261638\nn04261767\nn04261868\nn04262161\nn04262530\nn04262678\nn04262869\nn04263257\nn04263336\nn04263502\nn04263760\nn04263950\nn04264134\nn04264233\nn04264361\nn04264485\nn04264628\nn04264765\nn04264914\nn04265275\nn04265428\nn04265904\nn04266014\nn04266162\nn04266375\nn04266486\nn04266849\nn04266968\nn04267091\nn04267165\nn04267246\nn04267435\nn04267577\nn04267985\nn04268142\nn04268275\nn04268418\nn04268565\nn04268799\nn04269086\nn04269270\nn04269502\nn04269668\nn04269822\nn04269944\nn04270147\nn04270371\nn04270576\nn04270891\nn04271148\nn04271531\nn04271793\nn04271891\nn04272054\nn04272389\nn04272782\nn04272928\nn04273064\nn04273285\nn04273569\nn04273659\nn04273796\nn04273972\nn04274686\nn04274985\nn04275093\nn04275175\nn04275283\nn04275548\nn04275661\nn04275904\nn04277352\nn04277493\nn04277669\nn04277826\nn04278247\nn04278353\nn04278447\nn04278605\nn04278932\nn04279063\nn04279172\nn04279353\nn04279462\nn04279858\nn04279987\nn04280259\nn04280373\nn04280487\nn04280845\nn04280970\nn04281260\nn04281375\nn04281571\nn04281998\nn04282231\nn04282494\nn04282872\nn04282992\nn04283096\nn04283255\nn04283378\nn04283585\nn04283784\nn04283905\nn04284002\nn04284341\nn04284438\nn04284572\nn04284869\nn04285008\nn04285146\nn04285622\nn04285803\nn04285965\nn04286128\nn04286575\nn04286960\nn04287351\nn04287451\nn04287747\nn04287898\nn04287986\nn04288165\nn04288272\nn04288533\nn04288673\nn04289027\nn04289195\nn04289449\nn04289576\nn04289690\nn04289827\nn04290079\nn04290259\nn04290507\nn04290615\nn04290762\nn04291069\nn04291242\nn04291759\nn04291992\nn04292080\nn04292221\nn04292414\nn04292572\nn04292921\nn04293119\nn04293258\nn04293744\nn04294212\nn04294426\nn04294614\nn04294879\nn04295081\nn04295353\nn04295571\nn04295777\nn04295881
\nn04296562\nn04297098\nn04297750\nn04297847\nn04298053\nn04298661\nn04298765\nn04299215\nn04299370\nn04299963\nn04300358\nn04300509\nn04300643\nn04301000\nn04301242\nn04301474\nn04301760\nn04302200\nn04302863\nn04302988\nn04303095\nn04303258\nn04303357\nn04303497\nn04304215\nn04304375\nn04304680\nn04305016\nn04305210\nn04305323\nn04305471\nn04305572\nn04305947\nn04306080\nn04306592\nn04306847\nn04307419\nn04307767\nn04307878\nn04307986\nn04308084\nn04308273\nn04308397\nn04308583\nn04308807\nn04308915\nn04309049\nn04309348\nn04309548\nn04309833\nn04310018\nn04310157\nn04310507\nn04310604\nn04310721\nn04310904\nn04311004\nn04311174\nn04311595\nn04312020\nn04312154\nn04312432\nn04312654\nn04312756\nn04312916\nn04313220\nn04313503\nn04313628\nn04314107\nn04314216\nn04314522\nn04314632\nn04314914\nn04315342\nn04315713\nn04315828\nn04315948\nn04316498\nn04316815\nn04316924\nn04317063\nn04317175\nn04317325\nn04317420\nn04317833\nn04317976\nn04318131\nn04318787\nn04318892\nn04318982\nn04319545\nn04319774\nn04319937\nn04320405\nn04320598\nn04320871\nn04320973\nn04321121\nn04321453\nn04322026\nn04322531\nn04322692\nn04322801\nn04323519\nn04323819\nn04324120\nn04324297\nn04324387\nn04324515\nn04325041\nn04325208\nn04325704\nn04325804\nn04325968\nn04326547\nn04326676\nn04326799\nn04326896\nn04327204\nn04327544\nn04327682\nn04328054\nn04328186\nn04328329\nn04328580\nn04328703\nn04328946\nn04329477\nn04329681\nn04329834\nn04329958\nn04330109\nn04330189\nn04330267\nn04330340\nn04330669\nn04330746\nn04330896\nn04330998\nn04331277\nn04331443\nn04331639\nn04331765\nn04331892\nn04332074\nn04332243\nn04332580\nn04332987\nn04333129\nn04333869\nn04334105\nn04334365\nn04334504\nn04334599\nn04335209\nn04335435\nn04335693\nn04335886\nn04336792\nn04337157\nn04337287\nn04337503\nn04337650\nn04338517\nn04338963\nn04339062\nn04339191\nn04339638\nn04339879\nn04340019\nn04340521\nn04340750\nn04340935\nn04341133\nn04341288\nn04341414\nn04341686\nn04343511\nn04343630\nn04343740\nn04344003\nn043447
34\nn04344873\nn04345028\nn04345201\nn04345787\nn04346003\nn04346157\nn04346328\nn04346428\nn04346511\nn04346679\nn04346855\nn04347119\nn04347519\nn04347754\nn04348070\nn04348184\nn04348359\nn04348988\nn04349189\nn04349306\nn04349401\nn04349913\nn04350104\nn04350235\nn04350458\nn04350581\nn04350688\nn04350769\nn04350905\nn04351550\nn04351699\nn04353573\nn04354026\nn04354182\nn04354387\nn04354487\nn04354589\nn04355115\nn04355267\nn04355338\nn04355511\nn04355684\nn04355821\nn04355933\nn04356056\nn04356595\nn04356772\nn04356925\nn04357121\nn04357314\nn04357531\nn04357930\nn04358117\nn04358256\nn04358491\nn04358707\nn04358874\nn04359034\nn04359124\nn04359217\nn04359335\nn04359500\nn04359589\nn04360501\nn04360798\nn04360914\nn04361095\nn04361260\nn04361937\nn04362624\nn04362821\nn04362972\nn04363082\nn04363210\nn04363412\nn04363671\nn04363777\nn04363874\nn04363991\nn04364160\nn04364397\nn04364545\nn04364827\nn04364994\nn04365112\nn04365229\nn04365328\nn04365484\nn04365751\nn04366033\nn04366116\nn04366367\nn04366832\nn04367011\nn04367371\nn04367480\nn04367746\nn04367950\nn04368109\nn04368235\nn04368365\nn04368496\nn04368695\nn04368840\nn04369025\nn04369282\nn04369485\nn04369618\nn04370048\nn04370288\nn04370456\nn04370600\nn04370774\nn04370955\nn04371050\nn04371430\nn04371563\nn04371774\nn04371979\nn04372370\nn04373089\nn04373428\nn04373563\nn04373704\nn04373795\nn04373894\nn04374315\nn04374521\nn04374735\nn04374907\nn04375080\nn04375241\nn04375405\nn04375615\nn04375775\nn04375926\nn04376400\nn04376876\nn04377057\nn04378489\nn04378651\nn04378956\nn04379096\nn04379243\nn04379964\nn04380255\nn04380346\nn04380533\nn04380916\nn04381073\nn04381450\nn04381587\nn04381724\nn04381860\nn04381994\nn04382334\nn04382438\nn04382537\nn04382695\nn04382880\nn04383015\nn04383130\nn04383301\nn04383839\nn04383923\nn04384593\nn04384910\nn04385079\nn04385157\nn04385536\nn04385799\nn04386051\nn04386456\nn04386664\nn04386792\nn04387095\nn04387201\nn04387261\nn04387400\nn04387531\nn04387706\nn0438
7932\nn04388040\nn04388162\nn04388473\nn04388574\nn04388743\nn04389033\nn04389430\nn04389521\nn04389718\nn04389854\nn04389999\nn04390483\nn04390577\nn04390873\nn04390977\nn04391445\nn04391838\nn04392113\nn04392526\nn04392764\nn04392985\nn04393095\nn04393301\nn04393549\nn04393808\nn04393913\nn04394031\nn04394261\nn04394421\nn04394630\nn04395024\nn04395106\nn04395332\nn04395651\nn04395875\nn04396226\nn04396335\nn04396650\nn04396808\nn04396902\nn04397027\nn04397168\nn04397261\nn04397452\nn04397645\nn04397768\nn04397860\nn04398044\nn04398497\nn04398688\nn04398834\nn04398951\nn04399046\nn04399158\nn04399537\nn04399846\nn04400109\nn04400289\nn04400499\nn04400737\nn04400899\nn04401088\nn04401578\nn04401680\nn04401828\nn04401949\nn04402057\nn04402342\nn04402449\nn04402580\nn04402746\nn04402984\nn04403413\nn04403524\nn04403638\nn04403925\nn04404072\nn04404200\nn04404412\nn04404817\nn04404997\nn04405540\nn04405762\nn04405907\nn04406239\nn04406552\nn04406687\nn04406817\nn04407257\nn04407435\nn04407686\nn04408871\nn04409011\nn04409128\nn04409279\nn04409384\nn04409515\nn04409625\nn04409806\nn04409911\nn04410086\nn04410365\nn04410485\nn04410565\nn04410663\nn04410760\nn04410886\nn04411019\nn04411264\nn04411835\nn04411966\nn04412097\nn04412300\nn04412416\nn04413151\nn04413419\nn04413969\nn04414101\nn04414199\nn04414319\nn04414476\nn04414675\nn04414909\nn04415257\nn04415663\nn04415815\nn04416005\nn04416901\nn04417086\nn04417180\nn04417361\nn04417672\nn04417809\nn04418357\nn04418644\nn04419073\nn04419642\nn04419868\nn04420024\nn04420720\nn04421083\nn04421258\nn04421417\nn04421582\nn04421740\nn04421872\nn04422409\nn04422566\nn04422727\nn04422875\nn04423552\nn04423687\nn04423845\nn04424692\nn04425804\nn04425977\nn04426184\nn04426316\nn04426427\nn04427216\nn04427473\nn04427559\nn04427715\nn04427857\nn04428008\nn04428191\nn04428382\nn04428634\nn04429038\nn04429376\nn04430475\nn04430605\nn04430896\nn04431025\nn04431436\nn04431648\nn04431745\nn04431925\nn04432043\nn04432203\nn04432662\nn04
432785\nn04433377\nn04433585\nn04434207\nn04434531\nn04434932\nn04435180\nn04435552\nn04435653\nn04435759\nn04435870\nn04436012\nn04436185\nn04436329\nn04436401\nn04436542\nn04436832\nn04436992\nn04437276\nn04437380\nn04437670\nn04437953\nn04438304\nn04438507\nn04438643\nn04438897\nn04439505\nn04439585\nn04439712\nn04440597\nn04440963\nn04441093\nn04441528\nn04441662\nn04441790\nn04442312\nn04442441\nn04442582\nn04442741\nn04443164\nn04443257\nn04443433\nn04443766\nn04444121\nn04444218\nn04444749\nn04444953\nn04445040\nn04445154\nn04445327\nn04445610\nn04445782\nn04445952\nn04446162\nn04446276\nn04446844\nn04447028\nn04447156\nn04447276\nn04447443\nn04447861\nn04448070\nn04448185\nn04448361\nn04449290\nn04449449\nn04449550\nn04449700\nn04449966\nn04450133\nn04450243\nn04450465\nn04450640\nn04450749\nn04450994\nn04451139\nn04451318\nn04451636\nn04451818\nn04452528\nn04452615\nn04452757\nn04452848\nn04453037\nn04453156\nn04453390\nn04453666\nn04453910\nn04454654\nn04454792\nn04454908\nn04455048\nn04455250\nn04455579\nn04455652\nn04456011\nn04456115\nn04456472\nn04456734\nn04457157\nn04457326\nn04457474\nn04457638\nn04457767\nn04457910\nn04458201\nn04458633\nn04458843\nn04459018\nn04459122\nn04459243\nn04459362\nn04459610\nn04459773\nn04459909\nn04460130\nn04461437\nn04461570\nn04461696\nn04461879\nn04462011\nn04462240\nn04462576\nn04463679\nn04464125\nn04464615\nn04464852\nn04465050\nn04465203\nn04465358\nn04465501\nn04465666\nn04466871\nn04467099\nn04467307\nn04467506\nn04467665\nn04467899\nn04468005\nn04469003\nn04469251\nn04469514\nn04469684\nn04469813\nn04470741\nn04471148\nn04471315\nn04471632\nn04471912\nn04472243\nn04472563\nn04472726\nn04472961\nn04473108\nn04473275\nn04473884\nn04474035\nn04474187\nn04474466\nn04475309\nn04475411\nn04475496\nn04475631\nn04475749\nn04475900\nn04476116\nn04476259\nn04476526\nn04476831\nn04476972\nn04477219\nn04477387\nn04477548\nn04477725\nn04478066\nn04478383\nn04478512\nn04478657\nn04479046\nn04479287\nn04479405\nn04479526\nn
04479694\nn04479823\nn04479939\nn04480033\nn04480141\nn04480303\nn04480527\nn04480853\nn04480995\nn04481524\nn04481642\nn04482177\nn04482297\nn04482393\nn04482975\nn04483073\nn04483307\nn04483925\nn04484024\nn04484432\nn04485082\nn04485423\nn04485586\nn04485750\nn04485884\nn04486054\nn04486213\nn04486322\nn04486616\nn04486934\nn04487081\nn04487394\nn04487724\nn04487894\nn04488202\nn04488427\nn04488530\nn04488742\nn04488857\nn04489008\nn04489695\nn04489817\nn04490091\nn04491312\nn04491388\nn04491638\nn04491769\nn04491934\nn04492060\nn04492157\nn04492375\nn04492749\nn04493109\nn04493259\nn04493381\nn04494204\nn04495051\nn04495183\nn04495310\nn04495450\nn04495555\nn04495698\nn04495843\nn04496614\nn04496726\nn04496872\nn04497249\nn04497442\nn04497570\nn04497801\nn04498275\nn04498389\nn04498523\nn04498873\nn04499062\nn04499300\nn04499446\nn04499554\nn04499810\nn04500060\nn04500390\nn04501127\nn04501281\nn04501370\nn04501550\nn04501837\nn04501947\nn04502059\nn04502197\nn04502502\nn04502670\nn04502851\nn04502989\nn04503073\nn04503155\nn04503269\nn04503413\nn04503499\nn04503593\nn04503705\nn04504038\nn04504141\nn04504770\nn04505036\nn04505345\nn04505470\nn04505888\nn04506289\nn04506402\nn04506506\nn04506688\nn04506895\nn04506994\nn04507155\nn04507326\nn04507453\nn04507689\nn04508163\nn04508489\nn04508949\nn04509171\nn04509260\nn04509417\nn04509592\nn04510706\nn04511002\nn04513827\nn04513998\nn04514095\nn04514241\nn04514648\nn04515003\nn04515444\nn04515729\nn04515890\nn04516116\nn04516214\nn04516354\nn04516672\nn04517211\nn04517408\nn04517823\nn04517999\nn04518132\nn04518343\nn04518643\nn04518764\nn04519153\nn04519536\nn04519728\nn04519887\nn04520170\nn04520382\nn04520784\nn04520962\nn04521571\nn04521863\nn04521987\nn04522168\nn04523525\nn04523831\nn04524142\nn04524313\nn04524594\nn04524716\nn04524941\nn04525038\nn04525191\nn04525305\nn04525417\nn04525584\nn04525821\nn04526520\nn04526800\nn04526964\nn04527648\nn04528079\nn04528968\nn04529108\nn04529681\nn04529962\nn04530283\
nn04530456\nn04530566\nn04531098\nn04531873\nn04532022\nn04532106\nn04532398\nn04532504\nn04532670\nn04532831\nn04533042\nn04533199\nn04533499\nn04533594\nn04533700\nn04533802\nn04533946\nn04534127\nn04534359\nn04534520\nn04534895\nn04535252\nn04535370\nn04535524\nn04536153\nn04536335\nn04536465\nn04536595\nn04536765\nn04536866\nn04537436\nn04538249\nn04538403\nn04538552\nn04538878\nn04539053\nn04539203\nn04539407\nn04539794\nn04540053\nn04540255\nn04540397\nn04540761\nn04541136\nn04541320\nn04541662\nn04541777\nn04541987\nn04542095\nn04542329\nn04542474\nn04542595\nn04542715\nn04542858\nn04542943\nn04543158\nn04543509\nn04543636\nn04543772\nn04543924\nn04543996\nn04544325\nn04544450\nn04545305\nn04545471\nn04545748\nn04545858\nn04545984\nn04546081\nn04546194\nn04546340\nn04546595\nn04546855\nn04547592\nn04548280\nn04548362\nn04549028\nn04549122\nn04549629\nn04549721\nn04549919\nn04550184\nn04550676\nn04551055\nn04551833\nn04552097\nn04552348\nn04552551\nn04552696\nn04553389\nn04553561\nn04553703\nn04554211\nn04554406\nn04554684\nn04554871\nn04554998\nn04555291\nn04555400\nn04555600\nn04555700\nn04555897\nn04556408\nn04556533\nn04556664\nn04556948\nn04557308\nn04557522\nn04557648\nn04557751\nn04558059\nn04558199\nn04558478\nn04558804\nn04559023\nn04559166\nn04559451\nn04559620\nn04559730\nn04559910\nn04559994\nn04560113\nn04560292\nn04560502\nn04560619\nn04560804\nn04560882\nn04561010\nn04561287\nn04561422\nn04561734\nn04561857\nn04561965\nn04562122\nn04562262\nn04562496\nn04562935\nn04563020\nn04563204\nn04563413\nn04563560\nn04563790\nn04564278\nn04564581\nn04565039\nn04565375\nn04566257\nn04566561\nn04566756\nn04567098\nn04567593\nn04567746\nn04568069\nn04568557\nn04568713\nn04568841\nn04569063\nn04569520\nn04569822\nn04570118\nn04570214\nn04570416\nn04570532\nn04570815\nn04570958\nn04571292\nn04571566\nn04571686\nn04571800\nn04571958\nn04572121\nn04572235\nn04572935\nn04573045\nn04573281\nn04573379\nn04573513\nn04573625\nn04573832\nn04573937\nn04574067\nn0457434
8\nn04574471\nn04574606\nn04574999\nn04575723\nn04575824\nn04576002\nn04576211\nn04576971\nn04577139\nn04577293\nn04577426\nn04577567\nn04577769\nn04578112\nn04578329\nn04578559\nn04578708\nn04578801\nn04578934\nn04579056\nn04579145\nn04579230\nn04579432\nn04579667\nn04579986\nn04580493\nn04581102\nn04581595\nn04581829\nn04582205\nn04582349\nn04582771\nn04582869\nn04583022\nn04583212\nn04583620\nn04583888\nn04583967\nn04584056\nn04584207\nn04584373\nn04585128\nn04585318\nn04585456\nn04585626\nn04585745\nn04585980\nn04586072\nn04586581\nn04586932\nn04587327\nn04587404\nn04587559\nn04587648\nn04588739\nn04589190\nn04589325\nn04589434\nn04589593\nn04589890\nn04590021\nn04590129\nn04590263\nn04590553\nn04590746\nn04590933\nn04591056\nn04591157\nn04591249\nn04591359\nn04591517\nn04591631\nn04591713\nn04591887\nn04592005\nn04592099\nn04592356\nn04592465\nn04592596\nn04592741\nn04593077\nn04593185\nn04593376\nn04593524\nn04593629\nn04593866\nn04594114\nn04594218\nn04594489\nn04594742\nn04594828\nn04594919\nn04595028\nn04595285\nn04595501\nn04595611\nn04595762\nn04595855\nn04596116\nn04596492\nn04596742\nn04596852\nn04597066\nn04597309\nn04597400\nn04597804\nn04597913\nn04598136\nn04598318\nn04598416\nn04598582\nn04598965\nn04599124\nn04599235\nn04600312\nn04600486\nn04600912\nn04601041\nn04601159\nn04601938\nn04602762\nn04602840\nn04602956\nn04603399\nn04603729\nn04603872\nn04604276\nn04604644\nn04604806\nn04605057\nn04605163\nn04605321\nn04605446\nn04605572\nn04605726\nn04606251\nn04606574\nn04607035\nn04607242\nn04607640\nn04607759\nn04607869\nn04607982\nn04608329\nn04608435\nn04608567\nn04608809\nn04608923\nn04609531\nn04609651\nn04609811\nn04610013\nn04610176\nn04610274\nn04610503\nn04610676\nn04611351\nn04611795\nn04611916\nn04612026\nn04612159\nn04612257\nn04612373\nn04612504\nn04612840\nn04613015\nn04613158\nn04613696\nn04613939\nn04614505\nn04614655\nn04614844\nn04615149\nn04615226\nn04615644\nn04682018\nn04950713\nn04950952\nn04951071\nn04951186\nn04951373\nn04951
716\nn04951875\nn04953296\nn04953678\nn04955160\nn04957356\nn04957589\nn04958634\nn04958865\nn04959061\nn04959230\nn04959672\nn04960277\nn04960582\nn04961062\nn04961331\nn04961691\nn04962062\nn04962240\nn04963111\nn04963307\nn04963588\nn04963740\nn04964001\nn04964799\nn04964878\nn04965179\nn04965451\nn04965661\nn04966543\nn04966941\nn04967191\nn04967561\nn04967674\nn04967801\nn04967882\nn04968056\nn04968139\nn04968749\nn04968895\nn04969242\nn04969540\nn04969798\nn04969952\nn04970059\nn04970312\nn04970398\nn04970470\nn04970631\nn04970916\nn04971211\nn04971313\nn04972350\nn04972451\nn04972801\nn04973020\nn04973291\nn04973386\nn04973585\nn04973669\nn04973816\nn04974145\nn04974340\nn04974859\nn04975739\nn04976319\nn04976952\nn04977412\nn04978561\nn04979002\nn04979307\nn04981658\nn05102764\nn05218119\nn05233741\nn05235879\nn05238282\nn05239437\nn05241218\nn05241485\nn05241662\nn05242070\nn05242239\nn05242928\nn05244421\nn05244755\nn05244934\nn05245192\nn05257476\nn05257967\nn05258051\nn05258627\nn05259914\nn05260127\nn05260240\nn05261310\nn05262422\nn05262534\nn05262698\nn05263183\nn05263316\nn05263448\nn05265736\nn05266096\nn05266879\nn05278922\nn05279953\nn05282652\nn05285623\nn05302499\nn05314075\nn05399034\nn05399243\nn05399356\nn05418717\nn05427346\nn05442594\nn05447757\nn05448704\nn05448827\nn05449196\nn05449661\nn05449959\nn05450617\nn05451099\nn05451384\nn05453412\nn05453657\nn05453815\nn05454833\nn05454978\nn05455113\nn05458173\nn05458576\nn05459101\nn05459457\nn05459769\nn05460759\nn05464534\nn05467054\nn05467758\nn05468098\nn05468739\nn05469664\nn05469861\nn05475397\nn05482922\nn05486510\nn05491154\nn05526957\nn05538625\nn05539947\nn05541509\nn05542893\nn05545879\nn05571341\nn05578095\nn05581932\nn05584746\nn05586759\nn05604434\nn05716342\nn06008896\nn06209940\nn06254669\nn06255081\nn06255613\nn06259898\nn06262567\nn06262943\nn06263202\nn06263369\nn06263609\nn06263762\nn06263895\nn06266417\nn06266633\nn06266710\nn06266878\nn06266973\nn06267145\nn06267564\nn062
67655\nn06267758\nn06267893\nn06267991\nn06271778\nn06272290\nn06272612\nn06272803\nn06273207\nn06273294\nn06273414\nn06273555\nn06273743\nn06273890\nn06273986\nn06274092\nn06274292\nn06274546\nn06274760\nn06274921\nn06275095\nn06275353\nn06275471\nn06276501\nn06276697\nn06276902\nn06277025\nn06277135\nn06277280\nn06278338\nn06278475\nn06281040\nn06281175\nn06340977\nn06359193\nn06359467\nn06359657\nn06415688\nn06417096\nn06418693\nn06419354\nn06423496\nn06470073\nn06591815\nn06592078\nn06592281\nn06592421\nn06595351\nn06596179\nn06596364\nn06596474\nn06596607\nn06596727\nn06596845\nn06613686\nn06614901\nn06616216\nn06618653\nn06625062\nn06785654\nn06793231\nn06794110\nn06874185\nn06883725\nn06892775\nn06998748\nn07005523\nn07248320\nn07273802\nn07461050\nn07556406\nn07556637\nn07556872\nn07556970\nn07557165\nn07557434\nn07560193\nn07560331\nn07560422\nn07560542\nn07560652\nn07560903\nn07561112\nn07561590\nn07561848\nn07562017\nn07562172\nn07562379\nn07562495\nn07562651\nn07562881\nn07562984\nn07563207\nn07563366\nn07563642\nn07563800\nn07564008\nn07564101\nn07564292\nn07564515\nn07564629\nn07564796\nn07564971\nn07565083\nn07565161\nn07565259\nn07565608\nn07565725\nn07565945\nn07566092\nn07566231\nn07566340\nn07566863\nn07567039\nn07567139\nn07567390\nn07567611\nn07567707\nn07567980\nn07568095\nn07568241\nn07568389\nn07568502\nn07568625\nn07568818\nn07568991\nn07569106\nn07569423\nn07569543\nn07569644\nn07569873\nn07570021\nn07570530\nn07570720\nn07572353\nn07572616\nn07572858\nn07572957\nn07573103\nn07573347\nn07573453\nn07573563\nn07573696\nn07574176\nn07574426\nn07574504\nn07574602\nn07574780\nn07574923\nn07575076\nn07575226\nn07575392\nn07575510\nn07575726\nn07575984\nn07576182\nn07576438\nn07576577\nn07576781\nn07576969\nn07577144\nn07577374\nn07577538\nn07577657\nn07577772\nn07577918\nn07578093\nn07579575\nn07579688\nn07579787\nn07579917\nn07580053\nn07580253\nn07580359\nn07580470\nn07580592\nn07581249\nn07581346\nn07581607\nn07581775\nn07581931\nn07582027\nn0
7582152\nn07582277\nn07582441\nn07582609\nn07582811\nn07582892\nn07582970\nn07583066\nn07583197\nn07583865\nn07583978\nn07584110\nn07584228\nn07584332\nn07584423\nn07584593\nn07584859\nn07584938\nn07585015\nn07585107\nn07585208\nn07585474\nn07585557\nn07585644\nn07585758\nn07585906\nn07585997\nn07586099\nn07586179\nn07586318\nn07586485\nn07586604\nn07586718\nn07586894\nn07587023\nn07587111\nn07587206\nn07587331\nn07587441\nn07587618\nn07587700\nn07587819\nn07587962\nn07588111\nn07588193\nn07588299\nn07588419\nn07588574\nn07588688\nn07588817\nn07588947\nn07589458\nn07589543\nn07589724\nn07589872\nn07589967\nn07590068\nn07590177\nn07590320\nn07590502\nn07590611\nn07590752\nn07590841\nn07590974\nn07591049\nn07591162\nn07591236\nn07591330\nn07591473\nn07591586\nn07591813\nn07591961\nn07592094\nn07592317\nn07592400\nn07592481\nn07592656\nn07592768\nn07592922\nn07593004\nn07593107\nn07593199\nn07593471\nn07593774\nn07593972\nn07594066\nn07594155\nn07594250\nn07594737\nn07594840\nn07595051\nn07595180\nn07595368\nn07595649\nn07595751\nn07595914\nn07596046\nn07596160\nn07596362\nn07596452\nn07596566\nn07596684\nn07596967\nn07597145\nn07597263\nn07597365\nn07598256\nn07598529\nn07598622\nn07598734\nn07598928\nn07599068\nn07599161\nn07599242\nn07599383\nn07599468\nn07599554\nn07599649\nn07599783\nn07599911\nn07599998\nn07600177\nn07600285\nn07600394\nn07600506\nn07600696\nn07600895\nn07601025\nn07601175\nn07601290\nn07601407\nn07601572\nn07601686\nn07601809\nn07602650\nn07604956\nn07605040\nn07605198\nn07605282\nn07605380\nn07605474\nn07605597\nn07605693\nn07605804\nn07605944\nn07606058\nn07606191\nn07606278\nn07606419\nn07606538\nn07606669\nn07606764\nn07606933\nn07607027\nn07607138\nn07607361\nn07607492\nn07607605\nn07607707\nn07607832\nn07607967\nn07608098\nn07608245\nn07608339\nn07608429\nn07608533\nn07608641\nn07608721\nn07608866\nn07608980\nn07609083\nn07609215\nn07609316\nn07609407\nn07609549\nn07609632\nn07609728\nn07609840\nn07610295\nn07610502\nn07610620\nn07610746\n
n07610890\nn07611046\nn07611148\nn07611267\nn07611358\nn07611733\nn07611839\nn07611991\nn07612137\nn07612273\nn07612367\nn07612530\nn07612632\nn07612996\nn07613158\nn07613266\nn07613480\nn07613671\nn07613815\nn07614103\nn07614198\nn07614348\nn07614500\nn07614730\nn07614825\nn07615052\nn07615190\nn07615289\nn07615460\nn07615569\nn07615671\nn07615774\nn07615954\nn07616046\nn07616174\nn07616265\nn07616386\nn07616487\nn07616590\nn07616748\nn07616906\nn07617051\nn07617188\nn07617344\nn07617447\nn07617526\nn07617611\nn07617708\nn07617839\nn07617932\nn07618029\nn07618119\nn07618281\nn07618432\nn07618587\nn07618684\nn07618871\nn07619004\nn07619208\nn07619301\nn07619409\nn07619508\nn07619881\nn07620047\nn07620145\nn07620327\nn07620597\nn07620689\nn07621264\nn07621497\nn07621618\nn07623136\nn07624466\nn07624666\nn07624757\nn07624924\nn07625061\nn07625324\nn07627931\nn07628068\nn07628181\nn07631926\nn07639069\nn07641928\nn07642361\nn07642471\nn07642742\nn07642833\nn07642933\nn07643026\nn07643200\nn07643306\nn07643474\nn07643577\nn07643679\nn07643764\nn07643891\nn07643981\nn07644244\nn07648913\nn07648997\nn07650792\nn07650903\nn07651025\nn07654148\nn07654298\nn07655067\nn07655263\nn07663899\nn07665438\nn07666176\nn07672914\nn07678586\nn07678729\nn07678953\nn07679034\nn07679140\nn07679356\nn07680168\nn07680313\nn07680416\nn07680517\nn07680655\nn07680761\nn07680932\nn07681264\nn07681355\nn07681450\nn07681691\nn07681805\nn07681926\nn07682197\nn07682316\nn07682477\nn07682624\nn07682808\nn07682952\nn07683039\nn07683138\nn07683265\nn07683360\nn07683490\nn07683617\nn07683786\nn07684084\nn07684164\nn07684289\nn07684422\nn07684517\nn07684600\nn07684938\nn07685031\nn07685118\nn07685218\nn07685303\nn07685399\nn07685546\nn07685730\nn07685918\nn07686021\nn07686202\nn07686299\nn07686461\nn07686634\nn07686720\nn07686873\nn07687053\nn07687211\nn07687381\nn07687469\nn07687626\nn07687789\nn07688021\nn07688130\nn07688265\nn07688412\nn07688624\nn07688757\nn07688898\nn07689003\nn07689217\nn07689313
\nn07689490\nn07689624\nn07689757\nn07689842\nn07690019\nn07690152\nn07690273\nn07690431\nn07690511\nn07690585\nn07690739\nn07690892\nn07691091\nn07691237\nn07691539\nn07691650\nn07691758\nn07691863\nn07691954\nn07692114\nn07692248\nn07692405\nn07692517\nn07692614\nn07692887\nn07693048\nn07693223\nn07693439\nn07693590\nn07693725\nn07693889\nn07693972\nn07694169\nn07694403\nn07694516\nn07694659\nn07694839\nn07695187\nn07695284\nn07695410\nn07695504\nn07695652\nn07695742\nn07695878\nn07695965\nn07696403\nn07696527\nn07696625\nn07696728\nn07696839\nn07696977\nn07697100\nn07697313\nn07697408\nn07697537\nn07697699\nn07697825\nn07698250\nn07698401\nn07698543\nn07698672\nn07698782\nn07700003\nn07703889\nn07704054\nn07704205\nn07704305\nn07705931\nn07707451\nn07708124\nn07708398\nn07708512\nn07708685\nn07708798\nn07709046\nn07709172\nn07709333\nn07709701\nn07709881\nn07710007\nn07710283\nn07710616\nn07710952\nn07711080\nn07711232\nn07711371\nn07711569\nn07711683\nn07711799\nn07711907\nn07712063\nn07712267\nn07712382\nn07712559\nn07712748\nn07712856\nn07712959\nn07713074\nn07713267\nn07713395\nn07713763\nn07713895\nn07714078\nn07714188\nn07714287\nn07714448\nn07714571\nn07714802\nn07714895\nn07714990\nn07715103\nn07715221\nn07715407\nn07715561\nn07715721\nn07716034\nn07716203\nn07716358\nn07716504\nn07716649\nn07716750\nn07716906\nn07717070\nn07717410\nn07717556\nn07717714\nn07717858\nn07718068\nn07718195\nn07718329\nn07718472\nn07718671\nn07718747\nn07718920\nn07719058\nn07719213\nn07719330\nn07719437\nn07719616\nn07719756\nn07719839\nn07719980\nn07720084\nn07720185\nn07720277\nn07720442\nn07720615\nn07720875\nn07721018\nn07721118\nn07721195\nn07721325\nn07721456\nn07721678\nn07721833\nn07721942\nn07722052\nn07722217\nn07722390\nn07722485\nn07722666\nn07722763\nn07722888\nn07723039\nn07723177\nn07723330\nn07723559\nn07723753\nn07723968\nn07724078\nn07724173\nn07724269\nn07724492\nn07724654\nn07724819\nn07724943\nn07725158\nn07725255\nn07725376\nn07725531\nn07725663\nn077257
89\nn07725888\nn07726009\nn07726095\nn07726230\nn07726386\nn07726525\nn07726672\nn07726796\nn07727048\nn07727140\nn07727252\nn07727377\nn07727458\nn07727578\nn07727741\nn07727868\nn07728053\nn07728181\nn07728284\nn07728391\nn07728585\nn07728708\nn07728804\nn07729000\nn07729142\nn07729225\nn07729384\nn07729485\nn07729828\nn07729926\nn07730033\nn07730207\nn07730320\nn07730406\nn07730562\nn07730708\nn07730855\nn07731006\nn07731122\nn07731284\nn07731436\nn07731587\nn07731767\nn07731952\nn07732168\nn07732302\nn07732433\nn07732525\nn07732636\nn07732747\nn07732904\nn07733005\nn07733124\nn07733217\nn07733394\nn07733567\nn07733712\nn07733847\nn07734017\nn07734183\nn07734292\nn07734417\nn07734555\nn07734744\nn07734879\nn07735052\nn07735179\nn07735294\nn07735404\nn07735510\nn07735687\nn07735803\nn07735981\nn07736087\nn07736256\nn07736371\nn07736527\nn07736692\nn07736813\nn07736971\nn07737081\nn07737594\nn07737745\nn07738105\nn07738224\nn07739035\nn07739125\nn07739344\nn07739506\nn07739923\nn07740033\nn07740115\nn07740220\nn07740342\nn07740461\nn07740597\nn07740744\nn07740855\nn07740954\nn07741138\nn07741235\nn07741357\nn07741461\nn07741623\nn07741706\nn07741804\nn07741888\nn07742012\nn07742224\nn07742313\nn07742415\nn07742513\nn07742605\nn07742704\nn07743224\nn07743384\nn07743544\nn07743723\nn07743902\nn07744057\nn07744246\nn07744430\nn07744559\nn07744682\nn07744811\nn07745046\nn07745197\nn07745357\nn07745466\nn07745661\nn07745940\nn07746038\nn07746186\nn07746334\nn07746551\nn07746749\nn07746910\nn07747055\nn07747607\nn07747811\nn07747951\nn07748157\nn07748276\nn07748416\nn07748574\nn07748753\nn07748912\nn07749095\nn07749192\nn07749312\nn07749446\nn07749582\nn07749731\nn07749870\nn07749969\nn07750146\nn07750299\nn07750449\nn07750586\nn07750736\nn07750872\nn07751004\nn07751148\nn07751280\nn07751451\nn07751737\nn07751858\nn07751977\nn07752109\nn07752264\nn07752377\nn07752514\nn07752602\nn07752664\nn07752782\nn07752874\nn07752966\nn07753113\nn07753275\nn07753448\nn07753592\nn0775
3743\nn07753980\nn07754155\nn07754279\nn07754451\nn07754684\nn07754894\nn07755089\nn07755262\nn07755411\nn07755619\nn07755707\nn07755929\nn07756096\nn07756325\nn07756499\nn07756641\nn07756838\nn07756951\nn07757132\nn07757312\nn07757511\nn07757602\nn07757753\nn07757874\nn07757990\nn07758125\nn07758260\nn07758407\nn07758582\nn07758680\nn07758950\nn07759194\nn07759324\nn07759424\nn07759576\nn07759691\nn07759816\nn07760070\nn07760153\nn07760297\nn07760395\nn07760501\nn07760673\nn07760755\nn07760859\nn07761141\nn07761309\nn07761611\nn07761777\nn07761954\nn07762114\nn07762244\nn07762373\nn07762534\nn07762740\nn07762913\nn07763107\nn07763290\nn07763483\nn07763629\nn07763792\nn07763987\nn07764155\nn07764315\nn07764486\nn07764630\nn07764847\nn07765073\nn07765208\nn07765361\nn07765517\nn07765612\nn07765728\nn07765862\nn07765999\nn07766173\nn07766409\nn07766530\nn07766723\nn07766891\nn07767002\nn07767171\nn07767344\nn07767549\nn07767709\nn07767847\nn07768068\nn07768139\nn07768230\nn07768318\nn07768423\nn07768590\nn07768694\nn07768858\nn07769102\nn07769306\nn07769465\nn07769584\nn07769731\nn07769886\nn07770034\nn07770180\nn07770439\nn07770571\nn07770763\nn07770869\nn07771082\nn07771212\nn07771405\nn07771539\nn07771731\nn07771891\nn07772026\nn07772147\nn07772274\nn07772413\nn07772788\nn07772935\nn07773428\nn07774182\nn07774295\nn07774479\nn07774596\nn07774719\nn07774842\nn07775050\nn07775197\nn07783827\nn07785487\nn07800091\nn07800487\nn07800636\nn07800740\nn07801007\nn07801091\nn07801342\nn07801508\nn07801709\nn07801779\nn07801892\nn07802026\nn07802152\nn07802246\nn07802417\nn07802767\nn07802863\nn07802963\nn07803093\nn07803213\nn07803310\nn07803408\nn07803545\nn07803779\nn07803895\nn07803992\nn07804152\nn07804323\nn07804543\nn07804657\nn07804771\nn07804900\nn07805006\nn07805254\nn07805389\nn07805478\nn07805594\nn07805731\nn07805966\nn07806043\nn07806120\nn07806221\nn07806633\nn07806774\nn07806879\nn07807002\nn07807171\nn07807317\nn07807472\nn07807594\nn07807710\nn07807834\nn07
807922\nn07808022\nn07808166\nn07808268\nn07808352\nn07808479\nn07808587\nn07808675\nn07808806\nn07808904\nn07809096\nn07809368\nn07810531\nn07810907\nn07811416\nn07812046\nn07812184\nn07812662\nn07812790\nn07812913\nn07813107\nn07813324\nn07813495\nn07813579\nn07813717\nn07813833\nn07814007\nn07814203\nn07814390\nn07814487\nn07814634\nn07814790\nn07814925\nn07815163\nn07815294\nn07815424\nn07815588\nn07815839\nn07815956\nn07816052\nn07816164\nn07816296\nn07816398\nn07816575\nn07816726\nn07816839\nn07817024\nn07817160\nn07817315\nn07817465\nn07817599\nn07817758\nn07817871\nn07818029\nn07818133\nn07818277\nn07818422\nn07818572\nn07818689\nn07818825\nn07818995\nn07819166\nn07819303\nn07819480\nn07819682\nn07819769\nn07819896\nn07820036\nn07820145\nn07820297\nn07820497\nn07820683\nn07820814\nn07820960\nn07821107\nn07821260\nn07821404\nn07821610\nn07821758\nn07821919\nn07822053\nn07822197\nn07822323\nn07822518\nn07822687\nn07822845\nn07823105\nn07823280\nn07823369\nn07823460\nn07823591\nn07823698\nn07823814\nn07823951\nn07824191\nn07824268\nn07824383\nn07824502\nn07824702\nn07824863\nn07824988\nn07825194\nn07825399\nn07825496\nn07825597\nn07825717\nn07825850\nn07825972\nn07826091\nn07826250\nn07826340\nn07826453\nn07826544\nn07826653\nn07826930\nn07827130\nn07827284\nn07827410\nn07827554\nn07827750\nn07827896\nn07828041\nn07828156\nn07828275\nn07828378\nn07828642\nn07828987\nn07829248\nn07829331\nn07829412\nn07830493\nn07830593\nn07830690\nn07830841\nn07830986\nn07831146\nn07831267\nn07831450\nn07831663\nn07831821\nn07831955\nn07832099\nn07832202\nn07832307\nn07832416\nn07832592\nn07832741\nn07832902\nn07833333\nn07833535\nn07833672\nn07833816\nn07833951\nn07834065\nn07834160\nn07834286\nn07834507\nn07834618\nn07834774\nn07834872\nn07835051\nn07835173\nn07835331\nn07835457\nn07835547\nn07835701\nn07835823\nn07835921\nn07836077\nn07836269\nn07836456\nn07836600\nn07836731\nn07836838\nn07837002\nn07837110\nn07837234\nn07837362\nn07837545\nn07837630\nn07837755\nn07837912\nn
07838073\nn07838233\nn07838441\nn07838551\nn07838659\nn07838811\nn07838905\nn07839055\nn07839172\nn07839312\nn07839478\nn07839593\nn07839730\nn07839864\nn07840027\nn07840124\nn07840219\nn07840304\nn07840395\nn07840520\nn07840672\nn07840804\nn07841037\nn07841345\nn07841495\nn07841639\nn07841800\nn07841907\nn07842044\nn07842130\nn07842202\nn07842308\nn07842433\nn07842605\nn07842753\nn07842972\nn07843117\nn07843220\nn07843348\nn07843464\nn07843636\nn07843775\nn07844042\nn07844604\nn07844786\nn07844867\nn07845087\nn07845166\nn07845335\nn07845421\nn07845495\nn07845571\nn07845702\nn07845775\nn07845863\nn07846014\nn07846143\nn07846274\nn07846359\nn07846471\nn07846557\nn07846688\nn07846802\nn07846938\nn07847047\nn07847198\nn07847453\nn07847585\nn07847706\nn07847827\nn07847917\nn07848093\nn07848196\nn07848338\nn07848771\nn07848936\nn07849026\nn07849186\nn07849336\nn07849506\nn07849619\nn07849733\nn07849912\nn07850083\nn07850219\nn07850329\nn07851054\nn07851298\nn07851443\nn07851554\nn07851641\nn07851767\nn07851926\nn07852045\nn07852229\nn07852302\nn07852376\nn07852452\nn07852532\nn07852614\nn07852712\nn07852833\nn07852919\nn07853125\nn07853232\nn07853345\nn07853445\nn07853560\nn07853648\nn07853762\nn07853852\nn07853946\nn07854066\nn07854184\nn07854266\nn07854348\nn07854455\nn07854614\nn07854707\nn07854813\nn07854982\nn07855105\nn07855188\nn07855317\nn07855413\nn07855510\nn07855603\nn07855721\nn07855812\nn07855907\nn07856045\nn07856186\nn07856270\nn07856756\nn07856895\nn07856992\nn07857076\nn07857170\nn07857356\nn07857598\nn07857731\nn07857959\nn07858114\nn07858197\nn07858336\nn07858484\nn07858595\nn07858841\nn07858978\nn07859142\nn07859284\nn07859583\nn07859796\nn07859951\nn07860103\nn07860208\nn07860331\nn07860447\nn07860548\nn07860629\nn07860805\nn07860988\nn07861158\nn07861247\nn07861334\nn07861557\nn07861681\nn07861813\nn07861983\nn07862095\nn07862244\nn07862348\nn07862461\nn07862611\nn07862770\nn07862946\nn07863107\nn07863229\nn07863374\nn07863547\nn07863644\nn07863802\
nn07863935\nn07864065\nn07864198\nn07864317\nn07864475\nn07864638\nn07864756\nn07864934\nn07865105\nn07865196\nn07865484\nn07865575\nn07865700\nn07865788\nn07866015\nn07866151\nn07866277\nn07866409\nn07866571\nn07866723\nn07866868\nn07867021\nn07867164\nn07867324\nn07867421\nn07867616\nn07867751\nn07867883\nn07868045\nn07868200\nn07868340\nn07868508\nn07868684\nn07868830\nn07868955\nn07869111\nn07869291\nn07869391\nn07869522\nn07869611\nn07869775\nn07869937\nn07870069\nn07870167\nn07870313\nn07870478\nn07870620\nn07870734\nn07870894\nn07871065\nn07871234\nn07871335\nn07871436\nn07871588\nn07871720\nn07871810\nn07872593\nn07872748\nn07873057\nn07873198\nn07873348\nn07873464\nn07873679\nn07873807\nn07874063\nn07874159\nn07874259\nn07874343\nn07874441\nn07874531\nn07874674\nn07874780\nn07874995\nn07875086\nn07875152\nn07875267\nn07875436\nn07875560\nn07875693\nn07875835\nn07875926\nn07876026\nn07876189\nn07876281\nn07876460\nn07876550\nn07876651\nn07876775\nn07876893\nn07877187\nn07877299\nn07877675\nn07877849\nn07877961\nn07878145\nn07878283\nn07878479\nn07878647\nn07878785\nn07878926\nn07879072\nn07879174\nn07879350\nn07879450\nn07879560\nn07879659\nn07879821\nn07879953\nn07880080\nn07880213\nn07880325\nn07880458\nn07880751\nn07880880\nn07880968\nn07881117\nn07881205\nn07881404\nn07881525\nn07881625\nn07881800\nn07882420\nn07882497\nn07882886\nn07883031\nn07883156\nn07883251\nn07883384\nn07883510\nn07883661\nn07884567\nn07885705\nn07886057\nn07886176\nn07886317\nn07886463\nn07886572\nn07886849\nn07887099\nn07887192\nn07887304\nn07887461\nn07887634\nn07887967\nn07888058\nn07888229\nn07888378\nn07888465\nn07888816\nn07888909\nn07889193\nn07889274\nn07889510\nn07889814\nn07889990\nn07890068\nn07890226\nn07890352\nn07890540\nn07890617\nn07890750\nn07890890\nn07890970\nn07891095\nn07891189\nn07891309\nn07891433\nn07891726\nn07892418\nn07892512\nn07892813\nn07893253\nn07893425\nn07893528\nn07893642\nn07893792\nn07893891\nn07894102\nn07894298\nn07894451\nn07894551\nn0789470
3\nn07894799\nn07894965\nn07895100\nn07895237\nn07895435\nn07895595\nn07895710\nn07895839\nn07895962\nn07896060\nn07896165\nn07896287\nn07896422\nn07896560\nn07896661\nn07896765\nn07896893\nn07896994\nn07897116\nn07897200\nn07897438\nn07897600\nn07897750\nn07897865\nn07897975\nn07898117\nn07898247\nn07898333\nn07898443\nn07898617\nn07898745\nn07898895\nn07899003\nn07899108\nn07899292\nn07899434\nn07899533\nn07899660\nn07899769\nn07899899\nn07899976\nn07900225\nn07900406\nn07900616\nn07900734\nn07900825\nn07900958\nn07901355\nn07901457\nn07901587\nn07902121\nn07902336\nn07902443\nn07902520\nn07902698\nn07902799\nn07902937\nn07903101\nn07903208\nn07903543\nn07903643\nn07903731\nn07903841\nn07903962\nn07904072\nn07904293\nn07904395\nn07904637\nn07904760\nn07904865\nn07904934\nn07905038\nn07905296\nn07905386\nn07905474\nn07905618\nn07905770\nn07905979\nn07906111\nn07906284\nn07906572\nn07906718\nn07906877\nn07907037\nn07907161\nn07907342\nn07907429\nn07907548\nn07907831\nn07907943\nn07908411\nn07908567\nn07908647\nn07908812\nn07908923\nn07909129\nn07909231\nn07909362\nn07909504\nn07909593\nn07909714\nn07909811\nn07909954\nn07910048\nn07910152\nn07910245\nn07910379\nn07910538\nn07910656\nn07910799\nn07910970\nn07911061\nn07911249\nn07911371\nn07911677\nn07912093\nn07912211\nn07913180\nn07913300\nn07913393\nn07913537\nn07913644\nn07913774\nn07913882\nn07914006\nn07914128\nn07914271\nn07914413\nn07914586\nn07914686\nn07914777\nn07914887\nn07914995\nn07915094\nn07915213\nn07915366\nn07915491\nn07915618\nn07915800\nn07915918\nn07916041\nn07916183\nn07916319\nn07916437\nn07916582\nn07917133\nn07917272\nn07917392\nn07917507\nn07917618\nn07917791\nn07917874\nn07917951\nn07918028\nn07918193\nn07918309\nn07918706\nn07918879\nn07919165\nn07919310\nn07919441\nn07919572\nn07919665\nn07919787\nn07919894\nn07920052\nn07920222\nn07920349\nn07920540\nn07920663\nn07920872\nn07920989\nn07921090\nn07921239\nn07921360\nn07921455\nn07921615\nn07921834\nn07921948\nn07922041\nn07922147\nn07922
512\nn07922607\nn07922764\nn07922955\nn07923748\nn07924033\nn07924276\nn07924366\nn07924443\nn07924560\nn07924655\nn07924747\nn07924834\nn07924955\nn07925116\nn07925229\nn07925327\nn07925423\nn07925500\nn07925608\nn07925708\nn07925808\nn07925966\nn07926250\nn07926346\nn07926442\nn07926540\nn07926785\nn07926920\nn07927070\nn07927197\nn07927512\nn07927716\nn07927836\nn07927931\nn07928163\nn07928264\nn07928367\nn07928488\nn07928578\nn07928696\nn07928790\nn07928887\nn07928998\nn07929172\nn07929351\nn07929519\nn07929940\nn07930062\nn07930205\nn07930315\nn07930433\nn07930554\nn07930864\nn07931001\nn07931096\nn07931280\nn07931452\nn07931612\nn07931733\nn07931870\nn07932039\nn07932323\nn07932454\nn07932614\nn07932762\nn07932841\nn07933154\nn07933274\nn07933530\nn07933652\nn07933799\nn07933891\nn07934032\nn07934152\nn07934282\nn07934373\nn07934530\nn07934678\nn07934800\nn07934908\nn07935043\nn07935152\nn07935288\nn07935379\nn07935504\nn07935737\nn07935878\nn07936015\nn07936093\nn07936263\nn07936459\nn07936548\nn07936745\nn07936979\nn07937069\nn07937344\nn07937461\nn07937621\nn07938007\nn07938149\nn07938313\nn07938594\nn07942152\nn07951464\nn07954211\nn07977870\nn08079613\nn08182379\nn08238463\nn08242223\nn08249459\nn08253141\nn08256735\nn08376250\nn08385989\nn08492354\nn08492461\nn08494231\nn08495908\nn08496334\nn08500819\nn08500989\nn08501887\nn08505018\nn08506347\nn08511017\nn08517010\nn08517676\nn08518171\nn08519299\nn08521623\nn08523340\nn08524735\nn08539072\nn08539276\nn08540532\nn08547468\nn08547544\nn08551296\nn08554440\nn08555333\nn08555710\nn08558770\nn08558963\nn08559155\nn08560295\nn08569482\nn08571275\nn08571642\nn08571898\nn08573674\nn08573842\nn08578517\nn08579266\nn08579352\nn08580944\nn08583292\nn08583455\nn08583554\nn08583682\nn08584914\nn08586978\nn08589670\nn08596076\nn08597579\nn08598301\nn08598568\nn08599174\nn08599292\nn08611339\nn08611421\nn08613733\nn08614632\nn08616050\nn08618831\nn08619112\nn08623676\nn08628141\nn08633683\nn08640531\nn08640739\nn086
40962\nn08643267\nn08644045\nn08645104\nn08645212\nn08645318\nn08647264\nn08648917\nn08649711\nn08651104\nn08652376\nn08658309\nn08658918\nn08659242\nn08659331\nn08659446\nn08659861\nn08661878\nn08662427\nn08663051\nn08663703\nn08663860\nn08673039\nn08674344\nn08676253\nn08677424\nn08677801\nn08678783\nn08679167\nn08679269\nn08679562\nn08685188\nn08782627\nn08896327\nn09032191\nn09186592\nn09189157\nn09191635\nn09193551\nn09193705\nn09194227\nn09199101\nn09201998\nn09203827\nn09205509\nn09206896\nn09206985\nn09208496\nn09209025\nn09210862\nn09213434\nn09213565\nn09214060\nn09214269\nn09214916\nn09215023\nn09215437\nn09217230\nn09218315\nn09218494\nn09218641\nn09219233\nn09223487\nn09224725\nn09226869\nn09228055\nn09229709\nn09230041\nn09230202\nn09231117\nn09233446\nn09233603\nn09238926\nn09239302\nn09242389\nn09245515\nn09246464\nn09247410\nn09248153\nn09248399\nn09249034\nn09249155\nn09251407\nn09255070\nn09256479\nn09257843\nn09259025\nn09259219\nn09260907\nn09262690\nn09263912\nn09264803\nn09265620\nn09266604\nn09267854\nn09268007\nn09269341\nn09269472\nn09269882\nn09270160\nn09270657\nn09270735\nn09274152\nn09274305\nn09279986\nn09281252\nn09282208\nn09283193\nn09283405\nn09283514\nn09283767\nn09283866\nn09287415\nn09287968\nn09288635\nn09289331\nn09289596\nn09290350\nn09290444\nn09294877\nn09295210\nn09295946\nn09300306\nn09300905\nn09302616\nn09303008\nn09303528\nn09304750\nn09305031\nn09305898\nn09308572\nn09308743\nn09309046\nn09309168\nn09309292\nn09310616\nn09315159\nn09319604\nn09325824\nn09326662\nn09327077\nn09327538\nn09330378\nn09331251\nn09332890\nn09335693\nn09335809\nn09336555\nn09337048\nn09337253\nn09338013\nn09339810\nn09344198\nn09344324\nn09344724\nn09348460\nn09349648\nn09351905\nn09352849\nn09353815\nn09354511\nn09357346\nn09357447\nn09359803\nn09361517\nn09362316\nn09362945\nn09366017\nn09366317\nn09375606\nn09376198\nn09376526\nn09376786\nn09381242\nn09382099\nn09384106\nn09389867\nn09391386\nn09391644\nn09391774\nn09392402\nn09393524\nn0
9393605\nn09396465\nn09396608\nn09398076\nn09398677\nn09399592\nn09400584\nn09400987\nn09402944\nn09403086\nn09403211\nn09403427\nn09403734\nn09405078\nn09405787\nn09406793\nn09409512\nn09409752\nn09410224\nn09411189\nn09411295\nn09415584\nn09415671\nn09416076\nn09416890\nn09421031\nn09421799\nn09421951\nn09422190\nn09422631\nn09425019\nn09425344\nn09428293\nn09428628\nn09429630\nn09432283\nn09432990\nn09433312\nn09433442\nn09433839\nn09435739\nn09436444\nn09436708\nn09437454\nn09438844\nn09438940\nn09439032\nn09439213\nn09442595\nn09443281\nn09443641\nn09444783\nn09445008\nn09445289\nn09447666\nn09448690\nn09450163\nn09451237\nn09452291\nn09452395\nn09452760\nn09453008\nn09454153\nn09454412\nn09454744\nn09456207\nn09457979\nn09458269\nn09459979\nn09460046\nn09461069\nn09462600\nn09463226\nn09464486\nn09466678\nn09467696\nn09468604\nn09470027\nn09470222\nn09472413\nn09472597\nn09474010\nn09474412\nn09474765\nn09475044\nn09475179\nn09475925\nn09476123\nn09478210\nn09480959\nn09481120\nn09493983\nn09495962\nn09505153\nn09537660\nn09556121\nn09605110\nn09606009\nn09606527\nn09607630\nn09607782\nn09607903\nn09608709\nn09610255\nn09610405\nn09611722\nn09612700\nn09613118\nn09613191\nn09613690\nn09615336\nn09616573\nn09616922\nn09617161\nn09617435\nn09617577\nn09617696\nn09618760\nn09618880\nn09618957\nn09619168\nn09619452\nn09620078\nn09620794\nn09621232\nn09622049\nn09622302\nn09624168\nn09624559\nn09624899\nn09625401\nn09626238\nn09627807\nn09627906\nn09629065\nn09629246\nn09629752\nn09631129\nn09632274\nn09632518\nn09633969\nn09635534\nn09635635\nn09635973\nn09636339\nn09637339\nn09638454\nn09638875\nn09639382\nn09639919\nn09640327\nn09640715\nn09641002\nn09641578\nn09643799\nn09644152\nn09644657\nn09648743\nn09648911\nn09649067\nn09650729\nn09650839\nn09650989\nn09651123\nn09651968\nn09652149\nn09653144\nn09653438\nn09654079\nn09654518\nn09654898\nn09655213\nn09655466\nn09656077\nn09657206\nn09657748\nn09658254\nn09658398\nn09658815\nn09658921\nn09659039\nn09659188\n
n09660010\nn09660240\nn09661873\nn09662038\nn09662661\nn09662951\nn09663248\nn09663786\nn09663999\nn09664556\nn09664908\nn09665367\nn09665545\nn09666349\nn09666476\nn09666883\nn09667358\nn09668199\nn09668437\nn09668562\nn09668988\nn09669631\nn09670280\nn09670521\nn09670909\nn09671089\nn09672590\nn09672725\nn09672840\nn09673091\nn09674412\nn09674786\nn09675045\nn09675673\nn09675799\nn09675922\nn09676021\nn09676247\nn09676884\nn09677427\nn09678747\nn09679028\nn09679170\nn09679925\nn09680908\nn09681107\nn09681234\nn09681973\nn09683180\nn09683757\nn09683924\nn09684082\nn09684901\nn09685233\nn09685806\nn09686262\nn09686401\nn09688233\nn09688804\nn09689435\nn09689958\nn09690083\nn09690208\nn09690496\nn09690621\nn09690864\nn09691604\nn09691729\nn09691858\nn09692125\nn09692915\nn09693244\nn09693982\nn09694664\nn09694771\nn09695019\nn09695132\nn09695514\nn09695620\nn09695979\nn09696456\nn09696585\nn09696763\nn09697401\nn09697986\nn09698644\nn09699020\nn09699642\nn09700125\nn09700964\nn09701148\nn09701833\nn09702134\nn09702673\nn09703101\nn09703344\nn09703485\nn09703708\nn09703809\nn09703932\nn09704057\nn09704157\nn09704283\nn09705003\nn09705124\nn09705671\nn09705784\nn09706029\nn09706255\nn09707061\nn09707289\nn09707735\nn09708750\nn09708889\nn09709531\nn09709673\nn09710041\nn09710164\nn09710886\nn09711132\nn09711435\nn09712324\nn09712448\nn09712696\nn09712967\nn09713108\nn09714120\nn09714694\nn09715165\nn09715303\nn09715427\nn09716047\nn09716933\nn09717233\nn09718217\nn09718811\nn09718936\nn09719309\nn09719794\nn09720033\nn09720256\nn09720595\nn09720702\nn09720842\nn09721244\nn09721444\nn09722064\nn09722658\nn09722817\nn09723067\nn09723819\nn09723944\nn09724234\nn09724533\nn09724656\nn09724785\nn09725000\nn09725229\nn09725546\nn09725653\nn09725772\nn09725935\nn09726621\nn09726811\nn09727440\nn09727826\nn09728137\nn09728285\nn09729062\nn09729156\nn09730077\nn09730204\nn09730824\nn09731343\nn09731436\nn09731571\nn09732170\nn09733459\nn09733793\nn09734185\nn09734450\nn09734535
\nn09734639\nn09735258\nn09735654\nn09736485\nn09736798\nn09736945\nn09737050\nn09737161\nn09737453\nn09738121\nn09738400\nn09740724\nn09741074\nn09741331\nn09741722\nn09741816\nn09741904\nn09741999\nn09742101\nn09742315\nn09742927\nn09743487\nn09743601\nn09743792\nn09744161\nn09744346\nn09744462\nn09744679\nn09744834\nn09745229\nn09745324\nn09745834\nn09745933\nn09746936\nn09747191\nn09747495\nn09748101\nn09748408\nn09748648\nn09748889\nn09749386\nn09750282\nn09750641\nn09750770\nn09750891\nn09751076\nn09751496\nn09751622\nn09751895\nn09752023\nn09752519\nn09753348\nn09753792\nn09754152\nn09754217\nn09754633\nn09754907\nn09755086\nn09755241\nn09755555\nn09755788\nn09755893\nn09756049\nn09756195\nn09756961\nn09757449\nn09758173\nn09758885\nn09759501\nn09760290\nn09760609\nn09760913\nn09761068\nn09761753\nn09762011\nn09762385\nn09763272\nn09763784\nn09764201\nn09764598\nn09764732\nn09764900\nn09765118\nn09765278\nn09767197\nn09769076\nn09769525\nn09769929\nn09770179\nn09770359\nn09771435\nn09772330\nn09772746\nn09772930\nn09773962\nn09774167\nn09774783\nn09775907\nn09776346\nn09776642\nn09776807\nn09777870\nn09778266\nn09778537\nn09778783\nn09778927\nn09779124\nn09779280\nn09779461\nn09779790\nn09780395\nn09780828\nn09780984\nn09781398\nn09781504\nn09781650\nn09782167\nn09782397\nn09782855\nn09783537\nn09783776\nn09783884\nn09784043\nn09784160\nn09784564\nn09785236\nn09785659\nn09785891\nn09786115\nn09787534\nn09787765\nn09788073\nn09788237\nn09789150\nn09789566\nn09789898\nn09790047\nn09790482\nn09791014\nn09791419\nn09791816\nn09792125\nn09792555\nn09792969\nn09793141\nn09793352\nn09793946\nn09794550\nn09794668\nn09795010\nn09795124\nn09795334\nn09796809\nn09796974\nn09797742\nn09797873\nn09797998\nn09798096\nn09800469\nn09800964\nn09801102\nn09801275\nn09801533\nn09802445\nn09802641\nn09802951\nn09804230\nn09805151\nn09805324\nn09805475\nn09806944\nn09807075\nn09808080\nn09808591\nn09809279\nn09809538\nn09809749\nn09809925\nn09810166\nn09811568\nn09811712\nn098118
52\nn09813219\nn09814252\nn09814381\nn09814488\nn09814567\nn09814660\nn09815455\nn09815790\nn09816654\nn09816771\nn09817174\nn09817386\nn09818022\nn09819477\nn09820044\nn09820263\nn09821831\nn09822830\nn09823153\nn09823287\nn09823502\nn09823832\nn09824135\nn09824609\nn09825096\nn09825750\nn09826204\nn09826605\nn09826821\nn09827246\nn09827363\nn09828216\nn09828403\nn09828988\nn09830194\nn09830400\nn09830629\nn09830759\nn09830926\nn09831962\nn09832456\nn09832633\nn09832978\nn09833111\nn09833275\nn09833441\nn09833536\nn09833751\nn09833997\nn09834258\nn09834378\nn09834699\nn09834885\nn09835017\nn09835153\nn09835230\nn09835348\nn09835506\nn09836160\nn09836343\nn09836519\nn09836786\nn09837459\nn09837720\nn09838295\nn09838370\nn09838621\nn09839702\nn09840217\nn09840435\nn09840520\nn09841188\nn09841515\nn09841696\nn09842047\nn09842288\nn09842395\nn09842528\nn09842823\nn09843443\nn09843602\nn09843716\nn09843824\nn09844457\nn09844898\nn09845401\nn09845849\nn09846142\nn09846469\nn09846586\nn09846755\nn09846894\nn09847267\nn09847344\nn09847543\nn09848110\nn09848489\nn09849167\nn09849990\nn09850760\nn09850974\nn09851165\nn09851575\nn09853541\nn09853645\nn09853881\nn09854218\nn09854421\nn09854915\nn09855433\nn09856401\nn09856671\nn09856827\nn09857007\nn09858165\nn09858299\nn09858733\nn09859152\nn09859285\nn09859684\nn09859975\nn09861287\nn09861599\nn09861863\nn09861946\nn09862183\nn09862621\nn09863031\nn09863339\nn09863749\nn09863936\nn09864632\nn09864968\nn09865068\nn09865162\nn09865398\nn09865672\nn09865744\nn09866115\nn09866354\nn09866559\nn09866661\nn09866817\nn09866922\nn09867069\nn09867154\nn09867311\nn09868270\nn09868782\nn09868899\nn09869317\nn09869447\nn09869578\nn09870096\nn09871095\nn09871229\nn09871681\nn09871867\nn09871952\nn09872066\nn09872557\nn09873348\nn09873473\nn09873769\nn09873899\nn09874428\nn09874725\nn09874862\nn09875025\nn09875979\nn09876701\nn09877288\nn09877587\nn09877750\nn09877951\nn09878921\nn09879552\nn09880189\nn09880741\nn09881265\nn09881358\nn0988
1895\nn09883047\nn09883452\nn09883807\nn09885059\nn09885866\nn09886403\nn09886540\nn09888635\nn09889065\nn09889170\nn09889691\nn09889941\nn09890192\nn09890749\nn09891730\nn09892262\nn09892513\nn09892693\nn09893191\nn09893344\nn09893502\nn09893600\nn09894143\nn09894445\nn09894654\nn09894909\nn09895222\nn09895480\nn09895561\nn09895701\nn09895902\nn09896170\nn09896311\nn09896401\nn09896685\nn09896826\nn09898020\nn09899289\nn09899671\nn09899782\nn09899929\nn09901337\nn09901502\nn09901642\nn09901786\nn09901921\nn09902128\nn09902353\nn09902731\nn09902851\nn09902954\nn09903153\nn09903501\nn09903639\nn09903936\nn09904208\nn09904837\nn09905050\nn09905185\nn09905530\nn09906293\nn09906449\nn09906704\nn09907804\nn09908769\nn09909660\nn09909929\nn09910222\nn09910374\nn09910556\nn09910840\nn09911226\nn09912431\nn09912681\nn09912907\nn09912995\nn09913329\nn09913455\nn09913593\nn09915434\nn09915651\nn09916348\nn09917214\nn09917345\nn09917481\nn09917593\nn09918248\nn09918554\nn09918867\nn09919061\nn09919200\nn09919451\nn09919899\nn09920106\nn09920283\nn09920901\nn09921034\nn09923003\nn09923186\nn09923418\nn09923561\nn09923673\nn09923996\nn09924106\nn09924195\nn09924313\nn09924437\nn09924996\nn09927089\nn09927451\nn09928136\nn09928451\nn09928845\nn09929202\nn09929298\nn09929577\nn09930257\nn09930628\nn09930876\nn09931165\nn09931418\nn09931640\nn09932098\nn09932336\nn09932508\nn09932788\nn09933020\nn09933098\nn09933842\nn09933972\nn09934337\nn09934488\nn09934774\nn09935107\nn09935434\nn09936825\nn09936892\nn09937056\nn09937688\nn09937802\nn09937903\nn09938080\nn09938449\nn09938991\nn09940725\nn09940818\nn09941089\nn09941571\nn09941787\nn09941964\nn09942697\nn09942970\nn09943239\nn09943811\nn09944022\nn09944160\nn09944430\nn09945021\nn09945223\nn09945319\nn09945603\nn09945745\nn09946814\nn09947127\nn09950457\nn09950728\nn09951070\nn09951274\nn09951524\nn09951616\nn09952163\nn09953052\nn09953350\nn09953615\nn09954355\nn09954639\nn09955406\nn09955944\nn09956578\nn09957523\nn09958133\nn09
958292\nn09958447\nn09958569\nn09959142\nn09959658\nn09960688\nn09961198\nn09961331\nn09961469\nn09961605\nn09961739\nn09962966\nn09964202\nn09964411\nn09965515\nn09965787\nn09966470\nn09966554\nn09967063\nn09967406\nn09967555\nn09967816\nn09967967\nn09968259\nn09968652\nn09968741\nn09968845\nn09970088\nn09970192\nn09970402\nn09970822\nn09971273\nn09971385\nn09971839\nn09972010\nn09972458\nn09972587\nn09974648\nn09975425\nn09976024\nn09976283\nn09976429\nn09976728\nn09976917\nn09978442\nn09979321\nn09979913\nn09980458\nn09980805\nn09980985\nn09981092\nn09981278\nn09981540\nn09981939\nn09982152\nn09982525\nn09983314\nn09983572\nn09983889\nn09984960\nn09985470\nn09985809\nn09985978\nn09986450\nn09986700\nn09986904\nn09987045\nn09987161\nn09987239\nn09988063\nn09988311\nn09988493\nn09988703\nn09989502\nn09990415\nn09990690\nn09990777\nn09991740\nn09991867\nn09992538\nn09992837\nn09993252\nn09993651\nn09994400\nn09994673\nn09994808\nn09994878\nn09995829\nn09996039\nn09996304\nn09996481\nn09997622\nn09998788\nn09999135\nn10000294\nn10000459\nn10000787\nn10001217\nn10001481\nn10001764\nn10002257\nn10002760\nn10003476\nn10004718\nn10005006\nn10005934\nn10006177\nn10006748\nn10007684\nn10007809\nn10007995\nn10008123\nn10008254\nn10009162\nn10009276\nn10009484\nn10009671\nn10010062\nn10010243\nn10010632\nn10010767\nn10010864\nn10011360\nn10011486\nn10012484\nn10013811\nn10015215\nn10015485\nn10015792\nn10015897\nn10017272\nn10017422\nn10018747\nn10018861\nn10019072\nn10019187\nn10019406\nn10020366\nn10020533\nn10020670\nn10020807\nn10020890\nn10022908\nn10023264\nn10023506\nn10023656\nn10024025\nn10024362\nn10024937\nn10025060\nn10025295\nn10025391\nn10025635\nn10026976\nn10027246\nn10027590\nn10028402\nn10028541\nn10029068\nn10030277\nn10032987\nn10033412\nn10033572\nn10033663\nn10033888\nn10034201\nn10034614\nn10035952\nn10036266\nn10036444\nn10036692\nn10036929\nn10037080\nn10037385\nn10037588\nn10037922\nn10038119\nn10038409\nn10038620\nn10039271\nn10039946\nn10040240\nn
10040698\nn10040945\nn10041373\nn10041887\nn10042690\nn10042845\nn10043024\nn10043491\nn10043643\nn10044682\nn10044879\nn10047199\nn10047459\nn10048117\nn10048367\nn10048612\nn10048836\nn10049363\nn10050043\nn10050880\nn10051026\nn10051761\nn10051861\nn10051975\nn10052694\nn10053439\nn10053808\nn10054657\nn10055297\nn10055410\nn10055566\nn10055730\nn10055847\nn10056103\nn10056611\nn10056719\nn10057271\nn10058411\nn10058962\nn10059067\nn10060075\nn10060175\nn10060352\nn10061043\nn10061195\nn10061431\nn10061882\nn10062042\nn10062176\nn10062275\nn10062492\nn10062594\nn10062716\nn10062905\nn10062996\nn10063635\nn10063919\nn10064831\nn10064977\nn10065758\nn10066206\nn10066314\nn10067011\nn10067305\nn10067600\nn10067968\nn10068234\nn10068425\nn10069296\nn10069981\nn10070108\nn10070377\nn10070449\nn10070563\nn10070711\nn10071332\nn10071557\nn10072054\nn10074249\nn10074578\nn10074735\nn10074841\nn10075299\nn10075693\nn10076224\nn10076483\nn10076604\nn10076957\nn10077106\nn10077593\nn10077879\nn10078131\nn10078719\nn10078806\nn10079399\nn10079893\nn10080117\nn10080508\nn10080869\nn10081204\nn10081842\nn10082043\nn10082299\nn10082423\nn10082562\nn10082687\nn10082997\nn10083677\nn10083823\nn10084043\nn10084295\nn10085101\nn10085869\nn10086383\nn10086744\nn10087434\nn10087736\nn10088200\nn10090745\nn10091349\nn10091450\nn10091564\nn10091651\nn10091861\nn10091997\nn10092488\nn10092643\nn10092794\nn10092978\nn10093167\nn10093475\nn10093818\nn10094320\nn10094584\nn10094782\nn10095265\nn10095420\nn10095769\nn10095869\nn10096126\nn10096508\nn10097262\nn10097477\nn10097590\nn10097842\nn10097995\nn10098245\nn10098388\nn10098517\nn10098624\nn10098710\nn10098862\nn10099002\nn10099375\nn10101308\nn10101634\nn10101981\nn10102800\nn10103155\nn10103228\nn10103921\nn10104064\nn10104487\nn10104756\nn10104888\nn10105085\nn10105733\nn10105906\nn10106387\nn10106509\nn10106995\nn10107173\nn10107303\nn10108018\nn10108089\nn10108464\nn10108832\nn10109443\nn10109662\nn10109826\nn10110093\nn10110731\
nn10110893\nn10111358\nn10111779\nn10111903\nn10112129\nn10113249\nn10113583\nn10113869\nn10114476\nn10114550\nn10114662\nn10115430\nn10115946\nn10116370\nn10116478\nn10116702\nn10117017\nn10117267\nn10117415\nn10117739\nn10117851\nn10118301\nn10118743\nn10118844\nn10119609\nn10120330\nn10120671\nn10121026\nn10121246\nn10121714\nn10121800\nn10122300\nn10122531\nn10123122\nn10123844\nn10126177\nn10126424\nn10126708\nn10127186\nn10127689\nn10128519\nn10128748\nn10129338\nn10129825\nn10130686\nn10130877\nn10131151\nn10131268\nn10131590\nn10131815\nn10132035\nn10132502\nn10134178\nn10134396\nn10134760\nn10134982\nn10135129\nn10135197\nn10135297\nn10136615\nn10136959\nn10137825\nn10138369\nn10138472\nn10139077\nn10139651\nn10140051\nn10140597\nn10140683\nn10140783\nn10140929\nn10141364\nn10141732\nn10142166\nn10142391\nn10142537\nn10142747\nn10142946\nn10143172\nn10143595\nn10143725\nn10144338\nn10145239\nn10145340\nn10145480\nn10145590\nn10145774\nn10145902\nn10146002\nn10146104\nn10146416\nn10146816\nn10146927\nn10147121\nn10147262\nn10147710\nn10147935\nn10148035\nn10148305\nn10148825\nn10149436\nn10149867\nn10150071\nn10150794\nn10150940\nn10151133\nn10151261\nn10151367\nn10151570\nn10151760\nn10152306\nn10152616\nn10152763\nn10153155\nn10153414\nn10153594\nn10153865\nn10154013\nn10154186\nn10154601\nn10155222\nn10155600\nn10155849\nn10156629\nn10156831\nn10157016\nn10157128\nn10157271\nn10158506\nn10159045\nn10159289\nn10159533\nn10160188\nn10160280\nn10160412\nn10161622\nn10162016\nn10162194\nn10162354\nn10164025\nn10164233\nn10164492\nn10165448\nn10166189\nn10166394\nn10167152\nn10167361\nn10167565\nn10167838\nn10168012\nn10168183\nn10168584\nn10168837\nn10169147\nn10169241\nn10169419\nn10169796\nn10170060\nn10170681\nn10170866\nn10171219\nn10171456\nn10171567\nn10172080\nn10173410\nn10173579\nn10173665\nn10173771\nn10174253\nn10174330\nn10174445\nn10174589\nn10174695\nn10174971\nn10175248\nn10175725\nn10176913\nn10177150\nn10178077\nn10178216\nn10179069\nn1018058
0\nn10180791\nn10180923\nn10181445\nn10181547\nn10181799\nn10181878\nn10182190\nn10182402\nn10183347\nn10183931\nn10184505\nn10185148\nn10185483\nn10185793\nn10186068\nn10186143\nn10186216\nn10186350\nn10186686\nn10186774\nn10187130\nn10187491\nn10187990\nn10188715\nn10188856\nn10188957\nn10189278\nn10189597\nn10190122\nn10190516\nn10191001\nn10191388\nn10191613\nn10192839\nn10193650\nn10194231\nn10194775\nn10195056\nn10195155\nn10195261\nn10195593\nn10196404\nn10196725\nn10197392\nn10198437\nn10198832\nn10199251\nn10200246\nn10200781\nn10202225\nn10202624\nn10202763\nn10203949\nn10204177\nn10204833\nn10205231\nn10205344\nn10205457\nn10205714\nn10206173\nn10206506\nn10206629\nn10207077\nn10207169\nn10208189\nn10208847\nn10208950\nn10209082\nn10209731\nn10210137\nn10210512\nn10210648\nn10210911\nn10211036\nn10211666\nn10211830\nn10212231\nn10212501\nn10212780\nn10213034\nn10213429\nn10214062\nn10214390\nn10215623\nn10216106\nn10216403\nn10217208\nn10218043\nn10218164\nn10218292\nn10219240\nn10219453\nn10219879\nn10220080\nn10220924\nn10221312\nn10221520\nn10222170\nn10222259\nn10222497\nn10222716\nn10223069\nn10223177\nn10223606\nn10224578\nn10225219\nn10225931\nn10226413\nn10227166\nn10227266\nn10227393\nn10227490\nn10227698\nn10227793\nn10227985\nn10228278\nn10228468\nn10228592\nn10228712\nn10229883\nn10230216\nn10233248\nn10235024\nn10235269\nn10235385\nn10236304\nn10236521\nn10236842\nn10237069\nn10237196\nn10237464\nn10237556\nn10237676\nn10237799\nn10238272\nn10238375\nn10239928\nn10240082\nn10240235\nn10240417\nn10240821\nn10241024\nn10241300\nn10242328\nn10243137\nn10243273\nn10243483\nn10243664\nn10243872\nn10244108\nn10244359\nn10244913\nn10245029\nn10245341\nn10245507\nn10245639\nn10245863\nn10246317\nn10246395\nn10246703\nn10247358\nn10247880\nn10248008\nn10248198\nn10248377\nn10249191\nn10249270\nn10249459\nn10249869\nn10249950\nn10250712\nn10251329\nn10251612\nn10252075\nn10252222\nn10252354\nn10252547\nn10253122\nn10253296\nn10253479\nn10253611\nn10253
703\nn10255459\nn10257221\nn10258602\nn10258786\nn10259348\nn10259780\nn10259997\nn10260473\nn10260706\nn10260800\nn10261211\nn10261511\nn10261624\nn10261862\nn10262343\nn10262445\nn10262561\nn10262655\nn10262880\nn10263146\nn10263411\nn10263790\nn10265281\nn10265801\nn10265891\nn10266016\nn10266328\nn10266848\nn10267166\nn10267311\nn10267865\nn10268629\nn10269199\nn10269289\nn10271677\nn10272782\nn10272913\nn10273064\nn10274173\nn10274318\nn10274815\nn10275249\nn10275395\nn10275848\nn10276045\nn10276477\nn10276942\nn10277027\nn10277638\nn10277815\nn10277912\nn10278456\nn10279018\nn10279778\nn10280034\nn10280130\nn10280598\nn10280674\nn10281546\nn10281770\nn10281896\nn10282482\nn10282672\nn10283170\nn10283366\nn10283546\nn10284064\nn10284871\nn10284965\nn10286282\nn10286539\nn10286749\nn10288964\nn10289039\nn10289176\nn10289462\nn10289766\nn10290422\nn10290541\nn10290813\nn10290919\nn10291110\nn10291469\nn10291822\nn10291942\nn10292316\nn10293332\nn10293590\nn10293861\nn10294020\nn10294139\nn10295371\nn10295479\nn10296176\nn10296444\nn10297234\nn10297367\nn10297531\nn10297841\nn10298202\nn10298271\nn10298647\nn10298912\nn10299125\nn10299250\nn10299700\nn10299875\nn10300041\nn10300154\nn10300303\nn10300500\nn10300654\nn10300829\nn10302576\nn10302700\nn10302905\nn10303037\nn10303814\nn10304086\nn10304650\nn10304914\nn10305635\nn10305802\nn10306004\nn10306279\nn10306496\nn10306595\nn10306890\nn10307114\nn10308066\nn10308168\nn10308275\nn10308504\nn10308653\nn10308732\nn10310783\nn10311506\nn10311661\nn10312287\nn10312491\nn10312600\nn10313000\nn10313239\nn10313441\nn10313724\nn10314054\nn10314182\nn10314517\nn10314836\nn10315217\nn10315456\nn10315561\nn10315730\nn10316360\nn10316527\nn10316862\nn10317007\nn10317500\nn10317963\nn10318293\nn10318607\nn10318686\nn10319313\nn10320484\nn10320863\nn10321126\nn10321340\nn10321632\nn10321882\nn10322238\nn10323634\nn10323752\nn10323999\nn10324560\nn10325549\nn10325774\nn10326776\nn10327143\nn10327987\nn10328123\nn10328328\nn103
28437\nn10328696\nn10328941\nn10329035\nn10330593\nn10330931\nn10331098\nn10331167\nn10331258\nn10331347\nn10331841\nn10332110\nn10332385\nn10332861\nn10332953\nn10333044\nn10333165\nn10333317\nn10333439\nn10333601\nn10333838\nn10334009\nn10334461\nn10334782\nn10335246\nn10335801\nn10335931\nn10336411\nn10336904\nn10337488\nn10338231\nn10338391\nn10339179\nn10339251\nn10339717\nn10340312\nn10341243\nn10341343\nn10341446\nn10341573\nn10341955\nn10342180\nn10342367\nn10342543\nn10342893\nn10342992\nn10343088\nn10343355\nn10343449\nn10343554\nn10343869\nn10344121\nn10344203\nn10344319\nn10344656\nn10344774\nn10345015\nn10345100\nn10345302\nn10345422\nn10345659\nn10346015\nn10347204\nn10347446\nn10348526\nn10349243\nn10349750\nn10349836\nn10350220\nn10350774\nn10351064\nn10353016\nn10353355\nn10353928\nn10354265\nn10354754\nn10355142\nn10355306\nn10355449\nn10355688\nn10355806\nn10356450\nn10356877\nn10357012\nn10357613\nn10357737\nn10358032\nn10358124\nn10358575\nn10359117\nn10359422\nn10359546\nn10359659\nn10360366\nn10360747\nn10361060\nn10361194\nn10361296\nn10361525\nn10362003\nn10362319\nn10362557\nn10363445\nn10363573\nn10364198\nn10364502\nn10365514\nn10366145\nn10366276\nn10366966\nn10368291\nn10368528\nn10368624\nn10368711\nn10368798\nn10369095\nn10369317\nn10369417\nn10369528\nn10369699\nn10369955\nn10370381\nn10370955\nn10371052\nn10371221\nn10371330\nn10371450\nn10373390\nn10373525\nn10374541\nn10374849\nn10374943\nn10375052\nn10375314\nn10375402\nn10376523\nn10376890\nn10377021\nn10377185\nn10377291\nn10377542\nn10377633\nn10378026\nn10378113\nn10378780\nn10379376\nn10380126\nn10380499\nn10380672\nn10381804\nn10381981\nn10382157\nn10382302\nn10382480\nn10382710\nn10382825\nn10383094\nn10383237\nn10383505\nn10383816\nn10384214\nn10384392\nn10384496\nn10385566\nn10386196\nn10386754\nn10386874\nn10386984\nn10387196\nn10387324\nn10387836\nn10389865\nn10389976\nn10390600\nn10390698\nn10390807\nn10391416\nn10393909\nn10394434\nn10394786\nn10395073\nn10395209\nn1
0395390\nn10395828\nn10396106\nn10396337\nn10396727\nn10396908\nn10397001\nn10397142\nn10397392\nn10399130\nn10400003\nn10400108\nn10400205\nn10400437\nn10400618\nn10400998\nn10401204\nn10401331\nn10401639\nn10402709\nn10402824\nn10403633\nn10403876\nn10404426\nn10404998\nn10405540\nn10405694\nn10406266\nn10406391\nn10406765\nn10407310\nn10407954\nn10408809\nn10409459\nn10409752\nn10410246\nn10410996\nn10411356\nn10411551\nn10411867\nn10414239\nn10414768\nn10414865\nn10415037\nn10416567\nn10417288\nn10417424\nn10417551\nn10417682\nn10417843\nn10417969\nn10418101\nn10418735\nn10419047\nn10419472\nn10419630\nn10419785\nn10420031\nn10420277\nn10420507\nn10420649\nn10421016\nn10421470\nn10421956\nn10422405\nn10425946\nn10426454\nn10426630\nn10427223\nn10427359\nn10427764\nn10428004\nn10431122\nn10431625\nn10432189\nn10432441\nn10432875\nn10432957\nn10433077\nn10433452\nn10433610\nn10433737\nn10435169\nn10435251\nn10435716\nn10435988\nn10436334\nn10437014\nn10437137\nn10437262\nn10437698\nn10438172\nn10438619\nn10438842\nn10439373\nn10439523\nn10439727\nn10439851\nn10441037\nn10441124\nn10441694\nn10441962\nn10442093\nn10442232\nn10442417\nn10442573\nn10443032\nn10443659\nn10443830\nn10444194\nn10448322\nn10448455\nn10449664\nn10450038\nn10450161\nn10450303\nn10451450\nn10451590\nn10451858\nn10453184\nn10455619\nn10456070\nn10456138\nn10456696\nn10457214\nn10457444\nn10457903\nn10458111\nn10458356\nn10458596\nn10459882\nn10460033\nn10461060\nn10462588\nn10462751\nn10462860\nn10464052\nn10464542\nn10464711\nn10464870\nn10465002\nn10465451\nn10465831\nn10466198\nn10466564\nn10466918\nn10467179\nn10467395\nn10468750\nn10469611\nn10469874\nn10470779\nn10471640\nn10471732\nn10471859\nn10472129\nn10472447\nn10473453\nn10473562\nn10473789\nn10473917\nn10474064\nn10474343\nn10474446\nn10474645\nn10475835\nn10475940\nn10476467\nn10477713\nn10477955\nn10478118\nn10478293\nn10478462\nn10478827\nn10478960\nn10479135\nn10479328\nn10481167\nn10481268\nn10482054\nn10482220\nn10482587\n
n10482921\nn10483138\nn10483395\nn10483799\nn10483890\nn10484858\nn10485298\nn10485883\nn10486166\nn10486236\nn10486561\nn10487182\nn10487363\nn10487592\nn10488016\nn10488309\nn10488656\nn10489426\nn10490421\nn10491998\nn10492086\nn10492727\nn10493199\nn10493419\nn10493685\nn10493835\nn10493922\nn10494195\nn10494373\nn10495167\nn10495421\nn10495555\nn10495756\nn10496393\nn10496489\nn10497135\nn10497534\nn10497645\nn10498046\nn10498699\nn10498816\nn10498986\nn10499110\nn10499232\nn10499355\nn10499631\nn10499857\nn10500217\nn10500419\nn10500603\nn10500824\nn10500942\nn10501453\nn10501635\nn10502046\nn10502329\nn10502950\nn10503818\nn10504090\nn10504206\nn10505347\nn10505613\nn10505732\nn10505942\nn10506336\nn10506544\nn10506915\nn10507070\nn10507380\nn10507482\nn10507565\nn10507692\nn10508141\nn10508379\nn10508710\nn10509063\nn10509161\nn10509810\nn10510245\nn10510974\nn10511771\nn10512201\nn10512372\nn10512708\nn10512859\nn10513509\nn10513823\nn10513938\nn10514051\nn10514121\nn10514255\nn10514429\nn10514784\nn10515863\nn10516527\nn10517137\nn10517283\nn10518349\nn10519126\nn10519494\nn10519984\nn10520286\nn10520544\nn10520964\nn10521100\nn10521662\nn10521853\nn10522035\nn10522324\nn10522759\nn10523341\nn10524076\nn10524223\nn10524869\nn10525134\nn10525436\nn10525617\nn10525878\nn10526534\nn10527147\nn10527334\nn10528023\nn10528148\nn10528493\nn10529231\nn10530150\nn10530383\nn10530571\nn10530959\nn10531109\nn10531445\nn10531838\nn10533874\nn10533983\nn10536134\nn10536274\nn10536416\nn10537708\nn10537906\nn10538629\nn10538733\nn10538853\nn10539015\nn10539160\nn10539278\nn10540114\nn10540252\nn10540656\nn10541833\nn10542608\nn10542761\nn10542888\nn10543161\nn10543937\nn10544232\nn10544748\nn10545792\nn10546428\nn10546633\nn10548419\nn10548537\nn10548681\nn10549510\nn10550252\nn10550369\nn10550468\nn10551576\nn10552393\nn10553140\nn10553235\nn10554024\nn10554141\nn10554846\nn10555059\nn10555430\nn10556033\nn10556518\nn10556704\nn10556825\nn10557246\nn10557854\nn10559009
\nn10559288\nn10559508\nn10559683\nn10559996\nn10560106\nn10560637\nn10561222\nn10561320\nn10561736\nn10562135\nn10562283\nn10562509\nn10562968\nn10563314\nn10563403\nn10563711\nn10564098\nn10565502\nn10565667\nn10566072\nn10567613\nn10567722\nn10567848\nn10568200\nn10568358\nn10568443\nn10568608\nn10568915\nn10569011\nn10569179\nn10570019\nn10570704\nn10571907\nn10572706\nn10572889\nn10573957\nn10574311\nn10574538\nn10574840\nn10575463\nn10575594\nn10575787\nn10576223\nn10576316\nn10576676\nn10576818\nn10576962\nn10577182\nn10577284\nn10577710\nn10577820\nn10578021\nn10578162\nn10578471\nn10578656\nn10579062\nn10579549\nn10580030\nn10580437\nn10580535\nn10581648\nn10581890\nn10582604\nn10582746\nn10583387\nn10583790\nn10585077\nn10585217\nn10585628\nn10586166\nn10586265\nn10586444\nn10586903\nn10586998\nn10588074\nn10588357\nn10588724\nn10588965\nn10589666\nn10590146\nn10590239\nn10590452\nn10590903\nn10591072\nn10591811\nn10592049\nn10592811\nn10593521\nn10594147\nn10594523\nn10594857\nn10595164\nn10595647\nn10596517\nn10596899\nn10597505\nn10597745\nn10597889\nn10598013\nn10598181\nn10598459\nn10598904\nn10599215\nn10599806\nn10601234\nn10601362\nn10602119\nn10602470\nn10602985\nn10603528\nn10603851\nn10604275\nn10604380\nn10604634\nn10604880\nn10604979\nn10605253\nn10605737\nn10607291\nn10607478\nn10609092\nn10609198\nn10610465\nn10610850\nn10611267\nn10611613\nn10612210\nn10612373\nn10612518\nn10613996\nn10614507\nn10614629\nn10615179\nn10615334\nn10616578\nn10617024\nn10617193\nn10617397\nn10618234\nn10618342\nn10618465\nn10618685\nn10618848\nn10619492\nn10619642\nn10619888\nn10620212\nn10620586\nn10620758\nn10621294\nn10621400\nn10621514\nn10622053\nn10624074\nn10624310\nn10624437\nn10624540\nn10625860\nn10626630\nn10627252\nn10628097\nn10628644\nn10629329\nn10629647\nn10629939\nn10630093\nn10630188\nn10631131\nn10631309\nn10631654\nn10632576\nn10633298\nn10633450\nn10634464\nn10634849\nn10634990\nn10635788\nn10636488\nn10637483\nn10638922\nn10639238\nn106393
59\nn10639637\nn10639817\nn10641223\nn10642596\nn10642705\nn10643095\nn10643837\nn10643937\nn10644598\nn10645017\nn10645223\nn10646032\nn10646140\nn10646433\nn10646641\nn10646780\nn10646942\nn10647745\nn10648237\nn10648696\nn10649197\nn10649308\nn10650162\nn10652605\nn10652703\nn10654015\nn10654211\nn10654321\nn10654827\nn10654932\nn10655169\nn10655442\nn10655594\nn10655730\nn10655986\nn10656120\nn10656223\nn10656969\nn10657306\nn10657556\nn10657835\nn10658304\nn10659042\nn10659762\nn10660128\nn10660621\nn10660883\nn10661002\nn10661216\nn10661563\nn10661732\nn10663315\nn10663549\nn10665302\nn10665587\nn10665698\nn10666752\nn10667477\nn10667709\nn10667863\nn10668450\nn10668666\nn10669991\nn10671042\nn10671613\nn10671736\nn10671898\nn10672371\nn10672540\nn10672662\nn10673296\nn10673776\nn10674130\nn10674713\nn10675010\nn10675142\nn10675609\nn10676018\nn10676434\nn10676569\nn10678937\nn10679174\nn10679503\nn10679610\nn10679723\nn10680609\nn10680796\nn10681194\nn10681557\nn10682713\nn10682953\nn10683675\nn10684146\nn10684630\nn10684827\nn10685398\nn10686073\nn10686517\nn10686694\nn10686885\nn10688356\nn10688811\nn10689306\nn10690268\nn10690421\nn10690648\nn10691318\nn10691937\nn10692090\nn10692482\nn10692883\nn10693235\nn10693334\nn10693824\nn10694258\nn10694939\nn10695450\nn10696101\nn10696508\nn10697135\nn10697282\nn10698368\nn10699558\nn10699752\nn10699981\nn10700105\nn10700201\nn10700640\nn10700963\nn10701180\nn10701644\nn10701962\nn10702167\nn10702615\nn10703221\nn10703336\nn10703480\nn10703692\nn10704238\nn10704712\nn10704886\nn10705448\nn10705615\nn10706812\nn10707134\nn10707233\nn10707707\nn10708292\nn10708454\nn10709529\nn10710171\nn10710259\nn10710778\nn10710913\nn10711483\nn10711766\nn10712229\nn10712374\nn10712474\nn10712690\nn10712835\nn10713254\nn10713686\nn10713843\nn10714195\nn10715030\nn10715347\nn10715789\nn10716576\nn10716864\nn10717055\nn10717196\nn10717337\nn10718131\nn10718349\nn10718509\nn10718665\nn10718952\nn10719036\nn10719132\nn10719267\nn1071
9807\nn10720197\nn10720453\nn10720964\nn10721124\nn10721321\nn10721612\nn10721708\nn10721819\nn10722029\nn10722575\nn10722965\nn10723230\nn10723597\nn10724132\nn10724372\nn10724570\nn10725280\nn10726031\nn10726786\nn10727016\nn10727171\nn10727458\nn10728117\nn10728233\nn10728624\nn10728998\nn10729330\nn10730542\nn10730728\nn10731013\nn10731732\nn10732010\nn10732521\nn10732854\nn10732967\nn10733820\nn10734394\nn10734741\nn10734891\nn10734963\nn10735173\nn10735298\nn10735984\nn10737103\nn10737264\nn10738111\nn10738215\nn10738670\nn10738871\nn10739135\nn10739297\nn10739391\nn10740594\nn10740732\nn10740868\nn10741152\nn10741367\nn10741493\nn10742005\nn10742111\nn10742546\nn10742997\nn10743124\nn10743356\nn10744078\nn10744164\nn10745006\nn10745770\nn10746931\nn10747119\nn10747424\nn10747548\nn10747965\nn10748142\nn10748506\nn10748620\nn10749928\nn10750031\nn10750188\nn10750640\nn10751026\nn10751152\nn10751265\nn10751710\nn10752480\nn10753061\nn10753182\nn10753339\nn10753442\nn10753989\nn10754189\nn10754281\nn10754449\nn10755080\nn10755164\nn10755394\nn10755648\nn10756061\nn10756148\nn10756261\nn10756641\nn10756837\nn10757050\nn10757492\nn10758337\nn10758445\nn10758949\nn10759151\nn10759331\nn10759982\nn10760199\nn10760622\nn10760951\nn10761190\nn10761326\nn10761519\nn10762212\nn10762480\nn10763075\nn10763245\nn10763383\nn10763620\nn10764465\nn10764622\nn10764719\nn10765305\nn10765587\nn10765679\nn10765885\nn10766260\nn10768148\nn10768272\nn10768903\nn10769084\nn10769188\nn10769321\nn10769459\nn10771066\nn10772092\nn10772580\nn10772937\nn10773665\nn10773800\nn10774329\nn10774756\nn10775003\nn10775128\nn10776052\nn10776339\nn10776887\nn10777299\nn10778044\nn10778148\nn10778711\nn10778999\nn10779610\nn10779897\nn10779995\nn10780284\nn10780632\nn10781236\nn10781817\nn10782362\nn10782471\nn10782791\nn10782940\nn10783240\nn10783539\nn10783646\nn10783734\nn10784113\nn10784544\nn10784922\nn10785480\nn10787470\nn10788852\nn10789415\nn10789709\nn10791115\nn10791221\nn10791820\nn10
791890\nn10792335\nn10792506\nn10792856\nn10793570\nn10793799\nn10794014\nn10801561\nn10801802\nn10802507\nn10802621\nn10802953\nn10803031\nn10803282\nn10803978\nn10804287\nn10804636\nn10804732\nn10805501\nn10806113\nn10994097\nn11100798\nn11196627\nn11242849\nn11318824\nn11346873\nn11448153\nn11487732\nn11508382\nn11511327\nn11524451\nn11530008\nn11531193\nn11531334\nn11532682\nn11533212\nn11533999\nn11536567\nn11536673\nn11537327\nn11539289\nn11542137\nn11542640\nn11544015\nn11545350\nn11545524\nn11545714\nn11547562\nn11547855\nn11548728\nn11548870\nn11549009\nn11549245\nn11549779\nn11549895\nn11552133\nn11552386\nn11552594\nn11552806\nn11552976\nn11553240\nn11553522\nn11596108\nn11597657\nn11598287\nn11598686\nn11598886\nn11599324\nn11600372\nn11601177\nn11601333\nn11601918\nn11602091\nn11602478\nn11602873\nn11603246\nn11603462\nn11603835\nn11604046\nn11608250\nn11609475\nn11609684\nn11609862\nn11610047\nn11610215\nn11610437\nn11610602\nn11610823\nn11611087\nn11611233\nn11611356\nn11611561\nn11611758\nn11612018\nn11612235\nn11612349\nn11612575\nn11612923\nn11613219\nn11613459\nn11613692\nn11613867\nn11614039\nn11614250\nn11614420\nn11614713\nn11615026\nn11615259\nn11615387\nn11615607\nn11615812\nn11615967\nn11616260\nn11616486\nn11616662\nn11616852\nn11617090\nn11617272\nn11617631\nn11617878\nn11618079\nn11618290\nn11618525\nn11618861\nn11619227\nn11619455\nn11619687\nn11619845\nn11620016\nn11620389\nn11620673\nn11621029\nn11621281\nn11621547\nn11621727\nn11621950\nn11622184\nn11622368\nn11622591\nn11622771\nn11623105\nn11623815\nn11623967\nn11624192\nn11624531\nn11625003\nn11625223\nn11625391\nn11625632\nn11625804\nn11626010\nn11626152\nn11626409\nn11626585\nn11626826\nn11627168\nn11627512\nn11627714\nn11627908\nn11628087\nn11628456\nn11628793\nn11629047\nn11629354\nn11630017\nn11630489\nn11631159\nn11631405\nn11631619\nn11631854\nn11631985\nn11632167\nn11632376\nn11632619\nn11632929\nn11633284\nn11634736\nn11635152\nn11635433\nn11635830\nn11636204\nn11636835\nn
11639084\nn11639306\nn11639445\nn11640132\nn11643835\nn11644046\nn11644226\nn11644462\nn11644872\nn11645163\nn11645590\nn11645914\nn11646167\nn11646344\nn11646517\nn11646694\nn11646955\nn11647306\nn11647703\nn11647868\nn11648039\nn11648268\nn11648776\nn11649150\nn11649359\nn11649878\nn11650160\nn11650307\nn11650430\nn11650558\nn11650759\nn11652039\nn11652217\nn11652376\nn11652578\nn11652753\nn11652966\nn11653126\nn11653570\nn11653904\nn11654293\nn11654438\nn11654984\nn11655152\nn11655592\nn11655974\nn11656123\nn11656549\nn11656771\nn11657585\nn11658331\nn11658544\nn11658709\nn11659248\nn11659627\nn11660300\nn11661372\nn11661909\nn11662128\nn11662371\nn11662585\nn11662937\nn11663263\nn11664418\nn11665372\nn11666854\nn11668117\nn11669786\nn11669921\nn11672269\nn11672400\nn11674019\nn11674332\nn11675025\nn11675404\nn11675738\nn11676500\nn11676743\nn11676850\nn11677485\nn11677902\nn11678010\nn11678299\nn11678377\nn11679378\nn11680457\nn11680596\nn11682659\nn11683216\nn11683838\nn11684264\nn11684499\nn11684654\nn11685091\nn11685621\nn11686195\nn11686652\nn11686780\nn11686912\nn11687071\nn11687432\nn11687789\nn11687964\nn11688069\nn11688378\nn11689197\nn11689367\nn11689483\nn11689678\nn11689815\nn11689957\nn11690088\nn11690254\nn11690455\nn11691046\nn11691857\nn11692265\nn11692792\nn11693981\nn11694300\nn11694469\nn11694664\nn11694866\nn11695085\nn11695285\nn11695599\nn11695974\nn11696450\nn11696935\nn11697560\nn11697802\nn11698042\nn11698245\nn11699442\nn11699751\nn11700058\nn11700279\nn11700864\nn11701066\nn11701302\nn11702713\nn11703669\nn11704093\nn11704620\nn11704791\nn11705171\nn11705387\nn11705573\nn11705776\nn11706325\nn11706761\nn11706942\nn11707229\nn11707827\nn11708658\nn11708857\nn11709045\nn11709205\nn11709674\nn11710136\nn11710393\nn11710658\nn11710827\nn11710987\nn11711289\nn11711537\nn11711764\nn11711971\nn11712282\nn11713164\nn11713370\nn11713763\nn11714382\nn11715430\nn11715678\nn11716698\nn11717399\nn11717577\nn11718296\nn11718681\nn11719286\nn11720353\
nn11720643\nn11720891\nn11721337\nn11721642\nn11722036\nn11722342\nn11722466\nn11722621\nn11722982\nn11723227\nn11723452\nn11723770\nn11723986\nn11724109\nn11724660\nn11725015\nn11725311\nn11725480\nn11725623\nn11725821\nn11725973\nn11726145\nn11726269\nn11726433\nn11726707\nn11727091\nn11727358\nn11727540\nn11727738\nn11728099\nn11728769\nn11728945\nn11729142\nn11729478\nn11729860\nn11730015\nn11730458\nn11730602\nn11730750\nn11730933\nn11731157\nn11731659\nn11732052\nn11732567\nn11733054\nn11733312\nn11733548\nn11734493\nn11734698\nn11735053\nn11735570\nn11735977\nn11736362\nn11736694\nn11736851\nn11737009\nn11737125\nn11737534\nn11738547\nn11738997\nn11739365\nn11739978\nn11740414\nn11741175\nn11741350\nn11741575\nn11741797\nn11742310\nn11742878\nn11744011\nn11744108\nn11744471\nn11745817\nn11746600\nn11747468\nn11748002\nn11748811\nn11749112\nn11749603\nn11750173\nn11750508\nn11750989\nn11751765\nn11751974\nn11752578\nn11752798\nn11752937\nn11753143\nn11753355\nn11753562\nn11753700\nn11754893\nn11756092\nn11756329\nn11756669\nn11756870\nn11757017\nn11757190\nn11757653\nn11757851\nn11758122\nn11758276\nn11758483\nn11758799\nn11759224\nn11759404\nn11759609\nn11759853\nn11760785\nn11761202\nn11761650\nn11761836\nn11762018\nn11762433\nn11762927\nn11763142\nn11763625\nn11763874\nn11764478\nn11764814\nn11765568\nn11766046\nn11766189\nn11766432\nn11767354\nn11767877\nn11768816\nn11769176\nn11769621\nn11769803\nn11770256\nn11771147\nn11771539\nn11771746\nn11771924\nn11772408\nn11772879\nn11773408\nn11773628\nn11773987\nn11774513\nn11774972\nn11775340\nn11775626\nn11776234\nn11777080\nn11778092\nn11778257\nn11779300\nn11780148\nn11780424\nn11781176\nn11782036\nn11782266\nn11782761\nn11782878\nn11783162\nn11783920\nn11784126\nn11784497\nn11785276\nn11785668\nn11785875\nn11786131\nn11786539\nn11786843\nn11787190\nn11788039\nn11788727\nn11789066\nn11789438\nn11789589\nn11789962\nn11790089\nn11790788\nn11790936\nn11791341\nn11791569\nn11792029\nn11792341\nn11792742\nn1179340
3\nn11793779\nn11794024\nn11794139\nn11794519\nn11795049\nn11795216\nn11795580\nn11796005\nn11796188\nn11797321\nn11797508\nn11797981\nn11798270\nn11798496\nn11798688\nn11798978\nn11799331\nn11799732\nn11800236\nn11800565\nn11801392\nn11801665\nn11801891\nn11802410\nn11802586\nn11802800\nn11802995\nn11805255\nn11805544\nn11805956\nn11806219\nn11806369\nn11806521\nn11806679\nn11806814\nn11807108\nn11807525\nn11807696\nn11807979\nn11808299\nn11808468\nn11808721\nn11808932\nn11809094\nn11809271\nn11809437\nn11809594\nn11809754\nn11810030\nn11810358\nn11811059\nn11811473\nn11811706\nn11811921\nn11812094\nn11812910\nn11813077\nn11814584\nn11814996\nn11815491\nn11815721\nn11815918\nn11816121\nn11816336\nn11816649\nn11816829\nn11817160\nn11817501\nn11817914\nn11818069\nn11818636\nn11819509\nn11819912\nn11820965\nn11821184\nn11822300\nn11823043\nn11823305\nn11823436\nn11823756\nn11824146\nn11824344\nn11824747\nn11825351\nn11825749\nn11826198\nn11826569\nn11827541\nn11828577\nn11828973\nn11829205\nn11829672\nn11829922\nn11830045\nn11830252\nn11830400\nn11830714\nn11830906\nn11831100\nn11831297\nn11831521\nn11832214\nn11832480\nn11832671\nn11832899\nn11833373\nn11833749\nn11834272\nn11834654\nn11834890\nn11835251\nn11836327\nn11836722\nn11837204\nn11837351\nn11837562\nn11837743\nn11837970\nn11838413\nn11838916\nn11839460\nn11839568\nn11839823\nn11840067\nn11840246\nn11840476\nn11840764\nn11841247\nn11843441\nn11844371\nn11844892\nn11845557\nn11845793\nn11845913\nn11846312\nn11846425\nn11846765\nn11847169\nn11848479\nn11848867\nn11849271\nn11849467\nn11849871\nn11849983\nn11850521\nn11850918\nn11851258\nn11851578\nn11851839\nn11852028\nn11852148\nn11852531\nn11853079\nn11853356\nn11853813\nn11854479\nn11855274\nn11855435\nn11855553\nn11855842\nn11856573\nn11857696\nn11857875\nn11858077\nn11858703\nn11858814\nn11859275\nn11859472\nn11859737\nn11860208\nn11860555\nn11861238\nn11861487\nn11861641\nn11861853\nn11862835\nn11863467\nn11863877\nn11865071\nn11865276\nn11865429\nn11865
574\nn11865874\nn11866248\nn11866706\nn11867311\nn11868814\nn11869351\nn11869689\nn11870044\nn11870418\nn11870747\nn11871059\nn11871496\nn11871748\nn11872146\nn11872324\nn11872658\nn11873182\nn11873612\nn11874081\nn11874423\nn11874878\nn11875523\nn11875691\nn11875938\nn11876204\nn11876432\nn11876634\nn11876803\nn11877193\nn11877283\nn11877473\nn11877646\nn11877860\nn11878101\nn11878283\nn11878633\nn11879054\nn11879722\nn11879895\nn11881189\nn11882074\nn11882237\nn11882426\nn11882636\nn11882821\nn11882972\nn11883328\nn11883628\nn11883945\nn11884384\nn11884967\nn11885856\nn11887119\nn11887310\nn11887476\nn11887750\nn11888061\nn11888424\nn11888800\nn11889205\nn11889619\nn11890022\nn11890150\nn11890884\nn11891175\nn11892029\nn11892181\nn11892637\nn11892817\nn11893640\nn11893916\nn11894327\nn11894558\nn11894770\nn11895092\nn11895472\nn11895714\nn11896141\nn11896722\nn11897116\nn11897466\nn11898639\nn11898775\nn11899223\nn11899762\nn11899921\nn11900569\nn11901294\nn11901452\nn11901597\nn11901759\nn11901977\nn11902200\nn11902389\nn11902709\nn11902982\nn11903333\nn11903671\nn11904109\nn11904274\nn11905392\nn11905749\nn11906127\nn11906514\nn11906917\nn11907100\nn11907405\nn11907689\nn11908549\nn11908846\nn11909864\nn11910271\nn11910460\nn11910666\nn11915214\nn11915658\nn11915899\nn11916467\nn11916696\nn11917407\nn11917835\nn11918286\nn11918473\nn11918808\nn11919447\nn11919761\nn11919975\nn11920133\nn11920498\nn11920663\nn11920998\nn11921395\nn11921792\nn11922661\nn11922755\nn11922839\nn11922926\nn11923174\nn11923397\nn11923637\nn11924014\nn11924445\nn11924849\nn11925303\nn11925450\nn11925898\nn11926365\nn11926833\nn11926976\nn11927215\nn11927740\nn11928352\nn11928858\nn11929743\nn11930038\nn11930203\nn11930353\nn11930571\nn11930788\nn11930994\nn11931135\nn11931540\nn11931918\nn11932745\nn11932927\nn11933099\nn11933257\nn11933387\nn11933546\nn11933728\nn11933903\nn11934041\nn11934239\nn11934463\nn11934616\nn11934807\nn11935027\nn11935187\nn11935330\nn11935469\nn11935627\nn119
35715\nn11935794\nn11935877\nn11935953\nn11936027\nn11936113\nn11936199\nn11936287\nn11936369\nn11936448\nn11936539\nn11936624\nn11936707\nn11936782\nn11936864\nn11936946\nn11937023\nn11937102\nn11937195\nn11937278\nn11937360\nn11937446\nn11937692\nn11938556\nn11939180\nn11939491\nn11939699\nn11940006\nn11940349\nn11940599\nn11940750\nn11941094\nn11941478\nn11941924\nn11942659\nn11943133\nn11943407\nn11943660\nn11943992\nn11944196\nn11944751\nn11944954\nn11945367\nn11945514\nn11945783\nn11946051\nn11946313\nn11946727\nn11946918\nn11947251\nn11947629\nn11947802\nn11948044\nn11948264\nn11948469\nn11948864\nn11949015\nn11949402\nn11949857\nn11950345\nn11950686\nn11950877\nn11951052\nn11951511\nn11951820\nn11952346\nn11952541\nn11953038\nn11953339\nn11953610\nn11953884\nn11954161\nn11954345\nn11954484\nn11954642\nn11954798\nn11955040\nn11955153\nn11955532\nn11955896\nn11956348\nn11956850\nn11957317\nn11957514\nn11957678\nn11958080\nn11958499\nn11958888\nn11959259\nn11959632\nn11959862\nn11960245\nn11960673\nn11961100\nn11961446\nn11961871\nn11962272\nn11962667\nn11962994\nn11963572\nn11963932\nn11964446\nn11964848\nn11965218\nn11965627\nn11965962\nn11966083\nn11966215\nn11966385\nn11966617\nn11966896\nn11967142\nn11967315\nn11967744\nn11967878\nn11968519\nn11968704\nn11968931\nn11969166\nn11969607\nn11969806\nn11970101\nn11970298\nn11970586\nn11971248\nn11971406\nn11971783\nn11971927\nn11972291\nn11972759\nn11972959\nn11973341\nn11973634\nn11973749\nn11974373\nn11974557\nn11974888\nn11975254\nn11976170\nn11976314\nn11976511\nn11976933\nn11977303\nn11977660\nn11977887\nn11978233\nn11978551\nn11978713\nn11978961\nn11979187\nn11979354\nn11979527\nn11979715\nn11979964\nn11980318\nn11980682\nn11981192\nn11981475\nn11982115\nn11982545\nn11982939\nn11983375\nn11983606\nn11984144\nn11984542\nn11985053\nn11985321\nn11985739\nn11985903\nn11986511\nn11986729\nn11987126\nn11987349\nn11987511\nn11988132\nn11988596\nn11988893\nn11989087\nn11989393\nn11989869\nn11990167\nn11990313\nn1
1990627\nn11990920\nn11991263\nn11991549\nn11991777\nn11992479\nn11992806\nn11993203\nn11993444\nn11993675\nn11994150\nn11995092\nn11995396\nn11996251\nn11996677\nn11997032\nn11997160\nn11997969\nn11998492\nn11998888\nn11999278\nn11999656\nn12000191\nn12001294\nn12001707\nn12001924\nn12002428\nn12002651\nn12002826\nn12003167\nn12003696\nn12004120\nn12004547\nn12004987\nn12005656\nn12006306\nn12006766\nn12006930\nn12007196\nn12007406\nn12007766\nn12008252\nn12008487\nn12008749\nn12009047\nn12009420\nn12009792\nn12010628\nn12010815\nn12011370\nn12011620\nn12012111\nn12012253\nn12012510\nn12013035\nn12013511\nn12013701\nn12014085\nn12014355\nn12014923\nn12015221\nn12015525\nn12015959\nn12016434\nn12016567\nn12016777\nn12016914\nn12017127\nn12017326\nn12017511\nn12017664\nn12017853\nn12018014\nn12018100\nn12018188\nn12018271\nn12018363\nn12018447\nn12018530\nn12018760\nn12019035\nn12019827\nn12020184\nn12020507\nn12020736\nn12020941\nn12022054\nn12022382\nn12022821\nn12023108\nn12023407\nn12023726\nn12024176\nn12024445\nn12024690\nn12024805\nn12025220\nn12026018\nn12026476\nn12026981\nn12027222\nn12027658\nn12028424\nn12029039\nn12029635\nn12030092\nn12030654\nn12030908\nn12031139\nn12031388\nn12031547\nn12031927\nn12032429\nn12032686\nn12033139\nn12033504\nn12033709\nn12034141\nn12034384\nn12034594\nn12035631\nn12035907\nn12036067\nn12036226\nn12036939\nn12037499\nn12037691\nn12038038\nn12038208\nn12038406\nn12038585\nn12038760\nn12038898\nn12039317\nn12041446\nn12043444\nn12043673\nn12043836\nn12044041\nn12044467\nn12044784\nn12045157\nn12045514\nn12045860\nn12046028\nn12046428\nn12046815\nn12047345\nn12047884\nn12048056\nn12048399\nn12048928\nn12049282\nn12049562\nn12050533\nn12050959\nn12051103\nn12051514\nn12051792\nn12052267\nn12052447\nn12052787\nn12053405\nn12053690\nn12053962\nn12054195\nn12055073\nn12055516\nn12056099\nn12056217\nn12056601\nn12056758\nn12056990\nn12057211\nn12057447\nn12057660\nn12057895\nn12058192\nn12058630\nn12058822\nn12059314\nn12059625\n
n12060546\nn12061104\nn12061380\nn12061614\nn12062105\nn12062468\nn12062626\nn12062781\nn12063211\nn12063639\nn12064389\nn12064591\nn12065316\nn12065649\nn12065777\nn12066018\nn12066261\nn12066451\nn12066630\nn12066821\nn12067029\nn12067193\nn12067433\nn12067672\nn12067817\nn12068138\nn12068432\nn12068615\nn12069009\nn12069217\nn12069679\nn12070016\nn12070381\nn12070583\nn12070712\nn12071259\nn12071477\nn12071744\nn12072210\nn12072722\nn12073217\nn12073554\nn12073991\nn12074408\nn12074867\nn12075010\nn12075151\nn12075299\nn12075830\nn12076223\nn12076577\nn12076852\nn12077244\nn12077944\nn12078172\nn12078451\nn12078747\nn12079120\nn12079523\nn12079963\nn12080395\nn12080588\nn12080820\nn12081215\nn12081649\nn12082131\nn12083113\nn12083591\nn12083847\nn12084158\nn12084400\nn12084555\nn12084890\nn12085267\nn12085664\nn12086012\nn12086192\nn12086539\nn12086778\nn12087961\nn12088223\nn12088327\nn12088495\nn12088909\nn12089320\nn12089496\nn12089846\nn12090890\nn12091213\nn12091377\nn12091550\nn12091697\nn12091953\nn12092262\nn12092417\nn12092629\nn12092930\nn12093329\nn12093600\nn12093885\nn12094244\nn12094401\nn12094612\nn12095020\nn12095281\nn12095412\nn12095543\nn12095647\nn12095934\nn12096089\nn12096395\nn12096563\nn12096674\nn12097396\nn12097556\nn12098403\nn12098524\nn12098827\nn12099342\nn12100187\nn12101870\nn12102133\nn12103680\nn12103894\nn12104104\nn12104238\nn12104501\nn12104734\nn12105125\nn12105353\nn12105828\nn12105981\nn12106134\nn12106323\nn12107002\nn12107191\nn12107710\nn12107970\nn12108432\nn12108613\nn12108871\nn12109365\nn12109827\nn12110085\nn12110236\nn12110352\nn12110475\nn12110778\nn12111238\nn12111627\nn12112008\nn12112337\nn12112609\nn12112918\nn12113195\nn12113323\nn12113657\nn12114010\nn12114590\nn12115180\nn12116058\nn12116429\nn12116734\nn12117017\nn12117235\nn12117326\nn12117695\nn12117912\nn12118414\nn12118661\nn12119099\nn12119238\nn12119390\nn12119539\nn12119717\nn12120347\nn12120578\nn12121033\nn12121187\nn12121610\nn12122442\nn12122725
\nn12122918\nn12123648\nn12123741\nn12124172\nn12124627\nn12124818\nn12125001\nn12125183\nn12125584\nn12126084\nn12126360\nn12126736\nn12127460\nn12127575\nn12127768\nn12128071\nn12128306\nn12128490\nn12129134\nn12129738\nn12129986\nn12130549\nn12131405\nn12131550\nn12132092\nn12132956\nn12133151\nn12133462\nn12133682\nn12134025\nn12134486\nn12134695\nn12134836\nn12135049\nn12135576\nn12135729\nn12135898\nn12136392\nn12136581\nn12136720\nn12137120\nn12137569\nn12137791\nn12137954\nn12138110\nn12138248\nn12138444\nn12138578\nn12139196\nn12139575\nn12139793\nn12139921\nn12140511\nn12140759\nn12140903\nn12141167\nn12141385\nn12141495\nn12142085\nn12142357\nn12142450\nn12143065\nn12143215\nn12143405\nn12143676\nn12144313\nn12144580\nn12144987\nn12145148\nn12145477\nn12146311\nn12146488\nn12146654\nn12147226\nn12147835\nn12148757\nn12150722\nn12150969\nn12151170\nn12151615\nn12152031\nn12152251\nn12152532\nn12152722\nn12153033\nn12153224\nn12153580\nn12153741\nn12153914\nn12154114\nn12154773\nn12155009\nn12155583\nn12155773\nn12156679\nn12156819\nn12157056\nn12157179\nn12157769\nn12158031\nn12158443\nn12158798\nn12159055\nn12159388\nn12159555\nn12159804\nn12159942\nn12160125\nn12160303\nn12160490\nn12160857\nn12161056\nn12161285\nn12161577\nn12161744\nn12161969\nn12162181\nn12162425\nn12162758\nn12163035\nn12163279\nn12164363\nn12164656\nn12164881\nn12165170\nn12165384\nn12165758\nn12166128\nn12166424\nn12166793\nn12166929\nn12167075\nn12167436\nn12167602\nn12168565\nn12169099\nn12170585\nn12171098\nn12171316\nn12171966\nn12172364\nn12172481\nn12172906\nn12173069\nn12173664\nn12173912\nn12174311\nn12174521\nn12174926\nn12175181\nn12175370\nn12175598\nn12176453\nn12176709\nn12176953\nn12177129\nn12177455\nn12178129\nn12178780\nn12178896\nn12179122\nn12179632\nn12180168\nn12180456\nn12180885\nn12181352\nn12181612\nn12182049\nn12182276\nn12183026\nn12183452\nn12183816\nn12184095\nn12184468\nn12184912\nn12185254\nn12185859\nn12186352\nn12186554\nn12186839\nn12187247\nn121876
63\nn12187891\nn12188289\nn12188635\nn12189429\nn12189779\nn12189987\nn12190410\nn12190869\nn12191240\nn12192132\nn12192877\nn12193334\nn12193665\nn12194147\nn12194613\nn12195391\nn12195533\nn12195734\nn12196129\nn12196336\nn12196527\nn12196694\nn12196954\nn12197359\nn12197601\nn12198286\nn12198793\nn12199266\nn12199399\nn12199790\nn12199982\nn12200143\nn12200504\nn12200905\nn12201331\nn12201580\nn12201938\nn12202936\nn12203529\nn12203699\nn12203896\nn12204032\nn12204175\nn12204730\nn12205460\nn12205694\nn12214789\nn12215022\nn12215210\nn12215579\nn12215824\nn12216215\nn12216628\nn12216968\nn12217453\nn12217851\nn12218274\nn12218490\nn12218868\nn12219668\nn12220019\nn12220496\nn12220829\nn12221191\nn12221368\nn12221522\nn12221801\nn12222090\nn12222493\nn12222900\nn12223160\nn12223569\nn12223764\nn12224978\nn12225222\nn12225349\nn12225563\nn12226932\nn12227658\nn12227909\nn12228229\nn12228387\nn12228689\nn12228886\nn12229111\nn12229651\nn12229887\nn12230540\nn12230794\nn12231192\nn12231709\nn12232114\nn12232280\nn12232851\nn12233249\nn12234318\nn12234669\nn12235051\nn12235479\nn12236160\nn12236546\nn12236768\nn12236977\nn12237152\nn12237486\nn12237641\nn12237855\nn12238756\nn12238913\nn12239240\nn12239647\nn12239880\nn12240150\nn12240477\nn12240965\nn12241192\nn12241426\nn12241880\nn12242123\nn12242409\nn12242850\nn12243109\nn12243693\nn12244153\nn12244458\nn12244650\nn12244819\nn12245319\nn12245695\nn12245885\nn12246037\nn12246232\nn12246773\nn12246941\nn12247202\nn12247407\nn12247963\nn12248141\nn12248359\nn12248574\nn12248780\nn12248941\nn12249122\nn12249294\nn12249542\nn12251001\nn12251278\nn12251740\nn12252168\nn12252383\nn12252866\nn12253229\nn12253487\nn12253664\nn12253835\nn12254168\nn12255225\nn12256112\nn12256325\nn12256522\nn12256708\nn12256920\nn12257570\nn12257725\nn12258101\nn12258885\nn12259316\nn12260799\nn12261359\nn12261571\nn12261808\nn12262018\nn12262185\nn12262553\nn12263038\nn12263204\nn12263410\nn12263588\nn12263738\nn12263987\nn12264512\nn1226
4786\nn12265083\nn12265394\nn12265600\nn12266217\nn12266528\nn12266644\nn12266796\nn12266984\nn12267133\nn12267265\nn12267411\nn12267534\nn12267677\nn12267931\nn12268246\nn12269241\nn12269406\nn12269652\nn12270027\nn12270278\nn12270460\nn12270741\nn12270946\nn12271187\nn12271451\nn12271643\nn12271933\nn12272239\nn12272432\nn12272735\nn12272883\nn12273114\nn12273344\nn12273515\nn12273768\nn12273939\nn12274151\nn12274358\nn12274630\nn12274863\nn12275131\nn12275317\nn12275489\nn12275675\nn12275888\nn12276110\nn12276314\nn12276477\nn12276628\nn12276872\nn12277150\nn12277334\nn12277578\nn12277800\nn12278107\nn12278371\nn12278650\nn12278865\nn12279060\nn12279293\nn12279458\nn12279772\nn12280060\nn12280364\nn12281241\nn12281788\nn12281974\nn12282235\nn12282527\nn12282737\nn12282933\nn12283147\nn12283395\nn12283542\nn12283790\nn12284262\nn12284821\nn12285049\nn12285195\nn12285369\nn12285512\nn12285705\nn12285900\nn12286068\nn12286197\nn12286826\nn12286988\nn12287195\nn12287642\nn12287836\nn12288005\nn12288823\nn12289310\nn12289433\nn12289585\nn12290748\nn12290975\nn12291143\nn12291459\nn12291671\nn12291959\nn12292463\nn12292877\nn12293723\nn12294124\nn12294331\nn12294542\nn12294723\nn12294871\nn12295033\nn12295237\nn12295429\nn12295796\nn12296045\nn12296432\nn12296735\nn12296929\nn12297110\nn12297280\nn12297507\nn12297846\nn12298165\nn12299640\nn12300840\nn12301180\nn12301445\nn12301613\nn12301766\nn12302071\nn12302248\nn12302565\nn12303083\nn12303462\nn12304115\nn12304286\nn12304420\nn12304703\nn12304899\nn12305089\nn12305293\nn12305475\nn12305654\nn12305819\nn12305986\nn12306089\nn12306270\nn12306717\nn12306938\nn12307076\nn12307240\nn12307756\nn12308112\nn12308447\nn12308907\nn12309277\nn12309630\nn12310021\nn12310349\nn12310638\nn12311045\nn12311224\nn12311413\nn12311579\nn12312110\nn12312728\nn12315060\nn12315245\nn12315598\nn12315999\nn12316444\nn12316572\nn12317296\nn12318378\nn12318782\nn12318965\nn12319204\nn12319414\nn12320010\nn12320414\nn12320627\nn12320806\nn12
321077\nn12321395\nn12321669\nn12321873\nn12322099\nn12322501\nn12322699\nn12323665\nn12324056\nn12324222\nn12324388\nn12324558\nn12324906\nn12325234\nn12325787\nn12327022\nn12327528\nn12327846\nn12328398\nn12328567\nn12328801\nn12329260\nn12329473\nn12330239\nn12330469\nn12330587\nn12330891\nn12331066\nn12331263\nn12331655\nn12331788\nn12332030\nn12332218\nn12332555\nn12333053\nn12333530\nn12333771\nn12333961\nn12334153\nn12334293\nn12334891\nn12335483\nn12335664\nn12335800\nn12335937\nn12336092\nn12336224\nn12336333\nn12336586\nn12336727\nn12336973\nn12337131\nn12337246\nn12337391\nn12337617\nn12337800\nn12337922\nn12338034\nn12338146\nn12338258\nn12338454\nn12338655\nn12338796\nn12338979\nn12339526\nn12339831\nn12340383\nn12340581\nn12340755\nn12341542\nn12341931\nn12342299\nn12342498\nn12342852\nn12343480\nn12343753\nn12344283\nn12344483\nn12344700\nn12344837\nn12345280\nn12345899\nn12346578\nn12346813\nn12346986\nn12347158\nn12349315\nn12349711\nn12350032\nn12350758\nn12351091\nn12351790\nn12352287\nn12352639\nn12352844\nn12352990\nn12353203\nn12353431\nn12353754\nn12355760\nn12356023\nn12356395\nn12356960\nn12357485\nn12357968\nn12358293\nn12360108\nn12360534\nn12360684\nn12360817\nn12360958\nn12361135\nn12361560\nn12361754\nn12361946\nn12362274\nn12362514\nn12362668\nn12363301\nn12363768\nn12364604\nn12364940\nn12365158\nn12365285\nn12365462\nn12365900\nn12366053\nn12366186\nn12366313\nn12366675\nn12366870\nn12367611\nn12368028\nn12368257\nn12368451\nn12369066\nn12369309\nn12369476\nn12369665\nn12369845\nn12370174\nn12370549\nn12371202\nn12371439\nn12371704\nn12372233\nn12373100\nn12373739\nn12374418\nn12374705\nn12374862\nn12375769\nn12377198\nn12377494\nn12378249\nn12378753\nn12378963\nn12379531\nn12380761\nn12381511\nn12382233\nn12382875\nn12383737\nn12383894\nn12384037\nn12384227\nn12384375\nn12384569\nn12384680\nn12384839\nn12385429\nn12385566\nn12385830\nn12386945\nn12387103\nn12387633\nn12387839\nn12388143\nn12388293\nn12388858\nn12388989\nn12389130\nn
12389501\nn12389727\nn12389932\nn12390099\nn12390314\nn12392070\nn12392549\nn12392765\nn12393269\nn12394118\nn12394328\nn12394638\nn12395068\nn12395289\nn12395463\nn12395906\nn12396091\nn12396924\nn12397431\nn12399132\nn12399384\nn12399534\nn12399656\nn12399899\nn12400489\nn12400720\nn12400924\nn12401335\nn12401684\nn12401893\nn12402051\nn12402348\nn12402596\nn12402840\nn12403075\nn12403276\nn12403513\nn12403994\nn12404729\nn12405714\nn12406304\nn12406488\nn12406715\nn12406902\nn12407079\nn12407222\nn12407396\nn12407545\nn12407715\nn12407890\nn12408077\nn12408280\nn12408466\nn12408717\nn12408873\nn12409231\nn12409470\nn12409651\nn12409840\nn12411461\nn12412355\nn12412606\nn12412987\nn12413165\nn12413301\nn12413419\nn12413642\nn12413880\nn12414035\nn12414159\nn12414329\nn12414449\nn12414818\nn12414932\nn12415595\nn12416073\nn12416423\nn12416703\nn12417836\nn12418221\nn12418507\nn12419037\nn12419878\nn12420124\nn12420535\nn12420722\nn12421137\nn12421467\nn12421683\nn12421917\nn12422129\nn12422559\nn12425281\nn12426623\nn12426749\nn12427184\nn12427391\nn12427566\nn12427757\nn12427946\nn12428076\nn12428242\nn12428412\nn12428747\nn12429352\nn12430198\nn12430471\nn12430675\nn12431434\nn12432069\nn12432356\nn12432574\nn12432707\nn12433081\nn12433178\nn12433769\nn12433952\nn12434106\nn12434483\nn12434634\nn12434775\nn12434985\nn12435152\nn12435486\nn12435649\nn12435777\nn12435965\nn12436090\nn12436907\nn12437513\nn12437769\nn12437930\nn12439154\nn12439830\nn12441183\nn12441390\nn12441552\nn12441958\nn12442548\nn12443323\nn12443736\nn12444095\nn12444898\nn12446200\nn12446519\nn12446737\nn12446908\nn12447121\nn12447346\nn12447581\nn12447891\nn12448136\nn12448361\nn12448700\nn12449296\nn12449526\nn12449784\nn12449934\nn12450344\nn12450607\nn12450840\nn12451070\nn12451240\nn12451399\nn12451566\nn12451915\nn12452256\nn12452480\nn12452673\nn12452836\nn12453018\nn12453186\nn12453714\nn12453857\nn12454159\nn12454436\nn12454556\nn12454705\nn12454793\nn12454949\nn12455950\nn12457091\
nn12458550\nn12458713\nn12458874\nn12459629\nn12460146\nn12460697\nn12460957\nn12461109\nn12461466\nn12461673\nn12462032\nn12462221\nn12462582\nn12462805\nn12463134\nn12463743\nn12463975\nn12464128\nn12464476\nn12464649\nn12465557\nn12466727\nn12467018\nn12467197\nn12467433\nn12467592\nn12468545\nn12468719\nn12469517\nn12470092\nn12470512\nn12470907\nn12472024\nn12473608\nn12473840\nn12474167\nn12474418\nn12475035\nn12475242\nn12475774\nn12476510\nn12477163\nn12477401\nn12477583\nn12477747\nn12477983\nn12478768\nn12479537\nn12480456\nn12480895\nn12481150\nn12481289\nn12481458\nn12482437\nn12482668\nn12482893\nn12483282\nn12483427\nn12483625\nn12483841\nn12484244\nn12484784\nn12485653\nn12485981\nn12486574\nn12487058\nn12488454\nn12488709\nn12489046\nn12489676\nn12489815\nn12490490\nn12491017\nn12491435\nn12491826\nn12492106\nn12492460\nn12492682\nn12492900\nn12493208\nn12493426\nn12493868\nn12494794\nn12495146\nn12495670\nn12495895\nn12496427\nn12496949\nn12497669\nn12498055\nn12498457\nn12499163\nn12499757\nn12499979\nn12500309\nn12500518\nn12500751\nn12501202\nn12504570\nn12504783\nn12505253\nn12506181\nn12506341\nn12506991\nn12507379\nn12507823\nn12508309\nn12508618\nn12508762\nn12509109\nn12509476\nn12509665\nn12509821\nn12509993\nn12510343\nn12510774\nn12511488\nn12511856\nn12512095\nn12512294\nn12512674\nn12513172\nn12513613\nn12513933\nn12514138\nn12514592\nn12514992\nn12515393\nn12515711\nn12515925\nn12516165\nn12516584\nn12516828\nn12517077\nn12517445\nn12517642\nn12518013\nn12518481\nn12519089\nn12519563\nn12520406\nn12521186\nn12521394\nn12522188\nn12522678\nn12522894\nn12523141\nn12523475\nn12523850\nn12524188\nn12525168\nn12525513\nn12525753\nn12526178\nn12526516\nn12526754\nn12527081\nn12527738\nn12528109\nn12528382\nn12528549\nn12528768\nn12528974\nn12529220\nn12529500\nn12529905\nn12530629\nn12530818\nn12531328\nn12531727\nn12532564\nn12532886\nn12533190\nn12533437\nn12534208\nn12534625\nn12534862\nn12536291\nn12537253\nn12537569\nn12538209\nn1253907
4\nn12539306\nn12539832\nn12540250\nn12540647\nn12540966\nn12541157\nn12541403\nn12542043\nn12542240\nn12543186\nn12543455\nn12543639\nn12543826\nn12544240\nn12544539\nn12545232\nn12545635\nn12545865\nn12546183\nn12546420\nn12546617\nn12546962\nn12547215\nn12547503\nn12548280\nn12548564\nn12548804\nn12549005\nn12549192\nn12549420\nn12549799\nn12550210\nn12550408\nn12551173\nn12551457\nn12552309\nn12552893\nn12553742\nn12554029\nn12554526\nn12554729\nn12554911\nn12555255\nn12555859\nn12556656\nn12557064\nn12557438\nn12557556\nn12557681\nn12558230\nn12558425\nn12558680\nn12559044\nn12559518\nn12560282\nn12560621\nn12560775\nn12561169\nn12561309\nn12561594\nn12562141\nn12562577\nn12562785\nn12563045\nn12563702\nn12564083\nn12564613\nn12565102\nn12565912\nn12566331\nn12566954\nn12567950\nn12568186\nn12568649\nn12569037\nn12569616\nn12569851\nn12570394\nn12570703\nn12570972\nn12571781\nn12572546\nn12572759\nn12572858\nn12573256\nn12573474\nn12573647\nn12573911\nn12574320\nn12574470\nn12574866\nn12575322\nn12575812\nn12576323\nn12576451\nn12576695\nn12577362\nn12577895\nn12578255\nn12578626\nn12578916\nn12579038\nn12579404\nn12579822\nn12580012\nn12580654\nn12580786\nn12580896\nn12581110\nn12582231\nn12582665\nn12582846\nn12583126\nn12583401\nn12583681\nn12583855\nn12584191\nn12584365\nn12584715\nn12585137\nn12585373\nn12585629\nn12586298\nn12586499\nn12586725\nn12586989\nn12587132\nn12587487\nn12587803\nn12588320\nn12588780\nn12589142\nn12589458\nn12589687\nn12589841\nn12590232\nn12590499\nn12590600\nn12590715\nn12591017\nn12591351\nn12591702\nn12592058\nn12592544\nn12592839\nn12593122\nn12593341\nn12593994\nn12594324\nn12594989\nn12595699\nn12595964\nn12596148\nn12596345\nn12596709\nn12596849\nn12597134\nn12597466\nn12597798\nn12598027\nn12599185\nn12599435\nn12599661\nn12599874\nn12600095\nn12600267\nn12601494\nn12601805\nn12602262\nn12602434\nn12602612\nn12602980\nn12603273\nn12603449\nn12603672\nn12604228\nn12604460\nn12604639\nn12604845\nn12605683\nn12606438\nn12606
545\nn12607456\nn12609379\nn12610328\nn12610740\nn12611640\nn12612170\nn12612811\nn12613706\nn12614096\nn12614477\nn12614625\nn12615232\nn12615710\nn12616248\nn12616630\nn12616996\nn12617559\nn12618146\nn12618727\nn12620196\nn12620546\nn12620969\nn12621410\nn12621619\nn12621945\nn12622297\nn12622875\nn12623077\nn12623211\nn12623818\nn12624381\nn12624568\nn12625003\nn12625383\nn12625670\nn12625823\nn12626674\nn12626878\nn12627119\nn12627347\nn12627526\nn12628356\nn12628705\nn12628986\nn12629305\nn12629666\nn12630763\nn12630999\nn12631331\nn12631637\nn12631932\nn12632335\nn12632733\nn12633061\nn12633638\nn12633994\nn12634211\nn12634429\nn12634734\nn12634986\nn12635151\nn12635359\nn12635532\nn12635744\nn12635955\nn12636224\nn12636885\nn12637123\nn12637485\nn12638218\nn12638556\nn12638753\nn12638964\nn12639168\nn12639376\nn12639584\nn12639736\nn12639910\nn12640081\nn12640284\nn12640435\nn12640607\nn12640839\nn12641007\nn12641180\nn12641413\nn12641931\nn12642090\nn12642200\nn12642435\nn12642600\nn12642964\nn12643113\nn12643313\nn12643473\nn12643688\nn12643877\nn12644283\nn12644902\nn12645174\nn12645530\nn12646072\nn12646197\nn12646397\nn12646605\nn12646740\nn12646950\nn12647231\nn12647376\nn12647560\nn12647787\nn12647893\nn12648045\nn12648196\nn12648424\nn12648693\nn12648888\nn12649065\nn12649317\nn12649539\nn12649866\nn12650038\nn12650229\nn12650379\nn12650556\nn12650805\nn12650915\nn12651229\nn12651611\nn12651821\nn12653218\nn12653436\nn12653633\nn12654227\nn12654857\nn12655062\nn12655245\nn12655351\nn12655498\nn12655605\nn12655726\nn12655869\nn12656369\nn12656528\nn12656685\nn12656909\nn12657082\nn12657755\nn12658118\nn12658308\nn12658481\nn12658603\nn12658715\nn12658846\nn12659064\nn12659356\nn12659539\nn12660601\nn12661045\nn12661227\nn12661538\nn12662074\nn12662379\nn12662772\nn12663023\nn12663254\nn12663359\nn12663804\nn12664005\nn12664187\nn12664469\nn12664710\nn12665048\nn12665271\nn12665659\nn12665857\nn12666050\nn12666159\nn12666369\nn12666965\nn12667406\nn126
67582\nn12667964\nn12668131\nn12669803\nn12670334\nn12670758\nn12670962\nn12671651\nn12672289\nn12673588\nn12674120\nn12674685\nn12674895\nn12675299\nn12675515\nn12675876\nn12676134\nn12676370\nn12676534\nn12676703\nn12677120\nn12677331\nn12677612\nn12677841\nn12678794\nn12679023\nn12679432\nn12679593\nn12679876\nn12680402\nn12680652\nn12680864\nn12681376\nn12681579\nn12681893\nn12682411\nn12682668\nn12682882\nn12683096\nn12683407\nn12683571\nn12683791\nn12684379\nn12685431\nn12685831\nn12686077\nn12686274\nn12686496\nn12686676\nn12686877\nn12687044\nn12687462\nn12687698\nn12687957\nn12688187\nn12688372\nn12688716\nn12689305\nn12690653\nn12691428\nn12691661\nn12692024\nn12692160\nn12692521\nn12692714\nn12693244\nn12693352\nn12693865\nn12694486\nn12695144\nn12695975\nn12696492\nn12696830\nn12697152\nn12697514\nn12698027\nn12698435\nn12698598\nn12698774\nn12699031\nn12699301\nn12699922\nn12700088\nn12700357\nn12702124\nn12703190\nn12703383\nn12703557\nn12703716\nn12703856\nn12704041\nn12704343\nn12704513\nn12705013\nn12705220\nn12705458\nn12705698\nn12705978\nn12706410\nn12707199\nn12707781\nn12708293\nn12708654\nn12708941\nn12709103\nn12709349\nn12709688\nn12709901\nn12710295\nn12710415\nn12710577\nn12710693\nn12710917\nn12711182\nn12711398\nn12711596\nn12711817\nn12711984\nn12712320\nn12712626\nn12713063\nn12713358\nn12713521\nn12713866\nn12714254\nn12714755\nn12714949\nn12715195\nn12715914\nn12716400\nn12716594\nn12717072\nn12717224\nn12717644\nn12718074\nn12718483\nn12718995\nn12719684\nn12719944\nn12720200\nn12720354\nn12721122\nn12721477\nn12722071\nn12723062\nn12723610\nn12724942\nn12725521\nn12725738\nn12725940\nn12726159\nn12726357\nn12726528\nn12726670\nn12726902\nn12727101\nn12727301\nn12727518\nn12727729\nn12727960\nn12728164\nn12728322\nn12728508\nn12728656\nn12728864\nn12729023\nn12729164\nn12729315\nn12729521\nn12729729\nn12729950\nn12730143\nn12730370\nn12730544\nn12730776\nn12731029\nn12731401\nn12731835\nn12732009\nn12732252\nn12732491\nn12732605\nn1
2732756\nn12732966\nn12733218\nn12733428\nn12733647\nn12733870\nn12734070\nn12734215\nn12735160\nn12736603\nn12736999\nn12737383\nn12737898\nn12738259\nn12739332\nn12739966\nn12740967\nn12741222\nn12741586\nn12741792\nn12742290\nn12742741\nn12742878\nn12743009\nn12743352\nn12743823\nn12743976\nn12744142\nn12744387\nn12744850\nn12745386\nn12745564\nn12746884\nn12747120\nn12748248\nn12749049\nn12749456\nn12749679\nn12749852\nn12750076\nn12750767\nn12751172\nn12751675\nn12752205\nn12753007\nn12753245\nn12753573\nn12753762\nn12754003\nn12754174\nn12754311\nn12754468\nn12754648\nn12754781\nn12754981\nn12755225\nn12755387\nn12755559\nn12755727\nn12755876\nn12756457\nn12757115\nn12757303\nn12757458\nn12757668\nn12757816\nn12757930\nn12758014\nn12758099\nn12758176\nn12758250\nn12758325\nn12758399\nn12758471\nn12758555\nn12759273\nn12759668\nn12760539\nn12760875\nn12761284\nn12761702\nn12761905\nn12762049\nn12762405\nn12762896\nn12763529\nn12764008\nn12764202\nn12764507\nn12764978\nn12765115\nn12765402\nn12765846\nn12766043\nn12766595\nn12766869\nn12767208\nn12767423\nn12767648\nn12768369\nn12768682\nn12768809\nn12768933\nn12769065\nn12769219\nn12769318\nn12770529\nn12770892\nn12771085\nn12771192\nn12771390\nn12771597\nn12771890\nn12772753\nn12772908\nn12773142\nn12773651\nn12773917\nn12774299\nn12774641\nn12775070\nn12775393\nn12775717\nn12775919\nn12776558\nn12776774\nn12777436\nn12777680\nn12777778\nn12777892\nn12778398\nn12778605\nn12779603\nn12779851\nn12780325\nn12780563\nn12781940\nn12782530\nn12782915\nn12783316\nn12783730\nn12784371\nn12784889\nn12785724\nn12785889\nn12786273\nn12786464\nn12786836\nn12787364\nn12788854\nn12789054\nn12789554\nn12789977\nn12790430\nn12791064\nn12791329\nn12793015\nn12793284\nn12793494\nn12793695\nn12793886\nn12794135\nn12794367\nn12794568\nn12794985\nn12795209\nn12795352\nn12795555\nn12796022\nn12796385\nn12796849\nn12797368\nn12797860\nn12798284\nn12798910\nn12799269\nn12799776\nn12800049\nn12800586\nn12801072\nn12801520\nn12801781\n
n12801966\nn12803226\nn12803754\nn12803958\nn12804352\nn12805146\nn12805561\nn12805762\nn12806015\nn12806732\nn12807251\nn12807409\nn12807624\nn12807773\nn12808007\nn12809868\nn12810007\nn12810151\nn12810595\nn12811027\nn12811713\nn12812235\nn12812478\nn12812801\nn12813189\nn12814643\nn12814857\nn12814960\nn12815198\nn12815668\nn12815838\nn12816508\nn12816942\nn12817464\nn12817694\nn12817855\nn12818004\nn12818346\nn12818601\nn12818966\nn12819141\nn12819354\nn12819728\nn12820113\nn12820669\nn12820853\nn12821505\nn12821895\nn12822115\nn12822466\nn12822769\nn12822955\nn12823717\nn12823859\nn12824053\nn12824289\nn12824735\nn12825497\nn12826143\nn12827270\nn12827537\nn12827907\nn12828220\nn12828379\nn12828520\nn12828791\nn12828977\nn12829582\nn12829975\nn12830222\nn12830568\nn12831141\nn12831535\nn12831932\nn12832315\nn12832538\nn12832822\nn12833149\nn12833985\nn12834190\nn12834798\nn12834938\nn12835331\nn12835766\nn12836212\nn12836337\nn12836508\nn12836862\nn12837052\nn12837259\nn12837466\nn12837803\nn12839574\nn12839979\nn12840168\nn12840362\nn12840502\nn12840749\nn12841007\nn12841193\nn12841354\nn12842302\nn12842519\nn12842642\nn12842887\nn12843144\nn12843316\nn12843557\nn12843970\nn12844409\nn12844939\nn12845187\nn12845413\nn12845908\nn12846335\nn12846690\nn12847008\nn12847374\nn12847927\nn12848499\nn12849061\nn12849279\nn12849416\nn12849952\nn12850168\nn12850336\nn12850906\nn12851094\nn12851469\nn12851860\nn12852234\nn12852428\nn12852570\nn12853080\nn12853287\nn12853482\nn12854048\nn12854193\nn12854600\nn12855365\nn12855494\nn12855710\nn12855886\nn12856091\nn12856287\nn12856479\nn12856680\nn12857204\nn12857779\nn12858150\nn12858397\nn12858618\nn12858871\nn12858987\nn12859153\nn12859272\nn12859679\nn12859986\nn12860365\nn12860978\nn12861345\nn12861541\nn12861892\nn12862512\nn12862828\nn12863234\nn12863624\nn12864160\nn12865037\nn12865562\nn12865708\nn12865824\nn12866002\nn12866162\nn12866333\nn12866459\nn12866635\nn12866968\nn12867184\nn12867449\nn12867826\nn12868019
\nn12868880\nn12869061\nn12869478\nn12869668\nn12870048\nn12870225\nn12870535\nn12870682\nn12870891\nn12871272\nn12871696\nn12871859\nn12872458\nn12872914\nn12873341\nn12873984\nn12875269\nn12875697\nn12875861\nn12876899\nn12877244\nn12877493\nn12877637\nn12877838\nn12878169\nn12878325\nn12878784\nn12879068\nn12879527\nn12879963\nn12880244\nn12880462\nn12880638\nn12880799\nn12881105\nn12881913\nn12882158\nn12882779\nn12882945\nn12883265\nn12883628\nn12884100\nn12884260\nn12885045\nn12885265\nn12885510\nn12885754\nn12886185\nn12886402\nn12886600\nn12886831\nn12887293\nn12887532\nn12887713\nn12888016\nn12888234\nn12888457\nn12889219\nn12889412\nn12889579\nn12889713\nn12890265\nn12890490\nn12890685\nn12890928\nn12891093\nn12891305\nn12891469\nn12891643\nn12891824\nn12892013\nn12893463\nn12893993\nn12895298\nn12895811\nn12896615\nn12897118\nn12897788\nn12897999\nn12898342\nn12898774\nn12899166\nn12899537\nn12899752\nn12899971\nn12900783\nn12901724\nn12902466\nn12902662\nn12903014\nn12903367\nn12903503\nn12903964\nn12904314\nn12904562\nn12904938\nn12905135\nn12905412\nn12906214\nn12906498\nn12906771\nn12907057\nn12907671\nn12907857\nn12908093\nn12908645\nn12908854\nn12909421\nn12909614\nn12909759\nn12909917\nn12911079\nn12911264\nn12911440\nn12911673\nn12911914\nn12912274\nn12912670\nn12912801\nn12913144\nn12913524\nn12913791\nn12914923\nn12915140\nn12915568\nn12915811\nn12916179\nn12916511\nn12917901\nn12918609\nn12918810\nn12918991\nn12919195\nn12919403\nn12919646\nn12919847\nn12920043\nn12920204\nn12920521\nn12920719\nn12920955\nn12921315\nn12921499\nn12921660\nn12921868\nn12922119\nn12922458\nn12922763\nn12923108\nn12923257\nn12924623\nn12925179\nn12925583\nn12926039\nn12926480\nn12926689\nn12927013\nn12927194\nn12927494\nn12927758\nn12928071\nn12928307\nn12928491\nn12928819\nn12929403\nn12929600\nn12930778\nn12930951\nn12931231\nn12931542\nn12931906\nn12932173\nn12932365\nn12932706\nn12932966\nn12933274\nn12934036\nn12934174\nn12934479\nn12934685\nn12934985\nn129351
66\nn12935609\nn12936155\nn12936826\nn12937130\nn12938081\nn12938193\nn12938445\nn12938667\nn12939104\nn12939282\nn12939479\nn12939874\nn12940226\nn12940609\nn12941220\nn12941536\nn12941717\nn12942025\nn12942395\nn12942572\nn12942729\nn12943049\nn12943443\nn12943912\nn12944095\nn12945177\nn12945366\nn12945549\nn12946849\nn12947313\nn12947544\nn12947756\nn12947895\nn12948053\nn12948251\nn12948495\nn12949160\nn12949361\nn12950126\nn12950314\nn12950796\nn12951146\nn12951835\nn12952165\nn12952469\nn12952590\nn12952717\nn12953206\nn12953484\nn12953712\nn12954353\nn12954799\nn12955414\nn12955840\nn12956170\nn12956367\nn12956588\nn12956922\nn12957608\nn12957803\nn12957924\nn12958261\nn12958615\nn12959074\nn12959538\nn12960378\nn12960552\nn12960863\nn12961242\nn12961393\nn12961536\nn12961879\nn12963628\nn12964920\nn12965626\nn12965951\nn12966804\nn12966945\nn12968136\nn12968309\nn12969131\nn12969425\nn12969670\nn12969927\nn12970193\nn12970293\nn12970733\nn12971400\nn12971804\nn12972136\nn12973443\nn12973791\nn12973937\nn12974987\nn12975804\nn12976198\nn12976554\nn12978076\nn12979316\nn12979829\nn12980080\nn12980840\nn12981086\nn12981301\nn12981443\nn12981954\nn12982468\nn12982590\nn12982915\nn12983048\nn12983654\nn12983873\nn12983961\nn12984267\nn12984489\nn12984595\nn12985420\nn12985773\nn12985857\nn12986227\nn12987056\nn12987423\nn12987535\nn12988158\nn12988341\nn12988572\nn12989007\nn12989938\nn12990597\nn12991184\nn12991837\nn12992177\nn12992868\nn12994892\nn12995601\nn12997654\nn12997919\nn12998815\nn13000891\nn13001041\nn13001206\nn13001366\nn13001529\nn13001930\nn13002209\nn13002750\nn13002925\nn13003061\nn13003254\nn13003522\nn13003712\nn13004423\nn13004640\nn13004826\nn13004992\nn13005329\nn13005984\nn13006171\nn13006631\nn13006894\nn13007034\nn13007417\nn13007629\nn13008157\nn13008315\nn13008485\nn13008689\nn13008839\nn13009085\nn13009244\nn13009429\nn13009656\nn13010694\nn13010951\nn13011221\nn13011595\nn13012253\nn13012469\nn13012973\nn13013534\nn13013764\nn1301
3965\nn13014097\nn13014265\nn13014409\nn13014581\nn13014741\nn13014879\nn13015509\nn13015688\nn13016076\nn13016289\nn13017102\nn13017240\nn13017439\nn13017610\nn13017789\nn13017979\nn13018088\nn13018232\nn13018407\nn13018906\nn13019496\nn13019643\nn13019835\nn13020191\nn13020481\nn13020964\nn13021166\nn13021332\nn13021543\nn13021689\nn13021867\nn13022210\nn13022709\nn13022903\nn13023134\nn13024012\nn13024500\nn13024653\nn13025647\nn13025854\nn13026015\nn13027557\nn13027879\nn13028611\nn13028937\nn13029122\nn13029326\nn13029610\nn13029760\nn13030337\nn13030616\nn13030852\nn13031193\nn13031323\nn13031474\nn13032115\nn13032381\nn13032618\nn13032923\nn13033134\nn13033396\nn13033577\nn13033879\nn13034062\nn13034555\nn13034788\nn13035241\nn13035389\nn13035707\nn13035925\nn13036116\nn13036312\nn13036804\nn13037406\nn13037585\nn13037805\nn13038068\nn13038376\nn13038577\nn13038744\nn13039349\nn13040303\nn13040629\nn13040796\nn13041312\nn13041943\nn13042134\nn13042316\nn13042982\nn13043926\nn13044375\nn13044778\nn13045210\nn13045594\nn13045975\nn13046130\nn13046669\nn13047862\nn13048447\nn13049953\nn13050397\nn13050705\nn13050940\nn13051346\nn13052014\nn13052248\nn13052670\nn13052931\nn13053608\nn13054073\nn13054560\nn13055423\nn13055577\nn13055792\nn13055949\nn13056135\nn13056349\nn13056607\nn13056799\nn13057054\nn13057242\nn13057422\nn13057639\nn13058037\nn13058272\nn13058608\nn13059298\nn13059657\nn13060017\nn13060190\nn13061172\nn13061348\nn13061471\nn13061704\nn13062421\nn13063269\nn13063514\nn13064111\nn13064457\nn13065089\nn13065514\nn13066129\nn13066448\nn13066979\nn13067191\nn13067330\nn13067532\nn13067672\nn13068255\nn13068434\nn13068735\nn13068917\nn13069224\nn13069773\nn13070308\nn13070875\nn13071371\nn13071553\nn13071815\nn13072031\nn13072209\nn13072350\nn13072528\nn13072706\nn13072863\nn13073055\nn13073703\nn13074619\nn13074814\nn13075020\nn13075272\nn13075441\nn13075684\nn13075847\nn13076041\nn13076405\nn13076643\nn13076831\nn13077033\nn13077295\nn13078021\nn13
079073\nn13079419\nn13079567\nn13080306\nn13080866\nn13081229\nn13081999\nn13082568\nn13083023\nn13083461\nn13084184\nn13084834\nn13085113\nn13085747\nn13090018\nn13090871\nn13091620\nn13091774\nn13091982\nn13092078\nn13092240\nn13092385\nn13092987\nn13093275\nn13093629\nn13094145\nn13094273\nn13095013\nn13096779\nn13098515\nn13098962\nn13099833\nn13099999\nn13100156\nn13100677\nn13102648\nn13102775\nn13103023\nn13103660\nn13103750\nn13103877\nn13104059\nn13107694\nn13107807\nn13107891\nn13108131\nn13108323\nn13108481\nn13108545\nn13108662\nn13108841\nn13109733\nn13110915\nn13111174\nn13111340\nn13111504\nn13111881\nn13112035\nn13112201\nn13118330\nn13118707\nn13119870\nn13120211\nn13120958\nn13121104\nn13121349\nn13122364\nn13123309\nn13123431\nn13123841\nn13124358\nn13124654\nn13125117\nn13126050\nn13126856\nn13127001\nn13127303\nn13127666\nn13127843\nn13128278\nn13128582\nn13128976\nn13129078\nn13130014\nn13130161\nn13130726\nn13131028\nn13131618\nn13132034\nn13132156\nn13132338\nn13132486\nn13132656\nn13132756\nn13132940\nn13133140\nn13133233\nn13133316\nn13133613\nn13133932\nn13134302\nn13134531\nn13134844\nn13134947\nn13135692\nn13135832\nn13136316\nn13136556\nn13136781\nn13137010\nn13137225\nn13137409\nn13137672\nn13137951\nn13138155\nn13138308\nn13138658\nn13138842\nn13139055\nn13139321\nn13139482\nn13139647\nn13139837\nn13140049\nn13140367\nn13141141\nn13141415\nn13141564\nn13141797\nn13141972\nn13142182\nn13142504\nn13142907\nn13143285\nn13143758\nn13144084\nn13145040\nn13145250\nn13145444\nn13146403\nn13146583\nn13146928\nn13147153\nn13147270\nn13147386\nn13147532\nn13147689\nn13147918\nn13148208\nn13148384\nn13149296\nn13149970\nn13150378\nn13150592\nn13150894\nn13151082\nn13152339\nn13154388\nn13154494\nn13154841\nn13155095\nn13155305\nn13155611\nn13156986\nn13157137\nn13157346\nn13157481\nn13157684\nn13157971\nn13158167\nn13158512\nn13158605\nn13158714\nn13158815\nn13159357\nn13159691\nn13159890\nn13160116\nn13160254\nn13160365\nn13160604\nn13160831\nn
13160938\nn13161151\nn13161254\nn13161904\nn13163553\nn13163649\nn13163991\nn13164501\nn13170840\nn13171210\nn13171797\nn13172923\nn13173132\nn13173259\nn13173488\nn13173697\nn13173882\nn13174354\nn13174670\nn13174823\nn13175682\nn13176363\nn13176714\nn13177048\nn13177529\nn13177768\nn13177884\nn13178284\nn13178707\nn13179056\nn13179804\nn13180534\nn13180875\nn13181055\nn13181244\nn13181406\nn13181811\nn13182164\nn13182338\nn13182799\nn13182937\nn13183056\nn13183489\nn13184394\nn13185269\nn13185658\nn13186388\nn13186546\nn13187367\nn13188096\nn13188268\nn13188462\nn13188767\nn13190060\nn13190747\nn13191148\nn13191620\nn13191884\nn13192625\nn13193143\nn13193269\nn13193466\nn13193642\nn13193856\nn13194036\nn13194212\nn13194572\nn13194758\nn13194918\nn13195341\nn13195761\nn13196003\nn13196234\nn13196369\nn13196738\nn13197274\nn13197507\nn13198054\nn13198482\nn13198914\nn13199717\nn13199970\nn13200193\nn13200542\nn13200651\nn13200986\nn13201423\nn13201566\nn13201969\nn13202125\nn13202355\nn13202602\nn13205058\nn13205249\nn13206178\nn13206817\nn13207094\nn13207335\nn13207572\nn13207736\nn13207923\nn13208302\nn13208705\nn13208965\nn13209129\nn13209270\nn13209460\nn13209808\nn13210350\nn13210597\nn13211020\nn13211790\nn13212025\nn13212175\nn13212379\nn13212559\nn13213066\nn13213397\nn13213577\nn13214217\nn13214340\nn13214485\nn13215258\nn13215586\nn13217005\nn13219422\nn13219833\nn13219976\nn13220122\nn13220355\nn13220525\nn13220663\nn13221529\nn13222877\nn13222985\nn13223090\nn13223588\nn13223710\nn13223843\nn13224673\nn13224922\nn13225244\nn13225365\nn13225617\nn13226320\nn13226871\nn13228017\nn13228536\nn13229543\nn13229951\nn13230190\nn13230662\nn13230843\nn13231078\nn13231678\nn13231919\nn13232106\nn13232363\nn13232779\nn13233727\nn13234114\nn13234519\nn13234678\nn13234857\nn13235011\nn13235159\nn13235319\nn13235503\nn13235766\nn13236100\nn13237188\nn13237508\nn13238375\nn13238654\nn13238988\nn13239177\nn13239736\nn13239921\nn13240362\nn13252672\nn13354021\nn13555775\
nn13579829\nn13650447\nn13653902\nn13862407\nn13862552\nn13862780\nn13863020\nn13863186\nn13863473\nn13863771\nn13864035\nn13864153\nn13864965\nn13865298\nn13865483\nn13865904\nn13866144\nn13866626\nn13866827\nn13867005\nn13867492\nn13868248\nn13868371\nn13868515\nn13868944\nn13869045\nn13869547\nn13869788\nn13869896\nn13871717\nn13872592\nn13872822\nn13873361\nn13873502\nn13873917\nn13874073\nn13874558\nn13875392\nn13875571\nn13875884\nn13876561\nn13877547\nn13877667\nn13878306\nn13879049\nn13879320\nn13879816\nn13880199\nn13880415\nn13880551\nn13880704\nn13880994\nn13881512\nn13881644\nn13882201\nn13882276\nn13882487\nn13882563\nn13882639\nn13882713\nn13882961\nn13883603\nn13883763\nn13884261\nn13884384\nn13884930\nn13885011\nn13886260\nn13888491\nn13889066\nn13889331\nn13891547\nn13891937\nn13893786\nn13894154\nn13894434\nn13895262\nn13896100\nn13896217\nn13897198\nn13897528\nn13897996\nn13898207\nn13898315\nn13898645\nn13899735\nn13900287\nn13900422\nn13901211\nn13901321\nn13901423\nn13901490\nn13901858\nn13902048\nn13902336\nn13902793\nn13903079\nn13905121\nn13905275\nn13905792\nn13906484\nn13906669\nn13906767\nn13906936\nn13907272\nn13908201\nn13908580\nn13911045\nn13912260\nn13912540\nn13914141\nn13914265\nn13914608\nn13915023\nn13915113\nn13915209\nn13915305\nn13915999\nn13916363\nn13916721\nn13917690\nn13917785\nn13918274\nn13918387\nn13918717\nn13919547\nn13919919\nn13926786\nn14131950\nn14175579\nn14564779\nn14582716\nn14583400\nn14585392\nn14592309\nn14603798\nn14633206\nn14685296\nn14696793\nn14698884\nn14714645\nn14720833\nn14765422\nn14785065\nn14786943\nn14804958\nn14810561\nn14820180\nn14821852\nn14844693\nn14853210\nn14858292\nn14867545\nn14891255\nn14899328\nn14900184\nn14900342\nn14908027\nn14909584\nn14914945\nn14915184\nn14919819\nn14938389\nn14941787\nn14942411\nn14973585\nn14974264\nn14975598\nn14976759\nn14976871\nn14977188\nn14977504\nn14992287\nn14993378\nn15005577\nn15006012\nn15019030\nn15048888\nn15060326\nn15060688\nn15062057\nn1506787
7\nn15075141\nn15086247\nn15089258\nn15089472\nn15089645\nn15089803\nn15090065\nn15090238\nn15090742\nn15091129\nn15091304\nn15091473\nn15091669\nn15091846\nn15092059\nn15092227\nn15092409\nn15092650\nn15092751\nn15092942\nn15093049\nn15093137\nn15093298\nn15102359\nn15102455\nn15102894\n"
  },
  {
    "path": "timm/data/_info/imagenet21k_goog_to_12k_indices.txt",
    "content": "1\n3\n4\n5\n6\n7\n8\n9\n10\n11\n13\n14\n15\n16\n17\n18\n19\n20\n21\n23\n24\n26\n27\n28\n29\n30\n31\n32\n33\n34\n37\n38\n41\n43\n44\n45\n46\n47\n48\n49\n50\n51\n53\n55\n56\n57\n58\n60\n61\n62\n63\n64\n65\n66\n67\n68\n69\n70\n71\n72\n73\n74\n75\n76\n77\n78\n79\n80\n81\n82\n83\n84\n85\n86\n89\n90\n91\n93\n94\n95\n96\n97\n99\n100\n101\n102\n103\n105\n107\n108\n109\n110\n111\n112\n113\n114\n115\n116\n117\n118\n119\n120\n121\n122\n123\n124\n125\n126\n127\n128\n129\n130\n131\n132\n133\n134\n135\n137\n138\n140\n141\n142\n143\n144\n146\n147\n148\n149\n151\n152\n153\n154\n156\n157\n158\n159\n161\n162\n164\n165\n166\n167\n168\n169\n170\n171\n172\n173\n175\n176\n179\n180\n181\n182\n184\n188\n192\n193\n195\n196\n197\n199\n200\n203\n206\n209\n210\n211\n212\n213\n214\n215\n216\n217\n218\n219\n220\n221\n222\n223\n224\n225\n226\n227\n230\n231\n235\n249\n250\n251\n252\n253\n254\n289\n292\n295\n301\n306\n307\n312\n313\n315\n317\n320\n324\n325\n326\n327\n332\n341\n343\n347\n352\n353\n354\n356\n359\n360\n366\n367\n368\n369\n370\n377\n379\n380\n382\n383\n384\n385\n386\n392\n395\n398\n402\n405\n408\n410\n411\n413\n415\n416\n418\n422\n423\n424\n430\n431\n440\n441\n451\n452\n455\n456\n457\n460\n461\n464\n465\n466\n468\n469\n470\n471\n472\n473\n474\n475\n477\n479\n482\n486\n489\n490\n491\n492\n493\n496\n499\n500\n502\n503\n505\n510\n511\n512\n513\n514\n515\n516\n520\n523\n524\n525\n526\n527\n528\n529\n530\n533\n536\n538\n539\n540\n541\n542\n543\n544\n545\n546\n547\n548\n549\n550\n552\n553\n554\n555\n556\n557\n558\n559\n560\n561\n562\n563\n564\n566\n567\n568\n569\n570\n571\n572\n573\n574\n575\n576\n577\n578\n580\n581\n583\n584\n585\n586\n587\n588\n589\n590\n591\n592\n595\n596\n598\n601\n602\n603\n604\n605\n607\n608\n609\n610\n611\n612\n613\n614\n615\n616\n618\n619\n620\n621\n623\n624\n628\n629\n630\n631\n632\n634\n635\n636\n637\n638\n639\n640\n641\n643\n644\n645\n646\n647\n648\n649\n650\n651\n653\n654\n655\n656\n657\n658\n659\n660\n661\n663\n664\n665\n666\n667\n668\n669\n670\n
671\n672\n673\n674\n675\n677\n678\n679\n680\n681\n682\n683\n684\n685\n686\n687\n688\n689\n691\n692\n693\n694\n695\n696\n697\n698\n700\n701\n702\n703\n704\n705\n706\n707\n708\n710\n711\n713\n714\n715\n716\n717\n718\n719\n720\n721\n722\n723\n727\n728\n730\n732\n733\n734\n736\n737\n738\n739\n740\n741\n742\n743\n744\n745\n746\n747\n748\n749\n751\n752\n753\n755\n757\n758\n759\n761\n762\n763\n764\n765\n766\n767\n768\n769\n770\n773\n774\n775\n776\n777\n778\n780\n781\n782\n783\n784\n785\n786\n787\n789\n790\n791\n792\n794\n796\n798\n799\n801\n804\n805\n807\n808\n809\n810\n811\n812\n813\n816\n817\n818\n822\n823\n824\n825\n826\n827\n828\n829\n830\n831\n832\n833\n834\n835\n836\n838\n839\n840\n841\n842\n843\n845\n846\n847\n848\n849\n850\n851\n852\n853\n854\n855\n856\n857\n858\n861\n862\n863\n864\n865\n866\n867\n868\n869\n870\n871\n872\n873\n874\n875\n876\n877\n878\n879\n880\n881\n882\n883\n884\n885\n886\n887\n888\n889\n891\n892\n894\n895\n896\n897\n899\n900\n901\n903\n904\n905\n908\n909\n910\n912\n913\n916\n919\n920\n922\n925\n931\n932\n933\n934\n935\n936\n939\n941\n944\n945\n946\n947\n949\n950\n951\n952\n953\n954\n955\n958\n960\n961\n963\n964\n968\n969\n970\n971\n976\n979\n983\n986\n990\n991\n992\n993\n994\n995\n996\n997\n998\n999\n1000\n1001\n1002\n1003\n1004\n1005\n1006\n1007\n1008\n1009\n1010\n1011\n1012\n1013\n1014\n1015\n1016\n1017\n1019\n1022\n1024\n1025\n1027\n1029\n1030\n1031\n1032\n1035\n1036\n1037\n1038\n1039\n1040\n1041\n1043\n1044\n1045\n1046\n1047\n1048\n1050\n1051\n1052\n1055\n1056\n1063\n1064\n1065\n1067\n1069\n1070\n1071\n1072\n1075\n1076\n1078\n1079\n1080\n1081\n1083\n1084\n1085\n1086\n1087\n1088\n1089\n1092\n1093\n1094\n1095\n1097\n1099\n1106\n1121\n1140\n1141\n1143\n1144\n1145\n1147\n1148\n1149\n1150\n1151\n1152\n1155\n1157\n1159\n1160\n1161\n1164\n1165\n1166\n1167\n1168\n1169\n1170\n1171\n1172\n1173\n1178\n1179\n1180\n1181\n1182\n1184\n1187\n1190\n1191\n1193\n1195\n1196\n1197\n1199\n1200\n1201\n1202\n1203\n1204\n1205\n1207\n1208\n1209\n1211\n1214\n1215\n1216
\n1217\n1218\n1219\n1220\n1221\n1222\n1223\n1224\n1225\n1227\n1229\n1230\n1231\n1232\n1233\n1234\n1235\n1236\n1237\n1238\n1239\n1240\n1241\n1242\n1244\n1245\n1246\n1247\n1249\n1250\n1251\n1252\n1253\n1254\n1256\n1257\n1258\n1259\n1260\n1261\n1263\n1265\n1266\n1267\n1268\n1269\n1271\n1272\n1273\n1274\n1277\n1279\n1283\n1287\n1289\n1298\n1299\n1303\n1304\n1305\n1308\n1313\n1318\n1320\n1323\n1324\n1325\n1326\n1327\n1328\n1330\n1332\n1333\n1335\n1337\n1339\n1340\n1341\n1342\n1343\n1344\n1345\n1349\n1350\n1351\n1352\n1353\n1354\n1355\n1356\n1357\n1358\n1359\n1362\n1364\n1369\n1372\n1373\n1376\n1377\n1378\n1380\n1382\n1384\n1385\n1386\n1387\n1388\n1389\n1390\n1391\n1392\n1393\n1396\n1397\n1398\n1399\n1402\n1404\n1405\n1406\n1407\n1408\n1409\n1411\n1412\n1413\n1416\n1417\n1420\n1424\n1425\n1426\n1427\n1428\n1429\n1430\n1431\n1432\n1433\n1434\n1435\n1436\n1437\n1439\n1440\n1442\n1443\n1445\n1446\n1448\n1450\n1452\n1454\n1455\n1457\n1458\n1459\n1460\n1461\n1462\n1463\n1464\n1466\n1469\n1470\n1474\n1475\n1476\n1477\n1482\n1485\n1486\n1487\n1488\n1489\n1491\n1493\n1494\n1495\n1496\n1497\n1499\n1500\n1502\n1503\n1504\n1505\n1506\n1508\n1509\n1511\n1512\n1513\n1514\n1515\n1516\n1517\n1518\n1519\n1520\n1521\n1522\n1523\n1524\n1525\n1526\n1527\n1528\n1529\n1530\n1531\n1532\n1533\n1534\n1535\n1536\n1537\n1538\n1539\n1540\n1541\n1542\n1543\n1544\n1545\n1546\n1547\n1548\n1549\n1550\n1551\n1552\n1553\n1554\n1555\n1556\n1557\n1558\n1559\n1560\n1561\n1562\n1563\n1564\n1565\n1566\n1567\n1568\n1569\n1570\n1571\n1572\n1573\n1574\n1575\n1576\n1577\n1578\n1582\n1583\n1584\n1586\n1587\n1588\n1589\n1590\n1591\n1592\n1594\n1595\n1597\n1598\n1599\n1600\n1603\n1604\n1605\n1611\n1614\n1615\n1616\n1622\n1624\n1626\n1627\n1628\n1629\n1630\n1631\n1632\n1633\n1634\n1636\n1643\n1644\n1652\n1656\n1659\n1662\n1663\n1665\n1667\n1668\n1669\n1671\n1672\n1679\n1681\n1688\n1692\n1693\n1694\n1695\n1696\n1697\n1698\n1700\n1701\n1702\n1703\n1704\n1709\n1712\n1716\n1729\n1739\n1742\n1747\n1748\n1750\n1754\n1755\n
1757\n1758\n1759\n1760\n1761\n1762\n1764\n1767\n1770\n1771\n1773\n1774\n1777\n1778\n1779\n1782\n1783\n1784\n1786\n1787\n1788\n1789\n1790\n1791\n1792\n1793\n1795\n1797\n1798\n1799\n1800\n1803\n1806\n1808\n1809\n1810\n1811\n1814\n1815\n1822\n1824\n1825\n1827\n1831\n1833\n1835\n1836\n1837\n1841\n1842\n1847\n1848\n1850\n1852\n1853\n1854\n1856\n1859\n1860\n1861\n1862\n1864\n1865\n1867\n1874\n1876\n1877\n1878\n1881\n1884\n1891\n1892\n1893\n1895\n1896\n1897\n1898\n1899\n1900\n1901\n1902\n1903\n1904\n1905\n1906\n1907\n1908\n1909\n1910\n1911\n1912\n1913\n1914\n1915\n1916\n1917\n1918\n1919\n1920\n1921\n1922\n1923\n1924\n1925\n1926\n1927\n1928\n1929\n1930\n1931\n1932\n1933\n1934\n1935\n1936\n1937\n1938\n1939\n1940\n1942\n1943\n1944\n1945\n1946\n1947\n1948\n1949\n1950\n1951\n1952\n1953\n1954\n1956\n1959\n1961\n1962\n1963\n1964\n1965\n1966\n1967\n1968\n1969\n1970\n1971\n1972\n1973\n1974\n1975\n1976\n1977\n1978\n1979\n1980\n1981\n1982\n1983\n1984\n1985\n1986\n1987\n1988\n1990\n1992\n1993\n1995\n1996\n1997\n1998\n1999\n2001\n2002\n2004\n2005\n2007\n2008\n2009\n2010\n2011\n2014\n2016\n2017\n2018\n2019\n2021\n2022\n2023\n2026\n2028\n2029\n2030\n2031\n2032\n2033\n2034\n2035\n2036\n2037\n2038\n2039\n2040\n2041\n2042\n2043\n2044\n2045\n2046\n2047\n2048\n2049\n2050\n2051\n2052\n2053\n2054\n2055\n2056\n2058\n2060\n2061\n2062\n2063\n2064\n2065\n2067\n2068\n2069\n2070\n2071\n2072\n2073\n2074\n2075\n2076\n2077\n2078\n2079\n2080\n2081\n2082\n2083\n2084\n2085\n2087\n2088\n2090\n2093\n2094\n2095\n2096\n2100\n2101\n2102\n2103\n2104\n2106\n2107\n2108\n2109\n2110\n2112\n2113\n2114\n2118\n2119\n2120\n2121\n2122\n2123\n2124\n2128\n2129\n2130\n2132\n2134\n2135\n2137\n2138\n2139\n2140\n2141\n2142\n2143\n2144\n2145\n2146\n2147\n2148\n2149\n2150\n2151\n2152\n2153\n2154\n2155\n2156\n2158\n2159\n2163\n2164\n2165\n2167\n2168\n2169\n2172\n2173\n2174\n2176\n2177\n2178\n2180\n2181\n2182\n2183\n2184\n2185\n2187\n2188\n2189\n2190\n2191\n2192\n2193\n2195\n2198\n2199\n2200\n2203\n2206\n2207\n2208\n2209\n2210\n22
11\n2212\n2213\n2214\n2216\n2217\n2219\n2220\n2221\n2222\n2223\n2224\n2225\n2226\n2227\n2228\n2229\n2230\n2231\n2232\n2233\n2234\n2236\n2237\n2238\n2239\n2240\n2241\n2242\n2243\n2244\n2245\n2246\n2247\n2248\n2249\n2250\n2251\n2252\n2253\n2255\n2256\n2257\n2258\n2259\n2260\n2261\n2262\n2263\n2264\n2265\n2266\n2267\n2268\n2269\n2270\n2271\n2272\n2273\n2274\n2275\n2276\n2278\n2279\n2280\n2281\n2282\n2283\n2285\n2287\n2288\n2289\n2291\n2292\n2293\n2294\n2295\n2296\n2297\n2298\n2299\n2300\n2301\n2302\n2303\n2304\n2305\n2306\n2307\n2308\n2309\n2310\n2311\n2312\n2313\n2314\n2315\n2316\n2317\n2318\n2319\n2320\n2321\n2322\n2326\n2328\n2329\n2330\n2331\n2332\n2334\n2335\n2336\n2337\n2338\n2339\n2340\n2341\n2342\n2343\n2344\n2345\n2347\n2348\n2349\n2350\n2351\n2352\n2353\n2356\n2357\n2358\n2359\n2360\n2362\n2363\n2364\n2365\n2368\n2369\n2370\n2372\n2374\n2377\n2380\n2381\n2382\n2383\n2385\n2386\n2387\n2388\n2389\n2390\n2391\n2392\n2393\n2395\n2396\n2397\n2398\n2399\n2400\n2401\n2402\n2403\n2404\n2405\n2407\n2408\n2409\n2410\n2411\n2412\n2413\n2416\n2417\n2419\n2420\n2421\n2422\n2423\n2424\n2425\n2426\n2427\n2428\n2430\n2431\n2432\n2433\n2434\n2436\n2437\n2438\n2439\n2441\n2444\n2445\n2447\n2448\n2449\n2450\n2452\n2453\n2454\n2456\n2459\n2461\n2463\n2465\n2469\n2470\n2471\n2472\n2473\n2474\n2494\n2495\n2497\n2498\n2499\n2500\n2505\n2509\n2512\n2513\n2515\n2519\n2520\n2522\n2523\n2525\n2526\n2528\n2530\n2531\n2532\n2533\n2534\n2536\n2537\n2538\n2540\n2542\n2544\n2545\n2547\n2548\n2549\n2557\n2558\n2561\n2562\n2563\n2565\n2567\n2568\n2569\n2570\n2571\n2572\n2573\n2578\n2587\n2588\n2589\n2590\n2595\n2597\n2598\n2609\n2612\n2613\n2615\n2616\n2617\n2618\n2625\n2626\n2627\n2628\n2630\n2631\n2635\n2638\n2639\n2641\n2642\n2644\n2645\n2649\n2654\n2655\n2656\n2658\n2659\n2660\n2663\n2664\n2665\n2666\n2668\n2669\n2670\n2672\n2674\n2675\n2677\n2679\n2680\n2681\n2682\n2683\n2684\n2686\n2689\n2691\n2692\n2693\n2694\n2696\n2699\n2702\n2705\n2706\n2707\n2708\n2712\n2715\n2722\n2723\n2724\n2725
\n2727\n2728\n2730\n2731\n2732\n2734\n2737\n2738\n2739\n2741\n2742\n2743\n2745\n2747\n2748\n2749\n2750\n2752\n2760\n2761\n2762\n2764\n2767\n2770\n2774\n2778\n2780\n2791\n2795\n2796\n2805\n2810\n2812\n2814\n2815\n2818\n2820\n2828\n2829\n2832\n2833\n2835\n2837\n2840\n2843\n2844\n2845\n2852\n2859\n2860\n2861\n2862\n2863\n2864\n2865\n2866\n2867\n2868\n2869\n2870\n2871\n2872\n2874\n2875\n2876\n2878\n2879\n2880\n2881\n2882\n2884\n2885\n2886\n2888\n2889\n2890\n2891\n2892\n2893\n2894\n2895\n2897\n2899\n2900\n2903\n2904\n2907\n2910\n2913\n2914\n2916\n2923\n2926\n2932\n2933\n2940\n2944\n2945\n2947\n2949\n2950\n2953\n2955\n2956\n2957\n2958\n2959\n2960\n2963\n2964\n2967\n2970\n2974\n2976\n2979\n2980\n2982\n2984\n2985\n2989\n2990\n2991\n2992\n2993\n2994\n2996\n2999\n3000\n3002\n3005\n3007\n3008\n3009\n3010\n3012\n3013\n3014\n3018\n3019\n3020\n3022\n3024\n3025\n3026\n3027\n3028\n3029\n3030\n3033\n3035\n3036\n3039\n3040\n3042\n3043\n3046\n3047\n3048\n3051\n3053\n3055\n3056\n3059\n3060\n3067\n3069\n3074\n3079\n3086\n3088\n3091\n3093\n3094\n3106\n3111\n3117\n3125\n3129\n3134\n3135\n3136\n3137\n3138\n3139\n3140\n3141\n3142\n3143\n3144\n3145\n3146\n3148\n3149\n3150\n3151\n3153\n3154\n3159\n3160\n3161\n3164\n3165\n3166\n3168\n3169\n3170\n3171\n3172\n3173\n3176\n3177\n3182\n3188\n3191\n3192\n3193\n3194\n3195\n3196\n3200\n3201\n3202\n3203\n3204\n3205\n3206\n3207\n3208\n3209\n3210\n3214\n3218\n3219\n3220\n3221\n3222\n3223\n3225\n3226\n3227\n3228\n3229\n3230\n3231\n3232\n3234\n3235\n3236\n3237\n3238\n3239\n3240\n3241\n3242\n3243\n3244\n3245\n3246\n3247\n3248\n3253\n3258\n3259\n3260\n3261\n3262\n3264\n3265\n3266\n3267\n3268\n3270\n3271\n3273\n3274\n3277\n3278\n3279\n3280\n3281\n3282\n3283\n3284\n3285\n3288\n3289\n3291\n3292\n3296\n3297\n3298\n3299\n3301\n3302\n3304\n3305\n3306\n3307\n3308\n3309\n3310\n3311\n3312\n3315\n3316\n3318\n3320\n3321\n3322\n3324\n3325\n3327\n3328\n3329\n3330\n3332\n3333\n3334\n3335\n3337\n3339\n3340\n3341\n3342\n3343\n3344\n3345\n3348\n3349\n3351\n3352\n3353\n3354\n
3355\n3356\n3358\n3360\n3361\n3362\n3363\n3365\n3366\n3368\n3371\n3373\n3375\n3376\n3377\n3378\n3379\n3380\n3381\n3382\n3383\n3384\n3389\n3390\n3392\n3397\n3398\n3400\n3401\n3404\n3405\n3406\n3407\n3408\n3409\n3410\n3411\n3412\n3413\n3415\n3416\n3417\n3419\n3421\n3424\n3425\n3426\n3427\n3428\n3429\n3430\n3431\n3432\n3433\n3434\n3435\n3436\n3438\n3439\n3440\n3441\n3444\n3446\n3448\n3450\n3451\n3452\n3454\n3455\n3456\n3458\n3459\n3461\n3462\n3463\n3466\n3467\n3468\n3469\n3471\n3472\n3473\n3474\n3475\n3476\n3477\n3478\n3479\n3481\n3482\n3485\n3492\n3493\n3494\n3495\n3497\n3498\n3499\n3500\n3501\n3502\n3503\n3505\n3509\n3510\n3511\n3512\n3513\n3517\n3518\n3519\n3520\n3521\n3522\n3526\n3527\n3528\n3533\n3536\n3544\n3546\n3547\n3553\n3554\n3555\n3556\n3559\n3560\n3562\n3563\n3565\n3566\n3567\n3568\n3569\n3574\n3575\n3576\n3584\n3585\n3587\n3599\n3600\n3601\n3602\n3603\n3604\n3605\n3606\n3608\n3609\n3610\n3612\n3613\n3614\n3615\n3616\n3619\n3622\n3623\n3624\n3625\n3627\n3628\n3629\n3630\n3632\n3633\n3634\n3635\n3636\n3638\n3640\n3641\n3644\n3646\n3649\n3650\n3651\n3655\n3656\n3659\n3660\n3662\n3663\n3665\n3671\n3673\n3674\n3683\n3684\n3686\n3687\n3688\n3689\n3690\n3692\n3694\n3695\n3702\n3705\n3707\n3709\n3711\n3714\n3715\n3716\n3720\n3725\n3727\n3731\n3733\n3736\n3737\n3738\n3744\n3746\n3747\n3750\n3753\n3756\n3758\n3761\n3763\n3764\n3765\n3766\n3767\n3768\n3769\n3770\n3771\n3772\n3773\n3774\n3775\n3782\n3785\n3787\n3790\n3798\n3801\n3803\n3812\n3814\n3815\n3816\n3817\n3818\n3819\n3825\n3826\n3827\n3828\n3829\n3832\n3833\n3836\n3837\n3838\n3840\n3842\n3844\n3845\n3846\n3852\n3853\n3854\n3855\n3858\n3860\n3864\n3865\n3867\n3868\n3873\n3874\n3877\n3882\n3883\n3884\n3887\n3888\n3889\n3890\n3894\n3899\n3900\n3901\n3902\n3904\n3908\n3910\n3916\n3918\n3920\n3925\n3928\n3936\n3937\n3939\n3943\n3947\n3948\n3949\n3950\n3951\n3956\n3962\n3963\n3968\n3969\n3970\n3971\n3972\n3974\n3975\n3976\n3977\n3984\n3986\n3988\n3991\n4001\n4005\n4006\n4007\n4009\n4018\n4019\n4020\n4021\n4022\n40
23\n4024\n4026\n4028\n4030\n4031\n4032\n4033\n4036\n4038\n4039\n4040\n4041\n4042\n4043\n4062\n4063\n4065\n4066\n4067\n4068\n4071\n4073\n4074\n4075\n4089\n4090\n4094\n4096\n4097\n4099\n4100\n4101\n4102\n4104\n4105\n4107\n4109\n4110\n4112\n4118\n4120\n4129\n4136\n4137\n4138\n4139\n4140\n4141\n4142\n4143\n4144\n4148\n4150\n4151\n4152\n4153\n4154\n4155\n4158\n4159\n4161\n4165\n4167\n4171\n4174\n4176\n4178\n4179\n4181\n4182\n4183\n4185\n4187\n4189\n4190\n4191\n4192\n4198\n4202\n4203\n4204\n4205\n4206\n4207\n4208\n4210\n4211\n4212\n4213\n4214\n4215\n4216\n4217\n4219\n4221\n4222\n4223\n4226\n4227\n4230\n4232\n4233\n4235\n4237\n4242\n4244\n4248\n4249\n4250\n4251\n4252\n4253\n4254\n4255\n4256\n4259\n4261\n4262\n4263\n4264\n4265\n4266\n4267\n4269\n4270\n4272\n4273\n4274\n4276\n4277\n4278\n4280\n4281\n4282\n4283\n4284\n4285\n4290\n4292\n4296\n4297\n4298\n4299\n4301\n4304\n4306\n4307\n4308\n4309\n4310\n4311\n4312\n4313\n4315\n4317\n4318\n4321\n4323\n4324\n4325\n4326\n4327\n4328\n4329\n4330\n4331\n4332\n4334\n4335\n4336\n4338\n4340\n4341\n4344\n4345\n4346\n4349\n4350\n4351\n4352\n4354\n4355\n4356\n4358\n4361\n4362\n4363\n4365\n4366\n4369\n4373\n4374\n4378\n4379\n4380\n4386\n4389\n4390\n4391\n4395\n4396\n4399\n4400\n4401\n4403\n4404\n4406\n4407\n4408\n4410\n4412\n4414\n4416\n4417\n4418\n4419\n4420\n4421\n4423\n4425\n4426\n4427\n4428\n4430\n4431\n4432\n4434\n4435\n4436\n4438\n4439\n4440\n4441\n4442\n4444\n4445\n4450\n4451\n4453\n4454\n4455\n4456\n4458\n4459\n4462\n4463\n4464\n4465\n4466\n4467\n4468\n4469\n4470\n4471\n4473\n4474\n4475\n4476\n4477\n4478\n4479\n4481\n4483\n4484\n4485\n4486\n4487\n4489\n4490\n4491\n4493\n4494\n4495\n4496\n4497\n4498\n4499\n4500\n4501\n4504\n4505\n4506\n4508\n4509\n4510\n4511\n4512\n4515\n4518\n4519\n4521\n4522\n4529\n4530\n4531\n4533\n4536\n4538\n4539\n4540\n4542\n4543\n4544\n4545\n4546\n4547\n4549\n4550\n4551\n4552\n4555\n4556\n4559\n4560\n4561\n4562\n4565\n4567\n4568\n4569\n4570\n4571\n4572\n4574\n4576\n4577\n4579\n4580\n4583\n4585\n4587\n4588\n4591
\n4594\n4595\n4596\n4599\n4600\n4603\n4604\n4605\n4606\n4608\n4609\n4610\n4611\n4612\n4613\n4614\n4617\n4618\n4619\n4620\n4621\n4622\n4623\n4624\n4625\n4626\n4627\n4628\n4629\n4631\n4632\n4633\n4634\n4635\n4636\n4639\n4640\n4641\n4642\n4646\n4647\n4648\n4649\n4650\n4651\n4652\n4655\n4656\n4662\n4663\n4664\n4665\n4666\n4667\n4668\n4669\n4670\n4671\n4672\n4676\n4677\n4678\n4679\n4680\n4681\n4683\n4685\n4686\n4687\n4688\n4690\n4691\n4692\n4694\n4695\n4696\n4699\n4702\n4705\n4708\n4709\n4710\n4711\n4712\n4714\n4715\n4716\n4717\n4719\n4722\n4723\n4724\n4725\n4726\n4727\n4728\n4729\n4730\n4732\n4733\n4734\n4736\n4737\n4739\n4740\n4743\n4746\n4748\n4750\n4751\n4752\n4756\n4758\n4759\n4760\n4761\n4762\n4768\n4770\n4771\n4773\n4774\n4775\n4777\n4778\n4779\n4780\n4781\n4783\n4789\n4790\n4793\n4795\n4797\n4798\n4799\n4800\n4801\n4802\n4804\n4806\n4807\n4808\n4812\n4813\n4814\n4815\n4816\n4818\n4819\n4824\n4829\n4831\n4833\n4836\n4837\n4839\n4840\n4842\n4843\n4844\n4847\n4848\n4849\n4851\n4852\n4853\n4854\n4855\n4860\n4861\n4863\n4864\n4865\n4866\n4867\n4869\n4871\n4874\n4875\n4877\n4878\n4879\n4880\n4883\n4884\n4885\n4886\n4887\n4888\n4890\n4894\n4895\n4896\n4897\n4900\n4901\n4903\n4905\n4906\n4908\n4909\n4910\n4912\n4913\n4916\n4917\n4921\n4922\n4923\n4924\n4925\n4926\n4927\n4928\n4929\n4931\n4932\n4933\n4934\n4935\n4936\n4938\n4939\n4940\n4941\n4942\n4943\n4945\n4946\n4947\n4950\n4951\n4953\n4957\n4958\n4960\n4961\n4964\n4965\n4967\n4968\n4970\n4972\n4973\n4976\n4977\n4978\n4979\n4980\n4981\n4982\n4984\n4985\n4986\n4987\n4989\n4990\n4991\n4993\n4994\n4998\n4999\n5001\n5002\n5003\n5004\n5005\n5007\n5008\n5009\n5011\n5012\n5016\n5017\n5020\n5021\n5022\n5023\n5025\n5026\n5027\n5028\n5029\n5031\n5033\n5034\n5037\n5038\n5039\n5041\n5042\n5043\n5046\n5047\n5048\n5051\n5055\n5057\n5060\n5061\n5062\n5063\n5064\n5065\n5068\n5071\n5072\n5073\n5076\n5078\n5079\n5081\n5083\n5084\n5086\n5088\n5090\n5091\n5092\n5093\n5094\n5096\n5098\n5100\n5101\n5102\n5104\n5105\n5109\n5111\n5112\n5114\n
5115\n5117\n5119\n5120\n5121\n5122\n5123\n5124\n5125\n5126\n5127\n5129\n5130\n5131\n5132\n5133\n5134\n5135\n5137\n5138\n5139\n5141\n5142\n5143\n5144\n5146\n5148\n5149\n5151\n5153\n5154\n5156\n5157\n5158\n5162\n5163\n5165\n5167\n5168\n5172\n5174\n5175\n5176\n5178\n5179\n5180\n5181\n5183\n5184\n5185\n5186\n5187\n5189\n5191\n5193\n5195\n5196\n5198\n5199\n5201\n5202\n5203\n5204\n5205\n5206\n5207\n5208\n5209\n5210\n5211\n5212\n5213\n5215\n5216\n5217\n5218\n5219\n5221\n5222\n5223\n5224\n5225\n5226\n5227\n5231\n5234\n5235\n5237\n5239\n5240\n5247\n5248\n5249\n5250\n5253\n5254\n5255\n5256\n5258\n5259\n5264\n5265\n5266\n5267\n5269\n5270\n5272\n5273\n5275\n5277\n5278\n5282\n5284\n5288\n5290\n5291\n5292\n5293\n5294\n5295\n5296\n5297\n5298\n5299\n5300\n5301\n5302\n5306\n5307\n5311\n5312\n5313\n5314\n5315\n5316\n5317\n5319\n5320\n5321\n5322\n5323\n5326\n5328\n5329\n5330\n5331\n5332\n5333\n5334\n5335\n5336\n5338\n5339\n5340\n5341\n5343\n5344\n5345\n5346\n5347\n5348\n5353\n5357\n5358\n5360\n5362\n5363\n5364\n5369\n5372\n5373\n5375\n5377\n5378\n5379\n5381\n5385\n5386\n5387\n5388\n5389\n5390\n5391\n5392\n5393\n5395\n5398\n5399\n5400\n5401\n5402\n5403\n5406\n5407\n5410\n5411\n5412\n5413\n5417\n5418\n5419\n5420\n5421\n5422\n5423\n5425\n5426\n5427\n5428\n5429\n5430\n5431\n5432\n5434\n5435\n5437\n5439\n5441\n5443\n5444\n5445\n5446\n5447\n5448\n5450\n5451\n5454\n5455\n5456\n5461\n5463\n5466\n5467\n5471\n5472\n5473\n5474\n5475\n5476\n5477\n5478\n5481\n5482\n5483\n5484\n5485\n5486\n5487\n5488\n5489\n5491\n5493\n5494\n5495\n5496\n5497\n5498\n5499\n5501\n5503\n5504\n5505\n5506\n5507\n5508\n5510\n5511\n5514\n5515\n5517\n5519\n5520\n5521\n5522\n5524\n5529\n5530\n5531\n5532\n5535\n5538\n5540\n5541\n5542\n5544\n5547\n5548\n5549\n5550\n5551\n5552\n5553\n5554\n5555\n5557\n5561\n5563\n5564\n5565\n5566\n5567\n5568\n5569\n5570\n5572\n5574\n5575\n5576\n5577\n5578\n5579\n5580\n5583\n5584\n5586\n5590\n5591\n5592\n5593\n5594\n5595\n5596\n5597\n5598\n5603\n5604\n5606\n5607\n5608\n5609\n5610\n5612\n5613\n56
14\n5615\n5617\n5619\n5620\n5621\n5622\n5623\n5624\n5625\n5626\n5627\n5629\n5630\n5631\n5633\n5634\n5635\n5636\n5638\n5639\n5642\n5643\n5647\n5652\n5654\n5656\n5657\n5658\n5659\n5660\n5661\n5663\n5664\n5665\n5667\n5669\n5671\n5672\n5673\n5674\n5676\n5677\n5682\n5683\n5685\n5688\n5690\n5691\n5692\n5694\n5695\n5696\n5697\n5698\n5699\n5701\n5702\n5703\n5704\n5705\n5708\n5709\n5711\n5712\n5713\n5714\n5715\n5716\n5717\n5718\n5725\n5727\n5729\n5736\n5737\n5738\n5741\n5742\n5743\n5748\n5752\n5753\n5754\n5755\n5757\n5758\n5759\n5760\n5761\n5764\n5765\n5766\n5767\n5768\n5769\n5770\n5772\n5773\n5774\n5776\n5777\n5778\n5779\n5782\n5784\n5785\n5786\n5787\n5788\n5789\n5790\n5791\n5792\n5793\n5797\n5798\n5802\n5803\n5804\n5805\n5807\n5808\n5809\n5810\n5811\n5812\n5814\n5816\n5817\n5818\n5823\n5824\n5825\n5828\n5829\n5830\n5831\n5832\n5836\n5837\n5841\n5843\n5845\n5846\n5847\n5848\n5849\n5850\n5851\n5853\n5855\n5857\n5858\n5859\n5860\n5861\n5862\n5863\n5866\n5867\n5868\n5871\n5872\n5873\n5874\n5875\n5879\n5881\n5884\n5885\n5887\n5888\n5891\n5892\n5893\n5896\n5897\n5898\n5899\n5900\n5902\n5904\n5905\n5906\n5907\n5910\n5911\n5912\n5913\n5914\n5915\n5918\n5919\n5920\n5921\n5922\n5924\n5927\n5928\n5931\n5932\n5934\n5935\n5940\n5941\n5942\n5944\n5947\n5949\n5950\n5951\n5952\n5954\n5955\n5956\n5957\n5960\n5961\n5962\n5964\n5965\n5967\n5968\n5969\n5973\n5974\n5976\n5977\n5980\n5981\n5985\n5986\n5987\n5988\n5990\n5991\n5994\n5995\n5996\n5997\n5998\n5999\n6001\n6003\n6004\n6005\n6006\n6008\n6009\n6010\n6012\n6013\n6015\n6016\n6017\n6020\n6021\n6023\n6024\n6025\n6026\n6027\n6028\n6029\n6030\n6032\n6033\n6037\n6040\n6041\n6042\n6043\n6044\n6046\n6047\n6048\n6049\n6050\n6054\n6055\n6056\n6057\n6063\n6065\n6069\n6070\n6072\n6075\n6076\n6077\n6079\n6082\n6083\n6084\n6086\n6087\n6092\n6099\n6102\n6103\n6105\n6109\n6110\n6111\n6114\n6115\n6116\n6118\n6120\n6122\n6124\n6125\n6128\n6129\n6134\n6139\n6140\n6144\n6146\n6147\n6148\n6152\n6153\n6154\n6157\n6158\n6160\n6167\n6168\n6173\n6174\n6175\n6177
\n6179\n6180\n6184\n6190\n6191\n6192\n6198\n6201\n6202\n6203\n6204\n6205\n6207\n6210\n6211\n6212\n6214\n6215\n6216\n6217\n6219\n6224\n6225\n6226\n6227\n6228\n6230\n6232\n6234\n6235\n6236\n6237\n6238\n6239\n6241\n6242\n6243\n6248\n6251\n6252\n6253\n6255\n6256\n6259\n6260\n6262\n6266\n6270\n6272\n6273\n6274\n6275\n6281\n6284\n6285\n6286\n6288\n6289\n6290\n6291\n6294\n6297\n6298\n6299\n6300\n6301\n6302\n6303\n6304\n6305\n6306\n6307\n6308\n6309\n6312\n6315\n6319\n6321\n6325\n6326\n6327\n6330\n6331\n6334\n6335\n6336\n6338\n6339\n6340\n6341\n6342\n6343\n6344\n6345\n6347\n6348\n6349\n6350\n6352\n6355\n6356\n6359\n6362\n6363\n6364\n6365\n6367\n6372\n6376\n6378\n6379\n6383\n6385\n6386\n6387\n6388\n6389\n6390\n6392\n6393\n6394\n6395\n6396\n6397\n6398\n6399\n6400\n6401\n6404\n6405\n6407\n6408\n6411\n6412\n6414\n6417\n6418\n6420\n6421\n6422\n6423\n6425\n6426\n6430\n6431\n6433\n6435\n6437\n6439\n6440\n6441\n6442\n6444\n6447\n6448\n6449\n6450\n6451\n6452\n6453\n6454\n6455\n6456\n6458\n6459\n6460\n6462\n6464\n6465\n6467\n6468\n6469\n6470\n6471\n6474\n6475\n6477\n6478\n6479\n6480\n6481\n6482\n6483\n6488\n6490\n6492\n6493\n6495\n6496\n6499\n6500\n6503\n6505\n6506\n6510\n6511\n6513\n6514\n6515\n6517\n6518\n6521\n6522\n6523\n6527\n6531\n6533\n6534\n6535\n6536\n6537\n6540\n6541\n6545\n6546\n6547\n6550\n6551\n6553\n6554\n6556\n6558\n6559\n6560\n6561\n6562\n6563\n6567\n6568\n6571\n6572\n6573\n6574\n6575\n6576\n6577\n6578\n6579\n6583\n6587\n6589\n6590\n6591\n6593\n6594\n6595\n6596\n6597\n6598\n6600\n6601\n6602\n6604\n6605\n6608\n6611\n6612\n6613\n6614\n6615\n6616\n6617\n6618\n6619\n6620\n6621\n6622\n6623\n6629\n6632\n6636\n6638\n6639\n6640\n6643\n6648\n6649\n6651\n6653\n6654\n6655\n6658\n6660\n6661\n6662\n6663\n6665\n6667\n6668\n6669\n6670\n6673\n6674\n6675\n6676\n6677\n6678\n6679\n6681\n6682\n6683\n6686\n6687\n6691\n6692\n6693\n6694\n6695\n6696\n6698\n6700\n6702\n6703\n6705\n6706\n6707\n6708\n6709\n6710\n6712\n6713\n6715\n6716\n6718\n6720\n6721\n6722\n6723\n6725\n6726\n6728\n6735\n6737\n
6739\n6740\n6741\n6743\n6744\n6745\n6746\n6747\n6748\n6749\n6751\n6752\n6753\n6754\n6757\n6758\n6763\n6764\n6765\n6766\n6767\n6768\n6770\n6772\n6773\n6774\n6775\n6776\n6778\n6779\n6781\n6783\n6784\n6785\n6786\n6787\n6788\n6791\n6794\n6795\n6797\n6798\n6799\n6800\n6804\n6805\n6806\n6807\n6808\n6809\n6810\n6813\n6814\n6815\n6820\n6822\n6823\n6825\n6826\n6829\n6830\n6831\n6833\n6834\n6837\n6838\n6840\n6841\n6846\n6847\n6850\n6851\n6855\n6857\n6858\n6860\n6863\n6864\n6865\n6866\n6867\n6868\n6870\n6875\n6876\n6877\n6878\n6879\n6880\n6882\n6885\n6886\n6887\n6889\n6890\n6892\n6894\n6898\n6900\n6901\n6902\n6905\n6908\n6909\n6912\n6915\n6916\n6917\n6919\n6920\n6925\n6926\n6928\n6929\n6930\n6931\n6932\n6934\n6935\n6936\n6937\n6939\n6940\n6941\n6944\n6945\n6946\n6950\n6951\n6952\n6953\n6954\n6956\n6958\n6959\n6960\n6961\n6964\n6965\n6966\n6968\n6969\n6973\n6974\n6978\n6980\n6981\n6982\n6985\n6986\n6987\n6990\n6991\n6993\n6994\n6995\n6996\n6997\n6998\n6999\n7000\n7002\n7003\n7004\n7009\n7010\n7011\n7013\n7017\n7018\n7019\n7025\n7026\n7029\n7031\n7038\n7039\n7041\n7042\n7044\n7045\n7046\n7048\n7049\n7050\n7051\n7052\n7055\n7056\n7057\n7059\n7062\n7063\n7064\n7066\n7068\n7069\n7072\n7073\n7075\n7076\n7077\n7078\n7079\n7081\n7082\n7083\n7084\n7085\n7087\n7088\n7090\n7091\n7092\n7093\n7095\n7096\n7097\n7098\n7099\n7100\n7101\n7103\n7104\n7107\n7108\n7110\n7111\n7112\n7113\n7115\n7116\n7117\n7118\n7120\n7121\n7122\n7123\n7126\n7127\n7128\n7129\n7134\n7135\n7136\n7137\n7138\n7142\n7150\n7152\n7153\n7154\n7155\n7156\n7158\n7160\n7161\n7162\n7163\n7164\n7165\n7166\n7167\n7168\n7169\n7170\n7171\n7172\n7173\n7175\n7176\n7177\n7178\n7180\n7181\n7182\n7183\n7186\n7189\n7192\n7193\n7194\n7195\n7196\n7198\n7199\n7200\n7201\n7202\n7203\n7204\n7205\n7206\n7207\n7208\n7212\n7213\n7214\n7215\n7216\n7217\n7218\n7219\n7220\n7222\n7223\n7224\n7225\n7226\n7228\n7230\n7231\n7232\n7237\n7238\n7239\n7241\n7242\n7243\n7244\n7245\n7246\n7247\n7250\n7254\n7256\n7257\n7258\n7259\n7260\n7261\n7263\n7264\n72
66\n7267\n7268\n7270\n7271\n7273\n7276\n7277\n7278\n7279\n7280\n7282\n7283\n7284\n7285\n7286\n7287\n7288\n7289\n7290\n7291\n7292\n7293\n7294\n7297\n7299\n7301\n7302\n7305\n7306\n7307\n7309\n7310\n7313\n7314\n7315\n7316\n7317\n7318\n7319\n7321\n7322\n7323\n7324\n7325\n7326\n7327\n7329\n7332\n7333\n7334\n7335\n7336\n7337\n7338\n7340\n7341\n7342\n7344\n7346\n7348\n7349\n7350\n7353\n7354\n7357\n7358\n7363\n7364\n7365\n7370\n7372\n7373\n7375\n7378\n7379\n7380\n7382\n7385\n7386\n7388\n7390\n7391\n7393\n7394\n7396\n7400\n7403\n7406\n7412\n7418\n7419\n7420\n7422\n7424\n7425\n7427\n7428\n7432\n7435\n7436\n7437\n7438\n7440\n7441\n7442\n7443\n7445\n7449\n7450\n7451\n7452\n7454\n7455\n7458\n7459\n7460\n7461\n7462\n7463\n7464\n7465\n7466\n7467\n7469\n7470\n7471\n7472\n7473\n7474\n7475\n7476\n7478\n7479\n7482\n7484\n7485\n7486\n7491\n7492\n7494\n7496\n7497\n7498\n7502\n7503\n7504\n7505\n7506\n7507\n7511\n7513\n7514\n7516\n7517\n7518\n7520\n7521\n7523\n7524\n7525\n7526\n7528\n7530\n7533\n7536\n7539\n7540\n7541\n7542\n7546\n7548\n7551\n7552\n7554\n7556\n7557\n7558\n7559\n7561\n7562\n7563\n7564\n7565\n7566\n7567\n7568\n7570\n7571\n7573\n7574\n7575\n7578\n7584\n7585\n7587\n7590\n7591\n7592\n7595\n7596\n7597\n7601\n7603\n7604\n7606\n7607\n7608\n7610\n7612\n7613\n7616\n7617\n7619\n7622\n7623\n7625\n7626\n7628\n7629\n7630\n7631\n7634\n7637\n7638\n7641\n7642\n7644\n7646\n7650\n7651\n7652\n7655\n7656\n7657\n7658\n7659\n7660\n7661\n7663\n7664\n7665\n7666\n7671\n7672\n7673\n7674\n7679\n7681\n7682\n7685\n7686\n7688\n7690\n7691\n7693\n7694\n7696\n7698\n7703\n7704\n7705\n7707\n7708\n7710\n7711\n7712\n7713\n7715\n7716\n7717\n7718\n7719\n7721\n7722\n7723\n7724\n7725\n7727\n7728\n7729\n7730\n7731\n7732\n7733\n7734\n7736\n7738\n7739\n7740\n7741\n7742\n7746\n7749\n7751\n7753\n7755\n7756\n7757\n7758\n7759\n7760\n7763\n7764\n7768\n7769\n7770\n7773\n7775\n7777\n7778\n7779\n7783\n7785\n7786\n7787\n7788\n7789\n7792\n7793\n7794\n7795\n7798\n7799\n7801\n7805\n7806\n7810\n7813\n7815\n7818\n7820\n7824\n7828
\n7830\n7832\n7834\n7835\n7837\n7841\n7843\n7844\n7849\n7852\n7854\n7855\n7856\n7858\n7860\n7862\n7864\n7867\n7868\n7871\n7872\n7873\n7874\n7876\n7878\n7881\n7882\n7884\n7886\n7887\n7889\n7891\n7892\n7894\n7895\n7896\n7902\n7903\n7904\n7905\n7906\n7908\n7911\n7913\n7914\n7915\n7917\n7918\n7919\n7920\n7921\n7923\n7924\n7927\n7928\n7929\n7931\n7934\n7935\n7937\n7938\n7939\n7940\n7941\n7942\n7943\n7944\n7949\n7950\n7951\n7952\n7953\n7954\n7955\n7959\n7962\n7963\n7964\n7966\n7969\n7972\n7973\n7976\n7977\n7981\n7982\n7983\n7984\n7987\n7988\n7989\n7990\n7991\n7992\n7994\n7995\n7997\n7998\n7999\n8000\n8001\n8004\n8005\n8006\n8007\n8008\n8009\n8012\n8017\n8019\n8020\n8021\n8022\n8023\n8024\n8025\n8027\n8028\n8029\n8031\n8033\n8034\n8035\n8036\n8037\n8038\n8039\n8040\n8042\n8043\n8044\n8045\n8046\n8050\n8051\n8052\n8054\n8056\n8060\n8061\n8062\n8064\n8065\n8066\n8068\n8070\n8071\n8072\n8074\n8077\n8078\n8080\n8081\n8082\n8084\n8086\n8087\n8089\n8090\n8093\n8098\n8099\n8101\n8104\n8105\n8106\n8110\n8112\n8113\n8114\n8115\n8116\n8119\n8120\n8121\n8124\n8125\n8126\n8127\n8129\n8131\n8133\n8136\n8138\n8139\n8140\n8141\n8142\n8144\n8145\n8147\n8149\n8150\n8151\n8153\n8154\n8155\n8156\n8157\n8159\n8161\n8162\n8163\n8164\n8166\n8168\n8170\n8171\n8172\n8173\n8174\n8175\n8177\n8178\n8179\n8182\n8183\n8184\n8186\n8191\n8193\n8195\n8197\n8198\n8199\n8201\n8202\n8203\n8204\n8205\n8206\n8207\n8208\n8210\n8211\n8212\n8213\n8215\n8216\n8218\n8220\n8221\n8222\n8225\n8229\n8230\n8231\n8232\n8233\n8236\n8237\n8239\n8240\n8242\n8243\n8244\n8245\n8246\n8250\n8251\n8252\n8254\n8255\n8256\n8257\n8258\n8259\n8261\n8263\n8264\n8267\n8268\n8271\n8272\n8273\n8275\n8276\n8278\n8281\n8282\n8285\n8286\n8288\n8289\n8290\n8294\n8295\n8297\n8298\n8299\n8300\n8303\n8307\n8309\n8310\n8312\n8313\n8315\n8318\n8320\n8322\n8325\n8326\n8327\n8328\n8329\n8330\n8332\n8333\n8335\n8337\n8345\n8346\n8347\n8348\n8352\n8354\n8360\n8362\n8364\n8365\n8368\n8371\n8375\n8376\n8378\n8380\n8381\n8382\n8386\n8388\n8389\n8390\n
8392\n8393\n8394\n8396\n8397\n8398\n8399\n8400\n8401\n8402\n8403\n8404\n8405\n8407\n8408\n8409\n8410\n8412\n8414\n8416\n8417\n8418\n8419\n8420\n8421\n8422\n8426\n8428\n8430\n8432\n8433\n8434\n8435\n8436\n8437\n8439\n8440\n8446\n8447\n8448\n8449\n8450\n8451\n8452\n8453\n8454\n8456\n8460\n8462\n8463\n8464\n8467\n8468\n8469\n8470\n8472\n8473\n8474\n8477\n8478\n8481\n8482\n8483\n8484\n8485\n8486\n8490\n8491\n8492\n8493\n8494\n8495\n8496\n8497\n8498\n8500\n8501\n8502\n8503\n8505\n8506\n8508\n8509\n8510\n8511\n8512\n8513\n8516\n8521\n8522\n8524\n8526\n8529\n8531\n8532\n8536\n8538\n8539\n8540\n8541\n8542\n8543\n8547\n8548\n8549\n8552\n8553\n8555\n8556\n8557\n8560\n8561\n8562\n8564\n8565\n8568\n8569\n8570\n8571\n8572\n8573\n8577\n8578\n8580\n8581\n8583\n8584\n8586\n8588\n8589\n8590\n8591\n8593\n8594\n8596\n8597\n8598\n8599\n8600\n8601\n8602\n8603\n8604\n8606\n8607\n8610\n8611\n8613\n8615\n8622\n8625\n8626\n8627\n8628\n8629\n8632\n8636\n8638\n8639\n8641\n8643\n8645\n8646\n8647\n8648\n8649\n8650\n8651\n8652\n8653\n8654\n8655\n8656\n8657\n8658\n8662\n8663\n8664\n8665\n8666\n8667\n8668\n8669\n8670\n8671\n8672\n8673\n8674\n8675\n8676\n8677\n8678\n8679\n8680\n8681\n8682\n8684\n8685\n8686\n8690\n8691\n8692\n8693\n8694\n8695\n8702\n8707\n8708\n8709\n8710\n8711\n8712\n8713\n8715\n8716\n8720\n8723\n8724\n8725\n8728\n8732\n8733\n8737\n8738\n8739\n8740\n8741\n8745\n8746\n8750\n8752\n8753\n8754\n8756\n8757\n8758\n8759\n8761\n8762\n8763\n8766\n8768\n8770\n8771\n8772\n8773\n8775\n8776\n8780\n8781\n8783\n8784\n8785\n8786\n8787\n8788\n8793\n8795\n8797\n8798\n8801\n8803\n8804\n8806\n8807\n8810\n8812\n8814\n8815\n8817\n8820\n8823\n8824\n8826\n8827\n8828\n8829\n8830\n8831\n8833\n8835\n8838\n8839\n8842\n8843\n8845\n8846\n8847\n8848\n8849\n8851\n8854\n8856\n8857\n8858\n8860\n8861\n8863\n8864\n8867\n8869\n8870\n8871\n8872\n8875\n8876\n8878\n8879\n8883\n8884\n8886\n8887\n8888\n8890\n8891\n8892\n8894\n8896\n8897\n8898\n8899\n8900\n8901\n8902\n8903\n8905\n8906\n8908\n8910\n8914\n8915\n8916\n8917\n89
18\n8919\n8922\n8923\n8924\n8925\n8926\n8927\n8929\n8931\n8932\n8934\n8936\n8937\n8938\n8939\n8942\n8943\n8944\n8945\n8947\n8948\n8950\n8951\n8954\n8956\n8957\n8959\n8962\n8965\n8966\n8967\n8968\n8969\n8970\n8971\n8976\n8977\n8980\n8981\n8982\n8983\n8984\n8985\n8986\n8987\n8989\n8990\n8991\n8992\n8993\n8994\n8995\n9000\n9001\n9003\n9006\n9007\n9011\n9012\n9013\n9014\n9015\n9019\n9022\n9023\n9024\n9025\n9026\n9028\n9029\n9030\n9031\n9032\n9033\n9034\n9036\n9037\n9039\n9042\n9043\n9047\n9049\n9050\n9051\n9052\n9054\n9055\n9056\n9057\n9058\n9059\n9060\n9061\n9062\n9064\n9065\n9066\n9070\n9071\n9072\n9073\n9074\n9079\n9080\n9081\n9082\n9083\n9087\n9088\n9092\n9093\n9094\n9096\n9097\n9098\n9100\n9101\n9104\n9105\n9106\n9107\n9108\n9109\n9110\n9111\n9112\n9116\n9118\n9119\n9123\n9128\n9130\n9131\n9132\n9133\n9134\n9138\n9139\n9140\n9141\n9142\n9144\n9146\n9147\n9148\n9149\n9150\n9151\n9153\n9154\n9155\n9158\n9159\n9161\n9163\n9165\n9166\n9167\n9168\n9169\n9171\n9173\n9174\n9175\n9176\n9179\n9180\n9183\n9184\n9187\n9188\n9189\n9191\n9193\n9198\n9199\n9201\n9204\n9206\n9207\n9212\n9213\n9214\n9215\n9216\n9217\n9219\n9220\n9221\n9224\n9225\n9226\n9227\n9228\n9229\n9230\n9231\n9232\n9234\n9238\n9239\n9240\n9242\n9243\n9244\n9246\n9250\n9251\n9252\n9253\n9255\n9257\n9258\n9259\n9260\n9265\n9266\n9269\n9270\n9271\n9272\n9273\n9274\n9275\n9276\n9277\n9279\n9281\n9283\n9290\n9293\n9294\n9295\n9296\n9297\n9300\n9303\n9304\n9305\n9306\n9307\n9308\n9309\n9310\n9313\n9314\n9317\n9318\n9319\n9322\n9324\n9327\n9330\n9331\n9333\n9334\n9335\n9336\n9337\n9338\n9342\n9343\n9344\n9345\n9346\n9347\n9349\n9350\n9352\n9354\n9355\n9362\n9363\n9365\n9366\n9367\n9368\n9369\n9370\n9371\n9372\n9376\n9377\n9381\n9382\n9383\n9385\n9386\n9387\n9390\n9391\n9392\n9395\n9396\n9397\n9398\n9399\n9400\n9401\n9402\n9404\n9405\n9407\n9411\n9412\n9413\n9414\n9415\n9416\n9417\n9418\n9420\n9421\n9422\n9423\n9425\n9426\n9427\n9430\n9431\n9437\n9440\n9441\n9442\n9445\n9446\n9447\n9448\n9449\n9450\n9452\n9453\n9454
\n9455\n9457\n9460\n9463\n9464\n9465\n9467\n9469\n9470\n9471\n9475\n9476\n9479\n9481\n9482\n9484\n9486\n9488\n9490\n9492\n9493\n9496\n9498\n9500\n9501\n9502\n9503\n9504\n9505\n9513\n9514\n9515\n9516\n9521\n9522\n9523\n9528\n9532\n9533\n9535\n9536\n9537\n9540\n9542\n9543\n9545\n9546\n9548\n9549\n9551\n9552\n9557\n9558\n9566\n9567\n9569\n9570\n9571\n9572\n9574\n9575\n9576\n9577\n9578\n9579\n9581\n9584\n9585\n9586\n9587\n9588\n9590\n9591\n9595\n9597\n9602\n9605\n9608\n9609\n9610\n9615\n9616\n9618\n9622\n9624\n9626\n9627\n9628\n9629\n9630\n9633\n9634\n9635\n9636\n9637\n9639\n9640\n9641\n9642\n9643\n9644\n9647\n9652\n9653\n9654\n9655\n9656\n9659\n9660\n9661\n9664\n9665\n9666\n9667\n9668\n9670\n9673\n9674\n9675\n9676\n9677\n9678\n9679\n9681\n9684\n9686\n9689\n9690\n9691\n9692\n9693\n9695\n9697\n9698\n9703\n9704\n9705\n9706\n9707\n9711\n9712\n9713\n9715\n9717\n9720\n9721\n9724\n9726\n9727\n9728\n9730\n9733\n9734\n9735\n9737\n9738\n9739\n9740\n9741\n9742\n9745\n9752\n9753\n9754\n9755\n9756\n9757\n9759\n9760\n9763\n9764\n9765\n9767\n9770\n9771\n9772\n9773\n9774\n9776\n9777\n9778\n9779\n9780\n9781\n9783\n9785\n9786\n9787\n9792\n9795\n9797\n9798\n9799\n9800\n9801\n9802\n9803\n9806\n9807\n9808\n9810\n9812\n9815\n9820\n9821\n9826\n9827\n9828\n9835\n9836\n9837\n9838\n9839\n9842\n9845\n9849\n9850\n9856\n9858\n9859\n9860\n9861\n9863\n9867\n9869\n9870\n9874\n9876\n9877\n9878\n9879\n9881\n9884\n9886\n9887\n9888\n9889\n9890\n9892\n9894\n9895\n9896\n9897\n9898\n9899\n9900\n9901\n9902\n9903\n9905\n9907\n9908\n9910\n9912\n9913\n9916\n9921\n9922\n9923\n9924\n9925\n9927\n9928\n9929\n9930\n9931\n9932\n9935\n9936\n9937\n9938\n9939\n9941\n9945\n9947\n9948\n9949\n9951\n9952\n9953\n9954\n9956\n9958\n9960\n9961\n9962\n9963\n9964\n9965\n9966\n9967\n9969\n9970\n9972\n9975\n9976\n9977\n9979\n9980\n9981\n9982\n9983\n9984\n9988\n9989\n9990\n9991\n9992\n9993\n9994\n9997\n9999\n10002\n10003\n10010\n10011\n10012\n10013\n10014\n10015\n10016\n10017\n10018\n10022\n10023\n10025\n10028\n10029\n10031\n10033\n
10034\n10036\n10038\n10040\n10041\n10042\n10043\n10044\n10045\n10046\n10047\n10048\n10050\n10051\n10052\n10053\n10054\n10055\n10056\n10057\n10058\n10059\n10060\n10062\n10064\n10065\n10066\n10067\n10068\n10070\n10073\n10074\n10075\n10076\n10077\n10078\n10081\n10082\n10083\n10084\n10086\n10087\n10088\n10090\n10091\n10092\n10093\n10094\n10095\n10097\n10100\n10102\n10105\n10106\n10107\n10108\n10109\n10110\n10113\n10117\n10118\n10119\n10121\n10122\n10123\n10124\n10127\n10128\n10134\n10135\n10140\n10141\n10142\n10147\n10148\n10151\n10152\n10153\n10155\n10156\n10157\n10158\n10159\n10160\n10162\n10163\n10164\n10165\n10169\n10171\n10175\n10179\n10181\n10185\n10186\n10187\n10188\n10189\n10190\n10191\n10192\n10195\n10196\n10197\n10199\n10200\n10202\n10206\n10207\n10209\n10210\n10211\n10213\n10217\n10218\n10219\n10220\n10221\n10223\n10224\n10225\n10226\n10227\n10228\n10229\n10231\n10232\n10234\n10235\n10236\n10237\n10240\n10241\n10243\n10244\n10245\n10246\n10247\n10248\n10249\n10250\n10252\n10253\n10254\n10255\n10258\n10260\n10262\n10263\n10264\n10265\n10266\n10269\n10271\n10272\n10273\n10276\n10277\n10279\n10283\n10284\n10286\n10289\n10290\n10291\n10295\n10296\n10297\n10298\n10299\n10300\n10301\n10302\n10303\n10304\n10305\n10306\n10307\n10308\n10311\n10313\n10314\n10315\n10318\n10319\n10322\n10323\n10324\n10325\n10326\n10327\n10328\n10329\n10330\n10331\n10332\n10333\n10335\n10336\n10337\n10338\n10339\n10340\n10343\n10344\n10345\n10346\n10349\n10350\n10351\n10352\n10353\n10354\n10355\n10356\n10360\n10362\n10363\n10366\n10367\n10371\n10372\n10373\n10375\n10377\n10378\n10380\n10381\n10383\n10387\n10388\n10389\n10390\n10391\n10394\n10395\n10397\n10398\n10399\n10401\n10402\n10404\n10406\n10409\n10412\n10413\n10414\n10416\n10417\n10418\n10419\n10420\n10422\n10423\n10424\n10425\n10427\n10429\n10430\n10431\n10432\n10433\n10436\n10438\n10440\n10444\n10447\n10448\n10451\n10453\n10455\n10456\n10459\n10460\n10461\n10462\n10463\n10465\n10468\n10470\n10473\n10476\n10478\n10479\n10481\n10483
\n10487\n10488\n10489\n10491\n10494\n10496\n10497\n10499\n10500\n10501\n10504\n10505\n10506\n10508\n10509\n10510\n10511\n10512\n10513\n10514\n10515\n10517\n10518\n10519\n10520\n10521\n10522\n10523\n10524\n10525\n10526\n10527\n10528\n10530\n10531\n10532\n10534\n10535\n10536\n10539\n10542\n10544\n10546\n10547\n10548\n10549\n10550\n10551\n10552\n10553\n10554\n10555\n10556\n10559\n10561\n10563\n10564\n10567\n10570\n10574\n10575\n10576\n10583\n10584\n10585\n10586\n10588\n10589\n10590\n10591\n10592\n10594\n10598\n10607\n10610\n10611\n10612\n10613\n10615\n10617\n10620\n10621\n10623\n10625\n10626\n10627\n10628\n10629\n10631\n10633\n10635\n10636\n10638\n10639\n10641\n10642\n10643\n10644\n10648\n10649\n10650\n10658\n10659\n10662\n10663\n10664\n10665\n10666\n10667\n10668\n10669\n10671\n10672\n10673\n10674\n10675\n10676\n10677\n10678\n10680\n10681\n10683\n10686\n10687\n10688\n10691\n10692\n10694\n10695\n10697\n10698\n10699\n10700\n10701\n10702\n10703\n10708\n10709\n10711\n10712\n10713\n10714\n10718\n10719\n10720\n10721\n10723\n10725\n10726\n10727\n10728\n10729\n10730\n10731\n10733\n10734\n10735\n10738\n10739\n10742\n10748\n10749\n10750\n10751\n10752\n10754\n10755\n10757\n10759\n10760\n10761\n10763\n10765\n10766\n10767\n10768\n10772\n10773\n10774\n10775\n10776\n10777\n10780\n10781\n10782\n10783\n10784\n10786\n10787\n10791\n10792\n10793\n10796\n10798\n10799\n10800\n10802\n10803\n10806\n10807\n10808\n10809\n10810\n10811\n10813\n10814\n10818\n10822\n10824\n10825\n10828\n10830\n10832\n10833\n10835\n10837\n10840\n10841\n10842\n10843\n10844\n10846\n10848\n10849\n10852\n10855\n10856\n10859\n10860\n10862\n10864\n10865\n10867\n10870\n10871\n10872\n10874\n10875\n10876\n10877\n10879\n10880\n10881\n10882\n10883\n10884\n10886\n10889\n10890\n10894\n10896\n10897\n10898\n10899\n10902\n10906\n10907\n10908\n10909\n10910\n10912\n10913\n10914\n10915\n10919\n10920\n10921\n10924\n10927\n10928\n10932\n10933\n10935\n10936\n10938\n10939\n10940\n10941\n10943\n10944\n10945\n10946\n10947\n10948\n10951\n109
52\n10953\n10955\n10956\n10957\n10958\n10960\n10962\n10963\n10964\n10968\n10969\n10972\n10973\n10974\n10975\n10980\n10984\n10986\n10987\n10989\n10994\n10997\n10998\n10999\n11001\n11002\n11003\n11004\n11005\n11009\n11012\n11013\n11016\n11017\n11018\n11020\n11022\n11023\n11024\n11025\n11027\n11028\n11029\n11031\n11032\n11033\n11034\n11036\n11039\n11040\n11041\n11044\n11045\n11046\n11049\n11051\n11052\n11053\n11054\n11055\n11056\n11057\n11059\n11060\n11061\n11062\n11064\n11066\n11067\n11068\n11069\n11071\n11073\n11074\n11077\n11078\n11079\n11081\n11082\n11083\n11084\n11085\n11086\n11088\n11089\n11094\n11095\n11096\n11097\n11098\n11099\n11102\n11103\n11104\n11105\n11106\n11107\n11108\n11109\n11110\n11111\n11113\n11114\n11115\n11119\n11120\n11121\n11123\n11126\n11128\n11129\n11130\n11133\n11134\n11135\n11137\n11138\n11139\n11140\n11141\n11143\n11144\n11145\n11146\n11147\n11149\n11151\n11152\n11153\n11154\n11155\n11156\n11158\n11159\n11160\n11161\n11162\n11163\n11164\n11165\n11168\n11169\n11170\n11171\n11172\n11173\n11174\n11177\n11179\n11180\n11181\n11182\n11183\n11185\n11186\n11187\n11188\n11190\n11198\n11201\n11203\n11206\n11208\n11209\n11210\n11211\n11212\n11214\n11216\n11218\n11219\n11220\n11221\n11222\n11223\n11225\n11226\n11227\n11235\n11236\n11238\n11239\n11242\n11243\n11244\n11247\n11248\n11252\n11255\n11257\n11259\n11260\n11262\n11263\n11266\n11269\n11270\n11273\n11274\n11275\n11276\n11277\n11279\n11282\n11283\n11284\n11292\n11293\n11294\n11296\n11298\n11299\n11301\n11304\n11305\n11306\n11307\n11309\n11310\n11311\n11313\n11316\n11318\n11319\n11320\n11323\n11325\n11326\n11327\n11329\n11330\n11331\n11332\n11334\n11335\n11339\n11340\n11341\n11343\n11344\n11345\n11347\n11349\n11350\n11351\n11352\n11354\n11355\n11356\n11357\n11361\n11363\n11365\n11367\n11370\n11372\n11374\n11375\n11377\n11378\n11379\n11382\n11383\n11384\n11385\n11386\n11387\n11388\n11389\n11390\n11391\n11392\n11394\n11396\n11397\n11398\n11400\n11401\n11402\n11403\n11404\n11405\n11407\n11409\n11410\n1
1412\n11414\n11416\n11418\n11421\n11424\n11427\n11428\n11429\n11431\n11433\n11436\n11437\n11439\n11440\n11441\n11442\n11443\n11447\n11449\n11454\n11455\n11456\n11460\n11464\n11465\n11466\n11468\n11469\n11470\n11472\n11473\n11474\n11477\n11478\n11479\n11482\n11483\n11484\n11485\n11487\n11488\n11489\n11490\n11491\n11492\n11493\n11494\n11495\n11497\n11498\n11499\n11501\n11503\n11504\n11507\n11508\n11514\n11515\n11516\n11517\n11518\n11520\n11521\n11522\n11524\n11527\n11529\n11532\n11536\n11537\n11538\n11539\n11540\n11541\n11542\n11543\n11544\n11549\n11551\n11554\n11556\n11557\n11558\n11560\n11562\n11563\n11566\n11570\n11571\n11572\n11573\n11574\n11575\n11577\n11578\n11579\n11580\n11582\n11584\n11588\n11589\n11590\n11591\n11592\n11593\n11594\n11596\n11597\n11598\n11599\n11600\n11604\n11605\n11606\n11609\n11611\n11612\n11613\n11614\n11615\n11618\n11619\n11620\n11621\n11622\n11623\n11624\n11627\n11628\n11629\n11630\n11631\n11632\n11633\n11634\n11636\n11637\n11638\n11640\n11641\n11643\n11644\n11646\n11647\n11648\n11649\n11650\n11651\n11652\n11653\n11654\n11655\n11656\n11657\n11658\n11659\n11660\n11662\n11664\n11668\n11670\n11671\n11673\n11674\n11675\n11679\n11682\n11683\n11687\n11688\n11689\n11690\n11692\n11693\n11695\n11696\n11697\n11698\n11700\n11701\n11704\n11705\n11708\n11709\n11710\n11711\n11712\n11713\n11715\n11716\n11718\n11721\n11723\n11725\n11726\n11727\n11728\n11729\n11730\n11732\n11733\n11734\n11735\n11736\n11737\n11738\n11740\n11743\n11744\n11747\n11750\n11751\n11753\n11754\n11756\n11757\n11760\n11761\n11763\n11764\n11765\n11769\n11770\n11771\n11773\n11774\n11777\n11778\n11780\n11781\n11782\n11783\n11787\n11788\n11790\n11791\n11793\n11795\n11798\n11799\n11800\n11801\n11802\n11804\n11809\n11810\n11811\n11814\n11815\n11819\n11820\n11821\n11822\n11823\n11827\n11829\n11835\n11836\n11837\n11838\n11839\n11840\n11841\n11842\n11843\n11845\n11846\n11847\n11848\n11849\n11851\n11852\n11856\n11857\n11858\n11862\n11863\n11864\n11865\n11866\n11867\n11868\n11869\n11870\n11871\
n11872\n11873\n11875\n11876\n11877\n11878\n11879\n11880\n11881\n11882\n11883\n11884\n11887\n11889\n11890\n11891\n11892\n11894\n11896\n11897\n11898\n11899\n11900\n11902\n11903\n11904\n11905\n11907\n11909\n11910\n11914\n11917\n11918\n11920\n11921\n11922\n11923\n11925\n11927\n11928\n11929\n11930\n11931\n11933\n11937\n11939\n11940\n11941\n11942\n11944\n11947\n11948\n11950\n11951\n11952\n11953\n11954\n11955\n11958\n11960\n11961\n11962\n11964\n11965\n11966\n11968\n11970\n11971\n11972\n11975\n11976\n11979\n11980\n11982\n11984\n11985\n11987\n11990\n11991\n11994\n11995\n11996\n12000\n12002\n12009\n12010\n12011\n12012\n12013\n12014\n12015\n12016\n12018\n12019\n12020\n12021\n12022\n12023\n12024\n12025\n12026\n12027\n12028\n12029\n12031\n12032\n12033\n12034\n12035\n12036\n12037\n12038\n12039\n12040\n12041\n12042\n12044\n12045\n12047\n12048\n12049\n12050\n12051\n12052\n12054\n12055\n12056\n12058\n12061\n12063\n12064\n12065\n12067\n12069\n12071\n12074\n12075\n12081\n12084\n12085\n12088\n12090\n12091\n12092\n12093\n12094\n12095\n12096\n12097\n12099\n12105\n12107\n12109\n12110\n12112\n12121\n12123\n12125\n12131\n12145\n12147\n12148\n12154\n12155\n12157\n12159\n12163\n12170\n12174\n12177\n12178\n12179\n12180\n12181\n12182\n12183\n12184\n12185\n12186\n12187\n12190\n12191\n12192\n12194\n12198\n12200\n12201\n12202\n12203\n12204\n12207\n12208\n12209\n12210\n12211\n12214\n12215\n12217\n12218\n12222\n12225\n12227\n12229\n12230\n12231\n12232\n12239\n12240\n12241\n12242\n12243\n12244\n12245\n12246\n12247\n12248\n12249\n12250\n12251\n12253\n12255\n12256\n12257\n12259\n12260\n12261\n12262\n12263\n12264\n12268\n12272\n12276\n12281\n12282\n12283\n12284\n12285\n12291\n12297\n12298\n12302\n12304\n12306\n12309\n12313\n12315\n12317\n12319\n12322\n12323\n12324\n12325\n12326\n12327\n12328\n12329\n12331\n12332\n12333\n12334\n12335\n12336\n12338\n12340\n12341\n12342\n12346\n12347\n12348\n12349\n12350\n12351\n12352\n12353\n12354\n12355\n12356\n12357\n12359\n12360\n12362\n12363\n12365\n12367\n12369\n1237
3\n12375\n12376\n12377\n12381\n12382\n12384\n12386\n12387\n12389\n12391\n12393\n12394\n12395\n12396\n12397\n12399\n12400\n12401\n12402\n12404\n12405\n12406\n12407\n12408\n12409\n12411\n12412\n12413\n12414\n12420\n12421\n12422\n12423\n12426\n12430\n12431\n12433\n12434\n12436\n12437\n12438\n12439\n12441\n12443\n12444\n12447\n12453\n12455\n12457\n12463\n12464\n12465\n12467\n12468\n12471\n12474\n12481\n12482\n12483\n12484\n12487\n12491\n12493\n12494\n12495\n12497\n12498\n12501\n12502\n12503\n12505\n12506\n12511\n12512\n12513\n12516\n12519\n12522\n12523\n12525\n12526\n12530\n12533\n12535\n12537\n12539\n12542\n12545\n12546\n12547\n12548\n12550\n12551\n12552\n12554\n12556\n12557\n12559\n12560\n12562\n12564\n12566\n12567\n12568\n12570\n12571\n12572\n12573\n12574\n12575\n12577\n12580\n12581\n12582\n12583\n12585\n12590\n12591\n12593\n12594\n12595\n12597\n12601\n12602\n12604\n12611\n12614\n12615\n12616\n12617\n12619\n12620\n12622\n12623\n12625\n12626\n12627\n12628\n12629\n12630\n12632\n12633\n12634\n12635\n12636\n12640\n12641\n12643\n12644\n12646\n12647\n12648\n12649\n12651\n12653\n12654\n12657\n12659\n12661\n12663\n12665\n12667\n12668\n12671\n12672\n12674\n12675\n12676\n12677\n12678\n12679\n12680\n12681\n12684\n12685\n12686\n12687\n12688\n12689\n12690\n12692\n12693\n12694\n12695\n12697\n12699\n12700\n12701\n12702\n12703\n12704\n12708\n12709\n12710\n12711\n12712\n12713\n12714\n12715\n12717\n12720\n12722\n12723\n12729\n12730\n12731\n12732\n12733\n12734\n12735\n12736\n12737\n12738\n12739\n12740\n12741\n12742\n12743\n12744\n12749\n12751\n12752\n12754\n12755\n12757\n12759\n12760\n12761\n12762\n12767\n12768\n12769\n12770\n12771\n12772\n12773\n12774\n12775\n12776\n12777\n12778\n12780\n12781\n12782\n12783\n12784\n12785\n12786\n12787\n12788\n12790\n12791\n12793\n12794\n12795\n12796\n12798\n12800\n12801\n12802\n12804\n12806\n12807\n12808\n12809\n12810\n12811\n12812\n12816\n12817\n12818\n12819\n12820\n12821\n12822\n12823\n12824\n12825\n12826\n12827\n12828\n12829\n12830\n12831\n12832\n12
833\n12834\n12835\n12836\n12837\n12838\n12839\n12840\n12841\n12842\n12843\n12844\n12847\n12848\n12849\n12850\n12856\n12858\n12861\n12864\n12866\n12870\n12871\n12872\n12873\n12874\n12876\n12877\n12878\n12879\n12881\n12882\n12883\n12885\n12887\n12888\n12889\n12890\n12891\n12892\n12894\n12897\n12898\n12899\n12901\n12903\n12904\n12905\n12907\n12908\n12910\n12913\n12914\n12915\n12916\n12920\n12921\n12923\n12924\n12925\n12927\n12928\n12929\n12934\n12935\n12936\n12937\n12938\n12939\n12940\n12941\n12943\n12944\n12945\n12947\n12949\n12950\n12951\n12952\n12956\n12957\n12958\n12962\n12963\n12964\n12966\n12967\n12968\n12969\n12970\n12971\n12972\n12976\n12977\n12978\n12979\n12981\n12982\n12983\n12985\n12986\n12990\n12994\n12995\n12996\n12998\n13000\n13001\n13002\n13003\n13006\n13007\n13010\n13015\n13017\n13021\n13022\n13024\n13026\n13027\n13028\n13029\n13031\n13032\n13033\n13036\n13038\n13040\n13041\n13042\n13045\n13046\n13048\n13049\n13050\n13051\n13052\n13053\n13054\n13056\n13057\n13058\n13059\n13060\n13061\n13062\n13063\n13065\n13067\n13068\n13069\n13070\n13071\n13072\n13074\n13076\n13078\n13079\n13081\n13084\n13085\n13086\n13088\n13089\n13090\n13093\n13094\n13095\n13096\n13098\n13100\n13101\n13103\n13105\n13107\n13108\n13109\n13110\n13114\n13119\n13121\n13126\n13128\n13134\n13135\n13136\n13137\n13140\n13141\n13144\n13145\n13146\n13149\n13150\n13151\n13152\n13153\n13155\n13156\n13157\n13158\n13159\n13163\n13164\n13165\n13169\n13170\n13171\n13172\n13173\n13174\n13175\n13176\n13178\n13180\n13182\n13183\n13187\n13188\n13190\n13194\n13197\n13200\n13203\n13204\n13206\n13207\n13212\n13213\n13214\n13215\n13216\n13222\n13224\n13225\n13226\n13228\n13229\n13230\n13233\n13235\n13236\n13237\n13241\n13246\n13247\n13248\n13249\n13250\n13255\n13256\n13257\n13259\n13260\n13261\n13262\n13264\n13265\n13266\n13267\n13268\n13269\n13270\n13271\n13272\n13277\n13280\n13281\n13284\n13286\n13287\n13298\n13299\n13300\n13301\n13302\n13306\n13307\n13308\n13310\n13311\n13312\n13313\n13314\n13316\n13317\n
13318\n13319\n13323\n13325\n13326\n13328\n13329\n13330\n13331\n13332\n13334\n13336\n13337\n13339\n13340\n13341\n13342\n13344\n13346\n13348\n13349\n13350\n13352\n13353\n13354\n13356\n13357\n13358\n13360\n13362\n13364\n13365\n13369\n13372\n13376\n13378\n13379\n13381\n13382\n13385\n13386\n13387\n13388\n13390\n13391\n13396\n13398\n13399\n13400\n13402\n13406\n13407\n13415\n13418\n13424\n13427\n13428\n13430\n13433\n13434\n13438\n13444\n13445\n13448\n13452\n13453\n13454\n13455\n13456\n13467\n13472\n13474\n13476\n13477\n13478\n13479\n13480\n13481\n13482\n13483\n13484\n13485\n13486\n13487\n13489\n13492\n13493\n13494\n13495\n13496\n13498\n13499\n13505\n13507\n13509\n13518\n13519\n13522\n13523\n13524\n13525\n13526\n13531\n13533\n13534\n13535\n13536\n13538\n13540\n13541\n13542\n13543\n13544\n13546\n13547\n13548\n13552\n13554\n13555\n13560\n13566\n13573\n13578\n13582\n13590\n13593\n13595\n13601\n13603\n13604\n13605\n13607\n13609\n13610\n13613\n13614\n13615\n13618\n13620\n13621\n13622\n13623\n13624\n13625\n13626\n13631\n13632\n13634\n13636\n13641\n13642\n13643\n13644\n13645\n13649\n13650\n13651\n13652\n13654\n13655\n13656\n13657\n13658\n13659\n13660\n13661\n13664\n13665\n13666\n13668\n13669\n13671\n13672\n13673\n13674\n13675\n13677\n13678\n13679\n13683\n13685\n13687\n13689\n13690\n13691\n13693\n13695\n13696\n13698\n13699\n13700\n13701\n13702\n13703\n13706\n13709\n13711\n13712\n13713\n13717\n13721\n13724\n13725\n13726\n13727\n13728\n13732\n13733\n13734\n13735\n13736\n13737\n13738\n13740\n13742\n13743\n13744\n13745\n13746\n13747\n13748\n13749\n13751\n13752\n13755\n13756\n13757\n13759\n13761\n13765\n13766\n13767\n13768\n13770\n13771\n13772\n13773\n13774\n13775\n13776\n13777\n13778\n13780\n13782\n13783\n13786\n13787\n13788\n13790\n13791\n13792\n13793\n13795\n13799\n13800\n13801\n13802\n13803\n13804\n13805\n13806\n13808\n13809\n13811\n13812\n13813\n13814\n13815\n13816\n13817\n13818\n13819\n13820\n13821\n13822\n13823\n13824\n13825\n13826\n13827\n13828\n13831\n13832\n13833\n13834\n13835
\n13837\n13838\n13839\n13840\n13841\n13842\n13843\n13844\n13845\n13846\n13847\n13849\n13850\n13851\n13852\n13853\n13854\n13855\n13856\n13858\n13859\n13860\n13861\n13862\n13863\n13864\n13865\n13866\n13867\n13868\n13869\n13872\n13873\n13874\n13875\n13876\n13877\n13878\n13879\n13880\n13882\n13883\n13884\n13885\n13886\n13887\n13888\n13889\n13890\n13891\n13894\n13895\n13896\n13897\n13898\n13899\n13900\n13901\n13902\n13903\n13904\n13905\n13906\n13907\n13908\n13909\n13910\n13912\n13916\n13920\n13921\n13923\n13924\n13925\n13929\n13930\n13931\n13933\n13936\n13940\n13941\n13942\n13943\n13944\n13945\n13947\n13949\n13950\n13953\n13954\n13956\n13957\n13958\n13959\n13960\n13962\n13963\n13964\n13965\n13966\n13970\n13971\n13972\n13974\n13976\n13977\n13978\n13982\n13983\n13984\n13985\n13986\n13987\n13988\n13990\n13992\n13993\n13994\n13996\n13997\n13998\n14000\n14002\n14003\n14004\n14006\n14007\n14009\n14010\n14011\n14012\n14013\n14016\n14017\n14020\n14021\n14022\n14025\n14026\n14028\n14029\n14031\n14032\n14033\n14035\n14036\n14038\n14039\n14040\n14042\n14043\n14044\n14046\n14048\n14049\n14050\n14051\n14052\n14055\n14056\n14058\n14059\n14064\n14065\n14066\n14069\n14073\n14079\n14082\n14083\n14084\n14085\n14088\n14090\n14091\n14095\n14097\n14098\n14099\n14100\n14101\n14102\n14103\n14104\n14106\n14108\n14109\n14111\n14112\n14115\n14116\n14117\n14122\n14126\n14127\n14129\n14131\n14132\n14135\n14136\n14137\n14140\n14144\n14148\n14150\n14151\n14153\n14154\n14155\n14156\n14159\n14162\n14164\n14165\n14168\n14171\n14172\n14176\n14177\n14178\n14179\n14180\n14183\n14184\n14188\n14191\n14195\n14196\n14200\n14202\n14205\n14213\n14216\n14217\n14219\n14220\n14221\n14224\n14225\n14226\n14227\n14229\n14237\n14238\n14239\n14240\n14241\n14243\n14245\n14246\n14247\n14248\n14249\n14250\n14252\n14253\n14254\n14255\n14256\n14257\n14259\n14260\n14262\n14264\n14265\n14266\n14267\n14268\n14270\n14272\n14274\n14276\n14277\n14281\n14282\n14284\n14286\n14287\n14288\n14290\n14291\n14293\n14294\n14295\n14298\n142
99\n14301\n14303\n14304\n14305\n14306\n14307\n14309\n14310\n14311\n14313\n14314\n14319\n14323\n14324\n14326\n14327\n14329\n14333\n14335\n14336\n14343\n14344\n14346\n14348\n14350\n14351\n14352\n14353\n14354\n14355\n14360\n14362\n14363\n14364\n14365\n14366\n14367\n14369\n14372\n14373\n14374\n14375\n14377\n14378\n14379\n14380\n14381\n14383\n14384\n14385\n14386\n14388\n14389\n14394\n14395\n14397\n14400\n14401\n14402\n14403\n14404\n14405\n14406\n14407\n14409\n14410\n14411\n14412\n14413\n14414\n14415\n14416\n14417\n14418\n14419\n14421\n14422\n14423\n14424\n14425\n14428\n14431\n14432\n14436\n14438\n14441\n14442\n14446\n14447\n14448\n14452\n14455\n14460\n14461\n14463\n14464\n14466\n14474\n14479\n14480\n14481\n14482\n14484\n14485\n14486\n14487\n14488\n14489\n14490\n14493\n14495\n14498\n14501\n14503\n14506\n14507\n14508\n14509\n14511\n14513\n14514\n14516\n14517\n14519\n14522\n14525\n14527\n14531\n14535\n14542\n14543\n14546\n14559\n14567\n14579\n14580\n14581\n14582\n14586\n14590\n14593\n14594\n14596\n14600\n14602\n14603\n14604\n14608\n14611\n14612\n14614\n14616\n14617\n14618\n14621\n14622\n14623\n14624\n14625\n14626\n14627\n14629\n14633\n14634\n14635\n14640\n14641\n14648\n14650\n14652\n14654\n14656\n14657\n14660\n14664\n14665\n14666\n14667\n14669\n14671\n14674\n14677\n14678\n14679\n14680\n14681\n14682\n14683\n14684\n14685\n14687\n14691\n14693\n14697\n14698\n14699\n14700\n14703\n14704\n14706\n14708\n14709\n14710\n14711\n14714\n14715\n14716\n14717\n14718\n14720\n14722\n14723\n14724\n14725\n14726\n14730\n14731\n14735\n14737\n14741\n14744\n14745\n14747\n14749\n14750\n14754\n14760\n14761\n14766\n14767\n14768\n14769\n14770\n14772\n14774\n14775\n14776\n14778\n14780\n14784\n14788\n14791\n14793\n14794\n14796\n14798\n14803\n14805\n14807\n14808\n14810\n14814\n14815\n14819\n14822\n14824\n14835\n14842\n14843\n14852\n14853\n14855\n14856\n14860\n14864\n14865\n14866\n14868\n14869\n14870\n14878\n14881\n14885\n14888\n14893\n14894\n14901\n14902\n14903\n14907\n14908\n14913\n14917\n14920\n14923\n1
4925\n14928\n14929\n14930\n14931\n14937\n14938\n14939\n14942\n14943\n14944\n14948\n14953\n14954\n14958\n14959\n14963\n14964\n14965\n14966\n14967\n14968\n14969\n14974\n14975\n14976\n14978\n14979\n14981\n14982\n14984\n14985\n14987\n14990\n14991\n14993\n14996\n14998\n14999\n15002\n15007\n15009\n15010\n15014\n15015\n15017\n15019\n15022\n15025\n15030\n15031\n15032\n15034\n15035\n15050\n15063\n15064\n15065\n15067\n15069\n15070\n15072\n15073\n15074\n15075\n15079\n15081\n15082\n15087\n15089\n15095\n15098\n15099\n15101\n15103\n15108\n15109\n15110\n15112\n15113\n15114\n15116\n15118\n15121\n15123\n15124\n15128\n15129\n15130\n15131\n15135\n15138\n15140\n15141\n15142\n15145\n15146\n15148\n15151\n15161\n15167\n15168\n15169\n15170\n15171\n15172\n15173\n15175\n15176\n15177\n15181\n15184\n15188\n15189\n15190\n15191\n15193\n15194\n15197\n15199\n15200\n15204\n15205\n15206\n15208\n15209\n15211\n15212\n15213\n15214\n15217\n15220\n15222\n15225\n15232\n15236\n15238\n15239\n15241\n15242\n15243\n15244\n15246\n15251\n15252\n15257\n15259\n15262\n15265\n15274\n15283\n15293\n15301\n15302\n15305\n15306\n15308\n15309\n15311\n15312\n15320\n15322\n15323\n15324\n15340\n15342\n15343\n15344\n15345\n15346\n15347\n15349\n15352\n15355\n15368\n15369\n15372\n15374\n15376\n15379\n15385\n15396\n15398\n15401\n15405\n15407\n15410\n15412\n15418\n15423\n15424\n15425\n15432\n15434\n15436\n15437\n15441\n15442\n15444\n15446\n15448\n15450\n15451\n15454\n15458\n15460\n15461\n15468\n15470\n15478\n15479\n15480\n15482\n15491\n15495\n15508\n15521\n15522\n15523\n15527\n15536\n15537\n15539\n15540\n15542\n15544\n15545\n15546\n15547\n15551\n15552\n15554\n15558\n15559\n15563\n15565\n15566\n15568\n15573\n15574\n15575\n15578\n15579\n15580\n15581\n15583\n15584\n15590\n15591\n15599\n15601\n15602\n15603\n15604\n15610\n15614\n15619\n15625\n15626\n15639\n15646\n15650\n15654\n15655\n15660\n15661\n15668\n15669\n15674\n15678\n15681\n15684\n15685\n15687\n15690\n15691\n15696\n15702\n15705\n15706\n15707\n15709\n15711\n15713\n15716\n15717\
n15718\n15719\n15720\n15721\n15722\n15723\n15724\n15725\n15726\n15728\n15729\n15731\n15732\n15737\n15739\n15744\n15747\n15749\n15750\n15757\n15761\n15764\n15766\n15768\n15775\n15776\n15777\n15780\n15783\n15787\n15788\n15799\n15802\n15804\n15805\n15809\n15814\n15823\n15826\n15829\n15830\n15831\n15833\n15838\n15839\n15842\n15843\n15847\n15850\n15852\n15857\n15861\n15865\n15867\n15869\n15872\n15874\n15880\n15881\n15883\n15884\n15885\n15889\n15893\n15894\n15900\n15901\n15912\n15914\n15919\n15922\n15923\n15924\n15931\n15936\n15938\n15939\n15941\n15942\n15945\n15946\n15947\n15949\n15959\n15960\n15961\n15962\n15964\n15977\n15978\n15983\n15984\n15991\n15994\n15995\n15996\n15998\n16002\n16003\n16004\n16005\n16007\n16008\n16011\n16014\n16015\n16016\n16019\n16020\n16028\n16029\n16039\n16041\n16044\n16046\n16051\n16056\n16060\n16061\n16062\n16071\n16072\n16074\n16079\n16082\n16085\n16092\n16093\n16094\n16096\n16097\n16100\n16101\n16103\n16107\n16108\n16109\n16116\n16117\n16119\n16121\n16129\n16133\n16140\n16141\n16143\n16144\n16146\n16147\n16149\n16150\n16152\n16155\n16156\n16158\n16159\n16163\n16165\n16169\n16172\n16174\n16177\n16178\n16179\n16187\n16192\n16193\n16198\n16199\n16200\n16201\n16214\n16215\n16217\n16219\n16225\n16227\n16236\n16241\n16243\n16244\n16251\n16256\n16258\n16259\n16262\n16264\n16271\n16274\n16280\n16281\n16284\n16288\n16289\n16290\n16291\n16292\n16296\n16302\n16307\n16312\n16313\n16314\n16315\n16317\n16318\n16319\n16322\n16328\n16334\n16335\n16341\n16342\n16343\n16347\n16348\n16349\n16357\n16360\n16363\n16364\n16373\n16375\n16376\n16379\n16380\n16384\n16388\n16389\n16390\n16391\n16392\n16393\n16397\n16400\n16405\n16414\n16416\n16418\n16421\n16423\n16424\n16425\n16426\n16432\n16435\n16436\n16437\n16444\n16447\n16455\n16456\n16459\n16460\n16463\n16474\n16476\n16477\n16478\n16481\n16494\n16498\n16503\n16504\n16508\n16509\n16512\n16513\n16517\n16522\n16526\n16529\n16530\n16540\n16542\n16545\n16547\n16551\n16552\n16555\n16560\n16568\n16576\n16584\n16585\n1658
7\n16588\n16591\n16592\n16593\n16599\n16603\n16605\n16610\n16616\n16618\n16619\n16622\n16626\n16630\n16635\n16648\n16649\n16651\n16653\n16654\n16655\n16659\n16660\n16665\n16668\n16669\n16670\n16672\n16680\n16689\n16693\n16694\n16695\n16704\n16705\n16708\n16713\n16726\n16727\n16729\n16730\n16735\n16736\n16739\n16740\n16744\n16745\n16750\n16752\n16755\n16759\n16760\n16763\n16764\n16765\n16774\n16777\n16779\n16783\n16787\n16789\n16790\n16791\n16792\n16800\n16801\n16803\n16805\n16812\n16813\n16817\n16818\n16825\n16829\n16833\n16834\n16836\n16838\n16841\n16844\n16845\n16848\n16849\n16850\n16851\n16855\n16865\n16873\n16874\n16878\n16879\n16880\n16881\n16882\n16885\n16887\n16890\n16892\n16894\n16898\n16900\n16902\n16905\n16907\n16908\n16911\n16916\n16917\n16920\n16928\n16931\n16940\n16948\n16957\n16962\n16963\n16965\n16967\n16970\n16972\n16973\n16975\n16977\n16981\n16982\n16988\n16989\n16993\n16994\n16998\n17003\n17004\n17007\n17008\n17010\n17013\n17017\n17021\n17022\n17026\n17029\n17032\n17037\n17038\n17039\n17040\n17052\n17053\n17057\n17063\n17081\n17087\n17094\n17107\n17110\n17111\n17114\n17118\n17121\n17126\n17128\n17133\n17135\n17141\n17144\n17146\n17155\n17156\n17158\n17159\n17165\n17167\n17169\n17171\n17178\n17180\n17183\n17186\n17188\n17192\n17193\n17203\n17208\n17210\n17211\n17217\n17228\n17231\n17248\n17249\n17253\n17254\n17255\n17264\n17268\n17269\n17272\n17274\n17275\n17280\n17286\n17290\n17297\n17298\n17299\n17301\n17305\n17306\n17309\n17310\n17312\n17313\n17314\n17316\n17317\n17319\n17326\n17329\n17330\n17338\n17339\n17340\n17341\n17342\n17345\n17346\n17348\n17350\n17351\n17353\n17354\n17355\n17356\n17359\n17360\n17361\n17362\n17363\n17364\n17366\n17367\n17369\n17370\n17373\n17374\n17375\n17376\n17377\n17379\n17380\n17382\n17384\n17385\n17387\n17388\n17389\n17392\n17393\n17394\n17395\n17396\n17400\n17401\n17402\n17403\n17404\n17405\n17406\n17407\n17408\n17409\n17410\n17411\n17412\n17413\n17414\n17415\n17416\n17417\n17419\n17420\n17422\n17423\n17424\n17425\n17
426\n17427\n17429\n17430\n17431\n17432\n17435\n17440\n17442\n17444\n17447\n17448\n17449\n17450\n17451\n17452\n17455\n17456\n17457\n17458\n17459\n17460\n17463\n17464\n17465\n17466\n17468\n17470\n17471\n17482\n17486\n17492\n17493\n17498\n17499\n17503\n17504\n17508\n17509\n17510\n17512\n17516\n17517\n17518\n17520\n17521\n17522\n17523\n17526\n17529\n17534\n17539\n17540\n17551\n17560\n17565\n17566\n17567\n17568\n17569\n17570\n17571\n17574\n17578\n17579\n17584\n17586\n17588\n17591\n17594\n17595\n17596\n17598\n17599\n17601\n17603\n17605\n17606\n17610\n17611\n17612\n17613\n17615\n17618\n17619\n17621\n17625\n17626\n17627\n17630\n17633\n17634\n17635\n17636\n17637\n17641\n17643\n17644\n17646\n17648\n17650\n17651\n17652\n17654\n17655\n17657\n17659\n17660\n17661\n17662\n17663\n17664\n17666\n17672\n17676\n17678\n17679\n17680\n17681\n17684\n17688\n17689\n17692\n17711\n17721\n17722\n17723\n17725\n17726\n17727\n17729\n17730\n17733\n17734\n17735\n17736\n17737\n17738\n17739\n17740\n17742\n17743\n17744\n17745\n17748\n17762\n17763\n17764\n17765\n17770\n17771\n17774\n17775\n17780\n17782\n17783\n17784\n17786\n17787\n17789\n17792\n17793\n17794\n17796\n17798\n17799\n17803\n17804\n17806\n17811\n17812\n17813\n17814\n17815\n17817\n17818\n17820\n17821\n17826\n17835\n17839\n17841\n17842\n17845\n17846\n17847\n17850\n17852\n17853\n17855\n17856\n17857\n17858\n17859\n17860\n17861\n17862\n17863\n17864\n17866\n17868\n17869\n17870\n17871\n17872\n17873\n17874\n17876\n17877\n17878\n17879\n17880\n17881\n17882\n17885\n17886\n17888\n17889\n17890\n17891\n17895\n17897\n17900\n17902\n17905\n17914\n17918\n17919\n17925\n17929\n17934\n17936\n17938\n17939\n17940\n17946\n17947\n17948\n17949\n17950\n17953\n17954\n17955\n17958\n17959\n17960\n17961\n17963\n17964\n17965\n17966\n17970\n17971\n17972\n17973\n17975\n17976\n17979\n17980\n17983\n17984\n17985\n17987\n17990\n17991\n17992\n17999\n18000\n18005\n18007\n18008\n18012\n18017\n18020\n18021\n18022\n18023\n18024\n18025\n18026\n18027\n18028\n18030\n18032\n18035\n18036\n
18037\n18038\n18039\n18041\n18045\n18050\n18051\n18057\n18059\n18061\n18063\n18064\n18066\n18067\n18068\n18070\n18071\n18072\n18073\n18077\n18078\n18081\n18085\n18086\n18088\n18089\n18090\n18091\n18092\n18093\n18094\n18096\n18097\n18099\n18100\n18103\n18104\n18106\n18107\n18108\n18110\n18111\n18113\n18114\n18115\n18116\n18117\n18120\n18121\n18123\n18125\n18126\n18130\n18136\n18137\n18138\n18140\n18141\n18142\n18144\n18145\n18146\n18148\n18150\n18151\n18152\n18160\n18161\n18167\n18168\n18173\n18174\n18177\n18178\n18204\n18205\n18206\n18207\n18209\n18213\n18216\n18217\n18218\n18219\n18221\n18222\n18223\n18224\n18227\n18228\n18230\n18231\n18233\n18235\n18236\n18237\n18239\n18240\n18241\n18243\n18246\n18247\n18249\n18250\n18251\n18252\n18254\n18257\n18259\n18260\n18261\n18264\n18265\n18269\n18270\n18271\n18273\n18274\n18275\n18276\n18277\n18280\n18283\n18284\n18286\n18287\n18289\n18290\n18296\n18297\n18298\n18299\n18303\n18304\n18305\n18306\n18307\n18308\n18309\n18311\n18322\n18325\n18326\n18327\n18328\n18331\n18332\n18333\n18334\n18335\n18336\n18338\n18343\n18344\n18349\n18351\n18355\n18357\n18358\n18359\n18360\n18361\n18364\n18368\n18373\n18381\n18386\n18388\n18391\n18392\n18394\n18396\n18398\n18399\n18400\n18401\n18403\n18404\n18405\n18407\n18412\n18413\n18419\n18422\n18423\n18424\n18426\n18441\n18442\n18443\n18444\n18445\n18446\n18447\n18448\n18451\n18452\n18453\n18455\n18456\n18459\n18460\n18461\n18462\n18463\n18466\n18469\n18470\n18473\n18478\n18479\n18480\n18482\n18486\n18487\n18488\n18489\n18491\n18492\n18494\n18495\n18496\n18497\n18498\n18499\n18501\n18506\n18507\n18508\n18509\n18510\n18511\n18512\n18514\n18515\n18516\n18517\n18518\n18522\n18523\n18524\n18525\n18529\n18531\n18532\n18533\n18535\n18536\n18537\n18539\n18540\n18541\n18542\n18543\n18546\n18547\n18549\n18550\n18551\n18553\n18554\n18555\n18556\n18558\n18559\n18560\n18562\n18565\n18570\n18573\n18574\n18575\n18576\n18577\n18578\n18581\n18583\n18585\n18586\n18587\n18588\n18589\n18590\n18591\n18592\n18593
\n18594\n18595\n18597\n18598\n18601\n18603\n18604\n18606\n18607\n18610\n18611\n18612\n18613\n18615\n18616\n18617\n18618\n18619\n18620\n18621\n18622\n18623\n18624\n18631\n18632\n18633\n18634\n18636\n18637\n18638\n18641\n18642\n18646\n18647\n18651\n18657\n18659\n18660\n18662\n18664\n18665\n18669\n18670\n18671\n18672\n18680\n18681\n18684\n18685\n18687\n18691\n18694\n18696\n18697\n18698\n18703\n18705\n18715\n18723\n18725\n18728\n18730\n18731\n18735\n18738\n18740\n18741\n18744\n18753\n18754\n18755\n18759\n18763\n18766\n18767\n18775\n18780\n18781\n18782\n18784\n18791\n18792\n18795\n18796\n18798\n18801\n18802\n18805\n18808\n18809\n18816\n18817\n18822\n18825\n18826\n18828\n18829\n18834\n18835\n18836\n18837\n18841\n18842\n18843\n18847\n18848\n18849\n18850\n18852\n18853\n18854\n18855\n18857\n18858\n18859\n18860\n18863\n18864\n18865\n18866\n18867\n18868\n18869\n18870\n18871\n18872\n18873\n18874\n18885\n18886\n18888\n18890\n18900\n18902\n18906\n18908\n18911\n18913\n18914\n18915\n18921\n18924\n18927\n18928\n18929\n18933\n18937\n18939\n18942\n18943\n18945\n18946\n18948\n18949\n18950\n18953\n18954\n18955\n18957\n18962\n18979\n18980\n18981\n18984\n18986\n18988\n18989\n18996\n19012\n19013\n19016\n19021\n19027\n19029\n19031\n19033\n19034\n19035\n19039\n19052\n19056\n19069\n19070\n19073\n19075\n19077\n19078\n19079\n19080\n19082\n19083\n19086\n19087\n19088\n19092\n19093\n19096\n19100\n19102\n19104\n19105\n19106\n19108\n19111\n19112\n19116\n19117\n19120\n19121\n19122\n19124\n19125\n19127\n19129\n19130\n19133\n19134\n19135\n19137\n19138\n19139\n19140\n19142\n19143\n19144\n19145\n19146\n19147\n19150\n19151\n19152\n19154\n19155\n19156\n19157\n19158\n19159\n19160\n19161\n19163\n19165\n19166\n19169\n19172\n19175\n19176\n19179\n19180\n19181\n19183\n19185\n19187\n19190\n19193\n19194\n19195\n19197\n19198\n19199\n19201\n19202\n19204\n19213\n19214\n19215\n19218\n19219\n19220\n19221\n19222\n19223\n19226\n19227\n19228\n19229\n19230\n19232\n19233\n19234\n19236\n19238\n19239\n19240\n19244\n19252\n192
54\n19257\n19258\n19259\n19260\n19261\n19262\n19264\n19265\n19266\n19267\n19270\n19271\n19275\n19276\n19277\n19284\n19289\n19290\n19292\n19293\n19295\n19296\n19297\n19300\n19302\n19304\n19305\n19306\n19307\n19310\n19311\n19316\n19317\n19318\n19320\n19321\n19324\n19325\n19330\n19331\n19332\n19333\n19336\n19337\n19339\n19342\n19343\n19344\n19345\n19347\n19348\n19349\n19350\n19351\n19352\n19354\n19356\n19360\n19363\n19364\n19365\n19366\n19367\n19369\n19371\n19373\n19374\n19377\n19379\n19381\n19382\n19385\n19386\n19388\n19402\n19403\n19404\n19405\n19407\n19414\n19417\n19419\n19421\n19423\n19434\n19435\n19436\n19437\n19440\n19441\n19442\n19446\n19447\n19448\n19450\n19451\n19452\n19453\n19456\n19457\n19459\n19461\n19471\n19472\n19473\n19477\n19478\n19481\n19483\n19484\n19485\n19486\n19490\n19492\n19494\n19495\n19496\n19497\n19498\n19501\n19502\n19503\n19506\n19508\n19509\n19511\n19513\n19514\n19516\n19517\n19518\n19519\n19520\n19521\n19522\n19524\n19525\n19526\n19527\n19528\n19530\n19532\n19538\n19539\n19541\n19542\n19543\n19546\n19547\n19548\n19549\n19550\n19551\n19553\n19555\n19556\n19557\n19563\n19566\n19567\n19568\n19575\n19577\n19578\n19582\n19583\n19584\n19587\n19588\n19590\n19592\n19597\n19606\n19607\n19608\n19611\n19613\n19614\n19615\n19616\n19618\n19622\n19624\n19627\n19628\n19629\n19630\n19632\n19633\n19634\n19635\n19638\n19640\n19641\n19642\n19643\n19644\n19648\n19649\n19655\n19656\n19663\n19667\n19668\n19669\n19670\n19672\n19673\n19675\n19676\n19677\n19678\n19679\n19681\n19682\n19683\n19684\n19687\n19688\n19689\n19690\n19692\n19693\n19694\n19696\n19697\n19698\n19699\n19705\n19707\n19709\n19710\n19714\n19717\n19718\n19720\n19721\n19722\n19724\n19728\n19732\n19733\n19734\n19737\n19738\n19741\n19745\n19746\n19756\n19757\n19758\n19759\n19763\n19764\n19767\n19769\n19770\n19773\n19774\n19777\n19782\n19792\n19795\n19797\n19798\n19801\n19802\n19805\n19813\n19817\n19819\n19831\n19833\n19835\n19837\n19838\n19839\n19840\n19841\n19845\n19852\n19858\n19861\n19862\n19863\n1
9864\n19865\n19866\n19867\n19871\n19872\n19873\n19874\n19879\n19882\n19887\n19889\n19894\n19895\n19896\n19897\n19902\n19905\n19907\n19908\n19909\n19910\n19914\n19916\n19917\n19918\n19922\n19924\n19926\n19927\n19928\n19929\n19930\n19933\n19935\n19938\n19943\n19945\n19946\n19947\n19952\n19953\n19956\n19957\n19964\n19967\n19968\n19969\n19970\n19971\n19972\n19973\n19974\n19975\n19976\n19978\n19985\n19988\n19990\n19992\n19997\n19998\n19999\n20001\n20008\n20010\n20018\n20019\n20020\n20021\n20024\n20025\n20026\n20029\n20030\n20032\n20037\n20042\n20043\n20044\n20045\n20047\n20049\n20050\n20053\n20054\n20055\n20056\n20057\n20058\n20061\n20062\n20063\n20064\n20066\n20068\n20070\n20071\n20074\n20081\n20082\n20084\n20086\n20087\n20092\n20093\n20097\n20098\n20100\n20102\n20103\n20104\n20108\n20110\n20111\n20115\n20116\n20117\n20118\n20122\n20123\n20126\n20127\n20128\n20129\n20140\n20141\n20143\n20145\n20147\n20148\n20149\n20153\n20154\n20155\n20162\n20163\n20165\n20171\n20172\n20174\n20178\n20185\n20187\n20192\n20193\n20195\n20198\n20199\n20207\n20209\n20211\n20214\n20215\n20216\n20218\n20219\n20220\n20221\n20222\n20223\n20224\n20225\n20226\n20228\n20230\n20231\n20232\n20233\n20236\n20239\n20240\n20248\n20250\n20251\n20257\n20262\n20265\n20266\n20267\n20269\n20271\n20278\n20280\n20281\n20282\n20283\n20284\n20286\n20287\n20288\n20289\n20290\n20291\n20295\n20296\n20297\n20300\n20303\n20305\n20311\n20312\n20317\n20318\n20319\n20325\n20326\n20327\n20328\n20330\n20333\n20335\n20337\n20347\n20348\n20349\n20355\n20356\n20357\n20358\n20360\n20362\n20363\n20364\n20365\n20366\n20367\n20368\n20373\n20374\n20376\n20379\n20381\n20385\n20386\n20390\n20392\n20394\n20397\n20398\n20399\n20400\n20405\n20406\n20407\n20408\n20409\n20410\n20413\n20414\n20415\n20416\n20417\n20418\n20420\n20422\n20424\n20425\n20427\n20437\n20441\n20444\n20446\n20449\n20452\n20456\n20457\n20460\n20462\n20471\n20472\n20473\n20475\n20476\n20478\n20480\n20481\n20485\n20489\n20492\n20493\n20494\n20495\n20498\n20499\n20500\
n20504\n20505\n20506\n20511\n20512\n20515\n20516\n20517\n20518\n20519\n20520\n20523\n20524\n20526\n20528\n20529\n20530\n20534\n20538\n20542\n20543\n20546\n20549\n20550\n20552\n20553\n20554\n20555\n20557\n20558\n20562\n20563\n20566\n20568\n20569\n20572\n20575\n20577\n20578\n20581\n20583\n20586\n20589\n20590\n20591\n20592\n20594\n20595\n20596\n20597\n20598\n20601\n20603\n20604\n20606\n20607\n20609\n20613\n20614\n20617\n20618\n20619\n20621\n20622\n20624\n20626\n20628\n20629\n20630\n20631\n20635\n20639\n20641\n20642\n20643\n20644\n20651\n20652\n20654\n20656\n20660\n20661\n20662\n20663\n20664\n20665\n20666\n20667\n20668\n20669\n20670\n20672\n20679\n20680\n20682\n20684\n20687\n20688\n20689\n20690\n20692\n20693\n20694\n20695\n20696\n20701\n20702\n20704\n20705\n20706\n20707\n20710\n20711\n20712\n20713\n20714\n20715\n20716\n20717\n20719\n20720\n20724\n20725\n20727\n20728\n20732\n20733\n20734\n20738\n20742\n20749\n20754\n20755\n20756\n20757\n20763\n20764\n20767\n20768\n20777\n20783\n20786\n20787\n20788\n20789\n20790\n20791\n20792\n20793\n20794\n20797\n20798\n20800\n20806\n20808\n20809\n20812\n20814\n20819\n20823\n20824\n20825\n20830\n20831\n20833\n20836\n20837\n20839\n20840\n20843\n20847\n20848\n20850\n20851\n20852\n20853\n20854\n20855\n20859\n20860\n20861\n20863\n20866\n20870\n20873\n20876\n20877\n20880\n20881\n20882\n20884\n20886\n20890\n20895\n20897\n20898\n20900\n20902\n20903\n20904\n20906\n20908\n20911\n20913\n20917\n20919\n20920\n20921\n20926\n20927\n20936\n20937\n20938\n20940\n20941\n20942\n20943\n20946\n20947\n20949\n20950\n20955\n20956\n20959\n20963\n20968\n20979\n20980\n20981\n20982\n20985\n20988\n20989\n20998\n21001\n21002\n21007\n21009\n21012\n21014\n21017\n21024\n21025\n21026\n21027\n21028\n21031\n21035\n21037\n21038\n21039\n21040\n21042\n21043\n21044\n21045\n21046\n21047\n21048\n21049\n21050\n21051\n21053\n21054\n21055\n21056\n21057\n21058\n21059\n21063\n21064\n21065\n21066\n21067\n21068\n21069\n21072\n21076\n21078\n21083\n21084\n21086\n21087\n21088\n21092\n2109
4\n21100\n21101\n21112\n21113\n21115\n21119\n21121\n21122\n21124\n21125\n21128\n21133\n21136\n21138\n21145\n21146\n21147\n21148\n21149\n21151\n21153\n21156\n21158\n21159\n21163\n21166\n21169\n21170\n21171\n21172\n21174\n21179\n21180\n21181\n21186\n21189\n21190\n21196\n21197\n21198\n21199\n21200\n21201\n21202\n21204\n21219\n21221\n21224\n21229\n21232\n21238\n21252\n21257\n21268\n21269\n21271\n21279\n21281\n21282\n21284\n21286\n21287\n21297\n21303\n21305\n21307\n21311\n21312\n21313\n21315\n21316\n21317\n21318\n21319\n21321\n21327\n21336\n21337\n21339\n21343\n21345\n21349\n21352\n21356\n21357\n21358\n21361\n21363\n21369\n21370\n21374\n21376\n21377\n21378\n21382\n21386\n21388\n21389\n21397\n21398\n21402\n21407\n21408\n21411\n21414\n21415\n21419\n21425\n21428\n21429\n21431\n21432\n21433\n21438\n21441\n21451\n21459\n21464\n21467\n21469\n21476\n21479\n21484\n21485\n21486\n21489\n21494\n21495\n21497\n21501\n21502\n21507\n21511\n21515\n21516\n21517\n21519\n21522\n21524\n21528\n21529\n21532\n21533\n21534\n21537\n21541\n21545\n21547\n21548\n21549\n21550\n21554\n21560\n21563\n21569\n21573\n21576\n21578\n21579\n21580\n21581\n21585\n21589\n21590\n21591\n21598\n21601\n21604\n21606\n21611\n21615\n21618\n21620\n21623\n21625\n21627\n21635\n21637\n21638\n21641\n21644\n21645\n21648\n21649\n21650\n21659\n21661\n21662\n21663\n21665\n21666\n21668\n21669\n21672\n21673\n21675\n21678\n21679\n21680\n21686\n21688\n21689\n21690\n21692\n21702\n21711\n21712\n21713\n21716\n21717\n21721\n21722\n21723\n21724\n21727\n21728\n21729\n21734\n21739\n21740\n21741\n21743\n21744\n21747\n21748\n21749\n21754\n21757\n21758\n21760\n21761\n21762\n21763\n21765\n21772\n21773\n21774\n21777\n21778\n21781\n21782\n21784\n21786\n21791\n21792\n21795\n21796\n21800\n21801\n21803\n21804\n21806\n21811\n21815\n21816\n21817\n21818\n21819\n21823\n21826\n21827\n21828\n21829\n21830\n21831\n21832\n21834\n21836\n21838\n21839\n21841\n21842\n"
  },
  {
    "path": "timm/data/_info/imagenet21k_goog_to_22k_indices.txt",
    "content": "0\n1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n23\n24\n25\n26\n27\n28\n29\n30\n31\n32\n33\n34\n35\n36\n37\n38\n39\n40\n41\n42\n43\n44\n45\n46\n47\n48\n49\n50\n51\n52\n53\n54\n55\n56\n57\n58\n59\n60\n61\n62\n63\n64\n65\n66\n67\n68\n69\n70\n71\n72\n73\n74\n75\n76\n77\n78\n79\n80\n81\n82\n83\n84\n85\n86\n87\n88\n89\n90\n91\n92\n93\n94\n95\n96\n97\n98\n99\n100\n101\n102\n103\n104\n105\n106\n107\n108\n109\n110\n111\n112\n113\n114\n115\n116\n117\n118\n119\n120\n121\n122\n123\n124\n125\n126\n127\n128\n129\n130\n131\n132\n133\n134\n135\n136\n137\n138\n139\n140\n141\n142\n143\n144\n145\n146\n147\n148\n149\n150\n151\n152\n153\n154\n155\n156\n157\n158\n159\n160\n161\n162\n163\n164\n165\n166\n167\n168\n169\n170\n171\n172\n173\n174\n175\n176\n177\n178\n179\n180\n181\n182\n183\n184\n185\n186\n187\n188\n189\n190\n191\n192\n193\n194\n195\n196\n197\n198\n199\n200\n201\n202\n203\n204\n205\n206\n207\n208\n209\n210\n211\n212\n213\n214\n215\n216\n217\n218\n219\n220\n221\n222\n223\n224\n225\n226\n227\n228\n229\n230\n231\n232\n233\n234\n235\n236\n237\n238\n239\n240\n241\n242\n243\n244\n245\n246\n247\n248\n249\n250\n251\n252\n253\n254\n255\n256\n257\n258\n259\n260\n261\n262\n263\n264\n265\n266\n267\n268\n269\n270\n271\n272\n273\n274\n275\n276\n277\n278\n279\n280\n281\n282\n283\n284\n285\n286\n287\n288\n289\n290\n291\n292\n293\n294\n295\n296\n297\n298\n299\n300\n301\n302\n303\n304\n305\n306\n307\n308\n309\n310\n311\n312\n313\n314\n315\n316\n317\n318\n319\n320\n321\n322\n323\n324\n325\n326\n327\n328\n329\n330\n331\n332\n333\n334\n335\n336\n337\n338\n339\n340\n341\n342\n343\n344\n345\n346\n347\n348\n349\n350\n351\n352\n353\n354\n355\n356\n357\n358\n359\n360\n361\n362\n363\n364\n365\n366\n367\n368\n369\n370\n371\n372\n373\n374\n375\n376\n377\n378\n379\n380\n381\n382\n383\n384\n385\n386\n387\n388\n389\n390\n391\n392\n393\n394\n395\n396\n397\n398\n399\n400\n401\n402\n403\n404\n405\n406\n407\n408\n409\n410\n411\n412\n413\n414\n415\n416\n417\n418\
n419\n420\n421\n422\n423\n424\n425\n426\n427\n428\n429\n430\n431\n432\n433\n434\n435\n436\n437\n438\n439\n440\n441\n442\n443\n444\n445\n446\n447\n448\n449\n450\n451\n452\n453\n454\n455\n456\n457\n458\n459\n460\n461\n462\n463\n464\n465\n466\n467\n468\n469\n470\n471\n472\n473\n474\n475\n476\n477\n478\n479\n480\n481\n482\n483\n484\n485\n486\n487\n488\n489\n490\n491\n492\n493\n494\n495\n496\n497\n498\n499\n500\n501\n502\n503\n504\n505\n506\n507\n508\n509\n510\n511\n512\n513\n514\n515\n516\n517\n518\n519\n520\n521\n522\n523\n524\n525\n526\n527\n528\n529\n530\n531\n532\n533\n534\n535\n536\n537\n538\n539\n540\n541\n542\n543\n544\n545\n546\n547\n548\n549\n550\n551\n552\n553\n554\n555\n556\n557\n558\n559\n560\n561\n562\n563\n564\n565\n566\n567\n568\n569\n570\n571\n572\n573\n574\n575\n576\n577\n578\n579\n580\n581\n582\n583\n584\n585\n586\n587\n588\n589\n590\n591\n592\n593\n594\n595\n596\n597\n598\n599\n600\n601\n602\n603\n604\n605\n606\n607\n608\n609\n610\n611\n612\n613\n614\n615\n616\n617\n618\n619\n620\n621\n622\n623\n624\n625\n626\n627\n628\n629\n630\n631\n632\n633\n634\n635\n636\n637\n638\n639\n640\n641\n642\n643\n644\n645\n646\n647\n648\n649\n650\n651\n652\n653\n654\n655\n656\n657\n658\n659\n660\n661\n662\n663\n664\n665\n666\n667\n668\n669\n670\n671\n672\n673\n674\n675\n676\n677\n678\n679\n680\n681\n682\n683\n684\n685\n686\n687\n688\n689\n690\n691\n692\n693\n694\n695\n696\n697\n698\n699\n700\n701\n702\n703\n704\n705\n706\n707\n708\n709\n710\n711\n712\n713\n714\n715\n716\n717\n718\n719\n720\n721\n722\n723\n724\n725\n726\n727\n728\n729\n730\n731\n732\n733\n734\n735\n736\n737\n738\n739\n740\n741\n742\n743\n744\n745\n746\n747\n748\n749\n750\n751\n752\n753\n754\n755\n756\n757\n758\n759\n760\n761\n762\n763\n764\n765\n766\n767\n768\n769\n770\n771\n772\n773\n774\n775\n776\n777\n778\n779\n780\n781\n782\n783\n784\n785\n786\n787\n788\n789\n790\n791\n792\n793\n794\n795\n796\n797\n798\n799\n800\n801\n802\n803\n804\n805\n806\n807\n808\n809\n810\n811\n812\n813\n814\n815\n816\n817\n818\
n819\n820\n821\n822\n823\n824\n825\n826\n827\n828\n829\n830\n831\n832\n833\n834\n835\n836\n837\n838\n839\n840\n841\n842\n843\n844\n845\n846\n847\n848\n849\n850\n851\n852\n853\n854\n855\n856\n857\n858\n859\n860\n861\n862\n863\n864\n865\n866\n867\n868\n869\n870\n871\n872\n873\n874\n875\n876\n877\n878\n879\n880\n881\n882\n883\n884\n885\n886\n887\n888\n889\n890\n891\n892\n893\n894\n895\n896\n897\n898\n899\n900\n901\n902\n903\n904\n905\n906\n907\n908\n909\n910\n911\n912\n913\n914\n915\n916\n917\n918\n919\n920\n921\n922\n923\n924\n925\n926\n927\n928\n929\n930\n931\n932\n933\n934\n935\n936\n937\n938\n939\n940\n941\n942\n943\n944\n945\n946\n947\n948\n949\n950\n951\n952\n953\n954\n955\n956\n957\n958\n959\n960\n961\n962\n963\n964\n965\n966\n967\n968\n969\n970\n971\n972\n973\n974\n975\n976\n977\n978\n979\n980\n981\n982\n983\n984\n985\n986\n987\n988\n989\n990\n991\n992\n993\n994\n995\n996\n997\n998\n999\n1000\n1001\n1002\n1003\n1004\n1005\n1006\n1007\n1008\n1009\n1010\n1011\n1012\n1013\n1014\n1015\n1016\n1017\n1018\n1019\n1020\n1021\n1022\n1023\n1024\n1025\n1026\n1027\n1028\n1029\n1030\n1031\n1032\n1033\n1034\n1035\n1036\n1037\n1038\n1039\n1040\n1041\n1042\n1043\n1044\n1045\n1046\n1047\n1048\n1049\n1050\n1051\n1052\n1053\n1054\n1055\n1056\n1057\n1058\n1059\n1060\n1061\n1062\n1063\n1064\n1065\n1066\n1067\n1068\n1069\n1070\n1071\n1072\n1073\n1074\n1075\n1076\n1077\n1078\n1079\n1080\n1081\n1082\n1083\n1084\n1085\n1086\n1087\n1088\n1089\n1090\n1091\n1092\n1093\n1094\n1095\n1096\n1097\n1098\n1099\n1100\n1101\n1102\n1103\n1104\n1105\n1106\n1107\n1108\n1109\n1110\n1111\n1112\n1113\n1114\n1115\n1116\n1117\n1118\n1119\n1120\n1121\n1122\n1123\n1124\n1125\n1126\n1127\n1128\n1129\n1130\n1131\n1132\n1133\n1134\n1135\n1136\n1137\n1138\n1139\n1140\n1141\n1142\n1143\n1144\n1145\n1146\n1147\n1148\n1149\n1150\n1151\n1152\n1153\n1154\n1155\n1156\n1157\n1158\n1159\n1160\n1161\n1162\n1163\n1164\n1165\n1166\n1167\n1168\n1169\n1170\n1171\n1172\n1173\n1174\n1175\n1176\n1177\n1178\n1179\n1180\n1181\n11
82\n1183\n1184\n1185\n1186\n1187\n1188\n1189\n1190\n1191\n1192\n1193\n1194\n1195\n1196\n1197\n1198\n1199\n1200\n1201\n1202\n1203\n1204\n1205\n1206\n1207\n1208\n1209\n1210\n1211\n1212\n1213\n1214\n1215\n1216\n1217\n1218\n1219\n1220\n1221\n1222\n1223\n1224\n1225\n1226\n1227\n1228\n1229\n1230\n1231\n1232\n1233\n1234\n1235\n1236\n1237\n1238\n1239\n1240\n1241\n1242\n1243\n1244\n1245\n1246\n1247\n1248\n1249\n1250\n1251\n1252\n1253\n1254\n1255\n1256\n1257\n1258\n1259\n1260\n1261\n1262\n1263\n1264\n1265\n1266\n1267\n1268\n1269\n1270\n1271\n1272\n1273\n1274\n1275\n1276\n1277\n1278\n1279\n1280\n1281\n1282\n1283\n1284\n1285\n1286\n1287\n1288\n1289\n1290\n1291\n1292\n1293\n1294\n1295\n1296\n1297\n1298\n1299\n1300\n1301\n1302\n1303\n1304\n1305\n1306\n1307\n1308\n1309\n1310\n1311\n1312\n1313\n1314\n1315\n1316\n1317\n1318\n1319\n1320\n1321\n1322\n1323\n1324\n1325\n1326\n1327\n1328\n1329\n1330\n1331\n1332\n1333\n1334\n1335\n1336\n1337\n1338\n1339\n1340\n1341\n1342\n1343\n1344\n1345\n1346\n1347\n1348\n1349\n1350\n1351\n1352\n1353\n1354\n1355\n1356\n1357\n1358\n1359\n1360\n1361\n1362\n1363\n1364\n1365\n1366\n1367\n1368\n1369\n1370\n1371\n1372\n1373\n1374\n1375\n1376\n1377\n1378\n1379\n1380\n1381\n1382\n1383\n1384\n1385\n1386\n1387\n1388\n1389\n1390\n1391\n1392\n1393\n1394\n1395\n1396\n1397\n1398\n1399\n1400\n1401\n1402\n1403\n1404\n1405\n1406\n1407\n1408\n1409\n1410\n1411\n1412\n1413\n1414\n1415\n1416\n1417\n1418\n1419\n1420\n1421\n1422\n1423\n1424\n1425\n1426\n1427\n1428\n1429\n1430\n1431\n1432\n1433\n1434\n1435\n1436\n1437\n1438\n1439\n1440\n1441\n1442\n1443\n1444\n1445\n1446\n1447\n1448\n1449\n1450\n1451\n1452\n1453\n1454\n1455\n1456\n1457\n1458\n1459\n1460\n1461\n1462\n1463\n1464\n1465\n1466\n1467\n1468\n1469\n1470\n1471\n1472\n1473\n1474\n1475\n1476\n1477\n1478\n1479\n1480\n1481\n1482\n1483\n1484\n1485\n1486\n1487\n1488\n1489\n1490\n1491\n1492\n1493\n1494\n1495\n1496\n1497\n1498\n1499\n1500\n1501\n1502\n1503\n1504\n1505\n1506\n1507\n1508\n1509\n1510\n1511\n1512\n1513\n1514\n1515
\n1516\n1517\n1518\n1519\n1520\n1521\n1522\n1523\n1524\n1525\n1526\n1527\n1528\n1529\n1530\n1531\n1532\n1533\n1534\n1535\n1536\n1537\n1538\n1539\n1540\n1541\n1542\n1543\n1544\n1545\n1546\n1547\n1548\n1549\n1550\n1551\n1552\n1553\n1554\n1555\n1556\n1557\n1558\n1559\n1560\n1561\n1562\n1563\n1564\n1565\n1566\n1567\n1568\n1569\n1570\n1571\n1572\n1573\n1574\n1575\n1576\n1577\n1578\n1579\n1580\n1581\n1582\n1583\n1584\n1585\n1586\n1587\n1588\n1589\n1590\n1591\n1592\n1593\n1594\n1595\n1596\n1597\n1598\n1599\n1600\n1601\n1602\n1603\n1604\n1605\n1606\n1607\n1608\n1609\n1610\n1611\n1612\n1613\n1614\n1615\n1616\n1617\n1618\n1619\n1620\n1621\n1622\n1623\n1624\n1625\n1626\n1627\n1628\n1629\n1630\n1631\n1632\n1633\n1634\n1635\n1636\n1637\n1638\n1639\n1640\n1641\n1642\n1643\n1644\n1645\n1646\n1647\n1648\n1649\n1650\n1651\n1652\n1653\n1654\n1655\n1656\n1657\n1658\n1659\n1660\n1661\n1662\n1663\n1664\n1665\n1666\n1667\n1668\n1669\n1670\n1671\n1672\n1673\n1674\n1675\n1676\n1677\n1678\n1679\n1680\n1681\n1682\n1683\n1684\n1685\n1686\n1687\n1688\n1689\n1690\n1691\n1692\n1693\n1694\n1695\n1696\n1697\n1698\n1699\n1700\n1701\n1702\n1703\n1704\n1705\n1706\n1707\n1708\n1709\n1710\n1711\n1712\n1713\n1714\n1715\n1716\n1717\n1718\n1719\n1720\n1721\n1722\n1723\n1724\n1725\n1726\n1727\n1728\n1729\n1730\n1731\n1732\n1733\n1734\n1735\n1736\n1737\n1738\n1739\n1740\n1741\n1742\n1743\n1744\n1745\n1746\n1747\n1748\n1749\n1750\n1751\n1752\n1753\n1754\n1755\n1756\n1757\n1758\n1759\n1760\n1761\n1762\n1763\n1764\n1765\n1766\n1767\n1768\n1769\n1770\n1771\n1772\n1773\n1774\n1775\n1776\n1777\n1778\n1779\n1780\n1781\n1782\n1783\n1784\n1785\n1786\n1787\n1788\n1789\n1790\n1791\n1792\n1793\n1794\n1795\n1796\n1797\n1798\n1799\n1800\n1801\n1802\n1803\n1804\n1805\n1806\n1807\n1808\n1809\n1810\n1811\n1812\n1813\n1814\n1815\n1816\n1817\n1818\n1819\n1820\n1821\n1822\n1823\n1824\n1825\n1826\n1827\n1828\n1829\n1830\n1831\n1832\n1833\n1834\n1835\n1836\n1837\n1838\n1839\n1840\n1841\n1842\n1843\n1844\n1845\n1846\n1847\n1848\n
1849\n1850\n1851\n1852\n1853\n1854\n1855\n1856\n1857\n1858\n1859\n1860\n1861\n1862\n1863\n1864\n1865\n1866\n1867\n1868\n1869\n1870\n1871\n1872\n1873\n1874\n1875\n1876\n1877\n1878\n1879\n1880\n1881\n1882\n1883\n1884\n1885\n1886\n1887\n1888\n1889\n1890\n1891\n1892\n1893\n1894\n1895\n1896\n1897\n1898\n1899\n1900\n1901\n1902\n1903\n1904\n1905\n1906\n1907\n1908\n1909\n1910\n1911\n1912\n1913\n1914\n1915\n1916\n1917\n1918\n1919\n1920\n1921\n1922\n1923\n1924\n1925\n1926\n1927\n1928\n1929\n1930\n1931\n1932\n1933\n1934\n1935\n1936\n1937\n1938\n1939\n1940\n1941\n1942\n1943\n1944\n1945\n1946\n1947\n1948\n1949\n1950\n1951\n1952\n1953\n1954\n1955\n1956\n1957\n1958\n1959\n1960\n1961\n1962\n1963\n1964\n1965\n1966\n1967\n1968\n1969\n1970\n1971\n1972\n1973\n1974\n1975\n1976\n1977\n1978\n1979\n1980\n1981\n1982\n1983\n1984\n1985\n1986\n1987\n1988\n1989\n1990\n1991\n1992\n1993\n1994\n1995\n1996\n1997\n1998\n1999\n2000\n2001\n2002\n2003\n2004\n2005\n2006\n2007\n2008\n2009\n2010\n2011\n2012\n2013\n2014\n2015\n2016\n2017\n2018\n2019\n2020\n2021\n2022\n2023\n2024\n2025\n2026\n2027\n2028\n2029\n2030\n2031\n2032\n2033\n2034\n2035\n2036\n2037\n2038\n2039\n2040\n2041\n2042\n2043\n2044\n2045\n2046\n2047\n2048\n2049\n2050\n2051\n2052\n2053\n2054\n2055\n2056\n2057\n2058\n2059\n2060\n2061\n2062\n2063\n2064\n2065\n2066\n2067\n2068\n2069\n2070\n2071\n2072\n2073\n2074\n2075\n2076\n2077\n2078\n2079\n2080\n2081\n2082\n2083\n2084\n2085\n2086\n2087\n2088\n2089\n2090\n2091\n2092\n2093\n2094\n2095\n2096\n2097\n2098\n2099\n2100\n2101\n2102\n2103\n2104\n2105\n2106\n2107\n2108\n2109\n2110\n2111\n2112\n2113\n2114\n2115\n2116\n2117\n2118\n2119\n2120\n2121\n2122\n2123\n2124\n2125\n2126\n2127\n2128\n2129\n2130\n2131\n2132\n2133\n2134\n2135\n2136\n2137\n2138\n2139\n2140\n2141\n2142\n2143\n2144\n2145\n2146\n2147\n2148\n2149\n2150\n2151\n2152\n2153\n2154\n2155\n2156\n2157\n2158\n2159\n2160\n2161\n2162\n2163\n2164\n2165\n2166\n2167\n2168\n2169\n2170\n2171\n2172\n2173\n2174\n2175\n2176\n2177\n2178\n2179\n2180\n2181\n21
82\n2183\n2184\n2185\n2186\n2187\n2188\n2189\n2190\n2191\n2192\n2193\n2194\n2195\n2196\n2197\n2198\n2199\n2200\n2201\n2202\n2203\n2204\n2205\n2206\n2207\n2208\n2209\n2210\n2211\n2212\n2213\n2214\n2215\n2216\n2217\n2218\n2219\n2220\n2221\n2222\n2223\n2224\n2225\n2226\n2227\n2228\n2229\n2230\n2231\n2232\n2233\n2234\n2235\n2236\n2237\n2238\n2239\n2240\n2241\n2242\n2243\n2244\n2245\n2246\n2247\n2248\n2249\n2250\n2251\n2252\n2253\n2254\n2255\n2256\n2257\n2258\n2259\n2260\n2261\n2262\n2263\n2264\n2265\n2266\n2267\n2268\n2269\n2270\n2271\n2272\n2273\n2274\n2275\n2276\n2277\n2278\n2279\n2280\n2281\n2282\n2283\n2284\n2285\n2286\n2287\n2288\n2289\n2290\n2291\n2292\n2293\n2294\n2295\n2296\n2297\n2298\n2299\n2300\n2301\n2302\n2303\n2304\n2305\n2306\n2307\n2308\n2309\n2310\n2311\n2312\n2313\n2314\n2315\n2316\n2317\n2318\n2319\n2320\n2321\n2322\n2323\n2324\n2325\n2326\n2327\n2328\n2329\n2330\n2331\n2332\n2333\n2334\n2335\n2336\n2337\n2338\n2339\n2340\n2341\n2342\n2343\n2344\n2345\n2346\n2347\n2348\n2349\n2350\n2351\n2352\n2353\n2354\n2355\n2356\n2357\n2358\n2359\n2360\n2361\n2362\n2363\n2364\n2365\n2366\n2367\n2368\n2369\n2370\n2371\n2372\n2373\n2374\n2375\n2376\n2377\n2378\n2379\n2380\n2381\n2382\n2383\n2384\n2385\n2386\n2387\n2388\n2389\n2390\n2391\n2392\n2393\n2394\n2395\n2396\n2397\n2398\n2399\n2400\n2401\n2402\n2403\n2404\n2405\n2406\n2407\n2408\n2409\n2410\n2411\n2412\n2413\n2414\n2415\n2416\n2417\n2418\n2419\n2420\n2421\n2422\n2423\n2424\n2425\n2426\n2427\n2428\n2429\n2430\n2431\n2432\n2433\n2434\n2435\n2436\n2437\n2438\n2439\n2440\n2441\n2442\n2443\n2444\n2445\n2446\n2447\n2448\n2449\n2450\n2451\n2452\n2453\n2454\n2455\n2456\n2457\n2458\n2459\n2460\n2461\n2462\n2463\n2464\n2465\n2466\n2467\n2468\n2469\n2470\n2471\n2472\n2473\n2474\n2475\n2476\n2477\n2478\n2479\n2480\n2481\n2482\n2483\n2484\n2485\n2486\n2487\n2488\n2489\n2490\n2491\n2492\n2493\n2494\n2495\n2496\n2497\n2498\n2499\n2500\n2501\n2502\n2503\n2504\n2505\n2506\n2507\n2508\n2509\n2510\n2511\n2512\n2513\n2514\n2515
\n2516\n2517\n2518\n2519\n2520\n2521\n2522\n2523\n2524\n2525\n2526\n2527\n2528\n2529\n2530\n2531\n2532\n2533\n2534\n2535\n2536\n2537\n2538\n2539\n2540\n2541\n2542\n2543\n2544\n2545\n2546\n2547\n2548\n2549\n2550\n2551\n2552\n2553\n2554\n2555\n2556\n2557\n2558\n2559\n2560\n2561\n2562\n2563\n2564\n2565\n2566\n2567\n2568\n2569\n2570\n2571\n2572\n2573\n2574\n2575\n2576\n2577\n2578\n2579\n2580\n2581\n2582\n2583\n2584\n2585\n2586\n2587\n2588\n2589\n2590\n2591\n2592\n2593\n2594\n2595\n2596\n2597\n2598\n2599\n2600\n2601\n2602\n2603\n2604\n2605\n2606\n2607\n2608\n2609\n2610\n2611\n2612\n2613\n2614\n2615\n2616\n2617\n2618\n2619\n2620\n2621\n2622\n2623\n2624\n2625\n2626\n2627\n2628\n2629\n2630\n2631\n2632\n2633\n2634\n2635\n2636\n2637\n2638\n2639\n2640\n2641\n2642\n2643\n2644\n2645\n2646\n2647\n2648\n2649\n2650\n2651\n2652\n2653\n2654\n2655\n2656\n2657\n2658\n2659\n2660\n2661\n2662\n2663\n2664\n2665\n2666\n2667\n2668\n2669\n2670\n2671\n2672\n2673\n2674\n2675\n2676\n2677\n2678\n2679\n2680\n2681\n2682\n2683\n2684\n2685\n2686\n2687\n2688\n2689\n2690\n2691\n2692\n2693\n2694\n2695\n2696\n2697\n2698\n2699\n2700\n2701\n2702\n2703\n2704\n2705\n2706\n2707\n2708\n2709\n2710\n2711\n2712\n2713\n2714\n2715\n2716\n2717\n2718\n2719\n2720\n2721\n2722\n2723\n2724\n2725\n2726\n2727\n2728\n2729\n2730\n2731\n2732\n2733\n2734\n2735\n2736\n2737\n2738\n2739\n2740\n2741\n2742\n2743\n2744\n2745\n2746\n2747\n2748\n2749\n2750\n2751\n2752\n2753\n2754\n2755\n2756\n2757\n2758\n2759\n2760\n2761\n2762\n2763\n2764\n2765\n2766\n2767\n2768\n2769\n2770\n2771\n2772\n2773\n2774\n2775\n2776\n2777\n2778\n2779\n2780\n2781\n2782\n2783\n2784\n2785\n2786\n2787\n2788\n2789\n2790\n2791\n2792\n2793\n2794\n2795\n2796\n2797\n2798\n2799\n2800\n2801\n2802\n2803\n2804\n2805\n2806\n2807\n2808\n2809\n2810\n2811\n2812\n2813\n2814\n2815\n2816\n2817\n2818\n2819\n2820\n2821\n2822\n2823\n2824\n2825\n2826\n2827\n2828\n2829\n2830\n2831\n2832\n2833\n2834\n2835\n2836\n2837\n2838\n2839\n2840\n2841\n2842\n2843\n2844\n2845\n2846\n2847\n2848\n
2849\n2850\n2851\n2852\n2853\n2854\n2855\n2856\n2857\n2858\n2859\n2860\n2861\n2862\n2863\n2864\n2865\n2866\n2867\n2868\n2869\n2870\n2871\n2872\n2873\n2874\n2875\n2876\n2877\n2878\n2879\n2880\n2881\n2882\n2883\n2884\n2885\n2886\n2887\n2888\n2889\n2890\n2891\n2892\n2893\n2894\n2895\n2896\n2897\n2898\n2899\n2900\n2901\n2902\n2903\n2904\n2905\n2906\n2907\n2908\n2909\n2910\n2911\n2912\n2913\n2914\n2915\n2916\n2917\n2918\n2919\n2920\n2921\n2922\n2923\n2924\n2925\n2926\n2927\n2928\n2929\n2930\n2931\n2932\n2933\n2934\n2935\n2936\n2937\n2938\n2939\n2940\n2941\n2942\n2943\n2944\n2945\n2946\n2947\n2948\n2949\n2950\n2951\n2952\n2953\n2954\n2955\n2956\n2957\n2958\n2959\n2960\n2961\n2962\n2963\n2964\n2965\n2966\n2967\n2968\n2969\n2970\n2971\n2972\n2973\n2974\n2975\n2976\n2977\n2978\n2979\n2980\n2981\n2982\n2983\n2984\n2985\n2986\n2987\n2988\n2989\n2990\n2991\n2992\n2993\n2994\n2995\n2996\n2997\n2998\n2999\n3000\n3001\n3002\n3003\n3004\n3005\n3006\n3007\n3008\n3009\n3010\n3011\n3012\n3013\n3014\n3015\n3016\n3017\n3018\n3019\n3020\n3021\n3022\n3023\n3024\n3025\n3026\n3027\n3028\n3029\n3030\n3031\n3032\n3033\n3034\n3035\n3036\n3037\n3038\n3039\n3040\n3041\n3042\n3043\n3044\n3045\n3046\n3047\n3048\n3049\n3050\n3051\n3052\n3053\n3054\n3055\n3056\n3057\n3058\n3059\n3060\n3061\n3062\n3063\n3064\n3065\n3066\n3067\n3068\n3069\n3070\n3071\n3072\n3073\n3074\n3075\n3076\n3077\n3078\n3079\n3080\n3081\n3082\n3083\n3084\n3085\n3086\n3087\n3088\n3089\n3090\n3091\n3092\n3093\n3094\n3095\n3096\n3097\n3098\n3099\n3100\n3101\n3102\n3103\n3104\n3105\n3106\n3107\n3108\n3109\n3110\n3111\n3112\n3113\n3114\n3115\n3116\n3117\n3118\n3119\n3120\n3121\n3122\n3123\n3124\n3125\n3126\n3127\n3128\n3129\n3130\n3131\n3132\n3133\n3134\n3135\n3136\n3137\n3138\n3139\n3140\n3141\n3142\n3143\n3144\n3145\n3146\n3147\n3148\n3149\n3150\n3151\n3152\n3153\n3154\n3155\n3156\n3157\n3158\n3159\n3160\n3161\n3162\n3163\n3164\n3165\n3166\n3167\n3168\n3169\n3170\n3171\n3172\n3173\n3174\n3175\n3176\n3177\n3178\n3179\n3180\n3181\n31
82\n3183\n3184\n3185\n3186\n3187\n3188\n3189\n3190\n3191\n3192\n3193\n3194\n3195\n3196\n3197\n3198\n3199\n3200\n3201\n3202\n3203\n3204\n3205\n3206\n3207\n3208\n3209\n3210\n3211\n3212\n3213\n3214\n3215\n3216\n3217\n3218\n3219\n3220\n3221\n3222\n3223\n3224\n3225\n3226\n3227\n3228\n3229\n3230\n3231\n3232\n3233\n3234\n3235\n3236\n3237\n3238\n3239\n3240\n3241\n3242\n3243\n3244\n3245\n3246\n3247\n3248\n3249\n3250\n3251\n3252\n3253\n3254\n3255\n3256\n3257\n3258\n3259\n3260\n3261\n3262\n3263\n3264\n3265\n3266\n3267\n3268\n3269\n3270\n3271\n3272\n3273\n3274\n3275\n3276\n3277\n3278\n3279\n3280\n3281\n3282\n3283\n3284\n3285\n3286\n3287\n3288\n3289\n3290\n3291\n3292\n3293\n3294\n3295\n3296\n3297\n3298\n3299\n3300\n3301\n3302\n3303\n3304\n3305\n3306\n3307\n3308\n3309\n3310\n3311\n3312\n3313\n3314\n3315\n3316\n3317\n3318\n3319\n3320\n3321\n3322\n3323\n3324\n3325\n3326\n3327\n3328\n3329\n3330\n3331\n3332\n3333\n3334\n3335\n3336\n3337\n3338\n3339\n3340\n3341\n3342\n3343\n3344\n3345\n3346\n3347\n3348\n3349\n3350\n3351\n3352\n3353\n3354\n3355\n3356\n3357\n3358\n3359\n3360\n3361\n3362\n3363\n3364\n3365\n3366\n3367\n3368\n3369\n3370\n3371\n3372\n3373\n3374\n3375\n3376\n3377\n3378\n3379\n3380\n3381\n3382\n3383\n3384\n3385\n3386\n3387\n3388\n3389\n3390\n3391\n3392\n3393\n3394\n3395\n3396\n3397\n3398\n3399\n3400\n3401\n3402\n3403\n3404\n3405\n3406\n3407\n3408\n3409\n3410\n3411\n3412\n3413\n3414\n3415\n3416\n3417\n3418\n3419\n3420\n3421\n3422\n3423\n3424\n3425\n3426\n3427\n3428\n3429\n3430\n3431\n3432\n3433\n3434\n3435\n3436\n3437\n3438\n3439\n3440\n3441\n3442\n3443\n3444\n3445\n3446\n3447\n3448\n3449\n3450\n3451\n3452\n3453\n3454\n3455\n3456\n3457\n3458\n3459\n3460\n3461\n3462\n3463\n3464\n3465\n3466\n3467\n3468\n3469\n3470\n3471\n3472\n3473\n3474\n3475\n3476\n3477\n3478\n3479\n3480\n3481\n3482\n3483\n3484\n3485\n3486\n3487\n3488\n3489\n3490\n3491\n3492\n3493\n3494\n3495\n3496\n3497\n3498\n3499\n3500\n3501\n3502\n3503\n3504\n3505\n3506\n3507\n3508\n3509\n3510\n3511\n3512\n3513\n3514\n3515
\n3516\n3517\n3518\n3519\n3520\n3521\n3522\n3523\n3524\n3525\n3526\n3527\n3528\n3529\n3530\n3531\n3532\n3533\n3534\n3535\n3536\n3537\n3538\n3539\n3540\n3541\n3542\n3543\n3544\n3545\n3546\n3547\n3548\n3549\n3550\n3551\n3552\n3553\n3554\n3555\n3556\n3557\n3558\n3559\n3560\n3561\n3562\n3563\n3564\n3565\n3566\n3567\n3568\n3569\n3570\n3571\n3572\n3573\n3574\n3575\n3576\n3577\n3578\n3579\n3580\n3581\n3582\n3583\n3584\n3585\n3586\n3587\n3588\n3589\n3590\n3591\n3592\n3593\n3594\n3595\n3596\n3597\n3598\n3599\n3600\n3601\n3602\n3603\n3604\n3605\n3606\n3607\n3608\n3609\n3610\n3611\n3612\n3613\n3614\n3615\n3616\n3617\n3618\n3619\n3620\n3621\n3622\n3623\n3624\n3625\n3626\n3627\n3628\n3629\n3630\n3631\n3632\n3633\n3634\n3635\n3636\n3637\n3638\n3639\n3640\n3641\n3642\n3643\n3644\n3645\n3646\n3647\n3648\n3649\n3650\n3651\n3652\n3653\n3654\n3655\n3656\n3657\n3658\n3659\n3660\n3661\n3662\n3663\n3664\n3665\n3666\n3667\n3668\n3669\n3670\n3671\n3672\n3673\n3674\n3675\n3676\n3677\n3678\n3679\n3680\n3681\n3682\n3683\n3684\n3685\n3686\n3687\n3688\n3689\n3690\n3691\n3692\n3693\n3694\n3695\n3696\n3697\n3698\n3699\n3700\n3701\n3702\n3703\n3704\n3705\n3706\n3707\n3708\n3709\n3710\n3711\n3712\n3713\n3714\n3715\n3716\n3717\n3718\n3719\n3720\n3721\n3722\n3723\n3724\n3725\n3726\n3727\n3728\n3729\n3730\n3731\n3732\n3733\n3734\n3735\n3736\n3737\n3738\n3739\n3740\n3741\n3742\n3743\n3744\n3745\n3746\n3747\n3748\n3749\n3750\n3751\n3752\n3753\n3754\n3755\n3756\n3757\n3758\n3759\n3760\n3761\n3762\n3763\n3764\n3765\n3766\n3767\n3768\n3769\n3770\n3771\n3772\n3773\n3774\n3775\n3776\n3777\n3778\n3779\n3780\n3781\n3782\n3783\n3784\n3785\n3786\n3787\n3788\n3789\n3790\n3791\n3792\n3793\n3794\n3795\n3796\n3797\n3798\n3799\n3800\n3801\n3802\n3803\n3804\n3805\n3806\n3807\n3808\n3809\n3810\n3811\n3812\n3813\n3814\n3815\n3816\n3817\n3818\n3819\n3820\n3821\n3822\n3823\n3824\n3825\n3826\n3827\n3828\n3829\n3830\n3831\n3832\n3833\n3834\n3835\n3836\n3837\n3838\n3839\n3840\n3841\n3842\n3843\n3844\n3845\n3846\n3847\n3848\n
3849\n3850\n3851\n3852\n3853\n3854\n3855\n3856\n3857\n3858\n3859\n3860\n3861\n3862\n3863\n3864\n3865\n3866\n3867\n3868\n3869\n3870\n3871\n3872\n3873\n3874\n3875\n3876\n3877\n3878\n3879\n3880\n3881\n3882\n3883\n3884\n3885\n3886\n3887\n3888\n3889\n3890\n3891\n3892\n3893\n3894\n3895\n3896\n3897\n3898\n3899\n3900\n3901\n3902\n3903\n3904\n3905\n3906\n3907\n3908\n3909\n3910\n3911\n3912\n3913\n3914\n3915\n3916\n3917\n3918\n3919\n3920\n3921\n3922\n3923\n3924\n3925\n3926\n3927\n3928\n3929\n3930\n3931\n3932\n3933\n3934\n3935\n3936\n3937\n3938\n3939\n3940\n3941\n3942\n3943\n3944\n3945\n3946\n3947\n3948\n3949\n3950\n3951\n3952\n3953\n3954\n3955\n3956\n3957\n3958\n3959\n3960\n3961\n3962\n3963\n3964\n3965\n3966\n3967\n3968\n3969\n3970\n3971\n3972\n3973\n3974\n3975\n3976\n3977\n3978\n3979\n3980\n3981\n3982\n3983\n3984\n3985\n3986\n3987\n3988\n3989\n3990\n3991\n3992\n3993\n3994\n3995\n3996\n3997\n3998\n3999\n4000\n4001\n4002\n4003\n4004\n4005\n4006\n4007\n4008\n4009\n4010\n4011\n4012\n4013\n4014\n4015\n4016\n4017\n4018\n4019\n4020\n4021\n4022\n4023\n4024\n4025\n4026\n4027\n4028\n4029\n4030\n4031\n4032\n4033\n4034\n4035\n4036\n4037\n4038\n4039\n4040\n4041\n4042\n4043\n4044\n4045\n4046\n4047\n4048\n4049\n4050\n4051\n4052\n4053\n4054\n4055\n4056\n4057\n4058\n4059\n4060\n4061\n4062\n4063\n4064\n4065\n4066\n4067\n4068\n4069\n4070\n4071\n4072\n4073\n4074\n4075\n4076\n4077\n4078\n4079\n4080\n4081\n4082\n4083\n4084\n4085\n4086\n4087\n4088\n4089\n4090\n4091\n4092\n4093\n4094\n4095\n4096\n4097\n4098\n4099\n4100\n4101\n4102\n4103\n4104\n4105\n4106\n4107\n4108\n4109\n4110\n4111\n4112\n4113\n4114\n4115\n4116\n4117\n4118\n4119\n4120\n4121\n4122\n4123\n4124\n4125\n4126\n4127\n4128\n4129\n4130\n4131\n4132\n4133\n4134\n4135\n4136\n4137\n4138\n4139\n4140\n4141\n4142\n4143\n4144\n4145\n4146\n4147\n4148\n4149\n4150\n4151\n4152\n4153\n4154\n4155\n4156\n4157\n4158\n4159\n4160\n4161\n4162\n4163\n4164\n4165\n4166\n4167\n4168\n4169\n4170\n4171\n4172\n4173\n4174\n4175\n4176\n4177\n4178\n4179\n4180\n4181\n41
82\n4183\n4184\n4185\n4186\n4187\n4188\n4189\n4190\n4191\n4192\n4193\n4194\n4195\n4196\n4197\n4198\n4199\n4200\n4201\n4202\n4203\n4204\n4205\n4206\n4207\n4208\n4209\n4210\n4211\n4212\n4213\n4214\n4215\n4216\n4217\n4218\n4219\n4220\n4221\n4222\n4223\n4224\n4225\n4226\n4227\n4228\n4229\n4230\n4231\n4232\n4233\n4234\n4235\n4236\n4237\n4238\n4239\n4240\n4241\n4242\n4243\n4244\n4245\n4246\n4247\n4248\n4249\n4250\n4251\n4252\n4253\n4254\n4255\n4256\n4257\n4258\n4259\n4260\n4261\n4262\n4263\n4264\n4265\n4266\n4267\n4268\n4269\n4270\n4271\n4272\n4273\n4274\n4275\n4276\n4277\n4278\n4279\n4280\n4281\n4282\n4283\n4284\n4285\n4286\n4287\n4288\n4289\n4290\n4291\n4292\n4293\n4294\n4295\n4296\n4297\n4298\n4299\n4300\n4301\n4302\n4303\n4304\n4305\n4306\n4307\n4308\n4309\n4310\n4311\n4312\n4313\n4314\n4315\n4316\n4317\n4318\n4319\n4320\n4321\n4322\n4323\n4324\n4325\n4326\n4327\n4328\n4329\n4330\n4331\n4332\n4333\n4334\n4335\n4336\n4337\n4338\n4339\n4340\n4341\n4342\n4343\n4344\n4345\n4346\n4347\n4348\n4349\n4350\n4351\n4352\n4353\n4354\n4355\n4356\n4357\n4358\n4359\n4360\n4361\n4362\n4363\n4364\n4365\n4366\n4367\n4368\n4369\n4370\n4371\n4372\n4373\n4374\n4375\n4376\n4377\n4378\n4379\n4380\n4381\n4382\n4383\n4384\n4385\n4386\n4387\n4388\n4389\n4390\n4391\n4392\n4393\n4394\n4395\n4396\n4397\n4398\n4399\n4400\n4401\n4402\n4403\n4404\n4405\n4406\n4407\n4408\n4409\n4410\n4411\n4412\n4413\n4414\n4415\n4416\n4417\n4418\n4419\n4420\n4421\n4422\n4423\n4424\n4425\n4426\n4427\n4428\n4429\n4430\n4431\n4432\n4433\n4434\n4435\n4436\n4437\n4438\n4439\n4440\n4441\n4442\n4443\n4444\n4445\n4446\n4447\n4448\n4449\n4450\n4451\n4452\n4453\n4454\n4455\n4456\n4457\n4458\n4459\n4460\n4461\n4462\n4463\n4464\n4465\n4466\n4467\n4468\n4469\n4470\n4471\n4472\n4473\n4474\n4475\n4476\n4477\n4478\n4479\n4480\n4481\n4482\n4483\n4484\n4485\n4486\n4487\n4488\n4489\n4490\n4491\n4492\n4493\n4494\n4495\n4496\n4497\n4498\n4499\n4500\n4501\n4502\n4503\n4504\n4505\n4506\n4507\n4508\n4509\n4510\n4511\n4512\n4513\n4514\n4515
\n4516\n4517\n4518\n4519\n4520\n4521\n4522\n4523\n4524\n4525\n4526\n4527\n4528\n4529\n4530\n4531\n4532\n4533\n4534\n4535\n4536\n4537\n4538\n4539\n4540\n4541\n4542\n4543\n4544\n4545\n4546\n4547\n4548\n4549\n4550\n4551\n4552\n4553\n4554\n4555\n4556\n4557\n4558\n4559\n4560\n4561\n4562\n4563\n4564\n4565\n4566\n4567\n4568\n4569\n4570\n4571\n4572\n4573\n4574\n4575\n4576\n4577\n4578\n4579\n4580\n4581\n4582\n4583\n4584\n4585\n4586\n4587\n4588\n4589\n4590\n4591\n4592\n4593\n4594\n4595\n4596\n4597\n4598\n4599\n4600\n4601\n4602\n4603\n4604\n4605\n4606\n4607\n4608\n4609\n4610\n4611\n4612\n4613\n4614\n4615\n4616\n4617\n4618\n4619\n4620\n4621\n4622\n4623\n4624\n4625\n4626\n4627\n4628\n4629\n4630\n4631\n4632\n4633\n4634\n4635\n4636\n4637\n4638\n4639\n4640\n4641\n4642\n4643\n4644\n4645\n4646\n4647\n4648\n4649\n4650\n4651\n4652\n4653\n4654\n4655\n4656\n4657\n4658\n4659\n4660\n4661\n4662\n4663\n4664\n4665\n4666\n4667\n4668\n4669\n4670\n4671\n4672\n4673\n4674\n4675\n4676\n4677\n4678\n4679\n4680\n4681\n4682\n4683\n4684\n4685\n4686\n4687\n4688\n4689\n4690\n4691\n4692\n4693\n4694\n4695\n4696\n4697\n4698\n4699\n4700\n4701\n4702\n4703\n4704\n4705\n4706\n4707\n4708\n4709\n4710\n4711\n4712\n4713\n4714\n4715\n4716\n4717\n4718\n4719\n4720\n4721\n4722\n4723\n4724\n4725\n4726\n4727\n4728\n4729\n4730\n4731\n4732\n4733\n4734\n4735\n4736\n4737\n4738\n4739\n4740\n4741\n4742\n4743\n4744\n4745\n4746\n4747\n4748\n4749\n4750\n4751\n4752\n4753\n4754\n4755\n4756\n4757\n4758\n4759\n4760\n4761\n4762\n4763\n4764\n4765\n4766\n4767\n4768\n4769\n4770\n4771\n4772\n4773\n4774\n4775\n4776\n4777\n4778\n4779\n4780\n4781\n4782\n4783\n4784\n4785\n4786\n4787\n4788\n4789\n4790\n4791\n4792\n4793\n4794\n4795\n4796\n4797\n4798\n4799\n4800\n4801\n4802\n4803\n4804\n4805\n4806\n4807\n4808\n4809\n4810\n4811\n4812\n4813\n4814\n4815\n4816\n4817\n4818\n4819\n4820\n4821\n4822\n4823\n4824\n4825\n4826\n4827\n4828\n4829\n4830\n4831\n4832\n4833\n4834\n4835\n4836\n4837\n4838\n4839\n4840\n4841\n4842\n4843\n4844\n4845\n4846\n4847\n4848\n
4849\n4850\n4851\n4852\n4853\n4854\n4855\n4856\n4857\n4858\n4859\n4860\n4861\n4862\n4863\n4864\n4865\n4866\n4867\n4868\n4869\n4870\n4871\n4872\n4873\n4874\n4875\n4876\n4877\n4878\n4879\n4880\n4881\n4882\n4883\n4884\n4885\n4886\n4887\n4888\n4889\n4890\n4891\n4892\n4893\n4894\n4895\n4896\n4897\n4898\n4899\n4900\n4901\n4902\n4903\n4904\n4905\n4906\n4907\n4908\n4909\n4910\n4911\n4912\n4913\n4914\n4915\n4916\n4917\n4918\n4919\n4920\n4921\n4922\n4923\n4924\n4925\n4926\n4927\n4928\n4929\n4930\n4931\n4932\n4933\n4934\n4935\n4936\n4937\n4938\n4939\n4940\n4941\n4942\n4943\n4944\n4945\n4946\n4947\n4948\n4949\n4950\n4951\n4952\n4953\n4954\n4955\n4956\n4957\n4958\n4959\n4960\n4961\n4962\n4963\n4964\n4965\n4966\n4967\n4968\n4969\n4970\n4971\n4972\n4973\n4974\n4975\n4976\n4977\n4978\n4979\n4980\n4981\n4982\n4983\n4984\n4985\n4986\n4987\n4988\n4989\n4990\n4991\n4992\n4993\n4994\n4995\n4996\n4997\n4998\n4999\n5000\n5001\n5002\n5003\n5004\n5005\n5006\n5007\n5008\n5009\n5010\n5011\n5012\n5013\n5014\n5015\n5016\n5017\n5018\n5019\n5020\n5021\n5022\n5023\n5024\n5025\n5026\n5027\n5028\n5029\n5030\n5031\n5032\n5033\n5034\n5035\n5036\n5037\n5038\n5039\n5040\n5041\n5042\n5043\n5044\n5045\n5046\n5047\n5048\n5049\n5050\n5051\n5052\n5053\n5054\n5055\n5056\n5057\n5058\n5059\n5060\n5061\n5062\n5063\n5064\n5065\n5066\n5067\n5068\n5069\n5070\n5071\n5072\n5073\n5074\n5075\n5076\n5077\n5078\n5079\n5080\n5081\n5082\n5083\n5084\n5085\n5086\n5087\n5088\n5089\n5090\n5091\n5092\n5093\n5094\n5095\n5096\n5097\n5098\n5099\n5100\n5101\n5102\n5103\n5104\n5105\n5106\n5107\n5108\n5109\n5110\n5111\n5112\n5113\n5114\n5115\n5116\n5117\n5118\n5119\n5120\n5121\n5122\n5123\n5124\n5125\n5126\n5127\n5128\n5129\n5130\n5131\n5132\n5133\n5134\n5135\n5136\n5137\n5138\n5139\n5140\n5141\n5142\n5143\n5144\n5145\n5146\n5147\n5148\n5149\n5150\n5151\n5152\n5153\n5154\n5155\n5156\n5157\n5158\n5159\n5160\n5161\n5162\n5163\n5164\n5165\n5166\n5167\n5168\n5169\n5170\n5171\n5172\n5173\n5174\n5175\n5176\n5177\n5178\n5179\n5180\n5181\n51
82\n5183\n5184\n5185\n5186\n5187\n5188\n5189\n5190\n5191\n5192\n5193\n5194\n5195\n5196\n5197\n5198\n5199\n5200\n5201\n5202\n5203\n5204\n5205\n5206\n5207\n5208\n5209\n5210\n5211\n5212\n5213\n5214\n5215\n5216\n5217\n5218\n5219\n5220\n5221\n5222\n5223\n5224\n5225\n5226\n5227\n5228\n5229\n5230\n5231\n5232\n5233\n5234\n5235\n5236\n5237\n5238\n5239\n5240\n5241\n5242\n5243\n5244\n5245\n5246\n5247\n5248\n5249\n5250\n5251\n5252\n5253\n5254\n5255\n5256\n5257\n5258\n5259\n5260\n5261\n5262\n5263\n5264\n5265\n5266\n5267\n5268\n5269\n5270\n5271\n5272\n5273\n5274\n5275\n5276\n5277\n5278\n5279\n5280\n5281\n5282\n5283\n5284\n5285\n5286\n5287\n5288\n5289\n5290\n5291\n5292\n5293\n5294\n5295\n5296\n5297\n5298\n5299\n5300\n5301\n5302\n5303\n5304\n5305\n5306\n5307\n5308\n5309\n5310\n5311\n5312\n5313\n5314\n5315\n5316\n5317\n5318\n5319\n5320\n5321\n5322\n5323\n5324\n5325\n5326\n5327\n5328\n5329\n5330\n5331\n5332\n5333\n5334\n5335\n5336\n5337\n5338\n5339\n5340\n5341\n5342\n5343\n5344\n5345\n5346\n5347\n5348\n5349\n5350\n5351\n5352\n5353\n5354\n5355\n5356\n5357\n5358\n5359\n5360\n5361\n5362\n5363\n5364\n5365\n5366\n5367\n5368\n5369\n5370\n5371\n5372\n5373\n5374\n5375\n5376\n5377\n5378\n5379\n5380\n5381\n5382\n5383\n5384\n5385\n5386\n5387\n5388\n5389\n5390\n5391\n5392\n5393\n5394\n5395\n5396\n5397\n5398\n5399\n5400\n5401\n5402\n5403\n5404\n5405\n5406\n5407\n5408\n5409\n5410\n5411\n5412\n5413\n5414\n5415\n5416\n5417\n5418\n5419\n5420\n5421\n5422\n5423\n5424\n5425\n5426\n5427\n5428\n5429\n5430\n5431\n5432\n5433\n5434\n5435\n5436\n5437\n5438\n5439\n5440\n5441\n5442\n5443\n5444\n5445\n5446\n5447\n5448\n5449\n5450\n5451\n5452\n5453\n5454\n5455\n5456\n5457\n5458\n5459\n5460\n5461\n5462\n5463\n5464\n5465\n5466\n5467\n5468\n5469\n5470\n5471\n5472\n5473\n5474\n5475\n5476\n5477\n5478\n5479\n5480\n5481\n5482\n5483\n5484\n5485\n5486\n5487\n5488\n5489\n5490\n5491\n5492\n5493\n5494\n5495\n5496\n5497\n5498\n5499\n5500\n5501\n5502\n5503\n5504\n5505\n5506\n5507\n5508\n5509\n5510\n5511\n5512\n5513\n5514\n5515
\n5516\n5517\n5518\n5519\n5520\n5521\n5522\n5523\n5524\n5525\n5526\n5527\n5528\n5529\n5530\n5531\n5532\n5533\n5534\n5535\n5536\n5537\n5538\n5539\n5540\n5541\n5542\n5543\n5544\n5545\n5546\n5547\n5548\n5549\n5550\n5551\n5552\n5553\n5554\n5555\n5556\n5557\n5558\n5559\n5560\n5561\n5562\n5563\n5564\n5565\n5566\n5567\n5568\n5569\n5570\n5571\n5572\n5573\n5574\n5575\n5576\n5577\n5578\n5579\n5580\n5581\n5582\n5583\n5584\n5585\n5586\n5587\n5588\n5589\n5590\n5591\n5592\n5593\n5594\n5595\n5596\n5597\n5598\n5599\n5600\n5601\n5602\n5603\n5604\n5605\n5606\n5607\n5608\n5609\n5610\n5611\n5612\n5613\n5614\n5615\n5616\n5617\n5618\n5619\n5620\n5621\n5622\n5623\n5624\n5625\n5626\n5627\n5628\n5629\n5630\n5631\n5632\n5633\n5634\n5635\n5636\n5637\n5638\n5639\n5640\n5641\n5642\n5643\n5644\n5645\n5646\n5647\n5648\n5649\n5650\n5651\n5652\n5653\n5654\n5655\n5656\n5657\n5658\n5659\n5660\n5661\n5662\n5663\n5664\n5665\n5666\n5667\n5668\n5669\n5670\n5671\n5672\n5673\n5674\n5675\n5676\n5677\n5678\n5679\n5680\n5681\n5682\n5683\n5684\n5685\n5686\n5687\n5688\n5689\n5690\n5691\n5692\n5693\n5694\n5695\n5696\n5697\n5698\n5699\n5700\n5701\n5702\n5703\n5704\n5705\n5706\n5707\n5708\n5709\n5710\n5711\n5712\n5713\n5714\n5715\n5716\n5717\n5718\n5719\n5720\n5721\n5722\n5723\n5724\n5725\n5726\n5727\n5728\n5729\n5730\n5731\n5732\n5733\n5734\n5735\n5736\n5737\n5738\n5739\n5740\n5741\n5742\n5743\n5744\n5745\n5746\n5747\n5748\n5749\n5750\n5751\n5752\n5753\n5754\n5755\n5756\n5757\n5758\n5759\n5760\n5761\n5762\n5763\n5764\n5765\n5766\n5767\n5768\n5769\n5770\n5771\n5772\n5773\n5774\n5775\n5776\n5777\n5778\n5779\n5780\n5781\n5782\n5783\n5784\n5785\n5786\n5787\n5788\n5789\n5790\n5791\n5792\n5793\n5794\n5795\n5796\n5797\n5798\n5799\n5800\n5801\n5802\n5803\n5804\n5805\n5806\n5807\n5808\n5809\n5810\n5811\n5812\n5813\n5814\n5815\n5816\n5817\n5818\n5819\n5820\n5821\n5822\n5823\n5824\n5825\n5826\n5827\n5828\n5829\n5830\n5831\n5832\n5833\n5834\n5835\n5836\n5837\n5838\n5839\n5840\n5841\n5842\n5843\n5844\n5845\n5846\n5847\n5848\n
5849\n5850\n5851\n5852\n5853\n5854\n5855\n5856\n5857\n5858\n5859\n5860\n5861\n5862\n5863\n5864\n5865\n5866\n5867\n5868\n5869\n5870\n5871\n5872\n5873\n5874\n5875\n5876\n5877\n5878\n5879\n5880\n5881\n5882\n5883\n5884\n5885\n5886\n5887\n5888\n5889\n5890\n5891\n5892\n5893\n5894\n5895\n5896\n5897\n5898\n5899\n5900\n5901\n5902\n5903\n5904\n5905\n5906\n5907\n5908\n5909\n5910\n5911\n5912\n5913\n5914\n5915\n5916\n5917\n5918\n5919\n5920\n5921\n5922\n5923\n5924\n5925\n5926\n5927\n5928\n5929\n5930\n5931\n5932\n5933\n5934\n5935\n5936\n5937\n5938\n5939\n5940\n5941\n5942\n5943\n5944\n5945\n5946\n5947\n5948\n5949\n5950\n5951\n5952\n5953\n5954\n5955\n5956\n5957\n5958\n5959\n5960\n5961\n5962\n5963\n5964\n5965\n5966\n5967\n5968\n5969\n5970\n5971\n5972\n5973\n5974\n5975\n5976\n5977\n5978\n5979\n5980\n5981\n5982\n5983\n5984\n5985\n5986\n5987\n5988\n5989\n5990\n5991\n5992\n5993\n5994\n5995\n5996\n5997\n5998\n5999\n6000\n6001\n6002\n6003\n6004\n6005\n6006\n6007\n6008\n6009\n6010\n6011\n6012\n6013\n6014\n6015\n6016\n6017\n6018\n6019\n6020\n6021\n6022\n6023\n6024\n6025\n6026\n6027\n6028\n6029\n6030\n6031\n6032\n6033\n6034\n6035\n6036\n6037\n6038\n6039\n6040\n6041\n6042\n6043\n6044\n6045\n6046\n6047\n6048\n6049\n6050\n6051\n6052\n6053\n6054\n6055\n6056\n6057\n6058\n6059\n6060\n6061\n6062\n6063\n6064\n6065\n6066\n6067\n6068\n6069\n6070\n6071\n6072\n6073\n6074\n6075\n6076\n6077\n6078\n6079\n6080\n6081\n6082\n6083\n6084\n6085\n6086\n6087\n6088\n6089\n6090\n6091\n6092\n6093\n6094\n6095\n6096\n6097\n6098\n6099\n6100\n6101\n6102\n6103\n6104\n6105\n6106\n6107\n6108\n6109\n6110\n6111\n6112\n6113\n6114\n6115\n6116\n6117\n6118\n6119\n6120\n6121\n6122\n6123\n6124\n6125\n6126\n6127\n6128\n6129\n6130\n6131\n6132\n6133\n6134\n6135\n6136\n6137\n6138\n6139\n6140\n6141\n6142\n6143\n6144\n6145\n6146\n6147\n6148\n6149\n6150\n6151\n6152\n6153\n6154\n6155\n6156\n6157\n6158\n6159\n6160\n6161\n6162\n6163\n6164\n6165\n6166\n6167\n6168\n6169\n6170\n6171\n6172\n6173\n6174\n6175\n6176\n6177\n6178\n6179\n6180\n6181\n61
82\n6183\n6184\n6185\n6186\n6187\n6188\n6189\n6190\n6191\n6192\n6193\n6194\n6195\n6196\n6197\n6198\n6199\n6200\n6201\n6202\n6203\n6204\n6205\n6206\n6207\n6208\n6209\n6210\n6211\n6212\n6213\n6214\n6215\n6216\n6217\n6218\n6219\n6220\n6221\n6222\n6223\n6224\n6225\n6226\n6227\n6228\n6229\n6230\n6231\n6232\n6233\n6234\n6235\n6236\n6237\n6238\n6239\n6240\n6241\n6242\n6243\n6244\n6245\n6246\n6247\n6248\n6249\n6250\n6251\n6252\n6253\n6254\n6255\n6256\n6257\n6258\n6259\n6260\n6261\n6262\n6263\n6264\n6265\n6266\n6267\n6268\n6269\n6270\n6271\n6272\n6273\n6274\n6275\n6276\n6277\n6278\n6279\n6280\n6281\n6282\n6283\n6284\n6285\n6286\n6287\n6288\n6289\n6290\n6291\n6292\n6293\n6294\n6295\n6296\n6297\n6298\n6299\n6300\n6301\n6302\n6303\n6304\n6305\n6306\n6307\n6308\n6309\n6310\n6311\n6312\n6313\n6314\n6315\n6316\n6317\n6318\n6319\n6320\n6321\n6322\n6323\n6324\n6325\n6326\n6327\n6328\n6329\n6330\n6331\n6332\n6333\n6334\n6335\n6336\n6337\n6338\n6339\n6340\n6341\n6342\n6343\n6344\n6345\n6346\n6347\n6348\n6349\n6350\n6351\n6352\n6353\n6354\n6355\n6356\n6357\n6358\n6359\n6360\n6361\n6362\n6363\n6364\n6365\n6366\n6367\n6368\n6369\n6370\n6371\n6372\n6373\n6374\n6375\n6376\n6377\n6378\n6379\n6380\n6381\n6382\n6383\n6384\n6385\n6386\n6387\n6388\n6389\n6390\n6391\n6392\n6393\n6394\n6395\n6396\n6397\n6398\n6399\n6400\n6401\n6402\n6403\n6404\n6405\n6406\n6407\n6408\n6409\n6410\n6411\n6412\n6413\n6414\n6415\n6416\n6417\n6418\n6419\n6420\n6421\n6422\n6423\n6424\n6425\n6426\n6427\n6428\n6429\n6430\n6431\n6432\n6433\n6434\n6435\n6436\n6437\n6438\n6439\n6440\n6441\n6442\n6443\n6444\n6445\n6446\n6447\n6448\n6449\n6450\n6451\n6452\n6453\n6454\n6455\n6456\n6457\n6458\n6459\n6460\n6461\n6462\n6463\n6464\n6465\n6466\n6467\n6468\n6469\n6470\n6471\n6472\n6473\n6474\n6475\n6476\n6477\n6478\n6479\n6480\n6481\n6482\n6483\n6484\n6485\n6486\n6487\n6488\n6489\n6490\n6491\n6492\n6493\n6494\n6495\n6496\n6497\n6498\n6499\n6500\n6501\n6502\n6503\n6504\n6505\n6506\n6507\n6508\n6509\n6510\n6511\n6512\n6513\n6514\n6515
\n6516\n6517\n6518\n6519\n6520\n6521\n6522\n6523\n6524\n6525\n6526\n6527\n6528\n6529\n6530\n6531\n6532\n6533\n6534\n6535\n6536\n6537\n6538\n6539\n6540\n6541\n6542\n6543\n6544\n6545\n6546\n6547\n6548\n6549\n6550\n6551\n6552\n6553\n6554\n6555\n6556\n6557\n6558\n6559\n6560\n6561\n6562\n6563\n6564\n6565\n6566\n6567\n6568\n6569\n6570\n6571\n6572\n6573\n6574\n6575\n6576\n6577\n6578\n6579\n6580\n6581\n6582\n6583\n6584\n6585\n6586\n6587\n6588\n6589\n6590\n6591\n6592\n6593\n6594\n6595\n6596\n6597\n6598\n6599\n6600\n6601\n6602\n6603\n6604\n6605\n6606\n6607\n6608\n6609\n6610\n6611\n6612\n6613\n6614\n6615\n6616\n6617\n6618\n6619\n6620\n6621\n6622\n6623\n6624\n6625\n6626\n6627\n6628\n6629\n6630\n6631\n6632\n6633\n6634\n6635\n6636\n6637\n6638\n6639\n6640\n6641\n6642\n6643\n6644\n6645\n6646\n6647\n6648\n6649\n6650\n6651\n6652\n6653\n6654\n6655\n6656\n6657\n6658\n6659\n6660\n6661\n6662\n6663\n6664\n6665\n6666\n6667\n6668\n6669\n6670\n6671\n6672\n6673\n6674\n6675\n6676\n6677\n6678\n6679\n6680\n6681\n6682\n6683\n6684\n6685\n6686\n6687\n6688\n6689\n6690\n6691\n6692\n6693\n6694\n6695\n6696\n6697\n6698\n6699\n6700\n6701\n6702\n6703\n6704\n6705\n6706\n6707\n6708\n6709\n6710\n6711\n6712\n6713\n6714\n6715\n6716\n6717\n6718\n6719\n6720\n6721\n6722\n6723\n6724\n6725\n6726\n6727\n6728\n6729\n6730\n6731\n6732\n6733\n6734\n6735\n6736\n6737\n6738\n6739\n6740\n6741\n6742\n6743\n6744\n6745\n6746\n6747\n6748\n6749\n6750\n6751\n6752\n6753\n6754\n6755\n6756\n6757\n6758\n6759\n6760\n6761\n6762\n6763\n6764\n6765\n6766\n6767\n6768\n6769\n6770\n6771\n6772\n6773\n6774\n6775\n6776\n6777\n6778\n6779\n6780\n6781\n6782\n6783\n6784\n6785\n6786\n6787\n6788\n6789\n6790\n6791\n6792\n6793\n6794\n6795\n6796\n6797\n6798\n6799\n6800\n6801\n6802\n6803\n6804\n6805\n6806\n6807\n6808\n6809\n6810\n6811\n6812\n6813\n6814\n6815\n6816\n6817\n6818\n6819\n6820\n6821\n6822\n6823\n6824\n6825\n6826\n6827\n6828\n6829\n6830\n6831\n6832\n6833\n6834\n6835\n6836\n6837\n6838\n6839\n6840\n6841\n6842\n6843\n6844\n6845\n6846\n6847\n6848\n
6849\n6850\n6851\n6852\n6853\n6854\n6855\n6856\n6857\n6858\n6859\n6860\n6861\n6862\n6863\n6864\n6865\n6866\n6867\n6868\n6869\n6870\n6871\n6872\n6873\n6874\n6875\n6876\n6877\n6878\n6879\n6880\n6881\n6882\n6883\n6884\n6885\n6886\n6887\n6888\n6889\n6890\n6891\n6892\n6893\n6894\n6895\n6896\n6897\n6898\n6899\n6900\n6901\n6902\n6903\n6904\n6905\n6906\n6907\n6908\n6909\n6910\n6911\n6912\n6913\n6914\n6915\n6916\n6917\n6918\n6919\n6920\n6921\n6922\n6923\n6924\n6925\n6926\n6927\n6928\n6929\n6930\n6931\n6932\n6933\n6934\n6935\n6936\n6937\n6938\n6939\n6940\n6941\n6942\n6943\n6944\n6945\n6946\n6947\n6948\n6949\n6950\n6951\n6952\n6953\n6954\n6955\n6956\n6957\n6958\n6959\n6960\n6961\n6962\n6963\n6964\n6965\n6966\n6967\n6968\n6969\n6970\n6971\n6972\n6973\n6974\n6975\n6976\n6977\n6978\n6979\n6980\n6981\n6982\n6983\n6984\n6985\n6986\n6987\n6988\n6989\n6990\n6991\n6992\n6993\n6994\n6995\n6996\n6997\n6998\n6999\n7000\n7001\n7002\n7003\n7004\n7005\n7006\n7007\n7008\n7009\n7010\n7011\n7012\n7013\n7014\n7015\n7016\n7017\n7018\n7019\n7020\n7021\n7022\n7023\n7024\n7025\n7026\n7027\n7028\n7029\n7030\n7031\n7032\n7033\n7034\n7035\n7036\n7037\n7038\n7039\n7040\n7041\n7042\n7043\n7044\n7045\n7046\n7047\n7048\n7049\n7050\n7051\n7052\n7053\n7054\n7055\n7056\n7057\n7058\n7059\n7060\n7061\n7062\n7063\n7064\n7065\n7066\n7067\n7068\n7069\n7070\n7071\n7072\n7073\n7074\n7075\n7076\n7077\n7078\n7079\n7080\n7081\n7082\n7083\n7084\n7085\n7086\n7087\n7088\n7089\n7090\n7091\n7092\n7093\n7094\n7095\n7096\n7097\n7098\n7099\n7100\n7101\n7102\n7103\n7104\n7105\n7106\n7107\n7108\n7109\n7110\n7111\n7112\n7113\n7114\n7115\n7116\n7117\n7118\n7119\n7120\n7121\n7122\n7123\n7124\n7125\n7126\n7127\n7128\n7129\n7130\n7131\n7132\n7133\n7134\n7135\n7136\n7137\n7138\n7139\n7140\n7141\n7142\n7143\n7144\n7145\n7146\n7147\n7148\n7149\n7150\n7151\n7152\n7153\n7154\n7155\n7156\n7157\n7158\n7159\n7160\n7161\n7162\n7163\n7164\n7165\n7166\n7167\n7168\n7169\n7170\n7171\n7172\n7173\n7174\n7175\n7176\n7177\n7178\n7179\n7180\n7181\n71
82\n7183\n7184\n7185\n7186\n7187\n7188\n7189\n7190\n7191\n7192\n7193\n7194\n7195\n7196\n7197\n7198\n7199\n7200\n7201\n7202\n7203\n7204\n7205\n7206\n7207\n7208\n7209\n7210\n7211\n7212\n7213\n7214\n7215\n7216\n7217\n7218\n7219\n7220\n7221\n7222\n7223\n7224\n7225\n7226\n7227\n7228\n7229\n7230\n7231\n7232\n7233\n7234\n7235\n7236\n7237\n7238\n7239\n7240\n7241\n7242\n7243\n7244\n7245\n7246\n7247\n7248\n7249\n7250\n7251\n7252\n7253\n7254\n7255\n7256\n7257\n7258\n7259\n7260\n7261\n7262\n7263\n7264\n7265\n7266\n7267\n7268\n7269\n7270\n7271\n7272\n7273\n7274\n7275\n7276\n7277\n7278\n7279\n7280\n7281\n7282\n7283\n7284\n7285\n7286\n7287\n7288\n7289\n7290\n7291\n7292\n7293\n7294\n7295\n7296\n7297\n7298\n7299\n7300\n7301\n7302\n7303\n7304\n7305\n7306\n7307\n7308\n7309\n7310\n7311\n7312\n7313\n7314\n7315\n7316\n7317\n7318\n7319\n7320\n7321\n7322\n7323\n7324\n7325\n7326\n7327\n7328\n7329\n7330\n7331\n7332\n7333\n7334\n7335\n7336\n7337\n7338\n7339\n7340\n7341\n7342\n7343\n7344\n7345\n7346\n7347\n7348\n7349\n7350\n7351\n7352\n7353\n7354\n7355\n7356\n7357\n7358\n7359\n7360\n7361\n7362\n7363\n7364\n7365\n7366\n7367\n7368\n7369\n7370\n7371\n7372\n7373\n7374\n7375\n7376\n7377\n7378\n7379\n7380\n7381\n7382\n7383\n7384\n7385\n7386\n7387\n7388\n7389\n7390\n7391\n7392\n7393\n7394\n7395\n7396\n7397\n7398\n7399\n7400\n7401\n7402\n7403\n7404\n7405\n7406\n7407\n7408\n7409\n7410\n7411\n7412\n7413\n7414\n7415\n7416\n7417\n7418\n7419\n7420\n7421\n7422\n7423\n7424\n7425\n7426\n7427\n7428\n7429\n7430\n7431\n7432\n7433\n7434\n7435\n7436\n7437\n7438\n7439\n7440\n7441\n7442\n7443\n7444\n7445\n7446\n7447\n7448\n7449\n7450\n7451\n7452\n7453\n7454\n7455\n7456\n7457\n7458\n7459\n7460\n7461\n7462\n7463\n7464\n7465\n7466\n7467\n7468\n7469\n7470\n7471\n7472\n7473\n7474\n7475\n7476\n7477\n7478\n7479\n7480\n7481\n7482\n7483\n7484\n7485\n7486\n7487\n7488\n7489\n7490\n7491\n7492\n7493\n7494\n7495\n7496\n7497\n7498\n7499\n7500\n7501\n7502\n7503\n7504\n7505\n7506\n7507\n7508\n7509\n7510\n7511\n7512\n7513\n7514\n7515
\n7516\n7517\n7518\n7519\n7520\n7521\n7522\n7523\n7524\n7525\n7526\n7527\n7528\n7529\n7530\n7531\n7532\n7533\n7534\n7535\n7536\n7537\n7538\n7539\n7540\n7541\n7542\n7543\n7544\n7545\n7546\n7547\n7548\n7549\n7550\n7551\n7552\n7553\n7554\n7555\n7556\n7557\n7558\n7559\n7560\n7561\n7562\n7563\n7564\n7565\n7566\n7567\n7568\n7569\n7570\n7571\n7572\n7573\n7574\n7575\n7576\n7577\n7578\n7579\n7580\n7581\n7582\n7583\n7584\n7585\n7586\n7587\n7588\n7589\n7590\n7591\n7592\n7593\n7594\n7595\n7596\n7597\n7598\n7599\n7600\n7601\n7602\n7603\n7604\n7605\n7606\n7607\n7608\n7609\n7610\n7611\n7612\n7613\n7614\n7615\n7616\n7617\n7618\n7619\n7620\n7621\n7622\n7623\n7624\n7625\n7626\n7627\n7628\n7629\n7630\n7631\n7632\n7633\n7634\n7635\n7636\n7637\n7638\n7639\n7640\n7641\n7642\n7643\n7644\n7645\n7646\n7647\n7648\n7649\n7650\n7651\n7652\n7653\n7654\n7655\n7656\n7657\n7658\n7659\n7660\n7661\n7662\n7663\n7664\n7665\n7666\n7667\n7668\n7669\n7670\n7671\n7672\n7673\n7674\n7675\n7676\n7677\n7678\n7679\n7680\n7681\n7682\n7683\n7684\n7685\n7686\n7687\n7688\n7689\n7690\n7691\n7692\n7693\n7694\n7695\n7696\n7697\n7698\n7699\n7700\n7701\n7702\n7703\n7704\n7705\n7706\n7707\n7708\n7709\n7710\n7711\n7712\n7713\n7714\n7715\n7716\n7717\n7718\n7719\n7720\n7721\n7722\n7723\n7724\n7725\n7726\n7727\n7728\n7729\n7730\n7731\n7732\n7733\n7734\n7735\n7736\n7737\n7738\n7739\n7740\n7741\n7742\n7743\n7744\n7745\n7746\n7747\n7748\n7749\n7750\n7751\n7752\n7753\n7754\n7755\n7756\n7757\n7758\n7759\n7760\n7761\n7762\n7763\n7764\n7765\n7766\n7767\n7768\n7769\n7770\n7771\n7772\n7773\n7774\n7775\n7776\n7777\n7778\n7779\n7780\n7781\n7782\n7783\n7784\n7785\n7786\n7787\n7788\n7789\n7790\n7791\n7792\n7793\n7794\n7795\n7796\n7797\n7798\n7799\n7800\n7801\n7802\n7803\n7804\n7805\n7806\n7807\n7808\n7809\n7810\n7811\n7812\n7813\n7814\n7815\n7816\n7817\n7818\n7819\n7820\n7821\n7822\n7823\n7824\n7825\n7826\n7827\n7828\n7829\n7830\n7831\n7832\n7833\n7834\n7835\n7836\n7837\n7838\n7839\n7840\n7841\n7842\n7843\n7844\n7845\n7846\n7847\n7848\n
7849\n7850\n7851\n7852\n7853\n7854\n7855\n7856\n7857\n7858\n7859\n7860\n7861\n7862\n7863\n7864\n7865\n7866\n7867\n7868\n7869\n7870\n7871\n7872\n7873\n7874\n7875\n7876\n7877\n7878\n7879\n7880\n7881\n7882\n7883\n7884\n7885\n7886\n7887\n7888\n7889\n7890\n7891\n7892\n7893\n7894\n7895\n7896\n7897\n7898\n7899\n7900\n7901\n7902\n7903\n7904\n7905\n7906\n7907\n7908\n7909\n7910\n7911\n7912\n7913\n7914\n7915\n7916\n7917\n7918\n7919\n7920\n7921\n7922\n7923\n7924\n7925\n7926\n7927\n7928\n7929\n7930\n7931\n7932\n7933\n7934\n7935\n7936\n7937\n7938\n7939\n7940\n7941\n7942\n7943\n7944\n7945\n7946\n7947\n7948\n7949\n7950\n7951\n7952\n7953\n7954\n7955\n7956\n7957\n7958\n7959\n7960\n7961\n7962\n7963\n7964\n7965\n7966\n7967\n7968\n7969\n7970\n7971\n7972\n7973\n7974\n7975\n7976\n7977\n7978\n7979\n7980\n7981\n7982\n7983\n7984\n7985\n7986\n7987\n7988\n7989\n7990\n7991\n7992\n7993\n7994\n7995\n7996\n7997\n7998\n7999\n8000\n8001\n8002\n8003\n8004\n8005\n8006\n8007\n8008\n8009\n8010\n8011\n8012\n8013\n8014\n8015\n8016\n8017\n8018\n8019\n8020\n8021\n8022\n8023\n8024\n8025\n8026\n8027\n8028\n8029\n8030\n8031\n8032\n8033\n8034\n8035\n8036\n8037\n8038\n8039\n8040\n8041\n8042\n8043\n8044\n8045\n8046\n8047\n8048\n8049\n8050\n8051\n8052\n8053\n8054\n8055\n8056\n8057\n8058\n8059\n8060\n8061\n8062\n8063\n8064\n8065\n8066\n8067\n8068\n8069\n8070\n8071\n8072\n8073\n8074\n8075\n8076\n8077\n8078\n8079\n8080\n8081\n8082\n8083\n8084\n8085\n8086\n8087\n8088\n8089\n8090\n8091\n8092\n8093\n8094\n8095\n8096\n8097\n8098\n8099\n8100\n8101\n8102\n8103\n8104\n8105\n8106\n8107\n8108\n8109\n8110\n8111\n8112\n8113\n8114\n8115\n8116\n8117\n8118\n8119\n8120\n8121\n8122\n8123\n8124\n8125\n8126\n8127\n8128\n8129\n8130\n8131\n8132\n8133\n8134\n8135\n8136\n8137\n8138\n8139\n8140\n8141\n8142\n8143\n8144\n8145\n8146\n8147\n8148\n8149\n8150\n8151\n8152\n8153\n8154\n8155\n8156\n8157\n8158\n8159\n8160\n8161\n8162\n8163\n8164\n8165\n8166\n8167\n8168\n8169\n8170\n8171\n8172\n8173\n8174\n8175\n8176\n8177\n8178\n8179\n8180\n8181\n81
82\n8183\n8184\n8185\n8186\n8187\n8188\n8189\n8190\n8191\n8192\n8193\n8194\n8195\n8196\n8197\n8198\n8199\n8200\n8201\n8202\n8203\n8204\n8205\n8206\n8207\n8208\n8209\n8210\n8211\n8212\n8213\n8214\n8215\n8216\n8217\n8218\n8219\n8220\n8221\n8222\n8223\n8224\n8225\n8226\n8227\n8228\n8229\n8230\n8231\n8232\n8233\n8234\n8235\n8236\n8237\n8238\n8239\n8240\n8241\n8242\n8243\n8244\n8245\n8246\n8247\n8248\n8249\n8250\n8251\n8252\n8253\n8254\n8255\n8256\n8257\n8258\n8259\n8260\n8261\n8262\n8263\n8264\n8265\n8266\n8267\n8268\n8269\n8270\n8271\n8272\n8273\n8274\n8275\n8276\n8277\n8278\n8279\n8280\n8281\n8282\n8283\n8284\n8285\n8286\n8287\n8288\n8289\n8290\n8291\n8292\n8293\n8294\n8295\n8296\n8297\n8298\n8299\n8300\n8301\n8302\n8303\n8304\n8305\n8306\n8307\n8308\n8309\n8310\n8311\n8312\n8313\n8314\n8315\n8316\n8317\n8318\n8319\n8320\n8321\n8322\n8323\n8324\n8325\n8326\n8327\n8328\n8329\n8330\n8331\n8332\n8333\n8334\n8335\n8336\n8337\n8338\n8339\n8340\n8341\n8342\n8343\n8344\n8345\n8346\n8347\n8348\n8349\n8350\n8351\n8352\n8353\n8354\n8355\n8356\n8357\n8358\n8359\n8360\n8361\n8362\n8363\n8364\n8365\n8366\n8367\n8368\n8369\n8370\n8371\n8372\n8373\n8374\n8375\n8376\n8377\n8378\n8379\n8380\n8381\n8382\n8383\n8384\n8385\n8386\n8387\n8388\n8389\n8390\n8391\n8392\n8393\n8394\n8395\n8396\n8397\n8398\n8399\n8400\n8401\n8402\n8403\n8404\n8405\n8406\n8407\n8408\n8409\n8410\n8411\n8412\n8413\n8414\n8415\n8416\n8417\n8418\n8419\n8420\n8421\n8422\n8423\n8424\n8425\n8426\n8427\n8428\n8429\n8430\n8431\n8432\n8433\n8434\n8435\n8436\n8437\n8438\n8439\n8440\n8441\n8442\n8443\n8444\n8445\n8446\n8447\n8448\n8449\n8450\n8451\n8452\n8453\n8454\n8455\n8456\n8457\n8458\n8459\n8460\n8461\n8462\n8463\n8464\n8465\n8466\n8467\n8468\n8469\n8470\n8471\n8472\n8473\n8474\n8475\n8476\n8477\n8478\n8479\n8480\n8481\n8482\n8483\n8484\n8485\n8486\n8487\n8488\n8489\n8490\n8491\n8492\n8493\n8494\n8495\n8496\n8497\n8498\n8499\n8500\n8501\n8502\n8503\n8504\n8505\n8506\n8507\n8508\n8509\n8510\n8511\n8512\n8513\n8514\n8515
\n8516\n8517\n8518\n8519\n8520\n8521\n8522\n8523\n8524\n8525\n8526\n8527\n8528\n8529\n8530\n8531\n8532\n8533\n8534\n8535\n8536\n8537\n8538\n8539\n8540\n8541\n8542\n8543\n8544\n8545\n8546\n8547\n8548\n8549\n8550\n8551\n8552\n8553\n8554\n8555\n8556\n8557\n8558\n8559\n8560\n8561\n8562\n8563\n8564\n8565\n8566\n8567\n8568\n8569\n8570\n8571\n8572\n8573\n8574\n8575\n8576\n8577\n8578\n8579\n8580\n8581\n8582\n8583\n8584\n8585\n8586\n8587\n8588\n8589\n8590\n8591\n8592\n8593\n8594\n8595\n8596\n8597\n8598\n8599\n8600\n8601\n8602\n8603\n8604\n8605\n8606\n8607\n8608\n8609\n8610\n8611\n8612\n8613\n8614\n8615\n8616\n8617\n8618\n8619\n8620\n8621\n8622\n8623\n8624\n8625\n8626\n8627\n8628\n8629\n8630\n8631\n8632\n8633\n8634\n8635\n8636\n8637\n8638\n8639\n8640\n8641\n8642\n8643\n8644\n8645\n8646\n8647\n8648\n8649\n8650\n8651\n8652\n8653\n8654\n8655\n8656\n8657\n8658\n8659\n8660\n8661\n8662\n8663\n8664\n8665\n8666\n8667\n8668\n8669\n8670\n8671\n8672\n8673\n8674\n8675\n8676\n8677\n8678\n8679\n8680\n8681\n8682\n8683\n8684\n8685\n8686\n8687\n8688\n8689\n8690\n8691\n8692\n8693\n8694\n8695\n8696\n8697\n8698\n8699\n8700\n8701\n8702\n8703\n8704\n8705\n8706\n8707\n8708\n8709\n8710\n8711\n8712\n8713\n8714\n8715\n8716\n8717\n8718\n8719\n8720\n8721\n8722\n8723\n8724\n8725\n8726\n8727\n8728\n8729\n8730\n8731\n8732\n8733\n8734\n8735\n8736\n8737\n8738\n8739\n8740\n8741\n8742\n8743\n8744\n8745\n8746\n8747\n8748\n8749\n8750\n8751\n8752\n8753\n8754\n8755\n8756\n8757\n8758\n8759\n8760\n8761\n8762\n8763\n8764\n8765\n8766\n8767\n8768\n8769\n8770\n8771\n8772\n8773\n8774\n8775\n8776\n8777\n8778\n8779\n8780\n8781\n8782\n8783\n8784\n8785\n8786\n8787\n8788\n8789\n8790\n8791\n8792\n8793\n8794\n8795\n8796\n8797\n8798\n8799\n8800\n8801\n8802\n8803\n8804\n8805\n8806\n8807\n8808\n8809\n8810\n8811\n8812\n8813\n8814\n8815\n8816\n8817\n8818\n8819\n8820\n8821\n8822\n8823\n8824\n8825\n8826\n8827\n8828\n8829\n8830\n8831\n8832\n8833\n8834\n8835\n8836\n8837\n8838\n8839\n8840\n8841\n8842\n8843\n8844\n8845\n8846\n8847\n8848\n
8849\n8850\n8851\n8852\n8853\n8854\n8855\n8856\n8857\n8858\n8859\n8860\n8861\n8862\n8863\n8864\n8865\n8866\n8867\n8868\n8869\n8870\n8871\n8872\n8873\n8874\n8875\n8876\n8877\n8878\n8879\n8880\n8881\n8882\n8883\n8884\n8885\n8886\n8887\n8888\n8889\n8890\n8891\n8892\n8893\n8894\n8895\n8896\n8897\n8898\n8899\n8900\n8901\n8902\n8903\n8904\n8905\n8906\n8907\n8908\n8909\n8910\n8911\n8912\n8913\n8914\n8915\n8916\n8917\n8918\n8919\n8920\n8921\n8922\n8923\n8924\n8925\n8926\n8927\n8928\n8929\n8930\n8931\n8932\n8933\n8934\n8935\n8936\n8937\n8938\n8939\n8940\n8941\n8942\n8943\n8944\n8945\n8946\n8947\n8948\n8949\n8950\n8951\n8952\n8953\n8954\n8955\n8956\n8957\n8958\n8959\n8960\n8961\n8962\n8963\n8964\n8965\n8966\n8967\n8968\n8969\n8970\n8971\n8972\n8973\n8974\n8975\n8976\n8977\n8978\n8979\n8980\n8981\n8982\n8983\n8984\n8985\n8986\n8987\n8988\n8989\n8990\n8991\n8992\n8993\n8994\n8995\n8996\n8997\n8998\n8999\n9000\n9001\n9002\n9003\n9004\n9005\n9006\n9007\n9008\n9009\n9010\n9011\n9012\n9013\n9014\n9015\n9016\n9017\n9018\n9019\n9020\n9021\n9022\n9023\n9024\n9025\n9026\n9027\n9028\n9029\n9030\n9031\n9032\n9033\n9034\n9035\n9036\n9037\n9038\n9039\n9040\n9041\n9042\n9043\n9044\n9045\n9046\n9047\n9048\n9049\n9050\n9051\n9052\n9053\n9054\n9055\n9056\n9057\n9058\n9059\n9060\n9061\n9062\n9063\n9064\n9065\n9066\n9067\n9068\n9069\n9070\n9071\n9072\n9073\n9074\n9075\n9076\n9077\n9078\n9079\n9080\n9081\n9082\n9083\n9084\n9085\n9086\n9087\n9088\n9089\n9090\n9091\n9092\n9093\n9094\n9095\n9096\n9097\n9098\n9099\n9100\n9101\n9102\n9103\n9104\n9105\n9106\n9107\n9108\n9109\n9110\n9111\n9112\n9113\n9114\n9115\n9116\n9117\n9118\n9119\n9120\n9121\n9122\n9123\n9124\n9125\n9126\n9127\n9128\n9129\n9130\n9131\n9132\n9133\n9134\n9135\n9136\n9137\n9138\n9139\n9140\n9141\n9142\n9143\n9144\n9145\n9146\n9147\n9148\n9149\n9150\n9151\n9152\n9153\n9154\n9155\n9156\n9157\n9158\n9159\n9160\n9161\n9162\n9163\n9164\n9165\n9166\n9167\n9168\n9169\n9170\n9171\n9172\n9173\n9174\n9175\n9176\n9177\n9178\n9179\n9180\n9181\n91
82\n9183\n9184\n9185\n9186\n9187\n9188\n9189\n9190\n9191\n9192\n9193\n9194\n9195\n9196\n9197\n9198\n9199\n9200\n9201\n9202\n9203\n9204\n9206\n9207\n9208\n9209\n9210\n9211\n9212\n9213\n9214\n9215\n9216\n9217\n9218\n9219\n9220\n9221\n9222\n9223\n9224\n9225\n9226\n9227\n9228\n9229\n9230\n9231\n9232\n9233\n9234\n9235\n9236\n9237\n9238\n9239\n9240\n9241\n9242\n9243\n9244\n9245\n9246\n9247\n9248\n9249\n9250\n9251\n9252\n9253\n9254\n9255\n9256\n9257\n9258\n9259\n9260\n9261\n9262\n9263\n9264\n9265\n9266\n9267\n9268\n9269\n9270\n9271\n9272\n9273\n9274\n9275\n9276\n9277\n9278\n9279\n9280\n9281\n9282\n9283\n9284\n9285\n9286\n9287\n9288\n9289\n9290\n9291\n9292\n9293\n9294\n9295\n9296\n9297\n9298\n9299\n9300\n9301\n9302\n9303\n9304\n9305\n9306\n9307\n9308\n9309\n9310\n9311\n9312\n9313\n9314\n9315\n9316\n9317\n9318\n9319\n9320\n9321\n9322\n9323\n9324\n9325\n9326\n9327\n9328\n9329\n9330\n9331\n9332\n9333\n9334\n9335\n9336\n9337\n9338\n9339\n9340\n9341\n9342\n9343\n9344\n9345\n9346\n9347\n9348\n9349\n9350\n9351\n9352\n9353\n9354\n9355\n9356\n9357\n9358\n9359\n9360\n9361\n9362\n9363\n9364\n9365\n9366\n9367\n9368\n9369\n9370\n9371\n9372\n9373\n9374\n9375\n9376\n9377\n9378\n9379\n9380\n9381\n9382\n9383\n9384\n9385\n9386\n9387\n9388\n9389\n9390\n9391\n9392\n9393\n9394\n9395\n9396\n9397\n9398\n9399\n9400\n9401\n9402\n9403\n9404\n9405\n9406\n9407\n9408\n9409\n9410\n9411\n9412\n9413\n9414\n9415\n9416\n9417\n9418\n9419\n9420\n9421\n9422\n9423\n9424\n9425\n9426\n9427\n9428\n9429\n9430\n9431\n9432\n9433\n9434\n9435\n9436\n9437\n9438\n9439\n9440\n9441\n9442\n9443\n9444\n9445\n9446\n9447\n9448\n9449\n9450\n9451\n9452\n9453\n9454\n9455\n9456\n9457\n9458\n9459\n9460\n9461\n9462\n9463\n9464\n9465\n9466\n9467\n9468\n9469\n9470\n9471\n9472\n9473\n9474\n9475\n9476\n9477\n9478\n9479\n9480\n9481\n9482\n9483\n9484\n9485\n9486\n9487\n9488\n9489\n9490\n9491\n9492\n9493\n9494\n9495\n9496\n9497\n9498\n9499\n9500\n9501\n9502\n9503\n9504\n9505\n9506\n9507\n9508\n9509\n9510\n9511\n9512\n9513\n9514\n9515\n9516
\n9517\n9518\n9519\n9520\n9521\n9522\n9523\n9524\n9525\n9526\n9527\n9528\n9529\n9530\n9531\n9532\n9533\n9534\n9535\n9536\n9537\n9538\n9539\n9540\n9541\n9542\n9543\n9544\n9545\n9546\n9547\n9548\n9549\n9550\n9551\n9552\n9553\n9554\n9555\n9556\n9557\n9558\n9559\n9560\n9561\n9562\n9563\n9564\n9565\n9566\n9567\n9568\n9569\n9570\n9571\n9572\n9573\n9574\n9575\n9576\n9577\n9578\n9579\n9580\n9581\n9582\n9583\n9584\n9585\n9586\n9587\n9588\n9589\n9590\n9591\n9592\n9593\n9594\n9595\n9596\n9597\n9598\n9599\n9600\n9601\n9602\n9603\n9604\n9605\n9606\n9607\n9608\n9609\n9610\n9611\n9612\n9613\n9614\n9615\n9616\n9617\n9618\n9619\n9620\n9621\n9622\n9623\n9624\n9625\n9626\n9627\n9628\n9629\n9630\n9631\n9632\n9633\n9634\n9635\n9636\n9637\n9638\n9639\n9640\n9641\n9642\n9643\n9644\n9645\n9646\n9647\n9648\n9649\n9650\n9651\n9652\n9653\n9654\n9655\n9656\n9657\n9658\n9659\n9660\n9661\n9662\n9663\n9664\n9665\n9666\n9667\n9668\n9669\n9670\n9671\n9672\n9673\n9674\n9675\n9676\n9677\n9678\n9679\n9680\n9681\n9682\n9683\n9684\n9685\n9686\n9687\n9688\n9689\n9690\n9691\n9692\n9693\n9694\n9695\n9696\n9697\n9698\n9699\n9700\n9701\n9702\n9703\n9704\n9705\n9706\n9707\n9708\n9709\n9710\n9711\n9712\n9713\n9714\n9715\n9716\n9717\n9718\n9719\n9720\n9721\n9722\n9723\n9724\n9725\n9726\n9727\n9728\n9729\n9730\n9731\n9732\n9733\n9734\n9735\n9736\n9737\n9738\n9739\n9740\n9741\n9742\n9743\n9744\n9745\n9746\n9747\n9748\n9749\n9750\n9751\n9752\n9753\n9754\n9755\n9756\n9757\n9758\n9759\n9760\n9761\n9762\n9763\n9764\n9765\n9766\n9767\n9768\n9769\n9770\n9771\n9772\n9773\n9774\n9775\n9776\n9777\n9778\n9779\n9780\n9781\n9782\n9783\n9784\n9785\n9786\n9787\n9788\n9789\n9790\n9791\n9792\n9793\n9794\n9795\n9796\n9797\n9798\n9799\n9800\n9801\n9802\n9803\n9804\n9805\n9806\n9807\n9808\n9809\n9810\n9811\n9812\n9813\n9814\n9815\n9816\n9817\n9818\n9819\n9820\n9821\n9822\n9823\n9824\n9825\n9826\n9827\n9828\n9829\n9830\n9831\n9832\n9833\n9834\n9835\n9836\n9837\n9838\n9839\n9840\n9841\n9842\n9843\n9844\n9845\n9846\n9847\n9848\n9849\n
9850\n9851\n9852\n9853\n9854\n9855\n9856\n9857\n9858\n9859\n9860\n9861\n9862\n9863\n9864\n9865\n9866\n9867\n9868\n9869\n9870\n9871\n9872\n9873\n9874\n9875\n9876\n9877\n9878\n9879\n9880\n9881\n9882\n9883\n9884\n9885\n9886\n9887\n9888\n9889\n9890\n9891\n9892\n9893\n9894\n9895\n9896\n9897\n9898\n9899\n9900\n9901\n9902\n9903\n9904\n9905\n9906\n9907\n9908\n9909\n9910\n9911\n9912\n9913\n9914\n9915\n9916\n9917\n9918\n9919\n9920\n9921\n9922\n9923\n9924\n9925\n9926\n9927\n9928\n9929\n9930\n9931\n9932\n9933\n9934\n9935\n9936\n9937\n9938\n9939\n9940\n9941\n9942\n9943\n9944\n9945\n9946\n9947\n9948\n9949\n9950\n9951\n9952\n9953\n9954\n9955\n9956\n9957\n9958\n9959\n9960\n9961\n9962\n9963\n9964\n9965\n9966\n9967\n9968\n9969\n9970\n9971\n9972\n9973\n9974\n9975\n9976\n9977\n9978\n9979\n9980\n9981\n9982\n9983\n9984\n9985\n9986\n9987\n9988\n9989\n9990\n9991\n9992\n9993\n9994\n9995\n9996\n9997\n9998\n9999\n10000\n10001\n10002\n10003\n10004\n10005\n10006\n10007\n10008\n10009\n10010\n10011\n10012\n10013\n10014\n10015\n10016\n10017\n10018\n10019\n10020\n10021\n10022\n10023\n10024\n10025\n10026\n10027\n10028\n10029\n10030\n10031\n10032\n10033\n10034\n10035\n10036\n10037\n10038\n10039\n10040\n10041\n10042\n10043\n10044\n10045\n10046\n10047\n10048\n10049\n10050\n10051\n10052\n10053\n10054\n10055\n10056\n10057\n10058\n10059\n10060\n10061\n10062\n10063\n10064\n10065\n10066\n10067\n10068\n10069\n10070\n10071\n10072\n10073\n10074\n10075\n10076\n10077\n10078\n10079\n10080\n10081\n10082\n10083\n10084\n10085\n10086\n10087\n10088\n10089\n10090\n10091\n10092\n10093\n10094\n10095\n10096\n10097\n10098\n10099\n10100\n10101\n10102\n10103\n10104\n10105\n10106\n10107\n10108\n10109\n10110\n10111\n10112\n10113\n10114\n10115\n10116\n10117\n10118\n10119\n10120\n10121\n10122\n10123\n10124\n10125\n10126\n10127\n10128\n10129\n10130\n10131\n10132\n10133\n10134\n10135\n10136\n10137\n10138\n10139\n10140\n10141\n10142\n10143\n10144\n10145\n10146\n10147\n10148\n10149\n10150\n10151\n10152\n10153\n10154\n10155\n10156\n1
0157\n10158\n10159\n10160\n10161\n10162\n10163\n10164\n10165\n10166\n10167\n10168\n10169\n10170\n10171\n10172\n10173\n10174\n10175\n10176\n10177\n10178\n10179\n10180\n10181\n10182\n10183\n10184\n10185\n10186\n10187\n10188\n10189\n10190\n10191\n10192\n10193\n10194\n10195\n10196\n10197\n10198\n10199\n10200\n10201\n10202\n10203\n10204\n10205\n10206\n10207\n10208\n10209\n10210\n10211\n10212\n10213\n10214\n10215\n10216\n10217\n10218\n10219\n10220\n10221\n10222\n10223\n10224\n10225\n10226\n10227\n10228\n10229\n10230\n10231\n10232\n10233\n10234\n10235\n10236\n10237\n10238\n10239\n10240\n10241\n10242\n10243\n10244\n10245\n10246\n10247\n10248\n10249\n10250\n10251\n10252\n10253\n10254\n10255\n10256\n10257\n10258\n10259\n10260\n10261\n10262\n10263\n10264\n10265\n10266\n10267\n10268\n10269\n10270\n10271\n10272\n10273\n10274\n10275\n10276\n10277\n10278\n10279\n10280\n10281\n10282\n10283\n10284\n10285\n10286\n10287\n10288\n10289\n10290\n10291\n10292\n10293\n10294\n10295\n10296\n10297\n10298\n10299\n10300\n10301\n10302\n10303\n10304\n10305\n10306\n10307\n10308\n10309\n10310\n10311\n10312\n10313\n10314\n10315\n10316\n10317\n10318\n10319\n10320\n10321\n10322\n10323\n10324\n10325\n10326\n10327\n10328\n10329\n10330\n10331\n10332\n10333\n10334\n10335\n10336\n10337\n10338\n10339\n10340\n10341\n10342\n10343\n10344\n10345\n10346\n10347\n10348\n10349\n10350\n10351\n10352\n10353\n10354\n10355\n10356\n10357\n10358\n10359\n10360\n10361\n10362\n10363\n10364\n10365\n10366\n10367\n10368\n10369\n10370\n10371\n10372\n10373\n10374\n10375\n10376\n10377\n10378\n10379\n10380\n10381\n10382\n10383\n10384\n10385\n10386\n10387\n10388\n10389\n10390\n10391\n10392\n10393\n10394\n10395\n10396\n10397\n10398\n10399\n10400\n10401\n10402\n10403\n10404\n10405\n10406\n10407\n10408\n10409\n10410\n10411\n10412\n10413\n10414\n10415\n10416\n10417\n10418\n10419\n10420\n10421\n10422\n10423\n10424\n10425\n10426\n10427\n10428\n10429\n10430\n10431\n10432\n10433\n10434\n10435\n10436\n10437\n10438\n10439\n10440\n10441\n10442\
n10443\n10444\n10445\n10446\n10447\n10448\n10449\n10450\n10451\n10452\n10453\n10454\n10455\n10456\n10457\n10458\n10459\n10460\n10461\n10462\n10463\n10464\n10465\n10466\n10467\n10468\n10469\n10470\n10471\n10472\n10473\n10474\n10475\n10476\n10477\n10478\n10479\n10480\n10481\n10482\n10483\n10484\n10485\n10486\n10487\n10488\n10489\n10490\n10491\n10492\n10493\n10494\n10495\n10496\n10497\n10498\n10499\n10500\n10501\n10502\n10503\n10504\n10505\n10506\n10507\n10508\n10509\n10510\n10511\n10512\n10513\n10514\n10515\n10516\n10517\n10518\n10519\n10520\n10521\n10522\n10523\n10524\n10525\n10526\n10527\n10528\n10529\n10530\n10531\n10532\n10533\n10534\n10535\n10536\n10537\n10538\n10539\n10540\n10541\n10542\n10543\n10544\n10545\n10546\n10547\n10548\n10549\n10550\n10551\n10552\n10553\n10554\n10555\n10556\n10557\n10558\n10559\n10560\n10561\n10562\n10563\n10564\n10565\n10566\n10567\n10568\n10569\n10570\n10571\n10572\n10573\n10574\n10575\n10576\n10577\n10578\n10579\n10580\n10581\n10582\n10583\n10584\n10585\n10586\n10587\n10588\n10589\n10590\n10591\n10592\n10593\n10594\n10595\n10596\n10597\n10598\n10599\n10600\n10601\n10602\n10603\n10604\n10605\n10606\n10607\n10608\n10609\n10610\n10611\n10612\n10613\n10614\n10615\n10616\n10617\n10618\n10619\n10620\n10621\n10622\n10623\n10624\n10625\n10626\n10627\n10628\n10629\n10630\n10631\n10632\n10633\n10634\n10635\n10636\n10637\n10638\n10639\n10640\n10641\n10642\n10643\n10644\n10645\n10646\n10647\n10648\n10649\n10650\n10651\n10652\n10653\n10654\n10655\n10656\n10657\n10658\n10659\n10660\n10661\n10662\n10663\n10664\n10665\n10666\n10667\n10668\n10669\n10670\n10671\n10672\n10673\n10674\n10675\n10676\n10677\n10678\n10679\n10680\n10681\n10682\n10683\n10684\n10685\n10686\n10687\n10688\n10689\n10690\n10691\n10692\n10693\n10694\n10695\n10696\n10697\n10698\n10699\n10700\n10701\n10702\n10703\n10704\n10705\n10706\n10707\n10708\n10709\n10710\n10711\n10712\n10713\n10714\n10715\n10716\n10717\n10718\n10719\n10720\n10721\n10722\n10723\n10724\n10725\n10726\n10727\n1072
8\n10729\n10730\n10731\n10732\n10733\n10734\n10735\n10736\n10737\n10738\n10739\n10740\n10741\n10742\n10743\n10744\n10745\n10746\n10747\n10748\n10749\n10750\n10751\n10752\n10753\n10754\n10755\n10756\n10757\n10758\n10759\n10760\n10761\n10762\n10763\n10764\n10765\n10766\n10767\n10768\n10769\n10770\n10771\n10772\n10773\n10774\n10775\n10776\n10777\n10778\n10779\n10780\n10781\n10782\n10783\n10784\n10785\n10786\n10787\n10788\n10789\n10790\n10791\n10792\n10793\n10794\n10795\n10796\n10797\n10798\n10799\n10800\n10801\n10802\n10803\n10804\n10805\n10806\n10807\n10808\n10809\n10810\n10811\n10812\n10813\n10814\n10815\n10816\n10817\n10818\n10819\n10820\n10821\n10822\n10823\n10824\n10825\n10826\n10827\n10828\n10829\n10830\n10831\n10832\n10833\n10834\n10835\n10836\n10837\n10838\n10839\n10840\n10841\n10842\n10843\n10844\n10845\n10846\n10847\n10848\n10849\n10850\n10851\n10852\n10853\n10854\n10855\n10856\n10857\n10858\n10859\n10860\n10861\n10862\n10863\n10864\n10865\n10866\n10867\n10868\n10869\n10870\n10871\n10872\n10873\n10874\n10875\n10876\n10877\n10878\n10879\n10880\n10881\n10882\n10883\n10884\n10885\n10886\n10887\n10888\n10889\n10890\n10891\n10892\n10893\n10894\n10895\n10896\n10897\n10898\n10899\n10900\n10901\n10902\n10903\n10904\n10905\n10906\n10907\n10908\n10909\n10910\n10911\n10912\n10913\n10914\n10915\n10916\n10917\n10918\n10919\n10920\n10921\n10922\n10923\n10924\n10925\n10926\n10927\n10928\n10929\n10930\n10931\n10932\n10933\n10934\n10935\n10936\n10937\n10938\n10939\n10940\n10941\n10942\n10943\n10944\n10945\n10946\n10947\n10948\n10949\n10950\n10951\n10952\n10953\n10954\n10955\n10956\n10957\n10958\n10959\n10960\n10961\n10962\n10963\n10964\n10965\n10966\n10967\n10968\n10969\n10970\n10971\n10972\n10973\n10974\n10975\n10976\n10977\n10978\n10979\n10980\n10981\n10982\n10983\n10984\n10985\n10986\n10987\n10988\n10989\n10990\n10991\n10992\n10993\n10994\n10995\n10996\n10997\n10998\n10999\n11000\n11001\n11002\n11003\n11004\n11005\n11006\n11007\n11008\n11009\n11010\n11011\n11012\n11013\n11
014\n11015\n11016\n11017\n11018\n11019\n11020\n11021\n11022\n11023\n11024\n11025\n11026\n11027\n11028\n11029\n11030\n11031\n11032\n11033\n11034\n11035\n11036\n11037\n11038\n11039\n11040\n11041\n11042\n11043\n11044\n11045\n11046\n11047\n11048\n11049\n11050\n11051\n11052\n11053\n11054\n11055\n11056\n11057\n11058\n11059\n11060\n11061\n11062\n11063\n11064\n11065\n11066\n11067\n11068\n11069\n11070\n11071\n11072\n11073\n11074\n11075\n11076\n11077\n11078\n11079\n11080\n11081\n11082\n11083\n11084\n11085\n11086\n11087\n11088\n11089\n11090\n11091\n11092\n11093\n11094\n11095\n11096\n11097\n11098\n11099\n11100\n11101\n11102\n11103\n11104\n11105\n11106\n11107\n11108\n11109\n11110\n11111\n11112\n11113\n11114\n11115\n11116\n11117\n11118\n11119\n11120\n11121\n11122\n11123\n11124\n11125\n11126\n11127\n11128\n11129\n11130\n11131\n11132\n11133\n11134\n11135\n11136\n11137\n11138\n11139\n11140\n11141\n11142\n11143\n11144\n11145\n11146\n11147\n11148\n11149\n11150\n11151\n11152\n11153\n11154\n11155\n11156\n11157\n11158\n11159\n11160\n11161\n11162\n11163\n11164\n11165\n11166\n11167\n11168\n11169\n11170\n11171\n11172\n11173\n11174\n11175\n11176\n11177\n11178\n11179\n11180\n11181\n11182\n11183\n11184\n11185\n11186\n11187\n11188\n11189\n11190\n11191\n11192\n11193\n11194\n11195\n11196\n11197\n11198\n11199\n11200\n11201\n11202\n11203\n11204\n11205\n11206\n11207\n11208\n11209\n11210\n11211\n11212\n11213\n11214\n11215\n11216\n11217\n11218\n11219\n11220\n11221\n11222\n11223\n11224\n11225\n11226\n11227\n11228\n11229\n11230\n11231\n11232\n11233\n11234\n11235\n11236\n11237\n11238\n11239\n11240\n11241\n11242\n11243\n11244\n11245\n11246\n11247\n11248\n11249\n11250\n11251\n11252\n11253\n11254\n11255\n11256\n11257\n11258\n11259\n11260\n11261\n11262\n11263\n11264\n11265\n11266\n11267\n11268\n11269\n11270\n11271\n11272\n11273\n11274\n11275\n11276\n11277\n11278\n11279\n11280\n11281\n11282\n11283\n11284\n11285\n11286\n11287\n11288\n11289\n11290\n11291\n11292\n11293\n11294\n11295\n11296\n11297\n11298\n11299\n
11300\n11301\n11302\n11303\n11304\n11305\n11306\n11307\n11308\n11309\n11310\n11311\n11312\n11313\n11314\n11315\n11316\n11317\n11318\n11319\n11320\n11321\n11322\n11323\n11324\n11325\n11326\n11327\n11328\n11329\n11330\n11331\n11332\n11333\n11334\n11335\n11336\n11337\n11338\n11339\n11340\n11341\n11342\n11343\n11344\n11345\n11346\n11347\n11348\n11349\n11350\n11351\n11352\n11353\n11354\n11355\n11356\n11357\n11358\n11359\n11360\n11361\n11362\n11363\n11364\n11365\n11366\n11367\n11368\n11369\n11370\n11371\n11372\n11373\n11374\n11375\n11376\n11377\n11378\n11379\n11380\n11381\n11382\n11383\n11384\n11385\n11386\n11387\n11388\n11389\n11390\n11391\n11392\n11393\n11394\n11395\n11396\n11397\n11398\n11399\n11400\n11401\n11402\n11403\n11404\n11405\n11406\n11407\n11408\n11409\n11410\n11411\n11412\n11413\n11414\n11415\n11416\n11417\n11418\n11419\n11420\n11421\n11422\n11423\n11424\n11425\n11426\n11427\n11428\n11429\n11430\n11431\n11432\n11433\n11434\n11435\n11436\n11437\n11438\n11439\n11440\n11441\n11442\n11443\n11444\n11445\n11446\n11447\n11448\n11449\n11450\n11451\n11452\n11453\n11454\n11455\n11456\n11457\n11458\n11459\n11460\n11461\n11462\n11463\n11464\n11465\n11466\n11467\n11468\n11469\n11470\n11471\n11472\n11473\n11474\n11475\n11476\n11477\n11478\n11479\n11480\n11481\n11482\n11483\n11484\n11485\n11486\n11487\n11488\n11489\n11490\n11491\n11492\n11493\n11494\n11495\n11496\n11497\n11498\n11499\n11500\n11501\n11502\n11503\n11504\n11505\n11506\n11507\n11508\n11509\n11510\n11511\n11512\n11513\n11514\n11515\n11516\n11517\n11518\n11519\n11520\n11521\n11522\n11523\n11524\n11525\n11526\n11527\n11528\n11529\n11530\n11531\n11532\n11533\n11534\n11535\n11536\n11537\n11538\n11539\n11540\n11541\n11542\n11543\n11544\n11545\n11546\n11547\n11548\n11549\n11550\n11551\n11552\n11553\n11554\n11555\n11556\n11557\n11558\n11559\n11560\n11561\n11562\n11563\n11564\n11565\n11566\n11567\n11568\n11569\n11570\n11571\n11572\n11573\n11574\n11575\n11576\n11577\n11578\n11579\n11580\n11581\n11582\n11583\n11584\n11585
\n11586\n11587\n11588\n11589\n11590\n11591\n11592\n11593\n11594\n11595\n11596\n11597\n11598\n11599\n11600\n11601\n11602\n11603\n11604\n11605\n11606\n11607\n11608\n11609\n11610\n11611\n11612\n11613\n11614\n11615\n11616\n11617\n11618\n11619\n11620\n11621\n11622\n11623\n11624\n11625\n11626\n11627\n11628\n11629\n11630\n11631\n11632\n11633\n11634\n11635\n11636\n11637\n11638\n11639\n11640\n11641\n11642\n11643\n11644\n11645\n11646\n11647\n11648\n11649\n11650\n11651\n11652\n11653\n11654\n11655\n11656\n11657\n11658\n11659\n11660\n11661\n11662\n11663\n11664\n11665\n11666\n11667\n11668\n11669\n11670\n11671\n11672\n11673\n11674\n11675\n11676\n11677\n11678\n11679\n11680\n11681\n11682\n11683\n11684\n11685\n11686\n11687\n11688\n11689\n11690\n11691\n11692\n11693\n11694\n11695\n11696\n11697\n11698\n11699\n11700\n11701\n11702\n11703\n11704\n11705\n11706\n11707\n11708\n11709\n11710\n11711\n11712\n11713\n11714\n11715\n11716\n11717\n11718\n11719\n11720\n11721\n11722\n11723\n11724\n11725\n11726\n11727\n11728\n11729\n11730\n11731\n11732\n11733\n11734\n11735\n11736\n11737\n11738\n11739\n11740\n11741\n11742\n11743\n11744\n11745\n11746\n11747\n11748\n11749\n11750\n11751\n11752\n11753\n11754\n11755\n11756\n11757\n11758\n11759\n11760\n11761\n11762\n11763\n11764\n11765\n11766\n11767\n11768\n11769\n11770\n11771\n11772\n11773\n11774\n11775\n11776\n11777\n11778\n11779\n11780\n11781\n11782\n11783\n11784\n11785\n11786\n11787\n11788\n11789\n11790\n11791\n11792\n11793\n11794\n11795\n11796\n11797\n11798\n11799\n11800\n11801\n11802\n11803\n11804\n11805\n11806\n11807\n11808\n11809\n11810\n11811\n11812\n11813\n11814\n11815\n11816\n11817\n11818\n11819\n11820\n11821\n11822\n11823\n11824\n11825\n11826\n11827\n11828\n11829\n11830\n11831\n11832\n11833\n11834\n11835\n11836\n11837\n11838\n11839\n11840\n11841\n11842\n11843\n11844\n11845\n11846\n11847\n11848\n11849\n11850\n11851\n11852\n11853\n11854\n11855\n11856\n11857\n11858\n11859\n11860\n11861\n11862\n11863\n11864\n11865\n11866\n11867\n11868\n11869\n11870\n118
71\n11872\n11873\n11874\n11875\n11876\n11877\n11878\n11879\n11880\n11881\n11882\n11883\n11884\n11885\n11886\n11887\n11888\n11889\n11890\n11891\n11892\n11893\n11894\n11895\n11896\n11897\n11898\n11899\n11900\n11901\n11902\n11903\n11904\n11905\n11906\n11907\n11908\n11909\n11910\n11911\n11912\n11913\n11914\n11915\n11916\n11917\n11918\n11919\n11920\n11921\n11922\n11923\n11924\n11925\n11926\n11927\n11928\n11929\n11930\n11931\n11932\n11933\n11934\n11935\n11936\n11937\n11938\n11939\n11940\n11941\n11942\n11943\n11944\n11945\n11946\n11947\n11948\n11949\n11950\n11951\n11952\n11953\n11954\n11955\n11956\n11957\n11958\n11959\n11960\n11961\n11962\n11963\n11964\n11965\n11966\n11967\n11968\n11969\n11970\n11971\n11972\n11973\n11974\n11975\n11976\n11977\n11978\n11979\n11980\n11981\n11982\n11983\n11984\n11985\n11986\n11987\n11988\n11989\n11990\n11991\n11992\n11993\n11994\n11995\n11996\n11997\n11998\n11999\n12000\n12001\n12002\n12003\n12004\n12005\n12006\n12007\n12008\n12009\n12010\n12011\n12012\n12013\n12014\n12015\n12016\n12017\n12018\n12019\n12020\n12021\n12022\n12023\n12024\n12025\n12026\n12027\n12028\n12029\n12030\n12031\n12032\n12033\n12034\n12035\n12036\n12037\n12038\n12039\n12040\n12041\n12042\n12043\n12044\n12045\n12046\n12047\n12048\n12049\n12050\n12051\n12052\n12053\n12054\n12055\n12056\n12057\n12058\n12059\n12060\n12061\n12062\n12063\n12064\n12065\n12066\n12067\n12068\n12069\n12070\n12071\n12072\n12073\n12074\n12075\n12076\n12077\n12078\n12079\n12080\n12081\n12082\n12083\n12084\n12085\n12086\n12087\n12088\n12089\n12090\n12091\n12092\n12093\n12094\n12095\n12096\n12097\n12098\n12099\n12100\n12101\n12102\n12103\n12104\n12105\n12106\n12107\n12108\n12109\n12110\n12111\n12112\n12113\n12114\n12115\n12116\n12117\n12118\n12119\n12120\n12121\n12122\n12123\n12124\n12125\n12126\n12127\n12128\n12129\n12130\n12131\n12132\n12133\n12134\n12135\n12136\n12137\n12138\n12139\n12140\n12141\n12142\n12143\n12144\n12145\n12146\n12147\n12148\n12149\n12150\n12151\n12152\n12153\n12154\n12155\n12156\n1
2157\n12158\n12159\n12160\n12161\n12162\n12163\n12164\n12165\n12166\n12167\n12168\n12169\n12170\n12171\n12172\n12173\n12174\n12175\n12176\n12177\n12178\n12179\n12180\n12181\n12182\n12183\n12184\n12185\n12186\n12187\n12188\n12189\n12190\n12191\n12192\n12193\n12194\n12195\n12196\n12197\n12198\n12199\n12200\n12201\n12202\n12203\n12204\n12205\n12206\n12207\n12208\n12209\n12210\n12211\n12212\n12213\n12214\n12215\n12216\n12217\n12218\n12219\n12220\n12221\n12222\n12223\n12224\n12225\n12226\n12227\n12228\n12229\n12230\n12231\n12232\n12233\n12234\n12235\n12236\n12237\n12238\n12239\n12240\n12241\n12242\n12243\n12244\n12245\n12246\n12247\n12248\n12249\n12250\n12251\n12252\n12253\n12254\n12255\n12256\n12257\n12258\n12259\n12260\n12261\n12262\n12263\n12264\n12265\n12266\n12267\n12268\n12269\n12270\n12271\n12272\n12273\n12274\n12275\n12276\n12277\n12278\n12279\n12280\n12281\n12282\n12283\n12284\n12285\n12286\n12287\n12288\n12289\n12290\n12291\n12292\n12293\n12294\n12295\n12296\n12297\n12298\n12299\n12300\n12301\n12302\n12303\n12304\n12305\n12306\n12307\n12308\n12309\n12310\n12311\n12312\n12313\n12314\n12315\n12316\n12317\n12318\n12319\n12320\n12321\n12322\n12323\n12324\n12325\n12326\n12327\n12328\n12329\n12330\n12331\n12332\n12333\n12334\n12335\n12336\n12337\n12338\n12339\n12340\n12341\n12342\n12343\n12344\n12345\n12346\n12347\n12348\n12349\n12350\n12351\n12352\n12353\n12354\n12355\n12356\n12357\n12358\n12359\n12360\n12361\n12362\n12363\n12364\n12365\n12366\n12367\n12368\n12369\n12370\n12371\n12372\n12373\n12374\n12375\n12376\n12377\n12378\n12379\n12380\n12381\n12382\n12383\n12384\n12385\n12386\n12387\n12388\n12389\n12390\n12391\n12392\n12393\n12394\n12395\n12396\n12397\n12398\n12399\n12400\n12401\n12402\n12403\n12404\n12405\n12406\n12407\n12408\n12409\n12410\n12411\n12412\n12413\n12414\n12415\n12416\n12417\n12418\n12419\n12420\n12421\n12422\n12423\n12424\n12425\n12426\n12427\n12428\n12429\n12430\n12431\n12432\n12433\n12434\n12435\n12436\n12437\n12438\n12439\n12440\n12441\n12442\
n12443\n12444\n12445\n12446\n12447\n12448\n12449\n12450\n12451\n12452\n12453\n12454\n12455\n12456\n12457\n12458\n12459\n12460\n12461\n12462\n12463\n12464\n12465\n12466\n12467\n12468\n12469\n12470\n12471\n12472\n12473\n12474\n12475\n12476\n12477\n12478\n12479\n12480\n12481\n12482\n12483\n12484\n12485\n12486\n12487\n12488\n12489\n12490\n12491\n12492\n12493\n12494\n12495\n12496\n12497\n12498\n12499\n12500\n12501\n12502\n12503\n12504\n12505\n12506\n12507\n12508\n12509\n12510\n12511\n12512\n12513\n12514\n12515\n12516\n12517\n12518\n12519\n12520\n12521\n12522\n12523\n12524\n12525\n12526\n12527\n12528\n12529\n12530\n12531\n12532\n12533\n12534\n12535\n12536\n12537\n12538\n12539\n12540\n12541\n12542\n12543\n12544\n12545\n12546\n12547\n12548\n12549\n12550\n12551\n12552\n12553\n12554\n12555\n12556\n12557\n12558\n12559\n12560\n12561\n12562\n12563\n12564\n12565\n12566\n12567\n12568\n12569\n12570\n12571\n12572\n12573\n12574\n12575\n12576\n12577\n12578\n12579\n12580\n12581\n12582\n12583\n12584\n12585\n12586\n12587\n12588\n12589\n12590\n12591\n12592\n12593\n12594\n12595\n12596\n12597\n12598\n12599\n12600\n12601\n12602\n12603\n12604\n12605\n12606\n12607\n12608\n12609\n12610\n12611\n12612\n12613\n12614\n12615\n12616\n12617\n12618\n12619\n12620\n12621\n12622\n12623\n12624\n12625\n12626\n12627\n12628\n12629\n12630\n12631\n12632\n12633\n12634\n12635\n12636\n12637\n12638\n12639\n12640\n12641\n12642\n12643\n12644\n12645\n12646\n12647\n12648\n12649\n12650\n12651\n12652\n12653\n12654\n12655\n12656\n12657\n12658\n12659\n12660\n12661\n12662\n12663\n12664\n12665\n12666\n12667\n12668\n12669\n12670\n12671\n12672\n12673\n12674\n12675\n12676\n12677\n12678\n12679\n12680\n12681\n12682\n12683\n12684\n12685\n12686\n12687\n12688\n12689\n12690\n12691\n12692\n12693\n12694\n12695\n12696\n12697\n12698\n12699\n12700\n12701\n12702\n12703\n12704\n12705\n12706\n12707\n12708\n12709\n12710\n12711\n12712\n12713\n12714\n12715\n12716\n12717\n12718\n12719\n12720\n12721\n12722\n12723\n12724\n12725\n12726\n12727\n1272
8\n12729\n12730\n12731\n12732\n12733\n12734\n12735\n12736\n12737\n12738\n12739\n12740\n12741\n12742\n12743\n12744\n12745\n12746\n12747\n12748\n12749\n12750\n12751\n12752\n12753\n12754\n12755\n12756\n12757\n12758\n12759\n12760\n12761\n12762\n12763\n12764\n12765\n12766\n12767\n12768\n12769\n12770\n12771\n12772\n12773\n12774\n12775\n12776\n12777\n12778\n12779\n12780\n12781\n12782\n12783\n12784\n12785\n12786\n12787\n12788\n12789\n12790\n12791\n12792\n12793\n12794\n12795\n12796\n12797\n12798\n12799\n12800\n12801\n12802\n12803\n12804\n12805\n12806\n12807\n12808\n12809\n12810\n12811\n12812\n12813\n12814\n12815\n12816\n12817\n12818\n12819\n12820\n12821\n12822\n12823\n12824\n12825\n12826\n12827\n12828\n12829\n12830\n12831\n12832\n12833\n12834\n12835\n12836\n12837\n12838\n12839\n12840\n12841\n12842\n12843\n12844\n12845\n12846\n12847\n12848\n12849\n12850\n12851\n12852\n12853\n12854\n12855\n12856\n12857\n12858\n12859\n12860\n12861\n12862\n12863\n12864\n12865\n12866\n12867\n12868\n12869\n12870\n12871\n12872\n12873\n12874\n12875\n12876\n12877\n12878\n12879\n12880\n12881\n12882\n12883\n12884\n12885\n12886\n12887\n12888\n12889\n12890\n12891\n12892\n12893\n12894\n12895\n12896\n12897\n12898\n12899\n12900\n12901\n12902\n12903\n12904\n12905\n12906\n12907\n12908\n12909\n12910\n12911\n12912\n12913\n12914\n12915\n12916\n12917\n12918\n12919\n12920\n12921\n12922\n12923\n12924\n12925\n12926\n12927\n12928\n12929\n12930\n12931\n12932\n12933\n12934\n12935\n12936\n12937\n12938\n12939\n12940\n12941\n12942\n12943\n12944\n12945\n12946\n12947\n12948\n12949\n12950\n12951\n12952\n12953\n12954\n12955\n12956\n12957\n12958\n12959\n12960\n12961\n12962\n12963\n12964\n12965\n12966\n12967\n12968\n12969\n12970\n12971\n12972\n12973\n12974\n12975\n12976\n12977\n12978\n12979\n12980\n12981\n12982\n12983\n12984\n12985\n12986\n12987\n12988\n12989\n12990\n12991\n12992\n12993\n12994\n12995\n12996\n12997\n12998\n12999\n13000\n13001\n13002\n13003\n13004\n13005\n13006\n13007\n13008\n13009\n13010\n13011\n13012\n13013\n13
014\n13015\n13016\n13017\n13018\n13019\n13020\n13021\n13022\n13023\n13024\n13025\n13026\n13027\n13028\n13029\n13030\n13031\n13032\n13033\n13034\n13035\n13036\n13037\n13038\n13039\n13040\n13041\n13042\n13043\n13044\n13045\n13046\n13047\n13048\n13049\n13050\n13051\n13052\n13053\n13054\n13055\n13056\n13057\n13058\n13059\n13060\n13061\n13062\n13063\n13064\n13065\n13066\n13067\n13068\n13069\n13070\n13071\n13072\n13073\n13074\n13075\n13076\n13077\n13078\n13079\n13080\n13081\n13082\n13083\n13084\n13085\n13086\n13087\n13088\n13089\n13090\n13091\n13092\n13093\n13094\n13095\n13096\n13097\n13098\n13099\n13100\n13101\n13102\n13103\n13104\n13105\n13106\n13107\n13108\n13109\n13110\n13111\n13112\n13113\n13114\n13115\n13116\n13117\n13118\n13119\n13120\n13121\n13122\n13123\n13124\n13125\n13126\n13127\n13128\n13129\n13130\n13131\n13132\n13133\n13134\n13135\n13136\n13137\n13138\n13139\n13140\n13141\n13142\n13143\n13144\n13145\n13146\n13147\n13148\n13149\n13150\n13151\n13152\n13153\n13154\n13155\n13156\n13157\n13158\n13159\n13160\n13161\n13162\n13163\n13164\n13165\n13166\n13167\n13168\n13169\n13170\n13171\n13172\n13173\n13174\n13175\n13176\n13177\n13178\n13179\n13180\n13181\n13182\n13183\n13184\n13185\n13186\n13187\n13188\n13189\n13190\n13191\n13192\n13193\n13194\n13195\n13196\n13197\n13198\n13199\n13200\n13201\n13202\n13203\n13204\n13205\n13206\n13207\n13208\n13209\n13210\n13211\n13212\n13213\n13214\n13215\n13216\n13217\n13218\n13219\n13220\n13221\n13222\n13223\n13224\n13225\n13226\n13227\n13228\n13229\n13230\n13231\n13232\n13233\n13234\n13235\n13236\n13237\n13238\n13239\n13240\n13241\n13242\n13243\n13244\n13245\n13246\n13247\n13248\n13249\n13250\n13251\n13252\n13253\n13254\n13255\n13256\n13257\n13258\n13259\n13260\n13261\n13262\n13263\n13264\n13265\n13266\n13267\n13268\n13269\n13270\n13271\n13272\n13273\n13274\n13275\n13276\n13277\n13278\n13279\n13280\n13281\n13282\n13283\n13284\n13285\n13286\n13287\n13288\n13289\n13290\n13291\n13292\n13293\n13294\n13295\n13296\n13297\n13298\n13299\n
13300\n13301\n13302\n13303\n13304\n13305\n13306\n13307\n13308\n13309\n13310\n13311\n13312\n13313\n13314\n13315\n13316\n13317\n13318\n13319\n13320\n13321\n13322\n13323\n13324\n13325\n13326\n13327\n13328\n13329\n13330\n13331\n13332\n13333\n13334\n13335\n13336\n13337\n13338\n13339\n13340\n13341\n13342\n13343\n13344\n13345\n13346\n13347\n13348\n13349\n13350\n13351\n13352\n13353\n13354\n13355\n13356\n13357\n13358\n13359\n13360\n13361\n13362\n13363\n13364\n13365\n13366\n13367\n13368\n13369\n13370\n13371\n13372\n13373\n13374\n13375\n13376\n13377\n13378\n13379\n13380\n13381\n13382\n13383\n13384\n13385\n13386\n13387\n13388\n13389\n13390\n13391\n13392\n13393\n13394\n13395\n13396\n13397\n13398\n13399\n13400\n13401\n13402\n13403\n13404\n13405\n13406\n13407\n13408\n13409\n13410\n13411\n13412\n13413\n13414\n13415\n13416\n13417\n13418\n13419\n13420\n13421\n13422\n13423\n13424\n13425\n13426\n13427\n13428\n13429\n13430\n13431\n13432\n13433\n13434\n13435\n13436\n13437\n13438\n13439\n13440\n13441\n13442\n13443\n13444\n13445\n13446\n13447\n13448\n13449\n13450\n13451\n13452\n13453\n13454\n13455\n13456\n13457\n13458\n13459\n13460\n13461\n13462\n13463\n13464\n13465\n13466\n13467\n13468\n13469\n13470\n13471\n13472\n13473\n13474\n13475\n13476\n13477\n13478\n13479\n13480\n13481\n13482\n13483\n13484\n13485\n13486\n13487\n13488\n13489\n13490\n13491\n13492\n13493\n13494\n13495\n13496\n13497\n13498\n13499\n13500\n13501\n13502\n13503\n13504\n13505\n13506\n13507\n13508\n13509\n13510\n13511\n13512\n13513\n13514\n13515\n13516\n13517\n13518\n13519\n13520\n13521\n13522\n13523\n13524\n13525\n13526\n13527\n13528\n13529\n13530\n13531\n13532\n13533\n13534\n13535\n13536\n13537\n13538\n13539\n13540\n13541\n13542\n13543\n13544\n13545\n13546\n13547\n13548\n13549\n13550\n13551\n13552\n13553\n13554\n13555\n13556\n13557\n13558\n13559\n13560\n13561\n13562\n13563\n13564\n13565\n13566\n13567\n13568\n13569\n13570\n13571\n13572\n13573\n13574\n13575\n13576\n13577\n13578\n13579\n13580\n13581\n13582\n13583\n13584\n13585
\n13586\n13587\n13588\n13589\n13590\n13591\n13592\n13593\n13594\n13595\n13596\n13597\n13598\n13599\n13600\n13601\n13602\n13603\n13604\n13605\n13606\n13607\n13608\n13609\n13610\n13611\n13612\n13613\n13614\n13615\n13616\n13617\n13618\n13619\n13620\n13621\n13622\n13623\n13624\n13625\n13626\n13627\n13628\n13629\n13630\n13631\n13632\n13633\n13634\n13635\n13636\n13637\n13638\n13639\n13640\n13641\n13642\n13643\n13644\n13645\n13646\n13647\n13648\n13649\n13650\n13651\n13652\n13653\n13654\n13655\n13656\n13657\n13658\n13659\n13660\n13661\n13662\n13663\n13664\n13665\n13666\n13667\n13668\n13669\n13670\n13671\n13672\n13673\n13674\n13675\n13676\n13677\n13678\n13679\n13680\n13681\n13682\n13683\n13684\n13685\n13686\n13687\n13688\n13689\n13690\n13691\n13692\n13693\n13694\n13695\n13696\n13697\n13698\n13699\n13700\n13701\n13702\n13703\n13704\n13705\n13706\n13707\n13708\n13709\n13710\n13711\n13712\n13713\n13714\n13715\n13716\n13717\n13718\n13719\n13720\n13721\n13722\n13723\n13724\n13725\n13726\n13727\n13728\n13729\n13730\n13731\n13732\n13733\n13734\n13735\n13736\n13737\n13738\n13739\n13740\n13741\n13742\n13743\n13744\n13745\n13746\n13747\n13748\n13749\n13750\n13751\n13752\n13753\n13754\n13755\n13756\n13757\n13758\n13759\n13760\n13761\n13762\n13763\n13764\n13765\n13766\n13767\n13768\n13769\n13770\n13771\n13772\n13773\n13774\n13775\n13776\n13777\n13778\n13779\n13780\n13781\n13782\n13783\n13784\n13785\n13786\n13787\n13788\n13789\n13790\n13791\n13792\n13793\n13794\n13795\n13796\n13797\n13798\n13799\n13800\n13801\n13802\n13803\n13804\n13805\n13806\n13807\n13808\n13809\n13810\n13811\n13812\n13813\n13814\n13815\n13816\n13817\n13818\n13819\n13820\n13821\n13822\n13823\n13824\n13825\n13826\n13827\n13828\n13829\n13830\n13831\n13832\n13833\n13834\n13835\n13836\n13837\n13838\n13839\n13840\n13841\n13842\n13843\n13844\n13845\n13846\n13847\n13848\n13849\n13850\n13851\n13852\n13853\n13854\n13855\n13856\n13857\n13858\n13859\n13860\n13861\n13862\n13863\n13864\n13865\n13866\n13867\n13868\n13869\n13870\n138
71\n13872\n13873\n13874\n13875\n13876\n13877\n13878\n13879\n13880\n13881\n13882\n13883\n13884\n13885\n13886\n13887\n13888\n13889\n13890\n13891\n13892\n13893\n13894\n13895\n13896\n13897\n13898\n13899\n13900\n13901\n13902\n13903\n13904\n13905\n13906\n13907\n13908\n13909\n13910\n13911\n13912\n13913\n13914\n13915\n13916\n13917\n13918\n13919\n13920\n13921\n13922\n13923\n13924\n13925\n13926\n13927\n13928\n13929\n13930\n13931\n13932\n13933\n13934\n13935\n13936\n13937\n13938\n13939\n13940\n13941\n13942\n13943\n13944\n13945\n13946\n13947\n13948\n13949\n13950\n13951\n13952\n13953\n13954\n13955\n13956\n13957\n13958\n13959\n13960\n13961\n13962\n13963\n13964\n13965\n13966\n13967\n13968\n13969\n13970\n13971\n13972\n13973\n13974\n13975\n13976\n13977\n13978\n13979\n13980\n13981\n13982\n13983\n13984\n13985\n13986\n13987\n13988\n13989\n13990\n13991\n13992\n13993\n13994\n13995\n13996\n13997\n13998\n13999\n14000\n14001\n14002\n14003\n14004\n14005\n14006\n14007\n14008\n14009\n14010\n14011\n14012\n14013\n14014\n14015\n14016\n14017\n14018\n14019\n14020\n14021\n14022\n14023\n14024\n14025\n14026\n14027\n14028\n14029\n14030\n14031\n14032\n14033\n14034\n14035\n14036\n14037\n14038\n14039\n14040\n14041\n14042\n14043\n14044\n14045\n14046\n14047\n14048\n14049\n14050\n14051\n14052\n14053\n14054\n14055\n14056\n14057\n14058\n14059\n14060\n14061\n14062\n14063\n14064\n14065\n14066\n14067\n14068\n14069\n14070\n14071\n14072\n14073\n14074\n14075\n14076\n14077\n14078\n14079\n14080\n14081\n14082\n14083\n14084\n14085\n14086\n14087\n14088\n14089\n14090\n14091\n14092\n14093\n14094\n14095\n14096\n14097\n14098\n14099\n14100\n14101\n14102\n14103\n14104\n14105\n14106\n14107\n14108\n14109\n14110\n14111\n14112\n14113\n14114\n14115\n14116\n14117\n14118\n14119\n14120\n14121\n14122\n14123\n14124\n14125\n14126\n14127\n14128\n14129\n14130\n14131\n14132\n14133\n14134\n14135\n14136\n14137\n14138\n14139\n14140\n14141\n14142\n14143\n14144\n14145\n14146\n14147\n14148\n14149\n14150\n14151\n14152\n14153\n14154\n14155\n14156\n1
4157\n14158\n14159\n14160\n14161\n14162\n14163\n14164\n14165\n14166\n14167\n14168\n14169\n14170\n14171\n14172\n14173\n14174\n14175\n14176\n14177\n14178\n14179\n14180\n14181\n14182\n14183\n14184\n14185\n14186\n14187\n14188\n14189\n14190\n14191\n14192\n14193\n14194\n14195\n14196\n14197\n14198\n14199\n14200\n14201\n14202\n14203\n14204\n14205\n14206\n14207\n14208\n14209\n14210\n14211\n14212\n14213\n14214\n14215\n14216\n14217\n14218\n14219\n14220\n14221\n14222\n14223\n14224\n14225\n14226\n14227\n14228\n14229\n14230\n14231\n14232\n14233\n14234\n14235\n14236\n14237\n14238\n14239\n14240\n14241\n14242\n14243\n14244\n14245\n14246\n14247\n14248\n14249\n14250\n14251\n14252\n14253\n14254\n14255\n14256\n14257\n14258\n14259\n14260\n14261\n14262\n14263\n14264\n14265\n14266\n14267\n14268\n14269\n14270\n14271\n14272\n14273\n14274\n14275\n14276\n14277\n14278\n14279\n14280\n14281\n14282\n14283\n14284\n14285\n14286\n14287\n14288\n14289\n14290\n14291\n14292\n14293\n14294\n14295\n14296\n14297\n14298\n14299\n14300\n14301\n14302\n14303\n14304\n14305\n14306\n14307\n14308\n14309\n14310\n14311\n14312\n14313\n14314\n14315\n14316\n14317\n14318\n14319\n14320\n14321\n14322\n14323\n14324\n14325\n14326\n14327\n14328\n14329\n14330\n14331\n14332\n14333\n14334\n14335\n14336\n14337\n14338\n14339\n14340\n14341\n14342\n14343\n14344\n14345\n14346\n14347\n14348\n14349\n14350\n14351\n14352\n14353\n14354\n14355\n14356\n14357\n14358\n14359\n14360\n14361\n14362\n14363\n14364\n14365\n14366\n14367\n14368\n14369\n14370\n14371\n14372\n14373\n14374\n14375\n14376\n14377\n14378\n14379\n14380\n14381\n14382\n14383\n14384\n14385\n14386\n14387\n14388\n14389\n14390\n14391\n14392\n14393\n14394\n14395\n14396\n14397\n14398\n14399\n14400\n14401\n14402\n14403\n14404\n14405\n14406\n14407\n14408\n14409\n14410\n14411\n14412\n14413\n14414\n14415\n14416\n14417\n14418\n14419\n14420\n14421\n14422\n14423\n14424\n14425\n14426\n14427\n14428\n14429\n14430\n14431\n14432\n14433\n14434\n14435\n14436\n14437\n14438\n14439\n14440\n14441\n14442\
n14443\n14444\n14445\n14446\n14447\n14448\n14449\n14450\n14451\n14452\n14453\n14454\n14455\n14456\n14457\n14458\n14459\n14460\n14461\n14462\n14463\n14464\n14465\n14466\n14467\n14468\n14469\n14470\n14471\n14472\n14473\n14474\n14475\n14476\n14477\n14478\n14479\n14480\n14481\n14482\n14483\n14484\n14485\n14486\n14487\n14488\n14489\n14490\n14491\n14492\n14493\n14494\n14495\n14496\n14497\n14498\n14499\n14500\n14501\n14502\n14503\n14504\n14505\n14506\n14507\n14508\n14509\n14510\n14511\n14512\n14513\n14514\n14515\n14516\n14517\n14518\n14519\n14520\n14521\n14522\n14523\n14524\n14525\n14526\n14527\n14528\n14529\n14530\n14531\n14532\n14533\n14534\n14535\n14536\n14537\n14538\n14539\n14540\n14541\n14542\n14543\n14544\n14545\n14546\n14547\n14548\n14549\n14550\n14551\n14552\n14553\n14554\n14555\n14556\n14557\n14558\n14559\n14560\n14561\n14562\n14563\n14564\n14565\n14566\n14567\n14568\n14569\n14570\n14571\n14572\n14573\n14574\n14575\n14576\n14577\n14578\n14579\n14580\n14581\n14582\n14583\n14584\n14585\n14586\n14587\n14588\n14589\n14590\n14591\n14592\n14593\n14594\n14595\n14596\n14597\n14598\n14599\n14600\n14601\n14602\n14603\n14604\n14605\n14606\n14607\n14608\n14609\n14610\n14611\n14612\n14613\n14614\n14615\n14616\n14617\n14618\n14619\n14620\n14621\n14622\n14623\n14624\n14625\n14626\n14627\n14628\n14629\n14630\n14631\n14632\n14633\n14634\n14635\n14636\n14637\n14638\n14639\n14640\n14641\n14642\n14643\n14644\n14645\n14646\n14647\n14648\n14649\n14650\n14651\n14652\n14653\n14654\n14655\n14656\n14657\n14658\n14659\n14660\n14661\n14662\n14663\n14664\n14665\n14666\n14667\n14668\n14669\n14670\n14671\n14672\n14673\n14674\n14675\n14676\n14677\n14678\n14679\n14680\n14681\n14682\n14683\n14684\n14685\n14686\n14687\n14688\n14689\n14690\n14691\n14692\n14693\n14694\n14695\n14696\n14697\n14698\n14699\n14700\n14701\n14702\n14703\n14704\n14705\n14706\n14707\n14708\n14709\n14710\n14711\n14712\n14713\n14714\n14715\n14716\n14717\n14718\n14719\n14720\n14721\n14722\n14723\n14724\n14725\n14726\n14727\n1472
8\n14729\n14730\n14731\n14732\n14733\n14734\n14735\n14736\n14737\n14738\n14739\n14740\n14741\n14742\n14743\n14744\n14745\n14746\n14747\n14748\n14749\n14750\n14751\n14752\n14753\n14754\n14755\n14756\n14757\n14758\n14759\n14760\n14761\n14762\n14763\n14764\n14765\n14766\n14767\n14768\n14769\n14770\n14771\n14772\n14773\n14774\n14775\n14776\n14777\n14778\n14779\n14780\n14781\n14782\n14783\n14784\n14785\n14786\n14787\n14788\n14789\n14790\n14791\n14792\n14793\n14794\n14795\n14796\n14797\n14798\n14799\n14800\n14801\n14802\n14803\n14804\n14805\n14806\n14807\n14808\n14809\n14810\n14811\n14812\n14813\n14814\n14815\n14816\n14817\n14818\n14819\n14820\n14821\n14822\n14823\n14824\n14825\n14826\n14827\n14828\n14829\n14830\n14831\n14832\n14833\n14834\n14835\n14836\n14837\n14838\n14839\n14840\n14841\n14842\n14843\n14844\n14845\n14846\n14847\n14848\n14849\n14850\n14851\n14852\n14853\n14854\n14855\n14856\n14857\n14858\n14859\n14860\n14861\n14862\n14863\n14864\n14865\n14866\n14867\n14868\n14869\n14870\n14871\n14872\n14873\n14874\n14875\n14876\n14877\n14878\n14879\n14880\n14881\n14882\n14883\n14884\n14885\n14886\n14887\n14888\n14889\n14890\n14891\n14892\n14893\n14894\n14895\n14896\n14897\n14898\n14899\n14900\n14901\n14902\n14903\n14904\n14905\n14906\n14907\n14908\n14909\n14910\n14911\n14912\n14913\n14914\n14915\n14916\n14917\n14918\n14919\n14920\n14921\n14922\n14923\n14924\n14925\n14926\n14927\n14928\n14929\n14930\n14931\n14932\n14933\n14934\n14935\n14936\n14937\n14938\n14939\n14940\n14941\n14942\n14943\n14944\n14945\n14946\n14947\n14948\n14949\n14950\n14951\n14952\n14953\n14954\n14955\n14956\n14957\n14958\n14959\n14960\n14961\n14962\n14963\n14964\n14965\n14966\n14967\n14968\n14969\n14970\n14971\n14972\n14973\n14974\n14975\n14976\n14977\n14978\n14979\n14980\n14981\n14982\n14983\n14984\n14985\n14986\n14987\n14988\n14989\n14990\n14991\n14992\n14993\n14994\n14995\n14996\n14997\n14998\n14999\n15000\n15001\n15002\n15003\n15004\n15005\n15006\n15007\n15008\n15009\n15010\n15011\n15012\n15013\n15
014\n15015\n15016\n15017\n15018\n15019\n15020\n15021\n15022\n15023\n15024\n15025\n15026\n15028\n15029\n15030\n15031\n15032\n15033\n15034\n15035\n15036\n15037\n15038\n15039\n15040\n15041\n15042\n15043\n15044\n15045\n15046\n15047\n15048\n15049\n15050\n15051\n15052\n15053\n15054\n15055\n15056\n15057\n15058\n15059\n15060\n15061\n15062\n15063\n15064\n15065\n15066\n15067\n15068\n15069\n15070\n15071\n15072\n15073\n15074\n15075\n15076\n15077\n15078\n15079\n15080\n15081\n15082\n15083\n15084\n15085\n15086\n15087\n15088\n15089\n15090\n15091\n15092\n15093\n15094\n15095\n15096\n15097\n15098\n15099\n15100\n15101\n15102\n15103\n15104\n15105\n15106\n15107\n15108\n15109\n15110\n15111\n15112\n15113\n15114\n15115\n15116\n15117\n15118\n15119\n15120\n15121\n15122\n15123\n15124\n15125\n15126\n15127\n15128\n15129\n15130\n15131\n15132\n15133\n15134\n15135\n15136\n15137\n15138\n15139\n15140\n15141\n15142\n15143\n15144\n15145\n15146\n15147\n15148\n15149\n15150\n15151\n15152\n15153\n15154\n15155\n15156\n15157\n15158\n15159\n15160\n15161\n15162\n15163\n15164\n15165\n15166\n15167\n15168\n15169\n15170\n15171\n15172\n15173\n15174\n15175\n15176\n15177\n15178\n15179\n15180\n15181\n15182\n15183\n15184\n15185\n15186\n15187\n15188\n15189\n15190\n15191\n15192\n15193\n15194\n15195\n15196\n15197\n15198\n15199\n15200\n15201\n15202\n15203\n15204\n15205\n15206\n15207\n15208\n15209\n15210\n15211\n15212\n15213\n15214\n15215\n15216\n15217\n15218\n15219\n15220\n15221\n15222\n15223\n15224\n15225\n15226\n15227\n15228\n15229\n15230\n15231\n15232\n15233\n15234\n15235\n15236\n15237\n15238\n15239\n15240\n15241\n15242\n15243\n15244\n15245\n15246\n15247\n15248\n15249\n15250\n15251\n15252\n15253\n15254\n15255\n15256\n15257\n15258\n15259\n15260\n15261\n15262\n15263\n15264\n15265\n15266\n15267\n15268\n15269\n15270\n15271\n15272\n15273\n15274\n15275\n15276\n15277\n15278\n15279\n15280\n15281\n15282\n15283\n15284\n15285\n15286\n15287\n15288\n15289\n15290\n15291\n15292\n15293\n15294\n15295\n15296\n15297\n15298\n15299\n15300\n
15301\n15302\n15303\n15304\n15305\n15306\n15307\n15308\n15309\n15310\n15311\n15312\n15313\n15314\n15315\n15316\n15317\n15318\n15319\n15320\n15321\n15322\n15323\n15324\n15325\n15326\n15327\n15328\n15329\n15330\n15331\n15332\n15333\n15334\n15335\n15336\n15337\n15338\n15339\n15340\n15341\n15342\n15343\n15344\n15345\n15346\n15347\n15348\n15349\n15350\n15351\n15352\n15353\n15354\n15355\n15356\n15357\n15358\n15359\n15360\n15361\n15362\n15363\n15364\n15365\n15366\n15367\n15368\n15369\n15370\n15371\n15372\n15373\n15374\n15375\n15376\n15377\n15378\n15379\n15380\n15381\n15382\n15383\n15384\n15385\n15386\n15387\n15388\n15389\n15390\n15391\n15392\n15393\n15394\n15395\n15396\n15397\n15398\n15399\n15400\n15401\n15402\n15403\n15404\n15405\n15406\n15407\n15408\n15409\n15410\n15411\n15412\n15413\n15414\n15415\n15416\n15417\n15418\n15419\n15420\n15421\n15422\n15423\n15424\n15425\n15426\n15427\n15428\n15429\n15430\n15431\n15432\n15433\n15434\n15435\n15436\n15437\n15438\n15439\n15440\n15441\n15442\n15443\n15444\n15445\n15446\n15447\n15448\n15449\n15450\n15451\n15452\n15453\n15454\n15455\n15456\n15457\n15458\n15459\n15460\n15461\n15462\n15463\n15464\n15465\n15466\n15467\n15468\n15469\n15470\n15471\n15472\n15473\n15474\n15475\n15476\n15477\n15478\n15479\n15480\n15481\n15482\n15483\n15484\n15485\n15486\n15487\n15488\n15489\n15490\n15491\n15492\n15493\n15494\n15495\n15496\n15497\n15498\n15499\n15500\n15501\n15502\n15503\n15504\n15505\n15506\n15507\n15508\n15509\n15510\n15511\n15512\n15513\n15514\n15515\n15516\n15517\n15518\n15519\n15520\n15521\n15522\n15523\n15524\n15525\n15526\n15527\n15528\n15529\n15530\n15531\n15532\n15533\n15534\n15535\n15536\n15537\n15538\n15539\n15540\n15541\n15542\n15543\n15544\n15545\n15546\n15547\n15548\n15549\n15550\n15551\n15552\n15553\n15554\n15555\n15556\n15557\n15558\n15559\n15560\n15561\n15562\n15563\n15564\n15565\n15566\n15567\n15568\n15569\n15570\n15571\n15572\n15573\n15574\n15575\n15576\n15577\n15578\n15579\n15580\n15581\n15582\n15583\n15584\n15585\n15586
\n15587\n15588\n15589\n15590\n15591\n15592\n15593\n15594\n15595\n15596\n15597\n15598\n15599\n15600\n15601\n15602\n15603\n15604\n15605\n15606\n15607\n15608\n15609\n15610\n15611\n15612\n15613\n15614\n15615\n15616\n15617\n15618\n15619\n15620\n15621\n15622\n15623\n15624\n15625\n15626\n15627\n15628\n15629\n15630\n15631\n15632\n15633\n15634\n15635\n15636\n15637\n15638\n15639\n15640\n15641\n15642\n15643\n15644\n15645\n15646\n15647\n15648\n15649\n15650\n15651\n15652\n15653\n15654\n15655\n15656\n15657\n15658\n15659\n15660\n15661\n15662\n15663\n15664\n15665\n15666\n15667\n15668\n15669\n15670\n15671\n15672\n15673\n15674\n15675\n15676\n15677\n15678\n15679\n15680\n15681\n15682\n15683\n15684\n15685\n15686\n15687\n15688\n15689\n15690\n15691\n15692\n15693\n15694\n15695\n15696\n15697\n15698\n15699\n15700\n15701\n15702\n15703\n15704\n15705\n15706\n15707\n15708\n15709\n15710\n15711\n15712\n15713\n15714\n15715\n15716\n15717\n15718\n15719\n15720\n15721\n15722\n15723\n15724\n15725\n15726\n15727\n15728\n15729\n15730\n15731\n15732\n15733\n15734\n15735\n15736\n15737\n15738\n15739\n15740\n15741\n15742\n15743\n15744\n15745\n15746\n15747\n15748\n15749\n15750\n15751\n15752\n15753\n15754\n15755\n15756\n15757\n15758\n15759\n15760\n15761\n15762\n15763\n15764\n15765\n15766\n15767\n15768\n15769\n15770\n15771\n15772\n15773\n15774\n15775\n15776\n15777\n15778\n15779\n15780\n15781\n15782\n15783\n15784\n15785\n15786\n15787\n15788\n15789\n15790\n15791\n15792\n15793\n15794\n15795\n15796\n15797\n15798\n15799\n15800\n15801\n15802\n15803\n15804\n15805\n15806\n15807\n15808\n15809\n15810\n15811\n15812\n15813\n15814\n15815\n15816\n15817\n15818\n15819\n15820\n15821\n15822\n15823\n15824\n15825\n15826\n15827\n15828\n15829\n15830\n15831\n15832\n15833\n15834\n15835\n15836\n15837\n15838\n15839\n15840\n15841\n15842\n15843\n15844\n15845\n15846\n15847\n15848\n15849\n15850\n15851\n15852\n15853\n15854\n15855\n15856\n15857\n15858\n15859\n15860\n15861\n15862\n15863\n15864\n15865\n15866\n15867\n15868\n15869\n15870\n15871\n158
72\n15873\n15874\n15875\n15876\n15877\n15878\n15879\n15880\n15881\n15882\n15883\n15884\n15885\n15886\n15887\n15888\n15889\n15890\n15891\n15892\n15893\n15894\n15895\n15896\n15897\n15898\n15899\n15900\n15901\n15902\n15903\n15904\n15905\n15906\n15907\n15908\n15909\n15910\n15911\n15912\n15913\n15914\n15915\n15916\n15917\n15918\n15919\n15920\n15921\n15922\n15923\n15924\n15925\n15926\n15927\n15928\n15929\n15930\n15931\n15932\n15933\n15934\n15935\n15936\n15937\n15938\n15939\n15940\n15941\n15942\n15943\n15944\n15945\n15946\n15947\n15948\n15949\n15950\n15951\n15952\n15953\n15954\n15955\n15956\n15957\n15958\n15959\n15960\n15961\n15962\n15963\n15964\n15965\n15966\n15967\n15968\n15969\n15970\n15971\n15972\n15973\n15974\n15975\n15976\n15977\n15978\n15979\n15980\n15981\n15982\n15983\n15984\n15985\n15986\n15987\n15988\n15989\n15990\n15991\n15992\n15993\n15994\n15995\n15996\n15997\n15998\n15999\n16000\n16001\n16002\n16003\n16004\n16005\n16006\n16007\n16008\n16009\n16010\n16011\n16012\n16013\n16014\n16015\n16016\n16017\n16018\n16019\n16020\n16021\n16022\n16023\n16024\n16025\n16026\n16027\n16028\n16029\n16030\n16031\n16032\n16033\n16034\n16035\n16036\n16037\n16038\n16039\n16040\n16041\n16042\n16043\n16044\n16045\n16046\n16047\n16048\n16049\n16050\n16051\n16052\n16053\n16054\n16055\n16056\n16057\n16058\n16059\n16060\n16061\n16062\n16063\n16064\n16065\n16066\n16067\n16068\n16069\n16070\n16071\n16072\n16073\n16074\n16075\n16076\n16077\n16078\n16079\n16080\n16081\n16082\n16083\n16084\n16085\n16086\n16087\n16088\n16089\n16090\n16091\n16092\n16093\n16094\n16095\n16096\n16097\n16098\n16099\n16100\n16101\n16102\n16103\n16104\n16105\n16106\n16107\n16108\n16109\n16110\n16111\n16112\n16113\n16114\n16115\n16116\n16117\n16118\n16119\n16120\n16121\n16122\n16123\n16124\n16125\n16126\n16127\n16128\n16129\n16130\n16131\n16132\n16133\n16134\n16135\n16136\n16137\n16138\n16139\n16140\n16141\n16142\n16143\n16144\n16145\n16146\n16147\n16148\n16149\n16150\n16151\n16152\n16153\n16154\n16155\n16156\n16157\n1
6158\n16159\n16160\n16161\n16162\n16163\n16164\n16165\n16166\n16167\n16168\n16169\n16170\n16171\n16172\n16173\n16174\n16175\n16176\n16177\n16178\n16179\n16180\n16181\n16182\n16183\n16184\n16185\n16186\n16187\n16188\n16189\n16190\n16191\n16192\n16193\n16194\n16195\n16196\n16197\n16198\n16199\n16200\n16201\n16202\n16203\n16204\n16205\n16206\n16207\n16208\n16209\n16210\n16211\n16212\n16213\n16214\n16215\n16216\n16217\n16218\n16219\n16220\n16221\n16222\n16223\n16224\n16225\n16226\n16227\n16228\n16229\n16230\n16231\n16232\n16233\n16234\n16235\n16236\n16237\n16238\n16239\n16240\n16241\n16242\n16243\n16244\n16245\n16246\n16247\n16248\n16249\n16250\n16251\n16252\n16253\n16254\n16255\n16256\n16257\n16258\n16259\n16260\n16261\n16262\n16263\n16264\n16265\n16266\n16267\n16268\n16269\n16270\n16271\n16272\n16273\n16274\n16275\n16276\n16277\n16278\n16279\n16280\n16281\n16282\n16283\n16284\n16285\n16286\n16287\n16288\n16289\n16290\n16291\n16292\n16293\n16294\n16295\n16296\n16297\n16298\n16299\n16300\n16301\n16302\n16303\n16304\n16305\n16306\n16307\n16308\n16309\n16310\n16311\n16312\n16313\n16314\n16315\n16316\n16317\n16318\n16319\n16320\n16321\n16322\n16323\n16324\n16325\n16326\n16327\n16328\n16329\n16330\n16331\n16332\n16333\n16334\n16335\n16336\n16337\n16338\n16339\n16340\n16341\n16342\n16343\n16344\n16345\n16346\n16347\n16348\n16349\n16350\n16351\n16352\n16353\n16354\n16355\n16356\n16357\n16358\n16359\n16360\n16361\n16362\n16363\n16364\n16365\n16366\n16367\n16368\n16369\n16370\n16371\n16372\n16373\n16374\n16375\n16376\n16377\n16378\n16379\n16380\n16381\n16382\n16383\n16384\n16385\n16386\n16387\n16388\n16389\n16390\n16391\n16392\n16393\n16394\n16395\n16396\n16397\n16398\n16399\n16400\n16401\n16402\n16403\n16404\n16405\n16406\n16407\n16408\n16409\n16410\n16411\n16412\n16413\n16414\n16415\n16416\n16417\n16418\n16419\n16420\n16421\n16422\n16423\n16424\n16425\n16426\n16427\n16428\n16429\n16430\n16431\n16432\n16433\n16434\n16435\n16436\n16437\n16438\n16439\n16440\n16441\n16442\n16443\
n16444\n16445\n16446\n16447\n16448\n16449\n16450\n16451\n16452\n16453\n16454\n16455\n16456\n16457\n16458\n16459\n16460\n16461\n16462\n16463\n16464\n16465\n16466\n16467\n16468\n16469\n16470\n16471\n16472\n16473\n16474\n16475\n16476\n16477\n16478\n16479\n16480\n16481\n16482\n16483\n16484\n16485\n16486\n16487\n16488\n16489\n16490\n16491\n16492\n16493\n16494\n16495\n16496\n16497\n16498\n16499\n16500\n16501\n16502\n16503\n16504\n16505\n16506\n16507\n16508\n16509\n16510\n16511\n16512\n16513\n16514\n16515\n16516\n16517\n16518\n16519\n16520\n16521\n16522\n16523\n16524\n16525\n16526\n16527\n16528\n16529\n16530\n16531\n16532\n16533\n16534\n16535\n16536\n16537\n16538\n16539\n16540\n16541\n16542\n16543\n16544\n16545\n16546\n16547\n16548\n16549\n16550\n16551\n16552\n16553\n16554\n16555\n16556\n16557\n16558\n16559\n16560\n16561\n16562\n16563\n16564\n16565\n16566\n16567\n16568\n16569\n16570\n16571\n16572\n16573\n16574\n16575\n16576\n16577\n16578\n16579\n16580\n16581\n16582\n16583\n16584\n16585\n16586\n16587\n16588\n16589\n16590\n16591\n16592\n16593\n16594\n16595\n16596\n16597\n16598\n16599\n16600\n16601\n16602\n16603\n16604\n16605\n16606\n16607\n16608\n16609\n16610\n16611\n16612\n16613\n16614\n16615\n16616\n16617\n16618\n16619\n16620\n16621\n16622\n16623\n16624\n16625\n16626\n16627\n16628\n16629\n16630\n16631\n16632\n16633\n16634\n16635\n16636\n16637\n16638\n16639\n16640\n16641\n16642\n16643\n16644\n16645\n16646\n16647\n16648\n16649\n16650\n16651\n16652\n16653\n16654\n16655\n16656\n16657\n16658\n16659\n16660\n16661\n16662\n16663\n16664\n16665\n16666\n16667\n16668\n16669\n16670\n16671\n16672\n16673\n16674\n16675\n16676\n16677\n16678\n16679\n16680\n16681\n16682\n16683\n16684\n16685\n16686\n16687\n16688\n16689\n16690\n16691\n16692\n16693\n16694\n16695\n16696\n16697\n16698\n16699\n16700\n16701\n16702\n16703\n16704\n16705\n16706\n16707\n16708\n16709\n16710\n16711\n16712\n16713\n16714\n16715\n16716\n16717\n16718\n16719\n16720\n16721\n16722\n16723\n16724\n16725\n16726\n16727\n16728\n1672
9\n16730\n16731\n16732\n16733\n16734\n16735\n16736\n16737\n16738\n16739\n16740\n16741\n16742\n16743\n16744\n16745\n16746\n16747\n16748\n16749\n16750\n16751\n16752\n16753\n16754\n16755\n16756\n16757\n16758\n16759\n16760\n16761\n16762\n16763\n16764\n16765\n16766\n16767\n16768\n16769\n16770\n16771\n16772\n16773\n16774\n16775\n16776\n16777\n16778\n16779\n16780\n16781\n16782\n16783\n16784\n16785\n16786\n16787\n16788\n16789\n16790\n16791\n16792\n16793\n16794\n16795\n16796\n16797\n16798\n16799\n16800\n16801\n16802\n16803\n16804\n16805\n16806\n16807\n16808\n16809\n16810\n16811\n16812\n16813\n16814\n16815\n16816\n16817\n16818\n16819\n16820\n16821\n16822\n16823\n16824\n16825\n16826\n16827\n16828\n16829\n16830\n16831\n16832\n16833\n16834\n16835\n16836\n16837\n16838\n16839\n16840\n16841\n16842\n16843\n16844\n16845\n16846\n16847\n16848\n16849\n16850\n16851\n16852\n16853\n16854\n16855\n16856\n16857\n16858\n16859\n16860\n16861\n16862\n16863\n16864\n16865\n16866\n16867\n16868\n16869\n16870\n16871\n16872\n16873\n16874\n16875\n16876\n16877\n16878\n16879\n16880\n16881\n16882\n16883\n16884\n16885\n16886\n16887\n16888\n16889\n16890\n16891\n16892\n16893\n16894\n16895\n16896\n16897\n16898\n16899\n16900\n16901\n16902\n16903\n16904\n16905\n16906\n16907\n16908\n16909\n16910\n16911\n16912\n16913\n16914\n16915\n16916\n16917\n16918\n16919\n16920\n16921\n16922\n16923\n16924\n16925\n16926\n16927\n16928\n16929\n16930\n16931\n16932\n16933\n16934\n16935\n16936\n16937\n16938\n16939\n16940\n16941\n16942\n16943\n16944\n16945\n16946\n16947\n16948\n16949\n16950\n16951\n16952\n16953\n16954\n16955\n16956\n16957\n16958\n16959\n16960\n16961\n16962\n16963\n16964\n16965\n16966\n16967\n16968\n16969\n16970\n16971\n16972\n16973\n16974\n16975\n16976\n16977\n16978\n16979\n16980\n16981\n16982\n16983\n16984\n16985\n16986\n16987\n16988\n16989\n16990\n16991\n16992\n16993\n16994\n16995\n16996\n16997\n16998\n16999\n17000\n17001\n17002\n17003\n17004\n17005\n17006\n17007\n17008\n17009\n17010\n17011\n17012\n17013\n17014\n17
015\n17016\n17017\n17018\n17019\n17020\n17021\n17022\n17023\n17024\n17025\n17026\n17027\n17028\n17029\n17030\n17031\n17032\n17033\n17034\n17035\n17036\n17037\n17038\n17039\n17040\n17041\n17042\n17043\n17044\n17045\n17046\n17047\n17048\n17049\n17050\n17051\n17052\n17053\n17054\n17055\n17056\n17057\n17058\n17059\n17060\n17061\n17062\n17063\n17064\n17065\n17066\n17067\n17068\n17069\n17070\n17071\n17072\n17073\n17074\n17075\n17076\n17077\n17078\n17079\n17080\n17081\n17082\n17083\n17084\n17085\n17086\n17087\n17088\n17089\n17090\n17091\n17092\n17093\n17094\n17095\n17096\n17097\n17098\n17099\n17100\n17101\n17102\n17103\n17104\n17105\n17106\n17107\n17108\n17109\n17110\n17111\n17112\n17113\n17114\n17115\n17116\n17117\n17118\n17119\n17120\n17121\n17122\n17123\n17124\n17125\n17126\n17127\n17128\n17129\n17130\n17131\n17132\n17133\n17134\n17135\n17136\n17137\n17138\n17139\n17140\n17141\n17142\n17143\n17144\n17145\n17146\n17147\n17148\n17149\n17150\n17151\n17152\n17153\n17154\n17155\n17156\n17157\n17158\n17159\n17160\n17161\n17162\n17163\n17164\n17165\n17166\n17167\n17168\n17169\n17170\n17171\n17172\n17173\n17174\n17175\n17176\n17177\n17178\n17179\n17180\n17181\n17182\n17183\n17184\n17185\n17186\n17187\n17188\n17189\n17190\n17191\n17192\n17193\n17194\n17195\n17196\n17197\n17198\n17199\n17200\n17201\n17202\n17203\n17204\n17205\n17206\n17207\n17208\n17209\n17210\n17211\n17212\n17213\n17214\n17215\n17216\n17217\n17218\n17219\n17220\n17221\n17222\n17223\n17224\n17225\n17226\n17227\n17228\n17229\n17230\n17231\n17232\n17233\n17234\n17235\n17236\n17237\n17238\n17239\n17240\n17241\n17242\n17243\n17244\n17245\n17246\n17247\n17248\n17249\n17250\n17251\n17252\n17253\n17254\n17255\n17256\n17257\n17258\n17259\n17260\n17261\n17262\n17263\n17264\n17265\n17266\n17267\n17268\n17269\n17270\n17271\n17272\n17273\n17274\n17275\n17276\n17277\n17278\n17279\n17280\n17281\n17282\n17283\n17284\n17285\n17286\n17287\n17288\n17289\n17290\n17291\n17292\n17293\n17294\n17295\n17296\n17297\n17298\n17299\n17300\n
17301\n17302\n17303\n17304\n17305\n17306\n17307\n17308\n17309\n17310\n17311\n17312\n17313\n17314\n17315\n17316\n17317\n17318\n17319\n17320\n17321\n17322\n17323\n17324\n17325\n17326\n17327\n17328\n17329\n17330\n17331\n17332\n17333\n17334\n17335\n17336\n17337\n17338\n17339\n17340\n17341\n17342\n17343\n17344\n17345\n17346\n17347\n17348\n17349\n17350\n17351\n17352\n17353\n17354\n17355\n17356\n17357\n17358\n17359\n17360\n17361\n17362\n17363\n17364\n17365\n17366\n17367\n17368\n17369\n17370\n17371\n17372\n17373\n17374\n17375\n17376\n17377\n17378\n17379\n17380\n17381\n17382\n17383\n17384\n17385\n17386\n17387\n17388\n17389\n17390\n17391\n17392\n17393\n17394\n17395\n17396\n17397\n17398\n17399\n17400\n17401\n17402\n17403\n17404\n17405\n17406\n17407\n17408\n17409\n17410\n17411\n17412\n17413\n17414\n17415\n17416\n17417\n17418\n17419\n17420\n17421\n17422\n17423\n17424\n17425\n17426\n17427\n17428\n17429\n17430\n17431\n17432\n17433\n17434\n17435\n17436\n17437\n17438\n17439\n17440\n17441\n17442\n17443\n17444\n17445\n17446\n17447\n17448\n17449\n17450\n17451\n17452\n17453\n17454\n17455\n17456\n17457\n17458\n17459\n17460\n17461\n17462\n17463\n17464\n17465\n17466\n17467\n17468\n17469\n17470\n17471\n17472\n17473\n17474\n17475\n17476\n17477\n17478\n17479\n17480\n17481\n17482\n17483\n17484\n17485\n17486\n17487\n17488\n17489\n17490\n17491\n17492\n17493\n17494\n17495\n17496\n17497\n17498\n17499\n17500\n17501\n17502\n17503\n17504\n17505\n17506\n17507\n17508\n17509\n17510\n17511\n17512\n17513\n17514\n17515\n17516\n17517\n17518\n17519\n17520\n17521\n17522\n17523\n17524\n17525\n17526\n17527\n17528\n17529\n17530\n17531\n17532\n17533\n17534\n17535\n17536\n17537\n17538\n17539\n17540\n17541\n17542\n17543\n17544\n17545\n17546\n17547\n17548\n17549\n17550\n17551\n17552\n17553\n17554\n17555\n17556\n17557\n17558\n17559\n17560\n17561\n17562\n17563\n17564\n17565\n17566\n17567\n17568\n17569\n17570\n17571\n17572\n17573\n17574\n17575\n17576\n17577\n17578\n17579\n17580\n17581\n17582\n17583\n17584\n17585\n17586
\n17587\n17588\n17589\n17590\n17591\n17592\n17593\n17594\n17595\n17596\n17597\n17598\n17599\n17600\n17601\n17602\n17603\n17604\n17605\n17606\n17607\n17608\n17609\n17610\n17611\n17612\n17613\n17614\n17615\n17616\n17617\n17618\n17619\n17620\n17621\n17622\n17623\n17624\n17625\n17626\n17627\n17628\n17629\n17630\n17631\n17632\n17633\n17634\n17635\n17636\n17637\n17638\n17639\n17640\n17641\n17642\n17643\n17644\n17645\n17646\n17647\n17648\n17649\n17650\n17651\n17652\n17653\n17654\n17655\n17656\n17657\n17658\n17659\n17660\n17661\n17662\n17663\n17664\n17665\n17666\n17667\n17668\n17669\n17670\n17671\n17672\n17673\n17674\n17675\n17676\n17677\n17678\n17679\n17680\n17681\n17682\n17683\n17684\n17685\n17686\n17687\n17688\n17689\n17690\n17691\n17692\n17693\n17694\n17695\n17696\n17697\n17698\n17699\n17700\n17701\n17702\n17703\n17704\n17705\n17706\n17707\n17708\n17709\n17710\n17711\n17712\n17713\n17714\n17715\n17716\n17717\n17718\n17719\n17720\n17721\n17722\n17723\n17724\n17725\n17726\n17727\n17728\n17729\n17730\n17731\n17732\n17733\n17734\n17735\n17736\n17737\n17738\n17739\n17740\n17741\n17742\n17743\n17744\n17745\n17746\n17747\n17748\n17749\n17750\n17751\n17752\n17753\n17754\n17755\n17756\n17757\n17758\n17759\n17760\n17761\n17762\n17763\n17764\n17765\n17766\n17767\n17768\n17769\n17770\n17771\n17772\n17773\n17774\n17775\n17776\n17777\n17778\n17779\n17780\n17781\n17782\n17783\n17784\n17785\n17786\n17787\n17788\n17789\n17790\n17791\n17792\n17793\n17794\n17795\n17796\n17797\n17798\n17799\n17800\n17801\n17802\n17803\n17804\n17805\n17806\n17807\n17808\n17809\n17810\n17811\n17812\n17813\n17814\n17815\n17816\n17817\n17818\n17819\n17820\n17821\n17822\n17823\n17824\n17825\n17826\n17827\n17828\n17829\n17830\n17831\n17832\n17833\n17834\n17835\n17836\n17837\n17838\n17839\n17840\n17841\n17842\n17843\n17844\n17845\n17846\n17847\n17848\n17849\n17850\n17851\n17852\n17853\n17854\n17855\n17856\n17857\n17858\n17859\n17860\n17861\n17862\n17863\n17864\n17865\n17866\n17867\n17868\n17869\n17870\n17871\n178
72\n17873\n17874\n17875\n17876\n17877\n17878\n17879\n17880\n17881\n17882\n17883\n17884\n17885\n17886\n17887\n17888\n17889\n17890\n17891\n17892\n17893\n17894\n17895\n17896\n17897\n17898\n17899\n17900\n17901\n17902\n17903\n17904\n17905\n17906\n17907\n17908\n17909\n17910\n17911\n17912\n17913\n17914\n17915\n17916\n17917\n17918\n17919\n17920\n17921\n17922\n17923\n17924\n17925\n17926\n17927\n17928\n17929\n17930\n17931\n17932\n17933\n17934\n17935\n17936\n17937\n17938\n17939\n17940\n17941\n17942\n17943\n17944\n17945\n17946\n17947\n17948\n17949\n17950\n17951\n17952\n17953\n17954\n17955\n17956\n17957\n17958\n17959\n17960\n17961\n17962\n17963\n17964\n17965\n17966\n17967\n17968\n17969\n17970\n17971\n17972\n17973\n17974\n17975\n17976\n17977\n17978\n17979\n17980\n17981\n17982\n17983\n17984\n17985\n17986\n17987\n17988\n17989\n17990\n17991\n17992\n17993\n17994\n17995\n17996\n17997\n17998\n17999\n18000\n18001\n18002\n18003\n18004\n18005\n18006\n18007\n18008\n18009\n18010\n18011\n18012\n18013\n18014\n18015\n18016\n18017\n18018\n18019\n18020\n18021\n18022\n18023\n18024\n18025\n18026\n18027\n18028\n18029\n18030\n18031\n18032\n18033\n18034\n18035\n18036\n18037\n18038\n18039\n18040\n18041\n18042\n18043\n18044\n18045\n18046\n18047\n18048\n18049\n18050\n18051\n18052\n18053\n18054\n18055\n18056\n18057\n18058\n18059\n18060\n18061\n18062\n18063\n18064\n18065\n18066\n18067\n18068\n18069\n18070\n18071\n18072\n18073\n18074\n18075\n18076\n18077\n18078\n18079\n18080\n18081\n18082\n18083\n18084\n18085\n18086\n18087\n18088\n18089\n18090\n18091\n18092\n18093\n18094\n18095\n18096\n18097\n18098\n18099\n18100\n18101\n18102\n18103\n18104\n18105\n18106\n18107\n18108\n18109\n18110\n18111\n18112\n18113\n18114\n18115\n18116\n18117\n18118\n18119\n18120\n18121\n18122\n18123\n18124\n18125\n18126\n18127\n18128\n18129\n18130\n18131\n18132\n18133\n18134\n18135\n18136\n18137\n18138\n18139\n18140\n18141\n18142\n18143\n18144\n18145\n18146\n18147\n18148\n18149\n18150\n18151\n18152\n18153\n18154\n18155\n18156\n18157\n1
8158\n18159\n18160\n18161\n18162\n18163\n18164\n18165\n18166\n18167\n18168\n18169\n18170\n18171\n18172\n18173\n18174\n18175\n18176\n18177\n18178\n18179\n18180\n18181\n18182\n18183\n18184\n18185\n18186\n18187\n18188\n18189\n18190\n18191\n18192\n18193\n18194\n18195\n18196\n18197\n18198\n18199\n18200\n18201\n18202\n18203\n18204\n18205\n18206\n18207\n18208\n18209\n18210\n18211\n18212\n18213\n18214\n18215\n18216\n18217\n18218\n18219\n18220\n18221\n18222\n18223\n18224\n18225\n18226\n18227\n18228\n18229\n18230\n18231\n18232\n18233\n18234\n18235\n18236\n18237\n18238\n18239\n18240\n18241\n18242\n18243\n18244\n18245\n18246\n18247\n18248\n18249\n18250\n18251\n18252\n18253\n18254\n18255\n18256\n18257\n18258\n18259\n18260\n18261\n18262\n18263\n18264\n18265\n18266\n18267\n18268\n18269\n18270\n18271\n18272\n18273\n18274\n18275\n18276\n18277\n18278\n18279\n18280\n18281\n18282\n18283\n18284\n18285\n18286\n18287\n18288\n18289\n18290\n18291\n18292\n18293\n18294\n18295\n18296\n18297\n18298\n18299\n18300\n18301\n18302\n18303\n18304\n18305\n18306\n18307\n18308\n18309\n18310\n18311\n18312\n18313\n18314\n18315\n18316\n18317\n18318\n18319\n18320\n18321\n18322\n18323\n18324\n18325\n18326\n18327\n18328\n18329\n18330\n18331\n18332\n18333\n18334\n18335\n18336\n18337\n18338\n18339\n18340\n18341\n18342\n18343\n18344\n18345\n18346\n18347\n18348\n18349\n18350\n18351\n18352\n18353\n18354\n18355\n18356\n18357\n18358\n18359\n18360\n18361\n18362\n18363\n18364\n18365\n18366\n18367\n18368\n18369\n18370\n18371\n18372\n18373\n18374\n18375\n18376\n18377\n18378\n18379\n18380\n18381\n18382\n18383\n18384\n18385\n18386\n18387\n18388\n18389\n18390\n18391\n18392\n18393\n18394\n18395\n18396\n18397\n18398\n18399\n18400\n18401\n18402\n18403\n18404\n18405\n18406\n18407\n18408\n18409\n18410\n18411\n18412\n18413\n18414\n18415\n18416\n18417\n18418\n18419\n18420\n18421\n18422\n18423\n18424\n18425\n18426\n18427\n18428\n18429\n18430\n18431\n18432\n18433\n18434\n18435\n18436\n18437\n18438\n18439\n18440\n18441\n18442\n18443\
n18444\n18445\n18446\n18447\n18448\n18449\n18450\n18451\n18452\n18453\n18454\n18455\n18456\n18457\n18458\n18459\n18460\n18461\n18462\n18463\n18464\n18465\n18466\n18467\n18468\n18469\n18470\n18471\n18472\n18473\n18474\n18475\n18476\n18477\n18478\n18479\n18480\n18481\n18482\n18483\n18484\n18485\n18486\n18487\n18488\n18489\n18490\n18491\n18492\n18493\n18494\n18495\n18496\n18497\n18498\n18499\n18500\n18501\n18502\n18503\n18504\n18505\n18506\n18507\n18508\n18509\n18510\n18511\n18512\n18513\n18514\n18515\n18516\n18517\n18518\n18519\n18520\n18521\n18522\n18523\n18524\n18525\n18526\n18527\n18528\n18529\n18530\n18531\n18532\n18533\n18534\n18535\n18536\n18537\n18538\n18539\n18540\n18541\n18542\n18543\n18544\n18545\n18546\n18547\n18548\n18549\n18550\n18551\n18552\n18553\n18554\n18555\n18556\n18557\n18558\n18559\n18560\n18561\n18562\n18563\n18564\n18565\n18566\n18567\n18568\n18569\n18570\n18571\n18572\n18573\n18574\n18575\n18576\n18577\n18578\n18579\n18580\n18581\n18582\n18583\n18584\n18585\n18586\n18587\n18588\n18589\n18590\n18591\n18592\n18593\n18594\n18595\n18596\n18597\n18598\n18599\n18600\n18601\n18602\n18603\n18604\n18605\n18606\n18607\n18608\n18609\n18610\n18611\n18612\n18613\n18614\n18615\n18616\n18617\n18618\n18619\n18620\n18621\n18622\n18623\n18624\n18625\n18626\n18627\n18628\n18629\n18630\n18631\n18632\n18633\n18634\n18635\n18636\n18637\n18638\n18639\n18640\n18641\n18642\n18643\n18644\n18645\n18646\n18647\n18648\n18649\n18650\n18651\n18652\n18653\n18654\n18655\n18656\n18657\n18658\n18659\n18660\n18661\n18662\n18663\n18664\n18665\n18666\n18667\n18668\n18669\n18670\n18671\n18672\n18673\n18674\n18675\n18676\n18677\n18678\n18679\n18680\n18681\n18682\n18683\n18684\n18685\n18686\n18687\n18688\n18689\n18690\n18691\n18692\n18693\n18694\n18695\n18696\n18697\n18698\n18699\n18700\n18701\n18702\n18703\n18704\n18705\n18706\n18707\n18708\n18709\n18710\n18711\n18712\n18713\n18714\n18715\n18716\n18717\n18718\n18719\n18720\n18721\n18722\n18723\n18724\n18725\n18726\n18727\n18728\n1872
9\n18730\n18731\n18732\n18733\n18734\n18735\n18736\n18737\n18738\n18739\n18740\n18741\n18742\n18743\n18744\n18745\n18746\n18747\n18748\n18749\n18750\n18751\n18752\n18753\n18754\n18755\n18756\n18757\n18758\n18759\n18760\n18761\n18762\n18763\n18764\n18765\n18766\n18767\n18768\n18769\n18770\n18771\n18772\n18773\n18774\n18775\n18776\n18777\n18778\n18779\n18780\n18781\n18782\n18783\n18784\n18785\n18786\n18787\n18788\n18789\n18790\n18791\n18792\n18793\n18794\n18795\n18796\n18797\n18798\n18799\n18800\n18801\n18802\n18803\n18804\n18805\n18806\n18807\n18808\n18809\n18810\n18811\n18812\n18813\n18814\n18815\n18816\n18817\n18818\n18819\n18820\n18821\n18822\n18823\n18824\n18825\n18826\n18827\n18828\n18829\n18830\n18831\n18832\n18833\n18834\n18835\n18836\n18837\n18838\n18839\n18840\n18841\n18842\n18843\n18844\n18845\n18846\n18847\n18848\n18849\n18850\n18851\n18852\n18853\n18854\n18855\n18856\n18857\n18858\n18859\n18860\n18861\n18862\n18863\n18864\n18865\n18866\n18867\n18868\n18869\n18870\n18871\n18872\n18873\n18874\n18875\n18876\n18877\n18878\n18879\n18880\n18881\n18882\n18883\n18884\n18885\n18886\n18887\n18888\n18889\n18890\n18891\n18892\n18893\n18894\n18895\n18896\n18897\n18898\n18899\n18900\n18901\n18902\n18903\n18904\n18905\n18906\n18907\n18908\n18909\n18910\n18911\n18912\n18913\n18914\n18915\n18916\n18917\n18918\n18919\n18920\n18921\n18922\n18923\n18924\n18925\n18926\n18927\n18928\n18929\n18930\n18931\n18932\n18933\n18934\n18935\n18936\n18937\n18938\n18939\n18940\n18941\n18942\n18943\n18944\n18945\n18946\n18947\n18948\n18949\n18950\n18951\n18952\n18953\n18954\n18955\n18956\n18957\n18958\n18959\n18960\n18961\n18962\n18963\n18964\n18965\n18966\n18967\n18968\n18969\n18970\n18971\n18972\n18973\n18974\n18975\n18976\n18977\n18978\n18979\n18980\n18981\n18982\n18983\n18984\n18985\n18986\n18987\n18988\n18989\n18990\n18991\n18992\n18993\n18994\n18995\n18996\n18997\n18998\n18999\n19000\n19001\n19002\n19003\n19004\n19005\n19006\n19007\n19008\n19009\n19010\n19011\n19012\n19013\n19014\n19
015\n19016\n19017\n19018\n19019\n19020\n19021\n19022\n19023\n19024\n19025\n19026\n19027\n19028\n19029\n19030\n19031\n19032\n19033\n19034\n19035\n19036\n19037\n19038\n19039\n19040\n19041\n19042\n19043\n19044\n19045\n19046\n19047\n19048\n19049\n19050\n19051\n19052\n19053\n19054\n19055\n19056\n19057\n19058\n19059\n19060\n19061\n19062\n19063\n19064\n19065\n19066\n19067\n19068\n19069\n19070\n19071\n19072\n19073\n19074\n19075\n19076\n19077\n19078\n19079\n19080\n19081\n19082\n19083\n19084\n19085\n19086\n19087\n19088\n19089\n19090\n19091\n19092\n19093\n19094\n19095\n19096\n19097\n19098\n19099\n19100\n19101\n19102\n19103\n19104\n19105\n19106\n19107\n19108\n19109\n19110\n19111\n19112\n19113\n19114\n19115\n19116\n19117\n19118\n19119\n19120\n19121\n19122\n19123\n19124\n19125\n19126\n19127\n19128\n19129\n19130\n19131\n19132\n19133\n19134\n19135\n19136\n19137\n19138\n19139\n19140\n19141\n19142\n19143\n19144\n19145\n19146\n19147\n19148\n19149\n19150\n19151\n19152\n19153\n19154\n19155\n19156\n19157\n19158\n19159\n19160\n19161\n19162\n19163\n19164\n19165\n19166\n19167\n19168\n19169\n19170\n19171\n19172\n19173\n19174\n19175\n19176\n19177\n19178\n19179\n19180\n19181\n19182\n19183\n19184\n19185\n19186\n19187\n19188\n19189\n19190\n19191\n19192\n19193\n19194\n19195\n19196\n19197\n19198\n19199\n19200\n19201\n19202\n19203\n19204\n19205\n19206\n19207\n19208\n19209\n19210\n19211\n19212\n19213\n19214\n19215\n19216\n19217\n19218\n19219\n19220\n19221\n19222\n19223\n19224\n19225\n19226\n19227\n19228\n19229\n19230\n19231\n19232\n19233\n19234\n19235\n19236\n19237\n19238\n19239\n19240\n19241\n19242\n19243\n19244\n19245\n19246\n19247\n19248\n19249\n19250\n19251\n19252\n19253\n19254\n19255\n19256\n19257\n19258\n19259\n19260\n19261\n19262\n19263\n19264\n19265\n19266\n19267\n19268\n19269\n19270\n19271\n19272\n19273\n19274\n19275\n19276\n19277\n19278\n19279\n19280\n19281\n19282\n19283\n19284\n19285\n19286\n19287\n19288\n19289\n19290\n19291\n19292\n19293\n19294\n19295\n19296\n19297\n19298\n19299\n19300\n
19301\n19302\n19303\n19304\n19305\n19306\n19307\n19308\n19309\n19310\n19311\n19312\n19313\n19314\n19315\n19316\n19317\n19318\n19319\n19320\n19321\n19322\n19323\n19324\n19325\n19326\n19327\n19328\n19329\n19330\n19331\n19332\n19333\n19334\n19335\n19336\n19337\n19338\n19339\n19340\n19341\n19342\n19343\n19344\n19345\n19346\n19347\n19348\n19349\n19350\n19351\n19352\n19353\n19354\n19355\n19356\n19357\n19358\n19359\n19360\n19361\n19362\n19363\n19364\n19365\n19366\n19367\n19368\n19369\n19370\n19371\n19372\n19373\n19374\n19375\n19376\n19377\n19378\n19379\n19380\n19381\n19382\n19383\n19384\n19385\n19386\n19387\n19388\n19389\n19390\n19391\n19392\n19393\n19394\n19395\n19396\n19397\n19398\n19399\n19400\n19401\n19402\n19403\n19404\n19405\n19406\n19407\n19408\n19409\n19410\n19411\n19412\n19413\n19414\n19415\n19416\n19417\n19418\n19419\n19420\n19421\n19422\n19423\n19424\n19425\n19426\n19427\n19428\n19429\n19430\n19431\n19432\n19433\n19434\n19435\n19436\n19437\n19438\n19439\n19440\n19441\n19442\n19443\n19444\n19445\n19446\n19447\n19448\n19449\n19450\n19451\n19452\n19453\n19454\n19455\n19456\n19457\n19458\n19459\n19460\n19461\n19462\n19463\n19464\n19465\n19466\n19467\n19468\n19469\n19470\n19471\n19472\n19473\n19474\n19475\n19476\n19477\n19478\n19479\n19480\n19481\n19482\n19483\n19484\n19485\n19486\n19487\n19488\n19489\n19490\n19491\n19492\n19493\n19494\n19495\n19496\n19497\n19498\n19499\n19500\n19501\n19502\n19503\n19504\n19505\n19506\n19507\n19508\n19509\n19510\n19511\n19512\n19513\n19514\n19515\n19516\n19517\n19518\n19519\n19520\n19521\n19522\n19523\n19524\n19525\n19526\n19527\n19528\n19529\n19530\n19531\n19532\n19533\n19534\n19535\n19536\n19537\n19538\n19539\n19540\n19541\n19542\n19543\n19544\n19545\n19546\n19547\n19548\n19549\n19550\n19551\n19552\n19553\n19554\n19555\n19556\n19557\n19558\n19559\n19560\n19561\n19562\n19563\n19564\n19565\n19566\n19567\n19568\n19569\n19570\n19571\n19572\n19573\n19574\n19575\n19576\n19577\n19578\n19579\n19580\n19581\n19582\n19583\n19584\n19585\n19586
\n19587\n19588\n19589\n19590\n19591\n19592\n19593\n19594\n19595\n19596\n19597\n19598\n19599\n19600\n19601\n19602\n19603\n19604\n19605\n19606\n19607\n19608\n19609\n19610\n19611\n19612\n19613\n19614\n19615\n19616\n19617\n19618\n19619\n19620\n19621\n19622\n19623\n19624\n19625\n19626\n19627\n19628\n19629\n19630\n19631\n19632\n19633\n19634\n19635\n19636\n19637\n19638\n19639\n19640\n19641\n19642\n19643\n19644\n19645\n19646\n19647\n19648\n19649\n19650\n19651\n19652\n19653\n19654\n19655\n19656\n19657\n19658\n19659\n19660\n19661\n19662\n19663\n19664\n19665\n19666\n19667\n19668\n19669\n19670\n19671\n19672\n19673\n19674\n19675\n19676\n19677\n19678\n19679\n19680\n19681\n19682\n19683\n19684\n19685\n19686\n19687\n19688\n19689\n19690\n19691\n19692\n19693\n19694\n19695\n19696\n19697\n19698\n19699\n19700\n19701\n19702\n19703\n19704\n19705\n19706\n19707\n19708\n19709\n19710\n19711\n19712\n19713\n19714\n19715\n19716\n19717\n19718\n19719\n19720\n19721\n19722\n19723\n19724\n19725\n19726\n19727\n19728\n19729\n19730\n19731\n19732\n19733\n19734\n19735\n19736\n19737\n19738\n19739\n19740\n19741\n19742\n19743\n19744\n19745\n19746\n19747\n19748\n19749\n19750\n19751\n19752\n19753\n19754\n19755\n19756\n19757\n19758\n19759\n19760\n19761\n19762\n19763\n19764\n19765\n19766\n19767\n19768\n19769\n19770\n19771\n19772\n19773\n19774\n19775\n19776\n19777\n19778\n19779\n19780\n19781\n19782\n19783\n19784\n19785\n19786\n19787\n19788\n19789\n19790\n19791\n19792\n19793\n19794\n19795\n19796\n19797\n19798\n19799\n19800\n19801\n19802\n19803\n19804\n19805\n19806\n19807\n19808\n19809\n19810\n19811\n19812\n19813\n19814\n19815\n19816\n19817\n19818\n19819\n19820\n19821\n19822\n19823\n19824\n19825\n19826\n19827\n19828\n19829\n19830\n19831\n19832\n19833\n19834\n19835\n19836\n19837\n19838\n19839\n19840\n19841\n19842\n19843\n19844\n19845\n19846\n19847\n19848\n19849\n19850\n19851\n19852\n19853\n19854\n19855\n19856\n19857\n19858\n19859\n19860\n19861\n19862\n19863\n19864\n19865\n19866\n19867\n19868\n19869\n19870\n19871\n198
72\n19873\n19874\n19875\n19876\n19877\n19878\n19879\n19880\n19881\n19882\n19883\n19884\n19885\n19886\n19887\n19888\n19889\n19890\n19891\n19892\n19893\n19894\n19895\n19896\n19897\n19898\n19899\n19900\n19901\n19902\n19903\n19904\n19905\n19906\n19907\n19908\n19909\n19910\n19911\n19912\n19913\n19914\n19915\n19916\n19917\n19918\n19919\n19920\n19921\n19922\n19923\n19924\n19925\n19926\n19927\n19928\n19929\n19930\n19931\n19932\n19933\n19934\n19935\n19936\n19937\n19938\n19939\n19940\n19941\n19942\n19943\n19944\n19945\n19946\n19947\n19948\n19949\n19950\n19951\n19952\n19953\n19954\n19955\n19956\n19957\n19958\n19959\n19960\n19961\n19962\n19963\n19964\n19965\n19966\n19967\n19968\n19969\n19970\n19971\n19972\n19973\n19974\n19975\n19976\n19977\n19978\n19979\n19980\n19981\n19982\n19983\n19984\n19985\n19986\n19987\n19988\n19989\n19990\n19991\n19992\n19993\n19994\n19995\n19996\n19997\n19998\n19999\n20000\n20001\n20002\n20003\n20004\n20005\n20006\n20007\n20008\n20009\n20010\n20011\n20012\n20013\n20014\n20015\n20016\n20017\n20018\n20019\n20020\n20021\n20022\n20023\n20024\n20025\n20026\n20027\n20028\n20029\n20030\n20031\n20032\n20033\n20034\n20035\n20036\n20037\n20038\n20039\n20040\n20041\n20042\n20043\n20044\n20045\n20046\n20047\n20048\n20049\n20050\n20051\n20052\n20053\n20054\n20055\n20056\n20057\n20058\n20059\n20060\n20061\n20062\n20063\n20064\n20065\n20066\n20067\n20068\n20069\n20070\n20071\n20072\n20073\n20074\n20075\n20076\n20077\n20078\n20079\n20080\n20081\n20082\n20083\n20084\n20085\n20086\n20087\n20088\n20089\n20090\n20091\n20092\n20093\n20094\n20095\n20096\n20097\n20098\n20099\n20100\n20101\n20102\n20103\n20104\n20105\n20106\n20107\n20108\n20109\n20110\n20111\n20112\n20113\n20114\n20115\n20116\n20117\n20118\n20119\n20120\n20121\n20122\n20123\n20124\n20125\n20126\n20127\n20128\n20129\n20130\n20131\n20132\n20133\n20134\n20135\n20136\n20137\n20138\n20139\n20140\n20141\n20142\n20143\n20144\n20145\n20146\n20147\n20148\n20149\n20150\n20151\n20152\n20153\n20154\n20155\n20156\n20157\n2
0158\n20159\n20160\n20161\n20162\n20163\n20164\n20165\n20166\n20167\n20168\n20169\n20170\n20171\n20172\n20173\n20174\n20175\n20176\n20177\n20178\n20179\n20180\n20181\n20182\n20183\n20184\n20185\n20186\n20187\n20188\n20189\n20190\n20191\n20192\n20193\n20194\n20195\n20196\n20197\n20198\n20199\n20200\n20201\n20202\n20203\n20204\n20205\n20206\n20207\n20208\n20209\n20210\n20211\n20212\n20213\n20214\n20215\n20216\n20217\n20218\n20219\n20220\n20221\n20222\n20223\n20224\n20225\n20226\n20227\n20228\n20229\n20230\n20231\n20232\n20233\n20234\n20235\n20236\n20237\n20238\n20239\n20240\n20241\n20242\n20243\n20244\n20245\n20246\n20247\n20248\n20249\n20250\n20251\n20252\n20253\n20254\n20255\n20256\n20257\n20258\n20259\n20260\n20261\n20262\n20263\n20264\n20265\n20266\n20267\n20268\n20269\n20270\n20271\n20272\n20273\n20274\n20275\n20276\n20277\n20278\n20279\n20280\n20281\n20282\n20283\n20284\n20285\n20286\n20287\n20288\n20289\n20290\n20291\n20292\n20293\n20294\n20295\n20296\n20297\n20298\n20299\n20300\n20301\n20302\n20303\n20304\n20305\n20306\n20307\n20308\n20309\n20310\n20311\n20312\n20313\n20314\n20315\n20316\n20317\n20318\n20319\n20320\n20321\n20322\n20323\n20324\n20325\n20326\n20327\n20328\n20329\n20330\n20331\n20332\n20333\n20334\n20335\n20336\n20337\n20338\n20339\n20340\n20341\n20342\n20343\n20344\n20345\n20346\n20347\n20348\n20349\n20350\n20351\n20352\n20353\n20354\n20355\n20356\n20357\n20358\n20359\n20360\n20361\n20362\n20363\n20364\n20365\n20366\n20367\n20368\n20369\n20370\n20371\n20372\n20373\n20374\n20375\n20376\n20377\n20378\n20379\n20380\n20381\n20382\n20383\n20384\n20385\n20386\n20387\n20388\n20389\n20390\n20391\n20392\n20393\n20394\n20395\n20396\n20397\n20398\n20399\n20400\n20401\n20402\n20403\n20404\n20405\n20406\n20407\n20408\n20409\n20410\n20411\n20412\n20413\n20414\n20415\n20416\n20417\n20418\n20419\n20420\n20421\n20422\n20423\n20424\n20425\n20426\n20427\n20428\n20429\n20430\n20431\n20432\n20433\n20434\n20435\n20436\n20437\n20438\n20439\n20440\n20441\n20442\n20443\
n20444\n20445\n20446\n20447\n20448\n20449\n20450\n20451\n20452\n20453\n20454\n20455\n20456\n20457\n20458\n20459\n20460\n20461\n20462\n20463\n20464\n20465\n20466\n20467\n20468\n20469\n20470\n20471\n20472\n20473\n20474\n20475\n20476\n20477\n20478\n20479\n20480\n20481\n20482\n20483\n20484\n20485\n20486\n20487\n20488\n20489\n20490\n20491\n20492\n20493\n20494\n20495\n20496\n20497\n20498\n20499\n20500\n20501\n20502\n20503\n20504\n20505\n20506\n20507\n20508\n20509\n20510\n20511\n20512\n20513\n20514\n20515\n20516\n20517\n20518\n20519\n20520\n20521\n20522\n20523\n20524\n20525\n20526\n20527\n20528\n20529\n20530\n20531\n20532\n20533\n20534\n20535\n20536\n20537\n20538\n20539\n20540\n20541\n20542\n20543\n20544\n20545\n20546\n20547\n20548\n20549\n20550\n20551\n20552\n20553\n20554\n20555\n20556\n20557\n20558\n20559\n20560\n20561\n20562\n20563\n20564\n20565\n20566\n20567\n20568\n20569\n20570\n20571\n20572\n20573\n20574\n20575\n20576\n20577\n20578\n20579\n20580\n20581\n20582\n20583\n20584\n20585\n20586\n20587\n20588\n20589\n20590\n20591\n20592\n20593\n20594\n20595\n20596\n20597\n20598\n20599\n20600\n20601\n20602\n20603\n20604\n20605\n20606\n20607\n20608\n20609\n20610\n20611\n20612\n20613\n20614\n20615\n20616\n20617\n20618\n20619\n20620\n20621\n20622\n20623\n20624\n20625\n20626\n20627\n20628\n20629\n20630\n20631\n20632\n20633\n20634\n20635\n20636\n20637\n20638\n20639\n20640\n20641\n20642\n20643\n20644\n20645\n20646\n20647\n20648\n20649\n20650\n20651\n20652\n20653\n20654\n20655\n20656\n20657\n20658\n20659\n20660\n20661\n20662\n20663\n20664\n20665\n20666\n20667\n20668\n20669\n20670\n20671\n20672\n20673\n20674\n20675\n20676\n20677\n20678\n20679\n20680\n20681\n20682\n20683\n20684\n20685\n20686\n20687\n20688\n20689\n20690\n20691\n20692\n20693\n20694\n20695\n20696\n20697\n20698\n20699\n20700\n20701\n20702\n20703\n20704\n20705\n20706\n20707\n20708\n20709\n20710\n20711\n20712\n20713\n20714\n20715\n20716\n20717\n20718\n20719\n20720\n20721\n20722\n20723\n20724\n20725\n20726\n20727\n20728\n2072
9\n20730\n20731\n20732\n20733\n20734\n20735\n20736\n20737\n20738\n20739\n20740\n20741\n20742\n20743\n20744\n20745\n20746\n20747\n20748\n20749\n20750\n20751\n20752\n20753\n20754\n20755\n20756\n20757\n20758\n20759\n20760\n20761\n20762\n20763\n20764\n20765\n20766\n20767\n20768\n20769\n20770\n20771\n20772\n20773\n20774\n20775\n20776\n20777\n20778\n20779\n20780\n20781\n20782\n20783\n20784\n20785\n20786\n20787\n20788\n20789\n20790\n20791\n20792\n20793\n20794\n20795\n20796\n20797\n20798\n20799\n20800\n20801\n20802\n20803\n20804\n20805\n20806\n20807\n20808\n20809\n20810\n20811\n20812\n20813\n20814\n20815\n20816\n20817\n20818\n20819\n20820\n20821\n20822\n20823\n20824\n20825\n20826\n20827\n20828\n20829\n20830\n20831\n20832\n20833\n20834\n20835\n20836\n20837\n20838\n20839\n20840\n20841\n20842\n20843\n20844\n20845\n20846\n20847\n20848\n20849\n20850\n20851\n20852\n20853\n20854\n20855\n20856\n20857\n20858\n20859\n20860\n20861\n20862\n20863\n20864\n20865\n20866\n20867\n20868\n20869\n20870\n20871\n20872\n20873\n20874\n20875\n20876\n20877\n20878\n20879\n20880\n20881\n20882\n20883\n20884\n20885\n20886\n20887\n20888\n20889\n20890\n20891\n20892\n20893\n20894\n20895\n20896\n20897\n20898\n20899\n20900\n20901\n20902\n20903\n20904\n20905\n20906\n20907\n20908\n20909\n20910\n20911\n20912\n20913\n20914\n20915\n20916\n20917\n20918\n20919\n20920\n20921\n20922\n20923\n20924\n20925\n20926\n20927\n20928\n20929\n20930\n20931\n20932\n20933\n20934\n20935\n20936\n20937\n20938\n20939\n20940\n20941\n20942\n20943\n20944\n20945\n20946\n20947\n20948\n20949\n20950\n20951\n20952\n20953\n20954\n20955\n20956\n20957\n20958\n20959\n20960\n20961\n20962\n20963\n20964\n20965\n20966\n20967\n20968\n20969\n20970\n20971\n20972\n20973\n20974\n20975\n20976\n20977\n20978\n20979\n20980\n20981\n20982\n20983\n20984\n20985\n20986\n20987\n20988\n20989\n20990\n20991\n20992\n20993\n20994\n20995\n20996\n20997\n20998\n20999\n21000\n21001\n21002\n21003\n21004\n21005\n21006\n21007\n21008\n21009\n21010\n21011\n21012\n21013\n21014\n21
015\n21016\n21017\n21018\n21019\n21020\n21021\n21022\n21023\n21024\n21025\n21026\n21027\n21028\n21029\n21030\n21031\n21032\n21033\n21034\n21035\n21036\n21037\n21038\n21039\n21040\n21041\n21042\n21043\n21044\n21045\n21046\n21047\n21048\n21049\n21050\n21051\n21052\n21053\n21054\n21055\n21056\n21057\n21058\n21059\n21060\n21061\n21062\n21063\n21064\n21065\n21066\n21067\n21068\n21069\n21070\n21071\n21072\n21073\n21074\n21075\n21076\n21077\n21078\n21079\n21080\n21081\n21082\n21083\n21084\n21085\n21086\n21087\n21088\n21089\n21090\n21091\n21092\n21093\n21094\n21095\n21096\n21097\n21098\n21099\n21100\n21101\n21102\n21103\n21104\n21105\n21106\n21107\n21108\n21109\n21110\n21111\n21112\n21113\n21114\n21115\n21116\n21117\n21118\n21119\n21120\n21121\n21122\n21123\n21124\n21125\n21126\n21127\n21128\n21129\n21130\n21131\n21132\n21133\n21134\n21135\n21136\n21137\n21138\n21139\n21140\n21141\n21142\n21143\n21144\n21145\n21146\n21147\n21148\n21149\n21150\n21151\n21152\n21153\n21154\n21155\n21156\n21157\n21158\n21159\n21160\n21161\n21162\n21163\n21164\n21165\n21166\n21167\n21168\n21169\n21170\n21171\n21172\n21173\n21174\n21175\n21176\n21177\n21178\n21179\n21180\n21181\n21182\n21183\n21184\n21185\n21186\n21187\n21188\n21189\n21190\n21191\n21192\n21193\n21194\n21195\n21196\n21197\n21198\n21199\n21200\n21201\n21202\n21203\n21204\n21205\n21206\n21207\n21208\n21209\n21210\n21211\n21212\n21213\n21214\n21215\n21216\n21217\n21218\n21219\n21220\n21221\n21222\n21223\n21224\n21225\n21226\n21227\n21228\n21229\n21230\n21231\n21232\n21233\n21234\n21235\n21236\n21237\n21238\n21239\n21240\n21241\n21242\n21243\n21244\n21245\n21246\n21247\n21248\n21249\n21250\n21251\n21252\n21253\n21254\n21255\n21256\n21257\n21258\n21259\n21260\n21261\n21262\n21263\n21264\n21265\n21266\n21267\n21268\n21269\n21270\n21271\n21272\n21273\n21274\n21275\n21276\n21277\n21278\n21279\n21280\n21281\n21282\n21283\n21284\n21285\n21286\n21287\n21288\n21289\n21290\n21291\n21292\n21293\n21294\n21295\n21296\n21297\n21298\n21299\n21300\n
21301\n21302\n21303\n21304\n21305\n21306\n21307\n21308\n21309\n21310\n21311\n21312\n21313\n21314\n21315\n21316\n21317\n21318\n21319\n21320\n21321\n21322\n21323\n21324\n21325\n21326\n21327\n21328\n21329\n21330\n21331\n21332\n21333\n21334\n21335\n21336\n21337\n21338\n21339\n21340\n21341\n21342\n21343\n21344\n21345\n21346\n21347\n21348\n21349\n21350\n21351\n21352\n21353\n21354\n21355\n21356\n21357\n21358\n21359\n21360\n21361\n21362\n21363\n21364\n21365\n21366\n21367\n21368\n21369\n21370\n21371\n21372\n21373\n21374\n21375\n21376\n21377\n21378\n21379\n21380\n21381\n21382\n21383\n21384\n21385\n21386\n21387\n21388\n21389\n21390\n21391\n21392\n21393\n21394\n21395\n21396\n21397\n21398\n21399\n21400\n21401\n21402\n21403\n21404\n21405\n21406\n21407\n21408\n21409\n21410\n21411\n21412\n21413\n21414\n21415\n21416\n21417\n21418\n21419\n21420\n21421\n21422\n21423\n21424\n21425\n21426\n21427\n21428\n21429\n21430\n21431\n21432\n21433\n21434\n21435\n21436\n21437\n21438\n21439\n21440\n21441\n21442\n21443\n21444\n21445\n21446\n21447\n21448\n21449\n21450\n21451\n21452\n21453\n21454\n21455\n21456\n21457\n21458\n21459\n21460\n21461\n21462\n21463\n21464\n21465\n21466\n21467\n21468\n21469\n21470\n21471\n21472\n21473\n21474\n21475\n21476\n21477\n21478\n21479\n21480\n21481\n21482\n21483\n21484\n21485\n21486\n21487\n21488\n21489\n21490\n21491\n21492\n21493\n21494\n21495\n21496\n21497\n21498\n21499\n21500\n21501\n21502\n21503\n21504\n21505\n21506\n21507\n21508\n21509\n21510\n21511\n21512\n21513\n21514\n21515\n21516\n21517\n21518\n21519\n21520\n21521\n21522\n21523\n21524\n21525\n21526\n21527\n21528\n21529\n21530\n21531\n21532\n21533\n21534\n21535\n21536\n21537\n21538\n21539\n21540\n21541\n21542\n21543\n21544\n21545\n21546\n21547\n21548\n21549\n21550\n21551\n21552\n21553\n21554\n21555\n21556\n21557\n21558\n21559\n21560\n21561\n21562\n21563\n21564\n21565\n21566\n21567\n21568\n21569\n21570\n21571\n21572\n21573\n21574\n21575\n21576\n21577\n21578\n21579\n21580\n21581\n21582\n21583\n21584\n21585\n21586
\n21587\n21588\n21589\n21590\n21591\n21592\n21593\n21594\n21595\n21596\n21597\n21598\n21599\n21600\n21601\n21602\n21603\n21604\n21605\n21606\n21607\n21608\n21609\n21610\n21611\n21612\n21613\n21614\n21615\n21616\n21617\n21618\n21619\n21620\n21621\n21622\n21623\n21624\n21625\n21626\n21627\n21628\n21629\n21630\n21631\n21632\n21633\n21634\n21635\n21636\n21637\n21638\n21639\n21640\n21641\n21642\n21643\n21644\n21645\n21646\n21647\n21648\n21649\n21650\n21651\n21652\n21653\n21654\n21655\n21656\n21657\n21658\n21659\n21660\n21661\n21662\n21663\n21664\n21665\n21666\n21667\n21668\n21669\n21670\n21671\n21672\n21673\n21674\n21675\n21676\n21677\n21678\n21679\n21680\n21681\n21682\n21683\n21684\n21685\n21686\n21687\n21688\n21689\n21690\n21691\n21692\n21693\n21694\n21695\n21696\n21697\n21698\n21699\n21700\n21701\n21702\n21703\n21704\n21705\n21706\n21707\n21708\n21709\n21710\n21711\n21712\n21713\n21714\n21715\n21716\n21717\n21718\n21719\n21720\n21721\n21722\n21723\n21724\n21725\n21726\n21727\n21728\n21729\n21730\n21731\n21732\n21733\n21734\n21735\n21736\n21737\n21738\n21739\n21740\n21741\n21742\n21743\n21744\n21745\n21746\n21747\n21748\n21749\n21750\n21751\n21752\n21753\n21754\n21755\n21756\n21757\n21758\n21759\n21760\n21761\n21762\n21763\n21764\n21765\n21766\n21767\n21768\n21769\n21770\n21771\n21772\n21773\n21774\n21775\n21776\n21777\n21778\n21779\n21780\n21781\n21782\n21783\n21784\n21785\n21786\n21787\n21788\n21789\n21790\n21791\n21792\n21793\n21794\n21795\n21796\n21797\n21798\n21799\n21800\n21801\n21802\n21803\n21804\n21805\n21806\n21807\n21808\n21809\n21810\n21811\n21812\n21813\n21814\n21815\n21816\n21817\n21818\n21819\n21820\n21821\n21822\n21823\n21824\n21825\n21826\n21827\n21828\n21829\n21830\n21831\n21832\n21833\n21834\n21835\n21836\n21837\n21838\n21839\n21840\n21841\n21842\n"
  },
  {
    "path": "timm/data/_info/imagenet21k_miil_synsets.txt",
    "content": "n00005787\nn00006484\nn00007846\nn00015388\nn00017222\nn00021265\nn00021939\nn00120010\nn00141669\nn00288000\nn00288384\nn00324978\nn00326094\nn00433458\nn00433661\nn00433802\nn00434075\nn00439826\nn00440039\nn00440382\nn00440509\nn00440747\nn00440941\nn00441073\nn00441824\nn00442115\nn00442437\nn00442847\nn00442981\nn00443231\nn00443692\nn00443803\nn00444651\nn00444846\nn00444937\nn00445055\nn00445226\nn00445351\nn00445685\nn00445802\nn00446311\nn00446493\nn00446804\nn00446980\nn00447073\nn00447221\nn00447463\nn00447540\nn00447957\nn00448126\nn00448232\nn00448466\nn00448640\nn00448748\nn00448872\nn00448958\nn00449054\nn00449168\nn00449295\nn00449517\nn00449695\nn00449796\nn00449892\nn00449977\nn00450070\nn00450335\nn00450700\nn00450866\nn00450998\nn00451186\nn00451370\nn00451563\nn00451635\nn00452034\nn00452152\nn00452293\nn00452864\nn00453126\nn00453313\nn00453396\nn00453478\nn00453935\nn00454237\nn00454395\nn00454493\nn00454624\nn00454983\nn00455173\nn00456465\nn00463246\nn00463543\nn00464277\nn00464478\nn00464651\nn00464894\nn00466273\nn00466377\nn00466524\nn00466630\nn00466712\nn00466880\nn00467320\nn00467536\nn00467719\nn00467995\nn00468299\nn00468480\nn00469651\nn00470554\nn00470682\nn00470830\nn00470966\nn00471437\nn00471613\nn00474568\nn00474657\nn00475014\nn00475273\nn00475403\nn00475535\nn00475787\nn00476235\nn00476389\nn00477392\nn00477639\nn00478262\nn00479076\nn00479440\nn00479616\nn00479887\nn00480211\nn00480366\nn00480508\nn00480993\nn00481803\nn00482122\nn00482298\nn00483205\nn00483313\nn00483409\nn00483508\nn00483605\nn00483705\nn00483848\nn00523513\nn00825773\nn00887544\nn01055165\nn01314388\nn01314663\nn01314781\nn01315213\nn01316422\nn01317089\nn01317294\nn01317541\nn01317813\nn01317916\nn01318279\nn01318381\nn01318894\nn01319467\nn01321123\nn01321230\nn01321456\nn01321579\nn01321770\nn01321854\nn01322221\nn01322343\nn01322508\nn01322604\nn01322685\nn01322898\nn01322983\nn01323068\nn01323155\nn01323261\nn01323355\nn01323493\nn013
23599\nn01324431\nn01324610\nn01326291\nn01338685\nn01339083\nn01339336\nn01339471\nn01339801\nn01340014\nn01379389\nn01381044\nn01384164\nn01392275\nn01392380\nn01395254\nn01396048\nn01397114\nn01397871\nn01402600\nn01405007\nn01407798\nn01410457\nn01415626\nn01421807\nn01424420\nn01438581\nn01439121\nn01439514\nn01440764\nn01441117\nn01442972\nn01443243\nn01443537\nn01443831\nn01444339\nn01446760\nn01447331\nn01447658\nn01448291\nn01448594\nn01448951\nn01449374\nn01449712\nn01451426\nn01453087\nn01454545\nn01455778\nn01456756\nn01457852\nn01459791\nn01462042\nn01462544\nn01464844\nn01468238\nn01468712\nn01469103\nn01471682\nn01472303\nn01477525\nn01477875\nn01482071\nn01482330\nn01483830\nn01484097\nn01484850\nn01485479\nn01486838\nn01487506\nn01488038\nn01489501\nn01489709\nn01489920\nn01490112\nn01490360\nn01490670\nn01491006\nn01491361\nn01491874\nn01492569\nn01493146\nn01494475\nn01495006\nn01495493\nn01495701\nn01496331\nn01497118\nn01498041\nn01498989\nn01499396\nn01500091\nn01500476\nn01501160\nn01503061\nn01503976\nn01504179\nn01504344\nn01514668\nn01514752\nn01514859\nn01515303\nn01517565\nn01517966\nn01518878\nn01519563\nn01519873\nn01520576\nn01521399\nn01521756\nn01524359\nn01526521\nn01527194\nn01527347\nn01527617\nn01527917\nn01528396\nn01528654\nn01528845\nn01529672\nn01530439\nn01530575\nn01531178\nn01531344\nn01531512\nn01531811\nn01531971\nn01532325\nn01532511\nn01532829\nn01533000\nn01533339\nn01533481\nn01533651\nn01533893\nn01534155\nn01534433\nn01534582\nn01535140\nn01535469\nn01535690\nn01536035\nn01536186\nn01536334\nn01536644\nn01536780\nn01537134\nn01537544\nn01537895\nn01538059\nn01538200\nn01538630\nn01538955\nn01539573\nn01539925\nn01540090\nn01540233\nn01540566\nn01540832\nn01541102\nn01541386\nn01541760\nn01541922\nn01542786\nn01543175\nn01543632\nn01544389\nn01544704\nn01545574\nn01546039\nn01546506\nn01547832\nn01548301\nn01548492\nn01548865\nn01549053\nn01549430\nn01549641\nn01549886\nn01550172\nn01551080\nn01551300\nn01551711\nn0
1552034\nn01552813\nn01553142\nn01554448\nn01555004\nn01555305\nn01555809\nn01556182\nn01557185\nn01557962\nn01558149\nn01558307\nn01558461\nn01558594\nn01558765\nn01558993\nn01559477\nn01559639\nn01559804\nn01560105\nn01560280\nn01560419\nn01560636\nn01560793\nn01560935\nn01561452\nn01561732\nn01562014\nn01562265\nn01562451\nn01563128\nn01563449\nn01563746\nn01563945\nn01564217\nn01564394\nn01564773\nn01564914\nn01565078\nn01565345\nn01565599\nn01565930\nn01566207\nn01566645\nn01567133\nn01567678\nn01567879\nn01568294\nn01568720\nn01568892\nn01569060\nn01569262\nn01569423\nn01569566\nn01569836\nn01569971\nn01570267\nn01570421\nn01570676\nn01570839\nn01571904\nn01572328\nn01572489\nn01572654\nn01572782\nn01573074\nn01573240\nn01573360\nn01573898\nn01574045\nn01574390\nn01574560\nn01574801\nn01575117\nn01575401\nn01575745\nn01576076\nn01576695\nn01577035\nn01577659\nn01577941\nn01578180\nn01578575\nn01579028\nn01579149\nn01579260\nn01579410\nn01579578\nn01579729\nn01580077\nn01580870\nn01581166\nn01581730\nn01581984\nn01582220\nn01582398\nn01582856\nn01583209\nn01583495\nn01583828\nn01584225\nn01584695\nn01584853\nn01585121\nn01585287\nn01585422\nn01585715\nn01586020\nn01586374\nn01586941\nn01587526\nn01587834\nn01588002\nn01588725\nn01589286\nn01589718\nn01589893\nn01591005\nn01591123\nn01591301\nn01591697\nn01592084\nn01592257\nn01592387\nn01592540\nn01592694\nn01593028\nn01594004\nn01594372\nn01594787\nn01594968\nn01595168\nn01595450\nn01595974\nn01596273\nn01596608\nn01597022\nn01597336\nn01597737\nn01597906\nn01598074\nn01598588\nn01598988\nn01599159\nn01599269\nn01599556\nn01600085\nn01600657\nn01601068\nn01601694\nn01602630\nn01602832\nn01603152\nn01603600\nn01603812\nn01603953\nn01604330\nn01604968\nn01605630\nn01606522\nn01606672\nn01606809\nn01607600\nn01607812\nn01607962\nn01608265\nn01608432\nn01608814\nn01609062\nn01609391\nn01609751\nn01609956\nn01610100\nn01610226\nn01610552\nn01610955\nn01611472\nn01611800\nn01611969\nn01612122\nn01612275\nn01612476\n
n01612628\nn01613177\nn01613294\nn01613615\nn01613807\nn01614038\nn01614343\nn01614556\nn01614925\nn01615121\nn01615303\nn01615458\nn01615703\nn01616086\nn01616318\nn01617095\nn01617443\nn01617766\nn01618082\nn01618503\nn01619310\nn01619536\nn01619835\nn01620135\nn01620414\nn01620735\nn01621127\nn01621635\nn01622120\nn01622352\nn01622483\nn01622779\nn01622959\nn01623110\nn01623425\nn01623615\nn01623706\nn01623880\nn01624115\nn01624537\nn01624833\nn01625562\nn01627424\nn01628770\nn01629276\nn01629819\nn01629962\nn01630284\nn01630670\nn01630901\nn01631354\nn01631663\nn01632458\nn01632601\nn01632777\nn01633406\nn01633781\nn01635027\nn01636352\nn01636829\nn01637615\nn01639765\nn01640846\nn01641206\nn01641391\nn01641577\nn01641739\nn01642257\nn01642539\nn01643507\nn01643896\nn01644373\nn01644900\nn01645776\nn01646292\nn01646388\nn01646555\nn01646648\nn01646802\nn01646902\nn01647303\nn01647640\nn01648139\nn01648620\nn01649170\nn01650167\nn01650690\nn01651059\nn01652026\nn01654637\nn01661091\nn01662622\nn01662784\nn01663401\nn01663782\nn01664065\nn01664369\nn01664492\nn01664674\nn01664990\nn01665541\nn01665932\nn01666228\nn01666585\nn01667114\nn01667432\nn01667778\nn01668091\nn01668436\nn01668665\nn01668892\nn01669191\nn01669372\nn01669654\nn01670092\nn01670535\nn01670802\nn01671125\nn01671479\nn01672032\nn01673282\nn01674464\nn01674990\nn01675722\nn01677366\nn01677747\nn01678043\nn01678343\nn01679307\nn01679626\nn01679962\nn01680264\nn01680478\nn01680655\nn01680813\nn01681328\nn01681653\nn01681940\nn01682172\nn01682435\nn01682714\nn01683558\nn01684133\nn01684578\nn01685808\nn01687665\nn01687978\nn01688243\nn01689081\nn01689811\nn01690149\nn01691217\nn01692333\nn01692523\nn01693175\nn01693334\nn01693783\nn01694178\nn01694709\nn01694955\nn01695060\nn01696633\nn01697178\nn01697457\nn01697611\nn01698434\nn01698640\nn01698782\nn01699040\nn01699675\nn01701859\nn01704323\nn01713764\nn01726692\nn01727646\nn01728572\nn01728920\nn01729322\nn01729977\nn01730185\nn01730307\nn01730563
\nn01730812\nn01730960\nn01731545\nn01731941\nn01732244\nn01732614\nn01732789\nn01733466\nn01733757\nn01733957\nn01734104\nn01734418\nn01734637\nn01734808\nn01735189\nn01735439\nn01735577\nn01737021\nn01737472\nn01737728\nn01737875\nn01738065\nn01738601\nn01739381\nn01740131\nn01740551\nn01741232\nn01741562\nn01741943\nn01742172\nn01742821\nn01743086\nn01743605\nn01743936\nn01744100\nn01744270\nn01744401\nn01745125\nn01745484\nn01745902\nn01746359\nn01747589\nn01747885\nn01748264\nn01748686\nn01748906\nn01749244\nn01749582\nn01749742\nn01749939\nn01750167\nn01750437\nn01751036\nn01751472\nn01751748\nn01752165\nn01752585\nn01752736\nn01753032\nn01753180\nn01753488\nn01753959\nn01754370\nn01754533\nn01754876\nn01755581\nn01755740\nn01756089\nn01756291\nn01756508\nn01756733\nn01757115\nn01757343\nn01757677\nn01757901\nn01758141\nn01758757\nn01768244\nn01769347\nn01770081\nn01770393\nn01770795\nn01771417\nn01772222\nn01772664\nn01773157\nn01773549\nn01773797\nn01774384\nn01774750\nn01775062\nn01775370\nn01776313\nn01777304\nn01778217\nn01779148\nn01779629\nn01782209\nn01782516\nn01784675\nn01785667\nn01786646\nn01787835\nn01789740\nn01790711\nn01791107\nn01791463\nn01791625\nn01791954\nn01792042\nn01792158\nn01792429\nn01792640\nn01792955\nn01793249\nn01793435\nn01793715\nn01794158\nn01794344\nn01794651\nn01795088\nn01795545\nn01795735\nn01796340\nn01796519\nn01796729\nn01797020\nn01797307\nn01797601\nn01797886\nn01798168\nn01798484\nn01798706\nn01798839\nn01800424\nn01801876\nn01803078\nn01803362\nn01804163\nn01804478\nn01804653\nn01805801\nn01806143\nn01806297\nn01806364\nn01806467\nn01806567\nn01806847\nn01807105\nn01807496\nn01807828\nn01808140\nn01809106\nn01809371\nn01809752\nn01810268\nn01811909\nn01812337\nn01812662\nn01812866\nn01813088\nn01813385\nn01813532\nn01813948\nn01814217\nn01814370\nn01814755\nn01814921\nn01815601\nn01816887\nn01817263\nn01817346\nn01817953\nn01818299\nn01818515\nn01818832\nn01819115\nn01819313\nn01819465\nn01819734\nn01820052\nn018203
48\nn01820546\nn01821076\nn01821203\nn01821869\nn01822300\nn01823013\nn01823414\nn01824035\nn01824575\nn01825278\nn01826364\nn01826680\nn01827403\nn01827793\nn01828096\nn01828556\nn01828970\nn01829413\nn01829869\nn01830042\nn01830915\nn01832167\nn01832493\nn01833805\nn01834177\nn01834540\nn01835276\nn01837072\nn01838598\nn01839086\nn01839330\nn01839598\nn01839750\nn01840120\nn01840775\nn01841102\nn01841288\nn01841441\nn01841679\nn01842235\nn01842504\nn01843065\nn01843383\nn01843719\nn01844231\nn01844551\nn01844917\nn01845132\nn01846331\nn01847000\nn01847089\nn01847170\nn01847253\nn01847407\nn01847806\nn01847978\nn01848123\nn01848323\nn01848453\nn01848555\nn01848648\nn01848840\nn01848976\nn01849157\nn01849466\nn01849676\nn01849863\nn01850192\nn01850373\nn01850553\nn01850873\nn01851038\nn01851207\nn01851375\nn01851573\nn01851731\nn01851895\nn01852142\nn01852329\nn01852400\nn01852671\nn01852861\nn01853195\nn01853498\nn01853666\nn01853870\nn01854415\nn01854700\nn01854838\nn01855032\nn01855188\nn01855476\nn01855672\nn01856072\nn01856155\nn01856380\nn01856553\nn01856890\nn01857079\nn01857325\nn01857512\nn01857632\nn01857851\nn01858281\nn01858441\nn01858780\nn01858845\nn01858906\nn01859190\nn01859325\nn01859496\nn01859689\nn01859852\nn01860002\nn01860187\nn01861778\nn01862399\nn01871265\nn01872401\nn01872772\nn01873310\nn01874434\nn01874928\nn01875313\nn01876034\nn01877134\nn01877606\nn01877812\nn01878929\nn01879217\nn01879509\nn01881171\nn01882714\nn01883070\nn01884834\nn01885498\nn01886756\nn01887474\nn01887623\nn01887787\nn01887896\nn01888045\nn01888181\nn01888264\nn01888411\nn01889520\nn01891633\nn01893825\nn01896844\nn01897536\nn01899894\nn01900150\nn01903346\nn01904029\nn01904806\nn01904886\nn01905661\nn01906749\nn01909906\nn01910747\nn01913166\nn01914609\nn01914830\nn01915700\nn01915811\nn01916187\nn01916388\nn01916481\nn01916925\nn01917289\nn01917611\nn01917882\nn01918744\nn01922303\nn01923025\nn01924916\nn01930112\nn01934440\nn01935395\nn01937909\nn01938454\nn0194
0736\nn01942869\nn01943087\nn01943899\nn01944118\nn01944390\nn01944812\nn01944955\nn01945143\nn01945685\nn01946630\nn01947396\nn01948573\nn01949085\nn01950731\nn01951274\nn01951613\nn01953361\nn01953594\nn01953762\nn01955084\nn01955933\nn01956344\nn01956481\nn01956764\nn01957335\nn01958038\nn01958346\nn01958531\nn01959492\nn01959985\nn01960177\nn01960459\nn01961985\nn01963317\nn01963571\nn01964049\nn01964271\nn01964441\nn01965529\nn01965889\nn01968897\nn01970164\nn01970667\nn01971280\nn01972541\nn01974773\nn01976146\nn01976868\nn01976957\nn01978287\nn01978455\nn01979874\nn01980166\nn01981276\nn01982068\nn01982347\nn01982650\nn01983481\nn01984245\nn01984695\nn01985128\nn01986214\nn01986806\nn01987545\nn01990007\nn01990800\nn01991028\nn01991520\nn01992773\nn01994910\nn01998183\nn01998741\nn01999186\nn02000954\nn02002075\nn02002556\nn02002724\nn02003037\nn02003204\nn02003577\nn02003839\nn02004131\nn02004492\nn02004855\nn02005399\nn02005790\nn02006063\nn02006364\nn02006656\nn02006985\nn02007284\nn02007558\nn02008041\nn02008497\nn02008643\nn02008796\nn02009229\nn02009380\nn02009508\nn02009750\nn02009912\nn02010272\nn02010453\nn02010728\nn02011016\nn02011281\nn02011460\nn02011805\nn02011943\nn02012185\nn02012849\nn02013177\nn02013567\nn02013706\nn02014237\nn02014524\nn02014941\nn02015357\nn02015554\nn02016066\nn02016358\nn02016659\nn02016816\nn02016956\nn02017213\nn02017475\nn02017725\nn02018027\nn02018207\nn02018368\nn02018795\nn02019190\nn02019929\nn02021050\nn02021795\nn02022684\nn02023341\nn02023855\nn02023992\nn02024185\nn02024479\nn02024763\nn02025043\nn02025239\nn02025389\nn02026059\nn02026629\nn02026948\nn02027075\nn02027357\nn02027492\nn02027897\nn02028035\nn02028175\nn02028342\nn02028451\nn02028727\nn02028900\nn02029087\nn02029378\nn02029706\nn02030035\nn02030287\nn02030837\nn02030996\nn02031585\nn02031934\nn02032222\nn02032355\nn02032480\nn02033041\nn02033208\nn02033561\nn02033779\nn02034129\nn02034295\nn02034661\nn02034971\nn02035210\nn02036053\nn02036711\nn02
037110\nn02037464\nn02037869\nn02038466\nn02038993\nn02040266\nn02041085\nn02041246\nn02041678\nn02041875\nn02042046\nn02042180\nn02042472\nn02042759\nn02043063\nn02043333\nn02043808\nn02044178\nn02044517\nn02044778\nn02044908\nn02045369\nn02045596\nn02045864\nn02046171\nn02046759\nn02046939\nn02047045\nn02047260\nn02047411\nn02047517\nn02047614\nn02047975\nn02048115\nn02048353\nn02049088\nn02050004\nn02050313\nn02050442\nn02050586\nn02050809\nn02051059\nn02051845\nn02052204\nn02052365\nn02052775\nn02053083\nn02053425\nn02053584\nn02054036\nn02054502\nn02054711\nn02055107\nn02055658\nn02055803\nn02056228\nn02056570\nn02056728\nn02057035\nn02057330\nn02057731\nn02058221\nn02058594\nn02059162\nn02060133\nn02060411\nn02060889\nn02062017\nn02062430\nn02062744\nn02063224\nn02063662\nn02064338\nn02064816\nn02065026\nn02065407\nn02066245\nn02066707\nn02067240\nn02068541\nn02068974\nn02069412\nn02069701\nn02069974\nn02070174\nn02070430\nn02071294\nn02071636\nn02072798\nn02073831\nn02074367\nn02075296\nn02075612\nn02075927\nn02076196\nn02076402\nn02076779\nn02077152\nn02077384\nn02077658\nn02077787\nn02077923\nn02078292\nn02078574\nn02078738\nn02079005\nn02079389\nn02079851\nn02080146\nn02080415\nn02081571\nn02081798\nn02082791\nn02083346\nn02083672\nn02084071\nn02084732\nn02084861\nn02085272\nn02085374\nn02085620\nn02085936\nn02086079\nn02086240\nn02086646\nn02086753\nn02086910\nn02087046\nn02087122\nn02087394\nn02087551\nn02088094\nn02088238\nn02088364\nn02088466\nn02088632\nn02088839\nn02089232\nn02089468\nn02089555\nn02090379\nn02090475\nn02090622\nn02090721\nn02090827\nn02091032\nn02091134\nn02091244\nn02091467\nn02091831\nn02092002\nn02092339\nn02092468\nn02093056\nn02093256\nn02093428\nn02093647\nn02093754\nn02093859\nn02093991\nn02094114\nn02094258\nn02094433\nn02094562\nn02094721\nn02094931\nn02095050\nn02095314\nn02095412\nn02095570\nn02095727\nn02095889\nn02096051\nn02096177\nn02096294\nn02096437\nn02096585\nn02096756\nn02097047\nn02097130\nn02097209\nn02097298\nn
02097474\nn02097658\nn02097786\nn02098105\nn02098286\nn02098413\nn02098550\nn02098806\nn02098906\nn02099029\nn02099267\nn02099429\nn02099601\nn02099712\nn02099849\nn02099997\nn02100236\nn02100399\nn02100583\nn02100735\nn02100877\nn02101006\nn02101108\nn02101388\nn02101556\nn02101861\nn02102040\nn02102177\nn02102318\nn02102480\nn02102605\nn02102973\nn02103406\nn02103841\nn02104029\nn02104280\nn02104365\nn02104523\nn02104882\nn02105056\nn02105162\nn02105251\nn02105412\nn02105505\nn02105641\nn02105855\nn02106030\nn02106166\nn02106382\nn02106550\nn02106662\nn02106854\nn02106966\nn02107142\nn02107312\nn02107420\nn02107574\nn02107683\nn02107908\nn02108089\nn02108254\nn02108422\nn02108551\nn02108672\nn02108915\nn02109047\nn02109525\nn02109811\nn02109961\nn02110063\nn02110185\nn02110341\nn02110627\nn02110806\nn02110958\nn02111129\nn02111277\nn02111500\nn02111626\nn02111889\nn02112018\nn02112137\nn02112350\nn02112497\nn02112826\nn02113023\nn02113186\nn02113335\nn02113624\nn02113712\nn02113799\nn02114100\nn02114367\nn02114548\nn02114712\nn02114855\nn02115096\nn02115335\nn02115641\nn02115913\nn02116738\nn02117135\nn02117512\nn02117900\nn02118333\nn02119022\nn02119477\nn02119634\nn02119789\nn02120079\nn02120505\nn02120997\nn02121620\nn02121808\nn02122298\nn02122430\nn02122510\nn02122580\nn02122725\nn02122878\nn02122948\nn02123045\nn02123159\nn02123242\nn02123394\nn02123478\nn02123597\nn02123917\nn02124075\nn02124313\nn02124484\nn02124623\nn02125010\nn02125081\nn02125311\nn02125494\nn02126028\nn02126139\nn02126640\nn02126787\nn02127052\nn02127292\nn02127381\nn02127482\nn02127586\nn02127678\nn02127808\nn02128385\nn02128669\nn02128757\nn02128925\nn02129165\nn02129463\nn02129604\nn02129837\nn02129923\nn02129991\nn02130308\nn02131653\nn02132136\nn02132466\nn02132580\nn02132788\nn02133161\nn02133704\nn02134084\nn02134418\nn02135220\nn02136103\nn02137015\nn02137549\nn02138441\nn02138647\nn02138777\nn02139199\nn02139671\nn02140049\nn02146371\nn02146700\nn02147173\nn02147591\nn02147947\
nn02150482\nn02152740\nn02152881\nn02153109\nn02156871\nn02157206\nn02159955\nn02160947\nn02161338\nn02161457\nn02162561\nn02163297\nn02164464\nn02165105\nn02165456\nn02165877\nn02166567\nn02166826\nn02167151\nn02167820\nn02168245\nn02168699\nn02169023\nn02169497\nn02169705\nn02169974\nn02172182\nn02172518\nn02172870\nn02173113\nn02173373\nn02174001\nn02174659\nn02175014\nn02175569\nn02175916\nn02176261\nn02176439\nn02177972\nn02180875\nn02181724\nn02183096\nn02184473\nn02188699\nn02190166\nn02190790\nn02191773\nn02191979\nn02192252\nn02192513\nn02195526\nn02195819\nn02196119\nn02196344\nn02197689\nn02198859\nn02200198\nn02200509\nn02200850\nn02201000\nn02202006\nn02203152\nn02204907\nn02205219\nn02205673\nn02206856\nn02207179\nn02207345\nn02207805\nn02208280\nn02208498\nn02208848\nn02209354\nn02209624\nn02210427\nn02211444\nn02211627\nn02212062\nn02212958\nn02213107\nn02213239\nn02213543\nn02213663\nn02213788\nn02214341\nn02214773\nn02215770\nn02216211\nn02216365\nn02218371\nn02219486\nn02220518\nn02220804\nn02221083\nn02221414\nn02222035\nn02226429\nn02226821\nn02226970\nn02227247\nn02228341\nn02229156\nn02229544\nn02229765\nn02231052\nn02231487\nn02233338\nn02233943\nn02234355\nn02234848\nn02236044\nn02236241\nn02236355\nn02236896\nn02239774\nn02240068\nn02240517\nn02241426\nn02242137\nn02243562\nn02244797\nn02246628\nn02250822\nn02251775\nn02252226\nn02254697\nn02256656\nn02257284\nn02257985\nn02258198\nn02259212\nn02262449\nn02262803\nn02264232\nn02264363\nn02264885\nn02266050\nn02266864\nn02268148\nn02268443\nn02268853\nn02270623\nn02272871\nn02273392\nn02274024\nn02274259\nn02274822\nn02275560\nn02275773\nn02276078\nn02276258\nn02276355\nn02276749\nn02276902\nn02277094\nn02277268\nn02277742\nn02278024\nn02278210\nn02278839\nn02278980\nn02279257\nn02279637\nn02279972\nn02280649\nn02281015\nn02281136\nn02281406\nn02281787\nn02282257\nn02282385\nn02282553\nn02282903\nn02283077\nn02283201\nn02283951\nn02284611\nn02284884\nn02285801\nn02286089\nn02287004\nn0228826
8\nn02288789\nn02291748\nn02292692\nn02295064\nn02295390\nn02297442\nn02298218\nn02298541\nn02299157\nn02299505\nn02299846\nn02300797\nn02301935\nn02302244\nn02302620\nn02302969\nn02303284\nn02304036\nn02304432\nn02305085\nn02305929\nn02307325\nn02307681\nn02308139\nn02308471\nn02308735\nn02309242\nn02309337\nn02310334\nn02310585\nn02310717\nn02310941\nn02311060\nn02311617\nn02312006\nn02312427\nn02312640\nn02313008\nn02316707\nn02317335\nn02317781\nn02318167\nn02319095\nn02319308\nn02319555\nn02321170\nn02321529\nn02323449\nn02324045\nn02324431\nn02324514\nn02324587\nn02324850\nn02325366\nn02325722\nn02326432\nn02326862\nn02327028\nn02327656\nn02327842\nn02328150\nn02328429\nn02329401\nn02330245\nn02331046\nn02332156\nn02332755\nn02333546\nn02333909\nn02334201\nn02337001\nn02338145\nn02339376\nn02341475\nn02341974\nn02342885\nn02343320\nn02343772\nn02346627\nn02348173\nn02350105\nn02352591\nn02353861\nn02355227\nn02355477\nn02356381\nn02356612\nn02356798\nn02356977\nn02357111\nn02357401\nn02357585\nn02357911\nn02358091\nn02358390\nn02358584\nn02358890\nn02359047\nn02359324\nn02359556\nn02359915\nn02360282\nn02361337\nn02361587\nn02361706\nn02363005\nn02363351\nn02364520\nn02364673\nn02364840\nn02365480\nn02366959\nn02367492\nn02370806\nn02372584\nn02372952\nn02373336\nn02374149\nn02374451\nn02375302\nn02376542\nn02376679\nn02376791\nn02376918\nn02377063\nn02377181\nn02377291\nn02377388\nn02377480\nn02377603\nn02377703\nn02378541\nn02378969\nn02379081\nn02379183\nn02379329\nn02379430\nn02379630\nn02379908\nn02380052\nn02380335\nn02380464\nn02380583\nn02380745\nn02380875\nn02381004\nn02381261\nn02381364\nn02381460\nn02381609\nn02381831\nn02382039\nn02382132\nn02382204\nn02382338\nn02382437\nn02382635\nn02382750\nn02382850\nn02382948\nn02383231\nn02385214\nn02386014\nn02386141\nn02386224\nn02386310\nn02386496\nn02386853\nn02386968\nn02387093\nn02387254\nn02387346\nn02387722\nn02387887\nn02388143\nn02388276\nn02388735\nn02388832\nn02388917\nn02389026\nn02389128\nn02389
261\nn02389346\nn02389559\nn02389779\nn02390015\nn02390101\nn02390640\nn02391049\nn02391234\nn02391373\nn02391508\nn02391994\nn02392434\nn02392824\nn02393161\nn02393580\nn02393807\nn02393940\nn02394477\nn02395003\nn02395406\nn02395694\nn02396014\nn02396088\nn02396427\nn02397096\nn02397529\nn02397744\nn02398521\nn02399000\nn02402010\nn02402175\nn02402425\nn02403003\nn02403231\nn02403325\nn02403454\nn02403740\nn02403920\nn02404186\nn02404432\nn02404573\nn02404906\nn02405101\nn02405302\nn02405799\nn02405929\nn02406174\nn02406533\nn02406647\nn02406749\nn02407071\nn02407276\nn02407390\nn02407625\nn02407959\nn02408429\nn02408817\nn02409508\nn02410011\nn02410509\nn02410702\nn02410900\nn02411206\nn02411705\nn02411999\nn02412080\nn02412210\nn02412440\nn02412629\nn02413050\nn02413131\nn02413593\nn02414209\nn02414290\nn02414578\nn02414763\nn02415253\nn02415435\nn02415577\nn02415829\nn02416104\nn02416519\nn02416820\nn02416880\nn02416964\nn02417070\nn02417387\nn02417534\nn02417663\nn02417914\nn02418465\nn02419336\nn02419634\nn02419796\nn02420509\nn02420828\nn02421136\nn02421449\nn02421792\nn02422106\nn02422391\nn02422699\nn02423022\nn02423218\nn02423589\nn02424085\nn02424305\nn02424486\nn02424909\nn02425228\nn02425887\nn02426481\nn02426813\nn02427032\nn02427470\nn02427576\nn02427724\nn02428349\nn02428508\nn02429456\nn02430045\nn02430559\nn02430830\nn02431122\nn02431337\nn02431628\nn02431785\nn02431976\nn02432291\nn02432511\nn02432704\nn02432983\nn02433318\nn02433546\nn02433925\nn02434190\nn02434954\nn02437136\nn02437312\nn02437482\nn02437616\nn02438173\nn02438272\nn02438580\nn02439033\nn02439398\nn02441942\nn02442845\nn02443015\nn02443114\nn02443346\nn02443484\nn02444819\nn02445004\nn02445171\nn02445394\nn02445715\nn02446206\nn02447366\nn02447762\nn02448060\nn02449350\nn02450295\nn02453108\nn02454379\nn02454794\nn02456962\nn02457408\nn02457945\nn02458135\nn02460009\nn02460451\nn02461128\nn02461830\nn02469248\nn02469472\nn02469914\nn02470238\nn02470325\nn02472293\nn02472987\nn024
73307\nn02474777\nn02475078\nn02475669\nn02480153\nn02480495\nn02480855\nn02481103\nn02481235\nn02481366\nn02481500\nn02481823\nn02482286\nn02482474\nn02482650\nn02483362\nn02483708\nn02484322\nn02484975\nn02485536\nn02486261\nn02486410\nn02486657\nn02486908\nn02487347\nn02487547\nn02487847\nn02488291\nn02488415\nn02488702\nn02489166\nn02490219\nn02490811\nn02491107\nn02492035\nn02492660\nn02493509\nn02493793\nn02494079\nn02496913\nn02497673\nn02499022\nn02499316\nn02499808\nn02500267\nn02501583\nn02503517\nn02504013\nn02504458\nn02508021\nn02508213\nn02508742\nn02509197\nn02509515\nn02509815\nn02510455\nn02512053\nn02512830\nn02512938\nn02514041\nn02516188\nn02517442\nn02518324\nn02519148\nn02519686\nn02519862\nn02520147\nn02522399\nn02523427\nn02524202\nn02525382\nn02526121\nn02527057\nn02527271\nn02527622\nn02530421\nn02530999\nn02532028\nn02532602\nn02533209\nn02533834\nn02534734\nn02535258\nn02535537\nn02535759\nn02536165\nn02536456\nn02536864\nn02537085\nn02537319\nn02537525\nn02537716\nn02538010\nn02538216\nn02541687\nn02542432\nn02543565\nn02548247\nn02549248\nn02549989\nn02555863\nn02556846\nn02557182\nn02557318\nn02557591\nn02557749\nn02560110\nn02561108\nn02561381\nn02561514\nn02561661\nn02562315\nn02562796\nn02563182\nn02563648\nn02563792\nn02564270\nn02564720\nn02565072\nn02565324\nn02565573\nn02568087\nn02568959\nn02569484\nn02570164\nn02570838\nn02572196\nn02572484\nn02573704\nn02574271\nn02576575\nn02576906\nn02577403\nn02578771\nn02578928\nn02579303\nn02579928\nn02580336\nn02580679\nn02580830\nn02581957\nn02583890\nn02584145\nn02584449\nn02585872\nn02586543\nn02588286\nn02589623\nn02590094\nn02590702\nn02592055\nn02593019\nn02595702\nn02596067\nn02596381\nn02597608\nn02598573\nn02598878\nn02599052\nn02599347\nn02599557\nn02601344\nn02603317\nn02603540\nn02605316\nn02605703\nn02605936\nn02606052\nn02606384\nn02607072\nn02607201\nn02607470\nn02607862\nn02610066\nn02610664\nn02611561\nn02613181\nn02616851\nn02618827\nn02619165\nn02619550\nn02620167\nn0
2624167\nn02624807\nn02625258\nn02625612\nn02625851\nn02626265\nn02626762\nn02627292\nn02627532\nn02628062\nn02629230\nn02630281\nn02630615\nn02630739\nn02631041\nn02631330\nn02631475\nn02639087\nn02639605\nn02640242\nn02640626\nn02640857\nn02641379\nn02643112\nn02643566\nn02643836\nn02644113\nn02649546\nn02650050\nn02652132\nn02653145\nn02653497\nn02654112\nn02654425\nn02654745\nn02655020\nn02655848\nn02656032\nn02656670\nn02657368\nn02657694\nn02658531\nn02660208\nn02660640\nn02663211\nn02666196\nn02666501\nn02666624\nn02666943\nn02667093\nn02667244\nn02667379\nn02667478\nn02667576\nn02669295\nn02669534\nn02669723\nn02670186\nn02670382\nn02670683\nn02672371\nn02672831\nn02675219\nn02676566\nn02676938\nn02678897\nn02679257\nn02680110\nn02680512\nn02680754\nn02681392\nn02682569\nn02682922\nn02683323\nn02683454\nn02683558\nn02683791\nn02685082\nn02686121\nn02686227\nn02686379\nn02686568\nn02687172\nn02687423\nn02687821\nn02687992\nn02688273\nn02688443\nn02689144\nn02689274\nn02689434\nn02689748\nn02690373\nn02691156\nn02692086\nn02692232\nn02692877\nn02693246\nn02694045\nn02694426\nn02694662\nn02695627\nn02696165\nn02697221\nn02697675\nn02698634\nn02699494\nn02699629\nn02699770\nn02699915\nn02700064\nn02700258\nn02700895\nn02701002\nn02702989\nn02703275\nn02704645\nn02704792\nn02704949\nn02705201\nn02705429\nn02705944\nn02708093\nn02708433\nn02708555\nn02708711\nn02709101\nn02709367\nn02709637\nn02709908\nn02710044\nn02710201\nn02710324\nn02710429\nn02710600\nn02713003\nn02713364\nn02714751\nn02715229\nn02715513\nn02715712\nn02720048\nn02723165\nn02725872\nn02726017\nn02726305\nn02726681\nn02727016\nn02727141\nn02727426\nn02728440\nn02729837\nn02729965\nn02730930\nn02731398\nn02731629\nn02731900\nn02732072\nn02732572\nn02732827\nn02733213\nn02733524\nn02734725\nn02735361\nn02735538\nn02735688\nn02736798\nn02737660\nn02738031\nn02738535\nn02738741\nn02738859\nn02739427\nn02739550\nn02739668\nn02739889\nn02740300\nn02740533\nn02740764\nn02741475\nn02742322\nn02742468\n
n02742753\nn02744323\nn02744844\nn02745611\nn02746365\nn02747177\nn02747672\nn02747802\nn02749479\nn02749953\nn02750070\nn02750169\nn02751215\nn02751295\nn02752496\nn02752615\nn02752810\nn02753044\nn02753394\nn02754103\nn02754656\nn02755140\nn02755529\nn02755823\nn02756098\nn02756977\nn02757061\nn02757337\nn02757462\nn02757714\nn02757810\nn02758134\nn02758863\nn02758960\nn02759257\nn02759387\nn02759963\nn02760099\nn02760199\nn02760429\nn02760658\nn02760855\nn02761206\nn02761392\nn02761557\nn02761696\nn02761834\nn02762371\nn02762508\nn02763306\nn02763604\nn02763901\nn02764044\nn02764398\nn02764505\nn02764779\nn02764935\nn02766320\nn02766534\nn02766792\nn02767038\nn02767147\nn02767433\nn02767665\nn02767956\nn02768114\nn02768226\nn02768655\nn02768973\nn02769075\nn02769290\nn02769669\nn02769748\nn02769963\nn02770211\nn02770721\nn02770830\nn02771004\nn02771166\nn02771286\nn02771750\nn02772101\nn02772435\nn02772700\nn02773037\nn02773838\nn02774152\nn02774630\nn02774921\nn02775039\nn02775178\nn02775483\nn02775897\nn02776205\nn02776631\nn02776825\nn02776978\nn02777100\nn02777292\nn02777734\nn02778294\nn02778456\nn02778669\nn02779435\nn02780704\nn02780815\nn02781121\nn02781338\nn02782093\nn02782602\nn02782681\nn02782778\nn02783161\nn02783324\nn02783459\nn02783900\nn02783994\nn02784124\nn02785648\nn02786058\nn02786198\nn02786331\nn02786736\nn02786837\nn02787435\nn02787622\nn02788021\nn02788148\nn02788572\nn02789487\nn02790669\nn02790823\nn02790996\nn02791124\nn02791270\nn02792409\nn02792552\nn02793089\nn02793199\nn02793495\nn02793842\nn02794156\nn02794664\nn02795169\nn02795528\nn02795670\nn02796207\nn02796318\nn02796995\nn02797295\nn02797535\nn02797692\nn02799071\nn02799175\nn02799323\nn02799897\nn02800213\nn02800497\nn02800675\nn02801184\nn02801450\nn02801525\nn02801823\nn02801938\nn02802215\nn02802426\nn02802544\nn02802721\nn02802990\nn02803349\nn02803539\nn02803666\nn02803934\nn02804123\nn02804252\nn02804414\nn02804515\nn02804610\nn02805983\nn02806088\nn02806379\nn02806530
\nn02807133\nn02807523\nn02807616\nn02807731\nn02808185\nn02808304\nn02808440\nn02809105\nn02810471\nn02810782\nn02811059\nn02811204\nn02811350\nn02811468\nn02811618\nn02811719\nn02811936\nn02812201\nn02812949\nn02813252\nn02813399\nn02813544\nn02813645\nn02813752\nn02814428\nn02814533\nn02814774\nn02814860\nn02815749\nn02815834\nn02815950\nn02816656\nn02816768\nn02817031\nn02817516\nn02818135\nn02818832\nn02820210\nn02820556\nn02820675\nn02821202\nn02821627\nn02821943\nn02822064\nn02822220\nn02822579\nn02823124\nn02823335\nn02823428\nn02823510\nn02823586\nn02823750\nn02823848\nn02823964\nn02824058\nn02824319\nn02824448\nn02825153\nn02825442\nn02825657\nn02825961\nn02826068\nn02826589\nn02826886\nn02827606\nn02828299\nn02828427\nn02828884\nn02829596\nn02831237\nn02831335\nn02831595\nn02831724\nn02831894\nn02833793\nn02834397\nn02834778\nn02835271\nn02835412\nn02835724\nn02835829\nn02835915\nn02836035\nn02836174\nn02836392\nn02837789\nn02837887\nn02838345\nn02838728\nn02839110\nn02839351\nn02839592\nn02839910\nn02840134\nn02840245\nn02840619\nn02841187\nn02841315\nn02841506\nn02842573\nn02843029\nn02843158\nn02843276\nn02843553\nn02843684\nn02844307\nn02846141\nn02846511\nn02846733\nn02847631\nn02847852\nn02848216\nn02848523\nn02849154\nn02849885\nn02850732\nn02850950\nn02851099\nn02851939\nn02852043\nn02852173\nn02852360\nn02853016\nn02854532\nn02854739\nn02854926\nn02855089\nn02855390\nn02855701\nn02855925\nn02856237\nn02857477\nn02857644\nn02858304\nn02859184\nn02859343\nn02859443\nn02859955\nn02860415\nn02860640\nn02860847\nn02861022\nn02861147\nn02861387\nn02861886\nn02862048\nn02862916\nn02863014\nn02863426\nn02863536\nn02863750\nn02864504\nn02864593\nn02865351\nn02865665\nn02865931\nn02866386\nn02867715\nn02867966\nn02868638\nn02868975\nn02869155\nn02869249\nn02869737\nn02869837\nn02870526\nn02870676\nn02870880\nn02871005\nn02871147\nn02871314\nn02871439\nn02871525\nn02871824\nn02871963\nn02872333\nn02872529\nn02872752\nn02873520\nn02873733\nn02873839\nn028740
86\nn02874442\nn02874537\nn02876084\nn02876326\nn02876657\nn02877266\nn02877765\nn02877962\nn02878222\nn02878425\nn02879087\nn02879309\nn02879718\nn02880189\nn02880393\nn02880546\nn02880842\nn02880940\nn02881193\nn02881757\nn02881906\nn02882190\nn02882301\nn02882647\nn02882894\nn02883004\nn02883205\nn02883344\nn02884994\nn02885108\nn02885338\nn02885462\nn02885882\nn02886321\nn02886434\nn02887079\nn02887209\nn02887489\nn02887970\nn02888270\nn02889425\nn02889646\nn02890188\nn02890351\nn02890513\nn02890662\nn02890940\nn02891188\nn02891788\nn02892201\nn02892304\nn02892499\nn02892767\nn02892948\nn02893608\nn02893692\nn02893941\nn02894158\nn02894337\nn02894605\nn02895154\nn02895438\nn02896442\nn02897097\nn02897820\nn02898269\nn02898369\nn02898585\nn02898711\nn02899439\nn02900160\nn02900705\nn02901114\nn02901259\nn02901377\nn02901793\nn02902079\nn02902687\nn02902916\nn02903126\nn02903204\nn02903852\nn02904233\nn02904640\nn02904803\nn02904927\nn02905036\nn02905152\nn02906734\nn02907082\nn02907391\nn02907656\nn02907873\nn02908217\nn02908773\nn02909285\nn02909870\nn02910145\nn02910353\nn02910542\nn02910864\nn02911332\nn02912065\nn02912557\nn02912894\nn02913152\nn02914991\nn02915904\nn02916179\nn02916350\nn02916936\nn02917067\nn02917377\nn02917521\nn02917607\nn02917964\nn02918112\nn02918330\nn02918595\nn02918831\nn02918964\nn02919148\nn02919414\nn02919792\nn02919890\nn02920083\nn02920259\nn02920369\nn02920658\nn02921029\nn02921195\nn02921756\nn02921884\nn02922292\nn02922578\nn02922798\nn02923682\nn02924116\nn02925009\nn02925107\nn02925519\nn02925666\nn02926426\nn02926591\nn02927161\nn02927764\nn02927887\nn02928049\nn02928299\nn02928608\nn02929289\nn02929582\nn02930080\nn02930214\nn02930645\nn02930766\nn02931148\nn02931294\nn02931417\nn02931836\nn02932019\nn02932400\nn02932523\nn02932693\nn02932891\nn02933112\nn02933340\nn02933462\nn02933649\nn02934168\nn02934451\nn02935017\nn02935387\nn02935658\nn02935891\nn02936176\nn02936281\nn02936402\nn02936570\nn02936714\nn02937958\nn0293
8886\nn02939185\nn02939866\nn02940385\nn02940570\nn02942349\nn02942460\nn02942699\nn02943241\nn02943871\nn02943964\nn02944075\nn02944146\nn02944459\nn02944579\nn02946127\nn02946270\nn02946348\nn02946509\nn02946824\nn02946921\nn02947660\nn02947818\nn02948072\nn02948557\nn02949202\nn02949542\nn02950256\nn02950632\nn02950826\nn02950943\nn02951358\nn02951585\nn02951703\nn02951843\nn02952109\nn02952237\nn02952374\nn02952485\nn02952585\nn02952674\nn02953197\nn02953455\nn02954163\nn02954340\nn02954938\nn02955065\nn02955247\nn02955540\nn02956699\nn02956795\nn02956883\nn02957008\nn02957135\nn02957755\nn02958343\nn02959942\nn02960352\nn02960690\nn02960903\nn02961035\nn02961225\nn02961451\nn02961544\nn02962061\nn02962200\nn02962843\nn02963159\nn02963302\nn02963503\nn02963692\nn02963821\nn02963987\nn02964843\nn02965216\nn02965300\nn02965783\nn02966193\nn02966545\nn02966687\nn02967294\nn02967626\nn02967782\nn02968074\nn02968333\nn02968473\nn02969010\nn02969323\nn02970408\nn02970534\nn02970685\nn02970849\nn02971167\nn02971356\nn02971579\nn02971691\nn02972397\nn02973017\nn02973236\nn02973805\nn02973904\nn02974003\nn02974348\nn02974697\nn02975212\nn02976123\nn02976249\nn02976350\nn02976455\nn02976939\nn02977058\nn02977330\nn02977438\nn02977619\nn02977936\nn02978055\nn02978478\nn02978753\nn02978881\nn02979074\nn02979186\nn02979290\nn02979399\nn02979836\nn02980036\nn02980441\nn02981024\nn02981321\nn02981792\nn02981911\nn02982232\nn02982416\nn02982515\nn02983189\nn02983357\nn02984061\nn02984203\nn02984469\nn02985963\nn02986160\nn02987379\nn02987492\nn02988066\nn02988156\nn02988304\nn02988486\nn02988679\nn02988963\nn02989099\nn02990373\nn02991302\nn02991847\nn02992032\nn02992211\nn02992368\nn02992529\nn02992795\nn02993194\nn02993368\nn02994573\nn02995345\nn02995871\nn02995998\nn02997391\nn02997607\nn02997910\nn02998003\nn02998563\nn02998841\nn02999138\nn02999410\nn02999936\nn03000134\nn03000247\nn03000684\nn03001115\nn03001627\nn03002096\nn03002341\nn03002711\nn03002816\nn03002948\nn03
003091\nn03004275\nn03004824\nn03005033\nn03005285\nn03006626\nn03007130\nn03007444\nn03007591\nn03008177\nn03008976\nn03009794\nn03010473\nn03010656\nn03010795\nn03010915\nn03011018\nn03011355\nn03011741\nn03012013\nn03012897\nn03013438\nn03013580\nn03013850\nn03014440\nn03014705\nn03015149\nn03015254\nn03015478\nn03015851\nn03016389\nn03016609\nn03016737\nn03016868\nn03016953\nn03017070\nn03017168\nn03018209\nn03018349\nn03018712\nn03019434\nn03019685\nn03019938\nn03020034\nn03020416\nn03020692\nn03021228\nn03024064\nn03025250\nn03026506\nn03026907\nn03027108\nn03027250\nn03027625\nn03028079\nn03028596\nn03028785\nn03029197\nn03029445\nn03030262\nn03030353\nn03030557\nn03030880\nn03031012\nn03031152\nn03031422\nn03032252\nn03032453\nn03032811\nn03033362\nn03033986\nn03034244\nn03034405\nn03034663\nn03035252\nn03035832\nn03036022\nn03037404\nn03037709\nn03038281\nn03038685\nn03038870\nn03039015\nn03039259\nn03039493\nn03039827\nn03039947\nn03040376\nn03041114\nn03041449\nn03041632\nn03041810\nn03042139\nn03042490\nn03042697\nn03043423\nn03043693\nn03043958\nn03044934\nn03045228\nn03045337\nn03045698\nn03046029\nn03046133\nn03046257\nn03046802\nn03046921\nn03047052\nn03047690\nn03047799\nn03047941\nn03048883\nn03049782\nn03049924\nn03050453\nn03050546\nn03050655\nn03050864\nn03051041\nn03051249\nn03051396\nn03051540\nn03054901\nn03055418\nn03055857\nn03057021\nn03057541\nn03057636\nn03057920\nn03058107\nn03058603\nn03059685\nn03061211\nn03061345\nn03061505\nn03061674\nn03062015\nn03062122\nn03062245\nn03062336\nn03062985\nn03063073\nn03063199\nn03063338\nn03063485\nn03063599\nn03063689\nn03063968\nn03064250\nn03064350\nn03064758\nn03064935\nn03065243\nn03065424\nn03066359\nn03066849\nn03067093\nn03067212\nn03067339\nn03067518\nn03068181\nn03068998\nn03069752\nn03070059\nn03070193\nn03071021\nn03071160\nn03072201\nn03072440\nn03073296\nn03073545\nn03073694\nn03073977\nn03074380\nn03074855\nn03075097\nn03075370\nn03075634\nn03075768\nn03075946\nn03077616\nn03077741\nn
03078802\nn03078995\nn03079136\nn03079230\nn03079494\nn03080497\nn03080633\nn03082280\nn03082656\nn03082807\nn03082979\nn03084420\nn03084834\nn03085013\nn03085219\nn03085602\nn03085915\nn03086457\nn03086580\nn03086670\nn03086868\nn03087069\nn03087245\nn03087366\nn03087816\nn03088389\nn03088580\nn03089624\nn03089753\nn03089879\nn03090000\nn03090172\nn03091044\nn03091374\nn03092166\nn03092314\nn03092656\nn03092883\nn03094159\nn03094503\nn03095699\nn03096960\nn03097362\nn03097535\nn03097673\nn03098140\nn03098688\nn03098959\nn03099147\nn03099274\nn03099454\nn03099945\nn03100240\nn03100346\nn03100490\nn03100897\nn03101156\nn03101517\nn03101664\nn03101796\nn03101986\nn03102371\nn03102654\nn03103396\nn03103563\nn03105088\nn03105306\nn03105467\nn03106898\nn03107046\nn03107488\nn03108455\nn03108853\nn03109150\nn03109253\nn03109693\nn03109881\nn03110669\nn03111041\nn03111177\nn03111296\nn03112719\nn03112869\nn03113152\nn03113657\nn03113835\nn03114236\nn03114379\nn03114504\nn03115180\nn03115400\nn03115762\nn03115897\nn03116530\nn03116767\nn03118969\nn03119203\nn03119396\nn03119510\nn03120491\nn03120778\nn03121298\nn03121431\nn03121897\nn03122073\nn03122202\nn03122295\nn03123553\nn03123809\nn03123917\nn03124043\nn03124170\nn03124474\nn03124590\nn03125057\nn03125729\nn03125870\nn03126385\nn03126580\nn03126707\nn03127203\nn03127408\nn03127747\nn03127925\nn03128085\nn03128248\nn03128427\nn03128519\nn03129001\nn03129471\nn03129753\nn03130761\nn03131574\nn03131669\nn03131967\nn03132076\nn03132261\nn03132666\nn03132776\nn03133050\nn03133415\nn03133878\nn03134739\nn03134853\nn03135030\nn03135532\nn03136369\nn03137473\nn03138344\nn03138669\nn03139464\nn03140126\nn03140292\nn03140431\nn03140652\nn03141065\nn03141327\nn03141455\nn03141702\nn03141823\nn03142679\nn03145147\nn03145522\nn03145719\nn03146219\nn03146687\nn03146846\nn03147280\nn03147509\nn03148324\nn03148727\nn03149686\nn03150232\nn03150511\nn03151077\nn03152303\nn03154073\nn03154895\nn03156279\nn03156767\nn03157348\nn03158186\
nn03158885\nn03159535\nn03159640\nn03160309\nn03160740\nn03161450\nn03163222\nn03163381\nn03164344\nn03164605\nn03164722\nn03165096\nn03165466\nn03165616\nn03166514\nn03167978\nn03168107\nn03168217\nn03169176\nn03170635\nn03171228\nn03171356\nn03171635\nn03172038\nn03173270\nn03173387\nn03173929\nn03174450\nn03174731\nn03175081\nn03175189\nn03175457\nn03176386\nn03176594\nn03176763\nn03177165\nn03178000\nn03178430\nn03178674\nn03179701\nn03179910\nn03180011\nn03180384\nn03180504\nn03180865\nn03180969\nn03181293\nn03183080\nn03186285\nn03186818\nn03187037\nn03187268\nn03187595\nn03188531\nn03188725\nn03189083\nn03191286\nn03192543\nn03193107\nn03193260\nn03193423\nn03193597\nn03195332\nn03195959\nn03196062\nn03196217\nn03196598\nn03196990\nn03197337\nn03198500\nn03199647\nn03199775\nn03199901\nn03200231\nn03200357\nn03200539\nn03200701\nn03200906\nn03201035\nn03201208\nn03201529\nn03201638\nn03201776\nn03202354\nn03202940\nn03204306\nn03204558\nn03205458\nn03205574\nn03205669\nn03206282\nn03206718\nn03206908\nn03207305\nn03207630\nn03207743\nn03207835\nn03207941\nn03208556\nn03208938\nn03209359\nn03209910\nn03210245\nn03210372\nn03210552\nn03211117\nn03211789\nn03212114\nn03212811\nn03213538\nn03213826\nn03214253\nn03214582\nn03215508\nn03216402\nn03216710\nn03216828\nn03218198\nn03219010\nn03219135\nn03219483\nn03219966\nn03220237\nn03220513\nn03220692\nn03221059\nn03221351\nn03221540\nn03221720\nn03222176\nn03222318\nn03222516\nn03223162\nn03223299\nn03223553\nn03223686\nn03224603\nn03224753\nn03225108\nn03225777\nn03225988\nn03226254\nn03226375\nn03226538\nn03226880\nn03227317\nn03228254\nn03228365\nn03228692\nn03228967\nn03229244\nn03231368\nn03231819\nn03232309\nn03232543\nn03233123\nn03233624\nn03233744\nn03233905\nn03234164\nn03234952\nn03235042\nn03235180\nn03235327\nn03235796\nn03236217\nn03236423\nn03236735\nn03237340\nn03237416\nn03237839\nn03237992\nn03238131\nn03238286\nn03238586\nn03239054\nn03239259\nn03239726\nn03240140\nn03240683\nn03240892\nn0324109
3\nn03241335\nn03241496\nn03242506\nn03243218\nn03244047\nn03244231\nn03244775\nn03244919\nn03245724\nn03245889\nn03246454\nn03246933\nn03247083\nn03249342\nn03249569\nn03250089\nn03250279\nn03250405\nn03250847\nn03251533\nn03251766\nn03251932\nn03252637\nn03253279\nn03253796\nn03253886\nn03254046\nn03254189\nn03254374\nn03254862\nn03255030\nn03255899\nn03256032\nn03256166\nn03256788\nn03256928\nn03257210\nn03257586\nn03258330\nn03258577\nn03258905\nn03259009\nn03259280\nn03259401\nn03259505\nn03260849\nn03261019\nn03261603\nn03261776\nn03262072\nn03262248\nn03262519\nn03262717\nn03262809\nn03262932\nn03263076\nn03266371\nn03266749\nn03267113\nn03267468\nn03267821\nn03268142\nn03268311\nn03268645\nn03268790\nn03268918\nn03269203\nn03269401\nn03271030\nn03271574\nn03272010\nn03272125\nn03272239\nn03272383\nn03272562\nn03272810\nn03272940\nn03273061\nn03273551\nn03273740\nn03273913\nn03274265\nn03274435\nn03275681\nn03277459\nn03277771\nn03278248\nn03278914\nn03279508\nn03281145\nn03281673\nn03282295\nn03282401\nn03283221\nn03284743\nn03284886\nn03285578\nn03287351\nn03287733\nn03288500\nn03288886\nn03289660\nn03289985\nn03290096\nn03290195\nn03290653\nn03291413\nn03291741\nn03291819\nn03291963\nn03292475\nn03292603\nn03293741\nn03293863\nn03294048\nn03294833\nn03295012\nn03295246\nn03296081\nn03296328\nn03297103\nn03297226\nn03297495\nn03297644\nn03297735\nn03298089\nn03298716\nn03298858\nn03300216\nn03300443\nn03301568\nn03301833\nn03301940\nn03302671\nn03302938\nn03303217\nn03303831\nn03306385\nn03307037\nn03307792\nn03308152\nn03308481\nn03309110\nn03309356\nn03309465\nn03309687\nn03309808\nn03313333\nn03314227\nn03314608\nn03314780\nn03314884\nn03315644\nn03316105\nn03316406\nn03317788\nn03318294\nn03318865\nn03318983\nn03319457\nn03319745\nn03320046\nn03320262\nn03320421\nn03320519\nn03320959\nn03321103\nn03321563\nn03321954\nn03322570\nn03322704\nn03322836\nn03322940\nn03323096\nn03324928\nn03325088\nn03325584\nn03325941\nn03326660\nn03326795\nn03326948\nn03327
133\nn03327234\nn03327553\nn03327691\nn03329302\nn03329536\nn03329663\nn03331077\nn03331599\nn03332005\nn03332271\nn03332393\nn03332989\nn03333129\nn03333252\nn03333610\nn03333711\nn03334291\nn03334382\nn03334912\nn03335030\nn03336282\nn03336575\nn03337140\nn03337383\nn03338821\nn03339529\nn03339643\nn03340723\nn03341153\nn03341297\nn03342015\nn03342127\nn03342262\nn03343354\nn03343560\nn03343737\nn03343853\nn03344305\nn03344393\nn03344642\nn03345487\nn03345837\nn03346135\nn03346455\nn03347037\nn03347617\nn03348868\nn03349469\nn03349771\nn03349892\nn03350204\nn03350602\nn03351434\nn03351979\nn03352628\nn03353951\nn03354207\nn03354903\nn03355768\nn03355925\nn03356858\nn03356982\nn03357267\nn03357716\nn03358172\nn03358380\nn03358726\nn03359137\nn03359285\nn03359436\nn03359566\nn03360300\nn03360431\nn03360622\nn03361297\nn03361380\nn03361550\nn03362890\nn03363363\nn03363549\nn03363749\nn03364008\nn03364599\nn03365231\nn03365374\nn03365592\nn03365991\nn03366823\nn03366974\nn03367059\nn03367410\nn03367545\nn03368352\nn03369276\nn03370387\nn03371875\nn03372029\nn03372549\nn03373237\nn03373611\nn03373943\nn03374372\nn03374473\nn03374649\nn03374838\nn03375329\nn03375575\nn03376159\nn03376279\nn03376595\nn03376938\nn03378005\nn03378174\nn03379051\nn03379204\nn03379343\nn03379828\nn03380724\nn03380867\nn03381126\nn03382292\nn03382413\nn03382856\nn03383099\nn03384352\nn03384891\nn03385557\nn03386011\nn03386544\nn03386726\nn03386870\nn03387653\nn03388043\nn03388183\nn03388323\nn03388549\nn03389611\nn03389761\nn03389889\nn03390075\nn03390786\nn03390983\nn03391301\nn03392741\nn03393017\nn03393761\nn03393912\nn03394272\nn03394480\nn03394649\nn03394916\nn03395514\nn03395859\nn03396074\nn03396654\nn03397087\nn03397266\nn03397532\nn03397947\nn03398153\nn03398228\nn03399677\nn03399761\nn03399971\nn03400231\nn03401129\nn03401279\nn03402188\nn03402941\nn03403643\nn03404149\nn03404251\nn03404360\nn03405265\nn03405595\nn03405725\nn03406966\nn03407369\nn03407865\nn03408054\nn03408444\nn034
09297\nn03409393\nn03409591\nn03410571\nn03410740\nn03410938\nn03411079\nn03412058\nn03413684\nn03414029\nn03414162\nn03414676\nn03415252\nn03415486\nn03415749\nn03416094\nn03416489\nn03416640\nn03416775\nn03416900\nn03417042\nn03417202\nn03417345\nn03417749\nn03417970\nn03418158\nn03418242\nn03418402\nn03418618\nn03418915\nn03419014\nn03420345\nn03420801\nn03421324\nn03421485\nn03421669\nn03422072\nn03423306\nn03423479\nn03423568\nn03423719\nn03423877\nn03424325\nn03424489\nn03424630\nn03424862\nn03425241\nn03425325\nn03425413\nn03425595\nn03425769\nn03426134\nn03427202\nn03427296\nn03428090\nn03428226\nn03428349\nn03429003\nn03429137\nn03429288\nn03429682\nn03429914\nn03430091\nn03430313\nn03430418\nn03430551\nn03431243\nn03431745\nn03432061\nn03432129\nn03433877\nn03434188\nn03434285\nn03435593\nn03435743\nn03435991\nn03436075\nn03436182\nn03436417\nn03436549\nn03436891\nn03437430\nn03437741\nn03437829\nn03437941\nn03438071\nn03438257\nn03438661\nn03438863\nn03439348\nn03439814\nn03440216\nn03440682\nn03441112\nn03441345\nn03442597\nn03442756\nn03443005\nn03443149\nn03443371\nn03443912\nn03444034\nn03445326\nn03445617\nn03445777\nn03445924\nn03446070\nn03446268\nn03446832\nn03447075\nn03447358\nn03447447\nn03447721\nn03448590\nn03448956\nn03449309\nn03449451\nn03450230\nn03450516\nn03450734\nn03450974\nn03451120\nn03451711\nn03451798\nn03452267\nn03452449\nn03452594\nn03452741\nn03453231\nn03453443\nn03454110\nn03454211\nn03454442\nn03454536\nn03454707\nn03454885\nn03455488\nn03456024\nn03456186\nn03456299\nn03456447\nn03456548\nn03456665\nn03457008\nn03457686\nn03457902\nn03458271\nn03459328\nn03459775\nn03460040\nn03460147\nn03460297\nn03461288\nn03461385\nn03462110\nn03463381\nn03463666\nn03464053\nn03465426\nn03465500\nn03465718\nn03466493\nn03466600\nn03466839\nn03467068\nn03467517\nn03467796\nn03467984\nn03468696\nn03468821\nn03469175\nn03469493\nn03469903\nn03470629\nn03471190\nn03472232\nn03473227\nn03474779\nn03474896\nn03475581\nn03475823\nn03476083\nn0
3476313\nn03476684\nn03476991\nn03477512\nn03478589\nn03478756\nn03478907\nn03479121\nn03479397\nn03480579\nn03480719\nn03481172\nn03482252\nn03482405\nn03482523\nn03482877\nn03483230\nn03483316\nn03483823\nn03484083\nn03484487\nn03484576\nn03484931\nn03485198\nn03485407\nn03485794\nn03487090\nn03487331\nn03487444\nn03487533\nn03487642\nn03487774\nn03487886\nn03488188\nn03488438\nn03489162\nn03490006\nn03490119\nn03490884\nn03491032\nn03492250\nn03492542\nn03492922\nn03494278\nn03494537\nn03494706\nn03495039\nn03495258\nn03495570\nn03496296\nn03496612\nn03496892\nn03497352\nn03497657\nn03498441\nn03498662\nn03498781\nn03498962\nn03499354\nn03499468\nn03499907\nn03500209\nn03500389\nn03500699\nn03501614\nn03502200\nn03502331\nn03502509\nn03503477\nn03503997\nn03504205\nn03504723\nn03505133\nn03505383\nn03505504\nn03505667\nn03506028\nn03506184\nn03506370\nn03506560\nn03506727\nn03506880\nn03507241\nn03507458\nn03507963\nn03508101\nn03509394\nn03509608\nn03510244\nn03511175\nn03511333\nn03512147\nn03513137\nn03513376\nn03514451\nn03514693\nn03514894\nn03516367\nn03516844\nn03516996\nn03517647\nn03517760\nn03517899\nn03518135\nn03518305\nn03518445\nn03518943\nn03519081\nn03519387\nn03520493\nn03521076\nn03521544\nn03521675\nn03521899\nn03522003\nn03522100\nn03523987\nn03524150\nn03524574\nn03525074\nn03525454\nn03527149\nn03527444\nn03527565\nn03528263\nn03528523\nn03528901\nn03529175\nn03529444\nn03529629\nn03529860\nn03530511\nn03530642\nn03530910\nn03531281\nn03532342\nn03532672\nn03532919\nn03533014\nn03534580\nn03534776\nn03535024\nn03535780\nn03536122\nn03537241\nn03537412\nn03538037\nn03538179\nn03538406\nn03538634\nn03539433\nn03539546\nn03539678\nn03540090\nn03540267\nn03540595\nn03540914\nn03541091\nn03541269\nn03541537\nn03541696\nn03541923\nn03542333\nn03542605\nn03542860\nn03543012\nn03543112\nn03543254\nn03543394\nn03543603\nn03543735\nn03543945\nn03544143\nn03544238\nn03544360\nn03545150\nn03545470\nn03545756\nn03546112\nn03546235\nn03546340\nn03547054\n
n03547229\nn03548086\nn03548402\nn03548626\nn03549199\nn03549473\nn03549589\nn03549732\nn03549897\nn03550153\nn03550289\nn03551395\nn03552749\nn03553019\nn03553248\nn03554460\nn03555006\nn03555426\nn03555564\nn03555662\nn03556679\nn03557270\nn03557360\nn03557590\nn03557692\nn03558176\nn03558404\nn03558633\nn03558739\nn03559999\nn03560430\nn03561047\nn03563200\nn03563460\nn03565288\nn03565830\nn03566193\nn03566730\nn03567066\nn03568117\nn03571280\nn03571625\nn03571942\nn03572107\nn03572321\nn03574243\nn03574555\nn03574816\nn03577090\nn03577672\nn03578055\nn03578251\nn03578656\nn03579538\nn03580518\nn03580845\nn03581125\nn03582959\nn03584254\nn03584400\nn03584829\nn03585073\nn03585438\nn03585682\nn03586219\nn03586631\nn03587205\nn03588951\nn03589513\nn03589791\nn03590306\nn03590588\nn03590841\nn03590932\nn03592245\nn03592669\nn03592773\nn03593122\nn03593526\nn03594148\nn03594523\nn03594734\nn03594945\nn03595264\nn03595409\nn03595523\nn03595614\nn03595860\nn03596285\nn03596543\nn03597916\nn03598151\nn03598299\nn03598515\nn03598930\nn03599486\nn03600285\nn03600475\nn03600722\nn03601638\nn03601840\nn03602081\nn03602883\nn03603442\nn03603594\nn03603722\nn03604156\nn03604311\nn03604400\nn03604843\nn03605598\nn03605722\nn03606251\nn03607029\nn03607659\nn03607923\nn03609235\nn03609397\nn03610098\nn03610418\nn03610524\nn03610682\nn03612010\nn03612814\nn03612965\nn03613294\nn03613592\nn03614007\nn03614532\nn03614782\nn03615300\nn03615406\nn03615563\nn03615655\nn03615790\nn03616428\nn03616763\nn03616979\nn03617095\nn03617312\nn03617480\nn03618101\nn03618982\nn03619196\nn03619275\nn03619396\nn03619650\nn03619793\nn03619890\nn03620052\nn03620967\nn03621049\nn03621377\nn03622058\nn03622839\nn03622931\nn03623198\nn03623338\nn03623556\nn03624134\nn03624400\nn03625355\nn03625539\nn03625646\nn03625943\nn03626115\nn03626760\nn03627232\nn03628215\nn03628511\nn03629100\nn03629231\nn03629520\nn03630262\nn03630383\nn03631177\nn03631922\nn03632577\nn03632729\nn03632852\nn03633091\nn03633886
\nn03635032\nn03635108\nn03635330\nn03635668\nn03636248\nn03636649\nn03637181\nn03637318\nn03637898\nn03638883\nn03639077\nn03639497\nn03640850\nn03640988\nn03641569\nn03642444\nn03642806\nn03643149\nn03643253\nn03643491\nn03643737\nn03644378\nn03644858\nn03645011\nn03645577\nn03646020\nn03646148\nn03646296\nn03646916\nn03647520\nn03648431\nn03649161\nn03649674\nn03649797\nn03649909\nn03650551\nn03651388\nn03651843\nn03652100\nn03652729\nn03652932\nn03653110\nn03653220\nn03653583\nn03653740\nn03653833\nn03654576\nn03655072\nn03655720\nn03656484\nn03656957\nn03657121\nn03657511\nn03658185\nn03658858\nn03659292\nn03659686\nn03659809\nn03659950\nn03660124\nn03660909\nn03661043\nn03661340\nn03662601\nn03662719\nn03662887\nn03663531\nn03664943\nn03665366\nn03665924\nn03666362\nn03666591\nn03666917\nn03667552\nn03667664\nn03667829\nn03668067\nn03668279\nn03668488\nn03668803\nn03669886\nn03670208\nn03671914\nn03672827\nn03673027\nn03673450\nn03674440\nn03674731\nn03675235\nn03676087\nn03676483\nn03676623\nn03676759\nn03677115\nn03678558\nn03678729\nn03679384\nn03679712\nn03680355\nn03680512\nn03680734\nn03680858\nn03680942\nn03682487\nn03682877\nn03683079\nn03683457\nn03683606\nn03683708\nn03683995\nn03684143\nn03684224\nn03684611\nn03684823\nn03685820\nn03686130\nn03686924\nn03687137\nn03687928\nn03688192\nn03688405\nn03688605\nn03688943\nn03689157\nn03690473\nn03690938\nn03691459\nn03691817\nn03692379\nn03692522\nn03693293\nn03693474\nn03693707\nn03693860\nn03694639\nn03695857\nn03696065\nn03696301\nn03696568\nn03697007\nn03697552\nn03698360\nn03698604\nn03698723\nn03698815\nn03699591\nn03699975\nn03700963\nn03701391\nn03703730\nn03703862\nn03703945\nn03704549\nn03706229\nn03706653\nn03708036\nn03708843\nn03709206\nn03709363\nn03709823\nn03710193\nn03710637\nn03710721\nn03711044\nn03711999\nn03712111\nn03712337\nn03713436\nn03714235\nn03715114\nn03715386\nn03715669\nn03715892\nn03716887\nn03716966\nn03717131\nn03717285\nn03717447\nn03717622\nn03718212\nn03718335\nn037184
58\nn03718581\nn03718789\nn03718935\nn03719053\nn03719343\nn03719743\nn03720163\nn03720891\nn03721047\nn03721252\nn03721384\nn03721590\nn03722007\nn03722288\nn03723267\nn03723781\nn03724066\nn03724417\nn03724538\nn03724623\nn03724756\nn03724870\nn03725035\nn03725600\nn03725717\nn03726760\nn03726993\nn03727067\nn03727465\nn03727605\nn03727837\nn03727946\nn03728437\nn03729308\nn03729826\nn03730153\nn03730334\nn03730494\nn03730893\nn03731019\nn03731483\nn03731695\nn03732020\nn03732114\nn03732458\nn03733131\nn03733281\nn03733644\nn03733805\nn03733925\nn03735637\nn03735963\nn03736064\nn03736470\nn03736970\nn03738066\nn03738472\nn03739518\nn03742019\nn03742115\nn03743016\nn03743279\nn03743902\nn03744276\nn03744840\nn03745146\nn03745571\nn03746005\nn03746155\nn03746330\nn03746486\nn03748162\nn03749807\nn03751269\nn03751458\nn03751757\nn03752185\nn03753077\nn03757604\nn03758089\nn03759243\nn03759661\nn03759954\nn03760310\nn03760671\nn03760944\nn03761084\nn03762332\nn03762434\nn03762602\nn03763968\nn03764276\nn03764736\nn03764822\nn03765561\nn03766044\nn03766322\nn03766508\nn03766935\nn03767112\nn03767203\nn03767459\nn03767745\nn03767966\nn03768916\nn03769610\nn03769881\nn03770085\nn03770316\nn03770439\nn03770679\nn03770954\nn03772077\nn03772269\nn03772584\nn03773035\nn03773504\nn03774327\nn03774461\nn03775071\nn03775199\nn03775388\nn03775546\nn03775636\nn03775747\nn03775847\nn03776460\nn03777568\nn03777754\nn03778817\nn03779128\nn03781244\nn03781683\nn03781787\nn03782006\nn03782190\nn03782794\nn03783430\nn03784270\nn03784896\nn03785016\nn03785237\nn03785721\nn03786194\nn03786313\nn03786621\nn03786715\nn03786901\nn03787032\nn03787523\nn03788047\nn03788195\nn03788365\nn03788498\nn03788601\nn03788914\nn03789171\nn03789946\nn03790230\nn03790512\nn03790755\nn03790953\nn03791053\nn03791235\nn03792048\nn03792334\nn03792526\nn03792782\nn03792972\nn03793489\nn03793850\nn03794056\nn03794136\nn03794798\nn03795123\nn03795269\nn03795758\nn03795976\nn03796401\nn03796522\nn03796605\nn0379
7182\nn03797264\nn03797390\nn03797896\nn03798061\nn03798442\nn03799876\nn03800933\nn03801353\nn03801533\nn03801671\nn03801760\nn03801880\nn03802007\nn03802393\nn03803284\nn03804744\nn03805180\nn03805280\nn03805725\nn03809312\nn03809603\nn03810952\nn03811295\nn03811444\nn03811847\nn03811965\nn03812924\nn03813078\nn03814639\nn03814817\nn03814906\nn03815149\nn03815482\nn03815615\nn03816005\nn03816136\nn03816530\nn03816849\nn03817191\nn03817647\nn03818343\nn03819336\nn03819448\nn03819595\nn03819994\nn03820318\nn03820728\nn03821518\nn03822171\nn03822504\nn03822656\nn03822767\nn03823111\nn03823216\nn03823312\nn03824381\nn03824713\nn03825080\nn03825788\nn03826039\nn03826186\nn03827536\nn03828020\nn03829954\nn03831382\nn03832144\nn03832673\nn03834040\nn03835197\nn03836062\nn03836451\nn03836906\nn03836976\nn03837422\nn03837606\nn03837698\nn03837869\nn03838298\nn03838899\nn03839424\nn03839671\nn03840681\nn03840823\nn03841143\nn03841666\nn03842012\nn03842156\nn03842377\nn03842986\nn03843438\nn03843555\nn03844045\nn03844233\nn03844673\nn03844815\nn03845190\nn03846100\nn03846234\nn03846431\nn03846677\nn03847471\nn03847823\nn03848168\nn03848348\nn03849679\nn03849814\nn03850053\nn03850245\nn03850492\nn03851787\nn03852280\nn03852688\nn03853924\nn03854065\nn03854421\nn03854506\nn03854722\nn03854815\nn03855214\nn03855333\nn03855604\nn03855756\nn03856012\nn03856465\nn03857687\nn03857828\nn03858085\nn03858183\nn03858418\nn03859000\nn03859170\nn03859280\nn03859495\nn03859608\nn03859958\nn03860404\nn03861271\nn03861430\nn03861842\nn03862676\nn03862862\nn03863108\nn03863262\nn03863923\nn03864356\nn03864692\nn03865371\nn03865557\nn03865949\nn03866082\nn03868242\nn03868406\nn03868643\nn03868863\nn03870105\nn03870672\nn03870980\nn03871083\nn03871371\nn03871524\nn03871628\nn03871724\nn03873416\nn03873699\nn03874138\nn03874293\nn03874487\nn03874599\nn03875218\nn03875806\nn03875955\nn03876231\nn03877351\nn03877472\nn03877674\nn03877845\nn03878066\nn03878211\nn03878963\nn03879705\nn03880323\nn03
880531\nn03882611\nn03882960\nn03883054\nn03883385\nn03883524\nn03884397\nn03884778\nn03884926\nn03885028\nn03885194\nn03885293\nn03885535\nn03885669\nn03885788\nn03885904\nn03886053\nn03886641\nn03886762\nn03887185\nn03887330\nn03887697\nn03888257\nn03888605\nn03889503\nn03889726\nn03889871\nn03890093\nn03890233\nn03890514\nn03891051\nn03891251\nn03891332\nn03891538\nn03892178\nn03892425\nn03892557\nn03894051\nn03894379\nn03894677\nn03895866\nn03896103\nn03896233\nn03896419\nn03896526\nn03897943\nn03898129\nn03898271\nn03898395\nn03898633\nn03899768\nn03899933\nn03900393\nn03900979\nn03901229\nn03901750\nn03902125\nn03902482\nn03902756\nn03903424\nn03903733\nn03903868\nn03904060\nn03904183\nn03904433\nn03904657\nn03904782\nn03904909\nn03905947\nn03906224\nn03906463\nn03906997\nn03908204\nn03908618\nn03908714\nn03909020\nn03909160\nn03909406\nn03911513\nn03911658\nn03911767\nn03911866\nn03912218\nn03913343\nn03914106\nn03914337\nn03914438\nn03914583\nn03914831\nn03915118\nn03915437\nn03915900\nn03916031\nn03916470\nn03916720\nn03917198\nn03917814\nn03918480\nn03918737\nn03919096\nn03919289\nn03919430\nn03920288\nn03920641\nn03920737\nn03920867\nn03923379\nn03923918\nn03924069\nn03924679\nn03926148\nn03927091\nn03927299\nn03927539\nn03928116\nn03928814\nn03929660\nn03929855\nn03930313\nn03930630\nn03931765\nn03931885\nn03933933\nn03934042\nn03934229\nn03934311\nn03934565\nn03934656\nn03935116\nn03935234\nn03935335\nn03936466\nn03937543\nn03937835\nn03937931\nn03938037\nn03938244\nn03938401\nn03938522\nn03938725\nn03939178\nn03939677\nn03939844\nn03940256\nn03941013\nn03941231\nn03941417\nn03941684\nn03942813\nn03942920\nn03943115\nn03943266\nn03943920\nn03944024\nn03944138\nn03944341\nn03946076\nn03946162\nn03947466\nn03947798\nn03947888\nn03948242\nn03948459\nn03948830\nn03948950\nn03949145\nn03949317\nn03950228\nn03950537\nn03950899\nn03952576\nn03953901\nn03954393\nn03954731\nn03955296\nn03955489\nn03956157\nn03956623\nn03956785\nn03956922\nn03957315\nn03957420\nn
03957762\nn03957991\nn03958227\nn03958752\nn03959014\nn03959701\nn03960374\nn03960490\nn03961711\nn03961939\nn03962852\nn03963198\nn03963294\nn03963645\nn03964495\nn03965456\nn03965907\nn03966206\nn03966976\nn03967270\nn03967396\nn03967562\nn03967942\nn03968293\nn03968581\nn03968728\nn03970156\nn03970546\nn03971218\nn03973285\nn03973402\nn03973628\nn03973839\nn03973945\nn03974070\nn03974915\nn03975035\nn03975657\nn03975788\nn03976467\nn03976657\nn03977592\nn03977966\nn03978421\nn03978686\nn03978966\nn03980026\nn03980478\nn03980874\nn03981340\nn03981566\nn03981760\nn03981924\nn03982232\nn03982331\nn03982430\nn03982642\nn03983396\nn03983612\nn03984234\nn03984381\nn03984643\nn03984759\nn03985069\nn03985232\nn03985441\nn03985881\nn03986224\nn03986355\nn03986562\nn03986704\nn03986949\nn03987266\nn03987376\nn03987990\nn03988170\nn03989665\nn03990474\nn03991062\nn03991646\nn03991837\nn03992325\nn03992436\nn03992509\nn03992703\nn03993053\nn03993180\nn03993403\nn03993703\nn03994008\nn03994614\nn03995265\nn03995372\nn03995535\nn03995856\nn03996145\nn03996416\nn03996849\nn03998194\nn03998333\nn03999160\nn03999992\nn04000311\nn04000592\nn04001265\nn04001499\nn04001845\nn04003241\nn04003856\nn04004210\nn04004475\nn04004767\nn04004990\nn04005197\nn04005630\nn04008385\nn04008634\nn04009552\nn04009801\nn04011827\nn04012084\nn04012482\nn04013729\nn04015908\nn04016240\nn04016576\nn04016684\nn04016846\nn04018155\nn04018667\nn04019101\nn04019541\nn04019696\nn04020298\nn04020912\nn04021028\nn04021798\nn04022332\nn04023695\nn04023962\nn04024274\nn04024862\nn04024983\nn04025508\nn04026053\nn04026180\nn04026417\nn04026813\nn04027023\nn04027706\nn04028074\nn04028221\nn04028315\nn04028581\nn04028764\nn04029734\nn04030274\nn04030518\nn04032603\nn04033425\nn04033901\nn04033995\nn04034262\nn04035836\nn04035912\nn04036303\nn04037220\nn04037443\nn04037964\nn04038231\nn04038338\nn04038440\nn04038727\nn04039381\nn04039742\nn04039848\nn04040247\nn04040373\nn04040759\nn04041069\nn04041243\nn04041408\
nn04041544\nn04041747\nn04042358\nn04043411\nn04043733\nn04044307\nn04044498\nn04044716\nn04045255\nn04045397\nn04045644\nn04046091\nn04046277\nn04046400\nn04046590\nn04046974\nn04047401\nn04048441\nn04049303\nn04049405\nn04049585\nn04049753\nn04050066\nn04050313\nn04050933\nn04051549\nn04051825\nn04052442\nn04052658\nn04052757\nn04053508\nn04053677\nn04054361\nn04054670\nn04056180\nn04056413\nn04056932\nn04057047\nn04057981\nn04058096\nn04058239\nn04058594\nn04059157\nn04059516\nn04059947\nn04060647\nn04061681\nn04061793\nn04061969\nn04062428\nn04063154\nn04063373\nn04063868\nn04064401\nn04064747\nn04064862\nn04065272\nn04065464\nn04065789\nn04066270\nn04067472\nn04067658\nn04067818\nn04067921\nn04068441\nn04068601\nn04069276\nn04069434\nn04070003\nn04070207\nn04070415\nn04070727\nn04071263\nn04072193\nn04072551\nn04072960\nn04074185\nn04074963\nn04075291\nn04075715\nn04075916\nn04076284\nn04076713\nn04078574\nn04079244\nn04079933\nn04080138\nn04080454\nn04080705\nn04080833\nn04081281\nn04081699\nn04082562\nn04082710\nn04082886\nn04083309\nn04083800\nn04084889\nn04086273\nn04086446\nn04087432\nn04087709\nn04087826\nn04089376\nn04089666\nn04089836\nn04089976\nn04090263\nn04091097\nn04091693\nn04093625\nn04093775\nn04094720\nn04095109\nn04095210\nn04095342\nn04095577\nn04096066\nn04097373\nn04097760\nn04097866\nn04098513\nn04099003\nn04099175\nn04099429\nn04099969\nn04100519\nn04101701\nn04102037\nn04102162\nn04102285\nn04102406\nn04102618\nn04103094\nn04103206\nn04103364\nn04103665\nn04103769\nn04103918\nn04104147\nn04104384\nn04104500\nn04104770\nn04105068\nn04105704\nn04105893\nn04107743\nn04108268\nn04108822\nn04110178\nn04110955\nn04111190\nn04111414\nn04111531\nn04111668\nn04112147\nn04112252\nn04112430\nn04112579\nn04112654\nn04112752\nn04113194\nn04113316\nn04113406\nn04113641\nn04113765\nn04114844\nn04115144\nn04115256\nn04115456\nn04115802\nn04115996\nn04116098\nn04116294\nn04116512\nn04117464\nn04118021\nn04118538\nn04118635\nn04118776\nn04119091\nn0411923
0\nn04119360\nn04119478\nn04119751\nn04120489\nn04120842\nn04121426\nn04121511\nn04121728\nn04122349\nn04122492\nn04122578\nn04122685\nn04122825\nn04123026\nn04123448\nn04123567\nn04123740\nn04124098\nn04124202\nn04124370\nn04124488\nn04125021\nn04125257\nn04125853\nn04126066\nn04127249\nn04127395\nn04127521\nn04127633\nn04127904\nn04128413\nn04128499\nn04128710\nn04128837\nn04130143\nn04130257\nn04130907\nn04131208\nn04131368\nn04131690\nn04131929\nn04132158\nn04132603\nn04132985\nn04133789\nn04134008\nn04134523\nn04134632\nn04135024\nn04135118\nn04135315\nn04135710\nn04136045\nn04136161\nn04136333\nn04136510\nn04136800\nn04137089\nn04137217\nn04137355\nn04137444\nn04137773\nn04137897\nn04138261\nn04138977\nn04139140\nn04139395\nn04139859\nn04140064\nn04140631\nn04141076\nn04141198\nn04141327\nn04141712\nn04141838\nn04141975\nn04142434\nn04142731\nn04142999\nn04143140\nn04143897\nn04144241\nn04144539\nn04145863\nn04146050\nn04146343\nn04146504\nn04146614\nn04146862\nn04147183\nn04147793\nn04148054\nn04148579\nn04148703\nn04149083\nn04149813\nn04150153\nn04150980\nn04152593\nn04153025\nn04153751\nn04154152\nn04154340\nn04154565\nn04154938\nn04155068\nn04156140\nn04156946\nn04157320\nn04158807\nn04158956\nn04160372\nn04160586\nn04160847\nn04161358\nn04161981\nn04162433\nn04162706\nn04163530\nn04164406\nn04164757\nn04164868\nn04165409\nn04166281\nn04167346\nn04168199\nn04169437\nn04170037\nn04170933\nn04171208\nn04171459\nn04171629\nn04171831\nn04172107\nn04172342\nn04172776\nn04172904\nn04173046\nn04173511\nn04173907\nn04174101\nn04175039\nn04175147\nn04176068\nn04176190\nn04176295\nn04177041\nn04177755\nn04177820\nn04177931\nn04178190\nn04178329\nn04179712\nn04179824\nn04179913\nn04180063\nn04180229\nn04180888\nn04181228\nn04181561\nn04182152\nn04182322\nn04183217\nn04183329\nn04184316\nn04184435\nn04184880\nn04185071\nn04185529\nn04185804\nn04185946\nn04186051\nn04186268\nn04186455\nn04186848\nn04187061\nn04187233\nn04187547\nn04187970\nn04188179\nn04189282\nn04189
651\nn04189816\nn04190052\nn04190376\nn04190997\nn04191595\nn04191943\nn04192238\nn04192698\nn04192858\nn04193377\nn04194127\nn04194289\nn04196502\nn04197110\nn04197391\nn04197781\nn04198355\nn04198453\nn04198562\nn04198722\nn04198797\nn04199027\nn04200000\nn04200258\nn04200537\nn04200800\nn04201064\nn04201297\nn04201733\nn04202417\nn04204081\nn04204238\nn04204347\nn04205318\nn04205505\nn04206225\nn04206356\nn04206570\nn04206790\nn04207151\nn04207343\nn04207596\nn04207763\nn04207903\nn04208065\nn04208210\nn04208427\nn04208760\nn04208936\nn04209133\nn04209239\nn04209509\nn04209613\nn04210120\nn04210390\nn04211219\nn04211356\nn04211528\nn04211857\nn04211970\nn04212165\nn04212282\nn04212467\nn04213353\nn04214046\nn04214282\nn04215153\nn04215402\nn04216634\nn04216860\nn04216963\nn04217546\nn04217882\nn04218564\nn04219185\nn04219424\nn04220250\nn04221823\nn04222210\nn04222307\nn04222470\nn04222723\nn04223299\nn04224543\nn04224842\nn04225031\nn04225729\nn04225987\nn04226464\nn04226826\nn04227144\nn04227900\nn04228054\nn04228215\nn04228581\nn04228693\nn04229107\nn04229480\nn04229737\nn04229816\nn04230603\nn04230808\nn04231272\nn04231693\nn04231905\nn04232153\nn04232800\nn04233124\nn04233715\nn04234455\nn04234887\nn04235291\nn04235860\nn04236377\nn04236809\nn04236935\nn04237423\nn04238128\nn04238321\nn04238617\nn04238763\nn04239074\nn04239436\nn04239786\nn04240752\nn04241249\nn04241573\nn04242408\nn04243546\nn04243941\nn04244379\nn04244997\nn04245508\nn04246060\nn04246271\nn04246731\nn04246855\nn04247011\nn04247630\nn04247736\nn04247876\nn04248396\nn04248507\nn04248851\nn04249415\nn04249582\nn04249882\nn04250224\nn04250473\nn04250692\nn04250850\nn04251144\nn04251701\nn04251791\nn04252077\nn04252225\nn04252331\nn04252560\nn04252653\nn04253057\nn04253168\nn04253931\nn04254009\nn04254120\nn04254680\nn04254777\nn04255163\nn04255586\nn04255899\nn04256520\nn04256891\nn04257223\nn04257684\nn04257790\nn04257986\nn04258138\nn04258333\nn04258438\nn04258618\nn04258732\nn04258859\nn042
59630\nn04260364\nn04261281\nn04261638\nn04262161\nn04263257\nn04263336\nn04263502\nn04264628\nn04264765\nn04264914\nn04265275\nn04265904\nn04266014\nn04266162\nn04266375\nn04266486\nn04266968\nn04267435\nn04269270\nn04269822\nn04269944\nn04270147\nn04270371\nn04270891\nn04271531\nn04272054\nn04272389\nn04272928\nn04273285\nn04273569\nn04273659\nn04273796\nn04273972\nn04274985\nn04275175\nn04275548\nn04275661\nn04277352\nn04277493\nn04277826\nn04278247\nn04278353\nn04278447\nn04279172\nn04279353\nn04279462\nn04281260\nn04281375\nn04282494\nn04282872\nn04282992\nn04283096\nn04283255\nn04283378\nn04283585\nn04283905\nn04284002\nn04284341\nn04284438\nn04284572\nn04284869\nn04285008\nn04285146\nn04285803\nn04285965\nn04286575\nn04287747\nn04287898\nn04288533\nn04289027\nn04289195\nn04289576\nn04289690\nn04289827\nn04290079\nn04290259\nn04290507\nn04290615\nn04292414\nn04292572\nn04292921\nn04293119\nn04294426\nn04294614\nn04294879\nn04295081\nn04295571\nn04295881\nn04296562\nn04297098\nn04297750\nn04297847\nn04298661\nn04299215\nn04299370\nn04299963\nn04300643\nn04301000\nn04301760\nn04303357\nn04303497\nn04304375\nn04304680\nn04305210\nn04305323\nn04305572\nn04306080\nn04306592\nn04306847\nn04307767\nn04307986\nn04308084\nn04308273\nn04308397\nn04309049\nn04309348\nn04309548\nn04309833\nn04310018\nn04310157\nn04310904\nn04311004\nn04311174\nn04311595\nn04312154\nn04312432\nn04313503\nn04313628\nn04314914\nn04315342\nn04315948\nn04316498\nn04317063\nn04317175\nn04317325\nn04317420\nn04317833\nn04317976\nn04318787\nn04318892\nn04319937\nn04320973\nn04321453\nn04322026\nn04322801\nn04323819\nn04324297\nn04324387\nn04325041\nn04325704\nn04326547\nn04326676\nn04326799\nn04326896\nn04327204\nn04327682\nn04328186\nn04328329\nn04328946\nn04329834\nn04329958\nn04330267\nn04330340\nn04330746\nn04330998\nn04331277\nn04331639\nn04332074\nn04332243\nn04332580\nn04333129\nn04333869\nn04334105\nn04334365\nn04334599\nn04335209\nn04335435\nn04335693\nn04335886\nn04336792\nn04337287\nn0
4338517\nn04338963\nn04339879\nn04340521\nn04340750\nn04340935\nn04341686\nn04344003\nn04344734\nn04344873\nn04345028\nn04345201\nn04346157\nn04346328\nn04346428\nn04347119\nn04347519\nn04347754\nn04348359\nn04349306\nn04349401\nn04350458\nn04350581\nn04350769\nn04350905\nn04351699\nn04353573\nn04354026\nn04354182\nn04354487\nn04354589\nn04355267\nn04355338\nn04355511\nn04355933\nn04356056\nn04356595\nn04356925\nn04357121\nn04357314\nn04357531\nn04358117\nn04358491\nn04358707\nn04358874\nn04359500\nn04360798\nn04360914\nn04361095\nn04361260\nn04363777\nn04363991\nn04364160\nn04364545\nn04365328\nn04366033\nn04366116\nn04366367\nn04367011\nn04367371\nn04367480\nn04367746\nn04367950\nn04368496\nn04369025\nn04369282\nn04370048\nn04370288\nn04370456\nn04370774\nn04371050\nn04371430\nn04371563\nn04371774\nn04372370\nn04373089\nn04373428\nn04373704\nn04373795\nn04373894\nn04374315\nn04374735\nn04375241\nn04375405\nn04375615\nn04376400\nn04376876\nn04377057\nn04378956\nn04379243\nn04379964\nn04380255\nn04380346\nn04380533\nn04380916\nn04381073\nn04381587\nn04381724\nn04381860\nn04381994\nn04382438\nn04382695\nn04382880\nn04383015\nn04383130\nn04383839\nn04384593\nn04384910\nn04385536\nn04385799\nn04386051\nn04386664\nn04386792\nn04387095\nn04387201\nn04387261\nn04387400\nn04387706\nn04387932\nn04388743\nn04389033\nn04389430\nn04389521\nn04389718\nn04389854\nn04390577\nn04390873\nn04390977\nn04391445\nn04391838\nn04392113\nn04392526\nn04392764\nn04392985\nn04393095\nn04393549\nn04393808\nn04394630\nn04395024\nn04395106\nn04395651\nn04396808\nn04396902\nn04397027\nn04397452\nn04397645\nn04397768\nn04398044\nn04398497\nn04398688\nn04398834\nn04398951\nn04399158\nn04399537\nn04399846\nn04400289\nn04400737\nn04401088\nn04401578\nn04401680\nn04401828\nn04401949\nn04402057\nn04402449\nn04402580\nn04402746\nn04402984\nn04403413\nn04403524\nn04403638\nn04403925\nn04404412\nn04404817\nn04404997\nn04405540\nn04405762\nn04405907\nn04406239\nn04406817\nn04407435\nn04407686\nn04408871\n
n04409011\nn04409128\nn04409384\nn04409515\nn04409625\nn04409806\nn04410086\nn04411264\nn04412097\nn04412416\nn04413969\nn04414199\nn04414319\nn04414476\nn04414675\nn04414909\nn04415663\nn04416005\nn04417086\nn04417180\nn04417672\nn04417809\nn04418357\nn04419073\nn04419642\nn04419868\nn04421872\nn04422409\nn04422727\nn04422875\nn04423845\nn04424692\nn04425804\nn04426316\nn04426427\nn04427715\nn04428191\nn04428634\nn04429376\nn04430475\nn04430896\nn04431025\nn04431745\nn04432203\nn04432662\nn04433585\nn04434207\nn04434531\nn04434932\nn04435180\nn04435653\nn04436012\nn04436185\nn04436329\nn04437953\nn04438304\nn04438507\nn04438897\nn04439585\nn04439712\nn04440963\nn04441662\nn04441790\nn04442312\nn04442441\nn04442741\nn04443164\nn04443257\nn04443766\nn04444749\nn04445040\nn04445154\nn04445327\nn04445952\nn04446276\nn04446844\nn04447028\nn04447276\nn04447443\nn04447861\nn04448070\nn04448361\nn04449290\nn04449966\nn04450133\nn04450243\nn04450640\nn04450749\nn04450994\nn04451318\nn04451818\nn04452528\nn04452615\nn04452757\nn04453037\nn04453156\nn04453390\nn04453666\nn04454908\nn04455250\nn04455652\nn04456115\nn04457474\nn04457767\nn04457910\nn04458633\nn04458843\nn04459018\nn04459362\nn04459610\nn04459773\nn04459909\nn04460130\nn04461437\nn04461570\nn04461696\nn04461879\nn04462011\nn04462240\nn04463679\nn04464615\nn04464852\nn04465050\nn04465358\nn04465501\nn04465666\nn04466871\nn04467099\nn04467307\nn04467665\nn04468005\nn04469003\nn04469514\nn04469813\nn04471148\nn04471632\nn04472563\nn04473108\nn04474035\nn04474187\nn04474466\nn04475411\nn04475631\nn04476116\nn04476259\nn04476831\nn04476972\nn04477219\nn04477387\nn04477548\nn04478512\nn04479046\nn04479823\nn04479939\nn04480033\nn04480853\nn04482177\nn04482297\nn04482393\nn04483073\nn04483307\nn04483925\nn04484432\nn04485082\nn04485423\nn04485884\nn04486054\nn04486213\nn04486934\nn04487081\nn04487394\nn04487724\nn04488202\nn04488427\nn04488530\nn04488742\nn04488857\nn04489008\nn04489695\nn04489817\nn04490091\nn04491388
\nn04491638\nn04491769\nn04492060\nn04492375\nn04492749\nn04493381\nn04494204\nn04495698\nn04495843\nn04496614\nn04496726\nn04496872\nn04497442\nn04497570\nn04497801\nn04498389\nn04499062\nn04499446\nn04500060\nn04501370\nn04501550\nn04501947\nn04502059\nn04502197\nn04502502\nn04502670\nn04502851\nn04503413\nn04503593\nn04504141\nn04505036\nn04505470\nn04506289\nn04506506\nn04506688\nn04507155\nn04508163\nn04508489\nn04508949\nn04509171\nn04509260\nn04509417\nn04510706\nn04511002\nn04513827\nn04513998\nn04514241\nn04515003\nn04516116\nn04516214\nn04516354\nn04516672\nn04517211\nn04517408\nn04517823\nn04518132\nn04518343\nn04518643\nn04518764\nn04519153\nn04520170\nn04520382\nn04520784\nn04521863\nn04522168\nn04523525\nn04523831\nn04524142\nn04524313\nn04524941\nn04525038\nn04525191\nn04525305\nn04525417\nn04525584\nn04525821\nn04526964\nn04527648\nn04528079\nn04528968\nn04529108\nn04529681\nn04529962\nn04530283\nn04530566\nn04531098\nn04531873\nn04532106\nn04532398\nn04532670\nn04532831\nn04533199\nn04533499\nn04533594\nn04533700\nn04533802\nn04533946\nn04534127\nn04534359\nn04534520\nn04534895\nn04535370\nn04535524\nn04536153\nn04536335\nn04536595\nn04536866\nn04538552\nn04539203\nn04539794\nn04540053\nn04540255\nn04541320\nn04541987\nn04542715\nn04542858\nn04542943\nn04543158\nn04543636\nn04543772\nn04543996\nn04544325\nn04544450\nn04545305\nn04545748\nn04545858\nn04546194\nn04546340\nn04547592\nn04548280\nn04548362\nn04549028\nn04549122\nn04549629\nn04549919\nn04550184\nn04551055\nn04552348\nn04552696\nn04553561\nn04553703\nn04554211\nn04554406\nn04554684\nn04554871\nn04555291\nn04555400\nn04555600\nn04555700\nn04555897\nn04556408\nn04556533\nn04556948\nn04557648\nn04557751\nn04558478\nn04559166\nn04559451\nn04559730\nn04559910\nn04560113\nn04560292\nn04560804\nn04560882\nn04561287\nn04561422\nn04561734\nn04562262\nn04562496\nn04562935\nn04563204\nn04563413\nn04564278\nn04564581\nn04565375\nn04566257\nn04566561\nn04566756\nn04568069\nn04568557\nn04568841\nn045690
63\nn04569822\nn04570214\nn04570815\nn04571292\nn04571566\nn04571686\nn04571958\nn04573281\nn04573513\nn04573937\nn04574067\nn04574999\nn04575723\nn04575824\nn04576002\nn04576211\nn04577769\nn04578934\nn04579056\nn04579145\nn04579230\nn04579432\nn04579667\nn04579986\nn04580493\nn04581102\nn04581829\nn04582205\nn04582349\nn04582771\nn04582869\nn04583212\nn04583620\nn04584207\nn04584373\nn04585128\nn04585745\nn04585980\nn04586072\nn04586581\nn04586932\nn04587327\nn04587404\nn04587559\nn04587648\nn04588739\nn04589190\nn04589325\nn04589593\nn04589890\nn04590021\nn04590129\nn04590263\nn04590553\nn04590746\nn04590933\nn04591157\nn04591517\nn04591713\nn04591887\nn04592005\nn04592099\nn04592465\nn04592741\nn04593077\nn04593185\nn04593376\nn04593524\nn04593866\nn04594218\nn04594489\nn04594828\nn04595028\nn04595285\nn04595855\nn04596742\nn04596852\nn04597309\nn04597400\nn04597804\nn04597913\nn04598318\nn04598582\nn04598965\nn04599124\nn04599235\nn04600312\nn04600912\nn04602762\nn04602956\nn04603399\nn04603729\nn04603872\nn04604644\nn04605163\nn04605321\nn04605572\nn04605726\nn04606251\nn04606574\nn04607035\nn04607242\nn04607869\nn04608329\nn04608435\nn04608567\nn04608923\nn04609531\nn04609651\nn04610013\nn04610274\nn04610503\nn04610676\nn04612026\nn04612373\nn04612504\nn04613015\nn04613696\nn04613939\nn04614655\nn04615226\nn04615644\nn04950952\nn04951071\nn04951186\nn04953296\nn04955160\nn04959672\nn04960277\nn04960582\nn04961062\nn04961331\nn04961691\nn04962062\nn04962240\nn04963307\nn04963588\nn04963740\nn04964001\nn04964799\nn04964878\nn04965179\nn04965451\nn04965661\nn04966543\nn04966941\nn04967191\nn04967674\nn04967801\nn04967882\nn04968056\nn04968139\nn04968749\nn04968895\nn04969242\nn04969540\nn04969798\nn04969952\nn04970059\nn04970398\nn04970470\nn04970916\nn04971211\nn04971313\nn04972350\nn04972451\nn04972801\nn04973291\nn04973386\nn04973585\nn04973816\nn04974859\nn04976319\nn04976952\nn04977412\nn04979002\nn04981658\nn05218119\nn05238282\nn05239437\nn05242928\nn0524
4934\nn05245192\nn05258051\nn05259914\nn05260127\nn05260240\nn05261310\nn05262422\nn05262534\nn05263183\nn05263448\nn05282652\nn05302499\nn05399034\nn05399243\nn05418717\nn05450617\nn05451384\nn05453657\nn05486510\nn05526957\nn05538625\nn05578095\nn05581932\nn05586759\nn05716342\nn06255081\nn06263609\nn06266633\nn06266973\nn06267145\nn06267564\nn06267655\nn06267758\nn06267893\nn06267991\nn06271778\nn06272290\nn06272612\nn06272803\nn06273414\nn06273555\nn06273743\nn06273986\nn06274760\nn06275095\nn06275353\nn06275471\nn06276501\nn06276697\nn06277135\nn06277280\nn06278338\nn06278475\nn06281040\nn06359193\nn06359467\nn06415688\nn06417096\nn06470073\nn06592281\nn06595351\nn06596364\nn06596474\nn06596607\nn06596727\nn06785654\nn06793231\nn06794110\nn06874185\nn06883725\nn06892775\nn06998748\nn07005523\nn07248320\nn07273802\nn07461050\nn07556406\nn07556637\nn07556970\nn07557434\nn07560193\nn07560331\nn07560542\nn07560652\nn07560903\nn07561112\nn07561590\nn07561848\nn07562495\nn07563207\nn07564971\nn07565083\nn07565161\nn07565259\nn07566340\nn07567707\nn07568502\nn07568818\nn07569106\nn07569644\nn07570720\nn07572616\nn07572957\nn07573347\nn07573696\nn07574176\nn07574426\nn07574504\nn07574602\nn07574780\nn07574923\nn07575076\nn07575392\nn07575510\nn07575726\nn07575984\nn07576182\nn07576438\nn07576781\nn07577144\nn07577374\nn07577538\nn07578093\nn07579575\nn07579688\nn07579787\nn07579917\nn07580053\nn07580253\nn07580359\nn07580470\nn07580592\nn07581249\nn07581346\nn07581775\nn07581931\nn07582152\nn07582277\nn07582609\nn07582892\nn07583066\nn07584110\nn07584332\nn07584423\nn07584593\nn07585107\nn07585208\nn07585557\nn07585758\nn07585906\nn07586099\nn07586318\nn07586604\nn07586718\nn07586894\nn07587023\nn07587111\nn07587331\nn07587441\nn07587618\nn07587700\nn07587962\nn07588111\nn07588193\nn07588299\nn07588419\nn07588574\nn07588817\nn07588947\nn07590320\nn07590502\nn07590611\nn07590752\nn07591049\nn07591473\nn07591586\nn07591961\nn07592094\nn07592481\nn07592768\nn07593004\nn07
593199\nn07593471\nn07594066\nn07595649\nn07595914\nn07596684\nn07596967\nn07597145\nn07597365\nn07598256\nn07598734\nn07599911\nn07599998\nn07600177\nn07600285\nn07600696\nn07601290\nn07601572\nn07601686\nn07601809\nn07604956\nn07605040\nn07605380\nn07605474\nn07605597\nn07605804\nn07605944\nn07606538\nn07606669\nn07606764\nn07607138\nn07607605\nn07607967\nn07608098\nn07608339\nn07608429\nn07608866\nn07609215\nn07609407\nn07609632\nn07609840\nn07610620\nn07611046\nn07611148\nn07611267\nn07611358\nn07611839\nn07611991\nn07612137\nn07612367\nn07612632\nn07612996\nn07613266\nn07613480\nn07613815\nn07614198\nn07614500\nn07614730\nn07614825\nn07615190\nn07615289\nn07615460\nn07615569\nn07615671\nn07615774\nn07616046\nn07616386\nn07616487\nn07616590\nn07616748\nn07617051\nn07617611\nn07617708\nn07617932\nn07618119\nn07618432\nn07619004\nn07619208\nn07619409\nn07620689\nn07621618\nn07623136\nn07624466\nn07625061\nn07627931\nn07628068\nn07631926\nn07639069\nn07641928\nn07642361\nn07642471\nn07642742\nn07642933\nn07643026\nn07643200\nn07643306\nn07643891\nn07643981\nn07648913\nn07648997\nn07650903\nn07651025\nn07654148\nn07654298\nn07655263\nn07665438\nn07666176\nn07678729\nn07679034\nn07679356\nn07680313\nn07680517\nn07680761\nn07680932\nn07681450\nn07681691\nn07682197\nn07682316\nn07682477\nn07682624\nn07682808\nn07682952\nn07683039\nn07683360\nn07683490\nn07683617\nn07683786\nn07684084\nn07684164\nn07684289\nn07684517\nn07684600\nn07684938\nn07685031\nn07685218\nn07685399\nn07685546\nn07685730\nn07685918\nn07686021\nn07686202\nn07686720\nn07686873\nn07687053\nn07687211\nn07687381\nn07687469\nn07687626\nn07687789\nn07688624\nn07688898\nn07689003\nn07689842\nn07690019\nn07690152\nn07690273\nn07690431\nn07690511\nn07690585\nn07690739\nn07690892\nn07691091\nn07691237\nn07691539\nn07691650\nn07691758\nn07691954\nn07692614\nn07693048\nn07693223\nn07693590\nn07693725\nn07693972\nn07694403\nn07694516\nn07694659\nn07694839\nn07695652\nn07695742\nn07695878\nn07695965\nn07696403\nn
07696527\nn07696625\nn07696728\nn07696839\nn07696977\nn07697100\nn07697313\nn07697537\nn07697699\nn07697825\nn07698250\nn07698401\nn07698543\nn07698672\nn07698782\nn07700003\nn07704054\nn07704205\nn07705931\nn07707451\nn07708124\nn07708398\nn07708685\nn07709046\nn07709172\nn07709333\nn07710283\nn07710616\nn07710952\nn07711080\nn07711232\nn07711371\nn07711569\nn07712063\nn07712267\nn07712382\nn07712559\nn07712748\nn07712856\nn07712959\nn07713074\nn07713267\nn07713395\nn07713763\nn07713895\nn07714078\nn07714188\nn07714287\nn07714448\nn07714571\nn07714802\nn07714895\nn07714990\nn07715103\nn07715221\nn07715407\nn07715561\nn07715721\nn07716034\nn07716203\nn07716358\nn07716906\nn07717070\nn07717410\nn07717556\nn07718472\nn07718747\nn07719213\nn07719616\nn07719839\nn07720277\nn07720442\nn07720615\nn07720875\nn07721018\nn07721195\nn07721325\nn07721456\nn07721678\nn07721942\nn07722052\nn07722217\nn07722485\nn07722888\nn07723039\nn07723177\nn07723330\nn07723559\nn07723968\nn07724269\nn07724492\nn07724654\nn07724943\nn07725255\nn07725376\nn07725531\nn07725789\nn07725888\nn07726095\nn07726525\nn07726672\nn07726796\nn07727048\nn07727458\nn07727578\nn07727868\nn07728053\nn07728181\nn07728585\nn07728708\nn07729384\nn07729485\nn07729828\nn07729926\nn07730033\nn07730207\nn07730320\nn07730406\nn07730708\nn07730855\nn07731006\nn07731284\nn07731587\nn07731767\nn07731952\nn07732168\nn07732636\nn07732747\nn07732904\nn07733394\nn07733567\nn07733712\nn07734017\nn07734183\nn07734292\nn07734417\nn07734555\nn07734744\nn07734879\nn07735404\nn07735510\nn07735687\nn07735803\nn07736087\nn07736256\nn07736371\nn07736692\nn07736813\nn07737745\nn07739125\nn07739344\nn07739506\nn07740033\nn07740220\nn07740342\nn07740461\nn07740597\nn07740954\nn07741138\nn07741461\nn07742012\nn07742313\nn07742704\nn07743224\nn07743544\nn07743902\nn07744057\nn07744246\nn07744430\nn07744682\nn07744811\nn07745046\nn07745466\nn07745940\nn07746186\nn07746334\nn07746551\nn07747055\nn07747607\nn07747951\nn07748157\nn07748276\
nn07748416\nn07748574\nn07748753\nn07748912\nn07749192\nn07749312\nn07749446\nn07749582\nn07749731\nn07749969\nn07750146\nn07750449\nn07750736\nn07750872\nn07751004\nn07751148\nn07751280\nn07751451\nn07752109\nn07752377\nn07752514\nn07752966\nn07753113\nn07753275\nn07753592\nn07753743\nn07753980\nn07754451\nn07754684\nn07754894\nn07755089\nn07755411\nn07755707\nn07755929\nn07756325\nn07756951\nn07757132\nn07757312\nn07757511\nn07757990\nn07758680\nn07759194\nn07759816\nn07760153\nn07760859\nn07761141\nn07761309\nn07762114\nn07762244\nn07762740\nn07762913\nn07763107\nn07763629\nn07763792\nn07763987\nn07764155\nn07764315\nn07764630\nn07764847\nn07765073\nn07765208\nn07765361\nn07765862\nn07765999\nn07766173\nn07766891\nn07767002\nn07767171\nn07767344\nn07767549\nn07767709\nn07767847\nn07768068\nn07768230\nn07768423\nn07768694\nn07768858\nn07769584\nn07769731\nn07770034\nn07770763\nn07771212\nn07771731\nn07772147\nn07772274\nn07772788\nn07772935\nn07774596\nn07774719\nn07774842\nn07775050\nn07775197\nn07800740\nn07801091\nn07801342\nn07801508\nn07801779\nn07801892\nn07802026\nn07802417\nn07802863\nn07802963\nn07803093\nn07803545\nn07804323\nn07804543\nn07804657\nn07804771\nn07804900\nn07805594\nn07805731\nn07806120\nn07806221\nn07806633\nn07806774\nn07807002\nn07807171\nn07807472\nn07807594\nn07807710\nn07807834\nn07807922\nn07808587\nn07808904\nn07809096\nn07810907\nn07812184\nn07814203\nn07814390\nn07814487\nn07814634\nn07815424\nn07815588\nn07816052\nn07816164\nn07816296\nn07816398\nn07816575\nn07816839\nn07817024\nn07817160\nn07817315\nn07817871\nn07818277\nn07818572\nn07818689\nn07818825\nn07818995\nn07819166\nn07819769\nn07819896\nn07820145\nn07820497\nn07820683\nn07821260\nn07821758\nn07821919\nn07822197\nn07822323\nn07822518\nn07822845\nn07823105\nn07823280\nn07823460\nn07823698\nn07823951\nn07824191\nn07824702\nn07825194\nn07825972\nn07826091\nn07826453\nn07826930\nn07827130\nn07827284\nn07827410\nn07827750\nn07828642\nn07829248\nn07829331\nn07829412\nn0783059
3\nn07831146\nn07831267\nn07832416\nn07832902\nn07834065\nn07834507\nn07834618\nn07834872\nn07835331\nn07835457\nn07835921\nn07836838\nn07837002\nn07837362\nn07837912\nn07838073\nn07838233\nn07838441\nn07838551\nn07840027\nn07840804\nn07841345\nn07841495\nn07841639\nn07841800\nn07841907\nn07842044\nn07842130\nn07842202\nn07842308\nn07842433\nn07842605\nn07842753\nn07843464\nn07843636\nn07843775\nn07844042\nn07844867\nn07845087\nn07845702\nn07846143\nn07847198\nn07847453\nn07847827\nn07847917\nn07848093\nn07848196\nn07848338\nn07849336\nn07849619\nn07849733\nn07849912\nn07850083\nn07850329\nn07851298\nn07851443\nn07851554\nn07851641\nn07851767\nn07852229\nn07852614\nn07852833\nn07854184\nn07854982\nn07855510\nn07855907\nn07857170\nn07857731\nn07858978\nn07859284\nn07859583\nn07859796\nn07860103\nn07860331\nn07860447\nn07860805\nn07860988\nn07861158\nn07861557\nn07861813\nn07862095\nn07862244\nn07862348\nn07862461\nn07862611\nn07863374\nn07863547\nn07863802\nn07864756\nn07864934\nn07865105\nn07865196\nn07865484\nn07866015\nn07866151\nn07866277\nn07866409\nn07866723\nn07866868\nn07867021\nn07867164\nn07867324\nn07867421\nn07867616\nn07867751\nn07868200\nn07868340\nn07868508\nn07868830\nn07868955\nn07869522\nn07869611\nn07869775\nn07870069\nn07870167\nn07870313\nn07871234\nn07871436\nn07871720\nn07871810\nn07872593\nn07873057\nn07873348\nn07873464\nn07873807\nn07874063\nn07874159\nn07874259\nn07874343\nn07874441\nn07874780\nn07875152\nn07875436\nn07875693\nn07876651\nn07877187\nn07877299\nn07877675\nn07877849\nn07877961\nn07878647\nn07878785\nn07878926\nn07879072\nn07879174\nn07879350\nn07879450\nn07879659\nn07879953\nn07880080\nn07880213\nn07880325\nn07880458\nn07880751\nn07880880\nn07880968\nn07881205\nn07881404\nn07881800\nn07882497\nn07883031\nn07883251\nn07884567\nn07885705\nn07886057\nn07886176\nn07886463\nn07886572\nn07886849\nn07887099\nn07887192\nn07887304\nn07887461\nn07887634\nn07887967\nn07888229\nn07888465\nn07888816\nn07889274\nn07889510\nn07889814\nn07890
068\nn07890226\nn07890352\nn07890540\nn07890750\nn07891189\nn07891309\nn07891433\nn07891726\nn07892418\nn07892512\nn07892813\nn07893253\nn07893528\nn07893642\nn07893891\nn07894102\nn07894298\nn07894451\nn07894551\nn07894703\nn07894799\nn07894965\nn07895100\nn07895237\nn07895435\nn07895595\nn07895710\nn07895839\nn07895962\nn07896060\nn07896165\nn07896287\nn07896661\nn07896893\nn07896994\nn07897116\nn07897438\nn07897600\nn07897750\nn07897865\nn07897975\nn07898117\nn07898247\nn07898333\nn07898443\nn07898617\nn07898745\nn07899003\nn07899108\nn07899292\nn07899434\nn07899533\nn07899660\nn07899769\nn07899899\nn07900225\nn07900406\nn07900616\nn07900734\nn07900825\nn07900958\nn07901355\nn07901457\nn07901587\nn07902121\nn07902336\nn07902443\nn07902799\nn07902937\nn07903101\nn07903208\nn07903543\nn07903643\nn07903731\nn07903841\nn07903962\nn07904395\nn07904637\nn07904760\nn07904865\nn07904934\nn07905038\nn07905296\nn07905386\nn07905474\nn07905979\nn07906111\nn07906284\nn07906572\nn07906718\nn07906877\nn07907037\nn07907161\nn07907342\nn07907429\nn07907548\nn07907831\nn07907943\nn07908411\nn07908567\nn07908647\nn07908812\nn07909129\nn07909593\nn07910048\nn07910152\nn07910379\nn07910538\nn07910656\nn07911249\nn07911371\nn07911677\nn07912211\nn07913393\nn07913882\nn07914006\nn07914128\nn07914271\nn07914413\nn07914586\nn07914777\nn07914995\nn07915094\nn07915491\nn07915618\nn07915918\nn07916041\nn07916183\nn07916319\nn07917133\nn07917272\nn07917392\nn07917507\nn07917618\nn07918028\nn07918193\nn07918879\nn07919310\nn07919441\nn07919572\nn07920052\nn07920222\nn07920349\nn07920540\nn07920663\nn07920872\nn07920989\nn07921239\nn07921455\nn07921615\nn07922512\nn07922764\nn07923748\nn07924033\nn07924276\nn07924443\nn07924560\nn07924747\nn07924834\nn07924955\nn07925116\nn07925229\nn07925500\nn07925608\nn07925966\nn07926250\nn07926920\nn07927197\nn07927512\nn07927931\nn07928163\nn07928367\nn07928488\nn07928696\nn07928790\nn07928887\nn07929172\nn07929351\nn07929519\nn07930062\nn07930315\nn079
30433\nn07930554\nn07930864\nn07931452\nn07931612\nn07931870\nn07932039\nn07932841\nn07933154\nn07933274\nn07933799\nn07934282\nn07935043\nn07935379\nn07935504\nn07935737\nn07935878\nn07936263\nn07936548\nn07936745\nn07937461\nn07938007\nn07938149\nn07938313\nn07942152\nn07951464\nn07954211\nn07977870\nn08182379\nn08242223\nn08249459\nn08256735\nn08376250\nn08492461\nn08494231\nn08495908\nn08505018\nn08517676\nn08518171\nn08521623\nn08524735\nn08539072\nn08547468\nn08547544\nn08551296\nn08555710\nn08560295\nn08571898\nn08573842\nn08578517\nn08579352\nn08580944\nn08583292\nn08583455\nn08584914\nn08596076\nn08598301\nn08598568\nn08611339\nn08614632\nn08616050\nn08628141\nn08633683\nn08640531\nn08640739\nn08640962\nn08645104\nn08645212\nn08649711\nn08658309\nn08659446\nn08659861\nn08663703\nn08673039\nn08677424\nn09189157\nn09191635\nn09193705\nn09194227\nn09199101\nn09205509\nn09206896\nn09206985\nn09208496\nn09210862\nn09217230\nn09218315\nn09218494\nn09218641\nn09219233\nn09224725\nn09228055\nn09229709\nn09230041\nn09230202\nn09233446\nn09238926\nn09239302\nn09242389\nn09245515\nn09246464\nn09247410\nn09249034\nn09251407\nn09256479\nn09257843\nn09259025\nn09259219\nn09260907\nn09263912\nn09265620\nn09267854\nn09269341\nn09269472\nn09270735\nn09274152\nn09279986\nn09282208\nn09283193\nn09283405\nn09283767\nn09283866\nn09287968\nn09288635\nn09289331\nn09290444\nn09294877\nn09295946\nn09300905\nn09302616\nn09303008\nn09303528\nn09304750\nn09305898\nn09308572\nn09308743\nn09309168\nn09309292\nn09326662\nn09331251\nn09332890\nn09335809\nn09337253\nn09344324\nn09348460\nn09349648\nn09359803\nn09361517\nn09362945\nn09366317\nn09376198\nn09376526\nn09376786\nn09381242\nn09382099\nn09384106\nn09392402\nn09393605\nn09396465\nn09398076\nn09398677\nn09399592\nn09400987\nn09403211\nn09403427\nn09403734\nn09405078\nn09406793\nn09409512\nn09409752\nn09411189\nn09415584\nn09415671\nn09416076\nn09416890\nn09421799\nn09421951\nn09428293\nn09428628\nn09432283\nn09433442\nn09433839\nn0
9435739\nn09436444\nn09436708\nn09437454\nn09438844\nn09438940\nn09439213\nn09442595\nn09443281\nn09443641\nn09444783\nn09445008\nn09445289\nn09447666\nn09448690\nn09450163\nn09451237\nn09452395\nn09452760\nn09453008\nn09454153\nn09454412\nn09457979\nn09460046\nn09461069\nn09466678\nn09468604\nn09472413\nn09472597\nn09475044\nn09475179\nn09475925\nn09481120\nn09505153\nn09606527\nn09607630\nn09607903\nn09610405\nn09616922\nn09618760\nn09618880\nn09618957\nn09619168\nn09620078\nn09620794\nn09621232\nn09622302\nn09624168\nn09624559\nn09626238\nn09627906\nn09629752\nn09632518\nn09635534\nn09636339\nn09637339\nn09638454\nn09638875\nn09639919\nn09641002\nn09643799\nn09644152\nn09650729\nn09651123\nn09652149\nn09654518\nn09659039\nn09659188\nn09661873\nn09666883\nn09670521\nn09675922\nn09676021\nn09676247\nn09676884\nn09679170\nn09681234\nn09683757\nn09683924\nn09684901\nn09686401\nn09688804\nn09689435\nn09689958\nn09690621\nn09691729\nn09692915\nn09693982\nn09694664\nn09694771\nn09695620\nn09695979\nn09696456\nn09696585\nn09696763\nn09697401\nn09698644\nn09700964\nn09701148\nn09701833\nn09703485\nn09705124\nn09705784\nn09706255\nn09707289\nn09708750\nn09708889\nn09711435\nn09712324\nn09712448\nn09712696\nn09713108\nn09714694\nn09715427\nn09717233\nn09718217\nn09718811\nn09718936\nn09719309\nn09719794\nn09720033\nn09720256\nn09720595\nn09720842\nn09722658\nn09723067\nn09724533\nn09724656\nn09724785\nn09725000\nn09725653\nn09725772\nn09727440\nn09727826\nn09728137\nn09728285\nn09730077\nn09730204\nn09730824\nn09731343\nn09731436\nn09732170\nn09733793\nn09734185\nn09734450\nn09734535\nn09734639\nn09736945\nn09738121\nn09740724\nn09742101\nn09742315\nn09743487\nn09743792\nn09744161\nn09744834\nn09747191\nn09747495\nn09749386\nn09750282\nn09750770\nn09750891\nn09751496\nn09751895\nn09752023\nn09752519\nn09753792\nn09756049\nn09757449\nn09758885\nn09759501\nn09760609\nn09763784\nn09764598\nn09764900\nn09767197\nn09770179\nn09770359\nn09772930\nn09774783\nn09776346\nn09779790\n
n09782167\nn09782397\nn09785659\nn09785891\nn09787534\nn09787765\nn09789566\nn09791014\nn09791419\nn09791816\nn09792555\nn09792969\nn09793141\nn09796809\nn09797873\nn09800964\nn09801533\nn09805151\nn09805324\nn09809749\nn09809925\nn09811852\nn09813219\nn09814660\nn09816771\nn09818022\nn09820263\nn09822830\nn09823502\nn09823832\nn09824135\nn09824609\nn09827246\nn09827363\nn09828216\nn09830194\nn09830400\nn09830629\nn09832456\nn09833441\nn09833536\nn09834378\nn09834699\nn09835230\nn09835348\nn09835506\nn09836160\nn09836343\nn09836519\nn09836786\nn09838621\nn09839702\nn09840217\nn09840520\nn09841188\nn09841696\nn09842047\nn09842395\nn09842528\nn09843443\nn09843824\nn09844457\nn09845401\nn09846469\nn09846755\nn09846894\nn09847543\nn09851165\nn09854218\nn09854421\nn09855433\nn09856671\nn09858165\nn09859152\nn09861599\nn09861863\nn09861946\nn09862621\nn09863031\nn09866817\nn09871229\nn09871681\nn09871867\nn09872066\nn09873348\nn09873473\nn09873899\nn09874428\nn09874725\nn09874862\nn09877288\nn09877750\nn09877951\nn09881265\nn09881895\nn09886403\nn09889065\nn09889170\nn09889941\nn09890749\nn09893191\nn09893344\nn09893502\nn09894143\nn09894445\nn09895222\nn09895561\nn09896170\nn09896401\nn09896685\nn09899671\nn09899782\nn09899929\nn09901337\nn09902954\nn09903153\nn09903501\nn09904208\nn09904837\nn09905185\nn09906449\nn09911226\nn09913455\nn09913593\nn09915434\nn09915651\nn09916348\nn09917214\nn09917593\nn09918248\nn09918554\nn09920283\nn09923186\nn09923418\nn09923561\nn09923673\nn09924106\nn09924996\nn09927451\nn09928136\nn09929298\nn09929577\nn09930257\nn09930876\nn09931165\nn09932098\nn09932336\nn09932508\nn09933098\nn09934337\nn09934774\nn09936825\nn09938449\nn09941787\nn09941964\nn09942970\nn09943239\nn09943811\nn09944022\nn09944430\nn09945745\nn09946814\nn09951274\nn09951616\nn09953350\nn09954639\nn09964202\nn09967967\nn09971273\nn09972010\nn09972458\nn09974648\nn09975425\nn09976283\nn09976429\nn09981278\nn09981540\nn09981939\nn09988063\nn09988493\nn09988703\nn09989502
\nn09990415\nn09990690\nn09990777\nn09991867\nn09993252\nn10001481\nn10002760\nn10004718\nn10005934\nn10007684\nn10009276\nn10013811\nn10015485\nn10017272\nn10019072\nn10019406\nn10020890\nn10024362\nn10025635\nn10026976\nn10027246\nn10033412\nn10033663\nn10034201\nn10034614\nn10036692\nn10036929\nn10037385\nn10037922\nn10038409\nn10039271\nn10040945\nn10042845\nn10043491\nn10043643\nn10048612\nn10049363\nn10053808\nn10054657\nn10055410\nn10058962\nn10060352\nn10069296\nn10070108\nn10070711\nn10075693\nn10076224\nn10076604\nn10076957\nn10077593\nn10078131\nn10078719\nn10078806\nn10079399\nn10080869\nn10081204\nn10082043\nn10082687\nn10082997\nn10084295\nn10085869\nn10086383\nn10087434\nn10091450\nn10091564\nn10091651\nn10092488\nn10092643\nn10092794\nn10092978\nn10093475\nn10093818\nn10095769\nn10098245\nn10098517\nn10098624\nn10098710\nn10098862\nn10102800\nn10104064\nn10105733\nn10107303\nn10112129\nn10115430\nn10116702\nn10117739\nn10117851\nn10120330\nn10120671\nn10123122\nn10123844\nn10127689\nn10129825\nn10131151\nn10131815\nn10132035\nn10134178\nn10134982\nn10135129\nn10137825\nn10140597\nn10140929\nn10141364\nn10141732\nn10142391\nn10142747\nn10143172\nn10144338\nn10145239\nn10145340\nn10145480\nn10145590\nn10145774\nn10145902\nn10146002\nn10146104\nn10146416\nn10147121\nn10147935\nn10148035\nn10150071\nn10150940\nn10151760\nn10152763\nn10153414\nn10153594\nn10155849\nn10157128\nn10159045\nn10159533\nn10160280\nn10164233\nn10164492\nn10165448\nn10167152\nn10167838\nn10168837\nn10169147\nn10173410\nn10173771\nn10174330\nn10174445\nn10175248\nn10178216\nn10182190\nn10183931\nn10185483\nn10185793\nn10186216\nn10187990\nn10188957\nn10189278\nn10191001\nn10192839\nn10195593\nn10200781\nn10202624\nn10203949\nn10205231\nn10205457\nn10207169\nn10208950\nn10209082\nn10209731\nn10210911\nn10212501\nn10215623\nn10216106\nn10221312\nn10223177\nn10225219\nn10226413\nn10229883\nn10233248\nn10235024\nn10235385\nn10236304\nn10237069\nn10237196\nn10237676\nn10241300\nn102423
28\nn10243137\nn10243664\nn10247358\nn10247880\nn10249270\nn10249459\nn10252222\nn10253122\nn10253296\nn10258786\nn10259348\nn10259780\nn10259997\nn10260706\nn10260800\nn10261624\nn10262445\nn10262561\nn10262655\nn10263411\nn10263790\nn10267311\nn10267865\nn10274815\nn10275395\nn10276477\nn10277027\nn10279018\nn10280674\nn10282482\nn10282672\nn10283170\nn10288964\nn10289039\nn10289462\nn10290919\nn10293332\nn10296176\nn10296444\nn10297234\nn10297531\nn10297841\nn10298647\nn10298912\nn10299250\nn10300154\nn10300303\nn10300500\nn10303814\nn10304086\nn10304914\nn10305802\nn10308168\nn10308732\nn10313000\nn10313239\nn10313724\nn10314054\nn10314517\nn10314836\nn10315456\nn10315561\nn10316360\nn10317007\nn10317500\nn10318293\nn10318607\nn10320863\nn10321340\nn10323634\nn10324560\nn10325774\nn10327987\nn10328123\nn10331167\nn10332385\nn10332861\nn10333439\nn10333601\nn10333838\nn10334009\nn10339717\nn10340312\nn10341573\nn10342992\nn10343355\nn10345015\nn10346015\nn10347446\nn10348526\nn10353016\nn10355142\nn10355449\nn10356877\nn10357613\nn10359546\nn10360747\nn10362319\nn10362557\nn10364198\nn10366966\nn10368528\nn10368624\nn10369317\nn10370955\nn10373390\nn10375052\nn10375314\nn10375402\nn10376523\nn10377021\nn10377185\nn10378026\nn10380672\nn10382710\nn10382825\nn10384392\nn10384496\nn10385566\nn10386984\nn10387196\nn10387324\nn10393909\nn10395828\nn10396106\nn10400108\nn10400437\nn10400618\nn10401331\nn10401639\nn10403876\nn10405694\nn10406266\nn10406391\nn10406765\nn10407310\nn10407954\nn10410246\nn10411551\nn10415037\nn10419472\nn10419785\nn10420507\nn10421016\nn10421470\nn10421956\nn10422405\nn10427764\nn10431625\nn10432441\nn10435169\nn10435988\nn10439373\nn10439851\nn10441962\nn10449664\nn10450161\nn10450303\nn10451450\nn10453184\nn10461060\nn10464052\nn10465451\nn10465831\nn10467179\nn10467395\nn10469874\nn10470779\nn10472129\nn10473917\nn10474645\nn10476467\nn10477713\nn10481268\nn10482220\nn10483138\nn10485883\nn10486166\nn10487182\nn10488656\nn10493685\nn1049
5756\nn10498816\nn10498986\nn10499232\nn10499355\nn10500217\nn10500419\nn10500603\nn10502329\nn10504206\nn10505613\nn10506915\nn10508141\nn10508710\nn10509063\nn10510245\nn10512372\nn10513823\nn10514429\nn10521100\nn10521662\nn10522035\nn10522759\nn10523341\nn10524076\nn10525436\nn10525617\nn10528023\nn10529231\nn10530150\nn10530959\nn10536416\nn10540114\nn10542608\nn10542761\nn10542888\nn10548537\nn10548681\nn10550369\nn10553235\nn10559288\nn10559508\nn10559996\nn10560106\nn10562135\nn10562283\nn10563314\nn10563403\nn10565667\nn10566072\nn10568358\nn10568608\nn10569179\nn10572706\nn10572889\nn10574538\nn10574840\nn10575463\nn10577284\nn10578021\nn10578471\nn10581890\nn10582746\nn10583387\nn10583790\nn10585077\nn10588074\nn10588357\nn10588965\nn10595164\nn10595647\nn10598181\nn10599806\nn10602470\nn10602985\nn10603851\nn10604380\nn10604979\nn10607291\nn10607478\nn10610465\nn10610850\nn10611267\nn10611613\nn10613996\nn10618342\nn10620586\nn10620758\nn10622053\nn10624074\nn10624310\nn10624437\nn10624540\nn10627252\nn10628644\nn10629939\nn10630188\nn10631309\nn10633450\nn10634849\nn10635788\nn10638922\nn10639359\nn10639637\nn10642596\nn10644598\nn10645017\nn10649197\nn10652605\nn10655594\nn10657835\nn10661563\nn10665587\nn10665698\nn10667477\nn10667863\nn10671613\nn10671736\nn10672371\nn10674713\nn10675010\nn10678937\nn10679174\nn10680609\nn10680796\nn10682953\nn10685398\nn10686073\nn10686885\nn10688356\nn10689306\nn10690648\nn10692482\nn10694258\nn10696508\nn10698368\nn10701180\nn10701644\nn10701962\nn10707134\nn10707233\nn10709529\nn10711766\nn10718131\nn10719132\nn10721321\nn10726031\nn10727171\nn10727458\nn10728624\nn10730728\nn10732010\nn10734394\nn10734891\nn10737103\nn10738111\nn10739391\nn10740868\nn10745006\nn10746931\nn10747119\nn10748620\nn10750031\nn10750640\nn10753442\nn10754189\nn10755080\nn10756148\nn10757050\nn10763075\nn10763383\nn10763620\nn10765679\nn10772092\nn10773665\nn10780284\nn10780632\nn10782471\nn10787470\nn10791115\nn10791221\nn10792335\nn10
793570\nn10802507\nn10804287\nn10806113\nn11448153\nn11487732\nn11508382\nn11524451\nn11532682\nn11533212\nn11536673\nn11537327\nn11542137\nn11542640\nn11544015\nn11545714\nn11547855\nn11552133\nn11552806\nn11599324\nn11600372\nn11601177\nn11601333\nn11601918\nn11602873\nn11603246\nn11603835\nn11608250\nn11609475\nn11609862\nn11610215\nn11611087\nn11611233\nn11611356\nn11611561\nn11611758\nn11612018\nn11612349\nn11612575\nn11613219\nn11613459\nn11614039\nn11614250\nn11614420\nn11614713\nn11615026\nn11615387\nn11615607\nn11615967\nn11616486\nn11616662\nn11617090\nn11617272\nn11617631\nn11618290\nn11618525\nn11618861\nn11619227\nn11619455\nn11620673\nn11621029\nn11621281\nn11621547\nn11621727\nn11621950\nn11622184\nn11622368\nn11622591\nn11622771\nn11623105\nn11623815\nn11623967\nn11624192\nn11624531\nn11625003\nn11625223\nn11625632\nn11625804\nn11626152\nn11626409\nn11626585\nn11626826\nn11627168\nn11627512\nn11627908\nn11628087\nn11628456\nn11628793\nn11630017\nn11631854\nn11632167\nn11632619\nn11634736\nn11635152\nn11635433\nn11635830\nn11636204\nn11636835\nn11639445\nn11640132\nn11643835\nn11644046\nn11644226\nn11644462\nn11645590\nn11645914\nn11646167\nn11646344\nn11646694\nn11647306\nn11647703\nn11650558\nn11652376\nn11653904\nn11655974\nn11658331\nn11658544\nn11660300\nn11661372\nn11661909\nn11662371\nn11664418\nn11665372\nn11666854\nn11669786\nn11669921\nn11672269\nn11672400\nn11675025\nn11676500\nn11678010\nn11680596\nn11682659\nn11686912\nn11690254\nn11690455\nn11691046\nn11691857\nn11692265\nn11692792\nn11693981\nn11694664\nn11695599\nn11695974\nn11698042\nn11699442\nn11700058\nn11701066\nn11703669\nn11704093\nn11704620\nn11705171\nn11705387\nn11705776\nn11706761\nn11707229\nn11709205\nn11709674\nn11710136\nn11710393\nn11710827\nn11711537\nn11711764\nn11712282\nn11714382\nn11715430\nn11715678\nn11717577\nn11719286\nn11720353\nn11720643\nn11720891\nn11721337\nn11722466\nn11722982\nn11723227\nn11723770\nn11724109\nn11725015\nn11725311\nn11725480\nn11725821\nn
11725973\nn11726269\nn11726707\nn11727091\nn11727358\nn11727540\nn11727738\nn11728099\nn11728945\nn11730602\nn11731659\nn11732567\nn11733054\nn11733312\nn11733548\nn11735053\nn11736694\nn11736851\nn11737534\nn11748811\nn11752937\nn11753143\nn11753355\nn11753700\nn11754893\nn11756092\nn11756669\nn11756870\nn11757653\nn11757851\nn11758122\nn11758276\nn11758483\nn11758799\nn11759224\nn11759404\nn11759853\nn11760785\nn11761202\nn11762433\nn11769176\nn11769621\nn11769803\nn11770256\nn11772408\nn11772879\nn11773987\nn11774513\nn11777080\nn11778257\nn11779300\nn11780148\nn11781176\nn11782036\nn11782761\nn11783920\nn11784126\nn11784497\nn11785668\nn11786131\nn11786539\nn11788727\nn11789066\nn11789589\nn11791341\nn11791569\nn11792029\nn11792341\nn11792742\nn11793779\nn11794024\nn11794519\nn11795049\nn11797321\nn11800236\nn11801891\nn11802586\nn11802800\nn11805544\nn11805956\nn11806219\nn11807108\nn11807525\nn11807979\nn11808299\nn11808468\nn11808721\nn11808932\nn11809094\nn11809271\nn11809437\nn11809594\nn11810358\nn11811473\nn11811706\nn11811921\nn11812094\nn11812910\nn11813077\nn11814584\nn11815491\nn11815721\nn11815918\nn11816121\nn11816336\nn11816649\nn11816829\nn11817914\nn11818069\nn11819509\nn11819912\nn11820965\nn11821184\nn11823436\nn11824146\nn11825351\nn11826198\nn11830906\nn11832214\nn11832480\nn11834654\nn11836722\nn11837970\nn11838916\nn11839568\nn11839823\nn11840067\nn11844371\nn11844892\nn11845557\nn11845793\nn11845913\nn11846765\nn11847169\nn11848479\nn11849467\nn11849871\nn11849983\nn11850521\nn11851258\nn11851578\nn11851839\nn11852028\nn11853356\nn11853813\nn11854479\nn11855274\nn11855553\nn11857875\nn11859472\nn11859737\nn11860555\nn11861641\nn11861853\nn11862835\nn11865874\nn11866248\nn11870418\nn11870747\nn11872146\nn11874081\nn11875523\nn11875691\nn11875938\nn11876204\nn11876432\nn11876634\nn11876803\nn11877193\nn11877283\nn11877646\nn11878101\nn11879054\nn11879722\nn11879895\nn11882074\nn11882426\nn11883328\nn11887119\nn11888800\nn11889619\nn11890150\
nn11891175\nn11892029\nn11892637\nn11892817\nn11893640\nn11894327\nn11894558\nn11894770\nn11895092\nn11896722\nn11897116\nn11898775\nn11900569\nn11901294\nn11901597\nn11901759\nn11901977\nn11902200\nn11902389\nn11902709\nn11902982\nn11903671\nn11904109\nn11905392\nn11905749\nn11906917\nn11907100\nn11907689\nn11908549\nn11908846\nn11910271\nn11910460\nn11915214\nn11915658\nn11915899\nn11916467\nn11916696\nn11918286\nn11918473\nn11921395\nn11923174\nn11923397\nn11923637\nn11924445\nn11924849\nn11925303\nn11925898\nn11926365\nn11926833\nn11927215\nn11928352\nn11928858\nn11929743\nn11931540\nn11931918\nn11933546\nn11933728\nn11934616\nn11934807\nn11935469\nn11939180\nn11939491\nn11939699\nn11940006\nn11940599\nn11941924\nn11943407\nn11943660\nn11943992\nn11944196\nn11944954\nn11945514\nn11945783\nn11946727\nn11947629\nn11947802\nn11948264\nn11948864\nn11949015\nn11949402\nn11950345\nn11950686\nn11950877\nn11953038\nn11953610\nn11953884\nn11954161\nn11954345\nn11954642\nn11955153\nn11955896\nn11956348\nn11956850\nn11957678\nn11958080\nn11959632\nn11959862\nn11960245\nn11961100\nn11961446\nn11962272\nn11962667\nn11963932\nn11965218\nn11965627\nn11966083\nn11966215\nn11966617\nn11966896\nn11968704\nn11968931\nn11969166\nn11969607\nn11970586\nn11971248\nn11971406\nn11971783\nn11971927\nn11972291\nn11972759\nn11973341\nn11977303\nn11978233\nn11978551\nn11978713\nn11978961\nn11979527\nn11979715\nn11979964\nn11980318\nn11980682\nn11981192\nn11982115\nn11984144\nn11984542\nn11986511\nn11987126\nn11988596\nn11989393\nn11989869\nn11990167\nn11990313\nn11991263\nn11992806\nn11995092\nn11998888\nn12001707\nn12002428\nn12003167\nn12003696\nn12004547\nn12005656\nn12006766\nn12006930\nn12007196\nn12007406\nn12008252\nn12008487\nn12008749\nn12009420\nn12011620\nn12012111\nn12014085\nn12015221\nn12015525\nn12015959\nn12016567\nn12018760\nn12019827\nn12020184\nn12020507\nn12020736\nn12020941\nn12022054\nn12023108\nn12023407\nn12023726\nn12024445\nn12024690\nn12026018\nn12026476\nn1202698
1\nn12027222\nn12027658\nn12029635\nn12030908\nn12031139\nn12031927\nn12033709\nn12034141\nn12034384\nn12036939\nn12037499\nn12037691\nn12038406\nn12038585\nn12038898\nn12039317\nn12041446\nn12043444\nn12043673\nn12043836\nn12044467\nn12046028\nn12046428\nn12046815\nn12047345\nn12047884\nn12048056\nn12048399\nn12049282\nn12049562\nn12050533\nn12050959\nn12051103\nn12052447\nn12052787\nn12053405\nn12053690\nn12055516\nn12056217\nn12056601\nn12056758\nn12057211\nn12057447\nn12057660\nn12058192\nn12058630\nn12058822\nn12059314\nn12059625\nn12061380\nn12061614\nn12062468\nn12062626\nn12062781\nn12063639\nn12064389\nn12064591\nn12065316\nn12065777\nn12066018\nn12066261\nn12066630\nn12067193\nn12068432\nn12069217\nn12069679\nn12070016\nn12070381\nn12070583\nn12070712\nn12071744\nn12072722\nn12073554\nn12073991\nn12074408\nn12074867\nn12075010\nn12075151\nn12075299\nn12075830\nn12076223\nn12076577\nn12076852\nn12077944\nn12078172\nn12079120\nn12079963\nn12080395\nn12080820\nn12081215\nn12083113\nn12083591\nn12083847\nn12084555\nn12084890\nn12085267\nn12085664\nn12086012\nn12086192\nn12086539\nn12086778\nn12088223\nn12090890\nn12091213\nn12091377\nn12091550\nn12091953\nn12092262\nn12092417\nn12093329\nn12093600\nn12094612\nn12095020\nn12095647\nn12098403\nn12099342\nn12101870\nn12102133\nn12104238\nn12104501\nn12104734\nn12105125\nn12107710\nn12107970\nn12108871\nn12109365\nn12110085\nn12110778\nn12112008\nn12112609\nn12112918\nn12113195\nn12115180\nn12116429\nn12119238\nn12121610\nn12122725\nn12123741\nn12124627\nn12124818\nn12127460\nn12127768\nn12128071\nn12129134\nn12133462\nn12133682\nn12134025\nn12135049\nn12136392\nn12137120\nn12137569\nn12139575\nn12141167\nn12142085\nn12144313\nn12144580\nn12145477\nn12146311\nn12148757\nn12150722\nn12151615\nn12152532\nn12152722\nn12154773\nn12155009\nn12157056\nn12158031\nn12158443\nn12159055\nn12159388\nn12160303\nn12160490\nn12160857\nn12161056\nn12161969\nn12162181\nn12162425\nn12164363\nn12164656\nn12164881\nn12165170\nn12166
128\nn12166424\nn12166793\nn12167075\nn12167436\nn12167602\nn12168565\nn12171098\nn12171316\nn12171966\nn12172364\nn12172481\nn12172906\nn12173069\nn12173664\nn12173912\nn12174311\nn12174521\nn12178896\nn12179122\nn12180168\nn12180885\nn12184912\nn12185859\nn12187247\nn12189429\nn12189987\nn12190410\nn12190869\nn12194147\nn12195533\nn12196336\nn12196527\nn12196694\nn12198286\nn12199790\nn12200143\nn12201331\nn12201580\nn12202936\nn12203529\nn12204032\nn12204175\nn12205694\nn12214789\nn12215022\nn12215579\nn12217453\nn12223569\nn12223764\nn12224978\nn12225563\nn12227658\nn12228229\nn12228387\nn12230794\nn12237486\nn12237641\nn12240477\nn12242409\nn12243109\nn12244153\nn12244650\nn12244819\nn12245319\nn12246232\nn12249542\nn12252168\nn12257570\nn12258885\nn12260799\nn12261571\nn12261808\nn12262018\nn12262185\nn12263038\nn12263738\nn12263987\nn12264512\nn12265600\nn12266217\nn12266796\nn12267411\nn12267677\nn12268246\nn12269241\nn12269406\nn12270027\nn12270741\nn12270946\nn12271933\nn12272239\nn12272883\nn12273114\nn12273344\nn12273768\nn12273939\nn12274358\nn12274863\nn12275131\nn12275675\nn12275888\nn12276110\nn12276477\nn12276628\nn12276872\nn12277150\nn12277578\nn12277800\nn12278107\nn12278371\nn12278650\nn12279458\nn12279772\nn12280060\nn12281241\nn12281788\nn12281974\nn12282235\nn12282527\nn12282737\nn12282933\nn12283542\nn12284262\nn12284821\nn12285369\nn12285900\nn12286826\nn12286988\nn12287836\nn12288005\nn12288823\nn12290748\nn12291143\nn12291959\nn12293723\nn12294124\nn12294331\nn12294723\nn12294871\nn12295033\nn12295796\nn12296432\nn12300840\nn12301180\nn12301445\nn12302071\nn12302248\nn12303083\nn12303462\nn12304115\nn12304703\nn12304899\nn12305089\nn12305293\nn12305475\nn12305819\nn12305986\nn12306089\nn12306717\nn12307076\nn12307240\nn12309277\nn12311579\nn12312728\nn12315598\nn12315999\nn12316444\nn12316572\nn12317296\nn12318378\nn12319204\nn12319414\nn12320010\nn12320806\nn12322099\nn12322501\nn12322699\nn12325234\nn12328398\nn12328567\nn12329260\nn123
29473\nn12330469\nn12330587\nn12330891\nn12331655\nn12332030\nn12332555\nn12333053\nn12333530\nn12333771\nn12334293\nn12336092\nn12336224\nn12336333\nn12336727\nn12336973\nn12337617\nn12338258\nn12338454\nn12338655\nn12338796\nn12339831\nn12340383\nn12340755\nn12342299\nn12342498\nn12342852\nn12343480\nn12344283\nn12344483\nn12344700\nn12344837\nn12345280\nn12345899\nn12347158\nn12350758\nn12352287\nn12352639\nn12352844\nn12352990\nn12353203\nn12353754\nn12356023\nn12356960\nn12357485\nn12360108\nn12360684\nn12361135\nn12361946\nn12362274\nn12362668\nn12367611\nn12368028\nn12368257\nn12368451\nn12369309\nn12371439\nn12373100\nn12374418\nn12383894\nn12384037\nn12384227\nn12384839\nn12385429\nn12385566\nn12387633\nn12387839\nn12388143\nn12388858\nn12388989\nn12389130\nn12389501\nn12390099\nn12390314\nn12392549\nn12393269\nn12397431\nn12399132\nn12399384\nn12400489\nn12400720\nn12401684\nn12402051\nn12402348\nn12402596\nn12402840\nn12403994\nn12405714\nn12406488\nn12406715\nn12406902\nn12407079\nn12407222\nn12407890\nn12408077\nn12408717\nn12409231\nn12409470\nn12409840\nn12412355\nn12412606\nn12413165\nn12413301\nn12413419\nn12413880\nn12414035\nn12414449\nn12414818\nn12414932\nn12415595\nn12416073\nn12418221\nn12421137\nn12421683\nn12421917\nn12422129\nn12426623\nn12426749\nn12427184\nn12427391\nn12427566\nn12427757\nn12428076\nn12428412\nn12428747\nn12429352\nn12432356\nn12433081\nn12433178\nn12433769\nn12435152\nn12435649\nn12435777\nn12437513\nn12437769\nn12437930\nn12441183\nn12441390\nn12441958\nn12443323\nn12446519\nn12448700\nn12449296\nn12449526\nn12450344\nn12450840\nn12451240\nn12451399\nn12451915\nn12452836\nn12453186\nn12454159\nn12454436\nn12454705\nn12454949\nn12455950\nn12457091\nn12458550\nn12459629\nn12460697\nn12460957\nn12461109\nn12461466\nn12461673\nn12462805\nn12463134\nn12465557\nn12466727\nn12469517\nn12472024\nn12473608\nn12473840\nn12474167\nn12475035\nn12475242\nn12476510\nn12477163\nn12477583\nn12477747\nn12478768\nn12479537\nn12480456\nn1
2480895\nn12481458\nn12482437\nn12482668\nn12482893\nn12483427\nn12483625\nn12483841\nn12484784\nn12485653\nn12485981\nn12486574\nn12489815\nn12491017\nn12491826\nn12492106\nn12493208\nn12494794\nn12495146\nn12495895\nn12496427\nn12496949\nn12498055\nn12501202\nn12504570\nn12504783\nn12506341\nn12506991\nn12508309\nn12509476\nn12509665\nn12513172\nn12513613\nn12513933\nn12514138\nn12515711\nn12515925\nn12516828\nn12517445\nn12517642\nn12519089\nn12519563\nn12521394\nn12523475\nn12527738\nn12528549\nn12528974\nn12529220\nn12530629\nn12530818\nn12532564\nn12539306\nn12540250\nn12544539\nn12545635\nn12546183\nn12546617\nn12546962\nn12547215\nn12547503\nn12548280\nn12549192\nn12552309\nn12554911\nn12556656\nn12557438\nn12557556\nn12557681\nn12558230\nn12558425\nn12560282\nn12560621\nn12560775\nn12561169\nn12562785\nn12564083\nn12566954\nn12568186\nn12570394\nn12570703\nn12570972\nn12571781\nn12573474\nn12574320\nn12574866\nn12575322\nn12575812\nn12576323\nn12577895\nn12578626\nn12578916\nn12579038\nn12580654\nn12580896\nn12582231\nn12582665\nn12582846\nn12583126\nn12583401\nn12584191\nn12584715\nn12585629\nn12587132\nn12587803\nn12588320\nn12588780\nn12590232\nn12590499\nn12591017\nn12591351\nn12593994\nn12595699\nn12595964\nn12596148\nn12596709\nn12596849\nn12597134\nn12597466\nn12597798\nn12598027\nn12599435\nn12602262\nn12602980\nn12603449\nn12604228\nn12606438\nn12606545\nn12607456\nn12610328\nn12614477\nn12615232\nn12620196\nn12620546\nn12620969\nn12621410\nn12622297\nn12622875\nn12623077\nn12624381\nn12624568\nn12625383\nn12627119\nn12628986\nn12629305\nn12629666\nn12630763\nn12631331\nn12632335\nn12633638\nn12633994\nn12634211\nn12634429\nn12634734\nn12634986\nn12635532\nn12635744\nn12635955\nn12636224\nn12638218\nn12638753\nn12639584\nn12640839\nn12641007\nn12641413\nn12642090\nn12642200\nn12643313\nn12643473\nn12644902\nn12645174\nn12646605\nn12646740\nn12647560\nn12647893\nn12648045\nn12648888\nn12649065\nn12649317\nn12649539\nn12650379\nn12650556\nn12651229\n
n12651611\nn12651821\nn12655869\nn12656369\nn12656685\nn12657082\nn12658118\nn12658308\nn12658481\nn12659064\nn12659356\nn12659539\nn12662772\nn12663023\nn12665048\nn12665271\nn12665857\nn12666965\nn12670758\nn12671651\nn12675299\nn12675876\nn12676534\nn12676703\nn12680402\nn12680864\nn12681893\nn12682411\nn12682668\nn12683407\nn12683571\nn12683791\nn12684379\nn12685431\nn12685831\nn12686077\nn12686274\nn12686676\nn12687044\nn12687462\nn12687698\nn12687957\nn12688716\nn12691428\nn12691661\nn12694486\nn12695975\nn12696492\nn12698598\nn12700088\nn12703190\nn12703383\nn12703557\nn12703856\nn12704343\nn12706410\nn12707781\nn12708293\nn12708654\nn12708941\nn12709103\nn12709688\nn12709901\nn12710295\nn12710415\nn12710577\nn12710693\nn12711596\nn12711817\nn12711984\nn12713063\nn12713866\nn12714755\nn12717072\nn12717224\nn12719684\nn12719944\nn12720200\nn12723610\nn12724942\nn12725738\nn12726159\nn12726670\nn12727101\nn12727518\nn12729315\nn12729521\nn12729729\nn12731029\nn12731401\nn12731835\nn12732009\nn12732491\nn12732756\nn12732966\nn12733218\nn12733647\nn12733870\nn12734070\nn12737383\nn12737898\nn12739332\nn12741222\nn12741792\nn12743352\nn12744387\nn12745386\nn12746884\nn12749049\nn12749679\nn12749852\nn12752205\nn12753007\nn12753245\nn12753573\nn12753762\nn12754003\nn12754468\nn12754648\nn12754781\nn12754981\nn12755225\nn12755387\nn12755727\nn12756457\nn12757303\nn12757458\nn12757816\nn12759273\nn12761284\nn12762049\nn12762896\nn12764202\nn12765115\nn12766595\nn12766869\nn12768682\nn12771192\nn12771390\nn12771597\nn12772753\nn12772908\nn12773651\nn12774299\nn12774641\nn12775919\nn12777680\nn12778398\nn12778605\nn12779603\nn12779851\nn12781940\nn12782530\nn12782915\nn12784889\nn12785724\nn12785889\nn12788854\nn12789054\nn12790430\nn12791064\nn12791329\nn12793015\nn12793284\nn12793494\nn12794135\nn12794367\nn12794985\nn12795352\nn12795555\nn12796022\nn12797860\nn12799776\nn12801520\nn12801781\nn12803754\nn12805146\nn12805561\nn12806015\nn12806732\nn12807251\nn12807409
\nn12807773\nn12810595\nn12811027\nn12812478\nn12813189\nn12814643\nn12815198\nn12816508\nn12817464\nn12817694\nn12818346\nn12818966\nn12819728\nn12820853\nn12821505\nn12821895\nn12822115\nn12822769\nn12822955\nn12823717\nn12823859\nn12824053\nn12825497\nn12827270\nn12827537\nn12828220\nn12828791\nn12830222\nn12830568\nn12832315\nn12832538\nn12833149\nn12833985\nn12834798\nn12835331\nn12836212\nn12836337\nn12836508\nn12836862\nn12840362\nn12840749\nn12841007\nn12841193\nn12841354\nn12843970\nn12844939\nn12845413\nn12847008\nn12847374\nn12847927\nn12848499\nn12849061\nn12849279\nn12849416\nn12849952\nn12850168\nn12850336\nn12850906\nn12851469\nn12853482\nn12854600\nn12855494\nn12856091\nn12856287\nn12856479\nn12856680\nn12858150\nn12858397\nn12858618\nn12858871\nn12859986\nn12860365\nn12861345\nn12861892\nn12862512\nn12863624\nn12864160\nn12865037\nn12865562\nn12865708\nn12865824\nn12866002\nn12866162\nn12866459\nn12866635\nn12867826\nn12869061\nn12869478\nn12870535\nn12870682\nn12870891\nn12877838\nn12879527\nn12879963\nn12880244\nn12880462\nn12882779\nn12882945\nn12884100\nn12884260\nn12887293\nn12889219\nn12889713\nn12890265\nn12890490\nn12890685\nn12890928\nn12891093\nn12891305\nn12891469\nn12891643\nn12893463\nn12893993\nn12895811\nn12898774\nn12899537\nn12899752\nn12901724\nn12902662\nn12904314\nn12905412\nn12906214\nn12908645\nn12909421\nn12909917\nn12911079\nn12911440\nn12911673\nn12913791\nn12914923\nn12915568\nn12915811\nn12916179\nn12916511\nn12917901\nn12919403\nn12919646\nn12919847\nn12920204\nn12920955\nn12921868\nn12922763\nn12924623\nn12925179\nn12926480\nn12926689\nn12927013\nn12927494\nn12928071\nn12929403\nn12931542\nn12932173\nn12932365\nn12932966\nn12934036\nn12934174\nn12934479\nn12934985\nn12935609\nn12937130\nn12938193\nn12939282\nn12939874\nn12940226\nn12940609\nn12942395\nn12942572\nn12946849\nn12947313\nn12947544\nn12948053\nn12948251\nn12948495\nn12950126\nn12950314\nn12951146\nn12951835\nn12953206\nn12953484\nn12957924\nn12961879\nn129636
28\nn12965626\nn12966945\nn12969131\nn12969425\nn12973443\nn12974987\nn12975804\nn12979829\nn12980840\nn12982468\nn12983048\nn12985420\nn12985773\nn12985857\nn12986227\nn12987056\nn12988158\nn12989938\nn12991184\nn12992177\nn12992868\nn12995601\nn12997654\nn12997919\nn12998815\nn13000891\nn13001041\nn13001206\nn13001366\nn13001529\nn13001930\nn13002750\nn13002925\nn13003061\nn13003254\nn13003522\nn13003712\nn13004423\nn13005329\nn13005984\nn13006171\nn13006631\nn13006894\nn13007034\nn13007417\nn13008315\nn13009085\nn13009429\nn13011595\nn13012253\nn13013534\nn13013764\nn13014409\nn13014741\nn13017102\nn13017240\nn13019835\nn13020191\nn13020964\nn13021689\nn13022210\nn13022709\nn13023134\nn13024012\nn13025647\nn13028611\nn13029326\nn13029760\nn13032115\nn13032381\nn13032618\nn13032923\nn13033134\nn13033577\nn13034062\nn13035241\nn13035707\nn13037406\nn13038068\nn13038744\nn13039349\nn13040303\nn13040629\nn13041312\nn13043926\nn13044375\nn13044778\nn13046669\nn13049953\nn13050397\nn13052670\nn13052931\nn13053608\nn13054073\nn13054560\nn13055423\nn13055577\nn13055949\nn13060190\nn13061348\nn13062421\nn13065089\nn13066448\nn13068255\nn13072528\nn13074619\nn13077033\nn13077295\nn13079073\nn13083023\nn13084184\nn13084834\nn13085747\nn13090871\nn13091620\nn13099999\nn13100677\nn13102775\nn13103877\nn13104059\nn13107694\nn13107891\nn13108131\nn13108323\nn13108481\nn13108545\nn13108841\nn13111881\nn13121349\nn13122364\nn13123431\nn13125117\nn13126856\nn13127843\nn13128976\nn13130726\nn13131028\nn13131618\nn13132338\nn13132656\nn13133613\nn13133932\nn13134947\nn13135832\nn13136316\nn13136556\nn13137409\nn13138842\nn13141415\nn13141564\nn13142504\nn13145040\nn13145250\nn13146583\nn13147270\nn13148208\nn13150894\nn13154388\nn13154494\nn13155095\nn13155305\nn13158512\nn13160604\nn13163991\nn13172923\nn13173882\nn13177048\nn13177884\nn13180534\nn13180875\nn13181055\nn13181811\nn13183056\nn13183489\nn13185269\nn13187367\nn13190747\nn13192625\nn13193642\nn13193856\nn13194036\nn1319
4572\nn13195341\nn13196003\nn13197274\nn13197507\nn13198914\nn13199717\nn13199970\nn13200651\nn13201969\nn13205058\nn13206817\nn13207094\nn13207335\nn13209808\nn13213066\nn13214340\nn13215586\nn13219422\nn13219833\nn13219976\nn13220122\nn13221529\nn13223588\nn13223710\nn13223843\nn13226871\nn13229543\nn13231078\nn13232779\nn13234678\nn13235159\nn13235503\nn13237188\nn13238375\nn13238988\nn13579829\nn13653902\nn13862407\nn13863020\nn13863771\nn13864035\nn13865298\nn13865483\nn13865904\nn13868944\nn13869547\nn13869788\nn13869896\nn13872592\nn13872822\nn13873502\nn13873917\nn13875392\nn13875571\nn13876561\nn13878306\nn13879049\nn13879320\nn13880994\nn13881644\nn13882201\nn13882276\nn13882563\nn13886260\nn13895262\nn13896100\nn13896217\nn13897996\nn13898207\nn13900287\nn13900422\nn13901211\nn13901321\nn13901858\nn13902048\nn13902336\nn13905792\nn13907272\nn13908201\nn13908580\nn13912260\nn13912540\nn13914608\nn13915023\nn13915113\nn13916721\nn13918274\nn13918387\nn13919547\nn13919919\nn13926786\nn14131950\nn14564779\nn14685296\nn14696793\nn14698884\nn14765422\nn14785065\nn14810561\nn14820180\nn14844693\nn14858292\nn14900342\nn14908027\nn14915184\nn14919819\nn14973585\nn14974264\nn14976759\nn14976871\nn14977504\nn15019030\nn15062057\nn15067877\nn15075141\nn15086247\nn15089258\nn15090065\nn15091129\nn15091304\nn15091473\nn15091669\nn15091846\nn15092059\nn15092227\nn15092650\nn15092942\nn15093137\nn15093298\nn15102455\nn15102894\n"
  },
  {
    "path": "timm/data/_info/imagenet21k_miil_w21_synsets.txt",
    "content": "n00005787\nn00006484\nn00007846\nn00015388\nn00017222\nn00021265\nn00021939\nn00120010\nn00141669\nn00288000\nn00288384\nn00324978\nn00326094\nn00433458\nn00433661\nn00433802\nn00434075\nn00439826\nn00440039\nn00440382\nn00440509\nn00440747\nn00440941\nn00441073\nn00441824\nn00442115\nn00442437\nn00442847\nn00442981\nn00443231\nn00443692\nn00443803\nn00444651\nn00444846\nn00444937\nn00445055\nn00445226\nn00445351\nn00445685\nn00445802\nn00446311\nn00446493\nn00446804\nn00446980\nn00447073\nn00447221\nn00447463\nn00447540\nn00447957\nn00448126\nn00448232\nn00448466\nn00448640\nn00448748\nn00448872\nn00448958\nn00449054\nn00449168\nn00449295\nn00449517\nn00449695\nn00449796\nn00449892\nn00449977\nn00450070\nn00450335\nn00450700\nn00450866\nn00450998\nn00451186\nn00451370\nn00451563\nn00451635\nn00452034\nn00452152\nn00452293\nn00452864\nn00453126\nn00453313\nn00453396\nn00453478\nn00453935\nn00454237\nn00454395\nn00454493\nn00454624\nn00454983\nn00455173\nn00456465\nn00463246\nn00463543\nn00464277\nn00464478\nn00464651\nn00464894\nn00466273\nn00466377\nn00466524\nn00466630\nn00466712\nn00466880\nn00467320\nn00467536\nn00467719\nn00467995\nn00468299\nn00468480\nn00469651\nn00470554\nn00470682\nn00470830\nn00470966\nn00471437\nn00471613\nn00474568\nn00474657\nn00475014\nn00475273\nn00475403\nn00475535\nn00475787\nn00476235\nn00476389\nn00477392\nn00477639\nn00478262\nn00479076\nn00479440\nn00479616\nn00479887\nn00480211\nn00480366\nn00480508\nn00480993\nn00481803\nn00482122\nn00482298\nn00483205\nn00483313\nn00483409\nn00483508\nn00483605\nn00483705\nn00483848\nn00523513\nn00825773\nn00887544\nn01055165\nn01314388\nn01314663\nn01314781\nn01315213\nn01316422\nn01317089\nn01317294\nn01317541\nn01317813\nn01317916\nn01318279\nn01318381\nn01318894\nn01319467\nn01321123\nn01321230\nn01321456\nn01321579\nn01321770\nn01321854\nn01322221\nn01322343\nn01322508\nn01322604\nn01322685\nn01322898\nn01322983\nn01323068\nn01323155\nn01323261\nn01323355\nn01323493\nn013
23599\nn01324431\nn01324610\nn01326291\nn01338685\nn01339083\nn01339336\nn01339471\nn01339801\nn01340014\nn01379389\nn01381044\nn01384164\nn01392275\nn01392380\nn01395254\nn01396048\nn01397114\nn01397871\nn01402600\nn01405007\nn01407798\nn01410457\nn01415626\nn01421807\nn01424420\nn01438581\nn01439121\nn01439514\nn01440764\nn01441117\nn01442972\nn01443243\nn01443537\nn01443831\nn01444339\nn01446760\nn01447331\nn01447658\nn01448291\nn01448594\nn01448951\nn01449374\nn01449712\nn01451426\nn01453087\nn01454545\nn01455778\nn01456756\nn01457852\nn01459791\nn01462042\nn01462544\nn01464844\nn01468238\nn01468712\nn01469103\nn01471682\nn01472303\nn01477525\nn01477875\nn01482071\nn01482330\nn01483830\nn01484097\nn01484850\nn01485479\nn01486838\nn01487506\nn01488038\nn01489501\nn01489709\nn01489920\nn01490112\nn01490360\nn01490670\nn01491006\nn01491361\nn01491874\nn01492569\nn01493146\nn01494475\nn01495006\nn01495493\nn01495701\nn01496331\nn01497118\nn01498041\nn01498989\nn01499396\nn01500091\nn01500476\nn01501160\nn01503061\nn01503976\nn01504179\nn01504344\nn01514668\nn01514752\nn01514859\nn01515303\nn01517565\nn01517966\nn01518878\nn01519563\nn01519873\nn01520576\nn01521399\nn01521756\nn01524359\nn01526521\nn01527194\nn01527347\nn01527617\nn01527917\nn01528396\nn01528654\nn01528845\nn01529672\nn01530439\nn01530575\nn01531178\nn01531344\nn01531512\nn01531811\nn01531971\nn01532325\nn01532511\nn01532829\nn01533000\nn01533339\nn01533481\nn01533651\nn01533893\nn01534155\nn01534433\nn01534582\nn01535140\nn01535469\nn01535690\nn01536035\nn01536186\nn01536334\nn01536644\nn01536780\nn01537134\nn01537544\nn01537895\nn01538059\nn01538200\nn01538630\nn01538955\nn01539573\nn01539925\nn01540090\nn01540233\nn01540566\nn01540832\nn01541102\nn01541386\nn01541760\nn01541922\nn01542786\nn01543175\nn01543632\nn01544389\nn01544704\nn01545574\nn01546039\nn01546506\nn01547832\nn01548301\nn01548492\nn01548865\nn01549053\nn01549430\nn01549641\nn01549886\nn01550172\nn01551080\nn01551300\nn01551711\nn0
1552034\nn01552813\nn01553142\nn01554448\nn01555004\nn01555305\nn01555809\nn01556182\nn01557185\nn01557962\nn01558149\nn01558307\nn01558461\nn01558594\nn01558765\nn01558993\nn01559477\nn01559639\nn01559804\nn01560105\nn01560280\nn01560419\nn01560636\nn01560793\nn01560935\nn01561452\nn01561732\nn01562014\nn01562265\nn01562451\nn01563128\nn01563449\nn01563746\nn01563945\nn01564217\nn01564394\nn01564773\nn01564914\nn01565078\nn01565345\nn01565599\nn01565930\nn01566207\nn01566645\nn01567133\nn01567678\nn01567879\nn01568294\nn01568720\nn01568892\nn01569060\nn01569262\nn01569423\nn01569566\nn01569836\nn01569971\nn01570267\nn01570421\nn01570676\nn01570839\nn01571904\nn01572328\nn01572489\nn01572654\nn01572782\nn01573074\nn01573240\nn01573360\nn01573898\nn01574045\nn01574390\nn01574560\nn01574801\nn01575117\nn01575401\nn01575745\nn01576076\nn01576695\nn01577035\nn01577659\nn01577941\nn01578180\nn01578575\nn01579028\nn01579149\nn01579260\nn01579410\nn01579578\nn01579729\nn01580077\nn01580870\nn01581166\nn01581730\nn01581984\nn01582220\nn01582398\nn01582856\nn01583209\nn01583495\nn01583828\nn01584225\nn01584695\nn01584853\nn01585121\nn01585287\nn01585422\nn01585715\nn01586020\nn01586374\nn01586941\nn01587526\nn01587834\nn01588002\nn01588725\nn01589286\nn01589718\nn01589893\nn01591005\nn01591123\nn01591301\nn01591697\nn01592084\nn01592257\nn01592387\nn01592540\nn01592694\nn01593028\nn01594004\nn01594372\nn01594787\nn01594968\nn01595168\nn01595450\nn01595974\nn01596273\nn01596608\nn01597022\nn01597336\nn01597737\nn01597906\nn01598074\nn01598588\nn01598988\nn01599159\nn01599269\nn01599556\nn01600085\nn01600657\nn01601068\nn01601694\nn01602630\nn01602832\nn01603152\nn01603600\nn01603812\nn01603953\nn01604330\nn01604968\nn01605630\nn01606522\nn01606672\nn01606809\nn01607600\nn01607812\nn01607962\nn01608265\nn01608432\nn01608814\nn01609062\nn01609391\nn01609751\nn01609956\nn01610100\nn01610226\nn01610552\nn01610955\nn01611472\nn01611800\nn01611969\nn01612122\nn01612275\nn01612476\n
n01612628\nn01613177\nn01613294\nn01613615\nn01613807\nn01614038\nn01614343\nn01614556\nn01614925\nn01615121\nn01615303\nn01615458\nn01615703\nn01616086\nn01616318\nn01617095\nn01617443\nn01617766\nn01618082\nn01618503\nn01619310\nn01619536\nn01619835\nn01620135\nn01620414\nn01620735\nn01621127\nn01621635\nn01622120\nn01622352\nn01622483\nn01622779\nn01622959\nn01623110\nn01623425\nn01623615\nn01623706\nn01623880\nn01624115\nn01624537\nn01624833\nn01625562\nn01627424\nn01628770\nn01629276\nn01629819\nn01629962\nn01630284\nn01630670\nn01630901\nn01631354\nn01631663\nn01632458\nn01632601\nn01632777\nn01633406\nn01633781\nn01635027\nn01636352\nn01636829\nn01637615\nn01639765\nn01640846\nn01641206\nn01641391\nn01641577\nn01641739\nn01642257\nn01642539\nn01643507\nn01643896\nn01644373\nn01644900\nn01645776\nn01646292\nn01646388\nn01646555\nn01646648\nn01646802\nn01646902\nn01647303\nn01647640\nn01648139\nn01648620\nn01649170\nn01650167\nn01650690\nn01651059\nn01652026\nn01654637\nn01661091\nn01662622\nn01662784\nn01663401\nn01663782\nn01664065\nn01664369\nn01664492\nn01664674\nn01664990\nn01665541\nn01665932\nn01666228\nn01666585\nn01667114\nn01667432\nn01667778\nn01668091\nn01668436\nn01668665\nn01668892\nn01669191\nn01669372\nn01669654\nn01670092\nn01670535\nn01670802\nn01671125\nn01671479\nn01672032\nn01673282\nn01674464\nn01674990\nn01675722\nn01677366\nn01677747\nn01678043\nn01678343\nn01679307\nn01679626\nn01679962\nn01680264\nn01680478\nn01680655\nn01680813\nn01681328\nn01681653\nn01681940\nn01682172\nn01682435\nn01682714\nn01683558\nn01684133\nn01684578\nn01685808\nn01687665\nn01687978\nn01688243\nn01689081\nn01689811\nn01690149\nn01691217\nn01692333\nn01692523\nn01693175\nn01693334\nn01693783\nn01694178\nn01694709\nn01694955\nn01695060\nn01696633\nn01697178\nn01697457\nn01697611\nn01698434\nn01698640\nn01698782\nn01699040\nn01699675\nn01701859\nn01704323\nn01713764\nn01726692\nn01727646\nn01728572\nn01728920\nn01729322\nn01729977\nn01730185\nn01730307\nn01730563
\nn01730812\nn01730960\nn01731545\nn01731941\nn01732244\nn01732614\nn01732789\nn01733466\nn01733757\nn01733957\nn01734104\nn01734418\nn01734637\nn01734808\nn01735189\nn01735439\nn01735577\nn01737021\nn01737472\nn01737728\nn01737875\nn01738065\nn01738601\nn01739381\nn01740131\nn01740551\nn01741232\nn01741562\nn01741943\nn01742172\nn01742821\nn01743086\nn01743605\nn01743936\nn01744100\nn01744270\nn01744401\nn01745125\nn01745484\nn01745902\nn01746359\nn01747589\nn01747885\nn01748264\nn01748686\nn01748906\nn01749244\nn01749582\nn01749742\nn01749939\nn01750167\nn01750437\nn01751036\nn01751472\nn01751748\nn01752165\nn01752585\nn01752736\nn01753032\nn01753180\nn01753488\nn01753959\nn01754370\nn01754533\nn01754876\nn01755581\nn01755740\nn01756089\nn01756291\nn01756508\nn01756733\nn01757115\nn01757343\nn01757677\nn01757901\nn01758141\nn01758757\nn01768244\nn01769347\nn01770081\nn01770393\nn01770795\nn01771417\nn01772222\nn01772664\nn01773157\nn01773549\nn01773797\nn01774384\nn01774750\nn01775062\nn01775370\nn01776313\nn01777304\nn01778217\nn01779148\nn01779629\nn01782209\nn01782516\nn01784675\nn01785667\nn01786646\nn01787835\nn01789740\nn01790711\nn01791107\nn01791463\nn01791625\nn01791954\nn01792042\nn01792158\nn01792429\nn01792640\nn01792955\nn01793249\nn01793435\nn01793715\nn01794158\nn01794344\nn01794651\nn01795088\nn01795545\nn01795735\nn01796340\nn01796519\nn01796729\nn01797020\nn01797307\nn01797601\nn01797886\nn01798168\nn01798484\nn01798706\nn01798839\nn01800424\nn01801876\nn01803078\nn01803362\nn01804163\nn01804478\nn01804653\nn01805801\nn01806143\nn01806297\nn01806364\nn01806467\nn01806567\nn01806847\nn01807105\nn01807496\nn01807828\nn01808140\nn01809106\nn01809371\nn01809752\nn01810268\nn01811909\nn01812337\nn01812662\nn01812866\nn01813088\nn01813385\nn01813532\nn01813948\nn01814217\nn01814370\nn01814755\nn01814921\nn01815601\nn01816887\nn01817263\nn01817346\nn01817953\nn01818299\nn01818515\nn01818832\nn01819115\nn01819313\nn01819465\nn01819734\nn01820052\nn018203
48\nn01820546\nn01821076\nn01821203\nn01821869\nn01822300\nn01823013\nn01823414\nn01824035\nn01824575\nn01825278\nn01826364\nn01826680\nn01827403\nn01827793\nn01828096\nn01828556\nn01828970\nn01829413\nn01829869\nn01830042\nn01830915\nn01832167\nn01832493\nn01833805\nn01834177\nn01834540\nn01835276\nn01837072\nn01838598\nn01839086\nn01839330\nn01839598\nn01839750\nn01840120\nn01840775\nn01841102\nn01841288\nn01841441\nn01841679\nn01842235\nn01842504\nn01843065\nn01843383\nn01843719\nn01844231\nn01844551\nn01844917\nn01845132\nn01846331\nn01847000\nn01847089\nn01847170\nn01847253\nn01847407\nn01847806\nn01847978\nn01848123\nn01848323\nn01848453\nn01848555\nn01848648\nn01848840\nn01848976\nn01849157\nn01849466\nn01849676\nn01849863\nn01850192\nn01850373\nn01850553\nn01850873\nn01851038\nn01851207\nn01851375\nn01851573\nn01851731\nn01851895\nn01852142\nn01852329\nn01852400\nn01852671\nn01852861\nn01853195\nn01853498\nn01853666\nn01853870\nn01854415\nn01854700\nn01854838\nn01855032\nn01855188\nn01855476\nn01855672\nn01856072\nn01856155\nn01856380\nn01856553\nn01856890\nn01857079\nn01857325\nn01857512\nn01857632\nn01857851\nn01858281\nn01858441\nn01858780\nn01858845\nn01858906\nn01859190\nn01859325\nn01859496\nn01859689\nn01859852\nn01860002\nn01860187\nn01861778\nn01862399\nn01871265\nn01872401\nn01872772\nn01873310\nn01874434\nn01874928\nn01875313\nn01876034\nn01877134\nn01877606\nn01877812\nn01878929\nn01879217\nn01879509\nn01881171\nn01882714\nn01883070\nn01884834\nn01885498\nn01886756\nn01887474\nn01887623\nn01887787\nn01887896\nn01888045\nn01888181\nn01888264\nn01888411\nn01889520\nn01891633\nn01893825\nn01896844\nn01897536\nn01899894\nn01900150\nn01903346\nn01904029\nn01904806\nn01904886\nn01905661\nn01906749\nn01909906\nn01910747\nn01913166\nn01914609\nn01914830\nn01915700\nn01915811\nn01916187\nn01916388\nn01916481\nn01916925\nn01917289\nn01917611\nn01917882\nn01918744\nn01922303\nn01923025\nn01924916\nn01930112\nn01934440\nn01935395\nn01937909\nn01938454\nn0194
0736\nn01942869\nn01943087\nn01943899\nn01944118\nn01944390\nn01944812\nn01944955\nn01945143\nn01945685\nn01946630\nn01947396\nn01948573\nn01949085\nn01950731\nn01951274\nn01951613\nn01953361\nn01953594\nn01953762\nn01955084\nn01955933\nn01956344\nn01956481\nn01956764\nn01957335\nn01958038\nn01958346\nn01958531\nn01959492\nn01959985\nn01960177\nn01960459\nn01961985\nn01963317\nn01963571\nn01964049\nn01964271\nn01964441\nn01965529\nn01965889\nn01968897\nn01970164\nn01970667\nn01971280\nn01972541\nn01974773\nn01976146\nn01976868\nn01976957\nn01978287\nn01978455\nn01979874\nn01980166\nn01981276\nn01982068\nn01982347\nn01982650\nn01983481\nn01984245\nn01984695\nn01985128\nn01986214\nn01986806\nn01987545\nn01990007\nn01990800\nn01991028\nn01991520\nn01992773\nn01994910\nn01998183\nn01998741\nn01999186\nn02000954\nn02002075\nn02002556\nn02002724\nn02003037\nn02003204\nn02003577\nn02003839\nn02004131\nn02004492\nn02004855\nn02005399\nn02005790\nn02006063\nn02006364\nn02006656\nn02006985\nn02007284\nn02007558\nn02008041\nn02008497\nn02008643\nn02008796\nn02009229\nn02009380\nn02009508\nn02009750\nn02009912\nn02010272\nn02010453\nn02010728\nn02011016\nn02011281\nn02011460\nn02011805\nn02011943\nn02012185\nn02012849\nn02013177\nn02013567\nn02013706\nn02014237\nn02014524\nn02014941\nn02015357\nn02015554\nn02016066\nn02016358\nn02016659\nn02016816\nn02016956\nn02017213\nn02017475\nn02017725\nn02018027\nn02018207\nn02018368\nn02018795\nn02019190\nn02019929\nn02021050\nn02021795\nn02022684\nn02023341\nn02023855\nn02023992\nn02024185\nn02024479\nn02024763\nn02025043\nn02025239\nn02025389\nn02026059\nn02026629\nn02026948\nn02027075\nn02027357\nn02027492\nn02027897\nn02028035\nn02028175\nn02028342\nn02028451\nn02028727\nn02028900\nn02029087\nn02029378\nn02029706\nn02030035\nn02030287\nn02030837\nn02030996\nn02031585\nn02031934\nn02032222\nn02032355\nn02032480\nn02033041\nn02033208\nn02033561\nn02033779\nn02034129\nn02034295\nn02034661\nn02034971\nn02035210\nn02036053\nn02036711\nn02
037110\nn02037464\nn02037869\nn02038466\nn02038993\nn02040266\nn02041085\nn02041246\nn02041678\nn02041875\nn02042046\nn02042180\nn02042472\nn02042759\nn02043063\nn02043333\nn02043808\nn02044178\nn02044517\nn02044778\nn02044908\nn02045369\nn02045596\nn02045864\nn02046171\nn02046759\nn02046939\nn02047045\nn02047260\nn02047411\nn02047517\nn02047614\nn02047975\nn02048115\nn02048353\nn02049088\nn02050004\nn02050313\nn02050442\nn02050586\nn02050809\nn02051059\nn02051845\nn02052204\nn02052365\nn02052775\nn02053083\nn02053425\nn02053584\nn02054036\nn02054502\nn02054711\nn02055107\nn02055658\nn02055803\nn02056228\nn02056570\nn02056728\nn02057035\nn02057330\nn02057731\nn02058221\nn02058594\nn02059162\nn02060133\nn02060411\nn02060889\nn02062017\nn02062430\nn02062744\nn02063224\nn02063662\nn02064338\nn02064816\nn02065026\nn02065407\nn02066245\nn02066707\nn02067240\nn02068541\nn02068974\nn02069412\nn02069701\nn02069974\nn02070174\nn02070430\nn02071294\nn02071636\nn02072798\nn02073831\nn02074367\nn02075296\nn02075612\nn02075927\nn02076196\nn02076402\nn02076779\nn02077152\nn02077384\nn02077658\nn02077787\nn02077923\nn02078292\nn02078574\nn02078738\nn02079005\nn02079389\nn02079851\nn02080146\nn02080415\nn02081571\nn02081798\nn02082791\nn02083346\nn02083672\nn02084071\nn02084732\nn02084861\nn02085272\nn02085374\nn02085620\nn02085936\nn02086079\nn02086240\nn02086646\nn02086753\nn02086910\nn02087046\nn02087122\nn02087394\nn02087551\nn02088094\nn02088238\nn02088364\nn02088466\nn02088632\nn02088839\nn02089232\nn02089468\nn02089555\nn02090379\nn02090475\nn02090622\nn02090721\nn02090827\nn02091032\nn02091134\nn02091244\nn02091467\nn02091831\nn02092002\nn02092339\nn02092468\nn02093056\nn02093256\nn02093428\nn02093647\nn02093754\nn02093859\nn02093991\nn02094114\nn02094258\nn02094433\nn02094562\nn02094721\nn02094931\nn02095050\nn02095314\nn02095412\nn02095570\nn02095727\nn02095889\nn02096051\nn02096177\nn02096294\nn02096437\nn02096585\nn02096756\nn02097047\nn02097130\nn02097209\nn02097298\nn
02097474\nn02097658\nn02097786\nn02098105\nn02098286\nn02098413\nn02098550\nn02098806\nn02098906\nn02099029\nn02099267\nn02099429\nn02099601\nn02099712\nn02099849\nn02099997\nn02100236\nn02100399\nn02100583\nn02100735\nn02100877\nn02101006\nn02101108\nn02101388\nn02101556\nn02101861\nn02102040\nn02102177\nn02102318\nn02102480\nn02102605\nn02102973\nn02103406\nn02103841\nn02104029\nn02104280\nn02104365\nn02104523\nn02104882\nn02105056\nn02105162\nn02105251\nn02105412\nn02105505\nn02105641\nn02105855\nn02106030\nn02106166\nn02106382\nn02106550\nn02106662\nn02106854\nn02106966\nn02107142\nn02107312\nn02107420\nn02107574\nn02107683\nn02107908\nn02108089\nn02108254\nn02108422\nn02108551\nn02108672\nn02108915\nn02109047\nn02109525\nn02109811\nn02109961\nn02110063\nn02110185\nn02110341\nn02110627\nn02110806\nn02110958\nn02111129\nn02111277\nn02111500\nn02111626\nn02111889\nn02112018\nn02112137\nn02112350\nn02112497\nn02112826\nn02113023\nn02113186\nn02113335\nn02113624\nn02113712\nn02113799\nn02114100\nn02114367\nn02114548\nn02114712\nn02114855\nn02115096\nn02115335\nn02115641\nn02115913\nn02116738\nn02117135\nn02117512\nn02117900\nn02118333\nn02119022\nn02119477\nn02119634\nn02119789\nn02120079\nn02120505\nn02120997\nn02121620\nn02121808\nn02122298\nn02122430\nn02122510\nn02122580\nn02122725\nn02122878\nn02122948\nn02123045\nn02123159\nn02123242\nn02123394\nn02123478\nn02123597\nn02123917\nn02124075\nn02124313\nn02124484\nn02124623\nn02125010\nn02125081\nn02125311\nn02125494\nn02126028\nn02126139\nn02126640\nn02126787\nn02127052\nn02127292\nn02127381\nn02127482\nn02127586\nn02127678\nn02127808\nn02128385\nn02128669\nn02128757\nn02128925\nn02129165\nn02129463\nn02129604\nn02129837\nn02129923\nn02129991\nn02130308\nn02131653\nn02132136\nn02132466\nn02132580\nn02132788\nn02133161\nn02133704\nn02134084\nn02134418\nn02135220\nn02136103\nn02137015\nn02137549\nn02138441\nn02138647\nn02138777\nn02139199\nn02139671\nn02140049\nn02146371\nn02146700\nn02147173\nn02147591\nn02147947\
nn02150482\nn02152740\nn02152881\nn02153109\nn02156871\nn02157206\nn02159955\nn02160947\nn02161338\nn02161457\nn02162561\nn02163297\nn02164464\nn02165105\nn02165456\nn02165877\nn02166567\nn02166826\nn02167151\nn02167820\nn02168245\nn02168699\nn02169023\nn02169497\nn02169705\nn02169974\nn02172182\nn02172518\nn02172870\nn02173113\nn02173373\nn02174001\nn02174659\nn02175014\nn02175569\nn02175916\nn02176261\nn02176439\nn02177972\nn02180875\nn02181724\nn02183096\nn02184473\nn02188699\nn02190166\nn02190790\nn02191773\nn02191979\nn02192252\nn02192513\nn02195526\nn02195819\nn02196119\nn02196344\nn02197689\nn02198859\nn02200198\nn02200509\nn02200850\nn02201000\nn02202006\nn02203152\nn02204907\nn02205219\nn02205673\nn02206856\nn02207179\nn02207345\nn02207805\nn02208280\nn02208498\nn02208848\nn02209354\nn02209624\nn02210427\nn02211444\nn02211627\nn02212062\nn02212958\nn02213107\nn02213239\nn02213543\nn02213663\nn02213788\nn02214341\nn02214773\nn02215770\nn02216211\nn02216365\nn02218371\nn02219486\nn02220518\nn02220804\nn02221083\nn02221414\nn02222035\nn02226429\nn02226821\nn02226970\nn02227247\nn02228341\nn02229156\nn02229544\nn02229765\nn02231052\nn02231487\nn02233338\nn02233943\nn02234355\nn02234848\nn02236044\nn02236241\nn02236355\nn02236896\nn02239774\nn02240068\nn02240517\nn02241426\nn02242137\nn02243562\nn02244797\nn02246628\nn02250822\nn02251775\nn02252226\nn02254697\nn02256656\nn02257284\nn02257985\nn02258198\nn02259212\nn02262449\nn02262803\nn02264232\nn02264363\nn02264885\nn02266050\nn02266864\nn02268148\nn02268443\nn02268853\nn02270623\nn02272871\nn02273392\nn02274024\nn02274259\nn02274822\nn02275560\nn02275773\nn02276078\nn02276258\nn02276355\nn02276749\nn02276902\nn02277094\nn02277268\nn02277742\nn02278024\nn02278210\nn02278839\nn02278980\nn02279257\nn02279637\nn02279972\nn02280649\nn02281015\nn02281136\nn02281406\nn02281787\nn02282257\nn02282385\nn02282553\nn02282903\nn02283077\nn02283201\nn02283951\nn02284611\nn02284884\nn02285801\nn02286089\nn02287004\nn0228826
8\nn02288789\nn02291748\nn02292692\nn02295064\nn02295390\nn02297442\nn02298218\nn02298541\nn02299157\nn02299505\nn02299846\nn02300797\nn02301935\nn02302244\nn02302620\nn02302969\nn02303284\nn02304036\nn02304432\nn02305085\nn02305929\nn02307325\nn02307681\nn02308139\nn02308471\nn02308735\nn02309242\nn02309337\nn02310334\nn02310585\nn02310717\nn02310941\nn02311060\nn02311617\nn02312006\nn02312427\nn02312640\nn02313008\nn02316707\nn02317335\nn02317781\nn02318167\nn02319095\nn02319308\nn02319555\nn02321170\nn02321529\nn02323449\nn02324045\nn02324431\nn02324514\nn02324587\nn02324850\nn02325366\nn02325722\nn02326432\nn02326862\nn02327028\nn02327656\nn02327842\nn02328150\nn02328429\nn02329401\nn02330245\nn02331046\nn02332156\nn02332755\nn02333546\nn02333909\nn02334201\nn02337001\nn02338145\nn02339376\nn02341475\nn02341974\nn02342885\nn02343320\nn02343772\nn02346627\nn02348173\nn02350105\nn02352591\nn02353861\nn02355227\nn02355477\nn02356381\nn02356612\nn02356798\nn02356977\nn02357111\nn02357401\nn02357585\nn02357911\nn02358091\nn02358390\nn02358584\nn02358890\nn02359047\nn02359324\nn02359556\nn02359915\nn02360282\nn02361337\nn02361587\nn02361706\nn02363005\nn02363351\nn02364520\nn02364673\nn02364840\nn02365480\nn02366959\nn02367492\nn02370806\nn02372584\nn02372952\nn02373336\nn02374149\nn02374451\nn02375302\nn02376542\nn02376679\nn02376791\nn02376918\nn02377063\nn02377181\nn02377291\nn02377388\nn02377480\nn02377603\nn02377703\nn02378541\nn02378969\nn02379081\nn02379183\nn02379329\nn02379430\nn02379630\nn02379908\nn02380052\nn02380335\nn02380464\nn02380583\nn02380745\nn02380875\nn02381004\nn02381261\nn02381364\nn02381460\nn02381609\nn02381831\nn02382039\nn02382132\nn02382204\nn02382338\nn02382437\nn02382635\nn02382750\nn02382850\nn02382948\nn02383231\nn02385214\nn02386014\nn02386141\nn02386224\nn02386310\nn02386496\nn02386853\nn02386968\nn02387093\nn02387254\nn02387346\nn02387722\nn02387887\nn02388143\nn02388276\nn02388735\nn02388832\nn02388917\nn02389026\nn02389128\nn02389
261\nn02389346\nn02389559\nn02389779\nn02390015\nn02390101\nn02390640\nn02391049\nn02391234\nn02391373\nn02391508\nn02391994\nn02392434\nn02392824\nn02393161\nn02393580\nn02393807\nn02393940\nn02394477\nn02395003\nn02395406\nn02395694\nn02396014\nn02396088\nn02396427\nn02397096\nn02397529\nn02397744\nn02398521\nn02399000\nn02402010\nn02402175\nn02402425\nn02403003\nn02403231\nn02403325\nn02403454\nn02403740\nn02403920\nn02404186\nn02404432\nn02404573\nn02404906\nn02405101\nn02405302\nn02405799\nn02405929\nn02406174\nn02406533\nn02406647\nn02406749\nn02407071\nn02407276\nn02407390\nn02407625\nn02407959\nn02408429\nn02408817\nn02409508\nn02410011\nn02410509\nn02410702\nn02410900\nn02411206\nn02411705\nn02411999\nn02412080\nn02412210\nn02412440\nn02412629\nn02413050\nn02413131\nn02413593\nn02414209\nn02414290\nn02414578\nn02414763\nn02415253\nn02415435\nn02415577\nn02415829\nn02416104\nn02416519\nn02416820\nn02416880\nn02416964\nn02417070\nn02417387\nn02417534\nn02417663\nn02417914\nn02418465\nn02419336\nn02419634\nn02419796\nn02420509\nn02420828\nn02421136\nn02421449\nn02421792\nn02422106\nn02422391\nn02422699\nn02423022\nn02423218\nn02423589\nn02424085\nn02424305\nn02424486\nn02424909\nn02425228\nn02425887\nn02426481\nn02426813\nn02427032\nn02427470\nn02427576\nn02427724\nn02428349\nn02428508\nn02429456\nn02430045\nn02430559\nn02430830\nn02431122\nn02431337\nn02431628\nn02431785\nn02431976\nn02432291\nn02432511\nn02432704\nn02432983\nn02433318\nn02433546\nn02433925\nn02434190\nn02434954\nn02437136\nn02437312\nn02437482\nn02437616\nn02438173\nn02438272\nn02438580\nn02439033\nn02439398\nn02441942\nn02442845\nn02443015\nn02443114\nn02443346\nn02443484\nn02444819\nn02445004\nn02445171\nn02445394\nn02445715\nn02446206\nn02447366\nn02447762\nn02448060\nn02449350\nn02450295\nn02453108\nn02454379\nn02454794\nn02456962\nn02457408\nn02457945\nn02458135\nn02460009\nn02460451\nn02461128\nn02461830\nn02469248\nn02469472\nn02469914\nn02470238\nn02470325\nn02472293\nn02472987\nn024
73307\nn02474777\nn02475078\nn02475669\nn02480153\nn02480495\nn02480855\nn02481103\nn02481235\nn02481366\nn02481500\nn02481823\nn02482286\nn02482474\nn02482650\nn02483362\nn02483708\nn02484322\nn02484975\nn02485536\nn02486261\nn02486410\nn02486657\nn02486908\nn02487347\nn02487547\nn02487847\nn02488291\nn02488415\nn02488702\nn02489166\nn02490219\nn02490811\nn02491107\nn02492035\nn02492660\nn02493509\nn02493793\nn02494079\nn02496913\nn02497673\nn02499022\nn02499316\nn02499808\nn02500267\nn02501583\nn02503517\nn02504013\nn02504458\nn02508021\nn02508213\nn02508742\nn02509197\nn02509515\nn02509815\nn02510455\nn02512053\nn02512830\nn02512938\nn02514041\nn02516188\nn02517442\nn02518324\nn02519148\nn02519686\nn02519862\nn02520147\nn02522399\nn02523427\nn02524202\nn02525382\nn02526121\nn02527057\nn02527271\nn02527622\nn02530421\nn02530999\nn02532028\nn02532602\nn02533209\nn02533834\nn02534734\nn02535258\nn02535537\nn02535759\nn02536165\nn02536456\nn02536864\nn02537085\nn02537319\nn02537525\nn02537716\nn02538010\nn02538216\nn02541687\nn02542432\nn02543565\nn02548247\nn02549248\nn02549989\nn02555863\nn02556846\nn02557182\nn02557318\nn02557591\nn02557749\nn02560110\nn02561108\nn02561381\nn02561514\nn02561661\nn02562315\nn02562796\nn02563182\nn02563648\nn02563792\nn02564270\nn02564720\nn02565072\nn02565324\nn02565573\nn02568087\nn02568959\nn02569484\nn02570164\nn02570838\nn02572196\nn02572484\nn02573704\nn02574271\nn02576575\nn02576906\nn02577403\nn02578771\nn02578928\nn02579303\nn02579928\nn02580336\nn02580679\nn02580830\nn02581957\nn02583890\nn02584145\nn02584449\nn02585872\nn02586543\nn02588286\nn02589623\nn02590094\nn02590702\nn02592055\nn02593019\nn02595702\nn02596067\nn02596381\nn02597608\nn02598573\nn02598878\nn02599052\nn02599347\nn02599557\nn02601344\nn02603317\nn02603540\nn02605316\nn02605703\nn02605936\nn02606052\nn02606384\nn02607072\nn02607201\nn02607470\nn02607862\nn02610066\nn02610664\nn02611561\nn02613181\nn02616851\nn02618827\nn02619165\nn02619550\nn02620167\nn0
2624167\nn02624807\nn02625258\nn02625612\nn02625851\nn02626265\nn02626762\nn02627292\nn02627532\nn02628062\nn02629230\nn02630281\nn02630615\nn02630739\nn02631041\nn02631330\nn02631475\nn02639087\nn02639605\nn02640242\nn02640626\nn02640857\nn02641379\nn02643112\nn02643566\nn02643836\nn02644113\nn02649546\nn02650050\nn02652132\nn02653145\nn02653497\nn02654112\nn02654425\nn02654745\nn02655020\nn02655848\nn02656032\nn02656670\nn02657368\nn02657694\nn02658531\nn02660208\nn02660640\nn02663211\nn02666196\nn02666501\nn02666624\nn02666943\nn02667093\nn02667244\nn02667379\nn02667478\nn02667576\nn02669295\nn02669534\nn02669723\nn02670186\nn02670382\nn02670683\nn02672371\nn02672831\nn02675219\nn02676566\nn02676938\nn02678897\nn02679257\nn02680110\nn02680512\nn02680754\nn02681392\nn02682569\nn02682922\nn02683323\nn02683454\nn02683558\nn02683791\nn02685082\nn02686121\nn02686227\nn02686379\nn02686568\nn02687172\nn02687423\nn02687821\nn02687992\nn02688273\nn02688443\nn02689144\nn02689274\nn02689434\nn02689748\nn02690373\nn02691156\nn02692086\nn02692232\nn02692877\nn02693246\nn02694045\nn02694426\nn02694662\nn02695627\nn02696165\nn02697221\nn02697675\nn02698634\nn02699494\nn02699629\nn02699770\nn02699915\nn02700064\nn02700258\nn02700895\nn02701002\nn02702989\nn02703275\nn02704645\nn02704792\nn02704949\nn02705201\nn02705429\nn02705944\nn02708093\nn02708433\nn02708555\nn02708711\nn02709101\nn02709367\nn02709637\nn02709908\nn02710044\nn02710201\nn02710324\nn02710429\nn02710600\nn02713003\nn02713364\nn02714751\nn02715229\nn02715513\nn02715712\nn02720048\nn02723165\nn02725872\nn02726017\nn02726305\nn02726681\nn02727016\nn02727141\nn02727426\nn02728440\nn02729837\nn02729965\nn02730930\nn02731398\nn02731629\nn02731900\nn02732072\nn02732572\nn02732827\nn02733213\nn02733524\nn02734725\nn02735361\nn02735538\nn02735688\nn02736798\nn02737660\nn02738031\nn02738535\nn02738741\nn02738859\nn02739427\nn02739550\nn02739668\nn02739889\nn02740300\nn02740533\nn02740764\nn02741475\nn02742322\nn02742468\n
n02742753\nn02744323\nn02744844\nn02745611\nn02746365\nn02747177\nn02747672\nn02747802\nn02749479\nn02749953\nn02750070\nn02750169\nn02751215\nn02751295\nn02752496\nn02752615\nn02752810\nn02753044\nn02753394\nn02754103\nn02754656\nn02755140\nn02755529\nn02755823\nn02756098\nn02756977\nn02757061\nn02757337\nn02757462\nn02757714\nn02757810\nn02758134\nn02758863\nn02758960\nn02759257\nn02759387\nn02759963\nn02760099\nn02760199\nn02760429\nn02760658\nn02760855\nn02761206\nn02761392\nn02761557\nn02761696\nn02761834\nn02762371\nn02762508\nn02763306\nn02763604\nn02763901\nn02764044\nn02764398\nn02764505\nn02764779\nn02764935\nn02766320\nn02766534\nn02766792\nn02767038\nn02767147\nn02767433\nn02767665\nn02767956\nn02768114\nn02768226\nn02768655\nn02768973\nn02769075\nn02769290\nn02769669\nn02769748\nn02769963\nn02770211\nn02770721\nn02770830\nn02771004\nn02771166\nn02771286\nn02771750\nn02772101\nn02772435\nn02772700\nn02773037\nn02773838\nn02774152\nn02774630\nn02774921\nn02775039\nn02775178\nn02775483\nn02775897\nn02776205\nn02776631\nn02776825\nn02776978\nn02777100\nn02777292\nn02777734\nn02778294\nn02778456\nn02778669\nn02779435\nn02780704\nn02780815\nn02781121\nn02781338\nn02782093\nn02782602\nn02782681\nn02782778\nn02783161\nn02783324\nn02783459\nn02783900\nn02783994\nn02784124\nn02785648\nn02786058\nn02786198\nn02786331\nn02786736\nn02786837\nn02787435\nn02787622\nn02788021\nn02788148\nn02788572\nn02789487\nn02790669\nn02790823\nn02790996\nn02791124\nn02791270\nn02792409\nn02792552\nn02793089\nn02793199\nn02793495\nn02793842\nn02794156\nn02794664\nn02795169\nn02795528\nn02795670\nn02796207\nn02796318\nn02796995\nn02797295\nn02797535\nn02797692\nn02799071\nn02799175\nn02799323\nn02799897\nn02800213\nn02800497\nn02800675\nn02801184\nn02801450\nn02801525\nn02801823\nn02801938\nn02802215\nn02802426\nn02802544\nn02802721\nn02802990\nn02803349\nn02803539\nn02803666\nn02803934\nn02804123\nn02804252\nn02804414\nn02804515\nn02804610\nn02805983\nn02806088\nn02806379\nn02806530
\nn02807133\nn02807523\nn02807616\nn02807731\nn02808185\nn02808304\nn02808440\nn02809105\nn02810471\nn02810782\nn02811059\nn02811204\nn02811350\nn02811468\nn02811618\nn02811719\nn02811936\nn02812201\nn02812949\nn02813252\nn02813399\nn02813544\nn02813645\nn02813752\nn02814428\nn02814533\nn02814774\nn02814860\nn02815749\nn02815834\nn02815950\nn02816656\nn02816768\nn02817031\nn02817516\nn02818135\nn02818832\nn02820210\nn02820556\nn02820675\nn02821202\nn02821627\nn02821943\nn02822064\nn02822220\nn02822579\nn02823124\nn02823335\nn02823428\nn02823510\nn02823586\nn02823750\nn02823848\nn02823964\nn02824058\nn02824319\nn02824448\nn02825153\nn02825442\nn02825657\nn02825961\nn02826068\nn02826589\nn02826886\nn02827606\nn02828299\nn02828427\nn02828884\nn02829596\nn02831237\nn02831335\nn02831595\nn02831724\nn02831894\nn02833793\nn02834397\nn02834778\nn02835271\nn02835412\nn02835724\nn02835829\nn02835915\nn02836035\nn02836174\nn02836392\nn02837789\nn02837887\nn02838345\nn02838728\nn02839110\nn02839351\nn02839592\nn02839910\nn02840134\nn02840245\nn02840619\nn02841187\nn02841315\nn02841506\nn02842573\nn02843029\nn02843158\nn02843276\nn02843553\nn02843684\nn02844307\nn02846141\nn02846511\nn02846733\nn02847631\nn02847852\nn02848216\nn02848523\nn02849154\nn02849885\nn02850732\nn02850950\nn02851099\nn02851939\nn02852043\nn02852173\nn02852360\nn02853016\nn02854532\nn02854739\nn02854926\nn02855089\nn02855390\nn02855701\nn02855925\nn02856237\nn02857477\nn02857644\nn02858304\nn02859184\nn02859343\nn02859443\nn02859955\nn02860415\nn02860640\nn02860847\nn02861022\nn02861147\nn02861387\nn02861886\nn02862048\nn02862916\nn02863014\nn02863426\nn02863536\nn02863750\nn02864504\nn02864593\nn02865351\nn02865665\nn02865931\nn02866386\nn02867715\nn02867966\nn02868638\nn02868975\nn02869155\nn02869249\nn02869737\nn02869837\nn02870526\nn02870676\nn02870880\nn02871005\nn02871147\nn02871314\nn02871439\nn02871525\nn02871824\nn02871963\nn02872333\nn02872529\nn02872752\nn02873520\nn02873733\nn02873839\nn028740
86\nn02874442\nn02874537\nn02876084\nn02876326\nn02876657\nn02877266\nn02877765\nn02877962\nn02878222\nn02878425\nn02879087\nn02879309\nn02879718\nn02880189\nn02880393\nn02880546\nn02880842\nn02880940\nn02881193\nn02881757\nn02881906\nn02882190\nn02882301\nn02882647\nn02882894\nn02883004\nn02883205\nn02883344\nn02884994\nn02885108\nn02885338\nn02885462\nn02885882\nn02886321\nn02886434\nn02887079\nn02887209\nn02887489\nn02887970\nn02888270\nn02889425\nn02889646\nn02890188\nn02890351\nn02890513\nn02890662\nn02890940\nn02891188\nn02891788\nn02892201\nn02892304\nn02892499\nn02892767\nn02892948\nn02893608\nn02893692\nn02893941\nn02894158\nn02894337\nn02894605\nn02895154\nn02895438\nn02896442\nn02897097\nn02897820\nn02898269\nn02898369\nn02898585\nn02898711\nn02899439\nn02900160\nn02900705\nn02901114\nn02901259\nn02901377\nn02901793\nn02902079\nn02902687\nn02902916\nn02903126\nn02903204\nn02903852\nn02904233\nn02904640\nn02904803\nn02904927\nn02905036\nn02905152\nn02906734\nn02907082\nn02907391\nn02907656\nn02907873\nn02908217\nn02908773\nn02909285\nn02909870\nn02910145\nn02910353\nn02910542\nn02910864\nn02911332\nn02912065\nn02912557\nn02912894\nn02913152\nn02914991\nn02915904\nn02916179\nn02916350\nn02916936\nn02917067\nn02917377\nn02917521\nn02917607\nn02917964\nn02918112\nn02918330\nn02918595\nn02918831\nn02918964\nn02919148\nn02919414\nn02919792\nn02919890\nn02920083\nn02920259\nn02920369\nn02920658\nn02921029\nn02921195\nn02921756\nn02921884\nn02922292\nn02922578\nn02922798\nn02923682\nn02924116\nn02925009\nn02925107\nn02925519\nn02925666\nn02926426\nn02926591\nn02927161\nn02927764\nn02927887\nn02928049\nn02928299\nn02928608\nn02929289\nn02929582\nn02930080\nn02930214\nn02930645\nn02930766\nn02931148\nn02931294\nn02931417\nn02931836\nn02932019\nn02932400\nn02932523\nn02932693\nn02932891\nn02933112\nn02933340\nn02933462\nn02933649\nn02934168\nn02934451\nn02935017\nn02935387\nn02935658\nn02935891\nn02936176\nn02936281\nn02936402\nn02936570\nn02936714\nn02937958\nn0293
8886\nn02939185\nn02939866\nn02940385\nn02940570\nn02942349\nn02942460\nn02942699\nn02943241\nn02943871\nn02943964\nn02944075\nn02944146\nn02944459\nn02944579\nn02946127\nn02946270\nn02946348\nn02946509\nn02946824\nn02946921\nn02947660\nn02947818\nn02948072\nn02948557\nn02949202\nn02949542\nn02950256\nn02950632\nn02950826\nn02950943\nn02951358\nn02951585\nn02951703\nn02951843\nn02952109\nn02952237\nn02952374\nn02952485\nn02952585\nn02952674\nn02953197\nn02953455\nn02954163\nn02954340\nn02954938\nn02955065\nn02955247\nn02955540\nn02956699\nn02956795\nn02956883\nn02957008\nn02957135\nn02957755\nn02958343\nn02959942\nn02960352\nn02960690\nn02960903\nn02961035\nn02961225\nn02961451\nn02961544\nn02962061\nn02962200\nn02962843\nn02963159\nn02963302\nn02963503\nn02963692\nn02963821\nn02963987\nn02964843\nn02965216\nn02965300\nn02965783\nn02966193\nn02966545\nn02966687\nn02967294\nn02967626\nn02967782\nn02968074\nn02968333\nn02968473\nn02969010\nn02969323\nn02970408\nn02970534\nn02970685\nn02970849\nn02971167\nn02971356\nn02971579\nn02971691\nn02972397\nn02973017\nn02973236\nn02973805\nn02973904\nn02974003\nn02974348\nn02974697\nn02975212\nn02976123\nn02976249\nn02976350\nn02976455\nn02976939\nn02977058\nn02977330\nn02977438\nn02977619\nn02977936\nn02978055\nn02978478\nn02978753\nn02978881\nn02979074\nn02979186\nn02979290\nn02979399\nn02979836\nn02980036\nn02980441\nn02981024\nn02981321\nn02981792\nn02981911\nn02982232\nn02982416\nn02982515\nn02983189\nn02983357\nn02984061\nn02984203\nn02984469\nn02985963\nn02986160\nn02987379\nn02987492\nn02988066\nn02988156\nn02988304\nn02988486\nn02988679\nn02988963\nn02989099\nn02990373\nn02991302\nn02991847\nn02992032\nn02992211\nn02992368\nn02992529\nn02992795\nn02993194\nn02993368\nn02994573\nn02995345\nn02995871\nn02995998\nn02997391\nn02997607\nn02997910\nn02998003\nn02998563\nn02998841\nn02999138\nn02999410\nn02999936\nn03000134\nn03000247\nn03000684\nn03001115\nn03001627\nn03002096\nn03002341\nn03002711\nn03002816\nn03002948\nn03
003091\nn03004275\nn03004824\nn03005033\nn03005285\nn03006626\nn03007130\nn03007444\nn03007591\nn03008177\nn03008976\nn03009794\nn03010473\nn03010656\nn03010795\nn03010915\nn03011018\nn03011355\nn03011741\nn03012013\nn03012897\nn03013438\nn03013580\nn03013850\nn03014440\nn03014705\nn03015149\nn03015254\nn03015478\nn03015851\nn03016389\nn03016609\nn03016737\nn03016868\nn03016953\nn03017070\nn03017168\nn03018209\nn03018349\nn03018712\nn03019434\nn03019685\nn03019938\nn03020034\nn03020416\nn03020692\nn03021228\nn03024064\nn03025250\nn03026506\nn03026907\nn03027108\nn03027250\nn03027625\nn03028079\nn03028596\nn03028785\nn03029197\nn03029445\nn03030262\nn03030353\nn03030557\nn03030880\nn03031012\nn03031152\nn03031422\nn03032252\nn03032453\nn03032811\nn03033362\nn03033986\nn03034244\nn03034405\nn03034663\nn03035252\nn03035832\nn03036022\nn03037404\nn03037709\nn03038281\nn03038685\nn03038870\nn03039015\nn03039259\nn03039493\nn03039827\nn03039947\nn03040376\nn03041114\nn03041449\nn03041632\nn03041810\nn03042139\nn03042490\nn03042697\nn03043423\nn03043693\nn03043958\nn03044934\nn03045228\nn03045337\nn03045698\nn03046029\nn03046133\nn03046257\nn03046802\nn03046921\nn03047052\nn03047690\nn03047799\nn03047941\nn03048883\nn03049782\nn03049924\nn03050453\nn03050546\nn03050655\nn03050864\nn03051041\nn03051249\nn03051396\nn03051540\nn03054901\nn03055418\nn03055857\nn03057021\nn03057541\nn03057636\nn03057920\nn03058107\nn03058603\nn03059685\nn03061211\nn03061345\nn03061505\nn03061674\nn03062015\nn03062122\nn03062245\nn03062336\nn03062985\nn03063073\nn03063199\nn03063338\nn03063485\nn03063599\nn03063689\nn03063968\nn03064250\nn03064350\nn03064758\nn03064935\nn03065243\nn03065424\nn03066359\nn03066849\nn03067093\nn03067212\nn03067339\nn03067518\nn03068181\nn03068998\nn03069752\nn03070059\nn03070193\nn03071021\nn03071160\nn03072201\nn03072440\nn03073296\nn03073545\nn03073694\nn03073977\nn03074380\nn03074855\nn03075097\nn03075370\nn03075634\nn03075768\nn03075946\nn03077616\nn03077741\nn
03078802\nn03078995\nn03079136\nn03079230\nn03079494\nn03080497\nn03080633\nn03082280\nn03082656\nn03082807\nn03082979\nn03084420\nn03084834\nn03085013\nn03085219\nn03085602\nn03085915\nn03086457\nn03086580\nn03086670\nn03086868\nn03087069\nn03087245\nn03087366\nn03087816\nn03088389\nn03088580\nn03089624\nn03089753\nn03089879\nn03090000\nn03090172\nn03091044\nn03091374\nn03092166\nn03092314\nn03092656\nn03092883\nn03094159\nn03094503\nn03095699\nn03096960\nn03097362\nn03097535\nn03097673\nn03098140\nn03098688\nn03098959\nn03099147\nn03099274\nn03099454\nn03099945\nn03100240\nn03100346\nn03100490\nn03100897\nn03101156\nn03101517\nn03101664\nn03101796\nn03101986\nn03102371\nn03102654\nn03103396\nn03103563\nn03105088\nn03105306\nn03105467\nn03106898\nn03107046\nn03107488\nn03108455\nn03108853\nn03109150\nn03109253\nn03109693\nn03109881\nn03110669\nn03111041\nn03111177\nn03111296\nn03112719\nn03112869\nn03113152\nn03113657\nn03113835\nn03114236\nn03114379\nn03114504\nn03115180\nn03115400\nn03115762\nn03115897\nn03116530\nn03116767\nn03118969\nn03119203\nn03119396\nn03119510\nn03120491\nn03120778\nn03121298\nn03121431\nn03121897\nn03122073\nn03122202\nn03122295\nn03123553\nn03123809\nn03123917\nn03124043\nn03124170\nn03124474\nn03124590\nn03125057\nn03125729\nn03125870\nn03126385\nn03126580\nn03126707\nn03127203\nn03127408\nn03127747\nn03127925\nn03128085\nn03128248\nn03128427\nn03128519\nn03129001\nn03129471\nn03129753\nn03130761\nn03131574\nn03131669\nn03131967\nn03132076\nn03132261\nn03132666\nn03132776\nn03133050\nn03133415\nn03133878\nn03134739\nn03134853\nn03135030\nn03135532\nn03136369\nn03137473\nn03138344\nn03138669\nn03139464\nn03140126\nn03140292\nn03140431\nn03140652\nn03141065\nn03141327\nn03141455\nn03141702\nn03141823\nn03142679\nn03145147\nn03145522\nn03145719\nn03146219\nn03146687\nn03146846\nn03147280\nn03147509\nn03148324\nn03148727\nn03149686\nn03150232\nn03150511\nn03151077\nn03152303\nn03154073\nn03154895\nn03156279\nn03156767\nn03157348\nn03158186\
nn03158885\nn03159535\nn03159640\nn03160309\nn03160740\nn03161450\nn03163222\nn03163381\nn03164344\nn03164605\nn03164722\nn03165096\nn03165466\nn03165616\nn03166514\nn03167978\nn03168107\nn03168217\nn03169176\nn03170635\nn03171228\nn03171356\nn03171635\nn03172038\nn03173270\nn03173387\nn03173929\nn03174450\nn03174731\nn03175081\nn03175189\nn03175457\nn03176386\nn03176594\nn03176763\nn03177165\nn03178000\nn03178430\nn03178674\nn03179701\nn03179910\nn03180011\nn03180384\nn03180504\nn03180865\nn03180969\nn03181293\nn03183080\nn03186285\nn03186818\nn03187037\nn03187268\nn03187595\nn03188531\nn03188725\nn03189083\nn03191286\nn03192543\nn03193107\nn03193260\nn03193423\nn03193597\nn03195332\nn03195959\nn03196062\nn03196217\nn03196598\nn03196990\nn03197337\nn03198500\nn03199647\nn03199775\nn03199901\nn03200231\nn03200357\nn03200539\nn03200701\nn03200906\nn03201035\nn03201208\nn03201529\nn03201638\nn03201776\nn03202354\nn03202940\nn03204306\nn03204558\nn03205458\nn03205574\nn03205669\nn03206282\nn03206718\nn03206908\nn03207305\nn03207630\nn03207743\nn03207835\nn03207941\nn03208556\nn03208938\nn03209359\nn03209910\nn03210245\nn03210372\nn03210552\nn03211117\nn03211789\nn03212114\nn03212811\nn03213538\nn03213826\nn03214253\nn03214582\nn03215508\nn03216402\nn03216710\nn03216828\nn03218198\nn03219010\nn03219135\nn03219483\nn03219966\nn03220237\nn03220513\nn03220692\nn03221059\nn03221351\nn03221540\nn03221720\nn03222176\nn03222318\nn03222516\nn03223162\nn03223299\nn03223553\nn03223686\nn03224603\nn03224753\nn03225108\nn03225777\nn03225988\nn03226254\nn03226375\nn03226538\nn03226880\nn03227317\nn03228254\nn03228365\nn03228692\nn03228967\nn03229244\nn03231368\nn03231819\nn03232309\nn03232543\nn03233123\nn03233624\nn03233744\nn03233905\nn03234164\nn03234952\nn03235042\nn03235180\nn03235327\nn03235796\nn03236217\nn03236423\nn03236735\nn03237340\nn03237416\nn03237839\nn03237992\nn03238131\nn03238286\nn03238586\nn03239054\nn03239259\nn03239726\nn03240140\nn03240683\nn03240892\nn0324109
3\nn03241335\nn03241496\nn03242506\nn03243218\nn03244047\nn03244231\nn03244775\nn03244919\nn03245724\nn03245889\nn03246454\nn03246933\nn03247083\nn03249342\nn03249569\nn03250089\nn03250279\nn03250405\nn03250847\nn03251533\nn03251766\nn03251932\nn03252637\nn03253279\nn03253796\nn03253886\nn03254046\nn03254189\nn03254374\nn03254862\nn03255030\nn03255899\nn03256032\nn03256166\nn03256788\nn03256928\nn03257210\nn03257586\nn03258330\nn03258577\nn03258905\nn03259009\nn03259280\nn03259401\nn03259505\nn03260849\nn03261019\nn03261603\nn03261776\nn03262072\nn03262248\nn03262519\nn03262717\nn03262809\nn03262932\nn03263076\nn03266371\nn03266749\nn03267113\nn03267468\nn03267821\nn03268142\nn03268311\nn03268645\nn03268790\nn03268918\nn03269203\nn03269401\nn03271030\nn03271574\nn03272010\nn03272125\nn03272239\nn03272383\nn03272562\nn03272810\nn03272940\nn03273061\nn03273551\nn03273740\nn03273913\nn03274265\nn03274435\nn03275681\nn03277459\nn03277771\nn03278248\nn03278914\nn03279508\nn03281145\nn03281673\nn03282295\nn03282401\nn03283221\nn03284743\nn03284886\nn03285578\nn03287351\nn03287733\nn03288500\nn03288886\nn03289660\nn03289985\nn03290096\nn03290195\nn03290653\nn03291413\nn03291741\nn03291819\nn03291963\nn03292475\nn03292603\nn03293741\nn03293863\nn03294048\nn03294833\nn03295012\nn03295246\nn03296081\nn03296328\nn03297103\nn03297226\nn03297495\nn03297644\nn03297735\nn03298089\nn03298716\nn03298858\nn03300216\nn03300443\nn03301568\nn03301833\nn03301940\nn03302671\nn03302938\nn03303217\nn03303831\nn03306385\nn03307037\nn03307792\nn03308152\nn03308481\nn03309110\nn03309356\nn03309465\nn03309687\nn03309808\nn03313333\nn03314227\nn03314608\nn03314780\nn03314884\nn03315644\nn03316105\nn03316406\nn03317788\nn03318294\nn03318865\nn03318983\nn03319457\nn03319745\nn03320046\nn03320262\nn03320421\nn03320519\nn03320959\nn03321103\nn03321563\nn03321954\nn03322570\nn03322704\nn03322836\nn03322940\nn03323096\nn03324928\nn03325088\nn03325584\nn03325941\nn03326660\nn03326795\nn03326948\nn03327
133\nn03327234\nn03327553\nn03327691\nn03329302\nn03329536\nn03329663\nn03331077\nn03331599\nn03332005\nn03332271\nn03332393\nn03332989\nn03333129\nn03333252\nn03333610\nn03333711\nn03334291\nn03334382\nn03334912\nn03335030\nn03336282\nn03336575\nn03337140\nn03337383\nn03338821\nn03339529\nn03339643\nn03340723\nn03341153\nn03341297\nn03342015\nn03342127\nn03342262\nn03343354\nn03343560\nn03343737\nn03343853\nn03344305\nn03344393\nn03344642\nn03345487\nn03345837\nn03346135\nn03346455\nn03347037\nn03347617\nn03348868\nn03349469\nn03349771\nn03349892\nn03350204\nn03350602\nn03351434\nn03351979\nn03352628\nn03353951\nn03354207\nn03354903\nn03355768\nn03355925\nn03356858\nn03356982\nn03357267\nn03357716\nn03358172\nn03358380\nn03358726\nn03359137\nn03359285\nn03359436\nn03359566\nn03360300\nn03360431\nn03360622\nn03361297\nn03361380\nn03361550\nn03362890\nn03363363\nn03363549\nn03363749\nn03364008\nn03364599\nn03365231\nn03365374\nn03365592\nn03365991\nn03366823\nn03366974\nn03367059\nn03367410\nn03367545\nn03368352\nn03369276\nn03370387\nn03371875\nn03372029\nn03372549\nn03373237\nn03373611\nn03373943\nn03374372\nn03374473\nn03374649\nn03374838\nn03375329\nn03375575\nn03376159\nn03376279\nn03376595\nn03376938\nn03378005\nn03378174\nn03379051\nn03379204\nn03379343\nn03379828\nn03380724\nn03380867\nn03381126\nn03382292\nn03382413\nn03382856\nn03383099\nn03384352\nn03384891\nn03385557\nn03386011\nn03386544\nn03386726\nn03386870\nn03387653\nn03388043\nn03388183\nn03388323\nn03388549\nn03389611\nn03389761\nn03389889\nn03390075\nn03390786\nn03390983\nn03391301\nn03392741\nn03393017\nn03393761\nn03393912\nn03394272\nn03394480\nn03394649\nn03394916\nn03395514\nn03395859\nn03396074\nn03396654\nn03397087\nn03397266\nn03397532\nn03397947\nn03398153\nn03398228\nn03399677\nn03399761\nn03399971\nn03400231\nn03401129\nn03401279\nn03402188\nn03402941\nn03403643\nn03404149\nn03404251\nn03404360\nn03405265\nn03405595\nn03405725\nn03406966\nn03407369\nn03407865\nn03408054\nn03408444\nn034
09297\nn03409393\nn03409591\nn03410571\nn03410740\nn03410938\nn03411079\nn03412058\nn03413684\nn03414029\nn03414162\nn03414676\nn03415252\nn03415486\nn03415749\nn03416094\nn03416489\nn03416640\nn03416775\nn03416900\nn03417042\nn03417202\nn03417345\nn03417749\nn03417970\nn03418158\nn03418242\nn03418402\nn03418618\nn03418915\nn03419014\nn03420345\nn03420801\nn03421324\nn03421485\nn03421669\nn03422072\nn03423306\nn03423479\nn03423568\nn03423719\nn03423877\nn03424325\nn03424489\nn03424630\nn03424862\nn03425241\nn03425325\nn03425413\nn03425595\nn03425769\nn03426134\nn03427202\nn03427296\nn03428090\nn03428226\nn03428349\nn03429003\nn03429137\nn03429288\nn03429682\nn03429914\nn03430091\nn03430313\nn03430418\nn03430551\nn03431243\nn03431745\nn03432061\nn03432129\nn03433877\nn03434188\nn03434285\nn03435593\nn03435743\nn03435991\nn03436075\nn03436182\nn03436417\nn03436549\nn03436891\nn03437430\nn03437741\nn03437829\nn03437941\nn03438071\nn03438257\nn03438661\nn03438863\nn03439348\nn03439814\nn03440216\nn03440682\nn03441112\nn03441345\nn03442597\nn03442756\nn03443005\nn03443149\nn03443371\nn03443912\nn03444034\nn03445326\nn03445617\nn03445777\nn03445924\nn03446070\nn03446268\nn03446832\nn03447075\nn03447358\nn03447447\nn03447721\nn03448590\nn03448956\nn03449309\nn03449451\nn03450230\nn03450516\nn03450734\nn03450974\nn03451120\nn03451711\nn03451798\nn03452267\nn03452449\nn03452594\nn03452741\nn03453231\nn03453443\nn03454110\nn03454211\nn03454442\nn03454536\nn03454707\nn03454885\nn03455488\nn03456024\nn03456186\nn03456299\nn03456447\nn03456548\nn03456665\nn03457008\nn03457686\nn03457902\nn03458271\nn03459328\nn03459775\nn03460040\nn03460147\nn03460297\nn03461288\nn03461385\nn03462110\nn03463381\nn03463666\nn03464053\nn03465426\nn03465500\nn03465718\nn03466493\nn03466600\nn03466839\nn03467068\nn03467517\nn03467796\nn03467984\nn03468696\nn03468821\nn03469175\nn03469493\nn03469903\nn03470629\nn03471190\nn03472232\nn03473227\nn03474779\nn03474896\nn03475581\nn03475823\nn03476083\nn0
3476313\nn03476684\nn03476991\nn03477512\nn03478589\nn03478756\nn03478907\nn03479121\nn03479397\nn03480579\nn03480719\nn03481172\nn03482252\nn03482405\nn03482523\nn03482877\nn03483230\nn03483316\nn03483823\nn03484083\nn03484487\nn03484576\nn03484931\nn03485198\nn03485407\nn03485794\nn03487090\nn03487331\nn03487444\nn03487533\nn03487642\nn03487774\nn03487886\nn03488188\nn03488438\nn03489162\nn03490006\nn03490119\nn03490884\nn03491032\nn03492250\nn03492542\nn03492922\nn03494278\nn03494537\nn03494706\nn03495039\nn03495258\nn03495570\nn03496296\nn03496612\nn03496892\nn03497352\nn03497657\nn03498441\nn03498662\nn03498781\nn03498962\nn03499354\nn03499468\nn03499907\nn03500209\nn03500389\nn03500699\nn03501614\nn03502200\nn03502331\nn03502509\nn03503477\nn03503997\nn03504205\nn03504723\nn03505133\nn03505383\nn03505504\nn03505667\nn03506028\nn03506184\nn03506370\nn03506560\nn03506727\nn03506880\nn03507241\nn03507458\nn03507963\nn03508101\nn03509394\nn03509608\nn03510244\nn03511175\nn03511333\nn03512147\nn03513137\nn03513376\nn03514451\nn03514693\nn03514894\nn03516367\nn03516844\nn03516996\nn03517647\nn03517760\nn03517899\nn03518135\nn03518305\nn03518445\nn03518943\nn03519081\nn03519387\nn03520493\nn03521076\nn03521544\nn03521675\nn03521899\nn03522003\nn03522100\nn03523987\nn03524150\nn03524574\nn03525074\nn03525454\nn03527149\nn03527444\nn03527565\nn03528263\nn03528523\nn03528901\nn03529175\nn03529444\nn03529629\nn03529860\nn03530511\nn03530642\nn03530910\nn03531281\nn03532342\nn03532672\nn03532919\nn03533014\nn03534580\nn03534776\nn03535024\nn03535780\nn03536122\nn03537241\nn03537412\nn03538037\nn03538179\nn03538406\nn03538634\nn03539433\nn03539546\nn03539678\nn03540090\nn03540267\nn03540595\nn03540914\nn03541091\nn03541269\nn03541537\nn03541696\nn03541923\nn03542333\nn03542605\nn03542860\nn03543012\nn03543112\nn03543254\nn03543394\nn03543603\nn03543735\nn03543945\nn03544143\nn03544238\nn03544360\nn03545150\nn03545470\nn03545756\nn03546112\nn03546235\nn03546340\nn03547054\n
n03547229\nn03548086\nn03548402\nn03548626\nn03549199\nn03549473\nn03549589\nn03549732\nn03549897\nn03550153\nn03550289\nn03551395\nn03552749\nn03553019\nn03553248\nn03554460\nn03555006\nn03555426\nn03555564\nn03555662\nn03556679\nn03557270\nn03557360\nn03557590\nn03557692\nn03558176\nn03558404\nn03558633\nn03558739\nn03559999\nn03560430\nn03561047\nn03563200\nn03563460\nn03565288\nn03565830\nn03566193\nn03566730\nn03567066\nn03568117\nn03571280\nn03571625\nn03571942\nn03572107\nn03572321\nn03574243\nn03574555\nn03574816\nn03577090\nn03577672\nn03578055\nn03578251\nn03578656\nn03579538\nn03580518\nn03580845\nn03581125\nn03582959\nn03584254\nn03584400\nn03584829\nn03585073\nn03585438\nn03585682\nn03586219\nn03586631\nn03587205\nn03588951\nn03589513\nn03589791\nn03590306\nn03590588\nn03590841\nn03590932\nn03592245\nn03592669\nn03592773\nn03593122\nn03593526\nn03594148\nn03594523\nn03594734\nn03594945\nn03595264\nn03595409\nn03595523\nn03595614\nn03595860\nn03596285\nn03596543\nn03597916\nn03598151\nn03598299\nn03598515\nn03598930\nn03599486\nn03600285\nn03600475\nn03600722\nn03601638\nn03601840\nn03602081\nn03602883\nn03603442\nn03603594\nn03603722\nn03604156\nn03604311\nn03604400\nn03604843\nn03605598\nn03605722\nn03606251\nn03607029\nn03607659\nn03607923\nn03609235\nn03609397\nn03610098\nn03610418\nn03610524\nn03610682\nn03612010\nn03612814\nn03612965\nn03613294\nn03613592\nn03614007\nn03614532\nn03614782\nn03615300\nn03615406\nn03615563\nn03615655\nn03615790\nn03616428\nn03616763\nn03616979\nn03617095\nn03617312\nn03617480\nn03618101\nn03618982\nn03619196\nn03619275\nn03619396\nn03619650\nn03619793\nn03619890\nn03620052\nn03620967\nn03621049\nn03621377\nn03622058\nn03622839\nn03622931\nn03623198\nn03623338\nn03623556\nn03624134\nn03624400\nn03625355\nn03625539\nn03625646\nn03625943\nn03626115\nn03626760\nn03627232\nn03628215\nn03628511\nn03629100\nn03629231\nn03629520\nn03630262\nn03630383\nn03631177\nn03631922\nn03632577\nn03632729\nn03632852\nn03633091\nn03633886
\nn03635032\nn03635108\nn03635330\nn03635668\nn03636248\nn03636649\nn03637181\nn03637318\nn03637898\nn03638883\nn03639077\nn03639497\nn03640850\nn03640988\nn03641569\nn03642444\nn03642806\nn03643149\nn03643253\nn03643491\nn03643737\nn03644378\nn03644858\nn03645011\nn03645577\nn03646020\nn03646148\nn03646296\nn03646916\nn03647520\nn03648431\nn03649161\nn03649674\nn03649797\nn03649909\nn03650551\nn03651388\nn03651843\nn03652100\nn03652729\nn03652932\nn03653110\nn03653220\nn03653583\nn03653740\nn03653833\nn03654576\nn03655072\nn03655720\nn03656484\nn03656957\nn03657121\nn03657511\nn03658185\nn03658858\nn03659292\nn03659686\nn03659809\nn03659950\nn03660124\nn03660909\nn03661043\nn03661340\nn03662601\nn03662719\nn03662887\nn03663531\nn03664943\nn03665366\nn03665924\nn03666362\nn03666591\nn03666917\nn03667552\nn03667664\nn03667829\nn03668067\nn03668279\nn03668488\nn03668803\nn03669886\nn03670208\nn03671914\nn03672827\nn03673027\nn03673450\nn03674440\nn03674731\nn03675235\nn03676087\nn03676483\nn03676623\nn03676759\nn03677115\nn03678558\nn03678729\nn03679384\nn03679712\nn03680355\nn03680512\nn03680734\nn03680858\nn03680942\nn03682487\nn03682877\nn03683079\nn03683457\nn03683606\nn03683708\nn03683995\nn03684143\nn03684224\nn03684611\nn03684823\nn03685820\nn03686130\nn03686924\nn03687137\nn03687928\nn03688192\nn03688405\nn03688605\nn03688943\nn03689157\nn03690473\nn03690938\nn03691459\nn03691817\nn03692379\nn03692522\nn03693293\nn03693474\nn03693707\nn03693860\nn03694639\nn03695857\nn03696065\nn03696301\nn03696568\nn03697007\nn03697552\nn03698360\nn03698604\nn03698723\nn03698815\nn03699591\nn03699975\nn03700963\nn03701391\nn03703730\nn03703862\nn03703945\nn03704549\nn03706229\nn03706653\nn03708036\nn03708843\nn03709206\nn03709363\nn03709823\nn03710193\nn03710637\nn03710721\nn03711044\nn03711999\nn03712111\nn03712337\nn03713436\nn03714235\nn03715114\nn03715386\nn03715669\nn03715892\nn03716887\nn03716966\nn03717131\nn03717285\nn03717447\nn03717622\nn03718212\nn03718335\nn037184
58\nn03718581\nn03718789\nn03718935\nn03719053\nn03719343\nn03719743\nn03720163\nn03720891\nn03721047\nn03721252\nn03721384\nn03721590\nn03722007\nn03722288\nn03723267\nn03723781\nn03724066\nn03724417\nn03724538\nn03724623\nn03724756\nn03724870\nn03725035\nn03725600\nn03725717\nn03726760\nn03726993\nn03727067\nn03727465\nn03727605\nn03727837\nn03727946\nn03728437\nn03729308\nn03729826\nn03730153\nn03730334\nn03730494\nn03730893\nn03731019\nn03731483\nn03731695\nn03732020\nn03732114\nn03732458\nn03733131\nn03733281\nn03733644\nn03733805\nn03733925\nn03735637\nn03735963\nn03736064\nn03736470\nn03736970\nn03738066\nn03738472\nn03739518\nn03742019\nn03742115\nn03743016\nn03743279\nn03743902\nn03744276\nn03744840\nn03745146\nn03745571\nn03746005\nn03746155\nn03746330\nn03746486\nn03748162\nn03749807\nn03751269\nn03751458\nn03751757\nn03752185\nn03753077\nn03757604\nn03758089\nn03759243\nn03759661\nn03759954\nn03760310\nn03760671\nn03760944\nn03761084\nn03762332\nn03762434\nn03762602\nn03763968\nn03764276\nn03764736\nn03764822\nn03765561\nn03766044\nn03766322\nn03766508\nn03766935\nn03767112\nn03767203\nn03767459\nn03767745\nn03767966\nn03768916\nn03769610\nn03769881\nn03770085\nn03770316\nn03770439\nn03770679\nn03770954\nn03772077\nn03772269\nn03772584\nn03773035\nn03773504\nn03774327\nn03774461\nn03775071\nn03775199\nn03775388\nn03775546\nn03775636\nn03775747\nn03775847\nn03776460\nn03777568\nn03777754\nn03778817\nn03779128\nn03781244\nn03781683\nn03781787\nn03782006\nn03782190\nn03782794\nn03783430\nn03784270\nn03784896\nn03785016\nn03785237\nn03785721\nn03786194\nn03786313\nn03786621\nn03786715\nn03786901\nn03787032\nn03787523\nn03788047\nn03788195\nn03788365\nn03788498\nn03788601\nn03788914\nn03789171\nn03789946\nn03790230\nn03790512\nn03790755\nn03790953\nn03791053\nn03791235\nn03792048\nn03792334\nn03792526\nn03792782\nn03792972\nn03793489\nn03793850\nn03794056\nn03794136\nn03794798\nn03795123\nn03795269\nn03795758\nn03795976\nn03796401\nn03796522\nn03796605\nn0379
7182\nn03797264\nn03797390\nn03797896\nn03798061\nn03798442\nn03799876\nn03800933\nn03801353\nn03801533\nn03801671\nn03801760\nn03801880\nn03802007\nn03802393\nn03803284\nn03804744\nn03805180\nn03805280\nn03805725\nn03809312\nn03809603\nn03810952\nn03811295\nn03811444\nn03811847\nn03811965\nn03812924\nn03813078\nn03814639\nn03814817\nn03814906\nn03815149\nn03815482\nn03815615\nn03816005\nn03816136\nn03816530\nn03816849\nn03817191\nn03817647\nn03818343\nn03819336\nn03819448\nn03819595\nn03819994\nn03820318\nn03820728\nn03821518\nn03822171\nn03822504\nn03822656\nn03822767\nn03823111\nn03823216\nn03823312\nn03824381\nn03824713\nn03825080\nn03825788\nn03826039\nn03826186\nn03827536\nn03828020\nn03829954\nn03831382\nn03832144\nn03832673\nn03834040\nn03835197\nn03836062\nn03836451\nn03836906\nn03836976\nn03837422\nn03837606\nn03837698\nn03837869\nn03838298\nn03838899\nn03839424\nn03839671\nn03840681\nn03840823\nn03841143\nn03841666\nn03842012\nn03842156\nn03842377\nn03842986\nn03843438\nn03843555\nn03844045\nn03844233\nn03844673\nn03844815\nn03845190\nn03846100\nn03846234\nn03846431\nn03846677\nn03847471\nn03847823\nn03848168\nn03848348\nn03849679\nn03849814\nn03850053\nn03850245\nn03850492\nn03851787\nn03852280\nn03852688\nn03853924\nn03854065\nn03854421\nn03854506\nn03854722\nn03854815\nn03855214\nn03855333\nn03855604\nn03855756\nn03856012\nn03856465\nn03857687\nn03857828\nn03858085\nn03858183\nn03858418\nn03859000\nn03859170\nn03859280\nn03859495\nn03859608\nn03859958\nn03860404\nn03861271\nn03861430\nn03861842\nn03862676\nn03862862\nn03863108\nn03863262\nn03863923\nn03864356\nn03864692\nn03865371\nn03865557\nn03865949\nn03866082\nn03868242\nn03868406\nn03868643\nn03868863\nn03870105\nn03870672\nn03870980\nn03871083\nn03871371\nn03871524\nn03871628\nn03871724\nn03873416\nn03873699\nn03874138\nn03874293\nn03874487\nn03874599\nn03875218\nn03875806\nn03875955\nn03876231\nn03877351\nn03877472\nn03877674\nn03877845\nn03878066\nn03878211\nn03878963\nn03879705\nn03880323\nn03
880531\nn03882611\nn03882960\nn03883054\nn03883385\nn03883524\nn03884397\nn03884778\nn03884926\nn03885028\nn03885194\nn03885293\nn03885535\nn03885669\nn03885788\nn03885904\nn03886053\nn03886641\nn03886762\nn03887185\nn03887330\nn03887697\nn03888257\nn03888605\nn03889503\nn03889726\nn03889871\nn03890093\nn03890233\nn03890514\nn03891051\nn03891251\nn03891332\nn03891538\nn03892178\nn03892425\nn03892557\nn03894051\nn03894379\nn03894677\nn03895866\nn03896103\nn03896233\nn03896419\nn03896526\nn03897943\nn03898129\nn03898271\nn03898395\nn03898633\nn03899768\nn03899933\nn03900393\nn03900979\nn03901229\nn03901750\nn03902125\nn03902482\nn03902756\nn03903424\nn03903733\nn03903868\nn03904060\nn03904183\nn03904433\nn03904657\nn03904782\nn03904909\nn03905947\nn03906224\nn03906463\nn03906997\nn03908204\nn03908618\nn03908714\nn03909020\nn03909160\nn03909406\nn03911513\nn03911658\nn03911767\nn03911866\nn03912218\nn03913343\nn03914106\nn03914337\nn03914438\nn03914583\nn03914831\nn03915118\nn03915437\nn03915900\nn03916031\nn03916470\nn03916720\nn03917198\nn03917814\nn03918480\nn03918737\nn03919096\nn03919289\nn03919430\nn03920288\nn03920641\nn03920737\nn03920867\nn03923379\nn03923918\nn03924069\nn03924679\nn03926148\nn03927091\nn03927299\nn03927539\nn03928116\nn03928814\nn03929660\nn03929855\nn03930313\nn03930630\nn03931765\nn03931885\nn03933933\nn03934042\nn03934229\nn03934311\nn03934565\nn03934656\nn03935116\nn03935234\nn03935335\nn03936466\nn03937543\nn03937835\nn03937931\nn03938037\nn03938244\nn03938401\nn03938522\nn03938725\nn03939178\nn03939677\nn03939844\nn03940256\nn03941013\nn03941231\nn03941417\nn03941684\nn03942813\nn03942920\nn03943115\nn03943266\nn03943920\nn03944024\nn03944138\nn03944341\nn03946076\nn03946162\nn03947466\nn03947798\nn03947888\nn03948242\nn03948459\nn03948830\nn03948950\nn03949145\nn03949317\nn03950228\nn03950537\nn03950899\nn03952576\nn03953901\nn03954393\nn03954731\nn03955296\nn03955489\nn03956157\nn03956623\nn03956785\nn03956922\nn03957315\nn03957420\nn
03957762\nn03957991\nn03958227\nn03958752\nn03959014\nn03959701\nn03960374\nn03960490\nn03961711\nn03961939\nn03962852\nn03963198\nn03963294\nn03963645\nn03964495\nn03965456\nn03965907\nn03966206\nn03966976\nn03967270\nn03967396\nn03967562\nn03967942\nn03968293\nn03968581\nn03968728\nn03970156\nn03970546\nn03971218\nn03973285\nn03973402\nn03973628\nn03973839\nn03973945\nn03974070\nn03974915\nn03975035\nn03975657\nn03975788\nn03976467\nn03976657\nn03977592\nn03977966\nn03978421\nn03978686\nn03978966\nn03980026\nn03980478\nn03980874\nn03981340\nn03981566\nn03981760\nn03981924\nn03982232\nn03982331\nn03982430\nn03982642\nn03983396\nn03983612\nn03984234\nn03984381\nn03984643\nn03984759\nn03985069\nn03985232\nn03985441\nn03985881\nn03986224\nn03986355\nn03986562\nn03986704\nn03986949\nn03987266\nn03987376\nn03987990\nn03988170\nn03989665\nn03990474\nn03991062\nn03991646\nn03991837\nn03992325\nn03992436\nn03992509\nn03992703\nn03993053\nn03993180\nn03993403\nn03993703\nn03994008\nn03994614\nn03995265\nn03995372\nn03995535\nn03995856\nn03996145\nn03996416\nn03996849\nn03998194\nn03998333\nn03999160\nn03999992\nn04000311\nn04000592\nn04001265\nn04001499\nn04001845\nn04003241\nn04003856\nn04004210\nn04004475\nn04004767\nn04004990\nn04005197\nn04005630\nn04008385\nn04008634\nn04009552\nn04009801\nn04011827\nn04012084\nn04012482\nn04013729\nn04015908\nn04016240\nn04016576\nn04016684\nn04016846\nn04018155\nn04018667\nn04019101\nn04019541\nn04019696\nn04020298\nn04020912\nn04021028\nn04021798\nn04022332\nn04023695\nn04023962\nn04024274\nn04024862\nn04024983\nn04025508\nn04026053\nn04026180\nn04026417\nn04026813\nn04027023\nn04027706\nn04028074\nn04028221\nn04028315\nn04028581\nn04028764\nn04029734\nn04030274\nn04030518\nn04032603\nn04033425\nn04033901\nn04033995\nn04034262\nn04035836\nn04035912\nn04036303\nn04037220\nn04037443\nn04037964\nn04038231\nn04038338\nn04038440\nn04038727\nn04039381\nn04039742\nn04039848\nn04040247\nn04040373\nn04040759\nn04041069\nn04041243\nn04041408\
nn04041544\nn04041747\nn04042358\nn04043411\nn04043733\nn04044307\nn04044498\nn04044716\nn04045255\nn04045397\nn04045644\nn04046091\nn04046277\nn04046400\nn04046590\nn04046974\nn04047401\nn04048441\nn04049303\nn04049405\nn04049585\nn04049753\nn04050066\nn04050313\nn04050933\nn04051549\nn04051825\nn04052442\nn04052658\nn04052757\nn04053508\nn04053677\nn04054361\nn04054670\nn04056180\nn04056413\nn04056932\nn04057047\nn04057981\nn04058096\nn04058239\nn04058594\nn04059157\nn04059516\nn04059947\nn04060647\nn04061681\nn04061793\nn04061969\nn04062428\nn04063154\nn04063373\nn04063868\nn04064401\nn04064747\nn04064862\nn04065272\nn04065464\nn04065789\nn04066270\nn04067472\nn04067658\nn04067818\nn04067921\nn04068441\nn04068601\nn04069276\nn04069434\nn04070003\nn04070207\nn04070415\nn04070727\nn04071263\nn04072193\nn04072551\nn04072960\nn04074185\nn04074963\nn04075291\nn04075715\nn04075916\nn04076284\nn04076713\nn04078574\nn04079244\nn04079933\nn04080138\nn04080454\nn04080705\nn04080833\nn04081281\nn04081699\nn04082562\nn04082710\nn04082886\nn04083309\nn04083800\nn04084889\nn04086273\nn04086446\nn04087432\nn04087709\nn04087826\nn04089376\nn04089666\nn04089836\nn04089976\nn04090263\nn04091097\nn04091693\nn04093625\nn04093775\nn04094720\nn04095109\nn04095210\nn04095342\nn04095577\nn04096066\nn04097373\nn04097760\nn04097866\nn04098513\nn04099003\nn04099175\nn04099429\nn04099969\nn04100519\nn04101701\nn04102037\nn04102162\nn04102285\nn04102406\nn04102618\nn04103094\nn04103206\nn04103364\nn04103665\nn04103769\nn04103918\nn04104147\nn04104384\nn04104500\nn04104770\nn04105068\nn04105704\nn04105893\nn04107743\nn04108268\nn04108822\nn04110178\nn04110955\nn04111190\nn04111414\nn04111531\nn04111668\nn04112147\nn04112252\nn04112430\nn04112579\nn04112654\nn04112752\nn04113194\nn04113316\nn04113406\nn04113641\nn04113765\nn04114844\nn04115144\nn04115256\nn04115456\nn04115802\nn04115996\nn04116098\nn04116294\nn04116512\nn04117464\nn04118021\nn04118538\nn04118635\nn04118776\nn04119091\nn0411923
0\nn04119360\nn04119478\nn04119751\nn04120489\nn04120842\nn04121426\nn04121511\nn04121728\nn04122349\nn04122492\nn04122578\nn04122685\nn04122825\nn04123026\nn04123448\nn04123567\nn04123740\nn04124098\nn04124202\nn04124370\nn04124488\nn04125021\nn04125257\nn04125853\nn04126066\nn04127249\nn04127395\nn04127521\nn04127633\nn04127904\nn04128413\nn04128499\nn04128710\nn04128837\nn04130143\nn04130257\nn04130907\nn04131208\nn04131368\nn04131690\nn04131929\nn04132158\nn04132603\nn04132985\nn04133789\nn04134008\nn04134523\nn04134632\nn04135024\nn04135118\nn04135315\nn04135710\nn04136045\nn04136161\nn04136333\nn04136510\nn04136800\nn04137089\nn04137217\nn04137355\nn04137444\nn04137773\nn04137897\nn04138261\nn04138977\nn04139140\nn04139395\nn04139859\nn04140064\nn04140631\nn04141076\nn04141198\nn04141327\nn04141712\nn04141838\nn04141975\nn04142434\nn04142731\nn04142999\nn04143140\nn04143897\nn04144241\nn04144539\nn04145863\nn04146050\nn04146343\nn04146504\nn04146614\nn04146862\nn04147183\nn04147793\nn04148054\nn04148579\nn04148703\nn04149083\nn04149813\nn04150153\nn04150980\nn04152593\nn04153025\nn04153751\nn04154152\nn04154340\nn04154565\nn04154938\nn04155068\nn04156140\nn04156946\nn04157320\nn04158807\nn04158956\nn04160372\nn04160586\nn04160847\nn04161358\nn04161981\nn04162433\nn04162706\nn04163530\nn04164406\nn04164757\nn04164868\nn04165409\nn04166281\nn04167346\nn04168199\nn04169437\nn04170037\nn04170933\nn04171208\nn04171459\nn04171629\nn04171831\nn04172107\nn04172342\nn04172776\nn04172904\nn04173046\nn04173511\nn04173907\nn04174101\nn04175039\nn04175147\nn04176068\nn04176190\nn04176295\nn04177041\nn04177755\nn04177820\nn04177931\nn04178190\nn04178329\nn04179712\nn04179824\nn04179913\nn04180063\nn04180229\nn04180888\nn04181228\nn04181561\nn04182152\nn04182322\nn04183217\nn04183329\nn04184316\nn04184435\nn04184880\nn04185071\nn04185529\nn04185804\nn04185946\nn04186051\nn04186268\nn04186455\nn04186848\nn04187061\nn04187233\nn04187547\nn04187970\nn04188179\nn04189282\nn04189
651\nn04189816\nn04190052\nn04190376\nn04190997\nn04191595\nn04191943\nn04192238\nn04192698\nn04192858\nn04193377\nn04194127\nn04194289\nn04196502\nn04197110\nn04197391\nn04197781\nn04198355\nn04198453\nn04198562\nn04198722\nn04198797\nn04199027\nn04200000\nn04200258\nn04200537\nn04200800\nn04201064\nn04201297\nn04201733\nn04202417\nn04204081\nn04204238\nn04204347\nn04205318\nn04205505\nn04206225\nn04206356\nn04206570\nn04206790\nn04207151\nn04207343\nn04207596\nn04207763\nn04207903\nn04208065\nn04208210\nn04208427\nn04208760\nn04208936\nn04209133\nn04209239\nn04209509\nn04209613\nn04210120\nn04210390\nn04211219\nn04211356\nn04211528\nn04211857\nn04211970\nn04212165\nn04212282\nn04212467\nn04213353\nn04214046\nn04214282\nn04215153\nn04215402\nn04216634\nn04216860\nn04216963\nn04217546\nn04217882\nn04218564\nn04219185\nn04219424\nn04220250\nn04221823\nn04222210\nn04222307\nn04222470\nn04222723\nn04223299\nn04224543\nn04224842\nn04225031\nn04225729\nn04225987\nn04226464\nn04226826\nn04227144\nn04227900\nn04228054\nn04228215\nn04228581\nn04228693\nn04229107\nn04229480\nn04229737\nn04229816\nn04230603\nn04230808\nn04231272\nn04231693\nn04231905\nn04232153\nn04232800\nn04233124\nn04233715\nn04234455\nn04234887\nn04235291\nn04235860\nn04236377\nn04236809\nn04236935\nn04237423\nn04238128\nn04238321\nn04238617\nn04238763\nn04239074\nn04239436\nn04239786\nn04240752\nn04241249\nn04241573\nn04242408\nn04243546\nn04243941\nn04244379\nn04244997\nn04245508\nn04246060\nn04246271\nn04246731\nn04246855\nn04247011\nn04247630\nn04247736\nn04247876\nn04248396\nn04248507\nn04248851\nn04249415\nn04249582\nn04249882\nn04250224\nn04250473\nn04250692\nn04250850\nn04251144\nn04251701\nn04251791\nn04252077\nn04252225\nn04252331\nn04252560\nn04252653\nn04253057\nn04253168\nn04253931\nn04254009\nn04254120\nn04254680\nn04254777\nn04255163\nn04255586\nn04255899\nn04256520\nn04256891\nn04257223\nn04257684\nn04257790\nn04257986\nn04258138\nn04258333\nn04258438\nn04258618\nn04258732\nn04258859\nn042
59630\nn04260364\nn04261281\nn04261638\nn04262161\nn04263257\nn04263336\nn04263502\nn04264628\nn04264765\nn04264914\nn04265275\nn04265904\nn04266014\nn04266162\nn04266375\nn04266486\nn04266968\nn04267435\nn04269270\nn04269822\nn04269944\nn04270147\nn04270371\nn04270891\nn04271531\nn04272054\nn04272389\nn04272928\nn04273285\nn04273569\nn04273659\nn04273796\nn04273972\nn04274985\nn04275175\nn04275548\nn04275661\nn04277352\nn04277493\nn04277826\nn04278247\nn04278353\nn04278447\nn04279172\nn04279353\nn04279462\nn04281260\nn04281375\nn04282494\nn04282872\nn04282992\nn04283096\nn04283255\nn04283378\nn04283585\nn04283905\nn04284002\nn04284341\nn04284438\nn04284572\nn04284869\nn04285008\nn04285146\nn04285803\nn04285965\nn04286575\nn04287747\nn04287898\nn04288533\nn04289027\nn04289195\nn04289576\nn04289690\nn04289827\nn04290079\nn04290259\nn04290507\nn04290615\nn04292414\nn04292572\nn04292921\nn04293119\nn04294426\nn04294614\nn04294879\nn04295081\nn04295571\nn04295881\nn04296562\nn04297098\nn04297750\nn04297847\nn04298661\nn04299215\nn04299370\nn04299963\nn04300643\nn04301000\nn04301760\nn04303357\nn04303497\nn04304375\nn04304680\nn04305210\nn04305323\nn04305572\nn04306080\nn04306592\nn04306847\nn04307767\nn04307986\nn04308084\nn04308273\nn04308397\nn04309049\nn04309348\nn04309548\nn04309833\nn04310018\nn04310157\nn04310904\nn04311004\nn04311174\nn04311595\nn04312154\nn04312432\nn04313503\nn04313628\nn04314914\nn04315342\nn04315948\nn04316498\nn04317063\nn04317175\nn04317325\nn04317420\nn04317833\nn04317976\nn04318787\nn04318892\nn04319937\nn04320973\nn04321453\nn04322026\nn04322801\nn04323819\nn04324297\nn04324387\nn04325041\nn04325704\nn04326547\nn04326676\nn04326799\nn04326896\nn04327204\nn04327682\nn04328186\nn04328329\nn04328946\nn04329834\nn04329958\nn04330267\nn04330340\nn04330746\nn04330998\nn04331277\nn04331639\nn04332074\nn04332243\nn04332580\nn04333129\nn04333869\nn04334105\nn04334365\nn04334599\nn04335209\nn04335435\nn04335693\nn04335886\nn04336792\nn04337287\nn0
4338517\nn04338963\nn04339879\nn04340521\nn04340750\nn04340935\nn04341686\nn04344003\nn04344734\nn04344873\nn04345028\nn04345201\nn04346157\nn04346328\nn04346428\nn04347119\nn04347519\nn04347754\nn04348359\nn04349306\nn04349401\nn04350458\nn04350581\nn04350769\nn04350905\nn04351699\nn04353573\nn04354026\nn04354182\nn04354487\nn04354589\nn04355267\nn04355338\nn04355511\nn04355933\nn04356056\nn04356595\nn04356925\nn04357121\nn04357314\nn04357531\nn04358117\nn04358491\nn04358707\nn04358874\nn04359500\nn04360798\nn04360914\nn04361095\nn04361260\nn04363777\nn04363991\nn04364160\nn04364545\nn04365328\nn04366033\nn04366116\nn04366367\nn04367011\nn04367371\nn04367480\nn04367746\nn04367950\nn04368496\nn04369025\nn04369282\nn04370048\nn04370288\nn04370456\nn04370774\nn04371050\nn04371430\nn04371563\nn04371774\nn04372370\nn04373089\nn04373428\nn04373704\nn04373795\nn04373894\nn04374315\nn04374735\nn04375241\nn04375405\nn04375615\nn04376400\nn04376876\nn04377057\nn04378956\nn04379243\nn04379964\nn04380255\nn04380346\nn04380533\nn04380916\nn04381073\nn04381587\nn04381724\nn04381860\nn04381994\nn04382438\nn04382695\nn04382880\nn04383015\nn04383130\nn04383839\nn04384593\nn04384910\nn04385536\nn04385799\nn04386051\nn04386664\nn04386792\nn04387095\nn04387201\nn04387261\nn04387400\nn04387706\nn04387932\nn04388743\nn04389033\nn04389430\nn04389521\nn04389718\nn04389854\nn04390577\nn04390873\nn04390977\nn04391445\nn04391838\nn04392113\nn04392526\nn04392764\nn04392985\nn04393095\nn04393549\nn04393808\nn04394630\nn04395024\nn04395106\nn04395651\nn04396808\nn04396902\nn04397027\nn04397452\nn04397645\nn04397768\nn04398044\nn04398497\nn04398688\nn04398834\nn04398951\nn04399158\nn04399537\nn04399846\nn04400289\nn04400737\nn04401088\nn04401578\nn04401680\nn04401828\nn04401949\nn04402057\nn04402449\nn04402580\nn04402746\nn04402984\nn04403413\nn04403524\nn04403638\nn04403925\nn04404412\nn04404817\nn04404997\nn04405540\nn04405762\nn04405907\nn04406239\nn04406817\nn04407435\nn04407686\nn04408871\n
n04409011\nn04409128\nn04409384\nn04409515\nn04409625\nn04409806\nn04410086\nn04411264\nn04412097\nn04412416\nn04413969\nn04414199\nn04414319\nn04414476\nn04414675\nn04414909\nn04415663\nn04416005\nn04417086\nn04417180\nn04417672\nn04417809\nn04418357\nn04419073\nn04419642\nn04419868\nn04421872\nn04422409\nn04422727\nn04422875\nn04423845\nn04424692\nn04425804\nn04426316\nn04426427\nn04427715\nn04428191\nn04428634\nn04429376\nn04430475\nn04430896\nn04431025\nn04431745\nn04432203\nn04432662\nn04433585\nn04434207\nn04434531\nn04434932\nn04435180\nn04435653\nn04436012\nn04436185\nn04436329\nn04437953\nn04438304\nn04438507\nn04438897\nn04439585\nn04439712\nn04440963\nn04441662\nn04441790\nn04442312\nn04442441\nn04442741\nn04443164\nn04443257\nn04443766\nn04444749\nn04445040\nn04445154\nn04445327\nn04445952\nn04446276\nn04446844\nn04447028\nn04447276\nn04447443\nn04447861\nn04448070\nn04448361\nn04449290\nn04449966\nn04450133\nn04450243\nn04450640\nn04450749\nn04450994\nn04451318\nn04451818\nn04452528\nn04452615\nn04452757\nn04453037\nn04453156\nn04453390\nn04453666\nn04454908\nn04455250\nn04455652\nn04456115\nn04457474\nn04457767\nn04457910\nn04458633\nn04458843\nn04459018\nn04459362\nn04459610\nn04459773\nn04459909\nn04460130\nn04461437\nn04461570\nn04461696\nn04461879\nn04462011\nn04462240\nn04463679\nn04464615\nn04464852\nn04465050\nn04465358\nn04465501\nn04465666\nn04466871\nn04467099\nn04467307\nn04467665\nn04468005\nn04469003\nn04469514\nn04469813\nn04471148\nn04471632\nn04472563\nn04473108\nn04474035\nn04474187\nn04474466\nn04475411\nn04475631\nn04476116\nn04476259\nn04476831\nn04476972\nn04477219\nn04477387\nn04477548\nn04478512\nn04479046\nn04479823\nn04479939\nn04480033\nn04480853\nn04482177\nn04482297\nn04482393\nn04483073\nn04483307\nn04483925\nn04484432\nn04485082\nn04485423\nn04485884\nn04486054\nn04486213\nn04486934\nn04487081\nn04487394\nn04487724\nn04488202\nn04488427\nn04488530\nn04488742\nn04488857\nn04489008\nn04489695\nn04489817\nn04490091\nn04491388
\nn04491638\nn04491769\nn04492060\nn04492375\nn04492749\nn04493381\nn04494204\nn04495698\nn04495843\nn04496614\nn04496726\nn04496872\nn04497442\nn04497570\nn04497801\nn04498389\nn04499062\nn04499446\nn04500060\nn04501370\nn04501550\nn04501947\nn04502059\nn04502197\nn04502502\nn04502670\nn04502851\nn04503413\nn04503593\nn04504141\nn04505036\nn04505470\nn04506289\nn04506506\nn04506688\nn04507155\nn04508163\nn04508489\nn04508949\nn04509171\nn04509260\nn04509417\nn04510706\nn04511002\nn04513827\nn04513998\nn04514241\nn04515003\nn04516116\nn04516214\nn04516354\nn04516672\nn04517211\nn04517408\nn04517823\nn04518132\nn04518343\nn04518643\nn04518764\nn04519153\nn04520170\nn04520382\nn04520784\nn04521863\nn04522168\nn04523525\nn04523831\nn04524142\nn04524313\nn04524941\nn04525038\nn04525191\nn04525305\nn04525417\nn04525584\nn04525821\nn04526964\nn04527648\nn04528079\nn04528968\nn04529108\nn04529681\nn04529962\nn04530283\nn04530566\nn04531098\nn04531873\nn04532106\nn04532398\nn04532670\nn04532831\nn04533199\nn04533499\nn04533594\nn04533700\nn04533802\nn04533946\nn04534127\nn04534359\nn04534520\nn04534895\nn04535370\nn04535524\nn04536153\nn04536335\nn04536595\nn04536866\nn04538552\nn04539203\nn04539794\nn04540053\nn04540255\nn04541320\nn04541987\nn04542715\nn04542858\nn04542943\nn04543158\nn04543636\nn04543772\nn04543996\nn04544325\nn04544450\nn04545305\nn04545748\nn04545858\nn04546194\nn04546340\nn04547592\nn04548280\nn04548362\nn04549028\nn04549122\nn04549629\nn04549919\nn04550184\nn04551055\nn04552348\nn04552696\nn04553561\nn04553703\nn04554211\nn04554406\nn04554684\nn04554871\nn04555291\nn04555400\nn04555600\nn04555700\nn04555897\nn04556408\nn04556533\nn04556948\nn04557648\nn04557751\nn04558478\nn04559166\nn04559451\nn04559730\nn04559910\nn04560113\nn04560292\nn04560804\nn04560882\nn04561287\nn04561422\nn04561734\nn04562262\nn04562496\nn04562935\nn04563204\nn04563413\nn04564278\nn04564581\nn04565375\nn04566257\nn04566561\nn04566756\nn04568069\nn04568557\nn04568841\nn045690
63\nn04569822\nn04570214\nn04570815\nn04571292\nn04571566\nn04571686\nn04571958\nn04573281\nn04573513\nn04573937\nn04574067\nn04574999\nn04575723\nn04575824\nn04576002\nn04576211\nn04577769\nn04578934\nn04579056\nn04579145\nn04579230\nn04579432\nn04579667\nn04579986\nn04580493\nn04581102\nn04581829\nn04582205\nn04582349\nn04582771\nn04582869\nn04583212\nn04583620\nn04584207\nn04584373\nn04585128\nn04585745\nn04585980\nn04586072\nn04586581\nn04586932\nn04587327\nn04587404\nn04587559\nn04587648\nn04588739\nn04589190\nn04589325\nn04589593\nn04589890\nn04590021\nn04590129\nn04590263\nn04590553\nn04590746\nn04590933\nn04591157\nn04591517\nn04591713\nn04591887\nn04592005\nn04592099\nn04592465\nn04592741\nn04593077\nn04593185\nn04593376\nn04593524\nn04593866\nn04594218\nn04594489\nn04594828\nn04595028\nn04595285\nn04595855\nn04596742\nn04596852\nn04597309\nn04597400\nn04597804\nn04597913\nn04598318\nn04598582\nn04598965\nn04599124\nn04599235\nn04600312\nn04600912\nn04602762\nn04602956\nn04603399\nn04603729\nn04603872\nn04604644\nn04605163\nn04605321\nn04605572\nn04605726\nn04606251\nn04606574\nn04607035\nn04607242\nn04607869\nn04608329\nn04608435\nn04608567\nn04608923\nn04609531\nn04609651\nn04610013\nn04610274\nn04610503\nn04610676\nn04612026\nn04612373\nn04612504\nn04613015\nn04613696\nn04613939\nn04614655\nn04615226\nn04615644\nn04950952\nn04951071\nn04951186\nn04953296\nn04955160\nn04959672\nn04960277\nn04960582\nn04961062\nn04961331\nn04961691\nn04962062\nn04962240\nn04963307\nn04963588\nn04963740\nn04964001\nn04964799\nn04964878\nn04965179\nn04965451\nn04965661\nn04966543\nn04966941\nn04967191\nn04967674\nn04967801\nn04967882\nn04968056\nn04968139\nn04968749\nn04968895\nn04969242\nn04969540\nn04969798\nn04969952\nn04970059\nn04970398\nn04970470\nn04970916\nn04971211\nn04971313\nn04972350\nn04972451\nn04972801\nn04973291\nn04973386\nn04973585\nn04973816\nn04974859\nn04976319\nn04976952\nn04977412\nn04979002\nn04981658\nn05218119\nn05238282\nn05239437\nn05242928\nn0524
4934\nn05245192\nn05258051\nn05259914\nn05260127\nn05260240\nn05261310\nn05262422\nn05262534\nn05263183\nn05263448\nn05282652\nn05302499\nn05399034\nn05399243\nn05418717\nn05450617\nn05451384\nn05453657\nn05486510\nn05526957\nn05538625\nn05578095\nn05581932\nn05586759\nn05716342\nn06255081\nn06263609\nn06266633\nn06266973\nn06267145\nn06267564\nn06267655\nn06267758\nn06267893\nn06267991\nn06271778\nn06272290\nn06272612\nn06272803\nn06273414\nn06273555\nn06273743\nn06273986\nn06274760\nn06275095\nn06275353\nn06275471\nn06276501\nn06276697\nn06277135\nn06277280\nn06278338\nn06278475\nn06281040\nn06359193\nn06359467\nn06415688\nn06417096\nn06470073\nn06592281\nn06595351\nn06596364\nn06596474\nn06596607\nn06596727\nn06785654\nn06793231\nn06794110\nn06874185\nn06883725\nn06892775\nn06998748\nn07005523\nn07248320\nn07273802\nn07461050\nn07556406\nn07556637\nn07556970\nn07557434\nn07560193\nn07560331\nn07560542\nn07560652\nn07560903\nn07561112\nn07561590\nn07561848\nn07562495\nn07563207\nn07564971\nn07565083\nn07565161\nn07565259\nn07566340\nn07567707\nn07568502\nn07568818\nn07569106\nn07569644\nn07570720\nn07572616\nn07572957\nn07573347\nn07573696\nn07574176\nn07574426\nn07574504\nn07574602\nn07574780\nn07574923\nn07575076\nn07575392\nn07575510\nn07575726\nn07575984\nn07576182\nn07576438\nn07576781\nn07577144\nn07577374\nn07577538\nn07578093\nn07579575\nn07579688\nn07579787\nn07579917\nn07580053\nn07580253\nn07580359\nn07580470\nn07580592\nn07581249\nn07581346\nn07581775\nn07581931\nn07582152\nn07582277\nn07582609\nn07582892\nn07583066\nn07584110\nn07584332\nn07584423\nn07584593\nn07585107\nn07585208\nn07585557\nn07585758\nn07585906\nn07586099\nn07586318\nn07586604\nn07586718\nn07586894\nn07587023\nn07587111\nn07587331\nn07587441\nn07587618\nn07587700\nn07587962\nn07588111\nn07588193\nn07588299\nn07588419\nn07588574\nn07588817\nn07588947\nn07590320\nn07590502\nn07590611\nn07590752\nn07591049\nn07591473\nn07591586\nn07591961\nn07592094\nn07592481\nn07592768\nn07593004\nn07
593199\nn07593471\nn07594066\nn07595649\nn07595914\nn07596684\nn07596967\nn07597145\nn07597365\nn07598256\nn07598734\nn07599911\nn07599998\nn07600177\nn07600285\nn07600696\nn07601290\nn07601572\nn07601686\nn07601809\nn07604956\nn07605040\nn07605380\nn07605474\nn07605597\nn07605804\nn07605944\nn07606538\nn07606669\nn07606764\nn07607138\nn07607605\nn07607967\nn07608098\nn07608339\nn07608429\nn07608866\nn07609215\nn07609407\nn07609632\nn07609840\nn07610620\nn07611046\nn07611148\nn07611267\nn07611358\nn07611839\nn07611991\nn07612137\nn07612367\nn07612632\nn07612996\nn07613266\nn07613480\nn07613815\nn07614198\nn07614500\nn07614730\nn07614825\nn07615190\nn07615289\nn07615460\nn07615569\nn07615671\nn07615774\nn07616046\nn07616386\nn07616487\nn07616590\nn07616748\nn07617051\nn07617611\nn07617708\nn07617932\nn07618119\nn07618432\nn07619004\nn07619208\nn07619409\nn07620689\nn07621618\nn07623136\nn07624466\nn07625061\nn07627931\nn07628068\nn07631926\nn07639069\nn07641928\nn07642361\nn07642471\nn07642742\nn07642933\nn07643026\nn07643200\nn07643306\nn07643891\nn07643981\nn07648913\nn07648997\nn07650903\nn07651025\nn07654148\nn07654298\nn07655263\nn07665438\nn07666176\nn07678729\nn07679034\nn07679356\nn07680313\nn07680517\nn07680761\nn07680932\nn07681450\nn07681691\nn07682197\nn07682316\nn07682477\nn07682624\nn07682808\nn07682952\nn07683039\nn07683360\nn07683490\nn07683617\nn07683786\nn07684084\nn07684164\nn07684289\nn07684517\nn07684600\nn07684938\nn07685031\nn07685218\nn07685399\nn07685546\nn07685730\nn07685918\nn07686021\nn07686202\nn07686720\nn07686873\nn07687053\nn07687211\nn07687381\nn07687469\nn07687626\nn07687789\nn07688624\nn07688898\nn07689003\nn07689842\nn07690019\nn07690152\nn07690273\nn07690431\nn07690511\nn07690585\nn07690739\nn07690892\nn07691091\nn07691237\nn07691539\nn07691650\nn07691758\nn07691954\nn07692614\nn07693048\nn07693223\nn07693590\nn07693725\nn07693972\nn07694403\nn07694516\nn07694659\nn07694839\nn07695652\nn07695742\nn07695878\nn07695965\nn07696403\nn
07696527\nn07696625\nn07696728\nn07696839\nn07696977\nn07697100\nn07697313\nn07697537\nn07697699\nn07697825\nn07698250\nn07698401\nn07698543\nn07698672\nn07698782\nn07700003\nn07704054\nn07704205\nn07705931\nn07707451\nn07708124\nn07708398\nn07708685\nn07709046\nn07709172\nn07709333\nn07710283\nn07710616\nn07710952\nn07711080\nn07711232\nn07711371\nn07711569\nn07712063\nn07712267\nn07712382\nn07712559\nn07712748\nn07712856\nn07712959\nn07713074\nn07713267\nn07713395\nn07713763\nn07713895\nn07714078\nn07714188\nn07714287\nn07714448\nn07714571\nn07714802\nn07714895\nn07714990\nn07715103\nn07715221\nn07715407\nn07715561\nn07715721\nn07716034\nn07716203\nn07716358\nn07716906\nn07717070\nn07717410\nn07717556\nn07718472\nn07718747\nn07719213\nn07719616\nn07719839\nn07720277\nn07720442\nn07720615\nn07720875\nn07721018\nn07721195\nn07721325\nn07721456\nn07721678\nn07721942\nn07722052\nn07722217\nn07722485\nn07722888\nn07723039\nn07723177\nn07723330\nn07723559\nn07723968\nn07724269\nn07724492\nn07724654\nn07724943\nn07725255\nn07725376\nn07725531\nn07725789\nn07725888\nn07726095\nn07726525\nn07726672\nn07726796\nn07727048\nn07727458\nn07727578\nn07727868\nn07728053\nn07728181\nn07728585\nn07728708\nn07729384\nn07729485\nn07729828\nn07729926\nn07730033\nn07730207\nn07730320\nn07730406\nn07730708\nn07730855\nn07731006\nn07731284\nn07731587\nn07731767\nn07731952\nn07732168\nn07732636\nn07732747\nn07732904\nn07733394\nn07733567\nn07733712\nn07734017\nn07734183\nn07734292\nn07734417\nn07734555\nn07734744\nn07734879\nn07735404\nn07735510\nn07735687\nn07735803\nn07736087\nn07736256\nn07736371\nn07736692\nn07736813\nn07737745\nn07739125\nn07739344\nn07739506\nn07740033\nn07740220\nn07740342\nn07740461\nn07740597\nn07740954\nn07741138\nn07741461\nn07742012\nn07742313\nn07742704\nn07743224\nn07743544\nn07743902\nn07744057\nn07744246\nn07744430\nn07744682\nn07744811\nn07745046\nn07745466\nn07745940\nn07746186\nn07746334\nn07746551\nn07747055\nn07747607\nn07747951\nn07748157\nn07748276\
nn07748416\nn07748574\nn07748753\nn07748912\nn07749192\nn07749312\nn07749446\nn07749582\nn07749731\nn07749969\nn07750146\nn07750449\nn07750736\nn07750872\nn07751004\nn07751148\nn07751280\nn07751451\nn07752109\nn07752377\nn07752514\nn07752966\nn07753113\nn07753275\nn07753592\nn07753743\nn07753980\nn07754451\nn07754684\nn07754894\nn07755089\nn07755411\nn07755707\nn07755929\nn07756325\nn07756951\nn07757132\nn07757312\nn07757511\nn07757990\nn07758680\nn07759194\nn07759816\nn07760153\nn07760859\nn07761141\nn07761309\nn07762114\nn07762244\nn07762740\nn07762913\nn07763107\nn07763629\nn07763792\nn07763987\nn07764155\nn07764315\nn07764630\nn07764847\nn07765073\nn07765208\nn07765361\nn07765862\nn07765999\nn07766173\nn07766891\nn07767002\nn07767171\nn07767344\nn07767549\nn07767709\nn07767847\nn07768068\nn07768230\nn07768423\nn07768694\nn07768858\nn07769584\nn07769731\nn07770034\nn07770763\nn07771212\nn07771731\nn07772147\nn07772274\nn07772788\nn07772935\nn07774596\nn07774719\nn07774842\nn07775050\nn07775197\nn07800740\nn07801091\nn07801342\nn07801508\nn07801779\nn07801892\nn07802026\nn07802417\nn07802863\nn07802963\nn07803093\nn07803545\nn07804323\nn07804543\nn07804657\nn07804771\nn07804900\nn07805594\nn07805731\nn07806120\nn07806221\nn07806633\nn07806774\nn07807002\nn07807171\nn07807472\nn07807594\nn07807710\nn07807834\nn07807922\nn07808587\nn07808904\nn07809096\nn07810907\nn07812184\nn07814203\nn07814390\nn07814487\nn07814634\nn07815424\nn07815588\nn07816052\nn07816164\nn07816296\nn07816398\nn07816575\nn07816839\nn07817024\nn07817160\nn07817315\nn07817871\nn07818277\nn07818572\nn07818689\nn07818825\nn07818995\nn07819166\nn07819769\nn07819896\nn07820145\nn07820497\nn07820683\nn07821260\nn07821758\nn07821919\nn07822197\nn07822323\nn07822518\nn07822845\nn07823105\nn07823280\nn07823460\nn07823698\nn07823951\nn07824191\nn07824702\nn07825194\nn07825972\nn07826091\nn07826453\nn07826930\nn07827130\nn07827284\nn07827410\nn07827750\nn07828642\nn07829248\nn07829331\nn07829412\nn0783059
3\nn07831146\nn07831267\nn07832416\nn07832902\nn07834065\nn07834507\nn07834618\nn07834872\nn07835331\nn07835457\nn07835921\nn07836838\nn07837002\nn07837362\nn07837912\nn07838073\nn07838233\nn07838441\nn07838551\nn07840027\nn07840804\nn07841345\nn07841495\nn07841639\nn07841800\nn07841907\nn07842044\nn07842130\nn07842202\nn07842308\nn07842433\nn07842605\nn07842753\nn07843464\nn07843636\nn07843775\nn07844042\nn07844867\nn07845087\nn07845702\nn07846143\nn07847198\nn07847453\nn07847827\nn07847917\nn07848093\nn07848196\nn07848338\nn07849336\nn07849619\nn07849733\nn07849912\nn07850083\nn07850329\nn07851298\nn07851443\nn07851554\nn07851641\nn07851767\nn07852229\nn07852614\nn07852833\nn07854184\nn07854982\nn07855510\nn07855907\nn07857170\nn07857731\nn07858978\nn07859284\nn07859583\nn07859796\nn07860103\nn07860331\nn07860447\nn07860805\nn07860988\nn07861158\nn07861557\nn07861813\nn07862095\nn07862244\nn07862348\nn07862461\nn07862611\nn07863374\nn07863547\nn07863802\nn07864756\nn07864934\nn07865105\nn07865196\nn07865484\nn07866015\nn07866151\nn07866277\nn07866409\nn07866723\nn07866868\nn07867021\nn07867164\nn07867324\nn07867421\nn07867616\nn07867751\nn07868200\nn07868340\nn07868508\nn07868830\nn07868955\nn07869522\nn07869611\nn07869775\nn07870069\nn07870167\nn07870313\nn07871234\nn07871436\nn07871720\nn07871810\nn07872593\nn07873057\nn07873348\nn07873464\nn07873807\nn07874063\nn07874159\nn07874259\nn07874343\nn07874441\nn07874780\nn07875152\nn07875436\nn07875693\nn07876651\nn07877187\nn07877299\nn07877675\nn07877849\nn07877961\nn07878647\nn07878785\nn07878926\nn07879072\nn07879174\nn07879350\nn07879450\nn07879659\nn07879953\nn07880080\nn07880213\nn07880325\nn07880458\nn07880751\nn07880880\nn07880968\nn07881205\nn07881404\nn07881800\nn07882497\nn07883031\nn07883251\nn07884567\nn07885705\nn07886057\nn07886176\nn07886463\nn07886572\nn07886849\nn07887099\nn07887192\nn07887304\nn07887461\nn07887634\nn07887967\nn07888229\nn07888465\nn07888816\nn07889274\nn07889510\nn07889814\nn07890
068\nn07890226\nn07890352\nn07890540\nn07890750\nn07891189\nn07891309\nn07891433\nn07891726\nn07892418\nn07892512\nn07892813\nn07893253\nn07893528\nn07893642\nn07893891\nn07894102\nn07894298\nn07894451\nn07894551\nn07894703\nn07894799\nn07894965\nn07895100\nn07895237\nn07895435\nn07895595\nn07895710\nn07895839\nn07895962\nn07896060\nn07896165\nn07896287\nn07896661\nn07896893\nn07896994\nn07897116\nn07897438\nn07897600\nn07897750\nn07897865\nn07897975\nn07898117\nn07898247\nn07898333\nn07898443\nn07898617\nn07898745\nn07899003\nn07899108\nn07899292\nn07899434\nn07899533\nn07899660\nn07899769\nn07899899\nn07900225\nn07900406\nn07900616\nn07900734\nn07900825\nn07900958\nn07901355\nn07901457\nn07901587\nn07902121\nn07902336\nn07902443\nn07902799\nn07902937\nn07903101\nn07903208\nn07903543\nn07903643\nn07903731\nn07903841\nn07903962\nn07904395\nn07904637\nn07904760\nn07904865\nn07904934\nn07905038\nn07905296\nn07905386\nn07905474\nn07905979\nn07906111\nn07906284\nn07906572\nn07906718\nn07906877\nn07907037\nn07907161\nn07907342\nn07907429\nn07907548\nn07907831\nn07907943\nn07908411\nn07908567\nn07908647\nn07908812\nn07909129\nn07909593\nn07910048\nn07910152\nn07910379\nn07910538\nn07910656\nn07911249\nn07911371\nn07911677\nn07912211\nn07913393\nn07913882\nn07914006\nn07914128\nn07914271\nn07914413\nn07914586\nn07914777\nn07914995\nn07915094\nn07915491\nn07915618\nn07915918\nn07916041\nn07916183\nn07916319\nn07917133\nn07917272\nn07917392\nn07917507\nn07917618\nn07918028\nn07918193\nn07918879\nn07919310\nn07919441\nn07919572\nn07920052\nn07920222\nn07920349\nn07920540\nn07920663\nn07920872\nn07920989\nn07921239\nn07921455\nn07921615\nn07922512\nn07922764\nn07923748\nn07924033\nn07924276\nn07924443\nn07924560\nn07924747\nn07924834\nn07924955\nn07925116\nn07925229\nn07925500\nn07925608\nn07925966\nn07926250\nn07926920\nn07927197\nn07927512\nn07927931\nn07928163\nn07928367\nn07928488\nn07928696\nn07928790\nn07928887\nn07929172\nn07929351\nn07929519\nn07930062\nn07930315\nn079
30433\nn07930554\nn07930864\nn07931452\nn07931612\nn07931870\nn07932039\nn07932841\nn07933154\nn07933274\nn07933799\nn07934282\nn07935043\nn07935379\nn07935504\nn07935737\nn07935878\nn07936263\nn07936548\nn07936745\nn07937461\nn07938007\nn07938149\nn07938313\nn07942152\nn07951464\nn07954211\nn07977870\nn08182379\nn08242223\nn08249459\nn08256735\nn08376250\nn08492461\nn08494231\nn08495908\nn08505018\nn08517676\nn08518171\nn08521623\nn08524735\nn08539072\nn08547468\nn08547544\nn08551296\nn08555710\nn08560295\nn08571898\nn08573842\nn08578517\nn08579352\nn08580944\nn08583292\nn08583455\nn08584914\nn08596076\nn08598301\nn08598568\nn08611339\nn08614632\nn08616050\nn08628141\nn08633683\nn08640531\nn08640739\nn08640962\nn08645104\nn08645212\nn08649711\nn08658309\nn08659446\nn08659861\nn08663703\nn08673039\nn08677424\nn09189157\nn09191635\nn09193705\nn09194227\nn09199101\nn09205509\nn09206896\nn09206985\nn09208496\nn09210862\nn09217230\nn09218315\nn09218494\nn09218641\nn09219233\nn09224725\nn09228055\nn09229709\nn09230041\nn09230202\nn09233446\nn09238926\nn09239302\nn09242389\nn09245515\nn09246464\nn09247410\nn09249034\nn09251407\nn09256479\nn09257843\nn09259025\nn09259219\nn09260907\nn09263912\nn09265620\nn09267854\nn09269341\nn09269472\nn09270735\nn09274152\nn09279986\nn09282208\nn09283193\nn09283405\nn09283767\nn09283866\nn09287968\nn09288635\nn09289331\nn09290444\nn09294877\nn09295946\nn09300905\nn09302616\nn09303008\nn09303528\nn09304750\nn09305898\nn09308572\nn09308743\nn09309168\nn09309292\nn09326662\nn09331251\nn09332890\nn09335809\nn09337253\nn09344324\nn09348460\nn09349648\nn09359803\nn09361517\nn09362945\nn09366317\nn09376198\nn09376526\nn09376786\nn09381242\nn09382099\nn09384106\nn09392402\nn09393605\nn09396465\nn09398076\nn09398677\nn09399592\nn09400987\nn09403211\nn09403427\nn09403734\nn09405078\nn09406793\nn09409512\nn09409752\nn09411189\nn09415584\nn09415671\nn09416076\nn09416890\nn09421799\nn09421951\nn09428293\nn09428628\nn09432283\nn09433442\nn09433839\nn0
9435739\nn09436444\nn09436708\nn09437454\nn09438844\nn09438940\nn09439213\nn09442595\nn09443281\nn09443641\nn09444783\nn09445008\nn09445289\nn09447666\nn09448690\nn09450163\nn09451237\nn09452395\nn09452760\nn09453008\nn09454153\nn09454412\nn09457979\nn09460046\nn09461069\nn09466678\nn09468604\nn09472413\nn09472597\nn09475044\nn09475179\nn09475925\nn09481120\nn09618880\nn09618957\nn09666883\nn09763784\nn09792969\nn09818022\nn09820263\nn09828216\nn09833536\nn09834699\nn09835506\nn09842047\nn09846755\nn09856671\nn09858165\nn09861946\nn09874862\nn09877951\nn09893191\nn09893502\nn09894445\nn09896685\nn09913593\nn09915651\nn09917214\nn09923561\nn09932508\nn09945745\nn09990415\nn09990777\nn10020890\nn10043643\nn10080869\nn10082043\nn10087434\nn10091651\nn10092794\nn10120671\nn10123844\nn10142747\nn10147935\nn10148035\nn10150071\nn10151760\nn10153594\nn10155849\nn10164233\nn10164492\nn10185793\nn10186216\nn10223177\nn10229883\nn10253296\nn10260800\nn10263411\nn10283170\nn10297234\nn10298912\nn10300303\nn10304914\nn10305802\nn10324560\nn10333601\nn10334009\nn10340312\nn10345015\nn10348526\nn10366966\nn10382710\nn10386984\nn10393909\nn10405694\nn10421470\nn10467179\nn10469874\nn10474645\nn10500217\nn10509063\nn10514429\nn10521662\nn10530150\nn10536416\nn10542761\nn10542888\nn10559288\nn10560106\nn10562135\nn10565667\nn10582746\nn10599806\nn10610465\nn10618342\nn10628644\nn10634849\nn10638922\nn10642596\nn10655594\nn10665698\nn10679174\nn10701180\nn10701644\nn10707233\nn10721321\nn10732010\nn10755080\nn10763383\nn10772092\nn10780632\nn10806113\nn11448153\nn11487732\nn11508382\nn11524451\nn11532682\nn11533212\nn11536673\nn11537327\nn11542137\nn11542640\nn11544015\nn11545714\nn11547855\nn11552133\nn11552806\nn11599324\nn11600372\nn11601177\nn11601333\nn11601918\nn11602873\nn11603246\nn11603835\nn11608250\nn11609475\nn11609862\nn11610215\nn11611087\nn11611233\nn11611356\nn11611561\nn11611758\nn11612018\nn11612349\nn11612575\nn11613219\nn11613459\nn11614039\nn11614250\nn11614420\n
n11614713\nn11615026\nn11615387\nn11615607\nn11615967\nn11616486\nn11616662\nn11617090\nn11617272\nn11617631\nn11618290\nn11618525\nn11618861\nn11619227\nn11619455\nn11620673\nn11621029\nn11621281\nn11621547\nn11621727\nn11621950\nn11622184\nn11622368\nn11622591\nn11622771\nn11623105\nn11623815\nn11623967\nn11624192\nn11624531\nn11625003\nn11625223\nn11625632\nn11625804\nn11626152\nn11626409\nn11626585\nn11626826\nn11627168\nn11627512\nn11627908\nn11628087\nn11628456\nn11628793\nn11630017\nn11631854\nn11632167\nn11632619\nn11634736\nn11635152\nn11635433\nn11635830\nn11636204\nn11636835\nn11639445\nn11640132\nn11643835\nn11644046\nn11644226\nn11644462\nn11645590\nn11645914\nn11646167\nn11646344\nn11646694\nn11647306\nn11647703\nn11650558\nn11652376\nn11653904\nn11655974\nn11658331\nn11658544\nn11660300\nn11661372\nn11661909\nn11662371\nn11664418\nn11665372\nn11666854\nn11669786\nn11669921\nn11672269\nn11672400\nn11675025\nn11676500\nn11678010\nn11680596\nn11682659\nn11686912\nn11690254\nn11690455\nn11691046\nn11691857\nn11692265\nn11692792\nn11693981\nn11694664\nn11695599\nn11695974\nn11698042\nn11699442\nn11700058\nn11701066\nn11703669\nn11704093\nn11704620\nn11705171\nn11705387\nn11705776\nn11706761\nn11707229\nn11709205\nn11709674\nn11710136\nn11710393\nn11710827\nn11711537\nn11711764\nn11712282\nn11714382\nn11715430\nn11715678\nn11717577\nn11719286\nn11720353\nn11720643\nn11720891\nn11721337\nn11722466\nn11722982\nn11723227\nn11723770\nn11724109\nn11725015\nn11725311\nn11725480\nn11725821\nn11725973\nn11726269\nn11726707\nn11727091\nn11727358\nn11727540\nn11727738\nn11728099\nn11728945\nn11730602\nn11731659\nn11732567\nn11733054\nn11733312\nn11733548\nn11735053\nn11736694\nn11736851\nn11737534\nn11748811\nn11752937\nn11753143\nn11753355\nn11753700\nn11754893\nn11756092\nn11756669\nn11756870\nn11757653\nn11757851\nn11758122\nn11758276\nn11758483\nn11758799\nn11759224\nn11759404\nn11759853\nn11760785\nn11761202\nn11762433\nn11769176\nn11769621\nn11769803\nn11770256
\nn11772408\nn11772879\nn11773987\nn11774513\nn11777080\nn11778257\nn11779300\nn11780148\nn11781176\nn11782036\nn11782761\nn11783920\nn11784126\nn11784497\nn11785668\nn11786131\nn11786539\nn11788727\nn11789066\nn11789589\nn11791341\nn11791569\nn11792029\nn11792341\nn11792742\nn11793779\nn11794024\nn11794519\nn11795049\nn11797321\nn11800236\nn11801891\nn11802586\nn11802800\nn11805544\nn11805956\nn11806219\nn11807108\nn11807525\nn11807979\nn11808299\nn11808468\nn11808721\nn11808932\nn11809094\nn11809271\nn11809437\nn11809594\nn11810358\nn11811473\nn11811706\nn11811921\nn11812094\nn11812910\nn11813077\nn11814584\nn11815491\nn11815721\nn11815918\nn11816121\nn11816336\nn11816649\nn11816829\nn11817914\nn11818069\nn11819509\nn11819912\nn11820965\nn11821184\nn11823436\nn11824146\nn11825351\nn11826198\nn11830906\nn11832214\nn11832480\nn11834654\nn11836722\nn11837970\nn11838916\nn11839568\nn11839823\nn11840067\nn11844371\nn11844892\nn11845557\nn11845793\nn11845913\nn11846765\nn11847169\nn11848479\nn11849467\nn11849871\nn11849983\nn11850521\nn11851258\nn11851578\nn11851839\nn11852028\nn11853356\nn11853813\nn11854479\nn11855274\nn11855553\nn11857875\nn11859472\nn11859737\nn11860555\nn11861641\nn11861853\nn11862835\nn11865874\nn11866248\nn11870418\nn11870747\nn11872146\nn11874081\nn11875523\nn11875691\nn11875938\nn11876204\nn11876432\nn11876634\nn11876803\nn11877193\nn11877283\nn11877646\nn11878101\nn11879054\nn11879722\nn11879895\nn11882074\nn11882426\nn11883328\nn11887119\nn11888800\nn11889619\nn11890150\nn11891175\nn11892029\nn11892637\nn11892817\nn11893640\nn11894327\nn11894558\nn11894770\nn11895092\nn11896722\nn11897116\nn11898775\nn11900569\nn11901294\nn11901597\nn11901759\nn11901977\nn11902200\nn11902389\nn11902709\nn11902982\nn11903671\nn11904109\nn11905392\nn11905749\nn11906917\nn11907100\nn11907689\nn11908549\nn11908846\nn11910271\nn11910460\nn11915214\nn11915658\nn11915899\nn11916467\nn11916696\nn11918286\nn11918473\nn11921395\nn11923174\nn11923397\nn11923637\nn119244
45\nn11924849\nn11925303\nn11925898\nn11926365\nn11926833\nn11927215\nn11928352\nn11928858\nn11929743\nn11931540\nn11931918\nn11933546\nn11933728\nn11934616\nn11934807\nn11935469\nn11939180\nn11939491\nn11939699\nn11940006\nn11940599\nn11941924\nn11943407\nn11943660\nn11943992\nn11944196\nn11944954\nn11945514\nn11945783\nn11946727\nn11947629\nn11947802\nn11948264\nn11948864\nn11949015\nn11949402\nn11950345\nn11950686\nn11950877\nn11953038\nn11953610\nn11953884\nn11954161\nn11954345\nn11954642\nn11955153\nn11955896\nn11956348\nn11956850\nn11957678\nn11958080\nn11959632\nn11959862\nn11960245\nn11961100\nn11961446\nn11962272\nn11962667\nn11963932\nn11965218\nn11965627\nn11966083\nn11966215\nn11966617\nn11966896\nn11968704\nn11968931\nn11969166\nn11969607\nn11970586\nn11971248\nn11971406\nn11971783\nn11971927\nn11972291\nn11972759\nn11973341\nn11977303\nn11978233\nn11978551\nn11978713\nn11978961\nn11979527\nn11979715\nn11979964\nn11980318\nn11980682\nn11981192\nn11982115\nn11984144\nn11984542\nn11986511\nn11987126\nn11988596\nn11989393\nn11989869\nn11990167\nn11990313\nn11991263\nn11992806\nn11995092\nn11998888\nn12001707\nn12002428\nn12003167\nn12003696\nn12004547\nn12005656\nn12006766\nn12006930\nn12007196\nn12007406\nn12008252\nn12008487\nn12008749\nn12009420\nn12011620\nn12012111\nn12014085\nn12015221\nn12015525\nn12015959\nn12016567\nn12018760\nn12019827\nn12020184\nn12020507\nn12020736\nn12020941\nn12022054\nn12023108\nn12023407\nn12023726\nn12024445\nn12024690\nn12026018\nn12026476\nn12026981\nn12027222\nn12027658\nn12029635\nn12030908\nn12031139\nn12031927\nn12033709\nn12034141\nn12034384\nn12036939\nn12037499\nn12037691\nn12038406\nn12038585\nn12038898\nn12039317\nn12041446\nn12043444\nn12043673\nn12043836\nn12044467\nn12046028\nn12046428\nn12046815\nn12047345\nn12047884\nn12048056\nn12048399\nn12049282\nn12049562\nn12050533\nn12050959\nn12051103\nn12052447\nn12052787\nn12053405\nn12053690\nn12055516\nn12056217\nn12056601\nn12056758\nn12057211\nn12057447\nn1205
7660\nn12058192\nn12058630\nn12058822\nn12059314\nn12059625\nn12061380\nn12061614\nn12062468\nn12062626\nn12062781\nn12063639\nn12064389\nn12064591\nn12065316\nn12065777\nn12066018\nn12066261\nn12066630\nn12067193\nn12068432\nn12069217\nn12069679\nn12070016\nn12070381\nn12070583\nn12070712\nn12071744\nn12072722\nn12073554\nn12073991\nn12074408\nn12074867\nn12075010\nn12075151\nn12075299\nn12075830\nn12076223\nn12076577\nn12076852\nn12077944\nn12078172\nn12079120\nn12079963\nn12080395\nn12080820\nn12081215\nn12083113\nn12083591\nn12083847\nn12084555\nn12084890\nn12085267\nn12085664\nn12086012\nn12086192\nn12086539\nn12086778\nn12088223\nn12090890\nn12091213\nn12091377\nn12091550\nn12091953\nn12092262\nn12092417\nn12093329\nn12093600\nn12094612\nn12095020\nn12095647\nn12098403\nn12099342\nn12101870\nn12102133\nn12104238\nn12104501\nn12104734\nn12105125\nn12107710\nn12107970\nn12108871\nn12109365\nn12110085\nn12110778\nn12112008\nn12112609\nn12112918\nn12113195\nn12115180\nn12116429\nn12119238\nn12121610\nn12122725\nn12123741\nn12124627\nn12124818\nn12127460\nn12127768\nn12128071\nn12129134\nn12133462\nn12133682\nn12134025\nn12135049\nn12136392\nn12137120\nn12137569\nn12139575\nn12141167\nn12142085\nn12144313\nn12144580\nn12145477\nn12146311\nn12148757\nn12150722\nn12151615\nn12152532\nn12152722\nn12154773\nn12155009\nn12157056\nn12158031\nn12158443\nn12159055\nn12159388\nn12160303\nn12160490\nn12160857\nn12161056\nn12161969\nn12162181\nn12162425\nn12164363\nn12164656\nn12164881\nn12165170\nn12166128\nn12166424\nn12166793\nn12167075\nn12167436\nn12167602\nn12168565\nn12171098\nn12171316\nn12171966\nn12172364\nn12172481\nn12172906\nn12173069\nn12173664\nn12173912\nn12174311\nn12174521\nn12178896\nn12179122\nn12180168\nn12180885\nn12184912\nn12185859\nn12187247\nn12189429\nn12189987\nn12190410\nn12190869\nn12194147\nn12195533\nn12196336\nn12196527\nn12196694\nn12198286\nn12199790\nn12200143\nn12201331\nn12201580\nn12202936\nn12203529\nn12204032\nn12204175\nn12205694\nn12
214789\nn12215022\nn12215579\nn12217453\nn12223569\nn12223764\nn12224978\nn12225563\nn12227658\nn12228229\nn12228387\nn12230794\nn12237486\nn12237641\nn12240477\nn12242409\nn12243109\nn12244153\nn12244650\nn12244819\nn12245319\nn12246232\nn12249542\nn12252168\nn12257570\nn12258885\nn12260799\nn12261571\nn12261808\nn12262018\nn12262185\nn12263038\nn12263738\nn12263987\nn12264512\nn12265600\nn12266217\nn12266796\nn12267411\nn12267677\nn12268246\nn12269241\nn12269406\nn12270027\nn12270741\nn12270946\nn12271933\nn12272239\nn12272883\nn12273114\nn12273344\nn12273768\nn12273939\nn12274358\nn12274863\nn12275131\nn12275675\nn12275888\nn12276110\nn12276477\nn12276628\nn12276872\nn12277150\nn12277578\nn12277800\nn12278107\nn12278371\nn12278650\nn12279458\nn12279772\nn12280060\nn12281241\nn12281788\nn12281974\nn12282235\nn12282527\nn12282737\nn12282933\nn12283542\nn12284262\nn12284821\nn12285369\nn12285900\nn12286826\nn12286988\nn12287836\nn12288005\nn12288823\nn12290748\nn12291143\nn12291959\nn12293723\nn12294124\nn12294331\nn12294723\nn12294871\nn12295033\nn12295796\nn12296432\nn12300840\nn12301180\nn12301445\nn12302071\nn12302248\nn12303083\nn12303462\nn12304115\nn12304703\nn12304899\nn12305089\nn12305293\nn12305475\nn12305819\nn12305986\nn12306089\nn12306717\nn12307076\nn12307240\nn12309277\nn12311579\nn12312728\nn12315598\nn12315999\nn12316444\nn12316572\nn12317296\nn12318378\nn12319204\nn12319414\nn12320010\nn12320806\nn12322099\nn12322501\nn12322699\nn12325234\nn12328398\nn12328567\nn12329260\nn12329473\nn12330469\nn12330587\nn12330891\nn12331655\nn12332030\nn12332555\nn12333053\nn12333530\nn12333771\nn12334293\nn12336092\nn12336224\nn12336333\nn12336727\nn12336973\nn12337617\nn12338258\nn12338454\nn12338655\nn12338796\nn12339831\nn12340383\nn12340755\nn12342299\nn12342498\nn12342852\nn12343480\nn12344283\nn12344483\nn12344700\nn12344837\nn12345280\nn12345899\nn12347158\nn12350758\nn12352287\nn12352639\nn12352844\nn12352990\nn12353203\nn12353754\nn12356023\nn12356960\nn
12357485\nn12360108\nn12360684\nn12361135\nn12361946\nn12362274\nn12362668\nn12367611\nn12368028\nn12368257\nn12368451\nn12369309\nn12371439\nn12373100\nn12374418\nn12383894\nn12384037\nn12384227\nn12384839\nn12385429\nn12385566\nn12387633\nn12387839\nn12388143\nn12388858\nn12388989\nn12389130\nn12389501\nn12390099\nn12390314\nn12392549\nn12393269\nn12397431\nn12399132\nn12399384\nn12400489\nn12400720\nn12401684\nn12402051\nn12402348\nn12402596\nn12402840\nn12403994\nn12405714\nn12406488\nn12406715\nn12406902\nn12407079\nn12407222\nn12407890\nn12408077\nn12408717\nn12409231\nn12409470\nn12409840\nn12412355\nn12412606\nn12413165\nn12413301\nn12413419\nn12413880\nn12414035\nn12414449\nn12414818\nn12414932\nn12415595\nn12416073\nn12418221\nn12421137\nn12421683\nn12421917\nn12422129\nn12426623\nn12426749\nn12427184\nn12427391\nn12427566\nn12427757\nn12428076\nn12428412\nn12428747\nn12429352\nn12432356\nn12433081\nn12433178\nn12433769\nn12435152\nn12435649\nn12435777\nn12437513\nn12437769\nn12437930\nn12441183\nn12441390\nn12441958\nn12443323\nn12446519\nn12448700\nn12449296\nn12449526\nn12450344\nn12450840\nn12451240\nn12451399\nn12451915\nn12452836\nn12453186\nn12454159\nn12454436\nn12454705\nn12454949\nn12455950\nn12457091\nn12458550\nn12459629\nn12460697\nn12460957\nn12461109\nn12461466\nn12461673\nn12462805\nn12463134\nn12465557\nn12466727\nn12469517\nn12472024\nn12473608\nn12473840\nn12474167\nn12475035\nn12475242\nn12476510\nn12477163\nn12477583\nn12477747\nn12478768\nn12479537\nn12480456\nn12480895\nn12481458\nn12482437\nn12482668\nn12482893\nn12483427\nn12483625\nn12483841\nn12484784\nn12485653\nn12485981\nn12486574\nn12489815\nn12491017\nn12491826\nn12492106\nn12493208\nn12494794\nn12495146\nn12495895\nn12496427\nn12496949\nn12498055\nn12501202\nn12504570\nn12504783\nn12506341\nn12506991\nn12508309\nn12509476\nn12509665\nn12513172\nn12513613\nn12513933\nn12514138\nn12515711\nn12515925\nn12516828\nn12517445\nn12517642\nn12519089\nn12519563\nn12521394\nn12523475\
nn12527738\nn12528549\nn12528974\nn12529220\nn12530629\nn12530818\nn12532564\nn12539306\nn12540250\nn12544539\nn12545635\nn12546183\nn12546617\nn12546962\nn12547215\nn12547503\nn12548280\nn12549192\nn12552309\nn12554911\nn12556656\nn12557438\nn12557556\nn12557681\nn12558230\nn12558425\nn12560282\nn12560621\nn12560775\nn12561169\nn12562785\nn12564083\nn12566954\nn12568186\nn12570394\nn12570703\nn12570972\nn12571781\nn12573474\nn12574320\nn12574866\nn12575322\nn12575812\nn12576323\nn12577895\nn12578626\nn12578916\nn12579038\nn12580654\nn12580896\nn12582231\nn12582665\nn12582846\nn12583126\nn12583401\nn12584191\nn12584715\nn12585629\nn12587132\nn12587803\nn12588320\nn12588780\nn12590232\nn12590499\nn12591017\nn12591351\nn12593994\nn12595699\nn12595964\nn12596148\nn12596709\nn12596849\nn12597134\nn12597466\nn12597798\nn12598027\nn12599435\nn12602262\nn12602980\nn12603449\nn12604228\nn12606438\nn12606545\nn12607456\nn12610328\nn12614477\nn12615232\nn12620196\nn12620546\nn12620969\nn12621410\nn12622297\nn12622875\nn12623077\nn12624381\nn12624568\nn12625383\nn12627119\nn12628986\nn12629305\nn12629666\nn12630763\nn12631331\nn12632335\nn12633638\nn12633994\nn12634211\nn12634429\nn12634734\nn12634986\nn12635532\nn12635744\nn12635955\nn12636224\nn12638218\nn12638753\nn12639584\nn12640839\nn12641007\nn12641413\nn12642090\nn12642200\nn12643313\nn12643473\nn12644902\nn12645174\nn12646605\nn12646740\nn12647560\nn12647893\nn12648045\nn12648888\nn12649065\nn12649317\nn12649539\nn12650379\nn12650556\nn12651229\nn12651611\nn12651821\nn12655869\nn12656369\nn12656685\nn12657082\nn12658118\nn12658308\nn12658481\nn12659064\nn12659356\nn12659539\nn12662772\nn12663023\nn12665048\nn12665271\nn12665857\nn12666965\nn12670758\nn12671651\nn12675299\nn12675876\nn12676534\nn12676703\nn12680402\nn12680864\nn12681893\nn12682411\nn12682668\nn12683407\nn12683571\nn12683791\nn12684379\nn12685431\nn12685831\nn12686077\nn12686274\nn12686676\nn12687044\nn12687462\nn12687698\nn12687957\nn12688716\nn1269142
8\nn12691661\nn12694486\nn12695975\nn12696492\nn12698598\nn12700088\nn12703190\nn12703383\nn12703557\nn12703856\nn12704343\nn12706410\nn12707781\nn12708293\nn12708654\nn12708941\nn12709103\nn12709688\nn12709901\nn12710295\nn12710415\nn12710577\nn12710693\nn12711596\nn12711817\nn12711984\nn12713063\nn12713866\nn12714755\nn12717072\nn12717224\nn12719684\nn12719944\nn12720200\nn12723610\nn12724942\nn12725738\nn12726159\nn12726670\nn12727101\nn12727518\nn12729315\nn12729521\nn12729729\nn12731029\nn12731401\nn12731835\nn12732009\nn12732491\nn12732756\nn12732966\nn12733218\nn12733647\nn12733870\nn12734070\nn12737383\nn12737898\nn12739332\nn12741222\nn12741792\nn12743352\nn12744387\nn12745386\nn12746884\nn12749049\nn12749679\nn12749852\nn12752205\nn12753007\nn12753245\nn12753573\nn12753762\nn12754003\nn12754468\nn12754648\nn12754781\nn12754981\nn12755225\nn12755387\nn12755727\nn12756457\nn12757303\nn12757458\nn12757816\nn12759273\nn12761284\nn12762049\nn12762896\nn12764202\nn12765115\nn12766595\nn12766869\nn12768682\nn12771192\nn12771390\nn12771597\nn12772753\nn12772908\nn12773651\nn12774299\nn12774641\nn12775919\nn12777680\nn12778398\nn12778605\nn12779603\nn12779851\nn12781940\nn12782530\nn12782915\nn12784889\nn12785724\nn12785889\nn12788854\nn12789054\nn12790430\nn12791064\nn12791329\nn12793015\nn12793284\nn12793494\nn12794135\nn12794367\nn12794985\nn12795352\nn12795555\nn12796022\nn12797860\nn12799776\nn12801520\nn12801781\nn12803754\nn12805146\nn12805561\nn12806015\nn12806732\nn12807251\nn12807409\nn12807773\nn12810595\nn12811027\nn12812478\nn12813189\nn12814643\nn12815198\nn12816508\nn12817464\nn12817694\nn12818346\nn12818966\nn12819728\nn12820853\nn12821505\nn12821895\nn12822115\nn12822769\nn12822955\nn12823717\nn12823859\nn12824053\nn12825497\nn12827270\nn12827537\nn12828220\nn12828791\nn12830222\nn12830568\nn12832315\nn12832538\nn12833149\nn12833985\nn12834798\nn12835331\nn12836212\nn12836337\nn12836508\nn12836862\nn12840362\nn12840749\nn12841007\nn12841193\nn12841
354\nn12843970\nn12844939\nn12845413\nn12847008\nn12847374\nn12847927\nn12848499\nn12849061\nn12849279\nn12849416\nn12849952\nn12850168\nn12850336\nn12850906\nn12851469\nn12853482\nn12854600\nn12855494\nn12856091\nn12856287\nn12856479\nn12856680\nn12858150\nn12858397\nn12858618\nn12858871\nn12859986\nn12860365\nn12861345\nn12861892\nn12862512\nn12863624\nn12864160\nn12865037\nn12865562\nn12865708\nn12865824\nn12866002\nn12866162\nn12866459\nn12866635\nn12867826\nn12869061\nn12869478\nn12870535\nn12870682\nn12870891\nn12877838\nn12879527\nn12879963\nn12880244\nn12880462\nn12882779\nn12882945\nn12884100\nn12884260\nn12887293\nn12889219\nn12889713\nn12890265\nn12890490\nn12890685\nn12890928\nn12891093\nn12891305\nn12891469\nn12891643\nn12893463\nn12893993\nn12895811\nn12898774\nn12899537\nn12899752\nn12901724\nn12902662\nn12904314\nn12905412\nn12906214\nn12908645\nn12909421\nn12909917\nn12911079\nn12911440\nn12911673\nn12913791\nn12914923\nn12915568\nn12915811\nn12916179\nn12916511\nn12917901\nn12919403\nn12919646\nn12919847\nn12920204\nn12920955\nn12921868\nn12922763\nn12924623\nn12925179\nn12926480\nn12926689\nn12927013\nn12927494\nn12928071\nn12929403\nn12931542\nn12932173\nn12932365\nn12932966\nn12934036\nn12934174\nn12934479\nn12934985\nn12935609\nn12937130\nn12938193\nn12939282\nn12939874\nn12940226\nn12940609\nn12942395\nn12942572\nn12946849\nn12947313\nn12947544\nn12948053\nn12948251\nn12948495\nn12950126\nn12950314\nn12951146\nn12951835\nn12953206\nn12953484\nn12957924\nn12961879\nn12963628\nn12965626\nn12966945\nn12969131\nn12969425\nn12973443\nn12974987\nn12975804\nn12979829\nn12980840\nn12982468\nn12983048\nn12985420\nn12985773\nn12985857\nn12986227\nn12987056\nn12988158\nn12989938\nn12991184\nn12992177\nn12992868\nn12995601\nn12997654\nn12997919\nn12998815\nn13000891\nn13001041\nn13001206\nn13001366\nn13001529\nn13001930\nn13002750\nn13002925\nn13003061\nn13003254\nn13003522\nn13003712\nn13004423\nn13005329\nn13005984\nn13006171\nn13006631\nn13006894\nn130
07034\nn13007417\nn13008315\nn13009085\nn13009429\nn13011595\nn13012253\nn13013534\nn13013764\nn13014409\nn13014741\nn13017102\nn13017240\nn13019835\nn13020191\nn13020964\nn13021689\nn13022210\nn13022709\nn13023134\nn13024012\nn13025647\nn13028611\nn13029326\nn13029760\nn13032115\nn13032381\nn13032618\nn13032923\nn13033134\nn13033577\nn13034062\nn13035241\nn13035707\nn13037406\nn13038068\nn13038744\nn13039349\nn13040303\nn13040629\nn13041312\nn13043926\nn13044375\nn13044778\nn13046669\nn13049953\nn13050397\nn13052670\nn13052931\nn13053608\nn13054073\nn13054560\nn13055423\nn13055577\nn13055949\nn13060190\nn13061348\nn13062421\nn13065089\nn13066448\nn13068255\nn13072528\nn13074619\nn13077033\nn13077295\nn13079073\nn13083023\nn13084184\nn13084834\nn13085747\nn13090871\nn13091620\nn13099999\nn13100677\nn13102775\nn13103877\nn13104059\nn13107694\nn13107891\nn13108131\nn13108323\nn13108481\nn13108545\nn13108841\nn13111881\nn13121349\nn13122364\nn13123431\nn13125117\nn13126856\nn13127843\nn13128976\nn13130726\nn13131028\nn13131618\nn13132338\nn13132656\nn13133613\nn13133932\nn13134947\nn13135832\nn13136316\nn13136556\nn13137409\nn13138842\nn13141415\nn13141564\nn13142504\nn13145040\nn13145250\nn13146583\nn13147270\nn13148208\nn13150894\nn13154388\nn13154494\nn13155095\nn13155305\nn13158512\nn13160604\nn13163991\nn13172923\nn13173882\nn13177048\nn13177884\nn13180534\nn13180875\nn13181055\nn13181811\nn13183056\nn13183489\nn13185269\nn13187367\nn13190747\nn13192625\nn13193642\nn13193856\nn13194036\nn13194572\nn13195341\nn13196003\nn13197274\nn13197507\nn13198914\nn13199717\nn13199970\nn13200651\nn13201969\nn13205058\nn13206817\nn13207094\nn13207335\nn13209808\nn13213066\nn13214340\nn13215586\nn13219422\nn13219833\nn13219976\nn13220122\nn13221529\nn13223588\nn13223710\nn13223843\nn13226871\nn13229543\nn13231078\nn13232779\nn13234678\nn13235159\nn13235503\nn13237188\nn13238375\nn13238988\nn13579829\nn13653902\nn13862407\nn13863020\nn13863771\nn13864035\nn13865298\nn13865483\nn1
3865904\nn13868944\nn13869547\nn13869788\nn13869896\nn13872592\nn13872822\nn13873502\nn13873917\nn13875392\nn13875571\nn13876561\nn13878306\nn13879049\nn13879320\nn13880994\nn13881644\nn13882201\nn13882276\nn13882563\nn13886260\nn13895262\nn13896100\nn13896217\nn13897996\nn13898207\nn13900287\nn13900422\nn13901211\nn13901321\nn13901858\nn13902048\nn13902336\nn13905792\nn13907272\nn13908201\nn13908580\nn13912260\nn13912540\nn13914608\nn13915023\nn13915113\nn13916721\nn13918274\nn13918387\nn13919547\nn13919919\nn13926786\nn14131950\nn14564779\nn14685296\nn14696793\nn14698884\nn14765422\nn14785065\nn14810561\nn14820180\nn14844693\nn14858292\nn14900342\nn14908027\nn14915184\nn14919819\nn14973585\nn14974264\nn14976759\nn14976871\nn14977504\nn15019030\nn15062057\nn15067877\nn15075141\nn15086247\nn15089258\nn15090065\nn15091129\nn15091304\nn15091473\nn15091669\nn15091846\nn15092059\nn15092227\nn15092650\nn15092942\nn15093137\nn15093298\nn15102455\nn15102894\n"
  },
  {
    "path": "timm/data/_info/imagenet22k_ms_synsets.txt",
    "content": "n01440764\nn01443537\nn01484850\nn01491361\nn01494475\nn01496331\nn01498041\nn01514668\nn01514859\nn01518878\nn01530575\nn01531178\nn01532829\nn01534433\nn01537544\nn01558993\nn01560419\nn01580077\nn01582220\nn01592084\nn01601694\nn01608432\nn01614925\nn01616318\nn01622779\nn01629819\nn01630670\nn01631663\nn01632458\nn01632777\nn01641577\nn01644373\nn01644900\nn01664065\nn01665541\nn01667114\nn01667778\nn01669191\nn01675722\nn01677366\nn01682714\nn01685808\nn01687978\nn01688243\nn01689811\nn01692333\nn01693334\nn01694178\nn01695060\nn01697457\nn01698640\nn01704323\nn01728572\nn01728920\nn01729322\nn01729977\nn01734418\nn01735189\nn01737021\nn01739381\nn01740131\nn01742172\nn01744401\nn01748264\nn01749939\nn01751748\nn01753488\nn01755581\nn01756291\nn01768244\nn01770081\nn01770393\nn01773157\nn01773549\nn01773797\nn01774384\nn01774750\nn01775062\nn01776313\nn01784675\nn01795545\nn01796340\nn01797886\nn01798484\nn01806143\nn01806567\nn01807496\nn01817953\nn01818515\nn01819313\nn01820546\nn01824575\nn01828970\nn01829413\nn01833805\nn01843065\nn01843383\nn01847000\nn01855032\nn01855672\nn01860187\nn01871265\nn01872401\nn01873310\nn01877812\nn01882714\nn01883070\nn01910747\nn01914609\nn01917289\nn01924916\nn01930112\nn01943899\nn01944390\nn01945685\nn01950731\nn01955084\nn01968897\nn01978287\nn01978455\nn01980166\nn01981276\nn01983481\nn01984695\nn01985128\nn01986214\nn01990800\nn02002556\nn02002724\nn02006656\nn02007558\nn02009229\nn02009912\nn02011460\nn02012849\nn02013706\nn02017213\nn02018207\nn02018795\nn02025239\nn02027492\nn02028035\nn02033041\nn02037110\nn02051845\nn02056570\nn02058221\nn02066245\nn02071294\nn02074367\nn02077923\nn02085620\nn02085782\nn02085936\nn02086079\nn02086240\nn02086646\nn02086910\nn02087046\nn02087394\nn02088094\nn02088238\nn02088364\nn02088466\nn02088632\nn02089078\nn02089867\nn02089973\nn02090379\nn02090622\nn02090721\nn02091032\nn02091134\nn02091244\nn02091467\nn02091635\nn02091831\nn02092002\nn02092339\nn02093256\nn020
93428\nn02093647\nn02093754\nn02093859\nn02093991\nn02094114\nn02094258\nn02094433\nn02095314\nn02095570\nn02095889\nn02096051\nn02096177\nn02096294\nn02096437\nn02096585\nn02097047\nn02097130\nn02097209\nn02097298\nn02097474\nn02097658\nn02098105\nn02098286\nn02098413\nn02099267\nn02099429\nn02099601\nn02099712\nn02099849\nn02100236\nn02100583\nn02100735\nn02100877\nn02101006\nn02101388\nn02101556\nn02102040\nn02102177\nn02102318\nn02102480\nn02102973\nn02104029\nn02104365\nn02105056\nn02105162\nn02105251\nn02105412\nn02105505\nn02105641\nn02105855\nn02106030\nn02106166\nn02106382\nn02106550\nn02106662\nn02107142\nn02107312\nn02107574\nn02107683\nn02107908\nn02108000\nn02108089\nn02108422\nn02108551\nn02108915\nn02109047\nn02109525\nn02109961\nn02110063\nn02110185\nn02110341\nn02110627\nn02110806\nn02110958\nn02111129\nn02111277\nn02111500\nn02111889\nn02112018\nn02112137\nn02112350\nn02112706\nn02113023\nn02113186\nn02113624\nn02113712\nn02113799\nn02113978\nn02114367\nn02114548\nn02114712\nn02114855\nn02115641\nn02115913\nn02116738\nn02117135\nn02119022\nn02119789\nn02120079\nn02120505\nn02123045\nn02123159\nn02123394\nn02123597\nn02124075\nn02125311\nn02127052\nn02128385\nn02128757\nn02128925\nn02129165\nn02129604\nn02130308\nn02132136\nn02133161\nn02134084\nn02134418\nn02137549\nn02138441\nn02165105\nn02165456\nn02167151\nn02168699\nn02169497\nn02172182\nn02174001\nn02177972\nn02190166\nn02206856\nn02219486\nn02226429\nn02229544\nn02231487\nn02233338\nn02236044\nn02256656\nn02259212\nn02264363\nn02268443\nn02268853\nn02276258\nn02277742\nn02279972\nn02280649\nn02281406\nn02281787\nn02317335\nn02319095\nn02321529\nn02325366\nn02326432\nn02328150\nn02342885\nn02346627\nn02356798\nn02361337\nn02363005\nn02364673\nn02389026\nn02391049\nn02395406\nn02396427\nn02397096\nn02398521\nn02403003\nn02408429\nn02410509\nn02412080\nn02415577\nn02417914\nn02422106\nn02422699\nn02423022\nn02437312\nn02437616\nn02441942\nn02442845\nn02443114\nn02443484\nn02444819\nn02445715\nn0
2447366\nn02454379\nn02457408\nn02480495\nn02480855\nn02481823\nn02483362\nn02483708\nn02484975\nn02486261\nn02486410\nn02487347\nn02488291\nn02488702\nn02489166\nn02490219\nn02492035\nn02492660\nn02493509\nn02493793\nn02494079\nn02497673\nn02500267\nn02504013\nn02504458\nn02509815\nn02510455\nn02514041\nn02526121\nn02536864\nn02606052\nn02607072\nn02640242\nn02641379\nn02643566\nn02655020\nn02666196\nn02667093\nn02669723\nn02672831\nn02676566\nn02687172\nn02690373\nn02692877\nn02699494\nn02701002\nn02704792\nn02708093\nn02727426\nn02730930\nn02747177\nn02749479\nn02769748\nn02776631\nn02777292\nn02782093\nn02783161\nn02786058\nn02787622\nn02788148\nn02790996\nn02791124\nn02791270\nn02793495\nn02794156\nn02795169\nn02797295\nn02799071\nn02802426\nn02804414\nn02804610\nn02807133\nn02808304\nn02808440\nn02814533\nn02814860\nn02815834\nn02817516\nn02823428\nn02823750\nn02825657\nn02834397\nn02835271\nn02837789\nn02840245\nn02841315\nn02843684\nn02859443\nn02860847\nn02865351\nn02869837\nn02870880\nn02871525\nn02877765\nn02879718\nn02883205\nn02892201\nn02892767\nn02894605\nn02895154\nn02906734\nn02909870\nn02910353\nn02916936\nn02917067\nn02927161\nn02930766\nn02939185\nn02948072\nn02950826\nn02951358\nn02951585\nn02963159\nn02965783\nn02966193\nn02966687\nn02971356\nn02974003\nn02977058\nn02978881\nn02979186\nn02980441\nn02981792\nn02988304\nn02992211\nn02992529\nn02999410\nn03000134\nn03000247\nn03000684\nn03014705\nn03016953\nn03017168\nn03018349\nn03026506\nn03028079\nn03032252\nn03041632\nn03042490\nn03045698\nn03047690\nn03062245\nn03063599\nn03063689\nn03065424\nn03075370\nn03085013\nn03089624\nn03095699\nn03100240\nn03109150\nn03110669\nn03124043\nn03124170\nn03125729\nn03126707\nn03127747\nn03127925\nn03131574\nn03133878\nn03134739\nn03141823\nn03146219\nn03160309\nn03179701\nn03180011\nn03187595\nn03188531\nn03196217\nn03197337\nn03201208\nn03207743\nn03207941\nn03208938\nn03216828\nn03218198\nn03220513\nn03223299\nn03240683\nn03249569\nn03250847\nn03255030\n
n03259280\nn03271574\nn03272010\nn03272562\nn03290653\nn03291819\nn03297495\nn03314780\nn03325584\nn03337140\nn03344393\nn03345487\nn03347037\nn03355925\nn03372029\nn03376595\nn03379051\nn03384352\nn03388043\nn03388183\nn03388549\nn03393912\nn03394916\nn03400231\nn03404251\nn03417042\nn03424325\nn03425413\nn03443371\nn03444034\nn03445777\nn03445924\nn03447447\nn03447721\nn03450230\nn03452741\nn03457902\nn03459775\nn03461385\nn03467068\nn03476684\nn03476991\nn03478589\nn03481172\nn03482405\nn03483316\nn03485407\nn03485794\nn03492542\nn03494278\nn03495258\nn03496892\nn03498962\nn03527444\nn03529860\nn03530642\nn03532672\nn03534580\nn03535780\nn03538406\nn03544143\nn03584254\nn03584829\nn03590841\nn03594734\nn03594945\nn03595614\nn03598930\nn03599486\nn03602883\nn03617480\nn03623198\nn03627232\nn03630383\nn03633091\nn03637318\nn03642806\nn03649909\nn03657121\nn03658185\nn03661043\nn03662601\nn03666591\nn03670208\nn03673027\nn03676483\nn03680355\nn03690938\nn03691459\nn03692522\nn03697007\nn03706229\nn03709823\nn03710193\nn03710637\nn03710721\nn03717622\nn03720891\nn03721384\nn03724870\nn03729826\nn03733131\nn03733281\nn03733805\nn03742115\nn03743016\nn03759954\nn03761084\nn03763968\nn03764736\nn03769881\nn03770439\nn03770679\nn03773504\nn03775071\nn03775546\nn03776460\nn03777568\nn03777754\nn03781244\nn03782006\nn03785016\nn03786901\nn03787032\nn03788195\nn03788365\nn03791053\nn03792782\nn03792972\nn03793489\nn03794056\nn03796401\nn03803284\nn03804744\nn03814639\nn03814906\nn03825788\nn03832673\nn03837869\nn03838899\nn03840681\nn03841143\nn03843555\nn03854065\nn03857828\nn03866082\nn03868242\nn03868863\nn03871628\nn03873416\nn03874293\nn03874599\nn03876231\nn03877472\nn03877845\nn03884397\nn03887697\nn03888257\nn03888605\nn03891251\nn03891332\nn03895866\nn03899768\nn03902125\nn03903868\nn03908618\nn03908714\nn03916031\nn03920288\nn03924679\nn03929660\nn03929855\nn03930313\nn03930630\nn03933933\nn03935335\nn03937543\nn03938244\nn03942813\nn03944341\nn03947888\nn03950228
\nn03954731\nn03956157\nn03958227\nn03961711\nn03967562\nn03970156\nn03976467\nn03976657\nn03977966\nn03980874\nn03982430\nn03983396\nn03991062\nn03992509\nn03995372\nn03998194\nn04004767\nn04005630\nn04008634\nn04009552\nn04019541\nn04023962\nn04026417\nn04033901\nn04033995\nn04037443\nn04039381\nn04040759\nn04041544\nn04044716\nn04049303\nn04065272\nn04067472\nn04069434\nn04070727\nn04074963\nn04081281\nn04086273\nn04090263\nn04099969\nn04111531\nn04116512\nn04118538\nn04118776\nn04120489\nn04125021\nn04127249\nn04131690\nn04133789\nn04136333\nn04141076\nn04141327\nn04141975\nn04146614\nn04147183\nn04149813\nn04152593\nn04153751\nn04154565\nn04162706\nn04179913\nn04192698\nn04200800\nn04201297\nn04204238\nn04204347\nn04208210\nn04209133\nn04209239\nn04228054\nn04229816\nn04235860\nn04238763\nn04239074\nn04243546\nn04251144\nn04252077\nn04252225\nn04254120\nn04254680\nn04254777\nn04258138\nn04259630\nn04263257\nn04264628\nn04265275\nn04266014\nn04270147\nn04273569\nn04275548\nn04277352\nn04285008\nn04286575\nn04296562\nn04310018\nn04311004\nn04311174\nn04317175\nn04325704\nn04326547\nn04328186\nn04330267\nn04332243\nn04335435\nn04336792\nn04344873\nn04346328\nn04347754\nn04350905\nn04355338\nn04355933\nn04356056\nn04357314\nn04366367\nn04367480\nn04370456\nn04371430\nn04371774\nn04372370\nn04376876\nn04380533\nn04389033\nn04392985\nn04398044\nn04399382\nn04404412\nn04409515\nn04417672\nn04418357\nn04423845\nn04428191\nn04429376\nn04435653\nn04442312\nn04443257\nn04447861\nn04456115\nn04458633\nn04461696\nn04462240\nn04465501\nn04467665\nn04476259\nn04479046\nn04482393\nn04483307\nn04485082\nn04486054\nn04487081\nn04487394\nn04493381\nn04501370\nn04505470\nn04507155\nn04509417\nn04515003\nn04517823\nn04522168\nn04523525\nn04525038\nn04525305\nn04532106\nn04532670\nn04536866\nn04540053\nn04542943\nn04548280\nn04548362\nn04550184\nn04552348\nn04553703\nn04554684\nn04557648\nn04560804\nn04562935\nn04579145\nn04579432\nn04584207\nn04589890\nn04590129\nn04591157\nn045917
13\nn04592741\nn04596742\nn04597913\nn04599235\nn04604644\nn04606251\nn04612504\nn04613696\nn06359193\nn06596364\nn06785654\nn06794110\nn06874185\nn07248320\nn07565083\nn07579787\nn07583066\nn07584110\nn07590611\nn07613480\nn07614500\nn07615774\nn07684084\nn07693725\nn07695742\nn07697313\nn07697537\nn07711569\nn07714571\nn07714990\nn07715103\nn07716358\nn07716906\nn07717410\nn07717556\nn07718472\nn07718747\nn07720875\nn07730033\nn07734744\nn07742313\nn07745940\nn07747607\nn07749582\nn07753113\nn07753275\nn07753592\nn07754684\nn07760859\nn07768694\nn07802026\nn07831146\nn07836838\nn07860988\nn07871810\nn07873807\nn07875152\nn07880968\nn07892512\nn07920052\nn07930864\nn07932039\nn09193705\nn09229709\nn09246464\nn09256479\nn09288635\nn09332890\nn09399592\nn09421951\nn09428293\nn09468604\nn09472597\nn09835506\nn10148035\nn10565667\nn11879895\nn11939491\nn12057211\nn12144580\nn12267677\nn12620546\nn12768682\nn12985857\nn12998815\nn13037406\nn13040303\nn13044778\nn13052670\nn13054560\nn13133613\nn15075141\nn00004475\nn00005787\nn00006024\nn00006484\nn00007846\nn00015388\nn00017222\nn00021265\nn00021939\nn00120010\nn00141669\nn00288000\nn00288190\nn00288384\nn00324978\nn00326094\nn00433458\nn00433661\nn00433802\nn00434075\nn00439826\nn00440039\nn00440218\nn00440382\nn00440509\nn00440643\nn00440747\nn00440941\nn00441073\nn00441824\nn00442115\nn00442437\nn00442847\nn00442981\nn00443231\nn00443375\nn00443517\nn00443692\nn00443803\nn00443917\nn00444142\nn00444340\nn00444490\nn00444651\nn00444846\nn00444937\nn00445055\nn00445226\nn00445351\nn00445685\nn00445802\nn00446311\nn00446411\nn00446493\nn00446632\nn00446804\nn00446980\nn00447073\nn00447221\nn00447361\nn00447463\nn00447540\nn00447957\nn00448126\nn00448232\nn00448466\nn00448640\nn00448748\nn00448872\nn00448958\nn00449054\nn00449168\nn00449295\nn00449517\nn00449695\nn00449796\nn00449892\nn00449977\nn00450070\nn00450335\nn00450700\nn00450866\nn00450998\nn00451186\nn00451370\nn00451563\nn00451635\nn00451768\nn00451866\nn0045
2034\nn00452152\nn00452293\nn00452734\nn00452864\nn00453126\nn00453313\nn00453396\nn00453478\nn00453631\nn00453935\nn00454237\nn00454395\nn00454493\nn00454624\nn00454855\nn00454983\nn00455076\nn00455173\nn00456465\nn00463246\nn00463543\nn00464277\nn00464478\nn00464651\nn00464894\nn00466273\nn00466377\nn00466524\nn00466630\nn00466712\nn00466880\nn00467320\nn00467536\nn00467719\nn00467995\nn00468299\nn00468480\nn00469651\nn00470554\nn00470682\nn00470830\nn00470966\nn00471437\nn00471613\nn00474568\nn00474657\nn00474769\nn00474881\nn00475014\nn00475142\nn00475273\nn00475403\nn00475535\nn00475661\nn00475787\nn00476140\nn00476235\nn00476389\nn00477392\nn00477639\nn00477827\nn00478262\nn00479076\nn00479440\nn00479616\nn00479734\nn00479887\nn00480211\nn00480366\nn00480508\nn00480885\nn00480993\nn00481803\nn00481938\nn00482122\nn00482298\nn00483205\nn00483313\nn00483409\nn00483508\nn00483605\nn00483705\nn00483848\nn00523513\nn00812526\nn00825773\nn00887544\nn01035504\nn01035667\nn01055165\nn01314388\nn01314663\nn01314781\nn01314910\nn01315213\nn01315330\nn01315581\nn01315805\nn01316422\nn01316579\nn01316734\nn01316949\nn01317089\nn01317294\nn01317391\nn01317541\nn01317813\nn01317916\nn01318053\nn01318279\nn01318381\nn01318478\nn01318660\nn01318894\nn01319001\nn01319187\nn01319467\nn01319685\nn01320872\nn01321123\nn01321230\nn01321456\nn01321579\nn01321770\nn01321854\nn01322221\nn01322343\nn01322508\nn01322604\nn01322685\nn01322898\nn01322983\nn01323068\nn01323155\nn01323261\nn01323355\nn01323493\nn01323599\nn01323781\nn01324305\nn01324431\nn01324610\nn01324799\nn01324916\nn01325060\nn01326291\nn01327909\nn01329186\nn01330126\nn01330497\nn01332181\nn01333082\nn01333483\nn01333610\nn01334217\nn01334690\nn01335218\nn01337191\nn01337734\nn01338685\nn01339083\nn01339336\nn01339471\nn01339801\nn01340014\nn01340522\nn01340785\nn01340935\nn01341090\nn01342269\nn01347583\nn01349735\nn01350226\nn01350701\nn01351170\nn01351315\nn01357328\nn01357507\nn01358572\nn01359762\nn01362336\nn01
363719\nn01365474\nn01365885\nn01366700\nn01367772\nn01368672\nn01369358\nn01369484\nn01374703\nn01374846\nn01375204\nn01376237\nn01376437\nn01376543\nn01377278\nn01377510\nn01377694\nn01378545\nn01379389\nn01380610\nn01380754\nn01381044\nn01382033\nn01384084\nn01384164\nn01384687\nn01385017\nn01385330\nn01386007\nn01386182\nn01386354\nn01387065\nn01389507\nn01390123\nn01390763\nn01392275\nn01392380\nn01393486\nn01394040\nn01394492\nn01394771\nn01395254\nn01396048\nn01396617\nn01397114\nn01397690\nn01397871\nn01400247\nn01400391\nn01402600\nn01403457\nn01404365\nn01404495\nn01405007\nn01405616\nn01407798\nn01410457\nn01411450\nn01412694\nn01413457\nn01414216\nn01415626\nn01415920\nn01416213\nn01418498\nn01418620\nn01419332\nn01419573\nn01419888\nn01421333\nn01421807\nn01422185\nn01422335\nn01422450\nn01423302\nn01423617\nn01424420\nn01425223\nn01427399\nn01429172\nn01438208\nn01438581\nn01439121\nn01439514\nn01439808\nn01440160\nn01440242\nn01440467\nn01441117\nn01441272\nn01441425\nn01441910\nn01442450\nn01442710\nn01442972\nn01443243\nn01443831\nn01444339\nn01444783\nn01445429\nn01445593\nn01445857\nn01446152\nn01446589\nn01446760\nn01447139\nn01447331\nn01447658\nn01447946\nn01448291\nn01448594\nn01448951\nn01449374\nn01449712\nn01449980\nn01450661\nn01450950\nn01451115\nn01451295\nn01451426\nn01451863\nn01452345\nn01453087\nn01453475\nn01453742\nn01454545\nn01454856\nn01455317\nn01455461\nn01455778\nn01456137\nn01456454\nn01456756\nn01457082\nn01457407\nn01457852\nn01458746\nn01458842\nn01459791\nn01460303\nn01461315\nn01461646\nn01462042\nn01462544\nn01462803\nn01464844\nn01466257\nn01467336\nn01467804\nn01468238\nn01468712\nn01469103\nn01469723\nn01470145\nn01470479\nn01470733\nn01470895\nn01471682\nn01472303\nn01472502\nn01473806\nn01474283\nn01474864\nn01475232\nn01475940\nn01476418\nn01477080\nn01477525\nn01477875\nn01478511\nn01478969\nn01479213\nn01479820\nn01480106\nn01480516\nn01480880\nn01481331\nn01481498\nn01482071\nn01482330\nn01483021\nn01483522\nn
01483830\nn01484097\nn01484285\nn01484447\nn01484562\nn01485479\nn01486010\nn01486540\nn01486838\nn01487506\nn01488038\nn01488918\nn01489501\nn01489709\nn01489920\nn01490112\nn01490360\nn01490670\nn01491006\nn01491661\nn01491874\nn01492357\nn01492569\nn01492708\nn01492860\nn01493146\nn01493541\nn01493829\nn01494041\nn01494757\nn01494882\nn01495006\nn01495493\nn01495701\nn01497118\nn01497413\nn01497738\nn01498406\nn01498699\nn01498989\nn01499396\nn01499732\nn01500091\nn01500476\nn01500854\nn01501160\nn01501641\nn01501777\nn01501948\nn01502101\nn01503061\nn01503976\nn01504179\nn01504344\nn01514752\nn01514926\nn01515078\nn01515217\nn01515303\nn01516212\nn01517389\nn01517565\nn01517966\nn01519563\nn01519873\nn01520576\nn01521399\nn01521756\nn01522450\nn01523105\nn01524359\nn01524761\nn01525720\nn01526521\nn01526766\nn01527194\nn01527347\nn01527617\nn01527917\nn01528396\nn01528654\nn01528845\nn01529672\nn01530439\nn01531344\nn01531512\nn01531639\nn01531811\nn01531971\nn01532325\nn01532511\nn01533000\nn01533339\nn01533481\nn01533651\nn01533893\nn01534155\nn01534582\nn01534762\nn01535140\nn01535469\nn01535690\nn01536035\nn01536186\nn01536334\nn01536644\nn01536780\nn01537134\nn01537895\nn01538059\nn01538200\nn01538362\nn01538630\nn01538955\nn01539272\nn01539573\nn01539925\nn01540090\nn01540233\nn01540566\nn01540832\nn01541102\nn01541386\nn01541760\nn01541922\nn01542168\nn01542433\nn01542786\nn01543175\nn01543383\nn01543632\nn01543936\nn01544208\nn01544389\nn01544704\nn01545574\nn01546039\nn01546506\nn01546921\nn01547832\nn01548301\nn01548492\nn01548694\nn01548865\nn01549053\nn01549430\nn01549641\nn01549886\nn01550172\nn01550761\nn01551080\nn01551300\nn01551711\nn01552034\nn01552333\nn01552813\nn01553142\nn01553527\nn01553762\nn01554017\nn01554448\nn01555004\nn01555305\nn01555809\nn01556182\nn01556514\nn01557185\nn01557962\nn01558149\nn01558307\nn01558461\nn01558594\nn01558765\nn01559160\nn01559477\nn01559639\nn01559804\nn01560105\nn01560280\nn01560636\nn01560793\nn01560935\
nn01561181\nn01561452\nn01561732\nn01562014\nn01562265\nn01562451\nn01563128\nn01563449\nn01563746\nn01563945\nn01564101\nn01564217\nn01564394\nn01564773\nn01564914\nn01565078\nn01565345\nn01565599\nn01565930\nn01566207\nn01566645\nn01567133\nn01567678\nn01567879\nn01568132\nn01568294\nn01568720\nn01568892\nn01569060\nn01569262\nn01569423\nn01569566\nn01569836\nn01569971\nn01570267\nn01570421\nn01570676\nn01570839\nn01571410\nn01571904\nn01572328\nn01572489\nn01572654\nn01572782\nn01573074\nn01573240\nn01573360\nn01573627\nn01573898\nn01574045\nn01574390\nn01574560\nn01574801\nn01575117\nn01575401\nn01575745\nn01576076\nn01576358\nn01576695\nn01577035\nn01577458\nn01577659\nn01577941\nn01578180\nn01578575\nn01579028\nn01579149\nn01579260\nn01579410\nn01579578\nn01579729\nn01580379\nn01580490\nn01580772\nn01580870\nn01581166\nn01581434\nn01581730\nn01581874\nn01581984\nn01582398\nn01582498\nn01582856\nn01583209\nn01583495\nn01583828\nn01584225\nn01584695\nn01584853\nn01585121\nn01585287\nn01585422\nn01585715\nn01586020\nn01586374\nn01586941\nn01587278\nn01587526\nn01587834\nn01588002\nn01588431\nn01588725\nn01588996\nn01589286\nn01589718\nn01589893\nn01590220\nn01591005\nn01591123\nn01591301\nn01591697\nn01592257\nn01592387\nn01592540\nn01592694\nn01593028\nn01593282\nn01593553\nn01594004\nn01594372\nn01594787\nn01594968\nn01595168\nn01595450\nn01595624\nn01595974\nn01596273\nn01596608\nn01597022\nn01597336\nn01597737\nn01597906\nn01598074\nn01598271\nn01598588\nn01598988\nn01599159\nn01599269\nn01599388\nn01599556\nn01599741\nn01600085\nn01600341\nn01600657\nn01601068\nn01601410\nn01602080\nn01602209\nn01602630\nn01602832\nn01603000\nn01603152\nn01603600\nn01603812\nn01603953\nn01604330\nn01604968\nn01605630\nn01606097\nn01606177\nn01606522\nn01606672\nn01606809\nn01606978\nn01607309\nn01607429\nn01607600\nn01607812\nn01607962\nn01608265\nn01608814\nn01609062\nn01609391\nn01609751\nn01609956\nn01610100\nn01610226\nn01610552\nn01610955\nn01611472\nn01611674\nn0161180
0\nn01611969\nn01612122\nn01612275\nn01612476\nn01612628\nn01612955\nn01613177\nn01613294\nn01613615\nn01613807\nn01614038\nn01614343\nn01614556\nn01615121\nn01615303\nn01615458\nn01615703\nn01616086\nn01616551\nn01616764\nn01617095\nn01617443\nn01617766\nn01618082\nn01618503\nn01618922\nn01619310\nn01619536\nn01619835\nn01620135\nn01620414\nn01620735\nn01621127\nn01621635\nn01622120\nn01622352\nn01622483\nn01622959\nn01623110\nn01623425\nn01623615\nn01623706\nn01623880\nn01624115\nn01624212\nn01624305\nn01624537\nn01624833\nn01625121\nn01625562\nn01627424\nn01628331\nn01628770\nn01629276\nn01629962\nn01630148\nn01630284\nn01630901\nn01631175\nn01631354\nn01631512\nn01632047\nn01632308\nn01632601\nn01632952\nn01633406\nn01633781\nn01634227\nn01634522\nn01635027\nn01635176\nn01635480\nn01636127\nn01636352\nn01636510\nn01636829\nn01637112\nn01637338\nn01637615\nn01637932\nn01638194\nn01638329\nn01638722\nn01639187\nn01639765\nn01640846\nn01641206\nn01641391\nn01641739\nn01641930\nn01642097\nn01642257\nn01642391\nn01642539\nn01642943\nn01643255\nn01643507\nn01643896\nn01645466\nn01645776\nn01646292\nn01646388\nn01646555\nn01646648\nn01646802\nn01646902\nn01647033\nn01647180\nn01647303\nn01647466\nn01647640\nn01648139\nn01648356\nn01648620\nn01649170\nn01649412\nn01649556\nn01649726\nn01650167\nn01650690\nn01650901\nn01651059\nn01651285\nn01651487\nn01651641\nn01651778\nn01652026\nn01652297\nn01653026\nn01653223\nn01653509\nn01653773\nn01654083\nn01654637\nn01654863\nn01655344\nn01661091\nn01661592\nn01661818\nn01662060\nn01662622\nn01662784\nn01663401\nn01663782\nn01664369\nn01664492\nn01664674\nn01664990\nn01665932\nn01666228\nn01666585\nn01667432\nn01668091\nn01668436\nn01668665\nn01668892\nn01669372\nn01669654\nn01670092\nn01670535\nn01670802\nn01671125\nn01671479\nn01671705\nn01672032\nn01672432\nn01672611\nn01673282\nn01674216\nn01674464\nn01674990\nn01675352\nn01676755\nn01677747\nn01678043\nn01678343\nn01678657\nn01679005\nn01679307\nn01679626\nn01679962\nn01680
264\nn01680478\nn01680655\nn01680813\nn01680983\nn01681328\nn01681653\nn01681940\nn01682172\nn01682435\nn01683201\nn01683558\nn01684133\nn01684578\nn01684741\nn01685439\nn01686044\nn01686220\nn01686403\nn01686609\nn01686808\nn01687128\nn01687290\nn01687665\nn01688961\nn01689081\nn01689411\nn01690149\nn01690466\nn01691217\nn01691652\nn01691951\nn01692523\nn01692864\nn01693175\nn01693783\nn01694311\nn01694709\nn01694955\nn01696633\nn01697178\nn01697611\nn01697749\nn01697978\nn01698434\nn01698782\nn01699040\nn01699254\nn01699675\nn01701551\nn01701859\nn01702256\nn01702479\nn01703011\nn01703161\nn01703569\nn01704103\nn01704626\nn01705010\nn01705591\nn01705934\nn01707294\nn01708106\nn01708998\nn01709484\nn01709876\nn01710177\nn01711160\nn01712008\nn01712752\nn01713170\nn01713764\nn01714231\nn01715888\nn01717016\nn01717229\nn01717467\nn01718096\nn01718414\nn01719403\nn01721174\nn01721898\nn01722670\nn01722998\nn01723579\nn01724231\nn01724840\nn01725086\nn01725713\nn01726203\nn01726692\nn01727646\nn01728266\nn01729672\nn01730185\nn01730307\nn01730563\nn01730812\nn01730960\nn01731137\nn01731277\nn01731545\nn01731764\nn01731941\nn01732093\nn01732244\nn01732614\nn01732789\nn01732989\nn01733214\nn01733466\nn01733757\nn01733957\nn01734104\nn01734637\nn01734808\nn01735439\nn01735577\nn01735728\nn01736032\nn01736375\nn01736796\nn01737472\nn01737728\nn01737875\nn01738065\nn01738306\nn01738601\nn01738731\nn01739094\nn01739647\nn01739871\nn01740551\nn01740885\nn01741232\nn01741442\nn01741562\nn01741943\nn01742447\nn01742821\nn01743086\nn01743605\nn01743936\nn01744100\nn01744270\nn01744555\nn01745125\nn01745484\nn01745902\nn01746191\nn01746359\nn01746952\nn01747285\nn01747589\nn01747885\nn01748389\nn01748686\nn01748906\nn01749244\nn01749582\nn01749742\nn01750167\nn01750437\nn01750743\nn01751036\nn01751215\nn01751472\nn01752165\nn01752585\nn01752736\nn01753032\nn01753180\nn01753959\nn01754370\nn01754533\nn01754876\nn01755740\nn01755952\nn01756089\nn01756508\nn01756733\nn01756916\nn017
57115\nn01757343\nn01757677\nn01757901\nn01758141\nn01758757\nn01758895\nn01767661\nn01769347\nn01770795\nn01771100\nn01771417\nn01771766\nn01772222\nn01772664\nn01774097\nn01775370\nn01775730\nn01776192\nn01776705\nn01777304\nn01777467\nn01777649\nn01777909\nn01778217\nn01778487\nn01778621\nn01778801\nn01779148\nn01779463\nn01779629\nn01779939\nn01780142\nn01780426\nn01780696\nn01781071\nn01781570\nn01781698\nn01781875\nn01782209\nn01782516\nn01783017\nn01783706\nn01784293\nn01785667\nn01786646\nn01787006\nn01787191\nn01787835\nn01788291\nn01788579\nn01788864\nn01789386\nn01789740\nn01790171\nn01790304\nn01790398\nn01790557\nn01790711\nn01790812\nn01791107\nn01791314\nn01791388\nn01791463\nn01791625\nn01791954\nn01792042\nn01792158\nn01792429\nn01792530\nn01792640\nn01792808\nn01792955\nn01793085\nn01793159\nn01793249\nn01793340\nn01793435\nn01793565\nn01793715\nn01794158\nn01794344\nn01794651\nn01795088\nn01795735\nn01795900\nn01796019\nn01796105\nn01796519\nn01796729\nn01797020\nn01797307\nn01797601\nn01798168\nn01798706\nn01798839\nn01798979\nn01799302\nn01799679\nn01800195\nn01800424\nn01800633\nn01801088\nn01801479\nn01801672\nn01801876\nn01802159\nn01802721\nn01803078\nn01803362\nn01803641\nn01803893\nn01804163\nn01804478\nn01804653\nn01804921\nn01805070\nn01805321\nn01805801\nn01806061\nn01806297\nn01806364\nn01806467\nn01806847\nn01807105\nn01807828\nn01808140\nn01808291\nn01808596\nn01809106\nn01809371\nn01809752\nn01810268\nn01810700\nn01811243\nn01811909\nn01812187\nn01812337\nn01812662\nn01812866\nn01813088\nn01813385\nn01813532\nn01813658\nn01813948\nn01814217\nn01814370\nn01814549\nn01814620\nn01814755\nn01814921\nn01815036\nn01815270\nn01815601\nn01816017\nn01816140\nn01816474\nn01816887\nn01817263\nn01817346\nn01818299\nn01818832\nn01819115\nn01819465\nn01819734\nn01820052\nn01820348\nn01820801\nn01821076\nn01821203\nn01821554\nn01821869\nn01822300\nn01822602\nn01823013\nn01823414\nn01823740\nn01824035\nn01824344\nn01824749\nn01825278\nn01825930\nn0
1826364\nn01826680\nn01826844\nn01827403\nn01827793\nn01828096\nn01828556\nn01829869\nn01830042\nn01830479\nn01830915\nn01831360\nn01831712\nn01832167\nn01832493\nn01832813\nn01833112\nn01833415\nn01834177\nn01834540\nn01835276\nn01835769\nn01835918\nn01836087\nn01836673\nn01837072\nn01837526\nn01838038\nn01838598\nn01839086\nn01839330\nn01839598\nn01839750\nn01839949\nn01840120\nn01840412\nn01840775\nn01841102\nn01841288\nn01841441\nn01841679\nn01841943\nn01842235\nn01842504\nn01842788\nn01843719\nn01844231\nn01844551\nn01844746\nn01844917\nn01845132\nn01845477\nn01846331\nn01847089\nn01847170\nn01847253\nn01847407\nn01847806\nn01847978\nn01848123\nn01848323\nn01848453\nn01848555\nn01848648\nn01848840\nn01848976\nn01849157\nn01849466\nn01849676\nn01849863\nn01850192\nn01850373\nn01850553\nn01850873\nn01851038\nn01851207\nn01851375\nn01851573\nn01851731\nn01851895\nn01852142\nn01852329\nn01852400\nn01852671\nn01852861\nn01853195\nn01853498\nn01853666\nn01853870\nn01854415\nn01854700\nn01854838\nn01855188\nn01855476\nn01856072\nn01856155\nn01856380\nn01856553\nn01856890\nn01857079\nn01857325\nn01857512\nn01857632\nn01857851\nn01858281\nn01858441\nn01858780\nn01858845\nn01858906\nn01859190\nn01859325\nn01859496\nn01859689\nn01859852\nn01860002\nn01860497\nn01860864\nn01861148\nn01861330\nn01861778\nn01862399\nn01871543\nn01871875\nn01872772\nn01874434\nn01874928\nn01875313\nn01875610\nn01876034\nn01876326\nn01876667\nn01877134\nn01877606\nn01878061\nn01878335\nn01878639\nn01878929\nn01879217\nn01879509\nn01879837\nn01880152\nn01880473\nn01880716\nn01880813\nn01881171\nn01881564\nn01881857\nn01882125\nn01883513\nn01883920\nn01884104\nn01884203\nn01884476\nn01884834\nn01885158\nn01885498\nn01886045\nn01886756\nn01887474\nn01887623\nn01887787\nn01887896\nn01888045\nn01888181\nn01888264\nn01888411\nn01889074\nn01889520\nn01889849\nn01890144\nn01890564\nn01890860\nn01891013\nn01891274\nn01891633\nn01892030\nn01892145\nn01892385\nn01892551\nn01892744\nn01893021\nn01893164\n
n01893399\nn01893825\nn01894207\nn01894522\nn01894956\nn01896844\nn01897257\nn01897426\nn01897536\nn01897667\nn01898593\nn01899894\nn01900150\nn01903234\nn01903346\nn01903498\nn01904029\nn01904806\nn01904886\nn01905321\nn01905661\nn01906749\nn01907287\nn01907738\nn01908042\nn01908958\nn01909422\nn01909788\nn01909906\nn01910252\nn01911063\nn01911403\nn01911839\nn01912152\nn01912454\nn01912809\nn01913166\nn01913346\nn01913440\nn01914163\nn01914830\nn01915700\nn01915811\nn01916187\nn01916388\nn01916481\nn01916588\nn01916925\nn01917611\nn01917882\nn01918744\nn01919385\nn01920051\nn01920438\nn01921059\nn01922303\nn01922717\nn01922948\nn01923025\nn01923404\nn01923890\nn01924800\nn01925270\nn01925695\nn01925916\nn01926379\nn01926689\nn01927159\nn01927456\nn01927928\nn01928215\nn01928517\nn01928865\nn01929186\nn01930852\nn01931140\nn01931520\nn01931714\nn01932151\nn01932936\nn01933151\nn01933478\nn01933988\nn01934440\nn01934844\nn01935176\nn01935395\nn01936391\nn01936671\nn01936858\nn01937579\nn01937909\nn01938454\nn01938735\nn01940736\nn01941223\nn01941340\nn01942177\nn01942869\nn01943087\nn01943541\nn01944118\nn01944812\nn01944955\nn01945143\nn01945340\nn01945845\nn01946277\nn01946630\nn01946827\nn01947139\nn01947396\nn01947997\nn01948446\nn01948573\nn01949085\nn01949499\nn01949973\nn01951274\nn01951613\nn01952029\nn01952712\nn01953361\nn01953594\nn01953762\nn01954516\nn01955933\nn01956344\nn01956481\nn01956764\nn01957335\nn01958038\nn01958346\nn01958435\nn01958531\nn01959029\nn01959492\nn01959985\nn01960177\nn01960459\nn01961234\nn01961600\nn01961985\nn01962506\nn01962788\nn01963317\nn01963479\nn01963571\nn01964049\nn01964271\nn01964441\nn01964957\nn01965252\nn01965529\nn01965889\nn01966377\nn01966586\nn01967094\nn01967308\nn01967963\nn01968315\nn01969726\nn01970164\nn01970667\nn01971094\nn01971280\nn01971620\nn01971850\nn01972131\nn01972541\nn01973148\nn01974773\nn01975687\nn01976146\nn01976868\nn01976957\nn01977485\nn01978010\nn01978136\nn01978587\nn01978930\nn01979269
\nn01979526\nn01979874\nn01980655\nn01981702\nn01982068\nn01982347\nn01982650\nn01983048\nn01983674\nn01983829\nn01984245\nn01985493\nn01985797\nn01986806\nn01987076\nn01987545\nn01987727\nn01988203\nn01988701\nn01988869\nn01989516\nn01989869\nn01990007\nn01990516\nn01991028\nn01991520\nn01992262\nn01992423\nn01992773\nn01993525\nn01993830\nn01994910\nn01995514\nn01995686\nn01996280\nn01996585\nn01997119\nn01997825\nn01998183\nn01998741\nn01999186\nn01999767\nn02000954\nn02002075\nn02003037\nn02003204\nn02003577\nn02003839\nn02004131\nn02004492\nn02004855\nn02005399\nn02005790\nn02006063\nn02006364\nn02006985\nn02007284\nn02008041\nn02008497\nn02008643\nn02008796\nn02009380\nn02009508\nn02009750\nn02010272\nn02010453\nn02010728\nn02011016\nn02011281\nn02011805\nn02011943\nn02012185\nn02013177\nn02013567\nn02014237\nn02014524\nn02014941\nn02015357\nn02015554\nn02015797\nn02016066\nn02016358\nn02016659\nn02016816\nn02016956\nn02017475\nn02017725\nn02018027\nn02018368\nn02019190\nn02019438\nn02019929\nn02020219\nn02020578\nn02021050\nn02021281\nn02021795\nn02022684\nn02023341\nn02023855\nn02023992\nn02024185\nn02024479\nn02024763\nn02025043\nn02025389\nn02026059\nn02026629\nn02026948\nn02027075\nn02027357\nn02027897\nn02028175\nn02028342\nn02028451\nn02028727\nn02028900\nn02029087\nn02029378\nn02029706\nn02030035\nn02030224\nn02030287\nn02030568\nn02030837\nn02030996\nn02031298\nn02031585\nn02031934\nn02032222\nn02032355\nn02032480\nn02032769\nn02033208\nn02033324\nn02033561\nn02033779\nn02033882\nn02034129\nn02034295\nn02034661\nn02034971\nn02035210\nn02035402\nn02035656\nn02036053\nn02036228\nn02036711\nn02037464\nn02037869\nn02038141\nn02038466\nn02038993\nn02039171\nn02039497\nn02039780\nn02040266\nn02040505\nn02041085\nn02041246\nn02041678\nn02041875\nn02042046\nn02042180\nn02042472\nn02042759\nn02043063\nn02043333\nn02043808\nn02044178\nn02044517\nn02044778\nn02044908\nn02045369\nn02045596\nn02045864\nn02046171\nn02046759\nn02046939\nn02047045\nn02047260\nn020474
11\nn02047517\nn02047614\nn02047975\nn02048115\nn02048353\nn02048698\nn02049088\nn02049532\nn02050004\nn02050313\nn02050442\nn02050586\nn02050809\nn02051059\nn02051474\nn02052204\nn02052365\nn02052775\nn02053083\nn02053425\nn02053584\nn02054036\nn02054502\nn02054711\nn02055107\nn02055658\nn02055803\nn02056228\nn02056728\nn02057035\nn02057330\nn02057731\nn02057898\nn02058594\nn02058747\nn02059162\nn02059541\nn02059852\nn02060133\nn02060411\nn02060569\nn02060889\nn02061217\nn02061560\nn02061853\nn02062017\nn02062430\nn02062744\nn02063224\nn02063662\nn02064000\nn02064338\nn02064816\nn02065026\nn02065263\nn02065407\nn02065726\nn02066707\nn02067240\nn02067603\nn02067768\nn02068206\nn02068541\nn02068974\nn02069412\nn02069701\nn02069974\nn02070174\nn02070430\nn02070624\nn02070776\nn02071028\nn02071636\nn02072040\nn02072493\nn02072798\nn02073250\nn02073831\nn02074726\nn02075296\nn02075612\nn02075927\nn02076196\nn02076402\nn02076779\nn02077152\nn02077384\nn02077658\nn02077787\nn02078292\nn02078574\nn02078738\nn02079005\nn02079389\nn02079851\nn02080146\nn02080415\nn02080713\nn02081060\nn02081571\nn02081798\nn02081927\nn02082056\nn02082190\nn02082791\nn02083346\nn02083672\nn02083780\nn02084071\nn02084732\nn02084861\nn02085019\nn02085118\nn02085272\nn02085374\nn02086346\nn02086478\nn02086753\nn02087122\nn02087314\nn02087551\nn02088745\nn02088839\nn02088992\nn02089232\nn02089468\nn02089555\nn02089725\nn02090129\nn02090253\nn02090475\nn02090827\nn02092173\nn02092468\nn02093056\nn02094562\nn02094721\nn02094931\nn02095050\nn02095212\nn02095412\nn02095727\nn02096756\nn02097786\nn02097967\nn02098550\nn02098806\nn02098906\nn02099029\nn02099997\nn02100399\nn02101108\nn02101670\nn02101861\nn02102605\nn02102806\nn02103181\nn02103406\nn02103841\nn02104184\nn02104280\nn02104523\nn02104882\nn02106854\nn02106966\nn02107420\nn02108254\nn02108672\nn02109150\nn02109256\nn02109391\nn02109687\nn02109811\nn02110532\nn02111626\nn02112497\nn02112826\nn02113335\nn02113892\nn02114100\nn02115012\nn0211
5096\nn02115335\nn02116185\nn02116450\nn02117512\nn02117646\nn02117900\nn02118176\nn02118333\nn02118643\nn02118707\nn02119247\nn02119359\nn02119477\nn02119634\nn02120278\nn02120997\nn02121620\nn02121808\nn02122298\nn02122430\nn02122510\nn02122580\nn02122725\nn02122810\nn02122878\nn02122948\nn02123242\nn02123478\nn02123785\nn02123917\nn02124157\nn02124313\nn02124484\nn02124623\nn02125010\nn02125081\nn02125494\nn02125689\nn02125872\nn02126028\nn02126139\nn02126317\nn02126640\nn02126787\nn02127292\nn02127381\nn02127482\nn02127586\nn02127678\nn02127808\nn02128598\nn02128669\nn02129463\nn02129530\nn02129837\nn02129923\nn02129991\nn02130086\nn02130545\nn02130925\nn02131653\nn02132320\nn02132466\nn02132580\nn02132788\nn02133400\nn02133704\nn02134971\nn02135220\nn02135610\nn02135844\nn02136103\nn02136285\nn02136452\nn02136794\nn02137015\nn02137302\nn02137722\nn02137888\nn02138169\nn02138647\nn02138777\nn02139199\nn02139671\nn02140049\nn02140179\nn02140268\nn02140491\nn02140858\nn02141306\nn02141611\nn02141713\nn02142407\nn02142734\nn02142898\nn02143142\nn02143439\nn02143891\nn02144251\nn02144593\nn02144936\nn02145424\nn02145910\nn02146201\nn02146371\nn02146700\nn02146879\nn02147173\nn02147328\nn02147591\nn02147947\nn02148088\nn02148512\nn02148835\nn02148991\nn02149420\nn02149653\nn02149861\nn02150134\nn02150482\nn02150885\nn02151230\nn02152740\nn02152881\nn02152991\nn02153109\nn02153203\nn02153809\nn02156732\nn02156871\nn02157206\nn02157285\nn02159955\nn02160947\nn02161225\nn02161338\nn02161457\nn02161588\nn02162561\nn02163008\nn02163297\nn02164464\nn02165877\nn02166229\nn02166567\nn02166826\nn02167505\nn02167820\nn02167944\nn02168245\nn02168427\nn02169023\nn02169218\nn02169705\nn02169974\nn02170400\nn02170599\nn02170738\nn02170993\nn02171164\nn02171453\nn02171869\nn02172518\nn02172678\nn02172761\nn02172870\nn02173113\nn02173373\nn02173784\nn02174355\nn02174659\nn02175014\nn02175569\nn02175916\nn02176261\nn02176439\nn02176747\nn02176916\nn02177196\nn02177506\nn02177775\nn02
178411\nn02178717\nn02179012\nn02179192\nn02179340\nn02179891\nn02180233\nn02180427\nn02180875\nn02181235\nn02181477\nn02181724\nn02182045\nn02182355\nn02182642\nn02182930\nn02183096\nn02183507\nn02183857\nn02184473\nn02184589\nn02184720\nn02185167\nn02185481\nn02186153\nn02186717\nn02187150\nn02187279\nn02187554\nn02187900\nn02188699\nn02189363\nn02189670\nn02190790\nn02191273\nn02191773\nn02191979\nn02192252\nn02192513\nn02192814\nn02193009\nn02193163\nn02194249\nn02194750\nn02195091\nn02195526\nn02195819\nn02196119\nn02196344\nn02196896\nn02197185\nn02197689\nn02197877\nn02198129\nn02198532\nn02198859\nn02199170\nn02199502\nn02200198\nn02200509\nn02200630\nn02200850\nn02201000\nn02201497\nn02201626\nn02202006\nn02202124\nn02202287\nn02202678\nn02203152\nn02203592\nn02203978\nn02204249\nn02204722\nn02204907\nn02205219\nn02205673\nn02206270\nn02207179\nn02207345\nn02207449\nn02207647\nn02207805\nn02208280\nn02208498\nn02208848\nn02208979\nn02209111\nn02209354\nn02209624\nn02209964\nn02210427\nn02210921\nn02211444\nn02211627\nn02211896\nn02212062\nn02212602\nn02212958\nn02213107\nn02213239\nn02213543\nn02213663\nn02213788\nn02214096\nn02214341\nn02214499\nn02214660\nn02214773\nn02215161\nn02215621\nn02215770\nn02216211\nn02216365\nn02216740\nn02217563\nn02217839\nn02218134\nn02218371\nn02218713\nn02219015\nn02220055\nn02220225\nn02220518\nn02220804\nn02221083\nn02221414\nn02221571\nn02221715\nn02221820\nn02222035\nn02222321\nn02222582\nn02223266\nn02223520\nn02224023\nn02224713\nn02225081\nn02225798\nn02226183\nn02226821\nn02226970\nn02227247\nn02227604\nn02227966\nn02228341\nn02228697\nn02229156\nn02229765\nn02230023\nn02230187\nn02230480\nn02230634\nn02231052\nn02231803\nn02232223\nn02233943\nn02234355\nn02234570\nn02234848\nn02235205\nn02236241\nn02236355\nn02236896\nn02237424\nn02237581\nn02237868\nn02238235\nn02238358\nn02238594\nn02238887\nn02239192\nn02239528\nn02239774\nn02240068\nn02240517\nn02241008\nn02241426\nn02241569\nn02241799\nn02242137\nn02242455\nn
02243209\nn02243562\nn02243878\nn02244173\nn02244515\nn02244797\nn02245111\nn02245443\nn02246011\nn02246628\nn02246941\nn02247216\nn02247511\nn02247655\nn02248062\nn02248368\nn02248510\nn02248887\nn02249134\nn02249515\nn02249809\nn02250280\nn02250822\nn02251067\nn02251233\nn02251593\nn02251775\nn02252226\nn02252799\nn02252972\nn02253127\nn02253264\nn02253494\nn02253715\nn02253913\nn02254246\nn02254697\nn02254901\nn02255023\nn02255391\nn02256172\nn02257003\nn02257284\nn02257715\nn02257985\nn02258198\nn02258508\nn02258629\nn02259377\nn02259708\nn02259987\nn02260421\nn02260863\nn02261063\nn02261419\nn02261757\nn02262178\nn02262449\nn02262803\nn02263378\nn02264021\nn02264232\nn02264591\nn02264885\nn02265330\nn02266050\nn02266269\nn02266421\nn02266864\nn02267208\nn02267483\nn02268148\nn02269196\nn02269340\nn02269522\nn02269657\nn02270011\nn02270200\nn02270623\nn02270945\nn02271222\nn02271570\nn02271897\nn02272286\nn02272552\nn02272871\nn02273392\nn02274024\nn02274259\nn02274822\nn02275560\nn02275773\nn02276078\nn02276355\nn02276749\nn02276902\nn02277094\nn02277268\nn02277422\nn02278024\nn02278210\nn02278463\nn02278839\nn02278980\nn02279257\nn02279637\nn02280458\nn02281015\nn02281136\nn02281267\nn02282257\nn02282385\nn02282553\nn02282903\nn02283077\nn02283201\nn02283617\nn02283951\nn02284224\nn02284611\nn02284884\nn02285179\nn02285548\nn02285801\nn02286089\nn02286425\nn02286654\nn02287004\nn02287352\nn02287622\nn02287799\nn02287987\nn02288122\nn02288268\nn02288789\nn02289307\nn02289610\nn02289988\nn02290340\nn02290664\nn02290870\nn02291220\nn02291572\nn02291748\nn02292085\nn02292401\nn02292692\nn02293352\nn02293868\nn02294097\nn02294407\nn02294577\nn02295064\nn02295390\nn02295870\nn02296021\nn02296276\nn02296612\nn02296912\nn02297294\nn02297442\nn02297819\nn02297938\nn02298095\nn02298218\nn02298541\nn02299039\nn02299157\nn02299378\nn02299505\nn02299846\nn02300173\nn02300554\nn02300797\nn02301452\nn02301935\nn02302244\nn02302459\nn02302620\nn02302969\nn02303284\nn02303585\
nn02303777\nn02304036\nn02304432\nn02304657\nn02304797\nn02305085\nn02305407\nn02305636\nn02305929\nn02306433\nn02306825\nn02307176\nn02307325\nn02307515\nn02307681\nn02307910\nn02308033\nn02308139\nn02308471\nn02308618\nn02308735\nn02309120\nn02309242\nn02309337\nn02309841\nn02310000\nn02310149\nn02310334\nn02310585\nn02310717\nn02310941\nn02311060\nn02311617\nn02311748\nn02312006\nn02312175\nn02312325\nn02312427\nn02312640\nn02312912\nn02313008\nn02313360\nn02313709\nn02315487\nn02315821\nn02316707\nn02317781\nn02318167\nn02318687\nn02319308\nn02319555\nn02319829\nn02320127\nn02320465\nn02321170\nn02322047\nn02322992\nn02323449\nn02323902\nn02324045\nn02324431\nn02324514\nn02324587\nn02324850\nn02325722\nn02325884\nn02326074\nn02326763\nn02326862\nn02327028\nn02327175\nn02327435\nn02327656\nn02327842\nn02328009\nn02328429\nn02328820\nn02328942\nn02329401\nn02330245\nn02331046\nn02331309\nn02331842\nn02332156\nn02332447\nn02332755\nn02332954\nn02333190\nn02333546\nn02333733\nn02333819\nn02333909\nn02334201\nn02334460\nn02334728\nn02335127\nn02335231\nn02336011\nn02336275\nn02336641\nn02336826\nn02337001\nn02337171\nn02337332\nn02337598\nn02337902\nn02338145\nn02338449\nn02338722\nn02338901\nn02339282\nn02339376\nn02339922\nn02340186\nn02340358\nn02340640\nn02340930\nn02341288\nn02341475\nn02341616\nn02341974\nn02342250\nn02342534\nn02343058\nn02343320\nn02343772\nn02344175\nn02344270\nn02344408\nn02344528\nn02344918\nn02345078\nn02345340\nn02345600\nn02345774\nn02345997\nn02346170\nn02346998\nn02347274\nn02347573\nn02347744\nn02348173\nn02348788\nn02349205\nn02349390\nn02349557\nn02349847\nn02350105\nn02350357\nn02350670\nn02350989\nn02351343\nn02351870\nn02352002\nn02352290\nn02352591\nn02352932\nn02353172\nn02353411\nn02353861\nn02354162\nn02354320\nn02354621\nn02354781\nn02355227\nn02355477\nn02356381\nn02356612\nn02356977\nn02357111\nn02357401\nn02357585\nn02357911\nn02358091\nn02358390\nn02358584\nn02358712\nn02358890\nn02359047\nn02359324\nn02359556\nn0235966
7\nn02359915\nn02360282\nn02360480\nn02360781\nn02360933\nn02361090\nn02361587\nn02361706\nn02361850\nn02362194\nn02363245\nn02363351\nn02363996\nn02364520\nn02364840\nn02365108\nn02365480\nn02366002\nn02366301\nn02366579\nn02366959\nn02367492\nn02367812\nn02368116\nn02368399\nn02368821\nn02369293\nn02369555\nn02369680\nn02369935\nn02370137\nn02370525\nn02370806\nn02371344\nn02372140\nn02372584\nn02372952\nn02373336\nn02374149\nn02374451\nn02375302\nn02375438\nn02375757\nn02375862\nn02376542\nn02376679\nn02376791\nn02376918\nn02377063\nn02377181\nn02377291\nn02377388\nn02377480\nn02377603\nn02377703\nn02378149\nn02378299\nn02378415\nn02378541\nn02378625\nn02378755\nn02378870\nn02378969\nn02379081\nn02379183\nn02379329\nn02379430\nn02379630\nn02379743\nn02379908\nn02380052\nn02380335\nn02380464\nn02380583\nn02380745\nn02380875\nn02381004\nn02381119\nn02381261\nn02381364\nn02381460\nn02381609\nn02381831\nn02382039\nn02382132\nn02382204\nn02382338\nn02382437\nn02382635\nn02382750\nn02382850\nn02382948\nn02383231\nn02384741\nn02384858\nn02385002\nn02385098\nn02385214\nn02385580\nn02385676\nn02385776\nn02385898\nn02386014\nn02386141\nn02386224\nn02386310\nn02386496\nn02386746\nn02386853\nn02386968\nn02387093\nn02387254\nn02387346\nn02387452\nn02387722\nn02387887\nn02387983\nn02388143\nn02388276\nn02388453\nn02388588\nn02388735\nn02388832\nn02388917\nn02389128\nn02389261\nn02389346\nn02389559\nn02389779\nn02389865\nn02389943\nn02390015\nn02390101\nn02390258\nn02390454\nn02390640\nn02390738\nn02390834\nn02390938\nn02391234\nn02391373\nn02391508\nn02391617\nn02391994\nn02392434\nn02392555\nn02392824\nn02393161\nn02393580\nn02393807\nn02393940\nn02394477\nn02395003\nn02395694\nn02395855\nn02395931\nn02396014\nn02396088\nn02396157\nn02396796\nn02397529\nn02397744\nn02397987\nn02399000\nn02401031\nn02402010\nn02402175\nn02402425\nn02403153\nn02403231\nn02403325\nn02403454\nn02403740\nn02403820\nn02403920\nn02404028\nn02404186\nn02404432\nn02404573\nn02404906\nn02405101\nn02405
302\nn02405440\nn02405577\nn02405692\nn02405799\nn02405929\nn02406046\nn02406174\nn02406432\nn02406533\nn02406647\nn02406749\nn02406859\nn02406952\nn02407071\nn02407172\nn02407276\nn02407390\nn02407521\nn02407625\nn02407763\nn02407959\nn02408660\nn02408817\nn02409038\nn02409202\nn02409508\nn02409870\nn02410011\nn02410141\nn02410702\nn02410900\nn02411206\nn02411705\nn02411999\nn02412210\nn02412440\nn02412629\nn02412700\nn02412787\nn02412909\nn02412977\nn02413050\nn02413131\nn02413484\nn02413593\nn02413717\nn02413824\nn02413917\nn02414043\nn02414209\nn02414290\nn02414442\nn02414578\nn02414763\nn02414904\nn02415130\nn02415253\nn02415435\nn02415829\nn02416104\nn02416519\nn02416820\nn02416880\nn02416964\nn02417070\nn02417242\nn02417387\nn02417534\nn02417663\nn02417785\nn02418064\nn02418465\nn02418770\nn02419056\nn02419336\nn02419634\nn02419796\nn02420509\nn02420828\nn02421136\nn02421449\nn02421792\nn02422391\nn02423218\nn02423362\nn02423589\nn02424085\nn02424305\nn02424486\nn02424589\nn02424695\nn02424909\nn02425086\nn02425228\nn02425532\nn02425887\nn02426176\nn02426481\nn02426813\nn02427032\nn02427183\nn02427470\nn02427576\nn02427724\nn02428089\nn02428349\nn02428508\nn02428842\nn02429456\nn02430045\nn02430559\nn02430643\nn02430748\nn02430830\nn02431122\nn02431337\nn02431441\nn02431542\nn02431628\nn02431785\nn02431976\nn02432291\nn02432511\nn02432704\nn02432983\nn02433318\nn02433546\nn02433729\nn02433925\nn02434190\nn02434415\nn02434712\nn02434954\nn02435216\nn02435517\nn02435853\nn02436224\nn02436353\nn02436645\nn02437136\nn02437482\nn02437971\nn02438173\nn02438272\nn02438580\nn02439033\nn02439398\nn02441326\nn02442172\nn02442336\nn02442446\nn02442572\nn02442668\nn02443015\nn02443346\nn02443808\nn02443959\nn02444251\nn02445004\nn02445171\nn02445394\nn02446206\nn02446352\nn02446645\nn02447021\nn02447762\nn02448060\nn02448318\nn02448633\nn02448885\nn02449183\nn02449350\nn02449699\nn02450034\nn02450295\nn02450426\nn02450561\nn02450677\nn02450829\nn02451125\nn02451415\nn024
51575\nn02453108\nn02453611\nn02454794\nn02455135\nn02455428\nn02455720\nn02456008\nn02456275\nn02456962\nn02457945\nn02458135\nn02458517\nn02459190\nn02460009\nn02460451\nn02460817\nn02461128\nn02461830\nn02462213\nn02469248\nn02469472\nn02469914\nn02470238\nn02470325\nn02470709\nn02470899\nn02471300\nn02471762\nn02472293\nn02472987\nn02473307\nn02473554\nn02473720\nn02473857\nn02473983\nn02474110\nn02474282\nn02474605\nn02474777\nn02475078\nn02475358\nn02475669\nn02476219\nn02476567\nn02476870\nn02477028\nn02477187\nn02477329\nn02477516\nn02477782\nn02478239\nn02478875\nn02479332\nn02480153\nn02481103\nn02481235\nn02481366\nn02481500\nn02482060\nn02482286\nn02482474\nn02482650\nn02483092\nn02484322\nn02484473\nn02485225\nn02485371\nn02485536\nn02485688\nn02485988\nn02486657\nn02486908\nn02487079\nn02487547\nn02487675\nn02487847\nn02488003\nn02488415\nn02488894\nn02489589\nn02490597\nn02490811\nn02491107\nn02491329\nn02491474\nn02492356\nn02492948\nn02493224\nn02494383\nn02495242\nn02496052\nn02496913\nn02498153\nn02498743\nn02499022\nn02499316\nn02499568\nn02499808\nn02500596\nn02501583\nn02501923\nn02502006\nn02502514\nn02502807\nn02503127\nn02503517\nn02503756\nn02504770\nn02505063\nn02505238\nn02505485\nn02505998\nn02506947\nn02507148\nn02507649\nn02508021\nn02508213\nn02508346\nn02508742\nn02509197\nn02509515\nn02511730\nn02512053\nn02512752\nn02512830\nn02512938\nn02513248\nn02513355\nn02513560\nn02513727\nn02513805\nn02513939\nn02515214\nn02515713\nn02516188\nn02516776\nn02517442\nn02517938\nn02518324\nn02518622\nn02519148\nn02519340\nn02519472\nn02519686\nn02519862\nn02520147\nn02520525\nn02520810\nn02521646\nn02522399\nn02522637\nn02522722\nn02522866\nn02523110\nn02523427\nn02523877\nn02524202\nn02524524\nn02524659\nn02524928\nn02525382\nn02525703\nn02526425\nn02526818\nn02527057\nn02527271\nn02527622\nn02528163\nn02529293\nn02529772\nn02530052\nn02530188\nn02530421\nn02530637\nn02530831\nn02530999\nn02531114\nn02531625\nn02532028\nn02532272\nn02532451\nn0
2532602\nn02532786\nn02532918\nn02533209\nn02533545\nn02533834\nn02534165\nn02534559\nn02534734\nn02535080\nn02535163\nn02535258\nn02535537\nn02535759\nn02536165\nn02536456\nn02537085\nn02537319\nn02537525\nn02537716\nn02538010\nn02538216\nn02538406\nn02538562\nn02538985\nn02539424\nn02539573\nn02539894\nn02540412\nn02540983\nn02541257\nn02541687\nn02542017\nn02542432\nn02542958\nn02543255\nn02543565\nn02544274\nn02545841\nn02546028\nn02546331\nn02546627\nn02547014\nn02547733\nn02548247\nn02548689\nn02548884\nn02549248\nn02549376\nn02549989\nn02550203\nn02550460\nn02550655\nn02551134\nn02551668\nn02552171\nn02553028\nn02554730\nn02555863\nn02556373\nn02556846\nn02557182\nn02557318\nn02557591\nn02557749\nn02557909\nn02558206\nn02558860\nn02559144\nn02559383\nn02559862\nn02560110\nn02561108\nn02561381\nn02561514\nn02561661\nn02561803\nn02561937\nn02562315\nn02562796\nn02562971\nn02563079\nn02563182\nn02563648\nn02563792\nn02563949\nn02564270\nn02564403\nn02564720\nn02564935\nn02565072\nn02565324\nn02565573\nn02566109\nn02566489\nn02566665\nn02567334\nn02567633\nn02568087\nn02568447\nn02568959\nn02569484\nn02569631\nn02569905\nn02570164\nn02570484\nn02570838\nn02571167\nn02571652\nn02571810\nn02572196\nn02572484\nn02573249\nn02573704\nn02574271\nn02574910\nn02575325\nn02575590\nn02576223\nn02576575\nn02576906\nn02577041\nn02577164\nn02577403\nn02577662\nn02577952\nn02578233\nn02578454\nn02578771\nn02578928\nn02579303\nn02579557\nn02579762\nn02579928\nn02580336\nn02580679\nn02580830\nn02581108\nn02581482\nn02581642\nn02581957\nn02582220\nn02582349\nn02582721\nn02583567\nn02583890\nn02584145\nn02584449\nn02585872\nn02586238\nn02586543\nn02587051\nn02587300\nn02587479\nn02587618\nn02587877\nn02588286\nn02588794\nn02588945\nn02589062\nn02589196\nn02589316\nn02589623\nn02589796\nn02590094\nn02590495\nn02590702\nn02590987\nn02591330\nn02591613\nn02591911\nn02592055\nn02592371\nn02592734\nn02593019\nn02593191\nn02593453\nn02593679\nn02594250\nn02594942\nn02595056\nn02595339\n
n02595702\nn02596067\nn02596252\nn02596381\nn02596720\nn02597004\nn02597367\nn02597608\nn02597818\nn02597972\nn02598134\nn02598573\nn02598878\nn02599052\nn02599347\nn02599557\nn02599958\nn02600298\nn02600503\nn02600798\nn02601344\nn02601767\nn02601921\nn02602059\nn02602405\nn02602760\nn02603317\nn02603540\nn02603862\nn02604157\nn02604480\nn02604954\nn02605316\nn02605703\nn02605936\nn02606384\nn02606751\nn02607201\nn02607470\nn02607862\nn02608284\nn02608547\nn02608860\nn02608996\nn02609302\nn02609823\nn02610066\nn02610373\nn02610664\nn02610980\nn02611561\nn02611898\nn02612167\nn02613181\nn02613572\nn02613820\nn02614140\nn02614482\nn02614653\nn02614978\nn02615298\nn02616128\nn02616397\nn02616851\nn02617537\nn02618094\nn02618513\nn02618827\nn02619165\nn02619550\nn02619861\nn02620167\nn02620578\nn02621258\nn02621908\nn02622249\nn02622547\nn02622712\nn02622955\nn02623445\nn02624167\nn02624551\nn02624807\nn02624987\nn02625258\nn02625612\nn02625851\nn02626089\nn02626265\nn02626471\nn02626762\nn02627037\nn02627292\nn02627532\nn02627835\nn02628062\nn02628259\nn02628600\nn02629230\nn02629716\nn02630281\nn02630615\nn02630739\nn02631041\nn02631330\nn02631475\nn02631628\nn02631775\nn02632039\nn02632494\nn02633422\nn02633677\nn02633977\nn02634545\nn02635154\nn02635580\nn02636170\nn02636405\nn02636550\nn02636854\nn02637179\nn02637475\nn02637977\nn02638596\nn02639087\nn02639605\nn02639922\nn02640626\nn02640857\nn02642107\nn02642644\nn02643112\nn02643316\nn02643836\nn02644113\nn02644360\nn02644501\nn02644665\nn02644817\nn02645538\nn02645691\nn02645953\nn02646667\nn02646892\nn02648035\nn02648625\nn02648916\nn02649218\nn02649546\nn02650050\nn02650413\nn02650541\nn02651060\nn02652132\nn02652668\nn02653145\nn02653497\nn02653786\nn02654112\nn02654425\nn02654745\nn02655523\nn02655848\nn02656032\nn02656301\nn02656670\nn02656969\nn02657368\nn02657694\nn02658079\nn02658531\nn02658811\nn02659176\nn02659478\nn02659808\nn02660091\nn02660208\nn02660519\nn02660640\nn02661017\nn02661473\nn02661618
\nn02662239\nn02662397\nn02662559\nn02662825\nn02662993\nn02663211\nn02663485\nn02663849\nn02664285\nn02664642\nn02665250\nn02665985\nn02666501\nn02666624\nn02666943\nn02667244\nn02667379\nn02667478\nn02667576\nn02667693\nn02668393\nn02668613\nn02669295\nn02669442\nn02669534\nn02670186\nn02670382\nn02670683\nn02670935\nn02671780\nn02672152\nn02672371\nn02675077\nn02675219\nn02675522\nn02676097\nn02676261\nn02676670\nn02676938\nn02677028\nn02677136\nn02677436\nn02677718\nn02678010\nn02678384\nn02678897\nn02679142\nn02679257\nn02679961\nn02680110\nn02680512\nn02680638\nn02680754\nn02681392\nn02682311\nn02682407\nn02682569\nn02682811\nn02682922\nn02683183\nn02683323\nn02683454\nn02683558\nn02683791\nn02684248\nn02684356\nn02684515\nn02684649\nn02684962\nn02685082\nn02685253\nn02685365\nn02685701\nn02685995\nn02686121\nn02686227\nn02686379\nn02686568\nn02687423\nn02687682\nn02687821\nn02687992\nn02688273\nn02688443\nn02689144\nn02689274\nn02689434\nn02689748\nn02689819\nn02690715\nn02691156\nn02692086\nn02692232\nn02692513\nn02692680\nn02693246\nn02693413\nn02693540\nn02694045\nn02694279\nn02694426\nn02694662\nn02694966\nn02695627\nn02695762\nn02696165\nn02696246\nn02696569\nn02696843\nn02697022\nn02697221\nn02697576\nn02697675\nn02697876\nn02698244\nn02698473\nn02698634\nn02699629\nn02699770\nn02699915\nn02700064\nn02700258\nn02700895\nn02701260\nn02701730\nn02702989\nn02703124\nn02703275\nn02704645\nn02704949\nn02705201\nn02705429\nn02705944\nn02706221\nn02706806\nn02708224\nn02708433\nn02708555\nn02708711\nn02708885\nn02709101\nn02709367\nn02709637\nn02709763\nn02709908\nn02710044\nn02710201\nn02710324\nn02710429\nn02710600\nn02711237\nn02711780\nn02712545\nn02712643\nn02713003\nn02713218\nn02713364\nn02713496\nn02714315\nn02714535\nn02714751\nn02715229\nn02715513\nn02715712\nn02716626\nn02720048\nn02720576\nn02721813\nn02723165\nn02724722\nn02725872\nn02726017\nn02726210\nn02726305\nn02726681\nn02727016\nn02727141\nn02727825\nn02728440\nn02729222\nn02729837\nn027299
65\nn02730265\nn02730568\nn02731251\nn02731398\nn02731629\nn02731900\nn02732072\nn02732572\nn02732827\nn02733213\nn02733524\nn02734725\nn02734835\nn02735268\nn02735361\nn02735538\nn02735688\nn02736396\nn02736798\nn02737351\nn02737660\nn02738031\nn02738271\nn02738449\nn02738535\nn02738741\nn02738859\nn02738978\nn02739123\nn02739427\nn02739550\nn02739668\nn02739889\nn02740061\nn02740300\nn02740533\nn02740764\nn02741367\nn02741475\nn02742070\nn02742194\nn02742322\nn02742468\nn02742753\nn02743426\nn02744323\nn02744844\nn02744961\nn02745492\nn02745611\nn02745816\nn02746008\nn02746225\nn02746365\nn02746595\nn02746683\nn02746978\nn02747063\nn02747672\nn02747802\nn02748183\nn02748359\nn02748491\nn02749169\nn02749292\nn02749670\nn02749790\nn02749953\nn02750070\nn02750169\nn02750320\nn02750652\nn02751067\nn02751215\nn02751295\nn02751490\nn02752199\nn02752496\nn02752615\nn02752810\nn02752917\nn02753044\nn02753394\nn02753710\nn02754103\nn02754656\nn02755140\nn02755352\nn02755529\nn02755675\nn02755823\nn02755984\nn02756098\nn02756854\nn02756977\nn02757061\nn02757337\nn02757462\nn02757714\nn02757810\nn02757927\nn02758134\nn02758490\nn02758863\nn02758960\nn02759257\nn02759387\nn02759700\nn02759963\nn02760099\nn02760199\nn02760298\nn02760429\nn02760658\nn02760855\nn02761034\nn02761206\nn02761392\nn02761557\nn02761696\nn02761834\nn02762169\nn02762371\nn02762508\nn02762725\nn02762909\nn02763083\nn02763198\nn02763306\nn02763604\nn02763714\nn02763901\nn02764044\nn02764398\nn02764505\nn02764614\nn02764779\nn02764935\nn02765028\nn02766168\nn02766320\nn02766534\nn02766792\nn02767038\nn02767147\nn02767433\nn02767665\nn02767956\nn02768114\nn02768226\nn02768433\nn02768655\nn02768973\nn02769075\nn02769290\nn02769669\nn02769963\nn02770078\nn02770211\nn02770585\nn02770721\nn02770830\nn02771004\nn02771166\nn02771286\nn02771547\nn02771750\nn02772101\nn02772435\nn02772554\nn02772700\nn02773037\nn02773838\nn02774152\nn02774630\nn02774921\nn02775039\nn02775178\nn02775483\nn02775689\nn02775813\nn0277
5897\nn02776007\nn02776205\nn02776505\nn02776825\nn02776978\nn02777100\nn02777402\nn02777638\nn02777734\nn02777927\nn02778131\nn02778294\nn02778456\nn02778588\nn02778669\nn02779435\nn02779609\nn02779719\nn02779971\nn02780315\nn02780445\nn02780588\nn02780704\nn02780815\nn02781121\nn02781213\nn02781338\nn02781517\nn02781764\nn02782432\nn02782602\nn02782681\nn02782778\nn02783035\nn02783324\nn02783459\nn02783900\nn02783994\nn02784124\nn02784998\nn02785648\nn02786198\nn02786331\nn02786463\nn02786611\nn02786736\nn02786837\nn02787120\nn02787269\nn02787435\nn02788021\nn02788386\nn02788462\nn02788572\nn02788689\nn02789487\nn02790669\nn02790823\nn02791532\nn02791665\nn02791795\nn02792409\nn02792552\nn02792948\nn02793089\nn02793199\nn02793296\nn02793414\nn02793684\nn02793842\nn02793930\nn02794008\nn02794368\nn02794474\nn02794664\nn02794779\nn02794972\nn02795528\nn02795670\nn02795783\nn02795978\nn02796207\nn02796318\nn02796412\nn02796623\nn02796995\nn02797535\nn02797692\nn02797881\nn02799175\nn02799323\nn02799897\nn02800213\nn02800497\nn02800675\nn02800940\nn02801047\nn02801184\nn02801450\nn02801525\nn02801823\nn02801938\nn02802215\nn02802544\nn02802721\nn02802990\nn02803349\nn02803539\nn02803666\nn02803809\nn02803934\nn02804123\nn02804252\nn02804515\nn02805283\nn02805845\nn02805983\nn02806088\nn02806379\nn02806530\nn02806762\nn02806875\nn02806992\nn02807523\nn02807616\nn02807731\nn02808185\nn02808829\nn02808968\nn02809105\nn02809241\nn02809364\nn02809491\nn02809605\nn02809736\nn02810139\nn02810270\nn02810471\nn02810782\nn02811059\nn02811204\nn02811350\nn02811468\nn02811618\nn02811719\nn02811936\nn02812201\nn02812342\nn02812631\nn02812785\nn02812949\nn02813252\nn02813399\nn02813544\nn02813645\nn02813752\nn02813981\nn02814116\nn02814338\nn02814428\nn02814774\nn02815478\nn02815749\nn02815950\nn02816494\nn02816656\nn02816768\nn02817031\nn02817251\nn02817386\nn02817650\nn02817799\nn02818135\nn02818254\nn02818687\nn02818832\nn02819697\nn02820085\nn02820210\nn02820556\nn02820675\nn02
821202\nn02821415\nn02821543\nn02821627\nn02821943\nn02822064\nn02822220\nn02822399\nn02822579\nn02822762\nn02822865\nn02823124\nn02823335\nn02823510\nn02823586\nn02823848\nn02823964\nn02824058\nn02824152\nn02824319\nn02824448\nn02825153\nn02825240\nn02825442\nn02825872\nn02825961\nn02826068\nn02826259\nn02826459\nn02826589\nn02826683\nn02826812\nn02826886\nn02827148\nn02827606\nn02828115\nn02828299\nn02828427\nn02828884\nn02829246\nn02829353\nn02829510\nn02829596\nn02830157\nn02831237\nn02831335\nn02831595\nn02831724\nn02831894\nn02831998\nn02833040\nn02833140\nn02833275\nn02833403\nn02833793\nn02834027\nn02834506\nn02834642\nn02834778\nn02835412\nn02835551\nn02835724\nn02835829\nn02835915\nn02836035\nn02836174\nn02836268\nn02836392\nn02836513\nn02836607\nn02836900\nn02837134\nn02837567\nn02837887\nn02838014\nn02838178\nn02838345\nn02838577\nn02838728\nn02838958\nn02839110\nn02839351\nn02839592\nn02839910\nn02840134\nn02840515\nn02840619\nn02841063\nn02841187\nn02841506\nn02841641\nn02841847\nn02842133\nn02842573\nn02842809\nn02843029\nn02843158\nn02843276\nn02843465\nn02843553\nn02843777\nn02843909\nn02844056\nn02844214\nn02844307\nn02844714\nn02845130\nn02845293\nn02845985\nn02846141\nn02846260\nn02846511\nn02846619\nn02846733\nn02846874\nn02847461\nn02847631\nn02847852\nn02848118\nn02848216\nn02848523\nn02848806\nn02848921\nn02849154\nn02849885\nn02850060\nn02850358\nn02850732\nn02850950\nn02851099\nn02851795\nn02851939\nn02852043\nn02852173\nn02852360\nn02853016\nn02853218\nn02853336\nn02853745\nn02853870\nn02854378\nn02854532\nn02854630\nn02854739\nn02854926\nn02855089\nn02855390\nn02855701\nn02855793\nn02855925\nn02856013\nn02856237\nn02856362\nn02857365\nn02857477\nn02857644\nn02857907\nn02858304\nn02859184\nn02859343\nn02859557\nn02859729\nn02859955\nn02860415\nn02860640\nn02861022\nn02861147\nn02861286\nn02861387\nn02861509\nn02861658\nn02861777\nn02861886\nn02862048\nn02862916\nn02863014\nn02863176\nn02863340\nn02863426\nn02863536\nn02863638\nn02863750\nn
02864122\nn02864504\nn02864593\nn02864987\nn02865665\nn02865931\nn02866106\nn02866386\nn02866578\nn02867401\nn02867592\nn02867715\nn02867966\nn02868240\nn02868429\nn02868546\nn02868638\nn02868975\nn02869155\nn02869249\nn02869563\nn02869737\nn02870526\nn02870676\nn02870772\nn02871005\nn02871147\nn02871314\nn02871439\nn02871631\nn02871824\nn02871963\nn02872333\nn02872529\nn02872752\nn02873520\nn02873623\nn02873733\nn02873839\nn02874086\nn02874214\nn02874336\nn02874442\nn02874537\nn02874642\nn02874750\nn02875436\nn02875626\nn02875948\nn02876084\nn02876326\nn02876457\nn02876657\nn02877266\nn02877513\nn02877642\nn02877962\nn02878107\nn02878222\nn02878425\nn02878534\nn02878628\nn02878796\nn02879087\nn02879309\nn02879422\nn02879517\nn02880189\nn02880393\nn02880546\nn02880842\nn02880940\nn02881193\nn02881546\nn02881757\nn02881906\nn02882190\nn02882301\nn02882483\nn02882647\nn02882894\nn02883004\nn02883101\nn02883344\nn02884225\nn02884450\nn02884859\nn02884994\nn02885108\nn02885233\nn02885338\nn02885462\nn02885882\nn02886321\nn02886434\nn02886599\nn02887079\nn02887209\nn02887489\nn02887832\nn02887970\nn02888270\nn02888429\nn02888569\nn02888898\nn02889425\nn02889646\nn02889856\nn02889996\nn02890188\nn02890351\nn02890513\nn02890662\nn02890804\nn02890940\nn02891188\nn02891788\nn02892304\nn02892392\nn02892499\nn02892626\nn02892948\nn02893269\nn02893418\nn02893608\nn02893692\nn02893941\nn02894024\nn02894158\nn02894337\nn02894847\nn02895008\nn02895328\nn02895438\nn02896074\nn02896294\nn02896442\nn02896694\nn02896856\nn02896949\nn02897097\nn02897389\nn02897820\nn02898093\nn02898173\nn02898269\nn02898369\nn02898585\nn02898711\nn02899439\nn02900160\nn02900459\nn02900594\nn02900705\nn02900857\nn02900987\nn02901114\nn02901259\nn02901377\nn02901481\nn02901620\nn02901793\nn02901901\nn02902079\nn02902687\nn02902816\nn02902916\nn02903006\nn02903126\nn02903204\nn02903727\nn02903852\nn02904109\nn02904233\nn02904505\nn02904640\nn02904803\nn02904927\nn02905036\nn02905152\nn02905886\nn02906963\
nn02907082\nn02907296\nn02907391\nn02907656\nn02907873\nn02908123\nn02908217\nn02908773\nn02908951\nn02909053\nn02909165\nn02909285\nn02909706\nn02910145\nn02910241\nn02910542\nn02910701\nn02910864\nn02910964\nn02911332\nn02911485\nn02912065\nn02912319\nn02912557\nn02912894\nn02913152\nn02914991\nn02915904\nn02916065\nn02916179\nn02916350\nn02917377\nn02917521\nn02917607\nn02917742\nn02917964\nn02918112\nn02918330\nn02918455\nn02918595\nn02918831\nn02918964\nn02919148\nn02919308\nn02919414\nn02919648\nn02919792\nn02919890\nn02919976\nn02920083\nn02920164\nn02920259\nn02920369\nn02920503\nn02920658\nn02921029\nn02921195\nn02921292\nn02921406\nn02921592\nn02921756\nn02921884\nn02922159\nn02922292\nn02922461\nn02922578\nn02922798\nn02922877\nn02923129\nn02923535\nn02923682\nn02923915\nn02924116\nn02925009\nn02925107\nn02925385\nn02925519\nn02925666\nn02926426\nn02926591\nn02927053\nn02927764\nn02927887\nn02928049\nn02928299\nn02928413\nn02928608\nn02929184\nn02929289\nn02929462\nn02929582\nn02929923\nn02930080\nn02930214\nn02930339\nn02930645\nn02931013\nn02931148\nn02931294\nn02931417\nn02931836\nn02932019\nn02932400\nn02932523\nn02932693\nn02932891\nn02933112\nn02933340\nn02933462\nn02933649\nn02933750\nn02933990\nn02934168\nn02934451\nn02935017\nn02935387\nn02935490\nn02935658\nn02935891\nn02936176\nn02936281\nn02936402\nn02936570\nn02936714\nn02936921\nn02937010\nn02937336\nn02937958\nn02938218\nn02938321\nn02938886\nn02939763\nn02939866\nn02940289\nn02940385\nn02940570\nn02940706\nn02941095\nn02941228\nn02941845\nn02942015\nn02942147\nn02942349\nn02942460\nn02942699\nn02943241\nn02943465\nn02943686\nn02943871\nn02943964\nn02944075\nn02944146\nn02944256\nn02944459\nn02944579\nn02944826\nn02945161\nn02945813\nn02945964\nn02946127\nn02946270\nn02946348\nn02946509\nn02946753\nn02946824\nn02946921\nn02947212\nn02947660\nn02947818\nn02947977\nn02948293\nn02948403\nn02948557\nn02948834\nn02948942\nn02949084\nn02949202\nn02949356\nn02949542\nn02950018\nn02950120\nn0295018
6\nn02950256\nn02950482\nn02950632\nn02950943\nn02951703\nn02951843\nn02952109\nn02952237\nn02952374\nn02952485\nn02952585\nn02952674\nn02952798\nn02952935\nn02953056\nn02953197\nn02953455\nn02953552\nn02953673\nn02953850\nn02954163\nn02954340\nn02954938\nn02955065\nn02955247\nn02955540\nn02955767\nn02956393\nn02956699\nn02956795\nn02956883\nn02957008\nn02957135\nn02957252\nn02957427\nn02957755\nn02957862\nn02958343\nn02959942\nn02960352\nn02960690\nn02960903\nn02961035\nn02961225\nn02961451\nn02961544\nn02961947\nn02962061\nn02962200\nn02962414\nn02962843\nn02962938\nn02963302\nn02963503\nn02963692\nn02963821\nn02963987\nn02964075\nn02964196\nn02964295\nn02964634\nn02964843\nn02964934\nn02965024\nn02965122\nn02965216\nn02965300\nn02965529\nn02966068\nn02966545\nn02966786\nn02966942\nn02967081\nn02967170\nn02967294\nn02967407\nn02967540\nn02967626\nn02967782\nn02967991\nn02968074\nn02968210\nn02968333\nn02968473\nn02969010\nn02969163\nn02969323\nn02969527\nn02969634\nn02969886\nn02970408\nn02970534\nn02970685\nn02970849\nn02971167\nn02971473\nn02971579\nn02971691\nn02971940\nn02972397\nn02972714\nn02972934\nn02973017\nn02973236\nn02973805\nn02973904\nn02974348\nn02974454\nn02974565\nn02974697\nn02975212\nn02975589\nn02975994\nn02976123\nn02976249\nn02976350\nn02976455\nn02976552\nn02976641\nn02976815\nn02976939\nn02977330\nn02977438\nn02977619\nn02977936\nn02978055\nn02978205\nn02978367\nn02978478\nn02978753\nn02979074\nn02979290\nn02979399\nn02979516\nn02979836\nn02980036\nn02980203\nn02980625\nn02981024\nn02981198\nn02981321\nn02981565\nn02981911\nn02982232\nn02982416\nn02982515\nn02982599\nn02983072\nn02983189\nn02983357\nn02983507\nn02983904\nn02984061\nn02984203\nn02984469\nn02984699\nn02985137\nn02985606\nn02985828\nn02985963\nn02986066\nn02986160\nn02986348\nn02987047\nn02987379\nn02987492\nn02987706\nn02987823\nn02987950\nn02988066\nn02988156\nn02988486\nn02988679\nn02988963\nn02989099\nn02990373\nn02990758\nn02991048\nn02991302\nn02991847\nn02992032\nn02992
368\nn02992795\nn02993194\nn02993368\nn02993546\nn02994573\nn02994743\nn02995345\nn02995871\nn02995998\nn02997391\nn02997607\nn02997910\nn02998003\nn02998107\nn02998563\nn02998696\nn02998841\nn02999138\nn02999936\nn03000530\nn03001115\nn03001282\nn03001540\nn03001627\nn03002096\nn03002210\nn03002341\nn03002555\nn03002711\nn03002816\nn03002948\nn03003091\nn03003633\nn03004275\nn03004409\nn03004531\nn03004620\nn03004713\nn03004824\nn03005033\nn03005147\nn03005285\nn03005515\nn03005619\nn03006626\nn03006788\nn03006903\nn03007130\nn03007297\nn03007444\nn03007591\nn03008177\nn03008817\nn03008976\nn03009111\nn03009269\nn03009794\nn03010473\nn03010656\nn03010795\nn03010915\nn03011018\nn03011355\nn03011741\nn03012013\nn03012159\nn03012373\nn03012499\nn03012644\nn03012734\nn03012897\nn03013006\nn03013438\nn03013580\nn03013850\nn03014440\nn03015149\nn03015254\nn03015478\nn03015631\nn03015851\nn03016209\nn03016389\nn03016609\nn03016737\nn03016868\nn03017070\nn03017698\nn03017835\nn03018209\nn03018614\nn03018712\nn03018848\nn03019198\nn03019304\nn03019434\nn03019685\nn03019806\nn03019938\nn03020034\nn03020416\nn03020692\nn03021228\nn03024064\nn03024233\nn03024333\nn03024518\nn03025070\nn03025165\nn03025250\nn03025886\nn03026907\nn03027001\nn03027108\nn03027250\nn03027505\nn03027625\nn03028596\nn03028785\nn03029066\nn03029197\nn03029296\nn03029445\nn03029925\nn03030262\nn03030353\nn03030557\nn03030880\nn03031012\nn03031152\nn03031422\nn03031756\nn03032453\nn03032811\nn03033267\nn03033362\nn03033986\nn03034244\nn03034405\nn03034516\nn03034663\nn03035252\nn03035510\nn03035715\nn03035832\nn03036022\nn03036149\nn03036244\nn03036341\nn03036469\nn03036701\nn03036866\nn03037108\nn03037228\nn03037404\nn03037590\nn03037709\nn03038041\nn03038281\nn03038480\nn03038685\nn03038870\nn03039015\nn03039259\nn03039353\nn03039493\nn03039827\nn03039947\nn03040229\nn03040376\nn03040836\nn03041114\nn03041265\nn03041449\nn03041810\nn03042139\nn03042384\nn03042697\nn03042829\nn03042984\nn03043173\nn030
43274\nn03043423\nn03043693\nn03043798\nn03043958\nn03044671\nn03044801\nn03044934\nn03045074\nn03045228\nn03045337\nn03045800\nn03046029\nn03046133\nn03046257\nn03046802\nn03046921\nn03047052\nn03047171\nn03047799\nn03047941\nn03048883\nn03049066\nn03049326\nn03049457\nn03049782\nn03049924\nn03050026\nn03050453\nn03050546\nn03050655\nn03050864\nn03051041\nn03051249\nn03051396\nn03051540\nn03052464\nn03052917\nn03053047\nn03053976\nn03054491\nn03054605\nn03054901\nn03055159\nn03055418\nn03055670\nn03055857\nn03056097\nn03056215\nn03056288\nn03056493\nn03056583\nn03056873\nn03057021\nn03057541\nn03057636\nn03057724\nn03057841\nn03057920\nn03058107\nn03058603\nn03058949\nn03059103\nn03059236\nn03059366\nn03059685\nn03059934\nn03060728\nn03061050\nn03061211\nn03061345\nn03061505\nn03061674\nn03061819\nn03061893\nn03062015\nn03062122\nn03062336\nn03062651\nn03062798\nn03062985\nn03063073\nn03063199\nn03063338\nn03063485\nn03063834\nn03063968\nn03064250\nn03064350\nn03064562\nn03064758\nn03064935\nn03065243\nn03065708\nn03066232\nn03066359\nn03066464\nn03066849\nn03067093\nn03067212\nn03067339\nn03067518\nn03068181\nn03068998\nn03069752\nn03070059\nn03070193\nn03070396\nn03070587\nn03070854\nn03071021\nn03071160\nn03071288\nn03071552\nn03072056\nn03072201\nn03072440\nn03072682\nn03073296\nn03073384\nn03073545\nn03073694\nn03073977\nn03074380\nn03074855\nn03075097\nn03075248\nn03075500\nn03075634\nn03075768\nn03075946\nn03076411\nn03076623\nn03076708\nn03077442\nn03077616\nn03077741\nn03078287\nn03078506\nn03078670\nn03078802\nn03078995\nn03079136\nn03079230\nn03079494\nn03079616\nn03079741\nn03080309\nn03080497\nn03080633\nn03080731\nn03080904\nn03081859\nn03081986\nn03082127\nn03082280\nn03082450\nn03082656\nn03082807\nn03082979\nn03084420\nn03084834\nn03085219\nn03085333\nn03085602\nn03085781\nn03085915\nn03086183\nn03086457\nn03086580\nn03086670\nn03086868\nn03087069\nn03087245\nn03087366\nn03087521\nn03087643\nn03087816\nn03088389\nn03088580\nn03088707\nn03089477\nn0
3089753\nn03089879\nn03090000\nn03090172\nn03090437\nn03090710\nn03090856\nn03091044\nn03091223\nn03091374\nn03091907\nn03092053\nn03092166\nn03092314\nn03092476\nn03092656\nn03092883\nn03093427\nn03093792\nn03094159\nn03094503\nn03095965\nn03096439\nn03096960\nn03097362\nn03097535\nn03097673\nn03098140\nn03098515\nn03098688\nn03098806\nn03098959\nn03099147\nn03099274\nn03099454\nn03099622\nn03099771\nn03099945\nn03100346\nn03100490\nn03100897\nn03101156\nn03101302\nn03101375\nn03101517\nn03101664\nn03101796\nn03101986\nn03102371\nn03102516\nn03102654\nn03102859\nn03103128\nn03103396\nn03103563\nn03103904\nn03104019\nn03104512\nn03105088\nn03105214\nn03105306\nn03105467\nn03105645\nn03105810\nn03105974\nn03106722\nn03106898\nn03107046\nn03107488\nn03107716\nn03108455\nn03108624\nn03108759\nn03108853\nn03109033\nn03109253\nn03109693\nn03109881\nn03110202\nn03111041\nn03111177\nn03111296\nn03111690\nn03112240\nn03112719\nn03112869\nn03113152\nn03113505\nn03113657\nn03113835\nn03114041\nn03114236\nn03114379\nn03114504\nn03114743\nn03114839\nn03115014\nn03115180\nn03115400\nn03115663\nn03115762\nn03115897\nn03116008\nn03116163\nn03116530\nn03116767\nn03117199\nn03117642\nn03118346\nn03118969\nn03119203\nn03119396\nn03119510\nn03120198\nn03120491\nn03120778\nn03121040\nn03121190\nn03121298\nn03121431\nn03121897\nn03122073\nn03122202\nn03122295\nn03122748\nn03123553\nn03123666\nn03123809\nn03123917\nn03124313\nn03124474\nn03124590\nn03125057\nn03125588\nn03125870\nn03126090\nn03126385\nn03126580\nn03126927\nn03127024\nn03127203\nn03127408\nn03127531\nn03128085\nn03128248\nn03128427\nn03128519\nn03129001\nn03129471\nn03129636\nn03129753\nn03129848\nn03130066\nn03130233\nn03130563\nn03130761\nn03130866\nn03131193\nn03131669\nn03131967\nn03132076\nn03132261\nn03132438\nn03132666\nn03132776\nn03133050\nn03133415\nn03134118\nn03134232\nn03134394\nn03134853\nn03135030\nn03135532\nn03135656\nn03135788\nn03135917\nn03136051\nn03136254\nn03136369\nn03136504\nn03137473\nn03137579\n
n03138128\nn03138217\nn03138344\nn03138669\nn03139089\nn03139464\nn03139640\nn03139998\nn03140126\nn03140292\nn03140431\nn03140546\nn03140652\nn03140771\nn03140900\nn03141065\nn03141327\nn03141455\nn03141612\nn03141702\nn03142099\nn03142205\nn03142325\nn03142431\nn03142679\nn03143400\nn03143572\nn03143754\nn03144156\nn03144873\nn03144982\nn03145147\nn03145277\nn03145384\nn03145522\nn03145719\nn03145843\nn03146342\nn03146449\nn03146560\nn03146687\nn03146777\nn03146846\nn03147084\nn03147156\nn03147280\nn03147509\nn03148324\nn03148518\nn03148727\nn03148808\nn03149135\nn03149401\nn03149686\nn03149810\nn03150232\nn03150511\nn03150661\nn03150795\nn03151077\nn03152303\nn03152951\nn03153246\nn03153585\nn03153948\nn03154073\nn03154316\nn03154446\nn03154616\nn03154745\nn03154895\nn03155178\nn03155502\nn03155915\nn03156071\nn03156279\nn03156405\nn03156767\nn03157348\nn03158186\nn03158414\nn03158668\nn03158796\nn03158885\nn03159535\nn03159640\nn03160001\nn03160186\nn03160740\nn03161016\nn03161450\nn03161893\nn03162297\nn03162460\nn03162556\nn03162714\nn03162818\nn03163222\nn03163381\nn03163488\nn03163798\nn03163973\nn03164192\nn03164344\nn03164605\nn03164722\nn03164929\nn03165096\nn03165211\nn03165466\nn03165616\nn03165823\nn03165955\nn03166120\nn03166514\nn03166600\nn03166685\nn03166809\nn03166951\nn03167153\nn03167978\nn03168107\nn03168217\nn03168543\nn03168663\nn03168774\nn03168933\nn03169063\nn03169176\nn03170292\nn03170459\nn03170635\nn03170872\nn03171228\nn03171356\nn03171635\nn03171910\nn03172038\nn03172738\nn03172965\nn03173270\nn03173387\nn03173929\nn03174079\nn03174450\nn03174731\nn03175081\nn03175189\nn03175301\nn03175457\nn03175604\nn03175843\nn03175983\nn03176238\nn03176386\nn03176594\nn03176763\nn03177059\nn03177165\nn03177708\nn03178000\nn03178173\nn03178430\nn03178538\nn03178674\nn03179910\nn03180384\nn03180504\nn03180732\nn03180865\nn03180969\nn03181293\nn03181667\nn03182140\nn03182232\nn03182912\nn03183080\nn03185868\nn03186199\nn03186285\nn03186818\nn03187037
\nn03187153\nn03187268\nn03187751\nn03188290\nn03188725\nn03188871\nn03189083\nn03189311\nn03189818\nn03190458\nn03191286\nn03191451\nn03191561\nn03191776\nn03192543\nn03192907\nn03193107\nn03193260\nn03193423\nn03193597\nn03193754\nn03194170\nn03194297\nn03194812\nn03194992\nn03195332\nn03195485\nn03195799\nn03195959\nn03196062\nn03196324\nn03196598\nn03196990\nn03197201\nn03197446\nn03198223\nn03198500\nn03199358\nn03199488\nn03199647\nn03199775\nn03199901\nn03200231\nn03200357\nn03200539\nn03200701\nn03200906\nn03201035\nn03201529\nn03201638\nn03201776\nn03201895\nn03201996\nn03202354\nn03202481\nn03202760\nn03202940\nn03203089\nn03203806\nn03204134\nn03204306\nn03204436\nn03204558\nn03204955\nn03205143\nn03205304\nn03205458\nn03205574\nn03205669\nn03205903\nn03206023\nn03206158\nn03206282\nn03206405\nn03206602\nn03206718\nn03206908\nn03207305\nn03207548\nn03207630\nn03207835\nn03208556\nn03209359\nn03209477\nn03209666\nn03209910\nn03210245\nn03210372\nn03210552\nn03210683\nn03211117\nn03211413\nn03211616\nn03211789\nn03212114\nn03212247\nn03212406\nn03212811\nn03213014\nn03213361\nn03213538\nn03213715\nn03213826\nn03214253\nn03214450\nn03214582\nn03214966\nn03215076\nn03215191\nn03215337\nn03215508\nn03215749\nn03215930\nn03216199\nn03216402\nn03216562\nn03216710\nn03217653\nn03217739\nn03217889\nn03218446\nn03219010\nn03219135\nn03219483\nn03219612\nn03219859\nn03219966\nn03220095\nn03220237\nn03220692\nn03221059\nn03221351\nn03221540\nn03221720\nn03222176\nn03222318\nn03222516\nn03222722\nn03222857\nn03223162\nn03223441\nn03223553\nn03223686\nn03223923\nn03224490\nn03224603\nn03224753\nn03224893\nn03225108\nn03225458\nn03225616\nn03225777\nn03225988\nn03226090\nn03226254\nn03226375\nn03226538\nn03226880\nn03227010\nn03227184\nn03227317\nn03227721\nn03227856\nn03228016\nn03228254\nn03228365\nn03228533\nn03228692\nn03228796\nn03228967\nn03229115\nn03229244\nn03229526\nn03231160\nn03231368\nn03231819\nn03232309\nn03232417\nn03232543\nn03232815\nn03232923\nn032331
23\nn03233624\nn03233744\nn03233905\nn03234164\nn03234952\nn03235042\nn03235180\nn03235327\nn03235796\nn03235979\nn03236093\nn03236217\nn03236423\nn03236580\nn03236735\nn03237212\nn03237340\nn03237416\nn03237639\nn03237839\nn03237992\nn03238131\nn03238286\nn03238586\nn03238762\nn03238879\nn03239054\nn03239259\nn03239607\nn03239726\nn03240140\nn03240892\nn03241093\nn03241335\nn03241496\nn03241903\nn03242120\nn03242264\nn03242390\nn03242506\nn03242995\nn03243218\nn03243625\nn03244047\nn03244231\nn03244388\nn03244775\nn03244919\nn03245271\nn03245421\nn03245724\nn03245889\nn03246197\nn03246312\nn03246454\nn03246653\nn03246933\nn03247083\nn03247351\nn03247495\nn03248835\nn03249342\nn03249956\nn03250089\nn03250279\nn03250405\nn03250588\nn03250952\nn03251100\nn03251280\nn03251533\nn03251766\nn03251932\nn03252231\nn03252324\nn03252422\nn03252637\nn03252787\nn03253071\nn03253187\nn03253279\nn03253714\nn03253796\nn03253886\nn03254046\nn03254189\nn03254374\nn03254625\nn03254737\nn03254862\nn03255167\nn03255322\nn03255488\nn03255899\nn03256032\nn03256166\nn03256472\nn03256631\nn03256788\nn03256928\nn03257065\nn03257210\nn03257586\nn03258192\nn03258330\nn03258456\nn03258577\nn03258905\nn03259009\nn03259401\nn03259505\nn03260206\nn03260504\nn03260733\nn03260849\nn03261019\nn03261263\nn03261395\nn03261603\nn03261776\nn03262072\nn03262248\nn03262519\nn03262717\nn03262809\nn03262932\nn03263076\nn03263338\nn03263640\nn03263758\nn03264906\nn03265032\nn03265754\nn03266195\nn03266371\nn03266620\nn03266749\nn03267113\nn03267468\nn03267696\nn03267821\nn03268142\nn03268311\nn03268645\nn03268790\nn03268918\nn03269073\nn03269203\nn03269401\nn03270165\nn03270695\nn03270854\nn03271030\nn03271260\nn03271376\nn03271765\nn03271865\nn03272125\nn03272239\nn03272383\nn03272810\nn03272940\nn03273061\nn03273551\nn03273740\nn03273913\nn03274265\nn03274435\nn03274561\nn03274796\nn03275125\nn03275311\nn03275566\nn03275681\nn03275864\nn03276179\nn03276696\nn03276839\nn03277004\nn03277149\nn03277459\nn0327
7602\nn03277771\nn03278248\nn03278914\nn03279153\nn03279364\nn03279508\nn03279804\nn03279918\nn03280216\nn03280394\nn03280644\nn03281145\nn03281524\nn03281673\nn03282060\nn03282295\nn03282401\nn03283221\nn03283413\nn03283827\nn03284308\nn03284482\nn03284743\nn03284886\nn03284981\nn03285578\nn03285730\nn03285912\nn03286572\nn03287351\nn03287733\nn03288003\nn03288500\nn03288643\nn03288742\nn03288886\nn03289660\nn03289985\nn03290096\nn03290195\nn03291413\nn03291551\nn03291741\nn03291963\nn03292085\nn03292362\nn03292475\nn03292603\nn03292736\nn03292960\nn03293095\nn03293741\nn03293863\nn03294048\nn03294604\nn03294833\nn03295012\nn03295140\nn03295246\nn03295928\nn03296081\nn03296217\nn03296328\nn03296478\nn03296963\nn03297103\nn03297226\nn03297644\nn03297735\nn03298089\nn03298352\nn03298716\nn03298858\nn03299406\nn03300216\nn03300443\nn03301175\nn03301291\nn03301389\nn03301568\nn03301833\nn03301940\nn03302671\nn03302790\nn03302938\nn03303217\nn03303669\nn03303831\nn03304197\nn03304323\nn03304465\nn03305300\nn03305522\nn03305953\nn03306385\nn03306869\nn03307037\nn03307573\nn03307792\nn03308152\nn03308481\nn03308614\nn03309110\nn03309356\nn03309465\nn03309687\nn03309808\nn03313333\nn03314227\nn03314378\nn03314608\nn03314884\nn03315644\nn03315805\nn03315990\nn03316105\nn03316406\nn03316873\nn03317233\nn03317510\nn03317673\nn03317788\nn03317889\nn03318136\nn03318294\nn03318865\nn03318983\nn03319167\nn03319457\nn03319576\nn03319745\nn03320046\nn03320262\nn03320421\nn03320519\nn03320845\nn03320959\nn03321103\nn03321419\nn03321563\nn03321843\nn03321954\nn03322570\nn03322704\nn03322836\nn03322940\nn03323096\nn03323211\nn03323319\nn03323703\nn03324629\nn03324814\nn03324928\nn03325088\nn03325288\nn03325403\nn03325691\nn03325941\nn03326073\nn03326371\nn03326475\nn03326660\nn03326795\nn03326948\nn03327133\nn03327234\nn03327553\nn03327691\nn03327841\nn03328201\nn03329302\nn03329536\nn03329663\nn03330002\nn03330665\nn03330792\nn03330947\nn03331077\nn03331244\nn03331599\nn03332005\nn03
332173\nn03332271\nn03332393\nn03332591\nn03332784\nn03332989\nn03333129\nn03333252\nn03333349\nn03333610\nn03333711\nn03333851\nn03334017\nn03334291\nn03334382\nn03334492\nn03334912\nn03335030\nn03335333\nn03335461\nn03335846\nn03336168\nn03336282\nn03336575\nn03336742\nn03336839\nn03337383\nn03337494\nn03337822\nn03338287\nn03338821\nn03339296\nn03339529\nn03339643\nn03340009\nn03340723\nn03340923\nn03341035\nn03341153\nn03341297\nn03341606\nn03342015\nn03342127\nn03342262\nn03342432\nn03342657\nn03342863\nn03342961\nn03343047\nn03343234\nn03343354\nn03343560\nn03343737\nn03343853\nn03344305\nn03344509\nn03344642\nn03344784\nn03344935\nn03345837\nn03346135\nn03346289\nn03346455\nn03347472\nn03347617\nn03348142\nn03348868\nn03349020\nn03349296\nn03349367\nn03349469\nn03349599\nn03349771\nn03349892\nn03350204\nn03350352\nn03350456\nn03350602\nn03351151\nn03351262\nn03351434\nn03351979\nn03352232\nn03352366\nn03352628\nn03352961\nn03353281\nn03353951\nn03354207\nn03354903\nn03355468\nn03355768\nn03356038\nn03356279\nn03356446\nn03356559\nn03356858\nn03356982\nn03357081\nn03357267\nn03357716\nn03358172\nn03358380\nn03358726\nn03358841\nn03359137\nn03359285\nn03359436\nn03359566\nn03360133\nn03360300\nn03360431\nn03360622\nn03360731\nn03361109\nn03361297\nn03361380\nn03361550\nn03361683\nn03362639\nn03362771\nn03362890\nn03363363\nn03363549\nn03363749\nn03364008\nn03364156\nn03364599\nn03364937\nn03365231\nn03365374\nn03365592\nn03365991\nn03366464\nn03366721\nn03366823\nn03366974\nn03367059\nn03367321\nn03367410\nn03367545\nn03367875\nn03367969\nn03368048\nn03368352\nn03369276\nn03369407\nn03369512\nn03369866\nn03370387\nn03370646\nn03371875\nn03372549\nn03372822\nn03372933\nn03373237\nn03373611\nn03373943\nn03374102\nn03374282\nn03374372\nn03374473\nn03374570\nn03374649\nn03374838\nn03375171\nn03375329\nn03375575\nn03376159\nn03376279\nn03376771\nn03376938\nn03378005\nn03378174\nn03378342\nn03378442\nn03378593\nn03378765\nn03379204\nn03379343\nn03379719\nn03379828\nn
03379989\nn03380301\nn03380647\nn03380724\nn03380867\nn03381126\nn03381231\nn03381450\nn03381565\nn03381776\nn03382104\nn03382292\nn03382413\nn03382533\nn03382708\nn03382856\nn03382969\nn03383099\nn03383211\nn03383378\nn03383468\nn03383562\nn03383821\nn03384167\nn03384891\nn03385295\nn03385557\nn03386011\nn03386343\nn03386544\nn03386726\nn03386870\nn03387323\nn03387653\nn03388323\nn03388711\nn03388990\nn03389611\nn03389761\nn03389889\nn03389983\nn03390075\nn03390327\nn03390673\nn03390786\nn03390983\nn03391301\nn03391613\nn03391770\nn03392648\nn03392741\nn03393017\nn03393199\nn03393324\nn03393761\nn03394149\nn03394272\nn03394480\nn03394649\nn03395256\nn03395401\nn03395514\nn03395859\nn03396074\nn03396580\nn03396654\nn03396997\nn03397087\nn03397266\nn03397412\nn03397532\nn03397947\nn03398153\nn03398228\nn03399579\nn03399677\nn03399761\nn03399971\nn03400972\nn03401129\nn03401279\nn03401721\nn03402188\nn03402369\nn03402511\nn03402785\nn03402941\nn03403643\nn03404012\nn03404149\nn03404360\nn03404449\nn03404900\nn03405111\nn03405265\nn03405595\nn03405725\nn03406759\nn03406966\nn03407369\nn03407865\nn03408054\nn03408264\nn03408340\nn03408444\nn03409297\nn03409393\nn03409591\nn03409920\nn03410022\nn03410147\nn03410303\nn03410423\nn03410571\nn03410740\nn03410938\nn03411079\nn03411208\nn03411339\nn03411927\nn03412058\nn03412220\nn03412387\nn03412511\nn03412906\nn03413124\nn03413264\nn03413428\nn03413684\nn03413828\nn03414029\nn03414162\nn03414676\nn03415252\nn03415486\nn03415626\nn03415749\nn03415868\nn03416094\nn03416489\nn03416640\nn03416775\nn03416900\nn03417202\nn03417345\nn03417749\nn03417970\nn03418158\nn03418242\nn03418402\nn03418618\nn03418749\nn03418915\nn03419014\nn03420345\nn03420801\nn03420935\nn03421117\nn03421324\nn03421485\nn03421669\nn03421768\nn03421960\nn03422072\nn03422484\nn03422589\nn03422771\nn03423099\nn03423224\nn03423306\nn03423479\nn03423568\nn03423719\nn03423877\nn03424204\nn03424489\nn03424630\nn03424862\nn03425241\nn03425325\nn03425595\nn03425769\
nn03426134\nn03426285\nn03426462\nn03426574\nn03426871\nn03427202\nn03427296\nn03428090\nn03428226\nn03428349\nn03429003\nn03429137\nn03429288\nn03429682\nn03429771\nn03429914\nn03430091\nn03430313\nn03430418\nn03430551\nn03430959\nn03431243\nn03431570\nn03431745\nn03432061\nn03432129\nn03432360\nn03432509\nn03433247\nn03433637\nn03433877\nn03434188\nn03434285\nn03434830\nn03435593\nn03435743\nn03435991\nn03436075\nn03436182\nn03436417\nn03436549\nn03436656\nn03436772\nn03436891\nn03436990\nn03437184\nn03437295\nn03437430\nn03437581\nn03437741\nn03437829\nn03437941\nn03438071\nn03438257\nn03438661\nn03438780\nn03438863\nn03439348\nn03439631\nn03439814\nn03440216\nn03440682\nn03440876\nn03441112\nn03441345\nn03441465\nn03441582\nn03442288\nn03442487\nn03442597\nn03442756\nn03443005\nn03443149\nn03443543\nn03443912\nn03445326\nn03445617\nn03446070\nn03446268\nn03446832\nn03447075\nn03447358\nn03447894\nn03448031\nn03448590\nn03448696\nn03448956\nn03449217\nn03449309\nn03449451\nn03449564\nn03449858\nn03450516\nn03450734\nn03450881\nn03450974\nn03451120\nn03451253\nn03451365\nn03451711\nn03451798\nn03452267\nn03452449\nn03452594\nn03453231\nn03453320\nn03453443\nn03454110\nn03454211\nn03454442\nn03454536\nn03454707\nn03454885\nn03455355\nn03455488\nn03455642\nn03455802\nn03456024\nn03456186\nn03456299\nn03456447\nn03456548\nn03456665\nn03457008\nn03457451\nn03457686\nn03458271\nn03458422\nn03459328\nn03459591\nn03459914\nn03460040\nn03460147\nn03460297\nn03460455\nn03460899\nn03461288\nn03461651\nn03461882\nn03461988\nn03462110\nn03462315\nn03462747\nn03462972\nn03463185\nn03463381\nn03463666\nn03464053\nn03464467\nn03464628\nn03464952\nn03465040\nn03465151\nn03465320\nn03465426\nn03465500\nn03465605\nn03465718\nn03465818\nn03466162\nn03466493\nn03466600\nn03466839\nn03466947\nn03467254\nn03467380\nn03467517\nn03467796\nn03467887\nn03467984\nn03468570\nn03468696\nn03468821\nn03469031\nn03469175\nn03469493\nn03469832\nn03469903\nn03470005\nn03470222\nn03470387\nn0347062
9\nn03470948\nn03471030\nn03471190\nn03471347\nn03471779\nn03472232\nn03472535\nn03472672\nn03472796\nn03472937\nn03473078\nn03473227\nn03473465\nn03473817\nn03473966\nn03474167\nn03474352\nn03474779\nn03474896\nn03475581\nn03475674\nn03475823\nn03475961\nn03476083\nn03476313\nn03476542\nn03477143\nn03477303\nn03477410\nn03477512\nn03477773\nn03477902\nn03478756\nn03478907\nn03479121\nn03479266\nn03479397\nn03479502\nn03480579\nn03480719\nn03480973\nn03481521\nn03482001\nn03482128\nn03482252\nn03482523\nn03482877\nn03483086\nn03483230\nn03483531\nn03483637\nn03483823\nn03483971\nn03484083\nn03484487\nn03484576\nn03484809\nn03484931\nn03485198\nn03485309\nn03485575\nn03487090\nn03487331\nn03487444\nn03487533\nn03487642\nn03487774\nn03487886\nn03488111\nn03488188\nn03488438\nn03488603\nn03488784\nn03488887\nn03489048\nn03489162\nn03490006\nn03490119\nn03490324\nn03490449\nn03490649\nn03490784\nn03490884\nn03491032\nn03491724\nn03491988\nn03492087\nn03492250\nn03492922\nn03493219\nn03493792\nn03493911\nn03494537\nn03494706\nn03495039\nn03495570\nn03495671\nn03495941\nn03496183\nn03496296\nn03496486\nn03496612\nn03497100\nn03497352\nn03497657\nn03498441\nn03498536\nn03498662\nn03498781\nn03498866\nn03499354\nn03499468\nn03499907\nn03500090\nn03500209\nn03500295\nn03500389\nn03500457\nn03500557\nn03500699\nn03500838\nn03500971\nn03501152\nn03501288\nn03501520\nn03501614\nn03502200\nn03502331\nn03502509\nn03502777\nn03502897\nn03503097\nn03503233\nn03503358\nn03503477\nn03503567\nn03503718\nn03503997\nn03504205\nn03504293\nn03504723\nn03505015\nn03505133\nn03505383\nn03505504\nn03505667\nn03505764\nn03506028\nn03506184\nn03506370\nn03506560\nn03506727\nn03506880\nn03507241\nn03507458\nn03507658\nn03507963\nn03508101\nn03508485\nn03508881\nn03509394\nn03509608\nn03509843\nn03510072\nn03510244\nn03510384\nn03510487\nn03510583\nn03510866\nn03510987\nn03511175\nn03511333\nn03512030\nn03512147\nn03512452\nn03512624\nn03512911\nn03513137\nn03513376\nn03514129\nn03514340\nn03514
451\nn03514693\nn03514894\nn03515338\nn03515934\nn03516266\nn03516367\nn03516647\nn03516844\nn03516996\nn03517509\nn03517647\nn03517760\nn03517899\nn03517982\nn03518135\nn03518230\nn03518305\nn03518445\nn03518631\nn03518829\nn03518943\nn03519081\nn03519226\nn03519387\nn03519674\nn03519848\nn03520493\nn03521076\nn03521431\nn03521544\nn03521675\nn03521771\nn03521899\nn03522003\nn03522100\nn03522634\nn03522863\nn03522990\nn03523134\nn03523398\nn03523506\nn03523987\nn03524150\nn03524287\nn03524425\nn03524574\nn03524745\nn03524976\nn03525074\nn03525252\nn03525454\nn03525693\nn03525827\nn03526062\nn03527149\nn03527565\nn03527675\nn03528100\nn03528263\nn03528523\nn03528901\nn03529175\nn03529444\nn03529629\nn03530189\nn03530511\nn03530910\nn03531281\nn03531447\nn03531546\nn03531691\nn03531982\nn03532342\nn03532919\nn03533014\nn03533392\nn03533486\nn03533654\nn03533845\nn03534695\nn03534776\nn03535024\nn03535284\nn03535647\nn03536122\nn03536568\nn03536761\nn03537085\nn03537241\nn03537412\nn03537550\nn03538037\nn03538179\nn03538300\nn03538542\nn03538634\nn03538817\nn03538957\nn03539103\nn03539293\nn03539433\nn03539546\nn03539678\nn03539754\nn03540090\nn03540267\nn03540476\nn03540595\nn03540914\nn03541091\nn03541269\nn03541393\nn03541537\nn03541696\nn03541923\nn03542333\nn03542605\nn03542727\nn03542860\nn03543012\nn03543112\nn03543254\nn03543394\nn03543511\nn03543603\nn03543735\nn03543945\nn03544238\nn03544360\nn03545150\nn03545470\nn03545585\nn03545756\nn03545961\nn03546112\nn03546235\nn03546340\nn03547054\nn03547229\nn03547397\nn03547530\nn03547861\nn03548086\nn03548195\nn03548320\nn03548402\nn03548533\nn03548626\nn03548930\nn03549199\nn03549350\nn03549473\nn03549589\nn03549732\nn03549897\nn03550153\nn03550289\nn03550420\nn03551084\nn03551395\nn03551582\nn03551790\nn03552001\nn03552449\nn03552749\nn03553019\nn03553248\nn03553486\nn03554375\nn03554460\nn03554645\nn03555006\nn03555217\nn03555426\nn03555564\nn03555662\nn03555862\nn03555996\nn03556173\nn03556679\nn03556811\nn035
56992\nn03557270\nn03557360\nn03557590\nn03557692\nn03557840\nn03558007\nn03558176\nn03558404\nn03558633\nn03558739\nn03559373\nn03559531\nn03559999\nn03560430\nn03560860\nn03561047\nn03561169\nn03561573\nn03562565\nn03563200\nn03563460\nn03563710\nn03563967\nn03564849\nn03565288\nn03565565\nn03565710\nn03565830\nn03565991\nn03566193\nn03566329\nn03566555\nn03566730\nn03566860\nn03567066\nn03567635\nn03567788\nn03567912\nn03568117\nn03568818\nn03569014\nn03569174\nn03569293\nn03569494\nn03571280\nn03571439\nn03571625\nn03571853\nn03571942\nn03572107\nn03572205\nn03572321\nn03572631\nn03573574\nn03573848\nn03574243\nn03574416\nn03574555\nn03574816\nn03575958\nn03576215\nn03576443\nn03576955\nn03577090\nn03577312\nn03577474\nn03577672\nn03577818\nn03578055\nn03578251\nn03578656\nn03578981\nn03579538\nn03579982\nn03580518\nn03580615\nn03580845\nn03580990\nn03581125\nn03581531\nn03581897\nn03582508\nn03582959\nn03583419\nn03583621\nn03584400\nn03585073\nn03585337\nn03585438\nn03585551\nn03585682\nn03585778\nn03585875\nn03586219\nn03586631\nn03586911\nn03587205\nn03588216\nn03588841\nn03588951\nn03589313\nn03589513\nn03589672\nn03589791\nn03590306\nn03590475\nn03590588\nn03590932\nn03591116\nn03591313\nn03591592\nn03591798\nn03591901\nn03592245\nn03592669\nn03592773\nn03592931\nn03593122\nn03593222\nn03593526\nn03593862\nn03594010\nn03594148\nn03594277\nn03594523\nn03595055\nn03595264\nn03595409\nn03595523\nn03595860\nn03596099\nn03596285\nn03596543\nn03597147\nn03597317\nn03597916\nn03598151\nn03598299\nn03598385\nn03598515\nn03598646\nn03598783\nn03599964\nn03600285\nn03600475\nn03600722\nn03600977\nn03601442\nn03601638\nn03601840\nn03602081\nn03602194\nn03602365\nn03602686\nn03602790\nn03603442\nn03603594\nn03603722\nn03604156\nn03604311\nn03604400\nn03604536\nn03604629\nn03604763\nn03604843\nn03605417\nn03605504\nn03605598\nn03605722\nn03605915\nn03606106\nn03606251\nn03606347\nn03606465\nn03607029\nn03607186\nn03607527\nn03607659\nn03607923\nn03608504\nn03609147\nn0
3609235\nn03609397\nn03609542\nn03609786\nn03609959\nn03610098\nn03610418\nn03610524\nn03610682\nn03610836\nn03610992\nn03612010\nn03612814\nn03612965\nn03613294\nn03613592\nn03614007\nn03614383\nn03614532\nn03614782\nn03614887\nn03615300\nn03615406\nn03615563\nn03615655\nn03615790\nn03616091\nn03616225\nn03616428\nn03616763\nn03616979\nn03617095\nn03617312\nn03617594\nn03617834\nn03618101\nn03618339\nn03618546\nn03618678\nn03618797\nn03618982\nn03619050\nn03619196\nn03619275\nn03619396\nn03619650\nn03619793\nn03619890\nn03620052\nn03620353\nn03620967\nn03621049\nn03621377\nn03621694\nn03622058\nn03622401\nn03622526\nn03622839\nn03622931\nn03623338\nn03623556\nn03624134\nn03624400\nn03624767\nn03625355\nn03625539\nn03625646\nn03625943\nn03626115\nn03626272\nn03626418\nn03626502\nn03626760\nn03627954\nn03628071\nn03628215\nn03628421\nn03628511\nn03628728\nn03628831\nn03628984\nn03629100\nn03629231\nn03629520\nn03629643\nn03630262\nn03631177\nn03631811\nn03631922\nn03632100\nn03632577\nn03632729\nn03632852\nn03632963\nn03633341\nn03633632\nn03633886\nn03634034\nn03634899\nn03635032\nn03635108\nn03635330\nn03635516\nn03635668\nn03635932\nn03636248\nn03636649\nn03637027\nn03637181\nn03637480\nn03637787\nn03637898\nn03638014\nn03638180\nn03638623\nn03638743\nn03638883\nn03639077\nn03639230\nn03639497\nn03639675\nn03639880\nn03640850\nn03640988\nn03641569\nn03641947\nn03642144\nn03642341\nn03642444\nn03642573\nn03643149\nn03643253\nn03643491\nn03643737\nn03643907\nn03644073\nn03644378\nn03644858\nn03645011\nn03645168\nn03645290\nn03645577\nn03646020\nn03646148\nn03646296\nn03646809\nn03646916\nn03647423\nn03647520\nn03648219\nn03648431\nn03648667\nn03649003\nn03649161\nn03649288\nn03649674\nn03649797\nn03650551\nn03651388\nn03651605\nn03651843\nn03652100\nn03652389\nn03652729\nn03652826\nn03652932\nn03653110\nn03653220\nn03653454\nn03653583\nn03653740\nn03653833\nn03653975\nn03654576\nn03654826\nn03655072\nn03655470\nn03655720\nn03656484\nn03656957\nn03657239\nn03657511\n
n03658102\nn03658635\nn03658858\nn03659292\nn03659686\nn03659809\nn03659950\nn03660124\nn03660562\nn03660909\nn03661340\nn03662301\nn03662452\nn03662719\nn03662887\nn03663433\nn03663531\nn03663910\nn03664159\nn03664675\nn03664840\nn03664943\nn03665232\nn03665366\nn03665851\nn03665924\nn03666238\nn03666362\nn03666917\nn03667060\nn03667235\nn03667552\nn03667664\nn03667829\nn03668067\nn03668279\nn03668488\nn03668803\nn03669245\nn03669534\nn03669886\nn03671914\nn03672521\nn03672827\nn03673270\nn03673450\nn03673767\nn03674270\nn03674440\nn03674731\nn03674842\nn03675076\nn03675235\nn03675445\nn03675558\nn03675907\nn03676087\nn03676623\nn03676759\nn03677115\nn03677682\nn03677766\nn03678558\nn03678729\nn03678879\nn03679384\nn03679712\nn03680248\nn03680512\nn03680734\nn03680858\nn03680942\nn03681477\nn03681813\nn03682380\nn03682487\nn03682877\nn03683079\nn03683341\nn03683457\nn03683606\nn03683708\nn03683995\nn03684143\nn03684224\nn03684489\nn03684611\nn03684740\nn03684823\nn03685307\nn03685486\nn03685640\nn03685820\nn03686130\nn03686363\nn03686470\nn03686924\nn03687137\nn03687928\nn03688066\nn03688192\nn03688405\nn03688504\nn03688605\nn03688707\nn03688832\nn03688943\nn03689157\nn03689570\nn03690168\nn03690279\nn03690473\nn03690851\nn03691817\nn03692004\nn03692136\nn03692272\nn03692379\nn03692842\nn03693293\nn03693474\nn03693707\nn03693860\nn03694196\nn03694356\nn03694639\nn03694761\nn03694949\nn03695122\nn03695452\nn03695616\nn03695753\nn03695857\nn03695957\nn03696065\nn03696301\nn03696445\nn03696568\nn03696746\nn03696909\nn03697366\nn03697552\nn03697812\nn03697913\nn03698123\nn03698226\nn03698360\nn03698604\nn03698723\nn03698815\nn03699280\nn03699591\nn03699754\nn03699975\nn03700963\nn03701191\nn03701391\nn03701640\nn03701790\nn03702248\nn03702440\nn03702582\nn03703075\nn03703203\nn03703463\nn03703590\nn03703730\nn03703862\nn03703945\nn03704549\nn03704834\nn03705379\nn03705808\nn03706415\nn03706653\nn03706939\nn03707171\nn03707372\nn03707597\nn03707766\nn03708036\nn03708425
\nn03708843\nn03708962\nn03709206\nn03709363\nn03709545\nn03709644\nn03709960\nn03710079\nn03710294\nn03710421\nn03710528\nn03710937\nn03711044\nn03711711\nn03711999\nn03712111\nn03712337\nn03712444\nn03712887\nn03712981\nn03713069\nn03713151\nn03713436\nn03714235\nn03715114\nn03715275\nn03715386\nn03715669\nn03715892\nn03716228\nn03716887\nn03716966\nn03717131\nn03717285\nn03717447\nn03718212\nn03718335\nn03718458\nn03718581\nn03718699\nn03718789\nn03718935\nn03719053\nn03719343\nn03719560\nn03719743\nn03720005\nn03720163\nn03720665\nn03721047\nn03721252\nn03721590\nn03722007\nn03722288\nn03722646\nn03722944\nn03723153\nn03723267\nn03723439\nn03723781\nn03723885\nn03724066\nn03724176\nn03724417\nn03724538\nn03724623\nn03724756\nn03725035\nn03725506\nn03725600\nn03725717\nn03725869\nn03726116\nn03726233\nn03726371\nn03726516\nn03726760\nn03726993\nn03727067\nn03727465\nn03727605\nn03727837\nn03727946\nn03728437\nn03728982\nn03729131\nn03729308\nn03729402\nn03729482\nn03729647\nn03729951\nn03730153\nn03730334\nn03730494\nn03730655\nn03730788\nn03730893\nn03731019\nn03731483\nn03731695\nn03731882\nn03732020\nn03732114\nn03732458\nn03732543\nn03732658\nn03733465\nn03733547\nn03733644\nn03733925\nn03735637\nn03735963\nn03736064\nn03736147\nn03736269\nn03736372\nn03736470\nn03736970\nn03738066\nn03738241\nn03738472\nn03739518\nn03739693\nn03742019\nn03742238\nn03743279\nn03743902\nn03744276\nn03744684\nn03744840\nn03745146\nn03745487\nn03745571\nn03746005\nn03746155\nn03746330\nn03746486\nn03748162\nn03749504\nn03749634\nn03749807\nn03750206\nn03750437\nn03750614\nn03751065\nn03751269\nn03751458\nn03751590\nn03751757\nn03752071\nn03752185\nn03752398\nn03752922\nn03753077\nn03753514\nn03757604\nn03758089\nn03758220\nn03758894\nn03758992\nn03759243\nn03759432\nn03759661\nn03760310\nn03760671\nn03760944\nn03761588\nn03761731\nn03762238\nn03762332\nn03762434\nn03762602\nn03762982\nn03763727\nn03764276\nn03764606\nn03764822\nn03764995\nn03765128\nn03765467\nn03765561\nn037659
34\nn03766044\nn03766218\nn03766322\nn03766508\nn03766600\nn03766697\nn03766935\nn03767112\nn03767203\nn03767459\nn03767745\nn03767966\nn03768132\nn03768683\nn03768823\nn03768916\nn03769610\nn03769722\nn03770085\nn03770224\nn03770316\nn03770520\nn03770834\nn03770954\nn03772077\nn03772269\nn03772584\nn03772674\nn03773035\nn03773835\nn03774327\nn03774461\nn03775199\nn03775388\nn03775636\nn03775747\nn03775847\nn03776167\nn03776877\nn03776997\nn03777126\nn03778459\nn03778817\nn03779000\nn03779128\nn03779246\nn03779370\nn03779884\nn03780047\nn03780799\nn03781055\nn03781467\nn03781594\nn03781683\nn03781787\nn03782190\nn03782794\nn03782929\nn03783304\nn03783430\nn03783575\nn03783873\nn03784139\nn03784270\nn03784793\nn03784896\nn03785142\nn03785237\nn03785499\nn03785721\nn03786096\nn03786194\nn03786313\nn03786621\nn03786715\nn03787523\nn03788047\nn03788498\nn03788601\nn03788914\nn03789171\nn03789400\nn03789603\nn03789794\nn03789946\nn03790230\nn03790512\nn03790755\nn03790953\nn03791235\nn03792048\nn03792334\nn03792526\nn03793850\nn03794136\nn03794798\nn03795123\nn03795269\nn03795758\nn03795976\nn03796181\nn03796522\nn03796605\nn03796848\nn03796974\nn03797062\nn03797182\nn03797264\nn03797390\nn03797896\nn03798061\nn03798442\nn03798610\nn03798982\nn03799113\nn03799240\nn03799375\nn03799610\nn03799876\nn03800371\nn03800485\nn03800563\nn03800772\nn03800933\nn03801353\nn03801533\nn03801671\nn03801760\nn03801880\nn03802007\nn03802228\nn03802393\nn03802643\nn03802800\nn03802973\nn03803116\nn03803780\nn03804211\nn03805180\nn03805280\nn03805374\nn03805503\nn03805725\nn03805933\nn03807334\nn03809211\nn03809312\nn03809603\nn03809686\nn03809802\nn03810412\nn03810952\nn03811295\nn03811444\nn03811847\nn03811965\nn03812263\nn03812382\nn03812789\nn03812924\nn03813078\nn03813176\nn03813946\nn03814528\nn03814727\nn03814817\nn03815149\nn03815278\nn03815482\nn03815615\nn03816005\nn03816136\nn03816394\nn03816530\nn03816849\nn03817191\nn03817331\nn03817522\nn03817647\nn03818001\nn03818343\nn0381
9047\nn03819336\nn03819448\nn03819595\nn03819994\nn03820154\nn03820318\nn03820728\nn03820950\nn03821145\nn03821424\nn03821518\nn03822171\nn03822361\nn03822504\nn03822656\nn03822767\nn03823111\nn03823216\nn03823312\nn03823673\nn03823906\nn03824197\nn03824284\nn03824381\nn03824589\nn03824713\nn03824999\nn03825080\nn03825271\nn03825442\nn03825673\nn03825913\nn03826039\nn03826186\nn03827420\nn03827536\nn03828020\nn03829340\nn03829857\nn03829954\nn03831203\nn03831382\nn03831757\nn03832144\nn03833907\nn03834040\nn03834472\nn03834604\nn03835197\nn03835729\nn03835941\nn03836062\nn03836451\nn03836602\nn03836906\nn03836976\nn03837422\nn03837606\nn03837698\nn03838024\nn03838298\nn03838748\nn03839172\nn03839276\nn03839424\nn03839671\nn03839795\nn03840327\nn03840823\nn03841011\nn03841290\nn03841666\nn03842012\nn03842156\nn03842276\nn03842377\nn03842585\nn03842754\nn03842986\nn03843092\nn03843316\nn03843438\nn03843883\nn03844045\nn03844233\nn03844550\nn03844673\nn03844815\nn03844965\nn03845107\nn03845190\nn03845990\nn03846100\nn03846234\nn03846431\nn03846677\nn03846772\nn03846970\nn03847471\nn03847823\nn03848033\nn03848168\nn03848348\nn03848537\nn03849275\nn03849412\nn03849679\nn03849814\nn03849943\nn03850053\nn03850245\nn03850492\nn03850613\nn03851341\nn03851787\nn03852280\nn03852544\nn03852688\nn03853291\nn03853924\nn03854421\nn03854506\nn03854722\nn03854815\nn03855214\nn03855333\nn03855464\nn03855604\nn03855756\nn03855908\nn03856012\nn03856335\nn03856465\nn03856728\nn03857026\nn03857156\nn03857291\nn03857687\nn03858085\nn03858183\nn03858418\nn03858533\nn03858837\nn03859000\nn03859170\nn03859280\nn03859495\nn03859608\nn03859958\nn03860234\nn03860404\nn03861048\nn03861271\nn03861430\nn03861596\nn03861842\nn03862379\nn03862676\nn03862862\nn03863108\nn03863262\nn03863657\nn03863783\nn03863923\nn03864139\nn03864356\nn03864692\nn03865288\nn03865371\nn03865557\nn03865820\nn03865949\nn03867854\nn03868044\nn03868324\nn03868406\nn03868643\nn03868763\nn03869838\nn03869976\nn03870105\nn03
870290\nn03870546\nn03870672\nn03870980\nn03871083\nn03871371\nn03871524\nn03871724\nn03871860\nn03872016\nn03872167\nn03872273\nn03873699\nn03873848\nn03873996\nn03874138\nn03874487\nn03874823\nn03875218\nn03875806\nn03875955\nn03876111\nn03877351\nn03877674\nn03878066\nn03878211\nn03878294\nn03878418\nn03878511\nn03878674\nn03878828\nn03878963\nn03879456\nn03879705\nn03880032\nn03880129\nn03880323\nn03880531\nn03881305\nn03881404\nn03881534\nn03882611\nn03882960\nn03883054\nn03883385\nn03883524\nn03883664\nn03883773\nn03883944\nn03884554\nn03884639\nn03884778\nn03884926\nn03885028\nn03885194\nn03885293\nn03885410\nn03885535\nn03885669\nn03885788\nn03885904\nn03886053\nn03886641\nn03886762\nn03886940\nn03887185\nn03887330\nn03887512\nn03887899\nn03888022\nn03888808\nn03888998\nn03889397\nn03889503\nn03889626\nn03889726\nn03889871\nn03890093\nn03890233\nn03890358\nn03890514\nn03891051\nn03891538\nn03892178\nn03892425\nn03892557\nn03892728\nn03893935\nn03894051\nn03894379\nn03894677\nn03894933\nn03895038\nn03895170\nn03896103\nn03896233\nn03896419\nn03896526\nn03896628\nn03896984\nn03897130\nn03897634\nn03897943\nn03898129\nn03898271\nn03898395\nn03898633\nn03898787\nn03899100\nn03899612\nn03899933\nn03900028\nn03900194\nn03900301\nn03900393\nn03900979\nn03901229\nn03901338\nn03901750\nn03901974\nn03902220\nn03902482\nn03902756\nn03903133\nn03903290\nn03903424\nn03903733\nn03904060\nn03904183\nn03904433\nn03904657\nn03904782\nn03904909\nn03905361\nn03905540\nn03905730\nn03905947\nn03906106\nn03906224\nn03906463\nn03906590\nn03906789\nn03906894\nn03906997\nn03907475\nn03907654\nn03907908\nn03908111\nn03908204\nn03908456\nn03909020\nn03909160\nn03909406\nn03909516\nn03909658\nn03911406\nn03911513\nn03911658\nn03911767\nn03911866\nn03912218\nn03912821\nn03913343\nn03913930\nn03914106\nn03914337\nn03914438\nn03914583\nn03914831\nn03915118\nn03915320\nn03915437\nn03915900\nn03916289\nn03916385\nn03916470\nn03916720\nn03917048\nn03917198\nn03917327\nn03917814\nn03918074\nn
03918480\nn03918737\nn03919096\nn03919289\nn03919430\nn03919808\nn03920384\nn03920641\nn03920737\nn03920867\nn03923379\nn03923564\nn03923692\nn03923918\nn03924069\nn03924407\nn03924532\nn03926148\nn03926412\nn03926876\nn03927091\nn03927299\nn03927539\nn03927792\nn03928116\nn03928589\nn03928814\nn03928994\nn03929091\nn03929202\nn03929443\nn03930229\nn03930431\nn03930515\nn03931765\nn03931885\nn03931980\nn03932080\nn03932670\nn03933391\nn03934042\nn03934229\nn03934311\nn03934565\nn03934656\nn03934890\nn03935116\nn03935234\nn03935883\nn03936269\nn03936466\nn03937835\nn03937931\nn03938037\nn03938401\nn03938522\nn03938725\nn03939062\nn03939178\nn03939281\nn03939440\nn03939565\nn03939677\nn03939844\nn03940256\nn03940894\nn03941013\nn03941231\nn03941417\nn03941586\nn03941684\nn03941887\nn03942028\nn03942600\nn03942920\nn03943115\nn03943266\nn03943623\nn03943714\nn03943833\nn03943920\nn03944024\nn03944138\nn03945459\nn03945615\nn03945817\nn03945928\nn03946076\nn03946162\nn03947111\nn03947343\nn03947466\nn03947798\nn03948242\nn03948459\nn03948830\nn03948950\nn03949145\nn03949317\nn03949761\nn03950359\nn03950537\nn03950647\nn03950899\nn03951068\nn03951213\nn03951453\nn03951800\nn03951971\nn03952150\nn03952576\nn03953020\nn03953416\nn03953901\nn03954393\nn03955296\nn03955489\nn03955809\nn03955941\nn03956331\nn03956531\nn03956623\nn03956785\nn03956922\nn03957315\nn03957420\nn03957762\nn03957991\nn03958338\nn03958630\nn03958752\nn03959014\nn03959123\nn03959227\nn03959701\nn03960374\nn03960490\nn03961394\nn03961630\nn03961828\nn03961939\nn03962525\nn03962685\nn03962852\nn03962932\nn03963028\nn03963198\nn03963294\nn03963483\nn03963645\nn03964495\nn03964611\nn03965456\nn03965907\nn03966206\nn03966325\nn03966582\nn03966751\nn03966976\nn03967270\nn03967396\nn03967942\nn03968293\nn03968479\nn03968581\nn03968728\nn03969510\nn03970363\nn03970546\nn03971218\nn03971321\nn03971960\nn03972146\nn03972372\nn03972524\nn03973003\nn03973285\nn03973402\nn03973520\nn03973628\nn03973839\nn03973945\
nn03974070\nn03974915\nn03975035\nn03975657\nn03975788\nn03975926\nn03976105\nn03976268\nn03977158\nn03977266\nn03977430\nn03977592\nn03978421\nn03978575\nn03978686\nn03978815\nn03978966\nn03979377\nn03979492\nn03980026\nn03980478\nn03980986\nn03981094\nn03981340\nn03981566\nn03981760\nn03981924\nn03982232\nn03982331\nn03982642\nn03982767\nn03982895\nn03983499\nn03983612\nn03983712\nn03983928\nn03984125\nn03984234\nn03984381\nn03984643\nn03984759\nn03985069\nn03985232\nn03985441\nn03985881\nn03986071\nn03986224\nn03986355\nn03986562\nn03986704\nn03986857\nn03986949\nn03987266\nn03987376\nn03987674\nn03987865\nn03987990\nn03988170\nn03988758\nn03988926\nn03989199\nn03989349\nn03989447\nn03989665\nn03989777\nn03989898\nn03990474\nn03991202\nn03991321\nn03991443\nn03991646\nn03991837\nn03992325\nn03992436\nn03992703\nn03992975\nn03993053\nn03993180\nn03993403\nn03993703\nn03993878\nn03994008\nn03994297\nn03994417\nn03994614\nn03994757\nn03995018\nn03995265\nn03995535\nn03995661\nn03995856\nn03996004\nn03996145\nn03996416\nn03996849\nn03997274\nn03997484\nn03997875\nn03998333\nn03998673\nn03999064\nn03999160\nn03999621\nn03999992\nn04000311\nn04000480\nn04000592\nn04000716\nn04000998\nn04001132\nn04001265\nn04001397\nn04001499\nn04001661\nn04001845\nn04002262\nn04002371\nn04002629\nn04003241\nn04003359\nn04003856\nn04004099\nn04004210\nn04004475\nn04004990\nn04005197\nn04005912\nn04006067\nn04006227\nn04006330\nn04006411\nn04007415\nn04007664\nn04008385\nn04009801\nn04009923\nn04010057\nn04010779\nn04010927\nn04011827\nn04012084\nn04012482\nn04012665\nn04013060\nn04013176\nn04013600\nn04013729\nn04014297\nn04015204\nn04015786\nn04015908\nn04016240\nn04016479\nn04016576\nn04016684\nn04016846\nn04017571\nn04017807\nn04018155\nn04018399\nn04018667\nn04019101\nn04019335\nn04019696\nn04019881\nn04020087\nn04020298\nn04020744\nn04020912\nn04021028\nn04021164\nn04021362\nn04021503\nn04021704\nn04021798\nn04022332\nn04022434\nn04022708\nn04022866\nn04023021\nn04023119\nn0402324
9\nn04023422\nn04023695\nn04024137\nn04024274\nn04024862\nn04024983\nn04025508\nn04025633\nn04026053\nn04026180\nn04026813\nn04026918\nn04027023\nn04027367\nn04027706\nn04027820\nn04027935\nn04028074\nn04028221\nn04028315\nn04028581\nn04028764\nn04029416\nn04029647\nn04029734\nn04029913\nn04030054\nn04030161\nn04030274\nn04030414\nn04030518\nn04030846\nn04030965\nn04031884\nn04032509\nn04032603\nn04032936\nn04033287\nn04033425\nn04033557\nn04033801\nn04034262\nn04034367\nn04035231\nn04035634\nn04035748\nn04035836\nn04035912\nn04036155\nn04036303\nn04036776\nn04036963\nn04037076\nn04037220\nn04037298\nn04037873\nn04037964\nn04038231\nn04038338\nn04038440\nn04038727\nn04039041\nn04039209\nn04039742\nn04039848\nn04040247\nn04040373\nn04040540\nn04041069\nn04041243\nn04041408\nn04041747\nn04042076\nn04042204\nn04042358\nn04042632\nn04042795\nn04042985\nn04043168\nn04043411\nn04043733\nn04044307\nn04044498\nn04044955\nn04045085\nn04045255\nn04045397\nn04045644\nn04045787\nn04045941\nn04046091\nn04046277\nn04046400\nn04046590\nn04046974\nn04047139\nn04047401\nn04047733\nn04047834\nn04048441\nn04049405\nn04049585\nn04049753\nn04050066\nn04050313\nn04050600\nn04050933\nn04051269\nn04051439\nn04051549\nn04051705\nn04051825\nn04052235\nn04052346\nn04052442\nn04052658\nn04052757\nn04053508\nn04053677\nn04053767\nn04054361\nn04054566\nn04054670\nn04055180\nn04055447\nn04055700\nn04055861\nn04056073\nn04056180\nn04056413\nn04056932\nn04057047\nn04057215\nn04057435\nn04057673\nn04057846\nn04057981\nn04058096\nn04058239\nn04058486\nn04058594\nn04058721\nn04059157\nn04059298\nn04059399\nn04059516\nn04059947\nn04060198\nn04060448\nn04060647\nn04060904\nn04061681\nn04061793\nn04061969\nn04062179\nn04062428\nn04062644\nn04062807\nn04063154\nn04063373\nn04063868\nn04064213\nn04064401\nn04064747\nn04064862\nn04065464\nn04065789\nn04065909\nn04066023\nn04066270\nn04066388\nn04066476\nn04066767\nn04067143\nn04067231\nn04067353\nn04067658\nn04067818\nn04067921\nn04068441\nn04068601\nn04069
166\nn04069276\nn04069582\nn04069777\nn04070003\nn04070207\nn04070415\nn04070545\nn04070964\nn04071102\nn04071263\nn04071393\nn04072193\nn04072551\nn04072960\nn04073425\nn04073948\nn04074185\nn04075291\nn04075468\nn04075715\nn04075813\nn04075916\nn04076052\nn04076284\nn04076713\nn04077430\nn04077594\nn04077734\nn04077889\nn04078002\nn04078574\nn04078955\nn04079106\nn04079244\nn04079603\nn04079933\nn04080138\nn04080454\nn04080705\nn04080833\nn04081699\nn04081844\nn04082344\nn04082562\nn04082710\nn04082886\nn04083113\nn04083309\nn04083649\nn04083800\nn04084517\nn04084682\nn04084889\nn04085017\nn04085574\nn04085873\nn04086066\nn04086446\nn04086663\nn04086794\nn04086937\nn04087126\nn04087432\nn04087709\nn04087826\nn04088229\nn04088343\nn04088441\nn04088696\nn04088797\nn04089152\nn04089376\nn04089666\nn04089836\nn04089976\nn04090548\nn04090781\nn04091097\nn04091466\nn04091584\nn04091693\nn04092168\nn04093157\nn04093223\nn04093625\nn04093775\nn04093915\nn04094060\nn04094250\nn04094438\nn04094608\nn04094720\nn04094859\nn04095109\nn04095210\nn04095342\nn04095577\nn04095938\nn04096066\nn04096733\nn04096848\nn04097085\nn04097373\nn04097622\nn04097760\nn04097866\nn04098169\nn04098260\nn04098399\nn04098513\nn04098795\nn04099003\nn04099175\nn04099429\nn04100174\nn04100519\nn04101375\nn04101497\nn04101701\nn04101860\nn04102037\nn04102162\nn04102285\nn04102406\nn04102618\nn04102760\nn04102872\nn04102962\nn04103094\nn04103206\nn04103364\nn04103665\nn04103769\nn04103918\nn04104147\nn04104384\nn04104500\nn04104770\nn04104925\nn04105068\nn04105438\nn04105704\nn04105893\nn04107598\nn04107743\nn04107984\nn04108268\nn04108822\nn04108999\nn04110068\nn04110178\nn04110281\nn04110439\nn04110654\nn04110841\nn04110955\nn04111190\nn04111414\nn04111668\nn04111962\nn04112147\nn04112252\nn04112430\nn04112579\nn04112654\nn04112752\nn04112921\nn04113038\nn04113194\nn04113316\nn04113406\nn04113641\nn04113765\nn04113968\nn04114069\nn04114301\nn04114428\nn04114719\nn04114844\nn04114996\nn04115144\nn041
15256\nn04115456\nn04115542\nn04115802\nn04115996\nn04116098\nn04116294\nn04116389\nn04117216\nn04117464\nn04117639\nn04118021\nn04118635\nn04119091\nn04119230\nn04119360\nn04119478\nn04119630\nn04119751\nn04120695\nn04120842\nn04121228\nn04121342\nn04121426\nn04121511\nn04121728\nn04122262\nn04122349\nn04122492\nn04122578\nn04122685\nn04122825\nn04123026\nn04123123\nn04123228\nn04123317\nn04123448\nn04123567\nn04123740\nn04124098\nn04124202\nn04124370\nn04124488\nn04124573\nn04124887\nn04125116\nn04125257\nn04125541\nn04125692\nn04125853\nn04126066\nn04126244\nn04126541\nn04126659\nn04126852\nn04126980\nn04127117\nn04127395\nn04127521\nn04127633\nn04127904\nn04128413\nn04128499\nn04128710\nn04128837\nn04129490\nn04129688\nn04129766\nn04130143\nn04130257\nn04130566\nn04130907\nn04131015\nn04131113\nn04131208\nn04131368\nn04131499\nn04131811\nn04131929\nn04132158\nn04132465\nn04132603\nn04132829\nn04132985\nn04133114\nn04134008\nn04134170\nn04134523\nn04134632\nn04135024\nn04135118\nn04135315\nn04135710\nn04135933\nn04136045\nn04136161\nn04136510\nn04136800\nn04137089\nn04137217\nn04137355\nn04137444\nn04137773\nn04137897\nn04138131\nn04138261\nn04138869\nn04138977\nn04139140\nn04139395\nn04139859\nn04140064\nn04140539\nn04140631\nn04140777\nn04140853\nn04141198\nn04141712\nn04141838\nn04142175\nn04142327\nn04142434\nn04142731\nn04142999\nn04143140\nn04143365\nn04143897\nn04144241\nn04144539\nn04144651\nn04145863\nn04146050\nn04146343\nn04146504\nn04146862\nn04146976\nn04147291\nn04147495\nn04147793\nn04147916\nn04148054\nn04148285\nn04148464\nn04148579\nn04148703\nn04149083\nn04149374\nn04150153\nn04150273\nn04150371\nn04150980\nn04151108\nn04151581\nn04151940\nn04152387\nn04153025\nn04153330\nn04154152\nn04154340\nn04154753\nn04154854\nn04154938\nn04155068\nn04155177\nn04155457\nn04155625\nn04155735\nn04155889\nn04156040\nn04156140\nn04156297\nn04156411\nn04156591\nn04156814\nn04156946\nn04157099\nn04157320\nn04158002\nn04158138\nn04158250\nn04158672\nn04158807\nn0
4158956\nn04160036\nn04160261\nn04160372\nn04160586\nn04160847\nn04161010\nn04161358\nn04161981\nn04162433\nn04163530\nn04164002\nn04164199\nn04164406\nn04164757\nn04164868\nn04165409\nn04165675\nn04165945\nn04166111\nn04166281\nn04166436\nn04167346\nn04167489\nn04167661\nn04168084\nn04168199\nn04168472\nn04168541\nn04168840\nn04169437\nn04169597\nn04170037\nn04170384\nn04170515\nn04170694\nn04170933\nn04171208\nn04171459\nn04171629\nn04171831\nn04172107\nn04172230\nn04172342\nn04172512\nn04172607\nn04172776\nn04172904\nn04173046\nn04173172\nn04173511\nn04173907\nn04174026\nn04174101\nn04174234\nn04174500\nn04174705\nn04175039\nn04175147\nn04175574\nn04176068\nn04176190\nn04176295\nn04176528\nn04177041\nn04177329\nn04177545\nn04177654\nn04177755\nn04177820\nn04177931\nn04178190\nn04178329\nn04178668\nn04179126\nn04179712\nn04179824\nn04180063\nn04180229\nn04180888\nn04181083\nn04181228\nn04181561\nn04181718\nn04182152\nn04182322\nn04183217\nn04183329\nn04183957\nn04184095\nn04184316\nn04184435\nn04184600\nn04184880\nn04185071\nn04185529\nn04185804\nn04185946\nn04186051\nn04186268\nn04186455\nn04186624\nn04186848\nn04187061\nn04187233\nn04187547\nn04187751\nn04187885\nn04187970\nn04188064\nn04188179\nn04189092\nn04189282\nn04189651\nn04189816\nn04190052\nn04190376\nn04190464\nn04190747\nn04190997\nn04191150\nn04191595\nn04191943\nn04192238\nn04192361\nn04192521\nn04192858\nn04193179\nn04193377\nn04193742\nn04193883\nn04194009\nn04194127\nn04194289\nn04196080\nn04196502\nn04196803\nn04196925\nn04197110\nn04197391\nn04197781\nn04197878\nn04198015\nn04198233\nn04198355\nn04198453\nn04198562\nn04198722\nn04198797\nn04199027\nn04200000\nn04200258\nn04200537\nn04200908\nn04201064\nn04201733\nn04202142\nn04202282\nn04202417\nn04203356\nn04204081\nn04204755\nn04205062\nn04205318\nn04205505\nn04205613\nn04206070\nn04206225\nn04206356\nn04206570\nn04206790\nn04207151\nn04207343\nn04207596\nn04207763\nn04207903\nn04208065\nn04208427\nn04208582\nn04208760\nn04208936\nn04209509\n
n04209613\nn04209811\nn04210012\nn04210120\nn04210288\nn04210390\nn04210591\nn04210858\nn04211001\nn04211219\nn04211356\nn04211528\nn04211857\nn04211970\nn04212165\nn04212282\nn04212467\nn04212810\nn04213105\nn04213264\nn04213353\nn04213530\nn04214046\nn04214282\nn04214413\nn04214649\nn04215153\nn04215402\nn04215588\nn04215800\nn04215910\nn04216634\nn04216860\nn04216963\nn04217387\nn04217546\nn04217718\nn04217882\nn04218564\nn04218921\nn04219185\nn04219424\nn04219580\nn04220250\nn04220805\nn04221076\nn04221673\nn04221823\nn04222210\nn04222307\nn04222470\nn04222723\nn04222847\nn04223066\nn04223170\nn04223299\nn04224395\nn04224543\nn04224842\nn04225031\nn04225222\nn04225729\nn04225987\nn04226322\nn04226464\nn04226537\nn04226826\nn04226962\nn04227050\nn04227144\nn04227519\nn04227787\nn04227900\nn04228215\nn04228422\nn04228581\nn04228693\nn04229007\nn04229107\nn04229480\nn04229620\nn04229737\nn04229959\nn04230387\nn04230487\nn04230603\nn04230707\nn04230808\nn04231272\nn04231693\nn04231905\nn04232153\nn04232312\nn04232437\nn04232800\nn04233027\nn04233124\nn04233295\nn04233715\nn04233832\nn04234160\nn04234260\nn04234455\nn04234670\nn04234763\nn04234887\nn04235291\nn04235646\nn04235771\nn04236001\nn04236377\nn04236702\nn04236809\nn04236935\nn04237174\nn04237287\nn04237423\nn04238128\nn04238321\nn04238617\nn04238953\nn04239218\nn04239333\nn04239436\nn04239639\nn04239786\nn04239900\nn04240434\nn04240752\nn04240867\nn04241042\nn04241249\nn04241394\nn04241573\nn04242084\nn04242315\nn04242408\nn04242587\nn04242704\nn04243003\nn04243142\nn04243251\nn04243941\nn04244379\nn04244847\nn04244997\nn04245218\nn04245412\nn04245508\nn04245847\nn04246060\nn04246271\nn04246459\nn04246731\nn04246855\nn04247011\nn04247440\nn04247544\nn04247630\nn04247736\nn04247876\nn04248209\nn04248396\nn04248507\nn04248851\nn04249415\nn04249582\nn04249882\nn04250224\nn04250473\nn04250599\nn04250692\nn04250850\nn04251701\nn04251791\nn04252331\nn04252560\nn04252653\nn04253057\nn04253168\nn04253304\nn04253931
\nn04254009\nn04254450\nn04255163\nn04255346\nn04255499\nn04255586\nn04255670\nn04255768\nn04255899\nn04256318\nn04256520\nn04256758\nn04256891\nn04257223\nn04257684\nn04257790\nn04257986\nn04258333\nn04258438\nn04258618\nn04258732\nn04258859\nn04259202\nn04259468\nn04260192\nn04260364\nn04260589\nn04261116\nn04261281\nn04261369\nn04261506\nn04261638\nn04261767\nn04261868\nn04262161\nn04262530\nn04262678\nn04262869\nn04263336\nn04263502\nn04263760\nn04263950\nn04264134\nn04264233\nn04264361\nn04264485\nn04264765\nn04264914\nn04265428\nn04265904\nn04266162\nn04266375\nn04266486\nn04266849\nn04266968\nn04267091\nn04267165\nn04267246\nn04267435\nn04267577\nn04267985\nn04268142\nn04268275\nn04268418\nn04268565\nn04268799\nn04269086\nn04269270\nn04269502\nn04269668\nn04269822\nn04269944\nn04270371\nn04270576\nn04270891\nn04271148\nn04271531\nn04271793\nn04271891\nn04272054\nn04272389\nn04272782\nn04272928\nn04273064\nn04273285\nn04273659\nn04273796\nn04273972\nn04274686\nn04274985\nn04275093\nn04275175\nn04275283\nn04275661\nn04275904\nn04277493\nn04277669\nn04277826\nn04278247\nn04278353\nn04278447\nn04278605\nn04278932\nn04279063\nn04279172\nn04279353\nn04279462\nn04279858\nn04279987\nn04280259\nn04280373\nn04280487\nn04280845\nn04280970\nn04281260\nn04281375\nn04281571\nn04281998\nn04282231\nn04282494\nn04282872\nn04282992\nn04283096\nn04283255\nn04283378\nn04283585\nn04283784\nn04283905\nn04284002\nn04284341\nn04284438\nn04284572\nn04284869\nn04285146\nn04285622\nn04285803\nn04285965\nn04286128\nn04286960\nn04287351\nn04287451\nn04287747\nn04287898\nn04287986\nn04288165\nn04288272\nn04288533\nn04288673\nn04289027\nn04289195\nn04289449\nn04289576\nn04289690\nn04289827\nn04290079\nn04290259\nn04290507\nn04290615\nn04290762\nn04291069\nn04291242\nn04291759\nn04291992\nn04292080\nn04292221\nn04292414\nn04292572\nn04292921\nn04293119\nn04293258\nn04293744\nn04294212\nn04294426\nn04294614\nn04294879\nn04295081\nn04295353\nn04295571\nn04295777\nn04295881\nn04297098\nn042977
50\nn04297847\nn04298053\nn04298661\nn04298765\nn04299215\nn04299370\nn04299963\nn04300358\nn04300509\nn04300643\nn04301000\nn04301242\nn04301474\nn04301760\nn04302200\nn04302863\nn04302988\nn04303095\nn04303258\nn04303357\nn04303497\nn04304215\nn04304375\nn04304680\nn04305016\nn04305210\nn04305323\nn04305471\nn04305572\nn04305947\nn04306080\nn04306592\nn04306847\nn04307419\nn04307767\nn04307878\nn04307986\nn04308084\nn04308273\nn04308397\nn04308583\nn04308807\nn04308915\nn04309049\nn04309348\nn04309548\nn04309833\nn04310157\nn04310507\nn04310604\nn04310721\nn04310904\nn04311595\nn04312020\nn04312154\nn04312432\nn04312654\nn04312756\nn04312916\nn04313220\nn04313503\nn04313628\nn04314107\nn04314216\nn04314522\nn04314632\nn04314914\nn04315342\nn04315713\nn04315828\nn04315948\nn04316498\nn04316815\nn04316924\nn04317063\nn04317325\nn04317420\nn04317833\nn04317976\nn04318131\nn04318787\nn04318892\nn04318982\nn04319545\nn04319774\nn04319937\nn04320405\nn04320598\nn04320871\nn04320973\nn04321121\nn04321453\nn04322026\nn04322531\nn04322692\nn04322801\nn04323519\nn04323819\nn04324120\nn04324297\nn04324387\nn04324515\nn04325041\nn04325208\nn04325804\nn04325968\nn04326676\nn04326799\nn04326896\nn04327204\nn04327544\nn04327682\nn04328054\nn04328329\nn04328580\nn04328703\nn04328946\nn04329477\nn04329681\nn04329834\nn04329958\nn04330109\nn04330189\nn04330340\nn04330669\nn04330746\nn04330896\nn04330998\nn04331277\nn04331443\nn04331639\nn04331765\nn04331892\nn04332074\nn04332580\nn04332987\nn04333129\nn04333869\nn04334105\nn04334365\nn04334504\nn04334599\nn04335209\nn04335693\nn04335886\nn04337157\nn04337287\nn04337503\nn04337650\nn04338517\nn04338963\nn04339062\nn04339191\nn04339638\nn04339879\nn04340019\nn04340521\nn04340750\nn04340935\nn04341133\nn04341288\nn04341414\nn04341686\nn04343511\nn04343630\nn04343740\nn04344003\nn04344734\nn04345028\nn04345201\nn04345787\nn04346003\nn04346157\nn04346428\nn04346511\nn04346679\nn04346855\nn04347119\nn04347519\nn04348070\nn04348184\nn0434
8359\nn04348988\nn04349189\nn04349306\nn04349401\nn04349913\nn04350104\nn04350235\nn04350458\nn04350581\nn04350688\nn04350769\nn04351550\nn04351699\nn04353573\nn04354026\nn04354182\nn04354387\nn04354487\nn04354589\nn04355115\nn04355267\nn04355511\nn04355684\nn04355821\nn04356595\nn04356772\nn04356925\nn04357121\nn04357531\nn04357930\nn04358117\nn04358256\nn04358491\nn04358707\nn04358874\nn04359034\nn04359124\nn04359217\nn04359335\nn04359500\nn04359589\nn04360501\nn04360798\nn04360914\nn04361095\nn04361260\nn04361937\nn04362624\nn04362821\nn04362972\nn04363082\nn04363210\nn04363412\nn04363671\nn04363777\nn04363874\nn04363991\nn04364160\nn04364397\nn04364545\nn04364827\nn04364994\nn04365112\nn04365229\nn04365328\nn04365484\nn04365751\nn04366033\nn04366116\nn04366832\nn04367011\nn04367371\nn04367746\nn04367950\nn04368109\nn04368235\nn04368365\nn04368496\nn04368695\nn04368840\nn04369025\nn04369282\nn04369485\nn04369618\nn04370048\nn04370288\nn04370600\nn04370774\nn04370955\nn04371050\nn04371563\nn04371979\nn04373089\nn04373428\nn04373563\nn04373704\nn04373795\nn04373894\nn04374315\nn04374521\nn04374735\nn04374907\nn04375080\nn04375241\nn04375405\nn04375615\nn04375775\nn04375926\nn04376400\nn04377057\nn04378489\nn04378651\nn04378956\nn04379096\nn04379243\nn04379964\nn04380255\nn04380346\nn04380916\nn04381073\nn04381450\nn04381587\nn04381724\nn04381860\nn04381994\nn04382334\nn04382438\nn04382537\nn04382695\nn04382880\nn04383015\nn04383130\nn04383301\nn04383839\nn04383923\nn04384593\nn04384910\nn04385079\nn04385157\nn04385536\nn04385799\nn04386051\nn04386456\nn04386664\nn04386792\nn04387095\nn04387201\nn04387261\nn04387400\nn04387531\nn04387706\nn04387932\nn04388040\nn04388162\nn04388473\nn04388574\nn04388743\nn04389430\nn04389521\nn04389718\nn04389854\nn04389999\nn04390483\nn04390577\nn04390873\nn04390977\nn04391445\nn04391838\nn04392113\nn04392526\nn04392764\nn04393095\nn04393301\nn04393549\nn04393808\nn04393913\nn04394031\nn04394261\nn04394421\nn04394630\nn04395024\nn04
395106\nn04395332\nn04395651\nn04395875\nn04396226\nn04396335\nn04396650\nn04396808\nn04396902\nn04397027\nn04397168\nn04397261\nn04397452\nn04397645\nn04397768\nn04397860\nn04398497\nn04398688\nn04398834\nn04398951\nn04399046\nn04399158\nn04399537\nn04399846\nn04400109\nn04400289\nn04400499\nn04400737\nn04400899\nn04401088\nn04401578\nn04401680\nn04401828\nn04401949\nn04402057\nn04402342\nn04402449\nn04402580\nn04402746\nn04402984\nn04403413\nn04403524\nn04403638\nn04403925\nn04404072\nn04404200\nn04404817\nn04404997\nn04405540\nn04405762\nn04405907\nn04406239\nn04406552\nn04406687\nn04406817\nn04407257\nn04407435\nn04407686\nn04408871\nn04409011\nn04409128\nn04409279\nn04409384\nn04409625\nn04409806\nn04409911\nn04410086\nn04410365\nn04410485\nn04410565\nn04410663\nn04410760\nn04410886\nn04411019\nn04411264\nn04411835\nn04411966\nn04412097\nn04412300\nn04412416\nn04413151\nn04413419\nn04413969\nn04414101\nn04414199\nn04414319\nn04414476\nn04414675\nn04414909\nn04415257\nn04415663\nn04415815\nn04416005\nn04416901\nn04417086\nn04417180\nn04417361\nn04417809\nn04418644\nn04419073\nn04419642\nn04419868\nn04420024\nn04420720\nn04421083\nn04421258\nn04421417\nn04421582\nn04421740\nn04421872\nn04422409\nn04422566\nn04422727\nn04422875\nn04423552\nn04423687\nn04424692\nn04425804\nn04425977\nn04426184\nn04426316\nn04426427\nn04427216\nn04427473\nn04427559\nn04427715\nn04427857\nn04428008\nn04428382\nn04428634\nn04429038\nn04430475\nn04430605\nn04430896\nn04431025\nn04431436\nn04431648\nn04431745\nn04431925\nn04432043\nn04432203\nn04432662\nn04432785\nn04433377\nn04433585\nn04434207\nn04434531\nn04434932\nn04435180\nn04435552\nn04435759\nn04435870\nn04436012\nn04436185\nn04436329\nn04436401\nn04436542\nn04436832\nn04436992\nn04437276\nn04437380\nn04437670\nn04437953\nn04438304\nn04438507\nn04438643\nn04438897\nn04439505\nn04439585\nn04439712\nn04440597\nn04440963\nn04441093\nn04441528\nn04441662\nn04441790\nn04442441\nn04442582\nn04442741\nn04443164\nn04443433\nn04443766\nn
04444121\nn04444218\nn04444749\nn04444953\nn04445040\nn04445154\nn04445327\nn04445610\nn04445782\nn04445952\nn04446162\nn04446276\nn04446844\nn04447028\nn04447156\nn04447276\nn04447443\nn04448070\nn04448185\nn04448361\nn04449290\nn04449449\nn04449550\nn04449700\nn04449966\nn04450133\nn04450243\nn04450465\nn04450640\nn04450749\nn04450994\nn04451139\nn04451318\nn04451636\nn04451818\nn04452528\nn04452615\nn04452757\nn04452848\nn04453037\nn04453156\nn04453390\nn04453666\nn04453910\nn04454654\nn04454792\nn04454908\nn04455048\nn04455250\nn04455579\nn04455652\nn04456011\nn04456472\nn04456734\nn04457157\nn04457326\nn04457474\nn04457638\nn04457767\nn04457910\nn04458201\nn04458843\nn04459018\nn04459122\nn04459243\nn04459362\nn04459610\nn04459773\nn04459909\nn04460130\nn04461437\nn04461570\nn04461879\nn04462011\nn04462576\nn04463679\nn04464125\nn04464615\nn04464852\nn04465050\nn04465203\nn04465358\nn04465666\nn04466871\nn04467099\nn04467307\nn04467506\nn04467899\nn04468005\nn04469003\nn04469251\nn04469514\nn04469684\nn04469813\nn04470741\nn04471148\nn04471315\nn04471632\nn04471912\nn04472243\nn04472563\nn04472726\nn04472961\nn04473108\nn04473275\nn04473884\nn04474035\nn04474187\nn04474466\nn04475309\nn04475411\nn04475496\nn04475631\nn04475749\nn04475900\nn04476116\nn04476526\nn04476831\nn04476972\nn04477219\nn04477387\nn04477548\nn04477725\nn04478066\nn04478383\nn04478512\nn04478657\nn04479287\nn04479405\nn04479526\nn04479694\nn04479823\nn04479939\nn04480033\nn04480141\nn04480303\nn04480527\nn04480853\nn04480995\nn04481524\nn04481642\nn04482177\nn04482297\nn04482975\nn04483073\nn04483925\nn04484024\nn04484432\nn04485423\nn04485586\nn04485750\nn04485884\nn04486213\nn04486322\nn04486616\nn04486934\nn04487724\nn04487894\nn04488202\nn04488427\nn04488530\nn04488742\nn04488857\nn04489008\nn04489695\nn04489817\nn04490091\nn04491312\nn04491388\nn04491638\nn04491769\nn04491934\nn04492060\nn04492157\nn04492375\nn04492749\nn04493109\nn04493259\nn04494204\nn04495051\nn04495183\nn04495310\
nn04495450\nn04495555\nn04495698\nn04495843\nn04496614\nn04496726\nn04496872\nn04497249\nn04497442\nn04497570\nn04497801\nn04498275\nn04498389\nn04498523\nn04498873\nn04499062\nn04499300\nn04499446\nn04499554\nn04499810\nn04500060\nn04500390\nn04501127\nn04501281\nn04501550\nn04501837\nn04501947\nn04502059\nn04502197\nn04502502\nn04502670\nn04502851\nn04502989\nn04503073\nn04503155\nn04503269\nn04503413\nn04503499\nn04503593\nn04503705\nn04504038\nn04504141\nn04504770\nn04505036\nn04505345\nn04505888\nn04506289\nn04506402\nn04506506\nn04506688\nn04506895\nn04506994\nn04507326\nn04507453\nn04507689\nn04508163\nn04508489\nn04508949\nn04509171\nn04509260\nn04509592\nn04510706\nn04511002\nn04513827\nn04513998\nn04514095\nn04514241\nn04514648\nn04515444\nn04515729\nn04515890\nn04516116\nn04516214\nn04516354\nn04516672\nn04517211\nn04517408\nn04517999\nn04518132\nn04518343\nn04518643\nn04518764\nn04519153\nn04519536\nn04519728\nn04519887\nn04520170\nn04520382\nn04520784\nn04520962\nn04521571\nn04521863\nn04521987\nn04523831\nn04524142\nn04524313\nn04524594\nn04524716\nn04524941\nn04525191\nn04525417\nn04525584\nn04525821\nn04526520\nn04526800\nn04526964\nn04527648\nn04528079\nn04528968\nn04529108\nn04529681\nn04529962\nn04530283\nn04530456\nn04530566\nn04531098\nn04531873\nn04532022\nn04532398\nn04532504\nn04532831\nn04533042\nn04533199\nn04533499\nn04533594\nn04533700\nn04533802\nn04533946\nn04534127\nn04534359\nn04534520\nn04534895\nn04535252\nn04535370\nn04535524\nn04536153\nn04536335\nn04536465\nn04536595\nn04536765\nn04537436\nn04538249\nn04538403\nn04538552\nn04538878\nn04539053\nn04539203\nn04539407\nn04539794\nn04540255\nn04540397\nn04540761\nn04541136\nn04541320\nn04541662\nn04541777\nn04541987\nn04542095\nn04542329\nn04542474\nn04542595\nn04542715\nn04542858\nn04543158\nn04543509\nn04543636\nn04543772\nn04543924\nn04543996\nn04544325\nn04544450\nn04545305\nn04545471\nn04545748\nn04545858\nn04545984\nn04546081\nn04546194\nn04546340\nn04546595\nn04546855\nn0454759
2\nn04549028\nn04549122\nn04549629\nn04549721\nn04549919\nn04550676\nn04551055\nn04551833\nn04552097\nn04552551\nn04552696\nn04553389\nn04553561\nn04554211\nn04554406\nn04554871\nn04554998\nn04555291\nn04555400\nn04555600\nn04555700\nn04555897\nn04556408\nn04556533\nn04556664\nn04556948\nn04557308\nn04557522\nn04557751\nn04558059\nn04558199\nn04558478\nn04558804\nn04559023\nn04559166\nn04559451\nn04559620\nn04559730\nn04559910\nn04559994\nn04560113\nn04560292\nn04560502\nn04560619\nn04560882\nn04561010\nn04561287\nn04561422\nn04561734\nn04561857\nn04561965\nn04562122\nn04562262\nn04562496\nn04563020\nn04563204\nn04563413\nn04563560\nn04563790\nn04564278\nn04564581\nn04565039\nn04565375\nn04566257\nn04566561\nn04566756\nn04567098\nn04567593\nn04567746\nn04568069\nn04568557\nn04568713\nn04568841\nn04569063\nn04569520\nn04569822\nn04570118\nn04570214\nn04570416\nn04570532\nn04570815\nn04570958\nn04571292\nn04571566\nn04571686\nn04571800\nn04571958\nn04572121\nn04572235\nn04572935\nn04573045\nn04573281\nn04573379\nn04573513\nn04573625\nn04573832\nn04573937\nn04574067\nn04574348\nn04574471\nn04574606\nn04574999\nn04575723\nn04575824\nn04576002\nn04576211\nn04576971\nn04577139\nn04577293\nn04577426\nn04577567\nn04577769\nn04578112\nn04578329\nn04578559\nn04578708\nn04578801\nn04578934\nn04579056\nn04579230\nn04579667\nn04579986\nn04580493\nn04581102\nn04581595\nn04581829\nn04582205\nn04582349\nn04582771\nn04582869\nn04583022\nn04583212\nn04583620\nn04583888\nn04583967\nn04584056\nn04584373\nn04585128\nn04585318\nn04585456\nn04585626\nn04585745\nn04585980\nn04586072\nn04586581\nn04586932\nn04587327\nn04587404\nn04587559\nn04587648\nn04588739\nn04589190\nn04589325\nn04589434\nn04589593\nn04590021\nn04590263\nn04590553\nn04590746\nn04590933\nn04591056\nn04591249\nn04591359\nn04591517\nn04591631\nn04591887\nn04592005\nn04592099\nn04592356\nn04592465\nn04592596\nn04593077\nn04593185\nn04593376\nn04593524\nn04593629\nn04593866\nn04594114\nn04594218\nn04594489\nn04594742\nn04594
828\nn04594919\nn04595028\nn04595285\nn04595501\nn04595611\nn04595762\nn04595855\nn04596116\nn04596492\nn04596852\nn04597066\nn04597309\nn04597400\nn04597804\nn04598136\nn04598318\nn04598416\nn04598582\nn04598965\nn04599124\nn04600312\nn04600486\nn04600912\nn04601041\nn04601159\nn04601938\nn04602762\nn04602840\nn04602956\nn04603399\nn04603729\nn04603872\nn04604276\nn04604806\nn04605057\nn04605163\nn04605321\nn04605446\nn04605572\nn04605726\nn04606574\nn04607035\nn04607242\nn04607640\nn04607759\nn04607869\nn04607982\nn04608329\nn04608435\nn04608567\nn04608809\nn04608923\nn04609531\nn04609651\nn04609811\nn04610013\nn04610176\nn04610274\nn04610503\nn04610676\nn04611351\nn04611795\nn04611916\nn04612026\nn04612159\nn04612257\nn04612373\nn04612840\nn04613015\nn04613158\nn04613939\nn04614505\nn04614655\nn04614844\nn04615149\nn04615226\nn04615644\nn04682018\nn04950713\nn04950952\nn04951071\nn04951186\nn04951373\nn04951716\nn04951875\nn04953296\nn04953678\nn04955160\nn04957356\nn04957589\nn04958634\nn04958865\nn04959061\nn04959230\nn04959672\nn04960277\nn04960582\nn04961062\nn04961331\nn04961691\nn04962062\nn04962240\nn04963111\nn04963307\nn04963588\nn04963740\nn04964001\nn04964799\nn04964878\nn04965179\nn04965451\nn04965661\nn04966543\nn04966941\nn04967191\nn04967561\nn04967674\nn04967801\nn04967882\nn04968056\nn04968139\nn04968749\nn04968895\nn04969242\nn04969540\nn04969798\nn04969952\nn04970059\nn04970312\nn04970398\nn04970470\nn04970631\nn04970916\nn04971211\nn04971313\nn04972350\nn04972451\nn04972801\nn04973020\nn04973291\nn04973386\nn04973585\nn04973669\nn04973816\nn04974145\nn04974340\nn04974859\nn04975739\nn04976319\nn04976952\nn04977412\nn04978561\nn04979002\nn04979307\nn04981658\nn05102764\nn05218119\nn05233741\nn05235879\nn05238282\nn05239437\nn05241218\nn05241485\nn05241662\nn05242070\nn05242239\nn05242928\nn05244421\nn05244755\nn05244934\nn05245192\nn05257476\nn05257967\nn05258051\nn05258627\nn05259914\nn05260127\nn05260240\nn05261310\nn05262422\nn05262534\nn052
62698\nn05263183\nn05263316\nn05263448\nn05265736\nn05266096\nn05266879\nn05278922\nn05279953\nn05282652\nn05285623\nn05302499\nn05314075\nn05399034\nn05399243\nn05399356\nn05418717\nn05427346\nn05442594\nn05447757\nn05448704\nn05448827\nn05449196\nn05449661\nn05449959\nn05450617\nn05451099\nn05451384\nn05453412\nn05453657\nn05453815\nn05454833\nn05454978\nn05455113\nn05458173\nn05458576\nn05459101\nn05459457\nn05459769\nn05460759\nn05464534\nn05467054\nn05467758\nn05468098\nn05468739\nn05469664\nn05469861\nn05475397\nn05482922\nn05486510\nn05491154\nn05526957\nn05538625\nn05539947\nn05541509\nn05542893\nn05545879\nn05571341\nn05578095\nn05581932\nn05584746\nn05586759\nn05604434\nn05716342\nn06008896\nn06209940\nn06254669\nn06255081\nn06255613\nn06259898\nn06262567\nn06262943\nn06263202\nn06263369\nn06263609\nn06263762\nn06263895\nn06266417\nn06266633\nn06266710\nn06266878\nn06266973\nn06267145\nn06267564\nn06267655\nn06267758\nn06267893\nn06267991\nn06271778\nn06272290\nn06272612\nn06272803\nn06273207\nn06273294\nn06273414\nn06273555\nn06273743\nn06273890\nn06273986\nn06274092\nn06274292\nn06274546\nn06274760\nn06274921\nn06275095\nn06275353\nn06275471\nn06276501\nn06276697\nn06276902\nn06277025\nn06277135\nn06277280\nn06278338\nn06278475\nn06281040\nn06281175\nn06340977\nn06359467\nn06359657\nn06415688\nn06417096\nn06418693\nn06419354\nn06423496\nn06470073\nn06591815\nn06592078\nn06592281\nn06592421\nn06595351\nn06596179\nn06596474\nn06596607\nn06596727\nn06596845\nn06613686\nn06614901\nn06616216\nn06618653\nn06625062\nn06793231\nn06883725\nn06892775\nn06998748\nn07005523\nn07273802\nn07461050\nn07556406\nn07556637\nn07556872\nn07556970\nn07557165\nn07557434\nn07560193\nn07560331\nn07560422\nn07560542\nn07560652\nn07560903\nn07561112\nn07561590\nn07561848\nn07562017\nn07562172\nn07562379\nn07562495\nn07562651\nn07562881\nn07562984\nn07563207\nn07563366\nn07563642\nn07563800\nn07564008\nn07564101\nn07564292\nn07564515\nn07564629\nn07564796\nn07564971\nn07565161\nn0
7565259\nn07565608\nn07565725\nn07565945\nn07566092\nn07566231\nn07566340\nn07566863\nn07567039\nn07567139\nn07567390\nn07567611\nn07567707\nn07567980\nn07568095\nn07568241\nn07568389\nn07568502\nn07568625\nn07568818\nn07568991\nn07569106\nn07569423\nn07569543\nn07569644\nn07569873\nn07570021\nn07570530\nn07570720\nn07572353\nn07572616\nn07572858\nn07572957\nn07573103\nn07573347\nn07573453\nn07573563\nn07573696\nn07574176\nn07574426\nn07574504\nn07574602\nn07574780\nn07574923\nn07575076\nn07575226\nn07575392\nn07575510\nn07575726\nn07575984\nn07576182\nn07576438\nn07576577\nn07576781\nn07576969\nn07577144\nn07577374\nn07577538\nn07577657\nn07577772\nn07577918\nn07578093\nn07579575\nn07579688\nn07579917\nn07580053\nn07580253\nn07580359\nn07580470\nn07580592\nn07581249\nn07581346\nn07581607\nn07581775\nn07581931\nn07582027\nn07582152\nn07582277\nn07582441\nn07582609\nn07582811\nn07582892\nn07582970\nn07583197\nn07583865\nn07583978\nn07584228\nn07584332\nn07584423\nn07584593\nn07584859\nn07584938\nn07585015\nn07585107\nn07585208\nn07585474\nn07585557\nn07585644\nn07585758\nn07585906\nn07585997\nn07586099\nn07586179\nn07586318\nn07586485\nn07586604\nn07586718\nn07586894\nn07587023\nn07587111\nn07587206\nn07587331\nn07587441\nn07587618\nn07587700\nn07587819\nn07587962\nn07588111\nn07588193\nn07588299\nn07588419\nn07588574\nn07588688\nn07588817\nn07588947\nn07589458\nn07589543\nn07589724\nn07589872\nn07589967\nn07590068\nn07590177\nn07590320\nn07590502\nn07590752\nn07590841\nn07590974\nn07591049\nn07591162\nn07591236\nn07591330\nn07591473\nn07591586\nn07591813\nn07591961\nn07592094\nn07592317\nn07592400\nn07592481\nn07592656\nn07592768\nn07592922\nn07593004\nn07593107\nn07593199\nn07593471\nn07593774\nn07593972\nn07594066\nn07594155\nn07594250\nn07594737\nn07594840\nn07595051\nn07595180\nn07595368\nn07595649\nn07595751\nn07595914\nn07596046\nn07596160\nn07596362\nn07596452\nn07596566\nn07596684\nn07596967\nn07597145\nn07597263\nn07597365\nn07598256\nn07598529\nn07598622\n
n07598734\nn07598928\nn07599068\nn07599161\nn07599242\nn07599383\nn07599468\nn07599554\nn07599649\nn07599783\nn07599911\nn07599998\nn07600177\nn07600285\nn07600394\nn07600506\nn07600696\nn07600895\nn07601025\nn07601175\nn07601290\nn07601407\nn07601572\nn07601686\nn07601809\nn07602650\nn07604956\nn07605040\nn07605198\nn07605282\nn07605380\nn07605474\nn07605597\nn07605693\nn07605804\nn07605944\nn07606058\nn07606191\nn07606278\nn07606419\nn07606538\nn07606669\nn07606764\nn07606933\nn07607027\nn07607138\nn07607361\nn07607492\nn07607605\nn07607707\nn07607832\nn07607967\nn07608098\nn07608245\nn07608339\nn07608429\nn07608533\nn07608641\nn07608721\nn07608866\nn07608980\nn07609083\nn07609215\nn07609316\nn07609407\nn07609549\nn07609632\nn07609728\nn07609840\nn07610295\nn07610502\nn07610620\nn07610746\nn07610890\nn07611046\nn07611148\nn07611267\nn07611358\nn07611733\nn07611839\nn07611991\nn07612137\nn07612273\nn07612367\nn07612530\nn07612632\nn07612996\nn07613158\nn07613266\nn07613671\nn07613815\nn07614103\nn07614198\nn07614348\nn07614730\nn07614825\nn07615052\nn07615190\nn07615289\nn07615460\nn07615569\nn07615671\nn07615954\nn07616046\nn07616174\nn07616265\nn07616386\nn07616487\nn07616590\nn07616748\nn07616906\nn07617051\nn07617188\nn07617344\nn07617447\nn07617526\nn07617611\nn07617708\nn07617839\nn07617932\nn07618029\nn07618119\nn07618281\nn07618432\nn07618587\nn07618684\nn07618871\nn07619004\nn07619208\nn07619301\nn07619409\nn07619508\nn07619881\nn07620047\nn07620145\nn07620327\nn07620597\nn07620689\nn07621264\nn07621497\nn07621618\nn07623136\nn07624466\nn07624666\nn07624757\nn07624924\nn07625061\nn07625324\nn07627931\nn07628068\nn07628181\nn07631926\nn07639069\nn07641928\nn07642361\nn07642471\nn07642742\nn07642833\nn07642933\nn07643026\nn07643200\nn07643306\nn07643474\nn07643577\nn07643679\nn07643764\nn07643891\nn07643981\nn07644244\nn07648913\nn07648997\nn07650792\nn07650903\nn07651025\nn07654148\nn07654298\nn07655067\nn07655263\nn07663899\nn07665438\nn07666176\nn07672914
\nn07678586\nn07678729\nn07678953\nn07679034\nn07679140\nn07679356\nn07680168\nn07680313\nn07680416\nn07680517\nn07680655\nn07680761\nn07680932\nn07681264\nn07681355\nn07681450\nn07681691\nn07681805\nn07681926\nn07682197\nn07682316\nn07682477\nn07682624\nn07682808\nn07682952\nn07683039\nn07683138\nn07683265\nn07683360\nn07683490\nn07683617\nn07683786\nn07684164\nn07684289\nn07684422\nn07684517\nn07684600\nn07684938\nn07685031\nn07685118\nn07685218\nn07685303\nn07685399\nn07685546\nn07685730\nn07685918\nn07686021\nn07686202\nn07686299\nn07686461\nn07686634\nn07686720\nn07686873\nn07687053\nn07687211\nn07687381\nn07687469\nn07687626\nn07687789\nn07688021\nn07688130\nn07688265\nn07688412\nn07688624\nn07688757\nn07688898\nn07689003\nn07689217\nn07689313\nn07689490\nn07689624\nn07689757\nn07689842\nn07690019\nn07690152\nn07690273\nn07690431\nn07690511\nn07690585\nn07690739\nn07690892\nn07691091\nn07691237\nn07691539\nn07691650\nn07691758\nn07691863\nn07691954\nn07692114\nn07692248\nn07692405\nn07692517\nn07692614\nn07692887\nn07693048\nn07693223\nn07693439\nn07693590\nn07693889\nn07693972\nn07694169\nn07694403\nn07694516\nn07694659\nn07694839\nn07695187\nn07695284\nn07695410\nn07695504\nn07695652\nn07695878\nn07695965\nn07696403\nn07696527\nn07696625\nn07696728\nn07696839\nn07696977\nn07697100\nn07697408\nn07697699\nn07697825\nn07698250\nn07698401\nn07698543\nn07698672\nn07698782\nn07700003\nn07703889\nn07704054\nn07704205\nn07704305\nn07705931\nn07707451\nn07708124\nn07708398\nn07708512\nn07708685\nn07708798\nn07709046\nn07709172\nn07709333\nn07709701\nn07709881\nn07710007\nn07710283\nn07710616\nn07710952\nn07711080\nn07711232\nn07711371\nn07711683\nn07711799\nn07711907\nn07712063\nn07712267\nn07712382\nn07712559\nn07712748\nn07712856\nn07712959\nn07713074\nn07713267\nn07713395\nn07713763\nn07713895\nn07714078\nn07714188\nn07714287\nn07714448\nn07714802\nn07714895\nn07715221\nn07715407\nn07715561\nn07715721\nn07716034\nn07716203\nn07716504\nn07716649\nn07716750\nn077170
70\nn07717714\nn07717858\nn07718068\nn07718195\nn07718329\nn07718671\nn07718920\nn07719058\nn07719213\nn07719330\nn07719437\nn07719616\nn07719756\nn07719839\nn07719980\nn07720084\nn07720185\nn07720277\nn07720442\nn07720615\nn07721018\nn07721118\nn07721195\nn07721325\nn07721456\nn07721678\nn07721833\nn07721942\nn07722052\nn07722217\nn07722390\nn07722485\nn07722666\nn07722763\nn07722888\nn07723039\nn07723177\nn07723330\nn07723559\nn07723753\nn07723968\nn07724078\nn07724173\nn07724269\nn07724492\nn07724654\nn07724819\nn07724943\nn07725158\nn07725255\nn07725376\nn07725531\nn07725663\nn07725789\nn07725888\nn07726009\nn07726095\nn07726230\nn07726386\nn07726525\nn07726672\nn07726796\nn07727048\nn07727140\nn07727252\nn07727377\nn07727458\nn07727578\nn07727741\nn07727868\nn07728053\nn07728181\nn07728284\nn07728391\nn07728585\nn07728708\nn07728804\nn07729000\nn07729142\nn07729225\nn07729384\nn07729485\nn07729828\nn07729926\nn07730207\nn07730320\nn07730406\nn07730562\nn07730708\nn07730855\nn07731006\nn07731122\nn07731284\nn07731436\nn07731587\nn07731767\nn07731952\nn07732168\nn07732302\nn07732433\nn07732525\nn07732636\nn07732747\nn07732904\nn07733005\nn07733124\nn07733217\nn07733394\nn07733567\nn07733712\nn07733847\nn07734017\nn07734183\nn07734292\nn07734417\nn07734555\nn07734879\nn07735052\nn07735179\nn07735294\nn07735404\nn07735510\nn07735687\nn07735803\nn07735981\nn07736087\nn07736256\nn07736371\nn07736527\nn07736692\nn07736813\nn07736971\nn07737081\nn07737594\nn07737745\nn07738105\nn07738224\nn07739035\nn07739125\nn07739344\nn07739506\nn07739923\nn07740033\nn07740115\nn07740220\nn07740342\nn07740461\nn07740597\nn07740744\nn07740855\nn07740954\nn07741138\nn07741235\nn07741357\nn07741461\nn07741623\nn07741706\nn07741804\nn07741888\nn07742012\nn07742224\nn07742415\nn07742513\nn07742605\nn07742704\nn07743224\nn07743384\nn07743544\nn07743723\nn07743902\nn07744057\nn07744246\nn07744430\nn07744559\nn07744682\nn07744811\nn07745046\nn07745197\nn07745357\nn07745466\nn07745661\nn0774
6038\nn07746186\nn07746334\nn07746551\nn07746749\nn07746910\nn07747055\nn07747811\nn07747951\nn07748157\nn07748276\nn07748416\nn07748574\nn07748753\nn07748912\nn07749095\nn07749192\nn07749312\nn07749446\nn07749731\nn07749870\nn07749969\nn07750146\nn07750299\nn07750449\nn07750586\nn07750736\nn07750872\nn07751004\nn07751148\nn07751280\nn07751451\nn07751737\nn07751858\nn07751977\nn07752109\nn07752264\nn07752377\nn07752514\nn07752602\nn07752664\nn07752782\nn07752874\nn07752966\nn07753448\nn07753743\nn07753980\nn07754155\nn07754279\nn07754451\nn07754894\nn07755089\nn07755262\nn07755411\nn07755619\nn07755707\nn07755929\nn07756096\nn07756325\nn07756499\nn07756641\nn07756838\nn07756951\nn07757132\nn07757312\nn07757511\nn07757602\nn07757753\nn07757874\nn07757990\nn07758125\nn07758260\nn07758407\nn07758582\nn07758680\nn07758950\nn07759194\nn07759324\nn07759424\nn07759576\nn07759691\nn07759816\nn07760070\nn07760153\nn07760297\nn07760395\nn07760501\nn07760673\nn07760755\nn07761141\nn07761309\nn07761611\nn07761777\nn07761954\nn07762114\nn07762244\nn07762373\nn07762534\nn07762740\nn07762913\nn07763107\nn07763290\nn07763483\nn07763629\nn07763792\nn07763987\nn07764155\nn07764315\nn07764486\nn07764630\nn07764847\nn07765073\nn07765208\nn07765361\nn07765517\nn07765612\nn07765728\nn07765862\nn07765999\nn07766173\nn07766409\nn07766530\nn07766723\nn07766891\nn07767002\nn07767171\nn07767344\nn07767549\nn07767709\nn07767847\nn07768068\nn07768139\nn07768230\nn07768318\nn07768423\nn07768590\nn07768858\nn07769102\nn07769306\nn07769465\nn07769584\nn07769731\nn07769886\nn07770034\nn07770180\nn07770439\nn07770571\nn07770763\nn07770869\nn07771082\nn07771212\nn07771405\nn07771539\nn07771731\nn07771891\nn07772026\nn07772147\nn07772274\nn07772413\nn07772788\nn07772935\nn07773428\nn07774182\nn07774295\nn07774479\nn07774596\nn07774719\nn07774842\nn07775050\nn07775197\nn07783827\nn07785487\nn07800091\nn07800487\nn07800636\nn07800740\nn07801007\nn07801091\nn07801342\nn07801508\nn07801709\nn07801779\nn07
801892\nn07802152\nn07802246\nn07802417\nn07802767\nn07802863\nn07802963\nn07803093\nn07803213\nn07803310\nn07803408\nn07803545\nn07803779\nn07803895\nn07803992\nn07804152\nn07804323\nn07804543\nn07804657\nn07804771\nn07804900\nn07805006\nn07805254\nn07805389\nn07805478\nn07805594\nn07805731\nn07805966\nn07806043\nn07806120\nn07806221\nn07806633\nn07806774\nn07806879\nn07807002\nn07807171\nn07807317\nn07807472\nn07807594\nn07807710\nn07807834\nn07807922\nn07808022\nn07808166\nn07808268\nn07808352\nn07808479\nn07808587\nn07808675\nn07808806\nn07808904\nn07809096\nn07809368\nn07810531\nn07810907\nn07811416\nn07812046\nn07812184\nn07812662\nn07812790\nn07812913\nn07813107\nn07813324\nn07813495\nn07813579\nn07813717\nn07813833\nn07814007\nn07814203\nn07814390\nn07814487\nn07814634\nn07814790\nn07814925\nn07815163\nn07815294\nn07815424\nn07815588\nn07815839\nn07815956\nn07816052\nn07816164\nn07816296\nn07816398\nn07816575\nn07816726\nn07816839\nn07817024\nn07817160\nn07817315\nn07817465\nn07817599\nn07817758\nn07817871\nn07818029\nn07818133\nn07818277\nn07818422\nn07818572\nn07818689\nn07818825\nn07818995\nn07819166\nn07819303\nn07819480\nn07819682\nn07819769\nn07819896\nn07820036\nn07820145\nn07820297\nn07820497\nn07820683\nn07820814\nn07820960\nn07821107\nn07821260\nn07821404\nn07821610\nn07821758\nn07821919\nn07822053\nn07822197\nn07822323\nn07822518\nn07822687\nn07822845\nn07823105\nn07823280\nn07823369\nn07823460\nn07823591\nn07823698\nn07823814\nn07823951\nn07824191\nn07824268\nn07824383\nn07824502\nn07824702\nn07824863\nn07824988\nn07825194\nn07825399\nn07825496\nn07825597\nn07825717\nn07825850\nn07825972\nn07826091\nn07826250\nn07826340\nn07826453\nn07826544\nn07826653\nn07826930\nn07827130\nn07827284\nn07827410\nn07827554\nn07827750\nn07827896\nn07828041\nn07828156\nn07828275\nn07828378\nn07828642\nn07828987\nn07829248\nn07829331\nn07829412\nn07830493\nn07830593\nn07830690\nn07830841\nn07830986\nn07831267\nn07831450\nn07831663\nn07831821\nn07831955\nn07832099\nn
07832202\nn07832307\nn07832416\nn07832592\nn07832741\nn07832902\nn07833333\nn07833535\nn07833672\nn07833816\nn07833951\nn07834065\nn07834160\nn07834286\nn07834507\nn07834618\nn07834774\nn07834872\nn07835051\nn07835173\nn07835331\nn07835457\nn07835547\nn07835701\nn07835823\nn07835921\nn07836077\nn07836269\nn07836456\nn07836600\nn07836731\nn07837002\nn07837110\nn07837234\nn07837362\nn07837545\nn07837630\nn07837755\nn07837912\nn07838073\nn07838233\nn07838441\nn07838551\nn07838659\nn07838811\nn07838905\nn07839055\nn07839172\nn07839312\nn07839478\nn07839593\nn07839730\nn07839864\nn07840027\nn07840124\nn07840219\nn07840304\nn07840395\nn07840520\nn07840672\nn07840804\nn07841037\nn07841345\nn07841495\nn07841639\nn07841800\nn07841907\nn07842044\nn07842130\nn07842202\nn07842308\nn07842433\nn07842605\nn07842753\nn07842972\nn07843117\nn07843220\nn07843348\nn07843464\nn07843636\nn07843775\nn07844042\nn07844604\nn07844786\nn07844867\nn07845087\nn07845166\nn07845335\nn07845421\nn07845495\nn07845571\nn07845702\nn07845775\nn07845863\nn07846014\nn07846143\nn07846274\nn07846359\nn07846471\nn07846557\nn07846688\nn07846802\nn07846938\nn07847047\nn07847198\nn07847453\nn07847585\nn07847706\nn07847827\nn07847917\nn07848093\nn07848196\nn07848338\nn07848771\nn07848936\nn07849026\nn07849186\nn07849336\nn07849506\nn07849619\nn07849733\nn07849912\nn07850083\nn07850219\nn07850329\nn07851054\nn07851298\nn07851443\nn07851554\nn07851641\nn07851767\nn07851926\nn07852045\nn07852229\nn07852302\nn07852376\nn07852452\nn07852532\nn07852614\nn07852712\nn07852833\nn07852919\nn07853125\nn07853232\nn07853345\nn07853445\nn07853560\nn07853648\nn07853762\nn07853852\nn07853946\nn07854066\nn07854184\nn07854266\nn07854348\nn07854455\nn07854614\nn07854707\nn07854813\nn07854982\nn07855105\nn07855188\nn07855317\nn07855413\nn07855510\nn07855603\nn07855721\nn07855812\nn07855907\nn07856045\nn07856186\nn07856270\nn07856756\nn07856895\nn07856992\nn07857076\nn07857170\nn07857356\nn07857598\nn07857731\nn07857959\nn07858114\
nn07858197\nn07858336\nn07858484\nn07858595\nn07858841\nn07858978\nn07859142\nn07859284\nn07859583\nn07859796\nn07859951\nn07860103\nn07860208\nn07860331\nn07860447\nn07860548\nn07860629\nn07860805\nn07861158\nn07861247\nn07861334\nn07861557\nn07861681\nn07861813\nn07861983\nn07862095\nn07862244\nn07862348\nn07862461\nn07862611\nn07862770\nn07862946\nn07863107\nn07863229\nn07863374\nn07863547\nn07863644\nn07863802\nn07863935\nn07864065\nn07864198\nn07864317\nn07864475\nn07864638\nn07864756\nn07864934\nn07865105\nn07865196\nn07865484\nn07865575\nn07865700\nn07865788\nn07866015\nn07866151\nn07866277\nn07866409\nn07866571\nn07866723\nn07866868\nn07867021\nn07867164\nn07867324\nn07867421\nn07867616\nn07867751\nn07867883\nn07868045\nn07868200\nn07868340\nn07868508\nn07868684\nn07868830\nn07868955\nn07869111\nn07869291\nn07869391\nn07869522\nn07869611\nn07869775\nn07869937\nn07870069\nn07870167\nn07870313\nn07870478\nn07870620\nn07870734\nn07870894\nn07871065\nn07871234\nn07871335\nn07871436\nn07871588\nn07871720\nn07872593\nn07872748\nn07873057\nn07873198\nn07873348\nn07873464\nn07873679\nn07874063\nn07874159\nn07874259\nn07874343\nn07874441\nn07874531\nn07874674\nn07874780\nn07874995\nn07875086\nn07875267\nn07875436\nn07875560\nn07875693\nn07875835\nn07875926\nn07876026\nn07876189\nn07876281\nn07876460\nn07876550\nn07876651\nn07876775\nn07876893\nn07877187\nn07877299\nn07877675\nn07877849\nn07877961\nn07878145\nn07878283\nn07878479\nn07878647\nn07878785\nn07878926\nn07879072\nn07879174\nn07879350\nn07879450\nn07879560\nn07879659\nn07879821\nn07879953\nn07880080\nn07880213\nn07880325\nn07880458\nn07880751\nn07880880\nn07881117\nn07881205\nn07881404\nn07881525\nn07881625\nn07881800\nn07882420\nn07882497\nn07882886\nn07883031\nn07883156\nn07883251\nn07883384\nn07883510\nn07883661\nn07884567\nn07885705\nn07886057\nn07886176\nn07886317\nn07886463\nn07886572\nn07886849\nn07887099\nn07887192\nn07887304\nn07887461\nn07887634\nn07887967\nn07888058\nn07888229\nn07888378\nn0788846
5\nn07888816\nn07888909\nn07889193\nn07889274\nn07889510\nn07889814\nn07889990\nn07890068\nn07890226\nn07890352\nn07890540\nn07890617\nn07890750\nn07890890\nn07890970\nn07891095\nn07891189\nn07891309\nn07891433\nn07891726\nn07892418\nn07892813\nn07893253\nn07893425\nn07893528\nn07893642\nn07893792\nn07893891\nn07894102\nn07894298\nn07894451\nn07894551\nn07894703\nn07894799\nn07894965\nn07895100\nn07895237\nn07895435\nn07895595\nn07895710\nn07895839\nn07895962\nn07896060\nn07896165\nn07896287\nn07896422\nn07896560\nn07896661\nn07896765\nn07896893\nn07896994\nn07897116\nn07897200\nn07897438\nn07897600\nn07897750\nn07897865\nn07897975\nn07898117\nn07898247\nn07898333\nn07898443\nn07898617\nn07898745\nn07898895\nn07899003\nn07899108\nn07899292\nn07899434\nn07899533\nn07899660\nn07899769\nn07899899\nn07899976\nn07900225\nn07900406\nn07900616\nn07900734\nn07900825\nn07900958\nn07901355\nn07901457\nn07901587\nn07902121\nn07902336\nn07902443\nn07902520\nn07902698\nn07902799\nn07902937\nn07903101\nn07903208\nn07903543\nn07903643\nn07903731\nn07903841\nn07903962\nn07904072\nn07904293\nn07904395\nn07904637\nn07904760\nn07904865\nn07904934\nn07905038\nn07905296\nn07905386\nn07905474\nn07905618\nn07905770\nn07905979\nn07906111\nn07906284\nn07906572\nn07906718\nn07906877\nn07907037\nn07907161\nn07907342\nn07907429\nn07907548\nn07907831\nn07907943\nn07908411\nn07908567\nn07908647\nn07908812\nn07908923\nn07909129\nn07909231\nn07909362\nn07909504\nn07909593\nn07909714\nn07909811\nn07909954\nn07910048\nn07910152\nn07910245\nn07910379\nn07910538\nn07910656\nn07910799\nn07910970\nn07911061\nn07911249\nn07911371\nn07911677\nn07912093\nn07912211\nn07913180\nn07913300\nn07913393\nn07913537\nn07913644\nn07913774\nn07913882\nn07914006\nn07914128\nn07914271\nn07914413\nn07914586\nn07914686\nn07914777\nn07914887\nn07914995\nn07915094\nn07915213\nn07915366\nn07915491\nn07915618\nn07915800\nn07915918\nn07916041\nn07916183\nn07916319\nn07916437\nn07916582\nn07917133\nn07917272\nn07917392\nn07917
507\nn07917618\nn07917791\nn07917874\nn07917951\nn07918028\nn07918193\nn07918309\nn07918706\nn07918879\nn07919165\nn07919310\nn07919441\nn07919572\nn07919665\nn07919787\nn07919894\nn07920222\nn07920349\nn07920540\nn07920663\nn07920872\nn07920989\nn07921090\nn07921239\nn07921360\nn07921455\nn07921615\nn07921834\nn07921948\nn07922041\nn07922147\nn07922512\nn07922607\nn07922764\nn07922955\nn07923748\nn07924033\nn07924276\nn07924366\nn07924443\nn07924560\nn07924655\nn07924747\nn07924834\nn07924955\nn07925116\nn07925229\nn07925327\nn07925423\nn07925500\nn07925608\nn07925708\nn07925808\nn07925966\nn07926250\nn07926346\nn07926442\nn07926540\nn07926785\nn07926920\nn07927070\nn07927197\nn07927512\nn07927716\nn07927836\nn07927931\nn07928163\nn07928264\nn07928367\nn07928488\nn07928578\nn07928696\nn07928790\nn07928887\nn07928998\nn07929172\nn07929351\nn07929519\nn07929940\nn07930062\nn07930205\nn07930315\nn07930433\nn07930554\nn07931001\nn07931096\nn07931280\nn07931452\nn07931612\nn07931733\nn07931870\nn07932323\nn07932454\nn07932614\nn07932762\nn07932841\nn07933154\nn07933274\nn07933530\nn07933652\nn07933799\nn07933891\nn07934032\nn07934152\nn07934282\nn07934373\nn07934530\nn07934678\nn07934800\nn07934908\nn07935043\nn07935152\nn07935288\nn07935379\nn07935504\nn07935737\nn07935878\nn07936015\nn07936093\nn07936263\nn07936459\nn07936548\nn07936745\nn07936979\nn07937069\nn07937344\nn07937461\nn07937621\nn07938007\nn07938149\nn07938313\nn07938594\nn07942152\nn07951464\nn07954211\nn07977870\nn08079613\nn08182379\nn08238463\nn08242223\nn08249459\nn08253141\nn08256735\nn08376250\nn08385989\nn08492354\nn08492461\nn08494231\nn08495908\nn08496334\nn08500819\nn08500989\nn08501887\nn08505018\nn08506347\nn08511017\nn08517010\nn08517676\nn08518171\nn08519299\nn08521623\nn08523340\nn08524735\nn08539072\nn08539276\nn08540532\nn08547468\nn08547544\nn08551296\nn08554440\nn08555333\nn08555710\nn08558770\nn08558963\nn08559155\nn08560295\nn08569482\nn08571275\nn08571642\nn08571898\nn08573674\nn085
73842\nn08578517\nn08579266\nn08579352\nn08580944\nn08583292\nn08583455\nn08583554\nn08583682\nn08584914\nn08586978\nn08589670\nn08596076\nn08597579\nn08598301\nn08598568\nn08599174\nn08599292\nn08611339\nn08611421\nn08613733\nn08614632\nn08616050\nn08618831\nn08619112\nn08623676\nn08628141\nn08633683\nn08640531\nn08640739\nn08640962\nn08643267\nn08644045\nn08645104\nn08645212\nn08645318\nn08647264\nn08648917\nn08649711\nn08651104\nn08652376\nn08658309\nn08658918\nn08659242\nn08659331\nn08659446\nn08659861\nn08661878\nn08662427\nn08663051\nn08663703\nn08663860\nn08673039\nn08674344\nn08676253\nn08677424\nn08677801\nn08678783\nn08679167\nn08679269\nn08679562\nn08685188\nn08782627\nn08896327\nn09032191\nn09186592\nn09189157\nn09191635\nn09193551\nn09194227\nn09199101\nn09201998\nn09203827\nn09205509\nn09206896\nn09206985\nn09208496\nn09209025\nn09210862\nn09213434\nn09213565\nn09214060\nn09214269\nn09214916\nn09215023\nn09215437\nn09217230\nn09218315\nn09218494\nn09218641\nn09219233\nn09223487\nn09224725\nn09226869\nn09228055\nn09230041\nn09230202\nn09231117\nn09233446\nn09233603\nn09238926\nn09239302\nn09242389\nn09245515\nn09247410\nn09248153\nn09248399\nn09249034\nn09249155\nn09251407\nn09255070\nn09257843\nn09259025\nn09259219\nn09260907\nn09262690\nn09263912\nn09264803\nn09265620\nn09266604\nn09267854\nn09268007\nn09269341\nn09269472\nn09269882\nn09270160\nn09270657\nn09270735\nn09274152\nn09274305\nn09279986\nn09281252\nn09282208\nn09283193\nn09283405\nn09283514\nn09283767\nn09283866\nn09287415\nn09287968\nn09289331\nn09289596\nn09290350\nn09290444\nn09294877\nn09295210\nn09295946\nn09300306\nn09300905\nn09302616\nn09303008\nn09303528\nn09304750\nn09305031\nn09305898\nn09308572\nn09308743\nn09309046\nn09309168\nn09309292\nn09310616\nn09315159\nn09319604\nn09325824\nn09326662\nn09327077\nn09327538\nn09330378\nn09331251\nn09335693\nn09335809\nn09336555\nn09337048\nn09337253\nn09338013\nn09339810\nn09344198\nn09344324\nn09344724\nn09348460\nn09349648\nn09351905\nn0
9352849\nn09353815\nn09354511\nn09357346\nn09357447\nn09359803\nn09361517\nn09362316\nn09362945\nn09366017\nn09366317\nn09375606\nn09376198\nn09376526\nn09376786\nn09381242\nn09382099\nn09384106\nn09389867\nn09391386\nn09391644\nn09391774\nn09392402\nn09393524\nn09393605\nn09396465\nn09396608\nn09398076\nn09398677\nn09400584\nn09400987\nn09402944\nn09403086\nn09403211\nn09403427\nn09403734\nn09405078\nn09405787\nn09406793\nn09409512\nn09409752\nn09410224\nn09411189\nn09411295\nn09415584\nn09415671\nn09416076\nn09416890\nn09421031\nn09421799\nn09422190\nn09422631\nn09425019\nn09425344\nn09428628\nn09429630\nn09432283\nn09432990\nn09433312\nn09433442\nn09433839\nn09435739\nn09436444\nn09436708\nn09437454\nn09438844\nn09438940\nn09439032\nn09439213\nn09442595\nn09443281\nn09443641\nn09444783\nn09445008\nn09445289\nn09447666\nn09448690\nn09450163\nn09451237\nn09452291\nn09452395\nn09452760\nn09453008\nn09454153\nn09454412\nn09454744\nn09456207\nn09457979\nn09458269\nn09459979\nn09460046\nn09461069\nn09462600\nn09463226\nn09464486\nn09466678\nn09467696\nn09470027\nn09470222\nn09472413\nn09474010\nn09474412\nn09474765\nn09475044\nn09475179\nn09475925\nn09476123\nn09478210\nn09480959\nn09481120\nn09493983\nn09495962\nn09505153\nn09537660\nn09556121\nn09605110\nn09606009\nn09606527\nn09607630\nn09607782\nn09607903\nn09608709\nn09610255\nn09610405\nn09611722\nn09612700\nn09613118\nn09613191\nn09613690\nn09615336\nn09616573\nn09616922\nn09617161\nn09617435\nn09617577\nn09617696\nn09618760\nn09618880\nn09618957\nn09619168\nn09619452\nn09620078\nn09620794\nn09621232\nn09622049\nn09622302\nn09624168\nn09624559\nn09624899\nn09625401\nn09626238\nn09627807\nn09627906\nn09629065\nn09629246\nn09629752\nn09631129\nn09632274\nn09632518\nn09633969\nn09635534\nn09635635\nn09635973\nn09636339\nn09637339\nn09638454\nn09638875\nn09639382\nn09639919\nn09640327\nn09640715\nn09641002\nn09641578\nn09643799\nn09644152\nn09644657\nn09648743\nn09648911\nn09649067\nn09650729\nn09650839\nn09650989\n
n09651123\nn09651968\nn09652149\nn09653144\nn09653438\nn09654079\nn09654518\nn09654898\nn09655213\nn09655466\nn09656077\nn09657206\nn09657748\nn09658254\nn09658398\nn09658815\nn09658921\nn09659039\nn09659188\nn09660010\nn09660240\nn09661873\nn09662038\nn09662661\nn09662951\nn09663248\nn09663786\nn09663999\nn09664556\nn09664908\nn09665367\nn09665545\nn09666349\nn09666476\nn09666883\nn09667358\nn09668199\nn09668437\nn09668562\nn09668988\nn09669631\nn09670280\nn09670521\nn09670909\nn09671089\nn09672590\nn09672725\nn09672840\nn09673091\nn09674412\nn09674786\nn09675045\nn09675673\nn09675799\nn09675922\nn09676021\nn09676247\nn09676884\nn09677427\nn09678747\nn09679028\nn09679170\nn09679925\nn09680908\nn09681107\nn09681234\nn09681973\nn09683180\nn09683757\nn09683924\nn09684082\nn09684901\nn09685233\nn09685806\nn09686262\nn09686401\nn09688233\nn09688804\nn09689435\nn09689958\nn09690083\nn09690208\nn09690496\nn09690621\nn09690864\nn09691604\nn09691729\nn09691858\nn09692125\nn09692915\nn09693244\nn09693982\nn09694664\nn09694771\nn09695019\nn09695132\nn09695514\nn09695620\nn09695979\nn09696456\nn09696585\nn09696763\nn09697401\nn09697986\nn09698644\nn09699020\nn09699642\nn09700125\nn09700964\nn09701148\nn09701833\nn09702134\nn09702673\nn09703101\nn09703344\nn09703485\nn09703708\nn09703809\nn09703932\nn09704057\nn09704157\nn09704283\nn09705003\nn09705124\nn09705671\nn09705784\nn09706029\nn09706255\nn09707061\nn09707289\nn09707735\nn09708750\nn09708889\nn09709531\nn09709673\nn09710041\nn09710164\nn09710886\nn09711132\nn09711435\nn09712324\nn09712448\nn09712696\nn09712967\nn09713108\nn09714120\nn09714694\nn09715165\nn09715303\nn09715427\nn09716047\nn09716933\nn09717233\nn09718217\nn09718811\nn09718936\nn09719309\nn09719794\nn09720033\nn09720256\nn09720595\nn09720702\nn09720842\nn09721244\nn09721444\nn09722064\nn09722658\nn09722817\nn09723067\nn09723819\nn09723944\nn09724234\nn09724533\nn09724656\nn09724785\nn09725000\nn09725229\nn09725546\nn09725653\nn09725772\nn09725935\nn09726621
\nn09726811\nn09727440\nn09727826\nn09728137\nn09728285\nn09729062\nn09729156\nn09730077\nn09730204\nn09730824\nn09731343\nn09731436\nn09731571\nn09732170\nn09733459\nn09733793\nn09734185\nn09734450\nn09734535\nn09734639\nn09735258\nn09735654\nn09736485\nn09736798\nn09736945\nn09737050\nn09737161\nn09737453\nn09738121\nn09738400\nn09740724\nn09741074\nn09741331\nn09741722\nn09741816\nn09741904\nn09741999\nn09742101\nn09742315\nn09742927\nn09743487\nn09743601\nn09743792\nn09744161\nn09744346\nn09744462\nn09744679\nn09744834\nn09745229\nn09745324\nn09745834\nn09745933\nn09746936\nn09747191\nn09747495\nn09748101\nn09748408\nn09748648\nn09748889\nn09749386\nn09750282\nn09750641\nn09750770\nn09750891\nn09751076\nn09751496\nn09751622\nn09751895\nn09752023\nn09752519\nn09753348\nn09753792\nn09754152\nn09754217\nn09754633\nn09754907\nn09755086\nn09755241\nn09755555\nn09755788\nn09755893\nn09756049\nn09756195\nn09756961\nn09757449\nn09758173\nn09758885\nn09759501\nn09760290\nn09760609\nn09760913\nn09761068\nn09761753\nn09762011\nn09762385\nn09763272\nn09763784\nn09764201\nn09764598\nn09764732\nn09764900\nn09765118\nn09765278\nn09767197\nn09769076\nn09769525\nn09769929\nn09770179\nn09770359\nn09771435\nn09772330\nn09772746\nn09772930\nn09773962\nn09774167\nn09774783\nn09775907\nn09776346\nn09776642\nn09776807\nn09777870\nn09778266\nn09778537\nn09778783\nn09778927\nn09779124\nn09779280\nn09779461\nn09779790\nn09780395\nn09780828\nn09780984\nn09781398\nn09781504\nn09781650\nn09782167\nn09782397\nn09782855\nn09783537\nn09783776\nn09783884\nn09784043\nn09784160\nn09784564\nn09785236\nn09785659\nn09785891\nn09786115\nn09787534\nn09787765\nn09788073\nn09788237\nn09789150\nn09789566\nn09789898\nn09790047\nn09790482\nn09791014\nn09791419\nn09791816\nn09792125\nn09792555\nn09792969\nn09793141\nn09793352\nn09793946\nn09794550\nn09794668\nn09795010\nn09795124\nn09795334\nn09796809\nn09796974\nn09797742\nn09797873\nn09797998\nn09798096\nn09800469\nn09800964\nn09801102\nn09801275\nn098015
33\nn09802445\nn09802641\nn09802951\nn09804230\nn09805151\nn09805324\nn09805475\nn09806944\nn09807075\nn09808080\nn09808591\nn09809279\nn09809538\nn09809749\nn09809925\nn09810166\nn09811568\nn09811712\nn09811852\nn09813219\nn09814252\nn09814381\nn09814488\nn09814567\nn09814660\nn09815455\nn09815790\nn09816654\nn09816771\nn09817174\nn09817386\nn09818022\nn09819477\nn09820044\nn09820263\nn09821831\nn09822830\nn09823153\nn09823287\nn09823502\nn09823832\nn09824135\nn09824609\nn09825096\nn09825750\nn09826204\nn09826605\nn09826821\nn09827246\nn09827363\nn09828216\nn09828403\nn09828988\nn09830194\nn09830400\nn09830629\nn09830759\nn09830926\nn09831962\nn09832456\nn09832633\nn09832978\nn09833111\nn09833275\nn09833441\nn09833536\nn09833751\nn09833997\nn09834258\nn09834378\nn09834699\nn09834885\nn09835017\nn09835153\nn09835230\nn09835348\nn09836160\nn09836343\nn09836519\nn09836786\nn09837459\nn09837720\nn09838295\nn09838370\nn09838621\nn09839702\nn09840217\nn09840435\nn09840520\nn09841188\nn09841515\nn09841696\nn09842047\nn09842288\nn09842395\nn09842528\nn09842823\nn09843443\nn09843602\nn09843716\nn09843824\nn09844457\nn09844898\nn09845401\nn09845849\nn09846142\nn09846469\nn09846586\nn09846755\nn09846894\nn09847267\nn09847344\nn09847543\nn09848110\nn09848489\nn09849167\nn09849990\nn09850760\nn09850974\nn09851165\nn09851575\nn09853541\nn09853645\nn09853881\nn09854218\nn09854421\nn09854915\nn09855433\nn09856401\nn09856671\nn09856827\nn09857007\nn09858165\nn09858299\nn09858733\nn09859152\nn09859285\nn09859975\nn09861287\nn09861599\nn09861863\nn09861946\nn09862183\nn09862621\nn09863031\nn09863339\nn09863749\nn09863936\nn09864632\nn09864968\nn09865068\nn09865162\nn09865398\nn09865672\nn09865744\nn09866115\nn09866354\nn09866559\nn09866661\nn09866817\nn09866922\nn09867069\nn09867154\nn09867311\nn09868270\nn09868782\nn09868899\nn09869317\nn09869447\nn09869578\nn09870096\nn09871095\nn09871229\nn09871681\nn09871867\nn09871952\nn09872066\nn09872557\nn09873348\nn09873473\nn09873769\nn0987
3899\nn09874428\nn09874725\nn09874862\nn09875025\nn09875979\nn09876701\nn09877288\nn09877587\nn09877750\nn09877951\nn09878921\nn09879552\nn09880189\nn09880741\nn09881265\nn09881358\nn09881895\nn09883047\nn09883452\nn09883807\nn09885059\nn09885866\nn09886403\nn09886540\nn09888635\nn09889065\nn09889170\nn09889691\nn09889941\nn09890192\nn09890749\nn09891730\nn09892262\nn09892513\nn09892693\nn09893191\nn09893344\nn09893502\nn09893600\nn09894143\nn09894445\nn09894654\nn09894909\nn09895222\nn09895480\nn09895561\nn09895701\nn09895902\nn09896170\nn09896311\nn09896401\nn09896685\nn09896826\nn09898020\nn09899289\nn09899671\nn09899782\nn09899929\nn09901337\nn09901502\nn09901642\nn09901786\nn09901921\nn09902128\nn09902353\nn09902731\nn09902851\nn09902954\nn09903153\nn09903501\nn09903639\nn09903936\nn09904208\nn09904837\nn09905050\nn09905185\nn09905530\nn09906293\nn09906449\nn09906704\nn09907804\nn09908769\nn09909660\nn09909929\nn09910222\nn09910374\nn09910556\nn09910840\nn09911226\nn09912431\nn09912681\nn09912907\nn09912995\nn09913329\nn09913455\nn09913593\nn09915434\nn09915651\nn09916348\nn09917214\nn09917345\nn09917481\nn09917593\nn09918248\nn09918554\nn09918867\nn09919061\nn09919200\nn09919451\nn09919899\nn09920106\nn09920283\nn09920901\nn09921034\nn09923003\nn09923186\nn09923418\nn09923561\nn09923673\nn09923996\nn09924106\nn09924195\nn09924313\nn09924437\nn09924996\nn09927089\nn09927451\nn09928136\nn09928451\nn09928845\nn09929202\nn09929298\nn09929577\nn09930257\nn09930628\nn09930876\nn09931165\nn09931418\nn09931640\nn09932098\nn09932336\nn09932508\nn09932788\nn09933020\nn09933098\nn09933842\nn09933972\nn09934337\nn09934488\nn09934774\nn09935107\nn09935434\nn09936825\nn09936892\nn09937056\nn09937688\nn09937802\nn09937903\nn09938080\nn09938449\nn09938991\nn09940725\nn09940818\nn09941089\nn09941571\nn09941787\nn09941964\nn09942697\nn09942970\nn09943239\nn09943811\nn09944022\nn09944160\nn09944430\nn09945021\nn09945223\nn09945319\nn09945603\nn09945745\nn09946814\nn09947127\nn09
950457\nn09950728\nn09951070\nn09951274\nn09951524\nn09951616\nn09952163\nn09953052\nn09953350\nn09953615\nn09954355\nn09954639\nn09955406\nn09955944\nn09956578\nn09957523\nn09958133\nn09958292\nn09958447\nn09958569\nn09959142\nn09959658\nn09960688\nn09961198\nn09961331\nn09961469\nn09961605\nn09961739\nn09962966\nn09964202\nn09964411\nn09965515\nn09965787\nn09966470\nn09966554\nn09967063\nn09967406\nn09967555\nn09967816\nn09967967\nn09968259\nn09968652\nn09968741\nn09968845\nn09970088\nn09970192\nn09970402\nn09970822\nn09971273\nn09971385\nn09971839\nn09972010\nn09972458\nn09972587\nn09974648\nn09975425\nn09976024\nn09976283\nn09976429\nn09976728\nn09976917\nn09978442\nn09979321\nn09979913\nn09980458\nn09980805\nn09980985\nn09981092\nn09981278\nn09981540\nn09981939\nn09982152\nn09982525\nn09983314\nn09983572\nn09983889\nn09984960\nn09985470\nn09985809\nn09985978\nn09986450\nn09986700\nn09986904\nn09987045\nn09987161\nn09987239\nn09988063\nn09988311\nn09988493\nn09988703\nn09989502\nn09990415\nn09990690\nn09990777\nn09991740\nn09991867\nn09992538\nn09992837\nn09993252\nn09993651\nn09994400\nn09994673\nn09994808\nn09994878\nn09995829\nn09996039\nn09996304\nn09996481\nn09997622\nn09998788\nn09999135\nn10000294\nn10000459\nn10000787\nn10001217\nn10001481\nn10001764\nn10002257\nn10002760\nn10003476\nn10004718\nn10005006\nn10005934\nn10006177\nn10006748\nn10007684\nn10007809\nn10007995\nn10008123\nn10008254\nn10009162\nn10009276\nn10009484\nn10009671\nn10010062\nn10010243\nn10010632\nn10010767\nn10010864\nn10011360\nn10011486\nn10012484\nn10013811\nn10015215\nn10015485\nn10015792\nn10015897\nn10017272\nn10017422\nn10018747\nn10018861\nn10019072\nn10019187\nn10019406\nn10020366\nn10020533\nn10020670\nn10020807\nn10020890\nn10022908\nn10023264\nn10023506\nn10023656\nn10024025\nn10024362\nn10024937\nn10025060\nn10025295\nn10025391\nn10025635\nn10026976\nn10027246\nn10027590\nn10028402\nn10028541\nn10029068\nn10030277\nn10032987\nn10033412\nn10033572\nn10033663\nn10033888\nn
10034201\nn10034614\nn10035952\nn10036266\nn10036444\nn10036692\nn10036929\nn10037080\nn10037385\nn10037588\nn10037922\nn10038119\nn10038409\nn10038620\nn10039271\nn10039946\nn10040240\nn10040698\nn10040945\nn10041373\nn10041887\nn10042690\nn10042845\nn10043024\nn10043491\nn10043643\nn10044682\nn10044879\nn10047199\nn10047459\nn10048117\nn10048367\nn10048612\nn10048836\nn10049363\nn10050043\nn10050880\nn10051026\nn10051761\nn10051861\nn10051975\nn10052694\nn10053439\nn10053808\nn10054657\nn10055297\nn10055410\nn10055566\nn10055730\nn10055847\nn10056103\nn10056611\nn10056719\nn10057271\nn10058411\nn10058962\nn10059067\nn10060075\nn10060175\nn10060352\nn10061043\nn10061195\nn10061431\nn10061882\nn10062042\nn10062176\nn10062275\nn10062492\nn10062594\nn10062716\nn10062905\nn10062996\nn10063635\nn10063919\nn10064831\nn10064977\nn10065758\nn10066206\nn10066314\nn10067011\nn10067305\nn10067600\nn10067968\nn10068234\nn10068425\nn10069296\nn10069981\nn10070108\nn10070377\nn10070449\nn10070563\nn10070711\nn10071332\nn10071557\nn10072054\nn10074249\nn10074578\nn10074735\nn10074841\nn10075299\nn10075693\nn10076224\nn10076483\nn10076604\nn10076957\nn10077106\nn10077593\nn10077879\nn10078131\nn10078719\nn10078806\nn10079399\nn10079893\nn10080117\nn10080508\nn10080869\nn10081204\nn10081842\nn10082043\nn10082299\nn10082423\nn10082562\nn10082687\nn10082997\nn10083677\nn10083823\nn10084043\nn10084295\nn10085101\nn10085869\nn10086383\nn10086744\nn10087434\nn10087736\nn10088200\nn10090745\nn10091349\nn10091450\nn10091564\nn10091651\nn10091861\nn10091997\nn10092488\nn10092643\nn10092794\nn10092978\nn10093167\nn10093475\nn10093818\nn10094320\nn10094584\nn10094782\nn10095265\nn10095420\nn10095769\nn10095869\nn10096126\nn10096508\nn10097262\nn10097477\nn10097590\nn10097842\nn10097995\nn10098245\nn10098388\nn10098517\nn10098624\nn10098710\nn10098862\nn10099002\nn10099375\nn10101308\nn10101634\nn10101981\nn10102800\nn10103155\nn10103228\nn10103921\nn10104064\nn10104487\nn10104756\nn10104888\
nn10105085\nn10105733\nn10105906\nn10106387\nn10106509\nn10106995\nn10107173\nn10107303\nn10108018\nn10108089\nn10108464\nn10108832\nn10109443\nn10109662\nn10109826\nn10110093\nn10110731\nn10110893\nn10111358\nn10111779\nn10111903\nn10112129\nn10113249\nn10113583\nn10113869\nn10114476\nn10114550\nn10114662\nn10115430\nn10115946\nn10116370\nn10116478\nn10116702\nn10117017\nn10117267\nn10117415\nn10117739\nn10117851\nn10118301\nn10118743\nn10118844\nn10119609\nn10120330\nn10120671\nn10121026\nn10121246\nn10121714\nn10121800\nn10122300\nn10122531\nn10123122\nn10123844\nn10126177\nn10126424\nn10126708\nn10127186\nn10127689\nn10128519\nn10128748\nn10129338\nn10129825\nn10130686\nn10130877\nn10131151\nn10131268\nn10131590\nn10131815\nn10132035\nn10132502\nn10134178\nn10134396\nn10134760\nn10134982\nn10135129\nn10135197\nn10135297\nn10136615\nn10136959\nn10137825\nn10138369\nn10138472\nn10139077\nn10139651\nn10140051\nn10140597\nn10140683\nn10140783\nn10140929\nn10141364\nn10141732\nn10142166\nn10142391\nn10142537\nn10142747\nn10142946\nn10143172\nn10143595\nn10143725\nn10144338\nn10145239\nn10145340\nn10145480\nn10145590\nn10145774\nn10145902\nn10146002\nn10146104\nn10146416\nn10146816\nn10146927\nn10147121\nn10147262\nn10147710\nn10147935\nn10148305\nn10148825\nn10149436\nn10149867\nn10150071\nn10150794\nn10150940\nn10151133\nn10151261\nn10151367\nn10151570\nn10151760\nn10152306\nn10152616\nn10152763\nn10153155\nn10153414\nn10153594\nn10153865\nn10154013\nn10154186\nn10154601\nn10155222\nn10155600\nn10155849\nn10156629\nn10156831\nn10157016\nn10157128\nn10157271\nn10158506\nn10159045\nn10159289\nn10159533\nn10160188\nn10160280\nn10160412\nn10161622\nn10162016\nn10162194\nn10162354\nn10164025\nn10164233\nn10164492\nn10165448\nn10166189\nn10166394\nn10167152\nn10167361\nn10167565\nn10167838\nn10168012\nn10168183\nn10168584\nn10168837\nn10169147\nn10169241\nn10169419\nn10169796\nn10170060\nn10170681\nn10170866\nn10171219\nn10171456\nn10171567\nn10172080\nn10173410\nn1017357
9\nn10173665\nn10173771\nn10174253\nn10174330\nn10174445\nn10174589\nn10174695\nn10174971\nn10175248\nn10175725\nn10176913\nn10177150\nn10178077\nn10178216\nn10179069\nn10180580\nn10180791\nn10180923\nn10181445\nn10181547\nn10181799\nn10181878\nn10182190\nn10182402\nn10183347\nn10183931\nn10184505\nn10185148\nn10185483\nn10185793\nn10186068\nn10186143\nn10186216\nn10186350\nn10186686\nn10186774\nn10187130\nn10187491\nn10187990\nn10188715\nn10188856\nn10188957\nn10189278\nn10189597\nn10190122\nn10190516\nn10191001\nn10191388\nn10191613\nn10192839\nn10193650\nn10194231\nn10194775\nn10195056\nn10195155\nn10195261\nn10195593\nn10196404\nn10196725\nn10197392\nn10198437\nn10198832\nn10199251\nn10200246\nn10200781\nn10202225\nn10202624\nn10202763\nn10203949\nn10204177\nn10204833\nn10205231\nn10205344\nn10205457\nn10205714\nn10206173\nn10206506\nn10206629\nn10207077\nn10207169\nn10208189\nn10208847\nn10208950\nn10209082\nn10209731\nn10210137\nn10210512\nn10210648\nn10210911\nn10211036\nn10211666\nn10211830\nn10212231\nn10212501\nn10212780\nn10213034\nn10213429\nn10214062\nn10214390\nn10215623\nn10216106\nn10216403\nn10217208\nn10218043\nn10218164\nn10218292\nn10219240\nn10219453\nn10219879\nn10220080\nn10220924\nn10221312\nn10221520\nn10222170\nn10222259\nn10222497\nn10222716\nn10223069\nn10223177\nn10223606\nn10224578\nn10225219\nn10225931\nn10226413\nn10227166\nn10227266\nn10227393\nn10227490\nn10227698\nn10227793\nn10227985\nn10228278\nn10228468\nn10228592\nn10228712\nn10229883\nn10230216\nn10233248\nn10235024\nn10235269\nn10235385\nn10236304\nn10236521\nn10236842\nn10237069\nn10237196\nn10237464\nn10237556\nn10237676\nn10237799\nn10238272\nn10238375\nn10239928\nn10240082\nn10240235\nn10240417\nn10240821\nn10241024\nn10241300\nn10242328\nn10243137\nn10243273\nn10243483\nn10243664\nn10243872\nn10244108\nn10244359\nn10244913\nn10245029\nn10245341\nn10245507\nn10245639\nn10245863\nn10246317\nn10246395\nn10246703\nn10247358\nn10247880\nn10248008\nn10248198\nn10248377\nn10249
191\nn10249270\nn10249459\nn10249869\nn10249950\nn10250712\nn10251329\nn10251612\nn10252075\nn10252222\nn10252354\nn10252547\nn10253122\nn10253296\nn10253479\nn10253611\nn10253703\nn10255459\nn10257221\nn10258602\nn10258786\nn10259348\nn10259780\nn10259997\nn10260473\nn10260706\nn10260800\nn10261211\nn10261511\nn10261624\nn10261862\nn10262343\nn10262445\nn10262561\nn10262655\nn10262880\nn10263146\nn10263411\nn10263790\nn10265281\nn10265801\nn10265891\nn10266016\nn10266328\nn10266848\nn10267166\nn10267311\nn10267865\nn10268629\nn10269199\nn10269289\nn10271677\nn10272782\nn10272913\nn10273064\nn10274173\nn10274318\nn10274815\nn10275249\nn10275395\nn10275848\nn10276045\nn10276477\nn10276942\nn10277027\nn10277638\nn10277815\nn10277912\nn10278456\nn10279018\nn10279778\nn10280034\nn10280130\nn10280598\nn10280674\nn10281546\nn10281770\nn10281896\nn10282482\nn10282672\nn10283170\nn10283366\nn10283546\nn10284064\nn10284871\nn10284965\nn10286282\nn10286539\nn10286749\nn10288964\nn10289039\nn10289176\nn10289462\nn10289766\nn10290422\nn10290541\nn10290813\nn10290919\nn10291110\nn10291469\nn10291822\nn10291942\nn10292316\nn10293332\nn10293590\nn10293861\nn10294020\nn10294139\nn10295371\nn10295479\nn10296176\nn10296444\nn10297234\nn10297367\nn10297531\nn10297841\nn10298202\nn10298271\nn10298647\nn10298912\nn10299125\nn10299250\nn10299700\nn10299875\nn10300041\nn10300154\nn10300303\nn10300500\nn10300654\nn10300829\nn10302576\nn10302700\nn10302905\nn10303037\nn10303814\nn10304086\nn10304650\nn10304914\nn10305635\nn10305802\nn10306004\nn10306279\nn10306496\nn10306595\nn10306890\nn10307114\nn10308066\nn10308168\nn10308275\nn10308504\nn10308653\nn10308732\nn10310783\nn10311506\nn10311661\nn10312287\nn10312491\nn10312600\nn10313000\nn10313239\nn10313441\nn10313724\nn10314054\nn10314182\nn10314517\nn10314836\nn10315217\nn10315456\nn10315561\nn10315730\nn10316360\nn10316527\nn10316862\nn10317007\nn10317500\nn10317963\nn10318293\nn10318607\nn10318686\nn10319313\nn10320484\nn10320863\nn103
21126\nn10321340\nn10321632\nn10321882\nn10322238\nn10323634\nn10323752\nn10323999\nn10324560\nn10325549\nn10325774\nn10326776\nn10327143\nn10327987\nn10328123\nn10328328\nn10328437\nn10328696\nn10328941\nn10329035\nn10330593\nn10330931\nn10331098\nn10331167\nn10331258\nn10331347\nn10331841\nn10332110\nn10332385\nn10332861\nn10332953\nn10333044\nn10333165\nn10333317\nn10333439\nn10333601\nn10333838\nn10334009\nn10334461\nn10334782\nn10335246\nn10335801\nn10335931\nn10336411\nn10336904\nn10337488\nn10338231\nn10338391\nn10339179\nn10339251\nn10339717\nn10340312\nn10341243\nn10341343\nn10341446\nn10341573\nn10341955\nn10342180\nn10342367\nn10342543\nn10342893\nn10342992\nn10343088\nn10343355\nn10343449\nn10343554\nn10343869\nn10344121\nn10344203\nn10344319\nn10344656\nn10344774\nn10345015\nn10345100\nn10345302\nn10345422\nn10345659\nn10346015\nn10347204\nn10347446\nn10348526\nn10349243\nn10349750\nn10349836\nn10350220\nn10350774\nn10351064\nn10353016\nn10353355\nn10353928\nn10354265\nn10354754\nn10355142\nn10355306\nn10355449\nn10355688\nn10355806\nn10356450\nn10356877\nn10357012\nn10357613\nn10357737\nn10358032\nn10358124\nn10358575\nn10359117\nn10359422\nn10359546\nn10359659\nn10360366\nn10360747\nn10361060\nn10361194\nn10361296\nn10361525\nn10362003\nn10362319\nn10362557\nn10363445\nn10363573\nn10364198\nn10364502\nn10365514\nn10366145\nn10366276\nn10366966\nn10368291\nn10368528\nn10368624\nn10368711\nn10368798\nn10369095\nn10369317\nn10369417\nn10369528\nn10369699\nn10369955\nn10370381\nn10370955\nn10371052\nn10371221\nn10371330\nn10371450\nn10373390\nn10373525\nn10374541\nn10374849\nn10374943\nn10375052\nn10375314\nn10375402\nn10376523\nn10376890\nn10377021\nn10377185\nn10377291\nn10377542\nn10377633\nn10378026\nn10378113\nn10378780\nn10379376\nn10380126\nn10380499\nn10380672\nn10381804\nn10381981\nn10382157\nn10382302\nn10382480\nn10382710\nn10382825\nn10383094\nn10383237\nn10383505\nn10383816\nn10384214\nn10384392\nn10384496\nn10385566\nn10386196\nn10386754\nn1
0386874\nn10386984\nn10387196\nn10387324\nn10387836\nn10389865\nn10389976\nn10390600\nn10390698\nn10390807\nn10391416\nn10393909\nn10394434\nn10394786\nn10395073\nn10395209\nn10395390\nn10395828\nn10396106\nn10396337\nn10396727\nn10396908\nn10397001\nn10397142\nn10397392\nn10399130\nn10400003\nn10400108\nn10400205\nn10400437\nn10400618\nn10400998\nn10401204\nn10401331\nn10401639\nn10402709\nn10402824\nn10403633\nn10403876\nn10404426\nn10404998\nn10405540\nn10405694\nn10406266\nn10406391\nn10406765\nn10407310\nn10407954\nn10408809\nn10409459\nn10409752\nn10410246\nn10410996\nn10411356\nn10411551\nn10411867\nn10414239\nn10414768\nn10414865\nn10415037\nn10416567\nn10417288\nn10417424\nn10417551\nn10417682\nn10417843\nn10417969\nn10418101\nn10418735\nn10419047\nn10419472\nn10419630\nn10419785\nn10420031\nn10420277\nn10420507\nn10420649\nn10421016\nn10421470\nn10421956\nn10422405\nn10425946\nn10426454\nn10426630\nn10427223\nn10427359\nn10427764\nn10428004\nn10431122\nn10431625\nn10432189\nn10432441\nn10432875\nn10432957\nn10433077\nn10433452\nn10433610\nn10433737\nn10435169\nn10435251\nn10435716\nn10435988\nn10436334\nn10437014\nn10437137\nn10437262\nn10437698\nn10438172\nn10438619\nn10438842\nn10439373\nn10439523\nn10439727\nn10439851\nn10441037\nn10441124\nn10441694\nn10441962\nn10442093\nn10442232\nn10442417\nn10442573\nn10443032\nn10443659\nn10443830\nn10444194\nn10448322\nn10448455\nn10449664\nn10450038\nn10450161\nn10450303\nn10451450\nn10451590\nn10451858\nn10453184\nn10455619\nn10456070\nn10456138\nn10456696\nn10457214\nn10457444\nn10457903\nn10458111\nn10458356\nn10458596\nn10459882\nn10460033\nn10461060\nn10462588\nn10462751\nn10462860\nn10464052\nn10464542\nn10464711\nn10464870\nn10465002\nn10465451\nn10465831\nn10466198\nn10466564\nn10466918\nn10467179\nn10467395\nn10468750\nn10469611\nn10469874\nn10470779\nn10471640\nn10471732\nn10471859\nn10472129\nn10472447\nn10473453\nn10473562\nn10473789\nn10473917\nn10474064\nn10474343\nn10474446\nn10474645\nn10475835\n
n10475940\nn10476467\nn10477713\nn10477955\nn10478118\nn10478293\nn10478462\nn10478827\nn10478960\nn10479135\nn10479328\nn10481167\nn10481268\nn10482054\nn10482220\nn10482587\nn10482921\nn10483138\nn10483395\nn10483799\nn10483890\nn10484858\nn10485298\nn10485883\nn10486166\nn10486236\nn10486561\nn10487182\nn10487363\nn10487592\nn10488016\nn10488309\nn10488656\nn10489426\nn10490421\nn10491998\nn10492086\nn10492727\nn10493199\nn10493419\nn10493685\nn10493835\nn10493922\nn10494195\nn10494373\nn10495167\nn10495421\nn10495555\nn10495756\nn10496393\nn10496489\nn10497135\nn10497534\nn10497645\nn10498046\nn10498699\nn10498816\nn10498986\nn10499110\nn10499232\nn10499355\nn10499631\nn10499857\nn10500217\nn10500419\nn10500603\nn10500824\nn10500942\nn10501453\nn10501635\nn10502046\nn10502329\nn10502950\nn10503818\nn10504090\nn10504206\nn10505347\nn10505613\nn10505732\nn10505942\nn10506336\nn10506544\nn10506915\nn10507070\nn10507380\nn10507482\nn10507565\nn10507692\nn10508141\nn10508379\nn10508710\nn10509063\nn10509161\nn10509810\nn10510245\nn10510974\nn10511771\nn10512201\nn10512372\nn10512708\nn10512859\nn10513509\nn10513823\nn10513938\nn10514051\nn10514121\nn10514255\nn10514429\nn10514784\nn10515863\nn10516527\nn10517137\nn10517283\nn10518349\nn10519126\nn10519494\nn10519984\nn10520286\nn10520544\nn10520964\nn10521100\nn10521662\nn10521853\nn10522035\nn10522324\nn10522759\nn10523341\nn10524076\nn10524223\nn10524869\nn10525134\nn10525436\nn10525617\nn10525878\nn10526534\nn10527147\nn10527334\nn10528023\nn10528148\nn10528493\nn10529231\nn10530150\nn10530383\nn10530571\nn10530959\nn10531109\nn10531445\nn10531838\nn10533874\nn10533983\nn10536134\nn10536274\nn10536416\nn10537708\nn10537906\nn10538629\nn10538733\nn10538853\nn10539015\nn10539160\nn10539278\nn10540114\nn10540252\nn10540656\nn10541833\nn10542608\nn10542761\nn10542888\nn10543161\nn10543937\nn10544232\nn10544748\nn10545792\nn10546428\nn10546633\nn10548419\nn10548537\nn10548681\nn10549510\nn10550252\nn10550369\nn10550468
\nn10551576\nn10552393\nn10553140\nn10553235\nn10554024\nn10554141\nn10554846\nn10555059\nn10555430\nn10556033\nn10556518\nn10556704\nn10556825\nn10557246\nn10557854\nn10559009\nn10559288\nn10559508\nn10559683\nn10559996\nn10560106\nn10560637\nn10561222\nn10561320\nn10561736\nn10562135\nn10562283\nn10562509\nn10562968\nn10563314\nn10563403\nn10563711\nn10564098\nn10565502\nn10566072\nn10567613\nn10567722\nn10567848\nn10568200\nn10568358\nn10568443\nn10568608\nn10568915\nn10569011\nn10569179\nn10570019\nn10570704\nn10571907\nn10572706\nn10572889\nn10573957\nn10574311\nn10574538\nn10574840\nn10575463\nn10575594\nn10575787\nn10576223\nn10576316\nn10576676\nn10576818\nn10576962\nn10577182\nn10577284\nn10577710\nn10577820\nn10578021\nn10578162\nn10578471\nn10578656\nn10579062\nn10579549\nn10580030\nn10580437\nn10580535\nn10581648\nn10581890\nn10582604\nn10582746\nn10583387\nn10583790\nn10585077\nn10585217\nn10585628\nn10586166\nn10586265\nn10586444\nn10586903\nn10586998\nn10588074\nn10588357\nn10588724\nn10588965\nn10589666\nn10590146\nn10590239\nn10590452\nn10590903\nn10591072\nn10591811\nn10592049\nn10592811\nn10593521\nn10594147\nn10594523\nn10594857\nn10595164\nn10595647\nn10596517\nn10596899\nn10597505\nn10597745\nn10597889\nn10598013\nn10598181\nn10598459\nn10598904\nn10599215\nn10599806\nn10601234\nn10601362\nn10602119\nn10602470\nn10602985\nn10603528\nn10603851\nn10604275\nn10604380\nn10604634\nn10604880\nn10604979\nn10605253\nn10605737\nn10607291\nn10607478\nn10609092\nn10609198\nn10610465\nn10610850\nn10611267\nn10611613\nn10612210\nn10612373\nn10612518\nn10613996\nn10614507\nn10614629\nn10615179\nn10615334\nn10616578\nn10617024\nn10617193\nn10617397\nn10618234\nn10618342\nn10618465\nn10618685\nn10618848\nn10619492\nn10619642\nn10619888\nn10620212\nn10620586\nn10620758\nn10621294\nn10621400\nn10621514\nn10622053\nn10624074\nn10624310\nn10624437\nn10624540\nn10625860\nn10626630\nn10627252\nn10628097\nn10628644\nn10629329\nn10629647\nn10629939\nn10630093\nn106301
88\nn10631131\nn10631309\nn10631654\nn10632576\nn10633298\nn10633450\nn10634464\nn10634849\nn10634990\nn10635788\nn10636488\nn10637483\nn10638922\nn10639238\nn10639359\nn10639637\nn10639817\nn10641223\nn10642596\nn10642705\nn10643095\nn10643837\nn10643937\nn10644598\nn10645017\nn10645223\nn10646032\nn10646140\nn10646433\nn10646641\nn10646780\nn10646942\nn10647745\nn10648237\nn10648696\nn10649197\nn10649308\nn10650162\nn10652605\nn10652703\nn10654015\nn10654211\nn10654321\nn10654827\nn10654932\nn10655169\nn10655442\nn10655594\nn10655730\nn10655986\nn10656120\nn10656223\nn10656969\nn10657306\nn10657556\nn10657835\nn10658304\nn10659042\nn10659762\nn10660128\nn10660621\nn10660883\nn10661002\nn10661216\nn10661563\nn10661732\nn10663315\nn10663549\nn10665302\nn10665587\nn10665698\nn10666752\nn10667477\nn10667709\nn10667863\nn10668450\nn10668666\nn10669991\nn10671042\nn10671613\nn10671736\nn10671898\nn10672371\nn10672540\nn10672662\nn10673296\nn10673776\nn10674130\nn10674713\nn10675010\nn10675142\nn10675609\nn10676018\nn10676434\nn10676569\nn10678937\nn10679174\nn10679503\nn10679610\nn10679723\nn10680609\nn10680796\nn10681194\nn10681557\nn10682713\nn10682953\nn10683675\nn10684146\nn10684630\nn10684827\nn10685398\nn10686073\nn10686517\nn10686694\nn10686885\nn10688356\nn10688811\nn10689306\nn10690268\nn10690421\nn10690648\nn10691318\nn10691937\nn10692090\nn10692482\nn10692883\nn10693235\nn10693334\nn10693824\nn10694258\nn10694939\nn10695450\nn10696101\nn10696508\nn10697135\nn10697282\nn10698368\nn10699558\nn10699752\nn10699981\nn10700105\nn10700201\nn10700640\nn10700963\nn10701180\nn10701644\nn10701962\nn10702167\nn10702615\nn10703221\nn10703336\nn10703480\nn10703692\nn10704238\nn10704712\nn10704886\nn10705448\nn10705615\nn10706812\nn10707134\nn10707233\nn10707707\nn10708292\nn10708454\nn10709529\nn10710171\nn10710259\nn10710778\nn10710913\nn10711483\nn10711766\nn10712229\nn10712374\nn10712474\nn10712690\nn10712835\nn10713254\nn10713686\nn10713843\nn10714195\nn10715030\nn1071
5347\nn10715789\nn10716576\nn10716864\nn10717055\nn10717196\nn10717337\nn10718131\nn10718349\nn10718509\nn10718665\nn10718952\nn10719036\nn10719132\nn10719267\nn10719807\nn10720197\nn10720453\nn10720964\nn10721124\nn10721321\nn10721612\nn10721708\nn10721819\nn10722029\nn10722575\nn10722965\nn10723230\nn10723597\nn10724132\nn10724372\nn10724570\nn10725280\nn10726031\nn10726786\nn10727016\nn10727171\nn10727458\nn10728117\nn10728233\nn10728624\nn10728998\nn10729330\nn10730542\nn10730728\nn10731013\nn10731732\nn10732010\nn10732521\nn10732854\nn10732967\nn10733820\nn10734394\nn10734741\nn10734891\nn10734963\nn10735173\nn10735298\nn10735984\nn10737103\nn10737264\nn10738111\nn10738215\nn10738670\nn10738871\nn10739135\nn10739297\nn10739391\nn10740594\nn10740732\nn10740868\nn10741152\nn10741367\nn10741493\nn10742005\nn10742111\nn10742546\nn10742997\nn10743124\nn10743356\nn10744078\nn10744164\nn10745006\nn10745770\nn10746931\nn10747119\nn10747424\nn10747548\nn10747965\nn10748142\nn10748506\nn10748620\nn10749928\nn10750031\nn10750188\nn10750640\nn10751026\nn10751152\nn10751265\nn10751710\nn10752480\nn10753061\nn10753182\nn10753339\nn10753442\nn10753989\nn10754189\nn10754281\nn10754449\nn10755080\nn10755164\nn10755394\nn10755648\nn10756061\nn10756148\nn10756261\nn10756641\nn10756837\nn10757050\nn10757492\nn10758337\nn10758445\nn10758949\nn10759151\nn10759331\nn10759982\nn10760199\nn10760622\nn10760951\nn10761190\nn10761326\nn10761519\nn10762212\nn10762480\nn10763075\nn10763245\nn10763383\nn10763620\nn10764465\nn10764622\nn10764719\nn10765305\nn10765587\nn10765679\nn10765885\nn10766260\nn10768148\nn10768272\nn10768903\nn10769084\nn10769188\nn10769321\nn10769459\nn10771066\nn10772092\nn10772580\nn10772937\nn10773665\nn10773800\nn10774329\nn10774756\nn10775003\nn10775128\nn10776052\nn10776339\nn10776887\nn10777299\nn10778044\nn10778148\nn10778711\nn10778999\nn10779610\nn10779897\nn10779995\nn10780284\nn10780632\nn10781236\nn10781817\nn10782362\nn10782471\nn10782791\nn10782940\nn10
783240\nn10783539\nn10783646\nn10783734\nn10784113\nn10784544\nn10784922\nn10785480\nn10787470\nn10788852\nn10789415\nn10789709\nn10791115\nn10791221\nn10791820\nn10791890\nn10792335\nn10792506\nn10792856\nn10793570\nn10793799\nn10794014\nn10801561\nn10801802\nn10802507\nn10802621\nn10802953\nn10803031\nn10803282\nn10803978\nn10804287\nn10804636\nn10804732\nn10805501\nn10806113\nn10994097\nn11100798\nn11196627\nn11242849\nn11318824\nn11346873\nn11448153\nn11487732\nn11508382\nn11511327\nn11524451\nn11530008\nn11531193\nn11531334\nn11532682\nn11533212\nn11533999\nn11536567\nn11536673\nn11537327\nn11539289\nn11542137\nn11542640\nn11544015\nn11545350\nn11545524\nn11545714\nn11547562\nn11547855\nn11548728\nn11548870\nn11549009\nn11549245\nn11549779\nn11549895\nn11552133\nn11552386\nn11552594\nn11552806\nn11552976\nn11553240\nn11553522\nn11596108\nn11597657\nn11598287\nn11598686\nn11598886\nn11599324\nn11600372\nn11601177\nn11601333\nn11601918\nn11602091\nn11602478\nn11602873\nn11603246\nn11603462\nn11603835\nn11604046\nn11608250\nn11609475\nn11609684\nn11609862\nn11610047\nn11610215\nn11610437\nn11610602\nn11610823\nn11611087\nn11611233\nn11611356\nn11611561\nn11611758\nn11612018\nn11612235\nn11612349\nn11612575\nn11612923\nn11613219\nn11613459\nn11613692\nn11613867\nn11614039\nn11614250\nn11614420\nn11614713\nn11615026\nn11615259\nn11615387\nn11615607\nn11615812\nn11615967\nn11616260\nn11616486\nn11616662\nn11616852\nn11617090\nn11617272\nn11617631\nn11617878\nn11618079\nn11618290\nn11618525\nn11618861\nn11619227\nn11619455\nn11619687\nn11619845\nn11620016\nn11620389\nn11620673\nn11621029\nn11621281\nn11621547\nn11621727\nn11621950\nn11622184\nn11622368\nn11622591\nn11622771\nn11623105\nn11623815\nn11623967\nn11624192\nn11624531\nn11625003\nn11625223\nn11625391\nn11625632\nn11625804\nn11626010\nn11626152\nn11626409\nn11626585\nn11626826\nn11627168\nn11627512\nn11627714\nn11627908\nn11628087\nn11628456\nn11628793\nn11629047\nn11629354\nn11630017\nn11630489\nn11631159\nn
11631405\nn11631619\nn11631854\nn11631985\nn11632167\nn11632376\nn11632619\nn11632929\nn11633284\nn11634736\nn11635152\nn11635433\nn11635830\nn11636204\nn11636835\nn11639084\nn11639306\nn11639445\nn11640132\nn11643835\nn11644046\nn11644226\nn11644462\nn11644872\nn11645163\nn11645590\nn11645914\nn11646167\nn11646344\nn11646517\nn11646694\nn11646955\nn11647306\nn11647703\nn11647868\nn11648039\nn11648268\nn11648776\nn11649150\nn11649359\nn11649878\nn11650160\nn11650307\nn11650430\nn11650558\nn11650759\nn11652039\nn11652217\nn11652376\nn11652578\nn11652753\nn11652966\nn11653126\nn11653570\nn11653904\nn11654293\nn11654438\nn11654984\nn11655152\nn11655592\nn11655974\nn11656123\nn11656549\nn11656771\nn11657585\nn11658331\nn11658544\nn11658709\nn11659248\nn11659627\nn11660300\nn11661372\nn11661909\nn11662128\nn11662371\nn11662585\nn11662937\nn11663263\nn11664418\nn11665372\nn11666854\nn11668117\nn11669786\nn11669921\nn11672269\nn11672400\nn11674019\nn11674332\nn11675025\nn11675404\nn11675738\nn11676500\nn11676743\nn11676850\nn11677485\nn11677902\nn11678010\nn11678299\nn11678377\nn11679378\nn11680457\nn11680596\nn11682659\nn11683216\nn11683838\nn11684264\nn11684499\nn11684654\nn11685091\nn11685621\nn11686195\nn11686652\nn11686780\nn11686912\nn11687071\nn11687432\nn11687789\nn11687964\nn11688069\nn11688378\nn11689197\nn11689367\nn11689483\nn11689678\nn11689815\nn11689957\nn11690088\nn11690254\nn11690455\nn11691046\nn11691857\nn11692265\nn11692792\nn11693981\nn11694300\nn11694469\nn11694664\nn11694866\nn11695085\nn11695285\nn11695599\nn11695974\nn11696450\nn11696935\nn11697560\nn11697802\nn11698042\nn11698245\nn11699442\nn11699751\nn11700058\nn11700279\nn11700864\nn11701066\nn11701302\nn11702713\nn11703669\nn11704093\nn11704620\nn11704791\nn11705171\nn11705387\nn11705573\nn11705776\nn11706325\nn11706761\nn11706942\nn11707229\nn11707827\nn11708658\nn11708857\nn11709045\nn11709205\nn11709674\nn11710136\nn11710393\nn11710658\nn11710827\nn11710987\nn11711289\nn11711537\nn11711764\
nn11711971\nn11712282\nn11713164\nn11713370\nn11713763\nn11714382\nn11715430\nn11715678\nn11716698\nn11717399\nn11717577\nn11718296\nn11718681\nn11719286\nn11720353\nn11720643\nn11720891\nn11721337\nn11721642\nn11722036\nn11722342\nn11722466\nn11722621\nn11722982\nn11723227\nn11723452\nn11723770\nn11723986\nn11724109\nn11724660\nn11725015\nn11725311\nn11725480\nn11725623\nn11725821\nn11725973\nn11726145\nn11726269\nn11726433\nn11726707\nn11727091\nn11727358\nn11727540\nn11727738\nn11728099\nn11728769\nn11728945\nn11729142\nn11729478\nn11729860\nn11730015\nn11730458\nn11730602\nn11730750\nn11730933\nn11731157\nn11731659\nn11732052\nn11732567\nn11733054\nn11733312\nn11733548\nn11734493\nn11734698\nn11735053\nn11735570\nn11735977\nn11736362\nn11736694\nn11736851\nn11737009\nn11737125\nn11737534\nn11738547\nn11738997\nn11739365\nn11739978\nn11740414\nn11741175\nn11741350\nn11741575\nn11741797\nn11742310\nn11742878\nn11744011\nn11744108\nn11744471\nn11745817\nn11746600\nn11747468\nn11748002\nn11748811\nn11749112\nn11749603\nn11750173\nn11750508\nn11750989\nn11751765\nn11751974\nn11752578\nn11752798\nn11752937\nn11753143\nn11753355\nn11753562\nn11753700\nn11754893\nn11756092\nn11756329\nn11756669\nn11756870\nn11757017\nn11757190\nn11757653\nn11757851\nn11758122\nn11758276\nn11758483\nn11758799\nn11759224\nn11759404\nn11759609\nn11759853\nn11760785\nn11761202\nn11761650\nn11761836\nn11762018\nn11762433\nn11762927\nn11763142\nn11763625\nn11763874\nn11764478\nn11764814\nn11765568\nn11766046\nn11766189\nn11766432\nn11767354\nn11767877\nn11768816\nn11769176\nn11769621\nn11769803\nn11770256\nn11771147\nn11771539\nn11771746\nn11771924\nn11772408\nn11772879\nn11773408\nn11773628\nn11773987\nn11774513\nn11774972\nn11775340\nn11775626\nn11776234\nn11777080\nn11778092\nn11778257\nn11779300\nn11780148\nn11780424\nn11781176\nn11782036\nn11782266\nn11782761\nn11782878\nn11783162\nn11783920\nn11784126\nn11784497\nn11785276\nn11785668\nn11785875\nn11786131\nn11786539\nn11786843\nn1178719
0\nn11788039\nn11788727\nn11789066\nn11789438\nn11789589\nn11789962\nn11790089\nn11790788\nn11790936\nn11791341\nn11791569\nn11792029\nn11792341\nn11792742\nn11793403\nn11793779\nn11794024\nn11794139\nn11794519\nn11795049\nn11795216\nn11795580\nn11796005\nn11796188\nn11797321\nn11797508\nn11797981\nn11798270\nn11798496\nn11798688\nn11798978\nn11799331\nn11799732\nn11800236\nn11800565\nn11801392\nn11801665\nn11801891\nn11802410\nn11802586\nn11802800\nn11802995\nn11805255\nn11805544\nn11805956\nn11806219\nn11806369\nn11806521\nn11806679\nn11806814\nn11807108\nn11807525\nn11807696\nn11807979\nn11808299\nn11808468\nn11808721\nn11808932\nn11809094\nn11809271\nn11809437\nn11809594\nn11809754\nn11810030\nn11810358\nn11811059\nn11811473\nn11811706\nn11811921\nn11812094\nn11812910\nn11813077\nn11814584\nn11814996\nn11815491\nn11815721\nn11815918\nn11816121\nn11816336\nn11816649\nn11816829\nn11817160\nn11817501\nn11817914\nn11818069\nn11818636\nn11819509\nn11819912\nn11820965\nn11821184\nn11822300\nn11823043\nn11823305\nn11823436\nn11823756\nn11824146\nn11824344\nn11824747\nn11825351\nn11825749\nn11826198\nn11826569\nn11827541\nn11828577\nn11828973\nn11829205\nn11829672\nn11829922\nn11830045\nn11830252\nn11830400\nn11830714\nn11830906\nn11831100\nn11831297\nn11831521\nn11832214\nn11832480\nn11832671\nn11832899\nn11833373\nn11833749\nn11834272\nn11834654\nn11834890\nn11835251\nn11836327\nn11836722\nn11837204\nn11837351\nn11837562\nn11837743\nn11837970\nn11838413\nn11838916\nn11839460\nn11839568\nn11839823\nn11840067\nn11840246\nn11840476\nn11840764\nn11841247\nn11843441\nn11844371\nn11844892\nn11845557\nn11845793\nn11845913\nn11846312\nn11846425\nn11846765\nn11847169\nn11848479\nn11848867\nn11849271\nn11849467\nn11849871\nn11849983\nn11850521\nn11850918\nn11851258\nn11851578\nn11851839\nn11852028\nn11852148\nn11852531\nn11853079\nn11853356\nn11853813\nn11854479\nn11855274\nn11855435\nn11855553\nn11855842\nn11856573\nn11857696\nn11857875\nn11858077\nn11858703\nn11858814\nn11859
275\nn11859472\nn11859737\nn11860208\nn11860555\nn11861238\nn11861487\nn11861641\nn11861853\nn11862835\nn11863467\nn11863877\nn11865071\nn11865276\nn11865429\nn11865574\nn11865874\nn11866248\nn11866706\nn11867311\nn11868814\nn11869351\nn11869689\nn11870044\nn11870418\nn11870747\nn11871059\nn11871496\nn11871748\nn11872146\nn11872324\nn11872658\nn11873182\nn11873612\nn11874081\nn11874423\nn11874878\nn11875523\nn11875691\nn11875938\nn11876204\nn11876432\nn11876634\nn11876803\nn11877193\nn11877283\nn11877473\nn11877646\nn11877860\nn11878101\nn11878283\nn11878633\nn11879054\nn11879722\nn11881189\nn11882074\nn11882237\nn11882426\nn11882636\nn11882821\nn11882972\nn11883328\nn11883628\nn11883945\nn11884384\nn11884967\nn11885856\nn11887119\nn11887310\nn11887476\nn11887750\nn11888061\nn11888424\nn11888800\nn11889205\nn11889619\nn11890022\nn11890150\nn11890884\nn11891175\nn11892029\nn11892181\nn11892637\nn11892817\nn11893640\nn11893916\nn11894327\nn11894558\nn11894770\nn11895092\nn11895472\nn11895714\nn11896141\nn11896722\nn11897116\nn11897466\nn11898639\nn11898775\nn11899223\nn11899762\nn11899921\nn11900569\nn11901294\nn11901452\nn11901597\nn11901759\nn11901977\nn11902200\nn11902389\nn11902709\nn11902982\nn11903333\nn11903671\nn11904109\nn11904274\nn11905392\nn11905749\nn11906127\nn11906514\nn11906917\nn11907100\nn11907405\nn11907689\nn11908549\nn11908846\nn11909864\nn11910271\nn11910460\nn11910666\nn11915214\nn11915658\nn11915899\nn11916467\nn11916696\nn11917407\nn11917835\nn11918286\nn11918473\nn11918808\nn11919447\nn11919761\nn11919975\nn11920133\nn11920498\nn11920663\nn11920998\nn11921395\nn11921792\nn11922661\nn11922755\nn11922839\nn11922926\nn11923174\nn11923397\nn11923637\nn11924014\nn11924445\nn11924849\nn11925303\nn11925450\nn11925898\nn11926365\nn11926833\nn11926976\nn11927215\nn11927740\nn11928352\nn11928858\nn11929743\nn11930038\nn11930203\nn11930353\nn11930571\nn11930788\nn11930994\nn11931135\nn11931540\nn11931918\nn11932745\nn11932927\nn11933099\nn11933257\nn119
33387\nn11933546\nn11933728\nn11933903\nn11934041\nn11934239\nn11934463\nn11934616\nn11934807\nn11935027\nn11935187\nn11935330\nn11935469\nn11935627\nn11935715\nn11935794\nn11935877\nn11935953\nn11936027\nn11936113\nn11936199\nn11936287\nn11936369\nn11936448\nn11936539\nn11936624\nn11936707\nn11936782\nn11936864\nn11936946\nn11937023\nn11937102\nn11937195\nn11937278\nn11937360\nn11937446\nn11937692\nn11938556\nn11939180\nn11939699\nn11940006\nn11940349\nn11940599\nn11940750\nn11941094\nn11941478\nn11941924\nn11942659\nn11943133\nn11943407\nn11943660\nn11943992\nn11944196\nn11944751\nn11944954\nn11945367\nn11945514\nn11945783\nn11946051\nn11946313\nn11946727\nn11946918\nn11947251\nn11947629\nn11947802\nn11948044\nn11948264\nn11948469\nn11948864\nn11949015\nn11949402\nn11949857\nn11950345\nn11950686\nn11950877\nn11951052\nn11951511\nn11951820\nn11952346\nn11952541\nn11953038\nn11953339\nn11953610\nn11953884\nn11954161\nn11954345\nn11954484\nn11954642\nn11954798\nn11955040\nn11955153\nn11955532\nn11955896\nn11956348\nn11956850\nn11957317\nn11957514\nn11957678\nn11958080\nn11958499\nn11958888\nn11959259\nn11959632\nn11959862\nn11960245\nn11960673\nn11961100\nn11961446\nn11961871\nn11962272\nn11962667\nn11962994\nn11963572\nn11963932\nn11964446\nn11964848\nn11965218\nn11965627\nn11965962\nn11966083\nn11966215\nn11966385\nn11966617\nn11966896\nn11967142\nn11967315\nn11967744\nn11967878\nn11968519\nn11968704\nn11968931\nn11969166\nn11969607\nn11969806\nn11970101\nn11970298\nn11970586\nn11971248\nn11971406\nn11971783\nn11971927\nn11972291\nn11972759\nn11972959\nn11973341\nn11973634\nn11973749\nn11974373\nn11974557\nn11974888\nn11975254\nn11976170\nn11976314\nn11976511\nn11976933\nn11977303\nn11977660\nn11977887\nn11978233\nn11978551\nn11978713\nn11978961\nn11979187\nn11979354\nn11979527\nn11979715\nn11979964\nn11980318\nn11980682\nn11981192\nn11981475\nn11982115\nn11982545\nn11982939\nn11983375\nn11983606\nn11984144\nn11984542\nn11985053\nn11985321\nn11985739\nn11985903\nn1
1986511\nn11986729\nn11987126\nn11987349\nn11987511\nn11988132\nn11988596\nn11988893\nn11989087\nn11989393\nn11989869\nn11990167\nn11990313\nn11990627\nn11990920\nn11991263\nn11991549\nn11991777\nn11992479\nn11992806\nn11993203\nn11993444\nn11993675\nn11994150\nn11995092\nn11995396\nn11996251\nn11996677\nn11997032\nn11997160\nn11997969\nn11998492\nn11998888\nn11999278\nn11999656\nn12000191\nn12001294\nn12001707\nn12001924\nn12002428\nn12002651\nn12002826\nn12003167\nn12003696\nn12004120\nn12004547\nn12004987\nn12005656\nn12006306\nn12006766\nn12006930\nn12007196\nn12007406\nn12007766\nn12008252\nn12008487\nn12008749\nn12009047\nn12009420\nn12009792\nn12010628\nn12010815\nn12011370\nn12011620\nn12012111\nn12012253\nn12012510\nn12013035\nn12013511\nn12013701\nn12014085\nn12014355\nn12014923\nn12015221\nn12015525\nn12015959\nn12016434\nn12016567\nn12016777\nn12016914\nn12017127\nn12017326\nn12017511\nn12017664\nn12017853\nn12018014\nn12018100\nn12018188\nn12018271\nn12018363\nn12018447\nn12018530\nn12018760\nn12019035\nn12019827\nn12020184\nn12020507\nn12020736\nn12020941\nn12022054\nn12022382\nn12022821\nn12023108\nn12023407\nn12023726\nn12024176\nn12024445\nn12024690\nn12024805\nn12025220\nn12026018\nn12026476\nn12026981\nn12027222\nn12027658\nn12028424\nn12029039\nn12029635\nn12030092\nn12030654\nn12030908\nn12031139\nn12031388\nn12031547\nn12031927\nn12032429\nn12032686\nn12033139\nn12033504\nn12033709\nn12034141\nn12034384\nn12034594\nn12035631\nn12035907\nn12036067\nn12036226\nn12036939\nn12037499\nn12037691\nn12038038\nn12038208\nn12038406\nn12038585\nn12038760\nn12038898\nn12039317\nn12041446\nn12043444\nn12043673\nn12043836\nn12044041\nn12044467\nn12044784\nn12045157\nn12045514\nn12045860\nn12046028\nn12046428\nn12046815\nn12047345\nn12047884\nn12048056\nn12048399\nn12048928\nn12049282\nn12049562\nn12050533\nn12050959\nn12051103\nn12051514\nn12051792\nn12052267\nn12052447\nn12052787\nn12053405\nn12053690\nn12053962\nn12054195\nn12055073\nn12055516\nn12056099\n
n12056217\nn12056601\nn12056758\nn12056990\nn12057447\nn12057660\nn12057895\nn12058192\nn12058630\nn12058822\nn12059314\nn12059625\nn12060546\nn12061104\nn12061380\nn12061614\nn12062105\nn12062468\nn12062626\nn12062781\nn12063211\nn12063639\nn12064389\nn12064591\nn12065316\nn12065649\nn12065777\nn12066018\nn12066261\nn12066451\nn12066630\nn12066821\nn12067029\nn12067193\nn12067433\nn12067672\nn12067817\nn12068138\nn12068432\nn12068615\nn12069009\nn12069217\nn12069679\nn12070016\nn12070381\nn12070583\nn12070712\nn12071259\nn12071477\nn12071744\nn12072210\nn12072722\nn12073217\nn12073554\nn12073991\nn12074408\nn12074867\nn12075010\nn12075151\nn12075299\nn12075830\nn12076223\nn12076577\nn12076852\nn12077244\nn12077944\nn12078172\nn12078451\nn12078747\nn12079120\nn12079523\nn12079963\nn12080395\nn12080588\nn12080820\nn12081215\nn12081649\nn12082131\nn12083113\nn12083591\nn12083847\nn12084158\nn12084400\nn12084555\nn12084890\nn12085267\nn12085664\nn12086012\nn12086192\nn12086539\nn12086778\nn12087961\nn12088223\nn12088327\nn12088495\nn12088909\nn12089320\nn12089496\nn12089846\nn12090890\nn12091213\nn12091377\nn12091550\nn12091697\nn12091953\nn12092262\nn12092417\nn12092629\nn12092930\nn12093329\nn12093600\nn12093885\nn12094244\nn12094401\nn12094612\nn12095020\nn12095281\nn12095412\nn12095543\nn12095647\nn12095934\nn12096089\nn12096395\nn12096563\nn12096674\nn12097396\nn12097556\nn12098403\nn12098524\nn12098827\nn12099342\nn12100187\nn12101870\nn12102133\nn12103680\nn12103894\nn12104104\nn12104238\nn12104501\nn12104734\nn12105125\nn12105353\nn12105828\nn12105981\nn12106134\nn12106323\nn12107002\nn12107191\nn12107710\nn12107970\nn12108432\nn12108613\nn12108871\nn12109365\nn12109827\nn12110085\nn12110236\nn12110352\nn12110475\nn12110778\nn12111238\nn12111627\nn12112008\nn12112337\nn12112609\nn12112918\nn12113195\nn12113323\nn12113657\nn12114010\nn12114590\nn12115180\nn12116058\nn12116429\nn12116734\nn12117017\nn12117235\nn12117326\nn12117695\nn12117912\nn12118414\nn12118661
\nn12119099\nn12119238\nn12119390\nn12119539\nn12119717\nn12120347\nn12120578\nn12121033\nn12121187\nn12121610\nn12122442\nn12122725\nn12122918\nn12123648\nn12123741\nn12124172\nn12124627\nn12124818\nn12125001\nn12125183\nn12125584\nn12126084\nn12126360\nn12126736\nn12127460\nn12127575\nn12127768\nn12128071\nn12128306\nn12128490\nn12129134\nn12129738\nn12129986\nn12130549\nn12131405\nn12131550\nn12132092\nn12132956\nn12133151\nn12133462\nn12133682\nn12134025\nn12134486\nn12134695\nn12134836\nn12135049\nn12135576\nn12135729\nn12135898\nn12136392\nn12136581\nn12136720\nn12137120\nn12137569\nn12137791\nn12137954\nn12138110\nn12138248\nn12138444\nn12138578\nn12139196\nn12139575\nn12139793\nn12139921\nn12140511\nn12140759\nn12140903\nn12141167\nn12141385\nn12141495\nn12142085\nn12142357\nn12142450\nn12143065\nn12143215\nn12143405\nn12143676\nn12144313\nn12144987\nn12145148\nn12145477\nn12146311\nn12146488\nn12146654\nn12147226\nn12147835\nn12148757\nn12150722\nn12150969\nn12151170\nn12151615\nn12152031\nn12152251\nn12152532\nn12152722\nn12153033\nn12153224\nn12153580\nn12153741\nn12153914\nn12154114\nn12154773\nn12155009\nn12155583\nn12155773\nn12156679\nn12156819\nn12157056\nn12157179\nn12157769\nn12158031\nn12158443\nn12158798\nn12159055\nn12159388\nn12159555\nn12159804\nn12159942\nn12160125\nn12160303\nn12160490\nn12160857\nn12161056\nn12161285\nn12161577\nn12161744\nn12161969\nn12162181\nn12162425\nn12162758\nn12163035\nn12163279\nn12164363\nn12164656\nn12164881\nn12165170\nn12165384\nn12165758\nn12166128\nn12166424\nn12166793\nn12166929\nn12167075\nn12167436\nn12167602\nn12168565\nn12169099\nn12170585\nn12171098\nn12171316\nn12171966\nn12172364\nn12172481\nn12172906\nn12173069\nn12173664\nn12173912\nn12174311\nn12174521\nn12174926\nn12175181\nn12175370\nn12175598\nn12176453\nn12176709\nn12176953\nn12177129\nn12177455\nn12178129\nn12178780\nn12178896\nn12179122\nn12179632\nn12180168\nn12180456\nn12180885\nn12181352\nn12181612\nn12182049\nn12182276\nn12183026\nn121834
52\nn12183816\nn12184095\nn12184468\nn12184912\nn12185254\nn12185859\nn12186352\nn12186554\nn12186839\nn12187247\nn12187663\nn12187891\nn12188289\nn12188635\nn12189429\nn12189779\nn12189987\nn12190410\nn12190869\nn12191240\nn12192132\nn12192877\nn12193334\nn12193665\nn12194147\nn12194613\nn12195391\nn12195533\nn12195734\nn12196129\nn12196336\nn12196527\nn12196694\nn12196954\nn12197359\nn12197601\nn12198286\nn12198793\nn12199266\nn12199399\nn12199790\nn12199982\nn12200143\nn12200504\nn12200905\nn12201331\nn12201580\nn12201938\nn12202936\nn12203529\nn12203699\nn12203896\nn12204032\nn12204175\nn12204730\nn12205460\nn12205694\nn12214789\nn12215022\nn12215210\nn12215579\nn12215824\nn12216215\nn12216628\nn12216968\nn12217453\nn12217851\nn12218274\nn12218490\nn12218868\nn12219668\nn12220019\nn12220496\nn12220829\nn12221191\nn12221368\nn12221522\nn12221801\nn12222090\nn12222493\nn12222900\nn12223160\nn12223569\nn12223764\nn12224978\nn12225222\nn12225349\nn12225563\nn12226932\nn12227658\nn12227909\nn12228229\nn12228387\nn12228689\nn12228886\nn12229111\nn12229651\nn12229887\nn12230540\nn12230794\nn12231192\nn12231709\nn12232114\nn12232280\nn12232851\nn12233249\nn12234318\nn12234669\nn12235051\nn12235479\nn12236160\nn12236546\nn12236768\nn12236977\nn12237152\nn12237486\nn12237641\nn12237855\nn12238756\nn12238913\nn12239240\nn12239647\nn12239880\nn12240150\nn12240477\nn12240965\nn12241192\nn12241426\nn12241880\nn12242123\nn12242409\nn12242850\nn12243109\nn12243693\nn12244153\nn12244458\nn12244650\nn12244819\nn12245319\nn12245695\nn12245885\nn12246037\nn12246232\nn12246773\nn12246941\nn12247202\nn12247407\nn12247963\nn12248141\nn12248359\nn12248574\nn12248780\nn12248941\nn12249122\nn12249294\nn12249542\nn12251001\nn12251278\nn12251740\nn12252168\nn12252383\nn12252866\nn12253229\nn12253487\nn12253664\nn12253835\nn12254168\nn12255225\nn12256112\nn12256325\nn12256522\nn12256708\nn12256920\nn12257570\nn12257725\nn12258101\nn12258885\nn12259316\nn12260799\nn12261359\nn12261571\nn1226
1808\nn12262018\nn12262185\nn12262553\nn12263038\nn12263204\nn12263410\nn12263588\nn12263738\nn12263987\nn12264512\nn12264786\nn12265083\nn12265394\nn12265600\nn12266217\nn12266528\nn12266644\nn12266796\nn12266984\nn12267133\nn12267265\nn12267411\nn12267534\nn12267931\nn12268246\nn12269241\nn12269406\nn12269652\nn12270027\nn12270278\nn12270460\nn12270741\nn12270946\nn12271187\nn12271451\nn12271643\nn12271933\nn12272239\nn12272432\nn12272735\nn12272883\nn12273114\nn12273344\nn12273515\nn12273768\nn12273939\nn12274151\nn12274358\nn12274630\nn12274863\nn12275131\nn12275317\nn12275489\nn12275675\nn12275888\nn12276110\nn12276314\nn12276477\nn12276628\nn12276872\nn12277150\nn12277334\nn12277578\nn12277800\nn12278107\nn12278371\nn12278650\nn12278865\nn12279060\nn12279293\nn12279458\nn12279772\nn12280060\nn12280364\nn12281241\nn12281788\nn12281974\nn12282235\nn12282527\nn12282737\nn12282933\nn12283147\nn12283395\nn12283542\nn12283790\nn12284262\nn12284821\nn12285049\nn12285195\nn12285369\nn12285512\nn12285705\nn12285900\nn12286068\nn12286197\nn12286826\nn12286988\nn12287195\nn12287642\nn12287836\nn12288005\nn12288823\nn12289310\nn12289433\nn12289585\nn12290748\nn12290975\nn12291143\nn12291459\nn12291671\nn12291959\nn12292463\nn12292877\nn12293723\nn12294124\nn12294331\nn12294542\nn12294723\nn12294871\nn12295033\nn12295237\nn12295429\nn12295796\nn12296045\nn12296432\nn12296735\nn12296929\nn12297110\nn12297280\nn12297507\nn12297846\nn12298165\nn12299640\nn12300840\nn12301180\nn12301445\nn12301613\nn12301766\nn12302071\nn12302248\nn12302565\nn12303083\nn12303462\nn12304115\nn12304286\nn12304420\nn12304703\nn12304899\nn12305089\nn12305293\nn12305475\nn12305654\nn12305819\nn12305986\nn12306089\nn12306270\nn12306717\nn12306938\nn12307076\nn12307240\nn12307756\nn12308112\nn12308447\nn12308907\nn12309277\nn12309630\nn12310021\nn12310349\nn12310638\nn12311045\nn12311224\nn12311413\nn12311579\nn12312110\nn12312728\nn12315060\nn12315245\nn12315598\nn12315999\nn12316444\nn12316572\nn12
317296\nn12318378\nn12318782\nn12318965\nn12319204\nn12319414\nn12320010\nn12320414\nn12320627\nn12320806\nn12321077\nn12321395\nn12321669\nn12321873\nn12322099\nn12322501\nn12322699\nn12323665\nn12324056\nn12324222\nn12324388\nn12324558\nn12324906\nn12325234\nn12325787\nn12327022\nn12327528\nn12327846\nn12328398\nn12328567\nn12328801\nn12329260\nn12329473\nn12330239\nn12330469\nn12330587\nn12330891\nn12331066\nn12331263\nn12331655\nn12331788\nn12332030\nn12332218\nn12332555\nn12333053\nn12333530\nn12333771\nn12333961\nn12334153\nn12334293\nn12334891\nn12335483\nn12335664\nn12335800\nn12335937\nn12336092\nn12336224\nn12336333\nn12336586\nn12336727\nn12336973\nn12337131\nn12337246\nn12337391\nn12337617\nn12337800\nn12337922\nn12338034\nn12338146\nn12338258\nn12338454\nn12338655\nn12338796\nn12338979\nn12339526\nn12339831\nn12340383\nn12340581\nn12340755\nn12341542\nn12341931\nn12342299\nn12342498\nn12342852\nn12343480\nn12343753\nn12344283\nn12344483\nn12344700\nn12344837\nn12345280\nn12345899\nn12346578\nn12346813\nn12346986\nn12347158\nn12349315\nn12349711\nn12350032\nn12350758\nn12351091\nn12351790\nn12352287\nn12352639\nn12352844\nn12352990\nn12353203\nn12353431\nn12353754\nn12355760\nn12356023\nn12356395\nn12356960\nn12357485\nn12357968\nn12358293\nn12360108\nn12360534\nn12360684\nn12360817\nn12360958\nn12361135\nn12361560\nn12361754\nn12361946\nn12362274\nn12362514\nn12362668\nn12363301\nn12363768\nn12364604\nn12364940\nn12365158\nn12365285\nn12365462\nn12365900\nn12366053\nn12366186\nn12366313\nn12366675\nn12366870\nn12367611\nn12368028\nn12368257\nn12368451\nn12369066\nn12369309\nn12369476\nn12369665\nn12369845\nn12370174\nn12370549\nn12371202\nn12371439\nn12371704\nn12372233\nn12373100\nn12373739\nn12374418\nn12374705\nn12374862\nn12375769\nn12377198\nn12377494\nn12378249\nn12378753\nn12378963\nn12379531\nn12380761\nn12381511\nn12382233\nn12382875\nn12383737\nn12383894\nn12384037\nn12384227\nn12384375\nn12384569\nn12384680\nn12384839\nn12385429\nn12385566\nn
12385830\nn12386945\nn12387103\nn12387633\nn12387839\nn12388143\nn12388293\nn12388858\nn12388989\nn12389130\nn12389501\nn12389727\nn12389932\nn12390099\nn12390314\nn12392070\nn12392549\nn12392765\nn12393269\nn12394118\nn12394328\nn12394638\nn12395068\nn12395289\nn12395463\nn12395906\nn12396091\nn12396924\nn12397431\nn12399132\nn12399384\nn12399534\nn12399656\nn12399899\nn12400489\nn12400720\nn12400924\nn12401335\nn12401684\nn12401893\nn12402051\nn12402348\nn12402596\nn12402840\nn12403075\nn12403276\nn12403513\nn12403994\nn12404729\nn12405714\nn12406304\nn12406488\nn12406715\nn12406902\nn12407079\nn12407222\nn12407396\nn12407545\nn12407715\nn12407890\nn12408077\nn12408280\nn12408466\nn12408717\nn12408873\nn12409231\nn12409470\nn12409651\nn12409840\nn12411461\nn12412355\nn12412606\nn12412987\nn12413165\nn12413301\nn12413419\nn12413642\nn12413880\nn12414035\nn12414159\nn12414329\nn12414449\nn12414818\nn12414932\nn12415595\nn12416073\nn12416423\nn12416703\nn12417836\nn12418221\nn12418507\nn12419037\nn12419878\nn12420124\nn12420535\nn12420722\nn12421137\nn12421467\nn12421683\nn12421917\nn12422129\nn12422559\nn12425281\nn12426623\nn12426749\nn12427184\nn12427391\nn12427566\nn12427757\nn12427946\nn12428076\nn12428242\nn12428412\nn12428747\nn12429352\nn12430198\nn12430471\nn12430675\nn12431434\nn12432069\nn12432356\nn12432574\nn12432707\nn12433081\nn12433178\nn12433769\nn12433952\nn12434106\nn12434483\nn12434634\nn12434775\nn12434985\nn12435152\nn12435486\nn12435649\nn12435777\nn12435965\nn12436090\nn12436907\nn12437513\nn12437769\nn12437930\nn12439154\nn12439830\nn12441183\nn12441390\nn12441552\nn12441958\nn12442548\nn12443323\nn12443736\nn12444095\nn12444898\nn12446200\nn12446519\nn12446737\nn12446908\nn12447121\nn12447346\nn12447581\nn12447891\nn12448136\nn12448361\nn12448700\nn12449296\nn12449526\nn12449784\nn12449934\nn12450344\nn12450607\nn12450840\nn12451070\nn12451240\nn12451399\nn12451566\nn12451915\nn12452256\nn12452480\nn12452673\nn12452836\nn12453018\nn12453186\
nn12453714\nn12453857\nn12454159\nn12454436\nn12454556\nn12454705\nn12454793\nn12454949\nn12455950\nn12457091\nn12458550\nn12458713\nn12458874\nn12459629\nn12460146\nn12460697\nn12460957\nn12461109\nn12461466\nn12461673\nn12462032\nn12462221\nn12462582\nn12462805\nn12463134\nn12463743\nn12463975\nn12464128\nn12464476\nn12464649\nn12465557\nn12466727\nn12467018\nn12467197\nn12467433\nn12467592\nn12468545\nn12468719\nn12469517\nn12470092\nn12470512\nn12470907\nn12472024\nn12473608\nn12473840\nn12474167\nn12474418\nn12475035\nn12475242\nn12475774\nn12476510\nn12477163\nn12477401\nn12477583\nn12477747\nn12477983\nn12478768\nn12479537\nn12480456\nn12480895\nn12481150\nn12481289\nn12481458\nn12482437\nn12482668\nn12482893\nn12483282\nn12483427\nn12483625\nn12483841\nn12484244\nn12484784\nn12485653\nn12485981\nn12486574\nn12487058\nn12488454\nn12488709\nn12489046\nn12489676\nn12489815\nn12490490\nn12491017\nn12491435\nn12491826\nn12492106\nn12492460\nn12492682\nn12492900\nn12493208\nn12493426\nn12493868\nn12494794\nn12495146\nn12495670\nn12495895\nn12496427\nn12496949\nn12497669\nn12498055\nn12498457\nn12499163\nn12499757\nn12499979\nn12500309\nn12500518\nn12500751\nn12501202\nn12504570\nn12504783\nn12505253\nn12506181\nn12506341\nn12506991\nn12507379\nn12507823\nn12508309\nn12508618\nn12508762\nn12509109\nn12509476\nn12509665\nn12509821\nn12509993\nn12510343\nn12510774\nn12511488\nn12511856\nn12512095\nn12512294\nn12512674\nn12513172\nn12513613\nn12513933\nn12514138\nn12514592\nn12514992\nn12515393\nn12515711\nn12515925\nn12516165\nn12516584\nn12516828\nn12517077\nn12517445\nn12517642\nn12518013\nn12518481\nn12519089\nn12519563\nn12520406\nn12521186\nn12521394\nn12522188\nn12522678\nn12522894\nn12523141\nn12523475\nn12523850\nn12524188\nn12525168\nn12525513\nn12525753\nn12526178\nn12526516\nn12526754\nn12527081\nn12527738\nn12528109\nn12528382\nn12528549\nn12528768\nn12528974\nn12529220\nn12529500\nn12529905\nn12530629\nn12530818\nn12531328\nn12531727\nn12532564\nn1253288
6\nn12533190\nn12533437\nn12534208\nn12534625\nn12534862\nn12536291\nn12537253\nn12537569\nn12538209\nn12539074\nn12539306\nn12539832\nn12540250\nn12540647\nn12540966\nn12541157\nn12541403\nn12542043\nn12542240\nn12543186\nn12543455\nn12543639\nn12543826\nn12544240\nn12544539\nn12545232\nn12545635\nn12545865\nn12546183\nn12546420\nn12546617\nn12546962\nn12547215\nn12547503\nn12548280\nn12548564\nn12548804\nn12549005\nn12549192\nn12549420\nn12549799\nn12550210\nn12550408\nn12551173\nn12551457\nn12552309\nn12552893\nn12553742\nn12554029\nn12554526\nn12554729\nn12554911\nn12555255\nn12555859\nn12556656\nn12557064\nn12557438\nn12557556\nn12557681\nn12558230\nn12558425\nn12558680\nn12559044\nn12559518\nn12560282\nn12560621\nn12560775\nn12561169\nn12561309\nn12561594\nn12562141\nn12562577\nn12562785\nn12563045\nn12563702\nn12564083\nn12564613\nn12565102\nn12565912\nn12566331\nn12566954\nn12567950\nn12568186\nn12568649\nn12569037\nn12569616\nn12569851\nn12570394\nn12570703\nn12570972\nn12571781\nn12572546\nn12572759\nn12572858\nn12573256\nn12573474\nn12573647\nn12573911\nn12574320\nn12574470\nn12574866\nn12575322\nn12575812\nn12576323\nn12576451\nn12576695\nn12577362\nn12577895\nn12578255\nn12578626\nn12578916\nn12579038\nn12579404\nn12579822\nn12580012\nn12580654\nn12580786\nn12580896\nn12581110\nn12582231\nn12582665\nn12582846\nn12583126\nn12583401\nn12583681\nn12583855\nn12584191\nn12584365\nn12584715\nn12585137\nn12585373\nn12585629\nn12586298\nn12586499\nn12586725\nn12586989\nn12587132\nn12587487\nn12587803\nn12588320\nn12588780\nn12589142\nn12589458\nn12589687\nn12589841\nn12590232\nn12590499\nn12590600\nn12590715\nn12591017\nn12591351\nn12591702\nn12592058\nn12592544\nn12592839\nn12593122\nn12593341\nn12593994\nn12594324\nn12594989\nn12595699\nn12595964\nn12596148\nn12596345\nn12596709\nn12596849\nn12597134\nn12597466\nn12597798\nn12598027\nn12599185\nn12599435\nn12599661\nn12599874\nn12600095\nn12600267\nn12601494\nn12601805\nn12602262\nn12602434\nn12602612\nn12602
980\nn12603273\nn12603449\nn12603672\nn12604228\nn12604460\nn12604639\nn12604845\nn12605683\nn12606438\nn12606545\nn12607456\nn12609379\nn12610328\nn12610740\nn12611640\nn12612170\nn12612811\nn12613706\nn12614096\nn12614477\nn12614625\nn12615232\nn12615710\nn12616248\nn12616630\nn12616996\nn12617559\nn12618146\nn12618727\nn12620196\nn12620969\nn12621410\nn12621619\nn12621945\nn12622297\nn12622875\nn12623077\nn12623211\nn12623818\nn12624381\nn12624568\nn12625003\nn12625383\nn12625670\nn12625823\nn12626674\nn12626878\nn12627119\nn12627347\nn12627526\nn12628356\nn12628705\nn12628986\nn12629305\nn12629666\nn12630763\nn12630999\nn12631331\nn12631637\nn12631932\nn12632335\nn12632733\nn12633061\nn12633638\nn12633994\nn12634211\nn12634429\nn12634734\nn12634986\nn12635151\nn12635359\nn12635532\nn12635744\nn12635955\nn12636224\nn12636885\nn12637123\nn12637485\nn12638218\nn12638556\nn12638753\nn12638964\nn12639168\nn12639376\nn12639584\nn12639736\nn12639910\nn12640081\nn12640284\nn12640435\nn12640607\nn12640839\nn12641007\nn12641180\nn12641413\nn12641931\nn12642090\nn12642200\nn12642435\nn12642600\nn12642964\nn12643113\nn12643313\nn12643473\nn12643688\nn12643877\nn12644283\nn12644902\nn12645174\nn12645530\nn12646072\nn12646197\nn12646397\nn12646605\nn12646740\nn12646950\nn12647231\nn12647376\nn12647560\nn12647787\nn12647893\nn12648045\nn12648196\nn12648424\nn12648693\nn12648888\nn12649065\nn12649317\nn12649539\nn12649866\nn12650038\nn12650229\nn12650379\nn12650556\nn12650805\nn12650915\nn12651229\nn12651611\nn12651821\nn12653218\nn12653436\nn12653633\nn12654227\nn12654857\nn12655062\nn12655245\nn12655351\nn12655498\nn12655605\nn12655726\nn12655869\nn12656369\nn12656528\nn12656685\nn12656909\nn12657082\nn12657755\nn12658118\nn12658308\nn12658481\nn12658603\nn12658715\nn12658846\nn12659064\nn12659356\nn12659539\nn12660601\nn12661045\nn12661227\nn12661538\nn12662074\nn12662379\nn12662772\nn12663023\nn12663254\nn12663359\nn12663804\nn12664005\nn12664187\nn12664469\nn12664710\nn126
65048\nn12665271\nn12665659\nn12665857\nn12666050\nn12666159\nn12666369\nn12666965\nn12667406\nn12667582\nn12667964\nn12668131\nn12669803\nn12670334\nn12670758\nn12670962\nn12671651\nn12672289\nn12673588\nn12674120\nn12674685\nn12674895\nn12675299\nn12675515\nn12675876\nn12676134\nn12676370\nn12676534\nn12676703\nn12677120\nn12677331\nn12677612\nn12677841\nn12678794\nn12679023\nn12679432\nn12679593\nn12679876\nn12680402\nn12680652\nn12680864\nn12681376\nn12681579\nn12681893\nn12682411\nn12682668\nn12682882\nn12683096\nn12683407\nn12683571\nn12683791\nn12684379\nn12685431\nn12685831\nn12686077\nn12686274\nn12686496\nn12686676\nn12686877\nn12687044\nn12687462\nn12687698\nn12687957\nn12688187\nn12688372\nn12688716\nn12689305\nn12690653\nn12691428\nn12691661\nn12692024\nn12692160\nn12692521\nn12692714\nn12693244\nn12693352\nn12693865\nn12694486\nn12695144\nn12695975\nn12696492\nn12696830\nn12697152\nn12697514\nn12698027\nn12698435\nn12698598\nn12698774\nn12699031\nn12699301\nn12699922\nn12700088\nn12700357\nn12702124\nn12703190\nn12703383\nn12703557\nn12703716\nn12703856\nn12704041\nn12704343\nn12704513\nn12705013\nn12705220\nn12705458\nn12705698\nn12705978\nn12706410\nn12707199\nn12707781\nn12708293\nn12708654\nn12708941\nn12709103\nn12709349\nn12709688\nn12709901\nn12710295\nn12710415\nn12710577\nn12710693\nn12710917\nn12711182\nn12711398\nn12711596\nn12711817\nn12711984\nn12712320\nn12712626\nn12713063\nn12713358\nn12713521\nn12713866\nn12714254\nn12714755\nn12714949\nn12715195\nn12715914\nn12716400\nn12716594\nn12717072\nn12717224\nn12717644\nn12718074\nn12718483\nn12718995\nn12719684\nn12719944\nn12720200\nn12720354\nn12721122\nn12721477\nn12722071\nn12723062\nn12723610\nn12724942\nn12725521\nn12725738\nn12725940\nn12726159\nn12726357\nn12726528\nn12726670\nn12726902\nn12727101\nn12727301\nn12727518\nn12727729\nn12727960\nn12728164\nn12728322\nn12728508\nn12728656\nn12728864\nn12729023\nn12729164\nn12729315\nn12729521\nn12729729\nn12729950\nn12730143\nn12730370\nn1
2730544\nn12730776\nn12731029\nn12731401\nn12731835\nn12732009\nn12732252\nn12732491\nn12732605\nn12732756\nn12732966\nn12733218\nn12733428\nn12733647\nn12733870\nn12734070\nn12734215\nn12735160\nn12736603\nn12736999\nn12737383\nn12737898\nn12738259\nn12739332\nn12739966\nn12740967\nn12741222\nn12741586\nn12741792\nn12742290\nn12742741\nn12742878\nn12743009\nn12743352\nn12743823\nn12743976\nn12744142\nn12744387\nn12744850\nn12745386\nn12745564\nn12746884\nn12747120\nn12748248\nn12749049\nn12749456\nn12749679\nn12749852\nn12750076\nn12750767\nn12751172\nn12751675\nn12752205\nn12753007\nn12753245\nn12753573\nn12753762\nn12754003\nn12754174\nn12754311\nn12754468\nn12754648\nn12754781\nn12754981\nn12755225\nn12755387\nn12755559\nn12755727\nn12755876\nn12756457\nn12757115\nn12757303\nn12757458\nn12757668\nn12757816\nn12757930\nn12758014\nn12758099\nn12758176\nn12758250\nn12758325\nn12758399\nn12758471\nn12758555\nn12759273\nn12759668\nn12760539\nn12760875\nn12761284\nn12761702\nn12761905\nn12762049\nn12762405\nn12762896\nn12763529\nn12764008\nn12764202\nn12764507\nn12764978\nn12765115\nn12765402\nn12765846\nn12766043\nn12766595\nn12766869\nn12767208\nn12767423\nn12767648\nn12768369\nn12768809\nn12768933\nn12769065\nn12769219\nn12769318\nn12770529\nn12770892\nn12771085\nn12771192\nn12771390\nn12771597\nn12771890\nn12772753\nn12772908\nn12773142\nn12773651\nn12773917\nn12774299\nn12774641\nn12775070\nn12775393\nn12775717\nn12775919\nn12776558\nn12776774\nn12777436\nn12777680\nn12777778\nn12777892\nn12778398\nn12778605\nn12779603\nn12779851\nn12780325\nn12780563\nn12781940\nn12782530\nn12782915\nn12783316\nn12783730\nn12784371\nn12784889\nn12785724\nn12785889\nn12786273\nn12786464\nn12786836\nn12787364\nn12788854\nn12789054\nn12789554\nn12789977\nn12790430\nn12791064\nn12791329\nn12793015\nn12793284\nn12793494\nn12793695\nn12793886\nn12794135\nn12794367\nn12794568\nn12794985\nn12795209\nn12795352\nn12795555\nn12796022\nn12796385\nn12796849\nn12797368\nn12797860\nn12798284\n
n12798910\nn12799269\nn12799776\nn12800049\nn12800586\nn12801072\nn12801520\nn12801781\nn12801966\nn12803226\nn12803754\nn12803958\nn12804352\nn12805146\nn12805561\nn12805762\nn12806015\nn12806732\nn12807251\nn12807409\nn12807624\nn12807773\nn12808007\nn12809868\nn12810007\nn12810151\nn12810595\nn12811027\nn12811713\nn12812235\nn12812478\nn12812801\nn12813189\nn12814643\nn12814857\nn12814960\nn12815198\nn12815668\nn12815838\nn12816508\nn12816942\nn12817464\nn12817694\nn12817855\nn12818004\nn12818346\nn12818601\nn12818966\nn12819141\nn12819354\nn12819728\nn12820113\nn12820669\nn12820853\nn12821505\nn12821895\nn12822115\nn12822466\nn12822769\nn12822955\nn12823717\nn12823859\nn12824053\nn12824289\nn12824735\nn12825497\nn12826143\nn12827270\nn12827537\nn12827907\nn12828220\nn12828379\nn12828520\nn12828791\nn12828977\nn12829582\nn12829975\nn12830222\nn12830568\nn12831141\nn12831535\nn12831932\nn12832315\nn12832538\nn12832822\nn12833149\nn12833985\nn12834190\nn12834798\nn12834938\nn12835331\nn12835766\nn12836212\nn12836337\nn12836508\nn12836862\nn12837052\nn12837259\nn12837466\nn12837803\nn12839574\nn12839979\nn12840168\nn12840362\nn12840502\nn12840749\nn12841007\nn12841193\nn12841354\nn12842302\nn12842519\nn12842642\nn12842887\nn12843144\nn12843316\nn12843557\nn12843970\nn12844409\nn12844939\nn12845187\nn12845413\nn12845908\nn12846335\nn12846690\nn12847008\nn12847374\nn12847927\nn12848499\nn12849061\nn12849279\nn12849416\nn12849952\nn12850168\nn12850336\nn12850906\nn12851094\nn12851469\nn12851860\nn12852234\nn12852428\nn12852570\nn12853080\nn12853287\nn12853482\nn12854048\nn12854193\nn12854600\nn12855365\nn12855494\nn12855710\nn12855886\nn12856091\nn12856287\nn12856479\nn12856680\nn12857204\nn12857779\nn12858150\nn12858397\nn12858618\nn12858871\nn12858987\nn12859153\nn12859272\nn12859679\nn12859986\nn12860365\nn12860978\nn12861345\nn12861541\nn12861892\nn12862512\nn12862828\nn12863234\nn12863624\nn12864160\nn12865037\nn12865562\nn12865708\nn12865824\nn12866002\nn12866162
\nn12866333\nn12866459\nn12866635\nn12866968\nn12867184\nn12867449\nn12867826\nn12868019\nn12868880\nn12869061\nn12869478\nn12869668\nn12870048\nn12870225\nn12870535\nn12870682\nn12870891\nn12871272\nn12871696\nn12871859\nn12872458\nn12872914\nn12873341\nn12873984\nn12875269\nn12875697\nn12875861\nn12876899\nn12877244\nn12877493\nn12877637\nn12877838\nn12878169\nn12878325\nn12878784\nn12879068\nn12879527\nn12879963\nn12880244\nn12880462\nn12880638\nn12880799\nn12881105\nn12881913\nn12882158\nn12882779\nn12882945\nn12883265\nn12883628\nn12884100\nn12884260\nn12885045\nn12885265\nn12885510\nn12885754\nn12886185\nn12886402\nn12886600\nn12886831\nn12887293\nn12887532\nn12887713\nn12888016\nn12888234\nn12888457\nn12889219\nn12889412\nn12889579\nn12889713\nn12890265\nn12890490\nn12890685\nn12890928\nn12891093\nn12891305\nn12891469\nn12891643\nn12891824\nn12892013\nn12893463\nn12893993\nn12895298\nn12895811\nn12896615\nn12897118\nn12897788\nn12897999\nn12898342\nn12898774\nn12899166\nn12899537\nn12899752\nn12899971\nn12900783\nn12901724\nn12902466\nn12902662\nn12903014\nn12903367\nn12903503\nn12903964\nn12904314\nn12904562\nn12904938\nn12905135\nn12905412\nn12906214\nn12906498\nn12906771\nn12907057\nn12907671\nn12907857\nn12908093\nn12908645\nn12908854\nn12909421\nn12909614\nn12909759\nn12909917\nn12911079\nn12911264\nn12911440\nn12911673\nn12911914\nn12912274\nn12912670\nn12912801\nn12913144\nn12913524\nn12913791\nn12914923\nn12915140\nn12915568\nn12915811\nn12916179\nn12916511\nn12917901\nn12918609\nn12918810\nn12918991\nn12919195\nn12919403\nn12919646\nn12919847\nn12920043\nn12920204\nn12920521\nn12920719\nn12920955\nn12921315\nn12921499\nn12921660\nn12921868\nn12922119\nn12922458\nn12922763\nn12923108\nn12923257\nn12924623\nn12925179\nn12925583\nn12926039\nn12926480\nn12926689\nn12927013\nn12927194\nn12927494\nn12927758\nn12928071\nn12928307\nn12928491\nn12928819\nn12929403\nn12929600\nn12930778\nn12930951\nn12931231\nn12931542\nn12931906\nn12932173\nn12932365\nn129327
06\nn12932966\nn12933274\nn12934036\nn12934174\nn12934479\nn12934685\nn12934985\nn12935166\nn12935609\nn12936155\nn12936826\nn12937130\nn12938081\nn12938193\nn12938445\nn12938667\nn12939104\nn12939282\nn12939479\nn12939874\nn12940226\nn12940609\nn12941220\nn12941536\nn12941717\nn12942025\nn12942395\nn12942572\nn12942729\nn12943049\nn12943443\nn12943912\nn12944095\nn12945177\nn12945366\nn12945549\nn12946849\nn12947313\nn12947544\nn12947756\nn12947895\nn12948053\nn12948251\nn12948495\nn12949160\nn12949361\nn12950126\nn12950314\nn12950796\nn12951146\nn12951835\nn12952165\nn12952469\nn12952590\nn12952717\nn12953206\nn12953484\nn12953712\nn12954353\nn12954799\nn12955414\nn12955840\nn12956170\nn12956367\nn12956588\nn12956922\nn12957608\nn12957803\nn12957924\nn12958261\nn12958615\nn12959074\nn12959538\nn12960378\nn12960552\nn12960863\nn12961242\nn12961393\nn12961536\nn12961879\nn12963628\nn12964920\nn12965626\nn12965951\nn12966804\nn12966945\nn12968136\nn12968309\nn12969131\nn12969425\nn12969670\nn12969927\nn12970193\nn12970293\nn12970733\nn12971400\nn12971804\nn12972136\nn12973443\nn12973791\nn12973937\nn12974987\nn12975804\nn12976198\nn12976554\nn12978076\nn12979316\nn12979829\nn12980080\nn12980840\nn12981086\nn12981301\nn12981443\nn12981954\nn12982468\nn12982590\nn12982915\nn12983048\nn12983654\nn12983873\nn12983961\nn12984267\nn12984489\nn12984595\nn12985420\nn12985773\nn12986227\nn12987056\nn12987423\nn12987535\nn12988158\nn12988341\nn12988572\nn12989007\nn12989938\nn12990597\nn12991184\nn12991837\nn12992177\nn12992868\nn12994892\nn12995601\nn12997654\nn12997919\nn13000891\nn13001041\nn13001206\nn13001366\nn13001529\nn13001930\nn13002209\nn13002750\nn13002925\nn13003061\nn13003254\nn13003522\nn13003712\nn13004423\nn13004640\nn13004826\nn13004992\nn13005329\nn13005984\nn13006171\nn13006631\nn13006894\nn13007034\nn13007417\nn13007629\nn13008157\nn13008315\nn13008485\nn13008689\nn13008839\nn13009085\nn13009244\nn13009429\nn13009656\nn13010694\nn13010951\nn13011221\nn1301
1595\nn13012253\nn13012469\nn13012973\nn13013534\nn13013764\nn13013965\nn13014097\nn13014265\nn13014409\nn13014581\nn13014741\nn13014879\nn13015509\nn13015688\nn13016076\nn13016289\nn13017102\nn13017240\nn13017439\nn13017610\nn13017789\nn13017979\nn13018088\nn13018232\nn13018407\nn13018906\nn13019496\nn13019643\nn13019835\nn13020191\nn13020481\nn13020964\nn13021166\nn13021332\nn13021543\nn13021689\nn13021867\nn13022210\nn13022709\nn13022903\nn13023134\nn13024012\nn13024500\nn13024653\nn13025647\nn13025854\nn13026015\nn13027557\nn13027879\nn13028611\nn13028937\nn13029122\nn13029326\nn13029610\nn13029760\nn13030337\nn13030616\nn13030852\nn13031193\nn13031323\nn13031474\nn13032115\nn13032381\nn13032618\nn13032923\nn13033134\nn13033396\nn13033577\nn13033879\nn13034062\nn13034555\nn13034788\nn13035241\nn13035389\nn13035707\nn13035925\nn13036116\nn13036312\nn13036804\nn13037585\nn13037805\nn13038068\nn13038376\nn13038577\nn13038744\nn13039349\nn13040629\nn13040796\nn13041312\nn13041943\nn13042134\nn13042316\nn13042982\nn13043926\nn13044375\nn13045210\nn13045594\nn13045975\nn13046130\nn13046669\nn13047862\nn13048447\nn13049953\nn13050397\nn13050705\nn13050940\nn13051346\nn13052014\nn13052248\nn13052931\nn13053608\nn13054073\nn13055423\nn13055577\nn13055792\nn13055949\nn13056135\nn13056349\nn13056607\nn13056799\nn13057054\nn13057242\nn13057422\nn13057639\nn13058037\nn13058272\nn13058608\nn13059298\nn13059657\nn13060017\nn13060190\nn13061172\nn13061348\nn13061471\nn13061704\nn13062421\nn13063269\nn13063514\nn13064111\nn13064457\nn13065089\nn13065514\nn13066129\nn13066448\nn13066979\nn13067191\nn13067330\nn13067532\nn13067672\nn13068255\nn13068434\nn13068735\nn13068917\nn13069224\nn13069773\nn13070308\nn13070875\nn13071371\nn13071553\nn13071815\nn13072031\nn13072209\nn13072350\nn13072528\nn13072706\nn13072863\nn13073055\nn13073703\nn13074619\nn13074814\nn13075020\nn13075272\nn13075441\nn13075684\nn13075847\nn13076041\nn13076405\nn13076643\nn13076831\nn13077033\nn13077295\nn13
078021\nn13079073\nn13079419\nn13079567\nn13080306\nn13080866\nn13081229\nn13081999\nn13082568\nn13083023\nn13083461\nn13084184\nn13084834\nn13085113\nn13085747\nn13090018\nn13090871\nn13091620\nn13091774\nn13091982\nn13092078\nn13092240\nn13092385\nn13092987\nn13093275\nn13093629\nn13094145\nn13094273\nn13095013\nn13096779\nn13098515\nn13098962\nn13099833\nn13099999\nn13100156\nn13100677\nn13102648\nn13102775\nn13103023\nn13103660\nn13103750\nn13103877\nn13104059\nn13107694\nn13107807\nn13107891\nn13108131\nn13108323\nn13108481\nn13108545\nn13108662\nn13108841\nn13109733\nn13110915\nn13111174\nn13111340\nn13111504\nn13111881\nn13112035\nn13112201\nn13118330\nn13118707\nn13119870\nn13120211\nn13120958\nn13121104\nn13121349\nn13122364\nn13123309\nn13123431\nn13123841\nn13124358\nn13124654\nn13125117\nn13126050\nn13126856\nn13127001\nn13127303\nn13127666\nn13127843\nn13128278\nn13128582\nn13128976\nn13129078\nn13130014\nn13130161\nn13130726\nn13131028\nn13131618\nn13132034\nn13132156\nn13132338\nn13132486\nn13132656\nn13132756\nn13132940\nn13133140\nn13133233\nn13133316\nn13133932\nn13134302\nn13134531\nn13134844\nn13134947\nn13135692\nn13135832\nn13136316\nn13136556\nn13136781\nn13137010\nn13137225\nn13137409\nn13137672\nn13137951\nn13138155\nn13138308\nn13138658\nn13138842\nn13139055\nn13139321\nn13139482\nn13139647\nn13139837\nn13140049\nn13140367\nn13141141\nn13141415\nn13141564\nn13141797\nn13141972\nn13142182\nn13142504\nn13142907\nn13143285\nn13143758\nn13144084\nn13145040\nn13145250\nn13145444\nn13146403\nn13146583\nn13146928\nn13147153\nn13147270\nn13147386\nn13147532\nn13147689\nn13147918\nn13148208\nn13148384\nn13149296\nn13149970\nn13150378\nn13150592\nn13150894\nn13151082\nn13152339\nn13154388\nn13154494\nn13154841\nn13155095\nn13155305\nn13155611\nn13156986\nn13157137\nn13157346\nn13157481\nn13157684\nn13157971\nn13158167\nn13158512\nn13158605\nn13158714\nn13158815\nn13159357\nn13159691\nn13159890\nn13160116\nn13160254\nn13160365\nn13160604\nn13160831\nn
13160938\nn13161151\nn13161254\nn13161904\nn13163553\nn13163649\nn13163991\nn13164501\nn13170840\nn13171210\nn13171797\nn13172923\nn13173132\nn13173259\nn13173488\nn13173697\nn13173882\nn13174354\nn13174670\nn13174823\nn13175682\nn13176363\nn13176714\nn13177048\nn13177529\nn13177768\nn13177884\nn13178284\nn13178707\nn13179056\nn13179804\nn13180534\nn13180875\nn13181055\nn13181244\nn13181406\nn13181811\nn13182164\nn13182338\nn13182799\nn13182937\nn13183056\nn13183489\nn13184394\nn13185269\nn13185658\nn13186388\nn13186546\nn13187367\nn13188096\nn13188268\nn13188462\nn13188767\nn13190060\nn13190747\nn13191148\nn13191620\nn13191884\nn13192625\nn13193143\nn13193269\nn13193466\nn13193642\nn13193856\nn13194036\nn13194212\nn13194572\nn13194758\nn13194918\nn13195341\nn13195761\nn13196003\nn13196234\nn13196369\nn13196738\nn13197274\nn13197507\nn13198054\nn13198482\nn13198914\nn13199717\nn13199970\nn13200193\nn13200542\nn13200651\nn13200986\nn13201423\nn13201566\nn13201969\nn13202125\nn13202355\nn13202602\nn13205058\nn13205249\nn13206178\nn13206817\nn13207094\nn13207335\nn13207572\nn13207736\nn13207923\nn13208302\nn13208705\nn13208965\nn13209129\nn13209270\nn13209460\nn13209808\nn13210350\nn13210597\nn13211020\nn13211790\nn13212025\nn13212175\nn13212379\nn13212559\nn13213066\nn13213397\nn13213577\nn13214217\nn13214340\nn13214485\nn13215258\nn13215586\nn13217005\nn13219422\nn13219833\nn13219976\nn13220122\nn13220355\nn13220525\nn13220663\nn13221529\nn13222877\nn13222985\nn13223090\nn13223588\nn13223710\nn13223843\nn13224673\nn13224922\nn13225244\nn13225365\nn13225617\nn13226320\nn13226871\nn13228017\nn13228536\nn13229543\nn13229951\nn13230190\nn13230662\nn13230843\nn13231078\nn13231678\nn13231919\nn13232106\nn13232363\nn13232779\nn13233727\nn13234114\nn13234519\nn13234678\nn13234857\nn13235011\nn13235159\nn13235319\nn13235503\nn13235766\nn13236100\nn13237188\nn13237508\nn13238375\nn13238654\nn13238988\nn13239177\nn13239736\nn13239921\nn13240362\nn13252672\nn13354021\nn13555775\
nn13579829\nn13650447\nn13653902\nn13862407\nn13862552\nn13862780\nn13863020\nn13863186\nn13863473\nn13863771\nn13864035\nn13864153\nn13864965\nn13865298\nn13865483\nn13865904\nn13866144\nn13866626\nn13866827\nn13867005\nn13867492\nn13868248\nn13868371\nn13868515\nn13868944\nn13869045\nn13869547\nn13869788\nn13869896\nn13871717\nn13872592\nn13872822\nn13873361\nn13873502\nn13873917\nn13874073\nn13874558\nn13875392\nn13875571\nn13875884\nn13876561\nn13877547\nn13877667\nn13878306\nn13879049\nn13879320\nn13879816\nn13880199\nn13880415\nn13880551\nn13880704\nn13880994\nn13881512\nn13881644\nn13882201\nn13882276\nn13882487\nn13882563\nn13882639\nn13882713\nn13882961\nn13883603\nn13883763\nn13884261\nn13884384\nn13884930\nn13885011\nn13886260\nn13888491\nn13889066\nn13889331\nn13891547\nn13891937\nn13893786\nn13894154\nn13894434\nn13895262\nn13896100\nn13896217\nn13897198\nn13897528\nn13897996\nn13898207\nn13898315\nn13898645\nn13899735\nn13900287\nn13900422\nn13901211\nn13901321\nn13901423\nn13901490\nn13901858\nn13902048\nn13902336\nn13902793\nn13903079\nn13905121\nn13905275\nn13905792\nn13906484\nn13906669\nn13906767\nn13906936\nn13907272\nn13908201\nn13908580\nn13911045\nn13912260\nn13912540\nn13914141\nn13914265\nn13914608\nn13915023\nn13915113\nn13915209\nn13915305\nn13915999\nn13916363\nn13916721\nn13917690\nn13917785\nn13918274\nn13918387\nn13918717\nn13919547\nn13919919\nn13926786\nn14131950\nn14175579\nn14564779\nn14582716\nn14583400\nn14585392\nn14592309\nn14603798\nn14633206\nn14685296\nn14696793\nn14698884\nn14714645\nn14720833\nn14765422\nn14785065\nn14786943\nn14804958\nn14810561\nn14820180\nn14821852\nn14844693\nn14853210\nn14858292\nn14867545\nn14891255\nn14899328\nn14900184\nn14900342\nn14908027\nn14909584\nn14914945\nn14915184\nn14919819\nn14938389\nn14941787\nn14942411\nn14973585\nn14974264\nn14975598\nn14976759\nn14976871\nn14977188\nn14977504\nn14992287\nn14993378\nn15005577\nn15006012\nn15019030\nn15048888\nn15060326\nn15060688\nn15062057\nn1506787
7\nn15086247\nn15089258\nn15089472\nn15089645\nn15089803\nn15090065\nn15090238\nn15090742\nn15091129\nn15091304\nn15091473\nn15091669\nn15091846\nn15092059\nn15092227\nn15092409\nn15092650\nn15092751\nn15092942\nn15093049\nn15093137\nn15093298\nn15102359\nn15102455\nn15102894\n"
  },
  {
    "path": "timm/data/_info/imagenet22k_ms_to_12k_indices.txt",
    "content": "1001\n1003\n1004\n1005\n1006\n1007\n1008\n1009\n1010\n1011\n1013\n1014\n1015\n1016\n1017\n1018\n1019\n1020\n1021\n1023\n1024\n1026\n1027\n1028\n1029\n1030\n1031\n1032\n1033\n1034\n1037\n1038\n1041\n1043\n1044\n1045\n1046\n1047\n1048\n1049\n1050\n1051\n1053\n1055\n1056\n1057\n1058\n1060\n1061\n1062\n1063\n1064\n1065\n1066\n1067\n1068\n1069\n1070\n1071\n1072\n1073\n1074\n1075\n1076\n1077\n1078\n1079\n1080\n1081\n1082\n1083\n1084\n1085\n1086\n1089\n1090\n1091\n1093\n1094\n1095\n1096\n1097\n1099\n1100\n1101\n1102\n1103\n1105\n1107\n1108\n1109\n1110\n1111\n1112\n1113\n1114\n1115\n1116\n1117\n1118\n1119\n1120\n1121\n1122\n1123\n1124\n1125\n1126\n1127\n1128\n1129\n1130\n1131\n1132\n1133\n1134\n1135\n1137\n1138\n1140\n1141\n1142\n1143\n1144\n1146\n1147\n1148\n1149\n1151\n1152\n1153\n1154\n1156\n1157\n1158\n1159\n1161\n1162\n1164\n1165\n1166\n1167\n1168\n1169\n1170\n1171\n1172\n1173\n1175\n1176\n1179\n1180\n1181\n1182\n1184\n1188\n1192\n1193\n1195\n1196\n1197\n1199\n1200\n1203\n1206\n1209\n1210\n1211\n1212\n1213\n1214\n1215\n1216\n1217\n1218\n1219\n1220\n1221\n1222\n1223\n1224\n1225\n1226\n1227\n1230\n1231\n1235\n1249\n1250\n1251\n1252\n1253\n1254\n1289\n1292\n1295\n1301\n1306\n1307\n1312\n1313\n1315\n1317\n1320\n1324\n1325\n1326\n1327\n1332\n1341\n1343\n1347\n1352\n1353\n1354\n1356\n0\n1359\n1365\n1366\n1\n1367\n1368\n1375\n1377\n1378\n1380\n1381\n1382\n1383\n1384\n1390\n1393\n1396\n1400\n1403\n1406\n1408\n1409\n1411\n1413\n1414\n1416\n1420\n1421\n1422\n1428\n1429\n1438\n1439\n1449\n1450\n1453\n1454\n1455\n2\n1458\n1461\n1462\n1463\n1465\n1466\n1467\n1468\n1469\n1470\n1471\n3\n1473\n1475\n1478\n4\n1484\n1485\n1486\n5\n1487\n6\n1492\n1493\n1495\n1496\n1498\n1503\n1504\n1505\n1506\n7\n1507\n8\n1511\n1514\n1515\n9\n1516\n1517\n1518\n1519\n1520\n1523\n1526\n1528\n1529\n1530\n1531\n1532\n1533\n1534\n1535\n1536\n10\n11\n1537\n1538\n1540\n1541\n1542\n1543\n12\n1544\n1545\n1546\n1547\n1548\n1549\n13\n1550\n1552\n1553\n1554\n1555\n1556\n1557\n1558\n1559\n1560\n14\n15
61\n1562\n1563\n1565\n1566\n1568\n1569\n1570\n1571\n1572\n1573\n1574\n1575\n1576\n1577\n1580\n1581\n1583\n1586\n1587\n1588\n1589\n1590\n1592\n1593\n1594\n1595\n1596\n1597\n1598\n1599\n1600\n1601\n1603\n1604\n1605\n1606\n1608\n1609\n1613\n1614\n1615\n1616\n1617\n1619\n1620\n1621\n1622\n1623\n1624\n1625\n15\n1627\n1628\n1629\n1630\n1631\n16\n1632\n1633\n1634\n1636\n1637\n1638\n1639\n1640\n1641\n1642\n1643\n1644\n1646\n1647\n1648\n1649\n1650\n1651\n1652\n1653\n1654\n1655\n1656\n1657\n1658\n1660\n1661\n1662\n1663\n1664\n1665\n1666\n1667\n1668\n1669\n1670\n1671\n1672\n1674\n1675\n1676\n1677\n1678\n1679\n1680\n1681\n1683\n1684\n1685\n1686\n1687\n1688\n1689\n1690\n1691\n1693\n1694\n1696\n1697\n1698\n1699\n1700\n1701\n1702\n1703\n1704\n1705\n17\n1709\n1710\n1712\n1714\n18\n1715\n1717\n1718\n1719\n1720\n1721\n1722\n1723\n1724\n1725\n1726\n1727\n1728\n1729\n1730\n1732\n1733\n1734\n1736\n1738\n1739\n1740\n1742\n1743\n1744\n1745\n19\n1746\n1747\n1748\n1749\n1750\n1753\n1754\n1755\n1756\n1757\n1758\n1760\n1761\n1762\n1763\n1764\n1765\n1766\n1767\n1769\n1770\n1771\n1772\n1774\n1776\n1778\n1779\n20\n1783\n1784\n1786\n1787\n1788\n1789\n1790\n1791\n1792\n1795\n1796\n1797\n1801\n1802\n1803\n1804\n21\n1805\n1806\n1807\n1808\n1809\n1810\n1811\n1812\n1813\n1814\n1816\n1817\n1818\n1819\n1820\n1821\n1823\n1824\n1825\n1826\n1827\n1828\n1829\n22\n1830\n1831\n1832\n1833\n1834\n23\n1837\n1838\n1839\n1840\n1841\n1842\n1843\n1844\n1845\n1846\n1847\n1848\n1849\n1850\n1851\n1852\n1853\n24\n1854\n1855\n1856\n1857\n1858\n1859\n1860\n1861\n1862\n1863\n1864\n1866\n1867\n1869\n1870\n25\n1871\n1873\n26\n1874\n1876\n1877\n27\n28\n1880\n29\n1882\n1883\n1886\n1889\n1890\n1892\n1895\n1901\n1902\n1903\n1904\n30\n1905\n1908\n1910\n1913\n1914\n31\n32\n1916\n1917\n1918\n1919\n1920\n1921\n1922\n1925\n1927\n1928\n1930\n1931\n1935\n1936\n1937\n1938\n1943\n1946\n1950\n1953\n1957\n1958\n1959\n1960\n33\n1961\n1962\n1963\n1964\n34\n1965\n1966\n1967\n35\n1968\n36\n1969\n1970\n1971\n1972\n37\n1973\n1974\n1975\n1976\n19
77\n1978\n1979\n1981\n1984\n1986\n1987\n38\n39\n1990\n1991\n1992\n1995\n1996\n1997\n1998\n1999\n2000\n2001\n2003\n2004\n2005\n2006\n2007\n40\n2009\n2010\n2011\n41\n2014\n2021\n42\n43\n2023\n44\n2025\n2026\n2027\n45\n2030\n2032\n46\n2033\n47\n2035\n2036\n48\n2037\n2038\n49\n2039\n2042\n50\n2043\n2044\n2046\n2048\n51\n2069\n2088\n2089\n52\n53\n54\n55\n2092\n2093\n2094\n2095\n2096\n2099\n2101\n2103\n2104\n2105\n2108\n2109\n2110\n2111\n56\n2112\n2113\n57\n2114\n2115\n58\n2120\n2121\n2122\n2123\n2125\n59\n60\n2130\n2132\n2134\n2135\n61\n2137\n2138\n2139\n2140\n2141\n2142\n62\n2144\n2145\n2146\n2148\n2151\n2152\n63\n2153\n2154\n2155\n2156\n2157\n2158\n64\n2159\n2160\n2162\n2164\n65\n2165\n2166\n2167\n2168\n2169\n66\n2170\n2171\n2172\n2173\n67\n2174\n2176\n68\n2177\n2178\n2180\n2181\n2182\n2183\n2184\n2185\n2187\n69\n2188\n70\n71\n2189\n2191\n2193\n2194\n72\n73\n74\n75\n76\n77\n2196\n78\n2200\n2204\n2208\n2210\n2219\n2220\n79\n2224\n2225\n2228\n2233\n2238\n2240\n2243\n2244\n2245\n2246\n2247\n2248\n2250\n2252\n2253\n2255\n2257\n2259\n2260\n2261\n2262\n2263\n80\n2264\n81\n2268\n2269\n2270\n2271\n2272\n82\n2273\n83\n2274\n2275\n2278\n2280\n2285\n2288\n2289\n2292\n2293\n2294\n2296\n2298\n84\n2300\n2301\n2302\n85\n2303\n2304\n86\n2305\n2306\n2309\n2310\n2311\n2312\n2315\n2317\n2318\n2319\n2320\n2321\n2322\n2324\n2325\n2326\n2329\n2330\n2333\n2337\n2338\n2339\n87\n2340\n88\n2341\n2342\n89\n2343\n2344\n2345\n2346\n90\n2348\n2349\n2351\n2352\n2354\n2355\n2357\n91\n2360\n2362\n2363\n2365\n2366\n2367\n2368\n92\n93\n2369\n2370\n2372\n2375\n2376\n94\n2380\n2381\n2382\n2387\n2390\n2391\n2392\n2393\n2394\n2396\n2398\n2399\n2400\n2401\n2402\n2404\n2405\n95\n96\n2407\n2408\n2409\n2411\n2412\n2414\n97\n2415\n2416\n2417\n2418\n2419\n2420\n2421\n2422\n2423\n2424\n2425\n2426\n2427\n2428\n2429\n2430\n2431\n2432\n2433\n2434\n2435\n2436\n2437\n2438\n2439\n2440\n2441\n2442\n2443\n2444\n2445\n2446\n2447\n2448\n2449\n2450\n2451\n2452\n2453\n98\n2454\n2455\n99\n2456\n2457\n2458\n2459\n2460\n2461\n24
62\n2463\n2464\n2465\n2466\n2467\n2468\n2469\n2470\n2471\n2472\n2473\n2474\n2475\n2476\n100\n2477\n2481\n2482\n101\n2484\n102\n2485\n103\n2486\n2487\n2488\n2490\n2491\n2493\n2494\n104\n2495\n2498\n2499\n2500\n2506\n2509\n105\n106\n2515\n2517\n2519\n2520\n2521\n2522\n2523\n2524\n2525\n2526\n2527\n2529\n2536\n2537\n2545\n2549\n2552\n2555\n2556\n2558\n2560\n2561\n2562\n2564\n2565\n2572\n107\n2580\n108\n2584\n2585\n2586\n2587\n2588\n2589\n2591\n109\n2592\n2593\n2594\n2599\n2602\n110\n111\n2627\n2630\n2635\n2636\n2638\n2642\n2643\n112\n2645\n113\n2646\n2647\n2648\n114\n2652\n2655\n2656\n2658\n2659\n115\n2662\n2663\n2666\n2667\n2668\n116\n2670\n2671\n2672\n2673\n2674\n2675\n2676\n2678\n2680\n2681\n2682\n2683\n2686\n2689\n2691\n2692\n2693\n2694\n2697\n2698\n117\n2706\n2707\n2709\n2713\n2715\n2717\n2718\n2719\n118\n119\n2727\n120\n121\n2730\n2731\n2732\n122\n2736\n123\n124\n2737\n125\n2739\n2741\n2748\n126\n2750\n2751\n2754\n2757\n2764\n2765\n2766\n2768\n2769\n127\n128\n2770\n2771\n2772\n2773\n2774\n2775\n2776\n2777\n2778\n2779\n2780\n129\n2781\n2782\n130\n2783\n2784\n2785\n2786\n131\n2787\n2788\n2789\n132\n2790\n2791\n2792\n2793\n2794\n133\n2795\n2796\n2797\n134\n2798\n2799\n135\n2800\n2801\n2802\n2803\n2804\n2806\n2807\n2808\n2809\n2810\n136\n2811\n2812\n2813\n137\n2814\n138\n2815\n2817\n2820\n2822\n2823\n2824\n2825\n2826\n2827\n2828\n2829\n2830\n139\n2831\n2832\n2833\n2834\n2835\n2836\n140\n2837\n141\n2838\n2839\n2840\n2841\n2842\n2843\n2844\n2845\n2846\n2848\n2850\n2851\n2853\n2854\n2855\n2856\n2857\n142\n2859\n2861\n2862\n2864\n2865\n2866\n2867\n2868\n2871\n2873\n143\n2874\n2875\n2877\n2878\n2879\n2882\n2884\n2885\n2886\n2887\n2888\n2889\n2890\n2891\n2892\n2893\n2894\n2895\n2896\n2897\n2898\n2899\n2900\n2901\n2902\n2903\n2904\n2905\n2906\n2907\n2908\n2909\n2910\n2911\n2912\n2914\n2916\n2917\n2918\n2919\n2920\n2921\n144\n2923\n2924\n2925\n2926\n2927\n2928\n2929\n2930\n2931\n2932\n2933\n2934\n2935\n145\n2936\n2937\n2938\n2939\n146\n2941\n2943\n2946\n2947\n2948\n2949\n295
3\n2954\n2955\n2956\n2957\n2959\n2960\n2961\n2962\n2963\n147\n2965\n2966\n2970\n2971\n2972\n2973\n2974\n2975\n2976\n148\n2980\n2981\n2983\n2985\n149\n2987\n2988\n2989\n2990\n2991\n2992\n2993\n2994\n2995\n2996\n150\n2997\n2998\n2999\n3000\n3001\n3002\n3003\n3004\n3005\n3007\n3008\n3012\n3013\n3014\n3016\n3017\n3018\n3021\n3022\n151\n153\n154\n155\n3024\n156\n3025\n157\n158\n3026\n159\n3028\n160\n161\n162\n163\n164\n3030\n3032\n3033\n3034\n167\n168\n3038\n169\n170\n3039\n171\n172\n173\n174\n176\n177\n178\n3041\n3042\n179\n180\n181\n182\n183\n184\n185\n186\n187\n3043\n3044\n3045\n3046\n188\n3048\n189\n3049\n190\n191\n192\n193\n194\n195\n3050\n196\n197\n198\n199\n200\n201\n3051\n202\n203\n204\n3053\n3054\n3055\n3056\n205\n206\n207\n208\n209\n3057\n210\n3058\n211\n212\n213\n214\n3059\n215\n216\n3061\n217\n218\n219\n220\n3062\n221\n3065\n3066\n222\n3068\n223\n3069\n3070\n224\n225\n226\n227\n228\n229\n230\n231\n232\n233\n234\n235\n3071\n3072\n236\n237\n3073\n238\n239\n240\n241\n242\n3074\n243\n244\n3075\n245\n246\n247\n3080\n248\n249\n250\n251\n252\n253\n254\n255\n256\n257\n3082\n258\n259\n260\n261\n3083\n3084\n263\n264\n3085\n265\n266\n267\n3087\n269\n270\n271\n272\n3089\n3090\n273\n274\n275\n276\n3093\n3095\n3097\n277\n3102\n3103\n278\n279\n280\n3105\n3106\n3107\n3108\n3109\n3110\n3111\n3112\n3114\n3115\n281\n282\n3116\n283\n3117\n284\n3118\n3119\n285\n3121\n3122\n3123\n3124\n3125\n286\n3126\n3129\n3130\n3132\n3133\n287\n3134\n3135\n3136\n3137\n3138\n3139\n288\n3141\n289\n290\n291\n3142\n292\n3144\n3145\n3146\n293\n3150\n294\n3152\n3153\n3154\n295\n3156\n296\n297\n3158\n3161\n3163\n3165\n298\n299\n3170\n3171\n3172\n3173\n3174\n3194\n3195\n3197\n3198\n3199\n3200\n3205\n3209\n3212\n3213\n3215\n3219\n3220\n3222\n3223\n3225\n3226\n3228\n3230\n3231\n300\n301\n3232\n3234\n3235\n302\n3237\n3239\n303\n3241\n304\n3243\n3244\n305\n3252\n3255\n3256\n3257\n306\n3260\n3261\n3262\n3263\n3264\n3265\n3266\n307\n3279\n3280\n3281\n3282\n3287\n3289\n3290\n3301\n308\n3304\n3306\n3307\n3308\
n3309\n3316\n3317\n3318\n3319\n3321\n3322\n3326\n3329\n3330\n3332\n3333\n3335\n3336\n3340\n3345\n3346\n3347\n309\n3349\n3350\n3353\n3354\n3355\n3356\n3358\n3359\n3360\n3362\n3364\n3365\n3367\n3369\n3370\n3371\n3372\n3373\n3374\n3376\n3379\n3381\n3382\n3383\n3384\n3386\n3389\n310\n3394\n3395\n3396\n3397\n3401\n3404\n311\n3411\n3412\n3413\n3415\n3416\n3418\n312\n3419\n3421\n3424\n313\n3425\n314\n3427\n3428\n3430\n315\n3432\n3433\n3434\n3436\n3444\n3445\n3446\n3448\n3451\n3454\n3458\n3462\n3464\n3475\n3479\n3480\n3489\n316\n3495\n3497\n3498\n317\n3502\n3510\n3511\n3514\n318\n3516\n3518\n3521\n3524\n319\n320\n3531\n3538\n3539\n3540\n3541\n3542\n3543\n3544\n3545\n321\n3546\n3547\n3548\n3549\n3550\n322\n3552\n3553\n3555\n3556\n3557\n3558\n323\n324\n3560\n3561\n325\n326\n3563\n3564\n3565\n3566\n3567\n3568\n3570\n3572\n3573\n3576\n3577\n3580\n3583\n3586\n3587\n3589\n3596\n3599\n3605\n3606\n3613\n3617\n3618\n3620\n3622\n3623\n3626\n3628\n3629\n3630\n3631\n3632\n3633\n3636\n3637\n3640\n3643\n3647\n3649\n3652\n3653\n3655\n3657\n3658\n3662\n3663\n3664\n3665\n3666\n3667\n3669\n3672\n3673\n3675\n3678\n3680\n327\n3681\n3682\n328\n3684\n3685\n3689\n329\n3690\n3692\n3694\n3695\n3696\n3697\n3698\n330\n3699\n331\n3703\n3704\n3707\n3708\n332\n3710\n3713\n3714\n3715\n3718\n3720\n3722\n3723\n3726\n3727\n3734\n3736\n3741\n3746\n3753\n3755\n333\n3759\n3760\n334\n3776\n3782\n3790\n3794\n3799\n3800\n3801\n3802\n335\n3803\n3804\n3805\n3806\n3807\n3808\n3809\n3810\n3812\n3813\n3814\n3815\n3817\n3818\n336\n3823\n3824\n337\n3827\n3828\n3830\n338\n3831\n3832\n3833\n3834\n3837\n3838\n3843\n3849\n3852\n3853\n3854\n3855\n3856\n3857\n3861\n3862\n3863\n3864\n3865\n3866\n3867\n3868\n3869\n3870\n3871\n3875\n3879\n3880\n3881\n3882\n3883\n3884\n3886\n3887\n3888\n3889\n3890\n3891\n3892\n3893\n3895\n3896\n3897\n3898\n3899\n3900\n3901\n3902\n3903\n3904\n3905\n3906\n3907\n3908\n3909\n3914\n3919\n3920\n3921\n3922\n3923\n3925\n3926\n3927\n3928\n3929\n3931\n3932\n3934\n3935\n3938\n3939\n3940\n339\n3941\n3942\n39
43\n3944\n3945\n3948\n3949\n3951\n3952\n340\n3956\n3957\n3958\n3960\n3961\n3963\n3964\n3965\n3966\n3967\n3968\n3969\n341\n3970\n3973\n3974\n342\n343\n3977\n3978\n344\n3980\n3982\n3983\n3984\n345\n3986\n3987\n3988\n3989\n3991\n3993\n3994\n3995\n3996\n3997\n3998\n3999\n4002\n4003\n4005\n4006\n4007\n4008\n4009\n4010\n4012\n4014\n4015\n4016\n4017\n4019\n346\n4021\n4024\n4026\n347\n4028\n4029\n4030\n4031\n4032\n348\n4033\n4034\n4035\n4040\n4041\n4043\n4048\n4049\n4051\n4052\n4055\n4056\n349\n4057\n4058\n4059\n4060\n4061\n4062\n4063\n4065\n4066\n4067\n350\n4070\n4073\n4074\n4075\n4076\n4077\n4078\n4079\n4080\n351\n4081\n352\n353\n4082\n4084\n4085\n4086\n4087\n4090\n4092\n4094\n4096\n4097\n4098\n4100\n4101\n4102\n4104\n4105\n4107\n4108\n4109\n4112\n4113\n4114\n4115\n4117\n4118\n4119\n4120\n4121\n4122\n4123\n4124\n4125\n4127\n4128\n4131\n4138\n354\n4139\n355\n4141\n4142\n4143\n4144\n4145\n4146\n356\n4148\n357\n4152\n358\n4153\n359\n360\n4157\n4158\n4159\n361\n4160\n362\n4164\n4165\n4170\n4173\n4181\n363\n4183\n4189\n364\n4190\n4191\n4194\n4195\n4197\n4198\n4200\n4201\n4202\n4203\n4204\n4209\n4210\n4211\n4219\n4220\n4222\n4234\n365\n366\n4235\n4236\n4237\n4238\n367\n4240\n4241\n4242\n368\n369\n4244\n4245\n370\n4248\n371\n372\n4251\n4252\n373\n4254\n4255\n4256\n374\n4258\n375\n4259\n376\n377\n4262\n4263\n378\n379\n380\n381\n382\n4272\n383\n4275\n4276\n4278\n384\n4280\n4286\n385\n386\n4296\n4297\n4299\n4300\n4301\n387\n388\n4303\n4305\n4306\n389\n4315\n4317\n4319\n4321\n4324\n4325\n4326\n4330\n4335\n4337\n4341\n390\n4345\n4346\n4347\n4353\n4355\n4356\n4359\n4362\n4365\n4367\n4370\n4372\n4373\n4374\n4375\n4376\n4377\n391\n4378\n4379\n4380\n4381\n4382\n4383\n4390\n4393\n4395\n4398\n4406\n4409\n4411\n4420\n4422\n4423\n4424\n4425\n4426\n4427\n4433\n4434\n4435\n4436\n4437\n4440\n4441\n4444\n4445\n4446\n4448\n4450\n4452\n4453\n4454\n4460\n4461\n4462\n4463\n4466\n4468\n4472\n4473\n4475\n4476\n4481\n4482\n4485\n4490\n4491\n4492\n4495\n4496\n4497\n4498\n4502\n4507\n4508\n4509\n4510\n45
12\n4516\n4518\n4524\n4526\n4528\n4533\n4536\n4544\n4545\n4547\n4551\n4555\n4556\n4557\n4558\n4559\n4564\n4570\n4571\n4576\n4577\n4578\n392\n4579\n393\n4581\n4582\n4583\n4590\n4592\n4594\n4597\n4607\n4611\n4612\n4613\n4615\n4624\n4625\n4626\n4627\n4628\n4629\n4630\n4632\n4634\n4636\n4637\n4638\n4639\n4642\n4644\n4645\n4646\n4647\n4648\n4649\n4668\n4669\n394\n4671\n4672\n395\n4675\n396\n4677\n4678\n4692\n4693\n4697\n4699\n4700\n4702\n4703\n4704\n397\n4706\n4707\n4709\n4711\n4712\n4714\n4720\n4722\n4731\n398\n4738\n4739\n4740\n399\n4741\n4742\n4743\n4744\n4748\n4750\n400\n4751\n4752\n4753\n4754\n4757\n401\n4759\n402\n4764\n4768\n4771\n4773\n4775\n4776\n4778\n4779\n4780\n4782\n4784\n4786\n4787\n4788\n4789\n4795\n4799\n4800\n4801\n4802\n4803\n403\n4804\n4806\n4807\n4808\n4809\n4810\n4811\n4812\n4813\n404\n4816\n4817\n4818\n405\n4821\n4824\n4826\n4827\n4829\n4831\n4836\n4838\n4842\n406\n4843\n4844\n4845\n4846\n4847\n4848\n407\n4851\n4853\n4854\n408\n4855\n4856\n4857\n4858\n4860\n409\n4862\n4863\n4864\n4866\n4867\n4868\n4870\n4871\n4872\n4873\n4874\n4875\n4880\n4882\n4886\n4887\n4888\n4889\n4891\n4894\n4896\n4897\n4898\n4899\n4900\n4901\n4902\n410\n4904\n4906\n4907\n411\n4911\n4912\n4913\n4914\n4915\n4916\n4917\n4918\n4919\n4920\n4922\n4923\n4924\n4926\n4928\n4929\n4932\n4933\n4934\n4937\n4938\n4939\n4940\n4942\n4943\n4944\n4946\n4949\n4950\n4951\n4953\n4954\n4957\n4961\n4962\n412\n4966\n4967\n413\n4975\n4976\n4977\n4981\n4982\n4985\n4986\n4987\n4989\n4990\n4992\n4993\n4994\n4996\n4998\n5000\n5002\n5003\n5004\n5005\n5006\n5007\n5009\n5011\n5012\n5013\n5014\n5016\n5017\n5018\n5020\n5021\n5022\n5024\n5025\n5026\n5027\n5028\n5030\n5031\n5036\n5037\n5039\n5040\n5041\n5042\n5044\n5045\n5048\n5049\n5050\n5051\n5052\n5053\n5054\n5055\n5056\n5057\n5059\n5060\n5061\n5062\n5063\n414\n5064\n5066\n5068\n5069\n5070\n5071\n5072\n5074\n5075\n5076\n5078\n5079\n5080\n5081\n5082\n5083\n5084\n5085\n5086\n5089\n5090\n5091\n415\n5093\n5094\n5095\n416\n5098\n5101\n5102\n5104\n5105\n5112\n5113\
n5114\n5116\n417\n5120\n5121\n5122\n418\n5124\n5125\n5126\n5127\n5128\n5130\n419\n5131\n5132\n5135\n5136\n5139\n420\n5140\n421\n5143\n5145\n5146\n5147\n422\n423\n424\n5149\n5151\n5152\n5154\n5155\n425\n5159\n5161\n426\n5164\n427\n5167\n5168\n5171\n5172\n5175\n428\n5176\n5177\n429\n5179\n5180\n5181\n5182\n5183\n5184\n5187\n5188\n5189\n5190\n5191\n5192\n430\n5193\n5194\n5195\n5196\n5197\n5198\n5200\n5201\n5202\n431\n5203\n432\n5206\n5207\n5208\n5209\n433\n5213\n5214\n5215\n5216\n434\n435\n5219\n5220\n5226\n5227\n5228\n5229\n5230\n5231\n5232\n5233\n5234\n5235\n5236\n5240\n5241\n5242\n5243\n5244\n5245\n5247\n5249\n436\n5250\n437\n5252\n438\n5253\n5255\n5256\n5257\n439\n5262\n5265\n5268\n5269\n5270\n5271\n5272\n5274\n5275\n5276\n5277\n5279\n5282\n5283\n440\n5284\n5285\n441\n5286\n5287\n5288\n5290\n5291\n5292\n5294\n442\n5296\n5297\n5300\n5303\n5305\n5307\n5308\n5309\n5313\n5315\n5316\n5317\n5318\n5319\n5325\n443\n5327\n5329\n444\n5330\n5332\n5333\n5334\n5335\n5336\n5338\n445\n5344\n5347\n5349\n5351\n5352\n5353\n5354\n5355\n446\n5357\n5359\n447\n5360\n5364\n5365\n5366\n5367\n5368\n5370\n448\n5375\n5380\n5382\n5384\n5387\n5388\n5390\n5391\n5393\n5394\n5395\n5398\n5399\n5400\n5402\n5403\n5404\n5405\n5406\n5411\n5412\n5414\n5415\n5416\n5417\n5418\n5420\n5422\n5425\n5426\n5428\n5429\n5430\n449\n5433\n5434\n5435\n450\n5436\n5437\n5439\n5443\n5444\n5445\n5446\n5449\n5450\n5452\n5454\n5455\n451\n5457\n5458\n5460\n5461\n5464\n5465\n5469\n5470\n5471\n5472\n5473\n5474\n452\n5475\n5476\n453\n5478\n5479\n5480\n5481\n454\n5483\n5484\n5485\n5486\n5487\n5488\n5490\n5491\n5492\n5495\n5496\n5498\n5502\n5503\n5505\n5506\n455\n5509\n5511\n5512\n5514\n5516\n5517\n456\n5520\n5521\n5522\n5523\n5524\n5525\n5527\n5528\n5529\n5530\n5532\n5533\n5534\n457\n5536\n5540\n5541\n5543\n5544\n5545\n5546\n5547\n5549\n5550\n5551\n5553\n5554\n5558\n5559\n5562\n5563\n5564\n5565\n5567\n5568\n5569\n458\n5570\n5572\n459\n5574\n5577\n5578\n5579\n5581\n5582\n460\n461\n5585\n5586\n5589\n5593\n5595\n5598\n5599\n5600
\n5601\n5602\n5603\n5606\n5609\n5610\n5611\n5614\n5616\n5617\n5619\n5621\n5622\n5624\n5626\n5628\n5629\n5630\n5631\n5632\n462\n5635\n5637\n5638\n5639\n5641\n5642\n5646\n463\n5648\n464\n5650\n5652\n5654\n5655\n5656\n5657\n5658\n5659\n5660\n5661\n5662\n5664\n5665\n465\n466\n5666\n5667\n5668\n5670\n5671\n5672\n5674\n5675\n5676\n5677\n5679\n5681\n5682\n5684\n5686\n5687\n5689\n5690\n5691\n5695\n5696\n5698\n5700\n5701\n5705\n5707\n5708\n5709\n5711\n5712\n5713\n5714\n467\n5716\n5717\n5718\n5719\n5721\n5723\n5725\n5727\n5728\n5730\n468\n5732\n5733\n5734\n5735\n5736\n5737\n5738\n5739\n5740\n5741\n5742\n5743\n5744\n5746\n5747\n5748\n5749\n5750\n5752\n5753\n5754\n5755\n5756\n5757\n5758\n5762\n5765\n469\n5767\n5769\n5770\n5777\n5778\n5779\n5780\n5783\n5784\n5785\n5786\n5788\n5789\n5794\n5795\n5796\n5797\n5799\n5800\n5802\n5803\n470\n5806\n5807\n5811\n5813\n5817\n5819\n471\n5820\n472\n473\n5821\n5822\n5823\n5824\n5825\n5826\n5827\n5828\n5832\n5833\n5837\n5838\n5839\n5840\n5841\n5842\n5843\n5845\n5846\n5847\n5848\n5849\n5852\n5854\n5855\n5856\n5857\n5858\n5859\n5860\n5861\n5862\n5864\n5865\n5866\n5867\n474\n5869\n5870\n5871\n5872\n5873\n5878\n5882\n5883\n475\n476\n5886\n477\n5891\n5894\n5895\n5897\n5899\n5900\n5901\n5903\n5907\n5908\n5909\n5910\n5911\n478\n5912\n5913\n5914\n5916\n5919\n5920\n5921\n5922\n479\n5923\n5926\n5927\n5930\n5931\n5932\n5933\n5937\n480\n5938\n5939\n5940\n5941\n5942\n5944\n5945\n5946\n481\n5947\n482\n5948\n5949\n5951\n5952\n483\n5955\n5957\n484\n5959\n5960\n5961\n5962\n5963\n5965\n5966\n5969\n5970\n5971\n5976\n5978\n5981\n5982\n5986\n5987\n485\n5988\n5989\n5990\n5991\n5992\n5995\n5996\n5997\n486\n5998\n487\n5999\n6000\n6001\n6003\n6005\n6006\n6007\n6008\n6009\n6010\n6011\n6013\n6015\n6016\n488\n6017\n489\n490\n491\n6019\n6022\n6023\n6025\n6027\n6028\n6029\n6030\n6032\n6037\n6038\n6039\n6040\n6043\n6046\n6048\n6049\n6050\n6052\n6055\n6056\n6057\n6058\n6059\n6060\n6061\n6062\n6063\n6065\n6069\n6071\n6072\n6073\n6074\n492\n6075\n6076\n6077\n6079\n6081\n6082\n6
083\n6084\n493\n6085\n494\n6088\n495\n6090\n6094\n6095\n6096\n6097\n6098\n6099\n6100\n6101\n6102\n6107\n6108\n496\n6110\n6111\n6112\n6113\n6115\n497\n6116\n6117\n6119\n6121\n6122\n6123\n6124\n6125\n6126\n6127\n6128\n6129\n498\n6131\n6132\n6134\n6135\n6136\n6137\n6139\n6140\n6143\n6144\n6148\n6153\n6155\n6157\n6158\n6159\n6160\n6161\n6162\n6164\n6165\n6166\n6168\n6170\n6172\n499\n6173\n6174\n500\n6176\n6181\n6182\n6184\n6187\n6189\n6190\n501\n6192\n6193\n6194\n6195\n6196\n6197\n502\n6199\n6200\n6201\n6202\n6205\n6206\n6208\n6209\n6210\n6211\n6212\n6213\n6214\n6215\n6222\n6224\n6226\n6233\n6234\n6235\n6238\n6239\n6240\n6245\n6249\n6250\n6251\n6252\n6254\n6255\n6256\n503\n6257\n6260\n6261\n6262\n6263\n6264\n504\n505\n6266\n6267\n6268\n6270\n6271\n6272\n506\n6275\n6277\n6278\n6279\n6280\n6281\n6282\n6283\n6284\n6285\n6286\n6290\n6291\n6295\n6296\n6297\n6298\n6300\n6301\n6302\n6303\n6304\n6305\n507\n6308\n6309\n6310\n6315\n6316\n6317\n6320\n6321\n6322\n6323\n6324\n6328\n6329\n6333\n6335\n6337\n6338\n6339\n6340\n6341\n508\n6342\n6344\n6346\n6348\n6349\n6350\n6351\n6352\n6353\n6354\n6357\n6358\n6359\n509\n6362\n6363\n6364\n6365\n6369\n6371\n6374\n6375\n6377\n6378\n6381\n6382\n510\n6385\n6386\n6387\n6388\n6389\n6391\n6393\n6394\n6395\n6396\n6399\n511\n6400\n6401\n6402\n6403\n6406\n6407\n6408\n6409\n6410\n6412\n6415\n6416\n6419\n6420\n6422\n6423\n6428\n6429\n6430\n6432\n6435\n512\n6437\n6438\n6439\n513\n6441\n6442\n6443\n6446\n6447\n6448\n6450\n6451\n6453\n6454\n6455\n6459\n6460\n6462\n6463\n6466\n6467\n6471\n6472\n6473\n6474\n6476\n6477\n6480\n6481\n6482\n6483\n6484\n6485\n6487\n6489\n6490\n514\n515\n6492\n6493\n6494\n516\n6496\n6498\n6499\n517\n6502\n6503\n518\n519\n6505\n6506\n6507\n6508\n6509\n6510\n6512\n6513\n6517\n520\n6520\n6521\n6522\n6523\n6525\n6526\n6527\n6528\n521\n522\n6532\n6533\n6534\n6540\n6542\n6546\n6547\n6549\n6552\n6553\n6554\n6556\n6559\n6560\n6561\n6563\n523\n6568\n6575\n6578\n6579\n524\n6584\n6585\n6586\n6589\n6590\n6591\n6593\n6595\n6597\n6599\n6600\
n6603\n6604\n6609\n6614\n6615\n6619\n6621\n6622\n6623\n6627\n6628\n6629\n525\n6632\n6634\n6641\n6642\n6647\n6648\n6649\n6651\n6653\n6654\n6658\n6664\n6665\n6666\n6672\n6675\n6676\n6677\n6678\n6679\n6681\n6684\n6685\n6686\n6688\n6689\n6690\n6691\n6693\n6698\n6699\n6700\n6701\n6702\n6704\n6706\n6708\n526\n6709\n527\n6710\n6711\n6713\n6714\n6715\n6720\n6723\n6724\n6725\n6727\n528\n529\n6730\n6732\n6736\n6740\n6742\n6743\n6744\n6745\n6751\n6754\n6755\n530\n6757\n6758\n6759\n531\n6762\n6765\n6766\n6767\n6768\n6769\n6770\n6771\n6772\n6773\n532\n6774\n6775\n6776\n6779\n6782\n6786\n6788\n6792\n6793\n6794\n6797\n6798\n6801\n6802\n6803\n6805\n533\n6806\n534\n6807\n535\n6808\n6809\n6811\n6812\n6813\n6814\n6816\n6819\n6820\n6823\n6826\n6827\n6828\n6829\n6831\n6836\n6840\n6842\n536\n537\n6847\n6848\n6849\n6850\n6851\n6852\n6854\n538\n6855\n6856\n6857\n6858\n6859\n6860\n6861\n6862\n6865\n539\n6867\n6868\n6871\n6872\n6874\n6877\n6878\n6880\n6881\n6882\n6883\n6885\n6886\n6890\n6891\n6893\n6895\n6897\n6899\n6900\n6901\n6902\n6904\n6907\n6908\n6909\n6910\n6911\n6912\n6913\n6914\n6915\n6916\n6918\n6919\n6920\n6922\n6924\n6925\n6927\n6928\n6929\n6930\n6931\n6934\n6935\n6937\n6938\n540\n6939\n6940\n6941\n6942\n6947\n6949\n6951\n6952\n6954\n6955\n6958\n6959\n6962\n6964\n6965\n6969\n541\n6971\n6972\n6973\n542\n6975\n6978\n6979\n6980\n6984\n6988\n6990\n6991\n6992\n6993\n6994\n6997\n543\n7001\n7002\n7003\n7006\n7007\n7009\n7010\n7012\n7014\n7015\n7016\n544\n7017\n7018\n7022\n7023\n7026\n7027\n7028\n7029\n7030\n7031\n7032\n7033\n7034\n7038\n7042\n7044\n7045\n7046\n7048\n7049\n7050\n7051\n7052\n7053\n7055\n7056\n7057\n7059\n7060\n545\n546\n7065\n7066\n7067\n547\n7068\n7069\n7070\n7071\n7072\n7073\n7074\n7075\n7081\n7084\n7088\n7090\n7091\n7092\n7095\n7100\n7101\n7103\n7105\n7106\n7107\n7110\n7112\n7113\n7114\n7115\n7117\n7119\n7120\n7121\n7122\n7125\n7126\n7127\n7128\n7129\n548\n7130\n7132\n549\n7133\n7136\n7137\n7141\n7142\n7143\n7144\n7145\n7146\n7148\n7150\n7152\n7153\n7155\n7156\n550\n715
7\n7158\n7159\n7161\n7162\n7164\n7165\n7167\n7169\n7170\n7171\n7172\n7174\n7175\n7177\n7184\n7186\n7188\n7189\n7190\n7192\n7193\n7194\n7195\n7196\n7197\n7198\n7200\n551\n7201\n7202\n7205\n7206\n7211\n7212\n7213\n7214\n7215\n7216\n7218\n7220\n7221\n7222\n7223\n7224\n7226\n7227\n7229\n7231\n7232\n7233\n7234\n7235\n7236\n7239\n7242\n7243\n7245\n552\n7246\n7247\n7251\n7252\n7253\n7254\n7255\n7256\n7257\n7260\n7261\n7262\n7267\n7269\n7270\n7272\n7273\n7276\n7277\n7278\n7280\n7281\n7284\n7285\n7287\n7288\n7293\n7294\n553\n7297\n7301\n7303\n7304\n7306\n7309\n7310\n7311\n7312\n7313\n7314\n7316\n7321\n7322\n7323\n7324\n7325\n554\n7327\n555\n7330\n7331\n7333\n556\n7335\n7337\n7341\n7343\n7344\n7345\n7348\n7351\n7352\n7355\n7358\n7359\n7360\n7362\n557\n7367\n7368\n7370\n7371\n7372\n7373\n7374\n7376\n7377\n7378\n7379\n7381\n7382\n7383\n7386\n7387\n7388\n7392\n7393\n7394\n7395\n7396\n7398\n7400\n7401\n7402\n7403\n7406\n7407\n7408\n7410\n7411\n7415\n7416\n7420\n7422\n558\n7423\n7426\n7427\n7428\n7431\n7432\n7434\n7435\n7436\n7437\n7438\n7439\n7440\n559\n7442\n7443\n7444\n560\n7449\n7450\n7452\n7456\n7457\n7458\n7464\n7465\n7468\n7470\n561\n7477\n7479\n7480\n7482\n7483\n7484\n7486\n562\n563\n7487\n564\n7490\n7491\n7492\n7494\n7497\n7498\n7499\n7501\n7503\n7504\n7507\n565\n7509\n7510\n7511\n566\n7512\n7514\n7515\n7516\n7517\n7518\n7520\n7521\n7523\n7524\n7525\n7526\n7528\n7529\n7530\n567\n7531\n7532\n7533\n7535\n7536\n7539\n7540\n7542\n568\n7543\n7544\n7546\n7547\n7548\n7549\n7551\n7552\n7553\n7554\n7557\n7558\n7559\n7560\n7565\n7566\n7567\n7568\n7569\n7573\n7581\n7583\n7584\n7585\n7586\n7587\n7589\n7591\n7592\n7593\n7594\n7595\n569\n7596\n7597\n7598\n7599\n7600\n7601\n7602\n7603\n7605\n7606\n7607\n7608\n7610\n7611\n7612\n7613\n7616\n7619\n7622\n7623\n7624\n7625\n7626\n570\n7628\n7629\n7630\n7631\n7632\n571\n7633\n7634\n7635\n7636\n7640\n7641\n7642\n7643\n7644\n7645\n7646\n7647\n7648\n7650\n7651\n7652\n7653\n7654\n7656\n7658\n7659\n7660\n7665\n7666\n7667\n7669\n7670\n7671\n7672\n76
73\n7674\n7675\n7678\n7682\n7684\n7685\n7686\n7687\n7688\n7689\n7691\n7692\n7694\n7695\n7696\n7698\n7699\n7701\n7704\n7705\n7706\n7707\n572\n7709\n573\n7710\n7711\n574\n575\n7712\n7713\n7714\n7715\n7716\n576\n577\n7719\n7721\n7723\n7724\n578\n7727\n7728\n7730\n7731\n7734\n7735\n7736\n7737\n7738\n579\n7739\n7741\n7742\n7743\n7744\n7745\n7746\n7747\n7749\n7752\n7753\n7754\n7755\n7756\n7757\n7758\n7760\n580\n7761\n7763\n581\n7766\n7767\n7768\n7771\n582\n7774\n7775\n7780\n7781\n7782\n7787\n7789\n7790\n7792\n7795\n7796\n7797\n583\n7801\n7802\n7804\n7806\n7807\n7809\n7810\n7812\n7816\n7819\n7822\n7828\n7834\n7835\n7836\n7838\n7840\n7841\n584\n585\n7846\n586\n7849\n7850\n7851\n7853\n7854\n7855\n7856\n587\n7861\n588\n7862\n7863\n7865\n589\n7868\n7869\n7870\n7871\n7872\n7873\n7874\n7875\n7876\n590\n591\n7878\n7879\n7880\n7881\n7882\n7883\n7884\n7886\n7887\n7890\n7892\n7893\n7894\n7899\n7900\n7902\n7904\n592\n7905\n593\n7909\n7910\n7911\n594\n7912\n7916\n7918\n595\n7920\n7921\n7922\n7924\n7925\n596\n7927\n7928\n7929\n7931\n7933\n7936\n7939\n7942\n7943\n7944\n7945\n7949\n7951\n7954\n7955\n7957\n7959\n7960\n7961\n7962\n7964\n7965\n7966\n7967\n7968\n7969\n7970\n7971\n7973\n7974\n7976\n7977\n7978\n7981\n7987\n7988\n7990\n7993\n7994\n7995\n7998\n7999\n8000\n8004\n8006\n8007\n8009\n8010\n8011\n8013\n8015\n8016\n8019\n8020\n8022\n8025\n8026\n8028\n8029\n8031\n8032\n8033\n8034\n8037\n8040\n8041\n8044\n8045\n8047\n8049\n8053\n597\n8054\n8057\n8058\n8059\n8060\n8061\n8062\n598\n8064\n599\n8065\n8066\n8071\n600\n8072\n8073\n601\n8079\n8080\n602\n8083\n8085\n8087\n8088\n8090\n8091\n603\n8094\n8099\n8100\n8101\n8103\n8104\n8106\n8107\n8108\n8109\n8111\n8112\n8113\n8114\n8115\n8117\n8118\n8119\n8120\n8121\n8123\n8124\n8125\n604\n8126\n8127\n8128\n8129\n8131\n8133\n8134\n8135\n8136\n8137\n8141\n8144\n8146\n8148\n8150\n8151\n8152\n8153\n8154\n8155\n8158\n8159\n8163\n8164\n8165\n8168\n8170\n8172\n8173\n8174\n8178\n8180\n8181\n8182\n8183\n8184\n8187\n8188\n8189\n8190\n8193\n8194\n8196\n8200\n8
201\n8205\n8208\n8210\n8213\n8215\n8219\n8223\n8225\n8227\n8229\n8230\n8232\n8236\n8238\n8239\n8244\n8247\n8249\n8250\n8251\n8253\n8255\n8257\n8259\n8262\n8263\n605\n8266\n606\n8267\n8269\n8271\n8274\n8275\n8277\n8279\n8280\n8282\n8284\n8285\n8287\n607\n8288\n8294\n8295\n8296\n8297\n8298\n8300\n8303\n8305\n608\n609\n8307\n8308\n8309\n610\n8310\n8312\n8313\n8316\n8317\n8318\n8320\n611\n612\n8324\n8325\n8326\n8327\n8328\n8329\n8330\n8331\n613\n8336\n8337\n8338\n8339\n8340\n8341\n8345\n8348\n8349\n8350\n8352\n8355\n8358\n8359\n8362\n8363\n8367\n8368\n8369\n8370\n8373\n8374\n8375\n8376\n8377\n8378\n8380\n8381\n8383\n8384\n8385\n8386\n8387\n8390\n8391\n8392\n8393\n8394\n614\n8397\n8402\n8404\n8405\n8406\n8407\n8408\n8409\n8410\n8412\n8413\n8414\n8416\n8418\n8419\n8420\n615\n8421\n8422\n8423\n8424\n8426\n8427\n8428\n8429\n8430\n8434\n616\n8435\n8437\n8439\n8443\n8444\n8445\n8447\n617\n8448\n8450\n8452\n8453\n8454\n618\n8458\n8459\n8461\n8462\n8463\n8465\n8467\n8468\n8470\n619\n8473\n8478\n8479\n8481\n8484\n8485\n8486\n8490\n620\n8492\n8493\n8494\n8495\n8498\n8499\n8500\n8503\n8504\n8505\n8506\n8508\n8510\n8512\n8515\n8517\n8518\n621\n8519\n8520\n8522\n8523\n8525\n8527\n8528\n8529\n8531\n8532\n8533\n8534\n8535\n8537\n8539\n8540\n8541\n622\n8543\n623\n8546\n8547\n8548\n8549\n8550\n8551\n8553\n624\n8554\n625\n8557\n8558\n8560\n8565\n8567\n8569\n8571\n626\n8572\n8574\n8575\n8576\n8577\n8578\n8579\n8580\n8581\n8583\n8584\n627\n8585\n8587\n628\n8589\n8591\n8592\n8593\n8596\n8600\n629\n8601\n8602\n8603\n8606\n8607\n8609\n8610\n630\n8612\n8613\n8614\n8615\n8619\n8620\n8621\n8623\n8624\n8625\n8626\n8627\n8628\n8630\n8632\n8633\n8636\n8637\n8640\n8641\n8642\n8644\n8645\n8647\n8650\n8651\n8654\n8655\n631\n632\n8657\n8661\n633\n8663\n8664\n8665\n8666\n8669\n8673\n8675\n8676\n8678\n8679\n8681\n634\n8685\n8687\n8690\n8691\n8692\n8693\n8694\n8695\n8697\n8698\n8700\n8702\n8710\n8711\n8712\n8713\n635\n8718\n8724\n8726\n8728\n8729\n636\n637\n638\n639\n8738\n8740\n8741\n8742\n8746\n8748\n87
49\n8750\n8752\n8753\n8754\n8756\n8757\n8758\n8759\n8760\n640\n8761\n8762\n8763\n8764\n8766\n8767\n8768\n8769\n8771\n8773\n641\n8775\n8776\n642\n8777\n8778\n8779\n8783\n8785\n8787\n8789\n8790\n8791\n8792\n643\n8793\n8795\n8796\n8802\n8803\n8804\n8805\n8806\n8807\n8808\n8809\n8810\n8812\n644\n8817\n8818\n8819\n8822\n8823\n8824\n8825\n8827\n8828\n8829\n645\n646\n8834\n647\n8835\n8836\n8837\n8838\n8842\n8843\n8844\n8845\n8846\n8847\n8848\n8849\n648\n649\n8851\n8852\n8853\n8855\n8856\n8858\n8859\n8860\n8861\n8862\n8863\n8866\n8871\n8872\n8874\n8876\n8879\n8881\n8882\n8886\n8888\n650\n8889\n8890\n8891\n651\n8895\n8896\n8897\n652\n8900\n653\n8902\n8903\n8906\n8907\n8908\n8910\n8911\n8914\n8915\n8916\n8917\n8918\n8919\n8923\n8924\n654\n8926\n8928\n655\n656\n8931\n8932\n8933\n8934\n8936\n657\n8938\n8939\n658\n8940\n8941\n659\n8942\n8943\n8944\n660\n8946\n661\n662\n8950\n8952\n663\n8961\n8962\n664\n8963\n8964\n8967\n8971\n8973\n665\n8975\n8977\n8979\n8980\n8981\n8982\n666\n667\n8983\n8984\n668\n669\n8985\n8986\n8987\n8988\n8992\n8993\n8994\n8995\n8996\n670\n8997\n8998\n8999\n9000\n671\n672\n673\n9001\n674\n9002\n9003\n9004\n9005\n9006\n9007\n675\n9009\n9010\n9014\n9015\n9016\n9017\n9018\n9019\n9026\n9031\n9032\n9033\n9034\n9035\n9036\n9037\n9039\n9040\n676\n677\n9046\n9047\n9050\n9054\n9055\n9059\n9060\n9061\n9062\n9063\n9067\n9068\n678\n9073\n679\n9074\n9076\n9077\n9078\n9079\n9081\n9082\n9083\n9086\n9088\n9090\n9091\n9092\n9093\n9095\n9096\n9100\n9101\n9103\n9104\n9105\n9106\n9107\n9108\n9113\n9115\n9117\n9118\n680\n9122\n9123\n9125\n9126\n9129\n9131\n9133\n681\n9135\n9138\n9141\n9142\n9144\n9145\n9146\n9147\n9148\n682\n9150\n683\n9154\n9155\n684\n9158\n685\n9160\n9161\n9162\n9163\n9165\n9168\n9170\n9171\n686\n9173\n9174\n9176\n9177\n9180\n9182\n9183\n9184\n9185\n9188\n9189\n9191\n9192\n9196\n9197\n9199\n9200\n9201\n9203\n9204\n9205\n9207\n9209\n687\n9210\n9211\n9212\n9213\n9214\n9215\n9217\n9218\n9220\n9222\n9226\n9227\n688\n9228\n9229\n9230\n9233\n9234\n9235\n9236\n9237\
n9238\n9240\n9242\n9243\n9245\n9247\n9248\n9249\n9250\n9253\n9254\n9255\n9256\n9258\n9259\n9261\n689\n690\n9265\n9266\n691\n9270\n9273\n9274\n9275\n9276\n9277\n692\n9278\n693\n9283\n9286\n694\n9287\n695\n9288\n9289\n9290\n9291\n696\n9293\n697\n9294\n698\n9295\n9296\n9301\n9302\n9304\n9307\n9308\n9312\n9313\n9314\n9315\n9316\n699\n9322\n9323\n9324\n9325\n9326\n9328\n9329\n9330\n9331\n9332\n9333\n9334\n9336\n9337\n700\n701\n702\n9344\n9346\n9347\n9348\n9349\n9351\n9352\n703\n704\n9353\n9354\n9355\n9356\n9357\n9359\n9360\n9361\n705\n9365\n9366\n9367\n9368\n9373\n9374\n9375\n9376\n9377\n706\n9381\n9385\n9386\n9387\n9389\n9390\n707\n9392\n9393\n9396\n9397\n708\n9398\n9399\n9400\n9401\n9402\n9403\n9407\n9409\n9410\n9414\n9419\n709\n710\n9421\n9422\n9423\n9427\n9428\n9429\n9430\n9431\n9433\n9435\n9436\n9437\n9438\n9439\n9440\n9442\n9443\n711\n9446\n9447\n9449\n9451\n9453\n9454\n9455\n9456\n9457\n712\n9460\n9461\n9462\n9463\n9466\n9467\n713\n9470\n9473\n9474\n9475\n9477\n9479\n714\n715\n716\n717\n9487\n9488\n718\n9493\n9494\n9495\n9496\n9497\n9499\n9500\n719\n9503\n720\n9504\n9505\n9506\n721\n9507\n9508\n9509\n9511\n9515\n9516\n9517\n9519\n9520\n9521\n9523\n722\n9527\n9528\n9529\n9531\n9533\n9534\n9535\n723\n9540\n9541\n9544\n9545\n724\n9546\n9547\n9548\n9549\n9550\n9551\n725\n9554\n9556\n9563\n9566\n9567\n726\n9568\n9569\n727\n9574\n9575\n9576\n9577\n9578\n9579\n9580\n728\n9583\n9584\n9587\n9588\n9589\n729\n9593\n9596\n9599\n9600\n9602\n9603\n9604\n9605\n9606\n9607\n9611\n9612\n9613\n730\n9614\n9615\n9617\n9618\n731\n9621\n9622\n9629\n9630\n9632\n9633\n9634\n9635\n9636\n9637\n9638\n9639\n732\n733\n9646\n734\n9647\n9649\n9650\n9651\n9654\n9655\n735\n9658\n9659\n9660\n9661\n9662\n9663\n736\n9664\n9666\n737\n9668\n9672\n9673\n9674\n9675\n9676\n9677\n9678\n9679\n9681\n9682\n9683\n9684\n9686\n9687\n9688\n9691\n9692\n9698\n9701\n738\n9702\n9705\n9706\n9707\n9708\n739\n9709\n9711\n9712\n9713\n9714\n9716\n9719\n9722\n740\n9723\n9725\n9727\n9728\n9729\n741\n9733\n9736\n9738\n9739\n
9741\n9743\n9745\n9747\n9749\n9750\n9753\n9755\n9757\n9758\n742\n9759\n9760\n743\n9768\n744\n745\n9769\n9774\n9775\n9776\n9781\n9785\n9786\n9788\n9789\n9790\n9793\n9795\n9796\n746\n9798\n9800\n9801\n9803\n9804\n9809\n9810\n9818\n747\n9820\n9821\n9822\n9823\n9825\n9826\n748\n9827\n9828\n9829\n9831\n9834\n9835\n9836\n9837\n9838\n9840\n9841\n9845\n9847\n9852\n9855\n749\n750\n9858\n9863\n9864\n9866\n9870\n751\n9873\n9874\n9875\n9876\n9877\n752\n9880\n9881\n9882\n9883\n753\n9885\n9886\n9887\n754\n9888\n9891\n9896\n9897\n9898\n9899\n755\n9902\n9903\n9904\n9907\n9908\n9909\n9910\n9911\n9913\n9916\n756\n9917\n9918\n9919\n9920\n9921\n9923\n9926\n9928\n9931\n9932\n9933\n9934\n9935\n9937\n9939\n9940\n9945\n9946\n9947\n9948\n9949\n9953\n9954\n9955\n9957\n9959\n9962\n9963\n9966\n9968\n9969\n9970\n9972\n9975\n9976\n9977\n9979\n9980\n9981\n757\n9982\n9983\n9986\n758\n9993\n9994\n9995\n9996\n9997\n9999\n759\n10002\n10003\n10004\n760\n10008\n10009\n10010\n10011\n10012\n10014\n10015\n761\n10016\n10017\n10018\n10020\n10022\n10023\n10024\n10029\n10032\n10034\n10035\n10036\n10037\n10038\n762\n10039\n10042\n10043\n10044\n10046\n10048\n10051\n763\n10056\n10061\n10062\n10063\n10070\n10071\n10072\n10073\n764\n10076\n10079\n10083\n10084\n10090\n10092\n10093\n10094\n10095\n10097\n10101\n10103\n10104\n10108\n10110\n10111\n10112\n765\n10114\n10117\n10119\n10120\n10121\n10122\n10123\n10125\n10127\n10128\n10129\n10130\n10131\n10132\n10133\n10134\n10135\n10136\n10138\n10140\n10141\n10143\n10145\n10146\n10149\n10154\n10155\n10156\n766\n10157\n10159\n10160\n10161\n10162\n10163\n10164\n10167\n10168\n10169\n10170\n10171\n10173\n10177\n10179\n10180\n10181\n10183\n10184\n10185\n10186\n767\n10189\n10191\n768\n10192\n769\n10193\n10194\n10195\n10196\n10198\n770\n10200\n10203\n10204\n10205\n10207\n10208\n10209\n10210\n10211\n10212\n10216\n10217\n10218\n10219\n10220\n10221\n10222\n771\n10226\n10229\n10230\n772\n10237\n10238\n10239\n10240\n10241\n10242\n10243\n10244\n10248\n10249\n10251\n10254\n10255\n773\n10
258\n10259\n10261\n10263\n774\n10265\n10266\n10267\n10268\n10269\n10270\n10271\n10272\n10274\n10275\n775\n10276\n10277\n10278\n10279\n10280\n10281\n10282\n10283\n10285\n10287\n10288\n10289\n10290\n10291\n10293\n776\n10296\n777\n10297\n10298\n778\n10301\n10302\n10303\n10304\n10306\n10307\n10308\n10310\n10311\n10312\n10313\n779\n10314\n780\n10318\n10320\n10323\n10324\n10325\n10326\n781\n10327\n10330\n10334\n782\n10335\n783\n10337\n10338\n784\n10341\n10342\n10348\n10349\n10354\n10355\n10356\n10361\n10362\n10365\n10366\n10367\n10369\n10370\n10371\n785\n10372\n10373\n10375\n10376\n10377\n10378\n10382\n10384\n10388\n10392\n10394\n10398\n10399\n10400\n10401\n10402\n10403\n10404\n10405\n10408\n10409\n10410\n10412\n10413\n10415\n10419\n10420\n10422\n10423\n10424\n10426\n10430\n10431\n10432\n10433\n10434\n10436\n10437\n10438\n786\n10439\n10440\n10441\n10443\n10444\n10446\n10447\n10448\n10449\n10452\n10453\n10455\n10456\n10457\n10458\n10459\n10460\n10461\n10462\n10464\n10465\n10466\n10467\n10470\n10472\n10474\n10475\n10476\n10477\n10478\n10481\n10483\n10484\n10485\n787\n10488\n10490\n10494\n10495\n10497\n10500\n10501\n10502\n10506\n10507\n10508\n10509\n10510\n10511\n10512\n10513\n10514\n788\n10515\n10516\n789\n10517\n10520\n10522\n790\n791\n10525\n10526\n10529\n10530\n10531\n10532\n10533\n10534\n10535\n10536\n10537\n10538\n792\n10539\n10541\n10542\n793\n794\n10543\n10544\n10547\n10548\n10549\n10550\n10553\n10554\n10555\n10556\n10557\n10558\n10559\n10560\n10564\n10566\n10567\n10570\n10571\n10575\n10576\n10577\n10579\n10581\n10582\n10584\n10585\n10587\n10591\n10592\n10593\n10594\n10595\n10598\n10599\n10601\n10602\n10603\n10605\n10606\n10608\n10610\n10613\n10616\n795\n10617\n10619\n10620\n10621\n10622\n10623\n10625\n796\n10626\n10627\n10629\n10631\n10632\n10633\n10634\n10635\n10638\n10640\n10642\n10646\n10649\n10650\n797\n10654\n10656\n10657\n10660\n10661\n10662\n10663\n798\n799\n10667\n10669\n10672\n10675\n10677\n10678\n10680\n10682\n800\n10686\n10687\n10689\n10692\n10694\n10695
\n10697\n10698\n10699\n10702\n10703\n10704\n10706\n10707\n10708\n10709\n10710\n10711\n10712\n10713\n10715\n10716\n801\n10717\n10718\n802\n803\n10719\n10720\n10721\n10722\n10723\n10725\n10726\n804\n805\n806\n10728\n10731\n10734\n10736\n10738\n10739\n10740\n10741\n10742\n807\n10743\n10744\n10745\n10746\n10747\n808\n10751\n10753\n10754\n10757\n10760\n809\n10764\n10765\n810\n10772\n10773\n811\n10775\n812\n10776\n10777\n10778\n10780\n10784\n10793\n10796\n10797\n813\n10798\n10800\n10802\n10805\n10806\n10808\n10810\n814\n10811\n10812\n10813\n10815\n10817\n815\n10819\n816\n10821\n10823\n10824\n10825\n10826\n10830\n10831\n10832\n10840\n10841\n10844\n10845\n10846\n10847\n10848\n10849\n10850\n10851\n10853\n10854\n10855\n10856\n10857\n10858\n817\n10859\n10861\n10862\n818\n10866\n10867\n10868\n10871\n10872\n10874\n10875\n10877\n10878\n10879\n10880\n10881\n10882\n10883\n10888\n10889\n10891\n10892\n10893\n10894\n10898\n10899\n10900\n10901\n10903\n10905\n819\n10906\n10907\n10908\n10909\n10910\n10912\n10913\n10914\n10917\n10918\n10921\n10927\n10928\n10929\n10930\n10931\n10933\n10934\n10936\n10938\n10939\n10940\n10942\n10944\n10945\n10946\n10947\n10951\n10952\n10953\n10954\n820\n10955\n10958\n10959\n821\n822\n10960\n10962\n10963\n10967\n10968\n10969\n10972\n10974\n10975\n10976\n10978\n10979\n10982\n823\n10983\n10984\n10985\n10986\n10988\n10989\n10993\n10997\n10999\n11000\n11003\n11005\n11007\n11008\n11010\n824\n825\n11014\n11015\n11016\n11017\n11019\n826\n11021\n11024\n11027\n11028\n827\n11031\n11033\n11035\n11036\n11038\n11041\n828\n11042\n11044\n11045\n11046\n11047\n11049\n11050\n829\n11051\n11052\n830\n11054\n11057\n11058\n11062\n11064\n11065\n11066\n11067\n11070\n11074\n11075\n831\n11076\n11077\n11079\n11080\n832\n11081\n11085\n11086\n833\n11089\n11092\n11093\n11097\n11098\n11100\n834\n11102\n11103\n11104\n11105\n11107\n11108\n11109\n11110\n835\n11111\n836\n837\n11114\n11116\n11117\n838\n11118\n11120\n11122\n11123\n11124\n11128\n11129\n11132\n11133\n11134\n11135\n11140\n11144\n11
146\n11147\n11149\n11154\n11157\n11158\n839\n11160\n11161\n840\n11162\n11163\n11167\n11170\n11171\n11174\n11175\n841\n11177\n11179\n842\n11180\n843\n844\n11182\n11183\n11185\n11186\n11187\n11188\n11190\n11193\n11194\n11195\n11198\n845\n11199\n11202\n11204\n11205\n11206\n11207\n846\n11208\n11209\n11211\n11212\n11213\n11214\n11216\n11218\n11219\n11220\n11221\n11223\n11225\n11226\n11229\n11230\n11231\n11233\n11234\n11235\n11236\n11237\n11238\n11240\n11241\n11246\n847\n11247\n11248\n11249\n11250\n11253\n11254\n11255\n11256\n11257\n11258\n11259\n11260\n848\n11261\n11263\n11264\n11265\n11269\n11270\n11271\n11273\n11276\n11278\n11279\n11280\n11283\n11284\n11285\n849\n11287\n11288\n11289\n11290\n11292\n11293\n11294\n11295\n11296\n11298\n11300\n11301\n11302\n11303\n11304\n11305\n11307\n11308\n11309\n11310\n11311\n11312\n11313\n11314\n851\n11317\n11318\n11319\n11320\n11321\n11322\n11325\n11327\n11328\n11329\n11330\n11331\n11333\n852\n11334\n11335\n11337\n11345\n11348\n11350\n11353\n11355\n11356\n11357\n11358\n11359\n11361\n11363\n11365\n11366\n11367\n853\n11368\n854\n11370\n11371\n11372\n11380\n11381\n11383\n11384\n855\n11387\n11388\n11391\n11392\n11396\n856\n11400\n857\n11402\n11404\n11405\n11408\n11411\n11412\n11415\n11416\n11417\n11418\n11419\n858\n11423\n11424\n11425\n11433\n11434\n11435\n11437\n11439\n11440\n11442\n11445\n11446\n859\n11447\n11449\n11450\n860\n11452\n11455\n11457\n11458\n11459\n11462\n11464\n11465\n11466\n11468\n11469\n861\n11470\n11472\n11473\n11477\n11478\n11479\n11481\n11482\n11483\n11485\n11487\n11488\n11489\n11490\n11492\n11493\n11494\n11495\n11499\n11501\n11503\n862\n11507\n11509\n11511\n11512\n863\n11514\n11515\n11518\n11519\n11520\n11521\n11522\n11523\n11524\n864\n11525\n11526\n865\n11528\n11530\n11531\n11532\n11534\n866\n11535\n11536\n11537\n11538\n867\n11541\n11542\n11544\n11546\n11548\n11550\n11553\n11556\n11559\n11560\n11561\n11563\n11565\n11568\n868\n11570\n11571\n11572\n11573\n11574\n11578\n869\n11584\n11585\n11586\n11590\n11594\n11595\n870\
n11597\n871\n11598\n11600\n872\n11601\n11604\n873\n11605\n11608\n874\n875\n11609\n11611\n11612\n11613\n11614\n11615\n11616\n11617\n11618\n11619\n11621\n11622\n11623\n11625\n11627\n11628\n876\n11631\n11637\n11638\n11639\n11640\n11641\n11643\n11644\n11645\n11647\n11650\n11652\n11655\n877\n11659\n11660\n11661\n11662\n11663\n11664\n11665\n11666\n11671\n11673\n11676\n11678\n11679\n878\n11681\n11683\n11684\n879\n11690\n11691\n11692\n11693\n11694\n880\n11696\n11697\n11698\n11699\n11701\n881\n11706\n11707\n11708\n11709\n11710\n11711\n882\n11713\n11714\n11715\n11716\n11717\n11721\n11722\n11723\n11726\n883\n884\n11728\n11729\n11730\n11733\n885\n11734\n886\n11735\n11736\n11737\n11740\n11741\n11742\n11743\n11744\n11745\n11746\n11747\n11749\n11750\n11751\n887\n11753\n888\n11755\n11757\n11758\n11759\n11760\n11761\n11762\n11763\n11764\n11765\n11766\n11767\n11768\n11769\n11770\n11771\n11773\n889\n11778\n11780\n11781\n11783\n890\n11784\n11788\n11791\n11792\n11796\n11797\n891\n11798\n11800\n11801\n11803\n11804\n11805\n11806\n11808\n11809\n11812\n11813\n11816\n892\n893\n11817\n11818\n11819\n11821\n894\n11823\n895\n11827\n11829\n896\n11830\n11831\n897\n11832\n11834\n11835\n11836\n11837\n11838\n11839\n11840\n11842\n898\n11845\n11848\n11851\n11852\n11854\n11855\n11857\n11858\n899\n11861\n11863\n11864\n11865\n11869\n11870\n900\n11872\n11873\n11876\n11877\n11879\n11880\n11881\n11882\n11886\n11887\n11889\n11890\n11892\n11894\n11897\n11898\n11899\n11900\n11901\n11903\n11908\n11909\n11910\n11913\n11914\n11918\n11919\n11920\n11921\n11922\n11926\n11928\n11934\n11935\n901\n11936\n902\n11937\n11938\n11939\n11940\n11942\n11943\n11944\n11945\n11946\n11948\n11949\n903\n11953\n11954\n11958\n11959\n11960\n11961\n11962\n11963\n11964\n11965\n11966\n11967\n11968\n11969\n11971\n904\n11972\n905\n11973\n11974\n11975\n11976\n11977\n906\n11980\n907\n11982\n11983\n11984\n11986\n908\n11988\n11989\n11990\n11991\n11993\n11994\n11995\n11996\n11998\n12000\n12001\n12005\n909\n12008\n12010\n12011\n12012\n910\n12014\n
12016\n12017\n12018\n911\n12019\n12021\n12025\n12027\n12028\n12029\n12030\n912\n12034\n12035\n12037\n12038\n913\n12039\n12040\n12041\n12044\n12046\n12047\n12048\n12050\n12051\n12052\n12054\n12056\n12057\n12058\n12061\n12062\n12065\n914\n12067\n915\n12069\n12071\n12074\n12075\n12078\n12079\n12080\n12084\n12086\n12093\n12094\n12095\n12096\n12097\n12098\n12099\n12100\n12102\n12103\n12104\n12105\n12106\n12107\n12108\n12109\n12110\n12111\n12112\n12113\n12115\n12116\n12117\n12118\n12119\n12120\n12121\n12122\n12123\n12124\n12125\n12126\n12128\n12129\n12131\n12132\n12133\n12134\n12135\n12136\n12138\n12139\n12140\n12142\n12145\n12147\n12148\n12149\n12151\n12153\n12155\n12158\n12159\n12165\n12168\n12169\n12172\n12174\n12175\n12176\n12177\n12178\n12179\n12180\n12181\n12183\n12189\n12191\n12193\n12194\n12196\n12205\n12207\n12209\n12215\n12229\n12231\n12232\n12238\n12239\n12241\n12243\n12247\n12254\n12258\n12261\n12262\n12263\n12264\n12265\n12266\n12267\n12268\n12269\n12270\n12271\n12274\n12275\n12276\n12278\n12282\n12284\n12285\n12286\n12287\n12288\n12291\n12292\n12293\n12294\n12295\n916\n12298\n12300\n12301\n12305\n12308\n12310\n917\n12312\n12313\n12314\n918\n12321\n919\n920\n12322\n12323\n12324\n12325\n921\n12326\n12327\n12328\n12329\n12331\n12333\n12334\n12335\n12337\n12338\n12339\n12340\n12341\n12342\n12346\n12350\n12354\n12359\n12360\n922\n12361\n12362\n12368\n12374\n12375\n12379\n12381\n12383\n12386\n12390\n12392\n12394\n12396\n12399\n12400\n12401\n12402\n12403\n12404\n12405\n12406\n12408\n12409\n12410\n12411\n12412\n12413\n12415\n12417\n12418\n12419\n12423\n12424\n12425\n923\n12426\n12427\n12428\n12429\n12430\n12431\n12432\n12433\n12435\n12436\n12438\n12439\n12441\n12443\n924\n925\n12449\n12450\n12451\n12455\n12456\n12458\n12460\n12461\n12463\n12465\n12467\n12468\n12469\n12470\n12471\n12473\n12474\n12475\n12476\n12478\n12479\n12480\n12481\n12482\n12483\n12485\n12486\n12487\n12488\n12494\n12495\n926\n12496\n12499\n12503\n12504\n12506\n12507\n12509\n12510\n12511\n12512\n12
514\n12516\n12517\n12520\n12526\n12528\n12530\n12536\n12537\n12538\n12540\n12541\n12544\n12547\n12554\n12555\n12556\n12557\n12560\n12564\n12566\n12567\n12568\n12570\n12571\n12574\n12575\n12576\n12578\n12579\n12584\n12585\n12586\n12589\n12592\n12595\n12596\n12598\n12599\n12603\n12606\n12608\n12610\n12612\n12615\n12618\n12619\n12620\n12621\n12623\n12624\n12625\n12627\n12629\n12630\n12632\n927\n12634\n12636\n928\n12638\n12639\n12641\n12642\n12643\n12644\n12645\n929\n12647\n12650\n12651\n12652\n12653\n12655\n12660\n12661\n12663\n12664\n12665\n12667\n12671\n12672\n12674\n12681\n12684\n12685\n12686\n12687\n12689\n12690\n12692\n12693\n12695\n12696\n12697\n12698\n12699\n12700\n12702\n12703\n12704\n12705\n12706\n12710\n12711\n12713\n12714\n12716\n12717\n12718\n12719\n12721\n12723\n12724\n12727\n12729\n12731\n12733\n12735\n12737\n12738\n12741\n12742\n12744\n12745\n12746\n12747\n12748\n12749\n12750\n12751\n12754\n12755\n12756\n12757\n930\n12758\n12759\n12761\n12762\n12763\n12764\n12766\n12768\n12769\n12770\n12771\n12772\n12773\n12777\n12778\n12779\n12780\n12781\n12782\n12783\n12784\n12786\n12789\n12791\n12792\n12798\n12799\n12800\n12801\n12802\n12803\n12804\n12805\n12806\n12807\n12808\n12809\n12810\n12811\n12812\n12813\n12818\n12820\n12821\n12823\n931\n12825\n12827\n12828\n12829\n12830\n12835\n932\n12836\n12837\n12838\n12839\n12840\n12841\n12842\n12843\n12844\n933\n934\n12846\n12847\n12848\n12849\n12850\n12851\n12852\n12853\n12855\n12856\n12858\n12859\n12860\n12861\n12863\n12865\n12866\n12867\n12869\n12871\n12872\n12873\n12874\n12875\n12876\n935\n12880\n12881\n12882\n12883\n12884\n12885\n12886\n12887\n12888\n12889\n12890\n12891\n12892\n12893\n12894\n12895\n936\n12896\n12897\n937\n938\n12898\n12899\n12900\n12901\n12902\n12903\n939\n12904\n940\n12907\n941\n942\n943\n944\n12916\n12919\n12921\n12925\n12926\n12927\n945\n12928\n12930\n12931\n12932\n12933\n12935\n12936\n12937\n12939\n12941\n12942\n12943\n12944\n12945\n12946\n12948\n12951\n12952\n12953\n12955\n12957\n12958\n12959\n129
61\n12962\n12964\n12967\n12968\n12969\n12970\n12974\n12975\n12977\n12978\n12979\n12981\n12982\n12983\n12988\n12989\n12990\n12991\n946\n12992\n12993\n12994\n12996\n12997\n12998\n13000\n13002\n13003\n13004\n13005\n13009\n13010\n13011\n13015\n13016\n13017\n13019\n13020\n13021\n13022\n13023\n947\n13024\n13028\n13029\n13030\n13031\n13033\n13034\n13035\n13037\n13038\n13042\n13046\n13047\n13048\n13050\n13052\n13053\n13054\n13055\n13058\n13059\n13062\n13067\n948\n13072\n13073\n13075\n13077\n13078\n13079\n13080\n13082\n13083\n13084\n13087\n949\n13090\n13091\n13092\n13095\n950\n13097\n13098\n13099\n13100\n13101\n13102\n13103\n13105\n13106\n13107\n951\n13108\n13109\n13110\n13111\n13113\n13115\n13116\n13117\n13118\n13119\n13120\n13122\n13124\n13126\n13127\n13129\n13132\n952\n953\n954\n13134\n13135\n13138\n955\n13139\n13140\n13142\n13144\n13145\n13147\n13149\n13151\n13152\n13153\n13154\n13158\n13163\n13165\n13170\n13172\n956\n13178\n13179\n13180\n13183\n13184\n13187\n13188\n13189\n13192\n13193\n13194\n13195\n13196\n13198\n13199\n13200\n13201\n13202\n13206\n13207\n13208\n13212\n13213\n13214\n13215\n13216\n13217\n13218\n13219\n13221\n13223\n957\n13225\n13229\n13230\n13232\n13236\n13239\n13242\n13245\n13246\n13248\n13249\n13254\n13255\n13256\n13257\n13258\n13264\n13266\n13267\n13268\n13270\n13271\n958\n13274\n13276\n13277\n13278\n13282\n13287\n13288\n13289\n13290\n13291\n13296\n13297\n13298\n13300\n13301\n13302\n13303\n13305\n13306\n13307\n13308\n13309\n13310\n13311\n13312\n13313\n13318\n13321\n13322\n13325\n13327\n13328\n13339\n13340\n13341\n13342\n13343\n13347\n13348\n13349\n13351\n13352\n13353\n13354\n13355\n13357\n13358\n13359\n13360\n13364\n13366\n13367\n13369\n13370\n13371\n13372\n13373\n13375\n13377\n13378\n13380\n13381\n13382\n13383\n13385\n13387\n13389\n13390\n13391\n13393\n13394\n13395\n13397\n13398\n13399\n13401\n13403\n13405\n13406\n13410\n13413\n13417\n13419\n13420\n13422\n13423\n13426\n13427\n13428\n13429\n13431\n13432\n13437\n13439\n13440\n13441\n13443\n959\n13447\n1
3455\n13458\n13464\n13467\n13468\n13470\n13473\n13474\n13478\n960\n13484\n13487\n13491\n13492\n13493\n13494\n13495\n13506\n13511\n13513\n13515\n13516\n13517\n13518\n13519\n13520\n13521\n13522\n13523\n13524\n13525\n13526\n13528\n13531\n13532\n13533\n13534\n13535\n13537\n13538\n13544\n13546\n13548\n13557\n13558\n13561\n13562\n13563\n13564\n13565\n13570\n13572\n13573\n13574\n13575\n13577\n13579\n13580\n13581\n13582\n13583\n13585\n13586\n13587\n13591\n13593\n13594\n13599\n13605\n13612\n13617\n13621\n13629\n13632\n13634\n13640\n13642\n13643\n13644\n13646\n13648\n13649\n13652\n961\n13653\n13656\n13658\n13659\n13660\n13661\n13662\n13663\n13664\n13669\n13670\n13672\n13674\n13679\n13680\n13681\n13682\n13683\n13687\n13688\n13689\n13690\n13692\n13693\n13694\n13695\n13696\n13697\n13698\n13699\n13702\n13703\n13704\n13706\n13707\n13709\n13710\n13711\n13712\n13713\n13715\n13716\n13717\n13721\n13723\n13725\n13727\n962\n13728\n13730\n13732\n13733\n963\n13735\n13736\n13737\n13738\n13739\n13742\n964\n13746\n13747\n13748\n13752\n13756\n13759\n13760\n13761\n13762\n13763\n13767\n13768\n13769\n13770\n13771\n13772\n13773\n13775\n13777\n13778\n13779\n13780\n13781\n13782\n13783\n965\n13785\n13786\n13789\n13790\n13791\n13793\n13795\n13799\n13800\n13801\n13802\n13804\n13805\n13806\n13807\n13808\n13809\n13810\n13811\n13812\n13814\n13816\n13817\n13820\n13821\n13822\n13824\n13825\n13826\n13827\n13829\n13833\n13834\n13835\n13836\n13837\n966\n13838\n13839\n13841\n13842\n13844\n13845\n13846\n13847\n13848\n13849\n13850\n13851\n13852\n13853\n13854\n13855\n13856\n13857\n13858\n13859\n13860\n13861\n13864\n13865\n13866\n13867\n13868\n13870\n13871\n13872\n13873\n13874\n13875\n13876\n13877\n13878\n13879\n13880\n13882\n13883\n13884\n13885\n13886\n13887\n13888\n13889\n13891\n13892\n13893\n13894\n13895\n13896\n13897\n13898\n13899\n13900\n13901\n13902\n13905\n13906\n13907\n13908\n13909\n13910\n13911\n13912\n13913\n13915\n13916\n13917\n13918\n13919\n13920\n13921\n13922\n13923\n13924\n13927\n13928\n13929\n13930\
n13931\n13932\n13933\n13934\n13935\n13936\n13937\n13938\n13939\n13940\n13941\n13942\n13943\n13945\n13949\n13953\n13954\n13956\n13957\n13958\n13962\n13963\n13964\n13966\n13969\n13973\n13974\n13975\n13976\n13977\n13978\n13980\n13982\n13983\n13986\n13987\n13989\n13990\n13991\n13992\n13993\n13995\n13996\n13997\n13998\n13999\n14003\n14004\n14005\n14007\n14009\n14010\n14011\n967\n14015\n14016\n14017\n14018\n14019\n14020\n14022\n14024\n14025\n14026\n14028\n14029\n14030\n14032\n14034\n14035\n14036\n14038\n14039\n14041\n14042\n14043\n14044\n14045\n14048\n14049\n14052\n14053\n14054\n14057\n14058\n14060\n14061\n14063\n14064\n14065\n14067\n14068\n14070\n14071\n14072\n14074\n14075\n14076\n14078\n14080\n14081\n14082\n968\n14083\n14086\n14087\n14089\n969\n14094\n14095\n14096\n14099\n14103\n14109\n14112\n14113\n14114\n14115\n14118\n14120\n14121\n14125\n14127\n14128\n14129\n14130\n14131\n14132\n14133\n14134\n14136\n14138\n14139\n14141\n14142\n14145\n14146\n14147\n14152\n14156\n14157\n14159\n14161\n14162\n14165\n14166\n14167\n14170\n14174\n14178\n14180\n14181\n14183\n14184\n14185\n14186\n14189\n14192\n14194\n14195\n14198\n14201\n14202\n14206\n14207\n14208\n14209\n14210\n14213\n14214\n14218\n14221\n14225\n14226\n14230\n14232\n14235\n14243\n14246\n14247\n970\n14249\n14250\n14253\n14254\n14255\n14256\n14258\n14266\n14267\n14268\n14269\n14270\n14272\n14274\n971\n14275\n14276\n14277\n14278\n14280\n14281\n14282\n14283\n972\n14284\n14286\n14287\n14289\n973\n14291\n14292\n14293\n14294\n14296\n14298\n14300\n14302\n14303\n14307\n14308\n14310\n14312\n14313\n14314\n14316\n14317\n14319\n974\n14320\n14323\n14324\n14326\n14328\n14329\n14330\n14331\n14332\n14334\n14335\n14336\n14338\n14339\n14344\n14348\n975\n14350\n14351\n14353\n14357\n14359\n14360\n14367\n14368\n14370\n14372\n14374\n14375\n14376\n14377\n14378\n14379\n14384\n14386\n14387\n14388\n14389\n14390\n976\n14392\n14395\n14396\n14397\n14398\n14400\n14401\n14402\n14403\n14404\n14406\n14407\n14408\n14409\n14411\n977\n978\n14416\n14418\n14421\n
14422\n14423\n14424\n14425\n14426\n14427\n14428\n14430\n14431\n14432\n14433\n14434\n14435\n14436\n14437\n14438\n14439\n14440\n14442\n14443\n14444\n14445\n14446\n14449\n14452\n14453\n14457\n979\n14461\n980\n14465\n14466\n14467\n14471\n14474\n14479\n14480\n14482\n14483\n14485\n14493\n14498\n14499\n14500\n14501\n14503\n14504\n14505\n14506\n14507\n14508\n14509\n14512\n14514\n14517\n14520\n14522\n14525\n14526\n14527\n14528\n14530\n14532\n14533\n14535\n14536\n14538\n14541\n14544\n14546\n14550\n14554\n14561\n14562\n14565\n14578\n14586\n14598\n14599\n14600\n14601\n14605\n14609\n14612\n14613\n14615\n14619\n14621\n14622\n14623\n14627\n14630\n14631\n14633\n14635\n14636\n14637\n14640\n14641\n14642\n14643\n14644\n14645\n14646\n14648\n14652\n14653\n14654\n14659\n14660\n14667\n14669\n14671\n14673\n14675\n14676\n14679\n14683\n14684\n14685\n14686\n14688\n14690\n14693\n14696\n14697\n14698\n14699\n14700\n14701\n14702\n14703\n14704\n14706\n14710\n14712\n14716\n14717\n14718\n14719\n14722\n14723\n14725\n14727\n14728\n14729\n14730\n14733\n14734\n14735\n14736\n14737\n14739\n14741\n14742\n14743\n14744\n14745\n14749\n14750\n14754\n14756\n14760\n14763\n14764\n14766\n14768\n14769\n14773\n14779\n14780\n14785\n14786\n14787\n14788\n14789\n14791\n14793\n14794\n14795\n14797\n14799\n14803\n14807\n14810\n14812\n14813\n14815\n14817\n14822\n14824\n14826\n14827\n14829\n14833\n14834\n14838\n14841\n14843\n14854\n14861\n14862\n14871\n14872\n14874\n14875\n14879\n14883\n14884\n14885\n14887\n14888\n14889\n14897\n14900\n14904\n14907\n14912\n14913\n14920\n14921\n14922\n14926\n14927\n14932\n14936\n14939\n14942\n14944\n14947\n14948\n14949\n14950\n14956\n14957\n14958\n14961\n14962\n14963\n14967\n14972\n14973\n14977\n14978\n14982\n14983\n981\n14984\n14985\n14986\n14987\n14992\n14993\n14994\n14996\n14997\n14999\n15000\n15002\n15003\n15005\n15008\n15009\n15011\n15014\n15016\n15017\n15020\n15025\n15027\n15028\n15032\n15033\n15035\n15037\n15040\n15043\n15047\n15048\n15049\n15051\n15052\n15067\n15080\n15081\n15082\n1508
4\n15086\n15087\n15089\n15090\n15091\n15092\n15096\n15098\n15099\n15104\n15106\n15112\n15115\n15116\n15118\n15120\n15125\n15126\n15127\n15129\n15130\n15131\n15133\n15135\n15138\n15140\n15141\n15145\n15146\n15147\n15148\n15152\n15155\n15157\n15158\n15159\n15162\n15163\n15165\n15168\n15178\n15184\n15185\n15186\n15187\n15188\n15189\n15190\n15192\n15193\n15194\n15198\n15201\n15205\n15206\n15207\n15208\n15210\n15211\n15214\n15216\n15217\n15221\n15222\n15223\n15225\n15226\n15228\n15229\n15230\n15231\n15234\n15237\n15239\n15242\n15249\n15253\n15255\n15256\n15258\n15259\n15260\n15261\n15263\n15268\n15269\n15274\n15276\n15279\n15282\n15291\n15300\n15310\n15318\n15319\n15322\n15323\n15325\n15326\n15328\n15329\n15337\n15339\n15340\n15341\n15357\n15359\n15360\n15361\n15362\n15363\n15364\n15366\n15369\n15372\n15385\n15386\n15389\n15391\n15393\n15396\n15402\n15413\n15415\n15418\n15422\n15424\n15427\n15429\n15435\n15440\n15441\n15442\n15449\n15451\n15453\n15454\n15458\n15459\n15461\n15463\n15465\n15467\n15468\n15471\n15475\n15477\n15478\n15485\n15487\n15495\n15496\n15497\n15499\n15508\n15512\n15525\n15538\n15539\n15540\n15544\n15553\n15554\n15556\n15557\n15559\n15561\n15562\n15563\n15564\n15568\n15569\n15571\n15575\n15576\n15580\n15582\n15583\n15585\n15590\n15591\n15592\n15595\n15596\n15597\n15598\n15600\n15601\n15607\n15608\n15616\n15618\n15619\n15620\n15621\n15627\n15631\n15636\n15642\n15643\n15656\n15663\n15667\n15671\n15672\n15677\n15678\n15685\n15686\n15691\n15695\n15698\n15701\n15702\n15704\n15707\n15708\n15713\n15719\n15722\n15723\n15724\n15726\n15728\n15730\n15733\n15734\n15735\n15736\n15737\n15738\n15739\n15740\n15741\n15742\n15743\n15745\n15746\n15748\n982\n15753\n15755\n15760\n15763\n15765\n15766\n15773\n15777\n15780\n15782\n15784\n15791\n15792\n15793\n15796\n15799\n15803\n15804\n15815\n15818\n15820\n15821\n15825\n15830\n15839\n15842\n15845\n15846\n15847\n15849\n15854\n15855\n15858\n15859\n15863\n15866\n15868\n15873\n15877\n15881\n15883\n15885\n15888\n15890\n15896\n1589
7\n15899\n15900\n15901\n15905\n15909\n15910\n15916\n15917\n15928\n15930\n15935\n15938\n15939\n15940\n15947\n15952\n15954\n15955\n15957\n15958\n15961\n15962\n15963\n15965\n15975\n15976\n15977\n15978\n15980\n15993\n15994\n15999\n16000\n16007\n16010\n16011\n16012\n16014\n16018\n16019\n16020\n16021\n16023\n16024\n16027\n16030\n16031\n16032\n16035\n16036\n16044\n16045\n16055\n16057\n16060\n16062\n16067\n16072\n16076\n16077\n16078\n16087\n16088\n16090\n16095\n16098\n16101\n16108\n16109\n16110\n16112\n16113\n16116\n16117\n16119\n16123\n16124\n16125\n16132\n16133\n16135\n16137\n16145\n16149\n16156\n16157\n16159\n16160\n16162\n16163\n16165\n16166\n16168\n16171\n16172\n16174\n16175\n16179\n16181\n16185\n16188\n16190\n16193\n16194\n16195\n16203\n16208\n16209\n16214\n16215\n16216\n16217\n16230\n16231\n16233\n16235\n16241\n16243\n16252\n16257\n16259\n16260\n16267\n16272\n16274\n16275\n16278\n16280\n16287\n16290\n16296\n16297\n16300\n16304\n16305\n16306\n16307\n16308\n16312\n16318\n16323\n16328\n16329\n16330\n16331\n16333\n16334\n16335\n16338\n16344\n16350\n16351\n16357\n16358\n16359\n16363\n16364\n16365\n16373\n16376\n16379\n16380\n16389\n16391\n16392\n16395\n16396\n16400\n16404\n16405\n16406\n16407\n16408\n16409\n16413\n16416\n16421\n16430\n16432\n16434\n16437\n16439\n16440\n16441\n16442\n16448\n16451\n16452\n16453\n16460\n16463\n16471\n16472\n16475\n16476\n16479\n16490\n16492\n16493\n16494\n16497\n16510\n16514\n16519\n16520\n16524\n16525\n16528\n16529\n16533\n16538\n16542\n16545\n16546\n16556\n16558\n16561\n16563\n16567\n16568\n16571\n16576\n16584\n16592\n16600\n16601\n16603\n16604\n16607\n16608\n16609\n16615\n16619\n16621\n16626\n16632\n16634\n16635\n16638\n16642\n16646\n16651\n16664\n16665\n16667\n16669\n16670\n16671\n16675\n16676\n16681\n16684\n16685\n16686\n16688\n16696\n16705\n16709\n16710\n16711\n16720\n16721\n16724\n16729\n16742\n16743\n16745\n16746\n16751\n16752\n16755\n16756\n983\n16760\n16765\n16767\n16770\n16774\n16775\n16778\n16779\n16780\n16789\n16792\n16794\n1679
8\n16802\n16804\n16805\n16806\n16807\n16815\n16816\n16818\n16820\n16827\n16828\n16832\n16833\n16840\n16844\n16848\n16849\n16851\n16853\n16856\n16859\n16860\n16863\n16864\n16865\n16866\n16870\n16880\n16888\n16889\n16893\n16894\n16895\n16896\n16897\n16900\n16902\n16905\n16907\n16909\n16913\n16915\n16917\n16920\n16922\n16923\n16926\n16931\n16932\n16935\n16943\n16946\n16955\n16963\n16972\n16977\n16978\n16980\n16982\n16985\n16987\n16988\n16990\n16992\n16996\n16997\n17003\n17004\n17008\n17009\n17013\n17018\n17019\n17022\n17023\n17025\n17028\n17032\n17036\n17037\n17041\n17044\n17047\n17052\n17053\n17054\n17055\n17067\n17068\n17072\n17078\n17096\n17102\n17109\n17122\n17125\n17126\n17129\n17133\n17136\n17141\n17143\n17148\n17150\n17156\n17159\n17161\n17170\n17171\n17173\n17174\n17180\n17182\n17184\n17186\n17193\n17195\n17198\n17201\n17203\n17207\n17208\n17218\n17223\n17225\n17226\n17232\n17243\n17246\n17263\n17264\n17268\n17269\n17270\n17279\n17283\n17284\n17287\n17289\n17290\n17295\n17301\n17305\n17312\n17313\n17314\n17316\n17320\n17321\n17324\n17325\n17327\n17328\n17329\n17331\n17332\n17334\n17341\n17344\n17345\n17353\n17354\n17355\n17356\n17357\n17360\n17361\n17363\n17365\n17366\n17368\n17369\n17370\n17371\n17374\n17375\n17376\n17377\n17378\n17379\n17381\n17382\n17384\n17385\n17388\n17389\n17390\n17391\n17392\n17394\n17395\n17397\n17399\n17400\n17402\n17403\n17404\n17407\n17408\n17409\n17410\n17411\n17415\n17416\n17417\n17418\n17419\n17420\n17421\n17422\n17423\n17424\n17425\n17426\n17427\n17428\n17429\n17430\n17431\n17432\n17434\n17435\n17437\n17438\n17439\n17440\n17441\n17442\n17444\n17445\n17446\n17447\n17450\n17455\n17457\n17459\n17462\n17463\n17464\n17465\n17466\n17467\n17470\n17471\n17472\n17473\n17474\n17475\n17478\n17479\n17480\n17481\n17483\n17485\n17486\n17497\n17501\n17507\n17508\n17513\n17514\n17518\n17519\n17523\n17524\n17525\n17527\n17531\n17532\n17533\n17535\n17536\n17537\n17538\n17541\n17544\n17549\n17554\n17555\n17566\n17575\n17580\n17581\n17582\n17583\n17
584\n17585\n17586\n17589\n17593\n17594\n17599\n17601\n17603\n17606\n17609\n17610\n17611\n17613\n17614\n17616\n17618\n17620\n17621\n17625\n17626\n17627\n17628\n17630\n17633\n17634\n17636\n17640\n17641\n17642\n17645\n17648\n17649\n17650\n17651\n17652\n17656\n17658\n17659\n17661\n17663\n17665\n17666\n17667\n17669\n17670\n17672\n17674\n17675\n17676\n17677\n17678\n17679\n17681\n17687\n17691\n17693\n17694\n17695\n17696\n17699\n17703\n17704\n17707\n17726\n17736\n17737\n17738\n17740\n17741\n17742\n17744\n17745\n17748\n17749\n17750\n17751\n17752\n17753\n17754\n17755\n17757\n17758\n17759\n17760\n17763\n17777\n17778\n17779\n17780\n17785\n17786\n17789\n17790\n17795\n17797\n17798\n17799\n17801\n17802\n17804\n17807\n17808\n17809\n17811\n17813\n17814\n17818\n17819\n17821\n17826\n17827\n17828\n17829\n17830\n17832\n17833\n17835\n17836\n17841\n17850\n17854\n17856\n17857\n17860\n17861\n17862\n17865\n17867\n17868\n17870\n17871\n17872\n17873\n17874\n17875\n17876\n17877\n17878\n17879\n17881\n17883\n17884\n17885\n17886\n17887\n17888\n17889\n17891\n17892\n17893\n17894\n17895\n17896\n17897\n17900\n17901\n17903\n17904\n17905\n17906\n17910\n17912\n17915\n17917\n17920\n17929\n17933\n17934\n17940\n17944\n17949\n17951\n17953\n17954\n17955\n17961\n17962\n17963\n17964\n17965\n17968\n17969\n17970\n17973\n17974\n17975\n17976\n17978\n17979\n17980\n17981\n17985\n17986\n17987\n17988\n17990\n17991\n17994\n17995\n17998\n17999\n18000\n18002\n18005\n18006\n18007\n18014\n18015\n18020\n18022\n18023\n18027\n18032\n18035\n18036\n18037\n18038\n18039\n18040\n18041\n18042\n18043\n18045\n18047\n18050\n18051\n984\n18052\n18053\n18055\n18059\n18064\n18065\n18071\n18073\n18075\n18077\n18078\n18080\n18081\n18082\n18084\n18085\n18086\n18087\n18091\n18092\n18095\n18099\n18100\n18102\n18103\n18104\n18105\n18106\n18107\n18108\n18110\n18111\n18113\n18114\n18117\n18118\n18120\n18121\n18122\n18124\n18125\n18127\n18128\n18129\n18130\n18131\n18134\n18135\n18137\n18139\n18140\n18144\n18150\n18151\n18152\n18154\n18155\n18156\n18
158\n18159\n18160\n18162\n18164\n18165\n18166\n18174\n18175\n18181\n18182\n18187\n18188\n18191\n18192\n18218\n985\n18219\n18220\n18222\n18226\n18229\n18230\n18231\n18232\n18234\n18235\n18236\n18237\n18240\n18241\n18243\n18244\n18246\n18248\n18249\n18250\n18252\n18253\n18254\n18256\n18259\n18260\n18262\n18263\n18264\n18265\n18267\n18270\n18272\n18273\n18274\n18277\n18278\n18282\n18283\n18284\n18286\n18287\n18288\n18289\n18290\n18293\n18296\n18297\n18299\n18300\n18302\n18303\n18309\n18310\n18311\n18312\n18316\n18317\n18318\n18319\n18320\n18321\n18322\n18324\n18335\n18338\n18339\n18340\n18341\n18344\n18345\n18346\n18347\n18348\n18349\n18351\n18356\n18357\n18362\n18364\n18368\n18370\n18371\n18372\n18373\n18374\n18377\n18381\n18386\n18394\n18399\n18401\n18404\n18405\n18407\n18409\n18411\n18412\n18413\n18414\n18416\n18417\n18418\n18420\n18425\n18426\n18432\n18435\n18436\n18437\n18439\n18454\n18455\n18456\n18457\n18458\n18459\n18460\n18461\n18464\n18465\n18466\n18468\n18469\n18472\n18473\n18474\n18475\n18476\n18479\n18482\n18483\n18486\n18491\n18492\n18493\n18495\n18499\n18500\n18501\n18502\n18504\n18505\n18507\n18508\n18509\n18510\n18511\n18512\n18514\n18519\n18520\n18521\n18522\n18523\n18524\n18525\n18527\n18528\n18529\n18530\n18531\n18535\n18536\n18537\n18538\n18542\n18544\n18545\n18546\n986\n18548\n18549\n18551\n18552\n18553\n18554\n18555\n18558\n18559\n18561\n18562\n18563\n18565\n18566\n18567\n18568\n18570\n18571\n18572\n18574\n18577\n18582\n18585\n18586\n18587\n18588\n18589\n18590\n18593\n18595\n18597\n18598\n18599\n18600\n18601\n18602\n18603\n18604\n18605\n18606\n18607\n18609\n18610\n18613\n18615\n18616\n18618\n18619\n18622\n18623\n18624\n18625\n18627\n18628\n18629\n18630\n18631\n18632\n18633\n18634\n18635\n18636\n18643\n18644\n18645\n18646\n18648\n18649\n18650\n18653\n18654\n18658\n18659\n18663\n18669\n18671\n18672\n18674\n18676\n18677\n18681\n18682\n18683\n18684\n18692\n18693\n18696\n18697\n18699\n18703\n18706\n18708\n18709\n18710\n18715\n18717\n18727\n18735\n1873
7\n18740\n18742\n18743\n18747\n18750\n18752\n18753\n18756\n18765\n18766\n18767\n18771\n18775\n18778\n18779\n18787\n18792\n18793\n18794\n18796\n18803\n987\n18806\n18807\n18809\n18812\n18813\n18816\n18819\n18820\n18827\n18828\n18833\n18836\n18837\n18839\n18840\n18845\n18846\n18847\n18848\n18852\n18853\n18854\n18858\n18859\n18860\n18861\n18863\n18864\n18865\n18866\n18868\n18869\n18870\n18871\n18874\n18875\n18876\n18877\n18878\n18879\n18880\n18881\n18882\n18883\n18884\n18885\n18896\n18897\n18899\n18901\n18911\n18913\n18917\n18919\n18922\n18924\n18925\n18926\n18932\n18935\n18938\n18939\n18940\n18944\n18948\n18950\n18953\n18954\n18956\n18957\n18959\n18960\n18961\n18964\n18965\n18966\n18968\n18973\n18990\n18991\n18992\n18995\n18997\n18999\n19000\n19007\n19023\n19024\n19027\n19032\n19038\n19040\n19042\n19044\n19045\n19046\n19050\n19063\n19067\n19080\n19081\n19084\n19086\n19088\n19089\n19090\n19091\n19093\n19094\n19097\n19098\n19099\n19103\n19104\n19107\n19111\n988\n19114\n19115\n19116\n19118\n19121\n19122\n19126\n19127\n19130\n19131\n19132\n19134\n19135\n19137\n19139\n19140\n19143\n19144\n19145\n19147\n19148\n19149\n19150\n19152\n19153\n19154\n19155\n19156\n19157\n19160\n19161\n19162\n19164\n19165\n19166\n19167\n19168\n19169\n19170\n19171\n19173\n19175\n19176\n19179\n19182\n19185\n19186\n19189\n19190\n19191\n19193\n19195\n19197\n19200\n19203\n19204\n19205\n19207\n19208\n19209\n19211\n19212\n19214\n19223\n19224\n19225\n19228\n19229\n19230\n19231\n19232\n19233\n19236\n19237\n19238\n19239\n19240\n19242\n19243\n19244\n19246\n19248\n19249\n19250\n19254\n19262\n19264\n19267\n19268\n19269\n19270\n19271\n19272\n19274\n19275\n19276\n19277\n19280\n19281\n19285\n19286\n19287\n19294\n19299\n19300\n19302\n19303\n19305\n19306\n19307\n19310\n19312\n19314\n19315\n19316\n19317\n19320\n19321\n19326\n19327\n19328\n19330\n19331\n19334\n19335\n19340\n19341\n19342\n19343\n19346\n19347\n19349\n19352\n19353\n19354\n19355\n19357\n19358\n19359\n19360\n19361\n19362\n19364\n19366\n19370\n19373\n19374\
n19375\n19376\n19377\n19379\n19381\n19383\n19384\n19387\n19389\n19391\n19392\n19395\n19396\n19398\n19412\n19413\n19414\n19415\n19417\n19424\n19427\n19429\n19431\n19433\n19444\n19445\n19446\n19447\n19450\n19451\n19452\n19456\n19457\n19458\n19460\n19461\n19462\n19463\n19466\n19467\n19469\n19471\n19481\n19482\n19483\n19487\n19488\n19491\n19493\n19494\n19495\n19496\n19500\n19502\n19504\n19505\n19506\n19507\n19508\n19511\n19512\n19513\n19516\n19518\n19519\n19521\n19523\n19524\n19526\n19527\n19528\n19529\n19530\n19531\n19532\n19534\n19535\n19536\n19537\n19538\n19540\n19542\n19548\n19549\n19551\n19552\n19553\n19556\n19557\n19558\n19559\n19560\n19561\n19563\n19565\n19566\n19567\n19573\n19576\n19577\n19578\n19585\n19587\n19588\n19592\n19593\n19594\n19597\n19598\n19600\n19602\n19607\n19616\n19617\n19618\n19621\n19623\n19624\n19625\n19626\n19628\n19632\n19634\n19637\n19638\n19639\n19640\n19642\n19643\n19644\n19645\n19648\n19650\n19651\n19652\n19653\n19654\n19658\n19659\n19665\n19666\n19673\n19677\n19678\n19679\n19680\n19682\n19683\n19685\n19686\n19687\n19688\n19689\n19691\n19692\n19693\n19694\n19697\n19698\n19699\n19700\n19702\n19703\n19704\n19706\n19707\n19708\n19709\n19715\n19717\n19719\n19720\n19724\n19727\n19728\n19730\n19731\n19732\n19734\n19738\n19742\n19743\n19744\n19747\n19748\n19751\n19755\n19756\n19766\n19767\n19768\n19769\n19773\n19774\n19777\n19779\n19780\n19783\n19784\n19787\n19792\n19802\n19805\n19807\n19808\n19811\n19812\n19815\n19823\n19827\n19829\n19841\n19843\n19845\n19847\n19848\n19849\n19850\n19851\n19855\n19862\n19868\n19871\n19872\n19873\n19874\n19875\n19876\n19877\n19881\n19882\n19883\n19884\n19889\n19892\n19897\n19899\n19904\n19905\n19906\n19907\n19912\n19915\n19917\n19918\n19919\n19920\n19924\n19926\n19927\n19928\n19932\n19934\n19936\n19937\n19938\n19939\n19940\n19943\n19945\n19948\n19953\n19955\n19956\n19957\n19962\n19963\n19966\n19967\n19974\n19977\n19978\n19979\n19980\n19981\n19982\n19983\n19984\n19985\n19986\n19988\n19995\n19998\n20000\n20002\n2000
7\n20008\n20009\n20011\n20018\n20020\n20028\n989\n20029\n20030\n20033\n20034\n20035\n20038\n20039\n20041\n20046\n20051\n20052\n20053\n20054\n20056\n20058\n20059\n20062\n20063\n20064\n20065\n20066\n20067\n20070\n20071\n20072\n20073\n20075\n20077\n20079\n20080\n20083\n20090\n20091\n20093\n20095\n20096\n20101\n20102\n20106\n20107\n20109\n20111\n20112\n20113\n20117\n20119\n20120\n20124\n20125\n20126\n20127\n20131\n20132\n20135\n20136\n20137\n20138\n20149\n20150\n20152\n20154\n20156\n20157\n20158\n20162\n20163\n20164\n20171\n20172\n20174\n20180\n20181\n20183\n20187\n20194\n20196\n20201\n20202\n20204\n20207\n20208\n20216\n20218\n20220\n20223\n20224\n20225\n20227\n20228\n20229\n20230\n20231\n20232\n20233\n20234\n20235\n20237\n20239\n20240\n20241\n20242\n20245\n20248\n20249\n20257\n20259\n20260\n20266\n20271\n20274\n20275\n20276\n20278\n20280\n20287\n20289\n20290\n20291\n20292\n20293\n20295\n20296\n20297\n20298\n20299\n20300\n20304\n20305\n20306\n20309\n20312\n20314\n20320\n20321\n20326\n20327\n20328\n20334\n20335\n20336\n20337\n20339\n20342\n20344\n20346\n20356\n20357\n20358\n20364\n20365\n20366\n20367\n20369\n20371\n20372\n20373\n20374\n20375\n20376\n20377\n20382\n20383\n20385\n20388\n20390\n20394\n20395\n20399\n20401\n20403\n20406\n20407\n20408\n20409\n20414\n20415\n20416\n20417\n20418\n20419\n20422\n20423\n20424\n20425\n20426\n20427\n20429\n20431\n20433\n20434\n20436\n20446\n20450\n20453\n20455\n20458\n20461\n20465\n20466\n20469\n990\n20479\n20480\n20481\n20483\n20484\n20486\n20488\n20489\n20493\n20497\n20500\n20501\n20502\n20503\n20506\n20507\n20508\n20512\n20513\n20514\n20519\n20520\n20523\n20524\n20525\n20526\n20527\n20528\n20531\n20532\n20534\n20536\n20537\n20538\n20542\n20546\n20550\n20551\n20554\n20557\n20558\n20560\n20561\n20562\n20563\n20565\n20566\n20570\n20571\n20574\n20576\n20577\n20580\n20583\n20585\n20586\n20589\n20591\n20594\n20597\n20598\n20599\n20600\n20602\n20603\n20604\n20605\n20606\n20609\n20611\n20612\n20614\n20615\n20617\n20621\n20622\n20625\n20626\
n20627\n20629\n20630\n20632\n20634\n20636\n20637\n20638\n20639\n20643\n20647\n20649\n20650\n20651\n20652\n20659\n20660\n20662\n20664\n20668\n20669\n20670\n20671\n20672\n20673\n20674\n20675\n20676\n20677\n20678\n20680\n20687\n20688\n20690\n20692\n20695\n20696\n20697\n20698\n20700\n20701\n20702\n20703\n20704\n20709\n20710\n20712\n20713\n20714\n20715\n20718\n20719\n20720\n20721\n20722\n20723\n20724\n20725\n20727\n20728\n20732\n20733\n20735\n20736\n20740\n20741\n20742\n20746\n20750\n20757\n20762\n20763\n20764\n20765\n20771\n20772\n20775\n20776\n20785\n20791\n20794\n20795\n20796\n20797\n20798\n20799\n20800\n20801\n20802\n20805\n20806\n20808\n20814\n20816\n20817\n20820\n20822\n20827\n20831\n20832\n20833\n20838\n20839\n20841\n20844\n20845\n20847\n20848\n20851\n20855\n20856\n20858\n20859\n20860\n20861\n20862\n20863\n20867\n20868\n20869\n20871\n20874\n20878\n20881\n20884\n20885\n20888\n20889\n20890\n20892\n20894\n20898\n20903\n20905\n20906\n20908\n20910\n20911\n20912\n20914\n20916\n20919\n20921\n20925\n20927\n20928\n20929\n20934\n20935\n20944\n20945\n20946\n20948\n20949\n20950\n20951\n20954\n20955\n20957\n20958\n20963\n20964\n20967\n20971\n20976\n20987\n20988\n20989\n20990\n20993\n20996\n20997\n21006\n21009\n21010\n21015\n21017\n21020\n21022\n21025\n21032\n21033\n991\n21034\n21035\n21038\n21042\n21044\n21045\n21046\n21047\n21049\n21050\n21051\n992\n21052\n21053\n21054\n21055\n21056\n21057\n21059\n21060\n21061\n21062\n21063\n21064\n21065\n21069\n21070\n21071\n21072\n21073\n21074\n21075\n21078\n21082\n21084\n21089\n21090\n21092\n21093\n21094\n21098\n21100\n21106\n21107\n21118\n21119\n21121\n21125\n21127\n21128\n21130\n21131\n21134\n21139\n21142\n21144\n21151\n21152\n21153\n21154\n21155\n21157\n21159\n21162\n21164\n21165\n993\n21171\n21174\n21175\n994\n21176\n21178\n21183\n21184\n995\n21189\n21192\n21193\n996\n21199\n21200\n21201\n997\n21202\n21203\n21205\n21220\n21222\n21225\n21230\n21233\n21239\n21253\n21258\n21269\n21270\n21272\n21280\n21282\n21283\n21285\n21287\n21288\n2129
8\n21304\n21306\n21308\n21312\n21313\n21314\n21316\n21317\n21318\n21319\n21320\n21322\n21328\n21337\n21338\n21340\n21344\n21346\n21350\n21353\n21357\n21358\n21359\n21362\n21364\n998\n21370\n21374\n21376\n21377\n21378\n21382\n21386\n21388\n21389\n21397\n21398\n21402\n21407\n21408\n21411\n21414\n21415\n21419\n21425\n21428\n21429\n21431\n21432\n21433\n21438\n21441\n21451\n21459\n21464\n21467\n21469\n21476\n21479\n21484\n21485\n21486\n21489\n21494\n21495\n21497\n21501\n21502\n21507\n21511\n21515\n21516\n21517\n21519\n21522\n21524\n21528\n21529\n21532\n21533\n21534\n21537\n21541\n21545\n21547\n21548\n21549\n21550\n21554\n21560\n21563\n21569\n21573\n21576\n21578\n21579\n21580\n21581\n21585\n21589\n21590\n21591\n21598\n21601\n21604\n21606\n21611\n21615\n21618\n21620\n21623\n21625\n21627\n21635\n21637\n21638\n21641\n21644\n21645\n21648\n21649\n21650\n21659\n21661\n21662\n21663\n21665\n21666\n21668\n21669\n21672\n21673\n21675\n21678\n21679\n21680\n21686\n21688\n21689\n21690\n21692\n21702\n21711\n21712\n21713\n21716\n21717\n21721\n21722\n21723\n21724\n21727\n21728\n21729\n21734\n21739\n21740\n21741\n21743\n21744\n21747\n21748\n21749\n21754\n21757\n21758\n21760\n21761\n21762\n21763\n21765\n21772\n21773\n21774\n21777\n21778\n21781\n21782\n21784\n21786\n21791\n21792\n21795\n21796\n21800\n21801\n21803\n21804\n21806\n21811\n21815\n21816\n999\n21817\n21818\n21822\n21825\n21826\n21827\n21828\n21829\n21830\n21831\n21833\n21835\n21837\n21838\n21840\n21841\n"
  },
  {
    "path": "timm/data/_info/imagenet22k_ms_to_22k_indices.txt",
    "content": "1000\n1001\n1002\n1003\n1004\n1005\n1006\n1007\n1008\n1009\n1010\n1011\n1012\n1013\n1014\n1015\n1016\n1017\n1018\n1019\n1020\n1021\n1022\n1023\n1024\n1025\n1026\n1027\n1028\n1029\n1030\n1031\n1032\n1033\n1034\n1035\n1036\n1037\n1038\n1039\n1040\n1041\n1042\n1043\n1044\n1045\n1046\n1047\n1048\n1049\n1050\n1051\n1052\n1053\n1054\n1055\n1056\n1057\n1058\n1059\n1060\n1061\n1062\n1063\n1064\n1065\n1066\n1067\n1068\n1069\n1070\n1071\n1072\n1073\n1074\n1075\n1076\n1077\n1078\n1079\n1080\n1081\n1082\n1083\n1084\n1085\n1086\n1087\n1088\n1089\n1090\n1091\n1092\n1093\n1094\n1095\n1096\n1097\n1098\n1099\n1100\n1101\n1102\n1103\n1104\n1105\n1106\n1107\n1108\n1109\n1110\n1111\n1112\n1113\n1114\n1115\n1116\n1117\n1118\n1119\n1120\n1121\n1122\n1123\n1124\n1125\n1126\n1127\n1128\n1129\n1130\n1131\n1132\n1133\n1134\n1135\n1136\n1137\n1138\n1139\n1140\n1141\n1142\n1143\n1144\n1145\n1146\n1147\n1148\n1149\n1150\n1151\n1152\n1153\n1154\n1155\n1156\n1157\n1158\n1159\n1160\n1161\n1162\n1163\n1164\n1165\n1166\n1167\n1168\n1169\n1170\n1171\n1172\n1173\n1174\n1175\n1176\n1177\n1178\n1179\n1180\n1181\n1182\n1183\n1184\n1185\n1186\n1187\n1188\n1189\n1190\n1191\n1192\n1193\n1194\n1195\n1196\n1197\n1198\n1199\n1200\n1201\n1202\n1203\n1204\n1205\n1206\n1207\n1208\n1209\n1210\n1211\n1212\n1213\n1214\n1215\n1216\n1217\n1218\n1219\n1220\n1221\n1222\n1223\n1224\n1225\n1226\n1227\n1228\n1229\n1230\n1231\n1232\n1233\n1234\n1235\n1236\n1237\n1238\n1239\n1240\n1241\n1242\n1243\n1244\n1245\n1246\n1247\n1248\n1249\n1250\n1251\n1252\n1253\n1254\n1255\n1256\n1257\n1258\n1259\n1260\n1261\n1262\n1263\n1264\n1265\n1266\n1267\n1268\n1269\n1270\n1271\n1272\n1273\n1274\n1275\n1276\n1277\n1278\n1279\n1280\n1281\n1282\n1283\n1284\n1285\n1286\n1287\n1288\n1289\n1290\n1291\n1292\n1293\n1294\n1295\n1296\n1297\n1298\n1299\n1300\n1301\n1302\n1303\n1304\n1305\n1306\n1307\n1308\n1309\n1310\n1311\n1312\n1313\n1314\n1315\n1316\n1317\n1318\n1319\n1320\n1321\n1322\n1323\n1324\n1325\n1326\n1327\n1328\n1329\n1330
\n1331\n1332\n1333\n1334\n1335\n1336\n1337\n1338\n1339\n1340\n1341\n1342\n1343\n1344\n1345\n1346\n1347\n1348\n1349\n1350\n1351\n1352\n1353\n1354\n1355\n1356\n1357\n1358\n0\n1359\n1360\n1361\n1362\n1363\n1364\n1365\n1366\n1\n1367\n1368\n1369\n1370\n1371\n1372\n1373\n1374\n1375\n1376\n1377\n1378\n1379\n1380\n1381\n1382\n1383\n1384\n1385\n1386\n1387\n1388\n1389\n1390\n1391\n1392\n1393\n1394\n1395\n1396\n1397\n1398\n1399\n1400\n1401\n1402\n1403\n1404\n1405\n1406\n1407\n1408\n1409\n1410\n1411\n1412\n1413\n1414\n1415\n1416\n1417\n1418\n1419\n1420\n1421\n1422\n1423\n1424\n1425\n1426\n1427\n1428\n1429\n1430\n1431\n1432\n1433\n1434\n1435\n1436\n1437\n1438\n1439\n1440\n1441\n1442\n1443\n1444\n1445\n1446\n1447\n1448\n1449\n1450\n1451\n1452\n1453\n1454\n1455\n1456\n1457\n2\n1458\n1459\n1460\n1461\n1462\n1463\n1464\n1465\n1466\n1467\n1468\n1469\n1470\n1471\n3\n1472\n1473\n1474\n1475\n1476\n1477\n1478\n1479\n1480\n1481\n4\n1482\n1483\n1484\n1485\n1486\n5\n1487\n1488\n1489\n6\n1490\n1491\n1492\n1493\n1494\n1495\n1496\n1497\n1498\n1499\n1500\n1501\n1502\n1503\n1504\n1505\n1506\n7\n1507\n8\n1508\n1509\n1510\n1511\n1512\n1513\n1514\n1515\n9\n1516\n1517\n1518\n1519\n1520\n1521\n1522\n1523\n1524\n1525\n1526\n1527\n1528\n1529\n1530\n1531\n1532\n1533\n1534\n1535\n1536\n10\n11\n1537\n1538\n1539\n1540\n1541\n1542\n1543\n12\n1544\n1545\n1546\n1547\n1548\n1549\n13\n1550\n1551\n1552\n1553\n1554\n1555\n1556\n1557\n1558\n1559\n1560\n14\n1561\n1562\n1563\n1564\n1565\n1566\n1567\n1568\n1569\n1570\n1571\n1572\n1573\n1574\n1575\n1576\n1577\n1578\n1579\n1580\n1581\n1582\n1583\n1584\n1585\n1586\n1587\n1588\n1589\n1590\n1591\n1592\n1593\n1594\n1595\n1596\n1597\n1598\n1599\n1600\n1601\n1602\n1603\n1604\n1605\n1606\n1607\n1608\n1609\n1610\n1611\n1612\n1613\n1614\n1615\n1616\n1617\n1618\n1619\n1620\n1621\n1622\n1623\n1624\n1625\n15\n1626\n1627\n1628\n1629\n1630\n1631\n16\n1632\n1633\n1634\n1635\n1636\n1637\n1638\n1639\n1640\n1641\n1642\n1643\n1644\n1645\n1646\n1647\n1648\n1649\n1650\n1651\n1652\n1653\n16
54\n1655\n1656\n1657\n1658\n1659\n1660\n1661\n1662\n1663\n1664\n1665\n1666\n1667\n1668\n1669\n1670\n1671\n1672\n1673\n1674\n1675\n1676\n1677\n1678\n1679\n1680\n1681\n1682\n1683\n1684\n1685\n1686\n1687\n1688\n1689\n1690\n1691\n1692\n1693\n1694\n1695\n1696\n1697\n1698\n1699\n1700\n1701\n1702\n1703\n1704\n1705\n17\n1706\n1707\n1708\n1709\n1710\n1711\n1712\n1713\n1714\n18\n1715\n1716\n1717\n1718\n1719\n1720\n1721\n1722\n1723\n1724\n1725\n1726\n1727\n1728\n1729\n1730\n1731\n1732\n1733\n1734\n1735\n1736\n1737\n1738\n1739\n1740\n1741\n1742\n1743\n1744\n1745\n19\n1746\n1747\n1748\n1749\n1750\n1751\n1752\n1753\n1754\n1755\n1756\n1757\n1758\n1759\n1760\n1761\n1762\n1763\n1764\n1765\n1766\n1767\n1768\n1769\n1770\n1771\n1772\n1773\n1774\n1775\n1776\n1777\n1778\n1779\n1780\n20\n1781\n1782\n1783\n1784\n1785\n1786\n1787\n1788\n1789\n1790\n1791\n1792\n1793\n1794\n1795\n1796\n1797\n1798\n1799\n1800\n1801\n1802\n1803\n1804\n21\n1805\n1806\n1807\n1808\n1809\n1810\n1811\n1812\n1813\n1814\n1815\n1816\n1817\n1818\n1819\n1820\n1821\n1822\n1823\n1824\n1825\n1826\n1827\n1828\n1829\n22\n1830\n1831\n1832\n1833\n1834\n23\n1835\n1836\n1837\n1838\n1839\n1840\n1841\n1842\n1843\n1844\n1845\n1846\n1847\n1848\n1849\n1850\n1851\n1852\n1853\n24\n1854\n1855\n1856\n1857\n1858\n1859\n1860\n1861\n1862\n1863\n1864\n1865\n1866\n1867\n1868\n1869\n1870\n25\n1871\n1872\n1873\n26\n1874\n1875\n1876\n1877\n27\n1878\n1879\n28\n1880\n29\n1881\n1882\n1883\n1884\n1885\n1886\n1887\n1888\n1889\n1890\n1891\n1892\n1893\n1894\n1895\n1896\n1897\n1898\n1899\n1900\n1901\n1902\n1903\n1904\n30\n1905\n1906\n1907\n1908\n1909\n1910\n1911\n1912\n1913\n1914\n31\n32\n1915\n1916\n1917\n1918\n1919\n1920\n1921\n1922\n1923\n1924\n1925\n1926\n1927\n1928\n1929\n1930\n1931\n1932\n1933\n1934\n1935\n1936\n1937\n1938\n1939\n1940\n1941\n1942\n1943\n1944\n1945\n1946\n1947\n1948\n1949\n1950\n1951\n1952\n1953\n1954\n1955\n1956\n1957\n1958\n1959\n1960\n33\n1961\n1962\n1963\n1964\n34\n1965\n1966\n1967\n35\n1968\n36\n1969\n1970\n1971\n1972\n37\n1973
\n1974\n1975\n1976\n1977\n1978\n1979\n1980\n1981\n1982\n1983\n1984\n1985\n1986\n1987\n1988\n38\n1989\n39\n1990\n1991\n1992\n1993\n1994\n1995\n1996\n1997\n1998\n1999\n2000\n2001\n2002\n2003\n2004\n2005\n2006\n2007\n40\n2008\n2009\n2010\n2011\n2012\n2013\n41\n2014\n2015\n2016\n2017\n2018\n2019\n2020\n2021\n42\n43\n2022\n2023\n2024\n44\n2025\n2026\n2027\n2028\n2029\n45\n2030\n2031\n2032\n46\n2033\n47\n2034\n2035\n2036\n48\n2037\n2038\n49\n2039\n2040\n2041\n2042\n50\n2043\n2044\n2045\n2046\n2047\n2048\n2049\n2050\n2051\n2052\n2053\n2054\n51\n2055\n2056\n2057\n2058\n2059\n2060\n2061\n2062\n2063\n2064\n2065\n2066\n2067\n2068\n2069\n2070\n2071\n2072\n2073\n2074\n2075\n2076\n2077\n2078\n2079\n2080\n2081\n2082\n2083\n2084\n2085\n2086\n2087\n2088\n2089\n2090\n52\n53\n54\n2091\n55\n2092\n2093\n2094\n2095\n2096\n2097\n2098\n2099\n2100\n2101\n2102\n2103\n2104\n2105\n2106\n2107\n2108\n2109\n2110\n2111\n56\n2112\n2113\n57\n2114\n2115\n2116\n2117\n2118\n2119\n58\n2120\n2121\n2122\n2123\n2124\n2125\n2126\n2127\n59\n2128\n2129\n60\n2130\n2131\n2132\n2133\n2134\n2135\n61\n2136\n2137\n2138\n2139\n2140\n2141\n2142\n62\n2143\n2144\n2145\n2146\n2147\n2148\n2149\n2150\n2151\n2152\n63\n2153\n2154\n2155\n2156\n2157\n2158\n64\n2159\n2160\n2161\n2162\n2163\n2164\n65\n2165\n2166\n2167\n2168\n2169\n66\n2170\n2171\n2172\n2173\n67\n2174\n2175\n2176\n68\n2177\n2178\n2179\n2180\n2181\n2182\n2183\n2184\n2185\n2186\n2187\n69\n2188\n70\n71\n2189\n2190\n2191\n2192\n2193\n2194\n72\n73\n74\n2195\n75\n76\n77\n2196\n2197\n2198\n78\n2199\n2200\n2201\n2202\n2203\n2204\n2205\n2206\n2207\n2208\n2209\n2210\n2211\n2212\n2213\n2214\n2215\n2216\n2217\n2218\n2219\n2220\n2221\n2222\n2223\n79\n2224\n2225\n2226\n2227\n2228\n2229\n2230\n2231\n2232\n2233\n2234\n2235\n2236\n2237\n2238\n2239\n2240\n2241\n2242\n2243\n2244\n2245\n2246\n2247\n2248\n2249\n2250\n2251\n2252\n2253\n2254\n2255\n2256\n2257\n2258\n2259\n2260\n2261\n2262\n2263\n80\n2264\n2265\n2266\n2267\n81\n2268\n2269\n2270\n2271\n2272\n82\n2273\n83\n2274\n2275\n22
76\n2277\n2278\n2279\n2280\n2281\n2282\n2283\n2284\n2285\n2286\n2287\n2288\n2289\n2290\n2291\n2292\n2293\n2294\n2295\n2296\n2297\n2298\n2299\n84\n2300\n2301\n2302\n85\n2303\n2304\n86\n2305\n2306\n2307\n2308\n2309\n2310\n2311\n2312\n2313\n2314\n2315\n2316\n2317\n2318\n2319\n2320\n2321\n2322\n2323\n2324\n2325\n2326\n2327\n2328\n2329\n2330\n2331\n2332\n2333\n2334\n2335\n2336\n2337\n2338\n2339\n87\n2340\n88\n2341\n2342\n89\n2343\n2344\n2345\n2346\n90\n2347\n2348\n2349\n2350\n2351\n2352\n2353\n2354\n2355\n2356\n2357\n2358\n91\n2359\n2360\n2361\n2362\n2363\n2364\n2365\n2366\n2367\n2368\n92\n93\n2369\n2370\n2371\n2372\n2373\n2374\n2375\n2376\n2377\n2378\n2379\n94\n2380\n2381\n2382\n2383\n2384\n2385\n2386\n2387\n2388\n2389\n2390\n2391\n2392\n2393\n2394\n2395\n2396\n2397\n2398\n2399\n2400\n2401\n2402\n2403\n2404\n2405\n2406\n95\n96\n2407\n2408\n2409\n2410\n2411\n2412\n2413\n2414\n97\n2415\n2416\n2417\n2418\n2419\n2420\n2421\n2422\n2423\n2424\n2425\n2426\n2427\n2428\n2429\n2430\n2431\n2432\n2433\n2434\n2435\n2436\n2437\n2438\n2439\n2440\n2441\n2442\n2443\n2444\n2445\n2446\n2447\n2448\n2449\n2450\n2451\n2452\n2453\n98\n2454\n2455\n99\n2456\n2457\n2458\n2459\n2460\n2461\n2462\n2463\n2464\n2465\n2466\n2467\n2468\n2469\n2470\n2471\n2472\n2473\n2474\n2475\n2476\n100\n2477\n2478\n2479\n2480\n2481\n2482\n101\n2483\n2484\n102\n2485\n103\n2486\n2487\n2488\n2489\n2490\n2491\n2492\n2493\n2494\n104\n2495\n2496\n2497\n2498\n2499\n2500\n2501\n2502\n2503\n2504\n2505\n2506\n2507\n2508\n2509\n105\n106\n2510\n2511\n2512\n2513\n2514\n2515\n2516\n2517\n2518\n2519\n2520\n2521\n2522\n2523\n2524\n2525\n2526\n2527\n2528\n2529\n2530\n2531\n2532\n2533\n2534\n2535\n2536\n2537\n2538\n2539\n2540\n2541\n2542\n2543\n2544\n2545\n2546\n2547\n2548\n2549\n2550\n2551\n2552\n2553\n2554\n2555\n2556\n2557\n2558\n2559\n2560\n2561\n2562\n2563\n2564\n2565\n2566\n2567\n2568\n2569\n2570\n2571\n2572\n2573\n107\n2574\n2575\n2576\n2577\n2578\n2579\n2580\n2581\n2582\n2583\n108\n2584\n2585\n2586\n2587\n2588\n2589\n2590\n259
1\n109\n2592\n2593\n2594\n2595\n2596\n2597\n2598\n2599\n2600\n2601\n2602\n2603\n2604\n2605\n110\n2606\n2607\n2608\n2609\n2610\n2611\n2612\n2613\n2614\n2615\n2616\n2617\n111\n2618\n2619\n2620\n2621\n2622\n2623\n2624\n2625\n2626\n2627\n2628\n2629\n2630\n2631\n2632\n2633\n2634\n2635\n2636\n2637\n2638\n2639\n2640\n2641\n2642\n2643\n2644\n112\n2645\n113\n2646\n2647\n2648\n2649\n114\n2650\n2651\n2652\n2653\n2654\n2655\n2656\n2657\n2658\n2659\n2660\n2661\n115\n2662\n2663\n2664\n2665\n2666\n2667\n2668\n2669\n116\n2670\n2671\n2672\n2673\n2674\n2675\n2676\n2677\n2678\n2679\n2680\n2681\n2682\n2683\n2684\n2685\n2686\n2687\n2688\n2689\n2690\n2691\n2692\n2693\n2694\n2695\n2696\n2697\n2698\n2699\n2700\n2701\n2702\n2703\n2704\n117\n2705\n2706\n2707\n2708\n2709\n2710\n2711\n2712\n2713\n2714\n2715\n2716\n2717\n2718\n2719\n2720\n2721\n2722\n118\n119\n2723\n2724\n2725\n2726\n2727\n120\n2728\n121\n2729\n2730\n2731\n2732\n2733\n122\n2734\n2735\n2736\n123\n124\n2737\n2738\n125\n2739\n2740\n2741\n2742\n2743\n2744\n2745\n2746\n2747\n2748\n2749\n126\n2750\n2751\n2752\n2753\n2754\n2755\n2756\n2757\n2758\n2759\n2760\n2761\n2762\n2763\n2764\n2765\n2766\n2767\n2768\n2769\n127\n128\n2770\n2771\n2772\n2773\n2774\n2775\n2776\n2777\n2778\n2779\n2780\n129\n2781\n2782\n130\n2783\n2784\n2785\n2786\n131\n2787\n2788\n2789\n132\n2790\n2791\n2792\n2793\n2794\n133\n2795\n2796\n2797\n134\n2798\n2799\n135\n2800\n2801\n2802\n2803\n2804\n2805\n2806\n2807\n2808\n2809\n2810\n136\n2811\n2812\n2813\n137\n2814\n138\n2815\n2816\n2817\n2818\n2819\n2820\n2821\n2822\n2823\n2824\n2825\n2826\n2827\n2828\n2829\n2830\n139\n2831\n2832\n2833\n2834\n2835\n2836\n140\n2837\n141\n2838\n2839\n2840\n2841\n2842\n2843\n2844\n2845\n2846\n2847\n2848\n2849\n2850\n2851\n2852\n2853\n2854\n2855\n2856\n2857\n2858\n142\n2859\n2860\n2861\n2862\n2863\n2864\n2865\n2866\n2867\n2868\n2869\n2870\n2871\n2872\n2873\n143\n2874\n2875\n2876\n2877\n2878\n2879\n2880\n2881\n2882\n2883\n2884\n2885\n2886\n2887\n2888\n2889\n2890\n2891\n2892\n2893\n2894\n2895
\n2896\n2897\n2898\n2899\n2900\n2901\n2902\n2903\n2904\n2905\n2906\n2907\n2908\n2909\n2910\n2911\n2912\n2913\n2914\n2915\n2916\n2917\n2918\n2919\n2920\n2921\n2922\n144\n2923\n2924\n2925\n2926\n2927\n2928\n2929\n2930\n2931\n2932\n2933\n2934\n2935\n145\n2936\n2937\n2938\n2939\n2940\n146\n2941\n2942\n2943\n2944\n2945\n2946\n2947\n2948\n2949\n2950\n2951\n2952\n2953\n2954\n2955\n2956\n2957\n2958\n2959\n2960\n2961\n2962\n2963\n2964\n147\n2965\n2966\n2967\n2968\n2969\n2970\n2971\n2972\n2973\n2974\n2975\n2976\n2977\n2978\n2979\n148\n2980\n2981\n2982\n2983\n2984\n2985\n149\n2986\n2987\n2988\n2989\n2990\n2991\n2992\n2993\n2994\n2995\n2996\n150\n2997\n2998\n2999\n3000\n3001\n3002\n3003\n3004\n3005\n3006\n3007\n3008\n3009\n3010\n3011\n3012\n3013\n3014\n3015\n3016\n3017\n3018\n3019\n3020\n3021\n3022\n151\n152\n153\n154\n155\n3023\n3024\n156\n3025\n157\n158\n3026\n3027\n159\n3028\n160\n161\n162\n163\n164\n3029\n3030\n3031\n165\n3032\n3033\n3034\n3035\n166\n167\n3036\n3037\n168\n3038\n169\n170\n3039\n171\n172\n173\n174\n175\n176\n177\n3040\n178\n3041\n3042\n179\n180\n181\n182\n183\n184\n185\n186\n187\n3043\n3044\n3045\n3046\n3047\n188\n3048\n189\n3049\n190\n191\n192\n193\n194\n195\n3050\n196\n197\n198\n199\n200\n201\n3051\n3052\n202\n203\n204\n3053\n3054\n3055\n3056\n205\n206\n207\n208\n209\n3057\n210\n3058\n211\n212\n213\n214\n3059\n215\n216\n3060\n3061\n217\n218\n219\n220\n3062\n3063\n221\n3064\n3065\n3066\n222\n3067\n3068\n223\n3069\n3070\n224\n225\n226\n227\n228\n229\n230\n231\n232\n233\n234\n235\n3071\n3072\n236\n237\n3073\n238\n239\n240\n241\n242\n3074\n243\n244\n3075\n245\n246\n3076\n3077\n3078\n247\n3079\n3080\n248\n249\n250\n251\n3081\n252\n253\n254\n255\n256\n257\n3082\n258\n259\n260\n261\n3083\n262\n3084\n263\n264\n3085\n265\n266\n267\n3086\n268\n3087\n269\n270\n271\n272\n3088\n3089\n3090\n273\n274\n3091\n3092\n275\n276\n3093\n3094\n3095\n3096\n3097\n3098\n3099\n277\n3100\n3101\n3102\n3103\n278\n279\n3104\n280\n3105\n3106\n3107\n3108\n3109\n3110\n3111\n3112\n3113\n3114\
n3115\n281\n282\n3116\n283\n3117\n284\n3118\n3119\n285\n3120\n3121\n3122\n3123\n3124\n3125\n286\n3126\n3127\n3128\n3129\n3130\n3131\n3132\n3133\n287\n3134\n3135\n3136\n3137\n3138\n3139\n288\n3140\n3141\n289\n290\n291\n3142\n3143\n292\n3144\n3145\n3146\n3147\n293\n3148\n3149\n3150\n294\n3151\n3152\n3153\n3154\n295\n3155\n3156\n296\n297\n3157\n3158\n3159\n3160\n3161\n3162\n3163\n3164\n3165\n3166\n298\n3167\n3168\n3169\n299\n3170\n3171\n3172\n3173\n3174\n3175\n3176\n3177\n3178\n3179\n3180\n3181\n3182\n3183\n3184\n3185\n3186\n3187\n3188\n3189\n3190\n3191\n3192\n3193\n3194\n3195\n3196\n3197\n3198\n3199\n3200\n3201\n3202\n3203\n3204\n3205\n3206\n3207\n3208\n3209\n3210\n3211\n3212\n3213\n3214\n3215\n3216\n3217\n3218\n3219\n3220\n3221\n3222\n3223\n3224\n3225\n3226\n3227\n3228\n3229\n3230\n3231\n300\n301\n3232\n3233\n3234\n3235\n302\n3236\n3237\n3238\n3239\n3240\n303\n3241\n3242\n304\n3243\n3244\n3245\n3246\n3247\n3248\n3249\n3250\n3251\n305\n3252\n3253\n3254\n3255\n3256\n3257\n3258\n306\n3259\n3260\n3261\n3262\n3263\n3264\n3265\n3266\n3267\n3268\n3269\n3270\n307\n3271\n3272\n3273\n3274\n3275\n3276\n3277\n3278\n3279\n3280\n3281\n3282\n3283\n3284\n3285\n3286\n3287\n3288\n3289\n3290\n3291\n3292\n3293\n3294\n3295\n3296\n3297\n3298\n3299\n3300\n3301\n3302\n3303\n308\n3304\n3305\n3306\n3307\n3308\n3309\n3310\n3311\n3312\n3313\n3314\n3315\n3316\n3317\n3318\n3319\n3320\n3321\n3322\n3323\n3324\n3325\n3326\n3327\n3328\n3329\n3330\n3331\n3332\n3333\n3334\n3335\n3336\n3337\n3338\n3339\n3340\n3341\n3342\n3343\n3344\n3345\n3346\n3347\n3348\n309\n3349\n3350\n3351\n3352\n3353\n3354\n3355\n3356\n3357\n3358\n3359\n3360\n3361\n3362\n3363\n3364\n3365\n3366\n3367\n3368\n3369\n3370\n3371\n3372\n3373\n3374\n3375\n3376\n3377\n3378\n3379\n3380\n3381\n3382\n3383\n3384\n3385\n3386\n3387\n3388\n3389\n3390\n3391\n310\n3392\n3393\n3394\n3395\n3396\n3397\n3398\n3399\n3400\n3401\n3402\n3403\n3404\n3405\n3406\n3407\n3408\n3409\n3410\n311\n3411\n3412\n3413\n3414\n3415\n3416\n3417\n3418\n312\n3419\n3420\n342
1\n3422\n3423\n3424\n313\n3425\n3426\n314\n3427\n3428\n3429\n3430\n3431\n315\n3432\n3433\n3434\n3435\n3436\n3437\n3438\n3439\n3440\n3441\n3442\n3443\n3444\n3445\n3446\n3447\n3448\n3449\n3450\n3451\n3452\n3453\n3454\n3455\n3456\n3457\n3458\n3459\n3460\n3461\n3462\n3463\n3464\n3465\n3466\n3467\n3468\n3469\n3470\n3471\n3472\n3473\n3474\n3475\n3476\n3477\n3478\n3479\n3480\n3481\n3482\n3483\n3484\n3485\n3486\n3487\n3488\n3489\n3490\n3491\n3492\n3493\n316\n3494\n3495\n3496\n3497\n3498\n3499\n3500\n317\n3501\n3502\n3503\n3504\n3505\n3506\n3507\n3508\n3509\n3510\n3511\n3512\n3513\n3514\n318\n3515\n3516\n3517\n3518\n3519\n3520\n3521\n3522\n3523\n3524\n319\n320\n3525\n3526\n3527\n3528\n3529\n3530\n3531\n3532\n3533\n3534\n3535\n3536\n3537\n3538\n3539\n3540\n3541\n3542\n3543\n3544\n3545\n321\n3546\n3547\n3548\n3549\n3550\n3551\n322\n3552\n3553\n3554\n3555\n3556\n3557\n3558\n323\n3559\n324\n3560\n3561\n3562\n325\n326\n3563\n3564\n3565\n3566\n3567\n3568\n3569\n3570\n3571\n3572\n3573\n3574\n3575\n3576\n3577\n3578\n3579\n3580\n3581\n3582\n3583\n3584\n3585\n3586\n3587\n3588\n3589\n3590\n3591\n3592\n3593\n3594\n3595\n3596\n3597\n3598\n3599\n3600\n3601\n3602\n3603\n3604\n3605\n3606\n3607\n3608\n3609\n3610\n3611\n3612\n3613\n3614\n3615\n3616\n3617\n3618\n3619\n3620\n3621\n3622\n3623\n3624\n3625\n3626\n3627\n3628\n3629\n3630\n3631\n3632\n3633\n3634\n3635\n3636\n3637\n3638\n3639\n3640\n3641\n3642\n3643\n3644\n3645\n3646\n3647\n3648\n3649\n3650\n3651\n3652\n3653\n3654\n3655\n3656\n3657\n3658\n3659\n3660\n3661\n3662\n3663\n3664\n3665\n3666\n3667\n3668\n3669\n3670\n3671\n3672\n3673\n3674\n3675\n3676\n3677\n3678\n3679\n3680\n327\n3681\n3682\n3683\n328\n3684\n3685\n3686\n3687\n3688\n3689\n329\n3690\n3691\n3692\n3693\n3694\n3695\n3696\n3697\n3698\n330\n3699\n3700\n3701\n331\n3702\n3703\n3704\n3705\n3706\n3707\n3708\n3709\n332\n3710\n3711\n3712\n3713\n3714\n3715\n3716\n3717\n3718\n3719\n3720\n3721\n3722\n3723\n3724\n3725\n3726\n3727\n3728\n3729\n3730\n3731\n3732\n3733\n3734\n3735\n3736\n3737\n3
738\n3739\n3740\n3741\n3742\n3743\n3744\n3745\n3746\n3747\n3748\n3749\n3750\n3751\n3752\n3753\n3754\n3755\n3756\n3757\n333\n3758\n3759\n3760\n3761\n3762\n3763\n3764\n3765\n3766\n3767\n3768\n3769\n3770\n3771\n334\n3772\n3773\n3774\n3775\n3776\n3777\n3778\n3779\n3780\n3781\n3782\n3783\n3784\n3785\n3786\n3787\n3788\n3789\n3790\n3791\n3792\n3793\n3794\n3795\n3796\n3797\n3798\n3799\n3800\n3801\n3802\n335\n3803\n3804\n3805\n3806\n3807\n3808\n3809\n3810\n3811\n3812\n3813\n3814\n3815\n3816\n3817\n3818\n3819\n3820\n3821\n3822\n336\n3823\n3824\n3825\n3826\n337\n3827\n3828\n3829\n3830\n338\n3831\n3832\n3833\n3834\n3835\n3836\n3837\n3838\n3839\n3840\n3841\n3842\n3843\n3844\n3845\n3846\n3847\n3848\n3849\n3850\n3851\n3852\n3853\n3854\n3855\n3856\n3857\n3858\n3859\n3860\n3861\n3862\n3863\n3864\n3865\n3866\n3867\n3868\n3869\n3870\n3871\n3872\n3873\n3874\n3875\n3876\n3877\n3878\n3879\n3880\n3881\n3882\n3883\n3884\n3885\n3886\n3887\n3888\n3889\n3890\n3891\n3892\n3893\n3894\n3895\n3896\n3897\n3898\n3899\n3900\n3901\n3902\n3903\n3904\n3905\n3906\n3907\n3908\n3909\n3910\n3911\n3912\n3913\n3914\n3915\n3916\n3917\n3918\n3919\n3920\n3921\n3922\n3923\n3924\n3925\n3926\n3927\n3928\n3929\n3930\n3931\n3932\n3933\n3934\n3935\n3936\n3937\n3938\n3939\n3940\n339\n3941\n3942\n3943\n3944\n3945\n3946\n3947\n3948\n3949\n3950\n3951\n3952\n3953\n3954\n3955\n340\n3956\n3957\n3958\n3959\n3960\n3961\n3962\n3963\n3964\n3965\n3966\n3967\n3968\n3969\n341\n3970\n3971\n3972\n3973\n3974\n3975\n342\n3976\n343\n3977\n3978\n3979\n344\n3980\n3981\n3982\n3983\n3984\n345\n3985\n3986\n3987\n3988\n3989\n3990\n3991\n3992\n3993\n3994\n3995\n3996\n3997\n3998\n3999\n4000\n4001\n4002\n4003\n4004\n4005\n4006\n4007\n4008\n4009\n4010\n4011\n4012\n4013\n4014\n4015\n4016\n4017\n4018\n4019\n346\n4020\n4021\n4022\n4023\n4024\n4025\n4026\n4027\n347\n4028\n4029\n4030\n4031\n4032\n348\n4033\n4034\n4035\n4036\n4037\n4038\n4039\n4040\n4041\n4042\n4043\n4044\n4045\n4046\n4047\n4048\n4049\n4050\n4051\n4052\n4053\n4054\n4055\n4056\n349\n40
57\n4058\n4059\n4060\n4061\n4062\n4063\n4064\n4065\n4066\n4067\n4068\n350\n4069\n4070\n4071\n4072\n4073\n4074\n4075\n4076\n4077\n4078\n4079\n4080\n351\n4081\n352\n353\n4082\n4083\n4084\n4085\n4086\n4087\n4088\n4089\n4090\n4091\n4092\n4093\n4094\n4095\n4096\n4097\n4098\n4099\n4100\n4101\n4102\n4103\n4104\n4105\n4106\n4107\n4108\n4109\n4110\n4111\n4112\n4113\n4114\n4115\n4116\n4117\n4118\n4119\n4120\n4121\n4122\n4123\n4124\n4125\n4126\n4127\n4128\n4129\n4130\n4131\n4132\n4133\n4134\n4135\n4136\n4137\n4138\n354\n4139\n355\n4140\n4141\n4142\n4143\n4144\n4145\n4146\n356\n4147\n4148\n4149\n4150\n4151\n357\n4152\n358\n4153\n359\n4154\n4155\n4156\n360\n4157\n4158\n4159\n361\n4160\n4161\n4162\n4163\n362\n4164\n4165\n4166\n4167\n4168\n4169\n4170\n4171\n4172\n4173\n4174\n4175\n4176\n4177\n4178\n4179\n4180\n4181\n4182\n363\n4183\n4184\n4185\n4186\n4187\n4188\n4189\n364\n4190\n4191\n4192\n4193\n4194\n4195\n4196\n4197\n4198\n4199\n4200\n4201\n4202\n4203\n4204\n4205\n4206\n4207\n4208\n4209\n4210\n4211\n4212\n4213\n4214\n4215\n4216\n4217\n4218\n4219\n4220\n4221\n4222\n4223\n4224\n4225\n4226\n4227\n4228\n4229\n4230\n4231\n4232\n4233\n4234\n365\n366\n4235\n4236\n4237\n4238\n367\n4239\n4240\n4241\n4242\n4243\n368\n369\n4244\n4245\n370\n4246\n4247\n4248\n4249\n4250\n371\n372\n4251\n4252\n4253\n373\n4254\n4255\n4256\n4257\n374\n4258\n375\n4259\n376\n4260\n377\n4261\n4262\n4263\n4264\n4265\n378\n4266\n379\n4267\n4268\n380\n381\n382\n4269\n4270\n4271\n4272\n383\n4273\n4274\n4275\n4276\n4277\n4278\n384\n4279\n4280\n4281\n4282\n4283\n4284\n4285\n4286\n4287\n385\n386\n4288\n4289\n4290\n4291\n4292\n4293\n4294\n4295\n4296\n4297\n4298\n4299\n4300\n4301\n387\n388\n4302\n4303\n4304\n4305\n4306\n4307\n4308\n4309\n4310\n4311\n4312\n389\n4313\n4314\n4315\n4316\n4317\n4318\n4319\n4320\n4321\n4322\n4323\n4324\n4325\n4326\n4327\n4328\n4329\n4330\n4331\n4332\n4333\n4334\n4335\n4336\n4337\n4338\n4339\n4340\n4341\n4342\n390\n4343\n4344\n4345\n4346\n4347\n4348\n4349\n4350\n4351\n4352\n4353\n4354\n4355\n435
6\n4357\n4358\n4359\n4360\n4361\n4362\n4363\n4364\n4365\n4366\n4367\n4368\n4369\n4370\n4371\n4372\n4373\n4374\n4375\n4376\n4377\n391\n4378\n4379\n4380\n4381\n4382\n4383\n4384\n4385\n4386\n4387\n4388\n4389\n4390\n4391\n4392\n4393\n4394\n4395\n4396\n4397\n4398\n4399\n4400\n4401\n4402\n4403\n4404\n4405\n4406\n4407\n4408\n4409\n4410\n4411\n4412\n4413\n4414\n4415\n4416\n4417\n4418\n4419\n4420\n4421\n4422\n4423\n4424\n4425\n4426\n4427\n4428\n4429\n4430\n4431\n4432\n4433\n4434\n4435\n4436\n4437\n4438\n4439\n4440\n4441\n4442\n4443\n4444\n4445\n4446\n4447\n4448\n4449\n4450\n4451\n4452\n4453\n4454\n4455\n4456\n4457\n4458\n4459\n4460\n4461\n4462\n4463\n4464\n4465\n4466\n4467\n4468\n4469\n4470\n4471\n4472\n4473\n4474\n4475\n4476\n4477\n4478\n4479\n4480\n4481\n4482\n4483\n4484\n4485\n4486\n4487\n4488\n4489\n4490\n4491\n4492\n4493\n4494\n4495\n4496\n4497\n4498\n4499\n4500\n4501\n4502\n4503\n4504\n4505\n4506\n4507\n4508\n4509\n4510\n4511\n4512\n4513\n4514\n4515\n4516\n4517\n4518\n4519\n4520\n4521\n4522\n4523\n4524\n4525\n4526\n4527\n4528\n4529\n4530\n4531\n4532\n4533\n4534\n4535\n4536\n4537\n4538\n4539\n4540\n4541\n4542\n4543\n4544\n4545\n4546\n4547\n4548\n4549\n4550\n4551\n4552\n4553\n4554\n4555\n4556\n4557\n4558\n4559\n4560\n4561\n4562\n4563\n4564\n4565\n4566\n4567\n4568\n4569\n4570\n4571\n4572\n4573\n4574\n4575\n4576\n4577\n4578\n392\n4579\n4580\n393\n4581\n4582\n4583\n4584\n4585\n4586\n4587\n4588\n4589\n4590\n4591\n4592\n4593\n4594\n4595\n4596\n4597\n4598\n4599\n4600\n4601\n4602\n4603\n4604\n4605\n4606\n4607\n4608\n4609\n4610\n4611\n4612\n4613\n4614\n4615\n4616\n4617\n4618\n4619\n4620\n4621\n4622\n4623\n4624\n4625\n4626\n4627\n4628\n4629\n4630\n4631\n4632\n4633\n4634\n4635\n4636\n4637\n4638\n4639\n4640\n4641\n4642\n4643\n4644\n4645\n4646\n4647\n4648\n4649\n4650\n4651\n4652\n4653\n4654\n4655\n4656\n4657\n4658\n4659\n4660\n4661\n4662\n4663\n4664\n4665\n4666\n4667\n4668\n4669\n4670\n394\n4671\n4672\n395\n4673\n4674\n4675\n4676\n396\n4677\n4678\n4679\n4680\n4681\n4682\n4683\n4684\
n4685\n4686\n4687\n4688\n4689\n4690\n4691\n4692\n4693\n4694\n4695\n4696\n4697\n4698\n4699\n4700\n4701\n4702\n4703\n4704\n397\n4705\n4706\n4707\n4708\n4709\n4710\n4711\n4712\n4713\n4714\n4715\n4716\n4717\n4718\n4719\n4720\n4721\n4722\n4723\n4724\n4725\n4726\n4727\n4728\n4729\n4730\n4731\n4732\n4733\n4734\n4735\n4736\n4737\n398\n4738\n4739\n4740\n399\n4741\n4742\n4743\n4744\n4745\n4746\n4747\n4748\n4749\n4750\n400\n4751\n4752\n4753\n4754\n4755\n4756\n4757\n401\n4758\n4759\n4760\n4761\n4762\n402\n4763\n4764\n4765\n4766\n4767\n4768\n4769\n4770\n4771\n4772\n4773\n4774\n4775\n4776\n4777\n4778\n4779\n4780\n4781\n4782\n4783\n4784\n4785\n4786\n4787\n4788\n4789\n4790\n4791\n4792\n4793\n4794\n4795\n4796\n4797\n4798\n4799\n4800\n4801\n4802\n4803\n403\n4804\n4805\n4806\n4807\n4808\n4809\n4810\n4811\n4812\n4813\n4814\n404\n4815\n4816\n4817\n4818\n4819\n4820\n405\n4821\n4822\n4823\n4824\n4825\n4826\n4827\n4828\n4829\n4830\n4831\n4832\n4833\n4834\n4835\n4836\n4837\n4838\n4839\n4840\n4841\n4842\n406\n4843\n4844\n4845\n4846\n4847\n4848\n407\n4849\n4850\n4851\n4852\n4853\n4854\n408\n4855\n4856\n4857\n4858\n4859\n4860\n409\n4861\n4862\n4863\n4864\n4865\n4866\n4867\n4868\n4869\n4870\n4871\n4872\n4873\n4874\n4875\n4876\n4877\n4878\n4879\n4880\n4881\n4882\n4883\n4884\n4885\n4886\n4887\n4888\n4889\n4890\n4891\n4892\n4893\n4894\n4895\n4896\n4897\n4898\n4899\n4900\n4901\n4902\n410\n4903\n4904\n4905\n4906\n4907\n4908\n4909\n411\n4910\n4911\n4912\n4913\n4914\n4915\n4916\n4917\n4918\n4919\n4920\n4921\n4922\n4923\n4924\n4925\n4926\n4927\n4928\n4929\n4930\n4931\n4932\n4933\n4934\n4935\n4936\n4937\n4938\n4939\n4940\n4941\n4942\n4943\n4944\n4945\n4946\n4947\n4948\n4949\n4950\n4951\n4952\n4953\n4954\n4955\n4956\n4957\n4958\n4959\n4960\n4961\n4962\n4963\n4964\n4965\n412\n4966\n4967\n4968\n4969\n4970\n4971\n4972\n413\n4973\n4974\n4975\n4976\n4977\n4978\n4979\n4980\n4981\n4982\n4983\n4984\n4985\n4986\n4987\n4988\n4989\n4990\n4991\n4992\n4993\n4994\n4995\n4996\n4997\n4998\n4999\n5000\n5001\n5002\n5003\n
5004\n5005\n5006\n5007\n5008\n5009\n5010\n5011\n5012\n5013\n5014\n5015\n5016\n5017\n5018\n5019\n5020\n5021\n5022\n5023\n5024\n5025\n5026\n5027\n5028\n5029\n5030\n5031\n5032\n5033\n5034\n5035\n5036\n5037\n5038\n5039\n5040\n5041\n5042\n5043\n5044\n5045\n5046\n5047\n5048\n5049\n5050\n5051\n5052\n5053\n5054\n5055\n5056\n5057\n5058\n5059\n5060\n5061\n5062\n5063\n414\n5064\n5065\n5066\n5067\n5068\n5069\n5070\n5071\n5072\n5073\n5074\n5075\n5076\n5077\n5078\n5079\n5080\n5081\n5082\n5083\n5084\n5085\n5086\n5087\n5088\n5089\n5090\n5091\n5092\n415\n5093\n5094\n5095\n416\n5096\n5097\n5098\n5099\n5100\n5101\n5102\n5103\n5104\n5105\n5106\n5107\n5108\n5109\n5110\n5111\n5112\n5113\n5114\n5115\n5116\n5117\n5118\n417\n5119\n5120\n5121\n5122\n5123\n418\n5124\n5125\n5126\n5127\n5128\n5129\n5130\n419\n5131\n5132\n5133\n5134\n5135\n5136\n5137\n5138\n5139\n420\n5140\n421\n5141\n5142\n5143\n5144\n5145\n5146\n5147\n422\n423\n424\n5148\n5149\n5150\n5151\n5152\n5153\n5154\n5155\n5156\n5157\n425\n5158\n5159\n5160\n5161\n426\n5162\n5163\n5164\n5165\n5166\n427\n5167\n5168\n5169\n5170\n5171\n5172\n5173\n5174\n5175\n428\n5176\n5177\n5178\n429\n5179\n5180\n5181\n5182\n5183\n5184\n5185\n5186\n5187\n5188\n5189\n5190\n5191\n5192\n430\n5193\n5194\n5195\n5196\n5197\n5198\n5199\n5200\n5201\n5202\n431\n5203\n432\n5204\n5205\n5206\n5207\n5208\n5209\n5210\n5211\n5212\n433\n5213\n5214\n5215\n5216\n434\n435\n5217\n5218\n5219\n5220\n5221\n5222\n5223\n5224\n5225\n5226\n5227\n5228\n5229\n5230\n5231\n5232\n5233\n5234\n5235\n5236\n5237\n5238\n5239\n5240\n5241\n5242\n5243\n5244\n5245\n5246\n5247\n5248\n5249\n436\n5250\n437\n5251\n5252\n438\n5253\n5254\n5255\n5256\n5257\n5258\n5259\n439\n5260\n5261\n5262\n5263\n5264\n5265\n5266\n5267\n5268\n5269\n5270\n5271\n5272\n5273\n5274\n5275\n5276\n5277\n5278\n5279\n5280\n5281\n5282\n5283\n440\n5284\n5285\n441\n5286\n5287\n5288\n5289\n5290\n5291\n5292\n5293\n5294\n442\n5295\n5296\n5297\n5298\n5299\n5300\n5301\n5302\n5303\n5304\n5305\n5306\n5307\n5308\n5309\n5310\n5311\n5312\n5
313\n5314\n5315\n5316\n5317\n5318\n5319\n5320\n5321\n5322\n5323\n5324\n5325\n5326\n443\n5327\n5328\n5329\n444\n5330\n5331\n5332\n5333\n5334\n5335\n5336\n5337\n5338\n5339\n5340\n5341\n5342\n5343\n445\n5344\n5345\n5346\n5347\n5348\n5349\n5350\n5351\n5352\n5353\n5354\n5355\n446\n5356\n5357\n5358\n5359\n447\n5360\n5361\n5362\n5363\n5364\n5365\n5366\n5367\n5368\n5369\n5370\n448\n5371\n5372\n5373\n5374\n5375\n5376\n5377\n5378\n5379\n5380\n5381\n5382\n5383\n5384\n5385\n5386\n5387\n5388\n5389\n5390\n5391\n5392\n5393\n5394\n5395\n5396\n5397\n5398\n5399\n5400\n5401\n5402\n5403\n5404\n5405\n5406\n5407\n5408\n5409\n5410\n5411\n5412\n5413\n5414\n5415\n5416\n5417\n5418\n5419\n5420\n5421\n5422\n5423\n5424\n5425\n5426\n5427\n5428\n5429\n5430\n449\n5431\n5432\n5433\n5434\n5435\n450\n5436\n5437\n5438\n5439\n5440\n5441\n5442\n5443\n5444\n5445\n5446\n5447\n5448\n5449\n5450\n5451\n5452\n5453\n5454\n5455\n5456\n451\n5457\n5458\n5459\n5460\n5461\n5462\n5463\n5464\n5465\n5466\n5467\n5468\n5469\n5470\n5471\n5472\n5473\n5474\n452\n5475\n5476\n5477\n453\n5478\n5479\n5480\n5481\n454\n5482\n5483\n5484\n5485\n5486\n5487\n5488\n5489\n5490\n5491\n5492\n5493\n5494\n5495\n5496\n5497\n5498\n5499\n5500\n5501\n5502\n5503\n5504\n5505\n5506\n5507\n5508\n455\n5509\n5510\n5511\n5512\n5513\n5514\n5515\n5516\n5517\n5518\n5519\n456\n5520\n5521\n5522\n5523\n5524\n5525\n5526\n5527\n5528\n5529\n5530\n5531\n5532\n5533\n5534\n5535\n457\n5536\n5537\n5538\n5539\n5540\n5541\n5542\n5543\n5544\n5545\n5546\n5547\n5548\n5549\n5550\n5551\n5552\n5553\n5554\n5555\n5556\n5557\n5558\n5559\n5560\n5561\n5562\n5563\n5564\n5565\n5566\n5567\n5568\n5569\n458\n5570\n5571\n5572\n5573\n459\n5574\n5575\n5576\n5577\n5578\n5579\n5580\n5581\n5582\n460\n5583\n5584\n461\n5585\n5586\n5587\n5588\n5589\n5590\n5591\n5592\n5593\n5594\n5595\n5596\n5597\n5598\n5599\n5600\n5601\n5602\n5603\n5604\n5605\n5606\n5607\n5608\n5609\n5610\n5611\n5612\n5613\n5614\n5615\n5616\n5617\n5618\n5619\n5620\n5621\n5622\n5623\n5624\n5625\n5626\n5627\n5628\n5629\n5630
\n5631\n5632\n5633\n462\n5634\n5635\n5636\n5637\n5638\n5639\n5640\n5641\n5642\n5643\n5644\n5645\n5646\n5647\n463\n5648\n5649\n464\n5650\n5651\n5652\n5653\n5654\n5655\n5656\n5657\n5658\n5659\n5660\n5661\n5662\n5663\n5664\n5665\n465\n466\n5666\n5667\n5668\n5669\n5670\n5671\n5672\n5673\n5674\n5675\n5676\n5677\n5678\n5679\n5680\n5681\n5682\n5683\n5684\n5685\n5686\n5687\n5688\n5689\n5690\n5691\n5692\n5693\n5694\n5695\n5696\n5697\n5698\n5699\n5700\n5701\n5702\n5703\n5704\n5705\n5706\n5707\n5708\n5709\n5710\n5711\n5712\n5713\n5714\n5715\n467\n5716\n5717\n5718\n5719\n5720\n5721\n5722\n5723\n5724\n5725\n5726\n5727\n5728\n5729\n5730\n468\n5731\n5732\n5733\n5734\n5735\n5736\n5737\n5738\n5739\n5740\n5741\n5742\n5743\n5744\n5745\n5746\n5747\n5748\n5749\n5750\n5751\n5752\n5753\n5754\n5755\n5756\n5757\n5758\n5759\n5760\n5761\n5762\n5763\n5764\n5765\n469\n5766\n5767\n5768\n5769\n5770\n5771\n5772\n5773\n5774\n5775\n5776\n5777\n5778\n5779\n5780\n5781\n5782\n5783\n5784\n5785\n5786\n5787\n5788\n5789\n5790\n5791\n5792\n5793\n5794\n5795\n5796\n5797\n5798\n5799\n5800\n5801\n5802\n5803\n5804\n470\n5805\n5806\n5807\n5808\n5809\n5810\n5811\n5812\n5813\n5814\n5815\n5816\n5817\n5818\n5819\n471\n5820\n472\n473\n5821\n5822\n5823\n5824\n5825\n5826\n5827\n5828\n5829\n5830\n5831\n5832\n5833\n5834\n5835\n5836\n5837\n5838\n5839\n5840\n5841\n5842\n5843\n5844\n5845\n5846\n5847\n5848\n5849\n5850\n5851\n5852\n5853\n5854\n5855\n5856\n5857\n5858\n5859\n5860\n5861\n5862\n5863\n5864\n5865\n5866\n5867\n5868\n474\n5869\n5870\n5871\n5872\n5873\n5874\n5875\n5876\n5877\n5878\n5879\n5880\n5881\n5882\n5883\n5884\n475\n5885\n476\n5886\n477\n5887\n5888\n5889\n5890\n5891\n5892\n5893\n5894\n5895\n5896\n5897\n5898\n5899\n5900\n5901\n5902\n5903\n5904\n5905\n5906\n5907\n5908\n5909\n5910\n5911\n478\n5912\n5913\n5914\n5915\n5916\n5917\n5918\n5919\n5920\n5921\n5922\n479\n5923\n5924\n5925\n5926\n5927\n5928\n5929\n5930\n5931\n5932\n5933\n5934\n5935\n5936\n5937\n480\n5938\n5939\n5940\n5941\n5942\n5943\n5944\n5945\n5946\n481\n59
47\n482\n5948\n5949\n5950\n5951\n5952\n5953\n483\n5954\n5955\n5956\n5957\n5958\n484\n5959\n5960\n5961\n5962\n5963\n5964\n5965\n5966\n5967\n5968\n5969\n5970\n5971\n5972\n5973\n5974\n5975\n5976\n5977\n5978\n5979\n5980\n5981\n5982\n5983\n5984\n5985\n5986\n5987\n485\n5988\n5989\n5990\n5991\n5992\n5993\n5994\n5995\n5996\n5997\n486\n5998\n487\n5999\n6000\n6001\n6002\n6003\n6004\n6005\n6006\n6007\n6008\n6009\n6010\n6011\n6012\n6013\n6014\n6015\n6016\n488\n6017\n489\n490\n6018\n491\n6019\n6020\n6021\n6022\n6023\n6024\n6025\n6026\n6027\n6028\n6029\n6030\n6031\n6032\n6033\n6034\n6035\n6036\n6037\n6038\n6039\n6040\n6041\n6042\n6043\n6044\n6045\n6046\n6047\n6048\n6049\n6050\n6051\n6052\n6053\n6054\n6055\n6056\n6057\n6058\n6059\n6060\n6061\n6062\n6063\n6064\n6065\n6066\n6067\n6068\n6069\n6070\n6071\n6072\n6073\n6074\n492\n6075\n6076\n6077\n6078\n6079\n6080\n6081\n6082\n6083\n6084\n493\n6085\n494\n6086\n6087\n6088\n495\n6089\n6090\n6091\n6092\n6093\n6094\n6095\n6096\n6097\n6098\n6099\n6100\n6101\n6102\n6103\n6104\n6105\n6106\n6107\n6108\n6109\n496\n6110\n6111\n6112\n6113\n6114\n6115\n497\n6116\n6117\n6118\n6119\n6120\n6121\n6122\n6123\n6124\n6125\n6126\n6127\n6128\n6129\n6130\n498\n6131\n6132\n6133\n6134\n6135\n6136\n6137\n6138\n6139\n6140\n6141\n6142\n6143\n6144\n6145\n6146\n6147\n6148\n6149\n6150\n6151\n6152\n6153\n6154\n6155\n6156\n6157\n6158\n6159\n6160\n6161\n6162\n6163\n6164\n6165\n6166\n6167\n6168\n6169\n6170\n6171\n6172\n499\n6173\n6174\n6175\n500\n6176\n6177\n6178\n6179\n6180\n6181\n6182\n6183\n6184\n6185\n6186\n6187\n6188\n6189\n6190\n501\n6191\n6192\n6193\n6194\n6195\n6196\n6197\n6198\n502\n6199\n6200\n6201\n6202\n6203\n6204\n6205\n6206\n6207\n6208\n6209\n6210\n6211\n6212\n6213\n6214\n6215\n6216\n6217\n6218\n6219\n6220\n6221\n6222\n6223\n6224\n6225\n6226\n6227\n6228\n6229\n6230\n6231\n6232\n6233\n6234\n6235\n6236\n6237\n6238\n6239\n6240\n6241\n6242\n6243\n6244\n6245\n6246\n6247\n6248\n6249\n6250\n6251\n6252\n6253\n6254\n6255\n6256\n503\n6257\n6258\n6259\n6260\n6261\n62
62\n6263\n6264\n504\n505\n6265\n6266\n6267\n6268\n6269\n6270\n6271\n6272\n506\n6273\n6274\n6275\n6276\n6277\n6278\n6279\n6280\n6281\n6282\n6283\n6284\n6285\n6286\n6287\n6288\n6289\n6290\n6291\n6292\n6293\n6294\n6295\n6296\n6297\n6298\n6299\n6300\n6301\n6302\n6303\n6304\n6305\n6306\n507\n6307\n6308\n6309\n6310\n6311\n6312\n6313\n6314\n6315\n6316\n6317\n6318\n6319\n6320\n6321\n6322\n6323\n6324\n6325\n6326\n6327\n6328\n6329\n6330\n6331\n6332\n6333\n6334\n6335\n6336\n6337\n6338\n6339\n6340\n6341\n508\n6342\n6343\n6344\n6345\n6346\n6347\n6348\n6349\n6350\n6351\n6352\n6353\n6354\n6355\n6356\n6357\n6358\n6359\n6360\n6361\n509\n6362\n6363\n6364\n6365\n6366\n6367\n6368\n6369\n6370\n6371\n6372\n6373\n6374\n6375\n6376\n6377\n6378\n6379\n6380\n6381\n6382\n510\n6383\n6384\n6385\n6386\n6387\n6388\n6389\n6390\n6391\n6392\n6393\n6394\n6395\n6396\n6397\n6398\n6399\n511\n6400\n6401\n6402\n6403\n6404\n6405\n6406\n6407\n6408\n6409\n6410\n6411\n6412\n6413\n6414\n6415\n6416\n6417\n6418\n6419\n6420\n6421\n6422\n6423\n6424\n6425\n6426\n6427\n6428\n6429\n6430\n6431\n6432\n6433\n6434\n6435\n6436\n512\n6437\n6438\n6439\n6440\n513\n6441\n6442\n6443\n6444\n6445\n6446\n6447\n6448\n6449\n6450\n6451\n6452\n6453\n6454\n6455\n6456\n6457\n6458\n6459\n6460\n6461\n6462\n6463\n6464\n6465\n6466\n6467\n6468\n6469\n6470\n6471\n6472\n6473\n6474\n6475\n6476\n6477\n6478\n6479\n6480\n6481\n6482\n6483\n6484\n6485\n6486\n6487\n6488\n6489\n6490\n514\n515\n6491\n6492\n6493\n6494\n6495\n516\n6496\n6497\n6498\n6499\n517\n6500\n6501\n6502\n6503\n6504\n518\n519\n6505\n6506\n6507\n6508\n6509\n6510\n6511\n6512\n6513\n6514\n6515\n6516\n6517\n6518\n6519\n520\n6520\n6521\n6522\n6523\n6524\n6525\n6526\n6527\n6528\n521\n6529\n6530\n6531\n522\n6532\n6533\n6534\n6535\n6536\n6537\n6538\n6539\n6540\n6541\n6542\n6543\n6544\n6545\n6546\n6547\n6548\n6549\n6550\n6551\n6552\n6553\n6554\n6555\n6556\n6557\n6558\n6559\n6560\n6561\n6562\n6563\n523\n6564\n6565\n6566\n6567\n6568\n6569\n6570\n6571\n6572\n6573\n6574\n6575\n6576\n6577\n6578\n
6579\n6580\n524\n6581\n6582\n6583\n6584\n6585\n6586\n6587\n6588\n6589\n6590\n6591\n6592\n6593\n6594\n6595\n6596\n6597\n6598\n6599\n6600\n6601\n6602\n6603\n6604\n6605\n6606\n6607\n6608\n6609\n6610\n6611\n6612\n6613\n6614\n6615\n6616\n6617\n6618\n6619\n6620\n6621\n6622\n6623\n6624\n6625\n6626\n6627\n6628\n6629\n6630\n6631\n525\n6632\n6633\n6634\n6635\n6636\n6637\n6638\n6639\n6640\n6641\n6642\n6643\n6644\n6645\n6646\n6647\n6648\n6649\n6650\n6651\n6652\n6653\n6654\n6655\n6656\n6657\n6658\n6659\n6660\n6661\n6662\n6663\n6664\n6665\n6666\n6667\n6668\n6669\n6670\n6671\n6672\n6673\n6674\n6675\n6676\n6677\n6678\n6679\n6680\n6681\n6682\n6683\n6684\n6685\n6686\n6687\n6688\n6689\n6690\n6691\n6692\n6693\n6694\n6695\n6696\n6697\n6698\n6699\n6700\n6701\n6702\n6703\n6704\n6705\n6706\n6707\n6708\n526\n6709\n527\n6710\n6711\n6712\n6713\n6714\n6715\n6716\n6717\n6718\n6719\n6720\n6721\n6722\n6723\n6724\n6725\n6726\n6727\n528\n6728\n6729\n529\n6730\n6731\n6732\n6733\n6734\n6735\n6736\n6737\n6738\n6739\n6740\n6741\n6742\n6743\n6744\n6745\n6746\n6747\n6748\n6749\n6750\n6751\n6752\n6753\n6754\n6755\n530\n6756\n6757\n6758\n6759\n531\n6760\n6761\n6762\n6763\n6764\n6765\n6766\n6767\n6768\n6769\n6770\n6771\n6772\n6773\n532\n6774\n6775\n6776\n6777\n6778\n6779\n6780\n6781\n6782\n6783\n6784\n6785\n6786\n6787\n6788\n6789\n6790\n6791\n6792\n6793\n6794\n6795\n6796\n6797\n6798\n6799\n6800\n6801\n6802\n6803\n6804\n6805\n533\n6806\n534\n6807\n535\n6808\n6809\n6810\n6811\n6812\n6813\n6814\n6815\n6816\n6817\n6818\n6819\n6820\n6821\n6822\n6823\n6824\n6825\n6826\n6827\n6828\n6829\n6830\n6831\n6832\n6833\n6834\n6835\n6836\n6837\n6838\n6839\n6840\n6841\n6842\n536\n6843\n6844\n6845\n537\n6846\n6847\n6848\n6849\n6850\n6851\n6852\n6853\n6854\n538\n6855\n6856\n6857\n6858\n6859\n6860\n6861\n6862\n6863\n6864\n6865\n539\n6866\n6867\n6868\n6869\n6870\n6871\n6872\n6873\n6874\n6875\n6876\n6877\n6878\n6879\n6880\n6881\n6882\n6883\n6884\n6885\n6886\n6887\n6888\n6889\n6890\n6891\n6892\n6893\n6894\n6895\n6896\n6897\n6898\n
6899\n6900\n6901\n6902\n6903\n6904\n6905\n6906\n6907\n6908\n6909\n6910\n6911\n6912\n6913\n6914\n6915\n6916\n6917\n6918\n6919\n6920\n6921\n6922\n6923\n6924\n6925\n6926\n6927\n6928\n6929\n6930\n6931\n6932\n6933\n6934\n6935\n6936\n6937\n6938\n540\n6939\n6940\n6941\n6942\n6943\n6944\n6945\n6946\n6947\n6948\n6949\n6950\n6951\n6952\n6953\n6954\n6955\n6956\n6957\n6958\n6959\n6960\n6961\n6962\n6963\n6964\n6965\n6966\n6967\n6968\n6969\n541\n6970\n6971\n6972\n6973\n6974\n542\n6975\n6976\n6977\n6978\n6979\n6980\n6981\n6982\n6983\n6984\n6985\n6986\n6987\n6988\n6989\n6990\n6991\n6992\n6993\n6994\n6995\n6996\n6997\n543\n6998\n6999\n7000\n7001\n7002\n7003\n7004\n7005\n7006\n7007\n7008\n7009\n7010\n7011\n7012\n7013\n7014\n7015\n7016\n544\n7017\n7018\n7019\n7020\n7021\n7022\n7023\n7024\n7025\n7026\n7027\n7028\n7029\n7030\n7031\n7032\n7033\n7034\n7035\n7036\n7037\n7038\n7039\n7040\n7041\n7042\n7043\n7044\n7045\n7046\n7047\n7048\n7049\n7050\n7051\n7052\n7053\n7054\n7055\n7056\n7057\n7058\n7059\n7060\n7061\n7062\n545\n7063\n7064\n546\n7065\n7066\n7067\n547\n7068\n7069\n7070\n7071\n7072\n7073\n7074\n7075\n7076\n7077\n7078\n7079\n7080\n7081\n7082\n7083\n7084\n7085\n7086\n7087\n7088\n7089\n7090\n7091\n7092\n7093\n7094\n7095\n7096\n7097\n7098\n7099\n7100\n7101\n7102\n7103\n7104\n7105\n7106\n7107\n7108\n7109\n7110\n7111\n7112\n7113\n7114\n7115\n7116\n7117\n7118\n7119\n7120\n7121\n7122\n7123\n7124\n7125\n7126\n7127\n7128\n7129\n548\n7130\n7131\n7132\n549\n7133\n7134\n7135\n7136\n7137\n7138\n7139\n7140\n7141\n7142\n7143\n7144\n7145\n7146\n7147\n7148\n7149\n7150\n7151\n7152\n7153\n7154\n7155\n7156\n550\n7157\n7158\n7159\n7160\n7161\n7162\n7163\n7164\n7165\n7166\n7167\n7168\n7169\n7170\n7171\n7172\n7173\n7174\n7175\n7176\n7177\n7178\n7179\n7180\n7181\n7182\n7183\n7184\n7185\n7186\n7187\n7188\n7189\n7190\n7191\n7192\n7193\n7194\n7195\n7196\n7197\n7198\n7199\n7200\n551\n7201\n7202\n7203\n7204\n7205\n7206\n7207\n7208\n7209\n7210\n7211\n7212\n7213\n7214\n7215\n7216\n7217\n7218\n7219\n7220\n7221\n72
22\n7223\n7224\n7225\n7226\n7227\n7228\n7229\n7230\n7231\n7232\n7233\n7234\n7235\n7236\n7237\n7238\n7239\n7240\n7241\n7242\n7243\n7244\n7245\n552\n7246\n7247\n7248\n7249\n7250\n7251\n7252\n7253\n7254\n7255\n7256\n7257\n7258\n7259\n7260\n7261\n7262\n7263\n7264\n7265\n7266\n7267\n7268\n7269\n7270\n7271\n7272\n7273\n7274\n7275\n7276\n7277\n7278\n7279\n7280\n7281\n7282\n7283\n7284\n7285\n7286\n7287\n7288\n7289\n7290\n7291\n7292\n7293\n7294\n7295\n7296\n553\n7297\n7298\n7299\n7300\n7301\n7302\n7303\n7304\n7305\n7306\n7307\n7308\n7309\n7310\n7311\n7312\n7313\n7314\n7315\n7316\n7317\n7318\n7319\n7320\n7321\n7322\n7323\n7324\n7325\n554\n7326\n7327\n7328\n7329\n555\n7330\n7331\n7332\n7333\n556\n7334\n7335\n7336\n7337\n7338\n7339\n7340\n7341\n7342\n7343\n7344\n7345\n7346\n7347\n7348\n7349\n7350\n7351\n7352\n7353\n7354\n7355\n7356\n7357\n7358\n7359\n7360\n7361\n7362\n557\n7363\n7364\n7365\n7366\n7367\n7368\n7369\n7370\n7371\n7372\n7373\n7374\n7375\n7376\n7377\n7378\n7379\n7380\n7381\n7382\n7383\n7384\n7385\n7386\n7387\n7388\n7389\n7390\n7391\n7392\n7393\n7394\n7395\n7396\n7397\n7398\n7399\n7400\n7401\n7402\n7403\n7404\n7405\n7406\n7407\n7408\n7409\n7410\n7411\n7412\n7413\n7414\n7415\n7416\n7417\n7418\n7419\n7420\n7421\n7422\n558\n7423\n7424\n7425\n7426\n7427\n7428\n7429\n7430\n7431\n7432\n7433\n7434\n7435\n7436\n7437\n7438\n7439\n7440\n559\n7441\n7442\n7443\n7444\n7445\n7446\n7447\n7448\n560\n7449\n7450\n7451\n7452\n7453\n7454\n7455\n7456\n7457\n7458\n7459\n7460\n7461\n7462\n7463\n7464\n7465\n7466\n7467\n7468\n7469\n7470\n7471\n7472\n7473\n7474\n7475\n7476\n561\n7477\n7478\n7479\n7480\n7481\n7482\n7483\n7484\n7485\n7486\n562\n563\n7487\n564\n7488\n7489\n7490\n7491\n7492\n7493\n7494\n7495\n7496\n7497\n7498\n7499\n7500\n7501\n7502\n7503\n7504\n7505\n7506\n7507\n565\n7508\n7509\n7510\n7511\n566\n7512\n7513\n7514\n7515\n7516\n7517\n7518\n7519\n7520\n7521\n7522\n7523\n7524\n7525\n7526\n7527\n7528\n7529\n7530\n567\n7531\n7532\n7533\n7534\n7535\n7536\n7537\n7538\n7539\n7540\n7541\n75
42\n568\n7543\n7544\n7545\n7546\n7547\n7548\n7549\n7550\n7551\n7552\n7553\n7554\n7555\n7556\n7557\n7558\n7559\n7560\n7561\n7562\n7563\n7564\n7565\n7566\n7567\n7568\n7569\n7570\n7571\n7572\n7573\n7574\n7575\n7576\n7577\n7578\n7579\n7580\n7581\n7582\n7583\n7584\n7585\n7586\n7587\n7588\n7589\n7590\n7591\n7592\n7593\n7594\n7595\n569\n7596\n7597\n7598\n7599\n7600\n7601\n7602\n7603\n7604\n7605\n7606\n7607\n7608\n7609\n7610\n7611\n7612\n7613\n7614\n7615\n7616\n7617\n7618\n7619\n7620\n7621\n7622\n7623\n7624\n7625\n7626\n7627\n570\n7628\n7629\n7630\n7631\n7632\n571\n7633\n7634\n7635\n7636\n7637\n7638\n7639\n7640\n7641\n7642\n7643\n7644\n7645\n7646\n7647\n7648\n7649\n7650\n7651\n7652\n7653\n7654\n7655\n7656\n7657\n7658\n7659\n7660\n7661\n7662\n7663\n7664\n7665\n7666\n7667\n7668\n7669\n7670\n7671\n7672\n7673\n7674\n7675\n7676\n7677\n7678\n7679\n7680\n7681\n7682\n7683\n7684\n7685\n7686\n7687\n7688\n7689\n7690\n7691\n7692\n7693\n7694\n7695\n7696\n7697\n7698\n7699\n7700\n7701\n7702\n7703\n7704\n7705\n7706\n7707\n572\n7708\n7709\n573\n7710\n7711\n574\n575\n7712\n7713\n7714\n7715\n7716\n576\n577\n7717\n7718\n7719\n7720\n7721\n7722\n7723\n7724\n7725\n7726\n578\n7727\n7728\n7729\n7730\n7731\n7732\n7733\n7734\n7735\n7736\n7737\n7738\n579\n7739\n7740\n7741\n7742\n7743\n7744\n7745\n7746\n7747\n7748\n7749\n7750\n7751\n7752\n7753\n7754\n7755\n7756\n7757\n7758\n7759\n7760\n580\n7761\n7762\n7763\n7764\n581\n7765\n7766\n7767\n7768\n7769\n7770\n7771\n582\n7772\n7773\n7774\n7775\n7776\n7777\n7778\n7779\n7780\n7781\n7782\n7783\n7784\n7785\n7786\n7787\n7788\n7789\n7790\n7791\n7792\n7793\n7794\n7795\n7796\n7797\n7798\n583\n7799\n7800\n7801\n7802\n7803\n7804\n7805\n7806\n7807\n7808\n7809\n7810\n7811\n7812\n7813\n7814\n7815\n7816\n7817\n7818\n7819\n7820\n7821\n7822\n7823\n7824\n7825\n7826\n7827\n7828\n7829\n7830\n7831\n7832\n7833\n7834\n7835\n7836\n7837\n7838\n7839\n7840\n7841\n7842\n584\n585\n7843\n7844\n7845\n7846\n7847\n7848\n586\n7849\n7850\n7851\n7852\n7853\n7854\n7855\n7856\n7857\n587\n7858\n
7859\n7860\n7861\n588\n7862\n7863\n7864\n7865\n589\n7866\n7867\n7868\n7869\n7870\n7871\n7872\n7873\n7874\n7875\n7876\n590\n7877\n591\n7878\n7879\n7880\n7881\n7882\n7883\n7884\n7885\n7886\n7887\n7888\n7889\n7890\n7891\n7892\n7893\n7894\n7895\n7896\n7897\n7898\n7899\n7900\n7901\n7902\n7903\n7904\n592\n7905\n7906\n7907\n7908\n593\n7909\n7910\n7911\n594\n7912\n7913\n7914\n7915\n7916\n7917\n7918\n595\n7919\n7920\n7921\n7922\n7923\n7924\n7925\n7926\n596\n7927\n7928\n7929\n7930\n7931\n7932\n7933\n7934\n7935\n7936\n7937\n7938\n7939\n7940\n7941\n7942\n7943\n7944\n7945\n7946\n7947\n7948\n7949\n7950\n7951\n7952\n7953\n7954\n7955\n7956\n7957\n7958\n7959\n7960\n7961\n7962\n7963\n7964\n7965\n7966\n7967\n7968\n7969\n7970\n7971\n7972\n7973\n7974\n7975\n7976\n7977\n7978\n7979\n7980\n7981\n7982\n7983\n7984\n7985\n7986\n7987\n7988\n7989\n7990\n7991\n7992\n7993\n7994\n7995\n7996\n7997\n7998\n7999\n8000\n8001\n8002\n8003\n8004\n8005\n8006\n8007\n8008\n8009\n8010\n8011\n8012\n8013\n8014\n8015\n8016\n8017\n8018\n8019\n8020\n8021\n8022\n8023\n8024\n8025\n8026\n8027\n8028\n8029\n8030\n8031\n8032\n8033\n8034\n8035\n8036\n8037\n8038\n8039\n8040\n8041\n8042\n8043\n8044\n8045\n8046\n8047\n8048\n8049\n8050\n8051\n8052\n8053\n597\n8054\n8055\n8056\n8057\n8058\n8059\n8060\n8061\n8062\n598\n8063\n8064\n599\n8065\n8066\n8067\n8068\n8069\n8070\n8071\n600\n8072\n8073\n8074\n8075\n8076\n8077\n601\n8078\n8079\n8080\n8081\n8082\n602\n8083\n8084\n8085\n8086\n8087\n8088\n8089\n8090\n8091\n8092\n603\n8093\n8094\n8095\n8096\n8097\n8098\n8099\n8100\n8101\n8102\n8103\n8104\n8105\n8106\n8107\n8108\n8109\n8110\n8111\n8112\n8113\n8114\n8115\n8116\n8117\n8118\n8119\n8120\n8121\n8122\n8123\n8124\n8125\n604\n8126\n8127\n8128\n8129\n8130\n8131\n8132\n8133\n8134\n8135\n8136\n8137\n8138\n8139\n8140\n8141\n8142\n8143\n8144\n8145\n8146\n8147\n8148\n8149\n8150\n8151\n8152\n8153\n8154\n8155\n8156\n8157\n8158\n8159\n8160\n8161\n8162\n8163\n8164\n8165\n8166\n8167\n8168\n8169\n8170\n8171\n8172\n8173\n8174\n8175\n8176\n8177\n8
178\n8179\n8180\n8181\n8182\n8183\n8184\n8185\n8186\n8187\n8188\n8189\n8190\n8191\n8192\n8193\n8194\n8195\n8196\n8197\n8198\n8199\n8200\n8201\n8202\n8203\n8204\n8205\n8206\n8207\n8208\n8209\n8210\n8211\n8212\n8213\n8214\n8215\n8216\n8217\n8218\n8219\n8220\n8221\n8222\n8223\n8224\n8225\n8226\n8227\n8228\n8229\n8230\n8231\n8232\n8233\n8234\n8235\n8236\n8237\n8238\n8239\n8240\n8241\n8242\n8243\n8244\n8245\n8246\n8247\n8248\n8249\n8250\n8251\n8252\n8253\n8254\n8255\n8256\n8257\n8258\n8259\n8260\n8261\n8262\n8263\n8264\n8265\n605\n8266\n606\n8267\n8268\n8269\n8270\n8271\n8272\n8273\n8274\n8275\n8276\n8277\n8278\n8279\n8280\n8281\n8282\n8283\n8284\n8285\n8286\n8287\n607\n8288\n8289\n8290\n8291\n8292\n8293\n8294\n8295\n8296\n8297\n8298\n8299\n8300\n8301\n8302\n8303\n8304\n8305\n608\n609\n8306\n8307\n8308\n8309\n610\n8310\n8311\n8312\n8313\n8314\n8315\n8316\n8317\n8318\n8319\n8320\n8321\n8322\n611\n612\n8323\n8324\n8325\n8326\n8327\n8328\n8329\n8330\n8331\n8332\n8333\n8334\n8335\n613\n8336\n8337\n8338\n8339\n8340\n8341\n8342\n8343\n8344\n8345\n8346\n8347\n8348\n8349\n8350\n8351\n8352\n8353\n8354\n8355\n8356\n8357\n8358\n8359\n8360\n8361\n8362\n8363\n8364\n8365\n8366\n8367\n8368\n8369\n8370\n8371\n8372\n8373\n8374\n8375\n8376\n8377\n8378\n8379\n8380\n8381\n8382\n8383\n8384\n8385\n8386\n8387\n8388\n8389\n8390\n8391\n8392\n8393\n8394\n614\n8395\n8396\n8397\n8398\n8399\n8400\n8401\n8402\n8403\n8404\n8405\n8406\n8407\n8408\n8409\n8410\n8411\n8412\n8413\n8414\n8415\n8416\n8417\n8418\n8419\n8420\n615\n8421\n8422\n8423\n8424\n8425\n8426\n8427\n8428\n8429\n8430\n8431\n8432\n8433\n8434\n616\n8435\n8436\n8437\n8438\n8439\n8440\n8441\n8442\n8443\n8444\n8445\n8446\n8447\n617\n8448\n8449\n8450\n8451\n8452\n8453\n8454\n8455\n618\n8456\n8457\n8458\n8459\n8460\n8461\n8462\n8463\n8464\n8465\n8466\n8467\n8468\n8469\n8470\n619\n8471\n8472\n8473\n8474\n8475\n8476\n8477\n8478\n8479\n8480\n8481\n8482\n8483\n8484\n8485\n8486\n8487\n8488\n8489\n8490\n8491\n620\n8492\n8493\n8494\n8495\n8496\n8497\n8
498\n8499\n8500\n8501\n8502\n8503\n8504\n8505\n8506\n8507\n8508\n8509\n8510\n8511\n8512\n8513\n8514\n8515\n8516\n8517\n8518\n621\n8519\n8520\n8521\n8522\n8523\n8524\n8525\n8526\n8527\n8528\n8529\n8530\n8531\n8532\n8533\n8534\n8535\n8536\n8537\n8538\n8539\n8540\n8541\n622\n8542\n8543\n8544\n623\n8545\n8546\n8547\n8548\n8549\n8550\n8551\n8552\n8553\n624\n8554\n8555\n8556\n625\n8557\n8558\n8559\n8560\n8561\n8562\n8563\n8564\n8565\n8566\n8567\n8568\n8569\n8570\n8571\n626\n8572\n8573\n8574\n8575\n8576\n8577\n8578\n8579\n8580\n8581\n8582\n8583\n8584\n627\n8585\n8586\n8587\n628\n8588\n8589\n8590\n8591\n8592\n8593\n8594\n8595\n8596\n8597\n8598\n8599\n8600\n629\n8601\n8602\n8603\n8604\n8605\n8606\n8607\n8608\n8609\n8610\n8611\n630\n8612\n8613\n8614\n8615\n8616\n8617\n8618\n8619\n8620\n8621\n8622\n8623\n8624\n8625\n8626\n8627\n8628\n8629\n8630\n8631\n8632\n8633\n8634\n8635\n8636\n8637\n8638\n8639\n8640\n8641\n8642\n8643\n8644\n8645\n8646\n8647\n8648\n8649\n8650\n8651\n8652\n8653\n8654\n8655\n8656\n631\n632\n8657\n8658\n8659\n8660\n8661\n633\n8662\n8663\n8664\n8665\n8666\n8667\n8668\n8669\n8670\n8671\n8672\n8673\n8674\n8675\n8676\n8677\n8678\n8679\n8680\n8681\n8682\n8683\n634\n8684\n8685\n8686\n8687\n8688\n8689\n8690\n8691\n8692\n8693\n8694\n8695\n8696\n8697\n8698\n8699\n8700\n8701\n8702\n8703\n8704\n8705\n8706\n8707\n8708\n8709\n8710\n8711\n8712\n8713\n8714\n8715\n8716\n635\n8717\n8718\n8719\n8720\n8721\n8722\n8723\n8724\n8725\n8726\n8727\n8728\n8729\n8730\n8731\n636\n8732\n8733\n637\n8734\n8735\n8736\n638\n639\n8737\n8738\n8739\n8740\n8741\n8742\n8743\n8744\n8745\n8746\n8747\n8748\n8749\n8750\n8751\n8752\n8753\n8754\n8755\n8756\n8757\n8758\n8759\n8760\n640\n8761\n8762\n8763\n8764\n8765\n8766\n8767\n8768\n8769\n8770\n8771\n8772\n8773\n8774\n641\n8775\n8776\n642\n8777\n8778\n8779\n8780\n8781\n8782\n8783\n8784\n8785\n8786\n8787\n8788\n8789\n8790\n8791\n8792\n643\n8793\n8794\n8795\n8796\n8797\n8798\n8799\n8800\n8801\n8802\n8803\n8804\n8805\n8806\n8807\n8808\n8809\n8810\n8811\n88
12\n8813\n8814\n8815\n644\n8816\n8817\n8818\n8819\n8820\n8821\n8822\n8823\n8824\n8825\n8826\n8827\n8828\n8829\n8830\n8831\n645\n646\n8832\n8833\n8834\n647\n8835\n8836\n8837\n8838\n8839\n8840\n8841\n8842\n8843\n8844\n8845\n8846\n8847\n8848\n8849\n648\n8850\n649\n8851\n8852\n8853\n8854\n8855\n8856\n8857\n8858\n8859\n8860\n8861\n8862\n8863\n8864\n8865\n8866\n8867\n8868\n8869\n8870\n8871\n8872\n8873\n8874\n8875\n8876\n8877\n8878\n8879\n8880\n8881\n8882\n8883\n8884\n8885\n8886\n8887\n8888\n650\n8889\n8890\n8891\n651\n8892\n8893\n8894\n8895\n8896\n8897\n8898\n8899\n652\n8900\n8901\n653\n8902\n8903\n8904\n8905\n8906\n8907\n8908\n8909\n8910\n8911\n8912\n8913\n8914\n8915\n8916\n8917\n8918\n8919\n8920\n8921\n8922\n8923\n8924\n8925\n654\n8926\n8927\n8928\n655\n8929\n656\n8930\n8931\n8932\n8933\n8934\n8935\n8936\n657\n8937\n8938\n8939\n658\n8940\n8941\n659\n8942\n8943\n8944\n8945\n660\n8946\n8947\n8948\n661\n662\n8949\n8950\n8951\n8952\n8953\n8954\n8955\n8956\n8957\n8958\n663\n8959\n8960\n8961\n8962\n664\n8963\n8964\n8965\n8966\n8967\n8968\n8969\n8970\n8971\n8972\n8973\n665\n8974\n8975\n8976\n8977\n8978\n8979\n8980\n8981\n8982\n666\n667\n8983\n8984\n668\n669\n8985\n8986\n8987\n8988\n8989\n8990\n8991\n8992\n8993\n8994\n8995\n8996\n670\n8997\n8998\n8999\n9000\n671\n672\n673\n9001\n674\n9002\n9003\n9004\n9005\n9006\n9007\n9008\n675\n9009\n9010\n9011\n9012\n9013\n9014\n9015\n9016\n9017\n9018\n9019\n9020\n9021\n9022\n9023\n9024\n9025\n9026\n9027\n9028\n9029\n9030\n9031\n9032\n9033\n9034\n9035\n9036\n9037\n9038\n9039\n9040\n9041\n9042\n9043\n676\n9044\n9045\n677\n9046\n9047\n9048\n9049\n9050\n9051\n9052\n9053\n9054\n9055\n9056\n9057\n9058\n9059\n9060\n9061\n9062\n9063\n9064\n9065\n9066\n9067\n9068\n9069\n9070\n9071\n678\n9072\n9073\n679\n9074\n9075\n9076\n9077\n9078\n9079\n9080\n9081\n9082\n9083\n9084\n9085\n9086\n9087\n9088\n9089\n9090\n9091\n9092\n9093\n9094\n9095\n9096\n9097\n9098\n9099\n9100\n9101\n9102\n9103\n9104\n9105\n9106\n9107\n9108\n9109\n9110\n9111\n9112\n9113\n9114\n9115
\n9116\n9117\n9118\n9119\n9120\n680\n9121\n9122\n9123\n9124\n9125\n9126\n9127\n9128\n9129\n9130\n9131\n9132\n9133\n681\n9134\n9135\n9136\n9137\n9138\n9139\n9140\n9141\n9142\n9143\n9144\n9145\n9146\n9147\n9148\n682\n9149\n9150\n9151\n683\n9152\n9153\n9154\n9155\n9156\n9157\n684\n9158\n9159\n685\n9160\n9161\n9162\n9163\n9164\n9165\n9166\n9167\n9168\n9169\n9170\n9171\n686\n9172\n9173\n9174\n9175\n9176\n9177\n9178\n9179\n9180\n9181\n9182\n9183\n9184\n9185\n9186\n9187\n9188\n9189\n9190\n9191\n9192\n9193\n9194\n9195\n9196\n9197\n9198\n9199\n9200\n9201\n9202\n9203\n9204\n9205\n9206\n9207\n9208\n9209\n687\n9210\n9211\n9212\n9213\n9214\n9215\n9216\n9217\n9218\n9219\n9220\n9221\n9222\n9223\n9224\n9225\n9226\n9227\n688\n9228\n9229\n9230\n9231\n9232\n9233\n9234\n9235\n9236\n9237\n9238\n9239\n9240\n9241\n9242\n9243\n9244\n9245\n9246\n9247\n9248\n9249\n9250\n9251\n9252\n9253\n9254\n9255\n9256\n9257\n9258\n9259\n9260\n9261\n689\n9262\n9263\n690\n9264\n9265\n9266\n9267\n691\n9268\n9269\n9270\n9271\n9272\n9273\n9274\n9275\n9276\n9277\n692\n9278\n9279\n9280\n9281\n9282\n693\n9283\n9284\n9285\n9286\n694\n9287\n695\n9288\n9289\n9290\n9291\n9292\n696\n9293\n697\n9294\n698\n9295\n9296\n9297\n9298\n9299\n9300\n9301\n9302\n9303\n9304\n9305\n9306\n9307\n9308\n9309\n9310\n9311\n9312\n9313\n9314\n9315\n9316\n9317\n9318\n9319\n699\n9320\n9321\n9322\n9323\n9324\n9325\n9326\n9327\n9328\n9329\n9330\n9331\n9332\n9333\n9334\n9335\n9336\n9337\n9338\n700\n9339\n9340\n701\n702\n9341\n9342\n9343\n9344\n9345\n9346\n9347\n9348\n9349\n9350\n9351\n9352\n703\n704\n9353\n9354\n9355\n9356\n9357\n9358\n9359\n9360\n9361\n9362\n9363\n9364\n705\n9365\n9366\n9367\n9368\n9369\n9370\n9371\n9372\n9373\n9374\n9375\n9376\n9377\n9378\n9379\n9380\n706\n9381\n9382\n9383\n9384\n9385\n9386\n9387\n9388\n9389\n9390\n707\n9391\n9392\n9393\n9394\n9395\n9396\n9397\n708\n9398\n9399\n9400\n9401\n9402\n9403\n9404\n9405\n9406\n9407\n9408\n9409\n9410\n9411\n9412\n9413\n9414\n9415\n9416\n9417\n9418\n9419\n9420\n709\n710\n9421\n9422\n9
423\n9424\n9425\n9426\n9427\n9428\n9429\n9430\n9431\n9432\n9433\n9434\n9435\n9436\n9437\n9438\n9439\n9440\n9441\n9442\n9443\n711\n9444\n9445\n9446\n9447\n9448\n9449\n9450\n9451\n9452\n9453\n9454\n9455\n9456\n9457\n9458\n712\n9459\n9460\n9461\n9462\n9463\n9464\n9465\n9466\n9467\n9468\n9469\n713\n9470\n9471\n9472\n9473\n9474\n9475\n9476\n9477\n9478\n9479\n9480\n9481\n9482\n9483\n714\n715\n9484\n716\n9485\n9486\n717\n9487\n9488\n9489\n9490\n9491\n9492\n718\n9493\n9494\n9495\n9496\n9497\n9498\n9499\n9500\n719\n9501\n9502\n9503\n720\n9504\n9505\n9506\n721\n9507\n9508\n9509\n9510\n9511\n9512\n9513\n9514\n9515\n9516\n9517\n9518\n9519\n9520\n9521\n9522\n9523\n9524\n9525\n9526\n722\n9527\n9528\n9529\n9530\n9531\n9532\n9533\n9534\n9535\n723\n9536\n9537\n9538\n9539\n9540\n9541\n9542\n9543\n9544\n9545\n724\n9546\n9547\n9548\n9549\n9550\n9551\n9552\n725\n9553\n9554\n9555\n9556\n9557\n9558\n9559\n9560\n9561\n9562\n9563\n9564\n9565\n9566\n9567\n726\n9568\n9569\n9570\n9571\n727\n9572\n9573\n9574\n9575\n9576\n9577\n9578\n9579\n9580\n728\n9581\n9582\n9583\n9584\n9585\n9586\n9587\n9588\n9589\n9590\n9591\n729\n9592\n9593\n9594\n9595\n9596\n9597\n9598\n9599\n9600\n9601\n9602\n9603\n9604\n9605\n9606\n9607\n9608\n9609\n9610\n9611\n9612\n9613\n730\n9614\n9615\n9616\n9617\n9618\n9619\n731\n9620\n9621\n9622\n9623\n9624\n9625\n9626\n9627\n9628\n9629\n9630\n9631\n9632\n9633\n9634\n9635\n9636\n9637\n9638\n9639\n9640\n9641\n9642\n732\n733\n9643\n9644\n9645\n9646\n734\n9647\n9648\n9649\n9650\n9651\n9652\n9653\n9654\n9655\n735\n9656\n9657\n9658\n9659\n9660\n9661\n9662\n9663\n736\n9664\n9665\n9666\n737\n9667\n9668\n9669\n9670\n9671\n9672\n9673\n9674\n9675\n9676\n9677\n9678\n9679\n9680\n9681\n9682\n9683\n9684\n9685\n9686\n9687\n9688\n9689\n9690\n9691\n9692\n9693\n9694\n9695\n9696\n9697\n9698\n9699\n9700\n9701\n738\n9702\n9703\n9704\n9705\n9706\n9707\n9708\n739\n9709\n9710\n9711\n9712\n9713\n9714\n9715\n9716\n9717\n9718\n9719\n9720\n9721\n9722\n740\n9723\n9724\n9725\n9726\n9727\n9728\n9729\n9730\n973
1\n9732\n741\n9733\n9734\n9735\n9736\n9737\n9738\n9739\n9740\n9741\n9742\n9743\n9744\n9745\n9746\n9747\n9748\n9749\n9750\n9751\n9752\n9753\n9754\n9755\n9756\n9757\n9758\n742\n9759\n9760\n743\n9761\n9762\n9763\n9764\n9765\n9766\n9767\n9768\n744\n745\n9769\n9770\n9771\n9772\n9773\n9774\n9775\n9776\n9777\n9778\n9779\n9780\n9781\n9782\n9783\n9784\n9785\n9786\n9787\n9788\n9789\n9790\n9791\n9792\n9793\n9794\n9795\n9796\n9797\n746\n9798\n9799\n9800\n9801\n9802\n9803\n9804\n9805\n9806\n9807\n9808\n9809\n9810\n9811\n9812\n9813\n9814\n9815\n9816\n9817\n9818\n747\n9819\n9820\n9821\n9822\n9823\n9824\n9825\n9826\n748\n9827\n9828\n9829\n9830\n9831\n9832\n9833\n9834\n9835\n9836\n9837\n9838\n9839\n9840\n9841\n9842\n9843\n9844\n9845\n9846\n9847\n9848\n9849\n9850\n9851\n9852\n9853\n9854\n9855\n9856\n9857\n749\n750\n9858\n9859\n9860\n9861\n9862\n9863\n9864\n9865\n9866\n9867\n9868\n9869\n9870\n9871\n751\n9872\n9873\n9874\n9875\n9876\n9877\n9878\n9879\n752\n9880\n9881\n9882\n9883\n9884\n753\n9885\n9886\n9887\n754\n9888\n9889\n9890\n9891\n9892\n9893\n9894\n9895\n9896\n9897\n9898\n9899\n755\n9900\n9901\n9902\n9903\n9904\n9905\n9906\n9907\n9908\n9909\n9910\n9911\n9912\n9913\n9914\n9915\n9916\n756\n9917\n9918\n9919\n9920\n9921\n9922\n9923\n9924\n9925\n9926\n9927\n9928\n9929\n9930\n9931\n9932\n9933\n9934\n9935\n9936\n9937\n9938\n9939\n9940\n9941\n9942\n9943\n9944\n9945\n9946\n9947\n9948\n9949\n9950\n9951\n9952\n9953\n9954\n9955\n9956\n9957\n9958\n9959\n9960\n9961\n9962\n9963\n9964\n9965\n9966\n9967\n9968\n9969\n9970\n9971\n9972\n9973\n9974\n9975\n9976\n9977\n9978\n9979\n9980\n9981\n757\n9982\n9983\n9984\n9985\n9986\n9987\n9988\n9989\n9990\n9991\n9992\n758\n9993\n9994\n9995\n9996\n9997\n9998\n9999\n759\n10000\n10001\n10002\n10003\n10004\n10005\n760\n10006\n10007\n10008\n10009\n10010\n10011\n10012\n10013\n10014\n10015\n761\n10016\n10017\n10018\n10019\n10020\n10021\n10022\n10023\n10024\n10025\n10026\n10027\n10028\n10029\n10030\n10031\n10032\n10033\n10034\n10035\n10036\n10037\n10038\n762\n10039\
n10040\n10041\n10042\n10043\n10044\n10045\n10046\n10047\n10048\n10049\n10050\n10051\n10052\n10053\n10054\n10055\n763\n10056\n10057\n10058\n10059\n10060\n10061\n10062\n10063\n10064\n10065\n10066\n10067\n10068\n10069\n10070\n10071\n10072\n10073\n764\n10074\n10075\n10076\n10077\n10078\n10079\n10080\n10081\n10082\n10083\n10084\n10085\n10086\n10087\n10088\n10089\n10090\n10091\n10092\n10093\n10094\n10095\n10096\n10097\n10098\n10099\n10100\n10101\n10102\n10103\n10104\n10105\n10106\n10107\n10108\n10109\n10110\n10111\n10112\n765\n10113\n10114\n10115\n10116\n10117\n10118\n10119\n10120\n10121\n10122\n10123\n10124\n10125\n10126\n10127\n10128\n10129\n10130\n10131\n10132\n10133\n10134\n10135\n10136\n10137\n10138\n10139\n10140\n10141\n10142\n10143\n10144\n10145\n10146\n10147\n10148\n10149\n10150\n10151\n10152\n10153\n10154\n10155\n10156\n766\n10157\n10158\n10159\n10160\n10161\n10162\n10163\n10164\n10165\n10166\n10167\n10168\n10169\n10170\n10171\n10172\n10173\n10174\n10175\n10176\n10177\n10178\n10179\n10180\n10181\n10182\n10183\n10184\n10185\n10186\n10187\n767\n10188\n10189\n10190\n10191\n768\n10192\n769\n10193\n10194\n10195\n10196\n10197\n10198\n770\n10199\n10200\n10201\n10202\n10203\n10204\n10205\n10206\n10207\n10208\n10209\n10210\n10211\n10212\n10213\n10214\n10215\n10216\n10217\n10218\n10219\n10220\n10221\n10222\n10223\n10224\n771\n10225\n10226\n10227\n10228\n10229\n10230\n10231\n10232\n10233\n10234\n10235\n10236\n772\n10237\n10238\n10239\n10240\n10241\n10242\n10243\n10244\n10245\n10246\n10247\n10248\n10249\n10250\n10251\n10252\n10253\n10254\n10255\n10256\n773\n10257\n10258\n10259\n10260\n10261\n10262\n10263\n10264\n774\n10265\n10266\n10267\n10268\n10269\n10270\n10271\n10272\n10273\n10274\n10275\n775\n10276\n10277\n10278\n10279\n10280\n10281\n10282\n10283\n10284\n10285\n10286\n10287\n10288\n10289\n10290\n10291\n10292\n10293\n10294\n10295\n776\n10296\n777\n10297\n10298\n778\n10299\n10300\n10301\n10302\n10303\n10304\n10305\n10306\n10307\n10308\n10309\n10310\n10311\n10312\n10313\n7
79\n10314\n10315\n780\n10316\n10317\n10318\n10319\n10320\n10321\n10322\n10323\n10324\n10325\n10326\n781\n10327\n10328\n10329\n10330\n10331\n10332\n10333\n10334\n782\n10335\n10336\n783\n10337\n10338\n784\n10339\n10340\n10341\n10342\n10343\n10344\n10345\n10346\n10347\n10348\n10349\n10350\n10351\n10352\n10353\n10354\n10355\n10356\n10357\n10358\n10359\n10360\n10361\n10362\n10363\n10364\n10365\n10366\n10367\n10368\n10369\n10370\n10371\n785\n10372\n10373\n10374\n10375\n10376\n10377\n10378\n10379\n10380\n10381\n10382\n10383\n10384\n10385\n10386\n10387\n10388\n10389\n10390\n10391\n10392\n10393\n10394\n10395\n10396\n10397\n10398\n10399\n10400\n10401\n10402\n10403\n10404\n10405\n10406\n10407\n10408\n10409\n10410\n10411\n10412\n10413\n10414\n10415\n10416\n10417\n10418\n10419\n10420\n10421\n10422\n10423\n10424\n10425\n10426\n10427\n10428\n10429\n10430\n10431\n10432\n10433\n10434\n10435\n10436\n10437\n10438\n786\n10439\n10440\n10441\n10442\n10443\n10444\n10445\n10446\n10447\n10448\n10449\n10450\n10451\n10452\n10453\n10454\n10455\n10456\n10457\n10458\n10459\n10460\n10461\n10462\n10463\n10464\n10465\n10466\n10467\n10468\n10469\n10470\n10471\n10472\n10473\n10474\n10475\n10476\n10477\n10478\n10479\n10480\n10481\n10482\n10483\n10484\n10485\n10486\n10487\n787\n10488\n10489\n10490\n10491\n10492\n10493\n10494\n10495\n10496\n10497\n10498\n10499\n10500\n10501\n10502\n10503\n10504\n10505\n10506\n10507\n10508\n10509\n10510\n10511\n10512\n10513\n10514\n788\n10515\n10516\n789\n10517\n10518\n10519\n10520\n10521\n10522\n790\n791\n10523\n10524\n10525\n10526\n10527\n10528\n10529\n10530\n10531\n10532\n10533\n10534\n10535\n10536\n10537\n10538\n792\n10539\n10540\n10541\n10542\n793\n794\n10543\n10544\n10545\n10546\n10547\n10548\n10549\n10550\n10551\n10552\n10553\n10554\n10555\n10556\n10557\n10558\n10559\n10560\n10561\n10562\n10563\n10564\n10565\n10566\n10567\n10568\n10569\n10570\n10571\n10572\n10573\n10574\n10575\n10576\n10577\n10578\n10579\n10580\n10581\n10582\n10583\n10584\n10585\n10586\n10587\n105
88\n10589\n10590\n10591\n10592\n10593\n10594\n10595\n10596\n10597\n10598\n10599\n10600\n10601\n10602\n10603\n10604\n10605\n10606\n10607\n10608\n10609\n10610\n10611\n10612\n10613\n10614\n10615\n10616\n795\n10617\n10618\n10619\n10620\n10621\n10622\n10623\n10624\n10625\n796\n10626\n10627\n10628\n10629\n10630\n10631\n10632\n10633\n10634\n10635\n10636\n10637\n10638\n10639\n10640\n10641\n10642\n10643\n10644\n10645\n10646\n10647\n10648\n10649\n10650\n10651\n10652\n797\n10653\n10654\n10655\n10656\n10657\n10658\n10659\n10660\n10661\n10662\n10663\n798\n10664\n799\n10665\n10666\n10667\n10668\n10669\n10670\n10671\n10672\n10673\n10674\n10675\n10676\n10677\n10678\n10679\n10680\n10681\n10682\n10683\n10684\n10685\n800\n10686\n10687\n10688\n10689\n10690\n10691\n10692\n10693\n10694\n10695\n10696\n10697\n10698\n10699\n10700\n10701\n10702\n10703\n10704\n10705\n10706\n10707\n10708\n10709\n10710\n10711\n10712\n10713\n10714\n10715\n10716\n801\n10717\n10718\n802\n803\n10719\n10720\n10721\n10722\n10723\n10724\n10725\n10726\n804\n10727\n805\n806\n10728\n10729\n10730\n10731\n10732\n10733\n10734\n10735\n10736\n10737\n10738\n10739\n10740\n10741\n10742\n807\n10743\n10744\n10745\n10746\n10747\n10748\n10749\n808\n10750\n10751\n10752\n10753\n10754\n10755\n10756\n10757\n10758\n10759\n10760\n10761\n10762\n10763\n809\n10764\n10765\n10766\n10767\n10768\n10769\n10770\n10771\n810\n10772\n10773\n811\n10774\n10775\n812\n10776\n10777\n10778\n10779\n10780\n10781\n10782\n10783\n10784\n10785\n10786\n10787\n10788\n10789\n10790\n10791\n10792\n10793\n10794\n10795\n10796\n10797\n813\n10798\n10799\n10800\n10801\n10802\n10803\n10804\n10805\n10806\n10807\n10808\n10809\n10810\n814\n10811\n10812\n10813\n10814\n10815\n10816\n10817\n10818\n815\n10819\n10820\n816\n10821\n10822\n10823\n10824\n10825\n10826\n10827\n10828\n10829\n10830\n10831\n10832\n10833\n10834\n10835\n10836\n10837\n10838\n10839\n10840\n10841\n10842\n10843\n10844\n10845\n10846\n10847\n10848\n10849\n10850\n10851\n10852\n10853\n10854\n10855\n10856\n10857\n108
58\n817\n10859\n10860\n10861\n10862\n10863\n818\n10864\n10865\n10866\n10867\n10868\n10869\n10870\n10871\n10872\n10873\n10874\n10875\n10876\n10877\n10878\n10879\n10880\n10881\n10882\n10883\n10884\n10885\n10886\n10887\n10888\n10889\n10890\n10891\n10892\n10893\n10894\n10895\n10896\n10897\n10898\n10899\n10900\n10901\n10902\n10903\n10904\n10905\n819\n10906\n10907\n10908\n10909\n10910\n10911\n10912\n10913\n10914\n10915\n10916\n10917\n10918\n10919\n10920\n10921\n10922\n10923\n10924\n10925\n10926\n10927\n10928\n10929\n10930\n10931\n10932\n10933\n10934\n10935\n10936\n10937\n10938\n10939\n10940\n10941\n10942\n10943\n10944\n10945\n10946\n10947\n10948\n10949\n10950\n10951\n10952\n10953\n10954\n820\n10955\n10956\n10957\n10958\n10959\n821\n822\n10960\n10961\n10962\n10963\n10964\n10965\n10966\n10967\n10968\n10969\n10970\n10971\n10972\n10973\n10974\n10975\n10976\n10977\n10978\n10979\n10980\n10981\n10982\n823\n10983\n10984\n10985\n10986\n10987\n10988\n10989\n10990\n10991\n10992\n10993\n10994\n10995\n10996\n10997\n10998\n10999\n11000\n11001\n11002\n11003\n11004\n11005\n11006\n11007\n11008\n11009\n11010\n11011\n824\n11012\n11013\n825\n11014\n11015\n11016\n11017\n11018\n11019\n11020\n826\n11021\n11022\n11023\n11024\n11025\n11026\n11027\n11028\n11029\n11030\n827\n11031\n11032\n11033\n11034\n11035\n11036\n11037\n11038\n11039\n11040\n11041\n828\n11042\n11043\n11044\n11045\n11046\n11047\n11048\n11049\n11050\n829\n11051\n11052\n830\n11053\n11054\n11055\n11056\n11057\n11058\n11059\n11060\n11061\n11062\n11063\n11064\n11065\n11066\n11067\n11068\n11069\n11070\n11071\n11072\n11073\n11074\n11075\n831\n11076\n11077\n11078\n11079\n11080\n832\n11081\n11082\n11083\n11084\n11085\n11086\n833\n11087\n11088\n11089\n11090\n11091\n11092\n11093\n11094\n11095\n11096\n11097\n11098\n11099\n11100\n834\n11101\n11102\n11103\n11104\n11105\n11106\n11107\n11108\n11109\n11110\n835\n11111\n11112\n11113\n836\n837\n11114\n11115\n11116\n11117\n838\n11118\n11119\n11120\n11121\n11122\n11123\n11124\n11125\n11126\n11127\n111
28\n11129\n11130\n11131\n11132\n11133\n11134\n11135\n11136\n11137\n11138\n11139\n11140\n11141\n11142\n11143\n11144\n11145\n11146\n11147\n11148\n11149\n11150\n11151\n11152\n11153\n11154\n11155\n11156\n11157\n11158\n839\n11159\n11160\n11161\n840\n11162\n11163\n11164\n11165\n11166\n11167\n11168\n11169\n11170\n11171\n11172\n11173\n11174\n11175\n841\n11176\n11177\n11178\n11179\n842\n11180\n843\n11181\n844\n11182\n11183\n11184\n11185\n11186\n11187\n11188\n11189\n11190\n11191\n11192\n11193\n11194\n11195\n11196\n11197\n11198\n845\n11199\n11200\n11201\n11202\n11203\n11204\n11205\n11206\n11207\n846\n11208\n11209\n11210\n11211\n11212\n11213\n11214\n11215\n11216\n11217\n11218\n11219\n11220\n11221\n11222\n11223\n11224\n11225\n11226\n11227\n11228\n11229\n11230\n11231\n11232\n11233\n11234\n11235\n11236\n11237\n11238\n11239\n11240\n11241\n11242\n11243\n11244\n11245\n11246\n847\n11247\n11248\n11249\n11250\n11251\n11252\n11253\n11254\n11255\n11256\n11257\n11258\n11259\n11260\n848\n11261\n11262\n11263\n11264\n11265\n11266\n11267\n11268\n11269\n11270\n11271\n11272\n11273\n11274\n11275\n11276\n11277\n11278\n11279\n11280\n11281\n11282\n11283\n11284\n11285\n11286\n849\n11287\n11288\n11289\n11290\n11291\n11292\n11293\n11294\n11295\n11296\n11297\n11298\n11299\n11300\n11301\n11302\n11303\n11304\n11305\n11306\n11307\n11308\n11309\n11310\n11311\n11312\n11313\n11314\n11315\n11316\n851\n11317\n11318\n11319\n11320\n11321\n11322\n11323\n11324\n11325\n11326\n11327\n11328\n11329\n11330\n11331\n11332\n11333\n852\n11334\n11335\n11336\n11337\n11338\n11339\n11340\n11341\n11342\n11343\n11344\n11345\n11346\n11347\n11348\n11349\n11350\n11351\n11352\n11353\n11354\n11355\n11356\n11357\n11358\n11359\n11360\n11361\n11362\n11363\n11364\n11365\n11366\n11367\n853\n11368\n854\n11369\n11370\n11371\n11372\n11373\n11374\n11375\n11376\n11377\n11378\n11379\n11380\n11381\n11382\n11383\n11384\n11385\n11386\n855\n11387\n11388\n11389\n11390\n11391\n11392\n11393\n11394\n11395\n11396\n11397\n11398\n856\n11399\n11400\n11401\n
857\n11402\n11403\n11404\n11405\n11406\n11407\n11408\n11409\n11410\n11411\n11412\n11413\n11414\n11415\n11416\n11417\n11418\n11419\n11420\n858\n11421\n11422\n11423\n11424\n11425\n11426\n11427\n11428\n11429\n11430\n11431\n11432\n11433\n11434\n11435\n11436\n11437\n11438\n11439\n11440\n11441\n11442\n11443\n11444\n11445\n11446\n859\n11447\n11448\n11449\n11450\n860\n11451\n11452\n11453\n11454\n11455\n11456\n11457\n11458\n11459\n11460\n11461\n11462\n11463\n11464\n11465\n11466\n11467\n11468\n11469\n861\n11470\n11471\n11472\n11473\n11474\n11475\n11476\n11477\n11478\n11479\n11480\n11481\n11482\n11483\n11484\n11485\n11486\n11487\n11488\n11489\n11490\n11491\n11492\n11493\n11494\n11495\n11496\n11497\n11498\n11499\n11500\n11501\n11502\n11503\n11504\n862\n11505\n11506\n11507\n11508\n11509\n11510\n11511\n11512\n11513\n863\n11514\n11515\n11516\n11517\n11518\n11519\n11520\n11521\n11522\n11523\n11524\n864\n11525\n11526\n865\n11527\n11528\n11529\n11530\n11531\n11532\n11533\n11534\n866\n11535\n11536\n11537\n11538\n11539\n867\n11540\n11541\n11542\n11543\n11544\n11545\n11546\n11547\n11548\n11549\n11550\n11551\n11552\n11553\n11554\n11555\n11556\n11557\n11558\n11559\n11560\n11561\n11562\n11563\n11564\n11565\n11566\n11567\n11568\n868\n11569\n11570\n11571\n11572\n11573\n11574\n11575\n11576\n11577\n11578\n11579\n869\n11580\n11581\n11582\n11583\n11584\n11585\n11586\n11587\n11588\n11589\n11590\n11591\n11592\n11593\n11594\n11595\n870\n11596\n11597\n871\n11598\n11599\n11600\n872\n11601\n11602\n11603\n11604\n873\n11605\n11606\n11607\n11608\n874\n875\n11609\n11610\n11611\n11612\n11613\n11614\n11615\n11616\n11617\n11618\n11619\n11620\n11621\n11622\n11623\n11624\n11625\n11626\n11627\n11628\n11629\n11630\n876\n11631\n11632\n11633\n11634\n11635\n11636\n11637\n11638\n11639\n11640\n11641\n11642\n11643\n11644\n11645\n11646\n11647\n11648\n11649\n11650\n11651\n11652\n11653\n11654\n11655\n11656\n11657\n11658\n877\n11659\n11660\n11661\n11662\n11663\n11664\n11665\n11666\n11667\n11668\n11669\n11670\n11671\n11672
\n11673\n11674\n11675\n11676\n11677\n11678\n11679\n878\n11680\n11681\n11682\n11683\n11684\n11685\n11686\n879\n11687\n11688\n11689\n11690\n11691\n11692\n11693\n11694\n880\n11695\n11696\n11697\n11698\n11699\n11700\n11701\n11702\n881\n11703\n11704\n11705\n11706\n11707\n11708\n11709\n11710\n11711\n882\n11712\n11713\n11714\n11715\n11716\n11717\n11718\n11719\n11720\n11721\n11722\n11723\n11724\n11725\n11726\n11727\n883\n884\n11728\n11729\n11730\n11731\n11732\n11733\n885\n11734\n886\n11735\n11736\n11737\n11738\n11739\n11740\n11741\n11742\n11743\n11744\n11745\n11746\n11747\n11748\n11749\n11750\n11751\n11752\n887\n11753\n11754\n888\n11755\n11756\n11757\n11758\n11759\n11760\n11761\n11762\n11763\n11764\n11765\n11766\n11767\n11768\n11769\n11770\n11771\n11772\n11773\n11774\n889\n11775\n11776\n11777\n11778\n11779\n11780\n11781\n11782\n11783\n890\n11784\n11785\n11786\n11787\n11788\n11789\n11790\n11791\n11792\n11793\n11794\n11795\n11796\n11797\n891\n11798\n11799\n11800\n11801\n11802\n11803\n11804\n11805\n11806\n11807\n11808\n11809\n11810\n11811\n11812\n11813\n11814\n11815\n11816\n892\n893\n11817\n11818\n11819\n11820\n11821\n894\n11822\n11823\n11824\n11825\n895\n11826\n11827\n11828\n11829\n896\n11830\n11831\n897\n11832\n11833\n11834\n11835\n11836\n11837\n11838\n11839\n11840\n11841\n11842\n11843\n11844\n898\n11845\n11846\n11847\n11848\n11849\n11850\n11851\n11852\n11853\n11854\n11855\n11856\n11857\n11858\n11859\n11860\n899\n11861\n11862\n11863\n11864\n11865\n11866\n11867\n11868\n11869\n11870\n900\n11871\n11872\n11873\n11874\n11875\n11876\n11877\n11878\n11879\n11880\n11881\n11882\n11883\n11884\n11885\n11886\n11887\n11888\n11889\n11890\n11891\n11892\n11893\n11894\n11895\n11896\n11897\n11898\n11899\n11900\n11901\n11902\n11903\n11904\n11905\n11906\n11907\n11908\n11909\n11910\n11911\n11912\n11913\n11914\n11915\n11916\n11917\n11918\n11919\n11920\n11921\n11922\n11923\n11924\n11925\n11926\n11927\n11928\n11929\n11930\n11931\n11932\n11933\n11934\n11935\n901\n11936\n902\n11937\n11938\n11939\n1194
0\n11941\n11942\n11943\n11944\n11945\n11946\n11947\n11948\n11949\n11950\n11951\n11952\n903\n11953\n11954\n11955\n11956\n11957\n11958\n11959\n11960\n11961\n11962\n11963\n11964\n11965\n11966\n11967\n11968\n11969\n11970\n11971\n904\n11972\n905\n11973\n11974\n11975\n11976\n11977\n906\n11978\n11979\n11980\n11981\n907\n11982\n11983\n11984\n11985\n11986\n11987\n908\n11988\n11989\n11990\n11991\n11992\n11993\n11994\n11995\n11996\n11997\n11998\n11999\n12000\n12001\n12002\n12003\n12004\n12005\n12006\n12007\n909\n12008\n12009\n12010\n12011\n12012\n910\n12013\n12014\n12015\n12016\n12017\n12018\n911\n12019\n12020\n12021\n12022\n12023\n12024\n12025\n12026\n12027\n12028\n12029\n12030\n12031\n912\n12032\n12033\n12034\n12035\n12036\n12037\n12038\n913\n12039\n12040\n12041\n12042\n12043\n12044\n12045\n12046\n12047\n12048\n12049\n12050\n12051\n12052\n12053\n12054\n12055\n12056\n12057\n12058\n12059\n12060\n12061\n12062\n12063\n12064\n12065\n914\n12066\n12067\n12068\n915\n12069\n12070\n12071\n12072\n12073\n12074\n12075\n12076\n12077\n12078\n12079\n12080\n12081\n12082\n12083\n12084\n12085\n12086\n12087\n12088\n12089\n12090\n12091\n12092\n12093\n12094\n12095\n12096\n12097\n12098\n12099\n12100\n12101\n12102\n12103\n12104\n12105\n12106\n12107\n12108\n12109\n12110\n12111\n12112\n12113\n12114\n12115\n12116\n12117\n12118\n12119\n12120\n12121\n12122\n12123\n12124\n12125\n12126\n12127\n12128\n12129\n12130\n12131\n12132\n12133\n12134\n12135\n12136\n12137\n12138\n12139\n12140\n12141\n12142\n12143\n12144\n12145\n12146\n12147\n12148\n12149\n12150\n12151\n12152\n12153\n12154\n12155\n12156\n12157\n12158\n12159\n12160\n12161\n12162\n12163\n12164\n12165\n12166\n12167\n12168\n12169\n12170\n12171\n12172\n12173\n12174\n12175\n12176\n12177\n12178\n12179\n12180\n12181\n12182\n12183\n12184\n12185\n12186\n12187\n12188\n12189\n12190\n12191\n12192\n12193\n12194\n12195\n12196\n12197\n12198\n12199\n12200\n12201\n12202\n12203\n12204\n12205\n12206\n12207\n12208\n12209\n12210\n12211\n12212\n12213\n12214\n12215\n12216\n
12217\n12218\n12219\n12220\n12221\n12222\n12223\n12224\n12225\n12226\n12227\n12228\n12229\n12230\n12231\n12232\n12233\n12234\n12235\n12236\n12237\n12238\n12239\n12240\n12241\n12242\n12243\n12244\n12245\n12246\n12247\n12248\n12249\n12250\n12251\n12252\n12253\n12254\n12255\n12256\n12257\n12258\n12259\n12260\n12261\n12262\n12263\n12264\n12265\n12266\n12267\n12268\n12269\n12270\n12271\n12272\n12273\n12274\n12275\n12276\n12277\n12278\n12279\n12280\n12281\n12282\n12283\n12284\n12285\n12286\n12287\n12288\n12289\n12290\n12291\n12292\n12293\n12294\n12295\n12296\n12297\n916\n12298\n12299\n12300\n12301\n12302\n12303\n12304\n12305\n12306\n12307\n12308\n12309\n12310\n12311\n917\n12312\n12313\n12314\n12315\n12316\n12317\n12318\n12319\n12320\n918\n12321\n919\n920\n12322\n12323\n12324\n12325\n921\n12326\n12327\n12328\n12329\n12330\n12331\n12332\n12333\n12334\n12335\n12336\n12337\n12338\n12339\n12340\n12341\n12342\n12343\n12344\n12345\n12346\n12347\n12348\n12349\n12350\n12351\n12352\n12353\n12354\n12355\n12356\n12357\n12358\n12359\n12360\n922\n12361\n12362\n12363\n12364\n12365\n12366\n12367\n12368\n12369\n12370\n12371\n12372\n12373\n12374\n12375\n12376\n12377\n12378\n12379\n12380\n12381\n12382\n12383\n12384\n12385\n12386\n12387\n12388\n12389\n12390\n12391\n12392\n12393\n12394\n12395\n12396\n12397\n12398\n12399\n12400\n12401\n12402\n12403\n12404\n12405\n12406\n12407\n12408\n12409\n12410\n12411\n12412\n12413\n12414\n12415\n12416\n12417\n12418\n12419\n12420\n12421\n12422\n12423\n12424\n12425\n923\n12426\n12427\n12428\n12429\n12430\n12431\n12432\n12433\n12434\n12435\n12436\n12437\n12438\n12439\n12440\n12441\n12442\n12443\n12444\n924\n12445\n12446\n12447\n925\n12448\n12449\n12450\n12451\n12452\n12453\n12454\n12455\n12456\n12457\n12458\n12459\n12460\n12461\n12462\n12463\n12464\n12465\n12466\n12467\n12468\n12469\n12470\n12471\n12472\n12473\n12474\n12475\n12476\n12477\n12478\n12479\n12480\n12481\n12482\n12483\n12484\n12485\n12486\n12487\n12488\n12489\n12490\n12491\n12492\n12493\n12494\n1249
5\n926\n12496\n12497\n12498\n12499\n12500\n12501\n12502\n12503\n12504\n12505\n12506\n12507\n12508\n12509\n12510\n12511\n12512\n12513\n12514\n12515\n12516\n12517\n12518\n12519\n12520\n12521\n12522\n12523\n12524\n12525\n12526\n12527\n12528\n12529\n12530\n12531\n12532\n12533\n12534\n12535\n12536\n12537\n12538\n12539\n12540\n12541\n12542\n12543\n12544\n12545\n12546\n12547\n12548\n12549\n12550\n12551\n12552\n12553\n12554\n12555\n12556\n12557\n12558\n12559\n12560\n12561\n12562\n12563\n12564\n12565\n12566\n12567\n12568\n12569\n12570\n12571\n12572\n12573\n12574\n12575\n12576\n12577\n12578\n12579\n12580\n12581\n12582\n12583\n12584\n12585\n12586\n12587\n12588\n12589\n12590\n12591\n12592\n12593\n12594\n12595\n12596\n12597\n12598\n12599\n12600\n12601\n12602\n12603\n12604\n12605\n12606\n12607\n12608\n12609\n12610\n12611\n12612\n12613\n12614\n12615\n12616\n12617\n12618\n12619\n12620\n12621\n12622\n12623\n12624\n12625\n12626\n12627\n12628\n12629\n12630\n12631\n12632\n927\n12633\n12634\n12635\n12636\n12637\n928\n12638\n12639\n12640\n12641\n12642\n12643\n12644\n12645\n929\n12646\n12647\n12648\n12649\n12650\n12651\n12652\n12653\n12654\n12655\n12656\n12657\n12658\n12659\n12660\n12661\n12662\n12663\n12664\n12665\n12666\n12667\n12668\n12669\n12670\n12671\n12672\n12673\n12674\n12675\n12676\n12677\n12678\n12679\n12680\n12681\n12682\n12683\n12684\n12685\n12686\n12687\n12688\n12689\n12690\n12691\n12692\n12693\n12694\n12695\n12696\n12697\n12698\n12699\n12700\n12701\n12702\n12703\n12704\n12705\n12706\n12707\n12708\n12709\n12710\n12711\n12712\n12713\n12714\n12715\n12716\n12717\n12718\n12719\n12720\n12721\n12722\n12723\n12724\n12725\n12726\n12727\n12728\n12729\n12730\n12731\n12732\n12733\n12734\n12735\n12736\n12737\n12738\n12739\n12740\n12741\n12742\n12743\n12744\n12745\n12746\n12747\n12748\n12749\n12750\n12751\n12752\n12753\n12754\n12755\n12756\n12757\n930\n12758\n12759\n12760\n12761\n12762\n12763\n12764\n12765\n12766\n12767\n12768\n12769\n12770\n12771\n12772\n12773\n12774\n12775\n12776\n12777
\n12778\n12779\n12780\n12781\n12782\n12783\n12784\n12785\n12786\n12787\n12788\n12789\n12790\n12791\n12792\n12793\n12794\n12795\n12796\n12797\n12798\n12799\n12800\n12801\n12802\n12803\n12804\n12805\n12806\n12807\n12808\n12809\n12810\n12811\n12812\n12813\n12814\n12815\n12816\n12817\n12818\n12819\n12820\n12821\n12822\n12823\n931\n12824\n12825\n12826\n12827\n12828\n12829\n12830\n12831\n12832\n12833\n12834\n12835\n932\n12836\n12837\n12838\n12839\n12840\n12841\n12842\n12843\n12844\n933\n12845\n934\n12846\n12847\n12848\n12849\n12850\n12851\n12852\n12853\n12854\n12855\n12856\n12857\n12858\n12859\n12860\n12861\n12862\n12863\n12864\n12865\n12866\n12867\n12868\n12869\n12870\n12871\n12872\n12873\n12874\n12875\n12876\n935\n12877\n12878\n12879\n12880\n12881\n12882\n12883\n12884\n12885\n12886\n12887\n12888\n12889\n12890\n12891\n12892\n12893\n12894\n12895\n936\n12896\n12897\n937\n938\n12898\n12899\n12900\n12901\n12902\n12903\n939\n12904\n12905\n12906\n940\n12907\n941\n942\n12908\n12909\n12910\n12911\n12912\n943\n12913\n944\n12914\n12915\n12916\n12917\n12918\n12919\n12920\n12921\n12922\n12923\n12924\n12925\n12926\n12927\n945\n12928\n12929\n12930\n12931\n12932\n12933\n12934\n12935\n12936\n12937\n12938\n12939\n12940\n12941\n12942\n12943\n12944\n12945\n12946\n12947\n12948\n12949\n12950\n12951\n12952\n12953\n12954\n12955\n12956\n12957\n12958\n12959\n12960\n12961\n12962\n12963\n12964\n12965\n12966\n12967\n12968\n12969\n12970\n12971\n12972\n12973\n12974\n12975\n12976\n12977\n12978\n12979\n12980\n12981\n12982\n12983\n12984\n12985\n12986\n12987\n12988\n12989\n12990\n12991\n946\n12992\n12993\n12994\n12995\n12996\n12997\n12998\n12999\n13000\n13001\n13002\n13003\n13004\n13005\n13006\n13007\n13008\n13009\n13010\n13011\n13012\n13013\n13014\n13015\n13016\n13017\n13018\n13019\n13020\n13021\n13022\n13023\n947\n13024\n13025\n13026\n13027\n13028\n13029\n13030\n13031\n13032\n13033\n13034\n13035\n13036\n13037\n13038\n13039\n13040\n13041\n13042\n13043\n13044\n13045\n13046\n13047\n13048\n13049\n13050\n13
051\n13052\n13053\n13054\n13055\n13056\n13057\n13058\n13059\n13060\n13061\n13062\n13063\n13064\n13065\n13066\n13067\n13068\n948\n13069\n13070\n13071\n13072\n13073\n13074\n13075\n13076\n13077\n13078\n13079\n13080\n13081\n13082\n13083\n13084\n13085\n13086\n13087\n13088\n949\n13089\n13090\n13091\n13092\n13093\n13094\n13095\n950\n13096\n13097\n13098\n13099\n13100\n13101\n13102\n13103\n13104\n13105\n13106\n13107\n951\n13108\n13109\n13110\n13111\n13112\n13113\n13114\n13115\n13116\n13117\n13118\n13119\n13120\n13121\n13122\n13123\n13124\n13125\n13126\n13127\n13128\n13129\n13130\n13131\n13132\n952\n953\n13133\n954\n13134\n13135\n13136\n13137\n13138\n955\n13139\n13140\n13141\n13142\n13143\n13144\n13145\n13146\n13147\n13148\n13149\n13150\n13151\n13152\n13153\n13154\n13155\n13156\n13157\n13158\n13159\n13160\n13161\n13162\n13163\n13164\n13165\n13166\n13167\n13168\n13169\n13170\n13171\n13172\n13173\n13174\n13175\n13176\n13177\n956\n13178\n13179\n13180\n13181\n13182\n13183\n13184\n13185\n13186\n13187\n13188\n13189\n13190\n13191\n13192\n13193\n13194\n13195\n13196\n13197\n13198\n13199\n13200\n13201\n13202\n13203\n13204\n13205\n13206\n13207\n13208\n13209\n13210\n13211\n13212\n13213\n13214\n13215\n13216\n13217\n13218\n13219\n13220\n13221\n13222\n13223\n13224\n957\n13225\n13226\n13227\n13228\n13229\n13230\n13231\n13232\n13233\n13234\n13235\n13236\n13237\n13238\n13239\n13240\n13241\n13242\n13243\n13244\n13245\n13246\n13247\n13248\n13249\n13250\n13251\n13252\n13253\n13254\n13255\n13256\n13257\n13258\n13259\n13260\n13261\n13262\n13263\n13264\n13265\n13266\n13267\n13268\n13269\n13270\n13271\n958\n13272\n13273\n13274\n13275\n13276\n13277\n13278\n13279\n13280\n13281\n13282\n13283\n13284\n13285\n13286\n13287\n13288\n13289\n13290\n13291\n13292\n13293\n13294\n13295\n13296\n13297\n13298\n13299\n13300\n13301\n13302\n13303\n13304\n13305\n13306\n13307\n13308\n13309\n13310\n13311\n13312\n13313\n13314\n13315\n13316\n13317\n13318\n13319\n13320\n13321\n13322\n13323\n13324\n13325\n13326\n13327\n13328\n1
3329\n13330\n13331\n13332\n13333\n13334\n13335\n13336\n13337\n13338\n13339\n13340\n13341\n13342\n13343\n13344\n13345\n13346\n13347\n13348\n13349\n13350\n13351\n13352\n13353\n13354\n13355\n13356\n13357\n13358\n13359\n13360\n13361\n13362\n13363\n13364\n13365\n13366\n13367\n13368\n13369\n13370\n13371\n13372\n13373\n13374\n13375\n13376\n13377\n13378\n13379\n13380\n13381\n13382\n13383\n13384\n13385\n13386\n13387\n13388\n13389\n13390\n13391\n13392\n13393\n13394\n13395\n13396\n13397\n13398\n13399\n13400\n13401\n13402\n13403\n13404\n13405\n13406\n13407\n13408\n13409\n13410\n13411\n13412\n13413\n13414\n13415\n13416\n13417\n13418\n13419\n13420\n13421\n13422\n13423\n13424\n13425\n13426\n13427\n13428\n13429\n13430\n13431\n13432\n13433\n13434\n13435\n13436\n13437\n13438\n13439\n13440\n13441\n13442\n13443\n13444\n13445\n13446\n959\n13447\n13448\n13449\n13450\n13451\n13452\n13453\n13454\n13455\n13456\n13457\n13458\n13459\n13460\n13461\n13462\n13463\n13464\n13465\n13466\n13467\n13468\n13469\n13470\n13471\n13472\n13473\n13474\n13475\n13476\n13477\n13478\n13479\n13480\n13481\n13482\n13483\n960\n13484\n13485\n13486\n13487\n13488\n13489\n13490\n13491\n13492\n13493\n13494\n13495\n13496\n13497\n13498\n13499\n13500\n13501\n13502\n13503\n13504\n13505\n13506\n13507\n13508\n13509\n13510\n13511\n13512\n13513\n13514\n13515\n13516\n13517\n13518\n13519\n13520\n13521\n13522\n13523\n13524\n13525\n13526\n13527\n13528\n13529\n13530\n13531\n13532\n13533\n13534\n13535\n13536\n13537\n13538\n13539\n13540\n13541\n13542\n13543\n13544\n13545\n13546\n13547\n13548\n13549\n13550\n13551\n13552\n13553\n13554\n13555\n13556\n13557\n13558\n13559\n13560\n13561\n13562\n13563\n13564\n13565\n13566\n13567\n13568\n13569\n13570\n13571\n13572\n13573\n13574\n13575\n13576\n13577\n13578\n13579\n13580\n13581\n13582\n13583\n13584\n13585\n13586\n13587\n13588\n13589\n13590\n13591\n13592\n13593\n13594\n13595\n13596\n13597\n13598\n13599\n13600\n13601\n13602\n13603\n13604\n13605\n13606\n13607\n13608\n13609\n13610\n13611\n13612\n136
13\n13614\n13615\n13616\n13617\n13618\n13619\n13620\n13621\n13622\n13623\n13624\n13625\n13626\n13627\n13628\n13629\n13630\n13631\n13632\n13633\n13634\n13635\n13636\n13637\n13638\n13639\n13640\n13641\n13642\n13643\n13644\n13645\n13646\n13647\n13648\n13649\n13650\n13651\n13652\n961\n13653\n13654\n13655\n13656\n13657\n13658\n13659\n13660\n13661\n13662\n13663\n13664\n13665\n13666\n13667\n13668\n13669\n13670\n13671\n13672\n13673\n13674\n13675\n13676\n13677\n13678\n13679\n13680\n13681\n13682\n13683\n13684\n13685\n13686\n13687\n13688\n13689\n13690\n13691\n13692\n13693\n13694\n13695\n13696\n13697\n13698\n13699\n13700\n13701\n13702\n13703\n13704\n13705\n13706\n13707\n13708\n13709\n13710\n13711\n13712\n13713\n13714\n13715\n13716\n13717\n13718\n13719\n13720\n13721\n13722\n13723\n13724\n13725\n13726\n13727\n962\n13728\n13729\n13730\n13731\n13732\n13733\n13734\n963\n13735\n13736\n13737\n13738\n13739\n13740\n13741\n13742\n13743\n13744\n964\n13745\n13746\n13747\n13748\n13749\n13750\n13751\n13752\n13753\n13754\n13755\n13756\n13757\n13758\n13759\n13760\n13761\n13762\n13763\n13764\n13765\n13766\n13767\n13768\n13769\n13770\n13771\n13772\n13773\n13774\n13775\n13776\n13777\n13778\n13779\n13780\n13781\n13782\n13783\n965\n13784\n13785\n13786\n13787\n13788\n13789\n13790\n13791\n13792\n13793\n13794\n13795\n13796\n13797\n13798\n13799\n13800\n13801\n13802\n13803\n13804\n13805\n13806\n13807\n13808\n13809\n13810\n13811\n13812\n13813\n13814\n13815\n13816\n13817\n13818\n13819\n13820\n13821\n13822\n13823\n13824\n13825\n13826\n13827\n13828\n13829\n13830\n13831\n13832\n13833\n13834\n13835\n13836\n13837\n966\n13838\n13839\n13840\n13841\n13842\n13843\n13844\n13845\n13846\n13847\n13848\n13849\n13850\n13851\n13852\n13853\n13854\n13855\n13856\n13857\n13858\n13859\n13860\n13861\n13862\n13863\n13864\n13865\n13866\n13867\n13868\n13869\n13870\n13871\n13872\n13873\n13874\n13875\n13876\n13877\n13878\n13879\n13880\n13881\n13882\n13883\n13884\n13885\n13886\n13887\n13888\n13889\n13890\n13891\n13892\n13893\n13894\
n13895\n13896\n13897\n13898\n13899\n13900\n13901\n13902\n13903\n13904\n13905\n13906\n13907\n13908\n13909\n13910\n13911\n13912\n13913\n13914\n13915\n13916\n13917\n13918\n13919\n13920\n13921\n13922\n13923\n13924\n13925\n13926\n13927\n13928\n13929\n13930\n13931\n13932\n13933\n13934\n13935\n13936\n13937\n13938\n13939\n13940\n13941\n13942\n13943\n13944\n13945\n13946\n13947\n13948\n13949\n13950\n13951\n13952\n13953\n13954\n13955\n13956\n13957\n13958\n13959\n13960\n13961\n13962\n13963\n13964\n13965\n13966\n13967\n13968\n13969\n13970\n13971\n13972\n13973\n13974\n13975\n13976\n13977\n13978\n13979\n13980\n13981\n13982\n13983\n13984\n13985\n13986\n13987\n13988\n13989\n13990\n13991\n13992\n13993\n13994\n13995\n13996\n13997\n13998\n13999\n14000\n14001\n14002\n14003\n14004\n14005\n14006\n14007\n14008\n14009\n14010\n14011\n14012\n14013\n14014\n967\n14015\n14016\n14017\n14018\n14019\n14020\n14021\n14022\n14023\n14024\n14025\n14026\n14027\n14028\n14029\n14030\n14031\n14032\n14033\n14034\n14035\n14036\n14037\n14038\n14039\n14040\n14041\n14042\n14043\n14044\n14045\n14046\n14047\n14048\n14049\n14050\n14051\n14052\n14053\n14054\n14055\n14056\n14057\n14058\n14059\n14060\n14061\n14062\n14063\n14064\n14065\n14066\n14067\n14068\n14069\n14070\n14071\n14072\n14073\n14074\n14075\n14076\n14077\n14078\n14079\n14080\n14081\n14082\n968\n14083\n14084\n14085\n14086\n14087\n14088\n14089\n969\n14090\n14091\n14092\n14093\n14094\n14095\n14096\n14097\n14098\n14099\n14100\n14101\n14102\n14103\n14104\n14105\n14106\n14107\n14108\n14109\n14110\n14111\n14112\n14113\n14114\n14115\n14116\n14117\n14118\n14119\n14120\n14121\n14122\n14123\n14124\n14125\n14126\n14127\n14128\n14129\n14130\n14131\n14132\n14133\n14134\n14135\n14136\n14137\n14138\n14139\n14140\n14141\n14142\n14143\n14144\n14145\n14146\n14147\n14148\n14149\n14150\n14151\n14152\n14153\n14154\n14155\n14156\n14157\n14158\n14159\n14160\n14161\n14162\n14163\n14164\n14165\n14166\n14167\n14168\n14169\n14170\n14171\n14172\n14173\n14174\n14175\n14176\n14177\n141
78\n14179\n14180\n14181\n14182\n14183\n14184\n14185\n14186\n14187\n14188\n14189\n14190\n14191\n14192\n14193\n14194\n14195\n14196\n14197\n14198\n14199\n14200\n14201\n14202\n14203\n14204\n14205\n14206\n14207\n14208\n14209\n14210\n14211\n14212\n14213\n14214\n14215\n14216\n14217\n14218\n14219\n14220\n14221\n14222\n14223\n14224\n14225\n14226\n14227\n14228\n14229\n14230\n14231\n14232\n14233\n14234\n14235\n14236\n14237\n14238\n14239\n14240\n14241\n14242\n14243\n14244\n14245\n14246\n14247\n14248\n970\n14249\n14250\n14251\n14252\n14253\n14254\n14255\n14256\n14257\n14258\n14259\n14260\n14261\n14262\n14263\n14264\n14265\n14266\n14267\n14268\n14269\n14270\n14271\n14272\n14273\n14274\n971\n14275\n14276\n14277\n14278\n14279\n14280\n14281\n14282\n14283\n972\n14284\n14285\n14286\n14287\n14288\n14289\n14290\n973\n14291\n14292\n14293\n14294\n14295\n14296\n14297\n14298\n14299\n14300\n14301\n14302\n14303\n14304\n14305\n14306\n14307\n14308\n14309\n14310\n14311\n14312\n14313\n14314\n14315\n14316\n14317\n14318\n14319\n974\n14320\n14321\n14322\n14323\n14324\n14325\n14326\n14327\n14328\n14329\n14330\n14331\n14332\n14333\n14334\n14335\n14336\n14337\n14338\n14339\n14340\n14341\n14342\n14343\n14344\n14345\n14346\n14347\n14348\n975\n14349\n14350\n14351\n14352\n14353\n14354\n14355\n14356\n14357\n14358\n14359\n14360\n14361\n14362\n14363\n14364\n14365\n14366\n14367\n14368\n14369\n14370\n14371\n14372\n14373\n14374\n14375\n14376\n14377\n14378\n14379\n14380\n14381\n14382\n14383\n14384\n14385\n14386\n14387\n14388\n14389\n14390\n976\n14391\n14392\n14393\n14394\n14395\n14396\n14397\n14398\n14399\n14400\n14401\n14402\n14403\n14404\n14405\n14406\n14407\n14408\n14409\n14410\n14411\n977\n14412\n14413\n14414\n14415\n978\n14416\n14417\n14418\n14419\n14420\n14421\n14422\n14423\n14424\n14425\n14426\n14427\n14428\n14429\n14430\n14431\n14432\n14433\n14434\n14435\n14436\n14437\n14438\n14439\n14440\n14441\n14442\n14443\n14444\n14445\n14446\n14447\n14448\n14449\n14450\n14451\n14452\n14453\n14454\n14455\n14456\n14457
\n14458\n979\n14459\n14460\n14461\n980\n14462\n14463\n14464\n14465\n14466\n14467\n14468\n14469\n14470\n14471\n14472\n14473\n14474\n14475\n14476\n14477\n14478\n14479\n14480\n14481\n14482\n14483\n14484\n14485\n14486\n14487\n14488\n14489\n14490\n14491\n14492\n14493\n14494\n14495\n14496\n14497\n14498\n14499\n14500\n14501\n14502\n14503\n14504\n14505\n14506\n14507\n14508\n14509\n14510\n14511\n14512\n14513\n14514\n14515\n14516\n14517\n14518\n14519\n14520\n14521\n14522\n14523\n14524\n14525\n14526\n14527\n14528\n14529\n14530\n14531\n14532\n14533\n14534\n14535\n14536\n14537\n14538\n14539\n14540\n14541\n14542\n14543\n14544\n14545\n14546\n14547\n14548\n14549\n14550\n14551\n14552\n14553\n14554\n14555\n14556\n14557\n14558\n14559\n14560\n14561\n14562\n14563\n14564\n14565\n14566\n14567\n14568\n14569\n14570\n14571\n14572\n14573\n14574\n14575\n14576\n14577\n14578\n14579\n14580\n14581\n14582\n14583\n14584\n14585\n14586\n14587\n14588\n14589\n14590\n14591\n14592\n14593\n14594\n14595\n14596\n14597\n14598\n14599\n14600\n14601\n14602\n14603\n14604\n14605\n14606\n14607\n14608\n14609\n14610\n14611\n14612\n14613\n14614\n14615\n14616\n14617\n14618\n14619\n14620\n14621\n14622\n14623\n14624\n14625\n14626\n14627\n14628\n14629\n14630\n14631\n14632\n14633\n14634\n14635\n14636\n14637\n14638\n14639\n14640\n14641\n14642\n14643\n14644\n14645\n14646\n14647\n14648\n14649\n14650\n14651\n14652\n14653\n14654\n14655\n14656\n14657\n14658\n14659\n14660\n14661\n14662\n14663\n14664\n14665\n14666\n14667\n14668\n14669\n14670\n14671\n14672\n14673\n14674\n14675\n14676\n14677\n14678\n14679\n14680\n14681\n14682\n14683\n14684\n14685\n14686\n14687\n14688\n14689\n14690\n14691\n14692\n14693\n14694\n14695\n14696\n14697\n14698\n14699\n14700\n14701\n14702\n14703\n14704\n14705\n14706\n14707\n14708\n14709\n14710\n14711\n14712\n14713\n14714\n14715\n14716\n14717\n14718\n14719\n14720\n14721\n14722\n14723\n14724\n14725\n14726\n14727\n14728\n14729\n14730\n14731\n14732\n14733\n14734\n14735\n14736\n14737\n14738\n14739\n14740\n14741\n
14742\n14743\n14744\n14745\n14746\n14747\n14748\n14749\n14750\n14751\n14752\n14753\n14754\n14755\n14756\n14757\n14758\n14759\n14760\n14761\n14762\n14763\n14764\n14765\n14766\n14767\n14768\n14769\n14770\n14771\n14772\n14773\n14774\n14775\n14776\n14777\n14778\n14779\n14780\n14781\n14782\n14783\n14784\n14785\n14786\n14787\n14788\n14789\n14790\n14791\n14792\n14793\n14794\n14795\n14796\n14797\n14798\n14799\n14800\n14801\n14802\n14803\n14804\n14805\n14806\n14807\n14808\n14809\n14810\n14811\n14812\n14813\n14814\n14815\n14816\n14817\n14818\n14819\n14820\n14821\n14822\n14823\n14824\n14825\n14826\n14827\n14828\n14829\n14830\n14831\n14832\n14833\n14834\n14835\n14836\n14837\n14838\n14839\n14840\n14841\n14842\n14843\n14844\n14845\n14846\n14847\n14848\n14849\n14850\n14851\n14852\n14853\n14854\n14855\n14856\n14857\n14858\n14859\n14860\n14861\n14862\n14863\n14864\n14865\n14866\n14867\n14868\n14869\n14870\n14871\n14872\n14873\n14874\n14875\n14876\n14877\n14878\n14879\n14880\n14881\n14882\n14883\n14884\n14885\n14886\n14887\n14888\n14889\n14890\n14891\n14892\n14893\n14894\n14895\n14896\n14897\n14898\n14899\n14900\n14901\n14902\n14903\n14904\n14905\n14906\n14907\n14908\n14909\n14910\n14911\n14912\n14913\n14914\n14915\n14916\n14917\n14918\n14919\n14920\n14921\n14922\n14923\n14924\n14925\n14926\n14927\n14928\n14929\n14930\n14931\n14932\n14933\n14934\n14935\n14936\n14937\n14938\n14939\n14940\n14941\n14942\n14943\n14944\n14945\n14946\n14947\n14948\n14949\n14950\n14951\n14952\n14953\n14954\n14955\n14956\n14957\n14958\n14959\n14960\n14961\n14962\n14963\n14964\n14965\n14966\n14967\n14968\n14969\n14970\n14971\n14972\n14973\n14974\n14975\n14976\n14977\n14978\n14979\n14980\n14981\n14982\n14983\n981\n14984\n14985\n14986\n14987\n14988\n14989\n14990\n14991\n14992\n14993\n14994\n14995\n14996\n14997\n14998\n14999\n15000\n15001\n15002\n15003\n15004\n15005\n15006\n15007\n15008\n15009\n15010\n15011\n15012\n15013\n15014\n15015\n15016\n15017\n15018\n15019\n15020\n15021\n15022\n15023\n15024\n15025\n15026\n
15027\n15028\n15029\n15030\n15031\n15032\n15033\n15034\n15035\n15036\n15037\n15038\n15039\n15040\n15041\n15042\n15043\n15044\n15045\n15046\n15047\n15048\n15049\n15050\n15051\n15052\n15053\n15054\n15055\n15056\n15057\n15058\n15059\n15060\n15061\n15062\n15063\n15064\n15065\n15066\n15067\n15068\n15069\n15070\n15071\n15072\n15073\n15074\n15075\n15076\n15077\n15078\n15079\n15080\n15081\n15082\n15083\n15084\n15085\n15086\n15087\n15088\n15089\n15090\n15091\n15092\n15093\n15094\n15095\n15096\n15097\n15098\n15099\n15100\n15101\n15102\n15103\n15104\n15105\n15106\n15107\n15108\n15109\n15110\n15111\n15112\n15113\n15114\n15115\n15116\n15117\n15118\n15119\n15120\n15121\n15122\n15123\n15124\n15125\n15126\n15127\n15128\n15129\n15130\n15131\n15132\n15133\n15134\n15135\n15136\n15137\n15138\n15139\n15140\n15141\n15142\n15143\n15144\n15145\n15146\n15147\n15148\n15149\n15150\n15151\n15152\n15153\n15154\n15155\n15156\n15157\n15158\n15159\n15160\n15161\n15162\n15163\n15164\n15165\n15166\n15167\n15168\n15169\n15170\n15171\n15172\n15173\n15174\n15175\n15176\n15177\n15178\n15179\n15180\n15181\n15182\n15183\n15184\n15185\n15186\n15187\n15188\n15189\n15190\n15191\n15192\n15193\n15194\n15195\n15196\n15197\n15198\n15199\n15200\n15201\n15202\n15203\n15204\n15205\n15206\n15207\n15208\n15209\n15210\n15211\n15212\n15213\n15214\n15215\n15216\n15217\n15218\n15219\n15220\n15221\n15222\n15223\n15224\n15225\n15226\n15227\n15228\n15229\n15230\n15231\n15232\n15233\n15234\n15235\n15236\n15237\n15238\n15239\n15240\n15241\n15242\n15243\n15244\n15245\n15246\n15247\n15248\n15249\n15250\n15251\n15252\n15253\n15254\n15255\n15256\n15257\n15258\n15259\n15260\n15261\n15262\n15263\n15264\n15265\n15266\n15267\n15268\n15269\n15270\n15271\n15272\n15273\n15274\n15275\n15276\n15277\n15278\n15279\n15280\n15281\n15282\n15283\n15284\n15285\n15286\n15287\n15288\n15289\n15290\n15291\n15292\n15293\n15294\n15295\n15296\n15297\n15298\n15299\n15300\n15301\n15302\n15303\n15304\n15305\n15306\n15307\n15308\n15309\n15310\n15311\n15312
\n15313\n15314\n15315\n15316\n15317\n15318\n15319\n15320\n15321\n15322\n15323\n15324\n15325\n15326\n15327\n15328\n15329\n15330\n15331\n15332\n15333\n15334\n15335\n15336\n15337\n15338\n15339\n15340\n15341\n15342\n15343\n15344\n15345\n15346\n15347\n15348\n15349\n15350\n15351\n15352\n15353\n15354\n15355\n15356\n15357\n15358\n15359\n15360\n15361\n15362\n15363\n15364\n15365\n15366\n15367\n15368\n15369\n15370\n15371\n15372\n15373\n15374\n15375\n15376\n15377\n15378\n15379\n15380\n15381\n15382\n15383\n15384\n15385\n15386\n15387\n15388\n15389\n15390\n15391\n15392\n15393\n15394\n15395\n15396\n15397\n15398\n15399\n15400\n15401\n15402\n15403\n15404\n15405\n15406\n15407\n15408\n15409\n15410\n15411\n15412\n15413\n15414\n15415\n15416\n15417\n15418\n15419\n15420\n15421\n15422\n15423\n15424\n15425\n15426\n15427\n15428\n15429\n15430\n15431\n15432\n15433\n15434\n15435\n15436\n15437\n15438\n15439\n15440\n15441\n15442\n15443\n15444\n15445\n15446\n15447\n15448\n15449\n15450\n15451\n15452\n15453\n15454\n15455\n15456\n15457\n15458\n15459\n15460\n15461\n15462\n15463\n15464\n15465\n15466\n15467\n15468\n15469\n15470\n15471\n15472\n15473\n15474\n15475\n15476\n15477\n15478\n15479\n15480\n15481\n15482\n15483\n15484\n15485\n15486\n15487\n15488\n15489\n15490\n15491\n15492\n15493\n15494\n15495\n15496\n15497\n15498\n15499\n15500\n15501\n15502\n15503\n15504\n15505\n15506\n15507\n15508\n15509\n15510\n15511\n15512\n15513\n15514\n15515\n15516\n15517\n15518\n15519\n15520\n15521\n15522\n15523\n15524\n15525\n15526\n15527\n15528\n15529\n15530\n15531\n15532\n15533\n15534\n15535\n15536\n15537\n15538\n15539\n15540\n15541\n15542\n15543\n15544\n15545\n15546\n15547\n15548\n15549\n15550\n15551\n15552\n15553\n15554\n15555\n15556\n15557\n15558\n15559\n15560\n15561\n15562\n15563\n15564\n15565\n15566\n15567\n15568\n15569\n15570\n15571\n15572\n15573\n15574\n15575\n15576\n15577\n15578\n15579\n15580\n15581\n15582\n15583\n15584\n15585\n15586\n15587\n15588\n15589\n15590\n15591\n15592\n15593\n15594\n15595\n15596\n15597\n155
98\n15599\n15600\n15601\n15602\n15603\n15604\n15605\n15606\n15607\n15608\n15609\n15610\n15611\n15612\n15613\n15614\n15615\n15616\n15617\n15618\n15619\n15620\n15621\n15622\n15623\n15624\n15625\n15626\n15627\n15628\n15629\n15630\n15631\n15632\n15633\n15634\n15635\n15636\n15637\n15638\n15639\n15640\n15641\n15642\n15643\n15644\n15645\n15646\n15647\n15648\n15649\n15650\n15651\n15652\n15653\n15654\n15655\n15656\n15657\n15658\n15659\n15660\n15661\n15662\n15663\n15664\n15665\n15666\n15667\n15668\n15669\n15670\n15671\n15672\n15673\n15674\n15675\n15676\n15677\n15678\n15679\n15680\n15681\n15682\n15683\n15684\n15685\n15686\n15687\n15688\n15689\n15690\n15691\n15692\n15693\n15694\n15695\n15696\n15697\n15698\n15699\n15700\n15701\n15702\n15703\n15704\n15705\n15706\n15707\n15708\n15709\n15710\n15711\n15712\n15713\n15714\n15715\n15716\n15717\n15718\n15719\n15720\n15721\n15722\n15723\n15724\n15725\n15726\n15727\n15728\n15729\n15730\n15731\n15732\n15733\n15734\n15735\n15736\n15737\n15738\n15739\n15740\n15741\n15742\n15743\n15744\n15745\n15746\n15747\n15748\n982\n15749\n15750\n15751\n15752\n15753\n15754\n15755\n15756\n15757\n15758\n15759\n15760\n15761\n15762\n15763\n15764\n15765\n15766\n15767\n15768\n15769\n15770\n15771\n15772\n15773\n15774\n15775\n15776\n15777\n15778\n15779\n15780\n15781\n15782\n15783\n15784\n15785\n15786\n15787\n15788\n15789\n15790\n15791\n15792\n15793\n15794\n15795\n15796\n15797\n15798\n15799\n15800\n15801\n15802\n15803\n15804\n15805\n15806\n15807\n15808\n15809\n15810\n15811\n15812\n15813\n15814\n15815\n15816\n15817\n15818\n15819\n15820\n15821\n15822\n15823\n15824\n15825\n15826\n15827\n15828\n15829\n15830\n15831\n15832\n15833\n15834\n15835\n15836\n15837\n15838\n15839\n15840\n15841\n15842\n15843\n15844\n15845\n15846\n15847\n15848\n15849\n15850\n15851\n15852\n15853\n15854\n15855\n15856\n15857\n15858\n15859\n15860\n15861\n15862\n15863\n15864\n15865\n15866\n15867\n15868\n15869\n15870\n15871\n15872\n15873\n15874\n15875\n15876\n15877\n15878\n15879\n15880\n15881\n15882\n158
83\n15884\n15885\n15886\n15887\n15888\n15889\n15890\n15891\n15892\n15893\n15894\n15895\n15896\n15897\n15898\n15899\n15900\n15901\n15902\n15903\n15904\n15905\n15906\n15907\n15908\n15909\n15910\n15911\n15912\n15913\n15914\n15915\n15916\n15917\n15918\n15919\n15920\n15921\n15922\n15923\n15924\n15925\n15926\n15927\n15928\n15929\n15930\n15931\n15932\n15933\n15934\n15935\n15936\n15937\n15938\n15939\n15940\n15941\n15942\n15943\n15944\n15945\n15946\n15947\n15948\n15949\n15950\n15951\n15952\n15953\n15954\n15955\n15956\n15957\n15958\n15959\n15960\n15961\n15962\n15963\n15964\n15965\n15966\n15967\n15968\n15969\n15970\n15971\n15972\n15973\n15974\n15975\n15976\n15977\n15978\n15979\n15980\n15981\n15982\n15983\n15984\n15985\n15986\n15987\n15988\n15989\n15990\n15991\n15992\n15993\n15994\n15995\n15996\n15997\n15998\n15999\n16000\n16001\n16002\n16003\n16004\n16005\n16006\n16007\n16008\n16009\n16010\n16011\n16012\n16013\n16014\n16015\n16016\n16017\n16018\n16019\n16020\n16021\n16022\n16023\n16024\n16025\n16026\n16027\n16028\n16029\n16030\n16031\n16032\n16033\n16034\n16035\n16036\n16037\n16038\n16039\n16040\n16041\n16042\n16043\n16044\n16045\n16046\n16047\n16048\n16049\n16050\n16051\n16052\n16053\n16054\n16055\n16056\n16057\n16058\n16059\n16060\n16061\n16062\n16063\n16064\n16065\n16066\n16067\n16068\n16069\n16070\n16071\n16072\n16073\n16074\n16075\n16076\n16077\n16078\n16079\n16080\n16081\n16082\n16083\n16084\n16085\n16086\n16087\n16088\n16089\n16090\n16091\n16092\n16093\n16094\n16095\n16096\n16097\n16098\n16099\n16100\n16101\n16102\n16103\n16104\n16105\n16106\n16107\n16108\n16109\n16110\n16111\n16112\n16113\n16114\n16115\n16116\n16117\n16118\n16119\n16120\n16121\n16122\n16123\n16124\n16125\n16126\n16127\n16128\n16129\n16130\n16131\n16132\n16133\n16134\n16135\n16136\n16137\n16138\n16139\n16140\n16141\n16142\n16143\n16144\n16145\n16146\n16147\n16148\n16149\n16150\n16151\n16152\n16153\n16154\n16155\n16156\n16157\n16158\n16159\n16160\n16161\n16162\n16163\n16164\n16165\n16166\n16167\n16168\n1
6169\n16170\n16171\n16172\n16173\n16174\n16175\n16176\n16177\n16178\n16179\n16180\n16181\n16182\n16183\n16184\n16185\n16186\n16187\n16188\n16189\n16190\n16191\n16192\n16193\n16194\n16195\n16196\n16197\n16198\n16199\n16200\n16201\n16202\n16203\n16204\n16205\n16206\n16207\n16208\n16209\n16210\n16211\n16212\n16213\n16214\n16215\n16216\n16217\n16218\n16219\n16220\n16221\n16222\n16223\n16224\n16225\n16226\n16227\n16228\n16229\n16230\n16231\n16232\n16233\n16234\n16235\n16236\n16237\n16238\n16239\n16240\n16241\n16242\n16243\n16244\n16245\n16246\n16247\n16248\n16249\n16250\n16251\n16252\n16253\n16254\n16255\n16256\n16257\n16258\n16259\n16260\n16261\n16262\n16263\n16264\n16265\n16266\n16267\n16268\n16269\n16270\n16271\n16272\n16273\n16274\n16275\n16276\n16277\n16278\n16279\n16280\n16281\n16282\n16283\n16284\n16285\n16286\n16287\n16288\n16289\n16290\n16291\n16292\n16293\n16294\n16295\n16296\n16297\n16298\n16299\n16300\n16301\n16302\n16303\n16304\n16305\n16306\n16307\n16308\n16309\n16310\n16311\n16312\n16313\n16314\n16315\n16316\n16317\n16318\n16319\n16320\n16321\n16322\n16323\n16324\n16325\n16326\n16327\n16328\n16329\n16330\n16331\n16332\n16333\n16334\n16335\n16336\n16337\n16338\n16339\n16340\n16341\n16342\n16343\n16344\n16345\n16346\n16347\n16348\n16349\n16350\n16351\n16352\n16353\n16354\n16355\n16356\n16357\n16358\n16359\n16360\n16361\n16362\n16363\n16364\n16365\n16366\n16367\n16368\n16369\n16370\n16371\n16372\n16373\n16374\n16375\n16376\n16377\n16378\n16379\n16380\n16381\n16382\n16383\n16384\n16385\n16386\n16387\n16388\n16389\n16390\n16391\n16392\n16393\n16394\n16395\n16396\n16397\n16398\n16399\n16400\n16401\n16402\n16403\n16404\n16405\n16406\n16407\n16408\n16409\n16410\n16411\n16412\n16413\n16414\n16415\n16416\n16417\n16418\n16419\n16420\n16421\n16422\n16423\n16424\n16425\n16426\n16427\n16428\n16429\n16430\n16431\n16432\n16433\n16434\n16435\n16436\n16437\n16438\n16439\n16440\n16441\n16442\n16443\n16444\n16445\n16446\n16447\n16448\n16449\n16450\n16451\n16452\n16453\n16454\
n16455\n16456\n16457\n16458\n16459\n16460\n16461\n16462\n16463\n16464\n16465\n16466\n16467\n16468\n16469\n16470\n16471\n16472\n16473\n16474\n16475\n16476\n16477\n16478\n16479\n16480\n16481\n16482\n16483\n16484\n16485\n16486\n16487\n16488\n16489\n16490\n16491\n16492\n16493\n16494\n16495\n16496\n16497\n16498\n16499\n16500\n16501\n16502\n16503\n16504\n16505\n16506\n16507\n16508\n16509\n16510\n16511\n16512\n16513\n16514\n16515\n16516\n16517\n16518\n16519\n16520\n16521\n16522\n16523\n16524\n16525\n16526\n16527\n16528\n16529\n16530\n16531\n16532\n16533\n16534\n16535\n16536\n16537\n16538\n16539\n16540\n16541\n16542\n16543\n16544\n16545\n16546\n16547\n16548\n16549\n16550\n16551\n16552\n16553\n16554\n16555\n16556\n16557\n16558\n16559\n16560\n16561\n16562\n16563\n16564\n16565\n16566\n16567\n16568\n16569\n16570\n16571\n16572\n16573\n16574\n16575\n16576\n16577\n16578\n16579\n16580\n16581\n16582\n16583\n16584\n16585\n16586\n16587\n16588\n16589\n16590\n16591\n16592\n16593\n16594\n16595\n16596\n16597\n16598\n16599\n16600\n16601\n16602\n16603\n16604\n16605\n16606\n16607\n16608\n16609\n16610\n16611\n16612\n16613\n16614\n16615\n16616\n16617\n16618\n16619\n16620\n16621\n16622\n16623\n16624\n16625\n16626\n16627\n16628\n16629\n16630\n16631\n16632\n16633\n16634\n16635\n16636\n16637\n16638\n16639\n16640\n16641\n16642\n16643\n16644\n16645\n16646\n16647\n16648\n16649\n16650\n16651\n16652\n16653\n16654\n16655\n16656\n16657\n16658\n16659\n16660\n16661\n16662\n16663\n16664\n16665\n16666\n16667\n16668\n16669\n16670\n16671\n16672\n16673\n16674\n16675\n16676\n16677\n16678\n16679\n16680\n16681\n16682\n16683\n16684\n16685\n16686\n16687\n16688\n16689\n16690\n16691\n16692\n16693\n16694\n16695\n16696\n16697\n16698\n16699\n16700\n16701\n16702\n16703\n16704\n16705\n16706\n16707\n16708\n16709\n16710\n16711\n16712\n16713\n16714\n16715\n16716\n16717\n16718\n16719\n16720\n16721\n16722\n16723\n16724\n16725\n16726\n16727\n16728\n16729\n16730\n16731\n16732\n16733\n16734\n16735\n16736\n16737\n16738\n16739\n1674
0\n16741\n16742\n16743\n16744\n16745\n16746\n16747\n16748\n16749\n16750\n16751\n16752\n16753\n16754\n16755\n16756\n16757\n16758\n16759\n983\n16760\n16761\n16762\n16763\n16764\n16765\n16766\n16767\n16768\n16769\n16770\n16771\n16772\n16773\n16774\n16775\n16776\n16777\n16778\n16779\n16780\n16781\n16782\n16783\n16784\n16785\n16786\n16787\n16788\n16789\n16790\n16791\n16792\n16793\n16794\n16795\n16796\n16797\n16798\n16799\n16800\n16801\n16802\n16803\n16804\n16805\n16806\n16807\n16808\n16809\n16810\n16811\n16812\n16813\n16814\n16815\n16816\n16817\n16818\n16819\n16820\n16821\n16822\n16823\n16824\n16825\n16826\n16827\n16828\n16829\n16830\n16831\n16832\n16833\n16834\n16835\n16836\n16837\n16838\n16839\n16840\n16841\n16842\n16843\n16844\n16845\n16846\n16847\n16848\n16849\n16850\n16851\n16852\n16853\n16854\n16855\n16856\n16857\n16858\n16859\n16860\n16861\n16862\n16863\n16864\n16865\n16866\n16867\n16868\n16869\n16870\n16871\n16872\n16873\n16874\n16875\n16876\n16877\n16878\n16879\n16880\n16881\n16882\n16883\n16884\n16885\n16886\n16887\n16888\n16889\n16890\n16891\n16892\n16893\n16894\n16895\n16896\n16897\n16898\n16899\n16900\n16901\n16902\n16903\n16904\n16905\n16906\n16907\n16908\n16909\n16910\n16911\n16912\n16913\n16914\n16915\n16916\n16917\n16918\n16919\n16920\n16921\n16922\n16923\n16924\n16925\n16926\n16927\n16928\n16929\n16930\n16931\n16932\n16933\n16934\n16935\n16936\n16937\n16938\n16939\n16940\n16941\n16942\n16943\n16944\n16945\n16946\n16947\n16948\n16949\n16950\n16951\n16952\n16953\n16954\n16955\n16956\n16957\n16958\n16959\n16960\n16961\n16962\n16963\n16964\n16965\n16966\n16967\n16968\n16969\n16970\n16971\n16972\n16973\n16974\n16975\n16976\n16977\n16978\n16979\n16980\n16981\n16982\n16983\n16984\n16985\n16986\n16987\n16988\n16989\n16990\n16991\n16992\n16993\n16994\n16995\n16996\n16997\n16998\n16999\n17000\n17001\n17002\n17003\n17004\n17005\n17006\n17007\n17008\n17009\n17010\n17011\n17012\n17013\n17014\n17015\n17016\n17017\n17018\n17019\n17020\n17021\n17022\n17023\n17024\n1702
5\n17026\n17027\n17028\n17029\n17030\n17031\n17032\n17033\n17034\n17035\n17036\n17037\n17038\n17039\n17040\n17041\n17042\n17043\n17044\n17045\n17046\n17047\n17048\n17049\n17050\n17051\n17052\n17053\n17054\n17055\n17056\n17057\n17058\n17059\n17060\n17061\n17062\n17063\n17064\n17065\n17066\n17067\n17068\n17069\n17070\n17071\n17072\n17073\n17074\n17075\n17076\n17077\n17078\n17079\n17080\n17081\n17082\n17083\n17084\n17085\n17086\n17087\n17088\n17089\n17090\n17091\n17092\n17093\n17094\n17095\n17096\n17097\n17098\n17099\n17100\n17101\n17102\n17103\n17104\n17105\n17106\n17107\n17108\n17109\n17110\n17111\n17112\n17113\n17114\n17115\n17116\n17117\n17118\n17119\n17120\n17121\n17122\n17123\n17124\n17125\n17126\n17127\n17128\n17129\n17130\n17131\n17132\n17133\n17134\n17135\n17136\n17137\n17138\n17139\n17140\n17141\n17142\n17143\n17144\n17145\n17146\n17147\n17148\n17149\n17150\n17151\n17152\n17153\n17154\n17155\n17156\n17157\n17158\n17159\n17160\n17161\n17162\n17163\n17164\n17165\n17166\n17167\n17168\n17169\n17170\n17171\n17172\n17173\n17174\n17175\n17176\n17177\n17178\n17179\n17180\n17181\n17182\n17183\n17184\n17185\n17186\n17187\n17188\n17189\n17190\n17191\n17192\n17193\n17194\n17195\n17196\n17197\n17198\n17199\n17200\n17201\n17202\n17203\n17204\n17205\n17206\n17207\n17208\n17209\n17210\n17211\n17212\n17213\n17214\n17215\n17216\n17217\n17218\n17219\n17220\n17221\n17222\n17223\n17224\n17225\n17226\n17227\n17228\n17229\n17230\n17231\n17232\n17233\n17234\n17235\n17236\n17237\n17238\n17239\n17240\n17241\n17242\n17243\n17244\n17245\n17246\n17247\n17248\n17249\n17250\n17251\n17252\n17253\n17254\n17255\n17256\n17257\n17258\n17259\n17260\n17261\n17262\n17263\n17264\n17265\n17266\n17267\n17268\n17269\n17270\n17271\n17272\n17273\n17274\n17275\n17276\n17277\n17278\n17279\n17280\n17281\n17282\n17283\n17284\n17285\n17286\n17287\n17288\n17289\n17290\n17291\n17292\n17293\n17294\n17295\n17296\n17297\n17298\n17299\n17300\n17301\n17302\n17303\n17304\n17305\n17306\n17307\n17308\n17309\n17310\n17
311\n17312\n17313\n17314\n17315\n17316\n17317\n17318\n17319\n17320\n17321\n17322\n17323\n17324\n17325\n17326\n17327\n17328\n17329\n17330\n17331\n17332\n17333\n17334\n17335\n17336\n17337\n17338\n17339\n17340\n17341\n17342\n17343\n17344\n17345\n17346\n17347\n17348\n17349\n17350\n17351\n17352\n17353\n17354\n17355\n17356\n17357\n17358\n17359\n17360\n17361\n17362\n17363\n17364\n17365\n17366\n17367\n17368\n17369\n17370\n17371\n17372\n17373\n17374\n17375\n17376\n17377\n17378\n17379\n17380\n17381\n17382\n17383\n17384\n17385\n17386\n17387\n17388\n17389\n17390\n17391\n17392\n17393\n17394\n17395\n17396\n17397\n17398\n17399\n17400\n17401\n17402\n17403\n17404\n17405\n17406\n17407\n17408\n17409\n17410\n17411\n17412\n17413\n17414\n17415\n17416\n17417\n17418\n17419\n17420\n17421\n17422\n17423\n17424\n17425\n17426\n17427\n17428\n17429\n17430\n17431\n17432\n17433\n17434\n17435\n17436\n17437\n17438\n17439\n17440\n17441\n17442\n17443\n17444\n17445\n17446\n17447\n17448\n17449\n17450\n17451\n17452\n17453\n17454\n17455\n17456\n17457\n17458\n17459\n17460\n17461\n17462\n17463\n17464\n17465\n17466\n17467\n17468\n17469\n17470\n17471\n17472\n17473\n17474\n17475\n17476\n17477\n17478\n17479\n17480\n17481\n17482\n17483\n17484\n17485\n17486\n17487\n17488\n17489\n17490\n17491\n17492\n17493\n17494\n17495\n17496\n17497\n17498\n17499\n17500\n17501\n17502\n17503\n17504\n17505\n17506\n17507\n17508\n17509\n17510\n17511\n17512\n17513\n17514\n17515\n17516\n17517\n17518\n17519\n17520\n17521\n17522\n17523\n17524\n17525\n17526\n17527\n17528\n17529\n17530\n17531\n17532\n17533\n17534\n17535\n17536\n17537\n17538\n17539\n17540\n17541\n17542\n17543\n17544\n17545\n17546\n17547\n17548\n17549\n17550\n17551\n17552\n17553\n17554\n17555\n17556\n17557\n17558\n17559\n17560\n17561\n17562\n17563\n17564\n17565\n17566\n17567\n17568\n17569\n17570\n17571\n17572\n17573\n17574\n17575\n17576\n17577\n17578\n17579\n17580\n17581\n17582\n17583\n17584\n17585\n17586\n17587\n17588\n17589\n17590\n17591\n17592\n17593\n17594\n17595\n17596\n
17597\n17598\n17599\n17600\n17601\n17602\n17603\n17604\n17605\n17606\n17607\n17608\n17609\n17610\n17611\n17612\n17613\n17614\n17615\n17616\n17617\n17618\n17619\n17620\n17621\n17622\n17623\n17624\n17625\n17626\n17627\n17628\n17629\n17630\n17631\n17632\n17633\n17634\n17635\n17636\n17637\n17638\n17639\n17640\n17641\n17642\n17643\n17644\n17645\n17646\n17647\n17648\n17649\n17650\n17651\n17652\n17653\n17654\n17655\n17656\n17657\n17658\n17659\n17660\n17661\n17662\n17663\n17664\n17665\n17666\n17667\n17668\n17669\n17670\n17671\n17672\n17673\n17674\n17675\n17676\n17677\n17678\n17679\n17680\n17681\n17682\n17683\n17684\n17685\n17686\n17687\n17688\n17689\n17690\n17691\n17692\n17693\n17694\n17695\n17696\n17697\n17698\n17699\n17700\n17701\n17702\n17703\n17704\n17705\n17706\n17707\n17708\n17709\n17710\n17711\n17712\n17713\n17714\n17715\n17716\n17717\n17718\n17719\n17720\n17721\n17722\n17723\n17724\n17725\n17726\n17727\n17728\n17729\n17730\n17731\n17732\n17733\n17734\n17735\n17736\n17737\n17738\n17739\n17740\n17741\n17742\n17743\n17744\n17745\n17746\n17747\n17748\n17749\n17750\n17751\n17752\n17753\n17754\n17755\n17756\n17757\n17758\n17759\n17760\n17761\n17762\n17763\n17764\n17765\n17766\n17767\n17768\n17769\n17770\n17771\n17772\n17773\n17774\n17775\n17776\n17777\n17778\n17779\n17780\n17781\n17782\n17783\n17784\n17785\n17786\n17787\n17788\n17789\n17790\n17791\n17792\n17793\n17794\n17795\n17796\n17797\n17798\n17799\n17800\n17801\n17802\n17803\n17804\n17805\n17806\n17807\n17808\n17809\n17810\n17811\n17812\n17813\n17814\n17815\n17816\n17817\n17818\n17819\n17820\n17821\n17822\n17823\n17824\n17825\n17826\n17827\n17828\n17829\n17830\n17831\n17832\n17833\n17834\n17835\n17836\n17837\n17838\n17839\n17840\n17841\n17842\n17843\n17844\n17845\n17846\n17847\n17848\n17849\n17850\n17851\n17852\n17853\n17854\n17855\n17856\n17857\n17858\n17859\n17860\n17861\n17862\n17863\n17864\n17865\n17866\n17867\n17868\n17869\n17870\n17871\n17872\n17873\n17874\n17875\n17876\n17877\n17878\n17879\n17880\n17881\n17882
\n17883\n17884\n17885\n17886\n17887\n17888\n17889\n17890\n17891\n17892\n17893\n17894\n17895\n17896\n17897\n17898\n17899\n17900\n17901\n17902\n17903\n17904\n17905\n17906\n17907\n17908\n17909\n17910\n17911\n17912\n17913\n17914\n17915\n17916\n17917\n17918\n17919\n17920\n17921\n17922\n17923\n17924\n17925\n17926\n17927\n17928\n17929\n17930\n17931\n17932\n17933\n17934\n17935\n17936\n17937\n17938\n17939\n17940\n17941\n17942\n17943\n17944\n17945\n17946\n17947\n17948\n17949\n17950\n17951\n17952\n17953\n17954\n17955\n17956\n17957\n17958\n17959\n17960\n17961\n17962\n17963\n17964\n17965\n17966\n17967\n17968\n17969\n17970\n17971\n17972\n17973\n17974\n17975\n17976\n17977\n17978\n17979\n17980\n17981\n17982\n17983\n17984\n17985\n17986\n17987\n17988\n17989\n17990\n17991\n17992\n17993\n17994\n17995\n17996\n17997\n17998\n17999\n18000\n18001\n18002\n18003\n18004\n18005\n18006\n18007\n18008\n18009\n18010\n18011\n18012\n18013\n18014\n18015\n18016\n18017\n18018\n18019\n18020\n18021\n18022\n18023\n18024\n18025\n18026\n18027\n18028\n18029\n18030\n18031\n18032\n18033\n18034\n18035\n18036\n18037\n18038\n18039\n18040\n18041\n18042\n18043\n18044\n18045\n18046\n18047\n18048\n18049\n18050\n18051\n984\n18052\n18053\n18054\n18055\n18056\n18057\n18058\n18059\n18060\n18061\n18062\n18063\n18064\n18065\n18066\n18067\n18068\n18069\n18070\n18071\n18072\n18073\n18074\n18075\n18076\n18077\n18078\n18079\n18080\n18081\n18082\n18083\n18084\n18085\n18086\n18087\n18088\n18089\n18090\n18091\n18092\n18093\n18094\n18095\n18096\n18097\n18098\n18099\n18100\n18101\n18102\n18103\n18104\n18105\n18106\n18107\n18108\n18109\n18110\n18111\n18112\n18113\n18114\n18115\n18116\n18117\n18118\n18119\n18120\n18121\n18122\n18123\n18124\n18125\n18126\n18127\n18128\n18129\n18130\n18131\n18132\n18133\n18134\n18135\n18136\n18137\n18138\n18139\n18140\n18141\n18142\n18143\n18144\n18145\n18146\n18147\n18148\n18149\n18150\n18151\n18152\n18153\n18154\n18155\n18156\n18157\n18158\n18159\n18160\n18161\n18162\n18163\n18164\n18165\n18166\n18167
\n18168\n18169\n18170\n18171\n18172\n18173\n18174\n18175\n18176\n18177\n18178\n18179\n18180\n18181\n18182\n18183\n18184\n18185\n18186\n18187\n18188\n18189\n18190\n18191\n18192\n18193\n18194\n18195\n18196\n18197\n18198\n18199\n18200\n18201\n18202\n18203\n18204\n18205\n18206\n18207\n18208\n18209\n18210\n18211\n18212\n18213\n18214\n18215\n18216\n18217\n18218\n985\n18219\n18220\n18221\n18222\n18223\n18224\n18225\n18226\n18227\n18228\n18229\n18230\n18231\n18232\n18233\n18234\n18235\n18236\n18237\n18238\n18239\n18240\n18241\n18242\n18243\n18244\n18245\n18246\n18247\n18248\n18249\n18250\n18251\n18252\n18253\n18254\n18255\n18256\n18257\n18258\n18259\n18260\n18261\n18262\n18263\n18264\n18265\n18266\n18267\n18268\n18269\n18270\n18271\n18272\n18273\n18274\n18275\n18276\n18277\n18278\n18279\n18280\n18281\n18282\n18283\n18284\n18285\n18286\n18287\n18288\n18289\n18290\n18291\n18292\n18293\n18294\n18295\n18296\n18297\n18298\n18299\n18300\n18301\n18302\n18303\n18304\n18305\n18306\n18307\n18308\n18309\n18310\n18311\n18312\n18313\n18314\n18315\n18316\n18317\n18318\n18319\n18320\n18321\n18322\n18323\n18324\n18325\n18326\n18327\n18328\n18329\n18330\n18331\n18332\n18333\n18334\n18335\n18336\n18337\n18338\n18339\n18340\n18341\n18342\n18343\n18344\n18345\n18346\n18347\n18348\n18349\n18350\n18351\n18352\n18353\n18354\n18355\n18356\n18357\n18358\n18359\n18360\n18361\n18362\n18363\n18364\n18365\n18366\n18367\n18368\n18369\n18370\n18371\n18372\n18373\n18374\n18375\n18376\n18377\n18378\n18379\n18380\n18381\n18382\n18383\n18384\n18385\n18386\n18387\n18388\n18389\n18390\n18391\n18392\n18393\n18394\n18395\n18396\n18397\n18398\n18399\n18400\n18401\n18402\n18403\n18404\n18405\n18406\n18407\n18408\n18409\n18410\n18411\n18412\n18413\n18414\n18415\n18416\n18417\n18418\n18419\n18420\n18421\n18422\n18423\n18424\n18425\n18426\n18427\n18428\n18429\n18430\n18431\n18432\n18433\n18434\n18435\n18436\n18437\n18438\n18439\n18440\n18441\n18442\n18443\n18444\n18445\n18446\n18447\n18448\n18449\n18450\n18451\n18452
\n18453\n18454\n18455\n18456\n18457\n18458\n18459\n18460\n18461\n18462\n18463\n18464\n18465\n18466\n18467\n18468\n18469\n18470\n18471\n18472\n18473\n18474\n18475\n18476\n18477\n18478\n18479\n18480\n18481\n18482\n18483\n18484\n18485\n18486\n18487\n18488\n18489\n18490\n18491\n18492\n18493\n18494\n18495\n18496\n18497\n18498\n18499\n18500\n18501\n18502\n18503\n18504\n18505\n18506\n18507\n18508\n18509\n18510\n18511\n18512\n18513\n18514\n18515\n18516\n18517\n18518\n18519\n18520\n18521\n18522\n18523\n18524\n18525\n18526\n18527\n18528\n18529\n18530\n18531\n18532\n18533\n18534\n18535\n18536\n18537\n18538\n18539\n18540\n18541\n18542\n18543\n18544\n18545\n18546\n18547\n986\n18548\n18549\n18550\n18551\n18552\n18553\n18554\n18555\n18556\n18557\n18558\n18559\n18560\n18561\n18562\n18563\n18564\n18565\n18566\n18567\n18568\n18569\n18570\n18571\n18572\n18573\n18574\n18575\n18576\n18577\n18578\n18579\n18580\n18581\n18582\n18583\n18584\n18585\n18586\n18587\n18588\n18589\n18590\n18591\n18592\n18593\n18594\n18595\n18596\n18597\n18598\n18599\n18600\n18601\n18602\n18603\n18604\n18605\n18606\n18607\n18608\n18609\n18610\n18611\n18612\n18613\n18614\n18615\n18616\n18617\n18618\n18619\n18620\n18621\n18622\n18623\n18624\n18625\n18626\n18627\n18628\n18629\n18630\n18631\n18632\n18633\n18634\n18635\n18636\n18637\n18638\n18639\n18640\n18641\n18642\n18643\n18644\n18645\n18646\n18647\n18648\n18649\n18650\n18651\n18652\n18653\n18654\n18655\n18656\n18657\n18658\n18659\n18660\n18661\n18662\n18663\n18664\n18665\n18666\n18667\n18668\n18669\n18670\n18671\n18672\n18673\n18674\n18675\n18676\n18677\n18678\n18679\n18680\n18681\n18682\n18683\n18684\n18685\n18686\n18687\n18688\n18689\n18690\n18691\n18692\n18693\n18694\n18695\n18696\n18697\n18698\n18699\n18700\n18701\n18702\n18703\n18704\n18705\n18706\n18707\n18708\n18709\n18710\n18711\n18712\n18713\n18714\n18715\n18716\n18717\n18718\n18719\n18720\n18721\n18722\n18723\n18724\n18725\n18726\n18727\n18728\n18729\n18730\n18731\n18732\n18733\n18734\n18735\n18736\n18737
\n18738\n18739\n18740\n18741\n18742\n18743\n18744\n18745\n18746\n18747\n18748\n18749\n18750\n18751\n18752\n18753\n18754\n18755\n18756\n18757\n18758\n18759\n18760\n18761\n18762\n18763\n18764\n18765\n18766\n18767\n18768\n18769\n18770\n18771\n18772\n18773\n18774\n18775\n18776\n18777\n18778\n18779\n18780\n18781\n18782\n18783\n18784\n18785\n18786\n18787\n18788\n18789\n18790\n18791\n18792\n18793\n18794\n18795\n18796\n18797\n18798\n18799\n18800\n18801\n18802\n18803\n987\n18804\n18805\n18806\n18807\n18808\n18809\n18810\n18811\n18812\n18813\n18814\n18815\n18816\n18817\n18818\n18819\n18820\n18821\n18822\n18823\n18824\n18825\n18826\n18827\n18828\n18829\n18830\n18831\n18832\n18833\n18834\n18835\n18836\n18837\n18838\n18839\n18840\n18841\n18842\n18843\n18844\n18845\n18846\n18847\n18848\n18849\n18850\n18851\n18852\n18853\n18854\n18855\n18856\n18857\n18858\n18859\n18860\n18861\n18862\n18863\n18864\n18865\n18866\n18867\n18868\n18869\n18870\n18871\n18872\n18873\n18874\n18875\n18876\n18877\n18878\n18879\n18880\n18881\n18882\n18883\n18884\n18885\n18886\n18887\n18888\n18889\n18890\n18891\n18892\n18893\n18894\n18895\n18896\n18897\n18898\n18899\n18900\n18901\n18902\n18903\n18904\n18905\n18906\n18907\n18908\n18909\n18910\n18911\n18912\n18913\n18914\n18915\n18916\n18917\n18918\n18919\n18920\n18921\n18922\n18923\n18924\n18925\n18926\n18927\n18928\n18929\n18930\n18931\n18932\n18933\n18934\n18935\n18936\n18937\n18938\n18939\n18940\n18941\n18942\n18943\n18944\n18945\n18946\n18947\n18948\n18949\n18950\n18951\n18952\n18953\n18954\n18955\n18956\n18957\n18958\n18959\n18960\n18961\n18962\n18963\n18964\n18965\n18966\n18967\n18968\n18969\n18970\n18971\n18972\n18973\n18974\n18975\n18976\n18977\n18978\n18979\n18980\n18981\n18982\n18983\n18984\n18985\n18986\n18987\n18988\n18989\n18990\n18991\n18992\n18993\n18994\n18995\n18996\n18997\n18998\n18999\n19000\n19001\n19002\n19003\n19004\n19005\n19006\n19007\n19008\n19009\n19010\n19011\n19012\n19013\n19014\n19015\n19016\n19017\n19018\n19019\n19020\n19021\n19022
\n19023\n19024\n19025\n19026\n19027\n19028\n19029\n19030\n19031\n19032\n19033\n19034\n19035\n19036\n19037\n19038\n19039\n19040\n19041\n19042\n19043\n19044\n19045\n19046\n19047\n19048\n19049\n19050\n19051\n19052\n19053\n19054\n19055\n19056\n19057\n19058\n19059\n19060\n19061\n19062\n19063\n19064\n19065\n19066\n19067\n19068\n19069\n19070\n19071\n19072\n19073\n19074\n19075\n19076\n19077\n19078\n19079\n19080\n19081\n19082\n19083\n19084\n19085\n19086\n19087\n19088\n19089\n19090\n19091\n19092\n19093\n19094\n19095\n19096\n19097\n19098\n19099\n19100\n19101\n19102\n19103\n19104\n19105\n19106\n19107\n19108\n19109\n19110\n19111\n19112\n988\n19113\n19114\n19115\n19116\n19117\n19118\n19119\n19120\n19121\n19122\n19123\n19124\n19125\n19126\n19127\n19128\n19129\n19130\n19131\n19132\n19133\n19134\n19135\n19136\n19137\n19138\n19139\n19140\n19141\n19142\n19143\n19144\n19145\n19146\n19147\n19148\n19149\n19150\n19151\n19152\n19153\n19154\n19155\n19156\n19157\n19158\n19159\n19160\n19161\n19162\n19163\n19164\n19165\n19166\n19167\n19168\n19169\n19170\n19171\n19172\n19173\n19174\n19175\n19176\n19177\n19178\n19179\n19180\n19181\n19182\n19183\n19184\n19185\n19186\n19187\n19188\n19189\n19190\n19191\n19192\n19193\n19194\n19195\n19196\n19197\n19198\n19199\n19200\n19201\n19202\n19203\n19204\n19205\n19206\n19207\n19208\n19209\n19210\n19211\n19212\n19213\n19214\n19215\n19216\n19217\n19218\n19219\n19220\n19221\n19222\n19223\n19224\n19225\n19226\n19227\n19228\n19229\n19230\n19231\n19232\n19233\n19234\n19235\n19236\n19237\n19238\n19239\n19240\n19241\n19242\n19243\n19244\n19245\n19246\n19247\n19248\n19249\n19250\n19251\n19252\n19253\n19254\n19255\n19256\n19257\n19258\n19259\n19260\n19261\n19262\n19263\n19264\n19265\n19266\n19267\n19268\n19269\n19270\n19271\n19272\n19273\n19274\n19275\n19276\n19277\n19278\n19279\n19280\n19281\n19282\n19283\n19284\n19285\n19286\n19287\n19288\n19289\n19290\n19291\n19292\n19293\n19294\n19295\n19296\n19297\n19298\n19299\n19300\n19301\n19302\n19303\n19304\n19305\n19306\n19307
\n19308\n19309\n19310\n19311\n19312\n19313\n19314\n19315\n19316\n19317\n19318\n19319\n19320\n19321\n19322\n19323\n19324\n19325\n19326\n19327\n19328\n19329\n19330\n19331\n19332\n19333\n19334\n19335\n19336\n19337\n19338\n19339\n19340\n19341\n19342\n19343\n19344\n19345\n19346\n19347\n19348\n19349\n19350\n19351\n19352\n19353\n19354\n19355\n19356\n19357\n19358\n19359\n19360\n19361\n19362\n19363\n19364\n19365\n19366\n19367\n19368\n19369\n19370\n19371\n19372\n19373\n19374\n19375\n19376\n19377\n19378\n19379\n19380\n19381\n19382\n19383\n19384\n19385\n19386\n19387\n19388\n19389\n19390\n19391\n19392\n19393\n19394\n19395\n19396\n19397\n19398\n19399\n19400\n19401\n19402\n19403\n19404\n19405\n19406\n19407\n19408\n19409\n19410\n19411\n19412\n19413\n19414\n19415\n19416\n19417\n19418\n19419\n19420\n19421\n19422\n19423\n19424\n19425\n19426\n19427\n19428\n19429\n19430\n19431\n19432\n19433\n19434\n19435\n19436\n19437\n19438\n19439\n19440\n19441\n19442\n19443\n19444\n19445\n19446\n19447\n19448\n19449\n19450\n19451\n19452\n19453\n19454\n19455\n19456\n19457\n19458\n19459\n19460\n19461\n19462\n19463\n19464\n19465\n19466\n19467\n19468\n19469\n19470\n19471\n19472\n19473\n19474\n19475\n19476\n19477\n19478\n19479\n19480\n19481\n19482\n19483\n19484\n19485\n19486\n19487\n19488\n19489\n19490\n19491\n19492\n19493\n19494\n19495\n19496\n19497\n19498\n19499\n19500\n19501\n19502\n19503\n19504\n19505\n19506\n19507\n19508\n19509\n19510\n19511\n19512\n19513\n19514\n19515\n19516\n19517\n19518\n19519\n19520\n19521\n19522\n19523\n19524\n19525\n19526\n19527\n19528\n19529\n19530\n19531\n19532\n19533\n19534\n19535\n19536\n19537\n19538\n19539\n19540\n19541\n19542\n19543\n19544\n19545\n19546\n19547\n19548\n19549\n19550\n19551\n19552\n19553\n19554\n19555\n19556\n19557\n19558\n19559\n19560\n19561\n19562\n19563\n19564\n19565\n19566\n19567\n19568\n19569\n19570\n19571\n19572\n19573\n19574\n19575\n19576\n19577\n19578\n19579\n19580\n19581\n19582\n19583\n19584\n19585\n19586\n19587\n19588\n19589\n19590\n19591\n19592\n195
93\n19594\n19595\n19596\n19597\n19598\n19599\n19600\n19601\n19602\n19603\n19604\n19605\n19606\n19607\n19608\n19609\n19610\n19611\n19612\n19613\n19614\n19615\n19616\n19617\n19618\n19619\n19620\n19621\n19622\n19623\n19624\n19625\n19626\n19627\n19628\n19629\n19630\n19631\n19632\n19633\n19634\n19635\n19636\n19637\n19638\n19639\n19640\n19641\n19642\n19643\n19644\n19645\n19646\n19647\n19648\n19649\n19650\n19651\n19652\n19653\n19654\n19655\n19656\n19657\n19658\n19659\n19660\n19661\n19662\n19663\n19664\n19665\n19666\n19667\n19668\n19669\n19670\n19671\n19672\n19673\n19674\n19675\n19676\n19677\n19678\n19679\n19680\n19681\n19682\n19683\n19684\n19685\n19686\n19687\n19688\n19689\n19690\n19691\n19692\n19693\n19694\n19695\n19696\n19697\n19698\n19699\n19700\n19701\n19702\n19703\n19704\n19705\n19706\n19707\n19708\n19709\n19710\n19711\n19712\n19713\n19714\n19715\n19716\n19717\n19718\n19719\n19720\n19721\n19722\n19723\n19724\n19725\n19726\n19727\n19728\n19729\n19730\n19731\n19732\n19733\n19734\n19735\n19736\n19737\n19738\n19739\n19740\n19741\n19742\n19743\n19744\n19745\n19746\n19747\n19748\n19749\n19750\n19751\n19752\n19753\n19754\n19755\n19756\n19757\n19758\n19759\n19760\n19761\n19762\n19763\n19764\n19765\n19766\n19767\n19768\n19769\n19770\n19771\n19772\n19773\n19774\n19775\n19776\n19777\n19778\n19779\n19780\n19781\n19782\n19783\n19784\n19785\n19786\n19787\n19788\n19789\n19790\n19791\n19792\n19793\n19794\n19795\n19796\n19797\n19798\n19799\n19800\n19801\n19802\n19803\n19804\n19805\n19806\n19807\n19808\n19809\n19810\n19811\n19812\n19813\n19814\n19815\n19816\n19817\n19818\n19819\n19820\n19821\n19822\n19823\n19824\n19825\n19826\n19827\n19828\n19829\n19830\n19831\n19832\n19833\n19834\n19835\n19836\n19837\n19838\n19839\n19840\n19841\n19842\n19843\n19844\n19845\n19846\n19847\n19848\n19849\n19850\n19851\n19852\n19853\n19854\n19855\n19856\n19857\n19858\n19859\n19860\n19861\n19862\n19863\n19864\n19865\n19866\n19867\n19868\n19869\n19870\n19871\n19872\n19873\n19874\n19875\n19876\n19877\n19878\n1
9879\n19880\n19881\n19882\n19883\n19884\n19885\n19886\n19887\n19888\n19889\n19890\n19891\n19892\n19893\n19894\n19895\n19896\n19897\n19898\n19899\n19900\n19901\n19902\n19903\n19904\n19905\n19906\n19907\n19908\n19909\n19910\n19911\n19912\n19913\n19914\n19915\n19916\n19917\n19918\n19919\n19920\n19921\n19922\n19923\n19924\n19925\n19926\n19927\n19928\n19929\n19930\n19931\n19932\n19933\n19934\n19935\n19936\n19937\n19938\n19939\n19940\n19941\n19942\n19943\n19944\n19945\n19946\n19947\n19948\n19949\n19950\n19951\n19952\n19953\n19954\n19955\n19956\n19957\n19958\n19959\n19960\n19961\n19962\n19963\n19964\n19965\n19966\n19967\n19968\n19969\n19970\n19971\n19972\n19973\n19974\n19975\n19976\n19977\n19978\n19979\n19980\n19981\n19982\n19983\n19984\n19985\n19986\n19987\n19988\n19989\n19990\n19991\n19992\n19993\n19994\n19995\n19996\n19997\n19998\n19999\n20000\n20001\n20002\n20003\n20004\n20005\n20006\n20007\n20008\n20009\n20010\n20011\n20012\n20013\n20014\n20015\n20016\n20017\n20018\n20019\n20020\n20021\n20022\n20023\n20024\n20025\n20026\n20027\n20028\n989\n20029\n20030\n20031\n20032\n20033\n20034\n20035\n20036\n20037\n20038\n20039\n20040\n20041\n20042\n20043\n20044\n20045\n20046\n20047\n20048\n20049\n20050\n20051\n20052\n20053\n20054\n20055\n20056\n20057\n20058\n20059\n20060\n20061\n20062\n20063\n20064\n20065\n20066\n20067\n20068\n20069\n20070\n20071\n20072\n20073\n20074\n20075\n20076\n20077\n20078\n20079\n20080\n20081\n20082\n20083\n20084\n20085\n20086\n20087\n20088\n20089\n20090\n20091\n20092\n20093\n20094\n20095\n20096\n20097\n20098\n20099\n20100\n20101\n20102\n20103\n20104\n20105\n20106\n20107\n20108\n20109\n20110\n20111\n20112\n20113\n20114\n20115\n20116\n20117\n20118\n20119\n20120\n20121\n20122\n20123\n20124\n20125\n20126\n20127\n20128\n20129\n20130\n20131\n20132\n20133\n20134\n20135\n20136\n20137\n20138\n20139\n20140\n20141\n20142\n20143\n20144\n20145\n20146\n20147\n20148\n20149\n20150\n20151\n20152\n20153\n20154\n20155\n20156\n20157\n20158\n20159\n20160\n20161\n20162\n20163\n2
0164\n20165\n20166\n20167\n20168\n20169\n20170\n20171\n20172\n20173\n20174\n20175\n20176\n20177\n20178\n20179\n20180\n20181\n20182\n20183\n20184\n20185\n20186\n20187\n20188\n20189\n20190\n20191\n20192\n20193\n20194\n20195\n20196\n20197\n20198\n20199\n20200\n20201\n20202\n20203\n20204\n20205\n20206\n20207\n20208\n20209\n20210\n20211\n20212\n20213\n20214\n20215\n20216\n20217\n20218\n20219\n20220\n20221\n20222\n20223\n20224\n20225\n20226\n20227\n20228\n20229\n20230\n20231\n20232\n20233\n20234\n20235\n20236\n20237\n20238\n20239\n20240\n20241\n20242\n20243\n20244\n20245\n20246\n20247\n20248\n20249\n20250\n20251\n20252\n20253\n20254\n20255\n20256\n20257\n20258\n20259\n20260\n20261\n20262\n20263\n20264\n20265\n20266\n20267\n20268\n20269\n20270\n20271\n20272\n20273\n20274\n20275\n20276\n20277\n20278\n20279\n20280\n20281\n20282\n20283\n20284\n20285\n20286\n20287\n20288\n20289\n20290\n20291\n20292\n20293\n20294\n20295\n20296\n20297\n20298\n20299\n20300\n20301\n20302\n20303\n20304\n20305\n20306\n20307\n20308\n20309\n20310\n20311\n20312\n20313\n20314\n20315\n20316\n20317\n20318\n20319\n20320\n20321\n20322\n20323\n20324\n20325\n20326\n20327\n20328\n20329\n20330\n20331\n20332\n20333\n20334\n20335\n20336\n20337\n20338\n20339\n20340\n20341\n20342\n20343\n20344\n20345\n20346\n20347\n20348\n20349\n20350\n20351\n20352\n20353\n20354\n20355\n20356\n20357\n20358\n20359\n20360\n20361\n20362\n20363\n20364\n20365\n20366\n20367\n20368\n20369\n20370\n20371\n20372\n20373\n20374\n20375\n20376\n20377\n20378\n20379\n20380\n20381\n20382\n20383\n20384\n20385\n20386\n20387\n20388\n20389\n20390\n20391\n20392\n20393\n20394\n20395\n20396\n20397\n20398\n20399\n20400\n20401\n20402\n20403\n20404\n20405\n20406\n20407\n20408\n20409\n20410\n20411\n20412\n20413\n20414\n20415\n20416\n20417\n20418\n20419\n20420\n20421\n20422\n20423\n20424\n20425\n20426\n20427\n20428\n20429\n20430\n20431\n20432\n20433\n20434\n20435\n20436\n20437\n20438\n20439\n20440\n20441\n20442\n20443\n20444\n20445\n20446\n20447\n20448\n20449\
n20450\n20451\n20452\n20453\n20454\n20455\n20456\n20457\n20458\n20459\n20460\n20461\n20462\n20463\n20464\n20465\n20466\n20467\n20468\n20469\n20470\n990\n20471\n20472\n20473\n20474\n20475\n20476\n20477\n20478\n20479\n20480\n20481\n20482\n20483\n20484\n20485\n20486\n20487\n20488\n20489\n20490\n20491\n20492\n20493\n20494\n20495\n20496\n20497\n20498\n20499\n20500\n20501\n20502\n20503\n20504\n20505\n20506\n20507\n20508\n20509\n20510\n20511\n20512\n20513\n20514\n20515\n20516\n20517\n20518\n20519\n20520\n20521\n20522\n20523\n20524\n20525\n20526\n20527\n20528\n20529\n20530\n20531\n20532\n20533\n20534\n20535\n20536\n20537\n20538\n20539\n20540\n20541\n20542\n20543\n20544\n20545\n20546\n20547\n20548\n20549\n20550\n20551\n20552\n20553\n20554\n20555\n20556\n20557\n20558\n20559\n20560\n20561\n20562\n20563\n20564\n20565\n20566\n20567\n20568\n20569\n20570\n20571\n20572\n20573\n20574\n20575\n20576\n20577\n20578\n20579\n20580\n20581\n20582\n20583\n20584\n20585\n20586\n20587\n20588\n20589\n20590\n20591\n20592\n20593\n20594\n20595\n20596\n20597\n20598\n20599\n20600\n20601\n20602\n20603\n20604\n20605\n20606\n20607\n20608\n20609\n20610\n20611\n20612\n20613\n20614\n20615\n20616\n20617\n20618\n20619\n20620\n20621\n20622\n20623\n20624\n20625\n20626\n20627\n20628\n20629\n20630\n20631\n20632\n20633\n20634\n20635\n20636\n20637\n20638\n20639\n20640\n20641\n20642\n20643\n20644\n20645\n20646\n20647\n20648\n20649\n20650\n20651\n20652\n20653\n20654\n20655\n20656\n20657\n20658\n20659\n20660\n20661\n20662\n20663\n20664\n20665\n20666\n20667\n20668\n20669\n20670\n20671\n20672\n20673\n20674\n20675\n20676\n20677\n20678\n20679\n20680\n20681\n20682\n20683\n20684\n20685\n20686\n20687\n20688\n20689\n20690\n20691\n20692\n20693\n20694\n20695\n20696\n20697\n20698\n20699\n20700\n20701\n20702\n20703\n20704\n20705\n20706\n20707\n20708\n20709\n20710\n20711\n20712\n20713\n20714\n20715\n20716\n20717\n20718\n20719\n20720\n20721\n20722\n20723\n20724\n20725\n20726\n20727\n20728\n20729\n20730\n20731\n20732\n20733\n20734\
n20735\n20736\n20737\n20738\n20739\n20740\n20741\n20742\n20743\n20744\n20745\n20746\n20747\n20748\n20749\n20750\n20751\n20752\n20753\n20754\n20755\n20756\n20757\n20758\n20759\n20760\n20761\n20762\n20763\n20764\n20765\n20766\n20767\n20768\n20769\n20770\n20771\n20772\n20773\n20774\n20775\n20776\n20777\n20778\n20779\n20780\n20781\n20782\n20783\n20784\n20785\n20786\n20787\n20788\n20789\n20790\n20791\n20792\n20793\n20794\n20795\n20796\n20797\n20798\n20799\n20800\n20801\n20802\n20803\n20804\n20805\n20806\n20807\n20808\n20809\n20810\n20811\n20812\n20813\n20814\n20815\n20816\n20817\n20818\n20819\n20820\n20821\n20822\n20823\n20824\n20825\n20826\n20827\n20828\n20829\n20830\n20831\n20832\n20833\n20834\n20835\n20836\n20837\n20838\n20839\n20840\n20841\n20842\n20843\n20844\n20845\n20846\n20847\n20848\n20849\n20850\n20851\n20852\n20853\n20854\n20855\n20856\n20857\n20858\n20859\n20860\n20861\n20862\n20863\n20864\n20865\n20866\n20867\n20868\n20869\n20870\n20871\n20872\n20873\n20874\n20875\n20876\n20877\n20878\n20879\n20880\n20881\n20882\n20883\n20884\n20885\n20886\n20887\n20888\n20889\n20890\n20891\n20892\n20893\n20894\n20895\n20896\n20897\n20898\n20899\n20900\n20901\n20902\n20903\n20904\n20905\n20906\n20907\n20908\n20909\n20910\n20911\n20912\n20913\n20914\n20915\n20916\n20917\n20918\n20919\n20920\n20921\n20922\n20923\n20924\n20925\n20926\n20927\n20928\n20929\n20930\n20931\n20932\n20933\n20934\n20935\n20936\n20937\n20938\n20939\n20940\n20941\n20942\n20943\n20944\n20945\n20946\n20947\n20948\n20949\n20950\n20951\n20952\n20953\n20954\n20955\n20956\n20957\n20958\n20959\n20960\n20961\n20962\n20963\n20964\n20965\n20966\n20967\n20968\n20969\n20970\n20971\n20972\n20973\n20974\n20975\n20976\n20977\n20978\n20979\n20980\n20981\n20982\n20983\n20984\n20985\n20986\n20987\n20988\n20989\n20990\n20991\n20992\n20993\n20994\n20995\n20996\n20997\n20998\n20999\n21000\n21001\n21002\n21003\n21004\n21005\n21006\n21007\n21008\n21009\n21010\n21011\n21012\n21013\n21014\n21015\n21016\n21017\n21018\n21019\n2102
0\n21021\n21022\n21023\n21024\n21025\n21026\n21027\n21028\n21029\n21030\n21031\n21032\n21033\n991\n21034\n21035\n21036\n21037\n21038\n21039\n21040\n21041\n21042\n21043\n21044\n21045\n21046\n21047\n21048\n21049\n21050\n21051\n992\n21052\n21053\n21054\n21055\n21056\n21057\n21058\n21059\n21060\n21061\n21062\n21063\n21064\n21065\n21066\n21067\n21068\n21069\n21070\n21071\n21072\n21073\n21074\n21075\n21076\n21077\n21078\n21079\n21080\n21081\n21082\n21083\n21084\n21085\n21086\n21087\n21088\n21089\n21090\n21091\n21092\n21093\n21094\n21095\n21096\n21097\n21098\n21099\n21100\n21101\n21102\n21103\n21104\n21105\n21106\n21107\n21108\n21109\n21110\n21111\n21112\n21113\n21114\n21115\n21116\n21117\n21118\n21119\n21120\n21121\n21122\n21123\n21124\n21125\n21126\n21127\n21128\n21129\n21130\n21131\n21132\n21133\n21134\n21135\n21136\n21137\n21138\n21139\n21140\n21141\n21142\n21143\n21144\n21145\n21146\n21147\n21148\n21149\n21150\n21151\n21152\n21153\n21154\n21155\n21156\n21157\n21158\n21159\n21160\n21161\n21162\n21163\n21164\n21165\n21166\n21167\n21168\n993\n21169\n21170\n21171\n21172\n21173\n21174\n21175\n994\n21176\n21177\n21178\n21179\n21180\n21181\n21182\n21183\n21184\n995\n21185\n21186\n21187\n21188\n21189\n21190\n21191\n21192\n21193\n21194\n21195\n21196\n21197\n21198\n996\n21199\n21200\n21201\n997\n21202\n21203\n21204\n21205\n21206\n21207\n21208\n21209\n21210\n21211\n21212\n21213\n21214\n21215\n21216\n21217\n21218\n21219\n21220\n21221\n21222\n21223\n21224\n21225\n21226\n21227\n21228\n21229\n21230\n21231\n21232\n21233\n21234\n21235\n21236\n21237\n21238\n21239\n21240\n21241\n21242\n21243\n21244\n21245\n21246\n21247\n21248\n21249\n21250\n21251\n21252\n21253\n21254\n21255\n21256\n21257\n21258\n21259\n21260\n21261\n21262\n21263\n21264\n21265\n21266\n21267\n21268\n21269\n21270\n21271\n21272\n21273\n21274\n21275\n21276\n21277\n21278\n21279\n21280\n21281\n21282\n21283\n21284\n21285\n21286\n21287\n21288\n21289\n21290\n21291\n21292\n21293\n21294\n21295\n21296\n21297\n21298\n21299\n21300\n21
301\n21302\n21303\n21304\n21305\n21306\n21307\n21308\n21309\n21310\n21311\n21312\n21313\n21314\n21315\n21316\n21317\n21318\n21319\n21320\n21321\n21322\n21323\n21324\n21325\n21326\n21327\n21328\n21329\n21330\n21331\n21332\n21333\n21334\n21335\n21336\n21337\n21338\n21339\n21340\n21341\n21342\n21343\n21344\n21345\n21346\n21347\n21348\n21349\n21350\n21351\n21352\n21353\n21354\n21355\n21356\n21357\n21358\n21359\n21360\n21361\n21362\n21363\n21364\n21365\n21366\n21367\n21368\n21369\n998\n21370\n21371\n21372\n21373\n21374\n21375\n21376\n21377\n21378\n21379\n21380\n21381\n21382\n21383\n21384\n21385\n21386\n21387\n21388\n21389\n21390\n21391\n21392\n21393\n21394\n21395\n21396\n21397\n21398\n21399\n21400\n21401\n21402\n21403\n21404\n21405\n21406\n21407\n21408\n21409\n21410\n21411\n21412\n21413\n21414\n21415\n21416\n21417\n21418\n21419\n21420\n21421\n21422\n21423\n21424\n21425\n21426\n21427\n21428\n21429\n21430\n21431\n21432\n21433\n21434\n21435\n21436\n21437\n21438\n21439\n21440\n21441\n21442\n21443\n21444\n21445\n21446\n21447\n21448\n21449\n21450\n21451\n21452\n21453\n21454\n21455\n21456\n21457\n21458\n21459\n21460\n21461\n21462\n21463\n21464\n21465\n21466\n21467\n21468\n21469\n21470\n21471\n21472\n21473\n21474\n21475\n21476\n21477\n21478\n21479\n21480\n21481\n21482\n21483\n21484\n21485\n21486\n21487\n21488\n21489\n21490\n21491\n21492\n21493\n21494\n21495\n21496\n21497\n21498\n21499\n21500\n21501\n21502\n21503\n21504\n21505\n21506\n21507\n21508\n21509\n21510\n21511\n21512\n21513\n21514\n21515\n21516\n21517\n21518\n21519\n21520\n21521\n21522\n21523\n21524\n21525\n21526\n21527\n21528\n21529\n21530\n21531\n21532\n21533\n21534\n21535\n21536\n21537\n21538\n21539\n21540\n21541\n21542\n21543\n21544\n21545\n21546\n21547\n21548\n21549\n21550\n21551\n21552\n21553\n21554\n21555\n21556\n21557\n21558\n21559\n21560\n21561\n21562\n21563\n21564\n21565\n21566\n21567\n21568\n21569\n21570\n21571\n21572\n21573\n21574\n21575\n21576\n21577\n21578\n21579\n21580\n21581\n21582\n21583\n21584\n21585\n21
586\n21587\n21588\n21589\n21590\n21591\n21592\n21593\n21594\n21595\n21596\n21597\n21598\n21599\n21600\n21601\n21602\n21603\n21604\n21605\n21606\n21607\n21608\n21609\n21610\n21611\n21612\n21613\n21614\n21615\n21616\n21617\n21618\n21619\n21620\n21621\n21622\n21623\n21624\n21625\n21626\n21627\n21628\n21629\n21630\n21631\n21632\n21633\n21634\n21635\n21636\n21637\n21638\n21639\n21640\n21641\n21642\n21643\n21644\n21645\n21646\n21647\n21648\n21649\n21650\n21651\n21652\n21653\n21654\n21655\n21656\n21657\n21658\n21659\n21660\n21661\n21662\n21663\n21664\n21665\n21666\n21667\n21668\n21669\n21670\n21671\n21672\n21673\n21674\n21675\n21676\n21677\n21678\n21679\n21680\n21681\n21682\n21683\n21684\n21685\n21686\n21687\n21688\n21689\n21690\n21691\n21692\n21693\n21694\n21695\n21696\n21697\n21698\n21699\n21700\n21701\n21702\n21703\n21704\n21705\n21706\n21707\n21708\n21709\n21710\n21711\n21712\n21713\n21714\n21715\n21716\n21717\n21718\n21719\n21720\n21721\n21722\n21723\n21724\n21725\n21726\n21727\n21728\n21729\n21730\n21731\n21732\n21733\n21734\n21735\n21736\n21737\n21738\n21739\n21740\n21741\n21742\n21743\n21744\n21745\n21746\n21747\n21748\n21749\n21750\n21751\n21752\n21753\n21754\n21755\n21756\n21757\n21758\n21759\n21760\n21761\n21762\n21763\n21764\n21765\n21766\n21767\n21768\n21769\n21770\n21771\n21772\n21773\n21774\n21775\n21776\n21777\n21778\n21779\n21780\n21781\n21782\n21783\n21784\n21785\n21786\n21787\n21788\n21789\n21790\n21791\n21792\n21793\n21794\n21795\n21796\n21797\n21798\n21799\n21800\n21801\n21802\n21803\n21804\n21805\n21806\n21807\n21808\n21809\n21810\n21811\n21812\n21813\n21814\n21815\n21816\n999\n21817\n21818\n21819\n21820\n21821\n21822\n21823\n21824\n21825\n21826\n21827\n21828\n21829\n21830\n21831\n21832\n21833\n21834\n21835\n21836\n21837\n21838\n21839\n21840\n21841\n"
  },
  {
    "path": "timm/data/_info/imagenet22k_synsets.txt",
    "content": "n00004475\nn00005787\nn00006024\nn00006484\nn00007846\nn00015388\nn00017222\nn00021265\nn00021939\nn00120010\nn00141669\nn00288000\nn00288190\nn00288384\nn00324978\nn00326094\nn00433458\nn00433661\nn00433802\nn00434075\nn00439826\nn00440039\nn00440218\nn00440382\nn00440509\nn00440643\nn00440747\nn00440941\nn00441073\nn00441824\nn00442115\nn00442437\nn00442847\nn00442981\nn00443231\nn00443375\nn00443517\nn00443692\nn00443803\nn00443917\nn00444142\nn00444340\nn00444490\nn00444651\nn00444846\nn00444937\nn00445055\nn00445226\nn00445351\nn00445685\nn00445802\nn00446311\nn00446411\nn00446493\nn00446632\nn00446804\nn00446980\nn00447073\nn00447221\nn00447361\nn00447463\nn00447540\nn00447957\nn00448126\nn00448232\nn00448466\nn00448640\nn00448748\nn00448872\nn00448958\nn00449054\nn00449168\nn00449295\nn00449517\nn00449695\nn00449796\nn00449892\nn00449977\nn00450070\nn00450335\nn00450700\nn00450866\nn00450998\nn00451186\nn00451370\nn00451563\nn00451635\nn00451768\nn00451866\nn00452034\nn00452152\nn00452293\nn00452734\nn00452864\nn00453126\nn00453313\nn00453396\nn00453478\nn00453631\nn00453935\nn00454237\nn00454395\nn00454493\nn00454624\nn00454855\nn00454983\nn00455076\nn00455173\nn00456465\nn00463246\nn00463543\nn00464277\nn00464478\nn00464651\nn00464894\nn00466273\nn00466377\nn00466524\nn00466630\nn00466712\nn00466880\nn00467320\nn00467536\nn00467719\nn00467995\nn00468299\nn00468480\nn00469651\nn00470554\nn00470682\nn00470830\nn00470966\nn00471437\nn00471613\nn00474568\nn00474657\nn00474769\nn00474881\nn00475014\nn00475142\nn00475273\nn00475403\nn00475535\nn00475661\nn00475787\nn00476140\nn00476235\nn00476389\nn00477392\nn00477639\nn00477827\nn00478262\nn00479076\nn00479440\nn00479616\nn00479734\nn00479887\nn00480211\nn00480366\nn00480508\nn00480885\nn00480993\nn00481803\nn00481938\nn00482122\nn00482298\nn00483205\nn00483313\nn00483409\nn00483508\nn00483605\nn00483705\nn00483848\nn00523513\nn00812526\nn00825773\nn00887544\nn01035504\nn01035667\nn01055165\nn013
14388\nn01314663\nn01314781\nn01314910\nn01315213\nn01315330\nn01315581\nn01315805\nn01316422\nn01316579\nn01316734\nn01316949\nn01317089\nn01317294\nn01317391\nn01317541\nn01317813\nn01317916\nn01318053\nn01318279\nn01318381\nn01318478\nn01318660\nn01318894\nn01319001\nn01319187\nn01319467\nn01319685\nn01320872\nn01321123\nn01321230\nn01321456\nn01321579\nn01321770\nn01321854\nn01322221\nn01322343\nn01322508\nn01322604\nn01322685\nn01322898\nn01322983\nn01323068\nn01323155\nn01323261\nn01323355\nn01323493\nn01323599\nn01323781\nn01324305\nn01324431\nn01324610\nn01324799\nn01324916\nn01325060\nn01326291\nn01327909\nn01329186\nn01330126\nn01330497\nn01332181\nn01333082\nn01333483\nn01333610\nn01334217\nn01334690\nn01335218\nn01337191\nn01337734\nn01338685\nn01339083\nn01339336\nn01339471\nn01339801\nn01340014\nn01340522\nn01340785\nn01340935\nn01341090\nn01342269\nn01347583\nn01349735\nn01350226\nn01350701\nn01351170\nn01351315\nn01357328\nn01357507\nn01358572\nn01359762\nn01362336\nn01363719\nn01365474\nn01365885\nn01366700\nn01367772\nn01368672\nn01369358\nn01369484\nn01374703\nn01374846\nn01375204\nn01376237\nn01376437\nn01376543\nn01377278\nn01377510\nn01377694\nn01378545\nn01379389\nn01380610\nn01380754\nn01381044\nn01382033\nn01384084\nn01384164\nn01384687\nn01385017\nn01385330\nn01386007\nn01386182\nn01386354\nn01387065\nn01389507\nn01390123\nn01390763\nn01392275\nn01392380\nn01393486\nn01394040\nn01394492\nn01394771\nn01395254\nn01396048\nn01396617\nn01397114\nn01397690\nn01397871\nn01400247\nn01400391\nn01402600\nn01403457\nn01404365\nn01404495\nn01405007\nn01405616\nn01407798\nn01410457\nn01411450\nn01412694\nn01413457\nn01414216\nn01415626\nn01415920\nn01416213\nn01418498\nn01418620\nn01419332\nn01419573\nn01419888\nn01421333\nn01421807\nn01422185\nn01422335\nn01422450\nn01423302\nn01423617\nn01424420\nn01425223\nn01427399\nn01429172\nn01438208\nn01438581\nn01439121\nn01439514\nn01439808\nn01440160\nn01440242\nn01440467\nn01440764\nn01441117\nn01441272\nn0
1441425\nn01441910\nn01442450\nn01442710\nn01442972\nn01443243\nn01443537\nn01443831\nn01444339\nn01444783\nn01445429\nn01445593\nn01445857\nn01446152\nn01446589\nn01446760\nn01447139\nn01447331\nn01447658\nn01447946\nn01448291\nn01448594\nn01448951\nn01449374\nn01449712\nn01449980\nn01450661\nn01450950\nn01451115\nn01451295\nn01451426\nn01451863\nn01452345\nn01453087\nn01453475\nn01453742\nn01454545\nn01454856\nn01455317\nn01455461\nn01455778\nn01456137\nn01456454\nn01456756\nn01457082\nn01457407\nn01457852\nn01458746\nn01458842\nn01459791\nn01460303\nn01461315\nn01461646\nn01462042\nn01462544\nn01462803\nn01464844\nn01466257\nn01467336\nn01467804\nn01468238\nn01468712\nn01469103\nn01469723\nn01470145\nn01470479\nn01470733\nn01470895\nn01471682\nn01472303\nn01472502\nn01473806\nn01474283\nn01474864\nn01475232\nn01475940\nn01476418\nn01477080\nn01477525\nn01477875\nn01478511\nn01478969\nn01479213\nn01479820\nn01480106\nn01480516\nn01480880\nn01481331\nn01481498\nn01482071\nn01482330\nn01483021\nn01483522\nn01483830\nn01484097\nn01484285\nn01484447\nn01484562\nn01484850\nn01485479\nn01486010\nn01486540\nn01486838\nn01487506\nn01488038\nn01488918\nn01489501\nn01489709\nn01489920\nn01490112\nn01490360\nn01490670\nn01491006\nn01491361\nn01491661\nn01491874\nn01492357\nn01492569\nn01492708\nn01492860\nn01493146\nn01493541\nn01493829\nn01494041\nn01494475\nn01494757\nn01494882\nn01495006\nn01495493\nn01495701\nn01496331\nn01497118\nn01497413\nn01497738\nn01498041\nn01498406\nn01498699\nn01498989\nn01499396\nn01499732\nn01500091\nn01500476\nn01500854\nn01501160\nn01501641\nn01501777\nn01501948\nn01502101\nn01503061\nn01503976\nn01504179\nn01504344\nn01514668\nn01514752\nn01514859\nn01514926\nn01515078\nn01515217\nn01515303\nn01516212\nn01517389\nn01517565\nn01517966\nn01518878\nn01519563\nn01519873\nn01520576\nn01521399\nn01521756\nn01522450\nn01523105\nn01524359\nn01524761\nn01525720\nn01526521\nn01526766\nn01527194\nn01527347\nn01527617\nn01527917\nn01528396\nn01528654\n
n01528845\nn01529672\nn01530439\nn01530575\nn01531178\nn01531344\nn01531512\nn01531639\nn01531811\nn01531971\nn01532325\nn01532511\nn01532829\nn01533000\nn01533339\nn01533481\nn01533651\nn01533893\nn01534155\nn01534433\nn01534582\nn01534762\nn01535140\nn01535469\nn01535690\nn01536035\nn01536186\nn01536334\nn01536644\nn01536780\nn01537134\nn01537544\nn01537895\nn01538059\nn01538200\nn01538362\nn01538630\nn01538955\nn01539272\nn01539573\nn01539925\nn01540090\nn01540233\nn01540566\nn01540832\nn01541102\nn01541386\nn01541760\nn01541922\nn01542168\nn01542433\nn01542786\nn01543175\nn01543383\nn01543632\nn01543936\nn01544208\nn01544389\nn01544704\nn01545574\nn01546039\nn01546506\nn01546921\nn01547832\nn01548301\nn01548492\nn01548694\nn01548865\nn01549053\nn01549430\nn01549641\nn01549886\nn01550172\nn01550761\nn01551080\nn01551300\nn01551711\nn01552034\nn01552333\nn01552813\nn01553142\nn01553527\nn01553762\nn01554017\nn01554448\nn01555004\nn01555305\nn01555809\nn01556182\nn01556514\nn01557185\nn01557962\nn01558149\nn01558307\nn01558461\nn01558594\nn01558765\nn01558993\nn01559160\nn01559477\nn01559639\nn01559804\nn01560105\nn01560280\nn01560419\nn01560636\nn01560793\nn01560935\nn01561181\nn01561452\nn01561732\nn01562014\nn01562265\nn01562451\nn01563128\nn01563449\nn01563746\nn01563945\nn01564101\nn01564217\nn01564394\nn01564773\nn01564914\nn01565078\nn01565345\nn01565599\nn01565930\nn01566207\nn01566645\nn01567133\nn01567678\nn01567879\nn01568132\nn01568294\nn01568720\nn01568892\nn01569060\nn01569262\nn01569423\nn01569566\nn01569836\nn01569971\nn01570267\nn01570421\nn01570676\nn01570839\nn01571410\nn01571904\nn01572328\nn01572489\nn01572654\nn01572782\nn01573074\nn01573240\nn01573360\nn01573627\nn01573898\nn01574045\nn01574390\nn01574560\nn01574801\nn01575117\nn01575401\nn01575745\nn01576076\nn01576358\nn01576695\nn01577035\nn01577458\nn01577659\nn01577941\nn01578180\nn01578575\nn01579028\nn01579149\nn01579260\nn01579410\nn01579578\nn01579729\nn01580077\nn01580379\nn01580490
\nn01580772\nn01580870\nn01581166\nn01581434\nn01581730\nn01581874\nn01581984\nn01582220\nn01582398\nn01582498\nn01582856\nn01583209\nn01583495\nn01583828\nn01584225\nn01584695\nn01584853\nn01585121\nn01585287\nn01585422\nn01585715\nn01586020\nn01586374\nn01586941\nn01587278\nn01587526\nn01587834\nn01588002\nn01588431\nn01588725\nn01588996\nn01589286\nn01589718\nn01589893\nn01590220\nn01591005\nn01591123\nn01591301\nn01591697\nn01592084\nn01592257\nn01592387\nn01592540\nn01592694\nn01593028\nn01593282\nn01593553\nn01594004\nn01594372\nn01594787\nn01594968\nn01595168\nn01595450\nn01595624\nn01595974\nn01596273\nn01596608\nn01597022\nn01597336\nn01597737\nn01597906\nn01598074\nn01598271\nn01598588\nn01598988\nn01599159\nn01599269\nn01599388\nn01599556\nn01599741\nn01600085\nn01600341\nn01600657\nn01601068\nn01601410\nn01601694\nn01602080\nn01602209\nn01602630\nn01602832\nn01603000\nn01603152\nn01603600\nn01603812\nn01603953\nn01604330\nn01604968\nn01605630\nn01606097\nn01606177\nn01606522\nn01606672\nn01606809\nn01606978\nn01607309\nn01607429\nn01607600\nn01607812\nn01607962\nn01608265\nn01608432\nn01608814\nn01609062\nn01609391\nn01609751\nn01609956\nn01610100\nn01610226\nn01610552\nn01610955\nn01611472\nn01611674\nn01611800\nn01611969\nn01612122\nn01612275\nn01612476\nn01612628\nn01612955\nn01613177\nn01613294\nn01613615\nn01613807\nn01614038\nn01614343\nn01614556\nn01614925\nn01615121\nn01615303\nn01615458\nn01615703\nn01616086\nn01616318\nn01616551\nn01616764\nn01617095\nn01617443\nn01617766\nn01618082\nn01618503\nn01618922\nn01619310\nn01619536\nn01619835\nn01620135\nn01620414\nn01620735\nn01621127\nn01621635\nn01622120\nn01622352\nn01622483\nn01622779\nn01622959\nn01623110\nn01623425\nn01623615\nn01623706\nn01623880\nn01624115\nn01624212\nn01624305\nn01624537\nn01624833\nn01625121\nn01625562\nn01627424\nn01628331\nn01628770\nn01629276\nn01629819\nn01629962\nn01630148\nn01630284\nn01630670\nn01630901\nn01631175\nn01631354\nn01631512\nn01631663\nn01632047\nn016323
08\nn01632458\nn01632601\nn01632777\nn01632952\nn01633406\nn01633781\nn01634227\nn01634522\nn01635027\nn01635176\nn01635480\nn01636127\nn01636352\nn01636510\nn01636829\nn01637112\nn01637338\nn01637615\nn01637932\nn01638194\nn01638329\nn01638722\nn01639187\nn01639765\nn01640846\nn01641206\nn01641391\nn01641577\nn01641739\nn01641930\nn01642097\nn01642257\nn01642391\nn01642539\nn01642943\nn01643255\nn01643507\nn01643896\nn01644373\nn01644900\nn01645466\nn01645776\nn01646292\nn01646388\nn01646555\nn01646648\nn01646802\nn01646902\nn01647033\nn01647180\nn01647303\nn01647466\nn01647640\nn01648139\nn01648356\nn01648620\nn01649170\nn01649412\nn01649556\nn01649726\nn01650167\nn01650690\nn01650901\nn01651059\nn01651285\nn01651487\nn01651641\nn01651778\nn01652026\nn01652297\nn01653026\nn01653223\nn01653509\nn01653773\nn01654083\nn01654637\nn01654863\nn01655344\nn01661091\nn01661592\nn01661818\nn01662060\nn01662622\nn01662784\nn01663401\nn01663782\nn01664065\nn01664369\nn01664492\nn01664674\nn01664990\nn01665541\nn01665932\nn01666228\nn01666585\nn01667114\nn01667432\nn01667778\nn01668091\nn01668436\nn01668665\nn01668892\nn01669191\nn01669372\nn01669654\nn01670092\nn01670535\nn01670802\nn01671125\nn01671479\nn01671705\nn01672032\nn01672432\nn01672611\nn01673282\nn01674216\nn01674464\nn01674990\nn01675352\nn01675722\nn01676755\nn01677366\nn01677747\nn01678043\nn01678343\nn01678657\nn01679005\nn01679307\nn01679626\nn01679962\nn01680264\nn01680478\nn01680655\nn01680813\nn01680983\nn01681328\nn01681653\nn01681940\nn01682172\nn01682435\nn01682714\nn01683201\nn01683558\nn01684133\nn01684578\nn01684741\nn01685439\nn01685808\nn01686044\nn01686220\nn01686403\nn01686609\nn01686808\nn01687128\nn01687290\nn01687665\nn01687978\nn01688243\nn01688961\nn01689081\nn01689411\nn01689811\nn01690149\nn01690466\nn01691217\nn01691652\nn01691951\nn01692333\nn01692523\nn01692864\nn01693175\nn01693334\nn01693783\nn01694178\nn01694311\nn01694709\nn01694955\nn01695060\nn01696633\nn01697178\nn01697457\nn0169
7611\nn01697749\nn01697978\nn01698434\nn01698640\nn01698782\nn01699040\nn01699254\nn01699675\nn01701551\nn01701859\nn01702256\nn01702479\nn01703011\nn01703161\nn01703569\nn01704103\nn01704323\nn01704626\nn01705010\nn01705591\nn01705934\nn01707294\nn01708106\nn01708998\nn01709484\nn01709876\nn01710177\nn01711160\nn01712008\nn01712752\nn01713170\nn01713764\nn01714231\nn01715888\nn01717016\nn01717229\nn01717467\nn01718096\nn01718414\nn01719403\nn01721174\nn01721898\nn01722670\nn01722998\nn01723579\nn01724231\nn01724840\nn01725086\nn01725713\nn01726203\nn01726692\nn01727646\nn01728266\nn01728572\nn01728920\nn01729322\nn01729672\nn01729977\nn01730185\nn01730307\nn01730563\nn01730812\nn01730960\nn01731137\nn01731277\nn01731545\nn01731764\nn01731941\nn01732093\nn01732244\nn01732614\nn01732789\nn01732989\nn01733214\nn01733466\nn01733757\nn01733957\nn01734104\nn01734418\nn01734637\nn01734808\nn01735189\nn01735439\nn01735577\nn01735728\nn01736032\nn01736375\nn01736796\nn01737021\nn01737472\nn01737728\nn01737875\nn01738065\nn01738306\nn01738601\nn01738731\nn01739094\nn01739381\nn01739647\nn01739871\nn01740131\nn01740551\nn01740885\nn01741232\nn01741442\nn01741562\nn01741943\nn01742172\nn01742447\nn01742821\nn01743086\nn01743605\nn01743936\nn01744100\nn01744270\nn01744401\nn01744555\nn01745125\nn01745484\nn01745902\nn01746191\nn01746359\nn01746952\nn01747285\nn01747589\nn01747885\nn01748264\nn01748389\nn01748686\nn01748906\nn01749244\nn01749582\nn01749742\nn01749939\nn01750167\nn01750437\nn01750743\nn01751036\nn01751215\nn01751472\nn01751748\nn01752165\nn01752585\nn01752736\nn01753032\nn01753180\nn01753488\nn01753959\nn01754370\nn01754533\nn01754876\nn01755581\nn01755740\nn01755952\nn01756089\nn01756291\nn01756508\nn01756733\nn01756916\nn01757115\nn01757343\nn01757677\nn01757901\nn01758141\nn01758757\nn01758895\nn01767661\nn01768244\nn01769347\nn01770081\nn01770393\nn01770795\nn01771100\nn01771417\nn01771766\nn01772222\nn01772664\nn01773157\nn01773549\nn01773797\nn01774097\nn01
774384\nn01774750\nn01775062\nn01775370\nn01775730\nn01776192\nn01776313\nn01776705\nn01777304\nn01777467\nn01777649\nn01777909\nn01778217\nn01778487\nn01778621\nn01778801\nn01779148\nn01779463\nn01779629\nn01779939\nn01780142\nn01780426\nn01780696\nn01781071\nn01781570\nn01781698\nn01781875\nn01782209\nn01782516\nn01783017\nn01783706\nn01784293\nn01784675\nn01785667\nn01786646\nn01787006\nn01787191\nn01787835\nn01788291\nn01788579\nn01788864\nn01789386\nn01789740\nn01790171\nn01790304\nn01790398\nn01790557\nn01790711\nn01790812\nn01791107\nn01791314\nn01791388\nn01791463\nn01791625\nn01791954\nn01792042\nn01792158\nn01792429\nn01792530\nn01792640\nn01792808\nn01792955\nn01793085\nn01793159\nn01793249\nn01793340\nn01793435\nn01793565\nn01793715\nn01794158\nn01794344\nn01794651\nn01795088\nn01795545\nn01795735\nn01795900\nn01796019\nn01796105\nn01796340\nn01796519\nn01796729\nn01797020\nn01797307\nn01797601\nn01797886\nn01798168\nn01798484\nn01798706\nn01798839\nn01798979\nn01799302\nn01799679\nn01800195\nn01800424\nn01800633\nn01801088\nn01801479\nn01801672\nn01801876\nn01802159\nn01802721\nn01803078\nn01803362\nn01803641\nn01803893\nn01804163\nn01804478\nn01804653\nn01804921\nn01805070\nn01805321\nn01805801\nn01806061\nn01806143\nn01806297\nn01806364\nn01806467\nn01806567\nn01806847\nn01807105\nn01807496\nn01807828\nn01808140\nn01808291\nn01808596\nn01809106\nn01809371\nn01809752\nn01810268\nn01810700\nn01811243\nn01811909\nn01812187\nn01812337\nn01812662\nn01812866\nn01813088\nn01813385\nn01813532\nn01813658\nn01813948\nn01814217\nn01814370\nn01814549\nn01814620\nn01814755\nn01814921\nn01815036\nn01815270\nn01815601\nn01816017\nn01816140\nn01816474\nn01816887\nn01817263\nn01817346\nn01817953\nn01818299\nn01818515\nn01818832\nn01819115\nn01819313\nn01819465\nn01819734\nn01820052\nn01820348\nn01820546\nn01820801\nn01821076\nn01821203\nn01821554\nn01821869\nn01822300\nn01822602\nn01823013\nn01823414\nn01823740\nn01824035\nn01824344\nn01824575\nn01824749\nn01825278\nn
01825930\nn01826364\nn01826680\nn01826844\nn01827403\nn01827793\nn01828096\nn01828556\nn01828970\nn01829413\nn01829869\nn01830042\nn01830479\nn01830915\nn01831360\nn01831712\nn01832167\nn01832493\nn01832813\nn01833112\nn01833415\nn01833805\nn01834177\nn01834540\nn01835276\nn01835769\nn01835918\nn01836087\nn01836673\nn01837072\nn01837526\nn01838038\nn01838598\nn01839086\nn01839330\nn01839598\nn01839750\nn01839949\nn01840120\nn01840412\nn01840775\nn01841102\nn01841288\nn01841441\nn01841679\nn01841943\nn01842235\nn01842504\nn01842788\nn01843065\nn01843383\nn01843719\nn01844231\nn01844551\nn01844746\nn01844917\nn01845132\nn01845477\nn01846331\nn01847000\nn01847089\nn01847170\nn01847253\nn01847407\nn01847806\nn01847978\nn01848123\nn01848323\nn01848453\nn01848555\nn01848648\nn01848840\nn01848976\nn01849157\nn01849466\nn01849676\nn01849863\nn01850192\nn01850373\nn01850553\nn01850873\nn01851038\nn01851207\nn01851375\nn01851573\nn01851731\nn01851895\nn01852142\nn01852329\nn01852400\nn01852671\nn01852861\nn01853195\nn01853498\nn01853666\nn01853870\nn01854415\nn01854700\nn01854838\nn01855032\nn01855188\nn01855476\nn01855672\nn01856072\nn01856155\nn01856380\nn01856553\nn01856890\nn01857079\nn01857325\nn01857512\nn01857632\nn01857851\nn01858281\nn01858441\nn01858780\nn01858845\nn01858906\nn01859190\nn01859325\nn01859496\nn01859689\nn01859852\nn01860002\nn01860187\nn01860497\nn01860864\nn01861148\nn01861330\nn01861778\nn01862399\nn01871265\nn01871543\nn01871875\nn01872401\nn01872772\nn01873310\nn01874434\nn01874928\nn01875313\nn01875610\nn01876034\nn01876326\nn01876667\nn01877134\nn01877606\nn01877812\nn01878061\nn01878335\nn01878639\nn01878929\nn01879217\nn01879509\nn01879837\nn01880152\nn01880473\nn01880716\nn01880813\nn01881171\nn01881564\nn01881857\nn01882125\nn01882714\nn01883070\nn01883513\nn01883920\nn01884104\nn01884203\nn01884476\nn01884834\nn01885158\nn01885498\nn01886045\nn01886756\nn01887474\nn01887623\nn01887787\nn01887896\nn01888045\nn01888181\nn01888264\nn01888411\
nn01889074\nn01889520\nn01889849\nn01890144\nn01890564\nn01890860\nn01891013\nn01891274\nn01891633\nn01892030\nn01892145\nn01892385\nn01892551\nn01892744\nn01893021\nn01893164\nn01893399\nn01893825\nn01894207\nn01894522\nn01894956\nn01896844\nn01897257\nn01897426\nn01897536\nn01897667\nn01898593\nn01899894\nn01900150\nn01903234\nn01903346\nn01903498\nn01904029\nn01904806\nn01904886\nn01905321\nn01905661\nn01906749\nn01907287\nn01907738\nn01908042\nn01908958\nn01909422\nn01909788\nn01909906\nn01910252\nn01910747\nn01911063\nn01911403\nn01911839\nn01912152\nn01912454\nn01912809\nn01913166\nn01913346\nn01913440\nn01914163\nn01914609\nn01914830\nn01915700\nn01915811\nn01916187\nn01916388\nn01916481\nn01916588\nn01916925\nn01917289\nn01917611\nn01917882\nn01918744\nn01919385\nn01920051\nn01920438\nn01921059\nn01922303\nn01922717\nn01922948\nn01923025\nn01923404\nn01923890\nn01924800\nn01924916\nn01925270\nn01925695\nn01925916\nn01926379\nn01926689\nn01927159\nn01927456\nn01927928\nn01928215\nn01928517\nn01928865\nn01929186\nn01930112\nn01930852\nn01931140\nn01931520\nn01931714\nn01932151\nn01932936\nn01933151\nn01933478\nn01933988\nn01934440\nn01934844\nn01935176\nn01935395\nn01936391\nn01936671\nn01936858\nn01937579\nn01937909\nn01938454\nn01938735\nn01940736\nn01941223\nn01941340\nn01942177\nn01942869\nn01943087\nn01943541\nn01943899\nn01944118\nn01944390\nn01944812\nn01944955\nn01945143\nn01945340\nn01945685\nn01945845\nn01946277\nn01946630\nn01946827\nn01947139\nn01947396\nn01947997\nn01948446\nn01948573\nn01949085\nn01949499\nn01949973\nn01950731\nn01951274\nn01951613\nn01952029\nn01952712\nn01953361\nn01953594\nn01953762\nn01954516\nn01955084\nn01955933\nn01956344\nn01956481\nn01956764\nn01957335\nn01958038\nn01958346\nn01958435\nn01958531\nn01959029\nn01959492\nn01959985\nn01960177\nn01960459\nn01961234\nn01961600\nn01961985\nn01962506\nn01962788\nn01963317\nn01963479\nn01963571\nn01964049\nn01964271\nn01964441\nn01964957\nn01965252\nn01965529\nn01965889\nn0196637
7\nn01966586\nn01967094\nn01967308\nn01967963\nn01968315\nn01968897\nn01969726\nn01970164\nn01970667\nn01971094\nn01971280\nn01971620\nn01971850\nn01972131\nn01972541\nn01973148\nn01974773\nn01975687\nn01976146\nn01976868\nn01976957\nn01977485\nn01978010\nn01978136\nn01978287\nn01978455\nn01978587\nn01978930\nn01979269\nn01979526\nn01979874\nn01980166\nn01980655\nn01981276\nn01981702\nn01982068\nn01982347\nn01982650\nn01983048\nn01983481\nn01983674\nn01983829\nn01984245\nn01984695\nn01985128\nn01985493\nn01985797\nn01986214\nn01986806\nn01987076\nn01987545\nn01987727\nn01988203\nn01988701\nn01988869\nn01989516\nn01989869\nn01990007\nn01990516\nn01990800\nn01991028\nn01991520\nn01992262\nn01992423\nn01992773\nn01993525\nn01993830\nn01994910\nn01995514\nn01995686\nn01996280\nn01996585\nn01997119\nn01997825\nn01998183\nn01998741\nn01999186\nn01999767\nn02000954\nn02002075\nn02002556\nn02002724\nn02003037\nn02003204\nn02003577\nn02003839\nn02004131\nn02004492\nn02004855\nn02005399\nn02005790\nn02006063\nn02006364\nn02006656\nn02006985\nn02007284\nn02007558\nn02008041\nn02008497\nn02008643\nn02008796\nn02009229\nn02009380\nn02009508\nn02009750\nn02009912\nn02010272\nn02010453\nn02010728\nn02011016\nn02011281\nn02011460\nn02011805\nn02011943\nn02012185\nn02012849\nn02013177\nn02013567\nn02013706\nn02014237\nn02014524\nn02014941\nn02015357\nn02015554\nn02015797\nn02016066\nn02016358\nn02016659\nn02016816\nn02016956\nn02017213\nn02017475\nn02017725\nn02018027\nn02018207\nn02018368\nn02018795\nn02019190\nn02019438\nn02019929\nn02020219\nn02020578\nn02021050\nn02021281\nn02021795\nn02022684\nn02023341\nn02023855\nn02023992\nn02024185\nn02024479\nn02024763\nn02025043\nn02025239\nn02025389\nn02026059\nn02026629\nn02026948\nn02027075\nn02027357\nn02027492\nn02027897\nn02028035\nn02028175\nn02028342\nn02028451\nn02028727\nn02028900\nn02029087\nn02029378\nn02029706\nn02030035\nn02030224\nn02030287\nn02030568\nn02030837\nn02030996\nn02031298\nn02031585\nn02031934\nn02032222\nn02032
355\nn02032480\nn02032769\nn02033041\nn02033208\nn02033324\nn02033561\nn02033779\nn02033882\nn02034129\nn02034295\nn02034661\nn02034971\nn02035210\nn02035402\nn02035656\nn02036053\nn02036228\nn02036711\nn02037110\nn02037464\nn02037869\nn02038141\nn02038466\nn02038993\nn02039171\nn02039497\nn02039780\nn02040266\nn02040505\nn02041085\nn02041246\nn02041678\nn02041875\nn02042046\nn02042180\nn02042472\nn02042759\nn02043063\nn02043333\nn02043808\nn02044178\nn02044517\nn02044778\nn02044908\nn02045369\nn02045596\nn02045864\nn02046171\nn02046759\nn02046939\nn02047045\nn02047260\nn02047411\nn02047517\nn02047614\nn02047975\nn02048115\nn02048353\nn02048698\nn02049088\nn02049532\nn02050004\nn02050313\nn02050442\nn02050586\nn02050809\nn02051059\nn02051474\nn02051845\nn02052204\nn02052365\nn02052775\nn02053083\nn02053425\nn02053584\nn02054036\nn02054502\nn02054711\nn02055107\nn02055658\nn02055803\nn02056228\nn02056570\nn02056728\nn02057035\nn02057330\nn02057731\nn02057898\nn02058221\nn02058594\nn02058747\nn02059162\nn02059541\nn02059852\nn02060133\nn02060411\nn02060569\nn02060889\nn02061217\nn02061560\nn02061853\nn02062017\nn02062430\nn02062744\nn02063224\nn02063662\nn02064000\nn02064338\nn02064816\nn02065026\nn02065263\nn02065407\nn02065726\nn02066245\nn02066707\nn02067240\nn02067603\nn02067768\nn02068206\nn02068541\nn02068974\nn02069412\nn02069701\nn02069974\nn02070174\nn02070430\nn02070624\nn02070776\nn02071028\nn02071294\nn02071636\nn02072040\nn02072493\nn02072798\nn02073250\nn02073831\nn02074367\nn02074726\nn02075296\nn02075612\nn02075927\nn02076196\nn02076402\nn02076779\nn02077152\nn02077384\nn02077658\nn02077787\nn02077923\nn02078292\nn02078574\nn02078738\nn02079005\nn02079389\nn02079851\nn02080146\nn02080415\nn02080713\nn02081060\nn02081571\nn02081798\nn02081927\nn02082056\nn02082190\nn02082791\nn02083346\nn02083672\nn02083780\nn02084071\nn02084732\nn02084861\nn02085019\nn02085118\nn02085272\nn02085374\nn02085620\nn02085782\nn02085936\nn02086079\nn02086240\nn02086346\nn020
86478\nn02086646\nn02086753\nn02086910\nn02087046\nn02087122\nn02087314\nn02087394\nn02087551\nn02088094\nn02088238\nn02088364\nn02088466\nn02088632\nn02088745\nn02088839\nn02088992\nn02089078\nn02089232\nn02089468\nn02089555\nn02089725\nn02089867\nn02089973\nn02090129\nn02090253\nn02090379\nn02090475\nn02090622\nn02090721\nn02090827\nn02091032\nn02091134\nn02091244\nn02091467\nn02091635\nn02091831\nn02092002\nn02092173\nn02092339\nn02092468\nn02093056\nn02093256\nn02093428\nn02093647\nn02093754\nn02093859\nn02093991\nn02094114\nn02094258\nn02094433\nn02094562\nn02094721\nn02094931\nn02095050\nn02095212\nn02095314\nn02095412\nn02095570\nn02095727\nn02095889\nn02096051\nn02096177\nn02096294\nn02096437\nn02096585\nn02096756\nn02097047\nn02097130\nn02097209\nn02097298\nn02097474\nn02097658\nn02097786\nn02097967\nn02098105\nn02098286\nn02098413\nn02098550\nn02098806\nn02098906\nn02099029\nn02099267\nn02099429\nn02099601\nn02099712\nn02099849\nn02099997\nn02100236\nn02100399\nn02100583\nn02100735\nn02100877\nn02101006\nn02101108\nn02101388\nn02101556\nn02101670\nn02101861\nn02102040\nn02102177\nn02102318\nn02102480\nn02102605\nn02102806\nn02102973\nn02103181\nn02103406\nn02103841\nn02104029\nn02104184\nn02104280\nn02104365\nn02104523\nn02104882\nn02105056\nn02105162\nn02105251\nn02105412\nn02105505\nn02105641\nn02105855\nn02106030\nn02106166\nn02106382\nn02106550\nn02106662\nn02106854\nn02106966\nn02107142\nn02107312\nn02107420\nn02107574\nn02107683\nn02107908\nn02108000\nn02108089\nn02108254\nn02108422\nn02108551\nn02108672\nn02108915\nn02109047\nn02109150\nn02109256\nn02109391\nn02109525\nn02109687\nn02109811\nn02109961\nn02110063\nn02110185\nn02110341\nn02110532\nn02110627\nn02110806\nn02110958\nn02111129\nn02111277\nn02111500\nn02111626\nn02111889\nn02112018\nn02112137\nn02112350\nn02112497\nn02112706\nn02112826\nn02113023\nn02113186\nn02113335\nn02113624\nn02113712\nn02113799\nn02113892\nn02113978\nn02114100\nn02114367\nn02114548\nn02114712\nn02114855\nn02115012\nn0
2115096\nn02115335\nn02115641\nn02115913\nn02116185\nn02116450\nn02116738\nn02117135\nn02117512\nn02117646\nn02117900\nn02118176\nn02118333\nn02118643\nn02118707\nn02119022\nn02119247\nn02119359\nn02119477\nn02119634\nn02119789\nn02120079\nn02120278\nn02120505\nn02120997\nn02121620\nn02121808\nn02122298\nn02122430\nn02122510\nn02122580\nn02122725\nn02122810\nn02122878\nn02122948\nn02123045\nn02123159\nn02123242\nn02123394\nn02123478\nn02123597\nn02123785\nn02123917\nn02124075\nn02124157\nn02124313\nn02124484\nn02124623\nn02125010\nn02125081\nn02125311\nn02125494\nn02125689\nn02125872\nn02126028\nn02126139\nn02126317\nn02126640\nn02126787\nn02127052\nn02127292\nn02127381\nn02127482\nn02127586\nn02127678\nn02127808\nn02128385\nn02128598\nn02128669\nn02128757\nn02128925\nn02129165\nn02129463\nn02129530\nn02129604\nn02129837\nn02129923\nn02129991\nn02130086\nn02130308\nn02130545\nn02130925\nn02131653\nn02132136\nn02132320\nn02132466\nn02132580\nn02132788\nn02133161\nn02133400\nn02133704\nn02134084\nn02134418\nn02134971\nn02135220\nn02135610\nn02135844\nn02136103\nn02136285\nn02136452\nn02136794\nn02137015\nn02137302\nn02137549\nn02137722\nn02137888\nn02138169\nn02138441\nn02138647\nn02138777\nn02139199\nn02139671\nn02140049\nn02140179\nn02140268\nn02140491\nn02140858\nn02141306\nn02141611\nn02141713\nn02142407\nn02142734\nn02142898\nn02143142\nn02143439\nn02143891\nn02144251\nn02144593\nn02144936\nn02145424\nn02145910\nn02146201\nn02146371\nn02146700\nn02146879\nn02147173\nn02147328\nn02147591\nn02147947\nn02148088\nn02148512\nn02148835\nn02148991\nn02149420\nn02149653\nn02149861\nn02150134\nn02150482\nn02150885\nn02151230\nn02152740\nn02152881\nn02152991\nn02153109\nn02153203\nn02153809\nn02156732\nn02156871\nn02157206\nn02157285\nn02159955\nn02160947\nn02161225\nn02161338\nn02161457\nn02161588\nn02162561\nn02163008\nn02163297\nn02164464\nn02165105\nn02165456\nn02165877\nn02166229\nn02166567\nn02166826\nn02167151\nn02167505\nn02167820\nn02167944\nn02168245\nn02168427\n
n02168699\nn02169023\nn02169218\nn02169497\nn02169705\nn02169974\nn02170400\nn02170599\nn02170738\nn02170993\nn02171164\nn02171453\nn02171869\nn02172182\nn02172518\nn02172678\nn02172761\nn02172870\nn02173113\nn02173373\nn02173784\nn02174001\nn02174355\nn02174659\nn02175014\nn02175569\nn02175916\nn02176261\nn02176439\nn02176747\nn02176916\nn02177196\nn02177506\nn02177775\nn02177972\nn02178411\nn02178717\nn02179012\nn02179192\nn02179340\nn02179891\nn02180233\nn02180427\nn02180875\nn02181235\nn02181477\nn02181724\nn02182045\nn02182355\nn02182642\nn02182930\nn02183096\nn02183507\nn02183857\nn02184473\nn02184589\nn02184720\nn02185167\nn02185481\nn02186153\nn02186717\nn02187150\nn02187279\nn02187554\nn02187900\nn02188699\nn02189363\nn02189670\nn02190166\nn02190790\nn02191273\nn02191773\nn02191979\nn02192252\nn02192513\nn02192814\nn02193009\nn02193163\nn02194249\nn02194750\nn02195091\nn02195526\nn02195819\nn02196119\nn02196344\nn02196896\nn02197185\nn02197689\nn02197877\nn02198129\nn02198532\nn02198859\nn02199170\nn02199502\nn02200198\nn02200509\nn02200630\nn02200850\nn02201000\nn02201497\nn02201626\nn02202006\nn02202124\nn02202287\nn02202678\nn02203152\nn02203592\nn02203978\nn02204249\nn02204722\nn02204907\nn02205219\nn02205673\nn02206270\nn02206856\nn02207179\nn02207345\nn02207449\nn02207647\nn02207805\nn02208280\nn02208498\nn02208848\nn02208979\nn02209111\nn02209354\nn02209624\nn02209964\nn02210427\nn02210921\nn02211444\nn02211627\nn02211896\nn02212062\nn02212602\nn02212958\nn02213107\nn02213239\nn02213543\nn02213663\nn02213788\nn02214096\nn02214341\nn02214499\nn02214660\nn02214773\nn02215161\nn02215621\nn02215770\nn02216211\nn02216365\nn02216740\nn02217563\nn02217839\nn02218134\nn02218371\nn02218713\nn02219015\nn02219486\nn02220055\nn02220225\nn02220518\nn02220804\nn02221083\nn02221414\nn02221571\nn02221715\nn02221820\nn02222035\nn02222321\nn02222582\nn02223266\nn02223520\nn02224023\nn02224713\nn02225081\nn02225798\nn02226183\nn02226429\nn02226821\nn02226970\nn02227247
\nn02227604\nn02227966\nn02228341\nn02228697\nn02229156\nn02229544\nn02229765\nn02230023\nn02230187\nn02230480\nn02230634\nn02231052\nn02231487\nn02231803\nn02232223\nn02233338\nn02233943\nn02234355\nn02234570\nn02234848\nn02235205\nn02236044\nn02236241\nn02236355\nn02236896\nn02237424\nn02237581\nn02237868\nn02238235\nn02238358\nn02238594\nn02238887\nn02239192\nn02239528\nn02239774\nn02240068\nn02240517\nn02241008\nn02241426\nn02241569\nn02241799\nn02242137\nn02242455\nn02243209\nn02243562\nn02243878\nn02244173\nn02244515\nn02244797\nn02245111\nn02245443\nn02246011\nn02246628\nn02246941\nn02247216\nn02247511\nn02247655\nn02248062\nn02248368\nn02248510\nn02248887\nn02249134\nn02249515\nn02249809\nn02250280\nn02250822\nn02251067\nn02251233\nn02251593\nn02251775\nn02252226\nn02252799\nn02252972\nn02253127\nn02253264\nn02253494\nn02253715\nn02253913\nn02254246\nn02254697\nn02254901\nn02255023\nn02255391\nn02256172\nn02256656\nn02257003\nn02257284\nn02257715\nn02257985\nn02258198\nn02258508\nn02258629\nn02259212\nn02259377\nn02259708\nn02259987\nn02260421\nn02260863\nn02261063\nn02261419\nn02261757\nn02262178\nn02262449\nn02262803\nn02263378\nn02264021\nn02264232\nn02264363\nn02264591\nn02264885\nn02265330\nn02266050\nn02266269\nn02266421\nn02266864\nn02267208\nn02267483\nn02268148\nn02268443\nn02268853\nn02269196\nn02269340\nn02269522\nn02269657\nn02270011\nn02270200\nn02270623\nn02270945\nn02271222\nn02271570\nn02271897\nn02272286\nn02272552\nn02272871\nn02273392\nn02274024\nn02274259\nn02274822\nn02275560\nn02275773\nn02276078\nn02276258\nn02276355\nn02276749\nn02276902\nn02277094\nn02277268\nn02277422\nn02277742\nn02278024\nn02278210\nn02278463\nn02278839\nn02278980\nn02279257\nn02279637\nn02279972\nn02280458\nn02280649\nn02281015\nn02281136\nn02281267\nn02281406\nn02281787\nn02282257\nn02282385\nn02282553\nn02282903\nn02283077\nn02283201\nn02283617\nn02283951\nn02284224\nn02284611\nn02284884\nn02285179\nn02285548\nn02285801\nn02286089\nn02286425\nn02286654\nn022870
04\nn02287352\nn02287622\nn02287799\nn02287987\nn02288122\nn02288268\nn02288789\nn02289307\nn02289610\nn02289988\nn02290340\nn02290664\nn02290870\nn02291220\nn02291572\nn02291748\nn02292085\nn02292401\nn02292692\nn02293352\nn02293868\nn02294097\nn02294407\nn02294577\nn02295064\nn02295390\nn02295870\nn02296021\nn02296276\nn02296612\nn02296912\nn02297294\nn02297442\nn02297819\nn02297938\nn02298095\nn02298218\nn02298541\nn02299039\nn02299157\nn02299378\nn02299505\nn02299846\nn02300173\nn02300554\nn02300797\nn02301452\nn02301935\nn02302244\nn02302459\nn02302620\nn02302969\nn02303284\nn02303585\nn02303777\nn02304036\nn02304432\nn02304657\nn02304797\nn02305085\nn02305407\nn02305636\nn02305929\nn02306433\nn02306825\nn02307176\nn02307325\nn02307515\nn02307681\nn02307910\nn02308033\nn02308139\nn02308471\nn02308618\nn02308735\nn02309120\nn02309242\nn02309337\nn02309841\nn02310000\nn02310149\nn02310334\nn02310585\nn02310717\nn02310941\nn02311060\nn02311617\nn02311748\nn02312006\nn02312175\nn02312325\nn02312427\nn02312640\nn02312912\nn02313008\nn02313360\nn02313709\nn02315487\nn02315821\nn02316707\nn02317335\nn02317781\nn02318167\nn02318687\nn02319095\nn02319308\nn02319555\nn02319829\nn02320127\nn02320465\nn02321170\nn02321529\nn02322047\nn02322992\nn02323449\nn02323902\nn02324045\nn02324431\nn02324514\nn02324587\nn02324850\nn02325366\nn02325722\nn02325884\nn02326074\nn02326432\nn02326763\nn02326862\nn02327028\nn02327175\nn02327435\nn02327656\nn02327842\nn02328009\nn02328150\nn02328429\nn02328820\nn02328942\nn02329401\nn02330245\nn02331046\nn02331309\nn02331842\nn02332156\nn02332447\nn02332755\nn02332954\nn02333190\nn02333546\nn02333733\nn02333819\nn02333909\nn02334201\nn02334460\nn02334728\nn02335127\nn02335231\nn02336011\nn02336275\nn02336641\nn02336826\nn02337001\nn02337171\nn02337332\nn02337598\nn02337902\nn02338145\nn02338449\nn02338722\nn02338901\nn02339282\nn02339376\nn02339922\nn02340186\nn02340358\nn02340640\nn02340930\nn02341288\nn02341475\nn02341616\nn02341974\nn0234
2250\nn02342534\nn02342885\nn02343058\nn02343320\nn02343772\nn02344175\nn02344270\nn02344408\nn02344528\nn02344918\nn02345078\nn02345340\nn02345600\nn02345774\nn02345997\nn02346170\nn02346627\nn02346998\nn02347274\nn02347573\nn02347744\nn02348173\nn02348788\nn02349205\nn02349390\nn02349557\nn02349847\nn02350105\nn02350357\nn02350670\nn02350989\nn02351343\nn02351870\nn02352002\nn02352290\nn02352591\nn02352932\nn02353172\nn02353411\nn02353861\nn02354162\nn02354320\nn02354621\nn02354781\nn02355227\nn02355477\nn02356381\nn02356612\nn02356798\nn02356977\nn02357111\nn02357401\nn02357585\nn02357911\nn02358091\nn02358390\nn02358584\nn02358712\nn02358890\nn02359047\nn02359324\nn02359556\nn02359667\nn02359915\nn02360282\nn02360480\nn02360781\nn02360933\nn02361090\nn02361337\nn02361587\nn02361706\nn02361850\nn02362194\nn02363005\nn02363245\nn02363351\nn02363996\nn02364520\nn02364673\nn02364840\nn02365108\nn02365480\nn02366002\nn02366301\nn02366579\nn02366959\nn02367492\nn02367812\nn02368116\nn02368399\nn02368821\nn02369293\nn02369555\nn02369680\nn02369935\nn02370137\nn02370525\nn02370806\nn02371344\nn02372140\nn02372584\nn02372952\nn02373336\nn02374149\nn02374451\nn02375302\nn02375438\nn02375757\nn02375862\nn02376542\nn02376679\nn02376791\nn02376918\nn02377063\nn02377181\nn02377291\nn02377388\nn02377480\nn02377603\nn02377703\nn02378149\nn02378299\nn02378415\nn02378541\nn02378625\nn02378755\nn02378870\nn02378969\nn02379081\nn02379183\nn02379329\nn02379430\nn02379630\nn02379743\nn02379908\nn02380052\nn02380335\nn02380464\nn02380583\nn02380745\nn02380875\nn02381004\nn02381119\nn02381261\nn02381364\nn02381460\nn02381609\nn02381831\nn02382039\nn02382132\nn02382204\nn02382338\nn02382437\nn02382635\nn02382750\nn02382850\nn02382948\nn02383231\nn02384741\nn02384858\nn02385002\nn02385098\nn02385214\nn02385580\nn02385676\nn02385776\nn02385898\nn02386014\nn02386141\nn02386224\nn02386310\nn02386496\nn02386746\nn02386853\nn02386968\nn02387093\nn02387254\nn02387346\nn02387452\nn02387722\nn02
387887\nn02387983\nn02388143\nn02388276\nn02388453\nn02388588\nn02388735\nn02388832\nn02388917\nn02389026\nn02389128\nn02389261\nn02389346\nn02389559\nn02389779\nn02389865\nn02389943\nn02390015\nn02390101\nn02390258\nn02390454\nn02390640\nn02390738\nn02390834\nn02390938\nn02391049\nn02391234\nn02391373\nn02391508\nn02391617\nn02391994\nn02392434\nn02392555\nn02392824\nn02393161\nn02393580\nn02393807\nn02393940\nn02394477\nn02395003\nn02395406\nn02395694\nn02395855\nn02395931\nn02396014\nn02396088\nn02396157\nn02396427\nn02396796\nn02397096\nn02397529\nn02397744\nn02397987\nn02398521\nn02399000\nn02401031\nn02402010\nn02402175\nn02402425\nn02403003\nn02403153\nn02403231\nn02403325\nn02403454\nn02403740\nn02403820\nn02403920\nn02404028\nn02404186\nn02404432\nn02404573\nn02404906\nn02405101\nn02405302\nn02405440\nn02405577\nn02405692\nn02405799\nn02405929\nn02406046\nn02406174\nn02406432\nn02406533\nn02406647\nn02406749\nn02406859\nn02406952\nn02407071\nn02407172\nn02407276\nn02407390\nn02407521\nn02407625\nn02407763\nn02407959\nn02408429\nn02408660\nn02408817\nn02409038\nn02409202\nn02409508\nn02409870\nn02410011\nn02410141\nn02410509\nn02410702\nn02410900\nn02411206\nn02411705\nn02411999\nn02412080\nn02412210\nn02412440\nn02412629\nn02412700\nn02412787\nn02412909\nn02412977\nn02413050\nn02413131\nn02413484\nn02413593\nn02413717\nn02413824\nn02413917\nn02414043\nn02414209\nn02414290\nn02414442\nn02414578\nn02414763\nn02414904\nn02415130\nn02415253\nn02415435\nn02415577\nn02415829\nn02416104\nn02416519\nn02416820\nn02416880\nn02416964\nn02417070\nn02417242\nn02417387\nn02417534\nn02417663\nn02417785\nn02417914\nn02418064\nn02418465\nn02418770\nn02419056\nn02419336\nn02419634\nn02419796\nn02420509\nn02420828\nn02421136\nn02421449\nn02421792\nn02422106\nn02422391\nn02422699\nn02423022\nn02423218\nn02423362\nn02423589\nn02424085\nn02424305\nn02424486\nn02424589\nn02424695\nn02424909\nn02425086\nn02425228\nn02425532\nn02425887\nn02426176\nn02426481\nn02426813\nn02427032\nn
02427183\nn02427470\nn02427576\nn02427724\nn02428089\nn02428349\nn02428508\nn02428842\nn02429456\nn02430045\nn02430559\nn02430643\nn02430748\nn02430830\nn02431122\nn02431337\nn02431441\nn02431542\nn02431628\nn02431785\nn02431976\nn02432291\nn02432511\nn02432704\nn02432983\nn02433318\nn02433546\nn02433729\nn02433925\nn02434190\nn02434415\nn02434712\nn02434954\nn02435216\nn02435517\nn02435853\nn02436224\nn02436353\nn02436645\nn02437136\nn02437312\nn02437482\nn02437616\nn02437971\nn02438173\nn02438272\nn02438580\nn02439033\nn02439398\nn02441326\nn02441942\nn02442172\nn02442336\nn02442446\nn02442572\nn02442668\nn02442845\nn02443015\nn02443114\nn02443346\nn02443484\nn02443808\nn02443959\nn02444251\nn02444819\nn02445004\nn02445171\nn02445394\nn02445715\nn02446206\nn02446352\nn02446645\nn02447021\nn02447366\nn02447762\nn02448060\nn02448318\nn02448633\nn02448885\nn02449183\nn02449350\nn02449699\nn02450034\nn02450295\nn02450426\nn02450561\nn02450677\nn02450829\nn02451125\nn02451415\nn02451575\nn02453108\nn02453611\nn02454379\nn02454794\nn02455135\nn02455428\nn02455720\nn02456008\nn02456275\nn02456962\nn02457408\nn02457945\nn02458135\nn02458517\nn02459190\nn02460009\nn02460451\nn02460817\nn02461128\nn02461830\nn02462213\nn02469248\nn02469472\nn02469914\nn02470238\nn02470325\nn02470709\nn02470899\nn02471300\nn02471762\nn02472293\nn02472987\nn02473307\nn02473554\nn02473720\nn02473857\nn02473983\nn02474110\nn02474282\nn02474605\nn02474777\nn02475078\nn02475358\nn02475669\nn02476219\nn02476567\nn02476870\nn02477028\nn02477187\nn02477329\nn02477516\nn02477782\nn02478239\nn02478875\nn02479332\nn02480153\nn02480495\nn02480855\nn02481103\nn02481235\nn02481366\nn02481500\nn02481823\nn02482060\nn02482286\nn02482474\nn02482650\nn02483092\nn02483362\nn02483708\nn02484322\nn02484473\nn02484975\nn02485225\nn02485371\nn02485536\nn02485688\nn02485988\nn02486261\nn02486410\nn02486657\nn02486908\nn02487079\nn02487347\nn02487547\nn02487675\nn02487847\nn02488003\nn02488291\nn02488415\nn02488702\
nn02488894\nn02489166\nn02489589\nn02490219\nn02490597\nn02490811\nn02491107\nn02491329\nn02491474\nn02492035\nn02492356\nn02492660\nn02492948\nn02493224\nn02493509\nn02493793\nn02494079\nn02494383\nn02495242\nn02496052\nn02496913\nn02497673\nn02498153\nn02498743\nn02499022\nn02499316\nn02499568\nn02499808\nn02500267\nn02500596\nn02501583\nn02501923\nn02502006\nn02502514\nn02502807\nn02503127\nn02503517\nn02503756\nn02504013\nn02504458\nn02504770\nn02505063\nn02505238\nn02505485\nn02505998\nn02506947\nn02507148\nn02507649\nn02508021\nn02508213\nn02508346\nn02508742\nn02509197\nn02509515\nn02509815\nn02510455\nn02511730\nn02512053\nn02512752\nn02512830\nn02512938\nn02513248\nn02513355\nn02513560\nn02513727\nn02513805\nn02513939\nn02514041\nn02515214\nn02515713\nn02516188\nn02516776\nn02517442\nn02517938\nn02518324\nn02518622\nn02519148\nn02519340\nn02519472\nn02519686\nn02519862\nn02520147\nn02520525\nn02520810\nn02521646\nn02522399\nn02522637\nn02522722\nn02522866\nn02523110\nn02523427\nn02523877\nn02524202\nn02524524\nn02524659\nn02524928\nn02525382\nn02525703\nn02526121\nn02526425\nn02526818\nn02527057\nn02527271\nn02527622\nn02528163\nn02529293\nn02529772\nn02530052\nn02530188\nn02530421\nn02530637\nn02530831\nn02530999\nn02531114\nn02531625\nn02532028\nn02532272\nn02532451\nn02532602\nn02532786\nn02532918\nn02533209\nn02533545\nn02533834\nn02534165\nn02534559\nn02534734\nn02535080\nn02535163\nn02535258\nn02535537\nn02535759\nn02536165\nn02536456\nn02536864\nn02537085\nn02537319\nn02537525\nn02537716\nn02538010\nn02538216\nn02538406\nn02538562\nn02538985\nn02539424\nn02539573\nn02539894\nn02540412\nn02540983\nn02541257\nn02541687\nn02542017\nn02542432\nn02542958\nn02543255\nn02543565\nn02544274\nn02545841\nn02546028\nn02546331\nn02546627\nn02547014\nn02547733\nn02548247\nn02548689\nn02548884\nn02549248\nn02549376\nn02549989\nn02550203\nn02550460\nn02550655\nn02551134\nn02551668\nn02552171\nn02553028\nn02554730\nn02555863\nn02556373\nn02556846\nn02557182\nn0255731
8\nn02557591\nn02557749\nn02557909\nn02558206\nn02558860\nn02559144\nn02559383\nn02559862\nn02560110\nn02561108\nn02561381\nn02561514\nn02561661\nn02561803\nn02561937\nn02562315\nn02562796\nn02562971\nn02563079\nn02563182\nn02563648\nn02563792\nn02563949\nn02564270\nn02564403\nn02564720\nn02564935\nn02565072\nn02565324\nn02565573\nn02566109\nn02566489\nn02566665\nn02567334\nn02567633\nn02568087\nn02568447\nn02568959\nn02569484\nn02569631\nn02569905\nn02570164\nn02570484\nn02570838\nn02571167\nn02571652\nn02571810\nn02572196\nn02572484\nn02573249\nn02573704\nn02574271\nn02574910\nn02575325\nn02575590\nn02576223\nn02576575\nn02576906\nn02577041\nn02577164\nn02577403\nn02577662\nn02577952\nn02578233\nn02578454\nn02578771\nn02578928\nn02579303\nn02579557\nn02579762\nn02579928\nn02580336\nn02580679\nn02580830\nn02581108\nn02581482\nn02581642\nn02581957\nn02582220\nn02582349\nn02582721\nn02583567\nn02583890\nn02584145\nn02584449\nn02585872\nn02586238\nn02586543\nn02587051\nn02587300\nn02587479\nn02587618\nn02587877\nn02588286\nn02588794\nn02588945\nn02589062\nn02589196\nn02589316\nn02589623\nn02589796\nn02590094\nn02590495\nn02590702\nn02590987\nn02591330\nn02591613\nn02591911\nn02592055\nn02592371\nn02592734\nn02593019\nn02593191\nn02593453\nn02593679\nn02594250\nn02594942\nn02595056\nn02595339\nn02595702\nn02596067\nn02596252\nn02596381\nn02596720\nn02597004\nn02597367\nn02597608\nn02597818\nn02597972\nn02598134\nn02598573\nn02598878\nn02599052\nn02599347\nn02599557\nn02599958\nn02600298\nn02600503\nn02600798\nn02601344\nn02601767\nn02601921\nn02602059\nn02602405\nn02602760\nn02603317\nn02603540\nn02603862\nn02604157\nn02604480\nn02604954\nn02605316\nn02605703\nn02605936\nn02606052\nn02606384\nn02606751\nn02607072\nn02607201\nn02607470\nn02607862\nn02608284\nn02608547\nn02608860\nn02608996\nn02609302\nn02609823\nn02610066\nn02610373\nn02610664\nn02610980\nn02611561\nn02611898\nn02612167\nn02613181\nn02613572\nn02613820\nn02614140\nn02614482\nn02614653\nn02614978\nn02615
298\nn02616128\nn02616397\nn02616851\nn02617537\nn02618094\nn02618513\nn02618827\nn02619165\nn02619550\nn02619861\nn02620167\nn02620578\nn02621258\nn02621908\nn02622249\nn02622547\nn02622712\nn02622955\nn02623445\nn02624167\nn02624551\nn02624807\nn02624987\nn02625258\nn02625612\nn02625851\nn02626089\nn02626265\nn02626471\nn02626762\nn02627037\nn02627292\nn02627532\nn02627835\nn02628062\nn02628259\nn02628600\nn02629230\nn02629716\nn02630281\nn02630615\nn02630739\nn02631041\nn02631330\nn02631475\nn02631628\nn02631775\nn02632039\nn02632494\nn02633422\nn02633677\nn02633977\nn02634545\nn02635154\nn02635580\nn02636170\nn02636405\nn02636550\nn02636854\nn02637179\nn02637475\nn02637977\nn02638596\nn02639087\nn02639605\nn02639922\nn02640242\nn02640626\nn02640857\nn02641379\nn02642107\nn02642644\nn02643112\nn02643316\nn02643566\nn02643836\nn02644113\nn02644360\nn02644501\nn02644665\nn02644817\nn02645538\nn02645691\nn02645953\nn02646667\nn02646892\nn02648035\nn02648625\nn02648916\nn02649218\nn02649546\nn02650050\nn02650413\nn02650541\nn02651060\nn02652132\nn02652668\nn02653145\nn02653497\nn02653786\nn02654112\nn02654425\nn02654745\nn02655020\nn02655523\nn02655848\nn02656032\nn02656301\nn02656670\nn02656969\nn02657368\nn02657694\nn02658079\nn02658531\nn02658811\nn02659176\nn02659478\nn02659808\nn02660091\nn02660208\nn02660519\nn02660640\nn02661017\nn02661473\nn02661618\nn02662239\nn02662397\nn02662559\nn02662825\nn02662993\nn02663211\nn02663485\nn02663849\nn02664285\nn02664642\nn02665250\nn02665985\nn02666196\nn02666501\nn02666624\nn02666943\nn02667093\nn02667244\nn02667379\nn02667478\nn02667576\nn02667693\nn02668393\nn02668613\nn02669295\nn02669442\nn02669534\nn02669723\nn02670186\nn02670382\nn02670683\nn02670935\nn02671780\nn02672152\nn02672371\nn02672831\nn02675077\nn02675219\nn02675522\nn02676097\nn02676261\nn02676566\nn02676670\nn02676938\nn02677028\nn02677136\nn02677436\nn02677718\nn02678010\nn02678384\nn02678897\nn02679142\nn02679257\nn02679961\nn02680110\nn02680512\nn026
80638\nn02680754\nn02681392\nn02682311\nn02682407\nn02682569\nn02682811\nn02682922\nn02683183\nn02683323\nn02683454\nn02683558\nn02683791\nn02684248\nn02684356\nn02684515\nn02684649\nn02684962\nn02685082\nn02685253\nn02685365\nn02685701\nn02685995\nn02686121\nn02686227\nn02686379\nn02686568\nn02687172\nn02687423\nn02687682\nn02687821\nn02687992\nn02688273\nn02688443\nn02689144\nn02689274\nn02689434\nn02689748\nn02689819\nn02690373\nn02690715\nn02691156\nn02692086\nn02692232\nn02692513\nn02692680\nn02692877\nn02693246\nn02693413\nn02693540\nn02694045\nn02694279\nn02694426\nn02694662\nn02694966\nn02695627\nn02695762\nn02696165\nn02696246\nn02696569\nn02696843\nn02697022\nn02697221\nn02697576\nn02697675\nn02697876\nn02698244\nn02698473\nn02698634\nn02699494\nn02699629\nn02699770\nn02699915\nn02700064\nn02700258\nn02700895\nn02701002\nn02701260\nn02701730\nn02702989\nn02703124\nn02703275\nn02704645\nn02704792\nn02704949\nn02705201\nn02705429\nn02705944\nn02706221\nn02706806\nn02708093\nn02708224\nn02708433\nn02708555\nn02708711\nn02708885\nn02709101\nn02709367\nn02709637\nn02709763\nn02709908\nn02710044\nn02710201\nn02710324\nn02710429\nn02710600\nn02711237\nn02711780\nn02712545\nn02712643\nn02713003\nn02713218\nn02713364\nn02713496\nn02714315\nn02714535\nn02714751\nn02715229\nn02715513\nn02715712\nn02716626\nn02720048\nn02720576\nn02721813\nn02723165\nn02724722\nn02725872\nn02726017\nn02726210\nn02726305\nn02726681\nn02727016\nn02727141\nn02727426\nn02727825\nn02728440\nn02729222\nn02729837\nn02729965\nn02730265\nn02730568\nn02730930\nn02731251\nn02731398\nn02731629\nn02731900\nn02732072\nn02732572\nn02732827\nn02733213\nn02733524\nn02734725\nn02734835\nn02735268\nn02735361\nn02735538\nn02735688\nn02736396\nn02736798\nn02737351\nn02737660\nn02738031\nn02738271\nn02738449\nn02738535\nn02738741\nn02738859\nn02738978\nn02739123\nn02739427\nn02739550\nn02739668\nn02739889\nn02740061\nn02740300\nn02740533\nn02740764\nn02741367\nn02741475\nn02742070\nn02742194\nn02742322\nn0
2742468\nn02742753\nn02743426\nn02744323\nn02744844\nn02744961\nn02745492\nn02745611\nn02745816\nn02746008\nn02746225\nn02746365\nn02746595\nn02746683\nn02746978\nn02747063\nn02747177\nn02747672\nn02747802\nn02748183\nn02748359\nn02748491\nn02749169\nn02749292\nn02749479\nn02749670\nn02749790\nn02749953\nn02750070\nn02750169\nn02750320\nn02750652\nn02751067\nn02751215\nn02751295\nn02751490\nn02752199\nn02752496\nn02752615\nn02752810\nn02752917\nn02753044\nn02753394\nn02753710\nn02754103\nn02754656\nn02755140\nn02755352\nn02755529\nn02755675\nn02755823\nn02755984\nn02756098\nn02756854\nn02756977\nn02757061\nn02757337\nn02757462\nn02757714\nn02757810\nn02757927\nn02758134\nn02758490\nn02758863\nn02758960\nn02759257\nn02759387\nn02759700\nn02759963\nn02760099\nn02760199\nn02760298\nn02760429\nn02760658\nn02760855\nn02761034\nn02761206\nn02761392\nn02761557\nn02761696\nn02761834\nn02762169\nn02762371\nn02762508\nn02762725\nn02762909\nn02763083\nn02763198\nn02763306\nn02763604\nn02763714\nn02763901\nn02764044\nn02764398\nn02764505\nn02764614\nn02764779\nn02764935\nn02765028\nn02766168\nn02766320\nn02766534\nn02766792\nn02767038\nn02767147\nn02767433\nn02767665\nn02767956\nn02768114\nn02768226\nn02768433\nn02768655\nn02768973\nn02769075\nn02769290\nn02769669\nn02769748\nn02769963\nn02770078\nn02770211\nn02770585\nn02770721\nn02770830\nn02771004\nn02771166\nn02771286\nn02771547\nn02771750\nn02772101\nn02772435\nn02772554\nn02772700\nn02773037\nn02773838\nn02774152\nn02774630\nn02774921\nn02775039\nn02775178\nn02775483\nn02775689\nn02775813\nn02775897\nn02776007\nn02776205\nn02776505\nn02776631\nn02776825\nn02776978\nn02777100\nn02777292\nn02777402\nn02777638\nn02777734\nn02777927\nn02778131\nn02778294\nn02778456\nn02778588\nn02778669\nn02779435\nn02779609\nn02779719\nn02779971\nn02780315\nn02780445\nn02780588\nn02780704\nn02780815\nn02781121\nn02781213\nn02781338\nn02781517\nn02781764\nn02782093\nn02782432\nn02782602\nn02782681\nn02782778\nn02783035\nn02783161\nn02783324\n
n02783459\nn02783900\nn02783994\nn02784124\nn02784998\nn02785648\nn02786058\nn02786198\nn02786331\nn02786463\nn02786611\nn02786736\nn02786837\nn02787120\nn02787269\nn02787435\nn02787622\nn02788021\nn02788148\nn02788386\nn02788462\nn02788572\nn02788689\nn02789487\nn02790669\nn02790823\nn02790996\nn02791124\nn02791270\nn02791532\nn02791665\nn02791795\nn02792409\nn02792552\nn02792948\nn02793089\nn02793199\nn02793296\nn02793414\nn02793495\nn02793684\nn02793842\nn02793930\nn02794008\nn02794156\nn02794368\nn02794474\nn02794664\nn02794779\nn02794972\nn02795169\nn02795528\nn02795670\nn02795783\nn02795978\nn02796207\nn02796318\nn02796412\nn02796623\nn02796995\nn02797295\nn02797535\nn02797692\nn02797881\nn02799071\nn02799175\nn02799323\nn02799897\nn02800213\nn02800497\nn02800675\nn02800940\nn02801047\nn02801184\nn02801450\nn02801525\nn02801823\nn02801938\nn02802215\nn02802426\nn02802544\nn02802721\nn02802990\nn02803349\nn02803539\nn02803666\nn02803809\nn02803934\nn02804123\nn02804252\nn02804414\nn02804515\nn02804610\nn02805283\nn02805845\nn02805983\nn02806088\nn02806379\nn02806530\nn02806762\nn02806875\nn02806992\nn02807133\nn02807523\nn02807616\nn02807731\nn02808185\nn02808304\nn02808440\nn02808829\nn02808968\nn02809105\nn02809241\nn02809364\nn02809491\nn02809605\nn02809736\nn02810139\nn02810270\nn02810471\nn02810782\nn02811059\nn02811204\nn02811350\nn02811468\nn02811618\nn02811719\nn02811936\nn02812201\nn02812342\nn02812631\nn02812785\nn02812949\nn02813252\nn02813399\nn02813544\nn02813645\nn02813752\nn02813981\nn02814116\nn02814338\nn02814428\nn02814533\nn02814774\nn02814860\nn02815478\nn02815749\nn02815834\nn02815950\nn02816494\nn02816656\nn02816768\nn02817031\nn02817251\nn02817386\nn02817516\nn02817650\nn02817799\nn02818135\nn02818254\nn02818687\nn02818832\nn02819697\nn02820085\nn02820210\nn02820556\nn02820675\nn02821202\nn02821415\nn02821543\nn02821627\nn02821943\nn02822064\nn02822220\nn02822399\nn02822579\nn02822762\nn02822865\nn02823124\nn02823335\nn02823428\nn02823510
\nn02823586\nn02823750\nn02823848\nn02823964\nn02824058\nn02824152\nn02824319\nn02824448\nn02825153\nn02825240\nn02825442\nn02825657\nn02825872\nn02825961\nn02826068\nn02826259\nn02826459\nn02826589\nn02826683\nn02826812\nn02826886\nn02827148\nn02827606\nn02828115\nn02828299\nn02828427\nn02828884\nn02829246\nn02829353\nn02829510\nn02829596\nn02830157\nn02831237\nn02831335\nn02831595\nn02831724\nn02831894\nn02831998\nn02833040\nn02833140\nn02833275\nn02833403\nn02833793\nn02834027\nn02834397\nn02834506\nn02834642\nn02834778\nn02835271\nn02835412\nn02835551\nn02835724\nn02835829\nn02835915\nn02836035\nn02836174\nn02836268\nn02836392\nn02836513\nn02836607\nn02836900\nn02837134\nn02837567\nn02837789\nn02837887\nn02838014\nn02838178\nn02838345\nn02838577\nn02838728\nn02838958\nn02839110\nn02839351\nn02839592\nn02839910\nn02840134\nn02840245\nn02840515\nn02840619\nn02841063\nn02841187\nn02841315\nn02841506\nn02841641\nn02841847\nn02842133\nn02842573\nn02842809\nn02843029\nn02843158\nn02843276\nn02843465\nn02843553\nn02843684\nn02843777\nn02843909\nn02844056\nn02844214\nn02844307\nn02844714\nn02845130\nn02845293\nn02845985\nn02846141\nn02846260\nn02846511\nn02846619\nn02846733\nn02846874\nn02847461\nn02847631\nn02847852\nn02848118\nn02848216\nn02848523\nn02848806\nn02848921\nn02849154\nn02849885\nn02850060\nn02850358\nn02850732\nn02850950\nn02851099\nn02851795\nn02851939\nn02852043\nn02852173\nn02852360\nn02853016\nn02853218\nn02853336\nn02853745\nn02853870\nn02854378\nn02854532\nn02854630\nn02854739\nn02854926\nn02855089\nn02855390\nn02855701\nn02855793\nn02855925\nn02856013\nn02856237\nn02856362\nn02857365\nn02857477\nn02857644\nn02857907\nn02858304\nn02859184\nn02859343\nn02859443\nn02859557\nn02859729\nn02859955\nn02860415\nn02860640\nn02860847\nn02861022\nn02861147\nn02861286\nn02861387\nn02861509\nn02861658\nn02861777\nn02861886\nn02862048\nn02862916\nn02863014\nn02863176\nn02863340\nn02863426\nn02863536\nn02863638\nn02863750\nn02864122\nn02864504\nn02864593\nn028649
87\nn02865351\nn02865665\nn02865931\nn02866106\nn02866386\nn02866578\nn02867401\nn02867592\nn02867715\nn02867966\nn02868240\nn02868429\nn02868546\nn02868638\nn02868975\nn02869155\nn02869249\nn02869563\nn02869737\nn02869837\nn02870526\nn02870676\nn02870772\nn02870880\nn02871005\nn02871147\nn02871314\nn02871439\nn02871525\nn02871631\nn02871824\nn02871963\nn02872333\nn02872529\nn02872752\nn02873520\nn02873623\nn02873733\nn02873839\nn02874086\nn02874214\nn02874336\nn02874442\nn02874537\nn02874642\nn02874750\nn02875436\nn02875626\nn02875948\nn02876084\nn02876326\nn02876457\nn02876657\nn02877266\nn02877513\nn02877642\nn02877765\nn02877962\nn02878107\nn02878222\nn02878425\nn02878534\nn02878628\nn02878796\nn02879087\nn02879309\nn02879422\nn02879517\nn02879718\nn02880189\nn02880393\nn02880546\nn02880842\nn02880940\nn02881193\nn02881546\nn02881757\nn02881906\nn02882190\nn02882301\nn02882483\nn02882647\nn02882894\nn02883004\nn02883101\nn02883205\nn02883344\nn02884225\nn02884450\nn02884859\nn02884994\nn02885108\nn02885233\nn02885338\nn02885462\nn02885882\nn02886321\nn02886434\nn02886599\nn02887079\nn02887209\nn02887489\nn02887832\nn02887970\nn02888270\nn02888429\nn02888569\nn02888898\nn02889425\nn02889646\nn02889856\nn02889996\nn02890188\nn02890351\nn02890513\nn02890662\nn02890804\nn02890940\nn02891188\nn02891788\nn02892201\nn02892304\nn02892392\nn02892499\nn02892626\nn02892767\nn02892948\nn02893269\nn02893418\nn02893608\nn02893692\nn02893941\nn02894024\nn02894158\nn02894337\nn02894605\nn02894847\nn02895008\nn02895154\nn02895328\nn02895438\nn02896074\nn02896294\nn02896442\nn02896694\nn02896856\nn02896949\nn02897097\nn02897389\nn02897820\nn02898093\nn02898173\nn02898269\nn02898369\nn02898585\nn02898711\nn02899439\nn02900160\nn02900459\nn02900594\nn02900705\nn02900857\nn02900987\nn02901114\nn02901259\nn02901377\nn02901481\nn02901620\nn02901793\nn02901901\nn02902079\nn02902687\nn02902816\nn02902916\nn02903006\nn02903126\nn02903204\nn02903727\nn02903852\nn02904109\nn02904233\nn0290
4505\nn02904640\nn02904803\nn02904927\nn02905036\nn02905152\nn02905886\nn02906734\nn02906963\nn02907082\nn02907296\nn02907391\nn02907656\nn02907873\nn02908123\nn02908217\nn02908773\nn02908951\nn02909053\nn02909165\nn02909285\nn02909706\nn02909870\nn02910145\nn02910241\nn02910353\nn02910542\nn02910701\nn02910864\nn02910964\nn02911332\nn02911485\nn02912065\nn02912319\nn02912557\nn02912894\nn02913152\nn02914991\nn02915904\nn02916065\nn02916179\nn02916350\nn02916936\nn02917067\nn02917377\nn02917521\nn02917607\nn02917742\nn02917964\nn02918112\nn02918330\nn02918455\nn02918595\nn02918831\nn02918964\nn02919148\nn02919308\nn02919414\nn02919648\nn02919792\nn02919890\nn02919976\nn02920083\nn02920164\nn02920259\nn02920369\nn02920503\nn02920658\nn02921029\nn02921195\nn02921292\nn02921406\nn02921592\nn02921756\nn02921884\nn02922159\nn02922292\nn02922461\nn02922578\nn02922798\nn02922877\nn02923129\nn02923535\nn02923682\nn02923915\nn02924116\nn02925009\nn02925107\nn02925385\nn02925519\nn02925666\nn02926426\nn02926591\nn02927053\nn02927161\nn02927764\nn02927887\nn02928049\nn02928299\nn02928413\nn02928608\nn02929184\nn02929289\nn02929462\nn02929582\nn02929923\nn02930080\nn02930214\nn02930339\nn02930645\nn02930766\nn02931013\nn02931148\nn02931294\nn02931417\nn02931836\nn02932019\nn02932400\nn02932523\nn02932693\nn02932891\nn02933112\nn02933340\nn02933462\nn02933649\nn02933750\nn02933990\nn02934168\nn02934451\nn02935017\nn02935387\nn02935490\nn02935658\nn02935891\nn02936176\nn02936281\nn02936402\nn02936570\nn02936714\nn02936921\nn02937010\nn02937336\nn02937958\nn02938218\nn02938321\nn02938886\nn02939185\nn02939763\nn02939866\nn02940289\nn02940385\nn02940570\nn02940706\nn02941095\nn02941228\nn02941845\nn02942015\nn02942147\nn02942349\nn02942460\nn02942699\nn02943241\nn02943465\nn02943686\nn02943871\nn02943964\nn02944075\nn02944146\nn02944256\nn02944459\nn02944579\nn02944826\nn02945161\nn02945813\nn02945964\nn02946127\nn02946270\nn02946348\nn02946509\nn02946753\nn02946824\nn02946921\nn02
947212\nn02947660\nn02947818\nn02947977\nn02948072\nn02948293\nn02948403\nn02948557\nn02948834\nn02948942\nn02949084\nn02949202\nn02949356\nn02949542\nn02950018\nn02950120\nn02950186\nn02950256\nn02950482\nn02950632\nn02950826\nn02950943\nn02951358\nn02951585\nn02951703\nn02951843\nn02952109\nn02952237\nn02952374\nn02952485\nn02952585\nn02952674\nn02952798\nn02952935\nn02953056\nn02953197\nn02953455\nn02953552\nn02953673\nn02953850\nn02954163\nn02954340\nn02954938\nn02955065\nn02955247\nn02955540\nn02955767\nn02956393\nn02956699\nn02956795\nn02956883\nn02957008\nn02957135\nn02957252\nn02957427\nn02957755\nn02957862\nn02958343\nn02959942\nn02960352\nn02960690\nn02960903\nn02961035\nn02961225\nn02961451\nn02961544\nn02961947\nn02962061\nn02962200\nn02962414\nn02962843\nn02962938\nn02963159\nn02963302\nn02963503\nn02963692\nn02963821\nn02963987\nn02964075\nn02964196\nn02964295\nn02964634\nn02964843\nn02964934\nn02965024\nn02965122\nn02965216\nn02965300\nn02965529\nn02965783\nn02966068\nn02966193\nn02966545\nn02966687\nn02966786\nn02966942\nn02967081\nn02967170\nn02967294\nn02967407\nn02967540\nn02967626\nn02967782\nn02967991\nn02968074\nn02968210\nn02968333\nn02968473\nn02969010\nn02969163\nn02969323\nn02969527\nn02969634\nn02969886\nn02970408\nn02970534\nn02970685\nn02970849\nn02971167\nn02971356\nn02971473\nn02971579\nn02971691\nn02971940\nn02972397\nn02972714\nn02972934\nn02973017\nn02973236\nn02973805\nn02973904\nn02974003\nn02974348\nn02974454\nn02974565\nn02974697\nn02975212\nn02975589\nn02975994\nn02976123\nn02976249\nn02976350\nn02976455\nn02976552\nn02976641\nn02976815\nn02976939\nn02977058\nn02977330\nn02977438\nn02977619\nn02977936\nn02978055\nn02978205\nn02978367\nn02978478\nn02978753\nn02978881\nn02979074\nn02979186\nn02979290\nn02979399\nn02979516\nn02979836\nn02980036\nn02980203\nn02980441\nn02980625\nn02981024\nn02981198\nn02981321\nn02981565\nn02981792\nn02981911\nn02982232\nn02982416\nn02982515\nn02982599\nn02983072\nn02983189\nn02983357\nn02983507\nn
02983904\nn02984061\nn02984203\nn02984469\nn02984699\nn02985137\nn02985606\nn02985828\nn02985963\nn02986066\nn02986160\nn02986348\nn02987047\nn02987379\nn02987492\nn02987706\nn02987823\nn02987950\nn02988066\nn02988156\nn02988304\nn02988486\nn02988679\nn02988963\nn02989099\nn02990373\nn02990758\nn02991048\nn02991302\nn02991847\nn02992032\nn02992211\nn02992368\nn02992529\nn02992795\nn02993194\nn02993368\nn02993546\nn02994573\nn02994743\nn02995345\nn02995871\nn02995998\nn02997391\nn02997607\nn02997910\nn02998003\nn02998107\nn02998563\nn02998696\nn02998841\nn02999138\nn02999410\nn02999936\nn03000134\nn03000247\nn03000530\nn03000684\nn03001115\nn03001282\nn03001540\nn03001627\nn03002096\nn03002210\nn03002341\nn03002555\nn03002711\nn03002816\nn03002948\nn03003091\nn03003633\nn03004275\nn03004409\nn03004531\nn03004620\nn03004713\nn03004824\nn03005033\nn03005147\nn03005285\nn03005515\nn03005619\nn03006626\nn03006788\nn03006903\nn03007130\nn03007297\nn03007444\nn03007591\nn03008177\nn03008817\nn03008976\nn03009111\nn03009269\nn03009794\nn03010473\nn03010656\nn03010795\nn03010915\nn03011018\nn03011355\nn03011741\nn03012013\nn03012159\nn03012373\nn03012499\nn03012644\nn03012734\nn03012897\nn03013006\nn03013438\nn03013580\nn03013850\nn03014440\nn03014705\nn03015149\nn03015254\nn03015478\nn03015631\nn03015851\nn03016209\nn03016389\nn03016609\nn03016737\nn03016868\nn03016953\nn03017070\nn03017168\nn03017698\nn03017835\nn03018209\nn03018349\nn03018614\nn03018712\nn03018848\nn03019198\nn03019304\nn03019434\nn03019685\nn03019806\nn03019938\nn03020034\nn03020416\nn03020692\nn03021228\nn03024064\nn03024233\nn03024333\nn03024518\nn03025070\nn03025165\nn03025250\nn03025886\nn03026506\nn03026907\nn03027001\nn03027108\nn03027250\nn03027505\nn03027625\nn03028079\nn03028596\nn03028785\nn03029066\nn03029197\nn03029296\nn03029445\nn03029925\nn03030262\nn03030353\nn03030557\nn03030880\nn03031012\nn03031152\nn03031422\nn03031756\nn03032252\nn03032453\nn03032811\nn03033267\nn03033362\nn03033986\
nn03034244\nn03034405\nn03034516\nn03034663\nn03035252\nn03035510\nn03035715\nn03035832\nn03036022\nn03036149\nn03036244\nn03036341\nn03036469\nn03036701\nn03036866\nn03037108\nn03037228\nn03037404\nn03037590\nn03037709\nn03038041\nn03038281\nn03038480\nn03038685\nn03038870\nn03039015\nn03039259\nn03039353\nn03039493\nn03039827\nn03039947\nn03040229\nn03040376\nn03040836\nn03041114\nn03041265\nn03041449\nn03041632\nn03041810\nn03042139\nn03042384\nn03042490\nn03042697\nn03042829\nn03042984\nn03043173\nn03043274\nn03043423\nn03043693\nn03043798\nn03043958\nn03044671\nn03044801\nn03044934\nn03045074\nn03045228\nn03045337\nn03045698\nn03045800\nn03046029\nn03046133\nn03046257\nn03046802\nn03046921\nn03047052\nn03047171\nn03047690\nn03047799\nn03047941\nn03048883\nn03049066\nn03049326\nn03049457\nn03049782\nn03049924\nn03050026\nn03050453\nn03050546\nn03050655\nn03050864\nn03051041\nn03051249\nn03051396\nn03051540\nn03052464\nn03052917\nn03053047\nn03053976\nn03054491\nn03054605\nn03054901\nn03055159\nn03055418\nn03055670\nn03055857\nn03056097\nn03056215\nn03056288\nn03056493\nn03056583\nn03056873\nn03057021\nn03057541\nn03057636\nn03057724\nn03057841\nn03057920\nn03058107\nn03058603\nn03058949\nn03059103\nn03059236\nn03059366\nn03059685\nn03059934\nn03060728\nn03061050\nn03061211\nn03061345\nn03061505\nn03061674\nn03061819\nn03061893\nn03062015\nn03062122\nn03062245\nn03062336\nn03062651\nn03062798\nn03062985\nn03063073\nn03063199\nn03063338\nn03063485\nn03063599\nn03063689\nn03063834\nn03063968\nn03064250\nn03064350\nn03064562\nn03064758\nn03064935\nn03065243\nn03065424\nn03065708\nn03066232\nn03066359\nn03066464\nn03066849\nn03067093\nn03067212\nn03067339\nn03067518\nn03068181\nn03068998\nn03069752\nn03070059\nn03070193\nn03070396\nn03070587\nn03070854\nn03071021\nn03071160\nn03071288\nn03071552\nn03072056\nn03072201\nn03072440\nn03072682\nn03073296\nn03073384\nn03073545\nn03073694\nn03073977\nn03074380\nn03074855\nn03075097\nn03075248\nn03075370\nn03075500\nn0307563
4\nn03075768\nn03075946\nn03076411\nn03076623\nn03076708\nn03077442\nn03077616\nn03077741\nn03078287\nn03078506\nn03078670\nn03078802\nn03078995\nn03079136\nn03079230\nn03079494\nn03079616\nn03079741\nn03080309\nn03080497\nn03080633\nn03080731\nn03080904\nn03081859\nn03081986\nn03082127\nn03082280\nn03082450\nn03082656\nn03082807\nn03082979\nn03084420\nn03084834\nn03085013\nn03085219\nn03085333\nn03085602\nn03085781\nn03085915\nn03086183\nn03086457\nn03086580\nn03086670\nn03086868\nn03087069\nn03087245\nn03087366\nn03087521\nn03087643\nn03087816\nn03088389\nn03088580\nn03088707\nn03089477\nn03089624\nn03089753\nn03089879\nn03090000\nn03090172\nn03090437\nn03090710\nn03090856\nn03091044\nn03091223\nn03091374\nn03091907\nn03092053\nn03092166\nn03092314\nn03092476\nn03092656\nn03092883\nn03093427\nn03093792\nn03094159\nn03094503\nn03095699\nn03095965\nn03096439\nn03096960\nn03097362\nn03097535\nn03097673\nn03098140\nn03098515\nn03098688\nn03098806\nn03098959\nn03099147\nn03099274\nn03099454\nn03099622\nn03099771\nn03099945\nn03100240\nn03100346\nn03100490\nn03100897\nn03101156\nn03101302\nn03101375\nn03101517\nn03101664\nn03101796\nn03101986\nn03102371\nn03102516\nn03102654\nn03102859\nn03103128\nn03103396\nn03103563\nn03103904\nn03104019\nn03104512\nn03105088\nn03105214\nn03105306\nn03105467\nn03105645\nn03105810\nn03105974\nn03106722\nn03106898\nn03107046\nn03107488\nn03107716\nn03108455\nn03108624\nn03108759\nn03108853\nn03109033\nn03109150\nn03109253\nn03109693\nn03109881\nn03110202\nn03110669\nn03111041\nn03111177\nn03111296\nn03111690\nn03112240\nn03112719\nn03112869\nn03113152\nn03113505\nn03113657\nn03113835\nn03114041\nn03114236\nn03114379\nn03114504\nn03114743\nn03114839\nn03115014\nn03115180\nn03115400\nn03115663\nn03115762\nn03115897\nn03116008\nn03116163\nn03116530\nn03116767\nn03117199\nn03117642\nn03118346\nn03118969\nn03119203\nn03119396\nn03119510\nn03120198\nn03120491\nn03120778\nn03121040\nn03121190\nn03121298\nn03121431\nn03121897\nn03122073\nn03122
202\nn03122295\nn03122748\nn03123553\nn03123666\nn03123809\nn03123917\nn03124043\nn03124170\nn03124313\nn03124474\nn03124590\nn03125057\nn03125588\nn03125729\nn03125870\nn03126090\nn03126385\nn03126580\nn03126707\nn03126927\nn03127024\nn03127203\nn03127408\nn03127531\nn03127747\nn03127925\nn03128085\nn03128248\nn03128427\nn03128519\nn03129001\nn03129471\nn03129636\nn03129753\nn03129848\nn03130066\nn03130233\nn03130563\nn03130761\nn03130866\nn03131193\nn03131574\nn03131669\nn03131967\nn03132076\nn03132261\nn03132438\nn03132666\nn03132776\nn03133050\nn03133415\nn03133878\nn03134118\nn03134232\nn03134394\nn03134739\nn03134853\nn03135030\nn03135532\nn03135656\nn03135788\nn03135917\nn03136051\nn03136254\nn03136369\nn03136504\nn03137473\nn03137579\nn03138128\nn03138217\nn03138344\nn03138669\nn03139089\nn03139464\nn03139640\nn03139998\nn03140126\nn03140292\nn03140431\nn03140546\nn03140652\nn03140771\nn03140900\nn03141065\nn03141327\nn03141455\nn03141612\nn03141702\nn03141823\nn03142099\nn03142205\nn03142325\nn03142431\nn03142679\nn03143400\nn03143572\nn03143754\nn03144156\nn03144873\nn03144982\nn03145147\nn03145277\nn03145384\nn03145522\nn03145719\nn03145843\nn03146219\nn03146342\nn03146449\nn03146560\nn03146687\nn03146777\nn03146846\nn03147084\nn03147156\nn03147280\nn03147509\nn03148324\nn03148518\nn03148727\nn03148808\nn03149135\nn03149401\nn03149686\nn03149810\nn03150232\nn03150511\nn03150661\nn03150795\nn03151077\nn03152303\nn03152951\nn03153246\nn03153585\nn03153948\nn03154073\nn03154316\nn03154446\nn03154616\nn03154745\nn03154895\nn03155178\nn03155502\nn03155915\nn03156071\nn03156279\nn03156405\nn03156767\nn03157348\nn03158186\nn03158414\nn03158668\nn03158796\nn03158885\nn03159535\nn03159640\nn03160001\nn03160186\nn03160309\nn03160740\nn03161016\nn03161450\nn03161893\nn03162297\nn03162460\nn03162556\nn03162714\nn03162818\nn03163222\nn03163381\nn03163488\nn03163798\nn03163973\nn03164192\nn03164344\nn03164605\nn03164722\nn03164929\nn03165096\nn03165211\nn03165466\nn031
65616\nn03165823\nn03165955\nn03166120\nn03166514\nn03166600\nn03166685\nn03166809\nn03166951\nn03167153\nn03167978\nn03168107\nn03168217\nn03168543\nn03168663\nn03168774\nn03168933\nn03169063\nn03169176\nn03170292\nn03170459\nn03170635\nn03170872\nn03171228\nn03171356\nn03171635\nn03171910\nn03172038\nn03172738\nn03172965\nn03173270\nn03173387\nn03173929\nn03174079\nn03174450\nn03174731\nn03175081\nn03175189\nn03175301\nn03175457\nn03175604\nn03175843\nn03175983\nn03176238\nn03176386\nn03176594\nn03176763\nn03177059\nn03177165\nn03177708\nn03178000\nn03178173\nn03178430\nn03178538\nn03178674\nn03179701\nn03179910\nn03180011\nn03180384\nn03180504\nn03180732\nn03180865\nn03180969\nn03181293\nn03181667\nn03182140\nn03182232\nn03182912\nn03183080\nn03185868\nn03186199\nn03186285\nn03186818\nn03187037\nn03187153\nn03187268\nn03187595\nn03187751\nn03188290\nn03188531\nn03188725\nn03188871\nn03189083\nn03189311\nn03189818\nn03190458\nn03191286\nn03191451\nn03191561\nn03191776\nn03192543\nn03192907\nn03193107\nn03193260\nn03193423\nn03193597\nn03193754\nn03194170\nn03194297\nn03194812\nn03194992\nn03195332\nn03195485\nn03195799\nn03195959\nn03196062\nn03196217\nn03196324\nn03196598\nn03196990\nn03197201\nn03197337\nn03197446\nn03198223\nn03198500\nn03199358\nn03199488\nn03199647\nn03199775\nn03199901\nn03200231\nn03200357\nn03200539\nn03200701\nn03200906\nn03201035\nn03201208\nn03201529\nn03201638\nn03201776\nn03201895\nn03201996\nn03202354\nn03202481\nn03202760\nn03202940\nn03203089\nn03203806\nn03204134\nn03204306\nn03204436\nn03204558\nn03204955\nn03205143\nn03205304\nn03205458\nn03205574\nn03205669\nn03205903\nn03206023\nn03206158\nn03206282\nn03206405\nn03206602\nn03206718\nn03206908\nn03207305\nn03207548\nn03207630\nn03207743\nn03207835\nn03207941\nn03208556\nn03208938\nn03209359\nn03209477\nn03209666\nn03209910\nn03210245\nn03210372\nn03210552\nn03210683\nn03211117\nn03211413\nn03211616\nn03211789\nn03212114\nn03212247\nn03212406\nn03212811\nn03213014\nn03213361\nn0
3213538\nn03213715\nn03213826\nn03214253\nn03214450\nn03214582\nn03214966\nn03215076\nn03215191\nn03215337\nn03215508\nn03215749\nn03215930\nn03216199\nn03216402\nn03216562\nn03216710\nn03216828\nn03217653\nn03217739\nn03217889\nn03218198\nn03218446\nn03219010\nn03219135\nn03219483\nn03219612\nn03219859\nn03219966\nn03220095\nn03220237\nn03220513\nn03220692\nn03221059\nn03221351\nn03221540\nn03221720\nn03222176\nn03222318\nn03222516\nn03222722\nn03222857\nn03223162\nn03223299\nn03223441\nn03223553\nn03223686\nn03223923\nn03224490\nn03224603\nn03224753\nn03224893\nn03225108\nn03225458\nn03225616\nn03225777\nn03225988\nn03226090\nn03226254\nn03226375\nn03226538\nn03226880\nn03227010\nn03227184\nn03227317\nn03227721\nn03227856\nn03228016\nn03228254\nn03228365\nn03228533\nn03228692\nn03228796\nn03228967\nn03229115\nn03229244\nn03229526\nn03231160\nn03231368\nn03231819\nn03232309\nn03232417\nn03232543\nn03232815\nn03232923\nn03233123\nn03233624\nn03233744\nn03233905\nn03234164\nn03234952\nn03235042\nn03235180\nn03235327\nn03235796\nn03235979\nn03236093\nn03236217\nn03236423\nn03236580\nn03236735\nn03237212\nn03237340\nn03237416\nn03237639\nn03237839\nn03237992\nn03238131\nn03238286\nn03238586\nn03238762\nn03238879\nn03239054\nn03239259\nn03239607\nn03239726\nn03240140\nn03240683\nn03240892\nn03241093\nn03241335\nn03241496\nn03241903\nn03242120\nn03242264\nn03242390\nn03242506\nn03242995\nn03243218\nn03243625\nn03244047\nn03244231\nn03244388\nn03244775\nn03244919\nn03245271\nn03245421\nn03245724\nn03245889\nn03246197\nn03246312\nn03246454\nn03246653\nn03246933\nn03247083\nn03247351\nn03247495\nn03248835\nn03249342\nn03249569\nn03249956\nn03250089\nn03250279\nn03250405\nn03250588\nn03250847\nn03250952\nn03251100\nn03251280\nn03251533\nn03251766\nn03251932\nn03252231\nn03252324\nn03252422\nn03252637\nn03252787\nn03253071\nn03253187\nn03253279\nn03253714\nn03253796\nn03253886\nn03254046\nn03254189\nn03254374\nn03254625\nn03254737\nn03254862\nn03255030\nn03255167\nn03255322\n
n03255488\nn03255899\nn03256032\nn03256166\nn03256472\nn03256631\nn03256788\nn03256928\nn03257065\nn03257210\nn03257586\nn03258192\nn03258330\nn03258456\nn03258577\nn03258905\nn03259009\nn03259280\nn03259401\nn03259505\nn03260206\nn03260504\nn03260733\nn03260849\nn03261019\nn03261263\nn03261395\nn03261603\nn03261776\nn03262072\nn03262248\nn03262519\nn03262717\nn03262809\nn03262932\nn03263076\nn03263338\nn03263640\nn03263758\nn03264906\nn03265032\nn03265754\nn03266195\nn03266371\nn03266620\nn03266749\nn03267113\nn03267468\nn03267696\nn03267821\nn03268142\nn03268311\nn03268645\nn03268790\nn03268918\nn03269073\nn03269203\nn03269401\nn03270165\nn03270695\nn03270854\nn03271030\nn03271260\nn03271376\nn03271574\nn03271765\nn03271865\nn03272010\nn03272125\nn03272239\nn03272383\nn03272562\nn03272810\nn03272940\nn03273061\nn03273551\nn03273740\nn03273913\nn03274265\nn03274435\nn03274561\nn03274796\nn03275125\nn03275311\nn03275566\nn03275681\nn03275864\nn03276179\nn03276696\nn03276839\nn03277004\nn03277149\nn03277459\nn03277602\nn03277771\nn03278248\nn03278914\nn03279153\nn03279364\nn03279508\nn03279804\nn03279918\nn03280216\nn03280394\nn03280644\nn03281145\nn03281524\nn03281673\nn03282060\nn03282295\nn03282401\nn03283221\nn03283413\nn03283827\nn03284308\nn03284482\nn03284743\nn03284886\nn03284981\nn03285578\nn03285730\nn03285912\nn03286572\nn03287351\nn03287733\nn03288003\nn03288500\nn03288643\nn03288742\nn03288886\nn03289660\nn03289985\nn03290096\nn03290195\nn03290653\nn03291413\nn03291551\nn03291741\nn03291819\nn03291963\nn03292085\nn03292362\nn03292475\nn03292603\nn03292736\nn03292960\nn03293095\nn03293741\nn03293863\nn03294048\nn03294604\nn03294833\nn03295012\nn03295140\nn03295246\nn03295928\nn03296081\nn03296217\nn03296328\nn03296478\nn03296963\nn03297103\nn03297226\nn03297495\nn03297644\nn03297735\nn03298089\nn03298352\nn03298716\nn03298858\nn03299406\nn03300216\nn03300443\nn03301175\nn03301291\nn03301389\nn03301568\nn03301833\nn03301940\nn03302671\nn03302790\nn03302938
\nn03303217\nn03303669\nn03303831\nn03304197\nn03304323\nn03304465\nn03305300\nn03305522\nn03305953\nn03306385\nn03306869\nn03307037\nn03307573\nn03307792\nn03308152\nn03308481\nn03308614\nn03309110\nn03309356\nn03309465\nn03309687\nn03309808\nn03313333\nn03314227\nn03314378\nn03314608\nn03314780\nn03314884\nn03315644\nn03315805\nn03315990\nn03316105\nn03316406\nn03316873\nn03317233\nn03317510\nn03317673\nn03317788\nn03317889\nn03318136\nn03318294\nn03318865\nn03318983\nn03319167\nn03319457\nn03319576\nn03319745\nn03320046\nn03320262\nn03320421\nn03320519\nn03320845\nn03320959\nn03321103\nn03321419\nn03321563\nn03321843\nn03321954\nn03322570\nn03322704\nn03322836\nn03322940\nn03323096\nn03323211\nn03323319\nn03323703\nn03324629\nn03324814\nn03324928\nn03325088\nn03325288\nn03325403\nn03325584\nn03325691\nn03325941\nn03326073\nn03326371\nn03326475\nn03326660\nn03326795\nn03326948\nn03327133\nn03327234\nn03327553\nn03327691\nn03327841\nn03328201\nn03329302\nn03329536\nn03329663\nn03330002\nn03330665\nn03330792\nn03330947\nn03331077\nn03331244\nn03331599\nn03332005\nn03332173\nn03332271\nn03332393\nn03332591\nn03332784\nn03332989\nn03333129\nn03333252\nn03333349\nn03333610\nn03333711\nn03333851\nn03334017\nn03334291\nn03334382\nn03334492\nn03334912\nn03335030\nn03335333\nn03335461\nn03335846\nn03336168\nn03336282\nn03336575\nn03336742\nn03336839\nn03337140\nn03337383\nn03337494\nn03337822\nn03338287\nn03338821\nn03339296\nn03339529\nn03339643\nn03340009\nn03340723\nn03340923\nn03341035\nn03341153\nn03341297\nn03341606\nn03342015\nn03342127\nn03342262\nn03342432\nn03342657\nn03342863\nn03342961\nn03343047\nn03343234\nn03343354\nn03343560\nn03343737\nn03343853\nn03344305\nn03344393\nn03344509\nn03344642\nn03344784\nn03344935\nn03345487\nn03345837\nn03346135\nn03346289\nn03346455\nn03347037\nn03347472\nn03347617\nn03348142\nn03348868\nn03349020\nn03349296\nn03349367\nn03349469\nn03349599\nn03349771\nn03349892\nn03350204\nn03350352\nn03350456\nn03350602\nn03351151\nn033512
62\nn03351434\nn03351979\nn03352232\nn03352366\nn03352628\nn03352961\nn03353281\nn03353951\nn03354207\nn03354903\nn03355468\nn03355768\nn03355925\nn03356038\nn03356279\nn03356446\nn03356559\nn03356858\nn03356982\nn03357081\nn03357267\nn03357716\nn03358172\nn03358380\nn03358726\nn03358841\nn03359137\nn03359285\nn03359436\nn03359566\nn03360133\nn03360300\nn03360431\nn03360622\nn03360731\nn03361109\nn03361297\nn03361380\nn03361550\nn03361683\nn03362639\nn03362771\nn03362890\nn03363363\nn03363549\nn03363749\nn03364008\nn03364156\nn03364599\nn03364937\nn03365231\nn03365374\nn03365592\nn03365991\nn03366464\nn03366721\nn03366823\nn03366974\nn03367059\nn03367321\nn03367410\nn03367545\nn03367875\nn03367969\nn03368048\nn03368352\nn03369276\nn03369407\nn03369512\nn03369866\nn03370387\nn03370646\nn03371875\nn03372029\nn03372549\nn03372822\nn03372933\nn03373237\nn03373611\nn03373943\nn03374102\nn03374282\nn03374372\nn03374473\nn03374570\nn03374649\nn03374838\nn03375171\nn03375329\nn03375575\nn03376159\nn03376279\nn03376595\nn03376771\nn03376938\nn03378005\nn03378174\nn03378342\nn03378442\nn03378593\nn03378765\nn03379051\nn03379204\nn03379343\nn03379719\nn03379828\nn03379989\nn03380301\nn03380647\nn03380724\nn03380867\nn03381126\nn03381231\nn03381450\nn03381565\nn03381776\nn03382104\nn03382292\nn03382413\nn03382533\nn03382708\nn03382856\nn03382969\nn03383099\nn03383211\nn03383378\nn03383468\nn03383562\nn03383821\nn03384167\nn03384352\nn03384891\nn03385295\nn03385557\nn03386011\nn03386343\nn03386544\nn03386726\nn03386870\nn03387323\nn03387653\nn03388043\nn03388183\nn03388323\nn03388549\nn03388711\nn03388990\nn03389611\nn03389761\nn03389889\nn03389983\nn03390075\nn03390327\nn03390673\nn03390786\nn03390983\nn03391301\nn03391613\nn03391770\nn03392648\nn03392741\nn03393017\nn03393199\nn03393324\nn03393761\nn03393912\nn03394149\nn03394272\nn03394480\nn03394649\nn03394916\nn03395256\nn03395401\nn03395514\nn03395859\nn03396074\nn03396580\nn03396654\nn03396997\nn03397087\nn03397266\nn0339
7412\nn03397532\nn03397947\nn03398153\nn03398228\nn03399579\nn03399677\nn03399761\nn03399971\nn03400231\nn03400972\nn03401129\nn03401279\nn03401721\nn03402188\nn03402369\nn03402511\nn03402785\nn03402941\nn03403643\nn03404012\nn03404149\nn03404251\nn03404360\nn03404449\nn03404900\nn03405111\nn03405265\nn03405595\nn03405725\nn03406759\nn03406966\nn03407369\nn03407865\nn03408054\nn03408264\nn03408340\nn03408444\nn03409297\nn03409393\nn03409591\nn03409920\nn03410022\nn03410147\nn03410303\nn03410423\nn03410571\nn03410740\nn03410938\nn03411079\nn03411208\nn03411339\nn03411927\nn03412058\nn03412220\nn03412387\nn03412511\nn03412906\nn03413124\nn03413264\nn03413428\nn03413684\nn03413828\nn03414029\nn03414162\nn03414676\nn03415252\nn03415486\nn03415626\nn03415749\nn03415868\nn03416094\nn03416489\nn03416640\nn03416775\nn03416900\nn03417042\nn03417202\nn03417345\nn03417749\nn03417970\nn03418158\nn03418242\nn03418402\nn03418618\nn03418749\nn03418915\nn03419014\nn03420345\nn03420801\nn03420935\nn03421117\nn03421324\nn03421485\nn03421669\nn03421768\nn03421960\nn03422072\nn03422484\nn03422589\nn03422771\nn03423099\nn03423224\nn03423306\nn03423479\nn03423568\nn03423719\nn03423877\nn03424204\nn03424325\nn03424489\nn03424630\nn03424862\nn03425241\nn03425325\nn03425413\nn03425595\nn03425769\nn03426134\nn03426285\nn03426462\nn03426574\nn03426871\nn03427202\nn03427296\nn03428090\nn03428226\nn03428349\nn03429003\nn03429137\nn03429288\nn03429682\nn03429771\nn03429914\nn03430091\nn03430313\nn03430418\nn03430551\nn03430959\nn03431243\nn03431570\nn03431745\nn03432061\nn03432129\nn03432360\nn03432509\nn03433247\nn03433637\nn03433877\nn03434188\nn03434285\nn03434830\nn03435593\nn03435743\nn03435991\nn03436075\nn03436182\nn03436417\nn03436549\nn03436656\nn03436772\nn03436891\nn03436990\nn03437184\nn03437295\nn03437430\nn03437581\nn03437741\nn03437829\nn03437941\nn03438071\nn03438257\nn03438661\nn03438780\nn03438863\nn03439348\nn03439631\nn03439814\nn03440216\nn03440682\nn03440876\nn03441112\nn03
441345\nn03441465\nn03441582\nn03442288\nn03442487\nn03442597\nn03442756\nn03443005\nn03443149\nn03443371\nn03443543\nn03443912\nn03444034\nn03445326\nn03445617\nn03445777\nn03445924\nn03446070\nn03446268\nn03446832\nn03447075\nn03447358\nn03447447\nn03447721\nn03447894\nn03448031\nn03448590\nn03448696\nn03448956\nn03449217\nn03449309\nn03449451\nn03449564\nn03449858\nn03450230\nn03450516\nn03450734\nn03450881\nn03450974\nn03451120\nn03451253\nn03451365\nn03451711\nn03451798\nn03452267\nn03452449\nn03452594\nn03452741\nn03453231\nn03453320\nn03453443\nn03454110\nn03454211\nn03454442\nn03454536\nn03454707\nn03454885\nn03455355\nn03455488\nn03455642\nn03455802\nn03456024\nn03456186\nn03456299\nn03456447\nn03456548\nn03456665\nn03457008\nn03457451\nn03457686\nn03457902\nn03458271\nn03458422\nn03459328\nn03459591\nn03459775\nn03459914\nn03460040\nn03460147\nn03460297\nn03460455\nn03460899\nn03461288\nn03461385\nn03461651\nn03461882\nn03461988\nn03462110\nn03462315\nn03462747\nn03462972\nn03463185\nn03463381\nn03463666\nn03464053\nn03464467\nn03464628\nn03464952\nn03465040\nn03465151\nn03465320\nn03465426\nn03465500\nn03465605\nn03465718\nn03465818\nn03466162\nn03466493\nn03466600\nn03466839\nn03466947\nn03467068\nn03467254\nn03467380\nn03467517\nn03467796\nn03467887\nn03467984\nn03468570\nn03468696\nn03468821\nn03469031\nn03469175\nn03469493\nn03469832\nn03469903\nn03470005\nn03470222\nn03470387\nn03470629\nn03470948\nn03471030\nn03471190\nn03471347\nn03471779\nn03472232\nn03472535\nn03472672\nn03472796\nn03472937\nn03473078\nn03473227\nn03473465\nn03473817\nn03473966\nn03474167\nn03474352\nn03474779\nn03474896\nn03475581\nn03475674\nn03475823\nn03475961\nn03476083\nn03476313\nn03476542\nn03476684\nn03476991\nn03477143\nn03477303\nn03477410\nn03477512\nn03477773\nn03477902\nn03478589\nn03478756\nn03478907\nn03479121\nn03479266\nn03479397\nn03479502\nn03480579\nn03480719\nn03480973\nn03481172\nn03481521\nn03482001\nn03482128\nn03482252\nn03482405\nn03482523\nn03482877\nn
03483086\nn03483230\nn03483316\nn03483531\nn03483637\nn03483823\nn03483971\nn03484083\nn03484487\nn03484576\nn03484809\nn03484931\nn03485198\nn03485309\nn03485407\nn03485575\nn03485794\nn03487090\nn03487331\nn03487444\nn03487533\nn03487642\nn03487774\nn03487886\nn03488111\nn03488188\nn03488438\nn03488603\nn03488784\nn03488887\nn03489048\nn03489162\nn03490006\nn03490119\nn03490324\nn03490449\nn03490649\nn03490784\nn03490884\nn03491032\nn03491724\nn03491988\nn03492087\nn03492250\nn03492542\nn03492922\nn03493219\nn03493792\nn03493911\nn03494278\nn03494537\nn03494706\nn03495039\nn03495258\nn03495570\nn03495671\nn03495941\nn03496183\nn03496296\nn03496486\nn03496612\nn03496892\nn03497100\nn03497352\nn03497657\nn03498441\nn03498536\nn03498662\nn03498781\nn03498866\nn03498962\nn03499354\nn03499468\nn03499907\nn03500090\nn03500209\nn03500295\nn03500389\nn03500457\nn03500557\nn03500699\nn03500838\nn03500971\nn03501152\nn03501288\nn03501520\nn03501614\nn03502200\nn03502331\nn03502509\nn03502777\nn03502897\nn03503097\nn03503233\nn03503358\nn03503477\nn03503567\nn03503718\nn03503997\nn03504205\nn03504293\nn03504723\nn03505015\nn03505133\nn03505383\nn03505504\nn03505667\nn03505764\nn03506028\nn03506184\nn03506370\nn03506560\nn03506727\nn03506880\nn03507241\nn03507458\nn03507658\nn03507963\nn03508101\nn03508485\nn03508881\nn03509394\nn03509608\nn03509843\nn03510072\nn03510244\nn03510384\nn03510487\nn03510583\nn03510866\nn03510987\nn03511175\nn03511333\nn03512030\nn03512147\nn03512452\nn03512624\nn03512911\nn03513137\nn03513376\nn03514129\nn03514340\nn03514451\nn03514693\nn03514894\nn03515338\nn03515934\nn03516266\nn03516367\nn03516647\nn03516844\nn03516996\nn03517509\nn03517647\nn03517760\nn03517899\nn03517982\nn03518135\nn03518230\nn03518305\nn03518445\nn03518631\nn03518829\nn03518943\nn03519081\nn03519226\nn03519387\nn03519674\nn03519848\nn03520493\nn03521076\nn03521431\nn03521544\nn03521675\nn03521771\nn03521899\nn03522003\nn03522100\nn03522634\nn03522863\nn03522990\nn03523134\
nn03523398\nn03523506\nn03523987\nn03524150\nn03524287\nn03524425\nn03524574\nn03524745\nn03524976\nn03525074\nn03525252\nn03525454\nn03525693\nn03525827\nn03526062\nn03527149\nn03527444\nn03527565\nn03527675\nn03528100\nn03528263\nn03528523\nn03528901\nn03529175\nn03529444\nn03529629\nn03529860\nn03530189\nn03530511\nn03530642\nn03530910\nn03531281\nn03531447\nn03531546\nn03531691\nn03531982\nn03532342\nn03532672\nn03532919\nn03533014\nn03533392\nn03533486\nn03533654\nn03533845\nn03534580\nn03534695\nn03534776\nn03535024\nn03535284\nn03535647\nn03535780\nn03536122\nn03536568\nn03536761\nn03537085\nn03537241\nn03537412\nn03537550\nn03538037\nn03538179\nn03538300\nn03538406\nn03538542\nn03538634\nn03538817\nn03538957\nn03539103\nn03539293\nn03539433\nn03539546\nn03539678\nn03539754\nn03540090\nn03540267\nn03540476\nn03540595\nn03540914\nn03541091\nn03541269\nn03541393\nn03541537\nn03541696\nn03541923\nn03542333\nn03542605\nn03542727\nn03542860\nn03543012\nn03543112\nn03543254\nn03543394\nn03543511\nn03543603\nn03543735\nn03543945\nn03544143\nn03544238\nn03544360\nn03545150\nn03545470\nn03545585\nn03545756\nn03545961\nn03546112\nn03546235\nn03546340\nn03547054\nn03547229\nn03547397\nn03547530\nn03547861\nn03548086\nn03548195\nn03548320\nn03548402\nn03548533\nn03548626\nn03548930\nn03549199\nn03549350\nn03549473\nn03549589\nn03549732\nn03549897\nn03550153\nn03550289\nn03550420\nn03551084\nn03551395\nn03551582\nn03551790\nn03552001\nn03552449\nn03552749\nn03553019\nn03553248\nn03553486\nn03554375\nn03554460\nn03554645\nn03555006\nn03555217\nn03555426\nn03555564\nn03555662\nn03555862\nn03555996\nn03556173\nn03556679\nn03556811\nn03556992\nn03557270\nn03557360\nn03557590\nn03557692\nn03557840\nn03558007\nn03558176\nn03558404\nn03558633\nn03558739\nn03559373\nn03559531\nn03559999\nn03560430\nn03560860\nn03561047\nn03561169\nn03561573\nn03562565\nn03563200\nn03563460\nn03563710\nn03563967\nn03564849\nn03565288\nn03565565\nn03565710\nn03565830\nn03565991\nn03566193\nn0356632
9\nn03566555\nn03566730\nn03566860\nn03567066\nn03567635\nn03567788\nn03567912\nn03568117\nn03568818\nn03569014\nn03569174\nn03569293\nn03569494\nn03571280\nn03571439\nn03571625\nn03571853\nn03571942\nn03572107\nn03572205\nn03572321\nn03572631\nn03573574\nn03573848\nn03574243\nn03574416\nn03574555\nn03574816\nn03575958\nn03576215\nn03576443\nn03576955\nn03577090\nn03577312\nn03577474\nn03577672\nn03577818\nn03578055\nn03578251\nn03578656\nn03578981\nn03579538\nn03579982\nn03580518\nn03580615\nn03580845\nn03580990\nn03581125\nn03581531\nn03581897\nn03582508\nn03582959\nn03583419\nn03583621\nn03584254\nn03584400\nn03584829\nn03585073\nn03585337\nn03585438\nn03585551\nn03585682\nn03585778\nn03585875\nn03586219\nn03586631\nn03586911\nn03587205\nn03588216\nn03588841\nn03588951\nn03589313\nn03589513\nn03589672\nn03589791\nn03590306\nn03590475\nn03590588\nn03590841\nn03590932\nn03591116\nn03591313\nn03591592\nn03591798\nn03591901\nn03592245\nn03592669\nn03592773\nn03592931\nn03593122\nn03593222\nn03593526\nn03593862\nn03594010\nn03594148\nn03594277\nn03594523\nn03594734\nn03594945\nn03595055\nn03595264\nn03595409\nn03595523\nn03595614\nn03595860\nn03596099\nn03596285\nn03596543\nn03597147\nn03597317\nn03597916\nn03598151\nn03598299\nn03598385\nn03598515\nn03598646\nn03598783\nn03598930\nn03599486\nn03599964\nn03600285\nn03600475\nn03600722\nn03600977\nn03601442\nn03601638\nn03601840\nn03602081\nn03602194\nn03602365\nn03602686\nn03602790\nn03602883\nn03603442\nn03603594\nn03603722\nn03604156\nn03604311\nn03604400\nn03604536\nn03604629\nn03604763\nn03604843\nn03605417\nn03605504\nn03605598\nn03605722\nn03605915\nn03606106\nn03606251\nn03606347\nn03606465\nn03607029\nn03607186\nn03607527\nn03607659\nn03607923\nn03608504\nn03609147\nn03609235\nn03609397\nn03609542\nn03609786\nn03609959\nn03610098\nn03610418\nn03610524\nn03610682\nn03610836\nn03610992\nn03612010\nn03612814\nn03612965\nn03613294\nn03613592\nn03614007\nn03614383\nn03614532\nn03614782\nn03614887\nn03615300\nn03615
406\nn03615563\nn03615655\nn03615790\nn03616091\nn03616225\nn03616428\nn03616763\nn03616979\nn03617095\nn03617312\nn03617480\nn03617594\nn03617834\nn03618101\nn03618339\nn03618546\nn03618678\nn03618797\nn03618982\nn03619050\nn03619196\nn03619275\nn03619396\nn03619650\nn03619793\nn03619890\nn03620052\nn03620353\nn03620967\nn03621049\nn03621377\nn03621694\nn03622058\nn03622401\nn03622526\nn03622839\nn03622931\nn03623198\nn03623338\nn03623556\nn03624134\nn03624400\nn03624767\nn03625355\nn03625539\nn03625646\nn03625943\nn03626115\nn03626272\nn03626418\nn03626502\nn03626760\nn03627232\nn03627954\nn03628071\nn03628215\nn03628421\nn03628511\nn03628728\nn03628831\nn03628984\nn03629100\nn03629231\nn03629520\nn03629643\nn03630262\nn03630383\nn03631177\nn03631811\nn03631922\nn03632100\nn03632577\nn03632729\nn03632852\nn03632963\nn03633091\nn03633341\nn03633632\nn03633886\nn03634034\nn03634899\nn03635032\nn03635108\nn03635330\nn03635516\nn03635668\nn03635932\nn03636248\nn03636649\nn03637027\nn03637181\nn03637318\nn03637480\nn03637787\nn03637898\nn03638014\nn03638180\nn03638623\nn03638743\nn03638883\nn03639077\nn03639230\nn03639497\nn03639675\nn03639880\nn03640850\nn03640988\nn03641569\nn03641947\nn03642144\nn03642341\nn03642444\nn03642573\nn03642806\nn03643149\nn03643253\nn03643491\nn03643737\nn03643907\nn03644073\nn03644378\nn03644858\nn03645011\nn03645168\nn03645290\nn03645577\nn03646020\nn03646148\nn03646296\nn03646809\nn03646916\nn03647423\nn03647520\nn03648219\nn03648431\nn03648667\nn03649003\nn03649161\nn03649288\nn03649674\nn03649797\nn03649909\nn03650551\nn03651388\nn03651605\nn03651843\nn03652100\nn03652389\nn03652729\nn03652826\nn03652932\nn03653110\nn03653220\nn03653454\nn03653583\nn03653740\nn03653833\nn03653975\nn03654576\nn03654826\nn03655072\nn03655470\nn03655720\nn03656484\nn03656957\nn03657121\nn03657239\nn03657511\nn03658102\nn03658185\nn03658635\nn03658858\nn03659292\nn03659686\nn03659809\nn03659950\nn03660124\nn03660562\nn03660909\nn03661043\nn03661340\nn036
62301\nn03662452\nn03662601\nn03662719\nn03662887\nn03663433\nn03663531\nn03663910\nn03664159\nn03664675\nn03664840\nn03664943\nn03665232\nn03665366\nn03665851\nn03665924\nn03666238\nn03666362\nn03666591\nn03666917\nn03667060\nn03667235\nn03667552\nn03667664\nn03667829\nn03668067\nn03668279\nn03668488\nn03668803\nn03669245\nn03669534\nn03669886\nn03670208\nn03671914\nn03672521\nn03672827\nn03673027\nn03673270\nn03673450\nn03673767\nn03674270\nn03674440\nn03674731\nn03674842\nn03675076\nn03675235\nn03675445\nn03675558\nn03675907\nn03676087\nn03676483\nn03676623\nn03676759\nn03677115\nn03677682\nn03677766\nn03678558\nn03678729\nn03678879\nn03679384\nn03679712\nn03680248\nn03680355\nn03680512\nn03680734\nn03680858\nn03680942\nn03681477\nn03681813\nn03682380\nn03682487\nn03682877\nn03683079\nn03683341\nn03683457\nn03683606\nn03683708\nn03683995\nn03684143\nn03684224\nn03684489\nn03684611\nn03684740\nn03684823\nn03685307\nn03685486\nn03685640\nn03685820\nn03686130\nn03686363\nn03686470\nn03686924\nn03687137\nn03687928\nn03688066\nn03688192\nn03688405\nn03688504\nn03688605\nn03688707\nn03688832\nn03688943\nn03689157\nn03689570\nn03690168\nn03690279\nn03690473\nn03690851\nn03690938\nn03691459\nn03691817\nn03692004\nn03692136\nn03692272\nn03692379\nn03692522\nn03692842\nn03693293\nn03693474\nn03693707\nn03693860\nn03694196\nn03694356\nn03694639\nn03694761\nn03694949\nn03695122\nn03695452\nn03695616\nn03695753\nn03695857\nn03695957\nn03696065\nn03696301\nn03696445\nn03696568\nn03696746\nn03696909\nn03697007\nn03697366\nn03697552\nn03697812\nn03697913\nn03698123\nn03698226\nn03698360\nn03698604\nn03698723\nn03698815\nn03699280\nn03699591\nn03699754\nn03699975\nn03700963\nn03701191\nn03701391\nn03701640\nn03701790\nn03702248\nn03702440\nn03702582\nn03703075\nn03703203\nn03703463\nn03703590\nn03703730\nn03703862\nn03703945\nn03704549\nn03704834\nn03705379\nn03705808\nn03706229\nn03706415\nn03706653\nn03706939\nn03707171\nn03707372\nn03707597\nn03707766\nn03708036\nn03708425\nn0
3708843\nn03708962\nn03709206\nn03709363\nn03709545\nn03709644\nn03709823\nn03709960\nn03710079\nn03710193\nn03710294\nn03710421\nn03710528\nn03710637\nn03710721\nn03710937\nn03711044\nn03711711\nn03711999\nn03712111\nn03712337\nn03712444\nn03712887\nn03712981\nn03713069\nn03713151\nn03713436\nn03714235\nn03715114\nn03715275\nn03715386\nn03715669\nn03715892\nn03716228\nn03716887\nn03716966\nn03717131\nn03717285\nn03717447\nn03717622\nn03718212\nn03718335\nn03718458\nn03718581\nn03718699\nn03718789\nn03718935\nn03719053\nn03719343\nn03719560\nn03719743\nn03720005\nn03720163\nn03720665\nn03720891\nn03721047\nn03721252\nn03721384\nn03721590\nn03722007\nn03722288\nn03722646\nn03722944\nn03723153\nn03723267\nn03723439\nn03723781\nn03723885\nn03724066\nn03724176\nn03724417\nn03724538\nn03724623\nn03724756\nn03724870\nn03725035\nn03725506\nn03725600\nn03725717\nn03725869\nn03726116\nn03726233\nn03726371\nn03726516\nn03726760\nn03726993\nn03727067\nn03727465\nn03727605\nn03727837\nn03727946\nn03728437\nn03728982\nn03729131\nn03729308\nn03729402\nn03729482\nn03729647\nn03729826\nn03729951\nn03730153\nn03730334\nn03730494\nn03730655\nn03730788\nn03730893\nn03731019\nn03731483\nn03731695\nn03731882\nn03732020\nn03732114\nn03732458\nn03732543\nn03732658\nn03733131\nn03733281\nn03733465\nn03733547\nn03733644\nn03733805\nn03733925\nn03735637\nn03735963\nn03736064\nn03736147\nn03736269\nn03736372\nn03736470\nn03736970\nn03738066\nn03738241\nn03738472\nn03739518\nn03739693\nn03742019\nn03742115\nn03742238\nn03743016\nn03743279\nn03743902\nn03744276\nn03744684\nn03744840\nn03745146\nn03745487\nn03745571\nn03746005\nn03746155\nn03746330\nn03746486\nn03748162\nn03749504\nn03749634\nn03749807\nn03750206\nn03750437\nn03750614\nn03751065\nn03751269\nn03751458\nn03751590\nn03751757\nn03752071\nn03752185\nn03752398\nn03752922\nn03753077\nn03753514\nn03757604\nn03758089\nn03758220\nn03758894\nn03758992\nn03759243\nn03759432\nn03759661\nn03759954\nn03760310\nn03760671\nn03760944\nn03761084\n
n03761588\nn03761731\nn03762238\nn03762332\nn03762434\nn03762602\nn03762982\nn03763727\nn03763968\nn03764276\nn03764606\nn03764736\nn03764822\nn03764995\nn03765128\nn03765467\nn03765561\nn03765934\nn03766044\nn03766218\nn03766322\nn03766508\nn03766600\nn03766697\nn03766935\nn03767112\nn03767203\nn03767459\nn03767745\nn03767966\nn03768132\nn03768683\nn03768823\nn03768916\nn03769610\nn03769722\nn03769881\nn03770085\nn03770224\nn03770316\nn03770439\nn03770520\nn03770679\nn03770834\nn03770954\nn03772077\nn03772269\nn03772584\nn03772674\nn03773035\nn03773504\nn03773835\nn03774327\nn03774461\nn03775071\nn03775199\nn03775388\nn03775546\nn03775636\nn03775747\nn03775847\nn03776167\nn03776460\nn03776877\nn03776997\nn03777126\nn03777568\nn03777754\nn03778459\nn03778817\nn03779000\nn03779128\nn03779246\nn03779370\nn03779884\nn03780047\nn03780799\nn03781055\nn03781244\nn03781467\nn03781594\nn03781683\nn03781787\nn03782006\nn03782190\nn03782794\nn03782929\nn03783304\nn03783430\nn03783575\nn03783873\nn03784139\nn03784270\nn03784793\nn03784896\nn03785016\nn03785142\nn03785237\nn03785499\nn03785721\nn03786096\nn03786194\nn03786313\nn03786621\nn03786715\nn03786901\nn03787032\nn03787523\nn03788047\nn03788195\nn03788365\nn03788498\nn03788601\nn03788914\nn03789171\nn03789400\nn03789603\nn03789794\nn03789946\nn03790230\nn03790512\nn03790755\nn03790953\nn03791053\nn03791235\nn03792048\nn03792334\nn03792526\nn03792782\nn03792972\nn03793489\nn03793850\nn03794056\nn03794136\nn03794798\nn03795123\nn03795269\nn03795758\nn03795976\nn03796181\nn03796401\nn03796522\nn03796605\nn03796848\nn03796974\nn03797062\nn03797182\nn03797264\nn03797390\nn03797896\nn03798061\nn03798442\nn03798610\nn03798982\nn03799113\nn03799240\nn03799375\nn03799610\nn03799876\nn03800371\nn03800485\nn03800563\nn03800772\nn03800933\nn03801353\nn03801533\nn03801671\nn03801760\nn03801880\nn03802007\nn03802228\nn03802393\nn03802643\nn03802800\nn03802973\nn03803116\nn03803284\nn03803780\nn03804211\nn03804744\nn03805180\nn03805280
\nn03805374\nn03805503\nn03805725\nn03805933\nn03807334\nn03809211\nn03809312\nn03809603\nn03809686\nn03809802\nn03810412\nn03810952\nn03811295\nn03811444\nn03811847\nn03811965\nn03812263\nn03812382\nn03812789\nn03812924\nn03813078\nn03813176\nn03813946\nn03814528\nn03814639\nn03814727\nn03814817\nn03814906\nn03815149\nn03815278\nn03815482\nn03815615\nn03816005\nn03816136\nn03816394\nn03816530\nn03816849\nn03817191\nn03817331\nn03817522\nn03817647\nn03818001\nn03818343\nn03819047\nn03819336\nn03819448\nn03819595\nn03819994\nn03820154\nn03820318\nn03820728\nn03820950\nn03821145\nn03821424\nn03821518\nn03822171\nn03822361\nn03822504\nn03822656\nn03822767\nn03823111\nn03823216\nn03823312\nn03823673\nn03823906\nn03824197\nn03824284\nn03824381\nn03824589\nn03824713\nn03824999\nn03825080\nn03825271\nn03825442\nn03825673\nn03825788\nn03825913\nn03826039\nn03826186\nn03827420\nn03827536\nn03828020\nn03829340\nn03829857\nn03829954\nn03831203\nn03831382\nn03831757\nn03832144\nn03832673\nn03833907\nn03834040\nn03834472\nn03834604\nn03835197\nn03835729\nn03835941\nn03836062\nn03836451\nn03836602\nn03836906\nn03836976\nn03837422\nn03837606\nn03837698\nn03837869\nn03838024\nn03838298\nn03838748\nn03838899\nn03839172\nn03839276\nn03839424\nn03839671\nn03839795\nn03840327\nn03840681\nn03840823\nn03841011\nn03841143\nn03841290\nn03841666\nn03842012\nn03842156\nn03842276\nn03842377\nn03842585\nn03842754\nn03842986\nn03843092\nn03843316\nn03843438\nn03843555\nn03843883\nn03844045\nn03844233\nn03844550\nn03844673\nn03844815\nn03844965\nn03845107\nn03845190\nn03845990\nn03846100\nn03846234\nn03846431\nn03846677\nn03846772\nn03846970\nn03847471\nn03847823\nn03848033\nn03848168\nn03848348\nn03848537\nn03849275\nn03849412\nn03849679\nn03849814\nn03849943\nn03850053\nn03850245\nn03850492\nn03850613\nn03851341\nn03851787\nn03852280\nn03852544\nn03852688\nn03853291\nn03853924\nn03854065\nn03854421\nn03854506\nn03854722\nn03854815\nn03855214\nn03855333\nn03855464\nn03855604\nn03855756\nn038559
08\nn03856012\nn03856335\nn03856465\nn03856728\nn03857026\nn03857156\nn03857291\nn03857687\nn03857828\nn03858085\nn03858183\nn03858418\nn03858533\nn03858837\nn03859000\nn03859170\nn03859280\nn03859495\nn03859608\nn03859958\nn03860234\nn03860404\nn03861048\nn03861271\nn03861430\nn03861596\nn03861842\nn03862379\nn03862676\nn03862862\nn03863108\nn03863262\nn03863657\nn03863783\nn03863923\nn03864139\nn03864356\nn03864692\nn03865288\nn03865371\nn03865557\nn03865820\nn03865949\nn03866082\nn03867854\nn03868044\nn03868242\nn03868324\nn03868406\nn03868643\nn03868763\nn03868863\nn03869838\nn03869976\nn03870105\nn03870290\nn03870546\nn03870672\nn03870980\nn03871083\nn03871371\nn03871524\nn03871628\nn03871724\nn03871860\nn03872016\nn03872167\nn03872273\nn03873416\nn03873699\nn03873848\nn03873996\nn03874138\nn03874293\nn03874487\nn03874599\nn03874823\nn03875218\nn03875806\nn03875955\nn03876111\nn03876231\nn03877351\nn03877472\nn03877674\nn03877845\nn03878066\nn03878211\nn03878294\nn03878418\nn03878511\nn03878674\nn03878828\nn03878963\nn03879456\nn03879705\nn03880032\nn03880129\nn03880323\nn03880531\nn03881305\nn03881404\nn03881534\nn03882611\nn03882960\nn03883054\nn03883385\nn03883524\nn03883664\nn03883773\nn03883944\nn03884397\nn03884554\nn03884639\nn03884778\nn03884926\nn03885028\nn03885194\nn03885293\nn03885410\nn03885535\nn03885669\nn03885788\nn03885904\nn03886053\nn03886641\nn03886762\nn03886940\nn03887185\nn03887330\nn03887512\nn03887697\nn03887899\nn03888022\nn03888257\nn03888605\nn03888808\nn03888998\nn03889397\nn03889503\nn03889626\nn03889726\nn03889871\nn03890093\nn03890233\nn03890358\nn03890514\nn03891051\nn03891251\nn03891332\nn03891538\nn03892178\nn03892425\nn03892557\nn03892728\nn03893935\nn03894051\nn03894379\nn03894677\nn03894933\nn03895038\nn03895170\nn03895866\nn03896103\nn03896233\nn03896419\nn03896526\nn03896628\nn03896984\nn03897130\nn03897634\nn03897943\nn03898129\nn03898271\nn03898395\nn03898633\nn03898787\nn03899100\nn03899612\nn03899768\nn03899933\nn0390
0028\nn03900194\nn03900301\nn03900393\nn03900979\nn03901229\nn03901338\nn03901750\nn03901974\nn03902125\nn03902220\nn03902482\nn03902756\nn03903133\nn03903290\nn03903424\nn03903733\nn03903868\nn03904060\nn03904183\nn03904433\nn03904657\nn03904782\nn03904909\nn03905361\nn03905540\nn03905730\nn03905947\nn03906106\nn03906224\nn03906463\nn03906590\nn03906789\nn03906894\nn03906997\nn03907475\nn03907654\nn03907908\nn03908111\nn03908204\nn03908456\nn03908618\nn03908714\nn03909020\nn03909160\nn03909406\nn03909516\nn03909658\nn03911406\nn03911513\nn03911658\nn03911767\nn03911866\nn03912218\nn03912821\nn03913343\nn03913930\nn03914106\nn03914337\nn03914438\nn03914583\nn03914831\nn03915118\nn03915320\nn03915437\nn03915900\nn03916031\nn03916289\nn03916385\nn03916470\nn03916720\nn03917048\nn03917198\nn03917327\nn03917814\nn03918074\nn03918480\nn03918737\nn03919096\nn03919289\nn03919430\nn03919808\nn03920288\nn03920384\nn03920641\nn03920737\nn03920867\nn03923379\nn03923564\nn03923692\nn03923918\nn03924069\nn03924407\nn03924532\nn03924679\nn03926148\nn03926412\nn03926876\nn03927091\nn03927299\nn03927539\nn03927792\nn03928116\nn03928589\nn03928814\nn03928994\nn03929091\nn03929202\nn03929443\nn03929660\nn03929855\nn03930229\nn03930313\nn03930431\nn03930515\nn03930630\nn03931765\nn03931885\nn03931980\nn03932080\nn03932670\nn03933391\nn03933933\nn03934042\nn03934229\nn03934311\nn03934565\nn03934656\nn03934890\nn03935116\nn03935234\nn03935335\nn03935883\nn03936269\nn03936466\nn03937543\nn03937835\nn03937931\nn03938037\nn03938244\nn03938401\nn03938522\nn03938725\nn03939062\nn03939178\nn03939281\nn03939440\nn03939565\nn03939677\nn03939844\nn03940256\nn03940894\nn03941013\nn03941231\nn03941417\nn03941586\nn03941684\nn03941887\nn03942028\nn03942600\nn03942813\nn03942920\nn03943115\nn03943266\nn03943623\nn03943714\nn03943833\nn03943920\nn03944024\nn03944138\nn03944341\nn03945459\nn03945615\nn03945817\nn03945928\nn03946076\nn03946162\nn03947111\nn03947343\nn03947466\nn03947798\nn03947888\nn03
948242\nn03948459\nn03948830\nn03948950\nn03949145\nn03949317\nn03949761\nn03950228\nn03950359\nn03950537\nn03950647\nn03950899\nn03951068\nn03951213\nn03951453\nn03951800\nn03951971\nn03952150\nn03952576\nn03953020\nn03953416\nn03953901\nn03954393\nn03954731\nn03955296\nn03955489\nn03955809\nn03955941\nn03956157\nn03956331\nn03956531\nn03956623\nn03956785\nn03956922\nn03957315\nn03957420\nn03957762\nn03957991\nn03958227\nn03958338\nn03958630\nn03958752\nn03959014\nn03959123\nn03959227\nn03959701\nn03960374\nn03960490\nn03961394\nn03961630\nn03961711\nn03961828\nn03961939\nn03962525\nn03962685\nn03962852\nn03962932\nn03963028\nn03963198\nn03963294\nn03963483\nn03963645\nn03964495\nn03964611\nn03965456\nn03965907\nn03966206\nn03966325\nn03966582\nn03966751\nn03966976\nn03967270\nn03967396\nn03967562\nn03967942\nn03968293\nn03968479\nn03968581\nn03968728\nn03969510\nn03970156\nn03970363\nn03970546\nn03971218\nn03971321\nn03971960\nn03972146\nn03972372\nn03972524\nn03973003\nn03973285\nn03973402\nn03973520\nn03973628\nn03973839\nn03973945\nn03974070\nn03974915\nn03975035\nn03975657\nn03975788\nn03975926\nn03976105\nn03976268\nn03976467\nn03976657\nn03977158\nn03977266\nn03977430\nn03977592\nn03977966\nn03978421\nn03978575\nn03978686\nn03978815\nn03978966\nn03979377\nn03979492\nn03980026\nn03980478\nn03980874\nn03980986\nn03981094\nn03981340\nn03981566\nn03981760\nn03981924\nn03982232\nn03982331\nn03982430\nn03982642\nn03982767\nn03982895\nn03983396\nn03983499\nn03983612\nn03983712\nn03983928\nn03984125\nn03984234\nn03984381\nn03984643\nn03984759\nn03985069\nn03985232\nn03985441\nn03985881\nn03986071\nn03986224\nn03986355\nn03986562\nn03986704\nn03986857\nn03986949\nn03987266\nn03987376\nn03987674\nn03987865\nn03987990\nn03988170\nn03988758\nn03988926\nn03989199\nn03989349\nn03989447\nn03989665\nn03989777\nn03989898\nn03990474\nn03991062\nn03991202\nn03991321\nn03991443\nn03991646\nn03991837\nn03992325\nn03992436\nn03992509\nn03992703\nn03992975\nn03993053\nn03993180\nn
03993403\nn03993703\nn03993878\nn03994008\nn03994297\nn03994417\nn03994614\nn03994757\nn03995018\nn03995265\nn03995372\nn03995535\nn03995661\nn03995856\nn03996004\nn03996145\nn03996416\nn03996849\nn03997274\nn03997484\nn03997875\nn03998194\nn03998333\nn03998673\nn03999064\nn03999160\nn03999621\nn03999992\nn04000311\nn04000480\nn04000592\nn04000716\nn04000998\nn04001132\nn04001265\nn04001397\nn04001499\nn04001661\nn04001845\nn04002262\nn04002371\nn04002629\nn04003241\nn04003359\nn04003856\nn04004099\nn04004210\nn04004475\nn04004767\nn04004990\nn04005197\nn04005630\nn04005912\nn04006067\nn04006227\nn04006330\nn04006411\nn04007415\nn04007664\nn04008385\nn04008634\nn04009552\nn04009801\nn04009923\nn04010057\nn04010779\nn04010927\nn04011827\nn04012084\nn04012482\nn04012665\nn04013060\nn04013176\nn04013600\nn04013729\nn04014297\nn04015204\nn04015786\nn04015908\nn04016240\nn04016479\nn04016576\nn04016684\nn04016846\nn04017571\nn04017807\nn04018155\nn04018399\nn04018667\nn04019101\nn04019335\nn04019541\nn04019696\nn04019881\nn04020087\nn04020298\nn04020744\nn04020912\nn04021028\nn04021164\nn04021362\nn04021503\nn04021704\nn04021798\nn04022332\nn04022434\nn04022708\nn04022866\nn04023021\nn04023119\nn04023249\nn04023422\nn04023695\nn04023962\nn04024137\nn04024274\nn04024862\nn04024983\nn04025508\nn04025633\nn04026053\nn04026180\nn04026417\nn04026813\nn04026918\nn04027023\nn04027367\nn04027706\nn04027820\nn04027935\nn04028074\nn04028221\nn04028315\nn04028581\nn04028764\nn04029416\nn04029647\nn04029734\nn04029913\nn04030054\nn04030161\nn04030274\nn04030414\nn04030518\nn04030846\nn04030965\nn04031884\nn04032509\nn04032603\nn04032936\nn04033287\nn04033425\nn04033557\nn04033801\nn04033901\nn04033995\nn04034262\nn04034367\nn04035231\nn04035634\nn04035748\nn04035836\nn04035912\nn04036155\nn04036303\nn04036776\nn04036963\nn04037076\nn04037220\nn04037298\nn04037443\nn04037873\nn04037964\nn04038231\nn04038338\nn04038440\nn04038727\nn04039041\nn04039209\nn04039381\nn04039742\nn04039848\
nn04040247\nn04040373\nn04040540\nn04040759\nn04041069\nn04041243\nn04041408\nn04041544\nn04041747\nn04042076\nn04042204\nn04042358\nn04042632\nn04042795\nn04042985\nn04043168\nn04043411\nn04043733\nn04044307\nn04044498\nn04044716\nn04044955\nn04045085\nn04045255\nn04045397\nn04045644\nn04045787\nn04045941\nn04046091\nn04046277\nn04046400\nn04046590\nn04046974\nn04047139\nn04047401\nn04047733\nn04047834\nn04048441\nn04049303\nn04049405\nn04049585\nn04049753\nn04050066\nn04050313\nn04050600\nn04050933\nn04051269\nn04051439\nn04051549\nn04051705\nn04051825\nn04052235\nn04052346\nn04052442\nn04052658\nn04052757\nn04053508\nn04053677\nn04053767\nn04054361\nn04054566\nn04054670\nn04055180\nn04055447\nn04055700\nn04055861\nn04056073\nn04056180\nn04056413\nn04056932\nn04057047\nn04057215\nn04057435\nn04057673\nn04057846\nn04057981\nn04058096\nn04058239\nn04058486\nn04058594\nn04058721\nn04059157\nn04059298\nn04059399\nn04059516\nn04059947\nn04060198\nn04060448\nn04060647\nn04060904\nn04061681\nn04061793\nn04061969\nn04062179\nn04062428\nn04062644\nn04062807\nn04063154\nn04063373\nn04063868\nn04064213\nn04064401\nn04064747\nn04064862\nn04065272\nn04065464\nn04065789\nn04065909\nn04066023\nn04066270\nn04066388\nn04066476\nn04066767\nn04067143\nn04067231\nn04067353\nn04067472\nn04067658\nn04067818\nn04067921\nn04068441\nn04068601\nn04069166\nn04069276\nn04069434\nn04069582\nn04069777\nn04070003\nn04070207\nn04070415\nn04070545\nn04070727\nn04070964\nn04071102\nn04071263\nn04071393\nn04072193\nn04072551\nn04072960\nn04073425\nn04073948\nn04074185\nn04074963\nn04075291\nn04075468\nn04075715\nn04075813\nn04075916\nn04076052\nn04076284\nn04076713\nn04077430\nn04077594\nn04077734\nn04077889\nn04078002\nn04078574\nn04078955\nn04079106\nn04079244\nn04079603\nn04079933\nn04080138\nn04080454\nn04080705\nn04080833\nn04081281\nn04081699\nn04081844\nn04082344\nn04082562\nn04082710\nn04082886\nn04083113\nn04083309\nn04083649\nn04083800\nn04084517\nn04084682\nn04084889\nn04085017\nn0408557
4\nn04085873\nn04086066\nn04086273\nn04086446\nn04086663\nn04086794\nn04086937\nn04087126\nn04087432\nn04087709\nn04087826\nn04088229\nn04088343\nn04088441\nn04088696\nn04088797\nn04089152\nn04089376\nn04089666\nn04089836\nn04089976\nn04090263\nn04090548\nn04090781\nn04091097\nn04091466\nn04091584\nn04091693\nn04092168\nn04093157\nn04093223\nn04093625\nn04093775\nn04093915\nn04094060\nn04094250\nn04094438\nn04094608\nn04094720\nn04094859\nn04095109\nn04095210\nn04095342\nn04095577\nn04095938\nn04096066\nn04096733\nn04096848\nn04097085\nn04097373\nn04097622\nn04097760\nn04097866\nn04098169\nn04098260\nn04098399\nn04098513\nn04098795\nn04099003\nn04099175\nn04099429\nn04099969\nn04100174\nn04100519\nn04101375\nn04101497\nn04101701\nn04101860\nn04102037\nn04102162\nn04102285\nn04102406\nn04102618\nn04102760\nn04102872\nn04102962\nn04103094\nn04103206\nn04103364\nn04103665\nn04103769\nn04103918\nn04104147\nn04104384\nn04104500\nn04104770\nn04104925\nn04105068\nn04105438\nn04105704\nn04105893\nn04107598\nn04107743\nn04107984\nn04108268\nn04108822\nn04108999\nn04110068\nn04110178\nn04110281\nn04110439\nn04110654\nn04110841\nn04110955\nn04111190\nn04111414\nn04111531\nn04111668\nn04111962\nn04112147\nn04112252\nn04112430\nn04112579\nn04112654\nn04112752\nn04112921\nn04113038\nn04113194\nn04113316\nn04113406\nn04113641\nn04113765\nn04113968\nn04114069\nn04114301\nn04114428\nn04114719\nn04114844\nn04114996\nn04115144\nn04115256\nn04115456\nn04115542\nn04115802\nn04115996\nn04116098\nn04116294\nn04116389\nn04116512\nn04117216\nn04117464\nn04117639\nn04118021\nn04118538\nn04118635\nn04118776\nn04119091\nn04119230\nn04119360\nn04119478\nn04119630\nn04119751\nn04120489\nn04120695\nn04120842\nn04121228\nn04121342\nn04121426\nn04121511\nn04121728\nn04122262\nn04122349\nn04122492\nn04122578\nn04122685\nn04122825\nn04123026\nn04123123\nn04123228\nn04123317\nn04123448\nn04123567\nn04123740\nn04124098\nn04124202\nn04124370\nn04124488\nn04124573\nn04124887\nn04125021\nn04125116\nn04125
257\nn04125541\nn04125692\nn04125853\nn04126066\nn04126244\nn04126541\nn04126659\nn04126852\nn04126980\nn04127117\nn04127249\nn04127395\nn04127521\nn04127633\nn04127904\nn04128413\nn04128499\nn04128710\nn04128837\nn04129490\nn04129688\nn04129766\nn04130143\nn04130257\nn04130566\nn04130907\nn04131015\nn04131113\nn04131208\nn04131368\nn04131499\nn04131690\nn04131811\nn04131929\nn04132158\nn04132465\nn04132603\nn04132829\nn04132985\nn04133114\nn04133789\nn04134008\nn04134170\nn04134523\nn04134632\nn04135024\nn04135118\nn04135315\nn04135710\nn04135933\nn04136045\nn04136161\nn04136333\nn04136510\nn04136800\nn04137089\nn04137217\nn04137355\nn04137444\nn04137773\nn04137897\nn04138131\nn04138261\nn04138869\nn04138977\nn04139140\nn04139395\nn04139859\nn04140064\nn04140539\nn04140631\nn04140777\nn04140853\nn04141076\nn04141198\nn04141327\nn04141712\nn04141838\nn04141975\nn04142175\nn04142327\nn04142434\nn04142731\nn04142999\nn04143140\nn04143365\nn04143897\nn04144241\nn04144539\nn04144651\nn04145863\nn04146050\nn04146343\nn04146504\nn04146614\nn04146862\nn04146976\nn04147183\nn04147291\nn04147495\nn04147793\nn04147916\nn04148054\nn04148285\nn04148464\nn04148579\nn04148703\nn04149083\nn04149374\nn04149813\nn04150153\nn04150273\nn04150371\nn04150980\nn04151108\nn04151581\nn04151940\nn04152387\nn04152593\nn04153025\nn04153330\nn04153751\nn04154152\nn04154340\nn04154565\nn04154753\nn04154854\nn04154938\nn04155068\nn04155177\nn04155457\nn04155625\nn04155735\nn04155889\nn04156040\nn04156140\nn04156297\nn04156411\nn04156591\nn04156814\nn04156946\nn04157099\nn04157320\nn04158002\nn04158138\nn04158250\nn04158672\nn04158807\nn04158956\nn04160036\nn04160261\nn04160372\nn04160586\nn04160847\nn04161010\nn04161358\nn04161981\nn04162433\nn04162706\nn04163530\nn04164002\nn04164199\nn04164406\nn04164757\nn04164868\nn04165409\nn04165675\nn04165945\nn04166111\nn04166281\nn04166436\nn04167346\nn04167489\nn04167661\nn04168084\nn04168199\nn04168472\nn04168541\nn04168840\nn04169437\nn04169597\nn041
70037\nn04170384\nn04170515\nn04170694\nn04170933\nn04171208\nn04171459\nn04171629\nn04171831\nn04172107\nn04172230\nn04172342\nn04172512\nn04172607\nn04172776\nn04172904\nn04173046\nn04173172\nn04173511\nn04173907\nn04174026\nn04174101\nn04174234\nn04174500\nn04174705\nn04175039\nn04175147\nn04175574\nn04176068\nn04176190\nn04176295\nn04176528\nn04177041\nn04177329\nn04177545\nn04177654\nn04177755\nn04177820\nn04177931\nn04178190\nn04178329\nn04178668\nn04179126\nn04179712\nn04179824\nn04179913\nn04180063\nn04180229\nn04180888\nn04181083\nn04181228\nn04181561\nn04181718\nn04182152\nn04182322\nn04183217\nn04183329\nn04183957\nn04184095\nn04184316\nn04184435\nn04184600\nn04184880\nn04185071\nn04185529\nn04185804\nn04185946\nn04186051\nn04186268\nn04186455\nn04186624\nn04186848\nn04187061\nn04187233\nn04187547\nn04187751\nn04187885\nn04187970\nn04188064\nn04188179\nn04189092\nn04189282\nn04189651\nn04189816\nn04190052\nn04190376\nn04190464\nn04190747\nn04190997\nn04191150\nn04191595\nn04191943\nn04192238\nn04192361\nn04192521\nn04192698\nn04192858\nn04193179\nn04193377\nn04193742\nn04193883\nn04194009\nn04194127\nn04194289\nn04196080\nn04196502\nn04196803\nn04196925\nn04197110\nn04197391\nn04197781\nn04197878\nn04198015\nn04198233\nn04198355\nn04198453\nn04198562\nn04198722\nn04198797\nn04199027\nn04200000\nn04200258\nn04200537\nn04200800\nn04200908\nn04201064\nn04201297\nn04201733\nn04202142\nn04202282\nn04202417\nn04203356\nn04204081\nn04204238\nn04204347\nn04204755\nn04205062\nn04205318\nn04205505\nn04205613\nn04206070\nn04206225\nn04206356\nn04206570\nn04206790\nn04207151\nn04207343\nn04207596\nn04207763\nn04207903\nn04208065\nn04208210\nn04208427\nn04208582\nn04208760\nn04208936\nn04209133\nn04209239\nn04209509\nn04209613\nn04209811\nn04210012\nn04210120\nn04210288\nn04210390\nn04210591\nn04210858\nn04211001\nn04211219\nn04211356\nn04211528\nn04211857\nn04211970\nn04212165\nn04212282\nn04212467\nn04212810\nn04213105\nn04213264\nn04213353\nn04213530\nn04214046\nn0
4214282\nn04214413\nn04214649\nn04215153\nn04215402\nn04215588\nn04215800\nn04215910\nn04216634\nn04216860\nn04216963\nn04217387\nn04217546\nn04217718\nn04217882\nn04218564\nn04218921\nn04219185\nn04219424\nn04219580\nn04220250\nn04220805\nn04221076\nn04221673\nn04221823\nn04222210\nn04222307\nn04222470\nn04222723\nn04222847\nn04223066\nn04223170\nn04223299\nn04224395\nn04224543\nn04224842\nn04225031\nn04225222\nn04225729\nn04225987\nn04226322\nn04226464\nn04226537\nn04226826\nn04226962\nn04227050\nn04227144\nn04227519\nn04227787\nn04227900\nn04228054\nn04228215\nn04228422\nn04228581\nn04228693\nn04229007\nn04229107\nn04229480\nn04229620\nn04229737\nn04229816\nn04229959\nn04230387\nn04230487\nn04230603\nn04230707\nn04230808\nn04231272\nn04231693\nn04231905\nn04232153\nn04232312\nn04232437\nn04232800\nn04233027\nn04233124\nn04233295\nn04233715\nn04233832\nn04234160\nn04234260\nn04234455\nn04234670\nn04234763\nn04234887\nn04235291\nn04235646\nn04235771\nn04235860\nn04236001\nn04236377\nn04236702\nn04236809\nn04236935\nn04237174\nn04237287\nn04237423\nn04238128\nn04238321\nn04238617\nn04238763\nn04238953\nn04239074\nn04239218\nn04239333\nn04239436\nn04239639\nn04239786\nn04239900\nn04240434\nn04240752\nn04240867\nn04241042\nn04241249\nn04241394\nn04241573\nn04242084\nn04242315\nn04242408\nn04242587\nn04242704\nn04243003\nn04243142\nn04243251\nn04243546\nn04243941\nn04244379\nn04244847\nn04244997\nn04245218\nn04245412\nn04245508\nn04245847\nn04246060\nn04246271\nn04246459\nn04246731\nn04246855\nn04247011\nn04247440\nn04247544\nn04247630\nn04247736\nn04247876\nn04248209\nn04248396\nn04248507\nn04248851\nn04249415\nn04249582\nn04249882\nn04250224\nn04250473\nn04250599\nn04250692\nn04250850\nn04251144\nn04251701\nn04251791\nn04252077\nn04252225\nn04252331\nn04252560\nn04252653\nn04253057\nn04253168\nn04253304\nn04253931\nn04254009\nn04254120\nn04254450\nn04254680\nn04254777\nn04255163\nn04255346\nn04255499\nn04255586\nn04255670\nn04255768\nn04255899\nn04256318\nn04256520\n
n04256758\nn04256891\nn04257223\nn04257684\nn04257790\nn04257986\nn04258138\nn04258333\nn04258438\nn04258618\nn04258732\nn04258859\nn04259202\nn04259468\nn04259630\nn04260192\nn04260364\nn04260589\nn04261116\nn04261281\nn04261369\nn04261506\nn04261638\nn04261767\nn04261868\nn04262161\nn04262530\nn04262678\nn04262869\nn04263257\nn04263336\nn04263502\nn04263760\nn04263950\nn04264134\nn04264233\nn04264361\nn04264485\nn04264628\nn04264765\nn04264914\nn04265275\nn04265428\nn04265904\nn04266014\nn04266162\nn04266375\nn04266486\nn04266849\nn04266968\nn04267091\nn04267165\nn04267246\nn04267435\nn04267577\nn04267985\nn04268142\nn04268275\nn04268418\nn04268565\nn04268799\nn04269086\nn04269270\nn04269502\nn04269668\nn04269822\nn04269944\nn04270147\nn04270371\nn04270576\nn04270891\nn04271148\nn04271531\nn04271793\nn04271891\nn04272054\nn04272389\nn04272782\nn04272928\nn04273064\nn04273285\nn04273569\nn04273659\nn04273796\nn04273972\nn04274686\nn04274985\nn04275093\nn04275175\nn04275283\nn04275548\nn04275661\nn04275904\nn04277352\nn04277493\nn04277669\nn04277826\nn04278247\nn04278353\nn04278447\nn04278605\nn04278932\nn04279063\nn04279172\nn04279353\nn04279462\nn04279858\nn04279987\nn04280259\nn04280373\nn04280487\nn04280845\nn04280970\nn04281260\nn04281375\nn04281571\nn04281998\nn04282231\nn04282494\nn04282872\nn04282992\nn04283096\nn04283255\nn04283378\nn04283585\nn04283784\nn04283905\nn04284002\nn04284341\nn04284438\nn04284572\nn04284869\nn04285008\nn04285146\nn04285622\nn04285803\nn04285965\nn04286128\nn04286575\nn04286960\nn04287351\nn04287451\nn04287747\nn04287898\nn04287986\nn04288165\nn04288272\nn04288533\nn04288673\nn04289027\nn04289195\nn04289449\nn04289576\nn04289690\nn04289827\nn04290079\nn04290259\nn04290507\nn04290615\nn04290762\nn04291069\nn04291242\nn04291759\nn04291992\nn04292080\nn04292221\nn04292414\nn04292572\nn04292921\nn04293119\nn04293258\nn04293744\nn04294212\nn04294426\nn04294614\nn04294879\nn04295081\nn04295353\nn04295571\nn04295777\nn04295881\nn04296562
\nn04297098\nn04297750\nn04297847\nn04298053\nn04298661\nn04298765\nn04299215\nn04299370\nn04299963\nn04300358\nn04300509\nn04300643\nn04301000\nn04301242\nn04301474\nn04301760\nn04302200\nn04302863\nn04302988\nn04303095\nn04303258\nn04303357\nn04303497\nn04304215\nn04304375\nn04304680\nn04305016\nn04305210\nn04305323\nn04305471\nn04305572\nn04305947\nn04306080\nn04306592\nn04306847\nn04307419\nn04307767\nn04307878\nn04307986\nn04308084\nn04308273\nn04308397\nn04308583\nn04308807\nn04308915\nn04309049\nn04309348\nn04309548\nn04309833\nn04310018\nn04310157\nn04310507\nn04310604\nn04310721\nn04310904\nn04311004\nn04311174\nn04311595\nn04312020\nn04312154\nn04312432\nn04312654\nn04312756\nn04312916\nn04313220\nn04313503\nn04313628\nn04314107\nn04314216\nn04314522\nn04314632\nn04314914\nn04315342\nn04315713\nn04315828\nn04315948\nn04316498\nn04316815\nn04316924\nn04317063\nn04317175\nn04317325\nn04317420\nn04317833\nn04317976\nn04318131\nn04318787\nn04318892\nn04318982\nn04319545\nn04319774\nn04319937\nn04320405\nn04320598\nn04320871\nn04320973\nn04321121\nn04321453\nn04322026\nn04322531\nn04322692\nn04322801\nn04323519\nn04323819\nn04324120\nn04324297\nn04324387\nn04324515\nn04325041\nn04325208\nn04325704\nn04325804\nn04325968\nn04326547\nn04326676\nn04326799\nn04326896\nn04327204\nn04327544\nn04327682\nn04328054\nn04328186\nn04328329\nn04328580\nn04328703\nn04328946\nn04329477\nn04329681\nn04329834\nn04329958\nn04330109\nn04330189\nn04330267\nn04330340\nn04330669\nn04330746\nn04330896\nn04330998\nn04331277\nn04331443\nn04331639\nn04331765\nn04331892\nn04332074\nn04332243\nn04332580\nn04332987\nn04333129\nn04333869\nn04334105\nn04334365\nn04334504\nn04334599\nn04335209\nn04335435\nn04335693\nn04335886\nn04336792\nn04337157\nn04337287\nn04337503\nn04337650\nn04338517\nn04338963\nn04339062\nn04339191\nn04339638\nn04339879\nn04340019\nn04340521\nn04340750\nn04340935\nn04341133\nn04341288\nn04341414\nn04341686\nn04343511\nn04343630\nn04343740\nn04344003\nn04344734\nn043448
73\nn04345028\nn04345201\nn04345787\nn04346003\nn04346157\nn04346328\nn04346428\nn04346511\nn04346679\nn04346855\nn04347119\nn04347519\nn04347754\nn04348070\nn04348184\nn04348359\nn04348988\nn04349189\nn04349306\nn04349401\nn04349913\nn04350104\nn04350235\nn04350458\nn04350581\nn04350688\nn04350769\nn04350905\nn04351550\nn04351699\nn04353573\nn04354026\nn04354182\nn04354387\nn04354487\nn04354589\nn04355115\nn04355267\nn04355338\nn04355511\nn04355684\nn04355821\nn04355933\nn04356056\nn04356595\nn04356772\nn04356925\nn04357121\nn04357314\nn04357531\nn04357930\nn04358117\nn04358256\nn04358491\nn04358707\nn04358874\nn04359034\nn04359124\nn04359217\nn04359335\nn04359500\nn04359589\nn04360501\nn04360798\nn04360914\nn04361095\nn04361260\nn04361937\nn04362624\nn04362821\nn04362972\nn04363082\nn04363210\nn04363412\nn04363671\nn04363777\nn04363874\nn04363991\nn04364160\nn04364397\nn04364545\nn04364827\nn04364994\nn04365112\nn04365229\nn04365328\nn04365484\nn04365751\nn04366033\nn04366116\nn04366367\nn04366832\nn04367011\nn04367371\nn04367480\nn04367746\nn04367950\nn04368109\nn04368235\nn04368365\nn04368496\nn04368695\nn04368840\nn04369025\nn04369282\nn04369485\nn04369618\nn04370048\nn04370288\nn04370456\nn04370600\nn04370774\nn04370955\nn04371050\nn04371430\nn04371563\nn04371774\nn04371979\nn04372370\nn04373089\nn04373428\nn04373563\nn04373704\nn04373795\nn04373894\nn04374315\nn04374521\nn04374735\nn04374907\nn04375080\nn04375241\nn04375405\nn04375615\nn04375775\nn04375926\nn04376400\nn04376876\nn04377057\nn04378489\nn04378651\nn04378956\nn04379096\nn04379243\nn04379964\nn04380255\nn04380346\nn04380533\nn04380916\nn04381073\nn04381450\nn04381587\nn04381724\nn04381860\nn04381994\nn04382334\nn04382438\nn04382537\nn04382695\nn04382880\nn04383015\nn04383130\nn04383301\nn04383839\nn04383923\nn04384593\nn04384910\nn04385079\nn04385157\nn04385536\nn04385799\nn04386051\nn04386456\nn04386664\nn04386792\nn04387095\nn04387201\nn04387261\nn04387400\nn04387531\nn04387706\nn04387932\nn0438
8040\nn04388162\nn04388473\nn04388574\nn04388743\nn04389033\nn04389430\nn04389521\nn04389718\nn04389854\nn04389999\nn04390483\nn04390577\nn04390873\nn04390977\nn04391445\nn04391838\nn04392113\nn04392526\nn04392764\nn04392985\nn04393095\nn04393301\nn04393549\nn04393808\nn04393913\nn04394031\nn04394261\nn04394421\nn04394630\nn04395024\nn04395106\nn04395332\nn04395651\nn04395875\nn04396226\nn04396335\nn04396650\nn04396808\nn04396902\nn04397027\nn04397168\nn04397261\nn04397452\nn04397645\nn04397768\nn04397860\nn04398044\nn04398497\nn04398688\nn04398834\nn04398951\nn04399046\nn04399158\nn04399537\nn04399846\nn04400109\nn04400289\nn04400499\nn04400737\nn04400899\nn04401088\nn04401578\nn04401680\nn04401828\nn04401949\nn04402057\nn04402342\nn04402449\nn04402580\nn04402746\nn04402984\nn04403413\nn04403524\nn04403638\nn04403925\nn04404072\nn04404200\nn04404412\nn04404817\nn04404997\nn04405540\nn04405762\nn04405907\nn04406239\nn04406552\nn04406687\nn04406817\nn04407257\nn04407435\nn04407686\nn04408871\nn04409011\nn04409128\nn04409279\nn04409384\nn04409515\nn04409625\nn04409806\nn04409911\nn04410086\nn04410365\nn04410485\nn04410565\nn04410663\nn04410760\nn04410886\nn04411019\nn04411264\nn04411835\nn04411966\nn04412097\nn04412300\nn04412416\nn04413151\nn04413419\nn04413969\nn04414101\nn04414199\nn04414319\nn04414476\nn04414675\nn04414909\nn04415257\nn04415663\nn04415815\nn04416005\nn04416901\nn04417086\nn04417180\nn04417361\nn04417672\nn04417809\nn04418357\nn04418644\nn04419073\nn04419642\nn04419868\nn04420024\nn04420720\nn04421083\nn04421258\nn04421417\nn04421582\nn04421740\nn04421872\nn04422409\nn04422566\nn04422727\nn04422875\nn04423552\nn04423687\nn04423845\nn04424692\nn04425804\nn04425977\nn04426184\nn04426316\nn04426427\nn04427216\nn04427473\nn04427559\nn04427715\nn04427857\nn04428008\nn04428191\nn04428382\nn04428634\nn04429038\nn04429376\nn04430475\nn04430605\nn04430896\nn04431025\nn04431436\nn04431648\nn04431745\nn04431925\nn04432043\nn04432203\nn04432662\nn04432785\nn04
433377\nn04433585\nn04434207\nn04434531\nn04434932\nn04435180\nn04435552\nn04435653\nn04435759\nn04435870\nn04436012\nn04436185\nn04436329\nn04436401\nn04436542\nn04436832\nn04436992\nn04437276\nn04437380\nn04437670\nn04437953\nn04438304\nn04438507\nn04438643\nn04438897\nn04439505\nn04439585\nn04439712\nn04440597\nn04440963\nn04441093\nn04441528\nn04441662\nn04441790\nn04442312\nn04442441\nn04442582\nn04442741\nn04443164\nn04443257\nn04443433\nn04443766\nn04444121\nn04444218\nn04444749\nn04444953\nn04445040\nn04445154\nn04445327\nn04445610\nn04445782\nn04445952\nn04446162\nn04446276\nn04446844\nn04447028\nn04447156\nn04447276\nn04447443\nn04447861\nn04448070\nn04448185\nn04448361\nn04449290\nn04449449\nn04449550\nn04449700\nn04449966\nn04450133\nn04450243\nn04450465\nn04450640\nn04450749\nn04450994\nn04451139\nn04451318\nn04451636\nn04451818\nn04452528\nn04452615\nn04452757\nn04452848\nn04453037\nn04453156\nn04453390\nn04453666\nn04453910\nn04454654\nn04454792\nn04454908\nn04455048\nn04455250\nn04455579\nn04455652\nn04456011\nn04456115\nn04456472\nn04456734\nn04457157\nn04457326\nn04457474\nn04457638\nn04457767\nn04457910\nn04458201\nn04458633\nn04458843\nn04459018\nn04459122\nn04459243\nn04459362\nn04459610\nn04459773\nn04459909\nn04460130\nn04461437\nn04461570\nn04461696\nn04461879\nn04462011\nn04462240\nn04462576\nn04463679\nn04464125\nn04464615\nn04464852\nn04465050\nn04465203\nn04465358\nn04465501\nn04465666\nn04466871\nn04467099\nn04467307\nn04467506\nn04467665\nn04467899\nn04468005\nn04469003\nn04469251\nn04469514\nn04469684\nn04469813\nn04470741\nn04471148\nn04471315\nn04471632\nn04471912\nn04472243\nn04472563\nn04472726\nn04472961\nn04473108\nn04473275\nn04473884\nn04474035\nn04474187\nn04474466\nn04475309\nn04475411\nn04475496\nn04475631\nn04475749\nn04475900\nn04476116\nn04476259\nn04476526\nn04476831\nn04476972\nn04477219\nn04477387\nn04477548\nn04477725\nn04478066\nn04478383\nn04478512\nn04478657\nn04479046\nn04479287\nn04479405\nn04479526\nn04479694\nn
04479823\nn04479939\nn04480033\nn04480141\nn04480303\nn04480527\nn04480853\nn04480995\nn04481524\nn04481642\nn04482177\nn04482297\nn04482393\nn04482975\nn04483073\nn04483307\nn04483925\nn04484024\nn04484432\nn04485082\nn04485423\nn04485586\nn04485750\nn04485884\nn04486054\nn04486213\nn04486322\nn04486616\nn04486934\nn04487081\nn04487394\nn04487724\nn04487894\nn04488202\nn04488427\nn04488530\nn04488742\nn04488857\nn04489008\nn04489695\nn04489817\nn04490091\nn04491312\nn04491388\nn04491638\nn04491769\nn04491934\nn04492060\nn04492157\nn04492375\nn04492749\nn04493109\nn04493259\nn04493381\nn04494204\nn04495051\nn04495183\nn04495310\nn04495450\nn04495555\nn04495698\nn04495843\nn04496614\nn04496726\nn04496872\nn04497249\nn04497442\nn04497570\nn04497801\nn04498275\nn04498389\nn04498523\nn04498873\nn04499062\nn04499300\nn04499446\nn04499554\nn04499810\nn04500060\nn04500390\nn04501127\nn04501281\nn04501370\nn04501550\nn04501837\nn04501947\nn04502059\nn04502197\nn04502502\nn04502670\nn04502851\nn04502989\nn04503073\nn04503155\nn04503269\nn04503413\nn04503499\nn04503593\nn04503705\nn04504038\nn04504141\nn04504770\nn04505036\nn04505345\nn04505470\nn04505888\nn04506289\nn04506402\nn04506506\nn04506688\nn04506895\nn04506994\nn04507155\nn04507326\nn04507453\nn04507689\nn04508163\nn04508489\nn04508949\nn04509171\nn04509260\nn04509417\nn04509592\nn04510706\nn04511002\nn04513827\nn04513998\nn04514095\nn04514241\nn04514648\nn04515003\nn04515444\nn04515729\nn04515890\nn04516116\nn04516214\nn04516354\nn04516672\nn04517211\nn04517408\nn04517823\nn04517999\nn04518132\nn04518343\nn04518643\nn04518764\nn04519153\nn04519536\nn04519728\nn04519887\nn04520170\nn04520382\nn04520784\nn04520962\nn04521571\nn04521863\nn04521987\nn04522168\nn04523525\nn04523831\nn04524142\nn04524313\nn04524594\nn04524716\nn04524941\nn04525038\nn04525191\nn04525305\nn04525417\nn04525584\nn04525821\nn04526520\nn04526800\nn04526964\nn04527648\nn04528079\nn04528968\nn04529108\nn04529681\nn04529962\nn04530283\nn04530456\
nn04530566\nn04531098\nn04531873\nn04532022\nn04532106\nn04532398\nn04532504\nn04532670\nn04532831\nn04533042\nn04533199\nn04533499\nn04533594\nn04533700\nn04533802\nn04533946\nn04534127\nn04534359\nn04534520\nn04534895\nn04535252\nn04535370\nn04535524\nn04536153\nn04536335\nn04536465\nn04536595\nn04536765\nn04536866\nn04537436\nn04538249\nn04538403\nn04538552\nn04538878\nn04539053\nn04539203\nn04539407\nn04539794\nn04540053\nn04540255\nn04540397\nn04540761\nn04541136\nn04541320\nn04541662\nn04541777\nn04541987\nn04542095\nn04542329\nn04542474\nn04542595\nn04542715\nn04542858\nn04542943\nn04543158\nn04543509\nn04543636\nn04543772\nn04543924\nn04543996\nn04544325\nn04544450\nn04545305\nn04545471\nn04545748\nn04545858\nn04545984\nn04546081\nn04546194\nn04546340\nn04546595\nn04546855\nn04547592\nn04548280\nn04548362\nn04549028\nn04549122\nn04549629\nn04549721\nn04549919\nn04550184\nn04550676\nn04551055\nn04551833\nn04552097\nn04552348\nn04552551\nn04552696\nn04553389\nn04553561\nn04553703\nn04554211\nn04554406\nn04554684\nn04554871\nn04554998\nn04555291\nn04555400\nn04555600\nn04555700\nn04555897\nn04556408\nn04556533\nn04556664\nn04556948\nn04557308\nn04557522\nn04557648\nn04557751\nn04558059\nn04558199\nn04558478\nn04558804\nn04559023\nn04559166\nn04559451\nn04559620\nn04559730\nn04559910\nn04559994\nn04560113\nn04560292\nn04560502\nn04560619\nn04560804\nn04560882\nn04561010\nn04561287\nn04561422\nn04561734\nn04561857\nn04561965\nn04562122\nn04562262\nn04562496\nn04562935\nn04563020\nn04563204\nn04563413\nn04563560\nn04563790\nn04564278\nn04564581\nn04565039\nn04565375\nn04566257\nn04566561\nn04566756\nn04567098\nn04567593\nn04567746\nn04568069\nn04568557\nn04568713\nn04568841\nn04569063\nn04569520\nn04569822\nn04570118\nn04570214\nn04570416\nn04570532\nn04570815\nn04570958\nn04571292\nn04571566\nn04571686\nn04571800\nn04571958\nn04572121\nn04572235\nn04572935\nn04573045\nn04573281\nn04573379\nn04573513\nn04573625\nn04573832\nn04573937\nn04574067\nn04574348\nn0457447
1\nn04574606\nn04574999\nn04575723\nn04575824\nn04576002\nn04576211\nn04576971\nn04577139\nn04577293\nn04577426\nn04577567\nn04577769\nn04578112\nn04578329\nn04578559\nn04578708\nn04578801\nn04578934\nn04579056\nn04579145\nn04579230\nn04579432\nn04579667\nn04579986\nn04580493\nn04581102\nn04581595\nn04581829\nn04582205\nn04582349\nn04582771\nn04582869\nn04583022\nn04583212\nn04583620\nn04583888\nn04583967\nn04584056\nn04584207\nn04584373\nn04585128\nn04585318\nn04585456\nn04585626\nn04585745\nn04585980\nn04586072\nn04586581\nn04586932\nn04587327\nn04587404\nn04587559\nn04587648\nn04588739\nn04589190\nn04589325\nn04589434\nn04589593\nn04589890\nn04590021\nn04590129\nn04590263\nn04590553\nn04590746\nn04590933\nn04591056\nn04591157\nn04591249\nn04591359\nn04591517\nn04591631\nn04591713\nn04591887\nn04592005\nn04592099\nn04592356\nn04592465\nn04592596\nn04592741\nn04593077\nn04593185\nn04593376\nn04593524\nn04593629\nn04593866\nn04594114\nn04594218\nn04594489\nn04594742\nn04594828\nn04594919\nn04595028\nn04595285\nn04595501\nn04595611\nn04595762\nn04595855\nn04596116\nn04596492\nn04596742\nn04596852\nn04597066\nn04597309\nn04597400\nn04597804\nn04597913\nn04598136\nn04598318\nn04598416\nn04598582\nn04598965\nn04599124\nn04599235\nn04600312\nn04600486\nn04600912\nn04601041\nn04601159\nn04601938\nn04602762\nn04602840\nn04602956\nn04603399\nn04603729\nn04603872\nn04604276\nn04604644\nn04604806\nn04605057\nn04605163\nn04605321\nn04605446\nn04605572\nn04605726\nn04606251\nn04606574\nn04607035\nn04607242\nn04607640\nn04607759\nn04607869\nn04607982\nn04608329\nn04608435\nn04608567\nn04608809\nn04608923\nn04609531\nn04609651\nn04609811\nn04610013\nn04610176\nn04610274\nn04610503\nn04610676\nn04611351\nn04611795\nn04611916\nn04612026\nn04612159\nn04612257\nn04612373\nn04612504\nn04612840\nn04613015\nn04613158\nn04613696\nn04613939\nn04614505\nn04614655\nn04614844\nn04615149\nn04615226\nn04615644\nn04682018\nn04950713\nn04950952\nn04951071\nn04951186\nn04951373\nn04951716\nn04951
875\nn04953296\nn04953678\nn04955160\nn04957356\nn04957589\nn04958634\nn04958865\nn04959061\nn04959230\nn04959672\nn04960277\nn04960582\nn04961062\nn04961331\nn04961691\nn04962062\nn04962240\nn04963111\nn04963307\nn04963588\nn04963740\nn04964001\nn04964799\nn04964878\nn04965179\nn04965451\nn04965661\nn04966543\nn04966941\nn04967191\nn04967561\nn04967674\nn04967801\nn04967882\nn04968056\nn04968139\nn04968749\nn04968895\nn04969242\nn04969540\nn04969798\nn04969952\nn04970059\nn04970312\nn04970398\nn04970470\nn04970631\nn04970916\nn04971211\nn04971313\nn04972350\nn04972451\nn04972801\nn04973020\nn04973291\nn04973386\nn04973585\nn04973669\nn04973816\nn04974145\nn04974340\nn04974859\nn04975739\nn04976319\nn04976952\nn04977412\nn04978561\nn04979002\nn04979307\nn04981658\nn05102764\nn05218119\nn05233741\nn05235879\nn05238282\nn05239437\nn05241218\nn05241485\nn05241662\nn05242070\nn05242239\nn05242928\nn05244421\nn05244755\nn05244934\nn05245192\nn05257476\nn05257967\nn05258051\nn05258627\nn05259914\nn05260127\nn05260240\nn05261310\nn05262422\nn05262534\nn05262698\nn05263183\nn05263316\nn05263448\nn05265736\nn05266096\nn05266879\nn05278922\nn05279953\nn05282652\nn05285623\nn05302499\nn05314075\nn05399034\nn05399243\nn05399356\nn05418717\nn05427346\nn05442594\nn05447757\nn05448704\nn05448827\nn05449196\nn05449661\nn05449959\nn05450617\nn05451099\nn05451384\nn05453412\nn05453657\nn05453815\nn05454833\nn05454978\nn05455113\nn05458173\nn05458576\nn05459101\nn05459457\nn05459769\nn05460759\nn05464534\nn05467054\nn05467758\nn05468098\nn05468739\nn05469664\nn05469861\nn05475397\nn05482922\nn05486510\nn05491154\nn05526957\nn05538625\nn05539947\nn05541509\nn05542893\nn05545879\nn05571341\nn05578095\nn05581932\nn05584746\nn05586759\nn05604434\nn05716342\nn06008896\nn06209940\nn06254669\nn06255081\nn06255613\nn06259898\nn06262567\nn06262943\nn06263202\nn06263369\nn06263609\nn06263762\nn06263895\nn06266417\nn06266633\nn06266710\nn06266878\nn06266973\nn06267145\nn06267564\nn06267655\nn062
67758\nn06267893\nn06267991\nn06271778\nn06272290\nn06272612\nn06272803\nn06273207\nn06273294\nn06273414\nn06273555\nn06273743\nn06273890\nn06273986\nn06274092\nn06274292\nn06274546\nn06274760\nn06274921\nn06275095\nn06275353\nn06275471\nn06276501\nn06276697\nn06276902\nn06277025\nn06277135\nn06277280\nn06278338\nn06278475\nn06281040\nn06281175\nn06340977\nn06359193\nn06359467\nn06359657\nn06415688\nn06417096\nn06418693\nn06419354\nn06423496\nn06470073\nn06591815\nn06592078\nn06592281\nn06592421\nn06595351\nn06596179\nn06596364\nn06596474\nn06596607\nn06596727\nn06596845\nn06613686\nn06614901\nn06616216\nn06618653\nn06625062\nn06785654\nn06793231\nn06794110\nn06874185\nn06883725\nn06892775\nn06998748\nn07005523\nn07248320\nn07273802\nn07461050\nn07556406\nn07556637\nn07556872\nn07556970\nn07557165\nn07557434\nn07560193\nn07560331\nn07560422\nn07560542\nn07560652\nn07560903\nn07561112\nn07561590\nn07561848\nn07562017\nn07562172\nn07562379\nn07562495\nn07562651\nn07562881\nn07562984\nn07563207\nn07563366\nn07563642\nn07563800\nn07564008\nn07564101\nn07564292\nn07564515\nn07564629\nn07564796\nn07564971\nn07565083\nn07565161\nn07565259\nn07565608\nn07565725\nn07565945\nn07566092\nn07566231\nn07566340\nn07566863\nn07567039\nn07567139\nn07567390\nn07567611\nn07567707\nn07567980\nn07568095\nn07568241\nn07568389\nn07568502\nn07568625\nn07568818\nn07568991\nn07569106\nn07569423\nn07569543\nn07569644\nn07569873\nn07570021\nn07570530\nn07570720\nn07572353\nn07572616\nn07572858\nn07572957\nn07573103\nn07573347\nn07573453\nn07573563\nn07573696\nn07574176\nn07574426\nn07574504\nn07574602\nn07574780\nn07574923\nn07575076\nn07575226\nn07575392\nn07575510\nn07575726\nn07575984\nn07576182\nn07576438\nn07576577\nn07576781\nn07576969\nn07577144\nn07577374\nn07577538\nn07577657\nn07577772\nn07577918\nn07578093\nn07579575\nn07579688\nn07579787\nn07579917\nn07580053\nn07580253\nn07580359\nn07580470\nn07580592\nn07581249\nn07581346\nn07581607\nn07581775\nn07581931\nn07582027\nn07582152\nn0
7582277\nn07582441\nn07582609\nn07582811\nn07582892\nn07582970\nn07583066\nn07583197\nn07583865\nn07583978\nn07584110\nn07584228\nn07584332\nn07584423\nn07584593\nn07584859\nn07584938\nn07585015\nn07585107\nn07585208\nn07585474\nn07585557\nn07585644\nn07585758\nn07585906\nn07585997\nn07586099\nn07586179\nn07586318\nn07586485\nn07586604\nn07586718\nn07586894\nn07587023\nn07587111\nn07587206\nn07587331\nn07587441\nn07587618\nn07587700\nn07587819\nn07587962\nn07588111\nn07588193\nn07588299\nn07588419\nn07588574\nn07588688\nn07588817\nn07588947\nn07589458\nn07589543\nn07589724\nn07589872\nn07589967\nn07590068\nn07590177\nn07590320\nn07590502\nn07590611\nn07590752\nn07590841\nn07590974\nn07591049\nn07591162\nn07591236\nn07591330\nn07591473\nn07591586\nn07591813\nn07591961\nn07592094\nn07592317\nn07592400\nn07592481\nn07592656\nn07592768\nn07592922\nn07593004\nn07593107\nn07593199\nn07593471\nn07593774\nn07593972\nn07594066\nn07594155\nn07594250\nn07594737\nn07594840\nn07595051\nn07595180\nn07595368\nn07595649\nn07595751\nn07595914\nn07596046\nn07596160\nn07596362\nn07596452\nn07596566\nn07596684\nn07596967\nn07597145\nn07597263\nn07597365\nn07598256\nn07598529\nn07598622\nn07598734\nn07598928\nn07599068\nn07599161\nn07599242\nn07599383\nn07599468\nn07599554\nn07599649\nn07599783\nn07599911\nn07599998\nn07600177\nn07600285\nn07600394\nn07600506\nn07600696\nn07600895\nn07601025\nn07601175\nn07601290\nn07601407\nn07601572\nn07601686\nn07601809\nn07602650\nn07604956\nn07605040\nn07605198\nn07605282\nn07605380\nn07605474\nn07605597\nn07605693\nn07605804\nn07605944\nn07606058\nn07606191\nn07606278\nn07606419\nn07606538\nn07606669\nn07606764\nn07606933\nn07607027\nn07607138\nn07607361\nn07607492\nn07607605\nn07607707\nn07607832\nn07607967\nn07608098\nn07608245\nn07608339\nn07608429\nn07608533\nn07608641\nn07608721\nn07608866\nn07608980\nn07609083\nn07609215\nn07609316\nn07609407\nn07609549\nn07609632\nn07609728\nn07609840\nn07610295\nn07610502\nn07610620\nn07610746\nn07610890\n
n07611046\nn07611148\nn07611267\nn07611358\nn07611733\nn07611839\nn07611991\nn07612137\nn07612273\nn07612367\nn07612530\nn07612632\nn07612996\nn07613158\nn07613266\nn07613480\nn07613671\nn07613815\nn07614103\nn07614198\nn07614348\nn07614500\nn07614730\nn07614825\nn07615052\nn07615190\nn07615289\nn07615460\nn07615569\nn07615671\nn07615774\nn07615954\nn07616046\nn07616174\nn07616265\nn07616386\nn07616487\nn07616590\nn07616748\nn07616906\nn07617051\nn07617188\nn07617344\nn07617447\nn07617526\nn07617611\nn07617708\nn07617839\nn07617932\nn07618029\nn07618119\nn07618281\nn07618432\nn07618587\nn07618684\nn07618871\nn07619004\nn07619208\nn07619301\nn07619409\nn07619508\nn07619881\nn07620047\nn07620145\nn07620327\nn07620597\nn07620689\nn07621264\nn07621497\nn07621618\nn07623136\nn07624466\nn07624666\nn07624757\nn07624924\nn07625061\nn07625324\nn07627931\nn07628068\nn07628181\nn07631926\nn07639069\nn07641928\nn07642361\nn07642471\nn07642742\nn07642833\nn07642933\nn07643026\nn07643200\nn07643306\nn07643474\nn07643577\nn07643679\nn07643764\nn07643891\nn07643981\nn07644244\nn07648913\nn07648997\nn07650792\nn07650903\nn07651025\nn07654148\nn07654298\nn07655067\nn07655263\nn07663899\nn07665438\nn07666176\nn07672914\nn07678586\nn07678729\nn07678953\nn07679034\nn07679140\nn07679356\nn07680168\nn07680313\nn07680416\nn07680517\nn07680655\nn07680761\nn07680932\nn07681264\nn07681355\nn07681450\nn07681691\nn07681805\nn07681926\nn07682197\nn07682316\nn07682477\nn07682624\nn07682808\nn07682952\nn07683039\nn07683138\nn07683265\nn07683360\nn07683490\nn07683617\nn07683786\nn07684084\nn07684164\nn07684289\nn07684422\nn07684517\nn07684600\nn07684938\nn07685031\nn07685118\nn07685218\nn07685303\nn07685399\nn07685546\nn07685730\nn07685918\nn07686021\nn07686202\nn07686299\nn07686461\nn07686634\nn07686720\nn07686873\nn07687053\nn07687211\nn07687381\nn07687469\nn07687626\nn07687789\nn07688021\nn07688130\nn07688265\nn07688412\nn07688624\nn07688757\nn07688898\nn07689003\nn07689217\nn07689313\nn07689490
\nn07689624\nn07689757\nn07689842\nn07690019\nn07690152\nn07690273\nn07690431\nn07690511\nn07690585\nn07690739\nn07690892\nn07691091\nn07691237\nn07691539\nn07691650\nn07691758\nn07691863\nn07691954\nn07692114\nn07692248\nn07692405\nn07692517\nn07692614\nn07692887\nn07693048\nn07693223\nn07693439\nn07693590\nn07693725\nn07693889\nn07693972\nn07694169\nn07694403\nn07694516\nn07694659\nn07694839\nn07695187\nn07695284\nn07695410\nn07695504\nn07695652\nn07695742\nn07695878\nn07695965\nn07696403\nn07696527\nn07696625\nn07696728\nn07696839\nn07696977\nn07697100\nn07697313\nn07697408\nn07697537\nn07697699\nn07697825\nn07698250\nn07698401\nn07698543\nn07698672\nn07698782\nn07700003\nn07703889\nn07704054\nn07704205\nn07704305\nn07705931\nn07707451\nn07708124\nn07708398\nn07708512\nn07708685\nn07708798\nn07709046\nn07709172\nn07709333\nn07709701\nn07709881\nn07710007\nn07710283\nn07710616\nn07710952\nn07711080\nn07711232\nn07711371\nn07711569\nn07711683\nn07711799\nn07711907\nn07712063\nn07712267\nn07712382\nn07712559\nn07712748\nn07712856\nn07712959\nn07713074\nn07713267\nn07713395\nn07713763\nn07713895\nn07714078\nn07714188\nn07714287\nn07714448\nn07714571\nn07714802\nn07714895\nn07714990\nn07715103\nn07715221\nn07715407\nn07715561\nn07715721\nn07716034\nn07716203\nn07716358\nn07716504\nn07716649\nn07716750\nn07716906\nn07717070\nn07717410\nn07717556\nn07717714\nn07717858\nn07718068\nn07718195\nn07718329\nn07718472\nn07718671\nn07718747\nn07718920\nn07719058\nn07719213\nn07719330\nn07719437\nn07719616\nn07719756\nn07719839\nn07719980\nn07720084\nn07720185\nn07720277\nn07720442\nn07720615\nn07720875\nn07721018\nn07721118\nn07721195\nn07721325\nn07721456\nn07721678\nn07721833\nn07721942\nn07722052\nn07722217\nn07722390\nn07722485\nn07722666\nn07722763\nn07722888\nn07723039\nn07723177\nn07723330\nn07723559\nn07723753\nn07723968\nn07724078\nn07724173\nn07724269\nn07724492\nn07724654\nn07724819\nn07724943\nn07725158\nn07725255\nn07725376\nn07725531\nn07725663\nn07725789\nn077258
88\nn07726009\nn07726095\nn07726230\nn07726386\nn07726525\nn07726672\nn07726796\nn07727048\nn07727140\nn07727252\nn07727377\nn07727458\nn07727578\nn07727741\nn07727868\nn07728053\nn07728181\nn07728284\nn07728391\nn07728585\nn07728708\nn07728804\nn07729000\nn07729142\nn07729225\nn07729384\nn07729485\nn07729828\nn07729926\nn07730033\nn07730207\nn07730320\nn07730406\nn07730562\nn07730708\nn07730855\nn07731006\nn07731122\nn07731284\nn07731436\nn07731587\nn07731767\nn07731952\nn07732168\nn07732302\nn07732433\nn07732525\nn07732636\nn07732747\nn07732904\nn07733005\nn07733124\nn07733217\nn07733394\nn07733567\nn07733712\nn07733847\nn07734017\nn07734183\nn07734292\nn07734417\nn07734555\nn07734744\nn07734879\nn07735052\nn07735179\nn07735294\nn07735404\nn07735510\nn07735687\nn07735803\nn07735981\nn07736087\nn07736256\nn07736371\nn07736527\nn07736692\nn07736813\nn07736971\nn07737081\nn07737594\nn07737745\nn07738105\nn07738224\nn07739035\nn07739125\nn07739344\nn07739506\nn07739923\nn07740033\nn07740115\nn07740220\nn07740342\nn07740461\nn07740597\nn07740744\nn07740855\nn07740954\nn07741138\nn07741235\nn07741357\nn07741461\nn07741623\nn07741706\nn07741804\nn07741888\nn07742012\nn07742224\nn07742313\nn07742415\nn07742513\nn07742605\nn07742704\nn07743224\nn07743384\nn07743544\nn07743723\nn07743902\nn07744057\nn07744246\nn07744430\nn07744559\nn07744682\nn07744811\nn07745046\nn07745197\nn07745357\nn07745466\nn07745661\nn07745940\nn07746038\nn07746186\nn07746334\nn07746551\nn07746749\nn07746910\nn07747055\nn07747607\nn07747811\nn07747951\nn07748157\nn07748276\nn07748416\nn07748574\nn07748753\nn07748912\nn07749095\nn07749192\nn07749312\nn07749446\nn07749582\nn07749731\nn07749870\nn07749969\nn07750146\nn07750299\nn07750449\nn07750586\nn07750736\nn07750872\nn07751004\nn07751148\nn07751280\nn07751451\nn07751737\nn07751858\nn07751977\nn07752109\nn07752264\nn07752377\nn07752514\nn07752602\nn07752664\nn07752782\nn07752874\nn07752966\nn07753113\nn07753275\nn07753448\nn07753592\nn07753743\nn0775
3980\nn07754155\nn07754279\nn07754451\nn07754684\nn07754894\nn07755089\nn07755262\nn07755411\nn07755619\nn07755707\nn07755929\nn07756096\nn07756325\nn07756499\nn07756641\nn07756838\nn07756951\nn07757132\nn07757312\nn07757511\nn07757602\nn07757753\nn07757874\nn07757990\nn07758125\nn07758260\nn07758407\nn07758582\nn07758680\nn07758950\nn07759194\nn07759324\nn07759424\nn07759576\nn07759691\nn07759816\nn07760070\nn07760153\nn07760297\nn07760395\nn07760501\nn07760673\nn07760755\nn07760859\nn07761141\nn07761309\nn07761611\nn07761777\nn07761954\nn07762114\nn07762244\nn07762373\nn07762534\nn07762740\nn07762913\nn07763107\nn07763290\nn07763483\nn07763629\nn07763792\nn07763987\nn07764155\nn07764315\nn07764486\nn07764630\nn07764847\nn07765073\nn07765208\nn07765361\nn07765517\nn07765612\nn07765728\nn07765862\nn07765999\nn07766173\nn07766409\nn07766530\nn07766723\nn07766891\nn07767002\nn07767171\nn07767344\nn07767549\nn07767709\nn07767847\nn07768068\nn07768139\nn07768230\nn07768318\nn07768423\nn07768590\nn07768694\nn07768858\nn07769102\nn07769306\nn07769465\nn07769584\nn07769731\nn07769886\nn07770034\nn07770180\nn07770439\nn07770571\nn07770763\nn07770869\nn07771082\nn07771212\nn07771405\nn07771539\nn07771731\nn07771891\nn07772026\nn07772147\nn07772274\nn07772413\nn07772788\nn07772935\nn07773428\nn07774182\nn07774295\nn07774479\nn07774596\nn07774719\nn07774842\nn07775050\nn07775197\nn07783827\nn07785487\nn07800091\nn07800487\nn07800636\nn07800740\nn07801007\nn07801091\nn07801342\nn07801508\nn07801709\nn07801779\nn07801892\nn07802026\nn07802152\nn07802246\nn07802417\nn07802767\nn07802863\nn07802963\nn07803093\nn07803213\nn07803310\nn07803408\nn07803545\nn07803779\nn07803895\nn07803992\nn07804152\nn07804323\nn07804543\nn07804657\nn07804771\nn07804900\nn07805006\nn07805254\nn07805389\nn07805478\nn07805594\nn07805731\nn07805966\nn07806043\nn07806120\nn07806221\nn07806633\nn07806774\nn07806879\nn07807002\nn07807171\nn07807317\nn07807472\nn07807594\nn07807710\nn07807834\nn07807922\nn07
808022\nn07808166\nn07808268\nn07808352\nn07808479\nn07808587\nn07808675\nn07808806\nn07808904\nn07809096\nn07809368\nn07810531\nn07810907\nn07811416\nn07812046\nn07812184\nn07812662\nn07812790\nn07812913\nn07813107\nn07813324\nn07813495\nn07813579\nn07813717\nn07813833\nn07814007\nn07814203\nn07814390\nn07814487\nn07814634\nn07814790\nn07814925\nn07815163\nn07815294\nn07815424\nn07815588\nn07815839\nn07815956\nn07816052\nn07816164\nn07816296\nn07816398\nn07816575\nn07816726\nn07816839\nn07817024\nn07817160\nn07817315\nn07817465\nn07817599\nn07817758\nn07817871\nn07818029\nn07818133\nn07818277\nn07818422\nn07818572\nn07818689\nn07818825\nn07818995\nn07819166\nn07819303\nn07819480\nn07819682\nn07819769\nn07819896\nn07820036\nn07820145\nn07820297\nn07820497\nn07820683\nn07820814\nn07820960\nn07821107\nn07821260\nn07821404\nn07821610\nn07821758\nn07821919\nn07822053\nn07822197\nn07822323\nn07822518\nn07822687\nn07822845\nn07823105\nn07823280\nn07823369\nn07823460\nn07823591\nn07823698\nn07823814\nn07823951\nn07824191\nn07824268\nn07824383\nn07824502\nn07824702\nn07824863\nn07824988\nn07825194\nn07825399\nn07825496\nn07825597\nn07825717\nn07825850\nn07825972\nn07826091\nn07826250\nn07826340\nn07826453\nn07826544\nn07826653\nn07826930\nn07827130\nn07827284\nn07827410\nn07827554\nn07827750\nn07827896\nn07828041\nn07828156\nn07828275\nn07828378\nn07828642\nn07828987\nn07829248\nn07829331\nn07829412\nn07830493\nn07830593\nn07830690\nn07830841\nn07830986\nn07831146\nn07831267\nn07831450\nn07831663\nn07831821\nn07831955\nn07832099\nn07832202\nn07832307\nn07832416\nn07832592\nn07832741\nn07832902\nn07833333\nn07833535\nn07833672\nn07833816\nn07833951\nn07834065\nn07834160\nn07834286\nn07834507\nn07834618\nn07834774\nn07834872\nn07835051\nn07835173\nn07835331\nn07835457\nn07835547\nn07835701\nn07835823\nn07835921\nn07836077\nn07836269\nn07836456\nn07836600\nn07836731\nn07836838\nn07837002\nn07837110\nn07837234\nn07837362\nn07837545\nn07837630\nn07837755\nn07837912\nn07838073\nn
07838233\nn07838441\nn07838551\nn07838659\nn07838811\nn07838905\nn07839055\nn07839172\nn07839312\nn07839478\nn07839593\nn07839730\nn07839864\nn07840027\nn07840124\nn07840219\nn07840304\nn07840395\nn07840520\nn07840672\nn07840804\nn07841037\nn07841345\nn07841495\nn07841639\nn07841800\nn07841907\nn07842044\nn07842130\nn07842202\nn07842308\nn07842433\nn07842605\nn07842753\nn07842972\nn07843117\nn07843220\nn07843348\nn07843464\nn07843636\nn07843775\nn07844042\nn07844604\nn07844786\nn07844867\nn07845087\nn07845166\nn07845335\nn07845421\nn07845495\nn07845571\nn07845702\nn07845775\nn07845863\nn07846014\nn07846143\nn07846274\nn07846359\nn07846471\nn07846557\nn07846688\nn07846802\nn07846938\nn07847047\nn07847198\nn07847453\nn07847585\nn07847706\nn07847827\nn07847917\nn07848093\nn07848196\nn07848338\nn07848771\nn07848936\nn07849026\nn07849186\nn07849336\nn07849506\nn07849619\nn07849733\nn07849912\nn07850083\nn07850219\nn07850329\nn07851054\nn07851298\nn07851443\nn07851554\nn07851641\nn07851767\nn07851926\nn07852045\nn07852229\nn07852302\nn07852376\nn07852452\nn07852532\nn07852614\nn07852712\nn07852833\nn07852919\nn07853125\nn07853232\nn07853345\nn07853445\nn07853560\nn07853648\nn07853762\nn07853852\nn07853946\nn07854066\nn07854184\nn07854266\nn07854348\nn07854455\nn07854614\nn07854707\nn07854813\nn07854982\nn07855105\nn07855188\nn07855317\nn07855413\nn07855510\nn07855603\nn07855721\nn07855812\nn07855907\nn07856045\nn07856186\nn07856270\nn07856756\nn07856895\nn07856992\nn07857076\nn07857170\nn07857356\nn07857598\nn07857731\nn07857959\nn07858114\nn07858197\nn07858336\nn07858484\nn07858595\nn07858841\nn07858978\nn07859142\nn07859284\nn07859583\nn07859796\nn07859951\nn07860103\nn07860208\nn07860331\nn07860447\nn07860548\nn07860629\nn07860805\nn07860988\nn07861158\nn07861247\nn07861334\nn07861557\nn07861681\nn07861813\nn07861983\nn07862095\nn07862244\nn07862348\nn07862461\nn07862611\nn07862770\nn07862946\nn07863107\nn07863229\nn07863374\nn07863547\nn07863644\nn07863802\nn07863935\
nn07864065\nn07864198\nn07864317\nn07864475\nn07864638\nn07864756\nn07864934\nn07865105\nn07865196\nn07865484\nn07865575\nn07865700\nn07865788\nn07866015\nn07866151\nn07866277\nn07866409\nn07866571\nn07866723\nn07866868\nn07867021\nn07867164\nn07867324\nn07867421\nn07867616\nn07867751\nn07867883\nn07868045\nn07868200\nn07868340\nn07868508\nn07868684\nn07868830\nn07868955\nn07869111\nn07869291\nn07869391\nn07869522\nn07869611\nn07869775\nn07869937\nn07870069\nn07870167\nn07870313\nn07870478\nn07870620\nn07870734\nn07870894\nn07871065\nn07871234\nn07871335\nn07871436\nn07871588\nn07871720\nn07871810\nn07872593\nn07872748\nn07873057\nn07873198\nn07873348\nn07873464\nn07873679\nn07873807\nn07874063\nn07874159\nn07874259\nn07874343\nn07874441\nn07874531\nn07874674\nn07874780\nn07874995\nn07875086\nn07875152\nn07875267\nn07875436\nn07875560\nn07875693\nn07875835\nn07875926\nn07876026\nn07876189\nn07876281\nn07876460\nn07876550\nn07876651\nn07876775\nn07876893\nn07877187\nn07877299\nn07877675\nn07877849\nn07877961\nn07878145\nn07878283\nn07878479\nn07878647\nn07878785\nn07878926\nn07879072\nn07879174\nn07879350\nn07879450\nn07879560\nn07879659\nn07879821\nn07879953\nn07880080\nn07880213\nn07880325\nn07880458\nn07880751\nn07880880\nn07880968\nn07881117\nn07881205\nn07881404\nn07881525\nn07881625\nn07881800\nn07882420\nn07882497\nn07882886\nn07883031\nn07883156\nn07883251\nn07883384\nn07883510\nn07883661\nn07884567\nn07885705\nn07886057\nn07886176\nn07886317\nn07886463\nn07886572\nn07886849\nn07887099\nn07887192\nn07887304\nn07887461\nn07887634\nn07887967\nn07888058\nn07888229\nn07888378\nn07888465\nn07888816\nn07888909\nn07889193\nn07889274\nn07889510\nn07889814\nn07889990\nn07890068\nn07890226\nn07890352\nn07890540\nn07890617\nn07890750\nn07890890\nn07890970\nn07891095\nn07891189\nn07891309\nn07891433\nn07891726\nn07892418\nn07892512\nn07892813\nn07893253\nn07893425\nn07893528\nn07893642\nn07893792\nn07893891\nn07894102\nn07894298\nn07894451\nn07894551\nn07894703\nn0789479
9\nn07894965\nn07895100\nn07895237\nn07895435\nn07895595\nn07895710\nn07895839\nn07895962\nn07896060\nn07896165\nn07896287\nn07896422\nn07896560\nn07896661\nn07896765\nn07896893\nn07896994\nn07897116\nn07897200\nn07897438\nn07897600\nn07897750\nn07897865\nn07897975\nn07898117\nn07898247\nn07898333\nn07898443\nn07898617\nn07898745\nn07898895\nn07899003\nn07899108\nn07899292\nn07899434\nn07899533\nn07899660\nn07899769\nn07899899\nn07899976\nn07900225\nn07900406\nn07900616\nn07900734\nn07900825\nn07900958\nn07901355\nn07901457\nn07901587\nn07902121\nn07902336\nn07902443\nn07902520\nn07902698\nn07902799\nn07902937\nn07903101\nn07903208\nn07903543\nn07903643\nn07903731\nn07903841\nn07903962\nn07904072\nn07904293\nn07904395\nn07904637\nn07904760\nn07904865\nn07904934\nn07905038\nn07905296\nn07905386\nn07905474\nn07905618\nn07905770\nn07905979\nn07906111\nn07906284\nn07906572\nn07906718\nn07906877\nn07907037\nn07907161\nn07907342\nn07907429\nn07907548\nn07907831\nn07907943\nn07908411\nn07908567\nn07908647\nn07908812\nn07908923\nn07909129\nn07909231\nn07909362\nn07909504\nn07909593\nn07909714\nn07909811\nn07909954\nn07910048\nn07910152\nn07910245\nn07910379\nn07910538\nn07910656\nn07910799\nn07910970\nn07911061\nn07911249\nn07911371\nn07911677\nn07912093\nn07912211\nn07913180\nn07913300\nn07913393\nn07913537\nn07913644\nn07913774\nn07913882\nn07914006\nn07914128\nn07914271\nn07914413\nn07914586\nn07914686\nn07914777\nn07914887\nn07914995\nn07915094\nn07915213\nn07915366\nn07915491\nn07915618\nn07915800\nn07915918\nn07916041\nn07916183\nn07916319\nn07916437\nn07916582\nn07917133\nn07917272\nn07917392\nn07917507\nn07917618\nn07917791\nn07917874\nn07917951\nn07918028\nn07918193\nn07918309\nn07918706\nn07918879\nn07919165\nn07919310\nn07919441\nn07919572\nn07919665\nn07919787\nn07919894\nn07920052\nn07920222\nn07920349\nn07920540\nn07920663\nn07920872\nn07920989\nn07921090\nn07921239\nn07921360\nn07921455\nn07921615\nn07921834\nn07921948\nn07922041\nn07922147\nn07922512\nn07922
607\nn07922764\nn07922955\nn07923748\nn07924033\nn07924276\nn07924366\nn07924443\nn07924560\nn07924655\nn07924747\nn07924834\nn07924955\nn07925116\nn07925229\nn07925327\nn07925423\nn07925500\nn07925608\nn07925708\nn07925808\nn07925966\nn07926250\nn07926346\nn07926442\nn07926540\nn07926785\nn07926920\nn07927070\nn07927197\nn07927512\nn07927716\nn07927836\nn07927931\nn07928163\nn07928264\nn07928367\nn07928488\nn07928578\nn07928696\nn07928790\nn07928887\nn07928998\nn07929172\nn07929351\nn07929519\nn07929940\nn07930062\nn07930205\nn07930315\nn07930433\nn07930554\nn07930864\nn07931001\nn07931096\nn07931280\nn07931452\nn07931612\nn07931733\nn07931870\nn07932039\nn07932323\nn07932454\nn07932614\nn07932762\nn07932841\nn07933154\nn07933274\nn07933530\nn07933652\nn07933799\nn07933891\nn07934032\nn07934152\nn07934282\nn07934373\nn07934530\nn07934678\nn07934800\nn07934908\nn07935043\nn07935152\nn07935288\nn07935379\nn07935504\nn07935737\nn07935878\nn07936015\nn07936093\nn07936263\nn07936459\nn07936548\nn07936745\nn07936979\nn07937069\nn07937344\nn07937461\nn07937621\nn07938007\nn07938149\nn07938313\nn07938594\nn07942152\nn07951464\nn07954211\nn07977870\nn08079613\nn08182379\nn08238463\nn08242223\nn08249459\nn08253141\nn08256735\nn08376250\nn08385989\nn08492354\nn08492461\nn08494231\nn08495908\nn08496334\nn08500819\nn08500989\nn08501887\nn08505018\nn08506347\nn08511017\nn08517010\nn08517676\nn08518171\nn08519299\nn08521623\nn08523340\nn08524735\nn08539072\nn08539276\nn08540532\nn08547468\nn08547544\nn08551296\nn08554440\nn08555333\nn08555710\nn08558770\nn08558963\nn08559155\nn08560295\nn08569482\nn08571275\nn08571642\nn08571898\nn08573674\nn08573842\nn08578517\nn08579266\nn08579352\nn08580944\nn08583292\nn08583455\nn08583554\nn08583682\nn08584914\nn08586978\nn08589670\nn08596076\nn08597579\nn08598301\nn08598568\nn08599174\nn08599292\nn08611339\nn08611421\nn08613733\nn08614632\nn08616050\nn08618831\nn08619112\nn08623676\nn08628141\nn08633683\nn08640531\nn08640739\nn08640962\nn086
43267\nn08644045\nn08645104\nn08645212\nn08645318\nn08647264\nn08648917\nn08649711\nn08651104\nn08652376\nn08658309\nn08658918\nn08659242\nn08659331\nn08659446\nn08659861\nn08661878\nn08662427\nn08663051\nn08663703\nn08663860\nn08673039\nn08674344\nn08676253\nn08677424\nn08677801\nn08678783\nn08679167\nn08679269\nn08679562\nn08685188\nn08782627\nn08896327\nn09032191\nn09186592\nn09189157\nn09191635\nn09193551\nn09193705\nn09194227\nn09199101\nn09201998\nn09203827\nn09205509\nn09206896\nn09206985\nn09208496\nn09209025\nn09210862\nn09213434\nn09213565\nn09214060\nn09214269\nn09214916\nn09215023\nn09215437\nn09217230\nn09218315\nn09218494\nn09218641\nn09219233\nn09223487\nn09224725\nn09226869\nn09228055\nn09229709\nn09230041\nn09230202\nn09231117\nn09233446\nn09233603\nn09238926\nn09239302\nn09242389\nn09245515\nn09246464\nn09247410\nn09248153\nn09248399\nn09249034\nn09249155\nn09251407\nn09255070\nn09256479\nn09257843\nn09259025\nn09259219\nn09260907\nn09262690\nn09263912\nn09264803\nn09265620\nn09266604\nn09267854\nn09268007\nn09269341\nn09269472\nn09269882\nn09270160\nn09270657\nn09270735\nn09274152\nn09274305\nn09279986\nn09281252\nn09282208\nn09283193\nn09283405\nn09283514\nn09283767\nn09283866\nn09287415\nn09287968\nn09288635\nn09289331\nn09289596\nn09290350\nn09290444\nn09294877\nn09295210\nn09295946\nn09300306\nn09300905\nn09302616\nn09303008\nn09303528\nn09304750\nn09305031\nn09305898\nn09308572\nn09308743\nn09309046\nn09309168\nn09309292\nn09310616\nn09315159\nn09319604\nn09325824\nn09326662\nn09327077\nn09327538\nn09330378\nn09331251\nn09332890\nn09335693\nn09335809\nn09336555\nn09337048\nn09337253\nn09338013\nn09339810\nn09344198\nn09344324\nn09344724\nn09348460\nn09349648\nn09351905\nn09352849\nn09353815\nn09354511\nn09357346\nn09357447\nn09359803\nn09361517\nn09362316\nn09362945\nn09366017\nn09366317\nn09375606\nn09376198\nn09376526\nn09376786\nn09381242\nn09382099\nn09384106\nn09389867\nn09391386\nn09391644\nn09391774\nn09392402\nn09393524\nn09393605\nn0
9396465\nn09396608\nn09398076\nn09398677\nn09399592\nn09400584\nn09400987\nn09402944\nn09403086\nn09403211\nn09403427\nn09403734\nn09405078\nn09405787\nn09406793\nn09409512\nn09409752\nn09410224\nn09411189\nn09411295\nn09415584\nn09415671\nn09416076\nn09416890\nn09421031\nn09421799\nn09421951\nn09422190\nn09422631\nn09425019\nn09425344\nn09428293\nn09428628\nn09429630\nn09432283\nn09432990\nn09433312\nn09433442\nn09433839\nn09435739\nn09436444\nn09436708\nn09437454\nn09438844\nn09438940\nn09439032\nn09439213\nn09442595\nn09443281\nn09443641\nn09444783\nn09445008\nn09445289\nn09447666\nn09448690\nn09450163\nn09451237\nn09452291\nn09452395\nn09452760\nn09453008\nn09454153\nn09454412\nn09454744\nn09456207\nn09457979\nn09458269\nn09459979\nn09460046\nn09461069\nn09462600\nn09463226\nn09464486\nn09466678\nn09467696\nn09468604\nn09470027\nn09470222\nn09472413\nn09472597\nn09474010\nn09474412\nn09474765\nn09475044\nn09475179\nn09475925\nn09476123\nn09478210\nn09480959\nn09481120\nn09493983\nn09495962\nn09505153\nn09537660\nn09556121\nn09605110\nn09606009\nn09606527\nn09607630\nn09607782\nn09607903\nn09608709\nn09610255\nn09610405\nn09611722\nn09612700\nn09613118\nn09613191\nn09613690\nn09615336\nn09616573\nn09616922\nn09617161\nn09617435\nn09617577\nn09617696\nn09618760\nn09618880\nn09618957\nn09619168\nn09619452\nn09620078\nn09620794\nn09621232\nn09622049\nn09622302\nn09624168\nn09624559\nn09624899\nn09625401\nn09626238\nn09627807\nn09627906\nn09629065\nn09629246\nn09629752\nn09631129\nn09632274\nn09632518\nn09633969\nn09635534\nn09635635\nn09635973\nn09636339\nn09637339\nn09638454\nn09638875\nn09639382\nn09639919\nn09640327\nn09640715\nn09641002\nn09641578\nn09643799\nn09644152\nn09644657\nn09648743\nn09648911\nn09649067\nn09650729\nn09650839\nn09650989\nn09651123\nn09651968\nn09652149\nn09653144\nn09653438\nn09654079\nn09654518\nn09654898\nn09655213\nn09655466\nn09656077\nn09657206\nn09657748\nn09658254\nn09658398\nn09658815\nn09658921\nn09659039\nn09659188\nn09660010\n
n09660240\nn09661873\nn09662038\nn09662661\nn09662951\nn09663248\nn09663786\nn09663999\nn09664556\nn09664908\nn09665367\nn09665545\nn09666349\nn09666476\nn09666883\nn09667358\nn09668199\nn09668437\nn09668562\nn09668988\nn09669631\nn09670280\nn09670521\nn09670909\nn09671089\nn09672590\nn09672725\nn09672840\nn09673091\nn09674412\nn09674786\nn09675045\nn09675673\nn09675799\nn09675922\nn09676021\nn09676247\nn09676884\nn09677427\nn09678747\nn09679028\nn09679170\nn09679925\nn09680908\nn09681107\nn09681234\nn09681973\nn09683180\nn09683757\nn09683924\nn09684082\nn09684901\nn09685233\nn09685806\nn09686262\nn09686401\nn09688233\nn09688804\nn09689435\nn09689958\nn09690083\nn09690208\nn09690496\nn09690621\nn09690864\nn09691604\nn09691729\nn09691858\nn09692125\nn09692915\nn09693244\nn09693982\nn09694664\nn09694771\nn09695019\nn09695132\nn09695514\nn09695620\nn09695979\nn09696456\nn09696585\nn09696763\nn09697401\nn09697986\nn09698644\nn09699020\nn09699642\nn09700125\nn09700964\nn09701148\nn09701833\nn09702134\nn09702673\nn09703101\nn09703344\nn09703485\nn09703708\nn09703809\nn09703932\nn09704057\nn09704157\nn09704283\nn09705003\nn09705124\nn09705671\nn09705784\nn09706029\nn09706255\nn09707061\nn09707289\nn09707735\nn09708750\nn09708889\nn09709531\nn09709673\nn09710041\nn09710164\nn09710886\nn09711132\nn09711435\nn09712324\nn09712448\nn09712696\nn09712967\nn09713108\nn09714120\nn09714694\nn09715165\nn09715303\nn09715427\nn09716047\nn09716933\nn09717233\nn09718217\nn09718811\nn09718936\nn09719309\nn09719794\nn09720033\nn09720256\nn09720595\nn09720702\nn09720842\nn09721244\nn09721444\nn09722064\nn09722658\nn09722817\nn09723067\nn09723819\nn09723944\nn09724234\nn09724533\nn09724656\nn09724785\nn09725000\nn09725229\nn09725546\nn09725653\nn09725772\nn09725935\nn09726621\nn09726811\nn09727440\nn09727826\nn09728137\nn09728285\nn09729062\nn09729156\nn09730077\nn09730204\nn09730824\nn09731343\nn09731436\nn09731571\nn09732170\nn09733459\nn09733793\nn09734185\nn09734450\nn09734535\nn09734639
\nn09735258\nn09735654\nn09736485\nn09736798\nn09736945\nn09737050\nn09737161\nn09737453\nn09738121\nn09738400\nn09740724\nn09741074\nn09741331\nn09741722\nn09741816\nn09741904\nn09741999\nn09742101\nn09742315\nn09742927\nn09743487\nn09743601\nn09743792\nn09744161\nn09744346\nn09744462\nn09744679\nn09744834\nn09745229\nn09745324\nn09745834\nn09745933\nn09746936\nn09747191\nn09747495\nn09748101\nn09748408\nn09748648\nn09748889\nn09749386\nn09750282\nn09750641\nn09750770\nn09750891\nn09751076\nn09751496\nn09751622\nn09751895\nn09752023\nn09752519\nn09753348\nn09753792\nn09754152\nn09754217\nn09754633\nn09754907\nn09755086\nn09755241\nn09755555\nn09755788\nn09755893\nn09756049\nn09756195\nn09756961\nn09757449\nn09758173\nn09758885\nn09759501\nn09760290\nn09760609\nn09760913\nn09761068\nn09761753\nn09762011\nn09762385\nn09763272\nn09763784\nn09764201\nn09764598\nn09764732\nn09764900\nn09765118\nn09765278\nn09767197\nn09769076\nn09769525\nn09769929\nn09770179\nn09770359\nn09771435\nn09772330\nn09772746\nn09772930\nn09773962\nn09774167\nn09774783\nn09775907\nn09776346\nn09776642\nn09776807\nn09777870\nn09778266\nn09778537\nn09778783\nn09778927\nn09779124\nn09779280\nn09779461\nn09779790\nn09780395\nn09780828\nn09780984\nn09781398\nn09781504\nn09781650\nn09782167\nn09782397\nn09782855\nn09783537\nn09783776\nn09783884\nn09784043\nn09784160\nn09784564\nn09785236\nn09785659\nn09785891\nn09786115\nn09787534\nn09787765\nn09788073\nn09788237\nn09789150\nn09789566\nn09789898\nn09790047\nn09790482\nn09791014\nn09791419\nn09791816\nn09792125\nn09792555\nn09792969\nn09793141\nn09793352\nn09793946\nn09794550\nn09794668\nn09795010\nn09795124\nn09795334\nn09796809\nn09796974\nn09797742\nn09797873\nn09797998\nn09798096\nn09800469\nn09800964\nn09801102\nn09801275\nn09801533\nn09802445\nn09802641\nn09802951\nn09804230\nn09805151\nn09805324\nn09805475\nn09806944\nn09807075\nn09808080\nn09808591\nn09809279\nn09809538\nn09809749\nn09809925\nn09810166\nn09811568\nn09811712\nn09811852\nn098132
19\nn09814252\nn09814381\nn09814488\nn09814567\nn09814660\nn09815455\nn09815790\nn09816654\nn09816771\nn09817174\nn09817386\nn09818022\nn09819477\nn09820044\nn09820263\nn09821831\nn09822830\nn09823153\nn09823287\nn09823502\nn09823832\nn09824135\nn09824609\nn09825096\nn09825750\nn09826204\nn09826605\nn09826821\nn09827246\nn09827363\nn09828216\nn09828403\nn09828988\nn09830194\nn09830400\nn09830629\nn09830759\nn09830926\nn09831962\nn09832456\nn09832633\nn09832978\nn09833111\nn09833275\nn09833441\nn09833536\nn09833751\nn09833997\nn09834258\nn09834378\nn09834699\nn09834885\nn09835017\nn09835153\nn09835230\nn09835348\nn09835506\nn09836160\nn09836343\nn09836519\nn09836786\nn09837459\nn09837720\nn09838295\nn09838370\nn09838621\nn09839702\nn09840217\nn09840435\nn09840520\nn09841188\nn09841515\nn09841696\nn09842047\nn09842288\nn09842395\nn09842528\nn09842823\nn09843443\nn09843602\nn09843716\nn09843824\nn09844457\nn09844898\nn09845401\nn09845849\nn09846142\nn09846469\nn09846586\nn09846755\nn09846894\nn09847267\nn09847344\nn09847543\nn09848110\nn09848489\nn09849167\nn09849990\nn09850760\nn09850974\nn09851165\nn09851575\nn09853541\nn09853645\nn09853881\nn09854218\nn09854421\nn09854915\nn09855433\nn09856401\nn09856671\nn09856827\nn09857007\nn09858165\nn09858299\nn09858733\nn09859152\nn09859285\nn09859975\nn09861287\nn09861599\nn09861863\nn09861946\nn09862183\nn09862621\nn09863031\nn09863339\nn09863749\nn09863936\nn09864632\nn09864968\nn09865068\nn09865162\nn09865398\nn09865672\nn09865744\nn09866115\nn09866354\nn09866559\nn09866661\nn09866817\nn09866922\nn09867069\nn09867154\nn09867311\nn09868270\nn09868782\nn09868899\nn09869317\nn09869447\nn09869578\nn09870096\nn09871095\nn09871229\nn09871681\nn09871867\nn09871952\nn09872066\nn09872557\nn09873348\nn09873473\nn09873769\nn09873899\nn09874428\nn09874725\nn09874862\nn09875025\nn09875979\nn09876701\nn09877288\nn09877587\nn09877750\nn09877951\nn09878921\nn09879552\nn09880189\nn09880741\nn09881265\nn09881358\nn09881895\nn09883047\nn0988
3452\nn09883807\nn09885059\nn09885866\nn09886403\nn09886540\nn09888635\nn09889065\nn09889170\nn09889691\nn09889941\nn09890192\nn09890749\nn09891730\nn09892262\nn09892513\nn09892693\nn09893191\nn09893344\nn09893502\nn09893600\nn09894143\nn09894445\nn09894654\nn09894909\nn09895222\nn09895480\nn09895561\nn09895701\nn09895902\nn09896170\nn09896311\nn09896401\nn09896685\nn09896826\nn09898020\nn09899289\nn09899671\nn09899782\nn09899929\nn09901337\nn09901502\nn09901642\nn09901786\nn09901921\nn09902128\nn09902353\nn09902731\nn09902851\nn09902954\nn09903153\nn09903501\nn09903639\nn09903936\nn09904208\nn09904837\nn09905050\nn09905185\nn09905530\nn09906293\nn09906449\nn09906704\nn09907804\nn09908769\nn09909660\nn09909929\nn09910222\nn09910374\nn09910556\nn09910840\nn09911226\nn09912431\nn09912681\nn09912907\nn09912995\nn09913329\nn09913455\nn09913593\nn09915434\nn09915651\nn09916348\nn09917214\nn09917345\nn09917481\nn09917593\nn09918248\nn09918554\nn09918867\nn09919061\nn09919200\nn09919451\nn09919899\nn09920106\nn09920283\nn09920901\nn09921034\nn09923003\nn09923186\nn09923418\nn09923561\nn09923673\nn09923996\nn09924106\nn09924195\nn09924313\nn09924437\nn09924996\nn09927089\nn09927451\nn09928136\nn09928451\nn09928845\nn09929202\nn09929298\nn09929577\nn09930257\nn09930628\nn09930876\nn09931165\nn09931418\nn09931640\nn09932098\nn09932336\nn09932508\nn09932788\nn09933020\nn09933098\nn09933842\nn09933972\nn09934337\nn09934488\nn09934774\nn09935107\nn09935434\nn09936825\nn09936892\nn09937056\nn09937688\nn09937802\nn09937903\nn09938080\nn09938449\nn09938991\nn09940725\nn09940818\nn09941089\nn09941571\nn09941787\nn09941964\nn09942697\nn09942970\nn09943239\nn09943811\nn09944022\nn09944160\nn09944430\nn09945021\nn09945223\nn09945319\nn09945603\nn09945745\nn09946814\nn09947127\nn09950457\nn09950728\nn09951070\nn09951274\nn09951524\nn09951616\nn09952163\nn09953052\nn09953350\nn09953615\nn09954355\nn09954639\nn09955406\nn09955944\nn09956578\nn09957523\nn09958133\nn09958292\nn09958447\nn09
958569\nn09959142\nn09959658\nn09960688\nn09961198\nn09961331\nn09961469\nn09961605\nn09961739\nn09962966\nn09964202\nn09964411\nn09965515\nn09965787\nn09966470\nn09966554\nn09967063\nn09967406\nn09967555\nn09967816\nn09967967\nn09968259\nn09968652\nn09968741\nn09968845\nn09970088\nn09970192\nn09970402\nn09970822\nn09971273\nn09971385\nn09971839\nn09972010\nn09972458\nn09972587\nn09974648\nn09975425\nn09976024\nn09976283\nn09976429\nn09976728\nn09976917\nn09978442\nn09979321\nn09979913\nn09980458\nn09980805\nn09980985\nn09981092\nn09981278\nn09981540\nn09981939\nn09982152\nn09982525\nn09983314\nn09983572\nn09983889\nn09984960\nn09985470\nn09985809\nn09985978\nn09986450\nn09986700\nn09986904\nn09987045\nn09987161\nn09987239\nn09988063\nn09988311\nn09988493\nn09988703\nn09989502\nn09990415\nn09990690\nn09990777\nn09991740\nn09991867\nn09992538\nn09992837\nn09993252\nn09993651\nn09994400\nn09994673\nn09994808\nn09994878\nn09995829\nn09996039\nn09996304\nn09996481\nn09997622\nn09998788\nn09999135\nn10000294\nn10000459\nn10000787\nn10001217\nn10001481\nn10001764\nn10002257\nn10002760\nn10003476\nn10004718\nn10005006\nn10005934\nn10006177\nn10006748\nn10007684\nn10007809\nn10007995\nn10008123\nn10008254\nn10009162\nn10009276\nn10009484\nn10009671\nn10010062\nn10010243\nn10010632\nn10010767\nn10010864\nn10011360\nn10011486\nn10012484\nn10013811\nn10015215\nn10015485\nn10015792\nn10015897\nn10017272\nn10017422\nn10018747\nn10018861\nn10019072\nn10019187\nn10019406\nn10020366\nn10020533\nn10020670\nn10020807\nn10020890\nn10022908\nn10023264\nn10023506\nn10023656\nn10024025\nn10024362\nn10024937\nn10025060\nn10025295\nn10025391\nn10025635\nn10026976\nn10027246\nn10027590\nn10028402\nn10028541\nn10029068\nn10030277\nn10032987\nn10033412\nn10033572\nn10033663\nn10033888\nn10034201\nn10034614\nn10035952\nn10036266\nn10036444\nn10036692\nn10036929\nn10037080\nn10037385\nn10037588\nn10037922\nn10038119\nn10038409\nn10038620\nn10039271\nn10039946\nn10040240\nn10040698\nn10040945\nn
10041373\nn10041887\nn10042690\nn10042845\nn10043024\nn10043491\nn10043643\nn10044682\nn10044879\nn10047199\nn10047459\nn10048117\nn10048367\nn10048612\nn10048836\nn10049363\nn10050043\nn10050880\nn10051026\nn10051761\nn10051861\nn10051975\nn10052694\nn10053439\nn10053808\nn10054657\nn10055297\nn10055410\nn10055566\nn10055730\nn10055847\nn10056103\nn10056611\nn10056719\nn10057271\nn10058411\nn10058962\nn10059067\nn10060075\nn10060175\nn10060352\nn10061043\nn10061195\nn10061431\nn10061882\nn10062042\nn10062176\nn10062275\nn10062492\nn10062594\nn10062716\nn10062905\nn10062996\nn10063635\nn10063919\nn10064831\nn10064977\nn10065758\nn10066206\nn10066314\nn10067011\nn10067305\nn10067600\nn10067968\nn10068234\nn10068425\nn10069296\nn10069981\nn10070108\nn10070377\nn10070449\nn10070563\nn10070711\nn10071332\nn10071557\nn10072054\nn10074249\nn10074578\nn10074735\nn10074841\nn10075299\nn10075693\nn10076224\nn10076483\nn10076604\nn10076957\nn10077106\nn10077593\nn10077879\nn10078131\nn10078719\nn10078806\nn10079399\nn10079893\nn10080117\nn10080508\nn10080869\nn10081204\nn10081842\nn10082043\nn10082299\nn10082423\nn10082562\nn10082687\nn10082997\nn10083677\nn10083823\nn10084043\nn10084295\nn10085101\nn10085869\nn10086383\nn10086744\nn10087434\nn10087736\nn10088200\nn10090745\nn10091349\nn10091450\nn10091564\nn10091651\nn10091861\nn10091997\nn10092488\nn10092643\nn10092794\nn10092978\nn10093167\nn10093475\nn10093818\nn10094320\nn10094584\nn10094782\nn10095265\nn10095420\nn10095769\nn10095869\nn10096126\nn10096508\nn10097262\nn10097477\nn10097590\nn10097842\nn10097995\nn10098245\nn10098388\nn10098517\nn10098624\nn10098710\nn10098862\nn10099002\nn10099375\nn10101308\nn10101634\nn10101981\nn10102800\nn10103155\nn10103228\nn10103921\nn10104064\nn10104487\nn10104756\nn10104888\nn10105085\nn10105733\nn10105906\nn10106387\nn10106509\nn10106995\nn10107173\nn10107303\nn10108018\nn10108089\nn10108464\nn10108832\nn10109443\nn10109662\nn10109826\nn10110093\nn10110731\nn10110893\nn10111358\
nn10111779\nn10111903\nn10112129\nn10113249\nn10113583\nn10113869\nn10114476\nn10114550\nn10114662\nn10115430\nn10115946\nn10116370\nn10116478\nn10116702\nn10117017\nn10117267\nn10117415\nn10117739\nn10117851\nn10118301\nn10118743\nn10118844\nn10119609\nn10120330\nn10120671\nn10121026\nn10121246\nn10121714\nn10121800\nn10122300\nn10122531\nn10123122\nn10123844\nn10126177\nn10126424\nn10126708\nn10127186\nn10127689\nn10128519\nn10128748\nn10129338\nn10129825\nn10130686\nn10130877\nn10131151\nn10131268\nn10131590\nn10131815\nn10132035\nn10132502\nn10134178\nn10134396\nn10134760\nn10134982\nn10135129\nn10135197\nn10135297\nn10136615\nn10136959\nn10137825\nn10138369\nn10138472\nn10139077\nn10139651\nn10140051\nn10140597\nn10140683\nn10140783\nn10140929\nn10141364\nn10141732\nn10142166\nn10142391\nn10142537\nn10142747\nn10142946\nn10143172\nn10143595\nn10143725\nn10144338\nn10145239\nn10145340\nn10145480\nn10145590\nn10145774\nn10145902\nn10146002\nn10146104\nn10146416\nn10146816\nn10146927\nn10147121\nn10147262\nn10147710\nn10147935\nn10148035\nn10148305\nn10148825\nn10149436\nn10149867\nn10150071\nn10150794\nn10150940\nn10151133\nn10151261\nn10151367\nn10151570\nn10151760\nn10152306\nn10152616\nn10152763\nn10153155\nn10153414\nn10153594\nn10153865\nn10154013\nn10154186\nn10154601\nn10155222\nn10155600\nn10155849\nn10156629\nn10156831\nn10157016\nn10157128\nn10157271\nn10158506\nn10159045\nn10159289\nn10159533\nn10160188\nn10160280\nn10160412\nn10161622\nn10162016\nn10162194\nn10162354\nn10164025\nn10164233\nn10164492\nn10165448\nn10166189\nn10166394\nn10167152\nn10167361\nn10167565\nn10167838\nn10168012\nn10168183\nn10168584\nn10168837\nn10169147\nn10169241\nn10169419\nn10169796\nn10170060\nn10170681\nn10170866\nn10171219\nn10171456\nn10171567\nn10172080\nn10173410\nn10173579\nn10173665\nn10173771\nn10174253\nn10174330\nn10174445\nn10174589\nn10174695\nn10174971\nn10175248\nn10175725\nn10176913\nn10177150\nn10178077\nn10178216\nn10179069\nn10180580\nn10180791\nn1018092
3\nn10181445\nn10181547\nn10181799\nn10181878\nn10182190\nn10182402\nn10183347\nn10183931\nn10184505\nn10185148\nn10185483\nn10185793\nn10186068\nn10186143\nn10186216\nn10186350\nn10186686\nn10186774\nn10187130\nn10187491\nn10187990\nn10188715\nn10188856\nn10188957\nn10189278\nn10189597\nn10190122\nn10190516\nn10191001\nn10191388\nn10191613\nn10192839\nn10193650\nn10194231\nn10194775\nn10195056\nn10195155\nn10195261\nn10195593\nn10196404\nn10196725\nn10197392\nn10198437\nn10198832\nn10199251\nn10200246\nn10200781\nn10202225\nn10202624\nn10202763\nn10203949\nn10204177\nn10204833\nn10205231\nn10205344\nn10205457\nn10205714\nn10206173\nn10206506\nn10206629\nn10207077\nn10207169\nn10208189\nn10208847\nn10208950\nn10209082\nn10209731\nn10210137\nn10210512\nn10210648\nn10210911\nn10211036\nn10211666\nn10211830\nn10212231\nn10212501\nn10212780\nn10213034\nn10213429\nn10214062\nn10214390\nn10215623\nn10216106\nn10216403\nn10217208\nn10218043\nn10218164\nn10218292\nn10219240\nn10219453\nn10219879\nn10220080\nn10220924\nn10221312\nn10221520\nn10222170\nn10222259\nn10222497\nn10222716\nn10223069\nn10223177\nn10223606\nn10224578\nn10225219\nn10225931\nn10226413\nn10227166\nn10227266\nn10227393\nn10227490\nn10227698\nn10227793\nn10227985\nn10228278\nn10228468\nn10228592\nn10228712\nn10229883\nn10230216\nn10233248\nn10235024\nn10235269\nn10235385\nn10236304\nn10236521\nn10236842\nn10237069\nn10237196\nn10237464\nn10237556\nn10237676\nn10237799\nn10238272\nn10238375\nn10239928\nn10240082\nn10240235\nn10240417\nn10240821\nn10241024\nn10241300\nn10242328\nn10243137\nn10243273\nn10243483\nn10243664\nn10243872\nn10244108\nn10244359\nn10244913\nn10245029\nn10245341\nn10245507\nn10245639\nn10245863\nn10246317\nn10246395\nn10246703\nn10247358\nn10247880\nn10248008\nn10248198\nn10248377\nn10249191\nn10249270\nn10249459\nn10249869\nn10249950\nn10250712\nn10251329\nn10251612\nn10252075\nn10252222\nn10252354\nn10252547\nn10253122\nn10253296\nn10253479\nn10253611\nn10253703\nn10255459\nn10257
221\nn10258602\nn10258786\nn10259348\nn10259780\nn10259997\nn10260473\nn10260706\nn10260800\nn10261211\nn10261511\nn10261624\nn10261862\nn10262343\nn10262445\nn10262561\nn10262655\nn10262880\nn10263146\nn10263411\nn10263790\nn10265281\nn10265801\nn10265891\nn10266016\nn10266328\nn10266848\nn10267166\nn10267311\nn10267865\nn10268629\nn10269199\nn10269289\nn10271677\nn10272782\nn10272913\nn10273064\nn10274173\nn10274318\nn10274815\nn10275249\nn10275395\nn10275848\nn10276045\nn10276477\nn10276942\nn10277027\nn10277638\nn10277815\nn10277912\nn10278456\nn10279018\nn10279778\nn10280034\nn10280130\nn10280598\nn10280674\nn10281546\nn10281770\nn10281896\nn10282482\nn10282672\nn10283170\nn10283366\nn10283546\nn10284064\nn10284871\nn10284965\nn10286282\nn10286539\nn10286749\nn10288964\nn10289039\nn10289176\nn10289462\nn10289766\nn10290422\nn10290541\nn10290813\nn10290919\nn10291110\nn10291469\nn10291822\nn10291942\nn10292316\nn10293332\nn10293590\nn10293861\nn10294020\nn10294139\nn10295371\nn10295479\nn10296176\nn10296444\nn10297234\nn10297367\nn10297531\nn10297841\nn10298202\nn10298271\nn10298647\nn10298912\nn10299125\nn10299250\nn10299700\nn10299875\nn10300041\nn10300154\nn10300303\nn10300500\nn10300654\nn10300829\nn10302576\nn10302700\nn10302905\nn10303037\nn10303814\nn10304086\nn10304650\nn10304914\nn10305635\nn10305802\nn10306004\nn10306279\nn10306496\nn10306595\nn10306890\nn10307114\nn10308066\nn10308168\nn10308275\nn10308504\nn10308653\nn10308732\nn10310783\nn10311506\nn10311661\nn10312287\nn10312491\nn10312600\nn10313000\nn10313239\nn10313441\nn10313724\nn10314054\nn10314182\nn10314517\nn10314836\nn10315217\nn10315456\nn10315561\nn10315730\nn10316360\nn10316527\nn10316862\nn10317007\nn10317500\nn10317963\nn10318293\nn10318607\nn10318686\nn10319313\nn10320484\nn10320863\nn10321126\nn10321340\nn10321632\nn10321882\nn10322238\nn10323634\nn10323752\nn10323999\nn10324560\nn10325549\nn10325774\nn10326776\nn10327143\nn10327987\nn10328123\nn10328328\nn10328437\nn10328696\nn103
28941\nn10329035\nn10330593\nn10330931\nn10331098\nn10331167\nn10331258\nn10331347\nn10331841\nn10332110\nn10332385\nn10332861\nn10332953\nn10333044\nn10333165\nn10333317\nn10333439\nn10333601\nn10333838\nn10334009\nn10334461\nn10334782\nn10335246\nn10335801\nn10335931\nn10336411\nn10336904\nn10337488\nn10338231\nn10338391\nn10339179\nn10339251\nn10339717\nn10340312\nn10341243\nn10341343\nn10341446\nn10341573\nn10341955\nn10342180\nn10342367\nn10342543\nn10342893\nn10342992\nn10343088\nn10343355\nn10343449\nn10343554\nn10343869\nn10344121\nn10344203\nn10344319\nn10344656\nn10344774\nn10345015\nn10345100\nn10345302\nn10345422\nn10345659\nn10346015\nn10347204\nn10347446\nn10348526\nn10349243\nn10349750\nn10349836\nn10350220\nn10350774\nn10351064\nn10353016\nn10353355\nn10353928\nn10354265\nn10354754\nn10355142\nn10355306\nn10355449\nn10355688\nn10355806\nn10356450\nn10356877\nn10357012\nn10357613\nn10357737\nn10358032\nn10358124\nn10358575\nn10359117\nn10359422\nn10359546\nn10359659\nn10360366\nn10360747\nn10361060\nn10361194\nn10361296\nn10361525\nn10362003\nn10362319\nn10362557\nn10363445\nn10363573\nn10364198\nn10364502\nn10365514\nn10366145\nn10366276\nn10366966\nn10368291\nn10368528\nn10368624\nn10368711\nn10368798\nn10369095\nn10369317\nn10369417\nn10369528\nn10369699\nn10369955\nn10370381\nn10370955\nn10371052\nn10371221\nn10371330\nn10371450\nn10373390\nn10373525\nn10374541\nn10374849\nn10374943\nn10375052\nn10375314\nn10375402\nn10376523\nn10376890\nn10377021\nn10377185\nn10377291\nn10377542\nn10377633\nn10378026\nn10378113\nn10378780\nn10379376\nn10380126\nn10380499\nn10380672\nn10381804\nn10381981\nn10382157\nn10382302\nn10382480\nn10382710\nn10382825\nn10383094\nn10383237\nn10383505\nn10383816\nn10384214\nn10384392\nn10384496\nn10385566\nn10386196\nn10386754\nn10386874\nn10386984\nn10387196\nn10387324\nn10387836\nn10389865\nn10389976\nn10390600\nn10390698\nn10390807\nn10391416\nn10393909\nn10394434\nn10394786\nn10395073\nn10395209\nn10395390\nn10395828\nn1
0396106\nn10396337\nn10396727\nn10396908\nn10397001\nn10397142\nn10397392\nn10399130\nn10400003\nn10400108\nn10400205\nn10400437\nn10400618\nn10400998\nn10401204\nn10401331\nn10401639\nn10402709\nn10402824\nn10403633\nn10403876\nn10404426\nn10404998\nn10405540\nn10405694\nn10406266\nn10406391\nn10406765\nn10407310\nn10407954\nn10408809\nn10409459\nn10409752\nn10410246\nn10410996\nn10411356\nn10411551\nn10411867\nn10414239\nn10414768\nn10414865\nn10415037\nn10416567\nn10417288\nn10417424\nn10417551\nn10417682\nn10417843\nn10417969\nn10418101\nn10418735\nn10419047\nn10419472\nn10419630\nn10419785\nn10420031\nn10420277\nn10420507\nn10420649\nn10421016\nn10421470\nn10421956\nn10422405\nn10425946\nn10426454\nn10426630\nn10427223\nn10427359\nn10427764\nn10428004\nn10431122\nn10431625\nn10432189\nn10432441\nn10432875\nn10432957\nn10433077\nn10433452\nn10433610\nn10433737\nn10435169\nn10435251\nn10435716\nn10435988\nn10436334\nn10437014\nn10437137\nn10437262\nn10437698\nn10438172\nn10438619\nn10438842\nn10439373\nn10439523\nn10439727\nn10439851\nn10441037\nn10441124\nn10441694\nn10441962\nn10442093\nn10442232\nn10442417\nn10442573\nn10443032\nn10443659\nn10443830\nn10444194\nn10448322\nn10448455\nn10449664\nn10450038\nn10450161\nn10450303\nn10451450\nn10451590\nn10451858\nn10453184\nn10455619\nn10456070\nn10456138\nn10456696\nn10457214\nn10457444\nn10457903\nn10458111\nn10458356\nn10458596\nn10459882\nn10460033\nn10461060\nn10462588\nn10462751\nn10462860\nn10464052\nn10464542\nn10464711\nn10464870\nn10465002\nn10465451\nn10465831\nn10466198\nn10466564\nn10466918\nn10467179\nn10467395\nn10468750\nn10469611\nn10469874\nn10470779\nn10471640\nn10471732\nn10471859\nn10472129\nn10472447\nn10473453\nn10473562\nn10473789\nn10473917\nn10474064\nn10474343\nn10474446\nn10474645\nn10475835\nn10475940\nn10476467\nn10477713\nn10477955\nn10478118\nn10478293\nn10478462\nn10478827\nn10478960\nn10479135\nn10479328\nn10481167\nn10481268\nn10482054\nn10482220\nn10482587\nn10482921\nn10483138\n
n10483395\nn10483799\nn10483890\nn10484858\nn10485298\nn10485883\nn10486166\nn10486236\nn10486561\nn10487182\nn10487363\nn10487592\nn10488016\nn10488309\nn10488656\nn10489426\nn10490421\nn10491998\nn10492086\nn10492727\nn10493199\nn10493419\nn10493685\nn10493835\nn10493922\nn10494195\nn10494373\nn10495167\nn10495421\nn10495555\nn10495756\nn10496393\nn10496489\nn10497135\nn10497534\nn10497645\nn10498046\nn10498699\nn10498816\nn10498986\nn10499110\nn10499232\nn10499355\nn10499631\nn10499857\nn10500217\nn10500419\nn10500603\nn10500824\nn10500942\nn10501453\nn10501635\nn10502046\nn10502329\nn10502950\nn10503818\nn10504090\nn10504206\nn10505347\nn10505613\nn10505732\nn10505942\nn10506336\nn10506544\nn10506915\nn10507070\nn10507380\nn10507482\nn10507565\nn10507692\nn10508141\nn10508379\nn10508710\nn10509063\nn10509161\nn10509810\nn10510245\nn10510974\nn10511771\nn10512201\nn10512372\nn10512708\nn10512859\nn10513509\nn10513823\nn10513938\nn10514051\nn10514121\nn10514255\nn10514429\nn10514784\nn10515863\nn10516527\nn10517137\nn10517283\nn10518349\nn10519126\nn10519494\nn10519984\nn10520286\nn10520544\nn10520964\nn10521100\nn10521662\nn10521853\nn10522035\nn10522324\nn10522759\nn10523341\nn10524076\nn10524223\nn10524869\nn10525134\nn10525436\nn10525617\nn10525878\nn10526534\nn10527147\nn10527334\nn10528023\nn10528148\nn10528493\nn10529231\nn10530150\nn10530383\nn10530571\nn10530959\nn10531109\nn10531445\nn10531838\nn10533874\nn10533983\nn10536134\nn10536274\nn10536416\nn10537708\nn10537906\nn10538629\nn10538733\nn10538853\nn10539015\nn10539160\nn10539278\nn10540114\nn10540252\nn10540656\nn10541833\nn10542608\nn10542761\nn10542888\nn10543161\nn10543937\nn10544232\nn10544748\nn10545792\nn10546428\nn10546633\nn10548419\nn10548537\nn10548681\nn10549510\nn10550252\nn10550369\nn10550468\nn10551576\nn10552393\nn10553140\nn10553235\nn10554024\nn10554141\nn10554846\nn10555059\nn10555430\nn10556033\nn10556518\nn10556704\nn10556825\nn10557246\nn10557854\nn10559009\nn10559288\nn10559508
\nn10559683\nn10559996\nn10560106\nn10560637\nn10561222\nn10561320\nn10561736\nn10562135\nn10562283\nn10562509\nn10562968\nn10563314\nn10563403\nn10563711\nn10564098\nn10565502\nn10565667\nn10566072\nn10567613\nn10567722\nn10567848\nn10568200\nn10568358\nn10568443\nn10568608\nn10568915\nn10569011\nn10569179\nn10570019\nn10570704\nn10571907\nn10572706\nn10572889\nn10573957\nn10574311\nn10574538\nn10574840\nn10575463\nn10575594\nn10575787\nn10576223\nn10576316\nn10576676\nn10576818\nn10576962\nn10577182\nn10577284\nn10577710\nn10577820\nn10578021\nn10578162\nn10578471\nn10578656\nn10579062\nn10579549\nn10580030\nn10580437\nn10580535\nn10581648\nn10581890\nn10582604\nn10582746\nn10583387\nn10583790\nn10585077\nn10585217\nn10585628\nn10586166\nn10586265\nn10586444\nn10586903\nn10586998\nn10588074\nn10588357\nn10588724\nn10588965\nn10589666\nn10590146\nn10590239\nn10590452\nn10590903\nn10591072\nn10591811\nn10592049\nn10592811\nn10593521\nn10594147\nn10594523\nn10594857\nn10595164\nn10595647\nn10596517\nn10596899\nn10597505\nn10597745\nn10597889\nn10598013\nn10598181\nn10598459\nn10598904\nn10599215\nn10599806\nn10601234\nn10601362\nn10602119\nn10602470\nn10602985\nn10603528\nn10603851\nn10604275\nn10604380\nn10604634\nn10604880\nn10604979\nn10605253\nn10605737\nn10607291\nn10607478\nn10609092\nn10609198\nn10610465\nn10610850\nn10611267\nn10611613\nn10612210\nn10612373\nn10612518\nn10613996\nn10614507\nn10614629\nn10615179\nn10615334\nn10616578\nn10617024\nn10617193\nn10617397\nn10618234\nn10618342\nn10618465\nn10618685\nn10618848\nn10619492\nn10619642\nn10619888\nn10620212\nn10620586\nn10620758\nn10621294\nn10621400\nn10621514\nn10622053\nn10624074\nn10624310\nn10624437\nn10624540\nn10625860\nn10626630\nn10627252\nn10628097\nn10628644\nn10629329\nn10629647\nn10629939\nn10630093\nn10630188\nn10631131\nn10631309\nn10631654\nn10632576\nn10633298\nn10633450\nn10634464\nn10634849\nn10634990\nn10635788\nn10636488\nn10637483\nn10638922\nn10639238\nn10639359\nn10639637\nn106398
17\nn10641223\nn10642596\nn10642705\nn10643095\nn10643837\nn10643937\nn10644598\nn10645017\nn10645223\nn10646032\nn10646140\nn10646433\nn10646641\nn10646780\nn10646942\nn10647745\nn10648237\nn10648696\nn10649197\nn10649308\nn10650162\nn10652605\nn10652703\nn10654015\nn10654211\nn10654321\nn10654827\nn10654932\nn10655169\nn10655442\nn10655594\nn10655730\nn10655986\nn10656120\nn10656223\nn10656969\nn10657306\nn10657556\nn10657835\nn10658304\nn10659042\nn10659762\nn10660128\nn10660621\nn10660883\nn10661002\nn10661216\nn10661563\nn10661732\nn10663315\nn10663549\nn10665302\nn10665587\nn10665698\nn10666752\nn10667477\nn10667709\nn10667863\nn10668450\nn10668666\nn10669991\nn10671042\nn10671613\nn10671736\nn10671898\nn10672371\nn10672540\nn10672662\nn10673296\nn10673776\nn10674130\nn10674713\nn10675010\nn10675142\nn10675609\nn10676018\nn10676434\nn10676569\nn10678937\nn10679174\nn10679503\nn10679610\nn10679723\nn10680609\nn10680796\nn10681194\nn10681557\nn10682713\nn10682953\nn10683675\nn10684146\nn10684630\nn10684827\nn10685398\nn10686073\nn10686517\nn10686694\nn10686885\nn10688356\nn10688811\nn10689306\nn10690268\nn10690421\nn10690648\nn10691318\nn10691937\nn10692090\nn10692482\nn10692883\nn10693235\nn10693334\nn10693824\nn10694258\nn10694939\nn10695450\nn10696101\nn10696508\nn10697135\nn10697282\nn10698368\nn10699558\nn10699752\nn10699981\nn10700105\nn10700201\nn10700640\nn10700963\nn10701180\nn10701644\nn10701962\nn10702167\nn10702615\nn10703221\nn10703336\nn10703480\nn10703692\nn10704238\nn10704712\nn10704886\nn10705448\nn10705615\nn10706812\nn10707134\nn10707233\nn10707707\nn10708292\nn10708454\nn10709529\nn10710171\nn10710259\nn10710778\nn10710913\nn10711483\nn10711766\nn10712229\nn10712374\nn10712474\nn10712690\nn10712835\nn10713254\nn10713686\nn10713843\nn10714195\nn10715030\nn10715347\nn10715789\nn10716576\nn10716864\nn10717055\nn10717196\nn10717337\nn10718131\nn10718349\nn10718509\nn10718665\nn10718952\nn10719036\nn10719132\nn10719267\nn10719807\nn10720197\nn1072
0453\nn10720964\nn10721124\nn10721321\nn10721612\nn10721708\nn10721819\nn10722029\nn10722575\nn10722965\nn10723230\nn10723597\nn10724132\nn10724372\nn10724570\nn10725280\nn10726031\nn10726786\nn10727016\nn10727171\nn10727458\nn10728117\nn10728233\nn10728624\nn10728998\nn10729330\nn10730542\nn10730728\nn10731013\nn10731732\nn10732010\nn10732521\nn10732854\nn10732967\nn10733820\nn10734394\nn10734741\nn10734891\nn10734963\nn10735173\nn10735298\nn10735984\nn10737103\nn10737264\nn10738111\nn10738215\nn10738670\nn10738871\nn10739135\nn10739297\nn10739391\nn10740594\nn10740732\nn10740868\nn10741152\nn10741367\nn10741493\nn10742005\nn10742111\nn10742546\nn10742997\nn10743124\nn10743356\nn10744078\nn10744164\nn10745006\nn10745770\nn10746931\nn10747119\nn10747424\nn10747548\nn10747965\nn10748142\nn10748506\nn10748620\nn10749928\nn10750031\nn10750188\nn10750640\nn10751026\nn10751152\nn10751265\nn10751710\nn10752480\nn10753061\nn10753182\nn10753339\nn10753442\nn10753989\nn10754189\nn10754281\nn10754449\nn10755080\nn10755164\nn10755394\nn10755648\nn10756061\nn10756148\nn10756261\nn10756641\nn10756837\nn10757050\nn10757492\nn10758337\nn10758445\nn10758949\nn10759151\nn10759331\nn10759982\nn10760199\nn10760622\nn10760951\nn10761190\nn10761326\nn10761519\nn10762212\nn10762480\nn10763075\nn10763245\nn10763383\nn10763620\nn10764465\nn10764622\nn10764719\nn10765305\nn10765587\nn10765679\nn10765885\nn10766260\nn10768148\nn10768272\nn10768903\nn10769084\nn10769188\nn10769321\nn10769459\nn10771066\nn10772092\nn10772580\nn10772937\nn10773665\nn10773800\nn10774329\nn10774756\nn10775003\nn10775128\nn10776052\nn10776339\nn10776887\nn10777299\nn10778044\nn10778148\nn10778711\nn10778999\nn10779610\nn10779897\nn10779995\nn10780284\nn10780632\nn10781236\nn10781817\nn10782362\nn10782471\nn10782791\nn10782940\nn10783240\nn10783539\nn10783646\nn10783734\nn10784113\nn10784544\nn10784922\nn10785480\nn10787470\nn10788852\nn10789415\nn10789709\nn10791115\nn10791221\nn10791820\nn10791890\nn10792335\nn10
792506\nn10792856\nn10793570\nn10793799\nn10794014\nn10801561\nn10801802\nn10802507\nn10802621\nn10802953\nn10803031\nn10803282\nn10803978\nn10804287\nn10804636\nn10804732\nn10805501\nn10806113\nn10994097\nn11100798\nn11196627\nn11242849\nn11318824\nn11346873\nn11448153\nn11487732\nn11508382\nn11511327\nn11524451\nn11530008\nn11531193\nn11531334\nn11532682\nn11533212\nn11533999\nn11536567\nn11536673\nn11537327\nn11539289\nn11542137\nn11542640\nn11544015\nn11545350\nn11545524\nn11545714\nn11547562\nn11547855\nn11548728\nn11548870\nn11549009\nn11549245\nn11549779\nn11549895\nn11552133\nn11552386\nn11552594\nn11552806\nn11552976\nn11553240\nn11553522\nn11596108\nn11597657\nn11598287\nn11598686\nn11598886\nn11599324\nn11600372\nn11601177\nn11601333\nn11601918\nn11602091\nn11602478\nn11602873\nn11603246\nn11603462\nn11603835\nn11604046\nn11608250\nn11609475\nn11609684\nn11609862\nn11610047\nn11610215\nn11610437\nn11610602\nn11610823\nn11611087\nn11611233\nn11611356\nn11611561\nn11611758\nn11612018\nn11612235\nn11612349\nn11612575\nn11612923\nn11613219\nn11613459\nn11613692\nn11613867\nn11614039\nn11614250\nn11614420\nn11614713\nn11615026\nn11615259\nn11615387\nn11615607\nn11615812\nn11615967\nn11616260\nn11616486\nn11616662\nn11616852\nn11617090\nn11617272\nn11617631\nn11617878\nn11618079\nn11618290\nn11618525\nn11618861\nn11619227\nn11619455\nn11619687\nn11619845\nn11620016\nn11620389\nn11620673\nn11621029\nn11621281\nn11621547\nn11621727\nn11621950\nn11622184\nn11622368\nn11622591\nn11622771\nn11623105\nn11623815\nn11623967\nn11624192\nn11624531\nn11625003\nn11625223\nn11625391\nn11625632\nn11625804\nn11626010\nn11626152\nn11626409\nn11626585\nn11626826\nn11627168\nn11627512\nn11627714\nn11627908\nn11628087\nn11628456\nn11628793\nn11629047\nn11629354\nn11630017\nn11630489\nn11631159\nn11631405\nn11631619\nn11631854\nn11631985\nn11632167\nn11632376\nn11632619\nn11632929\nn11633284\nn11634736\nn11635152\nn11635433\nn11635830\nn11636204\nn11636835\nn11639084\nn11639306\nn
11639445\nn11640132\nn11643835\nn11644046\nn11644226\nn11644462\nn11644872\nn11645163\nn11645590\nn11645914\nn11646167\nn11646344\nn11646517\nn11646694\nn11646955\nn11647306\nn11647703\nn11647868\nn11648039\nn11648268\nn11648776\nn11649150\nn11649359\nn11649878\nn11650160\nn11650307\nn11650430\nn11650558\nn11650759\nn11652039\nn11652217\nn11652376\nn11652578\nn11652753\nn11652966\nn11653126\nn11653570\nn11653904\nn11654293\nn11654438\nn11654984\nn11655152\nn11655592\nn11655974\nn11656123\nn11656549\nn11656771\nn11657585\nn11658331\nn11658544\nn11658709\nn11659248\nn11659627\nn11660300\nn11661372\nn11661909\nn11662128\nn11662371\nn11662585\nn11662937\nn11663263\nn11664418\nn11665372\nn11666854\nn11668117\nn11669786\nn11669921\nn11672269\nn11672400\nn11674019\nn11674332\nn11675025\nn11675404\nn11675738\nn11676500\nn11676743\nn11676850\nn11677485\nn11677902\nn11678010\nn11678299\nn11678377\nn11679378\nn11680457\nn11680596\nn11682659\nn11683216\nn11683838\nn11684264\nn11684499\nn11684654\nn11685091\nn11685621\nn11686195\nn11686652\nn11686780\nn11686912\nn11687071\nn11687432\nn11687789\nn11687964\nn11688069\nn11688378\nn11689197\nn11689367\nn11689483\nn11689678\nn11689815\nn11689957\nn11690088\nn11690254\nn11690455\nn11691046\nn11691857\nn11692265\nn11692792\nn11693981\nn11694300\nn11694469\nn11694664\nn11694866\nn11695085\nn11695285\nn11695599\nn11695974\nn11696450\nn11696935\nn11697560\nn11697802\nn11698042\nn11698245\nn11699442\nn11699751\nn11700058\nn11700279\nn11700864\nn11701066\nn11701302\nn11702713\nn11703669\nn11704093\nn11704620\nn11704791\nn11705171\nn11705387\nn11705573\nn11705776\nn11706325\nn11706761\nn11706942\nn11707229\nn11707827\nn11708658\nn11708857\nn11709045\nn11709205\nn11709674\nn11710136\nn11710393\nn11710658\nn11710827\nn11710987\nn11711289\nn11711537\nn11711764\nn11711971\nn11712282\nn11713164\nn11713370\nn11713763\nn11714382\nn11715430\nn11715678\nn11716698\nn11717399\nn11717577\nn11718296\nn11718681\nn11719286\nn11720353\nn11720643\nn11720891\
nn11721337\nn11721642\nn11722036\nn11722342\nn11722466\nn11722621\nn11722982\nn11723227\nn11723452\nn11723770\nn11723986\nn11724109\nn11724660\nn11725015\nn11725311\nn11725480\nn11725623\nn11725821\nn11725973\nn11726145\nn11726269\nn11726433\nn11726707\nn11727091\nn11727358\nn11727540\nn11727738\nn11728099\nn11728769\nn11728945\nn11729142\nn11729478\nn11729860\nn11730015\nn11730458\nn11730602\nn11730750\nn11730933\nn11731157\nn11731659\nn11732052\nn11732567\nn11733054\nn11733312\nn11733548\nn11734493\nn11734698\nn11735053\nn11735570\nn11735977\nn11736362\nn11736694\nn11736851\nn11737009\nn11737125\nn11737534\nn11738547\nn11738997\nn11739365\nn11739978\nn11740414\nn11741175\nn11741350\nn11741575\nn11741797\nn11742310\nn11742878\nn11744011\nn11744108\nn11744471\nn11745817\nn11746600\nn11747468\nn11748002\nn11748811\nn11749112\nn11749603\nn11750173\nn11750508\nn11750989\nn11751765\nn11751974\nn11752578\nn11752798\nn11752937\nn11753143\nn11753355\nn11753562\nn11753700\nn11754893\nn11756092\nn11756329\nn11756669\nn11756870\nn11757017\nn11757190\nn11757653\nn11757851\nn11758122\nn11758276\nn11758483\nn11758799\nn11759224\nn11759404\nn11759609\nn11759853\nn11760785\nn11761202\nn11761650\nn11761836\nn11762018\nn11762433\nn11762927\nn11763142\nn11763625\nn11763874\nn11764478\nn11764814\nn11765568\nn11766046\nn11766189\nn11766432\nn11767354\nn11767877\nn11768816\nn11769176\nn11769621\nn11769803\nn11770256\nn11771147\nn11771539\nn11771746\nn11771924\nn11772408\nn11772879\nn11773408\nn11773628\nn11773987\nn11774513\nn11774972\nn11775340\nn11775626\nn11776234\nn11777080\nn11778092\nn11778257\nn11779300\nn11780148\nn11780424\nn11781176\nn11782036\nn11782266\nn11782761\nn11782878\nn11783162\nn11783920\nn11784126\nn11784497\nn11785276\nn11785668\nn11785875\nn11786131\nn11786539\nn11786843\nn11787190\nn11788039\nn11788727\nn11789066\nn11789438\nn11789589\nn11789962\nn11790089\nn11790788\nn11790936\nn11791341\nn11791569\nn11792029\nn11792341\nn11792742\nn11793403\nn11793779\nn1179402
4\nn11794139\nn11794519\nn11795049\nn11795216\nn11795580\nn11796005\nn11796188\nn11797321\nn11797508\nn11797981\nn11798270\nn11798496\nn11798688\nn11798978\nn11799331\nn11799732\nn11800236\nn11800565\nn11801392\nn11801665\nn11801891\nn11802410\nn11802586\nn11802800\nn11802995\nn11805255\nn11805544\nn11805956\nn11806219\nn11806369\nn11806521\nn11806679\nn11806814\nn11807108\nn11807525\nn11807696\nn11807979\nn11808299\nn11808468\nn11808721\nn11808932\nn11809094\nn11809271\nn11809437\nn11809594\nn11809754\nn11810030\nn11810358\nn11811059\nn11811473\nn11811706\nn11811921\nn11812094\nn11812910\nn11813077\nn11814584\nn11814996\nn11815491\nn11815721\nn11815918\nn11816121\nn11816336\nn11816649\nn11816829\nn11817160\nn11817501\nn11817914\nn11818069\nn11818636\nn11819509\nn11819912\nn11820965\nn11821184\nn11822300\nn11823043\nn11823305\nn11823436\nn11823756\nn11824146\nn11824344\nn11824747\nn11825351\nn11825749\nn11826198\nn11826569\nn11827541\nn11828577\nn11828973\nn11829205\nn11829672\nn11829922\nn11830045\nn11830252\nn11830400\nn11830714\nn11830906\nn11831100\nn11831297\nn11831521\nn11832214\nn11832480\nn11832671\nn11832899\nn11833373\nn11833749\nn11834272\nn11834654\nn11834890\nn11835251\nn11836327\nn11836722\nn11837204\nn11837351\nn11837562\nn11837743\nn11837970\nn11838413\nn11838916\nn11839460\nn11839568\nn11839823\nn11840067\nn11840246\nn11840476\nn11840764\nn11841247\nn11843441\nn11844371\nn11844892\nn11845557\nn11845793\nn11845913\nn11846312\nn11846425\nn11846765\nn11847169\nn11848479\nn11848867\nn11849271\nn11849467\nn11849871\nn11849983\nn11850521\nn11850918\nn11851258\nn11851578\nn11851839\nn11852028\nn11852148\nn11852531\nn11853079\nn11853356\nn11853813\nn11854479\nn11855274\nn11855435\nn11855553\nn11855842\nn11856573\nn11857696\nn11857875\nn11858077\nn11858703\nn11858814\nn11859275\nn11859472\nn11859737\nn11860208\nn11860555\nn11861238\nn11861487\nn11861641\nn11861853\nn11862835\nn11863467\nn11863877\nn11865071\nn11865276\nn11865429\nn11865574\nn11865874\nn11866
248\nn11866706\nn11867311\nn11868814\nn11869351\nn11869689\nn11870044\nn11870418\nn11870747\nn11871059\nn11871496\nn11871748\nn11872146\nn11872324\nn11872658\nn11873182\nn11873612\nn11874081\nn11874423\nn11874878\nn11875523\nn11875691\nn11875938\nn11876204\nn11876432\nn11876634\nn11876803\nn11877193\nn11877283\nn11877473\nn11877646\nn11877860\nn11878101\nn11878283\nn11878633\nn11879054\nn11879722\nn11879895\nn11881189\nn11882074\nn11882237\nn11882426\nn11882636\nn11882821\nn11882972\nn11883328\nn11883628\nn11883945\nn11884384\nn11884967\nn11885856\nn11887119\nn11887310\nn11887476\nn11887750\nn11888061\nn11888424\nn11888800\nn11889205\nn11889619\nn11890022\nn11890150\nn11890884\nn11891175\nn11892029\nn11892181\nn11892637\nn11892817\nn11893640\nn11893916\nn11894327\nn11894558\nn11894770\nn11895092\nn11895472\nn11895714\nn11896141\nn11896722\nn11897116\nn11897466\nn11898639\nn11898775\nn11899223\nn11899762\nn11899921\nn11900569\nn11901294\nn11901452\nn11901597\nn11901759\nn11901977\nn11902200\nn11902389\nn11902709\nn11902982\nn11903333\nn11903671\nn11904109\nn11904274\nn11905392\nn11905749\nn11906127\nn11906514\nn11906917\nn11907100\nn11907405\nn11907689\nn11908549\nn11908846\nn11909864\nn11910271\nn11910460\nn11910666\nn11915214\nn11915658\nn11915899\nn11916467\nn11916696\nn11917407\nn11917835\nn11918286\nn11918473\nn11918808\nn11919447\nn11919761\nn11919975\nn11920133\nn11920498\nn11920663\nn11920998\nn11921395\nn11921792\nn11922661\nn11922755\nn11922839\nn11922926\nn11923174\nn11923397\nn11923637\nn11924014\nn11924445\nn11924849\nn11925303\nn11925450\nn11925898\nn11926365\nn11926833\nn11926976\nn11927215\nn11927740\nn11928352\nn11928858\nn11929743\nn11930038\nn11930203\nn11930353\nn11930571\nn11930788\nn11930994\nn11931135\nn11931540\nn11931918\nn11932745\nn11932927\nn11933099\nn11933257\nn11933387\nn11933546\nn11933728\nn11933903\nn11934041\nn11934239\nn11934463\nn11934616\nn11934807\nn11935027\nn11935187\nn11935330\nn11935469\nn11935627\nn11935715\nn11935794\nn119
35877\nn11935953\nn11936027\nn11936113\nn11936199\nn11936287\nn11936369\nn11936448\nn11936539\nn11936624\nn11936707\nn11936782\nn11936864\nn11936946\nn11937023\nn11937102\nn11937195\nn11937278\nn11937360\nn11937446\nn11937692\nn11938556\nn11939180\nn11939491\nn11939699\nn11940006\nn11940349\nn11940599\nn11940750\nn11941094\nn11941478\nn11941924\nn11942659\nn11943133\nn11943407\nn11943660\nn11943992\nn11944196\nn11944751\nn11944954\nn11945367\nn11945514\nn11945783\nn11946051\nn11946313\nn11946727\nn11946918\nn11947251\nn11947629\nn11947802\nn11948044\nn11948264\nn11948469\nn11948864\nn11949015\nn11949402\nn11949857\nn11950345\nn11950686\nn11950877\nn11951052\nn11951511\nn11951820\nn11952346\nn11952541\nn11953038\nn11953339\nn11953610\nn11953884\nn11954161\nn11954345\nn11954484\nn11954642\nn11954798\nn11955040\nn11955153\nn11955532\nn11955896\nn11956348\nn11956850\nn11957317\nn11957514\nn11957678\nn11958080\nn11958499\nn11958888\nn11959259\nn11959632\nn11959862\nn11960245\nn11960673\nn11961100\nn11961446\nn11961871\nn11962272\nn11962667\nn11962994\nn11963572\nn11963932\nn11964446\nn11964848\nn11965218\nn11965627\nn11965962\nn11966083\nn11966215\nn11966385\nn11966617\nn11966896\nn11967142\nn11967315\nn11967744\nn11967878\nn11968519\nn11968704\nn11968931\nn11969166\nn11969607\nn11969806\nn11970101\nn11970298\nn11970586\nn11971248\nn11971406\nn11971783\nn11971927\nn11972291\nn11972759\nn11972959\nn11973341\nn11973634\nn11973749\nn11974373\nn11974557\nn11974888\nn11975254\nn11976170\nn11976314\nn11976511\nn11976933\nn11977303\nn11977660\nn11977887\nn11978233\nn11978551\nn11978713\nn11978961\nn11979187\nn11979354\nn11979527\nn11979715\nn11979964\nn11980318\nn11980682\nn11981192\nn11981475\nn11982115\nn11982545\nn11982939\nn11983375\nn11983606\nn11984144\nn11984542\nn11985053\nn11985321\nn11985739\nn11985903\nn11986511\nn11986729\nn11987126\nn11987349\nn11987511\nn11988132\nn11988596\nn11988893\nn11989087\nn11989393\nn11989869\nn11990167\nn11990313\nn11990627\nn11990920\nn1
1991263\nn11991549\nn11991777\nn11992479\nn11992806\nn11993203\nn11993444\nn11993675\nn11994150\nn11995092\nn11995396\nn11996251\nn11996677\nn11997032\nn11997160\nn11997969\nn11998492\nn11998888\nn11999278\nn11999656\nn12000191\nn12001294\nn12001707\nn12001924\nn12002428\nn12002651\nn12002826\nn12003167\nn12003696\nn12004120\nn12004547\nn12004987\nn12005656\nn12006306\nn12006766\nn12006930\nn12007196\nn12007406\nn12007766\nn12008252\nn12008487\nn12008749\nn12009047\nn12009420\nn12009792\nn12010628\nn12010815\nn12011370\nn12011620\nn12012111\nn12012253\nn12012510\nn12013035\nn12013511\nn12013701\nn12014085\nn12014355\nn12014923\nn12015221\nn12015525\nn12015959\nn12016434\nn12016567\nn12016777\nn12016914\nn12017127\nn12017326\nn12017511\nn12017664\nn12017853\nn12018014\nn12018100\nn12018188\nn12018271\nn12018363\nn12018447\nn12018530\nn12018760\nn12019035\nn12019827\nn12020184\nn12020507\nn12020736\nn12020941\nn12022054\nn12022382\nn12022821\nn12023108\nn12023407\nn12023726\nn12024176\nn12024445\nn12024690\nn12024805\nn12025220\nn12026018\nn12026476\nn12026981\nn12027222\nn12027658\nn12028424\nn12029039\nn12029635\nn12030092\nn12030654\nn12030908\nn12031139\nn12031388\nn12031547\nn12031927\nn12032429\nn12032686\nn12033139\nn12033504\nn12033709\nn12034141\nn12034384\nn12034594\nn12035631\nn12035907\nn12036067\nn12036226\nn12036939\nn12037499\nn12037691\nn12038038\nn12038208\nn12038406\nn12038585\nn12038760\nn12038898\nn12039317\nn12041446\nn12043444\nn12043673\nn12043836\nn12044041\nn12044467\nn12044784\nn12045157\nn12045514\nn12045860\nn12046028\nn12046428\nn12046815\nn12047345\nn12047884\nn12048056\nn12048399\nn12048928\nn12049282\nn12049562\nn12050533\nn12050959\nn12051103\nn12051514\nn12051792\nn12052267\nn12052447\nn12052787\nn12053405\nn12053690\nn12053962\nn12054195\nn12055073\nn12055516\nn12056099\nn12056217\nn12056601\nn12056758\nn12056990\nn12057211\nn12057447\nn12057660\nn12057895\nn12058192\nn12058630\nn12058822\nn12059314\nn12059625\nn12060546\nn12061104\n
n12061380\nn12061614\nn12062105\nn12062468\nn12062626\nn12062781\nn12063211\nn12063639\nn12064389\nn12064591\nn12065316\nn12065649\nn12065777\nn12066018\nn12066261\nn12066451\nn12066630\nn12066821\nn12067029\nn12067193\nn12067433\nn12067672\nn12067817\nn12068138\nn12068432\nn12068615\nn12069009\nn12069217\nn12069679\nn12070016\nn12070381\nn12070583\nn12070712\nn12071259\nn12071477\nn12071744\nn12072210\nn12072722\nn12073217\nn12073554\nn12073991\nn12074408\nn12074867\nn12075010\nn12075151\nn12075299\nn12075830\nn12076223\nn12076577\nn12076852\nn12077244\nn12077944\nn12078172\nn12078451\nn12078747\nn12079120\nn12079523\nn12079963\nn12080395\nn12080588\nn12080820\nn12081215\nn12081649\nn12082131\nn12083113\nn12083591\nn12083847\nn12084158\nn12084400\nn12084555\nn12084890\nn12085267\nn12085664\nn12086012\nn12086192\nn12086539\nn12086778\nn12087961\nn12088223\nn12088327\nn12088495\nn12088909\nn12089320\nn12089496\nn12089846\nn12090890\nn12091213\nn12091377\nn12091550\nn12091697\nn12091953\nn12092262\nn12092417\nn12092629\nn12092930\nn12093329\nn12093600\nn12093885\nn12094244\nn12094401\nn12094612\nn12095020\nn12095281\nn12095412\nn12095543\nn12095647\nn12095934\nn12096089\nn12096395\nn12096563\nn12096674\nn12097396\nn12097556\nn12098403\nn12098524\nn12098827\nn12099342\nn12100187\nn12101870\nn12102133\nn12103680\nn12103894\nn12104104\nn12104238\nn12104501\nn12104734\nn12105125\nn12105353\nn12105828\nn12105981\nn12106134\nn12106323\nn12107002\nn12107191\nn12107710\nn12107970\nn12108432\nn12108613\nn12108871\nn12109365\nn12109827\nn12110085\nn12110236\nn12110352\nn12110475\nn12110778\nn12111238\nn12111627\nn12112008\nn12112337\nn12112609\nn12112918\nn12113195\nn12113323\nn12113657\nn12114010\nn12114590\nn12115180\nn12116058\nn12116429\nn12116734\nn12117017\nn12117235\nn12117326\nn12117695\nn12117912\nn12118414\nn12118661\nn12119099\nn12119238\nn12119390\nn12119539\nn12119717\nn12120347\nn12120578\nn12121033\nn12121187\nn12121610\nn12122442\nn12122725\nn12122918\nn12123648
\nn12123741\nn12124172\nn12124627\nn12124818\nn12125001\nn12125183\nn12125584\nn12126084\nn12126360\nn12126736\nn12127460\nn12127575\nn12127768\nn12128071\nn12128306\nn12128490\nn12129134\nn12129738\nn12129986\nn12130549\nn12131405\nn12131550\nn12132092\nn12132956\nn12133151\nn12133462\nn12133682\nn12134025\nn12134486\nn12134695\nn12134836\nn12135049\nn12135576\nn12135729\nn12135898\nn12136392\nn12136581\nn12136720\nn12137120\nn12137569\nn12137791\nn12137954\nn12138110\nn12138248\nn12138444\nn12138578\nn12139196\nn12139575\nn12139793\nn12139921\nn12140511\nn12140759\nn12140903\nn12141167\nn12141385\nn12141495\nn12142085\nn12142357\nn12142450\nn12143065\nn12143215\nn12143405\nn12143676\nn12144313\nn12144580\nn12144987\nn12145148\nn12145477\nn12146311\nn12146488\nn12146654\nn12147226\nn12147835\nn12148757\nn12150722\nn12150969\nn12151170\nn12151615\nn12152031\nn12152251\nn12152532\nn12152722\nn12153033\nn12153224\nn12153580\nn12153741\nn12153914\nn12154114\nn12154773\nn12155009\nn12155583\nn12155773\nn12156679\nn12156819\nn12157056\nn12157179\nn12157769\nn12158031\nn12158443\nn12158798\nn12159055\nn12159388\nn12159555\nn12159804\nn12159942\nn12160125\nn12160303\nn12160490\nn12160857\nn12161056\nn12161285\nn12161577\nn12161744\nn12161969\nn12162181\nn12162425\nn12162758\nn12163035\nn12163279\nn12164363\nn12164656\nn12164881\nn12165170\nn12165384\nn12165758\nn12166128\nn12166424\nn12166793\nn12166929\nn12167075\nn12167436\nn12167602\nn12168565\nn12169099\nn12170585\nn12171098\nn12171316\nn12171966\nn12172364\nn12172481\nn12172906\nn12173069\nn12173664\nn12173912\nn12174311\nn12174521\nn12174926\nn12175181\nn12175370\nn12175598\nn12176453\nn12176709\nn12176953\nn12177129\nn12177455\nn12178129\nn12178780\nn12178896\nn12179122\nn12179632\nn12180168\nn12180456\nn12180885\nn12181352\nn12181612\nn12182049\nn12182276\nn12183026\nn12183452\nn12183816\nn12184095\nn12184468\nn12184912\nn12185254\nn12185859\nn12186352\nn12186554\nn12186839\nn12187247\nn12187663\nn12187891\nn121882
89\nn12188635\nn12189429\nn12189779\nn12189987\nn12190410\nn12190869\nn12191240\nn12192132\nn12192877\nn12193334\nn12193665\nn12194147\nn12194613\nn12195391\nn12195533\nn12195734\nn12196129\nn12196336\nn12196527\nn12196694\nn12196954\nn12197359\nn12197601\nn12198286\nn12198793\nn12199266\nn12199399\nn12199790\nn12199982\nn12200143\nn12200504\nn12200905\nn12201331\nn12201580\nn12201938\nn12202936\nn12203529\nn12203699\nn12203896\nn12204032\nn12204175\nn12204730\nn12205460\nn12205694\nn12214789\nn12215022\nn12215210\nn12215579\nn12215824\nn12216215\nn12216628\nn12216968\nn12217453\nn12217851\nn12218274\nn12218490\nn12218868\nn12219668\nn12220019\nn12220496\nn12220829\nn12221191\nn12221368\nn12221522\nn12221801\nn12222090\nn12222493\nn12222900\nn12223160\nn12223569\nn12223764\nn12224978\nn12225222\nn12225349\nn12225563\nn12226932\nn12227658\nn12227909\nn12228229\nn12228387\nn12228689\nn12228886\nn12229111\nn12229651\nn12229887\nn12230540\nn12230794\nn12231192\nn12231709\nn12232114\nn12232280\nn12232851\nn12233249\nn12234318\nn12234669\nn12235051\nn12235479\nn12236160\nn12236546\nn12236768\nn12236977\nn12237152\nn12237486\nn12237641\nn12237855\nn12238756\nn12238913\nn12239240\nn12239647\nn12239880\nn12240150\nn12240477\nn12240965\nn12241192\nn12241426\nn12241880\nn12242123\nn12242409\nn12242850\nn12243109\nn12243693\nn12244153\nn12244458\nn12244650\nn12244819\nn12245319\nn12245695\nn12245885\nn12246037\nn12246232\nn12246773\nn12246941\nn12247202\nn12247407\nn12247963\nn12248141\nn12248359\nn12248574\nn12248780\nn12248941\nn12249122\nn12249294\nn12249542\nn12251001\nn12251278\nn12251740\nn12252168\nn12252383\nn12252866\nn12253229\nn12253487\nn12253664\nn12253835\nn12254168\nn12255225\nn12256112\nn12256325\nn12256522\nn12256708\nn12256920\nn12257570\nn12257725\nn12258101\nn12258885\nn12259316\nn12260799\nn12261359\nn12261571\nn12261808\nn12262018\nn12262185\nn12262553\nn12263038\nn12263204\nn12263410\nn12263588\nn12263738\nn12263987\nn12264512\nn12264786\nn12265083\nn1226
5394\nn12265600\nn12266217\nn12266528\nn12266644\nn12266796\nn12266984\nn12267133\nn12267265\nn12267411\nn12267534\nn12267677\nn12267931\nn12268246\nn12269241\nn12269406\nn12269652\nn12270027\nn12270278\nn12270460\nn12270741\nn12270946\nn12271187\nn12271451\nn12271643\nn12271933\nn12272239\nn12272432\nn12272735\nn12272883\nn12273114\nn12273344\nn12273515\nn12273768\nn12273939\nn12274151\nn12274358\nn12274630\nn12274863\nn12275131\nn12275317\nn12275489\nn12275675\nn12275888\nn12276110\nn12276314\nn12276477\nn12276628\nn12276872\nn12277150\nn12277334\nn12277578\nn12277800\nn12278107\nn12278371\nn12278650\nn12278865\nn12279060\nn12279293\nn12279458\nn12279772\nn12280060\nn12280364\nn12281241\nn12281788\nn12281974\nn12282235\nn12282527\nn12282737\nn12282933\nn12283147\nn12283395\nn12283542\nn12283790\nn12284262\nn12284821\nn12285049\nn12285195\nn12285369\nn12285512\nn12285705\nn12285900\nn12286068\nn12286197\nn12286826\nn12286988\nn12287195\nn12287642\nn12287836\nn12288005\nn12288823\nn12289310\nn12289433\nn12289585\nn12290748\nn12290975\nn12291143\nn12291459\nn12291671\nn12291959\nn12292463\nn12292877\nn12293723\nn12294124\nn12294331\nn12294542\nn12294723\nn12294871\nn12295033\nn12295237\nn12295429\nn12295796\nn12296045\nn12296432\nn12296735\nn12296929\nn12297110\nn12297280\nn12297507\nn12297846\nn12298165\nn12299640\nn12300840\nn12301180\nn12301445\nn12301613\nn12301766\nn12302071\nn12302248\nn12302565\nn12303083\nn12303462\nn12304115\nn12304286\nn12304420\nn12304703\nn12304899\nn12305089\nn12305293\nn12305475\nn12305654\nn12305819\nn12305986\nn12306089\nn12306270\nn12306717\nn12306938\nn12307076\nn12307240\nn12307756\nn12308112\nn12308447\nn12308907\nn12309277\nn12309630\nn12310021\nn12310349\nn12310638\nn12311045\nn12311224\nn12311413\nn12311579\nn12312110\nn12312728\nn12315060\nn12315245\nn12315598\nn12315999\nn12316444\nn12316572\nn12317296\nn12318378\nn12318782\nn12318965\nn12319204\nn12319414\nn12320010\nn12320414\nn12320627\nn12320806\nn12321077\nn12321395\nn12
321669\nn12321873\nn12322099\nn12322501\nn12322699\nn12323665\nn12324056\nn12324222\nn12324388\nn12324558\nn12324906\nn12325234\nn12325787\nn12327022\nn12327528\nn12327846\nn12328398\nn12328567\nn12328801\nn12329260\nn12329473\nn12330239\nn12330469\nn12330587\nn12330891\nn12331066\nn12331263\nn12331655\nn12331788\nn12332030\nn12332218\nn12332555\nn12333053\nn12333530\nn12333771\nn12333961\nn12334153\nn12334293\nn12334891\nn12335483\nn12335664\nn12335800\nn12335937\nn12336092\nn12336224\nn12336333\nn12336586\nn12336727\nn12336973\nn12337131\nn12337246\nn12337391\nn12337617\nn12337800\nn12337922\nn12338034\nn12338146\nn12338258\nn12338454\nn12338655\nn12338796\nn12338979\nn12339526\nn12339831\nn12340383\nn12340581\nn12340755\nn12341542\nn12341931\nn12342299\nn12342498\nn12342852\nn12343480\nn12343753\nn12344283\nn12344483\nn12344700\nn12344837\nn12345280\nn12345899\nn12346578\nn12346813\nn12346986\nn12347158\nn12349315\nn12349711\nn12350032\nn12350758\nn12351091\nn12351790\nn12352287\nn12352639\nn12352844\nn12352990\nn12353203\nn12353431\nn12353754\nn12355760\nn12356023\nn12356395\nn12356960\nn12357485\nn12357968\nn12358293\nn12360108\nn12360534\nn12360684\nn12360817\nn12360958\nn12361135\nn12361560\nn12361754\nn12361946\nn12362274\nn12362514\nn12362668\nn12363301\nn12363768\nn12364604\nn12364940\nn12365158\nn12365285\nn12365462\nn12365900\nn12366053\nn12366186\nn12366313\nn12366675\nn12366870\nn12367611\nn12368028\nn12368257\nn12368451\nn12369066\nn12369309\nn12369476\nn12369665\nn12369845\nn12370174\nn12370549\nn12371202\nn12371439\nn12371704\nn12372233\nn12373100\nn12373739\nn12374418\nn12374705\nn12374862\nn12375769\nn12377198\nn12377494\nn12378249\nn12378753\nn12378963\nn12379531\nn12380761\nn12381511\nn12382233\nn12382875\nn12383737\nn12383894\nn12384037\nn12384227\nn12384375\nn12384569\nn12384680\nn12384839\nn12385429\nn12385566\nn12385830\nn12386945\nn12387103\nn12387633\nn12387839\nn12388143\nn12388293\nn12388858\nn12388989\nn12389130\nn12389501\nn12389727\nn
12389932\nn12390099\nn12390314\nn12392070\nn12392549\nn12392765\nn12393269\nn12394118\nn12394328\nn12394638\nn12395068\nn12395289\nn12395463\nn12395906\nn12396091\nn12396924\nn12397431\nn12399132\nn12399384\nn12399534\nn12399656\nn12399899\nn12400489\nn12400720\nn12400924\nn12401335\nn12401684\nn12401893\nn12402051\nn12402348\nn12402596\nn12402840\nn12403075\nn12403276\nn12403513\nn12403994\nn12404729\nn12405714\nn12406304\nn12406488\nn12406715\nn12406902\nn12407079\nn12407222\nn12407396\nn12407545\nn12407715\nn12407890\nn12408077\nn12408280\nn12408466\nn12408717\nn12408873\nn12409231\nn12409470\nn12409651\nn12409840\nn12411461\nn12412355\nn12412606\nn12412987\nn12413165\nn12413301\nn12413419\nn12413642\nn12413880\nn12414035\nn12414159\nn12414329\nn12414449\nn12414818\nn12414932\nn12415595\nn12416073\nn12416423\nn12416703\nn12417836\nn12418221\nn12418507\nn12419037\nn12419878\nn12420124\nn12420535\nn12420722\nn12421137\nn12421467\nn12421683\nn12421917\nn12422129\nn12422559\nn12425281\nn12426623\nn12426749\nn12427184\nn12427391\nn12427566\nn12427757\nn12427946\nn12428076\nn12428242\nn12428412\nn12428747\nn12429352\nn12430198\nn12430471\nn12430675\nn12431434\nn12432069\nn12432356\nn12432574\nn12432707\nn12433081\nn12433178\nn12433769\nn12433952\nn12434106\nn12434483\nn12434634\nn12434775\nn12434985\nn12435152\nn12435486\nn12435649\nn12435777\nn12435965\nn12436090\nn12436907\nn12437513\nn12437769\nn12437930\nn12439154\nn12439830\nn12441183\nn12441390\nn12441552\nn12441958\nn12442548\nn12443323\nn12443736\nn12444095\nn12444898\nn12446200\nn12446519\nn12446737\nn12446908\nn12447121\nn12447346\nn12447581\nn12447891\nn12448136\nn12448361\nn12448700\nn12449296\nn12449526\nn12449784\nn12449934\nn12450344\nn12450607\nn12450840\nn12451070\nn12451240\nn12451399\nn12451566\nn12451915\nn12452256\nn12452480\nn12452673\nn12452836\nn12453018\nn12453186\nn12453714\nn12453857\nn12454159\nn12454436\nn12454556\nn12454705\nn12454793\nn12454949\nn12455950\nn12457091\nn12458550\nn12458713\
nn12458874\nn12459629\nn12460146\nn12460697\nn12460957\nn12461109\nn12461466\nn12461673\nn12462032\nn12462221\nn12462582\nn12462805\nn12463134\nn12463743\nn12463975\nn12464128\nn12464476\nn12464649\nn12465557\nn12466727\nn12467018\nn12467197\nn12467433\nn12467592\nn12468545\nn12468719\nn12469517\nn12470092\nn12470512\nn12470907\nn12472024\nn12473608\nn12473840\nn12474167\nn12474418\nn12475035\nn12475242\nn12475774\nn12476510\nn12477163\nn12477401\nn12477583\nn12477747\nn12477983\nn12478768\nn12479537\nn12480456\nn12480895\nn12481150\nn12481289\nn12481458\nn12482437\nn12482668\nn12482893\nn12483282\nn12483427\nn12483625\nn12483841\nn12484244\nn12484784\nn12485653\nn12485981\nn12486574\nn12487058\nn12488454\nn12488709\nn12489046\nn12489676\nn12489815\nn12490490\nn12491017\nn12491435\nn12491826\nn12492106\nn12492460\nn12492682\nn12492900\nn12493208\nn12493426\nn12493868\nn12494794\nn12495146\nn12495670\nn12495895\nn12496427\nn12496949\nn12497669\nn12498055\nn12498457\nn12499163\nn12499757\nn12499979\nn12500309\nn12500518\nn12500751\nn12501202\nn12504570\nn12504783\nn12505253\nn12506181\nn12506341\nn12506991\nn12507379\nn12507823\nn12508309\nn12508618\nn12508762\nn12509109\nn12509476\nn12509665\nn12509821\nn12509993\nn12510343\nn12510774\nn12511488\nn12511856\nn12512095\nn12512294\nn12512674\nn12513172\nn12513613\nn12513933\nn12514138\nn12514592\nn12514992\nn12515393\nn12515711\nn12515925\nn12516165\nn12516584\nn12516828\nn12517077\nn12517445\nn12517642\nn12518013\nn12518481\nn12519089\nn12519563\nn12520406\nn12521186\nn12521394\nn12522188\nn12522678\nn12522894\nn12523141\nn12523475\nn12523850\nn12524188\nn12525168\nn12525513\nn12525753\nn12526178\nn12526516\nn12526754\nn12527081\nn12527738\nn12528109\nn12528382\nn12528549\nn12528768\nn12528974\nn12529220\nn12529500\nn12529905\nn12530629\nn12530818\nn12531328\nn12531727\nn12532564\nn12532886\nn12533190\nn12533437\nn12534208\nn12534625\nn12534862\nn12536291\nn12537253\nn12537569\nn12538209\nn12539074\nn12539306\nn1253983
2\nn12540250\nn12540647\nn12540966\nn12541157\nn12541403\nn12542043\nn12542240\nn12543186\nn12543455\nn12543639\nn12543826\nn12544240\nn12544539\nn12545232\nn12545635\nn12545865\nn12546183\nn12546420\nn12546617\nn12546962\nn12547215\nn12547503\nn12548280\nn12548564\nn12548804\nn12549005\nn12549192\nn12549420\nn12549799\nn12550210\nn12550408\nn12551173\nn12551457\nn12552309\nn12552893\nn12553742\nn12554029\nn12554526\nn12554729\nn12554911\nn12555255\nn12555859\nn12556656\nn12557064\nn12557438\nn12557556\nn12557681\nn12558230\nn12558425\nn12558680\nn12559044\nn12559518\nn12560282\nn12560621\nn12560775\nn12561169\nn12561309\nn12561594\nn12562141\nn12562577\nn12562785\nn12563045\nn12563702\nn12564083\nn12564613\nn12565102\nn12565912\nn12566331\nn12566954\nn12567950\nn12568186\nn12568649\nn12569037\nn12569616\nn12569851\nn12570394\nn12570703\nn12570972\nn12571781\nn12572546\nn12572759\nn12572858\nn12573256\nn12573474\nn12573647\nn12573911\nn12574320\nn12574470\nn12574866\nn12575322\nn12575812\nn12576323\nn12576451\nn12576695\nn12577362\nn12577895\nn12578255\nn12578626\nn12578916\nn12579038\nn12579404\nn12579822\nn12580012\nn12580654\nn12580786\nn12580896\nn12581110\nn12582231\nn12582665\nn12582846\nn12583126\nn12583401\nn12583681\nn12583855\nn12584191\nn12584365\nn12584715\nn12585137\nn12585373\nn12585629\nn12586298\nn12586499\nn12586725\nn12586989\nn12587132\nn12587487\nn12587803\nn12588320\nn12588780\nn12589142\nn12589458\nn12589687\nn12589841\nn12590232\nn12590499\nn12590600\nn12590715\nn12591017\nn12591351\nn12591702\nn12592058\nn12592544\nn12592839\nn12593122\nn12593341\nn12593994\nn12594324\nn12594989\nn12595699\nn12595964\nn12596148\nn12596345\nn12596709\nn12596849\nn12597134\nn12597466\nn12597798\nn12598027\nn12599185\nn12599435\nn12599661\nn12599874\nn12600095\nn12600267\nn12601494\nn12601805\nn12602262\nn12602434\nn12602612\nn12602980\nn12603273\nn12603449\nn12603672\nn12604228\nn12604460\nn12604639\nn12604845\nn12605683\nn12606438\nn12606545\nn12607456\nn12609
379\nn12610328\nn12610740\nn12611640\nn12612170\nn12612811\nn12613706\nn12614096\nn12614477\nn12614625\nn12615232\nn12615710\nn12616248\nn12616630\nn12616996\nn12617559\nn12618146\nn12618727\nn12620196\nn12620546\nn12620969\nn12621410\nn12621619\nn12621945\nn12622297\nn12622875\nn12623077\nn12623211\nn12623818\nn12624381\nn12624568\nn12625003\nn12625383\nn12625670\nn12625823\nn12626674\nn12626878\nn12627119\nn12627347\nn12627526\nn12628356\nn12628705\nn12628986\nn12629305\nn12629666\nn12630763\nn12630999\nn12631331\nn12631637\nn12631932\nn12632335\nn12632733\nn12633061\nn12633638\nn12633994\nn12634211\nn12634429\nn12634734\nn12634986\nn12635151\nn12635359\nn12635532\nn12635744\nn12635955\nn12636224\nn12636885\nn12637123\nn12637485\nn12638218\nn12638556\nn12638753\nn12638964\nn12639168\nn12639376\nn12639584\nn12639736\nn12639910\nn12640081\nn12640284\nn12640435\nn12640607\nn12640839\nn12641007\nn12641180\nn12641413\nn12641931\nn12642090\nn12642200\nn12642435\nn12642600\nn12642964\nn12643113\nn12643313\nn12643473\nn12643688\nn12643877\nn12644283\nn12644902\nn12645174\nn12645530\nn12646072\nn12646197\nn12646397\nn12646605\nn12646740\nn12646950\nn12647231\nn12647376\nn12647560\nn12647787\nn12647893\nn12648045\nn12648196\nn12648424\nn12648693\nn12648888\nn12649065\nn12649317\nn12649539\nn12649866\nn12650038\nn12650229\nn12650379\nn12650556\nn12650805\nn12650915\nn12651229\nn12651611\nn12651821\nn12653218\nn12653436\nn12653633\nn12654227\nn12654857\nn12655062\nn12655245\nn12655351\nn12655498\nn12655605\nn12655726\nn12655869\nn12656369\nn12656528\nn12656685\nn12656909\nn12657082\nn12657755\nn12658118\nn12658308\nn12658481\nn12658603\nn12658715\nn12658846\nn12659064\nn12659356\nn12659539\nn12660601\nn12661045\nn12661227\nn12661538\nn12662074\nn12662379\nn12662772\nn12663023\nn12663254\nn12663359\nn12663804\nn12664005\nn12664187\nn12664469\nn12664710\nn12665048\nn12665271\nn12665659\nn12665857\nn12666050\nn12666159\nn12666369\nn12666965\nn12667406\nn12667582\nn12667964\nn126
68131\nn12669803\nn12670334\nn12670758\nn12670962\nn12671651\nn12672289\nn12673588\nn12674120\nn12674685\nn12674895\nn12675299\nn12675515\nn12675876\nn12676134\nn12676370\nn12676534\nn12676703\nn12677120\nn12677331\nn12677612\nn12677841\nn12678794\nn12679023\nn12679432\nn12679593\nn12679876\nn12680402\nn12680652\nn12680864\nn12681376\nn12681579\nn12681893\nn12682411\nn12682668\nn12682882\nn12683096\nn12683407\nn12683571\nn12683791\nn12684379\nn12685431\nn12685831\nn12686077\nn12686274\nn12686496\nn12686676\nn12686877\nn12687044\nn12687462\nn12687698\nn12687957\nn12688187\nn12688372\nn12688716\nn12689305\nn12690653\nn12691428\nn12691661\nn12692024\nn12692160\nn12692521\nn12692714\nn12693244\nn12693352\nn12693865\nn12694486\nn12695144\nn12695975\nn12696492\nn12696830\nn12697152\nn12697514\nn12698027\nn12698435\nn12698598\nn12698774\nn12699031\nn12699301\nn12699922\nn12700088\nn12700357\nn12702124\nn12703190\nn12703383\nn12703557\nn12703716\nn12703856\nn12704041\nn12704343\nn12704513\nn12705013\nn12705220\nn12705458\nn12705698\nn12705978\nn12706410\nn12707199\nn12707781\nn12708293\nn12708654\nn12708941\nn12709103\nn12709349\nn12709688\nn12709901\nn12710295\nn12710415\nn12710577\nn12710693\nn12710917\nn12711182\nn12711398\nn12711596\nn12711817\nn12711984\nn12712320\nn12712626\nn12713063\nn12713358\nn12713521\nn12713866\nn12714254\nn12714755\nn12714949\nn12715195\nn12715914\nn12716400\nn12716594\nn12717072\nn12717224\nn12717644\nn12718074\nn12718483\nn12718995\nn12719684\nn12719944\nn12720200\nn12720354\nn12721122\nn12721477\nn12722071\nn12723062\nn12723610\nn12724942\nn12725521\nn12725738\nn12725940\nn12726159\nn12726357\nn12726528\nn12726670\nn12726902\nn12727101\nn12727301\nn12727518\nn12727729\nn12727960\nn12728164\nn12728322\nn12728508\nn12728656\nn12728864\nn12729023\nn12729164\nn12729315\nn12729521\nn12729729\nn12729950\nn12730143\nn12730370\nn12730544\nn12730776\nn12731029\nn12731401\nn12731835\nn12732009\nn12732252\nn12732491\nn12732605\nn12732756\nn12732966\nn1
2733218\nn12733428\nn12733647\nn12733870\nn12734070\nn12734215\nn12735160\nn12736603\nn12736999\nn12737383\nn12737898\nn12738259\nn12739332\nn12739966\nn12740967\nn12741222\nn12741586\nn12741792\nn12742290\nn12742741\nn12742878\nn12743009\nn12743352\nn12743823\nn12743976\nn12744142\nn12744387\nn12744850\nn12745386\nn12745564\nn12746884\nn12747120\nn12748248\nn12749049\nn12749456\nn12749679\nn12749852\nn12750076\nn12750767\nn12751172\nn12751675\nn12752205\nn12753007\nn12753245\nn12753573\nn12753762\nn12754003\nn12754174\nn12754311\nn12754468\nn12754648\nn12754781\nn12754981\nn12755225\nn12755387\nn12755559\nn12755727\nn12755876\nn12756457\nn12757115\nn12757303\nn12757458\nn12757668\nn12757816\nn12757930\nn12758014\nn12758099\nn12758176\nn12758250\nn12758325\nn12758399\nn12758471\nn12758555\nn12759273\nn12759668\nn12760539\nn12760875\nn12761284\nn12761702\nn12761905\nn12762049\nn12762405\nn12762896\nn12763529\nn12764008\nn12764202\nn12764507\nn12764978\nn12765115\nn12765402\nn12765846\nn12766043\nn12766595\nn12766869\nn12767208\nn12767423\nn12767648\nn12768369\nn12768682\nn12768809\nn12768933\nn12769065\nn12769219\nn12769318\nn12770529\nn12770892\nn12771085\nn12771192\nn12771390\nn12771597\nn12771890\nn12772753\nn12772908\nn12773142\nn12773651\nn12773917\nn12774299\nn12774641\nn12775070\nn12775393\nn12775717\nn12775919\nn12776558\nn12776774\nn12777436\nn12777680\nn12777778\nn12777892\nn12778398\nn12778605\nn12779603\nn12779851\nn12780325\nn12780563\nn12781940\nn12782530\nn12782915\nn12783316\nn12783730\nn12784371\nn12784889\nn12785724\nn12785889\nn12786273\nn12786464\nn12786836\nn12787364\nn12788854\nn12789054\nn12789554\nn12789977\nn12790430\nn12791064\nn12791329\nn12793015\nn12793284\nn12793494\nn12793695\nn12793886\nn12794135\nn12794367\nn12794568\nn12794985\nn12795209\nn12795352\nn12795555\nn12796022\nn12796385\nn12796849\nn12797368\nn12797860\nn12798284\nn12798910\nn12799269\nn12799776\nn12800049\nn12800586\nn12801072\nn12801520\nn12801781\nn12801966\nn12803226\n
n12803754\nn12803958\nn12804352\nn12805146\nn12805561\nn12805762\nn12806015\nn12806732\nn12807251\nn12807409\nn12807624\nn12807773\nn12808007\nn12809868\nn12810007\nn12810151\nn12810595\nn12811027\nn12811713\nn12812235\nn12812478\nn12812801\nn12813189\nn12814643\nn12814857\nn12814960\nn12815198\nn12815668\nn12815838\nn12816508\nn12816942\nn12817464\nn12817694\nn12817855\nn12818004\nn12818346\nn12818601\nn12818966\nn12819141\nn12819354\nn12819728\nn12820113\nn12820669\nn12820853\nn12821505\nn12821895\nn12822115\nn12822466\nn12822769\nn12822955\nn12823717\nn12823859\nn12824053\nn12824289\nn12824735\nn12825497\nn12826143\nn12827270\nn12827537\nn12827907\nn12828220\nn12828379\nn12828520\nn12828791\nn12828977\nn12829582\nn12829975\nn12830222\nn12830568\nn12831141\nn12831535\nn12831932\nn12832315\nn12832538\nn12832822\nn12833149\nn12833985\nn12834190\nn12834798\nn12834938\nn12835331\nn12835766\nn12836212\nn12836337\nn12836508\nn12836862\nn12837052\nn12837259\nn12837466\nn12837803\nn12839574\nn12839979\nn12840168\nn12840362\nn12840502\nn12840749\nn12841007\nn12841193\nn12841354\nn12842302\nn12842519\nn12842642\nn12842887\nn12843144\nn12843316\nn12843557\nn12843970\nn12844409\nn12844939\nn12845187\nn12845413\nn12845908\nn12846335\nn12846690\nn12847008\nn12847374\nn12847927\nn12848499\nn12849061\nn12849279\nn12849416\nn12849952\nn12850168\nn12850336\nn12850906\nn12851094\nn12851469\nn12851860\nn12852234\nn12852428\nn12852570\nn12853080\nn12853287\nn12853482\nn12854048\nn12854193\nn12854600\nn12855365\nn12855494\nn12855710\nn12855886\nn12856091\nn12856287\nn12856479\nn12856680\nn12857204\nn12857779\nn12858150\nn12858397\nn12858618\nn12858871\nn12858987\nn12859153\nn12859272\nn12859679\nn12859986\nn12860365\nn12860978\nn12861345\nn12861541\nn12861892\nn12862512\nn12862828\nn12863234\nn12863624\nn12864160\nn12865037\nn12865562\nn12865708\nn12865824\nn12866002\nn12866162\nn12866333\nn12866459\nn12866635\nn12866968\nn12867184\nn12867449\nn12867826\nn12868019\nn12868880\nn12869061
\nn12869478\nn12869668\nn12870048\nn12870225\nn12870535\nn12870682\nn12870891\nn12871272\nn12871696\nn12871859\nn12872458\nn12872914\nn12873341\nn12873984\nn12875269\nn12875697\nn12875861\nn12876899\nn12877244\nn12877493\nn12877637\nn12877838\nn12878169\nn12878325\nn12878784\nn12879068\nn12879527\nn12879963\nn12880244\nn12880462\nn12880638\nn12880799\nn12881105\nn12881913\nn12882158\nn12882779\nn12882945\nn12883265\nn12883628\nn12884100\nn12884260\nn12885045\nn12885265\nn12885510\nn12885754\nn12886185\nn12886402\nn12886600\nn12886831\nn12887293\nn12887532\nn12887713\nn12888016\nn12888234\nn12888457\nn12889219\nn12889412\nn12889579\nn12889713\nn12890265\nn12890490\nn12890685\nn12890928\nn12891093\nn12891305\nn12891469\nn12891643\nn12891824\nn12892013\nn12893463\nn12893993\nn12895298\nn12895811\nn12896615\nn12897118\nn12897788\nn12897999\nn12898342\nn12898774\nn12899166\nn12899537\nn12899752\nn12899971\nn12900783\nn12901724\nn12902466\nn12902662\nn12903014\nn12903367\nn12903503\nn12903964\nn12904314\nn12904562\nn12904938\nn12905135\nn12905412\nn12906214\nn12906498\nn12906771\nn12907057\nn12907671\nn12907857\nn12908093\nn12908645\nn12908854\nn12909421\nn12909614\nn12909759\nn12909917\nn12911079\nn12911264\nn12911440\nn12911673\nn12911914\nn12912274\nn12912670\nn12912801\nn12913144\nn12913524\nn12913791\nn12914923\nn12915140\nn12915568\nn12915811\nn12916179\nn12916511\nn12917901\nn12918609\nn12918810\nn12918991\nn12919195\nn12919403\nn12919646\nn12919847\nn12920043\nn12920204\nn12920521\nn12920719\nn12920955\nn12921315\nn12921499\nn12921660\nn12921868\nn12922119\nn12922458\nn12922763\nn12923108\nn12923257\nn12924623\nn12925179\nn12925583\nn12926039\nn12926480\nn12926689\nn12927013\nn12927194\nn12927494\nn12927758\nn12928071\nn12928307\nn12928491\nn12928819\nn12929403\nn12929600\nn12930778\nn12930951\nn12931231\nn12931542\nn12931906\nn12932173\nn12932365\nn12932706\nn12932966\nn12933274\nn12934036\nn12934174\nn12934479\nn12934685\nn12934985\nn12935166\nn12935609\nn129361
55\nn12936826\nn12937130\nn12938081\nn12938193\nn12938445\nn12938667\nn12939104\nn12939282\nn12939479\nn12939874\nn12940226\nn12940609\nn12941220\nn12941536\nn12941717\nn12942025\nn12942395\nn12942572\nn12942729\nn12943049\nn12943443\nn12943912\nn12944095\nn12945177\nn12945366\nn12945549\nn12946849\nn12947313\nn12947544\nn12947756\nn12947895\nn12948053\nn12948251\nn12948495\nn12949160\nn12949361\nn12950126\nn12950314\nn12950796\nn12951146\nn12951835\nn12952165\nn12952469\nn12952590\nn12952717\nn12953206\nn12953484\nn12953712\nn12954353\nn12954799\nn12955414\nn12955840\nn12956170\nn12956367\nn12956588\nn12956922\nn12957608\nn12957803\nn12957924\nn12958261\nn12958615\nn12959074\nn12959538\nn12960378\nn12960552\nn12960863\nn12961242\nn12961393\nn12961536\nn12961879\nn12963628\nn12964920\nn12965626\nn12965951\nn12966804\nn12966945\nn12968136\nn12968309\nn12969131\nn12969425\nn12969670\nn12969927\nn12970193\nn12970293\nn12970733\nn12971400\nn12971804\nn12972136\nn12973443\nn12973791\nn12973937\nn12974987\nn12975804\nn12976198\nn12976554\nn12978076\nn12979316\nn12979829\nn12980080\nn12980840\nn12981086\nn12981301\nn12981443\nn12981954\nn12982468\nn12982590\nn12982915\nn12983048\nn12983654\nn12983873\nn12983961\nn12984267\nn12984489\nn12984595\nn12985420\nn12985773\nn12985857\nn12986227\nn12987056\nn12987423\nn12987535\nn12988158\nn12988341\nn12988572\nn12989007\nn12989938\nn12990597\nn12991184\nn12991837\nn12992177\nn12992868\nn12994892\nn12995601\nn12997654\nn12997919\nn12998815\nn13000891\nn13001041\nn13001206\nn13001366\nn13001529\nn13001930\nn13002209\nn13002750\nn13002925\nn13003061\nn13003254\nn13003522\nn13003712\nn13004423\nn13004640\nn13004826\nn13004992\nn13005329\nn13005984\nn13006171\nn13006631\nn13006894\nn13007034\nn13007417\nn13007629\nn13008157\nn13008315\nn13008485\nn13008689\nn13008839\nn13009085\nn13009244\nn13009429\nn13009656\nn13010694\nn13010951\nn13011221\nn13011595\nn13012253\nn13012469\nn13012973\nn13013534\nn13013764\nn13013965\nn13014097\nn1301
4265\nn13014409\nn13014581\nn13014741\nn13014879\nn13015509\nn13015688\nn13016076\nn13016289\nn13017102\nn13017240\nn13017439\nn13017610\nn13017789\nn13017979\nn13018088\nn13018232\nn13018407\nn13018906\nn13019496\nn13019643\nn13019835\nn13020191\nn13020481\nn13020964\nn13021166\nn13021332\nn13021543\nn13021689\nn13021867\nn13022210\nn13022709\nn13022903\nn13023134\nn13024012\nn13024500\nn13024653\nn13025647\nn13025854\nn13026015\nn13027557\nn13027879\nn13028611\nn13028937\nn13029122\nn13029326\nn13029610\nn13029760\nn13030337\nn13030616\nn13030852\nn13031193\nn13031323\nn13031474\nn13032115\nn13032381\nn13032618\nn13032923\nn13033134\nn13033396\nn13033577\nn13033879\nn13034062\nn13034555\nn13034788\nn13035241\nn13035389\nn13035707\nn13035925\nn13036116\nn13036312\nn13036804\nn13037406\nn13037585\nn13037805\nn13038068\nn13038376\nn13038577\nn13038744\nn13039349\nn13040303\nn13040629\nn13040796\nn13041312\nn13041943\nn13042134\nn13042316\nn13042982\nn13043926\nn13044375\nn13044778\nn13045210\nn13045594\nn13045975\nn13046130\nn13046669\nn13047862\nn13048447\nn13049953\nn13050397\nn13050705\nn13050940\nn13051346\nn13052014\nn13052248\nn13052670\nn13052931\nn13053608\nn13054073\nn13054560\nn13055423\nn13055577\nn13055792\nn13055949\nn13056135\nn13056349\nn13056607\nn13056799\nn13057054\nn13057242\nn13057422\nn13057639\nn13058037\nn13058272\nn13058608\nn13059298\nn13059657\nn13060017\nn13060190\nn13061172\nn13061348\nn13061471\nn13061704\nn13062421\nn13063269\nn13063514\nn13064111\nn13064457\nn13065089\nn13065514\nn13066129\nn13066448\nn13066979\nn13067191\nn13067330\nn13067532\nn13067672\nn13068255\nn13068434\nn13068735\nn13068917\nn13069224\nn13069773\nn13070308\nn13070875\nn13071371\nn13071553\nn13071815\nn13072031\nn13072209\nn13072350\nn13072528\nn13072706\nn13072863\nn13073055\nn13073703\nn13074619\nn13074814\nn13075020\nn13075272\nn13075441\nn13075684\nn13075847\nn13076041\nn13076405\nn13076643\nn13076831\nn13077033\nn13077295\nn13078021\nn13079073\nn13079419\nn13
079567\nn13080306\nn13080866\nn13081229\nn13081999\nn13082568\nn13083023\nn13083461\nn13084184\nn13084834\nn13085113\nn13085747\nn13090018\nn13090871\nn13091620\nn13091774\nn13091982\nn13092078\nn13092240\nn13092385\nn13092987\nn13093275\nn13093629\nn13094145\nn13094273\nn13095013\nn13096779\nn13098515\nn13098962\nn13099833\nn13099999\nn13100156\nn13100677\nn13102648\nn13102775\nn13103023\nn13103660\nn13103750\nn13103877\nn13104059\nn13107694\nn13107807\nn13107891\nn13108131\nn13108323\nn13108481\nn13108545\nn13108662\nn13108841\nn13109733\nn13110915\nn13111174\nn13111340\nn13111504\nn13111881\nn13112035\nn13112201\nn13118330\nn13118707\nn13119870\nn13120211\nn13120958\nn13121104\nn13121349\nn13122364\nn13123309\nn13123431\nn13123841\nn13124358\nn13124654\nn13125117\nn13126050\nn13126856\nn13127001\nn13127303\nn13127666\nn13127843\nn13128278\nn13128582\nn13128976\nn13129078\nn13130014\nn13130161\nn13130726\nn13131028\nn13131618\nn13132034\nn13132156\nn13132338\nn13132486\nn13132656\nn13132756\nn13132940\nn13133140\nn13133233\nn13133316\nn13133613\nn13133932\nn13134302\nn13134531\nn13134844\nn13134947\nn13135692\nn13135832\nn13136316\nn13136556\nn13136781\nn13137010\nn13137225\nn13137409\nn13137672\nn13137951\nn13138155\nn13138308\nn13138658\nn13138842\nn13139055\nn13139321\nn13139482\nn13139647\nn13139837\nn13140049\nn13140367\nn13141141\nn13141415\nn13141564\nn13141797\nn13141972\nn13142182\nn13142504\nn13142907\nn13143285\nn13143758\nn13144084\nn13145040\nn13145250\nn13145444\nn13146403\nn13146583\nn13146928\nn13147153\nn13147270\nn13147386\nn13147532\nn13147689\nn13147918\nn13148208\nn13148384\nn13149296\nn13149970\nn13150378\nn13150592\nn13150894\nn13151082\nn13152339\nn13154388\nn13154494\nn13154841\nn13155095\nn13155305\nn13155611\nn13156986\nn13157137\nn13157346\nn13157481\nn13157684\nn13157971\nn13158167\nn13158512\nn13158605\nn13158714\nn13158815\nn13159357\nn13159691\nn13159890\nn13160116\nn13160254\nn13160365\nn13160604\nn13160831\nn13160938\nn13161151\nn
13161254\nn13161904\nn13163553\nn13163649\nn13163991\nn13164501\nn13170840\nn13171210\nn13171797\nn13172923\nn13173132\nn13173259\nn13173488\nn13173697\nn13173882\nn13174354\nn13174670\nn13174823\nn13175682\nn13176363\nn13176714\nn13177048\nn13177529\nn13177768\nn13177884\nn13178284\nn13178707\nn13179056\nn13179804\nn13180534\nn13180875\nn13181055\nn13181244\nn13181406\nn13181811\nn13182164\nn13182338\nn13182799\nn13182937\nn13183056\nn13183489\nn13184394\nn13185269\nn13185658\nn13186388\nn13186546\nn13187367\nn13188096\nn13188268\nn13188462\nn13188767\nn13190060\nn13190747\nn13191148\nn13191620\nn13191884\nn13192625\nn13193143\nn13193269\nn13193466\nn13193642\nn13193856\nn13194036\nn13194212\nn13194572\nn13194758\nn13194918\nn13195341\nn13195761\nn13196003\nn13196234\nn13196369\nn13196738\nn13197274\nn13197507\nn13198054\nn13198482\nn13198914\nn13199717\nn13199970\nn13200193\nn13200542\nn13200651\nn13200986\nn13201423\nn13201566\nn13201969\nn13202125\nn13202355\nn13202602\nn13205058\nn13205249\nn13206178\nn13206817\nn13207094\nn13207335\nn13207572\nn13207736\nn13207923\nn13208302\nn13208705\nn13208965\nn13209129\nn13209270\nn13209460\nn13209808\nn13210350\nn13210597\nn13211020\nn13211790\nn13212025\nn13212175\nn13212379\nn13212559\nn13213066\nn13213397\nn13213577\nn13214217\nn13214340\nn13214485\nn13215258\nn13215586\nn13217005\nn13219422\nn13219833\nn13219976\nn13220122\nn13220355\nn13220525\nn13220663\nn13221529\nn13222877\nn13222985\nn13223090\nn13223588\nn13223710\nn13223843\nn13224673\nn13224922\nn13225244\nn13225365\nn13225617\nn13226320\nn13226871\nn13228017\nn13228536\nn13229543\nn13229951\nn13230190\nn13230662\nn13230843\nn13231078\nn13231678\nn13231919\nn13232106\nn13232363\nn13232779\nn13233727\nn13234114\nn13234519\nn13234678\nn13234857\nn13235011\nn13235159\nn13235319\nn13235503\nn13235766\nn13236100\nn13237188\nn13237508\nn13238375\nn13238654\nn13238988\nn13239177\nn13239736\nn13239921\nn13240362\nn13252672\nn13354021\nn13555775\nn13579829\nn13650447\
nn13653902\nn13862407\nn13862552\nn13862780\nn13863020\nn13863186\nn13863473\nn13863771\nn13864035\nn13864153\nn13864965\nn13865298\nn13865483\nn13865904\nn13866144\nn13866626\nn13866827\nn13867005\nn13867492\nn13868248\nn13868371\nn13868515\nn13868944\nn13869045\nn13869547\nn13869788\nn13869896\nn13871717\nn13872592\nn13872822\nn13873361\nn13873502\nn13873917\nn13874073\nn13874558\nn13875392\nn13875571\nn13875884\nn13876561\nn13877547\nn13877667\nn13878306\nn13879049\nn13879320\nn13879816\nn13880199\nn13880415\nn13880551\nn13880704\nn13880994\nn13881512\nn13881644\nn13882201\nn13882276\nn13882487\nn13882563\nn13882639\nn13882713\nn13882961\nn13883603\nn13883763\nn13884261\nn13884384\nn13884930\nn13885011\nn13886260\nn13888491\nn13889066\nn13889331\nn13891547\nn13891937\nn13893786\nn13894154\nn13894434\nn13895262\nn13896100\nn13896217\nn13897198\nn13897528\nn13897996\nn13898207\nn13898315\nn13898645\nn13899735\nn13900287\nn13900422\nn13901211\nn13901321\nn13901423\nn13901490\nn13901858\nn13902048\nn13902336\nn13902793\nn13903079\nn13905121\nn13905275\nn13905792\nn13906484\nn13906669\nn13906767\nn13906936\nn13907272\nn13908201\nn13908580\nn13911045\nn13912260\nn13912540\nn13914141\nn13914265\nn13914608\nn13915023\nn13915113\nn13915209\nn13915305\nn13915999\nn13916363\nn13916721\nn13917690\nn13917785\nn13918274\nn13918387\nn13918717\nn13919547\nn13919919\nn13926786\nn14131950\nn14175579\nn14564779\nn14582716\nn14583400\nn14585392\nn14592309\nn14603798\nn14633206\nn14685296\nn14696793\nn14698884\nn14714645\nn14720833\nn14765422\nn14785065\nn14786943\nn14804958\nn14810561\nn14820180\nn14821852\nn14844693\nn14853210\nn14858292\nn14867545\nn14891255\nn14899328\nn14900184\nn14900342\nn14908027\nn14909584\nn14914945\nn14915184\nn14919819\nn14938389\nn14941787\nn14942411\nn14973585\nn14974264\nn14975598\nn14976759\nn14976871\nn14977188\nn14977504\nn14992287\nn14993378\nn15005577\nn15006012\nn15019030\nn15048888\nn15060326\nn15060688\nn15062057\nn15067877\nn15075141\nn1508624
7\nn15089258\nn15089472\nn15089645\nn15089803\nn15090065\nn15090238\nn15090742\nn15091129\nn15091304\nn15091473\nn15091669\nn15091846\nn15092059\nn15092227\nn15092409\nn15092650\nn15092751\nn15092942\nn15093049\nn15093137\nn15093298\nn15102359\nn15102455\nn15102894\n"
  },
  {
    "path": "timm/data/_info/imagenet22k_to_12k_indices.txt",
    "content": "1\n3\n4\n5\n6\n7\n8\n9\n10\n11\n13\n14\n15\n16\n17\n18\n19\n20\n21\n23\n24\n26\n27\n28\n29\n30\n31\n32\n33\n34\n37\n38\n41\n43\n44\n45\n46\n47\n48\n49\n50\n51\n53\n55\n56\n57\n58\n60\n61\n62\n63\n64\n65\n66\n67\n68\n69\n70\n71\n72\n73\n74\n75\n76\n77\n78\n79\n80\n81\n82\n83\n84\n85\n86\n89\n90\n91\n93\n94\n95\n96\n97\n99\n100\n101\n102\n103\n105\n107\n108\n109\n110\n111\n112\n113\n114\n115\n116\n117\n118\n119\n120\n121\n122\n123\n124\n125\n126\n127\n128\n129\n130\n131\n132\n133\n134\n135\n137\n138\n140\n141\n142\n143\n144\n146\n147\n148\n149\n151\n152\n153\n154\n156\n157\n158\n159\n161\n162\n164\n165\n166\n167\n168\n169\n170\n171\n172\n173\n175\n176\n179\n180\n181\n182\n184\n188\n192\n193\n195\n196\n197\n199\n200\n203\n206\n209\n210\n211\n212\n213\n214\n215\n216\n217\n218\n219\n220\n221\n222\n223\n224\n225\n226\n227\n230\n231\n235\n249\n250\n251\n252\n253\n254\n289\n292\n295\n301\n306\n307\n312\n313\n315\n317\n320\n324\n325\n326\n327\n332\n341\n343\n347\n352\n353\n354\n356\n359\n360\n366\n367\n368\n369\n370\n377\n379\n380\n382\n383\n384\n385\n386\n392\n395\n398\n402\n405\n408\n410\n411\n413\n415\n416\n418\n422\n423\n424\n430\n431\n440\n441\n451\n452\n455\n456\n457\n460\n461\n464\n465\n466\n468\n469\n470\n471\n472\n473\n474\n475\n477\n479\n482\n486\n489\n490\n491\n492\n493\n496\n499\n500\n502\n503\n505\n510\n511\n512\n513\n514\n515\n516\n520\n523\n524\n525\n526\n527\n528\n529\n530\n533\n536\n538\n539\n540\n541\n542\n543\n544\n545\n546\n547\n548\n549\n550\n552\n553\n554\n555\n556\n557\n558\n559\n560\n561\n562\n563\n564\n566\n567\n568\n569\n570\n571\n572\n573\n574\n575\n576\n577\n578\n580\n581\n583\n584\n585\n586\n587\n588\n589\n590\n591\n592\n595\n596\n598\n601\n602\n603\n604\n605\n607\n608\n609\n610\n611\n612\n613\n614\n615\n616\n618\n619\n620\n621\n623\n624\n628\n629\n630\n631\n632\n634\n635\n636\n637\n638\n639\n640\n641\n643\n644\n645\n646\n647\n648\n649\n650\n651\n653\n654\n655\n656\n657\n658\n659\n660\n661\n663\n664\n665\n666\n667\n668\n669\n670\n
671\n672\n673\n674\n675\n677\n678\n679\n680\n681\n682\n683\n684\n685\n686\n687\n688\n689\n691\n692\n693\n694\n695\n696\n697\n698\n700\n701\n702\n703\n704\n705\n706\n707\n708\n710\n711\n713\n714\n715\n716\n717\n718\n719\n720\n721\n722\n723\n727\n728\n730\n732\n733\n734\n736\n737\n738\n739\n740\n741\n742\n743\n744\n745\n746\n747\n748\n749\n751\n752\n753\n755\n757\n758\n759\n761\n762\n763\n764\n765\n766\n767\n768\n769\n770\n773\n774\n775\n776\n777\n778\n780\n781\n782\n783\n784\n785\n786\n787\n789\n790\n791\n792\n794\n796\n798\n799\n801\n804\n805\n807\n808\n809\n810\n811\n812\n813\n816\n817\n818\n822\n823\n824\n825\n826\n827\n828\n829\n830\n831\n832\n833\n834\n835\n836\n838\n839\n840\n841\n842\n843\n845\n846\n847\n848\n849\n850\n851\n852\n853\n854\n855\n856\n857\n858\n861\n862\n863\n864\n865\n866\n867\n868\n869\n870\n871\n872\n873\n874\n875\n876\n877\n878\n879\n880\n881\n882\n883\n884\n885\n886\n887\n888\n889\n891\n892\n894\n895\n896\n897\n899\n900\n901\n903\n904\n905\n908\n909\n910\n912\n913\n916\n919\n920\n922\n925\n931\n932\n933\n934\n935\n936\n939\n941\n944\n945\n946\n947\n949\n950\n951\n952\n953\n954\n955\n958\n960\n961\n963\n964\n968\n969\n970\n971\n976\n979\n983\n986\n990\n991\n992\n993\n994\n995\n996\n997\n998\n999\n1000\n1001\n1002\n1003\n1004\n1005\n1006\n1007\n1008\n1009\n1010\n1011\n1012\n1013\n1014\n1015\n1016\n1017\n1019\n1022\n1024\n1025\n1027\n1029\n1030\n1031\n1032\n1035\n1036\n1037\n1038\n1039\n1040\n1041\n1043\n1044\n1045\n1046\n1047\n1048\n1050\n1051\n1052\n1055\n1056\n1063\n1064\n1065\n1067\n1069\n1070\n1071\n1072\n1075\n1076\n1078\n1079\n1080\n1081\n1083\n1084\n1085\n1086\n1087\n1088\n1089\n1092\n1093\n1094\n1095\n1097\n1099\n1106\n1121\n1140\n1141\n1143\n1144\n1145\n1147\n1148\n1149\n1150\n1151\n1152\n1155\n1157\n1159\n1160\n1161\n1164\n1165\n1166\n1167\n1168\n1169\n1170\n1171\n1172\n1173\n1178\n1179\n1180\n1181\n1182\n1184\n1187\n1190\n1191\n1193\n1195\n1196\n1197\n1199\n1200\n1201\n1202\n1203\n1204\n1205\n1207\n1208\n1209\n1211\n1214\n1215\n1216
\n1217\n1218\n1219\n1220\n1221\n1222\n1223\n1224\n1225\n1227\n1229\n1230\n1231\n1232\n1233\n1234\n1235\n1236\n1237\n1238\n1239\n1240\n1241\n1242\n1244\n1245\n1246\n1247\n1249\n1250\n1251\n1252\n1253\n1254\n1256\n1257\n1258\n1259\n1260\n1261\n1263\n1265\n1266\n1267\n1268\n1269\n1271\n1272\n1273\n1274\n1277\n1279\n1283\n1287\n1289\n1298\n1299\n1303\n1304\n1305\n1308\n1313\n1318\n1320\n1323\n1324\n1325\n1326\n1327\n1328\n1330\n1332\n1333\n1335\n1337\n1339\n1340\n1341\n1342\n1343\n1344\n1345\n1349\n1350\n1351\n1352\n1353\n1354\n1355\n1356\n1357\n1358\n1359\n1362\n1364\n1369\n1372\n1373\n1376\n1377\n1378\n1380\n1382\n1384\n1385\n1386\n1387\n1388\n1389\n1390\n1391\n1392\n1393\n1396\n1397\n1398\n1399\n1402\n1404\n1405\n1406\n1407\n1408\n1409\n1411\n1412\n1413\n1416\n1417\n1420\n1424\n1425\n1426\n1427\n1428\n1429\n1430\n1431\n1432\n1433\n1434\n1435\n1436\n1437\n1439\n1440\n1442\n1443\n1445\n1446\n1448\n1450\n1452\n1454\n1455\n1457\n1458\n1459\n1460\n1461\n1462\n1463\n1464\n1466\n1469\n1470\n1474\n1475\n1476\n1477\n1482\n1485\n1486\n1487\n1488\n1489\n1491\n1493\n1494\n1495\n1496\n1497\n1499\n1500\n1502\n1503\n1504\n1505\n1506\n1508\n1509\n1511\n1512\n1513\n1514\n1515\n1516\n1517\n1518\n1519\n1520\n1521\n1522\n1523\n1524\n1525\n1526\n1527\n1528\n1529\n1530\n1531\n1532\n1533\n1534\n1535\n1536\n1537\n1538\n1539\n1540\n1541\n1542\n1543\n1544\n1545\n1546\n1547\n1548\n1549\n1550\n1551\n1552\n1553\n1554\n1555\n1556\n1557\n1558\n1559\n1560\n1561\n1562\n1563\n1564\n1565\n1566\n1567\n1568\n1569\n1570\n1571\n1572\n1573\n1574\n1575\n1576\n1577\n1578\n1582\n1583\n1584\n1586\n1587\n1588\n1589\n1590\n1591\n1592\n1594\n1595\n1597\n1598\n1599\n1600\n1603\n1604\n1605\n1611\n1614\n1615\n1616\n1622\n1624\n1626\n1627\n1628\n1629\n1630\n1631\n1632\n1633\n1634\n1636\n1643\n1644\n1652\n1656\n1659\n1662\n1663\n1665\n1667\n1668\n1669\n1671\n1672\n1679\n1681\n1688\n1692\n1693\n1694\n1695\n1696\n1697\n1698\n1700\n1701\n1702\n1703\n1704\n1709\n1712\n1716\n1729\n1739\n1742\n1747\n1748\n1750\n1754\n1755\n
1757\n1758\n1759\n1760\n1761\n1762\n1764\n1767\n1770\n1771\n1773\n1774\n1777\n1778\n1779\n1782\n1783\n1784\n1786\n1787\n1788\n1789\n1790\n1791\n1792\n1793\n1795\n1797\n1798\n1799\n1800\n1803\n1806\n1808\n1809\n1810\n1811\n1814\n1815\n1822\n1824\n1825\n1827\n1831\n1833\n1835\n1836\n1837\n1841\n1842\n1847\n1848\n1850\n1852\n1853\n1854\n1856\n1859\n1860\n1861\n1862\n1864\n1865\n1867\n1874\n1876\n1877\n1878\n1881\n1884\n1891\n1892\n1893\n1895\n1896\n1897\n1898\n1899\n1900\n1901\n1902\n1903\n1904\n1905\n1906\n1907\n1908\n1909\n1910\n1911\n1912\n1913\n1914\n1915\n1916\n1917\n1918\n1919\n1920\n1921\n1922\n1923\n1924\n1925\n1926\n1927\n1928\n1929\n1930\n1931\n1932\n1933\n1934\n1935\n1936\n1937\n1938\n1939\n1940\n1942\n1943\n1944\n1945\n1946\n1947\n1948\n1949\n1950\n1951\n1952\n1953\n1954\n1956\n1959\n1961\n1962\n1963\n1964\n1965\n1966\n1967\n1968\n1969\n1970\n1971\n1972\n1973\n1974\n1975\n1976\n1977\n1978\n1979\n1980\n1981\n1982\n1983\n1984\n1985\n1986\n1987\n1988\n1990\n1992\n1993\n1995\n1996\n1997\n1998\n1999\n2001\n2002\n2004\n2005\n2007\n2008\n2009\n2010\n2011\n2014\n2016\n2017\n2018\n2019\n2021\n2022\n2023\n2026\n2028\n2029\n2030\n2031\n2032\n2033\n2034\n2035\n2036\n2037\n2038\n2039\n2040\n2041\n2042\n2043\n2044\n2045\n2046\n2047\n2048\n2049\n2050\n2051\n2052\n2053\n2054\n2055\n2056\n2058\n2060\n2061\n2062\n2063\n2064\n2065\n2067\n2068\n2069\n2070\n2071\n2072\n2073\n2074\n2075\n2076\n2077\n2078\n2079\n2080\n2081\n2082\n2083\n2084\n2085\n2087\n2088\n2090\n2093\n2094\n2095\n2096\n2100\n2101\n2102\n2103\n2104\n2106\n2107\n2108\n2109\n2110\n2112\n2113\n2114\n2118\n2119\n2120\n2121\n2122\n2123\n2124\n2128\n2129\n2130\n2132\n2134\n2135\n2137\n2138\n2139\n2140\n2141\n2142\n2143\n2144\n2145\n2146\n2147\n2148\n2149\n2150\n2151\n2152\n2153\n2154\n2155\n2156\n2158\n2159\n2163\n2164\n2165\n2167\n2168\n2169\n2172\n2173\n2174\n2176\n2177\n2178\n2180\n2181\n2182\n2183\n2184\n2185\n2187\n2188\n2189\n2190\n2191\n2192\n2193\n2195\n2198\n2199\n2200\n2203\n2206\n2207\n2208\n2209\n2210\n22
11\n2212\n2213\n2214\n2216\n2217\n2219\n2220\n2221\n2222\n2223\n2224\n2225\n2226\n2227\n2228\n2229\n2230\n2231\n2232\n2233\n2234\n2236\n2237\n2238\n2239\n2240\n2241\n2242\n2243\n2244\n2245\n2246\n2247\n2248\n2249\n2250\n2251\n2252\n2253\n2255\n2256\n2257\n2258\n2259\n2260\n2261\n2262\n2263\n2264\n2265\n2266\n2267\n2268\n2269\n2270\n2271\n2272\n2273\n2274\n2275\n2276\n2278\n2279\n2280\n2281\n2282\n2283\n2285\n2287\n2288\n2289\n2291\n2292\n2293\n2294\n2295\n2296\n2297\n2298\n2299\n2300\n2301\n2302\n2303\n2304\n2305\n2306\n2307\n2308\n2309\n2310\n2311\n2312\n2313\n2314\n2315\n2316\n2317\n2318\n2319\n2320\n2321\n2322\n2326\n2328\n2329\n2330\n2331\n2332\n2334\n2335\n2336\n2337\n2338\n2339\n2340\n2341\n2342\n2343\n2344\n2345\n2347\n2348\n2349\n2350\n2351\n2352\n2353\n2356\n2357\n2358\n2359\n2360\n2362\n2363\n2364\n2365\n2368\n2369\n2370\n2372\n2374\n2377\n2380\n2381\n2382\n2383\n2385\n2386\n2387\n2388\n2389\n2390\n2391\n2392\n2393\n2395\n2396\n2397\n2398\n2399\n2400\n2401\n2402\n2403\n2404\n2405\n2407\n2408\n2409\n2410\n2411\n2412\n2413\n2416\n2417\n2419\n2420\n2421\n2422\n2423\n2424\n2425\n2426\n2427\n2428\n2430\n2431\n2432\n2433\n2434\n2436\n2437\n2438\n2439\n2441\n2444\n2445\n2447\n2448\n2449\n2450\n2452\n2453\n2454\n2456\n2459\n2461\n2463\n2465\n2469\n2470\n2471\n2472\n2473\n2474\n2494\n2495\n2497\n2498\n2499\n2500\n2505\n2509\n2512\n2513\n2515\n2519\n2520\n2522\n2523\n2525\n2526\n2528\n2530\n2531\n2532\n2533\n2534\n2536\n2537\n2538\n2540\n2542\n2544\n2545\n2547\n2548\n2549\n2557\n2558\n2561\n2562\n2563\n2565\n2567\n2568\n2569\n2570\n2571\n2572\n2573\n2578\n2587\n2588\n2589\n2590\n2595\n2597\n2598\n2609\n2612\n2613\n2615\n2616\n2617\n2618\n2625\n2626\n2627\n2628\n2630\n2631\n2635\n2638\n2639\n2641\n2642\n2644\n2645\n2649\n2654\n2655\n2656\n2658\n2659\n2660\n2663\n2664\n2665\n2666\n2668\n2669\n2670\n2672\n2674\n2675\n2677\n2679\n2680\n2681\n2682\n2683\n2684\n2686\n2689\n2691\n2692\n2693\n2694\n2696\n2699\n2702\n2705\n2706\n2707\n2708\n2712\n2715\n2722\n2723\n2724\n2725
\n2727\n2728\n2730\n2731\n2732\n2734\n2737\n2738\n2739\n2741\n2742\n2743\n2745\n2747\n2748\n2749\n2750\n2752\n2760\n2761\n2762\n2764\n2767\n2770\n2774\n2778\n2780\n2791\n2795\n2796\n2805\n2810\n2812\n2814\n2815\n2818\n2820\n2828\n2829\n2832\n2833\n2835\n2837\n2840\n2843\n2844\n2845\n2852\n2859\n2860\n2861\n2862\n2863\n2864\n2865\n2866\n2867\n2868\n2869\n2870\n2871\n2872\n2874\n2875\n2876\n2878\n2879\n2880\n2881\n2882\n2884\n2885\n2886\n2888\n2889\n2890\n2891\n2892\n2893\n2894\n2895\n2897\n2899\n2900\n2903\n2904\n2907\n2910\n2913\n2914\n2916\n2923\n2926\n2932\n2933\n2940\n2944\n2945\n2947\n2949\n2950\n2953\n2955\n2956\n2957\n2958\n2959\n2960\n2963\n2964\n2967\n2970\n2974\n2976\n2979\n2980\n2982\n2984\n2985\n2989\n2990\n2991\n2992\n2993\n2994\n2996\n2999\n3000\n3002\n3005\n3007\n3008\n3009\n3010\n3012\n3013\n3014\n3018\n3019\n3020\n3022\n3024\n3025\n3026\n3027\n3028\n3029\n3030\n3033\n3035\n3036\n3039\n3040\n3042\n3043\n3046\n3047\n3048\n3051\n3053\n3055\n3056\n3059\n3060\n3067\n3069\n3074\n3079\n3086\n3088\n3091\n3093\n3094\n3106\n3111\n3117\n3125\n3129\n3134\n3135\n3136\n3137\n3138\n3139\n3140\n3141\n3142\n3143\n3144\n3145\n3146\n3148\n3149\n3150\n3151\n3153\n3154\n3159\n3160\n3161\n3164\n3165\n3166\n3168\n3169\n3170\n3171\n3172\n3173\n3176\n3177\n3182\n3188\n3191\n3192\n3193\n3194\n3195\n3196\n3200\n3201\n3202\n3203\n3204\n3205\n3206\n3207\n3208\n3209\n3210\n3214\n3218\n3219\n3220\n3221\n3222\n3223\n3225\n3226\n3227\n3228\n3229\n3230\n3231\n3232\n3234\n3235\n3236\n3237\n3238\n3239\n3240\n3241\n3242\n3243\n3244\n3245\n3246\n3247\n3248\n3253\n3258\n3259\n3260\n3261\n3262\n3264\n3265\n3266\n3267\n3268\n3270\n3271\n3273\n3274\n3277\n3278\n3279\n3280\n3281\n3282\n3283\n3284\n3285\n3288\n3289\n3291\n3292\n3296\n3297\n3298\n3299\n3301\n3302\n3304\n3305\n3306\n3307\n3308\n3309\n3310\n3311\n3312\n3315\n3316\n3318\n3320\n3321\n3322\n3324\n3325\n3327\n3328\n3329\n3330\n3332\n3333\n3334\n3335\n3337\n3339\n3340\n3341\n3342\n3343\n3344\n3345\n3348\n3349\n3351\n3352\n3353\n3354\n
3355\n3356\n3358\n3360\n3361\n3362\n3363\n3365\n3366\n3368\n3371\n3373\n3375\n3376\n3377\n3378\n3379\n3380\n3381\n3382\n3383\n3384\n3389\n3390\n3392\n3397\n3398\n3400\n3401\n3404\n3405\n3406\n3407\n3408\n3409\n3410\n3411\n3412\n3413\n3415\n3416\n3417\n3419\n3421\n3424\n3425\n3426\n3427\n3428\n3429\n3430\n3431\n3432\n3433\n3434\n3435\n3436\n3438\n3439\n3440\n3441\n3444\n3446\n3448\n3450\n3451\n3452\n3454\n3455\n3456\n3458\n3459\n3461\n3462\n3463\n3466\n3467\n3468\n3469\n3471\n3472\n3473\n3474\n3475\n3476\n3477\n3478\n3479\n3481\n3482\n3485\n3492\n3493\n3494\n3495\n3497\n3498\n3499\n3500\n3501\n3502\n3503\n3505\n3509\n3510\n3511\n3512\n3513\n3517\n3518\n3519\n3520\n3521\n3522\n3526\n3527\n3528\n3533\n3536\n3544\n3546\n3547\n3553\n3554\n3555\n3556\n3559\n3560\n3562\n3563\n3565\n3566\n3567\n3568\n3569\n3574\n3575\n3576\n3584\n3585\n3587\n3599\n3600\n3601\n3602\n3603\n3604\n3605\n3606\n3608\n3609\n3610\n3612\n3613\n3614\n3615\n3616\n3619\n3622\n3623\n3624\n3625\n3627\n3628\n3629\n3630\n3632\n3633\n3634\n3635\n3636\n3638\n3640\n3641\n3644\n3646\n3649\n3650\n3651\n3655\n3656\n3659\n3660\n3662\n3663\n3665\n3671\n3673\n3674\n3683\n3684\n3686\n3687\n3688\n3689\n3690\n3692\n3694\n3695\n3702\n3705\n3707\n3709\n3711\n3714\n3715\n3716\n3720\n3725\n3727\n3731\n3733\n3736\n3737\n3738\n3744\n3746\n3747\n3750\n3753\n3756\n3758\n3761\n3763\n3764\n3765\n3766\n3767\n3768\n3769\n3770\n3771\n3772\n3773\n3774\n3775\n3782\n3785\n3787\n3790\n3798\n3801\n3803\n3812\n3814\n3815\n3816\n3817\n3818\n3819\n3825\n3826\n3827\n3828\n3829\n3832\n3833\n3836\n3837\n3838\n3840\n3842\n3844\n3845\n3846\n3852\n3853\n3854\n3855\n3858\n3860\n3864\n3865\n3867\n3868\n3873\n3874\n3877\n3882\n3883\n3884\n3887\n3888\n3889\n3890\n3894\n3899\n3900\n3901\n3902\n3904\n3908\n3910\n3916\n3918\n3920\n3925\n3928\n3936\n3937\n3939\n3943\n3947\n3948\n3949\n3950\n3951\n3956\n3962\n3963\n3968\n3969\n3970\n3971\n3972\n3974\n3975\n3976\n3977\n3984\n3986\n3988\n3991\n4001\n4005\n4006\n4007\n4009\n4018\n4019\n4020\n4021\n4022\n40
23\n4024\n4026\n4028\n4030\n4031\n4032\n4033\n4036\n4038\n4039\n4040\n4041\n4042\n4043\n4062\n4063\n4065\n4066\n4067\n4068\n4071\n4073\n4074\n4075\n4089\n4090\n4094\n4096\n4097\n4099\n4100\n4101\n4102\n4104\n4105\n4107\n4109\n4110\n4112\n4118\n4120\n4129\n4136\n4137\n4138\n4139\n4140\n4141\n4142\n4143\n4144\n4148\n4150\n4151\n4152\n4153\n4154\n4155\n4158\n4159\n4161\n4165\n4167\n4171\n4174\n4176\n4178\n4179\n4181\n4182\n4183\n4185\n4187\n4189\n4190\n4191\n4192\n4198\n4202\n4203\n4204\n4205\n4206\n4207\n4208\n4210\n4211\n4212\n4213\n4214\n4215\n4216\n4217\n4219\n4221\n4222\n4223\n4226\n4227\n4230\n4232\n4233\n4235\n4237\n4242\n4244\n4248\n4249\n4250\n4251\n4252\n4253\n4254\n4255\n4256\n4259\n4261\n4262\n4263\n4264\n4265\n4266\n4267\n4269\n4270\n4272\n4273\n4274\n4276\n4277\n4278\n4280\n4281\n4282\n4283\n4284\n4285\n4290\n4292\n4296\n4297\n4298\n4299\n4301\n4304\n4306\n4307\n4308\n4309\n4310\n4311\n4312\n4313\n4315\n4317\n4318\n4321\n4323\n4324\n4325\n4326\n4327\n4328\n4329\n4330\n4331\n4332\n4334\n4335\n4336\n4338\n4340\n4341\n4344\n4345\n4346\n4349\n4350\n4351\n4352\n4354\n4355\n4356\n4358\n4361\n4362\n4363\n4365\n4366\n4369\n4373\n4374\n4378\n4379\n4380\n4386\n4389\n4390\n4391\n4395\n4396\n4399\n4400\n4401\n4403\n4404\n4406\n4407\n4408\n4410\n4412\n4414\n4416\n4417\n4418\n4419\n4420\n4421\n4423\n4425\n4426\n4427\n4428\n4430\n4431\n4432\n4434\n4435\n4436\n4438\n4439\n4440\n4441\n4442\n4444\n4445\n4450\n4451\n4453\n4454\n4455\n4456\n4458\n4459\n4462\n4463\n4464\n4465\n4466\n4467\n4468\n4469\n4470\n4471\n4473\n4474\n4475\n4476\n4477\n4478\n4479\n4481\n4483\n4484\n4485\n4486\n4487\n4489\n4490\n4491\n4493\n4494\n4495\n4496\n4497\n4498\n4499\n4500\n4501\n4504\n4505\n4506\n4508\n4509\n4510\n4511\n4512\n4515\n4518\n4519\n4521\n4522\n4529\n4530\n4531\n4533\n4536\n4538\n4539\n4540\n4542\n4543\n4544\n4545\n4546\n4547\n4549\n4550\n4551\n4552\n4555\n4556\n4559\n4560\n4561\n4562\n4565\n4567\n4568\n4569\n4570\n4571\n4572\n4574\n4576\n4577\n4579\n4580\n4583\n4585\n4587\n4588\n4591
\n4594\n4595\n4596\n4599\n4600\n4603\n4604\n4605\n4606\n4608\n4609\n4610\n4611\n4612\n4613\n4614\n4617\n4618\n4619\n4620\n4621\n4622\n4623\n4624\n4625\n4626\n4627\n4628\n4629\n4631\n4632\n4633\n4634\n4635\n4636\n4639\n4640\n4641\n4642\n4646\n4647\n4648\n4649\n4650\n4651\n4652\n4655\n4656\n4662\n4663\n4664\n4665\n4666\n4667\n4668\n4669\n4670\n4671\n4672\n4676\n4677\n4678\n4679\n4680\n4681\n4683\n4685\n4686\n4687\n4688\n4690\n4691\n4692\n4694\n4695\n4696\n4699\n4702\n4705\n4708\n4709\n4710\n4711\n4712\n4714\n4715\n4716\n4717\n4719\n4722\n4723\n4724\n4725\n4726\n4727\n4728\n4729\n4730\n4732\n4733\n4734\n4736\n4737\n4739\n4740\n4743\n4746\n4748\n4750\n4751\n4752\n4756\n4758\n4759\n4760\n4761\n4762\n4768\n4770\n4771\n4773\n4774\n4775\n4777\n4778\n4779\n4780\n4781\n4783\n4789\n4790\n4793\n4795\n4797\n4798\n4799\n4800\n4801\n4802\n4804\n4806\n4807\n4808\n4812\n4813\n4814\n4815\n4816\n4818\n4819\n4824\n4829\n4831\n4833\n4836\n4837\n4839\n4840\n4842\n4843\n4844\n4847\n4848\n4849\n4851\n4852\n4853\n4854\n4855\n4860\n4861\n4863\n4864\n4865\n4866\n4867\n4869\n4871\n4874\n4875\n4877\n4878\n4879\n4880\n4883\n4884\n4885\n4886\n4887\n4888\n4890\n4894\n4895\n4896\n4897\n4900\n4901\n4903\n4905\n4906\n4908\n4909\n4910\n4912\n4913\n4916\n4917\n4921\n4922\n4923\n4924\n4925\n4926\n4927\n4928\n4929\n4931\n4932\n4933\n4934\n4935\n4936\n4938\n4939\n4940\n4941\n4942\n4943\n4945\n4946\n4947\n4950\n4951\n4953\n4957\n4958\n4960\n4961\n4964\n4965\n4967\n4968\n4970\n4972\n4973\n4976\n4977\n4978\n4979\n4980\n4981\n4982\n4984\n4985\n4986\n4987\n4989\n4990\n4991\n4993\n4994\n4998\n4999\n5001\n5002\n5003\n5004\n5005\n5007\n5008\n5009\n5011\n5012\n5016\n5017\n5020\n5021\n5022\n5023\n5025\n5026\n5027\n5028\n5029\n5031\n5033\n5034\n5037\n5038\n5039\n5041\n5042\n5043\n5046\n5047\n5048\n5051\n5055\n5057\n5060\n5061\n5062\n5063\n5064\n5065\n5068\n5071\n5072\n5073\n5076\n5078\n5079\n5081\n5083\n5084\n5086\n5088\n5090\n5091\n5092\n5093\n5094\n5096\n5098\n5100\n5101\n5102\n5104\n5105\n5109\n5111\n5112\n5114\n
5115\n5117\n5119\n5120\n5121\n5122\n5123\n5124\n5125\n5126\n5127\n5129\n5130\n5131\n5132\n5133\n5134\n5135\n5137\n5138\n5139\n5141\n5142\n5143\n5144\n5146\n5148\n5149\n5151\n5153\n5154\n5156\n5157\n5158\n5162\n5163\n5165\n5167\n5168\n5172\n5174\n5175\n5176\n5178\n5179\n5180\n5181\n5183\n5184\n5185\n5186\n5187\n5189\n5191\n5193\n5195\n5196\n5198\n5199\n5201\n5202\n5203\n5204\n5205\n5206\n5207\n5208\n5209\n5210\n5211\n5212\n5213\n5215\n5216\n5217\n5218\n5219\n5221\n5222\n5223\n5224\n5225\n5226\n5227\n5231\n5234\n5235\n5237\n5239\n5240\n5247\n5248\n5249\n5250\n5253\n5254\n5255\n5256\n5258\n5259\n5264\n5265\n5266\n5267\n5269\n5270\n5272\n5273\n5275\n5277\n5278\n5282\n5284\n5288\n5290\n5291\n5292\n5293\n5294\n5295\n5296\n5297\n5298\n5299\n5300\n5301\n5302\n5306\n5307\n5311\n5312\n5313\n5314\n5315\n5316\n5317\n5319\n5320\n5321\n5322\n5323\n5326\n5328\n5329\n5330\n5331\n5332\n5333\n5334\n5335\n5336\n5338\n5339\n5340\n5341\n5343\n5344\n5345\n5346\n5347\n5348\n5353\n5357\n5358\n5360\n5362\n5363\n5364\n5369\n5372\n5373\n5375\n5377\n5378\n5379\n5381\n5385\n5386\n5387\n5388\n5389\n5390\n5391\n5392\n5393\n5395\n5398\n5399\n5400\n5401\n5402\n5403\n5406\n5407\n5410\n5411\n5412\n5413\n5417\n5418\n5419\n5420\n5421\n5422\n5423\n5425\n5426\n5427\n5428\n5429\n5430\n5431\n5432\n5434\n5435\n5437\n5439\n5441\n5443\n5444\n5445\n5446\n5447\n5448\n5450\n5451\n5454\n5455\n5456\n5461\n5463\n5466\n5467\n5471\n5472\n5473\n5474\n5475\n5476\n5477\n5478\n5481\n5482\n5483\n5484\n5485\n5486\n5487\n5488\n5489\n5491\n5493\n5494\n5495\n5496\n5497\n5498\n5499\n5501\n5503\n5504\n5505\n5506\n5507\n5508\n5510\n5511\n5514\n5515\n5517\n5519\n5520\n5521\n5522\n5524\n5529\n5530\n5531\n5532\n5535\n5538\n5540\n5541\n5542\n5544\n5547\n5548\n5549\n5550\n5551\n5552\n5553\n5554\n5555\n5557\n5561\n5563\n5564\n5565\n5566\n5567\n5568\n5569\n5570\n5572\n5574\n5575\n5576\n5577\n5578\n5579\n5580\n5583\n5584\n5586\n5590\n5591\n5592\n5593\n5594\n5595\n5596\n5597\n5598\n5603\n5604\n5606\n5607\n5608\n5609\n5610\n5612\n5613\n56
14\n5615\n5617\n5619\n5620\n5621\n5622\n5623\n5624\n5625\n5626\n5627\n5629\n5630\n5631\n5633\n5634\n5635\n5636\n5638\n5639\n5642\n5643\n5647\n5652\n5654\n5656\n5657\n5658\n5659\n5660\n5661\n5663\n5664\n5665\n5667\n5669\n5671\n5672\n5673\n5674\n5676\n5677\n5682\n5683\n5685\n5688\n5690\n5691\n5692\n5694\n5695\n5696\n5697\n5698\n5699\n5701\n5702\n5703\n5704\n5705\n5708\n5709\n5711\n5712\n5713\n5714\n5715\n5716\n5717\n5718\n5725\n5727\n5729\n5736\n5737\n5738\n5741\n5742\n5743\n5748\n5752\n5753\n5754\n5755\n5757\n5758\n5759\n5760\n5761\n5764\n5765\n5766\n5767\n5768\n5769\n5770\n5772\n5773\n5774\n5776\n5777\n5778\n5779\n5782\n5784\n5785\n5786\n5787\n5788\n5789\n5790\n5791\n5792\n5793\n5797\n5798\n5802\n5803\n5804\n5805\n5807\n5808\n5809\n5810\n5811\n5812\n5814\n5816\n5817\n5818\n5823\n5824\n5825\n5828\n5829\n5830\n5831\n5832\n5836\n5837\n5841\n5843\n5845\n5846\n5847\n5848\n5849\n5850\n5851\n5853\n5855\n5857\n5858\n5859\n5860\n5861\n5862\n5863\n5866\n5867\n5868\n5871\n5872\n5873\n5874\n5875\n5879\n5881\n5884\n5885\n5887\n5888\n5891\n5892\n5893\n5896\n5897\n5898\n5899\n5900\n5902\n5904\n5905\n5906\n5907\n5910\n5911\n5912\n5913\n5914\n5915\n5918\n5919\n5920\n5921\n5922\n5924\n5927\n5928\n5931\n5932\n5934\n5935\n5940\n5941\n5942\n5944\n5947\n5949\n5950\n5951\n5952\n5954\n5955\n5956\n5957\n5960\n5961\n5962\n5964\n5965\n5967\n5968\n5969\n5973\n5974\n5976\n5977\n5980\n5981\n5985\n5986\n5987\n5988\n5990\n5991\n5994\n5995\n5996\n5997\n5998\n5999\n6001\n6003\n6004\n6005\n6006\n6008\n6009\n6010\n6012\n6013\n6015\n6016\n6017\n6020\n6021\n6023\n6024\n6025\n6026\n6027\n6028\n6029\n6030\n6032\n6033\n6037\n6040\n6041\n6042\n6043\n6044\n6046\n6047\n6048\n6049\n6050\n6054\n6055\n6056\n6057\n6063\n6065\n6069\n6070\n6072\n6075\n6076\n6077\n6079\n6082\n6083\n6084\n6086\n6087\n6092\n6099\n6102\n6103\n6105\n6109\n6110\n6111\n6114\n6115\n6116\n6118\n6120\n6122\n6124\n6125\n6128\n6129\n6134\n6139\n6140\n6144\n6146\n6147\n6148\n6152\n6153\n6154\n6157\n6158\n6160\n6167\n6168\n6173\n6174\n6175\n6177
\n6179\n6180\n6184\n6190\n6191\n6192\n6198\n6201\n6202\n6203\n6204\n6205\n6207\n6210\n6211\n6212\n6214\n6215\n6216\n6217\n6219\n6224\n6225\n6226\n6227\n6228\n6230\n6232\n6234\n6235\n6236\n6237\n6238\n6239\n6241\n6242\n6243\n6248\n6251\n6252\n6253\n6255\n6256\n6259\n6260\n6262\n6266\n6270\n6272\n6273\n6274\n6275\n6281\n6284\n6285\n6286\n6288\n6289\n6290\n6291\n6294\n6297\n6298\n6299\n6300\n6301\n6302\n6303\n6304\n6305\n6306\n6307\n6308\n6309\n6312\n6315\n6319\n6321\n6325\n6326\n6327\n6330\n6331\n6334\n6335\n6336\n6338\n6339\n6340\n6341\n6342\n6343\n6344\n6345\n6347\n6348\n6349\n6350\n6352\n6355\n6356\n6359\n6362\n6363\n6364\n6365\n6367\n6372\n6376\n6378\n6379\n6383\n6385\n6386\n6387\n6388\n6389\n6390\n6392\n6393\n6394\n6395\n6396\n6397\n6398\n6399\n6400\n6401\n6404\n6405\n6407\n6408\n6411\n6412\n6414\n6417\n6418\n6420\n6421\n6422\n6423\n6425\n6426\n6430\n6431\n6433\n6435\n6437\n6439\n6440\n6441\n6442\n6444\n6447\n6448\n6449\n6450\n6451\n6452\n6453\n6454\n6455\n6456\n6458\n6459\n6460\n6462\n6464\n6465\n6467\n6468\n6469\n6470\n6471\n6474\n6475\n6477\n6478\n6479\n6480\n6481\n6482\n6483\n6488\n6490\n6492\n6493\n6495\n6496\n6499\n6500\n6503\n6505\n6506\n6510\n6511\n6513\n6514\n6515\n6517\n6518\n6521\n6522\n6523\n6527\n6531\n6533\n6534\n6535\n6536\n6537\n6540\n6541\n6545\n6546\n6547\n6550\n6551\n6553\n6554\n6556\n6558\n6559\n6560\n6561\n6562\n6563\n6567\n6568\n6571\n6572\n6573\n6574\n6575\n6576\n6577\n6578\n6579\n6583\n6587\n6589\n6590\n6591\n6593\n6594\n6595\n6596\n6597\n6598\n6600\n6601\n6602\n6604\n6605\n6608\n6611\n6612\n6613\n6614\n6615\n6616\n6617\n6618\n6619\n6620\n6621\n6622\n6623\n6629\n6632\n6636\n6638\n6639\n6640\n6643\n6648\n6649\n6651\n6653\n6654\n6655\n6658\n6660\n6661\n6662\n6663\n6665\n6667\n6668\n6669\n6670\n6673\n6674\n6675\n6676\n6677\n6678\n6679\n6681\n6682\n6683\n6686\n6687\n6691\n6692\n6693\n6694\n6695\n6696\n6698\n6700\n6702\n6703\n6705\n6706\n6707\n6708\n6709\n6710\n6712\n6713\n6715\n6716\n6718\n6720\n6721\n6722\n6723\n6725\n6726\n6728\n6735\n6737\n
6739\n6740\n6741\n6743\n6744\n6745\n6746\n6747\n6748\n6749\n6751\n6752\n6753\n6754\n6757\n6758\n6763\n6764\n6765\n6766\n6767\n6768\n6770\n6772\n6773\n6774\n6775\n6776\n6778\n6779\n6781\n6783\n6784\n6785\n6786\n6787\n6788\n6791\n6794\n6795\n6797\n6798\n6799\n6800\n6804\n6805\n6806\n6807\n6808\n6809\n6810\n6813\n6814\n6815\n6820\n6822\n6823\n6825\n6826\n6829\n6830\n6831\n6833\n6834\n6837\n6838\n6840\n6841\n6846\n6847\n6850\n6851\n6855\n6857\n6858\n6860\n6863\n6864\n6865\n6866\n6867\n6868\n6870\n6875\n6876\n6877\n6878\n6879\n6880\n6882\n6885\n6886\n6887\n6889\n6890\n6892\n6894\n6898\n6900\n6901\n6902\n6905\n6908\n6909\n6912\n6915\n6916\n6917\n6919\n6920\n6925\n6926\n6928\n6929\n6930\n6931\n6932\n6934\n6935\n6936\n6937\n6939\n6940\n6941\n6944\n6945\n6946\n6950\n6951\n6952\n6953\n6954\n6956\n6958\n6959\n6960\n6961\n6964\n6965\n6966\n6968\n6969\n6973\n6974\n6978\n6980\n6981\n6982\n6985\n6986\n6987\n6990\n6991\n6993\n6994\n6995\n6996\n6997\n6998\n6999\n7000\n7002\n7003\n7004\n7009\n7010\n7011\n7013\n7017\n7018\n7019\n7025\n7026\n7029\n7031\n7038\n7039\n7041\n7042\n7044\n7045\n7046\n7048\n7049\n7050\n7051\n7052\n7055\n7056\n7057\n7059\n7062\n7063\n7064\n7066\n7068\n7069\n7072\n7073\n7075\n7076\n7077\n7078\n7079\n7081\n7082\n7083\n7084\n7085\n7087\n7088\n7090\n7091\n7092\n7093\n7095\n7096\n7097\n7098\n7099\n7100\n7101\n7103\n7104\n7107\n7108\n7110\n7111\n7112\n7113\n7115\n7116\n7117\n7118\n7120\n7121\n7122\n7123\n7126\n7127\n7128\n7129\n7134\n7135\n7136\n7137\n7138\n7142\n7150\n7152\n7153\n7154\n7155\n7156\n7158\n7160\n7161\n7162\n7163\n7164\n7165\n7166\n7167\n7168\n7169\n7170\n7171\n7172\n7173\n7175\n7176\n7177\n7178\n7180\n7181\n7182\n7183\n7186\n7189\n7192\n7193\n7194\n7195\n7196\n7198\n7199\n7200\n7201\n7202\n7203\n7204\n7205\n7206\n7207\n7208\n7212\n7213\n7214\n7215\n7216\n7217\n7218\n7219\n7220\n7222\n7223\n7224\n7225\n7226\n7228\n7230\n7231\n7232\n7237\n7238\n7239\n7241\n7242\n7243\n7244\n7245\n7246\n7247\n7250\n7254\n7256\n7257\n7258\n7259\n7260\n7261\n7263\n7264\n72
66\n7267\n7268\n7270\n7271\n7273\n7276\n7277\n7278\n7279\n7280\n7282\n7283\n7284\n7285\n7286\n7287\n7288\n7289\n7290\n7291\n7292\n7293\n7294\n7297\n7299\n7301\n7302\n7305\n7306\n7307\n7309\n7310\n7313\n7314\n7315\n7316\n7317\n7318\n7319\n7321\n7322\n7323\n7324\n7325\n7326\n7327\n7329\n7332\n7333\n7334\n7335\n7336\n7337\n7338\n7340\n7341\n7342\n7344\n7346\n7348\n7349\n7350\n7353\n7354\n7357\n7358\n7363\n7364\n7365\n7370\n7372\n7373\n7375\n7378\n7379\n7380\n7382\n7385\n7386\n7388\n7390\n7391\n7393\n7394\n7396\n7400\n7403\n7406\n7412\n7418\n7419\n7420\n7422\n7424\n7425\n7427\n7428\n7432\n7435\n7436\n7437\n7438\n7440\n7441\n7442\n7443\n7445\n7449\n7450\n7451\n7452\n7454\n7455\n7458\n7459\n7460\n7461\n7462\n7463\n7464\n7465\n7466\n7467\n7469\n7470\n7471\n7472\n7473\n7474\n7475\n7476\n7478\n7479\n7482\n7484\n7485\n7486\n7491\n7492\n7494\n7496\n7497\n7498\n7502\n7503\n7504\n7505\n7506\n7507\n7511\n7513\n7514\n7516\n7517\n7518\n7520\n7521\n7523\n7524\n7525\n7526\n7528\n7530\n7533\n7536\n7539\n7540\n7541\n7542\n7546\n7548\n7551\n7552\n7554\n7556\n7557\n7558\n7559\n7561\n7562\n7563\n7564\n7565\n7566\n7567\n7568\n7570\n7571\n7573\n7574\n7575\n7578\n7584\n7585\n7587\n7590\n7591\n7592\n7595\n7596\n7597\n7601\n7603\n7604\n7606\n7607\n7608\n7610\n7612\n7613\n7616\n7617\n7619\n7622\n7623\n7625\n7626\n7628\n7629\n7630\n7631\n7634\n7637\n7638\n7641\n7642\n7644\n7646\n7650\n7651\n7652\n7655\n7656\n7657\n7658\n7659\n7660\n7661\n7663\n7664\n7665\n7666\n7671\n7672\n7673\n7674\n7679\n7681\n7682\n7685\n7686\n7688\n7690\n7691\n7693\n7694\n7696\n7698\n7703\n7704\n7705\n7707\n7708\n7710\n7711\n7712\n7713\n7715\n7716\n7717\n7718\n7719\n7721\n7722\n7723\n7724\n7725\n7727\n7728\n7729\n7730\n7731\n7732\n7733\n7734\n7736\n7738\n7739\n7740\n7741\n7742\n7746\n7749\n7751\n7753\n7755\n7756\n7757\n7758\n7759\n7760\n7763\n7764\n7768\n7769\n7770\n7773\n7775\n7777\n7778\n7779\n7783\n7785\n7786\n7787\n7788\n7789\n7792\n7793\n7794\n7795\n7798\n7799\n7801\n7805\n7806\n7810\n7813\n7815\n7818\n7820\n7824\n7828
\n7830\n7832\n7834\n7835\n7837\n7841\n7843\n7844\n7849\n7852\n7854\n7855\n7856\n7858\n7860\n7862\n7864\n7867\n7868\n7871\n7872\n7873\n7874\n7876\n7878\n7881\n7882\n7884\n7886\n7887\n7889\n7891\n7892\n7894\n7895\n7896\n7902\n7903\n7904\n7905\n7906\n7908\n7911\n7913\n7914\n7915\n7917\n7918\n7919\n7920\n7921\n7923\n7924\n7927\n7928\n7929\n7931\n7934\n7935\n7937\n7938\n7939\n7940\n7941\n7942\n7943\n7944\n7949\n7950\n7951\n7952\n7953\n7954\n7955\n7959\n7962\n7963\n7964\n7966\n7969\n7972\n7973\n7976\n7977\n7981\n7982\n7983\n7984\n7987\n7988\n7989\n7990\n7991\n7992\n7994\n7995\n7997\n7998\n7999\n8000\n8001\n8004\n8005\n8006\n8007\n8008\n8009\n8012\n8017\n8019\n8020\n8021\n8022\n8023\n8024\n8025\n8027\n8028\n8029\n8031\n8033\n8034\n8035\n8036\n8037\n8038\n8039\n8040\n8042\n8043\n8044\n8045\n8046\n8050\n8051\n8052\n8054\n8056\n8060\n8061\n8062\n8064\n8065\n8066\n8068\n8070\n8071\n8072\n8074\n8077\n8078\n8080\n8081\n8082\n8084\n8086\n8087\n8089\n8090\n8093\n8098\n8099\n8101\n8104\n8105\n8106\n8110\n8112\n8113\n8114\n8115\n8116\n8119\n8120\n8121\n8124\n8125\n8126\n8127\n8129\n8131\n8133\n8136\n8138\n8139\n8140\n8141\n8142\n8144\n8145\n8147\n8149\n8150\n8151\n8153\n8154\n8155\n8156\n8157\n8159\n8161\n8162\n8163\n8164\n8166\n8168\n8170\n8171\n8172\n8173\n8174\n8175\n8177\n8178\n8179\n8182\n8183\n8184\n8186\n8191\n8193\n8195\n8197\n8198\n8199\n8201\n8202\n8203\n8204\n8205\n8206\n8207\n8208\n8210\n8211\n8212\n8213\n8215\n8216\n8218\n8220\n8221\n8222\n8225\n8229\n8230\n8231\n8232\n8233\n8236\n8237\n8239\n8240\n8242\n8243\n8244\n8245\n8246\n8250\n8251\n8252\n8254\n8255\n8256\n8257\n8258\n8259\n8261\n8263\n8264\n8267\n8268\n8271\n8272\n8273\n8275\n8276\n8278\n8281\n8282\n8285\n8286\n8288\n8289\n8290\n8294\n8295\n8297\n8298\n8299\n8300\n8303\n8307\n8309\n8310\n8312\n8313\n8315\n8318\n8320\n8322\n8325\n8326\n8327\n8328\n8329\n8330\n8332\n8333\n8335\n8337\n8345\n8346\n8347\n8348\n8352\n8354\n8360\n8362\n8364\n8365\n8368\n8371\n8375\n8376\n8378\n8380\n8381\n8382\n8386\n8388\n8389\n8390\n
8392\n8393\n8394\n8396\n8397\n8398\n8399\n8400\n8401\n8402\n8403\n8404\n8405\n8407\n8408\n8409\n8410\n8412\n8414\n8416\n8417\n8418\n8419\n8420\n8421\n8422\n8426\n8428\n8430\n8432\n8433\n8434\n8435\n8436\n8437\n8439\n8440\n8446\n8447\n8448\n8449\n8450\n8451\n8452\n8453\n8454\n8456\n8460\n8462\n8463\n8464\n8467\n8468\n8469\n8470\n8472\n8473\n8474\n8477\n8478\n8481\n8482\n8483\n8484\n8485\n8486\n8490\n8491\n8492\n8493\n8494\n8495\n8496\n8497\n8498\n8500\n8501\n8502\n8503\n8505\n8506\n8508\n8509\n8510\n8511\n8512\n8513\n8516\n8521\n8522\n8524\n8526\n8529\n8531\n8532\n8536\n8538\n8539\n8540\n8541\n8542\n8543\n8547\n8548\n8549\n8552\n8553\n8555\n8556\n8557\n8560\n8561\n8562\n8564\n8565\n8568\n8569\n8570\n8571\n8572\n8573\n8577\n8578\n8580\n8581\n8583\n8584\n8586\n8588\n8589\n8590\n8591\n8593\n8594\n8596\n8597\n8598\n8599\n8600\n8601\n8602\n8603\n8604\n8606\n8607\n8610\n8611\n8613\n8615\n8622\n8625\n8626\n8627\n8628\n8629\n8632\n8636\n8638\n8639\n8641\n8643\n8645\n8646\n8647\n8648\n8649\n8650\n8651\n8652\n8653\n8654\n8655\n8656\n8657\n8658\n8662\n8663\n8664\n8665\n8666\n8667\n8668\n8669\n8670\n8671\n8672\n8673\n8674\n8675\n8676\n8677\n8678\n8679\n8680\n8681\n8682\n8684\n8685\n8686\n8690\n8691\n8692\n8693\n8694\n8695\n8702\n8707\n8708\n8709\n8710\n8711\n8712\n8713\n8715\n8716\n8720\n8723\n8724\n8725\n8728\n8732\n8733\n8737\n8738\n8739\n8740\n8741\n8745\n8746\n8750\n8752\n8753\n8754\n8756\n8757\n8758\n8759\n8761\n8762\n8763\n8766\n8768\n8770\n8771\n8772\n8773\n8775\n8776\n8780\n8781\n8783\n8784\n8785\n8786\n8787\n8788\n8793\n8795\n8797\n8798\n8801\n8803\n8804\n8806\n8807\n8810\n8812\n8814\n8815\n8817\n8820\n8823\n8824\n8826\n8827\n8828\n8829\n8830\n8831\n8833\n8835\n8838\n8839\n8842\n8843\n8845\n8846\n8847\n8848\n8849\n8851\n8854\n8856\n8857\n8858\n8860\n8861\n8863\n8864\n8867\n8869\n8870\n8871\n8872\n8875\n8876\n8878\n8879\n8883\n8884\n8886\n8887\n8888\n8890\n8891\n8892\n8894\n8896\n8897\n8898\n8899\n8900\n8901\n8902\n8903\n8905\n8906\n8908\n8910\n8914\n8915\n8916\n8917\n89
18\n8919\n8922\n8923\n8924\n8925\n8926\n8927\n8929\n8931\n8932\n8934\n8936\n8937\n8938\n8939\n8942\n8943\n8944\n8945\n8947\n8948\n8950\n8951\n8954\n8956\n8957\n8959\n8962\n8965\n8966\n8967\n8968\n8969\n8970\n8971\n8976\n8977\n8980\n8981\n8982\n8983\n8984\n8985\n8986\n8987\n8989\n8990\n8991\n8992\n8993\n8994\n8995\n9000\n9001\n9003\n9006\n9007\n9011\n9012\n9013\n9014\n9015\n9019\n9022\n9023\n9024\n9025\n9026\n9028\n9029\n9030\n9031\n9032\n9033\n9034\n9036\n9037\n9039\n9042\n9043\n9047\n9049\n9050\n9051\n9052\n9054\n9055\n9056\n9057\n9058\n9059\n9060\n9061\n9062\n9064\n9065\n9066\n9070\n9071\n9072\n9073\n9074\n9079\n9080\n9081\n9082\n9083\n9087\n9088\n9092\n9093\n9094\n9096\n9097\n9098\n9100\n9101\n9104\n9105\n9106\n9107\n9108\n9109\n9110\n9111\n9112\n9116\n9118\n9119\n9123\n9128\n9130\n9131\n9132\n9133\n9134\n9138\n9139\n9140\n9141\n9142\n9144\n9146\n9147\n9148\n9149\n9150\n9151\n9153\n9154\n9155\n9158\n9159\n9161\n9163\n9165\n9166\n9167\n9168\n9169\n9171\n9173\n9174\n9175\n9176\n9179\n9180\n9183\n9184\n9187\n9188\n9189\n9191\n9193\n9198\n9199\n9201\n9204\n9205\n9206\n9211\n9212\n9213\n9214\n9215\n9216\n9218\n9219\n9220\n9223\n9224\n9225\n9226\n9227\n9228\n9229\n9230\n9231\n9233\n9237\n9238\n9239\n9241\n9242\n9243\n9245\n9249\n9250\n9251\n9252\n9254\n9256\n9257\n9258\n9259\n9264\n9265\n9268\n9269\n9270\n9271\n9272\n9273\n9274\n9275\n9276\n9278\n9280\n9282\n9289\n9292\n9293\n9294\n9295\n9296\n9299\n9302\n9303\n9304\n9305\n9306\n9307\n9308\n9309\n9312\n9313\n9316\n9317\n9318\n9321\n9323\n9326\n9329\n9330\n9332\n9333\n9334\n9335\n9336\n9337\n9341\n9342\n9343\n9344\n9345\n9346\n9348\n9349\n9351\n9353\n9354\n9361\n9362\n9364\n9365\n9366\n9367\n9368\n9369\n9370\n9371\n9375\n9376\n9380\n9381\n9382\n9384\n9385\n9386\n9389\n9390\n9391\n9394\n9395\n9396\n9397\n9398\n9399\n9400\n9401\n9403\n9404\n9406\n9410\n9411\n9412\n9413\n9414\n9415\n9416\n9417\n9419\n9420\n9421\n9422\n9424\n9425\n9426\n9429\n9430\n9436\n9439\n9440\n9441\n9444\n9445\n9446\n9447\n9448\n9449\n9451\n9452\n9453
\n9454\n9456\n9459\n9462\n9463\n9464\n9466\n9468\n9469\n9470\n9474\n9475\n9478\n9480\n9481\n9483\n9485\n9487\n9489\n9491\n9492\n9495\n9497\n9499\n9500\n9501\n9502\n9503\n9504\n9512\n9513\n9514\n9515\n9520\n9521\n9522\n9527\n9531\n9532\n9534\n9535\n9536\n9539\n9541\n9542\n9544\n9545\n9547\n9548\n9550\n9551\n9556\n9557\n9565\n9566\n9568\n9569\n9570\n9571\n9573\n9574\n9575\n9576\n9577\n9578\n9580\n9583\n9584\n9585\n9586\n9587\n9589\n9590\n9594\n9596\n9601\n9604\n9607\n9608\n9609\n9614\n9615\n9617\n9621\n9623\n9625\n9626\n9627\n9628\n9629\n9632\n9633\n9634\n9635\n9636\n9638\n9639\n9640\n9641\n9642\n9643\n9646\n9651\n9652\n9653\n9654\n9655\n9658\n9659\n9660\n9663\n9664\n9665\n9666\n9667\n9669\n9672\n9673\n9674\n9675\n9676\n9677\n9678\n9680\n9683\n9685\n9688\n9689\n9690\n9691\n9692\n9694\n9696\n9697\n9702\n9703\n9704\n9705\n9706\n9710\n9711\n9712\n9714\n9716\n9719\n9720\n9723\n9725\n9726\n9727\n9729\n9732\n9733\n9734\n9736\n9737\n9738\n9739\n9740\n9741\n9744\n9751\n9752\n9753\n9754\n9755\n9756\n9758\n9759\n9762\n9763\n9764\n9766\n9769\n9770\n9771\n9772\n9773\n9775\n9776\n9777\n9778\n9779\n9780\n9782\n9784\n9785\n9786\n9791\n9794\n9796\n9797\n9798\n9799\n9800\n9801\n9802\n9805\n9806\n9807\n9809\n9811\n9814\n9819\n9820\n9825\n9826\n9827\n9834\n9835\n9836\n9837\n9838\n9841\n9844\n9848\n9849\n9855\n9857\n9858\n9859\n9860\n9862\n9866\n9868\n9869\n9873\n9875\n9876\n9877\n9878\n9880\n9883\n9885\n9886\n9887\n9888\n9889\n9891\n9893\n9894\n9895\n9896\n9897\n9898\n9899\n9900\n9901\n9902\n9904\n9906\n9907\n9909\n9911\n9912\n9915\n9920\n9921\n9922\n9923\n9924\n9926\n9927\n9928\n9929\n9930\n9931\n9934\n9935\n9936\n9937\n9938\n9940\n9944\n9946\n9947\n9948\n9950\n9951\n9952\n9953\n9955\n9957\n9959\n9960\n9961\n9962\n9963\n9964\n9965\n9966\n9968\n9969\n9971\n9974\n9975\n9976\n9978\n9979\n9980\n9981\n9982\n9983\n9987\n9988\n9989\n9990\n9991\n9992\n9993\n9996\n9998\n10001\n10002\n10009\n10010\n10011\n10012\n10013\n10014\n10015\n10016\n10017\n10021\n10022\n10024\n10027\n10028\n10030\n10032\n
10033\n10035\n10037\n10039\n10040\n10041\n10042\n10043\n10044\n10045\n10046\n10047\n10049\n10050\n10051\n10052\n10053\n10054\n10055\n10056\n10057\n10058\n10059\n10061\n10063\n10064\n10065\n10066\n10067\n10069\n10072\n10073\n10074\n10075\n10076\n10077\n10080\n10081\n10082\n10083\n10085\n10086\n10087\n10089\n10090\n10091\n10092\n10093\n10094\n10096\n10099\n10101\n10104\n10105\n10106\n10107\n10108\n10109\n10112\n10116\n10117\n10118\n10120\n10121\n10122\n10123\n10126\n10127\n10133\n10134\n10139\n10140\n10141\n10146\n10147\n10150\n10151\n10152\n10154\n10155\n10156\n10157\n10158\n10159\n10161\n10162\n10163\n10164\n10168\n10170\n10174\n10178\n10180\n10184\n10185\n10186\n10187\n10188\n10189\n10190\n10191\n10194\n10195\n10196\n10198\n10199\n10201\n10205\n10206\n10208\n10209\n10210\n10212\n10216\n10217\n10218\n10219\n10220\n10222\n10223\n10224\n10225\n10226\n10227\n10228\n10230\n10231\n10233\n10234\n10235\n10236\n10239\n10240\n10242\n10243\n10244\n10245\n10246\n10247\n10248\n10249\n10251\n10252\n10253\n10254\n10257\n10259\n10261\n10262\n10263\n10264\n10265\n10268\n10270\n10271\n10272\n10275\n10276\n10278\n10282\n10283\n10285\n10288\n10289\n10290\n10294\n10295\n10296\n10297\n10298\n10299\n10300\n10301\n10302\n10303\n10304\n10305\n10306\n10307\n10310\n10312\n10313\n10314\n10317\n10318\n10321\n10322\n10323\n10324\n10325\n10326\n10327\n10328\n10329\n10330\n10331\n10332\n10334\n10335\n10336\n10337\n10338\n10339\n10342\n10343\n10344\n10345\n10348\n10349\n10350\n10351\n10352\n10353\n10354\n10355\n10359\n10361\n10362\n10365\n10366\n10370\n10371\n10372\n10374\n10376\n10377\n10379\n10380\n10382\n10386\n10387\n10388\n10389\n10390\n10393\n10394\n10396\n10397\n10398\n10400\n10401\n10403\n10405\n10408\n10411\n10412\n10413\n10415\n10416\n10417\n10418\n10419\n10421\n10422\n10423\n10424\n10426\n10428\n10429\n10430\n10431\n10432\n10435\n10437\n10439\n10443\n10446\n10447\n10450\n10452\n10454\n10455\n10458\n10459\n10460\n10461\n10462\n10464\n10467\n10469\n10472\n10475\n10477\n10478\n10480\n10482
\n10486\n10487\n10488\n10490\n10493\n10495\n10496\n10498\n10499\n10500\n10503\n10504\n10505\n10507\n10508\n10509\n10510\n10511\n10512\n10513\n10514\n10516\n10517\n10518\n10519\n10520\n10521\n10522\n10523\n10524\n10525\n10526\n10527\n10529\n10530\n10531\n10533\n10534\n10535\n10538\n10541\n10543\n10545\n10546\n10547\n10548\n10549\n10550\n10551\n10552\n10553\n10554\n10555\n10558\n10560\n10562\n10563\n10566\n10569\n10573\n10574\n10575\n10582\n10583\n10584\n10585\n10587\n10588\n10589\n10590\n10591\n10593\n10597\n10606\n10609\n10610\n10611\n10612\n10614\n10616\n10619\n10620\n10622\n10624\n10625\n10626\n10627\n10628\n10630\n10632\n10634\n10635\n10637\n10638\n10640\n10641\n10642\n10643\n10647\n10648\n10649\n10657\n10658\n10661\n10662\n10663\n10664\n10665\n10666\n10667\n10668\n10670\n10671\n10672\n10673\n10674\n10675\n10676\n10677\n10679\n10680\n10682\n10685\n10686\n10687\n10690\n10691\n10693\n10694\n10696\n10697\n10698\n10699\n10700\n10701\n10702\n10707\n10708\n10710\n10711\n10712\n10713\n10717\n10718\n10719\n10720\n10722\n10724\n10725\n10726\n10727\n10728\n10729\n10730\n10732\n10733\n10734\n10737\n10738\n10741\n10747\n10748\n10749\n10750\n10751\n10753\n10754\n10756\n10758\n10759\n10760\n10762\n10764\n10765\n10766\n10767\n10771\n10772\n10773\n10774\n10775\n10776\n10779\n10780\n10781\n10782\n10783\n10785\n10786\n10790\n10791\n10792\n10795\n10797\n10798\n10799\n10801\n10802\n10805\n10806\n10807\n10808\n10809\n10810\n10812\n10813\n10817\n10821\n10823\n10824\n10827\n10829\n10831\n10832\n10834\n10836\n10839\n10840\n10841\n10842\n10843\n10845\n10847\n10848\n10851\n10854\n10855\n10858\n10859\n10861\n10863\n10864\n10866\n10869\n10870\n10871\n10873\n10874\n10875\n10876\n10878\n10879\n10880\n10881\n10882\n10883\n10885\n10888\n10889\n10893\n10895\n10896\n10897\n10898\n10901\n10905\n10906\n10907\n10908\n10909\n10911\n10912\n10913\n10914\n10918\n10919\n10920\n10923\n10926\n10927\n10931\n10932\n10934\n10935\n10937\n10938\n10939\n10940\n10942\n10943\n10944\n10945\n10946\n10947\n10950\n109
51\n10952\n10954\n10955\n10956\n10957\n10959\n10961\n10962\n10963\n10967\n10968\n10971\n10972\n10973\n10974\n10979\n10983\n10985\n10986\n10988\n10993\n10996\n10997\n10998\n11000\n11001\n11002\n11003\n11004\n11008\n11011\n11012\n11015\n11016\n11017\n11019\n11021\n11022\n11023\n11024\n11026\n11027\n11028\n11030\n11031\n11032\n11033\n11035\n11038\n11039\n11040\n11043\n11044\n11045\n11048\n11050\n11051\n11052\n11053\n11054\n11055\n11056\n11058\n11059\n11060\n11061\n11063\n11065\n11066\n11067\n11068\n11070\n11072\n11073\n11076\n11077\n11078\n11080\n11081\n11082\n11083\n11084\n11085\n11087\n11088\n11093\n11094\n11095\n11096\n11097\n11098\n11101\n11102\n11103\n11104\n11105\n11106\n11107\n11108\n11109\n11110\n11112\n11113\n11114\n11118\n11119\n11120\n11122\n11125\n11127\n11128\n11129\n11132\n11133\n11134\n11136\n11137\n11138\n11139\n11140\n11142\n11143\n11144\n11145\n11146\n11148\n11150\n11151\n11152\n11153\n11154\n11155\n11157\n11158\n11159\n11160\n11161\n11162\n11163\n11164\n11167\n11168\n11169\n11170\n11171\n11172\n11173\n11176\n11178\n11179\n11180\n11181\n11182\n11184\n11185\n11186\n11187\n11189\n11197\n11200\n11202\n11205\n11207\n11208\n11209\n11210\n11211\n11213\n11215\n11217\n11218\n11219\n11220\n11221\n11222\n11224\n11225\n11226\n11234\n11235\n11237\n11238\n11241\n11242\n11243\n11246\n11247\n11251\n11254\n11256\n11258\n11259\n11261\n11262\n11265\n11268\n11269\n11272\n11273\n11274\n11275\n11276\n11278\n11281\n11282\n11283\n11291\n11292\n11293\n11295\n11297\n11298\n11300\n11303\n11304\n11305\n11306\n11308\n11309\n11310\n11312\n11315\n11317\n11318\n11319\n11322\n11324\n11325\n11326\n11328\n11329\n11330\n11331\n11333\n11334\n11338\n11339\n11340\n11342\n11343\n11344\n11346\n11348\n11349\n11350\n11351\n11353\n11354\n11355\n11356\n11360\n11362\n11364\n11366\n11369\n11371\n11373\n11374\n11376\n11377\n11378\n11381\n11382\n11383\n11384\n11385\n11386\n11387\n11388\n11389\n11390\n11391\n11393\n11395\n11396\n11397\n11399\n11400\n11401\n11402\n11403\n11404\n11406\n11408\n11409\n1
1411\n11413\n11415\n11417\n11420\n11423\n11426\n11427\n11428\n11430\n11432\n11435\n11436\n11438\n11439\n11440\n11441\n11442\n11446\n11448\n11453\n11454\n11455\n11459\n11463\n11464\n11465\n11467\n11468\n11469\n11471\n11472\n11473\n11476\n11477\n11478\n11481\n11482\n11483\n11484\n11486\n11487\n11488\n11489\n11490\n11491\n11492\n11493\n11494\n11496\n11497\n11498\n11500\n11502\n11503\n11506\n11507\n11513\n11514\n11515\n11516\n11517\n11519\n11520\n11521\n11523\n11526\n11528\n11531\n11535\n11536\n11537\n11538\n11539\n11540\n11541\n11542\n11543\n11548\n11550\n11553\n11555\n11556\n11557\n11559\n11561\n11562\n11565\n11569\n11570\n11571\n11572\n11573\n11574\n11576\n11577\n11578\n11579\n11581\n11583\n11587\n11588\n11589\n11590\n11591\n11592\n11593\n11595\n11596\n11597\n11598\n11599\n11603\n11604\n11605\n11608\n11610\n11611\n11612\n11613\n11614\n11617\n11618\n11619\n11620\n11621\n11622\n11623\n11626\n11627\n11628\n11629\n11630\n11631\n11632\n11633\n11635\n11636\n11637\n11639\n11640\n11642\n11643\n11645\n11646\n11647\n11648\n11649\n11650\n11651\n11652\n11653\n11654\n11655\n11656\n11657\n11658\n11659\n11661\n11663\n11667\n11669\n11670\n11672\n11673\n11674\n11678\n11681\n11682\n11686\n11687\n11688\n11689\n11691\n11692\n11694\n11695\n11696\n11697\n11699\n11700\n11703\n11704\n11707\n11708\n11709\n11710\n11711\n11712\n11714\n11715\n11717\n11720\n11722\n11724\n11725\n11726\n11727\n11728\n11729\n11731\n11732\n11733\n11734\n11735\n11736\n11737\n11739\n11742\n11743\n11746\n11749\n11750\n11752\n11753\n11755\n11756\n11759\n11760\n11762\n11763\n11764\n11768\n11769\n11770\n11772\n11773\n11776\n11777\n11779\n11780\n11781\n11782\n11786\n11787\n11789\n11790\n11792\n11794\n11797\n11798\n11799\n11800\n11801\n11803\n11808\n11809\n11810\n11813\n11814\n11818\n11819\n11820\n11821\n11822\n11826\n11828\n11834\n11835\n11836\n11837\n11838\n11839\n11840\n11841\n11842\n11844\n11845\n11846\n11847\n11848\n11850\n11851\n11855\n11856\n11857\n11861\n11862\n11863\n11864\n11865\n11866\n11867\n11868\n11869\n11870\
n11871\n11872\n11874\n11875\n11876\n11877\n11878\n11879\n11880\n11881\n11882\n11883\n11886\n11888\n11889\n11890\n11891\n11893\n11895\n11896\n11897\n11898\n11899\n11901\n11902\n11903\n11904\n11906\n11908\n11909\n11913\n11916\n11917\n11919\n11920\n11921\n11922\n11924\n11926\n11927\n11928\n11929\n11930\n11932\n11936\n11938\n11939\n11940\n11941\n11943\n11946\n11947\n11949\n11950\n11951\n11952\n11953\n11954\n11957\n11959\n11960\n11961\n11963\n11964\n11965\n11967\n11969\n11970\n11971\n11974\n11975\n11978\n11979\n11981\n11983\n11984\n11986\n11989\n11990\n11993\n11994\n11995\n11999\n12001\n12008\n12009\n12010\n12011\n12012\n12013\n12014\n12015\n12017\n12018\n12019\n12020\n12021\n12022\n12023\n12024\n12025\n12026\n12027\n12028\n12030\n12031\n12032\n12033\n12034\n12035\n12036\n12037\n12038\n12039\n12040\n12041\n12043\n12044\n12046\n12047\n12048\n12049\n12050\n12051\n12053\n12054\n12055\n12057\n12060\n12062\n12063\n12064\n12066\n12068\n12070\n12073\n12074\n12080\n12083\n12084\n12087\n12089\n12090\n12091\n12092\n12093\n12094\n12095\n12096\n12098\n12104\n12106\n12108\n12109\n12111\n12120\n12122\n12124\n12130\n12144\n12146\n12147\n12153\n12154\n12156\n12158\n12162\n12169\n12173\n12176\n12177\n12178\n12179\n12180\n12181\n12182\n12183\n12184\n12185\n12186\n12189\n12190\n12191\n12193\n12197\n12199\n12200\n12201\n12202\n12203\n12206\n12207\n12208\n12209\n12210\n12213\n12214\n12216\n12217\n12221\n12224\n12226\n12228\n12229\n12230\n12231\n12238\n12239\n12240\n12241\n12242\n12243\n12244\n12245\n12246\n12247\n12248\n12249\n12250\n12252\n12254\n12255\n12256\n12258\n12259\n12260\n12261\n12262\n12263\n12267\n12271\n12275\n12280\n12281\n12282\n12283\n12284\n12290\n12296\n12297\n12301\n12303\n12305\n12308\n12312\n12314\n12316\n12318\n12321\n12322\n12323\n12324\n12325\n12326\n12327\n12328\n12330\n12331\n12332\n12333\n12334\n12335\n12337\n12339\n12340\n12341\n12345\n12346\n12347\n12348\n12349\n12350\n12351\n12352\n12353\n12354\n12355\n12356\n12358\n12359\n12361\n12362\n12364\n12366\n12368\n1237
2\n12374\n12375\n12376\n12380\n12381\n12383\n12385\n12386\n12388\n12390\n12392\n12393\n12394\n12395\n12396\n12398\n12399\n12400\n12401\n12403\n12404\n12405\n12406\n12407\n12408\n12410\n12411\n12412\n12413\n12419\n12420\n12421\n12422\n12425\n12429\n12430\n12432\n12433\n12435\n12436\n12437\n12438\n12440\n12442\n12443\n12446\n12452\n12454\n12456\n12462\n12463\n12464\n12466\n12467\n12470\n12473\n12480\n12481\n12482\n12483\n12486\n12490\n12492\n12493\n12494\n12496\n12497\n12500\n12501\n12502\n12504\n12505\n12510\n12511\n12512\n12515\n12518\n12521\n12522\n12524\n12525\n12529\n12532\n12534\n12536\n12538\n12541\n12544\n12545\n12546\n12547\n12549\n12550\n12551\n12553\n12555\n12556\n12558\n12559\n12561\n12563\n12565\n12566\n12567\n12569\n12570\n12571\n12572\n12573\n12574\n12576\n12579\n12580\n12581\n12582\n12584\n12589\n12590\n12592\n12593\n12594\n12596\n12600\n12601\n12603\n12610\n12613\n12614\n12615\n12616\n12618\n12619\n12621\n12622\n12624\n12625\n12626\n12627\n12628\n12629\n12631\n12632\n12633\n12634\n12635\n12639\n12640\n12642\n12643\n12645\n12646\n12647\n12648\n12650\n12652\n12653\n12656\n12658\n12660\n12662\n12664\n12666\n12667\n12670\n12671\n12673\n12674\n12675\n12676\n12677\n12678\n12679\n12680\n12683\n12684\n12685\n12686\n12687\n12688\n12689\n12691\n12692\n12693\n12694\n12696\n12698\n12699\n12700\n12701\n12702\n12703\n12707\n12708\n12709\n12710\n12711\n12712\n12713\n12714\n12716\n12719\n12721\n12722\n12728\n12729\n12730\n12731\n12732\n12733\n12734\n12735\n12736\n12737\n12738\n12739\n12740\n12741\n12742\n12743\n12748\n12750\n12751\n12753\n12754\n12756\n12758\n12759\n12760\n12761\n12766\n12767\n12768\n12769\n12770\n12771\n12772\n12773\n12774\n12775\n12776\n12777\n12779\n12780\n12781\n12782\n12783\n12784\n12785\n12786\n12787\n12789\n12790\n12792\n12793\n12794\n12795\n12797\n12799\n12800\n12801\n12803\n12805\n12806\n12807\n12808\n12809\n12810\n12811\n12815\n12816\n12817\n12818\n12819\n12820\n12821\n12822\n12823\n12824\n12825\n12826\n12827\n12828\n12829\n12830\n12831\n12
832\n12833\n12834\n12835\n12836\n12837\n12838\n12839\n12840\n12841\n12842\n12843\n12846\n12847\n12848\n12849\n12855\n12857\n12860\n12863\n12865\n12869\n12870\n12871\n12872\n12873\n12875\n12876\n12877\n12878\n12880\n12881\n12882\n12884\n12886\n12887\n12888\n12889\n12890\n12891\n12893\n12896\n12897\n12898\n12900\n12902\n12903\n12904\n12906\n12907\n12909\n12912\n12913\n12914\n12915\n12919\n12920\n12922\n12923\n12924\n12926\n12927\n12928\n12933\n12934\n12935\n12936\n12937\n12938\n12939\n12940\n12942\n12943\n12944\n12946\n12948\n12949\n12950\n12951\n12955\n12956\n12957\n12961\n12962\n12963\n12965\n12966\n12967\n12968\n12969\n12970\n12971\n12975\n12976\n12977\n12978\n12980\n12981\n12982\n12984\n12985\n12989\n12993\n12994\n12995\n12997\n12999\n13000\n13001\n13002\n13005\n13006\n13009\n13014\n13016\n13020\n13021\n13023\n13025\n13026\n13027\n13028\n13030\n13031\n13032\n13035\n13037\n13039\n13040\n13041\n13044\n13045\n13047\n13048\n13049\n13050\n13051\n13052\n13053\n13055\n13056\n13057\n13058\n13059\n13060\n13061\n13062\n13064\n13066\n13067\n13068\n13069\n13070\n13071\n13073\n13075\n13077\n13078\n13080\n13083\n13084\n13085\n13087\n13088\n13089\n13092\n13093\n13094\n13095\n13097\n13099\n13100\n13102\n13104\n13106\n13107\n13108\n13109\n13113\n13118\n13120\n13125\n13127\n13133\n13134\n13135\n13136\n13139\n13140\n13143\n13144\n13145\n13148\n13149\n13150\n13151\n13152\n13154\n13155\n13156\n13157\n13158\n13162\n13163\n13164\n13168\n13169\n13170\n13171\n13172\n13173\n13174\n13175\n13177\n13179\n13181\n13182\n13186\n13187\n13189\n13193\n13196\n13199\n13202\n13203\n13205\n13206\n13211\n13212\n13213\n13214\n13215\n13221\n13223\n13224\n13225\n13227\n13228\n13229\n13232\n13234\n13235\n13236\n13240\n13245\n13246\n13247\n13248\n13249\n13254\n13255\n13256\n13258\n13259\n13260\n13261\n13263\n13264\n13265\n13266\n13267\n13268\n13269\n13270\n13271\n13276\n13279\n13280\n13283\n13285\n13286\n13297\n13298\n13299\n13300\n13301\n13305\n13306\n13307\n13309\n13310\n13311\n13312\n13313\n13315\n13316\n
13317\n13318\n13322\n13324\n13325\n13327\n13328\n13329\n13330\n13331\n13333\n13335\n13336\n13338\n13339\n13340\n13341\n13343\n13345\n13347\n13348\n13349\n13351\n13352\n13353\n13355\n13356\n13357\n13359\n13361\n13363\n13364\n13368\n13371\n13375\n13377\n13378\n13380\n13381\n13384\n13385\n13386\n13387\n13389\n13390\n13395\n13397\n13398\n13399\n13401\n13405\n13406\n13414\n13417\n13423\n13426\n13427\n13429\n13432\n13433\n13437\n13443\n13444\n13447\n13451\n13452\n13453\n13454\n13455\n13466\n13471\n13473\n13475\n13476\n13477\n13478\n13479\n13480\n13481\n13482\n13483\n13484\n13485\n13486\n13488\n13491\n13492\n13493\n13494\n13495\n13497\n13498\n13504\n13506\n13508\n13517\n13518\n13521\n13522\n13523\n13524\n13525\n13530\n13532\n13533\n13534\n13535\n13537\n13539\n13540\n13541\n13542\n13543\n13545\n13546\n13547\n13551\n13553\n13554\n13559\n13565\n13572\n13577\n13581\n13589\n13592\n13594\n13600\n13602\n13603\n13604\n13606\n13608\n13609\n13612\n13613\n13614\n13617\n13619\n13620\n13621\n13622\n13623\n13624\n13625\n13630\n13631\n13633\n13635\n13640\n13641\n13642\n13643\n13644\n13648\n13649\n13650\n13651\n13653\n13654\n13655\n13656\n13657\n13658\n13659\n13660\n13663\n13664\n13665\n13667\n13668\n13670\n13671\n13672\n13673\n13674\n13676\n13677\n13678\n13682\n13684\n13686\n13688\n13689\n13690\n13692\n13694\n13695\n13697\n13698\n13699\n13700\n13701\n13702\n13705\n13708\n13710\n13711\n13712\n13716\n13720\n13723\n13724\n13725\n13726\n13727\n13731\n13732\n13733\n13734\n13735\n13736\n13737\n13739\n13741\n13742\n13743\n13744\n13745\n13746\n13747\n13748\n13750\n13751\n13754\n13755\n13756\n13758\n13760\n13764\n13765\n13766\n13767\n13769\n13770\n13771\n13772\n13773\n13774\n13775\n13776\n13777\n13779\n13781\n13782\n13785\n13786\n13787\n13789\n13790\n13791\n13792\n13794\n13798\n13799\n13800\n13801\n13802\n13803\n13804\n13805\n13807\n13808\n13810\n13811\n13812\n13813\n13814\n13815\n13816\n13817\n13818\n13819\n13820\n13821\n13822\n13823\n13824\n13825\n13826\n13827\n13830\n13831\n13832\n13833\n13834
\n13836\n13837\n13838\n13839\n13840\n13841\n13842\n13843\n13844\n13845\n13846\n13848\n13849\n13850\n13851\n13852\n13853\n13854\n13855\n13857\n13858\n13859\n13860\n13861\n13862\n13863\n13864\n13865\n13866\n13867\n13868\n13871\n13872\n13873\n13874\n13875\n13876\n13877\n13878\n13879\n13881\n13882\n13883\n13884\n13885\n13886\n13887\n13888\n13889\n13890\n13893\n13894\n13895\n13896\n13897\n13898\n13899\n13900\n13901\n13902\n13903\n13904\n13905\n13906\n13907\n13908\n13909\n13911\n13915\n13919\n13920\n13922\n13923\n13924\n13928\n13929\n13930\n13932\n13935\n13939\n13940\n13941\n13942\n13943\n13944\n13946\n13948\n13949\n13952\n13953\n13955\n13956\n13957\n13958\n13959\n13961\n13962\n13963\n13964\n13965\n13969\n13970\n13971\n13973\n13975\n13976\n13977\n13981\n13982\n13983\n13984\n13985\n13986\n13987\n13989\n13991\n13992\n13993\n13995\n13996\n13997\n13999\n14001\n14002\n14003\n14005\n14006\n14008\n14009\n14010\n14011\n14012\n14015\n14016\n14019\n14020\n14021\n14024\n14025\n14027\n14028\n14030\n14031\n14032\n14034\n14035\n14037\n14038\n14039\n14041\n14042\n14043\n14045\n14047\n14048\n14049\n14050\n14051\n14054\n14055\n14057\n14058\n14063\n14064\n14065\n14068\n14072\n14078\n14081\n14082\n14083\n14084\n14087\n14089\n14090\n14094\n14096\n14097\n14098\n14099\n14100\n14101\n14102\n14103\n14105\n14107\n14108\n14110\n14111\n14114\n14115\n14116\n14121\n14125\n14126\n14128\n14130\n14131\n14134\n14135\n14136\n14139\n14143\n14147\n14149\n14150\n14152\n14153\n14154\n14155\n14158\n14161\n14163\n14164\n14167\n14170\n14171\n14175\n14176\n14177\n14178\n14179\n14182\n14183\n14187\n14190\n14194\n14195\n14199\n14201\n14204\n14212\n14215\n14216\n14218\n14219\n14220\n14223\n14224\n14225\n14226\n14228\n14236\n14237\n14238\n14239\n14240\n14242\n14244\n14245\n14246\n14247\n14248\n14249\n14251\n14252\n14253\n14254\n14255\n14256\n14258\n14259\n14261\n14263\n14264\n14265\n14266\n14267\n14269\n14271\n14273\n14275\n14276\n14280\n14281\n14283\n14285\n14286\n14287\n14289\n14290\n14292\n14293\n14294\n14297\n142
98\n14300\n14302\n14303\n14304\n14305\n14306\n14308\n14309\n14310\n14312\n14313\n14318\n14322\n14323\n14325\n14326\n14328\n14332\n14334\n14335\n14342\n14343\n14345\n14347\n14349\n14350\n14351\n14352\n14353\n14354\n14359\n14361\n14362\n14363\n14364\n14365\n14366\n14368\n14371\n14372\n14373\n14374\n14376\n14377\n14378\n14379\n14380\n14382\n14383\n14384\n14385\n14387\n14388\n14393\n14394\n14396\n14399\n14400\n14401\n14402\n14403\n14404\n14405\n14406\n14408\n14409\n14410\n14411\n14412\n14413\n14414\n14415\n14416\n14417\n14418\n14420\n14421\n14422\n14423\n14424\n14427\n14430\n14431\n14435\n14437\n14440\n14441\n14445\n14446\n14447\n14451\n14454\n14459\n14460\n14462\n14463\n14465\n14473\n14478\n14479\n14480\n14481\n14483\n14484\n14485\n14486\n14487\n14488\n14489\n14492\n14494\n14497\n14500\n14502\n14505\n14506\n14507\n14508\n14510\n14512\n14513\n14515\n14516\n14518\n14521\n14524\n14526\n14530\n14534\n14541\n14542\n14545\n14558\n14566\n14578\n14579\n14580\n14581\n14585\n14589\n14592\n14593\n14595\n14599\n14601\n14602\n14603\n14607\n14610\n14611\n14613\n14615\n14616\n14617\n14620\n14621\n14622\n14623\n14624\n14625\n14626\n14628\n14632\n14633\n14634\n14639\n14640\n14647\n14649\n14651\n14653\n14655\n14656\n14659\n14663\n14664\n14665\n14666\n14668\n14670\n14673\n14676\n14677\n14678\n14679\n14680\n14681\n14682\n14683\n14684\n14686\n14690\n14692\n14696\n14697\n14698\n14699\n14702\n14703\n14705\n14707\n14708\n14709\n14710\n14713\n14714\n14715\n14716\n14717\n14719\n14721\n14722\n14723\n14724\n14725\n14729\n14730\n14734\n14736\n14740\n14743\n14744\n14746\n14748\n14749\n14753\n14759\n14760\n14765\n14766\n14767\n14768\n14769\n14771\n14773\n14774\n14775\n14777\n14779\n14783\n14787\n14790\n14792\n14793\n14795\n14797\n14802\n14804\n14806\n14807\n14809\n14813\n14814\n14818\n14821\n14823\n14834\n14841\n14842\n14851\n14852\n14854\n14855\n14859\n14863\n14864\n14865\n14867\n14868\n14869\n14877\n14880\n14884\n14887\n14892\n14893\n14900\n14901\n14902\n14906\n14907\n14912\n14916\n14919\n14922\n1
4924\n14927\n14928\n14929\n14930\n14936\n14937\n14938\n14941\n14942\n14943\n14947\n14952\n14953\n14957\n14958\n14962\n14963\n14964\n14965\n14966\n14967\n14968\n14973\n14974\n14975\n14977\n14978\n14980\n14981\n14983\n14984\n14986\n14989\n14990\n14992\n14995\n14997\n14998\n15001\n15006\n15008\n15009\n15013\n15014\n15016\n15018\n15021\n15024\n15028\n15029\n15030\n15032\n15033\n15048\n15061\n15062\n15063\n15065\n15067\n15068\n15070\n15071\n15072\n15073\n15077\n15079\n15080\n15085\n15087\n15093\n15096\n15097\n15099\n15101\n15106\n15107\n15108\n15110\n15111\n15112\n15114\n15116\n15119\n15121\n15122\n15126\n15127\n15128\n15129\n15133\n15136\n15138\n15139\n15140\n15143\n15144\n15146\n15149\n15159\n15165\n15166\n15167\n15168\n15169\n15170\n15171\n15173\n15174\n15175\n15179\n15182\n15186\n15187\n15188\n15189\n15191\n15192\n15195\n15197\n15198\n15202\n15203\n15204\n15206\n15207\n15209\n15210\n15211\n15212\n15215\n15218\n15220\n15223\n15230\n15234\n15236\n15237\n15239\n15240\n15241\n15242\n15244\n15249\n15250\n15255\n15257\n15260\n15263\n15272\n15281\n15291\n15299\n15300\n15303\n15304\n15306\n15307\n15309\n15310\n15318\n15320\n15321\n15322\n15338\n15340\n15341\n15342\n15343\n15344\n15345\n15347\n15350\n15353\n15366\n15367\n15370\n15372\n15374\n15377\n15383\n15394\n15396\n15399\n15403\n15405\n15408\n15410\n15416\n15421\n15422\n15423\n15430\n15432\n15434\n15435\n15439\n15440\n15442\n15444\n15446\n15448\n15449\n15452\n15456\n15458\n15459\n15466\n15468\n15476\n15477\n15478\n15480\n15489\n15493\n15506\n15519\n15520\n15521\n15525\n15534\n15535\n15537\n15538\n15540\n15542\n15543\n15544\n15545\n15549\n15550\n15552\n15556\n15557\n15561\n15563\n15564\n15566\n15571\n15572\n15573\n15576\n15577\n15578\n15579\n15581\n15582\n15588\n15589\n15597\n15599\n15600\n15601\n15602\n15608\n15612\n15617\n15623\n15624\n15637\n15644\n15648\n15652\n15653\n15658\n15659\n15666\n15667\n15672\n15676\n15679\n15682\n15683\n15685\n15688\n15689\n15694\n15700\n15703\n15704\n15705\n15707\n15709\n15711\n15714\n15715\
n15716\n15717\n15718\n15719\n15720\n15721\n15722\n15723\n15724\n15726\n15727\n15729\n15730\n15735\n15737\n15742\n15745\n15747\n15748\n15755\n15759\n15762\n15764\n15766\n15773\n15774\n15775\n15778\n15781\n15785\n15786\n15797\n15800\n15802\n15803\n15807\n15812\n15821\n15824\n15827\n15828\n15829\n15831\n15836\n15837\n15840\n15841\n15845\n15848\n15850\n15855\n15859\n15863\n15865\n15867\n15870\n15872\n15878\n15879\n15881\n15882\n15883\n15887\n15891\n15892\n15898\n15899\n15910\n15912\n15917\n15920\n15921\n15922\n15929\n15934\n15936\n15937\n15939\n15940\n15943\n15944\n15945\n15947\n15957\n15958\n15959\n15960\n15962\n15975\n15976\n15981\n15982\n15989\n15992\n15993\n15994\n15996\n16000\n16001\n16002\n16003\n16005\n16006\n16009\n16012\n16013\n16014\n16017\n16018\n16026\n16027\n16037\n16039\n16042\n16044\n16049\n16054\n16058\n16059\n16060\n16069\n16070\n16072\n16077\n16080\n16083\n16090\n16091\n16092\n16094\n16095\n16098\n16099\n16101\n16105\n16106\n16107\n16114\n16115\n16117\n16119\n16127\n16131\n16138\n16139\n16141\n16142\n16144\n16145\n16147\n16148\n16150\n16153\n16154\n16156\n16157\n16161\n16163\n16167\n16170\n16172\n16175\n16176\n16177\n16185\n16190\n16191\n16196\n16197\n16198\n16199\n16212\n16213\n16215\n16217\n16223\n16225\n16234\n16239\n16241\n16242\n16249\n16254\n16256\n16257\n16260\n16262\n16269\n16272\n16278\n16279\n16282\n16286\n16287\n16288\n16289\n16290\n16294\n16300\n16305\n16310\n16311\n16312\n16313\n16315\n16316\n16317\n16320\n16326\n16332\n16333\n16339\n16340\n16341\n16345\n16346\n16347\n16355\n16358\n16361\n16362\n16371\n16373\n16374\n16377\n16378\n16382\n16386\n16387\n16388\n16389\n16390\n16391\n16395\n16398\n16403\n16412\n16414\n16416\n16419\n16421\n16422\n16423\n16424\n16430\n16433\n16434\n16435\n16442\n16445\n16453\n16454\n16457\n16458\n16461\n16472\n16474\n16475\n16476\n16479\n16492\n16496\n16501\n16502\n16506\n16507\n16510\n16511\n16515\n16520\n16524\n16527\n16528\n16538\n16540\n16543\n16545\n16549\n16550\n16553\n16558\n16566\n16574\n16582\n16583\n1658
5\n16586\n16589\n16590\n16591\n16597\n16601\n16603\n16608\n16614\n16616\n16617\n16620\n16624\n16628\n16633\n16646\n16647\n16649\n16651\n16652\n16653\n16657\n16658\n16663\n16666\n16667\n16668\n16670\n16678\n16687\n16691\n16692\n16693\n16702\n16703\n16706\n16711\n16724\n16725\n16727\n16728\n16733\n16734\n16737\n16738\n16742\n16743\n16748\n16750\n16753\n16757\n16758\n16761\n16762\n16763\n16772\n16775\n16777\n16781\n16785\n16787\n16788\n16789\n16790\n16798\n16799\n16801\n16803\n16810\n16811\n16815\n16816\n16823\n16827\n16831\n16832\n16834\n16836\n16839\n16842\n16843\n16846\n16847\n16848\n16849\n16853\n16863\n16871\n16872\n16876\n16877\n16878\n16879\n16880\n16883\n16885\n16888\n16890\n16892\n16896\n16898\n16900\n16903\n16905\n16906\n16909\n16914\n16915\n16918\n16926\n16929\n16938\n16946\n16955\n16960\n16961\n16963\n16965\n16968\n16970\n16971\n16973\n16975\n16979\n16980\n16986\n16987\n16991\n16992\n16996\n17001\n17002\n17005\n17006\n17008\n17011\n17015\n17019\n17020\n17024\n17027\n17030\n17035\n17036\n17037\n17038\n17050\n17051\n17055\n17061\n17079\n17085\n17092\n17105\n17108\n17109\n17112\n17116\n17119\n17124\n17126\n17131\n17133\n17139\n17142\n17144\n17153\n17154\n17156\n17157\n17163\n17165\n17167\n17169\n17176\n17178\n17181\n17184\n17186\n17190\n17191\n17201\n17206\n17208\n17209\n17215\n17226\n17229\n17246\n17247\n17251\n17252\n17253\n17262\n17266\n17267\n17270\n17272\n17273\n17278\n17284\n17288\n17295\n17296\n17297\n17299\n17303\n17304\n17307\n17308\n17310\n17311\n17312\n17314\n17315\n17317\n17324\n17327\n17328\n17336\n17337\n17338\n17339\n17340\n17343\n17344\n17346\n17348\n17349\n17351\n17352\n17353\n17354\n17357\n17358\n17359\n17360\n17361\n17362\n17364\n17365\n17367\n17368\n17371\n17372\n17373\n17374\n17375\n17377\n17378\n17380\n17382\n17383\n17385\n17386\n17387\n17390\n17391\n17392\n17393\n17394\n17398\n17399\n17400\n17401\n17402\n17403\n17404\n17405\n17406\n17407\n17408\n17409\n17410\n17411\n17412\n17413\n17414\n17415\n17417\n17418\n17420\n17421\n17422\n17423\n17
424\n17425\n17427\n17428\n17429\n17430\n17433\n17438\n17440\n17442\n17445\n17446\n17447\n17448\n17449\n17450\n17453\n17454\n17455\n17456\n17457\n17458\n17461\n17462\n17463\n17464\n17466\n17468\n17469\n17480\n17484\n17490\n17491\n17496\n17497\n17501\n17502\n17506\n17507\n17508\n17510\n17514\n17515\n17516\n17518\n17519\n17520\n17521\n17524\n17527\n17532\n17537\n17538\n17549\n17558\n17563\n17564\n17565\n17566\n17567\n17568\n17569\n17572\n17576\n17577\n17582\n17584\n17586\n17589\n17592\n17593\n17594\n17596\n17597\n17599\n17601\n17603\n17604\n17608\n17609\n17610\n17611\n17613\n17616\n17617\n17619\n17623\n17624\n17625\n17628\n17631\n17632\n17633\n17634\n17635\n17639\n17641\n17642\n17644\n17646\n17648\n17649\n17650\n17652\n17653\n17655\n17657\n17658\n17659\n17660\n17661\n17662\n17664\n17670\n17674\n17676\n17677\n17678\n17679\n17682\n17686\n17687\n17690\n17709\n17719\n17720\n17721\n17723\n17724\n17725\n17727\n17728\n17731\n17732\n17733\n17734\n17735\n17736\n17737\n17738\n17740\n17741\n17742\n17743\n17746\n17760\n17761\n17762\n17763\n17768\n17769\n17772\n17773\n17778\n17780\n17781\n17782\n17784\n17785\n17787\n17790\n17791\n17792\n17794\n17796\n17797\n17801\n17802\n17804\n17809\n17810\n17811\n17812\n17813\n17815\n17816\n17818\n17819\n17824\n17833\n17837\n17839\n17840\n17843\n17844\n17845\n17848\n17850\n17851\n17853\n17854\n17855\n17856\n17857\n17858\n17859\n17860\n17861\n17862\n17864\n17866\n17867\n17868\n17869\n17870\n17871\n17872\n17874\n17875\n17876\n17877\n17878\n17879\n17880\n17883\n17884\n17886\n17887\n17888\n17889\n17893\n17895\n17898\n17900\n17903\n17912\n17916\n17917\n17923\n17927\n17932\n17934\n17936\n17937\n17938\n17944\n17945\n17946\n17947\n17948\n17951\n17952\n17953\n17956\n17957\n17958\n17959\n17961\n17962\n17963\n17964\n17968\n17969\n17970\n17971\n17973\n17974\n17977\n17978\n17981\n17982\n17983\n17985\n17988\n17989\n17990\n17997\n17998\n18003\n18005\n18006\n18010\n18015\n18018\n18019\n18020\n18021\n18022\n18023\n18024\n18025\n18026\n18028\n18030\n18033\n18034\n
18035\n18036\n18037\n18039\n18043\n18048\n18049\n18055\n18057\n18059\n18061\n18062\n18064\n18065\n18066\n18068\n18069\n18070\n18071\n18075\n18076\n18079\n18083\n18084\n18086\n18087\n18088\n18089\n18090\n18091\n18092\n18094\n18095\n18097\n18098\n18101\n18102\n18104\n18105\n18106\n18108\n18109\n18111\n18112\n18113\n18114\n18115\n18118\n18119\n18121\n18123\n18124\n18128\n18134\n18135\n18136\n18138\n18139\n18140\n18142\n18143\n18144\n18146\n18148\n18149\n18150\n18158\n18159\n18165\n18166\n18171\n18172\n18175\n18176\n18202\n18203\n18204\n18205\n18207\n18211\n18214\n18215\n18216\n18217\n18219\n18220\n18221\n18222\n18225\n18226\n18228\n18229\n18231\n18233\n18234\n18235\n18237\n18238\n18239\n18241\n18244\n18245\n18247\n18248\n18249\n18250\n18252\n18255\n18257\n18258\n18259\n18262\n18263\n18267\n18268\n18269\n18271\n18272\n18273\n18274\n18275\n18278\n18281\n18282\n18284\n18285\n18287\n18288\n18294\n18295\n18296\n18297\n18301\n18302\n18303\n18304\n18305\n18306\n18307\n18309\n18320\n18323\n18324\n18325\n18326\n18329\n18330\n18331\n18332\n18333\n18334\n18336\n18341\n18342\n18347\n18349\n18353\n18355\n18356\n18357\n18358\n18359\n18362\n18366\n18371\n18379\n18384\n18386\n18389\n18390\n18392\n18394\n18396\n18397\n18398\n18399\n18401\n18402\n18403\n18405\n18410\n18411\n18417\n18420\n18421\n18422\n18424\n18439\n18440\n18441\n18442\n18443\n18444\n18445\n18446\n18449\n18450\n18451\n18453\n18454\n18457\n18458\n18459\n18460\n18461\n18464\n18467\n18468\n18471\n18476\n18477\n18478\n18480\n18484\n18485\n18486\n18487\n18489\n18490\n18492\n18493\n18494\n18495\n18496\n18497\n18499\n18504\n18505\n18506\n18507\n18508\n18509\n18510\n18512\n18513\n18514\n18515\n18516\n18520\n18521\n18522\n18523\n18527\n18529\n18530\n18531\n18533\n18534\n18535\n18537\n18538\n18539\n18540\n18541\n18544\n18545\n18547\n18548\n18549\n18551\n18552\n18553\n18554\n18556\n18557\n18558\n18560\n18563\n18568\n18571\n18572\n18573\n18574\n18575\n18576\n18579\n18581\n18583\n18584\n18585\n18586\n18587\n18588\n18589\n18590\n18591
\n18592\n18593\n18595\n18596\n18599\n18601\n18602\n18604\n18605\n18608\n18609\n18610\n18611\n18613\n18614\n18615\n18616\n18617\n18618\n18619\n18620\n18621\n18622\n18629\n18630\n18631\n18632\n18634\n18635\n18636\n18639\n18640\n18644\n18645\n18649\n18655\n18657\n18658\n18660\n18662\n18663\n18667\n18668\n18669\n18670\n18678\n18679\n18682\n18683\n18685\n18689\n18692\n18694\n18695\n18696\n18701\n18703\n18713\n18721\n18723\n18726\n18728\n18729\n18733\n18736\n18738\n18739\n18742\n18751\n18752\n18753\n18757\n18761\n18764\n18765\n18773\n18778\n18779\n18780\n18782\n18789\n18790\n18793\n18794\n18796\n18799\n18800\n18803\n18806\n18807\n18814\n18815\n18820\n18823\n18824\n18826\n18827\n18832\n18833\n18834\n18835\n18839\n18840\n18841\n18845\n18846\n18847\n18848\n18850\n18851\n18852\n18853\n18855\n18856\n18857\n18858\n18861\n18862\n18863\n18864\n18865\n18866\n18867\n18868\n18869\n18870\n18871\n18872\n18883\n18884\n18886\n18888\n18898\n18900\n18904\n18906\n18909\n18911\n18912\n18913\n18919\n18922\n18925\n18926\n18927\n18931\n18935\n18937\n18940\n18941\n18943\n18944\n18946\n18947\n18948\n18951\n18952\n18953\n18955\n18960\n18977\n18978\n18979\n18982\n18984\n18986\n18987\n18994\n19010\n19011\n19014\n19019\n19025\n19027\n19029\n19031\n19032\n19033\n19037\n19050\n19054\n19067\n19068\n19071\n19073\n19075\n19076\n19077\n19078\n19080\n19081\n19084\n19085\n19086\n19090\n19091\n19094\n19098\n19100\n19102\n19103\n19104\n19106\n19109\n19110\n19114\n19115\n19118\n19119\n19120\n19122\n19123\n19125\n19127\n19128\n19131\n19132\n19133\n19135\n19136\n19137\n19138\n19140\n19141\n19142\n19143\n19144\n19145\n19148\n19149\n19150\n19152\n19153\n19154\n19155\n19156\n19157\n19158\n19159\n19161\n19163\n19164\n19167\n19170\n19173\n19174\n19177\n19178\n19179\n19181\n19183\n19185\n19188\n19191\n19192\n19193\n19195\n19196\n19197\n19199\n19200\n19202\n19211\n19212\n19213\n19216\n19217\n19218\n19219\n19220\n19221\n19224\n19225\n19226\n19227\n19228\n19230\n19231\n19232\n19234\n19236\n19237\n19238\n19242\n19250\n192
52\n19255\n19256\n19257\n19258\n19259\n19260\n19262\n19263\n19264\n19265\n19268\n19269\n19273\n19274\n19275\n19282\n19287\n19288\n19290\n19291\n19293\n19294\n19295\n19298\n19300\n19302\n19303\n19304\n19305\n19308\n19309\n19314\n19315\n19316\n19318\n19319\n19322\n19323\n19328\n19329\n19330\n19331\n19334\n19335\n19337\n19340\n19341\n19342\n19343\n19345\n19346\n19347\n19348\n19349\n19350\n19352\n19354\n19358\n19361\n19362\n19363\n19364\n19365\n19367\n19369\n19371\n19372\n19375\n19377\n19379\n19380\n19383\n19384\n19386\n19400\n19401\n19402\n19403\n19405\n19412\n19415\n19417\n19419\n19421\n19432\n19433\n19434\n19435\n19438\n19439\n19440\n19444\n19445\n19446\n19448\n19449\n19450\n19451\n19454\n19455\n19457\n19459\n19469\n19470\n19471\n19475\n19476\n19479\n19481\n19482\n19483\n19484\n19488\n19490\n19492\n19493\n19494\n19495\n19496\n19499\n19500\n19501\n19504\n19506\n19507\n19509\n19511\n19512\n19514\n19515\n19516\n19517\n19518\n19519\n19520\n19522\n19523\n19524\n19525\n19526\n19528\n19530\n19536\n19537\n19539\n19540\n19541\n19544\n19545\n19546\n19547\n19548\n19549\n19551\n19553\n19554\n19555\n19561\n19564\n19565\n19566\n19573\n19575\n19576\n19580\n19581\n19582\n19585\n19586\n19588\n19590\n19595\n19604\n19605\n19606\n19609\n19611\n19612\n19613\n19614\n19616\n19620\n19622\n19625\n19626\n19627\n19628\n19630\n19631\n19632\n19633\n19636\n19638\n19639\n19640\n19641\n19642\n19646\n19647\n19653\n19654\n19661\n19665\n19666\n19667\n19668\n19670\n19671\n19673\n19674\n19675\n19676\n19677\n19679\n19680\n19681\n19682\n19685\n19686\n19687\n19688\n19690\n19691\n19692\n19694\n19695\n19696\n19697\n19703\n19705\n19707\n19708\n19712\n19715\n19716\n19718\n19719\n19720\n19722\n19726\n19730\n19731\n19732\n19735\n19736\n19739\n19743\n19744\n19754\n19755\n19756\n19757\n19761\n19762\n19765\n19767\n19768\n19771\n19772\n19775\n19780\n19790\n19793\n19795\n19796\n19799\n19800\n19803\n19811\n19815\n19817\n19829\n19831\n19833\n19835\n19836\n19837\n19838\n19839\n19843\n19850\n19856\n19859\n19860\n19861\n1
9862\n19863\n19864\n19865\n19869\n19870\n19871\n19872\n19877\n19880\n19885\n19887\n19892\n19893\n19894\n19895\n19900\n19903\n19905\n19906\n19907\n19908\n19912\n19914\n19915\n19916\n19920\n19922\n19924\n19925\n19926\n19927\n19928\n19931\n19933\n19936\n19941\n19943\n19944\n19945\n19950\n19951\n19954\n19955\n19962\n19965\n19966\n19967\n19968\n19969\n19970\n19971\n19972\n19973\n19974\n19976\n19983\n19986\n19988\n19990\n19995\n19996\n19997\n19999\n20006\n20008\n20016\n20017\n20018\n20019\n20022\n20023\n20024\n20027\n20028\n20030\n20035\n20040\n20041\n20042\n20043\n20045\n20047\n20048\n20051\n20052\n20053\n20054\n20055\n20056\n20059\n20060\n20061\n20062\n20064\n20066\n20068\n20069\n20072\n20079\n20080\n20082\n20084\n20085\n20090\n20091\n20095\n20096\n20098\n20100\n20101\n20102\n20106\n20108\n20109\n20113\n20114\n20115\n20116\n20120\n20121\n20124\n20125\n20126\n20127\n20138\n20139\n20141\n20143\n20145\n20146\n20147\n20151\n20152\n20153\n20160\n20161\n20163\n20169\n20170\n20172\n20176\n20183\n20185\n20190\n20191\n20193\n20196\n20197\n20205\n20207\n20209\n20212\n20213\n20214\n20216\n20217\n20218\n20219\n20220\n20221\n20222\n20223\n20224\n20226\n20228\n20229\n20230\n20231\n20234\n20237\n20238\n20246\n20248\n20249\n20255\n20260\n20263\n20264\n20265\n20267\n20269\n20276\n20278\n20279\n20280\n20281\n20282\n20284\n20285\n20286\n20287\n20288\n20289\n20293\n20294\n20295\n20298\n20301\n20303\n20309\n20310\n20315\n20316\n20317\n20323\n20324\n20325\n20326\n20328\n20331\n20333\n20335\n20345\n20346\n20347\n20353\n20354\n20355\n20356\n20358\n20360\n20361\n20362\n20363\n20364\n20365\n20366\n20371\n20372\n20374\n20377\n20379\n20383\n20384\n20388\n20390\n20392\n20395\n20396\n20397\n20398\n20403\n20404\n20405\n20406\n20407\n20408\n20411\n20412\n20413\n20414\n20415\n20416\n20418\n20420\n20422\n20423\n20425\n20435\n20439\n20442\n20444\n20447\n20450\n20454\n20455\n20458\n20460\n20469\n20470\n20471\n20473\n20474\n20476\n20478\n20479\n20483\n20487\n20490\n20491\n20492\n20493\n20496\n20497\n20498\
n20502\n20503\n20504\n20509\n20510\n20513\n20514\n20515\n20516\n20517\n20518\n20521\n20522\n20524\n20526\n20527\n20528\n20532\n20536\n20540\n20541\n20544\n20547\n20548\n20550\n20551\n20552\n20553\n20555\n20556\n20560\n20561\n20564\n20566\n20567\n20570\n20573\n20575\n20576\n20579\n20581\n20584\n20587\n20588\n20589\n20590\n20592\n20593\n20594\n20595\n20596\n20599\n20601\n20602\n20604\n20605\n20607\n20611\n20612\n20615\n20616\n20617\n20619\n20620\n20622\n20624\n20626\n20627\n20628\n20629\n20633\n20637\n20639\n20640\n20641\n20642\n20649\n20650\n20652\n20654\n20658\n20659\n20660\n20661\n20662\n20663\n20664\n20665\n20666\n20667\n20668\n20670\n20677\n20678\n20680\n20682\n20685\n20686\n20687\n20688\n20690\n20691\n20692\n20693\n20694\n20699\n20700\n20702\n20703\n20704\n20705\n20708\n20709\n20710\n20711\n20712\n20713\n20714\n20715\n20717\n20718\n20722\n20723\n20725\n20726\n20730\n20731\n20732\n20736\n20740\n20747\n20752\n20753\n20754\n20755\n20761\n20762\n20765\n20766\n20775\n20781\n20784\n20785\n20786\n20787\n20788\n20789\n20790\n20791\n20792\n20795\n20796\n20798\n20804\n20806\n20807\n20810\n20812\n20817\n20821\n20822\n20823\n20828\n20829\n20831\n20834\n20835\n20837\n20838\n20841\n20845\n20846\n20848\n20849\n20850\n20851\n20852\n20853\n20857\n20858\n20859\n20861\n20864\n20868\n20871\n20874\n20875\n20878\n20879\n20880\n20882\n20884\n20888\n20893\n20895\n20896\n20898\n20900\n20901\n20902\n20904\n20906\n20909\n20911\n20915\n20917\n20918\n20919\n20924\n20925\n20934\n20935\n20936\n20938\n20939\n20940\n20941\n20944\n20945\n20947\n20948\n20953\n20954\n20957\n20961\n20966\n20977\n20978\n20979\n20980\n20983\n20986\n20987\n20996\n20999\n21000\n21005\n21007\n21010\n21012\n21015\n21022\n21023\n21024\n21025\n21026\n21029\n21033\n21035\n21036\n21037\n21038\n21040\n21041\n21042\n21043\n21044\n21045\n21046\n21047\n21048\n21049\n21051\n21052\n21053\n21054\n21055\n21056\n21057\n21061\n21062\n21063\n21064\n21065\n21066\n21067\n21070\n21074\n21076\n21081\n21082\n21084\n21085\n21086\n21090\n2109
2\n21098\n21099\n21110\n21111\n21113\n21117\n21119\n21120\n21122\n21123\n21126\n21131\n21134\n21136\n21143\n21144\n21145\n21146\n21147\n21149\n21151\n21154\n21156\n21157\n21161\n21164\n21167\n21168\n21169\n21170\n21172\n21177\n21178\n21179\n21184\n21187\n21188\n21194\n21195\n21196\n21197\n21198\n21199\n21200\n21202\n21217\n21219\n21222\n21227\n21230\n21236\n21250\n21255\n21266\n21267\n21269\n21277\n21279\n21280\n21282\n21284\n21285\n21295\n21301\n21303\n21305\n21309\n21310\n21311\n21313\n21314\n21315\n21316\n21317\n21319\n21325\n21334\n21335\n21337\n21341\n21343\n21347\n21350\n21354\n21355\n21356\n21359\n21361\n21367\n21368\n21372\n21374\n21375\n21376\n21380\n21384\n21386\n21387\n21395\n21396\n21400\n21405\n21406\n21409\n21412\n21413\n21417\n21423\n21426\n21427\n21429\n21430\n21431\n21436\n21439\n21449\n21457\n21462\n21465\n21467\n21474\n21477\n21482\n21483\n21484\n21487\n21492\n21493\n21495\n21499\n21500\n21505\n21509\n21513\n21514\n21515\n21517\n21520\n21522\n21526\n21527\n21530\n21531\n21532\n21535\n21539\n21543\n21545\n21546\n21547\n21548\n21552\n21558\n21561\n21567\n21571\n21574\n21576\n21577\n21578\n21579\n21583\n21587\n21588\n21589\n21596\n21599\n21602\n21604\n21609\n21613\n21616\n21618\n21621\n21623\n21625\n21633\n21635\n21636\n21639\n21642\n21643\n21646\n21647\n21648\n21657\n21659\n21660\n21661\n21663\n21664\n21666\n21667\n21670\n21671\n21673\n21676\n21677\n21678\n21684\n21686\n21687\n21688\n21690\n21700\n21709\n21710\n21711\n21714\n21715\n21719\n21720\n21721\n21722\n21725\n21726\n21727\n21732\n21737\n21738\n21739\n21741\n21742\n21745\n21746\n21747\n21752\n21755\n21756\n21758\n21759\n21760\n21761\n21763\n21770\n21771\n21772\n21775\n21776\n21779\n21780\n21782\n21784\n21789\n21790\n21793\n21794\n21798\n21799\n21801\n21802\n21804\n21809\n21813\n21814\n21815\n21816\n21817\n21821\n21824\n21825\n21826\n21827\n21828\n21829\n21830\n21832\n21834\n21836\n21837\n21839\n21840\n"
  },
  {
    "path": "timm/data/_info/imagenet_a_indices.txt",
    "content": "6\n11\n13\n15\n17\n22\n23\n27\n30\n37\n39\n42\n47\n50\n57\n70\n71\n76\n79\n89\n90\n94\n96\n97\n99\n105\n107\n108\n110\n113\n124\n125\n130\n132\n143\n144\n150\n151\n207\n234\n235\n254\n277\n283\n287\n291\n295\n298\n301\n306\n307\n308\n309\n310\n311\n313\n314\n315\n317\n319\n323\n324\n326\n327\n330\n334\n335\n336\n347\n361\n363\n372\n378\n386\n397\n400\n401\n402\n404\n407\n411\n416\n417\n420\n425\n428\n430\n437\n438\n445\n456\n457\n461\n462\n470\n472\n483\n486\n488\n492\n496\n514\n516\n528\n530\n539\n542\n543\n549\n552\n557\n561\n562\n569\n572\n573\n575\n579\n589\n606\n607\n609\n614\n626\n627\n640\n641\n642\n643\n658\n668\n677\n682\n684\n687\n701\n704\n719\n736\n746\n749\n752\n758\n763\n765\n768\n773\n774\n776\n779\n780\n786\n792\n797\n802\n803\n804\n813\n815\n820\n823\n831\n833\n835\n839\n845\n847\n850\n859\n862\n870\n879\n880\n888\n890\n897\n900\n907\n913\n924\n932\n933\n934\n937\n943\n945\n947\n951\n954\n956\n957\n959\n971\n972\n980\n981\n984\n986\n987\n988\n"
  },
  {
    "path": "timm/data/_info/imagenet_a_synsets.txt",
    "content": "n01498041\nn01531178\nn01534433\nn01558993\nn01580077\nn01614925\nn01616318\nn01631663\nn01641577\nn01669191\nn01677366\nn01687978\nn01694178\nn01698640\nn01735189\nn01770081\nn01770393\nn01774750\nn01784675\nn01819313\nn01820546\nn01833805\nn01843383\nn01847000\nn01855672\nn01882714\nn01910747\nn01914609\nn01924916\nn01944390\nn01985128\nn01986214\nn02007558\nn02009912\nn02037110\nn02051845\nn02077923\nn02085620\nn02099601\nn02106550\nn02106662\nn02110958\nn02119022\nn02123394\nn02127052\nn02129165\nn02133161\nn02137549\nn02165456\nn02174001\nn02177972\nn02190166\nn02206856\nn02219486\nn02226429\nn02231487\nn02233338\nn02236044\nn02259212\nn02268443\nn02279972\nn02280649\nn02281787\nn02317335\nn02325366\nn02346627\nn02356798\nn02361337\nn02410509\nn02445715\nn02454379\nn02486410\nn02492035\nn02504458\nn02655020\nn02669723\nn02672831\nn02676566\nn02690373\nn02701002\nn02730930\nn02777292\nn02782093\nn02787622\nn02793495\nn02797295\nn02802426\nn02814860\nn02815834\nn02837789\nn02879718\nn02883205\nn02895154\nn02906734\nn02948072\nn02951358\nn02980441\nn02992211\nn02999410\nn03014705\nn03026506\nn03124043\nn03125729\nn03187595\nn03196217\nn03223299\nn03250847\nn03255030\nn03291819\nn03325584\nn03355925\nn03384352\nn03388043\nn03417042\nn03443371\nn03444034\nn03445924\nn03452741\nn03483316\nn03584829\nn03590841\nn03594945\nn03617480\nn03666591\nn03670208\nn03717622\nn03720891\nn03721384\nn03724870\nn03775071\nn03788195\nn03804744\nn03837869\nn03840681\nn03854065\nn03888257\nn03891332\nn03935335\nn03982430\nn04019541\nn04033901\nn04039381\nn04067472\nn04086273\nn04099969\nn04118538\nn04131690\nn04133789\nn04141076\nn04146614\nn04147183\nn04179913\nn04208210\nn04235860\nn04252077\nn04252225\nn04254120\nn04270147\nn04275548\nn04310018\nn04317175\nn04344873\nn04347754\nn04355338\nn04366367\nn04376876\nn04389033\nn04399382\nn04442312\nn04456115\nn04482393\nn04507155\nn04509417\nn04532670\nn04540053\nn04554684\nn04562935\nn04591713\nn04606251\nn07583066\nn076
95742\nn07697313\nn07697537\nn07714990\nn07718472\nn07720875\nn07734744\nn07749582\nn07753592\nn07760859\nn07768694\nn07831146\nn09229709\nn09246464\nn09472597\nn09835506\nn11879895\nn12057211\nn12144580\nn12267677\n"
  },
  {
    "path": "timm/data/_info/imagenet_r_indices.txt",
    "content": "1\n2\n4\n6\n8\n9\n11\n13\n22\n23\n26\n29\n31\n39\n47\n63\n71\n76\n79\n84\n90\n94\n96\n97\n99\n100\n105\n107\n113\n122\n125\n130\n132\n144\n145\n147\n148\n150\n151\n155\n160\n161\n162\n163\n171\n172\n178\n187\n195\n199\n203\n207\n208\n219\n231\n232\n234\n235\n242\n245\n247\n250\n251\n254\n259\n260\n263\n265\n267\n269\n276\n277\n281\n288\n289\n291\n292\n293\n296\n299\n301\n308\n309\n310\n311\n314\n315\n319\n323\n327\n330\n334\n335\n337\n338\n340\n341\n344\n347\n353\n355\n361\n362\n365\n366\n367\n368\n372\n388\n390\n393\n397\n401\n407\n413\n414\n425\n428\n430\n435\n437\n441\n447\n448\n457\n462\n463\n469\n470\n471\n472\n476\n483\n487\n515\n546\n555\n558\n570\n579\n583\n587\n593\n594\n596\n609\n613\n617\n621\n629\n637\n657\n658\n701\n717\n724\n763\n768\n774\n776\n779\n780\n787\n805\n812\n815\n820\n824\n833\n847\n852\n866\n875\n883\n889\n895\n907\n928\n931\n932\n933\n934\n936\n937\n943\n945\n947\n948\n949\n951\n953\n954\n957\n963\n965\n967\n980\n981\n983\n988\n"
  },
  {
    "path": "timm/data/_info/imagenet_r_synsets.txt",
    "content": "n01443537\nn01484850\nn01494475\nn01498041\nn01514859\nn01518878\nn01531178\nn01534433\nn01614925\nn01616318\nn01630670\nn01632777\nn01644373\nn01677366\nn01694178\nn01748264\nn01770393\nn01774750\nn01784675\nn01806143\nn01820546\nn01833805\nn01843383\nn01847000\nn01855672\nn01860187\nn01882714\nn01910747\nn01944390\nn01983481\nn01986214\nn02007558\nn02009912\nn02051845\nn02056570\nn02066245\nn02071294\nn02077923\nn02085620\nn02086240\nn02088094\nn02088238\nn02088364\nn02088466\nn02091032\nn02091134\nn02092339\nn02094433\nn02096585\nn02097298\nn02098286\nn02099601\nn02099712\nn02102318\nn02106030\nn02106166\nn02106550\nn02106662\nn02108089\nn02108915\nn02109525\nn02110185\nn02110341\nn02110958\nn02112018\nn02112137\nn02113023\nn02113624\nn02113799\nn02114367\nn02117135\nn02119022\nn02123045\nn02128385\nn02128757\nn02129165\nn02129604\nn02130308\nn02134084\nn02138441\nn02165456\nn02190166\nn02206856\nn02219486\nn02226429\nn02233338\nn02236044\nn02268443\nn02279972\nn02317335\nn02325366\nn02346627\nn02356798\nn02363005\nn02364673\nn02391049\nn02395406\nn02398521\nn02410509\nn02423022\nn02437616\nn02445715\nn02447366\nn02480495\nn02480855\nn02481823\nn02483362\nn02486410\nn02510455\nn02526121\nn02607072\nn02655020\nn02672831\nn02701002\nn02749479\nn02769748\nn02793495\nn02797295\nn02802426\nn02808440\nn02814860\nn02823750\nn02841315\nn02843684\nn02883205\nn02906734\nn02909870\nn02939185\nn02948072\nn02950826\nn02951358\nn02966193\nn02980441\nn02992529\nn03124170\nn03272010\nn03345487\nn03372029\nn03424325\nn03452741\nn03467068\nn03481172\nn03494278\nn03495258\nn03498962\nn03594945\nn03602883\nn03630383\nn03649909\nn03676483\nn03710193\nn03773504\nn03775071\nn03888257\nn03930630\nn03947888\nn04086273\nn04118538\nn04133789\nn04141076\nn04146614\nn04147183\nn04192698\nn04254680\nn04266014\nn04275548\nn04310018\nn04325704\nn04347754\nn04389033\nn04409515\nn04465501\nn04487394\nn04522168\nn04536866\nn04552348\nn04591713\nn07614500\nn07693725\nn07695742\nn076
97313\nn07697537\nn07714571\nn07714990\nn07718472\nn07720875\nn07734744\nn07742313\nn07745940\nn07749582\nn07753275\nn07753592\nn07768694\nn07873807\nn07880968\nn07920052\nn09472597\nn09835506\nn10565667\nn12267677\n"
  },
  {
    "path": "timm/data/_info/imagenet_real_labels.json",
    "content": "[[], [970, 795], [230, 231], [809], [516, 850], [57], [334], [700], [674], [332], [109], [286], [370], [757], [595], [147], [327, 108], [21, 22], [478], [517], [334], [], [948], [727], [23], [619, 526, 846], [270], [167], [64, 55], [858], [324], [573], [150], [981], [586], [887], [], [398], [], [74], [516], [756], [129], [198], [256], [725], [565], [162, 167], [717, 581], [390, 467], [92], [29], [844], [591], [358], [468], [], [994], [872], [588], [608, 474], [183], [107], [40, 46], [842], [390], [101], [887], [870], [903, 841], [], [149], [21], [476], [80], [424], [159], [275], [175], [461], [970], [160], [788], [58], [479, 817], [498], [374], [28], [487], [50], [270], [383], [366], [484, 724], [373], [705], [330], [142], [949], [348, 349], [473], [159], [872], [878], [201], [906], [70], [889, 486], [632], [608, 774, 630, 636], [122], [720], [227], [], [162], [959], [638], [], [655, 851, 598], [645], [718], [483], [852], [397], [312, 988, 311], [457, 834], [352], [82], [934], [283], [802], [742], [276], [234, 236], [751], [342], [526, 528, 784], [328], [], [251], [163], [328], [771], [726], [977], [], [265], [], [590], [977, 978], [681, 810, 620, 508], [637], [39], [115], [937], [274], [277], [763], [905, 789], [646], [], [894], [647], [504], [937], [687], [781], [666], [583], [158], [825], [212], [659], [257, 222], [436], [199], [140], [248], [339], [230], [361], [909, 910, 926], [935], [638, 639], [654, 785], [289], [867], [], [103], [584], [243], [703], [449, 975], [771], [118], [396], [934], [16], [548], [993], [704], [841, 457], [233], [401, 593, 819], [827], [376], [146], [606], [922], [431], [284], [889], [475], [977, 978], [475], [984], [16], [77], [610, 453], [254], [636], [662], [473], [207], [25], [427, 463], [215], [230, 173], [35], [741], [125], [518, 652, 663, 465], [289], [425], [973], [], [167], [121], [445], [702], [], [366], [678], [764], [125], [349], [13], [179], [522], [], [989], [], [647, 438], [660], [801, 836, 837, 983], 
[533], [487], [27], [644], [750, 721], [865, 850], [1], [176], [694], [488, 664, 695, 508], [798], [809], [652, 413], [], [], [821], [421], [361], [920], [761], [27], [464], [92], [182], [897], [612], [610, 918], [283], [881], [906], [728], [426], [554], [], [531], [869], [730], [0], [866], [738, 580], [547], [43], [64], [69], [176], [329], [544, 926], [288, 290], [991], [591], [346], [1], [607], [934], [784, 828], [572], [], [888], [654], [546, 402], [390], [702], [24], [102], [949, 953, 954, 923], [810, 508], [361], [280], [65], [777], [359], [234], [21], [7], [525], [737, 886, 760, 894], [938], [254], [616, 733], [707], [463], [60], [], [531, 487, 623, 893], [380], [982], [305], [355], [503], [], [495], [472], [293], [816], [195], [738, 905], [475], [481], [431], [260], [130], [627], [977, 978], [622], [696], [300], [37], [133], [637], [867], [465], [592], [741], [908, 404, 895], [91], [109], [426], [694], [546], [208], [488, 649], [786], [959], [], [834, 906], [879, 568], [649], [228], [621], [630, 703], [107], [818, 598], [420], [], [133], [185], [471], [230], [974], [74], [76], [852], [383], [267], [], [359], [484], [510], [33], [177], [935], [310], [987, 998], [270], [598], [199], [998], [836, 837, 608], [14], [97], [856], [398], [319], [549, 681, 620], [92], [765], [840, 728, 412], [769, 945], [160], [265, 266], [638, 639], [846], [722], [183], [674], [468], [], [748, 636], [867], [636], [], [912], [721], [16], [199], [170], [], [946], [350], [557], [361], [361], [594], [861], [208], [606], [734], [767], [746], [788], [346], [153], [739], [414], [915], [], [152], [943], [849], [], [100], [546], [657], [764], [141], [39], [993], [758], [190], [888], [18], [], [341], [875], [359], [388], [894], [437], [987, 998], [517], [372], [286], [754, 662], [713], [915], [964], [146], [529], [416], [376], [147], [902], [26], [398], [175], [270], [335], [899, 559, 532, 505, 762, 923], [540], [607], [495], [257, 222], [801], [576, 879, 982, 472], [301], [166], [56], [868, 
967, 968, 659], [], [], [567], [277], [], [651], [377], [684], [832], [39], [219], [863], [868], [794], [80], [983], [269, 347], [238], [781], [223], [521, 926], [830], [260], [491], [896], [220], [680], [48], [542], [], [820], [148], [113, 114], [99], [143], [691, 570], [796], [986], [346], [367], [939], [875], [625], [481, 482, 848], [464], [812], [705], [], [466], [781], [499], [617, 338], [679, 488], [858], [795], [437], [11], [625], [965], [874], [949, 954], [600, 517], [86], [133], [149], [865], [480, 582, 760, 886], [325], [499], [834], [506, 421], [298], [900], [905], [202], [740], [258], [762], [297, 295], [132], [240, 238], [833], [471], [386], [898], [162], [288, 290], [450], [850], [232], [273], [954], [965], [611], [643], [147], [290], [866, 977], [186], [156], [776, 683], [775], [987, 998], [333], [325], [572], [927], [744, 657], [777, 623], [833], [551], [301], [716], [485], [102], [791], [959], [404], [987, 998], [415], [455], [242, 852], [], [517], [16], [320], [632], [568], [], [216], [332], [769, 726], [923, 959], [861, 605], [134], [677], [288], [10], [919, 733], [852], [], [104], [712], [388], [261], [609, 479], [673, 681, 620, 526, 664, 508], [], [579], [450], [628], [217], [810, 878], [763], [208], [126], [442, 497], [864], [232], [776], [942], [336], [978], [681, 620], [512, 587], [78], [668], [699], [746], [46, 39], [968, 809, 618, 828], [330], [615], [], [62], [116], [127], [955], [306], [425], [190], [370], [187], [971], [897, 411], [396], [744, 657], [840, 463], [718], [116], [836, 837], [994], [419], [764], [214], [285], [641], [951], [882], [13], [829], [453], [216], [665], [521], [268], [468], [418], [728], [], [449], [194], [362], [928, 963, 948, 923], [924], [249], [524, 461], [992], [571], [283], [608], [129], [486], [859], [498], [21], [467], [591], [924], [556], [97], [898], [586], [10], [202], [67], [649], [141], [603], [727], [101], [995], [278], [964], [238, 240], [423, 424], [489, 634], [533], [424, 423], [451], [555], [732], 
[514], [803], [300], [551], [753], [411], [315], [963], [], [389], [559, 578, 601], [673, 742, 526, 527, 662, 664, 508], [839], [299], [578, 689], [112], [960], [632], [867], [], [61], [427], [367], [926], [465, 597, 413], [34], [773], [654], [131], [874], [281, 282], [891], [956], [201], [267], [], [200], [673, 508], [424, 423], [907], [57], [27], [906, 578, 834, 459], [7], [322, 946], [934], [663], [423, 424], [687], [836, 837], [958], [645], [119], [306], [930], [124], [694], [777, 524, 461], [205], [137], [849], [681, 620, 526, 508], [380], [586], [916], [478], [182], [874], [715], [487], [], [19], [161, 162, 785], [915], [730], [678, 487, 830], [822], [], [699], [689, 819, 578], [673], [], [], [624], [679], [887], [581], [665], [903], [746, 622], [585, 440], [800], [899], [669], [81], [746], [866], [935], [668], [295], [893], [265], [628], [987, 923], [367], [294], [727], [12], [435, 876], [192, 186], [589], [70], [129], [454], [17], [946], [204], [181], [163], [80], [940], [587], [21], [198], [25], [932], [339], [480], [465, 413], [883], [453, 619, 818], [807], [287], [], [614], [814], [591, 689, 601], [919], [508], [479], [452], [155], [41], [163], [606], [8, 7], [], [515, 808, 693], [858], [506], [23], [976, 447], [801, 397, 983], [856, 595], [753], [5], [186], [667], [305], [46], [303], [], [927], [91], [34], [675, 654], [406], [65], [76], [517], [806], [330, 331], [], [130], [103], [56], [], [78], [31], [372], [225, 235], [431], [159], [187], [930], [888], [96], [836, 837, 655, 879, 444], [994], [872, 622, 759], [302], [566], [33], [619], [694], [406], [20], [18], [371], [320], [780], [997], [730], [613], [105], [810, 878], [311], [883], [367], [243], [], [], [515, 39, 47], [412], [921], [332], [514, 464], [276], [629], [917], [77], [643], [556], [998], [328], [723], [161], [250], [1], [919], [392], [264], [652, 847, 465, 408, 413], [488, 633], [968, 495, 504], [188], [884], [335], [795], [241, 238], [842], [71], [862], [254], [27], [409], [444], [433], 
[324], [322], [688], [579], [562], [917, 335], [803], [863], [44], [719], [16], [384], [328], [348], [194], [678], [593], [9], [], [25], [913, 983], [260, 667], [104], [72, 815], [223], [268], [283], [784, 477], [53], [615, 465], [100], [543], [133], [159], [439], [151], [355], [392], [577], [72], [383], [619, 846], [145], [109], [988], [824], [293], [], [821], [484], [608, 806, 966, 572], [259], [344], [132], [128], [154], [210], [508], [638, 639], [138, 83], [256, 233, 252], [376], [720], [464], [960, 968, 504], [999], [455], [613], [314], [993], [17], [759], [843], [591, 721], [330], [681, 810, 620, 531], [432], [778], [489, 372], [468], [489], [375], [263], [], [418], [377], [878], [283], [838, 631], [442], [382], [641], [628], [592], [59], [223], [587], [724], [207], [228], [8], [962], [575], [988], [402, 889], [551], [990], [141], [120], [207], [118], [946], [828, 463], [786], [166, 167], [256], [986], [28], [283], [636, 834, 671], [720], [411], [80], [678, 211], [29], [606], [636, 748], [156], [91], [734], [569], [458], [84], [230], [274], [707], [75], [965], [260], [978], [709], [372], [717], [763, 764], [96], [958], [884], [327], [140], [88], [156], [137, 98, 99], [559, 836, 837, 842], [669], [492], [771], [653], [484, 871, 913], [], [787], [827], [644], [393], [386], [654], [137], [715], [906], [724], [633, 477, 823], [516], [64], [850], [321], [611], [392], [509], [207], [903, 655, 638], [397], [582, 949], [188], [652, 465, 830], [750], [259], [294], [450], [511], [477], [255], [814], [781], [177], [654], [806, 911], [680], [769], [830], [273], [24], [463, 977, 978], [321], [480], [331], [21], [556], [481], [420], [195], [216], [215], [152], [333], [646], [152], [635], [128], [993], [351], [928], [267], [830], [], [335], [319], [786], [816], [334], [509], [444], [155], [902], [526, 527, 664], [483, 581, 479, 817, 511], [346], [482], [173], [438], [], [], [374], [548], [552], [619, 607], [411, 478], [451], [277], [715, 652], [], [855], [694], [709], 
[611], [168], [113], [782, 851], [974], [147], [69], [546, 650, 402, 818, 819], [11], [543], [629], [127], [652, 465, 764, 413], [349], [975, 628], [922, 412], [484], [78], [204], [399], [192, 186], [543], [89], [423], [323], [764], [970], [829], [645], [542], [809, 925], [195], [732], [474], [741], [820], [238], [643], [977, 978], [234], [844], [717], [925], [57], [806, 911], [444], [], [245], [], [923, 868], [791], [401], [896, 804], [773], [977], [875], [637], [442], [652, 847], [873], [472], [977, 978, 608, 502], [926], [102], [810, 878], [784], [], [355], [643], [279], [92], [523], [50], [510], [765], [681, 620, 526, 664, 281, 508], [870], [748], [253], [749], [], [452, 911], [824, 775], [261], [562], [911], [289], [950], [456], [449], [117], [97], [101], [291], [346], [809], [997], [168], [896, 861], [714], [126], [593], [8], [432], [72], [158], [958], [662], [945], [47], [919], [427], [809, 762], [185], [685], [122, 124], [660], [449, 536], [434, 533], [178], [356], [128], [819, 517], [157], [404], [23], [939, 582, 943], [204, 155], [756], [797], [916], [254], [9], [471], [577, 904], [255], [882], [654], [261, 174], [923, 931], [950], [360], [246], [872], [578, 982], [675], [418], [556], [216, 220], [928, 923, 960], [402], [911], [601], [179], [975, 638, 639], [303], [709, 526, 470, 767], [778], [664, 553, 697, 851], [178], [500], [], [557], [745], [611], [401], [571], [621], [206], [89], [394], [481], [627], [333], [701], [644], [364], [450], [979], [203], [872], [795], [265, 267], [118], [705], [565], [519], [641], [75], [], [590], [749], [374], [986], [76], [83], [14], [945], [683], [770], [74], [211], [429], [269], [], [505], [150], [344], [858], [45], [959], [884], [333], [953], [86], [204], [62], [928, 960], [257], [178], [178], [274], [], [552, 37], [147], [919, 920, 555, 733], [566], [74], [248], [399], [281], [768], [296], [327], [502], [721], [310], [944], [377], [825], [404], [], [17], [356], [860], [750], [926], [345], [957], [488, 830], [843], 
[430], [656, 919], [871], [424, 610], [141, 142], [653], [930], [977], [744], [673, 681, 620, 526, 527, 782, 664, 508], [840], [471], [], [863], [122], [851, 981, 664], [803], [544], [365], [326], [80], [166], [304], [398], [821], [456], [738, 428, 580], [149], [505], [366, 367, 369], [872], [173], [944], [220], [780], [492], [437], [888], [185], [12], [33], [763, 764], [740], [522], [917], [921, 638, 639], [86], [193, 187, 852], [], [300], [741], [262], [839], [307], [673, 681, 620, 526, 632, 508], [859], [49], [658], [966], [], [215], [64], [867], [370], [690], [68], [403], [433], [313], [138], [868, 813], [968, 504], [966, 907, 572], [], [587], [862], [67], [328], [390], [81], [968, 187], [15], [872], [519], [494], [405], [786], [423], [593], [917, 454], [65], [149], [558, 541, 542], [], [868, 945, 923], [894], [454, 921], [651], [943], [559], [], [72], [921, 763], [567], [861], [687], [40, 47], [257], [766], [169], [578, 982], [889, 486], [87], [448], [654], [789], [790], [185], [798], [35], [275], [636], [783], [353], [81], [960], [139, 140], [586], [44], [254], [603], [533], [37], [489], [159], [30], [963], [551], [906], [374], [816], [951], [671], [724], [671, 535], [37], [219], [669], [532, 762], [482, 754], [42, 26], [898], [], [330, 331], [951], [810, 878], [874], [481], [641], [], [472], [92], [559, 846, 818], [890], [659, 828], [840], [684], [235], [559], [264], [673, 526, 527, 782, 664, 508], [979], [518], [840], [548], [956], [221], [548], [], [167], [157], [], [547], [470], [665], [251], [53], [897], [350], [], [607], [], [264], [], [209], [343], [681, 620], [], [786], [127], [323], [861], [836, 837], [], [361], [581, 479, 656], [715, 652, 439], [43], [872, 917], [968], [114], [27], [536], [740], [417], [100], [692], [902, 488], [], [779], [307], [482], [31], [327], [896], [299], [994], [122, 124], [387], [114], [390], [327], [90], [478], [16], [320], [654], [711], [486], [518], [], [219, 220], [816], [78], [494], [255], [308], [204, 243, 155], [], 
[78], [977], [263, 185], [401], [603, 653], [779], [556], [690], [399], [265, 181], [304], [167], [950], [152], [438, 647], [227], [157], [588, 790], [599], [924], [], [475], [877], [763], [809, 925], [358, 359], [785], [927], [434], [812], [642], [867], [884, 406], [785], [139], [779], [39], [786], [771], [466], [], [894], [170], [867], [492], [905, 831], [175], [], [631], [778], [25, 28], [884], [116], [], [860], [467], [965], [923, 960], [370], [171], [936], [602], [781], [], [142], [605], [894], [700, 999], [], [306], [546], [550], [761], [621], [595], [515, 583], [320], [939, 813, 909, 910, 567], [179], [840], [], [769, 798], [215], [954], [385, 101], [223], [], [729], [759], [559], [87], [116], [236], [554], [911, 636], [661, 479], [168, 211], [828], [520, 529, 516, 431], [719], [978, 437], [100], [538], [], [697], [21], [240, 241], [312], [634], [515], [309], [685], [783], [61], [998, 987], [886], [111], [427], [314], [350], [719], [71], [286], [588], [616], [132], [670], [], [], [877], [558], [591], [251], [788], [232], [908, 895], [471], [754], [959], [767], [8], [690], [496], [], [407], [767], [647], [715], [629], [13], [407], [268], [842], [738], [943], [320], [810, 878], [195, 202], [922], [262], [185], [184], [], [197, 199], [502], [40], [941], [106], [900], [6], [949, 954], [247], [30], [47], [505], [460], [40, 46], [921, 604], [216], [473], [590], [872, 759], [315], [], [39], [], [404], [765], [608, 678, 841], [155, 204], [124], [181], [386], [113], [575], [689], [557, 624, 436], [230], [499], [818], [726], [932, 415], [217], [727], [737, 455, 760, 886], [230, 231], [541], [732], [686], [395], [547, 565], [623], [732], [344], [670], [506], [650, 818, 819], [], [675], [120], [970, 979, 858], [74], [292], [831, 721, 745], [483, 460, 975], [529, 831], [212], [961], [715], [751, 479], [583], [], [706, 762], [893], [865], [749], [134], [131], [248, 249, 537], [], [], [314], [540], [565], [661], [382], [235], [750, 721, 697], [], [], [], [406], [768], 
[562], [452], [196, 198], [899, 968, 504, 505], [10], [171], [500], [716], [318], [357], [330, 331], [106], [22], [577], [573], [481, 482], [910, 438], [283], [542], [457], [897], [502], [72], [305], [541, 542], [915], [633], [755], [991], [333], [571], [524], [51], [16], [479], [932], [894], [644], [822, 542], [515, 467, 728], [175], [126], [483, 698], [402], [270], [352], [792], [248, 250], [828], [772], [], [340], [14], [285], [351], [77], [529], [356], [46, 47], [505], [162], [868], [859], [672], [959], [369], [832], [907, 440], [674], [783], [673, 526, 527, 664, 508], [146], [785], [883], [628], [871], [632], [586], [219], [951], [946], [93], [64], [877], [980], [497], [296], [61, 62], [673, 650, 664, 526, 527, 632, 508], [896], [489, 981], [677], [], [758], [653], [487], [507], [496], [], [417], [668], [471], [628], [847], [658], [90], [987], [135], [308], [], [], [724], [64, 55], [299], [810, 878], [730], [575], [835], [394], [0, 758], [988], [376], [300], [612], [546], [137], [412], [874], [277], [398], [392], [156], [581], [124], [992], [65], [552, 903], [781], [121], [447], [662], [845], [449], [847], [34], [792], [754], [148], [996], [23], [692], [141], [513], [89], [796], [636], [673, 681, 620, 664, 526, 527, 508], [190], [84], [], [952], [683], [], [610], [414], [958], [838], [974], [954], [], [532, 799], [], [10], [129], [682, 708], [184, 232], [613], [585], [], [614], [547], [332], [683, 889], [437], [637], [809], [741], [854], [5], [154], [594], [569], [538], [499], [867], [153], [727], [251], [956], [583], [442], [400, 667], [962, 923], [187], [640], [607], [320, 319], [933, 923], [449], [24], [679, 488], [104], [62], [37], [879], [241], [578, 982], [745], [842, 977, 978], [738, 580], [], [650, 819, 854], [1], [133], [123], [424, 423], [614], [162, 167], [229], [610], [534], [524], [840, 911], [932], [559], [560, 981], [333], [565], [821], [904], [269], [222], [114, 947], [], [91], [846], [139], [537], [252], [], [652, 413], [928, 927], [354], 
[556], [345, 690], [722], [601], [803], [241], [682], [300], [490], [721, 831], [386], [250], [5], [651, 659, 411, 813], [], [742, 713], [156], [981], [570], [608, 610, 841, 894], [662], [598], [217, 852, 239], [43], [], [212], [218], [763], [106], [839, 873], [238], [220], [744, 657], [301], [777], [356], [625], [98], [], [138], [545], [199], [574], [217], [614], [243], [200, 155], [247], [185], [984], [539], [211], [684], [173], [92, 95], [654], [174], [297], [246], [], [775], [799], [370], [808], [956], [500], [2], [358], [801], [686], [773], [936, 939, 940], [605], [749], [779], [618], [993], [805], [924], [589], [145], [215], [], [938, 37], [752], [12], [481], [906, 834], [769], [401], [918], [836, 837, 951], [], [275], [799], [369], [515, 40, 42], [504], [137], [761], [8, 765], [166], [677], [767], [430], [469, 616], [400, 834, 667], [325], [927], [390], [802], [84], [], [736], [745], [23], [92], [712], [630], [410], [474], [221], [793], [83], [309], [], [165], [843], [579, 881], [397], [222], [104], [426], [488, 479, 695], [195], [886], [401, 881], [265], [466], [194, 185], [949], [331], [551], [315], [221], [172], [550], [629], [806, 658], [889], [897], [769], [832], [10], [608, 774], [525], [111], [593], [968, 504], [704], [868], [347], [569], [], [596], [738], [763], [59], [953], [506], [585], [922], [22], [807], [676], [279], [363], [14], [378], [], [60], [608], [357], [872], [612], [154], [740, 587, 783, 477], [877], [265], [230, 220], [612], [785], [690], [433, 728], [423], [89], [275], [975], [653], [584], [292], [330], [580], [284], [976], [374], [669], [70], [6], [251], [443], [340], [263], [208], [59], [483], [140], [535], [947, 997], [140], [241], [707], [755], [977, 978], [121], [452], [571], [508], [677], [357], [122], [163], [150], [60], [979], [418], [701], [428], [], [24], [37], [977, 638, 639], [37], [122], [243], [766], [399, 786], [588], [652], [457, 158, 151], [102], [405], [29], [395], [467, 596], [449, 718, 975], [672], [752], [366, 
367], [612], [825], [], [908, 404], [], [18], [996], [430], [82], [968], [834], [558], [370], [977, 978, 879], [700], [261], [513], [464], [185], [218], [274], [432], [831, 765, 799], [652], [768], [758], [289], [201], [209], [105, 106], [378], [560], [435], [632], [270], [309], [733], [57], [959], [106], [22], [289], [197], [861], [996], [192, 187], [632], [296], [382], [681, 810, 620, 508], [775], [320], [350], [704], [972, 977], [498], [442], [596], [525], [748], [514, 819, 638, 639], [578, 903, 689, 601], [536], [965], [], [470, 921], [899], [526, 527, 782, 664, 508], [880], [699], [292], [113], [820], [763], [925], [807], [681, 620, 526], [994], [247], [865], [423, 424, 762], [803], [760], [243], [590], [181], [489], [865, 850], [929, 443], [727], [415], [55], [690], [738, 703], [385], [527], [617], [117], [544], [200], [784, 507], [850], [141], [754], [255], [496], [847], [435], [515, 596], [291], [415], [808], [707], [956], [62], [289], [], [238], [316], [], [398], [383], [997], [393, 108], [17], [166], [617], [139], [284], [433], [628], [617, 823], [700], [80], [701], [773], [793], [11], [347], [785], [678], [], [806, 850], [538], [920], [851], [965], [147], [57], [545], [19], [836, 837, 975], [999, 153, 700], [119], [910], [962], [183], [73, 74, 815], [517], [579], [140], [346], [], [627], [863], [814], [665], [65], [529], [466], [155], [655], [540], [712], [726], [247], [894], [239], [34], [820], [], [528], [678], [189], [117], [695], [380], [293], [413], [114], [389, 391], [784], [332], [], [68], [690, 346], [313], [855], [842, 731], [10], [830], [472, 718], [456], [723], [668], [461], [558], [143], [745], [523, 975, 978], [221], [946], [123], [931], [654], [27], [406], [510], [261], [746], [981], [2], [933], [], [508], [754], [342], [456], [704], [905, 750], [545], [363], [113], [205], [187, 636], [945], [370], [647, 600], [155], [790], [589], [532], [191], [28], [532], [67], [456], [506], [143], [245], [433], [417], [35, 37], [203], [803], [273], [322, 
769], [919], [340], [873], [87], [754], [816], [241, 238], [], [843], [844], [643], [563], [842, 978], [421], [87], [406], [946], [979], [806], [877], [619, 846], [417], [782], [105], [465], [624], [830], [352], [746], [33], [552], [863], [855], [688], [597], [281], [185], [15], [907], [221], [625], [506], [563], [], [37], [605], [679], [944], [58], [402], [80], [961], [763], [828], [577], [93], [315], [300], [81], [178], [1], [841], [729], [923], [66, 68], [409], [256], [76], [818], [369], [128], [391], [567], [591], [365], [387], [766], [], [], [], [458], [172], [769, 767], [457, 541], [20], [883], [356], [545], [405], [716], [212], [730], [635], [277], [256], [25], [868], [728], [540], [386], [864], [971], [53], [351], [], [320], [707], [691, 570], [978], [870, 903], [630], [62], [989], [128], [515, 775, 564, 669], [362], [311], [822, 542], [325], [642], [695], [473], [761], [365], [715], [], [559], [950], [999], [158, 151], [704], [647], [627], [227], [3], [676, 236], [349], [936, 923], [715], [993], [60], [17], [703], [290], [33], [225], [968], [323], [377], [462], [108], [51], [], [592], [870], [421], [126], [564], [801], [959], [270], [928, 923, 960], [578, 982], [651, 732], [577], [129], [958], [841], [350], [78], [259], [559, 721], [0, 391], [958], [877], [474], [650], [141], [755], [283], [907, 440], [2, 3], [744, 657], [104], [929], [991], [94], [772], [879], [628], [105], [975, 634], [619, 846], [828], [988], [936], [591], [], [738], [280], [156], [240], [708, 460, 975], [80], [546], [936], [196], [694], [340], [], [386, 101], [672, 792], [], [135], [191], [611], [472, 693], [624, 453, 454], [221], [894, 638, 639], [72], [535, 479], [824], [], [298], [286], [758, 472], [85], [555], [794], [961], [958], [936], [990], [180, 243], [71], [608, 514], [], [224], [868], [454], [611], [309], [539], [317], [393], [610, 443], [338], [520], [430], [276], [841], [93], [749], [281, 282], [892], [294], [457], [945], [923], [458], [817, 479], [555], [624], [855], 
[135], [110], [510], [], [783], [451], [866], [419], [206], [492], [265, 266], [94], [713], [720], [457, 459], [89], [], [47], [], [435], [908, 404], [119], [762, 766], [890], [215], [719], [844], [717], [358], [687], [643], [970], [394], [730], [618, 813, 659, 567], [914], [639], [475], [473], [18], [492], [396], [64], [679], [105], [], [450], [834], [49], [538, 668], [821], [697], [794], [781], [656], [748], [968], [624, 454], [909, 567], [], [915], [689, 601], [126], [13], [724], [379], [], [673, 508], [734], [80], [763], [43], [352], [], [697], [535], [532, 923, 868], [894], [504], [884], [], [688], [297, 295], [154], [984, 475], [629], [238], [268], [653], [681, 620], [63], [302], [914], [439], [144], [643, 819], [150], [1], [], [228], [144], [211], [57], [820], [28], [387], [], [37], [690], [567], [351], [785], [787, 524], [343], [488, 600], [135], [24], [369], [528], [271], [520], [585], [683], [225, 235], [912], [44], [265], [], [335], [74], [2], [793], [868], [573], [374], [590], [26], [911, 533], [306], [443], [387], [603], [602], [231], [877], [202], [652], [978, 510], [907], [337], [612, 670], [2], [169], [607], [681, 810, 620], [135], [518, 920, 671], [917], [761], [847], [362], [27, 455], [707], [647, 968], [524, 461], [479], [724], [], [351], [756], [342], [253, 703], [351], [873], [176], [956], [673, 681, 620, 526, 527, 664, 508], [724], [633], [199], [613], [479], [], [777], [], [419, 605], [320], [939, 567, 926], [669], [256], [223], [605], [880], [593], [469], [337], [630], [839], [752, 852], [846], [528], [105], [630], [514, 515], [125], [742], [94], [776], [], [512], [738], [968], [270], [455], [182], [58], [181], [674], [96], [118], [37], [453], [148], [203], [770], [894, 799], [11, 14], [101], [715, 671], [970], [601], [495], [786], [57], [33, 973], [990], [400], [716], [788], [337], [812, 908], [739], [292], [878], [9], [61], [361], [605], [218], [344], [232], [844], [832], [246], [596], [120], [950, 953], [896, 999, 648, 861], [975], [853], 
[921], [348, 349], [537], [866], [], [836, 837, 518, 898, 671], [39], [76], [], [449, 975, 472], [862], [138], [719], [], [262], [981, 429], [930], [22], [], [913], [617, 823], [821], [150], [825], [369], [474], [922], [], [343], [], [312, 937], [823], [951], [676, 235], [862], [92], [346], [28], [497], [549], [72], [195], [212, 251], [37], [112], [648], [107], [623], [139], [929], [170], [99], [475], [713], [], [264], [813], [432], [916], [475], [526, 664], [976], [44], [749, 526], [204], [121], [622, 759, 414], [194], [685], [283], [362], [555], [474], [17], [587], [368], [460, 718], [247], [885], [109], [737], [865], [783], [739], [462], [548], [136], [999, 876, 435], [579, 402], [351], [274], [641], [982], [426], [], [538], [354], [], [890], [954], [229], [824, 911], [728, 861], [932], [626], [681, 620], [107], [646], [69], [702], [987, 998], [607], [478], [], [792], [908], [898], [304], [73], [401], [], [923, 122], [268, 151], [593], [], [373], [503], [302], [402], [481, 482], [750, 721], [], [374], [446], [492], [755], [277], [91], [], [], [], [372], [531], [479], [763], [359], [595], [642], [654, 603], [499], [467], [346], [650], [804, 631], [111], [], [487], [693, 472], [929], [134], [661], [629], [117], [689], [743], [479, 817, 511], [447], [232], [321], [351], [621], [494], [200], [470], [316], [786], [981, 429], [258], [485], [960, 923], [144], [], [395], [506], [789], [325], [297], [384], [], [453], [614], [349], [645], [608], [673, 526, 527, 916, 664, 508], [807], [818], [830], [370], [754], [762, 923], [], [903], [213], [581], [6], [], [802], [579, 881], [242], [696], [683], [939], [139], [475], [749], [685], [219], [702], [661], [834, 906, 630], [495], [816], [945], [327], [386], [374], [298], [377], [979], [625], [888], [227], [176], [677, 587, 784], [11], [907, 953], [276], [105], [546], [650, 834, 906], [578], [739], [298], [14], [4], [526], [47], [281, 897], [791], [984], [839], [842, 625], [986], [411], [582], [52], [43], [778], [306], [820], 
[150], [], [822, 542], [78], [887], [], [], [948], [829], [259], [199], [804], [580, 315], [513], [84], [821], [814], [929], [928, 960], [957], [921], [608], [744, 657], [172], [392], [820], [937], [610, 114], [331], [51], [113], [7], [7], [60], [836, 837], [592], [396], [650], [687], [873], [431], [159], [182], [430], [837, 465, 597], [950], [566], [31], [440], [234], [726], [113], [980], [517], [221], [572], [598], [376], [913], [843], [531], [803], [125], [685], [255], [801, 983, 327], [417], [666], [891], [530], [57], [520], [166], [745], [450], [], [385, 386], [716], [650, 819], [564], [355], [353], [308], [883, 725], [775], [131], [939], [83], [116, 126], [], [955], [18], [628], [480], [841, 823], [594], [405], [162], [95], [605, 748], [430], [338], [86], [305], [648, 794], [403], [], [939, 943], [497], [236], [413], [350], [854], [93], [49, 50], [719], [187], [262], [405], [62], [955], [608, 679], [260], [331], [265], [938], [370], [497], [26], [581, 407], [660], [119], [67], [89], [683], [779], [801], [263], [368], [862], [], [486, 776, 683], [143], [689, 501], [], [541], [916], [530], [40], [], [634, 561], [177], [208], [425, 825], [836, 638, 639], [], [547], [198], [671, 518, 615], [110], [232], [1], [558], [911, 796], [126], [807], [619], [616, 618], [526], [88], [759, 474], [249], [33], [386], [30], [900], [898, 680], [472, 693], [370], [123], [608, 897, 651, 567], [], [482], [546, 631], [197], [566], [515], [721], [18], [774], [962, 923], [272], [361], [494], [55], [], [783], [946], [430], [293], [195], [100], [120], [619, 409, 442, 892], [578, 689, 982, 601], [751], [764], [336], [441], [448], [558], [55], [283, 435], [561], [291], [189], [465], [182], [179], [574], [257], [896], [487], [696, 738], [], [534], [856], [346], [], [416], [], [911], [391], [762], [258], [535], [230], [983], [378], [402], [310], [780], [11], [923, 891], [369, 379], [672], [329], [39, 44, 47], [147], [88], [582, 953, 954], [879, 638, 639], [687], [102], [97], [631], [917, 
921], [969], [221], [85], [], [999, 435, 794], [246], [159], [76], [], [962, 923, 935], [155], [794], [951], [481, 482], [146], [31], [737, 519], [], [872], [563], [252], [300], [665], [317], [195, 245], [], [517], [126], [125], [29], [17], [230, 231], [25, 28], [453, 553], [48], [976], [767], [572], [201], [752], [352], [230], [671], [312], [110], [316], [711], [13], [670], [788], [551], [770], [903], [90], [771], [656, 468], [181], [464], [579, 881], [492, 588], [619, 846], [24], [957], [500], [582, 692], [817], [890], [], [986], [769], [785], [989], [13], [38], [165], [548], [457], [923, 934], [565], [263], [492], [858], [607], [744, 657], [417], [483], [466, 799], [795], [698], [747], [851], [161], [614], [627], [971, 542], [478], [896], [], [174], [934], [], [929], [332], [487, 457], [962, 923], [7], [], [245], [956], [286], [], [115], [], [481, 482], [609], [547], [356], [994], [637], [636, 748], [604], [448], [152], [163], [107], [], [97], [353], [898, 585, 631], [404], [946], [853], [292], [140], [860], [734], [739], [243], [964], [673, 526, 508], [519], [899, 968, 725, 504, 572], [341], [277], [759, 635], [707], [661], [286], [863], [401], [79], [299], [826], [274, 277], [31], [604], [88], [697], [755], [], [538], [], [305], [440, 441], [327], [867], [], [756, 412], [275], [521, 926], [138], [791], [365], [305], [976, 977, 978], [10, 858], [478], [489], [50], [896], [324], [86], [386], [789, 614], [699], [133], [408], [565], [], [611], [77], [998, 941, 987], [769], [248, 537, 250], [543], [255], [458], [573], [], [338], [944], [697, 819, 854], [797], [491], [714, 402], [578, 627, 982], [16], [382], [531], [58], [933], [269], [653], [960, 813], [200], [791, 922], [549], [522], [155], [632], [110], [453, 894], [182], [513], [396], [243], [], [368], [440, 455], [181], [438], [759], [], [331], [259], [728], [807], [596], [634], [517], [], [599], [157], [159], [324], [581, 479], [179], [115], [645], [816], [155], [604], [673, 526, 527, 664, 508], [29], [], 
[550], [972], [284], [403], [874], [315], [637], [393], [421], [], [459, 845], [221], [963], [558], [258, 222], [400], [40, 44], [991], [444, 670], [147], [50], [763], [502, 638, 639], [], [72], [648], [137], [119], [548, 851], [610], [475], [487], [72], [], [559], [858], [935], [731], [455], [], [973], [395], [786], [263], [734], [], [308], [728, 412], [256], [289], [79], [349], [461], [591], [72, 815], [], [408], [54], [634], [601, 578], [70], [671], [166], [439], [869, 841, 523], [72], [923], [327], [651], [856], [427, 756], [989], [834, 982, 906], [], [551], [389, 983], [559], [75], [75], [902], [830], [102], [369], [921], [551], [], [70], [366], [769], [107], [517], [314], [272], [434], [238, 239], [607], [58], [8, 7], [832], [220], [182], [975, 980, 703], [978], [869, 617], [378], [748], [248], [431], [866], [960], [659], [468, 919], [953, 954], [930], [873], [40, 46], [518], [466], [833], [408], [923], [937], [], [508], [314], [734], [696], [956], [615], [320], [946], [196], [914], [123], [903], [34], [56], [213], [512], [208], [99], [372], [205], [677], [786], [], [788], [772], [94], [99], [783, 677, 463], [], [555], [770, 836, 837, 733, 862, 610], [882], [937, 938], [592], [11], [], [13], [736, 515], [418, 767], [197], [914], [524], [233], [882], [], [], [315], [83], [788, 502], [124], [979], [320], [169], [491], [861], [784], [893], [517, 540], [340], [501], [52], [514], [72], [366], [961], [224, 208], [702], [275], [], [267], [421], [54], [963], [275], [756], [316], [945], [606], [198], [177], [928], [612], [497, 663], [587, 784], [], [375], [75], [709], [678, 638, 523, 818, 819], [360], [442], [330, 331], [566], [150], [], [], [], [8, 7], [557, 663, 442], [706, 421, 970], [458], [51], [2, 3], [702], [659, 923], [553], [8], [17], [17], [880], [734, 407], [273], [933], [953], [891], [464], [237], [860], [669], [857], [2], [448, 858], [869], [69, 110], [350], [273], [73], [], [727], [506], [306], [91], [], [181], [926], [17], [24], [545], [957], [845], 
[104], [513], [53, 759], [777], [847], [187], [378], [696], [940], [200], [155], [409], [229], [123], [850], [723], [578], [247], [847], [956], [51], [890], [907], [], [646], [182], [12], [958], [980], [172, 173], [999, 499, 700], [844], [21], [811], [830], [512], [531], [236], [581, 479, 511], [542], [565], [169], [], [453], [900], [], [740], [235], [192], [371], [475], [121], [126], [50], [161], [240], [421, 976, 978], [422], [172], [803], [], [673, 526, 527, 782, 664, 508], [230], [479], [978, 515], [841, 501], [545], [758], [4], [123], [329], [503], [966], [281], [209], [401], [687], [483], [30], [949], [873, 839], [974], [576], [514], [988], [173], [], [164], [991], [882], [609], [756], [], [], [936], [113, 125], [453], [], [471], [67], [242], [257, 850], [141], [571], [321], [631], [586], [902], [793], [378], [608, 421, 869], [520, 669], [748], [121], [84, 7], [396], [670], [453], [797], [917], [746], [608, 972], [421, 506], [997], [910], [257], [802], [688], [210], [905, 859], [337], [753], [519], [750], [625], [476], [651, 527, 664], [683], [962, 923], [800], [802], [874], [853], [625], [585], [93], [928], [407], [955], [807], [54], [], [402, 836, 837], [494], [87], [756], [], [244], [522], [347], [692], [886], [182], [864, 867], [884], [985], [914, 484, 780], [635], [304, 302], [18], [], [610], [440], [808, 968, 504], [850], [698, 483], [469, 926], [], [518, 568, 570], [575], [928, 762, 923, 927], [106], [977, 638, 639], [770], [519], [542, 559, 541], [456, 733], [], [617], [235], [200], [280], [842, 879, 977, 978], [191], [740], [553, 750, 831, 894], [576], [983], [962, 923], [217], [199], [7], [828], [656], [217], [23], [47], [439], [152, 155], [400, 667], [18], [197], [631], [442, 494], [235], [377], [], [966, 572], [777], [528], [238], [997, 947], [183], [133], [861], [394], [425], [389], [13], [294], [243], [69], [850], [56], [143], [360], [547], [462], [], [552], [611], [322], [572], [494], [197], [833], [708], [548, 851, 632], [918], [124], [459], 
[149], [361], [520], [458], [270], [186, 193], [667], [675, 850, 757], [453], [833], [716], [190], [], [30], [949, 954], [211], [834, 517, 906, 630, 671], [374], [], [670, 518], [450], [914], [39], [261], [], [463], [], [100], [488, 679], [995], [760], [230, 231], [110], [], [251], [], [814], [490, 600], [38], [683], [994], [553], [673, 508], [277], [839], [564, 669], [920], [483], [551, 629], [757], [217], [877], [60], [785], [533], [1], [401], [214], [853], [126], [295], [318], [892], [719], [462], [124], [240], [516], [535], [149], [521], [152], [393], [562], [195], [962, 933, 923], [419], [1], [103], [423], [824], [582], [780], [370], [228], [581], [456], [], [984], [], [997, 947], [114], [837, 841], [333], [490, 524, 461, 787], [889], [858], [93], [6, 983], [], [656], [986], [991], [812], [608, 465, 597], [857], [311], [652], [610], [445], [246], [231], [673, 664, 526, 527, 632, 508], [221], [], [149], [304], [], [], [285], [354], [966], [78], [], [31], [500], [617], [665], [946], [604], [130], [246], [464], [237], [339], [50], [809, 923], [859], [], [], [581], [550], [], [898, 585], [201], [701], [274], [12], [153], [12], [], [345], [], [368], [225], [], [9], [41], [527, 782, 916, 664], [932], [981], [776], [363], [239], [694], [], [232], [905], [669, 564], [850], [195], [179], [328], [849], [167], [539], [173], [166], [829], [680], [145], [37], [268], [523], [394], [718, 975], [779], [567], [377], [670], [965], [139], [472, 693], [355], [538], [167], [841, 501], [212], [788, 795], [918], [897], [610], [718, 888], [726], [158], [145], [868], [], [361], [654], [327], [869], [417], [305], [350], [578, 689], [879], [401], [241], [937], [600], [284], [537], [172], [494], [408, 414, 465, 608], [695], [696], [525], [805], [961, 909], [627], [949], [647], [35, 37], [911, 658, 824, 568], [944, 946], [923], [346], [457, 834], [349], [79], [], [612], [104], [104], [596, 284], [835], [614], [568], [322], [301], [265], [758], [866], [829], [358], [977, 978], [906], [24], 
[571], [334], [785], [694], [299], [], [654], [722], [511, 479], [272], [271], [409], [515], [6], [927], [337], [708, 557, 538], [997], [673, 664, 526, 527, 782, 632, 508], [895], [353], [], [385, 101], [236], [174], [214], [642], [932], [440], [904], [903], [766], [975], [11], [283], [416], [792], [36], [35, 37], [544, 521, 910, 926], [598], [578], [281], [990], [110], [391], [859], [], [959], [693], [688], [588], [497], [753], [350], [44], [529], [760], [945], [303], [985], [51], [], [111], [412], [708], [179], [52], [581], [852], [734], [884], [608, 610, 836, 837, 557], [625], [711], [960], [936, 938], [807, 637], [226], [], [276], [195], [863], [457], [88], [], [760], [180], [593, 650], [543], [654], [939, 943], [698], [956], [594], [841, 911], [], [694], [496], [544], [198], [693], [956], [243], [102], [118], [783], [248, 250], [189], [5], [479], [507], [438], [973], [168], [434], [814, 913], [214], [349], [817], [726], [821], [585], [9], [908, 895], [333], [334], [], [580], [201], [386], [985, 716], [195, 159], [430], [546, 776, 650, 819, 632], [207], [261], [209], [895], [358], [321], [681, 620, 951], [333], [711], [286], [445], [293], [880, 430], [], [818], [996], [327], [573], [526], [843], [713], [847], [179], [268], [248, 250], [337], [177], [968], [688], [652], [962], [383], [220], [815], [810, 878], [146], [39], [455], [52], [141], [463], [828], [981], [787], [497], [620], [786], [615], [240, 238], [893], [30], [486], [825], [418], [649], [64, 55], [779], [48], [621], [159], [570], [43], [539], [], [], [], [945], [392], [606], [208, 250], [538], [949], [91], [207], [985], [951], [580], [79], [259], [645], [826], [581, 751, 817, 479], [640], [47], [453, 454, 624], [896, 435], [725], [], [384], [121], [], [234, 214], [894], [991], [315], [374], [], [], [614], [569], [497], [605], [339], [], [378], [82], [], [576], [610], [905, 532, 441, 572, 834, 966], [416], [780], [129], [], [386], [573], [628], [853], [982], [786], [672, 970], [908], [325], [331], 
[380], [551], [487], [], [859], [882, 613], [125], [245], [379], [561], [840], [867], [437], [52], [646], [536], [382], [647], [323], [], [175], [874], [578, 903, 689, 885], [535], [937], [462], [433], [189], [654], [592], [357], [94], [341, 703], [468, 919, 920], [377], [148], [362], [14], [326], [319], [659], [857], [681, 620, 508], [849], [841, 608, 636], [289], [785], [779], [870], [], [302], [], [373], [29], [486], [201], [239], [735], [954], [143], [563], [48], [807], [430], [571], [345, 690], [690], [129], [399], [393], [181], [391], [907, 478], [400], [647], [544], [], [871], [697], [263], [774], [916], [708], [509], [135], [812], [385], [214], [285], [76], [261], [390], [590], [595], [397], [936], [168], [525], [], [502], [914], [449], [750], [471], [528], [19], [966], [879], [38, 44], [121], [837, 441], [801], [893], [], [793], [104], [873], [144, 977], [313], [267], [204], [868, 923], [488], [236], [334], [26], [427], [621], [715, 764], [692], [104], [627], [578, 903, 601], [595], [921], [785], [204], [759], [721], [214], [330], [564], [565], [59], [], [489], [515], [23], [191], [125], [629], [578], [514, 788], [580], [171], [444], [681, 620, 508], [215], [289], [742], [175], [821], [826], [93], [241], [], [719], [62], [11], [995], [497], [77], [162], [], [494, 442], [357, 337], [497], [977], [990], [363], [506], [264, 263], [103], [609], [671], [548, 905, 851, 831, 598], [306], [], [48], [445], [875], [387], [731], [361], [], [132], [82], [923, 924], [257], [945], [843], [819], [481], [147], [292], [968, 651, 504], [971, 815], [76, 568], [245], [82], [870], [671], [], [46], [463], [845], [944], [12], [602], [483], [4], [182], [958], [900], [301], [335], [734], [515], [428], [789], [481, 482], [902], [234, 852], [417], [802], [655, 500], [363], [354], [64, 55], [281], [796], [150], [668], [618, 284], [144], [159], [194], [345], [414], [482], [296], [446], [814], [516, 601], [425, 730], [534, 729], [729], [868, 923], [813], [475], [129], [740], [573], 
[437], [760], [792], [873], [644, 470], [279], [907], [434], [229], [610, 655], [795], [185], [521], [232], [672, 570], [165], [902, 769, 726], [649, 979], [400, 667], [788], [812], [715], [237], [747], [608, 728, 824, 630, 414], [39, 48], [424, 423], [590, 487], [291], [771], [15], [485], [836], [297], [38], [967], [329], [138], [739], [518, 652, 691, 570], [867], [608, 459], [804], [665], [515, 906], [9], [919], [111], [], [923], [5], [421], [533, 539], [674], [828], [836, 837], [963, 567], [399], [545], [892], [381], [597, 763], [473], [540], [248, 250], [80], [612], [806], [31], [965], [823], [446], [346], [921], [817], [195], [479], [725], [518], [467], [634], [968], [921], [141], [284], [546, 650, 402], [923], [454], [296], [171], [865], [276], [132], [970], [970, 980], [443], [653], [66, 68], [874], [259], [300], [445], [580], [738], [], [889], [904], [252], [], [897], [8, 7], [168, 205], [960], [235, 242], [510], [839], [752], [958], [436], [543], [797], [442, 538], [537], [7], [491], [160], [659, 438, 647], [872, 759, 622, 732, 414], [402, 703], [876], [465], [453, 454], [800], [757], [626], [912, 716], [880], [720], [880], [248, 249], [801], [452], [265], [379], [841, 447], [108, 991], [862], [394], [715], [812], [32], [452, 433], [678], [8], [79], [747], [316], [48], [], [232, 760], [648, 720], [588], [74], [563], [970, 518, 671], [972, 858], [566], [996], [596], [335], [476], [83], [513], [868], [], [968, 849, 725, 504], [892], [309, 984], [496], [673, 742, 664, 526, 527, 508], [632], [40, 46], [], [366], [570], [11], [691], [], [427], [850], [446], [434], [48], [], [20], [796], [467], [528], [363], [871], [112], [222], [872], [434], [561], [238], [962, 692], [713], [41], [338], [847], [943], [15], [774], [610], [148], [497, 442], [84], [836, 837, 678], [130], [885], [288], [340], [844], [654], [213], [974], [849], [419], [669], [35], [280], [323], [142], [157], [64], [553], [931], [379], [357], [466], [646], [967], [377], [256], [283], [289], [486], 
[], [722], [850], [], [92], [], [330], [378], [538], [151], [], [122], [6], [443], [670], [672, 471], [946], [38], [224], [752], [444], [420], [906], [372], [755], [480], [490], [613], [259], [578, 639], [646], [793], [235], [], [737], [777], [584], [553], [822], [388], [585], [862], [523], [745], [582, 941, 728], [82], [56], [195, 697], [801, 570], [787], [885], [740], [122], [618], [389], [153, 204], [563], [190], [610, 487], [967], [822, 542], [399], [565], [649], [696], [298, 299], [964, 951], [207], [26], [206], [635], [739], [523], [512], [385], [487], [500], [675], [38], [777], [934], [777], [624, 283, 453, 454], [805], [960, 967, 968], [859, 868, 521, 651], [337], [621], [169], [515, 680], [863], [886], [626], [406], [630], [45], [365], [512], [333], [415], [909, 926, 968, 504], [495], [886], [91], [105], [273], [421], [829], [153], [417], [510], [519], [979], [941], [569], [992], [263], [948], [819], [936], [], [84], [750], [139], [716], [395], [677], [949], [], [968, 504], [509], [378], [423], [305], [451], [59], [968, 504], [206], [32], [506, 421], [24], [140], [804], [715], [581, 656, 436, 479], [312, 311], [938, 923, 935], [684], [316], [94], [988], [42], [908, 404, 895], [908], [963], [252], [322], [900, 540, 812], [], [426], [180], [821], [], [502], [739], [261, 174], [650], [494], [581, 479, 817], [35], [90], [591], [432], [613], [626], [102], [489], [411], [168], [627], [834, 458], [903, 558], [], [372], [900], [49], [206], [766], [53], [783], [265], [71], [812], [136], [589], [61], [], [64, 59], [69], [710], [129], [], [496], [15], [911, 474], [659], [120], [], [432], [428], [140], [801], [217], [669], [994], [330, 332], [999, 281, 700], [958], [12], [608], [487, 402], [548, 851], [334], [750, 721], [241], [624, 453, 454], [392], [736], [301], [441, 572], [386, 101], [], [581, 436, 479], [810, 878], [568], [106], [482], [850], [28], [770], [1], [843], [655], [94], [464, 597], [741], [693], [468], [660], [917], [329], [], [654], [871], [390], 
[342], [], [572, 966], [950], [120], [146], [302], [], [519], [], [], [197], [505], [155], [825], [188, 189], [96], [237], [726], [325], [229], [507], [457, 834], [93], [], [260], [930], [510], [346], [983], [395], [317], [289], [554], [34], [713, 742], [992], [162], [211, 159], [401], [88], [559], [760], [484], [636], [309], [14], [78], [725, 901], [378], [431], [267], [223], [423, 424, 589], [973], [681, 810, 620], [618, 469], [], [167], [383], [117], [], [302], [479, 436], [389], [663], [346], [323], [822], [126], [432], [524], [994], [968], [], [355], [562], [420, 683, 875], [789], [847], [60], [842, 638], [720], [724, 536], [373], [398], [780], [673, 620, 664, 526, 527, 846, 632, 508], [540], [616], [104], [873], [417], [436], [277, 278], [668], [945], [184, 191], [682, 708], [225], [], [546], [674], [146], [580], [903], [665], [821], [682], [216], [684, 784], [571], [621], [287], [120], [774], [849], [223], [498], [608], [193, 194, 187], [982], [1], [771], [882], [469], [], [388], [344], [377], [610], [816], [621], [940, 463], [435], [515], [603], [402, 559, 836], [450], [], [800], [628], [865], [610], [], [15], [762], [775], [539], [531], [185], [579], [482], [398], [419], [976], [650], [771], [491], [910], [69], [207], [939], [], [100], [134], [506, 421], [249], [525], [171], [999, 861], [287], [497, 884], [], [249, 250], [600], [765], [609], [216], [788, 831], [210], [781], [923, 550, 967, 968, 762], [781], [198], [673], [235], [684], [429], [828], [86], [869], [215], [209], [435, 151], [397], [430], [791], [187], [436], [849], [603, 764], [144], [591], [808], [793], [909, 827, 926], [272], [], [80], [313], [923], [251], [53], [430], [119], [562, 825], [499], [919, 733], [359], [57], [820], [131], [330], [507], [781], [975, 703], [286], [761], [231], [841, 885, 630, 636], [128], [713], [780, 724], [604], [], [307], [880], [955], [910, 729, 828], [338], [928], [], [494], [340], [822, 577], [500], [859], [202], [975, 562], [633], [856], [210], [834, 836, 
837, 650, 906, 819], [], [658], [366], [634], [160], [134], [277, 278], [155], [570], [102], [27], [421], [50], [401], [785], [906], [288], [487], [966, 572], [671], [788], [759], [377], [690], [816], [655], [72], [748], [592], [241], [893], [560], [18], [246], [901], [270], [782, 664, 830], [414], [819], [196, 198], [122], [839], [622, 759], [456], [278], [724], [333], [664, 971], [610, 841], [498], [965], [409], [241, 238], [136], [114], [453, 553, 894], [8], [869], [932], [587], [519, 950], [354], [648], [], [489, 22], [903], [442], [987, 998], [44], [795], [265], [933], [911], [748], [23], [396], [795], [1], [802], [], [], [479], [81], [525], [836, 837, 841, 978, 501], [626], [356], [610], [470], [666], [846], [91], [137], [], [529], [569], [993], [452], [616], [940], [293], [351], [604], [244], [551], [47], [354], [481], [800], [455, 440], [711], [23], [5], [700, 999], [148], [536], [886], [368], [246], [468], [672], [879], [171], [541, 62], [714], [28], [169], [993], [17], [442, 497, 858], [839], [679, 721], [160], [845], [251], [898], [423], [480], [581, 468], [500], [396], [883, 572], [431], [956], [361], [53], [817], [49], [729], [522], [], [939], [338], [391], [965], [625], [884, 406], [774], [546, 776, 158], [839, 718], [458], [213], [48], [950], [478], [431, 697], [34], [352], [703], [931], [830], [968, 504], [], [938], [320], [195], [121], [774, 977, 978], [437], [563], [26], [362], [16], [328], [841], [673, 526, 527, 782, 664, 508], [469], [13], [463], [14], [922], [231], [26], [921, 445], [], [22], [996], [222], [440, 737, 455], [232], [133], [607], [293], [117], [343], [476], [291], [565], [521], [825], [724], [295], [219, 220], [364], [258], [], [483], [], [710], [474, 911], [538], [64, 55], [], [539], [573], [603], [], [393], [923], [934], [922], [469], [871], [], [402], [474, 799], [616], [544], [50], [], [414], [595, 866], [825], [], [131], [515], [351], [297], [976], [577], [764], [903], [699], [335], [229], [666], [444], [168], [560], [847], 
[], [286], [], [6], [64], [218], [747], [669], [287], [825], [], [370], [957], [662], [875], [963], [165], [260], [646], [778], [197], [753], [996], [930], [453, 742, 681, 620], [677], [518], [63], [346], [517], [610], [672, 797], [276], [721], [383], [571], [787], [735], [75], [834, 681, 906, 526], [345, 346, 730], [54], [443], [597], [652], [770], [212], [116], [368], [388], [87], [690], [368], [854], [117], [], [105], [457, 834], [93], [], [342], [96], [834], [406], [17], [798], [866], [930, 415], [471], [574], [83], [698], [799], [24], [208], [459, 445], [946], [981], [887], [732], [687], [68], [966, 572], [999], [478], [263], [417], [244], [128], [974], [580], [515], [2], [893], [532], [56], [169], [714], [617, 691, 570], [366], [141], [38, 45], [309], [731, 861], [957], [845], [], [732], [411], [668], [850], [747], [565], [989], [508], [322], [547], [50], [752], [455], [806, 630], [103], [752, 852], [483], [845], [56], [427], [10], [881], [426], [300], [864], [184, 191], [316], [158], [557, 718], [253], [550], [260], [638, 639], [250], [842, 814, 977, 978, 693, 445, 639], [366], [], [388], [237, 158], [252], [743], [391], [816], [76], [399], [897, 285], [441], [], [6], [98], [289], [653, 493], [914], [696], [863], [701], [100], [825], [977, 978], [965], [384], [70], [605], [937, 962, 935], [979], [22], [669], [899], [64, 59], [640], [345, 347], [354], [491], [29], [141], [963], [27], [563], [250], [155], [236], [793], [969], [739], [780, 914], [125], [813, 567], [920, 779], [429, 463], [303], [665, 518], [12], [673, 810, 527, 664, 508], [370], [429], [793], [809], [16], [276], [], [679], [748], [323], [204], [201], [784], [286], [138], [303], [192, 185], [774], [], [669], [288], [489], [659], [588], [912], [735], [611], [99], [], [938], [358, 359], [218], [807], [907], [550], [36, 37], [834, 655], [904], [919], [699], [840], [698], [578, 819], [592], [767], [518, 670], [117], [258], [], [592], [256], [], [666], [265, 267], [833], [602], [474], [541], [614, 
894], [760], [241], [507], [557, 22], [854], [418], [260], [673, 892, 681, 620, 526, 508], [269], [203], [277], [491], [48], [738, 999, 905, 700], [455], [239], [642], [236], [178], [403], [], [25], [546, 819], [834, 906], [776], [755], [816], [338], [778], [89], [], [560], [], [665], [939, 943], [914], [29], [683], [130], [0], [], [57], [335], [190], [971], [294], [175], [955], [524, 461], [583], [], [346], [157], [134], [], [112], [987, 998, 809, 923, 925], [90], [], [39], [345, 690], [678], [175], [749], [149], [813, 910, 926], [965], [101], [491], [122], [954], [434, 797], [311], [679], [597], [], [362], [786], [767], [27], [751], [724], [409], [694], [724, 536], [904], [197], [692, 790, 509], [], [901], [554], [928, 930, 923], [865], [65], [789], [958], [427], [927], [950], [274], [379, 381], [656], [320], [132], [855], [10], [41], [938], [553], [557], [897, 651, 760], [839], [811], [692, 760, 700], [616, 830], [593, 650], [610], [366], [885], [835], [291], [543], [448], [435], [486], [679], [750], [919], [734], [], [534], [964], [82], [], [287], [294], [714], [784], [991], [103], [925], [226], [63], [214], [578], [872, 681, 620, 622, 759, 414], [738], [135], [434], [610], [907, 440], [703], [112], [], [772], [606], [137], [162, 167], [744, 657], [], [277, 278], [321], [763], [104], [466], [303], [238], [726], [358], [216], [112], [], [32], [70], [594], [392], [159], [12], [206], [238, 216], [536], [791], [190], [674], [223], [610, 402], [44, 26], [539], [479], [81], [194, 203], [247, 215], [880], [17], [794], [], [], [420], [896], [], [409], [42], [114, 947], [433, 460, 975, 977], [710], [989], [745], [907, 440], [261], [303], [657], [518], [565], [614], [847], [607], [866], [371], [676, 199], [726], [478], [490], [700, 999], [978], [836, 837, 619], [305], [], [768], [648, 631], [265], [399], [523], [], [896, 648], [], [410], [913], [211], [512], [522], [336, 337], [681, 620, 632], [159], [307], [], [944], [808, 638, 639], [731], [796], [20], [392], [571], 
[576], [518, 830], [873], [789], [928], [311], [24], [858], [974], [422], [241], [729], [569], [494], [684], [387], [746], [45], [95], [582], [819, 854], [380], [521], [252], [504], [], [439, 541, 542], [174], [630], [937], [349], [542], [577], [465], [239], [378], [568], [218], [969], [610, 898], [844], [975, 703], [479, 817], [999, 434], [103], [865, 850], [210], [244], [881], [127], [426], [728, 790], [763], [903], [280], [744, 884], [301], [931], [822], [127], [256], [48], [418, 629], [352], [736], [343], [733], [405], [685], [638, 639], [808], [207], [722], [992], [985], [850], [506], [31], [], [739], [601], [344], [190], [876, 435], [810, 878], [673, 419], [850], [460, 975, 536], [874], [103], [852], [750, 242, 831], [176], [992], [895], [785], [281], [994], [378], [622], [374], [140], [414], [952], [678], [51], [321], [898], [586], [858], [602], [843], [440, 441], [285], [], [553, 493], [699], [109], [945], [948], [746], [293], [217], [223], [474], [42], [955], [332], [424, 423], [], [913], [678], [972], [131], [34], [850], [857], [619, 750, 846, 721], [769], [], [28], [742], [766], [836, 638, 639], [238], [744, 657], [233], [], [762, 923, 959], [], [135], [645], [964, 923], [559], [609], [78], [894], [800], [803], [636], [469], [167], [196], [247], [711], [275], [659, 959, 762, 923], [138], [730], [695], [992], [88], [], [407], [41, 44], [988], [239], [932], [152], [678], [156], [615], [601], [295], [925], [735], [639], [683], [822], [732], [], [665], [651], [859], [619, 846], [500, 825], [736], [388], [346], [183], [943], [152], [443], [479], [52], [150], [174], [911], [828], [281, 282, 539], [56], [595], [49], [699], [589], [817, 573], [80], [538], [130], [], [315], [917], [766], [498], [678], [617, 823, 153], [619, 846, 750, 721], [154], [930], [96], [289], [737, 455], [874], [308], [884], [898, 455, 680, 711, 968, 473, 826], [], [578], [795], [218], [693, 472], [375], [311], [137], [755], [566], [], [107], [606], [540], [774], [510], [911, 824], [392], 
[], [233], [570], [462], [88], [893], [763], [926], [142], [877], [371], [673, 681], [479], [975, 977], [163], [196, 837, 198, 836], [265], [416], [377], [256], [148], [397], [571], [876, 435], [380], [561], [243], [834], [932], [150], [585], [688], [382], [0], [322], [388], [946], [75], [473], [458], [375], [660], [687], [882], [583], [967, 968], [527], [255], [], [604], [937, 942], [], [249], [680], [250], [243], [62], [791], [62], [154], [73], [596], [754], [47], [], [488, 841, 843], [37], [18], [288, 290], [], [244], [224], [237], [12], [], [624, 453], [443], [727], [], [384], [327], [472], [257], [944], [787], [889, 486], [977, 978, 445], [334], [], [157], [412], [], [892], [26], [40], [815], [603], [265], [977, 978], [16], [547], [352], [49], [339], [608, 610], [349], [742], [401], [495], [], [509], [814], [146], [604], [341], [602], [578], [702], [996], [107], [95], [736, 515], [577, 641], [116], [44, 26], [276], [279], [558], [386], [748, 600], [133], [242], [616], [379], [850], [349], [552], [635], [384], [292], [798], [457], [995], [], [429], [109], [814], [895], [80], [], [723], [335], [810, 878], [449], [245], [159], [907], [209], [933], [80], [762, 959], [690], [728], [184], [522], [109], [208], [551], [984], [982], [138], [], [891], [], [428, 792], [51], [416], [636], [750, 721], [100], [114], [109], [670], [727], [511], [754], [300], [724], [703], [636], [481, 485, 632], [], [189], [460, 437], [621], [513], [150], [755], [875], [351], [759], [301], [202], [198], [324], [144], [119, 120], [171], [971], [620], [656], [305], [907], [113], [865], [270], [345], [706], [980], [], [479], [416], [180], [93], [199], [105], [94], [677, 587], [185], [], [394], [352], [550], [908], [31], [147], [884, 406], [928, 850], [557], [528], [148], [4], [278], [474], [919, 733], [650], [465], [279], [512], [841], [439], [56], [349], [], [747], [271], [740], [916], [112, 506], [100], [449], [319], [375], [513, 579, 881], [542], [300], [220], [496], [866], [645], [107], 
[816], [506], [32], [472], [850], [330, 331], [598], [653], [360], [179], [172], [175], [984], [806], [970, 915], [579], [544, 926], [226], [401], [117], [372], [], [335], [951], [750, 721], [491], [856], [165, 234], [743, 905], [898, 585], [566, 439], [488, 843], [987, 998], [987, 998], [899], [132], [571], [778], [543], [88], [924], [767], [569], [55, 59], [113], [542], [704], [44], [884, 532, 762, 923, 572], [459], [750], [29], [152, 157], [61], [], [437], [863], [875], [164], [722], [785], [927], [], [751], [364], [864], [250], [700], [554], [830], [794], [365], [219], [], [650, 558], [], [237, 180], [], [773], [295], [413], [177], [914], [563], [569], [303], [921], [], [670], [140], [738, 957], [274], [785, 180], [26], [311], [8], [945, 939, 943], [450], [754], [228], [239], [566], [561], [486, 889], [237], [874], [362], [264, 263], [662], [], [977], [199], [254, 262], [289], [304], [839, 718], [248, 250], [804], [900], [364], [182], [284, 861], [421], [65], [445], [916], [26], [709], [955], [135], [630], [421], [919], [217], [5], [790], [237], [997], [686], [31], [460], [88], [738, 421], [296], [45], [470], [825], [], [323], [956], [570], [352], [442, 494], [366], [311], [749], [87], [479, 817, 511], [894], [868, 470, 923], [213], [981], [347], [533], [483], [724, 536], [76], [395], [903], [367], [], [293], [780], [909], [342], [955], [803], [768], [], [948], [414, 478], [701], [777, 623], [758], [367], [543, 422], [708], [514], [488, 695], [692, 917], [836, 837, 977, 978], [915], [966], [437], [207], [85], [341], [232], [654], [263], [779], [394], [476], [], [367], [643], [741], [883], [412], [327], [758], [291], [936], [739], [560], [778], [141], [153], [890], [207], [734], [846, 619], [896, 999], [979], [570], [903], [109], [868, 987], [], [93], [890], [], [489, 15], [140], [570], [512], [770], [74], [529], [233], [669], [281], [72, 815], [312], [410], [440], [363], [231], [110], [992], [786], [765], [], [578], [619, 858], [351], [619], [905, 495], [857], 
[518, 652, 691, 570], [253], [799], [129], [686], [916], [100], [26], [299], [617], [745], [214], [], [577], [967], [963], [64, 55], [538], [22], [296], [709], [454, 652], [604], [428], [482], [53], [696], [544], [819], [546, 650, 819, 822, 542], [463, 925], [382], [], [], [362], [8], [726], [625, 554], [771], [717, 733], [767], [356], [554], [293], [396], [684], [235], [552, 733], [932], [3], [679], [507], [203], [398], [943, 945], [470], [50], [662], [936], [114], [508], [574], [846], [125], [628], [637], [358], [56], [], [576], [906], [309, 410], [873], [388], [728], [119], [864], [], [911], [239], [938], [745], [580], [576], [90], [405], [695], [215], [], [600], [519], [868, 532, 762, 923, 572], [436], [465], [372], [423], [878], [853], [696], [93], [976], [53], [360], [955], [942], [679], [], [252], [959], [426], [680], [585], [749], [393], [283], [601], [70], [], [448], [247], [711], [854], [638, 639], [507], [32], [805], [957], [795], [807], [838, 631], [960], [489], [820], [489], [204], [241, 238], [802], [364], [871], [228], [790], [174], [690], [], [540], [466], [], [886], [66], [481, 482], [734], [371], [785], [279], [636], [518], [167], [582], [679], [13], [915], [552], [878], [552], [489, 273], [617, 823], [111], [582, 790], [505], [314], [711], [419], [267], [719], [786], [838], [298, 357], [89], [13], [68], [441], [2, 3], [514], [64], [332], [547], [185, 186], [], [73], [643], [386, 101], [], [752], [647], [], [470], [343], [302], [181], [493], [831], [394], [39, 47], [232], [949], [638, 639], [745], [485, 761], [834, 630, 637], [224, 852, 205], [26], [992], [186], [769, 587], [579, 881], [850, 854], [507], [814], [19], [632, 851, 548], [875], [168, 159], [317], [912], [732], [747], [464], [566], [235], [105], [593], [71], [575], [218], [254], [167], [110], [267], [87], [172], [49, 50], [966, 572], [870], [64], [], [418, 767], [579], [538], [388], [651], [465], [166], [325], [574], [681, 810, 620, 508], [543], [978], [76], [538], [806], [993], [], 
[964], [104], [204], [696], [370], [610], [949, 647], [208], [558, 699, 541], [894], [164], [763], [428], [485], [514], [220], [211], [98], [399, 501], [114], [749, 542], [32, 30], [780], [395], [333], [626], [488], [841, 523, 412], [433], [566], [82], [], [145], [465], [652, 764], [66, 68], [643], [968], [896, 725], [122], [515, 869, 763], [967], [27], [121], [200], [308], [113], [243], [874], [997], [173], [444], [91], [145], [744, 812, 657], [43], [555], [555], [372], [545], [617, 823, 487], [368], [266], [419], [901], [470], [610], [608, 748], [3], [], [], [910], [386, 101], [486], [336], [760], [130], [513, 776, 875], [976], [132], [541, 542], [], [309], [407], [500], [101], [174], [535], [228], [794], [299], [906], [762, 554], [24], [115], [299], [], [809, 532, 923, 925, 926], [78], [652], [], [666], [451], [391], [784], [243], [924], [655], [], [609], [191], [607], [636], [318], [908, 404], [338], [57], [480], [374], [923], [505], [671, 898, 535], [682, 562], [70], [814], [548], [514], [478], [353], [185], [48], [328, 109], [436], [], [250], [398], [374], [385], [293], [48], [509], [462], [423], [981], [932, 415], [920], [796], [346], [247, 159], [550], [146], [652], [2], [497], [693], [256], [996], [277], [241], [423, 424, 831], [663], [462], [261], [213], [769], [440], [], [640], [228], [8], [450], [101], [948, 950, 957], [48], [57], [608, 523], [405], [805], [369], [133], [467], [], [232], [675, 208], [], [870], [322], [872, 622, 759], [123], [623], [49], [977, 978], [220], [877], [778], [22], [165], [719], [372], [95], [123], [337], [889], [776], [115], [574], [217], [938], [973], [887], [168, 178], [291], [888], [469, 919], [515], [669], [576, 693, 954], [913], [866], [375], [9], [236], [24], [369], [952], [923, 809, 947], [122], [584], [397], [806, 559, 463, 610], [84], [818], [], [394], [778], [619, 846, 721, 831], [212], [754], [245], [654], [244], [250], [156], [562], [933], [202], [910, 567], [764, 413], [], [427, 756], [155], [197, 199], [334], 
[24], [118], [110], [397], [420], [244], [640], [933], [228], [659, 952], [582, 680, 791], [463], [92], [138], [692, 960, 582], [77], [939], [38], [842, 433], [381], [174], [431], [987, 938, 923], [440], [971], [560], [424], [92], [508], [839], [698], [558], [729, 495], [711], [669], [811], [84], [744, 657], [426], [662], [26], [960, 931, 415], [799], [964, 813], [651], [813], [874], [366], [], [183], [738], [878], [975], [916], [149], [923], [451], [944], [761], [836, 837, 610, 870], [], [249], [960], [168], [610], [834, 588], [996], [747], [738], [892], [145], [426], [987, 998], [205, 213], [546, 650, 819], [239], [781], [673, 664, 526, 527, 782, 632, 508], [127], [726], [117], [562], [653], [404], [476], [9], [288], [531], [172, 177], [513, 715, 439], [586], [364], [453], [133], [180], [899, 619, 849], [553, 728], [836, 837, 885], [474], [858], [692], [252], [161], [525], [737], [487], [686], [73, 74], [339], [979], [810, 878], [230, 231], [426], [687], [835], [187], [998], [546], [37], [593], [990], [367], [508], [526], [36], [217], [], [473], [242], [207], [963], [750, 721], [563], [281, 282], [], [678], [742], [208], [18], [218], [212], [728], [367], [74], [520], [890], [570], [692], [275], [971], [428], [408], [442], [274], [702], [131], [849, 505], [994], [400, 667], [216], [501], [453, 624], [729], [844], [397], [], [], [], [987], [459], [173], [513], [650], [609], [581, 656], [865], [647, 659], [544], [870], [137], [522], [681, 620], [867], [817], [300], [675], [205], [463], [223], [], [52], [830], [443], [431], [893], [512], [461], [402], [41], [257], [750, 846, 721], [69], [127], [700], [759], [608], [384], [937], [298], [492], [362], [14], [958], [705], [827], [613], [427], [783], [673, 742, 526, 527, 782, 664, 508], [376], [577], [569], [894], [384], [262], [556], [162], [394], [898], [439], [48], [998], [188], [700], [459], [933], [985], [828], [399], [396], [], [801], [411], [769], [198], [829], [493], [632], [751, 479], [211], [222], [163], [979], 
[594], [189], [557], [927], [], [783], [114], [49], [885], [490, 524, 461], [893], [872], [128], [488], [], [472, 693], [694], [150], [796], [22], [608, 873, 414], [867], [372], [711], [900], [204], [175], [606], [392], [283], [692], [821], [], [20], [255], [880], [105], [710], [], [737], [183], [], [399], [462], [963], [816], [783], [38], [20], [809, 925], [18], [489], [809, 659, 925], [619, 846, 818], [916], [257], [526, 539, 588, 738, 883], [815], [360], [128], [608, 515], [138], [415], [987, 998], [499], [216], [], [804], [878], [505], [242], [598], [56], [865], [166, 167], [544], [677], [370], [558, 889], [487], [28], [678], [713, 742], [324, 946], [73, 74], [747], [584], [96], [382], [578], [161], [291], [940], [129], [929], [749], [], [349], [454], [44, 634], [362], [473], [552], [798], [87], [953], [676, 597], [643], [47], [915], [968, 534, 504], [896], [435, 876], [879], [563], [871], [], [59], [682], [645, 735], [], [487], [737], [], [43], [210], [103], [584], [595], [834, 906], [274], [], [860], [], [], [977, 978], [928, 659, 949, 927], [574], [630], [901, 725], [555], [503], [397], [414], [717], [727], [503, 828], [631], [783], [795, 862], [457], [27], [447], [365], [342], [], [48], [216], [724], [840], [934], [111, 114], [255], [544, 909, 469, 926], [93], [245], [563], [347], [814], [278], [810, 878], [185, 193], [313], [465], [358], [752], [457], [204], [5], [601], [937], [818], [490], [632], [559], [67, 54], [987, 998], [787], [741, 885], [], [221, 206], [252], [52], [546], [666], [749], [196], [724], [963], [], [955], [321], [223], [63], [759], [442, 497, 409], [47], [42, 44], [412], [637], [974], [388], [328], [162], [292], [825], [692], [192], [], [519, 478], [375], [918], [147], [992], [29], [173], [61, 62], [709], [889], [685], [109], [321], [580], [754], [], [315], [159], [772], [693], [349], [607], [], [699], [118], [305], [126], [], [], [606], [769], [387], [220], [55], [], [516], [470], [75], [48], [947], [126], [361], [494], [392], [780, 
914, 536], [277], [268], [635], [274], [397], [394], [586], [703], [458], [402], [], [143], [949, 923], [177], [892], [478], [500], [451], [820], [958], [15], [113], [532, 762, 923, 572], [5], [995], [530], [258], [974], [661], [731], [140], [975], [421, 825], [863], [180], [739], [709], [548], [821], [653, 535], [866], [555], [765], [448], [336], [147], [207], [], [229], [67], [720], [822, 541, 542], [750], [651], [], [703], [953], [652, 847, 471], [612], [481, 485, 592, 605], [18], [681, 810, 620, 508], [59], [113], [524, 461], [977, 978], [261], [819, 541, 542], [153], [403], [910], [541, 542], [316], [558, 917, 921], [845, 638], [509], [768], [477], [704], [72], [470], [41], [775, 842, 977, 978, 445], [236, 237], [55], [153], [], [184, 202, 191], [442], [], [20], [920], [328], [23], [77], [994], [654], [489], [547], [], [308], [], [472], [64], [991], [], [723], [649], [99, 100], [970, 795], [681, 620], [727], [785], [486], [106], [], [137], [102], [705, 466, 799], [647], [], [395], [2], [61], [845], [894], [647], [843], [823], [472, 693], [336], [127], [], [153, 204], [], [685], [149], [851, 532, 831], [929, 227], [781], [329], [987, 998], [387], [721], [119], [502], [802], [373], [523], [398], [896, 999, 281, 700], [372], [369], [675], [261], [944, 946], [725, 572], [785], [461], [229], [568], [130], [59], [367], [752, 852], [358], [456], [555], [320], [957], [716], [846], [9], [], [630], [149], [], [515, 836, 559], [839], [198], [], [103], [696], [774, 614, 879], [157], [841, 825], [895], [], [476], [415], [509], [], [552], [70], [856], [36], [294], [225], [649], [840], [184], [489], [210], [133], [196], [307], [896, 876, 435], [694], [136], [710], [336], [400, 857, 667], [271, 277], [228], [699], [], [536], [347], [216, 716, 220], [203], [822, 542], [275], [714], [828], [571], [137], [6], [654], [679, 459], [663], [187], [260], [464], [670], [72], [612], [985], [5, 6], [471], [], [206], [244], [524], [971], [659], [642], [598], [264], [714], [156], [420], 
[420, 650, 402, 818, 819, 889], [129], [223], [903, 501], [479, 511], [612], [713], [720], [452], [283], [120], [836, 453, 837], [521, 962], [748, 636], [919], [251], [972, 23], [481], [594], [579], [171], [859], [769, 767], [26], [625], [306], [913], [236], [679], [], [152], [611], [], [490], [476], [376], [840], [249], [953], [938], [872], [507], [202, 189], [947, 997], [464], [627], [326], [865], [388], [], [870], [777, 596, 597, 763], [971], [197, 183], [811], [], [181], [1], [51], [194], [566], [855], [805], [635], [452], [58], [716], [752], [264], [345], [143], [619, 846], [441], [39], [179], [193], [917, 921], [538], [231], [466], [169], [776], [64], [484], [258], [275], [977, 978], [706, 423, 532, 923], [173], [277], [361], [536], [718, 510], [587], [859], [430], [977, 978, 853], [506, 733], [337], [986], [351], [679], [533], [666], [337], [350], [50], [968, 504], [852], [837, 703, 921], [674], [215], [755], [311], [88], [15], [253], [553], [616], [790], [963], [717], [822], [23], [], [786], [], [403], [], [732], [725], [72, 815], [], [394], [0], [333], [339], [461], [145], [903], [500], [977, 978], [23], [561], [921], [607], [708], [291], [292], [682], [617], [278], [957], [206, 221], [668], [40], [293], [594], [655], [344], [475], [142], [160], [469], [108], [780], [641], [229], [96], [88], [411], [249, 537], [849], [773], [108], [740, 587, 784, 477], [600, 823], [554], [770, 898, 649], [280], [248, 249, 250], [288], [340], [809, 943, 499], [972], [626], [255], [479], [396], [592], [169], [912, 339], [57], [586], [665], [687], [533], [850], [243], [], [956], [702], [408], [], [622], [778], [657], [429], [138], [455], [927], [985], [242, 703], [204], [0], [78], [677], [87], [421], [567], [706, 879], [140], [141], [159], [754], [113, 125], [790], [453, 740], [355], [491], [524], [5], [290], [139], [708], [917], [], [698], [609], [73], [545], [399], [7], [166], [347], [970], [790], [253], [440], [86], [954], [444], [902], [652], [37], [71], [338], [661], 
[790], [802], [], [455], [12], [394], [871], [828], [531], [852, 187], [740], [681, 810, 620, 508], [152], [834, 457, 906], [659], [], [122], [785], [484, 871], [362], [647], [199], [770], [859], [937], [440], [421], [424], [984], [660], [443], [759], [353], [665], [701], [137], [61], [784], [391], [], [311], [890], [812], [580], [571], [385], [237], [307], [376], [], [670], [821], [352], [328], [913], [836, 703, 796], [216], [908], [], [160, 177], [136], [650], [290], [604], [529], [581], [805], [595], [368], [462], [953], [456, 341], [395], [437], [567], [492], [972], [271], [158], [273, 274], [], [548, 598], [112], [798], [524], [412], [314], [995], [512], [261], [681, 620, 527, 664, 508], [176], [522], [521], [52, 60], [286], [344], [309], [763], [173], [209], [278], [752], [], [900], [721], [923], [992], [891], [339], [569], [0], [190], [168], [380], [401], [13], [426], [342], [276], [673, 681, 526, 527, 664, 508], [770], [500], [489, 507], [375], [855, 828], [409, 892], [198], [335], [682], [835], [606], [499], [916], [907, 499, 411], [701], [640], [797], [815], [19], [520], [36, 37], [956], [169], [0], [54], [365], [962], [878], [854], [568], [629], [300], [744, 657], [728, 545], [366], [572], [923], [132], [992], [650, 558], [0], [218], [], [953], [496], [20], [502], [154], [482, 605], [165], [343], [991], [434], [], [518], [977, 978], [965, 440], [892], [677], [24], [249], [818, 437], [288], [91], [6], [619, 818], [612], [688], [601], [819], [923, 868], [], [221], [514], [631], [957], [489, 695], [731, 861, 999], [753], [262], [744, 657], [753], [919], [156], [147], [683, 699], [730], [771], [464], [315], [121], [962, 935], [209], [715, 524, 461], [779], [970], [263], [556], [995], [606], [673, 526, 527, 664, 508], [140], [540], [390, 973], [4], [379], [76], [957], [267], [626], [992], [526, 720], [158], [640], [211, 159], [405], [383], [879, 412], [272], [], [213], [828], [252], [570], [223], [112], [602], [457], [37], [125], [398], [477], [201], [903, 
689, 601], [799], [375], [57], [515, 413], [744, 657], [], [697, 610], [538, 668], [674], [69], [428], [131], [508], [278], [898], [205], [643], [], [37], [964], [7], [379], [588], [834, 532, 505], [755], [548, 553, 527], [986], [484], [5], [370], [696], [], [400, 667], [267], [3, 147, 149], [186], [], [890], [416], [681, 620, 916, 664], [], [651], [116], [396], [235], [17], [92], [404], [504], [270], [492, 519], [643], [162], [939, 582], [643, 903], [720], [175], [197, 233], [927], [987], [834, 906], [342], [660], [], [136], [869, 655, 630, 539], [239], [795], [74, 815], [701], [374], [], [87], [], [134], [82], [420], [464, 763], [758], [485, 685, 475, 511], [158], [645], [396], [465], [366], [438], [154], [437], [271], [948], [53], [335], [995], [83], [915], [245], [251, 246], [546], [], [198], [95], [488], [763], [911, 701], [283], [837, 978, 890], [979], [], [650], [831], [], [990], [251], [887, 501, 439], [269], [852], [540], [573, 479], [959], [487], [99], [376], [], [550], [763], [8], [25], [396], [263], [424], [455], [965], [320], [731], [172], [320], [302], [896, 804, 434], [557, 472], [906], [644], [485, 592], [640], [340], [195], [310], [181], [127], [386], [842, 977], [721], [], [195, 179], [145], [191], [375], [931, 950, 954, 923], [], [847], [92], [601], [566], [851], [705], [113], [897], [360], [615], [666], [526, 765], [230], [606], [849, 505, 859], [7], [162], [112], [176], [148], [467], [725, 872], [26], [103], [141, 976], [268], [25], [935], [836, 837, 630], [27], [438], [82], [625], [], [382], [281], [599], [570], [479], [41], [230], [742], [], [788], [62], [769, 398], [146], [515], [453, 606], [39, 119], [211], [520, 669], [583], [377], [481, 482], [191], [804], [497], [843], [229], [343], [], [550], [241, 238], [195], [170, 177], [419], [139], [79], [565], [], [355], [310], [564], [898, 414, 608], [], [741], [406], [816], [846], [349], [669], [112], [131], [824], [888, 705], [6], [673, 526, 527, 782, 664, 508], [776, 650], [721, 831], [], 
[134], [182], [689, 819, 578, 488, 885], [888], [449], [140, 142], [760], [628], [984], [396], [913], [915], [], [100], [801], [], [204], [746], [216], [532], [501], [456], [473], [716], [], [200], [588], [997], [690], [697, 443, 828], [904], [669], [621], [170], [706, 879, 401, 762], [247], [259], [929], [220], [516], [25, 28], [569], [91], [398], [810, 508], [229], [957], [], [], [554], [192], [700], [570], [492], [675, 478], [614], [654], [736], [869, 671], [652], [372], [948], [403, 536], [674], [805], [454, 921], [445], [363], [16], [], [928, 949, 927], [253], [88], [208], [561], [839], [629], [842, 445], [252], [], [36, 37], [], [571], [], [815], [528], [354], [615], [81], [980], [884], [371], [11], [380], [], [606], [713], [207], [544], [796], [102], [902], [99], [24], [387], [886], [773], [370], [836, 970, 414], [201], [455], [764, 413], [917], [84], [246], [72], [534, 729], [780, 724], [88], [178], [321], [911, 533], [426], [232], [632], [], [154], [133], [729], [630], [809], [89], [218, 156], [804], [701], [644], [775], [832], [238], [362], [797], [584], [359], [668], [977, 978, 445], [622, 759], [], [], [958], [268], [34], [62], [443], [618, 809, 659], [547], [133], [208], [597], [810, 664, 527, 782, 508], [346], [283, 750], [735], [760], [440, 574], [866], [], [549], [802], [44, 60], [146], [260], [115], [40], [313], [930], [769, 587], [474], [957], [734], [301], [469], [807], [281], [289], [143], [290], [368], [589], [814], [792], [722], [], [583], [], [265], [], [666], [970, 795], [621], [26], [223], [451], [196, 198], [707], [313], [619, 750, 846, 721], [305], [29], [382], [340], [180, 243], [739], [977, 638, 639], [546], [514], [352], [368], [347], [770], [425], [608, 610], [685], [514], [811], [], [], [582], [], [475], [952], [928], [784], [607], [336], [945], [96], [785], [297, 295], [545], [4], [64], [44], [18], [801, 397, 983], [609, 977, 978], [208], [252], [358], [423], [130], [704], [424], [143], [286], [107], [93], [], [350], [866], [100], 
[719], [], [137], [129], [958], [], [809, 762, 923, 926], [170, 177], [855], [543], [803], [626], [248], [717], [450], [895], [178], [971], [423, 424], [923, 960], [530], [384], [422], [24], [370], [400, 667], [324], [984], [654], [302], [690, 345], [640], [184], [722], [805], [860], [100], [971], [141, 142], [159], [], [273], [394], [826], [709], [784], [428], [198], [286], [969], [221], [851, 548], [120], [358], [645], [939, 940, 943, 950], [543], [132], [159], [537], [234], [602], [51], [], [639], [330], [261], [533], [], [500], [926], [191], [346], [173], [], [3], [280, 278], [393], [374], [386], [791], [143], [422], [237], [755], [127], [410], [913], [176], [], [509], [85, 86], [927], [903, 526, 528, 782], [891], [971], [], [112], [985], [156], [188], [332], [487], [326], [744, 657, 403], [933], [556], [134], [206], [786], [235], [685], [957], [965], [255], [862], [827], [25], [779], [637], [542], [896, 434, 861], [495], [112], [344], [750, 564], [201], [632], [417], [19], [822], [441], [758], [352], [681, 620, 508], [840, 463], [], [144], [105], [134], [867], [57], [], [548, 851, 598, 632], [985], [628], [37], [648], [405], [332], [938], [319], [320], [71], [944], [936], [48], [95], [568], [441], [810], [586], [396], [8], [156], [45], [685], [734], [775], [167], [203], [822], [39], [111], [288], [638, 639], [463], [140], [802], [604], [975], [769, 418, 709], [896], [375], [247], [860], [52], [561], [], [943], [744], [814], [411], [317], [516, 520, 431], [63], [510], [386], [], [313], [859], [576], [469], [251], [863], [592], [447], [], [420], [8], [697], [873], [798], [365], [], [117, 62], [726], [606], [560], [], [692, 567], [969], [267, 852], [177], [410], [781], [77], [580], [670], [234, 165], [974], [61], [310], [828], [], [749], [607], [831], [954], [153], [577], [17], [280], [386, 101], [618], [263], [247], [984], [575], [105], [166], [912], [634], [581, 479, 656], [203], [26], [226], [774, 470], [788], [90], [516], [548], [220], [], [556], [725], 
[328], [525], [667], [238, 240], [875], [892], [934], [17], [612], [474, 911], [57], [914], [419], [490], [], [997], [294], [909], [214], [778], [772], [922], [30], [136], [153], [195], [415], [315], [435], [178], [828], [383], [277, 278], [207], [821], [712], [408], [], [536], [681, 620, 603], [898], [0, 133], [868, 415], [135], [209], [12], [251], [113], [537], [], [857], [541], [351], [806], [], [614], [553], [45], [382], [790, 998], [510], [497, 663], [526, 782, 851], [206], [536], [786], [257, 222], [939, 945], [838], [547, 820], [470], [701], [576], [917], [14], [], [736], [464], [966, 907], [979], [688], [548], [562], [686], [], [553, 851], [628], [14], [487], [215], [145], [183], [227], [839], [408], [968, 504], [988], [870], [1], [627], [905], [922], [766], [529, 692], [271], [852], [546, 453], [237], [259], [629], [655], [998, 987, 575], [515], [229], [427, 756], [869, 433], [650, 851, 541], [205], [757], [940], [159], [37], [845], [97], [284], [773], [140, 142], [838], [10], [373], [49], [492, 786], [91], [177], [146], [614], [779], [], [976], [458], [275], [791], [883], [775], [11], [334], [303], [120], [935], [], [792], [441], [285], [387], [568], [162], [83], [132], [678], [305], [684], [127], [701], [421, 818, 506], [836, 837, 552], [327], [325], [623], [189], [712], [638, 639], [284], [697], [838, 680, 631], [958], [758], [355], [907, 692], [12], [896, 434], [113], [137], [939, 943], [803], [245], [], [886], [986], [808], [377], [581, 479], [13], [24], [758], [898], [417, 701], [750], [28], [434], [991], [66], [257], [], [543], [617], [937], [904, 969], [289], [419], [472], [609, 586, 652], [333, 335, 760], [88], [393], [941], [697], [759], [368], [417, 869, 501], [320], [834, 977, 978, 982], [438], [275], [877], [520], [208], [245], [], [514, 664, 655], [311], [987], [462], [418], [399], [794, 799], [249], [18], [957], [76], [400, 667], [986], [653], [80], [615], [1], [458], [690], [468], [], [], [591], [152], [409, 826], [563], [750, 721], [939, 
943], [196], [58], [48], [203], [], [518], [922], [35], [714], [], [665], [754], [210], [687], [851, 548], [198], [210, 852], [890], [127], [559, 594], [689, 443], [832], [487], [444], [55], [430], [663], [425], [808, 977, 978, 445], [], [377], [387], [916], [533], [], [], [828], [611], [884, 538], [697], [758], [316], [197], [392], [557, 751, 733, 479], [425], [715], [769], [410], [94], [391], [638, 639], [581, 479, 661], [821], [267], [365], [587, 499], [476], [584], [939], [820], [923], [39], [187], [947], [62], [654, 656], [884], [338], [557, 562], [938], [345], [32], [9], [438], [801], [404], [894, 281, 285], [896], [809, 659, 762], [346], [51], [4, 394], [586], [861], [455], [433, 793], [947], [874], [307], [445], [267], [493, 526], [953], [328], [949], [], [141], [386], [234, 165], [955], [967, 968], [667], [], [583], [162, 167], [180], [944], [959], [], [729], [], [106], [51], [646], [670], [698], [586], [120], [], [980], [730], [159], [973], [383], [713], [635], [], [281], [734], [938], [528], [315], [911], [598], [747], [178], [480], [925], [946], [546, 402, 819], [629], [], [970], [174], [689], [431], [867], [34], [760, 827], [466], [530], [131], [972], [28], [511], [475], [305], [712], [512], [950], [], [], [623], [605], [253], [809, 909, 926], [267], [10], [555], [466], [668], [225], [700, 999], [610, 823], [801, 973, 983], [259], [825], [683], [116], [350], [209], [327], [510], [406], [844], [644], [743], [], [519], [281], [681, 620], [385], [85], [759], [747], [766], [196, 198], [224, 223], [198], [559], [188], [66, 68], [744, 657], [], [919], [761], [756], [244], [543], [703], [719], [628], [217], [529, 219], [210], [487], [393, 108], [190], [127], [443], [296], [940], [431], [296], [744, 655, 657], [387], [804], [], [790], [710], [785], [531], [984], [667], [766], [], [218], [560], [843], [49], [638, 639], [9], [553], [233], [890, 445], [15], [], [484], [497], [782, 664, 810], [154], [787], [314, 861], [621], [730], [321], [673, 681, 810, 620, 526, 
664, 508], [760], [706], [494], [591], [906], [382], [182], [239], [792], [487], [514], [696], [632], [], [946], [803], [792], [450], [], [185], [489], [], [73, 77], [529, 728], [501, 887], [650], [155], [629], [888], [132, 211], [425], [98], [77], [852], [645], [637], [956], [687], [25], [446], [632], [579], [505], [697], [286], [751], [390], [49, 50], [907], [450], [349], [], [782, 664], [456], [406], [274], [489], [331], [476], [825], [756], [877], [926], [], [], [523], [143], [254], [587], [868, 415], [387], [452], [716], [592], [366], [371], [594], [864, 586], [831, 967, 968, 608, 504], [885], [247], [546, 650, 819], [19], [355], [994], [769, 709], [223], [467], [584], [411, 678, 868], [107], [58], [698], [532, 762, 923, 572], [974], [], [521], [399], [412], [384, 375], [841], [20], [654], [844], [271], [331], [522], [499, 600], [39, 44], [314], [22], [736], [715], [759], [840], [729], [905, 532], [177, 490], [841], [845, 470], [], [640], [850, 823], [160], [471], [35, 49], [906], [79], [717, 581, 479], [801], [430], [934], [40, 44], [992], [894], [873], [37], [711], [80], [543], [107], [], [721], [732, 759], [330], [597, 763], [319], [548, 556, 851], [573], [224], [194], [538], [686], [384], [160], [414], [721], [392], [874], [], [592], [645], [280], [948, 949, 923], [260], [868, 923], [228], [881], [982], [771], [748], [], [740], [307], [708], [489, 852], [], [210, 211], [515], [531], [], [824], [], [680], [215], [984], [572], [148], [967, 923], [654], [707], [253], [], [746], [684], [333], [584], [185], [747], [635], [411], [455], [893], [165], [581, 518, 479, 661], [963], [], [877], [768], [139], [410, 309, 599], [137], [], [681, 620, 526], [644], [], [283], [113], [320], [283], [10], [], [632], [385], [664, 526, 527, 632], [631], [850], [537], [451], [611], [268], [634], [96], [149], [318], [61], [281], [427], [642], [291], [604], [], [991], [437], [254], [212], [995], [488, 439], [946], [857], [750], [840], [644], [378], [778], [], [108], [150], [59], 
[554, 628], [369], [993], [61], [342], [343], [44], [977, 978], [833], [451], [721, 831], [258], [544, 813, 910, 926, 469, 827], [750, 915], [143], [72], [612], [181], [12, 957], [198], [379], [745], [749], [206], [145], [515, 204], [486, 889], [668], [533], [573], [678, 518], [608, 872], [121], [119], [582], [43], [322], [562], [], [216], [697], [315], [921], [618, 623, 499], [188], [], [277], [305], [997], [], [977, 978], [889], [], [768], [359], [48], [840, 463], [356], [136], [981, 429], [780], [904], [145], [372], [555], [531], [332], [868], [832], [814], [548], [616], [423], [134], [317], [168], [616], [653], [439], [825], [421], [828], [27], [883], [987, 998], [397], [793], [418, 709], [], [542], [102], [40], [103], [27], [743], [448], [414], [], [664], [119], [199], [420], [940], [255], [281], [323], [383], [483], [255], [550], [874], [], [36], [637], [193, 187], [696, 806], [721], [471], [819, 822], [868, 923, 118], [40], [488, 843], [919], [325], [825], [341], [184], [264], [737, 907, 760, 440], [309], [404], [55], [491], [8], [531], [575], [912], [324], [168], [134], [13], [992], [683], [307], [855], [706, 559, 976], [], [618], [407], [842, 433], [269], [724], [90], [161], [715], [780, 914], [474], [476], [143], [736], [238], [476], [759, 475], [558], [268], [849], [151, 158], [232], [866], [526], [801, 691, 983, 570], [], [57], [279], [197], [103], [248], [418], [360], [403], [385, 386], [361], [], [109], [89], [721], [358, 359], [706, 846, 789, 765], [816], [360], [391], [256], [470], [748], [952], [0], [], [387], [71], [92], [674], [707, 709, 528], [], [936], [240, 241], [49, 50], [905], [369], [854], [385], [438], [19], [520], [223], [588], [363], [780], [150], [374], [333], [204], [939], [553], [716, 853], [47], [821], [918], [504, 957], [781], [], [731], [], [978], [755], [641], [], [432], [673, 681, 620], [813], [581, 479], [25], [737, 455], [784], [368], [], [506], [753], [917, 921], [773], [71], [894], [], [610, 543], [331], [554], [991], [44, 
26], [66], [260], [849], [135], [785], [732, 759], [869], [418], [32], [630], [796], [970], [421], [], [659], [454, 917], [170], [563], [802], [500], [95], [255], [23], [852], [982], [39], [397], [239], [225], [701], [], [903, 691], [759], [72, 815], [518, 671], [518, 671], [16], [948], [], [484], [711], [615], [779, 414], [984], [38], [197], [855], [162], [908], [117], [944], [487], [953], [949], [243, 254], [431], [333], [560], [364], [354], [748, 667], [171], [127], [626], [640], [959], [535], [446], [], [904], [999, 700], [776, 683, 889], [774, 655, 831, 502], [263], [132], [441], [802], [342], [865, 207], [323], [96], [652, 413], [615], [], [815], [140, 142], [159], [317], [107], [], [182], [961], [], [4], [702], [673, 453, 526, 527, 664], [178], [923], [308], [191], [747], [], [911, 735], [453], [86], [170], [133], [142], [7], [], [403], [135], [884], [944], [336], [945], [], [880], [927], [24], [520], [27], [407], [492], [393], [16], [531], [970], [209], [489], [365], [647, 659], [958], [64], [727], [116], [574], [8], [225], [161], [489, 919], [99], [440], [], [872, 622, 759], [755], [956], [896], [941], [33], [], [17], [294], [42], [449, 975], [794], [874], [683], [121], [706], [742], [911, 533], [264], [963], [752], [564], [734], [561], [280], [932], [227, 805], [906], [], [147], [721], [540], [965], [673, 526, 527, 846, 831, 664, 508], [156], [29], [425], [325], [359], [535], [834, 906], [527, 664, 508], [], [366], [], [109, 973], [838], [570], [433], [143], [92], [735], [239], [874], [957], [170], [254], [400, 667], [721], [229], [405], [766], [18], [330], [515, 752], [422], [956], [], [], [74, 815], [888], [908, 895], [265], [363], [770], [670], [323], [794], [39], [40], [20], [369], [944], [831], [368], [942], [121], [406], [525], [284], [731], [37], [66, 68], [392, 973], [738, 427], [652, 413], [317], [918], [286], [822], [704], [860], [42], [808], [288, 290], [390], [577], [453, 799], [697], [638, 639], [868, 964], [783], [44], [], [773], [89], 
[151], [158, 263], [], [957], [902], [888], [732], [825], [472], [323], [490], [708, 682], [974], [530], [647], [258], [818], [], [224], [418], [528, 707], [396], [896], [667], [429], [298], [574], [540], [180], [137], [119], [167], [588], [920], [324], [291, 292], [459], [365], [235], [987, 998], [612], [147], [363], [103], [631], [339], [318], [476], [384], [603], [95], [805], [832], [818, 729], [683, 579, 731], [534], [670], [153], [726], [688], [433], [359], [310], [627], [659, 949, 923], [513, 776], [80], [583], [], [277], [874], [423], [789], [811], [79], [436, 581, 479], [350], [969, 504], [392], [69], [821], [625], [393], [74, 815], [], [411], [833], [144], [108], [644], [291], [14], [602], [653], [901], [26], [115], [612], [618, 666], [800], [], [609], [28], [441], [859], [449], [25], [806], [752], [30], [896], [], [867], [738], [310], [429], [748], [552, 716], [195], [601], [952], [691], [73], [641], [410], [192], [547], [210], [578, 982], [634], [467], [83], [451], [44], [210, 164], [692], [566], [737, 582], [710], [395], [966, 948, 923, 572], [104], [], [232], [211], [570], [232], [40], [862], [79], [345, 690], [213], [749], [812], [828], [358], [], [76], [291], [], [654], [351], [826], [245], [162], [425], [72], [550], [825], [32, 31], [971], [295], [493], [151], [], [327], [872, 759], [286], [834, 906], [342], [833], [292], [352], [284], [563], [607], [433, 793, 638, 639], [629], [513, 543], [306], [889], [222], [513], [223], [152], [273], [157], [443, 841], [532], [155], [363], [353], [235], [868], [406], [453, 454], [906], [781], [712, 719], [205], [273], [35], [797], [153], [554], [748], [320], [321], [908, 895], [810, 878], [524, 461], [], [], [97], [766], [324], [880], [5], [318], [999, 247], [558], [682, 708, 562], [81], [485], [371], [], [685], [167], [221], [827], [429], [591], [723], [168], [841], [771], [715], [249], [426], [776], [], [913], [209, 703], [77], [487], [], [280], [410], [453, 589], [869], [604], [702], [748], [993], [755, 733], 
[317], [444], [82], [440], [763], [876, 435, 282], [909], [646], [650, 402], [399], [488], [], [802], [732], [839, 718, 821], [572], [832], [], [], [581], [696], [13], [494], [434], [946], [301], [389], [863], [127], [735], [], [82], [534], [73], [522, 281], [791], [341], [329], [514], [614], [107], [79], [645], [964], [911, 735], [483, 975], [922], [716, 309, 599], [518], [155], [779], [888], [909, 987], [223], [98], [839], [480], [360], [164], [900], [703], [944], [418], [715], [102], [532, 411, 931, 933], [449], [841], [317], [313], [757], [336], [341], [], [94], [200, 155], [690], [], [], [306], [], [989], [348], [435], [376], [], [652, 764, 413, 734], [642], [883], [597], [780], [322], [649], [], [1], [13], [475], [], [130], [709], [529, 631], [242], [120], [681], [652, 413], [179], [447], [48], [673, 681, 526, 527, 664, 508], [888], [789], [584], [], [896, 414, 487], [528], [447], [762], [963], [893], [818, 610], [460], [774], [], [84], [891], [942], [673, 681, 526, 782, 664, 508], [240], [769], [884], [777], [491, 477], [285], [869], [732], [475], [918, 281], [892], [684], [800], [577], [641], [470], [424], [453], [602], [301], [294], [755], [731], [759], [61, 62], [794], [769, 302], [422], [708], [450], [350], [124], [326], [982], [479, 660, 511], [662, 719], [574], [887, 501], [152], [579], [627], [578, 689], [235], [538, 668], [548, 851, 632], [418, 845], [0], [596], [403], [34], [831], [376], [561], [359], [733], [890], [87], [202], [397], [67, 68], [205], [777], [992], [631], [295], [868], [582], [684], [205], [], [], [639], [432, 683], [462], [928, 960], [716], [419], [80], [432], [189], [577], [888, 821], [796], [792], [637], [606], [], [524, 461], [], [849, 883], [], [709], [265], [857], [213], [460], [162, 164], [377], [], [588, 790], [942, 952], [406], [994], [417], [798], [719], [258], [557, 914], [], [611], [773], [13], [752], [263], [621], [432], [471], [932], [706], [625], [213], [916], [813, 567], [597, 763], [747], [533], [632], [156], [862], 
[829], [971], [557], [289], [638, 639], [413], [], [400], [906], [234], [759], [37], [595], [99], [676], [843], [475], [116], [638, 639], [780], [518, 958, 671], [910], [812], [543], [489, 207], [616], [310, 301], [956], [692, 968], [938], [317], [950], [960, 868, 845, 927, 415], [63], [823], [403], [993], [329], [484], [486], [704], [113], [344], [953], [44], [552], [84], [80], [316], [987, 998], [918], [489, 315], [749], [459], [], [954], [17], [638], [613], [352], [612], [323], [397], [905, 846, 725], [241], [557, 497], [873], [125], [492], [911], [333, 151], [883], [527, 916, 664], [173], [798], [30], [320], [935], [625], [219], [39], [198], [652], [666], [387], [157], [919], [960, 967, 968, 504, 923], [560], [102], [138], [365], [923], [888], [208], [253], [650], [284], [783, 784], [556], [150], [0, 389, 758], [22], [395], [763], [199], [], [122], [740], [704], [537], [146], [], [429], [90], [772], [443], [196, 198], [531], [], [195], [796], [984], [], [696], [709], [543], [235], [375], [481], [690], [639], [593], [337], [544, 469], [957], [878], [141], [825], [941], [949], [857], [655, 721, 831], [410], [66, 68], [253], [624], [85], [116], [583], [248, 249], [707], [578], [45], [273], [32], [948], [922], [398], [167], [645], [224], [392], [664, 526, 782], [938, 923], [395], [544], [963], [903], [248, 249], [537], [382], [31], [828], [274], [270], [971], [367], [797], [267], [737], [843, 977, 978], [357], [6], [723], [975], [172], [404], [421], [452], [588, 790], [38], [315], [324], [987], [105], [770], [679], [], [66, 68, 54], [888], [553], [476], [910, 948], [673], [836, 837, 906], [497], [361], [860], [893], [440], [759], [218], [744, 657], [513, 875], [153], [928, 960, 923], [], [309], [724], [], [352], [156], [197, 199], [714], [56], [209], [110], [45], [815], [738], [91], [248, 537], [137], [599], [673, 453, 553, 526, 527, 664], [634], [947], [90], [324], [274], [824], [222], [542], [984], [748, 636], [545], [871], [], [444], [216, 219, 214], [581, 511], 
[3], [61], [150], [52], [108], [564], [681, 620, 284], [341], [570], [749], [133], [281, 282], [340], [752, 852], [48], [291], [723], [865], [346], [712], [621], [579, 881], [432, 566, 683], [174], [844], [291], [66], [266], [574], [], [69], [382], [649], [992], [774, 655], [416], [199], [42], [903, 786], [652, 764, 413], [616], [658, 533], [631], [949, 990], [106], [672, 797], [654], [133], [851, 548, 453], [281], [], [930], [256], [104], [746], [531], [382], [528], [403], [457, 834], [955], [976, 972, 437], [424], [849], [707], [349], [155], [184], [430], [782, 916, 664, 508], [586], [288], [460], [273], [133], [472], [289], [869, 454, 824], [334], [601, 578], [417, 971], [352], [81], [288], [873], [413], [334], [418], [171], [474], [113], [425], [137], [783], [422], [291], [603], [475], [673, 526, 527, 664], [525], [31], [71], [248, 250], [], [508], [57], [841], [137], [138], [252], [677], [578, 846, 689, 601], [956], [27], [], [466], [954], [576], [252], [724], [], [795], [544], [953], [954], [923, 470], [933], [123], [984], [286], [992], [733], [193], [810, 508], [893], [321], [703], [519], [443], [687], [482], [967], [252], [190], [756], [475], [461], [135], [755], [636], [299], [416], [112], [966, 883, 572], [172], [873], [], [800], [175, 184], [498], [98], [914], [656], [689], [172], [572], [479, 661], [657], [366], [949], [340], [107], [322], [721], [421], [0], [571], [532, 762], [899, 725], [295], [95], [486], [443], [147], [553], [939], [606], [188], [184], [479], [139], [909, 827, 926], [652, 465], [69], [641], [145], [218], [898], [320], [194], [8, 7], [778], [148], [529], [806], [636], [], [882], [645], [931], [], [643], [737, 455], [333], [388], [694], [455, 440], [836, 837], [4], [801], [], [201], [814], [281], [542], [42], [132], [645], [], [323], [30], [677], [454], [836, 837, 869, 636], [933], [419], [110], [969], [208], [545], [375], [467], [], [455], [899, 868], [550], [437], [723], [735], [224], [56], [724], [336, 337], [917], [953], [468], 
[183], [], [88], [622], [25], [899, 505], [543], [896], [484], [972], [966, 907, 572], [592], [882], [540], [936, 939, 943, 945], [], [436], [96], [632, 605], [297], [250], [182], [433, 445], [930], [173], [472, 693], [265], [187], [466], [578], [702], [23], [497, 442, 858], [256], [129], [337], [383], [954], [935, 910, 659], [40, 46], [726], [905, 526, 664, 831, 846, 721, 851, 750, 894], [792], [439], [955], [316], [996], [853], [985], [], [574], [], [934], [496, 245], [907, 532, 762], [128], [327], [588, 606], [752], [503], [172], [949], [177], [738, 580], [448], [729], [759], [15], [949, 950, 923, 957], [873], [833], [133], [434, 435], [248], [796, 911], [852], [], [688], [361], [], [370, 375], [517, 718], [968, 969], [444, 518], [575], [82], [], [], [112], [472], [], [476], [], [312], [191], [447], [486], [320], [417, 575], [676, 263], [806], [358, 359], [521], [429], [987, 335], [217], [238], [724], [875], [612], [453, 881], [199], [915], [], [330, 331], [950], [529], [870], [164], [864], [822, 541, 542], [931], [194], [73], [225], [448], [471], [379], [718, 839], [121], [729, 534], [476], [601], [259], [43], [903], [349], [], [], [374], [413], [572], [388], [], [949, 950], [716], [698], [67], [35], [241], [884], [298], [213], [655], [], [251, 212], [240, 241], [141], [244], [370], [944], [515, 238, 596], [34], [460, 975], [769], [277], [807], [912], [273], [957], [988], [710, 767], [696], [942, 658], [984], [39], [535], [988], [400, 667], [656, 475], [321], [27], [875], [497], [64], [530, 479], [414], [559], [888], [223], [314], [912, 425], [655], [107], [622, 784], [119], [673], [139], [], [], [850], [930, 934], [874, 757], [136], [112], [], [651], [193, 187], [603], [382], [780], [776, 819], [904], [], [283], [920], [270], [846], [637], [889], [976], [579, 881], [609], [961], [508], [436], [582], [802], [449, 718], [515, 552, 834], [10], [], [], [223], [288, 290], [47], [43], [], [866], [435], [797], [307], [995], [192], [70], [37], [367], [965], [702], 
[276], [194], [343], [19], [654], [496], [241, 238], [269], [515], [646], [], [143], [302], [111], [740, 783], [557], [629], [488, 679], [476], [464], [26], [464], [388], [234], [279], [685], [841, 728], [362], [174], [862], [720], [88], [763], [451], [512], [434], [585, 589], [16], [968, 659], [848], [908, 895], [787], [167], [591], [927, 928], [], [4], [1], [196], [455], [215], [545], [620, 499], [603], [451], [], [], [532], [737], [338], [296], [300], [73], [692, 772], [815], [106], [235], [660], [240], [343], [886], [265], [320], [172], [], [438], [886], [688], [546, 650, 402, 819], [884], [829], [610], [969, 508], [555], [697], [759, 872], [426], [610, 841, 697], [592], [277], [457], [235], [569], [614], [481], [215, 216], [752], [229], [204], [908], [572], [952], [249], [703], [832, 979], [364], [14], [383], [971], [821], [572, 415], [405], [615], [826], [634], [218], [42], [533], [645, 733], [206], [654], [207], [420], [489], [705], [287], [775], [235], [995], [121], [630], [558], [709], [697], [832], [898, 455], [813], [856, 958], [453], [], [454], [340], [281], [101], [148], [278], [908], [387], [570], [389], [], [412], [115], [305], [830], [836, 837], [802], [60], [915], [747], [872], [282], [154], [], [296], [295], [], [608, 882], [964], [288], [582, 791], [150], [402, 546], [446], [229], [87], [779], [651], [581], [752], [421], [871], [381], [633], [507], [646], [769], [438], [506], [401], [613], [832], [147], [573], [608, 824], [999], [987], [923], [487], [881], [236], [276], [], [40], [228], [486], [452, 245], [987, 998], [398], [632], [688], [854], [268], [62], [428], [884, 406], [376], [203], [800], [297], [235, 172], [581, 479], [764], [405, 839], [583], [798], [669], [759], [315, 462], [331], [402], [393], [616], [], [603], [], [28], [492], [522], [49], [713], [800], [973], [381], [962, 923], [307], [109], [], [759], [975, 703], [267], [962, 935], [94], [], [808], [449], [813], [248, 250], [500], [548], [128], [211], [21], [637], [], [478], [870], 
[993], [69], [715], [493], [925], [263], [861], [292], [485, 592], [789], [129], [86], [623], [566], [939], [868, 923, 572], [218], [57], [658], [870], [349], [393], [453, 463], [543], [554], [330], [220], [218], [855], [293], [665], [420], [788], [237, 158], [875], [673, 681, 526, 782, 664], [654, 757], [959, 762, 923], [448], [921], [499], [289], [884], [488], [], [198], [985], [777], [921], [], [250], [369], [], [], [443], [245], [608, 474], [685], [280], [255], [261], [673, 681, 620, 697], [], [413], [410, 309, 599], [750, 721, 414], [21], [338], [524, 461], [946], [353], [488, 695], [987, 567, 923], [154], [27], [382], [769], [583], [584], [726], [659], [484], [103], [120], [756], [453, 624, 765], [475], [326], [441], [397], [345], [472], [141], [385, 386], [836, 837, 678, 977, 978], [158], [873], [651], [968], [883], [], [31], [486], [871], [221], [867], [496], [496], [934], [11], [632], [244], [21], [806], [512, 623], [53], [619, 846], [756], [842, 445], [523], [87], [748], [463], [20], [443], [797, 282], [463], [962, 935], [335, 703], [85], [687], [679], [434], [783], [957], [887], [221], [9, 876, 435], [987, 998], [292], [359], [672], [431], [199], [409], [], [739], [907, 572, 966], [225], [], [321], [211], [346], [656], [942], [322], [129], [725], [581, 479, 817], [518, 570], [218], [429], [968], [244], [373], [22], [878], [421, 428, 834, 869, 501], [453, 454, 624], [38], [11], [28], [48], [837, 806], [836, 837], [649], [808, 978], [13], [254], [261], [375], [403], [972], [148], [383], [692, 478], [942], [248], [679], [240], [792], [608, 799], [560], [568], [492], [874], [721], [234, 236], [579], [36], [808, 822], [769, 35], [518], [227], [976], [940], [628], [], [578], [264], [77], [619, 846], [916], [589], [305], [595], [686], [868], [], [867], [892], [9], [326], [181], [851], [836, 837, 608], [819, 608], [740], [331], [416], [], [538, 668], [427], [149], [117], [923], [840, 462], [662], [383], [517, 488, 600], [515], [644], [748], [98], [435], [832], 
[912], [899, 725], [519], [933, 923], [853, 762], [305], [330], [], [32], [263, 264], [638, 639], [688], [270], [], [], [177], [515], [325], [], [805], [554], [12], [275], [275], [185], [631], [547], [513, 683], [666], [809, 967, 968], [219, 836, 837], [119], [84], [113], [578], [162], [727], [354], [607], [721, 831], [322], [256], [79], [871], [294], [560], [46, 47], [21], [35], [315], [659, 809], [59], [211], [169], [54], [834], [145], [780], [810, 878], [986], [682], [10], [617, 845], [], [281], [342], [], [587, 677], [246], [938], [784], [353], [54], [242], [381], [613], [303], [119], [199], [362], [911, 796], [924], [533], [921], [208, 211], [441], [21], [794], [94], [717], [571], [421], [619], [545, 846], [326], [829], [179, 180], [916], [723], [754], [579, 881], [234], [197], [978, 445], [953], [748], [517, 554, 625, 536, 510], [999], [513], [654], [12], [129], [650, 401, 402, 818, 819, 632], [833], [92], [], [], [191], [484], [344], [654, 656, 479], [235], [659], [536, 403], [402], [804, 631], [959], [849], [], [246], [665], [650, 593], [672], [248], [662], [], [294], [], [3], [975, 693, 472], [676], [261], [507], [223], [699], [422], [65], [932], [283, 478], [836, 837, 869, 650, 818, 819], [645], [992], [502, 539], [949], [], [258], [409, 437], [113], [13], [336], [265], [170], [179], [139], [836, 837], [579], [13], [1], [794], [143], [784], [609], [856], [937], [691], [89], [748], [896], [252], [120], [428, 954], [557, 701], [333], [517, 536], [640, 562], [677], [299], [40, 44], [896], [313], [792], [683, 432], [192], [], [546], [380], [518], [419, 617, 823], [661, 479], [], [610, 602], [721], [345], [], [721, 831, 281], [770, 703], [300], [354], [272], [890], [813], [264], [325], [738], [859], [747], [], [221], [100], [], [647], [735], [830], [821, 839], [348], [], [652, 764], [203], [68], [715], [723], [545], [414, 893], [955], [121], [952], [38, 44], [232], [275], [711], [], [513], [], [132], [], [589], [939, 943, 945], [525], [981], [565], [52], 
[655], [128, 127], [454, 919], [78], [836, 837], [689, 601], [139], [237], [913], [504], [8], [806, 655], [], [94], [207], [996, 109], [650, 402, 819], [580], [641], [673], [72], [118], [983], [524, 652, 465], [612], [254], [1], [790], [401], [941], [591], [3], [474], [570], [634], [554], [574], [488, 975], [357], [], [], [930], [431], [184, 189, 191], [81], [612], [950], [879], [957], [877], [992], [650, 402, 819], [386, 101], [630], [161], [983], [254], [194], [6], [293], [960], [822, 541], [874], [140], [393], [994], [434], [510], [590], [341], [968, 725, 504], [515, 610, 714, 402], [905, 532, 831, 799], [254], [836, 837, 445], [37], [281], [930, 868, 441, 762, 923], [36], [992], [915], [512], [615], [653, 728], [459], [852, 219], [], [281], [966, 503], [407], [532, 789], [812], [103], [910], [610], [940, 942], [485, 553, 632], [575], [450], [], [274], [66], [269, 272], [364], [740], [22], [237, 158], [717], [494], [416, 602], [951, 122], [307], [399], [998], [968, 849], [256, 205], [489, 93], [271], [257, 222], [0], [809], [801], [605], [695], [916], [760], [434, 756, 793], [757], [63], [483], [185], [201], [528], [61], [905], [529], [385], [136], [482], [448], [], [36], [90], [275], [800], [764], [602], [], [504], [], [762, 559], [245], [650, 558, 819], [353, 351], [53], [221], [17], [], [940, 941, 942], [193], [544], [699], [], [626], [372], [220], [225], [], [441], [527, 782, 664], [530], [88], [834, 906], [826], [62], [67, 68], [478], [978], [582], [673, 664, 527], [], [427], [899], [714, 402], [107], [884, 406], [412], [676], [283], [810, 878], [800], [281], [122], [390], [988], [918], [], [99], [325], [546, 650, 402, 819], [74], [287], [169], [160], [771], [53], [610], [169], [479, 751, 879, 817], [525], [440], [141], [150], [136], [708], [], [823], [239], [123], [740], [170], [166], [982], [58], [106], [89], [728, 858], [685], [581], [661], [569], [250], [564], [713], [401], [301], [533], [775], [478], [752, 852], [785], [423, 424, 585, 589, 526, 782, 
851, 664], [769, 515], [], [866, 595], [695], [206, 221], [513, 875], [106], [476], [4], [251], [939], [921], [437], [774], [359], [248], [880], [242], [748], [63], [837, 639], [174], [251], [265], [89], [469], [547, 565], [420], [988], [763, 597], [254], [42], [857], [54], [181], [967, 504], [107], [240, 239], [705], [154], [524], [993], [696], [905], [45], [640], [247], [835], [661], [551, 629], [700], [778], [118], [910], [488, 778, 600], [417], [690, 984, 345], [128], [530], [677, 783], [25], [876, 589, 207, 435], [146], [11], [271, 277], [593], [791], [673], [589], [794], [660], [518, 830], [270], [399], [], [862], [566], [832], [78], [282], [412, 335], [705], [474], [106], [557], [311], [569], [234], [215], [788], [133], [252, 262], [825], [], [513], [677], [777], [985], [204], [532, 572], [955], [29], [896, 804], [781], [367], [724], [13], [738, 580], [], [794], [], [487, 761], [314], [716], [541, 542], [699], [20], [], [389], [569], [923, 965], [608, 770], [554], [166], [225], [244], [62], [], [478], [463], [732], [595], [211], [584], [943], [30], [917], [726], [838], [808], [932, 478], [855], [541], [], [], [781, 557], [271], [803], [656], [445], [], [336], [210], [53], [609, 479], [319], [521], [415], [244], [11], [119], [233], [], [485, 754, 632], [776, 819], [462], [134], [419], [22], [309], [27], [511], [502], [681, 620, 526, 527, 782, 664, 508], [524, 461], [417], [610], [975], [951], [755], [510], [192, 463], [53], [603], [84], [161], [877], [971], [855], [343], [297], [168], [318], [214], [881], [453, 454, 624], [892], [717], [497], [320], [695], [104], [406], [991], [363], [825], [], [16], [], [791], [833], [155], [818], [515, 870], [684], [757], [367], [413], [], [194], [327], [306], [886], [752, 852], [954], [993], [382], [807], [311, 312], [96], [53], [827], [330], [338], [865], [694], [588], [25], [483, 979, 825], [848], [807], [525], [195], [22], [136], [774], [816], [231], [866], [567, 827], [], [7], [982], [343], [933], [958], [700], [921], 
[143], [709], [157], [680, 805], [223], [574], [133], [932], [721, 831], [], [852], [13], [155], [407], [535], [629], [750], [67], [245], [68], [220, 213], [521, 813, 909, 910, 567, 926], [625], [71], [809, 762, 923, 926], [673, 810, 508], [], [342], [217], [71], [785], [325], [990], [114], [589], [118], [277], [304], [738], [866], [572], [994, 116, 126], [13], [654], [], [529], [973], [696], [252], [899], [268], [190], [911], [544], [256], [426], [488, 826], [805], [624], [612], [], [142], [148], [720], [974], [748, 636], [376], [628], [92], [501], [866, 595], [110], [409, 892], [328], [341], [417, 616], [896, 861], [155], [711], [424], [939, 940, 941, 942, 943], [526], [411], [8, 84], [63], [485], [582], [547], [827], [928, 659], [321], [962, 923], [252], [488], [751], [22], [749], [723], [397], [21], [695], [609, 660], [803], [966], [640], [514], [252], [756], [489], [373], [500], [581, 479, 511], [923, 964], [430], [370], [971], [412], [917], [898], [283], [128], [302], [385], [], [655], [856, 958], [144], [653], [182], [988], [], [474], [433, 639], [905, 283], [583], [25], [333], [161], [348], [495], [836, 837, 906, 656, 785], [873], [405], [309, 599], [616, 843], [814], [645], [604], [223], [248, 250], [576], [102], [729], [275], [43], [64, 59], [523], [387], [991], [93], [246], [], [], [517], [453], [908, 812, 404], [835], [618], [107], [129], [575], [462], [765], [208], [311, 312], [960], [403, 536], [814], [376], [713], [991], [302], [329], [217], [], [40, 46], [199], [105], [753], [670], [482], [363], [516, 520], [777], [484, 871], [479, 817], [208], [604], [230], [381], [474], [909], [984], [799], [441], [76], [669], [339], [441], [380], [924], [40], [825], [323], [950], [45], [800], [617, 720, 823], [234], [33], [39], [182], [832], [234], [287], [481, 626], [698], [431], [666], [806, 911, 658], [349], [9], [641], [57], [335], [253], [774], [865, 850], [473], [15], [506], [450], [372], [344], [832], [230, 231], [361], [783], [387], [92], [732], [936], 
[995], [], [625], [33], [892], [346], [712], [308], [175], [970, 795, 796], [75], [235], [132], [15], [287], [448], [302], [555], [118], [590, 605], [339], [339], [759], [498], [950, 951], [252], [433, 638, 639], [134], [535], [236], [740], [934], [701], [430], [970], [940], [555], [160], [505], [624, 453, 454], [337], [308, 309], [361], [418], [470], [977], [347], [750, 533], [708], [249], [643], [928, 960], [778], [373], [260], [432], [947], [865], [558], [307], [162], [109], [455, 760, 440], [440], [450], [], [423, 424, 585], [97], [79], [], [580], [469], [435], [545], [387], [673, 526, 527, 782, 664, 508], [404], [737, 898], [467], [], [709], [251], [660], [], [829], [518], [532, 470], [508], [357], [465], [42], [], [199, 588], [904], [393], [470], [546, 841], [212], [479], [489], [586, 652], [143], [797], [260], [858], [814], [674], [], [63], [106], [788], [905], [572], [424, 423], [695], [628], [62], [], [2], [38], [], [711, 721], [232], [64], [769], [794], [608, 610, 559], [833], [190], [98], [898], [862], [456], [798], [319], [892], [228], [108], [706], [402], [208], [233], [810, 508], [], [], [705], [828], [744, 657], [378], [75], [795], [254], [916], [690, 958, 345], [852], [553], [369], [232, 264], [793, 794], [269], [649], [532], [534], [431], [874], [114], [392], [562], [453], [432], [797], [756], [903, 585], [573], [722], [748], [553], [243], [750, 721], [499], [297], [897, 971], [645], [275], [666], [], [780], [773], [567], [286], [347], [77], [581, 479, 656], [250], [847], [910], [106], [937], [261], [355], [625], [149], [656], [505], [959], [808], [712], [996], [493], [421], [915], [264, 263], [], [487], [869], [867, 675], [615, 890], [467], [739], [833], [594], [618], [226], [313], [219], [399], [59, 64], [295], [683, 558, 432, 566], [371], [742], [242], [809, 532, 762, 923, 959], [424, 423], [520], [340], [], [205], [78], [712], [246], [327], [914], [605], [], [144], [788, 502], [879], [408, 575], [63], [136], [601], [447], [760], [486], [568], 
[178, 282], [], [963], [499], [560], [858], [779], [134], [572], [673, 526, 527, 664], [879, 689], [576], [803], [514, 515, 655], [993], [63], [894], [896, 495], [940, 942], [610], [964], [899], [869], [738], [453, 917, 921], [333], [163], [590], [976], [752], [619, 846, 721, 892, 831], [205], [806, 655, 630, 502], [918], [308], [], [776], [483], [570, 518], [354], [192], [407], [544], [443], [], [], [195], [973], [171], [580], [205], [344], [291], [568], [75], [734], [483], [298, 63], [712], [70], [704], [530], [417], [388], [602], [659], [422], [524], [20], [560], [529], [422], [544, 909, 762], [16], [559], [990], [101], [562], [313], [522], [658], [387], [932], [330], [610], [854], [914], [759], [], [373], [539], [439], [533], [252], [912], [261, 174], [248, 250], [689, 594, 601], [535], [431], [724], [656, 784], [100], [332], [416], [259], [268], [142], [962, 923], [982], [121], [961, 923], [592], [483], [180], [836, 837], [], [685], [408], [322], [695], [843], [], [], [195], [182], [133], [581, 479, 436, 535, 511], [420], [269], [30], [739], [811], [191], [], [352, 351], [5], [529], [928], [214], [214], [994], [153], [729], [], [936], [125], [37], [123], [550], [243], [541, 542], [849], [659], [125], [902], [936], [], [], [898, 918], [767], [], [971], [40, 46], [55], [130], [6], [879], [284], [214], [222], [402], [392], [], [215, 218], [237], [556], [380], [342], [757], [], [881, 486], [175], [330], [749], [38], [669], [993], [597], [48], [826], [923, 926], [52], [277], [479], [347], [966], [946], [544, 827], [691], [137, 146], [384], [663], [95], [197, 183], [185], [957], [784], [283], [535], [292], [238], [80], [466], [148], [705, 547], [673, 526, 527, 782, 664, 508], [883], [808], [300], [279], [432], [323], [53], [481], [836, 638, 639], [102], [821], [357], [393], [471], [447], [838], [451], [766], [950], [586, 977], [652], [724, 733], [705], [268], [897], [831], [804], [60, 62], [], [953], [740, 359], [926], [480], [993], [950], [867], [79], [486], [831, 
282], [277], [], [255], [919], [799], [647], [168], [899, 901], [108], [228], [348], [805], [884], [934], [53], [426], [268], [994], [8], [849, 504, 505], [338], [110], [130], [354], [427], [711], [161], [156, 285], [505], [84], [839], [512], [884], [545], [118], [546], [715, 524, 787], [], [886], [514], [388], [41, 44], [91], [915], [916], [513, 650, 819], [563], [], [324], [909, 926], [152], [158], [170], [383], [831], [909, 849], [8], [375], [414], [], [119, 120], [69], [230, 231], [912, 716], [325], [59], [46], [268], [951], [666], [106], [], [685], [588], [992], [721], [798], [715], [458], [], [402], [95], [53], [560], [440, 441, 455], [], [374], [327], [128], [478], [513, 439], [746], [510], [526, 844, 742], [483], [280], [265], [932], [518], [499], [62], [203], [212], [318], [], [310], [291], [815], [695], [635], [70, 904], [485, 592], [803, 228], [293], [267], [917], [141], [52], [812], [351], [545], [24], [796], [485, 530], [480], [608], [530], [744, 657], [724], [498], [143], [570], [693, 472], [560], [194], [999, 281, 700], [783, 784], [676], [919], [727], [550], [573], [109, 973], [327], [], [787], [963], [425], [505], [368], [74, 815], [], [498, 854], [822], [258], [731], [861], [138], [626], [551], [312], [305], [372], [393], [321], [806, 831], [345], [185], [972], [269], [520, 669], [550], [379], [], [532], [818], [592], [697], [107], [21], [377], [445, 638, 639], [831], [472], [6], [], [852], [779], [472, 693], [224], [809, 659, 923], [732], [842, 638, 639], [155], [650], [303], [582, 519, 950], [731, 861], [34], [], [801, 445], [822], [155], [366], [815], [376], [593], [311], [55], [895], [750], [105], [839], [545], [626], [179], [423], [561], [596, 639], [636], [352], [152], [774], [371], [991], [844], [688], [840], [914], [850], [28], [640], [389], [137], [929], [204], [632], [245], [868, 923], [970, 795], [876], [762], [418, 487, 620], [996], [424], [803], [21], [409], [849], [158], [452, 911], [307], [331], [377], [651], [215], [658, 911], 
[867], [201, 254], [118], [914], [343], [894], [340], [925], [364], [], [279], [410], [424], [907], [146], [612], [669], [], [196], [674], [476], [4], [389, 391], [72], [927], [975], [157], [148], [], [476], [367], [970, 795], [494, 497, 442, 858], [658], [192], [332], [69], [497], [601], [2, 814], [978], [], [165], [673, 526, 527, 782, 664, 508], [807], [639], [199], [642], [340], [135], [446], [541], [363], [451], [309], [], [104], [487, 810, 590], [218], [492], [862], [905], [529, 977, 978], [333], [194], [], [650, 401, 402, 546, 559, 818, 819, 889], [545], [307], [609], [517], [205], [45], [477], [716], [36], [940], [17], [677], [244], [581, 479, 511], [409, 892], [656], [791], [777], [147], [195], [219], [516], [546], [735], [954], [227], [359], [902], [216], [783], [471], [13], [161], [938], [427], [2], [66], [393], [10], [45], [783], [257], [520], [], [354], [479], [415], [771], [977, 978], [448], [502], [350], [741], [513], [361], [480, 886], [741, 884], [7], [329, 973], [161], [31], [936], [631], [738], [160], [403], [248, 250], [165], [979], [346], [847], [635, 767], [374], [739], [350], [763], [927], [813, 567], [700], [20], [292], [9], [960, 470, 923], [], [19], [], [318], [434], [803], [28], [879], [502], [554], [484], [630], [532, 923], [390], [123], [872], [678], [782, 664], [655], [851], [767], [479, 511], [519], [97], [144], [302], [231], [407], [602], [629], [96], [103], [805], [332], [865], [214], [384], [753], [895], [214], [951], [699], [255], [625], [421, 841], [292], [948], [731], [823], [728], [937], [118], [714], [551], [98], [903, 617], [87], [190], [878], [410], [611], [230, 231], [274], [513], [578, 834, 982], [234, 805], [154], [572], [983], [650, 541], [190], [379], [963], [], [794], [419], [445], [447], [408], [719], [900], [206], [260], [552], [859], [750], [928], [242], [602, 638, 639], [511, 479], [183], [462], [806], [962], [351], [756], [729], [416], [], [910], [778, 467], [570], [498], [427], [283, 284], [6], [65], [673, 526, 
527, 782, 664, 508], [555], [175], [281], [236], [], [626], [508], [824], [535], [900], [673, 526, 527, 916, 664, 508], [980], [964], [910], [765], [920, 733], [141], [479], [466], [254], [411], [430], [404], [586, 437, 408], [956], [284], [121], [], [567, 411], [161], [953], [211], [116], [416], [13], [], [279], [866], [395], [203], [13], [466], [843], [254], [603], [572], [707], [507], [523, 830], [176], [388], [198], [709], [439], [258], [11], [367], [513], [189], [736], [573], [936], [969, 440, 572], [69], [836, 837, 842, 445], [846], [778], [107], [693], [495], [270], [942], [593], [742], [134], [336], [344], [572], [929, 912], [132], [341, 342], [416], [418], [361], [], [704, 656, 479], [891], [999, 692], [173], [269], [568], [858], [35, 37], [847], [314], [977, 978], [673, 526, 527, 782, 664, 508], [807], [747], [37], [706], [987], [612], [500], [867], [], [355], [444], [670], [873], [940, 941, 942], [927], [53], [567], [422], [650, 402], [900], [318], [778], [509], [726], [583], [404], [217], [124], [767], [383], [759], [720], [129], [], [408], [679], [968], [133], [], [], [], [766], [], [22], [425], [220], [897, 651, 760], [385], [346], [924], [62], [576], [454], [118], [832], [492, 493, 495], [379], [997], [192], [712], [842, 433, 638, 639], [697], [300], [720], [736], [400, 667], [965], [843], [779], [566], [682, 562], [403], [4], [], [291], [968], [5], [132], [405], [], [545], [843, 445], [6], [898], [140], [286], [559], [102], [], [172], [798], [78], [81], [70], [523, 869], [33], [], [194], [191], [973, 983], [891], [576], [977, 978], [], [525], [386], [76], [692], [905], [986], [587], [488, 679], [652, 413], [776], [328, 108], [658], [], [880], [842], [198], [915], [400], [457, 834], [], [328, 116], [588, 790], [649], [307], [570], [774], [303], [754], [274], [317], [192], [322], [435, 876], [183], [525], [770], [976], [743], [721], [535], [749], [444], [756], [473], [518], [67], [56], [95], [], [974], [780], [754, 605], [840], [583], [3], [], [784], 
[923, 964], [963], [908, 404], [411], [586], [456], [774], [79], [577], [61], [52], [991], [559], [582, 851], [700], [813], [111], [436], [483, 958], [967], [571], [917, 413], [522], [243], [992], [952], [145], [973], [798], [473], [749], [94], [], [47], [841, 918], [374], [152, 155], [680], [698, 538], [96], [417], [99], [738, 559], [912], [809, 923, 924], [499], [416], [616], [699], [332], [743], [233], [64], [489], [751, 468, 479], [701], [91], [964], [], [967, 968], [217], [452], [], [836, 837], [64], [357], [874], [236], [789], [187], [365], [195], [9], [778], [484], [28], [170], [], [753], [90], [684], [681, 620], [144], [106], [601], [141], [688], [46], [756], [195], [896], [148], [691], [309], [763], [307], [152], [], [5], [836, 837], [543], [], [732], [323], [219], [91], [879, 977, 978], [282], [154], [941], [351], [], [22], [503], [992], [122], [891], [74], [390], [43], [126], [304], [69], [71], [407], [195], [488, 600], [935], [56], [825], [975, 977, 979], [903], [271, 280], [182], [594], [23], [331], [879], [597], [987, 998], [199], [977, 978, 728], [300], [943], [834, 906], [], [792], [280], [811], [914], [545], [288], [179], [701], [411], [120], [448], [607], [506, 421], [687], [59], [74], [733], [767], [], [87], [278], [], [304], [], [174], [936], [408], [153], [245], [551], [156], [934], [606], [657], [791], [], [716], [142], [315], [], [409], [270], [434, 794], [57], [532], [979], [502], [774], [917], [616], [12], [39], [923], [594], [421], [77], [836, 837, 844], [494], [824, 474], [518, 665], [962, 923], [735], [148], [876, 435], [844], [158], [903], [763], [178], [439], [540], [992], [], [431], [94], [48], [909], [849], [233], [588, 790], [310], [354], [829], [11], [789], [712], [650, 819], [975, 671], [348], [889], [694], [892], [354, 349, 350], [880], [117], [901], [365], [842, 879, 977, 978], [224], [581, 479, 717], [587, 677], [679, 435, 578], [969], [856], [478], [168], [688], [], [274], [], [749], [984], [492], [128], [361], [453], [473], 
[292], [283], [100], [668], [644], [34], [11], [859], [416], [995], [945], [140], [366], [7], [345], [695], [24], [450], [699], [994], [675], [564], [731], [260], [658], [20], [184], [33], [460, 718, 150], [375], [360], [366], [810, 878], [735], [576], [116], [145], [670, 518], [405, 839], [309, 917, 599], [567, 827], [588], [712], [], [595], [988], [820], [451], [110], [490], [565], [442], [918], [200], [786], [261], [573], [521], [294], [448], [71], [386, 101], [548], [760], [585], [587, 784, 596, 477], [896, 804, 999, 794, 861], [12], [681, 620], [563], [185, 186], [595], [867], [474], [332], [215, 218], [], [661], [301], [500], [337], [997], [435, 876], [888], [43], [], [309], [567], [], [101], [85], [410], [758], [160], [896], [993], [939], [802, 518], [], [435], [332], [552], [76], [282, 797], [256], [834, 906], [692, 950], [658], [978, 824], [732], [709], [905, 589, 740], [875], [636], [406], [947], [896], [487, 681, 620, 916, 508], [334], [130], [513, 776, 683, 875], [0], [980], [411], [417], [871], [141], [], [558], [876, 435], [], [523], [210], [71], [59], [535], [726], [675, 580, 608, 889], [862], [490], [914], [858, 445], [603], [854, 406], [], [849], [638, 639], [610, 836, 837], [680, 750, 697], [349], [813], [689, 887], [545], [295], [589], [7], [656], [888], [], [194], [573], [164], [332], [420], [994, 114, 947], [939], [439], [729], [440], [66, 67], [356], [474], [], [696], [387], [842, 977, 978], [778], [261], [836, 837], [200], [867, 864], [677], [419], [990], [98], [739], [72], [359], [214], [977], [682], [836, 837], [], [407], [734], [224], [219], [], [52], [103], [716], [717], [916], [140], [912], [663], [911], [270], [335], [], [659, 923], [240], [759], [832], [975], [990], [427], [756, 792], [345], [799], [381], [287], [529, 823], [333], [681, 620, 526], [819], [617], [58], [239], [134], [666], [846, 750], [673, 664, 526, 527, 632, 508], [265], [418, 709], [297], [], [134], [349], [194], [538], [114], [458, 703], [755], [934, 692, 948], 
[559], [396], [57], [64, 55], [226], [977, 978], [208], [562], [306], [780], [135], [997], [481], [500], [406, 892], [237], [494], [738], [314], [424], [], [688], [139], [881], [217, 215], [564], [2], [256], [115], [302, 306], [225], [366], [578, 552, 689, 982], [547], [696], [31], [619, 846], [934], [619, 846], [308], [950, 954], [26], [411], [668], [400, 667], [], [545], [673, 526, 527, 782, 664, 508], [371], [227, 232], [350], [70], [283], [99], [365], [405], [12], [844], [107], [964], [360], [765], [596], [784], [418], [515, 230], [867, 569], [896, 804, 794, 861], [941], [], [922], [624], [761], [386], [641], [575], [693], [658, 760], [358], [615], [], [352], [714], [417], [111], [], [563], [297, 295], [417], [565], [674], [973], [967, 968, 504, 923], [561], [398], [449], [], [390], [281], [628], [531], [270], [533], [153], [608, 474], [807], [737], [528], [548], [742, 681, 620, 526], [11], [522], [277], [166], [893], [], [51], [4], [597], [422], [145], [232], [138], [884], [978, 611], [769], [625], [880], [392], [476], [658], [327], [524, 461], [557, 538, 698], [312], [358], [557], [904, 905], [431], [269, 272], [560], [299], [582, 948, 951], [784, 587, 477, 740], [606], [67], [970], [518], [809], [787], [24], [], [239], [888], [462], [431], [], [242], [], [708], [263], [261], [46], [617], [390], [921], [42], [881], [999], [447], [351], [418], [660, 436], [218], [349], [782, 664, 508], [70], [270], [728], [53], [593, 650], [103], [975, 693, 472], [94], [], [863], [770, 539], [989], [947], [5], [864], [121], [610], [215], [720], [511], [670], [915], [296], [778, 485], [183], [516, 905, 526, 493], [330], [788, 630], [741, 399], [4], [333], [733], [89], [536, 913, 724], [63], [706, 519, 428, 716], [283], [301], [617], [44], [899], [925], [907], [772], [987, 998], [959], [670], [435], [425], [354], [299], [257, 222], [248, 250], [581], [517], [750, 564, 669], [193], [729], [793], [800], [424], [41], [475], [659], [149], [659], [605], [277], [354], [701], [528], 
[281], [720], [349], [54], [710], [286], [4], [460], [418, 709, 767], [272], [281], [611], [236], [548, 493, 851], [423], [162, 676], [720], [], [520], [], [233], [867], [213], [827], [634], [489, 638, 639], [690, 345], [242], [494], [646], [672], [224], [85], [44], [553], [583], [103, 395], [344], [135], [770], [892], [251], [656, 879], [365], [578, 689], [605], [441], [772], [347], [900, 756], [746], [915], [390], [960], [269], [210], [489, 219], [959], [896], [625], [], [432], [236], [645], [670], [738], [911], [], [652, 465, 597, 413], [900], [296], [333], [468, 603], [239], [], [40, 46], [], [], [309], [576], [66, 68], [54], [277], [910], [551], [860], [581], [10], [829], [974], [414], [316], [520], [638], [281], [], [773], [343], [83], [], [], [], [670], [656], [122], [179], [506], [839, 405], [335], [964], [237], [543, 433, 445, 638, 639], [500], [529], [577], [497, 538], [690], [895], [70], [188], [791], [159], [757], [294], [297], [29], [911], [716, 757], [116], [292], [32], [234], [838, 720, 631], [991], [], [956], [758], [209], [656, 479], [481, 482], [615], [640], [889], [444], [941], [857], [452, 911], [514, 515], [28], [11], [519], [198], [725], [497], [], [996], [170], [935], [176], [10], [779], [35, 37], [728], [203], [962, 923], [119], [349], [384], [110], [805], [365], [75], [536], [938, 935], [496], [], [554], [721], [202], [125], [109], [854], [146], [232], [476], [498], [309], [504], [237], [686], [244], [145, 148], [236], [217], [], [878], [482, 548, 851, 598, 632], [354], [292], [744, 908], [213], [697], [877], [965], [863], [302], [642], [220], [923], [283], [], [722], [618], [661], [152], [64, 55], [851], [822], [525], [363], [186], [654, 757], [], [338], [511], [963], [608], [831], [926], [106], [655], [523], [294], [44], [94], [], [840], [638, 639], [936], [148], [69], [563], [97], [552], [37], [311], [], [583], [42, 44], [817], [589], [576], [318], [198], [400, 667], [979], [468, 479], [109], [897], [632], [357], [555], [372], [447], 
[762, 853], [976], [554], [162], [834, 652, 906], [225], [797], [708], [978], [624], [819, 541], [319], [], [275], [707], [184], [250], [681, 810, 620], [737], [939], [265, 267], [700], [570], [728], [13], [156], [907, 966], [952], [685], [422], [301], [131], [437], [865], [411], [259], [], [553], [433], [809], [208, 243], [299], [311], [744, 657], [267], [], [986], [34, 977], [548, 850, 851], [632], [64], [548], [110], [372], [828], [996], [611], [355, 489], [184], [64, 55], [273], [], [419], [], [193], [425], [562], [289], [359], [160], [513], [124], [937], [452], [610], [908, 895], [281], [965], [396], [973], [335], [387], [875], [642], [52], [486], [698], [], [361], [189], [901], [635], [238], [94], [25], [574], [639], [492], [], [822], [643], [], [505], [709], [157], [406], [194], [488], [610, 759, 794], [541, 542], [985], [105], [291], [744, 657], [832], [217], [989], [307], [147], [592], [170], [612], [108], [306], [314], [41, 46], [34], [324], [7], [31], [239], [753], [557, 733], [902], [324], [336], [720], [703], [378], [650], [652], [18], [578, 216], [763, 597], [827], [], [769], [673, 526, 527, 782, 664, 508], [683], [488, 616, 887], [681, 620], [234, 214], [267], [341, 342], [], [], [670, 655, 414], [298], [322], [681, 810, 620, 508], [939, 945], [576], [671], [], [806, 630], [27], [805], [149], [62], [518], [308], [615], [393, 973], [455], [422], [487], [379], [276], [17], [497], [217, 212], [4], [], [109], [779], [713], [841, 731], [16], [153], [988], [507], [40], [400, 667], [65], [679], [982], [428], [728], [522], [100], [358], [497], [594], [667], [], [42], [2, 3], [], [722], [247], [915], [26], [981], [79, 630], [298], [], [501], [614, 584], [], [79], [497], [733, 557], [589, 639], [553], [594], [821], [896, 910, 608], [670], [375], [524], [211], [983], [892], [172], [85], [318], [409, 892], [256], [405], [682], [517], [744, 652, 657, 471], [], [], [56], [992], [579], [917], [499], [195], [823], [966, 572], [67], [661], [], [205], [20], [632], 
[272], [582, 937, 938], [193], [596], [870, 825], [912, 348], [688], [285], [234, 236], [725], [944, 946], [184], [957], [453], [401], [320, 319], [657], [975], [139], [900], [948], [787], [756], [32, 26], [], [75], [460], [518], [501, 885], [564], [643], [635], [529], [77], [627], [378], [119], [858], [497], [575], [241, 238], [], [334], [976], [989], [774], [433], [617], [552], [248, 250], [961], [1], [884], [262], [438], [641], [686], [486], [239], [625], [533], [879], [193, 201], [423, 424], [421], [186], [208], [786], [968], [693], [140], [422], [713], [953], [623], [360], [958], [2], [263], [251], [169], [839], [72, 815], [672], [404], [169], [919], [215], [933], [550], [], [43], [162, 168], [136], [664], [244], [418], [396], [756], [604], [636], [28], [208], [942], [39, 43], [951], [19], [591, 850], [358, 359], [701], [512, 907, 950, 951, 954, 572], [111], [518], [17], [986], [554], [634], [20], [88], [882], [903], [128], [570], [421], [667], [210], [513], [], [122], [866], [177, 170], [663], [160], [378], [512, 473], [], [932], [149], [955], [], [548, 651, 831], [195], [765], [], [560], [], [199], [836, 837, 748], [578, 689, 885], [742], [51], [619, 818], [329], [853], [586], [], [41], [84], [129], [485, 592], [933], [926, 544], [309, 599], [987, 998], [243], [952], [662], [834, 906], [395], [996], [], [624, 453], [429], [298], [488, 858], [841, 823], [185], [745, 851, 598], [529], [525], [], [176], [608], [847], [429], [950], [385, 386], [816], [108], [326], [691], [977], [671], [219], [2], [], [166], [605], [52], [], [246], [243], [164], [362], [315], [584], [224], [], [542], [770, 841, 970], [679], [583], [528], [543], [742], [], [879], [664], [327], [301], [800], [209], [], [], [829], [608, 514, 610, 655], [119], [31], [316], [387], [487], [638, 639], [80], [950, 954], [348], [966, 720, 572], [171], [761], [531], [507], [255], [717, 479], [70], [797, 765], [], [212], [118], [187], [890], [781], [202], [123], [551], [273], [797], [448], [821], [769], 
[321], [463], [407], [144], [911], [44], [818], [554], [966, 907], [138], [427], [865, 610], [660, 799], [568], [529, 478], [951, 725], [27], [284], [332], [254], [281, 282], [422, 747], [521], [516, 520], [805, 261], [2, 3], [192], [5], [146], [406], [264, 263], [], [458], [854], [500], [608, 514, 515], [991], [778], [100], [293], [479], [996], [936], [340], [781], [765], [64, 55], [800], [453, 454, 624], [520], [287], [821, 839], [311, 312], [37], [376], [940], [535], [163], [182], [29], [768], [337], [], [973], [420], [], [596], [990], [536], [611], [396], [682], [932], [87], [], [801], [315], [743], [478, 722], [910], [929], [518, 414], [94], [92], [81], [47], [740], [593], [], [492], [164], [668], [332], [487], [596], [304], [244], [], [968], [155], [59, 916, 55], [330], [697], [904], [295], [29], [225], [746], [77], [238], [880], [100], [], [581], [521], [805], [67], [469], [172], [271], [937, 938], [370], [575], [495], [430], [75], [514], [557], [524], [563], [312, 311], [], [], [745], [374], [706], [621], [565], [428], [492], [644], [16], [269], [619], [273], [882], [334], [140, 142], [850, 282], [937], [770], [587, 784], [205], [983], [], [540], [284], [198, 199], [], [187], [399], [582, 948, 949, 950, 954], [215], [976], [], [783], [869], [539], [930, 582, 415], [39, 26], [337], [435], [361], [325], [677], [618, 926], [910], [57], [425], [912], [908], [578, 982, 571], [], [900], [371], [931], [940], [920], [505], [339], [], [581, 479, 717], [386, 101], [939], [280], [536, 628], [454, 655], [], [868, 951, 923], [892], [752, 852], [217], [952], [29], [448], [341], [211], [677, 587], [], [409, 892], [120], [186, 193], [62], [], [], [20, 13], [539], [744, 657], [413], [], [351], [], [11], [470], [326], [799], [849, 850], [567], [430], [301], [316], [222], [919], [969, 470, 923], [425], [182], [443], [301], [566], [299], [55], [299], [822], [842], [554], [575], [101], [994], [337], [309], [736, 762], [], [238], [518, 665], [313, 315], [875], [845], [816], 
[943, 953], [769], [393, 108], [83], [113], [557], [453], [242], [713], [], [133], [751, 979, 479], [211], [5], [100], [210], [567], [278], [333], [755], [765], [613, 810, 508], [942], [892], [740], [852], [181], [82], [], [310, 504], [956], [373], [49, 50], [635], [485, 754], [522], [], [458], [684], [571], [995], [], [571], [209], [755], [0], [226], [612], [540], [197, 198], [785], [572], [379], [], [833], [546, 650, 819], [626], [903], [806, 610], [282], [], [484], [943], [39], [801, 983], [888], [365], [926], [256], [897], [48], [718, 821], [220], [861], [433], [849], [854], [711, 631], [31], [682], [381], [81], [190], [442, 663], [218], [522], [926], [986], [185], [726], [362], [539], [638, 639], [581, 479], [863], [343], [697], [925], [565], [940], [618, 923], [641], [], [972, 825], [], [339], [992], [], [185], [914], [197], [717], [], [832], [76], [93], [], [718], [294], [844], [753], [], [668], [838], [232], [303], [176], [224], [125], [319], [64, 59], [75], [360], [204], [42], [913], [552], [909], [330], [471], [758], [156], [265, 267], [898], [857], [51], [145], [374], [928], [509], [12], [525], [894], [946], [], [840], [923], [804], [886, 440, 860], [661], [606], [789], [909, 987, 926], [841], [519], [176], [316], [177], [66, 68], [808, 515], [531], [388, 872], [243], [135], [684], [242, 159], [872], [606], [296, 427, 756], [678, 487, 854], [883], [904], [803], [520, 529], [581, 656, 479], [], [754], [749], [764], [372], [693], [549], [], [447], [143], [463], [25], [922], [160], [726], [992], [453, 454, 624, 402], [], [302], [765, 706], [812], [645], [140], [301], [159], [488], [307], [142], [449, 858, 733], [41], [836, 747], [272], [659], [177], [236], [664], [18], [772], [679], [654], [565], [549], [383], [728, 478], [970], [959], [735], [952], [15], [434], [687], [871], [217], [825], [358], [109], [495], [30], [853, 645], [805], [207], [165, 234], [894], [536], [215], [312], [392], [776], [610, 47], [505], [75], [393], [173], [720], [531], [], [487, 
681, 590], [942], [129], [886], [284], [409], [298], [928], [724], [737], [604], [0], [0], [640], [232, 151], [410], [591], [680], [], [421], [717, 733, 479], [], [363], [210], [13], [219], [755], [263], [147], [287], [115], [491], [448], [780], [249, 250], [926], [], [761], [692], [303], [972], [836, 837, 958], [40, 46], [710], [293], [979], [173], [257], [681, 620], [749], [488], [288], [916], [941], [], [792], [154], [691], [], [], [640], [759], [611], [118], [], [63], [193, 235, 852], [871], [19], [400, 667], [896, 804, 999, 905, 861], [80], [433], [608, 414], [245], [880], [185], [292], [169], [85], [902], [], [567], [962], [649, 977, 978], [269], [427], [482], [382], [488, 723], [638], [505], [959], [364], [805], [497], [587, 596], [457, 834], [977, 150], [], [743], [145], [73, 77], [578, 689, 601], [168, 159], [830], [109], [766], [130], [763], [448], [993], [788], [491], [738, 944], [375], [435], [700, 999], [79], [146], [447], [269], [622], [420], [510], [578, 689], [283], [417], [673, 508], [186], [619, 846], [], [925], [467], [468], [180], [879, 912], [578, 601], [688], [102], [553], [483], [218, 156], [387], [196, 198], [487], [738, 428], [689], [323], [591], [], [9], [871], [749], [950, 951], [466], [615], [314], [615, 597], [609], [316], [488], [184], [128, 856], [669], [615], [249], [56, 472], [], [520], [189, 190], [822], [361], [537], [394], [417], [527], [242, 243], [], [385], [697], [158], [732], [172], [755], [], [132], [984], [550], [453, 454, 526], [910], [230], [771], [278], [31], [536], [586], [715], [909, 926], [97], [327], [122], [759], [157], [162], [], [732], [933], [649], [763], [788], [29], [598], [568], [422], [896, 804, 838, 585, 631], [822], [192, 193], [713], [586], [807], [75], [322], [120], [472], [737, 455], [588], [173, 958], [19], [349], [286], [701], [692], [194], [649], [769], [390, 395], [987, 935, 923], [47], [62], [570], [983], [130], [100], [519], [619, 846], [619, 846], [161], [768], [214], [254], [90], [234], [694], 
[311], [720], [], [780], [], [397], [], [349], [704], [628], [332], [337], [793], [757], [865, 850], [270], [], [989], [], [], [51], [49], [187], [254], [178], [], [245], [424], [13], [766], [584], [409, 892], [116], [17], [19], [613], [454], [751], [157], [994], [951], [111, 52], [997], [672], [77], [345], [581, 479], [30], [476], [587], [189], [550], [22], [0], [456], [200], [], [704], [49], [532, 923, 572], [], [313], [379], [420], [], [258], [28], [253], [606], [968, 504], [915], [950], [403], [535, 671], [378], [376], [565], [495], [], [414], [303], [546], [406, 887], [113], [105], [518], [164], [789, 539], [990], [938], [347], [740], [53], [172], [90], [59], [466], [906], [933], [53], [444], [140], [769, 709, 710, 767], [193], [230, 231], [561], [306], [], [614], [439, 764], [118], [808], [], [268], [577], [652, 413], [529], [367, 369], [], [492], [24], [681, 620], [137], [978], [627], [549], [136], [], [777], [182], [362], [329], [671], [1], [112], [883], [987], [703], [], [], [786], [536], [867], [104], [928], [235], [862], [828], [427], [929], [23], [958], [549], [43], [342], [971], [814], [140], [575], [552], [301], [676, 197], [430], [608, 977, 978], [303], [235], [544], [645], [807], [110], [114], [836, 976], [454], [419], [642], [581, 479, 817], [591], [79], [856], [177], [930, 844], [765], [496], [478], [231], [773], [97], [674], [991], [375], [102], [486, 650, 558, 819], [85], [109], [573], [78], [479], [401], [846], [268], [301], [892], [466], [], [497], [908], [577, 488], [308], [506], [497], [939, 943], [455], [977], [988], [89], [508], [554], [128], [30], [316], [12], [687], [423, 424], [], [553, 493], [19], [52], [76], [690], [872, 841], [553], [514], [548, 851], [374], [878], [896], [238], [45], [989], [763], [418, 720, 872, 759, 622], [18], [590], [684], [957], [673, 681, 526, 527, 782, 664, 508], [270, 279], [985], [895], [535], [129], [653], [932], [90], [331], [131], [346], [495], [495], [386, 101], [167, 212], [109, 828], [59], [293], 
[765], [217], [668], [653], [352], [118], [], [652, 413], [698], [568], [793], [932], [413, 670], [641], [822], [620, 508], [], [743], [202], [480], [981], [569], [61], [701], [417], [958], [535], [293], [], [753], [352], [609], [355], [553], [976], [292], [], [910], [509], [716, 637], [468], [858], [85], [511], [18], [692], [351], [382], [844], [939], [816], [], [704], [678], [342], [425], [194], [386], [153], [118], [799], [600], [452], [287], [630], [309], [613], [87], [647], [721], [578, 982, 703], [755], [475], [721], [19], [548], [869], [959], [57], [886], [453], [411], [302, 305], [923], [696, 463], [123], [109], [982], [818], [611], [152], [406], [745], [592], [950, 951], [442, 494], [593], [297, 295], [671], [42, 44], [994], [538], [556], [584], [92], [269], [938], [278], [64], [670], [364], [0], [], [844], [958], [813, 910, 954], [749], [881], [725], [743], [171, 172], [168], [372], [931, 790, 415], [908, 404], [251], [], [369], [58], [436, 479], [762, 532], [], [951], [30, 31], [715], [894], [867], [716], [], [440, 412], [513, 875, 822], [], [970, 795], [347], [937, 567], [427], [595], [915], [344], [679], [572, 966], [234], [288], [338], [654], [221, 206], [37], [986], [883], [312], [663], [387], [435], [294], [577], [], [649], [769], [837], [308], [570], [913], [779], [753], [955], [277], [363], [], [547, 820], [608, 597, 763], [850], [], [62], [287], [413], [], [155], [80], [908, 895], [407], [489, 781], [], [53], [435, 876], [460], [731], [558], [], [601], [186], [502], [140, 142], [535], [514], [489], [542], [87], [], [37], [319], [655], [339], [894], [579, 432, 819], [582], [173], [360, 337, 357], [340], [939, 943], [568], [932, 868], [865], [87], [916], [41], [387], [981], [818, 884], [849], [116], [352], [292], [147], [72], [536], [515, 764], [614, 966, 532, 762, 923, 572], [892], [715], [], [424], [327], [670], [673, 664, 526, 527, 508], [39, 46], [732], [383], [], [550], [320], [62], [], [617], [], [186], [963], [660], [96], [446], [393, 108], 
[3], [512], [709], [294], [], [295], [760], [561], [650, 479, 608, 609, 610], [839], [704], [117], [971], [188], [162], [30], [515], [547, 820], [439], [112], [521, 926], [797], [738], [129], [748], [], [821], [438], [], [939, 940], [355], [824], [629], [], [147], [472], [376], [782], [884], [639], [424], [981], [69], [701], [608, 824], [130], [30], [737, 920, 762], [526, 786], [666], [571], [132], [709, 696], [430], [758], [261], [428], [], [], [550], [], [875, 819], [644], [222], [221], [490], [101], [457, 617, 712, 633], [616], [311], [178], [430], [495], [995], [492], [], [512], [996], [537], [771], [894], [], [860], [709], [187], [264], [225], [483], [478], [933], [218], [915], [190], [754], [980], [], [405], [68], [557], [650], [496], [795], [779], [511], [138], [344], [748], [157], [], [184], [769, 418, 767], [240, 241, 238], [147], [893], [360], [391], [298], [806, 655], [156], [573], [], [410, 309, 599], [929], [240, 238], [619, 846], [617, 823], [625], [108, 991], [718], [626], [219], [691, 570, 958], [867], [512, 473], [638, 639], [439], [99], [926], [242, 243], [112], [397], [708, 682, 458, 439], [962, 659, 923], [719], [542], [853], [802], [107], [725], [132], [404], [420], [44], [373], [825], [583], [61], [475], [793], [920], [82], [67], [722], [168, 159], [298], [502], [861], [815], [311], [599], [111], [893], [908, 895], [371], [332], [557], [192], [346], [87], [25], [737], [534], [], [167], [937], [607], [156], [663], [169], [], [144], [899], [974], [684], [24], [575], [], [682], [286], [], [49, 50], [420], [635], [], [435], [806, 630], [16], [118], [352], [42], [14], [673, 674], [548], [755], [16], [145], [673, 742, 526, 527, 782, 664, 508], [979], [615], [404], [], [867], [259], [906], [800, 903, 552], [806], [15], [969], [807], [153], [625, 724], [852], [624], [12], [717], [261], [445], [203], [872, 759], [], [228], [711], [948], [825], [], [796], [861], [518, 842], [278, 280], [466], [327, 123], [363], [548, 851, 632], [588], [756], [579], 
[263], [577], [52], [722], [715], [554], [45], [110], [546, 714, 402], [922], [902], [608], [673, 968, 526, 504, 508], [], [255], [173], [986], [382], [568], [496], [87], [293], [468, 919], [608], [416], [372], [979], [376], [121], [815], [451], [768], [32, 30], [265], [715, 744], [114], [405], [652, 413], [704], [427], [229], [977, 775], [853], [809, 618, 659, 925], [750, 917, 697, 921], [171], [654], [951], [480], [], [973], [894], [354], [52], [341], [738], [793], [241], [96], [742], [677], [849], [396], [996], [572], [215], [295], [395], [679], [274], [245], [118], [816], [435, 631], [21], [892], [560], [], [144], [834, 906], [914], [533], [199], [576], [432], [71], [982], [186], [641], [165], [293], [391], [], [251], [902], [937, 939, 943, 950, 951, 954], [510], [290], [399, 728], [278], [587], [600], [397], [951], [248], [216], [625], [676], [], [840], [215], [900], [47], [167], [391], [698], [787], [302], [165], [604], [496], [290], [801], [715], [508], [516, 520], [39], [624, 453, 454], [903], [788], [373], [801, 329, 842], [679], [110], [430], [], [301], [289], [942], [705], [206], [810, 508], [985], [979], [246], [922], [820], [485, 754], [], [146], [269], [591, 434], [570], [], [49], [310], [455], [31], [658, 911], [198], [259], [943, 931, 933], [525], [438], [513], [691], [744, 657], [649, 487], [193], [535], [809, 909, 923, 926], [814], [635], [135], [953, 954], [465], [260, 232], [242], [685], [610, 836, 837], [516], [948], [373], [797], [], [61], [912], [897], [763], [], [191], [532], [931], [975], [162], [494], [644], [737], [629], [791], [801], [466], [532, 762], [716], [], [525], [339], [542], [521], [175], [339], [999, 159], [267], [326], [892], [880], [561], [131], [836, 837, 841, 610], [953], [218], [4], [581], [432], [470], [208], [4], [831], [668], [113], [107], [690], [579], [995], [106], [407], [425], [405], [538], [118], [368], [78], [434], [808, 642], [], [], [967], [331], [], [267], [234, 165], [199], [387], [444], [892], [883], [899], 
[41], [978], [104], [211], [51], [608, 630], [488], [648], [873], [199], [], [630], [127], [], [88], [363], [536], [888], [239], [802, 621], [483], [752], [532], [218], [564], [884], [655], [637], [38], [877], [877], [170], [611], [969, 659], [214], [320], [808], [692], [419], [591], [132], [167, 173], [434], [99, 100], [927], [95], [], [112], [449], [], [301], [74, 815, 309], [332], [508], [116], [20], [632, 851, 548], [81], [916], [15], [725], [194], [208], [77], [677], [355], [136], [779], [375], [298], [135], [212], [866], [410], [867], [190], [349], [507], [199], [140], [356], [222], [614], [615], [391], [964], [792], [353, 343], [851], [37], [831], [390], [980], [693], [93], [986], [471], [419], [], [371], [353], [238], [744], [], [192, 185], [729], [103], [768], [264, 171], [589], [994], [673, 664, 526, 527, 632, 761, 508], [286], [305], [733], [], [483], [237], [67], [], [379], [33], [7], [476], [378], [588], [746], [726], [234], [664, 851], [428], [116], [914], [759], [], [983], [172], [27], [410, 309, 599], [148], [285], [234, 177], [], [792, 834, 630], [89], [158], [752, 852], [741], [836, 837], [158], [378], [152], [669], [369, 381], [601], [231], [648, 720], [690], [608], [210], [344], [733], [610], [699], [512], [481], [], [340], [569], [], [], [731], [316], [44], [239], [455], [261, 230], [765], [], [945], [808], [], [662], [206, 221], [661], [650], [247], [810, 878], [606], [886], [208], [44], [], [133], [248], [679], [188], [], [587, 784, 477], [638, 639], [908, 404], [389], [503], [428], [303], [9], [994], [995], [162, 167], [501], [688], [974], [693], [923, 982, 762], [445], [563], [402, 546], [997, 947], [406], [144], [476], [354], [], [307], [518, 671], [65], [459], [831], [707], [15], [159], [129], [79], [207], [716], [483], [198], [171], [896, 804], [392], [223], [197], [961, 659], [258], [672, 797], [834, 457, 527, 664, 508], [410], [205], [775, 699], [486], [510], [806, 911, 496], [183], [524], [893], [829], [376], [11], [317], [976, 977, 
978], [272], [529], [161], [727], [904], [474], [314], [780, 724], [935], [354], [863], [987, 998], [95], [], [948, 949], [], [836, 638, 639], [571], [49], [342], [178], [], [195], [292], [801], [515, 808], [191], [879], [235], [574], [593], [66], [505], [225], [], [907, 966], [625], [180], [466], [639], [380], [426], [945], [37], [161, 162], [103], [751], [611], [936], [759], [701], [943], [629], [714], [389], [224], [815], [601], [819], [655], [301], [408], [740], [831], [282], [984], [], [389], [564], [25], [960], [474], [688], [957], [97], [312], [443], [846], [941], [262], [492], [985], [414, 608], [507], [578, 495, 601], [275], [205], [588], [193, 187], [89], [224], [890], [497], [583], [239], [990], [367], [], [543], [480], [989], [520], [484], [249], [593], [349], [344], [897, 799], [968, 504], [901], [829], [508], [821], [364], [165], [871], [480], [212], [], [499], [617], [400, 667], [222], [338], [413], [], [290], [], [897], [], [397], [286], [721, 831], [952], [112], [582], [558, 541, 542], [], [483], [449], [], [980], [332], [136], [56], [716], [690, 345], [835], [768], [558, 432, 889], [141], [444], [270], [637], [749], [123], [572], [538, 727], [952], [600], [33], [419], [286], [186], [397], [797], [495], [997], [692, 623], [805], [2], [609], [793], [698], [991], [217], [259], [583], [273], [900], [500], [857], [461], [996], [7], [3, 147], [110], [752], [355], [757], [99], [646], [719], [378], [293], [773], [2, 3], [531], [896, 651, 827], [774, 608, 610], [288, 290], [716], [], [673, 526, 527, 782, 664, 508], [418], [803], [768], [348], [640], [365], [220], [402], [378], [], [948], [], [635], [291], [944], [730], [1], [308], [112], [165], [616], [254], [707], [532, 762, 572], [305], [209], [], [679], [733, 858], [], [965, 923], [800], [604], [104, 489], [441], [436], [465, 597, 734], [280], [164, 165], [480], [997], [402], [70], [767], [454], [171], [391], [282, 539], [474], [526, 527, 664, 508], [533], [595], [573], [511], [908], [176], [915], [197, 
199], [530, 409], [], [810, 878], [783], [666], [538], [435], [850], [609], [71, 119], [], [], [671], [411], [535], [395], [231], [234], [249], [666], [888], [610], [997, 947], [314], [167], [557], [315], [473], [968, 504], [502], [3], [409, 892], [335], [859], [238], [581], [748], [450], [397], [737, 455, 440], [], [284], [727], [556, 827], [171], [480], [35], [384], [556], [940], [611], [447], [806], [463], [994], [594], [909, 567, 478], [999], [226], [35, 876], [73, 77], [127], [889], [69], [435], [237, 158], [466], [766], [308], [759], [994], [774, 655, 825], [698], [124], [538], [731], [484, 871], [30], [561], [441], [161], [832], [769], [898, 836, 837], [880, 518], [392], [51], [659, 923, 928, 945, 959], [280], [207], [429], [314], [566], [451], [547], [686], [972], [442], [473], [851, 633], [882], [235, 676], [157], [927], [972], [658, 824], [206], [960], [597], [], [620, 508], [460], [473], [718, 975, 437], [947], [615], [336], [815], [974], [707], [858], [849], [398], [780, 914], [363], [239], [908], [514, 788], [147], [25], [547], [697], [131], [600], [354], [165], [772], [572], [175], [399], [719], [338], [300], [655, 630], [968], [337, 943], [581, 479], [899], [815], [424], [330, 331], [48], [515, 420], [952], [288], [771], [341], [842], [562], [989], [], [730], [892], [324], [268], [974], [571], [550], [651, 412, 60, 868, 616], [770], [233], [758], [863], [618], [730], [842], [404], [264], [453], [272], [342], [294], [239], [114], [487], [824, 678], [608], [927], [969], [642], [], [542], [453], [880], [436], [355], [787], [128], [999, 700], [627], [581, 479, 817], [], [614], [873], [548], [543], [858], [465], [57], [29], [442, 858], [233], [988], [323], [255], [90], [630], [738], [170], [456], [7], [52], [868, 651, 659], [560], [685, 785], [], [383], [273], [339], [425], [609], [624], [968, 911, 849, 505], [74], [617], [966, 572], [317], [289], [610], [517, 600], [788], [989], [171], [11], [911, 658], [334], [187], [791], [458], [86], [], [333], [288], 
[949], [619, 846, 851], [641], [248], [733], [180], [667], [674], [639], [667], [230], [75], [479], [231], [747], [591], [157], [172], [410], [906], [677], [766], [], [420], [483], [], [26], [902], [113], [989], [270, 272], [597], [799], [86], [19], [456], [857], [396], [962, 923], [952], [500], [321], [526], [41], [679], [467], [334], [460], [573], [892], [607], [841], [470], [382], [918], [879], [133], [316], [581, 751, 468, 895, 479], [591], [545], [806, 459], [289], [784], [582], [130], [311], [214], [259], [932], [251], [358, 359], [], [470], [], [], [], [], [804, 503], [606], [32], [703], [612], [407], [305], [602], [681, 810, 620, 526, 508], [900], [], [339], [418], [433], [765], [], [618], [609], [932], [937], [535], [869], [981], [610], [122], [627], [], [118], [542], [175], [295], [692, 487], [56], [599], [793], [765], [23], [323], [551, 748, 629], [801, 570], [], [342], [69], [540], [259], [998], [], [797], [252], [568], [834], [], [96], [82], [486], [471], [320], [702], [921], [525], [], [690], [51], [113], [865], [919], [498], [], [325], [297], [606], [611], [496], [858], [136], [740, 756], [681, 620, 664, 526, 527, 632, 508], [548, 851], [652, 413], [886], [423], [857], [218, 156], [925], [], [353], [236], [216], [786], [488], [171, 172], [], [], [116], [666, 924], [649], [615], [686], [296], [242], [228], [668], [940], [891], [819], [279], [712], [459], [822], [777], [276], [702], [898], [884], [326], [472], [630], [932], [453], [130], [917], [], [555], [173], [973], [225], [931], [683, 594], [], [380], [192], [966], [138], [908], [53], [], [74], [144], [814], [516], [73], [845], [770, 608, 610], [298], [618], [104], [289], [850, 855], [484], [579], [57], [708, 887], [320], [929], [603], [109, 973], [5], [73], [668], [615, 652, 465, 413], [568], [649], [], [869], [105], [531], [135], [963], [366], [852], [468], [701], [740, 519], [985], [332], [524], [346], [336], [178], [2], [506], [300], [83], [251], [435, 151, 156], [853], [196], [434], [405], 
[911], [789], [251], [660, 557], [143], [306], [428], [], [619], [978, 638, 639], [156], [622], [387], [928, 960], [908], [508], [850], [436], [822], [298], [952], [408], [], [47], [573], [79], [168, 159], [633], [], [297, 295], [], [22], [], [], [512], [308], [433, 638, 639], [177], [32], [], [51], [105], [], [908], [189], [453, 454, 624], [816], [626], [975], [170], [825], [801, 838, 570], [749], [480], [510], [270], [476], [941], [900], [972, 437], [474], [170], [703], [330], [617, 823], [648], [910, 567], [953], [306], [104], [548, 453, 553, 851], [458], [309, 599], [273, 274], [341], [727], [149], [956], [477, 868, 623], [495], [792], [899], [674], [676], [677], [7], [], [72, 74], [90], [860], [677], [779], [750, 211], [868], [78], [189], [527], [253], [291], [385], [434], [687], [146, 147], [41], [548], [110], [757], [221], [692], [812, 908, 404], [834, 806, 630], [257, 222], [611], [831], [983], [281], [354], [650, 526], [355], [281], [33], [652, 465, 570, 413], [515], [385], [547], [614], [], [144], [169], [845], [915], [244], [], [40, 46], [662], [184], [958], [355], [304], [212], [63], [722], [819], [308], [882], [533], [467, 341], [659, 923], [250], [852], [], [979], [212], [939], [999, 905, 700], [610, 678], [226], [14], [99], [30], [751, 479], [453], [318], [830], [971, 502], [777, 524, 461, 596], [978, 445], [646], [911], [744, 657, 812], [257], [898], [275], [131], [547], [], [626], [335], [981], [410], [266], [343], [783], [434], [140, 142], [], [445], [557, 468, 733], [592], [738], [364], [508], [877], [448], [377], [233], [376], [627], [], [973], [997, 947], [575], [], [], [277], [351], [746], [836, 837, 605], [788], [284], [996], [542], [487], [550], [508], [69], [886], [528], [83], [583], [841], [673, 681, 620, 526, 527, 664, 508], [197], [540], [774, 977], [902], [863], [], [], [205], [], [881], [729], [463], [968, 504, 505], [271, 274], [191], [864], [], [264], [901], [], [762], [843], [853], [822, 541, 542], [], [214], [69], [264], [706], 
[418], [56], [53], [383], [504], [869, 445, 638], [461], [213], [], [709], [879], [554], [93], [333], [308], [958], [738], [479], [50], [861], [615], [833], [], [987, 998], [805], [870], [], [700], [611], [], [279], [492, 630], [487], [293], [460, 975, 437, 733], [685], [], [410], [854], [196, 198], [594], [656], [677, 587], [450], [858], [561], [773], [407], [691], [32], [490], [343], [769], [276], [144], [621], [452, 911, 658], [453, 885], [169], [308], [296], [407], [595], [453, 454, 921], [816], [476], [933], [576], [563], [369], [615], [842, 977, 978], [290], [440], [347], [206, 221], [785], [20], [919, 920], [488, 679, 714], [967, 968, 504], [814], [317], [681, 810, 620], [41], [190], [791], [431], [315], [766], [294], [942], [563], [788], [784, 923], [207], [113], [722], [111], [756], [475], [573], [520], [170, 177], [], [847], [929], [200, 155], [227], [674], [734], [52], [537, 248], [], [296], [738], [515], [760], [709], [928, 868, 923, 927], [253], [26], [611], [835], [], [305], [642], [188], [482], [], [852], [167], [352], [652], [379], [464], [649], [531], [446], [677], [887], [744, 657, 733], [], [330], [953], [589], [4], [831], [808], [616], [324], [457, 834, 906], [851], [838], [733], [], [155, 204], [794], [29], [709], [249], [364], [421], [583], [1], [820], [151], [341], [521], [296], [], [94], [572], [683], [536], [591], [532, 760], [383], [858], [7], [801, 983], [38, 44], [312, 314], [383], [79], [651], [323], [642, 542], [161], [494, 7], [70, 123], [556], [315], [990], [610, 750, 564, 697], [443, 411], [161], [19], [741], [586], [660], [263], [265], [400], [111], [610, 836, 837], [990], [976, 978], [709], [279], [295], [555], [158], [768, 610], [554], [408], [261], [211], [664], [502], [394], [439], [], [12], [893], [880], [338], [349], [656, 791], [79, 988], [574], [925], [604], [653], [966], [71], [], [721, 750], [], [], [265], [243], [89], [354], [], [], [260], [812], [298], [617], [427, 509], [792], [511], [365], [450], [503], [852], [851], 
[404], [757], [655], [756], [546, 650, 819, 542], [161], [118], [406], [42], [65], [484], [672], [825], [53], [914], [937], [756], [941], [769, 777], [498], [241, 238], [311], [90], [162], [534], [952], [185], [647], [393, 973], [141], [590], [433], [862], [394], [309], [987], [274], [616], [884, 406], [68], [617, 823], [324], [981, 429], [949, 951], [72], [973], [797], [920], [127], [363], [659], [], [132], [550], [705, 547], [46, 47], [50], [81, 82], [514], [239], [484, 871], [890], [932], [219], [284], [673, 664, 526, 527, 508], [48], [802], [68], [], [777], [954], [425], [775], [696], [450], [834, 906], [846], [544], [599, 951], [15], [835], [136], [205], [929], [931, 587, 792], [328], [829], [919], [984], [976], [453, 409], [396], [547], [683], [565], [260], [116], [187], [423], [697], [671], [54], [], [544], [308], [938], [190], [887, 406], [910], [649], [893], [367], [564], [327], [672], [441], [], [839], [313], [584], [203], [304], [560], [364], [948], [929], [309], [799], [565], [19], [630], [445], [607], [125], [746, 622], [634], [49], [362], [854], [840], [538], [869, 636], [817, 511, 479], [491], [118], [231], [519, 478], [230], [177], [141], [185], [791, 582], [80, 136], [286], [441], [], [517], [284], [421, 539], [83], [985, 324], [], [395], [21], [650, 822], [44, 26], [705, 489], [701], [351], [183], [771], [757], [679], [739], [992, 947], [565], [147], [270], [982], [21], [892], [], [745], [449], [776], [287], [163, 168], [965], [904, 981], [694], [777, 531, 587, 487], [835], [460], [604], [480], [72], [367], [260], [771], [20], [742], [814], [815], [476], [572], [67], [213], [824], [168], [163], [556], [761], [23], [90], [745], [619, 846], [80], [241], [96, 904], [709, 767], [532, 953, 762, 923], [471], [759], [407], [429], [419, 741], [390], [581], [908, 895], [834], [245], [162], [424, 423], [40], [283], [215], [446], [435], [126], [785], [997], [29], [183], [139], [428], [453, 526], [483], [909], [119, 120], [981], [574], [513], [], [154], [1], 
[248, 249, 250], [835], [], [557, 762, 733, 670], [280], [576], [310], [265, 266], [687], [122], [801, 973, 983], [676], [840], [567], [], [909], [350], [389], [142], [185], [296], [994], [652], [341], [169], [366], [579], [863], [185], [185], [119], [485], [796], [459, 445], [431], [625, 724, 540], [1], [164], [305, 302], [419], [407], [881], [931], [609], [216], [791], [185, 189], [977, 978], [], [500], [916], [218], [407], [778, 526], [631], [242], [489, 695], [882], [488, 671], [728], [982], [360], [177], [983], [354], [324], [463], [734], [513], [479, 661], [659], [899, 647], [702], [280], [492], [68], [655], [565], [410], [182], [560], [668], [207], [367], [549], [772], [], [674], [586], [132], [868, 966, 923], [472], [550], [882], [674], [687], [911, 824], [480, 707], [534], [525], [410, 599], [596], [145], [10], [548], [521], [223], [648], [814], [480], [643], [618, 813, 910], [872, 652, 413], [532], [401], [194], [518, 465, 597, 413], [849], [513], [10], [659], [34], [512], [96], [56], [513, 776, 875, 541], [520], [770, 788, 630, 502], [624], [84], [30], [330], [732], [466], [89], [866, 958], [116], [968, 504], [568, 765], [], [154], [449], [631], [996], [162], [884, 406], [642], [129], [970, 349], [814], [378], [560], [324], [510], [641], [581, 479], [351], [31], [556], [443], [537], [616], [898], [353], [79], [571], [902, 488], [964], [955], [], [418, 563], [945], [112], [], [730], [220], [384], [158], [610], [210], [966, 907, 572], [878], [125], [362], [119, 39], [722], [466], [286], [815], [150], [93], [898, 455], [368], [542], [363], [425], [703], [721], [583], [311], [232, 249], [866, 595], [], [243], [415], [73, 815], [902], [913], [33], [772, 488], [806], [368], [499], [54], [183], [480, 478], [864], [275], [], [593], [293], [666, 924], [850], [614, 696], [819, 854], [456], [495], [546], [560], [22], [217], [28], [616], [993], [974], [925], [218], [], [28], [69], [605], [832], [612], [512], [], [999], [62], [447], [994], [276], [489, 236], [812], 
[643], [921], [408], [], [], [292], [278], [286], [913], [957], [992, 528], [], [871], [249], [236], [417], [874], [38], [21], [505], [200, 204, 155], [115], [798], [230, 231], [895], [144], [288, 290], [455], [288], [488, 679], [102], [40], [587], [387], [315], [324], [375], [592], [64], [911, 658], [526], [218], [], [978, 638, 639], [539], [], [680, 697], [16], [317], [772], [675], [873], [86], [592], [47], [], [124], [619], [605], [942], [954, 950], [423, 424], [666], [475], [645], [863], [22], [442, 663, 858], [689, 601], [524], [321], [49], [528], [905], [742], [614, 697], [921], [533], [459], [894, 759], [521], [608], [104], [665], [915], [601], [135], [], [253], [356], [897, 851], [], [63], [791], [689], [24], [429], [136], [532], [373], [383], [80], [373], [874, 829], [638, 639], [748], [948, 950, 951], [177], [407], [379], [740], [349], [176], [353], [301], [626], [716], [236], [472], [310], [567], [661], [667], [650, 828], [467], [974], [51], [], [460, 974], [897], [153], [492], [386, 101], [221], [239], [556], [819, 854], [973], [251], [818, 920], [792], [409], [532, 831], [355], [708, 884], [205], [548, 526, 851, 532], [816], [470], [766], [881], [476], [579], [212], [910, 567], [950], [653], [282], [238, 240, 241], [62], [732], [668], [942], [999, 434, 861], [909], [55], [500], [217], [184], [969, 987], [240, 241], [914], [484], [32], [288], [290], [253], [63], [416], [999, 794], [261, 254], [336], [777], [312], [325], [], [245], [990], [231], [537], [774], [180], [582], [271, 277], [573], [455], [], [657], [50], [385], [], [15], [918], [118], [339], [816], [403], [549], [861], [820], [372], [230], [470], [670], [128], [569], [529], [317], [415], [], [553], [], [456], [], [986], [473], [730], [936], [237, 151], [388], [452], [120], [], [672], [260], [630], [685], [922], [931], [938], [], [103], [661], [94], [402], [577], [384], [613], [799], [768], [889], [748], [], [35], [680, 470], [704], [807], [], [], [499], [786], [28], [14], [468], [678], [396], 
[596], [83], [405], [574, 575], [551], [453], [957], [875], [666], [551], [305], [178], [926], [965], [235], [], [990], [967, 968], [464, 763, 597], [173], [654], [4], [819, 541, 542], [341], [660], [991], [145], [372], [58], [375], [119], [24], [388], [78], [959], [137], [434], [98], [676], [389], [209, 850], [84], [682], [707], [524, 461], [654, 656, 792], [236], [99], [365], [757], [954, 651], [210, 211], [256], [162], [895], [423], [216], [366], [201], [673, 742, 526, 527, 664, 508], [706], [211], [315], [426], [], [209, 805], [255], [654, 733], [], [866], [504], [645], [449, 976], [459, 655], [], [255], [681], [777], [321], [666], [401], [119], [801, 836, 842, 433, 638, 639], [548], [550], [261], [], [869, 652], [], [913], [596], [], [608, 610, 836, 837], [], [986], [845], [], [594], [608, 610, 903], [865], [54], [534], [297, 369], [391, 801, 983], [601], [746], [784], [996], [486], [673, 681, 810, 620, 508], [558], [620], [614], [82], [834, 650, 906], [609, 860], [903, 836, 837, 465, 501, 763], [534], [762], [300], [227], [483], [193], [441, 572], [343], [814], [8, 7], [650], [449, 975], [133], [819, 854], [863], [256], [65], [518], [683], [938], [449], [425], [921], [740], [186], [720], [681, 620], [393], [697, 589], [169], [886], [153], [712], [968, 504], [95], [205], [59], [673, 526, 527, 662, 664, 508], [137], [658], [5], [918], [719], [], [949, 923], [744, 657], [961], [862], [378], [694], [815], [505], [], [86], [268], [397], [375], [306], [742], [902], [778], [605], [252], [518], [196], [863], [581], [388], [232], [378], [947], [764, 413], [251], [475], [], [57], [50], [933], [321], [690], [329], [500], [854], [679], [393], [882], [595], [942], [144], [549], [976], [424, 423], [317], [], [825, 858, 958], [502], [740, 459], [309, 599], [632], [378], [311], [40, 44], [12], [647], [78], [260], [788], [464, 950, 954], [493], [644], [992], [160], [891], [399], [567], [836, 837], [604], [293], [836, 837], [223], [449], [289], [171], [742], [191, 189], [153], 
[467], [720], [353], [987], [907, 892], [643], [829], [924], [624, 453], [546], [374], [419], [980], [793], [640], [611], [350], [91], [588, 790], [488, 679], [867], [], [573], [809], [708], [378], [252], [130], [168, 211], [740], [824], [816], [382], [329], [987, 998], [42], [536, 517, 510], [149], [288, 290], [337], [334], [901], [521], [], [667], [518], [64], [100], [823], [], [310], [617], [197], [693], [548, 664, 526, 851], [], [547], [], [41, 44], [707, 528], [306], [262], [922], [], [32, 30], [331], [951], [428], [618, 659, 926], [479], [64, 55], [385], [448], [680], [882], [536], [832], [346], [82], [380], [981, 429], [791], [940], [920], [181], [258], [806, 630], [477], [721], [329], [509], [195], [455], [544], [], [222], [929], [516], [383], [43], [814], [472, 693], [652], [13], [528], [419], [300], [207], [417], [140], [581], [70], [746], [61], [579], [703], [88], [680], [778], [159], [330], [178], [809, 659, 923], [621], [265, 266], [], [710], [], [487, 681, 620, 281], [994], [144], [313], [382], [63], [], [524, 461], [38], [400, 667], [336], [943, 923], [869], [303], [486], [265], [479], [838], [967], [929], [579], [578, 689, 562, 601], [186], [878], [395], [801, 983], [352, 351], [541], [283], [235], [111], [842, 978], [898], [389], [144], [711], [65], [386], [947, 997], [382], [707, 484, 914], [468], [581, 734, 479], [643], [767], [546], [756], [607], [336], [755], [630], [619], [985], [578], [546, 650, 819], [277, 278], [929], [613], [592], [820], [313], [250], [604], [740], [319], [391], [366], [327], [45], [248], [560], [507], [908, 404], [859], [605], [55], [410], [522], [92], [195], [314], [], [909, 469], [902], [812], [259, 526], [726], [513], [962], [976, 150], [986], [349], [273], [], [965], [923], [683], [673, 681, 620, 526, 527, 664, 508], [269], [700], [468], [28], [], [679, 488], [601], [383], [347], [416], [762], [763], [518, 616], [44], [970], [116], [], [835], [808], [614], [6], [], [353], [351], [406], [382], [881], [643], [232, 267], 
[717], [863], [757], [], [326], [101, 386], [308], [548, 851, 598, 632], [346], [923, 959], [271], [771], [864], [561], [563], [682, 698], [487], [125], [543], [432], [543], [110], [968, 849], [890], [399], [524, 461], [381], [], [973], [165], [393], [648], [758], [271], [530], [804, 631], [], [542], [222], [922], [785], [109], [495, 532, 729], [599], [239], [304], [138], [], [266, 570], [137], [617], [], [949], [299], [579, 881], [327, 328, 112], [99], [503], [954], [780], [806], [683, 819], [310], [813], [962], [107], [488, 695], [990], [621], [770], [21], [66, 68], [361], [132], [83], [888], [912], [834, 755], [304], [400], [864], [296], [649], [83], [568, 869], [418], [532], [296], [393], [527, 664], [187], [564], [926], [394], [154], [453, 454], [730], [278], [879], [919], [], [354], [202], [1], [28], [802], [264], [553, 526], [989], [581], [984], [810, 508], [195], [163], [65, 973], [383], [315], [512], [293], [824], [629], [223], [673, 742, 681, 620, 526, 527, 664, 508], [420], [139], [46], [330], [325], [910], [832], [782, 851], [129], [237], [301], [500], [854], [180], [774], [955], [507], [898, 711], [486], [935], [524, 461], [226], [405], [554], [435, 876], [901], [532, 762, 923, 572], [800], [300], [82], [973], [18], [893], [584], [913], [902], [156], [296], [247], [798], [653], [755], [893], [405], [534, 729], [796], [611], [457], [327], [56], [700], [680], [889], [806], [322], [825], [412], [36], [33], [673, 619, 526, 527, 782, 846, 664, 508], [407], [994], [564], [907], [847], [406], [225], [324], [806], [393], [122], [501, 568], [640], [155], [708], [331], [312], [309, 599], [41], [174], [604], [707], [286], [972, 976], [760], [946], [930], [849], [558], [589], [594], [60], [817, 479], [342], [580], [651], [56], [175], [733], [665], [7], [445], [444], [385], [376], [721, 636, 831], [690, 471], [258], [843], [725], [575, 479], [172], [954], [577], [70], [373], [409], [198], [774, 655, 703], [770, 543], [512], [619, 846], [610], [783], [907, 883, 
532], [245], [857], [30], [508], [545], [269], [967, 504], [232], [223], [640], [], [192, 185, 186], [275], [45], [463], [392], [209], [337], [947], [], [100], [221], [685], [458], [771], [914], [14], [878], [325], [737], [281], [308, 79], [380], [585], [601], [281], [896, 435, 794], [761], [23], [666], [642], [155], [375], [681, 810, 620, 508], [525], [82], [995], [973], [415], [155], [912], [], [809], [895], [781], [147], [16], [860], [830], [239], [82], [297], [297], [29], [916], [82], [487], [808], [739], [921], [495, 532, 725], [496, 765], [164], [514], [357], [174], [559], [820], [477], [661], [436], [834, 630], [448], [248], [27], [540], [523, 414], [175, 189], [1], [985], [128], [646], [235], [722], [553], [661], [801], [940], [90], [], [586], [356], [341], [981], [579, 401, 881], [318], [72, 815], [71], [661], [756], [310], [293], [354], [438], [181], [23], [828], [989], [578, 982], [467], [289], [595], [569], [788], [370], [44], [241], [660, 733], [741], [], [800], [669], [850], [973, 983], [673, 681, 620, 526, 664], [348], [448], [592], [890], [81], [845, 720, 692], [341], [507], [954], [367], [364], [158], [697], [35], [520, 516], [727], [243], [489, 270], [546, 650, 819, 542], [566], [75], [615], [140], [706, 765], [236], [472], [83], [987, 998], [533], [], [319], [658], [832], [111], [605], [543], [973, 801, 983], [124], [365], [320], [616], [526, 527, 782, 664, 673, 508], [], [736], [447], [621], [830], [979], [145], [420], [653], [357], [355], [], [109], [], [298], [612], [642], [], [15], [479, 511], [42], [7], [897], [794], [705, 547], [571], [428], [233], [916, 664], [681, 620, 508], [46], [522], [229], [124], [609], [924], [399, 501], [156], [926], [258], [688], [808], [411], [751, 479], [510], [651], [302], [851], [], [864], [787], [103], [652], [], [], [40], [280], [606], [836, 837, 869], [868, 987, 809, 923], [975], [850], [551, 629], [729], [975, 703], [155, 204], [887], [981, 781], [703], [920, 717], [920, 414], [872, 818, 759], [816], 
[673], [669], [339], [636], [498], [392], [545], [592], [34], [38], [481, 453, 485, 632], [966], [429], [], [388], [], [836, 837, 775], [617], [464], [255], [375], [115], [195, 790], [479, 656], [213], [603], [711], [293], [822, 871], [35], [273], [875], [314], [], [86], [144], [856], [548, 851, 598, 632, 281, 285], [908, 718, 888], [659], [572, 966], [213], [849], [905], [215], [805], [872], [496], [766], [713], [36], [304], [821], [724], [182], [88], [652], [846], [150], [375], [71], [311], [725], [189, 191], [97], [542], [650, 818, 819, 822], [], [689, 501], [909, 926], [400, 667], [214], [103], [132], [191, 189], [950, 951], [259], [489], [577], [769], [617, 731, 823], [113], [927], [456], [103], [528], [], [], [203], [673, 508], [222], [822], [59], [270], [300], [111], [455, 907, 440], [766], [298], [835], [711], [670], [264, 253], [762, 532], [222], [589], [901], [15], [63], [424], [714], [292], [232, 217], [807, 654], [130], [984], [919], [928, 949], [129], [757], [371], [394], [84], [738], [421], [980], [341, 719], [388], [454, 487, 728], [229], [856], [495], [275], [812], [], [636], [64], [], [152], [95], [649], [92], [92], [710], [], [439], [908], [616], [694], [890], [822, 542], [770, 478], [270], [], [], [688], [578, 601, 982], [772], [243], [180], [482], [804], [417], [134], [526], [838], [987, 998], [486], [800], [33], [48], [904], [567, 827], [645], [236], [223], [910], [798], [842], [697], [904], [784], [56], [442], [949], [273], [67, 68], [948], [], [56], [152], [511, 479], [135], [], [548, 851], [228], [701], [451], [322], [209, 805], [520], [218], [173, 180], [437], [56], [14], [775], [871], [75], [], [920], [499, 955], [997], [874], [958], [], [613, 526, 527, 664], [858], [182, 185], [401], [238, 239], [156], [49, 50], [591], [730], [33], [682], [115, 327], [469], [66, 68], [174], [], [404], [134], [636], [284], [995], [], [571], [946], [642], [517], [457], [258], [601], [50], [903], [590], [175], [649], [551], [654], [3], [], [], [642], [769], 
[690], [753], [981], [440, 441, 572], [581], [877], [43], [593], [372], [197], [727], [347], [275], [887], [211, 210], [834, 906, 762, 638], [344], [773], [719], [], [856], [252], [439, 461, 465], [849, 505], [715, 251], [789], [659, 760], [967], [505], [], [511], [34, 977, 978], [765], [233], [132], [539], [175], [736], [893], [119], [22], [447, 600], [], [829], [459, 445], [555, 652], [620, 681, 470], [772, 679], [463], [208, 282], [785], [121], [195, 151], [466], [939], [828], [643], [664, 527, 761], [123], [674], [715, 764], [77, 319], [85], [834], [677], [801, 842, 433, 983], [951], [537], [722], [104], [713], [954], [267], [154], [678], [443], [247], [945], [814], [495], [], [701], [590], [946], [996], [], [59], [137], [230, 231], [], [659], [923], [591], [713], [795], [328], [], [656, 479], [735], [201], [485], [923], [298], [31], [624, 453, 454], [759], [994], [234], [431], [922], [562, 663, 442], [528], [149], [208], [115], [699], [660], [452, 793], [634], [118], [672], [], [779], [999], [190], [192], [858], [171], [], [], [143], [146], [518], [877], [87], [263], [104], [742, 553], [356], [346], [767], [295, 297], [485, 664, 526, 527, 851, 632], [499], [576], [529], [774, 655, 836, 837, 636], [618], [156], [133], [793], [796], [24], [], [41], [251], [196], [696], [884, 406], [], [309, 599], [579], [892], [124], [460, 718, 468, 733], [515], [618], [997], [260], [280], [12], [714], [904], [879], [41], [149, 150], [], [], [145], [685], [311], [800], [817, 751, 479], [369], [723], [896], [734], [356], [88], [692], [633], [957], [393], [862], [905], [578, 585], [172], [566], [126], [515], [323], [113], [230], [814], [688], [250], [191], [70], [318], [429], [84], [584], [81], [511, 479], [29], [308], [147], [682], [669], [243], [934, 567], [904], [858], [331], [750, 726], [906], [440], [939], [164], [91], [28], [154], [740, 477], [882], [366], [574], [], [699], [661], [662], [521], [472, 693], [155], [896], [774, 655], [931], [461], [327], [915], [89], [734], 
[246, 159], [488], [358], [546, 921], [49], [788], [], [446], [59], [919], [], [63], [505], [478], [], [832], [178], [64], [528], [363], [674], [427, 756], [752, 852], [456], [356], [], [822], [581, 479], [690], [], [864], [997], [849], [35], [33], [233], [915], [470], [987], [697], [195, 245], [719], [857], [248, 249, 250], [896, 859, 495, 827], [281], [668], [808], [198], [177, 170, 676], [104], [598], [477], [735], [135], [415], [361], [5], [86], [], [156], [600, 769], [777, 623], [], [385], [992], [314], [986], [964], [684], [642, 542], [710], [349], [267], [194], [400], [659], [565], [945, 943], [], [279], [12], [860], [466], [769, 455], [274], [967], [74], [281], [576], [915], [821], [619, 846], [52], [602], [911], [455], [993], [], [386], [218, 215], [841], [971], [267], [378], [166], [977, 978], [], [382], [474], [717], [946, 309], [], [385], [252], [977, 978], [909], [29], [137], [554], [564], [670], [67], [137], [612], [], [183], [25], [751], [46], [146], [], [803], [854], [75], [], [611], [498], [329], [276], [309], [723], [255], [334], [568], [263], [131], [456], [63], [73, 74, 815], [802], [557, 442], [585, 838], [605], [834, 906], [996], [776], [564], [349], [393], [832], [], [395], [673, 681, 526, 527, 664, 508], [20], [711, 563], [256, 676], [404], [235, 658], [196, 198], [907, 572, 966], [322], [982], [349], [175], [109], [896], [761], [514, 788], [400, 667], [879], [434], [], [859, 651, 760, 827], [], [695], [585], [851], [245], [], [430], [479], [663], [723], [648], [257, 249, 222], [276], [178], [774], [690], [937], [923], [802], [191], [220], [306], [508], [54], [237], [785], [452], [349], [914], [], [], [581, 479], [136], [214], [951], [178], [943], [455], [24], [105], [497], [298], [333], [596, 763, 764], [], [918], [227], [576], [557], [331], [905], [458], [489], [709], [490], [547], [611], [306], [905], [967, 968], [79], [944], [323], [199], [819, 566], [734], [633], [680], [942], [410], [342], [950], [827], [503], [339], [624, 453], [236], 
[134], [150], [879], [670, 628], [367, 379], [4], [801], [246], [], [103], [638, 639], [28], [150], [923], [855], [323], [52], [289], [895], [950], [], [2, 148], [473], [750, 282], [789], [630], [459], [717], [83], [55], [83], [842, 457], [168, 159], [738], [741], [724], [243], [655, 445, 638, 639], [701], [64], [786], [532, 762], [323], [504], [265], [360], [600, 825], [403], [945], [89], [361], [673, 508], [170], [706], [570], [481], [13], [488, 843], [898], [22], [340], [714], [444], [696], [219], [416], [742], [718, 536], [960], [65], [939, 943], [770, 806], [570], [968, 504, 415], [], [91], [821], [39], [503], [616], [834, 522], [844], [507], [430], [577], [93], [168], [640, 868], [649, 825], [45], [207], [641], [473], [12], [325], [192], [712, 790], [467], [526, 786], [], [881], [958], [164, 166, 167], [], [814, 970, 484], [685], [181], [830], [610, 838], [451], [181], [180], [756], [103], [320], [683], [2, 3], [553], [14], [306], [831, 968, 846, 619, 504], [394], [703], [926], [756], [379], [458], [336], [853], [267], [141], [679], [25], [273], [165], [69], [809], [113], [114], [148], [], [340], [137], [291, 340], [149], [9], [574], [781], [834, 400, 667], [196], [], [954], [436], [695], [945], [767], [350], [140], [698], [292], [810, 878], [315], [243], [774], [780], [], [232], [723], [993], [351], [6, 842], [658], [838, 434], [], [40], [77], [492], [48, 49], [23], [], [512], [161], [121], [351], [], [748], [32], [184, 189], [812], [315], [274], [106], [684], [367], [577], [121], [628], [444], [281, 282], [263, 230], [724], [213], [678], [388], [573, 518], [679], [682], [424], [930, 963, 868, 923, 813, 415], [473], [886], [421], [300], [908], [174], [529], [172], [614], [726], [605], [225], [453], [497, 406], [936, 909, 926], [791], [243], [66], [337], [684], [767], [299], [840], [74], [834, 630], [188], [971], [382], [669], [276], [30], [787], [166], [989], [347], [661, 479], [470], [625], [908, 404], [358], [519, 907], [147], [205, 197], [528], [480], 
[734], [549], [421], [588], [170], [229], [885], [826], [570], [596], [539], [908], [425], [934], [700], [82], [118], [184], [43], [87], [487], [80], [426], [221], [89], [529], [475], [601], [341, 342], [142], [553], [788, 502], [428], [], [162], [490], [419, 720], [339], [40, 46], [215], [44], [965], [299], [721], [888], [730], [530], [459], [96], [481, 482], [63], [855], [342], [562], [489], [857], [589], [590], [316], [763], [335], [412], [72], [550], [425, 912], [943], [575], [96], [332], [536], [285], [], [917], [563], [921], [385], [754], [132], [183], [834], [327], [962], [230], [393], [189], [339], [918], [977], [272], [914, 536], [98], [510], [273], [116], [510], [521], [707], [341], [488, 843], [886], [555], [200, 244], [76], [365], [609], [], [898], [564], [79], [994], [993], [78], [207], [522], [249], [14], [998], [8], [10], [], [2, 3], [], [298], [710], [501], [973, 991], [331], [41, 26], [690], [], [644], [128], [660], [602], [524, 461], [186], [685], [428], [384], [754], [386], [72], [911, 824], [], [488], [397], [155], [980], [208], [], [], [436], [594], [79], [491], [956], [777], [395], [788], [777], [112], [751], [838, 692], [200], [688], [26], [350], [582, 412], [905], [143], [685], [790], [803], [424], [970, 979], [845], [438], [87], [836, 837], [130], [466], [604], [726], [558], [468], [820], [836, 837, 617], [745], [294], [949, 923], [185], [366], [184], [265], [625], [987], [359], [108], [], [443], [323], [610, 899], [911], [245], [952], [180, 435], [732], [], [378], [815], [213], [806], [429, 981], [709], [744, 657], [556], [238], [548, 851], [179], [], [254], [72], [814], [263], [523, 728], [977, 978, 853], [547], [120], [7], [127], [154], [223], [39], [656], [192, 852], [192], [0], [112], [209], [538], [54], [78], [916], [362], [688], [561], [256], [], [468, 718, 839], [58], [14], [693], [842], [530], [209, 210], [860], [869], [737, 455], [537], [228], [118], [907, 818], [689], [792], [704], [688], [385], [736], [897], [823], [895], [986], 
[375], [200], [336], [280], [609], [596], [119], [93], [228], [119], [836, 837], [692, 943], [9], [427], [614], [558], [205], [610, 796], [941], [787, 524, 461], [250], [781], [145], [664], [884], [251], [770], [], [], [], [482], [950], [477], [437], [], [], [775], [], [481], [206], [966], [298], [86], [750], [696], [967, 968], [743], [376], [531], [818, 862], [562], [951, 503, 572], [559, 799], [842], [644], [301], [774, 414, 842, 464, 978], [349], [252], [34], [348], [361], [57], [154], [], [934, 415], [261], [], [879, 344], [478], [], [207], [698], [970, 979], [565], [900], [632], [453], [358, 359], [481], [], [46], [393], [43], [363], [359], [921], [678], [515, 880], [746], [721], [37], [670], [859], [311], [46], [426], [73], [44, 26], [611], [136], [281], [172], [489, 10], [622], [69], [895], [231], [35], [422], [687, 406], [616], [47], [353], [99], [581], [760], [514, 774, 523, 655], [636], [110], [918], [547], [206], [511], [306], [919], [650], [838], [606], [555], [], [210, 164], [], [834], [157], [480], [259], [852], [769], [559], [216], [], [244], [], [949], [246], [652], [593], [418], [344], [], [495], [730], [275], [151, 508, 158], [244], [437], [932], [40, 46], [836, 837, 841, 970], [279], [890], [522], [32], [49], [241, 238], [641], [646], [548, 851], [], [166], [10], [699], [613], [170], [795], [535], [369, 379], [136], [196], [387], [362], [600], [9], [281], [], [404], [177], [192], [874], [], [281], [640], [231], [128, 135], [540], [707], [636], [], [], [532], [94], [996], [310], [241, 238], [948], [259], [376], [356], [953], [89], [396], [513], [870], [915], [292], [163, 168], [297], [271, 272, 273], [906], [322], [261], [607], [], [104], [67], [985], [810, 878], [992], [215], [298], [315], [283], [463], [647, 828], [770], [728, 735], [213], [301], [314], [674], [779], [400, 667], [720], [564], [700], [733, 919, 920], [640], [964], [90], [574], [248], [772], [939, 942, 948], [], [13], [736], [377], [673, 681, 620, 664, 526, 527, 782, 508], [503], 
[31], [472], [754], [479, 817], [219], [772, 711], [384], [892], [404], [286], [115], [130], [988], [439], [1], [], [285], [711], [524, 461], [448], [483], [528], [172], [515], [951], [608, 487, 824, 502], [], [851, 892], [934, 923], [258], [515, 643], [], [673, 592], [], [881], [302, 303], [697], [945], [198], [880], [20], [758], [306], [283], [936, 923], [612], [743], [50], [502], [673, 810, 526, 527, 782, 664, 508], [518, 465, 597, 413], [285], [19], [518], [293], [185], [773], [503], [251], [908, 895], [537], [], [715, 524, 461, 883], [], [], [343], [722], [667], [286], [280], [], [225], [518], [236], [487], [989], [463, 758], [25], [353], [607], [801, 983], [758], [809], [539], [161], [313], [], [12], [209], [973], [17], [705, 547], [143], [948], [171], [685], [125], [836, 837, 650, 819], [], [75], [], [746], [953], [113], [843], [279], [928], [749], [761], [600], [738, 580], [165], [604], [161], [333], [66], [524], [745], [84], [674], [55], [353, 350], [142], [722], [747], [582, 617, 728], [801], [617, 845], [236], [68], [507], [4], [905, 750, 846], [419], [998], [141], [118], [476], [736], [357], [947], [929], [731, 762], [489], [106], [282], [928, 960, 966, 923, 572], [610, 731], [728], [888], [649], [869], [], [340], [832], [987, 121], [], [19], [604], [303], [4], [700], [541], [662], [168, 210], [136], [679], [607], [], [787], [18], [489], [623], [744, 657, 517], [394], [89], [462], [934], [345, 690], [910], [604], [449], [645], [645], [801], [84], [804], [834, 655, 975, 630], [168], [18], [690, 346], [161], [425], [775], [608, 584], [292], [260], [24], [158], [248, 249], [964], [77], [642], [95], [20], [805], [961], [884], [96], [551], [732], [424], [771], [965], [551], [500], [829], [181], [887], [866], [999, 218, 700], [831], [538], [676], [981], [102], [43], [312], [469], [702], [188, 189], [901], [611], [738], [510], [164], [819], [912], [142], [832], [48], [646], [811], [81], [965], [460], [866], [32], [511, 479], [577], [671, 444], [92, 95], [371], 
[404], [76], [971], [294], [694], [988], [530, 619, 846], [344], [417], [52], [364], [], [886], [90], [299], [489, 600], [861, 435, 285], [991], [151], [610, 903], [286], [948, 572], [557, 733], [690], [582, 879, 692, 954, 955], [684], [820], [425], [376], [410], [404], [759, 447], [377], [814], [795], [738], [181], [263], [50], [619], [256], [909], [252], [641], [801], [673, 526, 527, 664, 508], [173], [949], [], [800], [962, 923], [478], [123], [722], [135], [369], [28], [323], [132], [316], [69], [175], [656], [987], [85], [889, 486], [738], [340], [331], [], [196], [214], [144], [], [610], [932], [962, 923], [280], [252], [188, 190], [532], [676], [360], [], [300], [412], [589], [879, 775], [538, 727], [615], [574], [617], [435, 789], [654], [981, 429], [746], [6], [856], [187], [578], [177], [402], [489], [108], [642], [847], [288, 290], [], [173], [241], [249], [288], [395], [33], [247], [958], [923, 806, 936], [632], [258], [43], [881], [803], [455], [585], [], [586, 652], [291], [0], [991], [842, 977, 978], [], [356], [254], [], [779], [688], [668], [677], [217], [327], [976, 978], [254], [316], [497], [44], [373], [177], [647], [11], [363], [162], [54], [114], [75], [32, 28], [660], [61, 62], [929], [352], [561], [9], [491], [137], [696], [267], [98], [464], [210], [], [259], [892], [350], [995], [743], [426], [44], [264], [585], [744, 657], [691], [669], [357], [892], [944], [230, 231], [711], [], [322], [407], [798], [948], [54], [329], [52], [986], [], [745], [236], [873], [682], [772], [682], [705], [498], [846], [], [], [393], [672], [717, 479], [436], [743], [765], [253], [608], [425], [148], [334], [193, 187], [0], [988], [201], [258], [680], [783], [808], [805], [177], [94], [788], [858], [952], [701], [62], [787], [349], [600, 894], [200, 175], [425], [], [564], [351], [433], [169, 172], [611], [19], [110], [923, 925], [271], [695], [607], [972], [44], [674], [673, 526, 527, 664, 508], [222], [547], [809], [], [937], [423], [631], [966, 459, 445], 
[299], [449], [], [357], [], [343], [770], [105], [515], [809], [88], [515], [128], [630], [999, 191], [379], [750, 281], [746], [245], [102], [839, 975], [877], [884], [917, 453, 454], [391], [81], [330, 331], [], [168, 159], [], [479], [587], [509], [72], [574], [299], [922], [711], [337], [113], [182], [725], [988], [346], [452, 968, 504], [626, 893], [316], [638, 639], [880], [641], [922], [933, 923], [96, 489, 93], [587], [892], [300], [281, 283], [31], [248], [829], [192], [], [403], [767], [40, 489, 46], [521], [382], [84], [966, 907], [656, 627, 468], [810], [492, 750, 831, 414], [929], [486], [967, 968, 504], [281], [376], [504], [], [873], [418], [608], [553, 493], [443], [752, 852], [604], [110], [306], [850], [955], [953], [262], [531], [189], [279], [677], [90], [209], [742, 662], [211], [737], [64], [785], [681, 810, 620], [611, 954], [203], [845, 966], [714], [793], [491], [904], [474], [309], [731], [854], [211], [671, 535], [], [877], [768, 414], [822], [683], [462], [975], [529], [620], [497, 538], [263, 231], [681, 620], [283], [364], [535], [889], [145], [159], [746], [301, 310], [198], [240], [889], [416], [469], [839, 718, 978, 821], [23], [553], [796], [970, 671], [962, 937, 923, 959], [698], [499], [166], [932], [689, 601], [545], [96], [], [323], [925], [278], [95], [309], [500], [222], [896, 999, 861], [488], [324], [593], [], [463], [613], [63], [180], [685], [637], [38], [341], [542], [343], [988], [656], [130], [681, 620], [963], [648], [308], [939], [125], [301], [791], [569], [425], [309, 599], [529], [228], [431], [182], [178], [450], [153], [419], [145], [301], [459, 655, 638, 639], [788], [908, 895], [719], [221], [546, 650, 819], [826], [988], [91], [382], [689], [335], [720], [548], [159], [260], [223], [259], [972], [87], [555], [], [326], [874], [71], [679], [53], [367], [69], [703], [766], [556], [714], [512], [418], [376], [68], [834, 400], [924], [320], [908, 913, 404, 977, 978], [809], [843], [560], [835], [610, 836, 837], 
[768], [765], [879], [365], [678], [207], [373], [896], [820], [874], [490], [81], [708], [], [762, 868, 659, 532, 470, 923, 924], [787], [138], [192], [203], [731], [836, 837, 839, 460, 718], [928], [562], [791, 254], [20], [938], [287], [330], [628], [471], [174], [513], [684], [630], [360], [590], [200], [344], [934, 959, 923], [656], [330], [562], [963], [515, 652], [881], [186], [352], [37], [809, 925], [926], [], [825, 706], [538, 698], [531], [987, 998], [526, 782, 664], [159], [382], [230], [587], [970, 795], [706], [182], [], [349], [41], [72], [215], [433, 638, 639], [103], [616], [409], [207], [950], [423], [453], [564], [844], [911], [833], [496], [945], [106], [631], [912], [15], [812, 908, 404], [580], [957], [836, 837, 630], [872], [974], [956], [738], [775], [721], [218], [738], [], [772], [6], [632], [543], [], [206], [806], [816], [98], [122], [912], [221], [422], [186], [376], [923, 700], [358, 359], [795, 799], [36], [305], [774], [530], [137], [515], [447], [306], [299], [338], [695], [404], [988], [148], [342], [243], [771], [51], [325], [311], [199, 251], [420], [155, 157], [349], [281], [734], [649], [473], [892], [510], [194], [314], [391], [72], [622, 759], [578, 834, 457, 982], [662], [95], [861], [865], [], [681, 620, 526, 508], [748], [832], [435], [782, 664], [], [637], [251], [], [276], [137], [654], [159], [818], [230], [537], [880], [413], [68], [605], [927], [354, 680], [863], [9], [39], [428], [941], [658], [793], [571], [652, 625, 447], [139, 141], [153], [434, 823], [820], [995], [46], [502], [737], [319], [859], [248], [746], [829], [969], [649], [319], [884], [297], [450], [237], [439], [426], [989], [177], [993], [489], [795], [128], [154], [513, 566], [811], [775], [367], [], [547], [679], [375], [91], [730, 603], [411], [96], [912], [307], [62], [591, 659], [783], [3, 983], [505], [832], [727], [330], [581, 479, 436, 511], [646], [738, 532], [942], [248], [839], [926], [643, 876, 435], [117], [317], [681, 620, 526, 664, 
508], [116], [660], [981], [74, 77], [482], [], [968, 923], [872, 622, 759, 414], [870], [673, 527, 782, 664, 508], [64, 59], [161], [738], [994], [13], [119], [365], [157], [198], [193], [793], [977, 978, 472], [526, 495, 786], [962, 923, 935], [125], [3], [235], [497], [786], [810, 333, 508], [578], [845], [291], [], [257], [805], [472], [], [375], [443], [], [507], [924, 965], [774], [93], [514], [10, 11, 14], [934], [947], [443], [437], [367], [837], [514], [126], [549], [373], [623], [851], [670], [636], [468], [149], [416], [863], [203], [448], [908, 404], [30], [548, 782, 851, 598, 632], [2], [776], [487, 590], [607], [751, 479], [927], [43], [761], [407], [602], [168], [258], [920, 919], [931], [958], [955], [75], [7], [141], [], [191], [273], [75], [460], [496], [398], [], [262], [667], [63], [645], [712], [776], [], [723], [988], [673, 527, 664, 508], [713], [355], [487, 531], [454], [978, 222], [559], [800], [687], [737], [222], [384], [940], [], [272], [543], [103], [51], [777, 787], [590], [465], [926], [452], [597], [610], [227], [981], [749], [751], [331], [222], [940, 942], [956], [608, 681, 620], [592], [346], [663], [205], [684], [178], [607], [44], [47], [954], [602], [411], [813], [133], [871], [43], [58], [140], [511], [576], [606], [368], [741], [368], [587], [15], [724], [], [876, 435], [898, 680], [72], [879], [776], [385, 907], [900], [909, 926], [445], [21], [], [725], [437], [6], [896], [990], [498], [], [116], [932], [369], [234], [881], [311], [], [491], [682], [267], [220], [734], [279], [148], [997], [], [385, 386], [844], [801], [129], [709], [822], [495], [987, 998], [675], [852], [], [], [], [218], [470], [], [584], [315], [515, 819], [136], [780, 977, 914, 978], [636], [941], [941], [65], [657, 475], [152], [900], [799], [956], [957], [525], [45], [903, 689, 501, 887], [547], [853], [726], [810, 878], [784], [632], [841, 794], [852], [337], [992], [353], [598], [797], [889], [121], [701], [321], [562], [943], [452], [129], [610], 
[466], [0], [98], [581, 717], [228], [4], [555], [844], [528], [3], [487], [898], [277], [393], [342], [929], [896], [943], [], [211], [898], [590], [], [11], [726], [866], [990], [873], [610], [893], [952], [407], [885], [327], [359], [], [], [165], [449], [174], [281], [804], [176], [975], [757], [530], [397], [875], [619], [516], [687], [627], [243], [220], [], [131], [205], [], [470], [253], [307], [593], [62], [987, 998], [861], [907, 440, 572], [594], [449], [], [897], [619, 846], [755], [82], [510], [754], [613], [], [635], [183], [277], [363], [928], [321], [728, 936], [307], [292], [20], [835], [488, 616], [956], [301], [255], [538], [355], [866, 853], [546, 650, 818, 819], [300, 302], [306], [393], [804], [925], [794], [868, 931, 968, 532, 504], [427], [410], [], [801, 983, 570], [363], [941], [385], [812], [876, 435, 794], [681, 620, 285], [142], [], [551], [581], [253], [749], [453], [102], [899, 505], [679], [83], [310], [255], [608, 515], [923, 572], [99], [509], [445, 638], [679], [479, 751], [200], [89], [338], [744, 586, 657, 408], [820], [849], [992], [33], [139], [733], [896, 861], [938], [138], [674, 333], [610], [496], [290], [640], [499], [853], [], [944], [53], [576], [270], [636], [79], [], [201], [884, 406, 857], [127], [568], [], [785], [], [987, 998], [806, 975, 445], [835, 733], [258], [789], [658], [182], [739], [], [986], [767], [326], [762, 572], [229], [112], [685], [373], [873], [], [333], [659], [133], [165], [675, 757], [855], [451], [692, 509], [655, 843], [8], [56], [332, 478], [979], [505], [473], [202, 189], [672], [660], [334], [460], [769, 77, 815, 798], [293], [], [995], [65], [934], [690], [568], [317], [340], [850], [399], [10], [29], [544], [], [746], [352], [221], [717], [396], [315], [875], [720], [557], [92], [17], [441, 572], [455], [303], [834, 906], [442], [65], [534], [684], [974], [96], [889], [679], [857], [856], [679], [831], [40], [569], [412], [125], [322], [], [352], [991], [401], [440], [259], [751], [441, 
932], [391], [421], [162], [226], [228, 229], [281, 282], [708, 682], [516, 431], [786], [200], [550], [500], [803], [523], [970], [781], [397], [669], [673, 508], [143], [113], [271, 277], [889], [932], [472], [569], [645], [], [783], [673, 526, 527, 782, 664, 508], [131], [884], [204], [195], [570], [225], [904], [14], [184], [566], [7], [987, 998], [575], [693, 472], [28], [635], [155], [29], [842], [987], [34], [217], [407], [773, 455], [557], [994], [77], [271], [94], [650], [], [827], [449], [299], [75], [809, 942, 659], [821], [418, 709, 838, 767], [336], [757], [779], [786], [49, 50], [688], [817, 511, 479], [165], [67], [145], [407], [369], [216], [58], [695], [239], [622], [19], [740], [], [213], [576], [], [906, 834, 630], [812], [], [485], [456], [851], [10], [549], [773], [143], [28], [218], [840], [86], [], [], [195], [], [337], [254], [935], [561], [599], [651], [613], [11], [75], [862], [], [47], [506], [904], [740, 756], [917, 921], [920], [912], [77], [286], [], [126], [274], [24], [20], [904], [16], [0], [144], [248], [502], [687], [357], [336], [518, 671, 444], [11], [242], [274], [523, 721], [161], [711], [521, 618, 651, 813, 827], [388], [84], [62], [687], [374], [], [504], [216, 219], [158], [216], [672], [559, 818, 819], [], [962, 923], [72], [636], [863], [325], [421, 632], [], [162], [691], [975], [652], [113], [36], [899], [288], [328], [896], [579], [555], [486, 889], [719], [223], [19, 13], [781], [608], [314], [43], [943], [566], [994], [125], [388], [479, 817], [727], [318], [518], [574], [867], [540], [506], [882], [300], [613], [66], [865, 850], [973], [157], [727], [750, 591], [], [398], [198], [602], [259], [512], [905, 854], [36, 37], [420], [162], [564, 750], [382], [95], [244], [715], [596], [247], [409], [], [890], [581], [736], [360], [4], [154], [286], [598], [96], [739], [30], [765], [806, 630], [21], [334], [343], [402], [3], [149], [803], [872, 453], [177], [203], [410], [511], [997], [199], [281], [128], [246], [520], 
[405], [164], [866], [468], [95], [634, 858], [206, 221], [780, 914, 921], [276], [955], [420], [270], [881], [], [40, 46], [249], [772], [478], [857], [637], [675], [419], [426], [259], [353], [185], [178], [554], [602], [354], [241, 238], [639], [3], [761], [288], [755], [], [264], [19], [937, 938], [306], [416], [168], [880], [], [447], [191], [69], [705, 547], [704], [218], [552], [662], [940, 941, 942], [173, 251], [], [121], [178], [914], [971], [206], [610, 890], [719], [31], [159], [619, 846], [225], [610, 465], [113], [281], [], [113], [212], [], [612], [300], [702], [819], [674], [513, 776, 819], [335], [498], [870], [702], [63], [204, 153], [730], [635], [996], [803], [131], [803], [977], [111], [], [792], [357, 358], [681, 819, 620], [], [965], [307], [50], [408], [826], [92], [879], [910, 567, 926], [513], [867], [514, 515, 898, 808], [100], [570, 691, 652], [489], [418], [387], [866], [350], [870], [420], [166], [540], [345], [819, 818, 632], [417], [640], [662], [914], [650, 541, 558, 819], [68], [707, 637], [557, 919], [96], [902], [172], [902], [587], [447], [959], [507], [132], [789], [342], [66], [875, 566, 541], [764], [51], [390], [791], [416], [517], [896], [], [18], [985, 309], [515, 469], [39], [395], [809, 959], [833, 913], [947], [126], [850], [813], [723], [73], [544], [165], [187], [886], [], [37], [147], [912, 824, 447], [864], [842], [], [723], [72], [539], [633], [609], [220], [489], [418], [555], [430], [113], [439], [221], [727], [616], [272, 280], [], [428, 195], [863], [530], [], [251], [], [979], [579], [306], [619, 846], [939], [751], [676], [281], [974], [859], [547], [703], [769], [888], [61], [], [218], [496], [392], [88], [904], [856], [323], [281], [804], [491], [122], [408], [809, 925], [785], [164], [], [968, 721], [259], [284], [11], [], [491], [147], [449], [504], [952], [488, 695], [661], [242, 243, 805], [102], [139], [2], [], [785], [251], [174], [425, 858], [489], [836, 837], [958], [44], [348], [266, 219, 156], 
[193], [24], [167], [518, 444], [970, 976], [766], [862], [733], [], [951], [934], [450], [], [649], [150], [955], [94], [135], [], [522], [641], [459, 978, 445], [836, 837], [606], [980], [95], [46, 59], [], [], [386], [287], [518], [], [578, 903, 689], [102], [186], [685], [252], [736], [179], [322], [475], [866], [], [427], [278], [602], [582, 950, 790, 953, 954], [120], [372], [641], [910], [626], [448], [803], [983], [319], [3], [202], [658], [528], [956], [], [500], [722], [759], [770, 788], [90], [892], [], [350], [188], [920], [576], [760], [908], [215, 218], [621], [407], [208], [610, 841], [526, 882, 606], [964], [534], [344], [726], [81], [83], [266, 267], [2, 3], [855], [201, 589], [654, 475], [234, 795], [72], [947], [], [426, 635], [16], [681, 620], [379], [765], [736], [888, 821], [27], [152], [53], [540], [903], [85], [64], [95], [834], [786], [908], [243], [253, 273], [479, 436], [74], [652, 847], [417], [711], [583], [639], [], [], [923], [131], [316], [510], [193], [372], [140], [770, 788], [842, 433, 639], [625], [], [34], [22], [259], [744], [878], [472], [470], [216], [690], [179], [30], [288], [518, 491], [694], [522], [1], [320], [809, 659], [850], [95], [529], [204], [890], [93], [865], [868, 495, 572], [546, 650, 664, 527, 819], [946], [629], [815], [661, 479], [488], [311], [130], [781], [90], [93], [250], [239], [684], [137], [94], [707], [570], [572], [268], [673], [449], [198], [787], [618, 926], [965], [930, 934, 923], [333], [344], [128, 131], [464, 787], [], [462], [382], [176], [441, 572], [861], [81], [509, 582], [713], [120], [858], [621], [263, 236], [248, 249], [345], [762], [57], [12], [703], [150], [734], [881], [866], [416, 602], [267], [840], [400, 667], [62], [399], [], [17], [426], [81], [127], [445], [88], [981], [912], [109], [673, 526, 527, 664, 508], [220], [693], [740], [699], [], [182], [213], [201], [243], [], [376], [535], [275], [958], [605, 526, 784, 477], [240, 241, 238], [973], [459], [225], [564], [846], 
[275], [86], [363, 501], [640], [512], [564], [355], [968, 505], [738], [636], [630], [142], [10], [], [315], [387], [931], [992, 997, 947], [543], [258], [610], [668], [404], [], [50], [922], [923, 122], [574], [741], [456], [967, 968, 504], [543], [156], [770, 788, 916], [646], [35], [488, 600], [673, 904, 905, 526, 527, 664, 508], [796], [646], [393, 108], [226], [777, 524, 461, 787], [827], [920], [989], [30], [165], [361], [524, 461], [], [387], [432], [], [385, 101], [489, 368], [355], [705], [148], [549], [995], [], [123], [384], [916], [95], [652, 764], [396], [807], [], [992], [783], [299], [529], [958], [211], [961], [87], [232], [369], [664], [130], [], [444], [515], [894], [453, 831], [790], [660], [668], [919], [], [14], [327], [297, 295], [898], [102], [905, 794], [39], [217], [194], [869], [40], [475], [8], [927], [], [108], [588], [638, 639], [745], [232], [11], [], [875], [443], [245], [], [820], [], [577], [277], [494], [], [542, 822], [444, 637], [907], [], [423], [45], [105], [530], [352], [754], [675], [141], [476], [], [681, 620], [683], [388], [111], [497, 663], [171], [139], [530], [189], [125], [804], [994], [581, 479], [939, 943], [553, 493], [], [459], [872], [316], [], [289], [125], [131], [422], [617], [946], [336], [], [963], [539], [960], [812], [727], [128], [150], [127], [472], [936, 909, 926], [263, 253], [448], [923, 968, 849, 762, 828], [77], [416], [890], [311], [709, 767], [417], [479, 661], [216], [407], [138], [], [903], [805], [405], [989], [330], [16], [480], [519], [], [610, 589], [216], [], [810, 508], [216], [588], [938], [604], [341], [82], [651], [847], [], [67], [409, 892], [582, 936, 940], [333], [111], [], [432], [993], [178], [234], [750, 721], [341], [645], [449], [608, 744, 841], [975, 447], [349], [515, 665], [0], [511], [34], [638, 639], [911], [841], [741, 539], [299], [508], [62], [819], [981], [518, 665, 671], [955], [484, 914, 821], [782, 664, 281], [430], [905, 799], [], [131], [192], [48], [726], [92], 
[155], [362], [510], [607], [588], [238, 241], [187], [508], [862], [873], [911], [842], [809], [538], [866], [733], [977, 978], [], [499], [809, 923, 925], [403], [532], [901], [], [209], [35], [844], [232], [507], [299], [497], [111], [563], [680], [995], [403], [633], [340], [804], [], [517], [139], [936], [452], [17], [609], [247], [], [672], [560], [102], [356], [498, 919], [403], [], [143], [820], [324], [739], [479], [85], [330], [558], [], [433, 842, 639], [340], [67], [90], [318], [4], [532], [76], [544], [403], [764], [], [874], [537], [365], [45], [494], [95], [581, 661, 479], [145], [777, 623, 499], [429], [554], [8], [268], [140], [], [343], [787], [522], [398], [276], [864], [313], [974], [781], [217], [], [892], [364], [180], [44], [587, 784], [923], [676], [], [896], [586], [606], [770, 806, 608, 610], [804], [228], [336], [739], [432], [], [16], [73], [707], [916], [291], [279], [267, 265], [53], [825], [962], [807], [399, 501], [812], [995], [640], [139], [320], [245], [891], [540], [696, 477], [955], [738], [636], [528], [545], [316], [619, 846], [838, 551, 711, 629, 631], [], [53], [761], [491], [768], [701], [489], [468], [355], [24], [726], [812], [245], [55], [896], [332], [938], [614], [356], [56], [311], [317], [494], [150], [720], [139], [486], [118], [], [744, 657], [74], [794], [903], [], [23], [772, 679, 488], [104], [437], [602], [753], [456], [389], [908], [687], [22], [748], [682], [451], [894], [919], [308], [792], [161], [383], [681, 620, 526, 916], [915], [], [401], [439, 873], [235], [754], [662], [621], [821], [33], [847], [433], [585], [], [526, 673, 508], [482, 754], [552], [386, 101], [974], [825], [248, 249], [538, 698], [183], [46], [647, 845, 438], [240, 238], [874], [], [932], [763], [608], [17], [842, 459], [955], [758], [990], [38], [354], [853], [], [997], [212], [702], [745, 572], [], [696], [635], [449], [10], [91], [194], [873], [847], [250], [91], [989], [679], [784], [146], [255], [631, 838], [688], [13], [971], 
[157], [879], [], [165], [836, 837], [], [561], [458], [739], [], [869], [490], [806, 911, 502], [807], [], [], [40, 46], [560], [22], [568, 824, 869], [519, 907], [712], [144], [], [236], [858], [552], [146], [239], [256, 234], [], [957], [704], [791], [567, 926], [827], [377], [], [910], [160], [601, 578], [260], [542], [690], [146], [777], [651], [159], [371], [189], [64], [683], [814], [416], [717], [773, 659], [940], [465, 597, 630, 413], [468], [636], [145], [348], [398], [530], [869, 824], [880], [12], [933], [381], [146], [802], [127], [153], [968, 504], [814], [894], [637], [55], [359], [641], [635], [396], [537], [], [41, 44, 26], [937], [318], [12], [890], [266], [808, 836, 837], [624], [538], [575], [959], [10], [632], [72], [918, 721, 608, 750], [548], [740], [], [321], [661], [38], [991], [444], [573], [205], [619], [667], [807], [602], [757], [205], [], [67], [710], [145], [181], [64, 55], [619, 846, 721, 831], [100], [261], [28], [900], [552, 903], [772], [513, 776, 822, 541, 542], [897], [936], [140], [600], [], [329], [603], [642], [135], [658], [184], [416], [283], [950], [570], [655, 806], [794], [], [954], [921], [563], [], [554], [830], [277], [121], [839], [93], [711], [], [77], [818], [794, 861], [946], [208], [927], [211], [647], [693], [868], [267], [404], [979], [132], [120], [193], [653], [569], [489], [983], [770], [272], [752], [845], [448], [396], [742], [728], [], [321], [], [621], [291], [575], [243, 254], [820], [421, 693], [315], [589], [207], [274], [356], [730], [869], [619, 846, 721, 883, 831], [284], [311], [673, 526, 527, 664, 508], [424, 423], [886], [733], [724], [489, 444], [41], [324], [69], [376], [835], [323], [479], [6], [754], [452, 151], [204, 155], [], [320], [481], [337], [859], [324], [245], [619, 846], [865], [], [717], [459], [86], [118], [355], [], [525], [], [398], [570], [389], [422], [343], [74], [148], [211], [846], [126], [682], [923, 924], [], [293], [263], [699], [491], [], [42], [146], [408], [931], 
[655, 752, 852], [], [115], [657], [223], [881, 579, 889], [332], [962, 659], [558], [865], [295], [434], [572], [95], [108], [98], [846], [156], [337], [819], [750], [], [648], [], [195], [627], [180], [856], [975, 977, 472], [123], [289, 293], [109], [749], [177], [684], [584], [546, 650, 402, 818, 819], [], [472], [698, 538], [52], [587], [535], [375], [240, 241, 238], [922], [869], [673, 681, 526, 527, 782, 664, 508], [470], [847, 403], [714, 402], [608], [481, 482], [6], [418, 918], [], [90], [496], [903], [174], [], [281], [673, 810, 526, 527, 782, 664, 508], [640, 919, 841, 468, 728, 608], [358], [203], [421], [754, 632], [990], [686], [460], [844], [150], [258], [71], [446], [40, 44], [419], [865], [318], [722], [364], [585], [], [466], [914], [211], [858], [868], [230], [715], [339], [], [696], [482], [84], [909, 910, 926], [581, 479, 436], [], [], [], [226], [861], [882], [341], [792], [], [827], [360], [438], [318], [2], [229], [999, 435, 861], [275], [103], [672], [286], [98], [408], [942], [679], [35], [688], [79], [171], [232, 852], [22], [654], [436], [182], [950], [688], [816], [222], [773], [472], [296], [951], [517, 540], [911, 735], [383], [173], [41], [962], [467], [846], [664], [233], [905, 869], [82], [692], [475], [928, 960], [699], [741, 735], [378], [209], [569], [808], [589], [4], [166], [922], [952], [839], [770], [857], [174], [261], [406], [740, 783], [264], [41], [556], [448], [242], [680], [744, 657], [420], [824, 474, 911], [675], [50], [568, 248], [352, 353], [984, 425, 853], [777], [768], [265], [894], [], [619, 846, 470], [793], [12, 14], [967, 968, 504, 923], [823], [61], [419], [569], [656, 858], [431], [315], [508], [746], [453, 454, 624], [654], [74, 815], [444], [3, 4], [74], [199], [35], [232], [231], [524, 461], [111], [256, 218], [994], [], [810, 590], [964], [806, 870, 843, 850], [211], [519], [452], [637], [198], [946], [821], [508], [217], [873], [258, 279], [790], [672], [578], [614], [281], [594], [654], [465, 597], 
[51], [504], [106], [22], [821], [45], [516], [524, 461, 787], [694], [], [], [363], [767], [39], [7], [585], [647], [722], [510], [457], [174], [439], [919], [516], [215], [119], [233], [245], [871, 536], [929], [946], [71], [842, 445], [281], [123], [58], [497], [205], [], [438], [279], [710], [897], [912], [512], [689], [], [879, 614], [181], [388], [761], [509], [188], [537], [439], [112, 977, 978], [687], [975, 703], [], [773], [859], [14], [552], [190], [549], [500], [385], [524, 461], [802], [332], [49], [397], [913], [945], [176], [198], [26], [], [107], [], [868, 849, 504], [101], [847], [809, 924], [247], [736], [813], [385, 862], [142], [585], [4], [971], [730], [707], [445], [821], [795], [168], [780], [295], [581, 479, 436], [790], [361], [587, 792], [875], [675], [481], [104], [5], [941, 923], [454, 911, 474], [], [262], [456, 970, 445, 638], [508], [981, 429], [707], [475], [325], [851], [292], [412], [], [907, 440], [755], [495], [486], [941], [601, 578, 982], [206], [371], [896, 861], [686], [923], [672, 899, 469, 827], [420], [440, 441], [], [797], [596], [], [354], [944], [464, 676], [338], [462], [930], [731], [680], [679], [938], [413], [438], [455, 600], [162, 167], [164, 166], [813, 567], [921], [7], [106], [321], [897], [131], [921], [110], [453, 454, 559], [737], [259], [71], [690, 345], [144], [453], [370], [267], [640], [968, 504], [941], [411], [695], [225], [205], [10], [704], [72], [876, 435], [307], [650], [987, 923], [455], [728], [734], [680], [497], [877], [317], [591, 868], [595], [635], [852], [987, 998], [654], [970], [417], [56], [479, 511], [280], [256], [394], [422, 559], [205], [962, 923], [], [123], [991], [891], [416], [761], [983], [871], [981, 429], [291], [603], [5, 6], [595], [723], [544], [2], [873], [668], [], [], [898], [458], [880], [962, 467, 499], [179], [340], [515], [729], [700, 999], [245], [97], [330], [655], [629], [919], [71], [421], [519, 907], [977, 978, 445], [], [391], [230], [645], [], [283], [518, 
671], [866], [31], [], [], [678], [521], [458], [150], [486], [], [347], [645], [], [466], [288], [745], [702], [562], [618, 909], [719], [918], [335], [344], [575], [499], [602], [952], [520, 680, 431, 529, 850, 443], [], [933], [874], [387], [234], [51], [61], [165, 187], [87], [], [61], [383], [194], [373], [193], [866], [470], [570], [257, 258, 489], [269], [14], [115], [393], [], [772], [937], [625], [673, 553, 526, 527, 664, 508], [979], [10], [511], [916], [388], [279], [], [523], [2], [902, 488], [768], [], [157], [24], [950], [944], [230, 231], [337], [612], [846], [215], [625], [529], [258], [985], [769, 798], [769, 114], [443], [205], [15], [578, 885], [683, 875, 558], [800], [281], [889], [434], [770], [519], [508], [673, 664, 526, 527, 508], [325], [803], [760, 415], [360], [743], [640], [729], [573], [731], [91], [], [301], [], [145], [931], [816], [723], [], [669], [941], [810], [730], [811, 281], [605], [22], [945], [678], [911, 658], [751], [], [292], [520, 697], [480], [230], [705], [536], [327], [232], [624], [110], [301], [889], [23], [429], [668], [337], [110], [864], [910], [448], [807], [723], [58], [105], [439], [199], [96], [746], [769, 606], [429], [], [650], [312, 311], [824], [866], [995], [554], [898], [577], [980], [768], [570], [850, 911], [10], [444], [977], [177], [443], [911], [352], [], [24], [708], [170], [860], [56], [936], [5], [318], [589], [648], [937], [668, 538, 607], [692], [836, 879, 822], [270], [543], [228], [923, 947], [933], [567], [920], [907], [880, 972], [615, 543], [568], [320], [927], [957], [329], [88], [104, 489], [461], [591], [896], [338], [971], [608, 518, 734, 465, 413], [797], [969], [999], [129], [373], [159], [366], [844], [647], [482], [142], [983], [129], [205], [245], [717], [52], [908, 404, 895], [], [453, 850], [473], [808], [332], [858], [448], [668], [700], [829], [795], [21, 127], [197, 199, 836, 837], [281, 282], [904], [763], [681, 620, 508], [256], [51], [612], [805], [155], [439], [373], 
[908], [546, 650, 819], [138], [111], [502, 539], [562], [702], [753], [304], [425], [828, 845], [307], [872, 759], [941], [923, 907, 532, 470, 762, 572], [], [169], [588], [33], [498], [557, 733], [107], [546, 889], [490], [597], [139], [806, 655], [778], [673], [], [287], [97], [332], [463], [33, 983], [636], [486], [183], [950, 951], [], [822, 542], [56], [723], [], [39], [240, 241], [696], [864], [921, 917], [977, 978], [868, 588, 692], [160], [824, 775], [790], [49], [761], [7], [235], [803, 637], [276], [584], [71], [756], [645], [629, 508], [774], [858], [53], [750], [836, 837, 906], [38, 45], [640], [856], [602], [225], [953], [484], [466], [769], [491], [489], [326], [71], [331], [66], [302], [434], [], [409, 531], [511], [745], [519], [114], [], [429], [418], [334], [318], [162], [182], [614, 818], [225], [740, 783, 477], [80], [], [14], [499], [591], [497, 884], [568], [100], [894], [486], [354], [], [521, 926], [514, 515, 597, 763, 445], [924], [63], [477], [676, 173], [888, 718, 839], [277, 278], [60], [], [716, 13], [913], [207], [375], [652, 465, 830], [340], [156], [154], [253], [251], [861], [277], [785], [317], [514, 655], [], [617, 823], [483], [382], [613], [48], [777], [812], [502], [198], [263], [306], [37], [35], [184, 191], [801], [262], [485], [], [576], [150], [700, 950], [333], [30], [23], [130], [50], [619, 750, 846, 721], [677], [249], [557], [35], [108], [], [400, 667], [960, 868], [348], [649], [830], [996], [670], [660], [494], [851], [662], [751, 479], [675], [851], [454, 917], [227], [747], [56], [332], [214], [930], [127], [987, 998], [921], [66, 68], [], [283], [784], [386], [996], [744, 657], [652, 465, 413], [239], [296], [359], [945], [876, 435, 282], [651], [], [509], [124], [66], [], [981], [572], [334], [127], [319], [900], [29], [327], [28], [382], [344], [731], [399], [680, 898], [156], [995], [161], [78], [367], [494], [774, 464], [951], [480], [81], [252], [464], [532, 453], [52], [], [735], [], [301], [354], [338], 
[653, 665], [482, 485], [992], [562], [676], [], [219], [570], [542], [974], [713], [538, 727], [801, 107], [725, 505], [937], [891], [], [290], [513, 875, 819], [850], [755], [866], [687], [344], [441, 572], [924], [237], [903], [93], [92], [350], [923, 951, 762], [162], [267], [335, 845], [], [411], [774], [357], [137], [581, 586], [608, 464], [411], [660], [162, 166], [810, 878], [937], [661], [558], [168], [89], [732, 622, 759], [235], [247], [384], [845], [871], [686], [993], [196], [345], [548], [404], [391], [174], [686], [755], [14], [143], [779], [914], [2], [930], [538], [912, 825], [478, 239], [478], [], [75], [922], [401], [730], [399, 840, 462, 741], [971], [32], [40, 46], [791], [525], [685], [672], [863], [754], [366], [205], [580], [202], [474], [416], [598], [635], [986], [914], [897], [607], [453, 454, 624], [757], [11], [960, 928], [136], [], [747], [311], [784, 587, 740, 477], [249], [326], [], [], [337, 360], [823], [58], [], [189], [936], [886], [762], [402, 593], [], [4], [851], [944], [708], [845], [164], [945], [256], [53], [821], [455], [918], [119], [55], [462], [20], [857], [650, 402, 819], [646], [197], [439], [752], [774, 412, 671, 836, 837, 733], [8], [670], [845], [617, 515, 860], [802], [853], [32], [650, 683], [139], [487], [401], [168], [82], [877], [781, 409], [305], [652, 830, 764, 413], [853], [723], [534, 729], [578, 876, 689, 435, 794], [858, 807], [884], [353], [218], [451], [879], [504, 968], [196, 198], [], [458], [805], [21], [864], [589], [384], [652, 465], [881], [458], [], [659], [847], [813], [923], [506], [198], [103], [912], [854], [674], [673, 664, 526, 527, 632, 508], [0], [84], [183], [5], [37], [840, 462], [478], [270], [541], [81], [927], [810, 878], [677], [471], [649], [416], [929, 509], [251], [], [366], [335], [464], [625], [20], [776, 650], [561], [379], [559], [415], [139], [757], [142], [569], [], [201], [895], [576], [663], [491], [64], [39], [185, 182], [866], [844], [326], [530], [322], [407], [548], 
[579], [84], [717, 751, 479], [680], [812], [940], [284], [250], [484], [677], [297], [880, 731], [368], [291], [7], [296], [731, 861], [15], [31], [], [783], [431], [244], [16], [377], [639], [628], [908, 404], [185], [730], [660], [362], [647, 969], [519], [323], [978], [509], [721], [], [], [608], [309], [591], [316], [484], [], [496], [836, 837, 853, 762], [976], [922], [956], [619, 818], [422], [103], [624, 453], [871], [326], [270], [986], [478], [907, 440], [843], [685], [311], [426], [792], [764], [908], [280], [280], [], [503], [865, 509], [637], [672], [153], [110], [45], [595], [995], [916], [923], [], [375], [376], [219], [735], [], [406, 857], [], [963], [586], [148], [199], [56], [287], [473], [937], [449], [861], [195], [707], [584], [497], [514, 689], [704], [538], [533], [904], [692], [76], [286], [], [783], [216], [189], [25], [500], [102], [821], [795], [737, 455, 907, 440], [862], [760], [377], [179], [637], [999, 648], [685], [511, 479], [393], [390], [275], [626], [337], [464], [310], [968, 504], [116], [222], [272], [747], [845], [815], [40], [30], [402, 819], [966], [], [580], [873], [580], [448, 494], [957], [893], [557], [139, 140], [], [628, 536], [324], [578], [203], [757], [609], [947], [321], [945], [485], [610], [472, 693], [653, 463], [544, 909, 849, 469], [172], [118], [319], [518], [837, 678], [694], [962, 923], [957], [938], [422], [525], [], [135], [890], [224], [923], [100], [967], [42], [926], [566], [724], [114], [249], [], [913], [407], [804], [528], [254], [480], [441], [207], [607], [357], [85], [396], [694], [543], [875], [519, 956], [257], [873], [5, 6], [553], [105], [268], [], [304], [866], [157], [775], [896], [599], [528], [71], [351], [636], [464], [99], [336], [17], [39], [770], [882], [], [72], [659], [661], [836, 837, 487], [], [6], [352], [861], [307], [328], [341], [735], [733], [], [152], [732, 759], [924], [717], [867], [229], [], [662], [757], [577], [309], [581, 479], [724], [766], [842, 433], [587], [923], 
[645], [229], [685], [732], [340], [530], [352], [865], [826], [820], [853], [495], [475, 15], [25], [534], [822, 542], [311], [337, 334], [907, 499, 470], [749], [347], [260], [412], [442], [199], [834, 487], [498], [65, 56], [764], [789], [], [766], [811], [660, 757], [650, 402], [562], [968, 504], [353], [244], [570], [438], [795], [198], [298], [838, 551, 629, 631], [21], [90], [248], [17], [532, 762, 923], [669], [413], [716], [85], [467], [861], [893], [317], [803], [225], [426, 685], [410], [925], [], [185], [814], [351], [578, 452, 689, 538, 601], [974], [80], [343], [496, 529, 411], [84], [884], [433, 639], [322], [927], [550], [651], [512], [940], [988], [790], [791], [909, 567], [638, 639], [367], [400, 667], [], [25], [736], [474, 452], [95], [822], [90], [119, 39], [242], [86], [638, 639], [504, 850], [596], [54], [], [320], [773, 532, 923, 572, 762], [737], [916], [287], [168], [375], [129], [959], [546, 650, 818, 819, 542], [816], [597], [558], [551], [], [], [553], [603], [466], [], [], [80], [27], [162], [434], [82], [222], [532], [15], [730], [595], [382], [785, 464], [881], [753], [76], [112], [204], [], [618, 813], [350], [506], [947], [130], [278], [932], [338], [41], [401], [285], [32], [829], [156], [190], [226], [340], [327], [365], [498], [435, 794], [619, 846], [611], [910], [262], [905], [], [524], [503], [659], [558], [795], [807], [761], [984], [947, 125], [112], [299], [84], [122], [847], [847], [472], [219], [864, 586, 652, 413], [650, 568, 608], [44], [952], [149], [89], [583], [565], [145], [], [806], [31], [232], [703], [858], [73, 74, 815], [644], [70], [745, 620], [513, 875], [685], [173], [840, 587, 758], [836, 837, 842], [479], [742, 620, 664, 527, 508], [798], [79], [746], [198], [316], [727], [252, 262], [258], [597], [302], [859], [932], [637], [761], [209], [297], [442], [993], [32, 152], [350], [989], [815], [432], [779], [], [1], [880], [578, 834, 836, 837, 458], [163], [506], [804], [672], [110], [143], [934], [566], 
[214], [911, 253, 735], [524, 461, 958], [525, 718, 437], [], [518], [119], [177], [400, 667], [295], [], [789], [234], [929], [638, 639], [597], [937], [494], [257], [62], [], [347], [369], [924], [539], [397], [317], [126], [580], [550, 968], [810, 878], [255], [576, 536], [459], [188], [], [33], [723], [594], [435, 58], [39], [85], [199], [888], [883, 739], [409], [147], [478], [462], [592], [716], [342], [143], [], [494], [74], [666], [464], [218], [411], [365], [900], [247], [754], [174], [515, 790, 636], [868], [12], [535], [887], [300], [39], [938], [427], [287], [203], [52], [94], [361], [317], [793], [935], [928], [977], [431], [776], [61], [505], [51], [63], [71], [116], [914, 536], [672], [], [401], [495, 532], [205, 478], [449, 975], [951], [671], [344], [806], [330], [697], [281], [615], [333], [528], [699], [651, 827], [812], [], [603], [], [337], [457], [948, 950, 954], [808, 842, 977, 978], [], [618, 813], [417], [60], [801], [453], [132], [582], [332], [114], [324], [489, 134], [260], [825], [], [968, 762], [775], [474], [], [87], [702], [579], [544, 827, 469], [], [344], [968], [667], [261], [988], [593], [931], [688], [438, 728], [845], [694], [843], [715, 652], [], [170], [979], [378], [], [582, 936, 943], [872, 759], [740], [378], [108], [127], [935], [203], [931, 933], [351], [254], [915], [633], [967], [429], [751], [268], [10], [983], [578, 982, 601], [744, 657], [556], [970], [268], [105], [464, 597], [482, 632], [104], [255], [569], [582], [272], [], [115], [399, 501], [133], [548], [241], [796], [111], [371], [891], [797], [957], [345], [666], [342], [159], [608, 117], [562], [608, 610], [260], [393], [296], [682], [608, 774, 788], [148], [776], [], [537], [203], [207], [765], [517, 821, 536, 510], [459, 434], [478], [683], [495], [875], [683, 566], [233], [985], [511], [710], [959], [973], [988], [673, 526, 527, 782, 664, 508], [235], [424], [206], [224], [539], [396], [945], [281, 285], [884, 406], [702], [542, 541], [605], [484, 814], 
[774], [601], [9], [652], [950], [993], [374], [603], [616], [206], [586], [930], [647], [343], [269], [328], [156], [153], [], [484, 871], [385, 101], [885], [], [794], [], [291], [664, 782, 662], [981], [410], [47], [364], [290], [708, 517], [852], [115], [916], [528], [115], [754], [459, 608], [360], [], [322], [872, 652, 447], [551], [751, 479], [97], [185, 153, 187], [202], [966], [971], [597], [599], [77], [72], [839], [629], [111], [718], [698], [223], [934], [360], [993], [632, 818, 819], [364], [984], [770, 806], [728], [528], [581], [322], [77], [894], [445], [869], [384], [617, 823], [], [38, 26], [160], [479], [250, 220, 248], [276], [901], [923], [308], [342], [], [838, 711, 648, 585, 631], [], [], [279], [78], [97], [], [746], [532], [688], [568, 831], [599, 955], [109, 973], [357], [919], [466], [908], [339], [573], [643], [715, 524, 461, 787], [301], [677], [], [143], [158], [451], [139], [700], [436], [774, 681, 620, 750, 721, 846], [704], [369], [936], [736], [901], [287], [], [835], [638, 639], [442], [734], [329, 397], [], [310], [970, 518, 671], [110], [580], [709, 710], [735], [265], [565], [560], [43], [282, 478], [800], [388], [177], [275], [340], [766], [290], [196], [148], [865, 850], [63], [157], [825], [240, 241], [920], [], [752], [694], [870], [770, 488, 843], [160], [700, 999], [], [958], [387], [556], [737], [76], [456], [701], [81], [942], [262], [61], [992], [545], [77], [262], [783], [548, 869, 655, 851], [367], [373], [115], [399], [], [], [565], [446], [129], [6], [748], [417], [970], [358], [815], [], [767], [71], [290], [909, 926], [788], [416], [399, 824, 600], [696], [358], [905, 750, 721], [272], [920, 829], [558], [854], [248], [160], [323], [622, 759], [870], [796], [318], [775], [140], [614, 887], [954, 950], [581, 479, 717], [784, 740], [571], [116], [519], [136], [355], [75], [956], [88], [491], [288], [874], [228], [559], [293], [241], [500], [486], [943], [350], [407], [146], [], [47], [99], [303], [402], [879], [], 
[51], [160], [573], [457], [842, 463], [872, 420], [221], [389], [527, 782, 916, 664, 508], [826], [382], [], [125], [581, 436, 479], [97], [254], [802], [499], [977, 978], [660], [629], [432], [261], [193, 153], [242], [838], [], [698], [257], [923], [222], [157], [570], [112], [359], [451], [292], [513, 776, 683, 875, 822, 541, 542], [378], [496], [775], [879], [836, 837], [752], [723], [724], [202], [295], [447], [801, 842], [185], [810, 878], [93], [380], [984], [964], [122], [384], [151], [], [528], [989], [612], [704], [220], [768], [86], [100], [573], [2, 3], [959], [], [459], [994], [498, 598], [286], [303], [276], [341], [14], [953], [856], [248], [797], [350], [903], [760], [103], [413], [608, 770, 414], [936], [840], [134], [34, 978], [302], [211], [597], [852], [135], [552], [356], [927], [214], [164], [292], [41], [477], [769], [709], [], [488], [500], [640], [918], [483], [117], [95], [897], [884], [853], [99], [472, 693], [213], [202], [592], [14], [767], [875], [228], [], [277], [608, 481, 482], [243], [204], [132], [875], [126], [439], [724, 536], [528], [962], [400, 667], [312], [477], [267], [716], [569], [339], [819, 541], [809, 618, 925], [193], [], [711], [967, 968, 923], [779], [533], [330], [], [], [668], [736], [262], [136], [379], [], [671], [281, 285], [955], [573], [968, 504], [331], [132], [784], [592], [621], [215], [172], [458], [150], [303], [799], [], [654], [515], [49], [490], [15], [223], [262], [682], [301], [592], [635], [291], [718, 628, 540], [625], [360], [716], [752], [20], [623, 795], [421], [618, 659], [122], [183], [232], [221], [], [399], [704], [19], [], [568], [317], [542], [822], [561, 950], [968], [719], [151], [], [679], [386], [581, 479, 511], [485], [], [946], [642], [368], [25], [239], [472], [550, 967, 968], [647], [368], [83], [854], [801], [], [772, 748], [118], [87], [873], [772], [114], [935], [218], [], [464], [966, 907, 572], [2], [622], [449], [961], [777, 499], [691], [69], [622, 759], [221], [257], 
[28], [328], [826], [821], [15], [73], [24], [357], [957], [29], [438], [521], [134], [866], [147], [187], [], [289], [671], [138], [26], [], [758], [738, 211], [617, 823], [613], [777], [217], [], [458], [772], [953], [835], [], [64, 55], [942], [327], [392], [871], [858], [810, 508], [833], [786], [924], [779], [586], [612], [402], [318], [842], [782], [], [673, 526, 527, 782, 664, 508], [501], [536], [153], [928], [819], [2, 3], [55], [138], [57], [661], [659], [173], [683], [655], [114], [669], [357], [887, 857], [277], [114], [616], [145], [355], [607], [2, 3], [385, 101], [859], [94], [813, 909], [896], [875], [652], [0, 389, 758], [984], [], [388], [74, 815], [11], [785], [540], [904], [], [860], [397], [810, 878], [489], [299], [171], [325], [546], [659], [555], [600], [437], [936], [353], [528], [739], [839], [727], [967], [121], [638, 639], [], [980], [196, 197, 198, 199], [258], [714], [], [729], [927], [67], [322], [579], [342], [8], [904, 905], [767], [40, 911, 27], [935], [296], [738], [882], [15], [439], [164], [580], [77], [331, 332, 338], [432], [150], [292], [188], [563], [391], [522], [492], [353], [804, 844], [660], [668], [619], [262], [661], [165], [683], [454, 624], [403], [201], [341], [90], [669], [474], [199], [942], [], [310], [859], [889], [482], [863], [925], [910], [], [491], [350], [183], [795], [586], [260], [681, 620], [265, 266], [439], [735], [32], [984], [], [668], [494], [278], [290], [292], [884], [785], [488], [833], [362], [], [128], [204], [583], [18], [127], [738, 968, 505], [99], [478], [155], [439], [111], [377], [976], [174], [836, 837, 459, 445], [466], [917], [12], [145], [883], [57], [898], [935], [918, 762, 923], [769], [], [130], [335], [760], [376], [937], [224, 223], [130], [621], [], [30], [497], [593], [], [658], [32, 28], [116, 126], [357], [277], [129], [407], [368], [515], [11], [408], [], [103], [57], [865], [506], [849], [770], [827], [730], [207], [562], [159], [184], [976], [74], [737, 651], [333], [309], 
[203], [533], [994], [25], [467], [771], [897], [332], [584], [269], [673, 527, 664, 508], [618, 562], [581, 479, 717], [454], [204], [267], [346], [706, 532], [959], [885], [434], [643], [142], [249], [505], [99], [341], [805], [], [373], [394], [789], [988], [870], [], [306], [790, 126], [793, 259], [115], [264], [510], [70], [772], [154, 478], [], [755], [36], [638, 639], [523], [765], [335], [911], [119], [], [169], [617], [276], [143], [310], [550], [228], [809], [357], [812], [565], [273], [457, 834], [64], [502], [336], [899, 521, 532, 412], [235], [696], [499], [175], [16], [165], [537], [783], [284], [606], [], [483], [], [935], [854], [917], [555], [444], [867], [578, 982], [633, 316], [714], [719], [25, 28], [280], [305], [], [793], [42], [587], [160], [261], [736], [281, 285], [687], [776], [918], [], [534, 729], [497], [987, 998], [352, 353], [661], [332], [143], [], [397], [495, 692], [655, 630, 474], [42], [979], [982, 703], [506], [672], [135], [4], [963], [205], [836, 837, 919], [364], [112], [645], [802], [481, 453], [691], [604], [633], [990], [160], [747], [425], [650], [73], [], [870], [954, 955, 953, 923], [628], [968], [41], [143], [420], [917], [142], [790], [988], [329], [568], [491], [954], [677], [7, 8], [609], [252], [527, 782, 673, 475], [800], [377], [439], [549], [213], [0], [76], [162], [905], [253], [971], [362], [913], [900], [290], [842, 693, 472, 445], [336], [774], [301], [621], [453, 454], [238], [43], [96], [], [989], [487, 620], [881], [761], [970, 795], [736], [80], [455], [651], [858], [228], [867, 919], [932], [401], [631], [827], [771], [999, 700], [651, 760], [875], [242, 243], [651, 631], [85], [165], [141, 142], [615], [244], [28], [575], [59], [700, 999], [928, 923, 960], [338], [557], [267], [868], [354], [601], [685], [520], [933, 934], [88], [714], [181], [459], [711], [762], [860, 919], [358], [13], [96], [472], [165], [694], [519], [], [839], [], [618, 813, 910, 532], [67], [80], [20], [113], [515, 695], [341], 
[608], [41], [18], [252], [738], [406], [980], [384], [838], [474], [161, 162, 167], [49], [], [84], [149], [406], [], [652, 413], [352], [760], [40], [82], [581, 479], [734], [57], [676], [115], [12], [363], [144], [733], [921], [945, 948, 950, 953], [449, 975], [127], [844], [986], [281], [471], [310], [273], [153, 203], [473], [258], [256], [1], [225], [426], [869, 879], [502], [227], [405], [59], [968, 504], [895], [444], [11], [770], [18], [893], [351], [50], [507], [775], [695], [592], [339], [748, 911, 692], [166], [515, 775], [916], [250], [214], [174], [552], [23], [432], [942], [257, 222], [], [130], [999, 700], [236], [395], [947], [637], [313], [141, 142], [818, 819, 854], [673, 487, 810], [480], [416, 638, 639], [699, 541, 542], [659], [966, 572], [806], [934], [518, 671], [220], [490], [276], [81], [682, 458], [805], [], [815], [581], [515], [543], [751], [142], [880], [2], [634], [792], [], [], [684], [665, 670], [763], [153], [296], [568], [203], [992], [741], [157], [], [737], [570, 830], [663], [496], [735], [88], [879], [], [993], [797], [385], [], [430], [3], [465], [67], [410], [795], [605], [823], [35], [873], [251], [866], [535, 479], [990], [992], [255], [984], [659], [866], [670], [69], [], [524, 787, 915], [882], [389], [991], [], [544], [564], [896, 804], [855], [984], [692], [298], [594], [557], [372], [652, 413], [528], [562], [743], [213], [937], [916], [191], [229], [923], [980], [], [630], [411], [695], [411], [895], [602], [68], [132], [51], [198], [710], [799, 831], [844], [1], [580], [798], [972], [64, 59], [375], [434], [10], [951], [220], [898], [195, 805], [60], [847], [551, 629], [964], [379], [986], [842], [205], [594], [191], [225], [229], [894], [794, 435], [611], [891], [99], [646], [941], [385], [358, 173], [774], [837, 836, 733], [], [703], [560], [268], [974], [150], [114], [], [390, 973], [216], [753], [131], [682], [822], [666], [416], [725], [291], [270], [212], [905], [567], [345, 690], [149], [920], [777], [94], 
[974], [760], [427], [723], [16], [528], [178], [562], [459, 543], [888], [280], [], [215], [], [740], [317], [709], [539], [239], [969, 692], [], [25], [736], [529, 830, 610], [63], [841, 697], [274], [63], [834, 869], [760], [396], [476], [71], [401], [382], [468], [923, 521, 762, 926], [357], [832], [977, 638, 639], [404], [11], [5], [433, 638, 639], [160], [941], [], [165], [247], [434], [576], [892], [259, 462], [938, 939, 943], [75], [619], [373, 463], [538, 668], [497, 884, 406], [892], [190], [392], [615], [30], [37], [364], [616], [414, 518, 535], [821], [487], [538], [], [817], [31], [977, 978], [646], [502], [434], [641], [355], [961], [64, 55], [752, 852], [382], [470], [253], [150], [110], [5], [195], [399, 636], [], [45], [816], [806, 630], [402], [987, 998], [617], [190], [626], [720], [6], [547], [980], [911, 539], [862], [208], [518, 489, 671], [], [640], [107], [917, 921], [90], [138], [508], [193, 186], [113], [118], [88], [520], [179], [98], [132], [196], [265, 267], [106], [76], [33], [180], [120], [848], [724], [404], [170], [], [584], [847], [644], [774], [28], [302], [819, 546], [399], [70], [769, 798], [578, 982], [831], [367], [919], [135], [161, 168], [455], [94], [944], [174], [616], [776], [], [115], [867], [733], [644], [849, 285], [955], [866], [78], [255], [911, 533], [532], [97], [153], [638, 639], [911], [439, 570, 764], [433], [847], [893], [855], [], [335], [302], [477], [543], [446], [333], [354], [940], [314], [560], [126], [358], [6], [553], [], [692, 886], [311], [], [251], [841, 759], [32, 30], [], [833], [518], [233], [21], [348], [296], [873], [748], [355, 489], [762], [853], [137], [495], [610], [279], [563], [707], [], [821, 703, 839, 975], [970, 979], [102], [601], [291], [637], [121], [317], [963], [95], [364], [838, 487, 459, 445, 638], [307], [512, 473], [224, 214], [475], [870], [23], [866], [], [156], [151], [336], [66], [901], [], [229], [757], [117], [221], [521, 809, 909, 987, 926], [844], [287], [404], [524], 
[756], [629], [], [], [309], [799], [173], [216], [448], [619, 846], [953], [13], [140], [], [768, 836, 842], [561], [573, 518], [629], [594, 982], [398], [68], [588, 790], [204, 185], [836, 837], [685], [279], [350], [688], [271], [55], [70], [645], [673, 526, 527, 664, 508], [607], [162], [97], [985], [515, 643], [139], [942], [278], [553], [291], [749], [], [], [205], [632], [827], [982], [701], [809, 925], [194], [881], [683, 432, 566], [182], [819], [142], [178], [989], [37], [421, 525, 975], [260], [305], [566], [846], [628], [556], [706, 789, 539, 799], [379], [254], [418], [369], [254], [770], [709], [353], [834, 457, 630], [], [360], [270], [950], [928, 868, 923], [760], [761], [23], [262], [194], [795], [31], [68], [252], [735], [358, 359], [682, 781], [725], [96], [763], [533], [809], [206], [873], [392], [798], [208, 179], [307], [606], [834, 515, 836, 837, 906], [628], [231, 156], [742, 872], [405], [651, 760, 827], [541], [802], [178], [70], [455], [292], [819, 854], [995], [393, 983], [775], [280], [13], [3], [946], [204], [], [136], [213], [470], [344], [596], [382], [748, 636], [909], [793], [186], [141], [616], [548, 851, 598, 632], [150], [], [693], [275], [740, 519], [224, 223], [], [], [622], [75], [720], [928, 960, 923], [772], [340], [819], [378], [804, 469], [789, 421], [273], [852, 186], [644], [], [301], [236], [9], [621], [15], [96], [48], [296], [222], [281], [715, 652, 764, 413], [249, 250], [549], [207], [860], [373], [80], [863], [312, 311], [552], [763], [494], [55], [562], [38], [131], [517], [276], [472, 693], [], [372], [566], [], [445], [495], [741], [823], [89], [549], [33], [288], [350], [753], [0], [979], [540], [921], [985], [], [194], [216], [866], [737], [779, 506], [802], [928], [416], [614], [713, 742], [218], [164], [951], [810, 878], [874], [435], [850, 732, 759], [729], [796], [734], [521], [698], [526, 453, 454, 608, 740], [], [182], [166], [102], [225, 465], [960], [896], [188], [989], [313], [322], [610, 862], 
[396], [327], [744, 657], [928, 960, 868, 415], [], [240, 238], [286], [557], [654], [929], [358], [302], [905], [989], [307], [578, 689], [943], [213], [933], [], [827], [694], [705], [360], [710], [341, 342], [836, 837], [538], [780], [205], [762, 923, 122], [70], [], [48], [273], [835], [809, 925], [448], [747], [220], [850], [187], [825], [472, 693], [79], [593], [719], [], [316], [723], [203], [398], [736], [581, 733, 479], [790, 588], [946], [86], [500], [474], [386, 101], [602], [836, 837], [131], [353], [18], [928], [329, 842], [705, 547], [681, 620, 526, 916, 906], [801], [274, 271], [], [713], [], [], [619, 883], [972], [614, 887], [313], [287], [829], [432], [355], [541], [40], [543], [648], [261], [459, 978, 445], [843], [572, 966], [487], [43], [], [964], [229], [254], [138], [870], [827, 849], [332], [247], [148], [434, 912], [599], [731], [100], [], [538], [498], [82], [671], [153], [875], [422], [72], [840], [548], [586, 864], [891], [930, 868, 967, 968, 504], [308], [301, 918], [128], [62], [750], [], [133], [720], [194], [13], [481], [575], [954, 943], [180], [78], [814], [42], [], [221], [301], [], [67], [258], [730], [3], [355], [240], [284], [879], [513], [690], [695], [280], [716], [300], [912], [574], [205], [131], [847], [843], [348], [800], [674], [658], [608, 869], [734], [459], [714], [295], [723], [699], [489], [607], [148], [809], [69], [347], [866], [203, 156], [412], [225], [35], [488], [16], [951], [], [293], [253], [189], [809, 926], [934, 923], [906], [509], [485], [517], [410], [772, 679, 488], [448], [526], [358], [721, 761, 831], [324], [536], [991], [892, 409], [613], [577], [47], [349], [406], [810, 655, 508], [461], [843], [594], [670], [], [483], [471], [248, 249], [668], [46], [392], [948], [], [694], [794], [562], [858, 467], [723], [902, 488], [650], [218, 215], [], [], [292], [40], [948, 572], [71], [918], [517], [146], [642], [181], [868, 923, 968, 725], [809, 659, 729], [819], [520], [440], [258], [792], [308], [795], 
[701], [304], [70], [518], [619, 846], [165], [276], [364], [392], [698], [930, 931], [105], [], [188], [221], [315], [169], [428], [77], [], [596], [288], [710, 767], [957], [553, 526], [58], [861], [305], [], [612], [619, 846], [907], [611], [152], [44], [456], [3], [814], [362], [896], [866], [523], [489, 274], [844], [905, 789], [], [917], [442], [], [199], [], [11], [686], [485, 685, 754], [334], [293], [505], [], [232, 852], [0, 758], [388], [189], [307], [], [], [375], [424, 423], [518], [38], [617], [111], [421], [752], [725], [908, 895], [107, 108], [291], [386], [707], [44], [578, 689], [114], [73, 74], [166], [668], [421], [259], [601], [908], [428], [881], [836, 837], [198], [302], [], [545], [226], [], [240, 241, 238], [834, 906], [235], [532], [517, 554, 536], [1], [445, 977, 236], [301], [675], [453, 454], [395, 758], [24], [263], [965], [301], [684], [558], [755], [684], [769, 633], [739], [151], [996, 309], [263], [154, 155], [928, 960], [], [613], [979], [813, 501], [779], [458], [728], [681, 620, 526], [319], [], [4], [131], [12], [182], [568], [608, 836, 837, 655, 636], [992], [802], [936, 943], [279], [34], [283], [540], [810], [364], [928], [283], [571], [521], [968, 504], [525], [370], [200, 155, 204], [481], [851], [396], [382], [652, 413], [], [232], [278], [625], [924], [342], [242], [829], [577], [264], [435], [440], [771, 507], [616], [674], [56], [472], [], [457], [466], [714, 542], [], [254], [162], [703], [395], [], [267], [140], [147], [303], [916], [616, 695], [971], [559, 764, 413], [835], [803], [469], [29], [341], [310], [], [9], [704], [270], [459], [3], [517, 839, 718], [756], [517, 975, 977, 536], [], [858], [], [947], [703], [228], [294], [963], [394], [864], [915], [979], [253], [380], [896], [117], [], [583], [836, 837], [794], [310], [701], [101], [552], [705, 888], [687], [15], [627], [], [552], [48], [364], [428], [471], [221], [549], [813, 910], [732], [279], [456], [711], [770], [28], [132], [826], [920], [344], [374], 
[237], [496], [], [96], [622], [503], [910, 659], [171], [585], [135], [393], [266], [669], [], [], [859], [66], [989], [569], [242], [962, 813, 827], [716], [746], [761], [346], [439], [113], [463, 412], [497, 442], [452, 689], [205], [673, 742, 681, 526, 527, 662, 664, 508], [741], [440], [874], [727], [660], [127], [0], [180, 195], [311], [], [525], [442, 494], [353], [453, 818], [786], [100], [240, 241, 238], [916], [160], [757], [164], [293], [654], [476], [919], [926], [713], [783], [262], [388], [829], [902], [438], [], [282], [521], [364], [177], [833], [658], [596], [215, 218], [576], [358], [752], [424, 423], [223], [78], [859], [605], [193], [156], [841], [82], [643], [77], [403], [173], [514, 836, 837], [], [420], [111], [33], [56], [249], [88], [830], [673, 478], [199], [136], [814], [591], [128], [], [984], [158], [372], [205, 750, 721], [555], [824, 633], [833], [125], [693], [589], [216], [571], [681, 491], [269, 249], [206], [602], [363], [538, 668], [155], [], [352], [389, 567], [], [4], [207], [616, 625, 724], [953], [], [300], [551], [586], [81], [190], [97], [104], [362], [714], [550], [160], [949], [883], [759], [989], [628], [741], [884], [803], [142], [95], [945], [208], [567, 827, 926], [204], [819], [654, 671], [55], [449], [235], [450], [793], [240, 241], [], [417], [436], [22], [2], [37], [886], [618], [277], [642, 462], [317], [733], [349], [754], [796], [425], [908, 404], [314], [416, 638], [8], [518], [893], [], [671], [574], [908, 895], [300], [224], [], [341], [292], [972, 976], [971], [918], [578, 689, 501, 885], [846], [444], [938], [], [471], [317], [657], [898], [836, 869], [302], [48], [175], [949], [728], [681, 620, 478], [14], [645], [141], [399], [614], [359], [], [920], [865, 411], [28], [758], [976], [318], [971], [409], [677], [500], [556], [279], [], [178], [616, 494], [206], [], [988], [673, 681, 810, 620, 527, 782, 664, 508], [928, 923], [439], [71], [37], [853], [881], [172], [170], [595], [156], [889, 541], [491], 
[45], [906], [866], [649], [901], [232, 249], [753], [149], [511], [895], [465], [590], [376], [545], [39], [364], [476], [782, 664], [3], [63], [599], [684], [69], [311], [681, 620, 508], [127], [611], [27], [667], [726], [865], [630], [135], [545], [838], [343], [49], [], [624], [123], [418], [205], [883], [993], [243], [766], [53], [151], [31], [215], [427], [213], [880], [208], [], [475], [621], [526, 673, 681, 620], [], [104], [578, 654], [490], [235], [702], [720], [832], [732], [528], [999, 861], [503], [639], [745], [362], [238], [731], [738], [204], [966, 907], [598], [719], [11], [444], [667], [215], [151], [407], [985], [50], [314], [], [216], [954], [], [962], [108, 973], [294], [], [945], [317], [16, 19], [388], [788], [806], [23], [883], [210], [836, 837], [929], [525], [], [301], [534], [959], [], [836, 775, 655], [461], [127], [822], [800], [996], [524, 461], [597], [169], [857], [36], [101], [21], [570], [255], [185], [635], [836, 457], [926], [470], [71], [851, 548], [290], [250], [489, 251], [], [333], [373], [820], [175], [], [453, 493], [318], [], [547], [165], [653], [911, 824, 474], [695], [395], [123], [430], [334], [657], [908, 814], [258, 259], [596], [523, 664], [834, 906], [188], [829], [563], [14], [724, 536], [235], [687], [147], [154], [936], [69], [760], [593], [606], [682], [114], [646], [857], [538, 185, 975], [897, 651, 760], [880], [112, 125], [922], [881], [662], [831, 721, 608], [661], [684], [391], [859], [518], [574], [902, 488], [967, 968, 504], [268], [529, 667], [61], [373], [873], [183], [785], [], [], [882], [929], [577], [434], [143], [804], [381], [], [], [436], [306], [953], [749], [367], [479], [522], [16], [815], [36], [58], [85], [610, 770, 862], [927], [58], [651], [954], [864], [830], [298], [212], [], [262], [987], [665], [558, 593, 819], [873], [276], [35, 36], [467], [956], [713], [753], [948], [231], [564], [899, 532, 725], [69], [947], [869], [423], [86], [838, 631], [2], [416, 702], [816], [825], [696], 
[925], [177], [35], [40, 46], [176], [242], [473], [], [], [115], [162, 167], [92], [682], [], [177], [], [34], [990], [968, 809, 849, 659, 923], [842], [430], [162, 166], [505, 899], [602], [907], [582], [488, 679, 455], [], [941], [780], [681, 620, 760, 508], [750], [190], [761, 831], [98], [], [513, 439], [543], [], [578, 982, 601], [949], [382], [149], [199], [235], [704], [], [794], [892], [784], [143], [268], [274], [138], [455, 440, 444], [171], [694], [779], [899], [883], [813, 942], [821], [230, 231], [], [296], [581], [98], [365], [387], [213], [756], [286], [56], [514], [300], [446], [650], [24], [357], [826], [673, 553, 526, 527, 782, 664, 508], [896, 897, 827], [475], [388], [574], [929], [992], [941], [648], [810, 508], [301], [761], [182], [294], [102], [356], [], [352], [430], [957], [9], [191], [419], [369], [207], [825], [28], [307], [996], [141], [119], [997, 947], [300], [617, 823], [59], [303], [368], [333], [130], [83, 883], [901], [336], [396, 973], [780], [784, 792, 477], [], [], [908, 404, 895], [625], [369], [125], [743], [300], [131], [678], [865], [168, 159], [666], [56], [70], [436, 581, 479], [574], [530], [728, 412], [374], [], [300], [716], [304], [791, 582], [265, 266], [482], [152], [811], [42], [971], [697, 823], [956], [451], [685], [20], [290], [709, 710, 526, 692], [73], [71], [455], [707], [62], [185], [545], [844], [933], [796], [405], [14], [783], [377], [979], [], [], [451], [230], [], [680], [811], [988], [400, 667], [179], [879], [587], [690], [428], [384], [366], [727], [923], [518, 671], [586], [905], [54], [223], [836, 837, 445], [565], [328, 973, 991], [212], [179], [782, 851, 664], [562], [703], [340], [302], [501], [13], [758], [229], [855], [122], [525], [430], [978], [233], [824], [360], [67], [461], [292], [464], [949, 927], [939, 943], [552], [222], [106], [346], [487], [256], [623], [202], [992], [502], [837, 433, 445], [329], [754], [520], [117], [642], [39, 47, 978], [619, 846], [889], [30], [92], [426], 
[231], [442], [], [373], [323], [894], [], [], [84], [738], [790], [604], [869], [846], [581], [518, 880], [], [791], [99, 8, 730], [626], [587], [673, 742, 664, 526, 527, 782, 508], [53], [489, 981], [387], [985], [286], [76], [110], [619, 846], [595], [388], [434, 533], [826], [745], [], [363], [879], [19], [768], [31], [475], [419, 648, 720], [938], [646], [320], [730], [240, 248], [910], [83], [900], [903], [942], [254], [910], [943, 923], [152], [775, 459], [96], [426], [216], [787], [339], [], [87], [44, 26], [3], [920], [181], [368], [858], [36], [825, 409, 892], [521], [921], [115], [911, 796], [616, 87], [832], [935, 923], [], [539], [105], [581, 479, 511], [228], [615], [290], [89, 951], [515], [500], [11], [527], [321], [603], [96], [94], [246], [57], [8, 7], [342], [14], [967, 968, 923], [150], [624], [575], [8], [854], [], [], [483], [124], [921, 667], [976], [], [510], [424, 423], [127], [197, 205], [403], [874], [658], [770, 788], [213], [773], [256], [744, 657], [675], [711], [595], [], [203], [849], [], [], [673, 742, 620, 526, 527, 664, 508], [513, 903], [551], [564], [358], [176], [839, 821], [925], [388], [354], [350], [253], [399, 501], [561], [598], [794], [804], [820], [929], [123], [529], [407], [64], [515, 593], [823], [366], [896], [907, 572], [], [], [596], [453, 454, 905, 750], [992], [169], [190], [557, 692, 509], [], [1], [968, 659], [], [805], [463], [187], [911, 636], [774], [], [976], [497], [999, 861], [636], [693, 472], [], [571], [971], [523], [548], [38], [808], [915], [652, 683], [1], [614], [261], [75], [929, 245], [], [206], [311], [923, 71, 868], [47], [], [538], [984], [608, 514], [230], [372], [549, 742], [981], [949], [196], [641, 642], [521], [267], [919, 858], [823], [297], [405], [289], [674], [410], [2], [758], [233], [199], [915], [407], [610], [99], [332], [723], [970, 979], [326], [897], [226], [898, 681, 620], [306], [248, 249], [], [900], [644], [774], [763], [8], [813, 910], [868], [108], [190], [516, 431], 
[839], [358], [], [612], [22], [914, 484], [258], [26], [384], [190], [810, 878], [954], [], [699], [902], [721], [959], [214], [177], [129], [232], [909, 827], [436], [781], [911, 658], [], [560], [], [82], [979], [897], [412], [800], [683], [98], [109], [311], [635], [686], [929], [320], [778], [53], [10], [683], [980], [97], [987, 998], [883], [], [970], [575], [177], [394], [948], [914], [125], [971], [374], [396], [703], [449, 979], [122], [550, 967, 968, 505], [], [198], [654], [596], [933, 934], [352], [608, 806], [579], [681, 810, 620, 508], [23], [317], [585], [877], [836, 837, 971], [6], [747], [375], [], [172], [481, 482], [173], [66, 68], [770], [777, 623], [386, 101], [36], [242, 243], [686], [472], [86], [219], [868, 949, 953], [905, 919], [336], [952], [40], [337], [2, 3], [224], [237], [588, 610, 492, 636], [114], [441, 572], [169], [637], [768, 560], [963, 966, 762, 923], [809, 659], [503], [], [792], [777], [], [472], [422], [416], [431], [731], [877], [395], [76], [6], [767], [314], [635], [196], [934], [360], [783], [344], [710], [164], [267], [294], [807], [571], [343], [359], [922, 918], [840], [640], [642], [484], [116], [170], [456], [267], [350], [], [132], [129], [71], [407], [88], [992], [488, 600], [626], [919], [513, 650, 819], [538], [732], [342], [701], [391, 758], [836, 837, 842, 445], [935], [927], [507], [923], [892], [205], [520], [96], [171], [675, 671], [800], [], [492], [518, 691, 570], [106], [62], [988], [463, 696], [], [], [116], [509], [834], [], [713], [425], [568], [492], [39], [631], [785], [908, 895], [124], [963, 335], [390], [969, 474], [221], [241], [876, 435], [199], [763, 597], [681, 620, 508], [162], [278], [], [940], [672], [193], [311], [814, 977, 978], [275], [952], [416], [136], [], [150], [387], [940], [448], [350], [967], [974], [120], [], [514], [723], [310], [287], [536], [234], [894], [52], [213], [842], [898], [], [810, 878], [608, 836, 837, 841], [193], [256], [923], [391], [617], [269], [663], [545], 
[], [], [608], [762, 532], [146], [297], [578, 630, 982, 601], [26], [390], [128], [189], [], [880, 414, 671], [914], [383], [17], [50], [512], [], [694], [248, 250], [252], [458], [162], [511], [600], [459], [961], [553, 446], [472], [673, 664, 526, 527, 632, 508], [522], [351], [421], [679], [360], [918], [914], [], [500], [418, 709, 710], [597], [974], [122], [976], [938], [92], [751], [509], [386], [109], [539], [277], [309], [802], [132], [143], [], [865, 968], [773], [962], [447], [], [805], [778], [737], [285], [992], [636], [44], [477], [607], [127], [71], [470], [250], [], [], [518, 665, 671], [771], [841, 610], [273], [694], [997], [366], [230], [72], [245], [238, 241], [653], [313], [879], [62], [911, 533], [390], [843], [18], [160], [988], [965], [812], [350], [813], [930, 934], [1], [689], [786], [351], [934], [403], [676, 597], [45], [456, 777, 623, 787], [367], [274], [568], [299], [679], [563], [327], [650], [829], [679], [633], [996], [644], [422], [131, 134], [719], [], [923], [17], [929], [416], [311], [708], [679], [467, 766], [898, 596], [11], [345, 690], [289], [673, 526, 527, 782, 664, 508], [], [705, 460, 975], [159], [712], [288, 290], [594], [274, 277], [732], [808, 515], [426], [75], [174], [864], [175], [902], [612], [332], [780], [372], [802], [], [809, 923], [507], [299], [228], [718, 821], [763], [858], [867], [789], [38, 44, 463, 26], [654], [], [578, 601, 415], [747], [269], [995], [466], [935], [579], [56], [546, 650, 819], [62], [483], [272], [608], [450], [776], [295], [677], [110], [708, 862], [70], [192], [428], [529], [791], [505], [447], [321], [156], [658], [724], [299], [898, 664, 527, 782, 508], [149], [144], [664, 508], [736], [78], [218], [234], [715], [789], [19], [462, 734], [514], [611], [926], [175, 185], [], [791], [], [116], [703], [440], [569], [626], [728], [581, 874], [404], [37], [405], [], [969], [713], [526, 664, 508], [933], [], [546, 486, 650, 402, 819, 541], [921], [471], [150], [666], [], [258, 270], 
[458], [419], [233], [746, 455], [403], [489], [227], [952], [533], [854], [132], [17], [23], [814], [114], [], [825], [205], [993], [581], [390], [445], [832], [89], [110], [711], [496], [226, 698], [911], [9], [252], [836, 837, 123], [112], [759], [263], [11], [724], [647], [693], [159], [814, 975], [711, 631], [322], [881], [27], [575], [823], [342], [137], [288], [422, 543], [304], [640], [613], [371], [24], [739], [440, 831, 455, 721, 737], [472, 693], [99], [586], [568], [], [735], [584], [299], [480], [225], [487], [23], [144], [673, 681, 620, 508], [69], [794], [717], [], [544], [447], [476], [735], [887], [484], [78], [835, 858], [251], [634], [363], [397], [685], [809, 925], [283], [543], [935], [643], [513], [521], [30], [31], [412], [957], [], [440], [394, 758], [223], [872, 745, 761], [292], [647], [326], [792], [758], [8, 7], [561], [604], [557], [47], [725], [942], [327], [267], [987, 924], [488], [956], [347], [135], [817], [819], [530, 531, 409, 892], [489], [155], [859], [420], [359], [298], [553], [608], [456], [349], [178], [578, 601], [217], [950], [673, 526, 527, 664, 508], [496], [920], [368], [772], [32], [518, 671], [742], [476], [], [993], [677, 587, 783], [659], [750, 189], [216], [626], [823], [105], [394], [505], [899], [100], [], [663], [94], [301], [551], [482, 754, 761], [], [755], [249], [71], [11], [673, 526, 527, 664, 508], [235], [847], [], [962, 923], [816], [150], [], [257], [161], [819, 601], [551], [424, 423], [892], [512], [723], [298], [884], [908, 404], [812], [119], [171], [968, 849], [265], [346], [853], [745], [313], [911, 824], [921], [763], [913], [600], [260], [973, 991], [884], [49], [625], [651], [203], [136], [307], [40, 46], [634], [488], [248, 250], [853], [820], [288, 293], [802], [601], [100], [679], [], [875], [168], [111], [961], [452, 911], [], [727], [588], [376], [239], [39], [84], [658], [497, 442], [], [24], [320], [755], [890], [236], [875], [559], [5], [438], [205], [796], [929], [547], [99], [431], 
[88], [847], [369], [25], [565], [674], [31], [], [383], [349], [101], [944], [476], [494], [10], [328], [159], [505], [563], [680], [83], [679], [140], [258], [768], [335], [122], [125], [876], [371], [65], [661], [829], [945], [618, 910], [578, 457, 689, 982, 601], [778], [785], [741], [923], [546, 650, 818, 819], [589], [40], [798], [148], [876], [], [66], [], [551], [], [713, 742], [990], [], [549, 616], [604], [50], [918], [673, 742, 620, 664, 526, 527, 632, 508], [784], [562], [411, 849, 762], [], [], [299], [874], [744, 657], [968], [904], [84], [533], [200, 232], [18], [469], [], [484], [622], [232], [398], [65], [680, 529], [69], [857], [908, 404], [85], [990], [239], [905, 750, 894, 799], [869], [82], [861], [872, 759], [572], [796], [572], [901, 907], [431], [722], [621], [], [252], [987], [487], [9], [480], [117], [310, 314], [681, 620], [10, 15], [977], [124], [91], [818], [827], [232], [964], [], [283], [14], [435], [220], [155], [152], [666], [805], [642, 542], [822], [579, 875], [205], [534], [515], [401], [744, 657], [691], [801], [548, 851], [22], [181], [141], [736], [42], [28], [431], [339], [152], [916], [581, 479], [74], [182], [], [280, 985], [578], [687], [], [262], [162], [299], [386, 101], [451], [869], [673, 664, 526, 527, 632, 508], [844, 539], [431], [297], [65], [369], [45], [486, 889], [568], [940], [], [], [260], [405], [353], [450], [104], [825], [144], [99], [], [93], [956], [27], [452, 911], [157], [62], [344], [493], [225], [195], [181], [538], [], [728], [975, 616], [631], [421], [805], [380], [204], [616, 600], [258], [95], [398], [364], [518], [], [847], [570], [890, 445], [548, 851, 831, 598], [798], [623], [731], [581, 479, 817, 511], [5, 390, 973], [195], [824, 834], [673, 526, 527, 664, 508], [366], [864], [214], [882], [991], [239, 222], [766], [54], [744], [17], [155], [757], [153], [183], [223], [980], [451], [805], [472], [326], [5], [179], [107], [518, 880], [350], [339], [626], [489, 919, 412], [666], [932], [295], 
[890], [417], [101], [631], [141], [234], [382], [959], [6], [293], [871], [90], [224], [874], [97], [354], [633], [454], [331], [948, 950, 951], [486], [383], [604], [996], [998, 987], [936], [570], [392], [255], [694], [308], [311], [337], [359], [901], [571], [495], [881], [423], [334], [289], [741], [309, 599], [19], [109], [653], [929], [581, 734], [455], [167], [125], [102], [722], [40, 46], [443], [937], [371], [341], [867], [463], [882], [773], [111, 52], [54], [82], [244], [601], [39], [481, 482], [638, 639], [449, 718, 733], [716], [738, 559], [652], [331], [236], [147], [765], [], [932], [110], [654], [669], [29], [420], [69], [69], [524, 461], [918], [251], [], [281, 283], [139], [627, 479], [79], [736], [872, 949], [488, 535], [126], [716], [409], [909, 910], [81], [210], [21], [92], [22], [], [26], [650], [102], [], [404], [33], [], [845], [33, 973], [471], [898, 605], [425], [255], [117], [], [962], [], [198], [246], [421, 506], [683], [877], [66, 68], [756], [196], [821], [603], [626], [136], [181], [752, 852], [533], [95], [488], [549], [531], [282], [358], [703], [152], [749], [696], [89], [564, 750], [392], [914], [18], [457], [867], [602], [825], [705, 547, 733], [190], [281], [661], [27], [488], [941, 923], [445], [864], [945], [], [724], [905], [229], [530], [82], [948], [342], [887], [397], [354], [497], [143], [669], [119], [368], [360], [17], [], [52], [968, 849], [235], [401], [581, 479, 436], [899], [890], [314], [662], [421], [975], [376], [851], [], [234], [543], [], [937], [395, 758], [537], [367], [8, 7], [553], [850], [395], [292], [736], [772], [771], [75], [54, 60], [330], [754], [399], [967, 968, 504], [], [241], [322], [115], [681, 810, 620, 662], [820], [466], [444], [], [278], [477], [673, 526, 527, 664, 508], [582, 998, 987], [148], [], [785], [497], [537], [461], [900], [927], [], [621], [25], [106], [153], [128], [233], [10], [268], [284], [], [397], [], [107], [610, 976], [191], [717], [931], [718], [528], [326], [728, 
458], [241], [42, 44], [676], [966, 907], [149], [703], [561], [852], [238], [100], [851, 532, 825], [324], [450], [552], [852], [549, 633], [162, 166, 167], [417, 975], [535], [483], [], [72], [55], [849, 505], [], [581], [619], [809], [912], [], [279], [259], [530], [386, 101], [432], [907], [514, 841, 608, 610, 630, 636], [310], [162, 167], [251, 246], [753], [935], [863], [945], [946, 322], [38], [277], [231], [652], [227], [494], [688], [832], [743], [592], [365], [169], [679], [742], [487], [405], [975], [481, 482], [71], [291], [340], [937, 923], [557], [849], [961], [729], [], [568], [341], [849], [760], [], [260], [254], [759], [934], [847], [418], [931], [846, 526], [952], [943], [], [179], [58], [960], [320], [293], [462], [297], [240, 241], [16], [520], [295], [488], [323], [512, 473], [438], [39], [732], [602], [721], [31], [247], [621], [308], [45], [], [86], [470, 736], [581, 479, 817, 511], [899], [118], [641], [459], [101], [98], [368], [139], [690], [186], [205, 246], [582, 951], [35, 37], [518, 665], [754], [195], [402], [576], [418, 709], [733], [159], [892], [100], [44, 26], [903], [801], [263], [734], [672], [997], [918], [992], [273], [913], [322], [426], [298], [869, 879], [66], [872, 759], [212], [452], [578], [456], [886], [93], [671], [248, 250], [572], [131], [62], [52], [153, 155], [718, 821], [546], [189], [28], [418], [32], [20], [21], [829], [157], [854], [201], [515], [795], [382], [955], [369, 381], [300], [297], [97], [566], [225], [115], [774, 731, 861], [314], [501], [207], [985], [675], [832], [833], [], [479], [287], [700], [536], [336], [392], [], [405], [867], [741], [603], [951, 949, 950, 954, 923], [119], [942], [460], [522], [800], [708], [456], [91], [104], [70], [886], [73, 815], [208], [75], [440], [100], [582], [821], [314], [363], [661], [94], [651], [], [386], [140, 142], [188], [597], [324], [281], [31], [992], [672], [365], [569], [174], [348, 349], [991], [604], [], [158], [629], [], [603], [789], [568, 715, 
716], [385], [983], [197], [725], [38], [884], [784, 477], [386], [], [30], [567], [779], [27], [481, 482], [958], [852, 752], [591], [680], [760], [523, 655, 765], [721, 750], [673, 681, 620, 526, 527, 664, 508], [125], [716, 912], [411], [712], [429], [850], [747], [472], [269], [149], [739], [489], [], [886], [278], [706], [614], [134], [678], [833], [680, 910, 659, 828], [267], [900], [662], [403], [147], [847], [65], [169], [], [662], [293], [392], [205, 589], [435], [271], [75], [979], [501], [238], [262], [662], [794], [949], [404], [923], [564], [734], [903], [323], [358, 359], [380], [382], [504], [76], [117], [48], [619, 846], [289], [973], [463], [877], [719], [931], [907, 572], [667], [6, 983], [500], [218], [], [937, 567], [361], [], [432], [636], [374], [], [565], [544, 672, 596], [520], [670, 518], [652], [197], [500], [30], [39], [92], [189, 175], [733], [], [458], [], [737], [295], [810], [506], [652, 413], [424, 423], [117], [428], [455], [95], [458], [659, 666], [305], [521], [239], [775, 842, 616], [355], [908], [755], [72], [293], [330], [907, 966], [764], [707], [627], [952], [614, 887], [789], [183], [347], [441], [], [867], [694], [637], [454], [174], [896, 804], [247], [851], [185], [985], [239], [608, 836, 837, 582], [127], [], [114], [], [552], [10], [683], [70], [], [512], [820], [162, 168], [247], [845], [457], [852], [924], [685], [842, 879, 977, 978, 445, 638, 639], [], [580], [383], [385], [250], [444], [3], [801, 433, 793], [152], [162], [393], [280], [943, 923], [562], [339], [496], [636], [203], [921, 446], [392], [], [682], [57], [179], [262], [586, 652, 413], [578, 903, 689, 885], [], [388], [347], [806, 630], [228], [494], [279], [600], [739], [131], [232], [205], [], [610, 487, 655], [674], [661], [253], [115], [842, 638, 639], [569], [649], [64, 55], [400, 667], [201], [801], [277], [49], [40, 46], [928], [287], [500], [267], [850], [674], [834], [411], [19], [758], [438], [954], [769], [964], [918], [880], [734], [555], 
[539], [816], [919], [339], [967, 968, 504], [213], [425, 858], [789, 799], [759], [614], [4], [401], [959], [196], [678, 487], [389], [175], [42], [275], [26], [48], [790], [764], [762], [89], [294], [667], [755], [999], [726], [847], [127], [834, 906], [774], [816], [395], [647], [853], [345], [17], [712], [628], [923, 934], [868], [800], [480], [930, 934, 936], [934], [295], [520, 516], [181], [572], [522], [894], [703], [159], [], [924], [883], [435, 794], [757], [196], [867], [22], [729], [430], [165, 852], [137], [724], [63], [324], [127], [], [4], [], [869], [256, 244], [552], [529], [171], [317], [140], [548], [750], [928, 923, 960, 927], [304], [896], [319], [974], [96], [595], [640], [320], [134], [460], [297], [65], [441], [307], [570], [343], [953], [350], [560], [195], [678], [557], [518], [], [645], [367], [576], [852], [147], [207, 219], [497], [238, 241], [748, 414], [145, 146], [235], [393], [32, 30], [559], [966, 441], [], [375], [814, 977], [107], [], [921], [710], [909, 926], [207], [834, 630, 703], [190], [466], [846], [19], [247], [283], [315], [334], [986], [], [832], [11], [57], [673, 526, 527, 782, 664, 282, 508], [513], [284], [69], [292], [], [769, 773], [185], [161, 785], [603], [516, 520, 697], [248], [514], [679, 327], [241, 238], [254], [495], [468], [312], [368], [700], [72, 815], [318], [539], [772], [263], [524], [151], [889], [656, 479], [135], [903, 689], [619, 846], [653], [951], [623], [901], [79], [32, 31], [249], [], [], [336], [377], [162], [], [234], [679], [673, 553, 526, 527, 782, 664, 508], [102], [411], [892], [437], [225], [629], [101], [582, 519, 939, 943], [868], [700], [923, 934, 933], [389], [742], [843], [98], [9], [981], [112, 327], [710], [567], [292], [], [803, 866], [205], [572], [471], [678], [473], [560], [85], [588], [105], [17], [176], [], [524, 461], [887], [488, 718, 536], [644], [555], [], [404], [813, 910, 659], [824], [582], [48], [88], [980], [85], [955], [825], [183], [756], [395], [530], [163], 
[845], [132], [715], [139], [679], [549], [484], [489], [445, 638], [944], [581], [325], [40, 46], [310], [812], [850], [76], [483], [950], [854], [447], [874], [276], [710], [857], [161], [32, 30, 31], [360], [816], [1], [883], [298], [836, 837, 655, 879], [], [921], [884], [778], [754], [482], [715], [525], [927], [898, 784], [], [725], [122], [699], [403], [24], [76], [191], [761], [534], [238, 240], [526, 527, 782, 664], [], [3], [794], [174], [716], [569], [342], [261], [245], [284], [771], [662], [974], [147], [531], [768], [646], [234], [], [641], [912], [493], [975, 822, 541, 542], [752], [212, 217], [18], [353], [653], [512], [141], [388], [57], [398], [486], [836, 837], [540], [119], [901], [], [974], [357], [913], [158], [554], [105], [634], [703], [625], [299], [66, 68], [915], [115], [591], [251], [459, 445], [378], [814], [908], [188], [674], [980], [430], [940], [672], [102], [628], [323], [464], [718], [721], [15], [], [973, 108], [214], [401], [758], [390], [960], [439], [948, 957], [92], [869, 818], [621], [240, 239], [], [301], [], [25], [679], [664, 851], [319], [463], [972, 500], [332], [411], [909], [510], [524, 461], [91], [734], [], [384], [202], [81], [268], [470, 406], [509], [894], [922], [851, 548], [604], [424, 922], [658], [515, 808, 639], [995], [386, 101], [389], [825], [589], [991], [636], [866], [884], [794], [86], [449], [849], [553], [], [384], [31], [935, 937, 923], [701], [397], [972], [531], [130], [729], [891], [275], [17], [164], [], [563], [205], [575], [774], [828], [532, 762], [], [386], [482], [903], [602], [312], [257], [886], [531], [344], [161], [338], [779], [264], [340], [203], [893], [131], [958], [940], [258], [689], [29], [207], [484], [], [57], [427], [291], [], [717, 656, 436, 479], [781], [24], [513], [947, 997, 114], [720], [480], [364], [499], [47], [], [265], [802], [598], [119], [69], [916], [240, 241], [785], [593], [132], [60], [640], [997], [492], [159], [184], [505], [264], [645], [435, 876], [680], 
[396], [594], [919], [324, 325], [], [245, 254], [579], [551], [92], [624, 453], [], [438], [853], [826], [297], [669], [], [874], [384], [558], [682], [117], [572], [990], [42], [606], [437], [681, 810, 620], [], [87], [496], [0], [41], [632], [776], [526, 786], [627], [], [728], [562], [539], [385], [73, 815], [47], [259], [59], [48], [120], [669], [528], [322], [37], [458], [281], [950], [912], [781], [673, 613, 761, 605], [256], [214], [896], [], [518], [292], [], [318], [958], [162], [195], [800], [43], [439], [], [628], [428], [952], [41], [453, 454], [184], [123], [956, 957], [498], [935], [324], [651], [525], [704], [457], [844], [754], [], [810], [754], [107], [849], [532, 398], [791], [], [582, 790], [315], [520], [874], [138], [448], [491], [], [223], [748], [112], [213], [242], [137], [260], [523], [182], [305, 306], [868], [365], [700, 999], [636], [128], [268], [252], [694], [241, 238], [246, 251], [98], [426], [522], [948], [514], [371, 373], [487], [56], [329], [502], [456, 652], [139], [520], [833], [724], [318], [499], [148], [108], [384, 383], [305], [36], [234], [437], [73], [125], [315], [132], [373], [67], [615], [361], [919], [667, 151], [870], [90], [901], [331], [955], [766], [], [], [962, 923, 935], [136], [43], [37], [33], [178], [244], [886], [], [714], [727], [704], [435, 876], [221], [543], [126], [384], [325], [91], [498], [291], [335], [603], [], [201], [510], [666], [649], [483], [948], [232], [551], [198], [751, 479], [763], [513], [], [259], [560], [473], [877], [421, 904, 905], [], [783], [969], [468], [5], [811], [316], [35], [617], [907, 440], [408], [533], [687], [641], [963], [], [209], [782, 664], [213, 248], [899], [672], [645], [987, 998], [270], [679], [779], [849], [330], [618, 909, 828], [973], [614], [], [828], [495], [69], [624, 453, 454], [753], [623], [547], [362], [848, 632], [428], [708, 458], [893], [936], [699], [702], [228], [626], [868, 967, 968, 504], [], [], [871], [897], [481, 482], [379], [633], [904], 
[455], [287], [262], [432], [308], [985], [489, 733, 919], [49], [989], [316], [117], [471], [519, 907, 440], [544, 909, 828], [917, 921], [172], [697], [267], [], [140], [587, 596], [443, 836, 837], [921], [], [57], [350], [838, 631], [568], [59], [205], [310], [242], [309], [535], [518], [323], [325], [263], [20], [582, 938], [240], [279], [219], [191], [126], [466], [683, 558], [603], [988], [551], [458, 708], [131], [210], [650], [322], [783], [477], [219], [981], [2], [196], [427], [628], [304], [], [], [113], [419], [746], [34], [836, 837, 656, 785], [48], [767], [630], [892], [882], [457], [], [386, 101], [60], [5], [72], [900], [614], [717, 479], [749], [813], [563], [440], [169], [385, 101], [514, 948, 836, 837, 852, 489, 636], [388], [100], [379], [456], [400, 667], [999, 893], [809, 925], [351], [834, 906], [224, 223], [206, 221], [660], [688], [], [209], [396], [549], [], [721], [942, 923], [352], [578], [769, 71], [805], [599], [585], [639], [767], [849, 505], [581, 479], [841, 610], [313], [746], [834, 585], [839], [468], [53], [418, 709, 710, 767], [929, 968], [], [62], [783], [528], [132], [194], [137], [337], [941], [544], [594], [400, 667], [712], [], [250], [824], [], [72], [327], [714], [380], [], [577], [371], [900], [93], [111], [223], [486], [749], [637], [428], [98], [211], [], [695], [815], [890], [15], [868, 505], [891], [73], [617, 823], [], [941], [301], [681, 620], [44, 48], [668], [923], [607], [684], [322], [980], [928], [892], [572], [888], [76], [873], [518], [], [400], [136], [114], [310], [911], [599], [806], [257], [516, 431], [27], [965], [], [87], [660], [24], [540], [462], [340], [48], [], [7], [774], [624, 453], [322], [177, 170], [46], [605], [931], [], [323], [478], [148], [738], [217], [603], [908, 895], [262], [796], [517, 733], [9], [458], [997], [781], [92], [531], [], [543], [15], [548], [759], [343], [530], [335], [385], [395], [136], [105], [8], [892], [839, 718], [860], [507, 695], [664], [467], [706], [938], [735], 
[8, 7], [651], [72], [279], [], [921, 917], [295], [191], [996], [848, 632], [263], [609, 500], [728, 281], [280], [277], [378], [52], [588, 692, 415], [], [949], [73], [283], [182], [601], [306], [395], [40, 46], [122], [425], [862], [777], [195], [99], [539], [604], [329], [976, 979], [852], [], [659], [126], [289], [654], [551], [611], [579, 881], [563], [728], [315], [792], [931], [114], [902], [372], [424, 423], [205, 653], [149], [792], [121], [540], [], [274], [514], [128], [399], [903], [], [646], [883], [942], [734], [669], [2], [588], [615], [966], [935], [837, 518, 671], [559], [778], [], [677], [237], [645], [59], [802, 518], [251, 575], [911], [352], [619], [510], [571], [357], [330], [347], [389, 395], [300], [414], [449, 975], [85], [597], [207], [938], [11], [911], [933], [173], [823], [440], [194], [315], [308], [470], [53], [52], [491], [909, 926], [136], [118], [658], [608, 869, 824], [945], [918], [96], [242], [392], [426], [168], [989], [348, 825], [39], [638, 639], [], [350], [746], [50], [84], [650], [274], [991], [599], [295], [], [417], [317], [253], [645], [193, 187], [452, 911], [825], [406], [651, 187], [19], [932], [27], [494], [834, 906, 982], [316], [479, 511], [123], [72], [106], [514], [361], [160], [539], [54], [682, 538], [219], [162], [218], [811], [109], [312, 311], [], [452], [793], [], [488, 843], [299], [350], [289], [203], [834, 906], [572], [707], [351], [855], [866], [996], [159], [840], [948], [690], [592], [801, 570], [165], [18], [696], [655], [480], [580], [694], [707], [715], [340], [736, 681, 620], [727], [422], [208], [], [632], [142], [733], [62], [], [4], [288], [791], [838, 711], [650], [348], [790], [625], [272], [669], [208], [], [58], [525], [193], [378], [798], [565], [781], [344], [61], [293], [13], [782, 851], [239], [409], [794], [938], [944], [865], [388], [202, 189], [322], [194], [430], [371], [172], [546, 650, 819], [2, 3], [912], [394], [622, 759], [312], [945], [801, 983], [903, 789], [985], [162], 
[397], [769, 622], [], [331], [574], [141], [649], [567], [], [718, 637], [61], [85], [487], [578, 854], [57], [328], [583], [863], [566], [375], [], [], [240], [990], [277, 278], [625], [961], [], [969], [503, 572], [949], [508], [866], [381], [560], [27], [72], [338], [466], [375], [227], [352], [603], [137], [265], [754], [892], [589], [573], [286], [325], [754], [539], [613], [987, 923], [787], [959], [63], [192], [83], [507], [489, 86], [869], [738, 580], [385], [196], [924], [], [610], [], [414], [536, 484, 871], [538], [168, 211, 159], [143], [132], [961, 659], [858], [371], [292], [2, 3], [902], [301], [544], [100], [675], [335], [92], [309], [737], [418], [522], [907], [842], [769], [232], [482], [70], [866], [576], [732], [480], [146], [324], [574], [699], [82], [540], [591], [479], [409, 892], [], [940], [127], [29], [255], [], [282], [655, 570], [313], [30], [963], [], [213, 205], [661], [13], [741], [93], [366], [974], [329], [387], [790], [435], [328], [911, 533, 539], [564], [263], [183], [944], [98], [578, 982], [952], [656, 784, 477], [353], [786], [372], [], [142], [369], [14], [284], [540], [], [243], [976], [224, 805, 223], [], [78], [420], [687], [55], [285], [736], [203, 246], [521], [911], [320], [769], [132], [258], [891], [650], [809, 618], [514, 763, 445], [905, 493], [295], [230, 231], [74], [], [877], [770], [267], [40, 46], [521], [889], [80], [140, 142], [817], [268], [129], [69], [459, 445], [24], [176], [487], [714], [], [576], [135], [517], [929], [599], [347], [117], [802], [732], [868], [49, 50], [788], [85], [741], [642], [239], [471], [443], [481], [232], [153], [790], [826], [], [], [83], [937], [], [750], [533], [581], [], [560], [619, 844, 846, 761], [98], [514, 515], [752], [825], [75], [493], [371], [29], [328], [234], [738], [32], [644, 470], [630], [786], [354], [407], [33], [239], [], [79], [368], [166], [836, 837], [626], [916], [322], [733, 862], [205], [622, 179, 245], [781], [659, 923, 925, 809, 950], [], [190], 
[595], [369], [858], [861], [432], [517], [51], [836, 837, 975], [159], [682], [985], [578, 601], [645], [301], [444, 671], [326], [344], [943, 923], [], [147], [856], [597], [508], [261], [187], [722], [344], [451], [], [311, 312], [121], [535], [315], [891], [432], [448], [], [362], [209], [494], [488], [135], [217], [442], [176], [94], [276], [107], [], [793], [826], [], [880], [914], [14], [62], [567, 827], [], [828], [907], [275], [937], [851], [933], [73], [930, 415], [531], [920], [167], [422], [482], [721], [406], [], [774], [426], [438], [967, 968, 911, 504], [578, 834, 982], [382], [858], [112], [340], [169], [891], [146], [162, 167], [273], [716], [227], [662, 632, 761], [], [642], [2], [532], [638, 639], [561], [347], [400, 667], [731], [175], [582, 728], [908, 404], [67, 54], [9, 489], [805], [], [627, 654], [749], [138], [652, 465, 792, 413], [577], [180], [205], [185], [437], [302], [886], [368], [], [439], [771], [], [93], [187], [], [15], [554], [324], [], [274], [721], [883], [28], [233], [544, 909, 827], [766], [44], [320], [247], [500, 286], [], [355], [779], [681, 620, 526, 508], [453], [897], [148], [478], [658], [825], [984], [11], [399], [823], [140], [127], [309], [763, 597], [898], [675], [61], [210], [194], [997], [339], [], [962], [374], [801, 836, 445], [986], [871], [109], [619], [115], [116], [452], [751], [205], [896, 804], [382], [998], [506, 117], [656], [464], [779], [784], [289], [905, 619, 846, 831], [309, 599], [394], [10], [824, 735], [900], [683], [780, 976, 914, 405], [], [711], [371], [643], [205], [534], [290], [582], [115], [379], [221], [951], [820], [], [224], [879, 977], [159], [608, 999, 861], [523], [636], [717], [324], [759], [944], [365], [955], [996], [613], [34], [866], [579, 421], [270], [953], [538], [437], [163], [571], [], [822, 542], [86], [], [574], [681, 526, 664, 761], [608, 515, 788], [338, 333], [93], [522], [946], [560], [652, 872], [542], [944], [936], [422], [], [319], [183], [996], [157], [28], 
[515], [85], [187], [181], [257], [696], [106], [203], [871], [554], [19], [902], [782, 664], [901], [741], [179], [22, 23], [508], [597], [767], [389], [616], [559], [860], [510], [345], [904], [107], [481], [410], [], [588], [], [], [987, 998], [], [213], [84], [647, 968, 809, 659], [63], [368], [], [227], [700], [72], [145], [876, 435], [130], [779], [], [702], [], [489, 85], [364], [719], [658], [933], [76], [943, 692, 963, 868], [951], [172], [837, 454], [276], [622], [453, 454, 553, 917], [164], [839, 660], [271], [301], [509], [591], [13], [444], [144, 540], [801], [157], [576], [788], [18], [397], [863], [842], [196], [731], [854], [800], [153], [487], [561], [394], [460], [], [], [393, 108], [825], [442], [830, 691], [980], [140], [405], [564], [695], [191], [], [332], [13], [], [93], [234, 236], [555, 734], [169], [573], [854], [805], [405], [], [602], [256], [261], [999], [778], [879], [880, 879], [985], [262], [252], [516], [630], [31], [31], [66], [66], [202], [333], [650], [254], [428], [129], [257], [749], [79], [816], [376], [367], [344], [55], [440], [618, 813, 909, 827], [991], [74, 815], [772], [159], [712], [870], [581, 479, 511], [491], [987], [363], [336], [537], [231], [604], [862], [300], [529], [30], [948], [651], [9], [845], [673, 526, 527, 664, 508], [352, 351], [74], [234], [905, 831], [707], [441], [565], [764], [58], [291], [6], [671, 518, 535], [477], [385], [683], [44], [833], [21], [87], [], [55], [194], [713], [194], [83], [452], [830], [590], [643], [845], [613], [288, 290], [221], [362], [939], [], [882], [682], [582, 950, 951], [225], [326], [414], [158], [65], [181], [], [375], [], [710], [6], [313], [256], [673, 613, 681, 620, 526, 527, 662, 632, 508], [419], [98], [780], [805], [898], [52], [836, 837, 552, 459], [961], [97], [995], [574], [576], [304], [664, 782, 527], [559], [185], [687], [352], [81], [581], [173], [836, 837, 850], [584], [473], [896, 567], [306], [574], [], [900], [], [168], [114], [424], [], [34], [], 
[795], [661, 479], [], [113], [783], [], [911, 533, 539], [468], [834, 650, 851], [739], [104], [480], [781], [988], [518], [], [981], [952], [450], [446], [], [703], [370], [188], [505, 827], [844], [984], [362], [], [532], [82], [748], [497], [532], [], [677], [569], [257], [246], [349], [862], [372], [645, 733], [247], [], [907, 720], [379], [287], [65, 395], [524, 461, 728], [], [250], [847], [], [301], [851], [], [187], [844], [535], [335], [398], [323], [453], [528], [520], [948], [222], [305], [230], [157], [281, 282], [], [351], [35], [112], [673, 527, 761, 664, 633], [682], [943], [715, 524, 461], [896], [], [861], [422], [628], [217], [922], [12], [321], [777], [87], [768], [126], [284], [65], [139], [31], [497, 557], [307], [619, 818], [745], [706], [688], [915], [279], [130], [822], [609], [], [552], [567, 926], [959], [716], [300], [916], [920], [622], [145], [977, 978], [], [272], [892], [506], [125], [615], [872], [702], [272], [466], [758], [738, 580, 428], [658, 911], [923], [387], [863], [556], [202], [991], [485, 632], [886], [87], [565], [801], [162], [390], [360], [161], [144], [], [875], [771], [457], [836, 837, 785], [153], [433], [481, 482], [834], [96], [462], [21], [471], [773], [440, 455], [231], [88], [684], [572], [576], [379], [984], [484], [7], [407], [787], [231], [941], [592], [919], [581, 654], [657], [957], [881], [258], [337], [111], [999, 861], [930], [104], [542], [497], [673, 664, 526, 527, 508], [23], [251], [917], [862], [64], [526, 664, 508], [404], [160], [123], [381], [], [843, 702], [971], [289], [799], [753], [711], [303], [480], [72, 815], [215], [581], [887], [748], [453], [786], [273, 274], [], [985], [], [807], [970, 980], [2], [426], [720], [99], [628], [], [635], [781], [612, 879], [547], [51], [14], [570], [152], [308], [908, 404], [386], [763], [955], [], [157], [68], [], [194], [495], [232], [927], [495], [577], [829], [269], [956], [680], [236], [], [49], [682], [138], [], [884], [722], [361], [255], [530, 
844], [273], [958], [357], [206], [741], [785], [535], [372], [391], [355], [289], [912], [493], [851], [195], [4], [622], [808], [855], [564], [394], [485], [301], [713], [763, 597], [379], [265], [183], [166, 958], [787, 501], [113], [29], [477], [240, 241, 238], [203], [907, 910, 532, 923, 924, 936, 966, 762], [130], [548, 782, 851, 598, 664, 889], [434], [939, 943], [717], [955], [76], [735], [561], [222], [636], [146], [48], [714, 539], [994], [415], [860], [856], [659], [421, 882], [114], [628, 536], [475], [], [], [683], [284], [288], [372], [515], [599], [384], [990], [288], [19], [58], [514, 836, 837, 703], [], [884], [930], [98], [486], [370], [231], [977], [840], [973], [277], [380], [676], [41], [934], [], [646], [569], [310], [971], [390], [710], [791], [597, 763], [], [378], [186], [654], [496], [431], [376], [834, 457], [588, 285], [], [691, 638, 639], [704], [82], [576], [850], [779], [353], [319], [542], [954, 950], [123], [636], [699, 541], [617], [678], [443], [58], [666], [77], [106], [460, 557, 718, 814], [734], [955], [561], [426], [947], [294], [414, 703, 841, 608], [944], [471], [111], [155], [286], [724], [893], [538], [641], [423, 424], [430], [], [680], [373], [304], [450], [58], [602], [637], [174], [800], [23], [722], [289], [756], [448, 853], [287], [683], [644], [463], [977, 978], [881], [300], [524, 461], [855], [], [111], [514, 792], [651, 700], [], [320], [485, 848], [621], [577], [405], [988], [938], [481], [880], [], [603], [33], [673, 742, 664, 526, 527, 782, 632, 508], [90], [], [714], [221], [708], [], [70], [512], [814], [281], [], [993], [218], [490], [347], [164], [957], [968, 918], [565], [595, 958], [815], [884, 406], [608, 610, 836, 837], [86], [945], [903], [671], [535], [398], [781], [239], [756], [768], [854], [455], [106], [387], [983], [383], [274], [682], [], [908, 404], [581, 479, 817], [913], [507], [771], [675, 478], [172], [672], [91], [154], [98], [948], [565], [728], [298], [268], [335], [434, 435], [223], 
[85, 86], [850, 791], [682], [256], [416], [292], [968], [376], [581], [], [781], [], [], [776], [810, 878], [162], [], [812], [913], [957], [970], [297], [615], [425], [2], [321], [190], [770], [602], [440], [394], [88], [144, 127], [604], [], [462], [9], [654], [155], [], [481, 482], [591], [574], [274], [329], [968, 618], [123], [905], [319], [546], [296], [623], [173, 176], [868, 923, 659, 532], [375], [452], [394], [525], [358, 359], [], [73, 77], [497], [998], [], [418], [105], [647], [437], [218], [636], [559], [300], [762], [620, 594], [], [459, 445, 638], [868, 438], [985], [906], [948, 572, 849], [189], [749], [720], [241], [692, 948, 950, 951], [269], [478], [462], [437], [45], [896, 435, 861], [635], [733], [670], [], [206], [455], [329], [678], [19], [547], [419], [], [724], [212], [852], [20], [661], [989], [919], [369], [626], [650, 818, 819, 632], [101], [422], [116], [691], [496], [907, 760], [314], [122, 123], [983], [89], [892], [494], [371], [769, 911], [414, 455, 631], [542], [], [], [254], [85], [796], [973, 991], [777], [45], [31], [380], [458], [337], [950], [770], [498], [762], [979, 972], [242, 243], [952], [], [658], [328], [901], [200], [634], [414], [292], [776], [868], [357], [742, 728], [134], [85], [553], [198], [729], [28], [88], [314], [160], [], [799], [510], [913], [707], [205], [204, 155], [553, 621, 882], [152], [349], [619, 846], [56], [667, 263], [801, 983], [653], [269], [429, 981], [42], [448], [], [853], [444], [776], [847], [870], [159], [], [494], [66], [148], [], [162], [507], [31], [670], [811], [257], [198], [863], [958], [776], [], [321], [986], [322], [940], [712], [825], [518], [501, 568], [195], [287], [340], [796], [836, 837, 775, 759, 445], [551], [162, 167], [819], [424], [489], [32], [793], [37], [236], [710, 767], [777], [591], [], [433], [459], [886], [380], [834, 982], [532], [434, 631], [878], [308, 309], [194], [586, 847], [284], [418], [33], [234], [647], [834, 570], [105], [99], [146], [122], [], [97], 
[429], [], [539], [996], [216], [811], [894], [77], [749], [66], [524, 461], [], [650], [668], [790], [93], [179], [313], [889], [524], [713], [489, 381], [843], [343], [272], [412], [16], [679], [777], [94], [680], [290], [279], [], [719], [], [117], [270], [693, 472, 445], [], [8], [841], [822], [523], [102], [712], [467], [343], [838], [602], [478], [86], [461, 465], [426], [626], [742], [752, 852], [786], [92], [288], [189], [908], [425], [192], [27], [257], [986], [836, 837, 793], [460], [311], [870], [115], [579], [29], [], [810, 878], [911], [2], [877], [189], [321], [347], [], [321], [854], [459], [205], [670], [911], [681, 810, 620, 508], [599], [943], [931], [985], [425], [], [191], [7], [23], [800], [876], [813], [231, 232], [831], [967, 504], [716, 573], [961], [277], [241], [900], [225], [378], [922, 441, 762], [587], [240, 241], [820], [236], [312, 311], [149], [518, 671], [896], [594], [962, 923], [949], [256, 220], [868], [], [874], [159], [936], [226], [782, 851], [849], [110], [373], [989], [543], [533, 824, 735], [181], [436], [], [488, 679], [381], [10], [245], [269], [81], [995], [968], [359], [904, 905, 831], [789], [647], [303], [23], [609], [650, 906, 834, 632], [340], [458], [861], [296], [193], [2], [581, 479, 717], [170], [768], [361], [917], [612], [901, 427], [979], [125], [90], [390], [346], [881], [98], [547], [974], [234], [188], [35], [298], [369], [683, 432], [771], [757], [436], [778, 943], [910, 659], [697], [236, 237], [500], [49], [979], [524, 461], [489, 429, 981], [653], [381], [400, 667], [434], [590], [], [904, 309], [107], [457, 869], [805], [661], [324], [217], [441, 572], [914, 780], [174], [759], [64, 55], [88], [], [605], [188, 189], [], [727], [198], [190], [497], [236], [310], [675], [42], [723], [187, 201], [944], [895], [809], [722], [143], [400, 667], [810, 620, 526, 508], [44], [221], [365], [930, 588], [346], [836, 837], [276], [925], [811, 753], [381], [40], [121], [908, 895], [732], [470], [763, 597], [816], 
[997, 947], [365], [122], [152], [611], [517, 733], [136], [673, 526, 527, 664, 508], [123], [819], [879], [], [13], [], [711], [845], [208], [96, 489], [110], [533], [950], [518, 671], [564], [219], [729], [156], [296], [913], [435], [195], [487], [704], [23], [109, 973], [47], [48], [748, 893], [48], [276], [487], [830], [49, 50], [307], [888], [449, 853], [], [40], [984], [272], [370], [196], [790], [489, 59], [76], [911, 658], [73], [727], [672], [851], [981], [883, 942], [], [336], [], [861], [444], [540], [927], [352], [375], [78], [902], [], [688], [546, 650, 402, 818, 819], [504, 850], [343], [480, 608, 539, 799], [166], [857], [495], [993], [], [40, 41, 44, 46], [70], [738], [], [632], [752, 852], [], [192], [179], [466], [670, 518], [732], [], [262], [45], [255], [513, 650], [676, 488], [19], [389], [223], [167], [659], [179], [346], [883], [459, 445], [98], [425], [], [354], [483], [], [279], [843], [735, 223], [783], [191], [], [820], [548, 664, 851, 632], [225, 235], [437], [162], [275], [617, 501], [312], [766], [105], [109], [697, 470], [334], [585], [], [513], [518, 429], [547], [54], [612], [574], [765], [391], [496], [831], [872], [0, 391, 758], [841], [], [922], [134], [355], [325], [523], [0], [893], [605], [], [759], [244], [933], [465], [514, 788], [49], [189], [894], [358, 359], [233], [], [501], [851], [702], [808], [507], [515, 451], [703, 578, 601], [816], [640], [390], [82], [774], [230], [599], [293], [120], [787], [830, 836, 837, 610], [948], [45], [323], [842], [19], [978], [904], [481, 482], [945], [866], [], [899], [232, 231], [756], [467], [], [757], [444], [502], [603], [18], [265], [671], [767, 692], [183], [729], [246], [959], [442], [997, 947], [473], [357], [439], [695], [197], [272, 62, 67], [450], [302, 314], [14], [207], [257], [627], [673, 526, 527, 782, 664, 508], [181], [573], [520], [257], [], [62], [698, 538], [565], [371], [], [52], [351], [94], [774, 608, 610], [995], [149], [340], [963], [975, 979], [489, 429], [], 
[336], [256], [790], [305], [900], [], [130], [617], [2], [299], [191], [985, 309], [656], [], [972], [489, 429, 981], [928], [980], [560], [580], [98], [789], [473], [987, 998], [651, 655], [305], [739], [614], [430], [402], [42], [659], [631], [588, 850], [722], [828], [3], [], [107], [786], [616], [993], [949], [851], [84], [922], [616], [988], [682], [], [769], [595], [914], [433], [], [370], [535], [757], [240, 241, 238], [938], [], [983], [842, 433, 445], [640], [], [834, 906, 630], [331], [920], [859], [825], [529], [875], [132], [62], [714], [571], [536, 403], [334], [], [], [37], [983], [845], [807, 561], [376], [382], [606], [560], [], [7], [], [315], [98], [673, 526, 527, 664, 508], [913], [711], [76], [], [550], [117], [224], [3], [], [197], [405], [771], [584], [623, 563], [], [317], [557], [987, 998], [566], [237], [421], [248], [0], [514], [916], [], [384], [793], [554], [593], [480], [433, 639], [24], [977], [422], [165], [316], [11], [608, 836, 837, 869, 464], [777, 490, 461, 464], [489], [385], [616], [271], [552, 619, 493, 846], [744, 657], [742], [812], [480], [286], [325], [549], [38], [299], [677], [491], [269], [528], [112], [286], [265], [440], [314], [513], [384], [608], [], [], [149], [342], [726], [403, 895], [457], [331, 478], [230, 222], [944], [362], [619], [581], [33], [783], [42], [352], [424], [444], [385, 386], [109], [802], [409, 892], [509], [923, 926], [955], [59], [], [108], [], [491], [752], [90], [835], [], [498], [174], [52], [127], [695], [449], [779], [601], [887, 884, 406], [121], [678], [44], [916], [38], [702], [937], [868], [391], [], [811], [470], [677], [619, 846], [9], [743], [809, 926], [0, 515, 853], [636], [], [337], [4], [630], [472], [910], [741], [98], [138], [545], [302], [132], [680], [870], [76], [384], [8], [], [651], [], [192], [644], [521, 659, 950], [627], [73, 74, 815], [897], [49], [632], [299], [653], [283], [670], [770, 478], [929], [715], [382], [37], [6], [542], [713], [335], [441], [577], [146], 
[968], [10], [856], [], [593, 541], [140], [3, 6], [940, 943, 948], [], [850, 765], [726], [70], [575], [681, 841, 620], [763, 413], [], [789], [57], [125], [128], [268], [307], [710], [701], [210], [354], [313, 315], [962, 923], [658], [724], [718], [], [288, 290], [3], [], [942], [451], [528], [398], [109, 973], [70], [421], [624], [367], [], [981, 429], [607], [64], [22], [471], [164], [6], [225], [], [908, 404], [605], [423], [], [], [18], [313], [640], [55], [642], [243], [37], [483], [800], [736], [351], [492], [843], [771], [169], [111], [895], [653], [18], [145], [], [483], [578, 885], [656], [575, 479], [130], [319], [342], [0], [], [297, 295], [608], [193], [810], [486], [], [], [840], [653], [467, 499], [1], [524], [971], [835], [288], [894], [85], [155], [763], [168], [608, 728], [174], [241], [623], [448], [0], [484], [966], [550], [], [720], [650, 818, 819], [540], [480], [946], [], [985], [707], [], [835], [325], [603], [21], [719], [122], [443], [117], [654], [876, 435], [259], [340], [847], [659], [305], [976], [185], [84], [311], [37], [771], [265, 266], [518], [149], [], [418], [363], [123], [642], [618], [559], [280], [228], [882], [558], [464, 608, 610], [666], [586], [147], [907, 671], [3], [242], [552], [640], [744, 657], [18], [629], [890], [921, 917], [768], [], [988], [613], [438], [560], [305], [236], [920], [78], [936], [769, 77], [579], [711], [768], [17], [383], [9], [628], [215], [528], [869, 742, 526, 655, 630], [346], [], [389], [275], [584], [383], [479, 817], [517], [604], [780], [677, 587], [485, 685], [319], [980], [152, 155], [567], [726], [761], [673, 681, 526, 527, 664, 508], [326], [489, 747], [744, 657], [544, 964, 926], [940], [247], [740, 477], [], [121], [699], [387], [548, 613, 664, 526, 527, 851], [606], [320, 985], [778], [911], [546, 650, 819], [460], [363], [492], [225, 419], [878], [305], [], [175], [130], [428], [776, 439], [314], [], [650, 822, 542], [471], [281], [2], [193], [475], [112], [141], [873], [350], 
[574], [745], [286], [655], [137], [310], [766], [974], [680], [539], [913], [342], [941], [256], [881], [396], [645], [180], [86], [], [597], [805, 205], [870], [336], [238], [789], [618], [750], [395], [422], [308], [518], [], [517], [607], [941], [749], [546, 402], [964, 987], [210], [903], [], [380], [224], [717], [693], [342, 343], [381], [910], [273], [581, 479, 436, 511], [241], [1], [632], [113], [581, 479, 817, 511], [836, 837], [], [617], [325], [334], [514, 876, 435], [636], [67], [138], [514, 515, 655, 958], [985], [960], [857], [730], [263], [643], [672], [], [964], [954], [838], [681, 620], [773], [807], [37], [417], [380], [866], [666, 924], [316], [12], [102], [], [652], [968, 504], [592], [915, 853], [834, 630], [759], [578, 894], [822], [68], [428], [681, 620], [629], [8, 792, 958], [158], [827], [789], [149], [21], [257, 258], [618], [432, 683], [900], [183], [452, 850, 610], [144], [126], [852], [795], [168], [67, 68], [292], [410], [100], [825], [660], [309], [195], [467, 125], [330], [479, 436], [387], [481], [495], [566], [480], [704], [], [777], [314], [178], [750, 564], [748], [655], [758], [492], [412], [356], [762], [548], [147], [122], [153], [9], [151], [832], [10], [12], [739], [47], [355], [914], [398], [725], [182], [284], [87], [309], [610, 758], [839], [344], [199], [573], [672], [854], [101], [783, 535], [69], [306], [], [75], [948], [610, 836, 837], [291], [948], [49, 679], [820], [400], [462], [19], [232], [267], [922], [171], [133], [744, 657], [361], [815], [259], [28], [372], [937], [234], [847], [474], [649], [779], [731], [694], [950, 951], [920], [115], [6], [777, 764], [150], [224, 235], [419], [834, 906], [587], [713], [384], [807], [615, 890], [979], [159], [593, 650], [735], [37], [244], [119], [551], [896], [861], [326], [190], [376], [], [905, 846, 721, 831], [146], [360], [485, 848, 851, 632], [594], [127], [694], [152], [48], [495], [21], [960, 470], [758], [203], [], [], [988], [152], [559], [829], [704], [646], 
[], [294], [809, 923], [50], [68], [937], [569], [521], [58], [768], [73, 74], [515], [694], [814, 977, 978], [], [974], [775], [727], [242], [644], [603], [75], [835], [], [345], [49], [92], [459], [137], [294], [647, 967, 606], [968], [204], [879], [831], [471], [643, 881], [112], [967], [328], [781], [338], [61], [856], [578, 982], [976, 972], [41, 44, 26], [118, 119], [939, 943], [149, 150], [898], [503], [231], [940], [615], [431], [696], [880], [596], [833], [916], [768], [], [951], [162, 230], [996], [185], [911, 658], [103], [668], [821], [495], [509], [158], [560], [876, 912, 435], [937], [705, 825], [999], [930], [67], [938], [479], [585], [756], [621], [923, 499], [690, 345], [681, 620], [773], [869], [869], [311], [56], [], [673, 742, 664, 526, 527, 782, 632, 508], [739], [969], [867], [208], [862], [804], [840], [54], [495, 725], [685], [474], [585, 655], [761], [150], [12], [451], [477], [47], [494], [857], [719], [972], [742, 713, 664, 526], [535], [766], [847], [956], [825], [794], [934, 478], [665], [840], [751], [822], [581, 479], [], [858], [401], [272], [169], [977, 978], [870], [107], [853], [785], [204], [942], [17], [177], [492], [608, 903, 841], [444], [502], [723, 549], [927], [336], [], [444], [133], [191], [95], [596], [924], [947], [816, 911], [166], [981, 429], [777], [90], [537], [680], [820], [209], [139], [953], [696], [205], [674], [926], [], [171, 237], [770, 788], [70], [452], [560], [94], [715], [597, 413, 671], [32], [574], [252], [27], [814, 693], [871], [299], [907, 440], [122], [243], [526], [186], [578, 834, 523, 906, 630], [28], [882], [846], [980], [301], [111], [495], [320], [70, 985], [778], [], [561], [780, 975], [501], [837, 670], [723], [391], [93], [243], [47], [233], [63], [650], [991], [805], [866, 730], [427], [192], [360], [15], [273], [575], [407], [916], [74], [], [354], [828], [451], [236], [758], [170], [], [825], [419], [79], [97], [779], [626], [820], [108], [932], [655], [703], [920], [503], [557], [988], 
[804], [937], [314], [431], [773], [138], [945], [507], [599], [], [], [896], [970, 979], [919], [840], [474], [637], [120], [489, 791], [721], [313], [380], [577], [809], [980], [698], [780], [796, 837, 836], [992], [610, 697], [938, 942, 943], [967, 504], [34], [258], [917], [187], [87], [175], [68], [60], [19], [403], [], [713], [867], [647], [140, 94], [865, 692], [], [376], [261], [787], [217], [440], [761], [102], [901], [], [763], [131], [952], [386], [128], [200], [955], [522], [268], [690, 345], [741], [43], [704, 581, 919], [770], [250], [235, 465], [], [851], [794], [926], [628], [744, 657], [546], [], [977], [130], [373], [940], [872], [258], [997, 623, 696], [119], [840], [458], [835, 855], [520], [], [812], [680], [142], [128, 144], [101], [], [329], [398], [50], [636], [335], [509], [693], [199], [242], [807], [], [610, 770, 862, 733], [592], [337], [386, 101], [110], [], [123], [22], [254], [91], [], [999], [679], [695], [769, 418, 709, 600], [44], [469], [896, 999, 861], [49], [331], [169], [887], [737], [349], [348], [220], [581, 479], [846], [608, 806], [27], [568], [], [281], [312], [101], [90], [39], [494], [44], [189], [746], [662], [63], [282], [292], [], [286], [166, 167], [621], [890], [581, 479], [196, 198], [89], [513], [281], [355], [96], [719], [417], [952], [670], [255], [928, 960], [651, 760], [551, 629, 696], [119], [688], [], [301], [992], [738], [450], [726], [501], [723], [255], [177], [703], [293], [114], [842, 529, 562], [364], [810, 508], [867, 675], [996], [344], [649], [312, 311], [525], [321], [321], [385], [325], [455], [621], [933], [146], [150], [566], [], [826], [], [584], [564], [936], [970], [342], [85], [], [], [871], [279], [108], [28], [808], [932], [232, 231], [457], [344], [740], [926], [421, 765], [249], [670, 415], [526, 400], [431], [], [967, 968, 923], [383], [363], [176], [363], [164], [235], [651], [392, 393, 108, 973], [318], [95], [615], [574], [367], [73], [512], [863], [301], [308], [766], [531], [891], 
[879], [166], [333], [207], [400, 667], [589], [363], [], [204], [872], [959], [231], [574], [344], [398], [132], [13], [517], [986], [836, 837], [139], [], [462], [127], [513, 875], [549, 968, 504], [22], [894], [813, 659], [549], [682], [526, 787], [770, 605], [436, 733], [288], [459, 445, 638], [228, 265], [51], [367], [561], [308], [868, 954], [928, 927], [296], [401, 881], [67], [749], [297, 295], [7], [722], [216], [681, 810, 620, 508], [645], [548], [306], [79], [662], [722], [430], [756], [638], [378], [760], [507], [844], [923], [840], [666], [900], [618, 926], [221], [], [510], [], [928], [974], [643, 692, 478], [807], [50], [950], [923], [60, 68], [861], [398], [646], [144], [146], [728], [690, 345], [204], [206], [370], [960], [983], [945], [371], [329], [67], [817, 511, 479], [968, 504], [33], [], [676], [513], [155], [373], [198], [820], [134], [770], [588], [362], [64], [847], [474], [866], [581, 817, 479], [10], [255], [512], [933], [430], [631], [108], [367], [317], [603], [999], [621], [484, 871], [915], [983], [375], [186], [195], [287], [340], [56], [975, 497], [560], [295], [987], [206], [861], [770], [969, 659], [292], [506], [188], [784], [397], [300], [815], [349, 350], [419], [174], [573], [6], [926], [897], [], [895], [678], [520, 516], [701], [899, 849], [167], [866, 730], [652, 764, 413], [644], [137], [874], [494], [557, 602, 733], [721], [636], [923, 960], [128], [817, 511], [611], [869], [31], [409], [297, 295], [997], [562], [521], [897], [499], [452], [492], [388], [], [], [449], [394], [173], [920], [285], [584], [813, 910], [891], [711], [144], [222], [813, 909, 910], [351], [633], [273], [362], [638, 639], [261], [489], [225], [196], [335], [], [148], [966], [713], [40, 44], [203], [555], [806], [630], [474], [18], [764], [651], [], [198], [512], [164], [644, 720], [112], [181], [709], [582, 953], [813], [609, 586, 413], [601], [], [], [82], [168], [453, 454], [37], [73, 74, 815], [67], [159], [], [], [324], [718], [311], [534], 
[976], [230, 231], [607], [476], [400, 667], [274], [765], [814], [143], [109], [806], [701], [433], [], [171], [41], [582, 950, 954], [581, 661, 479], [560], [497], [815], [341, 342], [753], [248], [102], [680], [262], [738, 633], [157], [329], [516, 850], [821], [715], [8], [], [569], [426], [946], [770], [333], [754], [839, 978], [275], [486, 819, 889], [321], [461], [123], [453, 885, 887], [827], [139], [281], [276], [241], [], [836, 837], [611, 207], [948], [696], [317], [77], [614, 879], [684], [707], [479], [618], [851], [680], [553], [138], [362], [927], [381], [47], [989], [920], [359], [793], [881], [890], [81], [608, 615, 792], [244], [652], [347], [984], [681, 620, 508], [581, 717, 479], [377], [720], [258], [194], [784], [478], [451], [660], [416], [308], [914], [532], [412], [662], [361], [688], [985], [121], [754], [863], [577], [231], [443], [200], [104], [203], [189, 191], [547], [673], [209], [621], [105], [450], [752], [810, 878], [], [300], [149], [73], [840], [946], [447], [464, 608], [234], [], [783], [739], [979, 525], [292], [971], [145], [608], [256], [926], [408], [691], [273], [360], [434, 533], [835], [326], [299], [679], [852], [59], [594], [616], [151], [308], [557], [529], [212], [729], [907, 440], [868], [], [971], [661], [], [244], [307], [974], [46], [226], [748], [563], [128], [535], [882], [830], [623], [200], [404], [995], [326], [489, 395], [288], [652, 847, 465, 413], [18], [140], [886], [278], [], [643, 759], [373], [933], [294], [830], [737], [723], [75], [140], [172], [], [234], [838, 631], [625], [690], [677], [261], [76], [161], [943], [61], [487], [851], [162, 166], [555, 475], [354], [], [987, 998], [309], [372], [686], [], [187], [95], [936], [339], [716, 765], [95], [527, 592, 782, 664, 508], [980], [627], [], [86], [311], [214], [692], [548], [690, 345], [406], [908, 404, 812], [892], [850], [431], [950, 951], [465, 652, 413], [570], [], [95], [46], [299], [984], [835], [625], [623], [589], [946], [584], [254], 
[753], [679], [864], [379], [755], [909], [70], [84], [904], [520, 850], [382], [122], [2], [484, 814], [639], [222, 207], [6], [733, 920], [745], [422], [797], [861], [107], [587], [], [714], [921], [811], [624, 453], [726], [], [928, 960, 954, 572], [712], [173, 253], [650, 402, 818, 819, 632], [995], [686], [962], [681, 620, 664, 508, 477], [803], [306], [620, 526, 664, 508], [982], [324], [971, 724], [920], [440], [405], [440], [954], [417], [581, 734], [974], [791], [369], [581], [423], [637], [990], [858], [791], [400, 667, 733], [818, 872, 622, 759], [9], [748], [328], [670, 518], [250], [326], [531], [673, 526, 527, 664, 508], [139], [469], [317], [352], [704], [694], [148], [74], [935], [130], [987, 998], [136], [813], [259, 265, 153], [465, 763], [38], [928, 960], [628], [248, 250], [471], [476], [41], [432], [188], [715], [801, 983], [135], [283], [607], [935, 567, 923], [], [], [840], [963], [245], [128], [297], [344], [749], [249], [], [126], [419], [], [861], [195], [357], [770, 811], [617, 823], [86], [839], [679], [992], [], [558, 402, 699], [628], [628], [673], [358], [485, 754], [346, 351], [905, 825], [487], [100], [820], [673, 664, 526, 527, 632, 508], [769], [582], [815], [999], [651], [113], [823, 836], [851], [467], [727], [151], [410], [], [], [54], [907, 532, 440, 966, 762], [2], [514], [447], [], [555], [458], [], [], [634], [277, 278], [59], [946], [336], [], [378], [237], [938], [810, 878], [95], [961], [81], [19], [629], [], [539], [538], [245], [693], [290], [339], [49, 50], [999, 281, 861], [], [32], [47], [859, 521, 651, 760], [693, 472], [639], [713], [616], [], [510], [210], [79], [], [783], [655], [236], [550], [953], [744, 657], [929], [894], [253], [851], [732], [659, 809], [], [415], [205], [188], [], [210], [717, 581, 479], [529], [185, 186], [897], [999, 700], [275], [130], [206], [293], [531], [776], [538], [360], [863], [57], [407], [148], [6], [760], [855], [167], [778], [68], [91], [801], [45], [2], [792], [35], [863], 
[683], [146], [163], [519], [899, 951], [205], [489], [367], [], [151, 188], [874], [546, 650, 402, 818, 819], [619, 314], [831], [377], [282], [279], [272], [161], [73], [775], [554], [602], [149], [782, 851], [373], [297], [384], [535], [785], [347], [175], [853], [14], [472], [275], [608, 806, 841, 831], [658], [609], [888], [48], [322], [284], [457], [969, 470], [383], [955], [259], [603], [705], [2], [307], [531], [229], [487], [760], [247], [286], [862], [483], [685], [721], [624], [66], [674], [137], [517], [22], [739], [519], [274], [744, 657], [], [980, 975], [307], [600], [868, 968, 504], [882], [996], [953], [41], [770], [270, 279], [281, 282], [], [611], [373], [506], [882], [428], [516], [448], [138], [733], [743], [406], [679], [708, 975], [384], [859], [213], [888], [502], [150], [646], [], [981], [], [651, 813, 567, 827], [939, 943], [20], [902], [846], [64], [573], [651, 567, 760], [605], [638, 639], [779], [711], [458, 708], [162], [778], [75], [259], [872], [552, 515], [659, 937], [199], [659], [184], [61, 62], [518, 671], [123], [110], [308], [635], [993], [912], [613], [385, 386], [583], [889], [849, 505], [125], [662], [147], [334], [841], [244], [950], [842, 977, 978], [], [358], [193], [741, 687, 884, 406], [312], [153], [224], [349], [947], [203], [264], [185], [454], [693], [79], [421], [144], [968], [45], [258], [625], [200], [985, 301], [610], [579], [913], [867], [579, 582], [931], [259], [564], [900], [987, 998], [864], [81], [813], [528], [626], [770], [883], [257], [732], [54], [338], [836, 837, 841], [792, 428], [272], [413], [804], [922], [546, 650, 402, 819, 541], [], [441], [852], [316], [277], [457], [605], [658], [40], [515], [], [755], [25], [992], [809, 925], [887], [898, 680], [877], [16], [506], [230, 231], [796], [615], [324], [918], [333], [374], [392], [545], [760], [640], [359], [], [763, 597], [112], [190], [294], [791], [496], [64], [], [225], [646], [129], [908, 895], [], [486], [167], [651], [905], [736], [594], [], 
[206], [294], [], [145], [593], [72], [], [13], [138], [834], [326], [770], [222], [155], [375], [560, 768], [433], [907], [248, 250], [794], [15], [824], [810, 878], [549], [], [295], [710, 809], [481, 482], [176], [206], [981], [83], [499, 923], [434], [775], [977, 842, 978], [866], [43], [], [291], [951], [531], [848], [840], [370], [393], [], [767], [818], [314], [768], [806], [347], [263, 151], [493], [496], [268], [173], [195], [786], [], [873], [816], [447], [910, 567], [22], [758], [696], [190], [207], [573], [2], [377], [546], [986], [], [200], [868, 813], [857], [766], [972, 976], [], [], [281], [622, 759], [966], [684], [546, 819], [433], [85], [807], [269], [38], [426], [49, 50], [819], [942], [585], [743], [207], [206], [642], [816], [519], [316], [762, 532], [205], [], [28], [845], [539, 741], [0], [847], [99], [510], [836, 837], [866], [], [], [15], [500], [115], [719], [903], [123], [703, 463, 738], [659], [], [29], [872], [767], [372], [195], [191], [252], [546, 819], [659], [284], [487], [28], [165], [726], [355], [272], [879], [271], [600], [], [241], [8, 912], [958], [472], [105], [810, 878], [313], [562], [870], [259], [744, 657, 812], [392], [814], [608, 770], [888], [98], [333], [725], [233], [447], [139], [717], [82], [540], [77], [329], [333], [244], [517, 625], [325], [639], [789], [115], [536], [965], [840, 462], [389], [284], [882], [896], [588, 948], [933], [711], [360], [756], [508], [95], [636], [276], [420], [962, 987, 923], [207], [796], [660, 557], [32, 31], [923], [646], [980], [82], [958], [85], [244], [182], [294], [804], [697], [366], [479, 535], [999], [824], [], [724], [92], [232], [877], [946], [], [496], [812], [985], [813, 567], [662], [883], [100], [359], [444], [771], [426], [917], [811], [775, 977, 978], [983], [871], [322], [187], [940], [8], [343], [207], [859], [630], [155], [306], [322], [681, 620, 526], [69], [59], [336], [393, 327], [93], [174], [170], [68], [206], [85], [477], [185], [112], [934], [463], [730], 
[650, 819, 822, 632, 542], [562], [870], [977], [928, 923], [451], [168], [898, 655], [14], [89, 414], [715], [775], [734], [765], [38], [681, 620, 526, 846, 632, 508], [396], [951], [815], [], [149], [597], [594], [546, 650, 819], [354], [266, 267], [488, 695, 508], [363], [455], [247], [813, 910], [55], [471], [249], [536], [2, 3, 973], [503], [288], [795, 970], [673, 681, 620, 526, 527, 664, 508], [85], [47], [160], [722], [452], [655], [201], [788, 502], [648], [643], [477], [203], [42], [210], [23], [], [342], [741], [77], [], [113], [704], [501], [721, 697], [534, 729], [382], [89], [], [161], [303], [579, 881], [38], [137], [908, 404], [661], [496], [333], [217], [924], [899, 868, 968, 809, 463, 659], [609, 465, 413], [728], [50], [937], [369], [630], [778], [153], [968, 504], [], [459], [581, 479], [578], [304, 301], [899], [22], [863], [384], [308], [619, 846], [471], [], [334], [679], [382], [87], [460, 718, 975, 977, 978], [499], [842, 977, 978], [842], [649], [26], [761], [738, 825], [460], [157], [719], [864], [585], [634], [88], [618, 809, 926], [161], [933, 923], [195], [950], [243], [395], [879], [285], [991], [333], [934], [648, 760], [992], [907, 440], [578], [250], [176], [570], [80], [879], [23], [51], [700, 999], [290], [91], [208], [596], [814], [764], [77], [480, 785, 731, 414], [366], [411], [330], [836, 837, 774, 655], [557], [56], [240], [119], [695], [586], [669], [331], [361], [526, 527, 664, 508], [671], [37], [382], [105], [458], [768], [658], [518, 671], [281], [], [], [72], [22], [921], [132], [369], [547], [41, 48], [], [248, 250], [783], [282], [350], [608, 792], [584], [], [933], [], [385, 386, 101], [], [], [83], [921, 917], [], [369], [972], [92], [], [623, 923], [382], [738, 580], [593], [], [347], [647, 968, 532], [902], [514, 515, 476, 765], [476], [308], [390], [954], [790, 952, 954], [957], [616, 972], [935], [138], [224], [37], [320], [381], [425], [216], [923, 572], [441, 572], [891], [627], [715, 652, 764], [791], [187], 
[980], [834, 869, 906], [36], [581], [145], [989], [818], [427], [728], [216], [888], [131], [903], [427, 756], [261], [36], [544, 909], [], [], [447], [418], [537], [], [337], [293], [917], [437], [247], [489, 275, 276], [923], [805], [512], [346], [847], [871], [82], [190], [465, 597, 728], [892, 721], [347], [682], [641], [858], [5, 6], [652, 465, 413], [944], [864], [562], [295], [300], [439], [888], [135], [40], [218], [548], [763], [168, 159], [82], [0], [88], [900], [417], [673], [984], [437], [400], [479], [931], [257], [558], [400], [511, 479], [287], [935, 469, 923], [695], [385, 386], [294], [633], [882], [539], [854], [151], [52], [641, 808], [716], [329], [226], [823], [3], [953], [141], [], [648], [], [332], [907], [160], [308], [758], [522], [219], [806], [842], [602], [22], [258], [734], [520], [148], [252], [248], [348], [17], [916], [793], [659], [617, 823], [], [650, 401, 819], [178], [32], [944], [], [757], [634], [23], [15], [239], [471], [697], [419], [151, 158], [941], [746], [24], [553], [481, 482], [773], [700], [18], [391], [757], [480], [680], [42], [66, 68], [873], [], [117], [], [232], [], [331], [], [274], [129], [804], [454], [538], [654], [], [411], [797], [259], [236], [443], [657], [211], [55], [936], [25], [140], [643], [836, 837], [22], [936], [809, 925], [453], [997, 947], [410], [697], [], [], [617, 438], [118], [995], [387], [801], [777], [989], [562], [472], [348], [513], [720], [755], [215], [939], [865], [], [893], [761], [582], [277], [113], [110], [772], [794], [709], [521, 947], [739], [347], [656], [83], [898], [164], [490], [], [684], [304], [72], [839], [552], [472, 693], [86], [422], [977, 978], [947], [490], [910], [], [346], [39, 47], [312, 311], [703], [270], [600], [720], [15], [890], [986], [563], [447], [976], [839], [440], [217], [404], [461], [153], [863], [349], [902], [898], [836, 837, 617, 789], [132], [268], [490], [], [866, 596], [486, 559], [541], [716], [28], [], [198], [498], [5], [768], [546], [188], 
[920, 475], [260], [634], [772], [776, 819], [559], [928, 927], [747], [728], [579, 881], [567], [241], [695], [78], [161], [115], [157], [561], [183], [164], [413], [27], [116], [489, 815], [381], [], [610, 430], [251], [462], [], [738], [605], [573], [605], [702], [13], [291], [22], [343], [991], [], [123], [198], [23], [276], [40], [679], [230], [104], [840], [70], [209], [332], [926], [947], [628], [291], [222, 257], [652, 413], [292], [975], [289], [798], [641], [673, 418, 526, 527, 664, 508], [570], [998, 939, 943], [380], [209], [173], [189], [299], [630], [181], [245], [], [291], [829], [956], [176], [575], [324], [4, 391], [915], [464, 597, 763], [83], [740], [52], [713], [205], [504, 441, 572], [], [], [138], [681, 810, 620], [761], [777], [109, 973], [], [541], [223], [613], [180], [512, 623], [540], [154], [], [], [217], [594], [903], [], [465, 796], [964], [923], [], [], [896, 804, 631], [398], [523], [550], [917], [840], [574], [608, 796, 806, 478], [704], [572, 966], [357, 958], [597], [399], [84], [], [364], [40], [429], [436], [920], [163], [680], [2], [557], [190], [156], [722], [571], [894], [439], [756], [871], [198], [564], [438], [373], [149], [232], [391], [269], [933], [721], [], [151], [151], [248, 539], [814], [866, 595], [655], [421], [698], [454], [568], [865], [267], [183], [553, 493], [281], [], [237], [721], [477, 587, 784], [333], [663], [382], [296], [652, 764, 413], [951], [987, 998], [270], [996], [952], [620, 662], [929], [369], [79], [118], [], [359], [697], [761], [29], [617, 438], [37], [222], [233], [106], [76], [36], [53], [310], [608, 515, 610, 841], [283], [], [795], [19], [290], [326], [], [36], [290], [118], [293], [506], [989], [996], [366], [], [], [746], [391], [834, 655], [73, 77], [782, 664], [255], [85], [953], [316], [123], [919, 860], [347], [178], [274], [84], [], [418, 709, 748, 563], [323], [632], [724, 536], [841], [402], [638, 639], [25], [345, 730], [588], [874], [], [363], [130], [532], [673, 526, 527, 
782, 664, 508], [707], [401], [576], [413], [0], [603], [674, 630], [197], [828], [978, 437], [], [498], [298], [325], [], [413, 439], [863], [97], [680], [89, 284, 799], [97], [709], [573], [368], [805], [284], [683], [222], [411, 828], [659], [86], [633], [642], [792], [459], [155, 204], [], [756], [798], [673], [], [338], [296], [178], [462], [843], [187, 201], [218], [117], [169], [711], [], [389, 391], [634], [713], [330], [553], [772, 869, 488, 464], [122], [523], [625], [54], [402], [889], [692], [29], [356, 359], [578, 515, 689, 982, 601], [97], [213], [98], [729], [], [459, 445], [20], [358], [], [950], [485], [169], [240], [548], [891], [650, 819], [125], [], [184], [588], [476], [666], [140], [754], [559], [937], [385], [913], [643, 906], [], [718], [516, 669], [673, 504, 508], [50], [596], [866, 803], [135], [496], [667], [486], [211], [18], [387], [563], [931], [142], [767], [310], [910, 411], [448, 489], [245], [64, 55], [439], [64], [157], [240], [578], [922], [288], [842, 523, 433, 795], [808], [108], [934], [861], [209], [517, 540], [112], [769], [423], [652], [], [187, 201], [839], [22], [130], [289], [746], [780], [447], [995], [780, 914], [888], [179, 180], [], [27], [], [373], [879], [536], [582, 936, 939, 943], [146], [518], [659, 949, 950], [218], [], [475], [684], [820], [75], [], [960, 968], [49], [650], [173], [], [565], [405], [690], [345], [652, 822, 541, 542], [889], [343], [], [944], [300], [784], [780, 724], [768], [514], [], [35], [741], [], [983], [83], [906], [518], [229], [487, 590], [218], [864], [91], [147], [617, 823], [237], [920], [866], [469], [746], [581], [892], [], [911], [962], [89], [], [154], [487], [714], [378], [627], [515, 348], [247], [343], [18], [529], [], [142], [739], [332], [491], [517], [926], [220], [930], [926], [977, 978], [581, 479, 817, 511], [974, 468], [912, 977, 978], [606], [577], [40], [464], [488, 600], [], [784], [492], [996], [589], [439], [650, 632], [438], [588, 790], [251], [780], [130], 
[319], [521], [543], [357], [137], [622, 759], [399], [689], [], [240, 238], [608, 681, 620], [752], [39], [116], [263], [842], [522], [684], [], [], [665], [713], [], [], [791, 582], [850], [336], [823], [971], [588, 813, 910], [9], [227], [945], [307], [194], [578, 689, 601, 831], [379], [726], [695], [800], [831], [802], [131], [71], [686], [485, 848], [352], [501], [810, 878, 658], [185], [765], [18], [496], [209], [437], [698, 483], [964], [103], [276], [388], [243], [841, 911], [578, 982], [228], [799], [773], [741], [575], [15], [424, 919], [581, 479], [], [296], [203], [586], [62], [888], [227], [695], [771], [40, 46], [335, 412], [486], [637, 879], [8], [42], [854], [136], [39, 47], [325], [299], [609], [231], [577], [233], [814], [382], [978], [472, 693], [524, 461], [], [355], [], [979, 821], [537], [249, 250], [], [965], [398], [553], [850], [811], [804], [83], [613], [680], [94], [803, 586], [56], [608, 610, 841], [407], [151], [18], [615], [489, 818], [889, 831], [], [390], [741], [712, 126], [821], [471], [63], [578, 982], [983], [762], [275], [396], [459, 445], [], [172, 173], [148], [834, 522], [472], [], [578, 689, 601], [901], [539], [378], [239], [], [], [894], [897, 534, 729], [532], [896], [522], [459], [439], [344], [691], [372], [875], [513, 776, 875], [5, 6], [], [314], [198], [14], [42], [429], [], [555], [832], [986], [591], [359], [311], [446], [349], [222], [518, 671], [602], [290], [399], [682], [413], [750], [351], [568], [792], [], [581, 479], [389], [842, 977, 978], [31], [577], [996], [592], [821], [117], [140, 142], [], [431], [81], [759], [618, 813, 910], [523], [914, 780], [66, 54], [379], [672], [293], [567], [29], [673, 664, 526, 527, 782, 508], [], [189], [731], [745], [899], [156], [240, 241], [], [17], [77, 815], [806], [76], [596], [580, 807], [581, 717], [905], [117], [865], [897], [893], [431], [928, 923], [588], [454], [490, 524, 787], [497, 406], [563], [783], [646], [82], [44], [416], [339], [669], [8], [220], [722], 
[255], [494], [230], [826, 488], [273], [181], [349], [391], [995], [265, 266], [283], [57], [810, 651, 508], [518], [170], [913], [436], [], [464], [], [619, 846], [203], [138], [421], [], [], [564], [213], [736], [125], [789], [], [744], [899], [575], [483], [516, 520, 721], [470], [82], [489], [258], [330], [53], [291], [303], [730], [52], [229], [75], [854], [330], [702], [781], [325], [612], [515, 841], [211], [727], [668], [818], [775], [831], [649], [4], [107], [420], [900], [], [751, 479], [257], [750], [894], [949], [628], [410], [533], [874], [745], [293], [224], [896, 804, 711, 585, 631], [28], [422], [810], [619, 846], [254], [967], [583], [541], [618], [281], [697], [638, 639], [479], [316], [582, 936], [452], [470], [738], [172], [868], [206], [], [417], [364], [131], [464], [514], [104], [667], [325], [864], [664], [110], [539], [69], [747], [941], [503], [], [565], [338], [720], [215], [409, 892], [989], [606], [871, 913], [], [860], [421], [683], [144], [957], [596], [211, 243], [218], [], [904, 281, 282], [289], [376], [569], [417, 557, 562], [258], [505], [673, 526, 527, 782, 664, 508], [530], [449], [], [860], [865, 692], [946], [694], [656], [353], [984], [258, 222], [465], [], [636], [867], [94], [], [403], [379], [455], [206], [722], [230], [303], [922], [705, 547], [600, 517], [334], [392, 109], [627], [270], [159], [711], [101], [884], [404], [492], [335], [288], [], [699], [245], [650, 819], [617], [233], [316], [153], [778], [624], [905], [728], [], [143], [727], [640], [331], [541], [27], [46, 47], [987, 998], [322], [633], [879], [847], [892], [108], [78], [669], [191], [414], [], [400, 667], [845], [88], [], [533], [522], [683], [395], [398, 529], [343], [131], [347], [321], [503], [199, 197], [182], [281], [711], [509], [172, 173], [41], [349], [685], [86], [270], [281], [156], [616], [979], [69], [967], [732], [578], [614, 879], [867], [923], [753], [168, 159], [565], [114], [870], [14], [313], [298], [903, 584], [], [515, 910], 
[269], [927], [459], [793], [198], [213], [366], [544], [546], [930], [649], [77], [409], [469], [614], [66], [965], [537], [454], [179], [83], [350], [179], [771], [517], [581, 479, 817, 511], [390], [766], [], [467], [519], [896, 281], [357], [114], [2, 3], [403], [843], [978], [474], [40], [33], [151], [269], [543], [293], [770, 774, 655], [362], [772, 679], [250], [425], [723], [530], [193], [], [955], [561], [581, 489, 479], [], [40], [777, 623, 542], [961], [917], [819], [664], [], [672], [758], [566], [53], [910], [764], [574], [207], [], [946], [756], [242], [809, 910, 925], [419], [918], [727], [903], [634], [], [615, 652, 465, 413], [], [628], [168, 159], [877], [109], [811], [579], [638, 639], [594], [888], [753], [93], [], [884], [234], [509], [224], [450], [373], [152], [701], [632], [344], [849], [42], [843], [602], [33, 973], [909, 659, 951], [409, 892], [987], [400], [], [455], [492, 857], [618, 567], [644], [494, 442], [430], [646], [514], [962], [306], [868, 968, 923], [44], [872], [765], [707], [836, 837, 842], [175], [554], [], [995], [], [293], [], [448], [57], [920], [], [213, 852], [999], [247, 151], [372], [471], [386, 101], [441], [924], [144], [58], [368], [767], [480], [258], [861], [], [189], [431], [376], [816], [315], [346], [397], [40], [577], [], [424, 423], [], [402, 889], [692], [183], [273], [805], [877], [746], [427], [581], [793], [961, 910, 659], [896], [372], [599], [840, 462], [515, 824], [744, 657], [986], [333], [453, 831], [674], [236, 852], [764], [131], [], [386], [814], [976], [547], [322], [323], [968], [466], [713], [481, 482], [944], [237], [708], [535], [295], [559], [481], [532], [], [358, 359], [151], [258], [748, 600], [659], [624, 453, 454], [999], [788], [666], [103], [855], [57], [472], [275], [83], [841], [], [344], [10], [947], [97], [685], [250], [995], [410], [429, 527, 916, 664], [], [84], [581, 436, 479], [], [156], [459], [98], [300], [8], [751], [614], [644], [53], [402], [616], [759], [], [], [427], 
[749], [91], [229], [], [91], [710], [], [220], [318], [6], [468], [836, 837], [771], [587], [282], [], [669], [258], [127], [631], [857], [679, 616], [737, 898, 886], [], [48], [772], [108], [789], [433, 842], [123], [481, 482], [161, 195], [609], [562], [979], [644], [908], [968], [733, 557], [897], [572], [893], [355], [578, 654, 982], [813], [227], [82], [403], [462], [], [], [271], [655], [765], [750], [], [32], [468, 479], [109], [51], [579, 881], [], [701], [928, 923, 960], [403], [219], [232], [402], [64, 55], [551], [], [], [554], [626], [863], [849], [0], [870], [754, 507], [407], [747], [15], [939], [330], [233], [905, 619], [982], [315, 311], [802], [987, 926, 998], [252], [489], [154], [498], [346], [566], [917, 794], [85], [287], [456], [], [719], [870], [638, 639], [738], [775, 819, 842, 602], [901], [730], [868, 532, 923, 572], [835], [826], [766], [117], [274], [9], [831], [479, 436], [49], [770, 518, 414, 842, 978], [], [84], [611], [], [683], [215], [474], [340], [], [224], [839], [544, 521, 926], [68], [836, 837, 869], [924], [672], [219], [957], [129], [720], [626], [222], [107], [190], [853], [654], [715], [389, 390, 395], [311], [928], [822], [909, 544, 336, 469], [954], [309], [763, 764], [573], [886], [325], [], [86], [192], [36], [366], [926], [131], [49], [545], [42], [210], [710], [11], [338], [657, 812], [491], [484], [872], [82], [596], [603], [599], [102], [333], [364], [365], [366], [71], [16], [37], [890], [832], [872], [175], [647], [], [482], [756], [393], [375], [496], [943], [616], [562], [690], [616], [], [562], [373], [743], [40, 46], [88], [819, 822], [33], [880], [157], [], [848], [831], [], [941], [866], [53], [140], [134], [830], [367], [922], [691, 692], [673, 681, 620, 526, 782, 664], [247], [827], [736], [546], [743], [260], [770, 774, 655], [160], [270], [171], [52], [540], [], [372], [468], [], [647], [875], [896, 553, 493, 894], [63], [330], [63], [338], [136], [802], [450], [514], [582], [192], [533], [189], [952], 
[855], [755], [48], [28], [396], [302], [100], [345, 475], [], [], [], [193], [57], [8, 7], [773], [481], [391], [723], [357], [952], [40], [975, 976], [], [], [495, 894], [440], [260], [712], [880], [518], [689], [147], [433], [110], [136], [479], [177], [121], [383], [970, 976, 979], [64], [554], [554], [845], [829], [874, 654], [963], [551], [], [418], [270], [328], [495], [150], [449], [191], [801], [500], [302], [193], [], [991], [211], [540], [315], [335], [327], [449], [388], [114], [672], [133], [538, 668], [776, 513], [625], [949], [412], [992], [793], [], [41], [814], [881], [186], [246], [497], [346], [662], [264], [384], [703], [259], [874], [467], [33], [601], [644], [528], [228], [944], [430], [311], [], [68], [87], [937], [364], [512], [748], [354], [283], [268], [512], [339], [918], [582], [573], [781], [171], [419, 823, 845], [392], [592], [901], [6], [19], [778], [18], [262, 243], [562], [], [159], [786], [], [835], [], [750], [162], [602], [585], [830], [701], [960], [497], [698], [736], [275], [909], [686], [999, 700], [824], [849], [296], [294], [], [116], [320], [], [755], [379], [71], [576], [962, 532, 923], [59], [718], [254], [882], [983], [], [463], [951], [993], [972, 500], [263], [738, 939], [92], [901], [671], [303], [842], [65], [29], [256], [49], [632], [883], [393], [652, 691, 895], [852], [93], [805], [53], [430], [626], [123], [892], [184, 170], [167], [209], [296], [987], [646], [320], [], [273], [832], [414], [34], [729], [340], [38], [136], [501], [335], [480], [103], [321], [849], [241], [726], [847], [], [836, 837], [240], [3], [76], [848], [651], [615, 465], [574], [656], [57], [594], [915], [362], [608, 515], [272], [457], [822], [888], [297], [640], [449], [425, 716], [517], [734], [440], [862], [], [12], [792], [738, 723], [773], [621, 412], [571], [], [479, 817], [1, 728], [], [22], [699], [857], [64], [66, 68], [6], [739], [], [619], [559], [240, 241, 238], [244], [921], [671], [166], [930], [568], [860], [821], [994], 
[255], [352], [646], [783], [786], [319], [358], [591], [923], [250], [23], [207, 692], [934], [269], [172], [834, 451, 457], [436], [692], [224], [451], [738, 580], [223], [815], [], [678], [443], [671], [946], [361], [432], [278], [], [349], [309], [269], [435, 876], [321], [738, 716], [315], [868, 968, 849, 505, 828], [], [386], [159], [361], [983], [874], [980], [849], [103], [236], [669], [201], [583], [941], [681, 810, 620], [505], [393, 108, 973], [671], [988], [538, 727], [277, 278], [471], [265, 266], [634], [745], [696], [578, 971, 982], [607], [582, 950], [437], [644], [543], [974], [], [894], [274], [126], [261], [947], [401], [100], [876, 445], [754, 632], [356], [289], [106], [676, 570], [569], [486], [385, 865], [650, 819], [973, 123], [809], [768], [227], [537], [444], [551], [760], [], [61], [908, 404, 812], [576], [734], [104], [], [19], [110], [], [83], [827, 534, 411], [350], [424, 423], [348], [973], [744, 657, 812], [382], [953], [911, 474, 894, 735], [324], [567], [], [420], [155], [], [841], [834, 906, 907], [505], [804], [286], [967, 441], [162], [128], [239], [6], [894], [123], [903], [478], [729], [], [99], [517], [677, 587], [2], [559], [437], [6], [651], [660, 757], [105], [872], [532], [119], [862], [583], [155], [316], [162, 882], [549], [809, 925], [235, 434], [440, 441, 455], [597], [86], [259], [450], [120], [8], [456, 872], [290], [723], [959], [756], [713], [631], [243], [962], [77], [], [714], [399], [940], [634], [986], [530], [98], [196], [360], [935], [148], [917], [606], [139], [973], [370], [446], [734], [529], [704, 444], [], [250], [41], [897], [193], [401], [821], [195, 811], [946], [819], [302], [251], [681, 620, 761, 508], [636], [273], [388], [911], [292], [546, 650, 818, 819], [993], [897], [487, 635], [384], [218, 215], [309], [258], [859], [221], [202], [462], [70], [997], [514, 655, 824], [463], [467], [732], [492], [368], [], [329, 126], [685], [408], [186], [700, 999], [647], [728, 703], [672], [492], [482], 
[928, 659, 949], [626], [715], [339], [581, 717, 479], [328], [431], [824], [247], [488], [279], [903, 650, 819, 851], [101], [692], [553], [35], [89], [888], [328], [580], [481], [278], [748], [40], [], [], [814], [722], [123], [410], [90], [874], [538, 727], [866, 595], [901], [276], [999, 700, 861], [508], [296], [654], [640], [226, 170], [651], [], [102], [433], [659], [], [614], [40], [278], [311], [852], [740], [135], [934], [139], [], [513, 875, 566], [382], [117], [734], [984], [592], [], [896, 804, 648, 861], [792], [353], [579], [947], [369], [879], [941, 942], [543], [521], [], [831], [890], [976], [840, 882], [763], [481], [273], [864], [221], [322], [705, 850], [521, 809, 827, 926], [284, 453], [993], [912], [728], [343], [], [575], [], [178], [400, 667], [670], [431], [], [955], [148], [329, 126], [836, 837, 869], [757], [68, 58], [775], [625], [129], [331], [138], [661], [969], [], [320], [621], [893], [603], [223], [505], [773], [], [913], [573, 479], [314, 126], [459, 445], [], [693], [805], [360], [913], [171], [615], [40], [785], [868, 968, 504], [72], [388], [23], [417], [793], [581, 479], [992], [853], [], [882], [317], [834, 630], [428], [503], [], [833], [569], [292], [309, 599], [938], [940], [695], [786], [104], [218], [314], [777], [398], [773], [498], [269], [], [175], [438, 728], [963], [475], [857], [912, 348], [], [138], [644], [851], [929, 452], [135], [116], [76], [31], [192], [271], [412], [790], [711], [205], [366], [995], [311], [627], [987], [581, 479], [], [321], [], [715], [], [357], [214], [840], [247], [], [191], [714, 679], [928], [140], [80], [232, 247], [606], [610], [486], [800], [568], [747], [527, 916, 664, 508], [288], [255], [915], [782, 664], [453, 454, 624], [463], [983], [583], [324], [652], [145], [385], [728, 636], [695], [295], [692], [209], [526, 799], [], [260], [204], [114], [228], [675], [782, 664], [347], [34], [780], [397], [314], [], [78], [114], [637], [865], [727], [], [417], [], [491], [891], [], [810, 
878], [530], [262], [607], [531, 692], [391], [883], [104], [614], [453], [907, 440], [916], [254], [966], [], [502], [9], [659], [107], [953], [105], [749], [79], [376], [601], [861], [690], [942], [140], [638, 639], [326], [433], [515, 402], [342], [484], [915], [366], [883], [716], [983], [660], [673, 742, 526, 527, 782, 664, 508], [797], [343], [894], [558], [970], [548, 851, 598, 632], [830], [481], [945], [546, 402, 819], [991], [927], [521], [309], [133], [414], [327], [413], [70], [352], [160], [462], [918], [673, 526, 527, 664, 508], [711], [264], [274], [424, 423], [653], [335], [754], [465, 413], [], [544], [747], [893], [463], [869, 885, 568, 894], [433, 691, 983, 570], [594], [706], [647, 332], [441], [], [836, 837], [530], [84], [943], [829], [], [760], [], [396], [984], [629], [180], [229], [172], [520], [], [39], [207], [281], [356, 358], [297], [841], [106], [776], [], [225], [312, 311], [343], [405], [362], [993], [770, 842, 610], [870], [698], [220], [983], [424], [400, 667], [434], [748], [], [376], [203], [929], [887], [86], [], [47], [856], [575], [948], [751], [700, 999], [], [205], [284], [121], [442], [235], [622, 759], [541, 542], [537], [681, 620, 526, 527, 782, 664, 508], [991], [525], [926], [87], [72], [], [588, 790], [788], [375], [547], [739], [604], [777], [208], [], [236], [65], [132], [159], [], [276], [997], [129], [647], [637], [], [21], [489, 429, 981], [849], [868, 923], [208], [284], [960, 827], [773], [652, 413], [484, 871], [138], [613], [149], [358, 359], [454], [364], [429], [653], [677, 783], [308], [654], [850], [896], [146], [914], [45], [27], [690], [918], [734], [855], [423], [288], [33], [66, 68], [958], [481, 605], [581, 479], [990], [789], [948], [376], [279], [649], [588], [136], [244], [191], [472], [304], [], [320, 319], [629], [83], [265], [933], [259], [854], [736], [799], [284], [505], [172], [299], [543], [], [456, 489], [256], [], [205], [72], [903], [188], [85], [71], [545], [612], [982], [616], [475, 
815], [505], [894], [815, 126], [776], [], [117], [180], [76], [], [858], [518], [622], [745, 851, 598], [946, 309], [445], [92], [127], [294], [472, 693], [570], [808, 879], [], [547], [595], [886], [551], [31], [966, 907, 572], [385], [214], [97], [491], [71], [652], [182], [505, 827], [152], [548], [661], [896, 943], [170], [165, 237], [671], [993], [258], [619], [619, 846, 504], [796], [798], [40], [649], [244], [912], [326], [280], [434], [36], [234], [958], [541, 542], [960], [659], [791], [908], [6, 976], [714], [150], [242], [187], [238], [987, 998, 463], [608, 652, 465, 597, 413], [650, 541, 819, 822], [395], [677], [329], [82], [404], [997, 947], [471], [58], [44], [33], [931], [333], [304], [974], [], [559], [280], [905, 721, 831], [555], [384], [82], [860], [592], [], [642], [995], [570], [923], [315], [762, 923], [513, 875], [868, 923, 521, 809, 926], [257], [801], [211], [580], [798], [227], [359], [746], [566], [660, 977, 978], [489], [247], [555, 247], [453], [565], [531], [762], [652], [43], [253], [443], [544, 469], [323], [702], [319], [485, 632], [820], [751, 573, 479], [972], [339], [15], [116], [111], [31], [989], [159], [973], [128], [525], [692, 790], [66], [565], [30], [212], [638], [], [456], [485, 851, 632], [225], [253], [807], [607], [467], [6], [641], [881, 579], [297], [492], [554], [418], [860], [379], [537], [67], [169], [673, 681, 620, 905, 526, 508], [803], [853], [751, 479], [757], [642], [], [194, 175], [984, 985], [562], [915], [675], [937], [71], [61], [288], [592], [711], [106], [59], [], [477], [873], [860], [79], [496], [435], [], [], [185], [331], [493], [232], [637], [31], [172], [469, 567, 505], [659], [670], [657], [193], [944], [559], [937, 941], [460, 437], [924], [326], [589], [228], [682], [907, 440], [764, 413], [7], [168], [570], [673, 681, 810, 620], [167], [12], [229], [907], [89], [896, 285], [514], [621], [], [391, 758], [], [670, 518], [671], [185], [599], [343], [326], [624, 884], [594], [306], [965], [581], 
[210], [410], [90], [485], [261], [584], [798], [141], [280], [51], [774], [320], [358, 360], [260], [456], [49], [810, 508], [551, 629, 631], [621], [295], [946], [], [821, 444], [606], [331], [711], [591], [], [333], [227], [329], [37], [948], [906, 834, 501, 630], [388], [789], [638, 639], [636], [203, 186], [246], [638, 639], [494], [110], [136], [154], [626], [866, 661], [484], [620, 681], [85], [425], [151], [94], [24], [380], [594], [590], [144], [488, 778, 600], [463], [49, 50], [193], [132], [201], [137, 975], [431, 281], [867, 517, 536, 510], [34], [], [218], [84], [97], [933], [320, 319], [599], [709], [698], [818], [255], [814], [905], [211], [711], [782, 851], [409, 892], [], [809, 923], [585], [588, 790], [554], [970], [34], [117], [673, 526, 527, 664, 508], [470], [101], [96], [371, 382], [228], [335], [414], [327], [126], [265], [], [618], [720], [803], [357], [582], [], [], [713], [851], [479], [431], [548], [721, 831], [330], [842, 764], [591], [236], [589], [505], [106], [228], [503], [713], [331], [651], [222], [149], [], [284], [810, 878], [707], [150], [467], [547], [850], [964], [586], [630], [180], [708], [342], [66], [], [874], [558], [], [195], [950], [20], [524, 461], [645], [514, 655], [169], [504], [612], [733], [965], [157], [382], [212, 251], [40, 46], [476], [166], [578, 982], [394], [187], [449, 536], [19], [752, 852], [809], [825], [447], [745], [208], [545], [407], [670, 518], [802], [941], [140], [200], [267], [73, 74], [985], [593], [], [387], [238], [826], [741, 697], [721], [642], [167], [930], [915], [585], [573], [546], [310], [927], [538, 668], [71], [887], [810, 878], [551, 629], [351], [44], [623, 784], [738], [160], [561], [164], [461], [136], [284], [86], [], [93], [], [835], [755], [57], [537], [243, 254], [923, 959], [248, 250], [], [], [], [974], [295], [90], [975, 698], [979], [719], [900], [710], [302], [449, 536, 557, 733], [383], [434, 435], [579], [276], [773], [280], [649], [338], [824, 735], [865], [431, 850], 
[144], [834, 435], [305], [978], [211], [538], [406], [676], [677], [389], [765], [819], [564], [992], [718], [792], [347], [870], [874], [371], [267], [795], [421, 981], [758], [33], [856], [60], [382], [985], [500], [605], [979], [770], [375], [6], [666], [884], [300], [702], [274], [789], [877], [479], [], [673, 681, 620, 526], [643], [340], [732], [842, 433, 638, 639], [145], [809], [], [849], [687], [397], [237], [839], [763], [487, 590], [222], [801], [616], [294], [482], [327], [532, 762], [], [881, 579], [807], [926], [781], [836, 837], [534], [869], [356], [514, 836, 837, 869, 501, 636], [924], [574], [494], [401], [588], [825], [266, 267], [7], [349], [635], [484], [893], [652, 847], [], [345, 690, 462, 463], [743], [619, 846], [842], [], [211], [9], [], [], [910], [309], [139], [906], [73], [930, 907, 470], [934, 923], [40, 46], [759], [152], [397], [936], [22], [], [963], [773], [630], [352], [954], [684], [918], [10], [191], [653], [242, 243], [], [561], [88], [145], [198], [147], [43], [773], [913], [23], [43], [264, 263], [915], [60], [930, 931, 415], [740], [84], [68], [479], [652, 465, 413], [745], [253], [828], [], [699], [254], [702], [41], [922], [457], [379], [83], [479, 511], [160], [796], [203], [447], [494], [770], [944], [839], [834, 836, 837, 457], [871], [236], [62], [184], [260], [153], [715], [144], [176], [], [966, 532, 470, 762, 923, 572], [262], [578, 903], [760], [605], [797], [64], [154], [570], [872, 759], [301], [659], [294], [575], [990], [552], [31], [336], [884], [954], [758], [901], [], [315], [873], [549], [406], [517], [151], [223], [920, 405], [898], [616, 913], [416, 602], [227], [918], [215], [565], [841], [991], [962], [648], [769], [214], [168], [483], [504], [284], [593], [337], [966, 907], [415], [665], [968, 532, 762, 923], [283], [98], [457], [], [690, 345], [468], [114], [608, 836, 837], [382], [344], [877], [569], [608, 617, 438], [459, 445], [211], [383], [977], [], [60], [171, 173], [280], [735], [949], [414], 
[792], [327], [343], [45], [830, 678], [127], [784], [327], [438], [949], [641], [252], [993], [340], [773], [111], [956], [656], [257], [803], [104], [673, 742, 526, 527, 782, 664, 508], [995], [379], [814], [673, 742, 508, 526, 664, 782, 412], [798], [635], [582, 631], [30], [822], [512], [689], [888], [349], [533], [537], [940], [537], [399], [429], [334], [694], [14], [52], [677], [99], [62], [412], [469], [834, 650, 402], [471], [199], [89], [321], [976, 977, 978], [46], [638, 639], [712], [401], [362], [450], [349], [805], [45], [433], [566], [987, 998], [85], [945], [928, 923, 960], [160], [217], [17], [659], [798], [769, 533, 824], [890], [], [359], [663], [179], [485], [353], [962], [51], [294], [], [309], [94], [483], [984], [964], [716], [500], [726], [718], [], [582, 519, 945, 948, 950], [272], [714], [894], [418], [723], [933, 923], [282], [974], [530], [962, 813, 567, 505, 827], [176], [627], [836, 875], [], [363], [], [852], [19], [177, 172], [926], [739], [348], [507], [362], [400], [991], [605], [66], [409], [383], [622, 759], [2, 3], [963], [478, 592], [580], [581, 479, 627], [845], [49], [216], [984], [148], [], [729], [385, 716], [425], [990], [622], [809, 618, 926, 959], [790], [182, 607], [534], [], [560], [350], [], [376], [92], [699], [801], [671], [417], [90], [484], [916], [572], [57], [877], [625], [479], [810, 508], [262], [118], [426], [159], [1], [905], [283], [508], [553], [879, 638, 639], [724], [389], [332], [250], [739], [475], [192], [91], [715, 652], [438], [396], [61], [401], [362], [920], [533], [327], [517], [145], [364], [572], [771], [689, 578], [518], [821], [296], [107], [582, 939, 940, 943], [297], [148], [829], [608], [916], [793, 830], [226], [941], [178], [845], [665], [159], [497], [535], [641], [839], [366], [514], [758], [673, 664, 526, 527, 782, 508], [756], [800], [855], [518], [], [], [], [905], [722], [319], [744, 652, 847, 657], [805], [771], [753, 282], [713], [872, 622, 759], [347], [808], [606], [530], 
[867], [76], [977, 978], [958], [850], [], [], [174], [707], [604], [424, 423, 636], [702], [559], [343], [662], [404, 895], [502], [931], [268, 179], [849], [418], [909, 923, 926], [501, 665], [18], [424], [423], [291], [568], [581], [708], [481], [187], [803, 555], [192], [595], [491], [737, 455], [], [641], [782, 664, 281, 285], [93], [783], [], [603], [731], [713], [], [84], [268], [567], [556], [618], [568, 655], [901], [321], [155], [981, 429], [692], [159], [318], [866, 575], [669], [201], [181], [968], [668], [641], [942], [263], [98], [958], [143], [681, 620, 526], [869], [927], [437], [662], [537], [358], [270], [248], [833, 913], [685], [419, 719], [404], [923, 934, 933], [928, 572], [], [695], [], [486], [962], [563], [26], [758], [752], [336], [752], [610], [95], [104], [], [633], [400, 667], [495], [533], [192], [8], [224], [594], [533], [884], [909, 532, 883], [518], [482], [194], [168, 159], [841, 894], [105], [31], [984], [678], [652], [669], [737, 901, 440], [682], [857], [310], [], [738, 834, 906], [], [326], [93], [913], [427], [583], [372], [878], [375], [242], [418], [643, 454, 917], [15], [962], [397], [316], [548], [520], [93], [468], [548, 851, 789, 632], [101], [38], [446], [481, 482], [], [568, 748], [440], [473], [150], [488], [95], [431], [578], [64], [510], [895], [], [597], [724], [399], [334], [35], [951], [288], [182], [177], [417], [820], [205], [], [654], [415], [123], [201], [207, 208], [], [241], [204], [615], [511, 581, 479], [512], [], [494], [89], [276], [], [713], [850], [999, 700], [699], [571], [453, 606], [732], [754], [285], [452], [], [139], [15], [755], [240], [851], [812], [723], [65], [139], [753], [221, 206], [581, 717], [972], [306], [339], [653], [80], [422], [390], [521], [514], [], [182], [611], [542], [342], [646], [311], [], [230, 478], [316], [402], [858], [612], [645], [330], [544, 521], [497], [584], [521], [607], [531], [], [593], [813], [100], [157], [364], [616], [521], [724], [440, 441], [199], [241, 
238], [890], [523], [532], [585], [263], [662], [687], [349], [751], [90], [237], [781], [909, 828, 926], [168], [880], [956], [619], [486, 401], [523], [603], [846, 883, 532], [632], [187], [433, 693], [579, 881], [770, 979], [469], [718], [64], [589], [202], [238], [293], [229], [863], [], [416], [922], [892], [418], [573, 479], [515], [262], [958], [55], [303], [787], [529, 793, 831], [926], [], [141], [902], [9], [], [948], [845, 531], [746], [423, 424, 892], [462], [318], [314], [920], [759], [995], [484], [275, 276], [658], [541], [831], [690, 346], [205], [583], [424], [29], [87], [824], [18], [521], [513, 683, 558, 432], [451], [361], [745], [162], [316], [100], [438], [861], [666], [770], [605], [543], [129], [528], [782, 664], [583], [722], [724], [959], [177, 170], [888], [495], [189], [109], [54], [243], [531], [473], [659], [809, 969], [14], [594], [67], [193, 191], [58], [770], [208], [547], [232], [834, 836, 837, 906], [762], [423], [877], [274], [163], [928], [347], [54], [589], [407], [671], [740], [553, 493], [391], [962, 987, 923], [133], [836, 837, 629], [353], [], [554], [192], [633], [855], [232, 250], [307], [531], [713], [64], [311], [490], [62], [329], [694], [587], [102], [259], [378], [627], [], [49], [421], [82], [547], [203], [], [626], [993], [888, 839], [566], [62], [272], [891], [450], [319], [], [163], [985], [668, 562], [961], [681, 620, 526], [296], [], [428], [802], [930, 966, 907], [192], [522], [736], [240, 241, 238, 239], [314], [933], [], [552, 283], [535], [9], [387], [638, 639], [875], [519], [], [443], [], [], [704], [453], [811], [383], [116], [474], [205], [425], [632], [152], [], [160], [935], [334], [932], [705, 537, 248], [79], [981, 429], [723], [681, 620, 526, 664, 508], [349], [841], [860], [806], [], [406], [39], [389], [155], [238], [984], [654], [413], [676, 246], [872], [532], [885], [723], [111], [264], [546, 818, 819, 541], [426, 635], [454], [75], [98], [205, 246], [270], [992], [537], [510], [843], [271], 
[892], [321], [750], [565], [750, 735], [847], [924], [51], [575], [142], [574], [203], [683], [44], [626], [490], [208], [589], [878], [377], [799], [11], [275], [115], [265], [692], [321], [938], [496], [200, 244], [285], [], [179], [217], [616], [], [660], [698, 538], [708], [672], [664, 851], [131], [204], [362], [582, 943], [219], [868, 935, 809, 923], [273], [402], [571], [603], [626], [546, 402, 819], [957], [], [827], [496], [946], [187], [881], [281], [995], [475], [280], [146], [551], [425], [892], [618], [], [34], [390, 149], [758], [595, 730], [960], [307], [87], [819], [765], [727], [269], [175], [673, 681, 268, 620, 508], [683], [355], [841], [353], [262], [195], [383], [810, 878], [203], [328], [899], [733], [827], [367], [718], [986], [605], [757, 535], [618, 909, 827], [907, 440], [4], [521], [923], [], [674], [491], [758], [243], [806], [23], [679], [573], [816], [780, 914], [466], [644], [188], [], [873], [619], [419], [884], [567], [259, 261], [735], [801], [118], [424, 589], [521, 618, 809], [910], [181], [204], [], [741, 765], [44], [612], [313], [531], [529], [377], [902], [973], [921, 917], [934], [339], [803], [609], [820], [119], [676], [505], [110], [540], [682], [271], [488], [843], [629], [174], [651, 504], [], [874], [701], [667], [27], [577], [201], [31], [979], [927], [836, 837, 970], [435, 281], [918], [526], [38], [857], [476], [605], [628], [539, 316], [572], [233], [771], [666], [867], [596], [], [164], [388], [992], [412], [802], [988], [877], [268], [523], [87], [517, 600], [513, 650, 819], [], [569], [970], [219], [86], [320, 319], [44], [436], [962, 923], [2], [178], [424, 423], [853], [525], [], [589], [93], [190], [931, 868], [901], [722], [], [794], [809, 923, 925], [905], [821, 693], [224], [374], [775], [98], [886], [752], [139], [578, 585, 982], [22], [57], [460, 437], [810, 878], [287], [988], [451], [587], [361], [459, 445], [479], [822, 542], [100], [], [647], [574], [546], [], [604], [629], [557], [683, 558], [654, 
734], [170], [629], [397], [297], [333], [252], [597], [823], [324], [421], [277], [834, 432], [858], [280], [430], [392], [941], [548, 851], [494], [158], [], [515], [89], [583], [266], [719], [467, 499], [264], [628], [788], [], [9], [569], [182], [162], [764, 413], [43], [760], [], [364], [920], [871], [351], [45], [], [591], [], [115], [141], [32], [516, 431, 797], [987, 998], [40], [686], [613], [352, 138], [576], [451], [539], [557], [908], [235], [142], [90], [659, 700], [300], [343], [], [409, 826], [718], [557], [826], [725], [522], [602], [63], [827], [406], [], [481], [777], [345, 730], [270, 279], [923], [327], [387], [779], [113], [867], [467], [989], [203], [108], [372], [474], [508], [760, 737, 886], [776], [61], [83], [220], [54], [721], [195], [765], [355], [644, 470], [93], [597], [763], [135], [608], [230, 232], [889], [376], [184], [], [111], [17], [364], [826], [53], [496], [797], [263], [505], [105], [717, 733], [639], [681, 620, 508], [762], [], [755], [49, 50], [897], [450], [240], [850], [693, 472], [880], [672], [217], [337], [948], [142], [989], [740, 440], [156], [591], [950], [204], [697], [234], [], [297], [926], [978], [25, 28], [324], [385], [454], [762], [673, 664, 526, 527, 508], [7, 8], [342], [159], [592], [806], [818], [613], [950], [900], [142], [878], [462], [501], [25], [915], [942], [373], [109], [87], [953], [364], [487, 619, 526, 846, 504], [62], [849], [605], [390], [4], [153], [340], [836, 837], [899], [606], [288], [102], [174], [587, 784, 477], [791], [557, 858, 738], [237], [405, 538, 603], [913], [436], [951], [10, 15], [208], [671], [670], [823], [154], [366], [40], [880], [672], [244], [392], [740], [830], [], [932], [992], [650], [811], [478], [624, 453, 454], [280], [15], [631], [351], [279], [963, 966, 532, 762, 923, 572], [902], [912, 716], [974], [387], [608, 741], [670], [111], [738], [286], [738], [944], [54], [294], [652, 764], [723], [112], [361, 759, 794], [], [246], [777], [777, 499], [19], [462], [92], 
[564], [109], [449], [254], [727], [428], [168], [415], [590], [479], [615], [66], [524, 461], [602], [990], [990], [], [733, 127], [983], [573], [474], [147], [], [539], [468, 407], [981], [309], [996], [160], [373], [663], [620, 508], [466], [85], [660], [792], [865, 850], [242, 180], [281], [502], [789], [524], [803], [682], [104], [729], [], [228], [259], [252], [339], [417, 866, 595], [208], [], [776, 650, 819], [57], [], [608], [643], [], [232, 248], [738], [90], [808], [820], [999], [640], [610], [611], [294], [617], [], [834, 906, 893], [784], [334], [403], [931], [389], [289], [188], [], [], [707], [987, 998], [503], [576], [524, 461], [619, 846], [338], [524], [], [267], [449], [15, 91], [277], [111], [73, 815], [613], [383], [143], [496], [968, 504], [849, 827], [365], [239], [666], [109], [550, 521, 651], [888], [77], [], [661], [968, 114, 504], [512], [672, 970], [490], [748], [272], [658], [962, 942], [373], [463], [140], [809, 567], [568, 825, 608], [620, 508], [], [560], [932, 415], [853], [745], [713], [981, 429], [679, 488, 695], [106], [536, 540, 510], [578, 689, 982], [50], [524, 461, 715], [263], [560], [525], [117], [732], [826], [137], [284], [608, 423], [64], [795], [], [185], [571], [89], [210], [339], [244], [151], [670], [711], [101], [50], [213], [715, 524], [708], [676, 269], [534], [479, 751], [], [520], [440], [977], [], [948], [], [70], [890], [489], [358], [868], [823], [171], [921, 764], [779], [887, 497, 406], [967], [370], [780], [10], [714], [890], [81], [92], [785], [587, 477], [737, 582, 440], [416], [138], [452], [444], [532], [9], [986], [667], [395], [897], [423], [89], [339], [764], [709, 836, 837, 767], [672], [370], [618, 469], [10], [991], [971], [67], [616], [281, 282], [659], [909], [832], [834, 906, 400], [837, 582, 954], [927], [699], [458], [110], [867], [], [690, 345], [335], [], [150], [221], [580], [308], [544], [271], [176], [316], [102], [], [346], [234], [714], [552], [828], [813], [26], [], [269], [232], 
[522], [437], [249], [708], [], [836, 542, 822], [600], [446], [125], [857], [278], [], [418], [655], [162], [477], [623], [970], [508], [697, 478], [756], [985], [], [593], [338], [13], [57], [230, 231], [649], [987, 943], [], [860], [193], [290], [318], [675], [360], [436, 479], [], [589], [238], [772, 488], [481], [947], [441], [770, 674], [491], [5], [86], [424], [100], [537], [332], [596], [783], [43], [563], [117], [305], [259], [869, 457], [687], [988], [186], [804], [99], [213], [554], [933], [400, 667], [318], [652], [619], [], [123], [988], [829], [280], [223], [578], [818], [534], [230], [552], [673], [672, 669], [698], [308], [144], [211], [222], [916], [8], [234], [301], [321], [8], [487], [44, 633], [346], [514], [640], [803], [882], [571], [820], [494], [673, 620, 527, 664, 508], [70], [519], [166], [582], [590], [19], [316], [524, 461], [80], [724], [931], [], [127], [888], [756], [458], [688], [4], [20], [773], [398], [203], [395], [795, 615], [735], [905], [23], [631], [772], [555], [263], [64], [796], [467], [727, 538], [222], [], [], [277], [358], [471], [328], [832], [289], [741], [399], [112], [867], [10], [22], [832], [234], [647, 332], [896, 804], [], [241, 238], [], [326], [523], [], [12], [65, 973], [477], [370], [681, 620, 526, 664], [267], [728], [834], [615], [920], [553], [201], [822], [789], [710], [], [715], [387], [458], [418, 623], [95], [898, 762, 572], [485, 526], [363], [380], [74], [538, 858], [392], [769, 438], [389], [930], [563], [426], [29], [798], [844], [696], [470], [194], [383], [], [922], [198], [880], [543], [291], [40, 46], [953], [980], [297], [310], [183], [849], [174], [], [433], [679], [835], [725], [546, 806], [156], [235], [727], [418], [260], [529], [517], [21], [553], [97], [771], [780], [945], [], [388], [822], [605], [891], [207], [], [319], [943], [672], [643, 903], [905, 532, 799], [208], [292], [478], [156], [], [89], [883], [545], [875], [448, 637], [230], [520], [184], [190], [561], [965], [317], 
[759], [35, 37], [], [99], [993], [2], [868], [692], [76], [244], [169], [646], [903], [], [205], [772], [185], [145], [80], [936], [236], [21], [263], [873], [696], [960, 910], [582], [994], [], [464], [193, 189], [419], [486], [342], [831], [199], [1], [735], [], [807], [809, 925], [572], [677, 587, 783, 784], [251], [778], [311], [325], [777], [768], [143], [311], [45], [], [420], [609], [961, 499, 728], [644], [881], [913], [130], [16], [472], [836, 837, 445], [862], [675], [187], [896], [884, 501], [695], [610], [391], [696], [867], [779], [167], [904], [812], [761], [652, 597, 764, 413], [835], [], [735], [126], [634], [998], [927], [0], [540], [659, 556, 827], [101], [48], [586], [811], [187], [131], [442], [576], [484, 536], [842], [738], [393], [367], [], [973], [284], [467], [58], [38], [985], [720], [644], [90], [97], [260], [38], [915], [479], [561], [616], [497, 406, 857], [68], [595], [344], [303], [490], [59], [842], [829], [584], [356], [544], [673], [80], [60], [253, 846], [504], [188], [902], [834, 906], [329], [624], [0], [795], [865], [697, 610], [641], [389], [547], [20], [235, 174], [754], [], [608], [165], [381], [0], [978], [658], [650, 402, 819], [209], [432], [561], [241], [], [426], [117], [295], [662], [382], [236], [637], [394], [793], [358], [544], [305, 302], [165], [427, 756], [181], [918], [645], [585], [808], [69], [993], [303], [135], [165], [87], [324], [679, 455], [814], [198], [918], [223], [240, 238], [370], [462], [979], [29], [4], [122], [], [338], [411], [211], [772], [557], [879, 242, 850], [531], [688], [5], [251], [761], [158], [491], [591], [384], [225], [571], [113], [259], [], [18, 86], [815], [955], [133], [294], [63], [795, 703], [483], [265], [910], [292], [140], [905], [270, 207], [535], [205], [603], [537], [804], [553], [165], [654], [155], [164], [], [996], [913], [971], [42], [714], [182], [54], [240, 241, 238, 239], [938], [744, 657], [908, 404], [240, 241], [318], [784], [185], [591], [424], [920], [375], 
[492], [471], [687, 406], [238, 241], [501], [], [327], [774], [41], [718], [], [133], [89], [736], [79], [627, 795], [], [768], [417], [769, 418, 772, 623], [595], [], [753, 894], [135], [416], [77], [63], [495], [766], [], [972, 825], [892], [997, 947], [588], [895], [692], [952], [54], [938], [909], [288, 290], [732], [892, 409], [383], [297], [], [731, 861], [64], [80], [98], [766, 341], [204], [257, 222], [524, 461], [933], [648], [242], [329], [478], [355, 912], [535], [35], [311], [884], [464], [760], [527, 664, 508], [], [453], [386], [800], [191], [716, 765], [329], [698], [578, 982], [851, 548], [33], [710], [14], [161], [105], [73], [854], [410], [102], [], [], [136], [137], [841], [310], [400, 667], [47], [506], [572], [270], [85], [764], [692, 969, 588, 728], [21, 22], [325], [798], [33], [141], [109], [673, 681, 620, 526, 527, 664], [794], [500], [567], [335], [506], [829], [33, 983], [45], [965], [550], [447], [510], [933], [976], [109], [643], [987, 998], [400, 667], [371], [686], [25], [], [864], [37], [681, 620], [196], [744], [473], [849], [612], [542], [675], [58], [], [612], [253], [805], [], [368], [412], [647], [768], [93], [260], [], [200], [277, 278], [775], [902], [382], [36], [357], [198], [568], [374], [171], [105], [762], [474], [166], [], [846], [855], [25], [727], [893], [432], [820], [801], [962, 923], [405], [721], [2], [911], [16], [652, 465], [], [741], [849, 725], [571], [582], [122], [555], [909, 567], [957], [360], [38], [332], [760, 664], [855], [644], [930], [591], [610], [66], [728], [524, 461], [511], [253], [769], [224], [694], [817, 479], [595], [949, 953], [692], [683, 558], [212], [294], [615], [626], [441], [57], [233], [], [363], [821], [], [], [149], [], [207], [842, 500], [679], [756], [64], [676], [399], [710], [852], [548], [946], [320], [884], [389, 391], [404], [945], [485], [392], [], [83], [648], [864], [466], [25], [], [748], [450], [239], [113], [666], [755], [91, 14], [0], [323], [393, 108], [547], [809, 
923, 926], [52], [42], [428, 670], [9], [471], [736], [476], [685], [458, 401], [420], [300], [], [625], [986], [553, 493, 883], [915], [164], [122], [], [545], [42], [780], [963, 964, 567, 572], [17], [565], [458], [761, 223], [669], [95], [], [224], [886], [27], [196], [353, 372], [782, 664, 810, 508], [456], [860], [217], [183], [281], [975], [71], [763, 597], [634], [881], [937, 923, 963], [857], [738, 949], [527, 664, 508], [92], [21], [716], [229], [67], [384], [752], [], [544], [98], [983], [273], [881], [400, 667], [754], [680], [139], [], [329, 108], [554], [756], [544], [173], [394], [506], [986], [140], [768], [776], [799], [865], [], [481], [272], [880], [810, 878], [532], [125], [350], [39], [950], [852], [41], [911], [528], [759, 622], [700], [844], [477], [90], [554, 628], [720], [652], [737, 455, 760, 440], [499], [104], [], [964], [459], [557], [41], [795], [], [67, 58], [833], [322], [490], [948], [964], [507], [429], [926], [917], [84], [716], [716], [411], [74], [707], [642], [472], [280], [61, 62], [262], [342], [618], [7], [670], [341, 342], [841], [622, 759, 478], [195], [236, 237], [], [517], [226], [357], [864, 717], [938], [450, 462], [39], [134], [763], [83], [900], [323], [353], [739], [19], [668], [317], [865], [], [222], [896], [19], [], [654], [319], [463], [30], [905], [879], [27], [548, 851], [168], [122], [977], [882], [67], [949, 953], [96], [584], [57], [716], [711], [], [533], [420, 559], [232], [849], [866], [793], [836, 837, 151], [981], [385, 101], [581, 818], [162, 167], [396], [367], [281], [964], [898], [530], [619, 750, 846, 721], [143], [], [509], [52], [206], [552], [574], [707, 886], [275], [429], [448], [512], [515], [], [709], [226], [543], [477], [973], [544], [853], [265], [812], [306], [], [519], [640], [62], [59], [392], [548], [827], [372], [364], [835], [5, 6], [297], [0], [], [328], [497], [946], [721], [138], [593], [904], [93], [862], [769], [403], [487], [969], [488, 635], [898], [53], [39], [358, 359], 
[686], [666], [493], [391], [771], [7], [864, 919, 733], [124], [380], [928, 923, 960], [78], [174], [230, 231], [437], [127], [88], [608, 718, 975], [178], [135], [66], [64, 55], [484], [], [586], [975], [327], [543], [789], [179], [43], [518, 671], [497], [113], [238], [354, 676], [968], [289], [891], [849, 725], [389], [791], [652, 465], [554], [457, 338], [722], [22], [382], [891], [948], [377], [686], [985, 324], [374], [284], [915], [], [954], [561], [711], [710], [], [789], [], [480], [171], [382], [628], [893], [115], [642], [488], [368], [256], [636], [158], [714], [], [726], [504, 985], [], [137], [668], [581], [133], [962, 923], [631], [708, 698, 671], [125], [907, 440], [772], [86], [], [118], [288, 290], [487], [438], [50], [28], [379], [108], [], [696], [83], [626], [253], [673, 681, 620, 664, 526, 527, 508], [112], [396], [770], [865, 850], [152], [847], [936], [22], [593], [776], [562], [769], [549], [339], [793, 697], [41], [654], [], [863], [888], [370], [962, 762, 966, 532, 923, 572], [146], [489], [51], [755], [491], [526], [940], [9], [383], [575], [181], [347], [496], [783], [513, 683, 875, 558], [600], [789], [960, 923], [529], [372], [815], [295], [566], [189], [845], [170], [589], [377], [], [750, 655], [139], [684], [595], [869, 618, 824], [842, 445], [517], [223], [903], [833], [800], [250], [], [], [454], [163, 168], [299], [432], [719], [417], [62], [980], [575], [], [850, 220], [156], [298], [367], [167], [194], [407], [547], [481], [36, 58], [91], [700, 999], [203], [633, 937, 333], [354], [678, 636], [315], [187, 700], [948], [849], [543], [807], [351], [918], [328], [453, 633], [834, 906, 630], [802, 518], [677], [520], [236, 237], [924], [129], [21], [595, 866], [832], [], [], [646], [182], [46], [367], [985], [995], [760], [384], [556], [722], [], [412], [], [10], [354], [168], [545, 589, 861], [96], [25], [101], [150], [225], [472, 693], [235], [194], [294], [491], [109], [416], [150], [249], [140], [285], [867], [787], [886], 
[986], [437], [244], [482, 754], [52], [497], [], [811], [305], [368], [302], [160], [290], [469], [], [531], [565], [677], [933], [656], [756], [223], [834, 906], [13], [582, 692, 790], [983], [889], [], [92], [35], [254], [685], [552], [578], [504], [781], [430], [696], [690], [928], [898, 836, 837, 774, 842, 502], [288], [560], [208], [453], [153], [995], [562], [216], [267], [523], [252], [566], [893], [467, 341], [108], [72], [545], [597], [10], [659], [475], [228], [736], [807], [883, 532, 762, 923, 572], [930], [355], [562], [847], [872, 447], [], [365], [64], [], [271], [145], [613], [325], [261], [654], [670], [301], [412], [994], [747], [470], [], [404], [859], [682, 562], [853], [997], [232], [78], [399], [922], [], [946], [371], [855], [615], [734], [601, 578, 689], [212], [], [864], [165], [47], [113], [418], [741], [735], [738, 944], [742], [789], [651], [572], [142], [299], [110], [502], [916], [171], [484], [673, 526, 527, 664, 508], [738], [823], [235], [97], [990], [595], [459], [930], [104], [555], [594], [624, 454], [126], [569], [827], [1], [343], [18], [], [139], [643, 474], [420], [458], [], [853], [], [886], [698], [939, 943], [812], [342], [74], [132], [82], [279], [228], [802], [947], [425], [844], [399], [25], [381], [394], [63], [297], [], [477], [21], [552, 151], [135], [338], [424, 423], [482], [876, 435], [524, 461, 501], [498], [178], [84], [602], [328], [900], [103], [703, 323, 998], [281], [588], [145], [441], [440], [1], [450], [404], [680], [864, 627], [231], [122], [29], [900], [97], [624], [546], [171, 268], [894], [887, 406], [508], [25, 28], [723], [671], [148], [269], [889], [514, 464], [557], [709, 748], [419], [198], [160], [166], [836, 837], [188, 189], [68], [359], [462, 792], [], [160], [454, 917], [487], [762], [583], [97], [46], [630], [877], [652], [109], [400, 667], [110], [254], [754], [253], [296], [293], [359], [448], [117], [99], [267], [606], [990], [11], [531], [79], [507], [802], [719], [488], [], [798], 
[953], [323], [398], [130], [226], [311], [65], [287], [901], [144], [361], [218], [650], [673, 681, 620, 526, 527, 664, 508], [130], [621], [739], [577], [465, 413], [530], [106], [122], [491, 634], [68], [256], [688], [836, 837, 869], [307], [758], [573], [687], [641], [792], [], [855], [99], [564], [309], [913], [700, 999], [451], [725, 505], [5, 6], [431], [420], [713], [190], [], [548, 851], [630], [722], [28], [646], [606], [428], [587, 784, 477], [91], [959], [718], [626], [668], [476], [500], [920], [555], [284], [524, 461], [33], [89], [991], [99], [188], [221], [476], [274], [607], [184], [304], [], [165], [299], [153, 266], [457, 920], [20], [75], [766], [489], [162], [376], [108], [818], [630], [535], [899, 725, 572], [630], [910], [833], [], [412], [], [893], [589], [938], [783], [384], [], [333], [716], [262], [701], [249], [240, 241], [221], [369], [], [133], [378], [437], [131], [698], [921, 692, 917], [623, 917], [619, 846, 721, 831], [889], [441], [549, 692], [70], [459], [784, 587], [147], [841], [318], [627], [343], [507], [489, 243], [391, 758], [911], [736], [884], [883], [991], [618], [758], [754], [873], [39], [379], [518], [261], [830], [569], [], [21], [451], [291], [72], [753], [489], [802], [531], [674], [698], [832], [157], [47], [], [888], [406], [778], [515, 836, 837], [694], [596], [], [424], [907, 470], [573], [225], [56], [850, 760], [465], [684], [850, 765], [950, 951], [365], [881], [43], [], [108], [61, 62], [952], [92], [82], [913], [503], [567], [589], [481], [246], [756], [53], [599], [793], [200], [244, 537], [58], [178], [904], [363], [988], [616], [126], [], [551], [83], [632], [400], [442], [], [70], [81], [716], [311], [391], [335], [965], [], [433], [685], [334], [343], [474], [395], [180], [14], [977, 978, 437], [761], [247], [152], [221], [973], [355], [341, 572], [374], [661], [412], [977], [558], [742], [133], [], [259], [895], [442], [105], [117], [530], [216], [847], [772], [805], [849], [], [44], [824], [298], 
[983], [340], [960, 931, 923], [903], [353], [162], [], [364], [843, 602], [348], [494], [485, 632], [402], [100], [352], [704], [4], [51], [855], [732], [176], [214], [], [849, 505], [107], [79], [730], [185], [757], [13], [844], [708], [624, 453, 454], [319], [79], [527, 592, 664], [215, 218], [943], [250], [992], [519], [533], [986], [922], [610], [633, 769], [21], [475], [309], [716], [42], [560], [222], [796], [514, 515], [325], [498], [216], [497], [591, 721, 885], [839], [378], [612], [893], [653], [956], [], [878], [374], [417], [569], [], [686], [328], [], [], [688], [725], [423], [], [65], [711], [761], [643], [987, 998], [720], [543], [], [303], [], [389], [235], [1], [86], [760], [38], [9], [612], [785], [780], [80], [50], [103], [939, 943, 945], [564, 750], [854], [857], [253], [113], [981], [178], [470], [119], [210, 852], [448, 637], [882], [526, 784], [651], [], [878], [351], [33], [771], [578], [735], [681, 620, 508], [], [4], [107], [311], [71], [270], [967, 504], [102], [246], [699], [408], [940], [], [510], [578, 689, 601], [182], [], [881], [946], [186], [683], [517, 540, 510], [946], [508], [630], [281], [323], [155], [297], [], [619, 846], [555], [882], [], [992], [613], [843], [796], [733, 541, 542], [546, 650, 402, 818, 819], [133], [465], [85], [436], [16], [731], [119], [441], [457, 834], [252], [529, 667], [982], [909, 567, 827], [619, 750, 846, 721], [360], [726], [822], [190], [108], [637], [417], [90], [172], [836, 837], [339], [764], [804], [90], [30], [234], [331], [600], [506], [960, 582], [419], [797], [620], [173], [744, 657], [135], [863], [813], [935], [824], [494, 442], [261], [787], [], [191], [781], [372], [753], [526], [148], [963, 945], [375], [770, 788], [296], [854], [908, 895], [488], [439], [121], [314], [101], [275], [618], [558], [582, 937, 938], [404, 895], [386, 101], [305], [733], [165, 234], [608, 428], [396], [], [88], [430], [625], [594, 579], [74], [309], [638, 639], [], [298, 63], [622], [821], [658], [617], 
[348, 349], [1], [694], [695], [809, 926], [785], [244], [951], [520, 431], [487, 605], [606], [406], [199], [624], [76], [609], [743], [933], [131], [548, 598, 632], [836, 837, 655], [762], [553], [254], [178], [197], [553], [88], [555], [6], [514, 515], [245, 183], [15], [488, 679], [], [717], [920], [267], [989], [999], [634], [313], [400, 667], [366], [839], [635], [851, 632], [713], [597], [71], [435], [888], [457, 834, 906], [787], [292], [400, 667], [808], [176], [975], [106], [748], [487, 590], [104], [30], [973], [818], [20], [866], [], [514, 655], [643, 306], [251], [518, 652, 465, 413], [597], [565], [407], [738], [21], [814], [883], [956], [365, 379], [431], [312], [872], [695], [621], [928], [187], [199], [625], [578], [179], [750, 414], [263], [972], [51], [367], [], [402, 881], [961, 963, 964], [655], [852], [887], [891], [809, 925, 923], [], [458], [702], [144], [], [205], [579], [708, 596], [114], [45], [717], [426], [821], [588, 790], [890], [33], [392], [325], [738], [145], [800], [511], [12], [518], [272], [398], [560], [853], [868, 532, 441, 762, 923], [], [88], [741], [563], [267], [545, 745, 619, 818, 831], [607], [970, 795], [795], [313], [701], [763], [169], [839], [351], [641], [], [833], [26], [], [644], [], [318], [517], [986], [], [], [321], [895], [7], [800], [454], [233], [850, 211], [153], [874], [740], [20], [323], [573], [946], [133], [207], [889], [492], [371], [905], [811], [576], [218], [562], [], [713], [920], [495], [346], [530], [294], [8], [343], [253], [713], [194], [959], [903], [661], [321], [245], [890], [694], [141, 142], [813], [270], [322, 946], [333], [714, 402], [], [486], [418, 563], [602], [456], [328], [956], [701], [], [691], [774], [824], [836, 837, 979], [141], [489], [557], [825], [382], [765], [476], [780], [476], [933], [697], [], [388], [945], [738, 653], [799], [358], [945], [446], [548, 664, 526, 527, 508], [277], [826], [350], [616], [244], [], [56], [644, 470], [44], [708], [351], [742], [989], [485], 
[767], [586], [306], [873], [236], [352], [567], [968, 504], [788], [18], [954], [404], [965], [728], [34], [], [990], [586], [205, 174, 223], [420], [415], [], [761], [649], [552], [972], [898, 692], [60], [974], [411], [699], [659], [271], [945, 939, 943], [53], [382], [76], [730], [944], [462], [573], [346], [328], [827], [153, 265], [315], [776], [908, 895], [578, 650, 818], [770], [452], [683], [319], [329], [747], [], [885], [], [713], [315], [10], [455], [119], [661], [744, 657], [901], [963], [533], [515, 655, 818, 731, 608, 630], [805], [555], [363], [513, 875, 402], [120], [391], [751, 479], [], [110], [245], [761], [682], [], [693], [301, 304], [403], [320], [462, 655], [495], [820], [481], [46, 47], [891], [656], [], [479], [156], [10], [666], [637], [563], [261], [264], [106], [895], [905, 619, 846, 831], [72, 815], [], [282], [851], [109], [304], [738], [634], [695], [400], [659, 940, 813], [780], [643, 570], [596], [938], [], [318], [495], [36], [790], [], [518, 665], [101], [487], [772, 949], [75], [994], [177], [929, 338], [313, 414], [517, 540], [865, 850], [287], [365], [631], [910], [822], [845], [554], [874, 779, 920], [24], [824], [763, 597], [953], [352], [650], [91], [51], [758], [102], [271], [481, 482], [929], [182], [234], [111], [154], [955], [162], [653], [150], [70], [514, 652], [604], [661], [635], [962], [211], [195], [603], [892], [], [772], [322], [838, 551, 629], [993], [393], [582, 945], [322], [349], [997, 588, 947, 790], [451], [583], [703], [167], [128], [136], [466], [34], [964, 937, 945], [683], [605], [625], [553], [405], [252], [789], [784], [846], [770, 543], [949], [145], [547, 716], [301], [90], [896, 905, 435], [499], [252], [896], [206], [814, 977, 978], [594], [369], [770, 830, 608], [442], [703], [100], [980], [66], [890], [715, 524, 461], [329], [827, 926], [28], [608, 489], [578, 689, 703], [8], [963], [224], [], [132], [674], [641], [740], [452], [301], [], [835, 708], [770, 791, 480, 502], [465], [684], [898], 
[972], [947], [635], [74], [], [334], [205, 213], [312], [883], [602], [222], [303], [299], [773], [305], [255], [923, 928], [665, 671], [], [364], [524], [296], [197], [336], [945], [386], [313], [942], [826], [823], [506], [357], [644, 532], [458], [376], [642], [194], [181], [], [45], [296], [118], [19], [386], [604], [752], [404], [66], [678], [572], [618], [147], [690], [295], [5], [245], [552], [683, 558, 432, 566], [152, 155], [748], [257], [588], [781], [244], [553], [731], [358, 359], [622, 759], [618], [38], [12, 475], [710], [662], [566], [919], [], [206], [92], [851], [], [246], [957], [224, 223], [29], [892], [433], [673, 929, 681, 620, 526], [12], [994], [781], [92], [555, 570], [195], [692], [955], [284], [812], [272], [137], [741], [902], [972], [162], [929], [875], [914], [949, 927], [146], [735], [297], [24], [896, 435, 794], [405], [16], [], [575], [276], [824, 836, 837], [190], [325], [800], [292], [341], [], [718, 975, 536], [950], [979], [853], [449], [226], [132], [], [900], [422], [832], [388], [699], [759], [96], [0], [], [831], [997], [489, 733], [], [432], [672], [51], [44], [710], [503], [559], [357], [904], [272], [866], [33, 35], [962, 935, 937, 923], [364], [300], [912, 339], [533], [762, 884], [863], [95], [829], [573], [822], [288], [89], [], [251], [920], [200, 204], [130], [], [864, 479], [8], [750, 564], [577], [533], [], [33], [680], [429], [67, 68], [681, 620], [97], [977], [157], [], [523], [34], [681, 810, 620, 508], [338], [699], [142], [746], [812], [951], [88], [349], [431], [911], [107], [475], [766], [674, 333], [162], [647], [384], [819], [122], [754], [907], [153], [652, 764], [342], [406], [430], [56], [25], [], [252], [897], [302], [365], [108], [788, 502], [365], [28], [680], [863], [955], [68], [433], [], [539], [566], [212], [893], [76], [508], [479, 817], [742], [255], [267], [], [815], [920], [230], [637], [465], [516, 520], [876, 435, 794], [750], [], [572], [489], [668], [798], [306], [619, 846], [929], [774, 
788, 502], [236, 165], [636], [666], [154], [532], [491], [765], [220], [115], [952], [135], [889, 486], [], [403], [792], [], [144], [94], [146], [554], [688], [118], [768], [517, 847], [640], [197], [], [69], [327], [790], [17], [199], [628], [135], [226], [933, 923], [735], [432], [286], [698], [189], [554], [346], [252], [433, 639], [292], [828], [849], [995], [], [292], [652], [884, 406], [241], [680], [275], [905, 750, 721], [713], [373], [399], [487], [897], [659, 969], [102], [289], [477], [216], [651], [868], [930], [247], [319], [673], [235], [829], [524, 461], [75], [735], [111], [591], [], [886], [711], [922], [318], [629], [797], [434], [867], [989], [203], [], [328], [318], [823], [770], [421], [251], [802], [938], [890], [553, 493], [173], [394], [914], [489], [262], [12], [274], [216], [278], [803], [592], [546, 402], [654], [25], [], [839], [347], [615], [662], [706], [840], [886], [535, 479], [472], [513], [871], [882], [352], [880], [607], [975], [25], [898], [977, 978], [], [39], [146], [219], [517], [], [528], [477], [721], [371], [192], [300], [], [820], [], [], [31], [629], [822], [614], [239], [820], [210], [615], [685], [], [836, 837, 879, 535], [197], [], [663], [356], [540], [273], [276], [299], [263], [291], [887], [768], [76], [466], [513], [863], [51], [850], [347], [256], [218], [490], [239], [581, 751, 479], [574], [819], [478], [], [655], [685], [75], [545], [358], [987], [265], [738, 470], [786], [226], [702], [535], [165], [977, 978], [346], [218, 215], [224], [928, 923, 960], [907, 440], [133], [735], [50], [827], [752, 852], [891], [327], [386], [51], [96], [838], [802], [20], [129], [514], [475], [581, 656, 475, 479], [], [928, 712], [488, 695], [338], [], [119], [604], [802], [617], [640], [523], [515], [518], [306], [414], [829], [403], [64, 55], [138], [4], [598], [232, 239], [949], [406], [361], [704], [756, 412], [629], [], [968, 967], [748, 636], [197, 198, 199], [738], [790], [251], [166], [78], [332], [527], [941], 
[502], [878], [492], [9], [82], [966, 907], [783], [806], [453, 454, 526, 527, 782, 664], [127], [190], [], [723], [656, 475, 479], [674], [650], [184], [117], [649], [418], [659], [354], [770], [681, 620, 526, 527, 782, 664, 508], [876], [], [704], [728], [267], [741], [257, 222], [542], [213], [937], [513, 776, 875], [402], [], [599], [983], [240, 241], [], [], [188, 190], [134], [322], [12], [896], [128], [470, 862], [668], [350], [608], [], [230], [], [], [843], [467], [872, 622, 759], [720], [], [106], [14], [399], [257, 222], [373], [223], [144], [800], [129], [434], [983, 801], [335], [9, 340], [953], [751, 479], [588], [972], [47], [303], [990], [547], [637], [686], [517], [530], [918], [834, 906], [963], [820], [318], [379], [983], [815], [780], [839], [272], [210], [740, 783, 784], [321], [570], [661], [593], [84], [581, 661, 479], [611], [730], [868, 567], [794], [348], [906], [70], [], [11], [674], [872], [666], [72], [805], [290], [289], [357], [392], [206], [859], [86], [237], [638, 639], [140], [493], [136], [990], [577], [728, 412], [689], [14], [806], [544], [139], [560, 981], [194], [844], [428], [692], [366], [682], [320, 319], [883], [838, 631], [894], [622], [191], [94], [67], [992], [103], [603], [43], [881], [278], [348], [804], [746], [262], [218], [619, 532, 846], [522], [389], [757], [300], [860], [781], [692], [210], [349], [], [472], [660], [398], [489], [], [523], [251], [], [386, 101], [847], [202], [896], [608], [979], [911], [959], [], [997], [51], [335], [731], [148], [693, 919, 472, 733], [450], [731], [984], [847], [71], [223], [142], [955], [], [], [576], [684], [438, 126], [739], [328], [890], [778], [496], [131], [923], [734], [374], [244], [673], [499], [700], [986], [964, 923], [92], [81], [836, 593], [326], [49, 50], [14], [250], [793], [391], [937], [210, 178], [823], [753], [122], [11], [435, 876], [1], [933], [263, 247], [719], [708], [761], [], [644], [766], [961], [774], [693], [738], [646, 884, 406], [533], [373], [], 
[712], [], [231], [604], [148], [595], [], [1], [905, 789, 799], [727], [804, 896], [842, 978], [986], [769], [532, 762], [434], [636], [205], [504, 968, 254], [532, 495], [132], [583], [619], [992], [783], [904, 905, 968, 610, 504], [294], [456], [444], [], [56], [140], [478], [3], [784, 499], [336], [639], [304], [319], [692], [379], [759], [738, 580], [788], [857], [114], [464, 608, 610], [192, 186], [791], [905], [85], [128], [38], [782, 851, 664], [584], [690], [72], [217], [160], [428], [959], [653], [491], [391], [923], [234], [757], [41], [169], [902], [790], [992], [553], [720], [795, 796], [], [575], [985], [604], [460], [934], [783], [368], [296], [792], [608, 610, 531], [67], [630], [721], [688], [651], [225], [648], [391], [468], [8, 7], [696], [371], [190], [882], [55], [859], [985], [22], [595], [326], [189], [228], [772], [635], [677], [915], [0], [774], [273], [208], [435], [197], [985], [922], [571], [66, 68], [295], [247], [740], [986], [934], [747], [631], [], [570, 691], [780], [524], [73, 815], [35], [988], [879], [623], [31], [918], [258], [757], [768], [2, 3], [671], [744, 657], [959], [177], [434], [827], [354], [587], [635], [168, 211], [833], [398], [761], [732], [222], [127], [52, 111], [291], [368], [107], [700], [351], [752], [335], [834], [454], [154], [872], [767], [334], [893, 446], [555], [830], [661], [428], [180], [479], [529], [603], [242], [320], [310], [349], [268], [840], [256], [955], [], [892], [423], [], [102], [514, 655], [421], [536, 718, 814, 977, 978], [83], [506], [777, 623], [977], [490], [400, 667], [404], [197], [229], [25], [425], [], [764, 597], [477], [584, 523], [], [42], [970, 795, 796], [745], [854], [864], [129], [831], [136], [939], [], [339], [470], [918], [319], [580], [769], [990], [188, 189], [9], [851], [460], [96], [893], [933], [968], [908, 404], [421, 825], [923], [519], [], [642], [28], [811], [110], [481], [102], [797], [868], [762, 934], [375], [], [581, 479, 874, 751], [876], [267], [40], [545], 
[495, 532], [30], [352], [433], [413], [872, 764], [365], [], [322], [719], [650], [], [212], [517], [863], [325], [], [791], [275], [562], [854], [168], [606], [19], [862], [], [175], [619], [], [252], [868, 415], [669], [526], [132], [310], [786], [618, 926], [708], [901], [109], [774], [151], [544, 909], [370], [62], [960, 954], [954], [628], [2], [52], [306], [963, 809, 923], [146], [803], [673, 681, 620, 526, 664, 508], [774], [], [235], [334], [779, 654], [617], [10, 478], [123], [7], [64, 55], [580], [], [841], [458], [46], [512], [221], [229, 200], [111], [443], [171], [376], [991], [178], [740], [165], [636], [257, 222], [319], [347], [], [641], [691], [177], [495], [875, 671], [540], [850], [923, 959], [516, 750, 431], [896, 804, 905, 700, 799], [891], [162], [393], [105], [217], [934, 923], [358], [125], [702], [469, 919], [131], [654], [897], [143], [439], [166], [632, 733], [630], [907, 440], [677], [614], [80], [], [446], [463], [214], [792], [874], [258], [810, 878], [25], [255], [990], [522], [507], [993], [217], [827], [890], [6], [769, 695], [488, 695], [872], [510], [803], [305], [35], [898, 671], [277], [174], [655], [87], [], [758], [385], [930, 868, 968, 923], [971], [451], [226], [543], [330], [770, 788, 630], [421], [760, 827], [345], [292], [330], [197], [477], [], [603], [238], [132], [305], [335], [789, 799], [171], [212], [604], [420], [818, 819], [529, 669], [269], [460], [277], [479, 817, 475], [682], [116], [661, 479], [299], [674], [650], [727], [582, 941, 951], [469], [572, 966], [938], [61], [834], [87], [451], [127], [32], [427], [614], [533], [345], [512], [607], [366], [171], [809, 618, 923], [219], [256], [], [912], [673, 526, 527, 782, 664, 508], [104], [348], [50], [253], [746], [], [903], [131, 134], [641], [822, 887], [581], [], [879, 977], [163], [294], [617], [290], [708], [678], [29], [234], [99], [], [215], [548, 664, 851, 894], [247], [707], [924], [891], [720], [923, 924], [744, 657], [888], [188], [768], [809, 925], 
[412], [715], [115], [948], [621], [328], [49, 50], [178], [449, 975], [833], [365], [], [965], [719], [652, 733], [518, 444], [840], [307], [760], [816], [771], [522], [289], [385], [766], [673, 526, 527, 782, 664, 508], [], [659, 923, 926], [51], [990], [253], [854], [391], [852], [891], [834, 895], [625], [373, 377], [155], [851, 921], [], [191], [452], [113], [600, 116, 126], [851], [433], [157], [], [97], [239], [323], [746], [48], [158], [703], [784, 508], [849], [386, 101], [299], [817], [722], [440], [408], [674], [868], [871], [736], [246], [985], [829], [410], [], [119, 121], [412], [320], [393], [], [843], [966], [884, 538], [63], [713], [774, 788], [748], [792], [893], [847], [782, 664], [464], [962, 932, 923], [530, 719], [788], [323], [109], [373], [434], [739], [431], [76], [859], [608, 602], [430], [755], [288], [933], [786], [567], [536], [], [291], [72], [848, 632], [138], [767], [509], [287], [255], [179], [896, 794, 861], [], [131], [438], [950], [587, 813, 910], [538, 668], [486, 594, 501], [479], [51], [816], [930], [238], [321], [992], [614], [642], [487], [329], [], [653], [653], [327], [797], [435, 876], [251], [891], [357], [452], [61], [176], [279], [515, 808], [576], [696], [143], [108], [217], [997], [581, 479, 717], [650], [809], [808], [], [934], [22], [679], [890], [275], [73, 77], [726], [869, 975], [549, 623], [838], [652], [67], [64], [484, 536, 628], [590], [357], [442], [965], [442, 494], [975], [651, 909, 827], [618, 926], [468], [96], [], [987, 998], [655], [650], [204], [840], [396], [806], [349], [899, 647, 849, 505], [872], [698], [809, 910], [870], [10], [822, 541, 542], [242], [548, 485, 851, 632], [], [], [241], [403], [327], [], [], [413], [537], [349, 350], [759], [612], [84], [1], [212], [783], [806], [], [588], [892], [955], [594], [891], [82], [673, 810, 527, 508], [672], [119], [417], [712], [626], [48], [372], [162], [339], [954, 951], [921], [557], [56], [812], [302], [717], [295], [159], [], [747], [1], [356], 
[458], [512], [102], [922], [], [309, 599], [644], [983], [255], [], [276], [488], [292], [894], [509], [665], [44], [359], [30], [312], [24], [167], [424], [218], [272], [947], [723], [35], [781], [672], [], [262], [995], [43], [201], [248], [670], [733], [1, 124], [492], [], [757], [124], [831], [829], [546, 650, 819], [84], [911], [183], [873], [20], [476], [475], [208], [435], [665], [817], [834, 683], [956], [640], [109], [579, 881], [752, 852], [89], [543], [332], [926], [], [539, 741], [991], [493], [440], [518], [442], [719], [425], [880], [397], [963], [840, 462, 463], [641], [751], [804], [923, 928, 291, 737], [35], [349, 350], [638, 639], [336], [923], [760], [621], [945], [133], [1], [886], [437], [265, 266], [971], [827, 840], [812], [256], [977], [442, 437], [302], [62], [434], [231], [149], [], [872], [603], [245], [270], [50], [581], [428], [721, 285, 831], [467], [412], [395, 758], [330], [391], [634], [325], [494], [169], [518, 570], [143], [511], [849], [454], [671], [515, 420], [673, 526, 527, 782, 664, 508], [883], [812], [248, 249, 537], [160], [199], [748], [530], [190], [103], [163], [117], [892], [], [616, 159], [400, 667], [796], [703], [335], [834], [673, 742, 664, 526, 527, 632, 508], [33], [849, 505], [], [15], [602], [172], [], [], [298], [37], [130], [527, 664], [], [465], [838], [294], [581, 717, 479], [746], [743], [220], [572], [], [451, 679], [931], [843], [794], [641], [154], [148], [75], [16], [790], [216], [612, 741], [873], [810, 878], [162, 166], [786], [259], [789], [484, 628], [710, 767], [224, 223], [423, 424], [658], [670], [162], [], [547], [294], [63], [926], [591], [227, 235], [437], [763, 597], [161, 676], [342], [698], [928, 659, 923], [8], [205], [788, 502], [804], [537], [464], [826], [874, 555], [248], [583], [408], [616], [304], [185], [682], [520], [169], [769], [40], [562], [463, 434], [753], [207], [676, 248], [], [995], [871], [568], [169], [990], [840], [522], [335], [346], [479], [215], [515], [858], [230], 
[967], [546], [673, 526, 527, 664, 508], [940, 941, 942], [797], [939], [160], [963], [658], [251, 805], [982, 439], [524, 461], [253], [979], [277], [540], [], [407], [783, 784], [583], [544, 827], [239], [160], [245], [419], [331], [25], [22], [988], [243], [], [458], [455], [116], [986], [899, 505], [268], [416], [640], [420], [354], [739], [111], [384], [616], [810, 878], [541, 542], [910], [480], [897], [], [780], [629], [866], [185], [], [966], [898, 195], [588], [238, 207], [738], [65], [222], [646], [391, 758], [100], [521], [252], [535], [884], [232, 761], [497], [881], [457, 667], [823], [577], [330], [602], [], [725, 505], [879], [522], [49], [813], [239], [886], [347], [208], [294], [320], [87], [715, 652, 671], [929], [212], [94], [533], [903], [812], [921, 917], [583], [748], [295], [372], [], [361], [108, 973], [], [455], [49, 50], [987, 998], [919, 733], [282], [274, 277], [367], [430], [44], [81], [399], [24], [120], [357], [531], [101], [644], [283], [], [], [982], [355]]"
  },
  {
    "path": "timm/data/_info/imagenet_synset_to_definition.txt",
    "content": "n00004475\ta living thing that has (or can develop) the ability to act or function independently\nn00005787\torganisms (plants and animals) that live at or near the bottom of a sea\nn00006024\tan organism that depends on complex organic substances for nutrition\nn00006484\t(biology) the basic structural and functional unit of all organisms; they may exist as independent units of life (as in monads) or may form colonies or tissues as in higher plants and animals\nn00007846\ta human being\nn00015388\ta living organism characterized by voluntary movement\nn00017222\t(botany) a living organism lacking the power of locomotion\nn00021265\tany substance that can be metabolized by an animal to give energy and build tissue\nn00021939\ta man-made object taken as a whole\nn00120010\tthe act of hopping; jumping upward or forward (especially on one foot)\nn00141669\tthe act of reporting your presence (as at an airport or a hotel)\nn00288000\tmaneuvers of a horse in response to body signals by the rider\nn00288190\ta light leap by a horse in which both hind legs leave the ground before the forelegs come down\nn00288384\ta cadenced trot executed by the horse in one spot\nn00324978\twalking on a tightrope or slack rope\nn00326094\tthe sport or pastime of scaling rock masses on mountain sides (especially with the help of ropes and special equipment)\nn00433458\ta sport that necessarily involves body contact between opposing players\nn00433661\ta sport that is played outdoors\nn00433802\ta sport that involves exercises intended to display strength and balance and agility\nn00434075\tthe gymnastic moves of an acrobat\nn00439826\tparticipating in athletic sports performed on a running track or on the field associated with it\nn00440039\tthe act of participating in an athletic competition involving running on a track\nn00440218\tthe act of participating in an athletic competition in which you must jump\nn00440382\tthe act of jumping as far as possible from a running 
start\nn00440509\tthe act of jumping as high as possible over a horizontal bar\nn00440643\tjumping over the bar backwards and head first\nn00440747\ta sport in which participants must travel on skis\nn00440941\tthe sport of skiing across the countryside (rather than downhill)\nn00441073\tthe act of performing a jump on skis from a high ramp overhanging a snow covered slope\nn00441824\tsports that involve bodies of water\nn00442115\tthe act of swimming\nn00442437\tthe act of swimming\nn00442847\ta brief swim in water\nn00442981\ta headlong plunge into water\nn00443231\tthe act of someone who floats on the water\nn00443375\ta floating position with the face down and arms stretched forward\nn00443517\ta dive in which the abdomen bears the main force of impact with the water\nn00443692\tdiving into the water from a steep overhanging cliff\nn00443803\ta dive in which the diver somersaults before entering the water\nn00443917\ta dive in which the diver throws the feet forward to complete a full backward somersault and enters the water feet first and facing away from the diving board\nn00444142\ta dive in which the diver throws the feet forward and up to complete a half backward somersault and enters the water facing the diving board\nn00444340\ta dive in which the diver bends to touch the ankles before straightening out\nn00444490\ta dive in which the diver arches the back with arms outstretched before entering the water\nn00444651\tunderwater swimming without any more breathing equipment than a snorkel\nn00444846\tskin diving with scuba apparatus\nn00444937\tskin diving with a snorkel\nn00445055\tthe sport of riding a surfboard toward the shore on the crest of a wave\nn00445226\tskiing on water while being towed by a motorboat\nn00445351\tthe act of rowing as a sport\nn00445685\trowing by a single oarsman in a racing shell\nn00445802\tfighting with the fists\nn00446311\tboxing for money\nn00446411\tboxing at close quarters\nn00446493\ta boxing or wrestling 
match\nn00446632\ta boxing tactic: pretending to be trapped against the ropes while your opponent wears himself out throwing punches\nn00446804\tmaking the motions of attack and defense with the fists and arms; a part of training for a boxer\nn00446980\tthe sport of shooting arrows with a bow\nn00447073\tthe sport of riding on a sled or sleigh\nn00447221\triding on a long light sled with low handrails\nn00447361\triding a light one-man toboggan\nn00447463\triding on a bobsled\nn00447540\tthe sport of hand-to-hand struggle between unarmed contestants who try to throw each other down\nn00447957\ta style of wrestling where the wrestlers are forbidden to tackle or trip or use holds below the waist\nn00448126\twrestling for money\nn00448232\ta Japanese form of wrestling; you lose if you are forced out of a small ring or if any part of your body (other than your feet) touches the ground\nn00448466\tthe sport of gliding on skates\nn00448640\tskating on ice\nn00448748\tice skating where the skates trace outlines of selected figures\nn00448872\tskating using Rollerblades\nn00448958\tskating on wheels\nn00449054\tthe sport of skating on a skateboard\nn00449168\tcompetitive skating on speed skates (usually around an oval course)\nn00449295\tthe sport of engaging in contests of speed\nn00449517\tthe sport of racing automobiles\nn00449695\tthe sport of racing boats\nn00449796\tracing in high-speed motor boats\nn00449892\tthe sport of racing camels\nn00449977\tthe sport of racing greyhounds\nn00450070\tthe sport of racing horses\nn00450335\tthe sport of siting on the back of a horse while controlling its movements\nn00450700\ta sport that tests horsemanship\nn00450866\ta sport in which people ride across country on ponies\nn00450998\triding horses in competitions over set courses to demonstrate skill in jumping over obstacles\nn00451186\triding horses across country over obstructions to demonstrate horsemanship\nn00451370\tthe sport of traveling on a bicycle or 
motorcycle\nn00451563\triding a bicycle\nn00451635\triding a motorcycle\nn00451768\tbicycling or motorcycling on sand dunes\nn00451866\tsport that involves killing animals (especially hunting)\nn00452034\tthe activity at a bullfight\nn00452152\tparticipation in the sport of matching gamecocks in a cockfight\nn00452293\tthe pursuit and killing or capture of wild animals regarded as a sport\nn00452734\ta hunt in which beaters force the game to flee in the direction of the hunter\nn00452864\thunting rabbits with beagles\nn00453126\thunting with dogs (usually greyhounds) that are trained to chase game (such as hares) by sight instead of by scent\nn00453313\thunting deer\nn00453396\thunting ducks\nn00453478\tmounted hunters follow hounds in pursuit of a fox\nn00453631\tthe sport of hunting wild boar with spears\nn00453935\tthe act of someone who fishes as a diversion\nn00454237\tfishing with a hook and line (and usually a pole)\nn00454395\tangling with an artificial fly as a lure\nn00454493\tangling by drawing a baited line through the water\nn00454624\tthe act of throwing a fishing line out over the water by means of a rod and reel\nn00454855\tthe single-handed rod casting of a relatively heavy (artificial) bait\nn00454983\tcasting an artificial fly as a lure\nn00455076\ta cast that falls beyond the intended spot\nn00455173\tcasting (artificial) bait far out into the ocean (up to 200 yards) with the waves breaking around you\nn00456465\ta game played in daylight\nn00463246\ta game involving athletic activity\nn00463543\ta game played on an ice rink by two opposing teams of six skaters each who try to knock a flat round puck into the opponents' goal with angled sticks\nn00464277\ta game with two players who use rackets to strike a ball that is tethered to the top of a pole; the object is to wrap the string around the pole\nn00464478\ta game played in a swimming pool by two teams of swimmers who try to throw an inflated ball into the opponents' goal\nn00464651\tan 
athletic game that is played outdoors\nn00464894\ta game played on a large open course with 9 or 18 holes; the object is use as few strokes as possible in playing all the holes\nn00466273\tplaying golf for money\nn00466377\tthe activity of playing 18 holes of golf\nn00466524\tgolf scoring by total strokes taken\nn00466630\tgolf scoring by holes won\nn00466712\ta novelty version of golf played with golf balls and putters on a miniature course featuring many obstacles\nn00466880\ta game in which players hit a wooden ball through a series of hoops; the winner is the first to traverse all the hoops and hit a peg\nn00467320\ta game in which iron rings (or open iron rings) are thrown at a stake in the ground in the hope of encircling it\nn00467536\ta game in which players use long sticks to shove wooden disks onto the scoring area marked on a smooth surface\nn00467719\tan outdoor game played on a field of specified dimensions\nn00467995\ta game resembling ice hockey that is played on an open field; two opposing teams use curved sticks try to drive a ball into the opponents' net\nn00468299\ta simple version of hockey played by children on the streets (or on ice or on a field) using a ball or can as the puck\nn00468480\tany of various games played with a ball (round or oval) in which two teams try to kick or carry or propel the ball into each other's goal\nn00469651\ta game played by two teams of 11 players on a rectangular field 100 yards long; teams try to get possession of the ball and advance it across the opponents goal line in a series of (running or passing) plays\nn00470554\tfootball played for pay\nn00470682\ta version of American football in which the ball carrier is touched rather than tackled\nn00470830\ta traditional Irish game resembling hockey; played by two teams of 15 players each\nn00470966\ta form of football played with an oval ball\nn00471437\ta field game played with a ball (especially baseball)\nn00471613\ta ball game played with a bat and ball 
between two teams of nine players; teams take turns at bat trying to score runs\nn00474568\tthe game of baseball\nn00474657\tplaying baseball for money\nn00474769\tbaseball as distinguished from softball\nn00474881\ta game in which a pitcher does not allow any opposing player to reach base\nn00475014\ta game in which a pitcher allows the opposing team no hits\nn00475142\ta game in which a pitcher allows the opposing team only one hit\nn00475273\ta game in which a pitcher allows the opposing team only 2 hits\nn00475403\ta game in which a pitcher allows the opposing team only 3 hits\nn00475535\ta game in which a pitcher allows the opposing team 4 hits\nn00475661\ta game in which a pitcher allows the opposing team 5 hits\nn00475787\ta game closely resembling baseball that is played on a smaller diamond and with a ball that is larger and softer\nn00476140\tan English ball game similar to baseball\nn00476235\ta form of baseball played in the streets with a rubber ball and broomstick handle\nn00476389\ta game played with a ball and bat by two teams of 11 players; teams take turns trying to score runs\nn00477392\ta game invented by American Indians; now played by two teams who use long-handled rackets to catch and carry and throw the ball toward the opponents' goal\nn00477639\ta game similar to field hockey but played on horseback using long-handled mallets and a wooden ball\nn00477827\ta game using a leather ball six feet in diameter; the two side try to push it across the opponents' goal\nn00478262\ta football game in which two teams of 11 players try to kick or head a ball into the opponents' goal\nn00479076\tan athletic game played on a court\nn00479440\ta game played in a walled court or against a single wall by two or four players who strike a rubber ball with their hands\nn00479616\ta game played on a handball court with short-handled rackets\nn00479734\ta game resembling handball; played on a court with a front wall and two side walls\nn00479887\ta game played in 
an enclosed court by two or four players who strike the ball with long-handled rackets\nn00480211\ta game in which two teams hit an inflated ball over a high net using their hands\nn00480366\ta Basque or Spanish game played in a court with a ball and a wickerwork racket\nn00480508\ta game played on a court with light long-handled rackets used to volley a shuttlecock over a net\nn00480885\tan ancient racket game\nn00480993\ta game played on a court by two opposing teams of 5 players; points are scored by throwing the ball through an elevated horizontal hoop\nn00481803\tplaying basketball for money\nn00481938\tgame played mainly on board ocean liners; players toss a ring back and forth over a net that is stretched across a small court\nn00482122\ta team game that resembles basketball; a soccer ball is to be thrown so that it passes through a ring on the top of a post\nn00482298\ta game played with rackets by two or four players who hit a ball back and forth over a net that divides the court\nn00483205\tplaying tennis for money\nn00483313\ttennis played with one person on each side\nn00483409\tbadminton played with one person on each side\nn00483508\ttennis played with two players on each side\nn00483605\tbadminton played with two players on each side\nn00483705\tan ancient form of tennis played in a four-walled court\nn00483848\tan Italian game similar to tennis\nn00523513\tan active diversion requiring physical exertion and competition\nn00812526\tthe act of grasping\nn00825773\ta sport adapted from jujitsu (using principles of not resisting) and similar to wrestling; developed in Japan\nn00887544\ta sport that involves competition between teams of players\nn01035504\tthe traditional Passover supper of Jesus with his disciples on the eve of his crucifixion\nn01035667\t(Judaism) the ceremonial dinner on the first night (or both nights) of Passover\nn01055165\tthe act of encamping and living in tents in a camp\nn01314388\tany unwanted and destructive insect or other 
animal that attacks food or crops or livestock etc.\nn01314663\ta regional term for `creature' (especially for domestic animals)\nn01314781\tan animal that creeps or crawls (such as worms or spiders or insects)\nn01314910\ta person or other animal that moves abruptly and rapidly\nn01315213\tan animal that makes short high-pitched sounds\nn01315330\tan animal that has a body temperature that is relatively constant and independent of the environmental temperature\nn01315581\tan animal whose body temperature varies with the temperature of its surroundings; any animal except birds and mammals\nn01315805\tany animal that lives and grazes in the grassy open land of western North America (especially horses, cattle, sheep)\nn01316422\tany animal that feeds on refuse and other decaying organic matter\nn01316579\ta fish that lives and feeds on the bottom of a body of water\nn01316734\ta scavenger that feeds low on the food chain\nn01316949\tan animal trained for and used for heavy labor\nn01317089\tan animal such as a donkey or ox or elephant used for transporting loads or doing other heavy work\nn01317294\tan animal used for pulling heavy loads\nn01317391\tan animal (such as a mule or burro or horse) used to carry loads\nn01317541\tany of various animals that have been tamed and made fit for a human environment\nn01317813\tan animal being fattened or suitable for fattening\nn01317916\tan animal that feeds on a particular source of food\nn01318053\ta domestic animal (especially a young steer or heifer) kept as stock until fattened or matured and suitable for a breeding establishment\nn01318279\tany recently hatched animal (especially birds)\nn01318381\ta single domestic animal\nn01318478\tan animal (especially birds and fish) that travels between different habitats at particular times of the year\nn01318660\tan animal (especially birds and arthropods and reptiles) that periodically shed their outer layer (feathers or cuticle or skin or hair)\nn01318894\ta domesticated animal 
kept for companionship or amusement\nn01319001\ta person or other animal having powers of endurance or perseverance\nn01319187\ta creature (especially a whale) that has been prevented from attaining full growth\nn01319467\tany of numerous animals inhabiting the sea including e.g. fishes and molluscs and many mammals\nn01319685\tunwanted marine creatures that are caught in the nets while fishing for another species\nn01320872\tan animal that produces gametes (ova) that can be fertilized by male gametes (spermatozoa)\nn01321123\tfemale of certain aquatic animals e.g. octopus or lobster\nn01321230\tan animal that produces gametes (spermatozoa) that can fertilize female gametes (ova)\nn01321456\tany mature animal\nn01321579\tany immature animal\nn01321770\ta young animal without a mother\nn01321854\tany immature mammal\nn01322221\ta very young mammal\nn01322343\tyoung of any of various canines such as a dog or wolf\nn01322508\ta young wolf\nn01322604\ta young dog\nn01322685\tthe young of certain carnivorous mammals such as the bear or wolf or lion\nn01322898\ta young lion\nn01322983\ta young bear\nn01323068\ta young tiger\nn01323155\tyoung of any of various fur-bearing animals\nn01323261\ta young mammal that has not been weaned\nn01323355\tmale parent of an animal especially a domestic animal such as a horse\nn01323493\tfemale parent of an animal especially domestic livestock\nn01323599\ta pedigreed animal of unmixed lineage; used especially of horses\nn01323781\tany creature of exceptional size\nn01324305\tan animal that has undergone mutation\nn01324431\tany animal that feeds on flesh\nn01324610\tany animal that feeds chiefly on grass and other plants\nn01324799\tany organism that feeds mainly on insects\nn01324916\tan animal having teeth consolidated with the summit of the alveolar ridge without sockets\nn01325060\tan animal having teeth fused with the inner surface of the alveolar ridge without sockets\nn01326291\tany organism of microscopic size\nn01327909\ta 
hybrid produced by crossing parents that are homozygous except for a single gene locus that has two alleles (as in Mendel's experiments with garden peas)\nn01329186\ta large heterogeneous group of RNA viruses divisible into groups on the basis of the virions; they have been recovered from arthropods, bats, and rodents; most are borne by arthropods; they are linked by the epidemiologic concept of transmission between vertebrate hosts by arthropod vectors (mosquitoes, ticks, sandflies, midges, etc.) that feed on blood; they can cause mild fevers, hepatitis, hemorrhagic fever, and encephalitis\nn01330126\tany of a group of viruses including those that in humans cause upper respiratory infections or infectious pinkeye\nn01330497\tanimal viruses belonging to the family Arenaviridae\nn01332181\ta filovirus that causes Marburg disease; carried by animals; can be used as a bioweapon\nn01333082\ta family of arborviruses carried by arthropods\nn01333483\tan animal virus that causes vesicular stomatitis\nn01333610\ta family of arboviruses carried by arthropods\nn01334217\ta type of smallpox virus that has a fatality rate of up to 25 percent\nn01334690\tthe smallest of viruses; a plant virus with its RNA arranged in a circular chromosome without a protein coat\nn01335218\ta bacteriophage that infects the bacterium Escherichia coli\nn01337191\ta group of viruses including those causing mumps and measles\nn01337734\tthe virus causing poliomyelitis\nn01338685\tany of the animal viruses that cause painful blisters on the skin\nn01339083\ta herpes virus that causes oral herpes\nn01339336\ta herpes virus that causes shingles\nn01339471\ta herpes virus that causes chickenpox and shingles\nn01339801\tany of a group of herpes viruses that enlarge epithelial cells and can cause birth defects; can affect humans with impaired immunological systems\nn01340014\tthe member of the herpes virus family that is responsible for chickenpox\nn01340522\ta virus the can initiate various kinds of 
tumors in mice\nn01340785\ta neurotropic non-arbovirus of the family Rhabdoviridae that causes rabies\nn01340935\tany of a group of non-arboviruses including the rotavirus causing infant enteritis\nn01341090\tthe reovirus causing infant enteritis\nn01342269\torganisms that typically reproduce by asexual budding or fission and whose nutritional mode is absorption or photosynthesis or chemosynthesis\nn01347583\tconsidered ancient life forms that evolved separately from bacteria and blue-green algae\nn01349735\ta rodlike bacterium (especially any of the rod-shaped or branched bacteria in the root nodules of nitrogen-fixing plants)\nn01350226\ta species of bacillus that causes anthrax in humans and in animals (cattle and swine and sheep and rabbits and mice and guinea pigs); can be used a bioweapon\nn01350701\ta bacillus bacterium that causes the plague; aerosolized bacteria can be used as a bioweapon\nn01351170\tan aerobic Gram-negative coccobacillus that causes brucellosis; can be used as a bioweapon\nn01351315\tany flagellated aerobic bacteria having a spirally twisted rodlike form\nn01357328\tanaerobic bacterium producing botulin the toxin that causes botulism\nn01357507\tanaerobic Gram-positive rod bacterium that produces epsilon toxin; can be used as a bioweapon\nn01358572\tpredominantly photosynthetic prokaryotic organisms containing a blue pigment in addition to chlorophyll; occur singly or in colonies in diverse habitats; important as phytoplankton\nn01359762\tlarge colonial bacterium common in tropical open-ocean waters; important in carbon and nitrogen fixation\nn01362336\tsoil bacteria that convert nitrites to nitrates\nn01363719\tspirally twisted elongate rodlike bacteria usually living in stagnant water\nn01365474\ta genus of Gram-negative aerobic bacteria that occur as pathogens and parasite in many animals (including humans)\nn01365885\tthe pus-producing bacterium that causes gonorrhea\nn01366700\ta species of bacterium that causes 
diphtheria\nn01367772\trod-shaped Gram-negative bacteria; most occur normally or pathogenically in intestines of humans and other animals\nn01368672\ta genus of nonmotile rod-shaped Gram-negative enterobacteria; some cause respiratory and other infections\nn01369358\ta form of salmonella that causes food poisoning in humans\nn01369484\ta form of salmonella that causes typhoid fever\nn01374703\tany of the nitrobacteria that oxidize nitrites into nitrates\nn01374846\tany of the nitrobacteria that oxidize ammonia into nitrites\nn01375204\tany bacteria (some of which are pathogenic for humans and animals) belonging to the order Actinomycetales\nn01376237\taerobic bacteria (some of which produce the antibiotic streptomycin)\nn01376437\tsource of the antibiotic erythromycin\nn01376543\tsource of the antibiotic streptomycin\nn01377278\tcause of tuberculosis\nn01377510\tbacteria that produce pus\nn01377694\tany of various rod-shaped Gram-negative bacteria\nn01378545\tbacteria that form colonies in self-produced slime; inhabit moist soils or decaying plant matter or animal waste\nn01379389\tspherical Gram-positive parasitic bacteria that tend to form irregular colonies; some cause boils or septicemia or infections\nn01380610\tGram-positive bacteria usually occurring in pairs\nn01380754\tbacterium causing pneumonia in mice and humans\nn01381044\tspherical Gram-positive bacteria occurring in pairs or chains; cause e.g. 
scarlet fever and tonsillitis\nn01382033\tparasitic or free-living bacteria; many pathogenic to humans and other animals\nn01384084\tunicellular algae\nn01384164\tanimal constituent of plankton; mainly small crustaceans and fish larvae\nn01384687\tan animal or plant that lives in or on a host (another animal or plant); it obtains nourishment from the host without benefiting or killing the host\nn01385017\tany of various parasites that live in the internal organs of animals (especially intestinal worms)\nn01385330\tany external parasitic organism (as fleas)\nn01386007\tany disease-producing agent (especially a virus or bacterium or other microorganism)\nn01386182\teither of two different animal or plant species living in close association but not interdependent\nn01386354\tan organism such as an insect that habitually shares the nest of a species of ant\nn01387065\tany of the unicellular protists\nn01389507\tany of diverse minute acellular or unicellular organisms usually nonphotosynthetic\nn01390123\tprotozoa that move and capture food by forming pseudopods\nn01390763\tprotozoa with spherical bodies and stiff radiating pseudopods\nn01392275\tany ameba of the genus Endamoeba\nn01392380\tnaked freshwater or marine or parasitic protozoa that form temporary pseudopods for feeding and locomotion\nn01393486\tmarine protozoan having a rounded shell with spiny processes\nn01394040\tany of various rhizopods of the order Testacea characterized by having a shell\nn01394492\tan amoeba-like protozoan with a chitinous shell resembling an umbrella\nn01394771\ta protozoan with an ovoid shell of cemented sand grains\nn01395254\ta protozoan with a microscopic appendage extending from the surface of the cell\nn01396048\tany member of the genus Paramecium\nn01396617\tany of several trumpet-shaped ciliate protozoans that are members of the genus Stentor\nn01397114\tprimitive chlorophyll-containing mainly aquatic eukaryotic organisms lacking true stems and roots and 
leaves\nn01397690\tan edible seaweed with a mild flavor\nn01397871\tany of various seaweeds that grow underwater in shallow beds\nn01400247\talgae having the pigments chlorophyll and carotene and xanthophyll\nn01400391\tany alga of the division Chrysophyta with its chlorophyll masked by yellow pigment\nn01402600\talgae having the chlorophyll masked by brown and yellow pigments\nn01403457\tlarge brown seaweeds having fluted leathery fronds\nn01404365\tany of various algae of the family Fucaceae\nn01404495\ta fossilized cast or impression of algae of the order Fucales\nn01405007\tany member of the genus Fucus\nn01405616\tsimilar to and found with black rockweed\nn01407798\talgae that are clear green in color; often growing on wet ricks or damp wood or the surface of stagnant water\nn01410457\tfree-floating freshwater green algae\nn01411450\tany alga of the genus Chlorella\nn01412694\tany of various submerged aquatic algae of the genus Chara having nodes with whorled filamentlike branches; usually encrusted with calcium carbonate deposits\nn01413457\tfreshwater green algae\nn01414216\tany of various red algae having graceful rose to purple fronds (e.g. dulse or carrageen)\nn01415626\tan organism with cells characteristic of all life forms except primitive microorganisms such as bacteria; i.e. 
an organism with `good' or membrane-bound nuclei in its cells\nn01415920\ta unicellular organism having cells lacking membrane-bound nuclei; bacteria are the prime example but also included are blue-green algae and actinomycetes and mycoplasma\nn01416213\tone of the distinct individuals forming a colonial animal such as a bryozoan or hydrozoan\nn01418498\tflagellate protozoan that causes leishmaniasis\nn01418620\tflagellate protozoan lacking photosynthesis and other plant-like characteristics\nn01419332\tflagellates with several flagella\nn01419573\ta flagellate that is the cause of the frequently fatal fish disease costiasis\nn01419888\ta suspected cause of diarrhea in humans\nn01421333\tcommon in fresh and salt water appearing along the shore as algal blooms\nn01421807\tparasitic spore-forming protozoan\nn01422185\tone of the minute active bodies into which sporozoans divide in one stage of their life cycle\nn01422335\ta sporozoan in the active feeding stage of its life cycle\nn01422450\ta cell that arises from the asexual division of a parent sporozoan during its life cycle\nn01423302\tparasitic on the digestive epithelium of vertebrates and higher invertebrates\nn01423617\tvermiform protozoans parasitic in insects and other invertebrates\nn01424420\tparasitic protozoan of the genus Plasmodium that causes malaria in humans\nn01425223\tparasitic in birds\nn01427399\tparasite of arthropods and fishes that invade and destroy host cells\nn01429172\tin some classifications considered a superorder comprising the Cypriniformes and the Siluriformes\nn01438208\ta soft-finned fish of the order Cypriniformes\nn01438581\tslender freshwater fishes of Eurasia and Africa resembling catfishes\nn01439121\tsoft-finned mainly freshwater fishes typically having toothless jaws and cycloid scales\nn01439514\tany of various freshwater fish of the family Cyprinidae\nn01439808\tlarge Old World freshwater bottom-feeding fish introduced into Europe from Asia; inhabits ponds and sluggish 
streams and often raised for food; introduced into United States where it has become a pest\nn01440160\tscaleless domestic carp\nn01440242\tdomestic carp with some large shining scales\nn01440467\tEuropean freshwater fish having a flattened body and silvery scales; of little value as food\nn01440764\tfreshwater dace-like game fish of Europe and western Asia noted for ability to survive outside water\nn01441117\tsmall European freshwater fish with a slender bluish-green body\nn01441272\tEuropean freshwater game fish with a thick spindle-shaped body\nn01441425\tany of numerous small silvery North American cyprinid fishes especially of the genus Notropis\nn01441910\tthe common North American shiner\nn01442450\tEuropean freshwater food fish having a greenish back\nn01442710\tEuropean freshwater fish resembling the roach\nn01442972\tvery small European freshwater fish common in gravelly streams\nn01443243\tsmall slender European freshwater fish often used as bait by anglers\nn01443537\tsmall golden or orange-red freshwater fishes of Eurasia used as pond or aquarium fishes\nn01443831\tEuropean carp closely resembling wild goldfish\nn01444339\teel-shaped freshwater fish of South America having electric organs in its body\nn01444783\ta cypriniform fish of the family Catostomidae\nn01445429\tany of several large suckers of the Mississippi valley\nn01445593\tfish of the lower Mississippi\nn01445857\twidely distributed in warm clear shallow streams\nn01446152\tNorth American sucker with reddish fins\nn01446589\tany member of the family Cyprinodontidae\nn01446760\tsmall mostly marine warm-water carp-like schooling fishes; used as bait or aquarium fishes or in mosquito control\nn01447139\tsilver-and-black killifish of saltwater marshes along the Atlantic coast of the United States\nn01447331\tblack-barred fish of bays and coastal marshes of the Atlantic and Gulf Coast of the United States\nn01447658\tfound in small streams of tropical America; often kept in aquariums; usually 
hermaphroditic\nn01447946\ta fish with a dark-blue back and whitish sides with red stripes; found in swamps and streams of Florida\nn01448291\tfreshwater fish of Central America having a long swordlike tail; popular aquarium fish\nn01448594\tsmall freshwater fish of South America and the West Indies; often kept in aquariums\nn01448951\tsmall usually brightly-colored viviparous surface-feeding fishes of fresh or brackish warm waters; often used in mosquito control\nn01449374\tsilvery topminnow with rows of black spots of tropical North America and West Indies; important in mosquito control\nn01449712\tsmall stocky Mexican fish; popular aquarium fish\nn01449980\tpopular aquarium fish\nn01450661\tvery small, brightly colored (especially red) nocturnal fishes of shallow waters or tropical reefs; they make sounds like a squirrel's bark\nn01450950\ton reefs from Bermuda and Florida to northern South America\nn01451115\ta squirrelfish found from South Carolina to Bermuda and Gulf of Mexico\nn01451295\tbright red fish of West Indies and Bermuda\nn01451426\tthe larger squirrelfishes\nn01451863\tfish having a luminous organ beneath eye; of warm waters of the western Pacific and Puerto Rico\nn01452345\tfish of deep dark waters having a light organ below each eye\nn01453087\tEuropean dory\nn01453475\tfish with a projecting snout\nn01453742\tfish with large eyes and long snouts\nn01454545\tslender tropical fish with a long tubular snout and bony plates instead of scales\nn01454856\tsmall (2-4 inches) pugnacious mostly scaleless spiny-backed fishes of northern fresh and littoral waters having elaborate courtship; subjects of much research\nn01455317\tof rivers and coastal regions\nn01455461\tconfined to rivers\nn01455778\tfish with long tubular snout and slim body covered with bony plates\nn01456137\tsmall (4 inches) fish found off the Florida Gulf Coast\nn01456454\ta fish 8 inches long; found from eastern Florida to western Caribbean\nn01456756\tsmall fish with horse-like heads 
bent sharply downward and curled tails; swim in upright position\nn01457082\tsmall bottom-dwelling fish of warm seas having a compressed body and a long snout with a toothless mouth\nn01457407\tslender tropical shallow-water East Indian fish covered with transparent plates\nn01457852\ttropical Atlantic fish with a long snout; swims snout down\nn01458746\tthin protective membrane in some protozoa\nn01458842\tan animal organism in the early stages of growth and differentiation that in higher forms merge into fetal stages but in lower forms terminate in commencement of larval life\nn01459791\tan unborn or unhatched vertebrate in the later stages of development showing the main recognizable features of the mature animal\nn01460303\ta human fetus whose weight is less than 0.5 kilogram when removed or expelled from the mother's body\nn01461315\tthe mass of eggs deposited by fish or amphibians or molluscs\nn01461646\tearly stage of an embryo produced by cleavage of an ovum; a liquid-filled sphere whose wall is composed of a single layer of cells; during this stage (about eight days after fertilization) implantation in the wall of the uterus occurs\nn01462042\tthe blastula of a placental mammal in which some differentiation of cells has occurred\nn01462544\tdouble-walled stage of the embryo resulting from invagination of the blastula; the outer layer of cells is the ectoderm and the inner layer differentiates into the mesoderm and endoderm\nn01462803\ta solid mass of blastomeres that forms when the zygote splits; develops into the blastula\nn01464844\tnutritive material of an ovum stored for the nutrition of an embryo (especially the yellow mass of a bird or reptile egg)\nn01466257\tany animal of the phylum Chordata having a notochord or spinal column\nn01467336\tfish-like animals having a notochord rather than a true spinal column\nn01467804\tsmall translucent lancet-shaped burrowing marine animal; primitive forerunner of the vertebrates\nn01468238\tprimitive marine 
animal having a saclike unsegmented body and a urochord that is conspicuous in the larva\nn01468712\tminute sedentary marine invertebrate having a saclike body with siphons through which water enters and leaves\nn01469103\tascidian that can contract its body and eject streams of water\nn01469723\tminute floating marine tunicate having a transparent body with an opening at each end\nn01470145\tfree-swimming oceanic tunicate with a barrel-shaped transparent body\nn01470479\tany member of the class Larvacea\nn01470733\tfree-swimming tadpole-shaped pelagic tunicate resembling larvae of other tunicates\nn01470895\tfree-swimming larva of ascidians; they have a tail like a tadpole that contains the notochord\nn01471682\tanimals having a bony or cartilaginous skeleton with a segmented spinal column and a large brain enclosed in a skull or cranium\nn01472303\thigher vertebrates (reptiles, birds and mammals) possessing an amnion during development\nn01472502\tany member of the Amniota\nn01473806\tanimal living wholly or chiefly in or on water\nn01474283\teel-shaped vertebrate without jaws or paired appendages including the cyclostomes and some extinct forms\nn01474864\textinct fish-like jawless vertebrate having a heavily armored body; of the Paleozoic\nn01475232\textinct jawless fish with the anterior part of the body covered with bony plates; of the Silurian and Devonian\nn01475940\textinct small freshwater jawless fish usually having a heterocercal tail and an armored head; of the Silurian and Devonian\nn01476418\tsmall (2 inches long) extinct eellike fish with a finned tail and a notochord and having cone-shaped teeth containing cellular bone; late Cambrian to late Triassic; possible predecessor of the cyclostomes\nn01477080\tprimitive aquatic vertebrate\nn01477525\tprimitive eellike freshwater or anadromous cyclostome having round sucking mouth with a rasping tongue\nn01477875\tlarge anadromous lamprey sometimes used as food; destructive of native fish fauna in the 
Great Lakes\nn01478511\teellike cyclostome having a tongue with horny teeth in a round mouth surrounded by eight tentacles; feeds on dead or trapped fishes by boring into their bodies\nn01478969\ttypical hagfish\nn01479213\ta fossil hagfish of the genus Eptatretus\nn01479820\ta vertebrate animal possessing true jaws\nn01480106\tfish-like vertebrate with bony plates on head and upper body; dominant in seas and rivers during the Devonian; considered the earliest vertebrate with jaws\nn01480516\tfishes in which the skeleton may be calcified but not ossified\nn01480880\tfish with high compressed head and a body tapering off into a long tail\nn01481331\ta deep-sea fish with a tapering body, smooth skin, and long threadlike tail\nn01481498\tlarge European chimaera\nn01482071\tany of numerous fishes of the class Chondrichthyes characterized by a cartilaginous skeleton and placoid scales: sharks; rays; skates\nn01482330\tany of numerous elongate mostly marine carnivorous fishes with heterocercal caudal fins and tough skin covered with small toothlike scales\nn01483021\tlarge primitive shark widely distributed in warm seas\nn01483522\tfierce pelagic and oceanic sharks\nn01483830\tvoracious pointed-nose shark of northern Atlantic and Pacific\nn01484097\tpowerful mackerel shark of the Atlantic and Pacific\nn01484285\tvery swift active bluish shark found worldwide in warm waters; important game fish\nn01484447\tsimilar to shortfin mako but darker blue\nn01484562\tcommon blue-grey shark of southwest Pacific; sport and food fish\nn01484850\tlarge aggressive shark widespread in warm seas; known to attack humans\nn01485479\tlarge harmless plankton-eating northern shark; often swims slowly or floats at the sea surface\nn01486010\tlarge pelagic shark of warm seas with a whiplike tail used to round up small fish on which to feed\nn01486540\tshark of the western Pacific with flattened body and mottled skin\nn01486838\tsmall bottom-dwelling shark of warm shallow waters on both coasts 
of North America and South America and from southeast Asia to Australia\nn01487506\tshallow-water shark with sharp jagged teeth found on both sides of Atlantic; sometimes dangerous to swimmers\nn01488038\tlarge spotted shark of warm surface waters worldwide; resembles a whale and feeds chiefly on plankton\nn01488918\tany of numerous sharks from small relatively harmless bottom-dwellers to large dangerous oceanic and coastal species\nn01489501\ta most common shark in temperate and tropical coastal waters worldwide; heavy-bodied and dangerous\nn01489709\tmost common grey shark along coasts of middle Atlantic states; sluggish and occasionally caught by fishermen\nn01489920\twidely distributed shallow-water shark with fins seemingly dipped in ink\nn01490112\tlarge deep-water shark with white-tipped dorsal fin; worldwide distribution; most dangerous shark\nn01490360\trelatively slender blue-grey shark; nearly worldwide in tropical and temperate waters\nn01490670\tcommon shallow-water schooling shark of the Atlantic from North Carolina to Brazil and off west Africa; dangerous\nn01491006\tslender cosmopolitan, pelagic shark; blue body shades to white belly; dangerous especially during maritime disasters\nn01491361\tlarge dangerous warm-water shark with striped or spotted body\nn01491661\tPacific shark valued for its fins (used by Chinese in soup) and liver (rich in vitamin A)\nn01491874\tany of several small sharks\nn01492357\tsmall bottom-dwelling shark found along both Atlantic coasts\nn01492569\tsmooth dogfish of European coastal waters\nn01492708\tfound along the Atlantic coast of the Americas\nn01492860\tfound from the northern Gulf of Mexico to Brazil\nn01493146\tsmooth dogfish of Pacific and Indian Oceans and Red Sea having white-tipped dorsal and caudal fins\nn01493541\tsmall bottom-dwelling dogfishes\nn01493829\tdestructive dogfish of the Atlantic coastal waters of America and Europe; widely used in anatomy classes\nn01494041\tdogfish of Pacific coast of North 
America\nn01494475\tmedium-sized live-bearing shark with eyes at either end of a flattened hammer-shaped head; worldwide in warm waters; can be dangerous\nn01494757\tfished for the hides and vitamin-rich liver\nn01494882\tfished for the hide and vitamin-rich liver\nn01495006\tsmall harmless hammerhead having a spade-shaped head; abundant in bays and estuaries\nn01495493\tsharks with broad flat bodies and winglike pectoral fins but that swim the way sharks do\nn01495701\tcartilaginous fishes having horizontally flattened bodies and enlarged winglike pectoral fins with gills on the underside; most swim by moving the pectoral fins\nn01496331\tany sluggish bottom-dwelling ray of the order Torpediniformes having a rounded body and electric organs on each side of the head capable of emitting strong electric discharges\nn01497118\tprimitive ray with sharp teeth on each edge of a long flattened snout\nn01497413\tcommonly found in tropical bays and estuaries; not aggressive\nn01497738\tprimitive tropical bottom-dwelling ray with a guitar-shaped body\nn01498041\tlarge venomous ray with large barbed spines near the base of a thin whiplike tail capable of inflicting severe wounds\nn01498406\tone of the largest stingrays; found from Cape Cod to Cape Hatteras\nn01498699\ta stingray with a short tail and a broad fin\nn01498989\tpowerful free-swimming tropical ray noted for `soaring' by flapping winglike fins; usually harmless but has venomous tissue near base of the tail as in stingrays\nn01499396\tray with back covered with white or yellow spots; widely distributed in warm seas\nn01499732\tlarge ray found along eastern coast of North America\nn01500091\textremely large pelagic tropical ray that feeds on plankton and small fishes; usually harmless but its size make it dangerous if harpooned\nn01500476\tlargest manta (to 22 feet across wings); found worldwide but common in Gulf of Mexico and along southern coasts of United States; primarily oceanic\nn01500854\tsmall manta (to 4 
feet) that travels in schools\nn01501160\tlarge edible rays having a long snout and thick tail with pectoral fins continuous with the head; swim by undulating the edges of the pectoral fins\nn01501641\tcommon European skate used as food\nn01501777\tmost plentiful skate in North American inshore waters in summer; to 21 inches\nn01501948\tcold-water bottom fish with spines on the back; to 40 inches\nn01502101\tone of the largest skates (to 5 feet); an active skate easy to hook\nn01503061\twarm-blooded egg-laying vertebrates characterized by feathers and forelimbs modified as wings\nn01503976\tsmall bird; adults talking to children sometimes use these words to refer to small birds\nn01504179\tyoung bird that has just fledged or become capable of flying\nn01504344\tyoung bird not yet fledged\nn01514668\tadult male bird\nn01514752\ta cock bred and trained for fighting\nn01514859\tadult female bird\nn01514926\ta bird that has built (or is building) a nest\nn01515078\tany bird associated with night: owl; nightingale; nighthawk; etc\nn01515217\tany bird that cries at night\nn01515303\tany bird that migrates seasonally\nn01516212\textinct primitive toothed bird of the Jurassic period having a long feathered tail and hollow bones; usually considered the most primitive of all birds\nn01517389\textinct primitive toothed bird with a long feathered tail and three free clawed digits on each wing\nn01517565\tflightless birds having flat breastbones lacking a keel for attachment of flight muscles: ostriches; cassowaries; emus; moas; rheas; kiwis; elephant birds\nn01517966\tbirds having keeled breastbones for attachment of flight muscles\nn01518878\tfast-running African flightless bird with two-toed feet; largest living bird\nn01519563\tlarge black flightless bird of Australia and New Guinea having a horny head crest\nn01519873\tlarge Australian flightless bird similar to the ostrich but smaller\nn01520576\tnocturnal flightless bird of New Zealand having a long neck and stout legs; 
only surviving representative of the order Apterygiformes\nn01521399\tlarger of two tall fast-running flightless birds similar to ostriches but three-toed; found from Brazil to Patagonia\nn01521756\tsmaller of two tall fast-running flightless birds similar to ostriches but three-toed; found from Peru to Strait of Magellan\nn01522450\thuge (to 9 ft.) extinct flightless bird of Madagascar\nn01523105\textinct flightless bird of New Zealand\nn01524359\tperching birds mostly small and living near the ground with feet having 4 toes arranged to allow for gripping the perch; most are songbirds; hatchlings are helpless\nn01524761\tchiefly arboreal birds especially of the order Coraciiformes\nn01525720\tpasserine bird having specialized vocal apparatus\nn01526521\tany bird having a musical call\nn01526766\tAustralasian bird with tongue and bill adapted for extracting nectar\nn01527194\tsmall sparrow-like songbird of mountainous regions of Eurasia\nn01527347\tsmall brownish European songbird\nn01527617\tany of numerous predominantly Old World birds noted for their singing\nn01527917\tbrown-speckled European lark noted for singing while hovering at a great height\nn01528396\tOld World bird having a very long tail that jerks up and down as it walks\nn01528654\ta songbird that lives mainly on the ground in open country; has streaky brown plumage\nn01528845\ta common pipit that is brown above and white below; widely distributed in northern and central Europe and in Asia\nn01529672\tany of numerous small songbirds with short stout bills adapted for crushing seeds\nn01530439\tsmall European finch with a cheerful song\nn01530575\tEurasian finch\nn01531178\tsmall European finch having a crimson face and yellow-and-black wings\nn01531344\tsmall Old World finch whose male has a red breast and forehead\nn01531512\tsmall yellow-and-black Eurasian finch with a sharp beak\nn01531639\tSouth American species of scarlet finch with black head and wings and tail\nn01531811\tsmall siskin-like 
finch with a red crown and a rosy breast and rump\nn01531971\tsmall siskin-like finch with a red crown\nn01532325\tAmerican finch whose male has yellow body plumage in summer\nn01532511\tsmall finch of North American coniferous forests\nn01532829\tsmall finch originally of the western United States and Mexico\nn01533000\tNorth American finch having a raspberry-red head and breast and rump\nn01533339\tany of several small Old World finches\nn01533481\tnative to the Canary Islands and Azores; popular usually yellow cage bird noted for its song\nn01533651\tany of various brown and yellow finches of parts of Europe\nn01533893\tfinch with a bill whose tips cross when closed\nn01534155\tcommon European finch mostly black and white with red throat and breast\nn01534433\tsmall North American finch seen chiefly in winter\nn01534582\tcommon North American junco having grey plumage and eyes with dark brown irises\nn01534762\tsparrow-like North American finches\nn01535140\tcommon North American finch noted for its evening song\nn01535469\tcommon North American finch with a white patch on the throat and black-and-white striped crown\nn01535690\tfinch with black-and-white striped crown\nn01536035\tsmall North American finch common in urban areas\nn01536186\tcommon North American finch of brushy pasturelands\nn01536334\tfinch common in winter in the northern U.S.\nn01536644\tsmall songbird common in North America\nn01536780\tNorth American finch of marshy area\nn01537134\tany of numerous seed-eating songbirds of Europe or North America\nn01537544\tsmall deep blue North American bunting\nn01537895\tbrownish Old World bunting often eaten as a delicacy\nn01538059\tEuropean bunting inhabiting marshy areas\nn01538200\tEuropean bunting the male being bright yellow\nn01538362\tcommon in Russia and Siberia\nn01538630\twhite Arctic bunting\nn01538955\tsmall bright-colored tropical American songbird with a curved bill for sucking nectar\nn01539272\tany of several 
honeycreepers\nn01539573\tany of several small dull-colored singing birds feeding on seeds or insects\nn01539925\tsmall hardy brown-and-grey bird native to Europe\nn01540090\tEurasian sparrow smaller than the house sparrow\nn01540233\tany of various finches of Europe or America having a massive and powerful bill\nn01540566\tNorth American grosbeak\nn01540832\ta common large finch of Eurasia\nn01541102\tlarge grosbeak of coniferous forests of Old and New Worlds\nn01541386\tcrested thick-billed North American finch having bright red plumage in the male\nn01541760\tcrested grey-and-red bird of southwest United States and Mexico\nn01541922\tany of numerous long-tailed American finches\nn01542168\tcommon towhee of eastern North America\nn01542433\ttowhee of the Rocky Mountains\nn01542786\tfinch-like African and Asian colonial birds noted for their elaborately woven nests\nn01543175\tcommon Indian weaverbird\nn01543383\tmostly black African weaverbird\nn01543632\tsmall finch-like Indonesian weaverbird that frequents rice fields\nn01543936\tred Asian weaverbirds often kept as cage birds\nn01544208\tusually brightly-colored Australian weaverbirds; often kept as cage birds\nn01544389\tsmall Australian weaverbird with markings like a zebra's\nn01544704\tsmall to medium-sized finches of the Hawaiian islands\nn01545574\tAustralian bird that resembles a pheasant; the courting male displays long tail feathers in a lyre shape\nn01546039\tsmall fast-running Australian bird resembling a wren and frequenting brush or scrub\nn01546506\tsmall birds of the Old World tropics having bright plumage and short wide bills\nn01546921\ta passerine bird of the suborder Tyranni\nn01547832\tlarge American birds that characteristically catch insects on the wing\nn01548301\tlarge American flycatcher\nn01548492\ta kingbird seen in western United States; head and back are pale grey and the breast is yellowish and the tail is black\nn01548694\ta kingbird seen in the southwestern United States; largely 
grey with a yellow abdomen\nn01548865\ta kingbird that breeds in North America and winters in tropical America; distinguished by a white band on the tip of the tail\nn01549053\ta kingbird that breeds in the southeastern United States and winters in tropical America; similar to but larger than the eastern kingbird\nn01549430\tsmall olive-colored woodland flycatchers of eastern North America\nn01549641\tsmall flycatcher of western North America\nn01549886\tsmall dun-colored North American flycatcher\nn01550172\ttropical American flycatcher found as far north as southern Texas and Arizona; adult male has bright scarlet and black plumage\nn01550761\tpasserine bird of New World tropics\nn01551080\ttropical bird of northern South America the male having brilliant red or orange plumage and an erectile disklike crest\nn01551300\tbird of the Andes similar to Rupicola rupicola\nn01551711\tany of numerous small bright-colored birds of Central America and South America having short bills and elaborate courtship behavior\nn01552034\tany of several tropical American birds of the genus Procnias having a bell-like call\nn01552333\tblack tropical American bird having a large overhanging crest and long feathered wattle\nn01552813\tsmall brownish South American birds that build oven-shaped clay nests\nn01553142\tany of various dull-colored South American birds that feeding on ants some following army ant swarms\nn01553527\ta kind of antbird\nn01553762\tantbirds superficially resembling shrikes\nn01554017\ta kind of antbird\nn01554448\tany of numerous South American and Central American birds with a curved bill and stiffened tail feathers that climb and feed like woodpeckers\nn01555004\tany bird of the genus Pitta; brilliantly colored chiefly terrestrial birds with short wings and tail and stout bills\nn01555305\tgrey flycatcher of the southwestern United States and Mexico and Central America having a long forked tail and white breast and salmon and scarlet markings\nn01555809\tany of 
a large group of small songbirds that feed on insects taken on the wing\nn01556182\tcommon European woodland flycatcher with greyish-brown plumage\nn01556514\tAustralian and southeastern Asian birds with a melodious whistling call\nn01557185\tsongbirds characteristically having brownish upper plumage with a spotted breast\nn01557962\tlarge European thrush that feeds on mistletoe berries\nn01558149\tcommon Old World thrush noted for its song\nn01558307\tmedium-sized Eurasian thrush seen chiefly in winter\nn01558461\tsmall European thrush having reddish flanks\nn01558594\tcommon black European thrush\nn01558765\tEuropean thrush common in rocky areas; the male has blackish plumage with a white band around the neck\nn01558993\tlarge American thrush having a rust-red breast and abdomen\nn01559160\trobin of Mexico and Central America\nn01559477\tNorth American thrush noted for its complex and appealing song\nn01559639\ttawny brown North American thrush noted for its song\nn01559804\tlarge thrush common in eastern American woodlands; noted for its melodious song\nn01560105\tEuropean songbird noted for its melodious nocturnal song\nn01560280\tlarge nightingale of eastern Europe\nn01560419\tnightingale spoken of in Persian poetry\nn01560636\tsongbirds having a chattering call\nn01560793\tcommon European chat with black plumage and a reddish-brown breast\nn01560935\tbrown-and-buff European songbird of grassy meadows\nn01561181\ta dull grey North American thrush noted for its beautiful song\nn01561452\tEuropean songbird with a reddish breast and tail; related to Old World robins\nn01561732\tsmall songbird of northern America and Eurasia having a distinctive white rump\nn01562014\tblue North American songbird\nn01562265\tsmall Old World songbird with a reddish breast\nn01562451\tsongbird of northern Europe and Asia\nn01563128\ta small active songbird\nn01563449\tvery small North American and South American warblers\nn01563746\tsmall birds resembling warblers but having some of 
the habits of titmice\nn01563945\tEuropean kinglet with a black-bordered yellow crown patch\nn01564101\tAmerican golden-crested kinglet\nn01564217\tAmerican kinglet with a notable song and in the male a red crown patch\nn01564394\tsmall active brownish or greyish Old World birds\nn01564773\tsmall brownish-grey warbler with a black crown\nn01564914\tgreyish-brown Old World warbler with a white throat and underparts\nn01565078\tOld World warbler similar to the greater whitethroat but smaller\nn01565345\tEuropean woodland warbler with dull yellow plumage\nn01565599\tsmall European warbler that breeds among reeds and wedges and winters in Africa\nn01565930\tsmall Asiatic and African bird; constructs nests like those of tailorbirds\nn01566207\ttropical Asian warbler that stitches leaves together to form and conceal its nest\nn01566645\tany of various insectivorous Old World birds with a loud incessant song; in some classifications considered members of the family Muscicapidae\nn01567133\tsmall bright-colored American songbird with a weak unmusical song\nn01567678\tsmall grey-blue wood warbler with yellow throat and breast; of eastern North America\nn01567879\tyellow wood warbler with a black crown\nn01568132\tany of numerous American wood warblers that feed on insects caught on the wing\nn01568294\tflycatching warbler of eastern North America the male having bright orange on sides and wings and tail\nn01568720\tNorth American wood warbler; olive green and yellow striped with black\nn01568892\tyellow-throated American wood warbler\nn01569060\tblack-and-white North American wood warbler having an orange-and-black head and throat\nn01569262\tcommon warbler of western North America\nn01569423\tsimilar to Audubon's warbler\nn01569566\tNorth American warbler having a black-and-white head\nn01569836\tbirds having a chattering call\nn01569971\tAmerican warbler noted for imitating songs of other birds\nn01570267\tAmerican warbler; builds a dome-shaped nest on the 
ground\nn01570421\tbrownish North American warbler found near streams\nn01570676\tsmall olive-colored American warblers with yellow breast and throat\nn01570839\tan American warbler\nn01571410\tvelvety black Australian bird of paradise with green and purple iridescence on head and tail\nn01571904\tAmerican songbird; male is black and orange or yellow\nn01572328\ta kind of New World oriole\nn01572489\teastern subspecies of northern oriole\nn01572654\twestern subspecies of northern oriole\nn01572782\tthe male is chestnut-and-black\nn01573074\tNorth American songbirds having a yellow breast\nn01573240\ta meadowlark of eastern North America\nn01573360\ta meadowlark of western North America\nn01573627\tblack-and-red or black-and-yellow orioles of the American tropics\nn01573898\tmigratory American songbird\nn01574045\tany bird of the family Icteridae whose male is black or predominantly black\nn01574390\tlong-tailed American blackbird having iridescent black plumage\nn01574560\teastern United States grackle\nn01574801\tNorth American blackbird whose bluish-black plumage is rusty-edged in the fall\nn01575117\tNorth American blackbird that follows cattle and lays eggs in other birds' nests\nn01575401\tNorth American blackbird with scarlet patches on the wings\nn01575745\tmostly tropical songbird; the male is usually bright orange and black\nn01576076\tbright yellow songbird with black wings\nn01576358\tgreenish-yellow Australian oriole feeding chiefly on figs and other fruits\nn01576695\tgregarious birds native to the Old World\nn01577035\tgregarious bird having plumage with dark metallic gloss; builds nests around dwellings and other structures; naturalized worldwide\nn01577458\tglossy black bird with pink back and abdomen; chiefly Asian\nn01577659\ttropical Asian starlings\nn01577941\tdark brown crested bird of southeastern Asia\nn01578180\tglossy black Asiatic starling often taught to mimic speech\nn01578575\tbirds of the crow family\nn01579028\tblack birds having a 
raucous call\nn01579149\tcommon crow of North America\nn01579260\tlarge black bird with a straight bill and long wedge-shaped tail\nn01579410\tcommon gregarious Old World bird about the size and color of the American crow\nn01579578\tcommon black-and-grey Eurasian bird noted for thievery\nn01579729\ta European corvine bird of small or medium size with red legs and glossy black plumage\nn01580077\tcrested largely blue bird\nn01580379\ta European jay\nn01580490\tfawn-colored jay with black-and-white crest and blue-and-black wings\nn01580772\ta North American jay\nn01580870\tcommon jay of eastern North America; bright blue with grey breast\nn01581166\ta jay of northern North America with black-capped head and no crest; noted for boldness in thievery\nn01581434\ta Canada jay with a white head; widely distributed from Montana to Arizona\nn01581730\tspeckled birds that feed on nuts\nn01581874\tOld World nutcracker\nn01581984\tnutcracker of the western United States\nn01582220\tlong-tailed black-and-white crow that utters a raucous chattering call\nn01582398\ta common magpie of Eurasia\nn01582498\ta magpie of Rocky Mountains in North America\nn01582856\tblack-and-white oscine birds that resemble magpies\nn01583209\tlarge carnivorous Australian bird with the shrike-like habit of impaling prey on thorns\nn01583495\tbluish black fruit-eating bird with a bell-like call\nn01583828\tcrow-sized black-and-white bird; a good mimic often caged\nn01584225\tany of several small active brown birds of the northern hemisphere with short upright tails; they feed on insects\nn01584695\tsmall wren of coniferous forests of northern hemisphere\nn01584853\tcommon American wren that nests around houses\nn01585121\ta wren of the genus Cistothorus that frequents marshes\nn01585287\tAmerican wren that inhabits tall reed beds\nn01585422\tsmall American wren inhabiting wet sedgy meadows\nn01585715\twren inhabiting badlands and mesa country of western United States and Mexico\nn01586020\tlarge 
United States wren with a musical call\nn01586374\tlarge harsh-voiced American wren of arid regions of the United States southwest and Mexico\nn01586941\tlong-tailed grey-and-white songbird of the southern United States able to mimic songs of other birds\nn01587278\tmockingbird of Mexico\nn01587526\tNorth American songbird whose call resembles a cat's mewing\nn01587834\tthrush-like American songbird able to mimic other birdsongs\nn01588002\tcommon large songbird of eastern United States having reddish-brown plumage\nn01588431\tbirds of New Zealand that resemble wrens\nn01588725\tshort-tailed bird resembling a wren\nn01588996\tsmall green-and-bronze bird\nn01589286\tany of various small insectivorous birds of the northern hemisphere that climb up a tree trunk supporting themselves on stiff tail feathers and their feet\nn01589718\ta common creeper in North America with a down-curved bill\nn01589893\tcommon European brown-and-buff tree creeper with down-curved bill\nn01590220\tcrimson-and-grey songbird that inhabits town walls and mountain cliffs of southern Eurasia and northern Africa\nn01591005\ta kind of nuthatch\nn01591123\tbluish-grey nuthatch with reddish breast; of northern coniferous forests\nn01591301\tbluish-grey nuthatch with black head and white breast; of eastern North America\nn01591697\tsmall insectivorous birds\nn01592084\tany of various small grey-and-black songbirds of North America\nn01592257\tchickadee having a dark crown\nn01592387\tcrested titmouse of eastern and midwestern United States\nn01592540\tsouthern United States chickadee similar to the blackcap but smaller\nn01592694\twidely distributed European titmouse with bright cobalt blue wings and tail and crown of the head\nn01593028\tactive grey titmice of western North America\nn01593282\tsmall brown bird of California resembling a wren\nn01593553\tvery small yellow-headed titmouse of western North America\nn01594004\tfruit-eating mostly brilliant blue songbird of the East 
Indies\nn01594372\tsmall long-winged songbird noted for swift graceful flight and the regularity of its migrations\nn01594787\tcommon swallow of North America and Europe that nests in barns etc.\nn01594968\tNorth American swallow that lives in colonies and builds bottle-shaped mud nests on cliffs and walls\nn01595168\tof Australia and Polynesia; nests in tree cavities\nn01595450\tbluish-green-and-white North American swallow; nests in tree cavities\nn01595624\tany of various swallows with squarish or slightly forked tail and long pointed wings; migrate around Martinmas\nn01595974\tcommon small European martin that builds nests under the eaves of houses\nn01596273\tswallow of the northern hemisphere that nests in tunnels dug in clay or sand banks\nn01596608\tlarge North American martin of which the male is blue-black\nn01597022\tAustralasian and Asiatic bird related to the shrikes and resembling a swallow\nn01597336\tany of numerous New World woodland birds having brightly colored males\nn01597737\tthe male is bright red with black wings and tail\nn01597906\tof western North America; male is black and yellow and orange-red\nn01598074\tof middle and southern United States; male is deep rose-red the female mostly yellow\nn01598271\tcommon tanager of southwestern United States and Mexico\nn01598588\tany of numerous Old World birds having a strong hooked bill that feed on smaller animals\nn01598988\tshrikes that impale their prey on thorns\nn01599159\ta common European butcherbird\nn01599269\ta butcherbird of northern North America\nn01599388\ta butcherbird of western North America; grey with white underparts\nn01599556\ta common shrike of southeastern United States having black bands around the eyes\nn01599741\ta shrike of central North America; winters in Texas and the southern Mississippi valley\nn01600085\tan African shrike\nn01600341\ta kind of bush shrike\nn01600657\tany of various birds of the Australian region whose males build ornamented structures resembling 
bowers in order to attract females\nn01601068\tof southeast Australia; male is glossy violet blue; female is light grey-green\nn01601410\tlarge bowerbird of northern Australia\nn01601694\tsmall stocky diving bird without webbed feet; frequents fast-flowing streams and feeds along the bottom\nn01602080\ta water ouzel of Europe\nn01602209\ta water ouzel of western North America\nn01602630\tany of various small insectivorous American birds chiefly olive-grey in color\nn01602832\tof northern North America having red irises and an olive-grey body with white underparts\nn01603000\tof eastern North America having a bluish-grey head and mostly green body\nn01603152\tcommon vireo of northeastern North America with bluish slaty-grey head\nn01603600\tbrown velvety-plumaged songbirds of the northern hemisphere having crested heads and red waxy wing tips\nn01603812\twidely distributed over temperate North America\nn01603953\tlarge waxwing of northern North America; similar to but larger than the cedar waxwing\nn01604330\tany of numerous carnivorous birds that hunt and kill other animals\nn01604968\tin some classifications an alternative name for the Falconiformes\nn01605630\tdiurnal bird of prey typically having short rounded wings and a long tail\nn01606097\tan unfledged or nestling hawk\nn01606177\tmale hawk especially male peregrine or gyrfalcon\nn01606522\tlarge hawk of Eurasia and North America used in falconry\nn01606672\tsmall hawk of Eurasia and northern Africa\nn01606809\tbluish-grey North American hawk having a darting flight\nn01606978\tnontechnical term for any hawks said to prey on poultry\nn01607309\tany hawk of the genus Buteo\nn01607429\tdark brown American hawk species having a reddish-brown tail\nn01607600\tlarge hawk of the northern hemisphere that feeds chiefly on small rodents and is beneficial to farmers\nn01607812\tNorth American hawk with reddish brown shoulders\nn01607962\tthe common European short-winged hawk\nn01608265\tOld World hawk that feeds on 
bee larvae and small rodents and reptiles\nn01608432\tany of several small graceful hawks of the family Accipitridae having long pointed wings and feeding on insects and small animals\nn01608814\tdark Old World kite feeding chiefly on carrion\nn01609062\tgraceful North American black-and-white kite\nn01609391\tgrey-and-white American kite of warm and tropical regions\nn01609751\thawks that hunt over meadows and marshes and prey on small terrestrial animals\nn01609956\tOld World harrier frequenting marshy regions\nn01610100\tbrownish European harrier\nn01610226\tcommon harrier of North America and Europe; nests in marshes and open land\nn01610552\tany of numerous large Old World hawks intermediate in some respects between typical hawks and typical eagles\nn01610955\tdiurnal birds of prey having long pointed powerful wings adapted for swift flight\nn01611472\ta widely distributed falcon formerly used in falconry\nn01611674\tfemale falcon especially a female peregrine falcon\nn01611800\tlarge and rare Arctic falcon having white and dark color phases\nn01611969\tsmall Old World falcon that hovers in the air against a wind\nn01612122\tsmall North American falcon\nn01612275\tsmall falcon of Europe and America having dark plumage with black-barred tail; used in falconry\nn01612476\tsmall Old World falcon formerly trained and flown at small birds\nn01612628\tany of various long-legged carrion-eating hawks of South America and Central America\nn01612955\twidespread from southern United States to Central America; rusty black with black-and-white breast and tail\nn01613177\tSouth American caracara\nn01613294\tany of various large keen-sighted diurnal birds of prey noted for their broad wings and strong soaring flight\nn01613615\ta bird that is still young\nn01613807\ta young eagle\nn01614038\tlarge black-and-white crested eagle of tropical America\nn01614343\tlarge eagle of mountainous regions of the northern hemisphere having a golden-brown head and neck\nn01614556\tbrownish 
eagle of Africa and parts of Asia\nn01614925\ta large eagle of North America that has a white head and dark wings and body\nn01615121\tany of various large eagles that usually feed on fish\nn01615303\tfound on coasts of the northwestern Pacific\nn01615458\tbulky greyish-brown eagle with a short wedge-shaped white tail; of Europe and Greenland\nn01615703\tof southeast Europe and central Asia\nn01616086\tlarge harmless hawk found worldwide that feeds on fish and builds a bulky nest often occupied for years\nn01616318\tany of various large diurnal birds of prey having naked heads and weak claws and feeding chiefly on carrion\nn01616551\tin some classifications considered the family comprising the Old World vultures which are more often included in the family Accipitridae\nn01616764\tany of several large vultures of Africa and Eurasia\nn01617095\tlarge vulture of southern Europe and northern Africa having pale plumage with black wings\nn01617443\tthe largest Eurasian bird of prey; having black feathers hanging around the bill\nn01617766\tsmall mostly white vulture of Africa and southern Eurasia\nn01618082\tof southern Eurasia and northern Africa\nn01618503\tlarge long-legged African bird of prey that feeds on reptiles\nn01618922\tlarge birds of prey superficially similar to Old World vultures\nn01619310\ta New World vulture that is common in South America and Central America and the southern United States\nn01619536\tthe largest flying birds in the western hemisphere\nn01619835\tlarge vulture of the high Andes having black plumage and white neck ruff\nn01620135\tNorth American condor; chiefly dull black; almost extinct\nn01620414\tAmerican vulture smaller than the turkey buzzard\nn01620735\tlarge black-and-white vulture of South America and Central America; have colorful wattles and wartlike protuberances on head and neck\nn01621127\tnocturnal bird of prey with hawk-like beak and claws and large head with front-facing eyes\nn01621635\tyoung owl\nn01622120\tsmall 
European owl\nn01622352\tlarge owls having prominent ear tufts\nn01622483\tbrown North American horned owl\nn01622779\tlarge dish-faced owl of northern North America and western Eurasia\nn01622959\treddish-brown European owl having a round head with black eyes\nn01623110\tlarge owl of eastern North America having its breast and abdomen streaked with brown\nn01623425\tsmall North American owl having hornlike tufts of feathers whose call sounds like a quavering whistle\nn01623615\tany owl that has a screeching cry\nn01623706\tany of several small owls having ear tufts and a whistling call\nn01623880\ta large owl of North America found in forests from British Columbia to central Mexico; has dark brown plumage and a heavily spotted chest\nn01624115\tEuropean scops owl\nn01624212\tAsian scops owl\nn01624305\tany owl that hoots as distinct from screeching\nn01624537\tgrey-and-white diurnal hawk-like owl of northern parts of the northern hemisphere\nn01624833\tslender European owl of coniferous forests with long ear tufts\nn01625121\talmost extinct owl of New Zealand\nn01625562\tmottled buff and white owl often inhabiting barns and other structures; important in rodent control\nn01627424\tcold-blooded vertebrate typically living on land but breeding in water; aquatic larvae undergo metamorphosis into adult form\nn01628331\tearly tetrapod amphibian found in Greenland\nn01628770\tamphibians that resemble lizards\nn01629276\tany of various typically terrestrial amphibians that resemble lizards and that return to water only to breed\nn01629819\ta kind of European salamander\nn01629962\tEuropean salamander having dark skin with usually yellow spots\nn01630148\tovoviviparous amphibian of the Alps\nn01630284\tsmall usually bright-colored semiaquatic salamanders of North America and Europe and northern Asia\nn01630670\tsmall semiaquatic salamander\nn01630901\tred terrestrial form of a common North American newt\nn01631175\tany of several rough-skinned newts found in western North 
America\nn01631354\tnewt of humid coast from Alaska to southern California\nn01631512\tnewt that is similar to Taricha granulosa in characteristics and habitat\nn01631663\ta newt in its terrestrial stage of development\nn01632047\tsmall to moderate-sized terrestrial or semiaquatic New World salamander\nn01632308\tbrownish-black burrowing salamander of southeastern United States\nn01632458\tglossy black North American salamander with yellow spots\nn01632601\twidely distributed brown or black North American salamander with vertical yellowish blotches\nn01632777\tlarval salamander of mountain lakes of Mexico that usually lives without metamorphosing\nn01632952\tany of several large aquatic salamanders\nn01633406\tlarge salamander of North American rivers and streams\nn01633781\tlarge (up to more than three feet) edible salamander of Asia\nn01634227\tEuropean aquatic salamander with permanent external gills that lives in caves\nn01634522\taquatic North American salamander with red feathery external gills\nn01635027\tsalamanders found near cold streams throughout the year\nn01635176\tlarge (to 7 inches) salamander of western North America\nn01635480\tsmall large-eyed semiaquatic salamander of the United States Northwest\nn01636127\tmostly terrestrial salamanders that breathe through their thin moist skin; lay eggs in moist places on land; rarely enter water\nn01636352\tcommon salamander of eastern North America\nn01636510\tsalamander of the Pacific coast of North America\nn01636829\tcommon North American salamander mottled with dull brown or greyish-black\nn01637112\tany of several North American salamanders adapted for climbing with well-developed limbs and long somewhat squared-off toes\nn01637338\tyellow-spotted brown salamander of California woodlands\nn01637615\tany of several small slim salamanders of the Pacific coast of the United States\nn01637932\tany of several salamanders with webbed toes and very long extensile tongues; excellent climbers that move with 
ease over smooth rock surfaces\nn01638194\tprimarily a cave dweller in the Mount Shasta area\nn01638329\tsimilar to Shasta salamander; lives in cliff crevices and taluses\nn01638722\taquatic eel-shaped salamander having two pairs of very small feet; of still muddy waters in the southern United States\nn01639187\teellike aquatic North American salamander with small forelimbs and no hind limbs; have permanent external gills\nn01639765\tany of various tailless stout-bodied amphibians with long hind limbs for leaping; semiaquatic and terrestrial species\nn01640846\tinsectivorous usually semiaquatic web-footed amphibian with smooth moist skin and long hind legs\nn01641206\twide-ranging light-brown frog of moist North American woodlands especially spruce\nn01641391\tcommon North American green or brownish frog having white-edged dark oval spots\nn01641577\tlargest North American frog; highly aquatic with a deep-pitched voice\nn01641739\tsimilar to bullfrog; found in or near marshes and ponds; of United States and Canada\nn01641930\tmountain frog found near water; of United States Northwest to California\nn01642097\tlargest living frog; up to a foot and weighing up to 10 lbs; Africa\nn01642257\ta meadow frog of eastern North America\nn01642391\tMexican frog found within a jump or two of water\nn01642539\ta common semiterrestrial European frog\nn01642943\ttoothed frogs: terrestrial or aquatic or arboreal\nn01643255\tsmall terrestrial frog of tropical America\nn01643507\tof southwest United States and Mexico; call is like a dog's bark\nn01643896\tlarge toothed frog of South America and Central America resembling the bullfrog\nn01644373\tany of various Old World arboreal frogs distinguished from true frogs by adhesive suckers on the toes\nn01644900\twestern North American frog with a taillike copulatory organ\nn01645466\tprimitive New Zealand frog with four unwebbed toes on forefeet and five on hind feet\nn01645776\ttailless amphibian similar to a frog but more terrestrial 
and having drier warty skin\nn01646292\tany toad of the genus Bufo\nn01646388\tlargest known toad species; native to Central America; valuable destroyer of insect pests\nn01646555\tcommon toad of Europe\nn01646648\tcommon brownish-yellow short-legged toad of western Europe; runs rather than hops\nn01646802\tcommon toad of America\nn01646902\tEurasian toad with variable chiefly green coloring\nn01647033\tsmall green or yellow-green toad with small black bars and stripes\nn01647180\tof high Sierra Nevada meadows and forest borders\nn01647303\tnocturnal burrowing toad of mesquite woodland and prairies of the United States southwest\nn01647466\ta uniformly warty stocky toad of washes and streams of semiarid southwestern United States\nn01647640\tof a great variety of habitats from southern Alaska to Baja California west of the Rockies\nn01648139\tEuropean toad whose male carries the fertilized eggs wrapped around its hind legs until they hatch\nn01648356\tsimilar in habit to Alytes obstetricians\nn01648620\ttoad of central and eastern Europe having red or orange patches mixed with black on its underside\nn01649170\ta burrowing toad of the northern hemisphere with a horny spade-like projection on each hind foot\nn01649412\tthis spadefoot toad live in California\nn01649556\tthis spadefoot toad lives in the southwestern United States\nn01649726\tthis spadefoot toad lives in plains and hills and river bottoms in areas of low rainfall east of the Rocky Mountains\nn01650167\tarboreal amphibians usually having adhesive disks at the tip of each toe; of southeast Asia and Australia and America\nn01650690\ta small brown tree toad having a shrill call heard near wetlands of eastern United States and Canada in early spring\nn01650901\tthe most commonly heard frog on the Pacific coast of America\nn01651059\ta small chiefly ground dweller that stays within easy jumping distance of water; of United States southwest and northern Mexico\nn01651285\ta form of tree 
toad\nn01651487\teither of two frogs with a clicking call\nn01651641\ta cricket frog of eastern and central United States\nn01651778\ta cricket frog of eastern United States\nn01652026\tany of several small North American frogs having a loud call\nn01652297\tterrestrial burrowing nocturnal frog of grassy terrain and scrub forests having very hard upper surface of head; of the United States southwest\nn01653026\tsmall secretive toad with smooth tough skin of central and western North America\nn01653223\tsmall toad of southeastern United States\nn01653509\tmostly of Central America\nn01653773\talmost completely aquatic frog native to Africa and Panama and northern South America\nn01654083\ta South American toad; incubates its young in pits in the skin of its back\nn01654637\ta tongueless frog native to Africa; established in the United States as result of release of laboratory and aquarium animals\nn01654863\ta South American toad\nn01655344\tany of the small slender limbless burrowing wormlike amphibians of the order Gymnophiona; inhabit moist soil in tropical regions\nn01661091\tany cold-blooded vertebrate of the class Reptilia including tortoises, turtles, snakes, lizards, alligators, crocodiles, and extinct forms\nn01661592\tprimitive reptile having no opening in the temporal region of the skull; all extinct except turtles\nn01661818\treptile having a pair of openings in the skull behind each eye\nn01662060\tused in former classifications to include all living reptiles except turtles; superseded by the two subclasses Lepidosauria and Archosauria\nn01662622\ta reptile of the order Chelonia\nn01662784\tany of various aquatic and land reptiles having a bony shell and flipper-like limbs for swimming\nn01663401\tany of various large turtles with limbs modified into flippers; widely distributed in warm seas\nn01663782\tlarge tropical turtle with greenish flesh used for turtle soup\nn01664065\tvery large carnivorous sea turtle; wide-ranging in warm open 
seas\nn01664369\ta marine turtle\nn01664492\tgrey sea turtle of the Atlantic and Gulf Coasts of North America\nn01664674\tolive-colored sea turtle of tropical Pacific and Indian and the southern Atlantic oceans\nn01664990\tpugnacious tropical sea turtle with a hawk-like beak; source of food and the best tortoiseshell\nn01665541\twide-ranging marine turtle with flexible leathery carapace; largest living turtle\nn01665932\tlarge aggressive freshwater turtle with powerful jaws\nn01666228\tlarge-headed turtle with powerful hooked jaws found in or near water; prone to bite\nn01666585\tlarge species having three ridges on its back; found in southeastern United States\nn01667114\tbottom-dwelling freshwater turtle inhabiting muddy rivers of North America and Central America\nn01667432\tsmall freshwater turtle having a strong musky odor\nn01667778\tany of various edible North American web-footed turtles living in fresh or brackish water\nn01668091\tof marshes along Atlantic and Gulf Coasts of United States\nn01668436\tfreshwater turtle of Chesapeake Bay tributaries having red markings on the lower shell\nn01668665\tfreshwater turtle of United States and South America; frequently raised commercially; some young sold as pets\nn01668892\tlarge river turtle of the southern United States and northern Mexico\nn01669191\tchiefly terrestrial turtle of North America; shell can be closed tightly\nn01669372\tprimarily a prairie turtle of western United States and northern Mexico\nn01669654\tfreshwater turtles having bright yellow and red markings; common in the eastern United States\nn01670092\tusually herbivorous land turtles having clawed elephant-like limbs; worldwide in arid area except Australia and Antarctica\nn01670535\tsmall land tortoise of southern Europe\nn01670802\tvery large tortoises of the Galapagos and Seychelles islands\nn01671125\tburrowing edible land tortoise of southeastern North America\nn01671479\tburrowing tortoise of the arid western United States and northern 
Mexico; may be reclassified as a member of genus Xerobates\nn01671705\tclose relative to the desert tortoise; may be reclassified as a member of genus Xerobates\nn01672032\tvoracious aquatic turtle with a flat flexible shell covered by a leathery skin; can inflict painful bites\nn01672432\triver turtle of western United States with a warty shell; prefers quiet water\nn01672611\triver turtle of Mississippi basin; prefers running water\nn01673282\tonly extant member of the order Rhynchocephalia of large spiny lizard-like diapsid reptiles of coastal islands off New Zealand\nn01674216\tany of various reptiles of the suborder Sauria which includes lizards; in former classifications included also the crocodiles and dinosaurs\nn01674464\trelatively long-bodied reptile with usually two pairs of legs and a tapering tail\nn01674990\tany of various small chiefly tropical and usually nocturnal insectivorous terrestrial lizards typically with immovable eyelids; completely harmless\nn01675352\ta gecko that has membranous expansions along the sides of its body and limbs and tail that enable it to glide short distances\nn01675722\tany of several geckos with dark bands across the body and differing from typical geckos in having movable eyelids; of United States southwest and Florida Gulf Coast\nn01676755\tlizards of the New World and Madagascar and some Pacific islands; typically having a long tail and bright throat patch in males\nn01677366\tlarge herbivorous tropical American arboreal lizards with a spiny crest along the back; used as human food in Central America and South America\nn01677747\tshore-dwelling seaweed-eating lizard of the Galapagos Islands\nn01678043\tsmall long-tailed lizard of arid areas of southwestern United States and northwestern Mexico\nn01678343\ta herbivorous lizard that lives among rocks in the arid parts of southwestern United States and Mexico\nn01678657\tswift lizard with long black-banded tail and long legs; of deserts of United States and 
Mexico\nn01679005\twith long pointed scales around toes; of deserts of United States and Mexico\nn01679307\tany of several slender lizards without external ear openings: of plains of western United States and Mexico\nn01679626\tany of several robust long-tailed lizards with collars of two dark bands; of central and western United States and northern Mexico\nn01679962\tany of several large lizards with many dark spots; of western United States and northern Mexico\nn01680264\tany of numerous lizards with overlapping ridged pointed scales; of North America and Central America\nn01680478\tspiny lizard often seen basking on fences in the United States and northern Mexico\nn01680655\tcommon western lizard; seen on logs or rocks\nn01680813\tsmall active lizard of United States and north to British Columbia\nn01680983\ta ground dweller that prefers open ground and scattered low bushes; of United States west between Rocky and Sierra Nevada Mountains\nn01681328\tone of the most abundant lizards in the arid western United States\nn01681653\ta climbing lizard of western United States and northern Mexico\nn01681940\tinsectivorous lizard with hornlike spines on the head and spiny scales on the body; of western North America\nn01682172\tof arid and semiarid open country\nn01682435\tsmall crested arboreal lizard able to run on its hind legs; of tropical America\nn01682714\tsmall arboreal tropical American insectivorous lizards with the ability to change skin color\nn01683201\ta lizard of the genus Amphisbaena; harmless wormlike limbless lizard of warm or tropical regions having concealed eyes and ears and a short blunt tail\nn01683558\tsmall secretive nocturnal lizard of southwestern North America and Cuba; bear live young\nn01684133\talert agile lizard with reduced limbs and an elongated body covered with shiny scales; more dependent on moisture than most lizards; found in tropical regions worldwide\nn01684578\tfound in western North American grasslands and open 
woodlands\nn01684741\tfrequents oak and pine habitats in rocky mountainous areas of United States southwest and Mexico\nn01685439\ttropical New World lizard with a long tail and large rectangular scales on the belly and a long tail\nn01685808\tany of numerous very agile and alert New World lizards\nn01686044\tvery swift lizard of eastern and central United States\nn01686220\thaving distinct longitudinal stripes: of Colorado Plateau from Arizona to western Colorado\nn01686403\thaving longitudinal stripes overlaid with light spots; upland lizard of United States southwest and Mexico\nn01686609\tactive lizard having a network of dusky dark markings; of semiarid areas from Oregon and Idaho to Baja California\nn01686808\tmarkings are darker and more marked than in western whiptail; from southeastern Colorado to eastern Chihuahua\nn01687128\tlarge (to 3 feet) blackish yellow-banded South American lizard; raid henhouses; used as food\nn01687290\tcrocodile-like lizard of South America having powerful jaws for crushing snails and mussels\nn01687665\ta lizard of the family Agamidae\nn01687978\tsmall terrestrial lizard of warm regions of the Old World\nn01688243\tlarge arboreal insectivorous Australian lizard with a ruff of skin around the neck\nn01688961\tany lizard of the genus Moloch\nn01689081\tdesert lizard that feeds on ants\nn01689411\tany of a small family of lizards widely distributed in warm areas; all are harmless and useful as destroyers of e.g. 
slugs and insects\nn01689811\tslim short-limbed lizard having a distinctive fold on each side that permits expansion; of western North America\nn01690149\tsmall burrowing legless European lizard with tiny eyes; popularly believed to be blind\nn01690466\tsnakelike lizard of Europe and Asia and North America with vestigial hind limbs and the ability to regenerate its long fragile tail\nn01691217\tdegenerate wormlike burrowing lizard of California closely related to alligator lizards\nn01691652\ta stout-bodied pleurodont lizard of Borneo\nn01691951\tany of two or three large heavy-bodied lizards; only known venomous lizards\nn01692333\tlarge orange and black lizard of southwestern United States; not dangerous unless molested\nn01692523\tlizard with black and yellowish beadlike scales; of western Mexico\nn01692864\tOld World terrestrial lizard\nn01693175\ta common and widely distributed lizard of Europe and central Asia\nn01693334\ta common Eurasian lizard about a foot long\nn01693783\tlizard of Africa and Madagascar able to change skin color and having a projectile tongue\nn01694178\ta chameleon found in Africa\nn01694311\ta kind of chameleon\nn01694709\tany of various large tropical carnivorous lizards of Africa and Asia and Australia; fabled to warn of crocodiles\nn01694955\tdestroys crocodile eggs\nn01695060\tthe largest lizard in the world (10 feet); found on Indonesian islands\nn01696633\textant archosaurian reptile\nn01697178\tlarge voracious aquatic reptile having a long snout with massive jaws and sharp teeth and a body covered with bony plates; of sluggish tropical waters\nn01697457\ta dangerous crocodile widely distributed in Africa\nn01697611\testuarine crocodile of eastern Asia and Pacific islands\nn01697749\ta variety of crocodile\nn01697978\tcrocodile of southeast Asia similar to but smaller than the gavial\nn01698434\teither of two amphibious reptiles related to crocodiles but with shorter broader snouts\nn01698640\tlarge alligator of the southeastern 
United States\nn01698782\tsmall alligator of the Yangtze valley of China having unwebbed digits\nn01699040\ta semiaquatic reptile of Central and South America that resembles an alligator but has a more heavily armored belly\nn01699254\tcaiman with bony ridges about the eyes; found from southern Mexico to Argentina\nn01699675\tlarge fish-eating Indian crocodilian with a long slender snout\nn01701551\tdinosaurs having bony armour\nn01701859\therbivorous ornithischian dinosaur with a row of bony plates along its back and a spiked tail probably used as a weapon\nn01702256\thaving the back covered with thick bony plates; thought to have walked with a sprawling gait resembling a lizard's\nn01702479\theavily armored and highly spiked dinosaur with semi-upright posture\nn01703011\tbipedal herbivorous dinosaurs with bony crowns\nn01703161\tbipedal herbivore having 10 inches of bone atop its head; largest boneheaded dinosaur ever found\nn01703569\tany of several four-footed herbivorous dinosaurs with enormous beaked skulls; of the late Cretaceous in North America and Mongolia\nn01704103\tsmall horned dinosaur\nn01704323\thuge ceratopsian dinosaur having three horns and the neck heavily armored with a very solid frill\nn01704626\tan unusual ceratopsian dinosaur having many large spikes around the edge of its bony frill and a long nose horn; late Cretaceous\nn01705010\tprimitive dinosaur actually lacking horns and having only the beginning of a frill; long hind limbs and short forelimbs; may have been bipedal\nn01705591\tbipedal herbivorous dinosaur\nn01705934\tany of numerous large bipedal ornithischian dinosaurs having a horny duck-like bill and webbed feet; may have been partly aquatic\nn01707294\tlarge duck-billed dinosaur of the Cretaceous period\nn01708106\therbivorous or carnivorous dinosaur having a three-pronged pelvis like that of a crocodile\nn01708998\tvery large herbivorous dinosaur of the Jurassic and Cretaceous having a small head a long neck and tail and 
five-toed limbs; largest known land animal\nn01709484\thuge quadrupedal herbivorous dinosaur common in North America in the late Jurassic\nn01709876\ta dinosaur that could grow to be as tall as a building five stories tall\nn01710177\ta huge quadrupedal herbivore with long neck and tail; of late Jurassic in western North America\nn01711160\thuge herbivorous dinosaur of Cretaceous found in Argentina\nn01712008\tany of numerous carnivorous dinosaurs of the Triassic to Cretaceous with short forelimbs that walked or ran on strong hind legs\nn01712752\tprimitive medium-sized theropod; swift-running bipedal carnivorous dinosaur having grasping hands with sharp claws and a short horn between the nostrils; Jurassic in North America\nn01713170\tone of the oldest known dinosaurs; late Triassic; cannibalistic\nn01713764\tlarge carnivorous bipedal dinosaur having enormous teeth with knifelike serrations; may have been a scavenger rather than an active predator; later Cretaceous period in North America\nn01714231\tlate Jurassic carnivorous dinosaur; similar to but somewhat smaller than tyrannosaurus\nn01715888\tlightly built medium-sized dinosaur having extremely long limbs and necks with small heads and big brains and large eyes\nn01717016\tadvanced carnivorous theropod\nn01717229\tadvanced carnivorous theropod\nn01717467\tsmall active carnivore that probably fed on protoceratops; possibly related more closely to birds than to other dinosaurs\nn01718096\tswift agile wolf-sized bipedal dinosaur having a large curved claw on each hind foot; of the Cretaceous\nn01718414\tlarge (20-ft) and swift carnivorous dinosaur having an upright slashing claw 15 inches long on each hind foot; early Cretaceous\nn01719403\textinct reptile having a single pair of lateral temporal openings in the skull\nn01721174\ta kind of therapsid\nn01721898\tlarge primitive reptile having a tall spinal sail; of the Permian or late Paleozoic in Europe and North America\nn01722670\tcarnivorous dinosaur of the 
Permian in North America having a crest or dorsal sail\nn01722998\tan extinct reptile of the Jurassic and Cretaceous having a bird-like beak and membranous wings supported by the very long fourth digit of each forelimb\nn01723579\textinct flying reptile\nn01724231\tany of several marine reptiles of the Mesozoic having a body like a porpoise with dorsal and tail fins and paddle-shaped limbs\nn01724840\tichthyosaurs of the Jurassic\nn01725086\tan ichthyosaur of the genus Stenopterygius\nn01725713\textinct marine reptile with a small head on a long neck a short tail and four paddle-shaped limbs; of the Jurassic and Cretaceous\nn01726203\textinct marine reptile with longer more slender limbs than plesiosaurs and less completely modified for swimming\nn01726692\tlimbless scaly elongate reptile; some are venomous\nn01727646\tmostly harmless temperate-to-tropical terrestrial or arboreal or aquatic snakes\nn01728266\tany of various harmless North American snakes that were formerly believed to take tail in mouth and roll along like a hoop\nn01728572\tsmall reddish wormlike snake of eastern United States\nn01728920\tany of numerous small nonvenomous North American snakes with a yellow or orange ring around the neck\nn01729322\tharmless North American snake with upturned nose; may spread its head and neck or play dead when disturbed\nn01729672\tany of various pale blotched snakes with a blunt snout of southwestern North America\nn01729977\teither of two North American chiefly insectivorous snakes that are green in color\nn01730185\tof western and central United States\nn01730307\tof southern and eastern United States\nn01730563\tany of numerous African colubrid snakes\nn01730812\tslender fast-moving North American snakes\nn01730960\tblackish racer of the eastern United States that grows to six feet\nn01731137\tbluish-green blacksnake found from Ohio to Texas\nn01731277\tslender fast-moving Eurasian snake\nn01731545\tany of several small fast-moving snakes with long whiplike 
tails\nn01731764\ta whipsnake of southern United States and Mexico; tail resembles a braided whip\nn01731941\ta whipsnake of scrublands and rocky hillsides\nn01732093\tboth terrestrial and arboreal snake of United States southwest\nn01732244\tany of various nonvenomous rodent-eating snakes of North America and Asia\nn01732614\tlarge harmless snake of southeastern United States; often on farms\nn01732789\tlarge harmless shiny black North American snake\nn01732989\tlarge North American snake\nn01733214\tenter buildings in pursuit of prey\nn01733466\tnocturnal burrowing snake of western United States with shiny tan scales\nn01733757\tany of several large harmless rodent-eating North American burrowing snakes\nn01733957\tbull snake of western North America that invades rodent burrows\nn01734104\tany of several bull snakes of eastern and southeastern United States found chiefly in pine woods; now threatened\nn01734418\tany of numerous nonvenomous North American constrictors; feed on other snakes and small mammals\nn01734637\twidespread in United States except northern regions; black or brown with yellow bands\nn01734808\tnonvenomous tan and brown king snake with an arrow-shaped occipital spot; southeastern ones have red stripes like coral snakes\nn01735189\tany of numerous nonvenomous longitudinally-striped viviparous North American and Central American snakes\nn01735439\ta garter snake that is widespread in North America\nn01735577\tslender yellow-striped North American garter snake; prefers wet places\nn01735728\tyellow- or reddish-striped snake of temperate woodlands and grasslands to tropics\nn01736032\tsecretive snake of city dumps and parks as well as prairies and open woods; feeds on earthworms; of central United States\nn01736375\tsmall shy brightly-ringed terrestrial snake of arid or semiarid areas of western North America\nn01736796\tin some classifications placed in genus Haldea; small reddish-grey snake of eastern North America\nn01737021\tany of various 
mostly harmless snakes that live in or near water\nn01737472\tin some classifications placed in the genus Nerodia; western United States snake that seldom ventures far from water\nn01737728\tany of numerous North American water snakes inhabiting fresh waters\nn01737875\tharmless European snake with a bright yellow collar; common in England\nn01738065\ta small harmless grass snake\nn01738306\tharmless woodland snake of southeastern United States\nn01738601\tsmall North American burrowing snake\nn01738731\ta sand snake of southwestern United States; lives in fine to coarse sand or loamy soil in which it `swims'; banding resembles that of coral snakes\nn01739094\tsmall secretive ground-living snake; found from central United States to Argentina\nn01739381\tslender arboreal snake found from southern Arizona to Bolivia\nn01739647\tmildly venomous snake with a lyre-shaped mark on the head; found in rocky areas from southwestern United States to Central America\nn01739871\tof desert regions of southwestern North America\nn01740131\tnocturnal prowler of western United States and Mexico\nn01740551\twormlike burrowing snake of warm regions having vestigial eyes\nn01740885\tburrows among roots of shrubs and beneath rocks in desert and rocky hillside areas and beach sand of western United States\nn01741232\tlarge dark-blue nonvenomous snake that invades burrows; found in southern North America and Mexico\nn01741442\ta variety of indigo snake\nn01741562\tany of various large nonvenomous snakes that kill their prey by crushing it in its coils\nn01741943\tany of several chiefly tropical constrictors with vestigial hind limbs\nn01742172\tvery large boa of tropical America and West Indies\nn01742447\tboa of grasslands and woodlands of western North America; looks and feels like rubber with tail and head of similar shape\nn01742821\tboa of rocky desert of southwestern United States\nn01743086\tlarge arboreal boa of tropical South America\nn01743605\tlarge Old World 
boas\nn01743936\tAustralian python with a variegated pattern on its back\nn01744100\tof southeast Asia and East Indies; the largest snake in the world\nn01744270\tvery large python of southeast Asia\nn01744401\tvery large python of tropical and southern Africa\nn01744555\ta python having the color of amethyst\nn01745125\tany of numerous venomous fanged snakes of warmer parts of both hemispheres\nn01745484\tany of several venomous New World snakes brilliantly banded in red and black and either yellow or white; widely distributed in South America and Central America\nn01745902\tranges from Central America to southeastern United States\nn01746191\tranges from Central America to southwestern United States\nn01746359\tany of various venomous elapid snakes of Asia and Africa and Australia\nn01746952\tsmall widely distributed arboreal snake of southern Africa banded in black and orange\nn01747285\tsmall venomous but harmless snake marked with black-and-white on red\nn01747589\tvenomous but sluggish reddish-brown snake of Australia\nn01747885\tvenomous Asiatic and African elapid snakes that can expand the skin of the neck into a hood\nn01748264\ta cobra of tropical Africa and Asia\nn01748389\tcobra used by the Pharaohs as a symbol of their power over life and death\nn01748686\taggressive cobra widely distributed in Africa; rarely bites but spits venom that may cause blindness\nn01748906\tlarge cobra of southeastern Asia and the East Indies; the largest venomous snake; sometimes placed in genus Naja\nn01749244\thighly venomous snake of southern Africa able to spit venom up to seven feet\nn01749582\tarboreal snake of central and southern Africa whose bite is often fatal\nn01749742\ta highly venomous southern African mamba dreaded because of its quickness and readiness to bite\nn01749939\tgreen phase of the black mamba\nn01750167\tvenomous Australian snake resembling an adder\nn01750437\thighly venomous brown-and-yellow snake of Australia and Tasmania\nn01750743\tlarge 
semiaquatic snake of Australia; black above with red belly\nn01751036\tbrightly colored venomous but nonaggressive snake of southeastern Asia and Malay peninsula\nn01751215\tsluggish krait banded with black and yellow\nn01751472\tlarge highly venomous snake of northeastern Australia\nn01751748\tany of numerous venomous aquatic viviparous snakes having a fin-like tail; of warm littoral seas; feed on fish which they immobilize with quick-acting venom\nn01752165\tvenomous Old World snakes characterized by hollow venom-conducting fangs in the upper jaw\nn01752585\tsmall terrestrial viper common in northern Eurasia\nn01752736\tof southern Europe; similar to but smaller than the adder\nn01753032\tlarge African viper that inflates its body when alarmed\nn01753180\tlarge heavy-bodied brilliantly marked and extremely venomous west African viper\nn01753488\thighly venomous viper of northern Africa and southwestern Asia having a horny spine above each eye\nn01753959\tNew World vipers with hollow fangs and a heat-sensitive pit on each side of the head\nn01754370\tcommon coppery brown pit viper of upland eastern United States\nn01754533\tvenomous semiaquatic snake of swamps in southern United States\nn01754876\tpit viper with horny segments at the end of the tail that rattle when shaken\nn01755581\tlarge deadly rattlesnake with diamond-shaped markings\nn01755740\twidely distributed in rugged ground of eastern United States\nn01755952\tsouthern variety\nn01756089\twidely distributed between the Mississippi and the Rockies\nn01756291\tsmall pale-colored desert rattlesnake of southwestern United States; body moves in an s-shaped curve\nn01756508\tlargest and most dangerous North American snake; of southwestern United States and Mexico\nn01756733\tmountain rock dweller of Mexico and most southern parts of United States southwest\nn01756916\thaving irregularly cross-banded back; of arid foothills and canyons of southern Arizona and Mexico\nn01757115\textremely dangerous; most common 
in areas of scattered scrubby growth; from Mojave Desert to western Texas and into Mexico\nn01757343\tmarkings vary but usually harmonize with background; of southwestern Arizona and Baja California\nn01757677\tpygmy rattlesnake found in moist areas from the Great Lakes to Mexico; feeds on mice and small amphibians\nn01757901\tsmall pygmy rattlesnake\nn01758141\tlarge extremely venomous pit viper of Central America and South America\nn01758757\tthe dead body of an animal especially one slaughtered and dressed for food\nn01758895\tthe dead and rotting body of an animal; unfit for human food\nn01767661\tinvertebrate having jointed limbs and a segmented body with an exoskeleton made of chitin\nn01768244\tan extinct arthropod that was abundant in Paleozoic times; had an exoskeleton divided into three parts\nn01769347\tair-breathing arthropods characterized by simple eyes and four pairs of legs\nn01770081\tspiderlike arachnid with a small rounded body and very long thin legs\nn01770393\tarachnid of warm dry regions having a long segmented tail ending in a venomous stinger\nn01770795\tsmall nonvenomous arachnid resembling a tailless scorpion\nn01771100\tminute arachnid sometimes found in old papers\nn01771417\tnonvenomous arachnid that resembles a scorpion and that has a long thin tail without a stinger\nn01771766\tlarge whip-scorpion of Mexico and southern United States that emits a vinegary odor when alarmed\nn01772222\tpredatory arachnid with eight legs, two poison fangs, two feelers, and usually two silk-spinning organs at the back end of the body; they spin silk to make cocoons for eggs or traps for prey\nn01772664\ta spider that spins a circular (or near circular) web\nn01773157\ta widely distributed North American garden spider\nn01773549\tan orange and tan spider with darkly banded legs that spins an orb web daily\nn01773797\ta spider common in European gardens\nn01774097\tspider having a comb-like row of bristles on each hind foot\nn01774384\tvenomous New World 
spider; the female is black with an hourglass-shaped red mark on the underside of the abdomen\nn01774750\tlarge hairy tropical spider with fangs that can inflict painful but not highly venomous bites\nn01775062\tground spider that hunts its prey instead of using a web\nn01775370\tlarge southern European spider once thought to be the cause of tarantism (uncontrollable bodily movement)\nn01775730\tAmerican spider that constructs a silk-lined nest with a hinged lid\nn01776192\tmite or tick\nn01776313\tany of two families of small parasitic arachnids with barbed proboscis; feed on blood of warm-blooded animals\nn01776705\tticks having a hard shield on the back and mouth parts that project from the head\nn01777304\ta northeastern tick now recognized as same species as Ixodes scapularis\nn01777467\ta tick that usually does not bite humans; transmits Lyme disease spirochete to dusky-footed wood rats\nn01777649\ta tick that feeds on dusky-footed wood rat and bites humans; principal vector for Lyme disease in western United States especially northern California\nn01777909\tparasitic on mice of genus Peromyscus and bites humans; principal vector for Lyme disease in eastern United States (especially New England); northern form was for a time known as Ixodes dammini (deer tick)\nn01778217\tparasitic on sheep and cattle as well as humans; can transmit looping ill in sheep (acute viral disease of the nervous system); a vector for Lyme disease spirochete\nn01778487\tbites humans; a vector for Lyme disease spirochete\nn01778621\tusually does not bite humans; transmits Lyme disease spirochete to cottontail rabbits and wood rats\nn01778801\tusually does not bite humans; transmits Lyme disease spirochete to cottontail rabbits and wood rats\nn01779148\tcommon tick that can transmit Rocky Mountain spotted fever and tularemia\nn01779463\ttick lacking a dorsal shield and having mouth parts on the under side of the head\nn01779629\tany of numerous very small to minute arachnids often 
infesting animals or plants or stored foods\nn01779939\ta mite that spins a web\nn01780142\tvery small free-living arachnid that is parasitic on animals or plants; related to ticks\nn01780426\tmite that in all stages feeds on other arthropods\nn01780696\tmite that as nymph and adult feeds on early stages of small arthropods but whose larvae are parasitic on terrestrial vertebrates\nn01781071\tlarval mite that sucks the blood of vertebrates including human beings causing intense irritation\nn01781570\tany of several mites of the order Acarina\nn01781698\twhitish mites that attack the skin of humans and other animals\nn01781875\tany of several varieties of mite that burrow into plants and cause a reddish-brown discoloration on the leaves or fruit\nn01782209\tweb-spinning mite that attacks garden plants and fruit trees\nn01782516\tsmall web-spinning mite; a serious orchard pest\nn01783017\tgeneral term for any terrestrial arthropod having an elongated body composed of many similar segments: e.g. centipedes and millipedes\nn01783706\tminute arthropod often infesting the underground parts of truck-garden and greenhouse crops\nn01784293\tan arthropod of the division Tardigrada\nn01784675\tchiefly nocturnal predacious arthropod having a flattened body of 15 to 173 segments each with a pair of legs, the foremost pair being modified as prehensors\nn01785667\tlong-legged centipede common in damp places as e.g. 
cellars\nn01786646\tany of numerous herbivorous nonpoisonous arthropods having a cylindrical body of 20 to 100 or more segments most with two pairs of legs\nn01787006\tany of various small spiderlike marine arthropods having small thin bodies and long slender legs\nn01787191\tused in some classifications; includes the orders Xiphosura and Eurypterida\nn01787835\tlarge marine arthropod of the Atlantic coast of North America having a domed carapace that is shaped like a horseshoe and a stiff pointed tail; a living fossil related to the wood louse\nn01788291\thorseshoe crab of the coast of eastern Asia\nn01788579\tlarge extinct scorpion-like arthropod considered related to horseshoe crabs\nn01788864\twormlike arthropod having two pairs of hooks at the sides of the mouth; parasitic in nasal sinuses of mammals\nn01789386\theavy-bodied largely ground-feeding domestic or game birds\nn01789740\ta domesticated gallinaceous bird thought to be descended from the red jungle fowl\nn01790171\tan English breed of large domestic fowl having five toes (the hind toe doubled)\nn01790304\tan American breed of domestic fowl\nn01790398\tEnglish breed of compact domestic fowl; raised primarily to crossbreed to produce roasters\nn01790557\tsmall plump hybrid developed by crossbreeding Plymouth Rock and Cornish fowl\nn01790711\tany of several breeds reared for cockfighting\nn01790812\tAsian breed of large fowl with dense plumage and feathered legs\nn01791107\tsmall Asiatic wild bird; believed to be ancestral to domestic fowl\nn01791314\tmale jungle fowl\nn01791388\tfemale jungle fowl\nn01791463\ta jungle fowl of southeastern Asia that is considered ancestral to the domestic fowl\nn01791625\ta domestic fowl bred for flesh or eggs; believed to have been developed from the red jungle fowl\nn01791954\tany of various small breeds of fowl\nn01792042\tyoung bird especially of domestic fowl\nn01792158\tadult male chicken\nn01792429\ta young domestic cock; not older than one 
year\nn01792530\tcastrated male chicken\nn01792640\tadult female chicken\nn01792808\ta hen that has just laid an egg and emits a shrill squawk\nn01792955\ta domestic hen ready to brood\nn01793085\ta hen with chicks\nn01793159\ta hen that lays eggs\nn01793249\tyoung hen usually less than a year old\nn01793340\ta young chicken having tender meat\nn01793435\tAmerican breed of heavy-bodied brownish-red general-purpose chicken\nn01793565\tAmerican breed of chicken having barred grey plumage raised for meat and brown eggs\nn01793715\tEnglish breed of large chickens with white skin\nn01794158\tlarge gallinaceous bird with fan-shaped tail; widely domesticated for food\nn01794344\tmale turkey\nn01794651\twild turkey of Central America and northern South America\nn01795088\tpopular game bird having a plump body and feathered legs and feet\nn01795545\tgrouse of which the male is bluish-black\nn01795735\tlarge northern European grouse that is black with a lyre-shaped tail\nn01795900\ta black grouse of western Asia\nn01796019\tmale black grouse\nn01796105\tfemale black grouse\nn01796340\tlarge Arctic and subarctic grouse with feathered feet and usually white winter plumage\nn01796519\treddish-brown grouse of upland moors of Great Britain\nn01796729\tfemale red grouse\nn01797020\tlarge black Old World grouse\nn01797307\tNorth American grouse that feeds on evergreen buds and needles\nn01797601\tlarge grouse of sagebrush regions of North America\nn01797886\tvalued as a game bird in eastern United States and Canada\nn01798168\tlarge grouse of prairies and open forests of western North America\nn01798484\tbrown mottled North American grouse of western prairies\nn01798706\tthe most common variety of prairie chicken\nn01798839\ta smaller prairie chicken of western Texas\nn01798979\textinct prairie chicken\nn01799302\tany of several large turkey-like game birds of the family Cracidae; native to jungles of tropical America; resembling the curassows and valued as food\nn01799679\tlarge 
crested arboreal game bird of warm parts of the Americas having long legs and tails; highly esteemed as game and food\nn01800195\ta kind of guan\nn01800424\tslender arboreal guan resembling a wild turkey; native to Central America and Mexico; highly regarded as game birds\nn01800633\tof Mexico and Texas\nn01801088\tlarge-footed short-winged birds of Australasia; build mounds of decaying vegetation to incubate eggs\nn01801479\tAustralian mound bird; incubates eggs naturally in sandy mounds\nn01801672\tadult female mallee fowl\nn01801876\tblack megapode of wooded regions of Australia and New Guinea\nn01802159\tCelebes megapode that lays eggs in holes in sandy beaches\nn01802721\ta kind of game bird in the family Phasianidae\nn01803078\tlarge long-tailed gallinaceous bird native to the Old World but introduced elsewhere\nn01803362\tcommon pheasant having bright plumage and a white neck ring\nn01803641\tboth sexes are brightly colored\nn01803893\tlarge brilliantly patterned East Indian pheasant\nn01804163\tbrightly colored crested pheasant of mountains of western and central Asia\nn01804478\ta popular North American game bird; named for its call\nn01804653\ta favorite game bird of eastern and central United States\nn01804921\tsmall game bird with a rounded body and small tail\nn01805070\tthe typical Old World quail\nn01805321\tbrilliantly colored pheasant of southern Asia\nn01805801\tvery large terrestrial southeast Asian pheasant often raised as an ornamental bird\nn01806061\ta young peafowl\nn01806143\tmale peafowl; having a crested head and very large fanlike tail marked with iridescent eyes or spots\nn01806297\tfemale peafowl\nn01806364\tpeafowl of India and Ceylon\nn01806467\tpeafowl of southeast Asia\nn01806567\tsmall gallinaceous game birds\nn01806847\tplump chunky bird of coastal California and Oregon\nn01807105\tbrilliantly colored Asian pheasant having wattles and two fleshy processes on the head\nn01807496\tsmall Old World gallinaceous game 
birds\nn01807828\tcommon European partridge\nn01808140\tcommon western European partridge with red legs\nn01808291\tof mountainous areas of southern Europe\nn01808596\tCalifornia partridge; slightly larger than the California quail\nn01809106\ta west African bird having dark plumage mottled with white; native to Africa but raised for food in many parts of the world\nn01809371\tfemale guinea fowl\nn01809752\tcrested ill-smelling South American bird whose young have claws on the first and second digits of the wings\nn01810268\theavy-bodied small-winged South American game bird resembling a gallinaceous bird but related to the ratite birds\nn01810700\ta cosmopolitan order of land birds having small heads and short legs with four unwebbed toes\nn01811243\textinct heavy flightless bird of Mauritius related to pigeons\nn01811909\twild and domesticated birds having a heavy body and short legs\nn01812187\tone of a breed of pigeon that enlarge their crop until their breast is puffed out\nn01812337\tany of numerous small pigeons\nn01812662\tpale grey Eurasian pigeon having black-striped wings from which most domestic species are descended\nn01812866\twild pigeon of western North America; often mistaken for the now extinct passenger pigeon\nn01813088\tEurasian pigeon with white patches on wings and neck\nn01813385\tany of several Old World wild doves\nn01813532\tthe common European wild dove noted for its plaintive cooing\nn01813658\tgreyish Old World turtledove with a black band around the neck; often caged\nn01813948\tsmall Australian dove\nn01814217\twild dove of the United States having a mournful call\nn01814370\tdomesticated pigeon raised for sport or food\nn01814549\tan unfledged pigeon\nn01814620\tfancy domestic pigeon having blue-and-white plumage and heavily muffed feet\nn01814755\tpigeon that executes backward somersaults in flight or on the ground\nn01814921\tpigeon trained to return home\nn01815036\ta homing pigeon used to carry messages\nn01815270\tgregarious 
North American migratory pigeon now extinct\nn01815601\tpigeon-like bird of arid regions of the Old World having long pointed wings and tail and precocial downy young\nn01816017\tsandgrouse of India\nn01816140\tsandgrouse of Europe and Africa having elongated middle tail feathers\nn01816474\tEurasiatic sandgrouse with a black patch on the belly\nn01816887\tusually brightly colored zygodactyl tropical birds with short hooked beaks and the ability to mimic sounds\nn01817263\tan archaic term for a parrot\nn01817346\ta tame parrot\nn01817953\tcommonly domesticated grey parrot with red-and-black tail and white face; native to equatorial Africa\nn01818299\tmainly green tropical American parrots\nn01818515\tlong-tailed brilliantly colored parrot of Central America and South America; among the largest and showiest of parrots\nn01818832\tlarge brownish-green New Zealand parrot\nn01819115\twhite or light-colored crested parrot of the Australian region; often kept as cage birds\nn01819313\twhite cockatoo with a yellow erectile crest\nn01819465\twhite Australian cockatoo with roseate tinged plumage\nn01819734\tsmall grey Australian parrot with a yellow crested head\nn01820052\tsmall African parrot noted for showing affection for their mates\nn01820348\tsmall brightly colored Australasian parrots having a brush-tipped tongue for feeding on nectar and soft fruits\nn01820546\tany of various small lories\nn01820801\tlorikeet with a colorful coat\nn01821076\ta kind of lorikeet\nn01821203\tany of numerous small slender long-tailed parrots\nn01821554\textinct parakeet whose range extended far into the United States\nn01821869\tsmall Australian parakeet usually light green with black and yellow markings in the wild but bred in many colors\nn01822300\tAfrican parakeet\nn01822602\tbirds having zygodactyl feet (except for the touracos)\nn01823013\tany of numerous European and North American birds having pointed wings and a long tail\nn01823414\tcommon cuckoo of Europe having a 
distinctive two-note call; lays eggs in the nests of other birds\nn01823740\tNorth American cuckoo; builds a nest and rears its own young\nn01824035\tspeedy largely terrestrial bird found from California and Mexico to Texas\nn01824344\tblack tropical American cuckoo\nn01824575\tOld World ground-living cuckoo having a long dagger-like hind claw\nn01824749\tcommon coucal of India and China\nn01825278\tlarge brightly crested bird of Africa\nn01825930\tchiefly short-legged arboreal nonpasserine birds that nest in holes\nn01826364\tOld World bird that tumbles or rolls in flight; related to kingfishers\nn01826680\tcommon European blue-and-green roller with a reddish-brown back\nn01826844\tMadagascan roller with terrestrial and crepuscular habits that feeds on e.g. insects and worms\nn01827403\tnonpasserine large-headed bird with a short tail and long sharp bill; usually crested and bright-colored; feed mostly on fish\nn01827793\tsmall kingfisher with greenish-blue and orange plumage\nn01828096\tgreyish-blue North American kingfisher with a chestnut band on its chest\nn01828556\tAustralian kingfisher having a loud cackling cry\nn01828970\tcolorful chiefly tropical Old World bird having a strong graceful flight; feeds on especially bees\nn01829413\tbird of tropical Africa and Asia having a very large bill surmounted by a bony protuberance; related to kingfishers\nn01829869\tany of several crested Old World birds with a slender downward-curved bill\nn01830042\tpinkish-brown hoopoe with black-and-white wings\nn01830479\ttropical African bird having metallic blackish plumage but no crest\nn01830915\ttropical American bird resembling a blue jay and having greenish and bluish plumage\nn01831360\ttiny insectivorous West Indian bird having red-and-green plumage and a long straight bill\nn01831712\tnonpasserine bird having long wings and weak feet; spends much of its time in flight\nn01832167\ta small bird that resembles a swallow and is noted for its rapid 
flight\nn01832493\tcommon European bird with a shrieking call that nests chiefly about eaves of buildings or on cliffs\nn01832813\tAmerican swift that nests in e.g. unused chimneys\nn01833112\tswift of eastern Asia; produces the edible bird's nest\nn01833415\tbirds of southeast Asia and East Indies differing from true swifts in having upright crests and nesting in trees\nn01833805\ttiny American bird having brilliant iridescent plumage and long slender bills; wings are specialized for vibrating flight\nn01834177\ta kind of hummingbird\nn01834540\tany of various South American hummingbirds with a sharp pointed bill\nn01835276\tmainly crepuscular or nocturnal nonpasserine birds with mottled greyish-brown plumage and large eyes; feed on insects\nn01835769\tOld World goatsucker\nn01835918\tlarge whippoorwill-like bird of the southern United States\nn01836087\tAmerican nocturnal goatsucker with grey-and-white plumage\nn01836673\tgoatsucker of western North America\nn01837072\tinsectivorous bird of Australia and southeastern Asia having a wide frog-like mouth\nn01837526\tnocturnal fruit-eating bird of South America that has fatty young yielding an oil that is used instead of butter\nn01838038\tany of numerous nonpasserine insectivorous climbing birds usually having strong bills for boring wood\nn01838598\tbird with strong claws and a stiff tail adapted for climbing and a hard chisel-like bill for boring into wood for insects\nn01839086\twoodpecker of Europe and western Asia\nn01839330\tsmall North American woodpecker with black and white plumage and a small bill\nn01839598\tNorth American woodpecker\nn01839750\tlarge flicker of eastern North America with a red neck and yellow undersurface to wings and tail\nn01839949\tsouthwestern United States bird like the yellow-shafted flicker but lacking the red neck\nn01840120\twestern United States bird with red undersurface to wings and tail\nn01840412\tlarge black-and-white woodpecker of southern United States and Cuba having an 
ivory bill; nearly extinct\nn01840775\tblack-and-white North American woodpecker having a red head and neck\nn01841102\tsmall American woodpecker that feeds on sap from e.g. apple and maple trees\nn01841288\teastern North American sapsucker having a pale yellow abdomen\nn01841441\twestern North American sapsucker\nn01841679\tOld World woodpecker with a peculiar habit of twisting the neck\nn01841943\tsmall woodpeckers of South America and Africa and East Indies having soft rounded tail feathers\nn01842235\tsmall brightly colored stout-billed tropical bird having short weak wings\nn01842504\tbrownish tropical American bird having a large head with fluffed out feathers\nn01842788\tsmall bird of tropical Africa and Asia; feeds on beeswax and honey and larvae\nn01843065\ttropical American insectivorous bird having a long sharp bill and iridescent green or bronze plumage\nn01843383\tbrilliantly colored arboreal fruit-eating bird of tropical America having a very large thin-walled beak\nn01843719\tsmall toucan\nn01844231\tforest bird of warm regions of the New World having brilliant lustrous plumage and long tails\nn01844551\tlarge trogon of Central America and South America having golden-green and scarlet plumage\nn01844746\tvery rare Central American bird; the national bird of Guatemala\nn01844917\twading and swimming and diving birds of either fresh or salt water\nn01845132\tfreshwater aquatic bird\nn01845477\tchiefly web-footed swimming birds\nn01846331\tsmall wild or domesticated web-footed broad-billed swimming bird usually having a depressed body and short legs\nn01847000\tadult male of a wild or domestic duck\nn01847089\tchild's word for a duck\nn01847170\tyoung duck\nn01847253\tany of various ducks of especially bays and estuaries that dive for their food\nn01847407\tany of numerous shallow-water ducks that feed by upending and dabbling\nn01847806\twild dabbling duck from which domestic ducks are descended; widely distributed\nn01847978\ta dusky duck of 
northeastern United States and Canada\nn01848123\tany of various small short-necked dabbling river ducks of Europe and America\nn01848323\tcommon teal of Eurasia and North America\nn01848453\tAmerican teal\nn01848555\tsmall Eurasian teal\nn01848648\tfreshwater duck of Eurasia and northern Africa related to mallards and teals\nn01848840\ta widgeon the male of which has a white crown\nn01848976\tfreshwater duck of the northern hemisphere having a broad flat bill\nn01849157\tlong-necked river duck of the Old and New Worlds having elongated central tail feathers\nn01849466\tOld World gooselike duck slightly larger than a mallard with variegated mostly black-and-white plumage and a red bill\nn01849676\tfemale sheldrake\nn01849863\treddish-brown stiff-tailed duck of North America and northern South America\nn01850192\tsmall North American diving duck; males have bushy head plumage\nn01850373\tlarge-headed swift-flying diving duck of Arctic regions\nn01850553\tNorth American goldeneye diving duck\nn01850873\tNorth American wild duck valued for sport and food\nn01851038\theavy-bodied Old World diving duck having a grey-and-black body and reddish head\nn01851207\tNorth American diving duck with a grey-and-black body and reddish-brown head\nn01851375\tdiving ducks of North America having a bluish-grey bill\nn01851573\tlarge scaup of North America having a greenish iridescence on the head of the male\nn01851731\tcommon scaup of North America; males have purplish heads\nn01851895\tan undomesticated duck (especially a mallard)\nn01852142\tshowy North American duck that nests in hollow trees\nn01852329\tmale wood duck\nn01852400\tshowy crested Asiatic duck; often domesticated\nn01852671\tlarge crested wild duck of Central America and South America; widely domesticated\nn01852861\tany of various large diving ducks found along the seacoast: eider; scoter; merganser\nn01853195\tduck of the northern hemisphere much valued for the fine soft down of the females\nn01853498\tlarge black 
diving duck of northern parts of the northern hemisphere\nn01853666\ta variety of scoter\nn01853870\ta common long-tailed sea duck of the northern parts of the United States\nn01854415\tlarge crested fish-eating diving duck having a slender hooked bill with serrated edges\nn01854700\tcommon merganser of Europe and North America\nn01854838\tcommon North American diving duck considered a variety of the European goosander\nn01855032\twidely distributed merganser of America and Europe\nn01855188\tsmallest merganser and most expert diver; found in northern Eurasia\nn01855476\tsmall North American duck with a high circular crest on the male's head\nn01855672\tweb-footed long-necked typically gregarious migratory aquatic birds usually larger and less aquatic than ducks\nn01856072\tyoung goose\nn01856155\tmature male goose\nn01856380\tvery large wild goose of northeast Asia; interbreeds freely with the greylag\nn01856553\tcommon grey wild goose of Europe; ancestor of many domestic breeds\nn01856890\tNorth American wild goose having dark plumage in summer but white in winter\nn01857079\tblue goose in the white color phase\nn01857325\tsmall dark geese that breed in the north and migrate southward\nn01857512\tthe best known variety of brant goose\nn01857632\tcommon greyish-brown wild goose of North America with a loud, trumpeting call\nn01857851\tEuropean goose smaller than the brant; breeds in the far north\nn01858281\tlarge white South American bird intermediate in some respects between ducks and swans\nn01858441\tstately heavy-bodied aquatic bird with very long neck and usually white plumage as adult\nn01858780\tadult male swan\nn01858845\tfemale swan\nn01858906\ta young swan\nn01859190\tsoundless Eurasian swan; commonly domesticated\nn01859325\tcommon Old World swan noted for its whooping call\nn01859496\tswan that nests in tundra regions of the New and Old Worlds\nn01859689\tNorth American subspecies of tundra swan having a soft whistling note\nn01859852\tEurasian 
subspecies of tundra swan; smaller than the whooper\nn01860002\tlarge pure white wild swan of western North America having a sonorous cry\nn01860187\tlarge Australian swan having black plumage and a red bill\nn01860497\tgooselike aquatic bird of South America having a harsh trumpeting call\nn01860864\tscreamer having a hornlike process projecting from the forehead\nn01861148\tdistinguished from the horned screamer by a feathery crest on the back of the head\nn01861330\tlargest crested screamer; native to southern Brazil and Argentina\nn01861778\tany warm-blooded vertebrate having the skin more or less covered with hair; young are born alive except for the small subclass of monotremes and nourished with milk\nn01862399\tanimals that nourish their young with milk\nn01871265\tany mammal with prominent tusks (especially an elephant or wild boar)\nn01871543\tprimitive oviparous mammals found only in Australia and Tasmania and New Guinea\nn01871875\tthe most primitive mammals comprising the only extant members of the subclass Prototheria\nn01872401\ta burrowing monotreme mammal covered with spines and having a long snout and claws for hunting ants and termites; native to Australia\nn01872772\ta burrowing monotreme mammal covered with spines and having a long snout and claws for hunting ants and termites; native to New Guinea\nn01873310\tsmall densely furred aquatic monotreme of Australia and Tasmania having a broad bill and tail and webbed feet; only species in the family Ornithorhynchidae\nn01874434\tmammals of which the females have a pouch (the marsupium) containing the teats where the young are fed and carried\nn01874928\tnocturnal arboreal marsupial having a naked prehensile tail found from southern North America to northern South America\nn01875313\tomnivorous opossum of the eastern United States; noted for feigning death when in danger; esteemed as food in some areas; considered same species as the crab-eating opossum of South America\nn01875610\tSouth American 
opossum\nn01876034\tterrestrial marsupials of southern South America that resemble shrews\nn01876326\tany of various agile ratlike terrestrial marsupials of Australia and adjacent islands; insectivorous and herbivorous\nn01876667\tbandicoot with leathery ears like a rabbit\nn01877134\tany of several herbivorous leaping marsupials of Australia and New Guinea having large powerful hind legs and a long thick tail\nn01877606\tvery large greyish-brown Australian kangaroo formerly abundant in open wooded areas\nn01877812\tany of various small or medium-sized kangaroos; often brightly colored\nn01878061\ta small wallaby having a height of 30 inches\nn01878335\tsmall Australian wallaby that resembles a hare and has persistent teeth\nn01878639\tsmall wallabies with a horny nail on the tip of the tail\nn01878929\tslender long-legged Australian wallabies living in caves and rocky areas\nn01879217\tsmall reddish-brown wallabies of scrubby areas of Australia and New Guinea\nn01879509\tarboreal wallabies of New Guinea and northern Australia having hind and forelegs of similar length\nn01879837\tsmall kangaroo of northeastern Australia\nn01880152\tany of several rabbit-sized ratlike Australian kangaroos\nn01880473\tAustralian rat kangaroos\nn01880716\tshort-nosed rat kangaroo\nn01880813\tbrush-tailed rat kangaroo\nn01881171\tsmall furry Australian arboreal marsupials having long usually prehensile tails\nn01881564\twoolly-haired monkey-like arboreal marsupial of New Guinea and northern Australia\nn01881857\tbushy-tailed phalanger\nn01882125\tnocturnal phalangers that move with gliding leaps using parachute-like folds of skin along the sides of the body\nn01882714\tsluggish tailless Australian arboreal marsupial with grey furry ears and coat; feeds on eucalyptus leaves and bark\nn01883070\tburrowing herbivorous Australian marsupials about the size of a badger\nn01883513\tsmall carnivorous nocturnal marsupials of Australia and Tasmania\nn01883920\tany of several more or less 
arboreal marsupials somewhat resembling martens\nn01884104\ta variety of dasyure\nn01884203\tcarnivorous arboreal cat-like marsupials of Australia and Tasmania\nn01884476\trare doglike carnivorous marsupial of Tasmania having stripes on its back; probably extinct\nn01884834\tsmall ferocious carnivorous marsupial having a mostly black coat and long tail\nn01885158\tany of numerous small sharp-nosed insectivorous marsupials superficially resembling mice or rats\nn01885498\tsmall Australian marsupial having long snout and strong claws for feeding on termites; nearly extinct\nn01886045\tsmall burrowing Australian marsupial that resembles a mole\nn01886756\tmammals having a placenta; all mammals except monotremes and marsupials\nn01887474\tany animals kept for use or profit\nn01887623\tmature male of various mammals of which the female is called `cow'; e.g. whales or elephants or especially cattle\nn01887787\tmature female of mammals of which the male is called `bull'\nn01887896\tyoung of domestic cattle\nn01888045\tyoung of various large placental mammals e.g. 
whale or giraffe or elephant or buffalo\nn01888181\tan animal in its second year\nn01888264\tmature male of various mammals (especially deer or antelope)\nn01888411\tmature female of mammals of which the male is called `buck'\nn01889074\tsmall insect-eating mainly nocturnal terrestrial or fossorial mammals\nn01889520\tsmall velvety-furred burrowing mammal having small eyes and fossorial forefeet\nn01889849\tamphibious mole of eastern North America having pink fleshy tentacles around the nose\nn01890144\tmole of eastern North America\nn01890564\tmole of southern Africa having iridescent guard hairs mixed with the underfur\nn01890860\tslender mole having a long snout and tail\nn01891013\tshrew mole of eastern Asia\nn01891274\tgreyish-black shrew mole of the United States and Canada\nn01891633\tsmall mouselike mammal with a long snout; related to moles\nn01892030\tcommon American shrew\nn01892145\tcommonest shrew of moist habitats in North America\nn01892385\tNorth American shrew with tail less than half its body length\nn01892551\tany of several small semiaquatic shrews usually living near swift-flowing streams\nn01892744\twater shrew of North America\nn01893021\twidely distributed Old World water shrew\nn01893164\ta type of water shrew\nn01893399\tsmall brown shrew of grassy regions of eastern United States\nn01893825\tsmall nocturnal Old World mammal covered with both hair and protective spines\nn01894207\tsmall often spiny insectivorous mammal of Madagascar; resembles a hedgehog\nn01894522\tprolific animal that feeds chiefly on earthworms\nn01894956\tamphibious African insectivorous mammal that resembles an otter\nn01896844\tdown of the eider duck\nn01897257\ta supplementary feather (usually small) on the underside of the base of the shaft of some feathers in some birds\nn01897426\tone of the long curved tail feathers of a rooster\nn01897536\tfeathers covering the body of an adult bird and determining its shape\nn01897667\ttuft of small stiff feathers on the first 
digit of a bird's wing\nn01898593\ta long narrow feather on the back (saddle) of a domestic fowl\nn01899894\tthe mane of a horse\nn01900150\ta filamentous projection or process on an organism\nn01903234\ta protective structure resembling a scale\nn01903346\tlarge bony or horny plate as on an armadillo or turtle or the underside of a snake\nn01903498\thard plate or element of the exoskeleton of some arthropods\nn01904029\t(zoology) the part of a turtle's shell forming its underside\nn01904806\ta shell of a scallop\nn01904886\ta shell of an oyster\nn01905321\touter sheath of the pupa of certain insects\nn01905661\tany animal lacking a backbone or notochord; the term is not used as a scientific classification\nn01906749\tprimitive multicellular marine animal whose porous body is supported by a fibrous skeletal framework; usually occurs in sessile colonies\nn01907287\tany of the flagellated cells in sponges having a collar of cytoplasm around the flagellum; they maintain a flow of water through the body\nn01907738\ta siliceous sponge (with glassy spicules) of the class Hyalospongiae\nn01908042\ta deep-water marine sponge having a cylindrical skeleton of intricate glassy latticework; found in the waters of the East Indies and the eastern coast of Asia\nn01908958\tany animal of the subkingdom Metazoa; all animals except protozoans and sponges\nn01909422\tradially symmetrical animals having saclike bodies with only one opening and tentacles with stinging structures; they occur in polyp and medusa forms\nn01909788\tthe flat ciliated free-swimming larva of hydrozoan coelenterates\nn01909906\tone of two forms that coelenterates take (e.g. 
a hydra or coral): usually sedentary with a hollow cylindrical body usually with a ring of tentacles around the mouth\nn01910252\tone of two forms that coelenterates take: it is the free-swimming sexual phase in the life cycle of a coelenterate; in this phase it has a gelatinous umbrella-shaped body and tentacles\nn01910747\tany of numerous usually marine and free-swimming coelenterates that constitute the sexually reproductive forms of hydrozoans and scyphozoans\nn01911063\tany of various usually free-swimming marine coelenterates having a gelatinous medusoid stage as the dominant phase of its life cycle\nn01911403\ta type of jellyfish\nn01911839\tcolonial coelenterates having the polyp phase dominant\nn01912152\tsmall tubular solitary freshwater hydrozoan polyp\nn01912454\ta floating or swimming oceanic colony of polyps often transparent or showily colored\nn01912809\tsmall creatures resembling pieces of fuzzy rope; each with a cluster of swimming bells serving as the head and long elastic tentacles for drawing in prey\nn01913166\tlarge siphonophore having a bladderlike float and stinging tentacles\nn01913346\tcolonial siphonophore of up to 130 ft long\nn01913440\tlarge siphonophore of up to 50 ft long\nn01914163\tsessile marine coelenterates including solitary and colonial polyps; the medusoid phase is entirely suppressed\nn01914609\tmarine polyps that resemble flowers but have oral rings of tentacles; differ from corals in forming no hard skeleton\nn01914830\tany sea anemone or related animal\nn01915700\tfleshy featherlike warm-water colonies\nn01915811\tmarine colonial polyp characterized by a calcareous skeleton; masses in a variety of shapes often forming reefs\nn01916187\tcorals having a horny or calcareous branching skeleton\nn01916388\tcorals forming featherlike colonies\nn01916481\tcorals having a treelike or fan-shaped horny skeleton\nn01916588\tcorals of especially the Mediterranean having pink or red color used for ornaments and 
jewelry\nn01916925\tcorals having calcareous skeletons aggregations of which form reefs and islands\nn01917289\tmassive reef-building coral having a convoluted and furrowed surface\nn01917611\tlarge branching coral resembling antlers\nn01917882\tflattened disk-shaped stony coral (usually solitary and unattached)\nn01918744\tbiradially symmetrical hermaphroditic solitary marine animals resembling jellyfishes having for locomotion eight rows of cilia arranged like teeth in a comb\nn01919385\tdelicately iridescent thimble-shaped ctenophores\nn01920051\tctenophore have long tentacles and flattened body\nn01920438\tctenophore having a rounded body with longitudinal rows of cilia\nn01921059\tctenophore having a ribbon-shaped iridescent gelatinous body\nn01922303\tany of numerous relatively small elongated soft-bodied animals especially of the phyla Annelida and Chaetognatha and Nematoda and Nemertea and Platyhelminthes; also many insect larvae\nn01922717\tworm that is parasitic on the intestines of vertebrates especially roundworms and tapeworms and flukes\nn01922948\ta larva of a woodborer\nn01923025\tany of various insects or larvae or mollusks that bore into wood\nn01923404\tany of various worms living parasitically in intestines of vertebrates having a retractile proboscis covered with many hooked spines\nn01923890\tany worm of the Chaetognatha; transparent marine worm with horizontal lateral and caudal fins and a row of movable curved spines at each side of the mouth\nn01924800\tencysted saclike larva of the tapeworm\nn01924916\tparasitic or free-living worms having a flattened body\nn01925270\tfree-swimming mostly freshwater flatworms; popular in laboratory studies for the ability to regenerate lost parts\nn01925695\tparasitic flatworms having external suckers for attaching to a host\nn01925916\ttadpole-shaped parasitic larva of a trematode worm; tail disappears in adult stage\nn01926379\tflatworm parasitic in liver and bile ducts of domestic animals and 
humans\nn01926689\tfluke that is parasitic on humans and swine; common in eastern Asia\nn01927159\tflatworms parasitic in the blood vessels of mammals\nn01927456\tribbonlike flatworms that are parasitic in the intestines of humans and other vertebrates\nn01927928\ttapeworms whose larvae are parasitic in humans and domestic animals\nn01928215\ttapeworms parasitic in humans which uses the pig as its intermediate host\nn01928517\tsoft unsegmented marine worms that have a threadlike proboscis and the ability to stretch and contract\nn01928865\tslender animal with tentacles and a tubelike outer covering; lives on the deep ocean bottom\nn01929186\tminute aquatic multicellular organisms having a ciliated wheel-like organ for feeding and locomotion; constituents of freshwater plankton\nn01930112\tunsegmented worms with elongated rounded body pointed at both ends; mostly free-living but some are parasitic\nn01930852\tintestinal parasite of humans and pigs\nn01931140\tintestinal parasite of domestic fowl\nn01931520\tsmall threadlike worm infesting human intestines and rectum especially in children\nn01931714\tany of various small free-living plant-parasitic roundworms\nn01932151\tminute eelworm that feeds on organisms that cause fermentation in e.g. 
vinegar\nn01932936\tparasitic nematode occurring in the intestines of pigs and rats and human beings and producing larvae that form cysts in skeletal muscles\nn01933151\tparasitic bloodsucking roundworms having hooked mouth parts to fasten to the intestinal wall of human and other hosts\nn01933478\tslender threadlike roundworms living in the blood and tissues of vertebrates; transmitted as larvae by biting insects\nn01933988\tparasitic roundworm of India and Africa that lives in the abdomen or beneath the skin of humans and other vertebrates\nn01934440\tworms with cylindrical bodies segmented both internally and externally\nn01934844\tsmall primitive marine worm lacking external segmentation and resembling polychaete larvae\nn01935176\thermaphroditic terrestrial and aquatic annelids having bristles borne singly along the length of the body\nn01935395\tterrestrial worm that burrows into and helps aerate soil; often surfaces when the ground is cool or wet; used as bait by anglers\nn01936391\tchiefly marine annelids possessing both sexes and having paired appendages (parapodia) bearing bristles\nn01936671\tmarine worms having a row of tufted gills along each side of the back; often used for fishing bait\nn01936858\tany of several large worms having a broad flattened body with a mat of coarse hairs covering the back\nn01937579\ta segmented marine worm with bright red body; often used for bait\nn01937909\tcarnivorous or bloodsucking aquatic or terrestrial worms typically having a sucker at each end\nn01938454\tlarge European freshwater leech formerly used for bloodletting\nn01938735\tany of several large freshwater leeches\nn01940736\tinvertebrate having a soft unsegmented body usually enclosed in a shell\nn01941223\tburrowing marine mollusk\nn01941340\tany of various seashore mollusks having a tapering tubular shell open at each end and a foot pointed like a spade for burrowing\nn01942177\ta class of mollusks typically having a one-piece coiled shell and flattened 
muscular foot with a head bearing stalked eyes\nn01942869\tany of various large edible marine gastropods of the genus Haliotis having an ear-shaped shell with pearly interior\nn01943087\tan abalone found near the Channel Islands\nn01943541\tany of numerous tropical marine snails that as adults have the outer lip of the aperture produced into a series of long curved spines\nn01943899\tany of various edible tropical marine gastropods of the genus Strombus having a brightly-colored spiral shell with large outer lip\nn01944118\ta large variety of conch\nn01944390\tfreshwater or marine or terrestrial gastropod mollusk usually having an external enclosing spiral shell\nn01944812\tone of the chief edible snails\nn01944955\tany of several inedible snails of the genus Helix; often destructive pests\nn01945143\tserious garden pest having a brown shell with paler zigzag markings; nearly cosmopolitan in distribution\nn01945340\ta kind of garden snail\nn01945685\tany of various terrestrial gastropods having an elongated slimy body and no external shell\nn01945845\tany of several creeping marine gastropods with a spirally coiled shell: whelks; tritons; moon shells; neritids\nn01946277\toperculate seasnail of coastal waters with a short spiral shell\nn01946630\ta neritid gastropod having a short smooth or spirally ridged shell with thick usually toothed outer lip and toothed operculum\nn01946827\tgastropod having reddish toothlike projections around the shell opening; of the Caribbean area\nn01947139\tornately marked and brightly colored snails of brackish waters\nn01947396\tlarge carnivorous marine gastropods of coastal waters and intertidal regions having a strong snail-like shell\nn01947997\tmarine gastropods having smooth rounded shells that form short spires\nn01948446\tedible marine gastropod\nn01948573\tany of various usually marine gastropods with low conical shells; found clinging to rocks in littoral areas\nn01949085\tmarine limpet\nn01949499\tmarine limpet having a 
conical shell with an opening at the apex\nn01949973\tminute conical gastropod superficially resembling a limpet but living and feeding on freshwater plants\nn01950731\tany of various marine gastropods of the suborder Nudibranchia having a shell-less and often beautifully colored body\nn01951274\tnaked marine gastropod having a soft body with reduced internal shell and two pairs of ear-like tentacles\nn01951613\ta kind of sea slug\nn01952029\tmarine gastropod mollusk having a very small thin shell\nn01952712\tany member of the genus Physa\nn01953361\tany of numerous tropical marine gastropods of the genus Cypraea having highly polished usually brightly marked shells\nn01953594\tcowrie whose shell is used for money in parts of the southern Pacific and in parts of Africa\nn01953762\tcowrie whose shell is used for ornament\nn01954516\tdeep-water wormlike mollusks lacking calcareous plates on the body but having fine slimy spicules on the covering mantle\nn01955084\tprimitive elongated bilaterally symmetrical marine mollusk having a mantle covered with eight calcareous plates\nn01955933\tmarine or freshwater mollusks having a soft body with platelike gills enclosed within two shells hinged together\nn01956344\ta young oyster or other bivalve\nn01956481\tburrowing marine mollusk living on sand or mud; the shell closes with viselike firmness\nn01956764\tthe shell of a marine organism\nn01957335\tan edible clam with thin oval-shaped shell found in coastal regions of the United States and Europe\nn01958038\tan edible American clam; the heavy shells were used as money by some American Indians\nn01958346\ta young quahog\nn01958435\ta half-grown quahog\nn01958531\ta large edible clam found burrowing deeply in sandy mud along the Pacific coast of North America; weighs up to six pounds; has siphons that can extend to several feet and cannot be withdrawn into the shell\nn01959029\tmarine clam having a long narrow curved thin shell\nn01959492\ta large clam inhabiting reefs in the 
southern Pacific and weighing up to 500 pounds\nn01959985\tcommon edible, burrowing European bivalve mollusk that has a strong, rounded shell with radiating ribs\nn01960177\tcommon edible European cockle\nn01960459\tmarine mollusks having a rough irregular shell; found on the sea bed mostly in coastal waters\nn01961234\ta large oyster native to Japan and introduced along the Pacific coast of the United States; a candidate for introduction in Chesapeake Bay\nn01961600\tcommon edible oyster of Atlantic coast of North America\nn01961985\ttropical marine bivalve found chiefly off eastern Asia and Pacific coast of North America and Central America; a major source of pearls\nn01962506\tthin-shelled bivalve having the right valve deeply notched\nn01962788\tmarine bivalve common in Philippine coastal waters characterized by a large thin flat translucent shell\nn01963317\tmarine bivalve mollusk having a heavy toothed shell with a deep boat-like inner surface\nn01963479\tred-blooded clam\nn01963571\tmarine or freshwater bivalve mollusk that lives attached to rocks etc.\nn01964049\tmarine bivalve mollusk having a dark elongated shell; live attached to solid objects especially in intertidal zones\nn01964271\ta mussel with a dark shell that lives attached to rocks\nn01964441\tbivalve mollusk abundant in rivers of central United States\nn01964957\tthe pearly lining of the dark shells is a source of mother-of-pearl\nn01965252\tmussel with thin fragile shells having only rudimentary hinge teeth\nn01965529\tinch long mollusk imported accidentally from Europe; clogs utility inlet pipes and feeds on edible freshwater mussels\nn01965889\tedible marine bivalve having a fluted fan-shaped shell that swim by expelling water from the shell in a series of snapping motions\nn01966377\ta small scallop inhabiting shallow waters and mud flats of the Atlantic coast of North America\nn01966586\ta large scallop inhabiting deep waters of the Atlantic coast of North America\nn01967094\twormlike 
marine bivalve that bores into wooden piers and ships by means of drill-like shells\nn01967308\ttypical shipworm\nn01967963\tmarine bivalve that bores into rock or clay or wood by means of saw-like shells\nn01968315\tmarine mollusk characterized by well-developed head and eyes and sucker-bearing tentacles\nn01968897\tcephalopod of the Indian and Pacific oceans having a spiral shell with pale pearly partitions\nn01969726\ta cephalopod with eight arms but lacking an internal shell\nn01970164\tbottom-living cephalopod having a soft oval body with eight long tentacles\nn01970667\tcephalopod mollusk of warm seas whose females have delicate papery spiral shells\nn01971094\tcephalopods having eight short tentacles plus two long ones\nn01971280\twidely distributed fast-moving ten-armed cephalopod mollusk having a long tapered body with triangular tail fins\nn01971620\tsomewhat flattened cylindrical squid\nn01971850\textremely active cylindrical squid with short strong arms and large rhombic terminal fins\nn01972131\tlargest mollusk known about but never seen (to 60 feet long)\nn01972541\tten-armed oval-bodied cephalopod with narrow fins as long as the body and a large calcareous internal shell\nn01973148\ta small tropical cephalopod of the genus Spirula having prominent eyes and short arms and a many-chambered shell coiled in a flat spiral\nn01974773\tany mainly aquatic arthropod usually having a segmented body and chitinous exoskeleton\nn01975687\ta major subclass of crustaceans\nn01976146\tcrustaceans characteristically having five pairs of locomotor appendages each joined to a segment of the thorax\nn01976868\ttypical crabs\nn01976957\tdecapod having eyes on short stalks and a broad flattened carapace with a small abdomen folded under the thorax and pincers\nn01977485\tlarge edible crab of the southern coast of the United States (particularly Florida)\nn01978010\tedible crab that has not recently molted and so has a hard shell\nn01978136\tedible crab that has recently 
molted and not yet formed its new shell\nn01978287\tsmall edible crab of Pacific coast of North America\nn01978455\tcrab of eastern coast of North America\nn01978587\tlarge red deep-water crab of the eastern coast of North America\nn01978930\tmarine crab with some legs flattened and fringed for swimming\nn01979269\tcrab of the English coasts\nn01979526\tbrightly spotted crab of sandy beaches of the Atlantic coast of the United States\nn01979874\tbluish edible crab of Atlantic and Gulf Coasts of North America\nn01980166\tburrowing crab of American coastal regions having one claw much enlarged in the male\nn01980655\ttiny soft-bodied crab living commensally in the mantles of certain bivalve mollusks\nn01981276\tlarge edible crab of northern Pacific waters especially along the coasts of Alaska and Japan\nn01981702\tany of numerous crabs with very long legs and small triangular bodies\nn01982068\ta large spider crab of Europe\nn01982347\tvery large deep-water Japanese crab\nn01982650\tany of several edible marine crustaceans of the families Homaridae and Nephropsidae and Palinuridae\nn01983048\tlarge edible marine crustaceans having large pincers on the first pair of legs\nn01983481\tlobster of Atlantic coast of America\nn01983674\tlobster of Atlantic coast of Europe\nn01983829\tsmall lobster of southern Africa\nn01984245\tedible European lobster resembling the American lobster but slenderer\nn01984695\tlarge edible marine crustacean having a spiny carapace but lacking the large pincers of true lobsters\nn01985128\tsmall freshwater decapod crustacean that resembles a lobster\nn01985493\tsmall crayfish of Europe and Asia and western North America\nn01985797\tcommon large crayfishes of eastern North America\nn01986214\tsmall soft-bodied marine crustaceans living in cast-off shells of gastropods\nn01986806\tsmall slender-bodied chiefly marine decapod crustaceans with a long tail and single pair of pincers; many species are edible\nn01987076\tsmall shrimp that makes a 
snapping noise with one of their enlarged chelae\nn01987545\tshrimp-like decapod crustacean having two pairs of pincers; most are edible\nn01987727\tlarge (a foot or more) edible freshwater prawn common in Australian rivers\nn01988203\tedible tropical and warm-water prawn\nn01988701\tshrimp-like planktonic crustaceans; major source of food for e.g. baleen whales\nn01988869\tfood for jellyfish\nn01989516\tshrimp-like crustaceans whose females carry eggs and young in a pouch between the legs\nn01989869\ta kind of crustacean\nn01990007\ttropical marine burrowing crustaceans with large grasping appendages\nn01990516\ta kind of mantis shrimp\nn01990800\tany of various small terrestrial or aquatic crustaceans with seven pairs of legs adapted for crawling\nn01991028\tany of various small terrestrial isopods having a flat elliptical segmented body; found in damp habitats\nn01991520\tsmall terrestrial isopod with a convex segmented body that can roll up into a ball\nn01992262\tterrestrial isopod having an oval segmented body (a shape like a sow)\nn01992423\tmarine isopod crustacean\nn01992773\ta kind of malacostracan crustacean\nn01993525\tsmall amphipod crustacean having a grotesque form suggestive of the praying mantis; found chiefly on seaweed\nn01993830\tamphipod crustacean parasitic on cetaceans\nn01994910\tminute freshwater crustacean having a round body enclosed in a transparent shell; moves about like a flea by means of hairy branched antennae\nn01995514\tsmall freshwater branchiopod having a transparent body with many appendages; swims on its back\nn01995686\tcommon to saline lakes\nn01996280\ta kind of branchiopod crustacean\nn01996585\tminute marine or freshwater crustaceans usually having six pairs of limbs on the thorax; some abundant in plankton and others parasitic on fish\nn01997119\tminute free-swimming freshwater copepod having a large median eye and pear-shaped body and long antennae used in swimming; important in some food chains and as intermediate 
hosts of parasitic worms that affect man e.g. Guinea worms\nn01997825\ttiny marine and freshwater crustaceans with a shrimp-like body enclosed in a bivalve shell\nn01998183\tmarine crustaceans with feathery food-catching appendages; free-swimming as larvae; as adults form a hard shell and live attached to submerged surfaces\nn01998741\tbarnacle that attaches to rocks especially in intertidal zones\nn01999186\tstalked barnacle that attaches to ship bottoms or floating timbers\nn01999767\tany of numerous velvety-skinned wormlike carnivorous animals common in tropical forests having characteristics of both arthropods and annelid worms\nn02000954\tany of many long-legged birds that wade in water in search of food\nn02002075\tlarge mostly Old World wading birds typically having white-and-black plumage\nn02002556\tthe common stork of Europe; white with black wing feathers and a red bill\nn02002724\tOld World stork that is glossy black above and white below\nn02003037\tlarge Indian stork with a military gait\nn02003204\tlarge African black-and-white carrion-eating stork; its downy underwing feathers are used to trim garments\nn02003577\tstork with a grooved bill whose upper and lower parts touch only at the base and tip\nn02003839\tlarge white stork of warm regions of the world especially America\nn02004131\tlarge black-and-white stork of tropical Africa; its red bill has a black band around the middle\nn02004492\tlarge mostly white Australian stork\nn02004855\tan American stork that resembles the true ibises in having a downward-curved bill; inhabits wooded swamps of New World tropics\nn02005399\tlarge stork-like bird of the valley of the White Nile with a broad bill suggesting a wooden shoe\nn02005790\twading birds of warm regions having long slender down-curved bills\nn02006063\tany of several Old World birds of the genus Ibis\nn02006364\tAfrican ibis venerated by ancient Egyptians\nn02006656\twading birds having a long flat bill with a tip like a 
spoon\nn02006985\tpure white crested spoonbill of southern Eurasia and northeastern Africa\nn02007284\ttropical rose-colored New World spoonbill\nn02007558\tlarge pink to scarlet web-footed wading bird with down-bent bill; inhabits brackish lakes\nn02008041\tgrey or white wading bird with long neck and long legs and (usually) long bill\nn02008497\tlarge American heron having bluish-grey plumage\nn02008643\tlarge white heron of Florida and the Florida Keys\nn02008796\tany of various usually white herons having long plumes during breeding season\nn02009229\tsmall bluish-grey heron of the western hemisphere\nn02009380\tsmall New World egret\nn02009508\tOld World egret\nn02009750\twidely distributed Old World white egret\nn02009912\ta common egret of the genus Egretta found in America; it is a variety of the Old World white egret Casmerodius albus\nn02010272\tsmall white egret widely distributed in warm regions often found around grazing animals\nn02010453\tnocturnal or crepuscular herons\nn02010728\tnight heron of both Old and New Worlds\nn02011016\tNorth American night heron\nn02011281\ttropical American heron related to night herons\nn02011460\trelatively small compact tawny-brown heron with nocturnal habits and a booming cry; found in marshes\nn02011805\ta kind of bittern\nn02011943\ta kind of bittern\nn02012185\tsmall American bittern\nn02012849\tlarge long-necked wading bird of marshes and plains in many parts of the world\nn02013177\trare North American crane having black-and-white plumage and a trumpeting call\nn02013567\twading bird of South America and Central America\nn02013706\twading bird of Florida, Cuba and Jamaica having a drooping bill and a distinctive wailing call\nn02014237\tBrazilian Cariama; sole representative of the genus Cariama\nn02014524\tArgentinian Cariama\nn02014941\tany of numerous widely distributed small wading birds of the family Rallidae having short wings and very long toes for running on soft mud\nn02015357\tflightless New Zealand 
rail of thievish disposition having short wings each with a spur used in fighting\nn02015554\tany of several short-billed Old World rails\nn02015797\tcommon Eurasian rail that frequents grain fields\nn02016066\tEurasian rail of swamps and marshes\nn02016358\tany of various small aquatic birds of the genus Gallinula distinguished from rails by a frontal shield and a resemblance to domestic hens\nn02016659\tNorth American dark bluish-grey gallinule\nn02016816\tblack gallinule that inhabits ponds and lakes\nn02016956\tgallinules with showy purplish plumage\nn02017213\tpurple gallinule of southern Europe\nn02017475\tAmerican purple gallinule\nn02017725\tflightless New Zealand birds similar to gallinules\nn02018027\tslate-black slow-flying birds somewhat resembling ducks\nn02018207\ta coot found in North America\nn02018368\ta coot found in Eurasia\nn02018795\tlarge heavy-bodied chiefly terrestrial game bird capable of powerful swift flight; classified with wading birds but frequents grassy steppes\nn02019190\tlargest European land bird\nn02019438\tpopular Australian game bird\nn02019929\tsmall quail-like terrestrial bird of southern Eurasia and northern Africa that lacks a hind toe; classified with wading birds but inhabits grassy plains\nn02020219\ta variety of button quail having stripes\nn02020578\tsmall Australian bird related to the button quail; classified as wading bird but inhabits plains\nn02021050\tlarge gregarious crane-like bird of the forests of South America having glossy black plumage and a loud prolonged cry; easily domesticated\nn02021281\ttrumpeter of Brazil and Guiana; often kept to protect poultry in Brazil\nn02021795\ta bird that frequents coastal waters and the open ocean: gulls; pelicans; gannets; cormorants; albatrosses; petrels; etc.\nn02022684\tany of numerous wading birds that frequent mostly seashores and estuaries\nn02023341\tany of numerous chiefly shorebirds of relatively compact build having straight bills and large pointed wings; closely 
related to the sandpipers\nn02023855\tsmall plover of eastern North America\nn02023992\tAmerican plover of inland waters and fields having a distinctive cry\nn02024185\trare plover of upland areas of Eurasia\nn02024479\tplovers of Europe and America having the backs marked with golden-yellow spots\nn02024763\tlarge crested Old World plover having wattles and spurs\nn02025043\tmigratory shorebirds of the plover family that turn over stones in searching for food\nn02025239\tcommon Arctic turnstone that winters in South America and Australia\nn02025389\tcommon turnstone of the Pacific coast of North America\nn02026059\tany of numerous usually small wading birds having a slender bill and piping call; closely related to the plovers\nn02026629\tsandpiper-like shorebird of Pacific coasts of North America and South America\nn02026948\ta variety of sandpiper\nn02027075\tcommon North American sandpiper\nn02027357\tsmallest American sandpiper\nn02027492\tsmall common sandpiper that breeds in northern or Arctic regions and winters in southern United States or Mediterranean regions\nn02027897\tlarge European sandpiper with greenish legs\nn02028035\ta common Old World wading bird with long red legs\nn02028175\teither of two North American shorebird with yellow legs\nn02028342\ta variety of yellowlegs\nn02028451\ta variety of yellowlegs\nn02028727\tAmerican sandpiper that inflates its chest when courting\nn02028900\ta sandpiper that breeds in the Arctic and winters in the southern hemisphere\nn02029087\tOld World sandpiper with a curved bill like a curlew\nn02029378\tsmall sandpiper that breeds in the Arctic and migrates southward along sandy coasts in most of world\nn02029706\tlarge plover-like sandpiper of North American fields and uplands\nn02030035\tcommon Eurasian sandpiper; the male has an erectile neck ruff in breeding season\nn02030224\tfemale ruff\nn02030287\tany of several long-legged shorebirds having a loud whistling cry\nn02030568\ttattler of Pacific coastal 
regions\nn02030837\tlarge North American shorebird of eastern and Gulf Coasts\nn02030996\tgame bird of the sandpiper family that resembles a snipe\nn02031298\tshort-legged long-billed migratory Old World woodcock\nn02031585\tsmall long-billed woodcock; prized as a game bird\nn02031934\tOld or New World straight-billed game bird of the sandpiper family; of marshy areas; similar to the woodcocks\nn02032222\tcommon snipe of Eurasia and Africa\nn02032355\tAmerican snipe\nn02032480\tOld World snipe larger and darker than the whole snipe\nn02032769\ta small short-billed Old World snipe\nn02033041\tshorebird of the sandpiper family that resembles a snipe\nn02033208\ta dowitcher with a grey back\nn02033324\ta dowitcher with a red breast\nn02033561\tlarge migratory shorebirds of the sandpiper family; closely related to woodcocks but having a down-curved bill\nn02033779\tcommon Eurasian curlew\nn02033882\tNew World curlew that breeds in northern North America\nn02034129\tlarge wading bird that resembles a curlew; has a long slightly upturned bill\nn02034295\tNew World godwit\nn02034661\tlong-legged three-toed black-and-white wading bird of inland ponds and marshes or brackish lagoons\nn02034971\tstilt of southwestern United States to northern South America having black plumage extending from the head down the back of the neck\nn02035210\tstilt of Europe and Africa and Asia having mostly white plumage but with black wings\nn02035402\tstilt of the southwest Pacific including Australia and New Zealand having mostly white plumage but with black wings and nape of neck\nn02035656\tblackish stilt of New Zealand sometimes considered a color phase of the white-headed stilt\nn02036053\tlong-legged three-toed wading bird of brackish marshes of Australia\nn02036228\tweb-footed Australian stilt with reddish-brown pectoral markings\nn02036711\tlong-legged web-footed black-and-white shorebird with slender upward-curving bill\nn02037110\tblack-and-white shorebird with stout legs and bill; 
feed on oysters etc.\nn02037464\tsmall sandpiper-like shorebird having lobate toes and being good swimmers; breed in the Arctic and winter in the tropics\nn02037869\tphalarope of northern oceans and lakes\nn02038141\tbreeds in Arctic regions of Old and New Worlds; large flocks often seen far out at sea\nn02038466\tbreeds on the northern great plains of Canada\nn02038993\tOld World shorebird with long pointed wings and short legs; closely related to the coursers\nn02039171\tswift-footed terrestrial plover-like bird of southern Asia and Africa; related to the pratincoles\nn02039497\tcourser of desert and semidesert regions of the Old World\nn02039780\tAfrican courser that feeds on insect parasites on crocodiles\nn02040266\tlarge-headed large-eyed crepuscular or nocturnal shorebird of the Old World and tropical America having a thickened knee joint\nn02040505\tgull family; skimmer family; jaeger family; auk family\nn02041085\tlong-winged web-footed aquatic bird of the gull family\nn02041246\tmostly white aquatic bird having long pointed wings and short legs\nn02041678\tthe common gull of Eurasia and northeastern North America\nn02041875\twhite gull having a black back and wings\nn02042046\tlarge gull of the northern hemisphere\nn02042180\tsmall black-headed European gull\nn02042472\twhite Arctic gull; migrates as far south as England and New Brunswick\nn02042759\tsmall pearl-grey gull of northern regions; nests on cliffs and has a rudimentary hind toe\nn02043063\tsmall slender gull having narrow wings and a forked tail\nn02043333\tcommon tern of Eurasia and America having white black and grey plumage\nn02043808\tgull-like seabird that flies along the surface of the water with an elongated lower mandible immersed to skim out food\nn02044178\trapacious seabird that pursues weaker birds to make them drop their prey\nn02044517\ta variety of jaeger\nn02044778\tgull-like jaeger of northern seas\nn02044908\tlarge brown skua of the northern 
Atlantic\nn02045369\tblack-and-white short-necked web-footed diving bird of northern seas\nn02045596\tany of several small auks of the northern Pacific coasts\nn02045864\tblack-and-white northern Atlantic auk having a compressed sharp-edged bill\nn02046171\tsmall short-billed auk abundant in Arctic regions\nn02046759\tsmall black or brown speckled auks of northern seas\nn02046939\tnorthern Atlantic guillemot\nn02047045\tnorthern Pacific guillemot\nn02047260\tblack-and-white diving bird of northern seas\nn02047411\tthe most frequent variety of murre\nn02047517\ta variety of murre\nn02047614\tany of two genera of northern seabirds having short necks and brightly colored compressed bills\nn02047975\tcommon puffin of the northern Atlantic\nn02048115\tnorthern Pacific puffin\nn02048353\tnorthern Pacific puffin having a large yellow plume over each eye\nn02048698\tseabirds of the order Gaviiformes\nn02049088\tlarge somewhat primitive fish-eating diving bird of the northern hemisphere having webbed feet placed far back; related to the grebes\nn02049532\taquatic birds related to the loons\nn02050004\tsmall compact-bodied almost completely aquatic bird that builds floating nests; similar to loons but smaller and with lobate rather than webbed feet\nn02050313\tlarge Old World grebe with black ear tufts\nn02050442\tlarge stocky grebe of circumpolar regions having a dark neck\nn02050586\tsmall grebe with yellow ear tufts and a black neck; found in Eurasia and southern Africa as well as western United States\nn02050809\tsmall European grebe\nn02051059\tAmerican grebe having a black-banded whitish bill\nn02051474\tlarge fish-eating seabird with four-toed webbed feet\nn02051845\tlarge long-winged warm-water seabird having a large bill with a distensible pouch for fish\nn02052204\tlarge American pelican; white with black wing feathers\nn02052365\tsimilar to American white pelican\nn02052775\tlong-billed warm-water seabird with wide wingspan and forked tail\nn02053083\tlarge 
heavily built seabird with a long stout bill noted for its plunging dives for fish\nn02053425\tvery large white gannet with black wing tips\nn02053584\tsmall tropical gannet having a bright bill or bright feet or both\nn02054036\tlarge voracious dark-colored long-necked seabird with a distensible pouch for holding fish; used in Asia to catch fish\nn02054502\tfish-eating bird of warm inland waters having a long flexible neck and slender sharp-pointed bill\nn02054711\tblackish New World snakebird of swampy regions\nn02055107\tmostly white web-footed tropical seabird often found far from land\nn02055658\tflightless cold-water seabirds: penguins\nn02055803\tshort-legged flightless birds of cold southern especially Antarctic regions having webbed feet and wings modified as flippers\nn02056228\tmedium-sized penguins occurring in large colonies on the Adelie Coast of Antarctica\nn02056570\tlarge penguin on islands bordering the Antarctic Circle\nn02056728\tthe largest penguin; an Antarctic penguin\nn02057035\tsmall penguin of South America and southern Africa with a braying call\nn02057330\tsmall penguin of the Falkland Islands and New Zealand\nn02057731\tbird of the open seas\nn02057898\tlarge long-winged bird with hooked bill and tubular nostrils that wanders the open seas\nn02058221\tlarge web-footed birds of the southern hemisphere having long narrow wings; noted for powerful gliding flight\nn02058594\tvery large albatross; white with wide black wings\nn02058747\ta variety of albatross with black feet\nn02059162\trelatively small long-winged tube-nosed bird that flies far from land\nn02059541\tlarge black petrel of southern seas having a white mark on the chin\nn02059852\tlarge brownish petrel chiefly of Antarctic seas\nn02060133\theavy short-tailed oceanic bird of polar regions\nn02060411\tlong-winged oceanic bird that in flight skims close to the waves\nn02060569\tsmall black-and-white shearwater common in the northeastern Atlantic\nn02060889\tany of various small 
petrels having dark plumage with paler underparts\nn02061217\tsooty black petrel with white markings; of the northern Atlantic and Mediterranean\nn02061560\tmedium-sized storm petrel\nn02061853\tany of several small diving birds of southern hemisphere seas; somewhat resemble auks\nn02062017\twhales and dolphins; manatees and dugongs; walruses; seals\nn02062430\tlarge aquatic carnivorous mammal with fin-like forelimbs no hind limbs, including: whales; dolphins; porpoises; narwhals\nn02062744\tany of the larger cetacean mammals having a streamlined body and breathing through a blowhole on the head\nn02063224\twhale with plates of whalebone along the upper jaw for filtering plankton from the water\nn02063662\tlarge Arctic whalebone whale; allegedly the `right' whale to hunt because of its valuable whalebone and oil\nn02064000\tlarge-mouthed Arctic whale\nn02064338\tany of several baleen whales of the family Balaenopteridae having longitudinal grooves on the throat and a small pointed dorsal fin\nn02064816\tlargest mammal ever known; bluish-grey migratory whalebone whale mostly of southern hemisphere\nn02065026\tlarge flat-headed whalebone whale having deep furrows along the throat; of Atlantic and Pacific\nn02065263\tsimilar to but smaller than the finback whale\nn02065407\tsmall finback of coastal waters of Atlantic and Pacific\nn02065726\tlarge whalebone whale with long flippers noted for arching or humping its back as it dives\nn02066245\tmedium-sized greyish-black whale of the northern Pacific\nn02066707\tany of several whales having simple conical teeth and feeding on fish etc.\nn02067240\tlarge whale with a large cavity in the head containing spermaceti and oil; also a source of ambergris\nn02067603\tsmall sperm whale of warm waters of both coasts of North America\nn02067768\tvery small (to 8 feet) sperm whale of central coasts of Atlantic and Pacific\nn02068206\tany of several whales inhabiting all oceans and having beaklike jaws with vestigial teeth in the 
upper jaw\nn02068541\tnorthern Atlantic beaked whale with a bulbous forehead\nn02068974\tany of various small toothed whales with a beaklike snout; larger than porpoises\nn02069412\tblack-and-white dolphin that leaps high out of the water\nn02069701\tany of several dolphins with rounded forehead and well-developed beak; chiefly of northern Atlantic and Mediterranean\nn02069974\tthe most common dolphin of northern Atlantic and Mediterranean; often kept captive and trained to perform\nn02070174\ta bottlenose dolphin found in the Pacific Ocean\nn02070430\tany of several small gregarious cetacean mammals having a blunt snout and many teeth\nn02070624\tthe common porpoise of the northern Atlantic and Pacific\nn02070776\ta short porpoise that lives in the Gulf of California; an endangered species\nn02071028\tslaty-grey blunt-nosed dolphin common in northern seas\nn02071294\tpredatory black-and-white toothed whale with large dorsal fin; common in cold seas\nn02071636\tsmall dark-colored whale of the Atlantic coast of the United States; the largest male acts as pilot or leader for the school\nn02072040\tany of several long-snouted usually freshwater dolphins of South America and southern Asia\nn02072493\tsmall Arctic whale the male having a long spiral ivory tusk\nn02072798\tsmall northern whale that is white when adult\nn02073250\tany of two families of large herbivorous aquatic mammals with paddle-shaped tails and flipper-like forelimbs and no hind limbs\nn02073831\tsirenian mammal of tropical coastal waters of America; the flat tail is rounded\nn02074367\tsirenian tusked mammal found from eastern Africa to Australia; the flat tail is bilobate\nn02074726\textinct large sirenian mammal formerly found near the Asiatic coast of the Bering Sea\nn02075296\ta terrestrial or aquatic flesh-eating mammal\nn02075612\tan animal that feeds on both animal and vegetable substances\nn02075927\taquatic carnivorous mammal having a streamlined body specialized for swimming with limbs 
modified as flippers\nn02076196\tany of numerous marine mammals that come on shore to breed; chiefly of cold regions\nn02076402\tsilvery grey Antarctic seal subsisting on crustaceans\nn02076779\tpinniped mammal having external ear flaps and hind limbs used for locomotion on land; valued for its soft underfur\nn02077152\teared seal of the southern hemisphere; the thick soft underfur is the source of sealskin\nn02077384\ta fur seal of the Pacific coast of California and southward\nn02077658\tan eared seal of the northern Pacific\nn02077787\tof Pacific coast from Alaska southward to California\nn02077923\tany of several large eared seals of the northern Pacific related to fur seals but lacking their valuable coat\nn02078292\tof the southern coast of South America\nn02078574\toften trained as a show animal\nn02078738\ta variety of sea lion found in Australia\nn02079005\tlargest sea lion; of the northern Pacific\nn02079389\tany of several seals lacking external ear flaps and having a stiff hairlike coat with hind limbs reduced to swimming flippers\nn02079851\tsmall spotted seal of coastal waters of the northern hemisphere\nn02080146\tcommon Arctic seal; the young are all white\nn02080415\teither of two large northern Atlantic earless seals having snouts like trunks\nn02080713\tmedium-sized greyish to yellow seal with bristles each side of muzzle; of the Arctic Ocean\nn02081060\tmedium-sized blackish-grey seal with large inflatable sac on the head; of Arctic and northern Atlantic waters\nn02081571\teither of two large northern marine mammals having ivory tusks and tough hide over thick blubber\nn02081798\ta walrus of northern Atlantic and Arctic waters\nn02081927\ta walrus of the Bering Sea and northern Pacific\nn02082056\tin some classifications considered a suborder of Carnivora\nn02082190\tterrestrial carnivores; having toes separated to the base: dogs; cats; bears; badgers; raccoons\nn02082791\tnocturnal burrowing mammal of the grasslands of Africa that feeds on 
termites; sole extant representative of the order Tubulidentata\nn02083346\tany of various fissiped mammals with nonretractile claws and typically long muzzles\nn02083672\tfemale of any member of the dog family\nn02083780\ta bitch used for breeding\nn02084071\ta member of the genus Canis (probably descended from the common wolf) that has been domesticated by man since prehistoric times; occurs in many breeds\nn02084732\tinformal terms for dogs\nn02084861\tan inferior dog or one of mixed breed\nn02085019\ta nervous belligerent little mongrel dog\nn02085118\townerless half-wild mongrel dog common around Asian villages especially India\nn02085272\ta dog small and tame enough to be held in the lap\nn02085374\tany of several breeds of very small dogs kept purely as pets\nn02085620\tan old breed of tiny short-haired dog with protruding eyes from Mexico held to antedate Aztec civilization\nn02085782\tbreed of toy dogs originating in Japan having a silky black-and-white or red-and-white coat\nn02085936\tbreed of toy dogs having a long straight silky white coat\nn02086079\ta Chinese breed of small short-legged dogs with a long silky coat and broad flat muzzle\nn02086240\ta Chinese breed of small dog similar to a Pekingese\nn02086346\ta very small spaniel\nn02086478\tBritish breed having a long silky coat and rounded head with a short upturned muzzle\nn02086646\tred-and-white variety of English toy spaniel\nn02086753\ta toy English spaniel with a black-and-tan coat; named after Charles II who popularized it\nn02086910\tsmall slender toy spaniel with erect ears and a black-spotted brown to white coat\nn02087046\ta small active dog\nn02087122\ta dog used in hunting game\nn02087314\ta dog trained for coursing\nn02087394\ta powerful short-haired African hunting dog having a crest of reversed hair along the spine\nn02087551\tany of several breeds of dog used for hunting typically having large drooping ears\nn02088094\ttall graceful breed of hound with a long silky coat; native to 
the Near East\nn02088238\tsmooth-haired breed of hound with short legs and long ears\nn02088364\ta small short-legged smooth-coated breed of hound\nn02088466\ta breed of large powerful hound of European origin having very acute smell and used in tracking\nn02088632\ta very fast American hound; white mottled with bluish grey\nn02088745\tlarge hound used in hunting wild boars\nn02088839\tany of several breeds of hound developed for hunting raccoons\nn02088992\tany dog trained to hunt raccoons\nn02089078\tAmerican breed of large powerful hound dogs used for hunting raccoons and other game\nn02089232\tsmall long-bodied short-legged German breed of dog having a short sleek coat and long drooping ears; suited for following game into burrows\nn02089468\tinformal term\nn02089555\tmedium-sized glossy-coated hounds developed for hunting foxes\nn02089725\tan American breed of foxhounds used for hunting both in packs and individually\nn02089867\tan American breed of foxhound\nn02089973\tan English breed slightly larger than the American foxhounds originally used to hunt in packs\nn02090129\ta hound that resembles a foxhound but is smaller; used to hunt rabbits\nn02090253\ta brindle-coated American hound used in hunting bears and wild boars\nn02090379\ta speedy red or red-and-tan American hound\nn02090475\tthe largest breed of dogs; formerly used to hunt wolves\nn02090622\ttall fast-moving dog breed\nn02090721\tlarge breed of hound with a rough thick coat\nn02090827\ta tall slender dog of an ancient breed noted for swiftness and keen sight; used as a racing dog\nn02091032\ta toy dog developed from the greyhound\nn02091134\tsmall slender dog of greyhound type developed in England\nn02091244\tbreed of slender agile medium-sized hound found chiefly in the Balearic Islands; said to have been bred originally by the Pharaohs of ancient Egypt\nn02091467\tbreed of compact medium-sized dog with a heavy grey coat developed in Norway for hunting elk\nn02091635\thardy British hound having 
long pendulous ears and a thick coarse shaggy coat with an oily undercoat; bred for hunting otters\nn02091831\told breed of tall swift keen-eyed hunting dogs resembling greyhounds; from Egypt and southwestern Asia\nn02092002\tvery large and tall rough-coated dog bred for hunting deer; known as the royal dog of Scotland\nn02092173\ta large heavy hound formerly used in hunting stags and other large game; similar to but larger than a foxhound\nn02092339\tlarge breed of hound having a smooth greyish coat; originated in Germany\nn02092468\tany of several usually small short-bodied breeds originally trained to hunt animals living underground\nn02093056\ta powerful short-haired terrier originated in England by crossing the bulldog with terriers\nn02093256\tEnglish breed of strong stocky dog having a broad skull and smooth coat\nn02093428\tAmerican breed of muscular terriers with a short close-lying stiff coat\nn02093647\ta light terrier groomed to resemble a lamb\nn02093754\tsmall rough-coated terrier of British origin\nn02093859\tan Irish breed of medium-sized terriers with a silky blue-grey coat\nn02093991\tmedium-sized breed with a wiry brown coat; developed in Ireland\nn02094114\tEnglish breed of small terrier with a straight wiry grizzled coat and dropped ears\nn02094258\tEnglish breed of small short-legged terrier with a straight wiry red or grey or black-and-tan coat and erect ears\nn02094433\tvery small breed having a long glossy coat of bluish-grey and tan\nn02094562\tany of several breeds of terrier developed to catch rats\nn02094721\ta breed of short-haired rat terrier with a black-and-tan coat that was developed in Manchester, England\nn02094931\tbreed of small Manchester terrier\nn02095050\tsmall lively black-and-white terriers formerly used to dig out foxes\nn02095212\ta fox terrier with smooth hair\nn02095314\ta fox terrier with wiry hair\nn02095412\ta terrier with wiry hair\nn02095570\tbreed of wire-haired terrier originally from the Lake District of 
England and used for hunting\nn02095727\twire-haired terrier resembling Airedales but smaller; developed in Wales for hunting\nn02095889\ta wire-haired terrier with short legs that was first bred in Sealyham\nn02096051\tbreed of large wiry-coated terrier bred in Yorkshire\nn02096177\tsmall rough-haired breed of terrier from Scotland\nn02096294\tsmall greyish wire-haired breed of terrier from Australia similar to the cairn\nn02096437\ta breed of small terrier with long wiry coat and drooping ears\nn02096585\tsmall pug-faced American terrier breed having a smooth brindle or black coat with white markings\nn02096756\told German breed of sturdy black or greyish wire-haired terriers having a blunt muzzle ranging in size from fairly small to very large; used as ratters and guard dogs or police dogs\nn02097047\ta small schnauzer\nn02097130\ta large schnauzer\nn02097209\ta medium-sized schnauzer\nn02097298\told Scottish breed of small long-haired usually black terrier with erect tail and ears\nn02097474\tbreed of medium-sized terriers bred in Tibet resembling Old English sheepdogs with fluffy curled tails\nn02097658\tAustralian breed of toy dogs having a silky blue coat\nn02097786\tScottish breed of terrier with shaggy hair and long low body with short legs; native to the Isle of Skye\nn02097967\tselectively bred small Skye terrier with erect ears and a long silky coat\nn02098105\tIrish breed of medium-sized terrier with an abundant coat any shade of wheat and very hairy head and muzzle\nn02098286\tsmall white long-coated terrier developed in Scotland\nn02098413\ta breed of terrier having a long heavy coat raised in Tibet as watchdogs\nn02098550\ta dog trained to work with sportsmen when they hunt with guns\nn02098806\ta gun dog trained to locate or retrieve birds\nn02098906\ta dog accustomed to water and usually trained to retrieve waterfowl\nn02099029\ta dog with heavy water-resistant coat that can be trained to retrieve game\nn02099267\tan English breed having a shiny 
black or liver-colored coat; retrieves game from land or water\nn02099429\tan English breed having a tightly curled black or liver-colored coat; retrieves game from land or water\nn02099601\tan English breed having a long silky golden coat\nn02099712\tbreed originally from Labrador having a short black or golden-brown coat\nn02099849\tAmerican breed having a short thick oily coat ranging from brown to light tan\nn02099997\ta strong slender smooth-haired dog of Spanish origin having a white coat with brown or black patches; scents out and points to game\nn02100236\tliver or liver-and-white hunting dog developed in Germany; 3/4 pointer and 1/4 bloodhound\nn02100399\ta long-haired dog formerly trained to crouch on finding game but now to point\nn02100583\tHungarian hunting dog resembling the Weimaraner but having a rich deep red coat\nn02100735\tan English breed having a plumed tail and a soft silky coat that is chiefly white\nn02100877\tan Irish breed with a chestnut-brown or mahogany-red coat\nn02101006\ta Scottish breed with a black-and-tan coat\nn02101108\tany of several breeds of small to medium-sized gun dogs with a long silky coat and long frilled ears\nn02101388\ttall active short-tailed French breed of bird dog having a usually smooth orange- or liver-and-white coat\nn02101556\ta thickset spaniel with longish silky hair\nn02101670\tlarge usually black hunting and retrieving spaniel with a dense flat or slightly wavy coat; cross between cocker and Sussex spaniel\nn02101861\ta large spaniel with wavy silky coat usually black or liver and white\nn02102040\ta breed having typically a black-and-white coat\nn02102177\ta red-and-white breed slightly smaller than the English springer spaniel\nn02102318\ta small breed with wavy silky hair; originally developed in England\nn02102480\tan English breed with short legs and a golden liver-colored coat\nn02102605\tany dog of two large curly-coated breeds used for hunting waterfowl\nn02102806\tbreed of medium-sized spaniels 
originating in America having chocolate or liver-colored curly coat\nn02102973\tbreed of large spaniels developed in Ireland having a heavy coat of liver-colored curls and a topknot of long curls and a nearly hairless tail\nn02103181\tbreed of medium-sized long-headed dogs with downy undercoat and harsh wiry outer coat; originated in Holland but largely developed in France\nn02103406\tany of several breeds of usually large powerful dogs bred to work as draft animals and guard and guide dogs\nn02103841\ta dog trained to guard property\nn02104029\tlong-established Hungarian breed of tall light-footed but sturdy white dog; used also as a hunting dog\nn02104184\ta watchdog trained to attack on command\nn02104280\ta dog trained to guard a house\nn02104365\tbreed of small stocky black dogs originally used as watchdogs on boats in the Netherlands and Belgium\nn02104523\tany of various usually long-haired breeds of dog reared to herd and guard sheep\nn02104882\thardy working dog developed in Belgium for herding sheep\nn02105056\tblack-coated sheepdog with a heavily plumed tail\nn02105162\tfawn-colored short-haired sheepdog\nn02105251\told French breed of large strong usually black dogs having a long tail and long wavy and slightly stiff coat\nn02105412\tan Australian sheepdog with pointed ears\nn02105505\tHungarian breed of large powerful shaggy-coated white dog; used also as guard dog\nn02105641\tlarge sheepdog with a profuse shaggy bluish-grey-and-white coat and short tail; believed to trace back to the Roman occupation of Britain\nn02105855\ta small sheepdog resembling a collie that was developed in the Shetland Islands\nn02106030\ta silky-coated sheepdog with a long ruff and long narrow head developed in Scotland\nn02106166\tdeveloped in the area between Scotland and England usually having a black coat with white on the head and tip of tail used for herding both sheep and cattle\nn02106382\trough-coated breed used originally in Belgium for herding and guarding 
cattle\nn02106550\tGerman breed of large vigorous short-haired cattle dogs\nn02106662\tbreed of large shepherd dogs used in police work and as a guide for the blind\nn02106854\tany dog trained to assist police especially in tracking\nn02106966\tany of three breeds of dogs whose ears and tail are usually cropped\nn02107142\tmedium large breed of dog of German origin with a glossy black and tan coat; used as a watchdog\nn02107312\tsmall German version of a Doberman pinscher\nn02107420\tany of four Swiss breeds\nn02107574\tthe largest of the four Swiss breeds\nn02107683\tlarge powerful long-haired black-coated Swiss dog with deep tan or russet markings on legs and white blaze and feet and chest marking; formerly used for draft\nn02107908\ta smaller of the four Swiss breeds\nn02108000\tthe smallest of the Sennenhunde\nn02108089\ta breed of stocky medium-sized short-haired dog with a brindled coat and square-jawed muzzle developed in Germany\nn02108254\tan old breed of powerful deep-chested smooth-coated dog used chiefly as a watchdog and guard dog\nn02108422\tlarge powerful breed developed by crossing the bulldog and the mastiff\nn02108551\tvery large powerful rough-coated dog native to central Asia\nn02108672\ta sturdy thickset short-haired breed with a large head and strong undershot lower jaw; developed originally in England for bull baiting\nn02108915\tsmall stocky version of the bulldog having a sleek coat and square head\nn02109047\tvery large powerful smooth-coated breed of dog\nn02109150\ta dog trained to guide the blind\nn02109256\t(trademark) a guide dog trained to guide a blind person\nn02109391\tdog trained to assist the deaf by signaling the occurrence of certain sounds\nn02109525\ta Swiss alpine breed of large powerful dog with a thick coat of hair used as a rescue dog\nn02109687\ta dog that can alert or assist people with seizure disorders\nn02109811\ta dog trained to draw a sled usually in a team\nn02109961\tbreed of heavy-coated Arctic sled 
dog\nn02110063\tbreed of sled dog developed in Alaska\nn02110185\tbreed of sled dog developed in northeastern Siberia; they resemble the larger Alaskan malamutes\nn02110341\ta large breed having a smooth white coat with black or brown spots; originated in Dalmatia\nn02110532\ta brown-spotted dalmatian\nn02110627\tEuropean breed of small dog resembling a terrier with dark wiry hair and a tufted muzzle\nn02110806\tsmall smooth-haired breed of African origin having a tightly curled tail and the inability to bark\nn02110958\tsmall compact smooth-coated breed of Asiatic origin having a tightly curled tail and broad flat wrinkled muzzle\nn02111129\ta large dog (usually with a golden coat) produced by crossing a St Bernard and a Newfoundland\nn02111277\ta breed of very large heavy dogs with a thick coarse usually black coat; highly intelligent dogs and vigorous swimmers; developed in Newfoundland\nn02111500\tbred of large heavy-coated white dogs resembling the Newfoundland\nn02111626\tany of various stocky heavy-coated breeds of dogs native to northern regions having pointed muzzles and erect ears with a curled furry tail\nn02111889\tSiberian breed of white or cream-colored dog of the spitz family\nn02112018\tbreed of very small compact long-haired dogs of the spitz type\nn02112137\tbreed of medium-sized dogs with a thick coat and fluffy curled tails and distinctive blue-black tongues; believed to have originated in northern China\nn02112350\ta spitz-like dog having a shaggy greyish coat and tightly curled tail originating in Holland\nn02112497\tbreed of various very small compact wiry-coated dogs of Belgian origin having a short bearded muzzle\nn02112706\ta variety of Brussels griffon having a short smooth coat\nn02112826\teither of two Welsh breeds of long-bodied short-legged dogs with erect ears and a fox-like head\nn02113023\tthe smaller and straight-legged variety of corgi having pointed ears and a short tail\nn02113186\tslightly bowlegged variety of corgi having 
rounded ears and a long tail\nn02113335\tan intelligent dog with a heavy curly solid-colored coat that is usually clipped; an old breed sometimes trained as sporting dogs or as performing dogs\nn02113624\tthe breed of very small poodles\nn02113712\ta breed of small poodles\nn02113799\ta breed or medium-sized poodles\nn02113892\tthe largest breed of poodle\nn02113978\tany of an old breed of small nearly hairless dogs of Mexico\nn02114100\tany of various predatory carnivorous canine mammals of North America and Eurasia that usually hunt in packs\nn02114367\ta wolf with a brindled grey coat living in forested northern regions of North America\nn02114548\twolf of Arctic North America having white fur and a black-tipped tail\nn02114712\treddish-grey wolf of southwestern North America\nn02114855\tsmall wolf native to western North America\nn02115012\toffspring of a coyote and a dog\nn02115096\tOld World nocturnal canine mammal closely related to the dog; smaller than a wolf; sometimes hunts in a pack but usually singly or as a member of a pair\nn02115335\tany of various undomesticated mammals of the family Canidae that are thought to resemble domestic dogs as distinguished from jackals or wolves\nn02115641\twolflike yellowish-brown wild dog of Australia\nn02115913\tfierce wild dog of the forests of central and southeast Asia that hunts in packs\nn02116185\twild dog of northern South America\nn02116450\tsmall wild dog of eastern Asia having facial markings like those of a raccoon\nn02116738\ta powerful doglike mammal of southern and eastern Africa that hunts in large packs; now rare in settled area\nn02117135\tdoglike nocturnal mammal of Africa and southern Asia that feeds chiefly on carrion\nn02117512\tof northern Africa and Arabia and India\nn02117646\tof southern Africa\nn02117900\tAfrican hyena noted for its distinctive howl\nn02118176\tstriped hyena of southeast Africa that feeds chiefly on insects\nn02118333\talert carnivorous mammal with pointed muzzle and ears and 
a bushy tail; most are predators that do not hunt in packs\nn02118643\ta female fox\nn02118707\ta conventional name for a fox used in tales following usage in the old epic `Reynard the Fox'\nn02119022\tthe common Old World fox; having reddish-brown fur; commonly considered a single circumpolar species\nn02119247\tred fox in the color phase when its pelt is mostly black\nn02119359\tred fox in the color phase when its pelt is tipped with white\nn02119477\tNew World fox; often considered the same species as the Old World fox\nn02119634\tsmall grey fox of the plains of western North America\nn02119789\tsmall grey fox of southwestern United States; may be a subspecies of Vulpes velox\nn02120079\tthickly-furred fox of Arctic regions; brownish in summer and white in winter\nn02120278\ta variety of Arctic fox having a pale grey winter coat\nn02120505\tdark grey American fox; from Central America through southern United States\nn02120997\tany of various lithe-bodied roundheaded fissiped mammals, many with retractile claws\nn02121620\tfeline mammal usually having thick soft fur and no ability to roar: domestic cats; wildcats\nn02121808\tany domesticated member of the genus Felis\nn02122298\tinformal terms referring to a domestic cat\nn02122430\ta cat proficient at mousing\nn02122510\ta homeless cat\nn02122580\tan animal that has strayed (especially a domestic animal)\nn02122725\tmale cat\nn02122810\ta castrated tomcat\nn02122878\tfemale cat\nn02122948\tyoung domestic cat\nn02123045\ta cat with a grey or tawny coat mottled with black\nn02123159\ta cat having a striped coat\nn02123242\ta cat having black and cream-colored and yellowish markings\nn02123394\ta long-haired breed of cat\nn02123478\ta long-haired breed of cat similar to the Persian cat\nn02123597\ta slender short-haired blue-eyed breed of cat having a pale coat with dark ears paws face and tail tip\nn02123785\tSiamese cat having a bluish cream-colored body and dark grey points\nn02123917\ta short-haired breed with 
body similar to the Siamese cat but having a solid dark brown or grey coat\nn02124075\ta domestic cat of Egypt\nn02124157\ta term applied indiscriminately in the United States to any short-haired bluish-grey cat\nn02124313\ta small slender short-haired breed of African origin having brownish fur with a reddish undercoat\nn02124484\ta short-haired tailless breed of cat believed to originate on the Isle of Man\nn02124623\tany small or medium-sized cat resembling the domestic cat and living in the wild\nn02125010\ta desert wildcat\nn02125081\tbushy-tailed wildcat of Europe that resembles the domestic cat and is regarded as the ancestor of the domestic cat\nn02125311\tlarge American feline resembling a lion\nn02125494\tnocturnal wildcat of Central America and South America having a dark-spotted buff-brown coat\nn02125689\tlong-bodied long-tailed tropical American wildcat\nn02125872\twidely distributed wildcat of Africa and Asia Minor\nn02126028\tsmall Asiatic wildcat\nn02126139\tslender long-legged African wildcat having large untufted ears and tawny black-spotted coat\nn02126317\tsmall spotted wildcat of southern Asia and Malaysia\nn02126640\tsmall spotted wildcat found from Texas to Brazil\nn02126787\tsmall wildcat of the mountains of Siberia and Tibet and Mongolia\nn02127052\tshort-tailed wildcats with usually tufted ears; valued for their fur\nn02127292\tof northern Eurasia\nn02127381\tof northern North America\nn02127482\tsmall lynx of North America\nn02127586\tof southern Europe\nn02127678\tof deserts of northern Africa and southern Asia\nn02127808\tany of several large cats typically able to roar and living in the wild\nn02128385\tlarge feline of African and Asian forests usually having a tawny coat with black spots\nn02128598\tfemale leopard\nn02128669\ta leopard in the black color phase\nn02128757\tlarge feline of upland central Asia having long thick whitish fur\nn02128925\ta large spotted feline of tropical America similar to the leopard; in some 
classifications considered a member of the genus Felis\nn02129165\tlarge gregarious predatory feline of Africa and India having a tawny coat with a shaggy mane in the male\nn02129463\ta female lion\nn02129530\ta small or young lion\nn02129604\tlarge feline of forests in most of Asia having a tawny coat with black stripes; endangered\nn02129837\tsouthern short-haired tiger\nn02129923\ta female tiger\nn02129991\toffspring of a male lion and a female tiger\nn02130086\toffspring of a male tiger and a female lion\nn02130308\tlong-legged spotted cat of Africa and southwestern Asia having nonretractile claws; the swiftest mammal; can be trained to run down game\nn02130545\tany of many extinct cats of the Old and New Worlds having long swordlike upper canine teeth; from the Oligocene through the Pleistocene\nn02130925\tNorth American sabertooth; culmination of sabertooth development\nn02131653\tmassive plantigrade carnivorous or omnivorous mammals with long shaggy coats and strong claws\nn02132136\tlarge ferocious bear of Eurasia\nn02132320\ta conventional name for a bear used in tales following usage in the old epic `Reynard the Fox'\nn02132466\tyellowish-grey Syrian brown bear\nn02132580\tpowerful brownish-yellow bear of the uplands of western North America\nn02132788\tbrown bear of coastal Alaska and British Columbia\nn02133161\tbrown to black North American bear; smaller and less ferocious than the brown bear\nn02133400\treddish-brown color phase of the American black bear\nn02133704\tbear with a black coat living in central and eastern Asia\nn02134084\twhite bear of Arctic regions\nn02134418\tcommon coarse-haired long-snouted bear of south-central Asia\nn02134971\tsmall cat-like predatory mammals of warmer parts of the Old World\nn02135220\tcat-like mammal typically secreting musk used in perfumes\nn02135610\tcommon civet of India and southeast Asia\nn02135844\ta common civet of southeast Asia\nn02136103\tarboreal civet of Asia having a long prehensile tail and shaggy 
black hair\nn02136285\tlarge primitive cat-like carnivores inhabiting forests of Madagascar\nn02136452\tlargest carnivore of Madagascar; intermediate in some respects between cats and civets\nn02136794\tcivet of Madagascar\nn02137015\tagile Old World viverrine having a spotted coat and long ringed tail\nn02137302\tan East Indian civet\nn02137549\tagile grizzled Old World viverrine; preys on snakes and rodents\nn02137722\tkeen-sighted viverrine of southern Asia about the size of a ferret; often domesticated\nn02137888\tnorthern African mongoose; in ancient times thought to devour crocodile eggs\nn02138169\tspotted or striped arboreal civet of southeast Asia and East Indies\nn02138441\ta mongoose-like viverrine of South Africa having a face like a lemur and only four toes\nn02138647\ta meerkat with a thin and elongated tail\nn02138777\tburrowing diurnal meerkat of southern Africa; often kept as a pet\nn02139199\tnocturnal mouselike mammal with forelimbs modified to form membranous wings and anatomical adaptations for echolocation by which they navigate\nn02139671\tlarge Old World bat of warm and tropical regions that feeds on fruit\nn02140049\tlarge bat with a head that resembles the head of a fox\nn02140179\ta variety of fruit bat\nn02140268\ta variety of fruit bat\nn02140491\tany of various fruit bats of the genus Nyctimene distinguished by nostrils drawn out into diverging tubes\nn02140858\ta variety of fruit eating bat\nn02141306\ttypically having large ears and feeding primarily on insects; worldwide in distribution\nn02141611\ta carnivorous bat with ears like a mouse\nn02141713\tbat having a leaflike flap at the end of the nose; especially of the families Phyllostomatidae and Rhinolophidae and Hipposideridae\nn02142407\tlarge-eared greyish bat of southern California and northwestern Mexico\nn02142734\tNew World bat with a pointed nose leaf; found from southern United States to Paraguay\nn02142898\ta variety of leaf-nosed bat\nn02143142\tsmall-eared Mexican bat 
with a long slender nose\nn02143439\ta bat of the family Rhinolophidae having a horseshoe-shaped leaf on the nose\nn02143891\tany of numerous bats of the family Hipposideridae of northwest Africa or Philippines or Australia having a horseshoe-shaped leaf on the nose\nn02144251\ta common bat of northwestern Australia having orange or yellow fur\nn02144593\tany New or Old World carnivorous bat erroneously thought to suck blood but in fact feeding on insects\nn02144936\tlarge carnivorous Old World bat with very large ears\nn02145424\ta variety of carnivorous bat\nn02145910\tcommon Eurasian bat with white-tipped hairs in its coat\nn02146201\tNorth American bat of a brick or rusty red color with hairs tipped with white\nn02146371\tany of numerous medium to small insectivorous bats found worldwide in caves and trees and buildings\nn02146700\tthe small common North American bat; widely distributed\nn02146879\tsmall bat of southwest United States that lives in caves etc.\nn02147173\trather large North American brown bat; widely distributed\nn02147328\tcommon brown bat of Europe\nn02147591\tdrab yellowish big-eared bat that lives in caves\nn02147947\tsmall European brown bat\nn02148088\tone of the smallest bats of eastern North America\nn02148512\ta large bat of the southwestern United States having spots and enormous ears\nn02148835\tany of various Old or New World bats having very long ears\nn02148991\tbat of western North America having extremely large ears\nn02149420\tsmall swift insectivorous bat with leathery ears and a long tail; common in warm regions\nn02149653\tthe common freetail bat of southern United States having short velvety fur; migrates southward for winter\nn02149861\tsmall brown bat of California and northern Mexico\nn02150134\ta soft-furred chocolate-brown bat with folded ears and small wings; often runs along the ground\nn02150482\tany of various tropical American bats of the family Desmodontidae that bite mammals and birds to feed on their 
blood\nn02150885\tmouse-sized bat of tropical Central America and South America having sharp incisor and canine teeth; feeds on the blood of birds and mammals\nn02151230\tsimilar in size and habits to Desmodus rotundus; of tropical America including southern California and Texas\nn02152740\tany animal that lives by preying on other animals\nn02152881\tanimal hunted or caught for food\nn02152991\tanimal hunted for food or sport\nn02153109\tlarge animals that are hunted for sport\nn02153203\tany bird (as grouse or pheasant) that is hunted for sport\nn02153809\ta burrowing mammal having limbs adapted for digging\nn02156732\ta vertebrate animal having four feet or legs or leglike appendages\nn02156871\tan animal especially a mammal having four limbs specialized for walking\nn02157206\tan animal having six feet\nn02157285\tan animal with two feet\nn02159955\tsmall air-breathing arthropod\nn02160947\tan insect that lives in a colony with other insects of the same species\nn02161225\tinsects that undergo complete metamorphosis\nn02161338\tan insect that strips the leaves from plants\nn02161457\tan insect that carries pollen from one flower to another\nn02161588\tany of various insects that deposit their eggs in plants causing galls in which the larvae feed\nn02162561\tany of various mecopterous insects of the family Panorpidae of the northern hemisphere having a long beak and long antennae; males have a tail like that of a scorpion except it is not venomous\nn02163008\tany of various mecopterous insects of the family Bittacidae\nn02163297\tany of numerous minute wingless primitive insects possessing a special abdominal appendage that allows the characteristic nearly perpetual springing pattern; found in soil rich in organic debris or on the surface of snow or water\nn02164464\tinsect having biting mouthparts and front wings modified to form horny covers overlying the membranous rear wings\nn02165105\tactive usually bright-colored beetle that preys on other 
insects\nn02165456\tsmall round bright-colored and spotted beetle that usually feeds on aphids and other insect pests\nn02165877\tred ladybug with a black spot on each wing\nn02166229\tintroduced into the United States from Mexico; feeds on the foliage of the bean plant\nn02166567\ta variety of ladybug\nn02166826\tnative to Australia; introduced elsewhere to control scale insects\nn02167151\tpredacious shining black or metallic terrestrial beetle that destroys many injurious insects\nn02167505\tbeetle that ejects audibly a pungent vapor when disturbed\nn02167820\tany beetle of the genus Calosoma\nn02167944\tlarge metallic blue-green beetle that preys on caterpillars; found in North America\nn02168245\tnocturnal beetle common in warm regions having luminescent abdominal organs\nn02168427\tthe luminous larva or wingless grub-like female of a firefly\nn02168699\tlong-bodied beetle having very long antennae\nn02169023\tany of several beetles whose larvae bore holes in dead or dying trees especially conifers\nn02169218\tlarge beetle whose larvae bore holes in pine trees\nn02169497\tbrightly colored beetle that feeds on plant leaves; larvae infest roots and stems\nn02169705\tany small leaf beetle having enlarged hind legs and capable of jumping\nn02169974\tblack-and-yellow beetle that feeds in adult and larval stages on potato leaves; originally of eastern Rocky Mountains; now worldwide\nn02170400\tsmall beetle whose larvae are household pests feeding on woolen fabrics\nn02170599\ta small black and red and white carpet beetle\nn02170738\ta carpet beetle that is solid black in color\nn02170993\tpredacious on other insects; usually brightly colored or metallic\nn02171164\tEuropean beetle; infests beehives\nn02171453\tbeetle having antennae with hard platelike terminal segments\nn02171869\tany of numerous species of stout-bodied beetles having heads with horny spikes\nn02172182\tany of numerous beetles that roll balls of dung on which they feed and in which they lay 
eggs\nn02172518\tscarabaeid beetle considered divine by ancient Egyptians\nn02172678\tany of various dung beetles\nn02172761\tOld World dung beetle that flies with a droning sound\nn02172870\tany of various large usually brown North American leaf-eating beetles common in late spring; the larvae feed on roots of grasses etc.\nn02173113\tlarge greenish June beetle of southern United States\nn02173373\tsmall metallic green and brown beetle native to eastern Asia; serious plant pest in North America\nn02173784\tintroduced into United States from the Orient; larvae feed on roots of sugarcane and other grasses\nn02174001\tany of various large chiefly tropical beetles having horns on the head; pest on coconuts\nn02174355\tany of various beetles of the family (or subfamily) Melolonthidae\nn02174659\tany of various large European beetles destructive to vegetation as both larvae and adult\nn02175014\tcommon North American beetle: larvae feed on roots and adults on leaves and flowers of e.g. rose bushes or apple trees or grape vines\nn02175569\ta common metallic green European beetle: larvae feed on plant roots and adults on leaves and flowers of e.g. 
roses\nn02175916\ta kind of lamellicorn beetle; the male has branched mandibles resembling antlers\nn02176261\tany of various widely distributed beetles\nn02176439\table to right itself when on its back by flipping into the air with a clicking sound\nn02176747\ttropical American click beetle having bright luminous spots\nn02176916\twormlike larva of various elaterid beetles; feeds on roots of many crop plants\nn02177196\tany of numerous aquatic beetles usually having a smooth oval body and flattened hind legs for swimming\nn02177506\taquatic beetle that circles rapidly on the water surface\nn02177775\tbores through wood making a ticking sound popularly thought to presage death\nn02177972\tany of several families of mostly small beetles that feed on plants and plant products; especially snout beetles and seed beetles\nn02178411\tsmall weevil having a prolonged snout; destructive to e.g. grains and nuts\nn02178717\tgreyish weevil that lays its eggs in cotton bolls destroying the cotton\nn02179012\tbeetle that produces a secretion that blisters the skin\nn02179192\tany of various beetles that exude an oily substance from the leg joints that deters enemies\nn02179340\tgreen beetle of southern Europe\nn02179891\ta vector of the fungus causing Dutch elm disease\nn02180233\tsmall beetle that bores tunnels in the bark and wood of trees; related to weevils\nn02180427\tsmall beetle that likes to bore through the bark of spruce trees and eat the cambium which eventually kills the tree\nn02180875\tactive beetle typically having predatory or scavenging habits\nn02181235\tsluggish hard-bodied black terrestrial weevil whose larvae feed on e.g. 
decaying plant material or grain\nn02181477\tthe larva of beetles of the family Tenebrionidae\nn02181724\tan insect that infests flour and stored grains\nn02182045\ta small beetle that infests the seeds of legumes\nn02182355\tlarvae live in and feed on seeds of the pea plant\nn02182642\tlarvae live in and feed on growing or stored beans\nn02182930\tbrown weevil that infests stored grain especially rice\nn02183096\ta beetle from China that has been found in the United States and is a threat to hardwood trees; lives inside the tree; no natural predators in the United States\nn02183507\tany of a small order of slender typically tropical insects that nest in colonies in silken tunnels that they spin\nn02183857\twingless usually flattened bloodsucking insect parasitic on warm-blooded animals\nn02184473\thead or body louse\nn02184589\tinfests the head and body of humans\nn02184720\ta parasitic louse that infests the body of human beings\nn02185167\ta louse that infests the pubic region of the human body\nn02185481\twingless insect with mouth parts adapted for biting; mostly parasitic on birds\nn02186153\tany wingless bloodsucking parasitic insect noted for ability to leap\nn02186717\tthe most common flea attacking humans\nn02187150\tflea that attacks dogs and cats\nn02187279\tflea that breeds chiefly on cats and dogs and rats\nn02187554\tsmall tropical flea; the fertile female burrows under the skin of the host including humans\nn02187900\tparasitic on especially the heads of chickens\nn02188699\tinsects having usually a single pair of functional wings (anterior pair) with the posterior pair reduced to small knobbed structures and mouth parts adapted for sucking or lapping or piercing\nn02189363\tfragile mosquito-like flies that produce galls on plants\nn02189670\tsmall fly whose larvae damage wheat and other grains\nn02190166\ttwo-winged insects characterized by active flight\nn02190790\tcommon fly that frequents human habitations and spreads many 
diseases\nn02191273\tbloodsucking African fly; transmits sleeping sickness etc.\nn02191773\tlarge usually hairy metallic blue or green fly; lays eggs in carrion or dung or wounds\nn02191979\tblowfly with iridescent blue body; makes a loud buzzing noise in flight\nn02192252\tblowfly with brilliant coppery green body\nn02192513\tfly whose larvae feed on carrion or the flesh of living animals\nn02192814\tbristly fly whose larvae live parasitically in caterpillars and other insects; important in control of noxious insects\nn02193009\tany of various large flies that annoy livestock\nn02193163\tstout-bodied hairy dipterous fly whose larvae are parasites on humans and other mammals\nn02194249\tlarge tropical American fly; parasitic on humans and other mammals\nn02194750\tlarvae are parasitic on sheep\nn02195091\thairy bee-like fly whose larvae produce lumpy abscesses (warbles) under the skin of cattle\nn02195526\tlarge swift fly the female of which sucks blood of various animals\nn02195819\thairy nectar-eating fly that resembles a bee; larvae are parasitic on larvae of bees and related insects\nn02196119\tswift predatory fly having a strong body like a bee with the proboscis hardened for sucking juices of other insects captured on the wing\nn02196344\tany of numerous small insects whose larvae feed on fruits\nn02196896\tlarvae bore into and feed on apples\nn02197185\tsmall black-and-white fly that damages citrus and other fruits by implanting eggs that hatch inside the fruit\nn02197689\tsmall fruit fly used by Thomas Hunt Morgan in studying basic mechanisms of inheritance\nn02197877\tflies whose larvae feed on pickles and imperfectly sealed preserves\nn02198129\tany of various small moths or dipterous flies whose larvae burrow into and feed on leaf tissue especially of the family Gracilariidae\nn02198532\tbloodsucking dipterous fly parasitic on birds and mammals\nn02198859\twinged fly parasitic on horses\nn02199170\twingless fly that is an external parasite on sheep and 
cattle\nn02199502\tsmall black European fly introduced into North America; sucks blood from cattle especially at the base of the horn\nn02200198\ttwo-winged insect whose female has a long proboscis to pierce the skin and suck the blood of humans and animals\nn02200509\tlarva of a mosquito\nn02200630\t(British usage) mosquito\nn02200850\tmosquito that transmits yellow fever and dengue\nn02201000\tstriped native of Japan thriving in southwestern and midwestern United States and spreading to the Caribbean; potential carrier of serious diseases\nn02201497\tany mosquito of the genus Anopheles\nn02201626\ttransmits the malaria parasite\nn02202006\tcommon house mosquito\nn02202124\twidespread tropical mosquito that transmits filarial worms\nn02202287\tany of various small biting flies: midges; biting midges; black flies; sand flies\nn02202678\tminute two-winged insect that sucks the blood of mammals and birds and other insects\nn02203152\tminute two-winged mosquito-like fly lacking biting mouthparts; appear in dancing swarms especially near water\nn02203592\tmosquito-like insect whose larvae feed on fungi or decaying vegetation\nn02203978\ta fly of the family Psychodidae\nn02204249\tany of various small dipterous flies; bloodsucking females can transmit sandfly fever and leishmaniasis\nn02204722\tminute blackish gregarious flies destructive to mushrooms and seedlings\nn02204907\tlarva of fungus gnat that feed on cereals and other grains; they march in large companies in regular order when the food is exhausted\nn02205219\tlong-legged slender flies that resemble large mosquitoes but do not bite\nn02205673\tsmall blackish stout-bodied biting fly having aquatic larvae; sucks the blood of birds as well as humans and other mammals\nn02206270\tinsects having two pairs of membranous wings and an ovipositor specialized for stinging or piercing\nn02206856\tany of numerous hairy-bodied insects including social and solitary species\nn02207179\tstingless male bee in a colony of 
social bees (especially honeybees) whose sole function is to mate with the queen\nn02207345\tfertile egg-laying female bee\nn02207449\tsterile member of a colony of social insects that forages for food and cares for the larvae\nn02207647\ta wingless sterile ant or termite having a large head and powerful jaws adapted for defending the colony\nn02207805\tsterile bee specialized to collect food and maintain the hive\nn02208280\tsocial bee often domesticated for the honey it produces\nn02208498\ta strain of bees that originated in Brazil in the 1950s as a cross between an aggressive African bee and a honeybee; retains most of the traits of the African bee; now spread as far north as Texas\nn02208848\tdark-colored ill-tempered honeybee supposedly of German origin\nn02208979\tgreyish highly productive European honeybee that has a quiet disposition\nn02209111\tyellowish honeybee resembling the Carniolan bee in its habits\nn02209354\tlarge solitary bee that lays eggs in tunnels bored into wood or plant stems\nn02209624\trobust hairy social bee of temperate regions\nn02209964\ta bee that is parasitic in the nests of bumblebees\nn02210427\ta bee that is a member of the genus Andrena\nn02210921\ta common solitary bee important for pollinating alfalfa in the western United States\nn02211444\tbee that cuts rounded pieces from leaves and flowers to line its nest\nn02211627\tany of numerous solitary bees that build nests of hardened mud and sand\nn02211896\tsolitary bee that builds nests of mud or pebbles cemented together and attached to a plant\nn02212062\tsocial or solitary hymenopterans typically having a slender body with the abdomen attached by a narrow stalk and having a formidable sting\nn02212602\tmostly social nest-building wasps\nn02212958\tany of several social wasps that construct nests of a substance like paper\nn02213107\tlarge stinging paper wasp\nn02213239\tEuropean hornet introduced into the United States\nn02213543\ta variety of vespid wasp\nn02213663\tNorth 
American hornet\nn02213788\tsmall yellow-marked social wasp commonly nesting in the ground\nn02214096\ta variety of paper wasp\nn02214341\tany of various solitary wasps that construct nests of hardened mud for their young\nn02214499\tany of various solitary wasps that construct vase-shaped cells of mud for their eggs\nn02214660\ta family of wasps\nn02214773\ta solitary wasp of the family Mutillidae; the body has a coat of brightly colored velvety hair and the females are wingless\nn02215161\tany of various solitary wasps\nn02215621\tsolitary wasp that constructs nests of hardened mud or clay for the young\nn02215770\tsolitary wasp that digs nests in the soil and stocks them with paralyzed insects for the larvae\nn02216211\tlarge black or rust-colored wasp that preys on cicadas\nn02216365\twasp that constructs mud cells on a solid base in which females place eggs laid in paralyzed insect larvae\nn02216740\tsmall solitary wasp that produces galls on oaks and other plants\nn02217563\tany of various tiny insects whose larvae are parasites on eggs and larvae of other insects; many are beneficial in destroying injurious insects\nn02217839\tlarva of chalcid flies injurious to the straw of wheat and other grains\nn02218134\ta variety of chalcid fly\nn02218371\thymenopterous insect that resembles a wasp and whose larvae are parasitic on caterpillars and other insect larvae\nn02218713\tinsect whose female has a saw-like ovipositor for inserting eggs into the leaf or stem tissue of a host plant\nn02219015\tsmall black sawfly native to Europe but established in eastern United States; larvae mine the leaves of birches causing serious defoliation\nn02219486\tsocial insect living in organized colonies; characteristically the males and fertile queen have wings during breeding season; wingless sterile females are the workers\nn02220055\tsmall red ant of warm regions; a common household pest\nn02220225\ttiny glossy black ant; nests outdoors but invades houses for 
food\nn02220518\ttropical nomadic ant that preys mainly on other insects\nn02220804\tant that nests in decaying wood in which it bores tunnels for depositing eggs\nn02221083\tomnivorous ant of tropical and subtropical America that can inflict a painful sting\nn02221414\treddish-brown European ant typically living in anthills in woodlands\nn02221571\tany of various ants captured as larvae and enslaved by another species\nn02221715\tan ant frequently enslaved\nn02221820\tan ant that attacks colonies of other ant species and carries off the young to be reared as slave ants\nn02222035\tslave-making ant widely distributed over the northern hemisphere\nn02222321\tany of the large fierce Australian ants of the genus Myrmecia\nn02222582\tsmall reddish slave-making ant species\nn02223266\twhitish soft-bodied ant-like social insect that feeds on wood\nn02223520\tany of various termites that live in and feed on dry wood that is not connected with the soil\nn02224023\tdestructive European termite\nn02224713\tAustralian termite; sole living species of Mastotermes; called a living fossil; apparent missing link between cockroaches and termites\nn02225081\textinct termite found in amber in the Dominican Republic\nn02225798\textremely destructive dry-wood termite of warm regions\nn02226183\tany of various insects having leathery forewings and membranous hind wings and chewing mouthparts\nn02226429\tterrestrial plant-eating insect with hind legs adapted for leaping\nn02226821\tgrasshopper with short antennae\nn02226970\tmigratory grasshoppers of warm regions having short antennae\nn02227247\tOld World locust that travels in vast swarms stripping large areas of vegetation\nn02227604\tserious pest of grain-growing and range areas of central and western United States\nn02227966\tgrasshoppers with long threadlike antennae and well-developed stridulating organs on the forewings of the male\nn02228341\tlarge green long-horned grasshopper of North America; males produce shrill sounds by 
rubbing together special organs on the forewings\nn02228697\tlarge dark wingless cricket-like katydid of arid parts of western United States\nn02229156\tlarge wingless nocturnal grasshopper that burrows in loose soil along the Pacific coast of the United States\nn02229544\tleaping insect; male makes chirping noises by rubbing the forewings together\nn02229765\tdigs in moist soil and feeds on plant roots\nn02230023\tlives in human dwellings; naturalized in parts of America\nn02230187\tcommon American black cricket; attacks crops and also enters dwellings\nn02230480\tpale arboreal American cricket noted for loud stridulation\nn02230634\tpale yellowish tree cricket widely distributed in North America\nn02231052\tlarge cylindrical or flattened mostly tropical insects with long strong legs that feed on plants; walking sticks and leaf insects\nn02231487\tany of various mostly tropical insects having long twiglike bodies\nn02231803\ta variety of stick insect\nn02232223\ttropical insect having a flattened leaflike body; common in southern Asia and the East Indies\nn02233338\tany of numerous chiefly nocturnal insects; some are domestic pests\nn02233943\tdark brown cockroach originally from orient now nearly cosmopolitan in distribution\nn02234355\tlarge reddish brown free-flying cockroach originally from southern United States but now widely distributed\nn02234570\twidely distributed in warm countries\nn02234848\tsmall light-brown cockroach brought to United States from Europe; a common household pest\nn02235205\tlarge tropical American cockroaches\nn02236044\tpredacious long-bodied large-eyed insect of warm regions; rests with forelimbs raised as in prayer\nn02236241\tthe common mantis\nn02236355\tgeneral term for any insect or similar creeping or crawling invertebrate\nn02236896\tinsects with sucking mouthparts and forewings thickened and leathery at the base; usually show incomplete metamorphosis\nn02237424\tsmall bright-colored insect that feeds on plant 
juices\nn02237581\ta variety of leaf bug\nn02237868\tyellow or orange leaf bug with four black stripes down the back; widespread in central and eastern North America\nn02238235\tvector of viral plant diseases\nn02238358\twidespread plant and fruit pest\nn02238594\tsmall bug having body and wings covered with a lacy network of raised lines\nn02238887\ta true bug: usually bright-colored; pest of cultivated crops and some fruit trees\nn02239192\tsmall black-and-white insect that feeds on cereal grasses\nn02239528\ta true bug\nn02239774\tlarge black American bug that sucks sap of vines of the gourd family\nn02240068\tlarge sap-sucking bug with leaflike expansions on the legs\nn02240517\tbug of temperate regions that infests especially beds and feeds on human blood\nn02241008\tpredaceous aquatic insect that swims on its back and may inflict painful bites\nn02241426\tany of various insects of the order Hemiptera and especially of the suborder Heteroptera\nn02241569\ttrue bugs: insects whose forewings are membranous but have leathery tips\nn02241799\ta true bug: large aquatic bug adapted to living in or on the surface of water\nn02242137\tlarge water bug with piercing and sucking mouthparts; feeds on young fishes\nn02242455\tlong-legged aquatic insect having the front legs fitted for seizing and holding prey and the abdomen extended by a long breathing tube\nn02243209\tcarnivorous aquatic bug having paddle-like hind legs\nn02243562\tlong-legged bug that skims about on the surface of water\nn02243878\ta variety of water strider\nn02244173\ta true bug: long-legged predacious bug living mostly on other insects; a few suck blood of mammals\nn02244515\tlarge bloodsucking bug\nn02244797\tlarge predatory North American bug that sucks the blood of other insects\nn02245111\ta true bug: brightly colored bug that can exude a stain\nn02245443\ta true bug: bug that damages and stains the lint of developing cotton\nn02246011\tinsects having membranous forewings and hind 
wings\nn02246628\tminute insect that feeds on plant juices; related to scale insects\nn02246941\twhitefly that attacks citrus trees\nn02247216\twhitefly that inhabits greenhouses\nn02247511\ta variety of whitefly\nn02247655\ta strain of pest accidentally imported into Florida from the Middle East then spread to California where it is a very serious pest feeding on almost all vegetable crops and poinsettias\nn02248062\tfeeds primarily on cotton\nn02248368\tscale insects and mealybugs\nn02248510\tsmall homopterous insect that usually lives and feeds on plants and secretes a protective waxy covering\nn02248887\tan insect active in all stages\nn02249134\tpest on citrus trees\nn02249515\tinsect having a firm covering of wax especially in the female\nn02249809\tsmall east Asian insect naturalized in the United States that damages fruit trees\nn02250280\tMexican red scale insect that feeds on cacti; the source of a red dye\nn02250822\tscalelike plant-eating insect coated with a powdery waxy secretion; destructive especially of fruit trees\nn02251067\tdestructive especially to citrus\nn02251233\tAsiatic insect introduced accidentally into United States; pest on citrus and apple trees\nn02251593\tfeeds on a wide variety of cultivated plants but especially destructive to citrus\nn02251775\tany of several small insects especially aphids that feed by sucking the juices from plants\nn02252226\tany of various small plant-sucking insects\nn02252799\tbright green aphid; feeds on and causes curling of apple leaves\nn02252972\tblackish aphid that infests e.g. 
beans and sugar beets\nn02253127\tgreenish aphid; pest on garden and crop plants\nn02253264\tyellowish green aphid that is especially destructive to peaches\nn02253494\texcretes a honeylike substance eaten by ants\nn02253715\tsecretes a waxy substance like a mass of fine curly white cotton or woolly threads\nn02253913\tprimarily a bark feeder on aerial parts and roots of apple and other trees\nn02254246\tattacks alders\nn02254697\tany of various insects that feed and form galls on conifers\nn02254901\ta variety of adelgid\nn02255023\ta variety of adelgid\nn02255391\tan insect that feeds on hemlocks; its egg sacs are small fuzzy white balls like artificial snow on a Christmas tree\nn02256172\tsmall active cicada-like insect with hind legs adapted for leaping; feeds on plant juices\nn02256656\tstout-bodied insect with large membranous wings; male has drum-like organs for producing a high-pitched drone\nn02257003\tits distinctive song is heard during July and August\nn02257284\tNorth American cicada; appears in great numbers at infrequent intervals because the nymphs take 13 to 17 years to mature\nn02257715\tsmall leaping herbivorous insect that lives in a mass of protective froth which it and its larvae secrete\nn02257985\ta variety of spittlebug\nn02258198\tNorth American insect that severely damages grasses\nn02258508\tNorth American insect that attacks pines\nn02258629\tfeeds on pines in northern United States\nn02259212\tsmall leaping insect that sucks the juices of plants\nn02259377\trelated to the leafhoppers and spittlebugs but rarely damages cultivated plants\nn02259708\tsmall leaping insect that sucks juices of branches and twigs\nn02259987\tlarge brightly marked tropical insect with a process like a snout that was formerly thought to emit light\nn02260421\tsmall soft-bodied insect with chewing mouthparts and either no wings or two pairs\nn02260863\tsmall winged insect living on the bark and leaves of trees and feeding on e.g. 
fungi and decaying plant matter\nn02261063\tany of several insects living on the bark of plants\nn02261419\tminute wingless psocopterous insects injurious to books and papers\nn02261757\ta variety of booklouse\nn02262178\tshort-lived insect\nn02262449\tslender insect with delicate membranous wings having an aquatic larval stage and terrestrial adult stage usually lasting less than two days\nn02262803\tprimitive winged insect with a flattened body; used as bait by fishermen; aquatic gilled larvae are carnivorous and live beneath stones\nn02263378\tinsect having biting mouthparts and four large membranous wings with netlike veins\nn02264021\twinged insect resembling a dragonfly; the larvae (doodlebugs) dig conical pits where they wait to catch e.g. ants\nn02264232\tthe larva of any of several insects\nn02264363\tany of two families of insects with gauzy wings (Chrysopidae and Hemerobiidae); larvae feed on insect pests such as aphids\nn02264591\tcarnivorous larva of lacewing flies\nn02264885\tpale green unpleasant-smelling lacewing fly having carnivorous larvae\nn02265330\tsmall dark-colored lacewing fly\nn02266050\tlarge soft-bodied insect having long slender mandibles in the male; aquatic larvae often used as bait\nn02266269\tlarge brown aquatic larva of the dobsonfly; used as fishing bait\nn02266421\tsimilar to but smaller than the dobsonfly; larvae are used as fishing bait\nn02266864\tdark-colored insect having predaceous aquatic larvae\nn02267208\tpredatory insect of western North America having a long necklike prothorax\nn02267483\tinsect that resembles a mantis; larvae are parasites in the nests of spiders and wasps\nn02268148\tlarge primitive predatory aquatic insect having two pairs of membranous wings\nn02268443\tslender-bodied non-stinging insect having iridescent wings that are outspread at rest; adults and nymphs feed on mosquitoes etc.\nn02268853\tslender non-stinging insect similar to but smaller than the dragonfly but having wings folded when at 
rest\nn02269196\tcaddis fly\nn02269340\tsmall moth-like insect having two pairs of hairy membranous wings and aquatic larvae\nn02269522\tinsect larva that constructs a protective case around its body\nn02269657\tlarva of the caddis fly; constructs a case of silk covered with sand or plant debris\nn02270011\tprimitive wingless insects: bristletail\nn02270200\tsmall wingless insect with a long bristlelike tail\nn02270623\tsilver-grey wingless insect found in houses feeding on book bindings and starched clothing\nn02270945\tlives in warm moist areas e.g. around furnaces\nn02271222\twingless insect living in dark moist places as under dead tree trunks; they make erratic leaps when disturbed\nn02271570\tan insect of the order Thysanoptera\nn02271897\tany of various small to minute sucking insects with narrow feathery wings if any; they feed on plant sap and many are destructive\nn02272286\tinjurious to growing tobacco and peanuts\nn02272552\tinjurious to onion plants and sometimes tobacco\nn02272871\tany of numerous insects of the order Dermaptera having elongate bodies and slender many-jointed antennae and a pair of large pincers at the rear of the abdomen\nn02273392\tsometimes destructive to cultivated bulbs\nn02274024\tinsect that in the adult state has four wings more or less covered with tiny scales\nn02274259\tdiurnal insect typically having a slender body with knobbed antennae and broad colorful wings\nn02274822\tmedium to large butterflies found worldwide typically having brightly colored wings and much-reduced nonfunctional forelegs carried folded on the breast\nn02275560\tof temperate regions; having dark purple wings with yellow borders\nn02275773\tbrilliantly colored; larvae feed on nettles\nn02276078\tAmerican butterfly having dark brown wings with white and golden orange spots\nn02276258\tany of several brightly colored butterflies\nn02276355\tof temperate Europe and Asia; having black wings with red and white markings\nn02276749\tEurasian butterfly with 
brown wings and white markings\nn02276902\tNorth American butterfly with blue-black wings crossed by a broad white band\nn02277094\tsimilar to the banded purple but with red spots on underwing surfaces\nn02277268\tshowy American butterfly resembling the monarch but smaller\nn02277422\tnymphalid butterfly having angular notches on the outer edges of the forewings\nn02277742\tany of various butterflies belonging to the family Satyridae\nn02278024\tanglewing butterfly with a comma-shaped mark on the underside of each hind wing\nn02278210\tbutterfly with brownish wings marked with black and silver\nn02278463\tbutterfly with silver spots on the underside of the hind wings\nn02278839\tlarge richly colored butterfly\nn02278980\tlarge European butterfly the male of which has wings shaded with purple\nn02279257\tEuropean butterfly having reddish-brown wings each marked with a purple eyespot\nn02279637\tlarge tropical butterfly with degenerate forelegs and an unpleasant taste\nn02279972\tlarge migratory American butterfly having deep orange wings with black and white markings; the larvae feed on milkweed\nn02280458\tany of numerous pale-colored butterflies having three pairs of well-developed legs\nn02280649\twhite butterfly whose larvae (cabbageworms) feed on cabbage\nn02281015\tsmall widely distributed form\nn02281136\tOld World form of cabbage butterfly\nn02281267\tcommon North American form of cabbage butterfly\nn02281406\tany of numerous yellow or orange butterflies\nn02281787\tany of various butterflies of the family Lycaenidae\nn02282257\tany of numerous small butterflies of the family Lycaenidae\nn02282385\tany of various small butterflies of the family Lycaenidae having coppery wings\nn02282553\tcommon copper butterfly of central and eastern North America\nn02282903\tsmall butterflies having striped markings under the wings\nn02283077\tlarvae are pests of various economic plants\nn02283201\ttypically crepuscular or nocturnal insect having a stout body and feathery 
or hairlike antennae\nn02283617\tany of various moths that have powdery wings\nn02283951\tany of numerous small moths having lightly fringed wings; larvae are leaf rollers or live in fruits and galls\nn02284224\tmoth whose larvae form nests by rolling and tying leaves with spun silk\nn02284611\tsmall Indian moth infesting e.g. tea and coffee plants\nn02284884\tCalifornia moth whose larvae live in especially oranges\nn02285179\ta small grey moth whose larvae live in apples and English walnuts\nn02285548\tdull-colored moth whose larvae have tufts of hair on the body and feed on the leaves of many deciduous trees\nn02285801\tlarva of a tussock moth\nn02286089\tEuropean moth introduced into North America; a serious pest of shade trees\nn02286425\tsmall brown and white European moth introduced into eastern United States; pest of various shade and fruit trees\nn02286654\twhite furry-bodied European moth with a yellow tail tuft\nn02287004\tslender-bodied broad-winged moth whose larvae are called measuring worms\nn02287352\tmoth whose larvae are spring cankerworms\nn02287622\tNorth American moth with grey-winged males and wingless females; larvae are fall cankerworms\nn02287799\tgreen caterpillar of a geometrid moth; pest of various fruit and shade trees\nn02287987\tvariably colored looper; larva of Paleacrita vernata\nn02288122\tgreen or brown white-striped looper; larva of Alsophila pometaria\nn02288268\tsmall hairless caterpillar having legs on only its front and rear segments; mostly larvae of moths of the family Geometridae\nn02288789\tusually tropical slender-bodied long-legged moth whose larvae are crop pests\nn02289307\tmoth whose larvae live in and feed on bee honeycombs\nn02289610\tnative to Europe; in America the larvae bore into the stem and crown of corn and other plants\nn02289988\tsmall moth whose larvae damage stored grain and flour\nn02290340\tsmall moth whose larvae feed on tobacco and other dried plant products\nn02290664\ta moth whose larvae feed on and 
mat together with webbing various stored products of vegetable origin\nn02290870\tmoth whose larvae attack dried fruits and cereal products\nn02291220\tsmall dull-colored moth with chewing mouthparts\nn02291572\tsmall yellowish moths whose larvae feed on wool or fur\nn02291748\tany of several small yellowish or buff-colored moths whose larvae eat organic matter e.g. woolens\nn02292085\tthe larvae live in tubes of its food material fastened with silk that it spins\nn02292401\tmoth that forms a web in which it lives\nn02292692\tlarvae feed on carpets and other woolens\nn02293352\tsmall slender-winged moths whose larvae are agricultural pests\nn02293868\tmoth whose larvae feed on grain\nn02294097\tsmall moth whose larvae feed on kernels of stored grains\nn02294407\tgreyish-brown moth whose larva is the potato tuberworm\nn02294577\tlarva of potato moth; mines in leaves and stems of e.g. potatoes and tobacco\nn02295064\tusually dull-colored medium-sized nocturnal moth; the usually smooth-bodied larvae are destructive agricultural pests\nn02295390\tNorth American moth whose larvae feed on young plant stems cutting them off at the ground\nn02295870\tmoth having dull forewings and brightly colored hind wings\nn02296021\tmoth having dull forewings and red-marked hind wings\nn02296276\tEuropean moth with white antler-like markings on the forewings; the larvae damage pastures and grasslands\nn02296612\tmedium-sized moth whose larvae are corn earworms\nn02296912\tlarvae (of a noctuid moth) that travel in large groups and destroy grains and alfalfa in the midwestern states\nn02297294\tmoth whose destructive larvae travel in multitudes\nn02297442\tnoctuid moth larvae that travel in multitudes destroying especially grass and grain\nn02297819\tmoth whose larvae are beet armyworms\nn02297938\tmoth larva that eats foliage of beets and other vegetables\nn02298095\tmoth whose larvae are fall armyworms\nn02298218\tlarva of a migratory American noctuid moth; destroys grasses and small 
grains\nn02298541\tany of various moths with long narrow forewings capable of powerful flight and hovering over flowers to feed\nn02299039\tmoth whose larvae are tobacco hornworms\nn02299157\tlarge green white-striped hawkmoth larva that feeds on tobacco and related plants; similar to tomato hornworm\nn02299378\tmoth whose larvae are tomato hornworms\nn02299505\tlarge green white-striped hawkmoth larva that feeds on tomato and potato plants; similar to tobacco hornworm\nn02299846\tEuropean hawkmoth with markings on the back resembling a human skull\nn02300173\tmoderate-sized Asiatic moth whose larvae feed on mulberry leaves and produce silk\nn02300554\tstocky creamy-white Asiatic moth found almost entirely under human care; the source of most of the silk commerce\nn02300797\tthe commercially bred hairless white caterpillar of the domestic silkworm moth which spins a cocoon that can be processed to yield silk fiber; the principal source of commercial silk\nn02301452\tlarge brightly colored and usually tropical moth; larvae spin silken cocoons\nn02301935\tlarge moth of temperate forests of Eurasia having heavily scaled transparent wings\nn02302244\tlarge American moth having yellow wings with purplish or brownish markings; larvae feed on e.g. 
maple and pine trees\nn02302459\tany silkworm moth of the family Saturniidae\nn02302620\tlarva of a saturniid moth; spins a large amount of strong silk in constructing its cocoon\nn02302969\tlarge pale-green American moth with long-tailed hind wings and a yellow crescent-shaped mark on each forewing\nn02303284\tNorth American silkworm moth; larvae feed on the leaves of forest trees\nn02303585\tlarge Asiatic moth introduced into the United States; larvae feed on the ailanthus\nn02303777\tlarge green silkworm of the cynthia moth\nn02304036\tlarge yellow American moth having a large eyelike spot on each hind wing; the larvae have stinging spines\nn02304432\tvery large yellowish-brown American silkworm moth with large eyespots on hind wings; larvae feed on fruit and shade trees\nn02304657\ta Chinese moth that produces a brownish silk\nn02304797\toriental moth that produces brownish silk\nn02305085\tgiant saturniid moth widespread in Asia; sometimes cultured for silk\nn02305407\tstout-bodied broad-winged moth with conspicuously striped or spotted wings; larvae are hairy caterpillars\nn02305636\tmedium-sized moth with long richly colored and intricately patterned wings; larvae are called woolly bears\nn02305929\tlarge red-and-black European moth; larvae feed on leaves of ragwort; introduced into United States to control ragwort\nn02306433\tmedium-sized stout-bodied neutral-colored moths with comb-like antennae\nn02306825\tmoth having nonfunctional mouthparts as adults; larvae feed on tree foliage and spin egg-shaped cocoons\nn02307176\tmoth whose larvae are tent caterpillars\nn02307325\tthe larvae of moths that build and live in communal silken webs in orchard and shade trees\nn02307515\tmoth whose gregarious larvae spin webs resembling carpets\nn02307681\tlarvae of a gregarious North American moth that spins a web resembling a carpet rather than a tent; serious defoliator of deciduous trees\nn02307910\tmedium-sized hairy moths; larvae are lappet 
caterpillars\nn02308033\tlarva of a lappet moth\nn02308139\tseveral gregarious moth larvae that spin webs over foliage on which they feed\nn02308471\ta variety of moth that spins a web in which it lives\nn02308618\tmoth whose larvae are fall webworms\nn02308735\ta variety of webworm\nn02309120\ta variety of webworm\nn02309242\tan insect or other arthropod between molts\nn02309337\ta wormlike and often brightly colored and hairy or spiny larva of a butterfly or moth\nn02309841\tlarva of the European corn borer moth; a serious pest of maize\nn02310000\tany of various moth caterpillars that destroy cotton bolls\nn02310149\tlarvae of a gelechiid moth introduced from Asia; feeds on the seeds of cotton bolls\nn02310334\tlarva of a noctuid moth; highly destructive to especially corn and cotton and tomato crops\nn02310585\ttoxic green larva of a cabbage butterfly\nn02310717\tcaterpillar of numerous moths characterized by a dense coat of woolly hairs; feed on plants and some are destructive pests\nn02310941\tlarva of moth of the family Arctiidae\nn02311060\tthe immature free-living form of most invertebrates and amphibians and fish which at hatching from the egg is fundamentally unlike its parent and must metamorphose\nn02311617\ta larva of an insect with incomplete metamorphosis (as the dragonfly or mayfly)\nn02311748\tslender transparent larva of eels and certain fishes\nn02312006\ta soft thick wormlike larva of certain beetles and other insects\nn02312175\tthe larva of the housefly and blowfly commonly found in decaying organic matter\nn02312325\ttough-skinned larva of certain crane flies\nn02312427\tan insect in the inactive stage of development (when it is not feeding) intermediate between larva and adult\nn02312640\tpupa of a moth or butterfly enclosed in a cocoon\nn02312912\tan adult insect produced after metamorphosis\nn02313008\tthe only fertile female in a colony of social insects such as bees and ants and termites; its function is to lay 
eggs\nn02313360\thermaphrodite wormlike animal living in mud of the sea bottom\nn02313709\tsessile aquatic animal forming mossy colonies of small polyps each having a curved or circular ridge bearing tentacles; attach to stones or seaweed and reproduce by budding\nn02315487\tmarine animal with bivalve shell having a pair of arms bearing tentacles for capturing food; found worldwide\nn02315821\tsmall unsegmented marine worm that when disturbed retracts its anterior portion into the body giving the appearance of a peanut\nn02316707\tmarine invertebrates with tube feet and five-part radially symmetrical bodies\nn02317335\techinoderms characterized by five arms extending from a central disk\nn02317781\tan animal resembling a starfish with fragile whiplike arms radiating from a small central disc\nn02318167\tany starfish-like animal of the genera Euryale or Astrophyton or Gorgonocephalus having slender complexly branched interlacing arms radiating from a central disc\nn02318687\ta variety of basket star\nn02319095\tshallow-water echinoderms having soft bodies enclosed in thin spiny globular shells\nn02319308\ta sea urchin that can be eaten\nn02319555\tflattened disklike sea urchins that live on sandy bottoms\nn02319829\tsea urchin having a heart-shaped body in a rigid spiny shell\nn02320127\tprimitive echinoderms having five or more feathery arms radiating from a central disk\nn02320465\tcrinoid with delicate radiating arms and a stalked body attached to a hard surface\nn02321170\tfree-swimming stalkless crinoid with ten feathery arms; found on muddy sea bottoms\nn02321529\techinoderm having a flexible sausage-shaped body, tentacles surrounding the mouth and tube feet; free-living mud feeders\nn02322047\tof warm coasts from Australia to Asia; used as food especially by Chinese\nn02322992\tin former classifications considered a suborder of Rodentia coextensive with the order Lagomorpha: gnawing animals\nn02323449\trelative large gnawing animals; distinguished from 
rodents by having two pairs of upper incisors specialized for gnawing\nn02323902\trabbits and hares\nn02324045\tany of various burrowing animals of the family Leporidae having long ears and short tails; some domesticated and raised for pets or food\nn02324431\tthe long ears of a rabbit\nn02324514\tcastrated male rabbit\nn02324587\t(usually informal) especially a young rabbit\nn02324850\tcommon greyish-brown burrowing animal native to southern Europe and northern Africa but introduced elsewhere; widely domesticated and developed in various colors and for various needs; young are born naked and helpless\nn02325366\tcommon small rabbit of North America having greyish or brownish fur and a tail with a white underside; a host for Ixodes pacificus and Ixodes scapularis (Lyme disease ticks)\nn02325722\twidely distributed in United States except northwest and far west regions\nn02325884\ta wood rabbit of southeastern United States swamps and lowlands\nn02326074\ta wood rabbit of marshy coastal areas from North Carolina to Florida\nn02326432\tswift timid long-eared mammal larger than a rabbit having a divided upper lip and long hind legs; young born furred and with open eyes\nn02326763\ta young hare especially one in its first year\nn02326862\tlarge hare introduced in North America; does not turn white in winter\nn02327028\tlarge hare of western North America\nn02327175\tlargest hare of northern plains and western mountains of United States; brownish-grey in summer and pale grey in winter; tail nearly always all white\nn02327435\tthe common jackrabbit of grasslands and open areas of western United States; has large black-tipped ears and black streak on the tail\nn02327656\ta large hare of northern North America; it is almost completely white in winter\nn02327842\tlarge large-footed North American hare; white in winter\nn02328009\tred breed of domestic rabbits; hybrid between Old World rabbit and hare\nn02328150\tdomestic breed of rabbit with long white silky 
hair\nn02328429\tsmall short-eared burrowing mammal of rocky uplands of Asia and western North America\nn02328820\tNorth American pika\nn02328942\tsimilar to little chief hare and may be same species\nn02329401\trelatively small placental mammals having a single pair of constantly growing incisor teeth specialized for gnawing\nn02330245\tany of numerous small rodents typically resembling diminutive rats having pointed snouts and small ears on elongated bodies with slender usually hairless tails\nn02331046\tany of various long-tailed rodents similar to but larger than a mouse\nn02331309\tany of various rodents with cheek pouches\nn02331842\ta rodent that is a member of the family Muridae\nn02332156\tbrownish-grey Old World mouse now a common household pest worldwide\nn02332447\tsmall reddish-brown Eurasian mouse inhabiting e.g. cornfields\nn02332755\tany nocturnal Old World mouse of the genus Apodemus inhabiting woods and fields and gardens\nn02332954\ta mouse with a genetic defect that prevents them from growing hair and also prevents them from immunologically rejecting human cells and tissues; widely used in preclinical trials\nn02333190\tnocturnal yellowish-brown mouse inhabiting woods and fields and gardens\nn02333546\tcommon domestic rat; serious pest worldwide\nn02333733\tbrown rat that infests wharves\nn02333819\tbrown rat commonly found in sewers\nn02333909\tcommon household pest originally from Asia that has spread worldwide\nn02334201\tburrowing scaly-tailed rat of India and Ceylon\nn02334460\tlarge Australian rat with hind legs adapted for leaping\nn02334728\tleaping rodent of Australian desert areas\nn02335127\tany of various amphibious rats\nn02335231\tamphibious rat of Australia and New Guinea\nn02336011\ta variety of rodent\nn02336275\tany of several small greyish New World mice inhabiting e.g. 
grain fields\nn02336641\tany of various New World woodland mice\nn02336826\tAmerican woodland mouse with white feet and underparts\nn02337001\tbrownish New World mouse; most widely distributed member of the genus\nn02337171\tburrowing mouse of desert areas of southwestern United States\nn02337332\tlarge dark mouse of southeastern United States\nn02337598\tvery small dark greyish brown mouse resembling a house mouse; of Texas and Mexico\nn02337902\tinsectivorous mouse of western North America\nn02338145\tbeaver-like aquatic rodent of North America with dark glossy brown fur\nn02338449\tof Florida wetlands\nn02338722\tdestructive long-haired burrowing rat of southern North America and Central America\nn02338901\tany of various small short-tailed rodents of the northern hemisphere having soft fur grey above and white below with furred tails and large ears; some are hosts for Ixodes pacificus and Ixodes scapularis (Lyme disease ticks)\nn02339282\ta wood rat with dusky feet\nn02339376\tany of various small mouselike rodents of the family Cricetidae (especially of genus Microtus) having a stout short-tailed body and inconspicuous ears and inhabiting fields or meadows\nn02339922\tany of several bushy-tailed rodents of the genus Neotoma of western North America; hoards food and other objects\nn02340186\thost to Lyme disease tick (Ixodes pacificus) in northern California\nn02340358\tlarge greyish-brown wood rat of the southeastern United States\nn02340640\thardy agile rat of grassy marshes of Mexico and the southeastern United States\nn02340930\tshort-tailed glossy-furred burrowing vole of the eastern United States\nn02341288\twidely distributed in grasslands of northern United States and Canada\nn02341475\tof western North America\nn02341616\ttypical vole of the extended prairie region of central United States and southern Canada\nn02341974\tcommon large Eurasian vole\nn02342250\tany of several voles of mountainous regions of Eurasia and America\nn02342534\tany of several 
vole-like terrestrial or arboreal rodents of cold forested regions of Canada and western United States\nn02342885\tshort-tailed Old World burrowing rodent with large cheek pouches\nn02343058\ta variety of hamster common to Europe and Asia\nn02343320\tsmall light-colored hamster often kept as a pet\nn02343772\tsmall Old World burrowing desert rodent with long soft pale fur and hind legs adapted for leaping\nn02344175\tgerbil of northern Africa\nn02344270\ta gerbil that is popular as a pet\nn02344408\tsouthern European gerbil\nn02344528\tany of various short-tailed furry-footed rodents of circumpolar distribution\nn02344918\tnotable for mass migrations even into the sea where many drown\nn02345078\tof northwestern Canada and Alaska\nn02345340\tOld World lemming\nn02345600\tNorth American lemming having a white winter coat and some claws much enlarged\nn02345774\tof northern Canada\nn02345997\tof low bogs and meadows of northeastern and central United States and southern Canada\nn02346170\tof wet alpine and subalpine meadows of Canada and Alaska\nn02346627\trelatively large rodents with sharp erectile bristles mingled with the fur\nn02346998\tterrestrial porcupine\nn02347274\tporcupine with a tuft of large beaded bristles on the tail\nn02347573\tporcupine of Borneo and Sumatra having short spines and a long tail\nn02347744\tarboreal porcupine\nn02348173\tporcupine of northeastern North America with barbed spines concealed in the coarse fur; often gnaws buildings for salt and grease\nn02348788\tany of various small nocturnal burrowing desert rodents with cheek pouches and long hind legs and tail\nn02349205\tsmall pale yellowish soft-furred rodent of southwestern United States and Mexico\nn02349390\tsmall rodent of open areas of United States plains states\nn02349557\tlarge stiff-haired rodent of shortgrass prairies of United States\nn02349847\tlarge pocket mouse of Mexico\nn02350105\tany of various leaping rodents of desert regions of North America and Mexico; largest 
members of the family Heteromyidae\nn02350357\tmost widely distributed kangaroo rat: plains and mountain areas of central and western United States\nn02350670\tsmall silky-haired pouched rodent; similar to but smaller than kangaroo rats\nn02350989\tany of several primitive mouselike rodents with long hind legs and no cheek pouches; of woodlands of Eurasia and North America\nn02351343\twidely distributed in northeastern and central United States and Canada\nn02351870\tmouselike jumping rodent\nn02352002\tsmall nocturnal jumping rodent with long hind legs; of arid parts of Asia and northern Africa\nn02352290\ta variety of jerboa\nn02352591\tsmall furry-tailed squirrel-like Old World rodent that becomes torpid in cold weather\nn02352932\tlarge European dormouse\nn02353172\ta variety of dormouse\nn02353411\tdormouse of southern Europe and northern Africa\nn02353861\tburrowing rodent of the family Geomyidae having large external cheek pouches; of Central America and southwestern North America\nn02354162\tgopher of chiefly grasslands of central North America\nn02354320\tgopher of Alabama and Georgia and Florida\nn02354621\tof valleys and mountain meadows of western United States\nn02354781\tgreyish to brown gopher of western and central United States\nn02355227\ta kind of arboreal rodent having a long bushy tail\nn02355477\tany typical arboreal squirrel\nn02356381\tcommon medium-large squirrel of eastern North America; now introduced into England\nn02356612\tlarge grey squirrel of far western areas of United States\nn02356798\texceptionally large arboreal squirrel of eastern United States\nn02356977\tfox squirrel or grey squirrel in the black color phase\nn02357111\tcommon reddish-brown squirrel of Europe and parts of Asia\nn02357401\tof northern United States and Canada\nn02357585\tfar western United States counterpart of the red squirrel\nn02357911\tsmall ground squirrel of western United States\nn02358091\tany of various terrestrial burrowing rodents of Old and New 
Worlds; often destroy crops\nn02358390\tcommon black-striped reddish-brown ground squirrel of western North America; resembles a large chipmunk\nn02358584\trather large central Eurasian ground squirrel\nn02358712\tof sagebrush and grassland areas of western United States and Canada\nn02358890\tlarge grey ground squirrel of rocky areas of the southwestern United States\nn02359047\tlarge ground squirrel of the North American far north\nn02359324\tany of several rodents of North American prairies living in large complex burrows having a barking cry\nn02359556\ttail is black tipped\nn02359667\ttail is white tipped\nn02359915\tsmall striped semiterrestrial eastern American squirrel with cheek pouches\nn02360282\ta burrowing ground squirrel of western America and Asia; has cheek pouches and a light and dark stripe running down the body\nn02360480\tterrestrial Siberian squirrel\nn02360781\tNew World flying squirrels\nn02360933\tsmall large-eyed nocturnal flying squirrel of eastern United States\nn02361090\tlarge flying squirrel; chiefly of Canada\nn02361337\tstocky coarse-furred burrowing rodent with a short bushy tail found throughout the northern hemisphere; hibernates in winter\nn02361587\treddish brown North American marmot\nn02361706\tlarge North American mountain marmot\nn02361850\theavy-bodied yellowish-brown marmot of rocky areas of western North America\nn02362194\tnocturnal rodent of Asia having furry folds of skin between forelegs and hind legs enabling it to move by gliding leaps\nn02363005\tlarge semiaquatic rodent with webbed hind feet and a broad flat tail; construct complex dams and underwater lodges\nn02363245\ta European variety of beaver\nn02363351\ta variety of beaver found in almost all areas of North America except Florida\nn02363996\tbulky nocturnal burrowing rodent of uplands of the Pacific coast of North America; the most primitive living rodent\nn02364520\tshort-tailed rough-haired South American rodent\nn02364673\tstout-bodied nearly tailless 
domesticated cavy; often kept as a pet and widely used in research\nn02364840\tSouth American cavy; possibly ancestral to the domestic guinea pig\nn02365108\thare-like rodent of the pampas of Argentina\nn02365480\tpig-sized tailless South American amphibious rodent with partly webbed feet; largest living rodent\nn02366002\tagile long-legged rabbit-sized rodent of Central America and South America and the West Indies; valued as food\nn02366301\tlarge burrowing rodent of South America and Central America; highly esteemed as food\nn02366579\trodent of mountains of western South America\nn02366959\taquatic South American rodent resembling a small beaver; bred for its fur\nn02367492\tsmall rodent with soft pearly grey fur; native to the Andes but bred in captivity for fur\nn02367812\ta rodent native to the mountains of Chile and Peru and now bred in captivity\nn02368116\tgregarious burrowing rodent larger than the chinchillas\nn02368399\tratlike rodent with soft fur and large ears of the Andes\nn02368821\tfurry short-limbed tailless rodent resembling a true mole in habits and appearance; of eastern Europe and Middle East\nn02369293\tAfrican rodent resembling a mole in habits and appearance\nn02369555\tsmall nearly naked African mole rat of desert areas\nn02369680\tfetal-looking colonial rodent of East Africa; neither mole nor rat; they feed on tubers and have a social structure similar to that of honeybees and termites\nn02369935\tan especially large mole rat and the only member of a colony of naked mole rats to bear offspring which are sired by only a few males\nn02370137\tcolonial mole rat of western Africa; similar to naked mole rat\nn02370525\tin former classifications a major division of Mammalia comprising all hoofed mammals; now divided into the orders Perissodactyla (odd-toed ungulates) and Artiodactyla (even-toed ungulates)\nn02370806\tany of a number of mammals with hooves that are superficially similar but not necessarily closely related 
taxonomically\nn02371344\ta mammal having nails or claws\nn02372140\ta variety of dinocerate\nn02372584\tany of several small ungulate mammals of Africa and Asia with rodent-like incisors and feet with hooflike toes\nn02372952\thyrax that lives in rocky areas\nn02373336\tplacental mammals having hooves with an odd number of toes on each foot\nn02374149\thoofed mammals having slender legs and a flat coat with a narrow mane along the back of the neck\nn02374451\tsolid-hoofed herbivorous quadruped domesticated since prehistoric times\nn02375302\ta horse having a brownish coat thickly sprinkled with white or gray\nn02375438\ta horse stabled with another or one of several horses owned by the same person\nn02375757\ta word for horse used by children or in adult slang\nn02375862\tearliest horse; extinct primitive dog-sized four-toed Eocene animal\nn02376542\ta young horse\nn02376679\ta young female horse under the age of four\nn02376791\ta young male horse under the age of four\nn02376918\tthe male of species Equus caballus\nn02377063\ta colt with undescended testicles\nn02377181\tuncastrated adult male horse\nn02377291\tadult male horse kept for breeding\nn02377388\tcastrated male horse\nn02377480\tfemale equine animal\nn02377603\ta female horse used for breeding\nn02377703\ta lightweight horse kept for riding only\nn02378149\ta fresh horse especially (formerly) to replace one killed or injured in battle\nn02378299\tespecially a light saddle horse for a woman\nn02378415\thorse used in war\nn02378541\thorse trained for battle\nn02378625\tformerly a strong swift horse ridden into battle\nn02378755\t(literary) a spirited horse for state or war\nn02378870\ta mettlesome or fiery horse\nn02378969\ta saddle horse used for transportation rather than sport etc.\nn02379081\ta light saddle horse trained for herding cattle\nn02379183\ta small powerful horse originally bred for sprinting in quarter-mile races in Virginia\nn02379329\tan American breed of small compact saddle 
horses\nn02379430\ta horse marked by stamina and trained to move at a fast running walk\nn02379630\ta high-stepping horse originating in Kentucky\nn02379743\ta hardy breed of saddle horse developed in western North America and characteristically having a spotted rump\nn02379908\ta spirited graceful and intelligent riding horse native to Arabia\nn02380052\ta compact and sturdy saddle horse that is bred and trained in Vienna; smart and docile and excellent for dressage\nn02380335\ta range horse of the western United States\nn02380464\ta small agile horse specially bred and trained for playing polo\nn02380583\tsmall hardy range horse of the western plains descended from horses brought by the Spanish\nn02380745\tan unbroken or imperfectly broken mustang\nn02380875\ta wild horse that is vicious and difficult or impossible to break in\nn02381004\thorse of a light yellowish dun color with dark mane and tail\nn02381119\tan emaciated horse likely soon to become carrion and so attractive to crows\nn02381261\thorse of a dull brownish grey color\nn02381364\thorse of a light gray or whitish color\nn02381460\tundomesticated or feral domestic horse\nn02381609\tEuropean wild horse extinct since the early 20th century\nn02381831\twild horse of central Asia that resembles an ass; now endangered\nn02382039\ta small native range horse\nn02382132\ta horse kept for hire\nn02382204\tan old or over-worked horse\nn02382338\ta horse used to pull a plow\nn02382437\tany of various breeds of small gentle horses usually less than five feet high at the shoulder\nn02382635\tbreed of very small pony with long shaggy mane and tail\nn02382750\tbreed of small ponies originally from Wales\nn02382850\tstocky breed of pony with a fawn-colored nose\nn02382948\ta horse bred for racing\nn02383231\ta racehorse belonging to a breed that originated from a cross between Arabian stallions and English mares\nn02384741\ta horse trained to run in steeplechases\nn02384858\tan animal that races\nn02385002\tan animal 
that wins in a contest of speed\nn02385098\tan informal term for a racehorse\nn02385214\ta racehorse considered one year old until the second Jan. 1 following its birth\nn02385580\ta racehorse about which little is known\nn02385676\ta racehorse that runs well on a muddy racetrack\nn02385776\ta horse that fails to run in a race for which it has been entered\nn02385898\ta horse behind which a hunter hides while stalking game\nn02386014\thorse used for pulling vehicles\nn02386141\tstocky short-legged harness horse\nn02386224\ta compact breed of harness horse\nn02386310\ta horse used for plowing and hauling and other heavy labor\nn02386496\thorse adapted for drawing heavy loads\nn02386746\ta workhorse used as a pack animal\nn02386853\tdraft horse kept for pulling carts\nn02386968\theavy feathered-legged breed of draft horse originally from Scotland\nn02387093\tone of a breed of grey or black draft horses originally used in France to draw heavy coaches or artillery\nn02387254\ta quiet plodding workhorse\nn02387346\tBritish breed of large heavy draft horse\nn02387452\ta draft horse harnessed alongside the shaft or pole of a vehicle\nn02387722\ta horse kept at an inn or post house for use by mail carriers or for rent to travelers\nn02387887\tstrong draft horse for drawing coaches\nn02387983\ta horse trained to a special gait in which both feet on one side leave the ground together\nn02388143\ta horse used to set the pace in racing\nn02388276\ta horse trained to trot; especially a horse trained for harness racing\nn02388453\tthe horse having a starting position next to the inside rail in a harness race\nn02388588\ta horse trained to lift its feet high off the ground while walking or trotting\nn02388735\ta dark golden-brown or reddish-brown horse\nn02388832\ta solid dark brown horse\nn02388917\ta horse of a moderate reddish-brown color\nn02389026\ta horse of a brownish orange to light brown color\nn02389128\ta horse of light tan or golden color with cream-colored or white 
mane and tail\nn02389261\ta spotted or calico horse or pony\nn02389346\thardy and sure-footed animal smaller and with longer ears than the horse\nn02389559\tdomestic beast of burden descended from the African wild ass; patient but stubborn\nn02389779\tsmall donkey used as a pack animal\nn02389865\tBritish informal for donkey\nn02389943\tmale donkey\nn02390015\tfemale donkey\nn02390101\thybrid offspring of a male donkey and a female horse; usually sterile\nn02390258\thybrid offspring of a male horse and a female donkey or ass; usually sterile\nn02390454\tany of several equine mammals of Asia and northeast Africa\nn02390640\ta wild ass of Africa\nn02390738\twild ass of Tibet and Mongolia\nn02390834\tAsiatic wild ass\nn02390938\tMongolian wild ass\nn02391049\tany of several fleet black-and-white striped African equines\nn02391234\tof the plains of central and eastern Africa\nn02391373\tnarrow-striped nearly extinct zebra of southern Africa\nn02391508\tzebra with less continuous stripes\nn02391617\tmammal of South Africa that resembled a zebra; extinct since late 19th century\nn02391994\tmassive powerful herbivorous odd-toed ungulate of southeast Asia and Africa having very thick skin and one or two horns on the snout\nn02392434\thaving one horn\nn02392555\textinct thick-haired species of Arctic regions\nn02392824\tlarge light-grey African rhinoceros having two horns; endangered; sometimes placed in genus Diceros\nn02393161\tAfrican rhino; in danger of extinction\nn02393580\tlarge inoffensive chiefly nocturnal ungulate of tropical America and southeast Asia having a heavy body and fleshy snout\nn02393807\ta tapir found in South America and Central America\nn02393940\ta tapir found in Malaya and Sumatra\nn02394477\tplacental mammal having hooves with an even number of functional toes on each foot\nn02395003\tstout-bodied short-legged omnivorous animals\nn02395406\tdomestic swine\nn02395694\ta young pig\nn02395855\tan unweaned piglet\nn02395931\ta pig fattened to provide 
meat\nn02396014\tan uncastrated male hog\nn02396088\tan adult female hog\nn02396157\ta mongrel hog with a thin body and long legs and a ridged back; a wild or semi-wild descendant of improved breeds; found chiefly in the southeastern United States\nn02396427\tOld World wild swine having a narrow body and prominent tusks from which most domestic swine come; introduced in United States\nn02396796\tIndonesian wild pig with enormous curved canine teeth\nn02397096\tAfrican wild swine with warty protuberances on the face and large protruding tusks\nn02397529\tnocturnal gregarious pig-like wild animals of North America and South America\nn02397744\tdark grey peccary with an indistinct white collar; of semi desert areas of Mexico and southwestern United States\nn02397987\tblackish peccary with whitish cheeks; larger than the collared peccary\nn02398521\tmassive thick-skinned herbivorous animal living in or around rivers of tropical Africa\nn02399000\tany of various cud-chewing hoofed mammals having a stomach divided into four (occasionally three) compartments\nn02401031\thollow-horned ruminants\nn02402010\tany of various members of the genus Bos\nn02402175\tany of various wild bovines especially of the genera Bos or closely related Bibos\nn02402425\tdomesticated bovine animals as a group regardless of sex or age\nn02403003\tan adult castrated bull of the genus Bos; especially Bos taurus\nn02403153\tyearling heifer or bullock\nn02403231\tcastrated bull\nn02403325\tuncastrated adult male of domestic cattle\nn02403454\tfemale of domestic cattle\nn02403740\tyoung cow\nn02403820\tyoung bull\nn02403920\tmotherless calf in a range herd of cattle\nn02404028\tan unbranded range animal (especially a stray calf); belongs to the first person who puts a brand on it\nn02404186\tcattle that are reared for their meat\nn02404432\tlong-horned beef cattle formerly common in southwestern United States\nn02404573\tany of several breeds of Indian cattle; especially a large American heat and 
tick resistant greyish humped breed evolved in the Gulf States by interbreeding Indian cattle and now used chiefly for crossbreeding\nn02404906\tdomesticated ox having a humped back and long horns and a large dewlap; used chiefly as a draft animal in India and east Asia\nn02405101\tlarge recently extinct long-horned European wild ox; considered one of the ancestors of domestic cattle\nn02405302\tlarge long-haired wild ox of Tibet often domesticated\nn02405440\twild ox of the Malay Archipelago\nn02405577\ta breed of dual-purpose cattle developed in Wales\nn02405692\thornless short-haired breed of beef and dairy cattle\nn02405799\tBrahman and shorthorn crossbreed of red cattle; hardy in hot regions\nn02405929\tblack hornless breed from Scotland\nn02406046\ttall large-horned humped cattle of South Africa; used for meat or draft\nn02406174\tcattle that are reared for their milk\nn02406432\thardy breed of dairy cattle from Ayr, Scotland\nn02406533\tlarge hardy brown breed of dairy cattle from Switzerland\nn02406647\tlarge white or cream-colored breed from France\nn02406749\ta breed of dairy cattle developed on the island of Jersey\nn02406859\tred dual-purpose cattle of English origin\nn02406952\ta variety of cattle produced by crossbreeding with a superior breed\nn02407071\tEnglish breed of short-horned cattle\nn02407172\tbreed evolved from shorthorn beef cattle\nn02407276\tbreed of hardy black chiefly beef cattle native to Scotland\nn02407390\ta breed of dairy cattle from northern Holland\nn02407521\tbreed of dairy cattle from the island of Guernsey\nn02407625\thardy English breed of dairy cattle raised extensively in United States\nn02407763\thardy breed of cattle resulting from crossing domestic cattle with the American buffalo; yields leaner beef than conventional breeds\nn02407959\tany of several Old World animals resembling oxen including, e.g., water buffalo; Cape buffalo\nn02408429\tan Asian buffalo that is often domesticated for use as a draft 
animal\nn02408660\tupland buffalo of eastern Asia where true water buffaloes do not thrive; used for draft and milk\nn02408817\twater buffalo of the Philippines\nn02409038\tsmall buffalo of the Celebes having small straight horns\nn02409202\tsmall buffalo of Mindoro in the Philippines\nn02409508\tlarge often savage buffalo of southern Africa having upward-curving horns; mostly in game reserves\nn02409870\tgenus of Asiatic wild oxen\nn02410011\twild ox of mountainous areas of eastern India\nn02410141\tox of southeast Asia sometimes considered a domesticated breed of the gaur\nn02410509\tany of several large humped bovids having shaggy manes and large heads and short horns\nn02410702\tlarge shaggy-haired brown bison of North American plains\nn02410900\tEuropean bison having a smaller and higher head than the North American bison\nn02411206\tlarge shaggy-coated bovid mammal of Canada and Greenland; intermediate in size and anatomy between an ox and a sheep\nn02411705\twoolly usually horned ruminant mammal related to the goat\nn02411999\tfemale sheep\nn02412080\tuncastrated adult male sheep\nn02412210\tmale sheep especially a castrated one\nn02412440\tyoung sheep\nn02412629\ta very young lamb\nn02412700\tchild's word for a sheep or lamb\nn02412787\ta sheep up to the age of one year; one yet to be sheared\nn02412909\ttwo-year-old sheep\nn02412977\ta karakul lamb\nn02413050\tsheep with a black coat\nn02413131\tany of various breeds raised for wool or edible meat or skin\nn02413484\tsheep with long wool originating in the Cotswold Hills\nn02413593\tBritish breed of hornless dark-faced domestic sheep\nn02413717\tlong-wooled mutton sheep originally from Lincolnshire\nn02413824\thorned sheep of Devon; valued for mutton\nn02413917\thardy hornless sheep of the Cheviot Hills noted for its short thick wool\nn02414043\thardy coarse-haired sheep of central Asia; lambs are valued for their soft curly black fur\nn02414209\ta domestic long-wool sheep\nn02414290\twhite sheep 
originating in Spain and producing a heavy fleece of exceptional quality\nn02414442\thardy sheep developed from the merino producing both good mutton and fine wool\nn02414578\tundomesticated sheep\nn02414763\twild sheep of semidesert regions in central Asia\nn02414904\tAsiatic wild sheep with exceptionally large horns; sometimes considered a variety of the argali (or Ovis ammon)\nn02415130\tbearded reddish sheep of southern Asia\nn02415253\tlarge white wild sheep of northwestern Canada and Alaska\nn02415435\tany wild sheep inhabiting mountainous regions\nn02415577\twild sheep of mountainous regions of western North America having massive curled horns\nn02415829\twild mountain sheep of Corsica and Sardinia\nn02416104\twild sheep of northern Africa\nn02416519\tany of numerous agile ruminants related to sheep but having a beard and straight horns\nn02416820\tyoung goat\nn02416880\tmale goat\nn02416964\tfemale goat\nn02417070\tany of various breeds of goat raised for milk or meat or wool\nn02417242\tHimalayan goat having a silky undercoat highly prized as cashmere wool\nn02417387\ta domestic breed of goat raised for its long silky hair which is the true mohair\nn02417534\tundomesticated goat\nn02417663\twild goat of Iran and adjacent regions\nn02417785\tlarge Himalayan goat with large spiraled horns\nn02417914\twild goat of mountain areas of Eurasia and northern Africa having large recurved horns\nn02418064\tbovid related to goats but having antelope-like features: mountain goats; gorals; serows; chamois; gnu goats\nn02418465\tsure-footed mammal of mountainous northwestern North America\nn02418770\tsmall goat antelope with small conical horns; of southern Asian mountains\nn02419056\tshort-horned dark-coated goat antelope of mountain areas of southern and southeastern Asia\nn02419336\thoofed mammal of mountains of Eurasia having upright horns with backward-hooked tips\nn02419634\tlarge heavily built goat antelope of eastern Himalayan area\nn02419796\tgraceful Old World 
ruminant with long legs and horns directed upward and backward; includes gazelles; springboks; impalas; addax; gerenuks; blackbucks; dik-diks\nn02420509\tcommon Indian antelope with a dark back and spiral horns\nn02420828\tslender East African antelope with slim neck and backward-curving horns\nn02421136\tlarge antelope with lightly spiraled horns of desert regions of northern Africa\nn02421449\tlarge African antelope having a head with horns like an ox and a long tufted tail\nn02421792\tany of several small antelopes of eastern Africa of the genus Madoqua; the size of a large rabbit\nn02422106\ta large African antelope with lyre-shaped horns that curve backward\nn02422391\ta large South African antelope; considered the swiftest hoofed mammal\nn02422699\tAfrican antelope with ridged curved horns; moves with enormous leaps\nn02423022\tsmall swift graceful antelope of Africa and Asia having lustrous eyes\nn02423218\tEast African gazelle; the smallest gazelle\nn02423362\ta kind of gazelle\nn02423589\ta South African gazelle noted for springing lightly into the air\nn02424085\tlarge forest antelope of central Africa having a reddish-brown coat with white stripes and spiral horns\nn02424305\teither of two spiral-horned antelopes of the African bush\nn02424486\ta variety of kudu\nn02424589\ta smaller variety of kudu\nn02424695\tany of several antelopes of the genus Tragelaphus having striped markings resembling a harness\nn02424909\tspiral-horned South African antelope with a fringe of white hairs along back and neck\nn02425086\tshaggy antelope of mountains of Ethiopia\nn02425228\tantelope with white markings like a harness and twisted horns\nn02425532\tlarge Indian antelope; male is blue-grey with white markings; female is brownish with no horns\nn02425887\tlarge black East African antelope with sharp backward-curving horns\nn02426176\tgoat-like antelope of central Eurasia having a stubby nose like a proboscis\nn02426481\tsmall plains antelope of southeastern 
Africa\nn02426813\teither of two large African antelopes of the genus Taurotragus having short spirally twisted horns in both sexes\nn02427032\tdark fawn-colored eland of southern and eastern Africa\nn02427183\tlarge dark striped eland of western equatorial Africa\nn02427470\tan orange-brown antelope of southeast Africa\nn02427576\ttawny-colored African antelope inhabiting wet grassy plains; a threatened species\nn02427724\tany of several large African antelopes of the genus Kobus having curved ridged horns and frequenting e.g. swamps and rivers\nn02428089\tan African antelope closely related to the waterbuck\nn02428349\tlarge African antelope with long straight nearly upright horns\nn02428508\tlarge South African oryx with a broad black band along its flanks\nn02428842\tcow-like creature with the glossy coat of a horse and the agility of a goat and the long horns of an antelope; characterized as a cow that lives the life of a goat\nn02429456\tfleet antelope-like ruminant of western North American plains with small branched horns\nn02430045\tdistinguished from Bovidae by the male's having solid deciduous antlers\nn02430559\tadult male deer\nn02430643\tstag with antlers of 12 or more branches\nn02430748\tmale deer in his second year\nn02430830\ta young deer\nn02431122\tcommon deer of temperate Europe and Asia\nn02431337\ta male deer, especially an adult male red deer\nn02431441\ta female deer, especially an adult female red deer\nn02431542\tmale red deer in its second year\nn02431628\ta deer of southern Asia with antlers that have three tines\nn02431785\tlarge North American deer with large much-branched antlers in the male\nn02431976\tsmall deer of Japan with slightly forked antlers\nn02432291\tcommon North American deer; tail has a white underside\nn02432511\tlong-eared deer of western North America with two-pronged antlers\nn02432704\tmule deer of western Rocky Mountains\nn02432983\tlarge northern deer with enormous flattened antlers in the male; called `elk' in 
Europe and `moose' in North America\nn02433318\tsmall Eurasian deer\nn02433546\tsmall graceful deer of Eurasian woodlands having small forked antlers\nn02433729\tmale roe deer\nn02433925\tArctic deer with large antlers in both sexes; called `reindeer' in Eurasia and `caribou' in North America\nn02434190\tany of several large caribou living in coniferous forests of southern Canada; in some classifications included in the species Rangifer tarandus\nn02434415\tof tundra of northern Canada; in some classifications included in the species Rangifer tarandus\nn02434712\tsmall South American deer with unbranched antlers\nn02434954\tsmall Asian deer with small antlers and a cry like a bark\nn02435216\tsmall heavy-limbed upland deer of central Asia; male secretes valued musk\nn02435517\tlarge Chinese deer surviving only in domesticated herds\nn02435853\tvery small hornless deer-like ruminant of tropical Asia and west Africa\nn02436224\tsmall chevrotain of southeastern Asia\nn02436353\tchevrotain somewhat larger than the kanchil; found in India and Malaya\nn02436645\tlargest chevrotain; of marshy areas of west Africa\nn02437136\tcud-chewing mammal used as a draft or saddle animal in desert regions\nn02437312\tone-humped camel of the hot deserts of northern Africa and southwestern Asia\nn02437482\ttwo-humped camel of the cold deserts of central Asia\nn02437616\twild or domesticated South American cud-chewing animal related to camels but smaller and lacking a hump\nn02437971\tused in the Andes as a beast of burden and source of wool; considered a domesticated variety of the guanaco\nn02438173\twild llama\nn02438272\tdomesticated llama with long silky fleece; believed to be a domesticated variety of the guanaco\nn02438580\tsmall wild cud-chewing Andean animal similar to the guanaco but smaller; valued for its fleecy undercoat\nn02439033\ttallest living quadruped; having a spotted coat and small horns and very long neck and legs; of savannahs of tropical 
Africa\nn02439398\tsimilar to the giraffe but smaller with much shorter neck and stripe on the legs\nn02441326\tfissiped fur-bearing carnivorous mammals\nn02441942\tsmall carnivorous mammal with short legs and elongated body and neck\nn02442172\tmustelid of northern hemisphere in its white winter coat\nn02442336\tthe ermine in its brown summer coat with black-tipped tail\nn02442446\tof Canada and northeastern United States\nn02442572\tof Europe\nn02442668\tthe common American weasel distinguished by large size and black-tipped tail\nn02442845\tslender-bodied semiaquatic mammal having partially webbed feet; valued for its fur\nn02443015\tusually rich dark brown\nn02443114\tdark brown mustelid of woodlands of Eurasia that gives off an unpleasant odor when threatened\nn02443346\tdomesticated albino variety of the European polecat bred for hunting rats and rabbits\nn02443484\tmusteline mammal of prairie regions of United States; nearly extinct\nn02443808\tsouthern African weasel\nn02443959\tsmall slender burrowing muishond with white top of the head\nn02444251\tferret-sized muishond often tamed\nn02444819\tfreshwater carnivorous mammal having webbed and clawed feet and dark brown fur\nn02445004\tsociable aquatic animal widely distributed along streams and lake borders in North America\nn02445171\totter found in Europe and Asia\nn02445394\tlarge marine otter of northern Pacific coasts having very thick dark brown fur\nn02445715\tAmerican musteline mammal typically ejecting an intensely malodorous fluid when startled; in some classifications put in a separate subfamily Mephitinae\nn02446206\tmost common and widespread North American skunk\nn02446352\tof Mexico and southernmost parts of southwestern United States\nn02446645\tlarge naked-muzzled skunk with white back and tail; of southwestern North America and Mexico\nn02447021\tsmall skunk with a marbled black and white coat; of United States and Mexico\nn02447366\tsturdy carnivorous burrowing mammal with strong claws; 
widely distributed in the northern hemisphere\nn02447762\ta variety of badger native to America\nn02448060\ta variety of badger native to Europe and Asia\nn02448318\tnocturnal badger-like carnivore of wooded regions of Africa and southern Asia\nn02448633\tsmall ferret-like badger of southeast Asia\nn02448885\tsoutheast Asian badger with a snout like a pig\nn02449183\tstocky shaggy-coated North American carnivorous mammal\nn02449350\tmusteline mammal of northern Eurasia\nn02449699\tcarnivore of Central America and South America resembling a weasel with a greyish-white back and dark underparts\nn02450034\tagile slender-bodied arboreal mustelids somewhat larger than weasels\nn02450295\tdark brown marten of northern Eurasian coniferous forests\nn02450426\tmarten of northern Asian forests having luxuriant dark brown fur\nn02450561\tvalued for its fur\nn02450677\tEurasian marten having a brown coat with pale breast and throat\nn02450829\tlarge dark brown North American arboreal carnivorous mammal\nn02451125\tlarge yellow and black marten of southern China and Burma\nn02451415\tlong-tailed arboreal mustelid of Central America and South America\nn02451575\tanimals that exist only in fiction (usually in children's stories)\nn02453108\tany of various nonruminant hoofed mammals having very thick skin: elephant; rhinoceros; hippopotamus\nn02453611\tprimitive terrestrial mammal with few if any teeth; of tropical Central America and South America\nn02454379\tburrowing chiefly nocturnal mammal with body covered with strong horny plates\nn02454794\thaving nine hinged bands of bony plates; ranges from Texas to Paraguay\nn02455135\tSouth American armadillo with three bands of bony plates\nn02455428\tnaked-tailed armadillo of tropical South America\nn02455720\tArgentine armadillo with six movable bands and hairy underparts\nn02456008\tabout three feet long exclusive of tail\nn02456275\tvery small Argentine armadillo with pale silky hair and pink plates on head and 
neck\nn02456962\tany of several slow-moving arboreal mammals of South America and Central America; they hang from branches back downward and feed on leaves and fruits\nn02457408\ta sloth that has three long claws on each forefoot and each hindfoot\nn02457945\trelatively small fast-moving sloth with two long claws on each front foot\nn02458135\ta sloth of Central America that has two long claws on each forefoot and three long claws on each hindfoot\nn02458517\ta large extinct ground sloth\nn02459190\ta variety of extinct edentate\nn02460009\tany of several tropical American mammals of the family Myrmecophagidae which lack teeth and feed on ants and termites\nn02460451\tlarge shaggy-haired toothless anteater with long tongue and powerful claws; of South America\nn02460817\tsquirrel-sized South American toothless anteater with long silky golden fur\nn02461128\tsmall toothless anteater with prehensile tail and four-clawed forelimbs; of tropical South America and Central America\nn02461830\ttoothless mammal of southern Africa and Asia having a body covered with horny scales and a long snout for feeding on ants and termites\nn02462213\tmargin between the skin of the pastern and the horn of the hoof\nn02469248\ta feather covering the shoulder of a bird\nn02469472\ta larval frog or toad\nn02469914\tany placental mammal of the order Primates; has good eyesight and flexible hands and feet\nn02470238\tan ape or monkey\nn02470325\tany of various primates with short tails or no tail at all\nn02470709\tany member of the suborder Anthropoidea including monkeys and apes and hominids\nn02470899\tany tailless ape of the families Pongidae and Hylobatidae\nn02471300\ta primate of the superfamily Hominoidea\nn02471762\ta primate of the family Hominidae\nn02472293\tany living or extinct member of the family Hominidae characterized by superior intelligence, articulate speech, and erect carriage\nn02472987\tall of the living human inhabitants of the earth\nn02473307\textinct species of 
primitive hominid with upright stature but small brain\nn02473554\tformer genus of primitive apelike men now Homo erectus\nn02473720\tfossil remains found in Java; formerly called Pithecanthropus erectus\nn02473857\tfossils found near Beijing, China; they were lost during World War II\nn02473983\tgenus to which Peking man was formerly assigned\nn02474110\textinct primitive hominid of late Pleistocene; Java; formerly Javanthropus\nn02474282\tformer genus of primitive man; now Homo soloensis: comprises Solo man\nn02474605\textinct species of upright East African hominid having some advanced humanlike characteristics\nn02474777\tthe only surviving hominid; species to which modern man belongs; bipedal primate having language and ability to make and use complex tools; brain volume at least 1400 cc\nn02475078\textinct robust human of Middle Paleolithic in Europe and western Asia\nn02475358\textinct human of Upper Paleolithic in Europe\nn02475669\tsubspecies of Homo sapiens; includes all modern races\nn02476219\tany of several extinct humanlike bipedal primates with relatively small brains of the genus Australopithecus; from 1 to 4 million years ago\nn02476567\tfossils found in Ethiopia; from 3.5 to 4 million years ago\nn02476870\tgracile hominid of southern Africa; from about 3 million years ago\nn02477028\tlarge-toothed hominid of eastern Africa; from 1 to 2 million years ago\nn02477187\tgenus to which Australopithecus boisei was formerly assigned\nn02477329\tlarge-toothed hominid of southern Africa; from 1.5 to 2 million years ago; formerly Paranthropus\nn02477516\tformer classification for Australopithecus robustus\nn02477782\tfossil primates found in India\nn02478239\tfossil hominoids from northern central Hungary; late Miocene\nn02478875\tan anthropoid ape of the genus Proconsul\nn02479332\textinct primate of about 38 million years ago; fossils found in Egypt\nn02480153\tany of the large anthropoid apes of the family Pongidae\nn02480495\tlarge long-armed ape of 
Borneo and Sumatra having arboreal habits\nn02480855\tlargest anthropoid ape; terrestrial and vegetarian; of forests of central west Africa\nn02481103\ta kind of gorilla\nn02481235\ta kind of gorilla\nn02481366\tgorilla of Kivu highlands\nn02481500\tan adult male gorilla with grey hairs across the back\nn02481823\tintelligent somewhat arboreal ape of equatorial African forests\nn02482060\tmasked or pale-faced chimpanzees of western Africa; distantly related to the eastern and central chimpanzees; possibly a distinct species\nn02482286\tlong-haired chimpanzees of east-central Africa; closely related to the central chimpanzees\nn02482474\tblack-faced chimpanzees of central Africa; closely related to eastern chimpanzees\nn02482650\tsmall chimpanzee of swamp forests in Zaire; a threatened species\nn02483092\tgibbons and siamangs\nn02483362\tsmallest and most perfectly anthropoid arboreal ape having long arms and no tail; of southern Asia and East Indies\nn02483708\tlarge black gibbon of Sumatra having the 2nd and 3rd toes partially united by a web\nn02484322\tany of various long-tailed primates (excluding the prosimians)\nn02484473\tof Africa or Arabia or Asia; having nonprehensile tails and nostrils close together\nn02484975\tsmall slender African monkey having long hind limbs and tail and long hair around the face\nn02485225\tsmallest guenon monkey; of swampy central and west African forests\nn02485371\twhite and olive green East African monkey with long white tufts of hair beside the face\nn02485536\tSouth African monkey with black face and hands\nn02485688\tcommon savannah monkey with greenish-grey back and yellow tail\nn02485988\tlarge agile arboreal monkey with long limbs and tail and white upper eyelids\nn02486261\treddish long-tailed monkey of west Africa\nn02486410\tlarge terrestrial monkeys having doglike muzzles\nn02486657\tgreyish baboon of southern and eastern Africa\nn02486908\tbaboon of west Africa with a bright red and blue muzzle and blue 
hindquarters\nn02487079\tsimilar to the mandrill but smaller and less brightly colored\nn02487347\tshort-tailed monkey of rocky regions of Asia and Africa\nn02487547\tof southern Asia; used in medical research\nn02487675\tIndian macaque with a bonnet-like tuft of hair\nn02487847\ttailless macaque of rocky cliffs and forests of northwestern Africa and Gibraltar\nn02488003\tmonkey of southeast Asia, Borneo and the Philippines\nn02488291\tslender long-tailed monkey of Asia\nn02488415\tlangur of southern Asia; regarded as sacred in India\nn02488702\tarboreal monkey of western and central Africa with long silky fur and reduced thumbs\nn02488894\ta colobus monkey with a reddish brown coat and white silky fringes down both sides of the body\nn02489166\tBorneo monkey having a long bulbous nose\nn02489589\thairy-faced arboreal monkeys having widely separated nostrils and long usually prehensile tails\nn02490219\tsmall soft-furred South American and Central American monkey with claws instead of nails\nn02490597\ta marmoset\nn02490811\tthe smallest monkey; of tropical forests of the Amazon\nn02491107\tsmall South American marmoset with silky fur and long nonprehensile tail\nn02491329\tgolden South American monkey with long soft hair forming a mane\nn02491474\tSouth American tamarin with a tufted head\nn02492035\tmonkey of Central America and South America having thick hair on the head that resembles a monk's cowl\nn02492356\tnocturnal monkey of Central America and South America with large eyes and thick fur\nn02492660\tmonkey of tropical South American forests having a loud howling cry\nn02492948\tsmall arboreal monkey of tropical South America with long hair and bushy nonprehensile tail\nn02493224\tmedium-sized tree-dwelling monkey of the Amazon basin; only New World monkey with a short tail\nn02493509\tsmall South American monkeys with long beautiful fur and long nonprehensile tail\nn02493793\tarboreal monkey of tropical America with long slender legs and long prehensile 
tail\nn02494079\tsmall long-tailed monkey of Central American and South America with greenish fur and black muzzle\nn02494383\tlarge monkeys with dark skin and woolly fur of the Amazon and Orinoco basins\nn02495242\tinsectivorous arboreal mammal of southeast Asia that resembles a squirrel with large eyes and long sharp snout\nn02496052\tprimitive primates having large ears and eyes and characterized by nocturnal habits\nn02496913\tlarge-eyed arboreal prosimian having foxy faces and long furry tails\nn02497673\tsmall lemur having its tail barred with black\nn02498153\tnocturnal lemur with long bony fingers and rodent-like incisor teeth closely related to the lemurs\nn02498743\tslim-bodied lemur of southern India and Sri Lanka\nn02499022\tstocky lemur of southeastern Asia\nn02499316\ta kind of lemur\nn02499568\ta kind of lemur\nn02499808\tagile long-tailed nocturnal African lemur with dense woolly fur and large eyes and ears\nn02500267\tlarge short-tailed lemur of Madagascar having thick silky fur in black and white and fawn\nn02500596\tnocturnal indris with thick grey-brown fur and a long tail\nn02501583\tnocturnal arboreal primate of Indonesia and the Philippines having huge eyes and digits ending in pads to facilitate climbing; the only primate that spurns all plant material as food living entirely on insects and small vertebrates\nn02501923\ta variety of tarsier\nn02502006\ta variety of tarsier\nn02502514\tarboreal nocturnal mammal of southeast Asia and the Philippines resembling a lemur and having a fold of skin on each side from neck to tail that is used for long gliding leaps\nn02502807\ta variety of flying lemur\nn02503127\tmassive herbivorous mammals having tusks and a long trunk\nn02503517\tfive-toed pachyderm\nn02503756\ta wild and vicious elephant separated from the herd\nn02504013\tAsian elephant having smaller ears and tusks primarily in the male\nn02504458\tan elephant native to Africa having enormous flapping ears and ivory tusks\nn02504770\tany of 
numerous extinct elephants widely distributed in the Pleistocene; extremely large with hairy coats and long upcurved tusks\nn02505063\tvery hairy mammoth common in colder portions of the northern hemisphere\nn02505238\ta variety of mammoth\nn02505485\tlargest known mammoth; of America\nn02505998\textinct elephant-like mammal that flourished worldwide from Miocene through Pleistocene times; differ from mammoths in the form of the molar teeth\nn02506947\tan animal that walks with the entire sole of the foot touching the ground as e.g. bears and human beings\nn02507148\tan animal that walks so that only the toes touch the ground as e.g. dogs and cats and horses\nn02507649\tplantigrade carnivorous mammals\nn02508021\tan omnivorous nocturnal mammal native to North America and Central America\nn02508213\tNorth American raccoon\nn02508346\ta South American raccoon\nn02508742\traccoon-like omnivorous mammal of Mexico and the southwestern United States having a long bushy tail with black and white rings\nn02509197\tarboreal fruit-eating mammal of tropical America with a long prehensile tail\nn02509515\tomnivorous mammal of Central America and South America\nn02509815\treddish-brown Old World raccoon-like carnivore; in some classifications considered unrelated to the giant pandas\nn02510455\tlarge black-and-white herbivorous mammal of bamboo forests of China and Tibet; in some classifications considered a member of the bear family or of a separate family Ailuropodidae\nn02511730\ta bird that twitters\nn02512053\tany of various mostly cold-blooded aquatic vertebrates usually having scales and breathing through gills\nn02512752\ta young or small fish\nn02512830\tany fish providing sport for the angler\nn02512938\tany fish used for food by human beings\nn02513248\tany fish useless for food or sport or even as bait\nn02513355\tfish that live on the sea bottom (particularly the commercially important gadoid fish like cod and haddock, or flatfish like flounder)\nn02513560\ta fish 
that is young\nn02513727\tthe young of various fishes\nn02513805\tany of various fishes that carry their eggs and their young in their mouths\nn02513939\ta female fish at spawning time\nn02514041\ta large marine food fish common on the coasts of Australia, New Zealand, and southern Africa\nn02515214\tany fish of the order Crossopterygii; most known only in fossil form\nn02515713\tfish thought to have been extinct since the Cretaceous period but found in 1938 off the coast of Africa\nn02516188\tair-breathing fish having an elongated body and fleshy paired fins; certain species construct mucus-lined mud coverings in which to survive drought\nn02516776\textinct lungfish\nn02517442\tany of numerous mostly freshwater bottom-living fishes of Eurasia and North America with barbels like whiskers around the mouth\nn02517938\tOld World freshwater catfishes having naked skin and a long anal fin more or less merged with the eellike caudal fin\nn02518324\tlarge elongated catfish of central and eastern Europe\nn02518622\tfreshwater catfish of the Nile and tropical central Africa having an electric organ\nn02519148\tany of several common freshwater catfishes of the United States\nn02519340\tcatfish common in eastern United States\nn02519472\tfreshwater catfish of eastern United States\nn02519686\tfreshwater food fish common throughout central United States\nn02519862\ta large catfish of the Mississippi valley\nn02520147\tlarge catfish of central United States having a flattened head and projecting jaw\nn02520525\tSouth American catfish having the body covered with bony plates\nn02520810\tany of numerous marine fishes most of which are mouthbreeders; not used for food\nn02521646\ta soft-finned fish of the family Gadidae\nn02522399\tmajor food fish of Arctic and cold-temperate waters\nn02522637\tyoung codfish\nn02522722\tone of the world's most important commercial fishes\nn02522866\tclosely related to Atlantic cod\nn02523110\ta food fish of the Atlantic waters of Europe resembling 
the cod; sometimes placed in genus Gadus\nn02523427\telongate freshwater cod of northern Europe and Asia and North America having barbels around its mouth\nn02523877\timportant food fish on both sides of the Atlantic; related to cod but usually smaller\nn02524202\timportant food and game fish of northern seas (especially the northern Atlantic); related to cod\nn02524524\tany of several marine food fishes related to cod\nn02524659\tfound off Atlantic coast of North America\nn02524928\tAmerican hakes\nn02525382\tlarge edible marine fish of northern coastal waters; related to cod\nn02525703\tdeep-sea fish with a large head and body and long tapering tail\nn02526121\tvoracious snakelike marine or freshwater fishes with smooth slimy usually scaleless skin and having a continuous vertical fin but no ventral fins\nn02526425\tyoung eel\nn02526818\teels that live in fresh water as adults but return to sea to spawn; found in Europe and America; marketed both fresh and smoked\nn02527057\tNew Zealand eel\nn02527271\tfamily of brightly colored voracious eels of warm coastal waters; generally nonaggressive to humans but larger species are dangerous if provoked\nn02527622\tlarge dark-colored scaleless marine eel found in temperate and tropical coastal waters; some used for food\nn02528163\ta bony fish of the subclass Teleostei\nn02529293\tfish of sandy areas of western Pacific and Indian oceans having an angular snout for burrowing into sand\nn02529772\tany of numerous soft-finned schooling food fishes of shallow waters of northern seas\nn02530052\tthe edible young of especially herrings and sprats and smelts\nn02530188\tthe young of a herring or sprat or similar fish\nn02530421\therring-like food fishes that migrate from the sea to fresh water to spawn\nn02530637\tshad of Atlantic coast of North America; naturalized to Pacific coast\nn02530831\tshad that spawns in streams of the Mississippi drainage; very similar to Alosa sapidissima\nn02530999\tEuropean 
shad\nn02531114\tshad-like food fish that runs rivers to spawn; often salted or smoked; sometimes placed in genus Pomolobus\nn02531625\tshad-like North American marine fishes used for fish meal and oil and fertilizer\nn02532028\tcommercially important food fish of northern waters of both Atlantic and Pacific\nn02532272\timportant food fish; found in enormous shoals in the northern Atlantic\nn02532451\timportant food fish of the northern Pacific\nn02532602\tany of various small edible herring or related food fishes frequently canned\nn02532786\tany of various young herrings (other than brislings) canned as sardines in Norway\nn02532918\tsmall herring processed like a sardine\nn02533209\tsmall fishes found in great schools along coasts of Europe; smaller and rounder than herring\nn02533545\tsmall pilchards common off the pacific coast of North America\nn02533834\tsmall herring-like plankton-eating fishes often canned whole or as paste; abundant in tropical waters worldwide\nn02534165\testeemed for its flavor; usually preserved or used for sauces and relishes\nn02534559\tsoft-finned fishes of cold and temperate waters\nn02534734\tany of various large food and game fishes of northern waters; usually migrate from salt to fresh water to spawn\nn02535080\ta young salmon up to 2 years old\nn02535163\tfemale salmon that has recently spawned\nn02535258\tmale salmon that has recently spawned\nn02535537\tfound in northern coastal Atlantic waters or tributaries; adults do not die after spawning\nn02535759\tAtlantic salmon confined to lakes of New England and southeastern Canada\nn02536165\tsmall salmon with red flesh; found in rivers and tributaries of the northern Pacific and valued as food; adults die after spawning\nn02536456\tlarge Pacific salmon valued as food; adults die after spawning\nn02536864\tsmall salmon of northern Pacific coasts and the Great Lakes\nn02537085\tany of various game and food fishes of cool fresh waters mostly smaller than typical 
salmons\nn02537319\tspeckled trout of European rivers; introduced in North America\nn02537525\tfound in Pacific coastal waters and streams from lower California to Alaska\nn02537716\tsilvery marine variety of brown trout that migrates to fresh water to spawn\nn02538010\tlarge fork-tailed trout of lakes of Canada and the northern United States\nn02538216\tNorth American freshwater trout; introduced in Europe\nn02538406\tany of several small trout-like fish of the genus Salvelinus\nn02538562\tsmall trout of northern waters; landlocked populations in Quebec and northern New England\nn02538985\tsilvery herring-like freshwater food fish of cold lakes of the northern hemisphere\nn02539424\tfound in the Great Lakes and north to Alaska\nn02539573\timportant food fish of cold deep lakes of North America\nn02539894\ta whitefish with a bronze back; of northern North America and Siberia\nn02540412\tsmall trout-like silvery marine or freshwater food fishes of cold northern waters\nn02540983\tthe common smelt of Europe\nn02541257\tvery small northern fish; forage for sea birds and marine mammals and other fishes\nn02541687\tlarge silvery game fish of warm Atlantic coastal waters especially off Florida\nn02542017\tgame fish resembling the tarpon but smaller\nn02542432\tslender silvery marine fish found in tropical mud flats and mangrove lagoons\nn02542958\tany of various small silver-scaled salmon-like marine fishes\nn02543255\tsmall fish having rows of luminous organs along each side; some surface at night\nn02543565\ttropical fishes with large mouths in lizard-like heads; found worldwide\nn02544274\tlarge elongate scaleless oceanic fishes with sharp teeth and a long dorsal fin that resembles a sail\nn02545841\tlarge elliptical brightly colored deep-sea fish of Atlantic and Pacific and Mediterranean\nn02546028\tfrom Nova Scotia to West Indies and Gulf of Mexico\nn02546331\tmarine fish having a long compressed ribbonlike body\nn02546627\tdeep-sea ribbonfish\nn02547014\tthin 
deep-water tropical fish 20 to 30 feet long having a red dorsal fin\nn02547733\tbottom-dweller of warm western Atlantic coastal waters having a flattened scaleless body that crawls about on fleshy pectoral and pelvic fins\nn02548247\tfishes having large mouths with a wormlike filament attached for luring prey\nn02548689\tbottom-dwelling fish having scaleless slimy skin and a broad thick head with a wide mouth\nn02548884\ta variety of toadfish\nn02549248\tfish having a frog-like mouth with a lure on the snout\nn02549376\tsmall fantastically formed and colored fishes found among masses of sargassum\nn02549989\telongate European surface-dwelling predacious fishes with long toothed jaws; abundant in coastal waters\nn02550203\tfound in warm waters of western Atlantic\nn02550460\ttropical marine fishes having enlarged winglike fins used for brief gliding flight\nn02550655\thaving only pectoral fins enlarged\nn02551134\ttropical and subtropical marine and freshwater fishes having an elongated body and long protruding lower jaw\nn02551668\tslender long-beaked fish of temperate Atlantic waters\nn02552171\ta teleost fish with fins that are supported by sharp inflexible rays\nn02553028\tfood fish of the northern Pacific related to greenlings\nn02554730\tany of numerous spiny-finned fishes of the order Perciformes\nn02555863\tany of numerous spiny-finned fishes of various families of the order Perciformes\nn02556373\ta small perch of India whose gills are modified to allow it to breathe air; has spiny pectoral fins that enable it to travel on land\nn02556846\tspiny-finned freshwater food and game fishes\nn02557182\tNorth American perch\nn02557318\ta perch native to Europe\nn02557591\tany of several pike-like fishes of the perch family\nn02557749\tpike-like freshwater perches\nn02557909\tvariety inhabiting the Great Lakes\nn02558206\ta small snail-eating perch of the Tennessee River\nn02558860\telongate compressed somewhat eel-shaped fishes\nn02559144\tdeep-sea 
fishes\nn02559383\tfound living within the alimentary canals of e.g. sea cucumbers or between the shells of pearl oysters in or near shallow seagrass beds\nn02559862\ta kind of percoid fish\nn02560110\tlarge tropical American food and game fishes of coastal and brackish waters; resemble pike\nn02561108\tany of several elongate long-snouted freshwater game and food fishes widely distributed in cooler parts of the northern hemisphere\nn02561381\tvoracious piscivorous pike of waters of northern hemisphere\nn02561514\tlarge (60 to 80 pounds) sport fish of North America\nn02561661\tany of several North American species of small pike\nn02561803\tcommon in quiet waters of eastern United States\nn02561937\tsmall but gamy pickerel of Atlantic coastal states\nn02562315\tsmall carnivorous freshwater percoid fishes of North America usually having a laterally compressed body and metallic luster: crappies; black bass; bluegills; pumpkinseed\nn02562796\tsmall sunfishes of central United States rivers\nn02562971\ta crappie that is black\nn02563079\ta crappie that is white\nn02563182\tany of various usually edible freshwater percoid fishes having compressed bodies and shiny scales; especially (but not exclusively) of the genus Lepomis\nn02563648\tsmall brilliantly colored North American sunfish\nn02563792\timportant edible sunfish of eastern and central United States\nn02563949\tinhabits streams from South Carolina to Florida; esteemed panfish\nn02564270\tNorth American food and game fish\nn02564403\tgame and food fish of upper Mississippi and Great Lakes\nn02564720\twidely distributed and highly prized American freshwater game fishes (sunfish family)\nn02564935\ta variety of black bass\nn02565072\ta variety of black bass; the angle of the jaw falls below the eye\nn02565324\ta large black bass; the angle of the jaw falls behind the eye\nn02565573\tnontechnical name for any of numerous edible marine and freshwater spiny-finned fishes\nn02566109\tmarine food sport fishes mainly of 
warm coastal waters\nn02566489\tsmall silvery food and game fish of eastern United States streams\nn02566665\tNorth American freshwater bass resembling the larger marine striped bass\nn02567334\tsmall marine fish with black mouth and gill cavity\nn02567633\ta kind of sea bass\nn02568087\tmarine food and game fish with dark longitudinal stripes; migrates upriver to spawn; sometimes placed in the genus Morone\nn02568447\tbrown fish of the Atlantic and Mediterranean found around rocks and shipwrecks\nn02568959\tusually solitary bottom sea basses of warm seas\nn02569484\tany of several mostly spotted fishes that resemble groupers\nn02569631\tfound around rocky coasts or on reefs\nn02569905\tdeep-sea fish of tropical Atlantic\nn02570164\tlarge dark grouper with a thick head and rough scales\nn02570484\tfishes with slimy mucus-covered skin; found in the warm Atlantic coastal waters of America\nn02570838\tsmall to medium-sized shallow-water fishes of the Pacific coast of North America\nn02571167\tPacific coast fish\nn02571652\tred fishes of American coastal tropical waters having very large eyes and rough scales\nn02571810\tbrightly colored carnivorous fish of western Atlantic and West Indies waters\nn02572196\tsmall red fishes of coral reefs and inshore tropical waters\nn02572484\ta cardinalfish found in tropical Atlantic coastal waters\nn02573249\tyellow-spotted violet food fish of warm deep waters\nn02573704\tbluish warm-water marine food and game fish that follow schools of small fishes into shallow waters\nn02574271\tlarge dark-striped tropical food and game fish related to remoras; found worldwide in coastal to open waters\nn02574910\tmarine fishes with a flattened elongated body and a sucking disk on the head for attaching to large fish or moving objects\nn02575325\tremoras found attached to sharks\nn02575590\tlarge blue Pacific remora that attaches to whales and dolphins\nn02576223\ta percoid fish of the family Carangidae\nn02576575\tany of several fast-swimming 
predacious fishes of tropical to warm temperate seas\nn02576906\tfish of western Atlantic and Gulf of Mexico\nn02577041\tfish of western Atlantic and Gulf of Mexico\nn02577164\tfish of western Atlantic: Cape Cod to Brazil\nn02577403\tstreamlined cigar-shaped jack; good game fish\nn02577662\tany of several New World tropical fishes having tiny embedded scales\nn02577952\tfish having greatly elongated front rays on dorsal and anal fins\nn02578233\tany of several silvery marine fishes with very flat bodies\nn02578454\tsimilar to moonfish but with eyes high on the truncated forehead\nn02578771\tany of several amber to coppery fork-tailed warm-water carangid fishes\nn02578928\tgame fish of southern California and Mexico having a yellow tail fin\nn02579303\tlarge game fish of Australia and New Zealand\nn02579557\tany of several deep-bodied food fishes of western Atlantic and Gulf of Mexico\nn02579762\tfound in coastal waters New England to Brazil except clear waters of West Indies\nn02579928\tlarge game fish; found in waters of the West Indies\nn02580336\tany of a number of fishes of the family Carangidae\nn02580679\ta California food fish\nn02580830\tlarge elongated compressed food fish of the Atlantic waters of Europe\nn02581108\tof Atlantic coastal waters; commonly used for bait\nn02581482\tsmall silvery fish; Nova Scotia to Brazil\nn02581642\tsmall fusiform fish of western Atlantic\nn02581957\tlarge slender food and game fish widely distributed in warm seas (especially around Hawaii)\nn02582220\tthe more common dolphinfish valued as food; about six feet long\nn02582349\ta kind of dolphinfish\nn02582721\tdeep-bodied sooty-black pelagic spiny-finned fish of the northern Atlantic and northern Pacific; valued for food\nn02583567\tany freshwater fish of the family Characinidae\nn02583890\tbrightly colored tropical freshwater fishes\nn02584145\tsmall bright red and blue aquarium fish from streams in Brazil and Colombia\nn02584449\tsmall voraciously carnivorous freshwater 
fishes of South America that attack and destroy living animals\nn02585872\tfreshwater fishes of tropical America and Africa and Asia similar to American sunfishes; some are food fishes; many small ones are popular in aquariums\nn02586238\timportant food fish of the Nile and other rivers of Africa and Asia Minor\nn02586543\tany of several large sharp-toothed marine food and sport fishes of the family Lutjanidae of mainly tropical coastal waters\nn02587051\tan esteemed food fish with pinkish red head and body; common in the Atlantic coastal waters of North America and the Gulf of Mexico\nn02587300\tfound in shallow waters off the coast of Florida\nn02587479\tsimilar to and often marketed as `red snapper'\nn02587618\tfood fish of warm Caribbean and Atlantic waters\nn02587877\tsuperior food fish of the tropical Atlantic and Caribbean with broad yellow stripe along the sides and on the tail\nn02588286\tmedium-sized tropical marine food fishes that utter grunting sounds when caught\nn02588794\ta grunt with a red mouth that is found from Florida to Brazil\nn02588945\ta kind of grunt\nn02589062\tfound off the West Indies and Florida\nn02589196\tof warm Atlantic waters\nn02589316\ta grunt found from Florida to Brazil and Gulf of Mexico\nn02589623\tblack and gold grunt found from Bermuda to Caribbean to Brazil\nn02589796\tdusky grey food fish found from Louisiana and Florida southward\nn02590094\tfound from Long Island southward\nn02590495\tspiny-finned food fishes of warm waters having well-developed teeth\nn02590702\tany of numerous marine percoid fishes especially (but not exclusively) of the family Sparidae\nn02590987\timportant deep-bodied food and sport fish of warm and tropical coastal waters; found worldwide\nn02591330\tfood fish of the Mediterranean and Atlantic coasts of Europe and America\nn02591613\tfood fish of European coastal waters\nn02591911\tsea bream of warm Atlantic waters\nn02592055\tlarge (up to 20 lbs) food fish of the eastern coast of the United 
States and Mexico\nn02592371\tsimilar to sea bream; small spiny-finned fish found in bays along the southeastern coast of the United States\nn02592734\tfrom Florida and Bahamas to Brazil\nn02593019\tAustralian food fish having a pinkish body with blue spots\nn02593191\timportant dark-colored edible food and game fish of Australia\nn02593453\tfound in Atlantic coastal waters of North America from South Carolina to Maine; esteemed as a panfish\nn02593679\tporgy of southern Atlantic coastal waters of North America\nn02594250\twidely distributed family of carnivorous percoid fishes having a large air bladder used to produce sound\nn02594942\ta kind of drumfish\nn02595056\tblack-and-white drumfish with an erect elongated dorsal fin\nn02595339\tsmall silvery drumfish often mistaken for white perch; found along coasts of United States from New York to Mexico\nn02595702\tlarge edible fish found off coast of United States from Massachusetts to Mexico\nn02596067\tlarge important food fish of Australia; almost indistinguishable from the maigre\nn02596252\tlarge European marine food fish\nn02596381\tany of several fishes that make a croaking noise\nn02596720\ta silvery-bodied croaker with dark markings and tiny barbels\nn02597004\ta fish of the Pacific coast of North America\nn02597367\tany of several food fishes of North American coastal waters\nn02597608\tany of several food and game fishes of the drum family indigenous to warm Atlantic waters of the North American coast\nn02597818\twhiting of the southeastern coast of North America\nn02597972\twhiting of the east coast of United States; closely resembles king whiting\nn02598134\tbluish-grey whiting of California coast\nn02598573\tsmall silvery marine food fish found off California\nn02598878\tsilvery and bluish drumfish of shallow California coastal waters\nn02599052\tany of several sciaenid fishes of North American coastal waters\nn02599347\tfood and game fish of North American coastal waters with a mouth from which hooks 
easily tear out\nn02599557\tweakfish of southern Atlantic and Gulf Coasts of United States\nn02599958\tbottom dwelling marine warm water fishes with two barbels on the chin\nn02600298\tbrightly colored tropical fishes with chin barbels\nn02600503\tbody bright scarlet with 2 yellow to reddish strips on side\nn02600798\tschooling goatfish; greyish with yellow stripe\nn02601344\tfreshwater or coastal food fishes a spindle-shaped body; found worldwide\nn02601767\tmost important commercial mullet in eastern United States\nn02601921\tsilvery mullet of Atlantic and Pacific coasts\nn02602059\tsimilar to the striped mullet and takes its place in the Caribbean region\nn02602405\tsmall fishes having a silver stripe along each side; abundant along the Atlantic coast of the United States\nn02602760\ta relatively large silversides of the Pacific coast of North America (known to reach 18 inches in length)\nn02603317\tany voracious marine fish of the genus Sphyraena having an elongated cylindrical body and large mouth with projecting lower jaw and long strong teeth\nn02603540\tlarge (up to 6 ft) greyish-brown barracuda highly regarded as a food and sport fish; may be dangerous to swimmers\nn02603862\tlittle-known nocturnal fish of warm shallow seas with an oblong compressed body\nn02604157\tschooling fishes mostly of Indian and western Pacific oceans; two species in western Atlantic\nn02604480\tfood and game fish around Bermuda and Florida; often follow ships\nn02604954\tdeep-bodied disk-shaped food fish of warmer western Atlantic coastal waters\nn02605316\tsmall usually brilliantly colored tropical marine fishes having narrow deep bodies with large broad fins; found worldwide\nn02605703\tany fish of the genus Chaetodon\nn02605936\ta butterfly fish of the genus Pomacanthus\nn02606052\tgold and black butterflyfish found from West Indies to Brazil\nn02606384\tsmall brilliantly colored tropical marine fishes of coral reefs\nn02606751\ta blue and yellow damselfish of Bermuda and 
Florida and the West Indies\nn02607072\tlive associated with sea anemones\nn02607201\tan anemone fish of the genus Amphiprion\nn02607470\tlarge blue-grey black-striped damselfish; nearly worldwide\nn02607862\tchiefly tropical marine fishes with fleshy lips and powerful teeth; usually brightly colored\nn02608284\tfound around the Great Barrier Reef\nn02608547\tlarge wrasse of western Atlantic; head of male resembles a pig's snout\nn02608860\tsmall wrasse of tropical Atlantic\nn02608996\tbluish and bronze wrasse; found from Florida keys to Brazil\nn02609302\tsmall Atlantic wrasse the male of which has a brilliant blue head\nn02609823\ta kind of razor fish\nn02610066\tlarge dark-colored food fish of the Atlantic coast of North America\nn02610373\tcommon in north Atlantic coastal waters of the United States\nn02610664\tgaudy tropical fishes with parrotlike beaks formed by fusion of teeth\nn02610980\tmullet-like tropical marine fishes having pectoral fins with long threadlike rays\nn02611561\tsmall large-mouthed tropical marine fishes common along sandy bottoms; males brood egg balls in their mouths; popular aquarium fishes\nn02611898\theavy-bodied marine bottom-lurkers with eyes on flattened top of the head\nn02612167\tsmall pallid fishes of shoal tropical waters of North America and South America having eyes on stalks atop head; they burrow in sand to await prey\nn02613181\tsmall usually scaleless fishes with comb-like teeth living about rocky shores; are territorial and live in holes between rocks\nn02613572\tEuropean scaleless blenny\nn02613820\tinhabits both coasts of tropical Atlantic\nn02614140\tmostly small blennioid fishes of coral reefs and seagrass beds\nn02614482\ttropical American fishes; males are aggressively defensive of their territory\nn02614653\tfound from Florida to Cuba\nn02614978\tsmall eellike fishes common in shallow waters of the northern Atlantic\nn02615298\tslippery scaleless food fish of the northern Atlantic coastal 
waters\nn02616128\teellike fishes found in subarctic coastal waters\nn02616397\teellike Atlantic bottom fish with large almost vertical mouth\nn02616851\tlarge ferocious northern deep-sea food fishes with strong teeth and no pelvic fins\nn02617537\tan eelpout of northern Europe that is viviparous\nn02618094\tcommon along northeastern coast of North America\nn02618513\tvery small silvery eellike schooling fishes that burrow into sandy beaches\nn02618827\tsmall often brightly colored scaleless marine bottom-dwellers; found in tropical and warm temperate waters of Europe and America\nn02619165\tsmall spiny-finned fish of coastal or brackish waters having a large head and elongated tapering body having the ventral fins modified as a sucker\nn02619550\tfound in tropical coastal regions of Africa and Asia; able to move on land on strong pectoral fins\nn02619861\ttropical fish that resembles a goby and rests quietly on the bottom in shallow water\nn02620167\tpallid bottom-dwelling flat-headed fish with large eyes and a duck-like snout\nn02620578\tany of several small freshwater fishes that catch insects by squirting water at them and knocking them into the water; found in Indonesia and Australia\nn02621258\tbrightly colored coral-reef fish with knifelike spines at the tail\nn02621908\tsnake mackerels; elongated marine fishes with oily flesh; resembles mackerels; found worldwide\nn02622249\tpredatory tropical fishes with jutting jaws and strong teeth\nn02622547\tlarge snake mackerel with rings like spectacles around its eyes\nn02622712\tvery large deep-water snake mackerel\nn02622955\tlong-bodied marine fishes having a long whiplike scaleless body and sharp teeth; closely related to snake mackerel\nn02623445\timportant marine food and game fishes found in all tropical and temperate seas; some are at least partially endothermic and can thrive in colder waters\nn02624167\tany of various fishes of the family Scombridae\nn02624551\timportant food fish of the northern Atlantic 
and Mediterranean; its body is greenish-blue with dark bars and small if any scales\nn02624807\tmedium-sized mackerel of temperate Atlantic and Gulf of Mexico\nn02624987\tsmall mackerel found nearly worldwide\nn02625258\tlarge fast-moving predacious food and game fish; found worldwide\nn02625612\tany of several large marine food fishes of the genus Scomberomorus\nn02625851\tlarge mackerel with long pointed snout; important food and game fish of the eastern Atlantic coast southward to Brazil\nn02626089\ta large commercially important mackerel of the Atlantic coastal waters of North America\nn02626265\tlarge edible mackerel of temperate United States coastal Atlantic waters\nn02626471\ta Spanish mackerel of western North America\nn02626762\tany very large marine food and game fish of the genus Thunnus; related to mackerel; chiefly of warm waters\nn02627037\tlarge pelagic tuna the source of most canned tuna; reaches 93 pounds and has long pectoral fins; found worldwide in tropical and temperate waters\nn02627292\tlargest tuna; to 1500 pounds; of mostly temperate seas: feed in polar regions but breed in tropics\nn02627532\tmay reach 400 pounds; worldwide in tropics\nn02627835\tany of various scombroid fishes intermediate in size and characteristics between mackerels and tunas\nn02628062\tmedium-sized tuna-like food fish of warm Atlantic and Pacific waters; less valued than tuna\nn02628259\tcommon bonito of Pacific coast of the Americas; its dark oily flesh cans well\nn02628600\toceanic schooling tuna of considerable value in Pacific but less in Atlantic; reaches 75 pounds; very similar to if not the same as oceanic bonito\nn02629230\tfish whose flesh is dried and flaked for Japanese cookery; may be same species as skipjack tuna\nn02629716\tlarge toothless marine food fish with a long swordlike upper jaw; not completely cold-blooded i.e. 
they are able to warm their brains and eyes: worldwide in warm waters but feed on cold ocean floor coming to surface at night\nn02630281\tlarge pelagic game fish having an elongated upper jaw and long dorsal fin that resembles a sail\nn02630615\ta kind of sailfish\nn02630739\tgiant warm-water game fish having a prolonged and rounded toothless upper jaw\nn02631041\tlarge long-jawed oceanic sport fishes; related to sailfishes and spearfishes; not completely cold-blooded i.e. able to warm their brains and eyes\nn02631330\tlargest marlin; may reach 2000 pounds; found worldwide in warm seas\nn02631475\tlarge game fish in the Pacific Ocean; may reach 1000 pounds\nn02631628\tPacific food and game fish marked with dark blue vertical stripes\nn02631775\tsmall marlin (to 180 pounds) of western Atlantic\nn02632039\tany of several large vigorous pelagic fishes resembling sailfishes but with first dorsal fin much reduced; worldwide but rare\nn02632494\tlarge silvery fish found worldwide in warm seas but nowhere common; resembles a whale and feeds on plankton\nn02633422\tsmall food fish of Atlantic coast\nn02633677\tsmaller than Florida pompano; common in West Indies\nn02633977\tbutterfish up to a foot long of Atlantic waters from Chesapeake Bay to Argentina\nn02634545\tlarger butterfishes of the western Atlantic from the New York area to the northern Gulf of Mexico\nn02635154\tblackish fish of New England waters\nn02635580\tvery small (to 3 inches) flattened marine fish with a sucking disc on the abdomen for clinging to rocks etc.\nn02636170\tlarge food fish of warm waters worldwide having long anal and dorsal fins that with a caudal fin suggest a three-lobed tail\nn02636405\ttripletail found from Cape Cod to northern South America\nn02636550\ttripletail found in the Pacific\nn02636854\tsmall silvery schooling fishes with protrusible mouths found in warm coastal waters\nn02637179\tpopular panfish from Bermuda and Gulf of Mexico to Brazil\nn02637475\tsilvery mojarra found along 
sandy shores of the western Atlantic\nn02637977\ta small fish of the genus Sillago; excellent food fish\nn02638596\tprimitive fishes having thick bony scales with a shiny covering\nn02639087\tprimitive long-bodied carnivorous freshwater fish with a very long dorsal fin; found in sluggish waters of North America\nn02639605\tprimitive fish of the Mississippi valley having a long paddle-shaped snout\nn02639922\tfish of larger rivers of China similar to the Mississippi paddlefish\nn02640242\tlarge primitive fishes valued for their flesh and roe; widely distributed in the North Temperate Zone\nn02640626\tfood and game fish of marine and fresh waters of northwestern coast of North America\nn02640857\tvaluable source of caviar and isinglass; found in Black and Caspian seas\nn02641379\tprimitive predaceous North American fish covered with hard scales and having long jaws with needlelike teeth\nn02642107\tfishes having the head armored with bony plates\nn02642644\tany of numerous carnivorous usually bottom-dwelling warm-water marine fishes found worldwide but most abundant in the Pacific\nn02643112\tmarine fishes having a tapering body with an armored head and venomous spines\nn02643316\ta kind of scorpionfish\nn02643566\tbrightly striped fish of the tropical Pacific having elongated spiny fins\nn02643836\tvenomous tropical marine fish resembling a piece of rock\nn02644113\tmarine food fish found among rocks along the northern coasts of Europe and America\nn02644360\ta rockfish of the Pacific coastal waters of North America\nn02644501\ta commercially important fish of the Pacific coast of North America\nn02644665\ta large fish of the Pacific coast of North America\nn02644817\tlarge fish of northern Atlantic coasts of America and Europe\nn02645538\tfreshwater sculpin with a large flattened bony-plated head with hornlike spines\nn02645691\tsmall freshwater sculpin of Europe and North America\nn02645953\tlarge sculpin of western Atlantic; inflates itself when 
caught\nn02646667\tclumsy soft thick-bodied northern Atlantic fish with pelvic fins fused into a sucker; edible roe used for caviar\nn02646892\tany of several very small lumpfishes\nn02648035\tnorthern Atlantic sea poacher\nn02648625\tfood fish of the northern Pacific\nn02648916\tcommon food and sport fish of western coast of North America\nn02649218\tgreenling with whitish body marked with black bands\nn02649546\tfood fish of the Indonesian region of the Pacific; resembles gurnards\nn02650050\tbottom-dwelling coastal fishes with spiny armored heads and fingerlike pectoral fins used for crawling along the sea bottom\nn02650413\ta kind of gurnard\nn02650541\tAmerican gurnard; mostly found in bays and estuaries\nn02651060\tlarge searobin; found from Nova Scotia to Florida\nn02652132\ttropical fish with huge fanlike pectoral fins for underwater gliding; unrelated to searobins\nn02652668\ttropical marine fishes having the teeth fused into a beak and thick skin covered with bony plates or spines\nn02653145\tany of numerous compressed deep-bodied tropical fishes with sandpapery skin and erectile spines in the first dorsal fin\nn02653497\ttropical Atlantic fish\nn02653786\tnarrow flattened warm-water fishes with leathery skin and a long file-like dorsal spine\nn02654112\tany of several brightly colored tropical filefishes\nn02654425\tany of numerous small tropical fishes having body and head encased in bony plates\nn02654745\ttrunkfish having hornlike spines over the eyes\nn02655020\tany of numerous marine fishes whose elongated spiny body can inflate itself with water or air to form a globe; several species contain a potent nerve poison; closely related to spiny puffers\nn02655523\tpuffers having rigid or erectile spines\nn02655848\tspines become erect when the body is inflated; worldwide in warm waters\nn02656032\tsimilar to but smaller than porcupinefish\nn02656301\tany of several fishes having rigid flattened spines\nn02656670\tamong the largest bony fish; pelagic 
fish having an oval compressed body with high dorsal and anal fins and caudal fin reduced to a rudder-like lobe; worldwide in warm waters\nn02656969\tcaudal fin has a central projection\nn02657368\tany of several families of fishes having flattened bodies that swim along the sea floor on one side of the body with both eyes on the upper side\nn02657694\tany of various European and non-European marine flatfish\nn02658079\tflounders with both eyes on the right side of the head\nn02658531\tlarge European food fish\nn02658811\timportant food fish of Europe\nn02659176\tAmerican flounder having a yellowish tail\nn02659478\timportant American food fish in the winter\nn02659808\tEuropean flatfish highly valued as food\nn02660091\tlarge American food fish\nn02660208\tmarine food fish of the northern Atlantic or northern Pacific; the largest flatfish and one of the largest teleost fishes\nn02660519\tlargest United States flatfish\nn02660640\ta righteye flounder found in the Pacific\nn02661017\tflatfishes with both eyes on the left side of the head\nn02661473\tflounder of southern United States\nn02661618\tflounder of eastern coast of North America\nn02662239\ta lefteye flounder found in coastal waters from New England to Brazil\nn02662397\ta whiff found in waters from the Bahamas and northern Gulf of Mexico to Brazil\nn02662559\tsmall food fishes of the Pacific coast of North America\nn02662825\tvery thin translucent flounder of the Atlantic coast of North America\nn02662993\tEuropean food fish\nn02663211\ta large brownish European flatfish\nn02663485\tleft-eyed marine flatfish whose tail tapers to a point; of little commercial value\nn02663849\tright-eyed flatfish; many are valued as food; most common in warm seas especially European\nn02664285\thighly valued as food\nn02664642\tpopular pale brown food flatfish of the Pacific coast of North America\nn02665250\tuseless as food; in coastal streams from Maine to Texas and Panama\nn02665985\ta fabric woven from goat hair and 
camel hair\nn02666196\ta calculator that performs arithmetic functions by manually sliding counters on rods or in grooves\nn02666501\ta ship abandoned on the high seas\nn02666624\tthe battery used to heat the filaments of a vacuum tube\nn02666943\ta building where animals are butchered\nn02667093\t(Arabic) a loose black robe from head to toe; traditionally worn by Muslim women\nn02667244\ta condenser having 2 or 3 lenses with wide aperture for use in microscopes\nn02667379\ta monastery ruled by an abbot\nn02667478\ta convent ruled by an abbess\nn02667576\ta church associated with a monastery or convent\nn02667693\ta surveying instrument consisting of a spirit level and a sighting tube; used to measure the angle of inclination of a line from the observer to the target\nn02668393\ta tool or machine used for wearing down or smoothing or polishing\nn02668613\ta primitive stone artifact (usually made of sandstone) used as an abrader\nn02669295\ta masonry support that touches and directly receives thrust or pressure of an arch or bridge\nn02669442\tan arch supported by an abutment\nn02669534\ta costume worn on formal occasions by the faculty or students of a university or college\nn02669723\ta gown worn by academics or judges\nn02670186\ta valve that regulates the supply of fuel to the engine\nn02670382\ta scientific instrument that increases the kinetic energy of charged particles\nn02670683\ta pedal that controls the throttle valve\nn02670935\tan instrument for measuring the acceleration of aircraft or rockets\nn02671780\tclothing that is worn or carried, but not part of your main clothing\nn02672152\ta lens implant containing a hinge that allows for both near and far vision (thus mimicking the natural lens of a young person)\nn02672371\tliving quarters provided for public convenience\nn02672831\ta portable box-shaped free-reed instrument; the reeds are made to vibrate by air from the bellows controlled by the player\nn02675077\ta disk coated with cellulose 
acetate\nn02675219\ta fabric made from fibers of cellulose acetate\nn02675522\ta compound lens system that forms an image free from chromatic aberration\nn02676097\ta delay line based on the time of propagation of sound waves\nn02676261\ta device for amplifying or transmitting sound\nn02676566\tsound is not amplified by electrical means\nn02676670\ta modem that converts electrical signals to telephone tones and back again\nn02676938\tthe citadel in ancient Greek towns\nn02677028\ta synthetic fabric\nn02677136\tused especially by artists\nn02677436\tan instrument for measuring the intensity of electromagnetic radiation (usually by the photochemical effect)\nn02677718\tthe operating part that transmits power to a mechanism\nn02678010\ta type of LCD screen used for some portable computers; there is a separate circuit for each pixel\nn02678384\ta mechanism that puts something into automatic action\nn02678897\tdevice that enables something to be used in a way different from that for which it was intended or makes different pieces of apparatus compatible\nn02679142\ta machine that adds numbers\nn02679257\ta calculator that performs simple arithmetic functions\nn02679961\ta printer that automatically prints addresses on letters for mailing\nn02680110\tbandage consisting of a medical dressing of plain absorbent gauze held in place by a plastic or fabric tape coated with adhesive\nn02680512\ta nearly horizontal passage from the surface into a mine\nn02680638\ta hotel room that shares a wall with another hotel room\nn02680754\tcan be changed to different settings\nn02681392\tsun-dried brick; used in hot dry climates\nn02682311\tan edge tool used to cut and shape wood\nn02682407\ta harp having strings tuned in unison; they sound when wind passes over them\nn02682569\tan apparatus for exposing something to the air (as sewage)\nn02682811\ta torpedo designed to be launched from an airplane\nn02682922\ta dispenser that holds a substance under pressure and that can release it as a 
fine spray (usually by means of a propellant gas)\nn02683183\ta trademark for a loosely woven cotton fabric that is used to make shirts and underwear\nn02683323\ta blanket knitted or crocheted in strips or squares; sometimes used as a shawl\nn02683454\ta wig that gives the appearance of an Afro hairdo\nn02683558\ta device that injects fuel into a hot exhaust for extra thrust\nn02683791\ta fragrant lotion for a man's face after shaving\nn02684248\tpottery that is veined and mottled to resemble agate\nn02684356\ta device that causes material to gather into rounded balls\nn02684515\tornamental tagged cord or braid on the shoulder of a uniform\nn02684649\tmetal or plastic sheath over the end of a shoelace or ribbon\nn02684962\ta place of assembly for the people in ancient Greece\nn02685082\ta long plume (especially one of egret feathers) worn on a hat or a piece of jewelry in the shape of a plume\nn02685253\tan airfoil that controls lateral motion\nn02685365\ta safety restraint in an automobile; the bag inflates on collision and prevents the driver or passenger from being thrown forward\nn02685701\ta vehicular brake that operates by compressed air; especially for heavy vehicles\nn02685995\tan atomizer to spray paint by means of compressed air\nn02686121\ta subsonic jet airliner operated over short distances\nn02686227\ta compressor that takes in air at atmospheric pressure and delivers it at a higher pressure\nn02686379\ta system that keeps air cool and dry\nn02686568\ta vehicle that can fly\nn02687172\ta large warship that carries planes and has a long flat deck for takeoffs and landings\nn02687423\tthe engine that powers an aircraft\nn02687682\ta mechanical device using confined air to absorb the shock of motion\nn02687821\ta large structure at an airport where aircraft can be stored and maintained\nn02687992\ta place where planes take off and land\nn02688273\ta filter that removes dust from the air that passes through it\nn02688443\ta device that provides reactive force 
when in motion relative to the surrounding air; can lift or control a plane in flight\nn02689144\tthe framework and covering of an airplane or rocket (excluding the engines)\nn02689274\ta gun that propels a projectile by compressed air\nn02689434\ta hammer driven by compressed air\nn02689748\ta pneumatic horn\nn02689819\ta warm cupboard where you put newly washed clothes until they are completely dry\nn02690373\ta commercial airplane that carries passengers\nn02690715\ta mailer for airmail\nn02691156\tan aircraft that has a fixed wing and is powered by propellers or jets\nn02692086\ta propeller that rotates to push against air\nn02692232\tan airfield equipped with control tower and hangars as well as accommodations for passengers and cargo\nn02692513\ta pump that moves air in or out of something\nn02692680\ta shipboard radar that searches for aircraft\nn02692877\ta steerable self-propelled aircraft\nn02693246\ta terminal that serves air travelers or air freight\nn02693413\ta missile designed to be launched from one airplane at another\nn02693540\ta missile designed to be launched from an airplane at a target on the ground\nn02694045\tpart of a church divided laterally from the nave proper by rows of pillars or columns\nn02694279\t(Arabian Nights) a magical lamp from which Aladdin summoned a genie\nn02694426\ta device that signals the occurrence of some undesirable event\nn02694662\ta clock that wakes a sleeper at some preset time\nn02694966\ta white linen liturgical vestment with sleeves; worn by priests\nn02695627\tany of various Spanish fortresses or palaces built by the Moors\nn02695762\tthermometer consisting of a glass capillary tube marked with degrees Celsius or Fahrenheit and containing alcohol which rises or falls as it expands or contracts with changes in temperature\nn02696165\ta tavern where ale is sold\nn02696246\tan obsolete kind of container used for distillation; two retorts connected by a tube\nn02696569\tdevice for measuring pain caused by 
pressure\nn02696843\tsurveying instrument used with a plane table for drawing lines of sight on a distant object and for measuring angles\nn02697022\tsurveying instrument consisting of the upper movable part of a theodolite including the telescope and its attachments\nn02697221\twomen's clothing that has a fitted top and a flared skirt that is widest at the hemline\nn02697576\ta screw with a hexagonal hole in the head\nn02697675\ta wrench for Allen screws\nn02697876\ta wrench with a v-shaped jaw and serrations on one side (resembles the open jaws of an alligator)\nn02698244\ta tray for collecting the offering from a congregation\nn02698473\ta thin glossy fabric made of the wool of the Lama pacos, or made of a rayon or cotton imitation of that wool\nn02698634\ta stout staff with a metal point; used by mountain climbers\nn02699494\ta raised structure on which gifts or sacrifices to a god are made\nn02699629\tthe table in Christian churches where communion is given\nn02699770\ta painted or carved screen placed above and behind an altar or communion table\nn02699915\tan instrument that measures the altitude and azimuth of celestial bodies; used in navigation\nn02700064\tan old term for an electric generator that produces alternating current (especially in automobiles)\nn02700258\tan instrument that measures the height above ground; used in navigation\nn02700895\ta violin made by Nicolo Amati or a member of his family\nn02701002\ta vehicle that takes people to and from hospitals\nn02701260\tarea reserved for persons leading the responsive `amens'\nn02701730\ta free-reed instrument in which air is drawn in through reeds by suction bellows\nn02702989\ta meter that measures the flow of electrical current in amperes\nn02703124\tan atomic clock based on vibrational frequency of the nitrogen atom in the ammonia molecule\nn02703275\tprojectiles to be fired from a gun\nn02704645\tan airplane designed to take off and land on water\nn02704792\ta flat-bottomed motor vehicle that 
can travel on land or water\nn02704949\tan oval large stadium with tiers of seats; an arena in which contests and spectacles are held\nn02705201\ta sloping gallery with seats for spectators (as in an operating room or theater)\nn02705429\tan ancient jar with two handles and a narrow neck; used to hold oil or wine\nn02705944\telectronic equipment that increases strength of signals passing through it\nn02706221\ta flask that has two handles; used by Romans for wines or oils\nn02706806\tan arcade featuring coin-operated game machines\nn02708093\ta clock that displays the time of day by the position of hands on a dial\nn02708224\ta computer that represents information by variable quantities (e.g., positions or voltages)\nn02708433\ta watch that represents time by the position of hands on a dial\nn02708555\ta beam balance of great precision used in quantitative chemical analysis\nn02708711\tan instrument that performs analyses\nn02708885\ta distorted projection or perspective; especially an image distorted in such a way that it becomes visible only when viewed in a special manner\nn02709101\tcompound lens or lens system designed to be free of astigmatism and able to form approximately point images\nn02709367\ta mechanical device that prevents a vessel from moving\nn02709637\tthe chain or rope that attaches an anchor to a vessel\nn02709763\ta light in the rigging of a ship that is riding at anchor\nn02709908\ta circuit in a computer that fires only when all of its inputs fire\nn02710044\tmetal supports for logs in a fireplace\nn02710201\tan automaton that resembles a human being\nn02710324\ta chamber having very little reverberation\nn02710429\ta gauge for recording the speed and direction of wind\nn02710600\ta barometer that measures pressure without using fluids\nn02711237\ta series of X rays representing the action of the heart and its blood vessels after the injection of a radiopaque substance\nn02711780\ta modified microscope used to study capillary 
vessels\nn02712545\tan L-shaped metal bracket\nn02712643\ta bulldozer with an angled moldboard to push earth to one side\nn02713003\ta brace worn to strengthen the ankle\nn02713218\ta sock that reaches just above the ankle\nn02713364\ta shoe for a child or woman that has a strap around the ankle\nn02713496\tan elephant goad with a sharp spike and a hook\nn02714315\ta positively charged electrode by which electrons leave an electrical device\nn02714535\tthe negatively charged terminal of a voltaic cell or storage battery that supplies current\nn02714751\tan electronic device that answers the telephone and records messages\nn02715229\tan electrical device that sends or receives radio or television signals\nn02715513\ta large entrance or reception room or area\nn02715712\tartillery designed to shoot upward at airplanes\nn02716626\ta defensive missile designed to shoot down incoming intercontinental ballistic missiles\nn02720048\ta paint used to protect against the accumulation of barnacles etc. 
on underwater surfaces\nn02720576\tworn by fliers and astronauts to counteract the forces of gravity and acceleration\nn02721813\ta piece of ornamented cloth that protects the back of a chair from hair oils\nn02723165\tan astringent substance applied to the skin to reduce perspiration\nn02724722\ta shipboard system to fire rockets at submarines\nn02725872\ta heavy block of iron or steel on which hot metals are shaped by hammering\nn02726017\tthe traditional dress of Vietnamese women consisting of a tunic with long sleeves and panels front and back; the tunic is worn over trousers\nn02726210\tthe great hall in ancient Persian palaces\nn02726305\ta suite of rooms usually on one floor of an apartment house\nn02726681\ta building that is divided into apartments\nn02727016\ta man-made opening; usually small\nn02727141\ta device that controls amount of light admitted\nn02727426\ta shed containing a number of beehives\nn02727825\tequipment designed to serve a specific function\nn02728440\tclothing in general\nn02729222\ta handcart from which apples and other fruit are sold in the street\nn02729837\tdurable goods for home or office use\nn02729965\ta device or control that is very useful for a particular job\nn02730265\ta device for applying a substance\nn02730568\t(usually plural) furnishings and equipment (especially for a ship or hotel)\nn02730930\ta garment of cloth or leather or plastic that is tied about the waist and worn to protect your clothing\nn02731251\t(usually used in the plural) a cord used to tie an apron at the waist\nn02731398\ta domed or vaulted recess or projection on a building especially the east end of a church; usually contains the altar\nn02731629\ta device (trade name Aqua-Lung) that lets divers breathe under water; scuba is an acronym for self-contained underwater breathing apparatus\nn02731900\ta board that is pulled by a speedboat as a person stands on it and skims over the top of the water\nn02732072\ta tank or pool or bowl filled with water 
for keeping live fish and underwater animals\nn02732572\tan ornament that interlaces simulated foliage in an intricate design\nn02732827\ta framework that supports climbing plants\nn02733213\ta structure composed of a series of arches supported by columns\nn02733524\t(architecture) a masonry construction (usually curved) for spanning an opening and supporting the weight above it\nn02734725\tan architectural product or work\nn02734835\tthe lowest part of an entablature; rests immediately on the capitals of the columns\nn02735268\ta support for the arch of the foot\nn02735361\ta lamp that produces light when electric current flows across the gap between two electrodes\nn02735538\ta waterproof overshoe that protects shoes from water or snow\nn02735688\ta part of a structure having some specific characteristic or function\nn02736396\ta passageway between buildings or giving access to a basement\nn02736798\ta sock knitted or woven with an argyle design (usually used in the plural)\nn02737351\ta boat built by Noah to save his family and animals from the flood\nn02737660\tthe part of an armchair or sofa that supports the elbow and forearm of a seated person\nn02738031\tweaponry used by military or naval force\nn02738271\tcoil in which voltage is induced by motion through a magnetic field\nn02738449\ta band worn around the upper arm\nn02738535\tchair with a support on each side for arms\nn02738741\ta medieval helmet with a visor and a neck guard\nn02738859\ta pad worn by football players and hockey goalkeepers\nn02738978\ta hole through which you put your arm and where a sleeve can be attached\nn02739123\t(archeology) a bracelet worn around the wrist or arm\nn02739427\ta band worn around the arm for decoration\nn02739550\ta large wardrobe or cabinet; originally used for storing weapons\nn02739668\tprotective covering made of metal and used in combat\nn02739889\ta military combat vehicle on wheels with light armor (and usually a machine gun)\nn02740061\tan armor-plated 
truck with strong doors and locks used to transport money or valuables\nn02740300\t(military) an armored vehicle (usually equipped with caterpillar treads) that is used to transport infantry\nn02740533\ta vehicle that is protected by armor plate\nn02740764\tspecially hardened steel plate used to protect fortifications or vehicles from enemy fire\nn02741367\ta place where arms are manufactured\nn02741475\ta support for the arm\nn02742070\tan obsolete firearm with a long barrel\nn02742194\tan arrangement of aerials spaced to give desired directional characteristics\nn02742322\tespecially fine or decorative clothing\nn02742468\ta restraint that slows airplanes as they land on the flight deck of an aircraft carrier\nn02742753\ta projectile with a straight thin shaft and an arrowhead on one end and stabilizing vanes on the other; intended to be shot from a bow\nn02743426\tall the weapons and equipment that a country has\nn02744323\ta major or main route\nn02744844\tan X ray of a joint after the injection of a contrast medium\nn02744961\ta type of endoscope that is inserted into a joint for visual examination\nn02745492\ta pump that replaces the natural heart\nn02745611\ta navigational instrument based on a gyroscope; it artificially provides a simulated horizon for the pilot\nn02745816\ta metal or plastic part that is surgically implanted to replace a natural joint (possibly elbow or wrist but usually hip or knee)\nn02746008\ta machine that uses dialysis to remove impurities and waste products from the bloodstream before returning the blood to the patient's body\nn02746225\ta synthetic covering with two layers used experimentally to treat burn victims\nn02746365\tlarge but transportable armament\nn02746595\ta shell fired by artillery\nn02746683\ta factory loft that has been converted into an artist's workroom and living area\nn02746978\ta school specializing in art\nn02747063\ta cravat with wide square ends; secured with an ornamental pin\nn02747177\ta bin that holds 
rubbish until it is collected\nn02747672\ta receptacle fitted beneath the grate in which ashes collect and are removed\nn02747802\ta receptacle for the ash from smokers' cigars or cigarettes\nn02748183\ta short-handled device with a globe containing a sponge; used for sprinkling holy water\nn02748359\tthe basin or other vessel that holds holy water in Roman Catholic Churches\nn02748491\ta pump that draws air or another gas through a liquid\nn02749169\ta powdered form of aspirin\nn02749292\tan armored vehicle with the chassis of a tank (but no turret) and a large gun; used as an antitank weapon and to support infantry\nn02749479\tany of the automatic rifles or semiautomatic rifles with large magazines designed for military use\nn02749670\tthe slender spear of the Bantu-speaking people of Africa\nn02749790\ta group of machine parts that fit together to form a self-contained unit\nn02749953\ta unit consisting of components that have been fitted together\nn02750070\ta hall where many people can congregate\nn02750169\ta factory where manufactured parts are assembled into a finished product\nn02750320\tan arrangement of coils used in sensitive electrical instruments; the coils are arranged to give zero resultant external magnetic field when a current passes through them and to have zero electromotive force induced in them by an external magnetic field\nn02750652\thas a moving magnet and astatic coils arranged to cancel the effect of the Earth's magnetic field\nn02751067\ta transparent dome on top of an airplane where the navigator can make celestial observations\nn02751215\tan early form of sextant\nn02751295\tany telescope designed to collect and record electromagnetic radiation from cosmic sources\nn02751490\ta satellite equipped with a telescope to observe infrared radiation\nn02752199\ta place where reading materials are available\nn02752496\ta sock worn for athletic events\nn02752615\ta support for the genitals worn by men engaging in strenuous 
exercise\nn02752810\ta figure of a man used as a supporting column\nn02752917\tan instrument that measures rate of evaporation of water\nn02753044\ta nuclear weapon in which enormous energy is released by nuclear fission (splitting the nuclei of a heavy element like uranium 235 or plutonium 239)\nn02753394\ta timepiece that derives its time scale from the vibration of atoms or molecules\nn02753710\ta nuclear reactor that uses controlled nuclear fission to generate energy\nn02754103\ta dispenser that turns a liquid (such as perfume) into a fine mist\nn02754656\tthe central area in a building; open to the sky\nn02755140\ta shallow and rectangular briefcase\nn02755352\ta connection that fastens things together\nn02755529\ta military submarine designed and armed to attack enemy shipping\nn02755675\tan electrical device for attenuating the strength of an electrical signal\nn02755823\t(architecture) a low wall at the top of the entablature; hides the roof\nn02755984\ta fan that blows heated air out of the attic of a building\nn02756098\tclothing of a distinctive style or for a particular occasion\nn02756854\tan amplifier that increases the amplitude of reproduced sound\nn02756977\ta cassette for audiotape\nn02757061\tcompact discs used to reproduce sound (voice and music)\nn02757337\tan instrument used to measure the sensitivity of hearing\nn02757462\ta system of electronic equipment for recording or reproducing sound\nn02757714\tmagnetic tape for use in recording sound\nn02757810\ta tape recording of sound\nn02757927\tmaterials using sight or sound to present information\nn02758134\tthe area of a theater or concert hall where the audience sits\nn02758490\thand tool for boring holes\nn02758863\tan expressway in a German-speaking country\nn02758960\ta device for heating substances above their boiling point; used to manufacture chemicals or to sterilize surgical instruments\nn02759257\tan optical device for focussing a camera or other instrument 
automatically\nn02759387\tan aircraft that is supported in flight by unpowered rotating horizontal wings (or blades); forward propulsion is provided by a conventional propeller\nn02759700\ta hypodermic syringe to use in injecting yourself with a liquid\nn02759963\ta firearm that reloads itself\nn02760099\ta cafeteria where food is served from machines\nn02760199\ta vending machine from which you can get food\nn02760298\ta choke that automatically controls the flow of air to the carburetor\nn02760429\ta firearm that reloads itself and keeps firing until the trigger is released\nn02760658\ta pistol that will keep firing until the ammunition is gone or the trigger is released\nn02760855\tlight machine gun\nn02761034\ta transmission that automatically changes the gears according to the speed of the car\nn02761206\tequipment used to achieve automatic control or operation\nn02761392\ta mechanism that can move automatically\nn02761557\tthe engine that propels an automobile\nn02761696\ta factory where automobiles are manufactured\nn02761834\ta device on an automobile for making a warning noise\nn02762169\ta navigational device that automatically keeps ships or planes or spacecraft on a steady course\nn02762371\ta radiogram produced by radiation emitted by the specimen being photographed\nn02762508\tan expressway in an Italian-speaking country\nn02762725\t(nautical) an extra boiler (as a ship's boiler that is used while the ship is in port)\nn02762909\t(nautical) a small engine (as one used on board ships to operate a windlass)\nn02763083\ta supplementary pump available if needed\nn02763198\ta submarine for research purposes\nn02763306\ta data storage device that is not the main memory of a computer\nn02763604\ta building where birds are kept\nn02763714\ta pointed tool for marking surfaces or for punching small holes\nn02763901\ta canopy made of canvas to shelter people or things from rain or sun\nn02764044\tan edge tool with a heavy bladed head mounted across a 
handle\nn02764398\tthe handle of an ax\nn02764505\tthe cutting head of an ax\nn02764614\tthe center around which something rotates\nn02764779\ta shaft on which a wheel rotates\nn02764935\tan iron bar that serves as an axletree\nn02765028\ta dead axle on a carriage or wagon that has terminal spindles on which the wheels revolve\nn02766168\ta woman's headscarf folded into a triangle and tied under the chin; worn by Russian peasant women\nn02766320\ta small bed for babies; enclosed by sides to prevent the baby from falling\nn02766534\ta small vehicle with four wheels in which a baby or child is pushed around\nn02766792\ta small grand piano\nn02767038\tpowder used to prevent a baby's diaper from chafing\nn02767147\ta shoe designed to be worn by infants\nn02767433\ta support that you can lean against while sitting\nn02767665\tthe part of a garment that covers the back of your body\nn02767956\tany of the seats occupied by backbenchers in the House of Commons\nn02768114\ta board used to support the back of someone or something\nn02768226\ta raised vertical board with basket attached; used to play basketball\nn02768433\tthe part of a network that connects other networks together\nn02768655\ta brace worn to support the back\nn02768973\tthe board on which backgammon is played\nn02769075\t(computer science) the area of the screen in graphical user interfaces against which icons and windows appear\nn02769290\tan excavator whose shovel bucket is attached to a hinged boom and is drawn backward to move earth\nn02769669\tlighting from behind\nn02769748\ta bag carried by a strap on your back or shoulder\nn02769963\ta tent that can be carried in a backpack\nn02770078\tplate armor protecting the back; worn as part of a cuirass\nn02770211\ta porch for the back door\nn02770585\ta handsaw that is stiffened by metal reinforcement along the upper edge\nn02770721\ta long-handled scratcher for scratching your back\nn02770830\ta seat at the back of a vehicle (especially the seat at the back 
of an automobile)\nn02771004\tthe typewriter key used for back spacing\nn02771166\ta second staircase at the rear of a building\nn02771286\ta stay that supports the back of something\nn02771547\t(baseball) a fence or screen (as behind home plate) to prevent the ball from traveling out of the playing field\nn02771750\ta sword with only one cutting edge\nn02772101\ta computer system for making backups\nn02772435\tthe court on which badminton is played\nn02772554\tequipment for playing the game of badminton\nn02772700\ta light long-handled racket used by badminton players\nn02773037\ta flexible container with a single opening\nn02773838\ta portable rectangular container for carrying clothes\nn02774152\ta container used for carrying money and small personal items or accessories (especially by women)\nn02774630\tcases used to carry belongings when traveling\nn02774921\tthe portable equipment and supplies of an army\nn02775039\ta railway car where passengers' bags are carried\nn02775178\tan area in an airport where arriving passengers can collect the luggage that has been carried in the hold of the aircraft\nn02775483\ta tubular wind instrument; the player blows air into a bag and squeezes it out through the drone\nn02775689\tthe outer defensive wall that surrounds the outer courtyard of a castle\nn02775813\tthe outer courtyard of a castle\nn02775897\ta temporary bridge designed for rapid construction\nn02776007\ta large pan that is filled with hot water; smaller pans containing food can be set in the larger pan to keep food warm or to cook food slowly\nn02776205\tsomething used to lure fish or other animals into danger so they can be trapped or killed\nn02776505\ta bright green fabric napped to resemble felt; used to cover gaming tables\nn02776631\ta workplace where baked goods (breads and cakes and pastries) are produced or sold\nn02776825\ta cap that is close-fitting and woolen and covers all of the head but the face\nn02776978\ta stringed instrument that has a 
triangular body and three strings\nn02777100\ta scale for weighing; depends on pull of gravity\nn02777292\ta gymnastic apparatus used by women gymnasts\nn02777402\ta wheel that regulates the rate of movement in a machine; especially a wheel oscillating against the hairspring of a timepiece to regulate its beat\nn02777638\ta cotton knit fabric used for underwear\nn02777734\ta platform projecting from the wall of a building and surrounded by a balustrade or railing or parapet\nn02777927\tan upper floor projecting from the rear over the main floor in an auditorium\nn02778131\tornamented canopy supported by columns or suspended from a roof or projected from a wall (as over an altar)\nn02778294\ta wide (ornamented) belt worn over the right shoulder to support a sword or bugle by the left hip\nn02778456\ta large bundle bound for storage or transport\nn02778588\twire used to make bales\nn02778669\tround object that is hit or thrown or kicked in games\nn02779435\ta spherical object used as a plaything\nn02779609\theavy iron ball attached to a prisoner by a chain\nn02779719\ta joint that can rotate within a socket\nn02779971\tan electrical device for starting and regulating fluorescent and discharge lamps\nn02780315\tbearings containing small metal balls\nn02780445\ta general purpose cartridge having a primer and a ball and a full charge of powder\nn02780588\tfloating ball that controls level in a water tank\nn02780704\ta suit or dress for formal occasions\nn02780815\tvery short skirt worn by ballerinas\nn02781121\tthe most formal gown; worn to a ball\nn02781213\ta moving-coil galvanometer that measures electric charge\nn02781338\ta missile that is guided in the first part of its flight but falls freely as it approaches target\nn02781517\ta physical pendulum consisting of a large mass suspended from a rod; when it is struck by a projectile its displacement is used to measure the projectile's velocity\nn02781764\ta medical instrument that measures the mechanical force of 
cardiac contractions and the amount of blood passing through the heart during a specified period by measuring the recoil of the body as blood is pumped from the ventricles\nn02782093\tlarge tough nonrigid bag filled with gas or heated air\nn02782432\ta bomb carried by a balloon\nn02782602\tany light loose sail\nn02782681\ta box where voters deposit their ballots\nn02782778\ta facility in which ball games are played (especially baseball games)\nn02783035\ta hammer with one round and one flat end; used in working metal\nn02783161\ta pen that has a small metal ball as the point of transfer of ink to paper\nn02783324\tlarge room used mainly for dancing\nn02783459\tany valve that checks flow by the seating of a ball\nn02783900\ta light raft made of balsa\nn02783994\tone of a number of closely spaced supports for a railing\nn02784124\ta ship designed to transport bananas\nn02784998\ta restraint put around something to hold it together\nn02785648\ta piece of soft material that covers and protects an injured part of the body\nn02786058\ttrade name for an adhesive bandage to cover small cuts or blisters\nn02786198\tlarge and brightly colored handkerchief; often used as a neckerchief\nn02786331\ta light cylindrical box for holding light articles of attire (especially hats)\nn02786463\ta decorated dart that is implanted in the neck or shoulders of the bull during a bull fight\nn02786611\ta broad cartridge belt worn over the shoulder by soldiers\nn02786736\ta type of concertina popular in South America\nn02786837\tan endless saw consisting of a toothed metal band that is driven around two wheels\nn02787120\ta large ornate wagon for carrying a musical band\nn02787269\ta metal pipe filled with explosive, used to detonate land mines or to clear a path through barbed wire\nn02787435\tcheap showy jewelry or ornament on clothing\nn02787622\ta stringed instrument of the guitar family that has long neck and circular body\nn02788021\tlong strip of cloth or paper used for decoration or 
advertising\nn02788148\ta railing at the side of a staircase or balcony to prevent people from falling\nn02788386\tan upholstered bench\nn02788462\ta loose fitting jacket; originally worn in India\nn02788572\tbowl for baptismal water\nn02788689\ta rigid piece of metal or wood; usually used as a fastening or obstruction or weapon\nn02789487\ta counter where you can obtain food or drink\nn02790669\ta rack to hold meat for cooking over hot charcoal usually out of doors\nn02790823\tstrong wire with barbs at regular intervals used to prevent passage\nn02790996\ta bar to which heavy discs are attached at each end; used in weightlifting\nn02791124\ta large fixed adjustable chair in which barbers seat their customers\nn02791270\ta shop where men can get their hair cut\nn02791532\ta gun carriage elevated so that the gun can be fired over the parapet\nn02791665\ta tower that is part of a defensive structure (such as a castle)\nn02791795\ta bit for horses that is a solid bar of metal\nn02792409\ta vessel (such as a yacht) that can be chartered without a captain or crew or provisions\nn02792552\ta flatbottom boat for carrying heavy loads (especially on canals)\nn02792948\ta long pole used to propel or guide a barge\nn02793089\tthe second lowest brass wind instrument\nn02793199\ta sailing ship with 3 (or more) masts\nn02793296\ta magnet in the form of a bar with magnetic poles at each end\nn02793414\ta catcher's mask with bars\nn02793495\tan outlying farm building for storing grain or animal feed and housing farm animals\nn02793684\tan opaque adjustable flap on a lamp fixture; used in photography to cut off light from particular areas\nn02793842\tthe large sliding door of a barn\nn02793930\ta yard adjoining a barn\nn02794008\ta recording barometer; automatically records on paper the variations in atmospheric pressure\nn02794156\tan instrument that measures atmospheric pressure\nn02794368\ta knife resembling a cleaver; used in the Philippines\nn02794474\ta horse-drawn carriage 
having four wheels; has an outside seat for the driver and facing inside seats for two couples and a folding top\nn02794664\tan impact printer that uses a bar to carry the type slugs\nn02794779\ta building or group of buildings used to house military personnel\nn02794972\tan elongated tethered balloon or blimp with cables or net suspended from it to deter enemy planes that are flying low\nn02795169\ta cylindrical container that holds liquids\nn02795528\ta tube through which a bullet travels when a gun is fired\nn02795670\ta cheap drinking and dancing establishment\nn02795783\ta knot used for tying fishing leaders together; the ends of the two leaders are wrapped around each other two or three times\nn02795978\ta musical instrument that makes music by rotation of a cylinder studded with pegs\nn02796207\tthe simplest form of vault; a single continuous arch\nn02796318\ta pin for holding women's hair in place\nn02796412\ta barrier (usually thrown up hastily) to impede the advance of an enemy\nn02796623\ta structure or object that impedes free movement\nn02796995\ta room or establishment where alcoholic drinks are served over a counter\nn02797295\ta cart for carrying small loads; has handles and one or more wheels\nn02797535\ta structure or device in which one end is counterbalanced by the other (on the principle of the seesaw)\nn02797692\ta support or foundation\nn02797881\ta place that the runner must touch before scoring\nn02799071\ta ball used in playing baseball\nn02799175\tan implement used in baseball by the batter\nn02799323\ta cap with a bill\nn02799897\tequipment used in playing baseball\nn02800213\tthe handwear used by fielders in playing baseball\nn02800497\tthe lowermost portion of a structure partly or wholly below ground level; often used for storage\nn02800675\tthe ground floor facade or interior in Renaissance architecture\nn02800940\ta shipboard missile system\nn02801047\ta Roman building used for public administration\nn02801184\tan early Christian 
church designed like a Roman basilica; or a Roman Catholic church or cathedral accorded certain privileges\nn02801450\tancient brass cannon\nn02801525\ta bowl-shaped vessel; usually used for holding food or liquids\nn02801823\ta medieval steel helmet\nn02801938\ta container that is usually woven and has handles\nn02802215\thorizontal circular metal hoop supporting a net through which players try to throw the basketball\nn02802426\tan inflated ball used in playing basketball\nn02802544\tthe court on which basketball is played\nn02802721\tsports equipment used in playing basketball\nn02802990\ta cloth woven of two or more threads interlaced to suggest the weave of a basket\nn02803349\tthe member with the lowest range of a family of musical instruments\nn02803539\ta large clarinet whose range is an octave below the B-flat clarinet\nn02803666\ta large drum with two heads; makes a sound of indefinite but very low pitch\nn02803809\ta tenor clarinet; pitched in the key of F below the B-flat clarinet\nn02803934\tlargest and lowest member of the violin family\nn02804123\tthe guitar with six strings that has the lowest pitch\nn02804252\tthe lowest brass wind instrument\nn02804414\ta basket (usually hooded) used as a baby's bed\nn02804515\ta perambulator that resembles a bassinet\nn02804610\ta double-reed instrument; the tenor of the oboe family\nn02805283\ta tube with a rubber bulb used to take up and release melted fat or gravy in order to moisten roasting meat\nn02805845\ta cudgel used to give someone a beating on the soles of the feet\nn02805983\tprojecting part of a rampart or other fortification\nn02806088\ta stronghold into which people could go for shelter during a battle\nn02806379\ta club used for hitting a ball in various games\nn02806530\ta vessel containing liquid in which something is immersed (as to process it or to maintain it at a constant temperature or to lubricate it)\nn02806762\ta wheelchair usually pushed by an attendant, as at a spa\nn02806875\ta 
building containing public baths\nn02806992\ta building containing dressing rooms for bathers\nn02807133\ta tight-fitting cap that keeps hair dry while swimming\nn02807523\ta scented oil added to your bath water\nn02807616\ta loose-fitting robe of towelling; worn after a bath or swim\nn02807731\ta room (as in a residence) containing a bathtub or shower and usually a washbasin and toilet\nn02808185\ta preparation that softens or scents a bath\nn02808304\ta large towel; to dry yourself after a bath\nn02808440\ta relatively large open container that you fill with water and use to wash the body\nn02808829\tnavigable deep diving vessel for underwater exploration\nn02808968\tspherical deep diving apparatus (lowered by a cable) for underwater exploration\nn02809105\ta dyed fabric; a removable wax is used where the dye is not wanted\nn02809241\ta thin plain-weave cotton or linen fabric; used for shirts or dresses\nn02809364\ta thin tapered rod used by a conductor to lead an orchestra or choir\nn02809491\ta hollow cylinder passed from runner to runner in a relay race\nn02809605\ta hollow metal rod that is wielded or twirled by a drum major or drum majorette\nn02809736\ta short staff carried by some officials to symbolize an office or an authority\nn02810139\ta ram used to break down doors of fortified buildings\nn02810270\tan area on a baseball diamond (on either side of home plate) marked by lines within which the batter must stand when at bat\nn02810471\ta device that produces electricity; may have several primary or secondary cells arranged in parallel or series\nn02810782\ta series of stamps operated in one mortar for crushing ores\nn02811059\ta movable screen placed behind home base to catch balls during batting practice\nn02811204\ta glove worn by batters in baseball to give a firmer grip on the bat\nn02811350\ta helmet worn by the batter in baseball\nn02811468\ta broadax used as a weapon\nn02811618\ta cruiser of maximum speed and firepower\nn02811719\ta military 
uniform designed for field service\nn02811936\ta rampart built around the top of a castle with regular gaps for firing arrows or guns\nn02812201\tlarge and heavily armoured warship\nn02812342\tan arrangement of sights that makes possible the rapid aiming of a firearm at short ranges\nn02812631\ta compartment in an aircraft used for some specific purpose\nn02812785\ta compartment on a ship between decks; often used as a hospital\nn02812949\ta knife that can be fixed to the end of a rifle and used as a weapon\nn02813252\tan aromatic liquid originally obtained by distilling the leaves of the bayberry tree with rum\nn02813399\ta window that sticks out from the outside wall of a house\nn02813544\ta shop where a variety of goods are sold\nn02813645\ta street of small shops (especially in Orient)\nn02813752\ta portable rocket launcher used by infantrymen as an antitank weapon\nn02813981\tbattery for supplying a constant positive voltage to the plate of a vacuum tube\nn02814116\tan air gun in which BBs are propelled by compressed air\nn02814338\ta house built on or near a beach\nn02814428\tvery large towel to dry yourself after swimming\nn02814533\ta car that has a long body and rear door with space behind rear seat\nn02814774\tclothing to be worn at a beach\nn02814860\ta tower with a light that gives warning of shoals to passing ships\nn02815478\ta plane with a concave blade for making moulding with beadwork\nn02815749\ta cup (usually without a handle)\nn02815834\ta flatbottomed jar made of glass or plastic; used for chemistry\nn02815950\tlong thick piece of wood or metal or concrete, etc., used in construction\nn02816494\ta balance consisting of a lever with two equal arms and a pan suspended from each arm\nn02816656\ta small cloth bag filled with dried beans; thrown in games\nn02816768\ta small skullcap; formerly worn by schoolboys and college freshmen\nn02817031\ta rotating support placed between moving parts to allow them to move easily\nn02817251\ta rein designed to 
keep the horse's head in the desired position\nn02817386\tany wall supporting a floor or the roof of a building\nn02817516\ttall hat; worn by some British soldiers on ceremonial occasions\nn02817650\tan implement for beating\nn02817799\ta musical instrument that sounds by means of a vibrating reed\nn02818135\ta hat made with the fur of a beaver (or similar material)\nn02818254\ta movable piece of armor on a medieval helmet used to protect the lower face\nn02818687\ta mercury thermometer that measures small differences or changes in temperature\nn02818832\ta piece of furniture that provides a place to sleep\nn02819697\ta foundation of earth or rock supporting a road or railroad track\nn02820085\tan overnight boardinghouse with breakfast\nn02820210\tcoverings that are used on a bed\nn02820556\ta heavy corded fabric similar to corduroy; used for clothing\nn02820675\ta lightweight jacket worn over bedclothes (as when sitting in bed)\nn02821202\ta shallow vessel used by a bedridden patient for defecation and urination\nn02821415\tany of 4 vertical supports at the corners of a bedstead\nn02821543\tbedding rolled up for carrying\nn02821627\ta room used primarily for sleeping\nn02821943\tfurniture intended for use in a bedroom\nn02822064\ta furnished sitting room with sleeping accommodations (and some plumbing)\nn02822220\tdecorative cover for a bed\nn02822399\t(usually plural) one of the springs holding up the mattress of a bed\nn02822579\tthe framework of a bed\nn02822762\ta photograph of a muscular man in minimal attire\nn02822865\ta man-made receptacle that houses a swarm of bees\nn02823124\tan electronic device that generates a series of beeps when the person carrying it is being paged\nn02823335\ta barrel that holds beer\nn02823428\ta bottle that holds beer\nn02823510\ta can that holds beer\nn02823586\ttavern with an outdoor area (usually resembling a garden) where beer and other alcoholic drinks are served\nn02823750\ta relatively large glass for serving 
beer\nn02823848\ta hall or barroom featuring beer and (usually) entertainment\nn02823964\ta drip mat placed under a glass of beer\nn02824058\ta mug intended for serving beer\nn02824152\ta wood or metal bar to which a rope can be secured (as on a ship or in mountain climbing)\nn02824319\ta room (often at the top of a tower) where bells are hung\nn02824448\ta hollow device made of metal that makes a ringing sound when struck\nn02825153\ta round arch resting on corbels\nn02825240\ta stoneware drinking jug with a long neck; decorated with a caricature of Cardinal Bellarmine (17th century)\nn02825442\ttrousers with legs that flare; worn by sailors; absurdly wide hems were fashionable in the 1960s\nn02825657\ta small shelter for bells; has a gable or shed roof\nn02825872\ta foundry where bells are cast\nn02825961\tan extension of a gable that serves as a bell cote\nn02826068\ta bell-shaped glass cover used to protect and display delicate objects or to cover scientific apparatus or to contain gases\nn02826259\ta mechanical device that blows a strong current of air; used to make a fire burn more fiercely or to sound a musical instrument\nn02826459\ta handle or cord that is pulled to ring a doorbell or a servant's bell etc.\nn02826589\ta button that is pushed to ring a bell\nn02826683\ta seat that has a bell shape (on some 18th century chairs)\nn02826812\ta bell-shaped tent\nn02826886\ta tower that supports or shelters a bell\nn02827148\ta cloth band that is worn around the waist (as on infants until the navel has healed)\nn02827606\ta band to tie or buckle around the body (usually at the waist)\nn02828115\tammunition (usually of small caliber) loaded in flexible linked strips for use in a machine gun\nn02828299\tthe buckle used to fasten a belt\nn02828427\tthe material of which belts are made\nn02828884\ta long seat for more than one person\nn02829246\ta clamp used to hold work in place on a workbench\nn02829353\tany of various stops on a workbench against which work can 
be pushed (as while chiseling or planing)\nn02829510\tlathe mounted on a workbench\nn02829596\ta small punch press mounted on a workbench\nn02830157\ta tool for bending\nn02831237\ta cap with no brim or bill; made of soft cloth\nn02831335\ta limousine with a glass partition between the front and back seats\nn02831595\tshort pants that end at the knee\nn02831724\ta bed on a ship or train; usually in tiers\nn02831894\ta broom made of twigs tied together on a long handle\nn02831998\ta refractory-lined furnace used to convert pig iron into steel by the Bessemer process\nn02833040\ta house of worship (especially one for sailors)\nn02833140\ta licensed bookmaker's shop that is not at the race track\nn02833275\ta cyclotron that accelerates protons up to several billion electron volts\nn02833403\ta hand tool consisting of two rules that are hinged together so you can draw or measure angles of any size\nn02833793\tgears that mesh at an angle\nn02834027\tthe ordinary clarinet with a middle range\nn02834397\ttop part of an apron; covering the chest\nn02834506\tan attractive outfit\nn02834642\ta cocked hat with the brim turned up to form two points\nn02834778\ta wheeled vehicle that has two wheels and is moved by foot pedals\nn02835271\ta bicycle with two sets of pedals and two seats\nn02835412\ta chain that transmits the power from the pedals to the rear wheel of a bicycle\nn02835551\ta clip worn around a cyclist's ankles that keeps trousers from becoming caught in the bicycle chain\nn02835724\ta small pump that fills bicycle tires with air\nn02835829\ta rack for parking bicycles\nn02835915\ta seat for the rider of a bicycle\nn02836035\tthe wheel of a bicycle\nn02836174\ta basin for washing genitals and anal area\nn02836268\ta stand to support a corpse or a coffin prior to burial\nn02836392\ta coffin along with its stand\nn02836513\tan interior door\nn02836607\teyeglasses having two focal lengths, one for near vision and the other for far vision\nn02836900\ta reliable and 
deadly 15,000-pound fragmentation bomb that explodes just above ground with a large radius; the largest conventional bomb in existence; used in Afghanistan\nn02837134\tthe large display board at the New York Stock Exchange that reports on stocks traded on the exchange\nn02837567\tthe middle part of a slack rope (as distinguished from its ends)\nn02837789\ta woman's very brief bathing suit\nn02837887\tsmall and tight-fitting underpants; worn by women\nn02838014\twhere the sides of the vessel curve in to form the bottom\nn02838178\teither of two lengthwise fins attached along the outside of a ship's bilge; reduces rolling\nn02838345\ta pump to remove bilgewater\nn02838577\t(nautical) a well where seepage drains to be pumped away\nn02838728\ta brim that projects to the front to shade the eyes\nn02838958\ta long-handled saw with a curved blade\nn02839110\tlarge outdoor signboard\nn02839351\tball used in playing billiards\nn02839592\ta room in which billiards is played\nn02839910\ta container; usually has a lid\nn02840134\tsomething used to tie or bind\nn02840245\tholds loose papers or magazines\nn02840515\ta workshop where books are bound\nn02840619\tthe protective covering on the front, back, and spine of a book\nn02841063\ta plastic bag used to line a trash or garbage bin\nn02841187\ta nonmagnetic housing for a ship's compass (usually in front of the helm)\nn02841315\tan optical instrument designed for simultaneous use by both eyes\nn02841506\ta light microscope adapted to the use of both eyes\nn02841641\ta microchip that uses tiny strands of DNA to latch onto and quickly recognize thousands of genes at a time; intended for use in a biological environment\nn02841847\ta loose one-piece garment worn to protect the wearer against dangerous biological or chemical agents\nn02842133\ta kind of early movie projector\nn02842573\told fashioned airplane; has two wings one above the other\nn02842809\ta switch consisting of a twig or a bundle of twigs from a birch tree; used to 
hit people as punishment\nn02843029\ta canoe made with the bark of a birch tree\nn02843158\tan ornamental basin (usually in a garden) for birds to bathe in\nn02843276\ta cage in which a bird can be kept\nn02843465\ta device for imitating a birdcall\nn02843553\tan outdoor device that supplies food for wild birds\nn02843684\ta shelter for birds\nn02843777\tsmall lead shot for shotgun shells\nn02843909\ta stiff cap with ridges across the crown; worn by Roman Catholic clergy\nn02844056\t(chess) a piece that can be moved diagonally over unoccupied squares of the same color\nn02844214\ta small informal restaurant; serves wine\nn02844307\tthe cutting part of a drill; usually pointed and threaded and is replaceable in a brace or bitstock or drill press\nn02844714\tpiece of metal held in horse's mouth by reins and used to control the horse while riding\nn02845130\ta removable dental appliance that is worn in the palate for diagnostic or therapeutic purposes\nn02845293\ta dental X-ray film that can be held in place by the teeth during radiography\nn02845985\ta protective coating of asphalt and filter used on structural metals that are exposed to weathering\nn02846141\tblack clothing (worn as a sign of mourning)\nn02846260\t(board games) the darker pieces\nn02846511\tsheet of slate; for writing with chalk\nn02846619\tan eraser that removes chalk marks from blackboard\nn02846733\tequipment that records information about the performance of an aircraft during flight\nn02846874\tthe makeup (usually burnt cork) used by a performer in order to imitate a Negro\nn02847461\ta piece of metal covered by leather with a flexible handle; used for hitting people\nn02847631\ta black bow tie worn with a dinner jacket\nn02847852\ta wash that colors a surface black\nn02848118\ta bag that fills with air\nn02848216\tthe flat part of a tool or weapon that (usually) has a cutting edge\nn02848523\tflat surface that rotates and pushes against air or water\nn02848806\tthe part of the skate that slides 
on the ice\nn02848921\ta cartridge containing an explosive charge but no bullet\nn02849154\tbedding that keeps a person warm in bed\nn02849885\ta furnace for smelting of iron from iron oxide ores; combustion is intensified by a blast of air\nn02850060\ta small tube filled with detonating substances; used to detonate high explosives\nn02850358\tlightweight single-breasted jacket; often striped in the colors of a club or school\nn02850732\tan electrically powered mixer with whirling blades that mix or chop or liquefy foods\nn02850950\ta small nonrigid airship used for observation or as a barrage balloon\nn02851099\ta protective covering that keeps things out or hinders sight\nn02851795\ta curve or bend in the road that you cannot see around as you are driving\nn02851939\ta cloth used to cover the eyes\nn02852043\tflashy, ostentatious jewelry\nn02852173\ta light that flashes on and off; used as a signal or to send messages\nn02852360\tpackaging in which a product is sealed between a cardboard backing and clear plastic cover\nn02853016\thousing in a large building that is divided into separate units\nn02853218\tprevents access or progress\nn02853336\ta ship that runs through or around a naval blockade\nn02853745\tpulley blocks with associated rope or cable\nn02853870\ta large bomb used to demolish extensive areas (as a city block)\nn02854378\ta stronghold that is reinforced for protection from enemy fire; with apertures for defensive fire\nn02854532\ta small plane used on end grains of wood\nn02854630\ta motor vehicle equipped to collect blood donations\nn02854739\tunderpants worn by women\nn02854926\ta top worn by women\nn02855089\ta device that produces a current of air\nn02855390\ta burner that mixes air and gas to produce a very hot flame\nn02855701\ta high shoe with laces over the tongue\nn02855793\ta club used as a weapon\nn02855925\tblue clothing\nn02856013\ta blue poker chip with the highest value\nn02856237\ta short musket of wide bore with a flared 
muzzle\nn02856362\ta file with parallel edges\nn02857365\ta structure of boards\nn02857477\ta private house that provides accommodations and meals for paying guests\nn02857644\ta room where a committee meets (such as the board of directors of a company)\nn02857907\tthe boarding that surrounds an ice hockey rink\nn02858304\ta small vessel for travel on water\nn02859184\ta stiff hat made of straw with a flat crown\nn02859343\tpole-handled hook used to pull or push boats\nn02859443\ta shed at the edge of a river or lake; used to store boats\nn02859557\ta seat consisting of a board and a rope; used while working aloft or over the side of a ship\nn02859729\ta train taking passengers to or from a port\nn02859955\ta place where boats are built or maintained or stored\nn02860415\ta winder around which thread or tape or film or other flexible materials can be wound\nn02860640\ta flat wire hairpin whose prongs press tightly together; used to hold bobbed hair in place\nn02860847\ta long racing sled (for 2 or more people) with a steering mechanism\nn02861022\tformerly two short sleds coupled together\nn02861147\twooden ball that is bowled in the Italian game of bocce\nn02861286\ta small Hispanic shop selling wine and groceries\nn02861387\tpart of a dress above the waist\nn02861509\ta blunt needle for threading ribbon through loops\nn02861658\ta small sharp-pointed tool for punching holes in leather or fabric\nn02861777\tformerly a long hairpin; usually with an ornamental head\nn02861886\tthe external structure of a vehicle\nn02862048\tarmor that protects the wearer's whole body\nn02862916\tlotion applied to the body after bathing\nn02863014\ta one-piece tight-fitting undergarment for women that covers the torso (and may have sleeves and legs)\nn02863176\tplethysmograph consisting of a chamber surrounding the entire body; used in studies of respiration\nn02863340\ta pad worn by hockey goalkeeper\nn02863426\tthe exterior body of a motor vehicle\nn02863536\tan automatic 
double-barreled antiaircraft gun\nn02863638\tan unidentified (and possibly enemy) aircraft\nn02863750\tsealed vessel where water is converted to steam\nn02864122\ta nuclear reactor that uses water as a coolant and moderator; the water boils in the reactor core and the steam produced can drive a steam turbine\nn02864504\ta short jacket; worn mostly by women\nn02864593\ta strong post (as on a wharf or quay or ship for attaching mooring lines)\nn02864987\tlong heavy knife with a single edge; of Philippine origin\nn02865351\ta cord fastened around the neck with an ornamental clasp and worn as a necktie\nn02865665\ta screw that screws into a nut to form a fastener\nn02865931\tthe part of a lock that is engaged or withdrawn with a key\nn02866106\ta sliding bar in a breech-loading firearm that ejects an empty cartridge and replaces it and closes the breech\nn02866386\tan implement for cutting bolts\nn02866578\tan explosive device fused to explode under specific conditions\nn02867401\ta twilled fabric used for dresses; the warp is silk and the weft is worsted\nn02867592\tstrong sealed vessel for measuring heat of combustion\nn02867715\ta military aircraft that drops bombs during flight\nn02867966\ta jacket gathered into a band at the waist\nn02868240\tone of the smaller bombs that are released from a cluster bomb\nn02868429\ta device on an aircraft for carrying bombs\nn02868546\tan explosive bomb or artillery shell\nn02868638\ta chamber (often underground) reinforced against bombing and provided with food and living facilities; used during air raids\nn02868975\ta small porous bowl made of bone ash used in assaying to separate precious metals from e.g. 
lead\nn02869155\tfine porcelain that contains bone ash\nn02869249\ta percussion instrument consisting of a pair of hollow pieces of wood or bone (usually held between the thumb and fingers) that are made to click together (as by Spanish dancers) in rhythm with the dance\nn02869563\tany wheeled vehicle that is dilapidated and uncomfortable\nn02869737\ta small drum; played with the hands\nn02869837\ta hat tied under the chin\nn02870526\ta number of sheets (ticket or stamps etc.) bound together on one edge\nn02870676\ta bag in which students carry their books\nn02870772\ta bookbinder's workshop; a place for binding books\nn02870880\ta piece of furniture with shelves for storing books\nn02871005\ta support placed at the end of a row of books to keep them upright (on a shelf or table)\nn02871147\ta marker (a piece of paper or ribbon) placed between the pages of a book to mark the reader's place\nn02871314\ta van with shelves of books; serves as a mobile library or bookstore\nn02871439\ta shelf on which to keep books\nn02871525\ta shop where books are sold\nn02871631\tany of various more-or-less horizontal spars or poles used to extend the foot of a sail or for handling cargo or in mooring\nn02871824\ta pole carrying an overhead microphone projected over a film or tv set\nn02871963\ta curved piece of wood; when properly thrown will return to thrower\nn02872333\tthe first stage of a multistage rocket\nn02872529\tan amplifier for restoring the strength of a transmitted signal\nn02872752\tfootwear that covers the whole foot and lower leg\nn02873520\tprotective casing for something that resembles a leg\nn02873623\tcamp for training military recruits\nn02873733\ta slipper that is soft and wool (for babies)\nn02873839\tsmall area set off by walls for special use\nn02874086\ta small shop at a fair; for selling goods or entertainment\nn02874214\ta table (in a restaurant or bar) surrounded by two high-backed benches\nn02874336\tprotective stockings worn with or in place of 
boots\nn02874442\thas V-shaped notch for pulling off boots\nn02874537\ta long lace for fastening boots\nn02874642\tthe part of a boot above the instep\nn02874750\ta strap that is looped and sewn to the top of a boot for pulling it on\nn02875436\ta drill for penetrating rock\nn02875626\tan ionization chamber lined with boron or filled with boron trifluoride gas for counting low velocity neutrons\nn02875948\tformerly a British reform school for youths considered too young to send to prison\nn02876084\tcloth that covers the chest or breasts\nn02876326\ta rocking chair that has a high spindle back and a decorative top panel\nn02876457\ta wine bottle made of leather\nn02876657\ta glass or plastic vessel used for storing drinks or other liquids; typically cylindrical without handles and with a narrow neck that can be plugged or capped\nn02877266\ta vessel fitted with a flexible teat and filled with milk or formula; used as a substitute for breast feeding infants and very young children\nn02877513\ta place where bottles can be deposited for recycling\nn02877642\ta cylindrical brush on a thin shaft that is used to clean bottles\nn02877765\ta cap that seals a bottle\nn02877962\tan opener for removing caps or corks from bottles\nn02878107\ta plant where beverages are put into bottles with caps\nn02878222\ta cargo ship\nn02878425\ta fabric of uneven yarn that has an uneven knobby effect\nn02878534\ta lady's bedroom or private sitting room\nn02878628\tan inlaid furniture decoration; tortoiseshell and yellow and white metal form scrolls in cabinetwork\nn02878796\tan antipersonnel land mine\nn02879087\tan arrangement of flowers that is usually given as a present\nn02879309\ta shop that sells women's clothes and jewelry\nn02879422\ta flower that is worn in a buttonhole\nn02879517\ta slightly curved piece of resilient wood with taut horsehair strands; used in playing certain stringed instruments\nn02879718\ta weapon for shooting arrows, composed of a curved piece of resilient wood 
with a taut cord to propel the arrow\nn02880189\ta knot with two loops and loose ends; used to tie shoelaces\nn02880393\ta weapon consisting of arrows and the bow to shoot them\nn02880546\tstringed instruments that are played with a bow\nn02880842\ta stout hunting knife with a single edge\nn02880940\ta dish that is round and open at the top for serving foods\nn02881193\ta round vessel that is open at the top; used chiefly for holding food or liquids\nn02881546\ta wooden ball (with flattened sides so that it rolls on a curved course) used in the game of lawn bowling\nn02881757\ta felt hat that is round and hard with a narrow brim\nn02881906\ta loop knot that neither slips nor jams\nn02882190\ta building that contains several alleys for bowling\nn02882301\ta large ball with finger holes used in the sport of bowling\nn02882483\tequipment used in bowling\nn02882647\ta club-shaped wooden object used in bowling; set up in triangular groups of ten as the target\nn02882894\ta special shoe worn when bowling\nn02883004\ta spar projecting from the bow of a vessel\nn02883101\tthe string of an archer's bow\nn02883205\ta man's tie that ties in a bow\nn02883344\ta (usually rectangular) container; may have a lid\nn02884225\tprivate area in a theater or grandstand where a small group can watch the performance\nn02884450\tthe driver's seat on a coach\nn02884859\ta beam built up from boards; has a hollow rectangular cross section\nn02884994\ta simple camera shaped like a rectangular box\nn02885108\ta freight car with roof and sliding doors in the sides\nn02885233\ta short coat that hangs loosely from the shoulders\nn02885338\tequipment used in boxing\nn02885462\tboxing equipment consisting of big and padded coverings for the fists of the fighters; worn for the sport of boxing\nn02885882\tthe office where tickets of admission are sold\nn02886321\ta coiled bedspring in a frame that is covered with cloth\nn02886434\ta wrench with a closed loop (a socket) that fits over a nut or bolt 
head\nn02886599\ta structural member used to stiffen a framework\nn02887079\tan appliance that corrects dental irregularities\nn02887209\ta support that steadies or strengthens something else\nn02887489\telastic straps that hold trousers up (usually used in the plural)\nn02887832\ta drill consisting of a bit and a brace to hold and turn it\nn02887970\tjewelry worn around the wrist for decoration\nn02888270\ta protective covering for the wrist or arm that is used in archery and fencing and other sports\nn02888429\ta wrench shaped like a brace (has a handle shaped like a crank) and a socket head\nn02888569\ta support projecting from a wall (as to hold a shelf)\nn02888898\tan awl for making small holes for brads or small screws\nn02889425\ta restraint used to slow or stop a vehicle\nn02889646\tanything that slows or hinders a process\nn02889856\ta band that can be tightened around a shaft to stop its rotation\nn02889996\ta cylinder that contains brake fluid that is compressed by a piston\nn02890188\ta disk or plate that is fixed to the wheel; pressure is applied to it by the brake pads\nn02890351\ta hollow cast-iron cylinder attached to the wheel that forms part of the brakes\nn02890513\tthe lining on the brake shoes that comes in contact with the brake drum\nn02890662\tone of the pads that apply friction to both sides of the brake disk\nn02890804\tfoot pedal that moves a piston in the master brake cylinder\nn02890940\ta restraint provided when the brake linings are moved hydraulically against the brake drum to retard the wheel's rotation\nn02891188\ta braking device consisting of a combination of interacting parts that work to slow a motor vehicle\nn02891788\ta wind instrument that consists of a brass tube (usually of variable length) that is blown by means of a cup-shaped or funnel-shaped mouthpiece\nn02892201\ta memorial made of brass\nn02892304\tan ornament or utensil made of brass\nn02892392\tarmor plate that protects the arm\nn02892499\ta small restaurant 
serving beer and wine as well as food; usually cheap\nn02892626\t(formerly) a golfing wood with a face more elevated that a driver but less than a spoon\nn02892767\tan undergarment worn by women to support their breasts\nn02892948\ta small metal weapon; worn over the knuckles on the back of the hand\nn02893269\ta partition (often temporary) of planks or cloth that is used to control ventilation in a mine\nn02893418\tlarge metal container in which coal or charcoal is burned; warms people who must stay outside for long times\nn02893608\ta basket for serving bread\nn02893692\ta container used to keep bread or cake in\nn02893941\ta knife used to cut bread\nn02894024\tan article that is fragile and easily broken\nn02894158\ta place for light meals (usually near a kitchen)\nn02894337\ta table where breakfast is eaten\nn02894605\ta protective structure of stone or concrete; extends from shore into the water to prevent a beach from washing away\nn02894847\ta portable drill with a plate that is pressed against the chest to force the drill point into the work\nn02895008\tan implant for cosmetic purposes to replace a breast that has been surgically removed\nn02895154\tarmor plate that protects the chest; the front part of a cuirass\nn02895328\ta pocket inside of a man's coat\nn02895438\ta device that measures chemicals (especially the alcohol content) in a person's expired breath\nn02896074\ta metal block in breech-loading firearms that is withdrawn to insert a cartridge and replaced to close the breech before firing\nn02896294\ta garment that provides covering for the loins\nn02896442\ttrousers ending above the knee\nn02896694\ta life buoy in the form of a ring with short breeches for support; used to transfer people from a ship\nn02896856\ta gun that is loaded at the breech\nn02896949\ta nuclear reactor that produces more fissile material than it burns\nn02897097\ta submachine gun operated by gas pressure; used by the British in World War II\nn02897389\ta combination 
brewery and restaurant; beer is brewed for consumption on the premises and served along with food\nn02897820\trectangular block of clay baked by the sun or in a kiln; used as a building or paving material\nn02898093\ta kiln for making bricks\nn02898173\ta hammer used in laying bricks\nn02898269\ta trowel used in masonry\nn02898369\tmasonry done with bricks and mortar\nn02898585\ta gown worn by the bride at a wedding\nn02898711\ta structure that allows people or vehicles to cross an obstacle such as a river or canal or railway etc.\nn02899439\tthe link between two lenses; rests on the nose\nn02900160\theadgear for a horse; includes a headstall and bit and reins to give the rider or driver control\nn02900459\ta path suitable for riding or leading horses (but not for cars)\nn02900594\ta bit resembling a snaffle bit; used with a separate curb\nn02900705\ta case with a handle; for carrying papers or files or books\nn02900857\ta bomb consisting of an explosive and timer hidden inside a briefcase\nn02900987\ta portable computer housed in a box that resembles a briefcase\nn02901114\tshort tight-fitting underpants (trade name Jockey shorts)\nn02901259\ta penal institution (especially on board a ship)\nn02901377\ttwo-masted sailing vessel square-rigged on both masts\nn02901481\ta medieval coat of chain mail consisting of metal rings sewn onto leather or cloth\nn02901620\ttwo-masted sailing vessel square-rigged on the foremast and fore-and-aft rigged on the mainmast\nn02901793\ta pomade to make the hair manageable and lustrous\nn02901901\ta code name for a small computerized heat-seeking missile that was supposed to intercept and destroy enemy missiles\nn02902079\ta circular projection that sticks outward from the crown of a hat\nn02902687\ta brush that is made with the short stiff hairs of an animal or plant\nn02902816\tinformal term for breeches\nn02902916\tan arrow with a wide barbed head\nn02903006\ta large ax with a broad cutting blade\nn02903126\ta small spit or 
skewer\nn02903204\ta mechanical device for scattering something (seed or fertilizer or sand etc.) in all directions\nn02903727\ta closely woven silk or synthetic fabric with a narrow crosswise rib\nn02903852\ta densely textured woolen fabric with a lustrous finish\nn02904109\ta short-handled hatchet with a broad blade opposite a hammerhead\nn02904233\ta carpet woven on a wide loom to obviate the need for seams\nn02904505\tall of the armament that is fired from one side of a warship\nn02904640\ta sword with a broad blade and (usually) two cutting edges; used to cut rather than stab\nn02904803\tthick heavy expensive material with a raised pattern\nn02904927\ta thick and heavy shoe\nn02905036\tan oven or part of a stove used for broiling\nn02905152\tan arch with a gap at the apex; the gap is usually filled with some decoration\nn02905886\ta slender tubular instrument used to examine the bronchial tubes\nn02906734\ta cleaning implement for sweeping; bundle of straws or twigs attached to a long handle\nn02906963\ta small room for storing brooms and other cleaning equipment\nn02907082\tthe handle of a broom\nn02907296\tlight carriage; pulled by a single horse\nn02907391\ta portable .30 caliber automatic rifle operated by gas pressure and fed by cartridges from a magazine; used by United States troops in World War I and in World War II and in the Korean War\nn02907656\ta belt-fed machine gun capable of firing more than 500 rounds per minute; used by United States troops in World War II and the Korean War\nn02907873\ta row house built of brownstone; reddish brown in color\nn02908123\ta woman's short housecoat or wrapper\nn02908217\tan implement that has hairs or bristles firmly set into a handle\nn02908773\ta carpet with a strong linen warp and a heavy pile of colored woolen yarns drawn up in uncut loops to form a pattern\nn02908951\tfine lace with a raised or applique design\nn02909053\ta dome-shaped covering made of transparent glass or plastic\nn02909165\tan instrument 
that records the tracks of ionizing particles\nn02909285\ta kind of ink-jet printer\nn02909706\tan open horse-drawn carriage with four wheels; has a seat attached to a flexible board between the two axles\nn02909870\ta roughly cylindrical vessel that is open at the top\nn02910145\ta low single seat as in cars or planes\nn02910241\t(formerly) a cheap saloon selling liquor by the bucket\nn02910353\tfastener that fastens together two ends of a belt or strap; often has loose prong\nn02910542\ta coarse cotton fabric stiffened with glue; used in bookbinding and to stiffen clothing\nn02910701\ta saw that is set in a frame in the shape of an H; used with both hands to cut wood that is held in a sawbuck\nn02910864\tbreeches made of buckskin\nn02910964\tan implement consisting of soft material mounted on a block; used for polishing (as in manicuring)\nn02911332\ta power tool used to buff surfaces\nn02911485\t(computer science) a part of RAM used for temporary storage of data that is waiting to be sent to a device; used to compensate for differences in the rate of flow of data between components of a computer system\nn02912065\ta piece of furniture that stands at the side of a dining room; has shelves and drawers\nn02912319\ta wheel that is covered with soft material\nn02912557\ta small lightweight carriage; drawn by a single horse\nn02912894\ta brass instrument without valves; used for military calls and fanfares\nn02913152\ta structure that has a roof and walls and stands more or less permanently in one place\nn02914991\ta whole structure (as a building) made up of interconnected or related structures\nn02915904\ta clip with a spring that closes the metal jaws\nn02916065\ta wrench designed to provide a firm grip on something\nn02916179\tlarge powerful tractor; a large blade in front flattens areas of ground\nn02916350\ta projectile that is fired from a gun\nn02916936\ta vest capable of resisting the impact of a bullet\nn02917067\ta high-speed passenger train\nn02917377\ta 
portable loudspeaker with built-in microphone and amplifier\nn02917521\tgold or silver in bars or ingots\nn02917607\ta small carpenter's plane with the cutting edge near the front\nn02917742\ta large cell where prisoners (people awaiting trial or sentence or refugees or illegal immigrants) are confined together temporarily\nn02917964\ta place on a baseball field where relief pitchers can warm up during a game\nn02918112\ta stadium where bullfights take place\nn02918330\ta fencelike structure around a deck (usually plural)\nn02918455\ta small boat that ferries supplies and commodities for sale to a larger ship at anchor\nn02918595\ta mechanical device consisting of bars at either end of a vehicle to absorb shock and prevent serious damage\nn02918831\ta glass filled to the brim (especially as a toast)\nn02918964\ta small low-powered electrically powered vehicle driven on a special platform where there are many others to be dodged\nn02919148\tvertical bars attached to a bumper to prevent locking bumpers with another vehicle\nn02919308\ta jack for lifting a motor vehicle by the bumper\nn02919414\ta package of several things tied together for carrying or storing\nn02919648\ta plug used to close a hole in a barrel or flask\nn02919792\ta small house with a single story\nn02919890\tan elasticized rope\nn02919976\ta hole in a barrel or cask; used to fill or empty it\nn02920083\ta rough bed (as at a campsite)\nn02920164\ta long trough for feeding cattle\nn02920259\tbeds built one above the other\nn02920369\ta hazard on a golf course\nn02920503\ta fortification of earth; mostly or entirely below ground\nn02920658\ta large container for storing fuel\nn02921029\ta gas burner used in laboratories; has an air valve to regulate the mixture of gas and air\nn02921195\ta loosely woven fabric used for flags, etc.\nn02921292\tsmall bit used in dentistry or surgery\nn02921406\ta lightweight belted raincoat typically made of tan gabardine with a distinctive tartan lining; named for the 
original manufacturer\nn02921592\tmeasuring instrument consisting of a graduated glass tube with a tap at the bottom; used for titration\nn02921756\ta warning device that is tripped off by the occurrence of a burglary\nn02921884\ta chamber that is used as a grave\nn02922159\tcloth used to cover a corpse in preparation for burial\nn02922292\t(archeology) a heap of earth placed over prehistoric tombs\nn02922461\ta chisel of tempered steel with a sharp point; used for engraving\nn02922578\ta loose garment (usually with veiled holes for the eyes) worn by Muslim women especially in India and Pakistan\nn02922798\tcoarse jute fabric\nn02922877\ta bag into which secret documents are placed before being burned\nn02923129\tan apparatus for burning fuel (or refuse)\nn02923535\ta long hooded cloak woven of wool in one piece; worn by Arabs and Moors\nn02923682\ta fully automatic pistol; a small submachine gun\nn02923915\trotary file for smoothing rough edges left on a workpiece\nn02924116\ta vehicle carrying many passengers; used for public transport\nn02925009\ta basket large enough to hold a bushel\nn02925107\ta cylindrical metal lining used to reduce friction\nn02925385\ta loose fitting jacket; resembles a shirt with four patch pockets and a belt\nn02925519\ta suit of clothes traditionally worn by businessmen\nn02925666\ta boot reaching halfway up to the knee\nn02926426\ta close-fitting and strapless top without sleeves that is worn by women either as lingerie or for evening dress\nn02926591\ta framework worn at the back below the waist for giving fullness to a woman's skirt\nn02927053\ta large sharp knife for cutting or trimming meat\nn02927161\ta shop in which meat and poultry (and sometimes fish) are sold\nn02927764\ta small dish (often with a cover) for holding butter at the table\nn02927887\ta valve in a carburetor that consists of a disc that turns and acts as a throttle\nn02928049\ta small knife with a dull blade; for cutting or spreading butter\nn02928299\ta hinge 
mortised flush into the edge of the door and jamb\nn02928413\ta joint made by fastening ends together without overlapping\nn02928608\ta round fastener sewn to shirts and coats etc to fit through buttonholes\nn02929184\ta hook for pulling a button through a buttonhole\nn02929289\ta support usually of stone or brick; supports the wall of a building\nn02929462\ta blunt arrow without a barb; an arrow used for target practice\nn02929582\ta butt joint that is welded\nn02929923\ta small jet-propelled winged missile that carries a bomb\nn02930080\ta signaling device that makes a buzzing sound\nn02930214\ttrademark for men's underwear\nn02930339\ta capacitor that provides low impedance over certain (high) frequencies\nn02930645\ta side road little traveled (as in the countryside)\nn02930766\ta car driven by a person whose job is to take passengers where they want to go in exchange for money\nn02931013\tsmall two-wheeled horse-drawn carriage; with two seats and a folding hood\nn02931148\ta compartment at the front of a motor vehicle or locomotive where driver sits\nn02931294\ta small tent used as a dressing room beside the sea or a swimming pool\nn02931417\ta spot that is open late at night and that provides entertainment (as singers or dancers) as well as dancing and food and drink\nn02931836\ta heavy wooden pole (such as the trunk of a young fir) that is tossed as a test of strength (in the Highlands of northern Scotland)\nn02932019\tthe enclosed compartment of an aircraft or spacecraft where passengers are carried\nn02932400\ta small house built of wood; usually in a wooded area\nn02932523\ta car on a freight train for use of the train crew; usually the last car on the train\nn02932693\ta class of accommodations on a ship or train or plane that are less expensive than first class accommodations\nn02932891\ta large motorboat that has a cabin and plumbing and other conveniences necessary for living on board\nn02933112\ta piece of furniture resembling a cupboard with doors 
and shelves and drawers; for storage or display\nn02933340\thousing for electronic instruments, as radio or television\nn02933462\ta storage compartment for clothes and valuables; usually it has a lock\nn02933649\twoodwork finished by hand by a cabinetmaker\nn02933750\ta liner with cabins for passengers\nn02933990\ta television system that transmits over cables\nn02934168\ta conductor for transmitting electrical or optical signals or electric power\nn02934451\ta conveyance for passengers or freight on a cable railway\nn02935017\t(computer science) RAM memory that is set aside as a specialized buffer storage that is continually updated; used to optimize data transfers between system elements with different characteristics\nn02935387\ta can for storing tea\nn02935490\tan atomic clock based on the energy difference between two states of the caesium nucleus in a magnetic field\nn02935658\ta small restaurant where drinks and snacks are sold\nn02935891\ta restaurant where you serve yourself and pay a cashier\nn02936176\ta tray for carrying your food in a cafeteria\nn02936281\tinformal British term for a cafe\nn02936402\ta (cotton or silk) cloak with full sleeves and sash reaching down to the ankles; worn by men in the Levant\nn02936570\ta woman's dress style that imitates the caftan cloaks worn by men in the Near East\nn02936714\tan enclosure made or wire or metal bars in which birds or animals can be kept\nn02936921\tthe net that is the goal in ice hockey\nn02937010\tlightweight parka; waterproof\nn02937336\ta two-wheeled military vehicle carrying artillery ammunition\nn02937958\tthe folding hood of a horse-drawn carriage\nn02938218\ta shoe covering the ankle; worn by ancient Romans\nn02938321\ta water-base paint containing zinc oxide and glue and coloring; used as a wash for walls and ceilings\nn02938886\ta small machine that is used for mathematical calculations\nn02939185\ta very large pot that is used for boiling\nn02939763\tcoarse cloth with a bright 
print\nn02939866\tan instrument for measuring the distance between two points (often used in the plural)\nn02940289\ta bulletin board backstage in a theater\nn02940385\ta center equipped to handle a large volume of telephone calls (especially for taking orders or serving customers)\nn02940570\ta small display that will show you the telephone number of the party calling you\nn02940706\ta musical instrument consisting of a series of steam whistles played from a keyboard\nn02941095\ta measuring instrument that determines quantities of heat\nn02941228\ta high-crowned black cap (usually made of felt or sheepskin) worn by men in Turkey and Iran and the Caucasus\nn02941845\ta medieval hood of mail suspended from a basinet to protect the head and neck\nn02942015\tan arch with a straight horizontal extrados and a slightly arched intrados\nn02942147\ta finely woven white linen\nn02942349\ta portable television camera and videocassette recorder\nn02942460\ta soft tan cloth made with the hair of a camel\nn02942699\tequipment for taking photographs (usually consisting of a lightproof box with a lens at one end and light-sensitive film at the other)\nn02943241\ta lens that focuses the image in a camera\nn02943465\tan optical device consisting of an attachment that enables an observer to view simultaneously the image and a drawing surface for sketching it\nn02943686\ta darkened enclosure in which images of outside objects are projected through a small aperture or lens onto a facing surface\nn02943871\ta tripod used to support a camera\nn02943964\ta loose shirt or tunic; originally worn in the Middle Ages\nn02944075\ta short negligee\nn02944146\ta short sleeveless undergarment for women\nn02944256\ta fabric of Asian origin; originally made of silk and camel's hair\nn02944459\tdevice or stratagem for concealment or deceit\nn02944579\tfabric dyed with splotches of green and brown and black and tan; intended to make the wearer of a garment made of this fabric hard to distinguish from 
the background\nn02944826\ttemporary living quarters specially built by the army for soldiers\nn02945161\ttemporary lodgings in the country for travelers or vacationers\nn02945813\tshelter for persons displaced by war or political oppression or for religious beliefs\nn02945964\ta broad-brimmed felt hat with a high crown; formerly worn by the United States Army and Marine personnel\nn02946127\ta bell tower; usually stands alone unattached to a building\nn02946270\ta light folding chair\nn02946348\ta recreational vehicle equipped for camping out while traveling\nn02946509\ta trailer equipped for occupancy (especially for holiday trips)\nn02946753\ta folding stool\nn02946824\thas cams attached to it\nn02946921\tairtight sealed metal container for food or drink or paint etc.\nn02947212\tlong and narrow strip of water made for boats or for irrigation\nn02947660\ta long boat that carries freight and is narrow enough to be used in canals\nn02947818\tbranched candlestick; ornamental; has several lights\nn02947977\ta miniature camera with a fast lens\nn02948072\tstick of wax with a wick in the middle\nn02948293\ta bowling pin that is thin by comparison with a tenpin\nn02948403\tan implement with a small cup at the end of a handle; used to extinguish the flame of a candle\nn02948557\ta holder with sockets for candles\nn02948834\tloops of soft yarn are cut to give a tufted pattern\nn02948942\ta thermometer used to determine the temperature of candy syrups during cooking\nn02949084\ta stiff switch used to hit students as punishment\nn02949202\ta stick that people can lean on to help them walk\nn02949356\tan instrument of punishment formerly used in China for petty criminals; consists of a heavy wooden collar enclosing the neck and arms\nn02949542\tmetal container for storing dry foods such as tea or flour\nn02950018\ta factory where food is canned\nn02950120\ta small can\nn02950186\ta wooden bucket\nn02950256\ta large artillery gun that is usually on wheels\nn02950482\theavy 
automatic gun fired from an airplane\nn02950632\t(Middle Ages) a cylindrical piece of armor plate to protect the arm\nn02950826\theavy gun fired from a tank\nn02950943\ta solid projectile that in former times was fired from a cannon\nn02951358\tsmall and light boat; pointed at both ends; propelled with a paddle\nn02951585\ta device for cutting cans open\nn02951703\ta jar used in ancient Egypt to contain entrails of an embalmed body\nn02951843\ta covering (usually of cloth) that serves as a roof to shelter an area from the weather\nn02952109\tthe umbrellalike part of a parachute that fills with air\nn02952237\tthe transparent covering of an aircraft cockpit\nn02952374\ta flask for carrying water; used by soldiers or travelers\nn02952485\trestaurant in a factory; where workers can eat\nn02952585\ta recreation room in an institution\nn02952674\ta restaurant outside; often for soldiers or policemen\nn02952798\tsells food and personal items to personnel at an institution or school or camp etc.\nn02952935\ta peavey having a hook instead of a spike; used for handling logs\nn02953056\tprojecting horizontal beam fixed at one end only\nn02953197\tbridge constructed of two cantilevers that meet in the middle\nn02953455\tthe back of a saddle seat\nn02953552\ta soft thick crinkled dress crepe; heavier than crepe de Chine\nn02953673\ta heavy, closely woven fabric (used for clothing or chairs or sails or tents)\nn02953850\tthe mat that forms the floor of the ring in which boxers or professional wrestlers compete\nn02954163\ta tent made of canvas fabric\nn02954340\ta tight-fitting headdress\nn02954938\ta top (as for a bottle)\nn02955065\tsomething serving as a cover or protection\nn02955247\tan electrical device characterized by its capacity to store an electric charge\nn02955540\tstable gear consisting of a decorated covering for a horse, especially (formerly) for a warhorse\nn02955767\ta sleeveless garment like a cloak but shorter\nn02956393\ta warship of the first rank in size 
and armament\nn02956699\ta building occupied by a state legislature\nn02956795\ta bottle opener to pry off caps\nn02956883\ta long cloak with a hood that can be pulled over the head\nn02957008\ta long overcoat with a hood that can be pulled over the head\nn02957135\ta threaded screw for machine parts; screws into a tapped hole\nn02957252\ta windlass rotated in a horizontal plane around a vertical axis; used on ships for weighing anchor or raising heavy sails\nn02957427\ta stone that forms the top of wall or building\nn02957755\ta small container\nn02957862\ta wooden armchair with a saddle seat and a low back that has vertical spindles\nn02958343\ta motor vehicle with four wheels; usually propelled by an internal combustion engine\nn02959942\ta wheeled vehicle adapted to the rails of railroad\nn02960352\twhere passengers ride up and down\nn02960690\tan oblong metal ring with a spring clip; used in mountaineering to attach a rope to a piton or to connect two ropes\nn02960903\ta bottle with a stopper; for serving wine or water\nn02961035\tan inn in some eastern countries with a large courtyard that provides accommodation for caravans\nn02961225\ta lead-acid storage battery in a motor vehicle; usually a 12-volt battery of six cells; the heart of the car's electrical system\nn02961451\tlight automatic rifle\nn02961544\ta bomb placed in a car and wired to explode when the ignition is started or by remote control or by a timing device\nn02961947\thas carbon electrodes\nn02962061\ta large bottle for holding corrosive liquids; usually cushioned in a special container\nn02962200\tmixes air with gasoline vapor prior to explosion\nn02962414\ta trailer that can be loaded with new cars for delivery to sales agencies\nn02962843\ta small case for carrying business cards\nn02962938\ta piece of electronic equipment for continual observation of the function of the heart\nn02963159\tknitted jacket that is fastened up the front with buttons or a zipper\nn02963302\tan alphabetical 
listing of items (e.g., books in a library) with a separate card for each item\nn02963503\tmedical instrument that records electric currents associated with contractions of the heart\nn02963692\ta directional microphone with a cardioid pattern of sensitivity\nn02963821\tthe door of a car\nn02963987\ta room for gambling on card games\nn02964075\ta small light table with folding legs; can be folded for storage\nn02964196\ta table for playing cards (as in a casino)\nn02964295\ta ferry that transports motor vehicles\nn02964634\tthe space in a ship or aircraft for storing cargo\nn02964843\ta large container for freight\nn02964934\tdoor used to load or unload cargo\nn02965024\thatch opening into the cargo compartment\nn02965122\ta helicopter that carries cargo\nn02965216\ta liner that carries cargo\nn02965300\ta ship designed to carry cargo\nn02965529\tset of bells hung in a bell tower\nn02965783\ta mirror that the driver of a car can use\nn02966068\ta luxurious carriage suitable for nobility in the 16th and 17th century\nn02966193\ta large, rotating machine with seats for children to ride or amusement\nn02966545\ta hammer with a cleft at one end for pulling nails\nn02966687\ta set of carpenter's tools\nn02966786\ta straight bar of light metal with a spirit level in it\nn02966942\ta short-handled mallet with a wooden head used to strike a chisel or wedge\nn02967081\ta rule used by a carpenter\nn02967170\ta steel square used by carpenters; larger than a try square\nn02967294\ttraveling bag made of carpet; widely used in 19th century\nn02967407\timplement for beating dust out of carpets\nn02967540\ta loom for weaving carpeting\nn02967626\ta pad placed under a carpet\nn02967782\ta cleaning implement with revolving brushes that pick up dirt as the implement is pushed over a carpet\nn02967991\tused to nail down carpets\nn02968074\tgarage for one or two cars consisting of a flat roof supported on poles\nn02968210\ta large galleon sailed in the Mediterranean as a 
merchantman\nn02968333\tsmall individual study area in a library\nn02968473\ta vehicle with wheels drawn by one or more horses\nn02969010\ta machine part that carries something else\nn02969163\ta roundheaded bolt for timber; threaded along part of the shank; inserted into holes already drilled\nn02969323\tone of the two sides of a motorway where traffic travels in one direction only usually in two or three lanes\nn02969527\ta wrench designed for use with carriage bolts\nn02969634\ta knot used to connect the ends of two large ropes or hawsers\nn02969886\ta rack attached to a vehicle; for carrying luggage or skis or the like\nn02970408\ta capacious bag or basket\nn02970534\tbox-shaped baby bed with handles (for a baby to sleep in while being carried)\nn02970685\ta seat in a car\nn02970849\ta heavy open wagon usually having two wheels and drawn by an animal\nn02971167\ta tire consisting of a rubber ring around the rim of an automobile wheel\nn02971356\ta box made of cardboard; opens by flaps on top\nn02971473\ta cartridge (usually with paper casing)\nn02971579\ta train that transports passengers and their automobiles\nn02971691\tammunition consisting of a cylindrical casing containing an explosive charge and a bullet; fired from a rifle or handgun\nn02971940\tan electro-acoustic transducer that is the part of the arm of a record player that holds the needle and that is removable\nn02972397\ta broad belt with loops or pockets for holding ammunition\nn02972714\ta mechanism in a firearm that pulls an empty shell case out of the chamber and passes it to the ejector\nn02972934\ta fuse cased in a tube\nn02973017\ta metal frame or container holding cartridges; can be inserted into an automatic gun\nn02973236\ta wheel that has wooden spokes and a metal rim\nn02973805\ta large fork used in carving cooked meat\nn02973904\ta large knife used to carve cooked meat\nn02974003\ta wheel that has a tire and rim and hubcap; used to propel the car\nn02974348\ta supporting column carved 
in the shape of a person\nn02974454\tan apparatus used to liquefy air or oxygen etc.\nn02974565\ta number of transformers in series; provides a high-voltage source\nn02974697\ta portable container for carrying several objects\nn02975212\ta glass container used to store and display items in a shop or museum or home\nn02975589\t(printing) the receptacle in which a compositor has his type, which is divided into compartments for the different letters, spaces, or numbers\nn02975994\ta water-base paint made with a protein precipitated from milk\nn02976123\ta knife with a fixed blade that is carried in a sheath\nn02976249\ta metal blade with a handle; used as cutlery\nn02976350\ta window sash that is hinged (usually on one side)\nn02976455\ta window with one or more casements\nn02976552\tmilitary barracks in a garrison town\nn02976641\ta metallic cylinder packed with shot and used as ammunition in a firearm\nn02976815\ta counter at a large party where you can purchase drinks by the glass\nn02976939\ta strongbox for holding cash\nn02977058\tan unattended machine (outside some banks) that dispenses money when a personal coded card is used\nn02977330\ta soft fabric made from the wool of the Cashmere goat\nn02977438\ta cashbox with an adding machine to register transactions; used in shops to add up the bill\nn02977619\tthe enclosing frame around a door or window opening\nn02977936\ta public building for gambling and entertainment\nn02978055\tsmall and often ornate box for holding jewels or other valuables\nn02978205\t(15-16th century) any armor for the head; usually ornate without a visor\nn02978367\ta light open casque without a visor or beaver\nn02978478\ta reflecting telescope that has a paraboloidal primary mirror and a hyperboloidal secondary mirror; light is brought to a focus through an aperture in the center of the primary mirror\nn02978753\tlarge deep dish in which food can be cooked and served\nn02978881\ta container that holds a magnetic tape used for recording or 
playing sound or video\nn02979074\ta tape deck for playing and recording cassette tapes\nn02979186\telectronic equipment for playing cassettes\nn02979290\ta recorder for recording or playing cassettes\nn02979399\ta cassette that contains magnetic tape\nn02979516\ta black garment reaching down to the ankles; worn by priests or choristers\nn02979836\tbandage consisting of a firm covering (often made of plaster of Paris) that immobilizes broken bones while they heal\nn02980036\ta pivoting roller attached to the bottom of furniture or trucks or portable machines to make them movable\nn02980203\ta shaker with a perforated top for sprinkling powdered sugar\nn02980441\ta large building formerly occupied by a ruler and fortified against attack\nn02980625\t(chess) the piece that can move any number of unoccupied squares in a direction parallel to the sides of the chessboard\nn02981024\tan underground tunnel with recesses where bodies were buried (as in ancient Rome)\nn02981198\ta decorated bier on which a coffin rests in state during a funeral\nn02981321\ta converter that uses a platinum-iridium catalyst to oxidize pollutants and carbon monoxide into carbon dioxide and water; an antipollution device on an automotive exhaust system\nn02981565\ta chemical reactor for converting oils with high boiling points into fuels with lower boiling points in the presence of a catalyst\nn02981792\ta sailboat with two parallel hulls held together by single deck\nn02981911\tan engine that provided medieval artillery used during sieges; a heavy war engine for hurling large stones and other missiles\nn02982232\ta device that launches aircraft from a warship\nn02982416\ta sailboat with a single mast set far forward\nn02982515\ta receptacle for cat excrement\nn02982599\ta fastener that fastens or locks a door or window\nn02983072\tan enclosure or receptacle for odds and ends\nn02983189\ta mask to protect the face of the catcher in baseball\nn02983357\ta structure in which water is collected 
(especially a natural drainage area)\nn02983507\ta large tracked vehicle that is propelled by two endless metal belts; frequently used for moving earth in construction and farm work\nn02983904\ta throne that is the official chair of a bishop\nn02984061\tany large and important church\nn02984203\tthe principal Christian church building of a bishop's diocese\nn02984469\ta thin flexible tube inserted into the body to permit introduction or withdrawal of fluids or to keep the passageway open\nn02984699\ta negatively charged electrode that is the source of electrons entering an electrical device\nn02985137\ta vacuum tube in which a hot cathode emits a beam of electrons that pass through a high voltage anode and are focused or deflected before hitting a phosphorescent screen\nn02985606\ta whip with nine knotted cords\nn02985828\ta hitch in the middle of rope that has two eyes into which tackle can be hooked\nn02985963\ta bottle that holds catsup\nn02986066\ta freight car for transporting cattle\nn02986160\ta bridge over a ditch consisting of parallel metal bars that allow pedestrians and vehicles to pass, but not cattle\nn02986348\ta cargo ship for the transport of livestock\nn02987047\tan instrument or substance used to destroy tissue for medical reasons (eg removal of a wart) by burning it with a hot iron or an electric current or a caustic or by freezing it\nn02987379\ta soft felt hat with a wide flexible brim\nn02987492\ta stout sword with a curved blade and thick back\nn02987706\ta concave molding shaped like a quarter circle in cross section\nn02987823\ta wall formed of two thicknesses of masonry with a space between them\nn02987950\tbattery used to maintain the grid potential in a vacuum tube\nn02988066\ta clamp in the shape of the letter C\nn02988156\ta drive that reads a compact disc and that is connected to an audio system\nn02988304\ta stand-alone piece of electronic equipment that either has its own display or attaches to a television set\nn02988486\ta 
compact disc on which you can write only once and thereafter is read-only memory\nn02988679\ta compact disk that is used with a computer (rather than with an audio system); a large amount of digital information can be stored and accessed but it cannot be altered by the user\nn02988963\ta drive that is connected to a computer and on which a CD-ROM can be `played'\nn02989099\ta chest made of cedar\nn02990373\tthe overhead upper surface of a covered space\nn02990758\ta musical instrument consisting of graduated steel plates that are struck by hammers activated by a keyboard\nn02991048\ta device that delivers an electric current as the result of a chemical reaction\nn02991302\ta room where a prisoner is kept\nn02991847\tstorage space where wines are stored\nn02992032\ta division of a prison (usually consisting of several cells)\nn02992211\ta large stringed instrument; seated player holds it upright while playing\nn02992368\ta transparent paperlike product that is impervious to moisture and used to wrap candy or cigarettes etc.\nn02992529\ta hand-held mobile radiotelephone for use in an area divided into small sections, each with its own short-range transmitter/receiver\nn02992795\ttransparent or semitransparent adhesive tape (trade names Scotch tape and Sellotape) used for sealing or attaching or mending\nn02993194\ta monument built to honor people whose remains are interred elsewhere or whose remains cannot be recovered\nn02993368\ta container for burning incense (especially one that is swung on a chain in a religious ritual)\nn02993546\ta building dedicated to a particular activity\nn02994573\ta tool with a conical point that is used to make indentations in metal (especially to mark points for drilling)\nn02994743\ta thermometer calibrated in degrees centigrade\nn02995345\t(computer science) the part of a computer (a microprocessor chip) that does most of the data processing\nn02995871\ta pump that use centrifugal force to discharge fluid into a pipe\nn02995998\tan 
apparatus that uses centrifugal force to separate particles from a suspension\nn02997391\tan artifact made of hard brittle material produced from nonmetallic minerals by firing at high temperatures\nn02997607\tutensils made from ceramic material\nn02997910\ta bowl for holding breakfast cereal\nn02998003\ta paper box in which breakfast cereals are sold\nn02998107\ta waterproof waxed cloth once used as a shroud\nn02998563\ta covered cistern; waste water and sewage flow into it\nn02998696\t(Yiddish) an inexpensive showy trinket\nn02998841\ta cloth used as a head covering (and veil and shawl) by Muslim and Hindu women\nn02999138\ta metal pan over a heater; used to cook or to keep things warm at the table\nn02999410\ta series of (usually metal) rings or links fitted into one another to make a flexible ligament\nn02999936\tanything that acts as a restraint\nn03000134\ta fence of steel wires woven into a diamond pattern\nn03000247\t(Middle Ages) flexible armor made of interlinked metal rings\nn03000530\tan impact printer that carries the type slugs by links of a revolving chain\nn03000684\tportable power saw; teeth linked to form an endless chain\nn03001115\tone of a chain of retail stores under the same management and selling the same merchandise\nn03001282\ta pipe wrench used for turning large pipes; an adjustable chain circles the pipe with its ends connected to the head whose teeth engage the pipe\nn03001540\tanother name for chain tongs\nn03001627\ta seat for one person, with a support for the back\nn03002096\ta particular seat in an orchestra\nn03002210\ta ceremonial chair for an exalted or powerful person\nn03002341\ta ski lift on which riders (skiers or sightseers) are seated and carried up or down a mountainside; seats are hung from an endless overhead cable\nn03002555\ta carriage consisting of two wheels and a calash top; drawn by a single horse\nn03002711\ta long chair; for reclining\nn03002816\ta Swiss house with a sloping roof and wide eaves or a house built 
in this style\nn03002948\ta bowl-shaped drinking vessel; especially the Eucharistic cup\nn03003091\ta piece of calcite or a similar substance, usually in the shape of a crayon, that is used to write or draw on blackboards or other flat surfaces\nn03003633\ta soft lightweight fabric (usually printed)\nn03004275\ta receptacle for urination or defecation in the bedroom\nn03004409\ta lightweight fabric woven with white threads across a colored warp\nn03004531\ta bit that is used for beveling\nn03004620\ta plane that makes a beveled edge\nn03004713\ta piece of chamois used for washing windows or cars\nn03004824\tarea around the altar of a church for the clergy and choir; often enclosed by a lattice or railing\nn03005033\ta government building housing the office of a chancellor\nn03005147\tan office of archives for public or ecclesiastic records; a court of public records\nn03005285\tbranched lighting fixture; often ornate; hangs from the ceiling\nn03005515\tcandles and other commodities sold by a chandler\nn03005619\tmedieval plate armor to protect a horse's head\nn03006626\treed pipe with finger holes on which the melody is played\nn03006788\ta chapel endowed for singing Masses for the soul of the donor\nn03006903\t(usually in the plural) leather leggings without a seat; joined by a belt; often have flared outer flaps; worn over trousers by cowboys to protect their legs\nn03007130\ta place of worship that has its own altar\nn03007297\ta house used as a residence by a chapter of a fraternity\nn03007444\ta building attached to a monastery or cathedral; used as a meeting place for the chapter\nn03007591\ta printer that prints a single character at a time\nn03008177\ta delicatessen that specializes in meats\nn03008817\tan accelerator in which high-energy ions escape from plasma following charge exchange\nn03008976\ta device for charging or recharging batteries\nn03009111\ta light four-wheel horse-drawn ceremonial carriage\nn03009269\ta two-wheeled horse-drawn battle 
vehicle; used in war and races in ancient Egypt and Greece and Rome\nn03009794\ta vault or building where corpses or bones are deposited\nn03010473\tthe skeleton of a motor vehicle consisting of a steel frame supported on springs that holds the body and motor\nn03010656\ta metal mounting for the circuit components of an electronic device\nn03010795\ta long sleeveless vestment worn by a priest when celebrating Mass\nn03010915\tan impressive country house (or castle) in France\nn03011018\ta chain formerly worn at the waist by women; for carrying a purse or bunch of keys etc.\nn03011355\tone of the flat round pieces used in playing the game of checkers\nn03011741\ta counter in a supermarket where you pay for your purchases\nn03012013\teither of two straps of a bridle that connect the bit to the headpiece\nn03012159\ttray on which cheeses are served\nn03012373\ta coarse loosely woven cotton gauze; originally used to wrap cheeses\nn03012499\ta kitchen utensil (board or handle) with a wire for cutting cheese\nn03012644\ta press for shaping cheese curd\nn03012734\ta bomb laden with chemical agents that are released when the bomb explodes\nn03012897\tan industrial plant where chemicals are produced\nn03013006\tan apparatus for holding substances that are undergoing a chemical reaction\nn03013438\ta loose-fitting dress hanging straight from the shoulders without a waist\nn03013580\ta woman's sleeveless undergarment\nn03013850\ta heavy fabric woven with chenille cord; used in rugs and bedspreads\nn03014440\tany of 16 white and 16 black pieces used in playing the game of chess\nn03014705\tbox with a lid; used for storage; usually large and sturdy\nn03015149\tan overstuffed davenport with upright armrests\nn03015254\tfurniture with drawers for keeping clothes\nn03015478\tprotective garment consisting of a pad worn in baseball by catchers and by football players\nn03015631\tdefensive structure consisting of a movable obstacle composed of barbed wire or spikes attached to a 
wooden frame; used to obstruct cavalry\nn03015851\ta full length mirror mounted in a frame in which it can be tilted\nn03016209\ta movable barrier used in motor racing; sometimes placed before a dangerous corner to reduce speed as cars pass in single file\nn03016389\ta farm building for housing poultry\nn03016609\ta galvanized wire network with a hexagonal mesh; used to build fences\nn03016737\tan enclosed yard for keeping poultry\nn03016868\ta sheer fabric of silk or rayon\nn03016953\ta tall elegant chest of drawers\nn03017070\ta bedroom for a child\nn03017168\ta percussion instrument consisting of a set of tuned bells that are struck with a hammer; used as an orchestral instrument\nn03017698\twalls that project out from the wall of a room and surround the chimney base\nn03017835\ta corner by a fireplace\nn03018209\thigh quality porcelain originally made only in China\nn03018349\ta cabinet (usually with glass doors) for storing and displaying china\nn03018614\ta thick twilled fabric of wool and cotton\nn03018712\ta collapsible paper lantern in bright colors; used for decorative purposes\nn03018848\tintricate or ingenious puzzle consisting of boxes within boxes\nn03019198\ta horizontal bar on which you can chin yourself\nn03019304\ta coarse twilled cotton fabric frequently used for uniforms\nn03019434\ttrousers made with chino cloth\nn03019685\ta rest on which a violinist can place the chin\nn03019806\ta strap attached to a hat; passes under the chin and holds the hat in place\nn03019938\ta brightly printed and glazed cotton fabric\nn03020034\telectronic equipment consisting of a small crystal of a silicon semiconductor fabricated to carry out a number of electronic functions in an integrated circuit\nn03020416\ta small disk-shaped counter used to represent money when gambling\nn03020692\tan edge tool with a flat steel blade with a cutting edge\nn03021228\ta short mantle or cape fastened at the shoulder; worn by men in ancient Greece\nn03024064\tthe area occupied 
by singers; the part of the chancel between sanctuary and nave\nn03024233\ta gallery in a church occupied by the choir\nn03024333\ta valve that controls the flow of air into the carburetor of a gasoline engine\nn03024518\ta coil of low resistance and high inductance used in electrical circuits to pass direct current and attenuate alternating current\nn03025070\tBritish slang (dated) for a prison\nn03025165\ta child's word for locomotive\nn03025250\ta woman's shoe with a very high thick sole\nn03025886\ta stringed instrument of the group including harps, lutes, lyres, and zithers\nn03026506\ta stocking that is filled with small Christmas presents\nn03026907\tan accurate timer for recording time\nn03027001\tan accurate clock (especially used in navigation)\nn03027108\tan instrument for accurate measurements of small intervals of time\nn03027250\ta holding device consisting of adjustable jaws that center a workpiece in a lathe or center a tool in a drill\nn03027505\ta wagon equipped with a cookstove and provisions (for cowboys)\nn03027625\ta shoe that comes up to the ankle and is laced through two or three pairs of eyelets; often made of suede\nn03028079\ta place for public (especially Christian) worship\nn03028596\ta bell in a church tower (usually sounded to summon people to church)\nn03028785\ta fanciful hat of the kind worn by Black women for Sunday worship\nn03029066\tcan opener that has a triangular pointed end that pierces the tops of cans\nn03029197\tthe tower of a church\nn03029296\ttight trousers worn by people from the Indian subcontinent (typically with a kameez or kurta)\nn03029445\ta vessel in which cream is agitated to separate butterfat from buttermilk\nn03029925\ta press that is used to extract the juice from apples\nn03030262\ta narrow paper band around a cigar\nn03030353\ta box for holding cigars\nn03030557\tan implement for cutting the tip off of a cigar\nn03030880\tsmall part of a cigarette that is left after smoking\nn03031012\ta small flat case 
for holding cigarettes; can be carried in a purse or a pocket\nn03031152\ta tube that holds a cigarette while it is being smoked\nn03031422\ta lighter for cigars or cigarettes\nn03031756\tstable gear consisting of a band around a horse's belly that holds the saddle in place\nn03032252\ta theater where films are shown\nn03032453\tan ornamental carving consisting of five arcs arranged in a circle\nn03032811\tany circular or rotating mechanism\nn03033267\tdecorated metal band worn around the head\nn03033362\tan electrical device that provides a path for electrical current to flow\nn03033986\ta printed circuit that can be inserted into expansion slots in a computer to increase the computer's capabilities\nn03034244\ta device that trips like a switch and opens the circuit when overloaded\nn03034405\telectronic equipment consisting of a system of circuits\nn03034516\ta plane with a flexible face that can plane concave or convex surfaces\nn03034663\ta power saw that has a steel disk with cutting teeth on the periphery; rotates on a spindle\nn03035252\ta canvas tent to house the audience at a circus performance\nn03035510\tan artificial reservoir for storing liquids; especially an underground tank for storing rainwater\nn03035715\ta tank that holds the water used to flush a toilet\nn03035832\ta 16th century musical instrument resembling a guitar with a pear-shaped soundbox and wire strings\nn03036022\ta building that houses administrative offices of a municipal government\nn03036149\tpainting depicting a city or urban area\nn03036244\tan urban university in a large city\nn03036341\tcivilian garb as opposed to a military uniform\nn03036469\tordinary clothing as distinguished from uniforms, work clothes, clerical garb, etc.\nn03036701\ta simple valve with a hinge on one side; allows fluid to flow in only one direction\nn03036866\ta device (generally used by carpenters) that holds things firmly together\nn03037108\ta dredging bucket with hinges like the shell of a 
clam\nn03037228\tmetal striker that hangs inside a bell and makes a sound by hitting the side\nn03037404\tphotographic equipment used to synchronize sound and motion picture; boards held in front of a movie camera are banged together\nn03037590\ta closed carriage with four wheels and seats for four passengers\nn03037709\ta single-reed instrument with a straight tube\nn03038041\ta form of voltaic cell once used as a standard for electromotive force\nn03038281\ta fastener (as a buckle or hook) that is used to hold two things together\nn03038480\ta large knife with one or more folding blades\nn03038685\ta room in a school where lessons take place\nn03038870\tan early stringed instrument like a piano but with more delicate sound\nn03039015\ta stringed instrument that has a keyboard\nn03039259\ttarget used in skeet or trapshooting\nn03039353\tan antipersonnel land mine whose blast is aimed at the oncoming enemy\nn03039493\ta large double-edged broadsword; formerly used by Scottish Highlanders\nn03039827\tshop where dry cleaning is done\nn03039947\tany of a large class of implements used for cleaning\nn03040229\ta pad used as a cleaning implement\nn03040376\ta room that is virtually free of dust or bacteria; used in laboratory work and in assembly or repair of precision equipment\nn03040836\ta road on which you are not allowed to stop (unless you have a breakdown)\nn03041114\ta fastener (usually with two projecting horns) around which a rope can be secured\nn03041265\ta metal or leather projection (as from the sole of a shoe); prevents slipping\nn03041449\tshoes with leather or metal projections on the soles\nn03041632\ta butcher's knife having a large square blade\nn03041810\tpart of an interior wall rising above the adjacent roof with windows admitting light\nn03042139\ta coupler shaped like the letter U with holes through each end so a bolt or pin can pass through the holes to complete the coupling; used to attach a drawbar to a plow or wagon or trailer 
etc.\nn03042384\tthe cords used to suspend a hammock\nn03042490\ta rock and adobe dwelling built on sheltered ledges in the sides of a cliff\nn03042697\ta framework of bars or logs for children to climb on\nn03042829\tthe flattened part of a nail or bolt or rivet\nn03042984\ta small slip noose made with seizing\nn03043173\ta tool used to clinch nails or bolts or rivets\nn03043274\ta healthcare facility for outpatient care\nn03043423\ta mercury thermometer designed to measure the temperature of the human body; graduated to cover a range a few degrees on either side of the normal body temperature\nn03043693\ta hard brick used as a paving stone\nn03043798\tan instrument used by surveyors in order to measure an angle of inclination or elevation\nn03043958\tany of various small fasteners used to hold loose articles together\nn03044671\ta short piece of wire with alligator clips on both ends\nn03044801\ta device (as an earring, sunglasses, microphone etc.) that is attached by clips\nn03044934\tscissors for cutting hair or finger nails (often used in the plural)\nn03045074\tshears for cutting grass or shrubbery (often used in the plural)\nn03045228\ta fast sailing ship used in former times\nn03045337\ta loose outer garment\nn03045698\tanything that covers or conceals\nn03045800\ta room where coats and other articles can be left temporarily\nn03046029\ta woman's close-fitting hat that resembles a helmet\nn03046133\ta low transparent cover put over young plants to protect them from cold\nn03046257\ta timepiece that shows the time of day\nn03046802\ta physical pendulum used to regulate a clockwork mechanism\nn03046921\ta radio that includes a clock that can be set to turn it on automatically\nn03047052\ta tower with a large clock visible high up on an outside face\nn03047171\tany mechanism of geared wheels that is driven by a coiled spring; resembles the works of a mechanical clock\nn03047690\tfootwear usually with wooden soles\nn03047799\tenamelware in which colored areas 
are separated by thin metal strips\nn03047941\ta courtyard with covered walks (as in religious institutions)\nn03048883\ta complete electrical circuit around which current flows or a signal circulates\nn03049066\ta television system that is not used for broadcasting but is connected by cables to designated monitors (as in a factory or theater)\nn03049326\ta control system with a feedback loop that is active\nn03049457\ta small private room for study or prayer\nn03049782\ta photographic lens with a short focal length used to take pictures at short ranges\nn03049924\ta flat woolen cap with a stiff peak\nn03050026\ta covering made of cloth\nn03050453\ta brush used for cleaning clothing\nn03050546\ta closet where clothes are stored\nn03050655\ta dryer that dries clothes wet from washing\nn03050864\ta hamper that holds dirty clothes to be washed or wet clothes to be dried\nn03051041\ta framework on which to hang clothes (as for drying)\nn03051249\twood or plastic fastener; for holding clothes on a clothesline\nn03051396\tan upright pole with pegs or hooks on which to hang clothing\nn03051540\ta covering designed to be worn on a person's body\nn03052464\ta store where men's clothes are sold\nn03052917\ta short nail with a flat head; used to attach sheet metal to wood\nn03053047\ta knot used to fasten a line temporarily to a post or spar\nn03053976\trailroad car having a bar and tables and lounge chairs\nn03054491\ta room used for the activities of a club\nn03054605\tbomb consisting of a canister that is dropped from a plane and that opens to release a cluster of bomblets (usually fragmentation bombs) over a wide area\nn03054901\ta coupling that connects or disconnects driving and driven parts of a driving mechanism\nn03055159\ta pedal or lever that engages or disengages a rotating shaft and a driving mechanism\nn03055418\ta woman's strapless purse that is carried in the hand\nn03055670\ta carriage pulled by four horses with one driver\nn03055857\ta small building for 
housing coaches and carriages and other vehicles\nn03056097\tfreight car with fixed sides and no roof; for transporting coal\nn03056215\ta chute for coal\nn03056288\ta shed for storing coal\nn03056493\ta hand shovel for shoveling coal\nn03056583\ta raised framework around a hatchway on a ship to keep water out\nn03056873\ta brake on a bicycle that engages with reverse pressure on the pedals\nn03057021\tan outer garment that has sleeves and covers the body from shoulder down; worn outdoors\nn03057541\ta button on a coat\nn03057636\ta closet for storing outerwear\nn03057724\ta dress that is tailored like a coat and buttons up the front\nn03057841\ta short close-fitting coat\nn03057920\ta hanger that is shaped like a person's shoulders and used to hang garments on\nn03058107\ta thin layer covering something\nn03058603\ta heavy fabric suitable for coats\nn03058949\ta layer of paint covering something else\nn03059103\ta rack with hooks for temporarily holding coats and hats\nn03059236\tthe loose back flap of a coat that hangs below the waist\nn03059366\ta transmission line for high-frequency signals\nn03059685\ta dense elaborate spider web that is more efficient than the orb web\nn03059934\ta fabric so delicate and transparent as to resemble a web of a spider\nn03060728\ta high-voltage machine in which rectifiers charge capacitors that discharge and drive charged particles through an accelerating tube\nn03061050\that with opposing brims turned up and caught together to form points\nn03061211\tanything used as a toy horse (such as a rocking horse or one knee of an adult)\nn03061345\ta small light flimsy boat\nn03061505\tcompartment where the pilot sits while flying the aircraft\nn03061674\tseat where the driver sits while driving a racing car\nn03061819\ta pit for cockfights\nn03061893\ta cap worn by court jesters; adorned with a strip of red\nn03062015\ta dress suitable for formal occasions\nn03062122\ta barroom in a hotel or restaurant where cocktails are 
served\nn03062245\ta shaker for mixing cocktails\nn03062336\ta small casserole in which individual portions can be cooked and served\nn03062651\t(15th-16th century) a flap for the crotch of men's tight-fitting breeches\nn03062798\toptical device used to follow the path of a celestial body and reflect its light into a telescope; has a movable and a fixed mirror\nn03062985\ta can for storing ground coffee\nn03063073\ta cup from which coffee is drunk\nn03063199\tfilter (usually of paper) that passes the coffee and retains the coffee grounds\nn03063338\ta kitchen appliance for brewing coffee automatically\nn03063485\ta mill that grinds roasted coffee beans\nn03063599\ta mug intended for serving coffee\nn03063689\ttall pot in which coffee is brewed\nn03063834\ta stand (usually movable) selling hot coffee and food (especially at night)\nn03063968\tlow table where magazines can be placed and coffee or cocktails are served\nn03064250\tan urn in which coffee is made and kept hot\nn03064350\ta chest especially for storing valuables\nn03064562\ta still consisting of an apparatus for the fractional distillation of ethanol from fermentation on an industrial scale\nn03064758\tbox in which a corpse is buried or cremated\nn03064935\ttooth on the rim of gear wheel\nn03065243\ta skullcap worn by nuns under a veil or by soldiers under a hood of mail or formerly by British sergeants-at-law\nn03065424\ta structure consisting of something wound in a continuous series of loops\nn03065708\treactor consisting of a spiral of insulated wire that introduces inductance into a circuit\nn03066232\ta transformer that supplies high voltage to spark plugs in a gasoline engine\nn03066359\ta spring in the shape of a coil\nn03066464\tthe part of a slot machine that serves as a receptacle for the coins\nn03066849\tbowl-shaped strainer; used to wash or drain foods\nn03067093\ta cathode that is a source of electrons without being heated\nn03067212\tnarrow chisel made of steel; used to cut stone or 
bricks\nn03067339\ta cream used cosmetically (mostly by women) for softening and cleaning the skin\nn03067518\tprotective covering consisting of a wooden frame with a glass top in which small plants are protected from the cold\nn03068181\ta band that fits around the neck and is usually folded over\nn03068998\tanything worn or placed about the neck\nn03069752\ta complex of buildings in which an institution of higher education is housed\nn03070059\ta cone-shaped chuck used for holding cylindrical pieces in a lathe\nn03070193\tan accelerator in which two beams of particles are forced to collide head on\nn03070396\ta workplace consisting of a coal mine plus all the buildings and equipment connected with it\nn03070587\toptical device consisting of a tube containing a convex achromatic lens at one end and a slit at the other with the slit at the focus of the lens; light rays leave the slit as a parallel beam\nn03070854\ta small telescope attached to a large telescope to use in setting the line of the larger one\nn03071021\ta perfumed liquid made of essential oils and alcohol\nn03071160\tstructure consisting of a row of evenly spaced columns\nn03071288\tan elongated fiberoptic endoscope for examining the entire colon from cecum to rectum\nn03071552\ta measuring instrument used in colorimetric analysis to determine the quantity of a substance from the color it yields with specific reagents\nn03072056\ta distinguishing emblem\nn03072201\ta television that transmits images in color\nn03072440\ta television tube that displays images in full color\nn03072682\ta wash of whitewash or other water-base paint tinted with a colored pigment\nn03073296\ta kind of revolver\nn03073384\ta sharp steel wedge that precedes the plow and cuts vertically through the soil\nn03073545\ta sepulchral vault or other structure having recesses in the walls to receive cinerary urns\nn03073694\ta niche for a funeral urn containing the ashes of the cremated dead\nn03073977\t(architecture) a tall vertical 
cylindrical structure standing upright and used to support a structure\nn03074380\ta vertical cylindrical structure standing alone and not supporting anything (such as a monument)\nn03074855\ta flat device with narrow pointed teeth on one edge; disentangles or arranges hair\nn03075097\tany of several tools for straightening fibers\nn03075248\ta machine that separates and straightens the fibers of cotton or wool\nn03075370\tlock that can be opened only by turning dials in a special sequence\nn03075500\ta woodworking plane that has interchangeable cutters of various shapes\nn03075634\tharvester that heads and threshes and cleans grain while moving across the field\nn03075768\tdevice used for an infant to suck or bite on\nn03075946\ta space module in which astronauts can live and control the spacecraft and communicate with earth\nn03076411\ta retail store that sells equipment and provisions (usually to military personnel)\nn03076623\ta snack bar in a film studio\nn03076708\tarticles of commerce\nn03077442\tan ax with a long handle and a head that has one cutting edge and one blunt side\nn03077616\ta sitting room (usually at school or university)\nn03077741\tan artificial satellite that relays signals back to earth; moves in a geostationary orbit\nn03078287\ta system for communicating\nn03078506\ta center where the members of a community can gather for social or cultural activities\nn03078670\tswitch for reversing the direction of an electric current\nn03078802\ta passenger train that is ridden primarily by passengers who travel regularly from one place to another\nn03078995\ta small cosmetics case with a mirror; to be carried in a woman's purse\nn03079136\ta small and economical car\nn03079230\ta digitally encoded recording on an optical disk that is smaller than a phonograph record; played back by a laser\nn03079494\trecording equipment for making compact disks\nn03079616\ta stairway or ladder that leads from one deck to another on a ship\nn03079741\ta partitioned 
section, chamber, or separate room within a larger enclosed area\nn03080309\ta space into which an area is subdivided\nn03080497\tnavigational instrument for finding directions\nn03080633\tdrafting instrument used for drawing circles\nn03080731\tcompass in the form of a card that rotates so that 0 degrees or North points to magnetic north\nn03080904\ta handsaw with a narrow triangular blade for cutting curves\nn03081859\tan enclosure of residences and other building (especially in the Orient)\nn03081986\ta lens system consisting of two or more lenses on the same axis\nn03082127\ta pair of levers hinged at the fulcrum\nn03082280\tlight microscope that has two converging lens systems: the objective and the eyepiece\nn03082450\ta cloth pad or dressing (with or without medication) applied firmly to some part of the body (to relieve discomfort or reduce fever)\nn03082656\tbandage that stops the flow of blood from an artery by applying pressure\nn03082807\ta mechanical device that compresses gasses\nn03082979\ta machine for performing calculations automatically\nn03084420\ta circuit that is part of a computer\nn03084834\ta tomograph that constructs a 3-D model of an object by combining parallel planes\nn03085013\ta keyboard that is a data input device for computers; arrangement of keys is modelled after the typewriter keyboard\nn03085219\ta device that displays signals on a computer screen\nn03085333\t(computer science) a network of computers\nn03085602\ta screen used to display the output of a computer to the user\nn03085781\ta store that sells computers to the small businessperson or personal user\nn03085915\ta system of one or more computers and associated software with common storage\nn03086183\ta penal camp where political prisoners or prisoners of war are confined (usually under harsh conditions)\nn03086457\ta grand piano suitable for concert performances\nn03086580\ta hall where concerts are given\nn03086670\tfree-reed instrument played like an accordion by 
pushing its ends together to force air through the reeds\nn03086868\tcoiled barbed wire used as an obstacle\nn03087069\ta machine with a large revolving drum in which cement is mixed with other materials to make concrete\nn03087245\tvacuum pump used to obtain a high vacuum\nn03087366\tlens used to concentrate light on an object\nn03087521\ta hollow coil that condenses by abstracting heat\nn03087643\tan apparatus that converts vapor into liquid\nn03087816\tmicrophone consisting of a capacitor with one plate fixed and the other forming the diaphragm moved by sound waves\nn03088389\thousing consisting of a complex of dwelling units (as an apartment house) in which each unit is individually owned\nn03088580\tone of the dwelling units in a condominium\nn03088707\ta device designed to transmit electricity, heat, etc.\nn03089477\ta friction clutch in which the frictional surfaces are cone-shaped\nn03089624\ta confectioner's shop\nn03089753\ta center where conferences can be conducted\nn03089879\ta room in which a conference can be held\nn03090000\tthe table that conferees sit around as they hold a meeting\nn03090172\ta booth where a priest sits to hear confessions\nn03090437\ta map projection in which a small area is rendered in its true shape\nn03090710\tan ankle high shoe with elastic gussets in the sides\nn03090856\ta map projection of the globe onto a cone with its point over one of the earth's poles\nn03091044\ta rod that transmits motion (especially one that connects a rotating wheel to a reciprocating shaft)\nn03091223\ta hotel room that shares a wall with an adjoining room and is connected by a private door\nn03091374\tan instrumentality that connects\nn03091907\ta raised bridge on a submarine; often used for entering and exiting\nn03092053\tan armored pilothouse on a warship\nn03092166\ta greenhouse in which plants are arranged in a pleasing manner\nn03092314\ta schoolhouse with special facilities for fine arts\nn03092476\tan ornamental scroll-shaped bracket 
(especially one used to support a wall fixture)\nn03092656\ta scientific instrument consisting of displays and an input device that an operator can use to monitor and control a system (especially a computer system)\nn03092883\ta small table fixed to a wall or designed to stand against a wall\nn03093427\tdiplomatic building that serves as the residence or workplace of a consul\nn03093792\t(electronics) a junction where things (as two electrical conductors) touch or are in physical contact\nn03094159\ta thin curved glass or plastic lens designed to fit over the cornea in order to correct vision or to deliver medication\nn03094503\tany object that can be used to hold things (especially a large metal boxlike object of standardized dimensions that can be loaded from one form of transport to another)\nn03095699\ta cargo ship designed to hold containerized cargoes\nn03095965\t(physics) a system designed to prevent the accidental release of radioactive material from a reactor\nn03096439\tthe bassoon that is the largest instrument in the oboe family\nn03096960\ta mechanism that controls the operation of a machine\nn03097362\tthe operational center for a group of related activities\nn03097535\ta feedback circuit that subtracts from the input\nn03097673\t(computer science) the key on a computer keyboard that is used (in combination with some other key) to type control characters\nn03098140\telectrical device consisting of a flat insulated surface that contains switches and dials and meters for controlling other electrical devices\nn03098515\ta steel or aluminum rod that can be moved up or down to control the rate of the nuclear reaction\nn03098688\ta room housing control equipment (as in a recording studio)\nn03098806\ta system for controlling the operation of another system\nn03098959\ta tower with an elevated workspace enclosed in glass for the visual observation of aircraft around an airport\nn03099147\ta space heater that transfers heat to the surrounding air by 
convection\nn03099274\ta store selling a limited variety of food and pharmaceutical items; open long hours for the convenience of customers\nn03099454\ta religious residence especially for nuns\nn03099622\ta building for religious assembly (especially Nonconformists, e.g., Quakers)\nn03099771\tlens such that a beam of light passing through it is brought to a point or focus\nn03099945\ta device for changing one substance or form or state into another\nn03100240\ta car that has top that can be folded or removed\nn03100346\ta sofa that can be converted into a bed\nn03100490\tsomething that serves as a means of transportation\nn03100897\ta moving belt that transports objects (as in a factory)\nn03101156\ta utensil for cooking\nn03101302\ta fire for cooking\nn03101375\ta detached or outdoor shelter for cooking\nn03101517\ta kitchen utensil used to cut a sheet of cookie dough into desired shapes before baking\nn03101664\ta jar in which cookies are kept (and sometimes money is hidden)\nn03101796\ta cooking utensil consisting of a flat rectangular metal sheet used for baking cookies or biscuits\nn03101986\ta kitchen utensil made of material that does not melt easily; used for cooking\nn03102371\ta stove for cooking (especially a wood- or coal-burning kitchen stove)\nn03102516\ta cooling system that uses a fluid to transfer heat from one place to another\nn03102654\ta refrigerator for cooling liquids\nn03102859\ta mechanism for keeping something cool\nn03103128\tequipment in a motor vehicle that cools the engine\nn03103396\ta cooling system used in industry to cool hot water (by partial evaporation) before reusing it as a coolant\nn03103563\ta raccoon cap with the tail hanging down the back\nn03103904\ta long cloak; worn by a priest or bishop on ceremonial occasions\nn03104019\ta handsaw with a taut thin blade; used for cutting small curves in wood\nn03104512\tutensils made with copper\nn03105088\tmechanical device used in printing; holds the copy for the 
compositor\nn03105214\ta dish in the form of a scallop shell\nn03105306\ta small rounded boat made of hides stretched over a wicker frame; still used in some parts of Great Britain\nn03105467\t(architecture) a triangular bracket of brick or stone (usually of slight extent)\nn03105645\t(architecture) an arch constructed of masonry courses that are corbelled until they meet\nn03105810\t(architecture) a step on the top of a gable wall\nn03105974\t(architecture) a gable having corbie-steps or corbel steps\nn03106722\ta cut pile fabric with vertical ribs; usually made of cotton\nn03106898\ta light insulated conductor for household use\nn03107046\tthe ropes in the rigging of a ship\nn03107488\tcotton trousers made of corduroy cloth\nn03107716\ta bar of magnetic material (as soft iron) that passes through a coil and serves to increase the inductance of the coil\nn03108455\ta hollow drilling bit that is the cutting part of a core drill; allows core samples to be taken\nn03108624\ta drill that removes a cylindrical core from the drill hole\nn03108759\ta device for removing the core from apples\nn03108853\tthe plug in the mouth of a bottle (especially a wine bottle)\nn03109033\ta machine that is used to put corks in bottles\nn03109150\ta bottle opener that pulls corks\nn03109253\ta crib for storing and drying ears of corn\nn03109693\t(architecture) solid exterior angle of a building; especially one formed by a cornerstone\nn03109881\tan interior angle formed by two meeting walls\nn03110202\ta square post supporting a structural member at the corner of a building\nn03110669\ta brass musical instrument with a brilliant tone; has a narrow tube and a flared bell and is played by means of valves\nn03111041\tthe topmost projecting part of an entablature\nn03111177\ta molding at the corner between the ceiling and the top of a wall\nn03111296\ta decorative framework to conceal curtain fixtures at the top of a window casing\nn03111690\ta penal institution maintained by the 
government\nn03112240\ta small strip of corrugated steel with sharp points on one side; hammered across wood joints in rough carpentry\nn03112719\ta piece of body armor for the trunk; usually consists of a breastplate and back piece\nn03112869\ta woman's close-fitting foundation garment\nn03113152\ta toiletry designed to beautify the body\nn03113505\ta large proton synchrotron; uses frequency modulation of an electric field to accelerate protons\nn03113657\tthe attire worn in a play or at a fancy dress ball\nn03113835\tthe attire characteristic of a country or a time or a social class\nn03114041\tunusual or period attire not characteristic of or appropriate to the time and place\nn03114236\tthe prevalent fashion of dress (including accessories and hair style as well as garments)\nn03114379\ta padded cloth covering to keep a teapot warm\nn03114504\ta small bed that folds up for storage or transport\nn03114743\ta tent providing shelter for a family\nn03114839\tfastener consisting of a wedge or pin inserted through a slot to hold two other pieces together\nn03115014\ta cotter consisting of a split pin that is secured (after passing through a hole) by splitting the ends apart\nn03115180\tfabric woven from cotton fibers\nn03115400\ta stout cotton fabric with nap on only one side\nn03115663\ta textile mill for making cotton textiles\nn03115762\ta narrow bed on which a patient lies during psychiatric or psychoanalytic treatment\nn03115897\ta flat coat of paint or varnish used by artists as a primer\nn03116008\ta compartment on a European passenger train; contains 4 to 6 berths for sleeping\nn03116163\ta reflecting telescope so constructed that the light is led to a plate holder or spectrograph\nn03116530\ttable consisting of a horizontal surface over which business is transacted\nn03116767\ta calculator that keeps a record of the number of times something happens\nn03117199\tgame equipment (as a piece of wood, plastic, or ivory) used for keeping a count or reserving a 
space in various card or board games\nn03117642\ta bit for enlarging the upper part of a hole\nn03118346\ta measuring instrument for counting individual ionizing events\nn03118969\ta house (usually large and impressive) on an estate in the country\nn03119203\ta retail store serving a sparsely populated region; usually stocked with a wide variety of merchandise\nn03119396\ta car with two doors and front seats and a luggage compartment\nn03119510\ta mechanical device that serves to connect the ends of adjacent objects\nn03120198\tan area wholly or partly surrounded by walls or buildings\nn03120491\ta specially marked horizontal area within which a game is played\nn03120778\ta room in which a lawcourt sits\nn03121040\tthe residence of a sovereign or nobleman\nn03121190\tan acrylic fabric resembling wool\nn03121298\ta building that houses judicial courts\nn03121431\ta government building that houses the offices of a county government\nn03121897\ta loose-fitting protective garment that is worn over other clothing\nn03122073\ta bridge whose passageway is protected by a roof and enclosing sides\nn03122202\ta litter with a cover for privacy\nn03122295\ta large wagon with broad wheels and an arched canvas top; used by the United States pioneers to cross the prairies in the 19th century\nn03122748\tan artifact that covers something else (usually to protect or shelter or conceal it)\nn03123553\ta decorative bedspread (usually quilted)\nn03123666\tcovering consisting of a plate used to cover over or close in a chamber or receptacle\nn03123809\ta barn for cows\nn03123917\ta bell hung around the neck of cow so that the cow can be easily located\nn03124043\ta boot with a high arch and fancy stitching; worn by American cowboys\nn03124170\ta hat with a wide brim and a soft crown; worn by American ranch hands\nn03124313\ta heavy flexible whip braided from leather made from the hide of a cow\nn03124474\ta loose hood or hooded robe (as worn by a monk)\nn03124590\ta pen for 
cattle\nn03125057\tthe main circuit board for a computer\nn03125588\tglazed china with a network of fine cracks on the surface\nn03125729\ta baby bed with sides and rockers\nn03125870\ta vehicle designed for navigation in or on water or air or through outer space\nn03126090\ta strip of metal with ends bent at right angles; used to hold masonry together\nn03126385\tan iron spike attached to the shoe to prevent slipping on ice when walking or climbing\nn03126580\ta hinged pair of curved iron bars; used to raise heavy objects\nn03126707\tlifts and moves heavy objects; lifting tackle is suspended from a pivoted boom that rotates around a vertical axis\nn03126927\tan instrument for measuring skull sizes\nn03127024\ta hand tool consisting of a rotating shaft with parallel handle\nn03127203\thousing for a crankshaft\nn03127408\ta rotating shaft driven by (or driving) a crank\nn03127531\ta strong protective barrier that is erected around a racetrack or in the middle of a dual-lane highway in order to reduce the likelihood of severe accidents\nn03127747\ta padded helmet worn by people riding bicycles or motorcycles; protects the head in case of accidents\nn03127925\ta rugged box (usually made of wood); used for shipping\nn03128085\tneckwear worn in a slipknot with long ends overlapping vertically in front\nn03128248\twriting implement consisting of a colored stick of composition wax used for writing and drawing\nn03128427\ta patchwork quilt without a design\nn03128519\ttoiletry consisting of any of various substances in the form of a thick liquid that have a soothing and moisturizing effect when applied to the skin\nn03129001\ta small pitcher for serving cream\nn03129471\ta hospital where foundlings (infant children of unknown parents) are taken in and cared for\nn03129636\ta representation of Christ's nativity in the stable at Bethlehem\nn03129753\ta kind of sideboard or buffet\nn03129848\ta wicker basket used by anglers to hold fish\nn03130066\ta furnace where a corpse 
can be burned and reduced to ashes\nn03130233\ta mortuary where corpses are cremated\nn03130563\ta soft thin light fabric with a crinkled surface\nn03130761\ta very thin crepe of silk or silklike fabric\nn03130866\tan adjustable wrench designed to fit hexagonal nuts with the adjusting screw built into the head of the wrench\nn03131193\tan unglazed heavy fabric; brightly printed; used for slipcovers and draperies\nn03131574\tbaby bed with high sides made of slats\nn03131669\ta bin or granary for storing grains\nn03131967\tthe ball used in playing cricket\nn03132076\tthe club used in playing cricket\nn03132261\tsports equipment used in playing cricket\nn03132438\tfastener consisting of a metal ring for lining a small hole to permit the attachment of cords or lines\nn03132666\ta stiff coarse fabric used to stiffen hats or clothing\nn03132776\ta full stiff petticoat made of crinoline fabric\nn03133050\ta needle with a hook on the end; used in crocheting\nn03133415\tan earthen jar (made of baked clay)\nn03133878\tan electric cooker that maintains a relatively low temperature\nn03134118\ta long staff with one end being hook shaped\nn03134232\telectromagnetic radiometer consisting of a small paddlewheel that rotates when placed in daylight\nn03134394\tthe original gas-discharge cathode-ray tube\nn03134739\ta wooden ball used in playing croquet\nn03134853\tsports equipment used in playing croquet\nn03135030\ta mallet used to strike the ball in croquet\nn03135532\ta wooden structure consisting of an upright post with a transverse piece\nn03135656\tlong thin horizontal crosspiece between two vertical posts\nn03135788\tgame equipment consisting of a horizontal bar to be jumped or vaulted over\nn03135917\ta horizontal bar that goes across something\nn03136051\tany of the seats in the House of Commons used by members who do not vote regularly with either the government or the Opposition\nn03136254\ta rock drill having cruciform cutting edges; used in mining\nn03136369\ta bow 
fixed transversely on a wooden stock grooved to direct the arrow (quarrel)\nn03136504\thandsaw that cuts at right angles to the grain (or major axis)\nn03137473\tthe lowermost sail on a mizzenmast\nn03137579\ta transverse brace\nn03138128\ta small tool or hooklike implement\nn03138217\ta small rake used by a croupier to move chips around on the table\nn03138344\ta heavy iron lever with one end forged into a wedge\nn03138669\tan ornamental jeweled headdress signifying sovereignty\nn03139089\t(dentistry) dental appliance consisting of an artificial crown for a broken or decayed tooth\nn03139464\tregalia (jewelry and other paraphernalia) worn by a sovereign on state occasions\nn03139640\ta lens made of optical crown glass\nn03139998\tplatform for a lookout at or near the top of a mast\nn03140126\ta vessel made of material that does not melt easily; used for high temperature chemical reactions\nn03140292\trepresentation of the cross on which Jesus died\nn03140431\tbottle that holds wine or oil or vinegar for the table\nn03140546\ta stand for cruets containing various condiments\nn03140652\tcontrol mechanism for keeping an automobile at a set speed\nn03140771\tan unmanned aircraft that is a self-contained bomb\nn03140900\ta large fast warship; smaller than a battleship and larger than a destroyer\nn03141065\ta car in which policemen cruise the streets; equipped with radiotelephonic communications to headquarters\nn03141327\ta passenger ship used commercially for pleasure cruises\nn03141455\ta strap from the back of a saddle passing under the horse's tail; prevents saddle from slipping forward\nn03141612\tsmall jar; holds liquid (oil or water)\nn03141702\ta device that crushes something\nn03141823\ta wooden or metal staff that fits under the armpit and reaches to the ground; used by disabled person while walking\nn03142099\ta thermometer designed to measure low temperatures\nn03142205\ta measuring instrument for measuring freezing and melting points\nn03142325\ta 
thermostat that operates at very low temperatures\nn03142431\ta cellar or vault or underground burial chamber (especially beneath a church)\nn03142679\ta protective cover that protects the face of a watch\nn03143400\ta detector consisting of a fine wire in contact with a galena crystal; acts as a rectifier\nn03143572\ta microphone in which sound waves vibrate a piezoelectric crystal that generates a varying voltage\nn03143754\tan oscillator that produces electrical oscillations at a frequency determined by the physical characteristics of a piezoelectric quartz crystal\nn03144156\tan early radio receiver using a crystal detector\nn03144873\tbody armor that protects the elbow\nn03144982\tan instrument of punishment consisting of a chair in which offenders were ducked in water\nn03145147\tclock that announces the hours with a sound like the call of the cuckoo\nn03145277\tthe galley or pantry of a small ship\nn03145384\ta club that is used as a weapon\nn03145522\tsports implement consisting of a tapering rod used to strike a cue ball in pool or billiards\nn03145719\tthe ball that the billiard player or pool player strikes with his cue\nn03145843\tthe lap consisting of a turned-back hem encircling the end of the sleeve or leg\nn03146219\tmedieval body armor that covers the chest and back\nn03146342\tarmor plate that protects the thigh\nn03146449\ta passage with access only at one end\nn03146560\ta specialized endoscope for visually examining a woman's pelvic organs\nn03146687\ta gutter in a roof\nn03146777\ta divided skirt\nn03146846\ta farm implement used to break up the surface of the soil (for aeration and weed control and conservation of moisture)\nn03147084\ta medieval musket\nn03147156\ta heavy cannon with a long barrel used in the 16th and 17th centuries\nn03147280\ta transverse and totally enclosed drain under a road or railway\nn03147509\ta small open container usually used for drinking; usually has a handle\nn03148324\ta small room (or recess) or cabinet used 
for storage space\nn03148518\thook (usually on the underside of a shelf) for hanging cups\nn03148727\ta roof in the form of a dome\nn03148808\ta vertical cylindrical furnace for melting iron for casting\nn03149135\ta horse's bit with an attached chain or strap to check the horse\nn03149401\ta roof with two or more slopes on each side of the ridge\nn03149686\ta paving stone forming part of a curb\nn03149810\ta surgical instrument shaped like a scoop to remove tissue from a bodily cavity\nn03150232\ta mechanical device consisting of a cylindrical tube around which the hair is wound to curl it\nn03150511\ta cylindrical metal home appliance that heats a lock of hair that has been curled around it\nn03150661\ta square comb with rows of small teeth; used to curry horses\nn03150795\t(computer science) indicator consisting of a movable spot of light (an icon) on a visual display; moving it allows the user to point to commands or screen positions\nn03151077\thanging cloth used as a blind (especially for a window)\nn03152303\ta government building where customs are collected and where ships are cleared to enter or leave the country\nn03152951\ta representation (drawing or model) of something in which the outside is omitted to reveal the inner parts\nn03153246\ta short heavy curved sword with one edge; formerly used by sailors\nn03153585\ta device that terminates the flow in a pipe\nn03153948\ta switch that interrupts an electric circuit in the event of an overload\nn03154073\ta cutting implement; a tool for cutting\nn03154316\ta sailing vessel with a single mast set further back than the mast of a sloop\nn03154446\ta tool used for cutting or slicing\nn03154616\ta room where films or tapes are edited (by cutting out unwanted parts)\nn03154745\ta low stool; formerly in Scotland, a seat in a church where an offender was publicly rebuked\nn03154895\tembroidery in which the design is outlined in a buttonhole stitch and the intervening material is cut away\nn03155178\ta cafe whose 
customers sit at computer terminals and log on to the internet while they eat and drink\nn03155502\ta primitive style of masonry characterized by use of massive stones of irregular shape and size\nn03155915\ta writing implement with a small toothed wheel that cuts small holes in a stencil\nn03156071\tan accelerator that imparts energies of several million electron-volts to rapidly moving particles\nn03156279\ta cylindrical container for oxygen or compressed air\nn03156405\ta chamber within which piston moves\nn03156767\ta lock in which a cylinder rotates to move a bolt; tumblers are pins; inserting the key lifts and aligns the pins to free the cylinder to rotate\nn03157348\ta percussion instrument consisting of a concave brass disk; makes a loud crashing sound when hit with a drumstick or when two are struck together\nn03158186\tRussian country house\nn03158414\ta kind of polyester fabric\nn03158668\tthe section of a pedestal between the base and the surbase\nn03158796\ta plane for making a dado groove\nn03158885\ta short knife with a pointed blade used for piercing or stabbing\nn03159535\ta farm where dairy products are produced\nn03159640\ta platform raised above the surrounding level to give prominence to the person on it\nn03160001\ta wheel around which is a set of print characters that make a typing impression on paper\nn03160186\ta printer that uses a daisy print wheel\nn03160309\ta barrier constructed to contain the flow of water or to keep out the sea\nn03160740\ta fabric of linen or cotton or silk or wool with a reversible pattern woven into it\nn03161016\ta device that dampens or moistens something\nn03161450\ta device that decreases the amplitude of electronic, mechanical, acoustical, or aerodynamic oscillations\nn03161893\tdamper consisting of a small felted block that drops onto a piano string to stop its vibration\nn03162297\ta lantern with a single opening and a sliding panel that can be closed to conceal the light\nn03162460\ta room in which 
photographs are developed\nn03162556\ta long needle with an eye large enough for heavy darning or embroidery thread\nn03162714\ta tapered tuck made in dressmaking\nn03162818\ta small narrow pointed missile that is thrown or shot\nn03163222\tinstrument panel on an automobile or airplane containing dials and controls\nn03163381\ta loose and brightly colored African shirt\nn03163488\ta mechanical damper; the vibrating part is attached to a piston that moves in a chamber filled with liquid\nn03163798\tconverter for changing information from one code to another\nn03163973\ta device that can be used to insert data into a computer or other computational device\nn03164192\ta multiplexer that permits two or more data sources to share a common transmission medium\nn03164344\tsystem consisting of the network of all communication channels used within an organization\nn03164605\ta large sofa usually convertible into a bed\nn03164722\ta small decorative writing desk\nn03164929\ta crane-like device (usually one of a pair) for suspending or lowering equipment (as a lifeboat)\nn03165096\tan armless couch; a seat by day and a bed by night\nn03165211\tan accounting journal as a physical object\nn03165466\ta nursery for the supervision of preschool children while the parents work\nn03165616\ta school building without boarding facilities\nn03165823\tan axle that carries a wheel but without power to drive it\nn03165955\t(nautical) a round hardwood disk with holes and a grooved perimeter used to tighten a shroud\nn03166120\ta train or bus or taxi traveling empty\nn03166514\tthe official residence of a dean\nn03166600\tthe bed on which a person dies\nn03166685\ta concentration camp where prisoners are likely to die or be killed\nn03166809\tthe cellblock in a prison where those condemned to death await execution\nn03166951\ta bell rung to announce a death\nn03167153\tthe car seat beside the driver of an automobile; believed to be the most dangerous place to sit in a car in case of an 
accident\nn03167978\tstreet name for a packet of illegal drugs\nn03168107\ta porch that resembles the deck on a ship\nn03168217\ta folding chair for use outdoors; a wooden frame supports a length of canvas\nn03168543\ta superstructure on the upper deck of a ship\nn03168663\t(paper making) a frame used to form paper pulp into sheets\nn03168774\trough edge left by a deckle on handmade paper or produced artificially on machine-made paper\nn03168933\tan instrument for measuring magnetic declination\nn03169063\ta machine that converts a coded text into ordinary language\nn03169176\ta low-cut neckline on a woman's dress\nn03170292\tart produced by decorating a surface with cutouts and then coating it with several layers of varnish or lacquer\nn03170459\t(computer science) a file server that can be used only as a file server\nn03170635\telectric refrigerator (trade name Deepfreeze) in which food is frozen and stored for long periods of time\nn03170872\ta tight-fitting hat with visors front and back; formerly worn by hunters\nn03171228\tthe weaponry available for the defense of a region\nn03171356\ta structure used to defend against attack\nn03171635\tan electronic device that administers an electric shock of preset voltage to the heart through the chest wall in an attempt to restore the normal rhythm of the heart during ventricular fibrillation\nn03171910\tthe arrangement of defensive fortifications to protect against enemy fire\nn03172038\ta device intended to turn aside the flow of something (water or air or smoke etc)\nn03172738\ta mechanism that automatically delays the release of a camera shutter for a fixed period of time so that the photographer can appear in the picture\nn03172965\ta circuit designed to introduce a calculated delay into the transmission of a signal\nn03173270\ta style of glazed earthenware; usually white with blue decoration\nn03173387\ta shop selling ready-to-eat food products\nn03173929\ta van suitable for delivering goods or services to 
customers\nn03174079\tan airplane with wings that give it the appearance of an isosceles triangle\nn03174450\tlarge bottle with a short narrow neck; often has small handles at neck and is enclosed in wickerwork\nn03174731\tsmall coffee cup; for serving black coffee\nn03175081\ta room that is comfortable and secluded\nn03175189\ta coarse durable twill-weave cotton fabric\nn03175301\ta measuring instrument for determining density or specific gravity\nn03175457\ta measuring instrument for determining optical or photographic density\nn03175604\ta device to repair teeth or replace missing teeth\nn03175843\ta soft thread for cleaning the spaces between the teeth\nn03175983\tan implant that replaces a natural tooth\nn03176238\ta high speed drill that dentists use to cut into teeth\nn03176386\ta dental appliance that artificially replaces missing teeth\nn03176594\ta toiletry applied to the skin in order to mask unpleasant odors\nn03176763\ta large retail store organized into departments offering a variety of merchandise; commonly part of a retail chain\nn03177059\tlounge where passengers can await departure\nn03177165\ta cosmetic for temporary removal of undesired hair\nn03177708\ta device used by physician to press a part down or aside\nn03178000\tnavigational instrument used to measure the depth of a body of water (as by ultrasound or radar)\nn03178173\ta gauge for measuring the depth of grooves or holes or other concavities\nn03178430\ta simple crane having lifting tackle slung from a boom\nn03178538\ta framework erected over an oil well to allow drill tubes to be raised and lowered\nn03178674\ta pocket pistol of large caliber with a short barrel\nn03179701\ta piece of furniture with a writing surface and usually drawers or other compartments\nn03179910\ta telephone set that sits on a desk or table\nn03180011\ta personal computer small enough to fit conveniently in an individual workspace\nn03180384\ta spoon larger than a teaspoon and smaller than a 
tablespoon\nn03180504\ta small fast lightly armored but heavily armed warship\nn03180732\twarship smaller than a destroyer; designed to escort fleets or convoys\nn03180865\ta house that stands alone\nn03180969\tany device that receives a signal or stimulus (as heat or pressure or light or motion etc.) and responds to it in a distinctive manner\nn03181293\telectronic equipment that detects the presence of radio signals or radioactivity\nn03181667\tan institution where juvenile offenders can be held temporarily (usually under the supervision of a juvenile court)\nn03182140\ta fuse containing an explosive\nn03182232\ta mechanical or electrical explosive device or a small amount of explosive; can be used to initiate the reaction of a disrupting explosive\nn03182912\tphotographic equipment consisting of a chemical solution for developing film\nn03183080\tan instrumentality invented for a particular purpose\nn03185868\tvacuum flask that holds liquid air or helium for scientific experiments\nn03186199\ta long loincloth worn by Hindu men\nn03186285\ta lateen-rigged sailing vessel used by Arabs\nn03186818\ta disc on a telephone that is rotated a fixed distance for each number called\nn03187037\tthe circular graduated indicator on various measuring instruments\nn03187153\tthe control on a radio or television set that is used for tuning\nn03187268\t(computer science) a small temporary window in a graphical user interface that appears in order to request information from the user; after the information has been provided the user dismisses the box with `okay' or `cancel'\nn03187595\ta telephone with a dial for registering the number to be called\nn03187751\ta medical instrument for separating substances in solution by unequal diffusion through semipermeable membranes\nn03188290\tfabric covered with glittering ornaments such as sequins or rhinestones\nn03188531\tgarment consisting of a folded cloth drawn up between the legs and fastened at the waist; worn by infants to catch 
excrement\nn03188725\ta fabric (usually cotton or linen) with a distinctive woven pattern of small repeated figures\nn03188871\ta foghorn that makes a signal consisting of two tones\nn03189083\ta mechanical device in a camera that controls size of aperture of the lens\nn03189311\telectro-acoustic transducer that vibrates to receive or produce sound waves\nn03189818\ta medical instrument for local heating of bodily tissues for medical purposes\nn03190458\ta wooden hand tool with a pointed end; used to make holes in the ground for planting seeds or bulbs\nn03191286\ta small container (open at one end) in which dice are shaken by hand and from which they are thrown\nn03191451\ta mechanical device used for dicing food\nn03191561\ta man's detachable insert (usually starched) to simulate the front of a shirt\nn03191776\ta small third seat in the back of an old-fashioned two-seater\nn03192543\ta tape recorder that records and reproduces dictation\nn03192907\ta cutting tool that is fitted into a diestock and used for cutting male (external) screw threads on screws or bolts or pipes or rods\nn03193107\tan internal-combustion engine that burns heavy oil\nn03193260\ta locomotive driven by the electric current generated by a diesel engine\nn03193423\ta locomotive driven by a hydraulic transmission system powered by a diesel engine\nn03193597\ta locomotive driven by a diesel engine\nn03193754\ta device that holds the dies that cut external threads on metal cylinders\nn03194170\tan analog computer designed to solve differential equations\nn03194297\ta bevel gear that permits rotation of two shafts at different speeds; used on the rear axle of automobiles to allow wheels to rotate at different speeds on curves\nn03194812\toptical device that distributes the light of a lamp evenly\nn03194992\tbaffle that distributes sound waves evenly\nn03195332\tautoclave consisting of a vessel in which plant or animal materials are digested\nn03195485\ttemporary living 
quarters\nn03195799\tdevice for converting digital signals into analogue signals\nn03195959\ta digital tape recording of sound\nn03196062\ta camera that encodes an image digitally and store it for later reproduction\nn03196217\ta clock that displays the time of day digitally\nn03196324\ta computer that represents information by numerical (binary) digits\nn03196598\ta display that gives the information in the form of characters (numbers or letters)\nn03196990\ta generic name for digital lines that are provided by telephone companies to their local subscribers and that carry data at high speeds\nn03197201\tan electronic voltmeter that gives readings in digits\nn03197337\ta watch with a digital display\nn03197446\tdevice for converting analogue signals into digital signals\nn03198223\ta surgical instrument that is used to dilate or distend an opening or an organ\nn03198500\ta vibrating device that substitutes for an erect penis to provide vaginal stimulation\nn03199358\ta strong cotton fabric with a raised pattern; used for bedcovers and curtains\nn03199488\ta rheostat that varies the current through an electric light in order to control the level of illumination\nn03199647\ta restaurant that resembles a dining car\nn03199775\ta small area off of a kitchen that is used for dining\nn03199901\ta small boat of shallow draft with cross thwarts for seats and rowlocks for oars with which it is propelled\nn03200231\tan area arranged for dining\nn03200357\ta passenger car where food is served in transit\nn03200539\ta large room at a college or university; used especially for dining\nn03200701\ta room used for dining\nn03200906\tfurniture intended for use in a dining room\nn03201035\tdining-room furniture consisting of a table on which meals can be served\nn03201208\ta table at which meals are served\nn03201529\ta bell rung to announce that dinner has been served\nn03201638\ta gown for evening wear\nn03201776\tsemiformal evening dress for men\nn03201895\ta large napkin used 
when dinner is served\nn03201996\ta pail in which a workman carries his lunch or dinner\nn03202354\tthe dining table where dinner is served and eaten\nn03202481\ta theater at which dinner is included in the price of admission\nn03202760\ta semiconductor that consists of a p-n junction\nn03202940\ta thermionic tube having two electrodes; used as a rectifier\nn03203089\ta candle that is made by repeated dipping in a pool of wax or tallow\nn03203806\tgovernment building in which diplomats live or work\nn03204134\tan aerial half a wavelength long consisting of two rods connected to a transmission line at the center\nn03204306\ta ladle that has a cup with a long handle\nn03204436\ta graduated rod dipped into a container to indicate the fluid level\nn03204558\t(computer science) one of a set of small on-off switches mounted in computer hardware; used in place of jumpers to configure the machine for a user\nn03204955\tan antenna that transmits or receives signals only in a narrow angle\nn03205143\ta microphone that is designed to receive sound from a particular direction\nn03205304\tradio; determines the direction of incoming radio waves\nn03205458\ta relatively long dagger with a straight blade\nn03205574\ta dress with a tight bodice and full skirt\nn03205669\ta full skirt with a gathered waistband\nn03205903\tan atom bomb that leaves considerable radioactive contamination\nn03206023\ta lamp that generates light by a discharge between two electrodes in a gas\nn03206158\ta pipe through which fluids can be discharged\nn03206282\ta public dance hall for dancing to recorded popular music\nn03206405\ta sales outlet offering goods at a discounted price\nn03206602\ta disk used in throwing competitions\nn03206718\tany attire that modifies the appearance in order to conceal the wearer's identity\nn03206908\ta piece of dishware normally used as a container for holding or serving food\nn03207305\tdirectional antenna consisting of a parabolic reflector for microwave or radio 
frequency radiation\nn03207548\tlarge pan for washing dishes\nn03207630\ta rack for holding dishes as dishwater drains off of them\nn03207743\ta cloth for washing dishes\nn03207835\ta towel for drying dishes\nn03207941\ta machine for washing dishes\nn03208556\ta flat circular plate\nn03208938\thydraulic brake in which friction is applied to both sides of a spinning disk by the brake pads\nn03209359\ta friction clutch in which the frictional surfaces are disks\nn03209477\t(computer science) a circuit or chip that translates commands into a form that can control a hard disk drive\nn03209666\tcomputer hardware that holds and spins a magnetic or optical disk and reads and writes information on it\nn03209910\ta small plastic magnetic disk enclosed in a stiff envelope with a radial slit; used to store data or programs for a microcomputer\nn03210245\ta harrow with a series of disks set on edge at an angle\nn03210372\tcase consisting of an oblong container (usually having a lock) for carrying dispatches or other valuables\nn03210552\tclinic where medicine and medical supplies are dispensed\nn03210683\ta container so designed that the contents can be used in prescribed amounts\nn03211117\tan electronic device that represents information in visual form\nn03211413\t(computer science) an electronic device that converts information in memory to video output to a display\nn03211616\ta vertical surface on which information can be displayed to public view\nn03211789\ta window of a store facing onto the street; used to display merchandise for sale in the store\nn03212114\ta kitchen appliance for disposing of garbage\nn03212247\ta high explosive that is used to damage the target that is under attack\nn03212406\tthe staff on which wool or flax is wound before spinning\nn03212811\ta plant and works where alcoholic drinks are made by distillation\nn03213014\telectrical device that distributes voltage to the spark plugs of a gasoline engine in the order of the firing 
sequence\nn03213361\tthe cam inside the distributor that rotates to contact spark plug terminals in the correct order\nn03213538\tthe cap of the distributor that holds in place the wires from the distributor to the spark plugs\nn03213715\tthe housing that supports the distributor cam\nn03213826\ta contact in the distributor; as the rotor turns its projecting arm contacts them and current flows to the spark plugs\nn03214253\ta long narrow excavation in the earth\nn03214450\ta spade with a long handle for digging narrow ditches\nn03214582\tkit used by sailors and soldiers\nn03214966\ta long backless sofa (usually with pillows against a wall)\nn03215076\ta Muslim council chamber or law court\nn03215191\ta bomber that releases its bombs during a steep dive toward the target\nn03215337\ta lens such that a parallel beam of light passing through it is caused to diverge or spread out\nn03215508\ta highway divided down the middle by a barrier that separates traffic going in different directions\nn03215749\ta drafting instrument resembling a compass that is used for dividing lines into equal segments or for transferring measurements\nn03215930\tdiving apparatus for underwater work; has an open bottom and is supplied with compressed air\nn03216199\tforked stick that is said to dip down to indicate underground water or oil\nn03216402\ta weighted and hermetically sealed garment supplied with air; worn by underwater divers\nn03216562\ta large metal pot (12 gallon camp kettle) for cooking; used in military camps\nn03216710\ta disposable cup made of paper; for holding drinks\nn03216828\tlanding in a harbor next to a pier where ships are loaded and unloaded or repaired; may have gates to let water in or out\nn03217653\ta fine smooth soft woolen fabric\nn03217739\ta cart drawn by a dog\nn03217889\ta bag for food that a customer did not eat at a restaurant; the transparent pretense is that the food is taken home to feed the customer's dog\nn03218198\ta sled pulled by 
dogs\nn03218446\ta wrench with a handle shaped like a crank\nn03219010\ta small round piece of linen placed under a dish or bowl\nn03219135\ta small replica of a person; used as a toy\nn03219483\ta house so small that it is likened to a child's plaything\nn03219612\tconveyance consisting of a wheeled platform for moving heavy objects\nn03219859\ta woman's cloak with dolman sleeves\nn03219966\ta hussar's jacket worn over the shoulders\nn03220095\ta sleeve with a large armhole and tight cuff\nn03220237\ta prehistoric megalithic tomb typically having two large upright stones and a capstone\nn03220513\ta hemispherical roof\nn03220692\ta stadium that has a roof\nn03221059\ta mask covering the upper part of the face but with holes for the eyes\nn03221351\t(computer science) an electronic device that must be attached to a computer in order for it to use protected software\nn03221540\ta short thick jacket; often worn by workmen\nn03221720\ta swinging or sliding barrier that will close the entrance to a room or building or vehicle\nn03222176\ta room that is entered via a door\nn03222318\ta structure where people live or work (usually ordered along a street or road)\nn03222516\ta push button at an outer door that gives a ringing or buzzing signal when pushed\nn03222722\tthe frame that supports a door\nn03222857\ta jamb for a door\nn03223162\ta lock on an exterior door\nn03223299\ta mat placed outside an exterior door for wiping the shoes before entering\nn03223441\ta nail with a large head; formerly used to decorate doors\nn03223553\ta nameplate fastened to a door; indicates the person who works or lives there\nn03223686\tthe sill of a door; a horizontal piece of wood or stone that forms the bottom of a doorway and offers support when passing through a doorway\nn03223923\ta stop that keeps open doors from moving\nn03224490\tradar that uses the Doppler shift to measure velocity\nn03224603\ta gabled extension built out from a sloping roof to accommodate a vertical 
window\nn03224753\tthe window in a gabled extension built to accommodate a window\nn03224893\ta college or university building containing living quarters for students\nn03225108\ta large sleeping room containing several beds\nn03225458\ta measuring instrument for measuring doses of ionizing radiation (X-rays or radioactivity)\nn03225616\tan ornamental hanging of rich fabric hung behind the altar of a church or at the sides of a chancel\nn03225777\ta printer that represents each character as a pattern of dots from a dot matrix\nn03225988\ta bed wide enough to accommodate two sleepers\nn03226090\tan ax that has cutting edges on both sides of the head\nn03226254\ttwo saucepans, one fitting inside the other\nn03226375\ta jacket having fronts that overlap enough for two separate rows of buttons\nn03226538\ta suit with a double-breasted jacket\nn03226880\ttwo vertical doors that meet in the middle of the door frame when closed\nn03227010\ta window with two panes of glass and a space between them; reduces heat and noise transmission through the window\nn03227184\ta window having two sashes that slide up and down\nn03227317\ta knit fabric similar to jersey that is made with two sets of needles producing a double thickness joined by interlocking stitches\nn03227721\tan electronic device that doubles the voltage or the frequency of an input signal\nn03227856\ta pair of joined reeds that vibrate together to produce the sound in some woodwinds\nn03228016\ta woodwind that has a pair of joined reeds that vibrate together\nn03228254\ta man's close-fitting jacket; worn during the Renaissance\nn03228365\ta crossbar on a wagon or carriage to which two whiffletrees are attached in order to harness two horses abreast\nn03228533\ta small syringe with detachable nozzles; used for vaginal lavage and enemas\nn03228692\ta birdhouse for pigeons\nn03228796\ta medicinal powder made essentially of ipecac and opium; formerly used to relieve pain and induce perspiration\nn03228967\ta mortise 
joint formed by interlocking tenons and mortises\nn03229115\ta woodworking plane designed to make the grooves for dovetail joints\nn03229244\ta fastener that is inserted into holes in two adjacent pieces and holds them together\nn03229526\tthe front half of the stage (as seen from the audience)\nn03231160\tan instrument used by a draftsman in making drawings\nn03231368\ta worktable with adjustable top\nn03231819\ta sniper rifle with a telescopic sight\nn03232309\ta ditch for carrying off excess water or sewage\nn03232417\ta system of watercourses or drains for carrying off excess water\nn03232543\ta filter in a sink drain; traps debris but passes water\nn03232815\ta removable plug for holding water in a tub or basin\nn03232923\ta sterile covering arranged over a patient's body during a medical examination or during surgery in order to reduce the possibility of contamination\nn03233123\tcloth gracefully draped and arranged in loose folds\nn03233624\ta strong metal bar bearing a hook to attach something to be pulled\nn03233744\ta bridge that can be raised to block passage or to allow boats or ships to pass beneath it\nn03233905\ta boxlike container in a piece of furniture; made so as to slide in and out\nn03234164\tunderpants worn by men\nn03234952\tcolored chalks used by artists\nn03235042\ta formal room where visitors can be received and entertained\nn03235180\ta private compartment on a sleeping car with three bunks and a toilet\nn03235327\ta woodworker's knife to shave surfaces\nn03235796\ta bag that is closed at the top with a drawstring\nn03235979\ta low heavy horse cart without sides; used for haulage\nn03236093\tbattleship that has big guns all of the same caliber\nn03236217\ta power shovel to remove material from a channel or riverbed\nn03236423\ta barge (or a vessel resembling a barge) that is used for dredging\nn03236580\ta bucket for lifting material from a channel or riverbed\nn03236735\ta one-piece garment for a woman; has skirt and bodice\nn03237212\ta 
dress uniform for formal occasions\nn03237340\ta cabinet with shelves\nn03237416\ta man's hat with a tall crown; usually covered with silk or with beaver fur\nn03237639\ta cloth covering for a wound or sore\nn03237839\ta small piece of luggage for carrying brushes and bottles and toilet articles while traveling\nn03237992\ta robe worn before dressing or while lounging\nn03238131\ta room in which you can change clothes\nn03238286\ta woman's loose jacket; worn while dressing\nn03238586\tlow table with mirror or mirrors where one sits while dressing or applying makeup\nn03238762\ta rack used primarily to display dresses for sale in a store\nn03238879\ta man's white shirt (with a starch front) for evening wear (usually with a tuxedo)\nn03239054\tformalwear consisting of full evening dress for men\nn03239259\ta military uniform worn on formal occasions\nn03239607\ta large fishnet supported by floats; it drifts with the current\nn03239726\ta tool with a sharp point and cutting edges for making holes in hard materials (usually rotating rapidly or by repeated blows)\nn03240140\ta rotating power drill powered by an electric motor\nn03240683\tdrilling rig consisting of an offshore platform (floating or fixed to the sea bed) from which many oil wells can be bored radially\nn03240892\ta machine tool with a separate, upright stand; an electric drill is pressed into the work automatically or with a hand lever\nn03241093\trig used in drilling for oil or gas\nn03241335\ta public fountain to provide a jet of drinking water\nn03241496\ta vessel intended for drinking\nn03241903\ta downward hanging loop in a line that runs to a building\nn03242120\ta small mat placed under a glass to protect a surface from condensation\nn03242264\tpan under a refrigerator for collecting liquid waste\nn03242390\tpan for catching drippings under roasting meat\nn03242506\ta coffeepot for making drip coffee\nn03242995\ta mechanism by which force or power is transmitted in a machine\nn03243218\t(computer 
science) a device that writes data onto or reads data from a storage medium\nn03243625\tmechanism that transmits power from the engine to the driving wheels of a motor vehicle\nn03244047\ta golf club (a wood) with a near vertical face that is used for hitting long shots from the tee\nn03244231\ta rotating shaft that transmits power from the engine to the point of application\nn03244388\ta road leading up to a private house\nn03244775\t(golf) the long iron with the most nearly vertical face\nn03244919\ta wheel that drives a motor vehicle (transforms torque into a tractive force)\nn03245271\ta parachute used to decelerate an object that is moving rapidly\nn03245421\ta small parachute that pulls the main parachute from its storage pack\nn03245724\ta pipe of the bagpipe that is tuned to produce a single continuous tone\nn03245889\tan aircraft without a pilot that is operated by remote control\nn03246197\ta blunt pointed arch drawn from two centers within the span\nn03246312\ta large piece of cloth laid over the floor or furniture while a room is being painted\nn03246454\ta curtain that can be lowered and raised onto a stage from the flies; often used as background scenery\nn03246653\tdevice for making large forgings\nn03246933\ta table that has a drop-leaf to enlarge its surface\nn03247083\tpipet consisting of a small tube with a vacuum bulb at one end for drawing liquid in and releasing it a drop at a time\nn03247351\tan open horse-drawn carriage with four wheels; formerly used in Poland and Russia\nn03247495\ta stonemason's chisel with a broad edge for dressing stone\nn03248835\ta rug made of a coarse fabric having a cotton warp and a wool filling\nn03249342\ta retail shop where medicine and other articles are sold\nn03249569\ta musical percussion instrument; usually consists of a hollow cylinder with a membrane stretched across each end\nn03249956\ta cylindrical metal container used for shipping or storage of liquids\nn03250089\thydraulic brake in which friction is 
applied to the inside of a spinning drum by the brake shoe\nn03250279\ta membrane that is stretched taut over a drum\nn03250405\ta line printer in which the type is mounted on a rotating drum that contains a full character set for each printing position\nn03250588\ta power tool used for sanding wood; an endless loop of sandpaper is moved at high speed by an electric motor\nn03250847\ta stick used for playing a drum\nn03250952\ta voltaic battery consisting of two or more dry cells\nn03251100\tan ordinary thermometer with a dry bulb; used to measure the air temperature\nn03251280\ta small Leclanche cell containing no free liquid; the electrolyte is a paste and the negative zinc pole forms the container of the cell; used in flashlights, portable radios, etc.\nn03251533\ta large dock from which water can be pumped out; used for building ships or for repairing a ship below its waterline\nn03251766\tan appliance that removes moisture\nn03251932\ta fly (fisherman's lure) that skims the surface of the water\nn03252231\ta kiln for drying and seasoning lumber\nn03252324\tmasonry without mortar\nn03252422\ta steel needle for engraving without acid on a bare copper plate\nn03252637\ta stone wall made with stones fitted together without mortar\nn03252787\ta type of passive matrix display in which the top and bottom half of the screen are refreshed simultaneously\nn03253071\ta heavy cotton fabric of plain weave; used for clothing and tents\nn03253187\ta boardwalk laid across muddy ground\nn03253279\ta bowling pin that is short and squat by comparison with a tenpin\nn03253714\ta clay pipe with a short stem\nn03253796\ta coarse heavy woolen fabric\nn03253886\ta large cylindrical bag of heavy cloth; for carrying personal belongings\nn03254046\ta warm coat made of duffel; usually has a hood and fastens with toggles\nn03254189\teither of two low shelters on either side of a baseball diamond where the players and coaches sit during the game\nn03254374\ta canoe made by hollowing out 
and shaping a large log\nn03254625\tthe organ stop having a tone of soft sweet string quality\nn03254737\ta trapezoidal zither whose metal strings are struck with light hammers\nn03254862\ta stringed instrument used in American folk music; an elliptical body and a fretted fingerboard and three strings\nn03255030\tan exercising weight; two spheres connected by a short bar that serves as a handle\nn03255167\ta bomb that falls because of gravity and is not guided to a target\nn03255322\ta small elevator used to convey food (or other goods) from one floor of a building to another\nn03255488\ta soft-nosed small-arms bullet that expands when it hits a target and causes a gaping wound\nn03255899\ta cart that can be tilted to empty contents without handling\nn03256032\ta container designed to receive and transport and dump waste\nn03256166\ttruck whose contents can be emptied without handling; the front end of the platform can be pneumatically raised so that the load is discharged by gravity\nn03256472\ta surveyor's level having a short telescope fixed to a horizontally rotating table and a spirit level\nn03256631\ta cone-shaped paper hat formerly placed on the head of slow or lazy pupils\nn03256788\ta recreational vehicle with large tires used on beaches or sand dunes\nn03256928\ta dark cell (usually underground) where prisoners can be confined\nn03257065\tan apartment having rooms on two floors that are connected by a staircase\nn03257210\ta house with two units sharing a common wall\nn03257586\tapparatus that makes copies of typed, written or drawn material\nn03258192\ta bag into which dirt is sucked by a vacuum cleaner\nn03258330\ta piece of cloth used for dusting\nn03258456\ta removable plastic protective covering for a piece of equipment\nn03258577\ta large piece of cloth used to cover furniture that is not in use for a long period\nn03258905\ta dry swab for dusting floors\nn03259009\ta short-handled receptacle into which dust can be swept\nn03259280\tan oven 
consisting of a metal box for cooking in front of a fire\nn03259401\tiron or earthenware cooking pot; used for stews\nn03259505\thousing that someone is living in\nn03260206\ta workshop where dyeing is done\nn03260504\tgenerator consisting of a coil (the armature) that rotates between the poles of an electromagnet (the field magnet) causing a current to flow in the armature\nn03260733\tmeasuring instrument designed to measure power\nn03260849\ta chair designed by Charles Eames; originally made of molded plywood; seat and back shaped to fit the human body\nn03261019\tone of two flaps attached to a cap to keep the ears warm\nn03261263\ta radar that is part of an early warning system\nn03261395\ta network of radar installations designed to detect enemy missiles or aircraft while there is still time to intercept them\nn03261603\teither of a pair of ear coverings (usually connected by a headband) that are worn to keep the ears warm in cold weather\nn03261776\telectro-acoustic transducer for converting electric signals into sounds; it is held over or inserted into the ear\nn03262072\ta plug of cotton, wax, or rubber that is fitted into the ear canal for protection against the entry of water or loud noise\nn03262248\tan earphone that is inserted into the ear canal\nn03262519\tceramic ware made of porous clay fired at low heat\nn03262717\tan earthen rampart\nn03262809\tan upright tripod for displaying something (usually an artist's canvas)\nn03262932\ta comfortable upholstered armchair\nn03263076\tthe overhang at the lower edge of a roof\nn03263338\tattire that is appropriate to wear in a church\nn03263640\tovolo molding between the shaft and the abacus of a Doric column\nn03263758\ta sonograph that creates an image of the heart and its abnormalities\nn03264906\tgarden tool for cutting grass around the edges of a yard\nn03265032\tany cutting tool with a sharp cutting edge (as a chisel or knife or plane or gouge)\nn03265754\ta furnished apartment with a kitchenette and 
bathroom\nn03266195\ta decorative molding; a series of egg-shaped figures alternating with another shape\nn03266371\ta mixer for beating eggs or whipping cream\nn03266620\ta sandglass that runs for three minutes; used to time the boiling of eggs\nn03266749\ta soft quilt usually filled with the down of the eider\nn03267113\ta black pool ball bearing the number 8; should be the last to go in certain pool games\nn03267468\ta pilot's seat in an airplane that can be forcibly ejected in the case of an emergency; then the pilot descends by parachute\nn03267696\ta fabric made of yarns containing an elastic material\nn03267821\ta bandage containing stretchable material that can apply local pressure\nn03268142\tan elastic adhesive bandage for covering cuts or wounds\nn03268311\tthe part of a sleeve that covers the elbow joint\nn03268645\tprotective garment consisting of a pad worn over the elbow by football and hockey players\nn03268790\ta car that is powered by electricity\nn03268918\ta cable that provides an electrical connection for telephone or television or power stations\nn03269073\tcontact that allows current to pass from one conductor to another\nn03269203\tconverter that converts alternating current into direct current or vice versa\nn03269401\ta device that produces or is powered by electricity\nn03270165\tequipment in a motor vehicle that provides electricity to start the engine and ignite the fuel and operate the lights and windshield wiper and heater and air conditioner and radio\nn03270695\ta bell activated by the magnetic effect of an electric current\nn03270854\ta blanket containing and electric heating element that can be controlled to the desired temperature by a rheostat\nn03271030\tan instrument of execution by electrocution; resembles an ordinary seat for one person\nn03271260\ta clock using a small electric motor\nn03271376\tan electric lamp in which the light comes from an electric discharge between two electrodes in a glass tube\nn03271574\ta fan run 
by an electric motor\nn03271765\ta frying pan heated by electricity\nn03271865\tany furnace in which the heat is provided by an electric current\nn03272010\ta guitar whose sound is amplified by electrical means\nn03272125\ta hammer driven by electric motor\nn03272239\ta small electric space heater\nn03272383\ta lamp powered by electricity\nn03272562\ta locomotive that is powered by an electric motor\nn03272810\ta meter for measuring the amount of electric power used\nn03272940\ta food mixer powered by an electric motor\nn03273061\ta motor that converts electricity to mechanical work\nn03273551\t(music) an electronic simulation of a pipe organ\nn03273740\ta kitchen range in which the heat for cooking is provided by electric power\nn03273913\ta refrigerator in which the coolant is pumped around by an electric motor\nn03274265\ta toothbrush with an electric motor in the handle that vibrates the head of the brush\nn03274435\ta typewriter powered by an electric motor\nn03274561\ta transducer that converts electrical to acoustic energy or vice versa\nn03274796\ta conductor used to make electrical contact with some part of a circuit\nn03275125\tmeasuring instrument that uses the interaction of the magnetic fields of two coils to measure current or voltage or power\nn03275311\tmedical instrument that records electric currents generated by the brain\nn03275566\tan apparatus for the electrical transmission of pictures\nn03275681\ta fixed capacitor consisting of two electrodes separated by an electrolyte\nn03275864\ta cell containing an electrolyte in which an applied voltage causes a reaction to occur that would not occur otherwise (such as the breakdown of water into hydrogen and oxygen)\nn03276179\ta temporary magnet made by coiling wire around an iron core; when current flows in the coil the iron becomes a magnet\nn03276696\tmeter to measure electrostatic voltage differences; draws no current from the source\nn03276839\ta medical instrument that records the electrical 
waves associated with the activity of skeletal muscles\nn03277004\tcollider that consists of an accelerator that collides electrons and positrons\nn03277149\tthe electrode that is the source of electrons in a cathode-ray tube or electron microscope; consists of a cathode that emits a stream of electrons and the electrostatic or electromagnetic apparatus that focuses it\nn03277459\ta balance that generates a current proportional to the displacement of the pan\nn03277602\t(telecommunication) converter for converting a signal from one frequency to another\nn03277771\ta device that accomplishes its purpose electronically\nn03278248\tequipment that involves the controlled conduction of electrons (especially in a gas or vacuum or semiconductor)\nn03278914\tan electronic monitor that monitors fetal heartbeat and the mother's uterine contractions during childbirth\nn03279153\ta musical instrument that generates sounds electronically\nn03279364\ta voltmeter whose sensitivity is increased by amplification\nn03279508\ta microscope that is similar in purpose to a light microscope but achieves much greater resolving power by using a parallel beam of electrons to illuminate the object instead of a beam of light\nn03279804\ta vacuum tube that amplifies a flow of electrons\nn03279918\ta simple electrostatic generator that generates repeated charges of static electricity\nn03280216\tmeasuring instrument that detects electric charge; two gold leaves diverge owing to repulsion of charges with like sign\nn03280394\telectrical device that produces a high voltage by building up a charge of static electricity\nn03280644\ta printer that uses an electric charge to deposit toner on paper\nn03281145\tlifting device consisting of a platform or cage that is raised and lowered mechanically in a vertical shaft in order to move people from one floor to another in a building\nn03281524\tthe airfoil on the tailplane of an aircraft that makes it ascend or descend\nn03281673\ta vertical shaft in a 
building to permit the passage of an elevator from floor to floor\nn03282060\ta long artificial mound of stone or earth; built to hold back water or to support a road or as protection\nn03282295\ta diplomatic building where ambassadors live or work\nn03282401\ta superfluous ornament\nn03283221\ta room in a hospital or clinic staffed and equipped to provide emergency care to persons requiring immediate medical treatment\nn03283413\ta basin used by bedridden patients for vomiting\nn03283827\tthe electrode in a transistor where electrons originate\nn03284308\ta container that has been emptied\nn03284482\ta light-sensitive coating on paper or film; consists of fine grains of silver bromide suspended in a gelatin\nn03284743\tany smooth glossy coating that resembles ceramic glaze\nn03284886\ta paint that dries to a hard glossy finish\nn03284981\tcooking utensil of enameled iron\nn03285578\ta paint consisting of pigment mixed with melted beeswax; it is fixed with heat after application\nn03285730\tan X ray of the brain made by replacing spinal fluid with a gas (usually oxygen) to improve contrast\nn03285912\ta structure consisting of an area that has been enclosed for some purpose\nn03286572\ta long slender medical instrument for examining the interior of a bodily organ or performing minor surgery\nn03287351\ta device that supplies electrical energy\nn03287733\tmotor that converts thermal energy to mechanical work\nn03288003\tan instrument or machine that is used in warfare, such as a battering ram, catapult, artillery piece, etc.\nn03288500\ta room (as on a ship) in which the engine is located\nn03288643\tmachinery consisting of engines collectively\nn03288742\ta double-reed woodwind instrument similar to an oboe but lower in pitch\nn03288886\ta saddle having a steel cantle and pommel and no horn\nn03289660\tphotographic equipment consisting of an optical projector used to enlarge a photograph\nn03289985\ta coordinated outfit (set of clothing)\nn03290096\tcolors flown by 
a ship to show its nationality\nn03290195\t(architecture) the structure consisting of the part of a classical temple above the columns between a capital and the roof\nn03290653\ta wall unit containing sound and television systems\nn03291413\ta hand shovel carried by infantrymen for digging trenches\nn03291551\tan entrenched fortification; a position protected by trenches\nn03291741\tany wrapper or covering\nn03291819\ta flat (usually rectangular) container for a letter, thin package, etc.\nn03291963\tthe bag containing the gas in a balloon\nn03292085\ta crude stone artifact (as a chipped flint); possibly the earliest tools\nn03292362\tarmor plate that protects the shoulder\nn03292475\ta fencing sword similar to a foil but with a heavier blade\nn03292603\ta large table centerpiece with branching holders for fruit or sweets or flowers\nn03292736\ta system of epicyclic gears in which at least one wheel axis itself revolves about another fixed axis\nn03292960\tan optical projector that gives images of both transparent and opaque objects\nn03293095\ta mixture of resins and waxes to remove cosmetically undesirable hair; mixture is applied hot to the surface and after cooling is pulled away taking the hairs with it\nn03293741\telectronic equipment that reduces frequency distortion\nn03293863\ta telescope whose mounting has only two axes of motion, one parallel to the Earth's axis and the other one at right angles to it\nn03294048\tan instrumentality needed for an undertaking or to perform a service\nn03294604\t(computer science) a read-only memory chip that can be erased by ultraviolet light and programmed again with new data\nn03294833\tan implement used to erase something\nn03295012\ta right-angled optical prism used to turn an inverted image upright\nn03295140\ta structure that has been erected\nn03295246\ta conical flask with a wide base and narrow neck\nn03295928\thatchway that provides a means of escape in an emergency\nn03296081\tmechanical device that regulates 
movement\nn03296217\tgear that engages a rocking lever\nn03296328\ta steep artificial slope in front of a fortification\nn03296478\ta shield; especially one displaying a coat of arms\nn03296963\tan optical instrument for examining the inside of the esophagus\nn03297103\ta sandal with a sole made of rope or rubber and a cloth upper part\nn03297226\ta trellis on which ornamental shrub or fruit tree is trained to grow flat\nn03297495\ta coffee maker that forces live steam under pressure through dark roasted coffee grounds\nn03297644\ta cafe where espresso is served\nn03297735\ta public or private structure (business or governmental or educational) including buildings and equipment for business or residence\nn03298089\ta small (and usually shabby) cafe selling wine and beer and coffee\nn03298352\ta transdermal patch that allows estradiol to be absorbed into the blood stream; used in treating estrogen deficiency and in hormone replacement therapy\nn03298716\ta piece of furniture with open shelves for displaying small ornaments\nn03298858\ta soft cotton or worsted fabric with an open mesh; used for curtains or clothing etc.\nn03299406\tan etched plate made with the use of acid\nn03300216\ta type of network technology for local area networks; coaxial cable carries radio frequency signals between computers at a rate of 10 megabits per second\nn03300443\tany of several types of coaxial cable used in ethernets\nn03301175\ta jacket hanging to the waist and cut square at the bottom\nn03301291\tsmall ornamental ladies' bag for small articles\nn03301389\tmeasuring instrument consisting of a graduated glass tube for measuring volume changes in chemical reactions between gases\nn03301568\ta bass horn (brass wind instrument) that is the tenor of the tuba family\nn03301833\ta cooling system that cools by evaporation\nn03301940\ta handbag used with evening wear\nn03302671\tan exercise device resembling a stationary bike\nn03302790\ta device designed to provide exercise for the 
user\nn03302938\tsystem consisting of the parts of an engine through which burned gases or steam are discharged\nn03303217\ta fan that moves air out of an enclosure\nn03303669\ta valve through which burned gases from a cylinder escape into the exhaust manifold\nn03303831\ta large hall for holding exhibitions\nn03304197\ta guided missile developed by the French government for use against ships\nn03304323\ta bit with a cutting blade that can be adjusted to different sizes\nn03304465\ta bolt that has an attachment that expands as the bolt is driven into a surface\nn03305300\ta rapid automatic system to detect plastic explosives in passengers' luggage using X-ray technology and computers; designed for use in airports\nn03305522\tdevice that bursts with sudden violence from internal energy\nn03305953\ta system for screening luggage in airports; an agent passes a swab around or inside luggage and then runs the swab through a machine that can detect trace amounts of explosives\nn03306385\tpublic transport consisting of a fast train or bus that makes only a few scheduled stops\nn03306869\tan additional telephone set that is connected to the same telephone line\nn03307037\tan electric cord used to extend the length of a power cord\nn03307573\ta heat engine in which ignition occurs outside the chamber (cylinder or turbine) in which heat is converted to mechanical energy\nn03307792\ta drive with its own power supply and fan mounted outside the computer system enclosure and connected to the computer by a cable\nn03308152\tan instrument for extracting tight-fitting components\nn03308481\tmakeup provided by a cosmetic pencil that is used to darken the eyebrows\nn03308614\ta small vessel with a rim curved to fit the orbit of the eye; use to apply medicated or cleansing solution to the eyeball\nn03309110\tmakeup applied to emphasize the shape of the eyes\nn03309356\ta protective cloth covering for an injured eye\nn03309465\tcombination of lenses at the viewing end of optical 
instruments\nn03309687\tmakeup consisting of a cosmetic substance used to darken the eyes\nn03309808\tartifact made by weaving or felting or knitting or crocheting natural or synthetic fibers\nn03313333\tthe face or front of a building\nn03314227\tface mask consisting of a strong wire mesh on the front of football helmets\nn03314378\tmask that provides a protective covering for the face in such sports as baseball or football or hockey\nn03314608\ta protective covering for the front of a machine or device (as a door lock or computer component)\nn03314780\tcosmetic powder for the face\nn03314884\ta piece of more-or-less transparent material that covers the face\nn03315644\ta protective covering that protects the outside of a building\nn03315805\ta lining applied to the edge of a garment for ornamentation or strengthening\nn03315990\tan ornamental coating to a building\nn03316105\tduplicator that transmits the copy by wire or radio\nn03316406\ta plant consisting of one or more buildings with facilities for manufacturing\nn03316873\ta whaling ship equipped to process whale products at sea\nn03317233\ta bundle of sticks and branches bound together\nn03317510\tthe stitch that ties a group of parallel threads together in fagoting\nn03317673\ta thermometer calibrated in degrees Fahrenheit\nn03317788\tglazed earthenware decorated with opaque colors\nn03317889\ta ribbed woven fabric of silk or rayon or cotton\nn03318136\ta pulley-block used to guide a rope forming part of a ship's rigging to avoid chafing\nn03318294\ta small colored light used for decoration (especially at Christmas)\nn03318865\ta short broad slightly convex medieval sword with a sharp point\nn03318983\tthe hinged protective covering that protects the keyboard of a piano when it is not being played\nn03319167\ta shelter to protect occupants from the fallout from an atomic bomb\nn03319457\ta mask worn as part of a masquerade costume\nn03319576\ta removable denture\nn03319745\ta recreation room in a private 
house\nn03320046\ta device for creating a current of air by movement of a surface or surfaces\nn03320262\ta belt driven by the crankshaft that drives a fan that pulls air through the radiator\nn03320421\tblade of a rotating fan\nn03320519\ta costume worn as a disguise at a masquerade party\nn03320845\ta small flag used by surveyors or soldiers to mark a position\nn03320959\ta semicircular window over a door or window; usually has sash bars like the ribs of a fan\nn03321103\ta jet engine in which a fan driven by a turbine provides extra air to the burner and gives extra thrust\nn03321419\tan airplane propelled by a fanjet engine\nn03321563\ta waist pack worn with the pouch in back\nn03321843\tthe carved tracery on fan vaulting\nn03321954\tan elaborate system of vaulting in which the ribs diverge like fans\nn03322570\ta building on a farm\nn03322704\tan open-air marketplace for farm products\nn03322836\thouse for a farmer and family\nn03322940\ta machine used in farming\nn03323096\ta farm together with its buildings\nn03323211\tan area adjacent to farm buildings\nn03323319\ta hoop worn beneath a skirt to extend it horizontally; worn by European women in the 16th and 17th centuries\nn03323703\trestraint that attaches to something or holds something in place\nn03324629\tnuclear reactor in which nuclear fissions are caused by fast neutrons because little or no moderator is used\nn03324814\ta health spa that specializes in helping people lose weight\nn03324928\tmilitary uniform worn by military personnel when doing menial labor\nn03325088\ta regulator for controlling the flow of a liquid from a reservoir\nn03325288\ta piece of armor plate below the breastplate\nn03325403\tan upholstered armchair\nn03325584\ta long thin fluffy scarf of feathers or fur\nn03325691\ta thin tapering edge\nn03325941\ta hat made of felt with a creased crown\nn03326073\ta circuit that feeds back some of the output to the input of a system\nn03326371\ta building where livestock are fattened for 
market\nn03326475\tseam made by turning under or folding together and stitching the seamed materials to avoid rough edges\nn03326660\trim (or part of the rim) into which spokes are inserted\nn03326795\ta fabric made of compressed matted animal fibers\nn03326948\ta pen with a writing tip made of felt (trade name Magic Marker)\nn03327133\ta fast narrow sailing ship of the Mediterranean\nn03327234\ta barrier that serves to enclose an area\nn03327553\ta face mask made of fine mesh that is worn over a fencer's face\nn03327691\ta sword used in the sport of fencing\nn03327841\ta barrier that surrounds the wheels of a vehicle to block splashing water or mud\nn03328201\tan inclined metal frame at the front of a locomotive to clear the track\nn03329302\ta vertical rotating mechanism consisting of a large wheel with suspended seats that remain upright as the wheel rotates; provides a ride at an amusement park\nn03329536\ta metal cap or band placed on a wooden pole to prevent splitting\nn03329663\ta boat that transports people or vehicles across a body of water and operates on a regular schedule\nn03330002\ta switch (a stick or cane or flat paddle) used to punish children\nn03330665\ta curtain of fabric draped and bound at intervals to form graceful curves\nn03330792\ta stethoscope placed on the pregnant woman's abdomen to listen for the fetal heartbeat\nn03330947\ta shackle for the ankles or feet\nn03331077\ta felt cap (usually red) for a man; shaped like a flat-topped cone with a tassel that hangs from the crown\nn03331244\ta leatherlike material made by compressing layers of paper or cloth\nn03331599\ta cable made of optical fibers that can transmit large amounts of information at the speed of light\nn03332005\ta flexible medical instrument involving fiber optics that is used to examine internal organs\nn03332173\ta lightweight triangular scarf worn by a woman\nn03332271\ta bow used in playing the violin\nn03332393\tmovable artillery (other than antiaircraft) used by armies 
in the field (especially for direct support of front-line troops)\nn03332591\tthe electric coil around a field magnet that produces the magneto motive force to set up the flux in an electric machine\nn03332784\ta transistor in which most current flows in a channel whose effective resistance can be controlled by a transverse electric field\nn03332989\telectron microscope used to observe the surface structure of a solid\nn03333129\ta small refracting telescope\nn03333252\tball used in playing field hockey\nn03333349\ta temporary military hospital near the battle lines\nn03333610\ta building for indoor sports\nn03333711\tthe lens that is farthest from the eye in an optical device with more than one lens\nn03333851\ta magnet that provides a magnetic field in a dynamo or electric motor\nn03334017\tan early form of color TV in which successive fields are scanned in three primary colors\nn03334291\ta canvas tent for use in the field\nn03334382\ta temporary fortification built by troops in the field\nn03334492\ta small high-pitched flute similar to a piccolo; has a shrill tone and is used chiefly to accompany drums in a marching band\nn03334912\tan extra car wheel and tire for a four-wheel vehicle\nn03335030\ta high-speed military or naval airplane designed to destroy enemy aircraft in the air\nn03335333\ta fixed chair from which a saltwater angler can fight a hooked fish\nn03335461\ta covering consisting of anything intended to conceal something regarded as shameful\nn03335846\ta knot having the shape of the numeral 8; tied in a rope that has been passed through a hole or pulley and that prevents the rope from coming loose\nn03336168\ta loom for weaving figured fabrics\nn03336282\tan ice skate worn for figure skating; has a slightly curved blade and a row of jagged points at the front of the blade\nn03336575\ta thin wire (usually tungsten) that is heated white hot by the passage of an electric current\nn03336742\ta bobbin used in spinning silk into thread\nn03336839\ta 
steel hand tool with small sharp teeth on some or all of its surfaces; used for smoothing wood or metal\nn03337140\toffice furniture consisting of a container for keeping papers in order\nn03337383\tfolder that holds papers together in a filing cabinet\nn03337494\t(computer science) a digital computer that provides workstations on a network with controlled access to shared resources\nn03337822\tdelicate and intricate ornamentation (usually in gold or silver or other fine twisted wire)\nn03338287\t(dentistry) a dental appliance consisting of any of various substances (as metal or plastic) inserted into a prepared cavity in a tooth\nn03338821\tphotographic material consisting of a base of celluloid covered with a photographic emulsion; used to make negatives or transparencies\nn03339296\ta thin sheet of (usually plastic and usually transparent) material used to wrap or cover things\nn03339529\ta mechanism for advancing film in a camera or projector\nn03339643\tdevice that removes something from whatever passes through it\nn03340009\tan electrical device that alters the frequency spectrum of signals passing through it\nn03340723\toptical device that helps a user to find the target of interest\nn03340923\telaborate or showy attire and accessories\nn03341035\ta comb with teeth set close together\nn03341153\tone of the parts of a glove that provides covering for a finger or thumb\nn03341297\ta narrow strip of wood on the neck of some stringed instruments (violin or cello or guitar etc) where the strings are held against the wood with the fingers\nn03341606\tsmall bowl for rinsing the fingers at table\nn03342015\tpaint that has the consistency of jelly\nn03342127\ta painting produced by spreading paint with the fingers\nn03342262\ta flat protective covering (on a door or wall etc) to prevent soiling by dirty fingers\nn03342432\ta sheath worn to protect a finger\nn03342657\tthe final coating of plaster applied to walls and ceilings\nn03342863\tthe final coat of 
paint\nn03342961\ta race car that finishes a race\nn03343047\ta metal plate projecting from the keel of a shallow vessel to give it greater lateral stability\nn03343234\ta wooden plug forming a flue pipe (as the mouthpiece of a recorder)\nn03343354\ta tubular wind instrument with 8 finger holes and a fipple mouthpiece\nn03343560\ta fireplace in which a relatively small fire is burning\nn03343737\tan alarm that is tripped off by fire or smoke\nn03343853\ta portable gun\nn03344305\ta bell rung to give a fire alarm\nn03344393\ta boat equipped to fight fires on ships or along a waterfront\nn03344509\ta furnace (as on a steam locomotive) in which fuel is burned\nn03344642\tbrick made of fire clay; used for lining e.g. furnaces and chimneys\nn03344784\tnaval radar that controls the delivery of fire on a military target\nn03344935\tnaval weaponry consisting of a system for controlling the delivery of fire on a military target\nn03345487\tany of various large trucks that carry firemen and equipment to the site of a fire\nn03345837\ta manually operated device for extinguishing small fires\nn03346135\tmetal fireside implements\nn03346289\tan ax that has a long handle and a head with one cutting edge and a point on the other side\nn03346455\tan open recess in a wall at the base of a chimney where a fire can be built\nn03347037\ta metal screen before an open fire for protection (especially against flying sparks)\nn03347472\ttongs for taking hold of burning coals\nn03347617\ta watchtower where a lookout is posted to watch for fires\nn03348142\t(computing) a security system consisting of a combination of hardware and software that limits the exposure of a computer or computer network to attack from crackers; commonly used on local area networks that are connected to the internet\nn03348868\tchamber that is the part of a gun that receives the charge\nn03349020\tstriker that ignites the charge by striking the primer\nn03349296\ta small wooden keg\nn03349367\ta chisel with a thin 
blade for woodworking\nn03349469\tkit consisting of a set of bandages and medicines for giving first aid\nn03349599\ta station providing emergency care or treatment before regular medical aid can be obtained\nn03349771\tthe base that must be touched first by a base runner in baseball\nn03349892\tthe most expensive accommodations on a ship or train or plane\nn03350204\ta transparent bowl in which small fish are kept\nn03350352\ta knot for tying a line to a spar or ring\nn03350456\ta knot for tying the ends of two lines together\nn03350602\t(angling) any bright artificial bait consisting of plastic or metal mounted with hooks and trimmed with feathers\nn03351151\ta sharp barbed hook for catching fish\nn03351262\ta vessel for fishing; often has a well to keep the catch alive\nn03351434\tgear used in fishing\nn03351979\ta rod of wood or steel or fiberglass that is used in fishing to extend the fishing line\nn03352232\ta butt joint formed by bolting fish plates to the sides of two rails or beams\nn03352366\ta small table knife with a spatula blade used for eating fish\nn03352628\ta net that will enclose fish when it is pulled in\nn03352961\ta food turner with a broad blade used for turning or serving fish or other food that is cooked in a frying pan\nn03353281\tany of the items furnishing or equipping a room (especially built-in furniture)\nn03353951\ta varnish dissolved in alcohol and sprayed over pictures to prevent smudging\nn03354207\ta house or other dwelling in need of repair (usually offered for sale at a low price)\nn03354903\temblem usually consisting of a rectangular piece of cloth of distinctive design\nn03355468\ta small fipple flute with four finger holes and two thumb holes\nn03355768\ta large metal or pottery vessel with a handle and spout; used to hold alcoholic beverages (usually wine)\nn03355925\ta tall staff or pole on which a flag is raised\nn03356038\tthe ship that carries the commander of a fleet and flies his flag\nn03356279\tan implement 
consisting of handle with a free swinging stick at the end; used in manual threshing\nn03356446\ta flaming torch (such as are used in processions at night)\nn03356559\ta weapon that squirts ignited fuel for several yards\nn03356858\ta projection used for strength or for attaching to another object\nn03356982\ta soft light woolen fabric; used for clothing\nn03357081\t(usually in the plural) trousers made of flannel or gabardine or tweed or white cloth\nn03357267\ta cotton fabric imitating flannel\nn03357716\ta movable airfoil that is part of an aircraft wing; used to increase lift or drag\nn03358172\ta lamp for providing momentary light to take a photograph\nn03358380\ta bright patch of color used for decoration or identification\nn03358726\ta camera with a photoflash attachment\nn03358841\tan electrical device that automatically turns a lamp on and off (as for an advertising display)\nn03359137\ta small portable battery-powered electric lamp\nn03359285\ta small dry battery containing dry cells; used to power flashlights\nn03359436\tnonvolatile storage that can be electrically erased and programmed anew\nn03359566\tbottle that has a narrow neck\nn03360133\tan arch with mutually supporting voussoirs that has a straight horizontal extrados and intrados\nn03360300\tan open truck bed or trailer with no sides; used to carry large heavy objects\nn03360431\ta printing press where the type is carried on a flat bed under a cylinder that holds paper and rolls over the type\nn03360622\ta bench on which a weightlifter lies to do exercises\nn03360731\tfreight car without permanent sides or roof\nn03361109\ta file with two flat surfaces\nn03361297\ta tiny flat\nn03361380\ta type of video display that is thin and flat; commonly used in laptop computers\nn03361550\tfootwear (shoes or slippers) with no heel (or a very low heel)\nn03361683\ta screwdriver with a flat wedge-shaped tip that fits into a slot in the head of a screw\nn03362639\ta soft bulky fabric with deep pile; used 
chiefly for clothing\nn03362771\ta submarine carrying ballistic missiles\nn03362890\t(heraldry) charge consisting of a conventionalized representation of an iris\nn03363363\tsimulator consisting of a machine on the ground that simulates the conditions of flying a plane\nn03363549\tan obsolete gunlock that has flint embedded in the hammer; the flint makes a spark that ignites the charge\nn03363749\ta muzzle loader that had a flintlock type of gunlock\nn03364008\ta backless sandal held to the foot by a thong between the big toe and the second toe\nn03364156\ta shoe for swimming; the paddle-like front is an aid in swimming (especially underwater)\nn03364599\ta hand tool with a flat face used for smoothing and finishing the surface of plaster or cement or stucco\nn03364937\tdry dock that can be submerged under a vessel and then raised\nn03365231\ta seaplane equipped with pontoons for landing or taking off from water\nn03365374\tlight that is a source of artificial illumination having a broad beam; used in photography\nn03365592\tthe inside lower horizontal surface (as of a room, hallway, tent, or other structure)\nn03365991\ta structure consisting of a room or set of rooms at a single position along a vertical scale\nn03366464\tthe legislative hall where members debate and vote and conduct other business\nn03366721\tthe floor of an automobile\nn03366823\ta covering for a floor\nn03366974\tjoist that supports a floor\nn03367059\ta lamp that stands on the floor\nn03367321\ta cheap lodging house\nn03367410\ta shop where flowers and ornamental plants are sold\nn03367545\ta soft loosely twisted thread used in embroidery\nn03367875\tthe floating wreckage of a ship\nn03367969\ta bin for holding flour\nn03368048\ta mill for grinding grain into flour\nn03368352\ta bed in which flowers are growing\nn03369276\ta brass instrument resembling a cornet but with a wider bore\nn03369407\tan automotive power coupling\nn03369512\ta kind of fluid coupling in which the flywheel is the 
driving rotor\nn03369866\twatercourse that consists of an open artificial chute filled with water for power or for carrying logs\nn03370387\tlamp consisting of a tube coated on the inside with a fluorescent material; mercury vapor in the tube emits ultraviolet radiation that is converted to visible radiation by the fluorescent material\nn03370646\tan X-ray machine that combines an X-ray source and a fluorescent screen to enable direct observation\nn03371875\ta toilet that is cleaned of waste by the flow of water through it\nn03372029\ta high-pitched woodwind instrument; a slender tube closed at one end with finger holes on one end and an opening near the closed end across which the breath is blown\nn03372549\ta tall narrow wineglass\nn03372822\tan applicator for applying flux (as in soldering)\nn03372933\tmeter that measures magnetic flux by the current it generates in a coil\nn03373237\tfisherman's lure consisting of a fishhook decorated to look like an insect\nn03373611\ta large seaplane that floats with its fuselage in the water rather than on pontoons\nn03373943\ta buttress that stands apart from the main structure and connected to it by an arch\nn03374102\t(Asian folktale) an imaginary carpet that will fly people anywhere they wish to go\nn03374282\tthe outermost of two or more jibs\nn03374372\ta long flexible fishing rod used in fly fishing\nn03374473\ta tent with a fly front\nn03374570\ta trap for catching flies\nn03374649\tregulator consisting of a heavy wheel that stores kinetic energy and smooths the operation of a reciprocating engine\nn03374838\tshort chain or ribbon attaching a pocket watch to a man's vest\nn03375171\ta warning device consisting of a horn that generates a loud low tone\nn03375329\theadlight that provides strong beam for use in foggy weather\nn03375575\ta light slender flexible sword tipped by a button\nn03376159\ta pen for sheep\nn03376279\tcovering that is folded over to protect the contents\nn03376595\ta chair that can be folded flat 
for storage\nn03376771\tan interior door that opens by folding back in sections (rather than by swinging on hinges)\nn03376938\ta saw with a toothed blade that folds into a handle (the way a pocketknife folds)\nn03378005\tan area (as in a shopping mall) where fast food is sold (usually around a common eating area)\nn03378174\ta kitchen appliance with interchangeable blades; used for shredding or blending or chopping or slicing food\nn03378342\ta hamper for packing and transporting food\nn03378442\ta support resembling a pedal extremity\nn03378593\tfilm that has been shot\nn03378765\tthe inflated oblong ball used in playing American football\nn03379051\ta padded helmet with a face mask to protect the head of football players\nn03379204\ta stadium where football games are held\nn03379343\ta small bathtub for warming or washing or disinfecting the feet\nn03379719\thydraulic brake operated by pressing on a foot pedal\nn03379828\ta bridge designed for pedestrians\nn03379989\ta place providing support for the foot in standing or climbing\nn03380301\ta trunk for storing personal possessions; usually kept at the foot of a bed (as in a barracks)\nn03380647\ta ruler one foot long\nn03380724\ta low seat or a stool to rest the feet of a seated person\nn03380867\tcovering for a person's feet\nn03381126\tclothing worn on a person's feet\nn03381231\tan extractor consisting of a pair of pincers used in medical treatment (especially for the delivery of babies)\nn03381450\tpump used to force a liquid up and expel it under pressure\nn03381565\tsailing vessel with a fore-and-aft rig\nn03381776\tany sail not set on a yard and whose normal position is in a fore-and-aft direction\nn03382104\tliving quarters consisting of a superstructure in the bow of a merchant ship where the crew is housed\nn03382292\tthe outer or front court of a building or of a group of buildings\nn03382413\tthe deck between the bridge and the forecastle\nn03382533\tthe part of a book that faces inward when the book 
is shelved; the part opposite the spine\nn03382708\t(computer science) a window for an active application\nn03382856\tthe mast nearest the bow in vessels with two or more masts\nn03382969\ta carpenter's plane intermediate between a jack plane and a jointer plane\nn03383099\tthe lowest sail on the foremast of a square-rigged vessel\nn03383211\tan adjustable stay from the foremast to the deck or bowsprit; controls the bending of the mast\nn03383378\ta platform at the head of a foremast\nn03383468\tthe topmast next above the foremast\nn03383562\tthe topsail on a foremast\nn03383821\tfurnace consisting of a special hearth where metal is heated before shaping\nn03384167\tan agricultural tool used for lifting or digging; has a handle and metal prongs\nn03384352\ta small industrial vehicle with a power operated forked platform in front that can be inserted under loads to lift and move them\nn03384891\tattire to wear on formal occasions in the evening\nn03385295\tany of various plastic laminates containing melamine\nn03385557\tdefensive structure consisting of walls or mounds built around a stronghold to strengthen it\nn03386011\ta fortified defensive structure\nn03386343\ta .45-caliber pistol\nn03386544\tpendulum with a long wire; can swing in any direction; the change in the swing plane demonstrates the earth's rotation\nn03386726\ta light plain-weave or twill-weave silk or silklike fabric (usually with a printed design)\nn03386870\tprotective garment that is intended to keep the wearer dry and warm in bad weather\nn03387323\ta woman's undergarment worn to give shape to the contours of the body\nn03387653\tfactory where metal castings are produced\nn03388043\ta structure from which an artificially produced jet of water arises\nn03388183\ta pen that is supplied with ink from a reservoir in its barrel\nn03388323\ta long necktie that is tied in a slipknot with one end hanging in front of the other\nn03388549\ta bed with posts at the four corners that can be used to support 
a canopy or curtains\nn03388711\tan artillery gun that throws a shot weighing four pounds\nn03388990\tan internal-combustion engine in which an explosive mixture is drawn into the cylinder on the first stroke and is compressed and ignited on the second stroke; work is done on the third stroke and the products of combustion are exhausted on the fourth stroke\nn03389611\ta transmission that provides power directly to all four wheels of a motor vehicle\nn03389761\ta motor vehicle with a four-wheel drive transmission system\nn03389889\ta hackney carriage with four wheels\nn03389983\ta light shotgun used for fowling\nn03390075\ta small dugout with a pit for individual shelter against enemy fire\nn03390327\ta bomb with only 10 to 20 per cent explosive and the remainder consisting of casings designed to break into many small high-velocity fragments; most effective against troops and vehicles\nn03390673\ta basket for holding dried fruit (especially raisins or figs)\nn03390786\tsloping or horizontal rampart of pointed stakes\nn03390983\ta framework that supports and protects a picture or a mirror\nn03391301\tthe framework for a pair of eyeglasses\nn03391613\t(computer science) a buffer that stores the contents of an image pixel by pixel\nn03391770\ta structure supporting or containing something\nn03392648\ta type of hydroelectric turbine\nn03392741\ta machine that automatically stamps letters or packages passing through it and computes the total charge\nn03393017\ta public house that is not controlled by a brewery and so is free to sell different brands of beer and ale\nn03393199\ta reed that does not fit closely over the aperture\nn03393324\ta wind instrument with a free reed\nn03393761\ta clutch (as on the rear wheel of a bicycle) that allows wheels to turn freely (as in coasting)\nn03393912\ta railway car that carries freight\nn03394149\tan elevator designed for carrying freight\nn03394272\ta long-distance express freight train between industrial centers and seaports 
with facilities for rapid loading and unloading of goods\nn03394480\ta railroad train consisting of freight cars\nn03394649\ta light door with transparent or glazed panels extending the full length\nn03394916\ta brass musical instrument consisting of a conical tube that is coiled into a spiral and played by means of valves\nn03395256\ta varnish for wood consisting of shellac dissolved in alcohol\nn03395401\ta mansard roof with sides that are nearly perpendicular\nn03395514\ta French door situated in an exterior wall of a building\nn03395859\tlens composed of a number of small lenses arranged to make a lightweight lens of large diameter and short focal length\nn03396074\ta small bar of metal across the fingerboard of a musical instrument; when the string is stopped by a finger at the metal bar it will produce a note of the desired pitch\nn03396580\ta monastery of friars\nn03396654\ta clutch in which one part turns the other by the friction between them\nn03396997\ta heavy woolen fabric with a long nap\nn03397087\tan architectural ornament consisting of a horizontal sculptured band between the architrave and the cornice\nn03397266\ta United States warship larger than a destroyer and smaller than a cruiser\nn03397412\ta medium size square-rigged warship of the 18th and 19th centuries\nn03397532\ta strip of pleated material used as a decoration or a trim\nn03397947\ta light, plastic disk about 10 inches in diameter; propelled with a flip of the wrist for recreation or competition\nn03398153\ta habit worn by clerics\nn03398228\ta man's coat having knee-length skirts front and back; worn in the 19th century\nn03399579\tan adornment worn on the forehead\nn03399677\ta porch for the front door\nn03399761\ta projector for digital input\nn03399971\ta coin-operated gambling machine that produces random combinations of symbols (usually pictures of different fruits) on rotating dials; certain combinations win money for the player\nn03400231\ta pan used for frying 
foods\nn03400972\ta filter in the fuel line that screens out dirt and rust particles from the fuel\nn03401129\tan indicator of the amount of fuel remaining in a vehicle\nn03401279\tmechanical system to inject atomized fuel directly into the cylinders of an internal-combustion engine; avoids the need for a carburetor\nn03401721\tequipment in a motor vehicle or aircraft that delivers fuel to the engine\nn03402188\tthe naval or military uniform that is specified by regulations to be worn on ceremonial occasions\nn03402369\ta lead bullet that is covered with a jacket of a harder metal (usually copper)\nn03402511\ta long skirt gathered at the waist\nn03402785\ta device that generates a gas for the purpose of disinfecting or eradicating pests\nn03402941\ta mortuary where those who knew the deceased can come to pay their last respects\nn03403643\ta conically shaped utensil having a narrow tube at the small end; used to channel the flow of substances into a container with a small mouth\nn03404012\tan ambulance used to transport patients to a mental hospital\nn03404149\ta garment made of the dressed hairy coat of a mammal\nn03404251\ta coat made of fur\nn03404360\ta hat made of fur\nn03404449\tan enclosed chamber in which heat is produced to heat buildings, destroy refuse, smelt or refine ores, etc.\nn03404900\tlining consisting of material with a high melting point; used to line the inside walls of a furnace\nn03405111\ta room (usually in the basement of a building) that contains a furnace for heating the building\nn03405265\t(usually plural) the instrumentalities (furniture and appliances and other movable accessories including curtains and rugs) that make a home (or other area) livable\nn03405595\t(usually plural) accessory wearing apparel\nn03405725\tfurnishings that make a room or other area ready for occupancy\nn03406759\ta neckpiece made of fur\nn03406966\ta long shallow trench in the ground (especially one made by a plow)\nn03407369\tan electrical device that can 
interrupt the flow of electrical current when it is overloaded\nn03407865\ta spirally grooved spindle in a clock that counteracts the diminishing power of the uncoiling mainspring\nn03408054\tthe central body of an airplane that is designed to accommodate the crew and passengers (or cargo)\nn03408264\ta light flintlock musket\nn03408340\ta strong cotton and linen fabric with a slight nap\nn03408444\tmattress consisting of a pad of cotton batting that is used for sleeping on the floor or on a raised frame\nn03409297\ta firm durable fabric with a twill weave\nn03409393\tthe vertical triangular wall between the sloping ends of gable roof\nn03409591\ta double sloping roof with a ridge and gables at each end\nn03409920\tappliances collectively\nn03410022\tan iron hook with a handle; used for landing large fish\nn03410147\ta spar rising aft from a mast to support the head of a quadrilateral fore-and-aft sail\nn03410303\ta sharp metal spike or spur that is fastened to the leg of a gamecock\nn03410423\ta quadrilateral fore-and-aft sail suspended from a gaff\nn03410571\ta triangular fore-and-aft sail with its foot along the gaff and its luff on the topmast\nn03410740\trestraint put into a person's mouth to prevent speaking or shouting\nn03410938\tlegging consisting of a cloth or leather covering for the leg from the knee to the ankle\nn03411079\ta shoe covering the ankle with elastic gores in the sides\nn03411208\ta type of refracting telescope that is no longer used in astronomy\nn03411339\ta large square-rigged sailing ship with three or more masts; used by the Spanish for commerce and war from the 15th to 18th centuries\nn03411927\ta long usually narrow room used for some specific purpose\nn03412058\ta room or series of rooms where works of art are exhibited\nn03412220\tthe area for food preparation on a ship\nn03412387\tthe kitchen area for food preparation on an airliner\nn03412511\t(classical antiquity) a crescent-shaped seagoing vessel propelled by 
oars\nn03412906\tan instrument of execution consisting of a wooden frame from which a condemned person is executed by hanging\nn03413124\talternative terms for gallows\nn03413264\tmeter for detecting or comparing or measuring small electric currents\nn03413428\ta public building in which a variety of games of chance can be played (operated as a business)\nn03413684\ta gable roof with two slopes on each side and the lower slope being steeper\nn03413828\tthe game equipment needed in order to play a particular game\nn03414029\ta canvas or leather bag for carrying game (especially birds) killed by a hunter\nn03414162\tequipment or apparatus used in playing a game\nn03414676\ta table used for gambling; may be equipped with a gameboard and slots for chips\nn03415252\tcolloquial terms for an umbrella\nn03415486\ta temporary bridge for getting on and off a vessel at dockside\nn03415626\ta power saw that has several parallel blades making simultaneous cuts\nn03415749\ta temporary passageway of planks (as over mud on a building site)\nn03415868\tthe convergence of two parallel railroad tracks in a narrow place; the inner rails cross and run parallel and then diverge so a train remains on its own tracks at all times\nn03416094\ta framework of steel bars raised on side supports to bridge over or around something; can display railway signals above several tracks or can support a traveling crane etc.\nn03416489\tan outbuilding (or part of a building) for housing automobiles\nn03416640\ta repair shop where cars and trucks are serviced and repaired\nn03416775\ta semiautomatic rifle\nn03416900\ta receptacle where waste can be discarded\nn03417042\ta truck for collecting domestic refuse\nn03417202\tthe first wale laid next to the keel of a wooden ship\nn03417345\ta plot of ground where plants are cultivated\nn03417749\ta yard or lawn adjoining a house\nn03417970\ta rake used by gardeners\nn03418158\ta spade used by gardeners\nn03418242\tused for working in gardens or 
yards\nn03418402\ta trowel used by gardeners\nn03418618\ta spout that terminates in a grotesquely carved figure of a person or animal\nn03418749\ta loose high-necked blouse with long sleeves; styled after the red flannel shirts worn by Garibaldi's soldiers\nn03418915\ta press for extracting juice from garlic\nn03419014\tan article of clothing\nn03420345\ta suitcase that unfolds to be hung up\nn03420801\ta wedge-shaped wool or cotton cap; worn as part of a uniform\nn03420935\tan instrument of execution for execution by strangulation\nn03421117\ta band (usually elastic) worn around the leg to hold up a stocking (or around the arm to hold up a sleeve)\nn03421324\ta wide belt of elastic with straps hanging from it; worn by women to hold up stockings\nn03421485\ta knitting stitch that results in a pattern of horizontal ridges formed by knitting both sides (instead of purling one side)\nn03421669\ta car with relatively low fuel efficiency\nn03421768\t(military) bomb consisting of an explosive projectile filled with a toxic gas that is released when the bomb explodes\nn03421960\ta pipe with one or more burners projecting from a wall\nn03422072\tburner such that combustible gas issues from a nozzle to form a steady flame\nn03422484\ta nuclear reactor using gas as a coolant\nn03422589\ta tube in which an electric discharge takes place through a gas\nn03422771\tan internal-combustion engine similar to a gasoline engine but using natural gas instead of gasoline vapor\nn03423099\ta device to convey illuminating gas from the pipe to the gas burner\nn03423224\ta furnace that burns gas\nn03423306\ta gun that fires gas shells\nn03423479\ta heater that burns gas for heat\nn03423568\ta large gas-tight spherical or cylindrical tank for holding gas to be used as fuel\nn03423719\tseal consisting of a ring for packing pistons or sealing a pipe joint\nn03423877\ta lamp that burns illuminating gas\nn03424204\ta maser in which microwave radiation interacts with gas molecules\nn03424325\ta 
protective mask with a filter; protects the face and lungs against poisonous gases\nn03424489\ta meter for measuring the amount of gas flowing through a particular pipe\nn03424630\tan internal-combustion engine that burns gasoline; most automobiles are driven by gasoline engines\nn03424862\tgauge that indicates the amount of gasoline left in the gasoline tank of a vehicle\nn03425241\ta domestic oven fueled by gas\nn03425325\ta cremation chamber fueled by gas\nn03425413\ta pump in a service station that draws gasoline from underground storage tanks\nn03425595\ta range with gas rings and an oven for cooking with gas\nn03425769\tgas burner consisting of a circular metal pipe with several small holes through which gas can escape to be burned\nn03426134\ta tank for holding gasoline to supply a vehicle\nn03426285\tthermometer that measures temperature by changes in the pressure of a gas kept at constant volume\nn03426462\ta type of endoscope for visually examining the stomach\nn03426574\tturbine that converts the chemical energy of a liquid fuel into mechanical energy by internal combustion; gaseous products of the fuel (which is burned in compressed air) are expanded through a turbine\nn03426871\ta ship powered by a gas turbine\nn03427202\ta gangster's pistol\nn03427296\ta movable barrier in a fence or wall\nn03428090\ta house built at a gateway; usually the gatekeeper's residence\nn03428226\ta drop-leaf table with the drop-leaves supported by hinged legs\nn03428349\teither of two posts that bound a gate\nn03429003\ta skirt whose fabric is drawn together around the waist\nn03429137\tan early form of machine gun having several barrels that fire in sequence as they are rotated\nn03429288\ta measuring instrument for measuring and indicating a quantity such as the thickness of wire or the amount of rain etc.\nn03429682\ta glove with long sleeve\nn03429771\ta glove of armored leather; protects the hand\nn03429914\ta net of transparent fabric with a loose open 
weave\nn03430091\t(medicine) bleached cotton cloth of plain weave used for bandages and dressings\nn03430313\ta small mallet used by a presiding officer or a judge\nn03430418\ta small roofed building affording shade and rest\nn03430551\ta toothed wheel that engages another toothed mechanism in order to change the speed or direction of transmitted motion\nn03430959\tequipment consisting of miscellaneous articles needed for a particular operation or sport etc.\nn03431243\ta mechanism for transmitting motion for some specific purpose (as the steering gear of a vehicle)\nn03431570\tthe shell (metal casing) in which a train of gears is sealed\nn03431745\twheelwork consisting of a connected set of rotating gears by which force is transmitted or motion or torque is changed\nn03432061\ta set of gears\nn03432129\ta mechanical device for engaging and disengaging gears\nn03432360\tcounter tube that detects ionizing radiations\nn03432509\tan ionization chamber contained in a tube in a Geiger counter\nn03433247\ta microchip that holds DNA probes that form half of the DNA double helix and can recognize DNA from samples being tested\nn03433637\ta large bomb (500 to 2,000 pounds that is 50% explosive) whose explosion creates a blast and whose metal casing creates some fragmentation effect\nn03433877\tengine that converts mechanical energy into electrical energy by electromagnetic induction\nn03434188\tan apparatus that produces a vapor or gas\nn03434285\tan electronic device for producing a signal voltage\nn03434830\tblack academic gown widely used by Protestant clergymen\nn03435593\ta lightweight dome constructed of interlocking polygons; invented by R. 
Buckminster Fuller\nn03435743\ta thin silk dress material\nn03435991\ta horse-drawn carriage in India\nn03436075\tstairway in India leading down to a landing on the water\nn03436182\ta portable stereo\nn03436417\ta shop that sells miscellaneous articles appropriate as gifts\nn03436549\tornamental wrapping for gifts\nn03436656\tsmall two-wheeled horse-drawn carriage; with two seats and no hood\nn03436772\ttender that is a light ship's boat; often for personal use of captain\nn03436891\tlong and light rowing boat; especially for racing\nn03436990\ta cluster of hooks (without barbs) that is drawn through a school of fish to hook their bodies; used when fish are not biting\nn03437184\tthe meeting place of a medieval guild\nn03437295\ta flat fishnet suspended vertically in the water to entangle fish by their gills\nn03437430\ta coating of gold or of something that looks like gold\nn03437581\tan appliance that allows an object (such as a ship's compass) to remain horizontal even as its support tips\nn03437741\ta clothing fabric in a plaid weave\nn03437829\tan ornate candle holder; often with a mirror\nn03437941\ta beam made usually of steel; a main support in a structure\nn03438071\ta band of material around the waist that strengthens a skirt or trousers\nn03438257\ta container for holding liquids while drinking\nn03438661\tglassware collectively\nn03438780\ta tool for cutting glass\nn03438863\ta case for carrying spectacles\nn03439348\ta parsonage (especially one provided for the holder of a benefice)\nn03439631\ta Scottish cap with straight sides and a crease along the top from front to back; worn by Highlanders as part of military dress\nn03439814\taircraft supported only by the dynamic action of air against its surfaces\nn03440216\ta navigational system involving satellites and computers that can determine the latitude and longitude of a receiver on Earth by computing the time difference for signals from different satellites to reach the receiver\nn03440682\ta 
percussion instrument consisting of a set of graduated metal bars mounted on a frame and played with small hammers\nn03440876\ta small locker at the stern of a boat or between decks of a ship\nn03441112\thandwear: covers the hand and wrist\nn03441345\tcompartment on the dashboard of a car\nn03441465\ta gas-discharge tube with a hot cathode; used in stroboscopes\nn03441582\ta gas-discharge tube consisting of a cold cathode and a diode in a tube filled with gas; the color of the glow depends on the particular gas\nn03442288\tcarvings or engravings (especially on precious stones)\nn03442487\tthe art of engraving on precious stones\nn03442597\tindicator provided by the stationary arm whose shadow indicates the time on the sundial\nn03442756\tgame equipment consisting of the place toward which players of a game try to advance a ball or puck in order to score points\nn03443005\t(sports) the area immediately in front of the goal\nn03443149\tone of a pair of posts (usually joined by a crossbar) that are set up as a goal at each end of a playing field\nn03443371\ta drinking glass with a base and stem\nn03443543\t(in India and Malaysia) a warehouse\nn03443912\ttight-fitting spectacles worn to protect the eyes\nn03444034\ta small low motor vehicle with four wheels and an open framework; used for racing\nn03445326\ta thin plating of gold on something\nn03445617\tgolf equipment consisting of a bag for carrying golf clubs and balls\nn03445777\ta small hard ball used in playing golf; dimpled to reduce wind resistance\nn03445924\ta small motor vehicle in which golfers can ride between shots\nn03446070\tgolf equipment used by a golfer to hit a golf ball\nn03446268\t(golf) the head of the club which strikes the ball\nn03446832\tsports equipment used in playing golf\nn03447075\ta glove worn by golfers to give a firm grip on the handle of the golf club\nn03447358\ta grotesque black doll\nn03447447\tlong narrow flat-bottomed boat propelled by sculling; traditionally used on canals of 
Venice\nn03447721\ta percussion instrument consisting of a metal plate that is struck with a softheaded drumstick\nn03447894\tdirection finder that determines the angular direction of incoming radio signals\nn03448031\tan intricate knot tied by Gordius, the king of Phrygia, and cut by the sword of Alexander the Great after he heard that whoever undid it would become ruler of Asia\nn03448590\tarmor plate that protects the neck\nn03448696\ta gauze fabric with an extremely fine texture\nn03448956\ta pointed arch; usually has a joint (instead of a keystone) at the apex\nn03449217\tan opaque watercolor prepared with gum\nn03449309\tand edge tool with a blade like a trough for cutting channels or grooves\nn03449451\tbottle made from the dried shell of a bottle gourd\nn03449564\ta building that houses a branch of government\nn03449858\tan office where government employees work\nn03450230\ta woman's dress, usually with a close-fitting bodice and a long flared skirt, often worn on formal occasions\nn03450516\touterwear consisting of a long flowing garment used for official or ceremonial occasions\nn03450734\tprotective garment worn by surgeons during operations\nn03450881\ta mechanical device for gripping an object\nn03450974\ta container from which a person draws a wrapped item at random without knowing the contents\nn03451120\ta bar attached parallel to a wall to provide a handgrip for steadying yourself\nn03451253\tcup to be passed around for the final toast after a meal\nn03451365\ta crossing that uses an underpass or overpass\nn03451711\ta cylindrical graduate\nn03451798\ta rude decoration inscribed on rocks or walls\nn03452267\tan antique record player; the sound of the vibrating needle is amplified acoustically\nn03452449\ta storehouse for threshed grain or animal feed\nn03452594\ta pendulum clock enclosed in a tall narrow case\nn03452741\ta piano with the strings on a horizontal harp-shaped frame; usually supported by three legs\nn03453231\ta kind of stone-grey 
enamelware\nn03453320\ta reef knot crossed the wrong way and therefore insecure\nn03453443\tan arbor where grapes are grown\nn03454110\ta light anchor for small boats\nn03454211\ta tool consisting of several hooks for grasping and holding; often thrown with a rope\nn03454442\ta skirt made of long blades of grass\nn03454536\ta frame of iron bars to hold a fire\nn03454707\ta barrier that has parallel or crossed bars blocking a passage but admitting air\nn03454885\tutensil with sharp perforations for shredding foods (as vegetables or cheese)\nn03455355\ta tool used by an engraver\nn03455488\ta stone that is used to mark a grave\nn03455642\ta measuring instrument for measuring variations in the gravitational field of the earth\nn03455802\tan intaglio print produced by gravure\nn03456024\ta dish (often boat-shaped) for serving gravy or sauce\nn03456186\tclothing that is a grey color\nn03456299\ta hand-operated pump that resembles a revolver; forces grease into parts of a machine\nn03456447\ta greasy substance used as makeup by actors\nn03456548\ta small restaurant specializing in short-order fried foods\nn03456665\ta heavy coat worn over clothes in winter\nn03457008\tthe principal hall in a castle or mansion; can be used for dining or entertainment\nn03457451\tarmor plate that protects legs below the knee\nn03457686\ta greengrocer's grocery store\nn03457902\ta building with glass walls and roof; for the cultivation and exhibition of plants under controlled conditions\nn03458271\ta small explosive bomb thrown by hand or fired from a missile\nn03458422\ta cooking utensil of parallel metal bars; used to grill fish or meat\nn03459328\tcooking utensil consisting of a flat heated surface (as on top of a stove) on which food is cooked\nn03459591\ta framework of metal bars used as a partition or a grate\nn03459775\tgrating that admits cooling air to car's radiator\nn03459914\ta restaurant where food is cooked on a grill\nn03460040\ta machine tool that polishes 
metal\nn03460147\ta wheel composed of abrasive material; used for grinding\nn03460297\ta revolving stone shaped like a disk; used to grind or sharpen or polish edge tools\nn03460455\ta small suitcase\nn03460899\ta mill for grinding grain (especially the customer's own grain)\nn03461288\ta sack for holding customer's groceries\nn03461385\ta marketplace where groceries are sold\nn03461651\ta coarse fabric of silk mixed with wool or mohair and often stiffened with gum\nn03461882\ttwo barrel vaults intersecting at right angles\nn03461988\ta device that makes grooves by cutting or punching\nn03462110\ta silk or silklike fabric with crosswise ribs\nn03462315\ta needlepoint stitch covering two horizontal and two vertical threads\nn03462747\ta connection between an electrical device and a large conducting body, such as the earth (which is taken to be at zero voltage)\nn03462972\tbait scattered on the water to attract fish\nn03463185\ta communication system for sending continuous radio messages to an airplane pilot who is making a ground-controlled approach to landing\nn03463381\tthe floor of a building that is at or nearest to the level of the ground around the building\nn03463666\ta waterproofed piece of cloth spread on the ground (as under a tent) to protect from moisture\nn03464053\tminimal clothing worn by stripteasers; a narrow strip of fabric that covers the pubic area, passes between the thighs, and is supported by a waistband\nn03464467\ta device designed to prevent injury or accidents\nn03464628\ta boat that is on guard duty (as in a harbor) around a fleet of warships\nn03464952\ta room used by soldiers on guard\nn03465040\ta cell in which soldiers who are prisoners are confined\nn03465151\ta warship (at anchor or under way) required to maintain a higher degree of readiness than others in its squadron\nn03465320\tthe car on a train that is occupied by the guard\nn03465426\ta small round table\nn03465500\ta violin made by a member of the Guarneri 
family\nn03465605\ta house separate from the main house; for housing guests\nn03465718\ta bedroom that is kept for the use of guests\nn03465818\ta system of equipment for automatically guiding the path of a vehicle (especially a missile)\nn03466162\ta rocket-propelled missile whose path can be controlled during flight either by radio signals or by internal homing devices\nn03466493\ta cruiser that carries guided missiles\nn03466600\ta frigate that carries guided missiles\nn03466839\tthe hall of a guild or corporation\nn03466947\tan architectural decoration formed by two intersecting wavy bands\nn03467068\tinstrument of execution that consists of a weighted blade between two vertical poles; used for beheading people\nn03467254\ta short blouse with sleeves that is worn under a jumper or pinafore dress\nn03467380\ta piece of starched cloth covering the shoulders of a nun's habit\nn03467517\ta stringed instrument usually having six strings; played by strumming or plucking\nn03467796\ta plectrum used to pluck a guitar\nn03467887\ta Russian prison camp for political prisoners\nn03467984\ta weapon that discharges a missile at high velocity (especially from a metal tube or barrel)\nn03468570\ta small shallow-draft boat carrying mounted guns; used by costal patrols\nn03468696\ta framework on which a gun is mounted for firing\nn03468821\ta case for storing a gun\nn03469031\tan emplacement for a gun\nn03469175\ta self-contained weapons platform housing guns and capable of rotation\nn03469493\tthe action that ignites the charge in a firearm\nn03469832\tguns collectively\nn03469903\ta bag made of burlap\nn03470005\ta ballistic pendulum consisting of a suspended gun; the velocity of a projectile in the bore of a gun can be measured by the recoil when the gun is discharged\nn03470222\tmilitary quarters of midshipmen and junior officers on a British warship\nn03470387\ta sight used for aiming a gun\nn03470629\tlever that activates the firing mechanism of a gun\nn03470948\ta metal 
stretcher with wheels\nn03471030\tan oil well with a strong natural flow so that pumping is not necessary\nn03471190\ta piece of material used to strengthen or enlarge a garment\nn03471347\ta metal plate used to strengthen a joist\nn03471779\ta cable, wire, or rope that is used to brace something (especially a tent)\nn03472232\tsports equipment used in gymnastic exercises\nn03472535\ta canvas shoe with a pliable rubber sole\nn03472672\tclothes prescribed for wear while participating in gymnastic exercise\nn03472796\ta sleeveless tunic worn by English girls as part of a school uniform\nn03472937\ta taxicab that cruises for customers although it is licensed only to respond to calls\nn03473078\ta compass that does not depend on magnetism but uses a gyroscope instead\nn03473227\trotating mechanism in the form of a universally mounted spinning wheel that offers resistance to turns in any direction\nn03473465\ta stabilizer consisting of a heavy gyroscope that spins on a vertical axis; reduces side-to-side rolling of a ship or plane\nn03473817\t(Middle Ages) a light sleeveless coat of chain mail worn under the hauberk\nn03473966\ta distinctive attire worn by a member of a religious order\nn03474167\tattire that is typically worn by a horseback rider (especially a woman's attire)\nn03474352\tthe main house on a ranch or large estate\nn03474779\tsaw used with one hand for cutting metal\nn03474896\tthe handle of a weapon or tool\nn03475581\ta brush used to groom a person's hair\nn03475674\tcloth woven from horsehair or camelhair; used for upholstery or stiffening in garments\nn03475823\ta toiletry for the hair\nn03475961\ta small net that some women wear over their hair to keep it in place\nn03476083\ta covering or bunch of human or artificial hair used for disguise or adornment\nn03476313\ta double pronged pin used to hold women's hair in place\nn03476542\tan uncomfortable shirt made of coarse animal hair; worn next to the skin as a penance\nn03476684\ta decorative hinged 
clip that girls and women put in their hair to hold it in place\nn03476991\ttoiletry consisting of a commercial preparation that is sprayed on the hair to hold it in place\nn03477143\ta fine spiral spring that regulates the movement of the balance wheel in a timepiece\nn03477303\ta gun trigger that responds with little pressure\nn03477410\ta pike fitted with an ax head\nn03477512\tbook binding in which the spine and part of the sides are bound in one material and the rest in another\nn03477773\ta hatchet with a broad blade on one end and a hammer head of the other\nn03477902\ta knot used to fasten a rope temporarily to an object; usually tied double\nn03478589\ta motor vehicle propelled by half tracks; frequently used by the military\nn03478756\ta large building for meetings or entertainment\nn03478907\ta large room for gatherings or entertainment\nn03479121\ta large building used by a college or university for teaching or research\nn03479266\ta building containing trophies honoring famous people\nn03479397\ta university dormitory\nn03479502\ta piece of furniture where coats and hats and umbrellas can be hung; usually has a mirror\nn03480579\ta woman's top that fastens behind the back and neck leaving the back and arms uncovered\nn03480719\trope or canvas headgear for a horse, with a rope for leading\nn03480973\tstable gear consisting of either of two curved supports that are attached to the collar of a draft horse and that hold the traces\nn03481172\ta hand tool with a heavy rigid head and a handle; used to deliver an impulsive force by striking\nn03481521\ta power tool for drilling rocks\nn03482001\ta heavy metal sphere attached to a flexible wire; used in the hammer throw\nn03482128\tthe striking part of a hammer\nn03482252\ta hanging bed of canvas or rope netting (usually suspended between two trees); swings easily\nn03482405\ta basket usually with a cover\nn03482523\ta rotating pointer on the face of a timepiece\nn03482877\ta small rubber ball used in playing 
the game of handball\nn03483086\ta rectangular frame with handles at both ends; carried by two people\nn03483230\ta bell that is held in the hand\nn03483316\ta hand-held electric blower that can blow warm air onto the hair; used for styling hair\nn03483531\ta bow drawn by hand as distinguished from a crossbow\nn03483637\ta brake operated by hand; usually operates by mechanical linkage\nn03483823\ta calculator small enough to hold in the hand or carry in a pocket\nn03483971\ta small railroad car propelled by hand or by a small motor\nn03484083\twheeled vehicle that can be pushed by a person; may have one or two or four wheels\nn03484487\tmoisturizing cream for the hands\nn03484576\tshackle that consists of a metal loop that can be locked around the wrist; usually used in pairs\nn03484809\ta small portable drill held and operated by hand\nn03484931\tlight microscope consisting of a single convex lens that is used to produce an enlarged image\nn03485198\ta mirror intended to be held in the hand\nn03485309\ta grenade designed to be thrown by hand\nn03485407\ta portable battery-powered computer small enough to be carried in your pocket\nn03485575\tan appendage to hold onto\nn03485794\ta square piece of cloth used for wiping the eyes or nose or as a costume accessory\nn03487090\tthe shaped bar used to steer a bicycle\nn03487331\ta loom powered by hand\nn03487444\tlotion used to soften the hands\nn03487533\tluggage that is light enough to be carried by hand\nn03487642\toutgrown garment passed down from one person to another\nn03487774\ta lawn mower that is operated by hand\nn03487886\ta pump worked by hand\nn03488111\ta support for the hand\nn03488188\ta saw used with one hand for cutting wood\nn03488438\ttelephone set with the mouthpiece and earpiece mounted on a single handle\nn03488603\ta shovel that is operated by hand\nn03488784\ta metal bar (or length of pipe) used as a lever\nn03488887\ta stamp (usually made of rubber) for imprinting a mark or design by 
hand\nn03489048\ta hand-operated lever that controls the throttle valve\nn03489162\ta tool used with workers' hands\nn03490006\ta small towel used to dry the hands or face\nn03490119\ta handcart that has a frame with two low wheels and a ledge at the bottom and handles at the top; used to move crates or other heavy objects\nn03490324\tclothing for the hands\nn03490449\tcontrol consisting of a wheel whose rim serves as the handle by which a part is operated\nn03490649\ta wheel worked by hand\nn03490784\tan airplane with a bad maintenance record\nn03490884\tanything from which something can be hung\nn03491032\ta glider resembling a large kite; the rider hangs from it while descending from a height\nn03491724\ta rope that is used by a hangman to execute persons who have been condemned to death by hanging\nn03491988\ta coil of rope or wool or yarn\nn03492087\ta two-wheeled horse-drawn covered carriage with the driver's seat above and behind the passengers\nn03492250\ta place of refuge and comfort and security\nn03492542\ta rigid magnetic disk mounted permanently in a drive unit\nn03492922\ta lightweight protective helmet (plastic or metal) worn by construction workers\nn03493219\ta car that resembles a convertible but has a fixed rigid top\nn03493792\tinstrumentalities (tools or implements) made of metal\nn03493911\ta store selling hardware\nn03494278\ta small rectangular free-reed instrument having a row of free reeds set back in air holes and played by blowing into the desired hole\nn03494537\ta free-reed instrument in which air is forced through the reeds by bellows\nn03494706\tstable gear consisting of an arrangement of leather straps fitted to a draft animal so that it can be attached to and pull a cart\nn03495039\ta support consisting of an arrangement of straps for holding something to the body (especially one supporting a person suspended from a parachute)\nn03495258\ta chordophone that has a triangular frame consisting of a sounding board and a pillar and a 
curved neck; the strings stretched between the neck and the soundbox are plucked with the fingers\nn03495570\ta pair of curved vertical supports for a lampshade\nn03495671\ta spear with a shaft and barbed point for throwing; used for catching large fish or whales; a strong line is attached to it\nn03495941\ta cannon or similar gun that fires harpoons\nn03496183\ta cylindrical log with a device that registers distance\nn03496296\ta clavier with strings that are plucked by plectra mounted on pivots\nn03496486\ta loosely woven tweed made in the Outer Hebrides\nn03496612\ta cultivator that pulverizes or smooths the soil\nn03496892\tfarm machine that gathers a food crop from the fields\nn03497100\tan inexpensive restaurant\nn03497352\ta fastener for a door or lid; a hinged metal plate is fitted over a staple and is locked with a pin or padlock\nn03497657\theaddress that protects the head from bad weather; has shaped crown and usually a brim\nn03498441\ta round piece of luggage for carrying hats\nn03498536\ta movable barrier covering a hatchway\nn03498662\ta sloping rear car door that is lifted to open\nn03498781\ta car having a hatchback door\nn03498866\ta comb for separating flax fibers\nn03498962\ta small ax with a short handle used with one hand (usually to chop wood)\nn03499354\ta long sturdy pin used by women to secure a hat to their hair\nn03499468\ta long (usually sleeveless) tunic of chain mail formerly worn as defensive armor\nn03499907\tguitar whose steel strings are twanged while being pressed with a movable steel bar for a glissando effect\nn03500090\tthe hole that an anchor rope passes through\nn03500209\tlarge heavy rope for nautical use\nn03500295\ta knot uniting the ends of two lines\nn03500389\ta bale of hay\nn03500457\ta long-handled fork for turning or lifting hay\nn03500557\ta loft in a barn where hay is stored\nn03500699\ta farm machine that treats hay to cause more rapid and even drying\nn03500838\ta frame attached to a wagon to increase the amount 
of hay it can carry\nn03500971\ta rack that holds hay for feeding livestock\nn03501152\tan obstacle on a golf course\nn03501288\ta projection out from one end\nn03501520\t(nautical) a toilet on board a boat or ship\nn03501614\tthe striking part of a tool\nn03502200\ta vertical board or panel forming the head of a bedstead\nn03502331\ta garment that covers the head and face\nn03502509\tclothing for the head\nn03502777\ta machine that cuts the heads off grain and moves them into a wagon\nn03502897\ta framing member crossing and supporting the ends of joists, studs, or rafters so as to transfer their weight to parallel joists, studs, or rafters\nn03503097\tbrick that is laid sideways at the top of a wall\nn03503233\thorizontal beam used as a finishing piece over a door or window\nn03503358\ta mooring line that secures the bow of a boat or ship to a wharf\nn03503477\ta gasket to seal a cylinder head\nn03503567\ta gate upstream from a lock or canal that is used to control the flow of water at the upper end\nn03503718\tstable gear consisting of any part of a harness that fits about the horse's head\nn03503997\ta powerful light with reflector; attached to the front of an automobile or locomotive\nn03504205\ta protective helmet for the head\nn03504293\tthe front bowling pin in the triangular arrangement of ten pins\nn03504723\t(usually plural) the office that serves as the administrative center of an enterprise\nn03505015\ta waterway that feeds water to a mill or water wheel or turbine\nn03505133\ta rest for the head\nn03505383\tany sail set forward of the foremast of a vessel\nn03505504\ta kerchief worn over the head and tied under the chin\nn03505667\treceiver consisting of a pair of headphones\nn03505764\ta shop specializing in articles of interest to drug users\nn03506028\tthe band that is the part of a bridle that fits around a horse's head\nn03506184\tthe stationary support in a machine or power tool that supports and drives a revolving part (as a chuck or the 
spindle on a lathe)\nn03506370\ta place of business with equipment and facilities for exercising and improving physical fitness\nn03506560\ta conical acoustic device formerly used to direct sound to the ear of a hearing-impaired person\nn03506727\tan electronic device that amplifies sound and is worn to compensate for poor hearing\nn03506880\ta vehicle for carrying a coffin to a church or a cemetery; formerly drawn by horses but now usually a motor vehicle\nn03507241\thome symbolized as a part of the fireplace\nn03507458\ta rug spread out in front of a fireplace\nn03507658\ta pump to maintain circulation during heart surgery; diverts blood from the heart and oxygenates it and then pumps it through the body\nn03507963\tany engine that makes use of heat to do work\nn03508101\tdevice that heats water or supplies warmth to a room\nn03508485\tdevice that transfers heat from one liquid to another without allowing them to mix\nn03508881\theater consisting of electrical heating elements contained in a flexible pad\nn03509394\telectric heater consisting of a high-power incandescent lamp that emits infrared radiation\nn03509608\tapparatus that extracts heat from a liquid that is at a higher temperature than its surroundings; can be used to transfer heat from a reservoir outside in order to heat a building\nn03509843\ta missile with a guidance system that directs it toward targets emitting infrared radiation (as the emissions of a jet engine)\nn03510072\ta protective covering that protects a spacecraft from overheating on reentry\nn03510244\ta metal conductor specially designed to conduct (and radiate) heat\nn03510384\ta large medieval helmet supported on the shoulders\nn03510487\ta bar used as a lever (as in twisting rope)\nn03510583\ta non-buoyant aircraft that requires a source of power to hold it aloft and to propel it\nn03510866\tan oboe pitched an octave below the ordinary oboe\nn03510987\tduplicator consisting of a gelatin plate from which ink can be taken to make a 
copy\nn03511175\ta fence formed by a row of closely planted shrubs or bushes\nn03511333\ta garden tool for trimming hedges\nn03512030\ta tuba that coils over the shoulder of the musician\nn03512147\tan aircraft without wings that obtains its lift from the rotation of overhead blades\nn03512452\tan apparatus for sending telegraphic messages by using a mirror to turn the sun's rays off and on\nn03512624\tan instrument used to measure the angular separation of two stars that are too far apart to be included in the field of view of an ordinary telescope\nn03512911\tsteering mechanism for a vessel; a mechanical device by which a vessel is steered\nn03513137\ta protective headgear made of hard material to resist blows\nn03513376\tarmor plate that protects the head\nn03514129\ta measuring instrument to determine (usually by centrifugation) the relative amounts of corpuscles and plasma in the blood\nn03514340\ta stitch used in sewing hems on skirts and dresses\nn03514451\ta surgical instrument that stops bleeding by clamping the blood vessel\nn03514693\ta stitch in which parallel threads are drawn and exposed threads are caught together in groups\nn03514894\ta roost for hens at night\nn03515338\temblem indicating the right of a person to bear arms\nn03515934\tthe abode of a hermit\nn03516266\ta twilled fabric with a herringbone pattern\nn03516367\ta pattern of columns of short parallel lines with all the lines in one column sloping one way and lines in adjacent columns sloping the other way; it is used in weaving, masonry, parquetry, embroidery\nn03516647\ta reflecting telescope with the mirror slightly tilted to throw the image to the side where it can be viewed\nn03516844\t(19th century) a man's high tasseled boot\nn03516996\ta radio receiver that combines a locally generated frequency with the carrier frequency to produce a supersonic signal that is demodulated and amplified\nn03517509\ta portable brazier that burns charcoal and has a grill for cooking\nn03517647\tan 
area where you can be alone\nn03517760\tequipment for the reproduction of sound with high fidelity\nn03517899\tthe main altar in a church\nn03517982\ta cannon that can be fired at a high elevation for relatively short ranges\nn03518135\ta tall glass for serving highballs\nn03518230\ta high diving board\nn03518305\ta tall chest of drawers divided into two sections and supported on four legs\nn03518445\ta chair for feeding a very young child; has four long legs and a footrest and a detachable tray\nn03518631\ta forward gear with a gear ratio that gives the greatest vehicle velocity for a given engine speed\nn03518829\tcymbals that are operated by a foot pedal\nn03518943\ta fluorescent marker used to mark important passages in a text\nn03519081\ta cosmetic used to highlight the eyes or cheekbones\nn03519226\ta filter that passes frequencies above a certain value and attenuates frequencies below that value\nn03519387\ttower consisting of a multistoried building of offices or apartments\nn03519674\ta dining table in a dining-hall raised on a platform; seats are reserved for distinguished persons\nn03519848\ta handloom in which the warp is carried vertically; for weaving tapestry\nn03520493\ta headscarf worn by Muslim women; conceals the hair and neck and usually has a face veil that covers the face\nn03521076\ta joint that holds two parts together so that one can swing relative to the other\nn03521431\tthe gatepost on which the gate is hung\nn03521544\ta very high boot; used especially for fishing\nn03521675\ta flask that holds spirits\nn03521771\tprotective garment consisting of a pad worn by football and hockey players\nn03521899\ta pocket in rear of trousers\nn03522003\ta stadium for horse shows or horse races\nn03522100\ta roof having sloping ends as well as sloping sides\nn03522634\ta knot that can be undone by pulling against the strain that holds it; a temporary knot\nn03522863\ta connection between a vehicle and the load that it pulls\nn03522990\ta fixed post 
with a ring to which a horse can be hitched to prevent it from straying\nn03523134\ta fixed horizontal rail to which a horse can be hitched to prevent it from straying\nn03523398\ta hard steel edge tool used to cut gears\nn03523506\ta long skirt very narrow below the knees, worn between 1910 and 1914\nn03523987\tan ice skate worn for playing hockey; has a short blade and a strong boot to protect the feet and ankles\nn03524150\tsports implement consisting of a stick used by hockey players to move the puck\nn03524287\tan open box attached to a long pole handle; bricks or mortar are carried on the shoulder\nn03524425\t(physics) scientific instrument that traces the path of a charged particle\nn03524574\ta tool with a flat blade attached at right angles to a long handle\nn03524745\tthe handle of a hoe\nn03524976\ta large cask especially one holding 63 gals\nn03525074\tlifting device for raising heavy or cumbersome objects\nn03525252\ta cell in a jail or prison\nn03525454\ta holding device\nn03525693\ta jail in a courthouse where accused persons can be confined during a trial\nn03525827\ta device for holding something\nn03526062\ta pen where livestock is temporarily confined\nn03527149\tsilverware serving dishes\nn03527444\ta sheath (usually leather) for carrying a handgun\nn03527565\ta belt with loops or slots for carrying small hand tools\nn03527675\t(Judaism) sanctuary comprised of the innermost chamber of the Tabernacle in the temple of Solomon where the Ark of the Covenant was kept\nn03528100\tan institution where people are cared for\nn03528263\tan appliance that does a particular job in the home\nn03528523\ta computer intended for use in the home\nn03528901\t(baseball) base consisting of a rubber slab where the batter stands; it must be touched by a base runner in order to score\nn03529175\ta classroom in which all students in a particular grade (or in a division of a grade) meet at certain times under the supervision of a teacher who takes attendance and does 
other administrative business\nn03529444\ta rough loosely woven fabric originally made with yarn that was spun at home\nn03529629\tdwelling that is usually a farmhouse and adjoining land\nn03529860\ttelevision and video equipment designed to reproduce in the home the experience of being in a movie theater\nn03530189\ta torpedo that is guided to its target (as by the sound of a ship's engines)\nn03530511\ta whetstone made of fine gritstone; used for sharpening razors\nn03530642\ta framework of hexagonal cells resembling the honeycomb built by bees\nn03530910\tprotective covering consisting of a metal part that covers the engine\nn03531281\ta headdress that protects the head and face\nn03531447\tthe folding roof of a carriage\nn03531546\tmetal covering leading to a vent that exhausts smoke or fumes\nn03531691\t(falconry) a leather covering for a hawk's head\nn03531982\ta catch that holds the hood of a car shut\nn03532342\ta curved or bent implement for suspending or pulling something\nn03532672\ta mechanical device that is curved or bent to suspend or hold or pull something\nn03532919\ta catch for locking a door\nn03533014\tan oriental tobacco pipe with a long flexible tube connected to a container where the smoke is cooled by passing through water\nn03533392\ta kind of fastener used on clothing\nn03533486\ta system of components assembled together for a particular purpose\nn03533654\ta device providing a connection between a power source and a user\nn03533845\ta wrench with a hook that fits over a nut or bolt head\nn03534580\ta skirt stiffened with hoops\nn03534695\tslang for a jail\nn03534776\ta kind of vacuum cleaner\nn03535024\tchest for storage of clothing (trousseau) and household goods in anticipation of marriage\nn03535284\tfunnel-shaped receptacle; contents pass by gravity into a receptacle below\nn03535647\ta loosely woven coarse fabric of cotton or linen; used in clothing\nn03535780\tgymnastic apparatus consisting of a bar supported in a horizontal 
position by uprights at both ends\nn03536122\tthe horizontal airfoil of an aircraft's tail assembly that is fixed and to which the elevator is hinged\nn03536568\tthe horizontal stabilizer and elevator in the tail assembly of an aircraft\nn03536761\tan alarm device that makes a loud warning sound\nn03537085\ta noisemaker (as at parties or games) that makes a loud noise when you blow through it\nn03537241\ta device having the shape of a horn\nn03537412\ta button that you press to activate the horn of an automobile\nn03537550\tan ancient (now obsolete) single-reed woodwind; usually made of bone\nn03538037\ta padded gymnastic apparatus on legs\nn03538179\ta conveyance (railroad car or trailer) for transporting racehorses\nn03538300\tan early form of streetcar that was drawn by horses\nn03538406\theavy cart; drawn by a horse; used for farm work\nn03538542\ta cloth for the trapping of a horse\nn03538634\ta wheeled vehicle drawn by one or more horses\nn03538817\ta fabric made from fibers taken from the mane or tail of horses; used for upholstery\nn03538957\ta wig made of horsehair\nn03539103\tan early term for an automobile\nn03539293\ta large pistol (usually in a holster) formerly carried by horsemen\nn03539433\tU-shaped plate nailed to underside of horse's hoof\nn03539546\tgame equipment consisting of an open ring of iron used in playing horseshoes\nn03539678\ta trail for horses\nn03539754\ta whip for controlling horses\nn03540090\tman's close-fitting garment of the 16th and 17th centuries covering the legs and reaching up to the waist; worn with a doublet\nn03540267\tsocks and stockings and tights collectively (the British include underwear)\nn03540476\ta lodging for travelers (especially one kept by a monastic order)\nn03540595\ta health facility where patients receive treatment\nn03540914\ta single bed with a frame in three sections so the head or middle or foot can be raised as required\nn03541091\ta room in a hospital for the care of patients\nn03541269\ta ship 
built to serve as a hospital; used for wounded in wartime\nn03541393\ta military train built to transport wounded troops to a hospital\nn03541537\tinexpensive supervised lodging (especially for youths on bicycling trips)\nn03541696\ta hotel providing overnight lodging for travelers\nn03541923\tballoon for travel through the air in a basket suspended below a large bag of heated air\nn03542333\ta building where travelers can pay for lodging and meals and other services\nn03542605\ta building that houses both a hotel and a casino\nn03542727\ta business establishment that combines a casino and a hotel\nn03542860\ta bedroom (usually with bath) in a hotel\nn03543012\ta direct telephone line between two officials\nn03543112\tskin-tight very short pants worn by young women as an outer garment\nn03543254\ta portable electric appliance for heating or cooking or keeping food warm\nn03543394\ta car modified to increase its speed and acceleration\nn03543511\ta lively entertainment spot\nn03543603\ta very large tub (large enough for more than one bather) filled with hot water\nn03543735\ta stoppered receptacle (usually made of rubber) that is to be filled with hot water and used for warming a bed or parts of the body\nn03543945\ttextile with a pattern of small broken or jagged checks\nn03544143\ta sandglass that runs for sixty minutes\nn03544238\tthe shorter hand of a clock that points to the hours\nn03544360\ta dwelling that serves as living quarters for one or more families\nn03545150\ta building in which something is sheltered or located\nn03545470\ta barge that is designed and equipped for use as a dwelling\nn03545585\tlights that illuminate the audience's part of a theater or other auditorium\nn03545756\tan unstable construction with playing cards\nn03545961\t(formerly) a jail or other place of detention for persons convicted of minor offences\nn03546112\tpaint used to cover the exterior woodwork of a house\nn03546235\tthe roof of a house\nn03546340\tstructures collectively 
in which people are housed\nn03547054\tsmall crude shelter used as a dwelling\nn03547229\ta craft capable of moving over water or land on a cushion of air created by jet engines\nn03547397\ta (usually canopied) seat for riding on the back of a camel or elephant\nn03547530\ta sandal with flat heels and an upper of woven leather straps\nn03547861\ta system of air transportation in which local airports offer air transportation to a central airport where long-distance flights are available\nn03548086\tcap that fits over the hub of a wheel\nn03548195\ttoweling consisting of coarse absorbent cotton or linen fabric\nn03548320\ta woman's fitted jacket\nn03548402\tplaything consisting of a tubular plastic hoop for swinging around the hips\nn03548533\ta ship that has been wrecked and abandoned\nn03548626\tthe frame or body of ship\nn03548930\ta vestment worn by a priest at High Mass in the Roman Catholic Church; a silk shawl\nn03549199\ta high mobility, multipurpose, military vehicle with four-wheel drive\nn03549350\ta watch with a hinged metal lid to protect the crystal\nn03549473\ta large sharp knife with a handle shaped to fit the grip\nn03549589\ta light movable barrier that competitors must leap over in certain races\nn03549732\ta deck at the top of a passenger ship\nn03549897\tan oil lamp with a glass chimney and perforated metal lid to protect the flame from high winds; candlestick with a glass chimney\nn03550153\ttemporary military shelter\nn03550289\ta cage (usually made of wood and wire mesh) for small animals\nn03550420\tan encampment of huts (chiefly military)\nn03551084\tbrake system in which a brake pedal moves a piston in the master cylinder; brake fluid then applies great force to the brake pads or shoes\nn03551395\tpress in which a force applied by a piston to a small area is transmitted through water to another piston having a large area\nn03551582\ta water pump that uses the kinetic energy of flowing water to force a small fraction of that water to a 
reservoir at a higher level\nn03551790\ta mechanism operated by the resistance offered or the pressure transmitted when a liquid is forced through a small opening or tube\nn03552001\ta transmission that depends on a hydraulic system\nn03552449\tturbine consisting of a large and efficient version of a water wheel used to drive an electric generator\nn03552749\ta speedboat that is equipped with winglike structures that lift it so that it skims the water at high speeds\nn03553019\ta device consisting of a flat or curved piece (as a metal plate) so that its surface reacts to the water it is passing through\nn03553248\ta nuclear weapon that releases atomic energy by union of light (hydrogen) nuclei at high temperatures to form helium\nn03553486\ta measuring instrument for determining the specific gravity of a liquid or solid\nn03554375\ta wet and dry bulb hygrometer\nn03554460\tmeasuring instrument for measuring the relative humidity of the atmosphere\nn03554645\thygrometer that shows variations in the relative humidity of the atmosphere\nn03555006\ta large chamber in which the oxygen pressure is above normal for the atmosphere; used in treating breathing disorders or carbon monoxide poisoning\nn03555217\ta roller coaster that goes up 200 feet or higher and can catapult riders from 0 to 70 mph in 4 seconds by motors originally designed to launch rockets\nn03555426\ta huge supermarket (usually built on the outskirts of a town)\nn03555564\ta hollow needle\nn03555662\ta piston syringe that is fitted with a hypodermic needle for giving injections\nn03555862\tan altimeter that uses the boiling point of water to determine land elevation\nn03555996\tX ray of the uterus and Fallopian tubes; usually done in diagnosing infertility (to see if there any blockages)\nn03556173\tgirder having a cross section resembling the letter `I'\nn03556679\tan ax used by mountain climbers for cutting footholds in ice\nn03556811\ta sailing vessel with runners and a cross-shaped frame; suitable for 
traveling over ice\nn03556992\ta ship with a reinforced bow to break up ice and keep channels open for navigation\nn03557270\ta teaspoon with a long handle\nn03557360\tan ice rink for playing ice hockey\nn03557590\tan electric refrigerator to supply ice cubes\nn03557692\tan appliance included in some electric refrigerators for making ice cubes\nn03557840\ta waterproof bag filled with ice: applied to the body (especially the head) to cool or reduce swelling\nn03558007\tpick consisting of a steel rod with a sharp point; used for breaking up blocks of ice\nn03558176\ta rink with a floor of ice for ice hockey or ice skating\nn03558404\tskate consisting of a boot with a steel blade fitted to the sole\nn03558633\ttongs for lifting blocks of ice\nn03558739\ta tray for making cubes of ice in a refrigerator\nn03559373\tthe first practical television-camera for picture pickup; invented in 1923 by Vladimir Kosma Zworykin\nn03559531\ta likeness of a person's face constructed from descriptions given to police; uses a set of transparencies of various facial features that can be combined to build up a picture of the person sought\nn03559999\ta pulley on a shaft that presses against a guide belt to guide or tighten it\nn03560430\tan Eskimo hut; usually built of blocks (of sod or snow) in the shape of a dome\nn03560860\tan induction coil that converts current from a battery into the high-voltage current required by spark plugs\nn03561047\ta key that operates the ignition switch of an automotive engine\nn03561169\tswitch that operates a solenoid that closes a circuit to operate the starter\nn03561573\ta hostel for pilgrims in Turkey\nn03562565\ta bandage of cloth impregnated with a substance (e.g., plaster of Paris) that hardens soon after it is applied\nn03563200\ta printer that prints by mechanical impacts\nn03563460\tthe blade of a rotor (as in the compressor of a jet engine)\nn03563710\ta prosthesis placed permanently in tissue\nn03563967\tinstrumentation (a piece of equipment 
or tool) used to effect an end\nn03564849\t(dentistry) an imprint of the teeth and gums in wax or plaster\nn03565288\ta device produced by pressure on a surface\nn03565565\tan explosive device that is improvised\nn03565710\ta turbine that is driven by jets direct against the blades\nn03565830\ta wood or metal receptacle placed on your desk to hold your incoming material\nn03565991\ta bomb that is designed to start fires; is most effective against flammable targets (such as fuel)\nn03566193\ta furnace for incinerating (especially to dispose of refuse)\nn03566329\ta simple machine for elevating objects; consists of plane surface that makes an acute angle with the horizontal\nn03566555\ta measuring instrument for measuring the angle of magnetic dip (as from an airplane)\nn03566730\tan instrument showing the angle that an aircraft makes with the horizon\nn03566860\ta decorative coating of contrasting material that is applied to a surface as an inlay or overlay\nn03567066\tapparatus consisting of a box designed to maintain a constant temperature by the use of a thermostat; used for chicks or premature infants\nn03567635\t(computer science) a register used to determine the address of an operand\nn03567788\ta large sailing ship that was engaged in the British trade with India\nn03567912\ta bottle-shaped club used in exercises\nn03568117\ta device for showing the operating condition of some system\nn03568818\ta coil for producing a high voltage from a low-voltage source\nn03569014\tan electrical device (typically a conducting coil) that introduces inductance into a circuit\nn03569174\ta canal that is operated by one or more industries\nn03569293\ta system to control a plane or spacecraft; uses inertial forces\nn03569494\tan air pump operated by hand to inflate something (as a tire)\nn03571280\ta dispenser that produces a chemical vapor to be inhaled in order to relieve nasal congestion\nn03571439\ta contrivance for injecting (e.g., water into the boiler of a steam engine 
or particles into an accelerator etc.)\nn03571625\ta bottle of ink\nn03571853\tan eraser that removes ink marks\nn03571942\ta printer that produces characters by projecting electrically charged droplets of ink\nn03572107\ta linen tape used for trimming as a decoration\nn03572205\ta tray or stand for writing implements and containers for ink\nn03572321\ta small well holding writing ink into which a pen can be dipped\nn03572631\t(dentistry) a filling consisting of a solid substance (as gold or porcelain) fitted to a cavity in a tooth and cemented into place\nn03573574\tcaliper for measuring inside dimensions (the size of a cavity or hole); points on its legs curve outward\nn03573848\tthe inner sole of a shoe or boot where the foot rests\nn03574243\tthe part of a shoe or stocking that covers the arch of the foot\nn03574416\tmedical apparatus that puts a liquid into a cavity drop by drop\nn03574555\tan establishment consisting of a building or complex of buildings where an organization for the promotion of some cause is situated\nn03574816\ta device that requires skill for proper use\nn03575958\tan instrument designed and used to punish a condemned person\nn03576215\tan instrument of punishment designed and used to inflict torture on the condemned person\nn03576443\tglyptic art consisting of a sunken or depressed engraving or carving on a stone or gem (as opposed to cameo)\nn03576955\ta valve that controls the flow of fluid through an intake\nn03577090\ta microelectronic computer circuit incorporated into a chip or semiconductor; a whole system rather than a single component\nn03577312\ta measuring instrument for measuring the area of an irregular plane figure\nn03577474\ta computer network similar to but separate from the internet; devoted to the dissemination of information to and for the Intelligence Community\nn03577672\ta fast maneuverable fighter plane designed to intercept enemy aircraft\nn03577818\ta junction of highways on different levels that permits traffic 
to move from one to another without crossing traffic streams\nn03578055\ta communication system linking different rooms within a building or ship etc\nn03578251\ta ballistic missile that is capable of traveling from one continent to another\nn03578656\t(computer science) computer circuit consisting of the hardware and associated circuitry that links one device with another (especially a computer and a hard disk drive or other peripherals)\nn03578981\tany measuring instrument that uses interference patterns to make accurate measurements of waves\nn03579538\ta door that closes off rooms within a building\nn03579982\ta heat engine in which combustion occurs inside the engine rather than in a separate furnace; heat expands a gas that either moves a piston or turns a gas turbine\nn03580518\ta drive mounted inside of a computer\nn03580615\ta computer network consisting of a worldwide network of computer networks that use the TCP/IP network protocols to facilitate data transmission and exchange\nn03580845\ta telephonic intercommunication system linking different rooms in a building or ship etc\nn03580990\ta device for automatically interrupting an electric current\nn03581125\ta junction where one street or road crosses another\nn03581531\tsmall opening between things\nn03581897\tan artificial lens that is implanted into the eye of someone to replace a damaged natural lens or someone who has had a cataract removed\nn03582508\tX-ray picture of the kidneys and ureters after injection of a radiopaque dye\nn03582959\tan electrical converter that converts direct current into alternating current\nn03583419\ta type of reaction-propulsion engine to propel rockets in space; a stream of positive ions is accelerated to a high velocity by an electric field\nn03583621\ta measuring instrument that measures the amount of ionizing radiation\nn03584254\t(trademark) a pocket-sized device used to play music files\nn03584400\t(trademark) an iPod that can also play video files\nn03584829\thome 
appliance consisting of a flat metal base that is heated and used to smooth cloth\nn03585073\ta golf club that has a relatively narrow metal head\nn03585337\timplement used to brand live stock\nn03585438\tmetal shackles; for hands or legs\nn03585551\ta wooden warship of the 19th century that is plated with iron or steel armor\nn03585682\ta foundry where cast iron is produced\nn03585778\t(c. 1840) an early term for a locomotive\nn03585875\tgarments (clothes or linens) that are to be (or have been) ironed\nn03586219\trespirator that produces alternations in air pressure in a chamber surrounding a patient's chest to force air into and out of the lungs thus providing artificial respiration\nn03586631\tthe merchandise that is sold in an ironmonger's shop\nn03586911\tthe workplace where iron is smelted or where iron goods are made\nn03587205\ta ditch to supply dry land with water artificially\nn03588216\ta voluminous cotton outer garment (usually white) traditionally worn by Muslim women of northern Africa and the Middle East; covers the entire body\nn03588841\ta ruffle on the front of a woman's blouse or a man's shirt\nn03588951\ttool for exerting pressure or lifting\nn03589313\tgame equipment consisting of one of several small six-pointed metal pieces that are picked up while bouncing a ball in the game of jacks\nn03589513\tan electrical device consisting of a connector socket designed for the insertion of a plug\nn03589672\ta small ball at which players aim in lawn bowling\nn03589791\ta short coat\nn03590306\tan outer wrapping or casing\nn03590475\tthe tough metal shell casing for certain kinds of ammunition\nn03590588\tplaything consisting of a toy clown that jumps out of a box when the lid is opened\nn03590841\tlantern carved from a pumpkin\nn03590932\ta carpenter's plane for rough finishing\nn03591116\t(nautical) a hanging ladder of ropes or chains supporting wooden or metal rungs or steps\nn03591313\ta lightweight cotton cloth with a smooth and slightly stiff 
finish; used for clothing and bandages\nn03591592\ta loom with an attachment for forming openings for the passage of the shuttle between the warp threads; used in weaving figured fabrics\nn03591798\ta highly figured fabric woven on a Jacquard loom\nn03591901\ta flap along the edge of a garment; used in medieval clothing\nn03592245\ta correctional institution used to detain persons who are in the lawful custody of the government (either accused persons awaiting trial or convicted persons serving a sentence)\nn03592669\ta shutter made of angled slats\nn03592773\tupright consisting of a vertical side member of a door or window frame\nn03592931\ta transmitter used to broadcast electronic jamming\nn03593122\ta jar for holding jellies or preserves\nn03593222\tlacquer with a durable glossy black finish, originally from the orient\nn03593526\ta vessel (usually cylindrical) with a wide mouth and without handles\nn03593862\ta kind of artificial heart that has been used with some success\nn03594010\tan open two-wheeled one-horse cart formerly widely used in Ireland\nn03594148\ta spear thrown as a weapon or in competitive field events\nn03594277\tholding device consisting of one or both of the opposing parts of a tool that close to hold an object\nn03594523\thydraulic tool inserted into a wrecked vehicle and used to pry the wreckage apart in order to provide access to people trapped inside\nn03594734\t(usually plural) close-fitting trousers of heavy denim for manual work or casual wear\nn03594945\ta car suitable for traveling over rough terrain\nn03595055\ta loose cloak with a hood; worn in the Middle East and northern Africa\nn03595264\ta tight sleeveless and collarless jacket (often made of leather) worn by men in former times\nn03595409\ta large wine bottle (holds 4/5 of a gallon)\nn03595523\ta slightly elastic machine-knit fabric\nn03595614\ta close-fitting pullover shirt\nn03595860\tan airplane powered by one or more jet engines\nn03596099\tan extendible bridge for 
loading passengers onto large commercial airplanes; provides protected access to the plane from the gate\nn03596285\ta gas turbine produces a stream of hot gas that propels a jet plane by reaction propulsion\nn03596543\ta large jet plane that carries passengers\nn03597147\tan optical instrument used by jewelers; has one or more lenses and is used to view features not readily seen\nn03597317\ta headdress adorned with jewels\nn03597916\ta small lyre-shaped musical instrument that is placed between the teeth and played by twanging a wire tongue while changing the shape of the mouth cavity\nn03598151\tany triangular fore-and-aft sail (set forward of the foremast)\nn03598299\ta spar that extends the bowsprit\nn03598385\ta device that holds a piece of machine work and guides the tools operating on it\nn03598515\ta fisherman's lure with one or more hooks that is jerked up and down in the water\nn03598646\tany small mast on a sailing vessel; especially the mizzenmast of a yawl\nn03598783\tfine-toothed power saw with a narrow blade; used to cut curved outlines\nn03598930\ta puzzle that requires you to reassemble a picture that has been mounted on a stiff base and cut into interlocking pieces\nn03599486\ta small two-wheeled cart for one passenger; pulled by one person\nn03599964\ta government office in a town where information about available jobs is displayed and where unemployment benefits are administered\nn03600285\tflared trousers ending at the calves; worn with riding boots\nn03600475\ta short riding boot that fastens with a buckle at the side\nn03600722\tfine woodwork done by a joiner\nn03600977\tjunction by which parts or objects are joined together\nn03601442\ta pinpoint bomb guidance device that can be strapped to a gravity bomb thus converting dumb bombs into smart bombs\nn03601638\ta long carpenter's plane used to shape the edges of boards so they will fit together\nn03601840\tbeam used to support floors or roofs\nn03602081\ta yawl used by a ship's sailors for 
general work\nn03602194\ta large drinking bowl\nn03602365\ta Chinese temple or shrine for idol worship\nn03602686\tthe bearing of a journal\nn03602790\tmetal housing for a journal bearing\nn03602883\ta manual control consisting of a vertical handle that can move freely in two directions; used as an input device to computers or to devices controlled by computers\nn03603442\ta structure of vertical and horizontal rods where children can climb and play\nn03603594\tany of various Chinese boats with a high poop and lugsails\nn03603722\ta large bottle with a narrow mouth\nn03604156\ta cabinet containing an automatic record player; records are played by inserting a coin\nn03604311\ta very large jet plane\nn03604400\ta sleeveless dress resembling an apron; worn over other clothing\nn03604536\ta loose jacket or blouse worn by workmen\nn03604629\ta small connector used to make temporary electrical connections\nn03604763\ta coverall worn by children\nn03604843\ta jumper that consists of a short piece of wire\nn03605417\ta folding seat in an automobile\nn03605504\tone-piece uniform worn by parachutists\nn03605598\tone-piece garment fashioned after a parachutist's uniform\nn03605722\tthe place where two or more things come together\nn03605915\tsomething that joins or connects\nn03606106\ta junction unit for connecting 2 cables without the need for plugs\nn03606251\ta shop that sells cheap secondhand goods\nn03606347\tan enclosure within a courtroom for the jury\nn03606465\ta temporary mast to replace one that has broken off\nn03607029\ta carved doll wearing the costume of a particular Pueblo spirit; usually presented to a child as a gift\nn03607186\tan Arab headdress consisting of a square piece of cloth folded into a triangle and fastened over the crown by an agal\nn03607527\ta cap that is wrapped around by a turban and worn by Muslim religious elders\nn03607659\ta type of submachine gun made in Russia\nn03607923\ta long tunic worn by many people from the Indian subcontinent 
(usually with a salwar or churidars)\nn03608504\t(Swahili) a long garment (usually white) with long sleeves; worn by men in East Africa\nn03609147\tmeasures thermal conductivity\nn03609235\ta small canoe consisting of a light frame made watertight with animal skins; used by Eskimos\nn03609397\ta toy wind instrument that has a membrane that makes a sound when you hum into the mouthpiece\nn03609542\tone of the main longitudinal beams (or plates) of the hull of a vessel; can extend vertically into the water to provide lateral stability\nn03609786\triver boat with a shallow draught and a keel but no sails; used to carry freight; moved by rowing or punting or towing\nn03609959\ta longitudinal beam connected to the keel of ship to strengthen it\nn03610098\tthe main tower within the walls of a medieval castle or fortress\nn03610418\tsmall cask or barrel\nn03610524\toutbuilding that serves as a shelter for a dog\nn03610682\ta cap with a flat circular top and a visor\nn03610836\tmedical instrument to examine the cornea in order to detect irregularities in its anterior surface\nn03610992\ta square scarf that is folded into a triangle and worn over the head or about the neck\nn03612010\ta sailing vessel with two masts; the mizzen is forward of the rudderpost\nn03612814\ta metal pot for stewing or boiling; usually has a lid\nn03612965\ta large hemispherical brass or copper percussion instrument with a drumhead that can be tuned by adjusting the tension on it\nn03613294\tmetal device shaped in such a way that when it is inserted into the appropriate lock the lock's mechanism can be rotated\nn03613592\ta lever (as in a keyboard) that actuates a mechanism when depressed\nn03614007\tdevice consisting of a set of keys on a piano or organ or typewriter or typesetting machine or computer or the like\nn03614383\ta buffer that keeps track of key strokes until the computer is ready to respond to them\nn03614532\ta musical instrument that is played by means of a keyboard\nn03614782\tthe 
hole where a key is inserted\nn03614887\ta handsaw with a long narrow blade for cutting short radius curves; similar to a compass saw\nn03615300\ta coarse homespun cotton cloth made in India\nn03615406\ta sturdy twilled cloth of a yellowish brown color used especially for military uniforms\nn03615563\ta military uniform made of khaki fabric\nn03615655\ta headscarf worn by observant Muslim women that hangs down to just above the waist\nn03615790\ta curved steel knife with a razor-sharp edge used in combat by the Gurkhas; has cultural and religious significance in Nepal\nn03616091\tpleat in back of a straight skirt to allow ease in walking\nn03616225\tscientific instrument consisting of an electronic circuit that permits only voltage pulses of predetermined height to pass\nn03616428\ta swiveling metal rod attached to a bicycle or motorcycle or other two-wheeled vehicle; the rod lies horizontally when not in use but can be kicked into a vertical position as a support to hold the vehicle upright when it is not being ridden\nn03616763\ta starter (as on a motorcycle) that is activated with the foot and the weight of the body\nn03616979\ta glove made of fine soft leather (as kidskin)\nn03617095\ta furnace for firing or burning or drying such things as porcelain or bricks\nn03617312\ta knee-length pleated tartan skirt worn by men as part of the traditional dress in the Highlands of northern Scotland\nn03617480\ta loose robe; imitated from robes originally worn by Japanese\nn03617594\ta cathode-ray tube in a television receiver; translates the received signal into a picture on a luminescent screen\nn03617834\ta device invented by Edison that gave an impression of movement as an endless loop of film moved continuously over a light source with a rapid shutter; precursor of the modern motion picture\nn03618101\t(chess) the weakest but the most important piece\nn03618339\ta checker that has been moved to the opponent's first row where it is promoted to a piece that is free to 
move either forward or backward\nn03618546\tbolt that provides a steering joint in a motor vehicle\nn03618678\tpost connecting the crossbeam to the apex of a triangular truss\nn03618797\ta laboratory apparatus for producing a gas (usually hydrogen sulfide) by the action of a liquid on a solid without heating\nn03618982\ta Scottish church\nn03619050\ta ceremonial four-inch curved dagger that Sikh men and women are obliged to wear at all times\nn03619196\ta long dress worn by women\nn03619275\ta garment resembling a tunic that was worn by men in the Middle Ages\nn03619396\tgear consisting of a set of articles or tools for a specified purpose\nn03619650\ta case for containing a set of articles\nn03619793\ta knapsack (usually for a soldier)\nn03619890\ta room equipped for preparing meals\nn03620052\ta home appliance used in preparing food\nn03620353\tsmall kitchen\nn03620967\ta table in the kitchen\nn03621049\ta utensil used in preparing food\nn03621377\thardware utensils for use in a kitchen\nn03621694\ta barrage balloon with lobes at one end that keep it headed into the wind\nn03622058\ta kind of loud horn formerly used on motor vehicles\nn03622401\tcarbon arc lamp that emits an intense light used in producing films\nn03622526\tan electron tube used to generate or amplify electromagnetic radiation in the microwave region by velocity modulation\nn03622839\ta brace worn to strengthen the knee\nn03622931\ta sock or stocking that reaches up to just below the knees\nn03623198\tprotective garment consisting of a pad worn by football or baseball or hockey players\nn03623338\tarmor plate that protects the knee\nn03623556\tedge tool used as a cutting instrument; has a pointed blade with a sharp edge and a handle\nn03624134\ta weapon with a handle and blade with a sharp point\nn03624400\tthe blade of a knife\nn03624767\ta chessman shaped to resemble the head of a horse; can move two squares horizontally and one vertically (or vice versa)\nn03625355\ta fabric made by 
knitting\nn03625539\ta textile machine that makes knitted fabrics\nn03625646\tneedle consisting of a slender rod with pointed ends; usually used in pairs\nn03625943\tknitted clothing\nn03626115\ta circular rounded projection or protuberance\nn03626272\tan ornament in the shape of a ball on the hilt of a sword or dagger\nn03626418\ta small knob\nn03626502\ta short wooden club with a heavy knob on one end; used by aborigines in southern Africa\nn03626760\ta device (usually metal and ornamental) attached by a hinge to a door\nn03627232\tany of various fastenings formed by looping and tying a rope (or cord) upon itself or to another rope or to another object\nn03627954\ta joint allowing movement in one plane only\nn03628071\ta cosmetic preparation used by women in Egypt and Arabia to darken the edges of their eyelids\nn03628215\tJapanese stringed instrument that resembles a zither; has a rectangular wooden sounding board and usually 13 silk strings that are plucked with the fingers\nn03628421\ta pen for livestock in southern Africa\nn03628511\tcitadel of a Russian town\nn03628728\ta Malayan dagger with a wavy blade\nn03628831\ta Renaissance woodwind with a double reed and a curving tube (crooked horn)\nn03628984\ta measuring instrument used to measure the speed of sound\nn03629100\tan oriental rug woven by Kurds that is noted for fine colors and durability\nn03629231\ta loose collarless shirt worn by many people on the Indian subcontinent (usually with a salwar or churidars or pyjama)\nn03629520\ta shallow drinking cup with two handles; used in ancient Greece\nn03629643\tscientific instrument consisting of a rotating drum holding paper on which a stylus traces a continuous record (as of breathing or blood pressure)\nn03630262\ta workbench in a laboratory\nn03630383\ta light coat worn to protect clothing from substances used while working in a laboratory\nn03631177\ta delicate decorative fabric woven in an open web of symmetrical patterns\nn03631811\ta hard glossy 
coating\nn03631922\ta decorative work made of wood and covered with lacquer and often inlaid with ivory or precious metals\nn03632100\tball used in playing lacrosse\nn03632577\tthe backrest of a chair that consists of two uprights with connecting slats\nn03632729\ta chair with a ladder-back\nn03632852\ta fire engine carrying ladders\nn03632963\ta woman's restroom in a public (or semipublic) building\nn03633091\ta spoon-shaped vessel with a long handle; frequently used to transfer liquids from one container to another\nn03633341\ta small chapel in a church; dedicated to the Virgin Mary\nn03633632\tan Australian percussion instrument used for playing bush music; a long stick with bottle caps nailed loosely to it; played by hitting it with a stick or banging it on the ground\nn03633886\ta heavy woodscrew with a square or hexagonal head that is driven in with a wrench\nn03634034\tdwelling built on piles in or near a lake; specifically in prehistoric villages\nn03634899\tsupport column consisting of a steel cylinder filled with concrete\nn03635032\ta monastery for lamas\nn03635108\tshort and decorative hanging for a shelf edge or top of a window casing\nn03635330\ta fabric interwoven with threads of metal\nn03635516\ta clean room free of all extraneous particles; used in fabricating microprocessors\nn03635668\ta sheet of material made by bonding two or more sheets or layers\nn03635932\ta layered structure\nn03636248\tan artificial source of visible illumination\nn03636649\ta piece of furniture holding one or more electric light bulbs\nn03637027\thousing that holds a lamp (as in a movie projector)\nn03637181\ta metal post supporting an outdoor lamp (such as a streetlight)\nn03637318\ta protective ornamental shade used to screen a light bulb from direct view\nn03637480\ta veranda or roofed patio often furnished and used as a living room\nn03637787\tan acutely pointed Gothic arch, like a lance\nn03637898\ta narrow window having a lancet arch and without 
tracery\nn03638014\ta four-wheel covered carriage with a roof divided into two parts (front and back) that can be let down separately\nn03638180\ta space vehicle that is designed to land on the moon or another planet\nn03638623\tnaval craft designed for putting ashore troops and equipment\nn03638743\ta flap on the underside of the wing that is lowered to slow the plane for landing\nn03638883\tan undercarriage that supports the weight of the plane when it is on the ground\nn03639077\ta bag-shaped fishnet on a long handle to take a captured fish from the water\nn03639230\tone of two parts of the landing gear of a helicopter\nn03639497\ta telephone line that travels over terrestrial circuits\nn03639675\tan explosive mine hidden underground; explodes when stepped on or driven over\nn03639880\ta government office where business relating to public lands is transacted\nn03640850\tan emollient containing wool fat (a fatty substance obtained from the wool of sheep)\nn03640988\tlight in a transparent protective case\nn03641569\ta cord with an attached hook that is used to fire certain types of cannon\nn03641947\tthe part of a piece of clothing that covers the thighs\nn03642144\ta slender endoscope inserted through an incision in the abdominal wall in order to examine the abdominal organs or to perform minor surgery\nn03642341\twriting board used on the lap as a table or desk\nn03642444\tlap at the front of a coat; continuation of the coat collar\nn03642573\tjoint made by overlapping two ends and joining them together\nn03642806\ta portable computer small enough to use in your lap\nn03643149\ta medical instrument for examining the larynx\nn03643253\tan acronym for light amplification by stimulated emission of radiation; an optical device that produces an intense monochromatic beam of coherent light\nn03643491\ta smart bomb that seeks the laser light reflected off of the target and uses it to correct its descent\nn03643737\telectrostatic printer that focuses a laser beam to 
form images that are transferred to paper electrostatically\nn03643907\tleather strip that forms the flexible part of a whip\nn03644073\trope that is used for fastening something to something else\nn03644378\ta long noosed rope used to catch animals\nn03644858\tcatch for fastening a door or gate; a bar that can be lowered or slid into a groove\nn03645011\tspring-loaded doorlock that can only be opened from the outside with a key\nn03645168\ta leather strap or thong used to attach a sandal or shoe to the foot\nn03645290\tkey for raising or drawing back a latch or opening an outside door\nn03645577\ta triangular fore-and-aft sail used especially in the Mediterranean\nn03646020\ta water-base paint that has a latex binder\nn03646148\ta narrow thin strip of wood used as backing for plaster or to make latticework\nn03646296\tmachine tool for shaping metal or wood; the workpiece turns about a horizontal axis against a fixed tool\nn03646809\ta public toilet in a military area\nn03646916\tframework consisting of an ornamental design made of strips of wood or metal\nn03647423\ta motorboat with an open deck or a half deck\nn03647520\tarmament in the form of a device capable of launching a rocket\nn03648219\tgarments or white goods that can be cleaned by laundering\nn03648431\thandcart for moving a load of laundry\nn03648667\tvan that picks up and delivers laundry\nn03649003\ta skirt consisting of a rectangle of calico or printed cotton; worn by Polynesians (especially Samoans)\nn03649161\tjeweled pendant worn on a chain around the neck\nn03649288\t(Old Testament) large basin used by a priest in an ancient Jewish temple to perform ritual ablutions\nn03649674\tchair left outside for use on a lawn or in a garden\nn03649797\tfurniture intended for use on a lawn or in a garden\nn03649909\tgarden tool for mowing grass on lawns\nn03650551\tkit consisting of a complete outfit (clothing and accessories) for a new baby\nn03651388\ta battery with lead electrodes with dilute sulphuric 
acid as the electrolyte; each cell generates about 2 volts\nn03651605\twire connecting an antenna to a receiver or a transmitter to a transmission line\nn03651843\trein to direct the horse's head left or right\nn03652100\tpencil that has graphite as the marking substance\nn03652389\tlong narrow spring consisting of several layers of metal springs bracketed together\nn03652729\trough shelter whose roof has only one slope\nn03652826\ttent that is attached to the side of a building\nn03652932\trestraint consisting of a rope (or light chain) used to restrain an animal\nn03653110\tfabric made to look like leather\nn03653220\timplement consisting of a strip of leather\nn03653454\tvoltaic cell that produces approximately 1.5 volts\nn03653583\tdesk or stand with a slanted top used to hold a text at the proper height for a lecturer\nn03653740\tclassroom where lectures are given\nn03653833\tleather shorts often worn with suspenders; worn especially by men and boys in Bavaria\nn03653975\ttop rail of a fence or balustrade\nn03654576\ta cloth covering consisting of the part of a pair of trousers that covers a person's leg\nn03654826\tone of the supports for a piece of furniture\nn03655072\ta garment covering the leg (usually extending from the knee to the ankle)\nn03655470\tan electrostatic capacitor of historical interest\nn03655720\tinformal clothing designed to be worn when you are relaxing\nn03656484\ta transparent optical device used to converge or diverge transmitted light and to form images\nn03656957\telectronic equipment that uses a magnetic or electric field in order to focus a beam of electrons\nn03657121\tcap used to keep lens free of dust when not in use\nn03657239\ta clear plastic lens that is implanted in the eye; usually done when the natural lens has been removed in a cataract operation\nn03657511\ta tight-fitting garment of stretchy material that covers the body from the shoulders to the thighs (and may have long sleeves or legs reaching down to the ankles); 
worn by ballet dancers and acrobats for practice or performance\nn03658102\tcase for carrying letters\nn03658185\tdull knife used to cut open the envelopes in which letters are mailed or to slit uncut pages of books\nn03658635\tan embankment that is built in order to prevent a river from overflowing\nn03658858\tindicator that establishes the horizontal when a bubble is centered in a tube of liquid\nn03659292\ta rigid bar pivoted about a fulcrum\nn03659686\ta flat metal tumbler in a lever lock\nn03659809\ta simple machine that gives a mechanical advantage when given a fulcrum\nn03659950\ta lock whose tumblers are levers that must be raised to a given position so that the bolt can move\nn03660124\ta popular brand of jeans\nn03660562\ta slow cargo ship built during World War II\nn03660909\ta room where books are kept\nn03661043\ta building that houses a collection of books and other materials\nn03661340\ta movable top or cover (hinged or separate) for closing the opening at the top of a box, chest, jar, pan, etc.\nn03662301\ta condenser: during distillation the vapor passes through a tube that is cooled by water\nn03662452\ta polygraph that records bodily changes sometimes associated with lying\nn03662601\ta strong sea boat designed to rescue people from a sinking ship\nn03662719\ta life preserver in the form of a ring of buoyant material\nn03662887\tlife preserver consisting of a sleeveless jacket of buoyant or inflatable design\nn03663433\tlife assurance office\nn03663531\trescue equipment consisting of a buoyant belt or jacket to keep a person from drowning\nn03663910\tmedical equipment that assists or replaces important bodily functions and so enables a patient to live who otherwise might not survive\nn03664159\tequipment that makes life possible in otherwise deadly environmental conditions\nn03664675\ta device for lifting heavy loads\nn03664840\tpump used to lift rather than force a liquid up\nn03664943\tany connection or unifying bond\nn03665232\ta metal band 
used to attach a reed to the mouthpiece of a clarinet or saxophone\nn03665366\tany device serving as a source of illumination\nn03665851\ta rifle or pistol\nn03665924\telectric lamp consisting of a transparent or translucent glass housing containing a wire filament (usually tungsten) that emits light when heated by electricity\nn03666238\twiring that provides power to electric lights\nn03666362\tdiode such that light emitted at a p-n junction is proportional to the bias current; color depends on the material used\nn03666591\ta device for lighting or igniting fuel or charges or fires\nn03666917\taircraft supported by its own buoyancy\nn03667060\ta transparent filter that reduces the light (or some wavelengths of the light) passing through it\nn03667235\tapparatus for supplying artificial light effects for the stage or a film\nn03667552\ta submachine gun not greater than .30 millimeter\nn03667664\tphotographic equipment that measures the intensity of light\nn03667829\tmicroscope consisting of an optical instrument that magnifies the image of an object\nn03668067\ta metallic conductor that is attached to a high point and leads to the ground; protects the building from destruction by lightning\nn03668279\t(computer science) a pointer that when pointed at a computer display senses whether or not the spot is illuminated\nn03668488\ta ship equipped like a lighthouse and anchored where a permanent lighthouse would be impracticable\nn03668803\ta type of inflatable air mattress\nn03669245\ta two-wheeled horse-drawn vehicle used to pull a field gun or caisson\nn03669534\ta kiln used to reduce naturally occurring forms of calcium carbonate to lime\nn03669886\t(electronics) a nonlinear electronic circuit whose output is limited in amplitude; used to limit the instantaneous amplitude of a waveform (to clip off the peaks of a waveform)\nn03670208\tlarge luxurious car; usually driven by a chauffeur\nn03671914\tions are accelerated along a linear path by voltage differences on 
electrodes along the path\nn03672521\ta fabric woven with fibers from the flax plant\nn03672827\tprinter that serves as an output device on a computer; prints a whole line of characters at a time\nn03673027\ta large commercial ship (especially one that carries passengers on a regular schedule)\nn03673270\ta piece of cloth that is used as the inside surface of a garment\nn03673450\twomen's underwear and nightclothes\nn03673767\ta protective covering that protects an inside surface\nn03674270\tan interconnecting circuit between two or more locations for the purpose of transmitting and receiving data\nn03674440\ta mechanical system of rods or springs or pivots that transmits power or motion\nn03674731\tan early form of flight simulator\nn03674842\ta design carved in relief into a block of linoleum\nn03675076\ta knife having a short stiff blade with a curved point used for cutting linoleum\nn03675235\ta typesetting machine operated from a keyboard that casts an entire line as a single slug of metal\nn03675445\ta rough fabric of linen warp and wool or cotton woof\nn03675558\ta stick about a meter long with a point on one end (to stick in the ground) and a forked head on the other end (to hold a lighted match); formerly used to fire cannons\nn03675907\ta type of forceps\nn03676087\tmakeup that makes the lips shiny\nn03676483\tmakeup that is used to color the lips\nn03676623\ta small glass for serving a small amount of liqueur (typically after dinner)\nn03676759\ta digital display that uses liquid crystal cells that change reflectivity in an applied electric field; used for portable computer displays and watches etc.\nn03677115\ta nuclear reactor using liquid metal as a coolant\nn03677682\ta fabric woven with lisle thread\nn03677766\tmoldboard plow with a double moldboard designed to move dirt to either side of a central furrow\nn03678558\tbin (usually in or outside a public building) into which the public can put rubbish\nn03678729\ta small theater for experimental drama 
or collegiate or community groups\nn03678879\tthe axle of a self-propelled vehicle that provides the driving power\nn03679384\thousing available for people to live in\nn03679712\ta room in a private house or establishment where people can sit and talk and relax\nn03680248\telectrical device to which electrical power is delivered\nn03680355\ta low leather step-in shoe; the top resembles a moccasin but it has a broad flat heel\nn03680512\ta car that is lent as a replacement for one that is under repair\nn03680734\ta rounded projection that is part of a larger structure\nn03680858\ttrap for catching lobsters\nn03680942\tpublic transport consisting of a bus or train that stops at all stations or stops\nn03681477\ta local computer network for communication between computers; especially a network connecting computers and word processors and other electronic office equipment to create a communication system between offices\nn03681813\tan oscillator whose output heterodynes with the incoming radio signal to produce sum and difference tones\nn03682380\ta battle-ax formerly used by Scottish Highlanders\nn03682487\ta fastener fitted to a door or drawer to keep it firmly closed\nn03682877\ta restraint incorporated into the ignition switch to prevent the use of a vehicle by persons who do not have the key\nn03683079\tenclosure consisting of a section of canal that can be closed to control the water level; used to raise or lower vessels that pass through it\nn03683341\ta mechanism that detonates the charge of a gun\nn03683457\ta system of locks in a canal or waterway\nn03683606\ta fastener that locks or closes\nn03683708\ta room (as at an athletic facility or workplace) where you can change clothes and which contains lockers for the temporary storage of your clothing and personal possessions\nn03683995\ta small ornamental case; usually contains a picture or a lock of hair and is worn on a necklace\nn03684143\ta gate that can be locked\nn03684224\tpliers that can be locked in 
place\nn03684489\twasher that prevents a nut from loosening\nn03684611\tmachine stitch in which the top thread interlocks with the bobbin thread\nn03684740\tjail in a local police station\nn03684823\ta wheeled vehicle consisting of a self-propelled engine that is used to draw trains along railway tracks\nn03685307\tany of various Native American dwellings\nn03685486\ta small (rustic) house used as a temporary shelter\nn03685640\tsmall house at the entrance to the grounds of a country mansion; usually occupied by a gatekeeper or gardener\nn03685820\ta house where rooms are rented\nn03686130\tfloor consisting of open space at the top of a house just below roof; often used for storage\nn03686363\ta raised shelter in which pigeons are kept\nn03686470\tfloor consisting of a large unpartitioned space over a factory or warehouse or other commercial space\nn03686924\ta cabin built with logs\nn03687137\ta roofed arcade or gallery with open sides stretching along the front or side of a building; often at an upper level\nn03687928\ta powerful wooden bow drawn by hand; usually 5-6 feet long; used in medieval England\nn03688066\tan iron with a long shaft and a steep face; for hitting long low shots\nn03688192\twarm underwear with long legs\nn03688405\ta sleeve extending from shoulder to wrist\nn03688504\ta long swivel cannon formerly used by the navy\nn03688605\ttrousers reaching to the foot\nn03688707\tan undergarment with shirt and drawers in one piece\nn03688832\ta mirror; usually a ladies' dressing mirror\nn03688943\ta structure commanding a wide view of its surroundings\nn03689157\ta textile machine for weaving yarn into a textile\nn03689570\tany of various knots used to make a fixed loop in a rope\nn03690168\teyeglasses that are held to the eyes with a long handle\nn03690279\ta cross with two crossbars, one above and one below the midpoint of the vertical, the lower longer than the upper\nn03690473\ta large truck designed to carry heavy loads; usually without 
sides\nn03690851\ta globular water bottle used in Asia\nn03690938\tany of various cosmetic preparations that are applied to the skin\nn03691459\telectro-acoustic transducer that converts electrical signals into sounds loud enough to be heard at a distance\nn03691817\ta room (as in a hotel or airport) with seating where people can wait\nn03692004\tan article of clothing designed for comfort and leisure wear\nn03692136\ta man's soft jacket usually with a tie belt; worn at home\nn03692272\tpajamas worn while lounging\nn03692379\tclothing suitable for relaxation\nn03692522\tsmall magnifying glass (usually set in an eyepiece) used by jewelers and horologists\nn03692842\ta window with glass louvers\nn03693293\ta stylized or decorative knot used as an emblem of love\nn03693474\tsmall sofa that seats two people\nn03693707\ta large drinking vessel (usually with two handles) that people drink out of in turn at a banquet\nn03693860\ta low chest or table with drawers and supported on four legs\nn03694196\ta filter that passes frequencies below a certain value and attenuates frequencies above that value\nn03694356\ta handloom in which the warp is carried horizontally; for weaving tapestry\nn03694639\ta long-playing phonograph record; designed to be played at 33.3 rpm\nn03694761\ta square plate bearing the letter L that is attached to both ends of a car to indicate that the driver is a learner\nn03694949\thole in a platform on a mast through which a sailor can climb without going out on the shrouds\nn03695122\tmechanical system of lubricating internal combustion engines in which a pump forces oil into the engine bearings\nn03695452\t(nautical) the forward edge of a fore-and-aft sail that is next to the mast\nn03695616\ta projecting piece that is used to lift or support or turn something\nn03695753\ta racing sled for one or two people\nn03695857\ta German semiautomatic pistol\nn03695957\tcarrier (as behind a bicycle seat) for luggage\nn03696065\tcompartment in an automobile that 
carries luggage or shopping or tools\nn03696301\tcarrier for holding luggage above the seats of a train or on top of a car\nn03696445\tsmall fishing boat rigged with one or more lugsails\nn03696568\ta sail with four corners that is hoisted from a yard that is oblique to the mast\nn03696746\ta wrench with jaws that have projecting lugs to engage the object that is to be rotated\nn03696909\ta short warm outer jacket\nn03697007\ta mill for dressing logs and lumber\nn03697366\ta spacecraft that carries astronauts from the command module to the surface of the moon and back\nn03697552\ta restaurant (in a facility) where lunch can be purchased\nn03697812\ttemporary fortification like a detached bastion\nn03697913\ta long piece of brightly colored cloth (cotton or silk) used as clothing (a skirt or loincloth or sash etc.) in India and Pakistan and Burma\nn03698123\ta crescent-shaped metal ornament of the Bronze Age\nn03698226\tpottery with a metallic sheen produced by adding metallic oxides to the glaze\nn03698360\tchordophone consisting of a plucked instrument having a pear-shaped body, a usually bent neck, and a fretted fingerboard\nn03698604\ta liner equipped for sumptuous living\nn03698723\ta public hall for lectures and concerts\nn03698815\ta roofed gate to a churchyard, formerly used as a temporary shelter for the bier during funerals\nn03699280\ta harp used by ancient Greeks for accompaniment\nn03699591\ta large heavy knife used in Central and South America as a weapon or for cutting vegetation\nn03699754\ta projecting parapet supported by corbels on a medieval castle; has openings through which stones or boiling water could be dropped on an enemy\nn03699975\tany mechanical or electrical device that transmits or modifies energy to perform or assist in the performance of human tasks\nn03700963\ta device for overcoming resistance at one point by applying force at some other point\nn03701191\ta bolt with a square or hexagonal head on one end and a threaded shaft on the 
other end; tightened with a wrench; used to connect metal parts\nn03701391\ta rapidly firing automatic gun (often mounted)\nn03701640\tmachines or machine systems collectively\nn03701790\ta screw used either with a nut or with a tapped hole; slotted head can be driven by a screwdriver\nn03702248\ta powered machine for cutting or shaping or finishing metals or other materials\nn03702440\ta vise with two parallel iron jaws and a wide opening below\nn03702582\tspeedometer for measuring the speed of an aircraft relative to the speed of sound\nn03703075\ta heavy woolen cloth heavily napped and felted, often with a plaid design\nn03703203\ta flat-bottomed boat used on upper Great Lakes\nn03703463\ta short plaid coat made of made of thick woolen material\nn03703590\ta lightweight waterproof (usually rubberized) fabric\nn03703730\ta relatively coarse lace; made by weaving and knotting cords\nn03703862\ta light patterned cotton cloth\nn03703945\tan inflatable life jacket\nn03704549\ta rack for displaying magazines\nn03704834\tan early form of slide projector\nn03705379\t(physics) a device that attracts iron and produces a magnetic field\nn03705808\tcontainer consisting of any configuration of magnetic fields used to contain a plasma during controlled thermonuclear reactions\nn03706229\tcompass based on an indicator (as a magnetic needle) that points to the magnetic north\nn03706415\t(computer science) a computer memory consisting of an array of magnetic cores; now superseded by semiconductor memories\nn03706653\t(computer science) a memory device consisting of a flat disk covered with a magnetic coating on which information is stored\nn03706939\tan electromagnet (as on a tape recorder) that converts electrical variations into magnetic variations that can be stored on a surface and later retrieved\nn03707171\t(nautical) a marine mine that is detonated by a mechanism that responds to magnetic material (as the steel hull of a ship)\nn03707372\ta slender magnet suspended in a 
magnetic compass on a mounting with little friction; used to indicate the direction of the earth's magnetic pole\nn03707597\trecorder consisting of equipment for making records on magnetic media\nn03707766\ta short strip of magnetic tape attached to a credit card or debit card; it contains data that will tell a reading device who you are and what your account number is, etc.\nn03708036\tmemory device consisting of a long thin plastic strip coated with iron oxide; used to record audio or video signals or to store computer information\nn03708425\ta small dynamo with a secondary winding that produces a high voltage enabling a spark to jump between the poles of a spark plug in a gasoline engine\nn03708843\ta meter to compare strengths of magnetic fields\nn03708962\ta diode vacuum tube in which the flow of electrons from a central cathode to a cylindrical anode is controlled by crossed magnetic and electric fields; used mainly in microwave oscillators\nn03709206\ta scientific instrument that magnifies an image\nn03709363\ta large wine bottle for liquor or wine\nn03709545\ta rolling hitch similar to a clove hitch\nn03709644\ta conveyance that transports the letters and packages that are conveyed by the postal system\nn03709823\tletter carrier's shoulder bag\nn03709960\tpouch used in the shipment of mail\nn03710079\ta boat for carrying mail\nn03710193\ta private box for delivery of mail\nn03710294\ta railway car in which mail is transported and sorted\nn03710421\ta drop where mail can be deposited\nn03710528\ta container for something to be mailed\nn03710637\ttights for dancers or gymnasts\nn03710721\ta woman's one-piece bathing suit\nn03710937\ta sorter for sorting mail according to the address\nn03711044\ta train that carries mail\nn03711711\ta large digital computer serving 100-400 users and occupying a special air-conditioned room\nn03711999\tthe chief mast of a sailing vessel with two or more masts\nn03712111\trotor consisting of large rotating airfoils on a 
single-rotor helicopter that produce the lift to support the helicopter in the air\nn03712337\tthe lowermost sail on the mainmast\nn03712444\tthe most important spring in a mechanical device (especially a clock or watch); as it uncoils it drives the mechanism\nn03712887\tthe topmast next above the mainmast\nn03712981\ta topsail set on the mainmast\nn03713069\tyard for a square mainsail\nn03713151\ta small house\nn03713436\thighly decorated earthenware with a glaze of tin oxide\nn03714235\tcosmetics applied to the face to improve or change your appearance\nn03715114\treflecting telescope in which the aberration of the concave mirror is reduced by a meniscus lens\nn03715275\ta cane made from the stem of a rattan palm\nn03715386\ta tool resembling a hammer but with a large head (usually wooden); used to drive wedges or ram down paving stones or for crushing or beating or flattening or smoothing\nn03715669\ta light drumstick with a rounded head that is used to strike such percussion instruments as chimes, kettledrums, marimbas, glockenspiels, etc.\nn03715892\ta sports implement with a long handle and a head like a hammer; used in sports (polo or croquet) to hit a ball\nn03716228\tX-ray film of the soft tissue of the breast\nn03716887\tan early type of mandolin\nn03716966\ta stringed instrument related to the lute, usually played with a plectrum\nn03717131\ta container (usually in a barn or stable) from which cattle or horses feed\nn03717285\tclothes dryer for drying and ironing laundry by passing it between two heavy heated rollers\nn03717447\ta hole (usually with a flush cover) through which a person can gain access to an underground structure\nn03717622\ta flush iron cover for a manhole (as in a street)\nn03718212\ta warship intended for combat\nn03718335\ta pressure gauge for comparing pressures of a gas\nn03718458\tthe mansion of a lord or wealthy person\nn03718581\tthe large room of a manor or castle\nn03718699\ta man-portable surface-to-air missile\nn03718789\ta 
hip roof having two slopes on each side\nn03718935\tthe residence of a clergyman (especially a Presbyterian clergyman)\nn03719053\ta large and imposing house\nn03719343\tshelf that projects from wall above fireplace\nn03719560\tshort cape worn by women\nn03719743\ta woman's silk or lace scarf\nn03720005\ta light weight jacket with a high collar; worn by Mao Zedong and the Chinese people during his regime\nn03720163\ta diagrammatic representation of the earth's surface (or part of it)\nn03720665\tan assembly plant in Mexico (near the United States border); parts are shipped into Mexico and the finished product is shipped back across the border\nn03720891\ta percussion instrument consisting of a hollow gourd containing pebbles or beans; often played in pairs\nn03721047\ta small ball of glass that is used in various games\nn03721252\tequipage for marching\nn03721384\ta percussion instrument with wooden bars tuned to produce a chromatic scale and with resonators; played with small mallets\nn03721590\ta fancy dock for small yachts and cabin cruisers\nn03722007\ta writing implement for making a mark\nn03722288\tan area in a town where a public mercantile establishment is set up\nn03722646\ta pointed iron hand tool that is used to separate strands of a rope or cable (as in splicing)\nn03722944\ta dress crepe; similar to Canton crepe\nn03723153\tpermanent canopy over an entrance of a hotel etc.\nn03723267\tinlaid veneers are fitted together to form a design or picture that is then used to ornament furniture\nn03723439\tthe bed shared by a newly wed couple\nn03723781\ta circular masonry fort for coastal defence\nn03723885\ta harness strap that connects the nose piece to the girth; prevents the horse from throwing back its head\nn03724066\tmakeup that is used to darken and thicken the eye lashes\nn03724176\tan acronym for microwave amplification by stimulated emission of radiation; an amplifier that works on the same principle as a laser and emits coherent microwave 
radiation\nn03724417\ta kitchen utensil used for mashing (e.g. potatoes)\nn03724538\tmiddle-distance iron\nn03724623\tiron with a lofted face for hitting high shots to the green\nn03724756\t(Islam) a Muslim place of worship\nn03724870\ta covering to disguise or conceal the face\nn03725035\ta protective covering worn over the face\nn03725506\ta type of fiberboard\nn03725600\ta glass jar with an air-tight screw top; used in home canning\nn03725717\tstructure built of stone or brick by a mason\nn03725869\ta level longer than a carpenter's level\nn03726116\ta business establishment that offers therapeutic massage\nn03726233\ta place where illicit sex is available under the guise of therapeutic massage\nn03726371\ta mass spectrometer that produces a graphical representation of the mass spectrum\nn03726516\tspectroscope for obtaining a mass spectrum by deflecting ions into a thin slit and measuring the ion current with an electrometer\nn03726760\ta vertical spar for supporting sails\nn03726993\tany sturdy upright pole\nn03727067\tan ancient Egyptian mud-brick tomb with a rectangular base and sloping sides and flat roof\nn03727465\tthe principal bedroom in a house; usually occupied by the head of the household\nn03727605\tthe most outstanding work of a creative artist or craftsman\nn03727837\ta thick flat pad used as a floor covering\nn03727946\tsports equipment consisting of a piece of thick padding on the floor for gymnastic sports\nn03728437\tlighter consisting of a thin piece of wood or cardboard tipped with combustible chemical; ignites with friction\nn03728982\ta burning piece of wood or cardboard\nn03729131\ta board that has a groove cut into one edge and a tongue cut into the other so they fit tightly together (as in a floor)\nn03729308\ta small folder of paper safety matches\nn03729402\ta box for holding matches\nn03729482\tan early style of musket; a slow-burning wick would be lowered into a hole in the breech to ignite the charge\nn03729647\ta plane having 
cutters designed to make the tongues and grooves on the edges of matchboards\nn03729826\ta short thin stick of wood used in making matches\nn03729951\tthings needed for doing or making something\nn03730153\tequipment and supplies of a military force\nn03730334\ta hospital that provides care for women during pregnancy and childbirth and for newborn infants\nn03730494\ta hospital ward that provides care for women during pregnancy and childbirth and for newborn infants\nn03730655\tmold used in the production of phonograph records, type, or other relief surface\nn03730788\ta kind of stopper knot\nn03730893\ta covering of coarse fabric (usually of straw or hemp)\nn03731019\ta kind of pick that is used for digging; has a flat blade set at right angles to the handle\nn03731483\tbedclothes that provide a cover for a mattress\nn03731695\ta heavy long-handled hammer used to drive stakes or wedges\nn03731882\ta long stick that a painter uses to support the hand holding the brush\nn03732020\ttrademark for a repeating rifle or pistol\nn03732114\ta large burial chamber, usually above ground\nn03732458\ta long skirt ending below the calf\nn03732543\tan obsolete water-cooled machine gun having a single barrel\nn03732658\tthermometer that records the highest and lowest temperatures reached during a period of time\nn03733131\ta vertical pole or post decorated with streamers that can be held by dancers celebrating May Day\nn03733281\tcomplex system of paths or tunnels in which it is easy to get lost\nn03733465\ta large hardwood drinking bowl\nn03733547\tan instrumentality for accomplishing some end\nn03733644\ta container of some standard capacity that is used to obtain fixed amounts of a substance\nn03733805\tgraduated cup used to measure liquid or granular ingredients\nn03733925\tinstrument that shows the extent or amount or quantity or degree of something\nn03735637\tmeasuring instrument having a sequence of marks at regular intervals; used as a reference in making 
measurements\nn03735963\tcounter where meats are displayed for sale\nn03736064\ta mill for grinding meat\nn03736147\ta strong pointed hook from which the carcasses of animals are hung\nn03736269\ta small house (on a farm) where meat is stored\nn03736372\ta safe for storing meat\nn03736470\ta thermometer that is inserted into the center of a roast (with the top away from the heat source); used to measure how well done the meat is\nn03736970\tmechanism consisting of a device that works on mechanical principles\nn03738066\ta mechanically operated piano that uses a roll of perforated paper to activate the keys\nn03738241\ta system of elements that interact on mechanical principles\nn03738472\tdevice consisting of a piece of machinery; has moving parts that perform some function\nn03739518\tbuilding where medicine is practiced\nn03739693\tinstrument used in the practice of medicine\nn03742019\theavy ball used in physical training\nn03742115\tcabinet that holds medicines and toiletries\nn03742238\tthe computer-based telephone system of the United States National Library of Medicine that provides rapid linkage to MEDLARS\nn03743016\tmemorial consisting of a very large stone forming part of a prehistoric structure (especially in western Europe)\nn03743279\ta cone-shaped acoustic device held to the mouth to intensify and direct the human voice\nn03743902\ta structure erected to commemorate persons or events\nn03744276\tan electronic memory device\nn03744684\ta RAM microchip that can be plugged into a computer to provide additional memory\nn03744840\ta device that preserves information for retrieval\nn03745146\tthe facility where wild animals are housed for exhibition\nn03745487\tgarments that must be repaired\nn03745571\ta tall upright megalith; found primarily in England and northern France\nn03746005\t(Judaism) a candelabrum with nine branches; used during the Hanukkah festival\nn03746155\t(Judaism) a candelabrum with seven branches used in ceremonies to symbolize the 
seven days of Creation\nn03746330\tclothing that is designed for men to wear\nn03746486\ta public toilet for men\nn03748162\ta place of business for retailing goods\nn03749504\tbarometer that shows pressure by the height of a column of mercury\nn03749634\ta primary cell consisting of a zinc anode and a cathode of mercury oxide and an electrolyte of potassium hydroxide\nn03749807\tthermometer consisting of mercury contained in a bulb at the bottom of a graduated sealed glass capillary tube marked in degrees Celsius or Fahrenheit; mercury expands with a rise in temperature causing a thin thread of mercury to rise in the tube\nn03750206\tultraviolet lamp that emits a strong bluish light (rich in ultraviolet radiation) as electric current passes through mercury vapor\nn03750437\tthe golden covering of the ark of the covenant\nn03750614\ta solid section between two crenels in a crenelated battlement\nn03751065\ta (large) military dining room where service personnel eat or relax\nn03751269\twaist-length jacket tapering to a point at the back; worn by officers in the mess for formal dinners\nn03751458\tkit containing a metal dish and eating utensils; used by soldiers and campers\nn03751590\t(law) a dwelling house and its adjacent buildings and the adjacent land used by the household\nn03751757\tdetector that gives a signal when it detects the presence of metal; used to detect the presence of stray bits of metal in food products or to find buried metal\nn03752071\ta fabric made of a yarn that is partly or entirely of metal\nn03752185\tscrew made of metal\nn03752398\tgolf wood with a metal head instead of the traditional wooden head\nn03752922\ta small unmanned balloon set aloft to observe atmospheric conditions\nn03753077\tany of various measuring instruments for measuring a quantity\nn03753514\ta rule one meter long (usually marked off in centimeters and millimeters)\nn03757604\tclicking pendulum indicates the exact tempo of a piece of music\nn03758089\tintermediate floor 
just above the ground floor\nn03758220\tfirst or lowest balcony\nn03758894\tbalance for weighing very small objects\nn03758992\ta small brewery; consumption of the product is mainly elsewhere\nn03759243\tsmall sheet of microfilm on which many pages of material have been photographed; a magnification system is used to read the material\nn03759432\tfilm on which materials are photographed at greatly reduced size; useful for storage; a magnification system is used to read the material\nn03759661\tcaliper for measuring small distances\nn03759954\tdevice for converting sound waves into electrical energy\nn03760310\tintegrated circuit semiconductor chip that performs the bulk of the processing and controls the parts of a system\nn03760671\tmagnifier of the image of small objects\nn03760944\tscientific instrument that cuts thin slices of something for microscopic examination\nn03761084\tkitchen appliance that cooks food by passing an electromagnetic wave through it; heat results from the absorption of energy by the water molecules in the food\nn03761588\tdiathermy machine that uses microwave radiation as the source of heat\nn03761731\tlinear accelerator that uses microwaves\nn03762238\tblouse with a sailor collar\nn03762332\tlong iron with a nearly vertical face\nn03762434\t(Islam) a niche in the wall of a mosque that indicates the direction of Mecca\nn03762602\t(Islam) a design in the shape of niche in a Muslim prayer rug; during worship the niche must be pointed toward Mecca\nn03762982\thospital for soldiers and other military personnel\nn03763727\tliving quarters for personnel on a military post\nn03763968\tprescribed identifying uniform for soldiers\nn03764276\tvehicle used by the armed forces\nn03764606\tsnack bar that sells milk drinks and light refreshments (such as ice cream)\nn03764736\tlarge can for transporting milk\nn03764822\ta van (typically powered by electricity) with an open side that is used to deliver milk to houses\nn03764995\tmachine consisting of a 
suction apparatus for milking cows mechanically\nn03765128\tlow three-legged stool with a half round seat; used to sit on while milking a cow\nn03765467\twagon for delivering milk\nn03765561\tmachinery that processes materials by grinding or crushing\nn03765934\tdam to make a millpond to provide power for a water mill\nn03766044\tmachine tool in which metal that is secured to a carriage is fed against rotating cutters that shape it\nn03766218\ta sensitive ammeter graduated in milliamperes\nn03766322\thats for women; the wares sold by a milliner\nn03766508\tshop selling women's hats\nn03766600\tcorrugated edge of a coin\nn03766697\tsensitive voltmeter that can measure voltage in millivolts\nn03766935\tone of a pair of heavy flat disk-shaped stones that are rotated against one another to grind the grain\nn03767112\tany load that is difficult to carry\nn03767203\twater wheel that is used to drive machinery in a mill\nn03767459\ta rotary duplicator that uses a stencil through which ink is pressed (trade mark Roneo)\nn03767745\tslender tower with balconies\nn03767966\ta kitchen utensil that cuts or chops food (especially meat) into small pieces\nn03768132\texplosive device that explodes on contact; designed to destroy vehicles or ships or to kill or maim personnel\nn03768683\tdetector consisting of an electromagnetic device; used to locate explosive mines\nn03768823\tship equipped for laying marine mines\nn03768916\texcavation consisting of a vertical or sloping passageway for finding or mining ore or for ventilating a mine\nn03769610\tsideboard with compartments for holding bottles\nn03769722\tsmall motorcycle with a low frame and small wheels and elevated handlebars\nn03769881\ta light bus (4 to 10 passengers)\nn03770085\ta car that is even smaller than a subcompact car\nn03770224\ta digital computer of medium size\nn03770316\tbuilding where the business of a government department is transacted\nn03770439\ta very short skirt\nn03770520\tsubmersible vessel for one or 
two persons; for naval operations or underwater exploration\nn03770679\ta small box-shaped passenger van; usually has removable seats; used as a family car\nn03770834\ttrimming on ceremonial robes consisting of white or light grey fur\nn03770954\tfur coat made from the soft lustrous fur of minks\nn03772077\tany of certain cathedrals and large churches; originally connected to a monastery\nn03772269\ta plant where money is coined by authority of the government\nn03772584\tpoints to the minutes\nn03772674\ta strategic weapon system using a guided missile of intercontinental range; missiles are equipped with nuclear warheads and dispersed in hardened silos\nn03773035\tpolished surface that forms images by reflecting light\nn03773504\ta rocket carrying a warhead of conventional or nuclear explosives; may be ballistic or directed by remote control\nn03773835\tnaval weaponry providing a defense system\nn03774327\thand tool for guiding handsaws in making crosscuts or miter joints\nn03774461\tjoint that forms a corner; usually both sides are bevelled at a 45-degree angle to form a 90-degree corner\nn03775071\tglove that encases the thumb separately and the other four fingers together\nn03775199\ta kitchen utensil that is used for mixing foods\nn03775388\telectronic equipment that mixes two or more input signals to give a single output signal\nn03775546\tbowl used with an electric mixer\nn03775636\tsingle faucet for separate hot and cold water pipes\nn03775747\tfore-and-aft sail set on the mizzenmast\nn03775847\tthird mast from the bow in a vessel having three or more masts; the after and shorter mast of a yawl, ketch, or dandy\nn03776167\tlarge high frilly cap with a full crown; formerly worn indoors by women\nn03776460\ta large house trailer that can be connected to utilities and can be parked in one place and used as permanent housing\nn03776877\tsoft leather shoe; originally worn by Native Americans\nn03776997\tfull-scale working model of something built for study or 
testing or display\nn03777126\tmodern convenience; the appliances and conveniences characteristic of a modern house\nn03777568\tthe first widely available automobile powered by a gasoline engine; mass-produced by Henry Ford from 1908 to 1927\nn03777754\t(from a combination of MOdulate and DEModulate) electronic equipment consisting of a device used to connect computers by a telephone line\nn03778459\t(architecture) one of a set of ornamental brackets under a cornice\nn03778817\tcomputer circuit consisting of an assembly of electronic components (as of computer hardware)\nn03779000\tdetachable compartment of a spacecraft\nn03779128\tfabric made with yarn made from the silky hair of the Angora goat\nn03779246\tsilk fabric with a wavy surface pattern\nn03779370\tcontainer into which liquid is poured to create a given shape when it hardens\nn03779884\twedge formed by the curved part of a steel plow blade that turns the furrow\nn03780047\tplow that has a moldboard\nn03780799\ta durable cotton fabric with a velvety nap\nn03781055\ta crude incendiary bomb made of a bottle filled with flammable liquid and fitted with a rag wick\nn03781244\tthe residence of a religious community\nn03781467\ta long loose habit worn by monks in a monastery\nn03781594\ta drawstring bag for holding money\nn03781683\tbelt with a concealed section for holding money\nn03781787\ta piece of electronic equipment that keeps track of the operation of a system continuously and warns of trouble\nn03782006\telectronic equipment that is used to check the quality or content of electronic transmissions\nn03782190\tdisplay produced by a device that takes signals and displays them on a television screen or a computer monitor\nn03782794\tadjustable wrench that has one fixed and one adjustable jaw\nn03782929\ta heavy cloth in basket weave\nn03783304\tpainting done in a range of tones of a single color\nn03783430\tlens for correcting defective vision in one eye; held in place by facial muscles\nn03783575\ta lens 
with a single focus that is used after cataract surgery to provide clear distance vision\nn03783873\tan airplane with a single wing\nn03784139\ta typesetting machine operated from a keyboard that sets separate characters\nn03784270\t(Roman Catholic Church) a vessel (usually of gold or silver) in which the consecrated Host is exposed for adoration\nn03784793\ta tower for mooring airships\nn03784896\ta round arch that widens before rounding off\nn03785016\ta motorbike that can be pedaled or driven by a low-powered gasoline engine\nn03785142\tthe handle of a mop\nn03785237\ta thick velvety synthetic fabric used for carpets and soft upholstery\nn03785499\ta building (or room) where dead bodies are kept before burial or cremation\nn03785721\ta metal helmet worn by common soldiers in the 16th century\nn03786096\ta woman's informal dress for housework\nn03786194\tformal attire for men during the daytime\nn03786313\ta sitting room used during the daylight hours\nn03786621\tan armchair with an adjustable back\nn03786715\ta muzzle-loading high-angle gun with a short barrel that fires shells at high elevations for a short range\nn03786901\ta bowl-shaped vessel in which substances can be ground and mixed with a pestle\nn03787032\tan academic cap with a flat square with a tassel on top\nn03787523\ta joint made by inserting tenon on one piece into mortise holes in the other\nn03788047\ttransducer formed by the light-sensitive surface on a television camera tube\nn03788195\t(Islam) a Muslim place of worship that usually has a minaret\nn03788365\ta fine net or screen (especially around beds) to protect against mosquitos\nn03788498\ta motor hotel\nn03788601\ta sleeping room in a motel\nn03788914\ta woman's loose unbelted dress\nn03789171\ta camera that takes a sequence of photographs that can give the illusion of motion when viewed in rapid succession\nn03789400\tphotographic film several hundred feet long and wound on a spool; to be used in a movie camera\nn03789603\ta 
multicolored woolen fabric woven of mixed threads in 14th to 17th century England\nn03789794\ta garment made of motley (especially a court jester's costume)\nn03789946\tmachine that converts other forms of energy into mechanical energy and so imparts motion\nn03790230\ta boat propelled by an internal-combustion engine\nn03790512\ta motor vehicle with two wheels and a strong frame\nn03790755\ta hotel for motorists; provides direct access from rooms to parking area\nn03790953\ta wheelchair propelled by a motor\nn03791053\ta wheeled vehicle with small wheels and a low-powered gasoline engine geared to the rear wheel\nn03791235\ta self-propelled wheeled vehicle that does not run on rails\nn03792048\tstructure consisting of an artificial heap or bank usually of earth or stones\nn03792334\t(baseball) the slight elevation on which the pitcher stands\nn03792526\ta mounting consisting of a piece of metal (as in a ring or other jewelry) that holds a gem in place\nn03792782\ta bicycle with a sturdy frame and fat tires; originally designed for riding in mountainous country\nn03792972\ta lightweight tent with a floor; flaps close with a zipper\nn03793489\ta hand-operated electronic device that controls the coordinates of a cursor on your computer screen as you move it around on a pad; on the bottom of the device is a ball that rolls on the surface of the pad\nn03793850\ta push button on the mouse\nn03794056\ta trap for catching mice\nn03794136\ttoiletry consisting of an aerosol foam used in hair styling\nn03794798\tthe aperture of a wind instrument into which the player blows directly\nn03795123\tan acoustic device; the part of a telephone into which a person speaks\nn03795269\t(especially boxing) equipment that protects an athlete's mouth\nn03795758\tthe driving and regulating parts of a mechanism (as of a watch or clock)\nn03795976\tprojects successive frames from a reel of film to create moving pictures\nn03796181\ta galvanometer that is operated by the force exerted by an 
electric current flowing in a movable coil suspended in a magnetic field\nn03796401\ta van used for moving home or office furniture\nn03796522\ta brick made from baked mud\nn03796605\ta curved piece above the wheel of a bicycle or motorcycle to protect the rider from water or mud thrown up by the wheels\nn03796848\ta reed hut in the marshlands of Iraq; rare since the marshes were drained\nn03796974\ta warm tubular covering for the hands\nn03797062\ta kiln with an inner chamber for firing things at a low temperature\nn03797182\ta scarf worn around the neck\nn03797264\tcivilian dress worn by a person who is entitled to wear a military uniform\nn03797390\twith handle and usually cylindrical\nn03797896\ta protective covering of rotting vegetable matter spread to reduce evaporation and soil erosion\nn03798061\ta slipper that has no fitting around the heel\nn03798442\ta recorder with two or more channels; makes continuous records of two or more signals simultaneously\nn03798610\ta plane with two or more engines\nn03798982\ta movie theater than has several different auditoriums in the same building\nn03799113\ta device that can interleave two or more activities\nn03799240\ta computer that uses two or more processing units under integrated control\nn03799375\ta rocket having two or more rocket engines (each with its own fuel) that are fired in succession and jettisoned when the fuel is exhausted\nn03799610\tmilitary supplies\nn03799876\ta bed that can be folded or swung into a cabinet when not being used\nn03800371\ta small bagpipe formerly popular in France\nn03800485\ta small simple oboe\nn03800563\ta depository for collecting and displaying objects having scientific or historical or artistic value\nn03800772\tan anchor used for semipermanent moorings; has a bowl-shaped head that will dig in however it falls\nn03800933\tany of various devices or contrivances that can be used to produce musical tones or sounds\nn03801353\tproduces music by means of pins on a revolving 
cylinder that strike the tuned teeth of a comb-like metal plate\nn03801533\ta theater in which vaudeville is staged\nn03801671\ta school specializing in music\nn03801760\ta light stand for holding sheets of printed music\nn03801880\ta stool for piano players; usually adjustable in height\nn03802007\ta muzzle-loading shoulder gun with a long barrel; formerly used by infantrymen\nn03802228\ta solid projectile that is shot by a musket\nn03802393\tplain-woven cotton fabric\nn03802643\ta drinking cup with a bar inside the rim to keep a man's mustache out of the drink\nn03802800\ta plaster containing powdered black mustard; applied to the skin as a counterirritant or rubefacient\nn03802973\ta device used to soften the tone of a musical instrument\nn03803116\tan obsolete firearm that was loaded through the muzzle\nn03803284\ta leather or wire restraint that fits over an animal's snout (especially a dog's nose and jaws) and prevents it from eating or biting\nn03803780\tX-ray film of the spinal cord and spinal nerve roots and subarachnoid space\nn03804211\ta streamlined enclosure for an aircraft engine\nn03804744\ta thin pointed piece of metal that is hammered into materials as a fastener\nn03805180\ta brush used to clean a person's fingernails\nn03805280\ta small flat file for shaping the nails\nn03805374\tflattened boss on the end of nail opposite to the point\nn03805503\tsomething resembling the head of a nail that is used as an ornamental device\nn03805725\ta cosmetic lacquer that dries quickly and that is applied to the nails to color them or make them shiny\nn03805933\ta soft lightweight muslin used especially for babies\nn03807334\ta set of graduated rods formerly used to do multiplication and division by a method invented by John Napier\nn03809211\tan aromatic ointment used in antiquity\nn03809312\ta commercial airliner with a single aisle\nn03809603\tcorduroy with narrow ribs\nn03809686\ta vestibule leading to the nave of a church\nn03809802\tportico at the west 
end of an early Christian basilica or church\nn03810412\ta tube inserted into the trachea through the nose and pharynx; used to deliver oxygen\nn03810952\tmemorial consisting of a structure or natural landmark of historic interest; set aside by national government for preservation and public enjoyment\nn03811295\ta submarine that is propelled by nuclear power\nn03811444\ta system that provides information useful in determining the position and course of a ship or aircraft\nn03811847\tequipment for a navy\nn03811965\tnaval weaponry consisting of a large gun carried on a warship\nn03812263\tnaval weaponry consisting of a missile carried on a warship\nn03812382\tnaval equipment consisting of a shipboard radar\nn03812789\ta shipboard system for collecting and displaying tactical data\nn03812924\tweaponry for warships\nn03813078\tthe central area of a church\nn03813176\tan instrument used for navigating\nn03813946\ta very large wine bottle holding the equivalent of 20 normal bottles of wine; used especially for display\nn03814528\ta band around the collar of a garment\nn03814639\ta brace worn to steady the neck\nn03814727\tan ornamental white cravat\nn03814817\ta kerchief worn around the neck\nn03814906\tjewelry consisting of a cord or chain (often bearing gems) worn about the neck as an ornament (especially by women)\nn03815149\tdecoration worn about the neck (fur piece or tight necklace) as an ornament\nn03815278\tthe line formed by the edge of a garment around the neck\nn03815482\tan article of apparel worn about the neck\nn03815615\tneckwear consisting of a long narrow piece of material worn (mostly by men) under a collar and tied in knot at the front\nn03816005\tarticles of clothing worn about the neck\nn03816136\ta sharp pointed implement (usually steel)\nn03816394\ta slender pointer for indicating the reading on the scale of a measuring instrument\nn03816530\tsmall pliers with long thin jaws for fine work\nn03816849\ta creation created or assembled by needle and 
thread\nn03817191\ta piece of photographic film showing an image with light and shade or colors reversed\nn03817331\tthe pole of a magnet that points toward the south when the magnet is suspended freely\nn03817522\tthe terminal of a battery that is connected to the negative plate\nn03817647\ta loose dressing gown for women\nn03818001\ta stone tool from the Neolithic Age\nn03818343\ta lamp consisting of a small gas-discharge tube containing neon at low pressure; luminescence is produced by the action of currents at high frequencies that are wrapped a few turns around the tube\nn03819047\ta measuring instrument that uses a grid for measuring the altitude, direction, and velocity of movement of clouds\nn03819336\tfurniture pieces made to fit close together\nn03819448\tdevice consisting of an artificial egg left in a nest to induce hens to lay their eggs in it\nn03819595\tan open fabric of string or rope or wire woven together at regular intervals\nn03819994\ta trap made of netting to catch fish or birds or insects\nn03820154\tgame equipment consisting of a strip of netting dividing the playing area in tennis or badminton\nn03820318\ta goal lined with netting (as in soccer or hockey)\nn03820728\t(electronics) a system of interconnected electronic components or circuits\nn03820950\ta system of intersecting lines or channels\nn03821145\tatom bomb that produces lethal neutrons with less blast\nn03821424\tthe central pillar of a circular staircase\nn03821518\tthe post at the top or bottom of a flight of stairs; it supports the handrail\nn03822171\tthe physical object that is the product of a newspaper publisher\nn03822361\ta reading room (in a library or club) where newspapers and other periodicals can be read\nn03822504\tan office in which news is processed by a newspaper or news agency or television or radio station\nn03822656\ta stall where newspapers and other periodicals are sold\nn03822767\treflecting telescope in which the image is viewed through an eyepiece 
perpendicular to main axis\nn03823111\tthe writing point of a pen\nn03823216\tan iron with considerable loft\nn03823312\ta rechargeable battery with a nickel cathode and a cadmium anode; often used in emergency systems because of its low discharge rate when not in use\nn03823673\ta storage battery having a nickel oxide cathode and an iron anode with an electrolyte of potassium hydroxide; each cell gives about 1.2 volts\nn03823906\toptical device that produces plane-polarized light\nn03824197\ta doorbell to be used at night\nn03824284\ta cloth cap worn in bed\nn03824381\tlingerie consisting of a loose dress designed to be worn in bed by women\nn03824589\tdoorlock operated by a knob on the inside and a key on the outside\nn03824713\tlight (as a candle or small bulb) that burns in a bedroom at night (as for children or invalids)\nn03824999\tnightclothes worn by men\nn03825080\tgarments designed to be worn in bed\nn03825271\ta bowling pin of the type used in playing ninepins or (in England) skittles\nn03825442\tball used to knock down ninepins\nn03825673\ta fine strong sheer silky fabric made of silk or rayon or nylon\nn03825788\ta flexible cap on a baby's feeding bottle or pacifier\nn03825913\ta rubber or plastic shield to protect the nipples of nursing women\nn03826039\ta face veil covering the lower part of the face (up to the eyes) worn by observant Muslim women\nn03826186\ta prefabricated hut of corrugated iron having a semicircular cross section\nn03827420\trough brick masonry used to fill in the gaps in a wooden frame\nn03827536\ta device (such as a clapper or bell or horn) used to make a loud noise at a celebration\nn03828020\ta passenger car for passengers who want to avoid tobacco smoke\nn03829340\tcomputer storage that is not lost when the power is turned off\nn03829857\tloose-fitting single-breasted jacket\nn03829954\ta water wheel with buckets attached to the rim; used to raise water for transfer to an irrigation channel\nn03831203\ta canvas bag that is 
used to feed an animal (such as a horse); covers the muzzle and fastens at the top of the head\nn03831382\ta strap that is the part of a bridle that goes over the animal's nose\nn03831757\ta flute that is played by blowing through the nostrils (used in some Asian countries)\nn03832144\ta wheel located under the nose of an airplane that is part of the plane's landing gear\nn03832673\ta small compact portable computer\nn03833907\tship whose motive power comes from the energy of a nuclear reactor\nn03834040\t(physics) any of several kinds of apparatus that maintain and control a nuclear reaction for the production of energy or artificial elements\nn03834472\ta rocket engine in which a nuclear reactor is used to heat a propellant\nn03834604\ta weapon of mass destruction whose explosive power derives from a nuclear reaction\nn03835197\ta painting of a naked human figure\nn03835729\tan embroidered rug made from a coarse Indian felt\nn03835941\ta long loose habit worn by nuns in a convent\nn03836062\ta child's room for a baby\nn03836451\ta fastener made by screwing a nut onto a threaded bolt\nn03836602\ta compound lever used to crack nuts open\nn03836906\ta synthetic fabric\nn03836976\twomen's stockings made from a sheer material (nylon or rayon or silk)\nn03837422\tan implement used to propel or steer a boat\nn03837606\ta kiln for drying hops\nn03837698\ta building containing an oast (a kiln for drying hops); usually has a conical or pyramidal roof\nn03837869\ta stone pillar having a rectangular cross section tapering towards a pyramidal top\nn03838024\tthe billiard ball that is intended to be the first ball struck by the cue ball\nn03838298\tthe lens or system of lenses in a telescope or microscope that is nearest the object being viewed\nn03838748\ta bandage in which successive turns proceed obliquely up or down a limb\nn03838899\ta slender double-reed instrument; a woodwind with a conical bore and a double-reed mouthpiece\nn03839172\tan alto oboe; precursor of the 
English horn\nn03839276\tan oboe pitched a minor third lower than the ordinary oboe; used to perform baroque music\nn03839424\tlookout consisting of a dome-shaped observatory\nn03839671\ta building designed and equipped to observe astronomical phenomena\nn03839795\tan obstruction that stands in the way (and must be removed or surmounted or circumvented)\nn03840327\ta prosthesis used to close an opening (as to close an opening of the hard palate in cases of cleft palate)\nn03840681\tegg-shaped terra cotta wind instrument with a mouthpiece and finger holes\nn03840823\ta measuring instrument for measuring angles to a celestial body; similar to a sextant but with 45 degree calibration\nn03841011\tcaliper having the points on its legs both curve in the same direction\nn03841143\ta meter that shows mileage traversed\nn03841290\ta circular or oval window; 17th or 18th century French architecture\nn03841666\tplace of business where professional or clerical duties are performed\nn03842012\ta building containing offices where work is done\nn03842156\tfurniture intended for use in an office\nn03842276\ta mess for the exclusive use of officers\nn03842377\telectronic equipment not in direct communication (or under the control of) the central processing unit\nn03842585\ta molding that (in section) has the shape of an S with the convex part above and the concave part below\nn03842754\ta pointed arch having an S-shape on both sides\nn03842986\ta meter for measuring electrical resistance in ohms\nn03843092\toil paint containing pigment that is used by an artist\nn03843316\ta can with a long nozzle to apply oil to machinery\nn03843438\tcloth treated on one side with a drying oil or synthetic resin\nn03843555\ta filter that removes impurities from the oil used to lubricate an internal-combustion engine\nn03843883\theater that burns oil (as kerosine) for heating or cooking\nn03844045\ta lamp that burns oil (as kerosine) for light\nn03844233\tpaint in which a drying oil is the 
vehicle\nn03844550\ta pump that keeps a supply of oil on moving parts\nn03844673\ta refinery for petroleum\nn03844815\ta macintosh made from cotton fabric treated with oil and pigment to make it waterproof\nn03844965\ta thin film of oil floating on top of water (especially crude oil spilled from a ship)\nn03845107\ta whetstone for use with oil\nn03845190\ta cargo ship designed to carry crude oil in bulk\nn03845990\tnecktie indicating the school the wearer attended\nn03846100\ta cloth of an olive-brown color used for military uniforms\nn03846234\tmilitary uniform of the United States Army; made from cloth of a dull olive color\nn03846431\ta seated statue of the supreme god of ancient Greek mythology created for the temple at Olympia; the statue was 40 feet tall and rested on a base that was 12 feet high\nn03846677\tpan for cooking omelets\nn03846772\tan antenna that sends or receives signals equally in all directions\nn03846970\ta navigational system consisting of a network of radio beacons that provide aircraft with information about exact position and bearing\nn03847471\ta dome that is shaped like a bulb; characteristic of Russian and Byzantine church architecture\nn03847823\ta public marketplace where food and merchandise is sold\nn03848033\tan incomplete electrical circuit in which no current flows\nn03848168\ta wrench having parallel jaws at fixed separation (often on both ends of the handle)\nn03848348\ta hand tool used for opening sealed containers (bottles or cans)\nn03848537\ta furnace for making steel in which the steel is placed on a shallow hearth and flames of burning gas and hot air play over it\nn03849275\ta woodworking plane designed to cut rabbets\nn03849412\trear gunsight having an open notch instead of a peephole or telescope\nn03849679\tornamental work (such as embroidery or latticework) having a pattern of openings\nn03849814\ta building where musical dramas are performed\nn03849943\ta large cloak worn over evening clothes\nn03850053\tbinocular 
microscope used in surgery to provide a clear view of small and inaccessible parts of the body (as in microsurgery)\nn03850245\ta room in a hospital equipped for the performance of surgical operations\nn03850492\ttable on which the patient lies during a surgical operation\nn03850613\tmedical instrument for examining the retina of the eye\nn03851341\ta device for producing or controlling light\nn03851787\ta disk coated with plastic that can store digital data as tiny pits etched in the surface; is read with a laser that scans the surface\nn03852280\tan instrument designed to aid vision\nn03852544\ta pyrometer that uses the color of the light emitted by a hot object\nn03852688\tan astronomical telescope designed to collect and record light from cosmic sources\nn03853291\tlowered area in front of a stage where an orchestra accompanies the performers\nn03853924\tan early bicycle with a very large front wheel and small back wheel\nn03854065\twind instrument whose sound is produced by means of pipes arranged in sets supplied with air from a bellows and controlled from a large complex musical keyboard\nn03854421\ta sheer stiff muslin\nn03854506\ta self-luminous diode (it glows when an electrical field is applied to the electrodes) that does not require backlighting or diffusers\nn03854722\ta gallery occupied by a church organ\nn03854815\tthe flues and stops on a pipe organ\nn03855214\ta fabric made of silk or a silklike fabric that resembles organdy\nn03855333\ta projecting bay window corbeled or cantilevered out from a wall\nn03855464\ta red or orange-red flag used as a standard by early French kings\nn03855604\ta gasket consisting of a flat ring of rubber or plastic; used to seal a joint against high pressure\nn03855756\tan acrylic fiber or the lightweight crease-resistant fabric made with Orlon yarns\nn03855908\tthe fourth or lowest deck\nn03856012\ta public institution for the care of orphans\nn03856335\ta richly embroidered edging on an ecclesiastical 
vestment\nn03856465\tplanetarium consisting of an apparatus that illustrates the relative positions and motions of bodies in the solar system by rotation and revolution of balls moved by wheelwork; sometimes incorporated in a clock\nn03856728\ta now obsolete picture pickup tube in a television camera; electrons emitted from a photoemissive surface in proportion to the intensity of the incident light are focused onto the target causing secondary emission of electrons\nn03857026\ta photographic film sensitive to green and blue and violet light\nn03857156\theavier-than-air craft that is propelled by the flapping of wings\nn03857291\tan ophthalmoscope with a layer of water to neutralize the refraction of the cornea\nn03857687\ta device for making a record of the wave forms of fluctuating voltages or currents\nn03857828\telectronic equipment that provides visual images of varying electrical quantities\nn03858085\tany receptacle for the burial of human bones\nn03858183\tmedical instrument consisting of a magnifying lens and light; used for examining the external ear (the auditory meatus and especially the tympanic membrane)\nn03858418\tthick cushion used as a seat\nn03858533\ta dungeon with the only entrance or exit being a trap door in the ceiling\nn03858837\ta wood or metal receptacle placed on your desk to hold your outgoing material\nn03859000\tinternal-combustion engine that mounts at stern of small boat\nn03859170\ta motorboat with an outboard motor\nn03859280\ta building that is subordinate to and separate from a main building\nn03859495\tclothing for use outdoors\nn03859608\tthe outlet of a river or drain or other source of water\nn03859958\ta set of clothing (with accessories)\nn03860234\ta shop that provides equipment for some specific purpose\nn03860404\ta small outbuilding with a bench having holes through which a user can defecate\nn03861048\telectronic or electromechanical equipment connected to a computer and used to transfer data out of the computer in 
the form of text, images, sounds, or other media\nn03861271\ta stabilizer for a canoe; spars attach to a shaped log or float parallel to the hull\nn03861430\ta seagoing canoe (as in South Pacific) with an outrigger to prevent it from upsetting\nn03861596\tcaliper for measuring outside dimensions; points on its legs curve inward\nn03861842\tcar mirror that reflects the view at side and behind car\nn03862379\tsubsidiary defensive structure lying outside the main fortified area\nn03862676\tkitchen appliance used for baking or roasting\nn03862862\ta thermometer that registers the temperature inside an oven\nn03863108\t(usually plural) work clothing consisting of denim trousers (usually with a bib and shoulder straps)\nn03863262\ta loose protective coverall or smock worn over ordinary clothing for dirty work\nn03863657\tan additional protective coating (as of paint or varnish)\nn03863783\ta high gear used at high speeds to maintain the driving speed with less output power\nn03863923\ta garment worn over other garments\nn03864139\ta simple small knot (often used as part of other knots)\nn03864356\tprojection that extends beyond or hangs over something else\nn03864692\ta projector operated by a speaker; projects the image over the speaker's head\nn03865288\ta shelf over a mantelpiece\nn03865371\ta small traveling bag to carry clothing and accessories for staying overnight\nn03865557\tbridge formed by the upper level of a crossing of two highways at different levels\nn03865820\ta manually operated device to correct the operation of an automatic device\nn03865949\tfootwear that protects your shoes from water or snow or cold\nn03866082\tan outer skirt worn over another skirt\nn03867854\ta wooden framework bent in the shape of a U; its upper ends are attached to the horizontal yoke and the loop goes around the neck of an ox\nn03868044\tgeneral term for an ancient and prestigious and privileged university (especially Oxford University or Cambridge University)\nn03868242\ta 
cart that is drawn by an ox\nn03868324\tan oval or round dormer window\nn03868406\ta low shoe laced over the instep\nn03868643\ta measuring instrument that measures the oxygen in arterial blood\nn03868763\ta blowtorch that burns oxyacetylene\nn03868863\ta breathing device that is placed over the mouth and nose; supplies oxygen from an attached storage tank\nn03869838\ta bar (as in a restaurant) that specializes in oysters prepared in different ways\nn03869976\ta workplace where oysters are bred and grown\nn03870105\ta high-performance car that leads a parade of competing cars through the pace lap and then pulls off the course\nn03870290\tan implanted electronic device that takes over the function of the natural cardiac pacemaker\nn03870546\ta convenient package or parcel (as of cigarettes or film)\nn03870672\ta bundle (especially one carried on the back)\nn03870980\ta cream that cleanses and tones the skin\nn03871083\ta wrapped container\nn03871371\ta store that sells alcoholic beverages for consumption elsewhere\nn03871524\tmaterial used to make packages\nn03871628\ta small package or bundle\nn03871724\ta large crate in which goods are packed for shipment or storage\nn03871860\ta plant where livestock are slaughtered and processed and packed as meat products\nn03872016\ta building where foodstuffs are processed and packed\nn03872167\ta large needle used to sew up canvas packages\nn03872273\ta saddle for pack animals to which loads can be attached\nn03873416\ta short light oar used without an oarlock to propel a canoe or small boat\nn03873699\ta blade of a paddle wheel or water wheel\nn03873848\tsmall wooden bat with a flat surface; used for hitting balls in various games\nn03873996\ta wooden covering for the upper part of a paddlewheel\nn03874138\ta steam vessel propelled by paddle wheels\nn03874293\ta large wheel fitted with paddles and driven by an engine in order to propel a boat\nn03874487\tpen where racehorses are saddled and paraded before a 
race\nn03874599\ta detachable lock; has a hinged shackle that can be passed through the staple of a hasp or the links in a chain and then snapped shut\nn03874823\ta printer that prints one page at a time\nn03875218\ta substance used as a coating to protect or decorate a surface (especially a mixture of pigment suspended in a liquid); dries to form a hard coating\nn03875806\ta capsule filled with water-soluble dye used as a projectile in playing the game of paintball\nn03875955\tan air gun used in the game of paintball; designed to simulate a semiautomatic\nn03876111\ta box containing a collection of cubes or tubes of artists' paint\nn03876231\ta brush used as an applicator (to apply paint)\nn03877351\ta soft wool fabric with a colorful swirled pattern of curved shapes\nn03877472\t(usually plural) loose-fitting nightclothes worn for sleeping or lounging; have a jacket top and trousers\nn03877674\ta pair of loose trousers tied by a drawstring around the waist; worn by men and women in some Asian countries\nn03877845\tofficial residence of an exalted person (as a sovereign)\nn03878066\ta large and stately mansion\nn03878211\ta large ornate exhibition hall\nn03878294\ta closed litter carried on the shoulders of four bearers\nn03878418\ta stone tool from the Paleolithic age\nn03878511\ta public place in ancient Greece or Rome devoted to the training of wrestlers and other athletes\nn03878674\tboard that provides a flat surface on which artists mix paints and the range of colors used\nn03878828\ta spatula used by artists for mixing or applying or scraping off oil paints\nn03878963\tfortification consisting of a strong fence made of stakes driven into the ground\nn03879456\ta hand tool with a flat blade used by potters for mixing and shaping clay\nn03879705\tone of the rounded armor plates at the armpits of a suit of armor\nn03880032\tcloak or mantle worn by men in ancient Rome\nn03880129\t(Roman Catholic Church) vestment consisting of a band encircling the shoulders with 
two lappets hanging in front and back\nn03880323\tshallow container made of metal\nn03880531\tcooking utensil consisting of a wide metal vessel\nn03881305\tturner for serving or turning pancakes\nn03881404\tphotographic film sensitive to light of all colors (including red)\nn03881534\ta police cruiser\nn03882611\ta panel or section of panels in a wall or door\nn03882960\tthe handle of a pan\nn03883054\ta button to push in order to summon help in case of an emergency\nn03883385\ta large basket (usually one of a pair) carried by a beast of burden or on by a person\nn03883524\teither of a pair of bags or boxes hung over the rear wheel of a vehicle (as a bicycle)\nn03883664\ta small pan or cup (usually of tin)\nn03883773\ta circular prison with cells distributed around a central surveillance station; proposed by Jeremy Bentham in 1791\nn03883944\tan area where everything is visible\nn03884397\ta primitive wind instrument consisting of several parallel pipes bound together\nn03884554\ttrousers worn in former times\nn03884639\ta large moving van (especially one used for moving furniture)\nn03884778\t(antiquity) a temple to all the gods\nn03884926\ta monument commemorating a nation's dead heroes\nn03885028\tshort underpants for women or children (usually used in the plural)\nn03885194\tany fabric used to make trousers\nn03885293\tthe leg of a pair of trousers\nn03885410\tmechanical device used to copy a figure or plan on a different scale\nn03885535\ta small storeroom for storing foods or wines\nn03885669\ta pair of pants and a matching jacket worn by women\nn03885788\ta woman's undergarment that combines a girdle and panties\nn03885904\ta woman's tights consisting of underpants and stockings\nn03886053\tan armored vehicle or tank\nn03886641\ta chain made of loops of colored paper; used to decorate a room\nn03886762\ta wire or plastic clip for holding sheets of paper together\nn03886940\ta cutting implement for cutting sheets of paper to the desired size\nn03887185\ta 
fastener for holding a sheet of paper in place\nn03887330\ta device for inserting sheets of paper into a printer or typewriter\nn03887512\ta mill where paper is manufactured\nn03887697\ta disposable towel made of absorbent paper\nn03887899\ta parabolic reflector for light radiation\nn03888022\ta concave reflector used to produce a parallel beam when the source is placed at its focus or to focus an incoming parallel beam\nn03888257\trescue equipment consisting of a device that fills with air and retards your fall\nn03888605\tgymnastic apparatus consisting of two parallel wooden rods supported on uprights\nn03888808\ta closed circuit in which the current divides into two or more paths before recombining to complete the circuit\nn03888998\tan interface between a computer and a printer where the computer sends multiple bits of information to the printer simultaneously\nn03889397\ta stout straight knife used in Malaysia and Indonesia\nn03889503\tfortification consisting of a low wall\nn03889626\ta low wall along the edge of a roof or balcony\nn03889726\tparachute that will lift a person up into the air when it is towed by a motorboat or a car\nn03889871\ta handheld collapsible source of shade\nn03890093\ta small sharp knife used in paring fruits or vegetables\nn03890233\ta tall slender glass with a short stem in which parfait is served\nn03890358\tornamental plasterwork\nn03890514\tcomputer that registers bets and divides the total amount bet among those who won\nn03891051\ta kind of heavy jacket (`windcheater' is a British term)\nn03891251\ta bench in a public park\nn03891332\ta coin-operated timer located next to a parking space; depositing money into it entitles you to park your car there for a specified length of time\nn03891538\treception room in an inn or club where visitors can be received\nn03892178\ta floor made of parquetry\nn03892425\ta patterned wood inlay used to cover a floor\nn03892557\tan official residence provided by a church for its parson or vicar or 
rector\nn03892728\ta sturdy rectangular table with block legs at the four corners; the top and the legs are the same width\nn03893935\ta denture replacing one or more teeth in a dental arch\nn03894051\ta chamber in which particles can be made visible\nn03894379\ta vertical structure that divides or separates (as a wall divides one room from another)\nn03894677\ta bin for holding spare parts\nn03894933\ta telephone line serving two or more subscribers\nn03895038\ta wall erected on the line between two properties and shared by both owners\nn03895170\ta courtyard or portico in front of a building (especially a cathedral)\nn03895866\ta railcar where passengers ride\nn03896103\ta ship built to carry passengers\nn03896233\ta train that carries passengers\nn03896419\ta van that carries passengers\nn03896526\ta mounting for a picture using gummed tape\nn03896628\ta type of LCD display used for some portable computers; parallel wires run both vertically and horizontally and pixels are turned on when the wires intersecting at that pixel are both energized\nn03896984\tkey that secures entrance everywhere\nn03897130\tan opening that resembles a window between two rooms (especially a shelved opening between a kitchen and dining room that is used to pass dishes)\nn03897634\ta serving cart for displaying pastry desserts to restaurant patrons\nn03897943\ta piece of cloth used as decoration or to mend or cover a hole\nn03898129\ta length of wire that has a plug at each end; used to make connections at a patchboard\nn03898271\ta heavy perfume made from the patchouli plant\nn03898395\ta flat pocket sewn to the outside of a garment\nn03898633\ta quilt made by sewing patches of different materials together\nn03898787\ta cigar-shaped log with rotary fins that measure the ship's speed\nn03899100\ta type of lift having a chain of open compartments that move continually in an endless loop so that (agile) passengers can step on or off at each floor\nn03899612\ta fine coating of oxide on the 
surface of a metal\nn03899768\tusually paved outdoor area adjoining a residence\nn03899933\ta bakery specializing in French pastry\nn03900028\ta scarf worn by Sikh men\nn03900194\ta vessel assigned to patrol an area\nn03900301\ta pan for cooking patties or pasties\nn03900393\ta setting with precious stones so closely set that no metal shows\nn03900979\tlarge and often sumptuous tent\nn03901229\ta machine for laying pavement\nn03901338\t(Middle Ages) a large heavy oblong shield protecting the whole body; originally carried but sometimes set up in permanent position\nn03901750\t(chess) the least powerful piece; moves only forward and captures only to the side; it can be promoted to a more powerful piece if it reaches the 8th rank\nn03901974\ta shop where loans are made with personal property as security\nn03902125\ta coin-operated telephone\nn03902220\ta removable circuit board for a personal computer; fits into a slot in the mother board\nn03902482\ta grove of peach trees\nn03902756\ta sailor's heavy woolen double-breasted jacket\nn03903133\ta stout lever with a sharp spike; used for handling logs\nn03903290\tan adornment worn on the chest or breast\nn03903424\ta lever that is operated with the foot\nn03903733\tsnug trousers ending at the calves; worn by women and girls\nn03903868\tan architectural support or base (as for a column or statue)\nn03904060\ta table supported by a single central column\nn03904183\tstreet crossing where pedestrians have right of way; often marked in some way (especially with diagonal stripes)\nn03904433\ta tricycle (usually propelled by pedalling); used in the Orient for transporting passengers for hire\nn03904657\ta triangular gable between a horizontal entablature and a sloping roof\nn03904782\tmeasuring instrument for recording the number of steps taken in walking\nn03904909\ta device for peeling vegetables or fruits\nn03905361\trear gunsight having an adjustable eyepiece with a small aperture through which the front sight and the 
target are aligned\nn03905540\ta wooden pin pushed or driven into a surface\nn03905730\ta holder attached to the gunwale of a boat that holds the oar in place and acts as a fulcrum for rowing\nn03905947\tregulator that can be turned to regulate the pitch of the strings of a stringed instrument\nn03906106\ta prosthesis that replaces a missing leg\nn03906224\ta board perforated with regularly spaced holes into which pegs can be fitted\nn03906463\ta bit with a bar mouthpiece that is designed to combine a curb and snaffle\nn03906590\tan acronym for pedestrian light control; a pedestrian crossing with traffic lights that are controlled by pedestrians\nn03906789\ta sleeveless cape that is lined or trimmed with fur\nn03906894\tmeasuring instrument for performing pelvimetry\nn03906997\ta writing implement with a point from which ink flows\nn03907475\ta penal institution where prisoners are exiled (often located on an island from which escape is difficult or impossible)\nn03907654\tan institution where persons are confined for punishment and to protect the public\nn03907908\t(ice hockey) an enclosed bench to the side of an ice-hockey rink for players who are serving time penalties\nn03908111\ta drawing executed with pen and ink\nn03908204\ta thin cylindrical pointed writing implement; a rod of marking substance encased in wood\nn03908456\ta cosmetic in a long thin stick; designed to be applied to a particular part of the face\nn03908618\ta box for holding pencils\nn03908714\ta rotary implement for sharpening the point on pencils\nn03909020\tan earring with a pendant ornament\nn03909160\tan apparatus consisting of an object mounted so that it swings freely under the influence of gravity\nn03909406\ta clock regulated by a pendulum\nn03909516\t(18th century) a watch with a balance wheel having a fake pendulum attached to it\nn03909658\ta bomb with about 30% explosive and a casing designed to penetrate hardened targets before the explosive detonates\nn03911406\tan implant that 
creates an artificial erection\nn03911513\ta correctional institution for those convicted of major crimes\nn03911658\ta small pocketknife; originally used to cut quill pens\nn03911767\ta small flashlight resembling a fountain pen\nn03911866\ta long flag; often tapering\nn03912218\tan inexpensive fipple flute\nn03912821\tan apartment located on the top floors of a building\nn03913343\ta thermionic tube having five electrodes\nn03913930\ta garment worn by women in ancient Greece; cloth caught at the shoulders and draped in folds to the waist\nn03914106\ta flared ruffle attached to the waistline of a dress or jacket or blouse\nn03914337\ta mill for grinding pepper\nn03914438\ta shaker with a perforated top for sprinkling ground pepper\nn03914583\ta nonlethal aerosol spray made with the pepper derivative oleoresin capiscum; used to cause temporary blindness and incapacitate an attacker; also used as a bear deterrent\nn03914831\ta fine closely woven cotton fabric\nn03915118\ta coffeepot in which boiling water ascends through a central tube and filters back down through a basket of ground coffee beans\nn03915320\ta detonator that explodes when struck\nn03915437\ta musical instrument in which the sound is produced by one object striking another\nn03915900\ta line of small holes for tearing at a particular place\nn03916031\ta toiletry that emits and diffuses a fragrant odor\nn03916289\tan establishment where perfumes are made\nn03916385\tstore where perfumes are sold\nn03916470\tperfumes in general\nn03916720\t(computer science) electronic equipment connected by cable to the CPU of a computer\nn03917048\tan optical instrument that provides a view of an otherwise obstructed field\nn03917198\ta colonnade surrounding a building or enclosing a court\nn03917327\ta wig for men that was fashionable in the 17th and 18th centuries\nn03917814\ta fabric that has been chemically processed to resist wrinkles and hold its shape\nn03918074\ta machine that can continue to do work 
indefinitely without drawing energy from some external source; impossible under the law of conservation of energy\nn03918480\ta small digital computer based on a microprocessor and designed to be used by one person at a time\nn03918737\ta lightweight consumer electronic device that looks like a hand-held computer but instead performs specific tasks; can serve as a diary or a personal database or a telephone or an alarm clock etc.\nn03919096\ta military vehicle (usually armored) for transporting military personnel and their equipment\nn03919289\ta club-shaped hand tool for grinding and mixing substances in a mortar\nn03919430\ta heavy tool of stone or iron (usually with a flat base and a handle) that is used to grind and mix material (as grain or drugs or pigments) against a slab of stone\nn03919808\tregulator consisting of a small cock or faucet or valve for letting out air or releasing compression or draining\nn03920288\ta shallow dish used to culture bacteria\nn03920384\tgauze saturated with petrolatum\nn03920641\ta shop where pet animals can be purchased\nn03920737\tundergarment worn under a skirt\nn03920867\tlong bench with backs; used in church by the congregation\nn03923379\ta small bottle that contains a drug (especially a sealed sterile container for injection by needle)\nn03923564\ta screw with a special head having crossed slots\nn03923692\ta screwdriver for use with Phillips screws\nn03923918\ta stylus that formerly made sound by following a groove in a phonograph record\nn03924069\tsound recording consisting of a disk with a continuous groove; used to reproduce music by rotating while a phonograph needle tracks in the groove\nn03924407\ta cathode that emits electrons when illuminated\nn03924532\tsurgical instrument containing a laser for use in photocoagulation\nn03924679\ta copier that uses photographic methods of making copies\nn03926148\tequipment used by a photographer\nn03926412\tlight-sensitive paper on which photograph can be 
printed\nn03926876\tmeasuring instrument for measuring the luminous intensity of a source by comparing it (visually or photoelectrically) with a standard source\nn03927091\ta photograph taken with the help of a microscope\nn03927299\ta duplicating machine that makes quick positive or negative copies directly on the surface of prepared paper\nn03927539\ta photocopy made on a Photostat machine\nn03927792\tpendulum consisting of an actual object allowed to rotate freely around a horizontal axis\nn03928116\ta keyboard instrument that is played by depressing keys that cause hammers to strike tuned strings and produce sounds\nn03928589\taction consisting of a system of levers that move a felt hammer to strike the strings when a key is depressed\nn03928814\ta bank of keys on a musical instrument\nn03928994\tthin steel wire of high tensile strength\nn03929091\ta small flute; pitched an octave above the standard flute\nn03929202\ta heavy iron tool with a wooden handle and a curved head that is pointed on both ends\nn03929443\ta thin sharp implement used for removing unwanted material\nn03929660\ta small thin device (of metal or plastic or ivory) used to pluck a stringed instrument\nn03929855\ta spiked helmet worn by German soldiers\nn03930229\ta boat serving as a picket\nn03930313\ta fence made of upright pickets\nn03930431\ta ship serving as a picket\nn03930515\ta barrel holding vinegar in which cucumbers are pickled\nn03930630\ta light truck with an open body and low sides and a tailboard\nn03931044\ta visual representation (of an object or scene or person or abstraction) produced on a surface\nn03931765\ta framework in which a picture is mounted\nn03931885\ta woman's dressy hat with a wide brim\nn03931980\trail fixed to a wall for hanging pictures\nn03932080\ta large window with a single pane (usually overlooking a view)\nn03932670\ta separate part consisting of fabric\nn03933391\tlodging for occasional or secondary use\nn03933933\ta support for two adjacent bridge 
spans\nn03934042\t(architecture) a vertical supporting structure (as a portion of wall between two doors or windows)\nn03934229\tan arch supported on piers\nn03934311\ta large mirror between two windows\nn03934565\ta low table set below a pier glass\nn03934656\ta representation of the Virgin Mary mourning over the dead body of Jesus\nn03934890\ta measuring instrument for measuring high pressures\nn03935116\tmold consisting of a bed of sand in which pig iron is cast\nn03935234\ta farm where pigs are raised or kept\nn03935335\ta child's coin bank (often shaped like a pig)\nn03935883\ta rectangular column that usually projects about a third of its width from the wall to which it is attached\nn03936269\ta column of wood or steel or concrete that is driven into the ground to provide support for a structure\nn03936466\ta machine that drives piling into the ground\nn03937543\ta small bottle for holding pills\nn03937835\ta small round woman's hat\nn03937931\ta seat behind the rider of a horse or motorbike etc.\nn03938037\ta wooden instrument of punishment on a post with holes for the wrists and neck; offenders were locked in and so exposed to public scorn\nn03938244\ta cushion to support the head of a sleeping person\nn03938401\ta cast-iron or steel block for supporting a journal or bearing\nn03938522\ta handmade lace worked on a pillow with threads wound on bobbins; the pattern is marked out on the pillow by pins\nn03938725\tbed linen consisting of a decorative cover for a pillow\nn03939062\ta small bit that drills a first hole to guide a larger drill\nn03939178\ta boat to carry pilots to and from large ships\nn03939281\tsmall auxiliary gas burner that provides a flame to ignite a larger gas burner\nn03939440\ta thick blue cloth used to make overcoats and coats for sailors etc\nn03939565\ta locomotive that precedes a train to check the track\nn03939677\tan enclosed compartment from which a vessel can be navigated\nn03939844\tindicator consisting of a light to indicate 
whether power is on or a motor is in operation\nn03940256\ta small slender (often pointed) piece of wood or metal used to support or fasten or attach things\nn03940894\tflagpole used to mark the position of the hole on a golf green\nn03941013\tcylindrical tumblers consisting of two parts that are held in place by springs; when they are aligned with a key the bolt can be thrown\nn03941231\tplaything consisting of a container filled with toys and candy; suspended from a height for blindfolded children to break with sticks\nn03941417\tgame equipment on which pinball is played\nn03941586\tspectacles clipped to the nose by a spring\nn03941684\ta hand tool for holding consisting of a compound lever for grasping\nn03941887\ta lever with a pointed projection that serves as a fulcrum; used to roll heavy wheels\nn03942028\ta variety of clip for holding pin curls\nn03942600\ta pen where stray animals are confined\nn03942813\tlight hollow ball used in playing table tennis\nn03942920\tthe head of a pin\nn03943115\ta gear with a small number of teeth designed to mesh with a larger wheel or rack\nn03943266\t(architecture) a slender upright spire at the top of a buttress of tower\nn03943623\tsmall puncture (as if made by a pin)\nn03943714\ta very thin stripe (especially a white stripe on a dark fabric)\nn03943833\ta fabric with very thin stripes\nn03943920\ta suit made from a fabric with very thin stripes\nn03944024\ta pin or bolt forming the pivot of a hinge\nn03944138\ta toy consisting of vanes of colored paper or plastic that is pinned to a stick and spins when it is pointed into the wind\nn03944341\ta wheel that has numerous pins that are set at right angles to its rim\nn03945459\ta small fipple flute that is played with the left hand while the right hand is free to beat a tabor\nn03945615\ta tubular wind instrument\nn03945817\ta small homemade bomb usually contained in a metal pipe\nn03945928\tcleaning implement consisting of a flexible tufted wire that is used to clean a 
pipe stem\nn03946076\ta hand tool for cutting pipe\nn03946162\tfitting consisting of threaded pieces of pipe for joining pipes together\nn03947111\tmeasuring instrument consisting of a graduated glass tube used to measure or transfer precise volumes of a liquid by drawing the liquid up into the tube\nn03947343\ta clamp for holding pipe that is to be cut or threaded\nn03947466\tadjustable wrench for gripping and turning a pipe; has two serrated jaws that are adjusted to grip the pipe\nn03947798\ttightly woven fabric with raised cords\nn03947888\ta ship that is manned by pirates\nn03948242\ta ski run densely packed with snow\nn03948459\ta firearm that is held and fired with one hand\nn03948830\ta handle (as of a gun or saw) shaped like the butt of a pistol\nn03948950\tmechanical device that has a plunging or thrusting motion\nn03949145\tseal consisting of a split metal ring that seals the gap between a piston and the cylinder wall\nn03949317\tconnecting rod that moves or is moved by a piston\nn03949761\t(auto racing) an area at the side of a racetrack where the race cars are serviced and refueled\nn03950228\tan open vessel with a handle and a spout for pouring\nn03950359\ta long-handled hand tool with sharp widely spaced prongs for lifting and pitching hay\nn03950537\ta wedge used to loft the golf ball over obstacles\nn03950647\ta small pipe sounding a tone of standard frequency; used to establish the starting pitch for unaccompanied singing\nn03950899\ta lightweight hat worn in tropical countries for protection from the sun\nn03951068\ta metal spike with a hole for a rope; mountaineers drive it into ice or rock to use as a hold\nn03951213\tmeasuring instrument consisting of a combined Pitot tube and static tube that measures total and static pressure; used in aircraft to measure airspeed\nn03951453\tmeasuring instrument consisting of a right-angled tube with an open end that is directed in opposition to the flow of a fluid and used to measure the velocity of fluid 
flow\nn03951800\ta large two-handed saw formerly used to cut logs into planks; one man stood above the log and the other in a pit below\nn03951971\taxis consisting of a short shaft that supports something that turns\nn03952150\ta window that opens by pivoting either horizontally or vertically\nn03952576\ta shop where pizzas are made and sold\nn03953020\tan establishment (a factory or an assembly plant or retail store or warehouse etc.) where business is conducted, goods are made or stored or processed or where services are rendered\nn03953416\tany building where congregations gather for prayer\nn03953901\ta piece of cloth sewn under an opening\nn03954393\ta flat metal disk ready for stamping as a coin\nn03954731\ta carpenter's hand tool with an adjustable blade for smoothing or shaping wood\nn03955296\ta power tool for smoothing or shaping wood\nn03955489\ta seat on a commercial airliner\nn03955809\tan apparatus or model for representing the solar systems\nn03955941\tan optical device for projecting images of celestial bodies and other astronomical phenomena onto the inner surface of a hemispherical dome\nn03956157\ta building housing an instrument for projecting the positions of the planets onto a domed ceiling\nn03956331\tan outer gear that revolves about a central sun gear of an epicyclic train\nn03956531\ta bed of boards (without a mattress)\nn03956623\t(nautical) a covering or flooring constructed of planks (as on a ship)\nn03956785\ta notebook for recording appointments and things to be done, etc.\nn03956922\tbuildings for carrying on industrial labor\nn03957315\ta decorative pot for house plants\nn03957420\tadhesive tape used in dressing wounds\nn03957762\twallboard with a gypsum plaster core bonded to layers of paper or fiberboard; used instead of plaster or wallboard to make interior walls\nn03957991\ta trowel used to spread and smooth plaster\nn03958227\ta bag made of thin plastic material\nn03958338\ta bomb made of plastic explosive\nn03958630\ta 
laminate made by bonding plastic layers\nn03958752\twrapping consisting of a very thin transparent sheet of plastic\nn03959014\ta metal breastplate that was worn under a coat of mail\nn03959123\tthe front of man's dress shirt\nn03959227\tthe ornamental front of a woman's bodice or shirt\nn03959701\ta metal sheathing of uniform thickness (such as the shield attached to an artillery piece to protect the gunners)\nn03960374\ta shallow receptacle for collection in church\nn03960490\tstructural member consisting of a horizontal beam that provides bearing and anchorage\nn03961394\tthe roller on a typewriter against which the keys strike\nn03961630\twork table of a machine tool\nn03961711\ta rack for holding plates to dry after they have been washed\nn03961828\trail or narrow shelf fixed to a wall to display plates\nn03961939\ta raised horizontal surface\nn03962525\tany military structure or vehicle bearing weapons\nn03962685\tthe combination of a particular computer and a particular operating system\nn03962852\ta bed without springs\nn03962932\trocking chair on a stationary base\nn03963028\ta thin coating of metal deposited on a surface\nn03963198\ta large shallow dish used for serving food\nn03963294\telectronic equipment comprising the part of a tape recorder that reproduces the recorded material\nn03963483\ta box for a child's toys and personal things (especially at a boarding school)\nn03963645\tyard consisting of an outdoor area for children's play\nn03964495\ta portable enclosure in which babies may be left to play\nn03964611\ta sports outfit for women or children; usually consists of shorts and a blouse\nn03965456\tmercantile establishment consisting of a carefully landscaped complex of shops representing leading merchandisers; usually includes restaurants and a convenient parking area; a modern version of the traditional marketplace\nn03965907\tany of various types of fold formed by doubling fabric back upon itself and then pressing or stitching into 
shape\nn03966206\tan enclosed space in which the air pressure is higher than outside\nn03966325\ta measuring instrument for measuring changes in volume of a part or organ or whole body (usually resulting from fluctuations in the amount of blood it contains)\nn03966582\ta small thin metal plate held against the body and struck with a plexor in percussive examinations\nn03966751\t(medicine) a small hammer with a rubber head used in percussive examinations of the chest and in testing reflexes\nn03966976\ta gripping hand tool with two hinged arms and (usually) serrated jaws\nn03967270\ta light gym shoe with a rubber sole and a canvas top\nn03967396\tan instrument (usually driven by a computer) for drawing graphs or pictures\nn03967562\ta farm tool having one or more heavy blades to break the soil and cut a furrow prior to sowing\nn03967942\tblockage consisting of an object designed to fill a hole tightly\nn03968293\tan electrical device with two or three pins that is inserted in a socket to make an electrical connection\nn03968479\ta fuse with a thread that screws into a socket\nn03968581\ta hole into which a plug fits (especially a hole where water drains away)\nn03968728\tthe metal bob of a plumb line\nn03969510\ta carpenter's level with a plumb line at right angles to it\nn03970156\thand tool consisting of a stick with a rubber suction cup at one end; used to clean clogged drains\nn03970363\tmen's baggy knickers hanging below the knees; formerly worn for sports (especially golf)\nn03970546\ta fabric with a nap that is longer and softer than velvet\nn03971218\ta laminate made of thin layers of wood\nn03971321\ta power drill powered by compressed air\nn03971960\tthe junction between a p-type semiconductor and an n-type semiconductor\nn03972146\ta junction transistor having an n-type semiconductor between a p-type semiconductor that serves as an emitter and a p-type semiconductor that serves as a collector\nn03972372\ta cooking vessel designed to poach food (such as 
fish or eggs)\nn03972524\ta small pouch inside a garment for carrying small articles\nn03973003\ta small battleship built to conform with treaty limitations on tonnage and armament (from 1925 to 1930)\nn03973285\ta small comb suitable for carrying in a pocket\nn03973402\ta flap that covers the access to a pocket\nn03973520\ta handkerchief that is carried in a pocket\nn03973628\ta knife with a blade that folds into the handle; suitable for carrying in the pocket\nn03973839\ta watch that is carried in a small watch pocket\nn03973945\ta detachable container of fuel on an airplane\nn03974070\tplaything consisting of a pole with foot rests and a strong spring; propelled by jumping\nn03974915\ta lightweight photographic camera with an autofocus\nn03975035\tan arch with a pointed apex; characteristic of Gothic architecture\nn03975657\ta trowel used to fill and finish masonry joints with mortar or cement\nn03975788\tlace worked with a needle in a buttonhole stitch on a paper pattern\nn03975926\tfire iron consisting of a metal rod with a handle; used to stir a fire\nn03976105\tan optical device used to measure the rotation of the plane of vibration of polarized light\nn03976268\t(trade mark) a plastic film that can polarize a beam of light; often used in sunglasses to eliminate glare\nn03976467\ta camera that develops and produces a positive print within seconds\nn03976657\ta long (usually round) rod of wood or metal or plastic\nn03977158\ta long fiberglass sports implement used for pole vaulting\nn03977266\ta battle ax used in the Middle Ages; a long handled ax and a pick\nn03977430\tan ax used to slaughter cattle; has a hammer opposite the blade\nn03977592\ta boat used by harbor police\nn03977966\tvan used by police to transport prisoners\nn03978421\ta temporary booth in a polling place which people enter to cast their votes\nn03978575\twooden ball that is struck with mallets in playing polo\nn03978686\ta mallet used to strike the ball in polo\nn03978815\ta woman's dress 
with a tight bodice and an overskirt drawn back to reveal a colorful underskirt\nn03978966\ta shirt with short sleeves designed for comfort and casual wear\nn03979377\tany of a large class of synthetic fabrics\nn03979492\ta medical instrument that records several physiological processes simultaneously (e.g., pulse rate and blood pressure and respiration and perspiration)\nn03980026\thairdressing consisting of a perfumed oil or ointment\nn03980478\ta gymnastic horse with a cylindrical body covered with leather and two upright handles (pommels) near the center; held upright by two steel supports, one at each end\nn03980874\ta blanket-like cloak with a hole in the center for the head\nn03980986\ta soft thin cloth woven from raw silk (or an imitation)\nn03981094\ta dagger with a slender blade\nn03981340\tthe vestments and other insignia of a pontiff (especially a bishop)\nn03981566\t(nautical) a floating structure (as a flat-bottomed boat) that serves as a dock or to support a bridge\nn03981760\ta temporary bridge built over a series of pontoons\nn03981924\ta cart with an underslung axle and two seats\nn03982232\tball used in playing pool\nn03982331\ta room with pool tables where pool is played\nn03982430\tgame equipment consisting of a heavy table on which pool is played\nn03982642\tan exposed partial weather deck on the stern superstructure of a ship\nn03982767\tbox for collecting alms, especially one in a church\nn03982895\tan establishment maintained at public expense in order to provide housing for the poor and homeless\nn03983396\ta bottle for holding soft drinks\nn03983499\tplaything consisting of a toy gun that makes a popping sound\nn03983612\ta ribbed fabric used in clothing and upholstery\nn03983712\ta container for cooking popcorn\nn03983928\ta mushroom-shaped valve that rises perpendicularly from its seat; commonly used in internal-combustion engines\nn03984125\ta small tent that is easy to carry and quick to set up\nn03984234\tceramic ware made of a more 
or less translucent ceramic\nn03984381\ta structure attached to the exterior of a building often forming a covered entrance\nn03984643\tman's hat with a low, flat crown and a snap brim\nn03984759\ta shallow metal bowl (usually with a handle)\nn03985069\ta small light typewriter; usually with a case in which it can be carried\nn03985232\ta personal computer that can easily be carried by hand\nn03985441\ta circular saw that is portable and is operated with a hand grip\nn03985881\tgate consisting of an iron or wooden grating that hangs in the entry to a castle or fortified town; can be lowered to prevent passage\nn03986071\tcanopy extending out from a building entrance to shelter those getting in and out of vehicles\nn03986224\ta carriage entrance passing through a building to an enclosed courtyard\nn03986355\ta large, flat, thin case for carrying loose papers or drawings or maps; usually leather\nn03986562\ta window in a ship or airplane\nn03986704\ta porch or entrance to a building consisting of a covered and often columned area\nn03986857\ta heavy curtain hung across a doorway\nn03986949\ta large travelling bag made of stiff leather\nn03987266\ta camera with a portrait lens\nn03987376\ta compound camera lens with a relatively high aperture\nn03987674\tthe pole of a magnet that points toward the north when the magnet is suspended freely\nn03987865\tthe terminal of a battery that is connected to the positive plate\nn03987990\ta tomograph that produces cross-sectional X-rays of metabolic processes in the body\nn03988170\tan upright consisting of a piece of timber or metal fixed firmly in an upright position\nn03988758\tmeter for bulk mailings that imprints correct prepaid postage on pieces of mail and records the total charge\nn03988926\ta structure consisting of vertical beams (posts) supporting a horizontal beam (lintel)\nn03989199\tclosed horse-drawn carriage with four wheels; formerly used to transport passengers and mail\nn03989349\ta small gate in the rear of a 
fort or castle\nn03989447\ta commissary on a United States Army post\nn03989665\ta shovel used to sink postholes\nn03989777\twind instrument used by postilions of the 18th and 19th centuries\nn03989898\tan inn for exchanging post horses and accommodating riders\nn03990474\tmetal or earthenware cooking vessel that is usually round and deep; often has a handle and lid\nn03991062\ta container in which plants are cultivated\nn03991202\ta bulbous stove in which wood or coal is burned\nn03991321\tsomething that seems impressive but in fact lacks substance\nn03991443\tresistors connected in series across a voltage source; used to obtain a desired fraction of the voltage\nn03991646\ta resistor with three terminals, the third being an adjustable center terminal; used to adjust voltages in radios and TV sets\nn03991837\ta measuring instrument for measuring direct current electromotive forces\nn03992325\ta jar of mixed flower petals and spices used as perfume\nn03992436\ta shard of pottery\nn03992509\ta horizontal rotating wheel holding the clay being shaped by a potter\nn03992703\tceramic ware made from clay and baked in a kiln\nn03992975\ta pot that holds 2 quarts\nn03993053\ttoilet consisting of a small seat used by young children\nn03993180\ta small or medium size container for holding or carrying things\nn03993403\ta medical dressing consisting of a soft heated mass of meal or clay that is spread on a cloth and applied to the skin to treat inflamed areas or improve circulation etc.\nn03993703\ta public enclosure for stray or unlicensed dogs\nn03993878\ttrap consisting of an arrangement of nets directing fish into an enclosure\nn03994008\tany of various cosmetic or medical preparations dispensed in the form of a pulverized powder\nn03994297\tammunition consisting of gunpowder and bullets for muskets\nn03994417\ta substance such that one to three tablespoons dissolved in a glass of warm water is a homemade emetic\nn03994614\tcontainer for carrying gunpowder; made of the 
hollow horn of an animal\nn03994757\tkeg (usually made of metal) for gunpowder or blasting powder\nn03995018\ta brake on an automobile that magnifies a small force applied to the brake pedal into a proportionately larger force applied to slow or stop the vehicle\nn03995265\ta cord to conduct power to an electrical appliance\nn03995372\ta power tool for drilling holes into hard materials\nn03995535\tcable used to distribute electricity\nn03995661\ta loom operated mechanically\nn03995856\ta lawn mower powered by a gasoline motor\nn03996004\ta device for converting a power supply to a voltage required by particular equipment\nn03996145\ta power tool for cutting wood\nn03996416\ta machine for excavating\nn03996849\tautomotive steering where engineer power amplifies the torque applied to the steering wheel\nn03997274\ta device that transfers power from an engine (as in a tractor or other motor vehicle) to another piece of equipment (as to a pump or jackhammer)\nn03997484\ta tool driven by a motor\nn03997875\tthe tent of an ancient Roman general\nn03998194\ta small rug used by Muslims during their devotions\nn03998333\t(Judaism) a shawl with a ritually knotted fringe at each corner; worn by Jews at morning prayer\nn03998673\tremoves dust particles from gases by electrostatic precipitation\nn03999064\ta prefabricated structure\nn03999160\tbuilding reserved for the officiating clergy\nn03999621\troom in which a monarch or other great person receives guests, assemblies, etc.\nn03999992\tany machine that exerts pressure to form or shape or cut materials or extract liquids or compress solids\nn04000311\ta machine used for printing\nn04000480\tclamp to prevent wooden rackets from warping when not in use\nn04000592\tbox reserved for reporters (as at a sports event)\nn04000716\tan area (sometimes in a balcony) set aside for reporters (especially in a legislative hall)\nn04000998\tthe greatest amount of sail that a ship can carry safely\nn04001132\tcabin consisting of the 
pressurized section of an aircraft or spacecraft\nn04001265\tautoclave for cooking at temperatures above the boiling point of water\nn04001397\ta dome-shaped building that is pressurized\nn04001499\tgauge for measuring and indicating fluid pressure\nn04001661\ta nuclear reactor that uses water as a coolant and moderator; the steam produced can drive a steam turbine\nn04001845\tprotective garment consisting of an inflatable suit for space or high altitude flying\nn04002262\ta sharp metal spike to hold a candle\nn04002371\tlow bench for kneeling on\nn04002629\tcoil forming the part of an electrical circuit such that changing current in it induces a current in a neighboring circuit\nn04003241\ta portable paraffin cooking stove; used by campers\nn04003359\ta man's double-breasted frock coat\nn04003856\ta fabric with a dyed pattern pressed onto it (usually by engraved rollers)\nn04004099\ta buffer that stores data until the printer is ready\nn04004210\tcomputer circuit consisting of an electronic sub-assembly; copper conductors are laminated on an insulating board or card and circuit components are inserted into holes and dip soldered\nn04004475\ta machine that prints\nn04004767\t(computer science) an output device that prints the results of data processing\nn04004990\ta cable between a computer and a printer\nn04005197\treligious residence in a monastery governed by a prior or a convent governed by a prioress\nn04005630\ta correctional institution where persons are confined while on trial or for punishment\nn04005912\ta camp for prisoners of war\nn04006067\ta privately owned warship commissioned to prey on the commercial shipping or warships of an enemy nation\nn04006227\ta telephone line serving a single subscriber\nn04006330\thedge of privet plants\nn04006411\ta flexible slender surgical instrument with a blunt end that is used to explore wounds or body cavities\nn04007415\tan endoscope for examining the rectum\nn04007664\ta pointed instrument that is used to prod 
into a state of motion\nn04008385\tmechanical system in a factory whereby an article is conveyed through sites at which successive operations are performed on it\nn04008634\ta weapon that is forcibly thrown or projected at a targets but is not self-propelled\nn04009552\tan optical instrument that projects an enlarged image onto a screen\nn04009801\tan optical device for projecting a beam of light\nn04009923\ta rope fitted with a hook and used for towing a gun carriage\nn04010057\ta knot in the rope used to drag a gun carriage\nn04010779\ta device that displays words for people to read\nn04010927\ta pointed projection\nn04011827\ta mechanical device that rotates to push against air or water\nn04012084\tan airplane that is driven by a propeller\nn04012482\tan airplane with an external propeller that is driven by a turbojet engine\nn04012665\tcounter tube whose output pulse is proportional to number of ions produced\nn04013060\ta system that provides a propelling or driving force\nn04013176\tthe wall that separates the stage from the auditorium in a modern theater\nn04013600\tthe arch over the opening in the proscenium wall\nn04013729\tcorrective consisting of a replacement for a part of the body\nn04014297\ta covering that is intend to protect from damage or injury\nn04015204\tclothing that is intended to protect the wearer from injury\nn04015786\ta collider that collides beams of protons and antiprotons\nn04015908\tdrafting instrument used to draw or measure angles\nn04016240\ta long-handled pruning saw with a curved blade at the end and sometimes a clipper; used to prune small trees\nn04016479\ta knife with a curved or hooked blade\nn04016576\ta handsaw used for pruning trees\nn04016684\tshears with strong blades used for light pruning of woody plants\nn04016846\tan ancient stringed instrument similar to the lyre or zither but having a trapezoidal sounding board under the strings\nn04017571\ta hygrometer consisting of a dry-bulb thermometer and a wet-bulb 
thermometer; their difference indicates the dryness of the surrounding air\nn04017807\ta small fast unarmored and lightly armed torpedo boat; P(atrol) T(orpedo) boat\nn04018155\tan electronic amplification system used as a communication system in public areas\nn04018399\ttavern consisting of a building with a bar and public rooms; often provides light meals\nn04018667\ta toilet that is available to the public\nn04019101\tconveyance for passengers or mail or freight\nn04019335\tstructures (such as highways or schools or bridges or docks) constructed at government expense for public use\nn04019541\ta vulcanized rubber disk 3 inches in diameter that is used instead of a ball in ice hockey\nn04019696\ta device used for pulling something\nn04019881\ta device (as a decorative loop of cord or fabric) for holding or drawing something back\nn04020087\ta chain (usually with a handle at the end) that is pulled in order to operate some mechanism (e.g. to flush a toilet)\nn04020298\ta simple machine consisting of a wheel with a groove in which a rope can run to change the direction or point of application of a force applied to the rope\nn04020744\tdesignated paved area beside a main road where cars can stop temporarily\nn04020912\tluxurious passenger car; for day or night travel\nn04021028\ta sweater that is put on by pulling it over the head\nn04021164\tcleaning implement consisting of an oily rag attached by a cord to a weight; is pulled through the barrel of a rifle or handgun to clean it\nn04021362\tan electronic counter that counts the number of electric pulses\nn04021503\ta generator of single or multiple voltage pulses; usually adjustable for pulse rate\nn04021704\ta circuit that times pulses\nn04021798\ta mechanical device that moves fluid or gas by pressure or suction\nn04022332\ta low-cut shoe without fastenings\nn04022434\taction mechanism in a modern rifle or shotgun; a back and forward motion of a sliding lever ejects the empty shell case and cocks the firearm and 
loads a new round\nn04022708\ta house where pumps (e.g. to irrigate) are installed and operated\nn04022866\ta pump house at a spa where medicinal waters are pumped and where patrons gather\nn04023021\ta type of pliers\nn04023119\tan enclosure in the middle of a ship's hold that protects the ship's pumps\nn04023249\ta tool for making holes or indentations\nn04023422\ta small board full of holes; each hole contains a slip of paper with symbols printed on it; a gambler pays a small sum for the privilege of pushing out a slip in the hope of obtaining one that entitles him to a prize\nn04023695\ta large bowl for serving beverages; usually with a ladle\nn04023962\tan inflated ball or bag that is suspended and punched for training in boxing\nn04024137\tpunch consisting of pliers for perforating paper or leather\nn04024274\ta power driven press used to shape metal parts\nn04024862\ta small light basket used as a measure for fruits\nn04024983\tan open flat-bottomed boat used in shallow waters and propelled by a long pole\nn04025508\ta wedge-shaped tent; usually without a floor or windows\nn04025633\ta screen used in India to separate women from men or strangers\nn04026053\tan apparatus for removing impurities\nn04026180\ta basic knitting stitch\nn04026417\ta small bag for carrying money\nn04026813\ta bicycle that must be pedaled\nn04026918\ta wide broom that is pushed ahead of the sweeper\nn04027023\tan electrical switch operated by pressing\nn04027367\ta radio receiver that can be tuned by pressing buttons\nn04027706\ta sandal attached to the foot by a thong over the toes\nn04027820\ta small gasoline engine (as on motor boat)\nn04027935\ta strip of cloth wound around the leg to form legging; used by soldiers in World War I\nn04028074\tthe iron normally used on the putting green\nn04028221\ta spatula used to mix or apply putty\nn04028315\ta game that tests your ingenuity\nn04028581\ta large vertical steel tower supporting high-tension power lines\nn04028764\ta tower for 
guiding pilots or marking the turning point in a race\nn04029416\ta large tent shaped like a pyramid; can hold half a dozen people\nn04029647\ta design produced by pyrography\nn04029734\ta thermometer designed to measure high temperatures\nn04029913\ta pyrometer consisting of a series of cones that melt at different temperatures\nn04030054\ta thermostat that operates at very high temperatures\nn04030161\tany receptacle in which wafers for the Eucharist are kept\nn04030274\ta chest in which coins from the mint are held to await assay\nn04030414\ta small box used by ancient Greeks to hold medicines\nn04030518\ta rectangular area surrounded on all sides by buildings\nn04030846\ta measuring instrument for measuring altitude of heavenly bodies\nn04030965\ta stereophonic sound recording or reproducing system using four separate channels\nn04031884\tliving accommodations (especially those assigned to military personnel)\nn04032509\ta long stout staff used as a weapon\nn04032603\ta stamp mill for stamping quartz\nn04032936\ta mercury-vapor lamp that is enclosed in a quartz container instead of a glass container\nn04033287\t(chess) the most powerful piece\nn04033425\tone of four face cards in a deck bearing a picture of a queen\nn04033557\tvertical tie post in a roof truss\nn04033801\ta primitive stone mill for grinding corn by hand\nn04033901\tpen made from a bird's feather\nn04033995\tbedding made of two layers of cloth filled with stuffing and stitched together\nn04034262\ta bedspread constructed like a thin quilt\nn04034367\ta material used for making a quilt, or a quilted fabric\nn04035231\tcalculator consisting of a cord with attached cords; used by ancient Peruvians for calculating and keeping records\nn04035634\ta molding having a small groove in it\nn04035748\twhip with a leather thong at the end\nn04035836\tcase for holding arrows\nn04035912\tthe keystone of an arch\nn04036155\tgame equipment consisting of a ring of iron or circle of rope used in playing the game 
of quoits\nn04036303\tthe standard typewriter keyboard; the keys for Q, W, E, R, T, and Y are the first six from the left on the top row of letter keys\nn04036776\ta rectangular groove made to hold two pieces together\nn04036963\ta joint formed by fitting together two rabbeted boards\nn04037076\tan indoor TV antenna; consists of two extendible rods that form a V\nn04037220\ta hutch for rabbits\nn04037298\ta small sloop having the keep of a knockabout but with finer lines and carrying more sail\nn04037443\ta fast car that competes in races\nn04037873\ta canal for a current of water\nn04037964\ta boat propelled by oarsmen and designed for racing\nn04038231\ta light narrow racing boat for two or more oarsmen\nn04038338\ta shell for a single oarsman\nn04038440\ta support for displaying various articles\nn04038727\tframework for holding objects\nn04039041\tan instrument of torture that stretches or disjoints or mutilates victims\nn04039209\ta wheel gear (the pinion) meshes with a toothed rack; converts rotary to reciprocating motion (and vice versa)\nn04039381\ta sports implement (usually consisting of a handle and an oval frame with a tightly interlaced network of strings) used to strike a ball (or shuttlecock) in various games\nn04039742\tthe ball used in playing the game of racquetball\nn04039848\tmeasuring instrument in which the echo of a pulse of microwave radiation is used to detect and locate distant objects\nn04040247\tpneumatic tire that has radial-ply casing\nn04040373\tan internal-combustion engine having cylinders arranged radially around a central crankcase\nn04040540\ta pyrometer for estimating the temperature of distant sources of heat; radiation is focussed on a thermojunction connected in circuit with a galvanometer\nn04040759\ta mechanism consisting of a metal honeycomb through which hot fluids circulate; heat is transferred from the fluid through the honeycomb to the airstream that is created either by the motion of the vehicle or by a 
fan\nn04041069\theater consisting of a series of pipes for circulating steam or hot water to heat rooms or buildings\nn04041243\tcap on the opening in the top of a radiator through which a coolant liquid can be added\nn04041408\ta flexible hose between the radiator and the engine block\nn04041544\ta communication system based on broadcasting electromagnetic waves\nn04041747\tomnidirectional antenna comprising the part of a radio receiver by means of which radio signals are received\nn04042076\ta chassis for a radio receiver\nn04042204\ta direction finder that gives a bearing by determining the direction of incoming radio signals\nn04042358\ta photographic image produced on a radiosensitive surface by radiation other than visible light (especially by X-rays or gamma rays)\nn04042632\tradio telescope that uses interference patterns from two antennas instead of a parabolic antenna\nn04042795\ta two-way radio communication system (usually microwave); part of a more extensive telecommunication network\nn04042985\tmeter to detect and measure radiant energy (electromagnetic or acoustic)\nn04043168\tradiometer that is extremely sensitive\nn04043411\telectronic equipment consisting of a combination of a radio receiver and a record player\nn04043733\tan electronic receiver that detects and demodulates and amplifies transmitted signals\nn04044307\tthe use of radio to send telegraphic messages (usually by Morse code)\nn04044498\ta telephone that communicates by radio waves rather than along cables\nn04044716\tastronomical telescope that picks up electromagnetic radiations in the radio-frequency range from extraterrestrial sources\nn04044955\tequipment used to treat diseases with x-rays or radioactivity\nn04045085\ttransmitter that is the part of a radio system that transmits signals\nn04045255\ta housing for a radar antenna; transparent to radio waves\nn04045397\ta flat float (usually made of logs or planks) that can be used for transport or as a platform for 
swimmers\nn04045644\tone of several parallel sloping beams that support a roof\nn04045787\ta foundation (usually on soft ground) consisting of an extended layer of reinforced concrete\nn04045941\ta small piece of cloth or paper\nn04046091\ta bag in which rags are kept\nn04046277\ta garment (coat or sweater) that has raglan sleeves\nn04046400\ta sleeve that extends in one piece to the neckline of a coat or sweater with seams from the armhole to the neck\nn04046590\ta horizontal bar (usually of wood or metal)\nn04046974\ta fence (usually made of split logs laid across each other at an angle)\nn04047139\ta railroad depot in a theater of operations where military supplies are unloaded for distribution\nn04047401\ta barrier consisting of a horizontal bar and supports\nn04047733\tmaterial for making rails or rails collectively\nn04047834\ta bed on which railroad track is laid\nn04048441\ta tunnel through which the railroad track runs\nn04049303\ta barrel used as a cistern to hold rainwater\nn04049405\ta water-resistant coat\nn04049585\tgauge consisting of an instrument to measure the quantity of precipitation\nn04049753\ta percussion instrument that is made from a dried cactus branch that is hollowed out and filled with small pebbles and capped at both ends; makes the sound of falling rain when tilted; origin was in Chile where tribesmen used it in ceremonies to bring rain\nn04050066\ta long-handled tool with a row of teeth at its head; used to move leaves or loosen soil\nn04050313\tthe handle of a rake\nn04050600\t(computer science) a virtual drive that is created by setting aside part of the random-access memory to use as if it were a group of sectors\nn04050933\ta small fireproof dish used for baking and serving individual portions\nn04051269\ta simple type of jet engine; must be launched at high speed\nn04051439\ta tool for driving something with force\nn04051549\tan inclined surface connecting two levels\nn04051705\tan arch whose support is higher on one side than 
on the other\nn04051825\tan embankment built around a space for defensive purposes\nn04052235\ta rod used to ram the charge into a muzzle-loading firearm\nn04052346\ta rod used to clean the barrel of a firearm\nn04052442\tfarm consisting of a large tract of land along with facilities needed to raise livestock (especially cattle)\nn04052658\ta one story house with a low pitched roof\nn04052757\tthe most common computer memory which can be used by programs to perform necessary tasks while the computer is on; an integrated circuit memory chip allows information to be stored or accessed in any order and all storage locations are equally accessible\nn04053508\ta measuring instrument (acoustic or optical or electronic) for finding the distance of an object\nn04053677\texhaust hood over a kitchen range\nn04053767\tsurveying instrument consisting of a straight rod painted in bands of alternate red and white each one foot wide; used for sightings by surveyors\nn04054361\ta straight sword with a narrow blade and two edges\nn04054566\t(plural) rare collector's items\nn04054670\ta coarse file with sharp pointed projections\nn04055180\tmechanical device consisting of a toothed wheel or rack engaged with a pawl that permits it to move in only one direction\nn04055447\ttoothed wheel held in place by a pawl or detent and turned by a lever\nn04055700\ta tavern below street level featuring beer; originally a German restaurant in the basement of city hall\nn04055861\t(nautical) a small horizontal rope between the shrouds of a sailing ship; they form a ladder for climbing aloft\nn04056073\ta thin round file shaped like the tail of a rat\nn04056180\ta switch made from the stems of the rattan palms\nn04056413\ta trap for catching rats\nn04056932\ta synthetic silklike fabric\nn04057047\tedge tool used in shaving\nn04057215\ta blade that has very sharp edge\nn04057435\ta jet or rocket engine based on a form of aerodynamic propulsion in which the vehicle emits a high-speed 
stream\nn04057673\ta turbine with blades arranged to develop torque from gradual decrease of steam pressure from inlet to exhaust\nn04057846\tan electrical device used to introduce reactance into a circuit\nn04057981\ta lamp that provides light for reading\nn04058096\ta room set aside for reading\nn04058239\t(computer science) memory whose contents can be accessed and read but cannot be changed\nn04058486\ta memory chip providing read-only memory\nn04058594\tan electronic device the displays information is a visual form\nn04058721\t(computer science) a tiny electromagnetic coil and metal pole used to write and read magnetic patterns on a disk\nn04059157\tready-made clothing\nn04059298\tthe main memory in a virtual memory system\nn04059399\ta drill that is used to shape or enlarge holes\nn04059516\ta squeezer with a conical ridged center that is used for squeezing juice from citrus fruit\nn04059947\tcar mirror that reflects the view out of the rear window\nn04060198\tan alcohol thermometer calibrated in degrees Reaumur\nn04060448\ta long woolen or linen scarf covering the head and shoulders (also used as a sling for holding a baby); traditionally worn by Latin-American women\nn04060647\tset that receives radio or tv signals\nn04060904\ta container that is used to put or keep things in\nn04061681\ta counter (as in a hotel) where guests are received\nn04061793\ta room for receiving and entertaining visitors (as in a private house or hotel)\nn04061969\tan enclosure that is set back or indented\nn04062179\tan internal-combustion engine in which the crankshaft is turned by pistons moving up and down in cylinders\nn04062428\tan armchair whose back can be lowered and foot can be raised to allow the sitter to recline in it\nn04062644\ta military airplane used to gain information about an enemy\nn04062807\tfast armored military vehicle with four-wheel drive and open top\nn04063154\tan automatic mechanical device on a record player that causes new records to be played without 
manual intervention\nn04063373\tequipment for making records\nn04063868\ta storage device on which information (sounds or images) have been recorded\nn04064213\taudio system for recoding sound\nn04064401\tmachine in which rotating records cause a stylus to vibrate and the vibrations are amplified acoustically or electronically\nn04064747\ta sleeve for storing a phonograph record\nn04064862\ta hospital room for the care of patients immediately after surgery\nn04065272\ta motorized wheeled vehicle used for camping or other recreational activities\nn04065464\ta room equipped for informal entertaining\nn04065789\ta bin for depositing things to be recycled\nn04065909\ta plant for reprocessing used or abandoned materials\nn04066023\t(British informal) a provincial British university of relatively recent founding; distinguished from Oxford University and Cambridge University\nn04066270\ta strip of red carpeting laid down for dignitaries to walk on\nn04066388\tan entrenched stronghold or refuge\nn04066476\t(military) a temporary or supplementary fortification; typically square or polygonal without flanking defenses\nn04066767\tgearing that reduces an input speed to a slower output speed\nn04067143\torgan pipe with a vibrating reed\nn04067231\tan organ stop with the tone of a reed instrument\nn04067353\ta square knot used in a reef line\nn04067472\twinder consisting of a revolving spool with a handle; attached to a fishing rod\nn04067658\ta roll of photographic film holding a series of frames to be projected by a movie projector\nn04067818\ta communal dining-hall (usually in a monastery)\nn04067921\ta long narrow dining table supported by a stretcher between two trestles\nn04068441\tan industrial plant for purifying a crude substance\nn04068601\toptical telescope consisting of a large concave mirror that produces an image that is magnified by the eyepiece\nn04069166\ta meter that measures the reflectance of a surface\nn04069276\tdevice that reflects 
radiation\nn04069434\tcamera that allows the photographer to view and focus the exact scene being photographed\nn04069582\tcondenser such that vapor over a boiling liquid is condensed and flows back into the vessel to prevent its contents from boiling dry\nn04069777\tcorrectional institution for the detention and discipline and training of young or first offenders\nn04070003\tan apparatus that reforms the molecular structure of hydrocarbons to produce richer fuel\nn04070207\toptical telescope that has a large convex lens that produces an image that is viewed through the eyepiece\nn04070415\tmeasuring instrument for measuring the refractive index of a substance\nn04070545\ta cooling system for chilling or freezing (usually for preservative purposes)\nn04070727\twhite goods in which food can be stored at low temperatures\nn04070964\ta freight car that is equipped with refrigeration system\nn04071102\ta shelter from danger or hardship\nn04071263\tparaphernalia indicative of royalty (or other high office)\nn04071393\tthe military uniform and insignia of a regiment\nn04072193\tany of various controls or devices for regulating or controlling fluid flow, pressure, temperature, etc.\nn04072551\tone of a pair of long straps (usually connected to the bit or the headpiece) used to control a horse\nn04072960\telectrical device such that current flowing through it in one circuit can switch on and off a current in a second circuit\nn04073425\ta device that when pressed will release part of a mechanism\nn04073948\tresidence that is a place of religious seclusion (such as a monastery)\nn04074185\ta container where religious relics are stored or displayed (especially relics of saints)\nn04074963\ta device that can be used to control a machine or apparatus from a distance\nn04075291\ta terminal connected to a computer by a data link\nn04075468\ta hard disk that can be removed from the disk drive; removal prevents unauthorized use\nn04075715\ta coat of stucco applied to a masonry 
wall\nn04075813\ta fabric with prominent rounded crosswise ribs\nn04075916\ta shop specializing in repairs and maintenance\nn04076052\t(electronics) electronic device that amplifies a signal before transmitting it again\nn04076284\ta firearm that can fire several rounds without reloading\nn04076713\ta burial vault (usually for some famous person)\nn04077430\tan audio system that can reproduce and amplify signals to produce sound\nn04077594\tcannon that provides plate armor for the upper arm\nn04077734\tequipment used to rescue passengers in case of emergency\nn04077889\ta center where research is done\nn04078002\ta network of fine lines used by astronomers as a reference for measurements on star photographs\nn04078574\ttank used for collecting and storing a liquid (as water or oil)\nn04078955\tdevice for resetting instruments or controls\nn04079106\ta push button that you press to activate the reset mechanism\nn04079244\tthe official house or establishment of an important person (as a sovereign or president)\nn04079603\tpyrometer that measures high temperatures by the resistance in a heated wire\nn04079933\tan electrical device that resists the flow of electrical current\nn04080138\tany system that resonates\nn04080454\ta hollow chamber whose dimensions allow the resonant oscillation of electromagnetic or acoustic waves\nn04080705\ta fashionable hotel usually in a resort area\nn04080833\ta breathing device for administering long-term artificial respiration\nn04081281\ta building where people go to eat\nn04081699\ta building used for shelter by travelers (especially in areas where there are no hotels)\nn04081844\ta device that retards something's motion\nn04082344\ta breathing apparatus used for resuscitation by forcing oxygen into the lungs of a person who has undergone asphyxia or arrest of respiration\nn04082562\ta dental appliance that holds teeth (or a prosthesis) in position after orthodontic treatment\nn04082710\ta wall that is built to resist lateral 
pressure (especially a wall built to prevent the advance of a mass of earth)\nn04082886\ta network of fine lines, dots, cross hairs, or wires in the focal plane of the eyepiece of an optical instrument\nn04083113\tan arrangement resembling a net or network\nn04083309\ta woman's drawstring handbag; usually made of net or beading or brocade; used in 18th and 19th centuries\nn04083649\ta vessel where substances are distilled or decomposed by heat\nn04083800\tsurgical instrument that holds back the edges of a surgical incision\nn04084517\tthe key on electric typewriters or computer keyboards that causes a carriage return and a line feed\nn04084682\ta furnace in which the material that is being treated is heated indirectly by flames that are directed at the roof and walls of the furnace\nn04084889\ta lapel on a woman's garment; turned back to show the reverse side\nn04085017\tthe gears by which the motion of a machine can be reversed\nn04085574\ta garment (especially a coat) that can be worn inside out (with either side of the cloth showing)\nn04085873\ta facing (usually masonry) that supports an embankment\nn04086066\ta barrier against explosives\nn04086273\ta pistol with a revolving cylinder (usually having six chambers for bullets)\nn04086446\ta door consisting of four orthogonal partitions that rotate about a central pivot; a door designed to equalize the air pressure in tall buildings\nn04086663\tan instrument for measuring the flow of liquids (especially arterial blood)\nn04086794\tresistor for regulating current\nn04086937\tmedical instrument consisting of a mirror mounted at an angle on a rod; used to examine the nasal passages (through the nasopharynx)\nn04087126\tsupport resembling the rib of an animal\nn04087432\ta ribbon used as a decoration\nn04087709\tvault that resembles a groined vault but has ribbed arches\nn04087826\ta framework of ribs\nn04088229\tbuilding complex in a continuous row along a road\nn04088343\ta type of pliers\nn04088441\ta kitchen 
utensil used for ricing soft foods by extruding them through small holes\nn04088696\ta coarse sieve (as for gravel)\nn04088797\ta mechanical device that you ride for amusement or excitement\nn04089152\ta beam laid along the edge where two sloping sides of a roof meet at the top; provides an attachment for the upper ends of rafters\nn04089376\teither of a pair of lifelines running alongside the bowsprit of a ship\nn04089666\ta boot without laces that is worn for riding horses; part of a riding habit\nn04089836\ta short whip with a thong at the end and a handle for opening gates\nn04089976\ta power mower you can ride on\nn04090263\ta shoulder firearm with a long barrel and a rifled bore\nn04090548\ta bullet designed to be fired from a rifle; no longer made spherical in shape\nn04090781\ta grenade that is thrown from a launching device attached to the barrel of a rifle\nn04091097\tgear (including necessary machinery) for a particular enterprise\nn04091466\ta long slender pointed sable brush used by artists\nn04091584\ta sailing vessel with a specified rig\nn04091693\tgear consisting of ropes etc. 
supporting a ship's masts and sails\nn04092168\ta person's costume (especially if bizarre)\nn04093157\ta small ring\nn04093223\tgymnastic apparatus consisting of a pair of heavy metal circles (usually covered with leather) suspended by ropes; used for gymnastic exercises\nn04093625\tbuilding that contains a surface for ice skating or roller skating\nn04093775\ta firearm designed to disperse rioters rather than to inflict serious injury or death\nn04093915\ta cord that is pulled to open a parachute from its pack during a descent\nn04094060\ta cord that is pulled to open the gasbag of a balloon wide enough to release gas and so causes the balloon to descend\nn04094250\ta steel lever with one end formed into a ripping chisel and the other a gooseneck with a claw for pulling nails\nn04094438\ta long chisel with a slightly bent cutting end; used for heavy prying or cleaning mortises\nn04094608\ta handsaw for cutting with the grain of the wood\nn04094720\tstructural member consisting of the vertical part of a stair or step\nn04094859\ta vertical pipe in a building\nn04095109\tan ostentatiously elegant hotel\nn04095210\ta boat used on rivers or to ply a river\nn04095342\theavy pin having a head at one end and the other end being hammered flat after being passed through holes in the pieces that are fastened together\nn04095577\ta machine for driving rivets\nn04095938\tmetal tweezers used by marijuana smokers to hold a roach\nn04096066\tan open way (generally public) for travel or transportation\nn04096733\ta bed supporting a road\nn04096848\ta barrier set up by police to stop traffic on a street or road in order to catch a fugitive or inspect traffic etc.\nn04097085\tan inn (usually outside city limits on a main road) providing meals and liquor and dancing and (sometimes) gambling\nn04097373\tan open automobile having a front seat and a rumble seat\nn04097622\ta road (especially that part of a road) over which vehicles travel\nn04097760\ta special cooking pan for 
roasting\nn04097866\tany loose flowing garment\nn04098169\tequipment used in robotics\nn04098260\toptical device that produces plane-polarized ultraviolet light\nn04098399\ta drill bit that has hardened rotating rollers\nn04098513\ta curved support that permits the supported object to rock to and fro\nn04098795\ta trough that can be rocked back and forth; used by gold miners to shake auriferous earth in water in order to separate the gold\nn04099003\ta lever pivoted at the center; used especially to push a valve down in an internal-combustion engine\nn04099175\ta jet engine containing its own propellant and driven by reaction propulsion\nn04099429\tany vehicle self-propelled by a rocket engine\nn04099969\ta chair mounted on rockers\nn04100174\ta long thin implement made of metal or wood\nn04100519\tan enclosure for cattle that have been rounded up\nn04101375\tphotographic film rolled up inside a container to protect it from light\nn04101497\ta cylinder that revolves\nn04101701\ta small wheel without spokes (as on a roller skate)\nn04101860\tbandage consisting of a strip of sterile fabric (of variable width) rolled into a cylinder to facilitate application\nn04102037\ta shoe with a line of rollers fixed to the sole\nn04102162\t(trademark) an in-line skate\nn04102285\ta window shade that rolls up out of the way\nn04102406\televated railway in an amusement park (usually with sharp curves and steep inclines)\nn04102618\ta shoe with pairs of rollers fixed to the sole\nn04102760\ta towel with the ends sewn together, hung on a roller\nn04102872\tphotographic film wound on a spool\nn04102962\ta hitch for fastening a line to a spar or another rope\nn04103094\tsteel mill where metal is rolled into sheets and bars\nn04103206\tutensil consisting of a cylinder (usually of wood) with a handle at each end; used to roll out dough\nn04103364\tcollection of wheeled vehicles owned by a railroad or motor carrier\nn04103665\ta woman's foundation garment rolled on to the 
hips\nn04103769\ta dispenser of a liquid cosmetic (such as a deodorant) having a revolving ball as an applicator\nn04103918\ta method of transport (as a ferry or train or plane) that vehicles roll onto at the beginning and roll off of at the destination\nn04104147\t(trademark) a desktop rotary card index with removable cards; usually used for names, addresses, and telephone numbers\nn04104384\ta round arch drawn from a single center\nn04104500\ta building constructed by the ancient Romans\nn04104770\ta one-piece garment for children to wear at play; the lower part is shaped like bloomers\nn04104925\ta screen in a church; separates the nave from the choir or chancel\nn04105068\ta protective covering that covers or forms the top of a building\nn04105438\tprotective covering on top of a motor vehicle\nn04105704\tmaterial used to construct a roof\nn04105893\tan area within a building enclosed by walls and floor and ceiling\nn04107598\ta small private compartment for one on a sleeping car\nn04107743\tlight that provides general illumination for a room\nn04107984\ta shelter with perches for fowl or other birds\nn04108268\ta strong line\nn04108822\ta bridge consisting of ropes\nn04108999\ta ski tow offering only a moving rope to hold onto\nn04110068\tperfume consisting of water scented with oil of roses\nn04110178\tcircular window filled with tracery\nn04110281\ta bag filled with rosin; used by baseball pitchers to improve their grip on the ball\nn04110439\t(computer science) the actuator that moves a read/write head to the proper data track\nn04110654\tan internal-combustion engine in which power is transmitted directly to rotating components\nn04110841\ta printing press for printing from a revolving cylinder\nn04110955\ta mechanism that rotates\nn04111190\ta revolving rod that transmits power or motion\nn04111414\ta restaurant that specializes in roasted and barbecued meats\nn04111531\tan oven or broiler equipped with a rotating spit on which meat cooks as it 
turns\nn04111668\trotating mechanism consisting of an assembly of rotating airfoils\nn04111962\tthe rotating armature of a motor or generator\nn04112147\tthe revolving bar of a distributor\nn04112252\tthe long airfoil that rotates to provide the lift that supports a helicopter in the air\nn04112430\tthe axis around which the major rotor of a helicopter turns\nn04112579\ta large circular room\nn04112654\ta building having a circular plan and a dome\nn04112752\tmakeup consisting of a pink or red powder applied to the cheeks\nn04112921\ta rough preliminary model\nn04113038\ta roll of coins wrapped in paper\nn04113194\ta wheel with teeth for making a row of perforations\nn04113316\tthe ball used to play roulette\nn04113406\tgame equipment consisting of a wheel with slots that is used for gambling; the wheel rotates horizontally and players bet on which slot the roulette ball will stop in\nn04113641\ta charge of ammunition for a single shot\nn04113765\tan arch formed in a continuous curve; characteristic of Roman architecture\nn04113968\ta spherical flask with a narrow neck\nn04114069\tround piece of armor plate that protects the armpit\nn04114301\ta file with a circular cross section; used to file the inside of holes\nn04114428\tworkplace consisting of a circular building for repairing locomotives\nn04114719\ta power tool with a shaped cutter; used in carpentry for cutting grooves\nn04114844\t(computer science) a device that forwards data packets between computer networks\nn04114996\ta woodworking plane with a narrow cutting head that will make grooves with smooth bottoms\nn04115144\ta small spiked wheel at the end of a spur\nn04115256\ta house that is one of a row of identical houses situated side by side and sharing common walls\nn04115456\ta rowboat\nn04115542\tan arch that is formed with more than one concentric row of voussoirs\nn04115802\ta sail set next above the topgallant on a royal mast\nn04115996\ttopmast immediately above the topgallant mast\nn04116098\ta 
narrow band of elastic rubber used to hold things (such as papers) together\nn04116294\ta high boot made of rubber\nn04116389\ta bullet made of hard rubber; designed for use in crowd control\nn04116512\tan eraser made of rubber (or of a synthetic material with properties similar to rubber); commonly mounted at one end of a pencil\nn04117216\t(nautical) steering mechanism consisting of a hinged vertical plate mounted at the stern of a vessel\nn04117464\ta hinged vertical airfoil mounted at the tail of an aircraft and used to make horizontal course changes\nn04117639\tthe vertical blade on a rudder\nn04118021\tfloor covering consisting of a piece of thick heavy fabric (usually with nap or pile)\nn04118538\tinflated oval ball used in playing rugby\nn04118635\ta ruined building\nn04118776\tmeasuring stick consisting of a strip of wood or metal or plastic with a straight edge that is used for drawing straight lines and measuring lengths\nn04119091\ta servant's seat (or luggage compartment) in the rear of a carriage\nn04119230\ta folding outside seat in the back of some early cars\nn04119360\ta large drinking glass (ovoid bowl on a stem) for drinking toasts\nn04119478\ta recreation room for noisy activities (parties or children's play etc)\nn04119630\ta fork-like spoon with a cutting edge; coined by Edward Lear\nn04119751\tone of the crosspieces that form the steps of a ladder\nn04120489\ta light comfortable shoe designed for running\nn04120695\ta matching jacket and pants worn by joggers and made of fabric that absorbs perspiration\nn04120842\ta strip of level paved surface where planes can take off and land\nn04121228\ta tallow candle with a rush stem as the wick\nn04121342\ta reddish brown homespun fabric\nn04121426\ta shag rug made in Sweden\nn04121511\ta fencing sword with a v-shaped blade and a slightly curved handle\nn04121728\ta portable power saw with a reciprocating blade; can be used with a variety of blades depending on the application and kind of cut; 
generally have a plate that rides on the surface that is being cut\nn04122262\ta scarf (or trimming) made of sable\nn04122349\tan artist's brush made of sable hairs\nn04122492\ta fur coat made of sable furs\nn04122578\ta shoe carved from a single block of wood\nn04122685\ta small soft bag containing perfumed powder; used to perfume items in a drawer or chest\nn04122825\ta bag made of paper or plastic for holding customer's purchases\nn04123026\ta woman's full loose hiplength jacket\nn04123123\ta medieval musical instrument resembling a trombone\nn04123228\ta coarse cloth resembling sacking\nn04123317\ta garment made of coarse sacking; formerly worn as an indication of remorse\nn04123448\tman's hiplength coat with a straight back; the jacket of a suit\nn04123567\tcoarse fabric used for bags or sacks\nn04123740\ta seat for the rider of a horse or camel\nn04124098\ta large bag (or pair of bags) hung over a saddle\nn04124202\tstable gear consisting of a blanket placed under the saddle\nn04124370\tan oxford with a saddle of contrasting color\nn04124488\tworkshop where a saddler works\nn04124573\ta chair seat that is slightly concave and sometimes has a thickened ridge in the center\nn04124887\ta decorative overcast or running stitch, especially in a contrasting color\nn04125021\tstrongbox where valuables can be safely kept\nn04125116\ta ventilated or refrigerated cupboard for securing provisions from pests\nn04125257\ta fireproof metal strongbox (usually in a bank) for storing valuables\nn04125541\ta house used as a hiding place or refuge by members of certain organizations\nn04125692\tan undecorated arch that is included in order to strengthen or support a construction\nn04125853\tbelt attaching you to some object as a restraint in order to prevent you from getting hurt\nn04126066\tbicycle that has two wheels of equal size; pedals are connected to the rear wheel by a multiplying gear\nn04126244\ta bolt that cannot be moved from outside the door or gate\nn04126541\ta 
fireproof theater curtain to be dropped in case of fire\nn04126659\ta slow-burning fuse consisting of a tube or cord filled or saturated with combustible matter; used to ignite detonators from a distance\nn04126852\tan oil lamp that will not ignite flammable gases (methane)\nn04126980\ta paper match that strikes only on a specially prepared surface\nn04127117\ta large strong net to catch circus acrobats who fall or jump from a trapeze\nn04127249\ta pin in the form of a clasp; has a guard so the point of the pin will not stick the user\nn04127395\ta railing placed alongside a stairway or road for safety\nn04127521\ta razor with a guard to prevent deep cuts in the skin\nn04127633\ta valve in a container in which pressure can build up (as a steam boiler); it opens automatically when the pressure reaches a dangerous level\nn04127904\ta large piece of fabric (usually canvas fabric) by means of which wind is used to propel a sailing vessel\nn04128413\tany structure that resembles a sail\nn04128499\ta small sailing vessel; usually with a single mast\nn04128710\ta strong fabric (such as cotton canvas) used for making sails and tents\nn04128837\ta vessel that is powered by the wind; often having several masts\nn04129490\ta warship that was powered by sails and equipped with many heavy guns; not built after the middle of the 19th century\nn04129688\ta cap worn by sailors\nn04129766\ta boy's ensemble; copied from a sailor's uniform\nn04130143\ta bar where diners can assemble a salad to their own taste\nn04130257\ta large bowl for mixing and serving a salad\nn04130566\ta hydrometer that determines the concentration of salt solutions by measuring their density\nn04130907\ta light medieval helmet with a slit for vision\nn04131015\telegant sitting room where guests are received\nn04131113\tgallery where works of art can be displayed\nn04131208\ta shop where hairdressers and beauticians work\nn04131368\ta type of house built in New England; has two stories in front and one 
behind\nn04131499\ta small container for holding salt at the dining table\nn04131690\ta shaker with a perforated top for sprinkling salt\nn04131811\ta plant where salt is produced commercially\nn04131929\ta tray (or large plate) for serving food or drinks; usually made of silver\nn04132158\ta pair of light loose trousers with a tight fit around the ankles; worn by women from the Indian subcontinent (usually with a kameez)\nn04132465\tleather belt supported by a strap over the right shoulder\nn04132603\ta Japanese stringed instrument resembling a banjo with a long neck and three strings and a fretted fingerboard and a rectangular soundbox; played with a plectrum\nn04132829\ta heavy silk fabric (often woven with silver or gold threads); used to make clothing in the Middle Ages\nn04132985\ta metal urn with a spigot at the base; used in Russia to boil water for tea\nn04133114\tan Asian skiff usually propelled by two oars\nn04133789\ta shoe consisting of a sole fastened by straps to the foot\nn04134008\ta bag filled with sand; used as a weapon or to build walls or as ballast\nn04134170\ta tool that throws out a blast of steam laden with sand; used to clean or grind hard surfaces\nn04134523\tmold consisting of a box with sand shaped to mold metal\nn04134632\ttimepiece in which the passage of time is indicated by the flow of sand from one transparent container to another through a narrow passage\nn04135024\ta wedge used to get out of sand traps\nn04135118\tsignboard consisting of two hinged boards that hang front and back from the shoulders of a walker and are used to display advertisements\nn04135315\ta disposable absorbent pad (trade name Kotex); worn to absorb menstrual flow\nn04135710\ta thin plastic film made of saran (trade name Saran Wrap) that sticks to itself; used for wrapping food\nn04135933\ta fine soft silk fabric often used for linings\nn04136045\ta stone coffin (usually bearing sculpture or inscriptions)\nn04136161\ta dress worn primarily by Hindu women; 
consists of several yards of light material that is draped around the body\nn04136333\ta loose skirt consisting of brightly colored fabric wrapped around the body; worn by both women and men in the South Pacific\nn04136510\ta framework that holds the panes of a window in the window frame\nn04136800\ta lock attached to the sashes of a double hung window that can fix both in the shut position\nn04137089\ta window with (usually two) sashes that slide vertically to let in air\nn04137217\tluggage consisting of a small case with a flat bottom and (usually) a shoulder strap\nn04137355\ta cotton fabric with a satiny finish\nn04137444\tman-made equipment that orbits around the earth or the moon\nn04137773\ta receiver on a communications satellite\nn04137897\ta television system in which the signal is transmitted to an orbiting satellite that receives the signal and amplifies it and transmits it back to earth\nn04138131\ta transmitter on a communications satellite\nn04138261\ta smooth fabric of silk or rayon; has a glossy face and a dull back\nn04138869\ta cheap handgun that is easily obtained\nn04138977\ta deep pan with a handle; used for stewing or boiling\nn04139140\ta cooking pot that has handles on either side and tight fitting lid; used for stewing or boiling\nn04139395\ta Finnish steam bath; steam is produced by pouring water over heated rocks\nn04139859\ta container (usually with a slot in the top) for keeping money at home\nn04140064\thand tool having a toothed blade for cutting\nn04140539\ta shotgun with short barrels\nn04140631\ta framework for holding wood that is being sawed\nn04140777\ta large sawing machine\nn04140853\ta tool used to bend each alternate sawtooth at a slight angle outward\nn04141076\ta single-reed woodwind with a conical bore\nn04141198\tany of a family of brass wind instruments that resemble a bugle with valves\nn04141327\ta sheath for a sword or dagger or bayonet\nn04141712\ta system of scaffolds\nn04141838\tan indicator having a graduated 
sequence of marks\nn04141975\ta measuring instrument for weighing; shows amount of mass\nn04142175\tan electronic pulse counter used to count pulses that occur too rapidly to be recorded individually\nn04142327\ta ladder used to scale walls (as in an attack)\nn04142434\ta thin straight surgical knife used in dissection and surgery\nn04142731\ta radio receiver that moves automatically across some selected range of frequencies looking for some signal or condition\nn04142999\ta radar dish that rotates or oscillates in order to scan a broad area\nn04143140\tan electronic device that generates a digital representation of an image for data input to a computer\nn04143365\tan upright in house framing\nn04143897\ta garment worn around the head or neck or shoulders for warmth or decoration\nn04144241\ta joint made by notching the ends of two pieces of timber or metal so that they will lock together end-to-end\nn04144539\ta small rug; several can be used in a room\nn04144651\ta graver used to scoop out broad areas when engraving wood or metal\nn04145863\treflecting telescope that has plate that corrects for aberration so a wide area of sky can be photographed\nn04146050\ta building where young people receive education\nn04146343\ta bag for carrying school books and supplies\nn04146504\ta bell rung to announce beginning or ending of class\nn04146614\ta bus used to transport children to or from school\nn04146862\ta ship used to train students as sailors\nn04146976\testablishment including the plant and equipment for providing education from kindergarten through high school\nn04147183\tsailing vessel used in former times\nn04147291\ta large beer glass\nn04147495\tan instrument used by scientists\nn04147793\ta curved oriental saber; the edge is on the convex side of the blade\nn04147916\tcounter tube in which light flashes when exposed to ionizing radiation\nn04148054\tan edge tool having two crossed pivoting blades\nn04148285\ta measuring instrument that measures the hardness of 
materials by penetrating them with a stylus that has a diamond point\nn04148464\tan arch that supports part of the wall\nn04148579\ta decorative wall bracket for holding candles or other sources of light\nn04148703\ta candle or flaming torch secured in a sconce\nn04149083\ta large ladle\nn04149374\tchild's two-wheeled vehicle operated by foot\nn04149813\ta large board for displaying the score of a contest (and some other information)\nn04150153\ta small abrasive cleaning pad used for scouring pots and pans\nn04150273\ta barge carrying bulk materials in an open hold\nn04150371\tany of various flat-bottomed boats with sloping ends\nn04150980\tany of various hand tools for scraping\nn04151108\ta device used for scratching\nn04151581\ta protective covering consisting of netting; can be mounted in a frame\nn04151940\ta covering that serves to conceal or shelter something\nn04152387\tpartition consisting of a decorative frame or panel that serves to divide a space\nn04152593\tthe display that is electronically created on the surface of the large end of a cathode-ray tube\nn04153025\ta door that consists of a frame holding metallic or plastic netting; used to allow ventilation and to keep insects from entering a building through the open door\nn04153330\tfabric of metal or plastic mesh\nn04153751\ta fastener with a tapered threaded shank and a slotted head\nn04154152\ta propeller with several angled blades that rotates to push against water or air\nn04154340\ta simple machine of the inclined-plane type consisting of a spirally threaded cylindrical rod that engages with a similarly threaded hole\nn04154565\ta hand tool for driving screws; has a tip that fits into the head of a screw\nn04154753\ta woodscrew having its shank bent into a ring\nn04154854\ta wrench for turning a screw\nn04154938\tthe raised helical rib going around a screw\nn04155068\tthe top of a container that must be screwed off and on\nn04155177\tadjustable wrench that has one jaw that can be adjusted by 
turning a screw\nn04155457\ta sharp-pointed awl for marking wood or metal to be cut\nn04155625\ta firm open-weave fabric used for a curtain in the theater\nn04155735\ta carving (or engraving) on whalebone, whale ivory, walrus tusk, etc., usually by American whalers\nn04155889\ta room in a monastery that is set aside for writing or copying manuscripts\nn04156040\ta purifier that removes impurities from a gas\nn04156140\ta brush with short stiff bristles for heavy cleaning\nn04156297\ta narrow woodworking plane used to cut away excess stock\nn04156411\ta lightweight flexible sandal with a sturdy sole; worn as play shoes by children and as sportswear by adults\nn04156591\ta hoe that is used by pushing rather than pulling\nn04156814\teach of a pair of short oars that are used by a single oarsman\nn04156946\ta long oar that is mounted at the stern of a boat and moved left and right to propel the boat forward\nn04157099\ta small room (in large old British houses) next to the kitchen; where kitchen utensils are cleaned and kept and other rough household jobs are done\nn04157320\ta three-dimensional work of plastic art\nn04158002\tcontainer for coal; shaped to permit pouring the coal onto the fire\nn04158138\tan ancient Greek drinking cup; two handles and footed base\nn04158250\tan edge tool for cutting grass; has a long handle that must be held with both hands and a curved blade that moves parallel to the ground\nn04158672\ta cylindrical drawstring bag used by sailors to hold their clothing and other gear\nn04158807\ta boat that is seaworthy; that is adapted to the open seas\nn04158956\ta sailor's storage chest for personal property\nn04160036\tfastener consisting of a resinous composition that is plastic when warm; used for sealing documents and parcels and letters\nn04160261\ta garment (as a jacket or coat or robe) made of sealskin\nn04160372\tjoint consisting of a line formed by joining two pieces\nn04160586\tan airplane that can land on or take off from 
water\nn04160847\ta light source with reflectors that projects a beam of light in a particular direction\nn04161010\ta hot iron used to destroy tissue\nn04161358\tany support where you can sit (especially the part of a chair or bench etc. on which you sit)\nn04161981\tfurniture that is designed for sitting on\nn04162433\tthe cloth covering for the buttocks\nn04162706\ta safety belt used in a car or plane to hold you in your seat in case of an accident\nn04163530\tsmall pruning shears with a spring that holds the handles open and a single blade that closes against a flat surface\nn04164002\tcoil such that current is induced in it by passing a current through the primary coil\nn04164199\trearmost or uppermost area in the balcony containing the least expensive seats\nn04164406\tthe base that must be touched second by a base runner in baseball\nn04164757\thand marking seconds on a timepiece\nn04164868\ta desk used for writing\nn04165409\ta piece of furniture made up of sections that can be arranged individually or together\nn04165675\ta blanket (or toy) that a child carries around in order to reduce anxiety\nn04165945\tan electrical device that sets off an alarm when someone tries to break in\nn04166111\t(computing) a system that enforces boundaries between computer networks\nn04166281\ta car that is closed and that has front and rear seats and two or four doors\nn04166436\ta closed litter for one passenger\nn04167346\ta mechanical device that sows grass seed or grain evenly over the ground\nn04167489\ta missile equipped with a device that is attracted toward some kind of emission (heat or light or sound or radio waves)\nn04167661\ta light puckered fabric (usually striped)\nn04168084\ta shallow arch; an arch that is less than a semicircle\nn04168199\t(trademark) a self-balancing personal transportation device with two wheels; can operate in any level pedestrian environment\nn04168472\ta glass for beer\nn04168541\ta large fishnet that hangs vertically, with floats at 
the top and weights at the bottom\nn04168840\ta measuring instrument for detecting and measuring the intensity and direction and duration of movements of the ground (as an earthquake)\nn04169437\ta switch that is used to select among alternatives\nn04169597\ta photoelectric cell that uses a strip of selenium\nn04170037\ta wheeled vehicle that carries in itself a means of propulsion\nn04170384\ta thermometer that records the temperature automatically\nn04170515\tan electric starting motor that automatically starts an internal-combustion engine\nn04170694\ta system consisting of a generator and a motor so connected that the motor will assume the same relative position as the generator; the generator and the motor are synchronized\nn04170933\tthe edge of a fabric that is woven so that it will not ravel or fray\nn04171208\tan apparatus for visual signaling with lights or mechanically moving arms\nn04171459\tan autoloader that fires only one shot at each pull of the trigger\nn04171629\ta pistol that is a semiautomatic firearm capable of loading and firing continuously\nn04171831\ta conductor made with semiconducting material\nn04172107\ta dwelling that is attached to something on only one side\nn04172230\ta paint that dries with a finish between glossy and flat\nn04172342\ta trailer having wheels only in the rear; the front is supported by the towing vehicle\nn04172512\tflat braided cordage that is used on ships\nn04172607\ta measuring instrument for measuring the light sensitivity of film over a range of exposures\nn04172776\ta small shelter with an open front to protect a sentry from the weather\nn04172904\ta garment that can be purchased separately and worn in combinations with other garments\nn04173046\tlarge tank where solid matter or sewage is disintegrated by bacteria\nn04173172\tfilm consisting of a succession of related shots that develop a given subject in a movie\nn04173511\t(chemistry) an apparatus that can determine the sequence of monomers in a 
polymer\nn04173907\ta long brightly colored shawl; worn mainly by Mexican men\nn04174026\ta twilled woolen fabric\nn04174101\ta sewing machine that overcasts the raw edges of a fabric with a V-shaped stitch\nn04174234\tan interface (commonly used for modems and mice and some printers) that transmits data a bit at a time\nn04174500\tan obsolete bass cornet; resembles a snake\nn04174705\ta single notch in a row of notches\nn04175039\tutensil used in serving food or drink\nn04175147\t(computer science) a computer that provides client stations with access to files and printers as shared resources to a computer network\nn04175574\ta recreational center for servicemen\nn04176068\ta handcart for serving food\nn04176190\ta dish used for serving food\nn04176295\tcontrol system that converts a small mechanical motion into one requiring much greater power; may include a negative feedback system\nn04176528\tany electronic equipment that receives or transmits radio or tv signals\nn04177041\ta gun that is set to fire on any intruder that comes in contact with the wire that sets it off\nn04177329\ta screw (often without a head) that fits into the boss or hub of a wheel or cam etc. 
and prevents motion of the part relative to the shaft on which it is mounted\nn04177545\ta screw that is used to adjust the tension on a spring\nn04177654\ta try square with an adjustable sliding head\nn04177755\ta small sofa\nn04177820\ta long wooden bench with a back\nn04177931\ta center in an underprivileged area that provides community services\nn04178190\ta shellac based phonograph record that played at 78 revolutions per minute\nn04178329\timpressive monuments created in the ancient world that were regarded with awe\nn04178668\ta plant for disposing of sewage\nn04179126\ta waste pipe that carries away sewage or surface water\nn04179712\ta workbasket in which sewing materials can be stored\nn04179824\ta kit of articles used in sewing\nn04179913\ta textile machine used as a home appliance for sewing\nn04180063\ta needle used in sewing to pull thread through cloth\nn04180229\ta room set aside for sewing\nn04180888\ta measuring instrument for measuring the angular distance between celestial objects; resembles an octant\nn04181083\ta ceramic or mural decoration made by scratching off a surface layer to reveal the ground\nn04181228\ta restraint that confines or restricts freedom (especially something used to tie down or restrain a prisoner)\nn04181561\ta U-shaped bar; the open end can be passed through chain links and closed with a bar\nn04181718\tprotective covering that protects something from direct sunlight\nn04182152\ta shallow rectangular box with a transparent front used to protect and display small items (jewelry, coins, etc.)\nn04182322\ta long rod or pole (especially the handle of an implement or the body of a weapon like a spear or arrow)\nn04183217\ta rug with long pile\nn04183329\ta container in which something can be shaken\nn04183957\tcylinder forming the part of a bolt between the thread and the head\nn04184095\tcylinder forming a long narrow part of something\nn04184316\ta heavy silk fabric with a rough surface (or a cotton imitation)\nn04184435\ta 
machine tool for shaping metal or wood\nn04184600\ta tool for shaping metal\nn04184880\ta smooth crisp fabric\nn04185071\tany implement that is used to make something (an edge or a point) sharper\nn04185529\ta pen with indelible ink that will write on any surface\nn04185804\ta razor powered by an electric motor\nn04185946\ta brush used to apply lather prior to shaving\nn04186051\ttoiletry consisting of a preparation of soap and fatty acids that forms a rich lather for softening the beard before shaving\nn04186268\ttoiletry consisting of a liquid preparation containing many small bubbles that soften the beard before shaving\nn04186455\tcloak consisting of an oblong piece of cloth used to cover the head and shoulders\nn04186624\ta medieval oboe\nn04186848\tlarge scissors with strong blades\nn04187061\ta protective covering (as for a knife or sword)\nn04187233\tprotective covering consisting, for example, of a layer of boards applied to the studs and joists of a building to strengthen it and serve as a foundation for a weatherproof exterior\nn04187547\tan outbuilding with a single story; used for shelter or storage\nn04187751\ta bell hung round the neck of a sheep so that the sheep can be easily located\nn04187885\ta knot for shortening a line\nn04187970\ta coat made of sheepskin\nn04188064\tfarm devoted to raising sheep\nn04188179\tbed linen consisting of a large rectangular piece of cotton or linen cloth; used in pairs\nn04189092\ta hitch used for temporarily tying a rope to the middle of another rope (or to an eye)\nn04189282\tfabric from which bed sheets are made\nn04189651\ta pile in a row of piles driven side by side to retain earth or prevent seepage\nn04189816\ta kind of plasterboard\nn04190052\ta support that consists of a horizontal surface for holding objects\nn04190376\ta bracket to support a shelf\nn04190464\tammunition consisting of a cylindrical metal casing containing an explosive charge and a projectile; fired from a large gun\nn04190747\tthe housing 
or outer covering of something\nn04190997\ta very light narrow racing boat\nn04191150\ta thin varnish made by dissolving lac in ethanol; used to finish wood\nn04191595\ta structure that provides privacy and protection from danger\nn04191943\tprotective covering that provides protection from the weather\nn04192238\ttemporary housing for homeless or displaced persons\nn04192361\ta workshop that offers jobs to members of the physically or developmentally disabled population\nn04192521\ta furniture style that originated in England around 1800; simple in design with straight lines and classical ornamentation\nn04192698\tarmor carried on the arm to intercept blows\nn04192858\ta protective covering or structure\nn04193179\tshield consisting of an arrangement of metal mesh or plates designed to protect electronic equipment from ambient electromagnetic interference\nn04193377\tthe key on the typewriter keyboard that shifts from lower-case letters to upper-case letters\nn04193742\ta cudgel made of hardwood (usually oak or blackthorn)\nn04193883\ta thin wedge of material (wood or metal or stone) for driving into crevices\nn04194009\ta small signboard outside the office of a lawyer or doctor, e.g.\nn04194127\ta stiff protective garment worn by hockey players or a catcher in baseball to protect the shins\nn04194289\ta vessel that carries passengers or freight\nn04196080\ta system designed to work as a coherent entity on board a naval ship\nn04196502\tconveyance provided by the ships belonging to one country or industry\nn04196803\ta room where goods are packaged and shipped\nn04196925\ta shipboard system consisting of an acoustic detection system that is towed behind the ship\nn04197110\ta wrecked ship (or a part of one)\nn04197391\ta garment worn on the upper half of the body\nn04197781\ta button on a shirt\nn04197878\ta dress that is tailored like a shirt and has buttons all the way down the front\nn04198015\tthe front of a shirt (usually the part not covered by a 
jacket)\nn04198233\tany of various fabrics used to make men's shirts\nn04198355\tthe sleeve of a shirt\nn04198453\tfabric forming the tail of a shirt\nn04198562\ta blouse with buttons down the front\nn04198722\ta knife used as a weapon\nn04198797\ta mechanical damper; absorbs energy of sudden impulses\nn04199027\tfootwear shaped to fit the foot (below the ankle) with a flexible upper of leather or plastic and a sole and heel of heavier material\nn04200000\t(card games) a case from which playing cards are dealt one at a time\nn04200258\tan oblong rectangular (usually cardboard) box designed to hold a pair of shoes\nn04200537\ta device used for easing the foot into a shoe\nn04200800\ta shop where shoes are sold\nn04200908\ta wooden or metal device that is inserted into a shoe to preserve its shape when it is not being worn\nn04201064\tan ancient musical horn made from the horn of a ram; used in ancient times by the Israelites to sound a warning or a summons; used in synagogues today on solemn occasions\nn04201297\ta translucent screen made of a wooden frame covered with rice paper\nn04201733\tanother name for a station wagon\nn04202142\ta small country house used by hunters during the shooting season\nn04202282\tdevice that resembles a spiked walking stick but the top opens into a seat\nn04202417\ta mercantile establishment for the retail sale of goods or services\nn04203356\ta bell attached to the door of a small shop; warns the proprietor that a customer has entered the shop\nn04204081\ta bag made of plastic or strong paper (often with handles); used to transport goods after shopping\nn04204238\ta handbasket used to carry goods while shopping\nn04204347\ta handcart that holds groceries or other goods while shopping\nn04204755\taccidental contact between two points in an electric circuit that have a potential difference\nn04205062\tan iron with a short shaft and pitched face; for hitting short high shots\nn04205318\ttrousers that end at or above the 
knee\nn04205505\ta sleeve extending from the shoulder to the elbow\nn04205613\ta diathermy machine that uses short wave radiation as the source of heat\nn04206070\tsports equipment consisting of a heavy metal ball used in the shot put\nn04206225\ta small glass adequate to hold a single swallow of whiskey\nn04206356\tfirearm that is a double-barreled smoothbore shoulder weapon for firing shot at short ranges\nn04206570\ta shell containing lead shot; used in shotguns\nn04206790\ttower of a kind once used to make shot; molten lead was poured through a sieve and dropped into water\nn04207151\tthe part of a garment that covers or fits over the shoulder\nn04207343\ta large handbag that can be carried by a strap looped over the shoulder\nn04207596\tan arch consisting of a horizontal lintel supported at each end by corbels that project into the aperture\nn04207763\ta holster worn over your shoulder so a gun can be concealed under your jacket\nn04207903\tprotective garment consisting of a hard rounded pad worn by football players to protect their shoulders\nn04208065\tpatch worn on the shoulder of a military uniform to indicate rank\nn04208210\ta hand tool for lifting loose material; consists of a curved container or scoop and a handle\nn04208427\ta fire iron consisting of a small shovel used to scoop coals or ashes in a fireplace\nn04208582\ta stiff broad-brimmed hat with the brim turned up at the sides and projecting in front; worn by some clergymen in Britain\nn04208760\ta river steamboat on which theatrical performances could be given (especially on the Mississippi River)\nn04208936\ta plumbing fixture that sprays water over you\nn04209133\ta tight cap worn to keep hair dry while showering\nn04209239\ta curtain that keeps water from splashing out of the shower area\nn04209509\ta room with several showers\nn04209613\tbooth for washing yourself, usually in a bathroom\nn04209811\tan area where merchandise (such as cars) can be displayed\nn04210012\tshell containing lead 
pellets that explodes in flight\nn04210120\ta device that shreds documents (usually in order to prevent the wrong people from reading them)\nn04210288\ta vessel engaged in shrimping\nn04210390\ta place of worship hallowed by association with some sacred thing or person\nn04210591\tthe clinging transparent plastic film that is used to shrinkwrap something\nn04210858\timplant consisting of a tube made of plastic or rubber; for draining fluids within the body\nn04211001\ta conductor having low resistance in parallel with another device to divert a fraction of the current\nn04211219\ta small locomotive used to move cars around but not to make trips\nn04211356\ta hinged blind for a window\nn04211528\ta mechanical device on a camera that opens and closes to control the time of a photographic exposure\nn04211857\tbobbin that passes the weft thread between the warp threads\nn04211970\tpublic transport that consists of a bus or train or airplane that plies back and forth between two points\nn04212165\tshuttle consisting of a bus that travels between two points\nn04212282\tbadminton equipment consisting of a ball of cork or rubber with a crown of feathers\nn04212467\ta helicopter that shuttles back and forth\nn04212810\ta light conical canvas tent erected on a tripod with ventilation at the top\nn04213105\t(nautical) a room for the treatment of the sick or injured (as on a ship)\nn04213264\tthe bed on which a sick person lies\nn04213353\tan edge tool for cutting grass or crops; has a curved blade and a short handle\nn04213530\ta room to which a sick person is confined\nn04214046\ta board that forms part of the side of a bed or crib\nn04214282\tconveyance consisting of a small carrier attached to the side of a motorcycle\nn04214413\ta small chapel off the side aisle of a church\nn04214649\tlight carried by a boat that indicates the boat's direction; vessels at night carry a red light on the port bow and a green light on the starboard bow\nn04215153\ta saddle for a woman; 
rider sits with both feet on the same side of the horse\nn04215402\twalk consisting of a paved area for pedestrians; usually beside a street or roadway\nn04215588\ta wall that forms the side of a structure\nn04215800\ta paddle steamer having a paddle wheel on each side\nn04215910\tair-to-air missile with infrared homing device\nn04216634\ta strainer for separating lumps from powdered material or grading particles\nn04216860\ta household sieve (as for flour)\nn04216963\tan optical instrument for aiding the eye in aiming, as on a firearm or surveying instrument\nn04217387\tan endoscope (a flexible fiberoptic probe) for examining the sigmoid colon\nn04217546\ta building from which signals are sent to control the movements of railway trains\nn04217718\ta device used to send signals\nn04217882\tstructure displaying a board on which advertisements can be posted\nn04218564\ta tubular acoustic device inserted in the exhaust system that is designed to reduce noise\nn04218921\ta small receptacle with a handle and a hinged lid; used for collecting crumbs or ashes\nn04219185\ta vacuum coffee maker\nn04219424\ta fabric made from the fine threads produced by certain insect larvae\nn04219580\tthe brightly colored garments of a jockey; emblematic of the stable\nn04220250\ta cylindrical tower used for storing silage\nn04220805\ta thin layer of silver deposited on something\nn04221076\ta drawing made on specially prepared paper with an instrument having a silver tip (15th and 16th centuries)\nn04221673\ta hypothetical pendulum suspended by a weightless frictionless thread of constant length\nn04221823\ta machine that simulates an environment for the purpose of training or research\nn04222210\ta bed for one occupant\nn04222307\ta jacket having fronts that overlap only enough for a single row of buttons\nn04222470\ta suit having a single-breasted jacket\nn04222723\ta propeller plane with a single propeller\nn04222847\ta beating-reed instrument with a single reed (as a clarinet or 
saxophone)\nn04223066\ta helicopter having a single rotor\nn04223170\ta stick used instead of a sword for fencing\nn04223299\ta collarless men's undergarment for the upper part of the body\nn04224395\tan acoustic device producing a loud often wailing sound as a signal or warning\nn04224543\ta ship that is one of two or more similar ships built at the same time\nn04224842\ta stringed instrument of India; has a long neck and movable frets; has 6 or 7 metal strings for playing and usually 13 resonating strings\nn04225031\ta bathtub in which your buttocks and hips are immersed as if you were sitting in a chair and you bathe in a sitting position\nn04225222\ta carton containing six bottles or cans\nn04225729\tsports equipment that is worn on the feet to enable the wearer to glide along and to be propelled by the alternate actions of the legs\nn04225987\ta board with wheels that is ridden in a standing or crouching position and propelled by foot\nn04226322\ta brace that extends from the rear of the keel to support the rudderpost\nn04226464\tcoils of worsted yarn\nn04226537\tthe internal supporting structure that gives an artifact its shape\nn04226826\ta passkey with much of the bit filed away so that it can open different locks\nn04226962\ta domed beehive made of twisted straw\nn04227050\ta large round wicker basket (used on farms)\nn04227144\tpreliminary drawing for later elaboration\nn04227519\tan implement for sketching\nn04227787\tan arch whose jambs are not at right angles with the face\nn04227900\ta long pin for holding meat in position while it is being roasted\nn04228054\tnarrow wood or metal or plastic runners used in pairs for gliding over snow\nn04228215\tone of a pair of mechanical devices that are attached to a ski and that will grip a ski boot; the bindings should release in case of a fall\nn04228422\ta vehicle resembling a bicycle but having skis instead of wheels; the rider wears short skis for balancing\nn04228581\ta stiff boot that is fastened to a ski 
with a ski binding\nn04228693\ta close-fitting woolen cap; often has a tapering tail with a tassel\nn04229007\ta tractor used to haul logs over rough terrain\nn04229107\ta crash helmet\nn04229480\tany of various small boats propelled by oars or by sails or by a motor\nn04229620\ta steep downward ramp from which skiers jump\nn04229737\ta hotel at a ski resort\nn04229816\ta woolen face mask to protect the face from cold while skiing on snow\nn04229959\ta cooking utensil used to skim fat from the surface of liquids\nn04230387\ta parka to be worn while skiing\nn04230487\tan airplane equipped with skis so it can land on a snowfield\nn04230603\ta pole with metal points used as an aid in skiing\nn04230707\ta carrier for holding skis on top of a vehicle\nn04230808\ta garment hanging from the waist; worn mainly by girls and women\nn04231272\tcloth covering that forms the part of a garment below the waist\nn04231693\ta powered conveyance that carries skiers up a hill\nn04231905\tmen's underwear consisting of cotton T-shirt and shorts\nn04232153\trounded brimless cap fitting the crown of the head\nn04232312\tan elevated box for viewing events at a sports stadium\nn04232437\ta hook that is imagined to be suspended from the sky\nn04232800\ta window in a roof to admit daylight\nn04233027\tthe sail above the royal on a square-rigger\nn04233124\ta very tall building with many stories\nn04233295\tan elevated walkway between buildings (usually enclosed)\nn04233715\t(usually in the plural) pants for casual wear\nn04233832\tcasual dress consisting of slacks and matching jacket\nn04234160\ta weapon (a sword or dagger) used for slashing\nn04234260\ta pocket in a garment (usually below the waist) to which access is provided by a vertical or diagonal slit in the outside of the garment\nn04234455\ta thin strip (wood or metal)\nn04234670\t(formerly) a writing tablet made of slate\nn04234763\ta pencil of soft slate (or soapstone) used for writing on a slate\nn04234887\ta roof covered with 
slate\nn04235291\ta vehicle mounted on runners and pulled by horses or dogs; for transportation over snow\nn04235646\ta piece of furniture that can be opened up into a bed\nn04235771\tpajamas with feet; worn by children\nn04235860\tlarge padded bag designed to be slept in outdoors; usually rolls up like a bedroll\nn04236001\ta passenger car that has berths for sleeping\nn04236377\tthe part of a garment that is attached at the armhole and that provides a cloth covering for the arm\nn04236702\tsmall case into which an object fits\nn04236809\ta bed with solid headboard and footboard that roll outward at the top\nn04236935\ta bell attached to a sleigh, or to the harness of a horse that is pulling a sleigh\nn04237174\tiron bar used to loosen and rake clinkers out of furnaces\nn04237287\tknife especially designed for slicing particular foods, as cheese\nn04237423\ta machine for cutting; usually with a revolving blade\nn04238128\tplaything consisting of a sloping chute down which children can slide\nn04238321\ta fastener for locking together two toothed edges by means of a sliding tab\nn04238617\tprojector that projects an enlarged image of a slide onto a screen\nn04238763\tanalog computer consisting of a handheld instrument used for rapid calculations; have been replaced by pocket calculators\nn04238953\tvalve that opens and closes a passageway by sliding over a port\nn04239074\ta door that opens by sliding instead of swinging\nn04239218\trower's seat that slides fore and aft\nn04239333\ta window that open by sliding horizontally\nn04239436\tbandage to support an injured forearm; consisting of a wide triangular piece of cloth hanging from around the neck\nn04239639\ta simple weapon consisting of a looped strap in which a projectile is whirled and then released\nn04239786\ta shoe that has a strap that wraps around the heel\nn04239900\tdispenser consisting of a tubular ring around the propeller hub of an airplane through which antifreeze solution is spread over the 
blades\nn04240434\ta friction clutch that will slip when the torque is too great\nn04240752\ta removable fitted cloth covering for upholstered furniture\nn04240867\tpliers with a joint adjustable to two positions in order to increase the opening of the jaws\nn04241042\ta knot at the end of a cord or rope that can slip along the cord or rope around which it is made\nn04241249\tan article of clothing (garment or shoe) that is easily slipped on or off\nn04241394\tlow footwear that can be slipped on and off easily; usually worn indoors\nn04241573\tconnection consisting of a metal ring on a rotating part of a machine; provides a continuous electrical connection through brushes on stationary contacts\nn04242084\t(ophthalmology) a lamp that emits a narrow but intense beam of light that enables an ophthalmologist, using a microscope, to view the retina and optic nerve\nn04242315\tnarrow trench for shelter in battle\nn04242408\ta sailing vessel with a single mast set about one third of the boat's length aft of the bow\nn04242587\ta sailing or steam warship having cannons on only one deck\nn04242704\ta bowl into which the dregs of teacups and coffee cups are emptied at the table\nn04243003\ta large pail used to receive waste water from a washbasin or chamber pot\nn04243142\tcheap clothing (as formerly issued to sailors in Britain)\nn04243251\ta store that sells cheap ready-made clothing\nn04243546\ta slot machine that is used for gambling\nn04243941\ta machine that is operated by the insertion of a coin in a slot\nn04244379\tconduit that carries a rapid flow of water controlled by a sluicegate\nn04244847\ta sailing ship (usually rigged like a sloop or cutter) used in fishing and sailing along the coast\nn04244997\ta boat that is small\nn04245218\tinterface consisting of a standard port between a computer and its peripherals that is used in some computers\nn04245412\ta ship that is small\nn04245508\tpersonal items conforming to regulations that are sold aboard ship or at a 
naval base and charged to the person's pay\nn04245847\ta bomb that can be guided (by a laser beam or radio) to its target\nn04246060\ta bottle containing smelling salts\nn04246271\tembroidery consisting of ornamental needlework on a garment that is made by gathering the cloth tightly in stitches\nn04246459\ta bomb that gives off thick smoke when it explodes; used to make a smoke screen or to mark a position\nn04246731\ta small house where smoke is used to cure meat or fish\nn04246855\ta passenger car for passengers who wish to smoke\nn04247011\t(military) screen consisting of a cloud of smoke that obscures movements\nn04247440\troom in a hotel or club set apart for smokers\nn04247544\ta firearm that has no rifling\nn04247630\ta small plane for finish work\nn04247736\tusually inexpensive bar\nn04247876\ta simple jointed bit for a horse; without a curb\nn04248209\ta fastener used on clothing; fastens with a snapping sound\nn04248396\ta brim that can be turned up and down on opposite sides\nn04248507\ta hat with a snap brim\nn04248851\ta trap for birds or small mammals; often has a slip noose\nn04249415\ta small drum with two heads and a snare stretched across the lower head\nn04249582\ta pulley-block that can be opened to receive the bight of a rope\nn04249882\ta globular glass with a small top; used for serving brandy\nn04250224\tan extremely powerful rifle developed for the military; capable of destroying light armored vehicles and aircraft more than a mile away\nn04250473\t(plural) hand shears for cutting sheet metal\nn04250599\ta kind of snowmobile\nn04250692\tan ornamental net in the shape of a bag that confines a woman's hair; pins or ties at the back of the head\nn04250850\tair passage provided by a retractable device containing intake and exhaust pipes; permits a submarine to stay submerged for extended periods of time\nn04251144\tbreathing device consisting of a bent tube fitting into a swimmer's mouth and extending above the surface; allows swimmer to 
breathe while face down in the water\nn04251701\ta mound or heap of snow\nn04251791\ta board that resembles a broad ski or a small surfboard; used in a standing position to slide down snow-covered slopes\nn04252077\ttracked vehicle for travel on snow having skis in front\nn04252225\ta vehicle used to push snow from roads\nn04252331\ta device to help you walk on deep snow; a lightweight frame shaped like a racquet is strengthened with cross pieces and contains a network of thongs; one is worn on each foot\nn04252560\ta child's overgarment for cold weather\nn04252653\ta machine that removes snow by scooping it up and throwing it forcefully through a chute\nn04253057\ta small ornamental box for carrying snuff in your pocket\nn04253168\ta cone-shaped implement with a handle; for extinguishing candles\nn04253304\tscissors for cropping and holding the snuff of a candlewick\nn04253931\ta crate for packing soap\nn04254009\ta bathroom or kitchen fixture for holding a bar of soap\nn04254120\tdispenser of liquid soap\nn04254450\ta cleaning pad containing soap\nn04254680\tan inflated ball used in playing soccer\nn04254777\thosiery consisting of a cloth covering for the foot; worn inside the shoe; reaches to between the ankle and the knee\nn04255163\treceptacle where something (a pipe or probe or end of a bone) is inserted\nn04255346\ta wrench with a handle onto which sockets of different sizes can be fitted\nn04255499\ta plain plinth that supports a wall\nn04255586\ta can for holding soft drinks\nn04255670\tan apparatus for dispensing soda water\nn04255768\ta counter where ice cream and sodas and sundaes are prepared and served\nn04255899\ta house built of sod or adobe laid in horizontal courses\nn04256318\tlamp in which an electric current passed through a tube of sodium vapor makes a yellow light; used is street lighting\nn04256520\tan upholstered seat for more than one person\nn04256758\tthe underside of a part of a building (such as an arch or overhang or beam 
etc.)\nn04256891\tball used in playing softball\nn04257223\ta pedal on a piano that moves the action closer to the strings and so soften the sound\nn04257684\tdrain that conveys liquid waste from toilets, etc.\nn04257790\telectrical device consisting of a large array of connected solar cells\nn04257986\ta cell that converts solar energy into electrical energy\nn04258138\ta concave mirror that concentrates the rays of the sun; can produce high temperatures\nn04258333\ta heater that makes direct use of solar energy\nn04258438\ta house designed to use solar radiation for heating; usually has large areas of glass in front of heat-absorbing materials\nn04258618\ta telescope designed to make observations of the sun\nn04258732\ta system that converts sunlight into heat\nn04258859\ta hand tool with a heatable tip; used to melt and apply solder\nn04259202\ta coil of wire around an iron core; becomes a magnet when current passes through the coil\nn04259468\tarmor plate that protects the foot; consists of mail with a solid toe and heel\nn04259630\ta straw hat with a tall crown and broad brim; worn in American southwest and in Mexico\nn04260192\tdepth finder for determining depth of water or a submerged object by means of ultrasound waves\nn04260364\tan image of a structure that is produced by ultrasonography (reflections of high-frequency sound waves); used to observe fetal growth or to study bodily organs\nn04260589\tan instrument that uses the differential transmission and reflection of ultrasonic waves in order to provide an image of a bodily organ\nn04261116\ta machine for sorting things (such as punched cards or letters) into classes\nn04261281\tan open-air market in an Arabian city\nn04261369\tcontact (the part of a bell) against which the clapper strikes\nn04261506\ta resonating chamber in a musical instrument (as the body of a violin)\nn04261638\ta movie camera that records sounds in synchrony with the visual images\nn04261767\ta device for making 
soundings\nn04261868\tmotion-picture film with sound effects and dialogue recorded on it\nn04262161\t(music) resonator consisting of a thin board whose vibrations reinforce the sound of the instrument\nn04262530\ta research rocket used to obtain information about the atmosphere at various altitudes\nn04262678\ta recording of acoustic signals\nn04262869\ta spectrograph for acoustic spectra\nn04263257\ta bowl for serving soup\nn04263336\ta ladle for serving soup\nn04263502\ta spoon with a rounded bowl for eating soup\nn04263760\tany device serving as a source of visible electromagnetic radiation\nn04263950\tan organ stop resulting in a soft muted sound\nn04264134\ta narrow braid used as a decorative trimming\nn04264233\ta long cassock with buttons down the front; worn by Roman Catholic priests\nn04264361\twaterproof hat with wide slanting brim longer in back than in front\nn04264485\tsoybeans bought or sold at an agreed price for delivery at a specified future date\nn04264628\tthe bar-shaped typewriter key that introduces spaces when used\nn04264765\ta spacecraft designed to transport people and support human life in outer space\nn04264914\ta craft capable of traveling in outer space; technically, a satellite around the sun\nn04265275\theater consisting of a self-contained (usually portable) unit to warm a room\nn04265428\ta helmet worn by astronauts while in outer space\nn04265904\ta rocket powerful enough to travel into outer space\nn04266014\ta reusable spacecraft with wings for a controlled descent through the Earth's atmosphere\nn04266162\ta manned artificial satellite in a fixed orbit designed for scientific research\nn04266375\ta pressure suit worn by astronauts while in outer space\nn04266486\ta sturdy hand shovel that can be pushed into the earth with the foot\nn04266849\ta thin bit with a center point and cutting edges on either side\nn04266968\ta complicated highway interchange with multiple overpasses\nn04267091\ta German machine gun\nn04267165\tan 
elastic synthetic fabric\nn04267246\tan approximately triangular surface area between two adjacent arches and the horizontal plane above them\nn04267435\ta fore-and-aft sail set on the aftermost lower mast (usually the mizzenmast) of a vessel\nn04267577\ta stout rounded pole of wood or metal used to support rigging\nn04267985\ta horizontal pipe having fine holes drilled throughout its length so as to deliver a spray of water\nn04268142\ta wire net to stop sparks from an open fireplace or smokestack\nn04268275\telectrical device to reduce sparking when electrical contacts are opened or closed\nn04268418\tan instrument that detects ionizing radiation from elementary particles\nn04268565\tan induction coil used to create sparks\nn04268799\ta component of an ignition system; consists of two shaped electrodes and the space between them\nn04269086\t(on early automobiles) a lever mounted on the steering column and used to adjust the timing of the ignition\nn04269270\telectrical device that fits into the cylinder head of an internal-combustion engine and ignites the gas by means of an electric spark\nn04269502\ta wrench for removing or tightening spark plugs into the cylinder head of an internal combustion engine\nn04269668\tan early radio transmitter using a discharge across a spark gap as the source of its power\nn04269822\ta cloth covering (a legging) that covers the instep and ankles\nn04269944\ta hand tool with a thin flexible blade used to mix or spread soft substances\nn04270147\ta turner with a narrow flexible blade\nn04270371\ta telephone with a microphone and loudspeaker; can be used without picking up a handset; several people can participate in a call at the same time\nn04270576\ta trumpet-shaped acoustic device to intensify and direct the human voice; formerly held to the ear by a hard-of-hearing person\nn04270891\ta long pointed rod used as a tool or weapon\nn04271148\tan implement with a shaft and barbed point used for catching fish\nn04271531\ta store that 
sells only one kind of merchandise\nn04271793\ta bottle for holding urine specimens\nn04271891\tan elaborate and remarkable display on a lavish scale\nn04272054\toptical instrument consisting of a frame that holds a pair of lenses for correcting defective vision\nn04272389\ta woman's pump with medium heel; usually in contrasting colors for toe and heel\nn04272782\ta spectroscope by which spectra can be photographed\nn04272928\ta photometer for comparing two light radiations wavelength by wavelength\nn04273064\tan optical instrument for spectrographic analysis\nn04273285\ta medical instrument for dilating a bodily passage or cavity in order to examine the interior\nn04273569\ta fast motorboat\nn04273659\ta hindrance to speeding created by a crosswise ridge in the surface of a roadway\nn04273796\ta meter fixed to a vehicle that measures and displays its speed\nn04273972\tan ice skate with a long blade; worn for racing\nn04274686\ta measuring instrument for measuring the curvature of a surface\nn04274985\ta pressure gauge for measuring blood pressure\nn04275093\ta mill for grinding spices\nn04275175\ta rack for displaying containers filled with spices\nn04275283\ta skillet made of cast iron\nn04275548\ta web resembling the webs spun by spiders\nn04275661\tsports equipment consisting of a sharp point on the sole of a shoe worn by athletes\nn04275904\ta large stout nail\nn04277352\ta stick or pin used to twist the yarn in spinning\nn04277493\tany of various rotating shafts that serve as axes for larger rotating parts\nn04277669\ta piece of wood that has been turned on a lathe; used as a baluster, chair leg, etc.\nn04277826\ta clothes dryer that uses centrifugal motion to dry the clothes that are put into it\nn04278247\tearly model harpsichord with only one string per note\nn04278353\ta small and compactly built upright piano\nn04278447\ta large and usually triangular headsail; carried by a yacht as a headsail when running before the wind\nn04278605\tfisherman's lure; 
revolves when drawn through the water\nn04278932\tspinning machine that draws, twists, and winds yarn\nn04279063\tan early spinning machine with multiple spindles\nn04279172\ta textile machine for spinning yarn and thread\nn04279353\ta fishing rod designed for casting a spinning lure\nn04279462\ta small domestic spinning machine with a single spindle that is driven by hand or foot\nn04279858\tan oblique bandage in which successive turns overlap preceding turns\nn04279987\ta screwdriver with a ratchet (so the blade turns in only one direction) and a spiral in the handle (so the blade rotates) with downward pressure on the handle\nn04280259\ta spring that is wound like a spiral\nn04280373\ta lamp that burns a volatile liquid fuel such as alcohol\nn04280487\ta stove that burns a volatile liquid fuel such as alcohol\nn04280845\ta measuring instrument for measuring the vital capacity of the lungs\nn04280970\ta skewer for holding meat over a fire\nn04281260\ta receptacle for spit (usually in a public place)\nn04281375\tprotective covering consisting of a panel to protect people from the splashing water or mud etc.\nn04281571\ta protective covering over or beside a wheel to protect the upper part of a vehicle from splashes of mud\nn04281998\ta junction where two things (as paper or film or magnetic tape) have been joined together\nn04282231\ta mechanical device for joining two pieces of paper or film or magnetic tape\nn04282494\tan orthopedic mechanical device used to immobilize and protect a part of the body (as a broken leg)\nn04282872\ta rail that is split from a log\nn04282992\ta brand of fine English porcelain\nn04283096\ta hinged airfoil on the upper surface of an aircraft wing that is raised to reduce lift and increase drag\nn04283255\tan airfoil mounted on the rear of a car to reduce lift at high speeds\nn04283378\tsupport consisting of a radial member of a wheel joining the hub to the rim\nn04283585\ta small plane that has a handle on each side of its blade; used 
for shaping or smoothing cylindrical wooden surfaces (originally wheel spokes)\nn04283784\tany soft porous fabric (especially in a loose honeycomb weave)\nn04283905\ta wet mop with a sponge as the absorbent\nn04284002\ta piece of cutlery with a shallow bowl-shaped container and a handle; used to stir or serve or take up food\nn04284341\tformerly a golfing wood with an elevated face\nn04284438\ttrademark for a plastic eating utensil that has both tines and a bowl like a spoon\nn04284572\ta fur or leather pouch worn at the front of the kilt as part of the traditional dress of Scottish Highlanders\nn04284869\ta maneuverable kite controlled by two lines and flown with both hands\nn04285008\ta small low car with a high-powered engine; usually seats two persons\nn04285146\tequipment needed to participate in a particular sport\nn04285622\tan implement used in a sport\nn04285803\tattire worn for sport or for casual wear\nn04285965\ta high-performance four-wheel drive car built on a truck chassis\nn04286128\ta business establishment for entertainment\nn04286575\ta lamp that produces a strong beam of light to illuminate a restricted area; used to focus attention of a stage performer\nn04286960\teach of the welds made by welding at a separate point\nn04287351\tan oil well that is spouting\nn04287451\ta chock or bar wedged under a wheel or between the spokes to prevent a vehicle from rolling down an incline\nn04287747\tan applicator resembling a gun for applying liquid substances (as paint) in the form of a spray\nn04287898\tpaint applied with a spray gun\nn04287986\ta hand tool for spreading something\nn04288165\tan ornament that resembles a spray of leaves or flowers\nn04288272\ta metal elastic device that returns to its shape or position when pushed or pulled or pressed\nn04288533\ta balance that measure weight by the tension on a helical spring\nn04288673\ta flexible board for jumping upward\nn04289027\tmechanical device that attaches to a garden hose for watering lawn or 
garden\nn04289195\ta system for extinguishing fires; water from a network of overhead pipes is released through nozzles that open automatically with the rise in temperature\nn04289449\ta light spar that crosses a fore-and-aft sail diagonally\nn04289576\ta fore-and-aft sail extended by a sprit\nn04289690\tthin wheel with teeth that engage with a chain\nn04289827\troller that has teeth on the rims to pull film or paper through\nn04290079\t(nautical) small stuff consisting of a lightweight rope made of several rope yarns loosely wound together\nn04290259\ta sharp prod fixed to a rider's heel and used to urge a horse onward\nn04290507\tgear wheels that mesh in the same plane\nn04290615\ta Russian artificial satellite\nn04290762\ta satellite with sensors to detect nuclear explosions\nn04291069\ta room in a police station where members of the force assemble for roll call and duty assignments\nn04291242\ta hand tool consisting of two straight arms at right angles; used to construct or test right angles\nn04291759\ta double knot made of two half hitches and used to join the ends of two cords\nn04291992\ta square-rigged sailing ship\nn04292080\ta four-sided sail set beneath a horizontal yard suspended at the middle from a mast\nn04292221\trubber ball used in playing squash\nn04292414\ta small racket with a long handle used for playing squash\nn04292572\tthe loudspeaker on an intercom or public address system\nn04292921\tT-shaped cleaning implement with a rubber edge across the top; drawn across a surface to remove water (as in washing windows)\nn04293119\ta kitchen utensil for squeezing juice from fruit\nn04293258\tan electric circuit that cuts off a receiver when the signal becomes weaker than the noise\nn04293744\ta small arch built across the interior angle of two walls (usually to support a spire)\nn04294212\ta device for making something stable\nn04294426\tairfoil consisting of a device for stabilizing an aircraft\nn04294614\ta rigid metal bar between the front 
suspensions and between the rear suspensions of cars and trucks; serves to stabilize the chassis\nn04294879\ta farm building for housing horses or other livestock\nn04295081\tgear for a horse\nn04295353\taccommodation for animals (especially for horses)\nn04295571\tstorage space in a library consisting of an extensive arrangement of bookshelves where most of the books are stored\nn04295777\ta base or platform on which hay or corn is stacked\nn04295881\ta large structure for open-air sports or entertainments\nn04296562\ta large platform on which people can stand and can be seen by an audience\nn04297098\ta large coach-and-four formerly used to carry passengers and mail on regular routes between towns\nn04297750\ta window made of stained glass\nn04297847\ta strip of carpet for laying on stairs\nn04298053\ta rod that holds a stair-carpet in the angle between two steps\nn04298661\ta vertical well around which there is a stairway\nn04298765\ta strong wooden or metal post with a point at one end so it can be driven into the ground\nn04299215\ta booth where articles are displayed for sale\nn04299370\ta compartment in a stable where a single animal is confined and fed\nn04299963\ta block or die used to imprint a mark or design\nn04300358\ta mill in which ore is crushed with stamps\nn04300509\ta power tool that stamps\nn04300643\tany vertical post or rod used as a support\nn04301000\ta small table for holding articles of various kinds\nn04301242\tan upright pole or beam (especially one used as a support)\nn04301474\ta primary cell used as a standard of electromotive force\nn04301760\ta transmission that is operated manually with a gear lever and a clutch pedal\nn04302200\ta large printing press that exerts pressure vertically\nn04302863\ta light open horse-drawn carriage with two or four wheels and one seat\nn04302988\ta steam-powered automobile\nn04303095\tpaper fastener consisting of a short length of U-shaped wire that can fasten papers together\nn04303258\ta short 
U-shaped wire nail for securing cables\nn04303357\ta hand-held machine for driving staples home\nn04303497\ta machine that inserts staples into sheets of paper in order to fasten them together\nn04304215\ta spacecraft designed to carry a crew into interstellar space (especially in science fiction)\nn04304375\tan electric motor for starting an engine\nn04304680\ta movable barrier on the starting line of a race course\nn04305016\tan electric furnace in which an electric arc provides the source of heat for making steel\nn04305210\ta government building in which a state legislature meets\nn04305323\ta mansion that is (or formerly was) occupied by an aristocratic family\nn04305471\ta prison maintained by a state of the U.S.\nn04305572\ta guest cabin\nn04305947\ta measuring instrument used to measure static pressure in a stream of fluid\nn04306080\ta facility equipped with special equipment and personnel for a particular purpose\nn04306592\tmechanical device consisting of the stationary part of a motor or generator in or around which the rotor revolves\nn04306847\ta sculpture representing a human or animal\nn04307419\t(nautical) brace consisting of a heavy rope or wire cable used as a support for a mast or spar\nn04307767\ta fore-and-aft sail set on a stay (as between two masts)\nn04307878\ta restaurant that specializes in steaks\nn04307986\ta sharp table knife used in eating steak\nn04308084\tan aircraft designed in accordance with technology that makes detection by radar difficult\nn04308273\ta bomber that is difficult to detect by radar\nn04308397\ta fighter that is difficult to detect by radar; is built for precise targeting and uses laser-guided bombs\nn04308583\ta room that can be filled with steam in which people bathe; `vapour bath' is a British term\nn04308807\ta boat propelled by a steam engine\nn04308915\tthe chamber from which steam is distributed to a cylinder\nn04309049\texternal-combustion engine in which heat is used to raise steam which either turns a 
turbine or forces a piston to move up and down in a cylinder\nn04309348\ta ship powered by one or more steam engines\nn04309548\ta cooking utensil that can be used to cook food by steaming it\nn04309833\ta pressing iron that can emit steam\nn04310018\ta locomotive powered by a steam engine\nn04310157\tvehicle equipped with heavy wide smooth rollers for compacting roads and pavements\nn04310507\ta power shovel that is driven by steam\nn04310604\tturbine in which steam strikes blades and makes them turn\nn04310721\ta whistle in which the sound is produced by steam; usually attached to a steam boiler\nn04310904\tknife sharpener consisting of a ridged steel rod\nn04311004\ta steel bridge constructed in the form of an arch\nn04311174\ta concave percussion instrument made from the metal top of an oil drum; has an array of flattened areas that produce different tones when struck (of Caribbean origin)\nn04311595\ta factory where steel is made\nn04312020\tabrader consisting of a pad of steel wool used for polishing or smoothing\nn04312154\ta portable balance consisting of a pivoted bar with arms of unequal length\nn04312432\ta tall tower that forms the superstructure of a building (usually a church or temple) and that tapers to a point at the top\nn04312654\tthe cheapest accommodations on a passenger ship\nn04312756\ta gear that couples the steering wheel to the steering linkage of a motor vehicle\nn04312916\tmechanism consisting of a system of rods and levers connected to the front wheels of a motor vehicle; the steering gear pushes it left or right which swivels the front wheels, causing the vehicle to turn\nn04313220\ta mechanism by which something is steered (especially a motor vehicle)\nn04313503\ta handwheel that is used for steering\nn04313628\tan ancient upright stone slab bearing markings\nn04314107\ta watch that is wound by turning a knob at the stem\nn04314216\ta sheet of material (metal, plastic, cardboard, waxed paper, silk, etc.) 
that has been perforated with a pattern (printing or a design); ink or paint can pass through the perforations to create the printed pattern on the surface below\nn04314522\ta lightweight British submachine gun\nn04314632\ta machine for typewriting shorthand characters\nn04314914\tsupport consisting of a place to rest the foot while ascending or descending a stairway\nn04315342\ta transformer that reduces voltage\nn04315713\ta stool that has one or two steps that fold under the seat\nn04315828\ta transformer that increases voltage\nn04315948\treproducer in which two microphones feed two or more loudspeakers to give a three-dimensional effect to the sound\nn04316498\tan optical device for viewing stereoscopic photographs\nn04316815\ta naval gun able to fire astern at a ship in chase\nn04316924\t(nautical) the principal upright timber at the stern of a vessel\nn04317063\ta paddle steamer having the paddle wheel in the stern\nn04317175\ta medical instrument for listening to the sounds generated inside the body\nn04317325\ta saucepan used for stewing\nn04317420\tan implement consisting of a length of wood\nn04317833\ta long thin implement resembling a length of wood\nn04317976\ta lever used by a pilot to control the ailerons and elevators of an airplane\nn04318131\ta long implement (usually made of wood) that is shaped so that hockey or polo players can hit a puck or ball\nn04318787\tan upright that is a member in a door or window frame\nn04318892\ta small dagger with a tapered blade\nn04318982\tan apparatus used for the distillation of liquids; consists of a vessel in which a substance is vaporized by heat and a condenser where the vapor is condensed\nn04319545\ta pantry or storeroom connected with the kitchen (especially in a large house) for preparing tea and beverages and for storing liquors and preserves and tea etc\nn04319774\ta large pipe wrench with L-shaped adjustable jaws that tighten as pressure on the handle is increased\nn04319937\tone of two stout poles 
with foot rests in the middle; used for walking high above the ground\nn04320405\ta portable low altitude surface-to-air missile system using infrared guidance and an impact fuse; fired from the shoulder\nn04320598\ta small bomb designed to give off a foul odor when it explodes\nn04320871\tan implement used for stirring\nn04320973\tsupport consisting of metal loops into which rider's feet go\nn04321121\ta hand-operated reciprocating pump; used in fighting fires\nn04321453\ta short straight stick of wood\nn04322026\tthe handle of a handgun or the butt end of a rifle or shotgun or part of the support of a machine gun or artillery gun\nn04322531\tfortification consisting of a fence made of a line of stout posts set firmly for defense\nn04322692\tboxcar with latticed sides; for transporting livestock\nn04322801\ta racing car with the basic chassis of a commercially available car\nn04323519\tknit used especially for infants' wear and undergarments\nn04323819\tclose-fitting hosiery to cover the foot and leg; come in matched pairs (usually used in the plural)\nn04324120\tany equipment constantly used as part of a profession or occupation\nn04324297\ta pot used for preparing soup stock\nn04324387\tstoreroom for storing goods and supplies used in a business\nn04324515\ta former instrument of punishment consisting of a heavy timber frame with holes in which the feet (and sometimes the hands) of an offender could be locked\nn04325041\tan ornamented saddle used by cowboys; has a high horn to hold the lariat\nn04325208\tenclosed yard where cattle, pigs, horses, or sheep are kept temporarily\nn04325704\ta wide scarf worn about their shoulders by women\nn04325804\tgarment consisting of a V-shaped panel of stiff material worn over the chest and stomach in the 16th century\nn04325968\ta suction pump used to remove the contents of the stomach\nn04326547\ta fence built of rough stones; used to separate fields\nn04326676\tceramic ware that is fired in high heat and vitrified and 
nonporous\nn04326799\tmasonry done with stone\nn04326896\ta simple seat without a back or arms\nn04327204\tsmall porch or set of steps at the front entrance of a house\nn04327544\tan acid bath used to stop the action of a developer\nn04327682\tfaucet consisting of a rotating device for regulating flow of a liquid\nn04328054\ta knot that prevents a rope from passing through a hole\nn04328186\ta timepiece that can be started or stopped for exact timing (as of a race)\nn04328329\ta voltaic battery that stores electric charge\nn04328580\ta cell that can be recharged\nn04328703\tcontainer consisting of a set of magnets set in a doughnut-shaped ring around which charged particles from an accelerator can be kept circulating until they are used\nn04328946\tthe area in any structure that provides space for storage\nn04329477\ta room in which things are stored\nn04329681\tan underground shelter where you can go until a storm passes\nn04329834\tan extra outer door for protection against severe weather or winter\nn04329958\ta window outside an ordinary window to protect against severe weather or winter\nn04330109\tbasin for holy water\nn04330189\tan archaic drinking vessel\nn04330267\tany heating apparatus\nn04330340\ta kitchen appliance used for cooking food\nn04330669\ta small machine bolt\nn04330746\tchimney consisting of a metal pipe of large diameter that is used to connect a stove to a flue\nn04330896\tplate iron that is thinner than tank iron\nn04330998\ta violin made by Antonio Stradivari or a member of his family\nn04331277\ta straight-backed chair without arms\nn04331443\thand tool consisting of a flat rigid rectangular bar (metal or wood) that can be used to draw straight lines (or test their straightness)\nn04331639\ta device for straightening\nn04331765\ta rock drill with flutes that are straight\nn04331892\tpin consisting of a short straight stiff piece of wire with a pointed end; used to fasten pieces of cloth or paper together\nn04332074\ta razor with a 
straight cutting edge enclosed in a case that forms a handle when the razor is opened for use\nn04332243\ta filter to retain larger pieces while smaller pieces and liquids pass through\nn04332580\ta garment similar to a jacket that is used to bind the arms tightly against the body as a means of restraining a violent person\nn04332987\twhip consisting of a strip of leather used in flogging\nn04333129\tan elongated leather strip (or a strip of similar material) for binding things together or holding something in position\nn04333869\ta hinge with two long straps; one strap is fastened to the surface of a moving part (e.g., a door or lid) and the other is fastened to the adjacent stationary frame\nn04334105\ta woman's garment that exposes the shoulders and has no shoulder straps\nn04334365\tan artificial fly that has wings extending back beyond the crook of the fishhook\nn04334504\ta streamlined train\nn04334599\ta thoroughfare (usually including sidewalks) that is lined with buildings\nn04335209\tthe part of a thoroughfare between the sidewalks; the part of the thoroughfare on which vehicles travel\nn04335435\ta wheeled vehicle that runs on rails and is propelled by electricity\nn04335693\tordinary clothing suitable for public appearances (as opposed to costumes or sports apparel or work clothes etc.)\nn04335886\ta lamp supported on a lamppost; for illuminating a street\nn04336792\ta litter for transporting people who are ill or wounded or dead; usually consists of a sheet of canvas stretched between two poles\nn04337157\ta wooden framework on which canvas is stretched and fixed for oil painting\nn04337287\ttrousers made of a stretchy fabric\nn04337503\ta tool or rod used to level off grain or other granular material that is heaped in a measure\nn04337650\tan implement for sharpening scythes\nn04338517\ta musical instrument in which taut strings provide the source of sound\nn04338963\ta long horizontal timber to connect uprights\nn04339062\tbrace consisting of a 
longitudinal member to strengthen a fuselage or hull\nn04339191\ta very narrow necktie usually tied in a bow\nn04339638\tthin piece of wood or metal\nn04339879\tlight consisting of long tubes (instead of bulbs) that provide the illumination\nn04340019\ta mercantile establishment consisting of a row of various stores and business and restaurants along a road or busy street; usually opening on a parking lot\nn04340521\tscientific instrument that provides a flashing light synchronized with the periodic movement of an object; can make moving object appear stationary\nn04340750\ta strongly made box for holding money or valuables; can be locked\nn04340935\ta strongly fortified defensive structure\nn04341133\ta burglarproof and fireproof room in which valuables are kept\nn04341288\ta leather strap used to sharpen razors\nn04341414\tsupport that is a constituent part of any structure or building\nn04341686\ta thing constructed; a complex entity constructed of many parts\nn04343511\ta center for student activities at a college or university\nn04343630\ta reading lamp with a flexible neck; used on a desk\nn04343740\ta building on a college campus dedicated to social and organizational activities of the student body\nn04344003\ta small permanent magnet in a metal container; when the magnet clicks against the container it indicates that the magnet is directly over an iron nail that holds the wallboard to a stud\nn04344734\tan apartment with a living space and a bathroom and a small kitchen\nn04344873\tconvertible consisting of an upholstered couch that can be converted into a double bed\nn04345028\ta room used for reading and writing and studying\nn04345201\ta classroom reserved for study\nn04345787\ta nut used to tighten a stuffing box\nn04346003\t(cricket) any of three upright wooden posts that form the wicket\nn04346157\ta weapon designed to disable a victim temporarily by delivering a nonlethal high-voltage electric shock\nn04346328\ta dome-shaped shrine erected by 
Buddhists\nn04346428\ta pen for swine\nn04346511\ta pointed tool for writing or drawing or engraving\nn04346679\ta sharp pointed device attached to the cartridge of a record player\nn04346855\ta unit assembled separately but designed to fit with other units in a manufactured product\nn04347119\ta car smaller than a compact car\nn04347519\tmachine gun that is a portable automatic firearm\nn04347754\ta submersible warship usually armed with torpedoes\nn04348070\ta torpedo designed to be launched from a submarine\nn04348184\ta warship designed to operate under water\nn04348359\tan apparatus intended for use under water\nn04348988\ta machine that subtracts numbers\nn04349189\ta token that is used to pay for entry to the subway system\nn04349306\ta train that runs in a subway system\nn04349401\ta loudspeaker that is designed to reproduce very low bass frequencies\nn04349913\ta cup-shaped device (made of rubber, glass, or plastic) that produces a partial vacuum; used to adhere or draw something to a surface\nn04350104\ta pump for raising fluids by suction\nn04350235\ta bathhouse for hot air baths or steam baths\nn04350458\ta fabric made to resemble suede leather\nn04350581\ta dish in which sugar is served\nn04350688\ta refinery for sugar\nn04350769\ta spoon for serving sugar; often made in the shape of a seashell\nn04350905\ta set of garments (usually including a jacket and trousers or skirt) for outerwear all of the same fabric and color\nn04351550\tapartment consisting of a series of connected rooms used as a living unit (as in a hotel)\nn04351699\ta fabric used for suits\nn04353573\ta light two-wheeled vehicle for one person; drawn by one horse\nn04354026\ta country house (usually located in the country) that provides a cool place to live in the summer\nn04354182\tthe circular ring in which Sumo wrestlers compete\nn04354387\tan oil reservoir in an internal combustion engine\nn04354487\ta suction pump for removing liquid from a sump\nn04354589\ta large bonnet that 
shades the face; worn by girls and women\nn04355115\tthe best attire you have which is worn to church on Sunday\nn04355267\tan unroofed deck\nn04355338\ttimepiece that indicates the daylight hours by the shadow that the gnomon casts on a calibrated dial\nn04355511\ta light loose sleeveless summer dress with a wide neckline and thin shoulder straps that expose the arms and shoulders\nn04355684\tmiscellaneous objects too numerous or too small to be specified\nn04355821\tthe central gear in an epicyclic train\nn04355933\ta convex lens that focuses the rays of the sun; used to start a fire\nn04356056\tspectacles that are darkened or polarized to protect the eyes from the glare of the sun\nn04356595\ta hat with a broad brim that protects the face from direct exposure to the sun\nn04356772\ta mercury-vapor lamp used in medical or cosmetic treatments\nn04356925\ta room enclosed largely with glass and affording exposure to the sun\nn04357121\tan automobile roof having a sliding or raisable panel\nn04357314\ta cream spread on the skin; contains a chemical (as PABA) to filter out ultraviolet light and so protect from sunburn\nn04357531\ta child's garment consisting of a brief top and shorts\nn04357930\tcompressor that forces increased oxygen into the cylinders of an internal-combustion engine\nn04358117\ta mainframe computer that is one of the most powerful available at a given time\nn04358256\ta collider that operates at very low temperatures\nn04358491\tan extensive electronic network (such as the internet) used for the rapid transfer of sound and video and graphics in digital form\nn04358707\ta large self-service grocery store selling groceries and dairy products and household goods\nn04358874\tstructure consisting of the part of a ship above the main deck\nn04359034\tthe largest class of oil tankers\nn04359124\tusually a small luxurious nightclub\nn04359217\twalking stick made from the wood of an American tropical vine\nn04359335\ta mechanical device for holding 
something and supplying it as needed\nn04359500\ta closet for storing supplies\nn04359589\tany device that bears the weight of another thing\nn04360501\tsupporting structure that holds up or provides a foundation\nn04360798\ta column that supports a heavy weight\nn04360914\telasticized stocking intended to reduce pressure on the veins of the leg (as in case of varicose veins)\nn04361095\ta structure that serves to support something\nn04361260\ta tower that serves to support something\nn04361937\ta tunic worn over a knight's armor\nn04362624\tgauge consisting of a scriber mounted on an adjustable stand; used to test the accuracy of plane surfaces\nn04362821\ta ski tow that pulls skiers up a slope without lifting them off the ground\nn04362972\ta naval radar to search for surface targets\nn04363082\ta warship that operates on the surface of the water\nn04363210\ta guided missile fired from land or shipboard against an airborne target\nn04363412\tthe shipboard system that fires missiles at aircraft\nn04363671\ta boat that can be launched or landed in heavy surf\nn04363777\ta loose outer coat usually of rich material\nn04363874\tany of several knots used in tying stitches or ligatures\nn04363991\ta room where a doctor or dentist can be consulted\nn04364160\telectrical device inserted in a power line to protect equipment from sudden fluctuations in current\nn04364397\ta loosely woven cotton dressing for incisions made during surgery\nn04364545\ta medical instrument used in surgery\nn04364827\ta very sharp knife used in surgery\nn04364994\ta loose-fitting white ecclesiastical vestment with wide sleeves\nn04365112\ta light four-wheeled horse-drawn carriage; has two or four seats\nn04365229\ta man's overcoat in the style of a frock coat\nn04365328\ta closed-circuit television system used to maintain close observation of a person or group\nn04365484\tan instrument used by surveyors\nn04365751\tsurveying instrument consisting basically of a small telescope with an attached 
spirit level rotating around a vertical axis; for measuring relative heights of land\nn04366033\ta bar where sushi is served\nn04366116\ta mechanical system of springs or shock absorbers connecting the wheels and axles to the chassis of a wheeled vehicle\nn04366367\ta bridge that has a roadway supported by cables that are anchored at both ends\nn04366832\ta bandage of elastic fabric applied to uplift a dependant part (as the scrotum or a pendulous breast)\nn04367011\ta pedal on a piano that lifts the dampers from the strings and so allows them to continue vibrating\nn04367371\ta seam used in surgery\nn04367480\tcleaning implement consisting of absorbent material fastened to a handle; for cleaning floors\nn04367746\timplement consisting of a small piece of cotton that is used to apply medication or cleanse a wound or obtain a specimen of a secretion\nn04367950\ta garment (a gown or narrow strips of cloth) for an infant\nn04368109\ta bundle containing the personal belongings of a swagman\nn04368235\tan iron block cut with holes and grooves to assist in cold working metal\nn04368365\ta short cane or stick covered with leather and carried by army officers\nn04368496\ta man's full-dress jacket with two long tapering tails at the back\nn04368695\tan amphibious vehicle typically having four-wheel drive and a raised body\nn04368840\tsoft woolen fabric used especially for baby clothes\nn04369025\tan enveloping bandage\nn04369282\tan implement with a flat part (of mesh or plastic) and a long handle; used to kill insects\nn04369485\ta porous bag (usually of canvas) that holds water and cools it by evaporation\nn04369618\ta band of fabric or leather sewn inside the crown of a hat\nn04370048\ta crocheted or knitted garment covering the upper part of the body\nn04370288\tloose-fitting trousers with elastic cuffs; worn by athletes\nn04370456\tcotton knit pullover with long sleeves worn during athletic activity\nn04370600\tfactory where workers do piecework for poor pay and are 
prevented from forming unions; common in the clothing industry\nn04370774\tgarment consisting of sweat pants and a sweatshirt\nn04370955\ta long oar used in an open boat\nn04371050\ta second hand that is mounted on the same center as the hour and minute hand and is read on the minutes\nn04371430\tswimsuit worn by men while swimming\nn04371563\ttight fitting garment worn for swimming\nn04371774\tmechanical device used as a plaything to support someone swinging back and forth\nn04371979\ta door that swings on a double hinge; opens in either direction\nn04372370\tcontrol consisting of a mechanical or electrical or electronic device for making or breaking or changing the connections in a circuit\nn04373089\ta pocketknife with a blade that springs open at the press of a button\nn04373428\ta locomotive for switching rolling stock in a railroad yard\nn04373563\ta coupling (as in a chain) that has one end that turns on a headed pin\nn04373704\ta chair that swivels on its base\nn04373795\ta small stick used to stir mixed drinks\nn04373894\ta cutting or thrusting weapon that has a long metal blade and a hilt with a hand guard\nn04374315\ta cane concealing a sword or dagger\nn04374521\ta wrench with an S-shaped handle\nn04374735\t(Judaism) the place of worship for a Jewish congregation\nn04374907\tcyclotron that achieves relativistic velocities by modulating the frequency of the accelerating electric field\nn04375080\ta device used in photography to synchronize the peak of a flash with the opening of the camera shutter\nn04375241\tan automotive system for shifting gears in which the gears revolve at the same speed and so shift smoothly\nn04375405\telectrical converter consisting of a synchronous machine that converts alternating to direct current or vice versa\nn04375615\telectric motor in which the speed of rotation is proportional to the frequency of the A.C. 
power\nn04375775\tcyclotron in which the electric field is maintained at a constant frequency\nn04375926\tan instrument that indicates whether two periodic motions are synchronous (especially an instrument that enables a pilot to synchronize the propellers of a plane that has two or more engines)\nn04376400\t(music) an electronic instrument (usually played with a keyboard) that generates and modifies sounds electronically and can imitate a variety of other musical instruments\nn04376876\ta medical instrument used to inject or withdraw fluids\nn04377057\tinstrumentality that combines interrelated interacting artifacts designed to work as a coherent entity\nn04378489\ta short sleeveless outer tunic emblazoned with a coat of arms; worn by a knight over his armor or by a herald\nn04378651\t(Judaism) a portable sanctuary in which the Jews carried the Ark of the Covenant on their exodus\nn04378956\ta sock with a separation for the big toe; worn with thong sandals by the Japanese\nn04379096\tthe key on a typewriter or a word processor that causes a tabulation\nn04379243\ta piece of furniture having a smooth flat top that is usually supported by one or more vertical legs\nn04379964\ta piece of furniture with tableware for a meal laid out on it\nn04380255\ta fork for eating at a dining table\nn04380346\ta knife used for eating at dining table\nn04380533\ta lamp that sits on a table\nn04380916\ta circular saw mounted under a table or bench so that the blade of the saw projects up through a slot\nn04381073\ta spoon larger than a dessert spoon; used for serving\nn04381450\ta chair with an arm that has been widened for writing\nn04381587\ta table used for playing table tennis\nn04381724\tpaddle used to play table tennis\nn04381860\tthe top horizontal work surface of a table\nn04381994\tarticles for use at the table (dishes and silverware and glassware)\nn04382334\ta small drum with one head of soft calfskin\nn04382438\ta low stool in the shape of a drum\nn04382537\tscientific 
instrument used by psychologists; presents visual stimuli for brief exposures\nn04382695\ta tachometer that produces a graphical record of its readings; used to record the speed and duration of trips in a motor vehicle\nn04382880\tmeasuring instrument for indicating speed of rotation\nn04383015\ta theodolite designed for rapid measurements\nn04383130\ta short nail with a sharp point and a large head\nn04383301\ta light hammer that is used to drive tacks\nn04383839\ta crisp smooth lustrous fabric\nn04383923\tthe railing around the stern of a ship\nn04384593\ta gate at the rear of a vehicle; can be lowered for loading\nn04384910\tlamp (usually red) mounted at the rear of a motor vehicle\nn04385079\tcustom-made clothing\nn04385157\tchalk used by tailors to make temporary marks on cloth\nn04385536\ta pipe carrying fumes from the muffler to the rear of a car\nn04385799\trotor consisting of a rotating airfoil on the tail of a single-rotor helicopter; keeps the helicopter from spinning in the direction opposite to the rotation of the main rotor\nn04386051\tsupport consisting of the movable part of a lathe that slides along the bed in alignment with the headstock and is locked into position to support the free end of the workpiece\nn04386456\tany of various devices for reducing slack (as in a sewing machine) or taking up motion (as in a loom)\nn04386664\ta winged sandal (as worn by Hermes in Graeco-Roman art)\nn04386792\ta toilet powder made of purified talc and usually scented; absorbs excess moisture\nn04387095\ta woolen cap of Scottish origin\nn04387201\ta drum\nn04387261\ta frame made of two hoops; used for embroidering\nn04387400\ta shallow drum with a single drumhead and with metallic disks in the sides\nn04387531\tplain-woven (often glazed) fabric of wool or wool and cotton used especially formerly for linings and garments and curtains\nn04387706\ta tool for tamping (e.g., for tamping tobacco into a pipe bowl or a charge into a drill hole etc.)\nn04387932\ttampon 
used to absorb menstrual flow\nn04388040\tplug for the muzzle of a gun to keep out dust and moisture\nn04388162\tplug of cotton or other absorbent material; inserted into wound or body cavity to absorb exuded fluids (especially blood)\nn04388473\ta clay oven used in northern India and Pakistan\nn04388574\ta Chinese puzzle consisting of a square divided into seven pieces that must be arranged to match particular designs\nn04388743\ta large (usually metallic) vessel for holding gases or liquids\nn04389033\tan enclosed armored military vehicle; has a cannon and moves on caterpillar treads\nn04389430\tlarge drinking vessel with one handle\nn04389521\ta freight car that transports liquids or gases in bulk\nn04389718\tan armored vehicle equipped with an antitank gun and capable of high speeds\nn04389854\ta locomotive that carries its own fuel and water; no tender is needed\nn04389999\tan airplane constructed to transport chemicals that can be dropped in order to fight a forest fire\nn04390483\ta shell fired by the cannon on a tank\nn04390577\ta tight-fitting sleeveless shirt with wide shoulder straps and low neck and no front opening; often worn over a shirt or blouse\nn04390873\ta loudspeaker\nn04390977\ta plug for a bunghole in a cask\nn04391445\ta paperlike cloth made in the South Pacific by pounding tapa bark\nn04391838\ta recording made on magnetic tape\nn04392113\tmeasuring instrument consisting of a narrow strip (cloth or metal) marked in inches or centimeters and used for measuring lengths\nn04392526\telectronic equipment for making or playing magnetic tapes (but without amplifiers or speakers); a component in an audio system\nn04392764\ta mechanism that transports magnetic tape across the read/write heads of a tape playback/recorder\nn04392985\telectronic equipment for playing back magnetic tapes\nn04393095\ta magnetic recorder using magnetic tape\nn04393301\ta file with converging edges\nn04393549\ta heavy textile with a woven design; used for curtains and 
upholstery\nn04393808\ta lever that is moved in order to tap something else\nn04393913\ta wrench for turning a tap to create an internal screw thread\nn04394031\t(chemical analysis) a counterweight used in chemical analysis; consists of an empty container that counterbalances the weight of the container holding chemicals\nn04394261\tsports equipment consisting of an object set up for a marksman or archer to aim at\nn04394421\ta shipboard system for the detection and identification and location of a target with enough detail to permit effective weapon employment\nn04394630\ta paved surface having compressed layers of broken rocks held together with tar\nn04395024\twaterproofed canvas\nn04395106\ta cloth having a crisscross design\nn04395332\tone of two pieces of armor plate hanging from the fauld to protect the upper thighs\nn04395651\ta design on the skin made by tattooing\nn04395875\ta building with a bar that is licensed to sell alcoholic drinks\nn04396226\ta leather strap for punishing children\nn04396335\ta meter in a taxi that registers the fare (based on the length of the ride)\nn04396650\ta surface lift where riders hold a bar and are pulled up the hill on their skis\nn04396808\tsmall paper bag holding a measure of tea\nn04396902\ta kitchen utensil consisting of a perforated metal ball for making tea\nn04397027\tserving cart for serving tea or light refreshments\nn04397168\tchest for storing or transporting tea\nn04397261\tmaterials and equipment used in teaching\nn04397452\ta cup from which tea is drunk\nn04397645\ta long loose-fitting gown formerly popular for wear at afternoon tea\nn04397768\tkettle for boiling water to make tea\nn04397860\ta covered spoon with perforations\nn04398044\tpot for brewing tea; usually has a spout and handle\nn04398497\ta restaurant where tea and light meals are available\nn04398688\ta small spoon used for stirring tea or coffee; holds about one fluid dram\nn04398834\ta device to keep back tea leaves when pouring a cup of 
tea\nn04398951\ta small table for serving afternoon tea\nn04399046\ta tray that accommodates a tea service\nn04399158\tan urn in which tea is brewed and from which it is served\nn04399382\tplaything consisting of a child's toy bear (usually plush and stuffed with soft materials)\nn04399537\ta short peg put into the ground to hold a golf ball off the ground\nn04399846\ta hinge that looks like the letter T when it is opened; similar to a strap hinge except that one strap has been replaced by half of a butt hinge that can be mortised flush into the stationary frame\nn04400109\ta building that houses telecommunications equipment\nn04400289\ta communication system for communicating at a distance\nn04400499\tapparatus used to communicate at a distance over a wire (usually in Morse code)\nn04400737\tkey consisting of a lever that sends a telegraph signal when it is depressed and the circuit is closed\nn04400899\tany scientific instrument for observing events at a distance and transmitting the information back to the observer\nn04401088\telectronic equipment that converts sound into electrical signals that can be transmitted over distances and then converts received signals back into sounds\nn04401578\telectric bell that rings to signal a call\nn04401680\tbooth for using a telephone\nn04401828\tthe telephone wire that connects to the handset\nn04401949\ta jack for plugging in a telephone\nn04402057\ta telephone connection\nn04402342\ta plug for connecting a telephone\nn04402449\ttall pole supporting telephone wires\nn04402580\tearphone that converts electrical signals into sounds\nn04402746\ta communication system that transmits sound between distant points\nn04402984\tthe wire that carries telegraph and telephone signals\nn04403413\ta camera lens that magnifies the image\nn04403524\ta prompter for television performers\nn04403638\ta magnifier of images of distant objects\nn04403925\tgunsight consisting of a telescope on a firearm for use as a sight\nn04404072\ta 
thermometer that registers the temperature at some distant point\nn04404200\ta character printer connected to a telegraph that operates like a typewriter\nn04404412\ta telecommunication system that transmits images of objects (stationary or moving) between distant points\nn04404817\tan omnidirectional antenna tuned to the broadcast frequencies assigned to television\nn04404997\ttelevision equipment consisting of a lens system that focuses an image on a photosensitive mosaic that is scanned by an electron beam\nn04405540\telectronic equipment that broadcasts or receives electromagnetic waves representing images and sound\nn04405762\tmonitor used in a studio for monitoring the program being broadcast\nn04405907\tan electronic device that receives television signals and displays them on a screen\nn04406239\ta room set aside for viewing television\nn04406552\ttransmitter that is part of a television system\nn04406687\tone of the conveyances (or cars) in a telpherage\nn04406817\ta transportation system in which cars (telphers) are suspended from cables and operated on electricity\nn04407257\tpigment mixed with water-soluble glutinous materials such as size and egg yolk\nn04407435\tplace of worship consisting of an edifice for the worship of a deity\nn04407686\tan edifice devoted to special or exalted purposes\nn04408871\ta connection intended to be used for a limited time\nn04409011\tship that usually provides supplies to other ships\nn04409128\ta boat for communication between ship and shore\nn04409279\tcar attached to a locomotive to carry fuel and water\nn04409384\ta run-down apartment house barely meeting minimal standards\nn04409515\tball about the size of a fist used in playing tennis\nn04409625\ta camp where tennis is taught\nn04409806\ta racket used to play tennis\nn04409911\ta projection at the end of a piece of wood that is shaped to fit into a mortise and form a mortise joint\nn04410086\tany of various drums with small heads\nn04410365\ta tenor bassoon; 
pitched a fifth higher than the ordinary bassoon\nn04410485\ta nail 3 inches long\nn04410565\tone of the bottle-shaped pins used in bowling\nn04410663\ta manometer for measuring vapor pressure\nn04410760\ta measuring instrument for measuring the surface tension of a liquid\nn04410886\ta measuring instrument for measuring the tension in a wire or fiber or beam\nn04411019\ta measuring instrument for measuring the moisture content of soil\nn04411264\ta portable shelter (usually of canvas stretched over supporting poles and fastened to the ground with ropes and pegs)\nn04411835\ta framework with hooks used for stretching and drying cloth\nn04411966\tone of a series of hooks used to hold cloth on a tenter\nn04412097\tflap consisting of a piece of canvas that can be drawn back to provide entrance to a tent\nn04412300\ta peg driven into the ground to hold a rope supporting a tent\nn04412416\ta Native American tent; usually of conical shape\nn04413151\ta contact on an electrical device (such as a battery) at which electric current enters or leaves\nn04413419\telectronic equipment consisting of a device providing access to a computer; has a keyboard and display\nn04413969\ta house that is part of a terrace\nn04414101\ta hard unglazed brownish-red earthenware\nn04414199\ta vivarium in which selected living plants are kept and observed\nn04414319\tearthenware made from the reddish-brown clay found on the Aegean island of Lemnos\nn04414476\ta pile fabric (usually cotton) with uncut loops on both sides; used to make bath towels and bath robes\nn04414675\ta step-up transformer with an air core; used to produce high voltages at high frequencies\nn04414909\ta small square tile of stone or glass used in making mosaics\nn04415257\tequipment required to perform a test\nn04415663\ta rocket fired for test purposes\nn04415815\ta room in which tests are conducted\nn04416005\ta movable protective covering that provided protection from above; used by Roman troops when approaching the walls 
of a besieged fortification\nn04416901\ta figure consisting of four stylized human arms or legs (or bent lines) radiating from a center\nn04417086\ta thermionic tube having four electrodes\nn04417180\ta machine for making textiles\nn04417361\ta factory for making textiles\nn04417672\ta house roof made with a plant material (as straw)\nn04417809\ta building where theatrical performances or motion-picture shows can be presented\nn04418357\ta hanging cloth that conceals the stage from the view of the audience; rises or parts at the beginning and descends or closes between acts and at the end of a performance\nn04418644\tany of various lights used in a theater\nn04419073\ta surveying instrument for measuring horizontal and vertical angles, consisting of a small telescope mounted on a tripod\nn04419642\tan electronic musical instrument; melodies can be played by moving the right hand between two rods that serve as antennas to control pitch; the left hand controls phrasing\nn04419868\ta printer that produces characters by applying heat to special paper that is sensitive to heat\nn04420024\ta nuclear reactor in which nuclear fissions are caused by neutrons that are slowed down by a moderator\nn04420720\ta kind of thermometer consisting of two wires of different metals that are joined at both ends; one junction is at the temperature to be measured and the other is held at a fixed lower temperature; the current generated in the circuit is proportional to the temperature difference\nn04421083\ta thermometer that uses thermoelectric current to measure temperature\nn04421258\ta thermometer that records temperature variations on a graph as a function of time\nn04421417\tmedical instrument that uses an infrared camera to reveal temperature variations on the surface of the body\nn04421582\ta hydrometer that includes a thermometer\nn04421740\ta junction between two dissimilar metals across which a voltage appears\nn04421872\tmeasuring instrument for measuring 
temperature\nn04422409\ta nuclear reactor that uses controlled nuclear fusion to generate energy\nn04422566\ta kind of thermometer for measuring heat radiation; consists of several thermocouple junctions in series\nn04422727\tvacuum flask that preserves temperature of hot or cold drinks\nn04422875\ta regulator for automatically regulating temperature by starting or stopping the supply of heat\nn04423552\tprotective garment consisting of a pad worn over the thighs by football players\nn04423687\tone of two shafts extending from the body of a cart or carriage on either side of the animal that pulls it\nn04423845\ta small metal cap to protect the finger while sewing; can be used as a small container\nn04424692\tshears with one serrate blade; used for thinning hair\nn04425804\tthe base that must be touched third by a base runner in baseball\nn04425977\tthe third from the lowest forward ratio gear in the gear box of a motor vehicle\nn04426184\ta rail through which electric current is supplied to an electric locomotive\nn04426316\ta thin strip of leather; often used to lash things together\nn04426427\tunderpants resembling a G-string; worn by women especially under very tight pants\nn04427216\ta round arch whose inner curve is drawn with circles having three centers\nn04427473\tany ship having three decks\nn04427559\tradar that will report altitude as well as azimuth and distance of a target\nn04427715\ta business suit consisting of a jacket and vest and trousers\nn04427857\tthe spine and much of the sides are a different material from the rest of the cover\nn04428008\tan electric switch that has three terminals; used to control a circuit from two different locations\nn04428191\ta farm machine for separating seeds or grain from the husks and straw\nn04428382\ta floor or ground area for threshing or treading out grain\nn04428634\ta shop that sells secondhand goods at reduced prices\nn04429038\tprotective garment worn by hockey goalkeeper and catcher in 
baseball\nn04429376\tthe chair of state for a monarch, bishop, etc.\nn04430475\ta bearing designed to take thrusts parallel to the axis of revolution\nn04430605\ta small rocket engine that provides the thrust needed to maneuver a spacecraft\nn04430896\tthe part of a glove that provides a covering for the thumb\nn04431025\ta finger hole made to fit the thumb (as in a bowling ball)\nn04431436\tscrew designed to be turned with the thumb and fingers\nn04431648\tprotective covering for an injured thumb\nn04431745\ta tack for attaching papers to a bulletin board or drawing board\nn04431925\ta noisemaker that makes a sound like thunder\nn04432043\ta crosspiece spreading the gunnels of a boat; used as a seat in a rowboat\nn04432203\ta jeweled headdress worn by women on formal occasions\nn04432662\ta strong fabric used for mattress and pillow covers\nn04432785\ta small coil in series with the anode of a vacuum tube and coupled to the grid to provide feedback\nn04433377\ta horizontal beam used to prevent two other structural members from spreading apart or separating\nn04433585\tone of the cross braces that support the rails on a railway track\nn04434207\ta rack for storing ties\nn04434531\teither of two rods that link the steering gear to the front wheels\nn04434932\tskintight knit hose covering the body from the waist to the feet worn by acrobats and dancers and as stockings by women and girls\nn04435180\ta flat thin rectangular slab (as of fired clay or rubber or linoleum) used to cover surfaces\nn04435552\ta cutter (tool for cutting) for floor tiles\nn04435653\ta roof made of fired clay tiles\nn04435759\tlever used to turn the rudder on a boat\nn04435870\ta device for emptying a cask by tilting it without disturbing the dregs\nn04436012\ta pedestal table whose top is hinged so that it can be tilted to a vertical position\nn04436185\ta beam made of wood\nn04436329\ta post made of wood\nn04436401\ta hitch used to secure a rope to a log or spar; often supplemented by a half 
hitch\nn04436542\tsmall hand drum similar to a tambourine; formerly carried by itinerant jugglers\nn04436832\ta bomb that has a detonating mechanism that can be set to go off at a particular time\nn04436992\tcontainer for preserving historical records to be discovered at some future time\nn04437276\tclock used to record the hours that people work\nn04437380\tchronoscope for measuring the time difference between two events\nn04437670\ta fuse made to burn for a given time (especially to explode a bomb)\nn04437953\ta measuring instrument or device for keeping time\nn04438304\ta timepiece that measures a time interval and signals its end\nn04438507\ta regulator that activates or deactivates a mechanism at set times\nn04438643\ta switch set to operate at a desired time\nn04438897\ta vessel (box, can, pan, etc.) made of tinplate and used mainly in baking\nn04439505\ta box for holding tinder\nn04439585\tprong on a fork or pitchfork or antler\nn04439712\tfoil made of tin or an alloy of tin and lead\nn04440597\ta woman's fur shoulder cape with hanging ends; often consisting of the whole fur of a fox or marten\nn04440963\tchain attached to wheels to increase traction on ice or snow\nn04441093\thand tool consisting of a lever that is used to force the casing of a pneumatic tire onto a steel wheel\nn04441528\ta hat (Cockney rhyming slang: `tit for tat' rhymes with `hat')\nn04441662\tbarn originally built to hold tithes paid in kind and common in England\nn04441790\tan apparatus for performing a titration\nn04442312\ta kitchen appliance (usually electric) for toasting bread\nn04442441\tkitchen appliance consisting of a small electric oven for toasting or warming food\nn04442582\tlong-handled fork for cooking or toasting frankfurters or bread etc. 
(especially over an open fire)\nn04442741\ta rack for holding slices of toast\nn04443164\ta pouch for carrying pipe tobacco\nn04443257\ta shop that sells pipes and pipe tobacco and cigars and cigarettes\nn04443433\ta long narrow sled without runners; boards curve upward in front\nn04443766\ta drinking mug in the shape of a stout man wearing a three-cornered hat\nn04444121\ta bell used to sound an alarm\nn04444218\tthe part of footwear that provides a covering for the toes\nn04444749\ta protective leather or steel cover for the toe of a boot or shoe, reinforcing or decorating it\nn04444953\ta small foothold used in climbing\nn04445040\ta one-piece cloak worn by men in ancient Rome\nn04445154\t(ancient Rome) a toga worn by a youth as a symbol of manhood and citizenship\nn04445327\ta fastener consisting of a peg or pin or crosspiece that is inserted into an eye at the end of a rope or a chain or a cable in order to fasten it to something (as another rope or chain or cable)\nn04445610\ta fastener consisting of a threaded bolt and a hinged spring-loaded toggle; used to fasten objects to hollow walls\nn04445782\ta joint made by two arms attached by a pivot; used to apply pressure at the two ends by straightening the joint\nn04445952\ta hinged switch that can assume either of two positions\nn04446162\tinformal terms for clothing\nn04446276\ta room or building equipped with one or more toilets\nn04446844\ta waterproof bag for holding bathrooms items (soap and toothpaste etc.) 
when you are travelling\nn04447028\tthe bowl of a toilet that can be flushed with water\nn04447156\ta kit for carrying toilet articles while traveling\nn04447276\ta fine powder for spreading on the body (as after bathing)\nn04447443\tartifacts used in making your toilet (washing and taking care of your body)\nn04447861\tthe hinged seat on a toilet\nn04448070\ta perfumed liquid lighter than cologne\nn04448185\ta doughnut-shaped chamber used in fusion research; a plasma is heated and confined in a magnetic bottle\nn04448361\ta metal or plastic disk that can be redeemed or used in designated slot machines\nn04449290\ta booth at a tollgate where the toll collector collects tolls\nn04449449\ta bridge where toll is charged for crossing\nn04449550\ta gate or bar across a toll bridge or toll road which is lifted when the toll is paid\nn04449700\ta telephone line for long-distance calls\nn04449966\tweapon consisting of a fighting ax; used by North American Indians\nn04450133\ta .45-caliber submachine gun\nn04450243\tX-ray machine in which a computer builds a detailed image of a particular plane through an object from multiple X-ray measurements\nn04450465\tmechanical device consisting of a light balanced arm that carries the cartridge\nn04450640\ta lotion for cleansing the skin and contracting the pores\nn04450749\tany of various devices for taking hold of objects; usually have two hinged legs with handles above and pointed hooks below\nn04450994\tthe flap of material under the laces of a shoe or boot\nn04451139\ta mortise joint made by fitting a projection on the edge of one board into a matching groove on another board\nn04451318\ta thin depressor used to press the tongue down during an examination of the mouth and throat\nn04451636\tmeasuring instrument for measuring tension or pressure (especially for measuring intraocular pressure in testing for glaucoma)\nn04451818\tan implement used in the practice of a vocation\nn04452528\ta bag in which tools are 
carried\nn04452615\ta box or chest or cabinet for holding hand tools\nn04452757\ta shed for storing tools\nn04452848\tsomething resembling the tooth of an animal\nn04453037\tone of a number of uniform projections on a gear\nn04453156\tsmall brush; has long handle; used to clean teeth\nn04453390\tpick consisting of a small strip of wood or plastic; used to pick food from between the teeth\nn04453666\ta garment (especially for women) that extends from the shoulders to the waist or hips\nn04453910\tcovering for a hole (especially a hole in the top of a container)\nn04454654\ta mast fixed to the head of a topmast on a square-rigged vessel\nn04454792\ta sail set on a yard of a topgallant mast\nn04454908\ta garden having shrubs clipped or trimmed into decorative shapes especially of animals\nn04455048\theaddress consisting of a decorative ribbon or bow worn in the hair\nn04455250\tthe mast next above a lower mast and topmost in a fore-and-aft rig\nn04455579\ta woman's short coat\nn04455652\ta sail (or either of a pair of sails) immediately above the lowermost sail of a mast and supported by a topmast\nn04456011\ta tall white hat with a pouched crown; worn by chefs\nn04456115\ta light usually carried in the hand; consists of some flammable substance\nn04456472\tarmament consisting of a long cylindrical self-propelled underwater projectile that detonates on contact with a target\nn04456734\ta small explosive device that is placed on a railroad track and fires when a train runs over it; the sound of the explosion warns the engineer of danger ahead\nn04457157\tan explosive device that is set off in an oil well (or a gas well) to start or to increase the flow of oil (or gas)\nn04457326\tsmall high-speed warship designed for torpedo attacks in coastal waters\nn04457474\tsmall destroyer that was the forerunner of modern destroyers; designed to destroy torpedo boats\nn04457638\ta tube near the waterline of a vessel through which a torpedo is fired\nn04457767\tconverter for 
transmitting and amplifying torque (especially by hydraulic means)\nn04457910\ta wrench that has a gauge that indicates the amount of torque being applied\nn04458201\ta room in which torture is inflicted\nn04458633\ta tribal emblem consisting of a pillar carved and painted with totemic figures; erected by Indian tribes of the northwest Pacific coast\nn04458843\ta computer display that enables the user to interact with the computer by touching areas on the screen\nn04459018\ta small hairpiece to cover partial baldness\nn04459122\tlarge open car seating four with folding top\nn04459243\tinexpensive accommodations on a ship or train\nn04459362\ta rectangular piece of absorbent cloth (or paper) for drying or wiping\nn04459610\tany of various fabrics (linen or cotton) used to make towels\nn04459773\ta rack consisting of one or more bars on which towels can be hung\nn04459909\ta horizontal bar a few inches from a wall for holding towels\nn04460130\ta structure taller than its diameter; can stand alone or be attached to a larger building\nn04461437\ta government building that houses administrative offices of a town government\nn04461570\ta path along a canal or river used by animals towing boats\nn04461696\ta truck equipped to hoist and pull wrecked cars (or to remove cars from no-parking zones)\nn04461879\ta device regarded as providing amusement\nn04462011\tchest for storage of toys\nn04462240\tshop where toys are sold\nn04462576\ta screening device for traces of explosives; used at airline terminals\nn04463679\ta bar or pair of parallel bars of rolled steel making the railway along which railroad cars or other vehicles can roll\nn04464125\ta groove on a phonograph recording\nn04464615\tan electronic device consisting of a rotatable ball in a housing; used to position the cursor and move images on a computer screen\nn04464852\ta self-propelled vehicle that moves on tracks\nn04465050\tone of many houses of similar design constructed together on a tract of 
land\nn04465203\thousing consisting of similar houses constructed together on a tract of land\nn04465358\tsteam-powered locomotive for drawing heavy loads along surfaces other than tracks\nn04465501\ta wheeled vehicle with large wheels; used in farming and other applications\nn04465666\ta truck that has a cab but no body; used for pulling large trailers or vans\nn04466871\ta lightweight motorcycle equipped with rugged tires and suspension; an off-road motorcycle designed for riding cross country or over unpaved ground\nn04467099\ta wheeled vehicle that can be pulled by a car or truck and is equipped for occupancy\nn04467307\ta large transport conveyance designed to be pulled by a truck or tractor\nn04467506\ta camp where space for house trailers can be rented; utilities are generally provided\nn04467665\ta truck consisting of a tractor and trailer together\nn04467899\tthe rear edge of an airfoil\nn04468005\tpublic transport provided by a line of railway cars coupled together and drawn by a locomotive\nn04469003\tthe track on which trams or streetcars run\nn04469251\tan adjustable pothook set in a fireplace\nn04469514\tgymnastic apparatus consisting of a strong canvas sheet attached with springs to a metal frame; used for tumbling\nn04469684\ta commercial steamer for hire; one having no regular schedule\nn04469813\ta conveyance that transports passengers or freight in carriers suspended from cables and supported by a series of towers\nn04470741\ta medicated adhesive pad placed on the skin for absorption of a time released dose of medication into the bloodstream\nn04471148\tstructure forming the transverse part of a cruciform church; crosses the nave at right angles\nn04471315\tan electrical device by which alternating current of one voltage is changed to another voltage\nn04471632\ta semiconductor device capable of amplification\nn04471912\ta telescope mounted on an axis running east and west and used to time the transit of a celestial body across the 
meridian\nn04472243\tthe gears that transmit power from an automobile engine via the driveshaft to the live axle\nn04472563\trotating shaft that transmits rotary motion from the engine to the differential\nn04472726\tset used to broadcast radio or tv signals\nn04472961\ta horizontal crosspiece across a window or separating a door from a window over it\nn04473108\ta window above a door that is usually hinged to a horizontal crosspiece over the door\nn04473275\telectrical device designed to receive a specific signal and automatically transmit a specific reply\nn04473884\ta crane for moving material with dispatch as in loading and unloading ships\nn04474035\ta long truck for carrying motor vehicles\nn04474187\ta ship for carrying soldiers or military equipment\nn04474466\ta device in which something (usually an animal) can be caught and penned\nn04475309\ta hinged or sliding door in a floor or ceiling\nn04475411\ta swing used by circus acrobats\nn04475496\ta horizontal beam that extends across something\nn04475631\ta small lightweight iron that can be carried while traveling\nn04475749\ta conical fishnet dragged through the water at great depths\nn04475900\ta long fishing line with many shorter lines and hooks attached to it (usually suspended between buoys)\nn04476116\ta fishing boat that uses a trawl net or dragnet to catch fish\nn04476259\tan open receptacle for holding or displaying or serving articles or food\nn04476526\ttable linen consisting of a small cloth for a tray\nn04476831\tstructural member consisting of the horizontal part of a stair or step\nn04476972\tthe part (as of a wheel or shoe) that makes contact with the ground\nn04477219\ta mill that is powered by men or animals walking on a circular belt or climbing steps\nn04477387\tan exercise device consisting of an endless belt on which a person can walk or jog without changing place\nn04477548\ta chest filled with valuables\nn04477725\ta 16th-century ship loaded with treasure\nn04478066\ta wooden peg 
that is used to fasten timbers in shipbuilding; water causes the peg to swell and hold the timbers fast\nn04478383\ta pointed arch having cusps in the intrados on either side of the apex\nn04478512\tlatticework used to support climbing plants\nn04478657\ta ditch dug as a fortification having a parapet of the excavated earth\nn04479046\ta military style raincoat; belted with deep pockets\nn04479287\ta knife with a double-edged blade for hand-to-hand fighting\nn04479405\ta drill for cutting circular holes around a center\nn04479526\ta surgical instrument used to remove sections of bone from the skull\nn04479694\tsawhorses used in pairs to support a horizontal tabletop\nn04479823\ta supporting tower used to support a bridge\nn04479939\ta bridge supported by trestlework\nn04480033\ta table supported on trestles\nn04480141\ta supporting structure composed of a system of connected trestles; for a bridge or pier or scaffold e.g.\nn04480303\ttight-fitting trousers; usually of tartan\nn04480527\ta balloon sent up to test air currents\nn04480853\ta percussion instrument consisting of a metal bar bent in the shape of an open triangle\nn04480995\tany of various triangular drafting instruments used to draw straight lines at specified angles\nn04481524\ta dining table with couches along three sides in ancient Rome\nn04481642\ta dining room (especially a dining room containing a dining table with couches along three sides)\nn04482177\tcocked hat with the brim turned up to form three points\nn04482297\ta knitted fabric or one resembling knitting\nn04482393\ta vehicle with three wheels that is moved by foot pedals\nn04482975\ta spear with three prongs\nn04483073\ta device that activates or releases or causes something to happen\nn04483307\ta fast sailboat with 3 parallel hulls\nn04483925\ta machine that trims timber\nn04484024\tan arch built between trimmers in a floor (to support the weight of a hearth)\nn04484432\ta thermionic vacuum tube having three electrodes; fluctuations of 
the charge on the grid control the flow from cathode to anode which makes amplification possible\nn04485082\ta three-legged rack used for support\nn04485423\tart consisting of a painting or carving (especially an altarpiece) on three panels (usually hinged together)\nn04485586\ta wire stretched close to the ground that activates something (a trap or camera or weapon) when tripped over\nn04485750\tancient Greek or Roman galley or warship having three tiers of oars on each side\nn04485884\ta figure consisting of three stylized human arms or legs (or three bent lines) radiating from a center\nn04486054\ta monumental archway; usually they are built to commemorate some notable victory\nn04486213\ta stand with short feet used under a hot dish on a table\nn04486322\ta three-legged metal stand for supporting a cooking vessel in a hearth\nn04486616\ta Russian carriage pulled by three horses abreast\nn04486934\ta fisherman's lure that is used in trolling\nn04487081\ta passenger bus with an electric motor that draws power from overhead wires\nn04487394\ta brass instrument consisting of a long tube whose length can be varied by a U-shaped slide\nn04487724\tany land or sea or air vehicle designed to carry troops\nn04487894\tship for transporting troops\nn04488202\ta case in which to display trophies\nn04488427\ta long narrow shallow receptacle\nn04488530\ta garment (or part of a garment) designed for or relating to trousers\nn04488742\ta cuff on the bottoms of trouser legs\nn04488857\ta home appliance in which trousers can be hung and the wrinkles pressed out\nn04489008\t(usually in the plural) a garment extending from the waist to the knee or ankle, covering each leg separately\nn04489695\tthe personal outfit of a bride; clothes and accessories and linens\nn04489817\ta small hand tool with a handle and flat metal blade; used for scooping or spreading plaster or similar materials\nn04490091\tan automotive vehicle suitable for hauling\nn04491312\ta conical squinch\nn04491388\ta 
short stout club used primarily by policemen\nn04491638\ta low bed to be slid under a higher bed\nn04491769\tluggage consisting of a large strong case used when traveling or for storage\nn04491934\tpuffed breeches of the 16th and 17th centuries usually worn over hose\nn04492060\thinged lid for a trunk\nn04492157\ta telephone line connecting two exchanges directly\nn04492375\ta framework of beams (rafters, posts, struts) forming a rigid structure that supports a roof or bridge or other structure\nn04492749\ta bridge supported by trusses\nn04493109\ta square having a metal ruler set at right angles to another straight piece\nn04493259\ta square used by draftsmen to draw parallel lines\nn04493381\ta large open vessel for holding or storing liquids\nn04494204\telectronic device consisting of a system of electrodes arranged in an evacuated glass or metal envelope\nn04495051\ta box for storing eatables (especially at boarding school)\nn04495183\ta detachable yoke of linen or lace worn over the breast of a low-cut dress\nn04495310\ta bag used for carrying food\nn04495450\ta candy store in Great Britain\nn04495555\ta low elliptical or pointed arch; usually drawn from four centers\nn04495698\ta scarf worn around the head by Muslim women in Malaysia; conceals the hair but not the face\nn04495843\ta powerful small boat designed to pull or push larger ships\nn04496614\ta fine (often starched) net used for veils or tutus or gowns\nn04496726\ta clothes dryer that spins wet clothes inside a cylinder with heated air\nn04496872\ta glass with a flat bottom but no handle or stem; originally had a round bottom\nn04497249\ta farm dumpcart for carrying dung; carts of this type were used to carry prisoners to the guillotine during the French Revolution\nn04497442\ta large cask especially one holding a volume equivalent to 2 butts or 252 gals\nn04497570\tany of a variety of loose fitting cloaks extending to the hips or knees\nn04497801\ta metal implement with two prongs that gives a fixed 
tone when struck; used to tune musical instruments\nn04498275\ttent that is an Eskimo summer dwelling\nn04498389\ta traditional Muslim headdress consisting of a long scarf wrapped around the head\nn04498523\trotary engine in which the kinetic energy of a moving fluid is converted into mechanical energy by causing a bladed rotor to rotate\nn04498873\tgenerator consisting of a steam turbine coupled to an electric generator for the production of electric power\nn04499062\tlarge deep serving dish with a cover; for serving soups and stews\nn04499300\ta steam room where facilities are available for a bath followed by a shower and massage\nn04499446\ta bath towel with rough loose pile\nn04499554\tan ornamental knot that resembles a small turban\nn04499810\tan oblong metal coupling with a swivel at one end and an internal thread at the other into which a threaded rod can be screwed in order to form a unit that can be adjusted for length or tension\nn04500060\tcooking utensil having a flat flexible part and a long handle; used for turning or serving food\nn04500390\tworkshop where objects are made on a lathe\nn04501127\t(from 16th to 19th centuries) gates set across a road to prevent passage until a toll had been paid\nn04501281\ta roasting spit that can be turned\nn04501370\ta gate consisting of a post that acts as a pivot for rotating arms; set in a passageway for controlling the persons entering\nn04501550\ta circular horizontal platform that rotates a phonograph record while it is being played\nn04501837\ta revolving tray placed on a dining table\nn04501947\ta small tower extending above a building\nn04502059\ta clock with more than one dial to show the time in all directions from a tower\nn04502197\ta sweater or jersey with a high close-fitting collar\nn04502502\tthick woolen fabric used for clothing; originated in Scotland\nn04502670\ta loudspeaker that reproduces higher audio frequency sounds\nn04502851\ta .22 caliber firearm (pistol or rifle)\nn04502989\ta 
.22-caliber pistol\nn04503073\ta .22-caliber rifle\nn04503155\ta cloth with parallel diagonal lines or ribs\nn04503269\ta weave used to produce the effect of parallel diagonal ribs\nn04503413\tone of a pair of identical beds\nn04503499\ta jet plane propelled by two jet engines\nn04503593\ta bit or drill having deep helical grooves\nn04503705\ta timber measuring (slightly under) 2 inches by 4 inches in cross section\nn04504038\ta tent designed for occupancy by two persons\nn04504141\ta business suit consisting of a matching jacket and skirt or trousers\nn04504770\ta printer that sets textual material in type\nn04505036\thand-operated character printer for printing written messages one character at a time\nn04505345\ta carriage for carrying a sheet of paper\nn04505470\ta keyboard for manually entering characters to be printed\nn04505888\tsoft green felt hat with a feather or brush cockade\nn04506289\ta small guitar having four strings\nn04506402\tloose long overcoat of heavy fabric; usually belted\nn04506506\ta high speed centrifuge used to determine the relative molecular masses of large molecules in high polymers and proteins\nn04506688\tlight microscope that uses scattered light to show particles too small to see with ordinary microscopes\nn04506895\ta synthetic suede cloth\nn04506994\tany source of illumination that emits ultraviolet radiation\nn04507155\ta lightweight handheld collapsible canopy\nn04507326\ta small tent with a single supporting pole and radiating metal ribs\nn04507453\tframework that serves as a support for the body of a vehicle\nn04507689\tseal consisting of a coating of a tar or rubberlike material on the underside of a motor vehicle to retard corrosion\nn04508163\ta garment worn under other garments\nn04508489\tan undergarment that covers the body from the waist no further than to the thighs; usually worn next to the skin\nn04508949\tundergarment worn next to the skin and under the outer garments\nn04509171\twomen's underwear\nn04509260\ta 
pair of parallel bars set at different heights; used in women's gymnastics\nn04509417\ta vehicle with a single wheel that is driven by pedals\nn04509592\tclothing of distinctive design worn by members of a particular group as a means of identification\nn04510706\tcoupling that connects two rotating shafts allowing freedom of movement in all directions\nn04511002\testablishment where a seat of higher learning is housed, including administrative and living quarters as well as facilities for research and teaching\nn04513827\tcovering (padding and springs and webbing and fabric) on a piece of furniture\nn04513998\tthe fabric used in upholstering\nn04514095\tany of several very heavy and sometimes curved sewing needles used by upholsterers\nn04514241\ta brassiere that lifts and supports the breasts\nn04514648\tthe higher of two berths\nn04515003\ta piano with a vertical sounding board\nn04515444\ta tool used to thicken or spread metal (the end of a bar or a rivet etc.) by forging or hammering or swaging\nn04515729\tthe part of a building above the ground floor\nn04515890\ta vessel that holds water for washing the hands\nn04516116\ta large vase that usually has a pedestal or feet\nn04516214\ta large pot for making coffee or tea\nn04516354\ta car that has been previously owned; not a new car\nn04516672\tan implement for practical use (especially in a household)\nn04517211\ta type of submachine gun that is designed and manufactured in Israel\nn04517408\ta dwelling (a second home) where you live while you are on vacation\nn04517823\tan electrical home appliance that cleans by suction\nn04517999\ta chamber from which nearly all matter (especially air) has been removed\nn04518132\tflask with double walls separated by vacuum; used to maintain substances at high or low temperatures\nn04518343\ta gauge for indicating negative atmospheric pressure\nn04518643\ta type of bobbin lace with floral patterns\nn04518764\ta small overnight bag for short trips\nn04519153\tcontrol 
consisting of a mechanical device for controlling the flow of a fluid\nn04519536\tdevice in a brass wind instrument for varying the length of the air column to alter the pitch of a tone\nn04519728\tinternal-combustion engine having both inlet and exhaust valves located in the cylinder head\nn04519887\tcannon of plate armor protecting the forearm\nn04520170\ta truck with an enclosed cargo space\nn04520382\ta camper equipped with living quarters\nn04520784\ta fin attached to the tail of an arrow, bomb or missile in order to stabilize or guide it\nn04520962\ta device that puts out a substance in the form of a vapor (especially for medicinal inhalation)\nn04521571\tpropeller for which the angle of the blades is adjustable\nn04521863\ta measuring instrument for measuring variations in a magnetic field\nn04521987\ta coating that provides a hard, lustrous, transparent finish to a surface\nn04522168\tan open jar of glass or porcelain used as an ornament or to hold flowers\nn04523525\tan arched brick or stone ceiling or roof\nn04523831\ta strongroom or compartment (often made of steel) for safekeeping of valuables\nn04524142\ta gymnastic horse without pommels and with one end elongated; used lengthwise for vaulting\nn04524313\ta conveyance that transports people or objects\nn04524594\tnylon fabric used as a fastening\nn04524716\tany of several early bicycles with pedals on the front wheel\nn04524941\theavy fabric that resembles velvet\nn04525038\ta silky densely piled fabric with a plain back\nn04525191\ta usually cotton fabric with a short pile imitating velvet\nn04525305\ta slot machine for selling goods\nn04525417\tcoating consisting of a thin layer of superior wood glued to a base of inferior wood\nn04525584\ta window blind made of horizontal strips that overlap when closed\nn04525821\ta diagram that uses circles to represent mathematical or logical sets pictorially inside a rectangle (the universal set); elements that are common to more than one set are represented by 
intersections of the circles\nn04526520\ta mechanical system in a building that provides fresh air\nn04526800\ta shaft in a building; serves as an air passage for ventilation\nn04526964\ta device (such as a fan) that introduces fresh air or expels foul air\nn04527648\ta porch along the outside of a building (sometimes partly enclosed)\nn04528079\ta green patina that forms on copper or brass or bronze that has been exposed to the air or water for long periods of time\nn04528968\ta caliper with a vernier scale for very fine measurements\nn04529108\ta small movable scale that slides along a main scale; the small scale is calibrated to indicate fractional divisions of the main scale\nn04529681\ta file in which records are stored upright on one edge\nn04529962\ta stabilizer that is part of the vertical tail structure of an airplane\nn04530283\tthe vertical airfoil in the tail assembly of an aircraft\nn04530456\ta pistol for firing Very-light flares\nn04530566\ta craft designed for water transportation\nn04531098\tan object used as a container (especially for liquids)\nn04531873\ta man's sleeveless garment worn underneath a coat\nn04532022\tan archaic term for clothing\nn04532106\tgown (especially ceremonial garments) worn by the clergy\nn04532398\ta small pocket in a man's vest\nn04532504\ta room in a church where sacred vessels and vestments are kept or meetings are held\nn04532670\tbridge consisting of a series of arches supported by piers used to carry a road (or railroad) over a valley\nn04532831\ta percussion instrument similar to a xylophone but having metal bars and rotating disks in the resonators that produce a vibrato sound\nn04533042\tmechanical device that produces vibratory motion; used for massage\nn04533199\ta mechanical device that vibrates\nn04533499\ta brand of gramophone\nn04533594\ta soft wool fabric made from the fleece of the vicuna\nn04533700\ta cassette for videotape\nn04533802\ta magnetic tape recorder for recording (and playing back) TV 
programs\nn04533946\ta digital recording (as of a movie) on an optical disk that can be played on a computer or a television set\nn04534127\ta recording of both the visual and audible components (especially one containing a recording of a movie or television program)\nn04534359\ta relatively wide magnetic tape for use in recording visual images and associated sound\nn04534520\ta video recording made on magnetic tape\nn04534895\ta candle lighted by a worshiper in a church\nn04535252\tpretentious and luxurious country residence with extensive grounds\nn04535370\tcountry house in ancient Rome consisting of residential quarters and farm buildings around a courtyard\nn04535524\tdetached or semidetached suburban house\nn04536153\tany of a family of bowed stringed instruments that preceded the violin family\nn04536335\ta bowed stringed instrument slightly larger than a violin, tuned a fifth lower\nn04536465\ta member of the viol family with approximately the range of a viola\nn04536595\tviol that is the bass member of the viol family with approximately the range of the cello\nn04536765\tviol that is the tenor of the viol family\nn04536866\tbowed stringed instrument that is the highest member of the violin family; this instrument has four strings and a hollow body and an unfretted fingerboard and is played with a bow\nn04537436\ta legless rectangular harpsichord; played (usually by women) in the 16th and 17th centuries\nn04538249\ta measuring instrument for measuring viscosity\nn04538403\ta rayon fabric made from viscose (cellulose xanthate) fibers\nn04538552\ta holding device attached to a workbench; has two jaws to hold workpiece firmly in place\nn04538878\ta piece of armor plate (with eye slits) fixed or hinged to a medieval helmet to protect the face\nn04539053\t(British) British term for video display\nn04539203\tan indoor enclosure for keeping and raising living animals and plants and observing them under natural conditions\nn04539407\ta fabric made from a twilled 
mixture of cotton and wool\nn04539794\ta light semitransparent fabric\nn04540053\tan inflated ball used in playing volleyball\nn04540255\tthe high net that separates the two teams and over which the volleyball must pass\nn04540397\ta transformer whose voltage ratio of transformation can be adjusted\nn04540761\tan electric cell that generates an electromotive force by an irreversible conversion of chemical to electrical energy; cannot be recharged\nn04541136\tbattery consisting of voltaic cells arranged in series; the earliest electric battery devised by Volta\nn04541320\tmeter that measures the potential difference between two points\nn04541662\tan entrance to an amphitheater or stadium\nn04541777\tany digital computer incorporating the ideas of stored programs and serial counters that were proposed in 1946 by von Neumann and his colleagues\nn04541987\ta booth in which a person can cast a private vote\nn04542095\ta mechanical device for recording and counting votes mechanically\nn04542329\twedge-shaped stone building block used in constructing an arch or vault\nn04542474\tan organ stop producing a gentle tremolo effect\nn04542595\tan organ reed stop producing tones imitative of the human voice\nn04542715\twaterproof hip boots (sometimes extending to the chest) worn by anglers\nn04542858\ta shallow pool for children\nn04542943\ta kitchen appliance for baking waffles; the appliance usually consists of two indented metal pans hinged together so that they create a pattern on the waffle\nn04543158\tany of various kinds of wheeled vehicles drawn by an animal or a tractor\nn04543509\ta child's four-wheeled toy cart sometimes used for coasting\nn04543636\ta metal hoop forming the tread of a wheel\nn04543772\ta wheel of a wagon\nn04543924\tlarge open farm wagon\nn04543996\twooden panels that can be used to line the walls of a room\nn04544325\ta wainscoted wall (or wainscoted walls collectively)\nn04544450\ta small pouch (usually with a zipper) that attaches to a belt and is 
worn around the waist\nn04545305\tan enclosing framework on casters or wheels; helps babies learn to walk\nn04545471\ta light enclosing framework (trade name Zimmer) with rubber castors or wheels and handles; helps invalids or the handicapped or the aged to walk\nn04545748\ta shoe designed for comfortable walking\nn04545858\tsmall portable radio link (receiver and transmitter)\nn04545984\ta small room large enough to admit entrance\nn04546081\ta light comfortable shoe designed for vigorous walking\nn04546194\ta stick carried in the hand for support in walking\nn04546340\t(trademark) a pocket-sized stereo system with light weight earphones\nn04546595\tan apartment in a building without an elevator\nn04546855\tan architectural partition with a height and length greater than its thickness; used to divide or enclose an area or to support another structure\nn04547592\ta masonry fence (as around an estate or garden)\nn04548280\ta clock mounted on a wall\nn04548362\ta pocket-size case for holding papers and paper money\nn04549028\ta canvas tent with four vertical walls\nn04549122\ta piece of furniture having several units that stands against one wall of a room\nn04549629\ta rod used by a magician or water diviner\nn04549721\ta rotary engine that is a four-stroke internal-combustion engine without reciprocating parts\nn04549919\tblock forming a division of a hospital (or a suite of rooms) shared by patients who need a similar kind of care\nn04550184\ta tall piece of furniture that provides storage space for clothes; has a door and rails or hooks for hanging clothes\nn04550676\tmilitary quarters for dining and recreation for officers of a warship (except the captain)\nn04551055\ta storehouse for goods and merchandise\nn04551833\ta long-handled covered pan holding live coals to warm a bed\nn04552097\tfull ceremonial regalia\nn04552348\tan aircraft designed and used for combat\nn04552551\ta room where strategic decisions are made (especially for military or political 
campaigns)\nn04552696\ta government ship that is available for waging war\nn04553389\ta thin coat of water-base paint\nn04553561\ta fabric treated to be easily washable and to require no ironing\nn04553703\ta basin for washing the hands (`wash-hand basin' is a British expression)\nn04554211\tprotective covering consisting of a broad plank along a gunwale to keep water from splashing over the side\nn04554406\tdevice consisting of a corrugated surface to scrub clothes on\nn04554684\ta home appliance for washing clothes and linens automatically\nn04554871\tseal consisting of a flat disk placed to prevent leakage\nn04554998\ta building or outbuilding where laundry is done\nn04555291\ta lavatory (particularly a lavatory in a public place)\nn04555400\tfurniture consisting of a table or stand to hold a basin and pitcher of water for washing: `wash-hand stand' is a British term\nn04555600\ta tub in which clothes or linens can be washed\nn04555700\ta container with an open top; for discarded paper and other rubbish\nn04555897\ta small portable timepiece\nn04556408\ta knitted dark blue wool cap worn by seamen in cold or stormy weather\nn04556533\tthe metal case in which the works of a watch are housed\nn04556664\tlaboratory glassware; a shallow glass dish used as an evaporating surface or to cover a beaker\nn04556948\tan observation tower for a lookout to watch over prisoners or watch for fires or enemies\nn04557308\tpaint in which water is used as the vehicle\nn04557522\ta bed with a mattress made of strong plastic that is filled with water\nn04557648\ta bottle for holding water\nn04557751\ta butt set on end to contain water especially to store rainwater\nn04558059\tcart with a tank for water (especially with fresh water for sale)\nn04558199\tchute with flowing water down which toboggans and inner tubes and people slide into a pool\nn04558478\ta toilet in Britain\nn04558804\ta water-base paint (with water-soluble pigments); used by artists\nn04559023\tnuclear reactor using 
water as a coolant\nn04559166\ta device for cooling and dispensing drinking water\nn04559451\ta faucet for drawing water from a pipe or cask\nn04559620\ta filter to remove impurities from the water supply\nn04559730\tgauge for indicating the level of water in e.g. a tank or boiler or reservoir\nn04559910\ta glass for drinking water\nn04559994\thazard provided by ponds of water that the golfer must avoid\nn04560113\ta heater and storage tank to supply heated water\nn04560292\ta container with a handle and a spout with a perforated nozzle; used to sprinkle water over plants\nn04560502\twater cart with a tank and sprinkler for sprinkling roads\nn04560619\ta container filled with water that surrounds a machine to cool it; especially that surrounding the cylinder block of an engine\nn04560804\ta jug that holds water\nn04560882\ta pool or stream in a steeplechase or similar contest\nn04561010\ta water gauge that shows the level by showing the surface of the water in a trough or U-shaped tube\nn04561287\tmeter for measuring the quantity of water passing through a particular outlet\nn04561422\ta mill powered by a water wheel\nn04561734\tany fabric impervious to water\nn04561857\ta coating capable of making a surface waterproof\nn04561965\tthe pump in the cooling system of an automobile that cause the water to circulate\nn04562122\ta motorboat resembling a motor scooter\nn04562262\tbroad ski for skimming over water towed by a speedboat\nn04562496\ta channel through which water is discharged (especially one used for drainage from the gutters of a roof)\nn04562935\ta large reservoir for water\nn04563020\ta wagon that carries water (as for troops or work gangs or to sprinkle down dusty dirt roads in the summertime)\nn04563204\ta wheel that rotates by direct action of water; a simple turbine\nn04563413\ta wheel with buckets attached to its rim; raises water from a stream or pond\nn04563560\ta life preserver consisting of a connected pair of inflatable bags that fit under a 
person's arms and provide buoyancy; used by children learning to swim\nn04563790\tworkplace where water is stored and purified and distributed for a community\nn04564278\tan instrument for measuring in watts the flow of power in an electrical circuit\nn04564581\tan effigy (usually of a famous person) made of wax\nn04565039\tstructure consisting of a sloping way down to the water from the place where ships are built or repaired\nn04565375\tany instrument or instrumentality used in fighting or hunting\nn04566257\tweapons considered collectively\nn04566561\tmilitary vehicle that is a light truck designed to carry mortars or machine guns and their crews\nn04566756\tweathervane with a vane in the form of a rooster\nn04567098\ta simple barometer for indicating changes in atmospheric pressure\nn04567593\ta satellite that transmits frequent picture of the earth below\nn04567746\tan oceangoing vessel equipped to make meteorological observations\nn04568069\tmechanical device attached to an elevated structure; rotates freely to show the direction of the wind\nn04568557\tan intricate trap that entangles or ensnares its victim\nn04568713\ta fabric (especially a fabric in the process of being woven)\nn04568841\ta strong fabric woven in strips\nn04569063\ta digital camera designed to take digital photographs and transmit them over the internet\nn04569520\tsomething solid that is usable as an inclined plane (shaped like a V) that can be pushed between two things to separate them\nn04569822\t(golf) an iron with considerable loft and a broad sole\nn04570118\ta shoe with a wedge heel\nn04570214\ta type of pottery made by Josiah Wedgwood and his successors; typically has a classical decoration in white on a blue background\nn04570416\ta hand tool for removing weeds\nn04570532\ta black garment (dress) worn by a widow as a sign of mourning\nn04570815\ta small suitcase to carry clothing and accessories for a weekend trip\nn04570958\tplatform scale flush with a roadway for weighing 
vehicles and cattle etc\nn04571292\tsports equipment used in calisthenic exercises and weightlifting; it is not attached to anything and is raised and lowered by use of the hands and arms\nn04571566\ta low dam built across a stream to raise its level or divert its flow\nn04571686\ta fence or wattle built across a stream to catch or retain fish\nn04571800\ta wheeled vehicle carrying information and gifts from local merchants for new residents in an area\nn04571958\ta metal joint formed by softening with heat and fusing or hammering together\nn04572121\ta mask that you wear for protection when doing welding\nn04572235\tan assembly of parts welded together\nn04572935\ta cavity or vessel used to contain liquid\nn04573045\ta structure built over a well\nn04573281\ta raised or strengthened seam\nn04573379\ta standard voltaic cell (trademark Weston)\nn04573513\ta bar for mixing drinks that has a sink with running water\nn04573625\ta thermometer with a bulb that is covered with moist muslin; used in a psychrometer to measure humidity\nn04573832\ta primary voltaic cell having a liquid electrolyte\nn04573937\tfisherman's fly that floats under the surface of the water\nn04574067\ta close-fitting garment made of a permeable material; worn in cold water (as by skin divers) to retain body heat\nn04574348\ta long narrow boat designed for quick turning and use in rough seas\nn04574471\ta ship engaged in whale fishing\nn04574606\ta gun (or device resembling a gun) for discharging a projectile (especially a harpoon) at a whale\nn04574999\ta simple machine consisting of a circular frame with spokes (or a solid disc) that can rotate on a shaft or axle (as in vehicles or other machines)\nn04575723\ta circular helm to control the rudder of a vessel\nn04575824\thoist so arranged that a rope unwinding from a wheel is wound onto a cylindrical drum or shaft coaxial with the wheel\nn04576002\ta movable chair mounted on large wheels; for invalids or those who cannot walk; frequently propelled 
by the occupant\nn04576211\ta vehicle that moves on wheels and usually has a container for transporting things or people\nn04576971\tmechanical device including an arrangement of wheel in a machine (especially a train of gears)\nn04577139\tlight rowboat for use in racing or for transporting goods and passengers in inland waters and harbors\nn04577293\tsailing barge used especially in East Anglia\nn04577426\ta flat stone for sharpening edged tools or knives\nn04577567\ta crossbar that is attached to the traces of a draft horse and to the vehicle or implement that the horse is pulling\nn04577769\tan instrument with a handle and a flexible lash that is used for whipping\nn04578112\ta strong worsted or cotton fabric with a diagonal rib\nn04578329\tpost formerly used in public to which offenders are tied to be whipped\nn04578559\ta sewing stitch passing over an edge diagonally\nn04578708\ta revolving mechanism\nn04578801\ta small short-handled broom used to brush clothes\nn04578934\ta mixer incorporating a coil of wires; used for whipping eggs or cream\nn04579056\ta bottle for holding whiskey\nn04579145\ta jug that contains whiskey\nn04579230\ta space beneath a dome or arch in which sounds produced at certain points are clearly audible at certain distant points\nn04579432\tacoustic device that forces air or steam against an edge or into a cavity and so produces a loud shrill sound\nn04579667\ta small wind instrument that produces a whistling sound by blowing into it\nn04579986\t(board games) the lighter pieces\nn04580493\tlarge electrical home appliances (refrigerators or washing machines etc.) 
that are typically finished in white enamel\nn04581102\twash consisting of lime and size in water; used for whitening walls and other surfaces\nn04581595\ta building where prostitutes are available\nn04581829\ta loosely woven cord (in a candle or oil lamp) that draws fuel by capillary action up into the flame\nn04582205\twork made of interlaced slender branches (especially willow branches)\nn04582349\ta basket made of wickerwork\nn04582771\ta small arch used as croquet equipment\nn04582869\tcricket equipment consisting of a set of three stumps topped by crosspieces; used in playing cricket\nn04583022\ta lodge consisting of a frame covered with matting or brush; used by nomadic American Indians in the southwestern United States\nn04583212\ta camera lens having a wider than normal angle of view (and usually a short focal length); produces an image that is foreshortened in the center and increasingly distorted in the periphery\nn04583620\ta commercial airliner with two aisles\nn04583888\tcorduroy with wide ribs\nn04583967\ta lookout atop a coastal house\nn04584056\t(trademark) a hollow plastic ball with cutouts\nn04584207\thairpiece covering the head and made of real or synthetic hair\nn04584373\ta Native American lodge frequently having an oval shape and covered with bark or hides\nn04585128\ta carpet woven on a Jacquard loom with loops like a Brussels carpet but having the loops cut to form a close velvety pile\nn04585318\theaddress of cloth; worn over the head and around the neck and ears by medieval women\nn04585456\ta plain or twilled fabric of wool and cotton used especially for warm shirts or skirts and pajamas\nn04585626\tcotton flannelette with a nap on both sides\nn04585745\tlifting device consisting of a horizontal cylinder turned by a crank on which a cable or rope winds\nn04585980\ta shoulder rifle\nn04586072\thedge or fence of trees designed to lessen the force of the wind and reduce erosion\nn04586581\tmechanical device used to wind another device that 
is driven by a spring (as a clock)\nn04586932\ta musical instrument in which the sound is produced by an enclosed column of air that is moved by the breath\nn04587327\ta large sailing ship\nn04587404\tgenerator that extracts usable energy from winds\nn04587559\ta mill that is powered by the wind\nn04587648\ta framework of wood or metal that contains a glass windowpane and is built into a wall or roof to admit light or air\nn04588739\t(computer science) a rectangular part of a computer screen that contains a display different from the rest of the screen\nn04589190\ta blind for privacy or to keep out light\nn04589325\ta long narrow box for growing plants on a windowsill\nn04589434\tan envelope with a transparent panel that reveals the address on the enclosure\nn04589593\tthe framework that supports a window\nn04589890\tscreen to keep insects from entering a building through the open window\nn04590021\ta bench or similar seat built into a window recess\nn04590129\tan opaque window blind that can cover or uncover a window\nn04590263\tthe sill of a window; the horizontal member at the bottom of the window frame\nn04590553\ttransparent screen (as of glass) to protect occupants of a vehicle\nn04590746\ta mechanical device that cleans the windshield\nn04590933\tstraight chair having a shaped seat and a back of many spindles\nn04591056\ta wide triangular slipknot for tying a tie\nn04591157\ta wide necktie worn in a loose bow\nn04591249\tweather vane shaped like a T and located at an airfield\nn04591359\ta structure resembling a tunnel where air is blown at known velocities for testing parts of aircraft\nn04591517\ta turbine that is driven by the wind\nn04591631\ta bar that serves only wine\nn04591713\ta bottle for holding wine\nn04591887\ta bucket of ice used to chill a bottle of wine\nn04592005\ta barrel that holds wine\nn04592099\ta glass that has a stem and in which wine is served\nn04592356\ta press that is used to extract the juice from grapes\nn04592465\tdistillery 
where wine is made\nn04592596\tan animal skin (usually a goatskin) that forms a bag and is used to hold and dispense wine\nn04592741\tone of the horizontal airfoils on either side of the fuselage of an airplane\nn04593077\teasy chair having wings on each side of a high back\nn04593185\ta threaded nut with winglike projections for thumb and forefinger leverage in turning\nn04593376\ta decorative toecap having a point extending toward the throat of the shoe\nn04593524\ta shoe having a wing-tip toecap\nn04593629\tblind consisting of a leather eyepatch sewn to the side of the halter that prevents a horse from seeing something on either side\nn04593866\tcontact consisting of a conducting arm that rotates over a series of fixed contacts and comes to rest on an outlet\nn04594114\telectric motor that moves the windshield wiper\nn04594218\tligament made of metal and used to fasten things or make cages or fences etc\nn04594489\ta metal conductor that carries electricity over a distance\nn04594742\tfabric woven of metallic wire\nn04594828\tan edge tool used in cutting wire\nn04594919\tgauge for measuring the diameter of wire\nn04595028\ta local area network that uses high frequency radio signals to transmit and receive data over distances of a few hundred feet; uses ethernet protocol\nn04595285\tan impact printer in which each character is represented by a pattern of dots made by wires or styli\nn04595501\tan early type of magnetic recorder using iron wire\nn04595611\ta hand tool used by electricians to remove insulation from the cut end of an insulated wire\nn04595762\tmesh netting made of wires\nn04595855\ta circuit of wires for the distribution of electricity\nn04596116\ta magical cap that secures whatever one wishes for\nn04596492\ta box enclosure for a witness when testifying\nn04596742\tpan with a convex bottom; used for frying in Chinese cooking\nn04596852\tclothing that is designed for women to wear\nn04597066\ta golf club with a long shaft used to hit long shots; 
originally made with a wooden head\nn04597309\ta carving created by carving wood\nn04597400\ta chisel for working wood; it is either struck with a mallet or pushed by hand\nn04597804\tware for domestic use made of wood\nn04597913\ta spoon made of wood\nn04598136\ta metal screw that tapers to a point so that it can be driven into wood with a screwdriver\nn04598318\ta shed for storing firewood or garden tools\nn04598416\ta vise with jaws that are padded in order to hold lumber without denting it\nn04598582\tany wind instrument other than the brass instruments\nn04598965\tthe yarn woven across the warp yarn in weaving\nn04599124\ta loudspeaker that reproduces lower audio frequency sounds\nn04599235\ta fabric made from the hair of sheep\nn04600312\tcontainer for holding implements and materials for work (especially for sewing)\nn04600486\ta strong worktable for a carpenter or mechanic\nn04600912\tclothing worn for doing manual labor\nn04601041\ta county jail that holds prisoners for periods up to 18 months\nn04601159\ta poorhouse where able-bodied poor are compelled to labor\nn04601938\twork consisting of a piece of metal being machined\nn04602762\troom where work is done\nn04602840\tthe internal mechanism of a device\nn04602956\theavy-duty shirts worn for manual or physical work\nn04603399\ta desktop digital computer that is conventionally considered to be more powerful than a microcomputer\nn04603729\ta table designed for a particular task\nn04603872\theavy-duty clothes for manual or physical work\nn04604276\tcomputer network consisting of a collection of internet sites that offer text and graphics and sound and animation resources through the hypertext transfer protocol\nn04604644\trail fence consisting of a zigzag of interlocking rails\nn04604806\tgear consisting of a shaft with screw thread (the worm) that meshes with a toothed wheel (the worm wheel); changes the direction of the axis of rotary motion\nn04605057\tgear with the thread of a worm\nn04605163\ta woolen 
fabric with a hard textured surface and no nap; woven of worsted yarns \"he wore a worsted suit\"\nn04605321\ta tightly twisted woolen yarn spun from long-staple wool\nn04605446\tcloak that is folded or wrapped around a person\nn04605572\ta garment (as a dress or coat) with a full length opening; adjusts to the body by wrapping around\nn04605726\tthe covering (usually paper or cellophane) in which something is wrapped\nn04606251\ta ship that has been destroyed at sea\nn04606574\ta hand tool that is used to hold or twist a nut or bolt\nn04607035\ta mat on which wrestling matches are conducted\nn04607242\ta clothes dryer consisting of two rollers between which the wet clothes are squeezed\nn04607640\tprotective garment consisting of a pad worn by football players\nn04607759\tpin joining a piston to a connecting rod\nn04607869\ta watch that is worn strapped to the wrist\nn04607982\tan arm of a tablet-armed chair; widened to provide a writing surface\nn04608329\ta desk for writing (usually with a sloping top)\nn04608435\ta portable case containing writing materials and having a writing surface\nn04608567\tan implement that is used to write\nn04608809\ta page printer that uses the xerographic process\nn04608923\ta duplicator (trade mark Xerox) that copies graphic matter by the action of light on an electrically charged photoconductive insulating surface in which the latent image is developed with a resinous powder\nn04609531\tphotographic film used to make X-ray pictures\nn04609651\tan apparatus that provides a source of X rays\nn04609811\ta vacuum tube containing a metal target onto which a beam of electrons is directed at high energy for the generation of X rays\nn04610013\tan expensive vessel propelled by sail or power and used for cruising or racing\nn04610176\ta light folding armchair for outdoor use\nn04610274\ta sharply directional antenna\nn04610503\tan enclosure for animals (as chicken or livestock)\nn04610676\ta long horizontal spar tapered at the end and used 
to support and spread a square sail or lateen\nn04611351\teither end of the yard of a square-rigged ship\nn04611795\t(football) a marker indicating the yard line\nn04611916\ta ruler or tape that is three feet long\nn04612026\ta skullcap worn by religious Jews (especially at prayer)\nn04612159\tthe face veil worn by Muslim women\nn04612257\ta long Turkish knife with a curved blade having a single edge\nn04612373\ta sailing vessel with two masts; a small mizzen is aft of the rudderpost\nn04612504\ta ship's small boat (usually rowed by 4 or 6 oars)\nn04612840\tstable gear that joins two draft animals at the neck so they can work together as a team\nn04613015\tfabric comprising a fitted part at the top of a garment\nn04613158\ta connection (like a clamp or vise) between two things so they move together\nn04613696\ta circular domed dwelling that is portable and self-supporting; originally used by nomadic Mongol and Turkic people of central Asia but now used as inexpensive alternative or temporary housing\nn04613939\tthe trademark for a machine that smooths the ice in an ice-skating rink\nn04614505\tthe sight setting that will cause a projectile to hit the center of the target with no wind blowing\nn04614655\ta rectangular tiered temple or terraced mound erected by the ancient Assyrians and Babylonians\nn04614844\tone of a pair of small metallic cymbals worn on the thumb and middle finger; used in belly dancing in rhythm with the dance\nn04615149\ta crude homemade pistol\nn04615226\ta musical stringed instrument with strings stretched over a flat sounding board; it is laid flat and played with a plectrum and with fingers\nn04615644\ta flashy suit of extreme cut\nn04682018\tgraded markings that indicate light or shaded areas in a drawing or painting\nn04950713\tthe direction, texture, or pattern of fibers found in wood or leather or stone or in a woven fabric\nn04950952\ttexture produced by the fibers in wood\nn04951071\ta texture like that of wood\nn04951186\ta texture 
like that of marble\nn04951373\tthe visual effect of illumination on objects or scenes as created in pictures\nn04951716\tan indication of radiant light drawn around the head of a saint\nn04951875\tlightness created by sunlight\nn04953296\ta spatially localized brightness\nn04953678\tthe visual property of something having a milky brightness and a play of colors from the surface\nn04955160\tthe property of being smooth and shiny\nn04957356\tany of three pigments from which all colors can be obtained by mixing\nn04957589\tany of three primary colors of light from which all colors can be obtained by additive mixing\nn04958634\tthe visual property of being without chromatic color\nn04958865\tan irregular arrangement of patches of color\nn04959061\tan absence of normal pigmentation especially in the skin (as in albinism) or in red blood cells\nn04959230\ta quality of a given color that differs slightly from another color\nn04959672\ta color that has hue\nn04960277\tthe quality or state of the achromatic color of least lightness (bearing the least resemblance to white)\nn04960582\ta very dark black\nn04961062\ta very light white\nn04961331\ta shade of white the color of bleached bones\nn04961691\ta neutral achromatic color midway between white and black\nn04962062\ta light shade of grey\nn04962240\ta very dark grey color\nn04963111\ta blood-red color\nn04963307\ta bright orange-red color produced in cotton cloth with alizarine dye\nn04963588\ta deep and vivid red color\nn04963740\ta red color that reflects little light\nn04964001\ta dark purplish-red color\nn04964799\ta dark purplish-red color\nn04964878\ta dark purplish-red to dark brownish-red color\nn04965179\torange color or pigment; any of a range of colors between red and yellow\nn04965451\tan orange color closer to red than to yellow\nn04965661\tyellow color or pigment; the chromatic color resembling the hue of sunflowers or ripe lemons\nn04966543\ta strong yellow color\nn04966941\ta variable yellow tint; dull 
yellow, often diluted with white\nn04967191\tgreen color or pigment; resembling the color of growing grass\nn04967561\tthe property of being somewhat green\nn04967674\tthe property of a moderate green color resembling the waters of the sea\nn04967801\tthe color of sage leaves\nn04967882\tdark to moderate or greyish green\nn04968056\tthe green color of an emerald\nn04968139\ta color that is lighter and greener than olive\nn04968749\ta light green color varying from bluish green to yellowish green\nn04968895\tblue color or pigment; resembling the color of the clear sky in the daytime\nn04969242\ta light shade of blue\nn04969540\ta greyish blue color\nn04969798\ta shade of blue tinged with green\nn04969952\ta shade of blue tinged with purple\nn04970059\ta purple color or pigment\nn04970312\ta vivid purplish-red color\nn04970398\ta blue-violet color\nn04970470\ta pale purple color\nn04970631\ta shade of purple tinged with red\nn04970916\ta light shade of red\nn04971211\ta pink or reddish-pink color\nn04971313\ta dusty pink color\nn04972350\tthe brown color of chestnuts\nn04972451\ta medium brown to dark-brown color\nn04972801\ta brown that is light but unsaturated\nn04973020\ta light brown the color of topaz\nn04973291\ta very light brown\nn04973386\ta shade of brown with a tinge of red\nn04973585\ta bright reddish-brown color\nn04973669\ta reddish-brown color resembling the color of polished copper\nn04973816\ta reddish-brown color resembling the red soil used as body paint by American Indians\nn04974145\ta color varying from dark purplish brown to dark red\nn04974340\ta yellow-green color of low brightness and saturation\nn04974859\ta vivid blue to purple-blue color\nn04975739\teither one of two chromatic colors that when mixed together give white (in the case of lights) or grey (in the case of pigments)\nn04976319\tcoloration of living tissues by pigment\nn04976952\tthe coloring of a person's face\nn04977412\ta healthy reddish complexion\nn04978561\ta color produced 
by a pattern of differently colored dots that together simulate the desired color\nn04979002\tconspicuous coloration or markings of an animal serving to warn off predators\nn04979307\tcoloring that conceals or disguises an animal's shape\nn04981658\ta characteristic sound\nn05102764\tthe center of the circle of curvature\nn05218119\tthe dead body of a human being\nn05233741\tsmall indentation in the middle of the lower jawbone\nn05235879\ta riblike supporting or strengthening part of an animal or plant\nn05238282\ta natural protective body covering and site of the sense of touch\nn05239437\ta piece of skin taken from a donor area and surgically grafted at the site of an injury or burn\nn05241218\tany of the cells making up the epidermis\nn05241485\ta cell in the basal layer of the epidermis that produces melanin under the control of the melanocyte-stimulating hormone\nn05241662\ta cell in the germinal layer of the skin (the prickle-cell layer); has many spines and radiating processes\nn05242070\tan epithelial cell that is shaped like a column; some have cilia\nn05242239\tany of various columnar epithelial cells in the central nervous system that develop into neuroglia\nn05242928\tan epithelial cell that is flat like a plate and form a single layer of epithelial tissue\nn05244421\ta plaque consisting of tangles of amyloid protein in nervous tissue (a pathological mark of Alzheimer's disease)\nn05244755\ta film of mucus and bacteria deposited on the teeth that encourages the development of dental caries\nn05244934\ta patch of skin that is discolored but not usually elevated; caused by various diseases\nn05245192\ta small brownish spot (of the pigment melanin) on the skin\nn05257476\ta woman's hairstyle in which the hair gives a puffy appearance\nn05257967\ta fat sausage-shaped curl\nn05258051\ta lock of hair growing (or falling) over the forehead\nn05258627\ta spiral curl plastered on the forehead or cheek\nn05259914\ta plait of braided hair\nn05260127\ta smooth hair 
style with the ends of the hair curled inward\nn05260240\ta hair style in which the front hair is swept up from the forehead\nn05261310\thair resembling thatched roofing material\nn05262422\tslang for a mustache\nn05262534\ta large bushy moustache (with hair growing sometimes down the sides of the mouth)\nn05262698\ta bushy droopy mustache\nn05263183\tshort stiff hairs growing on a man's face when he has not shaved for a few days\nn05263316\ta short pointed beard (named after the artist Anthony Vandyke)\nn05263448\ta small patch of facial hair just below the lower lip and above the chin\nn05265736\talimentary tract smear of material obtained from the esophagus\nn05266096\talimentary tract smear of material obtained from the duodenum\nn05266879\ta bit of tissue or blood or urine that is taken for diagnostic purposes\nn05278922\t(anatomy) a point or small area\nn05279953\tthe concavity in the head of the scapula that receives the head of the humerus to form the shoulder joint\nn05282652\ta gap or vacant space between two teeth\nn05285623\tthe fatty network of connective tissue that fills the cavities of bones\nn05302499\tthe opening through which food is taken in and vocalizations emerge\nn05314075\teither of the corners of the eye where the upper and lower eyelids meet\nn05399034\tproduced by mammary glands of female mammals for feeding their young\nn05399243\tmilk secreted by a woman who has recently given birth\nn05399356\tmilky fluid secreted for the first day or two after parturition\nn05418717\ta blood vessel that carries blood from the capillaries toward the heart\nn05427346\ta nerve cell whose body is outside the central nervous system\nn05442594\tthe sex chromosome that is present in both sexes: singly in males and doubly in females\nn05447757\ta cell of an embryo\nn05448704\ta precursor of leukocytes that normally occurs only in bone marrow\nn05448827\tan erythroblast having granules of ferritin\nn05449196\tmature bone cell\nn05449661\tabnormally large red 
blood cell (associated with pernicious anemia)\nn05449959\tblood cells that engulf and digest bacteria and fungi; an important part of the body's defense system\nn05450617\ta macrophage that is found in connective tissue\nn05451099\ta phagocyte that does not circulate in the blood but is fixed in the liver or spleen or bone marrow etc.\nn05451384\tan agranulocytic leukocyte that normally makes up a quarter of the white blood cell count but increases in the presence of infection\nn05453412\ta large immature monocyte normally found in bone marrow\nn05453657\tthe chief phagocytic leukocyte; stains with either basic or acid dyes\nn05453815\ta neutrophil that ingests small things (as bacteria)\nn05454833\tan abnormal red blood cell that has a crescent shape and an abnormal form of hemoglobin\nn05454978\tan abnormal red blood cell containing granules of iron not bound in hemoglobin\nn05455113\tan abnormal spherical red blood cell\nn05458173\tmature ovum after penetration by sperm but before the formation of a zygote\nn05458576\ta female gametocyte that develops into an ovum after two meiotic divisions\nn05459101\tan immature gamete produced by a spermatocyte; develops into a spermatozoon\nn05459457\ta cell in the testes that secretes the hormone testosterone\nn05459769\tan elongated contractile cell in striated muscle tissue\nn05460759\tcells of the smooth muscles\nn05464534\tsmall gaps in the myelin sheath of medullated axons\nn05467054\tsustentacular tissue that surrounds and supports neurons in the central nervous system; glial and neural cells together compose the tissue of the central nervous system\nn05467758\tcomparatively large neuroglial cell\nn05468098\ta kind of astrocyte found in the grey matter\nn05468739\ta cell of the oligodendroglia\nn05469664\tspecial nerve endings in the muscles and tendons and other organs that respond to stimuli regarding the position and movement of the body\nn05469861\tshort fiber that conducts toward the cell body of the 
neuron\nn05475397\ta nerve fiber that carries impulses toward the central nervous system\nn05482922\ta space in the meninges beneath the arachnoid membrane and above the pia mater that contains the cerebrospinal fluid\nn05486510\tthe layer of unmyelinated neurons (the grey matter) forming the cortex of the cerebrum\nn05491154\tthe cortex of the kidney containing the glomeruli and the convoluted tubules\nn05526957\ta fold of skin covering the tip of the penis\nn05538625\tthe upper part of the human body or the front part of the body in animals; contains the face and brains\nn05539947\tthe skin that covers the top of the head\nn05541509\teither prominence of the frontal bone above each orbit\nn05542893\tan immovable joint (especially between the bones of the skull)\nn05545879\tthe large opening at the base of the cranium through which the spinal cord passes\nn05571341\tthe junction between the esophagus and the stomach epithelium\nn05578095\tthe back part of the human foot\nn05581932\tthe dead skin at the base of a fingernail or toenail\nn05584746\ta loose narrow strip of skin near the base of a fingernail; tearing it produces a painful sore that is easily infected\nn05586759\tthe exterior protective or supporting structure or shell of many animals (especially invertebrates) including bony or horny parts such as nails or scales or hoofs\nn05604434\ta wall of the abdomen\nn05716342\ta distinctive tart flavor characteristic of lemons\nn06008896\tone of the fixed reference lines of a coordinate system\nn06209940\tan extensive mental viewpoint\nn06254669\ta means or instrumentality for storing or communicating information\nn06255081\ta medium for the expression or achievement of something\nn06255613\ta medium for written communication\nn06259898\ta path over which electrical signals can pass\nn06262567\ta medium that disseminates moving pictures\nn06262943\tthe film industry\nn06263202\ta press not restricted or controlled by government censorship regarding politics or 
ideology\nn06263369\tthe print media responsible for gathering and publishing news in the form of newspapers or magazines\nn06263609\ta medium that disseminates printed matter\nn06263762\ta medium for storing information\nn06263895\tany storage medium in which different patterns of magnetization are used to represent stored bits or bytes of information\nn06266417\tnewspapers and magazines collectively\nn06266633\tBritish journalism\nn06266710\tjournalism that presents a story primarily through the use of pictures\nn06266878\tphotography of newsworthy events\nn06266973\tprinted material (text and pictures) produced by an intaglio printing process in a rotary press\nn06267145\ta daily or weekly publication on folded sheets; contains news and articles and advertisements\nn06267564\ta newspaper that is published every day\nn06267655\ta newspaper or official journal\nn06267758\ta newspaper written and published by students in a school\nn06267893\tnewspaper with half-size pages\nn06267991\tsensationalist journalism\nn06271778\t(often plural) systems used in transmitting messages over a distance electronically\nn06272290\ttransmitting speech at a distance\nn06272612\ta computerized system for answering and routing telephone calls; telephone messages can be recorded and stored and relayed\nn06272803\ta telephone connection\nn06273207\ta return call\nn06273294\ta telephone call that the receiving party is asked to pay for\nn06273414\tlets you transfer your incoming calls to any telephone that you can dial direct\nn06273555\ta telephone call to a radio station or a television station in which the caller participates in the on-going program\nn06273743\ta way of letting you know that someone else is calling when you are using your telephone\nn06273890\ta hostile telephone call (from a crank)\nn06273986\ta telephone call made within a local calling area\nn06274092\ta telephone call made outside the local calling area\nn06274292\ta long-distance telephone call at charges above a 
local rate\nn06274546\ta telephone call that you request be made a specific time in order to wake you up at that time (especially in hotels)\nn06274760\ta way of adding a third party to your conversation without the assistance of a telephone operator\nn06274921\tcommunicating at a distance by electric transmission over wire\nn06275095\ta telegram sent abroad\nn06275353\ttransmission by radio waves\nn06275471\ttelegraphy that uses transmission by radio rather than by wire\nn06276501\ttelephony that uses transmission by radio rather than by wire\nn06276697\ttaking part in a radio or tv program\nn06276902\ta system for distributing radio or tv programs\nn06277025\tcommunicates two or more signals over a common channel\nn06277135\tmedium for communication\nn06277280\tbroadcasting visual images of stationary or moving objects\nn06278338\ttelevision that is transmitted over cable directly to the receiver\nn06278475\ta television system that has more than the usual number of lines per frame so its pictures show more detail\nn06281040\tquality or fidelity of a received broadcast\nn06281175\tthe detection that a signal is being received\nn06340977\ta Hebrew title of respect for a wise and highly educated man\nn06359193\ta computer connected to the internet that maintains a series of web pages on the World Wide Web\nn06359467\ta site on the internet where a number of users can communicate in real time (typically one dedicated to a particular topic)\nn06359657\ta site that the owner positions as an entrance to other sites on the internet\nn06415688\ta small notebook for rough notes\nn06417096\t(Roman Catholic Church) a book of prayers to be recited daily certain priests and members of religious orders\nn06418693\ta reference book containing words (usually with their meanings)\nn06419354\tan abridged dictionary of a size convenient to hold in the hand\nn06423496\ta handbook of tables used to facilitate computation\nn06470073\twriting that provides information (especially 
information of an official nature)\nn06591815\tone or more recordings issued together; originally released on 12-inch phonograph records (usually with attractive record covers) and later on cassette audiotape and compact disc\nn06592078\tan album whose recordings are unified by some theme (instrumental or lyrical or narrative or compositional)\nn06592281\talbums of rock music that aspired to the status of art; first appeared in the 1960s\nn06592421\tconcept album compiling a performer's work or work supporting some worthy cause\nn06595351\ta periodic publication containing pictures and stories and articles of interest to those who purchase it or subscribe to it\nn06596179\t(British) a magazine that is printed in color and circulated with a newspaper (especially on weekends)\nn06596364\ta magazine devoted to comic strips\nn06596474\ta magazine devoted to reports of current events; usually published weekly\nn06596607\tan inexpensive magazine printed on poor quality paper\nn06596727\ta magazine printed on good quality paper\nn06596845\ta magazine published for and read by members of a particular trade group\nn06613686\ta form of entertainment that enacts a story by sound and a sequence of images giving the illusion of continuous movement\nn06614901\ta scene that is filmed but is not used in the final editing of the film\nn06616216\ta movie featuring shooting and violence\nn06618653\ta low-budget Western movie produced by a European (especially an Italian) film company\nn06625062\ta letter from the pope sent to all Roman Catholic bishops throughout the world\nn06785654\ta puzzle in which words corresponding to numbered clues are to be found and written in to squares in the puzzle\nn06793231\ta public display of a message\nn06794110\ta sign visible from the street\nn06874185\ta visual signal to control the flow of traffic at intersections\nn06883725\tthe official emblem of the Nazi Party and the Third Reich; a cross with the arms bent at right angles in a clockwise 
direction\nn06892775\ta performance of music by players or singers not involving theatrical staging\nn06998748\tphotographs or other visual representations in a printed publication\nn07005523\tthe enhanced response of an antenna in a given direction as indicated by a loop in its radiation pattern\nn07248320\ta paper jacket for a book; a jacket on which promotional information is usually printed\nn07273802\ta mound of stones piled up as a memorial or to mark a boundary or path\nn07461050\tan equestrian competition; the first day is dressage; the second is cross-country jumping; the third is stadium jumping\nn07556406\tfood that is simply prepared and gives a sense of wellbeing; typically food with a high sugar or carbohydrate content that is associated with childhood or with home cooking\nn07556637\tany substance that can be used as food\nn07556872\teatables (especially sweets)\nn07556970\tpart of a meal served at one time\nn07557165\tsomething considered choice to eat\nn07557434\ta particular item of prepared food\nn07560193\tinexpensive food (hamburgers or chicken or milkshakes) prepared and served quickly\nn07560331\tfood to be eaten with the fingers\nn07560422\tsolid and liquid nourishment taken into the body through the mouth\nn07560542\tfood that fulfills the requirements of Jewish dietary law\nn07560652\tthe food and drink that are regularly served or consumed\nn07560903\tthe usual food and drink consumed by an organism (person or animal)\nn07561112\ta prescribed selection of foods\nn07561590\ta regulated daily food allowance\nn07561848\ta diet that contains adequate amounts of all the necessary nutrients required for healthy growth and activity\nn07562017\ta diet of foods that are not irritating\nn07562172\ta diet of fluids with minimal residues (fat-free broth or strained fruit juices or gelatin); cannot be used for more than one day postoperative\nn07562379\ta diet designed to help control the symptoms of diabetes\nn07562495\tsomething added to complete a 
diet or to make up for a dietary deficiency\nn07562651\ta diet of foods high in starch that increases carbohydrate reserves in muscles\nn07562881\ta reducing diet that enjoys temporary popularity\nn07562984\tdiet prescribed to treat celiac disease; eliminates such foods as wheat and rye and oats and beans and cabbage and turnips and cucumbers that are rich in gluten\nn07563207\ta diet high in plant and animal proteins; used to treat malnutrition or to increase muscle mass\nn07563366\ta diet designed to patients with vitamin deficiencies\nn07563642\tdiet prescribed for bedridden or convalescent people; does not include fried or highly seasoned foods\nn07563800\ta diet of foods that can be served in liquid or strained form (plus custards or puddings); prescribed after certain kinds of surgery\nn07564008\ta diet that is low on calories\nn07564101\ta diet containing limited amounts of fat and stressing foods high in carbohydrates; used in treatment of some gallbladder conditions\nn07564292\ta diet that limits the intake of salt (sodium chloride); often used in treating hypertension or edema or certain other disorders\nn07564515\ta diet consisting chiefly of beans and whole grains\nn07564629\ta diet designed to help you lose weight (especially fat)\nn07564796\ta diet that does not require chewing; advised for those with intestinal disorders\nn07564971\ta diet excluding all meat and fish\nn07565083\tthe dishes making up a meal\nn07565161\tinformal terms for a meal\nn07565259\tfood or meals in general\nn07565608\ta meal eaten in a mess hall by service personnel\nn07565725\tthe food allowance for one day (especially for service personnel)\nn07565945\trations issued for United States troops in the field\nn07566092\ta small package of emergency rations; issued to United States troops in World War II\nn07566231\ta canned field ration issued by the United States Army\nn07566340\ta substance that can be used or prepared for use as food\nn07566863\tfoodstuff rich in natural 
starch (especially potatoes, rice, bread)\nn07567039\tflour or meal or grain used in baking bread\nn07567139\ta digestible substance used to give color to food\nn07567390\ta concentrated form of a foodstuff; the bulk is reduced by removing water\nn07567611\ta concentrated form of tomatoes\nn07567707\tcoarsely ground foodstuff; especially seeds of various cereal grasses or pulse\nn07567980\tcoarsely ground grain in the form of pellets (as for pet food)\nn07568095\tcoarsely ground corn\nn07568241\tfine meal made from cereal grain especially wheat; often used as a cooked cereal or in puddings\nn07568389\tmeal made from ground matzos\nn07568502\tmeal made from rolled or ground oats\nn07568625\tmeal made from dried peas\nn07568818\tcoarse, indigestible plant food low in nutrients; its bulk stimulates intestinal peristalsis\nn07568991\tfood prepared from the husks of cereal grains\nn07569106\tfine powdery foodstuff obtained by grinding and sifting the meal of a cereal grain\nn07569423\tflour that does not contain a raising agent\nn07569543\tflour prepared from wheat\nn07569644\tflour made by grinding the entire wheat berry including the bran; (`whole meal flour' is British usage)\nn07569873\tmeal made from soybeans\nn07570021\tmilled product of durum wheat (or other hard wheat) used in pasta\nn07570530\ta feed consisting primarily of corn gluten\nn07570720\ta source of materials to nourish the body\nn07572353\ta stock or supply of foods\nn07572616\ta supply of food especially for a household\nn07572858\tfood preserved by freezing\nn07572957\tfood preserved by canning\nn07573103\tmeat preserved in a can or tin\nn07573347\ta canned meat made largely from pork\nn07573453\tfood preserved by dehydration\nn07573563\ta substantial and nourishing meal\nn07573696\tthe food served and eaten at one time\nn07574176\twhatever happens to be available especially when offered to an unexpected guest or when brought by guests and shared by all\nn07574426\ta light meal or 
repast\nn07574504\tsnacks and drinks served as a light meal\nn07574602\tthe first meal of the day (usually in the morning)\nn07574780\ta breakfast that usually includes a roll and coffee or tea\nn07574923\tcombination breakfast and lunch; usually served in late morning\nn07575076\ta midday meal\nn07575226\tlunch (usually at a restaurant) where business is discussed and the cost is charged as a business expense\nn07575392\tsubstantial early evening meal including tea\nn07575510\ta light midafternoon meal of tea and sandwiches or cakes\nn07575726\tthe main meal of the day served in the evening or at midday\nn07575984\ta light evening meal; served in early evening if dinner is at midday or served late in the evening at bedtime\nn07576182\ta meal set out on a buffet at which guests help themselves\nn07576438\tany informal meal eaten outside or on an excursion\nn07576577\tan informal meal cooked and eaten outdoors\nn07576781\ta cookout in which food is cooked over an open fire; especially a whole animal carcass roasted on a spit\nn07576969\ta cookout at the seashore where clams and fish and other foods are cooked--usually on heated stones covered with seaweed\nn07577144\ta cookout where fried fish is the main course\nn07577374\ta light informal meal\nn07577538\t(Yiddish) a snack or light meal\nn07577657\ta large satisfying meal\nn07577772\ta meal consisting of a sandwich of bread and cheese and a salad\nn07577918\ta snack taken during a break in the work day\nn07578093\ta meal that is well prepared and greatly enjoyed\nn07579575\tthe principal dish of a meal\nn07579688\tthe most important dish of a meal\nn07579787\ta main course served on a plate\nn07579917\ta dish of marinated vegetables and meat or fish; served with rice\nn07580053\ta dish that is served with, but is subordinate to, a main course\nn07580253\ta dish or meal given prominence in e.g. 
a restaurant\nn07580359\tfood cooked and served in a casserole\nn07580470\tchicken cooked and served in a casserole\nn07580592\tchicken casserole prepared with tomatoes and mushrooms and herbs in the Italian style\nn07581249\ta course of appetizers in an Italian meal\nn07581346\tfood or drink to stimulate the appetite (usually served before a meal or as the first course)\nn07581607\tan appetizer consisting usually of a thin slice of bread or toast spread with caviar or cheese or other savory food\nn07581775\tan appetizer served as a first course at a meal\nn07581931\ta mixture of sliced or diced fruits\nn07582027\ta cocktail of cold cooked crabmeat and a sauce\nn07582152\ta cocktail of cold cooked shrimp and a sauce\nn07582277\ta dish served as an appetizer before the main meal\nn07582441\tspicy or savory condiment\nn07582609\ttasty mixture or liquid into which bite-sized foods are dipped\nn07582811\ta dip made of cooked beans\nn07582892\ta dip made of cheeses\nn07582970\ta dip made of clams and soft cream cheese\nn07583066\ta dip made of mashed avocado mixed with chopped onions and other seasonings\nn07583197\tliquid food especially of meat or fish or vegetable stock often containing pieces of solid food\nn07583865\tthe soup that a restaurant is featuring on a given day\nn07583978\tsoup that contains small noodles in the shape of letters of the alphabet\nn07584110\tclear soup usually of beef or veal or chicken\nn07584228\ta tomato-flavored consomme; often served chilled\nn07584332\ta thick cream soup made from shellfish\nn07584423\ta Russian or Polish soup usually containing beet juice as a foundation\nn07584593\ta thin soup of meat or fish or vegetable stock\nn07584859\tused to feed infants\nn07584938\ta clear seasoned broth\nn07585015\ta stock made with beef\nn07585107\ta stock made with chicken\nn07585208\tliquid in which meat and vegetables are simmered; used as a basis for e.g. 
soups or sauces\nn07585474\ta cube of dehydrated stock\nn07585557\tsoup made from chicken broth\nn07585644\tsoup made from chicken boiled with leeks\nn07585758\ta soup made with chopped tomatoes and onions and cucumbers and peppers and herbs; served cold\nn07585906\ta soup or stew thickened with okra pods\nn07585997\ta clear soup garnished with julienne vegetables\nn07586099\tsoup cooked in a large pot\nn07586179\tsoup made from a calf's head or other meat in imitation of green turtle soup\nn07586318\ta soup of eastern India that is flavored with curry; prepared with a meat or chicken base\nn07586485\ta soup made from the skinned tail of an ox\nn07586604\ta thick soup made of dried peas (usually made into a puree)\nn07586718\ta soup made with vegetables and tripe and seasoned with peppercorns; often contains dumplings\nn07586894\tsoup made with a variety of vegetables\nn07587023\tthick (often creamy) soup\nn07587111\ta stew of vegetables and (sometimes) meat\nn07587206\tsoup usually made of the flesh of green turtles\nn07587331\tmade by stirring beaten eggs into a simmering broth\nn07587441\ta thick soup or stew made with milk and bacon and onions and potatoes\nn07587618\tchowder containing corn\nn07587700\tchowder containing clams\nn07587819\ta chowder made with clams and tomatoes and other vegetables and seasonings\nn07587962\ta thick chowder made with clams and potatoes and onions and salt pork and milk\nn07588111\tchowder containing fish\nn07588193\ta soup with won ton dumplings\nn07588299\tmade of stock and split peas with onions carrots and celery\nn07588419\tmade of fresh green peas and stock with shredded lettuce onion and celery\nn07588574\tmade of stock and lentils with onions carrots and celery\nn07588688\ta thick soup made from beef or mutton with vegetables and pearl barley\nn07588817\ta creamy potato soup flavored with leeks and onions; usually served cold\nn07588947\tfood prepared by stewing especially meat or fish with vegetables\nn07589458\ta 
Polish stew of cabbage and meat\nn07589543\tspicy southern specialty: chicken (or small game) with corn and tomatoes and lima beans and okra and onions and potatoes\nn07589724\tthick spicy stew of whatever meat and whatever vegetables are available; southern United States\nn07589872\ta gathering at which burgoo stew is served\nn07589967\tSpanish version of burgoo\nn07590068\tIrish version of burgoo\nn07590177\tthick stew made of rice and chicken and small game; southern U.S.\nn07590320\ta rich meat stew highly seasoned with paprika\nn07590502\ta stew (or thick soup) made with meat and vegetables\nn07590611\ta stew of meat and potatoes cooked in a tightly covered pot\nn07590752\tmeat is browned before stewing\nn07590841\tmade with sauerkraut and caraway seeds and served with sour cream\nn07590974\tmade of lamb or pork\nn07591049\tmeat (especially mutton) stewed with potatoes and onions\nn07591162\toysters in cream\nn07591236\tdiced lobster meat in milk or cream\nn07591330\ta stew of meat and vegetables and hardtack that is eaten by sailors\nn07591473\ta stew made with fish\nn07591586\thighly seasoned Mediterranean soup or stew made of several kinds of fish and shellfish with tomatoes and onions or leeks and seasoned with saffron and garlic and herbs\nn07591813\thighly seasoned soup or stew made of freshwater fishes (eel, carp, perch) with wine and stock\nn07591961\tsaffron-flavored dish made of rice with shellfish and chicken\nn07592094\tpieces of chicken or other meat stewed in gravy with e.g. 
carrots and onions and served with noodles or dumplings\nn07592317\ta stew made with chicken\nn07592400\ta stew made with turkey\nn07592481\ta stew made with beef\nn07592656\twell-seasoned stew of meat and vegetables\nn07592768\ta vegetable stew; usually made with tomatoes, eggplant, zucchini, peppers, onion, and seasonings\nn07592922\tragout of game in a rich sauce\nn07593004\ttraditional French stew of vegetables and beef\nn07593107\ta thin stew of meat and vegetables\nn07593199\tan assortment of foods starting with herring or smoked eel or salmon etc with bread and butter; then cheeses and eggs and pickled vegetables and aspics; finally hot foods; served as a buffet meal\nn07593471\ta choice or delicious dish\nn07593774\ta commercial preparation containing most of the ingredients for a dish\nn07593972\ta commercial mix for making brownies\nn07594066\ta commercial mix for making a cake\nn07594155\ta commercial mix for making lemonade\nn07594250\ta commercially prepared mixture of flour and salt and a leavening agent\nn07594737\ta small tasty bit of food\nn07594840\tan aromatic or spicy dish served at the end of dinner or as an hors d'oeuvre\nn07595051\ta savory jelly made with gelatin obtained by boiling calves' feet\nn07595180\tburnt sugar; used to color and flavor food\nn07595368\trefined sugar molded into rectangular shapes convenient as single servings\nn07595649\tsugar from sugarcane used as sweetening agent\nn07595751\tvery finely granulated sugar that was formerly sprinkled from a castor\nn07595914\tsugar granulated into a fine powder\nn07596046\tsugar in the form of small grains\nn07596160\tfinely powdered sugar used to make icing\nn07596362\tdextrose used as sweetening agent\nn07596452\tunrefined or only partly refined sugar\nn07596566\tlight brown cane sugar; originally from Guyana\nn07596684\ta food rich in sugar\nn07596967\tcandy and other sweets considered collectively\nn07597145\tpreserved or candied fruit\nn07597263\ta sweetened delicacy (as a 
preserve or pastry)\nn07597365\ta rich sweet made of flavored sugar and often combined with fruit or nuts\nn07598256\ta candy shaped as a bar\nn07598529\ta bar of candy made with carob powder\nn07598622\ta British sweet made with molasses and butter and almonds\nn07598734\tcandy that is brittle\nn07598928\ta brittle transparent candy made by melting and cooling cane sugar\nn07599068\ta British candy flavored with brandy\nn07599161\ta large round hard candy\nn07599242\ta hard candy with lemon flavor and a yellow color and (usually) the shape of a lemon\nn07599383\tround piece of tart hard candy\nn07599468\tround flat candy\nn07599554\ta patty flavored with peppermint\nn07599649\ta candy that usually has a center of fondant or fruit or nuts coated in chocolate\nn07599783\tcaramelized sugar cooled in thin sheets\nn07599911\tbrittle containing peanuts\nn07599998\ta preparation (usually made of sweetened chicle) for chewing\nn07600177\ta ball of chewing gum with a coating of colored sugar\nn07600285\ta kind of chewing gum that can be blown into bubbles\nn07600394\ta hard brittle candy made with butter and brown sugar\nn07600506\tfruit cooked in sugar syrup and encrusted with a sugar crystals\nn07600696\tan apple that is covered with a candy-like substance (usually caramelized sugar)\nn07600895\tstrips of gingerroot cooked in sugar syrup and coated with sugar\nn07601025\tstrips of grapefruit peel cooked in sugar syrup and coated with sugar\nn07601175\tstrips of lemon peel cooked in sugar and coated with sugar\nn07601290\tstrips of orange peel cooked in sugar and coated with sugar\nn07601407\tstrips of citrus peel cooked in a sugar syrup\nn07601572\ta hard candy in the shape of a rod (usually with stripes)\nn07601686\ta small yellow and white candy shaped to resemble a kernel of corn\nn07601809\tfirm chewy candy made from caramelized sugar and butter and milk\nn07602650\tthe sweet central portion of a piece of candy that is enclosed in chocolate or some other 
covering\nn07604956\tcandy containing a fruit or nut\nn07605040\ta candy made by spinning sugar that has been boiled to a high temperature\nn07605198\tsugar-coated nut or fruit piece\nn07605282\tsilvery candy beads used for decorating cakes\nn07605380\tcandy made of a thick creamy sugar paste\nn07605474\tsoft creamy candy\nn07605597\tfudge made with chocolate or cocoa\nn07605693\twhite creamy fudge made with egg whites\nn07605804\tfudge made with brown sugar and butter and milk and nuts\nn07605944\ta jellied candy coated with sugar crystals\nn07606058\tchewy fruit-flavored jellied candy (sometimes medicated to soothe a sore throat)\nn07606191\ta crisp candy made with honey\nn07606278\ta candy that is flavored with a mint oil\nn07606419\ta candy that is flavored with an extract of the horehound plant\nn07606538\ta candy flavored with peppermint oil\nn07606669\tsugar-glazed jellied candy\nn07606764\tany of several bite-sized candies\nn07606933\ta candy kiss that resembles toffee\nn07607027\ta kiss made of sugar and egg white and baked slowly\nn07607138\ta kiss that consists of a conical bite-sized piece of chocolate\nn07607361\ta black candy flavored with the dried root of the licorice plant\nn07607492\ta candy shaped like a small lifesaver\nn07607605\thard candy on a stick\nn07607707\ta small aromatic or medicated candy\nn07607832\ta scented lozenge used to sweeten the breath (e.g. 
to conceal the odor of tobacco)\nn07607967\ta medicated lozenge used to soothe the throat\nn07608098\tspongy confection made of gelatin and sugar and corn syrup and dusted with powdered sugar\nn07608245\talmond paste and egg whites\nn07608339\tnuts or fruit pieces in a sugar paste\nn07608429\ta bar of nougat candy often dipped in chocolate\nn07608533\tpaste of nuts and sugar on a pastry base cut into bars\nn07608641\tbar of peanuts in taffy\nn07608721\tpopcorn combined with a thick sugar or molasses or caramel syrup and formed into balls\nn07608866\tcookie-sized candy made of brown sugar and butter and pecans\nn07608980\tsugar in large hard clear crystals on a string\nn07609083\thard bright-colored stick candy (typically flavored with peppermint)\nn07609215\tmade by boiling pure sugar until it hardens\nn07609316\tany of various small sugary candies\nn07609407\tchewy candy of sugar or syrup boiled until thick and pulled until glossy\nn07609549\ttaffy made of molasses\nn07609632\tcreamy chocolate candy\nn07609728\ta jellied candy typically flavored with rose water\nn07609840\ta dish served as the last course of a meal\nn07610295\t(classical mythology) the food and drink of the gods; mortals who ate it became immortal\nn07610502\tfruit dessert made of oranges and bananas with shredded coconut\nn07610620\tcake covered with ice cream and meringue browned quickly in an oven\nn07610746\tsweet almond-flavored milk pudding thickened with gelatin or cornstarch; usually molded\nn07610890\ta mold lined with cake or crumbs and filled with fruit or whipped cream or custard\nn07611046\tdessert of stewed or baked fruit\nn07611148\tdessert made by baking fruit wrapped in pastry\nn07611267\topen pastry filled with fruit or custard\nn07611358\tany of various desserts prepared by freezing\nn07611733\tdessert made of sweetened milk coagulated with rennet\nn07611839\ta light creamy dish made from fish or meat and set with gelatin\nn07611991\ta rich, frothy, creamy dessert made with 
whipped egg whites and heavy cream\nn07612137\ta dessert consisting of a meringue base or cup filled with fruit and whipped cream\nn07612273\tice cream and peaches with a liqueur\nn07612367\ta dessert made of sugar and stiffly beaten egg whites or cream and usually flavored with fruit\nn07612530\tdessert made of prune puree and whipped cream\nn07612632\tany of various soft sweet desserts thickened usually with flour and baked or boiled or steamed\nn07612996\t(British) the dessert course of a meal (`pud' is used informally)\nn07613158\tsweetened cream beaten with wine or liquor\nn07613266\tan Italian dessert consisting of layers of sponge cake soaked with coffee and brandy or liqueur layered with mascarpone cheese and topped with grated chocolate\nn07613480\ta cold pudding made of layers of sponge cake spread with fruit or jelly; may be decorated with nuts, cream, or chocolate\nn07613671\ta trifle soaked in wine and decorated with almonds and candied fruit\nn07613815\tfruit-flavored dessert (trade mark Jell-O) made from a commercially prepared gelatin powder\nn07614103\tapples wrapped in pastry and baked\nn07614198\ta frozen dessert with fruit flavoring (especially one containing no milk)\nn07614348\tan ice containing no milk but having a mushy consistency; usually made from fruit juice\nn07614500\tfrozen dessert containing cream and sugar and flavoring\nn07614730\tice cream in a crisp conical wafer\nn07614825\tice cream flavored with chocolate\nn07615052\ta block of ice cream with 3 or 4 layers of different colors and flavors\nn07615190\tice cream flavored with fresh peaches\nn07615289\ta frozen dessert made primarily of fruit juice and sugar, but also containing milk or egg-white or gelatin\nn07615460\tice cream flavored with fresh strawberries\nn07615569\tice cream containing chopped candied fruits\nn07615671\tice cream flavored with vanilla extract\nn07615774\tice cream or water ice on a small wooden stick\nn07615954\tsimilar to ice cream but made of 
milk\nn07616046\ta soft frozen dessert of sweetened flavored yogurt\nn07616174\tball of crushed ice with fruit syrup\nn07616265\tball of ice cream covered with coconut and usually chocolate sauce\nn07616386\tlayers of ice cream and syrup and whipped cream\nn07616487\tice cream served with a topping\nn07616590\ta dessert of sliced fruit and ice cream covered with whipped cream and cherries and nuts\nn07616748\ta banana split lengthwise and topped with scoops of ice cream and sauces and nuts and whipped cream\nn07616906\ta chilled dessert consisting of a mixture of custard and nuts and (sometimes) liquor\nn07617051\tdessert resembling ice cream but with a boiled custard base\nn07617188\tany of various soft thick unsweetened baked dishes\nn07617344\ta bland custard or pudding especially of oatmeal\nn07617447\tmousse made with fish\nn07617526\tmousse made with chicken\nn07617611\tdessert mousse made with chocolate\nn07617708\ta rich steamed or boiled pudding that resembles cake\nn07617839\tpudding made with grated carrots\nn07617932\tpudding made of corn and cream and egg\nn07618029\ta pudding cooked by steaming\nn07618119\ta stiff flour pudding steamed or boiled usually and containing e.g. 
currants and raisins and citron\nn07618281\tsweet vanilla flavored custard-like pudding usually thickened with flour rather than eggs\nn07618432\tsweet chocolate flavored custard-like pudding usually thickened with flour rather than eggs\nn07618587\tbaked pudding of apples and breadcrumbs\nn07618684\ta rich frozen pudding made of chopped chestnuts and maraschino cherries and candied fruits and liqueur or rum\nn07618871\ta pudding made with strained split peas mixed with egg\nn07619004\tsweetened mixture of milk and eggs baked or boiled or frozen\nn07619208\tbaked custard topped with caramel\nn07619301\tcustard sauce flavored with vanilla or a liqueur\nn07619409\tcustard sprinkled with sugar and broiled\nn07619508\ta custard containing fruit\nn07619881\tgranular preparation of cassava starch used to thicken especially puddings\nn07620047\tsweet pudding thickened with tapioca\nn07620145\tpudding made of suet pastry spread with jam or fruit and rolled up and baked or steamed\nn07620327\ta sweet or savory pudding made with suet and steamed or boiled\nn07620597\ta rich custard set with gelatin\nn07620689\tcherry preserved in true or imitation maraschino liqueur\nn07621264\tcolored beads of sugar used as a topping on e.g. candies and cookies\nn07621497\tlight foamy custard-like dessert served hot or chilled\nn07621618\tsomething (such as parsley) added to a dish for flavor or decoration\nn07623136\ta dough of flour and water and shortening\nn07624466\ta dish made by folding a piece of pastry over a filling\nn07624666\tturnover with an apple filling\nn07624757\t(Yiddish) a baked or fried turnover filled with potato or meat or cheese; often eaten as a snack\nn07624924\tsmall fruit or meat turnover baked or fried\nn07625061\tsmall turnover of Indian origin filled with vegetables or meat and fried and served hot\nn07625324\tindividual serving of minced e.g. 
meat or fish in a rich creamy sauce baked in a small pastry mold or timbale shell\nn07627931\tdough used for very light flaky rich pastries\nn07628068\ttissue thin sheets of pastry used especially in Greek dishes\nn07628181\tbatter for making light hollow cases to hold various fillings\nn07631926\tice cream molded to look like a cake\nn07639069\ta small ring-shaped friedcake\nn07641928\ta fried ball or patty of flaked fish and mashed potatoes\nn07642361\ta long fillet of fish breaded and fried\nn07642471\tfruit preserved by cooking with sugar\nn07642742\tthick dark spicy puree of apples\nn07642833\ta Chinese preserve of mixed fruits and ginger\nn07642933\tpreserve of crushed fruit\nn07643026\ta conserve with a thick consistency; made with lemons and butter and eggs and sugar\nn07643200\tmade with strawberries\nn07643306\ta preserve made of the jelled juice of fruit\nn07643474\tjelly made from apple juice\nn07643577\ta tart apple jelly made from crab apples\nn07643679\tjelly made from grape juice\nn07643764\ta preserve made of the pulp and rind of citrus fruits\nn07643891\tmarmalade made from oranges\nn07643981\tan edible jelly (sweet or pungent) made with gelatin and used as a dessert or salad base or a coating for foods\nn07644244\tjellied dessert made with gelatin and fruit juice or water\nn07648913\tcrisp spicy chicken wings\nn07648997\tchicken wings cooked in barbecue sauce\nn07650792\tsoft semiliquid food\nn07650903\tfood chopped into small bits\nn07651025\tfood prepared by cooking and straining or processed in a blender\nn07654148\tmeat that has been barbecued or grilled in a highly seasoned sauce\nn07654298\tan Indian dish made with highly seasoned rice and meat or fish or vegetables\nn07655067\tlightly sauteed veal cutlets spread with a Soubise sauce and liver paste then sprinkled with grated Parmesan and baked briefly\nn07655263\ta dish of sauteed food\nn07663899\tsmall flat mass of chopped food\nn07665438\tsauteed veal cutlet in a breadcrumb-and-cheese 
coating served with tomato sauce\nn07666176\tthin slices of veal stuffed with cheese and ham and then sauteed\nn07672914\ta spread made chiefly from vegetable oils and used as a substitute for butter\nn07678586\tspiced mixture of chopped raisins and apples and other ingredients with or without meat\nn07678729\ta mixture of seasoned ingredients used to stuff meats and vegetables\nn07678953\tstuffing for turkey\nn07679034\tstuffing made with oysters\nn07679140\tmixture of ground raw chicken and mushrooms with pistachios and truffles and onions and parsley and lots of butter and bound with eggs\nn07679356\tfood made from dough of flour or meal and usually raised with yeast or baking powder and then baked\nn07680168\ta yeast-raised bread made of white flour and cornmeal and molasses\nn07680313\ta small loaf or roll of soft bread\nn07680416\ta rich currant cake or bun\nn07680517\ta crisp stick-shaped roll; often served with soup\nn07680655\ta long slender crusty breadstick\nn07680761\tdark steamed bread made of cornmeal wheat and flour with molasses and soda and milk or water\nn07680932\tsmall rounded bread either plain or sweet\nn07681264\tsweetened buns to be eaten with tea\nn07681355\tbread containing caraway seeds\nn07681450\t(Judaism) a loaf of white bread containing eggs and leavened with yeast; often formed into braided loaves and glazed with eggs before baking\nn07681691\tbread flavored with cinnamon often containing raisins\nn07681805\tbread made with cracked wheat that has been ground fine\nn07681926\ta thin crisp wafer made of flour and water with or without leavening and shortening; unsweetened or semisweet\nn07682197\ta small piece of toasted or fried bread; served in soup or salads\nn07682316\tbread made with whole wheat flour\nn07682477\tround, raised muffin cooked on a griddle; usually split and toasted before being eaten\nn07682624\tany of various breads made from usually unleavened dough\nn07682808\tFrench or Italian bread sliced and spread with garlic 
butter then crisped in the oven\nn07682952\tbread made with gluten flour\nn07683039\tbread made of graham (whole wheat) flour\nn07683138\ta technical name for the bread used in the service of Mass or Holy Communion\nn07683265\tthe thin wafer-like bread of Scandinavia\nn07683360\ta flat bread made of oat or barley flour; common in New England and Scotland\nn07683490\tflat pancake-like bread cooked on a griddle\nn07683617\tusually small round bread that can open into a pocket for filling\nn07683786\ta shaped mass of baked bread that is usually sliced before eating\nn07684084\ta loaf of French bread\nn07684164\tbrittle flat bread eaten at Passover\nn07684289\tleavened bread baked in a clay oven in India; usually shaped like a teardrop\nn07684422\tbread containing finely minced onions\nn07684517\tbread containing raisins\nn07684600\tbreads made with a leavening agent that permits immediate baking\nn07684938\tmoist bread containing banana pulp\nn07685031\tbread containing chopped dates\nn07685118\tbread containing chopped dates and nuts\nn07685218\tbread containing chopped nuts\nn07685303\tthin flat unleavened cake of baked oatmeal\nn07685399\tround loaf made with soda and buttermilk; often containing caraway seeds and raisins\nn07685546\tusually cooked in a skillet over an open fire: especially cornbread with ham bits and sometimes Irish soda bread\nn07685730\tany of various breads made entirely or partly with rye flour\nn07685918\tbread made of coarse rye flour\nn07686021\t(Judaism) bread made with rye flour; usually contains caraway seeds\nn07686202\ta rye bread made with molasses or brown sugar\nn07686299\ta moist aromatic yeast-raised bread made with rye flour and molasses and orange rind\nn07686461\twhite wheat bread raised by a salt-tolerant bacterium in a mixture of salt and either cornmeal or potato pulp\nn07686634\ta crisp bread of fine white flour\nn07686720\tmade with a starter of a small amount of dough in which fermentation is active\nn07686873\tslices of 
bread that have been toasted\nn07687053\tthin disk of unleavened bread used in a religious service (especially in the celebration of the Eucharist)\nn07687211\tbread made with finely ground and usually bleached wheat flour\nn07687381\tnarrow French stick loaf\nn07687469\ta crusty sourdough bread often baked in long slender tapered loaves or baguettes\nn07687626\tunsweetened yeast-raised bread made without shortening and baked in long thick loaves with tapered ends\nn07687789\tbread made primarily of cornmeal\nn07688021\tbaked in a pan or on a griddle (southern and midland)\nn07688130\tcornbread usually containing ham or bacon bits and cooked in a skillet\nn07688265\tcorn bread wrapped in cabbage leaves and baked in hot ashes (southern)\nn07688412\tthin usually unleavened johnnycake made of cornmeal; originally baked on the blade of a hoe over an open fire (southern)\nn07688624\tcornbread often made without milk or eggs and baked or fried (southern)\nn07688757\tsmall oval cake of corn bread baked or fried (chiefly southern)\nn07688898\tdeep-fried cornbread ball (southern)\nn07689003\tcornbread usually cooked pancake-style on a griddle (chiefly New England)\nn07689217\tform of johnnycake\nn07689313\tsoft bread made of cornmeal and sometimes rice or hominy; must be served with a spoon (chiefly southern)\nn07689490\tbuttered toast with sugar and cinnamon (and nutmeg and grated lemon peel)\nn07689624\tbuttered toast with sugar and grated orange rind and a little orange juice\nn07689757\tvery thin crisp brown toast\nn07689842\tslice of sweet raised bread baked again until it is brown and hard and crisp\nn07690019\ta long bun shaped to hold a frankfurter\nn07690152\ta round bun shaped to hold a hamburger patty\nn07690273\ta sweet quick bread baked in a cup-shaped pan\nn07690431\tmuffin containing bran\nn07690511\tcornbread muffin\nn07690585\tlight puffy bread made of a puff batter and traditionally baked in the pan with roast beef\nn07690739\tlight hollow muffin made of a 
puff batter (individual Yorkshire pudding) baked in a deep muffin cup\nn07690892\tsmall biscuit (rich with cream and eggs) cut into diamonds or sticks and baked in an oven or (especially originally) on a griddle\nn07691091\ta scone made by dropping a spoonful of batter on a griddle\nn07691237\tmoderately sweet raised roll containing spices and raisins and citron and decorated with a cross-shaped sugar glaze\nn07691539\ta light roll rich with eggs and butter and somewhat sweet\nn07691650\tvery rich flaky crescent-shaped roll\nn07691758\tyeast-raised roll with a hard crust\nn07691863\tyeast-raised roll with a soft crust\nn07691954\trounded raised poppy-seed roll made of a square piece of dough by folding the corners in to the center\nn07692114\tyeast-raised dinner roll made by folding a disk of dough before baking\nn07692248\tyeast-raised dinner roll made by baking three small balls of dough in each cup of a muffin pan\nn07692405\tyeast-raised roll flavored with onion\nn07692517\tflat crusty-bottomed onion roll\nn07692614\tany of numerous yeast-raised sweet rolls with our without raisins or nuts or spices or a glaze\nn07692887\talmond-flavored yeast-raised pastry shaped in an irregular semicircle resembling a bear's claw\nn07693048\trolled dough spread with cinnamon and sugar (and raisins) then sliced before baking\nn07693223\trolled dough spread with sugar and nuts then sliced and baked in muffin tins with honey or sugar and butter in the bottom\nn07693439\tpinwheel-shaped rolls spread with cinnamon and sugar and filled with e.g. 
jam before baking\nn07693590\tlight sweet yeast-raised roll usually filled with fruits or cheese\nn07693725\t(Yiddish) glazed yeast-raised doughnut-shaped roll with hard crust\nn07693889\tbagel flavored with onion\nn07693972\tsmall round bread leavened with baking-powder or soda\nn07694169\tbiscuit made from dough rolled and cut\nn07694403\tleavened with baking powder\nn07694516\tvery tender biscuit partially leavened with buttermilk and soda\nn07694659\tvery short biscuit dough baked as individual biscuits or a round loaf; served with sweetened fruit and usually whipped cream\nn07694839\tvery hard unsalted biscuit or bread; a former ship's staple\nn07695187\ta cracker sprinkled with salt before baking\nn07695284\tunsweetened cracker leavened slightly with soda and cream of tartar\nn07695410\ta small dry usually round cracker\nn07695504\ta thin flour-and-water biscuit usually made without shortening; often served with cheese\nn07695652\tsemisweet whole-wheat cracker\nn07695742\tglazed and salted cracker typically in the shape of a loose knot\nn07695878\ta pretzel made of soft bread\nn07695965\ttwo (or more) slices of bread with a filling between them\nn07696403\ta serving consisting of a sandwich or sandwiches with garnishes\nn07696527\ta sandwich\nn07696625\ta sandwich made with a filling of sliced ham\nn07696728\ta sandwich made with a filling of sliced chicken\nn07696839\tmade with three slices of usually toasted bread\nn07696977\tsandwich without a covering slice of bread\nn07697100\ta sandwich consisting of a fried cake of minced beef served on a bun, often with other ingredients\nn07697313\ta hamburger with melted cheese on it\nn07697408\ta sandwich that resembles a hamburger but made with tuna instead of beef\nn07697537\ta frankfurter served hot on a bun\nn07697699\tground beef (not a patty) cooked in a spicy sauce and served on a bun\nn07697825\ta large sandwich made of a long crusty roll split lengthwise and filled with meats and cheese (and tomato and 
onion and lettuce and condiments); different names are used in different sections of the United States\nn07698250\ta Greek sandwich: sliced roast lamb with onion and tomato stuffed into pita bread\nn07698401\tsandwich filled with slices of bacon and tomato with lettuce\nn07698543\ta hot sandwich with corned beef and Swiss cheese and sauerkraut on rye bread\nn07698672\ta sandwich made from a western omelet\nn07698782\ta sandwich in which the filling is rolled up in a soft tortilla\nn07700003\tspaghetti served with a tomato sauce\nn07703889\tsweetened porridge made of tapioca or flour or oatmeal cooked quickly in milk or water\nn07704054\ta thin porridge (usually oatmeal or cornmeal)\nn07704205\ta Chinese rice gruel eaten for breakfast\nn07704305\ta thin porridge or soup (usually oatmeal and water flavored with meat)\nn07705931\tedible reproductive body of a seed plant especially one having sweet flesh\nn07707451\tedible seeds or roots or stems or leaves or bulbs or tubers or nonsweet fruits of any of numerous herbaceous plant\nn07708124\ta vegetable cut into thin strips (usually used as a garnish)\nn07708398\tan uncooked vegetable\nn07708512\traw vegetables cut into bite-sized strips and served with a dip\nn07708685\tcelery stalks cut into small sticks\nn07708798\tthe seedpod of a leguminous plant (such as peas or beans or lentils)\nn07709046\tedible seeds of various pod-bearing plants (peas or beans or lentils etc.)\nn07709172\tany of various herbaceous plants whose leaves or stems or flowers are cooked and used for food or seasoning\nn07709333\tany of various leafy plants or their leaves and stems eaten as vegetables\nn07709701\tsucculent and aromatic young dark green leaves used in Chinese and Vietnamese and Japanese cooking\nn07709881\tcheeselike food made of curdled soybean milk\nn07710007\tany of several fruits of plants of the family Solanaceae; especially of the genera Solanum, Capsicum, and Lycopersicon\nn07710283\tany of various fleshy edible underground 
roots or tubers\nn07710616\tan edible tuber native to South America; a staple food of Ireland\nn07710952\tpotato that has been cooked by baking it in an oven\nn07711080\tstrips of potato fried in deep fat\nn07711232\tsliced pieces of potato fried in a pan until brown and crisp\nn07711371\ta baked potato served with the jacket on\nn07711569\tpotato that has been peeled and boiled and then mashed\nn07711683\tcrisp fried potato peeling\nn07711799\tsimilar to the common potato\nn07711907\tedible tuberous root of various yam plants of the genus Dioscorea grown in the tropics world-wide for food\nn07712063\tthe edible tuberous root of the sweet potato vine which is grown widely in warm regions of the United States\nn07712267\tsweet potato with deep orange flesh that remains moist when baked\nn07712382\tfood for light meals or for eating between meals\nn07712559\ta thin crisp slice of potato fried in deep fat\nn07712748\tthin piece of cornmeal dough fried\nn07712856\ta small piece of tortilla\nn07712959\ta tortilla chip topped with cheese and chili-pepper and broiled\nn07713074\tegg-shaped vegetable having a shiny skin typically dark purple but occasionally white or yellow\nn07713267\tlong pinkish sour leafstalks usually eaten cooked and sweetened\nn07713395\ta vegetable of the mustard family: especially mustard greens; various cabbages; broccoli; cauliflower; brussels sprouts\nn07713763\tleaves eaten as cooked greens\nn07713895\tany of various types of cabbage\nn07714078\tcoarse curly-leafed cabbage\nn07714188\tkale that has smooth leaves\nn07714287\telongated head of crisp celery-like stalks and light green leaves\nn07714448\telongated head of dark green leaves on thick white stalks\nn07714571\tany of several varieties of cabbage having a large compact globular head; may be steamed or boiled or stir-fried or used raw in coleslaw\nn07714802\tcompact head of purplish-red leaves\nn07714895\thead of soft crinkly leaves\nn07714990\tbranched green undeveloped flower 
heads\nn07715103\tcompact head of white undeveloped flowers\nn07715221\tthe small edible cabbage-like buds growing along a stalk of the brussels sprout plant\nn07715407\tslightly bitter dark green leaves and clustered flower buds\nn07715561\tedible fruit of a squash plant; eaten as a vegetable\nn07715721\tany of various fruits of the gourd family that mature during the summer; eaten while immature and before seeds and rind harden\nn07716034\tsquash having yellow skin and yellowish flesh and usually elongated neck\nn07716203\tyellow squash with a thin curved neck and somewhat warty skin\nn07716358\tsmall cucumber-shaped vegetable marrow; typically dark green\nn07716504\tlarge elongated squash with creamy to deep green skins\nn07716649\tsquash resembling zucchini\nn07716750\tround greenish-white squash having one face flattened with a scalloped edge\nn07716906\tmedium-sized oval squash with flesh in the form of strings that resemble spaghetti\nn07717070\tany of various fruits of the gourd family with thick rinds and edible yellow to orange flesh that mature in the fall and can be stored for several months\nn07717410\tsmall dark green or yellow ribbed squash with yellow to orange flesh\nn07717556\tbuff-colored squash with a long usually straight neck and sweet orange flesh\nn07717714\tlarge football-shaped winter squash with a warty grey-green rind\nn07717858\tlarge squash shaped somewhat like a turban usually with a rounded central portion protruding from the blossom end\nn07718068\tdrum-shaped squash with dark green rind marked in silver or grey\nn07718195\tglobose or ovoid squash with striped grey and green warty rind\nn07718329\ta squash with a hard rind and an elongated curved neck\nn07718472\tcylindrical green fruit with thin green rind and white flesh eaten as a vegetable; related to melons\nn07718671\tsmall prickly cucumber\nn07718747\ta thistlelike flower head with edible fleshy leaves and heart\nn07718920\tthe tender fleshy center of the immature artichoke 
flower\nn07719058\tsunflower tuber eaten raw or boiled or sliced thin and fried as Saratoga chips\nn07719213\tedible young shoots of the asparagus plant\nn07719330\tedible young shoots of bamboo\nn07719437\ta newly grown bud (especially from a germinating seed)\nn07719616\tany of various sprouted beans: especially mung beans or lentils or edible soybeans\nn07719756\tsprouted alfalfa seeds\nn07719839\tround red root vegetable\nn07719980\tyoung leaves of the beetroot\nn07720084\twhite-rooted beet grown as a source of sugar\nn07720185\tcultivated as feed for livestock\nn07720277\tlong succulent whitish stalks with large green leaves\nn07720442\tsweet and hot varieties of fruits of plants of the genus Capsicum\nn07720615\tlarge mild crisp thick-walled capsicum peppers usually bell-shaped or somewhat oblong; commonly used in salads\nn07720875\tlarge bell-shaped sweet pepper in green or red or yellow or orange or black varieties\nn07721018\ta sweet pepper that becomes red when ripe\nn07721118\tround sweet pepper\nn07721195\tfully ripened sweet red pepper; usually cooked\nn07721325\tany of various pungent capsicum fruits\nn07721456\tvery hot and finely tapering pepper of special pungency\nn07721678\thot green or red pepper of southwestern United States and Mexico\nn07721833\ta ripe jalapeno that has been dried for use in cooking\nn07721942\ta long and often twisted hot red pepper\nn07722052\tvery hot red peppers; usually long and thin; some very small\nn07722217\tan aromatic flavorful vegetable\nn07722390\tmild flat onion grown in warm areas\nn07722485\ta young onion before the bulb has enlarged; eaten in salads\nn07722666\tsweet-flavored onion grown in Georgia\nn07722763\tlarge mild and succulent onion; often eaten raw\nn07722888\tflat mild onion having purplish tunics; used as garnish on hamburgers and salads\nn07723039\trelated to onions; white cylindrical bulb and flat dark-green leaves\nn07723177\tsmall mild-flavored onion-like or garlic-like clustered bulbs used for 
seasoning\nn07723330\tgreens suitable for eating uncooked as in salads\nn07723559\tleaves of any of various plants of Lactuca sativa\nn07723753\tlettuce with relatively soft leaves in a loose head; easily broken or bruised\nn07723968\tlettuce with delicate and relatively crunchy leaves\nn07724078\tlettuce with relatively crisp leaves\nn07724173\tlettuce with relatively soft leaves\nn07724269\tlettuce with crisp tightly packed light-green leaves in a firm head\nn07724492\tlettuce with long dark-green leaves in a loosely packed elongated head\nn07724654\tlettuce with loosely curled leaves that do not form a compact head\nn07724819\tleaves having celery-like stems eaten raw or cooked\nn07724943\tany of various edible seeds of plants of the family Leguminosae used for food\nn07725158\tOld World tropical bean\nn07725255\tround flat seed of the lentil plant used for food\nn07725376\tseed of a pea plant used for food\nn07725531\tfresh pea\nn07725663\ta variety of large pea that is commonly processed and sold in cans\nn07725789\tgreen peas with flat edible pods\nn07725888\tgreen peas with edible pods that are very crisp and not flat\nn07726009\tdried hulled pea; used in soup\nn07726095\tlarge white roundish Asiatic legume; usually dried\nn07726230\tsmall highly nutritious seed of the tropical pigeon-pea plant\nn07726386\tcoarse small-seeded pea often used as food when young and tender\nn07726525\tmarrowfat peas that have been soaked overnight and then boiled; served with fish and chips\nn07726672\teaten fresh as shell beans or dried\nn07726796\tany of numerous beans eaten either fresh or dried\nn07727048\tlarge dark red bean; usually dried\nn07727140\twhite-seeded bean; usually dried\nn07727252\tmottled or spotted bean of southwestern United States; usually dried\nn07727377\tMexican bean; usually dried\nn07727458\tblack-seeded bean of South America; usually dried\nn07727578\tbeans eaten before they are ripe as opposed to dried\nn07727741\ta French bean variety with 
light-colored seeds; usually dried\nn07727868\timmature bean pod eaten as a vegetable\nn07728053\ttender green beans without strings that easily snap into sections\nn07728181\tgreen beans with strings that must be removed\nn07728284\tflat-podded green bean\nn07728391\tlong bean pods usually sliced into half-inch lengths; a favorite in Britain\nn07728585\tvery small and slender green bean\nn07728708\tsnap beans with yellow pods\nn07728804\tunripe beans removed from the pod before cooking\nn07729000\tbroad flat beans simmered gently; never eaten raw\nn07729142\trelatively large lima beans\nn07729225\tsmall flat green bean similar to lima beans\nn07729384\tshell beans cooked as lima beans\nn07729485\tthe most highly proteinaceous vegetable known; the fruit of the soybean plant is used in a variety of foods and as fodder (especially as a replacement for animal protein)\nn07729828\tseeds shelled and cooked as lima beans\nn07729926\tseeds used as livestock feed\nn07730033\tonly parts eaten are roots and especially stalks (blanched and used as celery); related to artichokes\nn07730207\torange root; important source of carotene\nn07730320\ta stick of carrot eaten raw\nn07730406\tstalks eaten raw or cooked or used as seasoning\nn07730562\tany of several types of commercially grown celery having green stalks\nn07730708\tthickened edible aromatic root of a variety of celery plant\nn07730855\tcrisp spiky leaves with somewhat bitter taste\nn07731006\tprized variety of chicory having globose heads of red leaves\nn07731122\ta drink resembling coffee that is sometimes substituted for it\nn07731284\troot of the chicory plant roasted and ground to substitute for or adulterate coffee\nn07731436\ttrade mark for a coffee substitute invented by C. W. 
Post and made with chicory and roasted grains\nn07731587\tvariety of endive having leaves with irregular frilled edges\nn07731767\tyoung broad-leaved endive plant deprived of light to form a narrow whitish head\nn07731952\tears of corn that can be prepared and served for human food\nn07732168\tcorn that can be eaten as a vegetable while still young and soft\nn07732302\thulled corn with the bran and germ removed\nn07732433\thominy prepared by bleaching in lye\nn07732525\thominy prepared by milling to pellets of medium size\nn07732636\tsmall kernels of corn exploded by heat\nn07732747\tpungent leaves of any of numerous cruciferous herbs\nn07732904\tcresses that grow in clear ponds and streams\nn07733005\tcress cultivated for salads and garnishes\nn07733124\tcress cultivated for winter salads\nn07733217\tedible leaves of the common dandelion collected from the wild; used in salads and in making wine\nn07733394\tlong mucilaginous green pods; may be simmered or sauteed but used especially in soups and stews\nn07733567\tfleshy turnip-shaped edible stem of the kohlrabi plant\nn07733712\tleaves collected from the wild\nn07733847\tleafy greens collected from the wild and used as a substitute for spinach\nn07734017\tmildly acid red or yellow pulpy fruit eaten as a vegetable\nn07734183\tany of several large tomatoes with thick flesh\nn07734292\tsmall red to yellow tomatoes\nn07734417\ta kind of cherry tomato that is frequently used in cooking rather than eaten raw\nn07734555\tsmall edible yellow to purple tomato-like fruit enclosed in a bladderlike husk\nn07734744\tfleshy body of any of numerous edible fungi\nn07734879\tmushrooms stuffed with any of numerous mixtures of e.g. 
meats or nuts or seafood or spinach\nn07735052\teither of two long roots eaten cooked\nn07735179\tlong white salsify\nn07735294\tlong black salsify\nn07735404\twhitish edible root; eaten cooked\nn07735510\tusually large pulpy deep-yellow round fruit of the squash family maturing in late summer or early autumn\nn07735687\tpungent fleshy edible root\nn07735803\troot of any of several members of the mustard family\nn07735981\twhite root of a turnip plant\nn07736087\tthe large yellow root of a rutabaga plant used as food\nn07736256\ttender leaves of young white turnips\nn07736371\tlarge sour-tasting arrowhead-shaped leaves used in salads and sauces\nn07736527\tgreens having small tart oval to pointed leaves; preferred to common sorrel for salads\nn07736692\tdark green leaves; eaten cooked or raw in salads\nn07736813\ttropical starchy tuberous root\nn07736971\tedible subterranean fungus of the genus Tuber\nn07737081\ta hard-shelled seed consisting of an edible kernel or meat enclosed in a woody or leathery shell\nn07737594\tnut tasting like roasted chestnuts; a staple food of Australian aborigines\nn07737745\tpod of the peanut vine containing usually 2 nuts or seeds; `groundnut' and `monkey nut' are British terms\nn07738105\tfruit (especially peach) whose flesh does not adhere to the pit\nn07738224\tfruit (especially peach) whose flesh adheres strongly to the pit\nn07739035\tfruit that has fallen from the tree\nn07739125\tfruit with red or yellow or green skin and sweet to tart crisp whitish flesh\nn07739344\tsmall sour apple; suitable for preserving\nn07739506\tan apple used primarily for eating raw without cooking\nn07739923\tan American eating apple with red or yellow and red skin\nn07740033\tlarge apple with a red skin\nn07740115\ta yellow Pippin with distinctive flavor\nn07740220\tvariety of sweet eating apples\nn07740342\ta sweet eating apple with yellow skin\nn07740461\ta sweet eating apple with bright red skin; most widely grown apple worldwide\nn07740597\tan 
eating apple that somewhat resembles a McIntosh; used as both an eating and a cooking apple\nn07740744\tyellow apple that ripens in late autumn; eaten raw\nn07740855\tred late-ripening apple; primarily eaten raw\nn07740954\tearly-ripening apple popular in the northeastern United States; primarily eaten raw but suitable for applesauce\nn07741138\tsimilar to McIntosh; juicy and late-ripening\nn07741235\tlarge late-ripening apple with skin striped with yellow and red\nn07741357\tany of several varieties of apples with red skins\nn07741461\tany of numerous superior eating apples with yellow or greenish yellow skin flushed with red\nn07741623\tused primarily as eating apples\nn07741706\tapple grown chiefly in the Shenandoah Valley\nn07741804\tcrisp apple with dark red skin\nn07741888\tcrisp tart apple; good for eating raw and suitable for cooking\nn07742012\tan apple used primarily in cooking for pies and applesauce etc\nn07742224\tvery large cooking apple\nn07742313\tapple with a green skin and hard tart flesh\nn07742415\tapple used primarily in cooking\nn07742513\tapple used primarily in cooking\nn07742605\tlarge red apple used primarily for baking\nn07742704\tany of numerous small and pulpy edible fruits; used as desserts or in making jams and jellies and preserves\nn07743224\tblue-black berries similar to American blueberries\nn07743384\tblue-black berry similar to blueberries and bilberries of the eastern United States\nn07743544\tsweet edible dark-blue berries of either low-growing or high-growing blueberry plants\nn07743723\tspicy red berrylike fruit; source of wintergreen oil\nn07743902\tvery tart red berry used for sauce or juice\nn07744057\ttart red berries similar to American cranberries but smaller\nn07744246\tany of several tart red or black berries used primarily for jellies and jams\nn07744430\tcurrant-like berry used primarily in jams and jellies\nn07744559\tsmall black berries used in jams and jellies\nn07744682\tsmall red berries used primarily in jams 
and jellies\nn07744811\tlarge sweet black or very dark purple edible aggregate fruit of any of various bushes of the genus Rubus\nn07745046\tlarge raspberry-flavored fruit; cross between blackberries and raspberries\nn07745197\tblackberry-like fruits of any of several trailing blackberry bushes\nn07745357\tlarge red variety of the dewberry\nn07745466\tred or black edible aggregate berries usually smaller than the related blackberries\nn07745661\tedible purple or red berries\nn07745940\tsweet fleshy red fruit\nn07746038\tsmall edible dark purple to black berry with large pits; southern United States\nn07746186\torange fruit resembling a plum; edible when fully ripe\nn07746334\tacid red or yellow cherry-like fruit of a tropical American shrub very rich in vitamin C\nn07746551\tdeeply ridged yellow-brown tropical fruit; used raw as a vegetable or in salad or when fully ripe as a dessert\nn07746749\ttropical cylindrical fruit resembling a pinecone with pineapple-banana flavor\nn07746910\tedible scarlet plumlike fruit of a South African plant\nn07747055\tany of numerous fruits of the genus Citrus having thick rind and juicy pulp; grown in warm regions\nn07747607\tround yellow to orange fruit of any of several citrus trees\nn07747811\tlarge sweet easily-peeled Florida fruit with deep orange rind\nn07747951\ta somewhat flat reddish-orange loose skinned citrus of China\nn07748157\ta mandarin orange of a deep reddish orange color and few seeds\nn07748276\tmedium-sized largely seedless mandarin orange with thin smooth skin\nn07748416\tany of various deep orange mandarins grown in the United States and southern Africa\nn07748574\tlarge sweet juicy hybrid between tangerine and grapefruit having a thick wrinkled skin\nn07748753\thighly acidic orange used especially in marmalade\nn07748912\torange with sweet juicy pulp; often has a thin skin\nn07749095\tsweet almost seedless orange of Israel\nn07749192\tseedless orange enclosing a small secondary fruit at the 
apex\nn07749312\tvariety of sweet orange cultivated extensively in Florida and California\nn07749446\tsmall oval citrus fruit with thin sweet rind and very acid pulp\nn07749582\tyellow oval fruit with juicy acidic flesh\nn07749731\tthe green acidic fruit of any of various lime trees\nn07749870\tsmall yellow-green limes of southern Florida\nn07749969\tlarge yellow fruit with somewhat acid juicy pulp; usual serving consists of a half\nn07750146\tlarge pear-shaped fruit similar to grapefruit but with coarse dry pulp\nn07750299\tmore aromatic and acid tasting than oranges; used in beverages and marmalade\nn07750449\tlarge lemonlike fruit with thick aromatic rind; usually preserved\nn07750586\toval-shaped edible seed of the almond tree\nn07750736\tan almond covered with a sugar coating that is hard and flavored and colored\nn07750872\tdowny yellow to rosy-colored fruit resembling a small peach\nn07751004\tdowny juicy fruit with sweet yellowish or whitish flesh\nn07751148\ta variety or mutation of the peach that has a smooth skin\nn07751280\thighly colored edible fruit of pitahaya cactus having bright red juice; often as large as a peach\nn07751451\tany of numerous varieties of small to medium-sized round or oval fruit having a smooth skin and a single pit\nn07751737\tdark purple plum of the damson tree\nn07751858\tsweet green or greenish-yellow variety of plum\nn07751977\tsmall dark purple fruit used especially in jams and pies\nn07752109\tsmall sour dark purple fruit of especially the Allegheny plum bush\nn07752264\ta large red plum served as dessert\nn07752377\tfruit preserved by drying\nn07752514\tapricots preserved by drying\nn07752602\tdried plum\nn07752664\tdried grape\nn07752782\tdried seedless grape\nn07752874\tseeded grape that has been dried\nn07752966\tsmall dried seedless raisin grown in the Mediterranean region and California; used in cooking\nn07753113\tfleshy sweet pear-shaped yellowish or purple multiple fruit eaten fresh or preserved or 
dried\nn07753275\tlarge sweet fleshy tropical fruit with a terminal tuft of stiff leaves; widely cultivated\nn07753448\tWest Indian fruit resembling the mango; often pickled\nn07753592\telongated crescent-shaped yellow fruit with soft sweet flesh\nn07753743\tegg-shaped tropical fruit of certain passionflower vines; used for sherbets and confectionery and drinks\nn07753980\tthe egg-shaped edible fruit of tropical American vines related to passionflowers\nn07754155\tapple-sized passion fruit of the West Indies\nn07754279\tthe edible yellow fruit of the Jamaica honeysuckle\nn07754451\ta large round seedless or seeded fruit with a texture like bread; eaten boiled or baked or roasted or ground into flour; the roasted seeds resemble chestnuts\nn07754684\timmense East Indian fruit resembling breadfruit; it contains an edible pulp and nutritious seeds that are commonly roasted\nn07754894\tseed of the cacao tree; ground roasted beans are source of chocolate\nn07755089\tpowder of ground roasted cacao beans with most of the fat removed\nn07755262\tovoid orange-yellow mealy sweet fruit of Florida and West Indies\nn07755411\tany of numerous fruits of the gourd family having a hard rind and sweet juicy flesh\nn07755619\ta bite of melon cut as a sphere\nn07755707\tthe fruit of a muskmelon vine; any of several sweet melons related to cucumbers\nn07755929\tthe fruit of a cantaloup vine; small to medium-sized melon with yellowish flesh\nn07756096\tthe fruit of the winter melon vine; a green melon with pale green to orange flesh that keeps well\nn07756325\tthe fruit of a variety of winter melon vine; a large smooth greenish-white melon with pale green flesh\nn07756499\tthe fruit of a variety of winter melon vine; a large green melon with orange flesh\nn07756641\tthe fruit of a variety of muskmelon vine; a melon with netlike markings and deep green flesh\nn07756838\tmelon having yellowish rind and whitish flesh\nn07756951\tlarge oblong or roundish melon with a hard green rind and 
sweet watery red or occasionally yellowish pulp\nn07757132\ta red fruit with a single hard stone\nn07757312\tany of several fruits of cultivated cherry trees that have sweet flesh\nn07757511\tdark red or blackish sweet cherry\nn07757602\tlarge heart-shaped sweet cherry with soft flesh\nn07757753\theart cherry with dark flesh and skin cherry\nn07757874\tMexican black cherry\nn07757990\tacid cherries used for pies and preserves\nn07758125\tpale red sour cherry with colorless or nearly colorless juice\nn07758260\tcultivated sour cherry with dark-colored skin and juice\nn07758407\tplum-shaped whitish to almost black fruit used for preserves; tropical American\nn07758582\tany of various small cucumbers pickled whole\nn07758680\tany of various juicy fruit of the genus Vitis with green or purple skins; grow in clusters\nn07758950\tpurplish-black wild grape of the eastern United States with tough skins that slip easily from the flesh; cultivated in many varieties\nn07759194\tslipskin grape; a purple table grape of the northeastern United States\nn07759324\tslipskin grape; a reddish American table grape\nn07759424\tdull-purple grape of southern United States\nn07759576\tamber-green muscadine grape of southeastern United States\nn07759691\ta grape whose skin slips readily from the pulp\nn07759816\tgrape from a cultivated variety of the common grape vine of Europe\nn07760070\tred table grape of California\nn07760153\tsweet aromatic grape used for raisins and wine\nn07760297\tdark reddish-purple table grape of California\nn07760395\tpale yellow seedless grape used for raisins and wine\nn07760501\tvariety of wine grape originally grown in Hungary; the prototype of vinifera grapes\nn07760673\tpurplish-red table grape\nn07760755\tseedless green table grape of California\nn07760859\tthe fruit of any of several tropical American trees of the genus Annona having soft edible pulp\nn07761141\tlarge tropical fruit with leathery skin and soft pulp; related to custard 
apples\nn07761309\tlarge spiny tropical fruit with tart pulp related to custard apples\nn07761611\tsweet pulpy tropical fruit with thick scaly rind and shiny black seeds\nn07761777\twhitish tropical fruit with a pinkish tinge related to custard apples; grown in the southern United States\nn07761954\tovoid yellow fruit with very fragrant peach-colored flesh; related to custard apples\nn07762114\tfruit with yellow flesh; related to custard apples\nn07762244\tlarge oval melon-like tropical fruit with yellowish flesh\nn07762373\tSouth African fruit smelling and tasting like apricots; used for pickles and preserves\nn07762534\tmaroon-purple gooseberry-like fruit of India having tart-sweet purple pulp used especially for preserves\nn07762740\tred pear-shaped tropical fruit with poisonous seeds; flesh is poisonous when unripe or overripe\nn07762913\thuge fruit native to southeastern Asia `smelling like Hell and tasting like Heaven'; seeds are roasted and eaten like nuts\nn07763107\tdark-green kiwi-sized tropical fruit with white flesh; used chiefly for jellies and preserves\nn07763290\tround one-inch Caribbean fruit with green leathery skin and sweet juicy translucent pulp; eaten like grapes\nn07763483\ta succulent orange-sized tropical fruit with a thick rind\nn07763629\tfuzzy brown egg-shaped fruit with slightly tart green flesh\nn07763792\tyellow olive-sized semitropical fruit with a large free stone and relatively little flesh; used for jellies\nn07763987\ttwo- to three-inch tropical fruit with juicy flesh suggestive of both peaches and pineapples\nn07764155\tlarge oval tropical fruit having smooth skin, juicy aromatic pulp, and a large hairy seed\nn07764315\ttropical fruit with a rough brownish skin and very sweet brownish pulp\nn07764486\tbrown oval fruit flesh makes excellent sherbet\nn07764630\tlarge tropical seed pod with very tangy pulp that is eaten fresh or cooked with rice and fish or preserved for curries and chutneys\nn07764847\ta pear-shaped tropical fruit 
with green or blackish skin and rich yellowish pulp enclosing a single large seed\nn07765073\tsweet edible fruit of the date palm with a single long woody seed\nn07765208\tberrylike fruit of an elder used for e.g. wines and jellies\nn07765361\ttropical fruit having yellow skin and pink pulp; eaten fresh or used for e.g. jellies\nn07765517\tpurplish tropical fruit\nn07765612\tyellow oval tropical fruit\nn07765728\tfruit of the wild plum of southern United States\nn07765862\ttough-skinned purple grapelike tropical fruit grown in Brazil\nn07765999\tdark red plumlike fruit of Old World buckthorn trees\nn07766173\tChinese fruit having a thin brittle shell enclosing a sweet jellylike pulp and a single seed; often dried\nn07766409\tAsian fruit similar to litchi\nn07766530\tglobular or ovoid tropical fruit with thick russet leathery rind and juicy yellow or reddish flesh\nn07766723\ttropical fruit from the Philippines having a mass of small seeds embedded in sweetish white pulp\nn07766891\tcrabapple-like fruit used for preserves\nn07767002\ta South African globular fruit with brown leathery skin and pithy flesh having a sweet-acid taste\nn07767171\tsweet usually dark purple blackberry-like fruit of any of several mulberry trees of the genus Morus\nn07767344\tone-seeded fruit of the European olive tree usually pickled and used as a relish\nn07767549\tolives picked ripe and cured in brine then dried or pickled or preserved canned or in oil\nn07767709\tolives picked green and pickled in brine; infrequently stuffed with e.g. 
pimento\nn07767847\tsweet juicy gritty-textured fruit available in many varieties\nn07768068\tgreenish-yellow pear\nn07768139\ta pear with firm flesh and a green skin\nn07768230\tjuicy yellow pear\nn07768318\tsmall yellowish- to reddish-brown pear\nn07768423\tstarchy banana-like fruit; eaten (always cooked) as a staple vegetable throughout the tropics\nn07768590\thybrid between plum and apricot\nn07768694\tlarge globular fruit having many seeds with juicy red pulp in a tough brownish-red rind\nn07768858\tround or pear-shaped spiny fruit of any of various prickly pear cacti\nn07769102\tsmall yellow to orange fruit of the Barbados gooseberry cactus used in desserts and preserves and jellies\nn07769306\tred Australian fruit; used for dessert or in jam\nn07769465\tedible nutlike seed of the quandong fruit\nn07769584\taromatic acid-tasting pear-shaped fruit used in preserves\nn07769731\tpleasantly acid bright red oval Malayan fruit covered with soft spines\nn07769886\tfruit of an East Indian tree similar to the rambutan but sweeter\nn07770034\tfragrant oval yellowish tropical fruit used in jellies and confections\nn07770180\tacid gritty-textured fruit\nn07770439\tAfrican gourd-like fruit with edible pulp\nn07770571\tmany are used as seasoning\nn07770763\tthe edible seed of a pumpkin\nn07770869\tseed of betel palm; chewed with leaves of the betel pepper and lime as a digestive stimulant and narcotic in southeastern Asia\nn07771082\tsmall sweet triangular nut of any of various beech trees\nn07771212\tnut of any of various walnut trees having a wrinkled two-lobed seed with a hard shell\nn07771405\tAmerican walnut having a very hard and thick woody shell\nn07771539\tnut with a wrinkled two-lobed seed and hard but relatively thin shell; widely used in cooking\nn07771731\tthree-sided tropical American nut with white oily meat and hard brown shell\nn07771891\toily egg-shaped nut of an American tree of the walnut family\nn07772026\ta large nutlike seed of a South American 
tree\nn07772147\tkidney-shaped nut edible only when roasted\nn07772274\tedible nut of any of various chestnut trees of the genus Castanea\nn07772413\tsmall nut of either of two small chestnut trees of the southern United States; resembles a hazelnut\nn07772788\tnut of any of several trees of the genus Corylus\nn07772935\tlarge hard-shelled oval nut with a fibrous husk containing thick white meat surrounding a central cavity filled (when fresh) with fluid or milk\nn07773428\tclear to whitish fluid from within a fresh coconut\nn07774182\tnut of Brazilian or West Indian palms\nn07774295\tsmall hard-shelled nut of North American hickory trees especially the shagbark hickories\nn07774479\ta flavoring extracted from the kola nut\nn07774596\tnutlike seed with sweet and crisp white meat\nn07774719\tsmooth brown oval nut of south central United States\nn07774842\tedible seed of any of several nut pines especially some pinons of southwestern North America\nn07775050\tnut of Mediterranean trees having an edible green kernel\nn07775197\tedible seed of sunflowers; used as food and poultry feed and as a source of oil\nn07783827\tpaste made primarily of anchovies; used in sauces and spreads\nn07785487\ta pickled herring filet that has been rolled or wrapped around a pickle\nn07800091\tfood for domestic livestock\nn07800487\ta concentrated feed for cattle; processed in the form of blocks or cakes\nn07800636\tfeed given to young animals isolated in a creep\nn07800740\tcoarse food (especially for livestock) composed of entire plants or the leaves and stalks of a cereal crop\nn07801007\tgrain grown for cattle feed\nn07801091\tbulky food like grass or hay for browsing or grazing horses or cattle\nn07801342\tfodder harvested while green and kept succulent by partial fermentation as in a silo\nn07801508\tmass of e.g. 
linseed or cottonseed or soybean from which the oil has been pressed; used as food for livestock\nn07801709\tground oil cake\nn07801779\tleguminous plant grown for hay or forage\nn07801892\ta bean plant cultivated for use animal fodder\nn07802026\tgrass mowed and cured for use as fodder\nn07802152\ta grass grown for hay\nn07802246\tthe dried stalks and leaves of a field crop (especially corn) used as animal fodder after the grain has been harvested\nn07802417\tfoodstuff prepared from the starchy grains of cereal grasses\nn07802767\tgrain intended to be or that has been ground\nn07802863\tthe hulled and crushed grain of various cereals\nn07802963\tsmall seed of any of various annual cereal grasses especially Setaria italica\nn07803093\ta grain of barley\nn07803213\tbarley ground into small round pellets\nn07803310\tgrain ground into flour\nn07803408\tparched crushed wheat\nn07803545\tgrains of common wheat; sometimes cooked whole or cracked as cereal; usually ground into flour\nn07803779\tgrains of wheat that have been crushed into small pieces\nn07803895\theavy and filling (and usually starchy) food\nn07803992\tembryo of the wheat kernel; removed before milling and eaten as a source of vitamins\nn07804152\tseed of the annual grass Avena sativa (spoken of primarily in the plural as `oats')\nn07804323\tgrains used as food either unpolished or more often polished\nn07804543\tunpolished rice retaining the yellowish-brown outer layer\nn07804657\thaving husk or outer brown layers removed\nn07804771\tgrains of aquatic grass of North America\nn07804900\trice in the husk either gathered or still in the field\nn07805006\twet feed (especially for pigs) consisting of mostly kitchen waste mixed with water or skimmed or sour milk\nn07805254\tmixture of ground animal feeds\nn07805389\tdry mash for poultry\nn07805478\tfood of a ruminant regurgitated to be chewed again\nn07805594\tfood given to birds; usually mixed seeds\nn07805731\tfood prepared for animal pets\nn07805966\tfood 
prepared for dogs\nn07806043\tfood prepared for cats\nn07806120\ta mixture of seeds used to feed caged birds\nn07806221\tfood mixtures either arranged on a plate or tossed and served with a moist dressing; usually consisting of or including greens\nn07806633\tsalad tossed with a dressing\nn07806774\ttossed salad composed primarily of salad greens\nn07806879\ttypically having fried croutons and dressing made with a raw egg\nn07807002\tcooked meats and eggs and vegetables usually arranged in rows around the plate and dressed with a salad dressing\nn07807171\ttypically containing tomatoes and anchovies and garnished with black olives and capers\nn07807317\tcontaining meat or chicken or cheese in addition to greens and vegetables\nn07807472\tthe combination salad prepared as a particular chef's specialty\nn07807594\tany of various salads having chopped potatoes as the base\nn07807710\ta salad having any of various pastas as the base\nn07807834\thaving macaroni as the base\nn07807922\tsalad composed of fruits\nn07808022\ttypically made of apples and celery with nuts or raisins and dressed with mayonnaise\nn07808166\tlettuce and crabmeat dressed with sauce Louis\nn07808268\tbased on pickled herring\nn07808352\tsalad composed primarily of chopped canned tuna fish\nn07808479\tsalad composed primarily of chopped chicken meat\nn07808587\tbasically shredded cabbage\nn07808675\tsavory jelly based on fish or meat stock used as a mold for meats or vegetables\nn07808806\tsalad of meats or vegetables in gelatin\nn07808904\ta finely chopped salad with tomatoes and parsley and mint and scallions and bulgur wheat\nn07809096\tfood that is a component of a mixture in cooking\nn07809368\tsomething added to food primarily for the savor it imparts\nn07810531\ta cube of evaporated seasoned meat extract\nn07810907\ta preparation (a sauce or relish or spice) to enhance flavor or enjoyment\nn07811416\taromatic potherb used in cookery for its savory qualities\nn07812046\ta mixture of finely 
chopped fresh herbs\nn07812184\tany of a variety of pungent aromatic vegetable substances used for flavoring food\nn07812662\tan aromatic oil obtained from the spearmint plant\nn07812790\tfragrant yellow oil obtained from the lemon peel\nn07812913\toil or flavoring obtained from the creeping wintergreen or teaberry plant\nn07813107\twhite crystalline form of especially sodium chloride used to season and preserve food\nn07813324\tground celery seed and salt\nn07813495\tground dried onion and salt\nn07813579\tcombination of salt and vegetable extracts and spices and monosodium glutamate\nn07813717\tcrystals of citric acid used as seasoning\nn07813833\tChinese seasoning made by grinding star anise and fennel and pepper and cloves and cinnamon\nn07814007\tground dried berrylike fruit of a West Indian allspice tree; suggesting combined flavors of cinnamon and nutmeg and cloves\nn07814203\tspice from the dried aromatic bark of the Ceylon cinnamon tree; used as rolled strips or ground\nn07814390\tdried rolled strips of cinnamon bark\nn07814487\tspice from dried unopened flower bud of the clove tree; used whole or ground\nn07814634\taromatic seeds of the cumin herb of the carrot family\nn07814790\tfennel seeds are ground and used as a spice or as an ingredient of a spice mixture\nn07814925\tpungent rhizome of the common ginger plant; used fresh as a seasoning especially in Asian cookery\nn07815163\tdried ground gingerroot\nn07815294\tspice made from the dried fleshy covering of the nutmeg seed\nn07815424\thard aromatic seed of the nutmeg tree used as spice when grated or ground\nn07815588\tpungent seasoning from the berry of the common pepper plant of East India; use whole or ground\nn07815839\tpepper that is ground from whole peppercorns with husks on\nn07815956\tpepper ground from husked peppercorns\nn07816052\tdried root bark of the sassafras tree\nn07816164\tleaves of the common basil; used fresh or dried\nn07816296\tdried leaf of the bay laurel\nn07816398\tan herb 
whose leaves are used to flavor sauces and punches; young leaves can be eaten in salads or cooked\nn07816575\tbitter leaves used sparingly in salads; dried flowers used in soups and tisanes\nn07816726\tleaves used sparingly in soups and stews\nn07816839\tfresh ferny parsley-like leaves used as a garnish with chicken and veal and omelets and green salads and spinach\nn07817024\tcylindrical leaves used fresh as a mild onion-flavored seasoning\nn07817160\tleaves make a popular tisane; young leaves used in salads or cooked\nn07817315\tparsley-like herb used as seasoning or garnish\nn07817465\tdried coriander seeds used whole or ground\nn07817599\tleaves used sparingly (because of bitter overtones) in sauces and soups and stuffings\nn07817758\tleaves used for seasoning\nn07817871\taromatic bulbous stem base eaten cooked or raw in salads\nn07818029\taromatic anis-scented seeds\nn07818133\taromatic seeds used as seasoning especially in curry\nn07818277\taromatic bulb used as seasoning\nn07818422\tone of the small bulblets that can be split off of the axis of a larger garlic bulb\nn07818572\tlarge flat leaves used as chive is used\nn07818689\tlemony leaves used for a tisane or in soups or fruit punches\nn07818825\tstalks eaten like celery or candied like angelica; seeds used for flavoring or pickled like capers\nn07818995\tpungent leaves used as seasoning with meats and fowl and in stews and soups and omelets\nn07819166\tthe leaves of a mint plant used fresh or candied\nn07819303\tblack or white seeds ground to make mustard pastes or powders\nn07819480\tpungent powder or paste prepared from ground mustard seeds\nn07819682\tvery hot prepared mustard\nn07819769\tflowers and seeds and leaves all used as flavorings\nn07819896\taromatic herb with flat or crinkly leaves that are cut finely and used to garnish food\nn07820036\tleaves sometimes used for salad\nn07820145\textremely pungent leaves used fresh or dried as seasoning for especially meats\nn07820297\tleaves sometimes 
used for flavoring fruit or claret cup but should be used with great caution: can cause irritation like poison ivy\nn07820497\taromatic fresh or dried grey-green leaves used widely as seasoning for meats and fowl and game etc\nn07820683\tfresh leaves used in omelets and fritters and with lamb\nn07820814\teither of two aromatic herbs of the mint family\nn07820960\therb with delicately flavored leaves with many uses\nn07821107\tresinous leaves used in stews and stuffings and meat loaf\nn07821260\tfragrant dark green leaves used to flavor May wine\nn07821404\tfresh ferny leaves and green seeds used as garnish in salads and cold vegetables; dried seeds used in confectionery and liqueurs\nn07821610\tfresh leaves (or leaves preserved in vinegar) used as seasoning\nn07821758\tleaves can be used as seasoning for almost any meat and stews and stuffings and vegetables\nn07821919\tground dried rhizome of the turmeric plant used as seasoning\nn07822053\tpickled flower buds used as a pungent relish in various dishes and sauces\nn07822197\tthick spicy sauce made from tomatoes\nn07822323\taromatic seeds used as seasoning like cinnamon and cloves especially in pickles and barbecue sauces\nn07822518\tground pods and seeds of pungent red peppers of the genus Capsicum\nn07822687\tpowder made of ground chili peppers mixed with e.g. 
cumin and garlic and oregano\nn07822845\ttomatoes and onions and peppers (sweet or hot) simmered with vinegar and sugar and various seasonings\nn07823105\ta spicy condiment made of chopped fruits or vegetables cooked in vinegar and sugar with ginger and spices\nn07823280\tpungent bottled sauce for steak\nn07823369\tspicy tomato-based sauce for tacos\nn07823460\tspicy sauce of tomatoes and onions and chili peppers to accompany Mexican foods\nn07823591\tsweetened diluted vinegar with chopped mint leaves\nn07823698\tsauce made of cranberries and sugar\nn07823814\tpungent blend of cumin and ground coriander seed and turmeric and other spices\nn07823951\t(East Indian cookery) a pungent dish of vegetables or meats flavored with curry powder and usually eaten with rice\nn07824191\tcurry made with lamb\nn07824268\ta thick sweet and pungent Chinese condiment\nn07824383\tgrated horseradish root\nn07824502\tmixtures of vinegar or wine and oil with various spices and seasonings; used for soaking foods before cooking\nn07824702\ta mild powdered seasoning made from dried pimientos\nn07824863\ta mild seasoning made from a variety of pimiento grown in Spain\nn07824988\tvegetables (especially cucumbers) preserved in brine or vinegar\nn07825194\tpickle preserved in brine or vinegar flavored with dill seed\nn07825399\tthinly sliced sweet pickles\nn07825496\trelish of chopped (usually sweet) pickles\nn07825597\trelish of chopped pickled cucumbers and green peppers and onion\nn07825717\tpickle cured in brine and preserved in sugar and vinegar\nn07825850\tpuree of stewed apples usually sweetened and spiced\nn07825972\tthin sauce made of fermented soy beans\nn07826091\tvery spicy sauce (trade name Tabasco) made from fully-aged red peppers\nn07826250\tthick concentrated tomato puree\nn07826340\taromatic stems or leaves or roots of Angelica Archangelica\nn07826453\tcandied stalks of the angelica plant\nn07826544\tflavoring made from almonds macerated in 
alcohol\nn07826653\tliquorice-flavored seeds, used medicinally and in cooking and liquors\nn07826930\tanise-scented star-shaped fruit or seed used in Asian cooking and medicine\nn07827130\tberrylike cone of a common juniper; used in making gin\nn07827284\tdried pungent stigmas of the Old World saffron crocus\nn07827410\tsmall oval seeds of the sesame plant\nn07827554\taromatic seeds of the caraway plant; used widely as seasoning\nn07827750\tsmall grey seed of a poppy flower; used whole or ground in baked items\nn07827896\taromatic threadlike foliage of the dill plant used as seasoning\nn07828041\tseed of the dill plant used as seasoning\nn07828156\tseed of the celery plant used as seasoning\nn07828275\ta flavoring made from (or imitating) lemons\nn07828378\twhite crystalline compound used as a food additive to enhance flavor; often used in Chinese cooking\nn07828642\tlong bean-like fruit; seeds are used as flavoring\nn07828987\tsour-tasting liquid produced usually by oxidation of the alcohol in wine or cider and used as a condiment or food preservative\nn07829248\tvinegar made from cider\nn07829331\tvinegar made from wine\nn07829412\tflavorful relish or dressing or topping served as an accompaniment to food\nn07830493\tmade of white sauce and mashed anchovies\nn07830593\ta pungent peppery sauce\nn07830690\tbutter and sugar creamed together with brandy or other flavoring and served with rich puddings\nn07830841\tcreamy white sauce with horseradish and mustard\nn07830986\tsauce for pasta; contains mushrooms and ham and chopped vegetables and beef and tomato paste\nn07831146\tsauce for pasta; contains eggs and bacon or ham and grated cheese\nn07831267\tsauce made with a puree of tomatoes (or strained tomatoes) with savory vegetables and other seasonings; can be used on pasta\nn07831450\tmayonnaise with chopped pickles and sometimes capers and shallots and parsley and hard-cooked egg; sauce for seafood especially fried fish\nn07831663\twhite or veloute sauce with wine 
and stock variously seasoned with onions and herbs; for fish or meat\nn07831821\tbrown sauce with mushrooms and red wine or Madeira\nn07831955\tcreamy white sauce made with bread instead of flour and seasoned with cloves and onion\nn07832099\tfor Chinese dishes: plum preserves and chutney\nn07832202\tfor Chinese dishes: peach preserves and chutney\nn07832307\tfor Chinese dishes: apricot preserves and chutney\nn07832416\ta sauce typically served with pasta; contains crushed basil leaves and garlic and pine nuts and Parmesan cheese in olive oil\nn07832592\tveloute sauce seasoned with chopped chervil, chives, tarragon, shallots and capers\nn07832741\ta mayonnaise sauce flavored with herbs and mustard and capers; served with e.g. salad and cold meat\nn07832902\tsavory dressings for salads; basically of two kinds: either the thin French or vinaigrette type or the creamy mayonnaise type\nn07833333\tmayonnaise and heavy cream combined with chopped green pepper and green onion seasoned with chili sauce and Worcestershire sauce and lemon juice\nn07833535\tcreamy dressing containing crumbled blue cheese\nn07833672\tvinaigrette containing crumbled Roquefort or blue cheese\nn07833816\toil and vinegar with mustard and garlic\nn07833951\tvinaigrette with chili sauce and chopped watercress\nn07834065\tvinaigrette and mashed anchovies\nn07834160\ta vinaigrette with garlic and herbs: oregano and basil and dill\nn07834286\thalf mayonnaise and half vinaigrette seasoned with minced garlic and mashed anchovies and grated Parmesan cheese; especially good for combination salads\nn07834507\tegg yolks and oil and vinegar\nn07834618\tmayonnaise with tarragon or dill and chopped watercress and spinach or cucumber\nn07834774\tgarlic mayonnaise\nn07834872\tmayonnaise with horseradish grated onion and chili sauce or catsup; sometimes with caviar added\nn07835051\ta creamy salad dressing resembling mayonnaise\nn07835173\tmayonnaise with chili sauce or catsup and minced olives and peppers and 
hard-cooked egg\nn07835331\tspicy sweet and sour sauce usually based on catsup or chili sauce\nn07835457\teggs and butter with lemon juice\nn07835547\ta sauce like hollandaise but made with white wine and tarragon and shallots instead of lemon juice\nn07835701\tbutter creamed with white wine and shallots and parsley\nn07835823\tbrown sauce with beef marrow and red wine\nn07835921\treduced red wine with onions and parsley and thyme and butter\nn07836077\tbouillon or beef stock thickened with butter and flour roux and variously seasoned with herbs or Worcestershire etc.\nn07836269\tbrown sauce with tomatoes and a caramelized mixture of minced carrots and onions and celery seasoned with Madeira\nn07836456\ta sauce based on soy sauce\nn07836600\ta white sauce of fat, broth, and vegetables (used especially with braised meat)\nn07836731\twhite sauce with grated cheese\nn07836838\tsauce made with unsweetened chocolate or cocoa and sugar and water\nn07837002\tthick chocolate sauce served hot\nn07837110\tusually catsup with horseradish and lemon juice\nn07837234\tbutter creamed with parsley and tarragon and beef extract\nn07837362\tmilk thickened with a butter and flour roux\nn07837545\twhite sauce made with cream\nn07837630\tonion-flavored creamy cheese sauce with egg yolk and grated cheese\nn07837755\tsauce Espagnole with extra beef stock simmered down and seasoned with dry wine or sherry\nn07837912\tthe seasoned but not thickened juices that drip from cooking meats; often a little water is added\nn07838073\ta sauce made by adding stock, flour, or other ingredients to the juice and fat that drips from cooking meats\nn07838233\tany of numerous sauces for spaghetti or other kinds of pasta\nn07838441\tsauce for pasta; contains tomatoes and garlic and herbs\nn07838551\tspicy sauce often containing chocolate\nn07838659\tbrown sauce and tomato puree with onions and mushrooms and dry white wine\nn07838811\tbrown sauce and sauteed mushrooms\nn07838905\tsauce of prepared mustard 
thinned with vinegar and vegetable oil with sugar and seasonings\nn07839055\twhite sauce with whipping cream and shrimp butter\nn07839172\tveloute sauce with sauteed chopped onion and paprika and cream\nn07839312\tfor venison: brown sauce with sauteed vegetables and trimmings and marinade and plenty of pepper\nn07839478\ta mixture of fat and flour heated and used as a basis for sauces\nn07839593\tveloute or brown sauce with sauteed chopped onion and dry white wine and sour cream\nn07839730\tveloute sauce with sauteed chopped onions and whipping cream\nn07839864\tbrown sauce with sauteed chopped onions and parsley and dry white wine or vinegar\nn07840027\twhite sauce made with stock instead of milk\nn07840124\tegg-thickened veloute\nn07840219\tallemande sauce with capers\nn07840304\tallemande sauce with chopped parsley\nn07840395\tallemande sauce with curry powder and coconut milk instead of stock\nn07840520\ta savory sauce of vinegar and soy sauce and spices\nn07840672\twhite liquid obtained from compressing fresh coconut meat\nn07840804\toval reproductive body of a fowl (especially a hen) used as food\nn07841037\tthe white part of an egg; the nutritive and protective gelatinous substance surrounding the yolk consisting mainly of albumin dissolved in water\nn07841345\tthe yellow spherical part of an egg that is surrounded by the albumen\nn07841495\tegg cooked briefly in the shell in gently boiling water\nn07841639\tan egg boiled gently until both the white and the yolk solidify\nn07841800\ta colored hard-boiled egg used to celebrate Easter\nn07841907\tan egg-shaped candy used to celebrate Easter\nn07842044\tegg-shaped chocolate candy\nn07842130\tegg-shaped candy\nn07842202\tegg cooked in gently boiling water\nn07842308\teggs beaten and cooked to a soft firm consistency while stirring\nn07842433\thalved hard-cooked egg with the yolk mashed with mayonnaise and seasonings and returned to the white\nn07842605\tegg cooked individually in cream or butter in a small 
ramekin\nn07842753\tbeaten eggs or an egg mixture cooked until just set; may be folded around e.g. ham or cheese or jelly\nn07842972\teggs beaten with milk or cream and cooked until set\nn07843117\tomelet cooked quickly and slid onto a plate\nn07843220\tsouffle-like omelet made by beating and adding the whites separately\nn07843348\ta firm omelet that has diced ham and peppers and onions\nn07843464\tlight fluffy dish of egg yolks and stiffly beaten egg whites mixed with e.g. cheese or fish or fruit\nn07843636\teggs cooked by sauteing in oil or butter; sometimes turned and cooked on both sides\nn07843775\tmilk and butter and cheese\nn07844042\ta white nutritious liquid secreted by mammals and used as food by human beings\nn07844604\tany of several nutritive milklike liquids\nn07844786\tmilk that has turned sour\nn07844867\ta milk substitute containing soybean flour and water; used in some infant formulas and in making tofu\nn07845087\ta liquid food for infants\nn07845166\tmilk that has been exposed briefly to high temperatures to destroy microorganisms and prevent fermentation\nn07845335\tmilk obtained from dairy cows\nn07845421\tthe milk of a yak\nn07845495\tthe milk of a goat\nn07845571\tmilk fermented by bacteria; used to treat gastrointestinal disorders\nn07845702\tunpasteurized milk\nn07845775\tmilk heated almost to boiling\nn07845863\tmilk with the fat particles broken up and dispersed uniformly so the cream will not rise\nn07846014\tmilk from dairies regulated by an authorized medical milk commission\nn07846143\tdehydrated milk\nn07846274\tdehydrated skimmed milk\nn07846359\tmilk concentrated by evaporation\nn07846471\tsweetened evaporated milk\nn07846557\tmilk from which the cream has been skimmed\nn07846688\tmilk from which some of the cream has been removed\nn07846802\tmilk from which no constituent (such as fat) has been removed\nn07846938\tmilk from which some of the cream has been removed\nn07847047\tresidue from making butter from sour raw milk; or 
pasteurized milk curdled by adding a culture\nn07847198\tthe part of milk containing the butterfat\nn07847453\tthick cream made from scalded milk\nn07847585\tcream with a fat content of 48% or more\nn07847706\thalf milk and half light cream; contains 10% to 18% butterfat\nn07847827\tcontains more than 36% butterfat\nn07847917\tcream that has at least 18% butterfat\nn07848093\tartificially soured light cream\nn07848196\tcream that has enough butterfat (30% to 36%) to be whipped\nn07848338\tan edible emulsion of fat globules made by churning milk or cream; for cooking and table use\nn07848771\tbutter made clear by heating and removing the sediment of milk solids\nn07848936\tclarified butter used in Indian cookery\nn07849026\tclarified butter browned slowly and seasoned with vinegar or lemon juice and capers\nn07849186\tclarified butter browned slowly and seasoned with lemon juice and parsley\nn07849336\ta custard-like food made from curdled milk\nn07849506\tyogurt with sweetened blueberries or blueberry jam\nn07849619\tan Indian side dish of yogurt and chopped cucumbers and spices\nn07849733\twatery part of milk produced when raw milk sours and coagulates\nn07849912\tcoagulated milk; used to make cheese\nn07850083\ta coagulated liquid resembling milk curd\nn07850219\traw milk that has soured and thickened\nn07850329\ta solid food prepared from the pressed curd of milk\nn07851054\t(usually plural) a part of a fruit or vegetable that is pared or cut off; especially the skin or peel\nn07851298\tsoft unripened cheese made of sweet milk and cream\nn07851443\tfresh soft French cheese containing at least 60% fat\nn07851554\tsoft mild Italian cream cheese\nn07851641\tfresh soft French cheese containing at least 72% fat\nn07851767\tmild white cheese made from curds of soured skim milk\nn07851926\tmade by blending several lots of cheese\nn07852045\tcheese containing a blue mold\nn07852229\tEnglish blue cheese\nn07852302\tFrench blue cheese\nn07852376\tItalian blue 
cheese\nn07852452\tblue cheese of Denmark\nn07852532\tblue cheese of Bavaria\nn07852614\tsoft creamy white cheese; milder than Camembert\nn07852712\tsemisoft sweet American cheese from whole milk in a brick form\nn07852833\trich soft creamy French cheese\nn07852919\thard smooth-textured cheese; originally made in Cheddar in southwestern England\nn07853125\tinformal names for American cheddar\nn07853232\ta mild yellow English cheese with a crumbly texture\nn07853345\ta smooth firm mild orange-red cheese\nn07853445\tmild yellow Dutch cheese made in balls encased in a red covering\nn07853560\tmade from goats' milk\nn07853648\tmild cream-colored Dutch cheese shaped in balls\nn07853762\thard or semihard cheese grated\nn07853852\tany cheese originally molded by hand\nn07853946\ta soft cheese with a strong odor and flavor\nn07854066\ta soft white cheese with a very strong pungent odor and flavor\nn07854184\tmild white Italian cheese\nn07854266\tsemisoft pale-yellow cheese\nn07854348\thard dry sharp-flavored Italian cheese; often grated\nn07854455\tfresh unripened cheese of a smooth texture made from pasteurized milk, a starter, and rennet\nn07854614\tsoft Italian cheese like cottage cheese\nn07854707\tcheese formed in long strings twisted together\nn07854813\thard pale yellow cheese with many holes from Switzerland\nn07854982\tSwiss cheese with large holes\nn07855105\tSwiss cheese with small holes\nn07855188\ta hard green Swiss cheese made with skim-milk curd and flavored with clover\nn07855317\ttrademark: soft processed American cheese\nn07855413\tground nuts blended with a little butter\nn07855510\ta spread made from ground peanuts\nn07855603\ta very sweet white spread resembling marshmallow candy\nn07855721\tbutter blended with minced onion\nn07855812\tbutter blended with mashed pimento\nn07855907\tbutter blended with chopped shrimp or seasoned with essence from shrimp shells\nn07856045\tbutter blended with chopped lobster or seasoned with essence from lobster 
shells\nn07856186\tbutter made from yaks' milk\nn07856270\ta tasty mixture to be spread on bread or crackers or used in preparing other dishes\nn07856756\tspread made of cheese mixed with butter or cream or cream cheese and seasonings\nn07856895\tbutter blended with mashed anchovies\nn07856992\ta paste of fish or shellfish\nn07857076\tbutter seasoned with mashed garlic\nn07857170\ta thick paste made from fermented soybeans and barley or rice malt; used in Japanese cooking to make soups or sauces\nn07857356\tthe thick green root of the wasabi plant that the Japanese use in cooking and that tastes like strong horseradish; in powder or paste form it is often eaten with raw fish\nn07857598\tfor preparing snails: butter seasoned with shallots and garlic and parsley\nn07857731\ta thick spread made from mashed chickpeas, tahini, lemon juice and garlic; used especially as a dip for pita; originated in the Middle East\nn07857959\tliver or meat or fowl finely minced or ground and variously seasoned\nn07858114\ta pate made from duck liver\nn07858197\ta pate made from goose liver (marinated in Cognac) and truffles\nn07858336\ta spread consisting of capers and black olives and anchovies made into a puree with olive oil\nn07858484\ta thick Middle Eastern paste made from ground sesame seeds\nn07858595\tsomething added to foods to make them taste sweeter\nn07858841\tan artificial sweetener made from aspartic acid; used as a calorie-free sweetener\nn07858978\ta sweet yellow liquid produced by bees\nn07859142\ta crystalline substance 500 times sweeter than sugar; used as a calorie-free sweetener\nn07859284\ta white crystalline carbohydrate used as a sweetener and preservative\nn07859583\ta thick sweet sticky liquid\nn07859796\tsugar and water and sometimes corn syrup boiled together; used as sweetening especially in drinks\nn07859951\tthick dark syrup produced by boiling down juice from sugar cane; especially during sugar refining\nn07860103\tmade from juice of sweet 
sorghum\nn07860208\ta pale cane syrup\nn07860331\tthin syrup made from pomegranate juice; used in mixed drinks\nn07860447\tmade by concentrating sap from sugar maples\nn07860548\tsyrup prepared from corn\nn07860629\t(Old Testament) food that God gave the Israelites during the Exodus\nn07860805\ta liquid or semiliquid mixture, as of flour, eggs, and milk, used in cooking\nn07860988\ta flour mixture stiff enough to knead or roll\nn07861158\tany of various doughs for bread\nn07861247\tbatter for making pancakes\nn07861334\tbatter for making fritters\nn07861557\tchicken and onions and mushrooms braised in red wine and seasonings\nn07861681\tchicken cooked in a sauce made with tomatoes, garlic, and olive oil\nn07861813\trice and chicken cooked together with or without other ingredients and variously seasoned\nn07861983\ta Cantonese dish of chicken and sauteed vegetables\nn07862095\trice and chicken cooked together Spanish style; highly seasoned especially with saffron\nn07862244\teggs (fried or scrambled) served with bacon\nn07862348\tbaked or roasted with a spicy sauce\nn07862461\tbeef and mushrooms and onions stewed in red wine and seasonings\nn07862611\trare-roasted beef tenderloin coated with mushroom paste in puff pastry\nn07862770\ta Russian dish made with patties of ground meat (mixed with onions and bread and milk) and served with a sauce of sour cream\nn07862946\tcorned beef simmered with onions and cabbage and usually other vegetables\nn07863107\tdried navy beans baked slowly with molasses and salt pork\nn07863229\tleftover cabbage fried with cooked potatoes and sometimes meat\nn07863374\ta dish that contains pasta as its main ingredient\nn07863547\ttubular pasta filled with meat or cheese\nn07863644\tbeef stewed in beer seasoned with garlic and served with boiled potatoes\nn07863802\tpuffy dish of cheese and eggs (whites beaten separately) and white sauce\nn07863935\tbraised chicken with onions and mushrooms in a wine and tomato sauce\nn07864065\tthin slices 
of chicken stuffed with cheese and ham and then sauteed\nn07864198\tchicken fried than oven-baked and served with milk gravy\nn07864317\tchicken simmered in broth with onions and paprika then mixed with sour cream\nn07864475\tchicken prepared in a cream sauce with mushrooms and served over pasta; usually topped with cheese\nn07864638\ta pasta dish with cream sauce and mushrooms\nn07864756\tpounded chicken cutlets rolled around butter (that has been seasoned with herbs) and then covered with crumbs and fried\nn07864934\tground beef and chili peppers or chili powder often with tomatoes and kidney beans\nn07865105\ta hotdog with chili con carne on it\nn07865196\tmeat or fish stir-fried with vegetables (e.g., celery, onions, peppers or bean sprouts) seasoned with ginger and garlic and soy sauce; served with rice; created in the United States and frequently served in Chinese restaurants there\nn07865484\tchop suey served with fried noodles\nn07865575\tusually made of flaked salt cod and mashed potatoes\nn07865700\tseafood served in a scallop shell\nn07865788\tscallops in white wine sauce served in scallop shells\nn07866015\tminced cooked meats (or vegetables) in thick white sauce; breaded and deep-fried\nn07866151\ta dish of minced meat topped with mashed potatoes\nn07866277\tminced cooked meat or fish coated in egg and breadcrumbs and fried in deep fat\nn07866409\twell-seasoned rice (with nuts or currants or minced lamb) simmered or braised in stock\nn07866571\tomelet containing onions and celery and chopped meat or fish\nn07866723\tminced vegetables and meat wrapped in a pancake and fried\nn07866868\ttoasted English muffin topped with ham and a poached egg (or an oyster) and hollandaise sauce\nn07867021\ttortilla with meat filling baked in tomato sauce seasoned with chili\nn07867164\tsmall croquette of mashed chick peas or fava beans seasoned with sesame seeds\nn07867324\tfried fish and french-fried potatoes\nn07867421\thot cheese or chocolate melted to the 
consistency of a sauce into which bread or fruits are dipped\nn07867616\tfondue made of cheese melted in wine for dipping bread and sometimes fruits\nn07867751\tfondue made of chocolate melted with milk or cream for dipping fruits\nn07867883\tcubes of meat or seafood cooked in hot oil and then dipped in any of various sauces\nn07868045\tcubes of beef cooked in hot oil and then dipped in various tasty sauces\nn07868200\tbread slice dipped in egg and milk and fried; topped with sugar or fruit or syrup\nn07868340\tboiled rice mixed with scallions and minced pork or shrimp and quickly scrambled with eggs\nn07868508\tItalian omelet with diced vegetables and meats; cooked until bottom is set then inverted into another pan to cook the top\nn07868684\thind legs of frogs used as food; resemble chicken and cooked as chicken\nn07868830\tboned poultry stuffed then cooked and covered with aspic; served cold\nn07868955\twell-seasoned balls of ground fish and eggs and crushed crumbs simmered in fish stock\nn07869111\tmade of sheep's or calf's viscera minced with oatmeal and suet and onions and boiled in the animal's stomach\nn07869291\teggs (scrambled or fried) served with ham\nn07869391\tchopped meat mixed with potatoes and browned\nn07869522\thash made with corned beef\nn07869611\tspicy Creole dish of rice and ham, sausage, chicken, or shellfish with tomatoes, peppers, onions, and celery\nn07869775\tcubes of meat marinated and cooked on a skewer usually with vegetables\nn07869937\ta dish of rice and hard-boiled eggs and cooked flaked fish\nn07870069\tmade of lamb\nn07870167\tbaked dish of layers of lasagna pasta with sauce and cheese and meat or vegetables\nn07870313\tseafood in Newburg sauce served on toast or rice\nn07870478\tlobster in Newburg sauce served on buttered toast or rice\nn07870620\tshrimp in Newburg sauce usually served in a rice ring\nn07870734\tlobster butter and cream and egg yolks seasoned with onions and sherry or Madeira\nn07870894\tdiced lobster mixed with 
Mornay sauce placed back in the shell and sprinkled with grated cheese and browned\nn07871065\tdried cod soaked in a lye solution before boiling to give it a gelatinous consistency\nn07871234\tmacaroni prepared in a cheese sauce\nn07871335\tmixed diced fruits or vegetables; hot or cold\nn07871436\tground meat formed into a ball and fried or simmered in broth\nn07871588\tmeat patties rolled in rice and simmered in a tomato sauce\nn07871720\tmeatballs simmered in stock\nn07871810\ta baked loaf of ground meat\nn07872593\tcasserole of eggplant and ground lamb with onion and tomatoes bound with white sauce and beaten eggs\nn07872748\tsliced veal knuckle or shin bone cooked with olive oil and wine and tomatoes and served with rice or vegetables\nn07873057\tvery tender and very nutritious tissue from marrowbones\nn07873198\ta dish of roast pheasant served in a manner characteristic of expensive restaurants\nn07873348\tsmall frankfurters wrapped in biscuit dough and baked\nn07873464\trice cooked in well-seasoned broth with onions or celery and usually poultry or game or shellfish and sometimes tomatoes\nn07873679\tpilaf made with bulgur wheat instead of rice and usually without meat\nn07873807\tItalian open pie made of thin bread dough spread with a spiced mixture of e.g. 
tomato sauce and cheese\nn07874063\ttomato and cheese pizza with sausage\nn07874159\ttomato and cheese pizza with pepperoni\nn07874259\tpizza with lots of cheese\nn07874343\ttomato and cheese pizza with anchovies\nn07874441\tpizza made with a thick crust\nn07874531\tHawaiian dish of taro root pounded to a paste and often allowed to ferment\nn07874674\tdried beans cooked with pork and tomato sauce\nn07874780\tsoft food made by boiling oatmeal or other meal or legumes in water or milk until thick\nn07874995\tporridge made of rolled oats\nn07875086\tthick gruel\nn07875152\tdeep-dish meat and vegetable pie or a meat stew with dumplings\nn07875267\tdish originating in Indonesia; a wide variety of foods and sauces are served with rice\nn07875436\trice cooked with broth and sprinkled with grated cheese\nn07875560\ta dish consisting of a slice of meat that is rolled around a filling and cooked\nn07875693\tflaked fish baked in a loaf with bread crumbs and various seasonings\nn07875835\tfish loaf made with flaked salmon\nn07875926\tground beef patty usually with a sauce\nn07876026\tpot roast marinated several days in seasoned vinegar before cooking; usually served with potato dumplings\nn07876189\tshredded cabbage fermented in brine\nn07876281\tsauteed cutlets (usually veal or poultry) that have been pounded thin and coated with flour\nn07876460\tthin sauteed cutlets of veal\nn07876550\tlarge shrimp sauteed in oil or butter and garlic\nn07876651\thard-cooked egg encased in sausage meat then breaded and deep-fried\nn07876775\tcreamy scrambled eggs on toast spread with anchovy paste\nn07876893\tscraps of meat (usually pork) boiled with cornmeal and shaped into loaves for slicing and frying\nn07877187\tspaghetti with meatballs in a tomato sauce\nn07877299\tspicy rice with tomatoes and onions and green peppers\nn07877675\tground beef mixed with raw egg and e.g. 
onions and capers and anchovies; eaten raw\nn07877849\tstrips of steak sauteed with green peppers and onions\nn07877961\tsteak covered with crushed peppercorns pan-broiled and served with brandy-and-butter sauce\nn07878145\tsauteed strips of beef and mushrooms in sour cream sauce served with noodles\nn07878283\tparboiled head of cabbage scooped out and filled with a hash of chopped e.g. beef or ham and baked; served with tomato or cheese sauce\nn07878479\t(Judaism) roasted fowl intestines with a seasoned filling of matzo meal and suet\nn07878647\tparboiled green peppers stuffed usually with rice and meat and baked briefly\nn07878785\ttomato cases filled with various mixtures and baked briefly\nn07878926\ttomato cases filled with various salad mixtures and served cold\nn07879072\tfresh corn and lima beans with butter or cream\nn07879174\tthin beef strips (or chicken or pork) cooked briefly at the table with onions and greens and soy sauce\nn07879350\tvery thinly sliced raw fish\nn07879450\trice (with raw fish) wrapped in seaweed\nn07879560\tsteak braised in tomato and onion mixture\nn07879659\tcorn and cornmeal dough stuffed with a meat mixture then wrapped in corn husks and steamed\nn07879821\ta meat mixture covered with cornbread topping that resembles a Mexican dish\nn07879953\tvegetables and seafood dipped in batter and deep-fried\nn07880080\tbeef or chicken or seafood marinated in spicy soy sauce and grilled or broiled\nn07880213\ta pate or fancy meatloaf baked in an earthenware casserole\nn07880325\tcheese melted with ale or beer served over toast\nn07880458\tdeep-fried breaded veal cutlets\nn07880751\ta tortilla rolled cupped around a filling\nn07880880\ta taco with a chicken filling\nn07880968\ta flour tortilla folded around a filling\nn07881117\ta burrito with a beef filling\nn07881205\ta tortilla that is filled with cheese and heated\nn07881404\ta flat tortilla with various fillings piled on it\nn07881525\ta flat tortilla topped with refried 
beans\nn07881625\tdried beans cooked and mashed and then fried in lard with various seasonings\nn07881800\tany liquid suitable for drinking\nn07882420\tany thin watery drink\nn07882497\tany foodstuff made by combining different ingredients\nn07882886\ta commercially prepared mixture of dry ingredients\nn07883031\ta food mixture used to fill pastry or sandwiches etc.\nn07883156\ta sweet filling made of prunes or apricots\nn07883251\ta medicinal or magical or poisonous beverage\nn07883384\ta substance believed to cure all ills\nn07883510\ta hypothetical substance believed to maintain life indefinitely; once sought by alchemists\nn07883661\ta drink credited with magical power; can make the one who takes it love the one who gave it\nn07884567\ta liquor or brew containing alcohol as the active agent\nn07885705\ta mixture containing half alcohol by volume at 60 degrees Fahrenheit\nn07886057\tan alcoholic beverage (especially beer) made at home\nn07886176\tan illicitly distilled (and usually inferior) alcoholic liquor\nn07886317\tan alcoholic drink made from the aromatic roots of the kava shrub\nn07886463\talcoholic beverage taken before a meal as an appetizer\nn07886572\tdrink made by steeping and boiling and fermenting rather than distilling\nn07886849\ta general name for alcoholic beverages made by fermenting a cereal (or mixture of cereals) flavored with hops\nn07887099\tbeer drawn from a keg\nn07887192\ta dysphemism for beer (especially for lager that effervesces)\nn07887304\ta dark lager produced in Munich since the 10th century; has a distinctive taste of malt\nn07887461\ta very strong lager traditionally brewed in the fall and aged through the winter for consumption in the spring\nn07887634\ta general term for beer made with bottom fermenting yeast (usually by decoction mashing); originally it was brewed in March or April and matured until September\nn07887967\tlager with reduced alcohol content\nn07888058\ta strong lager made originally in Germany for the 
Oktoberfest celebration; sweet and copper-colored\nn07888229\ta pale lager with strong flavor of hops; first brewed in the Bohemian town of Pilsen\nn07888378\tunlicensed drinking establishment\nn07888465\ta general name for beers made from wheat by top fermentation; usually very pale and cloudy and effervescent\nn07888816\ta German wheat beer of bock strength\nn07888909\ta cereal grain (usually barley) that is kiln-dried after having been germinated by soaking in water; used especially in brewing and distilling\nn07889193\tunfermented or fermenting malt\nn07889274\ta lager of high alcohol content; by law it is considered too alcoholic to be sold as lager or beer\nn07889510\ta general name for beer made with a top fermenting yeast; in some of the United States an ale is (by law) a brew of more than 4% alcohol by volume\nn07889814\tEnglish term for a dry sharp-tasting ale with strong flavor of hops (usually on draft)\nn07889990\ta strong dark English ale\nn07890068\tan amber colored ale brewed with pale malts; similar to bitter but drier and lighter\nn07890226\ta very dark sweet ale brewed from roasted unmalted barley\nn07890352\ta strong very dark heavy-bodied ale made from pale malt and roasted unmalted barley and (often) caramel malt with hops\nn07890540\ta kind of bitter stout\nn07890617\tfermented beverage resembling beer but made from rye or barley\nn07890750\tmade of fermented honey and water\nn07890890\tspiced or medicated mead\nn07890970\thoney diluted in water; becomes mead when fermented\nn07891095\twine mixed with honey\nn07891189\tdrink that resembles beer but with less than 1/2 percent alcohol\nn07891309\tcarbonated slightly alcoholic drink flavored with fermented ginger\nn07891433\tJapanese alcoholic beverage made from fermented rice; usually served hot\nn07891726\tfermented juice (of grapes especially)\nn07892418\ta season's yield of wine from a vineyard\nn07892512\twine having a red color derived from skins of dark-colored grapes\nn07892813\tpale 
yellowish wine made from white grapes or red grapes with skins removed before fermentation\nn07893253\tpinkish table wine from red grapes whose skins were removed after fermentation began\nn07893425\tused in a communion service\nn07893528\teffervescent wine\nn07893642\ta white sparkling wine either produced in Champagne or resembling that produced there\nn07893792\tpink sparkling wine originally from Germany\nn07893891\tred table wine from the Burgundy region of France (or any similar wine made elsewhere)\nn07894102\tdry fruity light red wine drunk within a few months after it is made; from the Beaujolais district in southeastern France\nn07894298\tred Bordeaux wine from the Medoc district of southwestern France\nn07894451\ta sweet white wine from the Canary Islands\nn07894551\tdry white table wine of Chablis, France or a wine resembling it\nn07894703\ta white Burgundy wine\nn07894799\tdry white table wine resembling Chablis but made from Chardonnay grapes\nn07894965\tdry red California table wine made from purple Pinot grapes\nn07895100\tdry white California table wine made from white Pinot grapes\nn07895237\tany of several red or white wines produced around Bordeaux, France or wines resembling them\nn07895435\tdry red Bordeaux or Bordeaux-like wine\nn07895595\tdry red Italian table wine from the Chianti region of Tuscany\nn07895710\tsuperior Bordeaux type of red wine\nn07895839\tdry red wine made from a grape grown widely in Bordeaux and California\nn07895962\ta California wine\nn07896060\tany of various wines produced in California\nn07896165\ta wine from southeastern France on the Mediterranean coast\nn07896287\tstill sweet wine often served with dessert or after a meal\nn07896422\t(trademark) a sweet aromatic French wine (red or white) used chiefly as an aperitif\nn07896560\tinexpensive wine sold in large bottles or jugs\nn07896661\tfine Burgundy wine usually white and dry\nn07896765\tGerman white wine from the Moselle valley or a similar wine made 
elsewhere\nn07896893\tdry white wine from the Loire valley in France\nn07896994\ta cheap wine of inferior quality\nn07897116\tGreek wine flavored with resin\nn07897200\tany of several white wines from the Rhine River valley in Germany (`hock' is British usage)\nn07897438\tfragrant dry or sweet white wine from the Rhine valley or a similar wine from California\nn07897600\ta sweetened Rhenish wine (especially one from Hesse in western Germany)\nn07897750\tany of various wines from the Rhone River valley in France\nn07897865\tdry red table wine from the Rioja region of northern Spain\nn07897975\tany of various light dry strong white wine from Spain and Canary Islands (including sherry)\nn07898117\tfull-bodied red wine from around the town of Saint Emilion in Bordeaux\nn07898247\tdry white Italian wine from Verona\nn07898333\tdry fruity red wine from California\nn07898443\tsemisweet golden-colored table or dessert wine from around Bordeaux in France; similar wine from California\nn07898617\tsweet wine from grapes partially sun-dried on the vine or on straw mats\nn07898745\twine containing not more than 14 percent alcohol usually served with a meal\nn07898895\tHungarian wine made from Tokay grapes\nn07899003\tcheap French table wine of unspecified origin\nn07899108\tany of several white wines flavored with aromatic herbs; used as aperitifs or in mixed drinks\nn07899292\tsweet dark amber variety\nn07899434\tdry pale amber variety\nn07899533\tmade in California and the Loire valley in France\nn07899660\ta dry white Italian wine made from Verdicchio grapes\nn07899769\ta dry white French wine (either still or sparkling) made in the Loire valley\nn07899899\ta sweet white French wine\nn07899976\ta wine that is a blend of several varieties of grapes with no one grape predominating; a wine that does not carry the name of any specific grape\nn07900225\ta wine made principally from one grape and carrying the name of that grape\nn07900406\twine to which alcohol (usually grape 
brandy) has been added\nn07900616\tan amber dessert wine from the Madeira Islands\nn07900734\tsweet Madeira wine\nn07900825\tsweet dark-red dessert wine originally from Portugal\nn07900958\tdry to sweet amber wine from the Jerez region of southern Spain or similar wines produced elsewhere; usually drunk as an aperitif\nn07901355\tdark sweet or semisweet dessert wine from Sicily\nn07901457\twine from muscat grapes\nn07901587\tan alcoholic beverage that is distilled rather than fermented\nn07902121\tnonflavored alcohol of 95 percent or 190 proof used for blending with straight whiskies and in making gin and liqueurs\nn07902336\tstrong distilled liquor or brandy\nn07902443\tstrong coarse brandy\nn07902520\twhiskey illegally distilled from a corn mash\nn07902698\thomemade gin especially that made illegally\nn07902799\tScandinavian liquor usually flavored with caraway seeds\nn07902937\tany of various strong liquors distilled from the fermented sap of toddy palms or from fermented molasses\nn07903101\talcoholic liquor flavored with bitter herbs and roots\nn07903208\tdistilled from wine or fermented fruit juice\nn07903543\tdistilled from hard cider\nn07903643\tdry apple brandy made in Normandy\nn07903731\tdry brandy distilled in the Armagnac district of France\nn07903841\thigh quality grape brandy distilled in the Cognac district of France\nn07903962\tItalian brandy made from residue of grapes after pressing\nn07904072\tfrom fermented juice of black morello cherries\nn07904293\ta colorless plum brandy popular in the Balkans\nn07904395\tstrong liquor flavored with juniper berries\nn07904637\tgin flavored with sloes (fruit of the blackthorn)\nn07904760\tgin made in the Netherlands\nn07904865\trum cut with water\nn07904934\ta Greek liquor flavored with anise\nn07905038\tliquor distilled from fermented molasses\nn07905296\tdark rum from Guyana\nn07905386\theavy pungent rum from Jamaica\nn07905474\tany of various strong liquors especially a Dutch spirit distilled from 
potatoes\nn07905618\tfermented Mexican drink from juice of various agave plants especially the maguey\nn07905770\ta colorless Mexican liquor distilled from fermented juices of certain desert plants of the genus Agavaceae (especially the century plant)\nn07905979\tMexican liquor made from fermented juices of an agave plant\nn07906111\tunaged colorless liquor originating in Russia\nn07906284\ta liquor made from fermented mash of grain\nn07906572\tmixture of two or more whiskeys or of a whiskey and neutral spirits\nn07906718\twhiskey distilled from a mash of corn and malt and rye and aged in charred oak barrels\nn07906877\twhiskey distilled from a mash of not less than 80 percent corn\nn07907037\tany strong spirits (such as strong whisky or rum)\nn07907161\twhiskey made in Ireland chiefly from barley\nn07907342\tunlawfully distilled Irish whiskey\nn07907429\twhiskey distilled from rye or rye and malt\nn07907548\twhiskey distilled in Scotland; especially whiskey made from malted barley in a pot still\nn07907831\tany whiskey distilled from sour mash\nn07907943\tstrong highly flavored sweet liquor usually drunk after a meal\nn07908411\tstrong green liqueur flavored with wormwood and anise\nn07908567\tan Italian almond liqueur\nn07908647\tliquorice-flavored usually colorless sweet liqueur made from aniseed\nn07908812\ta French liqueur originally made by Benedictine monks\nn07908923\taromatic green or yellow liqueur flavored with orange peel and hyssop and peppermint oils; made at monastery near Grenoble, France\nn07909129\tcoffee-flavored liqueur\nn07909231\tsweet liqueur flavored with vanilla and cacao beans\nn07909362\tsweet green or white mint-flavored liqueur\nn07909504\tstrawberry-flavored liqueur\nn07909593\ta sweet Scotch whisky liqueur\nn07909714\tgolden Italian liqueur flavored with herbs\nn07909811\tliqueur flavored with orange\nn07909954\tflavored with sour orange peel\nn07910048\ttype of curacao having higher alcoholic content\nn07910152\tan orange-flavored 
French liqueur\nn07910245\tliqueur flavored with caraway seed or cumin\nn07910379\tdistilled from fermented juice of bitter wild marasca cherries\nn07910538\tsimilar to absinthe but containing no wormwood\nn07910656\t(registered trademark) a liqueur flavored with anise\nn07910799\tsmall drink served after dinner (especially several liqueurs poured carefully so as to remain in separate layers)\nn07910970\tcoffee-flavored liqueur made in Mexico\nn07911061\tsweet liqueur made from wine and brandy flavored with plum or peach or apricot kernels and bitter almonds\nn07911249\tan Italian liqueur made with elderberries and flavored with licorice\nn07911371\tmade of two or more ingredients\nn07911677\ta short mixed drink\nn07912093\tSouth African mixed drink made by mixing ice cream with whisky\nn07912211\ta mixed drink made of alcoholic liquor mixed with water or a carbonated beverage and served in a tall glass\nn07913180\tclub soda or fruit juice used to mix with alcohol\nn07913300\tport wine mulled with oranges and cloves\nn07913393\ta cocktail made with vodka and spicy tomato juice\nn07913537\ta Bloody Mary made without alcohol\nn07913644\ta cocktail made with vodka and beef bouillon or consomme\nn07913774\ttall sweetened iced drink of wine or liquor with fruit\nn07913882\ttall iced drink of liquor (usually gin) with fruit juice\nn07914006\tan iced drink especially white wine and fruit juice\nn07914128\ta drink that refreshes\nn07914271\ta thick smooth drink consisting of fresh fruit pureed with ice cream or yoghurt or milk\nn07914413\ta cocktail made with rum and lime or lemon juice\nn07914586\tdaiquiri with crushed strawberries\nn07914686\ta daiquiri made without alcohol\nn07914777\ta mixed drink made of wine mixed with a sparkling water\nn07914887\thot or cold alcoholic mixed drink containing a beaten egg\nn07914995\ta cocktail made of gin or vodka and lime juice\nn07915094\tgin and quinine water\nn07915213\ta cocktail made of creme de menthe and cream (sometimes 
with creme de cacao)\nn07915366\ta cocktail made of vodka or gin and orange juice and Galliano\nn07915491\tbourbon and sugar and mint over crushed ice\nn07915618\ta cocktail made with whiskey and sweet vermouth with a dash of bitters\nn07915800\ta manhattan cocktail made with Scotch whiskey\nn07915918\ta cocktail made of tequila and triple sec with lime and lemon juice\nn07916041\ta cocktail made of gin (or vodka) with dry vermouth\nn07916183\ta cocktail made of gin and sweet vermouth\nn07916319\tmartini made with vodka rather than gin\nn07916437\ta cocktail made of whiskey and bitters and sugar with fruit slices\nn07916582\ta cocktail made of gin and brandy with lemon juice and grenadine shaken with an egg white and ice\nn07917133\ta cocktail made with bourbon with bitters and Pernod and sugar served with lemon peel\nn07917272\ta cocktail made with vodka and orange juice\nn07917392\ta cocktail made of orange liqueur with lemon juice and brandy\nn07917507\ta highball with Scotch malt whiskey and club soda\nn07917618\ta highball with liquor and water with sugar and lemon or lime juice\nn07917791\ta sling made with brandy\nn07917874\ta sling made with gin\nn07917951\ta sling made with rum\nn07918028\ta cocktail made of a liquor (especially whiskey or gin) mixed with lemon or lime juice and sugar\nn07918193\ta sour made with whiskey\nn07918309\ta cocktail made of made of creme de menthe and brandy\nn07918706\tany of various tall frothy mixed drinks made usually of rum and lime juice and sugar shaken with ice\nn07918879\ta mixed drink made of liquor and water with sugar and spices and served hot\nn07919165\tseveral kinds of rum with fruit juice and usually apricot liqueur\nn07919310\tan effervescent beverage (usually alcoholic)\nn07919441\tsweetened coffee with Irish whiskey and whipped cream\nn07919572\tequal parts of coffee and hot milk\nn07919665\tsmall cup of strong black coffee without milk or cream\nn07919787\tcoffee with the caffeine removed\nn07919894\tcoffee 
made by passing boiling water through a perforated container packed with finely ground coffee\nn07920052\tstrong black coffee brewed by forcing hot water under pressure through finely ground coffee beans\nn07920222\tstrong espresso coffee with a topping of frothed steamed milk\nn07920349\tequal parts of espresso and hot milk topped with cinnamon and nutmeg and usually whipped cream\nn07920540\ta strong sweetened coffee served over ice with cream\nn07920663\tdehydrated coffee that can be made into a drink by adding hot water\nn07920872\ta superior dark coffee made from beans from Arabia\nn07920989\ta flavoring made from coffee mixed with chocolate\nn07921090\ta flavoring made by boiling down the juice of the bitter cassava; used in West Indian cooking\nn07921239\ta drink made from pulverized coffee beans; usually sweetened\nn07921360\tmilk flavored with chocolate syrup\nn07921455\ta beverage made from juice pressed from apples\nn07921615\talcoholic drink from fermented cider; `cider' and `cyder' are European (especially British) usages for the fermented beverage\nn07921834\tstrong cider (as made in western England)\nn07921948\tunfermented cider\nn07922041\tsweet cider heated with spices and citrus fruit\nn07922147\ta fermented and often effervescent beverage made from juice of pears; similar in taste to hard cider\nn07922512\tany alcoholic beverage of inferior quality\nn07922607\tan amount of an alcoholic drink (usually liquor) that is poured or gulped\nn07922764\ta beverage made from cocoa powder and milk and sugar; usually drunk hot\nn07922955\tcocoa of superior quality\nn07923748\tthe liquid part that can be extracted from plant or animal tissue by squeezing or cooking\nn07924033\tdrink produced by squeezing or crushing fruit\nn07924276\tfruit juice especially when undiluted\nn07924366\tthe juice of apples\nn07924443\tthe juice of cranberries (always diluted and sweetened)\nn07924560\tthe juice of grapes\nn07924655\tgrape juice before or during 
fermentation\nn07924747\tthe juice of grapefruits\nn07924834\tbottled or freshly squeezed juice of oranges\nn07924955\torange juice that has been concentrated and frozen\nn07925116\tthe juice of pineapples (usually bottled or canned)\nn07925229\tusually freshly squeezed juice of lemons\nn07925327\tusually freshly squeezed juice of limes\nn07925423\tjuice from papayas\nn07925500\tthe juice of tomatoes (usually bottled or canned)\nn07925608\tusually freshly squeezed juice of carrots\nn07925708\tbrand name for canned mixed vegetable juices\nn07925808\tan alcoholic beverage made from fermented mare's milk; made originally by nomads of central Asia\nn07925966\ta sweetened beverage of diluted fruit juice\nn07926250\tsweetened beverage of diluted lemon juice\nn07926346\tsweetened beverage of lime juice and water\nn07926442\tsweetened beverage of diluted orange juice\nn07926540\tpowder made of dried milk and malted cereals\nn07926785\tSouth American tea-like drink made from leaves of a South American holly called mate\nn07926920\twine heated with sugar and spices and often citrus fruit\nn07927070\twine and hot water with sugar and lemon juice and nutmeg\nn07927197\tnonalcoholic beverage (usually carbonated)\nn07927512\ta sweet drink containing carbonated water and flavoring\nn07927716\tcarbonated drink containing an extract from bark of birch trees\nn07927836\ttart lemon-flavored carbonated drink\nn07927931\tcarbonated drink flavored with extract from kola nuts (`dope' is a southernism in the United States)\nn07928163\tsweet carbonated drink flavored with vanilla\nn07928264\tmade of milk and flavored syrup with soda water\nn07928367\tginger-flavored carbonated drink\nn07928488\torange-flavored carbonated drink\nn07928578\tcarbonated drink with fruit syrup and a little phosphoric acid\nn07928696\tCoca Cola is a trademarked cola\nn07928790\tPepsi Cola is a trademarked cola\nn07928887\tcarbonated drink containing extracts of roots and herbs\nn07928998\tcarbonated drink 
flavored with an extract from sarsaparilla root or with birch oil and sassafras\nn07929172\tlime- or lemon-flavored carbonated water containing quinine\nn07929351\ta seed of the coffee tree; ground to make coffee\nn07929519\ta beverage consisting of an infusion of ground coffee beans\nn07929940\tblack coffee with Cognac and lemon peel and sugar\nn07930062\ta punch made of fruit juices mixed with water or soda water (with or without alcohol)\nn07930205\ta punch made of spirits and milk and sugar and spices\nn07930315\ta mixed drink containing champagne and orange juice\nn07930433\ta mixed drink made of pineapple juice and coconut cream and rum\nn07930554\tan iced mixed drink usually containing alcohol and prepared for multiple servings; normally served in a punch bowl\nn07930864\ta punch served in a pitcher instead of a punch bowl\nn07931001\ta punch containing a sparkling wine\nn07931096\ta punch made of claret and brandy with lemon juice and sugar and sometimes sherry or curacao and fresh fruit\nn07931280\ta punch made of sweetened ale or wine heated with spices and roasted apples; especially at Christmas\nn07931452\ta cocktail made of rum and lime or lemon juice with sugar and sometimes bitters\nn07931612\ta cocktail made with vodka, coffee liqueur, and milk or cream\nn07931733\ta punch made of rum and brandy and water or tea sweetened with sugar syrup\nn07931870\ta punch made of Moselle and sugar and sparkling water or champagne flavored with sweet woodruff\nn07932039\ta punch made of sweetened milk or cream mixed with eggs and usually alcoholic liquor\nn07932323\ta drink resembling beer; made from fermented cassava juice\nn07932454\ta brew made by fermenting molasses and other sugars with the sap of spruce trees (sometimes with malt)\nn07932614\ta mixed drink made of sweetened lime juice and soda water usually with liquor\nn07932762\ta rickey made with gin\nn07932841\tdried leaves of the tea shrub; used to make tea\nn07933154\ta measured amount of tea in a bag 
for an individual serving of tea\nn07933274\ta beverage made by steeping tea leaves in water\nn07933530\ta beverage that resembles tea but is not made from tea leaves\nn07933652\ta beverage for children containing hot water and milk and sugar and a small amount of tea\nn07933799\ta cup of tea\nn07933891\ttea-like drink made of leaves of various herbs\nn07934032\tinfusion of e.g. dried or fresh flowers or leaves\nn07934152\ttea-like drink made from camomile leaves and flowers\nn07934282\tstrong tea served over ice\nn07934373\ttea made by exposing tea leaves steeped in water to the direct rays of the sun; usually served with ice\nn07934530\tfermented tea leaves\nn07934678\tblack tea grown in China\nn07934800\ta fine variety of black tea grown in northern India\nn07934908\ta superior grade of black tea; grown in India and Sri Lanka and Java\nn07935043\ta fine quality of black tea native to China\nn07935152\ttea leaves that have been steamed and dried without fermenting\nn07935288\ta Chinese green tea with twisted leaves\nn07935379\tChinese tea leaves that have been partially fermented before being dried\nn07935504\ta liquid necessary for the life of most animals and plants\nn07935737\tdrinking water (often spring water) that is put into bottles and offered for sale\nn07935878\tpure natural water from a stream or brook; often distinguished from soda water\nn07936015\twater from a spring\nn07936093\twater sweetened with sugar\nn07936263\twater suitable for drinking\nn07936459\twater served ice-cold or with ice\nn07936548\teffervescent beverage artificially charged with carbon dioxide\nn07936745\twater naturally or artificially impregnated with mineral salts or gasses; often effervescent; often used therapeutically\nn07936979\tnaturally effervescent mineral water\nn07937069\tsparkling mineral water from springs at Vichy, France or water similar to it\nn07937344\tfood that will decay rapidly if not refrigerated\nn07937461\ta spicy dish that originated in northern Africa; 
consists of pasta steamed with a meat and vegetable stew\nn07937621\ta cheese dish made with egg and bread crumbs that is baked and served in individual fireproof dishes\nn07938007\ta pill or tablet containing several vitamins\nn07938149\ta pill containing one or more vitamins; taken as a dietary supplement\nn07938313\tfood traditionally eaten by African-Americans in the South\nn07938594\ta dish or dessert that is formed in or on a mold\nn07942152\t(plural) any group of human beings (men or women or children) collectively\nn07951464\tseveral things grouped together or considered as a whole\nn07954211\ta collection of rules or prescribed standards on the basis of which decisions are made\nn07977870\ta collection of literary documents or records kept for reference or borrowing\nn08079613\ta team of professional baseball players who play and travel together\nn08182379\ta large number of things or people considered together\nn08238463\ta body of students who are taught together\nn08242223\ta small group of indispensable persons or things\nn08249459\ta group of musicians playing brass and woodwind and percussion instruments\nn08253141\ta party of people assembled for dancing\nn08256735\ta party of people at a wedding\nn08376250\ta series of things depending on each other as if linked together\nn08385989\ta meeting of influential people to conduct business while eating breakfast\nn08492354\tany habitation at a high altitude\nn08492461\tthe marketplace in ancient Greece\nn08494231\ta commercially operated park with stalls and shows for amusement\nn08495908\tapoapsis in solar orbit; the point in the orbit of a planet or comet that is at the greatest distance from the sun\nn08496334\t(golf) the part of the fairway leading onto the green\nn08500819\tthe part of outer space within the solar system\nn08500989\tthe space between stars\nn08501887\tthe space between galaxies\nn08505018\ta large wilderness area\nn08506347\ta region much like a desert but usually located between a 
desert and the surrounding regions\nn08511017\t(nautical) at the ends of the transverse deck beams of a vessel\nn08517010\ta defensive post at the end of a bridge nearest to the enemy\nn08517676\ta place on a bus route where buses stop to discharge and take on passengers\nn08518171\ta site where people on holiday can pitch a tent\nn08519299\ta storage site (such as a small reservoir) that delays the flow of water downstream\nn08521623\ta tract of land used for burials\nn08523340\tpoint where the hairline meets the midpoint of the forehead\nn08524735\ta large and densely populated urban area; may include several independent administrative districts\nn08539072\tthe central area or commercial center of a town or city\nn08539276\toutlying areas (as of a city or town)\nn08540532\tone of the administrative divisions of a large city\nn08547468\ta pasture for cows\nn08547544\tthe top line of a hill, mountain, or wave\nn08551296\ta diocese of the Eastern Orthodox Church\nn08554440\ta residential district located on the outskirts of a city\nn08555333\ta wealthy residential suburb\nn08555710\tlow space beneath a floor of a building; gives workers access to wiring or plumbing\nn08558770\tthe domain ruled by a sheik\nn08558963\tany address at which you dwell more than temporarily\nn08559155\t(law) the residence where you have your permanent home or principal establishment and to where, whenever you are absent, you intend to return; every person is compelled to have one and only one domicile at a time\nn08560295\ta holiday resort offering ranch activities (riding and camping)\nn08569482\ta rural area where farming is practiced\nn08571275\t(sports) the middle part of a playing field (as in football or lacrosse)\nn08571642\ta narrow field that has been cleared to check the spread of a prairie fire or forest fire\nn08571898\tan open-air street market for inexpensive or secondhand articles\nn08573674\tthe line along which opposing armies face each other\nn08573842\tan accumulation 
of refuse and discarded matter\nn08578517\ta region including the bottom of the sea and the littoral zones\nn08579266\ta district where gold is mined\nn08579352\ta field where grain is grown\nn08580944\ta position some distance below the top of a mast to which a flag is lowered in mourning or to signal distress\nn08583292\tthe line formed by the lower edge of a skirt or coat\nn08583455\ta breeding ground for herons; a heron rookery\nn08583554\tthe line formed by the lower edge of hip-length garment\nn08583682\tthe line formed by measuring the hip at its greatest part\nn08584914\ta small unpretentious out-of-the-way place\nn08586978\ta field where junk is collected and stored for resale\nn08589670\tan isogram connecting points of equal magnetic inclination\nn08596076\tthe region of the shore of a lake or sea or ocean\nn08597579\teither of two points where the lines of force of the Earth's magnetic field are vertical\nn08598301\tland where grass or grasslike vegetation grows and is the dominant form of plant life\nn08598568\ta place that attracts many visitors\nn08599174\ta meridian that passes through the observer's zenith\nn08599292\tmeridian at zero degree longitude from which east and west are reckoned (usually the Greenwich longitude in England)\nn08611339\tthe center point on a shield\nn08611421\ta space where automobiles are not allowed to park\nn08613733\twhere the air is unconfined\nn08614632\tan open area for holding fairs or exhibitions or circuses\nn08616050\ta field covered with grass or herbage and suitable for grazing by livestock\nn08618831\tperiapsis in solar orbit; the point in the orbit of a planet or comet where it is nearest to the sun\nn08619112\tperiapsis in orbit around the moon\nn08623676\tthe specific site in the body where an infection originates\nn08628141\tan older or native quarter of many cities in northern Africa; the quarter in which the citadel is located\nn08633683\tthe area of a city (such as a harbor or dockyard) alongside a body 
of water\nn08640531\ta hotel located in a resort area\nn08640739\tan area where many people go for recreation\nn08640962\tthe part of a golf course bordering the fairway where the grass is not cut short\nn08643267\t(India) a place of religious retreat for Hindus\nn08644045\t(nautical) a place of refuge (as for a ship)\nn08645104\tan uncultivated region covered with scrub vegetation\nn08645212\tan area of open or forested country\nn08645318\ta tract of open rolling country (especially upland)\nn08647264\tthe yard associated with a school\nn08648917\ta place that is frequently exhibited and visited for its historical interest or natural beauty\nn08649711\tspace by the side of a bed (especially the bed of a sick or dying person)\nn08651104\ta line that marks the side boundary of a playing field\nn08652376\ta resort with lodging and facilities for skiing\nn08658309\ta layer in a soil profile\nn08658918\ta layer of rock with a particular composition (especially of fossils); for dating the stratum\nn08659242\ta seam of coal\nn08659331\tthe part of a coal seam that is being cut\nn08659446\ta geographic region (land or sea) under which something valuable is found\nn08659861\ta region rich in petroleum deposits (especially one with producing oil wells)\nn08661878\tthe part of the Earth's surface between the Arctic Circle and the Tropic of Cancer or between the Antarctic Circle and the Tropic of Capricorn; characterized by temperate climate\nn08662427\tlevel space where heavy guns can be mounted behind the parapet at the top of a rampart\nn08663051\tthe limit of a nation's territorial waters\nn08663703\tthe top of a desk\nn08663860\tthe upper part of anything\nn08673039\ta native village in Malaysia\nn08674344\tregions adjacent to the tropics\nn08676253\tan urban area in a Spanish-speaking country\nn08677424\televated open grassland in southern Africa\nn08677801\tthe highest point (of something)\nn08678783\ta line corresponding to the surface of the water when the vessel is 
afloat on an even keel; often painted on the hull of a ship\nn08679167\ta line marking the highest level reached\nn08679269\ta line marking the lowest level reached\nn08679562\tthe watershed of a continent (especially the watershed of North America formed by a series of mountain ridges extending from Alaska to Mexico)\nn08685188\ta belt-shaped region in the heavens on either side to the ecliptic; divided into 12 constellations or signs for astrological purposes\nn08782627\tan island in the Aegean Sea\nn08896327\tcountry or territory ruled by a sultan\nn09032191\tone of the cantons of Switzerland\nn09186592\tthe deep sea (2000 meters or more) where there is no light\nn09189157\tthe lofty nest of a bird of prey (such as a hawk or eagle)\nn09191635\ta bubble of air\nn09193551\ta flat resulting from repeated deposits of alluvial material by running water\nn09193705\tany high mountain\nn09194227\ta glacier that moves down from a high valley\nn09199101\ta mound of earth made by ants as they dig their nest\nn09201998\tunderground bed or layer yielding ground water for wells and springs etc\nn09203827\ta group of many islands in a large body of water\nn09205509\ta sharp narrow ridge found in rugged mountains\nn09206896\ta stream or brook\nn09206985\tan upward slope or grade (as in a road)\nn09208496\t(astronomy) a cluster of stars (or a small constellation)\nn09209025\tthe lower layer of the crust\nn09210862\tan island consisting of a circular coral reef surrounding a lagoon\nn09213434\ta long ridge or pile\nn09213565\tsloping land (especially the slope beside a body of water)\nn09214060\ta submerged (or partly submerged) ridge in a river or along a shore\nn09214269\ta pit where wood or charcoal is burned to make a bed of hot coals suitable for barbecuing meat\nn09214916\ta long coral reef near and parallel to the shore\nn09215023\tany of the elementary particles having a mass equal to or greater than that of a proton and that participate in strong interactions; a hadron 
with a baryon number of +1\nn09215437\ta natural depression in the surface of the land often with a lake at the bottom of it\nn09217230\tan area of sand sloping down to the water of a sea or lake\nn09218315\ta structure of small hexagonal cells constructed from beeswax by bees and used to store honey and larvae\nn09218494\tsomething to which a mountain climber's rope can be secured\nn09218641\ta mountain or tall hill\nn09219233\ta narrow ledge or shelf typically at the top or bottom of a slope\nn09223487\ta calculus formed in the bladder\nn09224725\ta high steep bank (usually formed by river erosion)\nn09226869\ta pit created to provide earth that can be used as fill at another site\nn09228055\ta slope or hillside\nn09229709\ta hollow globule of gas (e.g., air or carbon dioxide)\nn09230041\ta hole made by an animal, usually for shelter\nn09230202\ta hill that rises abruptly from the surrounding region; has a flat top and sloping sides\nn09231117\ta large crater caused by the violent explosion of a volcano that collapses into a depression\nn09233446\ta ravine formed by a river in an area with little rainfall\nn09233603\tthe steeply sloping side of a canyon\nn09238926\ta geological formation consisting of an underground enclosure with access from the surface of the ground or from the sea\nn09239302\ta large cave or a large chamber in a cave\nn09242389\ta deep opening in the earth's surface\nn09245515\ta steep-walled semicircular basin in a mountain; may contain a lake\nn09246464\ta steep high face of rock\nn09247410\ta visible mass of water or ice particles suspended at a considerable altitude\nn09248153\ta slope down which sleds may coast\nn09248399\tland in a coastal area\nn09249034\ta pass between mountain peaks\nn09249155\ta crater that has collected cosmic material hitting the earth\nn09251407\t(astronomy) a relatively small extraterrestrial body consisting of a frozen mass that travels around the sun in a highly elliptical orbit\nn09255070\ta glacier that 
spreads out from a central mass of ice\nn09256479\ta reef consisting of coral consolidated into limestone\nn09257843\tsmall or narrow cave in the side of a cliff or mountain\nn09259025\ta steep rugged rock or cliff\nn09259219\ta bowl-shaped depression formed by the impact of a meteorite or bomb\nn09260907\tarable land that is worked by plowing and sowing and raising crops\nn09262690\tan open river valley (in a hilly area)\nn09263912\ta narrow pass (especially one between mountains)\nn09264803\ta low triangular area of alluvial deposits where a river divides before entering a larger body of water\nn09265620\ta downward slope or bend\nn09266604\ta domed rock formation where a core of rock has moved upward and pierced through the more brittle overlying strata\nn09267854\ta piece of turf dug out of a lawn or fairway (by an animals hooves or a golf club)\nn09268007\t(golf) the cavity left when a piece of turf is cut from the ground by the club head in making a stroke\nn09269341\t(usually plural) a rolling treeless highland with little soil\nn09269472\tthe downward slope of a hill\nn09269882\ta gully that is shallower than a ravine\nn09270160\tthe nest of a squirrel\nn09270657\ta mound of glacial drift\nn09270735\ta ridge of sand created by the wind; found in deserts or near lakes and oceans\nn09274152\ta long steep slope or cliff at the edge of a plateau or ridge; usually formed by erosion\nn09274305\t(geology) a long winding ridge of post glacial gravel and other sediment; deposited by meltwater from glaciers or ice sheets\nn09279986\ta ball of fire (such as the sun or a ball-shaped discharge of lightning)\nn09281252\ta red dwarf star in which luminosity can change several magnitudes in a few minutes\nn09282208\tthe ground on which people and animals move about\nn09283193\tany inanimate object (as a towel or money or clothing or dishes or books or toys etc.) 
that can transmit infectious agents from one person to another\nn09283405\ta relatively low hill on the lower slope of a mountain\nn09283514\tthe lower wall of an inclined fault\nn09283767\tland forming the forward margin of something\nn09283866\tthe part of the seashore between the highwater mark and the low-water mark\nn09287415\ta particle that mediates the interaction of two elementary particles\nn09287968\t(geology) the geological features of the earth\nn09288635\ta spring that discharges hot water and steam\nn09289331\ta slowly moving mass of ice\nn09289596\ta narrow secluded valley (in the mountains)\nn09290350\ta hole in the ground made by gophers\nn09290444\ta deep ravine (usually with a river running through it)\nn09294877\ta small cave (usually with attractive features)\nn09295210\ta small iceberg or ice floe just large enough to be hazardous for shipping\nn09295946\ta narrow gorge with a stream running through it\nn09300306\tdeep ditch cut by running water (especially after a prolonged downpour)\nn09300905\tmany objects thrown forcefully through the air\nn09302616\televated (e.g., mountainous) land\nn09303008\ta local and well-defined elevation of the land\nn09303528\tthe side or slope of a hill\nn09304750\ta depression hollowed out of solid matter\nn09305031\ta small valley between mountains\nn09305898\ta natural spring of water at a temperature of 70 F or above\nn09308572\ta large mass of ice floating at sea; usually broken off of a polar glacier\nn09308743\ta mass of ice and snow that permanently covers a large area of land (e.g., the polar regions or a mountain peak)\nn09309046\ta large flat mass of ice (larger than an ice floe) floating at sea\nn09309168\ta flat mass of ice (smaller than an ice field) floating at sea\nn09309292\ta large mass of ice\nn09310616\ta geological fault in which one side is above the other\nn09315159\ta particle that is electrically charged (positive or negative); an atom or molecule or group that has lost or gained one or 
more electrons\nn09319604\ta relatively narrow strip of land (with water on both sides) connecting two larger land areas\nn09325824\ta calculus formed in the kidney\nn09326662\ta small natural hill\nn09327077\ta small hill rising up from the African veld\nn09327538\ta disk-shaped region of minor planets outside the orbit of Neptune\nn09330378\tthe bottom of a lake\nn09331251\tland bordering a lake\nn09332890\tthe shore of a lake\nn09335693\tthe seacoast first sighted on a voyage (or flight over water)\nn09335809\ta low area that has been filled in\nn09336555\tthe foam resulting from excessive sweating (as on a horse)\nn09337048\tan accidental hole that allows something (fluid or light etc.) to enter or escape\nn09337253\ta projecting ridge on a mountain or submerged under water\nn09338013\tan elementary particle that participates in weak interactions; has a baryon number of 0\nn09339810\tthe solid part of the earth consisting of the crust and outer mantle\nn09344198\tlow level country\nn09344324\ta crater on the Earth's Moon\nn09344724\ta flat-bottomed volcanic crater that was formed by an explosion; often filled with water\nn09348460\ta block of the earth's crust bounded by faults and shifted to form peaks of a mountain range\nn09349648\ta bend or curve, as in a stream or river\nn09351905\tflat tableland with steep edges\nn09352849\tstony or metallic object that is the remains of a meteoroid that has reached the earth's surface\nn09353815\ta fossil that must be studied microscopically\nn09354511\tthe middle of a stream\nn09357346\ta mound of earth made by moles while burrowing\nn09357447\ta geological formation in which all strata are inclined in the same direction\nn09359803\ta land mass that projects well above its surroundings; higher than a hill\nn09361517\tthe side or slope of a mountain\nn09362316\tthe point where a stream issues into a larger body of water\nn09362945\ta term used in Scottish names of promontories\nn09366017\ta sunken or depressed geological 
formation\nn09366317\ta raised or elevated geological formation\nn09375606\ta ravine or gully in southern Asia\nn09376198\ta large body of water constituting a principal part of the hydrosphere\nn09376526\tthe bottom of a sea or ocean\nn09376786\tland bordering an ocean\nn09381242\tthe part of a rock formation that appears above the surface of the surrounding land\nn09382099\tthe land inside an oxbow bend in a river\nn09384106\ta meteorite composed principally of olivine and metallic iron\nn09389867\ta hole made in something\nn09391386\tthe intensely luminous surface of a star (especially the sun)\nn09391644\ta gentle slope leading from the base of a mountain to a region of flat land\nn09391774\ta type of glaciation characteristic of Alaska; large valley glaciers meet to form an almost stagnant sheet of ice\nn09392402\tan area planted with pine trees or related conifers\nn09393524\tthe beach at a seaside resort\nn09393605\textensive tract of level open land\nn09396465\ta promontory extending out into a large body of water\nn09396608\ta glacier near the Arctic or Antarctic poles\nn09398076\ta pit or hole produced by wear or weathering (especially in a road surface)\nn09398677\ta very steep cliff\nn09399592\ta natural elevation (especially a rocky one that juts out into the sea)\nn09400584\tcalculus in a salivary gland\nn09400987\ta degenerate neutron star; small and extremely dense; rotates very fast and emits regular pulses of polarized radiation\nn09402944\ta pit filled with loose wet sand into which objects are sucked down\nn09403086\ta hole in the ground as a nest made by wild rabbits\nn09403211\tany object that radiates energy\nn09403427\tan arc of colored light in the sky caused by refraction of the sun's rays by rain\nn09403734\ta series of hills or mountains\nn09405078\tland suitable for grazing livestock\nn09405787\ta deep narrow steep-sided valley (especially one formed by running water)\nn09406793\ta submerged ridge of rock or coral near the surface of 
the water\nn09409512\ta long narrow natural elevation or striation\nn09409752\ta long narrow range of hills\nn09410224\ta valley with steep sides; formed by a rift in the earth's crust\nn09411189\twoodlands along the banks of stream or river\nn09411295\tone of a series of small ridges produced in sand by water currents or by wind\nn09415584\tthe bank of a river\nn09415671\ta channel occupied (or formerly occupied) by a river\nn09416076\ta lump or mass of hard consolidated mineral matter\nn09416890\tthe inner top surface of a covered area or hollow space\nn09421031\ta shallow basin in a desert region; contains salt and gypsum that was deposited by an evaporated salt lake\nn09421799\ta submerged bank of sand near a shore or in a river; can be exposed at low tide\nn09421951\ta bar of sand\nn09422190\ta large pit in sandy ground from which sand is dug\nn09422631\ta low area where waste is buried between layers of earth\nn09425019\ta pit over which lumber is positioned to be sawed by two men with a long two-handed saw\nn09425344\t(geology) flat elevated land with poor soil and little vegetation that is scarred by dry channels of glacial origin (especially in eastern Washington)\nn09428293\tthe shore of a sea or ocean\nn09428628\tthe shore of a sea or ocean regarded as a resort\nn09429630\ta long and tall sand dune with a sharp crest; common in the Sahara\nn09432283\ta rigid covering that envelops an object\nn09432990\tsomething that shines (with emitted or reflected light)\nn09433312\ta sandbank in a stretch of water that is visible at low tide\nn09433442\tthe land along the edge of a body of water\nn09433839\ta boundary line between land and water\nn09435739\ta depression in the ground communicating with a subterranean passage (especially in limestone) and formed by solution or by collapse of a cavern roof\nn09436444\ta snow-covered slope for skiing\nn09436708\tthe atmosphere and outer space as viewed from the earth\nn09437454\tan elevated geological 
formation\nn09438844\ta covering of snow (as on a mountain peak)\nn09438940\ta mass of snow heaped up by the wind\nn09439032\ta permanent wide expanse of snow\nn09439213\tthe froth produced by soaps or detergents\nn09442595\ta narrow strip of land that juts out into the sea\nn09443281\tthe trail left by a person or an animal; what the hunter follows in pursuing game\nn09443641\tfoam or froth on the sea\nn09444783\tany celestial body visible (as a point of light) from the Earth at night\nn09445008\ta steep place (as on a hill)\nn09445289\textensive plain without trees (associated with eastern Russia and Siberia)\nn09447666\ta poetic term for a shore (as the area periodically covered and uncovered by the tides)\nn09448690\ta channel occupied (or formerly occupied) by a stream\nn09450163\tthe star that is the source of light and heat for the planets in the solar system\nn09451237\ta star that explodes and becomes extremely luminous in the process\nn09452291\ta low area (especially a marshy area between ridges)\nn09452395\tlow land that is seasonally flooded; has more woody plants than a marsh and better drainage than a bog\nn09452760\ta rounded elevation (especially one on an ocean floor)\nn09453008\ta relatively flat highland\nn09454153\ta sloping mass of loose rocks at the base of a cliff\nn09454412\ta twisted and tangled mass that is highly interwoven\nn09454744\ta natural accumulation of bitumens at the surface of the earth; often acts as a trap for animals whose bones are thus preserved\nn09456207\ta level shelf of land interrupting a declivity (with steep slopes above and below)\nn09457979\ta basin that is full of water at high tide\nn09458269\tland near the sea that is overflowed by the tide\nn09459979\ta high rocky hill\nn09460046\ta prominent rock or pile of rocks on a hill\nn09461069\ta multiple star in the constellation of Orion\nn09462600\tthe lowest atmospheric layer; from 4 to 11 miles high (depending on latitude)\nn09463226\ta vast treeless plain in the 
Arctic regions where the subsoil is permanently frozen\nn09464486\tan object that emits or reflects light in an intermittent flickering manner\nn09466678\tthe upward slope of a hill\nn09467696\ta urinary stone\nn09468604\ta long depression in the surface of the land that usually contains a river\nn09470027\tindirect transmission of an infectious agent that occurs when a vehicle (or fomite) touches a person's body or is ingested\nn09470222\ta layer of ore between layers of rock\nn09472413\ta bowl-shaped geological formation at the top of a volcano\nn09472597\ta mountain formed by volcanic material\nn09474010\tgully or streambed in northern Africa and the Middle East that remains dry except during rainy season\nn09474412\ta vertical (or almost vertical) smooth rock face (as of a cave or mountain)\nn09474765\ta series of connected underground tunnels occupied by rabbits\nn09475044\thabitation for wasps or hornets\nn09475179\tnatural or artificial channel through which water flows\nn09475925\tland bordering a body of water\nn09476123\tunderground surface below which the ground is wholly saturated with water\nn09478210\tany of various hard colored rocks (especially rocks consisting of chert or basalt)\nn09480959\tfossil trail of a worm\nn09481120\t(geology) a piece of rock of different origin from the igneous rock in which it is embedded\nn09493983\t(Greek mythology) a sorceress who detained Odysseus on her island and turned his men into swine\nn09495962\twinged monster with the head of an eagle and the body of a lion\nn09505153\ta leader in religious or sacred affairs\nn09537660\tany expected deliverer\nn09556121\t(Roman mythology) a vestal virgin who became the mother by Mars of the twins Romulus and Remus\nn09605110\ta reference to yourself or myself etc.; `take care of number one' means to put your own interests first\nn09606009\ta person who enjoys taking risks\nn09606527\ta person who is unusual\nn09607630\ta person who is appointed to a job or 
position\nn09607782\tsomeone engaged in a dangerous but potentially rewarding adventure\nn09607903\ta Jew of eastern European or German descent\nn09608709\ta person who helps people or institutions (especially with financial help)\nn09610255\ta person unable to distinguish differences in hue\nn09610405\ta person who holds no title\nn09611722\tsomeone appointed by a court to assume responsibility for the interests of a minor or incompetent person\nn09612700\tan investor who deliberately decides to go against the prevailing wisdom of other investors\nn09613118\tan Italian farmer\nn09613191\ta person who participates in competitions\nn09613690\tone of two or more signers of the same document (as a treaty or declaration)\nn09615336\ta participant in a formal discussion\nn09616573\ta specialist in wine making\nn09616922\ta person who tries to please or amuse\nn09617161\tan orator who delivers eulogies or panegyrics\nn09617435\ta former gambler\nn09617577\ta research worker who conducts experiments\nn09617696\ta person who enjoys testing innovative ideas\nn09618760\tsomeone who expounds and interprets or explains\nn09618880\ta former president\nn09618957\ta part of a person that is used to refer to a person\nn09619168\ta person who belongs to the sex that can have babies\nn09619452\ta worker who performs the last step in a manufacturing process\nn09620078\ta person who inhabits a particular place\nn09620794\tan indigenous person who was born in a particular place\nn09621232\ta person born in a particular place or country\nn09622049\ta young person, not fully developed\nn09622302\ta person who loves someone or is loved by someone\nn09624168\ta person who belongs to the sex that cannot have babies\nn09624559\ta negotiator who acts as a link between parties\nn09624899\ta woman who is a mediator\nn09625401\ta person who owes allegiance to that nation\nn09626238\ta person who is of equal standing with another in a group\nn09627807\tthe winner of a lottery\nn09627906\ta person 
who receives something\nn09629065\ta person addicted to religion or a religious zealot\nn09629246\ta person who enjoys sensuality\nn09629752\ta person who changes location\nn09631129\ta person who for some reason is not wanted or welcome\nn09632274\ta person who lacks technical training\nn09632518\ta person who works at a specific occupation\nn09633969\ta person who transgresses moral or civil law\nn09635534\tan African who is Black\nn09635635\ta white native of Cape Province who is a descendant of Dutch settlers and who speaks Afrikaans\nn09635973\t(according to Nazi doctrine) a Caucasian person of Nordic descent (and not a Jew)\nn09636339\ta person with dark skin who comes from Africa (or whose ancestors came from Africa)\nn09637339\ta woman who is Black\nn09638454\tan offspring of a black and a white parent\nn09638875\ta member of the Caucasoid race\nn09639382\ta member of the Sunni Muslim people living in northwestern Caucasia\nn09639919\ta member of a group of Semitic-speaking peoples of the Middle East and northern Africa\nn09640327\tan inhabitant of ancient Chaldea\nn09640715\ta member of an ancient warlike people living in Elam east of Babylonia as early as 3000 BC\nn09641002\ta man who is White\nn09641578\ta white person of Anglo-Saxon ancestry who belongs to a Protestant denomination\nn09643799\t(slang) a disparaging term for an Asian person (especially for North Vietnamese soldiers in the Vietnam War)\nn09644152\ta member of the nomadic peoples of Mongolia\nn09644657\ta member of the Mongolian people of central Asia who invaded Russia in the 13th century\nn09648743\ta member of any of various Indian peoples of central Mexico\nn09648911\ta member of the Nahuatl people who established an empire in Mexico that was overthrown by Cortes in 1519\nn09649067\ta member of an early Mesoamerican civilization centered around Veracruz that flourished between 1300 and 400 BC\nn09650729\ta member of the Siouan people of southeastern Mississippi\nn09650839\ta member of 
a warlike group of Algonquians living in the northwestern plains\nn09650989\ta member of a group of Siouan people who constituted a division of the Teton Sioux\nn09651123\ta group of Plains Indians formerly living in what is now North and South Dakota and Nebraska and Kansas and Arkansas and Louisiana and Oklahoma and Texas\nn09651968\ta member of a North American Indian people living on the western plains (now living in Oklahoma and Montana)\nn09652149\ta member of the Muskhogean people formerly living in northern Mississippi\nn09653144\ta member of a North American Indian people living around the mouth of the Colorado River\nn09653438\ta member of the Shoshonean people who formerly lived between Wyoming and the Mexican border but are now chiefly in Oklahoma\nn09654079\tany member of the Creek Confederacy (especially the Muskogee) formerly living in Georgia and Alabama but now chiefly in Oklahoma\nn09654518\ta member of an Algonquian people formerly living in New Jersey and New York and parts of Delaware and Pennsylvania\nn09654898\ta member of a North American Indian people of southern California\nn09655213\ta member of a North American Indian people living on the California coast near Monterey\nn09655466\ta member of the Caddo people of northeastern Texas\nn09656077\ta member of a North American Indian people of Cataract Canyon in Arizona\nn09657206\ta member of the Siouan people who constituted a division of the Teton Sioux and who formerly lived in the western Dakotas; they were prominent in resisting the white encroachment into the northern Great Plains\nn09657748\ta member of the Siouan people formerly living in Iowa and Minnesota and Missouri\nn09658254\ta member of the North American Indian people of Oregon\nn09658398\ta member of a North American Indian people of southeastern California and northwestern Mexico\nn09658815\ta member of a Mayan people of north central Guatemala\nn09658921\ta member of a Caddo people formerly living in north central 
Texas\nn09659039\ta member of the Algonquian people formerly inhabiting southern Wisconsin and northern Illinois\nn09659188\ta member of a North American Indian people living in northern Baja California\nn09660010\ta member of the Algonquian people of northeastern Maine and New Brunswick\nn09660240\ta member of a North American Indian people of the Gila river valley in Arizona\nn09661873\ta member of the Algonquian people formerly living in the Hudson valley and eastward to the Housatonic\nn09662038\ta member of any of the peoples formerly living in southeastern United States and speaking Muskhogean languages\nn09662661\ta member of an Athapaskan people that migrated to Arizona and New Mexico and Utah\nn09662951\ta member of the Wakashan people living on Vancouver Island and in the Cape Flattery region of northwestern Washington\nn09663248\ta member of the Siouan people who constituted a division of the Teton Sioux and who formerly inhabited the Black Hills of western South Dakota\nn09663786\ta member of the Siouan people formerly living in Missouri in the valleys of the Missouri and Osage rivers; oil was found on Osage lands early in the 20th century\nn09663999\ta member of the Iroquoian people formerly living east of Lake Ontario\nn09664556\ta member of either of two Shoshonean peoples (northern Paiute and southern Paiute) related to the Aztecs and living in the southwestern United States\nn09664908\ta member of the Algonquian people related to the Malecite and living in northeastern Maine and New Brunswick\nn09665367\ta member of the Algonquian people belonging to the Abnaki confederacy and living in the Penobscot valley in northern Maine\nn09665545\ta member of a North American Indian people speaking one of the Penutian languages\nn09666349\ta member of the Algonquian people originally of Michigan and Wisconsin\nn09666476\ta member of the Algonquian people who formerly lived in eastern Virginia\nn09666883\ta deified spirit of the Pueblo people\nn09667358\ta 
member of a group of North American Indians speaking a Salishan language and living on the northwest coast of North America\nn09668199\ta member of a North American Indian people who lived in Oregon along the Columbia river and its tributaries in Washington and northern Idaho\nn09668437\ta member of the Indian people of northern California and southern Oregon\nn09668562\ta member of the Algonquian people formerly living along the Tennessee river\nn09668988\ta member of a group of Siouan people who constituted a division of the Teton Sioux\nn09669631\ta member of the large western branch of Sioux people which was made up of several groups that lived on the plains\nn09670280\ta member of a group of peoples of Mexico\nn09670521\ta member of the Taracahitian people of north central Mexico\nn09670909\ta member of an Iroquois people who formerly lived in North Carolina and then moved to New York State and joined the Iroquois\nn09671089\ta member of the Siouan people of Virginia and North Carolina\nn09672590\ta member of an extinct North American Indian people who lived in northern California\nn09672725\ta member of a North American Indian people of central Arizona\nn09672840\ta member of the North American Indian people of the San Joaquin Valley\nn09673091\ta member of the North American Indian people of Arizona and adjacent Mexico and California\nn09674412\ta member of an agricultural people in southeastern India\nn09674786\ta member of a formerly tribal people now living in south central India\nn09675045\ta member of the Dravidian people living in southeastern India\nn09675673\ta member of a pastoral people living in the Nilgiri Hills of southern India\nn09675799\ta member of a Dravidian people living on the southwestern coast of India\nn09675922\ta member of the people of Gujarat\nn09676021\ta member of the people of Kashmir\nn09676247\ta member of the majority people of Punjab in northwestern India\nn09676884\tany member of the people of eastern Europe or Asian 
Russia who speak a Slavonic language\nn09677427\tadherent of Anabaptism\nn09678747\ta member of Christian denomination that expects the imminent advent of Christ\nn09679028\ta Christian as contrasted with a Jew\nn09679170\ta person who is not a member of one's own religion; used in this sense by Mormons and Hindus\nn09679925\ta member of a Catholic church\nn09680908\ta member of the church formed in the 19th century by German Catholics who refused to accept the infallibility of the Pope\nn09681107\ta member of the Uniat Church\nn09681234\ta member of the Coptic Church\nn09681973\ta woman who is a Jew\nn09683180\ta Muslim who is involved in a jihad\nn09683757\tone who follows the teachings of Buddha\nn09683924\tan adherent of the doctrines of Zen Buddhism\nn09684082\tan adherent of Mahayana Buddhism\nn09684901\ta Hindu religious teacher; used as a title of respect\nn09685233\tworshipper of Krishna and member of the International Society for Krishna Consciousness\nn09685806\ta believer in Shintoism\nn09686262\ta person of mixed European and African descent\nn09686401\ta person of mixed European and Asian descent\nn09688233\ta Gaelic-speaking Celt in Ireland or Scotland or the Isle of Man\nn09688804\ta member of the ancient Germanic peoples who spread from the Rhine into the Roman Empire in the 4th century\nn09689435\ta native or inhabitant of Afghanistan\nn09689958\ta native or inhabitant of Albania\nn09690083\ta native or inhabitant of Algeria\nn09690208\tany member of the peoples speaking a language in the Altaic language group\nn09690496\ta native or inhabitant of Andorra\nn09690621\ta native or inhabitant of Angola\nn09690864\ta native or inhabitant of the island of Anguilla in the West Indies\nn09691604\ta native or inhabitant of Austria\nn09691729\ta native or inhabitant of the Bahamas\nn09691858\ta native or inhabitant of Bahrain\nn09692125\ta member of a subgroup of people who inhabit Lesotho\nn09692915\ta member of a pastoral Bantu people living in Namibia, 
Botswana, and Angola\nn09693244\ta member of a Bantu people in southeastern Congo\nn09693982\ta native or inhabitant of Barbados\nn09694664\ta native or inhabitant of Bolivia\nn09694771\ta native or inhabitant of Borneo\nn09695019\ta native or inhabitant of Rio de Janeiro\nn09695132\ta member of the South American Indian people living in Brazil and Paraguay\nn09695514\ta native or inhabitant of Brunei\nn09695620\ta native or inhabitant of Bulgaria\nn09695979\ta native or inhabitant of Byelorussia\nn09696456\ta native or inhabitant of Cameroon\nn09696585\ta native or inhabitant of Canada\nn09696763\ta Canadian descended from early French settlers and whose native language is French\nn09697401\ta native or inhabitant of Central America\nn09697986\ta native or inhabitant of Chile\nn09698644\ta native or inhabitant of the Republic of the Congo\nn09699020\ta native or inhabitant of Cyprus\nn09699642\ta native or inhabitant of Denmark\nn09700125\ta native or inhabitant of Djibouti\nn09700964\ta native or inhabitant of Great Britain\nn09701148\ta native or inhabitant of England\nn09701833\ta woman who is a native or inhabitant of England\nn09702134\ta person of Anglo-Saxon (especially British) descent whose native tongue is English and whose culture is strongly influenced by English culture as in WASP for `White Anglo-Saxon Protestant'\nn09702673\ta member of a Germanic people who conquered England and merged with the Saxons and Jutes to become Anglo-Saxons\nn09703101\tan inhabitant of Wessex\nn09703344\ta member of a Germanic people who invaded northern Italy in the 6th century\nn09703485\ta man of English descent\nn09703708\ta resident of Cambridge\nn09703809\ta man who is a native or inhabitant of Cornwall\nn09703932\ta woman who is a native or resident of Cornwall\nn09704057\ta resident of Lancaster\nn09704157\ta member (or supporter) of the house of Lancaster\nn09704283\ta native of Newcastle-upon-Tyne\nn09705003\ta native or resident of Oxford\nn09705124\ta native 
or inhabitant of Ethiopia\nn09705671\ta member of the Semitic speaking people of northern Ethiopia\nn09705784\ta native or inhabitant of Eritrea\nn09706029\ta native or inhabitant of Finland\nn09706255\ta member of a Finnish people living in the northwestern Urals in Russia\nn09707061\ta member of the Livonian-speaking people of Latvia\nn09707289\ta native or inhabitant of Lithuania\nn09707735\tone of the people of mixed Ostyak and Samoyed origin in Siberia\nn09708750\ta native or resident of Paris\nn09708889\ta female native or resident of Paris\nn09709531\ta person descended from French ancestors in southern United States (especially Louisiana)\nn09709673\ta person of European descent born in the West Indies or Latin America\nn09710041\ta native or inhabitant of Gabon\nn09710164\ta native or inhabitant of Greece\nn09710886\ta member of one of four linguistic divisions of the prehistoric Greeks\nn09711132\ta resident of Athens\nn09711435\ta resident of Laconia\nn09712324\ta native or inhabitant of Guyana\nn09712448\ta native or inhabitant of Haiti\nn09712696\ta member of a people inhabiting the northern Malay Peninsula and Malaysia and parts of the western Malay Archipelago\nn09712967\ta member of the predominantly Muslim people in the southern Philippines\nn09713108\ta native or inhabitant of Holland\nn09714120\ta native or inhabitant of Iceland\nn09714694\ta native or inhabitant of Iraq\nn09715165\ta man who is a native or inhabitant of Ireland\nn09715303\ta woman who is a native or inhabitant of Ireland\nn09715427\ta resident of Dublin\nn09716047\ta native or inhabitant of Italy\nn09716933\ta resident of modern Rome\nn09717233\ta member of an ancient Oscan-speaking people of the central Apennines north of Rome who were conquered and assimilated into the Roman state in 290 BC\nn09718217\ta native or inhabitant of Japan\nn09718811\ta native or inhabitant of Jordan\nn09718936\ta native or inhabitant of Korea who speaks the Korean language\nn09719309\ta native or 
inhabitant of Kenya\nn09719794\ta member of a Buddhist people inhabiting the area of the Mekong River in Laos and Thailand and speaking the Lao language; related to the Thais\nn09720033\ta member of an indigenous nomadic people living in northern Scandinavia and herding reindeer\nn09720256\ta native of Latin America\nn09720595\ta native or inhabitant of Lebanon\nn09720702\t(formerly) a native or inhabitant of the Levant\nn09720842\ta native or inhabitant of Liberia\nn09721244\ta native or inhabitant of Luxembourg\nn09721444\ta native or inhabitant of Macedon\nn09722064\ta Malaysian from Sabah\nn09722658\ta native or inhabitant of Mexico\nn09722817\ta person of Mexican descent\nn09723067\ta Mexican (or person of Mexican descent) living in the United States\nn09723819\ta native or inhabitant of Namibia\nn09723944\ta native or inhabitant of Nauru\nn09724234\ta member of Hindu people descended from brahmins and Rajputs who live in Nepal\nn09724533\ta native or inhabitant of New Zealand\nn09724656\ta native or inhabitant of Nicaragua\nn09724785\ta native or inhabitant of Nigeria\nn09725000\ta member of a Negroid people living chiefly in northern Nigeria\nn09725229\ta native or inhabitant of North America\nn09725546\ta native or inhabitant of Nova Scotia\nn09725653\ta native or inhabitant of Oman\nn09725772\ta native or inhabitant of Pakistan\nn09725935\ta member of a Dravidian people living in Pakistan\nn09726621\ta member of a native Indian group in South America\nn09726811\ta member of an American Indian peoples of northeastern South America and the Lesser Antilles\nn09727440\ta native or inhabitant of the Philippines\nn09727826\ta native or inhabitant of Polynesia\nn09728137\ta native or inhabitant of Qatar\nn09728285\ta native or inhabitant of Romania\nn09729062\ta resident of Moscow\nn09729156\ta native or inhabitant of Georgia in Asia\nn09730077\ta native or inhabitant of Sarawak\nn09730204\tan inhabitant of Scandinavia\nn09730824\ta native or inhabitant of 
Senegal\nn09731343\ta native of Slovenia\nn09731436\ta native or inhabitant of South Africa\nn09731571\ta native or inhabitant of South America\nn09732170\ta native or inhabitant of Sudan\nn09733459\ta native or inhabitant of Syria\nn09733793\ta native or inhabitant of Tahiti\nn09734185\ta native or inhabitant of Tanzania\nn09734450\ta native or inhabitant of Tibet\nn09734535\ta native or inhabitant of Togo\nn09734639\ta member of a nomadic Berber people of the Sahara\nn09735258\tany member of the peoples speaking a Turkic language\nn09735654\ta member of a people of Turkic speech living in the Volga region in eastern Russia\nn09736485\ta member of a Turkic people living in Turkmenistan and neighboring areas\nn09736798\ta member of a Turkic people of Uzbekistan and neighboring areas\nn09736945\ta native or inhabitant of Uganda\nn09737050\ta native or inhabitant of the Ukraine\nn09737161\ta member of a Turkic people of northeastern Siberia (mainly in the Lena river basin)\nn09737453\ta member of the Tungus speaking people of Mongolian race who are a nomadic people widely spread over eastern Siberia; related to the Manchu\nn09738121\ta member of the largest ethnic group in southeastern Nigeria\nn09738400\ta native or inhabitant of a North American or Central American or South American country\nn09740724\tan American who was born in Britain or one whose ancestors were British\nn09741074\ta member or descendant of any of the aboriginal peoples of Alaska\nn09741331\ta native or resident of Arkansas\nn09741722\ta native or resident of the Carolinas\nn09741816\ta native or resident of Colorado\nn09741904\ta native or resident of Connecticut\nn09741999\ta native or resident of Delaware\nn09742101\ta native or resident of Florida\nn09742315\tan American who was born in Germany or whose ancestors were German\nn09742927\ta native or resident of Illinois\nn09743487\ta native or resident of Maine\nn09743601\ta native or resident of Maryland\nn09743792\ta native or resident of 
Minnesota\nn09744161\ta native or resident of Nebraska\nn09744346\ta native or resident of New Hampshire\nn09744462\ta native of resident of New Jersey\nn09744679\ta native or resident of New York (especially of New York City)\nn09744834\ta native or resident of North Carolina\nn09745229\ta native or resident of Oregon\nn09745324\ta native or resident of Pennsylvania\nn09745834\ta native or resident of Texas\nn09745933\ta native or resident of Utah\nn09746936\ta native or inhabitant of Uruguay\nn09747191\ta native or inhabitant of Vietnam\nn09747495\ta native or inhabitant of Gambia\nn09748101\ta native or inhabitant of the former republic of East Germany\nn09748408\tan inhabitant of Berlin\nn09748648\ta German inhabitant of Prussia\nn09748889\ta native or inhabitant of Ghana\nn09749386\ta native or inhabitant of Guinea\nn09750282\ta native or inhabitant of Papua New Guinea or New Guinea\nn09750641\ta member of the French-speaking people living in Belgium\nn09750770\ta native or inhabitant of Yemen\nn09750891\ta native or inhabitant of Yugoslavia\nn09751076\ta member of a Slavic people who settled in Serbia and neighboring areas in the 6th and 7th centuries\nn09751496\ta member of the Negroid people of southern South Africa\nn09751622\ta native or inhabitant of Zaire\nn09751895\ta native or inhabitant of Zimbabwe\nn09752023\ta member of the tall Negroid people of eastern South Africa; some live in KwaZulu-Natal under the traditional clan system but many now work in the cities\nn09752519\t(astrology) a person who is born while the sun is in Gemini\nn09753348\t(astrology) a person who is born while the sun is in Sagittarius\nn09753792\t(astrology) a person who is born while the sun is in Pisces\nn09754152\ta French abbot\nn09754217\tthe superior of a group of nuns\nn09754633\tone who gives up or relinquishes or renounces something\nn09754907\tone who shortens or abridges or condenses a written work\nn09755086\tone who makes abstracts or summarizes 
information\nn09755241\ta fugitive who runs away and hides to avoid arrest or prosecution\nn09755555\tsomeone who grants absolution\nn09755788\ta novice learning the rudiments of some subject\nn09755893\tone whose behavior departs substantially from the norm of a group\nn09756049\tone who helps or encourages or incites another\nn09756195\ta signer of a 1679 address to Charles II in which those who petitioned for the reconvening of parliament were condemned and abhorred\nn09756961\ta person who is loathsome or disgusting\nn09757449\ta person who descends down a nearly vertical face by using a doubled rope that is wrapped around the body and attached to some high point\nn09758173\tsomeone who practices self denial as a spiritual discipline\nn09758885\tan administrator in a college or university\nn09759501\tsomeone elected to honorary membership in an academy\nn09760290\ta person who procures or advises or commands the commission of a felony but who is not present at its perpetration\nn09760609\tone paid to accompany or assist or live with another\nn09760913\ta person who provides musical accompaniment (usually on a piano)\nn09761068\ta person who joins with another in carrying out some plan (especially an unethical or illegal plan)\nn09761753\tsomeone in charge of a client's account for an advertising agency or brokerage or other service business\nn09762011\ta defendant in a criminal proceeding\nn09762385\tsomeone who imputes guilt or blame\nn09763272\tsomeone who takes LSD\nn09763784\ta person with whom you are acquainted\nn09764201\ta person who acquires something (usually permanently)\nn09764598\tan acrobat who performs in the air (as on a rope or trapeze)\nn09764732\tthe case officer designated to perform an act during a clandestine operation (especially in a hostile area)\nn09764900\ta person who is a participating member of an organization\nn09765118\ta citizen who takes an active role in the community (as in crime prevention and neighborhood 
watch)\nn09765278\ta theatrical performer\nn09767197\ta person who acts and gets things done\nn09769076\tsomeone who is so ardently devoted to something that it resembles an addiction\nn09769525\ta discussant who offers an example or a reason or a proof\nn09769929\tone who investigates insurance claims or claims for damages and recommends an effective settlement\nn09770179\tan officer who acts as military assistant to a more senior officer\nn09770359\ta general's adjutant; chief administrative officer\nn09771435\tsomeone who admires a young woman\nn09772330\tsomeone (such as a child) who has been adopted\nn09772746\tsomeone who commits adultery or fornication\nn09772930\ta woman adulterer\nn09773962\tsomeone whose business is advertising\nn09774167\tsomeone who receives advice\nn09774783\ta person who pleads for a cause or propounds an idea\nn09775907\tan engineer concerned with the design and construction of aircraft\nn09776346\ta subordinate or subsidiary associate; a person who is affiliated with another or with an organization\nn09776642\tan affluent person; a person who is financially well off\nn09776807\ta serious devotee of some particular music genre or musical performer\nn09777870\ta sergeant of the lowest rank in the military\nn09778266\tan operative serving as a penetration into an intelligence target\nn09778537\tan unpleasant person who is annoying or exasperating\nn09778783\tone who agitates; a political troublemaker\nn09778927\ta person who claims that they cannot have true knowledge about the existence of God (but does not deny that God might exist)\nn09779124\tsomeone who is doubtful or noncommittal about something\nn09779280\tsomeone involved in a contest or battle (as in an agon)\nn09779461\ta newspaper columnist who answers questions and offers advice on personal problems to people who write in\nn09779790\tsomeone concerned with the science or art or business of cultivating the soil\nn09780395\ta military attache who is a commissioned or warrant 
officer in an air force\nn09780828\tan officer in the airforce\nn09780984\ta flighty scatterbrained simpleton\nn09781398\tsomeone who travels by airplane\nn09781504\ta person who alarms others needlessly\nn09781650\ta person with congenital albinism: white hair and milky skin; eyes are usually pink\nn09782167\ta person who drinks alcohol to excess habitually\nn09782397\ta member of a municipal legislative body (as a city council)\nn09782855\ta person with alexia\nn09783537\tsomeone to whom the title of property is transferred\nn09783776\tsomeone from whom the title of property is transferred\nn09783884\ta person who can read but is disinclined to derive information from literary sources\nn09784043\ta mathematician whose specialty is algebra\nn09784160\tsomeone who communicates in allegories\nn09784564\ta speaker or writer who makes use of alliteration\nn09785236\tan official in a British hospital who looks after the social and material needs of the patients\nn09785659\ta mountain climber who specializes in difficult climbs\nn09785891\ta boy serving as an acolyte\nn09786115\ta singer whose voice lies in the alto clef\nn09787534\ta diplomat of the highest rank; accredited as representative from one country to another\nn09787765\tan informal representative\nn09788073\tan attacker who waits in a concealed position to launch a surprise attack\nn09788237\tan adviser to the court on some matter of law who is not a party to the case; usually someone who wants to influence the outcome of a lawsuit involving matters of wide public interest\nn09789150\tsomeone who adheres to the doctrine that ordinary moral distinctions are invalid\nn09789566\tsomeone who has had a limb removed by amputation\nn09789898\tsomeone who looks for analogies or who reasons by analogy\nn09790047\tan illiterate person who does not know the alphabet\nn09790482\tsomeone who is skilled at analyzing data\nn09791014\tan analyst of conditions affecting a particular industry\nn09791419\tsomeone skilled in 
planning marketing campaigns\nn09791816\tan advocate of anarchism\nn09792125\ta detested person\nn09792555\tsomeone from whom you are descended (but usually more remote than a grandparent)\nn09792969\ta television reporter who coordinates a broadcast to which several correspondents contribute\nn09793141\ta person who lived in ancient times\nn09793352\ta person skilled in telling anecdotes\nn09793946\ta fisherman who uses a hook and line\nn09794550\tthe technician who produces animated cartoons\nn09794668\tone who accepts the doctrine of animism\nn09795010\ta commentator who writes notes to a text\nn09795124\treads news, commercials on radio or television\nn09795334\tsomeone who proclaims a message publicly\nn09796809\ta person who is opposed (to an action or policy or practice etc.)\nn09796974\ta person who is opposed to the United States and its policies\nn09797742\tsomeone who hates and would persecute Jews\nn09797873\ta soldier in the Australian and New Zealand army corps during World War I\nn09797998\ta person assumed to have been raised by apes\nn09798096\tsomeone afflicted by aphakia; someone lacking the natural lenses of the eyes\nn09800469\tthe party who appeals a decision of a lower court\nn09800964\tan official who is appointed\nn09801102\ta person who seizes or arrests (especially a person who seizes or arrests in the name of justice)\nn09801275\tthe butt of a prank played on April 1st\nn09801533\tan ambitious and aspiring young person\nn09802445\ta person who is fully aware of something and understands it\nn09802641\tsomeone who takes for his or her own use (especially without permission)\nn09802951\ta scholar who specializes in Arab languages and culture\nn09804230\ta person who archaizes\nn09805151\ta bishop of highest rank\nn09805324\ta person who is expert in the use of a bow and arrow\nn09805475\tsomeone who creates plans to be used in making something (such as buildings)\nn09806944\ta person in charge of collecting and cataloguing 
archives\nn09807075\ta senior clergyman and dignitary\nn09808080\ta follower of Aristotle or an adherent of Aristotelianism\nn09808591\ta nobleman entitled to bear heraldic arms\nn09809279\ta military attache who is a commissioned or warrant officer in an army\nn09809538\ta member of the military who is trained in engineering and construction work\nn09809749\tan officer in the armed forces\nn09809925\ta musician who adapts a composition for particular voices or instruments or for another style of performance\nn09810166\tsomeone who arrives (or has arrived)\nn09811568\ta person afflicted with arthritis\nn09811712\tsomeone who pronounces words\nn09811852\ta serviceman in the artillery\nn09813219\ta person who poses for a painter or sculptor\nn09814252\tan analyst who assays (performs chemical tests on) metals\nn09814381\tsomeone who is a member of a legislative assembly\nn09814488\ta woman assemblyman\nn09814567\ta person who assents\nn09814660\tsomeone who claims to speak the truth\nn09815455\t(law) the party to whom something is assigned (e.g., someone to whom a right or property is legally transferred)\nn09815790\ta person who contributes to the fulfillment of a need or furtherance of an effort or purpose\nn09816654\ta teacher or lower rank than an associate professor\nn09816771\ta person who joins with others in some activity or endeavor\nn09817174\ta person with subordinate membership in a society, institution, or commercial enterprise\nn09817386\ta teacher lower in rank than a full professor but higher than an assistant professor\nn09818022\ta person trained to travel in a spacecraft\nn09819477\ta scientist knowledgeable about cosmography\nn09820044\tsomeone who denies the existence of god\nn09820263\ta person trained to compete in sports\nn09821831\tsomeone who waits on or tends to or attends to the needs of another\nn09822830\tthe chief law officer of a country or state\nn09823153\ta student who attends a course but does not take it for 
credit\nn09823287\t(ancient Rome) a religious official who interpreted omens to guide public policy\nn09823502\tthe sister of your father or mother; the wife of your uncle\nn09823832\ta foreign girl serving as an au pair\nn09824135\ta person who behaves in a tyrannical manner\nn09824609\t(usually plural) persons who exercise (administrative) control over others\nn09825096\tan authority who authorizes (people or actions)\nn09825750\tsomeone whose occupation is repairing and maintaining automobiles\nn09826204\tsomeone who operates an aircraft\nn09826605\ta woman aviator\nn09826821\t(in India) a native nursemaid who looks after children\nn09827246\tused as a Hindi courtesy title; equivalent to English `Mr'\nn09827363\t(slang) sometimes used as a term of address for attractive young women\nn09828216\tan unborn child; a human fetus\nn09828403\ta member of the baby boom generation in the 1950s\nn09828988\tsomeone who runs an establishment that houses and cares for babies for a fee\nn09830194\t(football) a person who plays in the backfield\nn09830400\ta member of the House of Commons who is not a party leader\nn09830629\ta hiker who wears a backpack\nn09830759\tan expert adviser involved in making important decisions but usually lacking official status\nn09830926\tsomeone who is willing to trade favors or services for mutual advantage\nn09831962\ta person who does harm to others\nn09832456\ta worthless or immoral woman\nn09832633\ta homeless woman who carries all her possessions with her in shopping bags\nn09832978\tthe agent to whom property involved in a bailment is delivered\nn09833111\tan officer of the court who is employed to execute writs and processes and make arrests etc.\nn09833275\tthe person who delivers personal property (goods or money) in trust to the bailee in a bailment\nn09833441\ta child: son or daughter\nn09833536\tsomeone who bakes bread or cake\nn09833751\tan acrobat who balances himself in difficult positions\nn09833997\ta person who refuses to 
comply\nn09834258\ta demanding woman who destroys men's confidence\nn09834378\t(football) the player who is carrying (and trying to advance) the ball on an offensive play\nn09834699\ta trained dancer who is a member of a ballet company\nn09834885\ta man who directs and teaches and rehearses dancers for a ballet company\nn09835017\ta woman who directs and teaches and rehearses dancers for a ballet company\nn09835153\ta ballet enthusiast\nn09835230\ta team athlete who is skilled at stealing or catching the ball\nn09835348\tsomeone who flies a balloon\nn09835506\tan athlete who plays baseball\nn09836160\tsomeone who fights bulls\nn09836343\tthe bullfighter who implants decorated darts (banderillas) into the neck or shoulders of the bull during a bull fight\nn09836519\tthe principal bullfighter who is appointed to make the final passes and kill the bull\nn09836786\tthe horseman who pricks the bull with a lance early in the bullfight to goad the bull and to make it keep its head low\nn09837459\ta player in a band (especially a military band)\nn09837720\tthe person in charge of the bank in a gambling game\nn09838295\ta robber of banks\nn09838370\tsomeone who has insufficient assets to cover their debts\nn09838621\tweighs 115-126 pounds\nn09839702\ta female bartender\nn09840217\ta very wealthy or powerful businessman\nn09840435\ta British peer of the lowest rank\nn09840520\ta nobleman (in various countries) of varying rank\nn09841188\tan employee who mixes and serves alcoholic drinks at a bar\nn09841515\ta coach of baseball players\nn09841696\ta baseball player on the team at bat who is on base (or attempting to reach a base)\nn09842047\tan athlete who plays basketball\nn09842288\tsomeone skilled in weaving baskets\nn09842395\tearly Amerindians related to the Pueblo; known for skill in making baskets\nn09842528\tan adult male singer with the lowest voice\nn09842823\tthe illegitimate offspring of unmarried parents\nn09843443\t(baseball) a boy who takes care of bats and 
other baseball equipment\nn09843602\ta person who takes a bath\nn09843716\tan orderly assigned to serve a British military officer\nn09843824\tsomeone who twirls a baton\nn09844457\ta native or an inhabitant of Bavaria\nn09844898\ta person who is paid to pray for the soul of another\nn09845401\ta person who diverts suspicion from someone (especially a woman who accompanies a male homosexual in order to conceal his homosexuality)\nn09845849\ta member of the beat generation; a nonconformist in dress and behavior\nn09846142\tsomeone who gives you advice about your personal appearance\nn09846469\ta member of a nomadic tribe of Arabs\nn09846586\tsomeone suffering from enuresis; someone who urinates while asleep in bed\nn09846755\ta farmer who keeps bees for their honey\nn09846894\tsomeone whose favorite drink is beer or ale\nn09847267\ta man who is a beggar\nn09847344\ta woman who is a beggar\nn09847543\ta woman of advanced age\nn09848110\tone who believes in the existence of a god or gods\nn09848489\ta supporter who accepts something as true\nn09849167\ta person who casts metal bells\nn09849990\ta newly married man (especially one who has long been a bachelor)\nn09850760\tone of the ancient Norse warriors legendary for working themselves into a frenzy before a battle and fighting with reckless savagery and insane fury\nn09850974\tan enemy who lays siege to your position\nn09851165\tthe person who is most outstanding or excellent; someone who tops all others\nn09851575\tthe person to whom you are engaged\nn09853541\tan authoritarian leader and invader of privacy\nn09853645\ta prejudiced person who is intolerant of any opinions differing from his own\nn09853881\tan important influential person\nn09854218\tan older sister\nn09854421\tsomeone who plays billiards\nn09854915\tsomeone with special training in biochemistry\nn09855433\tsomeone who writes an account of a person's life\nn09856401\ta person with a strong interest in birds\nn09856671\ta baby born; an 
offspring\nn09856827\ta social reformer who advocates birth control and family planning\nn09857007\ta person who is sexually attracted to both sexes\nn09858165\ta person who attained the rank of expert in the martial arts (judo or karate)\nn09858299\ta criminal who extorts money from someone by threatening to expose embarrassing information about them\nn09858733\tan activist member of a largely American group of Blacks called the Nation of Islam\nn09859152\ta smith who forges and shapes iron with a hammer and anvil\nn09859285\ta dashing young man\nn09859684\ta worker who bleaches (cloth or flour etc.)\nn09859975\ta participant in a blind date (someone you meet for the first time when you have a date with them)\nn09861287\ta person dressed all in blue (as a soldier or sailor)\nn09861599\ta woman having literary or intellectual interests\nn09861863\ta person who builds boats\nn09861946\tsomeone who drives or rides in a boat\nn09862183\ta petty officer on a merchant ship who controls the work of other seamen\nn09862621\tan informal term for a British policeman\nn09863031\tsomeone who escorts and protects a prominent person\nn09863339\t(British slang) a scientist or technician engaged in military research\nn09863749\temotionally charged terms used to refer to extreme radicals or revolutionaries\nn09863936\ta Russian member of the left-wing majority group that followed Lenin and eventually became the Russian communist party\nn09864632\tan entertainer who has a sensational effect\nn09864968\ta male bound to serve without wages\nn09865068\ta female slave\nn09865162\ta female bound to serve without wages\nn09865398\tsomeone bound to labor without wages\nn09865672\ta book salesman\nn09865744\ta worker whose trade is binding books\nn09866115\tsomeone who records the transactions of a business\nn09866354\ta maker of books; someone who edits or publishes or binds books\nn09866559\tsomeone who spends a great deal of time reading\nn09866661\ta thief who steals goods that are in 
a store\nn09866817\ta person who polishes shoes and boots\nn09866922\tsomeone who makes or sells illegal liquor\nn09867069\ta maker of boots\nn09867154\tan inhabitant of a border area (especially the border between Scotland and England)\nn09867311\tsomeone who patrols the borders of a country\nn09868270\ta biologist specializing in the study of plants\nn09868782\tan opportunist who profits from the misfortunes of others\nn09868899\ta visitor of a city boulevard (especially in Paris)\nn09869317\ta hunter who kills predatory wild animals in order to collect a bounty\nn09869447\tsomeone who pursues fugitives or criminals for whom a reward is offered\nn09869578\ta member of the European royal family that ruled France\nn09870096\ta cricketer who delivers the ball to the batsman in cricket\nn09871095\ta boxer noted for an ability to deliver hard punches\nn09871229\ta male child (a familiar term of address to a boy)\nn09871681\ta boy who is a member of the Boy Scouts\nn09871867\ta man who is considered naive\nn09871952\tan extremely talented young male person\nn09872066\ta very boastful and talkative person\nn09872557\ta member of the highest of the four Hindu varnas\nn09873348\ta fighter (especially one who participates in brawls)\nn09873473\tone whose earnings are the primary source of support for their dependents\nn09873769\tsomeone who swims the breaststroke\nn09873899\ta person who breeds animals\nn09874428\ta good fellow; helpful and trustworthy\nn09874725\ta woman participant in her own marriage ceremony\nn09874862\tan unmarried woman who attends the bride at a wedding\nn09875025\tan operative who acts as a courier or go-between from a case officer to a secret agent in a hostile area\nn09875979\ta journalist who broadcasts on radio or television\nn09876701\t(Roman Catholic Church) a title given to a monk and used as form of address\nn09877288\ta brother by marriage\nn09877587\ta viewer who looks around casually without seeking anything in particular\nn09877750\ta 
native or resident of Birmingham, England\nn09877951\ta close friend who accompanies his buddies in their activities\nn09878921\tan investor with an optimistic market outlook; an investor who expects prices to rise and so buys now for resale later\nn09879552\ta hired thug\nn09880189\ta young waitress in a nightclub whose costume includes the tail and ears of a rabbit\nn09880741\ta thief who enters a building with intent to steal\nn09881265\tthe treasurer at a college or university\nn09881358\ta restaurant attendant who sets tables and assists waiters and clears away dirty dishes\nn09881895\tthe newspaper editor responsible for business news\nn09883047\ta traveler whose expenses are paid by the business he works for\nn09883452\ta person (or thing) that breaks up or overpowers something\nn09883807\ta person who meddles in the affairs of others\nn09885059\ta meddler who tends to butt in\nn09885866\ta woodworker who specializes in making furniture\nn09886403\tan attendant who carries the golf clubs for a player\nn09886540\ta military trainee (as at a military academy)\nn09888635\ta person who announces the changes of steps during a dance\nn09889065\ta female prostitute who can be hired by telephone\nn09889170\tsomeone skilled in penmanship\nn09889691\ta politician who is running for public office\nn09889941\tsomeone living temporarily in a tent or lodge for recreation\nn09890192\ta follower who is not a member of an ingroup\nn09890749\tsomeone who is considered for something (for an office or prize or honor etc.)\nn09891730\ta specialist in canon law\nn09892262\ta conservative advocate of capitalism\nn09892513\ta dining-room attendant who is in charge of the waiters and the seating of customers\nn09892693\tthe pilot in charge of an airship\nn09893191\tan officer holding a rank below a major but above a lieutenant\nn09893344\tthe leader of a group of people\nn09893502\tan animal that is confined\nn09893600\ta person held in the grip of a strong emotion or 
passion\nn09894143\t(Roman Catholic Church) one of a group of more than 100 prominent bishops in the Sacred College who advise the Pope and elect new Popes\nn09894445\ta specialist in cardiology; a specialist in the structure and function and disorders of the heart\nn09894654\tsomeone who plays (or knows how to play) card games\nn09894909\ta professional card player who makes a living by cheating at card games\nn09895222\ta professional who is intent on furthering his or her career by any possible means and often at the expense of their own integrity\nn09895480\ta man who is a careerist\nn09895561\ta person who is responsible for attending to the needs of a child or dependent adult\nn09895701\ta custodian who is hired to take care of something (property or a person)\nn09895902\tan official who performs the duties of an office temporarily\nn09896170\tsomeone who parodies in an exaggerated manner\nn09896311\ta musician who plays a carillon\nn09896401\ta singer of carols\nn09896685\ta woodworker who makes or repairs wooden objects\nn09896826\tsomeone who constantly criticizes in a petty way\nn09898020\ta follower of Cartesian thought\nn09899289\ta person responsible for receiving payments for goods and services (as in a shop or restaurant)\nn09899671\tsomeone injured or killed in an accident\nn09899782\tsomeone injured or killed or captured or missing in a military engagement\nn09899929\tsomeone whose reasoning is subtle and often specious\nn09901337\tone who instructs catechumens in preparation for baptism (especially one using a catechism)\nn09901502\ta new convert being taught the principles of Christianity by a catechist\nn09901642\tsomeone who provides food and service (as for a party)\nn09901786\tthe ecclesiastical title of the leaders of the Nestorian and Armenian churches\nn09901921\ta person who breeds and cares for cats\nn09902128\ta royalist supporter of Charles I during the English Civil War\nn09902353\ta soldier mounted on horseback\nn09902731\tsomeone 
who lives in a cave\nn09902851\tan officiating priest celebrating the Eucharist\nn09902954\ta person who is celebrating\nn09903153\ta widely known person\nn09903501\tsomeone who plays a violoncello\nn09903639\ta person who is authorized to read publications or correspondence or to watch theatrical performances and suppress in whole or in part anything considered obscene or politically unacceptable\nn09903936\tsomeone who censures or condemns\nn09904208\tsomeone who is at least 100 years old\nn09904837\ta person who takes a position in the political center\nn09905050\t(ancient Rome) the leader of 100 soldiers\nn09905185\tan accountant who has passed certain examinations and met all other statutory and licensing requirements of a United States state to be certified by that state\nn09905530\t(Yiddish) an attractive, unconventional woman\nn09906293\ta maid who is employed to clean and care for bedrooms (now primarily in hotels)\nn09906449\ta changeable or inconstant person\nn09906704\tsomeone who has won first place in a competition\nn09907804\ta retail dealer in provisions and supplies\nn09908769\ta chaplain in a prison\nn09909660\ta worker whose job is to make charcoal\nn09909929\tthe official temporarily in charge of a diplomatic mission in the absence of the ambassador\nn09910222\tthe driver of a chariot\nn09910374\ta person who charms others (usually by personal attractiveness)\nn09910556\ta British or Canadian accountant who is a member of a professional body that has a royal charter\nn09910840\ta stock market analyst who tries to predict market trends from graphs of recent prices of securities\nn09911226\ta human female employed to do housework\nn09912431\ta man with a chauvinistic belief in the inferiority of women\nn09912681\ta miserly person\nn09912907\ta native or inhabitant of Chechnya\nn09912995\tone who checks the correctness of something\nn09913329\ta spectator who shouts encouragement\nn09913455\tsomeone who leads the cheers by spectators at a sporting 
event\nn09913593\tan enthusiastic and vocal supporter\nn09915434\tEgyptian Pharaoh of the 27th century BC who commissioned the Great Pyramid at Giza\nn09915651\ta chess player of great skill\nn09916348\tthe corporate executive responsible for the operations of the firm; reports to a board of directors; may appoint other managers (including a president)\nn09917214\tthe senior officer of a service of the armed forces\nn09917345\ta person with the senior noncommissioned naval rank\nn09917481\ta member of the British Cabinet\nn09917593\ta young person of either sex\nn09918248\ta human offspring (son or daughter) of any age\nn09918554\tan immature childish person\nn09918867\ta prodigy whose talents are recognized at an early age\nn09919061\tsomeone who cleans soot from chimneys\nn09919200\ta therapist who practices chiropractic\nn09919451\ta dismissive term for a girl who is immature or who lacks respect\nn09919899\tan unfortunate person who is unable to perform effectively because of nervous tension or agitation\nn09920106\t(ancient Greece) leader of a group or festival; leader of a chorus\nn09920283\tsomeone who creates new dances\nn09920901\ta woman who dances in a chorus line\nn09921034\tone who is the object of choice; who is given preference\nn09923003\ta guide who conducts and informs sightseers\nn09923186\ta smoker of cigars\nn09923418\ta person of no influence\nn09923561\tan acrobat who performs acrobatic feats in a circus\nn09923673\ta native or naturalized member of a state or other political community\nn09923996\tthe newspaper editor in charge of editing local news\nn09924106\tan important municipal official\nn09924195\ta financier who works in one of the banks in the City of London\nn09924313\ta city dweller with sophisticated manners and clothing\nn09924437\ta leader in municipal affairs\nn09924996\ta leader of the political movement dedicated to securing equal opportunity for members of minority groups\nn09927089\tsomeone whose occupation is 
cleaning\nn09927451\ta member of the clergy and a spiritual leader of the Christian Church\nn09928136\ta clergyman or other person in religious orders\nn09928451\tan employee who performs clerical work (e.g., keeps records or accounts)\nn09928845\tan intellectual who is ostentatiously and irritatingly knowledgeable\nn09929202\tsomeone who is expert in climatology\nn09929298\tsomeone who climbs as a sport; especially someone who climbs mountains\nn09929577\ta practitioner (of medicine or psychology) who does clinical work instead of laboratory experiments\nn09930257\t(baseball) a relief pitcher who can protect a lead in the last inning or two of the game\nn09930628\ta negative term for a homosexual man who chooses not to reveal his sexual orientation\nn09930876\ta person who amuses others by ridiculous behavior\nn09931165\ta rude or vulgar fool\nn09931418\ta person who gives private instruction (as in singing, acting, etc.)\nn09931640\t(sports) someone in charge of training an athlete or a team\nn09932098\tan assistant baseball coach in charge of pitchers\nn09932336\ta man who drives a coach (or carriage)\nn09932508\tsomeone who works in a coal mine\nn09932788\ta member of a coastguard\nn09933020\tAustralian term for a pal\nn09933098\ta person who makes or repairs shoes\nn09933842\tused affectionately to refer to an eccentric but amusing old man\nn09933972\tone of two or more beneficiaries of the same benefit\nn09934337\ta subordinate who performs an important but routine function\nn09934488\ta cognitive scientist who studies the neurophysiological foundations of mental phenomena\nn09934774\ta man hairdresser\nn09935107\tsomeone who is a source of new words or new expressions\nn09935434\tan associate in an activity or endeavor or sphere of common interest\nn09936825\tan Irish girl\nn09936892\ta student enrolled in a college or university\nn09937056\ta student (or former student) at a college or university\nn09937688\ta resident of a colony\nn09937802\ta believer in 
colonialism\nn09937903\tsomeone who helps to found a colony\nn09938080\ta lyric soprano who specializes in coloratura vocal music\nn09938449\ta ceremonial escort for the (regimental) colors\nn09938991\ta person of exceptional importance and reputation\nn09940725\tan actor in a comedy\nn09940818\ta female comedian\nn09941089\tsomeone with a promising future\nn09941571\ta commissioned naval officer who ranks above a lieutenant commander and below a captain\nn09941787\tthe officer who holds the supreme command\nn09941964\tan officer in command of a military unit\nn09942697\tan official of the Communist Party who was assigned to teach party principles to a military unit\nn09942970\ta military officer holding a commission\nn09943239\ta commissioned officer in the Army or Air Force or Marine Corps\nn09943811\ta government administrator\nn09944022\ta member of a commission\nn09944160\ta member of a committee\nn09944430\ta woman who is a member of a committee\nn09945021\ta commissioned naval officer who ranks above a captain and below a rear admiral; the lowest grade of admiral\nn09945223\ta person entitled to receive Communion\nn09945319\ta socialist who advocates communism\nn09945603\ta member of the communist party\nn09945745\tsomeone who travels regularly from home in a suburb to work in a city\nn09946814\tBritish term for someone who introduces television acts or cabarets etc\nn09947127\tsomeone makes things complex\nn09950457\ta person with a compulsive disposition; someone who feels compelled to do certain things\nn09950728\tsomeone trained in computer science and linguistics who uses computers for natural language processing\nn09951070\ta scientist who specializes in the theory of computation and the design of computers\nn09951274\ta person who uses computers for work or entertainment or communication or business\nn09951524\ta fellow member of the Communist Party\nn09951616\tsomeone who attends concerts\nn09952163\tsomeone who tries to bring peace\nn09953052\tthe 
person who collects fares on a public conveyance\nn09953350\tsomeone who makes candies and other sweets\nn09953615\ta supporter of the Confederate States of America\nn09954355\ta priest who hears confession and gives absolution\nn09954639\tsomeone to whom private matters are confided\nn09955406\ta believer in the teachings of Confucius\nn09955944\tinformal abbreviation of `representative'\nn09956578\tsomeone who is victorious by force of arms\nn09957523\ta member of a Conservative Party\nn09958133\ta Protestant in England who is not a member of the Church of England\nn09958292\ta Protestant who is a follower of Anglicanism\nn09958447\tthe person to whom merchandise is delivered over\nn09958569\tthe person who delivers over or commits merchandise\nn09959142\ta lawman with less authority and jurisdiction than a sheriff\nn09959658\tan artist of the school of constructivism\nn09960688\tsomeone (a person or firm) who contracts to build things\nn09961198\ta woman singer having a contralto voice\nn09961331\ta writer whose work is published in a newspaper or magazine or as part of a book\nn09961469\tsomeone with a compulsive desire to exert control over situations and people\nn09961605\ta person who is recovering from illness\nn09961739\tthe member of a group whose duty it is to convene meetings\nn09962966\ta person serving a sentence in a jail or prison\nn09964202\ta relief pilot on an airplane\nn09964411\tsomeone who copies the words or behavior of another\nn09965515\tsomeone having the same religion as another person\nn09965787\ta defensive football player stationed outside the linebackers\nn09966470\ta supporter of corporatism\nn09966554\tsomeone who communicates by means of letters\nn09967063\tsomeone who sells or applies cosmetics\nn09967406\ta sophisticated person who has travelled in many countries\nn09967555\ta member of a Slavic people living in southern European Russia and Ukraine and adjacent parts of Asia and noted for their horsemanship and military skill; 
they formed an elite cavalry corps in czarist Russia\nn09967816\ta specialist in the systematic recording and analysis of the costs incident to production\nn09967967\tone of two actors who are given equal status as stars in a play or film\nn09968259\tsomeone who designs or supplies costumes (as for a play or masquerade)\nn09968652\ta medieval English villein\nn09968741\ta peasant farmer in the Scottish Highlands\nn09968845\tsomeone who gives advice about problems\nn09970088\tsomeone who attempts to prevent terrorism\nn09970192\ta spy who works against enemy espionage\nn09970402\tfemale equivalent of a count or earl\nn09970822\ta negotiator willing to compromise\nn09971273\ta woman who lives in the country and has country ways\nn09971385\tan advisor employed by the government to assist people in rural areas with methods of farming and home economics\nn09971839\tan attendant at the court of a sovereign\nn09972010\tthe child of your aunt or uncle\nn09972458\ta very pretty girl who works as a photographer's model\nn09972587\ta large unpleasant woman\nn09974648\ta skilled worker who practices some trade or handicraft\nn09975425\ta creator of great skill in the manual arts\nn09976024\ta gambler who plays the game of craps\nn09976283\tsomeone deranged and possibly dangerous\nn09976429\ta human being; `wight' is an archaic term\nn09976728\ta person to whom money is owed by a debtor; someone to whom an obligation exists\nn09976917\tsomeone unpleasantly strange or eccentric\nn09978442\ta specialist in criminology\nn09979321\tanyone who expresses a reasoned judgment of something\nn09979913\ta very wealthy man\nn09980458\tsomeone who questions a witness carefully (especially about testimony given earlier)\nn09980805\ta voter who is registered as a member of one political party but who votes in the primary of another party\nn09980985\tsomeone who collects and pays bets at a gaming table\nn09981092\ta male heir apparent to a throne\nn09981278\tthe wife of a crown 
prince\nn09981540\tdecoder skilled in the analysis of codes and cryptograms\nn09981939\ta junior Boy Scout\nn09982152\ta man whose wife committed adultery\nn09982525\ta member of an unorthodox cult who generally lives outside of conventional society under the direction of a charismatic leader\nn09983314\ta Mexican woman who practices healing techniques inherited from the Mayans\nn09983572\ta person authorized to conduct religious worship\nn09983889\tthe custodian of a collection (as a museum or library)\nn09984960\ta foreign purchaser who buys goods outright for resale\nn09985470\tsomeone who carves the meat\nn09985809\ta writer of science fiction set in a lawless subculture of an oppressive society dominated by computer technology\nn09985978\ta human being whose body has been taken over in whole or in part by electromechanical devices\nn09986450\ta performer on the cymbals\nn09986700\ta member of a group of ancient Greek philosophers who advocated the doctrine that virtue is the only good and that the essence of virtue is self-control\nn09986904\ta geneticist who specializes in the cellular components associated with heredity\nn09987045\ta biologist who studies the structure and function of cells\nn09987161\ta person having great power\nn09987239\ta male monarch or emperor (especially of Russia prior to 1917)\nn09988063\tan informal term for a father; probably derived from baby talk\nn09988311\ta man who works in a dairy\nn09988493\tchief lama and once ruler of Tibet\nn09988703\tsomeone who wastes time\nn09989502\ta performer who dances professionally\nn09990415\ta person who participates in a social gathering arranged for dancing (as a ball)\nn09990690\tsomeone who does clog dancing\nn09990777\ta professional teacher of dancing\nn09991740\ta political candidate who is not well known but could win unexpectedly\nn09991867\ta special loved one\nn09992538\ta participant in a date\nn09992837\ta female human offspring\nn09993252\tsomeone who takes more time than 
necessary; someone who lags behind\nn09993651\ta schoolchild at a boarding school who has meals at school but sleeps at home\nn09994400\ta laborer who works by the day; for daily wages\nn09994673\ta Protestant layman who assists the minister\nn09994808\ta woman deacon\nn09994878\ta dead shot\nn09995829\tsomeone skilled at informal chitchat\nn09996039\tsomeone who withdraws from a social group or environment\nn09996304\ta nonenterprising person who is not paying his way\nn09996481\ta person with a severe auditory impairment\nn09997622\ta person who owes a creditor; someone who has the obligation of paying a debt\nn09998788\ta member of a ship's crew who performs manual labor\nn09999135\tone who attacks the reputation of another by slander or libel\nn10000294\ta contractor concerned with the development and manufacture of systems of defense\nn10000459\ta person who believes that God created the universe and then abandoned it\nn10000787\ta person appointed or elected to represent others\nn10001217\tsomeone employed to make deliveries\nn10001481\ta political leader who seeks support by appealing to popular passions and prejudices\nn10001764\ta person with great powers and abilities\nn10002257\ta scientist who studies the growth and density of populations and their vital statistics\nn10002760\tsomeone who participates in a public display of group feeling\nn10003476\ta woman who supervises a den of Cub Scouts\nn10004718\tthe head of a department\nn10005006\ta person who has deposited money in a bank or similar institution\nn10005934\ta member of the lower chamber of a legislative assembly (such as in France)\nn10006177\ta doctor who specializes in the physiology and pathology of the skin\nn10006748\tsomeone who descends\nn10007684\ta ballplayer who is designated to bat in place of the pitcher\nn10007809\ta person who devises plots or intrigues\nn10007995\ta hotel receptionist\nn10008123\ta military officer who is not assigned to active duty\nn10008254\tthe police 
sergeant on duty in a police station\nn10009162\tsome held in custody\nn10009276\ta police officer who investigates crimes\nn10009484\tan investigator engaged or employed in obtaining information not easily available to the public\nn10009671\tone who disparages or belittles the worth of something\nn10010062\tsomeone who develops real estate (especially someone who prepares a site for residential or commercial use)\nn10010243\tan ideological defector from the party line (especially from orthodox communism)\nn10010632\tsomeone to whom property (especially realty) is devised by will\nn10010767\tsomeone who devises real property in a will\nn10010864\tsomeone who eats greedily or voraciously\nn10011360\ta logician skilled in dialectic\nn10011486\tsomeone who keeps a diary or journal\nn10012484\ta specialist in the study of nutrition\nn10013811\ta bishop having jurisdiction over a diocese\nn10015215\tsomeone who supervises the actors and directs the action in the production of a show\nn10015485\tmember of a board of directors\nn10015792\ta middle-aged man with lecherous inclinations\nn10015897\tsomeone who refuses to believe (as in a divinity)\nn10017272\ta person who announces and plays popular recorded music\nn10017422\temployee of a transportation company who controls the departures of vehicles according to weather conditions and in the interest of efficient service\nn10018747\ta painter who introduces distortions\nn10018861\tsomeone who markets merchandise\nn10019072\tan official prosecutor for a judicial district\nn10019187\ta manager who supervises the sales activity for a district\nn10019406\tsomeone who dives (into water)\nn10020366\ta divorced woman or a woman who is separated from her husband\nn10020533\ta woman who was formerly a particular man's wife\nn10020670\ta lawyer specializing in actions for divorce or annulment\nn10020807\ta teacher at some universities\nn10020890\ta licensed medical practitioner\nn10022908\tsomeone whose style is out of 
fashion\nn10023264\tformerly the chief magistrate in the republics of Venice and Genoa\nn10023506\tsomeone who prevents you from enjoying something that they themselves have no need for\nn10023656\ta stubborn person of arbitrary or arrogant opinions\nn10024025\tan adult with a long narrow head\nn10024362\ta person (not necessarily a spouse) with whom you cohabit and share a long-term sexual relationship\nn10024937\ta native or inhabitant of the Dominican Republic\nn10025060\ta clergyman; especially a settled minister or parson\nn10025295\tthe head of an organized crime family\nn10025391\tan adherent of Donatism\nn10025635\tan Italian woman of rank\nn10026976\tsomeone who sleeps in any convenient place\nn10027246\tsomeone who closely resembles a famous person (especially an actor)\nn10027590\ta person who says one thing and does another\nn10028402\ta person who is destitute\nn10028541\ta woman who is the senior member of a group\nn10029068\tan artist skilled at drawing\nn10030277\tsomeone who writes plays\nn10032987\tsomeone who is dreaming\nn10033412\tsomeone who makes or mends dresses\nn10033572\tsomeone who models dresses\nn10033663\ta person who dribbles\nn10033888\ta basketball player who is dribbling the ball to advance it\nn10034201\ta person who drinks alcoholic beverages (especially to excess)\nn10034614\ta person who drinks liquids\nn10035952\ta narcotics addict\nn10036266\ta person who takes drugs\nn10036444\ta pre-Christian priest among the Celts of ancient Gaul and Britain and Ireland\nn10036692\ta female drum major\nn10036929\tsomeone who plays a drum\nn10037080\tsomeone who is intoxicated\nn10037385\ta chronic drinker\nn10037588\tan adherent of an esoteric monotheistic religious sect living in the relative security of the mountains of Syria and Lebanon who believes that Al-hakim was an incarnation of God\nn10037922\ta reformer who opposes the use of intoxicating beverages\nn10038119\ta nurse who cares for but does not suckle an infant\nn10038409\tthe 
wife of a duke or a woman holding ducal title in her own right\nn10038620\ta British peer of the highest rank\nn10039271\tan incompetent or clumsy person\nn10039946\ta basketball player who is able to make dunk shots\nn10040240\ta counselor who admonishes frankly and sternly\nn10040698\ta person suffering from indigestion\nn10040945\tan alert and energetic person\nn10041373\ta British peer ranking below a marquess and above a viscount\nn10041887\tsomeone who earn wages in return for their labor\nn10042690\ta secret listener to private conversations\nn10042845\ta person with an unusual or odd personality\nn10043024\tsomeone who selects according to the eclectic method\nn10043491\tan economist who uses statistical and mathematical methods\nn10043643\tan expert in the science of economics\nn10044682\ta person with a thin body\nn10044879\ta person responsible for the editorial aspects of publication; the person who determines the final content of a text (especially of a newspaper or magazine)\nn10047199\ta self-centered person with little regard for others\nn10047459\ta conceited and self-centered person\nn10048117\ta man who ejaculates semen\nn10048367\tany of various church officers\nn10048612\tany influential person whose advice is highly respected\nn10048836\tofficial who won the office in a free election\nn10049363\ta person who installs or repairs electrical or telephone lines\nn10050043\tthe author of a mournful poem lamenting the dead\nn10050880\ta public speaker trained in voice production and gesture and delivery\nn10051026\tsomeone who frees others from bondage\nn10051761\ta physician who specializes in embryology\nn10051861\ta professor or minister who is retired from assigned duties\nn10051975\tsomeone who leaves one country to settle in another\nn10052694\tsomeone sent on a mission to represent the interests of someone else\nn10053439\ta woman emperor or the wife of an emperor\nn10053808\ta worker who is hired to perform a job\nn10054657\ta person or firm 
that employs workers\nn10055297\ta female sorcerer or magician\nn10055410\ta woman who is considered to be dangerously seductive\nn10055566\ta person who compiles information for encyclopedias\nn10055730\ta heavy person with a soft and rounded body\nn10055847\tan armed adversary (especially a member of an opposing military force)\nn10056103\tsomeone who imparts energy and vitality and spirit to other people\nn10056611\ta man at one end of a row of people\nn10056719\ta man at one end of line of performers in a minstrel show; carries on humorous dialogue with the interlocutor\nn10057271\ta person who transfers his ownership interest in something by signing a check or negotiable security\nn10058411\ta person who delights in having or using or experiencing something\nn10058962\ta female enlisted person in the armed forces\nn10059067\tsomeone who appreciates wine\nn10060075\tone who enters a competition\nn10060175\tsomeone who enters\nn10060352\tsomeone who organizes a business venture and assumes the risk for it\nn10061043\ta diplomat having less authority than an ambassador\nn10061195\ta person who is trained in or engaged in enzymology\nn10061431\ta bishop or metropolitan in charge of an eparchy in the Eastern Church\nn10061882\ta medical scientist who studies the transmission and control of epidemic diseases\nn10062042\tan inferior imitator of some distinguished writer or artist of musician\nn10062176\ta person who has epilepsy\nn10062275\ta member of the Episcopal church\nn10062492\ta personal attendant of the British royal family\nn10062594\tan official charged with the care of the horses of princes or nobles\nn10062716\tan erotic person\nn10062905\tsomeone who escapes\nn10062996\ta person who escapes into a world of fantasy\nn10063635\ta member of a people inhabiting the Arctic (northern Canada or Greenland or Alaska or eastern Siberia); the Algonquians called them Eskimo (`eaters of raw flesh') but they call themselves the Inuit (`the 
people')\nn10063919\tsomeone employed to spy on another country or business competitor\nn10064831\ta worker skilled in giving beauty treatments (manicures and facials etc.)\nn10064977\tsomeone who etches\nn10065758\tan anthropologist who studies ethnology\nn10066206\ta student enrolled in (or graduated from) Eton College\nn10066314\ta lexicographer who specializes in etymology\nn10067011\ta preacher of the Christian gospel\nn10067305\t(when capitalized) any of the spiritual leaders who are assumed to be authors of the Gospels in the New Testament: Matthew, Mark, Luke, and John\nn10067600\tsomeone who plans social events as a profession (usually for government or corporate officials)\nn10067968\tan investigator who observes carefully\nn10068234\tsomeone who administers a test to determine your qualifications\nn10068425\ta viceroy who governed a large province in the Roman Empire\nn10069296\ta performer (usually of musical works)\nn10069981\ta secretary having administrative duties and responsibilities\nn10070108\ta vice president holding executive power\nn10070377\ta woman executor\nn10070449\ta person skilled in exegesis (especially of religious texts)\nn10070563\tsomeone who organizes an exhibit for others to see\nn10070711\tsomeone who deliberately behaves in such a way as to attract attention\nn10071332\ta person who is voluntarily absent from home or country\nn10071557\ta philosopher who emphasizes freedom of choice and personal responsibility but who regards human existence in a hostile universe as unexplainable\nn10072054\tsomeone who practices exorcism\nn10074249\ta person who was formerly a spouse\nn10074578\ta nonresident doctor or medical student; connected with a hospital but not living there\nn10074735\ta person who holds extreme views\nn10074841\t(psychology) a person concerned more with practical realities than with inner thoughts and feelings\nn10075299\ta spectator who can describe what happened\nn10075693\tsomeone who makes progress 
easier\nn10076224\ta generous benefactor\nn10076483\ta Spanish member of General Franco's political party\nn10076604\ta person who breeds and trains hawks and who follows the sport of falconry\nn10076957\tsomeone who falsifies\nn10077106\ta person attached to the household of a high official (as a pope or bishop) who renders service in return for support\nn10077593\tan ardent follower and admirer\nn10077879\ta person motivated by irrational enthusiasm (as for a cause)\nn10078131\ta person having a strong liking for something\nn10078719\ta boy who has grown up on a farm\nn10078806\ta person who operates a farm\nn10079399\ta hired hand on a farm\nn10079893\tan adherent of fascism or other right-wing authoritarian views\nn10080117\tan Italian fascist under Mussolini\nn10080508\tanyone who submits to the belief that they are powerless to change their destiny\nn10080869\ta male parent (also used as a term of address to your father)\nn10081204\t`Father' is a term of address for priests in some churches (especially the Roman Catholic Church or the Orthodox Catholic Church); `Padre' is frequently used in the military\nn10081842\ta man (often a powerful or influential man) who arouses emotions usually felt for your real father and with whom you identify psychologically\nn10082043\tthe father of your spouse\nn10082299\tan excessively polite and well-dressed boy\nn10082423\ta member of a group of French painters who followed fauvism\nn10082562\ta United States politician favored mainly in his or her home state\nn10082687\ta professional boxer who weighs between 123 and 126 pounds\nn10082997\tan advocate of federalism\nn10083677\ta communist sympathizer (but not a member of the Communist Party)\nn10083823\ta woman who is an aristocrat\nn10084043\ta child who is female\nn10084295\ta youthful female person\nn10085101\ta dealer in stolen property\nn10085869\ta man who is engaged to be married\nn10086383\ta member of the cricket team that is fielding rather than 
batting\nn10086744\ta football official\nn10087434\ta military or naval pilot of fighter planes\nn10087736\ta party who files a notice with a law court\nn10088200\tthe person who directs the making of a film\nn10090745\tsomeone who comes upon something after searching\nn10091349\tthe head of a fire department\nn10091450\ta performer who pretends to swallow fire\nn10091564\ta belligerent grouch\nn10091651\ta member of a fire department who tries to extinguish fires\nn10091861\tan official who is responsible for the prevention and investigation of fires\nn10091997\tsomeone who walks barefoot on burning coals\nn10092488\t(baseball) the person who plays first base\nn10092643\tthe offspring who came first in the order of birth\nn10092794\tthe wife of a chief executive\nn10092978\ta commissioned officer in the Army or Air Force or Marines ranking above a 2nd lieutenant and below a captain\nn10093167\tsomeone convicted for the first time\nn10093475\ta sergeant in the Army above the rank of staff sergeant and below master sergeant\nn10093818\tsomeone who sells fish\nn10094320\ta person who whips himself as a religious penance\nn10094584\ta senior naval officer above the rank of captain\nn10094782\ta slick spokesperson who can turn any criticism to the advantage of their employer\nn10095265\ta back stationed wide of the scrimmage line; used as a pass receiver\nn10095420\ta young woman in the 1920s who flaunted her unconventional conduct and dress\nn10095769\tan associate who shares an apartment with you\nn10095869\ta person who uses flattery\nn10096126\ta female fool\nn10096508\ta medical officer specializing in aviation medicine\nn10097262\tan employee of a retail store who supervises sales personnel and helps with customer problems\nn10097477\tsomeone who is unsuccessful\nn10097590\ta native or resident of Florence, Italy\nn10097842\ta young girl who carries flowers in a (wedding) procession\nn10097995\ta woman who sells flowers in the street\nn10098245\tsomeone who plays 
the flute\nn10098388\ta debtor who flees to avoid paying\nn10098517\tan amateur boxer who weighs no more than 112 pounds\nn10098624\tweighs no more than 115 pounds\nn10098710\ta personal enemy\nn10098862\tsomeone who does folk dances\nn10099002\ta folk writer who composes in verse\nn10099375\ta person who accepts the leadership of another\nn10101308\ta football player who has achieved a reputation for success\nn10101634\tan athlete who plays American football\nn10101981\ta man employed as a servant in a large establishment (as a palace) to run errands and do chores\nn10102800\tthe founder of a family\nn10103155\ta woman ancestor\nn10103228\ta spy for a foreign country\nn10103921\tsomeone who is excluded from or is not a member of a group\nn10104064\ta person who exercises control and makes decisions\nn10104487\ta man who is foreperson of a jury\nn10104756\tsomeone trained in forestry\nn10104888\ta woman in charge of a group of workers\nn10105085\tsomeone who makes copies illegally\nn10105733\tthe person who plays the position of forward in certain games, such as basketball, soccer, or hockey\nn10105906\tyour foster brother is a male who is not a son of your parents but who is raised by your parents\nn10106387\ta man who is a foster parent\nn10106509\ta woman who is a foster parent and raises another's child\nn10106995\tyour foster sister is a female who is not a daughter of your parents but who is raised by your parents\nn10107173\tsomeone who is raised as a son although not related by birth\nn10107303\ta person who founds or establishes some institution\nn10108018\ta woman founder\nn10108089\tsomeone who has run the mile in less that 4 minutes\nn10108464\tsomeone who writes a new law or plan\nn10108832\ta person who hates France and everything French\nn10109443\ta person or animal that is markedly unusual or deformed\nn10109662\tsomeone acting freely or even irresponsibly\nn10109826\t(sports) a professional athlete who is free to sign a contract to play for any 
team\nn10110093\tone of an interracial group of civil rights activists who rode buses through parts of the South in order to protest racial segregation\nn10110731\tsomeone who gratifies physical appetites (especially for food and drink) with more than the usual freedom\nn10110893\tsomeone who takes advantage of the generosity of others\nn10111358\tan advocate of unrestricted international trade\nn10111779\ta person who follows the basic theories or practices of Sigmund Freud\nn10111903\ta male member of a religious order that originally relied solely on alms\nn10112129\ta male religious living in a cloister and devoting himself to contemplation and prayer and work\nn10113249\ta woman who lives on the frontier\nn10113583\ta person used as a cover for some questionable activity\nn10113869\tsomeone who masturbates by rubbing against another person (as in a crowd)\nn10114476\ta stupid despised man\nn10114550\tsomeone who engages in sexual intercourse\nn10114662\ta conservative who is old-fashioned or dull in attitude or appearance\nn10115430\t(football) the running back who plays the fullback position on the offensive team\nn10115946\tan acrobat who performs on a tightrope or slack rope\nn10116370\ta supporter of fundamentalism\nn10116478\tsomeone who solicits financial contributions\nn10116702\ta theologian who believes that the Scripture prophecies of the Apocalypse (the Book of Revelation) will be fulfilled in the future\nn10117017\ta person who delights in designing or building or using gadgets\nn10117267\tsomeone who writes comic material for public performers\nn10117415\ta comedian who uses gags\nn10117739\ta person who gains weight\nn10117851\talliterative term for girl (or woman)\nn10118301\ta disreputable or clumsy man\nn10118743\ta musician who performs upon the viola da gamba\nn10118844\ta person who wagers money on the outcome of games or sporting events\nn10119609\ta girl of impish appeal\nn10120330\tsomeone employed to collect and dispose of 
refuse\nn10120671\tsomeone employed to work in a garden\nn10121026\tsomeone who cuts cloth etc. to measure in making garments\nn10121246\tsomeone who kills by strangling\nn10121714\tsomeone employed by a gas company\nn10121800\ta physician who specializes in diseases of the gastrointestinal tract\nn10122300\ta person who gathers\nn10122531\ta spectator who stares stupidly without intelligent awareness\nn10123122\ta French policeman\nn10123844\ta general officer of the highest rank\nn10126177\tsomeone who originates or causes or initiates something\nn10126424\ta biologist who specializes in genetics\nn10126708\ta natural father or mother\nn10127186\tinformal abbreviation of `gentleman'\nn10127689\ta specialist in geology\nn10128519\ta geologist who uses physical principles to study the properties of the earth\nn10128748\ta writer who gives the credit of authorship to someone else\nn10129338\tthe idealized American girl of the 1890s as pictured by C. D. Gibson\nn10129825\ta young woman\nn10130686\ta girl or young woman with whom a man is romantically involved\nn10130877\tany female friend\nn10131151\tan extremely talented young female person\nn10131268\ta member of the moderate republican party that was in power during the French Revolution; the Girondists were overthrown by their more radical rivals the Jacobins\nn10131590\ta Spanish male Gypsy\nn10131815\t(ancient Rome) a professional combatant or a captive who entertained the public by engaging in mortal combat\nn10132035\tsomeone skilled in blowing bottles from molten glass\nn10132502\tsomeone who gathers something in small pieces (e.g. 
information) slowly and carefully\nn10134178\ta person who tends a flock of goats\nn10134396\tan infant who is sponsored by an adult (the godparent) at baptism\nn10134760\tany man who serves as a sponsor for a child at baptism\nn10134982\ta person who sponsors someone (the godchild) at baptism\nn10135129\ta male godchild\nn10135197\tan employee whose duties include running errands\nn10135297\ta zealously energetic person (especially a salesman)\nn10136615\tan artisan who makes jewelry and other objects out of gold\nn10136959\tsomeone who plays the game of golf\nn10137825\ta (Venetian) boatman who propels a gondola\nn10138369\tany person who is on your side\nn10138472\ta white male Southerner with an unpretentious convivial manner and conservative or intolerant attitudes and a strong sense of fellowship with and loyalty to other members of his peer group\nn10139077\ta person who voluntarily offers help or sympathy in times of trouble\nn10139651\ta journalist who writes a column of gossip about celebrities\nn10140051\tan attacker who gouges out the antagonist's eye\nn10140597\ta governor of high rank\nn10140683\tan unpleasant person who grabs inconsiderately\nn10140783\ta judge who assigns grades to something\nn10140929\tsomeone who has completed the course of study (including hospital practice) at a nurses training school\nn10141364\ta linguist who specializes in the study of grammar and syntax\nn10141732\ta female grandchild\nn10142166\ta middle-aged or elderly woman who is stylish and highly respected\nn10142391\tthe father of your father or mother\nn10142537\tdirector of the court of Inquisition (especially in Spain and Portugal)\nn10142747\tthe mother of your father or mother\nn10142946\ta player of exceptional or world class skill in chess or bridge\nn10143172\ta parent of your father or mother\nn10143595\ta recipient of a grant\nn10143725\ta person who grants or gives something\nn10144338\ta man who is divorced from (or separated from) his wife\nn10145239\tan 
aunt of your father or mother\nn10145340\ta child of your grandson or granddaughter\nn10145480\ta daughter of your grandson or granddaughter\nn10145590\ta mother of your grandparent\nn10145774\ta parent of your grandparent\nn10145902\ta son of your grandson or granddaughter\nn10146002\ta son of your niece or nephew\nn10146104\ta daughter of your niece or nephew\nn10146416\ta soldier who is a member of the United States Army Special Forces\nn10146816\tan infantryman equipped with grenades\nn10146927\ta person who greets\nn10147121\ta Latin American (disparaging) term for foreigners (especially Americans and Englishmen)\nn10147262\ta person who grins\nn10147710\ta retail merchant who sells foodstuffs (and some household supplies)\nn10147935\ta man who has recently been married\nn10148035\ta man participant in his own marriage ceremony\nn10148305\ta bad-tempered person\nn10148825\ta commissioned officer (especially one in the Royal Air Force) equivalent in rank to a colonel in the army\nn10149436\ta person who grunts\nn10149867\tsomeone who guards prisoners\nn10150071\ta person who keeps watch over something or someone\nn10150794\ta person who guesses\nn10150940\ta visitor to whom hospitality is extended\nn10151133\ta customer of a hotel or restaurant etc.\nn10151261\tthe person in whose honor a gathering is held\nn10151367\ta person with temporary permission to work in another country\nn10151570\tsomeone who shows the way by leading or advising\nn10151760\ta musician who plays the guitar\nn10152306\ta noncommissioned officer ranking above a staff sergeant in the marines\nn10152616\ta Hindu or Buddhist religious leader and spiritual teacher\nn10152763\ta recognized leader in some field or of some movement\nn10153155\t(British slang) boss\nn10153414\tan informal term for a youth or man\nn10153594\tan athlete who is skilled in gymnastics\nn10153865\tsomeone who spends all leisure time playing sports or working out in a gymnasium or health spa\nn10154013\ta specialist in 
gynecology\nn10154186\ta member of a people with dark skin and hair who speak Romany and who traditionally live by seasonal work and fortunetelling; they are believed to have originated in northern India but now are living on all continents (but mostly in Europe, North Africa, and North America)\nn10154601\tone who works hard at boring tasks\nn10155222\ta programmer who breaks into computer systems in order to steal or change or destroy information as a form of cyber-terrorism\nn10155600\tan intense bargainer\nn10155849\tsomeone who cuts or beautifies hair\nn10156629\ta Muslim physician\nn10156831\ta member of a people of southeastern China (especially Hong Kong, Canton, and Taiwan) who migrated from the north in the 12th century\nn10157016\ta guard who carries a halberd (as a symbol of his duty)\nn10157128\t(football) the running back who plays the offensive halfback position\nn10157271\tone of siblings who have only one parent in common\nn10158506\ta member of the crew of a ship\nn10159045\tone who trains or exhibits animals\nn10159289\ta man skilled in various odd jobs and other small tasks\nn10159533\ta rider of a hang glider\nn10160188\ta conservative who is uncompromising\nn10160280\ta clown or buffoon (after the Harlequin character in the commedia dell'arte)\nn10160412\ta mediator who brings one thing into harmonious agreement with another\nn10161622\ta user of hashish\nn10162016\ta professional killer\nn10162194\ta person who hates\nn10162354\tsomeone who makes and sells hats\nn10164025\tthe head of a tribe or clan\nn10164233\tpresiding officer of a school\nn10164492\tthe person in charge of nursing in a medical institution\nn10165448\tsomeone who listens attentively\nn10166189\ta charming person who is irresponsible in emotional relationships\nn10166394\ta person who does not acknowledge your god\nn10167152\ta wrestler who weighs more than 214 pounds\nn10167361\tan actor who plays villainous roles\nn10167565\tsomeone who tries to embarrass you with gibes 
and questions and objections\nn10167838\tsomeone who counterbalances one transaction (as a bet) against another in order to protect against loss\nn10168012\ta respondent who avoids giving a clear direct answer\nn10168183\tsomeone motivated by desires for sensual pleasures\nn10168584\ta person who is entitled by law or by the terms of a will to inherit the estate of another\nn10168837\tan heir whose right to an inheritance cannot be defeated if that person outlives the ancestor\nn10169147\ta female heir\nn10169241\ta person who expects to inherit but whose right can be defeated by the birth of a nearer relative\nn10169419\ta rowdy or mischievous person (usually a young man)\nn10169796\tthe person who steers a ship\nn10170060\ta newly hired employee\nn10170681\ta doctor who specializes in diseases of the blood and blood-forming organs\nn10170866\ta person who has hemiplegia (is paralyzed on one side of the body)\nn10171219\t(formal) a person who announces important news\nn10171456\ta therapist who heals by the use of herbs\nn10171567\tsomeone who drives a herd\nn10172080\tone having both male and female sexual characteristics and organs; at birth an unambiguous assignment of male or female cannot be made\nn10173410\ta woman possessing heroic qualities or a woman who has performed heroic deeds\nn10173579\tsomeone addicted to heroin\nn10173665\tsomeone who worships heroes\nn10173771\ta German man; used before the name as a title equivalent to Mr in English\nn10174253\ta corrupt politician\nn10174330\ta person of intellectual or erudite tastes\nn10174445\ta senior diplomat from one country to another who is assigned ambassadorial rank\nn10174589\ta person of great ability and ambition\nn10174695\ta native of the Highlands of Scotland\nn10174971\tan arrogant or conceited person of importance\nn10175248\ta preeminent authority or major proponent of a movement or doctrine\nn10175725\tsomeone who uses force to take over a vehicle (especially an airplane) in order to reach 
an alternative destination\nn10176913\ta person who works only for money\nn10177150\ta person who is an authority on history and who studies it and writes about it\nn10178077\ta person who travels by getting free rides from passing vehicles\nn10178216\tsomeone who hits\nn10179069\ta person who pursues an activity in their spare time for pleasure\nn10180580\ta negotiator who hopes to gain concessions by refusing to come to terms\nn10180791\tan official who remains in office after his term\nn10180923\tan armed thief\nn10181445\ta male friend from your neighborhood or hometown\nn10181547\ta fellow male member of a youth gang\nn10181799\tsomeone buying a house\nn10181878\ta fellow female member of a youth gang\nn10182190\tsomeone unfortunate without housing\nn10182402\ta practitioner of homeopathy\nn10183347\ta wife who has married a man with whom she has been living for some time (especially if she is pregnant at the time)\nn10183931\tan escort for a distinguished guest or for the casket at a military funeral\nn10184505\t(rugby) the player in the middle of the front row of the scrum who tries to capture the ball with the foot\nn10185148\ta person who hopes\nn10185483\ta musician who plays a horn (especially a French horn)\nn10185793\ta man skilled in equitation\nn10186068\ta hard bargainer\nn10186143\ta woman horseman\nn10186216\ta cowboy who takes care of the saddle horses\nn10186350\tan expert in the science of cultivating plants (fruit or flowers or vegetables or ornamental plants)\nn10186686\ta chaplain in a hospital\nn10186774\tthe owner or manager of an inn\nn10187130\ta person who invites guests to a social event (such as a party in his or her own home) and who is responsible for them while they are there\nn10187491\ta woman host\nn10187990\tan owner or manager of hotels\nn10188715\ta servant who is employed to perform domestic task in a household\nn10188856\tteacher in charge of a school boardinghouse\nn10188957\tsomeone who resides in the same house with 
you\nn10189278\ta physician (especially an intern) who lives in a hospital and cares for hospitalized patients under the supervision of the medical staff of the hospital\nn10189597\ta custodian who lives in and cares for a house while the regular occupant is away (usually without an exchange of money)\nn10190122\ta commissioner in charge of public housing\nn10190516\ta seller of shoddy goods\nn10191001\ta person who hugs\nn10191388\tan advocate of the principles of humanism; someone concerned with the interests and welfare of humans\nn10191613\tsomeone devoted to the promotion of human welfare and to social reforms\nn10192839\ta well-built sexually attractive man\nn10193650\ta woman hunter\nn10194231\ta man who was formerly a certain woman's husband\nn10194775\ta geologist skilled in hydrology\nn10195056\ta person with hyperopia; a farsighted person\nn10195155\ta person who has abnormally high blood pressure\nn10195261\ta person who induces hypnosis\nn10195593\ta person who professes beliefs and opinions that he or she does not hold in order to conceal his or her real feelings or motives\nn10196404\tsomeone who cuts and delivers ice\nn10196725\tsomeone who attacks cherished ideas or traditional institutions\nn10197392\tan advocate of some ideology\nn10198437\tsomeone who is adored blindly and excessively\nn10198832\ta lover blind with admiration and devotion\nn10199251\t(Islam) the man who leads prayers in a mosque; for Shiites an imam is a recognized authority on Islamic theology and law and a spiritual guide\nn10200246\ta believer in imperialism\nn10200781\ta person whose actions and opinions strongly influence the course of events\nn10202225\ta man with whom you are in love or have an intimate relationship\nn10202624\tthe official who holds an office\nn10202763\ta person whose disease is incurable\nn10203949\ta person inducted into an organization or social group\nn10204177\tsomeone who manages or has significant financial interest in an industrial 
enterprise\nn10204833\ta person who murders an infant\nn10205231\tone of lesser rank or station or quality\nn10205344\tan inhabitant of Hell\nn10205457\t(baseball) a person who plays a position in the infield\nn10205714\tan intruder (as troops) with hostile intent\nn10206173\tone who reveals confidential information in return for money\nn10206506\tan artless innocent young girl (especially as portrayed on the stage)\nn10206629\tan actress who specializes in playing the role of an artless innocent young girl\nn10207077\ta person of great and varied learning\nn10207169\ta relative by marriage\nn10208189\ta private detective\nn10208847\ta high ranking police officer\nn10208950\ta military officer responsible for investigations\nn10209082\ta person who initiates a course of action\nn10209731\tan agent who sells insurance\nn10210137\ta person who takes part in an armed rebellion against the constituted authority (especially in the hope of improving conditions)\nn10210512\ta government analyst of information about an enemy or potential enemy\nn10210648\ta person who specializes in designing architectural interiors and their furnishings\nn10210911\ta person who takes part in a conversation\nn10211036\tthe performer in the middle of a minstrel line who engages the others in talk\nn10211666\ta chess player who has been awarded the highest title by an international chess organization\nn10211830\ta member of a socialist or communist international\nn10212231\ta specialist in internal medicine\nn10212501\tsomeone who mediates between speakers of different languages\nn10212780\tsomeone who uses art to represent something\nn10213034\t(law) a party who interposes in a pending proceeding\nn10213429\t(psychology) a person who tends to shrink from social contacts and to become preoccupied with their own thoughts\nn10214062\tsomeone who enters by force in order to conquer\nn10214390\tan official who can invalidate or nullify\nn10215623\tsomeone who investigates\nn10216106\tsomeone who 
commits capital in order to gain financial returns\nn10216403\tsomeone who watches examination candidates to prevent cheating\nn10217208\tsomeone who is indifferent or hostile to religion\nn10218043\ta student or graduate at an Ivy League school\nn10218164\ta person able to do a variety of different jobs acceptably well\nn10218292\ta follower of Andrew Jackson or his ideas\nn10219240\tan unknown or fictitious woman who is a party to legal proceedings\nn10219453\ta loyal supporter\nn10219879\ta member of an Indo-European people widely scattered throughout the northwest of the Indian subcontinent and consisting of Muslims and Hindus and Sikhs\nn10220080\ta native or inhabitant of Java\nn10220924\tsomeone with two personalities - one good and one evil\nn10221312\ta professional clown employed to entertain a king or nobleman in the Middle Ages\nn10221520\ta member of the Jesuit order\nn10222170\ta shameless impudent scheming woman\nn10222259\ta woman who jilts a lover\nn10222497\tsomeone who buys large quantities of goods and resells to merchants rather than to the ultimate customers\nn10222716\tan applicant who is being considered for a job\nn10223069\tsomeone whose comfort is actually discouraging\nn10223177\tsomeone employed to ride horses in horse races\nn10223606\tan unknown or fictitious man who is a party to legal proceedings\nn10224578\ta writer for newspapers and magazines\nn10225219\ta public official authorized to decide questions brought before a court of justice\nn10225931\tan officer assigned to the judge advocate general\nn10226413\ta performer who juggles objects and performs tricks of manual dexterity\nn10227166\ta follower or advocate of Carl Jung's theories\nn10227266\tthe younger of two persons\nn10227393\ta third-year undergraduate\nn10227490\ta son who has the same first name as his father\nn10227698\tweighs no more than 130 pounds\nn10227793\tweighs no more than 154 pounds\nn10227985\ta legal scholar versed in civil law or the law of 
nations\nn10228278\tsomeone who serves (or waits to be called to serve) on a jury\nn10228468\ta local magistrate with limited powers\nn10228592\tformerly a high judicial officer\nn10228712\ta masked dancer during a Pueblo religious ceremony who is thought to embody some particular spirit\nn10229883\ta musician who plays a keyboard instrument\nn10230216\tone of the Turkish viceroys who ruled Egypt between 1867 and 1914\nn10233248\tan important person who can bring leaders to power through the exercise of political influence\nn10235024\ta competitor who holds a preeminent position\nn10235269\tCounsel to the Crown when the British monarch is a king\nn10235385\ta barrister selected to serve as counsel to the British ruler\nn10236304\ta person having kinship with another or others\nn10236521\tone related on the mother's side\nn10236842\ta person with unusual sexual tastes\nn10237069\ta female relative\nn10237196\tsomeone who kisses\nn10237464\thelp hired to work in the kitchen\nn10237556\tan enlisted person who is assigned to assist the cooks\nn10237676\ta member of the Ku Klux Klan\nn10237799\tsomeone with an irrational urge to steal in the absence of an economic motive\nn10238272\ta person in a kneeling position\nn10238375\toriginally a person of noble birth trained to arms and chivalry; today in Great Britain a person honored by the sovereign for personal merit\nn10239928\t(Yiddish) a big shot who knows it and acts that way; a boastful immoderate person\nn10240082\ta person who knows or apprehends\nn10240235\tsomeone who thinks he knows everything and refuses to accept advice or information from others\nn10240417\ta member of a kolkhoz\nn10240821\ta member of the royal or warrior Hindu caste\nn10241024\tan assistant (often the father of the soon-to-be-born child) who provides support for a woman in labor by encouraging her to use techniques learned in childbirth-preparation classes\nn10241300\tsomeone who works with their hands; someone engaged in manual 
labor\nn10242328\ta member of the British Labour Party\nn10243137\ta polite name for any woman\nn10243273\ta lady appointed to attend to a queen or princess\nn10243483\ta maid who is a lady's personal attendant\nn10243664\ta Tibetan or Mongolian priest of Lamaism\nn10243872\ta sweet innocent mild-mannered person (especially a child)\nn10244108\tan elected official still in office but not slated to continue\nn10244359\t(when gas was used for streetlights) a person who lights and extinguishes streetlights\nn10244913\ta person who administers a landed estate\nn10245029\ta count who had jurisdiction over a large territory in medieval Germany\nn10245341\tan inexperienced sailor; a sailor on the first voyage\nn10245507\ta person who lives and works on land\nn10245639\ta holder or proprietor of land\nn10245863\tsomeone who arranges features of the landscape or garden attractively\nn10246317\ta cross-country skier\nn10246395\ta person who languishes\nn10246703\tan expert on precious stones and the art of cutting and engraving them\nn10247358\ta girl or young woman who is unmarried\nn10247880\ta person who is a member of those peoples whose languages derived from Latin\nn10248008\tan inhabitant of ancient Latium\nn10248198\ta person who is broad-minded and tolerant (especially in standards of religious belief and conduct)\nn10248377\tbeliever in imminent approach of the millennium; practitioner of active evangelism\nn10249191\ta solicitor in Scotland\nn10249270\ta maker of laws; someone who gives a code of laws\nn10249459\tan officer of the law\nn10249869\ta student in law school\nn10249950\ta professional person authorized to practice law; conducts lawsuits or gives legal advice\nn10250712\ta layman who is authorized by the bishop to read parts of the service in an Anglican or Episcopal church\nn10251329\ta lazy person\nn10251612\ta surreptitious informant\nn10252075\ta tenant who holds a lease\nn10252222\ta public lecturer at certain universities\nn10252354\tsomeone who 
reads the lessons in a church service; someone ordained in a minor order of the Roman Catholic Church\nn10252547\tsomeone who lectures professionally\nn10253122\ta person who uses the left hand with greater skill than the right\nn10253296\ta personal representative with legal standing (as by power of attorney or the executor of a will)\nn10253479\ta member of a legation\nn10253611\tsomeone to whom a legacy is bequeathed\nn10253703\ta soldier who is a member of a legion (especially the French Foreign Legion)\nn10255459\tan athlete who has earned a letter in a school sport\nn10257221\tsomeone who releases people from captivity or bondage\nn10258602\tan official who can issue a license or give authoritative permission (especially one who licenses publications)\nn10258786\tholds a license (degree) from a (European) university\nn10259348\ta commissioned military officer\nn10259780\ta commissioned officer in the United States Army or Air Force or Marines holding a rank above major and below colonel\nn10259997\ta commissioned officer in the Navy ranking above a lieutenant and below a commander\nn10260473\tan officer holding a commissioned rank in the United States Navy or United States Coast Guard; below lieutenant and above ensign\nn10260706\ta living person\nn10260800\tan attendant employed at a beach or pool to protect swimmers from accidents\nn10261211\ta tenant whose legal right to retain possession of buildings or lands lasts as long as they (or some other person) live\nn10261511\tan amateur boxer who weighs no more than 106 pounds\nn10261624\ta professional boxer who weighs between 169 and 175 pounds\nn10261862\tan amateur boxer who weighs no more than 179 pounds\nn10262343\ta woman inconstant in love\nn10262445\ta professional boxer who weighs between 131 and 135 pounds\nn10262561\ta wrestler who weighs 139-154 pounds\nn10262655\tan amateur boxer who weighs no more than 132 pounds\nn10262880\ta very small person (resembling a Lilliputian)\nn10263146\ta specialist 
in the study of freshwater ponds and lakes\nn10263411\tone of the players on the line of scrimmage\nn10263790\ta commissioned officer with combat units (not a staff officer or a supply officer)\nn10265281\tsomeone who tries to attract social lions as guests\nn10265801\ta speaker who lisps\nn10265891\tassessor who makes out the tax lists\nn10266016\ta critic of literature\nn10266328\ta person who can read and write\nn10266848\t(law) a party to a lawsuit; someone involved in litigation\nn10267166\ta person who litters public places with refuse\nn10267311\ta younger brother\nn10267865\ta younger sister\nn10268629\tsomeone who is employed to persuade legislators to vote for legislation that favors the lobbyist's employer\nn10269199\tsomeone who makes or repairs locks\nn10269289\tsomeone (physician or clergyman) who substitutes temporarily for another member of the same profession\nn10271677\ta titled peer of the realm\nn10272782\ta gambler who loses a bet\nn10272913\ta contestant who loses the contest\nn10273064\ta person with a record of failing; someone who loses consistently\nn10274173\ta successful womanizer; a man who behaves selfishly in his sexual relationships with women\nn10274318\ta person who causes trouble by speaking indiscreetly\nn10274815\tan undergraduate who is not yet a senior\nn10275249\ta native of the Lowlands of Scotland\nn10275395\ta person who is loyal to their allegiance (especially in times of revolt)\nn10275848\tany opponent of technological progress\nn10276045\ta person who fells trees\nn10276477\ta taxonomist who classifies organisms into large groups on the basis of major characteristics\nn10276942\tan archaic term for a lunatic\nn10277027\ta person with a mania for setting things on fire\nn10277638\ta musician who plays the lute\nn10277815\tfollower of Lutheranism\nn10277912\ta person who writes the words for songs\nn10278456\tan official who carries a mace of office\nn10279018\ta craftsman skilled in operating machine 
tools\nn10279778\ttitle used for a married Frenchwoman\nn10280034\tan unnaturally frenzied or distraught woman\nn10280130\tan artist of consummate skill\nn10280598\ta reformed prostitute\nn10280674\tsomeone who performs magic tricks to amuse an audience\nn10281546\ta magician or sorcerer of ancient times\nn10281770\ta great rani; a princess in India or the wife of a maharaja\nn10281896\t(Hinduism) term of respect for a brahmin sage\nn10282482\tan unmarried girl (especially a virgin)\nn10282672\ta female domestic\nn10283170\ta commissioned military officer in the United States Army or Air Force or Marines; below lieutenant colonel and above captain\nn10283366\ta university student who is studying a particular field as the principal subject\nn10283546\tthe chief steward or butler of a great household\nn10284064\ta person who makes things\nn10284871\ta newcomer to Hawaii\nn10284965\ta person who is discontented or disgusted\nn10286282\tthe leader of a town or community in some parts of Asia Minor and the Indian subcontinent\nn10286539\tsomeone shirking their duty by feigning illness or incapacity\nn10286749\ta believer in Malthusian theory\nn10288964\tany handsome young man\nn10289039\tthe generic use of the word to refer to any human being\nn10289176\ta male subordinate\nn10289462\ta woman manager\nn10289766\tany high government official or bureaucrat\nn10290422\ta person skilled in maneuvering\nn10290541\ta person who has an obsession with or excessive enthusiasm for something\nn10290813\tan adherent of Manichaeism\nn10290919\ta beautician who cleans and trims and polishes the fingernails\nn10291110\ta person who handles things manually\nn10291469\ta heavily armed and mounted soldier in medieval times\nn10291822\tsomeone inclined to act first and think later\nn10291942\ta man devoted to literary or scholarly activities\nn10292316\tsomeone who manufactures something\nn10293332\twalks with regular or stately step\nn10293590\ta noblewoman ranking below a duchess and 
above a countess\nn10293861\ta German nobleman ranking above a count (corresponding in rank to a British marquess)\nn10294020\tthe military governor of a frontier province in medieval Germany\nn10294139\ta member of the United States Marine Corps\nn10295371\ta British peer ranking below a duke and above an earl\nn10295479\tnobleman (in various countries) ranking above a count\nn10296176\t(in some countries) a military officer of highest rank\nn10296444\tsomeone who demands exact conformity to rules and forms\nn10297234\ta person or animal that is adopted by a team or other group as a symbolic figure\nn10297367\tsomeone who obtains pleasure from receiving punishment\nn10297531\ta craftsman who works with stone or brick\nn10297841\ta participant in a masquerade\nn10298202\ta male massager\nn10298271\ta female massager\nn10298647\tdirects the work of others\nn10298912\tan officer who is licensed to command a merchant ship\nn10299125\tthe senior petty officer; responsible for discipline aboard ship\nn10299250\ta person who acts as host at formal occasions (makes an introductory speech and introduces other speakers)\nn10299700\ta person who practices masturbation\nn10299875\tsomeone who arranges (or tries to arrange) marriages for others\nn10300041\tthe officer below the master on a commercial ship\nn10300154\tinformal term for a friend of the same sex\nn10300303\tthe partner of an animal (especially a sexual partner)\nn10300500\tan informal use of the Latin word for mother; sometimes used by British schoolboys or used facetiously\nn10300654\ta person judged suitable for admission or employment\nn10300829\tsomeone who thinks that nothing exists but physical matter\nn10302576\ta female head of a family or tribe\nn10302700\ta feisty older woman with a big bosom (as drawn in cartoons)\nn10302905\tsomeone who has been admitted to a college or university\nn10303037\ta married woman (usually middle-aged with children) who is staid and dignified\nn10303814\tthe head of a city 
government\nn10304086\tthe wife of a mayor\nn10304650\ta person trained to design and construct machines\nn10304914\t(golf) the winner at medal play of a tournament\nn10305635\ta medical practitioner in the armed forces\nn10305802\tsomeone who practices medicine\nn10306004\ta scientist who studies disease processes\nn10306279\tsomeone who serves as an intermediary between the living and the dead\nn10306496\ta pathological egotist\nn10306595\tsomeone subject to melancholia\nn10306890\tan eastern Christian in Egypt or Syria who adheres to the Orthodox faith as defined by the council of Chalcedon in 451 and as accepted by the Byzantine emperor\nn10307114\ta worker who melts substances (metal or wax etc.)\nn10308066\ta person who is not a member\nn10308168\ta member of a governing board\nn10308275\ta member of a clan\nn10308504\ta person who learns by rote\nn10308653\ta follower of Mendelism\nn10308732\ta skilled worker who mends or repairs things\nn10310783\ta member of one of the various peoples inhabiting Mesoamerica\nn10311506\t(nautical) an associate with whom you share meals in the same mess (as on a ship)\nn10311661\ta woman of mixed racial ancestry (especially mixed European and Native American ancestry)\nn10312287\ta specialist who studies processes in the earth's atmosphere that cause weather conditions\nn10312491\tpolicewoman who is assigned to write parking tickets\nn10312600\ta follower of Wesleyanism as practiced by the Methodist Church\nn10313000\ta person in western Canada who is of Caucasian and American Indian ancestry\nn10313239\tin the Eastern Orthodox Church this title is given to a position between bishop and patriarch; equivalent to archbishop in western Christianity\nn10313441\ta soprano with a voice between soprano and contralto\nn10313724\tan economist who specializes in microeconomics\nn10314054\ta man who is roughly between 45 and 65 years old\nn10314182\tsomeone who is neither a highbrow nor a lowbrow\nn10314517\tan amateur boxer who weighs 
no more than 165 pounds\nn10314836\ta woman skilled in aiding the delivery of babies\nn10315217\tthe emperor of Japan; when regarded as a religious leader the emperor is called tenno\nn10315456\ta native or inhabitant of Milan\nn10315561\ta runner in a one-mile race\nn10315730\ta braggart soldier (a stock figure in comedy)\nn10316360\tan attache who is a specialist in military matters\nn10316527\ta chaplain in one of the military services\nn10316862\ta leader of military forces\nn10317007\tany person in the armed services who holds a position of authority or command\nn10317500\ta member of the military police who polices soldiers and guards prisoners\nn10317963\tthe responsible official at a mill that is under absentee ownership\nn10318293\ta workman in a mill or factory\nn10318607\ta woman millionaire\nn10318686\ta workman who designs or erects mills and milling machinery\nn10319313\tsomeone (usually in totalitarian countries) who is assigned to watch over foreign visitors\nn10320484\tan engineer concerned with the construction and operation of mines\nn10320863\ta person appointed to a high office in the government\nn10321126\tsomeone who serves as a minister\nn10321340\ta player on a minor-league baseball team\nn10321632\tan American militiaman prior to and during the American Revolution\nn10321882\tsomeone who dislikes people in general\nn10322238\tsomeone unable to adapt to their circumstances\nn10323634\ta woman master who directs the work of others\nn10323752\tan adulterous woman; a woman who has an ongoing extramarital sexual relationship with a man\nn10323999\ta person whose ancestors belonged to two or more races\nn10324560\ta person who poses for a photographer or painter or sculptor\nn10325549\tsomeone who shows impressive and stylish excellence\nn10325774\ta person who creates models\nn10326776\ta moderator who makes less extreme or uncompromising\nn10327143\ta biologist who studies the structure and activity of macromolecules essential to 
life\nn10327987\ta native or inhabitant of Monaco\nn10328123\tan advocate of the theory that economic fluctuations are caused by increases or decreases in the supply of money\nn10328328\tsomeone whose main interest in life is moneymaking\nn10328437\tsomeone who is successful in accumulating wealth\nn10328696\ta member of the Mongoloid race\nn10328941\ta person who knows only one language\nn10329035\tan entertainer who performs alone\nn10330593\ta person who holds a second job (usually after hours)\nn10330931\ta philosopher who specializes in morals and moral problems\nn10331098\ta learned fool\nn10331167\tsomeone who does a morris dance\nn10331258\tan enemy who wants to kill you\nn10331347\tthe person who accepts a mortgage\nn10331841\tone whose business is the management of funerals\nn10332110\ta marauder and plunderer (originally operating in the bogs between England and Scotland)\nn10332385\ta woman who has given birth to a child (also used as a term of address to your mother)\nn10332861\ta term of address for a mother superior\nn10332953\ta term of address for an elderly woman\nn10333044\ta woman who evokes the feelings usually reserved for a mother\nn10333165\ta person who cares for the needs of others (especially in an overprotective or interfering way)\nn10333317\tthe mother of your spouse\nn10333439\ta boy excessively attached to his mother; lacking normal masculine interests\nn10333601\ta daughter who is favored by and similar to her mother\nn10333838\ta policeman who rides a motorcycle (and who checks the speeds of motorists)\nn10334009\ta traveler who rides a motorcycle\nn10334461\tprehistoric Amerindians who built altar mounds\nn10334782\ta flamboyant deceiver; one who attracts customers with tricks or jokes\nn10335246\ta person who is feeling grief (as grieving over someone who has died)\nn10335801\ta spokesperson (as a lawyer)\nn10335931\tsomeone who moves\nn10336411\tsomeone who goes to see movies\nn10336904\tformerly an itinerant peddler of 
muffins\nn10337488\ta neutral or uncommitted person (especially in politics)\nn10338231\ta Muslim trained in the doctrine and law of Islam; the head of a mosque\nn10338391\ta chewer who makes a munching noise\nn10339179\ta woman murderer\nn10339251\tsomeone suspected of committing murder\nn10339717\ta traveler who drives (or travels with) a dog team\nn10340312\tsomeone who plays a musical instrument (as a profession)\nn10341243\ta student of musicology\nn10341343\tsomeone who teaches music\nn10341446\ta foot soldier armed with a musket\nn10341573\ta Muslim woman\nn10341955\ta person who mutilates or destroys or disfigures or cripples\nn10342180\tsomeone who is openly rebellious and refuses to obey authorities (especially seamen or soldiers)\nn10342367\ta deaf person who is unable to speak\nn10342543\ta person who speaks softly and indistinctly\nn10342893\tsomeone who muzzles animals\nn10342992\ta native or inhabitant of ancient Mycenae\nn10343088\ta botanist who specializes in the study of fungi\nn10343355\ta person with myopia; a nearsighted person\nn10343449\ta follower who carries out orders without question\nn10343554\tsomeone who believes in the existence of realities beyond human comprehension\nn10343869\tan expert on mythology\nn10344121\ta naive or inexperienced person\nn10344203\ta worker who attaches something by nailing it\nn10344319\tan insipid weakling who is foolishly sentimental\nn10344656\tsomeone who pretends that famous people are his/her friends\nn10344774\ta person who gives a name or names\nn10345015\tyour grandmother\nn10345100\ta woman who is the custodian of children\nn10345302\ta lawman concerned with narcotics violations\nn10345422\tsomeone in love with themselves\nn10345659\tan informer or spy working for the police\nn10346015\tan advocate of national independence of or a strong national government\nn10347204\ta professional dancing girl in India\nn10347446\tnaval officer in command of a fleet of warships\nn10348526\ta member of a Naval 
Special Warfare unit who is trained for unconventional warfare\nn10349243\tsomeone who systematically obstructs some action that others want to take\nn10349750\tan early name for any Christian\nn10349836\ta member of a group of Jews who (during the early history of the Christian Church) accepted Jesus as the Messiah; they accepted the Gospel According to Matthew but rejected the Epistles of St. Paul and continued to follow Jewish law and celebrate Jewish holidays; they were later declared heretic by the Church of Rome\nn10350220\ta German member of Adolf Hitler's political party\nn10350774\t(Yiddish) a timid unfortunate simpleton\nn10351064\ta lover who necks\nn10353016\ta baby from birth to four weeks\nn10353355\ta son of your brother or sister\nn10353928\ta specialist in neurobiology\nn10354265\ta medical specialist in the nervous system and the disorders affecting it\nn10354754\tsomeone who does surgery on the nervous system (especially the brain)\nn10355142\tone who does not side with any party in a war or dispute\nn10355306\tan advocate of neutrality in international affairs\nn10355449\tany new participant in some activity\nn10355688\ta recent arrival\nn10355806\ta supporter of the economic policies in the United States known as the New Deal\nn10356450\tthe editor of a newspaper\nn10356877\tsomeone who reads out broadcast news bulletin\nn10357012\ta follower of Isaac Newton\nn10357613\ta daughter of your brother or sister\nn10357737\ta selfish person who is unwilling to give or spend\nn10358032\ta porter on duty during the night\nn10358124\tmember of a secret mounted band in United States South after the American Civil War; committed acts of intimidation and revenge\nn10358575\tsomeone who objects to siting something in their own neighborhood but does not object to it being sited elsewhere; an acronym for not in my backyard\nn10359117\tan observant Muslim woman who covers her face and hands when in public or in the presence of any man outside her immediate 
family\nn10359422\tsomeone who makes small and unjustified criticisms\nn10359546\twinner of a Nobel prize\nn10359659\tan undercover agent who is given no official cover\nn10360366\tsomeone who has announced they are not a candidate; especially a politician who has announced that he or she is not a candidate for some political office\nn10360747\ta military officer appointed from enlisted personnel\nn10361060\ta person is not easily classified and not very interesting\nn10361194\ta person who is not a driver\nn10361296\ta person who does not participate\nn10361525\ta person regarded as nonexistent and having no rights; a person whose existence is systematically ignored (especially for ideological or political reasons)\nn10362003\tsomeone who does not live in a particular place\nn10362319\ta person who does not smoke tobacco\nn10362557\ta member of the American Baptist Convention\nn10363445\tsomeone who takes notice\nn10363573\tone who writes novels\nn10364198\tsomeone who has entered a religious order but has not taken final vows\nn10364502\ta chemist who specializes in nuclear chemistry\nn10365514\tsomeone who nudges; someone who gives a gentle push\nn10366145\t(obstetrics) a woman who has never give birth to a child\nn10366276\ta mathematician specializing in number theory\nn10366966\tone skilled in caring for young children or the sick (usually under the supervision of a physician)\nn10368291\tan infant considered in relation to its nurse\nn10368528\ta voluptuously beautiful young woman\nn10368624\ta sexually attractive young woman\nn10368711\ta person seized by nympholepsy\nn10368798\ta woman with abnormal sexual desires\nn10369095\ta woman oarsman\nn10369317\ta musician who plays the oboe\nn10369417\ta person who is deliberately vague\nn10369528\tan expert who observes and comments on something\nn10369699\ta physician specializing in obstetrics\nn10369955\ta member of a military force who is residing in a conquered foreign country\nn10370381\ta believer in 
occultism; someone versed in the occult arts\nn10370955\ta connoisseur of fine wines; a grape nut\nn10371052\tsomeone who presents something to another for acceptance or rejection\nn10371221\tthe person who holds an office\nn10371330\ta young man who is employed to do odd jobs in a business office\nn10371450\tsomeone who is appointed or elected to an office and who holds a position of trust\nn10373390\ta clergyman who officiates at a religious ceremony or service\nn10373525\tany federal law-enforcement officer\nn10374541\ta worker who produces or sells petroleum\nn10374849\ta powerful person in the oil business\nn10374943\tan old person who receives an old-age pension\nn10375052\ta vivacious elderly man\nn10375314\tyour own wife\nn10375402\tan informal term for your father\nn10376523\tan elderly person\nn10376890\tan elderly man\nn10377021\ta woman who is old\nn10377185\tone of the rulers in an oligarchy\nn10377291\tan athlete who participates in the Olympic games\nn10377542\ta person who eats all kinds of foods\nn10377633\ta specialist in oncology\nn10378026\tsomeone who looks on\nn10378113\tone who practices onomancy\nn10378780\tsomeone who owns or operates a business\nn10379376\ta person who places expediency above principle\nn10380126\ta person disposed to take a favorable view of things\nn10380499\ta member of a society founded in Ireland in 1795 to uphold Protestantism and the British sovereign\nn10380672\ta person who delivers a speech or oration\nn10381804\ta male hospital attendant who has general duties that do not involve the medical treatment of patients\nn10381981\ta soldier who serves as an attendant to a superior officer\nn10382157\tthe first sergeant of a company; duties formerly included the conveyance of orders\nn10382302\ta person being ordained\nn10382480\ta clergyman appointed to prepare condemned prisoners for death\nn10382710\ta street musician who plays a hand organ or hurdy-gurdy\nn10382825\ta person who plays an organ\nn10383094\tan 
employee who sacrifices his own individuality for the good of an organization\nn10383237\ta person who brings order and organization to an enterprise\nn10383505\tsomeone who enlists workers to join a union\nn10383816\tsomeone who creates new things\nn10384214\ta zoologist who studies birds\nn10384392\ta child who has lost both parents\nn10384496\tsomeone or something who lacks support or care or supervision\nn10385566\ta therapist who manipulates the skeleton and muscles\nn10386196\tsomeone who is excellent at something\nn10386754\ta woman who spends time outdoors (e.g., hunting and fishing)\nn10386874\ta fielder in cricket who is stationed in the outfield\nn10386984\t(baseball) a person who plays in the outfield\nn10387196\tthe person who plays right field\nn10387324\t(baseball) a pitcher who throws with the right hand\nn10387836\ta person who lives away from his place of work\nn10389865\tan occupant who owns the home that he/she lives in\nn10389976\ta Japanese supervisor\nn10390600\ta collector of miscellaneous useless objects\nn10390698\tan employer who exploits Italian immigrants in the U.S.\nn10390807\tan owner or proprietor of an inn in Italy\nn10391416\ta boy who is employed to run errands\nn10393909\ta worker who is employed to cover objects with paint\nn10394434\ta member of the Paleo-American peoples who were the earliest human inhabitants of North America and South America during the late Pleistocene epoch\nn10394786\ta specialist in paleontology\nn10395073\tone of the mourners carrying the coffin at a funeral\nn10395209\tfortuneteller who predicts your future by the lines on your palms\nn10395390\tsomeone who pampers or spoils by excessive indulgence\nn10395828\tthe lama next in rank to the Dalai Lama\nn10396106\ta member of a panel\nn10396337\ta beggar who approaches strangers asking for money\nn10396727\ta freelance photographer who pursues celebrities trying to take candid photographs of them to sell to newspapers or magazines\nn10396908\ta boy who 
sells or delivers newspapers\nn10397001\tone whose occupation is decorating walls with wallpaper\nn10397142\tsomeone who passes bad checks or counterfeit paper money\nn10397392\tan American Indian infant\nn10399130\ta medieval cleric who raised money for the church by selling papal indulgences\nn10400003\ta person afflicted with paresis (partial paralysis)\nn10400108\ta member of a parish\nn10400205\ta commissioner in charge of public parks\nn10400437\tan elected member of the British Parliament: a member of the House of Commons\nn10400618\ta person who is employed to look after the affairs of businesses that are affected by legislation of the British Parliament\nn10400998\tmimics literary or musical style for comic effect\nn10401204\tsomeone who kills his or her parent\nn10401331\ta copycat who does not understand the words or acts being imitated\nn10401639\tsomeone who has or gives or receives a part or a share\nn10402709\tsomeone who works less than the customary or standard time\nn10402824\ta person involved in legal proceedings\nn10403633\ta member of a political party who follows strictly the party line\nn10403876\ta traveler riding in a vehicle (a boat or bus or car or plane or train etc) who is not operating it\nn10404426\ta student who passes an examination\nn10404998\ta workman who pastes\nn10405540\tan informal use of the Latin word for father; sometimes used by British schoolboys or used facetiously\nn10405694\ta person who requires medical care\nn10406266\ta man who is older and higher in rank than yourself\nn10406391\tany of the early biblical characters regarded as fathers of the human race\nn10406765\tthe male head of family or tribe\nn10407310\tone who loves and defends his or her country\nn10407954\tsomeone who supports or champions something\nn10408809\tsomeone who makes patterns (as for sewing or carpentry or metalworking)\nn10409459\ta person who lends money at interest in exchange for personal property that is deposited as 
security\nn10409752\ta person who pays money for something\nn10410246\ta member of a military force that is assigned (often with international sanction) to preserve peace in a trouble area\nn10410996\tone of a (chiefly European) class of agricultural laborers\nn10411356\ta person who pays more attention to formal rules and book learning than they merit\nn10411551\tsomeone who travels about selling his wares (as on the streets or at carnivals)\nn10411867\ta man who has sex (usually sodomy) with a boy as the passive partner\nn10414239\ta person who studies the theory and practice of prison management\nn10414768\tan athlete who competes in a pentathlon\nn10414865\tany member of a Pentecostal religious body\nn10415037\ta musician who plays percussion instruments\nn10416567\ta dentist specializing in diseases of the gums and other structure surrounding the teeth\nn10417288\ta member of a Kurdish guerilla organization that fights for a free Kurdish state\nn10417424\ta person of considerable prominence\nn10417551\ta person who manages the affairs of another\nn10417682\tanother word for person; a person not meriting identification\nn10417843\ta diplomat who is acceptable to the government to which he is sent\nn10417969\ta diplomat who is unacceptable to the government to which he is sent\nn10418101\ta person who represents an abstract quality\nn10418735\ta person who perspires\nn10419047\ta person whose behavior deviates from what is acceptable especially in sexual behavior\nn10419472\ta person who expects the worst\nn10419630\ta persistently annoying person\nn10419785\ta boyish or immature man; after the boy in Barrie's play who never grows up\nn10420031\tone praying humbly for something\nn10420277\ta member of a petit jury\nn10420507\tsomeone left in charge of pets while their owners are away from home\nn10420649\ta lover who gently fondles and caresses the loved one\nn10421016\tthe title of the ancient Egyptian kings\nn10421470\ta health professional trained in the art 
of preparing and dispensing drugs\nn10421956\tsomeone who makes charitable donations intended to increase human well-being\nn10422405\ta collector and student of postage stamps\nn10425946\ta wise person who is calm and rational; someone who lives a life of reason with equanimity\nn10426454\ta specialist in phonetics\nn10426630\ta specialist in phonology\nn10427223\ta journalist who presents a story primarily through the use of photographs\nn10427359\tsomeone who practices photometry\nn10427764\ttherapist who treats injury or dysfunction with exercises and other physical treatments of the disorder\nn10428004\ta scientist trained in physics\nn10431122\ta person who makes pianos\nn10431625\ta person who chooses or selects out\nn10432189\ta person who is picnicking\nn10432441\tsomeone who journeys in foreign lands\nn10432875\tan unpleasant or tiresome person\nn10432957\ta prominent supporter\nn10433077\ta consumer of amphetamine pills\nn10433452\ta person qualified to guide ships through difficult waters going into or out of a harbor\nn10433610\ta supposedly primitive man later proven to be a hoax\nn10433737\tsomeone who procures customers for whores (in England they call a pimp a ponce)\nn10435169\ta smoker who uses a pipe\nn10435251\tsomeone who is small and insignificant\nn10435716\ta person who urinates\nn10435988\t(baseball) the person who does the pitching\nn10436334\tan aggressive salesman who uses a fast line of talk to sell something\nn10437014\ta disparaging term for an appointee\nn10437137\ta miner who extracts minerals from a placer by washing or dredging\nn10437262\tsomeone who uses another person's words or ideas as if they were his own\nn10437698\tan inhabitant of a plains region (especially the Great Plains of North America)\nn10438172\ta person who makes plans\nn10438619\tthe owner or manager of a plantation\nn10438842\ta worker skilled in applying plaster\nn10439373\ta blond whose hair is a pale silvery (often artificially colored) blond\nn10439523\ta 
bore who makes excessive use of platitudes\nn10439727\ta man devoted to the pursuit of pleasure\nn10439851\ta person who participates in or is skilled at some game\nn10441037\ta companion at play\nn10441124\ta pleasing entertainer\nn10441694\tsomeone who makes or gives a pledge\nn10441962\ta diplomat who is fully authorized to represent his or her government\nn10442093\tsomeone who plies a trade\nn10442232\tsomeone who moves slowly\nn10442417\tsomeone who works slowly and monotonously for long hours\nn10442573\ta clerk who marks data on a chart\nn10443032\ta craftsman who installs and repairs pipes and fixtures and appliances\nn10443659\ta philosopher who believes that no single explanation can account for all the phenomena of nature\nn10443830\ta cleric who holds more than one benefice at a time\nn10444194\ta writer of poems (the term is usually reserved for writers of good poetry)\nn10448322\ta policeman stationed at an intersection to direct traffic\nn10448455\ta woman who is the forefront of an important enterprise\nn10449664\ta person who holds an insurance policy; usually, the client in whose name an insurance policy is written\nn10450038\tsomeone who is imprisoned because of their political views\nn10450161\ta social scientist specializing in the study of government\nn10450303\ta person active in party politics\nn10451450\ta schemer who tries to gain advantage in an organization in sly or underhanded ways\nn10451590\tsomeone who conducts surveys of public opinion\nn10451858\ta person or organization that causes pollution of the environment\nn10453184\tsomeone who shoots pool\nn10455619\ta painter or drawer of portraits\nn10456070\ta woman poseur\nn10456138\tsomeone who emphasizes observable facts and excludes metaphysical speculation about origins or ultimate causes\nn10456696\ta scholar or researcher who is involved in academic study beyond the level of a doctoral degree\nn10457214\ta female poster child\nn10457444\tsomeone who assumes or takes something 
for granted as the basis of an argument\nn10457903\ta citizen who does not hold any official or public position\nn10458111\ta thinker who focuses on the problem as stated and tries to synthesize information and knowledge to achieve a solution\nn10458356\tan advocate of full legal protection for embryos and fetuses; someone opposed to legalized induced abortion\nn10458596\tan expert in prosthetics\nn10459882\tone submitting a request or application especially one seeking admission into a religious order\nn10460033\ta worker in an inn or public house who serves customers and does various chores\nn10461060\ta dealer in poultry and poultry products\nn10462588\t(computing) a computer user who needs the fastest and most powerful computers available\nn10462751\ta worker at a power station\nn10462860\tsomeone who practices a learned profession\nn10464052\tsomeone who prays to God\nn10464542\tteacher at a university or college (especially at Cambridge or Oxford)\nn10464711\tone who precedes you in time (as in holding a position or office)\nn10464870\ta bidder in bridge who makes a preemptive bid\nn10465002\tsomeone who acquires land by preemption\nn10465451\tan infant that is born prior to 37 weeks of gestation\nn10465831\tan elder in the Presbyterian Church\nn10466198\tan advocate who presents a person (as for an award or a degree or an introduction etc.)\nn10466564\ta theologian who believes that the Scripture prophecies of the Apocalypse (the Book of Revelation) are being fulfilled at the present time\nn10466918\tsomeone who keeps safe from harm or danger\nn10467179\tthe chief executive of a republic\nn10467395\tthe person who holds the office of head of state of the United States government\nn10468750\tthe head administrative officer of a college or university\nn10469611\tsomeone employed to arrange publicity (for a firm or a public figure)\nn10469874\ta photographer who works for a newspaper\nn10470779\ta clergyman in Christian churches who has the authority to perform 
or administer various religious rites; one of the Holy Orders\nn10471640\ta leading female ballet dancer\nn10471732\ta distinguished female operatic singer; a female operatic star\nn10471859\ta vain and temperamental person\nn10472129\t(obstetrics) a woman who is pregnant for the first time\nn10472447\tan achondroplastic dwarf whose small size is the result of a genetic defect; body parts and mental and sexual development are normal\nn10473453\ta suitor who fulfills the dreams of his beloved\nn10473562\ta prince who is the husband of a reigning female sovereign\nn10473789\ta petty or insignificant prince who rules some unimportant principality\nn10473917\tthe male heir apparent of the British sovereign\nn10474064\ta female member of a royal family other than the queen (especially the daughter of a sovereign)\nn10474343\tthe eldest daughter of a British sovereign\nn10474446\tthe major party to a financial transaction at a stock exchange; buys and sells for his own account\nn10474645\tthe educator who has executive authority for a school\nn10475835\tsomeone who sells etchings and engravings etc.\nn10475940\tthe head of a religious order; in an abbey the prior is next below the abbot\nn10476467\tan enlisted man of the lowest rank in the Army or Marines\nn10477713\ta nurse in training who is undergoing a trial period\nn10477955\tsomeone who processes things (foods or photographs or applicants etc.)\nn10478118\tsomeone who personally delivers a process (a writ compelling attendance in court) or court papers to the defendant\nn10478293\ta provincial governor of consular rank in the Roman Republic and Roman Empire\nn10478462\tan official in a modern colony who has considerable administrative power\nn10478827\ta doctor specializing in diseases of the rectum and anus\nn10478960\tsomeone who supervises (an examination)\nn10479135\t(ancient Rome) someone employed by the Roman Emperor to manage finance and taxes\nn10479328\tsomeone who obtains or acquires\nn10481167\tsomeone 
who sells stock shares at a profit\nn10481268\ta person who designs and writes and tests computer programs\nn10482054\ta person who makes a promise\nn10482220\tsomeone who is an active supporter and advocate\nn10482587\t(law) one who promulgates laws (announces a law as a way of putting it into execution)\nn10482921\ta person who disseminates messages calculated to assist some cause or some government\nn10483138\tsomeone who spreads the news\nn10483395\tmember of the stage crew in charge of properties\nn10483799\ta woman prophet\nn10483890\tsomeone who speaks by divine inspiration; someone who is an interpreter of the will of God\nn10484858\ta government official who conducts criminal prosecutions on behalf of the state\nn10485298\tsomeone who explores an area for mineral deposits\nn10485883\tan advocate of protectionism\nn10486166\ta woman protege\nn10486236\ta zoologist who studies protozoans\nn10486561\tthe supervisor of the military police\nn10487182\ta worker who thins out and trims trees and shrubs\nn10487363\ta composer of sacred songs\nn10487592\ta sociologist who studies election trends\nn10488016\ta physician who specializes in psychiatry\nn10488309\ta person apparently sensitive to things beyond the natural range of perception\nn10488656\ta person (usually a psychologist but sometimes a linguist) who studies the psychological basis of human language\nn10489426\ta psychologist trained in psychophysics\nn10490421\tthe keeper of a public house\nn10491998\ta short fat person\nn10492086\ta woman in childbirth or shortly thereafter\nn10492727\ta person on whom another person vents their anger\nn10493199\t(football) a person who kicks the football by dropping it from the hands and contacting it with the foot before it hits the ground\nn10493419\tsomeone who propels a boat with a pole\nn10493685\tone who operates puppets or marionettes\nn10493835\tan inexperienced young person\nn10493922\tan agent who purchases goods or services for another\nn10494195\tsomeone 
who adheres to strict religious principles; someone opposed to sensual pleasures\nn10494373\ta member of a group of English Protestants who in the 16th and 17th centuries thought that the Protestant Reformation under Elizabeth was incomplete and advocated the simplification and regulation of forms of worship\nn10495167\ta person who pursues some plan or goal\nn10495421\tsomeone who pushes\nn10495555\tan unlicensed dealer in illegal drugs\nn10495756\tone who intrudes or pushes himself forward\nn10496393\t(Yiddish) a fool; an idiot\nn10496489\tany member of various peoples having an average height of less than five feet\nn10497135\tan Islamic judge\nn10497534\ta person who is paralyzed in both arms and both legs\nn10497645\tone of four children born at the same time from the same pregnancy\nn10498046\tone who quakes and trembles with (or as with) fear\nn10498699\tan unspecified person\nn10498816\t(football) the person who plays quarterback\nn10498986\tan army officer who provides clothing and subsistence for troops\nn10499110\ta staff officer in charge of supplies for a whole army\nn10499232\ta native or inhabitant of Quebec (especially one who speaks French)\nn10499355\ta female sovereign ruler\nn10499631\tthe sovereign ruler of England\nn10499857\tthe wife or widow of a king\nn10500217\tsomething personified as a woman who is considered the best or most important of her kind\nn10500419\tthe wife of a reigning king\nn10500603\ta queen dowager who is mother of the reigning sovereign\nn10500824\tCounsel to the Crown when the British monarch is a queen\nn10500942\tthe host or chairman of a radio or tv quiz show or panel game\nn10501453\tsomeone able to acquire new knowledge and skills rapidly and easily\nn10501635\ta religious mystic who follows quietism\nn10502046\ta person who gives up too easily\nn10502329\tspiritual leader of a Jewish congregation; qualified to expound and apply Jewish law\nn10502950\ta person with a prejudiced belief that one race is superior to 
others\nn10503818\ta biologist who studies the effects of radiation on living organisms\nn10504090\ta scientist trained in radiological technology\nn10504206\ta medical specialist who uses radioactive substances and X-rays in the treatment of disease\nn10505347\tAmerican Indian medicine man who attempts to make it rain\nn10505613\ta bridge partner who increases the partner's bid\nn10505732\ta prince or king in India\nn10505942\ta dissolute man in fashionable society\nn10506336\ta harshly demanding overseer\nn10506544\ta hired hand on a ranch\nn10506915\ta commissioned officer who has been promoted from enlisted status\nn10507070\tsomeone who rants and raves; speaks in a violent or loud manner\nn10507380\tsomeone who is suspected of committing rape\nn10507482\tsomeone who performs rap music\nn10507565\ta recorder appointed by a committee to prepare reports of the meetings\nn10507692\ta rare or unique person\nn10508141\ta person who pays local rates (especially a householder)\nn10508379\tan inexperienced and untrained recruit\nn10508710\ta person who enjoys reading\nn10509063\tsomeone who teaches students to read\nn10509161\ta philosopher who believes that universals are real and exist independently of anyone thinking of them\nn10509810\ta person who is authorized to act as an agent for the sale of land\nn10510245\tan admiral junior to a vice admiral\nn10510974\tthe tennis player who receives the serve\nn10511771\tsomeone who recites from memory\nn10512201\tany new member or supporter (as in the armed forces)\nn10512372\ta recently enlisted soldier\nn10512708\tsomeone who supplies members or employees\nn10512859\ta sergeant deputized to enlist recruits\nn10513509\ta member of the military police in Britain\nn10513823\tsomeone who has red hair\nn10513938\ta poor White person in the southern United States\nn10514051\ta dancer of reels\nn10514121\ta person who enacts a role in an event that occurred earlier\nn10514255\ta person whose case has been referred to a 
specialist or professional group\nn10514429\t(sports) the chief official (as in boxing or American football) who is expected to ensure fair play\nn10514784\tone whose work is to refine a specific thing\nn10515863\tliberal Jew who tries to adapt all aspects of Judaism to modern circumstances\nn10516527\ta graduate nurse who has passed examinations for registration\nn10517137\ta person employed to keep a record of the owners of stocks and bonds issued by the company\nn10517283\tholder of a British professorship created by a royal patron\nn10518349\ta person who reduces the intensity (e.g., of fears) and calms and pacifies\nn10519126\tone retired from society for religious reasons\nn10519494\tleader of a religious order\nn10519984\tsomeone who works for a company that moves furniture\nn10520286\ta modern scholar who is in a position to acquire more than superficial knowledge about many different interests\nn10520544\tsomeone who rebels and becomes an outlaw\nn10520964\tsomeone whose income is from property rents or bond interest and other investments\nn10521100\ta skilled worker whose job is to repair things\nn10521662\ta person who investigates and reports or edits news stories\nn10521853\ta female newsperson\nn10522035\ta person who represents others\nn10522324\ta person without moral scruples\nn10522759\tsomeone who saves something from danger or violence\nn10523341\ta member of a military reserve\nn10524076\tthe representative of Puerto Rico in the United States House of Representatives\nn10524223\ta person who respects someone or something; usually used in the negative\nn10524869\tthe proprietor of a restaurant\nn10525134\ta person who directs and restrains\nn10525436\ta merchant who sells goods at retail\nn10525617\tsomeone who has retired from active working\nn10525878\tthe official in each electorate who holds the election and returns the results\nn10526534\ta person who returns after a lengthy absence\nn10527147\ta Communist who tries to rewrite Marxism to 
justify a retreat from the revolutionary position\nn10527334\ta radical supporter of political or social revolution\nn10528023\ta physician specializing in rheumatic diseases\nn10528148\ta primitive hominid resembling Neanderthal man but living in Africa\nn10528493\ta writer who composes rhymes; a maker of poor verses (usually used as terms of contempt for minor or inferior poets)\nn10529231\ta person who possesses great material wealth\nn10530150\ta traveler who actively rides a vehicle (as a bicycle or motorcycle)\nn10530383\tsomeone who teaches horsemanship\nn10530571\ta soldier whose weapon is a rifle\nn10530959\ta person who uses the right hand more skillfully than the left\nn10531109\tthe most helpful assistant\nn10531445\ta contestant entered in a competition under false pretenses\nn10531838\ta person who leads (especially in illicit activities)\nn10533874\ta workman who is employed to repair roads\nn10533983\tsomeone who communicates vocally in a very loud voice\nn10536134\tan engineer who builds and tests rockets\nn10536274\ta clever thinker\nn10536416\ta famous singer of rock music\nn10537708\ta member of the imperial family that ruled Russia\nn10537906\tan artist of the Romantic Movement or someone influenced by Romanticism\nn10538629\ta craftsman who makes ropes\nn10538733\ta cowboy who uses a lasso to rope cattle or horses\nn10538853\ta decoy who lures customers into a gambling establishment (especially one with a fixed game)\nn10539015\tan acrobat who performs on a rope stretched at some height above the ground\nn10539160\t(a literary reference to) a pretty young girl\nn10539278\ta member of a secret 17th-century society of philosophers and scholars versed in mystical and metaphysical and alchemical lore\nn10540114\tcolloquial term for a member of the Royal Canadian Mounted Police\nn10540252\ta member of the volunteer cavalry regiment led by Theodore Roosevelt in the Spanish-American War (1898)\nn10540656\ta brachycephalic person\nn10541833\ta person 
who exercises authority over civilian affairs\nn10542608\ta person who is employed to deliver messages or documents\nn10542761\tsomeone who travels on foot by running\nn10542888\ta trained athlete who competes in foot races\nn10543161\t(football) a back on the offensive team (a fullback or halfback) who tries to advance the ball by carrying it on plays from the line of scrimmage\nn10543937\tsomeone who migrates as part of a rush to a new gold field or a new territory\nn10544232\tan unsophisticated country person\nn10544748\tsomeone who commits sabotage or deliberately causes wrecks\nn10545792\tsomeone who obtains pleasure from inflicting pain on others\nn10546428\tthe ship's officer in charge of navigation\nn10546633\tany member of a ship's crew\nn10548419\ta woman salesperson\nn10548537\ta man salesperson\nn10548681\ta person employed to represent a business and to sell its merchandise (as to customers in a store or to customers who are visited)\nn10549510\tsomeone who salvages\nn10550252\ta person with advertising boards hanging from the shoulders\nn10550369\ta traditional Zulu healer and respected elder\nn10550468\ta married male American Indian\nn10551576\ta military engineer who does sapping (digging trenches or undermining fortifications)\nn10552393\tthe Scots' term for an English person\nn10553140\ta governor of a province in ancient Persia\nn10553235\tsomeone who walks at a leisurely pace\nn10554024\ta person who performs in the operettas of Gilbert and Sullivan\nn10554141\tone who is employed to saw wood\nn10554846\tsomeone who buys something and resells it at a price far above the initial cost\nn10555059\ta person who spreads malicious gossip\nn10555430\ta reckless and unprincipled reprobate\nn10556033\ta painter of theatrical scenery\nn10556518\ta planner who draws up a personal scheme of action\nn10556704\tsomeone who is afflicted with schizophrenia\nn10556825\t(Yiddish) a dolt who is a habitual bungler\nn10557246\t(slang) a merchant who deals in shoddy 
or inferior merchandise\nn10557854\ta learned person (especially in the humanities); someone who by long study has gained mastery in one or more disciplines\nn10559009\ta scholar who writes explanatory notes on an author (especially an ancient commentator on a classical author)\nn10559288\ta young person attending school (up through senior high school)\nn10559508\ta friend who attends the same school\nn10559683\ta scholar in one of the universities of the Middle Ages; versed in scholasticism\nn10559996\tany person (or institution) who acts as an educator\nn10560106\tan acquaintance that you go to school with\nn10560637\ta person with advanced knowledge of one or more sciences\nn10561222\ta descendent or heir\nn10561320\tsomeone who jeers or mocks or treats something with contempt or calls out in derision\nn10561736\tone who habitually ignores the law and does not answer court summonses\nn10562135\tan official who records the score during the progress of a game\nn10562283\ta logger who marks trees to be felled\nn10562509\tsomeone who travels widely and energetically\nn10562968\tsomeone employed to discover and recruit talented persons (especially in the worlds of entertainment or sports)\nn10563314\tthe leader of a troop of Scouts\nn10563403\ta rapid mover; someone who scrambles\nn10563711\ta person who scratches to relieve an itch\nn10564098\tan actor who plays a role in a film\nn10565502\tsomeone who examines votes at an election\nn10565667\tan underwater diver who uses scuba gear\nn10566072\tan artist who creates sculptures\nn10567613\ta Boy Scout enrolled in programs for water activities\nn10567722\ta worker who finds employment only in certain seasons\nn10567848\ta cook who uses seasonings\nn10568200\t(baseball) the person who plays second base\nn10568358\ta child of a first cousin\nn10568443\tsomeone who endorses a motion or petition as a necessary preliminary to a discussion or vote\nn10568608\tsomeone who serves in a subordinate capacity or plays a secondary 
role\nn10568915\tsomeone who relieves a commander\nn10569011\ta commissioned officer in the Army or Air Force or Marine Corps holding the lowest rank\nn10569179\ta person of second-rate ability or value\nn10570019\ta person who is head of an administrative department of government\nn10570704\tthe person who holds the secretaryship of the Department of Agriculture\nn10571907\tthe person who holds the secretaryship of the Department of Health and Human Services\nn10572706\tthe person who holds the secretaryship of the Department of State\nn10572889\tthe person who holds the secretaryship of the Interior Department\nn10573957\ta member of a sect\nn10574311\ta laborer assigned to a section gang\nn10574538\tan advocate of secularism; someone who believes that religion should be excluded from government and education\nn10574840\tan adviser about alarm systems to prevent burglaries\nn10575463\tone of the outstanding players in a tournament\nn10575594\ta person who seeds clouds\nn10575787\tsomeone making a search or inquiry\nn10576223\tsomeone who is or has been segregated\nn10576316\tsomeone who believes the races should be kept apart\nn10576676\tan elected member of a board of officials who run New England towns\nn10576818\tan elected member of a board of officials who run New England towns\nn10576962\ta person who is unusually selfish\nn10577182\tan energetic person with unusual initiative\nn10577284\tsomeone who promotes or exchanges goods or services for money\nn10577710\tsomeone who sells goods (on commission) for others\nn10577820\ta specialist in the study of meaning\nn10578021\tone of four competitors remaining in a tournament by elimination\nn10578162\ta student at a seminary (especially a Roman Catholic seminary)\nn10578471\ta member of a senate\nn10578656\tthe intended recipient of a message\nn10579062\tan undergraduate student during the year preceding graduation\nn10579549\tthe ranking vice president in a firm that has more than one\nn10580030\tan advocate of 
secession or separation from a larger group (such as an established church or a national union)\nn10580437\tsomeone whose age is in the seventies\nn10580535\t(Middle Ages) a person who is bound to the land and owned by the feudal lord\nn10581648\ta serial killer whose murders occur within a brief period of time\nn10581890\tan English barrister of the highest rank\nn10582604\t(court games) the player who serves to start a point\nn10582746\tsomeone who serves in the armed forces; a member of a military force\nn10583387\ta person who settles in a new colony or moves into new country\nn10583790\ta clerk in a betting shop who calculates the winnings\nn10585077\ta person (especially a celebrity) who is well-known for their sexual attractiveness\nn10585217\tan officer of the church who is in charge of sacred objects\nn10585628\tArabic term for holy martyrs; applied by Palestinians to suicide bombers\nn10586166\ta Shakespearean scholar\nn10586265\ta kidnapper who drugs men and takes them for compulsory service aboard a ship\nn10586444\tsmall farmers and tenants\nn10586903\tan adult male who shaves\nn10586998\tan admirer of G. B. 
Shaw or his works\nn10588074\ta timid defenseless simpleton who is readily preyed upon\nn10588357\tthe leader of an Arab village or family\nn10588724\ta worker who puts things (as books) on shelves\nn10588965\ta clergyman who watches over a group of people\nn10589666\ta contractor who buys old ships and breaks them up for scrap\nn10590146\tan associate on the same ship with you\nn10590239\tsomeone who owns a ship or a share in a ship\nn10590452\tthe agent of a shipowner\nn10590903\ta maker of shirts\nn10591072\ta hereditary military dictator of Japan; the shoguns ruled Japan until the revolution of 1867-68\nn10591811\ta compulsive shopper\nn10592049\ta young female shop assistant\nn10592811\ta union member who is elected to represent fellow workers in negotiating with management\nn10593521\tan athlete who competes in the shot put\nn10594147\ta scolding nagging bad-tempered woman\nn10594523\tthe card player who shuffles the cards\nn10594857\ta person (especially a lawyer or politician) who uses unscrupulous or unethical methods\nn10595164\ta person's brother or sister\nn10595647\ta person suffering from an illness\nn10596517\ta performer who reads without preparation or prior acquaintance (as in music)\nn10596899\tsomeone who communicates by signals\nn10597505\tsomeone who can use sign language to communicate\nn10597745\tused as an Italian courtesy title; can be prefixed to the name or used separately\nn10597889\tan Italian title of address equivalent to Mrs. 
when used before a name\nn10598013\tan Italian title of respect for a man; equivalent to the English `sir'; used separately (not prefixed to his name)\nn10598181\tan Italian courtesy title for an unmarried woman; equivalent to `Miss', it is either used alone or before a name\nn10598459\ta partner (who usually provides capital) whose association with the enterprise is not public knowledge\nn10598904\ta person with confused ideas; incapable of serious thought\nn10599215\ta smiler whose smile is silly and self-conscious and sometimes coy\nn10599806\ta person who sings\nn10601234\ta student of Chinese history and language and culture\nn10601362\ta drinker who sips\nn10602119\tformerly a contemptuous term of address to an inferior man or boy; often used in anger\nn10602470\t(Roman Catholic Church) a title given to a nun (and used as a form of address)\nn10602985\ta female person who has the same parents as another person\nn10603528\tone who hesitates (usually out of fear)\nn10603851\ta musician who plays the sitar\nn10604275\ta student in the sixth form\nn10604380\tsomeone who skates on a skateboard\nn10604634\tsomeone who habitually doubts accepted beliefs\nn10604880\tsomeone who draws sketches\nn10604979\ta worker who uses a skid to move logs\nn10605253\tsomeone who skis\nn10605737\ta naked swimmer\nn10607291\tan underwater swimmer equipped with a face mask and foot fins and either a snorkel or an air cylinder\nn10607478\ta young person who belongs to a British or American group that shave their heads and gather at rock concerts or engage in white supremacist demonstrations\nn10609092\tsomeone who slashes another person\nn10609198\ta dirty untidy woman\nn10610465\ta rester who is sleeping\nn10610850\ta spy or saboteur or terrorist planted in an enemy country who lives there as a law-abiding citizen until activated by a prearranged signal\nn10611267\ta person who is sleeping soundly\nn10611613\ta detective who follows a trail\nn10612210\ta coarse obnoxious 
person\nn10612373\tsomeone who coins and uses slogans to promote a cause\nn10612518\ta dealer in cheap ready-made clothing\nn10613996\ta very attractive or seductive looking woman\nn10614507\ta smiler whose smile is offensively self-satisfied\nn10614629\tsomeone who works metal (especially by hammering it when it is hot and malleable)\nn10615179\tsomeone with an assured and ingratiating manner\nn10615334\tsomeone who imports or exports without paying duties\nn10616578\ta person who sneezes\nn10617024\ta person regarded as arrogant and annoying\nn10617193\ta spy who makes uninvited inquiries into the private affairs of others\nn10617397\tsomeone who snores while sleeping\nn10618234\ta journalist who specializes in sentimental stories\nn10618342\tan athlete who plays soccer\nn10618465\tan anthropologist who studies such cultural phenomena as kinship systems\nn10618685\tsomeone seeking social prominence by obsequious behavior\nn10618848\ta political advocate of socialism\nn10619492\ta person who takes part in social activities\nn10619642\tsomeone expert in the study of human society and its personal relationships\nn10619888\ta personal secretary who handles your social correspondence and appointments\nn10620212\tan adherent of the teachings of Socinus; a Christian who rejects the divinity of Christ and the Trinity and original sin; influenced the development of Unitarian theology\nn10620586\ta linguist who studies the social and cultural factors that influence linguistic communication\nn10620758\ta social scientist who studies the institutions and development of human society\nn10621294\tsomeone who works at a soda fountain\nn10621400\ta member of a sodality\nn10621514\tsomeone who engages in anal copulation (especially a male who engages in anal copulation with another male)\nn10622053\tan enlisted man or woman who serves in an army\nn10624074\ta male human offspring\nn10624310\ta person who sings\nn10624437\ta woman songster (especially of popular 
songs)\nn10624540\ta composer of words or music for popular songs\nn10625860\tone who practices magic or sorcery\nn10626630\tsomeone who is peevish or disgruntled\nn10627252\tsomeone for whom you have a deep affinity\nn10628097\ta member of the Southern Baptist Convention\nn10628644\ta nation's ruler or head of state usually by hereditary right\nn10629329\tan astronaut who is active outside a spacecraft in outer space\nn10629647\tan American whose first language is Spanish\nn10629939\ta boxer who spars with another boxer who is training for an important fight\nn10630093\ta person suffering from spastic paralysis\nn10630188\tsomeone who expresses in language; someone who talks (especially someone who delivers a public speech or someone especially garrulous)\nn10631131\ta speaker of a particular language who has spoken that language since earliest childhood\nn10631309\tthe presiding officer of a deliberative assembly\nn10631654\ta writer who composes speeches for others to deliver\nn10632576\tpractices one branch of medicine\nn10633298\tsomeone who draws up specifications giving details (as for obtaining a patent)\nn10633450\ta close observer; someone who looks at something (such as an exhibition of some kind)\nn10634464\ta therapist who treats speech defects and disorders\nn10634849\tan ice-skater who races competitively; usually around an oval course\nn10634990\tan orator who can hold his listeners spellbound\nn10635788\tan inscrutable person who keeps his thoughts and intentions secret\nn10636488\tan elderly unmarried woman\nn10637483\t(football) an offensive end who lines up at a distance from the other linemen\nn10638922\tsomeone who engages in sports\nn10639238\t(Maine colloquial) a temporary summer resident of Maine\nn10639359\tsomeone who enjoys outdoor activities\nn10639637\tan announcer who reads sports news or describes sporting events\nn10639817\tthe newspaper editor responsible for sports news\nn10641223\ta child\nn10642596\tsomeone who does square 
dancing\nn10642705\ta frank and honest person\nn10643095\tsomeone who settles on land without right or title\nn10643837\tan English country landowner\nn10643937\tyoung nobleman attendant on a knight\nn10644598\tan employee who is a member of a staff of workers (especially a member of the staff that works for the President of the United States)\nn10645017\ta noncommissioned officer ranking above corporal and below sergeant first class in the Army or Marines or above airman 1st class in the Air Force\nn10645223\tsomeone who supervises the actors and directs the action in the production of a stage show\nn10646032\ta worker who stains (wood or fabric)\nn10646140\tsomeone entrusted to hold the stakes for two or more persons betting against one another; must deliver the stakes to the winner\nn10646433\tsomeone who stalks game\nn10646641\ta candidate put forward to divide the Opposition or to mask the true candidate\nn10646780\tsomeone who speaks with involuntary pauses and repetitions\nn10646942\tsomeone who walks with a heavy noisy gait or who stamps on the ground\nn10647745\tsomeone who stands in a place where one might otherwise sit (as a spectator who uses standing room in a theater or a passenger on a crowded bus or train)\nn10648237\tsomeone who takes the place of another (as when things get dangerous or difficult)\nn10648696\tan actor who plays a principal role\nn10649197\ta young (film) actress who is publicized as a future star\nn10649308\tthe official who signals the beginning of a race or competition\nn10650162\ta man who is a respected leader in national or international affairs\nn10652605\tthe treasurer for a state government\nn10652703\ta merchant who sells writing materials and office supplies\nn10654015\tsomeone skilled in the transcription of speech (especially dictation)\nn10654211\ta speaker with an unusually loud voice\nn10654321\ta brother who has only one parent in common with you\nn10654827\tthe wife of your father by a subsequent 
marriage\nn10654932\tthe spouse of your parent by a subsequent marriage\nn10655169\ta laborer who loads and unloads vessels in a port\nn10655442\tsomeone who manages property or other affairs for someone else\nn10655594\tan attendant on an airplane\nn10655730\tthe ship's officer who is in charge of provisions and dining arrangements\nn10655986\tsomeone who insists on something\nn10656120\tan ordinary man\nn10656223\ta person who stifles or smothers or suppresses\nn10656969\t(United Kingdom) a paid magistrate (appointed by the Home Secretary) dealing with police cases\nn10657306\ta garmentmaker who performs the finishing steps\nn10657556\tone who deals only with brokers or other jobbers\nn10657835\tsomeone who buys and sells stock shares\nn10658304\tone (as a retailer or distributor) that stocks goods\nn10659042\ta laborer who tends fires (as on a coal-fired train or steamship)\nn10659762\ta person who carries himself or herself with the head and shoulders habitually bent forward\nn10660128\ta private detective employed by a merchant to stop pilferage\nn10660621\ta combat pilot who strafes the enemy\nn10660883\ta performer who acts as stooge to a comedian\nn10661002\tanyone who does not belong in the environment in which they are found\nn10661216\tan individual that one is not acquainted with\nn10661563\tan expert in strategy (especially in warfare)\nn10661732\ta member of a work gang who supervises the other workers\nn10663315\ta prostitute who attracts customers by walking the streets\nn10663549\tone who helps carry a stretcher\nn10665302\ta person who struggles with difficulties or with great effort\nn10665587\ta man who is virile and sexually active\nn10665698\ta learner who is enrolled in an educational institution\nn10666752\ta second-rate prize fighter\nn10667477\tan artist who is a master of a particular style\nn10667709\ta British commissioned army officer below the rank of captain\nn10667863\tsomeone who enters into a subcontract with the primary 
contractor\nn10668450\tsomeone who overcomes and establishes ascendancy and control by force or persuasion\nn10668666\ta person who is subjected to experimental or other observational procedures; someone who is an object of investigation\nn10669991\tan assistant subject to the authority or control of another\nn10671042\tan athlete who plays only when a starter on the team is replaced\nn10671613\ta person who inherits some title or office\nn10671736\ta person who follows next in order\nn10671898\tsomeone who gives help in times of need or distress or difficulty\nn10672371\ta Muslim who represents the mystical dimension of Islam; a Muslim who seeks direct experience of Allah; mainly in Iran\nn10672540\tan assistant or subordinate bishop of a diocese\nn10672662\ta woman advocate of women's right to vote (especially a militant advocate in the United Kingdom at the beginning of the 20th century)\nn10673296\ta wealthy older man who gives a young person expensive gifts in return for friendship or intimacy\nn10673776\ta terrorist who blows himself up in order to kill or injure other people\nn10674130\ta man who courts a woman\nn10674713\ta wrestler who participates in sumo (a Japanese form of wrestling)\nn10675010\tsomeone who basks in the sunshine in order to get a suntan\nn10675142\ta tramp who habitually arrives at sundown\nn10675609\tan amateur boxer who weighs more than 201 pounds\nn10676018\tone of greater rank or station or quality\nn10676434\tan informal term for a mother who can combine childcare and full-time employment\nn10676569\ta minor actor in crowd scenes\nn10678937\tthe most important person in an organization\nn10679174\ta physician who specializes in surgery\nn10679503\tthe senior medical officer in an Army or Navy\nn10679610\tthe head of the United States Public Health Service\nn10679723\ta captor who uses surprise to capture the victim\nn10680609\tan engineer who determines the boundaries and elevations of land or structures\nn10680796\tsomeone who 
conducts a statistical survey\nn10681194\tone who lives through affliction\nn10681557\ta supplier of victuals or supplies to an army\nn10682713\tan employee who sweeps (floors or streets etc.)\nn10682953\ta person loved by another person\nn10683675\ta person who engages freely in promiscuous sex\nn10684146\ta person who administers punishment by wielding a switch or whip\nn10684630\tan insignificant student who is ridiculed as being affected or boringly studious\nn10684827\ta person who tries to please someone in order to gain a personal advantage\nn10685398\ta slender graceful young woman\nn10686073\tsomeone who shares your feelings or opinions and hopes that you will be successful\nn10686517\ta composer of symphonies\nn10686694\ta musician who plays syncopated jazz music (usually in a dance band)\nn10686885\tone appointed to represent a city or university or corporation in business transactions\nn10688356\ta person who is skilled at planning tactics\nn10688811\tsomeone who appends or joins one thing to another\nn10689306\t(American football) the person who plays tailback\nn10690268\tone who keeps a tally of quantity or weight of goods produced or shipped or received\nn10690421\tone who sells goods on the installment plan\nn10690648\ta soldier who drives a tank\nn10691318\tsomeone who wiretaps a telephone or telegraph wire\nn10691937\ta hypocrite who pretends to religious piety (after the protagonist in a play by Moliere)\nn10692090\t(sometimes used ironically) a man of great strength and agility (after the hero of a series of novels by Edgar Rice Burroughs)\nn10692482\tsomeone who samples food or drink for its quality\nn10692883\tan official who evaluates property for the purpose of taxing it\nn10693235\ta bureaucrat who levies taxes\nn10693334\ta woman employed to dance with patrons who pay a fee for each dance\nn10693824\ta biologist who specializes in the classification of organisms into groups on the basis of their structure and origin and 
behavior\nn10694258\ta person whose occupation is teaching\nn10694939\ta graduate student with teaching responsibilities\nn10695450\ta reckless and impetuous person\nn10696101\ta noncommissioned officer ranking below a master sergeant in the air force or marines\nn10696508\tsomeone known for high skill in some intellectual or artistic technique\nn10697135\ta tough youth of 1950's and 1960's wearing Edwardian style clothes\nn10697282\ta total abstainer\nn10698368\tsomeone who reports news stories via television\nn10699558\tsomeone who temporizes; someone who tries to gain time or who waits for a favorable time\nn10699752\ta person who tempts others\nn10699981\tinfant born at a gestational age between 37 and 42 completed weeks\nn10700105\tone who works strenuously\nn10700201\tsomeone who pays rent to use land or a building or a car that is owned by someone else\nn10700640\ta holder of buildings or lands by any kind of title (as ownership or lease)\nn10700963\tan inexperienced person (especially someone inexperienced in outdoor living)\nn10701180\tan athlete who plays tennis\nn10701644\tsomeone who earns a living playing or teaching tennis\nn10701962\ta musician who plays the tenor saxophone\nn10702167\ta person who serves a specified term\nn10702615\ta person who inspires fear or dread\nn10703221\ta woman who is pregnant for the third time\nn10703336\ta person who makes a will\nn10703480\ta female testator\nn10703692\tsomeone who is tested (as by an intelligence test or an academic examination)\nn10704238\ta baby conceived by fertilization that occurs outside the mother's body; the woman's ova are removed and mixed with sperm in a culture medium - if fertilization occurs the blastocyte is implanted in the woman's uterus\nn10704712\ta member of the Texas state highway patrol; formerly a mounted lawman who maintained order on the frontier\nn10704886\ta man ranking above an ordinary freeman and below a noble in Anglo-Saxon England (especially one who gave military 
service in exchange for land)\nn10705448\tsomeone who produces theatrical performances\nn10705615\tsomeone who is learned in theology or who speculates about theology\nn10706812\tsomeone who theorizes (especially in science or art)\nn10707134\ta believer in theosophy\nn10707233\ta person skilled in a particular type of therapy\nn10707707\ta native or inhabitant of Thessalonica\nn10708292\tan important intellectual\nn10708454\tsomeone who exercises the mind (usually in an effort to reach a decision)\nn10709529\tsomeone who projects something (especially by a rapid motion of the arm)\nn10710171\tan acolyte who carries a thurible\nn10710259\tsomeone who is paid to admit only those who have purchased tickets\nn10710778\t(football) an offensive end who lines up close to the tackle\nn10710913\ta worker who lays tile\nn10711483\t(sports) an official who keeps track of the time elapsed\nn10711766\ta native or inhabitant of Timor\nn10712229\tan unskilled person who tries to fix or mend\nn10712374\tsomeone who makes or repairs tinware\nn10712474\ta hairdresser who tints hair\nn10712690\tsomeone who drinks liquor repeatedly in small quantities\nn10712835\tone who sells advice about gambling or speculation (especially at the racetrack)\nn10713254\ta special law-enforcement agent of the United States Treasury\nn10713686\tthe person who proposes toasts and introduces speakers at a banquet\nn10713843\ta woman toastmaster\nn10714195\tsomeone who rides a toboggan\nn10715030\ta girl who behaves in a boyish manner\nn10715347\tsomeone skilled in making or repairing tools\nn10715789\ta leader in a campaign or movement\nn10716576\tan American who favored the British side during the American Revolution\nn10716864\ta member of political party in Great Britain that has been known as the Conservative Party since 1832; was the opposition party to the Whigs\nn10717055\tsomeone who throws lightly (as with the palm upward)\nn10717196\tterms of abuse for a masturbator\nn10717337\tan adherent of 
totalitarian principles or totalitarian government\nn10718131\tsomeone who travels for pleasure\nn10718349\tsomeone who advertises for customers in an especially brazen way\nn10718509\tsomeone who buys tickets to an event in order to resell them at a profit\nn10718665\ta comrade (especially in Russian communism)\nn10718952\ta person with light blond hair\nn10719036\tthe official who keeps a town's records\nn10719132\t(formerly) an official who made public announcements\nn10719267\ta resident of a town or city\nn10719807\tone who studies the nature and effects of poisons and their treatment\nn10720197\ta star runner\nn10720453\tsomeone who purchases and maintains an inventory of goods to be sold\nn10720964\ta worker who belongs to a trade union\nn10721124\tone who adheres to traditional views\nn10721321\ta policeman who controls the flow of automobile traffic\nn10721612\tan actor who specializes in tragic roles\nn10721708\ta writer (especially a playwright) who writes tragedies\nn10721819\tan actress who specializes in tragic roles\nn10722029\tthe person responsible for driving a herd of cattle\nn10722575\tone who trains other persons or animals\nn10722965\tsomeone who betrays his country by committing treason\nn10723230\tfemale traitor\nn10723597\tsomeone who conducts or carries on business or negotiations\nn10724132\tsomeone who represents the sounds of speech in phonetic notation\nn10724372\tsomeone who transfers or is transferred from one position to another\nn10724570\t(law) someone to whom a title or property is conveyed\nn10725280\ta person who translates written messages from one language to another\nn10726031\tsomeone who adopts the dress or manner or sexual role of the opposite sex\nn10726786\ta salesman who travels to call on customers\nn10727016\tsomeone who moves or passes across\nn10727171\ta fisherman who use a trawl net\nn10727458\tthe British cabinet minister responsible for economic strategy\nn10728117\tsomeone who digs trenches\nn10728233\tsomeone 
who popularizes a new fashion\nn10728624\tsomeone who lives in a tribe\nn10728998\tone who tries\nn10729330\tone who behaves lightly or not seriously\nn10730542\ta mounted policeman\nn10730728\ta state police officer\nn10731013\tradicals who support Trotsky's theory that socialism must be established throughout the world by continuing revolution\nn10731732\tone who is absent from school without permission\nn10732010\ta musician who plays the trumpet or cornet\nn10732521\ta convict who is considered trustworthy and granted special privileges\nn10732854\ta member of the dynasty that ruled England\nn10732967\ta gymnast who performs rolls and somersaults and twists etc.\nn10733820\tlearns from a tutor\nn10734394\teither of two offspring born at the same time from the same pregnancy\nn10734741\tsomeone who deceives a lover or spouse by carrying on a sexual relationship with somebody else\nn10734891\ta native of Yorkshire\nn10734963\ta person who plays the kettledrums\nn10735173\tsomeone paid to operate a typewriter\nn10735298\ta cruel and oppressive dictator\nn10735984\tan official at a baseball game\nn10737103\tan actor able to replace a regular performer when required\nn10737264\tone whose presence is undesirable\nn10738111\ta person who rides a unicycle\nn10738215\tan advocate of unilateralism\nn10738670\tadherent of Unitarianism\nn10738871\tadherent of Arminianism\nn10739135\ta person whose type O Rh-negative blood may be safely transfused into persons with other blood types\nn10739297\tan expert on the UNIX operating system\nn10739391\tan unidentified soldier whose body is honored as a memorial\nn10740594\tan unexpected winner; someone who defeats the favorite competitor\nn10740732\ta selfish actor who upstages the other actors\nn10740868\ta person who has suddenly risen to a higher economic status but has not gained social acceptance of others in that class\nn10741152\tan arrogant or presumptuous person\nn10741367\tpoor and often mischievous city 
child\nn10741493\ta specialist in urology\nn10742005\ta female usher\nn10742111\tan official stationed at the entrance of a courtroom or legislative chamber\nn10742546\tone who wrongfully or illegally seizes and holds the place of another\nn10742997\ta baseball player valued for the ability to play at several positions\nn10743124\tsomeone who puts to good use\nn10743356\tan idealistic (but usually impractical) social reformer\nn10744078\ta husband who murders his wife\nn10744164\tsomeone on vacation; someone who is devoting time to pleasure or relaxation rather than to work\nn10745006\tthe student with the best grades who usually delivers the valedictory address at commencement\nn10745770\ta girl who grew up in the tract housing in the San Fernando Valley\nn10746931\tan athlete who jumps over a high crossbar with the aid of a long pole\nn10747119\teater of fruits and grains and nuts; someone who eats no meat or fish or (often) any animal products\nn10747424\ta strict vegetarian; someone who eats no animal or dairy products at all\nn10747548\tsomeone who regards with deep respect or reverence\nn10747965\ta speculator who makes money available for innovative projects (especially in high technology)\nn10748142\ta merchant who undertakes a trading venture (especially a venture that sends goods overseas)\nn10748506\tan irritating or obnoxious person\nn10748620\tan important or influential (and often overbearing) person\nn10749928\ta musician who plays the vibraphone\nn10750031\ta Roman Catholic priest who acts for another higher-ranking clergyman\nn10750188\t(Church of England) a clergyman appointed to act as priest of a parish\nn10750640\t(Roman Catholic Church) an administrative deputy who assists a bishop\nn10751026\ta deputy or assistant to someone bearing the title of chancellor\nn10751152\tsomeone appointed by a ruler as an administrative deputy\nn10751265\tan executive officer ranking immediately below a president; may serve in the president's place under certain 
circumstances\nn10751710\ta regent's deputy\nn10752480\ta person who is tricked or swindled\nn10753061\ta person who lived during the reign of Victoria\nn10753182\tan innkeeper (especially British)\nn10753339\tmember of a vigilance committee\nn10753442\tone who has lived in a village most of their life\nn10753989\ta person who harvests grapes for making wine\nn10754189\tsomeone who sells wine\nn10754281\tsomeone who assaults others sexually\nn10754449\tsomeone who violates the law\nn10755080\ta musician who plays the viola\nn10755164\ta noisy or scolding or domineering woman\nn10755394\ta specialist in virology\nn10755648\ta member of the most numerous indigenous people of the Philippines\nn10756061\ta wife or widow of a viscount\nn10756148\t(in various countries) a son or younger brother or a count\nn10756261\ta member of the western group of Goths who sacked Rome and created a kingdom in present-day Spain and southern France\nn10756641\ta person given to fanciful speculations and enthusiasms with little regard for what is actually possible\nn10756837\tan important or distinguished visitor\nn10757050\ta professor visiting another college or university to teach for a limited time\nn10757492\tone whose prevailing mental imagery is visual\nn10758337\ta malicious woman with a fierce temper\nn10758445\ta high official in a Muslim government (especially in the Ottoman Empire)\nn10758949\tsomeone who regulates the tone of organ pipes\nn10759151\ta person who performs voluntary work\nn10759331\t(military) a person who freely enlists for service\nn10759982\ta priest or priestess (or consecrated worshipper) in a non-Christian religion or cult\nn10760199\tone bound by vows to a religion or life of worship or service\nn10760622\t(law) a person called into court to defend a title\nn10760951\tsomeone who makes a solemn promise to do something or behave in a certain way\nn10761190\ta traveler to a distant land (especially one who travels by sea)\nn10761326\ta viewer who enjoys 
seeing the sex acts or sex organs of others\nn10761519\tsomeone who vulcanizes rubber to improve its strength and resiliency\nn10762212\tsomeone who speaks or writes in a vague and evasive manner\nn10762480\ta follower of the theories or an admirer of the music of Richard Wagner\nn10763075\ta homeless child especially one forsaken or orphaned\nn10763245\ta mourner who utters long loud high-pitched cries\nn10763383\ta person whose occupation is to serve at table (as in a restaurant)\nn10763620\ta woman waiter\nn10764465\ta union representative who visits workers at their jobs to see whether agreements are observed\nn10764622\tplays a small part in a dramatic production\nn10764719\tusually in combination: person in charge of or employed at a particular thing\nn10765305\ta silly and inept person; someone who is regarded as stupid\nn10765587\ta dancer who waltzes\nn10765679\tsomeone who leads a wandering unsettled life\nn10765885\ta legendary Jew condemned to roam the world for mocking Jesus at the Crucifixion\nn10766260\tlewd or lascivious woman\nn10768148\ta customer to whom a warrant or guarantee is given\nn10768272\ta recipient of a warrant issued by a court in the United States\nn10768903\tsomeone who washes things for a living\nn10769084\toperates industrial washing machine\nn10769188\ta working woman who takes in washing\nn10769321\tsomeone who enjoys riotous drinking\nn10769459\tsomeone who dissipates resources self-indulgently\nn10771066\ta member of the women's reserve of the United States Navy; originally organized during World War II but now no longer a separate branch\nn10772092\tpredicts the weather\nn10772580\ta reservist who fulfills the military obligation on weekends\nn10772937\ta farmhand hired to remove weeds\nn10773665\tjoins pieces of metal by welding them together\nn10773800\ta case for a welfare worker\nn10774329\tan inhabitant of a western area; especially of the U.S.\nn10774756\ta resident of the west side of Manhattan in New York 
City\nn10775003\ta workman who wets the work in a manufacturing process\nn10775128\ta seaman who works on a ship that hunts whales\nn10776052\ta supporter of the American Revolution\nn10776339\ta person given to excessive complaints and crying and whining\nn10776887\thuntsman's assistant in managing the hounds\nn10777299\tone who speaks in a whisper\nn10778044\ta clown whose face is covered with white make-up\nn10778148\ta Roman Catholic friar wearing the white cloak of the Carmelite order; mendicant preachers\nn10778711\ta Roman Catholic friar or monk belonging to one of the Augustinian monastic orders\nn10778999\tsomeone (or something) expected to achieve great success in a given field\nn10779610\ta person who believes that the white race is or should be supreme\nn10779897\ta pimp who procures whores\nn10779995\ta prostitute's customer\nn10780284\ta woman whose husband is dead especially one who has not remarried\nn10780632\ta married woman; a man's partner in marriage\nn10781236\tone who can't stay still (especially a child)\nn10781817\ta person who lacks confidence, is irresolute and wishy-washy\nn10782362\t(RAF rank) one who is next below a group captain\nn10782471\t(sports) player in wing position\nn10782791\ta gambler who wins a bet\nn10782940\tthe contestant who wins the contest\nn10783240\tsomeone who decorates shop windows\nn10783539\ta person who winks\nn10783646\ta worker who wipes\nn10783734\ta worker who installs and repairs electric wiring\nn10784113\tan upstart who makes conceited, sardonic, insolent comments\nn10784544\tsomeone who is believed to heal through magical powers\nn10784922\ta student who withdraws from the educational institution in which he or she was enrolled\nn10785480\tan authority who withdraws permission\nn10787470\tan adult female person (as opposed to a man)\nn10788852\ta female person who plays a significant role (wife or mistress or girlfriend) in the life of a particular man\nn10789415\ta man who is unusually successful at an 
early age\nn10789709\tsomeone who is curious about something\nn10791115\ta young woman who is employed\nn10791221\tan employee who performs manual or industrial labor\nn10791820\ta fellow worker\nn10791890\ta person absorbed by the concerns and interests and pleasures of the present world\nn10792335\tsomeone who admires too much to recognize faults\nn10792506\tan important, honorable person (word is often used humorously)\nn10792856\tsomeone who demolishes or dismantles buildings as a job\nn10793570\tsomeone who makes or repairs something (usually used in combination)\nn10793799\ta candidate for public office whose name does not appear on the ballot and so must be written on the ballot by the voters\nn10794014\twrites (books or stories or articles or the like) professionally (for pay)\nn10801561\ta student enrolled in (or graduated from) Winchester College\nn10801802\ta Japanese gangster\nn10802507\ta military recruit who is assigned menial tasks\nn10802621\tmember of an international gang of Jamaican criminals who sell drugs and violence\nn10802953\tworker in a railway yard\nn10803031\ta railroad employer who is in charge of a railway yard\nn10803282\t(Yiddish) a woman who talks too much; a gossip unable to keep a secret; a woman who spreads rumors and scandal\nn10803978\tone who practices yoga and has achieved a high level of spiritual insight\nn10804287\ta teenager or a young adult male\nn10804636\ta young radical who agitates for reform\nn10804732\ta member of one or more of the insurgent groups in Turkey in the late 19th century who rebelled against the absolutism of Ottoman rule\nn10805501\ta Jewish supporter of Zionism\nn10806113\tthe chief person responsible for a zoological garden\nn10994097\tFrench diplomat who in 1793 tried to draw the United States into the war between France and England (1763-1834)\nn11100798\tUnited States diplomat who recommended a policy of containment in dealing with Soviet aggression (1904-2005)\nn11196627\tBritish writer of short 
stories (1870-1916)\nn11242849\tBritish philosopher (born in Austria) who argued that scientific theories can never be proved to be true, but are tested by attempts to falsify them (1902-1994)\nn11318824\tIrish writer of the horror novel about Dracula (1847-1912)\nn11346873\tUnited States physicist who developed the laser and maser principles for producing high-intensity radiation (1915-)\nn11448153\ta windstorm that lifts up clouds of dust or sand\nn11487732\ta bright spot on the parhelic circle; caused by diffraction by ice crystals\nn11508382\tprecipitation falling from clouds in the form of ice crystals\nn11511327\ta bright spot on a planet\nn11524451\ta persistent and widespread unusual weather condition (especially of unusual temperatures)\nn11530008\tmicroscopic plants; bacteria are often considered to be microflora\nn11531193\ta wild uncultivated plant (especially a wild apple or crabapple tree)\nn11531334\ta plant that tends to climb and on occasion can grow like a vine\nn11532682\tcuplike structure around the base of the stalk of certain fungi\nn11533212\tthe fruiting body of a basidiomycete which bears its spores on special cells\nn11533999\ta part of a plant (e.g., a leaf) that has been modified to provide protection for insects or mites or fungi\nn11536567\ta plant that reproduces or is reproduced by apomixis\nn11536673\ta plant that lives in or on water\nn11537327\tany of numerous plants of the division Bryophyta\nn11539289\ta moss in which the main axis is terminated by the archegonium (and hence the capsule)\nn11542137\tany of various pale or ashy mosses of the genus Sphagnum whose decomposed remains form peat\nn11542640\tany of numerous small green nonvascular plants of the class Hepaticopsida growing in wet places and resembling green seaweeds or leafy mosses\nn11544015\ta common liverwort\nn11545350\tCarboniferous fossil fern characterized by a regular arrangement of the leaflets resembling a comb\nn11545524\tplants having vascular tissue and 
reproducing by spores\nn11545714\tany of numerous flowerless and seedless vascular plants having true roots from a rhizome and fronds that uncurl upward; reproduce by spores\nn11547562\tpteridophytes of other classes than Filicopsida\nn11547855\ta small usually single-celled asexual reproductive body produced by many nonflowering plants and fungi and some bacteria and protozoans and that are capable of developing into a new individual without sexual fusion\nn11548728\ta nonmotile spore of red algae\nn11548870\tthick-walled asexual resting spore of certain fungi and algae\nn11549009\tan asexually produced fungal spore formed on a conidiophore\nn11549245\ta thick-walled sexual spore that develops from a fertilized oosphere in some algae and fungi\nn11549779\tone of the four asexual spores produced within a sporangium\nn11549895\tan asexual spore of some algae and fungi that moves by means of flagella\nn11552133\tformerly recognized taxonomic group including all flowerless and seedless plants that reproduce by means of spores: ferns, mosses, algae, fungi\nn11552386\tplant that reproduces by means of seeds not spores\nn11552594\tyoung plant or tree grown from a seed\nn11552806\t(botany) a plant that completes its entire life cycle within the space of a year\nn11552976\t(botany) a plant having a life cycle that normally takes two seasons from germination to death to complete; flowering biennials usually bloom and fruit in the second season\nn11553240\t(botany) a plant lasting for three seasons or more\nn11553522\ta plant that grows in a moist habitat\nn11596108\tplants of the class Gymnospermae having seeds not enclosed in an ovary\nn11597657\tsmall tropical tree with tiered branches and divaricate branchlets having broad glossy dark green leaves; exploited for its edible young leaves and seeds that provide a fine flour\nn11598287\ta shrub that is cultivated by Arabs for its leaves which are chewed or used to make tea\nn11598686\tjointed and nearly leafless desert shrub 
having reduced scalelike leaves and reddish fleshy seeds\nn11598886\tChinese ephedra yielding ephedrine\nn11599324\tcurious plant of arid regions of southwestern Africa having a yard-high and yard-wide trunk like a turnip with a deep taproot and two large persistent woody straplike leaves growing from the base; living relic of a flora long disappeared; some may be 700-5000 years old\nn11600372\tany tropical gymnosperm of the order Cycadales; having unbranched stems with a crown of fernlike leaves\nn11601177\tdwarf palmlike cycad of Japan that yields sago\nn11601333\tsoutheastern Indian cycad with palmlike foliage\nn11601918\tany of various cycads of the genus Zamia; among the smallest and most verdant cycads\nn11602091\tsmall tough woody zamia of Florida and West Indies and Cuba; roots and half-buried stems yield an arrowroot\nn11602478\ta small cycad of the genus Ceratozamia having a short scaly woody trunk and fernlike foliage and woody cones; Mexico\nn11602873\tany cycad of the genus Dioon; handsome palmlike cycads with robust crowns of leaves and rugged trunks\nn11603246\tany of numerous cycads of the genus Encephalartos having stout cylindrical trunks and a terminal crown of long often spiny pinnate leaves\nn11603462\tSouth African cycad; the farinaceous pith of the fruit used as food\nn11603835\tany treelike cycad of the genus Macrozamia having erect trunks and pinnate leaves and large cones with sometimes edible nuts; Australia\nn11604046\tlarge attractive palmlike evergreen cycad of New South Wales\nn11608250\ta coniferous tree\nn11609475\tany of several low-growing pines of western North America\nn11609684\tany of several pinons bearing edible nutlike seeds\nn11609862\ta small two-needled or three-needled pinon of Mexico and southern Texas\nn11610047\tsmall compact two-needled pinon of southwestern United States; important as a nut pine\nn11610215\tpinon of southwestern United States having solitary needles and often many stems; important as a nut 
pine\nn11610437\ttwo-needled or three-needled pinon mostly of northwestern California coast\nn11610602\tvery small tree similar to Rocky mountain pinon but having a single needle per fascicle; similar to Parry's pinyon in range\nn11610823\tfive-needled pinon of southern California and northern Baja California having (sometimes three-needled or four-needled showing hybridization from Pinus californiarum)\nn11611087\tlarge two-needled pine of southeastern United States with light soft wood\nn11611233\tlarge two-needled timber pine of southeastern Europe\nn11611356\tlarge three-needled pine of the eastern United States and southeastern Canada; closely related to the pond pine\nn11611561\tlarge three-needled pine of sandy swamps of southeastern United States; needles longer than those of the northern pitch pine\nn11611758\tmedium-sized two-needled pine of southern Europe having a spreading crown; widely cultivated for its sweet seeds that resemble almonds\nn11612018\tlarge five-needled European pine; yields cembra nuts and a resinous exudate\nn11612235\tthe seed of the Swiss pine\nn11612349\tlow shrubby pine of central Europe with short bright green needles in bunches of two\nn11612575\tsmall slow-growing pine of western United States similar to the bristlecone pine; chocolate brown bark in plates and short needles in bunches of 5; crown conic but becoming rough and twisted; oldest plant in the world growing to 5000 years in cold semidesert mountain tops\nn11612923\tany of several five-needled pines with white wood and smooth usually light grey bark when young; especially the eastern white pine\nn11613219\ttall-growing pine of eastern North America; bark is brown with longitudinal fissures when mature; valued as a timber tree\nn11613459\ttall pine of western North America with stout blue-green needles; bark is grey-brown with rectangular plates when mature\nn11613692\tmedium-size pine of northwestern Mexico; bark is dark brown and furrowed when 
mature\nn11613867\twestern North American pine with long needles and very flexible limbs and dark-grey furrowed bark\nn11614039\tsmall pine of western North America; having smooth grey-white bark and soft brittle wood; similar to limber pine\nn11614250\tany of various pines having yellow wood\nn11614420\tcommon and widely distributed tall timber pine of western North America having dark green needles in bunches of 2 to 5 and thick bark with dark brown plates when mature\nn11614713\ttall symmetrical pine of western North America having long blue-green needles in bunches of 3 and elongated cones on spreading somewhat pendulous branches; sometimes classified as a variety of ponderosa pine\nn11615026\tshrubby two-needled pine of coastal northwestern United States; red to yellow-brown bark fissured into small squares\nn11615259\ttall subspecies of lodgepole pine\nn11615387\ttall spreading three-needled pine of southeastern United States having reddish-brown fissured bark and a full bushy upper head\nn11615607\tslender medium-sized two-needled pine of eastern North America; with yellow-green needles and scaly grey to red-brown fissured bark\nn11615812\tany of several pines that prefer or endure moist situations such as loblolly pine or longleaf pine\nn11615967\tlarge three-needled pine of southeastern United States having very long needles and gnarled twisted limbs; bark is red-brown deeply ridged; an important timber tree\nn11616260\tlarge pine of southern United States having short needles in bunches of 2-3 and red-brown bark when mature\nn11616486\tpine of eastern North America having long needles in bunches of two and reddish bark\nn11616662\tmedium large two-needled pine of northern Europe and Asia having flaking red-brown bark\nn11616852\tcommon small shrubby pine of the eastern United States having straggling often twisted or branches and short needles in bunches of 2\nn11617090\ttall California pine with long needles in bunches of 3, a dense crown, and dark brown 
deeply fissured bark\nn11617272\tsmall slow-growing upland pine of western United States (Rocky Mountains) having dense branches with fissured rust-brown bark and short needles in bunches of 5 and thorn-tipped cone scales; among the oldest living things some over 4500 years old\nn11617631\ta small two-needled upland pine of the eastern United States (Appalachians) having dark brown flaking bark and thorn-tipped cone scales\nn11617878\tmedium-sized three-needled pine of the Pacific coast of the United States having a prominent knob on each scale of the cone\nn11618079\tpine native to Japan and Korea having a wide-spreading irregular crown when mature; grown as an ornamental\nn11618290\tlarge Japanese ornamental having long needles in bunches of 2; widely planted in United States because of its resistance to salt and smog\nn11618525\tmedium-sized five-needled pine of southwestern California having long cylindrical cones\nn11618861\tany of numerous conifers of the genus Larix all having deciduous needlelike leaves\nn11619227\tmedium-sized larch of Canada and northern United States including Alaska having a broad conic crown and rust-brown scaly bark\nn11619455\ttall larch of western North America have pale green sharply pointed leaves and oblong cones; an important timber tree\nn11619687\tmedium-sized larch of the Rocky Mountains; closely related to Larix occidentalis\nn11619845\ttall European tree having a slender conic crown, flat needlelike leaves, and hairy cone scales\nn11620016\tmedium-sized larch of northeastern Russia and Siberia having narrowly conic crown and soft narrow bright-green leaves; used in cultivation\nn11620389\tChinese deciduous conifer resembling a larch with golden yellow leaves\nn11620673\tany of various evergreen trees of the genus Abies; chiefly of upland areas\nn11621029\tany of various true firs having leaves white or silvery white beneath\nn11621281\tmedium to tall fir of western North America having a conic crown and branches in tiers; 
leaves smell of orange when crushed\nn11621547\ttall timber tree of central and southern Europe having a regular crown and grey bark\nn11621727\tmedium to tall fir of central to western United States having a narrow erect crown and soft wood\nn11621950\tmedium-sized fir of northeastern North America; leaves smell of balsam when crushed; much used for pulpwood and Christmas trees\nn11622184\tsmall fast-growing but short-lived fir of southern Alleghenies similar to balsam fir but with very short leaves\nn11622368\tlofty fir of the Pacific coast of northwestern America having long curving branches and deep green leaves\nn11622591\tmedium-tall timber tree of the Rocky Mountains having a narrowly conic to columnar crown\nn11622771\ta pyramidal fir of southwestern California having spiny pointed leaves and cone scales with long spines\nn11623105\tany cedar of the genus Cedrus\nn11623815\tcedar of Lebanon and northwestern Syria that attains great age and height\nn11623967\ttall East Indian cedar having spreading branches with nodding tips; highly valued for its appearance as well as its timber\nn11624192\ttall Algerian evergreen of Atlas mountains with blue-green leaves; widely planted as an ornamental\nn11624531\tany coniferous tree of the genus Picea\nn11625003\ttall pyramidal spruce native to northern Europe having dark green foliage on spreading branches with pendulous branchlets and long pendulous cones\nn11625223\tmedium-sized spruce of California and Oregon having pendulous branches\nn11625391\ttall spruce of Rocky Mountains and British Columbia with blue-green needles and acutely conic crown; wood used for rough lumber and boxes\nn11625632\tmedium-sized spruce of northeastern North America having short blue-green leaves and slender cones\nn11625804\tsmall spruce of boggy areas of northeastern North America having spreading branches with dense foliage; inferior wood\nn11626010\ttall spruce of northern Europe and Asia; resembles Norway spruce\nn11626152\ta large 
spruce that grows only along the northwestern coast of the United States and Canada; has sharp stiff needles and thin bark; the wood has a high ratio of strength to weight\nn11626409\tevergreen tree of the Caucasus and Asia Minor used as an ornamental having pendulous branchlets\nn11626585\ttall spruce with blue-green needles and dense conic crown; older trees become columnar with lower branches sweeping downward\nn11626826\tmedium-sized spruce of eastern North America; chief lumber spruce of the area; source of pulpwood\nn11627168\tan evergreen tree\nn11627512\tcommon forest tree of the eastern United States and Canada; used especially for pulpwood\nn11627714\tmedium-sized evergreen of southeastern United States having spreading branches and widely diverging cone scales\nn11627908\tlarge evergreen of western United States; wood much harder than Canadian hemlock\nn11628087\ttall evergreen of western North America; commercially important timber tree\nn11628456\ttall evergreen timber tree of western North America having resinous wood and short needles\nn11628793\tlofty douglas fir of northwestern North America having short needles and egg-shaped cones\nn11629047\tdouglas fir of California having cones 4-8 inches long\nn11629354\tChinese evergreen conifer discovered in 1955; not yet cultivated elsewhere\nn11630017\tany of numerous trees of the family Cupressaceae that resemble cedars\nn11630489\tany of numerous evergreen conifers of the genus Cupressus of north temperate regions having dark scalelike leaves and rounded cones\nn11631159\tsmall sometimes shrubby tree native to California; often used as an ornamental; in some classification systems includes the pygmy cypress and the Santa Cruz cypress\nn11631405\trare small cypress native to northern California; sometimes considered the same species as gowen cypress\nn11631619\trare California cypress taller than but closely related to gowen cypress and sometimes considered the same species\nn11631854\tArizona timber 
tree with bluish silvery foliage\nn11631985\trelatively low wide-spreading endemic on Guadalupe Island; cultivated for its bluish foliage\nn11632167\ttall California cypress endemic on Monterey Bay; widely used for ornament as well as reforestation and shelterbelt planting\nn11632376\ttall spreading evergreen found in Mexico having drooping branches; believed to have been introduced into Portugal from Goa\nn11632619\ttall Eurasian cypress with thin grey bark and ascending branches\nn11632929\tevergreen of Tasmanian mountains having sharp-pointed leaves that curve inward\nn11633284\ta small South American evergreen having coppery bark and pretty foliage\nn11634736\ttall tree of the Pacific coast of North America having foliage like cypress and cinnamon-red bark\nn11635152\tslow-growing medium-sized cedar of east coast of the United States; resembles American arborvitae\nn11635433\tlarge timber tree of western North America with trunk diameter to 12 feet and height to 200 feet\nn11635830\ttall evergreen of the Pacific coast of North America often cultivated for ornament\nn11636204\ttall evergreen of Japan and China yielding valuable soft wood\nn11636835\tberrylike fruit of a plant of the genus Juniperus especially the berrylike cone of the common juniper\nn11639084\tany of several attractive trees of southwestern South America and New Zealand and New Caledonia having glossy evergreen leaves and scented wood\nn11639306\tNew Zealand timber tree resembling the cypress\nn11639445\tevergreen tree of New Zealand resembling the kawaka\nn11640132\tlarge fast-growing Chinese monoecious tree having flat bright-green deciduous leaves and small globular cones; commonly cultivated in United States as an ornamental; known as a fossil before being discovered in China\nn11643835\tany of several Asian and North American conifers of the genera Thuja and Thujopsis\nn11644046\tlarge valuable arborvitae of northwestern United States\nn11644226\tsmall evergreen of eastern North America 
having tiny scalelike leaves on flattened branchlets\nn11644462\tAsiatic shrub or small tree widely planted in United States and Europe; in some classifications assigned to its own genus\nn11644872\tslow-growing medium-large Japanese evergreen used as an ornamental\nn11645163\tAsiatic conifers resembling firs\nn11645590\tnewly discovered (1994) pine thought to have been long extinct; Australia; genus and species names not yet assigned\nn11645914\tany of several tall South American or Australian trees with large cones and edible seeds\nn11646167\tlarge Chilean evergreen conifer having intertwined branches and bearing edible nuts\nn11646344\tevergreen of Australia and Norfolk Island in the South Pacific\nn11646517\tvery tall evergreen of New Caledonia and the New Hebrides similar to norfolk island pine\nn11646694\tAustralian conifer bearing two-inch seeds tasting like roasted chestnuts; among the aborigines the tree is hereditary property protected by law\nn11646955\tpine of Australia and New Guinea; yields a valuable light even-textured wood\nn11647306\tany of various trees of the genus Agathis; yield dammar resin\nn11647703\ttall timber tree of New Zealand having white straight-grained wood\nn11647868\tnative to the Moluccas and Philippines; a source of dammar resin\nn11648039\tAustralian timber tree resembling the kauri but having wood much lighter in weight and softer\nn11648268\tNew Zealand tree with glossy leaves and scaly reddish-brown bark\nn11648776\tany of several evergreen trees and shrubs of eastern Asia resembling yew and having large seeds enclosed in a fleshy envelope; sometimes cultivated as ornamentals\nn11649150\tCalifornia evergreen having a fruit resembling a nutmeg but with a strong turpentine flavor\nn11649359\trare small evergreen of northern Florida; its glossy green leaves have an unpleasant fetid smell when crushed\nn11649878\tAustralasian evergreen conifer having a graceful head of foliage resembling celery that is composed of phyllodes 
borne in the axils of scalelike leaves\nn11650160\tmedium tall celery pine of Tasmania\nn11650307\tmedium tall celery pine of New Zealand\nn11650430\tsmall shrubby celery pine of New Zealand\nn11650558\tany of various trees having yellowish wood or yielding a yellow extract\nn11650759\tany of various gymnospermous trees having yellow wood\nn11652039\tany evergreen in the southern hemisphere of the genus Podocarpus having a pulpy fruit with one hard seed\nn11652217\tWest Indian evergreen with medium to long leaves\nn11652376\tlarge Australian tree with straight-grained yellow wood that turns brown on exposure\nn11652578\tSouth African tree or shrub having a rounded crown\nn11652753\terect or shrubby tree of Africa having ridged dark grey bark and rigid glossy medium to long leaves\nn11652966\tlow wide-spreading coniferous shrub of New Zealand mountains\nn11653126\tvaluable timber tree of New Zealand yielding hard reddish wood used for furniture and bridges and wharves\nn11653570\tmedium-sized tree of South Africa\nn11653904\tNew Zealand evergreen valued for its light easily worked wood\nn11654293\ttall New Zealand timber tree\nn11654438\tNew Zealand silver pine of conical habit with long slender flexuous branches; adapted to cold wet summers and high altitudes\nn11654984\tsmall tropical rain forest tree of Indonesia and Malaysia\nn11655152\ta rain forest tree or shrub of New Caledonia having a conic crown and pale green sickle-shaped leaves; host species for the rare parasite yew\nn11655592\tNew Zealand shrub\nn11655974\ttimber tree of New Zealand having shiny white wood\nn11656123\tTasmanian timber tree with yellow aromatic wavy-grained wood used for carving and ship building; sometimes placed in genus Dacrydium\nn11656549\tabout the hardiest Podocarpaceae species; prostrate spreading shrub similar to mountain rimu; mountains of southern Chile\nn11656771\tlow-growing to prostrate shrub with slender trailing branches; New Zealand\nn11657585\tmedium-sized tree having 
glossy lanceolate leaves; southern China to Taiwan and southern Japan\nn11658331\tNew Zealand conifer used for lumber; the dark wood is used for interior carpentry\nn11658544\tconifer of Australia and New Zealand\nn11658709\tSouth American evergreen tree or shrub\nn11659248\tsmall yew having attractive foliage and partially weeping branches cultivated as an ornamental; mountains of southern Chile\nn11659627\ta large fast-growing monoecious tropical evergreen tree having large glossy lanceolate leaves; of rain forests of Sumatra and Philippines to northern Queensland\nn11660300\ttall evergreen having a symmetrical spreading crown and needles growing in whorls that resemble umbrellas at ends of twigs\nn11661372\tany of numerous evergreen trees or shrubs having red cup-shaped berries and flattened needlelike leaves\nn11661909\tpredominant yew in Europe; extraordinarily long-lived and slow growing; one of the oldest species in the world\nn11662128\tsmall or medium irregularly branched tree of the Pacific coast of North America; yields fine hard close-grained wood\nn11662371\tshrubby hardy evergreen of China and Japan having lustrous dark green foliage; cultivated in the eastern United States\nn11662585\tsmall bushy yew of northern Florida having spreading branches and very narrow leaves\nn11662937\tlarge yew native to New Caledonia; cultivated in eastern Australia and New Zealand and Hawaii\nn11663263\tyew of southeastern China, differing from the Old World yew in having white berries\nn11664418\tdeciduous dioecious Chinese tree having fan-shaped leaves and fleshy yellow seeds; exists almost exclusively in cultivation especially as an ornamental street tree\nn11665372\tplants having seeds in a closed ovary\nn11666854\tflowering plant with two cotyledons; the stem grows by deposit on its outside\nn11668117\ta monocotyledonous flowering plant; the stem grows by deposits on its inside\nn11669786\ta diminutive flower (especially one that is part of a composite 
flower)\nn11669921\ta plant cultivated for its blooms or blossoms\nn11672269\ta flower that blooms in a particular way\nn11672400\twild or uncultivated flowering plant\nn11674019\tflower having no petals\nn11674332\tthe flowering part of a plant or arrangement of flowers on a stalk\nn11675025\tthe bud of a rose\nn11675404\tthe crown of the stamen in plants of the genus Asclepias\nn11675738\ta coherent mass of pollen grains (as in orchids)\nn11676500\tthe female ovule-bearing part of a flower composed of ovary and style and stigma\nn11676743\tthe enlarged receptacle in which the pistil is borne\nn11676850\tthe stalk of a pistil that raises it above the receptacle\nn11677485\tan enlargement at the base of the style in some Umbelliferae\nn11677902\ta slender stalk that furnishes an axis for a carpel\nn11678010\tthe stalk of a corn plant\nn11678299\tthe stalk of a leaflet\nn11678377\ta carpel with one seed; one of a pair split apart at maturity\nn11679378\tminute opening in the wall of an ovule through which the pollen tube enters\nn11680457\t(botany) a slender tubular outgrowth from a spore in germination\nn11680596\t(botany) a slender tubular outgrowth from a pollen grain when deposited on the stigma for a flower; it penetrates the style and conveys the male gametes to the ovule\nn11682659\tsmall asexual reproductive structure in e.g. 
liverworts and mosses that detaches from the parent and develops into a new individual\nn11683216\tthe seed-producing cone of a cypress tree\nn11683838\ta gland (often a protuberance or depression) that secretes nectar\nn11684264\tthe ripened and variously modified walls of a plant ovary\nn11684499\toutermost layer of the pericarp of fruits as the skin of a peach or grape\nn11684654\tthe middle layer of a pericarp\nn11685091\ta small hard seed found in some fruits\nn11685621\tnarrow elongated seed capsule peculiar to the family Cruciferae\nn11686195\ta reduced or scarcely developed leaf at the start of a plant's life (i.e., cotyledons) or in the early stages of leaf development\nn11686652\tthe nutritive tissue outside the sac containing the embryo in some seeds\nn11686780\ta plant that bears fruit once and dies\nn11686912\tthe spore-producing individual or phase in the life cycle of a plant having alternation of generations\nn11687071\tthe gamete-bearing individual or phase in the life cycle of a plant having alternation of generations\nn11687432\ta plant structure that produces megaspores\nn11687789\tsmaller of the two types of spore produced in heterosporous plants; develops in the pollen sac into a male gametophyte\nn11687964\ta plant structure that produces microspores\nn11688069\tin non-flowering plants, a sporophyll that bears only microsporangia\nn11688378\tprimitive cell or group of cells from which a mother cell develops\nn11689197\thard shiny grey seed of a bonduc tree; used for making e.g. 
jewelry\nn11689367\thard pearly seeds of an Asiatic grass; often used as beads\nn11689483\tany of several seeds that yield oil\nn11689678\tthe toxic seed of the castor-oil plant; source of castor oil\nn11689815\tseed of cotton plants; source of cottonseed oil\nn11689957\tseed of candlenut tree; source of soil used in varnishes\nn11690088\tthe stone seed of a peach\nn11690254\tthe cuplike or ringlike or tubular structure of a flower which bears the sepals and stamens and calyx (as in Rosaceae)\nn11690455\tpart of the perianth that is usually brightly colored\nn11691046\t(botany) the whorl of petals of a flower that collectively form an inner floral envelope or layer of the perianth\nn11691857\t(botany) either of the two parts of a bilabiate corolla or calyx\nn11692265\tcollective term for the outer parts of a flower consisting of the calyx and corolla and enclosing the stamens and pistils\nn11692792\tpappus of a thistle consisting of silky featherlike hairs attached to the seed-like fruit of a thistle\nn11693981\tany of several tropical American trees bearing fruit with soft edible pulp\nn11694300\tsmall tropical American tree bearing round or oblong fruit\nn11694469\ttropical American tree grown in southern United States having a whitish pink-tinged fruit\nn11694664\tsmall tropical American tree bearing large succulent slightly acid fruit\nn11694866\tsmall tropical American tree bearing a bristly heart-shaped acid tropical fruit\nn11695085\ttropical American tree bearing sweet pulpy fruit with thick scaly rind and shiny black seeds\nn11695285\tsmall evergreen tree of tropical America with edible fruit; used chiefly as grafting stock\nn11695599\tsmall tree native to the eastern United States having oblong leaves and fleshy fruit\nn11695974\tevergreen Asian tree with aromatic greenish-yellow flowers yielding a volatile oil; widely grown in the tropics as an ornamental\nn11696450\tsource of most of the lancewood of commerce\nn11696935\ttropical west African evergreen 
tree bearing pungent aromatic seeds used as a condiment and in folk medicine\nn11697560\tany of numerous plants of the genus Berberis having prickly stems and yellow flowers followed by small red berries\nn11697802\tdeciduous shrub of eastern North America whose leaves turn scarlet in autumn and having racemes of yellow flowers followed by ellipsoid glossy red berries\nn11698042\tupright deciduous European shrub widely naturalized in United States having clusters of juicy berries\nn11698245\tcompact deciduous shrub having persistent red berries; widespread in cultivation especially for hedges\nn11699442\tornamental evergreen shrub of Pacific coast of North America having dark green pinnate leaves and racemes of yellow flowers followed by blue-black berries\nn11699751\tsmall shrub with grey-green leaves and yellow flowers followed by glaucous blue berries\nn11700058\tNorth American herb with poisonous root stock and edible though insipid fruit\nn11700279\tedible but insipid fruit of the May apple plant\nn11700864\tdeciduous shrubs having aromatic bark; eastern China; southwestern and eastern United States\nn11701066\thardy shrub of southeastern United States having clove-scented wood and fragrant red-brown flowers\nn11701302\tstraggling aromatic shrub of southwestern United States having fragrant brown flowers\nn11702713\trapidly growing deciduous tree of low mountainsides of China and Japan; grown as an ornamental for its dark blue-green candy-scented foliage that becomes yellow to scarlet in autumn\nn11703669\tany of various aromatic trees of the laurel family\nn11704093\tsmall Mediterranean evergreen tree with small blackish berries and glossy aromatic leaves used for flavoring in cooking; also used by ancient Greeks to crown victors\nn11704620\tlarge evergreen tree of warm regions whose aromatic wood yields camphor\nn11704791\ttropical Asian tree with aromatic yellowish-brown bark; source of the spice cinnamon\nn11705171\tChinese tree with aromatic bark; yields 
a less desirable cinnamon than Ceylon cinnamon\nn11705387\taromatic bark of the cassia-bark tree; less desirable as a spice than Ceylon cinnamon bark\nn11705573\ttropical southeast Asian tree with aromatic bark; yields a bark used medicinally\nn11705776\taromatic bark of Saigon cinnamon used medicinally as a carminative\nn11706325\tdeciduous shrub of the eastern United States having highly aromatic leaves and bark and yellow flowers followed by scarlet or yellow berries\nn11706761\ttropical American tree bearing large pulpy green fruits\nn11706942\tsmall tree of southern United States having dark red heartwood\nn11707229\tyellowwood tree with brittle wood and aromatic leaves and bark; source of sassafras oil; widely distributed in eastern North America\nn11707827\tPacific coast tree having aromatic foliage and small umbellate flowers followed by olivelike fruit; yields a hard tough wood\nn11708658\tany of several evergreen shrubs and small trees of the genus Illicium\nn11708857\tsmall shrubby tree with purple flowers; found in wet soils of southeastern United States\nn11709045\tsmall shrubby tree of Japan and Taiwan; flowers are not fragrant\nn11709205\tsmall tree of China and Vietnam bearing anise-scented star-shaped fruit used in food and medicinally as a carminative\nn11709674\tany shrub or tree of the genus Magnolia; valued for their longevity and exquisite fragrant blooms\nn11710136\tevergreen tree of southern United States having large stiff glossy leaves and huge white sweet-smelling flowers\nn11710393\tsmall deciduous tree of eastern North America having creamy white flowers and large leaves in formations like umbrellas at the ends of branches\nn11710658\tsmall erect deciduous tree with large leaves in coiled formations at branch tips\nn11710827\tAmerican deciduous magnolia having large leaves and fruit like a small cucumber\nn11710987\tlarge deciduous shrub or tree of southeastern United States having huge leaves in dense false whorls and large creamy 
flowers tinged purple toward the base\nn11711289\tlarge deciduous shrub or small tree having large open rosy to purplish flowers; native to Asia; prized as an ornamental in eastern North America\nn11711537\tdeciduous shrubby magnolia from Japan having fragrant white starlike flowers blooming before leaves unfold; grown as an ornamental in United States\nn11711764\tshrub or small tree having rather small fragrant white flowers; abundant in southeastern United States\nn11711971\ta genus of flowering tree of the family Magnoliaceae found from Malay to southern China\nn11712282\ttall North American deciduous timber tree having large tulip-shaped greenish yellow flowers and conelike fruit; yields soft white woods used especially for cabinet work\nn11713164\tplant of the family Menispermaceae having red or black fruit with crescent- or ring-shaped seeds\nn11713370\ta woody vine of eastern North America having large oval leaves and small white flowers and purple to blue-black fruits\nn11713763\twoody vine of southeastern United States resembling the common moonseed but having red fruits\nn11714382\tEast Indian tree widely cultivated in the tropics for its aromatic seed; source of two spices: nutmeg and mace\nn11715430\ta water lily having large leaves and showy fragrant flowers that float on the water; of temperate and tropical regions\nn11715678\ta water lily with white flowers\nn11716698\tof flowing waters of the southeastern United States; may form obstructive mats in streams\nn11717399\tnative to eastern Asia; widely cultivated for its large pink or white flowers\nn11717577\twater lily of eastern North America having pale yellow blossoms and edible globular nutlike seeds\nn11718296\tcommon aquatic plant of eastern North America having floating and submerged leaves and white yellow-spotted flowers\nn11718681\taquatic plant with floating oval leaves and purple flowers; in lakes and slow-moving streams; suitable for aquariums\nn11719286\tany of numerous plants widely 
cultivated for their showy single or double red or pink or white flowers\nn11720353\tany of various plants of the genus Ranunculus\nn11720643\tperennial European buttercup with yellow spring flowers widely naturalized especially in eastern North America\nn11720891\tplant of ponds and slow streams having submerged and floating leaves and white flowers; Europe and North America\nn11721337\tperennial herb native to Europe but naturalized elsewhere having heart-shaped leaves and yellow flowers resembling buttercups; its tuberous roots have been used as a poultice to relieve piles\nn11721642\tsemiaquatic Eurasian perennial crowfoot with leaves shaped like spears; naturalized in New Zealand\nn11722036\tsemiaquatic European crowfoot with leaves shaped like spears\nn11722342\tperennial of western North America\nn11722466\tperennial European herb with long creeping stolons\nn11722621\tannual herb growing in marshy places\nn11722982\tany of various usually poisonous plants of the genus Aconitum having tuberous roots and palmately lobed leaves and blue or white flowers\nn11723227\ta poisonous herb native to northern Europe having hooded blue-purple flowers; the dried leaves and roots yield aconite\nn11723452\tpoisonous Eurasian perennial herb with broad rounded leaves and yellow flowers and fibrous rootstock\nn11723770\ta plant of the genus Actaea having acrid poisonous berries\nn11723986\ta poisonous berry of a plant of the genus Actaea\nn11724109\tNorth American perennial herb with alternately compound leaves and racemes of small white flowers followed by bright red oval poisonous berries\nn11724660\tEurasian herb cultivated for its deep red flowers with dark centers\nn11725015\tany woodland plant of the genus Anemone grown for its beautiful flowers and whorls of dissected leaves\nn11725311\tsilky-foliaged herb of the Rocky Mountains with bluish-white flowers\nn11725480\tcommon summer-flowering woodland herb of Labrador to Colorado\nn11725623\ta common North American 
anemone with cylindrical fruit clusters resembling thimbles\nn11725821\tEuropean anemone with solitary white flowers common in deciduous woodlands\nn11725973\tcommon anemone of eastern North America with solitary pink-tinged white flowers\nn11726145\tthimbleweed of northern North America\nn11726269\tEurasian herb with solitary nodding fragrant white flowers\nn11726433\tthimbleweed of central and eastern North America\nn11726707\twoodland flower native to eastern North America having cup-shaped flowers reminiscent of anemone but more delicate\nn11727091\ta plant of the genus Aquilegia having irregular showy spurred flowers; north temperate regions especially mountains\nn11727358\tcolumbine of eastern North America having long-spurred red flowers\nn11727540\tcolumbine of the Rocky Mountains having long-spurred blue flowers\nn11727738\tcommon European columbine having variously colored (white or blue to purple or red) short-spurred flowers; naturalized in United States\nn11728099\tswamp plant of Europe and North America having bright yellow flowers resembling buttercups\nn11728769\tbugbane of the eastern United States having erect racemes of white flowers\nn11728945\tNorth American bugbane found from Maine and Ontario to Wisconsin and south to Georgia\nn11729142\tbugbane of Siberia and eastern Asia having ill-smelling green-white flowers\nn11729478\tany of various ornamental climbing plants of the genus Clematis usually having showy flowers\nn11729860\terect clematis of Florida having pink to purple flowers\nn11730015\tclimber of southern United States having bluish-purple flowers\nn11730458\tChinese clematis with serrate leaves and large yellow flowers\nn11730602\twoody vine of Texas having showy solitary nodding scarlet flowers\nn11730750\twoody vine of the southern United States having purple or blue flowers with leathery recurved sepals\nn11730933\tscandent subshrub of southeastern United States having large red-purple bell-shaped flowers with leathery recurved 
sepals\nn11731157\tcommon climber of eastern North America that sprawls over other plants and bears numerous panicles of small creamy white flowers\nn11731659\tclimber of northeastern North America having waxy purplish-blue flowers\nn11732052\tlow-growing perennial of North America woodlands having trifoliate leaves and yellow rootstock and white flowers\nn11732567\tcommonly cultivated larkspur of southern Europe having unbranched spikelike racemes of blue or sometimes purplish or pinkish flowers; sometime placed in genus Delphinium\nn11733054\tany plant of the genus Delphinium having palmately divided leaves and showy spikes of variously colored spurred flowers; some contain extremely poisonous substances\nn11733312\tany of numerous cultivated plants of the genus Delphinium\nn11733548\tsmall Old World perennial herb grown for its bright yellow flowers which appear in early spring often before snow is gone\nn11734493\tslightly hairy perennial having deep green leathery leaves and flowers that are ultimately purplish-green\nn11734698\tdeciduous plant with large deep green pedate leaves and nodding saucer-shaped green flowers\nn11735053\tany of several plants of the genus Hepatica having three-lobed leaves and white or pinkish flowers in early spring; of moist and mossy subalpine woodland areas of north temperate regions\nn11735570\tperennial herb of northeastern United States having a thick knotted yellow rootstock and large rounded leaves\nn11735977\tslender erect perennial of eastern North America having tuberous roots and pink-tinged white flowers; resembles meadow rue\nn11736362\tspectacular perennial native of wet montane grasslands of Peru; formerly included in genus Ranunculus\nn11736694\tany plant of the genus Nigella\nn11736851\tEuropean garden plant having finely cut leaves and white or pale blue flowers\nn11737009\tnigella of Spain and southern France\nn11737125\therb of the Mediterranean region having pungent seeds used like those of 
caraway\nn11737534\tany plant of the genus Pulsatilla; sometimes included in genus Anemone\nn11738547\tany of various herbs of the genus Thalictrum; sometimes rhizomatous or tuberous perennials found in damp shady places and meadows or stream banks; have lacy foliage and clouds of small purple or yellow flowers\nn11738997\ttall perennial of the eastern United States having large basal leaves and white summer flowers\nn11739365\tany of several plants of the genus Trollius having globose yellow flowers\nn11739978\tSouth American evergreen tree yielding winter's bark and a light soft wood similar to basswood\nn11740414\tevergreen shrub or small tree whose foliage is conspicuously blotched with red and yellow and having small black fruits\nn11741175\tbog shrub of north temperate zone having bitter-tasting fragrant leaves\nn11741350\tany shrub or small tree of the genus Myrica with aromatic foliage and small wax-coated berries\nn11741575\tevergreen aromatic shrubby tree of southeastern United States having small hard berries thickly coated with white wax used for candles\nn11741797\tdeciduous aromatic shrub of eastern North America with grey-green wax-coated berries\nn11742310\tdeciduous shrub of eastern North America with sweet scented fernlike leaves and tiny white flowers\nn11742878\tvery small deciduous dioecious tree or shrub of damp habitats in southeastern United States having extremely light wood\nn11744011\trush of Australia\nn11744108\tlow-growing annual rush of damp low-lying ground; nearly cosmopolitan\nn11744471\ttufted wiry rush of wide distribution\nn11745817\tany of various trees or shrubs having mottled or striped wood\nn11746600\ttropical American and east African tree with strikingly marked hardwood used in cabinetwork\nn11747468\tan erect or climbing bean or pea plant of the family Leguminosae\nn11748002\tthe fruit or seed of any of various bean or pea plants consisting of a case that splits along both sides when ripe and having the seeds attach to 
one side of the case\nn11748811\tunderground pod of the peanut vine\nn11749112\tWest Indian tree yielding a fine grade of green ebony\nn11749603\tBrazilian tree with handsomely marked wood\nn11750173\tfragrant black nutlike seeds of the tonka bean tree; used in perfumes and medicines and as a substitute for vanilla\nn11750508\tWest Indian locust tree having pinnate leaves and panicles of large white or purplish flowers; yields very hard tough wood\nn11750989\terect annual or biennial plant grown extensively especially for hay and soil improvement\nn11751765\teither of two Australian plants of the genus Swainsona that are poisonous to sheep\nn11751974\terect or trailing perennial of eastern Australia having axillary racemes of blue to purple or red flowers\nn11752578\ta plant of the genus Trifolium\nn11752798\tEuropean mountain clover with fragrant usually pink flowers\nn11752937\tclover native to Ireland with yellowish flowers; often considered the true or original shamrock\nn11753143\tsouthern European annual with spiky heads of crimson flower; extensively cultivated in United States for forage\nn11753355\terect to decumbent short-lived perennial having red-purple to pink flowers; the most commonly grown forage clover\nn11753562\tclover of western United States\nn11753700\tcreeping European clover having white to pink flowers and bright green leaves; naturalized in United States; widely grown for forage\nn11754893\tany of various tropical shrubs or trees of the genus Mimosa having usually yellow flowers and compound leaves\nn11756092\tany of various spiny trees or shrubs of the genus Acacia\nn11756329\tsource of a wood mentioned frequently in the Bible; probably a species of genus Acacia\nn11756669\tany of various Australasian trees yielding slender poles suitable for wattle\nn11756870\tAustralian tree that yields tanning materials\nn11757017\tscrubby Australian acacia having extremely foul-smelling blossoms\nn11757190\tEast Indian spiny tree having twice-pinnate 
leaves and yellow flowers followed by flat pods; source of black catechu\nn11757653\tevergreen Australasian tree having white or silvery bark and young leaves and yellow flowers\nn11757851\ttropical American thorny shrub or small tree; fragrant yellow flowers used in making perfumery\nn11758122\ttall Australian acacia yielding highly valued black timber\nn11758276\tshrubby Australian tree having clusters of fragrant golden yellow flowers; widely cultivated as an ornamental\nn11758483\tAfrican tree supposed to mark healthful regions\nn11758799\tEast Indian tree with racemes of yellow-white flowers; cultivated as an ornamental\nn11759224\tany of numerous trees of the genus Albizia\nn11759404\tattractive domed or flat-topped Asiatic tree having bipinnate leaves and flowers with long silky stamens\nn11759609\tlarge spreading Old World tree having large leaves and globose clusters of greenish-yellow flowers and long seed pods that clatter in the wind\nn11759853\tlarge ornamental tropical American tree with bipinnate leaves and globose clusters of flowers with crimson stamens and seed pods that are eaten by cattle\nn11760785\tany of various shrubs and small trees valued for their fine foliage and attractive spreading habit and clustered white to deep pink or red flowers\nn11761202\ttropical South American tree having a wide-spreading crown of bipinnate leaves and coiled ear-shaped fruits; grown for shade and ornament as well as valuable timber\nn11761650\tany tree or shrub of the genus Inga having pinnate leaves and showy usually white flowers; cultivated as ornamentals\nn11761836\tornamental evergreen tree with masses of white flowers; tropical and subtropical America\nn11762018\ttropical tree of Central America and West Indies and Puerto Rico having spikes of white flowers; used as shade for coffee plantations\nn11762433\tlow scrubby tree of tropical and subtropical North America having white flowers tinged with yellow resembling mimosa and long flattened 
pods\nn11762927\ta tree of the West Indies and Florida and Mexico; resembles tamarind and has long flat pods\nn11763142\tWest Indian tree yielding a hard dark brown wood resembling mahogany in texture and value\nn11763625\tany of several Old World tropical trees of the genus Parkia having heads of red or yellow flowers followed by pods usually containing edible seeds and pulp\nn11763874\ttall evergreen rain forest tree with wide-spreading crown having yellow-white flowers; grown as an ornamental in parks and large gardens\nn11764478\tcommon thorny tropical American tree having terminal racemes of yellow flowers followed by sickle-shaped or circinate edible pods and yielding good timber and a yellow dye and mucilaginous gum\nn11764814\terect shrub with small if any spines having racemes of white to yellow flowers followed by curved pointed pods and black shiny seeds; West Indies and Florida\nn11765568\tthorny deep-rooted drought-resistant shrub native to southwestern United States and Mexico bearing pods rich in sugar and important as livestock feed; tends to form extensive thickets\nn11766046\tmesquite pod used in tanning and dyeing\nn11766189\tshrub or small tree of southwestern United States and northwestern Mexico having spirally twisted pods\nn11766432\tspirally twisted sweet pod of screwbean mesquite that is used for fodder or ground into meal for feed\nn11767354\tany of several poisonous perennial plants of the genus Apocynum having acrid milky juice and bell-shaped white or pink flowers and a very bitter root\nn11767877\tCanadian dogbane yielding a tough fiber used as cordage by Native Americans; used in folk medicine for pain or inflammation in joints\nn11768816\tevergreen shrub or tree of South Africa\nn11769176\tSouth African shrub having a swollen succulent stem and bearing showy pink and white flowers after the leaves fall; popular as an ornamental in tropics\nn11769621\ta plant of the genus Allamanda having large showy funnel-shaped flowers in terminal 
cymes\nn11769803\tvigorous evergreen climbing plant of South America having glossy leathery foliage and golden yellow flowers\nn11770256\tevergreen tree of eastern Asia and Philippines having large leathery leaves and small green-white flowers in compact cymes; bark formerly used medicinally\nn11771147\tevergreen woody twiner with large glossy leaves and showy corymbs of fragrant white trumpet-shaped flowers\nn11771539\ta shrub of the genus Carissa having fragrant white flowers and plumlike red to purple-black fruits\nn11771746\tSouth African shrub having forked spines and plumlike fruit; frequently used as hedging\nn11771924\tvery large closely branched South African shrub having forked bright green spines and shiny leaves\nn11772408\tcommonly cultivated Old World woody herb having large pinkish to red flowers\nn11772879\ttropical Asian tree with hard white wood and bark formerly used as a remedy for dysentery and diarrhea\nn11773408\tshrubby climber having glossy leaves and white funnel-shaped flowers with yellow throats\nn11773628\twoody vine of Argentina grown as an ornamental for its glossy leaves and racemes of large fragrant funnel-shaped creamy-white flowers\nn11773987\tan ornamental but poisonous flowering shrub having narrow evergreen leaves and clusters of fragrant white to pink or red flowers: native to East Indies but widely cultivated in warm regions\nn11774513\tany of various tropical American deciduous shrubs or trees of the genus Plumeria having milky sap and showy fragrant funnel-shaped variously colored flowers\nn11774972\ttall sparingly branched conical tree having large fragrant yellow flowers with white centers\nn11775340\tany shrub or small tree of the genus Rauwolfia having leaves in whorls and cymose flowers; yield substances used medicinally especially as emetics or purgatives or antihypertensives\nn11775626\tEast Indian climbing shrub with twisted limbs and roots resembling serpents\nn11776234\tplant that is a source of 
strophanthin\nn11777080\ttropical American shrub or small tree having glossy dark green leaves and fragrant saffron yellow to orange or peach- colored flowers; all parts highly poisonous\nn11778092\twidely cultivated as a groundcover for its dark green shiny leaves and usually blue-violet flowers\nn11778257\tplant having variegated foliage and used for window boxes\nn11779300\tany plant of the family Araceae; have small flowers massed on a spadix surrounded by a large spathe\nn11780148\tcommon European arum with lanceolate spathe and short purple spadix; emerges in early spring; source of a starch called arum\nn11780424\tornamental plant of Middle East cultivated for its dark purple spathe\nn11781176\tthe aromatic root of the sweet flag used medicinally\nn11782036\tany plant of the genus Alocasia having large showy basal leaves and boat-shaped spathe and reddish berries\nn11782266\tlarge evergreen with extremely large erect or spreading leaves; cultivated widely in tropics for its edible rhizome and shoots; used in wet warm regions as a stately ornamental\nn11782761\tany plant of the genus Amorphophallus\nn11782878\tputrid-smelling aroid of southeastern Asia (especially the Philippines) grown for its edible tuber\nn11783162\tfoul-smelling somewhat fleshy tropical plant of southeastern Asia cultivated for its edible corms or in the greenhouse for its large leaves and showy dark red spathe surrounding a large spadix\nn11783920\tany of various tropical American plants cultivated for their showy foliage and flowers\nn11784126\tcommonly cultivated anthurium having bright scarlet spathe and spadix\nn11784497\tcommon American spring-flowering woodland herb having sheathing leaves and an upright club-shaped spadix with overarching green and purple spathe producing scarlet berries\nn11785276\ttuberous perennial having a cowl-shaped maroon or violet-black spathe; Mediterranean; Canaries; Azores\nn11785668\tany plant of the genus Caladium cultivated for their ornamental 
foliage variously patterned in white or pink or red\nn11785875\tmost popular caladium; cultivated in many varieties since the late 19th century\nn11786131\tplant of wetlands and bogs of temperate regions having small greenish flowers partly enclosed in a white spathe and red berries\nn11786539\therb of the Pacific islands grown throughout the tropics for its edible root and in temperate areas as an ornamental for its large glossy leaves\nn11786843\tedible starchy tuberous root of taro plants\nn11787190\tany plant of the genus Cryptocoryne; evergreen perennials growing in fresh or brackish water; tropical Asia\nn11788039\tany plant of the genus Dracontium; strongly malodorous tropical American plants usually with gigantic leaves\nn11788727\tevergreen liana widely cultivated for its variegated foliage\nn11789066\tclump-forming deciduous perennial swamp plant of western North America similar to Symplocarpus foetidus but having a yellow spathe\nn11789438\tany plant of the genus Monstera; often grown as houseplants\nn11789589\ttropical American vine having roots that hang like cords and cylindrical fruit with a pineapple and banana flavor\nn11789962\tany plant of the genus Nephthytis\nn11790089\ttropical rhizomatous plant cultivated as an ornamental for its large sagittate leaves\nn11790788\tan aquatic plant of the genus Peltandra; North America\nn11790936\tperennial herb of the eastern United States having arrowhead-shaped leaves and an elongate pointed spathe and green berries\nn11791341\toften grown as a houseplant\nn11791569\tpantropical floating plant forming a rosette of wedge-shaped leaves; a widespread weed in rivers and lakes\nn11792029\tany of various tropical lianas of the genus Scindapsus\nn11792341\tany of various plants of the genus Spathiphyllum having a white or green spathe and a spike of fragrant flowers and often cultivated as an ornamental\nn11792742\tdeciduous perennial low-growing fetid swamp plant of eastern North America having minute flowers 
enclosed in a mottled greenish or purple cowl-shaped spathe\nn11793403\ttropical American aroid having edible tubers that are cooked and eaten like yams or potatoes\nn11793779\tSouth African plant widely cultivated for its showy pure white spathe and yellow spadix\nn11794024\tcalla having a rose-colored spathe\nn11794139\tany of several callas of the genus Zantedeschia having yellow spathes\nn11794519\tany small or minute aquatic plant of the family Lemnaceae that float on or near the surface of shallow ponds\nn11795049\tof temperate regions except eastern Asia and Australia\nn11795216\tcosmopolitan in temperate regions except North America\nn11795580\tcosmopolitan except South America and New Zealand and some oceanic islands\nn11796005\tany of various aquatic plants of the genus Wolffia; throughout warmer regions of the world\nn11796188\tsmallest flowering plants known; of the Americas\nn11797321\tany of various plants of the genus Aralia; often aromatic plants having compound leaves and small umbellate flowers\nn11797508\tsmall deciduous clump-forming tree or shrub of eastern United States\nn11797981\tunarmed woody rhizomatous perennial plant distinguished from wild sarsaparilla by more aromatic roots and panicled umbels; southeastern North America to Mexico\nn11798270\tbristly herb of eastern and central North America having black fruit and medicinal bark\nn11798496\tdeciduous clump-forming Asian shrub or small tree; adventive in the eastern United States\nn11798688\tsimilar to American angelica tree but less prickly; China\nn11798978\tOld World vine with lobed evergreen leaves and black berrylike fruits\nn11799331\tsmall roundheaded New Zealand tree having large resinous leaves and panicles of green-white flowers\nn11799732\tChinese herb with palmately compound leaves and small greenish flowers and forked aromatic roots believed to have medicinal powers\nn11800236\taromatic root of ginseng plants\nn11800565\terect evergreen shrub or small tree of Australia and 
northern New Guinea having palmately compound leaves\nn11801392\tcreeping plant having curving flowers thought to resemble fetuses; native to Europe; naturalized Great Britain and eastern North America\nn11801665\thardy deciduous vine having large leaves and flowers with the calyx tube curved like the bowl of a pipe\nn11801891\tbirthwort of the eastern United States woodlands\nn11802410\tdeciduous low-growing perennial of Canada and eastern and central United States\nn11802586\tevergreen low-growing perennial having mottled green and silvery-grey heart-shaped pungent leaves; Virginia to South Carolina\nn11802800\twild ginger having persistent heart-shaped pungent leaves; West Virginia to Alabama\nn11802995\tthick creeping evergreen herb of western Europe\nn11805255\ta plant of the family Caryophyllaceae\nn11805544\tEuropean annual having large trumpet-shaped reddish-purple flowers and poisonous seed; a common weed in grainfields and beside roadways; naturalized in America\nn11805956\tlow-growing chiefly perennial plant usually with small white flowers suitable for e.g. 
rock gardens\nn11806219\tboreal or alpine sandwort\nn11806369\tdeep-rooted perennial of southeastern United States\nn11806521\tperennial succulent herb with small solitary axillary or terminal flowers\nn11806679\tlow perennial tufted plant of southeastern North America\nn11806814\tEurasian annual sprawling plant naturalized throughout North America\nn11807108\tany of various plants related to the common chickweed\nn11807525\tchickweed with hairy silver-grey leaves and rather large white flowers\nn11807696\twidespread in the Arctic and on mountains in Europe\nn11807979\tany of various flowers of plants of the genus Dianthus cultivated for their fragrant flowers\nn11808299\tEurasian pink widely cultivated for its flat-topped dense clusters of varicolored flowers\nn11808468\tEurasian plant with pink to purple-red spice-scented usually double flowers; widely cultivated in many varieties and many colors\nn11808721\tChinese pink with deeply toothed rose-lilac flowers with a purplish eye; usually raised as an annual\nn11808932\ta flowering variety of China pink distinguished by jagged-edged petals\nn11809094\tlow-growing loosely mat-forming Eurasian pink with a single pale pink flower with a crimson center\nn11809271\tmat-forming perennial of central Europe with large fragrant pink or red flowers\nn11809437\tmuch-branched pink with flowers in clusters; closely related to sweet William\nn11809594\tEuropean pink cultivated for its very fragrant pink or rosy flowers\nn11809754\tEurasian perennial pink having fragrant lilac or rose flowers with deeply fringed margins\nn11810030\tspiny-leaved perennial herb of southern Europe having terminal clusters of small flowers\nn11810358\ttall plant with small lance-shaped leaves and numerous tiny white or pink flowers\nn11811059\tglabrous annual with slender taproot and clusters of white flowers; western Europe especially western Mediterranean and Atlantic coastal areas\nn11811473\tmostly perennial herbs with sticky stems that catch 
insects; widespread in north temperate zone\nn11811706\tcommon perennial native to Europe and western Asia having usually pink flowers with ragged petals\nn11811921\tEurasian garden perennial having scarlet flowers in dense terminal heads\nn11812094\tan old cottage garden plant of southeastern Europe widely cultivated for its attractive white woolly foliage and showy crimson flowers\nn11812910\tlow-growing herb having clusters of small white four-petaled flowers\nn11813077\tloosely matted plant with moss-like foliage studded with tiny starry four-petaled white blossoms; mountains of central and southern Europe\nn11814584\tplant of European origin having pink or white flowers and leaves yielding a detergent when bruised\nn11814996\twidely distributed low-growing Eurasian herb having narrow leaves and inconspicuous green flowers\nn11815491\tany plant of the genus Silene\nn11815721\ttuft- or mat-forming dwarf perennial of Arctic regions of western and central Europe and North America\nn11815918\tperennial of eastern and central North America having short-stalked pink or white flowers in hairy clusters\nn11816121\tbiennial European catchfly having red or pink flowers; sometimes placed in genus Lychnis\nn11816336\tbluish-green herb having sticky stems and clusters of large evening-opening white flowers with much-inflated calyx; sometimes placed in genus Lychnis\nn11816649\tperennial herb of eastern North America, having red flowers with narrow notched petals\nn11816829\tperennial of Arctic Europe having large white flowers with inflated calyx\nn11817160\tsmall European weed with whorled leaves and white flowers\nn11817501\tprostrate weedy herb with tiny pink flowers; widespread throughout Europe and Asia on sand dunes and heath and coastal cliffs; naturalized in eastern North America\nn11817914\tany of various plants of the genus Stellaria\nn11818069\ta common low-growing annual garden weed with small white flowers; cosmopolitan; so-called because it is eaten by 
chickens\nn11818636\tEuropean annual with pale rose-colored flowers; cultivated flower or self-sown grainfield weed; introduced in North America; sometimes classified as a soapwort\nn11819509\tlow-growing South African succulent plant having a capsular fruit containing edible pulp\nn11819912\tlow-growing showy succulent annual of South Africa having white or pink or red or orange flowers and spatulate leaves covered in papillae that resemble small crystals\nn11820965\tany of several South African plants of the genus Mesembryanthemum cultivated for showy pink or white flowers\nn11821184\tOld World annual widely naturalized in warm regions having white flowers and fleshy foliage covered with hairs that resemble ice\nn11822300\tcoarse sprawling Australasian plant with red or yellow flowers; cultivated for its edible young shoots and succulent leaves\nn11823043\tany of various plants of the genus Amaranthus having dense plumes of green or red flowers; often cultivated for food\nn11823305\tseed of amaranth plants used as a native cereal in Central and South America\nn11823436\tbushy plant of western United States\nn11823756\ttall showy tropical American annual having hairy stems and long spikes of usually red flowers above leaves deeply flushed with purple; seeds often used as cereal\nn11824146\tleaves sometimes used as potherbs; seeds used as cereal; southern United States to Central America; India and China\nn11824344\terect annual of tropical central Asia and Africa having a pair of divergent spines at most leaf nodes\nn11824747\tprolific South American aquatic weed having grasslike leaves and short spikes of white flowers; clogs waterways with dense floating masses\nn11825351\tgarden annual with featherlike spikes of red or yellow flowers\nn11825749\tany of various plants of the genus Froelichia found in sandy soils and on rocky slopes in warmer regions of America; grown for their spikes of woolly white flowers\nn11826198\ttropical American herb having rose to red 
or purple flowers that can be dried without losing color\nn11826569\tany plant of the genus Iresine having colored foliage\nn11827541\tlow-growing strong-smelling coastal shrub of warm parts of the New World having unisexual flowers in conelike spikes and thick succulent leaves\nn11828577\tcommon weedy European plant introduced into North America; often used as a potherb\nn11828973\tEuropean plant naturalized in North America; often collected from the wild as a potherb\nn11829205\tEurasian aromatic oak-leaved goosefoot with many yellow-green flowers; naturalized North America\nn11829672\tannual European plant with spikes of greenish flowers and leaves that are white and hairy on the underside; common as a weed in North America\nn11829922\therb considered fatal to swine\nn11830045\tEuropean annual with coarsely dentate leaves; widespread in United States and southern Canada\nn11830252\tcommon Eurasian weed; naturalized in United States\nn11830400\tEuropean goosefoot with strong-scented foliage; adventive in eastern North America\nn11830714\tany of various herbaceous plants of the genus Atriplex that thrive in deserts and salt marshes\nn11830906\tany of various shrubby plants of the genus Atriplex that thrive in dry alkaline soil\nn11831100\tAsiatic plant resembling spinach often used as a potherb; naturalized in Europe and North America\nn11831297\thandsome low saltbush of arid southwestern United States and Mexico having blue-green prickly-edged leaves often used for Christmas decoration\nn11831521\tspiny shrub with silvery-scurfy foliage of alkaline plains of southwestern United States and Mexico\nn11832214\tbiennial Eurasian plant usually having a swollen edible root; widely cultivated as a food crop\nn11832480\tbeet having a massively swollen red root; widely grown for human consumption\nn11832671\tbeet lacking swollen root; grown as a vegetable for its edible leaves and stalks\nn11832899\tbeet with a large yellowish root; grown chiefly as cattle 
feed\nn11833373\tbushy annual weed of central North America having greenish flowers and winged seeds\nn11833749\ta coarse annual herb introduced into North America from Siberia; dangerous to sheep and cattle on western rangelands because of its high oxalate content\nn11834272\tfleshy maritime plant having fleshy stems with rudimentary scalelike leaves and small spikes of minute flowers; formerly used in making glass\nn11834654\tbushy plant of Old World salt marshes and sea beaches having prickly leaves; burned to produce a crude soda ash\nn11834890\tprickly bushy Eurasian plant; a troublesome weed in central and western United States\nn11835251\tlow hardy much-branched spiny shrub common in alkaline soils of western America\nn11836327\tviscid branched perennial of the southwestern United States and northern Mexico having tuberous roots and deep red flowers\nn11836722\tany of various plants of the genus Abronia of western North America and Mexico having flowers resembling verbena\nn11837204\ttaller than Abronia elliptica and having night-blooming flowers\nn11837351\tplant having hemispherical heads of yellow trumpet-shaped flowers; found in coastal dunes from California to British Columbia\nn11837562\tplant having hemispherical heads of wine-red flowers; found in coastal dunes from California to Mexico\nn11837743\tprostrate herb having heads of deep pink to white flowers; found in coastal dunes from British Columbia to Baja California\nn11837970\tsoft-haired sticky plant with heads of bright pink trumpet-shaped flowers; found in sandy desert soil; after ample rains may carpet miles of desert with pink from the southwestern United States to northern Mexico\nn11838413\ttrailing plant having crowded clusters of 3 brilliant deep pink flowers resembling a single flower blooming near the ground; found in dry gravelly or sandy soil; southwestern United States and Mexico\nn11838916\tany of several South American ornamental woody vines of the genus Bougainvillea having 
brilliant red or purple flower bracts; widely grown in warm regions\nn11839460\ta plant of the genus Mirabilis\nn11839568\tany of several plants of the genus Mirabilis having flowers that open in late afternoon\nn11839823\tcommon garden plant of North America having fragrant red or purple or yellow or white flowers that open in late afternoon\nn11840067\tCalifornia four o'clock with purple-red flowers\nn11840246\tleafy wildflower having fragrant slender white or pale pink trumpet-shaped flowers; southwestern United States and northern Mexico\nn11840476\twildflower having vibrant deep pink tubular evening-blooming flowers; found in sandy and desert areas from southern California to southern Colorado and into Mexico\nn11840764\tleafy wildflower with lavender-pink flowers that open in the evening and remain through cool part of the next day; found in open woods or brush in mountains of southern Colorado to Arizona and into Mexico\nn11841247\tsmall spiny West Indian tree\nn11843441\tcommonly cultivated tropical American cactus having slender creeping stems and very large showy crimson flowers that bloom for several days\nn11844371\textremely large treelike cactus of desert regions of southwestern United States having a thick columnar sparsely branched trunk bearing white flowers and edible red pulpy fruit\nn11844892\tany of several cacti of the genus Cereus\nn11845557\tany cactus of the genus Echinocactus; strongly ribbed and very spiny; southwestern United States to Brazil\nn11845793\tcactus of the genus Echinocactus having stout sharp spines\nn11845913\tlarge cactus of east central Mexico having golden to pale yellow flowers and spines\nn11846312\tcactus of the genus Echinocereus\nn11846425\ta stout cylindrical cactus of the southwest United States and adjacent Mexico\nn11846765\tany cactus of the genus Epiphyllum having flattened jointed irregularly branching stems and showy tubular flowers\nn11847169\ta cactus of the genus Ferocactus: unbranched barrel-shaped 
cactus having deep ribs with numerous spines and usually large funnel-shaped flowers followed by dry fruits\nn11848479\tany of several cacti of the genus Hylocereus\nn11848867\ttall treelike Mexican cactus with edible red fruit\nn11849271\ta small spineless globe-shaped cactus; source of mescal buttons\nn11849467\tthe button-shaped top of the mescal cactus; a source of psilocybin\nn11849871\tany cactus of the genus Mammillaria\nn11849983\ta low tuberculate cactus with white feathery spines; northeastern Mexico\nn11850521\tarborescent cactus of western Mexico bearing a small oblong edible berrylike fruit\nn11850918\tsmall clustering cactus of southwestern United States; a threatened species\nn11851258\tany of several cacti of the genus Nopalea resembling prickly pears\nn11851578\tcacti having spiny flat joints and oval fruit that is edible in some species; often used as food for stock\nn11851839\tarborescent cacti having very spiny cylindrical stem segments; southwestern United States and Mexico\nn11852028\tcactus having yellow flowers and purple fruits\nn11852148\ttropical American prickly pear of Jamaica\nn11852531\tWest Indian woody climber with spiny stems and numerous fragrant white flowers in panicles followed by small yellow to orange fruits\nn11853079\ta plant of the genus Rhipsalis\nn11853356\tepiphytic cactus of Brazilian ancestry widely cultivated as a houseplant having jointed flat segments and usually rose-purple flowers that bloom in winter\nn11853813\tany of several night-blooming cacti of the genus Selenicereus\nn11854479\tSouth American jointed cactus with usually red flowers; often cultivated as a houseplant; sometimes classified as genus Schlumbergera\nn11855274\tperennial of the genus Phytolacca\nn11855435\tpokeweed of southeastern Asia and China\nn11855553\ttall coarse perennial American herb having small white flowers followed by blackish-red berries on long drooping racemes; young fleshy stems are edible; berries and root are 
poisonous\nn11855842\tfast-growing herbaceous evergreen tree of South America having a broad trunk with high water content and dark green oval leaves\nn11856573\tbushy houseplant having white to pale pink flowers followed by racemes of scarlet berries; tropical Americas\nn11857696\ta plant of the genus Portulaca having pink or red or purple or white ephemeral flowers\nn11857875\twidely cultivated in many varieties for its fleshy moss-like foliage and profusion of brightly colored flowers\nn11858077\tweedy trailing mat-forming herb with bright yellow flowers cultivated for its edible mildly acid leaves eaten raw or cooked especially in Indian and Greek and Middle Eastern cuisine; cosmopolitan\nn11858703\ta plant of the genus Calandrinia\nn11858814\tsucculent carpet-forming plant having small brilliant reddish-pink flowers; southwestern United States\nn11859275\tsimilar to Claytonia virginica but having usually pink flowers; eastern North America\nn11859472\tsmall slender plant having one pair of succulent leaves at the middle of the stem and a loose raceme of white or pink or rose bowl-shaped flowers and an edible corm\nn11859737\tsmall cormous perennial grown for its low rosette of succulent foliage and racemes of pink-tinged white flowers; eastern North America\nn11860208\tevergreen perennial having a dense basal rosette of long spatula-shaped leaves and panicles of pink or white-and-red-striped or pink-purple flowers; found on cliffs and in rock crevices in mountains of southwestern Oregon and northern California\nn11860555\tshowy succulent ground-hugging plant of Rocky Mountains regions having deep to pale pink flowers and fleshy farinaceous roots; the Montana state flower\nn11861238\tsucculent plant with mostly basal leaves; stem bears 1 pair of broadly ovate or heart-shaped leaves and a loose raceme of 3-10 white flowers; western North America\nn11861487\tsmall Indian lettuce of northern regions\nn11861641\ta floating or creeping Indian lettuce having terminal 
racemes of pale rose flowers; wet areas at high elevations of western North America\nn11861853\tsucculent herb sometimes grown as a salad or pot herb; grows on dunes and waste ground of Pacific coast of North America\nn11862835\tplant with fleshy roots and erect stems with narrow succulent leaves and one reddish-orange flower in each upper leaf axil; southwestern United States; Indians once cooked the fleshy roots\nn11863467\tlow plant with crowded narrow succulent leaves and fairly large deep pink axillary flowers that seem to sit on the ground; southwestern United States\nn11863877\terect plant with tuberous roots and terminal panicles of red to yellow flowers; southwestern North America to Central America; widely introduced elsewhere\nn11865071\tany of numerous plants of the genus Capparis\nn11865276\tsmall Australian tree bearing edible fruit resembling the pomegranate\nn11865429\tshrub of southern Florida to West Indies\nn11865574\tshrub or small tree of southern Florida to Central and South America\nn11865874\tprostrate spiny shrub of the Mediterranean region cultivated for its greenish flower buds which are pickled\nn11866248\tany of various often strong-smelling plants of the genus Cleome having showy spider-shaped flowers\nn11866706\tplant of western North America having trifoliate leaves and white or pink spider-shaped flowers; sometimes used as an ornamental\nn11867311\tstrong-scented herb common in southern United States covered with intermixed gland and hairs\nn11868814\tany of various plants of the family Cruciferae\nn11869351\tany of various plants of the family Cruciferae with edible leaves that have a pungent taste\nn11869689\tany of several water-loving cresses\nn11870044\tany Old World herb of the genus Aethionema; native of sunny limestone habitats\nn11870418\tEuropean herb that smells like garlic\nn11870747\tany garden plant of the genus Alyssum having clusters of small yellow or white flowers\nn11871059\tsmall grey Asiatic desert plant bearing 
minute white flowers that rolls up when dry and expands when moist\nn11871496\ta small invasive self-pollinating weed with small white flowers; much studied by plant geneticists; the first higher plant whose complete genome sequence was described\nn11871748\ta small noninvasive cross-pollinating plant with white flowers; closely related to Arabidopsis thaliana\nn11872146\tany of several rock-loving cresses of the genus Arabis\nn11872324\tNorth American rock cress having very long curved pods\nn11872658\tor genus Arabis: erect cress widely distributed throughout Europe\nn11873182\tthe root of the horseradish plant; it is grated or ground and used for seasoning\nn11873612\tany plant of the genus Barbarea: yellow-flowered Eurasian cresses; widely cultivated for winter salad\nn11874081\tnoxious cress with yellow flowers; sometimes placed in genus Sisymbrium\nn11874423\ttall European annual with downy grey-green foliage and dense heads of small white flowers followed by hairy pods; naturalized in North America; sometimes a troublesome weed\nn11874878\tplant of southeastern Europe having yellow flowers like those of mustard and pods with open valves resembling bucklers\nn11875523\twild original of cultivated cabbages; common in western coastal Europe\nn11875691\tany of various cultivars of the genus Brassica oleracea grown for their edible leaves or flowers\nn11875938\tany of various cultivated cabbage plants having a short thick stalk and large compact head of edible usually green leaves\nn11876204\tcabbage plant with a compact head of crinkled leaves\nn11876432\tplant grown for its stout stalks of edible small green heads resembling diminutive cabbages\nn11876634\ta plant having a large edible head of crowded white flower buds\nn11876803\tplant with dense clusters of tight green flower buds\nn11877193\tvariety of kale having smooth leaves\nn11877283\tplant cultivated for its enlarged fleshy turnip-shaped edible stem\nn11877473\tany of several widely cultivated plants 
having edible roots\nn11877646\twidely cultivated plant having a large fleshy edible white or yellow root\nn11877860\ta cruciferous plant with a thick bulbous edible yellow root\nn11878101\tplant grown for its pungent edible leafy shoots\nn11878283\tany of several cruciferous plants of the genus Brassica\nn11878633\tAsiatic mustard used as a potherb\nn11879054\tAsiatic plant grown for its cluster of edible white stalks with dark green leaves\nn11879722\tEurasian plant cultivated for its seed and as a forage crop\nn11879895\tseed of rape plants; source of an edible oil\nn11881189\twhite-flowered annual European herb bearing triangular notched pods; nearly cosmopolitan as an introduced weed\nn11882074\ta bitter cress of Europe and America\nn11882237\tEuropean bittercress having a knotted white rootstock\nn11882426\tNorth American herb with pungent scaly or toothed roots\nn11882636\tmat-forming perennial found in cold springs of the eastern United States\nn11882821\tsmall white-flowered cress common in wet places in eastern North America\nn11882972\tsmall perennial herb of cooler regions of North America with racemose purple flowers\nn11883328\tperennial of southern Europe having clusters of fragrant flowers of all colors especially yellow and orange; often naturalized on old walls or cliffs; sometimes placed in genus Erysimum\nn11883628\tany of several western American plants of the genus Cheiranthus having large yellow flowers\nn11883945\ta widely distributed Arctic cress reputed to have value in treatment or prevention of scurvy; a concentrated source of vitamin C\nn11884384\tperennial of coastal sands and shingles of northern Europe and Baltic and Black Seas having racemes of small white flowers and large fleshy blue-green leaves often used as potherbs\nn11884967\tNorth American herb with bitter-tasting pinnate leaves resembling those of tansy\nn11885856\tany of numerous low-growing cushion-forming plants of the genus Draba having rosette-forming leaves and 
terminal racemes of small flowers with scapose or leafy stems; fruit is a dehiscent oblong or linear silique\nn11887119\tany of numerous plants of the genus Erysimum having fragrant yellow or orange or brownish flowers\nn11887310\tany of several North American plants of the genus Erysimum having large yellow flowers\nn11887476\tshowy erect biennial or short-lived perennial cultivated for its terminal racemes of orange-yellow flowers; sometimes placed in genus Cheiranthus\nn11887750\tbiennial or short-lived perennial prairie rocket having orange-yellow flowers; western North America to Minnesota and Kansas; sometimes placed in genus Cheiranthus\nn11888061\tslender yellow-flowered European mustard often troublesome as a weed; formerly used as an anthelmintic\nn11888424\tany of various South African herbs and subshrubs cultivated for long showy racemes of bright blue flowers with white eyes\nn11888800\tlong cultivated herb having flowers whose scent is more pronounced in the evening; naturalized throughout Europe to Siberia and into North America\nn11889205\tperennial stellate and hairy herb with small yellow flowers of mountains of southern Europe; sometimes placed in genus Sisymbrium\nn11889619\tany of various flowering plants of the genus Iberis cultivated for their showy clusters of white to red or purple flowers; native to Mediterranean region\nn11890022\tany of several herbs of the genus Isatis\nn11890150\tEuropean biennial formerly grown for the blue coloring matter yielded by its leaves\nn11890884\tany of several hairy North American herbs having yellow racemose flowers and inflated pods\nn11891175\tperennial European plant having clusters of small fragrant usually white flowers; widely grown in gardens\nn11892029\tany of various ornamental flowering plants of the genus Malcolmia\nn11892181\terect branching herb cultivated for its loose racemes of fragrant white or pink or red or lilac flowers; native to sands and sea cliffs of southwestern Greece and southern 
Albania\nn11892637\tany of several Old World plants cultivated for their brightly colored flowers\nn11892817\tEuropean plant with racemes of sweet-scented flowers; widely cultivated as an ornamental\nn11893640\tany of several plants of the genus Physaria having racemose yellow flowers and inflated pods\nn11893916\tsmall tufted perennial herb of mountains of central and southern Europe having very small flowers of usually leafless stems; sometimes placed in genus Lepidium\nn11894327\ta cruciferous plant of the genus Raphanus having a pungent edible root\nn11894558\tEurasian weed having yellow or mauve or white flowers and podlike fruits\nn11894770\tEurasian plant widely cultivated for its edible pungent root usually eaten raw\nn11895092\tradish of Japan with a long hard durable root eaten raw or cooked\nn11895472\tannual or biennial cress growing in damp places sometimes used in salads or as a potherb; troublesome weed in some localities\nn11895714\tperennial herb found on streams and riversides throughout Europe except extreme north and Mediterranean; sometimes placed in genus Nasturtium\nn11896141\ta dainty South American annual having deeply pinnatifid leaves and racemes of fringed almond-scented purple-white flowers\nn11896722\tweedy Eurasian plant often a pest in grain fields\nn11897116\tstiffly branching Old World annual with pale yellow flowers; widely naturalized in North America; formerly used medicinally\nn11897466\tperennial of southwestern United States having leathery blue-green pinnatifid leaves and thick plumelike spikes of yellow flowers; sometimes placed in genus Cleome\nn11898639\tany of several plants of the genus Thlaspi\nn11898775\tfoetid Eurasian weed having round flat pods; naturalized throughout North America\nn11899223\tannual herb having pinnatifid basal leaves and slender racemes of small white flowers followed by one-seeded winged silicles\nn11899762\tannual or perennial herbs with inflated seed pods; some placed in genus 
Lesquerella\nn11899921\ta Japanese plant of the family Cruciferae with a thick green root\nn11900569\tannual or biennial or perennial herbs having showy flowers\nn11901294\tOld World alpine poppy with white or yellow to orange flowers\nn11901452\tshowy annual of California with red flowers\nn11901597\tannual Old World poppy with orange-red flowers and bristly fruit\nn11901759\tsubarctic perennial poppy of both hemispheres having fragrant white or yellow to orange or peach flowers\nn11901977\tcommonly cultivated Asiatic perennial poppy having stiff heavily haired leaves and bright scarlet or pink to orange flowers\nn11902200\tannual European poppy common in grain fields and often cultivated\nn11902389\tsouthwestern Asian herb with greyish leaves and white or reddish flowers; source of opium\nn11902709\tany plant of the genus Argemone having large white or yellow flowers and prickly leaves and stems and pods; chiefly of tropical America\nn11902982\tannual herb with prickly stems and large yellow flowers; southern United States to West Indies and Mexico\nn11903333\tsmall Central American tree having loose racemes of purple-tinted green flowers\nn11903671\tperennial herb with branched woody stock and bright yellow flowers\nn11904109\ta plant of the genus Corydalis with beautiful compound foliage and spurred tubular flowers\nn11904274\tannual vine with decompound leaves and racemes of yellow and pink flowers\nn11905392\tof Pacific coast of North America; widely cultivated for its yellow to red flowers\nn11905749\tyellow-flowered Eurasian glaucous herb naturalized in along sandy shores in eastern North America\nn11906127\tnative of Mexican highlands grown for its glossy clear yellow flowers and blue-grey finely dissected foliage\nn11906514\therb of China and Japan widely cultivated for its plumelike panicles of creamy white flowers\nn11906917\tChinese perennial having mauve-pink to bright sky blue flowers in drooping cymes\nn11907100\twidely cultivated west European 
plant with showy pale yellow flowers\nn11907405\tCalifornia plant with small pale yellow flowers\nn11907689\ttall branching subshrub of California and Mexico often cultivated for its silvery-blue foliage and large fragrant white flowers\nn11908549\tCalifornia wild poppy with bright red flowers\nn11908846\tperennial herb native to woodland of the eastern United States having yellow flowers\nn11909864\tvine with feathery leaves and white or pinkish flowers; sometimes placed in genus Fumaria\nn11910271\tgarden plant having deep-pink drooping heart-shaped flowers\nn11910460\tdelicate spring-flowering plant of the eastern United States having white flowers with double spurs\nn11910666\tAmerican plant with cream-colored flowers and tuberous roots resembling kernels of corn\nn11915214\tconsidered the most highly evolved dicotyledonous plants, characterized by florets arranged in dense heads that resemble single flowers\nn11915658\tany of several plants having leaves so arranged on the axis as to indicate the cardinal points of the compass\nn11915899\tany of various plants of various genera of the family Compositae having flowers that can be dried without loss of form or color\nn11916467\tany of several plants of the genus Achillea native to Europe and having small white flowers in flat-topped flower heads\nn11916696\tubiquitous strong-scented mat-forming Eurasian herb of wasteland, hedgerow or pasture having narrow serrate leaves and small usually white florets; widely naturalized in North America\nn11917407\tflower of southwestern Australia having bright pink daisylike papery flowers; grown for drying\nn11917835\tAmerican herb having flat-topped clusters of small white flower heads; reputedly a cause of trembles and milk sickness; sometimes placed in genus Eupatorium\nn11918286\tany plant of the genus Ageratum having opposite leaves and small heads of blue or white flowers\nn11918473\tsmall tender herb grown for its fluffy brushlike blue to lavender 
blooms\nn11918808\tAsian plant widely grown for its sweetly fragrant pink flowers; sometimes placed in genus Centaurea\nn11919447\tany of numerous chiefly North American weedy plants constituting the genus Ambrosia that produce highly allergenic pollen responsible for much hay fever and asthma\nn11919761\tannual weed with finely divided foliage and spikes of green flowers; common in North America; introduced elsewhere accidentally\nn11919975\ta coarse annual with some leaves deeply and palmately three-cleft or five-cleft\nn11920133\tcoarse perennial ragweed with creeping roots of dry barren lands of southwestern United States and Mexico\nn11920498\tany plant of the genus Ammobium having yellow flowers and silvery foliage\nn11920663\tAustralian plant widely cultivated for its beautiful silvery-white blooms with bright yellow centers on long winged stems\nn11920998\ta small Mediterranean plant containing a volatile oil once used to relieve toothache\nn11921395\tan American everlasting having foliage with soft wooly hairs and corymbose heads with pearly white bracts\nn11921792\tany plant of the genus Andryala having milky sap and heads of bright yellow flowers\nn11922661\ta variety of pussytoes\nn11922755\ta variety of pussytoes\nn11922839\ta variety of pussytoes\nn11922926\ta variety of cat's foot\nn11923174\twidespread rank-smelling weed having white-rayed flower heads with yellow discs\nn11923397\tEurasian perennial herb with hairy divided leaves and yellow flowers; naturalized in North America\nn11923637\tEuropean white-flowered weed naturalized in North America\nn11924014\ttiny grey woolly tufted annual with small golden-yellow flower heads; southeastern California to northwestern Arizona and southwestern Utah; sometimes placed in genus Eriophyllum\nn11924445\tany of several erect biennial herbs of temperate Eurasia having stout taproots and producing burs\nn11924849\tburdock having heart-shaped leaves found in open woodland, hedgerows and rough grassland of 
Europe (except extreme N) and Asia Minor; sometimes cultivated for medicinal and culinary use\nn11925303\tany of several plants of the genus Arctotis having daisylike flowers\nn11925450\tbushy perennial of South Africa with white or violet flowers; in its native region often clothes entire valley sides in a sheet of color\nn11925898\tperennial subshrub of the Canary Islands having usually pale yellow daisylike flowers; often included in genus Chrysanthemum\nn11926365\tlow-growing plant found only in volcanic craters on Hawaii having rosettes of narrow pointed silver-green leaves and clusters of profuse red-purple flowers on a tall stem\nn11926833\tany of various rhizomatous usually perennial plants of the genus Arnica\nn11926976\twildflower with heart-shaped leaves and broad yellow flower heads; of alpine areas west of the Rockies from Alaska to southern California\nn11927215\therb of pasture and open woodland throughout most of Europe and western Asia having orange-yellow daisylike flower heads that when dried are used as a stimulant and to treat bruises and swellings\nn11927740\tsmall European herb with small yellow flowers\nn11928352\tany of various composite shrubs or herbs of the genus Artemisia having aromatic green or greyish foliage\nn11928858\tany of several weedy composite plants of the genus Artemisia\nn11929743\twormwood of southeastern Europe to Iran\nn11930038\tEuropean wormwood similar to common wormwood in its properties\nn11930203\taromatic perennial of southeastern Russia\nn11930353\tsilver-haired shrub of central and southern United States and Mexico; a troublesome weed on rangelands\nn11930571\tsilky-leaved aromatic perennial of dry northern parts of the northern hemisphere; has tawny florets\nn11930788\tperennial cottony-white herb of southwestern United States\nn11930994\tEuropean wormwood; minor source of absinthe\nn11931135\ta perennial that is valuable as sheep forage in the United States\nn11931540\tEuropean tufted aromatic perennial herb 
having hairy red or purple stems and dark green leaves downy white below and red-brown florets\nn11931918\tany of various chiefly fall-blooming herbs of the genus Aster with showy daisylike flowers\nn11932745\tany of several asters of eastern North America usually growing in woods\nn11932927\tNorth American perennial with apparently whorled leaves and showy white purple-tinged flowers\nn11933099\tcommon North American perennial with heathlike foliage and small white flower heads\nn11933257\tperennial wood aster of eastern North America\nn11933387\trhizomatous perennial wood aster of eastern North America with white flowers\nn11933546\tstiff perennial of the eastern United States having small linear leaves and numerous tiny white flower heads\nn11933728\tcommon much-branched North American perennial with heathlike foliage and small starry white flowers\nn11933903\tperennial of western North America having white flowers\nn11934041\twiry tufted perennial of the eastern United States with stiff erect rough stems, linear leaves and large violet flowers\nn11934239\tearly-flowering perennial of southern and southeastern Europe with flower heads resembling those of goldenrod\nn11934463\ttufted perennial wood aster of North America; naturalized in Europe\nn11934616\tcommon perennial of eastern North America having showy purplish flowers; a parent of the Michaelmas daisy\nn11934807\tNorth American perennial herb having small autumn-blooming purple or pink or white flowers; widely naturalized in Europe\nn11935027\ttufted rigid North American perennial with loose clusters of white flowers\nn11935187\tperennial of southeastern United States having usually blue flowers\nn11935330\ta common European aster that grows in salt marshes\nn11935469\tviolet-flowered perennial aster of central United States having solitary heads\nn11935627\ta variety of aster\nn11935715\ta variety of aster\nn11935794\ta variety of aster\nn11935877\ta variety of aster\nn11935953\ta variety of 
aster\nn11936027\ta variety of aster\nn11936113\ta variety of aster\nn11936199\ta variety of aster\nn11936287\ta variety of aster\nn11936369\ta variety of aster\nn11936448\ta variety of aster\nn11936539\ta variety of aster\nn11936624\ta variety of aster\nn11936707\ta variety of aster\nn11936782\ta variety of aster\nn11936864\ta variety of aster\nn11936946\ta variety of aster\nn11937023\ta variety of aster\nn11937102\ta variety of aster\nn11937195\ta variety of aster\nn11937278\ta variety of aster\nn11937360\ta variety of aster\nn11937446\ta variety of aster\nn11937692\tlow spreading tropical American shrub with long slender leaves used to make a mildly stimulating drink resembling tea; sometimes placed in genus Eupatorium\nn11938556\tCalifornia shrub with slender leafy shoots that are important browse for mule deer\nn11939180\ta plant of the genus Balsamorhiza having downy leaves in a basal rosette and yellow flowers and long balsam-scented taproots\nn11939491\tany of numerous composite plants having flower heads with well-developed ray flowers usually arranged in a single whorl\nn11939699\tlow-growing Eurasian plant with yellow central disc flowers and pinkish-white outer ray flowers\nn11940006\tany of several plants of the genus Bidens having yellow flowers and prickly fruits that cling to fur and clothing\nn11940349\tcommon bur marigold of the eastern United States\nn11940599\tNorth American bur marigold with large flowers\nn11940750\tbur marigold of temperate Eurasia\nn11941094\ta variety of knapweed\nn11941478\tany of various autumn-flowering perennials having white or pink to purple flowers that resemble asters; wild in moist soils from New Jersey to Florida and Texas\nn11941924\twestern Australian annual much cultivated for its flower heads with white or bluish to violet or variegated rays\nn11942659\thairy Eurasian perennial having deep yellow daisies on lax willowy stems; found in the wild in open woodland and on rocky slopes\nn11943133\tany of various 
plants of the genus Cacalia having leaves resembling those of plantain\nn11943407\tany of numerous chiefly annual herbs of the genus Calendula widely cultivated for their yellow or orange flowers; often used for medicinal and culinary purposes\nn11943660\tthe common European annual marigold\nn11943992\tvalued for their beautiful flowers in a wide range of clear bright colors; grown primarily for cutting\nn11944196\tany of numerous plants of the family Compositae and especially of the genera Carduus and Cirsium and Onopordum having prickly-edged leaves\nn11944751\tEuropean biennial introduced in North America having flower heads in crowded clusters at ends of branches\nn11944954\tEurasian perennial naturalized in eastern North America having very spiny white cottony foliage and nodding musky crimson flower heads; valuable source of nectar\nn11945367\ta thistle of the genus Carlina\nn11945514\tstemless perennial having large flowers with white or purple-brown florets nestled in a rosette of long spiny leaves hairy beneath; of alpine regions of southern and eastern Europe\nn11945783\tEurasian thistle growing in sand dunes and dry chalky soils\nn11946051\tthistlelike Eurasian plant widely grown for its red or orange flower heads and seeds that yield a valuable oil\nn11946313\tseed of the safflower\nn11946727\tany of several plants of the genus Catananche having long-stalked heads of blue or yellow flowers\nn11946918\tsouth European plant having dark-eyed flowers with flat blue rays\nn11947251\tany plant of the genus Centaurea\nn11947629\ta plant having leaves and stems covered with down that resembles dust\nn11947802\tan annual Eurasian plant cultivated in North America having showy heads of blue or purple or pink or white flowers\nn11948044\tMediterranean annual or biennial herb having pinkish to purple flowers surrounded by spine-tipped scales; naturalized in America\nn11948264\tany of various plants of the genus Centaurea having purple thistlelike 
flowers\nn11948469\tperennial of mountains of Iran and Iraq; cultivated for its fragrant rose-pink flowers\nn11948864\ttall European perennial having purple flower heads\nn11949015\tEuropean weed having a winged stem and hairy leaves; adventive in the eastern United States\nn11949402\tEurasian plant with apple-scented foliage and white-rayed flowers and feathery leaves used medicinally; in some classification systems placed in genus Anthemis\nn11949857\tany of several United States plants having long stalks of funnel-shaped white or yellow flowers\nn11950345\tany of numerous perennial Old World herbs having showy brightly colored flower heads of the genera Chrysanthemum, Argyranthemum, Dendranthema, Tanacetum; widely cultivated\nn11950686\tEuropean herb with bright yellow flowers; a common weed in grain fields\nn11950877\tshrubby annual of the Mediterranean region with yellowish-white flowers\nn11951052\tgrown for its succulent edible leaves used in Asian cooking\nn11951511\tany of several shrubby herbs or subshrubs of the genus Chrysopsis having bright golden-yellow flower heads that resemble asters; throughout much of United States and into Canada\nn11951820\tperennial golden aster of southeastern United States\nn11952346\tany of various much-branched yellow-flowered shrubs of the genus Chrysothamnus; western North America\nn11952541\tpleasantly aromatic shrub having erect slender flexible hairy branches and dense clusters of small yellow flowers covering vast areas of western alkali plains and affording a retreat for jackrabbits; source of a yellow dye used by the Navajo\nn11953038\tperennial Old World herb having rayed flower heads with blue florets cultivated for its root and its heads of crisp edible leaves used in salads\nn11953339\twidely cultivated herb with leaves valued as salad green; either curly serrated leaves or broad flat ones that are usually blanched\nn11953610\tthe dried root of the chicory plant: used as a coffee substitute\nn11953884\tany of 
numerous biennial to perennial herbs with handsome purple or yellow or occasionally white flower heads\nn11954161\tEuropean thistle naturalized in United States and Canada where it is a pernicious weed\nn11954345\tstout North American thistle with purplish-pink flower heads\nn11954484\tthistle of western North America having white woolly leaves\nn11954642\twoolly thistle of western and central Europe and Balkan Peninsula\nn11954798\tperennial stoloniferous thistle of northern Europe with lanceolate basal leaves and usually solitary heads of reddish-purple flowers\nn11955040\tof central and southwestern Europe\nn11955153\tEuropean thistle with rather large heads and prickly leaves; extensively naturalized as a weed in the United States\nn11955532\tannual of Mediterranean to Portugal having hairy stems and minutely spiny-toothed leaves and large heads of yellow flowers\nn11955896\trhizomatous plant of central and southeastern United States and West Indies having large showy heads of clear blue flowers; sometimes placed in genus Eupatorium\nn11956348\tcommon North American weed with linear leaves and small discoid heads of yellowish flowers; widely naturalized throughout temperate regions; sometimes placed in genus Erigeron\nn11956850\tany of numerous plants of the genus Coreopsis having a profusion of showy usually yellow daisylike flowers over long periods; North and South America\nn11957317\tlarge treelike shrub having feathery leaves and clusters of large yellow flower heads; coastal southern California\nn11957514\tstout herb with flowers one to a stalk; ornamental developed from a Mexican wildflower\nn11957678\tNorth American annual widely cultivated for its yellow flowers with purple-red to brownish centers; in some classifications placed in a subgenus Calliopsis\nn11958080\tany of various mostly Mexican herbs of the genus Cosmos having radiate heads of variously colored flowers and pinnate leaves; popular fall-blooming annuals\nn11958499\tSouth African herb 
with golden-yellow globose flower heads; naturalized in moist areas along coast of California; cultivated as an ornamental\nn11958888\tany of various plants of the genus Craspedia grown for their downy foliage and globose heads of golden flowers; Australia and New Zealand\nn11959259\tany of various plants of the genus Crepis having loose heads of yellow flowers on top of a long branched leafy stem; northern hemisphere\nn11959632\tMediterranean thistlelike plant widely cultivated for its large edible flower head\nn11959862\tsouthern European plant having spiny leaves and purple flowers cultivated for its edible leafstalks and roots\nn11960245\tany of several plants of or developed from the species Dahlia pinnata having tuberous roots and showy rayed variously colored flower heads; native to the mountains of Mexico and Central America and Colombia\nn11960673\tSouth African succulent evergreen twining climber with yellow flowers grown primarily as a houseplant for its foliage; sometimes placed in genus Senecio\nn11961100\tof China\nn11961446\tany of several South African plants grown for the profusion of usually yellow daisylike flowers and mounds of aromatic foliage\nn11961871\tany of several herbs of the genus Doronicum having alternate often clasping stem leaves cultivated for their long stalks of yellow flower heads\nn11962272\tany of various perennials of the eastern United States having thick rough leaves and long-stalked showy flowers with drooping rays and a conelike center\nn11962667\tany of various plants of the genus Echinops having prickly leaves and dense globose heads of bluish flowers\nn11962994\tany plant of the genus Elephantopus having heads of blue or purple flowers; America\nn11963572\ttropical Asiatic annual cultivated for its small tassel-shaped heads of scarlet flowers\nn11963932\tfragrant rounded shrub of southwestern United States and adjacent Mexico having brittle stems and small crowded blue-green leaves and yellow flowers; produces a resin 
used in incense and varnish and in folk medicine\nn11964446\therb having a basal cluster of grey-green leaves and leafless stalks each with a solitary broad yellow flower head; desert areas Idaho to Arizona\nn11964848\tcommon erect hairy perennial of plains and prairies of southern and central United States having flowers that resemble sunflowers\nn11965218\tan American weedy plant with small white or greenish flowers\nn11965627\tany of several North American plants of the genus Erigeron having daisylike flowers; formerly believed to repel fleas\nn11965962\twidespread weed with pale purple-blue flowers\nn11966083\twidely naturalized white-flowered North American herb\nn11966215\tmat-forming herb of Turkestan with nearly double orange-yellow flowers\nn11966385\twell-branched plant with hairy leaves and stems each with a solitary flower head with narrow white or pink or lavender rays; western North America\nn11966617\tslightly succulent perennial with basal leaves and hairy sticky stems each bearing a solitary flower head with narrow pink or lavender rays; coastal bluffs Oregon to southern California\nn11966896\tespecially pretty plant having a delicate fringe of threadlike rays around flower heads having very slender white or pink rays; United States and Canada\nn11967142\tcommon perennial of eastern North America having flowers with usually violet-purple rays\nn11967315\tplant having branching leafy stems each branch with an especially showy solitary flower head with many narrow pink or lavender or white rays; northwestern United States mountains\nn11967744\tany plant of the genus Eriophyllum\nn11967878\tgreyish woolly leafy perennial with branched stems ending in leafless stalks bearing golden-yellow flower heads; dry areas western North America\nn11968519\tweedy plant of southeastern United States having divided leaves and long clusters of greenish flowers\nn11968704\tNorth American herb having whorled leaves and terminal clusters of small pinkish or purple 
flower heads\nn11968931\tperennial herb of southeastern United States having white-rayed flower heads; formerly used as in folk medicine\nn11969166\tNorth American herb having whorled leaves and terminal clusters of flowers spotted with purple\nn11969607\thairy South African or Australian subshrub that has daisylike flowers with blue rays\nn11969806\tsoftly hairy South African herb having flowers with bright blue rays\nn11970101\tany plant of the genus Filago having capitate clusters of small woolly flower heads\nn11970298\t(literally an undutiful herb) a variety of cotton rose\nn11970586\tany plant of western America of the genus Gaillardia having hairy leaves and long-stalked flowers in hot vibrant colors from golden yellow and copper to rich burgundy\nn11971248\tany plant of the genus Gazania valued for their showy daisy flowers\nn11971406\tdecumbent South African perennial with short densely leafy stems and orange flower rays with black eyespots at base\nn11971783\tAfrican or Asiatic herbs with daisylike flowers\nn11971927\twidely cultivated South African perennial having flower heads with orange to flame-colored rays\nn11972291\tslender hairy plant with few leaves and golden-yellow flower heads; sandy desert areas of southeastern California to southwestern Utah and western Arizona and northwestern Mexico\nn11972759\tany of numerous plants of the genus Gnaphalium having flowers that can be dried without loss of form or color\nn11972959\tweedy perennial of north temperate regions having woolly foliage and dirty white flowers in a leafy spike\nn11973341\tany of various western American plants of the genus Grindelia having resinous leaves and stems formerly used medicinally; often poisonous to livestock\nn11973634\tperennial gumweed of California and Baja California\nn11973749\tperennial gumweed of western and central North America\nn11974373\tsimilar to Gutierrezia sarothrae but with flower heads having fewer rays and disk flowers\nn11974557\tlow-growing sticky 
subshrub of southwestern United States having narrow linear leaves on many slender branches and hundreds of tiny yellow flower heads\nn11974888\tannual of southwestern United States having rigid woody branches with sticky foliage and yellow flowers\nn11975254\tJavanese foliage plant grown for their handsome velvety leaves with violet-purple hairs\nn11976170\ta plant of the genus Haplopappus\nn11976314\tannual of southern United States and Mexico having bristly leaves and pale yellow flowers\nn11976511\tslender perennial of western North America having weakly bristly leaves and yellow flower heads\nn11976933\twestern American shrubs having white felted foliage and yellow flowers that become red-purple\nn11977303\tany of various plants of the genus Helenium characteristically causing sneezing\nn11977660\tstout perennial herb of western United States having flower heads with drooping orange-yellow rays; causes spewing sickness in sheep\nn11977887\ta sneezeweed of southwestern United States especially southern California\nn11978233\tany plant of the genus Helianthus having large flower heads with dark disk florets and showy yellow rays\nn11978551\tsunflower of eastern North America having narrow leaves and found in bogs\nn11978713\tannual sunflower grown for silage and for its seeds which are a source of oil; common throughout United States and much of North America\nn11978961\tvery tall American perennial of central and the eastern United States to Canada having edible tuberous roots\nn11979187\ttall rough-leaved perennial with a few large flower heads; central United States\nn11979354\ttall perennial of central United States to Canada having golden-yellow flowers\nn11979527\tsimilar to the common sunflower with slender usually branching stems common in central United States\nn11979715\ttall perennial with hairy stems and leaves; widely cultivated for its large irregular edible tubers\nn11979964\tedible tuber of the Jerusalem artichoke\nn11980318\tAustralian plant 
naturalized in Spain having flowers of lemon yellow to deep gold; the frequent choice of those who love dried flowers\nn11980682\tany North American shrubby perennial herb of the genus Heliopsis having large yellow daisylike flowers\nn11981192\tany of various plants of the genus Helipterum\nn11981475\thairy perennial with yellow flower heads in branched clusters; found almost everywhere in dry places from Canada to west central and western United States; sometimes placed in genus Chrysopsis\nn11982115\tany of numerous often hairy plants of the genus Hieracium having yellow or orange flowers that resemble the dandelion\nn11982545\ta hawkweed with a rosette of purple-veined basal leaves; Canada to northern Georgia and Kentucky\nn11982939\trhizomatous herb with purple-red flowers suitable for groundcover; sometimes placed in genus Tussilago\nn11983375\tlow tufted plant having hairy stems each topped by a flower head with short narrow yellow rays; northwestern United States\nn11983606\tsimilar to but smaller than alpine hulsea\nn11984144\tEuropean weed widely naturalized in North America having yellow flower heads and leaves resembling a cat's ears\nn11984542\tany plant of the genus Inula\nn11985053\tany of various coarse shrubby plants of the genus Iva with small greenish flowers; common in moist areas (as coastal salt marshes) of eastern and central North America\nn11985321\ttall annual marsh elder common in moist rich soil in central North America that can cause contact dermatitis; produces much pollen that is a major cause of hay fever\nn11985739\tany small branched yellow-flowered North American herb of the genus Krigia\nn11985903\tsmall yellow-flowered herb resembling dandelions of central and southeastern United States\nn11986511\tannual or perennial garden plant having succulent leaves used in salads; widely grown\nn11986729\tlettuce with long dark-green spoon-shaped leaves\nn11987126\tdistinguished by leaves having curled or incised leaves forming a loose 
rosette that does not develop into a compact head\nn11987349\tlettuce valued especially for its edible stems\nn11987511\tEuropean annual wild lettuce having prickly stems; a troublesome weed in parts of United States\nn11988132\tsmall slender woolly annual with very narrow opposite leaves and branches bearing solitary golden-yellow flower heads; southwestern Oregon to Baja California and Arizona; often cultivated\nn11988596\tCalifornia annual having flower heads with yellow rays tipped with white\nn11988893\tany of various common wildflowers of the genus Leontodon; of temperate Eurasia to Mediterranean regions\nn11989087\tfall-blooming European herb with a yellow flower; naturalized in the United States\nn11989393\talpine perennial plant native to Europe having leaves covered with whitish down and small flower heads held in stars of glistening whitish bracts\nn11989869\ttall leafy-stemmed Eurasian perennial with white flowers; widely naturalized; often placed in genus Chrysanthemum\nn11990167\tsimilar to oxeye daisy\nn11990313\thybrid garden flower derived from Chrysanthemum maximum and Chrysanthemum lacustre having large white flower heads resembling oxeye daisies; often placed in the genus Chrysanthemum\nn11990627\tperennial of Portugal similar to the oxeye daisy\nn11990920\tperennial herb closely resembling European edelweiss; New Zealand\nn11991263\tany of various North American plants of the genus Liatris having racemes or panicles of small discoid flower heads\nn11991549\therb with many stems bearing narrow slender wands of crowded rose-lavender flowers; central United States and Canada to Texas and northern Mexico\nn11991777\tperennial of southeastern and central United States having very dense spikes of purple flowers; often cultivated for cut flowers\nn11992479\tTexas annual with coarsely pinnatifid leaves; cultivated for its showy radiate yellow flower heads\nn11992806\tshrub of southwestern Mediterranean region having yellow daisylike 
flowers\nn11993203\twild aster with fernlike leaves and flower heads with very narrow bright purple rays; Alberta to Texas and Mexico\nn11993444\twild aster having leafy stems and flower heads with narrow bright reddish-lavender or purple rays; western Colorado to Arizona\nn11993675\twild aster having greyish leafy stems and flower heads with narrow pale lavender or violet rays; of rocky desert slopes California to Arizona and Utah\nn11994150\tany of various resinous glandular plants of the genus Madia; of western North and South America\nn11995092\tannual Eurasian herb similar in fragrance and medicinal uses to chamomile though taste is more bitter and effect is considered inferior\nn11995396\tannual aromatic weed of Pacific coastal areas (United States and northeastern Asia) having bristle-pointed leaves and rayless yellow flowers\nn11996251\therb of tropical America having vanilla-scented flowers; climbs up trees\nn11996677\tany of various plants of the genus Mutisia\nn11997032\ta plant of the genus Nabalus\nn11997160\therb of northeastern North America having drooping clusters of yellowish-white flowers; sometimes placed in genus Prenanthes\nn11997969\tany of various mostly Australian attractively shaped shrubs of the genus Olearia grown for their handsome and sometimes fragrant evergreen foliage and profusion of daisy flowers with white or purple or blue rays\nn11998492\tbushy New Zealand shrub cultivated for its fragrant white flower heads\nn11998888\tbiennial Eurasian white hairy thistle having pale purple flowers; naturalized in North America\nn11999278\ta South African plant of the genus Othonna having smooth often fleshy leaves and heads of yellow flowers\nn11999656\tshrub with white woolly branches and woolly leaves having fragrant flowers forming long sprays; flowers suitable for drying; sometimes placed in genus Helichrysum\nn12000191\tany of several yellow-flowered plants of the genus Packera; often placed in genus Senecio\nn12001294\tstout perennial 
herb of the eastern United States with whitish flowers; leaves traditionally used by Catawba Indians to treat burns\nn12001707\therb of Canary Islands widely cultivated for its blue or purple or red or variegated daisylike flowers\nn12001924\therb derived from Pericallis cruenta and widely cultivated in a variety of profusely flowering forms with florets from white to pink to red or purple or violet or blue\nn12002428\tsmall Eurasian herb having broad leaves and lilac-pink rayless flowers; found in moist areas\nn12002651\tEuropean herb with vanilla-scented white-pink flowers\nn12002826\tAmerican sweet-scented herb\nn12003167\twidespread European weed with spiny tongue-shaped leaves and yellow flowers; naturalized in United States\nn12003696\tany of various plants of the genus Pilosella\nn12004120\tEuropean hawkweed having soft hairy leaves; sometimes placed in genus Hieracium\nn12004547\tany plant of the genus Piqueria or the closely related genus Stevia\nn12004987\therb of central and southern Europe having purple florets\nn12005656\thairy perennial Eurasian herb with yellow daisylike flowers reputed to destroy or drive away fleas\nn12006306\tperennial prostrate mat-forming herb with hoary woolly foliage\nn12006766\ta wildflower of the genus Ratibida\nn12006930\tconeflower with flower heads resembling a Mexican hat with a tall red-brown disk and drooping yellow or yellow and red-brown rays; grows in the great plains along base of Rocky Mountains\nn12007196\tplant similar to the Mexican hat coneflower; from British Columbia to New Mexico\nn12007406\tconeflower of central to southwestern United States\nn12007766\tAustralian annual everlasting having light pink nodding flower heads; sometimes placed in genus Helipterum\nn12008252\tany of various plants of the genus Rudbeckia cultivated for their large usually yellow daisies with prominent central cones\nn12008487\tthe state flower of Maryland; of central and southeastern United States; having daisylike flowers with 
dark centers and yellow to orange rays\nn12008749\ttall leafy plant with erect branches ending in large yellow flower heads with downward-arching rays; grow in Rocky Mountains south to Arizona and east to the Atlantic coast\nn12009047\tvery tall branching herb with showy much-doubled yellow flower heads\nn12009420\tbranching aromatic Mediterranean shrub with woolly stems and leaves and yellow flowers\nn12009792\tlow-branching leafy annual with flower heads resembling zinnias; found in southwestern United States and Mexico to Guatemala\nn12010628\tany of several spiny Mediterranean herbs of the genus Scolymus having yellow flower heads\nn12010815\ta golden thistle of southwestern Europe cultivated for its edible sweet roots and edible leaves and stalks; its yellow flowers are used as a substitute for saffron\nn12011370\tplant with erect leafy stems bearing clusters of rayless yellow flower heads on bent individual stalks; moist regions of southwestern United States\nn12011620\tstiff much-branched perennial of the Mediterranean region having very white woolly stems and leaves\nn12012111\tAmerican ragwort with yellow flowers\nn12012253\twidespread European weed having yellow daisylike flowers; sometimes an obnoxious weed and toxic to cattle if consumed in quantity\nn12012510\tperennial with sharply toothed triangular leaves on leafy stems bearing a cluster of yellow flower heads; moist places in mountains of western North America\nn12013035\tperennial south European herb having narrow entire leaves and solitary yellow flower heads and long black edible roots shaped like carrots\nn12013511\therb having corymbose white-rayed flowers with scaly bracts and silky indehiscent fruits\nn12013701\ta variety of white-topped aster\nn12014085\tlow much-branched perennial of western United States having silvery leaves; an important browse and shelter plant\nn12014355\tplants of western and northern European coasts\nn12014923\tEuropean perennial whose serrate leaves yield a yellow 
dye\nn12015221\tNorth American perennial having a resinous odor and yellow flowers\nn12015525\ttall Old World biennial thistle with large clasping white-blotched leaves and purple flower heads; naturalized in California and South America\nn12015959\tany of numerous chiefly summer-blooming and fall-blooming North American plants especially of the genus Solidago\nn12016434\tplant of eastern North America having creamy white flowers\nn12016567\tlarge North American goldenrod having showy clusters of yellow flowers on arching branches; often a weed\nn12016777\tsimilar to meadow goldenrod but usually smaller\nn12016914\tgoldenrod similar to narrow goldenrod but having bristly hairs on edges of leaf stalks; mountainous regions of western America\nn12017127\ta dyer's weed of Canada and the eastern United States having yellow flowers sometimes used in dyeing\nn12017326\tgoldenrod of eastern America having aromatic leaves from which a medicinal tea is made\nn12017511\teastern North American herb whose yellow flowers are (or were) used in dyeing\nn12017664\tvigorous showy goldenrod common along eastern coast and Gulf Coast of North America\nn12017853\twestern American goldenrod with long narrow clusters of small yellow flowers\nn12018014\ta variety of goldenrod\nn12018100\ta variety of goldenrod\nn12018188\ta variety of goldenrod\nn12018271\ta variety of goldenrod\nn12018363\ta variety of goldenrod\nn12018447\ta variety of goldenrod\nn12018530\ta variety of goldenrod\nn12018760\tany of several Old World coarse prickly-leaved shrubs and subshrubs having milky juice and yellow flowers; widely naturalized; often noxious weeds in cultivated soil\nn12019035\tannual Eurasian sow thistle with soft spiny leaves and rayed yellow flower heads\nn12019827\tany plant of the genus Stevia or the closely related genus Piqueria having glutinous foliage and white or purplish flowers; Central and South America\nn12020184\terect perennial of southeastern United States having large heads of 
usually blue flowers\nn12020507\tany of various tropical American plants of the genus Tagetes widely cultivated for their showy yellow or orange flowers\nn12020736\ta stout branching annual with large yellow to orange flower heads; Mexico and Central America\nn12020941\tstrong-scented bushy annual with orange or yellow flower heads marked with red; Mexico and Guatemala\nn12022054\tspring-flowering garden perennial of Asiatic origin having finely divided aromatic leaves and white to pink-purple flowers; source of an insecticide; sometimes placed in genus Chrysanthemum\nn12022382\twhite-flowered pyrethrum of Balkan area whose pinnate leaves are white and silky-hairy below; source of an insecticide; sometimes placed in genus Chrysanthemum\nn12022821\tlightly hairy rhizomatous perennial having aromatic feathery leaves and stems bearing open clusters of small buttonlike yellow flowers; sand dunes of Pacific coast of North America\nn12023108\tbushy aromatic European perennial herb having clusters of buttonlike white-rayed flower heads; valued traditionally for medicinal uses; sometimes placed in genus Chrysanthemum\nn12023407\tshrubby perennial of the Canary Islands having white flowers and leaves and hairy stems covered with dustlike down; sometimes placed in genus Chrysanthemum\nn12023726\tcommon perennial aromatic herb native to Eurasia having buttonlike yellow flower heads and bitter-tasting pinnate leaves sometimes used medicinally\nn12024176\tany of several herbs of the genus Taraxacum having long tap roots and deeply notched leaves and bright yellow flowers followed by fluffy seed balls\nn12024445\tEurasian plant widely naturalized as a weed in North America; used as salad greens and to make wine\nn12024690\tthe foliage of the dandelion plant\nn12024805\tperennial dandelion native to Kazakhstan cultivated for its fleshy roots that have high rubber content\nn12025220\tperennial having tufted basal leaves and short leafless stalks each bearing a solitary yellow 
flower head; dry hillsides and plains of west central North America\nn12026018\tany plant of the genus Tithonia; tall coarse herbs or shrubs of Mexico to Panama having large flower heads resembling sunflowers with yellow disc florets and golden-yellow to orange-scarlet rays\nn12026476\tdwarf tufted nearly stemless herb having a rosette of woolly leaves and large white-rayed flower heads and bristly achenes; central Canada and United States west to Arizona\nn12026981\tEuropean perennial naturalized throughout United States having hollow stems with a few long narrow tapered leaves and each bearing a solitary pale yellow flower\nn12027222\tMediterranean biennial herb with long-stemmed heads of purple ray flowers and milky sap and long edible root; naturalized throughout United States\nn12027658\tweedy European annual with yellow flowers; naturalized in United States\nn12028424\tubiquitous European annual weed with white flowers and finely divided leaves naturalized and sometimes cultivated in eastern North America; sometimes included in genus Matricaria\nn12029039\tlow densely tufted perennial herb of Turkey having small white flowers; used as a ground cover in dry places; sometimes included in genus Matricaria\nn12029635\tperennial herb with large rounded leaves resembling a colt's foot and yellow flowers appearing before the leaves do; native to Europe but now nearly cosmopolitan; used medicinally especially formerly\nn12030092\tany of various plants of the genus Ursinia grown for their yellow- or orange- or white-rayed flowers\nn12030654\tany plant of the genus Verbesina having clustered white or yellow flower heads\nn12030908\tperennial herb with showy yellow flowers; the eastern United States\nn12031139\tcoarse greyish-green annual yellow-flowered herb; southwestern United States to Mexico\nn12031388\tperennial herb with yellow flowers; southern and south central United States\nn12031547\ttall perennial herb having clusters of white flowers; the eastern United 
States\nn12031927\tany of various plants of the genus Vernonia of tropical and warm regions of especially North America that take their name from their loose heads of purple to rose flowers that quickly take on a rusty hue\nn12032429\tbalsamic-resinous herb with clumps of lanceolate leaves and stout leafy stems ending in large deep yellow flowers on long stalks; northwestern United States\nn12032686\therb with basal leaves and leafy hairy stems bearing solitary flower heads with white or pale cream-colored rays; northwestern United States\nn12033139\tany coarse weed of the genus Xanthium having spiny burrs\nn12033504\tany plant of the genus Xeranthemum native to southern Europe having chaffy or silvery flower heads with purplish tubular flowers\nn12033709\tmostly widely cultivated species of everlasting flowers having usually purple flowers; southern Europe to Iran; naturalized elsewhere\nn12034141\tany of various plants of the genus Zinnia cultivated for their variously and brightly colored flower heads\nn12034384\tsubshrub with slender woolly stems and long narrow leaves and flower heads with white rays; southern United States and northern Mexico\nn12034594\tsubshrub having short leafy stems and numerous small flower heads with nearly round yellow-orange rays; Arizona south to Mexico and east to Kansas\nn12035631\tbiennial of southwestern United States having white stems and toothed leaves that is grown for its large pale yellow flowers that open in early morning\nn12035907\tannual grown especially for its fragrant golden nocturnal flowers\nn12036067\tsmall dry indehiscent fruit with the seed distinct from the fruit wall\nn12036226\ta winged often one-seed indehiscent fruit as of the ash or elm or maple\nn12036939\tany of various plants of the genus Campanula having blue or white bell-shaped flowers\nn12037499\terect European herb with creeping rootstocks and nodding spikelike racemes of blue to violet flowers\nn12037691\tEuropean biennial widely cultivated for 
its blue or violet or white flowers\nn12038038\tannual or perennial of eastern North America with long spikes of blue or white flowers\nn12038208\tbellflower common in marshes of eastern North America having lanceolate linear leaves and small whitish flowers\nn12038406\tbellflower of Europe to temperate Asia having dense spikes of violet-blue to white flowers\nn12038585\tperennial European bellflower with racemose white or blue flowers\nn12038760\tbellflower of southeastern Europe\nn12038898\tbellflower of Europe and Asia and North Africa having bluish flowers and an edible tuberous root used with the leaves in salad\nn12039317\tEuropean perennial bellflower that grows in clumps with spreading stems and blue or white flowers\nn12041446\tany of numerous plants of the orchid family usually having flowers of unusual shapes and beautiful colors\nn12043444\tany of various deciduous terrestrial orchids having fleshy tubers and flowers in erect terminal racemes\nn12043673\tEurasian orchid with showy pink or purple flowers in a loose spike\nn12043836\tMediterranean orchid having usually purple flowers with a fan-shaped spotted or striped rose-red lip\nn12044041\tNorth American orchid having a spike of violet-purple flowers mixed with white; sepals and petals form a hood\nn12044467\tany orchid of the genus Aerides\nn12044784\tany of various spectacular orchids of the genus Angraecum having dark green leathery leaves and usually nocturnally scented white or ivory flowers\nn12045157\tany of several delicate Asiatic orchids grown especially for their velvety leaves with metallic white or gold veining\nn12045514\tNorth American orchid bearing a single leaf and yellowish-brown flowers\nn12045860\tany of several bog orchids of the genus Arethusa having 1 or 2 showy flowers\nn12046028\ta bog orchid with usually a solitary fragrant magenta pink blossom with a wide gaping corolla; Canada\nn12046428\tany of various orchids of the genus Bletia having pseudobulbs and erect leafless 
racemes of large purple or pink flowers\nn12046815\tJapanese orchid with white-striped leaves and slender erect racemes of rose to magenta flowers; often cultivated; sometimes placed in genus Bletia\nn12047345\tany of various tropical American orchids with usually solitary fleshy leaves and showy white to green nocturnally fragrant blossoms solitary or in racemes of up to 7\nn12047884\tSouth American orchid with spiderlike pale-yellow to pale-green flowers\nn12048056\tCentral American orchid having spiderlike flowers with prominent green warts\nn12048399\tany of various orchids of the genus Caladenia\nn12048928\tany of various showy orchids of the genus Calanthe having white or yellow or rose-colored flowers and broad leaves folded lengthwise\nn12049282\tan orchid\nn12049562\trare north temperate bog orchid bearing a solitary white to pink flower marked with purple at the tip of an erect reddish stalk above 1 basal leaf\nn12050533\tany orchid of the genus Cattleya characterized by a three-lobed lip enclosing the column; among the most popular and most extravagantly beautiful orchids known\nn12050959\tany of several orchids of the genus Cephalanthera\nn12051103\torchid of Mediterranean and Asia having a lax spike of bright rose-pink flowers\nn12051514\torchid of northeastern United States with magenta-pink flowers having funnel-shaped lip; sometimes placed in genus Pogonia\nn12051792\torchid of central and northern South America having 1- to 3-blossomed racemes of large showy rose-colored flowers; sometimes placed in genus Pogonia\nn12052267\torchid with broad ovate leaves and long-bracted green very irregular flowers\nn12052447\torchid having hooded long-bracted green to yellow-green flowers suffused with purple\nn12052787\tany of various orchids of the genus Coelogyne with: clusters of fragrant lacy snow-white flowers; salmon-pink solitary flowers; chainlike racemes of topaz and chocolate brown flowers; spikes of delicate white spice-scented flowers; emerald green 
flowers marked with blue-black\nn12053405\ta wildflower of the genus Corallorhiza growing from a hard mass of rhizomes associated with a fungus that aids in absorbing nutrients from the forest floor\nn12053690\tcommon coral root having yellowish- or reddish- or purplish-brown leafless stems bearing loose racemes of similarly colored flowers with white purple-spotted lips; Guatemala to Canada\nn12053962\tnearly leafless wildflower with erect reddish-purple stems bearing racemes of pale pinkish and brownish-striped flowers; western Canada to Mexico\nn12054195\tplant having clumps of nearly leafless pale yellowish to greenish stems bearing similarly colored flowers with white lower lips; northern New Mexico north through South Dakota and Washington to Alaska\nn12055073\tany of several orchids of the genus Cycnoches having slender arching columns of flowers suggesting the neck of a swan\nn12055516\tany of various plants of the genus Cymbidium having narrow leaves and a long drooping cluster of numerous showy and variously colored boat-shaped flowers; extensively hybridized and cultivated as houseplants and important florists' flowers\nn12056099\ta plant or flower of the genus Cypripedium\nn12056217\tany of several chiefly American wildflowers having an inflated pouchlike lip; difficult or impossible to cultivate in the garden\nn12056601\tonce common rose pink woodland orchid of eastern North America\nn12056758\tpale pink wild orchid of northeastern America having an inflated pouchlike lip\nn12056990\torchid of northern North America having a brownish-green flower and red-and-white lip suggestive of a ram's head\nn12057211\tmaroon to purple-brown orchid with yellow lip; Europe, North America and Japan\nn12057447\tplant of eastern and central North America having slightly fragrant purple-marked greenish-yellow flowers\nn12057660\toften having many yellow-green orchids with white pouches growing along streams and seeps of southwestern Oregon and northern 
California\nn12057895\tclusters of several short stems each having 2 broad leaves and 2-4 drooping brownish to greenish flowers with pouches mottled with purple; British Columbia to central California and northern Colorado\nn12058192\tleafy plant having a few stems in a clump with 1 white and dull purple flower in each upper leaf axil; Alaska to northern California and Wyoming\nn12058630\tany of several orchids of the genus Dactylorhiza having fingerlike tuberous roots; Europe and Mediterranean region\nn12058822\tEuropean orchid having lanceolate leaves spotted purple and pink to white or mauve flowers spotted or lined deep red or purple\nn12059314\ta plant of the genus Dendrobium having stems like cane and usually showy racemose flowers\nn12059625\tany orchid of the genus Disa; beautiful orchids with dark green leaves and usually hooded flowers; much prized as emblematic flowers in their native regions\nn12060546\twaxy white nearly leafless plant with stems in clusters and racemes of white flowers; northwestern United States to northern California and east to Idaho\nn12061104\tMexican epiphytic orchid with glaucous grey-green leaves and lemon- to golden-yellow flowers appearing only partially opened; sometimes placed in genus Cattleya\nn12061380\torchid of Florida and the Bahamas having showy brightly colored flowers; sometimes placed in genus Epidendrum\nn12061614\tMexican epiphytic orchid having pale green or yellow-green flowers with white purple-veined lip\nn12062105\tany of various orchids of the genus Epidendrum\nn12062468\tany of various orchids of the genus Epipactis\nn12062626\tEuropean orchid with spikes of green and pinkish or purplish flowers\nn12062781\torchid growing along streams or ponds of western North America having leafy stems and 1 greenish-brown and pinkish flower in the axil of each upper leaf\nn12063211\torchid having blue to purple flowers with tongue-shaped or strap-shaped protuberances (calli) at the lip base\nn12063639\tany of several 
small temperate and tropical orchids having mottled or striped leaves and spikes of small yellowish-white flowers in a twisted raceme\nn12064389\tEuropean orchid having dense spikes of fragrant pink or lilac or red flowers with conspicuous spurs\nn12064591\tsimilar to Gymnadenia conopsea but with smaller flowers on shorter stems and having much shorter spurs\nn12065316\tany of several summer-flowering American orchids distinguished by a fringed or lacerated lip\nn12065649\tany of several green orchids of the genus Habenaria\nn12065777\tany of several American wildflowers with a kidney-shaped lip\nn12066018\torchid with spikes of many fragrant white flowers on erect leafy stems; of wet or boggy ground through most of the West and northern North America\nn12066261\tbog orchid of eastern North America with a spike of pure white fringed flowers\nn12066451\tslender inland rein orchid similar to coastal rein orchid but with pale greenish-yellow flowers\nn12066630\tNorth American orchid similar to Habenaria psycodes with larger paler flowers\nn12066821\tstout orchid of central California to northern Washington having racemes of white fragrant bilaterally symmetrical flowers\nn12067029\ta long-spurred orchid with base leaves and petals converging under the upper sepal\nn12067193\tfringed orchid of the eastern United States having a greenish flower with the lip deeply lacerated\nn12067433\torchid of boggy or wet lands of north central United States having racemes of very fragrant creamy or greenish white flowers\nn12067672\tslender fringed orchid of eastern North America having white flowers\nn12067817\torchid having a raceme of large greenish-white flowers on a single flower stalk growing between two elliptic or round basal leaves lying on the ground; from northern Oregon and Montana across Canada to the eastern United States\nn12068138\torchid of northeastern and alpine eastern North America closely related to the purple fringed orchids but having rosy-purple or violet 
flowers with denticulate leaf divisions\nn12068432\tNorth American orchid with clusters of fragrant purple fringed flowers\nn12068615\tsimilar to coastal rein orchid but with smaller flowers; Alaska to Baja California and east to the Dakotas and Colorado\nn12069009\torchid with yellowish-brown flowers with dark veins; southeastern Arizona to the eastern United States\nn12069217\torchid with slender nearly leafless reddish-brown stems with loose racemes of reddish-brown flowers; of open brushy woods of southeastern Arizona and central Texas\nn12069679\tan orchid of the genus Himantoglossum\nn12070016\tany of various spectacular plants of the genus Laelia having showy flowers in many colors\nn12070381\tan orchid of the genus Liparis having few leaves and usually fairly small yellow-green or dull purple flowers in terminal racemes\nn12070583\tan orchid of the genus Liparis having a pair of leaves\nn12070712\tsmall terrestrial orchid of eastern North America and Europe having two nearly basal leaves and dull yellow-green racemose flowers\nn12071259\tsmall orchid with two elliptic leaves and a slender raceme of small green flowers; western North America\nn12071477\torchid having two triangular leaves and a short lax raceme of green to rust-colored flowers with the lip flushed mauve; Europe and Asia and North America and Greenland\nn12071744\torchid having a pair of ovate leaves and a long slender raceme of green flowers sometimes tinged red-brown; Europe to central Asia\nn12072210\tNorth American orchid having a solitary leaf and flowers with threadlike petals\nn12072722\tany of numerous orchids of the genus Masdevallia; tufted evergreen often diminutive plants whose flowers in a remarkable range of colors usually resemble a tricorn with sepals fused at the base to form a tube\nn12073217\tany of numerous orchids of the genus Maxillaria often cultivated for their large brilliantly colored solitary flowers\nn12073554\tany of various orchids of the genus Miltonia having 
solitary or loosely racemose showy broadly spreading flowers\nn12073991\tany of numerous and diverse orchids of the genus Odontoglossum having racemes of few to many showy usually large flowers in many colors\nn12074408\tany orchid of the genus Oncidium: characterized by slender branching sprays of small yellow and brown flowers; often grown as houseplants\nn12074867\tEuropean orchid whose flowers resemble bumble bees in shape and color\nn12075010\tEuropean orchid whose flowers resemble flies\nn12075151\tany of several European orchids of the genus Ophrys\nn12075299\tspring-blooming spider orchid having a flower with yellow or green or pink sepals and a broad brown velvety lip\nn12075830\tany of various orchids of the genus Paphiopedilum having slender flower stalks bearing 1 to several waxy flowers with pouchlike lips\nn12076223\tan orchid of the genus Phaius having large plicate leaves and racemes of showy flowers\nn12076577\tany of various orchids of the genus Phalaenopsis having often drooping glossy broad obovate or oval leaves usually dark green flushed purple or mottled grey and silver\nn12076852\torchid having large elliptic to obovate fleshy leaves and fragrant pink-and-white flowers dotted with red\nn12077244\tany of various orchids of the genus Pholidota having numerous white to brown flowers in spiraling racemes clothed in slightly inflated bracts and resembling a rattlesnake's tail\nn12077944\tsouth European orchid having fragrant greenish-white flowers; sometimes placed in genus Habenaria\nn12078172\tsouth European orchid with dark green flowers that are larger and less fragrant than Platanthera bifolia; sometimes placed in genus Habenaria\nn12078451\tof central North America; a threatened species\nn12078747\tan orchid of the genus Plectorrhiza having tangled roots and long wiry stems bearing lax racemes of small fragrant green flowers\nn12079120\tany of several dwarf orchids of the genus Pleione bearing one or two solitary white or pink to magenta or 
occasionally yellow flowers with slender stalks\nn12079523\tany of numerous small tufted orchids of the genus Pleurothallis having leathery to fleshy leaves and racemes of 1 to many small flowers\nn12079963\tany hardy bog orchid of the genus Pogonia: terrestrial orchids having slender rootstocks and erect stems bearing one or a few leaves and a solitary terminal flower\nn12080395\tany orchid of the genus Psychopsis: spectacular large tiger-striped orchids\nn12080588\torchid of South and Central America having flowers similar to but smaller than Psychopsis papilio; sometimes placed in genus Oncidium\nn12080820\torchid of South America and Trinidad having large yellow and reddish-brown flowers; sometimes placed in genus Oncidium\nn12081215\tany of numerous orchids of the genus Pterostylis having leaves in a basal rosette and green flowers often striped purple or brown or red with the dorsal sepal incurved to form a hood\nn12081649\tany of various orchids of the genus Rhyncostylis having pink- to purple-marked white flowers in a dense cylindrical raceme\nn12082131\tdiminutive Australian orchid with loose racemes of fragrant white flowers with purple and orange markings on the lip\nn12083113\tany of various showy orchids of the genus Sobralia having leafy stems and bright-colored solitary or racemose flowers similar to those of genus Cattleya\nn12083591\tan orchid of the genus Spiranthes having slender often twisted spikes of white flowers\nn12083847\tan orchid of the genus Spiranthes having tall erect densely flowered spiraling clusters of creamy white vanilla-scented flowers; widely distributed especially in low damp places of eastern and central North America\nn12084158\torchid having dense clusters of gently spiraling creamy white flowers with 2 upper petals forming a hood; western North America\nn12084400\tsimilar to Spiranthes romanzoffiana;States\nn12084555\tEuropean orchid having shorter racemes of strongly spiraling snow-white flowers\nn12084890\tany of 
various orchids of the genus Stanhopea having a single large leaf and loose racemes of large fragrant flowers of various colors; Mexico to Brazil\nn12085267\tany of various small tropical American orchids of the genus Stelis having long slender racemes of numerous small to minute flowers\nn12085664\tany of several dwarf creeping orchids with small bizarre insect-like hairy flowers on slender stalks\nn12086012\tany of numerous showy orchids of the genus Vanda having many large flowers in loose racemes\nn12086192\tfamous orchid of northern India having large pale to deep lilac-blue flowers\nn12086539\tany of numerous climbing plants of the genus Vanilla having fleshy leaves and clusters of large waxy highly fragrant white or green or topaz flowers\nn12086778\ta climbing orchid bearing a podlike fruit yielding vanilla beans; widely cultivated from Florida southward throughout tropical America\nn12087961\tany of a number of tropical vines of the genus Dioscorea many having edible tuberous roots\nn12088223\tedible tuber of any of several yams\nn12088327\tgrown in Australasia and Polynesia for its large root with fine edible white flesh\nn12088495\thardy Chinese vine naturalized in United States and cultivated as an ornamental climber for its glossy heart-shaped cinnamon-scented leaves and in the tropics for its edible tubers\nn12088909\tSouth African vine having a massive rootstock covered with deeply fissured bark\nn12089320\thaving a rhizome formerly dried and used to treat rheumatism or liver disorders\nn12089496\ttropical American yam with small yellow edible tubers\nn12089846\tcommon European twining vine with tuberous roots and cordate leaves and red berries\nn12090890\tany of numerous short-stemmed plants of the genus Primula having tufted basal leaves and showy flowers clustered in umbels or heads\nn12091213\tplant of western and southern Europe widely cultivated for its pale yellow flowers\nn12091377\tearly spring flower common in British isles having fragrant 
yellow or sometimes purple flowers\nn12091550\tEurasian primrose with yellow flowers clustered in a one-sided umbel\nn12091697\tcultivated Asiatic primrose\nn12091953\tflorists' primroses; considered a complex hybrid derived from oxlip, cowslip, and common primrose\nn12092262\tany of several plants of the genus Anagallis\nn12092417\therb with scarlet or white or purple blossoms that close at approach of rainy weather\nn12092629\tsmall creeping European herb having delicate pink flowers\nn12092930\tweedy plant having short dry chafflike leaves\nn12093329\tMediterranean plant widely cultivated as a houseplant for its showy dark green leaves splotched with silver and nodding white or pink to reddish flowers with reflexed petals\nn12093600\tcommon wild European cyclamen with pink flowers\nn12093885\ta small fleshy herb common along North American seashores and in brackish marshes having pink or white flowers\nn12094244\ta plant of the genus Hottonia\nn12094401\ta featherfoil of the eastern United States with submerged spongy inflated flower stalks and white flowers\nn12094612\tfeatherfoil of Europe and western Asia having submerged and floating leaves and violet flowers\nn12095020\tany of various herbs and subshrubs of the genus Lysimachia\nn12095281\ta variety of the loosestrife herb\nn12095412\ttrailing European evergreen with yellow flowers\nn12095543\tof North America\nn12095647\ta loosestrife vine\nn12095934\tNorth American plant with spikes of yellow flowers, found in wet places\nn12096089\tcommon North American yellow-flowered plant\nn12096395\ta white-flowered aquatic plant of the genus Samolus\nn12096563\twater pimpernel of Europe to China\nn12096674\tAmerican water pimpernel\nn12097396\tshrub with coral-red berries; Japan to northern India\nn12097556\ttropical American shrub or small tree with brown wood and dark berries\nn12098403\tany plumbaginaceous plant of the genus Plumbago\nn12098524\ta plant of the genus Plumbago with blue flowers\nn12098827\tany of 
numerous sun-loving low-growing evergreens of the genus Armeria having round heads of pink or white flowers\nn12099342\tany of various plants of the genus Limonium of temperate salt marshes having spikes of white or mauve flowers\nn12100187\tWest Indian shrub or small tree having leathery saponaceous leaves and extremely hard wood\nn12101870\tcosmopolitan herbaceous or woody plants with hollow jointed stems and long narrow leaves\nn12102133\tnarrow-leaved green herbage: grown as lawns; used as pasture for grazing animals; cut and dried as hay\nn12103680\tany of various grasses of moderate height which covered the undisturbed prairie in the United States; includes most of the forage grasses of the temperate zone\nn12103894\tany of various grasses that are short and can tolerate drought conditions; common on the dry upland plains just east of the Rocky Mountains\nn12104104\tany of various grasses or sedges having sword-shaped leaves with sharp edges\nn12104238\tany of various grasses that are tall and that flourish with abundant moisture\nn12104501\tsucculent herbaceous vegetation of pasture land\nn12104734\tEuropean grass naturalized as a weed in North America; sharp-pointed seeds cause injury when eaten by livestock\nn12105125\ta grass of the genus Agropyron\nn12105353\tEurasian grass grown in United States great plains area for forage and erosion control\nn12105828\ta wheatgrass with straight terminal awns on the flowering glumes\nn12105981\tvaluable forage grass of western United States\nn12106134\tAsiatic grass introduced into United States rangelands for pasture and fodder\nn12106323\tNorth American grass cultivated in western United States as excellent forage crop\nn12107002\tcommon grass with slender stems and narrow leaves\nn12107191\tSpanish grass with light feathery panicles grown for dried bouquets\nn12107710\tstout erect perennial grass of northern parts of Old World having silky flowering spikes; widely cultivated for pasture and hay; naturalized in 
North America\nn12107970\tgrasses of the genera Alopecurus and Setaria having dense silky or bristly brushlike flowering spikes\nn12108432\tany of several grasses of the genus Andropogon; used in broom making\nn12108613\ttall tufted grass of southeastern United States\nn12108871\tcoarse perennial Eurasian grass resembling oat; found on roadside verges and rough grassland and in hay meadows; introduced in North America for forage\nn12109365\tused by Maoris for thatching\nn12109827\tannual grass of Europe and North Africa; grains used as food and fodder (referred to primarily in the plural: `oats')\nn12110085\twidely cultivated in temperate regions for its edible grains\nn12110236\tcommon in meadows and pastures\nn12110352\toat of southern Europe and southwestern Asia\nn12110475\tMediterranean oat held to be progenitor of modern cultivated oat\nn12110778\tany of various woodland and meadow grasses of the genus Bromus; native to temperate regions\nn12111238\tweedy annual native to Europe but widely distributed as a weed especially in wheat\nn12111627\tannual grass of Europe and temperate Asia\nn12112008\tpasture grass of plains of South America and western North America\nn12112337\ta pasture grass (especially of western coastal regions of North America)\nn12112609\tshort grass growing on dry plains of central United States (where buffalo roam)\nn12112918\tany of various tall perennial grasses of the genus Calamagrostis having feathery plumes; natives of marshland fens and wet woodlands of temperate northern hemisphere\nn12113195\ta variety of reed grass\nn12113323\ttall Australian reedlike grass sometimes used for hay\nn12113657\ta grass of the genus Cenchrus\nn12114010\terect tussock-forming perennial bur grass used particularly in South Africa and Australia for pasture and forage\nn12114590\tperennial grass of South Africa introduced into United States; cultivated as forage grass in dry regions\nn12115180\ttall perennial grass of pampas of South America having 
silvery plumes and growing in large dense clumps\nn12116058\tperennial grass having stems 3 to 4 feet high; used especially in Africa and India for pasture and hay\nn12116429\twidely grown stout Old World hay and pasture grass\nn12116734\ta creeping grass with spikes like fingers\nn12117017\tgrasses with creeping stems that root freely; a pest in lawns\nn12117235\ta weed\nn12117326\ta European forage grass grown for hay; a naturalized weed in United States\nn12117695\ta coarse annual panic grass; a cosmopolitan weed; occasionally used for hay or grazing\nn12117912\tcoarse annual grass cultivated in Japan and southeastern Asia for its edible seeds and for forage; important wildlife food in United States\nn12118414\tcoarse annual grass having fingerlike spikes of flowers; native to Old World tropics; a naturalized weed elsewhere\nn12118661\tEast Indian cereal grass whose seed yield a somewhat bitter flour, a staple in the Orient\nn12119099\ta grass of the genus Elymus\nn12119238\tany of several grasses of the genus Elymus\nn12119390\tstout perennial grass of western North America\nn12119539\ta dune grass of the Pacific seacoast used as a sand binder\nn12119717\tNorth American wild rye\nn12120347\tan African grass economically important as a cereal grass (yielding white flour of good quality) as well as for forage and hay\nn12120578\tperennial South African grass having densely clumped flimsy stems; introduced into United States especially for erosion control\nn12121033\ta reedlike grass of the genus Erianthus having large plumes\nn12121187\tgrass often cultivated for its long white-ribbed leaves and large plumes resembling those of pampas grass\nn12121610\tgrass with wide flat leaves cultivated in Europe and America for permanent pasture and hay and for lawns\nn12122442\ta pasture grass of moist places throughout North America\nn12122725\ttall European perennial grass having a velvety stem; naturalized in United States and used for forage\nn12122918\tEuropean 
perennial grass with soft velvety foliage\nn12123648\ta grain of barley\nn12123741\tEuropean annual grass often found as a weed in waste ground especially along roadsides and hedgerows\nn12124172\tannual barley native to western North America and widespread in southern United States and tropical America\nn12124627\tany of several annual or perennial Eurasian grasses\nn12124818\tEuropean perennial grass widely cultivated for pasture and hay and as a lawn grass\nn12125001\tEuropean grass much used for hay and in United States also for turf and green manure\nn12125183\tweedy annual grass often occurs in grainfields and other cultivated land; seeds sometimes considered poisonous\nn12125584\tslender branching American grass of some value for grazing in central United States\nn12126084\tyields the staple food of 50 percent of world's population\nn12126360\tany grass of the genus Oryzopsis\nn12126736\tperennial mountain rice native to Mediterranean region and introduced into North America\nn12127460\tgrass of western America used for hay\nn12127575\textensively cultivated in Europe and Asia for its grain and in United States sometimes for forage\nn12127768\tannual weedy grass used for hay\nn12128071\ttall tufted perennial tropical American grass naturalized as pasture and forage grass in southern United States\nn12128306\tperennial tropical American grass used as pasture grass in arid areas of the Gulf States\nn12128490\tlow-growing weedy grass with spikelets along the leaf stems\nn12129134\ttall perennial ornamental grass with long nodding flower plumes of tropical Africa and Asia\nn12129738\tperennial grass of marshy meadows and ditches having broad leaves; Europe and North America\nn12129986\tCanary Islands grass; seeds used as feed for caged birds\nn12130549\tgrass with long cylindrical spikes grown in northern United States and Europe for hay\nn12131405\tany of various grasses of the genus Poa\nn12131550\tany of various grasses that thrive in the presence of abundant 
moisture\nn12132092\tslender European grass of shady places; grown also in northeastern America and temperate Asia\nn12132956\tsugarcanes representing the highest development of the species; characterized by large juicy stalks with soft rinds and high sugar content\nn12133151\ttough Asiatic grass whose culms are used for ropes and baskets\nn12133462\thandsome hardy North American grass with foliage turning pale bronze in autumn\nn12133682\ttall grass with smooth bluish leaf sheaths grown for hay in the United States\nn12134025\thardy annual cereal grass widely cultivated in northern Europe where its grain is the chief ingredient of black bread and in North America for forage and soil improvement\nn12134486\tgrasses of grasslands and woodlands having large gracefully arching spikes with long bristles beneath each spikelet\nn12134695\ttwo species of coarse annual foxtails that are naturalized weeds in United States\nn12134836\tcommon weedy and bristly grass found in nearly all temperate areas\nn12135049\tEuropean foxtail naturalized in North America; often a troublesome weed\nn12135576\tmillet having orange to reddish grains in long bristly spikes\nn12135729\tmillet having yellow grains in large drooping spikes\nn12135898\tany of various small-grained annual cereal and forage grasses of the genera Panicum, Echinochloa, Setaria, Sorghum, and Eleusine\nn12136392\tthe stem of various climbing palms of the genus Calamus and related genera used to make wickerwork and furniture and canes\nn12136581\tstem of the rattan palm used for making canes and umbrella handles\nn12136720\ttall woody perennial grasses with hollow slender stems especially of the genera Arundo and Phragmites\nn12137120\teconomically important Old World tropical cereal grass\nn12137569\tany of several sorghums cultivated primarily for grain\nn12137791\tsorghums of dry regions of Asia and North Africa\nn12137954\ta Sudanese sorghum having exceptionally large soft white grains\nn12138110\tSudanese sorghums 
having white seeds; one variety grown in southwestern United States\nn12138248\tsorghums of China and Manchuria having small white or brown grains (used for food) and dry pithy stalks (used for fodder, fuel and thatching)\nn12138444\tsmall drought-resistant sorghums having large yellow or whitish grains\nn12138578\tsorghum having slender dry stalks and small hard grains; introduced into United States from India\nn12139196\ttall grasses grown for the elongated stiff-branched panicle used for brooms and brushes\nn12139575\tany of several perennial grasses of the genus Spartina; some important as coastal soil binders\nn12139793\ttall reedlike grass common in salt meadows\nn12139921\tNorth American cordgrass having leaves with dry membranous margins and glumes with long awns\nn12140511\tgrass native to West Indies but common in southern United States having tufted wiry stems often infested with a dark fungus\nn12140759\terect smooth grass of sandy places in eastern North America\nn12140903\tgrass having wiry stems and sheathed panicles\nn12141167\tlow mat-forming grass of southern United States and tropical America; grown as a lawn grass\nn12141385\ta cereal grass\nn12141495\tgrass whose starchy grains are used as food: wheat; rice; rye; oats; maize; buckwheat; millet\nn12142085\tannual or biennial grass having erect flower spikes and light brown grains\nn12142357\ta grain of wheat\nn12142450\twheat with hard dark-colored kernels high in gluten and used for bread and pasta; grown especially in southern Russia, North Africa, and northern central North America\nn12143065\thardy wheat grown mostly in Europe for livestock feed\nn12143215\thard red wheat grown especially in Russia and Germany; in United States as stock feed\nn12143405\tfound wild in Palestine; held to be prototype of cultivated wheat\nn12143676\ttall annual cereal grass bearing kernels on large ears: widely cultivated in America in many varieties; the principal cereal in Mexico and Central and South America 
since pre-Columbian times\nn12144313\tan ear of corn\nn12144580\tthe dried grains or kernels or corn used as animal feed or ground for meal\nn12144987\tcorn whose kernels contain both hard and soft starch and become indented at maturity\nn12145148\tcorn having kernels with a hard outer layer enclosing the soft endosperm\nn12145477\tcorn having small ears and kernels that burst when exposed to dry heat\nn12146311\tany of several creeping grasses of the genus Zoysia\nn12146488\tlawn grass common in the Philippines; grown also in United States\nn12146654\tlawn grass common in China and Japan; grown also in United States\nn12147226\twoody tropical grass having hollow woody stems; mature canes used for construction and furniture\nn12147835\textremely vigorous bamboo having thin-walled culms striped green and yellow; so widely cultivated that native area is uncertain\nn12148757\timmense tropical southeast Asian bamboo with tough hollow culms that resemble tree trunks\nn12150722\tAfrican sedge widely cultivated as an ornamental water plant for its terminal umbrellalike cluster of slender grasslike leaves\nn12150969\tEuropean sedge having small edible nutlike tubers\nn12151170\tEuropean sedge having rough-edged leaves and spikelets of reddish flowers and aromatic roots\nn12151615\ta widely distributed perennial sedge having small edible nutlike tubers\nn12152031\tEuropean maritime sedge naturalized along Atlantic coast of United States; rootstock has properties of sarsaparilla\nn12152251\ttufted sedge of temperate regions; nearly cosmopolitan\nn12152532\tany sedge of the genus Eriophorum; north temperate bog plants with tufted spikes\nn12152722\thaving densely tufted white cottony or downlike glumes\nn12153033\twidely distributed North American sedge having rigid olive green stems\nn12153224\tsedge of eastern North America having numerous clustered woolly spikelets\nn12153580\ta sedge of the genus Eleocharis\nn12153741\tChinese sedge yielding edible bulb-shaped 
tubers\nn12153914\tfine-leaved aquatic spike rush; popular as aerator for aquariums\nn12154114\tcylindrical-stemmed sedge\nn12154773\tany of various Old World tropical palmlike trees having huge prop roots and edible conelike fruits and leaves like pineapple leaves\nn12155009\tPolynesian screw pine\nn12155583\ttall erect herbs with sword-shaped leaves; cosmopolitan in fresh and salt marshes\nn12155773\ttall marsh plant with cylindrical seed heads that explode when mature shedding large quantities of down; its long flat leaves are used for making mats and chair seats; of North America, Europe, Asia and North Africa\nn12156679\tmarsh plant having elongated linear leaves and round prickly fruit\nn12156819\tdry seed-like fruit produced by the cereal grasses: e.g. wheat, barley, Indian corn\nn12157056\ta single whole grain of a cereal\nn12157179\tthe seed of the cereal grass\nn12157769\tany vine of the family Cucurbitaceae that bears fruits with hard rinds\nn12158031\tany of numerous inedible fruits with hard rinds\nn12158443\ta coarse vine widely cultivated for its large pulpy round orange fruit with firm orange skin and numerous seeds; subspecies of Cucurbita pepo include the summer squashes and a few autumn squashes\nn12158798\tany of numerous annual trailing plants of the genus Cucurbita grown for their fleshy edible fruits\nn12159055\tany of various usually bushy plants producing fruit that is eaten while immature and before the rind or seeds harden\nn12159388\tany of various squash plants grown for their yellow fruits with somewhat elongated necks\nn12159555\tany of various squash plants grown for their elongated fruit with smooth dark green skin and whitish flesh\nn12159804\tmarrow squash plant whose fruit are eaten when small\nn12159942\tsquash plant having dark green fruit with skin mottled with light green or yellow\nn12160125\tsquash plant having flattened round fruit with a scalloped edge; usually greenish white\nn12160303\tsquash plant bearing oval fruit 
with smooth yellowish skin and tender stranded flesh resembling spaghetti\nn12160490\tany of various plants of the species Cucurbita maxima and Cucurbita moschata producing squashes that have hard rinds and mature in the fall\nn12160857\tsquash plant bearing small acorn-shaped fruits having yellow flesh and dark green or yellow rind with longitudinal ridges\nn12161056\tany of several winter squash plants producing large greyish-green football-shaped fruit with a rough warty rind\nn12161285\tsquash plants bearing hard-shelled fruit shaped somewhat like a turban with a rounded central portion protruding from the end opposite the stem\nn12161577\tplant bearing somewhat drum-shaped fruit having dark green rind with greyish markings\nn12161744\tplant bearing buff-colored squash having somewhat bottle-shaped fruit with fine-textured edible flesh and a smooth thin rind\nn12161969\tany of various plants bearing squash having hard rinds and elongated recurved necks\nn12162181\tplant bearing squash having globose to ovoid fruit with variously striped grey and green and white warty rinds\nn12162425\tperennial vine of dry parts of central and southwestern United States and Mexico having small hard mottled green inedible fruit\nn12162758\tsmall hard green-and-white inedible fruit of the prairie gourd plant\nn12163035\ta vine of the genus Bryonia having large leaves and small flowers and yielding acrid juice with emetic and purgative properties\nn12163279\twhite-flowered vine having thick roots and bearing small black berries; Europe to Iran\nn12164363\tany of several varieties of vine whose fruit has a netted rind and edible flesh and a musky smell\nn12164656\ta variety of muskmelon vine having fruit with a tan rind and orange flesh\nn12164881\tany of a variety of muskmelon vines having fruit with a smooth white rind and white or greenish flesh that does not have a musky smell\nn12165170\ta muskmelon vine with fruit that has a thin reticulated rind and sweet green 
flesh\nn12165384\ta melon vine of the genus Cucumis; cultivated from earliest times for its cylindrical green fruit\nn12165758\tMediterranean vine having oblong fruit that when ripe expels its seeds and juice violently when touched\nn12166128\tOld World climbing plant with hard-shelled bottle-shaped gourds as fruits\nn12166424\tany of several tropical annual climbers having large yellow flowers and edible young fruits; grown commercially for the mature fruit's dried fibrous interior that is used as a sponge\nn12166793\tthe loofah climber that has cylindrical fruit\nn12166929\tloofah of Pakistan; widely cultivated throughout tropics\nn12167075\tthe dried fibrous part of the fruit of a plant of the genus Luffa; used as a washing sponge or strainer\nn12167436\ta tropical Old World flowering vine with red or orange warty fruit\nn12167602\ttropical Old World vine with yellow-orange fruit\nn12168565\tany plant or flower of the genus Lobelia\nn12169099\terect perennial aquatic herb of Europe and North America having submerged spongy leaves and pendulous racemes of blue flowers above the water\nn12170585\tany of various plants of the family Malvaceae\nn12171098\terect Old World perennial with faintly musk-scented foliage and white or pink flowers; adventive in United States\nn12171316\tannual Old World plant with clusters of pink or white flowers; naturalized in United States\nn12171966\ttall coarse annual of Old World tropics widely cultivated in southern United States and West Indies for its long mucilaginous green pods used as basis for soups and stews; sometimes placed in genus Hibiscus\nn12172364\tlong green edible beaked pods of the okra plant\nn12172481\tbushy herb of tropical Asia grown for its yellow or pink to scarlet blooms that resemble the hibiscus\nn12172906\tan ornamental plant of the genus Abutilon having leaves that resemble maple leaves\nn12173069\ttall annual herb or subshrub of tropical Asia having velvety leaves and yellow flowers and yielding a strong 
fiber; naturalized in southeastern Europe and United States\nn12173664\tany of various tall plants of the genus Alcea; native to the Middle East but widely naturalized and cultivated for its very large variously colored flowers\nn12173912\tplant with terminal racemes of showy white to pink or purple flowers; the English cottage garden hollyhock\nn12174311\tany of various plants of the genus Althaea; similar to but having smaller flowers than genus Alcea\nn12174521\tEuropean perennial plant naturalized in United States having triangular ovate leaves and lilac-pink flowers\nn12174926\ta plant of the genus Callirhoe having palmately cleft leaves and white to red or purple flowers borne throughout the summer\nn12175181\tperennial poppy mallow of United States southern plains states having rose-red or rose-purple flowers\nn12175370\thairy perennial of central United States having round deeply lobed leaves and loose panicles of large crimson-purple or cherry-red flowers\nn12175598\tdensely hairy perennial having mostly triangular basal leaves and rose-purple flowers in panicled clusters\nn12176453\tsmall bushy tree grown on islands of the Caribbean and off the Atlantic coast of the southern United States; yields cotton with unusually long silky fibers\nn12176709\tOld World annual having heart-shaped leaves and large seeds with short greyish lint removed with difficulty; considered an ancestor of modern short-staple cottons\nn12176953\tnative tropical American plant now cultivated in the United States yielding short-staple cotton\nn12177129\tcotton with long rough hairy fibers\nn12177455\tshrub of southern Arizona and Mexico\nn12178129\tvaluable fiber plant of East Indies now widespread in cultivation\nn12178780\tAustralian tree with acid foliage\nn12178896\tshowy shrub of salt marshes of the eastern United States having large rose-colored flowers\nn12179122\tChinese shrub or small tree having white or pink flowers becoming deep red at night; widely cultivated; 
naturalized in southeastern United States\nn12179632\tEast Indian sparsely prickly annual herb or perennial subshrub widely cultivated for its fleshy calyxes used in tarts and jelly and for its bast fiber\nn12180168\tshrubby tree widely distributed along tropical shores; yields a light tough wood used for canoe outriggers and a fiber used for cordage and caulk; often cultivated for ornament\nn12180456\tannual weedy herb with ephemeral yellow purple-eyed flowers; Old World tropics; naturalized as a weed in North America\nn12180885\tsmall tree or shrub of New Zealand having a profusion of axillary clusters of honey-scented paper-white flowers and whose bark is used for cordage\nn12181352\ta rare mallow found only in Illinois resembling the common hollyhock and having pale rose-mauve flowers; sometimes placed in genus Sphaeralcea\nn12181612\tperennial of northwestern United States and western Canada resembling a hollyhock and having white or pink flowers\nn12182049\tany of various plants of the genus Kosteletzya predominantly of coastal habitats; grown for their flowers that resemble hibiscus\nn12182276\tsubshrub of southeastern United States to New York\nn12183026\tshrub of coastal ranges of California and Baja California having hairy branches and spikes of numerous mauve flowers; sometimes placed in genus Sphaeralcea\nn12183452\twestern Mediterranean annual having deep purple-red flowers subtended by 3 large cordate bracts\nn12183816\tan American plant of the genus Malvastrum\nn12184095\tany of various plants of the genus Malvaviscus having brilliant bell-shaped drooping flowers like incompletely opened hibiscus flowers\nn12184468\ttall coarse American herb having palmate leaves and numerous small white dioecious flowers; found wild in most alluvial soils of eastern and central United States\nn12184912\tany of various evergreen plants of the genus Pavonia having white or yellow or purple flowers\nn12185254\tdeciduous New Zealand tree whose inner bark yields a strong 
fiber that resembles flax and is called New Zealand cotton\nn12185859\tsouthern and western Australian shrub with unlobed or shallowly lobed toothed leaves and purple flowers; sometimes placed in genus Hibiscus\nn12186352\ttall handsome perennial herb of southeastern United States having maplelike leaves and white flowers\nn12186554\therb widely distributed in tropics and subtropics used for forage and medicinally as a demulcent and having a fine soft bast stronger than jute; sometimes an aggressive weed\nn12186839\ttropical American weed having pale yellow or orange flowers naturalized in southern United States\nn12187247\tperennial purple-flowered wild mallow of western North America that is also cultivated\nn12187663\tgenus of coarse herbs and subshrubs of arid North and South America having pink or scarlet flowers and globose fruits\nn12187891\tfalse mallow of western United States having racemose red flowers; sometimes placed in genus Malvastrum\nn12188289\tany of various trees yielding variously colored woods similar to true tulipwood\nn12188635\tpantropical tree of usually seacoasts sometimes cultivated as an ornamental for its rounded heart-shaped leaves and showy yellow and purple flowers; yields valuable pink to dark red close-grained wood and oil from its seeds\nn12189429\tEast Indian silk cotton tree yielding fibers inferior to kapok\nn12189779\tAustralian tree having an agreeably acid fruit that resembles a gourd\nn12189987\tAfrican tree having an exceedingly thick trunk and fruit that resembles a gourd and has an edible pulp called monkey bread\nn12190410\tmassive tropical tree with deep ridges on its massive trunk and bearing large pods of seeds covered with silky floss; source of the silky kapok fiber\nn12190869\ttree of southeastern Asia having edible oval fruit with a hard spiny rind\nn12191240\tevergreen tree with large leathery leaves and large pink to orange flowers; considered a link plant between families Bombacaceae and 
Sterculiaceae\nn12192132\ttree of Mexico to Guatemala having densely hairy flowers with long narrow petals clustered at ends of branches before leaves appear\nn12192877\tAustralian tree having hard white timber and glossy green leaves with white flowers followed by one-seeded glossy blue fruit\nn12193334\tthe fruit of the Brisbane quandong tree\nn12193665\tgraceful deciduous shrub or small tree having attractive foliage and small red berries that turn black at maturity and are used for making wine\nn12194147\ta fast-growing tropical American evergreen having white flowers and white fleshy edible fruit; bark yields a silky fiber used in cordage and wood is valuable for staves\nn12194613\tWest Indian timber tree having very hard wood\nn12195391\tany tree of the genus Sterculia\nn12195533\tlarge deciduous tree native to Panama and from which the country takes its name; having densely leafy crown and naked trunk\nn12195734\tlarge tree of Old World tropics having foul-smelling orange-red blossoms followed by red pods enclosing oil-rich seeds sometimes used as food\nn12196129\tan Australian tree of the genus Brachychiton\nn12196336\tsouth Australian tree having panicles of brilliant scarlet flowers\nn12196527\tnorth Australian tree having white flowers and broad leaves\nn12196694\twidely distributed tree of eastern Australia yielding a tough durable fiber and soft light attractively grained wood; foliage is an important emergency food for cattle\nn12196954\tlarge tree of Queensland having cream-colored flowers blotched with red inside; sometimes placed in genus Sterculia\nn12197359\ttree bearing large brown nuts containing e.g. 
caffeine; source of cola extract\nn12197601\tbitter brown seed containing caffein; source of cola extract\nn12198286\tdeciduous tree widely grown in southern United States as an ornamental for its handsome maplelike foliage and long racemes of yellow-green flowers followed by curious leaflike pods\nn12198793\tany of several handsome evergreen shrubs of California and northern Mexico having downy lobed leaves and showy yellow flowers\nn12199266\ta tree or shrub of the genus Helicteres\nn12199399\tEast Indian shrub often cultivated for its hairy leaves and orange-red flowers\nn12199790\tlarge tree of Australasia\nn12199982\tlarge evergreen tree of India and Burma whose leaves are silvery beneath\nn12200143\tsmall tree of coastal regions of Old World tropics whose leaves are silvery beneath\nn12200504\tAfrican shrub having decumbent stems and slender yellow honey-scented flowers either solitary or in pairs\nn12200905\tIndian tree having fragrant nocturnal white flowers and yielding a reddish wood used for planking; often grown as an ornamental or shade tree\nn12201331\tAustralian timber tree\nn12201580\ttropical American tree producing cacao beans\nn12201938\tlarge west African tree having large palmately lobed leaves and axillary cymose panicles of small white flowers and one-winged seeds; yields soft white to pale yellow wood\nn12202936\tany of various deciduous trees of the genus Tilia with heart-shaped leaves and drooping cymose clusters of yellowish often fragrant flowers; several yield valuable timber\nn12203529\tlarge American shade tree with large dark green leaves and rounded crown\nn12203699\tlarge spreading European linden with small dark green leaves; often cultivated as an ornamental\nn12203896\tAmerican basswood of the Allegheny region\nn12204032\tmedium-sized tree of Japan used as an ornamental\nn12204175\tlarge tree native to eastern Europe and Asia Minor having leaves with white tomentum on the under side; widely cultivated as an 
ornamental\nn12204730\tany of various plants of the genus Corchorus having large leaves and cymose clusters of yellow flowers; a source of jute\nn12205460\tlarge shrub of South Africa having many conspicuously hairy branches with large hairy leaves and clusters of conspicuous white flowers\nn12205694\ta plant lacking a permanent woody stem; many are flowering garden plants or potherbs; some having medicinal properties; some are pests\nn12214789\tany tropical African shrub of the genus Protea having alternate rigid leaves and dense colorful flower heads resembling cones\nn12215022\tSouth African shrub whose flowers when open are cup-shaped resembling artichokes\nn12215210\tAustralian shrub whose flowers yield honey copiously\nn12215579\tany shrub or tree of the genus Banksia having alternate leathery leaves apetalous yellow flowers often in showy heads and conelike fruit with winged seeds\nn12215824\tshrubby tree with silky foliage and spikes of cylindrical yellow nectarous flowers\nn12216215\tany of various shrubs of the genus Conospermum with panicles of mostly white woolly flowers\nn12216628\tgrown for outstanding display of brilliant usually scarlet-crimson flowers; Andes\nn12216968\tChilean shrub bearing coral-red fruit with an edible seed resembling a hazelnut\nn12217453\tany shrub or tree of the genus Grevillea\nn12217851\ttall shrub with cylindrical racemes of red flowers and pinnatifid leaves silky and grey beneath; eastern Australia\nn12218274\tmedium to tall fast-growing tree with orange flowers and feathery bipinnate leaves silky-hairy beneath; eastern Australia\nn12218490\ttree yielding hard heavy reddish wood\nn12218868\ttall straggling shrub with large globose crimson-yellow flowers; western Australia\nn12219668\tslender elegant tree of New Zealand having racemes of red flowers and yielding valuable mottled red timber\nn12220019\terect bushy shrub of eastern Australia having terminal clusters of red flowers yielding much nectar\nn12220496\tsmall South 
African tree with long silvery silky foliage\nn12220829\tany of various ornamental evergreens of the genus Lomatia having attractive fragrant flowers\nn12221191\tany tree of the genus Macadamia\nn12221368\tmedium-sized tree of eastern Australia having creamy-white flowers\nn12221522\tsmall Australian tree with racemes of pink flowers; widely cultivated (especially in Hawaii) for its sweet edible nuts\nn12221801\tbushy tree with pink to purple flowers\nn12222090\tAustralian tree having alternate simple leaves (when young they are pinnate with prickly toothed margins) and slender axillary spikes of white flowers\nn12222493\tany of numerous shrubs and small trees having hard narrow leaves and long-lasting yellow or white flowers followed by small edible but insipid fruits\nn12222900\teastern Australian tree widely cultivated as a shade tree and for its glossy leaves and circular clusters of showy red to orange-scarlet flowers\nn12223160\ttree or tall shrub with shiny leaves and umbels of fragrant creamy-white flowers; yields hard heavy reddish wood\nn12223569\ttall shrub of eastern Australia having oblanceolate to obovate leaves and red flowers in compact racemes\nn12223764\tstraggling shrub with narrow leaves and conspicuous red flowers in dense globular racemes\nn12224978\tany of various trees and shrubs of the genus Casuarina having jointed stems and whorls of scalelike leaves; some yield heavy hardwood\nn12225222\tany of several Australian trees of the genus Casuarina\nn12225349\tany of several Australian trees of the genus Casuarina yielding heavy hard red wood used in cabinetwork\nn12225563\tcommon Australian tree widely grown as an ornamental in tropical regions; yields heavy hard red wood\nn12226932\ta low evergreen shrub of the family Ericaceae; has small bell-shaped pink or purple flowers\nn12227658\tevergreen treelike Mediterranean shrub having fragrant white flowers in large terminal panicles and hard woody roots used to make tobacco pipes\nn12227909\thard 
woody root of the briar Erica arborea\nn12228229\tdwarf European shrub with very early blooming bell-shaped red flowers\nn12228387\tcommon low European shrub with purple-red flowers\nn12228689\tbushy shrub having pink to white flowers; common on the moors of Cornwall and in southwestern Europe; cultivated elsewhere\nn12228886\terect dense shrub native to western Iberian peninsula having profuse white or pink flowers; naturalized in southwestern England\nn12229111\tSouth African shrub grown for its profusion of white flowers\nn12229651\twiry evergreen shrub having pendent clusters of white or pink flowers; of wet acidic areas in Arctic and Canada to northeastern United States\nn12229887\terect to procumbent evergreen shrub having pendent clusters of white or pink flowers; of sphagnum peat bogs and other wet acidic areas in northern Europe\nn12230540\tevergreen tree of the Pacific coast of North America having glossy leathery leaves and orange-red edible berries; wood used for furniture and bark for tanning\nn12230794\tsmall evergreen European shrubby tree bearing many-seeded scarlet berries that are edible but bland; of Ireland, southern Europe, Asia Minor\nn12231192\tchiefly evergreen subshrubs of northern to Arctic areas\nn12231709\tdeciduous creeping shrub bright red in autumn having black or blue-black berries; alpine and circumpolar\nn12232114\terect California shrub having leaves with heart-shaped lobes at the base\nn12232280\terect treelike shrub forming dense thickets and having drooping panicles of white or pink flowers and red berrylike drupes; California\nn12232851\tsmall evergreen mat-forming shrub of southern Europe and Asia Minor having stiff stems and terminal clusters of small bell-shaped flowers\nn12233249\tprocumbent Old World mat-forming evergreen shrub with racemes of pinkish-white flowers\nn12234318\tnorth temperate bog shrub with evergreen leathery leaves and small white cylindrical flowers\nn12234669\tlow straggling evergreen shrub of western 
Europe represented by several varieties with flowers from white to rose-purple\nn12235051\tlow-growing evergreen shrub of eastern North America with leathery leaves and clusters of fragrant pink or white flowers\nn12235479\tslow-growing procumbent evergreen shrublet of northern North America and Japan having white flowers and numerous white fleshy rough and hairy seeds\nn12236160\tsmall evergreen shrub of Pacific coast of North America having edible dark purple grape-sized berries\nn12236546\tany of several shrubs of the genus Gaylussacia bearing small berries resembling blueberries\nn12236768\tlow shrub of the eastern United States bearing shiny black edible fruit; best known of the huckleberries\nn12236977\thuckleberry of the eastern United States with pink flowers and sweet blue fruit\nn12237152\tcreeping evergreen shrub of southeastern United States having small shiny boxlike leaves and flavorless berries\nn12237486\tany plant of the genus Kalmia\nn12237641\ta North American evergreen shrub having glossy leaves and white or rose-colored flowers\nn12237855\tlaurel of bogs of northwestern United States having small purple flowers and pale leaves that are glaucous beneath\nn12238756\ta Rocky Mountain shrub similar to Ledum groenlandicum\nn12238913\tbog shrub of northern and central Europe and eastern Siberia to Korea and Japan\nn12239240\tlow-growing evergreen shrub of New Jersey to Florida grown for its many white star-shaped flowers and glossy foliage\nn12239647\tany plant of the genus Leucothoe; grown for their beautiful white flowers; glossy foliage contains a poisonous substance similar to that found in genus Kalmia\nn12239880\tfast-growing evergreen shrub of southeastern United States having arching interlaced branches and racemes of white flowers\nn12240150\tbushy deciduous shrub of the eastern United States with long racemes of pinkish flowers\nn12240477\tcreeping mat-forming evergreen shrub of high mountain regions of northern hemisphere grown for its 
rose-pink flowers\nn12240965\tdeciduous shrub of coastal plain of the eastern United States having nodding pinkish-white flowers; poisonous to stock\nn12241192\tdeciduous much-branched shrub with dense downy panicles of small bell-shaped white flowers\nn12241426\tshowy evergreen shrub of southeastern United States with shiny leaves and angled branches and clusters of pink to reddish flowers that resemble an umbel\nn12241880\tstraggling shrub of northwestern North America having foliage with a bluish tinge and umbels of small bell-shaped flowers\nn12242123\tlow shrub of the eastern United States with downy twigs\nn12242409\tdeciduous shrubby tree of eastern North America having deeply fissured bark and sprays of small fragrant white flowers and sour-tasting leaves\nn12242850\tsmall shrub with tiny evergreen leaves and pink or purple flowers; Alpine summits and high ground in Asia and Europe and United States\nn12243109\tsemi-prostrate evergreen herb of western United States\nn12243693\tornamental evergreen shrub of southeastern United States having small white bell-shaped flowers\nn12244153\tany shrub of the genus Rhododendron: evergreen shrubs or small shrubby trees having leathery leaves and showy clusters of campanulate (bell-shaped) flowers\nn12244458\tmedium-sized rhododendron of Pacific coast of North America having large rosy brown-spotted flowers\nn12244650\tlate-spring-blooming rhododendron of eastern North America having rosy to pink-purple flowers\nn12244819\tshrub growing in swamps throughout the eastern United States and having small white to pinkish flowers resembling honeysuckle\nn12245319\tany of numerous ornamental shrubs grown for their showy flowers of various colors\nn12245695\tany of numerous shrubs of genus Vaccinium bearing cranberries\nn12245885\ttrailing red-fruited plant\nn12246037\tsmall red-fruited trailing cranberry of Arctic and cool regions of the northern hemisphere\nn12246232\tany of numerous shrubs of the genus Vaccinium bearing 
blueberries\nn12246773\tshrub or small tree of eastern United States having black inedible berries\nn12246941\tlow-growing deciduous shrub of northeastern North America having flowers in compact racemes and bearing sweet dark blue berries\nn12247202\tshrub of southeastern United States grown commercially especially for canning industry\nn12247407\tlow-growing tufted deciduous shrub of northern and alpine North America having pink to coral-red flowers followed by sweet blue berries\nn12247963\tshrub of the eastern United States having shining evergreen leaves and bluish-black fruit\nn12248141\tstiff bushy evergreen shrub of western North America having sour black berries and glossy green foliage used in floral arrangements\nn12248359\terect blueberry of western United States having solitary flowers and somewhat sour berries\nn12248574\terect European blueberry having solitary flowers and blue-black berries\nn12248780\tan evergreen shrub with leathery leaves\nn12248941\tlow deciduous shrub of the eastern United States bearing dark blue sweet berries\nn12249122\tshrub of northwestern North America bearing red berries\nn12249294\tsmall branching blueberry common in marshy areas of the eastern United States having greenish or yellowish unpalatable berries reputedly eaten by deer\nn12249542\tlow evergreen shrub of high north temperate regions of Europe and Asia and America bearing red edible berries\nn12251001\tany boreal low-growing evergreen plant of the genus Diapensia\nn12251278\ttufted evergreen perennial herb having spikes of tiny white flowers and glossy green round to heart-shaped leaves that become coppery to maroon or purplish in fall\nn12251740\tcreeping evergreen shrub having narrow overlapping leaves and early white star-shaped flowers; of the pine barrens of New Jersey and the Carolinas\nn12252168\tany plant of the genus Shortia; evergreen perennial herbs with smooth leathery basal leaves and showy white solitary flowers\nn12252383\tplant of southeastern 
United States having solitary white funnel-shaped flowers flushed with pink and large glossy green leaves that turn bronze-red in fall\nn12252866\tany heathlike plant of the family Epacridaceae; most are of the Australian region\nn12253229\tany heathlike evergreen shrub of the genus Epacris grown for their showy and crowded spikes of small bell-shaped or tubular flowers\nn12253487\tspindly upright shrub of southern Australia and Tasmania having white to rose or purple-red flowers\nn12253664\tsmall erect shrub of Australia and Tasmania with fragrant ivory flowers\nn12253835\tsmall shrub of southern and western Australia having pinkish to rosy purple tubular flowers\nn12254168\tsmall prostrate or ascending shrub having scarlet flowers and succulent fruit resembling cranberries; sometimes placed in genus Styphelia\nn12255225\theathlike shrub of southwestern Australia grown for its sharply scented foliage and pink flowers followed by pentagonal fruit\nn12256112\tany of several evergreen perennials of the genus Pyrola\nn12256325\tevergreen of eastern North America with leathery leaves and numerous white flowers\nn12256522\tthe common wintergreen having many-flowered racemes of pink-tinged white flowers; Europe and North America\nn12256708\tNorth American evergreen with small pinkish bell-shaped flowers and oblong leaves used formerly for shinplasters\nn12256920\tevergreen with rounded leaves and very fragrant creamy-white flowers; widely distributed in northern parts of Old and New Worlds\nn12257570\tany of several plants of the genus Chimaphila\nn12257725\tEurasian herb with white or pinkish flowers in a terminal corymb\nn12258101\tdelicate evergreen dwarf herb of north temperate regions having a solitary white terminal flower; sometimes placed in genus Pyrola\nn12258885\tsmall waxy white or pinkish-white saprophytic woodland plant having scalelike leaves and a nodding flower; turns black with age\nn12259316\tfleshy tawny or reddish saprophytic herb resembling the 
Indian pipe and growing in woodland humus of eastern North America; in some classifications placed in a separate genus Hypopitys\nn12260799\tany of several large deciduous trees with rounded spreading crowns and smooth grey bark and small sweet edible triangular nuts enclosed in burs; north temperate regions\nn12261359\tlarge European beech with minutely-toothed leaves; widely planted as an ornamental in North America\nn12261571\tvariety of European beech with shining purple or copper-colored leaves\nn12261808\tNorth American forest tree with light green leaves and edible nuts\nn12262018\tvariety of European beech with pendulous limbs\nn12262185\ta beech native to Japan having soft light yellowish-brown wood\nn12262553\tany of several attractive deciduous trees yellow-brown in autumn; yield a hard wood and edible nuts in a prickly bur\nn12263038\tlarge tree found from Maine to Alabama\nn12263204\twild or cultivated throughout southern Europe, northwestern Africa and southwestern Asia\nn12263410\ta small tree with small sweet nuts; wild or naturalized in Korea and China\nn12263588\ta spreading tree of Japan that has a short trunk\nn12263738\tshrubby chestnut tree of southeastern United States having small edible nuts\nn12263987\tshrubby tree closely related to the Allegheny chinkapin but with larger leaves; southern midwestern United States\nn12264512\ta tree of the genus Castanopsis\nn12264786\tsmall ornamental evergreen tree of Pacific Coast whose glossy yellow-green leaves are yellow beneath; bears edible nuts\nn12265083\tevergreen shrub similar to golden chinkapin; mountains of California\nn12265394\tevergreen tree of the Pacific coast area having large leathery leaves; yields tanbark\nn12265600\tsmall evergreen tree of China and Japan\nn12266217\tany of various beeches of the southern hemisphere having small usually evergreen leaves\nn12266528\tlarge evergreen tree of Tasmania\nn12266644\tChilean evergreen whose leafy boughs are used for 
thatching\nn12266796\tany of several tall New Zealand trees of the genus Nothofagus; some yield useful timber\nn12266984\tNew Zealand beech with usually pale silvery bark\nn12267133\ttall deciduous South American tree\nn12267265\tlarge Chilean timber tree yielding coarse lumber\nn12267411\tNew Zealand forest tree\nn12267534\ttall New Zealand tree yielding very hard wood\nn12267677\tfruit of the oak tree: a smooth thin-walled nut in a woody cup-shaped base\nn12267931\tcup-shaped structure of hardened bracts at the base of an acorn\nn12268246\ta deciduous tree of the genus Quercus; has acorns and lobed leaves\nn12269241\tany of several American evergreen oaks\nn12269406\thighly variable often shrubby evergreen oak of coastal zone of western North America having small thick usually spiny-toothed dark-green leaves\nn12269652\tany of numerous Old World and American oaks having 6 to 8 stamens in each floret, acorns that mature in one year and leaf veins that never extend beyond the margin of the leaf\nn12270027\tlarge slow-growing deciduous tree of the eastern United States having stout spreading branches and leaves with usually 7 rounded lobes; yields strong and durable hard wood\nn12270278\tsemi-evergreen shrub or small tree of Arizona and New Mexico having acorns with hemispherical cups\nn12270460\tlarge deciduous oak of the eastern United States with a flaky bark and leaves that have fewer lobes than other white oaks; yields heavy strong wood used in construction; thrives in wet soil\nn12270741\tlarge deciduous tree of central and southern Europe and Asia Minor having lanceolate leaves with spiked lobes\nn12270946\tmedium-sized evergreen of southwestern United States and northwestern Mexico with oblong leathery often spiny-edged leaves\nn12271187\tmedium-large deciduous tree with a thick trunk found in the eastern United States and southern Canada and having close-grained wood and deeply seven-lobed leaves turning scarlet in autumn\nn12271451\tsmall to medium 
deciduous oak of east central North America; leaves have sharply pointed lobes\nn12271643\tany of numerous American oaks having 4 stamens in each floret, acorns requiring two years to mature and leaf veins usually extending beyond the leaf margin to form points or bristles\nn12271933\tlarge round-topped deciduous tree with spreading branches having narrow falcate leaves with deeply sinuate lobes and wood similar to that of northern red oaks; New Jersey to Illinois and southward\nn12272239\tsmall deciduous tree of western North America with crooked branches and pale grey bark\nn12272432\tevergreen oak of southern Europe having leaves somewhat resembling those of holly; yields a hard wood\nn12272735\tshrubby oak of southeastern United States usually forming dense thickets\nn12272883\tsmall deciduous tree of eastern and central United States having leaves that shine like laurel; wood is used in western states for shingles\nn12273114\tsmall semi-evergreen shrubby tree of southeastern United States having hairy young branchlets and leaves narrowing to a slender bristly point\nn12273344\tlarge deciduous tree of the Pacific coast having deeply parted bristle-tipped leaves\nn12273515\tsmall slow-growing deciduous shrubby tree of dry sandy barrens of southeastern United States having leaves with bristle-tipped lobes resembling turkey's toes\nn12273768\tlarge nearly semi-evergreen oak of southeastern United States; thrives in damp soil\nn12273939\ttall graceful deciduous California oak having leathery leaves and slender pointed acorns\nn12274151\tmedium-large deciduous timber tree of central and southern United States; acorns deeply immersed in the cup and mature in first year\nn12274358\tmedium to large deciduous oak of central and eastern North America with ovoid acorns deeply immersed in large fringed cups; yields tough close-grained wood\nn12274630\tany of various chiefly American small shrubby oaks often a dominant form on thin dry soils sometimes forming dense 
thickets\nn12274863\ta common scrubby deciduous tree of central and southeastern United States having dark bark and broad three-lobed (club-shaped) leaves; tends to form dense thickets\nn12275131\tmedium to large deciduous tree of moist areas of southeastern United States similar to the basket oak\nn12275317\toak with moderately light fine-grained wood; Japan\nn12275489\tan oak having leaves resembling those of chestnut trees\nn12275675\tmedium-sized deciduous tree of the eastern United States that yields a strong durable wood\nn12275888\tsmall evergreen shrub or tree of southeastern United States; often forms almost impenetrable thickets in sandy coastal areas\nn12276110\trelatively tall deciduous water oak of southeastern United States often cultivated as a shade tree; thrives in wet soil\nn12276314\tsimilar to the pin oak; grows in damp sites in Mississippi River basin\nn12276477\tdeciduous European oak valued for its tough elastic wood\nn12276628\tmedium to large deciduous tree of the eastern United States; its durable wood is used as timber or split and woven into baskets or chair seats\nn12276872\tfast-growing medium to large pyramidal deciduous tree of northeastern United States and southeastern Canada having deeply pinnatifid leaves that turn bright red in autumn; thrives in damp soil\nn12277150\tmedium to large deciduous oak of the eastern United States having long lanceolate leaves and soft strong wood\nn12277334\tdeciduous shrubby tree of northeastern and central United States having a sweet edible nut and often forming dense thickets\nn12277578\tmedium to large deciduous European oak having smooth leaves with rounded lobes; yields hard strong light-colored wood\nn12277800\tlarge symmetrical deciduous tree with rounded crown widely distributed in eastern North America; has large leaves with triangular spiny tipped lobes and coarse-grained wood less durable than that of white oaks\nn12278107\tlarge deciduous red oak of southern and eastern United States 
having large seven-lobed to nine-lobed elliptical leaves, large acorns and medium hard coarse-grained wood\nn12278371\tsmall deciduous tree of eastern and central United States having dark green lyrate pinnatifid leaves and tough moisture-resistant wood used especially for fence posts\nn12278650\tmedium-sized evergreen oak of southern Europe and northern Africa having thick corky bark that is periodically stripped to yield commercial cork\nn12278865\tsmall deciduous tree having the trunk branched almost from the base with spreading branches; Texas and southern Oklahoma\nn12279060\ta low spreading or prostrate shrub of southwestern United States with small acorns and leaves resembling those of the huckleberry\nn12279293\tmedium to large deciduous tree of China, Japan, and Korea having thick corky bark\nn12279458\tmedium to large deciduous timber tree of the eastern United States and southeastern Canada having dark outer bark and yellow inner bark used for tanning; broad five-lobed leaves are bristle-tipped\nn12279772\tmedium-sized evergreen native to eastern North America to the east coast of Mexico; often cultivated as shade tree for it wide-spreading crown; extremely hard tough durable wood once used in shipbuilding\nn12280060\ta small shrubby evergreen tree of western North America similar to the coast live oak but occurring chiefly in foothills of mountain ranges removed from the coast; an important part of the chaparral\nn12280364\tnuts of forest trees (as beechnuts and acorns) accumulated on the ground\nn12281241\tany betulaceous tree or shrub of the genus Betula having a thin peeling bark\nn12281788\ttree of eastern North America with thin lustrous yellow or grey bark\nn12281974\tsmall American birch with peeling white bark often worked into e.g. 
baskets or toy canoes\nn12282235\tmedium-sized birch of eastern North America having white or pale grey bark and valueless wood; occurs often as a second-growth forest tree\nn12282527\tEuropean birch with silvery white peeling bark and markedly drooping branches\nn12282737\tEuropean birch with dull white to pale brown bark and somewhat drooping hairy branches\nn12282933\tbirch of swamps and river bottoms throughout the eastern United States having reddish-brown bark\nn12283147\tcommon birch of the eastern United States having spicy brown bark yielding a volatile oil and hard dark wood used for furniture\nn12283395\tAlaskan birch with white to pale brown bark\nn12283542\tbirch of western United States resembling the paper birch but having brownish bark\nn12283790\tsmall shrub of colder parts of North America and Greenland\nn12284262\tnorth temperate shrubs or trees having toothed leaves and conelike fruit; bark is used in tanning and dyeing and the wood is rot-resistant\nn12284821\tmedium-sized tree with brown-black bark and woody fruiting catkins; leaves are hairy beneath\nn12285049\tnative to Europe but introduced in America\nn12285195\tshrub or small tree of southeastern United States having soft light brown wood\nn12285369\ttree of western United States\nn12285512\tlarge tree of Pacific coast of North America having hard red wood much used for furniture\nn12285705\tcommon shrub of Canada and northeastern United States having shoots scattered with rust-colored down\nn12285900\tcommon shrub of the eastern United States with smooth bark\nn12286068\tshrub of mountainous areas of Europe\nn12286197\tNorth American shrub with light green leaves and winged nuts\nn12286826\tany of several trees or shrubs of the genus Carpinus\nn12286988\tmedium-sized Old World tree with smooth grey bark and leaves like beech that turn yellow-orange in autumn\nn12287195\ttree or large shrub with grey bark and blue-green leaves that turn red-orange in autumn\nn12287642\tany of several 
trees resembling hornbeams with fruiting clusters resembling hops\nn12287836\tmedium-sized hop hornbeam of southern Europe and Asia Minor\nn12288005\tmedium-sized hop hornbeam of eastern North America\nn12288823\tany of several shrubs or small trees of the genus Corylus bearing edible nuts enclosed in a leafy husk\nn12289310\tnut-bearing shrub of eastern North America\nn12289433\tsmall nut-bearing tree much grown in Europe\nn12289585\thazel of western United States with conspicuous beaklike involucres on the nuts\nn12290748\tany of various plants of the genus Centaurium\nn12290975\terect plant with small clusters of pink trumpet-shaped flowers of southwestern United States\nn12291143\tcommon European glabrous annual centaury with flowers in dense cymes\nn12291459\ta variety of centaury found at the seaside\nn12291671\ta slender variety of centaury\nn12291959\tone of the most handsome prairie wildflowers having large erect bell-shaped bluish flowers; of moist places in prairies and fields from eastern Colorado and Nebraska south to New Mexico and Texas\nn12292463\tperennial cultivated especially as a houseplant for its fragrant bluish to dark lavender flowers\nn12292877\tany of various tall perennial herbs constituting the genus Frasera; widely distributed in warm dry upland areas of California, Oregon, and Washington\nn12293723\tany of various plants of the family Gentianaceae especially the genera Gentiana and Gentianella and Gentianopsis\nn12294124\tlow-growing alpine plant cultivated for its dark glossy green leaves in basal rosettes and showy solitary bell-shaped blue flowers\nn12294331\tgentian of eastern North America having tubular blue or white flowers that open little if at all\nn12294542\ttufted sometimes sprawling perennial with blue flowers spotted with green; western North America\nn12294723\tsimilar to Gentiana andrewsii but with larger flowers\nn12294871\trobust European perennial having clusters of yellow flowers\nn12295033\tperennial Eurasian 
gentian with sky-blue funnel-shaped flowers of damp open heaths\nn12295237\terect perennial of wet woodlands of North America having leaves and flower buds resembling those of soapwort\nn12295429\ta perennial marsh gentian of eastern North America\nn12295796\tgentian of eastern North America having clusters of bristly blue flowers\nn12296045\tgentian of Europe and China having creamy white flowers with fringed corollas\nn12296432\tany of various herbs of the genus Gentianopsis having the margins of the corolla lobes fringed; sometimes included in genus Gentiana\nn12296735\ttall widely distributed fringed gentian of eastern North America having violet-blue or white fringed flowers\nn12296929\tmedium-tall fringed gentian with pale-blue to blue-purple flowers; circumboreal in distribution\nn12297110\tsmall blue-flowered fringed gentian of east central North America\nn12297280\tsmall blue-flowered fringed gentian of western United States (Rocky Mountains) especially around hot springs in Yellowstone National Park\nn12297507\tsmall blue-flowered fringed gentian of Sierra Nevada mountains\nn12297846\tany of various plants of the genus Halenia having flowers with spurred lobes\nn12298165\tany of various plants of the genus Sabbatia having usually pink cymose flowers; occur from acid bogs to brackish marshes\nn12299640\tglabrous or pubescent evergreen shrub or tree of the genus Salvadora; twigs are fibrous and in some parts of the world are bound together in clusters and used as a toothbrush; shoots are used as camel fodder; plant ash provides salt\nn12300840\ta tree of the genus Olea cultivated for its fruit\nn12301180\tevergreen tree cultivated in the Mediterranean region since antiquity and now elsewhere; has edible shiny black fruits\nn12301445\tsmall ovoid fruit of the European olive tree; important food and source of oil\nn12301613\tnorthern Zealand tree having dense hard light-brown wood\nn12301766\tsmall New Zealand tree having red pulpy one-seeded 
fruit\nn12302071\tany of various small decorative flowering trees or shrubs of the genus Chionanthus\nn12302248\tsmall bushy tree of southeastern United States having profuse clusters of white flowers\nn12302565\tany plant of the genus Forestiera\nn12303083\tany of various early blooming oleaceous shrubs of the genus Forsythia; native to eastern Asia and southern Europe but widely cultivated for their branches of bright yellow bell-shaped flowers\nn12303462\tany of various deciduous pinnate-leaved ornamental or timber trees of the genus Fraxinus\nn12304115\tspreading American ash with leaves pale green or silvery beneath and having hard brownish wood\nn12304286\tsmall ash of swampy areas of southeastern United States\nn12304420\tshrubby ash of southwestern United States having fragrant white flowers\nn12304703\ttall ash of Europe to the Caucasus having leaves shiny dark-green above and pale downy beneath\nn12304899\ttimber tree of western North America yielding hard light wood; closely related to the red ash\nn12305089\tvigorous spreading North American tree having dark brown heavy wood; leaves turn gold in autumn\nn12305293\tsouthern Mediterranean ash having fragrant white flowers in dense panicles and yielding manna\nn12305475\tsmallish American tree with velvety branchlets and lower leaf surfaces\nn12305654\ta variety of red ash having glossy branchlets and lower leaf surfaces\nn12305819\tash of central and southern United States with bluish-green foliage and hard brown wood\nn12305986\tlow-growing ash of Texas\nn12306089\ttimber tree of central and southeastern United States having hairy branchlets and a swollen trunk base\nn12306270\tsmall shrubby ash of southwestern United States and northwestern Mexico\nn12306717\tany of several shrubs and vines of the genus Jasminum chiefly native to Asia\nn12306938\tevergreen rambling yellow-flowered shrub of western China\nn12307076\tdeciduous rambling shrub widely cultivated for its winter-blooming yellow 
flowers\nn12307240\ta climbing deciduous shrub with fragrant white or yellow or red flowers used in perfume and to flavor tea\nn12307756\tany of various Old World shrubs having smooth entire leaves and terminal panicles of small white flowers followed by small black berries; many used for hedges\nn12308112\teastern Asian shrub cultivated especially for its persistent foliage\nn12308447\tevergreen shrub of Japan and Korea having small dark leaves and flowers in loose panicles; related to but smaller than Chinese privet\nn12308907\tsmall deciduous shrub having graceful arching branches and luxuriant foliage\nn12309277\tdeciduous semi-evergreen shrub used for hedges\nn12309630\tsmall tree of southern United States having panicles of dull white flowers followed by dark purple fruits\nn12310021\tevergreen shrub with white flowers and olivelike fruits\nn12310349\tany of various plants of the genus Syringa having large panicles of usually fragrant flowers\nn12310638\trobust upright shrub of mountains of northern India having oblong-elliptic leaves and pale lilac or white malodorous flowers\nn12311045\tsmall densely branching Asiatic shrub having lanceolate leaves and panicles of fragrant lilac flowers\nn12311224\tsmall tree of Japan having narrow pointed leaves and creamy-white flowers\nn12311413\tlilac of northern China having ovate leaves and profuse early summer rose-lilac flowers\nn12311579\tlarge European lilac naturalized in North America having heart-shaped ovate leaves and large panicles of highly fragrant lilac or white flowers\nn12312110\tany of various plants of the family Haemodoraceae; roots contain a deep red coloring matter\nn12312728\tsedgelike spring-flowering herb having clustered flowers covered with woolly hairs; Australia\nn12315060\tcommon shrub of eastern North America having small yellow flowers after the leaves have fallen\nn12315245\tfragrant shrub of lower Mississippi valley having very small flowers from midwinter to spring\nn12315598\tany of 
several Asiatic deciduous shrubs cultivated for their nodding racemes of yellow flowers that appear before the leaves\nn12315999\tany of several deciduous low-growing shrubs of the genus Fothergilla having showy brushlike spikes of white flowers in spring and fiery red and orange autumn color; grows from Alabama to the Allegheny Mountains\nn12316444\tany tree of the genus Liquidambar\nn12316572\ta North American tree of the genus Liquidambar having prickly spherical fruit clusters and fragrant sap\nn12317296\ta small slow-growing deciduous tree of northern Iran having a low domed shape\nn12318378\tany of various trees of the genus Juglans\nn12318782\tmedium-sized tree with somewhat aromatic compound leaves and edible nuts\nn12318965\tNorth American walnut tree having light-brown wood and edible nuts; source of a light-brown dye\nn12319204\tNorth American walnut tree with hard dark wood and edible nut\nn12319414\tEurasian walnut valued for its large edible nut and its hard richly figured wood; widely cultivated\nn12320010\tAmerican hardwood tree bearing edible nuts\nn12320414\thickory of southern United States having many narrow leaflets and rather bitter nuts\nn12320627\tan American hickory tree having bitter nuts\nn12320806\thickory of the eastern United States having a leaves with 7 or 9 leaflets and thin-shelled very bitter nuts\nn12321077\ttree of southern United States and Mexico cultivated for its nuts\nn12321395\thickory of the eastern United States resembling the shagbark but having a much larger nut\nn12321669\thickory of southern United States and Mexico having hard nutmeg-shaped nuts\nn12321873\tNorth American hickory having loose grey shaggy bark and edible nuts\nn12322099\tsmooth-barked North American hickory with 7 to 9 leaflets bearing a hard-shelled edible nut\nn12322501\tany tree of the genus Pterocarya; fruit is a small winged nutlet; Caucasus to southeastern Asia\nn12322699\tmedium-sized Caucasian much-branched tree distinguished from other 
walnut trees by its winged fruit\nn12323665\tan Indian tree of the family Combretaceae that is a source of timber and gum\nn12324056\tany of numerous shrubs or small trees of the genus Combretum having spikes of small flowers\nn12324222\tornamental African shrub or climber with red flowers\nn12324388\tsmall deciduous tree of the Transvaal having spikes of yellow flowers\nn12324558\tsmall South African tree having creamy yellow fragrant flowers usually growing on stream banks\nn12324906\tevergreen tree or shrub with fruit resembling buttons and yielding heavy hard compact wood\nn12325234\tshrub to moderately large tree that grows in brackish water along the seacoasts of western Africa and tropical America; locally important as a source of tannin\nn12325787\tany of several shrubs of the genus Elaeagnus having silver-white twigs and yellow flowers followed by olivelike fruits\nn12327022\tan aquatic plant of the genus Myriophyllum having feathery underwater leaves and small inconspicuous flowers\nn12327528\tWest Indian tree bearing edible fruit resembling mango\nn12327846\ttall South American tree bearing brazil nuts\nn12328398\tany of numerous herbs and subshrubs of the genus Lythrum\nn12328567\tmarsh herb with a long spike of purple flowers; originally of Europe but now rampant in eastern United States\nn12328801\tannual with small solitary pink flowers; originally of Europe but widely naturalized in moist areas\nn12329260\tornamental shrub from eastern India commonly planted in the southern United States\nn12329473\tnative to Asia, Australia, and East Indies, where it provides timber called pyinma; used elsewhere as an ornamental for its large showy flowers\nn12330239\ttrees and shrubs\nn12330469\tany evergreen shrub or tree of the genus Myrtus\nn12330587\tEuropean shrub with white or rosy flowers followed by black berries\nn12330891\tWest Indian tree; source of bay rum\nn12331066\taromatic West Indian tree that produces allspice berries\nn12331263\ttropical 
American tree having small white flowers and aromatic berries\nn12331655\tAustralian tree with sour red fruit\nn12331788\ttree of extreme southern Florida and West Indies having thin scaly bark and aromatic fruits and seeds and yielding hard heavy close-grained zebrawood\nn12332030\tBrazilian tree with spicy red fruit; often cultivated in California and Florida\nn12332218\ttropical tree of the East Indies cultivated for its edible fruit\nn12332555\tSouth American shrub having edible greenish plumlike fruit\nn12333053\tsmall evergreen tropical tree native to Brazil and West Indies but introduced into southern United States; grown in Brazil for its edible tough-skinned purple grapelike fruit that grows all along the branches\nn12333530\tsmall tropical American shrubby tree; widely cultivated in warm regions for its sweet globular yellow fruit\nn12333771\tsmall tropical shrubby tree bearing small yellowish fruit\nn12333961\tsmall tropical shrubby tree bearing deep red oval fruit\nn12334153\tSouth American tree having fruit similar to the true guava\nn12334293\tany of various trees of the genera Eucalyptus or Liquidambar or Nyssa that are sources of gum\nn12334891\ta tree of the genus Eucalyptus\nn12335483\tany of several Australian gum trees growing on moist or alluvial soil\nn12335664\tany of several low-growing Australian eucalypts\nn12335800\tany of several Australian eucalypts having fibrous inner bark\nn12335937\tany of several Australian eucalypts having the bark smooth except at or near the base of the trunk\nn12336092\tred gum tree of Tasmania\nn12336224\tvery large red gum tree\nn12336333\tsomewhat crooked red gum tree growing chiefly along rivers; has durable reddish lumber used in heavy construction\nn12336586\tmedium-sized swamp gum of New South Wales and Victoria\nn12336727\tsmall to medium-sized tree of Australia and Tasmania having smooth white to light-grey bark shedding in patches or strips\nn12336973\ttall timber tree with hard heavy pinkish or light 
brown wood\nn12337131\tsmall shrubby mallee\nn12337246\tstringybark having white wood\nn12337391\tlarge tree with dark compact bark on lower trunk but smooth and white above; yields lumber similar to that of European or American ashes\nn12337617\ttall fast-growing timber tree with leaves containing a medicinal oil; young leaves are bluish\nn12337800\tvery tall tree of Queensland and New South Wales\nn12337922\tsmall to medium-sized tree of Tasmania\nn12338034\tmedium-sized tree of southern Australia\nn12338146\tlarge gum tree with mottled bark\nn12338258\tsimilar to but smaller than the spotted gum and having lemon-scented leaves\nn12338454\ta small mallee with rough dark-colored bark toward the butt; yields a red eucalyptus kino gum\nn12338655\ttall tree of Queensland and New South Wales and Victoria\nn12338796\ttree having wood similar to the alpine ash; tallest tree in Australia and tallest hardwood in the world\nn12338979\ttall tree yielding a false manna\nn12339526\tmoderate sized very symmetrical red-flowered evergreen widely cultivated in the tropics for its flower buds which are source of cloves\nn12339831\taromatic flower bud of a clove tree; yields a spice\nn12340383\tany of several gum trees of swampy areas of North America\nn12340581\tcolumnar swamp tree of southeastern to midwestern North America yielding pale soft easily worked wood\nn12340755\tcolumnar tree of eastern North America having horizontal limbs and small leaves that emerge late in spring and have brilliant color in early fall\nn12341542\tany of several erect perennial rhizomatous herbs of the genus Circaea having white flowers that open at dawn; northern hemisphere\nn12341931\ttall evening primrose with inconspicuous flowers\nn12342299\ta plant of the genus Epilobium having pink or yellow flowers and seeds with silky hairs\nn12342498\ttall North American perennial with creeping rootstocks and narrow leaves and spikes of pinkish-purple flowers occurring in great abundance in burned-over 
areas or recent clearings; an important honey plant\nn12342852\tshrublet of southwestern United States to Mexico having brilliant scarlet flowers\nn12343480\tany of various tropical shrubs widely cultivated for their showy drooping purplish or reddish or white flowers; Central and South America and New Zealand and Tahiti\nn12343753\terect or climbing shrub of Brazil with deep pink to red flowers\nn12344283\tany of several plants of the family Onagraceae\nn12344483\ta coarse biennial of eastern North America with yellow flowers that open in the evening; naturalized in Europe\nn12344700\ta day-flowering biennial or perennial of the genus Oenothera\nn12344837\tevening-opening primrose of south central United States\nn12345280\tshrub or small tree native to southwestern Asia having large red many-seeded fruit\nn12345899\ta tropical tree or shrub bearing fruit that germinates while still on the tree and having numerous prop roots that eventually form an impenetrable mass and are important in land building\nn12346578\tany of several ornamental shrubs with shiny mostly evergreen leaves and clusters of small bell-shaped flowers\nn12346813\twidely cultivated low evergreen shrub with dense clusters of fragrant pink to deep rose flowers\nn12346986\tbushy Eurasian shrub with glossy leathery oblong leaves and yellow-green flowers\nn12347158\tsmall European deciduous shrub with fragrant lilac-colored flowers followed by red berries on highly toxic twigs\nn12349315\tevergreen spreading shrub of India and southeastern Asia having large purple flowers\nn12349711\ta beautiful tropical evergreen epiphytic shrub grown for its lush foliage and huge panicles of pink flowers; Philippines\nn12350032\tany of several plants of the genus Rhexia usually having pink-purple to magenta flowers; eastern North America\nn12350758\tany plant of the genus Canna having large sheathing leaves and clusters of large showy flowers\nn12351091\tcanna grown especially for its edible rootstock from which 
arrowroot starch is obtained\nn12351790\twhite-flowered West Indian plant whose root yields arrowroot starch\nn12352287\tany of several tropical and subtropical treelike herbs of the genus Musa having a terminal crown of large entire leaves and usually bearing hanging clusters of elongated fruits\nn12352639\tlow-growing Asian banana tree cultivated especially in the West Indies for its clusters of edible yellow fruit\nn12352844\tAsiatic banana plant cultivated especially as a foliage plant in Japan\nn12352990\ta banana tree bearing hanging clusters of edible angular greenish starchy fruits; tropics and subtropics\nn12353203\twidely cultivated species of banana trees bearing compact hanging clusters of commercially important edible yellow fruit\nn12353431\tPhilippine banana tree having leafstalks that yield Manila hemp used for rope and paper etc\nn12353754\tlarge evergreen arborescent herb having huge paddle-shaped leaves and bearing inedible fruit that resemble bananas but edible young flower shoots; sometimes placed in genus Musa\nn12355760\tperennial plants having thick branching aromatic rhizomes and leafy reedlike stems\nn12356023\ttropical Asian plant widely cultivated for its pungent root; source of gingerroot and powdered ginger\nn12356395\twidely cultivated tropical plant of India having yellow flowers and a large aromatic deep yellow rhizome; source of a condiment and a yellow dye\nn12356960\tsoutheastern Asian perennial with aromatic roots\nn12357485\tcultivated for its shining oblong leaves and arching clusters of white flowers with pink shading and crinkled yellow lips with variegated magenta stripes\nn12357968\tWest African plant bearing pungent peppery seeds\nn12358293\trhizomatous herb of India having aromatic seeds used as seasoning\nn12360108\tany of numerous plants of the genus Begonia grown for their attractive glossy asymmetrical leaves and colorful flowers in usually terminal cymes or racemes\nn12360534\tany of numerous begonias having fibrous 
rather than tuberous or rhizomatous roots\nn12360684\tany of numerous begonias having large tuberous roots\nn12360817\tany of numerous begonias having prominent shaggy creeping stems or rhizomes\nn12360958\thybrid winter-blooming begonia grown for its many large pink flowers\nn12361135\tSouth American fibrous-rooted begonias having prominent basal leaf lobes suggesting angels' wings and racemes of coral-red flowers\nn12361560\trhizomatous begonia with roundish fleshy leaves reddish colored beneath\nn12361754\trhizomatous begonia having leaves with pointed lobes suggestive of stars and pink flowers\nn12361946\tany of numerous usually rhizomatous hybrid begonias derived from an East Indian plant having rough-textured leaves patterned in silver and bronze and purple and red-brown with inconspicuous flowers\nn12362274\thybrid fibrous-rooted begonia having broad-ovate green to bronze-red leaves and small clusters of white or pink or red flowers; widely used as a bedding plant\nn12362514\tsemi-tuberous begonia having peltate leaves and rose-pink flowers; Yemen\nn12362668\tany of numerous hybrid begonias having tuberous roots and variously colored flowers\nn12363301\tany of several evergreen trees or shrubs of the genus Dillenia grown for their foliage and nodding flowers resembling magnolias which are followed by fruit that is used in curries and jellies and preserves\nn12363768\tany of several Australasian evergreen vines widely cultivated in warm regions for their large bright yellow single flowers\nn12364604\tany of several East Indian trees of the genus Calophyllum having shiny leathery leaves and lightweight hard wood\nn12364940\tWest Indian tree having racemes of fragrant white flowers and yielding a durable timber and resinous juice\nn12365158\tvaluable timber tree of Panama\nn12365285\ttropical American tree; valued for its hard durable wood\nn12365462\tEast Indian tree having racemes of fragrant white flowers; coastal areas southern India to 
Malaysia\nn12365900\tan aromatic tree of the genus Clusia having large white or yellow or pink flowers\nn12366053\ta West Indies clusia having fig-shaped fruit\nn12366186\tepiphytic clusia of British Guiana\nn12366313\ta common tropical American clusia having solitary white or rose flowers\nn12366675\tEast Indian tree with thick leathery leaves and edible fruit\nn12366870\tlow spreading tree of Indonesia yielding an orange to brown gum resin (gamboge) used as a pigment when powdered\nn12367611\tany of numerous plants of the genus Hypericum having yellow flowers and transparently dotted leaves; traditionally gathered on St John's eve to ward off evil\nn12368028\tdeciduous bushy Eurasian shrub with golden yellow flowers and reddish-purple fruits from which a soothing salve is made in Spain\nn12368257\tperennial shrub having large star-shaped yellow flowers in narrowly pyramidal cymes\nn12368451\tcreeping evergreen shrub with bright yellow star-shaped summer flowers; useful as ground cover\nn12369066\tlow shrubby plant having yellow flowers with four petals arranged in a cross; Bermuda and southeastern United States to West Indies and eastern Mexico\nn12369309\tyellow-flowered perennial common in fields and waste places but a weed in rangelands\nn12369476\tstiff shrub having oblong entire leaves and dense cymes of yellow flowers\nn12369665\tEuropean perennial St John's wort; Ireland and France to western Siberia\nn12369845\tperennial marsh herb with pink to mauve flowers; southeastern United States\nn12370174\ttropical American tree having edible fruit with a leathery rind\nn12370549\thandsome East Indian evergreen tree often planted as an ornamental for its fragrant white flowers that yield a perfume; source of very heavy hardwood used for railroad ties\nn12371202\tclimbing Asiatic vine having long finely serrate leaves and racemes of white flowers followed by greenish-yellow edible fruit\nn12371439\tclimbing vine native to China; cultivated in New Zealand for its 
fuzzy edible fruit with green meat\nn12371704\tornamental vine of eastern Asia having yellow edible fruit and leaves with silver-white markings\nn12372233\tlarge evergreen shrub or small tree having white aromatic bark and leathery leaves and small purple to red flowers in terminal cymes\nn12373100\ttropical American shrub or small tree having huge deeply palmately cleft leaves and large oblong yellow fruit\nn12373739\tlarge South American evergreen tree trifoliate leaves and drupes with nutlike seeds used as food and a source of cooking oil\nn12374418\tsmall shrubs of scrub and dry woodland regions of southern Europe and North Africa; grown for their showy flowers and soft often downy and aromatic evergreen foliage\nn12374705\tcompact white pubescent shrub of southwestern Europe having pink flowers\nn12374862\tshrub having white flowers and viscid stems and leaves yielding a fragrant oleoresin used in perfumes especially as a fixative\nn12375769\tperennial of the eastern United States having early solitary yellow flowers followed by late petalless flowers; so-called because ice crystals form on it during first frosts\nn12377198\ttree of the family Dipterocarpaceae\nn12377494\tvaluable Philippine timber tree\nn12378249\tsmall shrubby tree of Madagascar cultivated in tropical regions as a hedge plant and for its deep red acid fruits resembling small plums\nn12378753\tvigorous South African spiny shrub grown for its round yellow juicy edible fruits\nn12378963\ta small shrubby spiny tree cultivated for its maroon-purple fruit with sweet purple pulp tasting like gooseberries; Sri Lanka and India\nn12379531\tEast Indian tree with oily seeds yield chaulmoogra oil used to treat leprosy\nn12380761\tlarge much-branched shrub grown primarily for its evergreen foliage\nn12381511\tany of several resinous trees or shrubs often burned for light\nn12382233\tcandlewood of Mexico and southwestern California having tall columnar stems and bearing honey-scented creamy yellow 
flowers\nn12382875\tshrub with narrow-elliptic glossy evergreen leaves and yellow flowers with leathery petaloid sepals\nn12383737\tBrazilian passionflower cultivated for its deep purple fruit\nn12383894\tconsidered best for fruit\nn12384037\ttropical American passionflower yielding the large granadilla fruit\nn12384227\tof southern United States; having an insipid berry the size of a hen egg\nn12384375\tWest Indian passionflower; cultivated for its yellow edible fruit\nn12384569\tcultivated for fruit\nn12384680\tWest Indian passionflower with edible apple-sized fruit\nn12384839\ttropical American passion flower with finely dissected bracts; stems malodorous when crushed\nn12385429\tany plant of the genus Reseda\nn12385566\tMediterranean woody annual widely cultivated for its dense terminal spikelike clusters greenish or yellowish white flowers having an intense spicy fragrance\nn12385830\tEuropean mignonette cultivated as a source of yellow dye; naturalized in North America\nn12386945\tEurasian shrub resembling the tamarisk\nn12387103\tplant growing naturally in very salty soil\nn12387633\tany of the numerous plants of the genus Viola\nn12387839\tany of numerous low-growing violas with small flowers\nn12388143\tcommon Old World viola with creamy often violet-tinged flowers\nn12388293\tviolet of eastern North America having pale violet to white flowers\nn12388858\tOld World leafy-stemmed blue-flowered violet\nn12388989\tEuropean viola with an unusually long corolla spur\nn12389130\tviolet of Pacific coast of North America having white petals tinged with yellow and deep violet\nn12389501\tcommon violet of the eastern United States with large pale blue or purple flowers resembling pansies\nn12389727\tviolet of eastern North America having softly pubescent leaves and stems and clear yellow flowers with brown-purple veins\nn12389932\tviolet of eastern North America having lilac-purple flowers with a long slender spur\nn12390099\tleafy-stemmed violet of eastern North 
America having large white or creamy flowers faintly marked with purple\nn12390314\tcommon European violet that grows in woods and hedgerows\nn12392070\tany of numerous plants having stinging hairs that cause skin irritation on contact (especially of the genus Urtica or family Urticaceae)\nn12392549\tperennial Eurasian nettle established in North America having broad coarsely toothed leaves with copious stinging hairs\nn12392765\tannual European nettle with stinging foliage and small clusters of green flowers\nn12393269\ttall perennial herb of tropical Asia with dark green leaves; cultivated for the fiber from its woody stems that resembles flax\nn12394118\tAmerican perennial herb found in rich woods and provided with stinging hairs; provides fibers used for textiles\nn12394328\tany of several tall Australian trees of the genus Laportea\nn12394638\therb that grows in crevices having long narrow leaves and small pink apetalous flowers\nn12395068\ta plants of the genus Pilea having drooping green flower clusters and smooth translucent stems and leaves\nn12395289\ttropical American stingless nettle that discharges its pollen explosively\nn12395463\tlow stingless nettle of Central and South America having velvety brownish-green toothed leaves and clusters of small green flowers\nn12395906\tAustralian plant of genus Pipturus whose fiber is used in making cloth\nn12396091\tHawaiian tree of genus Pipturus having a bark (tapa) from which tapa cloth is made\nn12396924\tany plant of the genus Cannabis; a coarse bushy annual with palmate leaves and clusters of small green flowers; yields tough fibers and narcotic drugs\nn12397431\tsource of e.g. 
bhang and hashish as well as fiber\nn12399132\tany of several trees of the genus Morus having edible fruit that resembles the blackberry\nn12399384\tAsiatic mulberry with white to pale red fruit; leaves used to feed silkworms\nn12399534\tEuropean mulberry having dark foliage and fruit\nn12399656\tNorth American mulberry having dark purple edible fruit\nn12399899\tsmall shrubby deciduous yellowwood tree of south central United States having spines, glossy dark green leaves and an inedible fruit that resembles an orange; its hard orange-colored wood used for bows by Native Americans; frequently planted as boundary hedge\nn12400489\tnative to Pacific islands and having edible fruit with a texture like bread\nn12400720\tEast Indian tree cultivated for its immense edible fruit and seeds\nn12400924\tPhilippine tree similar to the breadfruit tree bearing edible fruit\nn12401335\tany moraceous tree of the tropical genus Ficus; produces a closed pear-shaped receptacle that becomes fleshy and edible when mature\nn12401684\tMediterranean tree widely cultivated for its edible fruit\nn12401893\twild variety of the common fig used to facilitate pollination of certain figs\nn12402051\ta strangler tree native to southern Florida and West Indies; begins as an epiphyte eventually developing many thick aerial roots and covering enormous areas\nn12402348\tEast Indian tree that puts out aerial shoots that grow down into the soil forming additional trunks\nn12402596\tfig tree of India noted for great size and longevity; lacks the prop roots of the banyan; regarded as sacred by Buddhists\nn12402840\tlarge tropical Asian tree frequently dwarfed as a houseplant; source of Assam rubber\nn12403075\tshrub or small tree often grown as a houseplant having foliage like mistletoe\nn12403276\tAustralian tree resembling the banyan often planted for ornament; introduced into South Africa for brushwood\nn12403513\tthick-branched wide-spreading tree of Africa and adjacent southwestern Asia often 
buttressed with branches rising from near the ground; produces cluster of edible but inferior figs on short leafless twigs; the biblical sycamore\nn12403994\tshrubby Asiatic tree having bark (tapa) that resembles cloth; grown as a shade tree in Europe and America; male flowers are pendulous catkins and female are urn-shaped followed by small orange-red aggregate berries\nn12404729\ttropical American tree with large peltate leaves and hollow stems\nn12405714\tany of various trees of the genus Ulmus: important timber or shade trees\nn12406304\tNorth American elm having twigs and young branches with prominent corky projections\nn12406488\tlarge ornamental tree with graceful gradually spreading branches common in eastern North America\nn12406715\tEuropean elm with lustrous smooth leaves used as an ornamental\nn12406902\telm of southern United States and Mexico having spreading pendulous corky branches\nn12407079\tEurasian elm often planted as a shade tree\nn12407222\tany of various hybrid ornamental European shade trees ranging from dwarf to tall\nn12407396\terect vigorous hybrid ornamental elm tree\nn12407545\tEurasian elm closely resembling the American elm; thrives in a moist environment\nn12407715\tsmall fast-growing tree native to Asia; widely grown as shelterbelts and hedges\nn12407890\tbroad spreading rough-leaved elm common throughout Europe and planted elsewhere\nn12408077\tfast-growing shrubby Asian tree naturalized in United States for shelter or ornament\nn12408280\tNorth American elm having rough leaves that are red when opening; yields a hard wood\nn12408466\ta variety of the English elm with erect branches and broader leaves\nn12408717\tautumn-flowering elm of southeastern United States\nn12408873\ttall widely distributed elm of eastern North America\nn12409231\tany of various trees of the genus Celtis having inconspicuous flowers and small berrylike fruits\nn12409470\tbright green deciduous shade tree of southern Europe\nn12409651\tlarge deciduous shade 
tree of southern United States with small deep purple berries\nn12409840\tdeciduous shade tree with small black berries; southern United States; yields soft yellowish wood\nn12411461\tany bulbous plant of the family Iridaceae\nn12412355\tany of numerous wild or cultivated irises with hairlike structures on the falls (the drooping sepals)\nn12412606\tany of numerous wild or cultivated irises having no hairs on the drooping sepals (the falls)\nn12412987\tfragrant rootstock of various irises especially Florentine iris; used in perfumes and medicines\nn12413165\tlow-growing summer-flowering iris of northeastern United States\nn12413301\tbulbous Spanish iris with red-violet flowers\nn12413419\tGerman iris having large white flowers with lavender-tinged falls and a fragrant rhizome\nn12413642\tiris with purple flowers and foul-smelling leaves; southern and western Europe and North Africa\nn12413880\ta large iris with purple or white flowers, native to central and southern Europe\nn12414035\tiris native to Japan having large showy flowers\nn12414159\tiris of northern Italy having deep blue-purple flowers; similar to but smaller than Iris germanica\nn12414329\tEuropean iris having soft lilac-blue flowers\nn12414449\tbulbous iris native to Asia Minor cultivated for its pale lilac-colored flowers\nn12414818\tbulbous Spanish iris having blue flowers\nn12414932\tlow-growing spring-flowering American iris with bright blue-lilac flowers\nn12415595\tbulbous iris of western Mediterranean region having usually violet-purple flowers\nn12416073\tgarden plant whose capsule discloses when ripe a mass of seeds resembling a blackberry\nn12416423\tany of numerous low-growing plants of the genus Crocus having slender grasslike leaves and white or yellow or purple flowers; native chiefly to the Mediterranean region but widely cultivated\nn12416703\tOld World crocus having purple or white flowers with aromatic pungent orange stigmas used in flavoring food\nn12417836\tany of several South 
African plants of the genus Ixia having grasslike leaves and clusters of showy variously colored lily-like flowers; widely cultivated\nn12418221\tplant with grasslike foliage and delicate blue flowers\nn12418507\ta showy often-cultivated plant with tawny yellow often purple-spotted flowers\nn12419037\tbulbous plant having showy white to reddish flowers\nn12419878\ttropical vine having pink-and-yellow flowers spotted purple and edible roots sometimes boiled as a potato substitute; West Indies to northern South America\nn12420124\ttropical vine having umbels of small purple flowers and edible roots sometimes boiled as a potato substitute; Colombia\nn12420535\tany of various deciduous or evergreen herbs of the genus Haemanthus; South Africa and Namibia\nn12420722\tspectacular plant having large prostrate leaves barred in reddish-purple and flowers with a clump of long yellow stamens in a coral-red cup of fleshy bracts; South Africa\nn12421137\tamaryllis of tropical America often cultivated as a houseplant for its showy white to red flowers\nn12421467\tbulbous plant having erect linear leaves and showy yellow or white flowers either solitary or in clusters\nn12421683\tany of numerous varieties of Narcissus plants having showy often yellow flowers with a trumpet-shaped central crown\nn12421917\twidely cultivated ornamental plant native to southern Europe but naturalized elsewhere having fragrant yellow or white clustered flowers\nn12422129\toften used colloquially for any yellow daffodil\nn12422559\tMexican bulbous herb cultivated for its handsome bright red solitary flower\nn12425281\tplant growing from a bulb or corm or rhizome or tuber\nn12426623\tJapanese lily with golden rays\nn12426749\tcommon lily of the eastern United States having nodding yellow or reddish flowers spotted with brown\nn12427184\tlily of southeastern United States having cup-shaped flowers with deep yellow to scarlet recurved petals\nn12427391\tlily of western North America with showy orange-red 
purple-spotted flowers\nn12427566\teast Asian perennial having large reddish-orange black-spotted flowers with reflexed petals\nn12427757\ttall lily have large white trumpet-shaped flowers that bloom in the spring\nn12427946\torange-flowered lily of Pacific coast of United States\nn12428076\tlily with small dull purple flowers of northwestern Europe and northwestern Asia\nn12428242\tlily of central North America having recurved orange-red flowers with deep crimson spots\nn12428412\tlily of western United States having orange-red to crimson maroon-spotted flowers\nn12428747\tlily of the eastern United States with orange to red maroon-spotted flowers\nn12429352\tAfrican plant with bright green evergreen leaves and umbels of many usually deep violet-blue flowers\nn12430198\tany of several perennials of the genus Aletris having grasslike leaves and bitter roots reputed to cure colic\nn12430471\tcolicroot having a scurfy or granuliferous perianth and white flowers; southeastern United States\nn12430675\tcolicroot with yellow-bracted racemose flowers; smaller than Aletris farinosa; southeastern United States\nn12431434\tbulbous plants having a characteristic pungent onion odor\nn12432069\ta common North American wild onion with a strong onion odor and an umbel of pink flowers atop a leafless stalk; British Columbia to California and Arizona and east to Wyoming and Colorado\nn12432356\tcoarse Old World perennial having a large bulb and tall stalk of greenish purple-tinged flowers; widely naturalized\nn12432574\tNorth American bulbous plant\nn12432707\tEurasian bulbous plant\nn12433081\tthe bulb of an onion plant\nn12433178\ttype of onion plant producing small clustered mild-flavored bulbs used as seasoning\nn12433769\twidely distributed North American wild onion with white to rose flowers\nn12433952\tAsiatic onion with slender bulbs; used as early green onions\nn12434106\tonion with white to deep red tunic; California\nn12434483\tEuropean onion with white 
flowers\nn12434634\tleek producing bulbils instead of flowers; Russia and Iran\nn12434775\tbulbous herb of southern Europe widely naturalized; bulb breaks up into separate strong-flavored cloves\nn12434985\tEuropean leek cultivated and used like leeks\nn12435152\tperennial having hollow cylindrical leaves used for seasoning\nn12435486\tpungent Old World wild onion\nn12435649\tpungent Old World weedy plant\nn12435777\ta plant of eastern Asia; larger than Allium schoenoprasum\nn12435965\tOld World leek with a spherical bulb\nn12436090\tEuropean leek naturalized in Great Britain; leaves are triangular\nn12436907\tmuch-branched South African plant with reddish prickly succulent leaves\nn12437513\ta plant of the genus Kniphofia having long grasslike leaves and tall scapes of red or yellow drooping flowers\nn12437769\tclump-forming plant of South Africa with spikes of scarlet flowers\nn12437930\twidely cultivated hybrid poker plant\nn12439154\tall parts of plant are highly toxic; bulb pounded and used as a fly poison; sometimes placed in subfamily Melanthiaceae\nn12439830\tplant having basal grasslike leaves and a narrow open cluster of starlike yellowish-orange flowers atop a leafless stalk; southwestern United States; only species of Anthericum growing in North America\nn12441183\tplant whose succulent young shoots are cooked and eaten as a vegetable\nn12441390\ta fernlike plant native to South Africa\nn12441552\tfragile twining plant of South Africa with bright green flattened stems and glossy foliage popular as a floral decoration\nn12441958\tany of various chiefly Mediterranean plants of the genera Asphodeline and Asphodelus having linear leaves and racemes of white or pink or yellow flowers\nn12442548\tasphodel having erect smooth unbranched stem either flexuous or straight\nn12443323\tevergreen perennial with large handsome basal leaves; grown primarily as a foliage houseplant\nn12443736\thalf-hardy Mexican herb cultivated for its drooping terminal umbels of showy 
red-and-white flowers\nn12444095\tany of several plants of the genus Blandfordia having large orange or crimson flowers\nn12444898\tmuch-branched leafless twining South African herb cultivated as an ornamental for its bright green stems growing from large aboveground bulbs\nn12446200\tany of several plants of the genus Calochortus having tulip-shaped flowers with 3 sepals and 3 petals; southwestern United States and Mexico\nn12446519\tany of several plants of the genus Calochortus having egg-shaped flowers\nn12446737\tany of several plants of the genus Calochortus having flowers with petals shaped like cat's ears\nn12446908\tglobe lily having open branched clusters of egg-shaped white flowers; southern California\nn12447121\tglobe lily having open branched clusters of clear yellow egg-shaped flowers; northern California\nn12447346\tglobe lily with deep rose-pink or purple egg-shaped flowers on flexuous stems; western slopes of Sierra Nevada in San Joaquin Valley\nn12447581\tsmall plant with slender bent stems bearing branched clusters of a few white star-shaped flowers with petals shaped like cat's ears; southeastern Washington and northeastern Oregon to Montana\nn12447891\tmariposa with clusters of bell-shaped vermilion or orange or yellow flowers atop short stems; southern California to Arizona and Mexico\nn12448136\tmariposa having clusters of a few large deep yellow bell-shaped flowers atop slender stems; California coastal ranges\nn12448361\tmariposa having loose clusters of one to three handsome lilac flowers resembling umbels atop stout erect stems; arid northwestern North America east of Cascade Mountains from southern British Columbia to northern California\nn12448700\tperennial plant having clusters of one to four showy white bell-shaped flowers atop erect unbranched stems; edible bulbs useful in times of scarcity; eastern Montana and western North Dakota south to northern Arizona and northwestern New Mexico\nn12449296\tany of several plants of the genus 
Camassia; North and South America\nn12449526\tplant having a large edible bulb and linear basal leaves and racemes of light to deep violet-blue star-shaped flowers on tall green scapes; western North America\nn12449784\tcamas found to the west of Cascade Mountains\nn12449934\teastern camas; eastern and central North America\nn12450344\tperennial woodland spring-flowering plant; widely cultivated\nn12450607\tNorth American dogtooth having solitary white flowers with yellow centers and blue or pink exteriors\nn12450840\teastern North American dogtooth having solitary yellow flowers marked with brown or purple and spotted interiors\nn12451070\tsturdy European dogtooth with rose to mauve flowers; cultivated in many varieties\nn12451240\tCalifornia dogtooth violet with creamy white flowers sometimes yellow-tinged\nn12451399\tdogtooth violet of western North America having bright yellow flowers\nn12451566\tperennial herb having large white flowers marked with orange; found near the snow line in the northwestern United States\nn12451915\tany liliaceous plant of the genus Fritillaria having nodding variously colored flowers\nn12452256\therb of northwestern America having green-and-purple bell-shaped flowers\nn12452480\therb of southwestern United States having dark purple bell-shaped flowers mottled with green\nn12452673\ta malodorous California herb with bell-shaped flowers; a common weed in grainfields\nn12452836\tEurasian herb with a cluster of leaves and orange-red bell-shaped flowers at the top of the stem\nn12453018\tCalifornia herb with white conic or bell-shaped flowers usually tinged with green\nn12453186\tEurasian checkered lily with pendant flowers usually veined and checkered with purple or maroon on a pale ground and shaped like the bells carried by lepers in medieval times; widely grown as an ornamental\nn12453714\tCalifornia herb with pinkish purple flowers\nn12453857\twestern United States herb with scarlet and yellow narrow bell-shaped 
flowers\nn12454159\tany of numerous perennial bulbous herbs having linear or broadly lanceolate leaves and usually a single showy flower\nn12454436\tsmall early blooming tulip\nn12454556\tEurasian tulip with small flowers blotched at the base\nn12454705\ttall late blooming tulip\nn12454793\tany of several long-stemmed tulips that flower in May; have egg-shaped variously colored flowers\nn12454949\tany of several very tall, late blooming tulips bearing large squarish flowers on sturdy stems\nn12455950\tany plant of the genus Gloriosa of tropical Africa and Asia; a perennial herb climbing by means of tendrils at leaf tips having showy yellow to red or purple flowers; all parts are poisonous\nn12457091\ta day lily with yellow flowers\nn12458550\twidely grown for its fragrance and its white, pink, blue, or purplish flowers\nn12458713\thyacinth with loosely flowered spikes, several growing from one bulb\nn12458874\tsouthern African herb with white bell-shaped flowers\nn12459629\tany of several perennial plants of the genus Ornithogalum native to the Mediterranean and having star-shaped flowers\nn12460146\tOld World star of Bethlehem having edible young shoots\nn12460697\tany of various early flowering spring hyacinths native to Eurasia having dense spikes of rounded blue flowers resembling bunches of small grapes\nn12460957\tprolific species having particularly beautiful dark blue flowers\nn12461109\tlarge beautiful Mediterranean species having sterile bluish-violet flowers with fringed corollas forming a tuft above the fertile flowers\nn12461466\tan Old World plant of the genus Scilla having narrow basal leaves and pink or blue or white racemose flowers\nn12461673\tEuropean scilla with small blue or purple flowers\nn12462032\ta plant of the genus Tofieldia having linear chiefly basal leaves and small spicate flowers\nn12462221\tfalse asphodel having spikes of white flowers; of mountainous regions of Europe\nn12462582\thaving dense spikes of small white flowers and 
yielding a bulb with medicinal properties\nn12462805\tbulb of the sea squill, which is sliced, dried, and used as an expectorant\nn12463134\tshrub with stiff flattened stems resembling leaves (cladophylls); used for making brooms\nn12463743\teither of two herbaceous rushlike bog plants having small yellow flowers and grasslike leaves; north temperate regions\nn12463975\tof western Europe: Scandinavia to northern Spain and Portugal\nn12464128\tof the eastern United States: New Jersey to South Carolina\nn12464476\tperennial herbs of the lily family having thick toxic rhizomes\nn12464649\tNorth American plant having large leaves and yellowish green flowers growing in racemes; yields a toxic alkaloid used medicinally\nn12465557\tplant of western North America having woody rhizomes and tufts of stiff grasslike basal leaves and spikes of creamy white flowers\nn12466727\tany of various plants of the genus Zigadenus having glaucous leaves and terminal racemes of mostly white flowers; all are poisonous\nn12467018\tplant of western North America having grasslike leaves and greenish-white flowers\nn12467197\tplant of eastern and central North America having creamy white flowers tinged with brown or purple; poisonous especially to grazing animals\nn12467433\ta common perennial death camas; Tennessee to Kansas to Texas\nn12467592\tplant of western North America to Mexico; poisonous especially to grazing animals\nn12468545\ttrillium of central United States having dark purple sessile flowers\nn12468719\ta low perennial white-flowered trillium found in the southeastern United States\nn12469517\tEuropean herb with yellow-green flowers resembling and closely related to the trilliums; reputed to be poisonous\nn12470092\tany of various prickly climbing plants of the tropical American genus Smilax having aromatic roots and heart-shaped leaves\nn12470512\ta very prickly woody vine of the eastern United States growing in tangled masses having tough round stems with shiny leathery leaves 
and small greenish flowers followed by clusters of inedible shiny black berries\nn12470907\tcreeping or climbing evergreen having spiny zigzag stems with shiny leaves and racemes of pale-green flowers; Canary Islands to southern Europe and Ethiopia and India\nn12472024\tany temperate liliaceous plant of the genus Clintonia having broad basal leaves and white or yellowish or purplish flowers followed by blue or black berries\nn12473608\tsmall two-leaved herb of the northern United States and parts of Canada having racemes of small fragrant white flowers\nn12473840\tsmall white-flowered plant of western Europe to Japan\nn12474167\tany of several plants of the genus Polygonatum having paired drooping yellowish-green flowers and a thick rootstock with scars shaped like Solomon's seal\nn12474418\tNorth American perennial herb with smooth foliage and drooping tubular greenish flowers\nn12475035\tany of various plants of the genus Uvularia having yellowish drooping bell-shaped flowers\nn12475242\tplant of southern and southeastern United States grown for its yellow flowers that can be dried\nn12475774\tperennial herb of East Indies to Polynesia and Australia; cultivated for its large edible root yielding Otaheite arrowroot starch\nn12476510\ttropical American plants with basal rosettes of fibrous sword-shaped leaves and flowers in tall spikes; some cultivated for ornament or for fiber\nn12477163\twidely cultivated American monocarpic plant with greenish-white flowers on a tall stalk; blooms only after ten to twenty years and then dies\nn12477401\tMexican or West Indian plant with large fleshy leaves yielding a stiff fiber used in e.g. 
rope\nn12477583\tPhilippine plant yielding a hard fibre used in making coarse twine\nn12477747\tMexican plant used especially for making pulque which is the source of the colorless Mexican liquor, mescal\nn12477983\tMexican plant used especially for making tequila\nn12478768\telegant tree having either a single trunk or a branching trunk each with terminal clusters of long narrow leaves and large panicles of fragrant white, yellow or red flowers; New Zealand\nn12479537\tan agave that is often cultivated for its decorative foliage\nn12480456\ta tuberous Mexican herb having grasslike leaves and cultivated for its spikes of highly fragrant lily-like waxy white flowers\nn12480895\tgrown as a houseplant for its mottled fleshy sword-shaped leaves or as a source of fiber\nn12481150\tbowstring hemp of South Africa\nn12481289\tplant having thick fibrous leaves transversely banded in light and dark green\nn12481458\tstemless plant having narrow rigid leaves often cultivated as a houseplant\nn12482437\ta stiff yucca with a short trunk; found in the southern United States and tropical America; has rigid spine-tipped leaves and clusters of white flowers\nn12482668\ttall yucca of the southwestern United States and Mexico having a woody stem and stiff swordlike pointed leaves and a large cluster of white flowers\nn12482893\ta large branched arborescent yucca of southwestern United States having short leaves and clustered greenish white flowers\nn12483282\ttall arborescent yucca of southwestern United States\nn12483427\tyucca with long stiff leaves having filamentlike appendages\nn12483625\tyucca of west central United States having a clump of basal grasslike leaves and a central stalk with a terminal raceme of small whitish flowers\nn12483841\tyucca of southeastern United States similar to the Spanish bayonets but with shorter trunk and smoother leaves\nn12484244\tyucca of southwestern United States and Mexico with a tall spike of creamy white flowers\nn12484784\tperennial plant 
of Europe and America having racemes of white or purplish flowers and intensely bitter trifoliate leaves; often rooting at water margin and spreading across the surface\nn12485653\ttropical shrub having clusters of white or violet or yellow flowers\nn12485981\tpoisonous woody evergreen vine of southeastern United States having fragrant yellow funnel-shaped flowers\nn12486574\tplant of the genus Linum that is cultivated for its seeds and for the fibers of its stem\nn12487058\tdark brown highly poisonous seed of the calabar-bean vine; source of physostigmine and used in native witchcraft\nn12488454\ttropical tree with large prickly pods of seeds that resemble beans and are used for jewelry and rosaries\nn12488709\tsmall thornless tree or shrub of tropical America whose seed pods are a source of tannin\nn12489046\tspreading thorny shrub of tropical Asia bearing large erect racemes of red-marked yellow flowers\nn12489676\tthornless tree yielding heavy wood\nn12489815\ta tropical flowering shrub having bright orange or red flowers; sometimes placed in genus Poinciana\nn12490490\tEast Indian timber tree with hard durable wood used especially for tea boxes\nn12491017\tsmall East Indian tree having orchid-like flowers and hard dark wood\nn12491435\tsmall shrubby African tree having compound leaves and racemes of small fragrant green flowers\nn12491826\tany of various trees or shrubs of the genus Cassia having pinnately compound leaves and usually yellow flowers followed by long seedpods\nn12492106\tdeciduous or semi-evergreen tree having scented sepia to yellow flowers in drooping racemes and pods whose pulp is used medicinally; tropical Asia and Central and South America and Australia\nn12492460\ttropical American semi-evergreen tree having erect racemes of pink or rose-colored flowers; used as an ornamental\nn12492682\tdeciduous ornamental hybrid of southeastern Asia and Hawaii having racemes of flowers ranging in color from cream-colored to orange and 
red\nn12492900\tEast Indian tree having long pods containing a black cathartic pulp used as a horse medicine\nn12493208\tevergreen Mediterranean tree with edible pods; the biblical carob\nn12493426\tlong pod containing small beans and sweetish edible pulp; used as animal feed and source of a chocolate substitute\nn12493868\ta thorny shrub of the genus Cercidium that grows in dry parts of the southwestern United States and adjacent Mexico; has smooth light green bark and racemes of yellow flowers and small leaves\nn12494794\tshowy tropical tree or shrub native to Madagascar; widely planted in tropical regions for its immense racemes of scarlet and orange flowers; sometimes placed in genus Poinciana\nn12495146\tany of various hardwood trees of the family Leguminosae\nn12495670\thoney locust of swamps and bottomlands of southern United States having short oval pods; yields dark heavy wood\nn12495895\ttall usually spiny North American tree having small greenish-white flowers in drooping racemes followed by long twisting seed pods; yields very hard durable reddish-brown wood; introduced to temperate Old World\nn12496427\thandsome tree of central and eastern North America having large bipinnate leaves and green-white flowers followed by large woody brown pods whose seeds are used as a coffee substitute\nn12496949\tspiny shrub or small tree of Central America and West Indies having bipinnate leaves and racemes of small bright yellow flowers and yielding a hard brown or brownish-red heartwood used in preparing a black dye\nn12497669\tlarge shrub or shrubby tree having sharp spines and pinnate leaves with small deciduous leaflets and sweet-scented racemose yellow-orange flowers; grown as ornamentals or hedging or emergency food for livestock; tropical America but naturalized in southern United States\nn12498055\tdensely branched spiny tree of southwestern United States having showy yellow flowers and blue-green bark; sometimes placed in genus Cercidium\nn12498457\terect 
shrub having large trifoliate leaves and dense clusters of yellow flowers followed by poisonous seeds; Yugoslavia; sometimes placed in genus Cytisus\nn12499163\tany of various plants of the genus Senna having pinnately compound leaves and showy usually yellow flowers; many are used medicinally\nn12499757\tevergreen Indian shrub with vivid yellow flowers whose bark is used in tanning; sometimes placed in genus Cassia\nn12499979\terect shrub having racemes of tawny yellow flowers; the dried leaves are used medicinally as a cathartic; sometimes placed in genus Cassia\nn12500309\tNorth American perennial herb; leaves are used medicinally; sometimes placed in genus Cassia\nn12500518\tcosmopolitan tropical herb or subshrub with yellow flowers and slender curved pods; a weed; sometimes placed in genus Cassia\nn12500751\tvery leafy malodorous tropical weedy shrub whose seeds have been used as an adulterant for coffee; sometimes classified in genus Cassia\nn12501202\tlong-lived tropical evergreen tree with a spreading crown and feathery evergreen foliage and fragrant flowers yielding hard yellowish wood and long pods with edible chocolate-colored acidic pulp\nn12504570\tan erect to spreading hairy shrub of the Pacific coast of the United States having racemes of red to indigo flowers\nn12504783\tdense shrub of moist riverbanks and flood plains of the eastern United States having attractive fragrant foliage and dense racemes of dark purple flowers\nn12505253\tvine widely distributed in eastern North America producing racemes of purple to maroon flowers and abundant (usually subterranean) edible one-seeded pods resembling peanuts\nn12506181\tany of several tropical American trees of the genus Andira\nn12506341\ttree with shaggy unpleasant-smelling toxic bark and yielding strong durable wood; bark and seeds used as a purgative and vermifuge and narcotic\nn12506991\tperennial Eurasian herb having heads of red or yellow flowers and common in meadows and pastures; formerly used 
medicinally for kidney disorders\nn12507379\ta North American vine with fragrant blossoms and edible tubers; important food crop of Native Americans\nn12507823\tSouth African shrub having flat acuminate leaves and yellow flowers; leaves are aromatic when dried and used to make an herbal tea\nn12508309\tany of various plants of the genus Astragalus\nn12508618\tperennial of mountainous areas of Eurasia and North America\nn12508762\tperennial of southern and western Europe having dense racemes of purple or violet flowers\nn12509109\tsmall shrubby African tree with hard wood used as a dyewood yielding a red dye\nn12509476\tany of several plants of the genus Baptisia\nn12509665\twild indigo of the eastern United States having racemes of blue flowers\nn12509821\terect or spreading herb having racemes of creamy white flowers; the eastern United States\nn12509993\tmuch-branched erect herb with bright yellow flowers; distributed from Massachusetts to Florida\nn12510343\tEast Indian tree bearing a profusion of intense vermilion velvet-textured blooms and yielding a yellow dye\nn12510774\ttropical woody herb with showy yellow flowers and flat pods; much cultivated in the tropics\nn12511488\ttwining tropical Old World plant bearing long pods usually with red or brown beans; long cultivated in Orient for food\nn12511856\tany plant of the genus Caragana having even-pinnate leaves and mostly yellow flowers followed by seeds in a linear pod\nn12512095\tlarge spiny shrub of eastern Asia having clusters of yellow flowers; often cultivated in shelterbelts and hedges\nn12512294\tshrub with dark-green glossy foliage and solitary pale yellow flowers; northern China\nn12512674\tAustralian tree having pinnate leaves and orange-yellow flowers followed by large woody pods containing 3 or 4 seeds that resemble chestnuts; yields dark strong wood\nn12513172\tlarge-flowered weakly twining or prostrate vine of New Jersey to tropical eastern North America, sometimes cultivated for its purple and 
white flowers\nn12513613\tsmall tree of the eastern Mediterranean having abundant purplish-red flowers growing on old wood directly from stems and appearing before the leaves: widely cultivated in mild regions; wood valuable for veneers\nn12513933\tsmall shrubby tree of eastern North America similar to the Judas tree having usually pink flowers; found in damp sheltered underwood\nn12514138\tshrub of western United States having pink or crimson flowers; often forms thickets\nn12514592\tshrub of Canary Islands having bristle-tipped oblanceolate leaves; used as cattle fodder\nn12514992\tsmall shrubby tree of New Zealand having weeping branches and racemes of white to violet flowers followed by woolly indehiscent two-seeded pods\nn12515393\tany of several small shrubs or twining vines having entire or lobed leaves and racemes of yellow to orange-red flowers; Australia\nn12515711\tAsiatic herb cultivated for its short pods with one or two edible seeds\nn12515925\tthe seed of the chickpea plant\nn12516165\tsmall handsome roundheaded deciduous tree having showy white flowers in terminal clusters and heavy hardwood yielding yellow dye\nn12516584\tany of various shrubs or vines of the genus Clianthus having compound leaves and pea-like red flowers in drooping racemes\nn12516828\tsprawling shrubby perennial noted for its scarlet black-marked flowers; widely distributed in dry parts of Australia\nn12517077\tevergreen shrub with scarlet to white clawlike or beaklike flowers; New Zealand\nn12517445\tlarge-flowered wild twining vine of southeastern and central United States having pale blue flowers\nn12517642\tvine of tropical Asia having pinnate leaves and bright blue flowers with yellow centers\nn12518013\terect tropical Asian shrub whose small lateral leaflets rotate on their axes and jerk up and down under the influence of sunshine\nn12518481\tyellow-flowered European shrub cultivated for its succession of yellow flowers and very inflated bladdery pods and as a source of 
wildlife food\nn12519089\tEuropean herb resembling vetch; naturalized in the eastern United States; having umbels of pink-and-white flowers and sharp-angled pods\nn12519563\tany of various plants of the genus Crotalaria having inflated pods within which the seeds rattle; used for pasture and green-manure crops\nn12520406\tdrought-tolerant herb grown for forage and for its seed which yield a gum used as a thickening agent or sizing material\nn12521186\tlow European broom having trifoliate leaves and yellowish-white flowers\nn12521394\tdeciduous erect spreading broom native to western Europe; widely cultivated for its rich yellow flowers\nn12522188\tany of those hardwood trees of the genus Dalbergia that yield rosewood--valuable cabinet woods of a dark red or purplish color streaked and variegated with black\nn12522678\tEast Indian tree having a useful dark purple wood\nn12522894\tEast Indian tree whose leaves are used for fodder; yields a compact dark brown durable timber used in shipbuilding and making railroad ties\nn12523141\tBrazilian tree yielding a handsome cabinet wood\nn12523475\tan important Brazilian timber tree yielding a heavy hard dark-colored wood streaked with black\nn12523850\ta valuable timber tree of tropical South America\nn12524188\tany of several hardwood trees yielding very dark-colored wood\nn12525168\tany of several spiny shrubs of the genus Daviesia having yellow flowers and triangular seeds; Australia\nn12525513\tany of various usually woody vines of the genus Derris of tropical Asia whose roots yield the insecticide rotenone; several are sources of native fish and arrow poisons\nn12525753\twoody vine having bright green leaves and racemes of rose-tinted white flowers; the swollen roots contain rotenone\nn12526178\tperennial herb of North American prairies having dense heads of small white flowers\nn12526516\tany of various tropical and subtropical plants having trifoliate leaves and rough sticky pod sections or loments\nn12526754\tWest 
Indian forage plant cultivated in southern United States as forage and to improve soil\nn12527081\tSouth African evergreen partly woody vine grown for its clusters of rosy purple flowers followed by edible pods like snap beans; also grown as green manure; sometimes placed in genus Dolichos\nn12527738\tany of various shrubs or shrubby trees of the genus Erythrina having trifoliate leaves and racemes of scarlet to coral red flowers and black seeds; cultivated as an ornamental\nn12528109\tsmall semi-evergreen broad-spreading tree of eastern South Africa with orange-scarlet flowers and small coral-red seeds; yields a light soft wood used for fence posts or shingles\nn12528382\tdeciduous shrub having racemes of deep red flowers and black-spotted red seeds\nn12528549\tsmall South American spiny tree with dark crimson and scarlet flowers solitary or clustered\nn12528768\tsmall semi-evergreen tree of South Africa having dense clusters of clear scarlet flowers and red seeds\nn12528974\tsmall to medium-sized thorny tree of tropical Asia and northern Australia having dense clusters of scarlet or crimson flowers and black seeds\nn12529220\tprickly Australian coral tree having soft spongy wood\nn12529500\ttall bushy European perennial grown for its pinnate foliage and slender spikes of blue flowers; sometimes used medicinally\nn12529905\tany of various Australian evergreen shrubs of the genus Gastrolobium having whorled compound leaves poisonous to livestock and showy yellow to deep reddish-orange flowers followed by two-seeded pods\nn12530629\terect shrub of southwestern Europe having racemes of golden yellow flowers\nn12530818\tsmall Eurasian shrub having clusters of yellow flowers that yield a dye; common as a weed in Britain and the United States; sometimes grown as an ornamental\nn12531328\tthorny shrub or small tree common in central Argentina having small orange or yellow flowers followed by edible berries\nn12531727\tany of several small deciduous trees valued for their 
dark wood and dense racemes of nectar-rich pink flowers grown in great profusion on arching branches; roots and bark and leaves and seeds are poisonous\nn12532564\ta source of oil; used for forage and soil improvement and as food\nn12532886\tdeep-rooted coarse-textured plant native to the Mediterranean region having blue flowers and pinnately compound leaves; widely cultivated in Europe for its long thick sweet roots\nn12533190\tNorth American plant similar to true licorice and having a root with similar properties\nn12533437\troot of licorice used in flavoring e.g. candy and liqueurs and medicines\nn12534208\tvigorous climber of the forests of western Australia; grown for their dense racemes of attractive bright rose-purple flowers\nn12534625\tperennial of western United States having racemes of pink to purple flowers followed by flat pods that separate into nearly orbicular joints\nn12534862\tperennial of southern Europe cultivated for forage and for its nectar-rich pink flowers that make it an important honey crop\nn12536291\tshrub of West Indies and South America that is a source of indigo dye\nn12537253\thairy trailing or prostrate western Australian vine with bright scarlet-pink flowers\nn12537569\tperennial twining vine of Old World tropics having trifoliate leaves and racemes of fragrant purple pea-like flowers followed by maroon pods of edible seeds; grown as an ornamental and as a vegetable on the Indian subcontinent; sometimes placed in genus Dolichos\nn12538209\tan ornamental shrub or tree of the genus Laburnum\nn12539074\tany of various small plants of the genus Lathyrus; climb usually by means of tendrils\nn12539306\tany of various plants of the family Leguminosae that usually grow like vines\nn12539832\tany of several perennial vines of the genus Lathyrus\nn12540250\twild pea of seashores of north temperate zone having tough roots and purple flowers and useful as a sand binder\nn12540647\tannual European vetch with red flowers\nn12540966\tscrambling 
perennial of damp or marshy areas of Eurasia and North America with purplish flowers\nn12541157\tscrambling perennial Eurasian wild pea having yellowish flowers and compressed seed pods; cultivated for forage\nn12541403\tEuropean annual grown for forage; seeds used for food in India and for stock elsewhere\nn12542043\tNorth African annual resembling the sweet pea having showy but odorless flowers\nn12542240\tEuropean herb bearing small tubers used for food and in Scotland to flavor whiskey\nn12543186\tAsian shrub having conspicuous racemose rose-purple flowers widely used as an ornamental and in erosion control and as a source of feed for wild birds\nn12543455\tan annual of tropical Asia naturalized in United States\nn12543639\tannual native to Korea but widely cultivated for forage and hay in hot dry regions\nn12543826\tperennial widely planted as for forage and as hay crop especially on poor land\nn12544240\twidely cultivated Eurasian annual herb grown for its edible flattened seeds that are cooked like peas and also ground into meal and for its leafy stalks that are used as fodder\nn12544539\tthe fruit or seed of a lentil plant\nn12545232\tNorth American annual with red or rose-colored flowers\nn12545635\tEuropean forage plant having claw-shaped pods introduced in America\nn12545865\tsprawling European annual having a 4-winged edible pod\nn12546183\tany plant of the genus Lupinus; bearing erect spikes of usually purplish-blue flowers\nn12546420\twhite-flowered Eurasian herb widely cultivated for forage and erosion control\nn12546617\tevergreen shrub of the Pacific coast of the United States having showy yellow or blue flowers; naturalized in Australia\nn12546962\tstout perennial of eastern and central North America having palmate leaves and showy racemose blue flowers\nn12547215\tlow-growing annual herb of southwestern United States (Texas) having silky foliage and blue flowers; a leading cause of livestock poisoning in the southwestern United 
States\nn12547503\tclosely resembles Lupinus subcarnosus; southwestern United States (Texas)\nn12548280\tany of several Old World herbs of the genus Medicago having small flowers and trifoliate compound leaves\nn12548564\tevergreen shrub of southern European highlands having downy foliage and a succession of yellow flowers throughout the summer followed by curious snail-shaped pods\nn12548804\tEuropean medic naturalized in North America having yellow flowers and sickle-shaped pods\nn12549005\tan annual of the Mediterranean area having spiny seed pods and leaves with dark spots\nn12549192\tprostrate European herb with small yellow flowers and curved black pods; naturalized in North America\nn12549420\timportant European leguminous forage plant with trifoliate leaves and blue-violet flowers grown widely as a pasture and hay crop\nn12549799\tany of several tropical trees or shrubs yielding showy streaked dark reddish or chocolate-colored wood\nn12550210\tany of several erect or climbing woody plants of the genus Mucuna; widespread in tropics of both hemispheres\nn12550408\tthe annual woody vine of Asia having long clusters of purplish flowers and densely hairy pods; cultivated in southern United States for green manure and grazing\nn12551173\tmedium-sized tropical American tree yielding tolu balsam and a fragrant hard wood used for high-grade furniture and cabinetwork\nn12551457\ttree of South and Central America yielding an aromatic balsam\nn12552309\tEurasian perennial herb having pale pink flowers and curved pods; naturalized in Britain and North America grasslands on calcareous soils; important forage crop and source of honey in Britain\nn12552893\tEuropean woody plant having pink flowers and unifoliate leaves and long tough roots; spreads by underground runners\nn12553742\tsmall tree of West Indies and northeastern Venezuela having large oblong pointed leaflets and panicles of purple flowers; seeds are black or scarlet with black spots\nn12554029\tWest Indian 
tree similar to Ormosia monosperma but larger and having smaller leaflets and smaller seeds\nn12554526\tany of several leguminous plants of western North America causing locoism in livestock\nn12554729\ttufted locoweed of southwestern United States having purple or pink to white flowers\nn12554911\tany plant that breaks away from its roots in autumn and is driven by the wind as a light rolling mass\nn12555255\tCentral American twining plant with edible roots and pods; large tubers are eaten raw or cooked especially when young and young pods must be thoroughly cooked; pods and seeds also yield rotenone and oils\nn12555859\ttrailing trifoliate Asiatic and African herb having cobalt blue flowers\nn12556656\ta climbing bean plant that will climb a wall or tree or trellis\nn12557064\tthe common bean plant grown for the beans rather than the pods (especially a variety with large red kidney-shaped beans)\nn12557438\ta French variety of green bean plant bearing light-colored beans\nn12557556\ta common bean plant grown for its edible golden pod\nn12557681\ttropical American bean with red flowers and mottled black beans similar to Phaseolus vulgaris but perennial; a preferred food bean in Great Britain\nn12558230\tbush or tall-growing bean plant having large flat edible seeds\nn12558425\tbush bean plant cultivated especially in southern United States having small flat edible seeds\nn12558680\ttwining plant of southwestern United States and Mexico having roundish white or yellow or brown or black beans\nn12559044\tspiny evergreen xerophytic shrub having showy rose and purple flowers and forming dense thickets; of dry rocky mountain slopes of California\nn12559518\tsmall tree of West Indies and Florida having large odd-pinnate leaves and panicles of red-striped purple to white flowers followed by decorative curly winged seedpods; yields fish poisons\nn12560282\tthe fruit or seed of a pea plant\nn12560621\tthe flattened to cylindric inflated multi-seeded fruit of the common pea 
plant\nn12560775\ta variety of pea plant producing peas having soft thick edible pods lacking the fibrous inner lining of the common pea\nn12561169\tvariety of pea plant producing peas having crisp rounded edible pods\nn12561309\tvariety of pea plant native to the Mediterranean region and North Africa and widely grown especially for forage\nn12561594\tseed of the field pea plant\nn12562141\tlow spreading evergreen shrub of southern Australia having triangular to somewhat heart-shaped foliage and orange-yellow flowers followed by flat winged pods\nn12562577\tany of several tropical American trees some yielding economically important timber\nn12562785\tlarge tree of Trinidad and Guyana having odd-pinnate leaves and violet-scented axillary racemes of yellow flowers and long smooth pods; grown as a specimen in parks and large gardens\nn12563045\tlarge erect shrub of Colombia having large odd-pinnate leaves with large leaflets and axillary racemes of fragrant yellow flowers\nn12563702\tevergreen Asiatic tree having glossy pinnate leaves and racemose creamy-white scented flowers; used as a shade tree\nn12564083\ta tuberous twining annual vine bearing clusters of purplish flowers and pods with four jagged wings; Old World tropics\nn12564613\tdensely hairy perennial of central North America having edible tuberous roots\nn12565102\tdeciduous South African tree having large odd-pinnate leaves and profuse fragrant orange-yellow flowers; yields a red juice and heavy strong durable wood\nn12565912\tEast Indian tree yielding a resin or extract often used medicinally and in e.g. 
tanning\nn12566331\ttree of India and East Indies yielding a hard fragrant timber prized for cabinetwork and dark red heartwood used as a dyewood\nn12566954\tfast-growing vine from eastern Asia having tuberous starchy roots and hairy trifoliate leaves and racemes of purple flowers followed by long hairy pods containing many seeds; grown for fodder and forage and root starch; widespread in the southern United States\nn12567950\tlarge shrub or small tree of the eastern United States having bristly stems and large clusters of pink flowers\nn12568186\tlarge thorny tree of eastern and central United States having pinnately compound leaves and drooping racemes of white flowers; widely naturalized in many varieties in temperate regions\nn12568649\tsmall rough-barked locust of southeastern United States having racemes of pink flowers and glutinous branches and seeds\nn12569037\tsmall Dominican tree bearing masses of large crimson flowers before the fine pinnate foliage emerges\nn12569616\ttall-growing annual of southwestern United States widely grown as green manure; yields a strong tough bast fiber formerly used by Indians for cordage\nn12569851\ta softwood tree with lax racemes of usually red or pink flowers; tropical Australia and Asia; naturalized in southern Florida and West Indies\nn12570394\thandsome roundheaded deciduous tree having compound dark green leaves and profuse panicles of fragrant creamy-white flowers; China and Japan\nn12570703\tshrub or small tree having pinnate leaves poisonous to livestock and dense racemes of intensely fragrant blue flowers and red beans\nn12570972\tshrub or small tree of New Zealand and Chile having pendulous racemes of tubular golden-yellow flowers; yields a hard strong wood\nn12571781\tvigorous Philippine evergreen twining liana; grown for spectacular festoons of green flowers that resemble lobster claws\nn12572546\ta plant of the genus Tephrosia having pinnate leaves and white or purplish flowers and flat hairy 
pods\nn12572759\tEast Indian shrub\nn12572858\tperennial subshrub of eastern North America having downy leaves yellowish and rose flowers and; source of rotenone\nn12573256\tany of various plants of the genus Thermopsis having trifoliate leaves and yellow or purple racemose flowers\nn12573474\twestern United States bushy herb having yellow pea-like flowers\nn12573647\teastern United States bush pea\nn12573911\tsemi-evergreen South American tree with odd-pinnate leaves and golden yellow flowers cultivated as an ornamental\nn12574320\tOld World herb related to fenugreek\nn12574470\tannual herb or southern Europe and eastern Asia having off-white flowers and aromatic seeds used medicinally and in curry\nn12574866\tvery spiny and dense evergreen shrub with fragrant golden-yellow flowers; common throughout western Europe\nn12575322\tany of various climbing plants of the genus Vicia having pinnately compound leaves that terminate in tendrils and small variously colored flowers; includes valuable forage and soil-building plants\nn12575812\tcommon perennial climber of temperate regions of Eurasia and North America having dense elongate clusters of flowers\nn12576323\tseed of the broad-bean plant\nn12576451\tEuropean perennial toxic vetch\nn12576695\tEuropean purple-flowered with slender stems; occurs as a weed in hedges\nn12577362\tEast Indian legume having hairy foliage and small yellow flowers followed by cylindrical pods; used especially in India for food and forage and as a soil conditioner; sometimes placed in genus Phaseolus\nn12577895\tperennial tropical American vine cultivated for its racemes of showy yellow and purple flowers having the corolla keel coiled like a snail shell; sometimes placed in genus Phaseolus\nn12578255\terect bushy annual widely cultivated in warm regions of India and Indonesia and United States for forage and especially its edible seeds; chief source of bean sprouts used in Chinese cookery; sometimes placed in genus 
Phaseolus\nn12578626\tsprawling Old World annual cultivated especially in southern United States for food and forage and green manure\nn12578916\tfruit or seed of the cowpea plant\nn12579038\tSouth American bean having very long succulent pods\nn12579404\tAustralian leafless shrub resembling broom and having small yellow flowers\nn12579822\ttree with odd-pinnate leaves and racemes of fragrant pink to purple flowers\nn12580012\tfast-growing roundheaded tree with fragrant white to deep rose flowers; planted as an ornamental\nn12580654\thaving flowers of pink to mauve or violet-blue\nn12580786\thaving deep purple flowers\nn12580896\tan eastern United States native resembling the cultivated Japanese wisteria having pale purple-lilac flowers\nn12581110\ta wisteria of China having white flowers\nn12582231\tany plant of the family Palmae having an unbranched trunk crowned by large pinnate or palmate leaves\nn12582665\tany of various tropical Asian palm trees the trunks of which yield sago\nn12582846\tpalm having pinnate or featherlike leaves\nn12583126\tpalm having palmate or fan-shaped leaves\nn12583401\tany of several low-growing palms with fan-shaped leaves\nn12583681\ttropical American palm having edible nuts and yielding a useful fiber\nn12583855\ttropical American feather palm having a swollen spiny trunk and edible nuts\nn12584191\tany of several tall tropical palms native to southeastern Asia having egg-shaped nuts\nn12584365\tsoutheastern Asian palm bearing betel nuts (scarlet or orange single-seeded fruit with a fibrous husk)\nn12584715\tMalaysian feather palm with base densely clothed with fibers; yields a sweet sap used in wine and trunk pith yields sago\nn12585137\tBrazilian palm yielding fibers used in making ropes, mats, and brushes\nn12585373\tnut having a hard hazel-brown shell used like vegetable ivory\nn12585629\ttall fan palm of Africa and India and Malaysia yielding a hard wood and sweet sap that is a source of palm wine and sugar; leaves used for 
thatching and weaving\nn12586298\tany tropical Asian palm of the genus Calamus; light tough stems are a source of rattan canes\nn12586499\tclimbing palm of Sri Lanka and southern India remarkable for the great length of the stems which are used for malacca canes\nn12586725\ttall scrambling spiny palm of northeastern Queensland, Australia\nn12586989\tattractive East Indian palm having distinctive bipinnate foliage\nn12587132\tfishtail palm of India to Malay Peninsula; sap yields a brown sugar (jaggery) and trunk pith yields sago\nn12587487\tpalm of the Andes yielding a resinous wax which is mixed with tallow to make candles\nn12587803\ttall palm tree bearing coconuts as fruits; widely planted throughout the tropics\nn12588320\tBrazilian fan palm having an edible root; source of a useful leaf fiber and a brittle yellowish wax\nn12588780\tSouth American palm yielding a wax similar to carnauba wax\nn12589142\tany of several tropical American palms bearing corozo nuts\nn12589458\tlarge-leaved palm of Malay to Philippines and northern Australia; leaves used for thatching or plaiting into containers\nn12589687\tfan palms of the southern United States and the Caribbean region\nn12589841\ttall palm of southern India and Sri Lanka with gigantic leaves used as umbrellas and fans or cut into strips for writing paper\nn12590232\tpinnate-leaved palms of the genus Elaeis having dense clusters of crowded flowers and bright red fruit and yielding high quality palm oils\nn12590499\toil palm of Africa\nn12590600\tpalm of Central and South America\nn12590715\tseed of any oil palm\nn12591017\tBrazilian palm of genus Euterpe whose leaf buds are eaten like cabbage when young\nn12591351\tAustralian palm with leaf buds that are edible when young\nn12591702\tMalaysian palm whose pithy trunk yields sago--a starch used as a food thickener and fabric stiffener; Malaya to Fiji\nn12592058\tany creeping semiaquatic feather palm of the genus Nipa found in mangrove swamps and tidal estuaries; its 
sap is used for a liquor; leaves are used for thatch; fruit has edible seeds\nn12592544\ttall feather palm of northern Brazil with hard-shelled nuts yielding valuable oil and a kind of vegetable ivory\nn12592839\thard-shelled nut of the babassu palm\nn12593122\ttropical American feather palm whose large nuts yield valuable oil and a kind of vegetable ivory\nn12593341\tnut of the cohune palm having hard white shells like those of ivory nuts\nn12593994\ttall tropical feather palm tree native to Syria bearing sweet edible fruit\nn12594324\ta stemless palm tree of Brazil and Peru bearing ivory nuts\nn12594989\ta large feather palm of Africa and Madagascar having very long pinnatisect fronds yielding a strong commercially important fiber from its leafstalks\nn12595699\ta palm of the genus Raffia\nn12595964\tany of several small palms of the genus Rhapis; cultivated as houseplants\nn12596148\tsmall graceful palm with reedlike stems and leaf bases clothed with loose coarse fibers\nn12596345\tChinese lady palm with more slender stems and finer sheath fibers than Rhapis excelsa\nn12596709\ttall feather palm of southern Florida and Cuba\nn12596849\tWest Indian palm with leaf buds that are edible when young\nn12597134\tlow-growing fan-leaved palm of coastal southern United States having edible leaf buds\nn12597466\tsmall hardy clump-forming spiny palm of southern United States\nn12597798\tsmall palm of southern Florida and West Indies closely resembling the silvertop palmetto\nn12598027\tsmall stocky fan palm of southern Florida and Cuba\nn12599185\tan Old World plantain with long narrow ribbed leaves widely established in temperate regions\nn12599435\tcommon European perennial naturalized worldwide; a troublesome weed\nn12599661\twidely distributed Old World perennial naturalized in North America having finely hairy leaves and inconspicuous white fragrant flowers\nn12599874\tplantain of Mediterranean regions whose seeds swell and become gelatinous when moist and are used as 
a mild laxative\nn12600095\tNorth American plantain having reddish leafstalks and broad leaves\nn12600267\tNorth American annual or biennial with long soft hairs on the leaves\nn12601494\ta member of the genus Fagopyrum; annual Asian plant with clusters of small pinkish white flowers and small edible triangular seeds which are used whole or ground into flour\nn12601805\tannual with broadly ovate leaves and slender drooping spikes of crimson flowers; southeastern Asia and Australia; naturalized in North America\nn12602262\tany plant of the genus Eriogonum with small clustered flowers\nn12602434\tlate blooming perennial plant of shale barrens of Virginia having flowers in flat-topped clusters\nn12602612\tlow-growing shrub with spreading branches and flowers in loose heads; desert regions of western United States (California to Utah)\nn12602980\tplants having long green or reddish acidic leafstalks growing in basal clumps; stems (and only the stems) are edible when cooked; leaves are poisonous\nn12603273\tAsian herb (Himalayas)\nn12603449\tlong cultivated hybrid of Rheum palmatum; stems often cooked in pies or as sauce or preserves\nn12603672\tlong used for laxative properties\nn12604228\tEuropean sorrel with large slightly acidic sagittate leaves grown throughout north temperate zone for salad and spring greens\nn12604460\tsmall plant having pleasantly acid-tasting arrow-shaped leaves; common in dry places\nn12604639\tEuropean dock with broad obtuse leaves and bitter rootstock common as a weed in North America\nn12604845\tlow perennial with small silvery-green ovate to hastate leaves\nn12605683\tany of several rushlike plants, especially of the pine barrens of southern United States\nn12606438\tany plant of the genus Commelina\nn12606545\tany plant of the family Commelinaceae\nn12607456\ta tropical American plant bearing a large fleshy edible fruit with a terminal tuft of stiff leaves; widely cultivated in the tropics\nn12609379\taquatic perennial of North America 
and Ireland and Hebrides having translucent green leaves in a basal spiral and dense buttonlike racemes of minute white flowers\nn12610328\ta tropical floating aquatic plant having spikes of large blue flowers; troublesome in clogging waterways especially in southern United States\nn12610740\tgrassy-leaved North American aquatic plant with yellow star-shaped blossoms\nn12611640\tsubmerged aquatic plant having narrow leaves and small flowers; of fresh or brackish water\nn12612170\tmarsh plant having clusters of small white or pinkish flowers and broad pointed or rounded leaves\nn12612811\ta variety of water plantain\nn12613706\tsubmersed plant with whorled lanceolate leaves and solitary axillary flowers; Old World plant naturalized in southern United States and clogging Florida's waterways\nn12614096\tAmerican plant with roundish heart-shaped or kidney-shaped leaves; usually rooted in muddy bottoms of ponds and ditches\nn12614477\ta weedy aquatic plant of genus Elodea\nn12614625\tNorth American waterweed; widely naturalized in Europe\nn12615232\tsubmerged aquatic plant with ribbonlike leaves; Old World and Australia\nn12615710\tany of several submerged or floating freshwater perennial aquatic weeds belonging to the family Potamogetonaceae\nn12616248\tEuropean herb naturalized in the eastern United States and California\nn12616630\tpondweed with floating leaves; of northern United States and Europe\nn12616996\tvery similar to Potamogeton; of western Africa, Asia, and Europe\nn12617559\ttufted perennial found in shallow water or marshland; sometimes poisons livestock\nn12618146\tfound in still or slow-moving fresh or brackish water; useful to oxygenate cool water ponds and aquaria\nn12618727\tsubmerged marine plant with very long narrow leaves found in abundance along North Atlantic coasts\nn12620196\tany of many shrubs of the genus Rosa that bear roses\nn12620546\tthe fruit of a rose plant\nn12620969\tChinese evergreen climbing rose with yellow or white single 
flowers\nn12621410\tlarge hardy very fragrant pink rose; cultivated in Asia Minor as source of attar of roses; parent of many hybrids\nn12621619\tEurasian rose with prickly stems and fragrant leaves and bright pink flowers followed by scarlet hips\nn12621945\tChinese climbing rose with fragrant white blossoms\nn12622297\trose native to Mediterranean region having curved or climbing branches and loose clusters of musky-scented flowers\nn12622875\ta plant of the genus Agrimonia having spikelike clusters of small yellow flowers\nn12623077\terect perennial Old World herb of dry grassy habitats\nn12623211\tfragrant European perennial herb found at woodland margins on moist soils\nn12623818\tshrub or small tree of northwestern North America having fragrant creamy white flowers and small waxy purple-red fruits\nn12624381\tAsiatic ornamental shrub with spiny branches and pink or red blossoms\nn12624568\tdeciduous thorny shrub native to Japan having red blossoms\nn12625003\tsmall tropical American tree bearing edible plumlike fruit\nn12625383\tany shrub of the genus Cotoneaster: erect or creeping shrubs having richly colored autumn foliage and many small white to pinkish flowers followed by tiny red or black fruits\nn12625670\tclimbing evergreen shrub with white flowers and red berries; often used as ground cover\nn12625823\tdeciduous flat-growing shrub with a fanned herringbone pattern and having reddish flowers and orange-red berries; used as a ground cover\nn12626674\tsouthern United States hawthorn with pinnately lobed leaves\nn12626878\tcommon shrub or small tree of the eastern United States having few thorns and white flowers in corymbs followed by bright orange-red berries\nn12627119\terect and almost thornless American hawthorn with somewhat pear-shaped berries\nn12627347\teastern United States hawthorn with long straight thorns\nn12627526\thawthorn of southern United States bearing a juicy, acidic, scarlet fruit that is often used in jellies or 
preserves\nn12628356\tAmerican red-fruited hawthorn with stems and leaves densely covered with short woolly hairs\nn12628705\tAmerican red-fruited hawthorn with dense corymbs of pink-red flowers\nn12628986\tsmall Asian tree with pinkish flowers and pear-shaped fruit; widely cultivated\nn12629305\tcreeping evergreen shrub with large white flowers; widely distributed in northern portions of Eurasia and North America\nn12629666\tevergreen tree of warm regions having fuzzy yellow olive-sized fruit with a large free stone; native to China and Japan\nn12630763\twild strawberry of western United States and South America; source of many varieties of cultivated strawberries\nn12630999\tNorth American wild strawberry with sweet scarlet fruit; a source of many cultivated strawberries\nn12631331\tany of various perennials of the genus Geum having usually pinnate basal leaves and variously colored flowers\nn12631637\terect subshrub with deep yellow flowers; Europe and Asia and North America\nn12631932\thairy yellow-flowered plant of eastern Asia and North America\nn12632335\tNorth American perennial with hairy basal pinnate leaves and purple flowers and plume-tipped fruits\nn12632733\tavens of Virginia having pale or greenish yellow flowers\nn12633061\tornamental evergreen treelike shrub of the Pacific coast of the United States having large white flowers and red berrylike fruits; often placed in genus Photinia\nn12633638\tany tree of the genus Malus especially those bearing firm rounded edible fruits\nn12633994\tnative Eurasian tree widely cultivated in many varieties for its firm rounded edible fruits\nn12634211\tany of numerous wild apple trees usually with small acidic fruit\nn12634429\tany of numerous varieties of crab apples cultivated for their small acidic (usually bright red) fruit used for preserves or as ornamentals for their blossoms\nn12634734\tAsian wild crab apple cultivated in many varieties for it small acid usually red fruit used for 
preserving\nn12634986\twild crab apple native to Europe; a chief ancestor of cultivated apples\nn12635151\tmedium-sized tree of the eastern United States having pink blossoms and small yellow fruit\nn12635359\tsmall tree or shrub of western United States having white blossoms and tiny yellow or red fruit\nn12635532\tsmall tree or shrub of southeastern United States; cultivated as an ornamental for its rose-colored blossoms\nn12635744\twild crab apple of western United States with fragrant pink flowers\nn12635955\tderived from the Iowa crab and cultivated for its large double pink blossoms\nn12636224\tsmall deciduous Eurasian tree cultivated for its fruit that resemble crab apples\nn12636885\tany of a numerous plants grown for their five-petaled flowers; abundant in temperate regions; alleged to have medicinal properties\nn12637123\tlow-growing perennial having leaves silvery beneath; northern United States; Europe; Asia\nn12637485\tEuropean garden herb with purple-tinged flowers and leaves that are sometimes used for salads\nn12638218\tany of several trees producing edible oval fruit having a smooth skin and a single hard stone\nn12638556\tan uncultivated plum tree or shrub\nn12638753\twild plum of northeastern United States having dark purple fruits with yellow flesh\nn12638964\twild plum trees of eastern and central North America having red-orange fruit with yellow flesh\nn12639168\tsmall native American shrubby tree bearing small edible yellow to reddish fruit\nn12639376\tseacoast shrub of northeastern North America having showy white blossoms and edible purple fruit\nn12639584\tany of various widely distributed plums grown in the cooler temperate areas\nn12639736\tsmall wild or half-domesticated Eurasian plum bearing small ovoid fruit in clusters\nn12639910\tplum tree long cultivated for its edible fruit\nn12640081\tsmall tree of southwestern United States having purplish-red fruit sometimes cultivated as an ornamental for its large leaves\nn12640284\tsmall 
tree native to northeastern North America having oblong orange-red fruit\nn12640435\thybrid produced by crossing Prunus domestica and Prunus armeniaca\nn12640607\tAsian tree having clusters of usually white blossoms and edible fruit resembling the peach\nn12640839\tJapanese ornamental tree with fragrant white or pink blossoms and small yellow fruits\nn12641007\ttemperate zone tree bearing downy yellow to rosy fruits\nn12641180\tsmall hybrid apricot of Asia and Asia Minor having purplish twigs and white flowers following by inferior purple fruit\nn12641413\tany of numerous trees and shrubs producing a small fleshy round fruit with a single hard stone; many also produce a valuable hardwood\nn12641931\tan uncultivated cherry tree\nn12642090\tthe fruit of the wild cherry tree\nn12642200\tlarge Eurasian tree producing small dark bitter fruit in the wild but edible sweet fruit under cultivation\nn12642435\tany of several cultivated sweet cherries having sweet juicy heart-shaped fruits\nn12642600\twild or seedling sweet cherry used as stock for grafting\nn12642964\tMexican black cherry tree having edible fruit\nn12643113\tsmall flowering evergreen tree of southern United States\nn12643313\tsmall Asiatic tree bearing edible red or yellow fruit\nn12643473\trather small Eurasian tree producing red to black acid edible fruit\nn12643688\tany of several cultivated sour cherry trees bearing pale red fruit with colorless juice\nn12643877\tany of several cultivated sour cherry trees bearing fruit with dark skin and juice\nn12644283\tsmall bitter fruit of the marasca cherry tree from whose juice maraschino liqueur is made\nn12644902\tany of several small bushy trees having pink or white blossoms and usually bearing nuts\nn12645174\tsmall bushy deciduous tree native to Asia and North Africa having pretty pink blossoms and highly prized edible nuts enclosed in a hard green hull; cultivated in southern Australia and California\nn12645530\talmond trees having white blossoms and 
poisonous nuts yielding an oil used for flavoring and for medicinal purposes\nn12646072\tvariety of large almond from Malaga, Spain; used in confectionery\nn12646197\tsmall Chinese shrub with smooth unfurrowed dark red fruit grown especially for its red or pink or white flowers\nn12646397\tCalifornia evergreen wild plum with spiny leathery leaves and white flowers\nn12646605\tshrubby Japanese cherry tree having pale pink blossoms\nn12646740\twoody oriental plant with smooth unfurrowed red fruit grown especially for its white or pale pink blossoms\nn12646950\tfrequently cultivated Eurasian evergreen shrub or small tree having showy clusters of white flowers and glossy foliage and yielding oil similar to bitter almond oil\nn12647231\tevergreen shrub or small tree found on Catalina Island (California)\nn12647376\tany of several small-fruited cherry trees frequented or fed on by birds\nn12647560\tsmall European cherry tree closely resembling the American chokecherry\nn12647787\tsmall cherry much liked by birds\nn12647893\tsmall shrubby North American wild cherry with small bright red acid fruit\nn12648045\tcultivated in temperate regions\nn12648196\tvariety or mutation of the peach bearing fruit with smooth skin and (usually) yellow flesh\nn12648424\tsmall straggling American cherry growing on sandy soil and having minute scarcely edible purplish-black fruit\nn12648693\tsmall tree of China and Japan bearing large yellow to red plums usually somewhat inferior to European plums in flavor\nn12648888\tlarge North American wild cherry with round black sour edible fruit\nn12649065\tany of several shrubs or trees of the genus Prunus cultivated for their showy white or pink single or double blossoms\nn12649317\tornamental tree with inedible fruits widely cultivated in many varieties for its white blossoms\nn12649539\tornamental tree with inedible fruit widely cultivated in many varieties for its pink blossoms\nn12649866\tshrub of the Pacific coast of the United States bearing 
small red insipid fruit\nn12650038\tshrub or tree native to Japan cultivated as an ornamental for its rose-pink flowers\nn12650229\tAsiatic shrub cultivated for its rosy red flowers\nn12650379\tdeciduous Chinese shrub or small tree with often trilobed leaves grown for its pink-white flowers\nn12650556\ta common wild cherry of eastern North America having small bitter black berries favored by birds\nn12650805\tthe fruit of the chokecherry tree\nn12650915\tchokecherry of western United States\nn12651229\tany of various thorny shrubs of the genus Pyracantha bearing small white flowers followed by hard red or orange-red berries\nn12651611\tOld World tree having sweet gritty-textured juicy fruit; widely cultivated in many varieties\nn12651821\ttree bearing edible fruit\nn12653218\tany prickly shrub of the genus Rubus bearing edible aggregate fruits\nn12653436\tstout-stemmed trailing shrub of New Zealand that scrambles over other growth\nn12653633\tEuropean trailing bramble with red berrylike fruits\nn12654227\tstiff shrubby blackberry of the eastern United States (Connecticut to Florida)\nn12654857\tcultivated hybrid bramble of California having large dark wine-red fruit with a flavor resembling raspberries\nn12655062\tred-fruited bramble native from Oregon to Baja California\nn12655245\tNorth American dewberry\nn12655351\tof eastern North America\nn12655498\tof southern North America\nn12655605\tof eastern North America\nn12655726\tcreeping European bramble bearing dewberries\nn12655869\twoody brambles bearing usually red but sometimes black or yellow fruits that separate from the receptacle when ripe and are rounder and smaller than blackberries\nn12656369\tthe common European raspberry; fruit red or orange\nn12656528\tred raspberry of North America\nn12656685\traspberry native to eastern North America having black thimble-shaped fruit\nn12656909\tlarge erect red-flowered raspberry of western North America having large pinkish-orange berries\nn12657082\twhite-flowered 
raspberry of western North America and northern Mexico with thimble-shaped orange berries\nn12657755\traspberry of China and Japan having pale pink flowers grown for ornament and for the small red acid fruits\nn12658118\tany of various trees of the genus Sorbus\nn12658308\tEurasian tree with orange-red berrylike fruits\nn12658481\tdecorative red berrylike fruit of a rowan tree\nn12658603\ta variety of mountain ash\nn12658715\tan ash of the western coast of North America\nn12658846\tmedium-sized European tree resembling the rowan but bearing edible fruit\nn12659064\tEuropean tree bearing edible small speckled brown fruit\nn12659356\tany rosaceous plant of the genus Spiraea; has sprays of small white or pink flowers\nn12659539\tshrub having copious small white flowers in spring\nn12660601\tany of numerous trees or shrubs or vines of the family Rubiaceae\nn12661045\tperennial East Indian creeping or climbing herb used for dye in the orient\nn12661227\tEurasian herb having small yellow flowers and red roots formerly an important source of the dye alizarin\nn12661538\tany plant of the genus Asperula\nn12662074\tsource of a tough elastic wood\nn12662379\tevergreen climbing shrub of southern Florida and West Indies grown for its racemes of fragrant white to creamy flowers followed by globose white succulent berries\nn12662772\tany of several small trees and shrubs native to the tropical Old World yielding coffee beans\nn12663023\tshrubby tree of northeastern tropical Africa widely cultivated in tropical or near tropical regions for its seed which form most of the commercial coffee\nn12663254\tsmall tree of West Africa\nn12663359\tnative to West Africa but grown in Java and elsewhere; resistant to coffee rust\nn12663804\tany of several trees of the genus Cinchona\nn12664005\tColombian tree; source of Cartagena bark (a cinchona bark)\nn12664187\tPeruvian shrub or small tree having large glossy leaves and cymes of fragrant yellow to green or red flowers; cultivated for its 
medicinal bark\nn12664469\tsmall tree of Ecuador and Peru having very large glossy leaves and large panicles of fragrant pink flowers; cultivated for its medicinal bark\nn12664710\tmedicinal bark of cinchona trees; source of quinine and quinidine\nn12665048\tany of several plants of the genus Galium\nn12665271\tOld World fragrant stoloniferous perennial having small white flowers and narrow leaves used as flavoring and in sachets; widely cultivated as a ground cover; in some classifications placed in genus Asperula\nn12665659\tNorth American stoloniferous perennial having white flowers; sometimes used as an ornamental\nn12665857\tcommon yellow-flowered perennial bedstraw; North America and Europe and Asia\nn12666050\tbedstraw with sweetish roots\nn12666159\tannual having the stem beset with curved prickles; North America and Europe and Asia\nn12666369\tEurasian herb with ample panicles of small white flowers; naturalized in North America\nn12666965\tevergreen shrub widely cultivated for its large fragrant waxlike white flowers and glossy leaves\nn12667406\tany tree of the genus Genipa bearing yellow flowers and edible fruit with a thick rind\nn12667582\ttree of the West Indies and northern South America bearing succulent edible orange-sized fruit\nn12667964\tany of several flowering tropical or subtropical shrubs of the genus Hamelia\nn12668131\thandsome shrub with showy orange to scarlet or crimson flowers; Florida and West Indies to Mexico and Brazil\nn12669803\tSouth African evergreen having hard tough wood\nn12670334\ta stout spreading or semi-climbing tropical shrub with round brownish-red warty fruit; Africa\nn12670758\tsmall deciduous tree of southern Africa having edible fruit\nn12670962\tshrubby tree of Madagascar occasionally cultivated for its edible apple-shaped fruit\nn12671651\tany of various deciduous or evergreen ornamental shrubs of the genus Abelia having opposite simple leaves and cymes of small white or pink or purplish flowers; Asia and 
Mexico\nn12672289\tbush honeysuckle of southeastern United States having large crowded clusters of sulfur-yellow flowers\nn12673588\tsimilar to the twinflower of northern Europe and Asia\nn12674120\tshrub or vine of the genus Lonicera\nn12674685\terect deciduous North American shrub with yellow-white flowers\nn12674895\tdeciduous climbing shrub with fragrant yellow-white flowers in axillary whorls\nn12675299\tclimbing deciduous shrub with fragrant yellow (later orange) flowers in terminal whorls; southeastern United States\nn12675515\ttwining deciduous shrub with hairy leaves and spikes of yellow-orange flowers; northeastern America\nn12675876\tan Asiatic trailing evergreen honeysuckle with half-evergreen leaves and fragrant white flowers turning yellow with age; has become a weed in some areas\nn12676134\ta variety of Japanese honeysuckle that grows like a vine; established as an aggressive escape in southeastern United States\nn12676370\ta grey deciduous honeysuckle shrub paired white flowers turning yellow; Japan\nn12676534\tEuropean twining honeysuckle with fragrant red and yellow-white flowers\nn12676703\tevergreen North American honeysuckle vine having coral-red or orange flowers\nn12677120\tcultivated Eurasian shrub with twin yellowish-white flowers and scarlet fruit\nn12677331\ta variety of fly honeysuckle\nn12677612\tdeciduous shrub of western North America having spikes of pink flowers followed by round white berries\nn12677841\tNorth American deciduous shrub cultivated for it abundant clusters of coral-red berrylike fruits\nn12678794\tshrub or small tree of western United States having white flowers and blue berries; fruit used in wines and jellies\nn12679023\tdwarf herbaceous elder of Europe having pink flowers and a nauseous odor\nn12679432\tcommon North American shrub or small tree\nn12679593\tEurasian shrub\nn12679876\tcoarse weedy American perennial herb with large usually perfoliate leaves and purple or dull red flowers\nn12680402\tdeciduous North 
American shrub or small tree having three-lobed leaves and red berries\nn12680652\tvigorous deciduous European treelike shrub common along waysides; red berries turn black\nn12680864\tdeciduous thicket-forming Old World shrub with clusters of white flowers and small bright red berries\nn12681376\tclosely related to southern arrow wood; grows in the eastern United States from Maine to Ohio and Georgia\nn12681579\tupright deciduous shrub having frosted dark-blue fruit; east and east central North America\nn12681893\tdeciduous shrub widely cultivated for its white or pink or red flowers\nn12682411\tany of several herbs of the genus Dipsacus native to the Old World having flower heads surrounded by spiny bracts\nn12682668\tteasel with lilac flowers native to Old World but naturalized in North America; dried flower heads used to raise a nap on woolen cloth\nn12682882\tsimilar to the common teasel and similarly used; widespread in Europe and North Africa and western Asia; naturalized in United States\nn12683096\tEuropean teasel with white to pink flowers; naturalized in United States\nn12683407\tany of various plants of the genus Scabiosa\nn12683571\tOld World annual having fragrant purple to deep crimson flower heads; naturalized in United States\nn12683791\tperennial having bluish-lilac flowers; introduced in the eastern United States\nn12684379\tNorth American annual plant with usually yellow or orange flowers; grows chiefly on wet rather acid soil\nn12685431\tany of numerous plants of the family Geraniaceae\nn12685831\tany of numerous geraniums of the genus Geranium\nn12686077\tcommon wild geranium of eastern North America with deeply parted leaves and rose-purple flowers\nn12686274\ttall perennial cranesbill with paired violet-blue axillary flowers; native to northern parts of Old World and naturalized in North America\nn12686496\tgeranium of western North America having branched clusters of white or pale pink flowers\nn12686676\ta sticky low herb with small 
reddish-purple flowers; widespread in the northern hemisphere\nn12686877\tgeranium of western North America having pinkish-purple flowers in open clusters\nn12687044\twestern geranium with small pink flowers; a common weed on lawns and in vacant lots\nn12687462\tany of several southern African geraniums having fragrant three-lobed to five-lobed leaves and pink flowers\nn12687698\tan upright geranium having scalloped leaves with a broad color zone inside the margin and white or pink or red flowers\nn12687957\ta commonly cultivated trailing South American plant with peltate leaves and rosy flowers\nn12688187\tgeranium with round fragrant leaves and small white flowers\nn12688372\ta common garden geranium with lemon-scented foliage\nn12688716\tany of various plants of the genus Erodium\nn12689305\tlow annual European herb naturalized in America; similar to alfilaria\nn12690653\tany of various tropical trees of the family Burseraceae yielding fragrant gums or resins that are burned as incense\nn12691428\tsmall tree or shrub of the southwestern United States having a spicy odor and odd-pinnate leaves and small clusters of white flowers\nn12691661\ttropical American tree yielding a reddish resin used in cements and varnishes\nn12692024\ttree yielding an aromatic gum resin burned as incense\nn12692160\tEast Indian tree yielding a resin used medicinally and burned as incense\nn12692521\tsmall evergreen tree of Africa and Asia; leaves have a strong aromatic odor when bruised\nn12692714\ttree of eastern Africa and Asia yielding myrrh\nn12693244\ttropical American tree\nn12693352\ttropical American tree\nn12693865\tany of several aquatic plants having a star-shaped rosette of floating leaves; America, Europe and Asia\nn12694486\ttropical American shrub bearing edible acid red fruit resembling cherries\nn12695144\tany of various tropical timber trees of the family Meliaceae especially the genus Swietinia valued for their hard yellowish- to reddish-brown wood that is readily 
worked and takes a high polish\nn12695975\ttree of northern India and China having purple blossoms and small inedible yellow fruits; naturalized in the southern United States as a shade tree\nn12696492\tlarge semi-evergreen tree of the East Indies; trunk exudes a tenacious gum; bitter bark used as a tonic; seeds yield an aromatic oil; sometimes placed in genus Melia\nn12696830\tseed of neem trees; source of pesticides and fertilizer and medicinal products\nn12697152\ttropical American tree yielding fragrant wood used especially for boxes\nn12697514\tEast Indian tree with valuable hard lustrous yellowish wood\nn12698027\tAfrican tree having rather lightweight cedar-scented wood varying in color from pink to reddish brown\nn12698435\tany of various timber trees of the genus Flindersia\nn12698598\ttall Australian timber tree yielding tough hard wood used for staves etc\nn12698774\tAustralian timber tree whose bark yields a poison\nn12699031\tAfrican tree having hard heavy odorless wood\nn12699301\tEast Indian tree bearing an edible yellow berry\nn12699922\tmahogany tree of West Indies\nn12700088\tan important Central American mahogany tree\nn12700357\tPhilippine timber tree having hard red fragrant wood\nn12702124\tlarge Costa Rican tree having light-colored wood suitable for cabinetry; similar to the African lepidobotrys in wood structure as well as in fruit and flowers and leaves and seeds; often classified in other families\nn12703190\tEurasian plant with heart-shaped trifoliate leaves and white purple-veined flowers\nn12703383\tSouth African bulbous wood sorrel with showy yellow flowers\nn12703557\tcreeping much-branched mat-forming weed; cosmopolitan\nn12703716\tshort-stemmed South African plant with bluish flowers\nn12703856\tperennial herb of eastern North America with palmately compound leaves and usually rose-purple flowers\nn12704041\tSouth American wood sorrel cultivated for its edible tubers\nn12704343\tEast Indian tree bearing deeply ridged yellow-brown 
fruit\nn12704513\tEast Indian evergreen tree bearing very acid fruit\nn12705013\tany of various plants of the genus Polygala\nn12705220\tperennial bushy herb of central and southern United States having white flowers with green centers and often purple crest; similar to Seneca snakeroot\nn12705458\tbog plant of pine barrens of southeastern United States having spikes of irregular yellow-orange flowers\nn12705698\tcommon trailing perennial milkwort of eastern North America having leaves like wintergreen and usually rosy-purple flowers with winged sepals\nn12705978\teastern North American plant having a terminal cluster of small white flowers and medicinal roots\nn12706410\tsmall European perennial with numerous branches having racemes of blue, pink or white flowers; formerly reputed to promote human lactation\nn12707199\tEuropean strong-scented perennial herb with grey-green bitter-tasting leaves; an irritant similar to poison ivy\nn12707781\tany of numerous tropical usually thorny evergreen trees of the genus Citrus having leathery evergreen leaves and widely cultivated for their juicy edible fruits having leathery aromatic rinds\nn12708293\tany citrus tree bearing oranges\nn12708654\tany of various common orange trees yielding sour or bitter fruit; used as grafting stock\nn12708941\tsmall tree with pear-shaped fruit whose oil is used in perfumery; Italy\nn12709103\tsoutheastern Asian tree producing large fruits resembling grapefruits\nn12709349\tthorny evergreen small tree or shrub of India widely cultivated for its large lemonlike fruits that have thick warty rind\nn12709688\tcitrus tree bearing large round edible fruit having a thick yellow rind and juicy somewhat acid pulp\nn12709901\tshrub or small tree having flattened globose fruit with very sweet aromatic pulp and thin yellow-orange to flame-orange rind that is loose and easily removed; native to southeastern Asia\nn12710295\ta variety of mandarin orange\nn12710415\ta variety of mandarin orange that is 
grown around the Mediterranean and in South Africa\nn12710577\ta variety of mandarin orange\nn12710693\tprobably native to southern China; widely cultivated as source of table and juice oranges\nn12710917\tlarge citrus tree having large sweet deep orange fruit that is easily peeled; widely cultivated in Florida\nn12711182\thybrid between grapefruit and mandarin orange; cultivated especially in Florida\nn12711398\thybrid between mandarin orange and lemon having very acid fruit with orange peel\nn12711596\ta small evergreen tree that originated in Asia but is widely cultivated for its fruit\nn12711817\tlemon tree having fruit with a somewhat insipid sweetish pulp\nn12711984\tany of various related trees bearing limes\nn12712320\tmore aromatic and acidic than oranges\nn12712626\tEurasian perennial herb with white flowers that emit flammable vapor in hot weather\nn12713063\tany of several trees or shrubs of the genus Fortunella bearing small orange-colored edible fruits with thick sweet-flavored skin and sour pulp\nn12713358\tshrub bearing round-fruited kumquats\nn12713521\tshrub bearing oval-fruited kumquats\nn12713866\tdeciduous tree of China and Manchuria having a turpentine aroma and handsome compound leaves turning yellow in autumn and deeply fissured corky bark\nn12714254\tsmall fast-growing spiny deciduous Chinese orange tree bearing sweetly scented flowers and decorative but inedible fruit: used as a stock in grafting and for hedges\nn12714755\tany of a number of trees or shrubs of the genus Zanthoxylum having spiny branches\nn12714949\tsmall deciduous aromatic shrub (or tree) having spiny branches and yellowish flowers; eastern North America\nn12715195\tdensely spiny ornamental of southeastern United States and West Indies\nn12715914\tany of various trees or shrubs of the family Simaroubaceae having wood and bark with a bitter taste\nn12716400\ttree of the Amazon valley yielding a light brittle timber locally regarded as resistant to insect 
attack\nn12716594\tmedium to large tree of tropical North and South America having odd-pinnate leaves and long panicles of small pale yellow flowers followed by scarlet fruits\nn12717072\tany of several deciduous Asian trees of the genus Ailanthus\nn12717224\tdeciduous rapidly growing tree of China with foliage like sumac and sweetish fetid flowers; widely planted in United States as a street tree because of its resistance to pollution\nn12717644\tAfrican tree with edible yellow fruit resembling mangos; valued for its oil-rich seed and hardy green wood that resists termites\nn12718074\tsmall African deciduous tree with spreading crown having leaves clustered toward ends of branches and clusters of creamy flowers resembling lilacs\nn12718483\tWest Indian tree yielding the drug Jamaica quassia\nn12718995\thandsome South American shrub or small tree having bright scarlet flowers and yielding a valuable fine-grained yellowish wood; yields the bitter drug quassia from its wood and bark\nn12719684\tany tropical American plant of the genus Tropaeolum having pungent juice and long-spurred yellow to red flowers\nn12719944\tstrong-growing annual climber having large flowers of all shades of orange from orange-red to yellowish orange and seeds that are pickled and used like capers\nn12720200\tannual with deep yellow flowers smaller than the common garden nasturtium\nn12720354\ta climber having flowers that are the color of canaries\nn12721122\tperennial shrub of the eastern Mediterranean region and southwestern Asia having flowers whose buds are used as capers\nn12721477\tSouth American tree of dry interior regions of Argentina and Paraguay having resinous heartwood used for incense\nn12722071\tsmall evergreen tree of Caribbean and southern Central America to northern South America; a source of lignum vitae wood, hardest of commercial timbers, and a medicinal resin\nn12723062\tdesert shrub of southwestern United States and New Mexico having persistent resinous aromatic 
foliage and small yellow flowers\nn12723610\ttropical annual procumbent poisonous subshrub having fruit that splits into five spiny nutlets; serious pasture weed\nn12724942\tany of numerous deciduous trees and shrubs of the genus Salix\nn12725521\tany of various willows having pliable twigs used in basketry and furniture\nn12725738\tlarge willow tree of Eurasia and North Africa having greyish canescent leaves and grey bark\nn12725940\tNorth American willow with greyish silky pubescent leaves that usually blacken in drying\nn12726159\tEuropean willow having greyish leaves and yellow-orange twigs used in basketry\nn12726357\tEurasian willow tree having greyish leaves and ascending branches\nn12726528\tlow creeping shrub of Arctic Europe and America\nn12726670\twillow with long drooping branches and slender leaves native to China; widely cultivated as an ornamental\nn12726902\thybrid willow usually not strongly weeping in habit\nn12727101\tsmall willow of eastern North America having greyish leaves and silky catkins that come before the leaves\nn12727301\tany of several Old World shrubby broad-leaved willows having large catkins; some are important sources for tanbark and charcoal\nn12727518\tmuch-branched Old World willow having large catkins and relatively large broad leaves\nn12727729\twillow of the western United States with leaves like those of peach or almond trees\nn12727960\tOld World willow with light green leaves cultivated for use in basketry\nn12728164\tNorth American shrub with whitish canescent leaves\nn12728322\tlarge willow tree with stiff branches that are easily broken\nn12728508\tslender shrubby willow of dry areas of North America\nn12728656\twidely distributed boreal shrubby willow with partially underground creeping stems and bright green glossy leaves\nn12728864\tEurasian shrubby willow with whitish tomentose twigs\nn12729023\tshrubby willow of the western United States\nn12729164\tcommon North American shrub with shiny lanceolate 
leaves\nn12729315\tNorth American shrubby willow having dark bark and linear leaves growing close to streams and lakes\nn12729521\tEuropean willow tree with shining leathery leaves; widely naturalized in the eastern United States\nn12729729\tEurasian osier having reddish or purple twigs and bark rich in tannin\nn12729950\tsmall shrubby tree of eastern North America having leaves exuding an odor of balsam when crushed\nn12730143\tsmall trailing bush of Europe and Asia having straggling branches with silky green leaves of which several varieties are cultivated\nn12730370\tsmall shrubby tree of western North America (Alaska to Oregon)\nn12730544\twillow shrub of dry places in the eastern United States having long narrow leaves canescent beneath\nn12730776\tdwarf prostrate mat-forming shrub of Arctic and alpine regions of North America and Greenland having deep green elliptic leaves that taper toward the base\nn12731029\twillow with long flexible twigs used in basketry\nn12731401\tany of numerous trees of north temperate regions having light soft wood and flowers borne in catkins\nn12731835\tpoplar of northeastern North America with broad heart-shaped leaves\nn12732009\ta poplar that is widely cultivated in the United States; has white bark and leaves with whitish undersurfaces\nn12732252\tlarge rapidly growing poplar with faintly lobed dentate leaves grey on the lower surface; native to Europe but introduced and naturalized elsewhere\nn12732491\tlarge European poplar\nn12732605\tdistinguished by its columnar fastigiate shape and erect branches\nn12732756\tany of several North American trees of the genus Populus having a tuft of cottony hairs on the seed\nn12732966\ta common poplar of eastern and central United States; cultivated in United States for its rapid growth and luxuriant foliage and in Europe for timber\nn12733218\tcottonwood of western North America with dark green leaves shining above and rusty or silvery beneath\nn12733428\tNorth American poplar with large 
rounded scalloped leaves and brownish bark and wood\nn12733647\tany of several trees of the genus Populus having leaves on flattened stalks so that they flutter in the lightest wind\nn12733870\tOld World aspen with a broad much-branched crown; northwestern Europe and Siberia to North Africa\nn12734070\tslender aspen native to North America\nn12734215\taspen with a narrow crown; eastern North America\nn12735160\tparasitic tree of Indonesia and Malaysia having fragrant close-grained yellowish heartwood with insect repelling properties and used, e.g., for making chests\nn12736603\tAustralian tree with edible flesh and edible nutlike seed\nn12736999\tshrub of southeastern United States parasitic on roots of hemlocks having sparse spikes of greenish flowers and pulpy drupes\nn12737383\tin some classification includes Viscaceae: parasitic or hemiparasitic shrublets or shrubs or small trees of tropical and temperate regions; attach to hosts by haustoria\nn12737898\tshrub of central and southeastern Europe; partially parasitic on beeches, chestnuts and oaks\nn12738259\tsmall herb with scalelike leaves on reddish-brown stems and berrylike fruits; parasitic on spruce and larch trees\nn12739332\tOld World parasitic shrub having branching greenish stems with leathery leaves and waxy white glutinous berries; the traditional mistletoe of Christmas\nn12739966\tthe traditional mistletoe of Christmas in America: grows on deciduous trees and can severely weaken the host plant\nn12740967\ta small Hawaiian tree with hard dark wood\nn12741222\ta tree of the genus Sapindus whose fruit is rich in saponin\nn12741586\tdeciduous tree of southwestern United States having pulpy fruit containing saponin\nn12741792\tevergreen of tropical America having pulpy fruit containing saponin which was used as soap by Native Americans\nn12742290\twidely cultivated in tropical and subtropical regions for its fragrant flowers and colorful fruits; introduced in Jamaica by William 
Bligh\nn12742741\ttendril-climbing vine\nn12742878\therbaceous vine of tropical America and Africa\nn12743009\twoody perennial climbing plant with large ornamental seed pods that resemble balloons; tropical India and Africa and America\nn12743352\ttree of southeastern Asia to Australia grown primarily for its sweet edible fruit resembling litchi nuts; sometimes placed in genera Euphorbia or Nephelium\nn12743823\tany of various tree of the genus Harpullia\nn12743976\tfast-growing tree of India and East Indies yielding a wood used especially for building\nn12744142\tAustralian tree yielding a variegated tulipwood\nn12744387\tChinese tree cultivated especially in Philippines and India for its edible fruit; sometimes placed in genus Nephelium\nn12744850\ttropical American tree bearing a small edible fruit with green leathery skin and sweet juicy translucent pulp\nn12745386\tMalayan tree bearing spiny red fruit\nn12745564\tEast Indian fruit tree bearing fruit similar to but sweeter than that of the rambutan\nn12746884\tany plant of the genus Pachysandra; low-growing evergreen herbs or subshrubs having dentate leaves and used as ground cover\nn12747120\tlow semi-evergreen perennial herb having small spikes of white or pinkish flowers; native to southern United States but grown elsewhere\nn12748248\ttwining shrub of North America having yellow capsules enclosing scarlet seeds\nn12749049\tany shrubby trees or woody vines of the genus Euonymus having showy usually reddish berries\nn12749456\tbushy deciduous shrub with branches having thin wide corky longitudinal wings; brilliant red in autumn; northeastern Asia to central China\nn12749679\tdeciduous shrub having purple capsules enclosing scarlet seeds\nn12749852\tupright deciduous plant with crimson pods and seeds; the eastern United States from New York to Florida and Texas\nn12750076\tbroad and bushy Asiatic twining shrub with pinkish fruit; many subspecies or varieties\nn12750767\tshrub or small tree of southeastern 
United States to West Indies and Brazil; grown for the slender racemes of white flowers and orange and crimson foliage\nn12751172\ttree of low-lying coastal areas of southeastern United States having glossy leaves and racemes of fragrant white flowers\nn12751675\ta low evergreen shrub with small purple flowers and black berrylike fruit\nn12752205\tany of numerous trees or shrubs of the genus Acer bearing winged seeds in pairs; north temperate zone\nn12753007\ta common North American maple tree; five-lobed leaves are light green above and silvery white beneath; source of hard close-grained but brittle light-brown wood\nn12753245\tmaple of eastern and central North America having three-lobed to five-lobed leaves and hard close-grained wood much used for cabinet work especially the curly-grained form; sap is chief source of maple syrup and maple sugar; many subspecies\nn12753573\tmaple of eastern and central America; five-lobed leaves turn scarlet and yellow in autumn\nn12753762\tmaple of eastern North America with striped bark and large two-lobed leaves clear yellow in autumn\nn12754003\tmaple of western North America having large 5-lobed leaves orange in autumn\nn12754174\tsmall maple of northwestern North America\nn12754311\tsmall shrubby maple of eastern North America; scarlet in autumn\nn12754468\tsmall maple of northwestern North America having prostrate stems that root freely and form dense thickets\nn12754648\tshrubby Eurasian maple often used as a hedge\nn12754781\ta large Eurasian maple tree naturalized in North America; five-lobed leaves yellow in autumn; cultivated in many varieties\nn12754981\tEurasian maple tree with pale grey bark that peels in flakes like that of a sycamore tree; leaves with five ovate lobes yellow in autumn\nn12755225\tcommon shade tree of eastern and central United States\nn12755387\tmaple of the Pacific coast of the United States; fruits are white when mature\nn12755559\tsmall shrubby Japanese plant with leaves having 5 to 7 
acuminate lobes; yellow in autumn\nn12755727\tleaves deeply incised and bright red in autumn; Japan\nn12755876\tornamental shrub or small tree of Japan and Korea with deeply incised leaves; cultivated in many varieties\nn12756457\tany tree or shrub of the genus Ilex having red berries and shiny evergreen leaves with prickly edges\nn12757115\tdense rounded evergreen shrub of China having spiny leaves; widely cultivated as an ornamental\nn12757303\tdeciduous shrub of southeastern and central United States\nn12757458\tevergreen holly of eastern North America with oblong leathery leaves and small black berries\nn12757668\tSouth American holly; leaves used in making a drink like tea\nn12757816\tan evergreen tree\nn12757930\tan evergreen shrub\nn12758014\tan evergreen shrub\nn12758099\tan evergreen shrub\nn12758176\ta holly tree\nn12758250\ta holly shrub\nn12758325\ta holly tree\nn12758399\ta holly shrub\nn12758471\ta holly shrub\nn12758555\ta holly shrub\nn12759273\ttropical American evergreen tree bearing kidney-shaped nuts that are edible only when roasted\nn12759668\ttall tropical American timber tree especially abundant in eastern Brazil; yields hard strong durable zebrawood with straight grain and dark strips on a pinkish to yellowish ground; widely used for veneer and furniture and heavy construction\nn12760539\tOld World shrub having large plumes of yellowish feathery flowers resembling puffs of smoke\nn12760875\tsmall aromatic evergreen shrub of California having paniculate leaves and whitish berries; in some classifications included in genus Rhus\nn12761284\tlarge evergreen tropical tree cultivated for its large oval fruit\nn12761702\tsmall tree of southern Europe and Asia Minor bearing small hard-shelled nuts\nn12761905\ta Mediterranean tree yielding Chian turpentine\nn12762049\tan evergreen shrub of the Mediterranean region that is cultivated for its resin\nn12762405\tevergreen of Australia yielding a dark yellow wood\nn12762896\ta shrub or tree of the genus 
Rhus (usually limited to the non-poisonous members of the genus)\nn12763529\tcommon nonpoisonous shrub of eastern North America with waxy compound leaves and green paniculate flowers followed by red berries\nn12764008\tevergreen shrub of southeastern United States with spikes of reddish yellow flowers and glandular hairy fruits\nn12764202\tdeciduous shrubby tree or eastern North America with compound leaves that turn brilliant red in fall and dense panicles of greenish yellow flowers followed by crimson acidic berries\nn12764507\tdeciduous shrub of California with unpleasantly scented usually trifoliate leaves and edible fruit\nn12764978\tsmall resinous tree or shrub of Brazil\nn12765115\tsmall Peruvian evergreen with broad rounded head and slender pendant branches with attractive clusters of greenish flowers followed by clusters of rose-pink fruits\nn12765402\tsmall Brazilian evergreen resinous tree or shrub having dark green leaflets and white flowers followed by bright red fruit; used as a street tree and lawn specimen\nn12765846\ttropical American tree having edible yellow fruit\nn12766043\tcommon tropical American shrub or small tree with purplish fruit\nn12766595\tsmooth American swamp shrub with pinnate leaves and greenish flowers followed by greenish white berries; yields an irritating oil\nn12766869\tclimbing plant common in eastern and central United States with ternate leaves and greenish flowers followed by white berries; yields an irritating oil that causes a rash on contact\nn12767208\tpoisonous shrub of the Pacific coast of North America that causes a rash on contact\nn12767423\tpoisonous shrub of southeastern United States causing a rash on contact\nn12767648\tsmall Asiatic tree yielding a toxic exudate from which lacquer is obtained\nn12768369\ttree having palmate leaves and large clusters of white to red flowers followed by brown shiny inedible seeds\nn12768682\tthe inedible nutlike seed of the horse chestnut\nn12768809\ta tall and often 
cultivated buckeye of the central United States\nn12768933\ta buckeye with scaly grey bark that is found in the central United States\nn12769065\ta spreading shrub with pink flowers; found in southeastern United States\nn12769219\ta shrub buckeye of southern United States\nn12769318\ta buckeye marked by different colors or tints\nn12770529\ttropical tree of southern Asia having hard dark-colored heartwood used in cabinetwork\nn12770892\tlarge Asiatic tree having hard marbled zebrawood\nn12771085\thard marbled wood\nn12771192\tany of several tropical trees of the genus Diospyros\nn12771390\tsmall deciduous Asiatic tree bearing large red or orange edible astringent fruit\nn12771597\tmedium-sized tree of dry woodlands in the southern and eastern United States bearing yellow or orange very astringent fruit that is edible when fully ripe\nn12771890\tan Asiatic persimmon tree cultivated for its small yellow or purplish-black edible fruit much valued by Afghan tribes\nn12772753\tany shrub or small tree of the genus Bumelia\nn12772908\tshrubby thorny deciduous tree of southeastern United States with white flowers and small black drupaceous fruit\nn12773142\tdeciduous tree of southeastern United States and Mexico\nn12773651\tevergreen tree of West Indies and Central America having edible purple fruit star-shaped in cross section and dark green leaves with golden silky undersides\nn12773917\ttropical American timber tree with dark hard heavy wood and small plumlike purple fruit\nn12774299\ta tropical hardwood tree yielding balata gum and heavy red timber\nn12774641\tlarge tropical American evergreen yielding chicle gum and edible fruit; sometimes placed in genus Achras\nn12775070\tone of several East Indian trees yielding gutta-percha\nn12775393\tone of several East Indian trees yielding gutta-percha\nn12775717\ttropical tree of Florida and West Indies yielding edible fruit\nn12775919\ttropical American tree having wood like mahogany and sweet edible egg-shaped fruit; in 
some classifications placed in the genus Calocarpum\nn12776558\tsmall yellowwood tree of southern United States having small fragrant white flowers; leaves and bark yield a yellow dye\nn12776774\tdeciduous shrub of eastern Asia bearing decorative bright blue fruit\nn12777436\tany shrub or small tree of the genus Styrax having fragrant bell-shaped flowers that hang below the dark green foliage\nn12777680\tsmall tree native to Japan\nn12777778\tshrubby tree of China and Japan\nn12777892\tstyrax of southwestern United States; a threatened species\nn12778398\tmedium-sized tree of West Virginia to Florida and Texas\nn12778605\tplants adapted to attract and capture and digest primarily insects but also other small animals\nn12779603\tany of several insectivorous herbs of the order Sarraceniales\nn12779851\tperennial bog herb having dark red flowers and decumbent broadly winged pitchers forming a rosette; of northeastern North America and naturalized in Europe especially Ireland\nn12780325\tyellow-flowered pitcher plant of southeastern United States having trumpet-shaped leaves with the orifice covered with an arched hood\nn12780563\tpitcher plant of southeastern United States having erect yellow trumpet-shaped pitchers with wide mouths and erect lids\nn12781940\tany of several tropical carnivorous shrubs or woody herbs of the genus Nepenthes\nn12782530\tany of various bog plants of the genus Drosera having leaves covered with sticky hairs that trap and digest insects; cosmopolitan in distribution\nn12782915\tcarnivorous plant of coastal plains of the Carolinas having sensitive hinged marginally bristled leaf blades that close and entrap insects\nn12783316\tfloating aquatic carnivorous perennial of central and southern Europe, Africa, Asia, Australia having whorls of 6 to 9 leaves ending in hinged lobes for capturing e.g. 
water fleas\nn12783730\tperennial of dry habitats whose leaves have glandular hairs that secrete adhesive and digestive fluid for capture and digestion of insects; Portugal, southern Spain and Morocco\nn12784371\teither of 2 species of the genus Roridula; South African viscid perennial low-growing woody shrubs\nn12784889\ta carnivorous perennial herb having a green pitcher and hinged lid both with red edges; western Australia\nn12785724\tany of various plants of the genus Sedum\nn12785889\tany of various northern temperate plants of the genus Sedum having fleshy leaves and red or yellow or white flowers\nn12786273\tEurasian mountain plant with fleshy pink-tipped leaves and a cluster of yellow flowers\nn12786464\tperennial northern temperate plant with toothed leaves and heads of small purplish-white flowers\nn12786836\tperennial subshrub of Tenerife having leaves in rosettes resembling pinwheels\nn12787364\tAustralian tree or shrub with red flowers; often used in Christmas decoration\nn12788854\tdeciduous shrub bearing roundheaded flower clusters opening green and aging to pink or blue\nn12789054\tdeciduous shrub or small tree with pyramidal flower clusters\nn12789554\tCalifornia evergreen shrub having glossy opposite leaves and terminal clusters of a few fragrant white flowers\nn12789977\twoody climber of southeastern United States having white flowers in compound terminal clusters\nn12790430\tany of various shrubs of the genus Deutzia having usually toothed opposite leaves and shredding bark and white or pink flowers in loose terminal clusters\nn12791064\tany of various chiefly deciduous ornamental shrubs of the genus Philadelphus having white sweet-scented flowers, single or in clusters; widely grown in temperate regions\nn12791329\tlarge hardy shrub with showy and strongly fragrant creamy-white flowers in short terminal racemes\nn12793015\tany of various plants of the genus Saxifraga\nn12793284\ttufted evergreen perennial having ciliate leaves and yellow 
corymbose flowers often spotted orange\nn12793494\trosette-forming perennial having compact panicles of white flowers; Europe\nn12793695\ttufted or mat-forming perennial of mountains of Europe; cultivated for its white flowers\nn12793886\tsaxifrage having loose clusters of white flowers on hairy stems growing from a cluster of basal leaves; moist slopes of western North America\nn12794135\tplants forming dense cushions with bright reddish-lavender flowers; rocky areas of Europe and Asia and western North America\nn12794367\tsmall often mat-forming alpine plant having small starlike white flowers; Europe\nn12794568\teastern Asiatic saxifrage with racemes of small red-and-white flowers; spreads by numerous creeping stolons\nn12794985\tany plant of the genus Astilbe having compound leaves and showy panicles of tiny colorful flowers\nn12795209\tNorth American astilbe with panicles of creamy white flowers\nn12795352\tmat-forming evergreen Asiatic plant with finely cut leaves and small pink to burgundy flowers; grown as ground cover\nn12795555\ta Japanese shrub that resembles members of the genus Spiraea; widely cultivated in many varieties for its dense panicles of flowers in many colors; often forced by florists for Easter blooming\nn12796022\tany plant of the genus Bergenia; valued as an evergreen ground cover and for the spring blossoms\nn12796385\tplant with leaves mostly at the base and openly branched clusters of small white flowers; western North America\nn12796849\tany of various low aquatic herbs of the genus Chrysosplenium\nn12797368\trhizomatous perennial herb with large dramatic peltate leaves and white to bright pink flowers in round heads on leafless stems; colonizes stream banks in the Sierra Nevada in California\nn12797860\tChilean evergreen shrub having delicate spikes of small white flowers\nn12798284\tany of several herbs of the genus Heuchera\nn12798910\tperennial plant of the western United States having bright red flowers in feathery spikes; used 
as an ornamental\nn12799269\tplant with basal leathery elliptic leaves and erect leafless flower stalks each bearing a dense roundish cluster of tiny white flowers; moist places of northwestern North America to Oregon and Idaho\nn12799776\tCalifornia perennial herb cultivated for its racemose white flowers with widely spreading petals; sometimes placed in genus Tellima\nn12800049\tplant with mostly basal leaves and slender open racemes of white or pale pink flowers; prairies and open forest of northwestern United States to British Columbia and Alberta\nn12800586\tany of various rhizomatous perennial herbs of the genus Mitella having a capsule resembling a bishop's miter\nn12801072\tsmall plant with leaves in a basal cluster and tiny greenish flowers in slender racemes; northwestern North America to California and Colorado\nn12801520\tany of various usually evergreen bog plants of the genus Parnassia having broad smooth basal leaves and a single pale flower resembling a buttercup\nn12801781\tplant having ovate leaves in a basal rosette and white starlike flowers netted with green\nn12801966\tbog plant with broadly heart-shaped basal leaves and cream-colored or white saucer-shaped flowers with fringed petals; west of Rocky Mountains from Alaska to New Mexico\nn12803226\tplant growing in clumps with mostly basal leaves and cream-colored or pale pink fringed flowers in several long racemes; Alaska to coastal central California and east to Idaho\nn12803754\tstoloniferous white-flowered spring-blooming woodland plant\nn12803958\tplant with tiny white flowers hanging in loose clusters on leafy stems; moist woods from Alaska to central California and east to Montana\nn12804352\tvigorous perennial herb with flowers in erect racemes and having young plants develop at the junction of a leaf blade and the leafstalk\nn12805146\tany of various deciduous shrubs of the genus Ribes bearing currants\nn12805561\twidely cultivated current bearing edible black aromatic 
berries\nn12805762\tgarden currant bearing small white berries\nn12806015\tspiny Eurasian shrub having greenish purple-tinged flowers and ovoid yellow-green or red-purple berries\nn12806732\tany of several trees of the genus Platanus having thin pale bark that scales off in small plates and lobed leaves and ball-shaped heads of fruits\nn12807251\tvery large fast-growing tree much planted as a street tree\nn12807409\tvery large spreading plane tree of eastern and central North America to Mexico\nn12807624\tlarge tree of southeastern Europe to Asia Minor\nn12807773\ttall tree of Baja California having deciduous bark and large alternate palmately lobed leaves and ball-shaped clusters of flowers\nn12808007\tmedium-sized tree of Arizona and adjacent regions having deeply lobed leaves and collective fruits in groups of 3 to 5\nn12809868\terect or spreading perennial of the eastern United States\nn12810007\tperennial erect herb with white flowers; circumboreal\nn12810151\ttall herb of the Rocky Mountains having sticky leaves and an offensive smell\nn12810595\tany polemoniaceous plant of the genus Phlox; chiefly North American; cultivated for their clusters of flowers\nn12811027\tlow tufted perennial phlox with needlelike evergreen leaves and pink or white flowers; native to United States and widely cultivated as a ground cover\nn12811713\tsmall California annual with white flowers\nn12812235\tany plant of the genus Acanthus having large spiny leaves and spikes or white or purplish flowers; native to Mediterranean region but widely cultivated\nn12812478\twidely cultivated southern European acanthus with whitish purple-veined flowers\nn12812801\ttropical Old World shrub having purple or red tubular flowers and leaf markings resembling the profile of a human face\nn12813189\ttropical African climbing plant having yellow flowers with a dark purple center\nn12814643\ttree of the genus Catalpa with large leaves and white flowers followed by long slender pods\nn12814857\tcatalpa 
tree of southern United States\nn12814960\tcatalpa tree of central United States\nn12815198\tevergreen shrubby tree resembling a willow of dry regions of southwestern North America having showy purplish flowers and long seed pods\nn12815668\ttropical American evergreen that produces large round gourds\nn12815838\tround gourd of the calabash tree\nn12816508\thairy blue-flowered European annual herb long used in herbal medicine and eaten raw as salad greens or cooked like spinach\nn12816942\tannual of western United States with coiled spikes of yellow-orange coiled flowers\nn12817464\tany of various Old World herbs of the genus Anchusa having one-sided clusters of trumpet-shaped flowers\nn12817694\tperennial or biennial herb cultivated for its delicate usually blue flowers\nn12817855\tanchusa of southern Africa having blue flowers with white throats\nn12818004\tanchusa of southern Africa having blue to red-purple flowers\nn12818346\tlarge tropical American tree of the genus Cordia grown for its abundant creamy white flowers and valuable wood\nn12818601\ttropical American timber tree\nn12818966\tbiennial east Asian herb grown for its usually bright blue flowers\nn12819141\tbiennial shrub of Europe and western Asia having coarse tongue-shaped leaves and dark reddish-purple flowers\nn12819354\tperennial shrub of North America having coarse tongue-shaped leaves and pale-blue to purple flowers\nn12819728\ta coarse prickly European weed with spikes of blue flowers; naturalized in United States\nn12820113\tEurasian and North American plants having small prickly nutlets that stick to clothing\nn12820669\tEuropean perennial branching plant; occurs in hedgerows and at the edge of woodlands\nn12820853\tperennial plant of eastern North America having hairy foliage yielding a red or yellow pigment\nn12821505\tsmooth erect herb of eastern North America having entire leaves and showy blue flowers that are pink in bud\nn12821895\tsmall biennial to perennial herb of Europe, northern 
Africa and western Asia having blue, purple or white flowers\nn12822115\tsmall perennial herb having bright blue or white flowers\nn12822466\tany of several North American perennial herbs with hairy foliage and small yellowish or greenish flowers\nn12822769\tperennial herbs of Europe and Iran; make rapidly growing groundcover for shaded areas\nn12822955\tEuropean herb having small white, pink or purple flowers; naturalized as a weed in North America\nn12823717\tany of numerous plants of the genus Convolvulus\nn12823859\tany of several vines of the genera Convolvulus and Calystegia having a twining habit\nn12824053\tweakly climbing European perennial with white or pink flowers; naturalized in North America and an invasive weed\nn12824289\ttwining plant of Asia Minor having cream-colored to purple flowers and long thick roots yielding a cathartic resin\nn12824735\tany of various twining shrubs of the genus Argyreia having silvery leaves and showy purple flowers\nn12825497\ta leafless annual parasitic vine of the genus Cuscuta having whitish or yellow filamentous stems; obtain nourishment through haustoria\nn12826143\ta creeping perennial herb with hairy stems and orbicular to reniform leaves and small white to greenish flowers; used as a grass substitute in warm regions\nn12827270\ttropical American annual climber having red (sometimes white) flowers and finely dissected leaves; naturalized in United States and elsewhere\nn12827537\tpantropical climber having white fragrant nocturnal flowers\nn12827907\ttropical American prostrate or climbing herbaceous perennial having an enormous starchy root; sometimes held to be source of the sweet potato\nn12828220\tannual herb having scarlet flowers; the eastern United States\nn12828379\ta morning glory with long roots of western United States\nn12828520\ttropical American morning glory\nn12828791\tannual Old World tropical climbing herb distinguished by wide color range and frilled or double flowers\nn12828977\thybrid from 
Ipomoea nil\nn12829582\tany of numerous tropical or subtropical small shrubs or treelets or epiphytic vines of the family Gesneriaceae: African violet; Cape primroses; gloxinia\nn12829975\tany plant of the genus Gesneria\nn12830222\tany plant of the genus Achimenes having showy bell-shaped flowers that resemble gloxinias\nn12830568\ta plant of the genus Aeschynanthus having somewhat red or orange flowers and seeds having distinctive hairs at base and apex\nn12831141\tlow-growing creeping perennial of Central America having deeply fringed white flowers; sometimes placed in genus Episcia\nn12831535\ttropical plant having thick hairy somewhat toothed leaves and solitary or clustered yellow to scarlet flowers; many cultivated for their flowers and ornamental foliage\nn12831932\tany plant of the genus Episcia; usually creeping and stoloniferous and of cascading habit; grown for their colorful foliage and flowers\nn12832315\tany of several plants of the genera Gloxinia or Sinningia (greenhouse gloxinias) having showy bell-shaped flowers\nn12832538\therb of Colombia to Peru having pale purple flowers\nn12832822\tshrubby herb cultivated for their soft velvety foliage and showy scarlet flowers\nn12833149\ttropical African plant cultivated as a houseplant for its violet or white or pink flowers\nn12833985\tany of various plants of the genus Streptocarpus having leaves in a basal rosette and flowers like primroses\nn12834190\tany of various African plants of the genus Streptocarpus widely cultivated especially as houseplants for their showy blue or purple flowers\nn12834798\tany of several plants of the genus Hydrophyllum\nn12834938\tshowy perennial herb with white flowers; leaves sometimes used as edible greens in southeastern United States\nn12835331\tviscid herb of arid or desert habitats of southwestern United States having pendulous yellow flowers\nn12835766\tviscid evergreen shrub of western United States with white to deep lilac flowers; the sticky aromatic leaves are 
used in treating bronchial and pulmonary illnesses\nn12836212\tany plant of the genus Nemophila\nn12836337\tdelicate California annual having blue flowers marked with dark spots\nn12836508\tCalifornia annual having white flowers with a deep purple blotch on each petal\nn12836862\tany plant of the genus Phacelia\nn12837052\tannual of southern California with intricately branched stems and lax cymes of aromatic deep blue bell-shaped flowers\nn12837259\tdesert plant of southern California with blue or violet tubular flowers in terminal racemes\nn12837466\thairy annual of California to Mexico with crowded cymes of small blue to lilac or mauve flowers\nn12837803\tstraggling California annual herb with deep purple or violet flowers; sometimes placed in genus Nemophila\nn12839574\tfragrant European mint having clusters of small violet-and-white flowers; naturalized especially in eastern North America\nn12839979\tany of a number of aromatic plants of the genus Agastache\nn12840168\terect perennial with stout stems and yellow-green flowers; southern Canada and southeastern United States\nn12840362\tmuch-branched North American herb with an odor like fennel\nn12840502\terect perennial of Mexico having rose to crimson flowers\nn12840749\tany of various low-growing annual or perennial evergreen herbs native to Eurasia; used for ground cover\nn12841007\tlow rhizomatous European carpeting plant having spikes of blue flowers; naturalized in parts of United States\nn12841193\tupright rhizomatous perennial with bright blue flowers; southern Europe\nn12841354\tEuropean evergreen carpeting perennial\nn12842302\tAmerican herb of genus Blephilia with more or less hairy leaves and clusters of purplish or bluish flowers\nn12842519\ta variety of wood mint\nn12842642\ta variety of wood mint\nn12842887\tperennial aromatic herbs growing in hedgerows or scrub or open woodlands from western Europe to central Asia and in North America\nn12843144\tmint-scented perennial of central and southern 
Europe\nn12843316\taromatic herb with large pink flowers; southern and southeastern Europe; Anatolia; northern Iran\nn12843557\tlow-growing strongly aromatic perennial herb of southern Europe to Great Britain; naturalized in United States\nn12843970\taromatic herb having heads of small pink or whitish flowers; widely distributed in United States, Europe and Asia\nn12844409\terect perennial strong-scented with serrate pointed leaves and a loose panicle of yellowish flowers; the eastern United States\nn12844939\tany of various Old World tropical plants of the genus Coleus having multicolored decorative leaves and spikes of blue flowers\nn12845187\tan aromatic fleshy herb of India and Ceylon to South Africa; sometimes placed in genus Plectranthus\nn12845413\tperennial aromatic herb of southeastern Asia having large usually bright-colored or blotched leaves and spikes of blue-violet flowers; sometimes placed in genus Solenostemon\nn12845908\tsmall shrub of Apalachicola River area in southeastern United States having highly aromatic pinkish flowers; a threatened species\nn12846335\tAmerican herb having sharply serrate lanceolate leaves and spikes of blue to violet flowers\nn12846690\tany of various aromatic herbs of the genus Elsholtzia having blue or purple flowers in one-sided spikes\nn12847008\tcoarse bristly Eurasian plant with white or reddish flowers and foliage resembling that of a nettle; common as a weed in United States\nn12847374\ttrailing European aromatic plant of the mint family having rounded leaves and small purplish flowers often grown in hanging baskets; naturalized in North America; sometimes placed in genus Nepeta\nn12847927\terect hairy branching American herb having purple-blue flowers; yields an essential oil used as an insect repellent and sometimes in folk medicine\nn12848499\ta European mint with aromatic and pungent leaves used in perfumery and as a seasoning in cookery; often cultivated as a remedy for bruises; yields hyssop 
oil\nn12849061\tany of various plants of the genus Lamium having clusters of small usually purplish flowers with two lips\nn12849279\tEuropean dead nettle with white flowers\nn12849416\tEurasian plant having toothed leaves and small two-lipped white or purplish-red flowers\nn12849952\taromatic Mediterranean shrub widely cultivated for its lilac flowers which are dried and used in sachets\nn12850168\tshrubby greyish lavender of southwestern Europe having usually reddish-purple flowers\nn12850336\tMediterranean plant with pale purple flowers that yields spike lavender oil\nn12850906\trelatively nontoxic South African herb smoked like tobacco\nn12851094\tpantropical herb having whorls of striking lipped flowers; naturalized in United States\nn12851469\tbitter Old World herb of hedgerows and woodland margins having toothed leaves and white or pale pink flowers\nn12851860\tCalifornia plant with woolly stems and leaves and large white flowers\nn12852234\ta mildly narcotic and astringent aromatic herb having small whitish flowers; eastern United States\nn12852428\taromatic perennial herb of United States\nn12852570\thairy Eurasian herb with two-lipped white flowers\nn12853080\tany of various fragrant aromatic herbs of the genus Origanum used as seasonings\nn12853287\taromatic Eurasian perennial\nn12853482\taromatic European plant native to Mediterranean and Turkey; not widespread in Europe\nn12854048\tany of various aromatic herbs of the genus Marrubium\nn12854193\tEuropean aromatic herb with hairy leaves and numerous white flowers in axillary cymes; leaves yield a bitter extract use medicinally and as flavoring\nn12854600\tbushy perennial Old World mint having small white or yellowish flowers and fragrant lemon-flavored leaves; a garden escapee in northern Europe and North America\nn12855365\tEuropean mint naturalized in United States\nn12855494\ta European mint that thrives in wet places; has a perfume like that of the bergamot orange; naturalized in eastern North 
America\nn12855710\tmint with leaves having perfume like that of the bergamot orange\nn12855886\ta coarse Old World wild water mint having long leaves and spikelike clusters of flowers; naturalized in the eastern United States\nn12856091\therb with downy leaves and small purple or white flowers that yields a pungent oil used as a flavoring\nn12856287\tcommon garden herb having clusters of small purplish flowers and yielding an oil used as a flavoring\nn12856479\tmint with apple-scented stems of southern and western Europe; naturalized in United States\nn12856680\tEurasian perennial mint have small lilac-blue flowers and ovate leaves; yields an aromatic oil\nn12857204\ttrailing perennial evergreen herb of northwestern United States with small white flowers; used medicinally\nn12857779\taromatic annual with a tall stems of small whitish flowers enclosed in a greatly enlarged saucer-shaped or bell-shaped calyx\nn12858150\tany of various aromatic herbs of the genus Monarda\nn12858397\tperennial aromatic herb of eastern North America having variously colored tubular flowers in dense showy heads\nn12858618\ttall erect perennial or annual having lanceolate leaves and heads of purple-spotted creamy flowers; many subspecies grown from eastern to southwestern United States and in Mexico\nn12858871\tperennial herb of North America\nn12858987\tan annual horsemint of central and western United States and northern Mexico\nn12859153\tannual of southern United States\nn12859272\tperennial herb of North America (New York to Illinois and mountains of Alaska) having aromatic leaves and clusters of yellowish-pink balls\nn12859679\tfragrant California annual herb having lanceolate leaves and clusters of rose-purple flowers\nn12859986\thairy aromatic perennial herb having whorls of small white purple-spotted flowers in a terminal spike; used in the past as a domestic remedy; strongly attractive to cats\nn12860365\tany of several Old World tropical aromatic annual or perennial herbs of 
the genus Ocimum\nn12860978\tplant grown for its ornamental red or purple foliage\nn12861345\tany of various plants of the genus Phlomis; grown primarily for their dense whorls of lipped flowers and attractive foliage\nn12861541\ta spreading subshrub of Mediterranean regions cultivated for dense axillary whorls of purple or yellow flowers\nn12861892\tany of various plants of the genus Physostegia having sessile linear to oblong leaves and showy white or rose or lavender flowers\nn12862512\tany of various ornamental plants of the genus Plectranthus\nn12862828\tsmall East Indian shrubby mint; fragrant oil from its leaves is used in perfumes\nn12863234\tdecumbent blue-flowered European perennial thought to possess healing properties; naturalized throughout North America\nn12863624\tany of a number of perennial herbs of the genus Pycnanthemum; eastern North America and California\nn12864160\twidely cultivated for its fragrant grey-green leaves used in cooking and in perfumery\nn12865037\tstout Mediterranean sage with white or pink or violet flowers; yields oil used as a flavoring and in perfumery\nn12865562\tsilvery-leaved California herb with purple flowers\nn12865708\tsage of eastern United States\nn12865824\tshrubby plant with aromatic greyish-green leaves used as a cooking herb\nn12866002\ttall perennial Old World salvia with violet-blue flowers; found in open grasslands\nn12866162\taromatic herb of southern Europe; cultivated in Great Britain as a potherb and widely as an ornamental\nn12866333\tCalifornia erect and sparsely branched perennial\nn12866459\tan herb from Oaxaca that has a powerful hallucinogenic effect; the active ingredient is salvinorin\nn12866635\tEurasian sage with blue flowers and foliage like verbena; naturalized in United States\nn12866968\tany of several aromatic herbs or subshrubs of the genus Satureja having spikes of flowers attractive to bees\nn12867184\terect annual herb with oval leaves and pink flowers; used to flavor e.g. 
meats or soups or salads; southeastern Europe and naturalized elsewhere\nn12867449\terect perennial subshrub having pink or white flowers and leathery leaves with a flavor of thyme; southern Europe\nn12867826\ta herbaceous plant of the genus Scutellaria which has a calyx that, when inverted, resembles a helmet with its visor raised\nn12868019\tan American mint that yields a resinous exudate used especially formerly as an antispasmodic\nn12868880\tfoul-smelling perennial Eurasiatic herb with a green creeping rhizome\nn12869061\tperennial herb with an odorless rhizome widespread in moist places in northern hemisphere\nn12869478\tany of various plants of the genus Teucrium\nn12869668\tsubshrub with serrate leaves and cream-colored to pink or purple flowers in spikelike racemes; North America\nn12870048\tMediterranean germander having small hairy leaves and reddish purple flowers; attractive to cats\nn12870225\tEuropean germander with one-sided racemes of yellow flowers; naturalized in North America\nn12870535\tany of various mints of the genus Thymus\nn12870682\tcommon aromatic garden perennial native to the western Mediterranean; used in seasonings and formerly as medicine\nn12870891\taromatic dwarf shrub common on banks and hillsides in Europe; naturalized in United States\nn12871272\tany of several plants of the genus Trichostema having whorls of small blue flowers\nn12871696\taromatic plant of western United States\nn12871859\taromatic plant of the eastern United States\nn12872458\tany of numerous aquatic carnivorous plants of the genus Utricularia some of whose leaves are modified as small urn-shaped bladders that trap minute aquatic animals\nn12872914\tany of numerous carnivorous bog plants of the genus Pinguicula having showy purple or yellow or white flowers and a rosette of basal leaves coated with a sticky secretion to trap small insects\nn12873341\trootless carnivorous swamp plants having at the base of the stem a rosette of foliage and leaves consisting of 
slender tubes swollen in the middle to form traps; each tube passes into two long spirally twisted arms with stiff hairs\nn12873984\tsprawling annual or perennial herb of Central America and West Indies having creamy-white to red-purple bell-shaped flowers followed by unusual horned fruit\nn12875269\tannual of southern United States to Mexico having large whitish or yellowish flowers mottled with purple and a long curving beak\nn12875697\talternatively placed in genus Martynia\nn12875861\ta herbaceous plant of the genus Proboscidea\nn12876899\tany of numerous tall coarse woodland plants of the genus Scrophularia\nn12877244\ta garden plant of the genus Antirrhinum having showy white or yellow or crimson flowers resembling the face of a dragon\nn12877493\tCalifornia plant with slender racemes of white flowers\nn12877637\tsouthwestern United States plant with yellow flowers on stems that twist and twine through other vegetation\nn12877838\tperennial native to the Mediterranean but widely cultivated for its purple or pink flowers\nn12878169\ta plant of the genus Besseya having fluffy spikes of flowers\nn12878325\tsmall pale plant with dense spikes of pale bluish-violet flowers; of high cold meadows from Wyoming and Utah to New Mexico\nn12878784\tmulti-stemmed North American annual having solitary axillary dark golden-yellow flowers resembling those of the foxglove; sometimes placed in genus Gerardia\nn12879068\tsparsely branched North American perennial with terminal racemes of bright yellow flowers resembling those of the foxglove; sometimes placed in genus Gerardia\nn12879527\tany garden plant of the genus Calceolaria having flowers with large inflated slipper-shaped lower lip\nn12879963\tany of various plants of the genus Castilleja having dense spikes of hooded flowers with brightly colored bracts\nn12880244\tmost common paintbrush of western United States dry lands; having erect stems ending in dense spikes of bright orange to red flowers\nn12880462\twildflower of 
western North America having ragged clusters of crimson or scarlet flowers\nn12880638\thairy plant with pinkish flowers; Great Plains to northern Mexico\nn12880799\tplant of moist highland meadows having ragged clusters of pale yellow flowers\nn12881105\tshowy perennial of marshlands of eastern and central North America having waxy lanceolate leaves and flower with lower part creamy white and upper parts pale pink to deep purple\nn12881913\tsmall widely branching western plant with tiny blue-and-white flowers; British Columbia to Ontario and south to California and Colorado\nn12882158\teastern United States plant with whorls of blue-and-white flowers\nn12882779\tany of several plants of the genus Digitalis\nn12882945\ttall leafy European biennial or perennial having spectacular clusters of large tubular pink-purple flowers; leaves yield drug digitalis and are poisonous to livestock\nn12883265\tEuropean yellow-flowered foxglove\nn12883628\tany plant of the genus Gerardia\nn12884100\tNorth American plant having racemes of blue-violet flowers\nn12884260\tcommon European perennial having showy yellow and orange flowers; a naturalized weed in North America\nn12885045\tplant of southwestern United States having long open clusters of scarlet flowers with yellow hairs on lower lip\nn12885265\tplant with bright red tubular flowers in long narrow clusters near tips of erect stems; coastal ranges from central California southward\nn12885510\tlow branching dark green shrub with bunches of brick-red flowers at ends of branches; coastal ranges and foothills of northern California\nn12885754\terect plant with blue-violet flowers in rings near tips of stems; Idaho to Utah and Wyoming\nn12886185\tstems in clumps with cream-colored flowers; found from Washington to Wyoming and southward to California and Utah\nn12886402\tlow plant with light blue and violet flowers in short clusters near tips of stems; Nevada to Utah\nn12886600\tlow bushy plant with large showy pale lavender or 
blue-violet flowers in narrow clusters at ends of stems\nn12886831\tplant having small narrow leaves and blue-violet flowers in long open clusters; Utah and Colorado to New Mexico and Arizona\nn12887293\tfragrant puffed-up white to reddish-pink flowers in long narrow clusters on erect stems; Arizona to New Mexico and Utah\nn12887532\terect stems with pinkish-lavender flowers in long interrupted clusters; Arizona\nn12887713\tone of the West's most beautiful wildflowers; large brilliant pink or rose flowers in many racemes above thick mats of stems and leaves; ledges and cliffs from Washington to California\nn12888016\tplant with whorls of small dark blue-violet flowers; Washington to Wyoming and south to California and Colorado\nn12888234\twhorls of deep blue to dark purple flowers at tips of erect leafy stems; moist places from British Columbia to Oregon\nn12888457\twine and lavender to purple and black flowers in several clusters on the upper half of leafy stems; Montana south through the Rocky Mountains to Arizona and New Mexico\nn12889219\tEuropean mullein with smooth leaves and large yellow or purplish flowers; naturalized as a weed in North America\nn12889412\tdensely hairy Eurasian herb with racemose white flowers; naturalized in North America\nn12889579\tEurasian mullein with showy purple or pink flowers\nn12889713\ttall-stalked very woolly mullein with densely packed yellow flowers; ancient Greeks and Romans dipped the stalks in tallow for funeral torches\nn12890265\tany plant of the genus Veronica\nn12890490\tEuropean plant with minute axillary blue flowers on long stalks; widely naturalized in America\nn12890685\tplant of western North America and northeastern Asia having prostrate stems with dense racemes of pale violet to lilac flowers\nn12890928\terect or procumbent blue-flowered annual found in waste places of Europe and America\nn12891093\tEuropean plant having low-lying stems with blue flowers; sparsely naturalized in North America\nn12891305\tOld 
World plant with axillary racemes of blue-and-white flowers\nn12891469\tplant of wet places in Eurasia and America\nn12891643\tcommon hairy European perennial with pale blue or lilac flowers in axillary racemes\nn12891824\tNorth American annual with small white flowers widely naturalized as a weed in South America and Europe\nn12892013\tperennial decumbent herb having small opposite leaves and racemes of blue flowers; throughout Eurasia and the New World\nn12893463\tany of numerous shrubs or herbs or vines of the genus Solanum; most are poisonous though many bear edible fruit\nn12893993\tcoarse prickly weed having pale yellow flowers and yellow berrylike fruit; common throughout southern and eastern United States\nn12895298\twoolly-stemmed biennial arborescent shrub of tropical Africa and southern Asia having silvery-white prickly branches, clusters of blue or white flowers, and bright red berries resembling holly berries\nn12895811\tcopiously branched vine of Brazil having deciduous leaves and white flowers tinged with blue\nn12896615\timproved garden variety of black nightshade having small edible orange or black berries\nn12897118\tsmall perennial shrub cultivated in uplands of South America for its edible bright orange fruits resembling tomatoes or oranges\nn12897788\tvine of Costa Rica sparsely armed with hooklike spines and having large lilac-blue flowers\nn12897999\tSouth American shrub or small tree widely cultivated in the tropics; not a true potato\nn12898342\tperennial Eurasian herb with reddish bell-shaped flowers and shining black berries; extensively grown in United States; roots and leaves yield atropine\nn12898774\tany of several herbs of the genus Browallia cultivated for their blue or violet or white flowers\nn12899166\tWest Indian shrub with fragrant showy yellowish-white flowers\nn12899537\ta South American plant that is cultivated for its large fragrant trumpet-shaped flowers\nn12899752\tSouth American plant cultivated for its very large 
nocturnally fragrant trumpet-shaped flowers\nn12899971\tarborescent South American shrub having very large orange-red flowers\nn12900783\tplant bearing erect pungent conical red or yellow or purple fruits; sometimes grown as an ornamental\nn12901724\tplant bearing very small and very hot oblong red fruits; includes wild forms native to tropical America; thought to be ancestral to the sweet pepper and many hot peppers\nn12902466\tWest Indian evergreen shrub having clusters of funnel-shaped white flowers that are fragrant by day\nn12902662\tWest Indian evergreen shrub having clusters of funnel-shaped yellow-white flowers that are fragrant by night\nn12903014\tSouth American arborescent shrub having pale pink blossoms followed by egg-shaped reddish-brown edible fruit somewhat resembling a tomato in flavor\nn12903367\tany of several plants of the genus Datura\nn12903503\tintensely poisonous tall coarse annual tropical weed having rank-smelling foliage, large white or violet trumpet-shaped flowers and prickly fruits\nn12903964\tPeruvian shrub with small pink to lavender tubular flowers; leaves yield a tonic and diuretic\nn12904314\tpoisonous fetid Old World herb having sticky hairy leaves and yellow-brown flowers; yields hyoscyamine and scopolamine\nn12904562\tpoisonous herb whose leaves are a source of hyoscyamine\nn12904938\tany of various shrubs or vines of the genus Lycium with showy flowers and bright berries\nn12905135\tdeciduous erect or spreading shrub with spiny branches and violet-purple flowers followed by orange-red berries; southeastern Europe to China\nn12905412\tspiny evergreen shrub of southeastern United States having spreading branches usually blue or mauve flowers and red berries\nn12906214\tan Italian variety of cherry tomato that is shaped like a plum\nn12906498\ta plant of southern Europe and North Africa having purple flowers, yellow fruits and a forked root formerly thought to have magical powers\nn12906771\tthe root of the mandrake plant; used 
medicinally or as a narcotic\nn12907057\tcoarse South American herb grown for its blue-and-white flowers followed by a bladderlike fruit enclosing a dry berry\nn12907671\tSouth American ornamental perennial having nocturnally fragrant greenish-white flowers\nn12907857\ttall erect South American herb with large ovate leaves and terminal clusters of tubular white or pink flowers; cultivated for its leaves\nn12908093\ttobacco plant of South America and Mexico\nn12908645\tany of various plants of the genus Nierembergia having upturned bell-shaped flowers\nn12908854\tprostrate woody South American herb with white tubular flowers often tinged with blue or rose\nn12909421\tany of numerous tropical herbs having fluted funnel-shaped flowers\nn12909614\tannual herb having large nocturnally fragrant white flowers\nn12909759\therb or small shrublet having solitary violet to rose-red flowers\nn12909917\thybrids of Petunia axillaris and Petunia integrifolia: a complex group of petunias having single or double flowers in colors from white to purple\nn12911079\tannual of tropical South America having edible purple fruits\nn12911264\tstout hairy annual of eastern North America with sweet yellow fruits\nn12911440\tannual of Mexico and southern United States having edible purplish viscid fruit resembling small tomatoes\nn12911673\tMexican annual naturalized in eastern North America having yellow to purple edible fruit resembling small tomatoes\nn12911914\tfound on sea beaches from Virginia to South America having greenish-yellow flowers and orange or yellow berries\nn12912274\tweedy vine of Argentina having solitary white flowers followed by egg-shaped white or yellow fruit\nn12912670\tany plant of the genus Salpiglossis\nn12912801\tChilean herb having velvety funnel-shaped yellowish or violet flowers with long tonguelike styles at the corolla throat\nn12913144\tany plant of the genus Schizanthus having finely divided leaves and showy variegated flowers\nn12913524\therb that is a 
source of scopolamine\nn12913791\tMexican evergreen climbing plant having large solitary funnel-shaped fragrant yellow flowers with purple-brown ridges in the throat\nn12914923\tany of numerous tropical or subtropical American plants of the genus Verbena grown for their showy spikes of variously colored flowers\nn12915140\ta flowering shrub\nn12915568\ta mangrove of the West Indies and the southern Florida coast; occurs in dense thickets and has numerous short roots that bend up from the ground\nn12915811\ta small to medium-sized tree growing in brackish water especially along the shores of the southwestern Pacific\nn12916179\tan Australian tree resembling the black mangrove of the West Indies and Florida\nn12916511\ttall East Indian timber tree now planted in western Africa and tropical America for its hard durable wood\nn12917901\tany of numerous plants of the genus Euphorbia; usually having milky often poisonous juice\nn12918609\tnot unattractive European weed whose flowers turn toward the sun\nn12918810\tan Old World spurge introduced as a weed in the eastern United States\nn12918991\tAfrican dwarf succulent perennial shrub with numerous slender drooping branches\nn12919195\tcommon perennial United States spurge having showy white petallike bracts\nn12919403\tannual spurge of western United States having showy white-bracted flower clusters and very poisonous milk\nn12919646\tOld World perennial having foliage resembling cypress; naturalized as a weed in the United States\nn12919847\ttall European perennial naturalized and troublesome as a weed in eastern North America\nn12920043\tmuch-branched hirsute weed native to northeastern North America\nn12920204\ttropical American plant having poisonous milk and showy tapering usually scarlet petallike leaves surrounding small yellow flowers\nn12920521\tshowy poinsettia found from the southern United States to Peru\nn12920719\tpoinsettia of United States and eastern Mexico; often confused with Euphorbia 
heterophylla\nn12920955\tEuropean perennial herb with greenish yellow terminal flower clusters\nn12921315\tEuropean erect or depressed annual weedy spurge adventive in northeastern United States\nn12921499\tMexican shrub often cultivated for its scarlet-bracted flowers\nn12921660\tsmall tree of dry open parts of southern Africa having erect angled branches suggesting candelabra\nn12921868\tsomewhat climbing bushy spurge of Madagascar having long woody spiny stems with few leaves and flowers with scarlet bracts\nn12922119\tan annual weed of northeastern North America with dentate leaves\nn12922458\tweedy herb of eastern North America\nn12922763\ttropical Asiatic shrub; source of croton oil\nn12923108\tWest Indian shrub with aromatic bark\nn12923257\taromatic bark of cascarilla; used as a tonic and for making incense\nn12924623\tlarge shrub of tropical Africa and Asia having large palmate leaves and spiny capsules containing seeds that are the source of castor oil and ricin; widely naturalized throughout the tropics\nn12925179\ta stinging herb of tropical America\nn12925583\tsmall tropical American tree yielding purple dye and a tanning extract and bearing physic nuts containing a purgative oil that is poisonous in large quantities\nn12926039\tdeciduous tree of the Amazon and Orinoco Rivers having leathery leaves and fragrant yellow-white flowers; it yields a milky juice that is the chief source of commercial rubber\nn12926480\tany of several plants of the genus Manihot having fleshy roots yielding a nutritious starch\nn12926689\tcassava with long tuberous edible roots and soft brittle stems; used especially to make cassiri (an intoxicating drink) and tapioca\nn12927013\tcassava root eaten as a staple food after drying and leaching; source of tapioca\nn12927194\tSouth American plant with roots used as a vegetable and herbage used for stock feed\nn12927494\tlarge tree native to southeastern Asia; the nuts yield oil used in varnishes; nut kernels strung together are 
used locally as candles\nn12927758\tChinese tree bearing seeds that yield tung oil\nn12928071\tany of several tropical American shrubby succulent plants resembling cacti but having foot-shaped bracts\nn12928307\twax-coated Mexican shrub related to Euphorbia antisyphilitica\nn12928491\tlow tropical American shrub having powerful emetic properties\nn12928819\tseed of Mexican shrubs of the genus Sebastiana containing the larva of a moth whose movements cause the bean to jerk or tumble\nn12929403\tany of several shrubs or small evergreen trees having solitary white or pink or reddish flowers\nn12929600\tgreenhouse shrub with glossy green leaves and showy fragrant rose-like flowers; cultivated in many varieties\nn12930778\tany of numerous aromatic herbs of the family Umbelliferae\nn12930951\tany of various uncultivated umbelliferous plants with foliage resembling that of carrots or parsley\nn12931231\tEuropean weed naturalized in America that resembles parsley but causes nausea and poisoning when eaten\nn12931542\taromatic Old World herb having aromatic threadlike foliage and seeds used as seasoning\nn12931906\tany of various tall and stout herbs of the genus Angelica having pinnately compound leaves and small white or greenish flowers in compound umbels\nn12932173\ta biennial cultivated herb; its stems are candied and eaten and its roots are used medicinally\nn12932365\tEuropean herb with compound leaves and white flowers; adventive on Cape Breton Island\nn12932706\taromatic annual Old World herb cultivated for its finely divided and often curly leaves for use especially in soups and salads\nn12932966\tcoarse erect biennial Old World herb introduced as a weed in eastern North America\nn12933274\therb of Europe and temperate Asia\nn12934036\tany plant of the genus Astrantia\nn12934174\tEuropean herb with aromatic roots and leaves in a basal tuft and showy compound umbels of white to rosy flowers\nn12934479\ta Eurasian plant with small white flowers yielding caraway 
seed\nn12934685\ta caraway with whorled leaves\nn12934985\ttall erect highly poisonous Eurasiatic perennial herb locally abundant in marshy areas\nn12935166\ttall biennial water hemlock of northeastern North America having purple-spotted stems and clusters of extremely poisonous tuberous roots resembling small sweet potatoes\nn12935609\tlarge branching biennial herb native to Eurasia and Africa and adventive in North America having large fernlike leaves and white flowers; usually found in damp habitats; all parts extremely poisonous\nn12936155\ta common European plant having edible tubers with the flavor of roasted chestnuts\nn12936826\tdwarf Mediterranean annual long cultivated for its aromatic seeds\nn12937130\ta widely naturalized Eurasian herb with finely cut foliage and white compound umbels of small white or yellowish flowers and thin yellowish roots\nn12938081\tany plant of the genus Eryngium\nn12938193\tEuropean evergreen eryngo with twisted spiny leaves naturalized on United States east coast; roots formerly used as an aphrodisiac\nn12938445\tcoarse prickly perennial eryngo with aromatic roots; southeastern United States; often confused with rattlesnake master\nn12938667\tcoarse prickly perennial eryngo of United States thought to cure rattlesnake bite\nn12939104\tany of several aromatic herbs having edible seeds and leaves and stems\nn12939282\tstrongly aromatic with a smell of aniseed; leaves and seeds used for seasoning\nn12939479\tgrown especially for its edible aromatic bulbous stem base\nn12939874\ttall coarse plant having thick stems and cluster of white to purple flowers\nn12940226\therb native to southern Europe; cultivated for its edible stalks and foliage and seeds\nn12940609\tEuropean herb with soft ferny leaves and white flowers\nn12941220\tEuropean poisonous herb with fibrous roots\nn12941536\ta strong-scented plant cultivated for its edible root\nn12941717\tEuropean biennial having a long fusiform root that has been made palatable through 
cultivation\nn12942025\tbiennial weed in Europe and America having large pinnate leaves and yellow flowers and a bitter and somewhat poisonous root; the ancestor of cultivated parsnip\nn12942395\tannual or perennial herb with aromatic leaves\nn12942572\ta variety of parsley having flat leaves\nn12942729\tparsley with smooth leaves and enlarged edible taproot resembling a savory parsnip\nn12943049\tnative to Egypt but cultivated widely for its aromatic seeds and the oil from them used medicinally and as a flavoring in cookery\nn12943443\ta plant of the genus Sanicula having palmately compound leaves and unisexual flowers in panicled umbels followed by bristly fruit; reputed to have healing powers\nn12943912\tsanicle of northwestern United States and British Columbia having yellow or red or purple flowers\nn12944095\tsanicle of Europe and Asia having white to pale pink flowers\nn12945177\tstout white-flowered perennial found wild in shallow fresh water; northern United States and Asia\nn12945366\tlarge stout white-flowered perennial found wild in shallow fresh water; Europe\nn12945549\tan Asiatic herb cultivated in Europe for its sweet edible tuberous root\nn12946849\ta tree of shrub of the genus Cornus often having showy bracts resembling flowers\nn12947313\tdeciduous tree; celebrated for its large white or pink bracts and stunning autumn color that is followed by red berries\nn12947544\tcommon North American shrub with reddish purple twigs and white flowers\nn12947756\tshrub of eastern North America closely resembling silky cornel\nn12947895\tshrub of eastern North America having purplish stems and blue fruit\nn12948053\tEuropean deciduous shrub turning red in autumn having dull white flowers\nn12948251\tcreeping perennial herb distinguished by red berries and clustered leaf whorls at the tips of shoots; Greenland to Alaska\nn12948495\tdeciduous European shrub or small tree having bright red fruit\nn12949160\tSouth American shrub or small tree having long shining 
evergreen leaves and panicles of green or yellow flowers\nn12949361\tsmall New Zealand broadleaf evergreen tree often cultivated in warm regions as an ornamental\nn12950126\ta plant of the genus Valeriana having lobed or dissected leaves and cymose white or pink flowers\nn12950314\ttall rhizomatous plant having very fragrant flowers and rhizomes used medicinally\nn12950796\twidely cultivated as a salad crop and pot herb; often a weed\nn12951146\tEuropean herb with small fragrant crimson or white spurred flowers\nn12951835\tany fern of the genus Hymenophyllum growing in tropical humid regions and having translucent leaves\nn12952165\tany fern of the genus Trichomanes having large pinnatifid often translucent fronds; most are epiphytic on tree branches and twigs or terrestrial on mossy banks\nn12952469\ta variety of bristle fern\nn12952590\tlarge stout fern of extreme western Europe\nn12952717\tlarge fern of New Zealand having kidney-shaped fronds\nn12953206\tany fern of the genus Osmunda: large ferns with creeping rhizomes; naked sporangia are on modified fronds that resemble flower clusters\nn12953484\tlarge deeply rooted fern of worldwide distribution with upright bipinnate compound tufted fronds\nn12953712\tNorth American fern having tall erect pinnate fronds and a few sporogenous pinnae at or near the center of the fertile fronds\nn12954353\tNew Zealand with pinnate fronds and a densely woolly stalks; sometimes included in genus Todea\nn12954799\tfern of rain forests of tropical Australia and New Zealand and South Africa\nn12955414\trare small fern of northeastern North America having numerous slender spiraling fronds and forming dense tufts\nn12955840\tfern of Florida and West Indies and Central America with rhizome densely clad in grown hairs\nn12956170\tany of several ferns of the genus Lygodium that climb by twining\nn12956367\tdelicate fern of the eastern United States having a twining stem and palmately-lobed sterile fronds and forked fertile 
fronds\nn12956588\ttropical fern widespread in Old World; naturalized in Jamaica and Florida\nn12956922\tsweetly scented African fern with narrow bipinnate fronds\nn12957608\tany of several water ferns of the genus Marsilea having four leaflets\nn12957803\tAustralian clover fern\nn12957924\twater fern of Europe and Asia and the eastern United States distinguished by four leaflets resembling clover leaves\nn12958261\tEuropean water fern found around margins of bodies of water or in wet acid soil having small globose sporocarps\nn12958615\tsmall latex-containing aquatic fern of southern Brazil\nn12959074\tfree-floating aquatic ferns\nn12959538\tsmall free-floating aquatic fern from the eastern United States to tropical America; naturalized in western and southern Europe\nn12960378\tferns with fertile spikes shaped like a snake's tongue\nn12960552\tepiphytic fern with straplike usually twisted fronds of tropical Asia and Polynesia and America\nn12960863\ta fern of the genus Botrychium having a fertile frond bearing small grapelike clusters of spore cases\nn12961242\tof North America and Eurasia\nn12961393\tEuropean fern with leathery and sparsely hairy fronds\nn12961536\tAmerican fern whose clustered sporangia resemble a snake's rattle\nn12961879\tAustralasian fern with clusters of sporangia on stems of fertile fronds\nn12963628\tany of various fungi of the genus Erysiphe producing powdery conidia on the host surface\nn12964920\tfungus causing Dutch elm disease\nn12965626\ta fungus that infects various cereal plants forming compact black masses of branching filaments that replace many grains of the plant; source of medicinally important alkaloids and of lysergic acid\nn12965951\ta sclerotium or hardened mass of mycelium\nn12966804\tfungus causing black root rot in apples\nn12966945\tthe fruiting bodies of the fungi of the genus Xylaria\nn12968136\tany fungus of the genus Sclerotinia; some causing brown rot diseases in plants\nn12968309\ta variety of 
sclerotinia\nn12969131\tany of various fungi of the genus Scleroderma having hard-skinned subterranean fruiting bodies resembling truffles\nn12969425\tan earthball fungus that is a dingy brownish yellow and a dark purplish interior; the peridium is covered with a pattern of small warts\nn12969670\tan earthball with a smooth upper surface that is at first buried in sand; the top of the fruiting body opens up to form segments like the ray of an umbel\nn12969927\tan earthball with a peridium that is firm dry and smooth when young but developing cracks when mature; pale orange-yellow when young and reddish brown at maturity\nn12970193\ta variety of gastromycete\nn12970293\ta variety of Podaxaceae\nn12970733\tmushroom of the genus Tulostoma that resembles a puffball\nn12971400\tany of various fungi of the family Rhizopogonaceae having subterranean fruiting bodies similar to the truffle\nn12971804\ta large whitish Rhizopogon that becomes greyish brown in maturity\nn12972136\ta fungus with a round yellow to orange fruiting body that is found on the surface of the ground or partially buried; has a distinctive sterile column extending into the spore-bearing tissue\nn12973443\tany mold of the genus Mucor\nn12973791\tany of various rot causing fungi of the genus Rhizopus\nn12973937\ta mold of the genus Rhizopus\nn12974987\ta naked mass of protoplasm having characteristics of both plants and animals; sometimes classified as protoctists\nn12975804\ta slime mold of the class Myxomycetes\nn12976198\tdiffering from true slime molds in being cellular and nucleate throughout the life cycle\nn12976554\tany slime mold of the genus Dictostylium\nn12978076\tan aquatic fungus of genus Synchytriaceae that is parasitic on pond scum\nn12979316\tfungus causing potato wart disease in potato tubers\nn12979829\ta fungus that attacks living fish and tadpoles and spawn causing white fungus disease: a coating of white hyphae on especially peripheral parts (as fins)\nn12980080\tparasitic or 
saprobic organisms living chiefly in fresh water or moist soil\nn12980840\tany of various fungi of the family Peronosporaceae parasitic on e.g. grapes and potatoes and melons\nn12981086\tfungus causing a serious disease in tobacco plants characterized by bluish-grey mildew on undersides of leaves\nn12981301\tfungus causing a downy mildew on onions\nn12981443\tfungus causing a downy mildew on growing tobacco\nn12981954\tfungus causing a disease characterized by a white powdery mass of conidia\nn12982468\tany fungus of the genus Pythium\nn12982590\tfungus causing damping off disease in seedlings\nn12982915\tcauses brown rot gummosis in citrus fruits\nn12983048\tfungus causing late blight in solanaceous plants especially tomatoes and potatoes\nn12983654\ta fungus resembling slime mold that causes swellings or distortions of the roots of cabbages and related plants\nn12983873\ta type of ascomycetous fungus\nn12983961\ta type of ascomycetous fungus\nn12984267\ta common name for a variety of Sarcosomataceae\nn12984489\ta common name for a variety of Sarcosomataceae\nn12984595\ta common name for a variety of Sarcosomataceae\nn12985420\tany of various highly prized edible subterranean fungi of the genus Tuber; grow naturally in southwestern Europe\nn12985773\ta club-shaped coral fungus\nn12985857\tany of numerous fungi of the family Clavariaceae often brightly colored that grow in often intricately branched clusters like coral\nn12986227\ta fungus of the family Hydnaceae\nn12987056\tany thallophytic plant of the division Lichenes; occur as crusty patches or bushy growths on tree trunks or rocks or bare ground etc.\nn12987423\ta lichen in which the fungus component is an ascomycete\nn12987535\ta lichen in which the fungus component is a basidiomycete\nn12988158\tany lichen of the genus Lecanora; some used in dyeing; some used for food\nn12988341\tany of several Old World partially crustaceous or shrubby lecanoras that roll up and are blown about over African and Arabian 
deserts and used as food by people and animals\nn12988572\tany of various lecanoras that yield the dye archil\nn12989007\ta source of the dye archil and of litmus\nn12989938\tgreenish grey pendulous lichen growing on trees\nn12990597\tany of several lichens of the genus Alectoria having a thallus consisting of filaments resembling hair\nn12991184\tan erect greyish branching lichen of Arctic and even some north temperate regions constituting the chief food for reindeer and caribou and sometimes being eaten by humans\nn12991837\tany of several lichens of the genus Parmelia from which reddish brown or purple dyes are made\nn12992177\tlichen with branched flattened partly erect thallus that grows in mountainous and Arctic regions; used as a medicine or food for humans and livestock; a source of glycerol\nn12992868\tan organism of the kingdom Fungi lacking chlorophyll and feeding on organic matter; ranging from unicellular or multicellular organisms to spore-bearing syncytia\nn12994892\tthe basidium of various fungi\nn12995601\tany of numerous fungi of the division Eumycota\nn12997654\tany of various fungi of the subdivision Basidiomycota\nn12997919\tany of various fleshy fungi of the subdivision Basidiomycota consisting of a cap at the end of a stem arising from an underground mycelium\nn12998815\ta saprophytic fungus of the order Agaricales having an umbrellalike cap with gills on the underside\nn13000891\tmushrooms and related fleshy fungi (including toadstools, puffballs, morels, coral fungi, etc.)\nn13001041\tcommon name for an edible agaric (contrasting with the inedible toadstool)\nn13001206\tcommon name for an inedible or poisonous agaric (contrasting with the edible mushroom)\nn13001366\tcoarse edible mushroom with a hollow stem and a broad white cap\nn13001529\tcommon edible mushroom found naturally in moist open soil; the cultivated mushroom of commerce\nn13001930\tedible east Asian mushroom having a golden or dark brown to blackish cap and an inedible 
stipe\nn13002209\ta fungus with a scaly cap and white flesh and a ring on the stalk (with scales below the ring); odor reminiscent of licorice\nn13002750\twidely distributed edible mushroom resembling the fly agaric\nn13002925\tagaric often confused with the death cup\nn13003061\tpoisonous (but rarely fatal) woodland fungus having a scarlet cap with white warts and white gills\nn13003254\textremely poisonous usually white fungus with a prominent cup-shaped base; differs from edible Agaricus only in its white gills\nn13003522\tyellowish edible agaric that usually turns red when touched\nn13003712\tfungus similar to Amanita phalloides\nn13004423\twidely distributed edible mushroom rich yellow in color with a smooth cap and a pleasant apricot aroma\nn13004640\ta mildly poisonous fungus with a fruiting body shaped like a hollow trumpet\nn13004826\tan edible agaric with a brown fruiting body that is often compound\nn13004992\tmushroom with a distinctive pink to vermillion fruiting body\nn13005329\ta large poisonous agaric with orange caps and narrow clustered stalks; the gills are luminescent\nn13005984\thaving a cap that melts into an inky fluid after spores have matured\nn13006171\tcommon edible mushroom having an elongated shaggy white cap and black spores\nn13006631\tedible mushroom\nn13006894\tmushroom that grows in a fairy ring\nn13007034\ta ring of fungi marking the periphery of the perennial underground growth of the mycelium\nn13007417\tedible agaric with a soft greyish cap growing in shelving masses on dead wood\nn13007629\tred luminescent mushroom of Europe\nn13008157\ta fungus with a smooth orange cap and yellow gills and pale yellow stalk\nn13008315\ta beautiful yellow gilled fungus found from Alaska south along the coast\nn13008485\ta large fungus with whitish scales on the cap and remnants of the veil hanging from the cap; the stalk is thick and hard\nn13008689\ta fungus with a yellow cap covered with fine scales as is the stalk\nn13008839\ta fungus that 
grows in clusters on the ground; cap is brownish orange with a surface that is smooth and slightly sticky; whitish gills and a cylindrical brown stalk\nn13009085\tone of the most important fungi cultivated in Japan\nn13009244\ta gilled fungus having yellow slimy caps with conspicuous tawny scales on the caps and stalks\nn13009429\ta gilled fungus with a cap and stalk that are conspicuously scaly with upright scales; gills develop a greenish tinge with age\nn13009656\ta pale buff fungus with tawny scales\nn13010694\ta gilled fungus with a long stalk and a yellow slimy cap from which fragments of the broken veil hang; gills are initially white but become dark brown as spores are released\nn13010951\ta gilled fungus with a large slimy purple or olive cap; gills become purple with age; the stalk is long and richly decorated with pieces of the white sheath that extends up to a ring\nn13011221\ta large gilled fungus with a broad cap and a long stalk; the cap is dark brown; the white gills turn dark purplish brown with age; edible and choice\nn13011595\ta basidiomycete with gills\nn13012253\ta deadly poisonous agaric; a large cap that is first white (livid or lead-colored) and then turns yellowish or tan\nn13012469\tan agaric with a dark brown conical cap; fruits in early spring\nn13012973\ta poisonous agaric with a fibrillose cap and brown scales on a white ground color; cap can reach a diameter of 30 cm; often forms `fairy rings'\nn13013534\tany fungus of the genus Lepiota\nn13013764\tedible long-stalked mushroom with white flesh and gills and spores; found in open woodlands in autumn\nn13013965\tan agaric regarded as poisonous\nn13014097\tan agaric with greyish white fruiting body and gills that change from pink to dingy red\nn13014265\tan agaric with a large cap with brown scales and a thick stalk\nn13014409\tan agaric with a pallid cap and a stalk that is enlarged near the base\nn13014581\tan agaric with a relatively small pink to red cap and white gills and 
stalk\nn13014741\tan agaric with a ragged stalk and a soft floccose cap\nn13014879\ta white agaric that tends to cluster and has a club-shaped base\nn13015509\tfungus causing pink disease in citrus and coffee and rubber trees etc\nn13015688\tfungus causing bottom rot in lettuce\nn13016076\tfungus causing a disease in potatoes characterized by black scurfy spots on the tubers\nn13016289\tfungus causing a disease in coffee and some other tropical plants\nn13017102\tedible agaric that is pale lilac when young; has a smooth moist cap\nn13017240\tan edible agaric that fruits in great clusters (especially in sandy soil under cottonwood trees)\nn13017439\ta mildly poisonous agaric with a viscid reddish brown cap and white gills and stalk\nn13017610\tan agaric with a cap that is coated with dark fibrils in the center and has yellowish margins\nn13017789\tan edible agaric with yellow gills and a viscid yellow cap that has a brownish center\nn13017979\ta poisonous white agaric\nn13018088\ta poisonous agaric having a pale cap with fine grey fibrils\nn13018232\tan agaric with a cap that is densely covered with reddish fibrils and pale gills and stalk\nn13018407\tan orange tan agaric whose gills become brown by maturity; has a strong odor and taste\nn13018906\ta parasite on various trees\nn13019496\tan agaric with a brilliant scarlet cap and a slender stalk\nn13019643\tan edible agaric found in piles of hardwood sawdust; the caps are black and coarsely wrinkled\nn13019835\ta small edible agaric with a slender stalk; usually found on rotting hardwoods\nn13020191\tsmall tropical and subtropical edible mushroom having a white cap and long stem; an expensive delicacy in China and other Asian countries where it is grown commercially\nn13020481\ta mushroom with a dry yellowish to white fibrillose cap\nn13020964\tan agaric with a flat cap that is greyish or yellowish brown with pallid gills and a stalk that bulges toward the base\nn13021166\ta small poisonous agaric; has a dry white 
cap with crowded gills and a short stalk\nn13021332\ta fungus with a cap that is creamy grey when young and turns brown with age and a whitish stalk that stains yellow when handled\nn13021543\ta large white agaric; edible but not palatable\nn13021689\tan edible agaric with large silky white caps and thick stalks\nn13021867\tan edible white agaric that fruits in dense clusters; the gills are narrow and crowded and the stalk is fleshy and unpolished\nn13022210\tan edible agaric that is available in early spring or late fall when few other mushrooms are; has a viscid smooth orange to brown cap and a velvety stalk that turns black in maturity and pallid gills; often occur in clusters\nn13022709\tthe vegetative part of a fungus consisting of a mass of branching threadlike hyphae\nn13022903\tcompact usually dark-colored mass of hardened mycelium constituting a vegetative food-storage body in various true fungi; detaches when mature and can give rise to new growth\nn13023134\tany of various ascomycetous fungi in which the spores are formed in a sac or ascus\nn13024012\tany fungus of the class Ascomycetes (or subdivision Ascomycota) in which the spores are formed inside an ascus\nn13024500\tany of various mushrooms of the class Ascomycetes\nn13024653\ta variety of grainy club mushrooms\nn13025647\tany of various single-celled fungi that reproduce asexually by budding or division\nn13025854\tused as a leaven in baking and brewing\nn13026015\tused in making wine\nn13027557\ta mold causing aspergillosis in birds and man\nn13027879\tfungus causing brown root rot in plants of the pea and potato and cucumber families\nn13028611\tany fungus that is a member of the subclass Discomycetes\nn13028937\ta discomycete that develops in clusters of slippery rubbery gelatinous fruiting bodies that are dingy yellow to tan in color\nn13029122\ta discomycete that is 3-8 cm high with an orange to yellow fertile portion and white or pinkish stalks often half in and half out of the 
water\nn13029326\ta discomycete that is a harbinger of spring; the fruiting body is thin and tough and saucer-shaped (about the size of quarter to a half dollar) with a deep bright red upper surface and a whitish exterior\nn13029610\tan early spring variety of discomycete with yellow to orange yellow lining of the cup\nn13029760\ta discomycete with bright orange cup-shaped or saucer-shaped fruiting bodies and pale orange exteriors\nn13030337\tapothecium of a fungus of the family Pezizaceae\nn13030616\ta discomycetous fungus of the genus Peziza; the fragile fruiting body is a ghostly white but stains yellow when broken; favors strongly alkaline habitats\nn13030852\ta scarlet European fungus with cup-shaped ascocarp\nn13031193\tan urn-shaped discomycete with a nearly black interior\nn13031323\tthe cup-shaped fruiting body of this discomycete has a jellylike interior and a short stalk\nn13031474\tthe fruiting bodies of this discomycete have a firm texture and long retain their cup shape; the pale brown interior blends with the color of dead leaves\nn13032115\tany of various edible mushrooms of the genus Morchella having a brownish spongelike cap\nn13032381\tan edible and choice morel with a globular to elongate head with an irregular pattern of pits and ridges\nn13032618\tan edible morel with a cup-shaped or saucer-shaped fruiting body can be up to 20 cm wide; the fertile surface inside the cup has wrinkles radiating from the center; can be easily confused with inedible mushrooms\nn13032923\ta morel whose fertile portion resembles a bell and is attached to the stipe only at the top\nn13033134\tresembles a thimble on a finger; the surface of the fertile portion is folded into wrinkles that extend from the top down; fruiting begins in spring before the leaves are out on the trees\nn13033396\ta morel with a fertile portion that has a relatively smooth surface; the stalk is fragile\nn13033577\ta morel whose pitted fertile body is attached to the stalk with little free 
skirt around it; the fertile body is grey when young and black in old age\nn13033879\ta delicious morel with a conic fertile portion having deep and irregular pits\nn13034062\ta morel with the ridged and pitted fertile portion attached to the stipe for about half its length\nn13034555\ta fungus composed of several apothecia that look like elongated rabbit ears; the sterile surface is dark brown and warty; the fertile surface is smooth and pinkish orange\nn13034788\ta fungus with a long solid stalk embedded in soil and a yellow-brown head shaped like a cauliflower\nn13035241\ta fungus of the family Helvellaceae\nn13035389\ta large fungus of the family Helvellaceae\nn13035707\tany fungus of the genus Helvella having the ascocarps stalked or pleated or often in folds\nn13035925\ta helvella with a saddle-shaped fertile part and creamy color; the stalk is fluted and pitted\nn13036116\ta helvella with a cup-shaped fertile body having a brown interior; the stalk is creamy white and heavily ribbed\nn13036312\ta helvella with an irregularly convoluted cap that is dark brown when young and becomes dull grey with age; the lower surface of the cap is smooth and pale grey; the stalk is thick and deeply fluted\nn13036804\tany fungus of the genus Discina\nn13037406\tany fungus of the genus Gyromitra\nn13037585\ta gyromitra with a brown puffed up fertile part and a thick fluted stalk; found under conifers in California\nn13037805\ta gyromitra with a brown puffed up fertile part and a rosy pink fluted stalk and smooth round spores; found on hardwood slash east of the Great Plains\nn13038068\ta poisonous gyromitra; the surface of the fertile body is smooth at first and becomes progressively undulating and wrinkled (but never truly pitted); color varies from dull yellow to brown\nn13038376\ta poisonous fungus; saddle-shaped and dull yellow to brown fertile part is relatively even\nn13038577\ta lorchel with deep brownish red fertile part and white stalk\nn13038744\ta gyromitra with a 
large irregular stalk and fertile part that is yellow to brown and wrinkled; has early fruiting time\nn13039349\tany fungus of the class Gasteromycetes\nn13040303\tany of various ill-smelling brown-capped fungi of the order Phallales\nn13040629\ta common fungus formerly used in preparing a salve for rheumatism\nn13040796\tthis stinkhorn has a cap with a granulose surface at the apex and smells like decaying flesh\nn13041312\ta stinkhorn having a stalk without a cap; the slimy gleba is simply plastered on its surface near the apex where winged insects can find it\nn13041943\ta gasteromycete with a leathery stalk and a fruiting body that is globose and has a pale yellow spore case\nn13042134\ta gasteromycete with a leathery stalk and a fruiting body this globose and has a red spore case\nn13042316\ta gasteromycete with a leathery stalk and a fruiting body with a thin gelatinous spore case and elliptical spores\nn13042982\ta stinkhorn of genus Pseudocolus; the fruiting body first resembles a small puffball that soon splits open to form a stalk with tapering arms that arch and taper to a common point\nn13043926\tany of various fungi of the family Lycoperdaceae whose round fruiting body discharges a cloud of spores when mature\nn13044375\thuge edible puffball up to 2 feet diameter and 25 pounds in weight\nn13044778\tany fungus of the family Geastraceae; in form suggesting a puffball whose outer peridium splits into the shape of a star\nn13045210\tan earthstar with a bluish spore sac and a purplish brown gleba; at maturity the outer layer splits into rays that bend backward and elevate the spore sac\nn13045594\ta fungus similar to an earthstar except that it does not open up; the spore mass is brown at maturity with a column of sterile tissue extending up into it\nn13045975\tthe largest earthstar; the fruiting body can measure 15 cm across when the rays are expanded\nn13046130\ta common species of earthstar widely distributed in sandy soil; the gleba is a pale 
tan\nn13046669\tany of various fungi of the family Nidulariaceae having a cup-shaped body containing several egg-shaped structure enclosing the spores\nn13047862\ta species of Gastrocybe fungus that has a conic cap and a thin stalk; at first the stalk is upright but as it matures the stalk bends over and then downward; the cap then gelatinizes and a slimy mass containing the spores falls to the ground as the stalk collapses\nn13048447\ta small fungus with a fragile cap that cracks to expose the white context and a white stalk that is practically enclosed by the cap\nn13049953\twoody pore fungi; any fungus of the family Polyporaceae or family Boletaceae having the spore-bearing surface within tubes or pores; the fruiting bodies are usually woody at maturity and persistent\nn13050397\ta woody fungus that forms shelflike sporophores on tree trunks and wood structures\nn13050705\ta rare fungus having a large (up to 14 inches wide) yellow fruiting body with multiple individual caps and a broad central stalk and a fragrant odor\nn13050940\ta fungus with a whitish often circular cap and a white pore surface and small pores and a white central stalk; found under conifers; edible but not popular\nn13051346\ta gilled polypore with a large cap (up to 15 inches in diameter) and a broad stalk; edible when young and tender\nn13052014\ta pore fungus with a whitish cottony soft cap found on conifer logs in forests at high elevation in the western United States and adjacent Canada\nn13052248\ta fungus with a whitish kidney-shaped cap and elongated pores; causes white rot in dead hardwoods\nn13052670\tlarge greyish-brown edible fungus forming a mass of overlapping caps that somewhat resembles a hen at the base of trees\nn13052931\ta fungus with a lateral stalk (when there is a stalk) and a scaly cap that becomes nearly black in maturity; widely distributed in the northern hemisphere\nn13053608\ta popular edible fungus with a cap the color of liver or raw meat; abundant in 
southeastern United States\nn13054073\tfungus used in the preparation of punk for fuses\nn13054560\tany fungus of the family Boletaceae\nn13055423\ta fungus convex cap and a dingy yellow under surface and a dry stalk\nn13055577\tan edible and choice fungus; has a convex cap that is slightly viscid when fresh and moist but soon dries and a thick bulbous tan stalk\nn13055792\ta fungus with a red cap and a red coarsely reticulate stalk\nn13055949\ta poisonous fungus with a dingy yellow cap and orange red undersurface and a cylindrical reticulate stalk\nn13056135\ta fungus that is edible when young and fresh; has a dark brown convex cap with a yellow to greenish under surface and reddish stalk\nn13056349\ta fungus that has an off-white cap when it is young but later becomes dingy brown and a stalk of the same color; the under surface of the cap (the tubes) a pale greenish yellow\nn13056607\ta beautiful but poisonous bolete; has a brown cap with a scarlet pore surface and a thick reticulate stalk\nn13056799\tan edible fungus with a broadly convex blackish brown cap and a pore surface that is yellow when young and darkens with age; stalk is thick and enlarges toward the base\nn13057054\ta fungus with a rusty red cap and a white pore surface that becomes yellow with age and a pale yellow stalk\nn13057242\ta fungus with a velvety stalk and usually a dingy brown cap; injured areas turn blue instantly\nn13057422\tan edible (but not choice) fungus found on soil under hardwoods; has a dry convex cap with whitish under surface and a reticulate stalk\nn13057639\tan edible and choice fungus that has a brown cap with greenish yellow under surface and a stalk that become dull red with age\nn13058037\tan edible fungus with a pinkish purple cap and stalk and a pore surface that is yellow with large angular pores that become like gills in maturity\nn13058272\tan edible fungus with a broadly convex brown cap and a whitish pore surface and stalk\nn13058608\tan edible fungus with a dark 
reddish brown cap and a wide light tan stalk that expands toward the base\nn13059298\ta short squat edible fungus with a reddish brown cap and white stalk; fruits under pines in the spring\nn13059657\tedible mild-tasting mushroom found in coniferous woodlands of eastern North America\nn13060017\ta fungus with a long coarsely shaggy reticulate stalk and a rimose areolate cap surface\nn13060190\tany fungus of the order Tremellales or Auriculariales whose fruiting body is jellylike in consistency when fresh\nn13061172\tpopular in China and Japan and Taiwan; gelatinous mushrooms; most are dried\nn13061348\ta yellow jelly fungus\nn13061471\ta jelly fungus with a fruiting body 5-15 cm broad and gelatinous in consistency; resembles a bunch of leaf lettuce; mostly water and brownish in color\nn13061704\ta jelly fungus with an erect whitish fruiting body and a highly variable shape (sometimes resembling coral fungi)\nn13062421\twidely distributed edible fungus shaped like a human ear and growing on decaying wood\nn13063269\tany of various fungi causing rust disease in plants\nn13063514\tfruiting body of some rust fungi bearing chains of aeciospores\nn13064111\tfungus causing flax rust\nn13064457\tfungus causing white pine blister rust and having a complex life cycle requiring a plant of genus Ribes as alternate host\nn13065089\trust fungus that attacks wheat\nn13065514\trust fungus causing rust spots on apples and pears etc\nn13066129\tany fungus of the order Ustilaginales\nn13066448\ta smut fungus causing a smut disease of grains in which the spore masses are covered or held together by the grain membranes\nn13066979\ta smut fungus of the genus Ustilago causing a smut disease of grains in which the entire head is transformed into a dusty mass of spores\nn13067191\ta smut fungus attacking Indian corn\nn13067330\ta common smut attacking Indian corn causing greyish white swellings that rupture to expose a black spore mass\nn13067532\tgenus of smut fungus\nn13067672\tsmut 
fungus attacking heads of corn or sorghum and causing a covered smut\nn13068255\tfungus that destroys kernels of wheat by replacing them with greasy masses of smelly spores\nn13068434\tsimilar to Tilletia caries\nn13068735\tsmut fungus causing blackish blisters on scales and leaves of onions; especially destructive to seedlings\nn13068917\ta smut fungus causing a smut in cereals and other grasses that chiefly affects leaves and stems and is characterized chains of sori within the plant tissue that later rupture releasing black masses of spores\nn13069224\tfungus affecting leaves and stems of wheat\nn13069773\tfungus that frequently encircles twigs and branches of various trees especially citrus trees in southern United States\nn13070308\tany fungus of the family Hygrophoraceae having gills that are more or less waxy in appearance\nn13070875\ta fungus having an acutely conic cap and dry stalks\nn13071371\ta fungus with a white convex cap and arcuate white gills and a stalk that tapers toward the base\nn13071553\ta fungus with a broadly convex cap that is cream color with a tint of blue over the margin; waxy gills are bluish green to blue-grey; a short stalk tapers abruptly at the base\nn13071815\ta fungus with a drab squamulose cap and grey-brown squamules over the white background of the stalk and waxy grey-white gills\nn13072031\ta fungus with a slightly viscid cap; cap and gills are reddish brown and the stalk is grey\nn13072209\ta grey fungus frequently found near melting snow banks\nn13072350\ta fungus with a viscid purplish red cap and stalk; found under spruce and other conifers\nn13072528\tan edible fungus with a reddish cap and close pale gills and dry stalk; found under hardwoods\nn13072706\tan edible fungus with a large white cap and a dry stalk and white gills\nn13072863\ta fungus having a brownish sticky cap with a white margin and white gills and an odor of raw potatoes\nn13073055\ta small fungus with orange cap and yellow gills found in sphagnum 
bogs\nn13073703\ta fungus with a small brown convex cap with a depressed disc; waxy wine-colored gills and a brown stalk; fruits in or near melting snow banks in the western mountains of North America\nn13074619\ta fungus with large tawny caps and pale cinnamon gills and a red band of veil around the stalk; usually found near birch trees\nn13074814\tan edible fungus with a slimy viscid cap that is initially yellow but turns olive and then tawny; flesh is lavender\nn13075020\ta fungus with a viscid wrinkled tawny cap; the stalk has a basal bulb that diminishes as the stalk elongates; the gills are dark violet at first but soon turn brown\nn13075272\ta poisonous fungus with a bright yellow brown cap and a long cinnamon colored stalk\nn13075441\ta fungus with a reddish purple cap having a smooth slimy surface; close violet gills; all parts stain dark purple when bruised\nn13075684\ta fungus with a dry brown cap and rusty red gills and a yellowish stalk\nn13075847\ta fungus with a sticky lavender cap and stalk that whitish above and covered with a silky lavender sheath\nn13076041\ta fungus that is violet overall with a squamulose cap\nn13076405\ta fungus with a brownish orange fruiting body and a ring near the top of the stalk; the taste is bitter and the flesh contains psilocybin and psilocin\nn13076643\ta poisonous fungus with a dry cap and a cortina that does not leave much of a ring on the robust stalk\nn13076831\ta giant fungus of the Pacific Northwest; has a very thick stalk and the cortina leaves a ring high up on the stalk\nn13077033\ta fungus that produces a superficial growth on various kinds of damp or decaying organic matter\nn13077295\ta fungus that produces a superficial (usually white) growth on organic matter\nn13078021\ta fungus of the genus Verticillium\nn13079073\tany of the yeastlike imperfect fungi of the genus Monilia\nn13079419\tany of the yeastlike imperfect fungi of the genus Candida\nn13079567\ta parasitic fungus that can infect the mouth or 
the skin or the intestines or the vagina\nn13080306\tany of various yeastlike budding fungi of the genus Blastomyces; cause disease in humans and other animals\nn13080866\tfungus causing yellow spot (a sugarcane disease in Australia)\nn13081229\tfungus causing green smut in rice\nn13081999\ta fungus causing dry rot\nn13082568\tany fungus now or formerly belonging to the form genus Rhizoctinia\nn13083023\tany of a variety of plants grown indoors for decorative purposes\nn13083461\tan ornamental plant suitable for planting in a flowerbed\nn13084184\ta plant adapted to arid conditions and characterized by fleshy water-storing tissues that act as water reservoirs\nn13084834\ta variety of a plant developed from a natural species and maintained under cultivation\nn13085113\tany plant that crowds out cultivated plants\nn13085747\tusually used in combination: `liverwort'; `milkwort'; `whorlywort'\nn13090018\ta thorny stem or twig\nn13090871\tfleshy and usually brightly colored cover of some seeds that develops from the ovule stalk and partially or entirely envelopes the seed\nn13091620\tleaf in ferns and mosses that bears the sporangia\nn13091774\torgan containing or producing spores\nn13091982\tstalk bearing one or more sporangia\nn13092078\tsaclike structure in which ascospores are formed through sexual reproduction of ascomycetes\nn13092240\tsexually produced fungal spore formed within an ascus\nn13092385\tone of a string of thick walled vegetative resting cells formed by some algae and fungi\nn13092987\ta sporangium that arises from a group of epidermal cells\nn13093275\ta sporangium containing four asexual spores\nn13093629\tcell or organ in which gametes develop\nn13094145\tcluster of sporangia usually on underside of a fern frond\nn13094273\ta spore-producing structure in certain lichens and fungi\nn13095013\tmembrane of the young sporophore of various mushrooms extending from the margin of the cap to the stem and is ruptured by growth; represented in mature 
mushroom by an annulus around the stem and sometimes a cortina on the margin of the cap\nn13096779\twoody tissue\nn13098515\ta sheet of vascular tissue separating the vascular bundles\nn13098962\t(botany) tissue that conducts synthesized food substances (e.g., from leaves) to parts where needed; consists primarily of sieve tubes\nn13099833\ta plant having foliage that persists and remains green throughout the year\nn13099999\ta plant having foliage that is shed annually at the end of the growing season\nn13100156\ta plant that when touched or ingested in sufficient quantity can be harmful or fatal to an organism\nn13100677\ta plant with a weak stem that derives support from climbing, twining, or creeping along a surface\nn13102648\tany plant (as ivy or periwinkle) that grows by creeping\nn13102775\tslender stem-like structure by which some twining plants attach themselves to an object for support\nn13103023\ta plant that climbs by its adventitious roots e.g. ivy\nn13103660\ta category in some early taxonomies\nn13103750\thaving the shape or characteristics of a tree\nn13103877\ta dead tree that is still standing, usually in an undisturbed forest\nn13104059\ta tall perennial woody plant having a main trunk and branches forming a distinct elevated crown; includes both gymnosperms and angiosperms\nn13107694\tany tree that is valued as a source of lumber or timber\nn13107807\ta small tree\nn13107891\ttree (as opposed to shrub)\nn13108131\tany of several trees having seedpods as fruits\nn13108323\ta tree with limbs cut back to promote a more bushy growth of foliage\nn13108481\tyoung tree\nn13108545\ta tree planted or valued chiefly for its shade from sunlight\nn13108662\tany tree of the division Gymnospermophyta\nn13108841\tany gymnospermous tree or shrub bearing cones\nn13109733\tany tree having seeds and ovules contained in the ovary\nn13110915\ttree bearing edible nuts\nn13111174\ttree bearing aromatic bark or berries\nn13111340\tany of several trees having leaves or 
bark used to allay fever or thought to indicate regions free of fever\nn13111504\tthe base part of a tree that remains standing after the tree has been felled\nn13111881\ta dwarfed ornamental tree or shrub grown in a tray or shallow pot\nn13112035\ta dwarfed evergreen conifer or shrub shaped to have flat-topped asymmetrical branches and grown in a container\nn13112201\tan artificial plant resembling a bonsai\nn13118330\ta low shrub\nn13118707\tlow-growing woody shrub or perennial with woody base\nn13119870\tany of various rough thorny shrubs or vines\nn13120211\ta woody climbing usually tropical plant\nn13120958\ta perennial plant that propagates by underground bulbs or tubers or corms\nn13121104\tplant adapted for life with a limited supply of water; compare hydrophyte and mesophyte\nn13121349\tland plant growing in surroundings having an average supply of water; compare xerophyte and hydrophyte\nn13122364\ta semiaquatic plant that grows in soft wet land; most are monocots: sedge, sphagnum, grasses, cattails, etc; possibly heath\nn13123309\ta plant that is an epiphyte for part of its life\nn13123431\tan epiphytic vine or tree whose aerial roots extend down the trunk of a supporting tree and coalesce around it eventually strangling the tree\nn13123841\tplant that grows on rocks or stony soil and derives nourishment from the atmosphere\nn13124358\tan organism that lives in and derives its nourishment from organic matter in stagnant or foul water\nn13124654\tplant capable of synthesizing its own food from simple organic substances\nn13125117\t(botany) the usually underground organ that lacks buds or leaves or nodes; absorbs water and mineral salts; usually it anchors the plant to the ground\nn13126050\t(botany) main root of a plant growing straight downward from the stem\nn13126856\ta root that grows from and supports the stem above the ground in plants such as mangroves\nn13127001\ta plant structure resembling a leaf\nn13127303\troot or part of a root used for plant 
propagation; especially that part of a grafted plant that supplies the roots\nn13127666\tcuttings of plants set in the ground to grow as hawthorn for hedges or vines\nn13127843\ta horizontal branch from the base of plant that produces new plants from buds at its tips\nn13128278\tplant growing from a tuber\nn13128582\ta horizontal plant stem with shoots above and roots below serving as a reproductive structure\nn13128976\taxis of a compound leaf or compound inflorescence\nn13129078\twoody stem of palms and tree ferns\nn13130014\ta flattened stem resembling and functioning as a leaf\nn13130161\tenlarged tip of a stem that bears the floral parts\nn13130726\terect leafless flower stalk growing directly from the ground as in a tulip\nn13131028\tflat-topped or rounded inflorescence characteristic of the family Umbelliferae in which the individual flower stalks arise from about the same point; youngest flowers are at the center\nn13131618\tthe slender stem that supports the blade of a leaf\nn13132034\tstalk bearing an inflorescence or solitary flower\nn13132156\ta small stalk bearing a single flower of an inflorescence; an ultimate division of a common peduncle\nn13132338\tan inflorescence consisting of a cluster of flowers\nn13132486\tusually elongate cluster of flowers along the main stem in which the flowers at the base open first\nn13132656\tcompound raceme or branched cluster of flowers\nn13132756\ta dense flower cluster (as of the lilac or horse chestnut) in which the main axis is racemose and the branches are cymose\nn13132940\tmore or less flat-topped cluster of flowers in which the central or terminal flower opens first\nn13133140\ta small cyme, generally with few flowers\nn13133233\ta compacted or sessile cyme\nn13133316\ta cyme with flowers or branches alternating in opposite ranks\nn13133613\tfruiting spike of a cereal plant especially corn\nn13133932\tthe fleshy axis of a spike often surrounded by a spathe\nn13134302\tplant growing from a 
bulb\nn13134531\tsmall bulb or bulb-shaped growth arising from the leaf axil or in the place of flowers\nn13134844\tplant growing from a corm\nn13134947\tthe ripened reproductive body of a seed plant\nn13135692\ta diminutive fruit, especially one that is part of a multiple fruit\nn13135832\ta small hard fruit\nn13136316\tany of various seeds or fruits that are beans or resemble beans\nn13136556\tusually large hard-shelled seed\nn13136781\ta small nut\nn13137010\tthe inner and usually edible part of a seed or grain or nut or fruit stone\nn13137225\tthe fleshy multiple fruit of the fig consisting of an enlarged hollow receptacle containing numerous fruitlets\nn13137409\ta small fruit having any of various structures, e.g., simple (grape or blueberry) or aggregate (blackberry or raspberry)\nn13137672\tfruit consisting of many individual small fruits or drupes derived from separate ovaries within a common receptacle: e.g. blackberry; raspberry; pineapple\nn13137951\tan indehiscent fruit derived from a single ovary having one or many seeds within a fleshy wall or pericarp: e.g. grape; tomato; cranberry\nn13138155\tone of the small drupes making up an aggregate or multiple fruit like a blackberry\nn13138308\tfleshy indehiscent fruit with a single seed: e.g. almond; peach; plum; cherry; elderberry; olive; jujube\nn13138658\ta small part of an aggregate fruit that resembles a drupe\nn13138842\ta fleshy fruit (apple or pear or related fruits) having seed chambers and an outer fleshy part\nn13139055\ta several-seeded dehiscent fruit as e.g. 
of a leguminous plant\nn13139321\tseedpods that are constricted between the seeds and that break apart when mature into single-seeded segments\nn13139482\tfruit of such plants as the plantain; a capsule whose upper part falls off when the seeds are released\nn13139647\touter membranous covering of some fruits or seeds\nn13139837\tthe husk of an ear of corn\nn13140049\tthe vessel that contains the seeds of a plant (not the seeds themselves)\nn13140367\tfruit containing much fleshy tissue besides that of the ripened ovary; as apple or strawberry\nn13141141\ta shrub or shrubby tree of the genus Rhamnus; fruits are source of yellow dyes or pigments\nn13141415\tfruit of various buckthorns yielding dyes or pigments\nn13141564\tshrubby tree of the Pacific coast of the United States; yields cascara sagrada\nn13141797\tdried bark of the cascara buckthorn used as a laxative\nn13141972\tdeciduous shrub of eastern and central United States having black berrylike fruit; golden-yellow in autumn\nn13142182\tevergreen shrub of western United States bearing small red or black fruits\nn13142504\tsmall spiny evergreen shrub of western United States and Mexico with minute flowers and bright red berries\nn13142907\tany of several small to medium-sized trees of Florida and West Indies with thin scaly bark and heavy dark heartwood\nn13143285\tspiny tree having dark red edible fruits\nn13143758\tthorny Eurasian shrub with dry woody winged fruit\nn13144084\tAustralian tree grown especially for ornament and its fine-grained wood and bearing edible nuts\nn13145040\tnative grape of northeastern United States; origin of many cultivated varieties e.g. 
Concord grapes\nn13145250\tnative grape of southeastern United States; origin of many cultivated varieties\nn13145444\tcommon European grape cultivated in many varieties; chief source of Old World wine and table grapes\nn13146403\twhite wine grape; grown especially in California for making wines resembling those from Chablis, France\nn13146583\tsmall blue-black grape of Medoc region of France highly prized in winemaking\nn13146928\twhite wine grape grown in California\nn13147153\twhite grape grown especially in the valley the Loire in France\nn13147270\twhite grape grown in Europe and California\nn13147386\tsmall black grape grown chiefly in California; transplanted from Europe\nn13147532\twhite grape grown especially in California and the lower Loire valley of France\nn13147689\tused to make malmsey wine\nn13147918\ta variety of white wine grape grown in Italy\nn13148208\tAsiatic vine with three-lobed leaves and purple berries\nn13148384\tcommon North American vine with compound leaves and bluish-black berrylike fruit\nn13149296\tany of various shrubby vines of the genus Piper\nn13149970\tAsian pepper plant whose dried leaves are chewed with betel nut (seed of the betel palm) by southeast Asians\nn13150378\tspicy fruit of the cubeb vine; when dried and crushed is used medicinally or in perfumery and sometimes smoked in cigarettes\nn13150592\ta dry dehiscent fruit that at maturity splits into two or more parts each with a single seed\nn13150894\tany of various plants of the genus Peperomia; grown primarily for their often succulent foliage\nn13151082\tgrown as a houseplant for its silvery striped fleshy foliage; South America\nn13152339\tstoloniferous herb of southwestern United States and Mexico having a pungent rootstock and small spicate flowers with white bracts suggesting an anemone\nn13154388\tdivision of a usually pinnately divided leaf\nn13154494\tcompound leaf of a fern or palm or cycad\nn13154841\ta modified leaf or leaflike part just below and protecting 
an inflorescence\nn13155095\ta small bract\nn13155305\ta highly conspicuous bract or bract pair or ring of bracts at the base of an inflorescence\nn13155611\tsmall dry membranous bract found in inflorescences of Gramineae and Cyperaceae\nn13156986\ta leaf resembling an open hand; having lobes radiating from a common point\nn13157137\ta leaf resembling a feather; having the leaflets on each side of a common axis\nn13157346\ta pinnate leaf having two pairs of leaflets\nn13157481\ta leaf having divisions that are themselves compound\nn13157684\ta leaf narrowing to a slender point\nn13157971\ta simple leaf shaped like a capital delta\nn13158167\ta sword-shaped leaf; as of iris\nn13158512\ta long slender leaf\nn13158605\ta simple leaf having curvature suggestive of a lyre\nn13158714\ta simple leaf having a rounded or blunt tip\nn13158815\ta leaf having a rounded apex and tapering base\nn13159357\ta fiddle-shaped leaf\nn13159691\ta simple kidney-shaped leaf\nn13159890\tspatula-shaped leaf; having a broad rounded apex and narrow base\nn13160116\ta pinnate leaf with a pair of leaflets at the apex\nn13160254\ta pinnate leaf with a single leaflet at the apex\nn13160365\ta leaf having the radiating lobes each deeply cleft or divided\nn13160604\ta leaf having a scalloped margin\nn13160831\ta leaf having a toothed margin\nn13160938\ta leaf having a finely toothed margin; minutely dentate\nn13161151\ta leaf having a jagged margin as though gnawed\nn13161254\ta leaf having incised margins with the lobes or teeth curved toward the base; as a dandelion leaf\nn13161904\ta leaf having prickly margins\nn13163553\ta branch or a part of a tree that is dead\nn13163649\tstems of beans and peas and potatoes and grasses collectively as used for thatching and bedding\nn13163991\ta small branch or division of a branch (especially a terminal division); usually applied to branches of the current or preceding year\nn13164501\tflexible twig of a willow tree\nn13170840\tlarge scrambling fern 
forming large patches to 18 feet high; Pacific region and China\nn13171210\tlarge Australasian fern with fanlike repeatedly forked fronds; sometimes placed in genus Gleichenia\nn13171797\taquatic fern of tropical America often used in aquariums\nn13172923\tany of numerous ferns of the genus Polypodium\nn13173132\tfern having rootstock of a sweetish flavor\nn13173259\tfern growing on rocks or tree trunks and having fronds greyish and scurfy below; Americas and South Africa\nn13173488\tstiff leathery-leaved fern of western North America having ovate fronds parted to the midrib\nn13173697\tchiefly lithophytic or epiphytic fern of North America and east Asia\nn13173882\tmat-forming lithophytic or terrestrial fern with creeping rootstocks and large pinnatifid fronds found throughout North America and Europe and Africa and east Asia\nn13174354\tepiphytic fern with large fronds; Taiwan and Philippines\nn13174670\tfern with long narrow strap-shaped leaves\nn13174823\tcommon epiphytic or sometimes terrestrial fern having pale yellow-green strap-shaped leaves; Florida to West Indies and Mexico and south to Uruguay\nn13175682\tgiant epiphytic or lithophytic fern; Asia to Polynesia and Australia\nn13176363\tepiphytic ferns with long rhizomes; tropical America\nn13176714\ttropical Africa to Australasia and Polynesia\nn13177048\ttropical American fern with brown scaly rhizomes cultivated for its large deeply lobed deep bluish-green fronds; sometimes placed in genus Polypodium\nn13177529\tany of various tropical ferns of the genus Platycerium having large flat lobed fronds often resembling the antlers of a stag\nn13177768\tfern of Peru and Bolivia\nn13177884\tcommonly cultivated fern of Australia and southeastern Asia and Polynesia\nn13178284\teast Asian fern having fronds shaped like tongues; sometimes placed in genus Cyclophorus\nn13178707\tsmall epiphytic fern of South America with tuberous swellings along rhizomes\nn13179056\tplant that affords shelter or food to ants that 
live in symbiotic relations with it\nn13179804\tepiphytic fern found in lowland forests of tropical America\nn13180534\tany of various chiefly rock-inhabiting ferns of the genus Asplenium\nn13180875\tspleenwort of Europe and Africa and Asia having pinnate fronds and yielding an astringent\nn13181055\ttropical Old World or Australian epiphytic fern frequently forming tufts in tree crotches\nn13181244\tcommon North American fern with polished black stripes\nn13181406\tfern of tropical America: from southern United States to West Indies and Mexico to Brazil\nn13181811\tferns having lanceolate fronds that root at the tip\nn13182164\ta small fern with slim green fronds; widely distributed in cool parts of northern hemisphere\nn13182338\ta spleenwort of eastern North America\nn13182799\ta spleenwort of eastern and southern United States\nn13182937\ta spleenwort of western Europe\nn13183056\tEurasian fern with simple lanceolate fronds\nn13183489\tsmall European fern with chaffy leathery fronds\nn13184394\ta fern thought to resemble a millipede\nn13185269\tfern with erect fronds of Europe and western North America; often cultivated for deer browse\nn13185658\tany fern of the genus Doodia having pinnate fronds with sharply dentate pinnae\nn13186388\ta fern of the genus Woodwardia having the sori in chainlike rows\nn13186546\tNorth American fern\nn13187367\ta showy tree fern of New Zealand and Australia having a crown of pinnated fronds with whitish undersides\nn13188096\tany fern of the genus Davallia; having scaly creeping rhizomes\nn13188268\teither of two ferns of the genus Davallia having a soft grey hairy rootstock\nn13188462\tfern of the Canary Islands and Madeira\nn13188767\tfeathery fern of tropical Asia and Malaysia\nn13190060\tfern of southeastern Asia; not hardy in cold temperate regions\nn13190747\tof Australia and Tasmania; often cultivated; hardy in cool climates\nn13191148\tAsiatic tree fern having dense matted hairs sometimes used as a 
styptic\nn13191620\tresembles Pteridium aquilinum; of Queensland, Australia\nn13191884\ta terrestrial tree fern of South America\nn13192625\tany of various ferns of the genera Dryopteris or Polystichum or Lastreopsis having somewhat shield-shaped coverings on the sori\nn13193143\tEuropean shield fern\nn13193269\tfern or northern Eurasia and North America having fragrant fronds\nn13193466\tNorth American fern with a blackish lustrous stipe\nn13193642\tany of various ferns of the genus Dryopteris\nn13193856\tfern of North America and Europe whose rhizomes and stalks yield an oleoresin used to expel tapeworms\nn13194036\tNorth American fern with evergreen fronds\nn13194212\ta fern of the genus Dryopteris\nn13194572\tmost widely grown fern of the genus Athyrium for its delicate foliage\nn13194758\ta lady fern with deeply cut leaf segments; found in the Rocky Mountains\nn13194918\tNorth American fern with narrow fronds on yellowish leafstalks\nn13195341\ttropical Old World fern having glossy fronds suggestive of holly; sometimes placed in genus Polystichum\nn13195761\tany fern of the genus Cystopteris characterized by a hooded indusium or bladderlike membrane covering the sori\nn13196003\tdelicate fern widely distributed in North America and European having thin pinnatifid fronds with brittle stems\nn13196234\tfern of rocky mountainous areas of hemisphere\nn13196369\tNorth American fern often bearing bulbils on the leaflets\nn13196738\tfern with elongate silvery outgrowths enclosing the developing spores\nn13197274\tbright blue-green fern widely distributed especially in damp acid woodlands of temperate northern hemisphere\nn13197507\tyellow-green fern of rocky areas of northern hemisphere\nn13198054\ttall fern of northern temperate regions having graceful arched fronds and sporophylls resembling ostrich plumes\nn13198482\ttropical American terrestrial fern with leathery lanceolate fronds; sometimes placed in genus Polybotrya\nn13198914\tbeautiful spreading fern of 
eastern North America and eastern Asia naturalized in western Europe; pinnately divided fronds show a slight tendency to fold when touched; pinnules enclose groups of sori in beadlike lobes\nn13199717\tNorth American evergreen fern having pinnate leaves and dense clusters of lance-shaped fronds\nn13199970\tany of various ferns of the genus Polystichum having fronds with texture and gloss like holly\nn13200193\tNorth American fern whose more or less evergreen leathery fronds are covered with pale brown chafflike scales\nn13200542\tNorth American fern\nn13200651\tEuropean shield fern cultivated in many varieties\nn13200986\twidely distributed fern of tropical southern hemisphere having leathery pinnatifid fronds\nn13201423\tJamaican fern having round buttonlike bulbils\nn13201566\tfern of tropical Asia having round buttonlike bulbils\nn13201969\tany fern of the genus Woodsia\nn13202125\ta common rock-inhabiting fern of northern temperate regions having rusty-brown stipes and lanceolate pinnate fronds\nn13202355\tslender fern of northern North America with shining chestnut-colored stipes and bipinnate fronds with usually distinct marginal sori\nn13202602\trock-inhabiting fern of Arctic and subarctic Europe to eastern Asia\nn13205058\ta sword fern with arching or drooping pinnate fronds; a popular houseplant\nn13205249\ttropical American fern cultivated for its finely divided greyish-green foliage; West Indies and southern Mexico to Peru and Brazil\nn13206178\tstout tropical swamp fern (especially tropical America) having large fronds with golden yellow sporangia covering the undersides\nn13206817\tany of various small to large terrestrial ferns of the genus Adiantum having delicate palmately branched fronds\nn13207094\tdelicate maidenhair fern with slender shining black leaf stalks; cosmopolitan\nn13207335\thardy palmately branched North American fern with divergent recurved branches borne on lustrous dark reddish stipes\nn13207572\tdelicate endemic Bermudian fern 
with creeping rootstock\nn13207736\ttropical American fern with broad pinnae; widely cultivated\nn13207923\tnamed for a country house in Barbados where it was discovered\nn13208302\tsmall short-lived fern of Central and South America\nn13208705\tany of various terrestrial ferns of the genus Cheilanthes; cosmopolitan in arid and semiarid temperate or tropical regions\nn13208965\tsoutheastern United States to northern Mexico and Jamaica\nn13209129\tsmall tufted fern of northwestern America\nn13209270\tsmall North American evergreen fern whose stipes and lower frond surfaces are densely wooly\nn13209460\tlip fern of Texas to Oklahoma and Colorado and Arizona and Mexico having tall erect tufted fronds\nn13209808\tfast-growing sturdy Japanese fern; cultivated for their attractive broad dark-green pinnate fronds\nn13210350\trock-inhabiting fern of northern North America growing in massive tufts and having fronds resembling parsley\nn13210597\tfern of Europe and Asia Minor having short slender rhizome and densely tufted bright green fronds resembling parsley\nn13211020\ttropical American fern with coarsely lobed to palmatifid fronds\nn13211790\tany of several small lithophytic ferns of tropical and warm temperate regions\nn13212025\tevergreen fern of California and Baja California\nn13212175\tvery short shallowly creeping North American fern usually growing on cliffs or walls and having dark glossy leaf axes\nn13212379\tcliff brake of California and Baja California having purple-brown leafstalks\nn13212559\tfern of New Zealand and Australia having trailing fronds with dark green buttonlike leaflets\nn13213066\tfern of southern tropical Africa having fronds with white undersides\nn13213397\ttropical American fern having fronds with light golden undersides\nn13213577\tfern of West Indies and South America having fronds with bright golden-yellow undersides\nn13214217\tcultivated in many varieties as houseplants\nn13214340\tAsiatic fern introduced in America\nn13214485\tfern 
of North Africa and Azores and Canary Islands\nn13215258\tlarge Australasian evergreen fern with an edible rhizome sometimes used as a vegetable by indigenous people\nn13215586\thighly variable species of very large primitive ferns of the Pacific tropical areas with high rainfall\nn13217005\tpantropical epiphytic or terrestrial whisk fern with usually dull yellow branches and minute leaves; America; Japan; Australia\nn13219422\tperennial rushlike flowerless herbs with jointed hollow stems and narrow toothlike leaves that spread by creeping rhizomes; tend to become weedy; common in northern hemisphere; some in Africa and South America\nn13219833\tof Eurasia and Greenland and North America\nn13219976\tEurasia; northern North America to Virginia\nn13220122\tevergreen erect horsetail with rough-edged stems; formerly used for scouring utensils\nn13220355\tscouring-rush horsetail widely distributed in wet or boggy areas of northern hemisphere\nn13220525\tEurasia except southern Russia; northern North America\nn13220663\tnorthern North America; Greenland; northern and central Europe\nn13221529\tprimitive evergreen moss-like plant with spores in club-shaped strobiles\nn13222877\ta variety of club moss\nn13222985\ta variety of club moss\nn13223090\tof northern Europe and America; resembling a miniature fir\nn13223588\ta variety of club moss\nn13223710\ta variety of club moss\nn13223843\tground pine thickly covered with bristly leaves; widely distributed in barren sandy or peaty moist coastal regions of eastern and southeastern United States\nn13224673\tany of numerous fern allies of the genus Selaginella\nn13224922\tspikemoss forming dense mats; eastern North America\nn13225244\tprostrate spikemoss; California\nn13225365\tdensely tufted fern ally of southwestern United States to Peru; curls up in a tight ball when dry and expands and grows under moist conditions\nn13225617\toccurs widely in Florida\nn13226320\tany of several spore-bearing aquatic or marsh plants having 
short rhizomes and leaves resembling quills; worldwide except Polynesia\nn13226871\tany club-shaped fungus of the genus Geoglossum\nn13228017\tfern of northeastern North America\nn13228536\tany of several tropical ferns of the genus Christella having thin brittle fronds\nn13229543\tcommon European mountain fern having fragrant lemon or balsam scented fronds\nn13229951\tslender shield fern of moist woods of eastern North America; sometimes placed in genus Dryopteris\nn13230190\tdelicate feathery shield fern of the eastern United States; sometimes placed in genus Thelypteris\nn13230662\tany fern of the genus Phegopteris having deeply cut triangular fronds\nn13230843\tbeech fern of North American woodlands having straw-colored stripes\nn13231078\tbeech fern of North America and Eurasia\nn13231678\tany of several fungi of the genus Armillaria that form brown stringy rhizomorphs and cause destructive rot of the roots of some trees such as apples or maples\nn13231919\tfungus with a brown cap and white gills and a membranous ring halfway up the stalk\nn13232106\ta large white mushroom that develops brown stains as it ages; gills are white; odor is spicy and aromatic; collected commercially for oriental cooking the Pacific Northwest\nn13232363\ta large fungus with viscid cap that dries and turns brown with age; gills are off-white\nn13232779\ta honey-colored edible mushroom commonly associated with the roots of trees in late summer and fall; do not eat raw\nn13233727\tany of numerous plants of the genus Asclepias having milky juice and pods that split open releasing seeds with downy tufts\nn13234114\ttall herb with leafless white waxy stems and whitish starlike flowers; southwestern United States\nn13234519\tmilkweed of the eastern United States with leaves resembling those of pokeweed\nn13234678\tdensely branching perennial of the eastern United States with white to crimson or purple flowers\nn13234857\tmilkweed of central North America; a threatened 
species\nn13235011\tperennial of eastern North America having pink-purple flowers\nn13235159\tmilkweed of southern North America having large starry purple and pink flowers\nn13235319\tmilkweed of southwestern United States and Mexico; poisonous to livestock\nn13235503\terect perennial of eastern and southern United States having showy orange flowers\nn13235766\tmilkweed of the eastern United States with narrow leaves in whorls and greenish-white flowers\nn13236100\trobust twining shrub having racemes of fragrant white or pink flowers with flat spreading terminal petals that trap nocturnal moths and hold them until dawn\nn13237188\tsucculent climber of southern Asia with umbels of pink and white star-shaped flowers\nn13237508\tdeciduous climber for arches and fences having ill-scented but interesting flowers and poisonous yellow fruits; cultivated for its dark shining foliage; southeastern Europe to Asia Minor\nn13238375\tany of various plants of the genus Stapelia having succulent leafless toothed stems resembling cacti and large foul-smelling (often star-shaped) flowers\nn13238654\tstapelia of Cape Province having mostly dark red-brown flowers with flat starlike corollas\nn13238988\tany of various evergreen climbing shrubs of the genus Stephanotis having fragrant waxy flowers\nn13239177\ttwining woody vine of Madagascar having thick dark waxy evergreen leaves and clusters of large fragrant waxy white flowers along the stems; widely cultivated in warm regions\nn13239736\ttwining vine with hairy foliage and dark purplish-brown flowers\nn13239921\ta plant spore formed by two similar sexual cells\nn13240362\tthe biblical tree in the Garden of Eden whose forbidden fruit was tasted by Adam and Eve\nn13252672\ta place where oranges are grown; a plantation of orange trees in warm climes or a greenhouse in cooler areas\nn13354021\tyour personal financial means\nn13555775\ta coarse term for defecation\nn13579829\tthe amount of wood in an area as measured in 
cords\nn13650447\ta unit of length equal to 3 feet; defined as 91.44 centimeters; originally taken to be the average length of a stride\nn13653902\tthe most extreme possible amount or value\nn13862407\tany of the various shape that leaves of plants can assume\nn13862552\ta figure whose sides are all equal\nn13862780\ta combination of points and lines and planes that form a visible palpable shape\nn13863020\ta figure formed by a set of straight lines or light rays meeting at a point\nn13863186\ta two-dimensional shape\nn13863473\ta three-dimensional shape\nn13863771\ta length (straight or curved) without breadth or thickness; the trace of a moving point\nn13864035\tanything with a round shape resembling a teardrop\nn13864153\ta shape that curves or bulges outward\nn13864965\ta shape that curves or bends inward\nn13865298\ta solid bounded by a cylindrical surface and two parallel planes (the bases)\nn13865483\ta shape that is curved and without sharp angles\nn13865904\ta plane figure with rounded sides curving inward at the top and intersecting at the bottom; conventionally used on playing cards and valentines\nn13866144\ta closed plane figure bounded by straight sides\nn13866626\ta polygon such that no side extended cuts any other side or vertex; it can be cut by a straight line in at most two points\nn13866827\ta polygon such that there is a straight line that cuts it in four or more points\nn13867005\ta polygon with one or more reentrant angles\nn13867492\tan ill-defined or arbitrary shape\nn13868248\ta curve (such as a circle) having no endpoints\nn13868371\ta closed curve that does not intersect itself\nn13868515\ta double curve resembling the letter S\nn13868944\tan undulating curve\nn13869045\tthe exterior curve of an arch\nn13869547\ta sharp curve or crook; a shape resembling a hook\nn13869788\ta curve that is tangent to each of a family of curves\nn13869896\ta bend or curve (especially in a coastline)\nn13871717\ta straight line connecting the center of a 
circle with two points on its perimeter (or the center of a sphere with two points on its surface)\nn13872592\ta shape whose base is a circle and whose sides taper up to a point\nn13872822\ta conical shape with a wider and a narrower opening at the two ends\nn13873361\ta plane figure that deviates from a square or circle due to elongation\nn13873502\tellipse in which the two axes are of equal length; a plane curve generated by one point moving at a constant distance from a fixed point\nn13873917\tsomething approximating the shape of a circle\nn13874073\ta circle dividing a sphere or other surface into two usually equal and symmetrical parts\nn13874558\tone of a series of rounded projections (or the notches between them) formed by curves along an edge (as the edge of a leaf or piece of cloth or the margin of a shell or a shriveled red blood cell observed in a hypertonic solution etc.)\nn13875392\ta toroidal shape\nn13875571\tanything with a round or oval shape (formed by a curve that is closed and does not intersect itself)\nn13875884\ta loop in a rope\nn13876561\ta curve that lies on the surface of a cylinder or cone and cuts the element at a constant angle\nn13877547\ta straight line joining the apex and a point on the base\nn13877667\ta straight line running the length of the cylinder\nn13878306\ta closed plane curve resulting from the intersection of a circular cone and a plane cutting completely through it\nn13879049\ta square-shaped object\nn13879320\ta three-sided polygon\nn13879816\ta triangle whose interior angles are all acute\nn13880199\ta triangle with two equal sides\nn13880415\ta triangle that contains an obtuse interior angle\nn13880551\ta triangle with one right angle\nn13880704\ta triangle with no two sides of equal length\nn13880994\t(mathematics) one of a set of parallel geometric figures (parallel lines or planes)\nn13881512\ta quadrilateral with two parallel sides\nn13881644\ta plane figure with 5 or more points; often used as an 
emblem\nn13882201\ta five-sided polygon\nn13882276\ta six-sided polygon\nn13882487\ta seven-sided polygon\nn13882563\tan eight-sided polygon\nn13882639\ta nine-sided polygon\nn13882713\ta polygon with 10 sides and 10 angles\nn13882961\ta parallelogram with four equal sides; an oblique-angled equilateral parallelogram\nn13883603\ta figure on the surface of a sphere bounded by arcs of 3 or more great circles\nn13883763\ta spherical polygon formed by the arcs of 3 great circles\nn13884261\ta polyhedron any plane section of which is a convex polygon\nn13884384\ta polyhedron some of whose plane sections are concave polygons\nn13884930\ta rectangular parallelepiped\nn13885011\ta prism whose bases are quadrangles\nn13886260\tthe shape of a bell\nn13888491\tthe angular separation between two objects as perceived by an observer\nn13889066\tthe angular distance of a point in an orbit past the point of periapsis measured in degrees\nn13889331\tan angle formed at the intersection of the arcs of two great circles\nn13891547\tthe angle between a refracted ray and a line perpendicular to the surface between the two media at the point of refraction\nn13891937\tan angle less than 90 degrees but more than 0 degrees\nn13893786\ta long narrow furrow cut either by a natural process (such as erosion) or by a tool (as e.g. 
a groove in a phonograph record)\nn13894154\ta groove or furrow (especially one in soft earth caused by wheels)\nn13894434\tsomething that bulges out or is protuberant or projects from its surroundings\nn13895262\ta part that bulges deeply\nn13896100\tsomething curved in shape\nn13896217\tany shape resembling the curved shape of the moon in its first or last quarters\nn13897198\ta surface whose plane sections are all ellipses or circles\nn13897528\tthe side of a right triangle opposite the right angle\nn13897996\tequality of distribution\nn13898207\ta symmetrical arrangement of the parts of a thing\nn13898315\tbalance among the parts of something\nn13898645\ta shape that is generated by rotating an ellipse around one of its axes\nn13899735\ta small sphere\nn13900287\tthe doughnut-shaped object enclosed by a torus\nn13900422\tanything that approximates the shape of a column or tower\nn13901211\ta bulging cylindrical shape; hollow with flat ends\nn13901321\ta hollow cylindrical shape\nn13901423\ta small sphere\nn13901490\ta small round soft mass (as of chewed food)\nn13901858\ta drop of dew\nn13902048\tany long raised strip\nn13902336\tthe shape of a raised edge of a more or less circular object\nn13902793\ta convex shape that narrows toward a point\nn13903079\ta line determining the limits of an area\nn13905121\t(anatomy) a notch or small hollow\nn13905275\ta V-shaped indentation\nn13905792\ta slight depression in the smoothness of a surface\nn13906484\tthe lines that form patterns on the skin (especially on the fingertips and the palms of the hands and the soles of the feet)\nn13906669\ta facial wrinkle associated with frowning\nn13906767\ta crease on the palm; its length is said by palmists to indicate how long you will live\nn13906936\ta crease on the palm; palmists say it indicates your emotional nature\nn13907272\ta long narrow depression in a surface\nn13908201\ta split or indentation in something (as the palate or chin)\nn13908580\ta line generated by a point 
on one figure rolling around a second figure\nn13911045\ta connecting point at which several lines come together\nn13912260\ta figure that branches from a single root\nn13912540\ta tree diagram showing a reconstruction of the transmission of manuscripts of a literary work\nn13914141\t(biology) a branching or armlike part of an animal\nn13914265\tthe region of the angle formed by the junction of two branches\nn13914608\ta three-dimensional shape with six square or rectangular sides\nn13915023\tan egg-shaped object\nn13915113\tany polyhedron having four plane faces\nn13915209\tany polyhedron having five plane faces\nn13915305\tany polyhedron having six plane faces\nn13915999\tany one of five solids whose faces are congruent regular polygons and whose polyhedral angles are all congruent\nn13916363\tthe space enclosed by three or more planes that intersect in a vertex\nn13916721\ta hexahedron with six equal squares as faces\nn13917690\ta frustum formed from a pyramid\nn13917785\ta frustum formed from a cone\nn13918274\tany projection that resembles the tail of an animal\nn13918387\tany long thin projection that is transient\nn13918717\ta polyhedron whose faces are trapeziums\nn13919547\tany shape that is triangular in cross section\nn13919919\ta projection or ridge that suggests a keel\nn13926786\ta particular situation\nn14131950\tviral diseases causing eruptions of the skin or mucous membrane\nn14175579\ta sexually transmitted infection caused by bacteria of the genus Chlamydia\nn14564779\ta difficult or awkward situation\nn14582716\ta substance needed only in small amounts for normal body function (e.g., vitamins or minerals)\nn14583400\ta semiliquid mass of partially digested food that passes from the stomach through the pyloric sphincter into the duodenum\nn14585392\tpollen of the ragweed plant is a common allergen\nn14592309\ta fine cloth made from pineapple fibers\nn14603798\ta tear gas that is stronger than CN gas but wears off faster; can be deployed by 
grenades or cluster bombs; can cause skin burns and fatal pulmonary edema\nn14633206\tan abundant nonmetallic tetravalent element occurring in three allotropic forms: amorphous carbon and graphite and diamond; occurs in all organic compounds\nn14685296\ta carbonaceous material obtained by heating wood or other organic matter in the absence of air\nn14696793\tmaterial consisting of the aggregate of minerals like those making up the Earth's crust\nn14698884\trock fragments and pebbles\nn14714645\ta potent carcinogen from the fungus Aspergillus; can be produced and stored for use as a bioweapon\nn14720833\ta potent form of vitamin E obtained from germ oils or by synthesis\nn14765422\tthe pelt of a leopard\nn14785065\tbuilding material consisting of bricks laid with mortar between them\nn14786943\tused to wrap around pipes or boilers or laid in attics to prevent loss of heat\nn14804958\ta cement that hardens under water; made by heating limestone and clay in a kiln and pulverizing the result\nn14810561\ta B-complex vitamin that is a constituent of lecithin; essential in the metabolism of fat\nn14820180\ta strong hard building material composed of sand and gravel and cement and water\nn14821852\tglass fibers spun and massed into bundles resembling wool\nn14844693\tthe part of the earth's surface consisting of humus and disintegrated rock\nn14853210\ta powerful chemical explosive that produces gas at a very high rate\nn14858292\trubbish carelessly dropped or left about (especially in public places)\nn14867545\tground dried fish used as fertilizer and as feed for domestic livestock\nn14891255\ta mixture used by Byzantine Greeks that was often shot at adversaries; catches fire when wetted\nn14899328\t(bacteriology) a nutrient substance (solid or liquid) that is used to cultivate micro-organisms\nn14900184\tany culture medium that uses agar as the gelling agent\nn14900342\ta culture medium containing whole blood as the nutrient\nn14908027\ta tile shaped so as to cover the 
hip of a hip roof\nn14909584\ta red transparent variety of zircon used as a gemstone\nn14914945\tthe anion OH having one oxygen and one hydrogen atom\nn14915184\twater frozen in the solid state\nn14919819\tan optically inactive alcohol that is a component of the vitamin B complex\nn14938389\ta floor covering\nn14941787\tmineral water containing lithium salts\nn14942411\ta permanent magnet consisting of magnetite that possess polarity and has the power to attract as well as to be attracted magnetically\nn14973585\ta vitamin of the vitamin B complex that performs an important role in the oxidation of fats and carbohydrates and certain amino acids; occurs in many foods\nn14974264\ta material made of cellulose pulp derived mainly from wood or rags or certain grasses\nn14975598\tpaper made from the papyrus plant by cutting it in strips and pressing it flat; used by ancient Egyptians and Greeks and Romans\nn14976759\ta roofing tile with a S-shape; laid so that curves overlap\nn14976871\ta black bituminous material used for paving roads or other areas; usually spread over crushed rock\nn14977188\ta paving material of tar and broken stone; mixed in a factory and shaped during paving\nn14977504\tmaterial used to pave an area\nn14992287\ta mixture of lime or gypsum with sand and water; hardens into a smooth solid; used to cover walls and ceilings\nn14993378\ta gas that is poisonous to breath or contact; used in chemical warfare\nn15005577\ta decorative tile that is bent in cross section; used to cover the ridge of a roof\nn15006012\ta coarse plaster for the surface of external walls\nn15019030\ta loose material consisting of grains of rock or coral\nn15048888\tpowder (containing gypsum plaster and glue) that when mixed with water forms a plastic paste used to fill cracks and holes in plaster\nn15060326\ta substance similar to stucco but exclusively applied to masonry walls\nn15060688\tbuilding material consisting of interwoven rods and twigs covered with clay\nn15062057\ta 
plaster now made mostly from Portland cement and sand and lime; applied while soft to cover exterior walls or surfaces\nn15067877\ta gas that makes the eyes fill with tears but does not damage them; used in dispersing crowds\nn15075141\ta soft thin absorbent paper for use in toilets\nn15086247\tthe seed of flax used as a source of oil\nn15089258\tany of a group of organic substances essential in small quantities to normal metabolism\nn15089472\tany vitamin that is soluble in fats\nn15089645\tany vitamin that is soluble in water\nn15089803\tany of several fat-soluble vitamins essential for normal vision; prevents night blindness or inflammation or dryness of the eyes\nn15090065\tan unsaturated alcohol that occurs in marine fish-liver oils and is synthesized biologically from carotene\nn15090238\ta viscous alcohol that is less active in mammals than is vitamin A1\nn15090742\toriginally thought to be a single vitamin but now separated into several B vitamins\nn15091129\ta B vitamin that prevents beriberi; maintains appetite and growth\nn15091304\ta B vitamin that is used to treat pernicious anemia\nn15091473\ta B vitamin that prevents skin lesions and weight loss\nn15091669\ta B vitamin that is essential for metabolism of amino acids and starch\nn15091846\ta B vitamin that is essential for cell growth and reproduction\nn15092059\ta B vitamin essential for the normal function of the nervous system and the gastrointestinal tract\nn15092227\ta fat-soluble vitamin that prevents rickets\nn15092409\ta fat-soluble vitamin that is essential for normal reproduction; an important antioxidant that neutralizes free radicals in the body\nn15092650\ta B vitamin that aids in body growth\nn15092751\ta fat-soluble vitamin that helps in the clotting of blood\nn15092942\ta form of vitamin K\nn15093049\ta form of vitamin K\nn15093137\ta vitamin that maintains the resistance of cell and capillary walls to permeation\nn15093298\ta vitamin found in fresh fruits (especially citrus fruits) 
and vegetables; prevents scurvy\nn15102359\tplanks collectively; a quantity of planks\nn15102455\ta cheap hard material made from wood chips that are pressed together and bound with synthetic resin\nn15102894\ta hole in a board where a knot came out\n"
  },
  {
    "path": "timm/data/_info/imagenet_synset_to_lemma.txt",
    "content": "n00004475\torganism, being\nn00005787\tbenthos\nn00006024\theterotroph\nn00006484\tcell\nn00007846\tperson, individual, someone, somebody, mortal, soul\nn00015388\tanimal, animate being, beast, brute, creature, fauna\nn00017222\tplant, flora, plant life\nn00021265\tfood, nutrient\nn00021939\tartifact, artefact\nn00120010\thop\nn00141669\tcheck-in\nn00288000\tdressage\nn00288190\tcurvet, vaulting\nn00288384\tpiaffe\nn00324978\tfunambulism, tightrope walking\nn00326094\trock climbing\nn00433458\tcontact sport\nn00433661\toutdoor sport, field sport\nn00433802\tgymnastics, gymnastic exercise\nn00434075\tacrobatics, tumbling\nn00439826\ttrack and field\nn00440039\ttrack, running\nn00440218\tjumping\nn00440382\tbroad jump, long jump\nn00440509\thigh jump\nn00440643\tFosbury flop\nn00440747\tskiing\nn00440941\tcross-country skiing\nn00441073\tski jumping\nn00441824\twater sport, aquatics\nn00442115\tswimming, swim\nn00442437\tbathe\nn00442847\tdip, plunge\nn00442981\tdive, diving\nn00443231\tfloating, natation\nn00443375\tdead-man's float, prone float\nn00443517\tbelly flop, belly flopper, belly whop, belly whopper\nn00443692\tcliff diving\nn00443803\tflip\nn00443917\tgainer, full gainer\nn00444142\thalf gainer\nn00444340\tjackknife\nn00444490\tswan dive, swallow dive\nn00444651\tskin diving, skin-dive\nn00444846\tscuba diving\nn00444937\tsnorkeling, snorkel diving\nn00445055\tsurfing, surfboarding, surfriding\nn00445226\twater-skiing\nn00445351\trowing, row\nn00445685\tsculling\nn00445802\tboxing, pugilism, fisticuffs\nn00446311\tprofessional boxing\nn00446411\tin-fighting\nn00446493\tfight\nn00446632\trope-a-dope\nn00446804\tspar, sparring\nn00446980\tarchery\nn00447073\tsledding\nn00447221\ttobogganing\nn00447361\tluging\nn00447463\tbobsledding\nn00447540\twrestling, rassling, grappling\nn00447957\tGreco-Roman wrestling\nn00448126\tprofessional wrestling\nn00448232\tsumo\nn00448466\tskating\nn00448640\tice skating\nn00448748\tfigure 
skating\nn00448872\trollerblading\nn00448958\troller skating\nn00449054\tskateboarding\nn00449168\tspeed skating\nn00449295\tracing\nn00449517\tauto racing, car racing\nn00449695\tboat racing\nn00449796\thydroplane racing\nn00449892\tcamel racing\nn00449977\tgreyhound racing\nn00450070\thorse racing\nn00450335\triding, horseback riding, equitation\nn00450700\tequestrian sport\nn00450866\tpony-trekking\nn00450998\tshowjumping, stadium jumping\nn00451186\tcross-country riding, cross-country jumping\nn00451370\tcycling\nn00451563\tbicycling\nn00451635\tmotorcycling\nn00451768\tdune cycling\nn00451866\tblood sport\nn00452034\tbullfighting, tauromachy\nn00452152\tcockfighting\nn00452293\thunt, hunting\nn00452734\tbattue\nn00452864\tbeagling\nn00453126\tcoursing\nn00453313\tdeer hunting, deer hunt\nn00453396\tducking, duck hunting\nn00453478\tfox hunting, foxhunt\nn00453631\tpigsticking\nn00453935\tfishing, sportfishing\nn00454237\tangling\nn00454395\tfly-fishing\nn00454493\ttroll, trolling\nn00454624\tcasting, cast\nn00454855\tbait casting\nn00454983\tfly casting\nn00455076\tovercast\nn00455173\tsurf casting, surf fishing\nn00456465\tday game\nn00463246\tathletic game\nn00463543\tice hockey, hockey, hockey game\nn00464277\ttetherball\nn00464478\twater polo\nn00464651\toutdoor game\nn00464894\tgolf, golf game\nn00466273\tprofessional golf\nn00466377\tround of golf, round\nn00466524\tmedal play, stroke play\nn00466630\tmatch play\nn00466712\tminiature golf\nn00466880\tcroquet\nn00467320\tquoits, horseshoes\nn00467536\tshuffleboard, shovelboard\nn00467719\tfield game\nn00467995\tfield hockey, hockey\nn00468299\tshinny, shinney\nn00468480\tfootball, football game\nn00469651\tAmerican football, American football game\nn00470554\tprofessional football\nn00470682\ttouch football\nn00470830\thurling\nn00470966\trugby, rugby football, rugger\nn00471437\tball game, ballgame\nn00471613\tbaseball, baseball game\nn00474568\tball\nn00474657\tprofessional 
baseball\nn00474769\thardball\nn00474881\tperfect game\nn00475014\tno-hit game, no-hitter\nn00475142\tone-hitter, 1-hitter\nn00475273\ttwo-hitter, 2-hitter\nn00475403\tthree-hitter, 3-hitter\nn00475535\tfour-hitter, 4-hitter\nn00475661\tfive-hitter, 5-hitter\nn00475787\tsoftball, softball game\nn00476140\trounders\nn00476235\tstickball, stickball game\nn00476389\tcricket\nn00477392\tlacrosse\nn00477639\tpolo\nn00477827\tpushball\nn00478262\tsoccer, association football\nn00479076\tcourt game\nn00479440\thandball\nn00479616\tracquetball\nn00479734\tfives\nn00479887\tsquash, squash racquets, squash rackets\nn00480211\tvolleyball, volleyball game\nn00480366\tjai alai, pelota\nn00480508\tbadminton\nn00480885\tbattledore, battledore and shuttlecock\nn00480993\tbasketball, basketball game, hoops\nn00481803\tprofessional basketball\nn00481938\tdeck tennis\nn00482122\tnetball\nn00482298\ttennis, lawn tennis\nn00483205\tprofessional tennis\nn00483313\tsingles\nn00483409\tsingles\nn00483508\tdoubles\nn00483605\tdoubles\nn00483705\troyal tennis, real tennis, court tennis\nn00483848\tpallone\nn00523513\tsport, athletics\nn00812526\tclasp, clench, clutch, clutches, grasp, grip, hold\nn00825773\tjudo\nn00887544\tteam sport\nn01035504\tLast Supper, Lord's Supper\nn01035667\tSeder, Passover supper\nn01055165\tcamping, encampment, bivouacking, tenting\nn01314388\tpest\nn01314663\tcritter\nn01314781\tcreepy-crawly\nn01314910\tdarter\nn01315213\tpeeper\nn01315330\thomeotherm, homoiotherm, homotherm\nn01315581\tpoikilotherm, ectotherm\nn01315805\trange animal\nn01316422\tscavenger\nn01316579\tbottom-feeder, bottom-dweller\nn01316734\tbottom-feeder\nn01316949\twork animal\nn01317089\tbeast of burden, jument\nn01317294\tdraft animal\nn01317391\tpack animal, sumpter\nn01317541\tdomestic animal, domesticated animal\nn01317813\tfeeder\nn01317916\tfeeder\nn01318053\tstocker\nn01318279\thatchling\nn01318381\thead\nn01318478\tmigrator\nn01318660\tmolter, 
moulter\nn01318894\tpet\nn01319001\tstayer\nn01319187\tstunt\nn01319467\tmarine animal, marine creature, sea animal, sea creature\nn01319685\tby-catch, bycatch\nn01320872\tfemale\nn01321123\then\nn01321230\tmale\nn01321456\tadult\nn01321579\tyoung, offspring\nn01321770\torphan\nn01321854\tyoung mammal\nn01322221\tbaby\nn01322343\tpup, whelp\nn01322508\twolf pup, wolf cub\nn01322604\tpuppy\nn01322685\tcub, young carnivore\nn01322898\tlion cub\nn01322983\tbear cub\nn01323068\ttiger cub\nn01323155\tkit\nn01323261\tsuckling\nn01323355\tsire\nn01323493\tdam\nn01323599\tthoroughbred, purebred, pureblood\nn01323781\tgiant\nn01324305\tmutant\nn01324431\tcarnivore\nn01324610\therbivore\nn01324799\tinsectivore\nn01324916\tacrodont\nn01325060\tpleurodont\nn01326291\tmicroorganism, micro-organism\nn01327909\tmonohybrid\nn01329186\tarbovirus, arborvirus\nn01330126\tadenovirus\nn01330497\tarenavirus\nn01332181\tMarburg virus\nn01333082\tArenaviridae\nn01333483\tvesiculovirus\nn01333610\tReoviridae\nn01334217\tvariola major, variola major virus\nn01334690\tviroid, virusoid\nn01335218\tcoliphage\nn01337191\tparamyxovirus\nn01337734\tpoliovirus\nn01338685\therpes, herpes virus\nn01339083\therpes simplex 1, HS1, HSV-1, HSV-I\nn01339336\therpes zoster, herpes zoster virus\nn01339471\therpes varicella zoster, herpes varicella zoster virus\nn01339801\tcytomegalovirus, CMV\nn01340014\tvaricella zoster virus\nn01340522\tpolyoma, polyoma virus\nn01340785\tlyssavirus\nn01340935\treovirus\nn01341090\trotavirus\nn01342269\tmoneran, moneron\nn01347583\tarchaebacteria, archaebacterium, archaeobacteria, archeobacteria\nn01349735\tbacteroid\nn01350226\tBacillus anthracis, anthrax bacillus\nn01350701\tYersinia pestis\nn01351170\tBrucella\nn01351315\tspirillum, spirilla\nn01357328\tbotulinus, botulinum, Clostridium botulinum\nn01357507\tclostridium perfringens\nn01358572\tcyanobacteria, blue-green algae\nn01359762\ttrichodesmium\nn01362336\tnitric bacteria, 
nitrobacteria\nn01363719\tspirillum\nn01365474\tFrancisella, genus Francisella\nn01365885\tgonococcus, Neisseria gonorrhoeae\nn01366700\tCorynebacterium diphtheriae, C. diphtheriae, Klebs-Loeffler bacillus\nn01367772\tenteric bacteria, enterobacteria, enterics, entric\nn01368672\tklebsiella\nn01369358\tSalmonella typhimurium\nn01369484\ttyphoid bacillus, Salmonella typhosa, Salmonella typhi\nn01374703\tnitrate bacterium, nitric bacterium\nn01374846\tnitrite bacterium, nitrous bacterium\nn01375204\tactinomycete\nn01376237\tstreptomyces\nn01376437\tStreptomyces erythreus\nn01376543\tStreptomyces griseus\nn01377278\ttubercle bacillus, Mycobacterium tuberculosis\nn01377510\tpus-forming bacteria\nn01377694\tstreptobacillus\nn01378545\tmyxobacteria, myxobacterium, myxobacter, gliding bacteria, slime bacteria\nn01379389\tstaphylococcus, staphylococci, staph\nn01380610\tdiplococcus\nn01380754\tpneumococcus, Diplococcus pneumoniae\nn01381044\tstreptococcus, streptococci, strep\nn01382033\tspirochete, spirochaete\nn01384084\tplanktonic algae\nn01384164\tzooplankton\nn01384687\tparasite\nn01385017\tendoparasite, entoparasite, entozoan, entozoon, endozoan\nn01385330\tectoparasite, ectozoan, ectozoon, epizoan, epizoon\nn01386007\tpathogen\nn01386182\tcommensal\nn01386354\tmyrmecophile\nn01387065\tprotoctist\nn01389507\tprotozoan, protozoon\nn01390123\tsarcodinian, sarcodine\nn01390763\theliozoan\nn01392275\tendameba\nn01392380\tameba, amoeba\nn01393486\tglobigerina\nn01394040\ttestacean\nn01394492\tarcella\nn01394771\tdifflugia\nn01395254\tciliate, ciliated protozoan, ciliophoran\nn01396048\tparamecium, paramecia\nn01396617\tstentor\nn01397114\talga, algae\nn01397690\tarame\nn01397871\tseagrass\nn01400247\tgolden algae\nn01400391\tyellow-green algae\nn01402600\tbrown algae\nn01403457\tkelp\nn01404365\tfucoid, fucoid algae\nn01404495\tfucoid\nn01405007\tfucus\nn01405616\tbladderwrack, Ascophyllum nodosum\nn01407798\tgreen algae, chlorophyte\nn01410457\tpond 
scum\nn01411450\tchlorella\nn01412694\tstonewort\nn01413457\tdesmid\nn01414216\tsea moss\nn01415626\teukaryote, eucaryote\nn01415920\tprokaryote, procaryote\nn01416213\tzooid\nn01418498\tLeishmania, genus Leishmania\nn01418620\tzoomastigote, zooflagellate\nn01419332\tpolymastigote\nn01419573\tcostia, Costia necatrix\nn01419888\tgiardia\nn01421333\tcryptomonad, cryptophyte\nn01421807\tsporozoan\nn01422185\tsporozoite\nn01422335\ttrophozoite\nn01422450\tmerozoite\nn01423302\tcoccidium, eimeria\nn01423617\tgregarine\nn01424420\tplasmodium, Plasmodium vivax, malaria parasite\nn01425223\tleucocytozoan, leucocytozoon\nn01427399\tmicrosporidian\nn01429172\tOstariophysi, order Ostariophysi\nn01438208\tcypriniform fish\nn01438581\tloach\nn01439121\tcyprinid, cyprinid fish\nn01439514\tcarp\nn01439808\tdomestic carp, Cyprinus carpio\nn01440160\tleather carp\nn01440242\tmirror carp\nn01440467\tEuropean bream, Abramis brama\nn01440764\ttench, Tinca tinca\nn01441117\tdace, Leuciscus leuciscus\nn01441272\tchub, Leuciscus cephalus\nn01441425\tshiner\nn01441910\tcommon shiner, silversides, Notropis cornutus\nn01442450\troach, Rutilus rutilus\nn01442710\trudd, Scardinius erythrophthalmus\nn01442972\tminnow, Phoxinus phoxinus\nn01443243\tgudgeon, Gobio gobio\nn01443537\tgoldfish, Carassius auratus\nn01443831\tcrucian carp, Carassius carassius, Carassius vulgaris\nn01444339\telectric eel, Electrophorus electric\nn01444783\tcatostomid\nn01445429\tbuffalo fish, buffalofish\nn01445593\tblack buffalo, Ictiobus niger\nn01445857\thog sucker, hog molly, Hypentelium nigricans\nn01446152\tredhorse, redhorse sucker\nn01446589\tcyprinodont\nn01446760\tkillifish\nn01447139\tmummichog, Fundulus heteroclitus\nn01447331\tstriped killifish, mayfish, may fish, Fundulus majalis\nn01447658\trivulus\nn01447946\tflagfish, American flagfish, Jordanella floridae\nn01448291\tswordtail, helleri, topminnow, Xyphophorus helleri\nn01448594\tguppy, rainbow fish, Lebistes reticulatus\nn01448951\ttopminnow, 
poeciliid fish, poeciliid, live-bearer\nn01449374\tmosquitofish, Gambusia affinis\nn01449712\tplaty, Platypoecilus maculatus\nn01449980\tmollie, molly\nn01450661\tsquirrelfish\nn01450950\treef squirrelfish, Holocentrus coruscus\nn01451115\tdeepwater squirrelfish, Holocentrus bullisi\nn01451295\tHolocentrus ascensionis\nn01451426\tsoldierfish, soldier-fish\nn01451863\tanomalops, flashlight fish\nn01452345\tflashlight fish, Photoblepharon palpebratus\nn01453087\tJohn Dory, Zeus faber\nn01453475\tboarfish, Capros aper\nn01453742\tboarfish\nn01454545\tcornetfish\nn01454856\tstickleback, prickleback\nn01455317\tthree-spined stickleback, Gasterosteus aculeatus\nn01455461\tten-spined stickleback, Gasterosteus pungitius\nn01455778\tpipefish, needlefish\nn01456137\tdwarf pipefish, Syngnathus hildebrandi\nn01456454\tdeepwater pipefish, Cosmocampus profundus\nn01456756\tseahorse, sea horse\nn01457082\tsnipefish, bellows fish\nn01457407\tshrimpfish, shrimp-fish\nn01457852\ttrumpetfish, Aulostomus maculatus\nn01458746\tpellicle\nn01458842\tembryo, conceptus, fertilized egg\nn01459791\tfetus, foetus\nn01460303\tabortus\nn01461315\tspawn\nn01461646\tblastula, blastosphere\nn01462042\tblastocyst, blastodermic vessicle\nn01462544\tgastrula\nn01462803\tmorula\nn01464844\tyolk, vitellus\nn01466257\tchordate\nn01467336\tcephalochordate\nn01467804\tlancelet, amphioxus\nn01468238\ttunicate, urochordate, urochord\nn01468712\tascidian\nn01469103\tsea squirt\nn01469723\tsalp, salpa\nn01470145\tdoliolum\nn01470479\tlarvacean\nn01470733\tappendicularia\nn01470895\tascidian tadpole\nn01471682\tvertebrate, craniate\nn01472303\tAmniota\nn01472502\tamniote\nn01473806\taquatic vertebrate\nn01474283\tjawless vertebrate, jawless fish, agnathan\nn01474864\tostracoderm\nn01475232\theterostracan\nn01475940\tanaspid\nn01476418\tconodont\nn01477080\tcyclostome\nn01477525\tlamprey, lamprey eel, lamper eel\nn01477875\tsea lamprey, Petromyzon marinus\nn01478511\thagfish, hag, slime eels\nn01478969\tMyxine 
glutinosa\nn01479213\teptatretus\nn01479820\tgnathostome\nn01480106\tplacoderm\nn01480516\tcartilaginous fish, chondrichthian\nn01480880\tholocephalan, holocephalian\nn01481331\tchimaera\nn01481498\trabbitfish, Chimaera monstrosa\nn01482071\telasmobranch, selachian\nn01482330\tshark\nn01483021\tcow shark, six-gilled shark, Hexanchus griseus\nn01483522\tmackerel shark\nn01483830\tporbeagle, Lamna nasus\nn01484097\tmako, mako shark\nn01484285\tshortfin mako, Isurus oxyrhincus\nn01484447\tlongfin mako, Isurus paucus\nn01484562\tbonito shark, blue pointed, Isurus glaucus\nn01484850\tgreat white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias\nn01485479\tbasking shark, Cetorhinus maximus\nn01486010\tthresher, thrasher, thresher shark, fox shark, Alopius vulpinus\nn01486540\tcarpet shark, Orectolobus barbatus\nn01486838\tnurse shark, Ginglymostoma cirratum\nn01487506\tsand tiger, sand shark, Carcharias taurus, Odontaspis taurus\nn01488038\twhale shark, Rhincodon typus\nn01488918\trequiem shark\nn01489501\tbull shark, cub shark, Carcharhinus leucas\nn01489709\tsandbar shark, Carcharhinus plumbeus\nn01489920\tblacktip shark, sandbar shark, Carcharhinus limbatus\nn01490112\twhitetip shark, oceanic whitetip shark, white-tipped shark, Carcharinus longimanus\nn01490360\tdusky shark, Carcharhinus obscurus\nn01490670\tlemon shark, Negaprion brevirostris\nn01491006\tblue shark, great blue shark, Prionace glauca\nn01491361\ttiger shark, Galeocerdo cuvieri\nn01491661\tsoupfin shark, soupfin, soup-fin, Galeorhinus zyopterus\nn01491874\tdogfish\nn01492357\tsmooth dogfish\nn01492569\tsmoothhound, smoothhound shark, Mustelus mustelus\nn01492708\tAmerican smooth dogfish, Mustelus canis\nn01492860\tFlorida smoothhound, Mustelus norrisi\nn01493146\twhitetip shark, reef whitetip shark, Triaenodon obseus\nn01493541\tspiny dogfish\nn01493829\tAtlantic spiny dogfish, Squalus acanthias\nn01494041\tPacific spiny dogfish, Squalus suckleyi\nn01494475\thammerhead, 
hammerhead shark\nn01494757\tsmooth hammerhead, Sphyrna zygaena\nn01494882\tsmalleye hammerhead, Sphyrna tudes\nn01495006\tshovelhead, bonnethead, bonnet shark, Sphyrna tiburo\nn01495493\tangel shark, angelfish, Squatina squatina, monkfish\nn01495701\tray\nn01496331\telectric ray, crampfish, numbfish, torpedo\nn01497118\tsawfish\nn01497413\tsmalltooth sawfish, Pristis pectinatus\nn01497738\tguitarfish\nn01498041\tstingray\nn01498406\troughtail stingray, Dasyatis centroura\nn01498699\tbutterfly ray\nn01498989\teagle ray\nn01499396\tspotted eagle ray, spotted ray, Aetobatus narinari\nn01499732\tcownose ray, cow-nosed ray, Rhinoptera bonasus\nn01500091\tmanta, manta ray, devilfish\nn01500476\tAtlantic manta, Manta birostris\nn01500854\tdevil ray, Mobula hypostoma\nn01501160\tskate\nn01501641\tgrey skate, gray skate, Raja batis\nn01501777\tlittle skate, Raja erinacea\nn01501948\tthorny skate, Raja radiata\nn01502101\tbarndoor skate, Raja laevis\nn01503061\tbird\nn01503976\tdickeybird, dickey-bird, dickybird, dicky-bird\nn01504179\tfledgling, fledgeling\nn01504344\tnestling, baby bird\nn01514668\tcock\nn01514752\tgamecock, fighting cock\nn01514859\then\nn01514926\tnester\nn01515078\tnight bird\nn01515217\tnight raven\nn01515303\tbird of passage\nn01516212\tarchaeopteryx, archeopteryx, Archaeopteryx lithographica\nn01517389\tarchaeornis\nn01517565\tratite, ratite bird, flightless bird\nn01517966\tcarinate, carinate bird, flying bird\nn01518878\tostrich, Struthio camelus\nn01519563\tcassowary\nn01519873\temu, Dromaius novaehollandiae, Emu novaehollandiae\nn01520576\tkiwi, apteryx\nn01521399\trhea, Rhea americana\nn01521756\trhea, nandu, Pterocnemia pennata\nn01522450\telephant bird, aepyornis\nn01523105\tmoa\nn01524359\tpasserine, passeriform bird\nn01524761\tnonpasserine bird\nn01525720\toscine, oscine bird\nn01526521\tsongbird, songster\nn01526766\thoney eater, honeysucker\nn01527194\taccentor\nn01527347\thedge sparrow, sparrow, dunnock, Prunella 
modularis\nn01527617\tlark\nn01527917\tskylark, Alauda arvensis\nn01528396\twagtail\nn01528654\tpipit, titlark, lark\nn01528845\tmeadow pipit, Anthus pratensis\nn01529672\tfinch\nn01530439\tchaffinch, Fringilla coelebs\nn01530575\tbrambling, Fringilla montifringilla\nn01531178\tgoldfinch, Carduelis carduelis\nn01531344\tlinnet, lintwhite, Carduelis cannabina\nn01531512\tsiskin, Carduelis spinus\nn01531639\tred siskin, Carduelis cucullata\nn01531811\tredpoll, Carduelis flammea\nn01531971\tredpoll, Carduelis hornemanni\nn01532325\tNew World goldfinch, goldfinch, yellowbird, Spinus tristis\nn01532511\tpine siskin, pine finch, Spinus pinus\nn01532829\thouse finch, linnet, Carpodacus mexicanus\nn01533000\tpurple finch, Carpodacus purpureus\nn01533339\tcanary, canary bird\nn01533481\tcommon canary, Serinus canaria\nn01533651\tserin\nn01533893\tcrossbill, Loxia curvirostra\nn01534155\tbullfinch, Pyrrhula pyrrhula\nn01534433\tjunco, snowbird\nn01534582\tdark-eyed junco, slate-colored junco, Junco hyemalis\nn01534762\tNew World sparrow\nn01535140\tvesper sparrow, grass finch, Pooecetes gramineus\nn01535469\twhite-throated sparrow, whitethroat, Zonotrichia albicollis\nn01535690\twhite-crowned sparrow, Zonotrichia leucophrys\nn01536035\tchipping sparrow, Spizella passerina\nn01536186\tfield sparrow, Spizella pusilla\nn01536334\ttree sparrow, Spizella arborea\nn01536644\tsong sparrow, Melospiza melodia\nn01536780\tswamp sparrow, Melospiza georgiana\nn01537134\tbunting\nn01537544\tindigo bunting, indigo finch, indigo bird, Passerina cyanea\nn01537895\tortolan, ortolan bunting, Emberiza hortulana\nn01538059\treed bunting, Emberiza schoeniclus\nn01538200\tyellowhammer, yellow bunting, Emberiza citrinella\nn01538362\tyellow-breasted bunting, Emberiza aureola\nn01538630\tsnow bunting, snowbird, snowflake, Plectrophenax nivalis\nn01538955\thoneycreeper\nn01539272\tbanana quit\nn01539573\tsparrow, true sparrow\nn01539925\tEnglish sparrow, house sparrow, Passer 
domesticus\nn01540090\ttree sparrow, Passer montanus\nn01540233\tgrosbeak, grossbeak\nn01540566\tevening grosbeak, Hesperiphona vespertina\nn01540832\thawfinch, Coccothraustes coccothraustes\nn01541102\tpine grosbeak, Pinicola enucleator\nn01541386\tcardinal, cardinal grosbeak, Richmondena Cardinalis, Cardinalis cardinalis, redbird\nn01541760\tpyrrhuloxia, Pyrrhuloxia sinuata\nn01541922\ttowhee\nn01542168\tchewink, cheewink, Pipilo erythrophthalmus\nn01542433\tgreen-tailed towhee, Chlorura chlorura\nn01542786\tweaver, weaverbird, weaver finch\nn01543175\tbaya, Ploceus philippinus\nn01543383\twhydah, whidah, widow bird\nn01543632\tJava sparrow, Java finch, ricebird, Padda oryzivora\nn01543936\tavadavat, amadavat\nn01544208\tgrassfinch, grass finch\nn01544389\tzebra finch, Poephila castanotis\nn01544704\thoneycreeper, Hawaiian honeycreeper\nn01545574\tlyrebird\nn01546039\tscrubbird, scrub-bird, scrub bird\nn01546506\tbroadbill\nn01546921\ttyrannid\nn01547832\tNew World flycatcher, flycatcher, tyrant flycatcher, tyrant bird\nn01548301\tkingbird, Tyrannus tyrannus\nn01548492\tArkansas kingbird, western kingbird\nn01548694\tCassin's kingbird, Tyrannus vociferans\nn01548865\teastern kingbird\nn01549053\tgrey kingbird, gray kingbird, petchary, Tyrannus domenicensis domenicensis\nn01549430\tpewee, peewee, peewit, pewit, wood pewee, Contopus virens\nn01549641\twestern wood pewee, Contopus sordidulus\nn01549886\tphoebe, phoebe bird, Sayornis phoebe\nn01550172\tvermillion flycatcher, firebird, Pyrocephalus rubinus mexicanus\nn01550761\tcotinga, chatterer\nn01551080\tcock of the rock, Rupicola rupicola\nn01551300\tcock of the rock, Rupicola peruviana\nn01551711\tmanakin\nn01552034\tbellbird\nn01552333\tumbrella bird, Cephalopterus ornatus\nn01552813\tovenbird\nn01553142\tantbird, ant bird\nn01553527\tant thrush\nn01553762\tant shrike\nn01554017\tspotted antbird, Hylophylax naevioides\nn01554448\twoodhewer, woodcreeper, wood-creeper, tree 
creeper\nn01555004\tpitta\nn01555305\tscissortail, scissortailed flycatcher, Muscivora-forficata\nn01555809\tOld World flycatcher, true flycatcher, flycatcher\nn01556182\tspotted flycatcher, Muscicapa striata, Muscicapa grisola\nn01556514\tthickhead, whistler\nn01557185\tthrush\nn01557962\tmissel thrush, mistle thrush, mistletoe thrush, Turdus viscivorus\nn01558149\tsong thrush, mavis, throstle, Turdus philomelos\nn01558307\tfieldfare, snowbird, Turdus pilaris\nn01558461\tredwing, Turdus iliacus\nn01558594\tblackbird, merl, merle, ouzel, ousel, European blackbird, Turdus merula\nn01558765\tring ouzel, ring blackbird, ring thrush, Turdus torquatus\nn01558993\trobin, American robin, Turdus migratorius\nn01559160\tclay-colored robin, Turdus greyi\nn01559477\thermit thrush, Hylocichla guttata\nn01559639\tveery, Wilson's thrush, Hylocichla fuscescens\nn01559804\twood thrush, Hylocichla mustelina\nn01560105\tnightingale, Luscinia megarhynchos\nn01560280\tthrush nightingale, Luscinia luscinia\nn01560419\tbulbul\nn01560636\tOld World chat, chat\nn01560793\tstonechat, Saxicola torquata\nn01560935\twhinchat, Saxicola rubetra\nn01561181\tsolitaire\nn01561452\tredstart, redtail\nn01561732\twheatear\nn01562014\tbluebird\nn01562265\trobin, redbreast, robin redbreast, Old World robin, Erithacus rubecola\nn01562451\tbluethroat, Erithacus svecicus\nn01563128\twarbler\nn01563449\tgnatcatcher\nn01563746\tkinglet\nn01563945\tgoldcrest, golden-crested kinglet, Regulus regulus\nn01564101\tgold-crowned kinglet, Regulus satrata\nn01564217\truby-crowned kinglet, ruby-crowned wren, Regulus calendula\nn01564394\tOld World warbler, true warbler\nn01564773\tblackcap, Silvia atricapilla\nn01564914\tgreater whitethroat, whitethroat, Sylvia communis\nn01565078\tlesser whitethroat, whitethroat, Sylvia curruca\nn01565345\twood warbler, Phylloscopus sibilatrix\nn01565599\tsedge warbler, sedge bird, sedge wren, reedbird, Acrocephalus schoenobaenus\nn01565930\twren warbler\nn01566207\ttailorbird, 
Orthotomus sutorius\nn01566645\tbabbler, cackler\nn01567133\tNew World warbler, wood warbler\nn01567678\tparula warbler, northern parula, Parula americana\nn01567879\tWilson's warbler, Wilson's blackcap, Wilsonia pusilla\nn01568132\tflycatching warbler\nn01568294\tAmerican redstart, redstart, Setophaga ruticilla\nn01568720\tCape May warbler, Dendroica tigrina\nn01568892\tyellow warbler, golden warbler, yellowbird, Dendroica petechia\nn01569060\tBlackburn, Blackburnian warbler, Dendroica fusca\nn01569262\tAudubon's warbler, Audubon warbler, Dendroica auduboni\nn01569423\tmyrtle warbler, myrtle bird, Dendroica coronata\nn01569566\tblackpoll, Dendroica striate\nn01569836\tNew World chat, chat\nn01569971\tyellow-breasted chat, Icteria virens\nn01570267\tovenbird, Seiurus aurocapillus\nn01570421\twater thrush\nn01570676\tyellowthroat\nn01570839\tcommon yellowthroat, Maryland yellowthroat, Geothlypis trichas\nn01571410\triflebird, Ptloris paradisea\nn01571904\tNew World oriole, American oriole, oriole\nn01572328\tnorthern oriole, Icterus galbula\nn01572489\tBaltimore oriole, Baltimore bird, hangbird, firebird, Icterus galbula galbula\nn01572654\tBullock's oriole, Icterus galbula bullockii\nn01572782\torchard oriole, Icterus spurius\nn01573074\tmeadowlark, lark\nn01573240\teastern meadowlark, Sturnella magna\nn01573360\twestern meadowlark, Sturnella neglecta\nn01573627\tcacique, cazique\nn01573898\tbobolink, ricebird, reedbird, Dolichonyx oryzivorus\nn01574045\tNew World blackbird, blackbird\nn01574390\tgrackle, crow blackbird\nn01574560\tpurple grackle, Quiscalus quiscula\nn01574801\trusty blackbird, rusty grackle, Euphagus carilonus\nn01575117\tcowbird\nn01575401\tred-winged blackbird, redwing, Agelaius phoeniceus\nn01575745\tOld World oriole, oriole\nn01576076\tgolden oriole, Oriolus oriolus\nn01576358\tfig-bird\nn01576695\tstarling\nn01577035\tcommon starling, Sturnus vulgaris\nn01577458\trose-colored starling, rose-colored pastor, Pastor sturnus, Pastor 
roseus\nn01577659\tmyna, mynah, mina, minah, myna bird, mynah bird\nn01577941\tcrested myna, Acridotheres tristis\nn01578180\thill myna, Indian grackle, grackle, Gracula religiosa\nn01578575\tcorvine bird\nn01579028\tcrow\nn01579149\tAmerican crow, Corvus brachyrhyncos\nn01579260\traven, Corvus corax\nn01579410\trook, Corvus frugilegus\nn01579578\tjackdaw, daw, Corvus monedula\nn01579729\tchough\nn01580077\tjay\nn01580379\tOld World jay\nn01580490\tcommon European jay, Garullus garullus\nn01580772\tNew World jay\nn01580870\tblue jay, jaybird, Cyanocitta cristata\nn01581166\tCanada jay, grey jay, gray jay, camp robber, whisker jack, Perisoreus canadensis\nn01581434\tRocky Mountain jay, Perisoreus canadensis capitalis\nn01581730\tnutcracker\nn01581874\tcommon nutcracker, Nucifraga caryocatactes\nn01581984\tClark's nutcracker, Nucifraga columbiana\nn01582220\tmagpie\nn01582398\tEuropean magpie, Pica pica\nn01582498\tAmerican magpie, Pica pica hudsonia\nn01582856\tAustralian magpie\nn01583209\tbutcherbird\nn01583495\tcurrawong, bell magpie\nn01583828\tpiping crow, piping crow-shrike, Gymnorhina tibicen\nn01584225\twren, jenny wren\nn01584695\twinter wren, Troglodytes troglodytes\nn01584853\thouse wren, Troglodytes aedon\nn01585121\tmarsh wren\nn01585287\tlong-billed marsh wren, Cistothorus palustris\nn01585422\tsedge wren, short-billed marsh wren, Cistothorus platensis\nn01585715\trock wren, Salpinctes obsoletus\nn01586020\tCarolina wren, Thryothorus ludovicianus\nn01586374\tcactus wren\nn01586941\tmockingbird, mocker, Mimus polyglotktos\nn01587278\tblue mockingbird, Melanotis caerulescens\nn01587526\tcatbird, grey catbird, gray catbird, Dumetella carolinensis\nn01587834\tthrasher, mocking thrush\nn01588002\tbrown thrasher, brown thrush, Toxostoma rufums\nn01588431\tNew Zealand wren\nn01588725\trock wren, Xenicus gilviventris\nn01588996\trifleman bird, Acanthisitta chloris\nn01589286\tcreeper, tree creeper\nn01589718\tbrown creeper, American creeper, Certhia 
americana\nn01589893\tEuropean creeper, Certhia familiaris\nn01590220\twall creeper, tichodrome, Tichodroma muriaria\nn01591005\tEuropean nuthatch, Sitta europaea\nn01591123\tred-breasted nuthatch, Sitta canadensis\nn01591301\twhite-breasted nuthatch, Sitta carolinensis\nn01591697\ttitmouse, tit\nn01592084\tchickadee\nn01592257\tblack-capped chickadee, blackcap, Parus atricapillus\nn01592387\ttufted titmouse, Parus bicolor\nn01592540\tCarolina chickadee, Parus carolinensis\nn01592694\tblue tit, tomtit, Parus caeruleus\nn01593028\tbushtit, bush tit\nn01593282\twren-tit, Chamaea fasciata\nn01593553\tverdin, Auriparus flaviceps\nn01594004\tfairy bluebird, bluebird\nn01594372\tswallow\nn01594787\tbarn swallow, chimney swallow, Hirundo rustica\nn01594968\tcliff swallow, Hirundo pyrrhonota\nn01595168\ttree swallow, tree martin, Hirundo nigricans\nn01595450\twhite-bellied swallow, tree swallow, Iridoprocne bicolor\nn01595624\tmartin\nn01595974\thouse martin, Delichon urbica\nn01596273\tbank martin, bank swallow, sand martin, Riparia riparia\nn01596608\tpurple martin, Progne subis\nn01597022\twood swallow, swallow shrike\nn01597336\ttanager\nn01597737\tscarlet tanager, Piranga olivacea, redbird, firebird\nn01597906\twestern tanager, Piranga ludoviciana\nn01598074\tsummer tanager, summer redbird, Piranga rubra\nn01598271\thepatic tanager, Piranga flava hepatica\nn01598588\tshrike\nn01598988\tbutcherbird\nn01599159\tEuropean shrike, Lanius excubitor\nn01599269\tnorthern shrike, Lanius borealis\nn01599388\twhite-rumped shrike, Lanius ludovicianus excubitorides\nn01599556\tloggerhead shrike, Lanius lucovicianus\nn01599741\tmigrant shrike, Lanius ludovicianus migrans\nn01600085\tbush shrike\nn01600341\tblack-fronted bush shrike, Chlorophoneus nigrifrons\nn01600657\tbowerbird, catbird\nn01601068\tsatin bowerbird, satin bird, Ptilonorhynchus violaceus\nn01601410\tgreat bowerbird, Chlamydera nuchalis\nn01601694\twater ouzel, dipper\nn01602080\tEuropean water ouzel, Cinclus 
aquaticus\nn01602209\tAmerican water ouzel, Cinclus mexicanus\nn01602630\tvireo\nn01602832\tred-eyed vireo, Vireo olivaceous\nn01603000\tsolitary vireo, Vireo solitarius\nn01603152\tblue-headed vireo, Vireo solitarius solitarius\nn01603600\twaxwing\nn01603812\tcedar waxwing, cedarbird, Bombycilla cedrorun\nn01603953\tBohemian waxwing, Bombycilla garrulus\nn01604330\tbird of prey, raptor, raptorial bird\nn01604968\tAccipitriformes, order Accipitriformes\nn01605630\thawk\nn01606097\teyas\nn01606177\ttiercel, tercel, tercelet\nn01606522\tgoshawk, Accipiter gentilis\nn01606672\tsparrow hawk, Accipiter nisus\nn01606809\tCooper's hawk, blue darter, Accipiter cooperii\nn01606978\tchicken hawk, hen hawk\nn01607309\tbuteonine\nn01607429\tredtail, red-tailed hawk, Buteo jamaicensis\nn01607600\trough-legged hawk, roughleg, Buteo lagopus\nn01607812\tred-shouldered hawk, Buteo lineatus\nn01607962\tbuzzard, Buteo buteo\nn01608265\thoney buzzard, Pernis apivorus\nn01608432\tkite\nn01608814\tblack kite, Milvus migrans\nn01609062\tswallow-tailed kite, swallow-tailed hawk, Elanoides forficatus\nn01609391\twhite-tailed kite, Elanus leucurus\nn01609751\tharrier\nn01609956\tmarsh harrier, Circus Aeruginosus\nn01610100\tMontagu's harrier, Circus pygargus\nn01610226\tmarsh hawk, northern harrier, hen harrier, Circus cyaneus\nn01610552\tharrier eagle, short-toed eagle\nn01610955\tfalcon\nn01611472\tperegrine, peregrine falcon, Falco peregrinus\nn01611674\tfalcon-gentle, falcon-gentil\nn01611800\tgyrfalcon, gerfalcon, Falco rusticolus\nn01611969\tkestrel, Falco tinnunculus\nn01612122\tsparrow hawk, American kestrel, kestrel, Falco sparverius\nn01612275\tpigeon hawk, merlin, Falco columbarius\nn01612476\thobby, Falco subbuteo\nn01612628\tcaracara\nn01612955\tAudubon's caracara, Polyborus cheriway audubonii\nn01613177\tcarancha, Polyborus plancus\nn01613294\teagle, bird of Jove\nn01613615\tyoung bird\nn01613807\teaglet\nn01614038\tharpy, harpy eagle, Harpia harpyja\nn01614343\tgolden eagle, 
Aquila chrysaetos\nn01614556\ttawny eagle, Aquila rapax\nn01614925\tbald eagle, American eagle, Haliaeetus leucocephalus\nn01615121\tsea eagle\nn01615303\tKamchatkan sea eagle, Stellar's sea eagle, Haliaeetus pelagicus\nn01615458\tern, erne, grey sea eagle, gray sea eagle, European sea eagle, white-tailed sea eagle, Haliatus albicilla\nn01615703\tfishing eagle, Haliaeetus leucorhyphus\nn01616086\tosprey, fish hawk, fish eagle, sea eagle, Pandion haliaetus\nn01616318\tvulture\nn01616551\tAegypiidae, family Aegypiidae\nn01616764\tOld World vulture\nn01617095\tgriffon vulture, griffon, Gyps fulvus\nn01617443\tbearded vulture, lammergeier, lammergeyer, Gypaetus barbatus\nn01617766\tEgyptian vulture, Pharaoh's chicken, Neophron percnopterus\nn01618082\tblack vulture, Aegypius monachus\nn01618503\tsecretary bird, Sagittarius serpentarius\nn01618922\tNew World vulture, cathartid\nn01619310\tbuzzard, turkey buzzard, turkey vulture, Cathartes aura\nn01619536\tcondor\nn01619835\tAndean condor, Vultur gryphus\nn01620135\tCalifornia condor, Gymnogyps californianus\nn01620414\tblack vulture, carrion crow, Coragyps atratus\nn01620735\tking vulture, Sarcorhamphus papa\nn01621127\towl, bird of Minerva, bird of night, hooter\nn01621635\towlet\nn01622120\tlittle owl, Athene noctua\nn01622352\thorned owl\nn01622483\tgreat horned owl, Bubo virginianus\nn01622779\tgreat grey owl, great gray owl, Strix nebulosa\nn01622959\ttawny owl, Strix aluco\nn01623110\tbarred owl, Strix varia\nn01623425\tscreech owl, Otus asio\nn01623615\tscreech owl\nn01623706\tscops owl\nn01623880\tspotted owl, Strix occidentalis\nn01624115\tOld World scops owl, Otus scops\nn01624212\tOriental scops owl, Otus sunia\nn01624305\thoot owl\nn01624537\thawk owl, Surnia ulula\nn01624833\tlong-eared owl, Asio otus\nn01625121\tlaughing owl, laughing jackass, Sceloglaux albifacies\nn01625562\tbarn owl, Tyto alba\nn01627424\tamphibian\nn01628331\tIchyostega\nn01628770\turodele, 
caudate\nn01629276\tsalamander\nn01629819\tEuropean fire salamander, Salamandra salamandra\nn01629962\tspotted salamander, fire salamander, Salamandra maculosa\nn01630148\talpine salamander, Salamandra atra\nn01630284\tnewt, triton\nn01630670\tcommon newt, Triturus vulgaris\nn01630901\tred eft, Notophthalmus viridescens\nn01631175\tPacific newt\nn01631354\trough-skinned newt, Taricha granulosa\nn01631512\tCalifornia newt, Taricha torosa\nn01631663\teft\nn01632047\tambystomid, ambystomid salamander\nn01632308\tmole salamander, Ambystoma talpoideum\nn01632458\tspotted salamander, Ambystoma maculatum\nn01632601\ttiger salamander, Ambystoma tigrinum\nn01632777\taxolotl, mud puppy, Ambystoma mexicanum\nn01632952\twaterdog\nn01633406\thellbender, mud puppy, Cryptobranchus alleganiensis\nn01633781\tgiant salamander, Megalobatrachus maximus\nn01634227\tolm, Proteus anguinus\nn01634522\tmud puppy, Necturus maculosus\nn01635027\tdicamptodon, dicamptodontid\nn01635176\tPacific giant salamander, Dicamptodon ensatus\nn01635480\tolympic salamander, Rhyacotriton olympicus\nn01636127\tlungless salamander, plethodont\nn01636352\teastern red-backed salamander, Plethodon cinereus\nn01636510\twestern red-backed salamander, Plethodon vehiculum\nn01636829\tdusky salamander\nn01637112\tclimbing salamander\nn01637338\tarboreal salamander, Aneides lugubris\nn01637615\tslender salamander, worm salamander\nn01637932\tweb-toed salamander\nn01638194\tShasta salamander, Hydromantes shastae\nn01638329\tlimestone salamander, Hydromantes brunus\nn01638722\tamphiuma, congo snake, congo eel, blind eel\nn01639187\tsiren\nn01639765\tfrog, toad, toad frog, anuran, batrachian, salientian\nn01640846\ttrue frog, ranid\nn01641206\twood-frog, wood frog, Rana sylvatica\nn01641391\tleopard frog, spring frog, Rana pipiens\nn01641577\tbullfrog, Rana catesbeiana\nn01641739\tgreen frog, spring frog, Rana clamitans\nn01641930\tcascades frog, Rana cascadae\nn01642097\tgoliath frog, Rana goliath\nn01642257\tpickerel 
frog, Rana palustris\nn01642391\ttarahumara frog, Rana tarahumarae\nn01642539\tgrass frog, Rana temporaria\nn01642943\tleptodactylid frog, leptodactylid\nn01643255\trobber frog\nn01643507\tbarking frog, robber frog, Hylactophryne augusti\nn01643896\tcrapaud, South American bullfrog, Leptodactylus pentadactylus\nn01644373\ttree frog, tree-frog\nn01644900\ttailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui\nn01645466\tLiopelma hamiltoni\nn01645776\ttrue toad\nn01646292\tbufo\nn01646388\tagua, agua toad, Bufo marinus\nn01646555\tEuropean toad, Bufo bufo\nn01646648\tnatterjack, Bufo calamita\nn01646802\tAmerican toad, Bufo americanus\nn01646902\tEurasian green toad, Bufo viridis\nn01647033\tAmerican green toad, Bufo debilis\nn01647180\tYosemite toad, Bufo canorus\nn01647303\tTexas toad, Bufo speciosus\nn01647466\tsouthwestern toad, Bufo microscaphus\nn01647640\twestern toad, Bufo boreas\nn01648139\tobstetrical toad, midwife toad, Alytes obstetricans\nn01648356\tmidwife toad, Alytes cisternasi\nn01648620\tfire-bellied toad, Bombina bombina\nn01649170\tspadefoot, spadefoot toad\nn01649412\twestern spadefoot, Scaphiopus hammondii\nn01649556\tsouthern spadefoot, Scaphiopus multiplicatus\nn01649726\tplains spadefoot, Scaphiopus bombifrons\nn01650167\ttree toad, tree frog, tree-frog\nn01650690\tspring peeper, Hyla crucifer\nn01650901\tPacific tree toad, Hyla regilla\nn01651059\tcanyon treefrog, Hyla arenicolor\nn01651285\tchameleon tree frog\nn01651487\tcricket frog\nn01651641\tnorthern cricket frog, Acris crepitans\nn01651778\teastern cricket frog, Acris gryllus\nn01652026\tchorus frog\nn01652297\tlowland burrowing treefrog, northern casque-headed frog, Pternohyla fodiens\nn01653026\twestern narrow-mouthed toad, Gastrophryne olivacea\nn01653223\teastern narrow-mouthed toad, Gastrophryne carolinensis\nn01653509\tsheep frog\nn01653773\ttongueless frog\nn01654083\tSurinam toad, Pipa pipa, Pipa americana\nn01654637\tAfrican clawed frog, Xenopus 
laevis\nn01654863\tSouth American poison toad\nn01655344\tcaecilian, blindworm\nn01661091\treptile, reptilian\nn01661592\tanapsid, anapsid reptile\nn01661818\tdiapsid, diapsid reptile\nn01662060\tDiapsida, subclass Diapsida\nn01662622\tchelonian, chelonian reptile\nn01662784\tturtle\nn01663401\tsea turtle, marine turtle\nn01663782\tgreen turtle, Chelonia mydas\nn01664065\tloggerhead, loggerhead turtle, Caretta caretta\nn01664369\tridley\nn01664492\tAtlantic ridley, bastard ridley, bastard turtle, Lepidochelys kempii\nn01664674\tPacific ridley, olive ridley, Lepidochelys olivacea\nn01664990\thawksbill turtle, hawksbill, hawkbill, tortoiseshell turtle, Eretmochelys imbricata\nn01665541\tleatherback turtle, leatherback, leathery turtle, Dermochelys coriacea\nn01665932\tsnapping turtle\nn01666228\tcommon snapping turtle, snapper, Chelydra serpentina\nn01666585\talligator snapping turtle, alligator snapper, Macroclemys temmincki\nn01667114\tmud turtle\nn01667432\tmusk turtle, stinkpot\nn01667778\tterrapin\nn01668091\tdiamondback terrapin, Malaclemys centrata\nn01668436\tred-bellied terrapin, red-bellied turtle, redbelly, Pseudemys rubriventris\nn01668665\tslider, yellow-bellied terrapin, Pseudemys scripta\nn01668892\tcooter, river cooter, Pseudemys concinna\nn01669191\tbox turtle, box tortoise\nn01669372\tWestern box turtle, Terrapene ornata\nn01669654\tpainted turtle, painted terrapin, painted tortoise, Chrysemys picta\nn01670092\ttortoise\nn01670535\tEuropean tortoise, Testudo graeca\nn01670802\tgiant tortoise\nn01671125\tgopher tortoise, gopher turtle, gopher, Gopherus polypemus\nn01671479\tdesert tortoise, Gopherus agassizii\nn01671705\tTexas tortoise\nn01672032\tsoft-shelled turtle, pancake turtle\nn01672432\tspiny softshell, Trionyx spiniferus\nn01672611\tsmooth softshell, Trionyx muticus\nn01673282\ttuatara, Sphenodon punctatum\nn01674216\tsaurian\nn01674464\tlizard\nn01674990\tgecko\nn01675352\tflying gecko, fringed gecko, Ptychozoon 
homalocephalum\nn01675722\tbanded gecko\nn01676755\tiguanid, iguanid lizard\nn01677366\tcommon iguana, iguana, Iguana iguana\nn01677747\tmarine iguana, Amblyrhynchus cristatus\nn01678043\tdesert iguana, Dipsosaurus dorsalis\nn01678343\tchuckwalla, Sauromalus obesus\nn01678657\tzebra-tailed lizard, gridiron-tailed lizard, Callisaurus draconoides\nn01679005\tfringe-toed lizard, Uma notata\nn01679307\tearless lizard\nn01679626\tcollared lizard\nn01679962\tleopard lizard\nn01680264\tspiny lizard\nn01680478\tfence lizard\nn01680655\twestern fence lizard, swift, blue-belly, Sceloporus occidentalis\nn01680813\teastern fence lizard, pine lizard, Sceloporus undulatus\nn01680983\tsagebrush lizard, Sceloporus graciosus\nn01681328\tside-blotched lizard, sand lizard, Uta stansburiana\nn01681653\ttree lizard, Urosaurus ornatus\nn01681940\thorned lizard, horned toad, horny frog\nn01682172\tTexas horned lizard, Phrynosoma cornutum\nn01682435\tbasilisk\nn01682714\tAmerican chameleon, anole, Anolis carolinensis\nn01683201\tworm lizard\nn01683558\tnight lizard\nn01684133\tskink, scincid, scincid lizard\nn01684578\twestern skink, Eumeces skiltonianus\nn01684741\tmountain skink, Eumeces callicephalus\nn01685439\tteiid lizard, teiid\nn01685808\twhiptail, whiptail lizard\nn01686044\tracerunner, race runner, six-lined racerunner, Cnemidophorus sexlineatus\nn01686220\tplateau striped whiptail, Cnemidophorus velox\nn01686403\tChihuahuan spotted whiptail, Cnemidophorus exsanguis\nn01686609\twestern whiptail, Cnemidophorus tigris\nn01686808\tcheckered whiptail, Cnemidophorus tesselatus\nn01687128\tteju\nn01687290\tcaiman lizard\nn01687665\tagamid, agamid lizard\nn01687978\tagama\nn01688243\tfrilled lizard, Chlamydosaurus kingi\nn01688961\tmoloch\nn01689081\tmountain devil, spiny lizard, Moloch horridus\nn01689411\tanguid lizard\nn01689811\talligator lizard\nn01690149\tblindworm, slowworm, Anguis fragilis\nn01690466\tglass lizard, glass snake, joint snake\nn01691217\tlegless 
lizard\nn01691652\tLanthanotus borneensis\nn01691951\tvenomous lizard\nn01692333\tGila monster, Heloderma suspectum\nn01692523\tbeaded lizard, Mexican beaded lizard, Heloderma horridum\nn01692864\tlacertid lizard, lacertid\nn01693175\tsand lizard, Lacerta agilis\nn01693334\tgreen lizard, Lacerta viridis\nn01693783\tchameleon, chamaeleon\nn01694178\tAfrican chameleon, Chamaeleo chamaeleon\nn01694311\thorned chameleon, Chamaeleo oweni\nn01694709\tmonitor, monitor lizard, varan\nn01694955\tAfrican monitor, Varanus niloticus\nn01695060\tKomodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis\nn01696633\tcrocodilian reptile, crocodilian\nn01697178\tcrocodile\nn01697457\tAfrican crocodile, Nile crocodile, Crocodylus niloticus\nn01697611\tAsian crocodile, Crocodylus porosus\nn01697749\tMorlett's crocodile\nn01697978\tfalse gavial, Tomistoma schlegeli\nn01698434\talligator, gator\nn01698640\tAmerican alligator, Alligator mississipiensis\nn01698782\tChinese alligator, Alligator sinensis\nn01699040\tcaiman, cayman\nn01699254\tspectacled caiman, Caiman sclerops\nn01699675\tgavial, Gavialis gangeticus\nn01701551\tarmored dinosaur\nn01701859\tstegosaur, stegosaurus, Stegosaur stenops\nn01702256\tankylosaur, ankylosaurus\nn01702479\tEdmontonia\nn01703011\tbone-headed dinosaur\nn01703161\tpachycephalosaur, pachycephalosaurus\nn01703569\tceratopsian, horned dinosaur\nn01704103\tprotoceratops\nn01704323\ttriceratops\nn01704626\tstyracosaur, styracosaurus\nn01705010\tpsittacosaur, psittacosaurus\nn01705591\tornithopod, ornithopod dinosaur\nn01705934\thadrosaur, hadrosaurus, duck-billed dinosaur\nn01707294\ttrachodon, trachodont\nn01708106\tsaurischian, saurischian dinosaur\nn01708998\tsauropod, sauropod dinosaur\nn01709484\tapatosaur, apatosaurus, brontosaur, brontosaurus, thunder lizard, Apatosaurus excelsus\nn01709876\tbarosaur, barosaurus\nn01710177\tdiplodocus\nn01711160\targentinosaur\nn01712008\ttheropod, theropod dinosaur, bird-footed 
dinosaur\nn01712752\tceratosaur, ceratosaurus\nn01713170\tcoelophysis\nn01713764\ttyrannosaur, tyrannosaurus, Tyrannosaurus rex\nn01714231\tallosaur, allosaurus\nn01715888\tornithomimid\nn01717016\tmaniraptor\nn01717229\toviraptorid\nn01717467\tvelociraptor\nn01718096\tdeinonychus\nn01718414\tutahraptor, superslasher\nn01719403\tsynapsid, synapsid reptile\nn01721174\tdicynodont\nn01721898\tpelycosaur\nn01722670\tdimetrodon\nn01722998\tpterosaur, flying reptile\nn01723579\tpterodactyl\nn01724231\tichthyosaur\nn01724840\tichthyosaurus\nn01725086\tstenopterygius, Stenopterygius quadrisicissus\nn01725713\tplesiosaur, plesiosaurus\nn01726203\tnothosaur\nn01726692\tsnake, serpent, ophidian\nn01727646\tcolubrid snake, colubrid\nn01728266\thoop snake\nn01728572\tthunder snake, worm snake, Carphophis amoenus\nn01728920\tringneck snake, ring-necked snake, ring snake\nn01729322\thognose snake, puff adder, sand viper\nn01729672\tleaf-nosed snake\nn01729977\tgreen snake, grass snake\nn01730185\tsmooth green snake, Opheodrys vernalis\nn01730307\trough green snake, Opheodrys aestivus\nn01730563\tgreen snake\nn01730812\tracer\nn01730960\tblacksnake, black racer, Coluber constrictor\nn01731137\tblue racer, Coluber constrictor flaviventris\nn01731277\thorseshoe whipsnake, Coluber hippocrepis\nn01731545\twhip-snake, whip snake, whipsnake\nn01731764\tcoachwhip, coachwhip snake, Masticophis flagellum\nn01731941\tCalifornia whipsnake, striped racer, Masticophis lateralis\nn01732093\tSonoran whipsnake, Masticophis bilineatus\nn01732244\trat snake\nn01732614\tcorn snake, red rat snake, Elaphe guttata\nn01732789\tblack rat snake, blacksnake, pilot blacksnake, mountain blacksnake, Elaphe obsoleta\nn01732989\tchicken snake\nn01733214\tIndian rat snake, Ptyas mucosus\nn01733466\tglossy snake, Arizona elegans\nn01733757\tbull snake, bull-snake\nn01733957\tgopher snake, Pituophis melanoleucus\nn01734104\tpine snake\nn01734418\tking snake, kingsnake\nn01734637\tcommon kingsnake, Lampropeltis 
getulus\nn01734808\tmilk snake, house snake, milk adder, checkered adder, Lampropeltis triangulum\nn01735189\tgarter snake, grass snake\nn01735439\tcommon garter snake, Thamnophis sirtalis\nn01735577\tribbon snake, Thamnophis sauritus\nn01735728\tWestern ribbon snake, Thamnophis proximus\nn01736032\tlined snake, Tropidoclonion lineatum\nn01736375\tground snake, Sonora semiannulata\nn01736796\teastern ground snake, Potamophis striatula, Haldea striatula\nn01737021\twater snake\nn01737472\tcommon water snake, banded water snake, Natrix sipedon, Nerodia sipedon\nn01737728\twater moccasin\nn01737875\tgrass snake, ring snake, ringed snake, Natrix natrix\nn01738065\tviperine grass snake, Natrix maura\nn01738306\tred-bellied snake, Storeria occipitamaculata\nn01738601\tsand snake\nn01738731\tbanded sand snake, Chilomeniscus cinctus\nn01739094\tblack-headed snake\nn01739381\tvine snake\nn01739647\tlyre snake\nn01739871\tSonoran lyre snake, Trimorphodon lambda\nn01740131\tnight snake, Hypsiglena torquata\nn01740551\tblind snake, worm snake\nn01740885\twestern blind snake, Leptotyphlops humilis\nn01741232\tindigo snake, gopher snake, Drymarchon corais\nn01741442\teastern indigo snake, Drymarchon corais couperi\nn01741562\tconstrictor\nn01741943\tboa\nn01742172\tboa constrictor, Constrictor constrictor\nn01742447\trubber boa, tow-headed snake, Charina bottae\nn01742821\trosy boa, Lichanura trivirgata\nn01743086\tanaconda, Eunectes murinus\nn01743605\tpython\nn01743936\tcarpet snake, Python variegatus, Morelia spilotes variegatus\nn01744100\treticulated python, Python reticulatus\nn01744270\tIndian python, Python molurus\nn01744401\trock python, rock snake, Python sebae\nn01744555\tamethystine python\nn01745125\telapid, elapid snake\nn01745484\tcoral snake, harlequin-snake, New World coral snake\nn01745902\teastern coral snake, Micrurus fulvius\nn01746191\twestern coral snake, Micruroides euryxanthus\nn01746359\tcoral snake, Old World coral snake\nn01746952\tAfrican coral 
snake, Aspidelaps lubricus\nn01747285\tAustralian coral snake, Rhynchoelaps australis\nn01747589\tcopperhead, Denisonia superba\nn01747885\tcobra\nn01748264\tIndian cobra, Naja naja\nn01748389\tasp, Egyptian cobra, Naja haje\nn01748686\tblack-necked cobra, spitting cobra, Naja nigricollis\nn01748906\thamadryad, king cobra, Ophiophagus hannah, Naja hannah\nn01749244\tringhals, rinkhals, spitting snake, Hemachatus haemachatus\nn01749582\tmamba\nn01749742\tblack mamba, Dendroaspis augusticeps\nn01749939\tgreen mamba\nn01750167\tdeath adder, Acanthophis antarcticus\nn01750437\ttiger snake, Notechis scutatus\nn01750743\tAustralian blacksnake, Pseudechis porphyriacus\nn01751036\tkrait\nn01751215\tbanded krait, banded adder, Bungarus fasciatus\nn01751472\ttaipan, Oxyuranus scutellatus\nn01751748\tsea snake\nn01752165\tviper\nn01752585\tadder, common viper, Vipera berus\nn01752736\tasp, asp viper, Vipera aspis\nn01753032\tpuff adder, Bitis arietans\nn01753180\tgaboon viper, Bitis gabonica\nn01753488\thorned viper, cerastes, sand viper, horned asp, Cerastes cornutus\nn01753959\tpit viper\nn01754370\tcopperhead, Agkistrodon contortrix\nn01754533\twater moccasin, cottonmouth, cottonmouth moccasin, Agkistrodon piscivorus\nn01754876\trattlesnake, rattler\nn01755581\tdiamondback, diamondback rattlesnake, Crotalus adamanteus\nn01755740\ttimber rattlesnake, banded rattlesnake, Crotalus horridus horridus\nn01755952\tcanebrake rattlesnake, canebrake rattler, Crotalus horridus atricaudatus\nn01756089\tprairie rattlesnake, prairie rattler, Western rattlesnake, Crotalus viridis\nn01756291\tsidewinder, horned rattlesnake, Crotalus cerastes\nn01756508\tWestern diamondback, Western diamondback rattlesnake, Crotalus atrox\nn01756733\trock rattlesnake, Crotalus lepidus\nn01756916\ttiger rattlesnake, Crotalus tigris\nn01757115\tMojave rattlesnake, Crotalus scutulatus\nn01757343\tspeckled rattlesnake, Crotalus mitchellii\nn01757677\tmassasauga, massasauga rattler, Sistrurus 
catenatus\nn01757901\tground rattler, massasauga, Sistrurus miliaris\nn01758141\tfer-de-lance, Bothrops atrops\nn01758757\tcarcase, carcass\nn01758895\tcarrion\nn01767661\tarthropod\nn01768244\ttrilobite\nn01769347\tarachnid, arachnoid\nn01770081\tharvestman, daddy longlegs, Phalangium opilio\nn01770393\tscorpion\nn01770795\tfalse scorpion, pseudoscorpion\nn01771100\tbook scorpion, Chelifer cancroides\nn01771417\twhip-scorpion, whip scorpion\nn01771766\tvinegarroon, Mastigoproctus giganteus\nn01772222\tspider\nn01772664\torb-weaving spider\nn01773157\tblack and gold garden spider, Argiope aurantia\nn01773549\tbarn spider, Araneus cavaticus\nn01773797\tgarden spider, Aranea diademata\nn01774097\tcomb-footed spider, theridiid\nn01774384\tblack widow, Latrodectus mactans\nn01774750\ttarantula\nn01775062\twolf spider, hunting spider\nn01775370\tEuropean wolf spider, tarantula, Lycosa tarentula\nn01775730\ttrap-door spider\nn01776192\tacarine\nn01776313\ttick\nn01776705\thard tick, ixodid\nn01777304\tIxodes dammini, deer tick\nn01777467\tIxodes neotomae\nn01777649\tIxodes pacificus, western black-legged tick\nn01777909\tIxodes scapularis, black-legged tick\nn01778217\tsheep-tick, sheep tick, Ixodes ricinus\nn01778487\tIxodes persulcatus\nn01778621\tIxodes dentatus\nn01778801\tIxodes spinipalpis\nn01779148\twood tick, American dog tick, Dermacentor variabilis\nn01779463\tsoft tick, argasid\nn01779629\tmite\nn01779939\tweb-spinning mite\nn01780142\tacarid\nn01780426\ttrombidiid\nn01780696\ttrombiculid\nn01781071\tharvest mite, chigger, jigger, redbug\nn01781570\tacarus, genus Acarus\nn01781698\titch mite, sarcoptid\nn01781875\trust mite\nn01782209\tspider mite, tetranychid\nn01782516\tred spider, red spider mite, Panonychus ulmi\nn01783017\tmyriapod\nn01783706\tgarden centipede, garden symphilid, symphilid, Scutigerella immaculata\nn01784293\ttardigrade\nn01784675\tcentipede\nn01785667\thouse centipede, Scutigera coleoptrata\nn01786646\tmillipede, millepede, 
milliped\nn01787006\tsea spider, pycnogonid\nn01787191\tMerostomata, class Merostomata\nn01787835\thorseshoe crab, king crab, Limulus polyphemus, Xiphosurus polyphemus\nn01788291\tAsian horseshoe crab\nn01788579\teurypterid\nn01788864\ttongue worm, pentastomid\nn01789386\tgallinaceous bird, gallinacean\nn01789740\tdomestic fowl, fowl, poultry\nn01790171\tDorking\nn01790304\tPlymouth Rock\nn01790398\tCornish, Cornish fowl\nn01790557\tRock Cornish\nn01790711\tgame fowl\nn01790812\tcochin, cochin china\nn01791107\tjungle fowl, gallina\nn01791314\tjungle cock\nn01791388\tjungle hen\nn01791463\tred jungle fowl, Gallus gallus\nn01791625\tchicken, Gallus gallus\nn01791954\tbantam\nn01792042\tchick, biddy\nn01792158\tcock, rooster\nn01792429\tcockerel\nn01792530\tcapon\nn01792640\then, biddy\nn01792808\tcackler\nn01792955\tbrood hen, broody, broody hen, setting hen, sitter\nn01793085\tmother hen\nn01793159\tlayer\nn01793249\tpullet\nn01793340\tspring chicken\nn01793435\tRhode Island red\nn01793565\tDominique, Dominick\nn01793715\tOrpington\nn01794158\tturkey, Meleagris gallopavo\nn01794344\tturkey cock, gobbler, tom, tom turkey\nn01794651\tocellated turkey, Agriocharis ocellata\nn01795088\tgrouse\nn01795545\tblack grouse\nn01795735\tEuropean black grouse, heathfowl, Lyrurus tetrix\nn01795900\tAsian black grouse, Lyrurus mlokosiewiczi\nn01796019\tblackcock, black cock\nn01796105\tgreyhen, grayhen, grey hen, gray hen, heath hen\nn01796340\tptarmigan\nn01796519\tred grouse, moorfowl, moorbird, moor-bird, moorgame, Lagopus scoticus\nn01796729\tmoorhen\nn01797020\tcapercaillie, capercailzie, horse of the wood, Tetrao urogallus\nn01797307\tspruce grouse, Canachites canadensis\nn01797601\tsage grouse, sage hen, Centrocercus urophasianus\nn01797886\truffed grouse, partridge, Bonasa umbellus\nn01798168\tsharp-tailed grouse, sprigtail, sprig tail, Pedioecetes phasianellus\nn01798484\tprairie chicken, prairie grouse, prairie fowl\nn01798706\tgreater prairie chicken, Tympanuchus 
cupido\nn01798839\tlesser prairie chicken, Tympanuchus pallidicinctus\nn01798979\theath hen, Tympanuchus cupido cupido\nn01799302\tguan\nn01799679\tcurassow\nn01800195\tpiping guan\nn01800424\tchachalaca\nn01800633\tTexas chachalaca, Ortilis vetula macalli\nn01801088\tmegapode, mound bird, mound-bird, mound builder, scrub fowl\nn01801479\tmallee fowl, leipoa, lowan, Leipoa ocellata\nn01801672\tmallee hen\nn01801876\tbrush turkey, Alectura lathami\nn01802159\tmaleo, Macrocephalon maleo\nn01802721\tphasianid\nn01803078\tpheasant\nn01803362\tring-necked pheasant, Phasianus colchicus\nn01803641\tafropavo, Congo peafowl, Afropavo congensis\nn01803893\targus, argus pheasant\nn01804163\tgolden pheasant, Chrysolophus pictus\nn01804478\tbobwhite, bobwhite quail, partridge\nn01804653\tnorthern bobwhite, Colinus virginianus\nn01804921\tOld World quail\nn01805070\tmigratory quail, Coturnix coturnix, Coturnix communis\nn01805321\tmonal, monaul\nn01805801\tpeafowl, bird of Juno\nn01806061\tpeachick, pea-chick\nn01806143\tpeacock\nn01806297\tpeahen\nn01806364\tblue peafowl, Pavo cristatus\nn01806467\tgreen peafowl, Pavo muticus\nn01806567\tquail\nn01806847\tCalifornia quail, Lofortyx californicus\nn01807105\ttragopan\nn01807496\tpartridge\nn01807828\tHungarian partridge, grey partridge, gray partridge, Perdix perdix\nn01808140\tred-legged partridge, Alectoris ruffa\nn01808291\tGreek partridge, rock partridge, Alectoris graeca\nn01808596\tmountain quail, mountain partridge, Oreortyx picta palmeri\nn01809106\tguinea fowl, guinea, Numida meleagris\nn01809371\tguinea hen\nn01809752\thoatzin, hoactzin, stinkbird, Opisthocomus hoazin\nn01810268\ttinamou, partridge\nn01810700\tcolumbiform bird\nn01811243\tdodo, Raphus cucullatus\nn01811909\tpigeon\nn01812187\tpouter pigeon, pouter\nn01812337\tdove\nn01812662\trock dove, rock pigeon, Columba livia\nn01812866\tband-tailed pigeon, band-tail pigeon, bandtail, Columba fasciata\nn01813088\twood pigeon, ringdove, cushat, Columba 
palumbus\nn01813385\tturtledove\nn01813532\tStreptopelia turtur\nn01813658\tringdove, Streptopelia risoria\nn01813948\tAustralian turtledove, turtledove, Stictopelia cuneata\nn01814217\tmourning dove, Zenaidura macroura\nn01814370\tdomestic pigeon\nn01814549\tsquab\nn01814620\tfairy swallow\nn01814755\troller, tumbler, tumbler pigeon\nn01814921\thoming pigeon, homer\nn01815036\tcarrier pigeon\nn01815270\tpassenger pigeon, Ectopistes migratorius\nn01815601\tsandgrouse, sand grouse\nn01816017\tpainted sandgrouse, Pterocles indicus\nn01816140\tpin-tailed sandgrouse, pin-tailed grouse, Pterocles alchata\nn01816474\tpallas's sandgrouse, Syrrhaptes paradoxus\nn01816887\tparrot\nn01817263\tpopinjay\nn01817346\tpoll, poll parrot\nn01817953\tAfrican grey, African gray, Psittacus erithacus\nn01818299\tamazon\nn01818515\tmacaw\nn01818832\tkea, Nestor notabilis\nn01819115\tcockatoo\nn01819313\tsulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita\nn01819465\tpink cockatoo, Kakatoe leadbeateri\nn01819734\tcockateel, cockatiel, cockatoo parrot, Nymphicus hollandicus\nn01820052\tlovebird\nn01820348\tlory\nn01820546\tlorikeet\nn01820801\tvaried Lorikeet, Glossopsitta versicolor\nn01821076\trainbow lorikeet, Trichoglossus moluccanus\nn01821203\tparakeet, parrakeet, parroket, paraquet, paroquet, parroquet\nn01821554\tCarolina parakeet, Conuropsis carolinensis\nn01821869\tbudgerigar, budgereegah, budgerygah, budgie, grass parakeet, lovebird, shell parakeet, Melopsittacus undulatus\nn01822300\tring-necked parakeet, Psittacula krameri\nn01822602\tcuculiform bird\nn01823013\tcuckoo\nn01823414\tEuropean cuckoo, Cuculus canorus\nn01823740\tblack-billed cuckoo, Coccyzus erythropthalmus\nn01824035\troadrunner, chaparral cock, Geococcyx californianus\nn01824344\tani\nn01824575\tcoucal\nn01824749\tcrow pheasant, Centropus sinensis\nn01825278\ttouraco, turaco, turacou, turakoo\nn01825930\tcoraciiform bird\nn01826364\troller\nn01826680\tEuropean roller, Coracias 
garrulus\nn01826844\tground roller\nn01827403\tkingfisher\nn01827793\tEurasian kingfisher, Alcedo atthis\nn01828096\tbelted kingfisher, Ceryle alcyon\nn01828556\tkookaburra, laughing jackass, Dacelo gigas\nn01828970\tbee eater\nn01829413\thornbill\nn01829869\thoopoe, hoopoo\nn01830042\tEuopean hoopoe, Upupa epops\nn01830479\twood hoopoe\nn01830915\tmotmot, momot\nn01831360\ttody\nn01831712\tapodiform bird\nn01832167\tswift\nn01832493\tEuropean swift, Apus apus\nn01832813\tchimney swift, chimney swallow, Chateura pelagica\nn01833112\tswiftlet, Collocalia inexpectata\nn01833415\ttree swift, crested swift\nn01833805\thummingbird\nn01834177\tArchilochus colubris\nn01834540\tthornbill\nn01835276\tgoatsucker, nightjar, caprimulgid\nn01835769\tEuropean goatsucker, European nightjar, Caprimulgus europaeus\nn01835918\tchuck-will's-widow, Caprimulgus carolinensis\nn01836087\twhippoorwill, Caprimulgus vociferus\nn01836673\tpoorwill, Phalaenoptilus nuttallii\nn01837072\tfrogmouth\nn01837526\toilbird, guacharo, Steatornis caripensis\nn01838038\tpiciform bird\nn01838598\twoodpecker, peckerwood, pecker\nn01839086\tgreen woodpecker, Picus viridis\nn01839330\tdowny woodpecker\nn01839598\tflicker\nn01839750\tyellow-shafted flicker, Colaptes auratus, yellowhammer\nn01839949\tgilded flicker, Colaptes chrysoides\nn01840120\tred-shafted flicker, Colaptes caper collaris\nn01840412\tivorybill, ivory-billed woodpecker, Campephilus principalis\nn01840775\tredheaded woodpecker, redhead, Melanerpes erythrocephalus\nn01841102\tsapsucker\nn01841288\tyellow-bellied sapsucker, Sphyrapicus varius\nn01841441\tred-breasted sapsucker, Sphyrapicus varius ruber\nn01841679\twryneck\nn01841943\tpiculet\nn01842235\tbarbet\nn01842504\tpuffbird\nn01842788\thoney guide\nn01843065\tjacamar\nn01843383\ttoucan\nn01843719\ttoucanet\nn01844231\ttrogon\nn01844551\tquetzal, quetzal bird\nn01844746\tresplendent quetzel, resplendent trogon, Pharomacrus mocino\nn01844917\taquatic bird\nn01845132\twaterfowl, water 
bird, waterbird\nn01845477\tanseriform bird\nn01846331\tduck\nn01847000\tdrake\nn01847089\tquack-quack\nn01847170\tduckling\nn01847253\tdiving duck\nn01847407\tdabbling duck, dabbler\nn01847806\tmallard, Anas platyrhynchos\nn01847978\tblack duck, Anas rubripes\nn01848123\tteal\nn01848323\tgreenwing, green-winged teal, Anas crecca\nn01848453\tbluewing, blue-winged teal, Anas discors\nn01848555\tgarganey, Anas querquedula\nn01848648\twidgeon, wigeon, Anas penelope\nn01848840\tAmerican widgeon, baldpate, Anas americana\nn01848976\tshoveler, shoveller, broadbill, Anas clypeata\nn01849157\tpintail, pin-tailed duck, Anas acuta\nn01849466\tsheldrake\nn01849676\tshelduck\nn01849863\truddy duck, Oxyura jamaicensis\nn01850192\tbufflehead, butterball, dipper, Bucephela albeola\nn01850373\tgoldeneye, whistler, Bucephela clangula\nn01850553\tBarrow's goldeneye, Bucephala islandica\nn01850873\tcanvasback, canvasback duck, Aythya valisineria\nn01851038\tpochard, Aythya ferina\nn01851207\tredhead, Aythya americana\nn01851375\tscaup, scaup duck, bluebill, broadbill\nn01851573\tgreater scaup, Aythya marila\nn01851731\tlesser scaup, lesser scaup duck, lake duck, Aythya affinis\nn01851895\twild duck\nn01852142\twood duck, summer duck, wood widgeon, Aix sponsa\nn01852329\twood drake\nn01852400\tmandarin duck, Aix galericulata\nn01852671\tmuscovy duck, musk duck, Cairina moschata\nn01852861\tsea duck\nn01853195\teider, eider duck\nn01853498\tscoter, scooter\nn01853666\tcommon scoter, Melanitta nigra\nn01853870\told squaw, oldwife, Clangula hyemalis\nn01854415\tmerganser, fish duck, sawbill, sheldrake\nn01854700\tgoosander, Mergus merganser\nn01854838\tAmerican merganser, Mergus merganser americanus\nn01855032\tred-breasted merganser, Mergus serrator\nn01855188\tsmew, Mergus albellus\nn01855476\thooded merganser, hooded sheldrake, Lophodytes cucullatus\nn01855672\tgoose\nn01856072\tgosling\nn01856155\tgander\nn01856380\tChinese goose, Anser cygnoides\nn01856553\tgreylag, graylag, greylag 
goose, graylag goose, Anser anser\nn01856890\tblue goose, Chen caerulescens\nn01857079\tsnow goose\nn01857325\tbrant, brant goose, brent, brent goose\nn01857512\tcommon brant goose, Branta bernicla\nn01857632\thonker, Canada goose, Canadian goose, Branta canadensis\nn01857851\tbarnacle goose, barnacle, Branta leucopsis\nn01858281\tcoscoroba\nn01858441\tswan\nn01858780\tcob\nn01858845\tpen\nn01858906\tcygnet\nn01859190\tmute swan, Cygnus olor\nn01859325\twhooper, whooper swan, Cygnus cygnus\nn01859496\ttundra swan, Cygnus columbianus\nn01859689\twhistling swan, Cygnus columbianus columbianus\nn01859852\tBewick's swan, Cygnus columbianus bewickii\nn01860002\ttrumpeter, trumpeter swan, Cygnus buccinator\nn01860187\tblack swan, Cygnus atratus\nn01860497\tscreamer\nn01860864\thorned screamer, Anhima cornuta\nn01861148\tcrested screamer\nn01861330\tchaja, Chauna torquata\nn01861778\tmammal, mammalian\nn01862399\tfemale mammal\nn01871265\ttusker\nn01871543\tprototherian\nn01871875\tmonotreme, egg-laying mammal\nn01872401\techidna, spiny anteater, anteater\nn01872772\techidna, spiny anteater, anteater\nn01873310\tplatypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus\nn01874434\tmarsupial, pouched mammal\nn01874928\topossum, possum\nn01875313\tcommon opossum, Didelphis virginiana, Didelphis marsupialis\nn01875610\tcrab-eating opossum\nn01876034\topossum rat\nn01876326\tbandicoot\nn01876667\trabbit-eared bandicoot, rabbit bandicoot, bilby, Macrotis lagotis\nn01877134\tkangaroo\nn01877606\tgiant kangaroo, great grey kangaroo, Macropus giganteus\nn01877812\twallaby, brush kangaroo\nn01878061\tcommon wallaby, Macropus agiles\nn01878335\thare wallaby, kangaroo hare\nn01878639\tnail-tailed wallaby, nail-tailed kangaroo\nn01878929\trock wallaby, rock kangaroo\nn01879217\tpademelon, paddymelon\nn01879509\ttree wallaby, tree kangaroo\nn01879837\tmusk kangaroo, Hypsiprymnodon moschatus\nn01880152\trat kangaroo, kangaroo 
rat\nn01880473\tpotoroo\nn01880716\tbettong\nn01880813\tjerboa kangaroo, kangaroo jerboa\nn01881171\tphalanger, opossum, possum\nn01881564\tcuscus\nn01881857\tbrush-tailed phalanger, Trichosurus vulpecula\nn01882125\tflying phalanger, flying opossum, flying squirrel\nn01882714\tkoala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus\nn01883070\twombat\nn01883513\tdasyurid marsupial, dasyurid\nn01883920\tdasyure\nn01884104\teastern dasyure, Dasyurus quoll\nn01884203\tnative cat, Dasyurus viverrinus\nn01884476\tthylacine, Tasmanian wolf, Tasmanian tiger, Thylacinus cynocephalus\nn01884834\tTasmanian devil, ursine dasyure, Sarcophilus hariisi\nn01885158\tpouched mouse, marsupial mouse, marsupial rat\nn01885498\tnumbat, banded anteater, anteater, Myrmecobius fasciatus\nn01886045\tpouched mole, marsupial mole, Notoryctus typhlops\nn01886756\tplacental, placental mammal, eutherian, eutherian mammal\nn01887474\tlivestock, stock, farm animal\nn01887623\tbull\nn01887787\tcow\nn01887896\tcalf\nn01888045\tcalf\nn01888181\tyearling\nn01888264\tbuck\nn01888411\tdoe\nn01889074\tinsectivore\nn01889520\tmole\nn01889849\tstarnose mole, star-nosed mole, Condylura cristata\nn01890144\tbrewer's mole, hair-tailed mole, Parascalops breweri\nn01890564\tgolden mole\nn01890860\tshrew mole\nn01891013\tAsiatic shrew mole, Uropsilus soricipes\nn01891274\tAmerican shrew mole, Neurotrichus gibbsii\nn01891633\tshrew, shrewmouse\nn01892030\tcommon shrew, Sorex araneus\nn01892145\tmasked shrew, Sorex cinereus\nn01892385\tshort-tailed shrew, Blarina brevicauda\nn01892551\twater shrew\nn01892744\tAmerican water shrew, Sorex palustris\nn01893021\tEuropean water shrew, Neomys fodiens\nn01893164\tMediterranean water shrew, Neomys anomalus\nn01893399\tleast shrew, Cryptotis parva\nn01893825\thedgehog, Erinaceus europaeus, Erinaceus europeaeus\nn01894207\ttenrec, tendrac\nn01894522\ttailless tenrec, Tenrec ecaudatus\nn01894956\totter shrew, potamogale, Potamogale 
velox\nn01896844\teiderdown\nn01897257\taftershaft\nn01897426\tsickle feather\nn01897536\tcontour feather\nn01897667\tbastard wing, alula, spurious wing\nn01898593\tsaddle hackle, saddle feather\nn01899894\tencolure\nn01900150\thair\nn01903234\tsquama\nn01903346\tscute\nn01903498\tsclerite\nn01904029\tplastron\nn01904806\tscallop shell\nn01904886\toyster shell\nn01905321\ttheca\nn01905661\tinvertebrate\nn01906749\tsponge, poriferan, parazoan\nn01907287\tchoanocyte, collar cell\nn01907738\tglass sponge\nn01908042\tVenus's flower basket\nn01908958\tmetazoan\nn01909422\tcoelenterate, cnidarian\nn01909788\tplanula\nn01909906\tpolyp\nn01910252\tmedusa, medusoid, medusan\nn01910747\tjellyfish\nn01911063\tscyphozoan\nn01911403\tChrysaora quinquecirrha\nn01911839\thydrozoan, hydroid\nn01912152\thydra\nn01912454\tsiphonophore\nn01912809\tnanomia\nn01913166\tPortuguese man-of-war, man-of-war, jellyfish\nn01913346\tpraya\nn01913440\tapolemia\nn01914163\tanthozoan, actinozoan\nn01914609\tsea anemone, anemone\nn01914830\tactinia, actinian, actiniarian\nn01915700\tsea pen\nn01915811\tcoral\nn01916187\tgorgonian, gorgonian coral\nn01916388\tsea feather\nn01916481\tsea fan\nn01916588\tred coral\nn01916925\tstony coral, madrepore, madriporian coral\nn01917289\tbrain coral\nn01917611\tstaghorn coral, stag's-horn coral\nn01917882\tmushroom coral\nn01918744\tctenophore, comb jelly\nn01919385\tberoe\nn01920051\tplatyctenean\nn01920438\tsea gooseberry\nn01921059\tVenus's girdle, Cestum veneris\nn01922303\tworm\nn01922717\thelminth, parasitic worm\nn01922948\twoodworm\nn01923025\twoodborer, borer\nn01923404\tacanthocephalan, spiny-headed worm\nn01923890\tarrowworm, chaetognath\nn01924800\tbladder worm\nn01924916\tflatworm, platyhelminth\nn01925270\tplanarian, planaria\nn01925695\tfluke, trematode, trematode worm\nn01925916\tcercaria\nn01926379\tliver fluke, Fasciola hepatica\nn01926689\tFasciolopsis buski\nn01927159\tschistosome, blood fluke\nn01927456\ttapeworm, 
cestode\nn01927928\techinococcus\nn01928215\ttaenia\nn01928517\tribbon worm, nemertean, nemertine, proboscis worm\nn01928865\tbeard worm, pogonophoran\nn01929186\trotifer\nn01930112\tnematode, nematode worm, roundworm\nn01930852\tcommon roundworm, Ascaris lumbricoides\nn01931140\tchicken roundworm, Ascaridia galli\nn01931520\tpinworm, threadworm, Enterobius vermicularis\nn01931714\teelworm\nn01932151\tvinegar eel, vinegar worm, Anguillula aceti, Turbatrix aceti\nn01932936\ttrichina, Trichinella spiralis\nn01933151\thookworm\nn01933478\tfilaria\nn01933988\tGuinea worm, Dracunculus medinensis\nn01934440\tannelid, annelid worm, segmented worm\nn01934844\tarchiannelid\nn01935176\toligochaete, oligochaete worm\nn01935395\tearthworm, angleworm, fishworm, fishing worm, wiggler, nightwalker, nightcrawler, crawler, dew worm, red worm\nn01936391\tpolychaete, polychete, polychaete worm, polychete worm\nn01936671\tlugworm, lug, lobworm\nn01936858\tsea mouse\nn01937579\tbloodworm\nn01937909\tleech, bloodsucker, hirudinean\nn01938454\tmedicinal leech, Hirudo medicinalis\nn01938735\thorseleech\nn01940736\tmollusk, mollusc, shellfish\nn01941223\tscaphopod\nn01941340\ttooth shell, tusk shell\nn01942177\tgastropod, univalve\nn01942869\tabalone, ear-shell\nn01943087\tormer, sea-ear, Haliotis tuberculata\nn01943541\tscorpion shell\nn01943899\tconch\nn01944118\tgiant conch, Strombus gigas\nn01944390\tsnail\nn01944812\tedible snail, Helix pomatia\nn01944955\tgarden snail\nn01945143\tbrown snail, Helix aspersa\nn01945340\tHelix hortensis\nn01945685\tslug\nn01945845\tseasnail\nn01946277\tneritid, neritid gastropod\nn01946630\tnerita\nn01946827\tbleeding tooth, Nerita peloronta\nn01947139\tneritina\nn01947396\twhelk\nn01947997\tmoon shell, moonshell\nn01948446\tperiwinkle, winkle\nn01948573\tlimpet\nn01949085\tcommon limpet, Patella vulgata\nn01949499\tkeyhole limpet, Fissurella apertura, Diodora apertura\nn01949973\triver limpet, freshwater limpet, Ancylus fluviatilis\nn01950731\tsea 
slug, nudibranch\nn01951274\tsea hare, Aplysia punctata\nn01951613\tHermissenda crassicornis\nn01952029\tbubble shell\nn01952712\tphysa\nn01953361\tcowrie, cowry\nn01953594\tmoney cowrie, Cypraea moneta\nn01953762\ttiger cowrie, Cypraea tigris\nn01954516\tsolenogaster, aplacophoran\nn01955084\tchiton, coat-of-mail shell, sea cradle, polyplacophore\nn01955933\tbivalve, pelecypod, lamellibranch\nn01956344\tspat\nn01956481\tclam\nn01956764\tseashell\nn01957335\tsoft-shell clam, steamer, steamer clam, long-neck clam, Mya arenaria\nn01958038\tquahog, quahaug, hard-shell clam, hard clam, round clam, Venus mercenaria, Mercenaria mercenaria\nn01958346\tlittleneck, littleneck clam\nn01958435\tcherrystone, cherrystone clam\nn01958531\tgeoduck\nn01959029\trazor clam, jackknife clam, knife-handle\nn01959492\tgiant clam, Tridacna gigas\nn01959985\tcockle\nn01960177\tedible cockle, Cardium edule\nn01960459\toyster\nn01961234\tJapanese oyster, Ostrea gigas\nn01961600\tVirginia oyster\nn01961985\tpearl oyster, Pinctada margaritifera\nn01962506\tsaddle oyster, Anomia ephippium\nn01962788\twindow oyster, windowpane oyster, capiz, Placuna placenta\nn01963317\tark shell\nn01963479\tblood clam\nn01963571\tmussel\nn01964049\tmarine mussel, mytilid\nn01964271\tedible mussel, Mytilus edulis\nn01964441\tfreshwater mussel, freshwater clam\nn01964957\tpearly-shelled mussel\nn01965252\tthin-shelled mussel\nn01965529\tzebra mussel, Dreissena polymorpha\nn01965889\tscallop, scollop, escallop\nn01966377\tbay scallop, Pecten irradians\nn01966586\tsea scallop, giant scallop, Pecten magellanicus\nn01967094\tshipworm, teredinid\nn01967308\tteredo\nn01967963\tpiddock\nn01968315\tcephalopod, cephalopod mollusk\nn01968897\tchambered nautilus, pearly nautilus, nautilus\nn01969726\toctopod\nn01970164\toctopus, devilfish\nn01970667\tpaper nautilus, nautilus, Argonaut, Argonauta argo\nn01971094\tdecapod\nn01971280\tsquid\nn01971620\tloligo\nn01971850\tommastrephes\nn01972131\tarchiteuthis, giant 
squid\nn01972541\tcuttlefish, cuttle\nn01973148\tspirula, Spirula peronii\nn01974773\tcrustacean\nn01975687\tmalacostracan crustacean\nn01976146\tdecapod crustacean, decapod\nn01976868\tbrachyuran\nn01976957\tcrab\nn01977485\tstone crab, Menippe mercenaria\nn01978010\thard-shell crab\nn01978136\tsoft-shell crab, soft-shelled crab\nn01978287\tDungeness crab, Cancer magister\nn01978455\trock crab, Cancer irroratus\nn01978587\tJonah crab, Cancer borealis\nn01978930\tswimming crab\nn01979269\tEnglish lady crab, Portunus puber\nn01979526\tAmerican lady crab, lady crab, calico crab, Ovalipes ocellatus\nn01979874\tblue crab, Callinectes sapidus\nn01980166\tfiddler crab\nn01980655\tpea crab\nn01981276\tking crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica\nn01981702\tspider crab\nn01982068\tEuropean spider crab, king crab, Maja squinado\nn01982347\tgiant crab, Macrocheira kaempferi\nn01982650\tlobster\nn01983048\ttrue lobster\nn01983481\tAmerican lobster, Northern lobster, Maine lobster, Homarus americanus\nn01983674\tEuropean lobster, Homarus vulgaris\nn01983829\tCape lobster, Homarus capensis\nn01984245\tNorway lobster, Nephrops norvegicus\nn01984695\tspiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish\nn01985128\tcrayfish, crawfish, crawdad, crawdaddy\nn01985493\tOld World crayfish, ecrevisse\nn01985797\tAmerican crayfish\nn01986214\thermit crab\nn01986806\tshrimp\nn01987076\tsnapping shrimp, pistol shrimp\nn01987545\tprawn\nn01987727\tlong-clawed prawn, river prawn, Palaemon australis\nn01988203\ttropical prawn\nn01988701\tkrill\nn01988869\tEuphausia pacifica\nn01989516\topossum shrimp\nn01989869\tstomatopod, stomatopod crustacean\nn01990007\tmantis shrimp, mantis crab\nn01990516\tsquilla, mantis prawn\nn01990800\tisopod\nn01991028\twoodlouse, slater\nn01991520\tpill bug\nn01992262\tsow bug\nn01992423\tsea louse, sea slater\nn01992773\tamphipod\nn01993525\tskeleton shrimp\nn01993830\twhale louse\nn01994910\tdaphnia, 
water flea\nn01995514\tfairy shrimp\nn01995686\tbrine shrimp, Artemia salina\nn01996280\ttadpole shrimp\nn01996585\tcopepod, copepod crustacean\nn01997119\tcyclops, water flea\nn01997825\tseed shrimp, mussel shrimp, ostracod\nn01998183\tbarnacle, cirriped, cirripede\nn01998741\tacorn barnacle, rock barnacle, Balanus balanoides\nn01999186\tgoose barnacle, gooseneck barnacle, Lepas fascicularis\nn01999767\tonychophoran, velvet worm, peripatus\nn02000954\twading bird, wader\nn02002075\tstork\nn02002556\twhite stork, Ciconia ciconia\nn02002724\tblack stork, Ciconia nigra\nn02003037\tadjutant bird, adjutant, adjutant stork, Leptoptilus dubius\nn02003204\tmarabou, marabout, marabou stork, Leptoptilus crumeniferus\nn02003577\topenbill\nn02003839\tjabiru, Jabiru mycteria\nn02004131\tsaddlebill, jabiru, Ephippiorhynchus senegalensis\nn02004492\tpoliceman bird, black-necked stork, jabiru, Xenorhyncus asiaticus\nn02004855\twood ibis, wood stork, flinthead, Mycteria americana\nn02005399\tshoebill, shoebird, Balaeniceps rex\nn02005790\tibis\nn02006063\twood ibis, wood stork, Ibis ibis\nn02006364\tsacred ibis, Threskiornis aethiopica\nn02006656\tspoonbill\nn02006985\tcommon spoonbill, Platalea leucorodia\nn02007284\troseate spoonbill, Ajaia ajaja\nn02007558\tflamingo\nn02008041\theron\nn02008497\tgreat blue heron, Ardea herodius\nn02008643\tgreat white heron, Ardea occidentalis\nn02008796\tegret\nn02009229\tlittle blue heron, Egretta caerulea\nn02009380\tsnowy egret, snowy heron, Egretta thula\nn02009508\tlittle egret, Egretta garzetta\nn02009750\tgreat white heron, Casmerodius albus\nn02009912\tAmerican egret, great white heron, Egretta albus\nn02010272\tcattle egret, Bubulcus ibis\nn02010453\tnight heron, night raven\nn02010728\tblack-crowned night heron, Nycticorax nycticorax\nn02011016\tyellow-crowned night heron, Nyctanassa violacea\nn02011281\tboatbill, boat-billed heron, broadbill, Cochlearius cochlearius\nn02011460\tbittern\nn02011805\tAmerican bittern, stake driver, 
Botaurus lentiginosus\nn02011943\tEuropean bittern, Botaurus stellaris\nn02012185\tleast bittern, Ixobrychus exilis\nn02012849\tcrane\nn02013177\twhooping crane, whooper, Grus americana\nn02013567\tcourlan, Aramus guarauna\nn02013706\tlimpkin, Aramus pictus\nn02014237\tcrested cariama, seriema, Cariama cristata\nn02014524\tchunga, seriema, Chunga burmeisteri\nn02014941\trail\nn02015357\tweka, maori hen, wood hen\nn02015554\tcrake\nn02015797\tcorncrake, land rail, Crex crex\nn02016066\tspotted crake, Porzana porzana\nn02016358\tgallinule, marsh hen, water hen, swamphen\nn02016659\tFlorida gallinule, Gallinula chloropus cachinnans\nn02016816\tmoorhen, Gallinula chloropus\nn02016956\tpurple gallinule\nn02017213\tEuropean gallinule, Porphyrio porphyrio\nn02017475\tAmerican gallinule, Porphyrula martinica\nn02017725\tnotornis, takahe, Notornis mantelli\nn02018027\tcoot\nn02018207\tAmerican coot, marsh hen, mud hen, water hen, Fulica americana\nn02018368\tOld World coot, Fulica atra\nn02018795\tbustard\nn02019190\tgreat bustard, Otis tarda\nn02019438\tplain turkey, Choriotis australis\nn02019929\tbutton quail, button-quail, bustard quail, hemipode\nn02020219\tstriped button quail, Turnix sylvatica\nn02020578\tplain wanderer, Pedionomus torquatus\nn02021050\ttrumpeter\nn02021281\tBrazilian trumpeter, Psophia crepitans\nn02021795\tseabird, sea bird, seafowl\nn02022684\tshorebird, shore bird, limicoline bird\nn02023341\tplover\nn02023855\tpiping plover, Charadrius melodus\nn02023992\tkilldeer, kildeer, killdeer plover, Charadrius vociferus\nn02024185\tdotterel, dotrel, Charadrius morinellus, Eudromias morinellus\nn02024479\tgolden plover\nn02024763\tlapwing, green plover, peewit, pewit\nn02025043\tturnstone\nn02025239\truddy turnstone, Arenaria interpres\nn02025389\tblack turnstone, Arenaria-Melanocephala\nn02026059\tsandpiper\nn02026629\tsurfbird, Aphriza virgata\nn02026948\tEuropean sandpiper, Actitis hypoleucos\nn02027075\tspotted sandpiper, Actitis 
macularia\nn02027357\tleast sandpiper, stint, Erolia minutilla\nn02027492\tred-backed sandpiper, dunlin, Erolia alpina\nn02027897\tgreenshank, Tringa nebularia\nn02028035\tredshank, Tringa totanus\nn02028175\tyellowlegs\nn02028342\tgreater yellowlegs, Tringa melanoleuca\nn02028451\tlesser yellowlegs, Tringa flavipes\nn02028727\tpectoral sandpiper, jacksnipe, Calidris melanotos\nn02028900\tknot, greyback, grayback, Calidris canutus\nn02029087\tcurlew sandpiper, Calidris Ferruginea\nn02029378\tsanderling, Crocethia alba\nn02029706\tupland sandpiper, upland plover, Bartramian sandpiper, Bartramia longicauda\nn02030035\truff, Philomachus pugnax\nn02030224\treeve\nn02030287\ttattler\nn02030568\tPolynesian tattler, Heteroscelus incanus\nn02030837\twillet, Catoptrophorus semipalmatus\nn02030996\twoodcock\nn02031298\tEurasian woodcock, Scolopax rusticola\nn02031585\tAmerican woodcock, woodcock snipe, Philohela minor\nn02031934\tsnipe\nn02032222\twhole snipe, Gallinago gallinago\nn02032355\tWilson's snipe, Gallinago gallinago delicata\nn02032480\tgreat snipe, woodcock snipe, Gallinago media\nn02032769\tjacksnipe, half snipe, Limnocryptes minima\nn02033041\tdowitcher\nn02033208\tgreyback, grayback, Limnodromus griseus\nn02033324\tred-breasted snipe, Limnodromus scolopaceus\nn02033561\tcurlew\nn02033779\tEuropean curlew, Numenius arquata\nn02033882\tEskimo curlew, Numenius borealis\nn02034129\tgodwit\nn02034295\tHudsonian godwit, Limosa haemastica\nn02034661\tstilt, stiltbird, longlegs, long-legs, stilt plover, Himantopus stilt\nn02034971\tblack-necked stilt, Himantopus mexicanus\nn02035210\tblack-winged stilt, Himantopus himantopus\nn02035402\twhite-headed stilt, Himantopus himantopus leucocephalus\nn02035656\tkaki, Himantopus novae-zelandiae\nn02036053\tstilt, Australian stilt\nn02036228\tbanded stilt, Cladorhyncus leucocephalum\nn02036711\tavocet\nn02037110\toystercatcher, oyster catcher\nn02037464\tphalarope\nn02037869\tred phalarope, Phalaropus 
fulicarius\nn02038141\tnorthern phalarope, Lobipes lobatus\nn02038466\tWilson's phalarope, Steganopus tricolor\nn02038993\tpratincole, glareole\nn02039171\tcourser\nn02039497\tcream-colored courser, Cursorius cursor\nn02039780\tcrocodile bird, Pluvianus aegyptius\nn02040266\tstone curlew, thick-knee, Burhinus oedicnemus\nn02040505\tcoastal diving bird\nn02041085\tlarid\nn02041246\tgull, seagull, sea gull\nn02041678\tmew, mew gull, sea mew, Larus canus\nn02041875\tblack-backed gull, great black-backed gull, cob, Larus marinus\nn02042046\therring gull, Larus argentatus\nn02042180\tlaughing gull, blackcap, pewit, pewit gull, Larus ridibundus\nn02042472\tivory gull, Pagophila eburnea\nn02042759\tkittiwake\nn02043063\ttern\nn02043333\tsea swallow, Sterna hirundo\nn02043808\tskimmer\nn02044178\tjaeger\nn02044517\tparasitic jaeger, arctic skua, Stercorarius parasiticus\nn02044778\tskua, bonxie\nn02044908\tgreat skua, Catharacta skua\nn02045369\tauk\nn02045596\tauklet\nn02045864\trazorbill, razor-billed auk, Alca torda\nn02046171\tlittle auk, dovekie, Plautus alle\nn02046759\tguillemot\nn02046939\tblack guillemot, Cepphus grylle\nn02047045\tpigeon guillemot, Cepphus columba\nn02047260\tmurre\nn02047411\tcommon murre, Uria aalge\nn02047517\tthick-billed murre, Uria lomvia\nn02047614\tpuffin\nn02047975\tAtlantic puffin, Fratercula arctica\nn02048115\thorned puffin, Fratercula corniculata\nn02048353\ttufted puffin, Lunda cirrhata\nn02048698\tgaviiform seabird\nn02049088\tloon, diver\nn02049532\tpodicipitiform seabird\nn02050004\tgrebe\nn02050313\tgreat crested grebe, Podiceps cristatus\nn02050442\tred-necked grebe, Podiceps grisegena\nn02050586\tblack-necked grebe, eared grebe, Podiceps nigricollis\nn02050809\tdabchick, little grebe, Podiceps ruficollis\nn02051059\tpied-billed grebe, Podilymbus podiceps\nn02051474\tpelecaniform seabird\nn02051845\tpelican\nn02052204\twhite pelican, Pelecanus erythrorhynchos\nn02052365\tOld world white pelican, Pelecanus 
onocrotalus\nn02052775\tfrigate bird, man-of-war bird\nn02053083\tgannet\nn02053425\tsolan, solan goose, solant goose, Sula bassana\nn02053584\tbooby\nn02054036\tcormorant, Phalacrocorax carbo\nn02054502\tsnakebird, anhinga, darter\nn02054711\twater turkey, Anhinga anhinga\nn02055107\ttropic bird, tropicbird, boatswain bird\nn02055658\tsphenisciform seabird\nn02055803\tpenguin\nn02056228\tAdelie, Adelie penguin, Pygoscelis adeliae\nn02056570\tking penguin, Aptenodytes patagonica\nn02056728\temperor penguin, Aptenodytes forsteri\nn02057035\tjackass penguin, Spheniscus demersus\nn02057330\trock hopper, crested penguin\nn02057731\tpelagic bird, oceanic bird\nn02057898\tprocellariiform seabird\nn02058221\talbatross, mollymawk\nn02058594\twandering albatross, Diomedea exulans\nn02058747\tblack-footed albatross, gooney, gooney bird, goonie, goony, Diomedea nigripes\nn02059162\tpetrel\nn02059541\twhite-chinned petrel, Procellaria aequinoctialis\nn02059852\tgiant petrel, giant fulmar, Macronectes giganteus\nn02060133\tfulmar, fulmar petrel, Fulmarus glacialis\nn02060411\tshearwater\nn02060569\tManx shearwater, Puffinus puffinus\nn02060889\tstorm petrel\nn02061217\tstormy petrel, northern storm petrel, Hydrobates pelagicus\nn02061560\tMother Carey's chicken, Mother Carey's hen, Oceanites oceanicus\nn02061853\tdiving petrel\nn02062017\taquatic mammal\nn02062430\tcetacean, cetacean mammal, blower\nn02062744\twhale\nn02063224\tbaleen whale, whalebone whale\nn02063662\tright whale\nn02064000\tbowhead, bowhead whale, Greenland whale, Balaena mysticetus\nn02064338\trorqual, razorback\nn02064816\tblue whale, sulfur bottom, Balaenoptera musculus\nn02065026\tfinback, finback whale, fin whale, common rorqual, Balaenoptera physalus\nn02065263\tsei whale, Balaenoptera borealis\nn02065407\tlesser rorqual, piked whale, minke whale, Balaenoptera acutorostrata\nn02065726\thumpback, humpback whale, Megaptera novaeangliae\nn02066245\tgrey whale, gray whale, devilfish, Eschrichtius gibbosus, 
Eschrichtius robustus\nn02066707\ttoothed whale\nn02067240\tsperm whale, cachalot, black whale, Physeter catodon\nn02067603\tpygmy sperm whale, Kogia breviceps\nn02067768\tdwarf sperm whale, Kogia simus\nn02068206\tbeaked whale\nn02068541\tbottle-nosed whale, bottlenose whale, bottlenose, Hyperoodon ampullatus\nn02068974\tdolphin\nn02069412\tcommon dolphin, Delphinus delphis\nn02069701\tbottlenose dolphin, bottle-nosed dolphin, bottlenose\nn02069974\tAtlantic bottlenose dolphin, Tursiops truncatus\nn02070174\tPacific bottlenose dolphin, Tursiops gilli\nn02070430\tporpoise\nn02070624\tharbor porpoise, herring hog, Phocoena phocoena\nn02070776\tvaquita, Phocoena sinus\nn02071028\tgrampus, Grampus griseus\nn02071294\tkiller whale, killer, orca, grampus, sea wolf, Orcinus orca\nn02071636\tpilot whale, black whale, common blackfish, blackfish, Globicephala melaena\nn02072040\triver dolphin\nn02072493\tnarwhal, narwal, narwhale, Monodon monoceros\nn02072798\twhite whale, beluga, Delphinapterus leucas\nn02073250\tsea cow, sirenian mammal, sirenian\nn02073831\tmanatee, Trichechus manatus\nn02074367\tdugong, Dugong dugon\nn02074726\tSteller's sea cow, Hydrodamalis gigas\nn02075296\tcarnivore\nn02075612\tomnivore\nn02075927\tpinniped mammal, pinniped, pinnatiped\nn02076196\tseal\nn02076402\tcrabeater seal, crab-eating seal\nn02076779\teared seal\nn02077152\tfur seal\nn02077384\tguadalupe fur seal, Arctocephalus philippi\nn02077658\tfur seal\nn02077787\tAlaska fur seal, Callorhinus ursinus\nn02077923\tsea lion\nn02078292\tSouth American sea lion, Otaria Byronia\nn02078574\tCalifornia sea lion, Zalophus californianus, Zalophus californicus\nn02078738\tAustralian sea lion, Zalophus lobatus\nn02079005\tSteller sea lion, Steller's sea lion, Eumetopias jubatus\nn02079389\tearless seal, true seal, hair seal\nn02079851\tharbor seal, common seal, Phoca vitulina\nn02080146\tharp seal, Pagophilus groenlandicus\nn02080415\telephant seal, sea elephant\nn02080713\tbearded seal, 
squareflipper square flipper, Erignathus barbatus\nn02081060\thooded seal, bladdernose, Cystophora cristata\nn02081571\twalrus, seahorse, sea horse\nn02081798\tAtlantic walrus, Odobenus rosmarus\nn02081927\tPacific walrus, Odobenus divergens\nn02082056\tFissipedia\nn02082190\tfissiped mammal, fissiped\nn02082791\taardvark, ant bear, anteater, Orycteropus afer\nn02083346\tcanine, canid\nn02083672\tbitch\nn02083780\tbrood bitch\nn02084071\tdog, domestic dog, Canis familiaris\nn02084732\tpooch, doggie, doggy, barker, bow-wow\nn02084861\tcur, mongrel, mutt\nn02085019\tfeist, fice\nn02085118\tpariah dog, pye-dog, pie-dog\nn02085272\tlapdog\nn02085374\ttoy dog, toy\nn02085620\tChihuahua\nn02085782\tJapanese spaniel\nn02085936\tMaltese dog, Maltese terrier, Maltese\nn02086079\tPekinese, Pekingese, Peke\nn02086240\tShih-Tzu\nn02086346\ttoy spaniel\nn02086478\tEnglish toy spaniel\nn02086646\tBlenheim spaniel\nn02086753\tKing Charles spaniel\nn02086910\tpapillon\nn02087046\ttoy terrier\nn02087122\thunting dog\nn02087314\tcourser\nn02087394\tRhodesian ridgeback\nn02087551\thound, hound dog\nn02088094\tAfghan hound, Afghan\nn02088238\tbasset, basset hound\nn02088364\tbeagle\nn02088466\tbloodhound, sleuthhound\nn02088632\tbluetick\nn02088745\tboarhound\nn02088839\tcoonhound\nn02088992\tcoondog\nn02089078\tblack-and-tan coonhound\nn02089232\tdachshund, dachsie, badger dog\nn02089468\tsausage dog, sausage hound\nn02089555\tfoxhound\nn02089725\tAmerican foxhound\nn02089867\tWalker hound, Walker foxhound\nn02089973\tEnglish foxhound\nn02090129\tharrier\nn02090253\tPlott hound\nn02090379\tredbone\nn02090475\twolfhound\nn02090622\tborzoi, Russian wolfhound\nn02090721\tIrish wolfhound\nn02090827\tgreyhound\nn02091032\tItalian greyhound\nn02091134\twhippet\nn02091244\tIbizan hound, Ibizan Podenco\nn02091467\tNorwegian elkhound, elkhound\nn02091635\totterhound, otter hound\nn02091831\tSaluki, gazelle hound\nn02092002\tScottish deerhound, 
deerhound\nn02092173\tstaghound\nn02092339\tWeimaraner\nn02092468\tterrier\nn02093056\tbullterrier, bull terrier\nn02093256\tStaffordshire bullterrier, Staffordshire bull terrier\nn02093428\tAmerican Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier\nn02093647\tBedlington terrier\nn02093754\tBorder terrier\nn02093859\tKerry blue terrier\nn02093991\tIrish terrier\nn02094114\tNorfolk terrier\nn02094258\tNorwich terrier\nn02094433\tYorkshire terrier\nn02094562\trat terrier, ratter\nn02094721\tManchester terrier, black-and-tan terrier\nn02094931\ttoy Manchester, toy Manchester terrier\nn02095050\tfox terrier\nn02095212\tsmooth-haired fox terrier\nn02095314\twire-haired fox terrier\nn02095412\twirehair, wirehaired terrier, wire-haired terrier\nn02095570\tLakeland terrier\nn02095727\tWelsh terrier\nn02095889\tSealyham terrier, Sealyham\nn02096051\tAiredale, Airedale terrier\nn02096177\tcairn, cairn terrier\nn02096294\tAustralian terrier\nn02096437\tDandie Dinmont, Dandie Dinmont terrier\nn02096585\tBoston bull, Boston terrier\nn02096756\tschnauzer\nn02097047\tminiature schnauzer\nn02097130\tgiant schnauzer\nn02097209\tstandard schnauzer\nn02097298\tScotch terrier, Scottish terrier, Scottie\nn02097474\tTibetan terrier, chrysanthemum dog\nn02097658\tsilky terrier, Sydney silky\nn02097786\tSkye terrier\nn02097967\tClydesdale terrier\nn02098105\tsoft-coated wheaten terrier\nn02098286\tWest Highland white terrier\nn02098413\tLhasa, Lhasa apso\nn02098550\tsporting dog, gun dog\nn02098806\tbird dog\nn02098906\twater dog\nn02099029\tretriever\nn02099267\tflat-coated retriever\nn02099429\tcurly-coated retriever\nn02099601\tgolden retriever\nn02099712\tLabrador retriever\nn02099849\tChesapeake Bay retriever\nn02099997\tpointer, Spanish pointer\nn02100236\tGerman short-haired pointer\nn02100399\tsetter\nn02100583\tvizsla, Hungarian pointer\nn02100735\tEnglish setter\nn02100877\tIrish setter, red setter\nn02101006\tGordon 
setter\nn02101108\tspaniel\nn02101388\tBrittany spaniel\nn02101556\tclumber, clumber spaniel\nn02101670\tfield spaniel\nn02101861\tspringer spaniel, springer\nn02102040\tEnglish springer, English springer spaniel\nn02102177\tWelsh springer spaniel\nn02102318\tcocker spaniel, English cocker spaniel, cocker\nn02102480\tSussex spaniel\nn02102605\twater spaniel\nn02102806\tAmerican water spaniel\nn02102973\tIrish water spaniel\nn02103181\tgriffon, wire-haired pointing griffon\nn02103406\tworking dog\nn02103841\twatchdog, guard dog\nn02104029\tkuvasz\nn02104184\tattack dog\nn02104280\thousedog\nn02104365\tschipperke\nn02104523\tshepherd dog, sheepdog, sheep dog\nn02104882\tBelgian sheepdog, Belgian shepherd\nn02105056\tgroenendael\nn02105162\tmalinois\nn02105251\tbriard\nn02105412\tkelpie\nn02105505\tkomondor\nn02105641\tOld English sheepdog, bobtail\nn02105855\tShetland sheepdog, Shetland sheep dog, Shetland\nn02106030\tcollie\nn02106166\tBorder collie\nn02106382\tBouvier des Flandres, Bouviers des Flandres\nn02106550\tRottweiler\nn02106662\tGerman shepherd, German shepherd dog, German police dog, alsatian\nn02106854\tpolice dog\nn02106966\tpinscher\nn02107142\tDoberman, Doberman pinscher\nn02107312\tminiature pinscher\nn02107420\tSennenhunde\nn02107574\tGreater Swiss Mountain dog\nn02107683\tBernese mountain dog\nn02107908\tAppenzeller\nn02108000\tEntleBucher\nn02108089\tboxer\nn02108254\tmastiff\nn02108422\tbull mastiff\nn02108551\tTibetan mastiff\nn02108672\tbulldog, English bulldog\nn02108915\tFrench bulldog\nn02109047\tGreat Dane\nn02109150\tguide dog\nn02109256\tSeeing Eye dog\nn02109391\thearing dog\nn02109525\tSaint Bernard, St Bernard\nn02109687\tseizure-alert dog\nn02109811\tsled dog, sledge dog\nn02109961\tEskimo dog, husky\nn02110063\tmalamute, malemute, Alaskan malamute\nn02110185\tSiberian husky\nn02110341\tdalmatian, coach dog, carriage dog\nn02110532\tliver-spotted dalmatian\nn02110627\taffenpinscher, monkey pinscher, monkey 
dog\nn02110806\tbasenji\nn02110958\tpug, pug-dog\nn02111129\tLeonberg\nn02111277\tNewfoundland, Newfoundland dog\nn02111500\tGreat Pyrenees\nn02111626\tspitz\nn02111889\tSamoyed, Samoyede\nn02112018\tPomeranian\nn02112137\tchow, chow chow\nn02112350\tkeeshond\nn02112497\tgriffon, Brussels griffon, Belgian griffon\nn02112706\tBrabancon griffon\nn02112826\tcorgi, Welsh corgi\nn02113023\tPembroke, Pembroke Welsh corgi\nn02113186\tCardigan, Cardigan Welsh corgi\nn02113335\tpoodle, poodle dog\nn02113624\ttoy poodle\nn02113712\tminiature poodle\nn02113799\tstandard poodle\nn02113892\tlarge poodle\nn02113978\tMexican hairless\nn02114100\twolf\nn02114367\ttimber wolf, grey wolf, gray wolf, Canis lupus\nn02114548\twhite wolf, Arctic wolf, Canis lupus tundrarum\nn02114712\tred wolf, maned wolf, Canis rufus, Canis niger\nn02114855\tcoyote, prairie wolf, brush wolf, Canis latrans\nn02115012\tcoydog\nn02115096\tjackal, Canis aureus\nn02115335\twild dog\nn02115641\tdingo, warrigal, warragal, Canis dingo\nn02115913\tdhole, Cuon alpinus\nn02116185\tcrab-eating dog, crab-eating fox, Dusicyon cancrivorus\nn02116450\traccoon dog, Nyctereutes procyonides\nn02116738\tAfrican hunting dog, hyena dog, Cape hunting dog, Lycaon pictus\nn02117135\thyena, hyaena\nn02117512\tstriped hyena, Hyaena hyaena\nn02117646\tbrown hyena, strand wolf, Hyaena brunnea\nn02117900\tspotted hyena, laughing hyena, Crocuta crocuta\nn02118176\taardwolf, Proteles cristata\nn02118333\tfox\nn02118643\tvixen\nn02118707\tReynard\nn02119022\tred fox, Vulpes vulpes\nn02119247\tblack fox\nn02119359\tsilver fox\nn02119477\tred fox, Vulpes fulva\nn02119634\tkit fox, prairie fox, Vulpes velox\nn02119789\tkit fox, Vulpes macrotis\nn02120079\tArctic fox, white fox, Alopex lagopus\nn02120278\tblue fox\nn02120505\tgrey fox, gray fox, Urocyon cinereoargenteus\nn02120997\tfeline, felid\nn02121620\tcat, true cat\nn02121808\tdomestic cat, house cat, Felis domesticus, Felis catus\nn02122298\tkitty, kitty-cat, puss, pussy, 
pussycat\nn02122430\tmouser\nn02122510\talley cat\nn02122580\tstray\nn02122725\ttom, tomcat\nn02122810\tgib\nn02122878\ttabby, queen\nn02122948\tkitten, kitty\nn02123045\ttabby, tabby cat\nn02123159\ttiger cat\nn02123242\ttortoiseshell, tortoiseshell-cat, calico cat\nn02123394\tPersian cat\nn02123478\tAngora, Angora cat\nn02123597\tSiamese cat, Siamese\nn02123785\tblue point Siamese\nn02123917\tBurmese cat\nn02124075\tEgyptian cat\nn02124157\tMaltese, Maltese cat\nn02124313\tAbyssinian, Abyssinian cat\nn02124484\tManx, Manx cat\nn02124623\twildcat\nn02125010\tsand cat\nn02125081\tEuropean wildcat, catamountain, Felis silvestris\nn02125311\tcougar, puma, catamount, mountain lion, painter, panther, Felis concolor\nn02125494\tocelot, panther cat, Felis pardalis\nn02125689\tjaguarundi, jaguarundi cat, jaguarondi, eyra, Felis yagouaroundi\nn02125872\tkaffir cat, caffer cat, Felis ocreata\nn02126028\tjungle cat, Felis chaus\nn02126139\tserval, Felis serval\nn02126317\tleopard cat, Felis bengalensis\nn02126640\tmargay, margay cat, Felis wiedi\nn02126787\tmanul, Pallas's cat, Felis manul\nn02127052\tlynx, catamount\nn02127292\tcommon lynx, Lynx lynx\nn02127381\tCanada lynx, Lynx canadensis\nn02127482\tbobcat, bay lynx, Lynx rufus\nn02127586\tspotted lynx, Lynx pardina\nn02127678\tcaracal, desert lynx, Lynx caracal\nn02127808\tbig cat, cat\nn02128385\tleopard, Panthera pardus\nn02128598\tleopardess\nn02128669\tpanther\nn02128757\tsnow leopard, ounce, Panthera uncia\nn02128925\tjaguar, panther, Panthera onca, Felis onca\nn02129165\tlion, king of beasts, Panthera leo\nn02129463\tlioness\nn02129530\tlionet\nn02129604\ttiger, Panthera tigris\nn02129837\tBengal tiger\nn02129923\ttigress\nn02129991\tliger\nn02130086\ttiglon, tigon\nn02130308\tcheetah, chetah, Acinonyx jubatus\nn02130545\tsaber-toothed tiger, sabertooth\nn02130925\tSmiledon californicus\nn02131653\tbear\nn02132136\tbrown bear, bruin, Ursus arctos\nn02132320\tbruin\nn02132466\tSyrian bear, Ursus arctos 
syriacus\nn02132580\tgrizzly, grizzly bear, silvertip, silver-tip, Ursus horribilis, Ursus arctos horribilis\nn02132788\tAlaskan brown bear, Kodiak bear, Kodiak, Ursus middendorffi, Ursus arctos middendorffi\nn02133161\tAmerican black bear, black bear, Ursus americanus, Euarctos americanus\nn02133400\tcinnamon bear\nn02133704\tAsiatic black bear, black bear, Ursus thibetanus, Selenarctos thibetanus\nn02134084\tice bear, polar bear, Ursus Maritimus, Thalarctos maritimus\nn02134418\tsloth bear, Melursus ursinus, Ursus ursinus\nn02134971\tviverrine, viverrine mammal\nn02135220\tcivet, civet cat\nn02135610\tlarge civet, Viverra zibetha\nn02135844\tsmall civet, Viverricula indica, Viverricula malaccensis\nn02136103\tbinturong, bearcat, Arctictis bintourong\nn02136285\tCryptoprocta, genus Cryptoprocta\nn02136452\tfossa, fossa cat, Cryptoprocta ferox\nn02136794\tfanaloka, Fossa fossa\nn02137015\tgenet, Genetta genetta\nn02137302\tbanded palm civet, Hemigalus hardwickii\nn02137549\tmongoose\nn02137722\tIndian mongoose, Herpestes nyula\nn02137888\tichneumon, Herpestes ichneumon\nn02138169\tpalm cat, palm civet\nn02138441\tmeerkat, mierkat\nn02138647\tslender-tailed meerkat, Suricata suricatta\nn02138777\tsuricate, Suricata tetradactyla\nn02139199\tbat, chiropteran\nn02139671\tfruit bat, megabat\nn02140049\tflying fox\nn02140179\tPteropus capestratus\nn02140268\tPteropus hypomelanus\nn02140491\tharpy, harpy bat, tube-nosed bat, tube-nosed fruit bat\nn02140858\tCynopterus sphinx\nn02141306\tcarnivorous bat, microbat\nn02141611\tmouse-eared bat\nn02141713\tleafnose bat, leaf-nosed bat\nn02142407\tmacrotus, Macrotus californicus\nn02142734\tspearnose bat\nn02142898\tPhyllostomus hastatus\nn02143142\thognose bat, Choeronycteris mexicana\nn02143439\thorseshoe bat\nn02143891\thorseshoe bat\nn02144251\torange bat, orange horseshoe bat, Rhinonicteris aurantius\nn02144593\tfalse vampire, false vampire bat\nn02144936\tbig-eared bat, Megaderma lyra\nn02145424\tvespertilian bat, 
vespertilionid\nn02145910\tfrosted bat, Vespertilio murinus\nn02146201\tred bat, Lasiurus borealis\nn02146371\tbrown bat\nn02146700\tlittle brown bat, little brown myotis, Myotis leucifugus\nn02146879\tcave myotis, Myotis velifer\nn02147173\tbig brown bat, Eptesicus fuscus\nn02147328\tserotine, European brown bat, Eptesicus serotinus\nn02147591\tpallid bat, cave bat, Antrozous pallidus\nn02147947\tpipistrelle, pipistrel, Pipistrellus pipistrellus\nn02148088\teastern pipistrel, Pipistrellus subflavus\nn02148512\tjackass bat, spotted bat, Euderma maculata\nn02148835\tlong-eared bat\nn02148991\twestern big-eared bat, Plecotus townsendi\nn02149420\tfreetail, free-tailed bat, freetailed bat\nn02149653\tguano bat, Mexican freetail bat, Tadarida brasiliensis\nn02149861\tpocketed bat, pocketed freetail bat, Tadirida femorosacca\nn02150134\tmastiff bat\nn02150482\tvampire bat, true vampire bat\nn02150885\tDesmodus rotundus\nn02151230\thairy-legged vampire bat, Diphylla ecaudata\nn02152740\tpredator, predatory animal\nn02152881\tprey, quarry\nn02152991\tgame\nn02153109\tbig game\nn02153203\tgame bird\nn02153809\tfossorial mammal\nn02156732\ttetrapod\nn02156871\tquadruped\nn02157206\thexapod\nn02157285\tbiped\nn02159955\tinsect\nn02160947\tsocial insect\nn02161225\tholometabola, metabola\nn02161338\tdefoliator\nn02161457\tpollinator\nn02161588\tgallfly\nn02162561\tscorpion fly\nn02163008\thanging fly\nn02163297\tcollembolan, springtail\nn02164464\tbeetle\nn02165105\ttiger beetle\nn02165456\tladybug, ladybeetle, lady beetle, ladybird, ladybird beetle\nn02165877\ttwo-spotted ladybug, Adalia bipunctata\nn02166229\tMexican bean beetle, bean beetle, Epilachna varivestis\nn02166567\tHippodamia convergens\nn02166826\tvedalia, Rodolia cardinalis\nn02167151\tground beetle, carabid beetle\nn02167505\tbombardier beetle\nn02167820\tcalosoma\nn02167944\tsearcher, searcher beetle, Calosoma scrutator\nn02168245\tfirefly, lightning bug\nn02168427\tglowworm\nn02168699\tlong-horned beetle, 
longicorn, longicorn beetle\nn02169023\tsawyer, sawyer beetle\nn02169218\tpine sawyer\nn02169497\tleaf beetle, chrysomelid\nn02169705\tflea beetle\nn02169974\tColorado potato beetle, Colorado beetle, potato bug, potato beetle, Leptinotarsa decemlineata\nn02170400\tcarpet beetle, carpet bug\nn02170599\tbuffalo carpet beetle, Anthrenus scrophulariae\nn02170738\tblack carpet beetle\nn02170993\tclerid beetle, clerid\nn02171164\tbee beetle\nn02171453\tlamellicorn beetle\nn02171869\tscarabaeid beetle, scarabaeid, scarabaean\nn02172182\tdung beetle\nn02172518\tscarab, scarabaeus, Scarabaeus sacer\nn02172678\ttumblebug\nn02172761\tdorbeetle\nn02172870\tJune beetle, June bug, May bug, May beetle\nn02173113\tgreen June beetle, figeater\nn02173373\tJapanese beetle, Popillia japonica\nn02173784\tOriental beetle, Asiatic beetle, Anomala orientalis\nn02174001\trhinoceros beetle\nn02174355\tmelolonthid beetle\nn02174659\tcockchafer, May bug, May beetle, Melolontha melolontha\nn02175014\trose chafer, rose bug, Macrodactylus subspinosus\nn02175569\trose chafer, rose beetle, Cetonia aurata\nn02175916\tstag beetle\nn02176261\telaterid beetle, elater, elaterid\nn02176439\tclick beetle, skipjack, snapping beetle\nn02176747\tfirefly, fire beetle, Pyrophorus noctiluca\nn02176916\twireworm\nn02177196\twater beetle\nn02177506\twhirligig beetle\nn02177775\tdeathwatch beetle, deathwatch, Xestobium rufovillosum\nn02177972\tweevil\nn02178411\tsnout beetle\nn02178717\tboll weevil, Anthonomus grandis\nn02179012\tblister beetle, meloid\nn02179192\toil beetle\nn02179340\tSpanish fly\nn02179891\tDutch-elm beetle, Scolytus multistriatus\nn02180233\tbark beetle\nn02180427\tspruce bark beetle, Dendroctonus rufipennis\nn02180875\trove beetle\nn02181235\tdarkling beetle, darkling groung beetle, tenebrionid\nn02181477\tmealworm\nn02181724\tflour beetle, flour weevil\nn02182045\tseed beetle, seed weevil\nn02182355\tpea weevil, Bruchus pisorum\nn02182642\tbean weevil, Acanthoscelides 
obtectus\nn02182930\trice weevil, black weevil, Sitophylus oryzae\nn02183096\tAsian longhorned beetle, Anoplophora glabripennis\nn02183507\tweb spinner\nn02183857\tlouse, sucking louse\nn02184473\tcommon louse, Pediculus humanus\nn02184589\thead louse, Pediculus capitis\nn02184720\tbody louse, cootie, Pediculus corporis\nn02185167\tcrab louse, pubic louse, crab, Phthirius pubis\nn02185481\tbird louse, biting louse, louse\nn02186153\tflea\nn02186717\tPulex irritans\nn02187150\tdog flea, Ctenocephalides canis\nn02187279\tcat flea, Ctenocephalides felis\nn02187554\tchigoe, chigger, chigoe flea, Tunga penetrans\nn02187900\tsticktight, sticktight flea, Echidnophaga gallinacea\nn02188699\tdipterous insect, two-winged insects, dipteran, dipteron\nn02189363\tgall midge, gallfly, gall gnat\nn02189670\tHessian fly, Mayetiola destructor\nn02190166\tfly\nn02190790\thousefly, house fly, Musca domestica\nn02191273\ttsetse fly, tsetse, tzetze fly, tzetze, glossina\nn02191773\tblowfly, blow fly\nn02191979\tbluebottle, Calliphora vicina\nn02192252\tgreenbottle, greenbottle fly\nn02192513\tflesh fly, Sarcophaga carnaria\nn02192814\ttachina fly\nn02193009\tgadfly\nn02193163\tbotfly\nn02194249\thuman botfly, Dermatobia hominis\nn02194750\tsheep botfly, sheep gadfly, Oestrus ovis\nn02195091\twarble fly\nn02195526\thorsefly, cleg, clegg, horse fly\nn02195819\tbee fly\nn02196119\trobber fly, bee killer\nn02196344\tfruit fly, pomace fly\nn02196896\tapple maggot, railroad worm, Rhagoletis pomonella\nn02197185\tMediterranean fruit fly, medfly, Ceratitis capitata\nn02197689\tdrosophila, Drosophila melanogaster\nn02197877\tvinegar fly\nn02198129\tleaf miner, leaf-miner\nn02198532\tlouse fly, hippoboscid\nn02198859\thorse tick, horsefly, Hippobosca equina\nn02199170\tsheep ked, sheep-tick, sheep tick, Melophagus Ovinus\nn02199502\thorn fly, Haematobia irritans\nn02200198\tmosquito\nn02200509\twiggler, wriggler\nn02200630\tgnat\nn02200850\tyellow-fever mosquito, Aedes aegypti\nn02201000\tAsian 
tiger mosquito, Aedes albopictus\nn02201497\tanopheline\nn02201626\tmalarial mosquito, malaria mosquito\nn02202006\tcommon mosquito, Culex pipiens\nn02202124\tCulex quinquefasciatus, Culex fatigans\nn02202287\tgnat\nn02202678\tpunkie, punky, punkey, no-see-um, biting midge\nn02203152\tmidge\nn02203592\tfungus gnat\nn02203978\tpsychodid\nn02204249\tsand fly, sandfly, Phlebotomus papatasii\nn02204722\tfungus gnat, sciara, sciarid\nn02204907\tarmyworm\nn02205219\tcrane fly, daddy longlegs\nn02205673\tblackfly, black fly, buffalo gnat\nn02206270\thymenopterous insect, hymenopteran, hymenopteron, hymenopter\nn02206856\tbee\nn02207179\tdrone\nn02207345\tqueen bee\nn02207449\tworker\nn02207647\tsoldier\nn02207805\tworker bee\nn02208280\thoneybee, Apis mellifera\nn02208498\tAfricanized bee, Africanized honey bee, killer bee, Apis mellifera scutellata, Apis mellifera adansonii\nn02208848\tblack bee, German bee\nn02208979\tCarniolan bee\nn02209111\tItalian bee\nn02209354\tcarpenter bee\nn02209624\tbumblebee, humblebee\nn02209964\tcuckoo-bumblebee\nn02210427\tandrena, andrenid, mining bee\nn02210921\tNomia melanderi, alkali bee\nn02211444\tleaf-cutting bee, leaf-cutter, leaf-cutter bee\nn02211627\tmason bee\nn02211896\tpotter bee\nn02212062\twasp\nn02212602\tvespid, vespid wasp\nn02212958\tpaper wasp\nn02213107\thornet\nn02213239\tgiant hornet, Vespa crabro\nn02213543\tcommon wasp, Vespula vulgaris\nn02213663\tbald-faced hornet, white-faced hornet, Vespula maculata\nn02213788\tyellow jacket, yellow hornet, Vespula maculifrons\nn02214096\tPolistes annularis\nn02214341\tmason wasp\nn02214499\tpotter wasp\nn02214660\tMutillidae, family Mutillidae\nn02214773\tvelvet ant\nn02215161\tsphecoid wasp, sphecoid\nn02215621\tmason wasp\nn02215770\tdigger wasp\nn02216211\tcicada killer, Sphecius speciosis\nn02216365\tmud dauber\nn02216740\tgall wasp, gallfly, cynipid wasp, cynipid gall wasp\nn02217563\tchalcid fly, chalcidfly, chalcid, chalcid wasp\nn02217839\tstrawworm, 
jointworm\nn02218134\tchalcis fly\nn02218371\tichneumon fly\nn02218713\tsawfly\nn02219015\tbirch leaf miner, Fenusa pusilla\nn02219486\tant, emmet, pismire\nn02220055\tpharaoh ant, pharaoh's ant, Monomorium pharaonis\nn02220225\tlittle black ant, Monomorium minimum\nn02220518\tarmy ant, driver ant, legionary ant\nn02220804\tcarpenter ant\nn02221083\tfire ant\nn02221414\twood ant, Formica rufa\nn02221571\tslave ant\nn02221715\tFormica fusca\nn02221820\tslave-making ant, slave-maker\nn02222035\tsanguinary ant, Formica sanguinea\nn02222321\tbulldog ant\nn02222582\tAmazon ant, Polyergus rufescens\nn02223266\ttermite, white ant\nn02223520\tdry-wood termite\nn02224023\tReticulitermes lucifugus\nn02224713\tMastotermes darwiniensis\nn02225081\tMastotermes electrodominicus\nn02225798\tpowder-post termite, Cryptotermes brevis\nn02226183\torthopterous insect, orthopteron, orthopteran\nn02226429\tgrasshopper, hopper\nn02226821\tshort-horned grasshopper, acridid\nn02226970\tlocust\nn02227247\tmigratory locust, Locusta migratoria\nn02227604\tmigratory grasshopper\nn02227966\tlong-horned grasshopper, tettigoniid\nn02228341\tkatydid\nn02228697\tmormon cricket, Anabrus simplex\nn02229156\tsand cricket, Jerusalem cricket, Stenopelmatus fuscus\nn02229544\tcricket\nn02229765\tmole cricket\nn02230023\tEuropean house cricket, Acheta domestica\nn02230187\tfield cricket, Acheta assimilis\nn02230480\ttree cricket\nn02230634\tsnowy tree cricket, Oecanthus fultoni\nn02231052\tphasmid, phasmid insect\nn02231487\twalking stick, walkingstick, stick insect\nn02231803\tdiapheromera, Diapheromera femorata\nn02232223\twalking leaf, leaf insect\nn02233338\tcockroach, roach\nn02233943\toriental cockroach, oriental roach, Asiatic cockroach, blackbeetle, Blatta orientalis\nn02234355\tAmerican cockroach, Periplaneta americana\nn02234570\tAustralian cockroach, Periplaneta australasiae\nn02234848\tGerman cockroach, Croton bug, crotonbug, water bug, Blattella germanica\nn02235205\tgiant 
cockroach\nn02236044\tmantis, mantid\nn02236241\tpraying mantis, praying mantid, Mantis religioso\nn02236355\tbug\nn02236896\themipterous insect, bug, hemipteran, hemipteron\nn02237424\tleaf bug, plant bug\nn02237581\tmirid bug, mirid, capsid\nn02237868\tfour-lined plant bug, four-lined leaf bug, Poecilocapsus lineatus\nn02238235\tlygus bug\nn02238358\ttarnished plant bug, Lygus lineolaris\nn02238594\tlace bug\nn02238887\tlygaeid, lygaeid bug\nn02239192\tchinch bug, Blissus leucopterus\nn02239528\tcoreid bug, coreid\nn02239774\tsquash bug, Anasa tristis\nn02240068\tleaf-footed bug, leaf-foot bug\nn02240517\tbedbug, bed bug, chinch, Cimex lectularius\nn02241008\tbackswimmer, Notonecta undulata\nn02241426\ttrue bug\nn02241569\theteropterous insect\nn02241799\twater bug\nn02242137\tgiant water bug\nn02242455\twater scorpion\nn02243209\twater boatman, boat bug\nn02243562\twater strider, pond-skater, water skater\nn02243878\tcommon pond-skater, Gerris lacustris\nn02244173\tassassin bug, reduviid\nn02244515\tconenose, cone-nosed bug, conenose bug, big bedbug, kissing bug\nn02244797\twheel bug, Arilus cristatus\nn02245111\tfirebug\nn02245443\tcotton stainer\nn02246011\thomopterous insect, homopteran\nn02246628\twhitefly\nn02246941\tcitrus whitefly, Dialeurodes citri\nn02247216\tgreenhouse whitefly, Trialeurodes vaporariorum\nn02247511\tsweet-potato whitefly\nn02247655\tsuperbug, Bemisia tabaci, poinsettia strain\nn02248062\tcotton strain\nn02248368\tcoccid insect\nn02248510\tscale insect\nn02248887\tsoft scale\nn02249134\tbrown soft scale, Coccus hesperidum\nn02249515\tarmored scale\nn02249809\tSan Jose scale, Aspidiotus perniciosus\nn02250280\tcochineal insect, cochineal, Dactylopius coccus\nn02250822\tmealybug, mealy bug\nn02251067\tcitrophilous mealybug, citrophilus mealybug, Pseudococcus fragilis\nn02251233\tComstock mealybug, Comstock's mealybug, Pseudococcus comstocki\nn02251593\tcitrus mealybug, Planococcus citri\nn02251775\tplant louse, 
louse\nn02252226\taphid\nn02252799\tapple aphid, green apple aphid, Aphis pomi\nn02252972\tblackfly, bean aphid, Aphis fabae\nn02253127\tgreenfly\nn02253264\tgreen peach aphid\nn02253494\tant cow\nn02253715\twoolly aphid, woolly plant louse\nn02253913\twoolly apple aphid, American blight, Eriosoma lanigerum\nn02254246\twoolly alder aphid, Prociphilus tessellatus\nn02254697\tadelgid\nn02254901\tbalsam woolly aphid, Adelges piceae\nn02255023\tspruce gall aphid, Adelges abietis\nn02255391\twoolly adelgid\nn02256172\tjumping plant louse, psylla, psyllid\nn02256656\tcicada, cicala\nn02257003\tdog-day cicada, harvest fly\nn02257284\tseventeen-year locust, periodical cicada, Magicicada septendecim\nn02257715\tspittle insect, spittlebug\nn02257985\tfroghopper\nn02258198\tmeadow spittlebug, Philaenus spumarius\nn02258508\tpine spittlebug\nn02258629\tSaratoga spittlebug, Aphrophora saratogensis\nn02259212\tleafhopper\nn02259377\tplant hopper, planthopper\nn02259708\ttreehopper\nn02259987\tlantern fly, lantern-fly\nn02260421\tpsocopterous insect\nn02260863\tpsocid\nn02261063\tbark-louse, bark louse\nn02261419\tbooklouse, book louse, deathwatch, Liposcelis divinatorius\nn02261757\tcommon booklouse, Trogium pulsatorium\nn02262178\tephemerid, ephemeropteran\nn02262449\tmayfly, dayfly, shadfly\nn02262803\tstonefly, stone fly, plecopteran\nn02263378\tneuropteron, neuropteran, neuropterous insect\nn02264021\tant lion, antlion, antlion fly\nn02264232\tdoodlebug, ant lion, antlion\nn02264363\tlacewing, lacewing fly\nn02264591\taphid lion, aphis lion\nn02264885\tgreen lacewing, chrysopid, stink fly\nn02265330\tbrown lacewing, hemerobiid, hemerobiid fly\nn02266050\tdobson, dobsonfly, dobson fly, Corydalus cornutus\nn02266269\thellgrammiate, dobson\nn02266421\tfish fly, fish-fly\nn02266864\talderfly, alder fly, Sialis lutaria\nn02267208\tsnakefly\nn02267483\tmantispid\nn02268148\todonate\nn02268443\tdragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake 
doctor, mosquito hawk, skeeter hawk\nn02268853\tdamselfly\nn02269196\ttrichopterous insect, trichopteran, trichopteron\nn02269340\tcaddis fly, caddis-fly, caddice fly, caddice-fly\nn02269522\tcaseworm\nn02269657\tcaddisworm, strawworm\nn02270011\tthysanuran insect, thysanuron\nn02270200\tbristletail\nn02270623\tsilverfish, Lepisma saccharina\nn02270945\tfirebrat, Thermobia domestica\nn02271222\tjumping bristletail, machilid\nn02271570\tthysanopter, thysanopteron, thysanopterous insect\nn02271897\tthrips, thrip, thripid\nn02272286\ttobacco thrips, Frankliniella fusca\nn02272552\tonion thrips, onion louse, Thrips tobaci\nn02272871\tearwig\nn02273392\tcommon European earwig, Forficula auricularia\nn02274024\tlepidopterous insect, lepidopteron, lepidopteran\nn02274259\tbutterfly\nn02274822\tnymphalid, nymphalid butterfly, brush-footed butterfly, four-footed butterfly\nn02275560\tmourning cloak, mourning cloak butterfly, Camberwell beauty, Nymphalis antiopa\nn02275773\ttortoiseshell, tortoiseshell butterfly\nn02276078\tpainted beauty, Vanessa virginiensis\nn02276258\tadmiral\nn02276355\tred admiral, Vanessa atalanta\nn02276749\twhite admiral, Limenitis camilla\nn02276902\tbanded purple, white admiral, Limenitis arthemis\nn02277094\tred-spotted purple, Limenitis astyanax\nn02277268\tviceroy, Limenitis archippus\nn02277422\tanglewing\nn02277742\tringlet, ringlet butterfly\nn02278024\tcomma, comma butterfly, Polygonia comma\nn02278210\tfritillary\nn02278463\tsilverspot\nn02278839\temperor butterfly, emperor\nn02278980\tpurple emperor, Apatura iris\nn02279257\tpeacock, peacock butterfly, Inachis io\nn02279637\tdanaid, danaid butterfly\nn02279972\tmonarch, monarch butterfly, milkweed butterfly, Danaus plexippus\nn02280458\tpierid, pierid butterfly\nn02280649\tcabbage butterfly\nn02281015\tsmall white, Pieris rapae\nn02281136\tlarge white, Pieris brassicae\nn02281267\tsouthern cabbage butterfly, Pieris protodice\nn02281406\tsulphur butterfly, sulfur 
butterfly\nn02281787\tlycaenid, lycaenid butterfly\nn02282257\tblue\nn02282385\tcopper\nn02282553\tAmerican copper, Lycaena hypophlaeas\nn02282903\thairstreak, hairstreak butterfly\nn02283077\tStrymon melinus\nn02283201\tmoth\nn02283617\tmoth miller, miller\nn02283951\ttortricid, tortricid moth\nn02284224\tleaf roller, leaf-roller\nn02284611\ttea tortrix, tortrix, Homona coffearia\nn02284884\torange tortrix, tortrix, Argyrotaenia citrana\nn02285179\tcodling moth, codlin moth, Carpocapsa pomonella\nn02285548\tlymantriid, tussock moth\nn02285801\ttussock caterpillar\nn02286089\tgypsy moth, gipsy moth, Lymantria dispar\nn02286425\tbrowntail, brown-tail moth, Euproctis phaeorrhoea\nn02286654\tgold-tail moth, Euproctis chrysorrhoea\nn02287004\tgeometrid, geometrid moth\nn02287352\tPaleacrita vernata\nn02287622\tAlsophila pometaria\nn02287799\tcankerworm\nn02287987\tspring cankerworm\nn02288122\tfall cankerworm\nn02288268\tmeasuring worm, inchworm, looper\nn02288789\tpyralid, pyralid moth\nn02289307\tbee moth, wax moth, Galleria mellonella\nn02289610\tcorn borer, European corn borer moth, corn borer moth, Pyrausta nubilalis\nn02289988\tMediterranean flour moth, Anagasta kuehniella\nn02290340\ttobacco moth, cacao moth, Ephestia elutella\nn02290664\talmond moth, fig moth, Cadra cautella\nn02290870\traisin moth, Cadra figulilella\nn02291220\ttineoid, tineoid moth\nn02291572\ttineid, tineid moth\nn02291748\tclothes moth\nn02292085\tcasemaking clothes moth, Tinea pellionella\nn02292401\twebbing clothes moth, webbing moth, Tineola bisselliella\nn02292692\tcarpet moth, tapestry moth, Trichophaga tapetzella\nn02293352\tgelechiid, gelechiid moth\nn02293868\tgrain moth\nn02294097\tangoumois moth, angoumois grain moth, Sitotroga cerealella\nn02294407\tpotato moth, potato tuber moth, splitworm, Phthorimaea operculella\nn02294577\tpotato tuberworm, Phthorimaea operculella\nn02295064\tnoctuid moth, noctuid, owlet moth\nn02295390\tcutworm\nn02295870\tunderwing\nn02296021\tred 
underwing, Catocala nupta\nn02296276\tantler moth, Cerapteryx graminis\nn02296612\theliothis moth, Heliothis zia\nn02296912\tarmy cutworm, Chorizagrotis auxiliaris\nn02297294\tarmyworm, Pseudaletia unipuncta\nn02297442\tarmyworm, army worm, Pseudaletia unipuncta\nn02297819\tSpodoptera exigua\nn02297938\tbeet armyworm, Spodoptera exigua\nn02298095\tSpodoptera frugiperda\nn02298218\tfall armyworm, Spodoptera frugiperda\nn02298541\thawkmoth, hawk moth, sphingid, sphinx moth, hummingbird moth\nn02299039\tManduca sexta\nn02299157\ttobacco hornworm, tomato worm, Manduca sexta\nn02299378\tManduca quinquemaculata\nn02299505\ttomato hornworm, potato worm, Manduca quinquemaculata\nn02299846\tdeath's-head moth, Acherontia atropos\nn02300173\tbombycid, bombycid moth, silkworm moth\nn02300554\tdomestic silkworm moth, domesticated silkworm moth, Bombyx mori\nn02300797\tsilkworm\nn02301452\tsaturniid, saturniid moth\nn02301935\temperor, emperor moth, Saturnia pavonia\nn02302244\timperial moth, Eacles imperialis\nn02302459\tgiant silkworm moth, silkworm moth\nn02302620\tsilkworm, giant silkworm, wild wilkworm\nn02302969\tluna moth, Actias luna\nn02303284\tcecropia, cecropia moth, Hyalophora cecropia\nn02303585\tcynthia moth, Samia cynthia, Samia walkeri\nn02303777\tailanthus silkworm, Samia cynthia\nn02304036\tio moth, Automeris io\nn02304432\tpolyphemus moth, Antheraea polyphemus\nn02304657\tpernyi moth, Antheraea pernyi\nn02304797\ttussah, tusseh, tussur, tussore, tusser, Antheraea mylitta\nn02305085\tatlas moth, Atticus atlas\nn02305407\tarctiid, arctiid moth\nn02305636\ttiger moth\nn02305929\tcinnabar, cinnabar moth, Callimorpha jacobeae\nn02306433\tlasiocampid, lasiocampid moth\nn02306825\teggar, egger\nn02307176\ttent-caterpillar moth, Malacosoma americana\nn02307325\ttent caterpillar\nn02307515\ttent-caterpillar moth, Malacosoma disstria\nn02307681\tforest tent caterpillar, Malacosoma disstria\nn02307910\tlappet, lappet moth\nn02308033\tlappet 
caterpillar\nn02308139\twebworm\nn02308471\twebworm moth\nn02308618\tHyphantria cunea\nn02308735\tfall webworm, Hyphantria cunea\nn02309120\tgarden webworm, Loxostege similalis\nn02309242\tinstar\nn02309337\tcaterpillar\nn02309841\tcorn borer, Pyrausta nubilalis\nn02310000\tbollworm\nn02310149\tpink bollworm, Gelechia gossypiella\nn02310334\tcorn earworm, cotton bollworm, tomato fruitworm, tobacco budworm, vetchworm, Heliothis zia\nn02310585\tcabbageworm, Pieris rapae\nn02310717\twoolly bear, woolly bear caterpillar\nn02310941\twoolly bear moth\nn02311060\tlarva\nn02311617\tnymph\nn02311748\tleptocephalus\nn02312006\tgrub\nn02312175\tmaggot\nn02312325\tleatherjacket\nn02312427\tpupa\nn02312640\tchrysalis\nn02312912\timago\nn02313008\tqueen\nn02313360\tphoronid\nn02313709\tbryozoan, polyzoan, sea mat, sea moss, moss animal\nn02315487\tbrachiopod, lamp shell, lampshell\nn02315821\tpeanut worm, sipunculid\nn02316707\techinoderm\nn02317335\tstarfish, sea star\nn02317781\tbrittle star, brittle-star, serpent star\nn02318167\tbasket star, basket fish\nn02318687\tAstrophyton muricatum\nn02319095\tsea urchin\nn02319308\tedible sea urchin, Echinus esculentus\nn02319555\tsand dollar\nn02319829\theart urchin\nn02320127\tcrinoid\nn02320465\tsea lily\nn02321170\tfeather star, comatulid\nn02321529\tsea cucumber, holothurian\nn02322047\ttrepang, Holothuria edulis\nn02322992\tDuplicidentata\nn02323449\tlagomorph, gnawing mammal\nn02323902\tleporid, leporid mammal\nn02324045\trabbit, coney, cony\nn02324431\trabbit ears\nn02324514\tlapin\nn02324587\tbunny, bunny rabbit\nn02324850\tEuropean rabbit, Old World rabbit, Oryctolagus cuniculus\nn02325366\twood rabbit, cottontail, cottontail rabbit\nn02325722\teastern cottontail, Sylvilagus floridanus\nn02325884\tswamp rabbit, canecutter, swamp hare, Sylvilagus aquaticus\nn02326074\tmarsh hare, swamp rabbit, Sylvilagus palustris\nn02326432\thare\nn02326763\tleveret\nn02326862\tEuropean hare, Lepus 
europaeus\nn02327028\tjackrabbit\nn02327175\twhite-tailed jackrabbit, whitetail jackrabbit, Lepus townsendi\nn02327435\tblacktail jackrabbit, Lepus californicus\nn02327656\tpolar hare, Arctic hare, Lepus arcticus\nn02327842\tsnowshoe hare, snowshoe rabbit, varying hare, Lepus americanus\nn02328009\tBelgian hare, leporide\nn02328150\tAngora, Angora rabbit\nn02328429\tpika, mouse hare, rock rabbit, coney, cony\nn02328820\tlittle chief hare, Ochotona princeps\nn02328942\tcollared pika, Ochotona collaris\nn02329401\trodent, gnawer\nn02330245\tmouse\nn02331046\trat\nn02331309\tpocket rat\nn02331842\tmurine\nn02332156\thouse mouse, Mus musculus\nn02332447\tharvest mouse, Micromyx minutus\nn02332755\tfield mouse, fieldmouse\nn02332954\tnude mouse\nn02333190\tEuropean wood mouse, Apodemus sylvaticus\nn02333546\tbrown rat, Norway rat, Rattus norvegicus\nn02333733\twharf rat\nn02333819\tsewer rat\nn02333909\tblack rat, roof rat, Rattus rattus\nn02334201\tbandicoot rat, mole rat\nn02334460\tjerboa rat\nn02334728\tkangaroo mouse\nn02335127\twater rat\nn02335231\tbeaver rat\nn02336011\tNew World mouse\nn02336275\tAmerican harvest mouse, harvest mouse\nn02336641\twood mouse\nn02336826\twhite-footed mouse, vesper mouse, Peromyscus leucopus\nn02337001\tdeer mouse, Peromyscus maniculatus\nn02337171\tcactus mouse, Peromyscus eremicus\nn02337332\tcotton mouse, Peromyscus gossypinus\nn02337598\tpygmy mouse, Baiomys taylori\nn02337902\tgrasshopper mouse\nn02338145\tmuskrat, musquash, Ondatra zibethica\nn02338449\tround-tailed muskrat, Florida water rat, Neofiber alleni\nn02338722\tcotton rat, Sigmodon hispidus\nn02338901\twood rat, wood-rat\nn02339282\tdusky-footed wood rat\nn02339376\tvole, field mouse\nn02339922\tpackrat, pack rat, trade rat, bushytail woodrat, Neotoma cinerea\nn02340186\tdusky-footed woodrat, Neotoma fuscipes\nn02340358\teastern woodrat, Neotoma floridana\nn02340640\trice rat, Oryzomys palustris\nn02340930\tpine vole, pine mouse, Pitymys pinetorum\nn02341288\tmeadow 
vole, meadow mouse, Microtus pennsylvaticus\nn02341475\twater vole, Richardson vole, Microtus richardsoni\nn02341616\tprairie vole, Microtus ochrogaster\nn02341974\twater vole, water rat, Arvicola amphibius\nn02342250\tred-backed mouse, redback vole\nn02342534\tphenacomys\nn02342885\thamster\nn02343058\tEurasian hamster, Cricetus cricetus\nn02343320\tgolden hamster, Syrian hamster, Mesocricetus auratus\nn02343772\tgerbil, gerbille\nn02344175\tjird\nn02344270\ttamarisk gerbil, Meriones unguiculatus\nn02344408\tsand rat, Meriones longifrons\nn02344528\tlemming\nn02344918\tEuropean lemming, Lemmus lemmus\nn02345078\tbrown lemming, Lemmus trimucronatus\nn02345340\tgrey lemming, gray lemming, red-backed lemming\nn02345600\tpied lemming\nn02345774\tHudson bay collared lemming, Dicrostonyx hudsonius\nn02345997\tsouthern bog lemming, Synaptomys cooperi\nn02346170\tnorthern bog lemming, Synaptomys borealis\nn02346627\tporcupine, hedgehog\nn02346998\tOld World porcupine\nn02347274\tbrush-tailed porcupine, brush-tail porcupine\nn02347573\tlong-tailed porcupine, Trichys lipura\nn02347744\tNew World porcupine\nn02348173\tCanada porcupine, Erethizon dorsatum\nn02348788\tpocket mouse\nn02349205\tsilky pocket mouse, Perognathus flavus\nn02349390\tplains pocket mouse, Perognathus flavescens\nn02349557\thispid pocket mouse, Perognathus hispidus\nn02349847\tMexican pocket mouse, Liomys irroratus\nn02350105\tkangaroo rat, desert rat, Dipodomys phillipsii\nn02350357\tOrd kangaroo rat, Dipodomys ordi\nn02350670\tkangaroo mouse, dwarf pocket rat\nn02350989\tjumping mouse\nn02351343\tmeadow jumping mouse, Zapus hudsonius\nn02351870\tjerboa\nn02352002\ttypical jerboa\nn02352290\tJaculus jaculus\nn02352591\tdormouse\nn02352932\tloir, Glis glis\nn02353172\thazel mouse, Muscardinus avellanarius\nn02353411\tlerot\nn02353861\tgopher, pocket gopher, pouched rat\nn02354162\tplains pocket gopher, Geomys bursarius\nn02354320\tsoutheastern pocket gopher, Geomys pinetis\nn02354621\tvalley pocket 
gopher, Thomomys bottae\nn02354781\tnorthern pocket gopher, Thomomys talpoides\nn02355227\tsquirrel\nn02355477\ttree squirrel\nn02356381\teastern grey squirrel, eastern gray squirrel, cat squirrel, Sciurus carolinensis\nn02356612\twestern grey squirrel, western gray squirrel, Sciurus griseus\nn02356798\tfox squirrel, eastern fox squirrel, Sciurus niger\nn02356977\tblack squirrel\nn02357111\tred squirrel, cat squirrel, Sciurus vulgaris\nn02357401\tAmerican red squirrel, spruce squirrel, red squirrel, Sciurus hudsonicus, Tamiasciurus hudsonicus\nn02357585\tchickeree, Douglas squirrel, Tamiasciurus douglasi\nn02357911\tantelope squirrel, whitetail antelope squirrel, antelope chipmunk, Citellus leucurus\nn02358091\tground squirrel, gopher, spermophile\nn02358390\tmantled ground squirrel, Citellus lateralis\nn02358584\tsuslik, souslik, Citellus citellus\nn02358712\tflickertail, Richardson ground squirrel, Citellus richardsoni\nn02358890\trock squirrel, Citellus variegatus\nn02359047\tArctic ground squirrel, parka squirrel, Citellus parryi\nn02359324\tprairie dog, prairie marmot\nn02359556\tblacktail prairie dog, Cynomys ludovicianus\nn02359667\twhitetail prairie dog, Cynomys gunnisoni\nn02359915\teastern chipmunk, hackee, striped squirrel, ground squirrel, Tamias striatus\nn02360282\tchipmunk\nn02360480\tbaronduki, baranduki, barunduki, burunduki, Eutamius asiaticus, Eutamius sibiricus\nn02360781\tAmerican flying squirrel\nn02360933\tsouthern flying squirrel, Glaucomys volans\nn02361090\tnorthern flying squirrel, Glaucomys sabrinus\nn02361337\tmarmot\nn02361587\tgroundhog, woodchuck, Marmota monax\nn02361706\thoary marmot, whistler, whistling marmot, Marmota caligata\nn02361850\tyellowbelly marmot, rockchuck, Marmota flaviventris\nn02362194\tAsiatic flying squirrel\nn02363005\tbeaver\nn02363245\tOld World beaver, Castor fiber\nn02363351\tNew World beaver, Castor canadensis\nn02363996\tmountain beaver, sewellel, Aplodontia rufa\nn02364520\tcavy\nn02364673\tguinea pig, 
Cavia cobaya\nn02364840\taperea, wild cavy, Cavia porcellus\nn02365108\tmara, Dolichotis patagonum\nn02365480\tcapybara, capibara, Hydrochoerus hydrochaeris\nn02366002\tagouti, Dasyprocta aguti\nn02366301\tpaca, Cuniculus paca\nn02366579\tmountain paca\nn02366959\tcoypu, nutria, Myocastor coypus\nn02367492\tchinchilla, Chinchilla laniger\nn02367812\tmountain chinchilla, mountain viscacha\nn02368116\tviscacha, chinchillon, Lagostomus maximus\nn02368399\tabrocome, chinchilla rat, rat chinchilla\nn02368821\tmole rat\nn02369293\tmole rat\nn02369555\tsand rat\nn02369680\tnaked mole rat\nn02369935\tqueen, queen mole rat\nn02370137\tDamaraland mole rat\nn02370525\tUngulata\nn02370806\tungulate, hoofed mammal\nn02371344\tunguiculate, unguiculate mammal\nn02372140\tdinoceras, uintathere\nn02372584\thyrax, coney, cony, dassie, das\nn02372952\trock hyrax, rock rabbit, Procavia capensis\nn02373336\todd-toed ungulate, perissodactyl, perissodactyl mammal\nn02374149\tequine, equid\nn02374451\thorse, Equus caballus\nn02375302\troan\nn02375438\tstablemate, stable companion\nn02375757\tgee-gee\nn02375862\teohippus, dawn horse\nn02376542\tfoal\nn02376679\tfilly\nn02376791\tcolt\nn02376918\tmale horse\nn02377063\tridgeling, ridgling, ridgel, ridgil\nn02377181\tstallion, entire\nn02377291\tstud, studhorse\nn02377388\tgelding\nn02377480\tmare, female horse\nn02377603\tbroodmare, stud mare\nn02377703\tsaddle horse, riding horse, mount\nn02378149\tremount\nn02378299\tpalfrey\nn02378415\twarhorse\nn02378541\tcavalry horse\nn02378625\tcharger, courser\nn02378755\tsteed\nn02378870\tprancer\nn02378969\thack\nn02379081\tcow pony\nn02379183\tquarter horse\nn02379329\tMorgan\nn02379430\tTennessee walker, Tennessee walking horse, Walking horse, Plantation walking horse\nn02379630\tAmerican saddle horse\nn02379743\tAppaloosa\nn02379908\tArabian, Arab\nn02380052\tLippizan, Lipizzan, Lippizaner\nn02380335\tpony\nn02380464\tpolo pony\nn02380583\tmustang\nn02380745\tbronco, bronc, 
broncho\nn02380875\tbucking bronco\nn02381004\tbuckskin\nn02381119\tcrowbait, crow-bait\nn02381261\tdun\nn02381364\tgrey, gray\nn02381460\twild horse\nn02381609\ttarpan, Equus caballus gomelini\nn02381831\tPrzewalski's horse, Przevalski's horse, Equus caballus przewalskii, Equus caballus przevalskii\nn02382039\tcayuse, Indian pony\nn02382132\thack\nn02382204\thack, jade, nag, plug\nn02382338\tplow horse, plough horse\nn02382437\tpony\nn02382635\tShetland pony\nn02382750\tWelsh pony\nn02382850\tExmoor\nn02382948\tracehorse, race horse, bangtail\nn02383231\tthoroughbred\nn02384741\tsteeplechaser\nn02384858\tracer\nn02385002\tfinisher\nn02385098\tpony\nn02385214\tyearling\nn02385580\tdark horse\nn02385676\tmudder\nn02385776\tnonstarter\nn02385898\tstalking-horse\nn02386014\tharness horse\nn02386141\tcob\nn02386224\thackney\nn02386310\tworkhorse\nn02386496\tdraft horse, draught horse, dray horse\nn02386746\tpackhorse\nn02386853\tcarthorse, cart horse, drayhorse\nn02386968\tClydesdale\nn02387093\tPercheron\nn02387254\tfarm horse, dobbin\nn02387346\tshire, shire horse\nn02387452\tpole horse, poler\nn02387722\tpost horse, post-horse, poster\nn02387887\tcoach horse\nn02387983\tpacer\nn02388143\tpacer, pacemaker, pacesetter\nn02388276\ttrotting horse, trotter\nn02388453\tpole horse\nn02388588\tstepper, high stepper\nn02388735\tchestnut\nn02388832\tliver chestnut\nn02388917\tbay\nn02389026\tsorrel\nn02389128\tpalomino\nn02389261\tpinto\nn02389346\tass\nn02389559\tdomestic ass, donkey, Equus asinus\nn02389779\tburro\nn02389865\tmoke\nn02389943\tjack, jackass\nn02390015\tjennet, jenny, jenny ass\nn02390101\tmule\nn02390258\thinny\nn02390454\twild ass\nn02390640\tAfrican wild ass, Equus asinus\nn02390738\tkiang, Equus kiang\nn02390834\tonager, Equus hemionus\nn02390938\tchigetai, dziggetai, Equus hemionus hemionus\nn02391049\tzebra\nn02391234\tcommon zebra, Burchell's zebra, Equus Burchelli\nn02391373\tmountain zebra, Equus zebra zebra\nn02391508\tgrevy's zebra, Equus 
grevyi\nn02391617\tquagga, Equus quagga\nn02391994\trhinoceros, rhino\nn02392434\tIndian rhinoceros, Rhinoceros unicornis\nn02392555\twoolly rhinoceros, Rhinoceros antiquitatis\nn02392824\twhite rhinoceros, Ceratotherium simum, Diceros simus\nn02393161\tblack rhinoceros, Diceros bicornis\nn02393580\ttapir\nn02393807\tNew World tapir, Tapirus terrestris\nn02393940\tMalayan tapir, Indian tapir, Tapirus indicus\nn02394477\teven-toed ungulate, artiodactyl, artiodactyl mammal\nn02395003\tswine\nn02395406\thog, pig, grunter, squealer, Sus scrofa\nn02395694\tpiglet, piggy, shoat, shote\nn02395855\tsucking pig\nn02395931\tporker\nn02396014\tboar\nn02396088\tsow\nn02396157\trazorback, razorback hog, razorbacked hog\nn02396427\twild boar, boar, Sus scrofa\nn02396796\tbabirusa, babiroussa, babirussa, Babyrousa Babyrussa\nn02397096\twarthog\nn02397529\tpeccary, musk hog\nn02397744\tcollared peccary, javelina, Tayassu angulatus, Tayassu tajacu, Peccari angulatus\nn02397987\twhite-lipped peccary, Tayassu pecari\nn02398521\thippopotamus, hippo, river horse, Hippopotamus amphibius\nn02399000\truminant\nn02401031\tbovid\nn02402010\tbovine\nn02402175\tox, wild ox\nn02402425\tcattle, cows, kine, oxen, Bos taurus\nn02403003\tox\nn02403153\tstirk\nn02403231\tbullock, steer\nn02403325\tbull\nn02403454\tcow, moo-cow\nn02403740\theifer\nn02403820\tbullock\nn02403920\tdogie, dogy, leppy\nn02404028\tmaverick\nn02404186\tbeef, beef cattle\nn02404432\tlonghorn, Texas longhorn\nn02404573\tBrahman, Brahma, Brahmin, Bos indicus\nn02404906\tzebu\nn02405101\taurochs, urus, Bos primigenius\nn02405302\tyak, Bos grunniens\nn02405440\tbanteng, banting, tsine, Bos banteng\nn02405577\tWelsh, Welsh Black\nn02405692\tred poll\nn02405799\tSanta Gertrudis\nn02405929\tAberdeen Angus, Angus, black Angus\nn02406046\tAfricander\nn02406174\tdairy cattle, dairy cow, milch cow, milk cow, milcher, milker\nn02406432\tAyrshire\nn02406533\tBrown 
Swiss\nn02406647\tCharolais\nn02406749\tJersey\nn02406859\tDevon\nn02406952\tgrade\nn02407071\tDurham, shorthorn\nn02407172\tmilking shorthorn\nn02407276\tGalloway\nn02407390\tFriesian, Holstein, Holstein-Friesian\nn02407521\tGuernsey\nn02407625\tHereford, whiteface\nn02407763\tcattalo, beefalo\nn02407959\tOld World buffalo, buffalo\nn02408429\twater buffalo, water ox, Asiatic buffalo, Bubalus bubalis\nn02408660\tIndian buffalo\nn02408817\tcarabao\nn02409038\tanoa, dwarf buffalo, Anoa depressicornis\nn02409202\ttamarau, tamarao, Bubalus mindorensis, Anoa mindorensis\nn02409508\tCape buffalo, Synercus caffer\nn02409870\tAsian wild ox\nn02410011\tgaur, Bibos gaurus\nn02410141\tgayal, mithan, Bibos frontalis\nn02410509\tbison\nn02410702\tAmerican bison, American buffalo, buffalo, Bison bison\nn02410900\twisent, aurochs, Bison bonasus\nn02411206\tmusk ox, musk sheep, Ovibos moschatus\nn02411705\tsheep\nn02411999\tewe\nn02412080\tram, tup\nn02412210\twether\nn02412440\tlamb\nn02412629\tlambkin\nn02412700\tbaa-lamb\nn02412787\thog, hogget, hogg\nn02412909\tteg\nn02412977\tPersian lamb\nn02413050\tblack sheep\nn02413131\tdomestic sheep, Ovis aries\nn02413484\tCotswold\nn02413593\tHampshire, Hampshire down\nn02413717\tLincoln\nn02413824\tExmoor\nn02413917\tCheviot\nn02414043\tbroadtail, caracul, karakul\nn02414209\tlongwool\nn02414290\tmerino, merino sheep\nn02414442\tRambouillet\nn02414578\twild sheep\nn02414763\targali, argal, Ovis ammon\nn02414904\tMarco Polo sheep, Marco Polo's sheep, Ovis poli\nn02415130\turial, Ovis vignei\nn02415253\tDall sheep, Dall's sheep, white sheep, Ovis montana dalli\nn02415435\tmountain sheep\nn02415577\tbighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis\nn02415829\tmouflon, moufflon, Ovis musimon\nn02416104\taoudad, arui, audad, Barbary sheep, maned sheep, Ammotragus lervia\nn02416519\tgoat, caprine animal\nn02416820\tkid\nn02416880\tbilly, billy goat, he-goat\nn02416964\tnanny, nanny-goat, 
she-goat\nn02417070\tdomestic goat, Capra hircus\nn02417242\tCashmere goat, Kashmir goat\nn02417387\tAngora, Angora goat\nn02417534\twild goat\nn02417663\tbezoar goat, pasang, Capra aegagrus\nn02417785\tmarkhor, markhoor, Capra falconeri\nn02417914\tibex, Capra ibex\nn02418064\tgoat antelope\nn02418465\tmountain goat, Rocky Mountain goat, Oreamnos americanus\nn02418770\tgoral, Naemorhedus goral\nn02419056\tserow\nn02419336\tchamois, Rupicapra rupicapra\nn02419634\ttakin, gnu goat, Budorcas taxicolor\nn02419796\tantelope\nn02420509\tblackbuck, black buck, Antilope cervicapra\nn02420828\tgerenuk, Litocranius walleri\nn02421136\taddax, Addax nasomaculatus\nn02421449\tgnu, wildebeest\nn02421792\tdik-dik\nn02422106\thartebeest\nn02422391\tsassaby, topi, Damaliscus lunatus\nn02422699\timpala, Aepyceros melampus\nn02423022\tgazelle\nn02423218\tThomson's gazelle, Gazella thomsoni\nn02423362\tGazella subgutturosa\nn02423589\tspringbok, springbuck, Antidorcas marsupialis, Antidorcas euchore\nn02424085\tbongo, Tragelaphus eurycerus, Boocercus eurycerus\nn02424305\tkudu, koodoo, koudou\nn02424486\tgreater kudu, Tragelaphus strepsiceros\nn02424589\tlesser kudu, Tragelaphus imberbis\nn02424695\tharnessed antelope\nn02424909\tnyala, Tragelaphus angasi\nn02425086\tmountain nyala, Tragelaphus buxtoni\nn02425228\tbushbuck, guib, Tragelaphus scriptus\nn02425532\tnilgai, nylghai, nylghau, blue bull, Boselaphus tragocamelus\nn02425887\tsable antelope, Hippotragus niger\nn02426176\tsaiga, Saiga tatarica\nn02426481\tsteenbok, steinbok, Raphicerus campestris\nn02426813\teland\nn02427032\tcommon eland, Taurotragus oryx\nn02427183\tgiant eland, Taurotragus derbianus\nn02427470\tkob, Kobus kob\nn02427576\tlechwe, Kobus leche\nn02427724\twaterbuck\nn02428089\tpuku, Adenota vardoni\nn02428349\toryx, pasang\nn02428508\tgemsbok, gemsbuck, Oryx gazella\nn02428842\tforest goat, spindle horn, Pseudoryx nghetinhensis\nn02429456\tpronghorn, prongbuck, pronghorn antelope, American antelope, 
Antilocapra americana\nn02430045\tdeer, cervid\nn02430559\tstag\nn02430643\troyal, royal stag\nn02430748\tpricket\nn02430830\tfawn\nn02431122\tred deer, elk, American elk, wapiti, Cervus elaphus\nn02431337\thart, stag\nn02431441\thind\nn02431542\tbrocket\nn02431628\tsambar, sambur, Cervus unicolor\nn02431785\twapiti, elk, American elk, Cervus elaphus canadensis\nn02431976\tJapanese deer, sika, Cervus nipon, Cervus sika\nn02432291\tVirginia deer, white tail, whitetail, white-tailed deer, whitetail deer, Odocoileus Virginianus\nn02432511\tmule deer, burro deer, Odocoileus hemionus\nn02432704\tblack-tailed deer, blacktail deer, blacktail, Odocoileus hemionus columbianus\nn02432983\telk, European elk, moose, Alces alces\nn02433318\tfallow deer, Dama dama\nn02433546\troe deer, Capreolus capreolus\nn02433729\troebuck\nn02433925\tcaribou, reindeer, Greenland caribou, Rangifer tarandus\nn02434190\twoodland caribou, Rangifer caribou\nn02434415\tbarren ground caribou, Rangifer arcticus\nn02434712\tbrocket\nn02434954\tmuntjac, barking deer\nn02435216\tmusk deer, Moschus moschiferus\nn02435517\tpere david's deer, elaphure, Elaphurus davidianus\nn02435853\tchevrotain, mouse deer\nn02436224\tkanchil, Tragulus kanchil\nn02436353\tnapu, Tragulus Javanicus\nn02436645\twater chevrotain, water deer, Hyemoschus aquaticus\nn02437136\tcamel\nn02437312\tArabian camel, dromedary, Camelus dromedarius\nn02437482\tBactrian camel, Camelus bactrianus\nn02437616\tllama\nn02437971\tdomestic llama, Lama peruana\nn02438173\tguanaco, Lama guanicoe\nn02438272\talpaca, Lama pacos\nn02438580\tvicuna, Vicugna vicugna\nn02439033\tgiraffe, camelopard, Giraffa camelopardalis\nn02439398\tokapi, Okapia johnstoni\nn02441326\tmusteline mammal, mustelid, musteline\nn02441942\tweasel\nn02442172\termine, shorttail weasel, Mustela erminea\nn02442336\tstoat\nn02442446\tNew World least weasel, Mustela rixosa\nn02442572\tOld World least weasel, Mustela nivalis\nn02442668\tlongtail weasel, long-tailed weasel, Mustela 
frenata\nn02442845\tmink\nn02443015\tAmerican mink, Mustela vison\nn02443114\tpolecat, fitch, foulmart, foumart, Mustela putorius\nn02443346\tferret\nn02443484\tblack-footed ferret, ferret, Mustela nigripes\nn02443808\tmuishond\nn02443959\tsnake muishond, Poecilogale albinucha\nn02444251\tstriped muishond, Ictonyx striata\nn02444819\totter\nn02445004\triver otter, Lutra canadensis\nn02445171\tEurasian otter, Lutra lutra\nn02445394\tsea otter, Enhydra lutris\nn02445715\tskunk, polecat, wood pussy\nn02446206\tstriped skunk, Mephitis mephitis\nn02446352\thooded skunk, Mephitis macroura\nn02446645\thog-nosed skunk, hognosed skunk, badger skunk, rooter skunk, Conepatus leuconotus\nn02447021\tspotted skunk, little spotted skunk, Spilogale putorius\nn02447366\tbadger\nn02447762\tAmerican badger, Taxidea taxus\nn02448060\tEurasian badger, Meles meles\nn02448318\tratel, honey badger, Mellivora capensis\nn02448633\tferret badger\nn02448885\thog badger, hog-nosed badger, sand badger, Arctonyx collaris\nn02449183\twolverine, carcajou, skunk bear, Gulo luscus\nn02449350\tglutton, Gulo gulo, wolverine\nn02449699\tgrison, Grison vittatus, Galictis vittatus\nn02450034\tmarten, marten cat\nn02450295\tpine marten, Martes martes\nn02450426\tsable, Martes zibellina\nn02450561\tAmerican marten, American sable, Martes americana\nn02450677\tstone marten, beech marten, Martes foina\nn02450829\tfisher, pekan, fisher cat, black cat, Martes pennanti\nn02451125\tyellow-throated marten, Charronia flavigula\nn02451415\ttayra, taira, Eira barbara\nn02451575\tfictional animal\nn02453108\tpachyderm\nn02453611\tedentate\nn02454379\tarmadillo\nn02454794\tpeba, nine-banded armadillo, Texas armadillo, Dasypus novemcinctus\nn02455135\tapar, three-banded armadillo, Tolypeutes tricinctus\nn02455428\ttatouay, cabassous, Cabassous unicinctus\nn02455720\tpeludo, poyou, Euphractus sexcinctus\nn02456008\tgiant armadillo, tatou, tatu, Priodontes giganteus\nn02456275\tpichiciago, pichiciego, fairy armadillo, 
chlamyphore, Chlamyphorus truncatus\nn02456962\tsloth, tree sloth\nn02457408\tthree-toed sloth, ai, Bradypus tridactylus\nn02457945\ttwo-toed sloth, unau, unai, Choloepus didactylus\nn02458135\ttwo-toed sloth, unau, unai, Choloepus hoffmanni\nn02458517\tmegatherian, megatheriid, megatherian mammal\nn02459190\tmylodontid\nn02460009\tanteater, New World anteater\nn02460451\tant bear, giant anteater, great anteater, tamanoir, Myrmecophaga jubata\nn02460817\tsilky anteater, two-toed anteater, Cyclopes didactylus\nn02461128\ttamandua, tamandu, lesser anteater, Tamandua tetradactyla\nn02461830\tpangolin, scaly anteater, anteater\nn02462213\tcoronet\nn02469248\tscapular\nn02469472\ttadpole, polliwog, pollywog\nn02469914\tprimate\nn02470238\tsimian\nn02470325\tape\nn02470709\tanthropoid\nn02470899\tanthropoid ape\nn02471300\thominoid\nn02471762\thominid\nn02472293\thomo, man, human being, human\nn02472987\tworld, human race, humanity, humankind, human beings, humans, mankind, man\nn02473307\tHomo erectus\nn02473554\tPithecanthropus, Pithecanthropus erectus, genus Pithecanthropus\nn02473720\tJava man, Trinil man\nn02473857\tPeking man\nn02473983\tSinanthropus, genus Sinanthropus\nn02474110\tHomo soloensis\nn02474282\tJavanthropus, genus Javanthropus\nn02474605\tHomo habilis\nn02474777\tHomo sapiens\nn02475078\tNeandertal man, Neanderthal man, Neandertal, Neanderthal, Homo sapiens neanderthalensis\nn02475358\tCro-magnon\nn02475669\tHomo sapiens sapiens, modern man\nn02476219\taustralopithecine\nn02476567\tAustralopithecus afarensis\nn02476870\tAustralopithecus africanus\nn02477028\tAustralopithecus boisei\nn02477187\tZinjanthropus, genus Zinjanthropus\nn02477329\tAustralopithecus robustus\nn02477516\tParanthropus, genus Paranthropus\nn02477782\tSivapithecus\nn02478239\trudapithecus, Dryopithecus Rudapithecus hungaricus\nn02478875\tproconsul\nn02479332\tAegyptopithecus\nn02480153\tgreat ape, pongid\nn02480495\torangutan, orang, orangutang, Pongo pygmaeus\nn02480855\tgorilla, 
Gorilla gorilla\nn02481103\twestern lowland gorilla, Gorilla gorilla gorilla\nn02481235\teastern lowland gorilla, Gorilla gorilla grauri\nn02481366\tmountain gorilla, Gorilla gorilla beringei\nn02481500\tsilverback\nn02481823\tchimpanzee, chimp, Pan troglodytes\nn02482060\twestern chimpanzee, Pan troglodytes verus\nn02482286\teastern chimpanzee, Pan troglodytes schweinfurthii\nn02482474\tcentral chimpanzee, Pan troglodytes troglodytes\nn02482650\tpygmy chimpanzee, bonobo, Pan paniscus\nn02483092\tlesser ape\nn02483362\tgibbon, Hylobates lar\nn02483708\tsiamang, Hylobates syndactylus, Symphalangus syndactylus\nn02484322\tmonkey\nn02484473\tOld World monkey, catarrhine\nn02484975\tguenon, guenon monkey\nn02485225\ttalapoin, Cercopithecus talapoin\nn02485371\tgrivet, Cercopithecus aethiops\nn02485536\tvervet, vervet monkey, Cercopithecus aethiops pygerythrus\nn02485688\tgreen monkey, African green monkey, Cercopithecus aethiops sabaeus\nn02485988\tmangabey\nn02486261\tpatas, hussar monkey, Erythrocebus patas\nn02486410\tbaboon\nn02486657\tchacma, chacma baboon, Papio ursinus\nn02486908\tmandrill, Mandrillus sphinx\nn02487079\tdrill, Mandrillus leucophaeus\nn02487347\tmacaque\nn02487547\trhesus, rhesus monkey, Macaca mulatta\nn02487675\tbonnet macaque, bonnet monkey, capped macaque, crown monkey, Macaca radiata\nn02487847\tBarbary ape, Macaca sylvana\nn02488003\tcrab-eating macaque, croo monkey, Macaca irus\nn02488291\tlangur\nn02488415\tentellus, hanuman, Presbytes entellus, Semnopithecus entellus\nn02488702\tcolobus, colobus monkey\nn02488894\tguereza, Colobus guereza\nn02489166\tproboscis monkey, Nasalis larvatus\nn02489589\tNew World monkey, platyrrhine, platyrrhinian\nn02490219\tmarmoset\nn02490597\ttrue marmoset\nn02490811\tpygmy marmoset, Cebuella pygmaea\nn02491107\ttamarin, lion monkey, lion marmoset, leoncita\nn02491329\tsilky tamarin, Leontocebus rosalia\nn02491474\tpinche, Leontocebus oedipus\nn02492035\tcapuchin, ringtail, Cebus 
capucinus\nn02492356\tdouroucouli, Aotus trivirgatus\nn02492660\thowler monkey, howler\nn02492948\tsaki\nn02493224\tuakari\nn02493509\ttiti, titi monkey\nn02493793\tspider monkey, Ateles geoffroyi\nn02494079\tsquirrel monkey, Saimiri sciureus\nn02494383\twoolly monkey\nn02495242\ttree shrew\nn02496052\tprosimian\nn02496913\tlemur\nn02497673\tMadagascar cat, ring-tailed lemur, Lemur catta\nn02498153\taye-aye, Daubentonia madagascariensis\nn02498743\tslender loris, Loris gracilis\nn02499022\tslow loris, Nycticebus tardigradua, Nycticebus pygmaeus\nn02499316\tpotto, kinkajou, Perodicticus potto\nn02499568\tangwantibo, golden potto, Arctocebus calabarensis\nn02499808\tgalago, bushbaby, bush baby\nn02500267\tindri, indris, Indri indri, Indri brevicaudatus\nn02500596\twoolly indris, Avahi laniger\nn02501583\ttarsier\nn02501923\tTarsius syrichta\nn02502006\tTarsius glis\nn02502514\tflying lemur, flying cat, colugo\nn02502807\tCynocephalus variegatus\nn02503127\tproboscidean, proboscidian\nn02503517\telephant\nn02503756\trogue elephant\nn02504013\tIndian elephant, Elephas maximus\nn02504458\tAfrican elephant, Loxodonta africana\nn02504770\tmammoth\nn02505063\twoolly mammoth, northern mammoth, Mammuthus primigenius\nn02505238\tcolumbian mammoth, Mammuthus columbi\nn02505485\timperial mammoth, imperial elephant, Archidiskidon imperator\nn02505998\tmastodon, mastodont\nn02506947\tplantigrade mammal, plantigrade\nn02507148\tdigitigrade mammal, digitigrade\nn02507649\tprocyonid\nn02508021\traccoon, racoon\nn02508213\tcommon raccoon, common racoon, coon, ringtail, Procyon lotor\nn02508346\tcrab-eating raccoon, Procyon cancrivorus\nn02508742\tbassarisk, cacomistle, cacomixle, coon cat, raccoon fox, ringtail, ring-tailed cat, civet cat, miner's cat, Bassariscus astutus\nn02509197\tkinkajou, honey bear, potto, Potos flavus, Potos caudivolvulus\nn02509515\tcoati, coati-mondi, coati-mundi, coon cat, Nasua narica\nn02509815\tlesser panda, red panda, panda, bear cat, cat bear, Ailurus 
fulgens\nn02510455\tgiant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca\nn02511730\ttwitterer\nn02512053\tfish\nn02512752\tfingerling\nn02512830\tgame fish, sport fish\nn02512938\tfood fish\nn02513248\trough fish\nn02513355\tgroundfish, bottom fish\nn02513560\tyoung fish\nn02513727\tparr\nn02513805\tmouthbreeder\nn02513939\tspawner\nn02514041\tbarracouta, snoek\nn02515214\tcrossopterygian, lobefin, lobe-finned fish\nn02515713\tcoelacanth, Latimeria chalumnae\nn02516188\tlungfish\nn02516776\tceratodus\nn02517442\tcatfish, siluriform fish\nn02517938\tsilurid, silurid fish\nn02518324\tEuropean catfish, sheatfish, Silurus glanis\nn02518622\telectric catfish, Malopterurus electricus\nn02519148\tbullhead, bullhead catfish\nn02519340\thorned pout, hornpout, pout, Ameiurus Melas\nn02519472\tbrown bullhead\nn02519686\tchannel catfish, channel cat, Ictalurus punctatus\nn02519862\tblue catfish, blue cat, blue channel catfish, blue channel cat\nn02520147\tflathead catfish, mudcat, goujon, shovelnose catfish, spoonbill catfish, Pylodictus olivaris\nn02520525\tarmored catfish\nn02520810\tsea catfish\nn02521646\tgadoid, gadoid fish\nn02522399\tcod, codfish\nn02522637\tcodling\nn02522722\tAtlantic cod, Gadus morhua\nn02522866\tPacific cod, Alaska cod, Gadus macrocephalus\nn02523110\twhiting, Merlangus merlangus, Gadus merlangus\nn02523427\tburbot, eelpout, ling, cusk, Lota lota\nn02523877\thaddock, Melanogrammus aeglefinus\nn02524202\tpollack, pollock, Pollachius pollachius\nn02524524\thake\nn02524659\tsilver hake, Merluccius bilinearis, whiting\nn02524928\tling\nn02525382\tcusk, torsk, Brosme brosme\nn02525703\tgrenadier, rattail, rattail fish\nn02526121\teel\nn02526425\telver\nn02526818\tcommon eel, freshwater eel\nn02527057\ttuna, Anguilla sucklandii\nn02527271\tmoray, moray eel\nn02527622\tconger, conger eel\nn02528163\tteleost fish, teleost, teleostan\nn02529293\tbeaked salmon, sandfish, Gonorhynchus gonorhynchus\nn02529772\tclupeid fish, 
clupeid\nn02530052\twhitebait\nn02530188\tbrit, britt\nn02530421\tshad\nn02530637\tcommon American shad, Alosa sapidissima\nn02530831\triver shad, Alosa chrysocloris\nn02530999\tallice shad, allis shad, allice, allis, Alosa alosa\nn02531114\talewife, Alosa pseudoharengus, Pomolobus pseudoharengus\nn02531625\tmenhaden, Brevoortia tyrannis\nn02532028\therring, Clupea harangus\nn02532272\tAtlantic herring, Clupea harengus harengus\nn02532451\tPacific herring, Clupea harengus pallasii\nn02532602\tsardine\nn02532786\tsild\nn02532918\tbrisling, sprat, Clupea sprattus\nn02533209\tpilchard, sardine, Sardina pilchardus\nn02533545\tPacific sardine, Sardinops caerulea\nn02533834\tanchovy\nn02534165\tmediterranean anchovy, Engraulis encrasicholus\nn02534559\tsalmonid\nn02534734\tsalmon\nn02535080\tparr\nn02535163\tblackfish\nn02535258\tredfish\nn02535537\tAtlantic salmon, Salmo salar\nn02535759\tlandlocked salmon, lake salmon\nn02536165\tsockeye, sockeye salmon, red salmon, blueback salmon, Oncorhynchus nerka\nn02536456\tchinook, chinook salmon, king salmon, quinnat salmon, Oncorhynchus tshawytscha\nn02536864\tcoho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch\nn02537085\ttrout\nn02537319\tbrown trout, salmon trout, Salmo trutta\nn02537525\trainbow trout, Salmo gairdneri\nn02537716\tsea trout\nn02538010\tlake trout, salmon trout, Salvelinus namaycush\nn02538216\tbrook trout, speckled trout, Salvelinus fontinalis\nn02538406\tchar, charr\nn02538562\tArctic char, Salvelinus alpinus\nn02538985\twhitefish\nn02539424\tlake whitefish, Coregonus clupeaformis\nn02539573\tcisco, lake herring, Coregonus artedi\nn02539894\tround whitefish, Menominee whitefish, Prosopium cylindraceum\nn02540412\tsmelt\nn02540983\tsparling, European smelt, Osmerus eperlanus\nn02541257\tcapelin, capelan, caplin\nn02541687\ttarpon, Tarpon atlanticus\nn02542017\tladyfish, tenpounder, Elops saurus\nn02542432\tbonefish, Albula 
vulpes\nn02542958\targentine\nn02543255\tlanternfish\nn02543565\tlizardfish, snakefish, snake-fish\nn02544274\tlancetfish, lancet fish, wolffish\nn02545841\topah, moonfish, Lampris regius\nn02546028\tNew World opah, Lampris guttatus\nn02546331\tribbonfish\nn02546627\tdealfish, Trachipterus arcticus\nn02547014\toarfish, king of the herring, ribbonfish, Regalecus glesne\nn02547733\tbatfish\nn02548247\tgoosefish, angler, anglerfish, angler fish, monkfish, lotte, allmouth, Lophius Americanus\nn02548689\ttoadfish, Opsanus tau\nn02548884\toyster fish, oyster-fish, oysterfish\nn02549248\tfrogfish\nn02549376\tsargassum fish\nn02549989\tneedlefish, gar, billfish\nn02550203\ttimucu\nn02550460\tflying fish\nn02550655\tmonoplane flying fish, two-wing flying fish\nn02551134\thalfbeak\nn02551668\tsaury, billfish, Scomberesox saurus\nn02552171\tspiny-finned fish, acanthopterygian\nn02553028\tlingcod, Ophiodon elongatus\nn02554730\tpercoid fish, percoid, percoidean\nn02555863\tperch\nn02556373\tclimbing perch, Anabas testudineus, A. 
testudineus\nn02556846\tperch\nn02557182\tyellow perch, Perca flavescens\nn02557318\tEuropean perch, Perca fluviatilis\nn02557591\tpike-perch, pike perch\nn02557749\twalleye, walleyed pike, jack salmon, dory, Stizostedion vitreum\nn02557909\tblue pike, blue pickerel, blue pikeperch, blue walleye, Strizostedion vitreum glaucum\nn02558206\tsnail darter, Percina tanasi\nn02558860\tcusk-eel\nn02559144\tbrotula\nn02559383\tpearlfish, pearl-fish\nn02559862\trobalo\nn02560110\tsnook\nn02561108\tpike\nn02561381\tnorthern pike, Esox lucius\nn02561514\tmuskellunge, Esox masquinongy\nn02561661\tpickerel\nn02561803\tchain pickerel, chain pike, Esox niger\nn02561937\tredfin pickerel, barred pickerel, Esox americanus\nn02562315\tsunfish, centrarchid\nn02562796\tcrappie\nn02562971\tblack crappie, Pomoxis nigromaculatus\nn02563079\twhite crappie, Pomoxis annularis\nn02563182\tfreshwater bream, bream\nn02563648\tpumpkinseed, Lepomis gibbosus\nn02563792\tbluegill, Lepomis macrochirus\nn02563949\tspotted sunfish, stumpknocker, Lepomis punctatus\nn02564270\tfreshwater bass\nn02564403\trock bass, rock sunfish, Ambloplites rupestris\nn02564720\tblack bass\nn02564935\tKentucky black bass, spotted black bass, Micropterus pseudoplites\nn02565072\tsmallmouth, smallmouth bass, smallmouthed bass, smallmouth black bass, smallmouthed black bass, Micropterus dolomieu\nn02565324\tlargemouth, largemouth bass, largemouthed bass, largemouth black bass, largemouthed black bass, Micropterus salmoides\nn02565573\tbass\nn02566109\tserranid fish, serranid\nn02566489\twhite perch, silver perch, Morone americana\nn02566665\tyellow bass, Morone interrupta\nn02567334\tblackmouth bass, Synagrops bellus\nn02567633\trock sea bass, rock bass, Centropristis philadelphica\nn02568087\tstriped bass, striper, Roccus saxatilis, rockfish\nn02568447\tstone bass, wreckfish, Polyprion americanus\nn02568959\tgrouper\nn02569484\thind\nn02569631\trock hind, Epinephelus adscensionis\nn02569905\tcreole-fish, Paranthias 
furcifer\nn02570164\tjewfish, Mycteroperca bonaci\nn02570484\tsoapfish\nn02570838\tsurfperch, surffish, surf fish\nn02571167\trainbow seaperch, rainbow perch, Hipsurus caryi\nn02571652\tbigeye\nn02571810\tcatalufa, Priacanthus arenatus\nn02572196\tcardinalfish\nn02572484\tflame fish, flamefish, Apogon maculatus\nn02573249\ttilefish, Lopholatilus chamaeleonticeps\nn02573704\tbluefish, Pomatomus saltatrix\nn02574271\tcobia, Rachycentron canadum, sergeant fish\nn02574910\tremora, suckerfish, sucking fish\nn02575325\tsharksucker, Echeneis naucrates\nn02575590\twhale sucker, whalesucker, Remilegia australis\nn02576223\tcarangid fish, carangid\nn02576575\tjack\nn02576906\tcrevalle jack, jack crevalle, Caranx hippos\nn02577041\tyellow jack, Caranx bartholomaei\nn02577164\trunner, blue runner, Caranx crysos\nn02577403\trainbow runner, Elagatis bipinnulata\nn02577662\tleatherjacket, leatherjack\nn02577952\tthreadfish, thread-fish, Alectis ciliaris\nn02578233\tmoonfish, Atlantic moonfish, horsefish, horsehead, horse-head, dollarfish, Selene setapinnis\nn02578454\tlookdown, lookdown fish, Selene vomer\nn02578771\tamberjack, amberfish\nn02578928\tyellowtail, Seriola dorsalis\nn02579303\tkingfish, Seriola grandis\nn02579557\tpompano\nn02579762\tFlorida pompano, Trachinotus carolinus\nn02579928\tpermit, Trachinotus falcatus\nn02580336\tscad\nn02580679\thorse mackerel, jack mackerel, Spanish mackerel, saurel, Trachurus symmetricus\nn02580830\thorse mackerel, saurel, Trachurus trachurus\nn02581108\tbigeye scad, big-eyed scad, goggle-eye, Selar crumenophthalmus\nn02581482\tmackerel scad, mackerel shad, Decapterus macarellus\nn02581642\tround scad, cigarfish, quiaquia, Decapterus punctatus\nn02581957\tdolphinfish, dolphin, mahimahi\nn02582220\tCoryphaena hippurus\nn02582349\tCoryphaena equisetis\nn02582721\tpomfret, Brama raii\nn02583567\tcharacin, characin fish, characid\nn02583890\ttetra\nn02584145\tcardinal tetra, Paracheirodon axelrodi\nn02584449\tpiranha, pirana, 
caribe\nn02585872\tcichlid, cichlid fish\nn02586238\tbolti, Tilapia nilotica\nn02586543\tsnapper\nn02587051\tred snapper, Lutjanus blackfordi\nn02587300\tgrey snapper, gray snapper, mangrove snapper, Lutjanus griseus\nn02587479\tmutton snapper, muttonfish, Lutjanus analis\nn02587618\tschoolmaster, Lutjanus apodus\nn02587877\tyellowtail, yellowtail snapper, Ocyurus chrysurus\nn02588286\tgrunt\nn02588794\tmargate, Haemulon album\nn02588945\tSpanish grunt, Haemulon macrostomum\nn02589062\ttomtate, Haemulon aurolineatum\nn02589196\tcottonwick, Haemulon malanurum\nn02589316\tsailor's-choice, sailors choice, Haemulon parra\nn02589623\tporkfish, pork-fish, Anisotremus virginicus\nn02589796\tpompon, black margate, Anisotremus surinamensis\nn02590094\tpigfish, hogfish, Orthopristis chrysopterus\nn02590495\tsparid, sparid fish\nn02590702\tsea bream, bream\nn02590987\tporgy\nn02591330\tred porgy, Pagrus pagrus\nn02591613\tEuropean sea bream, Pagellus centrodontus\nn02591911\tAtlantic sea bream, Archosargus rhomboidalis\nn02592055\tsheepshead, Archosargus probatocephalus\nn02592371\tpinfish, sailor's-choice, squirrelfish, Lagodon rhomboides\nn02592734\tsheepshead porgy, Calamus penna\nn02593019\tsnapper, Chrysophrys auratus\nn02593191\tblack bream, Chrysophrys australis\nn02593453\tscup, northern porgy, northern scup, Stenotomus chrysops\nn02593679\tscup, southern porgy, southern scup, Stenotomus aculeatus\nn02594250\tsciaenid fish, sciaenid\nn02594942\tstriped drum, Equetus pulcher\nn02595056\tjackknife-fish, Equetus lanceolatus\nn02595339\tsilver perch, mademoiselle, Bairdiella chrysoura\nn02595702\tred drum, channel bass, redfish, Sciaenops ocellatus\nn02596067\tmulloway, jewfish, Sciaena antarctica\nn02596252\tmaigre, maiger, Sciaena aquila\nn02596381\tcroaker\nn02596720\tAtlantic croaker, Micropogonias undulatus\nn02597004\tyellowfin croaker, surffish, surf fish, Umbrina roncador\nn02597367\twhiting\nn02597608\tkingfish\nn02597818\tking whiting, Menticirrhus 
americanus\nn02597972\tnorthern whiting, Menticirrhus saxatilis\nn02598134\tcorbina, Menticirrhus undulatus\nn02598573\twhite croaker, chenfish, kingfish, Genyonemus lineatus\nn02598878\twhite croaker, queenfish, Seriphus politus\nn02599052\tsea trout\nn02599347\tweakfish, Cynoscion regalis\nn02599557\tspotted weakfish, spotted sea trout, spotted squeateague, Cynoscion nebulosus\nn02599958\tmullet\nn02600298\tgoatfish, red mullet, surmullet, Mullus surmuletus\nn02600503\tred goatfish, Mullus auratus\nn02600798\tyellow goatfish, Mulloidichthys martinicus\nn02601344\tmullet, grey mullet, gray mullet\nn02601767\tstriped mullet, Mugil cephalus\nn02601921\twhite mullet, Mugil curema\nn02602059\tliza, Mugil liza\nn02602405\tsilversides, silverside\nn02602760\tjacksmelt, Atherinopsis californiensis\nn02603317\tbarracuda\nn02603540\tgreat barracuda, Sphyraena barracuda\nn02603862\tsweeper\nn02604157\tsea chub\nn02604480\tBermuda chub, rudderfish, Kyphosus sectatrix\nn02604954\tspadefish, angelfish, Chaetodipterus faber\nn02605316\tbutterfly fish\nn02605703\tchaetodon\nn02605936\tangelfish\nn02606052\trock beauty, Holocanthus tricolor\nn02606384\tdamselfish, demoiselle\nn02606751\tbeaugregory, Pomacentrus leucostictus\nn02607072\tanemone fish\nn02607201\tclown anemone fish, Amphiprion percula\nn02607470\tsergeant major, Abudefduf saxatilis\nn02607862\twrasse\nn02608284\tpigfish, giant pigfish, Achoerodus gouldii\nn02608547\thogfish, hog snapper, Lachnolaimus maximus\nn02608860\tslippery dick, Halicoeres bivittatus\nn02608996\tpuddingwife, pudding-wife, Halicoeres radiatus\nn02609302\tbluehead, Thalassoma bifasciatum\nn02609823\tpearly razorfish, Hemipteronatus novacula\nn02610066\ttautog, blackfish, Tautoga onitis\nn02610373\tcunner, bergall, Tautogolabrus adspersus\nn02610664\tparrotfish, polly fish, pollyfish\nn02610980\tthreadfin\nn02611561\tjawfish\nn02611898\tstargazer\nn02612167\tsand stargazer\nn02613181\tblenny, combtooth blenny\nn02613572\tshanny, Blennius 
pholis\nn02613820\tMolly Miller, Scartella cristata\nn02614140\tclinid, clinid fish\nn02614482\tpikeblenny\nn02614653\tbluethroat pikeblenny, Chaenopsis ocellata\nn02614978\tgunnel, bracketed blenny\nn02615298\trock gunnel, butterfish, Pholis gunnellus\nn02616128\teelblenny\nn02616397\twrymouth, ghostfish, Cryptacanthodes maculatus\nn02616851\twolffish, wolf fish, catfish\nn02617537\tviviparous eelpout, Zoarces viviparus\nn02618094\tocean pout, Macrozoarces americanus\nn02618513\tsand lance, sand launce, sand eel, launce\nn02618827\tdragonet\nn02619165\tgoby, gudgeon\nn02619550\tmudskipper, mudspringer\nn02619861\tsleeper, sleeper goby\nn02620167\tflathead\nn02620578\tarcherfish, Toxotes jaculatrix\nn02621258\tsurgeonfish\nn02621908\tgempylid\nn02622249\tsnake mackerel, Gempylus serpens\nn02622547\tescolar, Lepidocybium flavobrunneum\nn02622712\toilfish, Ruvettus pretiosus\nn02622955\tcutlassfish, frost fish, hairtail\nn02623445\tscombroid, scombroid fish\nn02624167\tmackerel\nn02624551\tcommon mackerel, shiner, Scomber scombrus\nn02624807\tSpanish mackerel, Scomber colias\nn02624987\tchub mackerel, tinker, Scomber japonicus\nn02625258\twahoo, Acanthocybium solandri\nn02625612\tSpanish mackerel\nn02625851\tking mackerel, cavalla, cero, Scomberomorus cavalla\nn02626089\tScomberomorus maculatus\nn02626265\tcero, pintado, kingfish, Scomberomorus regalis\nn02626471\tsierra, Scomberomorus sierra\nn02626762\ttuna, tunny\nn02627037\talbacore, long-fin tunny, Thunnus alalunga\nn02627292\tbluefin, bluefin tuna, horse mackerel, Thunnus thynnus\nn02627532\tyellowfin, yellowfin tuna, Thunnus albacares\nn02627835\tbonito\nn02628062\tskipjack, Atlantic bonito, Sarda sarda\nn02628259\tChile bonito, Chilean bonito, Pacific bonito, Sarda chiliensis\nn02628600\tskipjack, skipjack tuna, Euthynnus pelamis\nn02629230\tbonito, oceanic bonito, Katsuwonus pelamis\nn02629716\tswordfish, Xiphias gladius\nn02630281\tsailfish\nn02630615\tAtlantic sailfish, Istiophorus 
albicans\nn02630739\tbillfish\nn02631041\tmarlin\nn02631330\tblue marlin, Makaira nigricans\nn02631475\tblack marlin, Makaira mazara, Makaira marlina\nn02631628\tstriped marlin, Makaira mitsukurii\nn02631775\twhite marlin, Makaira albida\nn02632039\tspearfish\nn02632494\tlouvar, Luvarus imperialis\nn02633422\tdollarfish, Poronotus triacanthus\nn02633677\tpalometa, California pompano, Palometa simillima\nn02633977\tharvestfish, Paprilus alepidotus\nn02634545\tdriftfish\nn02635154\tbarrelfish, black rudderfish, Hyperglyphe perciformis\nn02635580\tclingfish\nn02636170\ttripletail\nn02636405\tAtlantic tripletail, Lobotes surinamensis\nn02636550\tPacific tripletail, Lobotes pacificus\nn02636854\tmojarra\nn02637179\tyellowfin mojarra, Gerres cinereus\nn02637475\tsilver jenny, Eucinostomus gula\nn02637977\twhiting\nn02638596\tganoid, ganoid fish\nn02639087\tbowfin, grindle, dogfish, Amia calva\nn02639605\tpaddlefish, duckbill, Polyodon spathula\nn02639922\tChinese paddlefish, Psephurus gladis\nn02640242\tsturgeon\nn02640626\tPacific sturgeon, white sturgeon, Sacramento sturgeon, Acipenser transmontanus\nn02640857\tbeluga, hausen, white sturgeon, Acipenser huso\nn02641379\tgar, garfish, garpike, billfish, Lepisosteus osseus\nn02642107\tscorpaenoid, scorpaenoid fish\nn02642644\tscorpaenid, scorpaenid fish\nn02643112\tscorpionfish, scorpion fish, sea scorpion\nn02643316\tplumed scorpionfish, Scorpaena grandicornis\nn02643566\tlionfish\nn02643836\tstonefish, Synanceja verrucosa\nn02644113\trockfish\nn02644360\tcopper rockfish, Sebastodes caurinus\nn02644501\tvermillion rockfish, rasher, Sebastodes miniatus\nn02644665\tred rockfish, Sebastodes ruberrimus\nn02644817\trosefish, ocean perch, Sebastodes marinus\nn02645538\tbullhead\nn02645691\tmiller's-thumb\nn02645953\tsea raven, Hemitripterus americanus\nn02646667\tlumpfish, Cyclopterus lumpus\nn02646892\tlumpsucker\nn02648035\tpogge, armed bullhead, Agonus cataphractus\nn02648625\tgreenling\nn02648916\tkelp greenling, 
Hexagrammos decagrammus\nn02649218\tpainted greenling, convict fish, convictfish, Oxylebius pictus\nn02649546\tflathead\nn02650050\tgurnard\nn02650413\ttub gurnard, yellow gurnard, Trigla lucerna\nn02650541\tsea robin, searobin\nn02651060\tnorthern sea robin, Prionotus carolinus\nn02652132\tflying gurnard, flying robin, butterflyfish\nn02652668\tplectognath, plectognath fish\nn02653145\ttriggerfish\nn02653497\tqueen triggerfish, Bessy cerca, oldwench, oldwife, Balistes vetula\nn02653786\tfilefish\nn02654112\tleatherjacket, leatherfish\nn02654425\tboxfish, trunkfish\nn02654745\tcowfish, Lactophrys quadricornis\nn02655020\tpuffer, pufferfish, blowfish, globefish\nn02655523\tspiny puffer\nn02655848\tporcupinefish, porcupine fish, Diodon hystrix\nn02656032\tballoonfish, Diodon holocanthus\nn02656301\tburrfish\nn02656670\tocean sunfish, sunfish, mola, headfish\nn02656969\tsharptail mola, Mola lanceolata\nn02657368\tflatfish\nn02657694\tflounder\nn02658079\trighteye flounder, righteyed flounder\nn02658531\tplaice, Pleuronectes platessa\nn02658811\tEuropean flatfish, Platichthys flesus\nn02659176\tyellowtail flounder, Limanda ferruginea\nn02659478\twinter flounder, blackback flounder, lemon sole, Pseudopleuronectes americanus\nn02659808\tlemon sole, Microstomus kitt\nn02660091\tAmerican plaice, Hippoglossoides platessoides\nn02660208\thalibut, holibut\nn02660519\tAtlantic halibut, Hippoglossus hippoglossus\nn02660640\tPacific halibut, Hippoglossus stenolepsis\nn02661017\tlefteye flounder, lefteyed flounder\nn02661473\tsouthern flounder, Paralichthys lethostigmus\nn02661618\tsummer flounder, Paralichthys dentatus\nn02662239\twhiff\nn02662397\thorned whiff, Citharichthys cornutus\nn02662559\tsand dab\nn02662825\twindowpane, Scophthalmus aquosus\nn02662993\tbrill, Scophthalmus rhombus\nn02663211\tturbot, Psetta maxima\nn02663485\ttonguefish, tongue-fish\nn02663849\tsole\nn02664285\tEuropean sole, Solea solea\nn02664642\tEnglish sole, lemon sole, Parophrys 
vitulus\nn02665250\thogchoker, Trinectes maculatus\nn02665985\taba\nn02666196\tabacus\nn02666501\tabandoned ship, derelict\nn02666624\tA battery\nn02666943\tabattoir, butchery, shambles, slaughterhouse\nn02667093\tabaya\nn02667244\tAbbe condenser\nn02667379\tabbey\nn02667478\tabbey\nn02667576\tabbey\nn02667693\tAbney level\nn02668393\tabrader, abradant\nn02668613\tabrading stone\nn02669295\tabutment\nn02669442\tabutment arch\nn02669534\tacademic costume\nn02669723\tacademic gown, academic robe, judge's robe\nn02670186\taccelerator, throttle, throttle valve\nn02670382\taccelerator, particle accelerator, atom smasher\nn02670683\taccelerator, accelerator pedal, gas pedal, gas, throttle, gun\nn02670935\taccelerometer\nn02671780\taccessory, accoutrement, accouterment\nn02672152\taccommodating lens implant, accommodating IOL\nn02672371\taccommodation\nn02672831\taccordion, piano accordion, squeeze box\nn02675077\tacetate disk, phonograph recording disk\nn02675219\tacetate rayon, acetate\nn02675522\tachromatic lens\nn02676097\tacoustic delay line, sonic delay line\nn02676261\tacoustic device\nn02676566\tacoustic guitar\nn02676670\tacoustic modem\nn02676938\tacropolis\nn02677028\tacrylic\nn02677136\tacrylic, acrylic paint\nn02677436\tactinometer\nn02677718\taction, action mechanism\nn02678010\tactive matrix screen\nn02678384\tactuator\nn02678897\tadapter, adaptor\nn02679142\tadder\nn02679257\tadding machine, totalizer, totaliser\nn02679961\taddressing machine, Addressograph\nn02680110\tadhesive bandage\nn02680512\tadit\nn02680638\tadjoining room\nn02680754\tadjustable wrench, adjustable spanner\nn02681392\tadobe, adobe brick\nn02682311\tadz, adze\nn02682407\taeolian harp, aeolian lyre, wind harp\nn02682569\taerator\nn02682811\taerial torpedo\nn02682922\taerosol, aerosol container, aerosol can, aerosol bomb, spray can\nn02683183\tAertex\nn02683323\tafghan\nn02683454\tAfro-wig\nn02683558\tafterburner\nn02683791\tafter-shave, after-shave 
lotion\nn02684248\tagateware\nn02684356\tagglomerator\nn02684515\taglet, aiglet, aiguilette\nn02684649\taglet, aiglet\nn02684962\tagora, public square\nn02685082\taigrette, aigret\nn02685253\taileron\nn02685365\tair bag\nn02685701\tairbrake\nn02685995\tairbrush\nn02686121\tairbus\nn02686227\tair compressor\nn02686379\tair conditioner, air conditioning\nn02686568\taircraft\nn02687172\taircraft carrier, carrier, flattop, attack aircraft carrier\nn02687423\taircraft engine\nn02687682\tair cushion, air spring\nn02687821\tairdock, hangar, repair shed\nn02687992\tairfield, landing field, flying field, field\nn02688273\tair filter, air cleaner\nn02688443\tairfoil, aerofoil, control surface, surface\nn02689144\tairframe\nn02689274\tair gun, airgun, air rifle\nn02689434\tair hammer, jackhammer, pneumatic hammer\nn02689748\tair horn\nn02689819\tairing cupboard\nn02690373\tairliner\nn02690715\tairmailer\nn02691156\tairplane, aeroplane, plane\nn02692086\tairplane propeller, airscrew, prop\nn02692232\tairport, airdrome, aerodrome, drome\nn02692513\tair pump, vacuum pump\nn02692680\tair search radar\nn02692877\tairship, dirigible\nn02693246\tair terminal, airport terminal\nn02693413\tair-to-air missile\nn02693540\tair-to-ground missile, air-to-surface missile\nn02694045\taisle\nn02694279\tAladdin's lamp\nn02694426\talarm, warning device, alarm system\nn02694662\talarm clock, alarm\nn02694966\talb\nn02695627\talcazar\nn02695762\talcohol thermometer, alcohol-in-glass thermometer\nn02696165\talehouse\nn02696246\talembic\nn02696569\talgometer\nn02696843\talidade, alidad\nn02697022\talidade, alidad\nn02697221\tA-line\nn02697576\tAllen screw\nn02697675\tAllen wrench\nn02697876\talligator wrench\nn02698244\talms dish, alms tray\nn02698473\talpaca\nn02698634\talpenstock\nn02699494\taltar\nn02699629\taltar, communion table, Lord's table\nn02699770\taltarpiece, 
reredos\nn02699915\taltazimuth\nn02700064\talternator\nn02700258\taltimeter\nn02700895\tAmati\nn02701002\tambulance\nn02701260\tamen corner\nn02701730\tAmerican organ\nn02702989\tammeter\nn02703124\tammonia clock\nn02703275\tammunition, ammo\nn02704645\tamphibian, amphibious aircraft\nn02704792\tamphibian, amphibious vehicle\nn02704949\tamphitheater, amphitheatre, coliseum\nn02705201\tamphitheater, amphitheatre\nn02705429\tamphora\nn02705944\tamplifier\nn02706221\tampulla\nn02706806\tamusement arcade\nn02708093\tanalog clock\nn02708224\tanalog computer, analogue computer\nn02708433\tanalog watch\nn02708555\tanalytical balance, chemical balance\nn02708711\tanalyzer, analyser\nn02708885\tanamorphosis, anamorphism\nn02709101\tanastigmat\nn02709367\tanchor, ground tackle\nn02709637\tanchor chain, anchor rope\nn02709763\tanchor light, riding light, riding lamp\nn02709908\tAND circuit, AND gate\nn02710044\tandiron, firedog, dog, dog-iron\nn02710201\tandroid, humanoid, mechanical man\nn02710324\tanechoic chamber\nn02710429\tanemometer, wind gauge, wind gage\nn02710600\taneroid barometer, aneroid\nn02711237\tangiocardiogram\nn02711780\tangioscope\nn02712545\tangle bracket, angle iron\nn02712643\tangledozer\nn02713003\tankle brace\nn02713218\tanklet, anklets, bobbysock, bobbysocks\nn02713364\tanklet\nn02713496\tankus\nn02714315\tanode\nn02714535\tanode\nn02714751\tanswering machine\nn02715229\tantenna, aerial, transmitting aerial\nn02715513\tanteroom, antechamber, entrance hall, hall, foyer, lobby, vestibule\nn02715712\tantiaircraft, antiaircraft gun, flak, flack, pom-pom, ack-ack, ack-ack gun\nn02716626\tantiballistic missile, ABM\nn02720048\tantifouling paint\nn02720576\tanti-G suit, G suit\nn02721813\tantimacassar\nn02723165\tantiperspirant\nn02724722\tanti-submarine rocket\nn02725872\tanvil\nn02726017\tao dai\nn02726210\tapadana\nn02726305\tapartment, flat\nn02726681\tapartment building, apartment house\nn02727016\taperture\nn02727141\taperture\nn02727426\tapiary, bee 
house\nn02727825\tapparatus, setup\nn02728440\tapparel, wearing apparel, dress, clothes\nn02729222\tapplecart\nn02729837\tappliance\nn02729965\tappliance, contraption, contrivance, convenience, gadget, gizmo, gismo, widget\nn02730265\tapplicator, applier\nn02730568\tappointment, fitting\nn02730930\tapron\nn02731251\tapron string\nn02731398\tapse, apsis\nn02731629\taqualung, Aqua-Lung, scuba\nn02731900\taquaplane\nn02732072\taquarium, fish tank, marine museum\nn02732572\tarabesque\nn02732827\tarbor, arbour, bower, pergola\nn02733213\tarcade, colonnade\nn02733524\tarch\nn02734725\tarchitecture\nn02734835\tarchitrave\nn02735268\tarch support\nn02735361\tarc lamp, arc light\nn02735538\tarctic, galosh, golosh, rubber, gumshoe\nn02735688\tarea\nn02736396\tareaway\nn02736798\targyle, argyll\nn02737351\tark\nn02737660\tarm\nn02738031\tarmament\nn02738271\tarmature\nn02738449\tarmband\nn02738535\tarmchair\nn02738741\tarmet\nn02738859\tarm guard, arm pad\nn02738978\tarmhole\nn02739123\tarmilla\nn02739427\tarmlet, arm band\nn02739550\tarmoire\nn02739668\tarmor, armour\nn02739889\tarmored car, armoured car\nn02740061\tarmored car, armoured car\nn02740300\tarmored personnel carrier, armoured personnel carrier, APC\nn02740533\tarmored vehicle, armoured vehicle\nn02740764\tarmor plate, armour plate, armor plating, plate armor, plate armour\nn02741367\tarmory, armoury, arsenal\nn02741475\tarmrest\nn02742070\tarquebus, harquebus, hackbut, hagbut\nn02742194\tarray\nn02742322\tarray, raiment, regalia\nn02742468\tarrester, arrester hook\nn02742753\tarrow\nn02743426\tarsenal, armory, armoury\nn02744323\tarterial road\nn02744844\tarthrogram\nn02744961\tarthroscope\nn02745492\tartificial heart\nn02745611\tartificial horizon, gyro horizon, flight indicator\nn02745816\tartificial joint\nn02746008\tartificial kidney, hemodialyzer\nn02746225\tartificial skin\nn02746365\tartillery, heavy weapon, gun, ordnance\nn02746595\tartillery shell\nn02746683\tartist's loft\nn02746978\tart 
school\nn02747063\tascot\nn02747177\tashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin\nn02747672\tash-pan\nn02747802\tashtray\nn02748183\taspergill, aspersorium\nn02748359\taspersorium\nn02748491\taspirator\nn02749169\taspirin powder, headache powder\nn02749292\tassault gun\nn02749479\tassault rifle, assault gun\nn02749670\tassegai, assagai\nn02749790\tassembly\nn02749953\tassembly\nn02750070\tassembly hall\nn02750169\tassembly plant\nn02750320\tastatic coils\nn02750652\tastatic galvanometer\nn02751067\tastrodome\nn02751215\tastrolabe\nn02751295\tastronomical telescope\nn02751490\tastronomy satellite\nn02752199\tathenaeum, atheneum\nn02752496\tathletic sock, sweat sock, varsity sock\nn02752615\tathletic supporter, supporter, suspensor, jockstrap, jock\nn02752810\tatlas, telamon\nn02752917\tatmometer, evaporometer\nn02753044\tatom bomb, atomic bomb, A-bomb, fission bomb, plutonium bomb\nn02753394\tatomic clock\nn02753710\tatomic pile, atomic reactor, pile, chain reactor\nn02754103\tatomizer, atomiser, spray, sprayer, nebulizer, nebuliser\nn02754656\tatrium\nn02755140\tattache case, attache\nn02755352\tattachment, bond\nn02755529\tattack submarine\nn02755675\tattenuator\nn02755823\tattic\nn02755984\tattic fan\nn02756098\tattire, garb, dress\nn02756854\taudio amplifier\nn02756977\taudiocassette\nn02757061\taudio CD, audio compact disc\nn02757337\taudiometer, sonometer\nn02757462\taudio system, sound system\nn02757714\taudiotape\nn02757810\taudiotape\nn02757927\taudiovisual, audiovisual aid\nn02758134\tauditorium\nn02758490\tauger, gimlet, screw auger, wimble\nn02758863\tautobahn\nn02758960\tautoclave, sterilizer, steriliser\nn02759257\tautofocus\nn02759387\tautogiro, autogyro, gyroplane\nn02759700\tautoinjector\nn02759963\tautoloader, self-loader\nn02760099\tautomat\nn02760199\tautomat\nn02760298\tautomatic choke\nn02760429\tautomatic firearm, automatic gun, automatic weapon\nn02760658\tautomatic pistol, 
automatic\nn02760855\tautomatic rifle, automatic, machine rifle\nn02761034\tautomatic transmission, automatic drive\nn02761206\tautomation\nn02761392\tautomaton, robot, golem\nn02761557\tautomobile engine\nn02761696\tautomobile factory, auto factory, car factory\nn02761834\tautomobile horn, car horn, motor horn, horn, hooter\nn02762169\tautopilot, automatic pilot, robot pilot\nn02762371\tautoradiograph\nn02762508\tautostrada\nn02762725\tauxiliary boiler, donkey boiler\nn02762909\tauxiliary engine, donkey engine\nn02763083\tauxiliary pump, donkey pump\nn02763198\tauxiliary research submarine\nn02763306\tauxiliary storage, external storage, secondary storage\nn02763604\taviary, bird sanctuary, volary\nn02763714\tawl\nn02763901\tawning, sunshade, sunblind\nn02764044\tax, axe\nn02764398\tax handle, axe handle\nn02764505\tax head, axe head\nn02764614\taxis, axis of rotation\nn02764779\taxle\nn02764935\taxle bar\nn02765028\taxletree\nn02766168\tbabushka\nn02766320\tbaby bed, baby's bed\nn02766534\tbaby buggy, baby carriage, carriage, perambulator, pram, stroller, go-cart, pushchair, pusher\nn02766792\tbaby grand, baby grand piano, parlor grand, parlor grand piano, parlour grand, parlour grand piano\nn02767038\tbaby powder\nn02767147\tbaby shoe\nn02767433\tback, backrest\nn02767665\tback\nn02767956\tbackbench\nn02768114\tbackboard\nn02768226\tbackboard, basketball backboard\nn02768433\tbackbone\nn02768655\tback brace\nn02768973\tbackgammon board\nn02769075\tbackground, desktop, screen background\nn02769290\tbackhoe\nn02769669\tbacklighting\nn02769748\tbackpack, back pack, knapsack, packsack, rucksack, haversack\nn02769963\tbackpacking tent, pack tent\nn02770078\tbackplate\nn02770211\tback porch\nn02770585\tbacksaw, back saw\nn02770721\tbackscratcher\nn02770830\tbackseat\nn02771004\tbackspace key, backspace, backspacer\nn02771166\tbackstairs\nn02771286\tbackstay\nn02771547\tbackstop\nn02771750\tbacksword\nn02772101\tbackup system\nn02772435\tbadminton 
court\nn02772554\tbadminton equipment\nn02772700\tbadminton racket, badminton racquet, battledore\nn02773037\tbag\nn02773838\tbag, traveling bag, travelling bag, grip, suitcase\nn02774152\tbag, handbag, pocketbook, purse\nn02774630\tbaggage, luggage\nn02774921\tbaggage\nn02775039\tbaggage car, luggage van\nn02775178\tbaggage claim\nn02775483\tbagpipe\nn02775689\tbailey\nn02775813\tbailey\nn02775897\tBailey bridge\nn02776007\tbain-marie\nn02776205\tbait, decoy, lure\nn02776505\tbaize\nn02776631\tbakery, bakeshop, bakehouse\nn02776825\tbalaclava, balaclava helmet\nn02776978\tbalalaika\nn02777100\tbalance\nn02777292\tbalance beam, beam\nn02777402\tbalance wheel, balance\nn02777638\tbalbriggan\nn02777734\tbalcony\nn02777927\tbalcony\nn02778131\tbaldachin\nn02778294\tbaldric, baldrick\nn02778456\tbale\nn02778588\tbaling wire\nn02778669\tball\nn02779435\tball\nn02779609\tball and chain\nn02779719\tball-and-socket joint\nn02779971\tballast, light ballast\nn02780315\tball bearing, needle bearing, roller bearing\nn02780445\tball cartridge\nn02780588\tballcock, ball cock\nn02780704\tballdress\nn02780815\tballet skirt, tutu\nn02781121\tball gown\nn02781213\tballistic galvanometer\nn02781338\tballistic missile\nn02781517\tballistic pendulum\nn02781764\tballistocardiograph, cardiograph\nn02782093\tballoon\nn02782432\tballoon bomb, Fugo\nn02782602\tballoon sail\nn02782681\tballot box\nn02782778\tballpark, park\nn02783035\tball-peen hammer\nn02783161\tballpoint, ballpoint pen, ballpen, Biro\nn02783324\tballroom, dance hall, dance palace\nn02783459\tball valve\nn02783900\tbalsa raft, Kon Tiki\nn02783994\tbaluster\nn02784124\tbanana boat\nn02784998\tband\nn02785648\tbandage, patch\nn02786058\tBand Aid\nn02786198\tbandanna, bandana\nn02786331\tbandbox\nn02786463\tbanderilla\nn02786611\tbandoleer, bandolier\nn02786736\tbandoneon\nn02786837\tbandsaw, band saw\nn02787120\tbandwagon\nn02787269\tbangalore torpedo\nn02787435\tbangle, bauble, gaud, gewgaw, novelty, fallal, 
trinket\nn02787622\tbanjo\nn02788021\tbanner, streamer\nn02788148\tbannister, banister, balustrade, balusters, handrail\nn02788386\tbanquette\nn02788462\tbanyan, banian\nn02788572\tbaptismal font, baptistry, baptistery, font\nn02788689\tbar\nn02789487\tbar\nn02790669\tbarbecue, barbeque\nn02790823\tbarbed wire, barbwire\nn02790996\tbarbell\nn02791124\tbarber chair\nn02791270\tbarbershop\nn02791532\tbarbette carriage\nn02791665\tbarbican, barbacan\nn02791795\tbar bit\nn02792409\tbareboat\nn02792552\tbarge, flatboat, hoy, lighter\nn02792948\tbarge pole\nn02793089\tbaritone, baritone horn\nn02793199\tbark, barque\nn02793296\tbar magnet\nn02793414\tbar mask\nn02793495\tbarn\nn02793684\tbarndoor\nn02793842\tbarn door\nn02793930\tbarnyard\nn02794008\tbarograph\nn02794156\tbarometer\nn02794368\tbarong\nn02794474\tbarouche\nn02794664\tbar printer\nn02794779\tbarrack\nn02794972\tbarrage balloon\nn02795169\tbarrel, cask\nn02795528\tbarrel, gun barrel\nn02795670\tbarrelhouse, honky-tonk\nn02795783\tbarrel knot, blood knot\nn02795978\tbarrel organ, grind organ, hand organ, hurdy gurdy, hurdy-gurdy, street organ\nn02796207\tbarrel vault\nn02796318\tbarrette\nn02796412\tbarricade\nn02796623\tbarrier\nn02796995\tbarroom, bar, saloon, ginmill, taproom\nn02797295\tbarrow, garden cart, lawn cart, wheelbarrow\nn02797535\tbascule\nn02797692\tbase, pedestal, stand\nn02797881\tbase, bag\nn02799071\tbaseball\nn02799175\tbaseball bat, lumber\nn02799323\tbaseball cap, jockey cap, golf cap\nn02799897\tbaseball equipment\nn02800213\tbaseball glove, glove, baseball mitt, mitt\nn02800497\tbasement, cellar\nn02800675\tbasement\nn02800940\tbasic point defense missile system\nn02801047\tbasilica, Roman basilica\nn02801184\tbasilica\nn02801450\tbasilisk\nn02801525\tbasin\nn02801823\tbasinet\nn02801938\tbasket, handbasket\nn02802215\tbasket, basketball hoop, hoop\nn02802426\tbasketball\nn02802544\tbasketball court\nn02802721\tbasketball equipment\nn02802990\tbasket 
weave\nn02803349\tbass\nn02803539\tbass clarinet\nn02803666\tbass drum, gran casa\nn02803809\tbasset horn\nn02803934\tbass fiddle, bass viol, bull fiddle, double bass, contrabass, string bass\nn02804123\tbass guitar\nn02804252\tbass horn, sousaphone, tuba\nn02804414\tbassinet\nn02804515\tbassinet\nn02804610\tbassoon\nn02805283\tbaster\nn02805845\tbastinado\nn02805983\tbastion\nn02806088\tbastion, citadel\nn02806379\tbat\nn02806530\tbath\nn02806762\tbath chair\nn02806875\tbathhouse, bagnio\nn02806992\tbathhouse, bathing machine\nn02807133\tbathing cap, swimming cap\nn02807523\tbath oil\nn02807616\tbathrobe\nn02807731\tbathroom, bath\nn02808185\tbath salts\nn02808304\tbath towel\nn02808440\tbathtub, bathing tub, bath, tub\nn02808829\tbathyscaphe, bathyscaph, bathyscape\nn02808968\tbathysphere\nn02809105\tbatik\nn02809241\tbatiste\nn02809364\tbaton, wand\nn02809491\tbaton\nn02809605\tbaton\nn02809736\tbaton\nn02810139\tbattering ram\nn02810270\tbatter's box\nn02810471\tbattery, electric battery\nn02810782\tbattery, stamp battery\nn02811059\tbatting cage, cage\nn02811204\tbatting glove\nn02811350\tbatting helmet\nn02811468\tbattle-ax, battle-axe\nn02811618\tbattle cruiser\nn02811719\tbattle dress\nn02811936\tbattlement, crenelation, crenellation\nn02812201\tbattleship, battlewagon\nn02812342\tbattle sight, battlesight\nn02812631\tbay\nn02812785\tbay\nn02812949\tbayonet\nn02813252\tbay rum\nn02813399\tbay window, bow window\nn02813544\tbazaar, bazar\nn02813645\tbazaar, bazar\nn02813752\tbazooka\nn02813981\tB battery\nn02814116\tBB gun\nn02814338\tbeach house\nn02814428\tbeach towel\nn02814533\tbeach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon\nn02814774\tbeachwear\nn02814860\tbeacon, lighthouse, beacon light, pharos\nn02815478\tbeading plane\nn02815749\tbeaker\nn02815834\tbeaker\nn02815950\tbeam\nn02816494\tbeam balance\nn02816656\tbeanbag\nn02816768\tbeanie, beany\nn02817031\tbearing\nn02817251\tbearing rein, 
checkrein\nn02817386\tbearing wall\nn02817516\tbearskin, busby, shako\nn02817650\tbeater\nn02817799\tbeating-reed instrument, reed instrument, reed\nn02818135\tbeaver, castor\nn02818254\tbeaver\nn02818687\tBeckman thermometer\nn02818832\tbed\nn02819697\tbed\nn02820085\tbed and breakfast, bed-and-breakfast\nn02820210\tbedclothes, bed clothing, bedding\nn02820556\tBedford cord\nn02820675\tbed jacket\nn02821202\tbedpan\nn02821415\tbedpost\nn02821543\tbedroll\nn02821627\tbedroom, sleeping room, sleeping accommodation, chamber, bedchamber\nn02821943\tbedroom furniture\nn02822064\tbedsitting room, bedsitter, bedsit\nn02822220\tbedspread, bedcover, bed cover, bed covering, counterpane, spread\nn02822399\tbedspring\nn02822579\tbedstead, bedframe\nn02822762\tbeefcake\nn02822865\tbeehive, hive\nn02823124\tbeeper, pager\nn02823335\tbeer barrel, beer keg\nn02823428\tbeer bottle\nn02823510\tbeer can\nn02823586\tbeer garden\nn02823750\tbeer glass\nn02823848\tbeer hall\nn02823964\tbeer mat\nn02824058\tbeer mug, stein\nn02824152\tbelaying pin\nn02824319\tbelfry\nn02824448\tbell\nn02825153\tbell arch\nn02825240\tbellarmine, longbeard, long-beard, greybeard\nn02825442\tbellbottom trousers, bell-bottoms, bellbottom pants\nn02825657\tbell cote, bell cot\nn02825872\tbell foundry\nn02825961\tbell gable\nn02826068\tbell jar, bell glass\nn02826259\tbellows\nn02826459\tbellpull\nn02826589\tbell push\nn02826683\tbell seat, balloon seat\nn02826812\tbell tent\nn02826886\tbell tower\nn02827148\tbellyband\nn02827606\tbelt\nn02828115\tbelt, belt ammunition, belted ammunition\nn02828299\tbelt buckle\nn02828427\tbelting\nn02828884\tbench\nn02829246\tbench clamp\nn02829353\tbench hook\nn02829510\tbench lathe\nn02829596\tbench press\nn02830157\tbender\nn02831237\tberet\nn02831335\tberlin\nn02831595\tBermuda shorts, Jamaica shorts\nn02831724\tberth, bunk, built in bed\nn02831894\tbesom\nn02831998\tBessemer converter\nn02833040\tbethel\nn02833140\tbetting shop\nn02833275\tbevatron\nn02833403\tbevel, 
bevel square\nn02833793\tbevel gear, pinion and crown wheel, pinion and ring gear\nn02834027\tB-flat clarinet, licorice stick\nn02834397\tbib\nn02834506\tbib-and-tucker\nn02834642\tbicorn, bicorne\nn02834778\tbicycle, bike, wheel, cycle\nn02835271\tbicycle-built-for-two, tandem bicycle, tandem\nn02835412\tbicycle chain\nn02835551\tbicycle clip, trouser clip\nn02835724\tbicycle pump\nn02835829\tbicycle rack\nn02835915\tbicycle seat, saddle\nn02836035\tbicycle wheel\nn02836174\tbidet\nn02836268\tbier\nn02836392\tbier\nn02836513\tbi-fold door\nn02836607\tbifocals\nn02836900\tBig Blue, BLU-82\nn02837134\tbig board\nn02837567\tbight\nn02837789\tbikini, two-piece\nn02837887\tbikini pants\nn02838014\tbilge\nn02838178\tbilge keel\nn02838345\tbilge pump\nn02838577\tbilge well\nn02838728\tbill, peak, eyeshade, visor, vizor\nn02838958\tbill, billhook\nn02839110\tbillboard, hoarding\nn02839351\tbilliard ball\nn02839592\tbilliard room, billiard saloon, billiard parlor, billiard parlour, billiard hall\nn02839910\tbin\nn02840134\tbinder, ligature\nn02840245\tbinder, ring-binder\nn02840515\tbindery\nn02840619\tbinding, book binding, cover, back\nn02841063\tbin liner\nn02841187\tbinnacle\nn02841315\tbinoculars, field glasses, opera glasses\nn02841506\tbinocular microscope\nn02841641\tbiochip\nn02841847\tbiohazard suit\nn02842133\tbioscope\nn02842573\tbiplane\nn02842809\tbirch, birch rod\nn02843029\tbirchbark canoe, birchbark, birch bark\nn02843158\tbirdbath\nn02843276\tbirdcage\nn02843465\tbirdcall\nn02843553\tbird feeder, birdfeeder, feeder\nn02843684\tbirdhouse\nn02843777\tbird shot, buckshot, duck shot\nn02843909\tbiretta, berretta, birretta\nn02844056\tbishop\nn02844214\tbistro\nn02844307\tbit\nn02844714\tbit\nn02845130\tbite plate, biteplate\nn02845293\tbitewing\nn02845985\tbitumastic\nn02846141\tblack\nn02846260\tblack\nn02846511\tblackboard, chalkboard\nn02846619\tblackboard eraser\nn02846733\tblack box\nn02846874\tblackface\nn02847461\tblackjack, cosh, sap\nn02847631\tblack 
tie\nn02847852\tblackwash\nn02848118\tbladder\nn02848216\tblade\nn02848523\tblade, vane\nn02848806\tblade\nn02848921\tblank, dummy, blank shell\nn02849154\tblanket, cover\nn02849885\tblast furnace\nn02850060\tblasting cap\nn02850358\tblazer, sport jacket, sport coat, sports jacket, sports coat\nn02850732\tblender, liquidizer, liquidiser\nn02850950\tblimp, sausage balloon, sausage\nn02851099\tblind, screen\nn02851795\tblind curve, blind bend\nn02851939\tblindfold\nn02852043\tbling, bling bling\nn02852173\tblinker, flasher\nn02852360\tblister pack, bubble pack\nn02853016\tblock\nn02853218\tblockade\nn02853336\tblockade-runner\nn02853745\tblock and tackle\nn02853870\tblockbuster\nn02854378\tblockhouse\nn02854532\tblock plane\nn02854630\tbloodmobile\nn02854739\tbloomers, pants, drawers, knickers\nn02854926\tblouse\nn02855089\tblower\nn02855390\tblowtorch, torch, blowlamp\nn02855701\tblucher\nn02855793\tbludgeon\nn02855925\tblue\nn02856013\tblue chip\nn02856237\tblunderbuss\nn02856362\tblunt file\nn02857365\tboarding\nn02857477\tboarding house, boardinghouse\nn02857644\tboardroom, council chamber\nn02857907\tboards\nn02858304\tboat\nn02859184\tboater, leghorn, Panama, Panama hat, sailor, skimmer, straw hat\nn02859343\tboat hook\nn02859443\tboathouse\nn02859557\tboatswain's chair, bosun's chair\nn02859729\tboat train\nn02859955\tboatyard\nn02860415\tbobbin, spool, reel\nn02860640\tbobby pin, hairgrip, grip\nn02860847\tbobsled, bobsleigh, bob\nn02861022\tbobsled, bobsleigh\nn02861147\tbocce ball, bocci ball, boccie ball\nn02861286\tbodega\nn02861387\tbodice\nn02861509\tbodkin, threader\nn02861658\tbodkin\nn02861777\tbodkin\nn02861886\tbody\nn02862048\tbody armor, body armour, suit of armor, suit of armour, coat of mail, cataphract\nn02862916\tbody lotion\nn02863014\tbody stocking\nn02863176\tbody plethysmograph\nn02863340\tbody pad\nn02863426\tbodywork\nn02863536\tBofors gun\nn02863638\tbogy, bogie, bogey\nn02863750\tboiler, steam boiler\nn02864122\tboiling water reactor, 
BWR\nn02864504\tbolero\nn02864593\tbollard, bitt\nn02864987\tbolo, bolo knife\nn02865351\tbolo tie, bolo, bola tie, bola\nn02865665\tbolt\nn02865931\tbolt, deadbolt\nn02866106\tbolt\nn02866386\tbolt cutter\nn02866578\tbomb\nn02867401\tbombazine\nn02867592\tbomb calorimeter, bomb\nn02867715\tbomber\nn02867966\tbomber jacket\nn02868240\tbomblet, cluster bomblet\nn02868429\tbomb rack\nn02868546\tbombshell\nn02868638\tbomb shelter, air-raid shelter, bombproof\nn02868975\tbone-ash cup, cupel, refractory pot\nn02869155\tbone china\nn02869249\tbones, castanets, clappers, finger cymbals\nn02869563\tboneshaker\nn02869737\tbongo, bongo drum\nn02869837\tbonnet, poke bonnet\nn02870526\tbook\nn02870676\tbook bag\nn02870772\tbookbindery\nn02870880\tbookcase\nn02871005\tbookend\nn02871147\tbookmark, bookmarker\nn02871314\tbookmobile\nn02871439\tbookshelf\nn02871525\tbookshop, bookstore, bookstall\nn02871631\tboom\nn02871824\tboom, microphone boom\nn02871963\tboomerang, throwing stick, throw stick\nn02872333\tbooster, booster rocket, booster unit, takeoff booster, takeoff rocket\nn02872529\tbooster, booster amplifier, booster station, relay link, relay station, relay transmitter\nn02872752\tboot\nn02873520\tboot\nn02873623\tboot camp\nn02873733\tbootee, bootie\nn02873839\tbooth, cubicle, stall, kiosk\nn02874086\tbooth\nn02874214\tbooth\nn02874336\tboothose\nn02874442\tbootjack\nn02874537\tbootlace\nn02874642\tbootleg\nn02874750\tbootstrap\nn02875436\tbore bit, borer, rock drill, stone drill\nn02875626\tboron chamber\nn02875948\tborstal\nn02876084\tbosom\nn02876326\tBoston rocker\nn02876457\tbota\nn02876657\tbottle\nn02877266\tbottle, feeding bottle, nursing bottle\nn02877513\tbottle bank\nn02877642\tbottlebrush\nn02877765\tbottlecap\nn02877962\tbottle opener\nn02878107\tbottling plant\nn02878222\tbottom, freighter, merchantman, merchant ship\nn02878425\tboucle\nn02878534\tboudoir\nn02878628\tboulle, boule, buhl\nn02878796\tbouncing betty\nn02879087\tbouquet, corsage, posy, 
nosegay\nn02879309\tboutique, dress shop\nn02879422\tboutonniere\nn02879517\tbow\nn02879718\tbow\nn02880189\tbow, bowknot\nn02880393\tbow and arrow\nn02880546\tbowed stringed instrument, string\nn02880842\tBowie knife\nn02880940\tbowl\nn02881193\tbowl\nn02881546\tbowl\nn02881757\tbowler hat, bowler, derby hat, derby, plug hat\nn02881906\tbowline, bowline knot\nn02882190\tbowling alley\nn02882301\tbowling ball, bowl\nn02882483\tbowling equipment\nn02882647\tbowling pin, pin\nn02882894\tbowling shoe\nn02883004\tbowsprit\nn02883101\tbowstring\nn02883205\tbow tie, bow-tie, bowtie\nn02883344\tbox\nn02884225\tbox, loge\nn02884450\tbox, box seat\nn02884859\tbox beam, box girder\nn02884994\tbox camera, box Kodak\nn02885108\tboxcar\nn02885233\tbox coat\nn02885338\tboxing equipment\nn02885462\tboxing glove, glove\nn02885882\tbox office, ticket office, ticket booth\nn02886321\tbox spring\nn02886434\tbox wrench, box end wrench\nn02886599\tbrace, bracing\nn02887079\tbrace, braces, orthodontic braces\nn02887209\tbrace\nn02887489\tbrace, suspender, gallus\nn02887832\tbrace and bit\nn02887970\tbracelet, bangle\nn02888270\tbracer, armguard\nn02888429\tbrace wrench\nn02888569\tbracket, wall bracket\nn02888898\tbradawl, pricker\nn02889425\tbrake\nn02889646\tbrake\nn02889856\tbrake band\nn02889996\tbrake cylinder, hydraulic brake cylinder, master cylinder\nn02890188\tbrake disk\nn02890351\tbrake drum, drum\nn02890513\tbrake lining\nn02890662\tbrake pad\nn02890804\tbrake pedal\nn02890940\tbrake shoe, shoe, skid\nn02891188\tbrake system, brakes\nn02891788\tbrass, brass instrument\nn02892201\tbrass, memorial tablet, plaque\nn02892304\tbrass\nn02892392\tbrassard\nn02892499\tbrasserie\nn02892626\tbrassie\nn02892767\tbrassiere, bra, bandeau\nn02892948\tbrass knucks, knucks, brass knuckles, knuckles, knuckle duster\nn02893269\tbrattice\nn02893418\tbrazier, brasier\nn02893608\tbreadbasket\nn02893692\tbread-bin, breadbox\nn02893941\tbread knife\nn02894024\tbreakable\nn02894158\tbreakfast area, 
breakfast nook\nn02894337\tbreakfast table\nn02894605\tbreakwater, groin, groyne, mole, bulwark, seawall, jetty\nn02894847\tbreast drill\nn02895008\tbreast implant\nn02895154\tbreastplate, aegis, egis\nn02895328\tbreast pocket\nn02895438\tbreathalyzer, breathalyser\nn02896074\tbreechblock, breech closer\nn02896294\tbreechcloth, breechclout, loincloth\nn02896442\tbreeches, knee breeches, knee pants, knickerbockers, knickers\nn02896694\tbreeches buoy\nn02896856\tbreechloader\nn02896949\tbreeder reactor\nn02897097\tBren, Bren gun\nn02897389\tbrewpub\nn02897820\tbrick\nn02898093\tbrickkiln\nn02898173\tbricklayer's hammer\nn02898269\tbrick trowel, mason's trowel\nn02898369\tbrickwork\nn02898585\tbridal gown, wedding gown, wedding dress\nn02898711\tbridge, span\nn02899439\tbridge, nosepiece\nn02900160\tbridle\nn02900459\tbridle path, bridle road\nn02900594\tbridoon\nn02900705\tbriefcase\nn02900857\tbriefcase bomb\nn02900987\tbriefcase computer\nn02901114\tbriefs, Jockey shorts\nn02901259\tbrig\nn02901377\tbrig\nn02901481\tbrigandine\nn02901620\tbrigantine, hermaphrodite brig\nn02901793\tbrilliantine\nn02901901\tbrilliant pebble\nn02902079\tbrim\nn02902687\tbristle brush\nn02902816\tbritches\nn02902916\tbroad arrow\nn02903006\tbroadax, broadaxe\nn02903126\tbrochette\nn02903204\tbroadcaster, spreader\nn02903727\tbroadcloth\nn02903852\tbroadcloth\nn02904109\tbroad hatchet\nn02904233\tbroadloom\nn02904505\tbroadside\nn02904640\tbroadsword\nn02904803\tbrocade\nn02904927\tbrogan, brogue, clodhopper, work shoe\nn02905036\tbroiler\nn02905152\tbroken arch\nn02905886\tbronchoscope\nn02906734\tbroom\nn02906963\tbroom closet\nn02907082\tbroomstick, broom handle\nn02907296\tbrougham\nn02907391\tBrowning automatic rifle, BAR\nn02907656\tBrowning machine gun, Peacemaker\nn02907873\tbrownstone\nn02908123\tbrunch coat\nn02908217\tbrush\nn02908773\tBrussels carpet\nn02908951\tBrussels lace\nn02909053\tbubble\nn02909165\tbubble chamber\nn02909285\tbubble jet printer, bubble-jet printer, 
bubblejet\nn02909706\tbuckboard\nn02909870\tbucket, pail\nn02910145\tbucket seat\nn02910241\tbucket shop\nn02910353\tbuckle\nn02910542\tbuckram\nn02910701\tbucksaw\nn02910864\tbuckskins\nn02910964\tbuff, buffer\nn02911332\tbuffer, polisher\nn02911485\tbuffer, buffer storage, buffer store\nn02912065\tbuffet, counter, sideboard\nn02912319\tbuffing wheel\nn02912557\tbuggy, roadster\nn02912894\tbugle\nn02913152\tbuilding, edifice\nn02914991\tbuilding complex, complex\nn02915904\tbulldog clip, alligator clip\nn02916065\tbulldog wrench\nn02916179\tbulldozer, dozer\nn02916350\tbullet, slug\nn02916936\tbulletproof vest\nn02917067\tbullet train, bullet\nn02917377\tbullhorn, loud hailer, loud-hailer\nn02917521\tbullion\nn02917607\tbullnose, bullnosed plane\nn02917742\tbullpen, detention cell, detention centre\nn02917964\tbullpen\nn02918112\tbullring\nn02918330\tbulwark\nn02918455\tbumboat\nn02918595\tbumper\nn02918831\tbumper\nn02918964\tbumper car, Dodgem\nn02919148\tbumper guard\nn02919308\tbumper jack\nn02919414\tbundle, sheaf\nn02919648\tbung, spile\nn02919792\tbungalow, cottage\nn02919890\tbungee, bungee cord\nn02919976\tbunghole\nn02920083\tbunk\nn02920164\tbunk, feed bunk\nn02920259\tbunk bed, bunk\nn02920369\tbunker, sand trap, trap\nn02920503\tbunker, dugout\nn02920658\tbunker\nn02921029\tbunsen burner, bunsen, etna\nn02921195\tbunting\nn02921292\tbur, burr\nn02921406\tBurberry\nn02921592\tburette, buret\nn02921756\tburglar alarm\nn02921884\tburial chamber, sepulcher, sepulchre, sepulture\nn02922159\tburial garment\nn02922292\tburial mound, grave mound, barrow, tumulus\nn02922461\tburin\nn02922578\tburqa, burka\nn02922798\tburlap, gunny\nn02922877\tburn bag\nn02923129\tburner\nn02923535\tburnous, burnoose, burnouse\nn02923682\tburp gun, machine pistol\nn02923915\tburr\nn02924116\tbus, autobus, coach, charabanc, double-decker, jitney, motorbus, motorcoach, omnibus, passenger vehicle\nn02925009\tbushel basket\nn02925107\tbushing, cylindrical lining\nn02925385\tbush 
jacket\nn02925519\tbusiness suit\nn02925666\tbuskin, combat boot, desert boot, half boot, top boot\nn02926426\tbustier\nn02926591\tbustle\nn02927053\tbutcher knife\nn02927161\tbutcher shop, meat market\nn02927764\tbutter dish\nn02927887\tbutterfly valve\nn02928049\tbutter knife\nn02928299\tbutt hinge\nn02928413\tbutt joint, butt\nn02928608\tbutton\nn02929184\tbuttonhook\nn02929289\tbuttress, buttressing\nn02929462\tbutt shaft\nn02929582\tbutt weld, butt-weld\nn02929923\tbuzz bomb, robot bomb, flying bomb, doodlebug, V-1\nn02930080\tbuzzer\nn02930214\tBVD, BVD's\nn02930339\tbypass condenser, bypass capacitor\nn02930645\tbyway, bypath, byroad\nn02930766\tcab, hack, taxi, taxicab\nn02931013\tcab, cabriolet\nn02931148\tcab\nn02931294\tcabana\nn02931417\tcabaret, nightclub, night club, club, nightspot\nn02931836\tcaber\nn02932019\tcabin\nn02932400\tcabin\nn02932523\tcabin car, caboose\nn02932693\tcabin class, second class, economy class\nn02932891\tcabin cruiser, cruiser, pleasure boat, pleasure craft\nn02933112\tcabinet\nn02933340\tcabinet, console\nn02933462\tcabinet, locker, storage locker\nn02933649\tcabinetwork\nn02933750\tcabin liner\nn02933990\tcable, cable television, cable system, cable television service\nn02934168\tcable, line, transmission line\nn02934451\tcable car, car\nn02935017\tcache, memory cache\nn02935387\tcaddy, tea caddy\nn02935490\tcaesium clock\nn02935658\tcafe, coffeehouse, coffee shop, coffee bar\nn02935891\tcafeteria\nn02936176\tcafeteria tray\nn02936281\tcaff\nn02936402\tcaftan, kaftan\nn02936570\tcaftan, kaftan\nn02936714\tcage, coop\nn02936921\tcage\nn02937010\tcagoule\nn02937336\tcaisson\nn02937958\tcalash, caleche, calash top\nn02938218\tcalceus\nn02938321\tcalcimine\nn02938886\tcalculator, calculating machine\nn02939185\tcaldron, cauldron\nn02939763\tcalico\nn02939866\tcaliper, calliper\nn02940289\tcall-board\nn02940385\tcall center, call centre\nn02940570\tcaller ID\nn02940706\tcalliope, steam 
organ\nn02941095\tcalorimeter\nn02941228\tcalpac, calpack, kalpac\nn02941845\tcamail, aventail, ventail\nn02942015\tcamber arch\nn02942147\tcambric\nn02942349\tcamcorder\nn02942460\tcamel's hair, camelhair\nn02942699\tcamera, photographic camera\nn02943241\tcamera lens, optical lens\nn02943465\tcamera lucida\nn02943686\tcamera obscura\nn02943871\tcamera tripod\nn02943964\tcamise\nn02944075\tcamisole\nn02944146\tcamisole, underbodice\nn02944256\tcamlet\nn02944459\tcamouflage\nn02944579\tcamouflage, camo\nn02944826\tcamp, encampment, cantonment, bivouac\nn02945161\tcamp\nn02945813\tcamp, refugee camp\nn02945964\tcampaign hat\nn02946127\tcampanile, belfry\nn02946270\tcamp chair\nn02946348\tcamper, camping bus, motor home\nn02946509\tcamper trailer\nn02946753\tcampstool\nn02946824\tcamshaft\nn02946921\tcan, tin, tin can\nn02947212\tcanal\nn02947660\tcanal boat, narrow boat, narrowboat\nn02947818\tcandelabrum, candelabra\nn02947977\tcandid camera\nn02948072\tcandle, taper, wax light\nn02948293\tcandlepin\nn02948403\tcandlesnuffer\nn02948557\tcandlestick, candle holder\nn02948834\tcandlewick\nn02948942\tcandy thermometer\nn02949084\tcane\nn02949202\tcane\nn02949356\tcangue\nn02949542\tcanister, cannister, tin\nn02950018\tcannery\nn02950120\tcannikin\nn02950186\tcannikin\nn02950256\tcannon\nn02950482\tcannon\nn02950632\tcannon\nn02950826\tcannon\nn02950943\tcannonball, cannon ball, round shot\nn02951358\tcanoe\nn02951585\tcan opener, tin opener\nn02951703\tcanopic jar, canopic vase\nn02951843\tcanopy\nn02952109\tcanopy\nn02952237\tcanopy\nn02952374\tcanteen\nn02952485\tcanteen\nn02952585\tcanteen\nn02952674\tcanteen, mobile canteen\nn02952798\tcanteen\nn02952935\tcant hook\nn02953056\tcantilever\nn02953197\tcantilever bridge\nn02953455\tcantle\nn02953552\tCanton crepe\nn02953673\tcanvas, canvass\nn02953850\tcanvas, canvass\nn02954163\tcanvas tent, canvas, canvass\nn02954340\tcap\nn02954938\tcap\nn02955065\tcap\nn02955247\tcapacitor, capacitance, condenser, electrical 
condenser\nn02955540\tcaparison, trapping, housing\nn02955767\tcape, mantle\nn02956393\tcapital ship\nn02956699\tcapitol\nn02956795\tcap opener\nn02956883\tcapote, hooded cloak\nn02957008\tcapote, hooded coat\nn02957135\tcap screw\nn02957252\tcapstan\nn02957427\tcapstone, copestone, coping stone, stretcher\nn02957755\tcapsule\nn02957862\tcaptain's chair\nn02958343\tcar, auto, automobile, machine, motorcar\nn02959942\tcar, railcar, railway car, railroad car\nn02960352\tcar, elevator car\nn02960690\tcarabiner, karabiner, snap ring\nn02960903\tcarafe, decanter\nn02961035\tcaravansary, caravanserai, khan, caravan inn\nn02961225\tcar battery, automobile battery\nn02961451\tcarbine\nn02961544\tcar bomb\nn02961947\tcarbon arc lamp, carbon arc\nn02962061\tcarboy\nn02962200\tcarburetor, carburettor\nn02962414\tcar carrier\nn02962843\tcardcase\nn02962938\tcardiac monitor, heart monitor\nn02963159\tcardigan\nn02963302\tcard index, card catalog, card catalogue\nn02963503\tcardiograph, electrocardiograph\nn02963692\tcardioid microphone\nn02963821\tcar door\nn02963987\tcardroom\nn02964075\tcard table\nn02964196\tcard table\nn02964295\tcar-ferry\nn02964634\tcargo area, cargo deck, cargo hold, hold, storage area\nn02964843\tcargo container\nn02964934\tcargo door\nn02965024\tcargo hatch\nn02965122\tcargo helicopter\nn02965216\tcargo liner\nn02965300\tcargo ship, cargo vessel\nn02965529\tcarillon\nn02965783\tcar mirror\nn02966068\tcaroche\nn02966193\tcarousel, carrousel, merry-go-round, roundabout, whirligig\nn02966545\tcarpenter's hammer, claw hammer, clawhammer\nn02966687\tcarpenter's kit, tool kit\nn02966786\tcarpenter's level\nn02966942\tcarpenter's mallet\nn02967081\tcarpenter's rule\nn02967170\tcarpenter's square\nn02967294\tcarpetbag\nn02967407\tcarpet beater, rug beater\nn02967540\tcarpet loom\nn02967626\tcarpet pad, rug pad, underlay, underlayment\nn02967782\tcarpet sweeper, sweeper\nn02967991\tcarpet tack\nn02968074\tcarport, car port\nn02968210\tcarrack, 
carack\nn02968333\tcarrel, carrell, cubicle, stall\nn02968473\tcarriage, equipage, rig\nn02969010\tcarriage\nn02969163\tcarriage bolt\nn02969323\tcarriageway\nn02969527\tcarriage wrench\nn02969634\tcarrick bend\nn02969886\tcarrier\nn02970408\tcarryall, holdall, tote, tote bag\nn02970534\tcarrycot\nn02970685\tcar seat\nn02970849\tcart\nn02971167\tcar tire, automobile tire, auto tire, rubber tire\nn02971356\tcarton\nn02971473\tcartouche, cartouch\nn02971579\tcar train\nn02971691\tcartridge\nn02971940\tcartridge, pickup\nn02972397\tcartridge belt\nn02972714\tcartridge extractor, cartridge remover, extractor\nn02972934\tcartridge fuse\nn02973017\tcartridge holder, cartridge clip, clip, magazine\nn02973236\tcartwheel\nn02973805\tcarving fork\nn02973904\tcarving knife\nn02974003\tcar wheel\nn02974348\tcaryatid\nn02974454\tcascade liquefier\nn02974565\tcascade transformer\nn02974697\tcase\nn02975212\tcase, display case, showcase, vitrine\nn02975589\tcase, compositor's case, typesetter's case\nn02975994\tcasein paint, casein\nn02976123\tcase knife, sheath knife\nn02976249\tcase knife\nn02976350\tcasement\nn02976455\tcasement window\nn02976552\tcasern\nn02976641\tcase shot, canister, canister shot\nn02976815\tcash bar\nn02976939\tcashbox, money box, till\nn02977058\tcash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM\nn02977330\tcashmere\nn02977438\tcash register, register\nn02977619\tcasing, case\nn02977936\tcasino, gambling casino\nn02978055\tcasket, jewel casket\nn02978205\tcasque\nn02978367\tcasquet, casquetel\nn02978478\tCassegrainian telescope, Gregorian telescope\nn02978753\tcasserole\nn02978881\tcassette\nn02979074\tcassette deck\nn02979186\tcassette player\nn02979290\tcassette recorder\nn02979399\tcassette tape\nn02979516\tcassock\nn02979836\tcast, plaster cast, plaster bandage\nn02980036\tcaster, castor\nn02980203\tcaster, castor\nn02980441\tcastle\nn02980625\tcastle, 
rook\nn02981024\tcatacomb\nn02981198\tcatafalque\nn02981321\tcatalytic converter\nn02981565\tcatalytic cracker, cat cracker\nn02981792\tcatamaran\nn02981911\tcatapult, arbalest, arbalist, ballista, bricole, mangonel, onager, trebuchet, trebucket\nn02982232\tcatapult, launcher\nn02982416\tcatboat\nn02982515\tcat box\nn02982599\tcatch\nn02983072\tcatchall\nn02983189\tcatcher's mask\nn02983357\tcatchment\nn02983507\tCaterpillar, cat\nn02983904\tcathedra, bishop's throne\nn02984061\tcathedral\nn02984203\tcathedral, duomo\nn02984469\tcatheter\nn02984699\tcathode\nn02985137\tcathode-ray tube, CRT\nn02985606\tcat-o'-nine-tails, cat\nn02985828\tcat's-paw\nn02985963\tcatsup bottle, ketchup bottle\nn02986066\tcattle car\nn02986160\tcattle guard, cattle grid\nn02986348\tcattleship, cattle boat\nn02987047\tcautery, cauterant\nn02987379\tcavalier hat, slouch hat\nn02987492\tcavalry sword, saber, sabre\nn02987706\tcavetto\nn02987823\tcavity wall\nn02987950\tC battery\nn02988066\tC-clamp\nn02988156\tCD drive\nn02988304\tCD player\nn02988486\tCD-R, compact disc recordable, CD-WO, compact disc write-once\nn02988679\tCD-ROM, compact disc read-only memory\nn02988963\tCD-ROM drive\nn02989099\tcedar chest\nn02990373\tceiling\nn02990758\tcelesta\nn02991048\tcell, electric cell\nn02991302\tcell, jail cell, prison cell\nn02991847\tcellar, wine cellar\nn02992032\tcellblock, ward\nn02992211\tcello, violoncello\nn02992368\tcellophane\nn02992529\tcellular telephone, cellular phone, cellphone, cell, mobile phone\nn02992795\tcellulose tape, Scotch tape, Sellotape\nn02993194\tcenotaph, empty tomb\nn02993368\tcenser, thurible\nn02993546\tcenter, centre\nn02994573\tcenter punch\nn02994743\tCentigrade thermometer\nn02995345\tcentral processing unit, CPU, C.P.U., central processor, processor, mainframe\nn02995871\tcentrifugal pump\nn02995998\tcentrifuge, extractor, separator\nn02997391\tceramic\nn02997607\tceramic ware\nn02997910\tcereal bowl\nn02998003\tcereal 
box\nn02998107\tcerecloth\nn02998563\tcesspool, cesspit, sink, sump\nn02998696\tchachka, tsatske, tshatshke, tchotchke\nn02998841\tchador, chadar, chaddar, chuddar\nn02999138\tchafing dish\nn02999410\tchain\nn02999936\tchain\nn03000134\tchainlink fence\nn03000247\tchain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour\nn03000530\tchain printer\nn03000684\tchain saw, chainsaw\nn03001115\tchain store\nn03001282\tchain tongs\nn03001540\tchain wrench\nn03001627\tchair\nn03002096\tchair\nn03002210\tchair of state\nn03002341\tchairlift, chair lift\nn03002555\tchaise, shay\nn03002711\tchaise longue, chaise, daybed\nn03002816\tchalet\nn03002948\tchalice, goblet\nn03003091\tchalk\nn03003633\tchallis\nn03004275\tchamberpot, potty, thunder mug\nn03004409\tchambray\nn03004531\tchamfer bit\nn03004620\tchamfer plane\nn03004713\tchamois cloth\nn03004824\tchancel, sanctuary, bema\nn03005033\tchancellery\nn03005147\tchancery\nn03005285\tchandelier, pendant, pendent\nn03005515\tchandlery\nn03005619\tchanfron, chamfron, testiere, frontstall, front-stall\nn03006626\tchanter, melody pipe\nn03006788\tchantry\nn03006903\tchap\nn03007130\tchapel\nn03007297\tchapterhouse, fraternity house, frat house\nn03007444\tchapterhouse\nn03007591\tcharacter printer, character-at-a-time printer, serial printer\nn03008177\tcharcuterie\nn03008817\tcharge-exchange accelerator\nn03008976\tcharger, battery charger\nn03009111\tchariot\nn03009269\tchariot\nn03009794\tcharnel house, charnel\nn03010473\tchassis\nn03010656\tchassis\nn03010795\tchasuble\nn03010915\tchateau\nn03011018\tchatelaine\nn03011355\tchecker, chequer\nn03011741\tcheckout, checkout counter\nn03012013\tcheekpiece\nn03012159\tcheeseboard, cheese tray\nn03012373\tcheesecloth\nn03012499\tcheese cutter\nn03012644\tcheese press\nn03012734\tchemical bomb, gas bomb\nn03012897\tchemical plant\nn03013006\tchemical reactor\nn03013438\tchemise, sack, shift\nn03013580\tchemise, shimmy, shift, slip, 
teddy\nn03013850\tchenille\nn03014440\tchessman, chess piece\nn03014705\tchest\nn03015149\tchesterfield\nn03015254\tchest of drawers, chest, bureau, dresser\nn03015478\tchest protector\nn03015631\tcheval-de-frise, chevaux-de-frise\nn03015851\tcheval glass\nn03016209\tchicane\nn03016389\tchicken coop, coop, hencoop, henhouse\nn03016609\tchicken wire\nn03016737\tchicken yard, hen yard, chicken run, fowl run\nn03016868\tchiffon\nn03016953\tchiffonier, commode\nn03017070\tchild's room\nn03017168\tchime, bell, gong\nn03017698\tchimney breast\nn03017835\tchimney corner, inglenook\nn03018209\tchina\nn03018349\tchina cabinet, china closet\nn03018614\tchinchilla\nn03018712\tChinese lantern\nn03018848\tChinese puzzle\nn03019198\tchinning bar\nn03019304\tchino\nn03019434\tchino\nn03019685\tchin rest\nn03019806\tchin strap\nn03019938\tchintz\nn03020034\tchip, microchip, micro chip, silicon chip, microprocessor chip\nn03020416\tchip, poker chip\nn03020692\tchisel\nn03021228\tchlamys\nn03024064\tchoir\nn03024233\tchoir loft\nn03024333\tchoke\nn03024518\tchoke, choke coil, choking coil\nn03025070\tchokey, choky\nn03025165\tchoo-choo\nn03025250\tchopine, platform\nn03025886\tchordophone\nn03026506\tChristmas stocking\nn03026907\tchronograph\nn03027001\tchronometer\nn03027108\tchronoscope\nn03027250\tchuck\nn03027505\tchuck wagon\nn03027625\tchukka, chukka boot\nn03028079\tchurch, church building\nn03028596\tchurch bell\nn03028785\tchurch hat\nn03029066\tchurch key\nn03029197\tchurch tower\nn03029296\tchuridars\nn03029445\tchurn, butter churn\nn03029925\tciderpress\nn03030262\tcigar band\nn03030353\tcigar box\nn03030557\tcigar cutter\nn03030880\tcigarette butt\nn03031012\tcigarette case\nn03031152\tcigarette holder\nn03031422\tcigar lighter, cigarette lighter, pocket lighter\nn03031756\tcinch, girth\nn03032252\tcinema, movie theater, movie theatre, movie house, picture palace\nn03032453\tcinquefoil\nn03032811\tcircle, round\nn03033267\tcirclet\nn03033362\tcircuit, electrical 
circuit, electric circuit\nn03033986\tcircuit board, circuit card, board, card, plug-in, add-in\nn03034244\tcircuit breaker, breaker\nn03034405\tcircuitry\nn03034516\tcircular plane, compass plane\nn03034663\tcircular saw, buzz saw\nn03035252\tcircus tent, big top, round top, top\nn03035510\tcistern\nn03035715\tcistern, water tank\nn03035832\tcittern, cithern, cither, citole, gittern\nn03036022\tcity hall\nn03036149\tcityscape\nn03036244\tcity university\nn03036341\tcivies, civvies\nn03036469\tcivilian clothing, civilian dress, civilian garb, plain clothes\nn03036701\tclack valve, clack, clapper valve\nn03036866\tclamp, clinch\nn03037108\tclamshell, grapple\nn03037228\tclapper, tongue\nn03037404\tclapperboard\nn03037590\tclarence\nn03037709\tclarinet\nn03038041\tClark cell, Clark standard cell\nn03038281\tclasp\nn03038480\tclasp knife, jackknife\nn03038685\tclassroom, schoolroom\nn03038870\tclavichord\nn03039015\tclavier, Klavier\nn03039259\tclay pigeon\nn03039353\tclaymore mine, claymore\nn03039493\tclaymore\nn03039827\tcleaners, dry cleaners\nn03039947\tcleaning implement, cleaning device, cleaning equipment\nn03040229\tcleaning pad\nn03040376\tclean room, white room\nn03040836\tclearway\nn03041114\tcleat\nn03041265\tcleat\nn03041449\tcleats\nn03041632\tcleaver, meat cleaver, chopper\nn03041810\tclerestory, clearstory\nn03042139\tclevis\nn03042384\tclews\nn03042490\tcliff dwelling\nn03042697\tclimbing frame\nn03042829\tclinch\nn03042984\tclinch, clench\nn03043173\tclincher\nn03043274\tclinic\nn03043423\tclinical thermometer, mercury-in-glass clinical thermometer\nn03043693\tclinker, clinker brick\nn03043798\tclinometer, inclinometer\nn03043958\tclip\nn03044671\tclip lead\nn03044801\tclip-on\nn03044934\tclipper\nn03045074\tclipper\nn03045228\tclipper, clipper ship\nn03045337\tcloak\nn03045698\tcloak\nn03045800\tcloakroom, coatroom\nn03046029\tcloche\nn03046133\tcloche\nn03046257\tclock\nn03046802\tclock pendulum\nn03046921\tclock radio\nn03047052\tclock 
tower\nn03047171\tclockwork\nn03047690\tclog, geta, patten, sabot\nn03047799\tcloisonne\nn03047941\tcloister\nn03048883\tclosed circuit, loop\nn03049066\tclosed-circuit television\nn03049326\tclosed loop, closed-loop system\nn03049457\tcloset\nn03049782\tcloseup lens\nn03049924\tcloth cap, flat cap\nn03050026\tcloth covering\nn03050453\tclothesbrush\nn03050546\tclothes closet, clothespress\nn03050655\tclothes dryer, clothes drier\nn03050864\tclothes hamper, laundry basket, clothes basket, voider\nn03051041\tclotheshorse\nn03051249\tclothespin, clothes pin, clothes peg\nn03051396\tclothes tree, coat tree, coat stand\nn03051540\tclothing, article of clothing, vesture, wear, wearable, habiliment\nn03052464\tclothing store, haberdashery, haberdashery store, mens store\nn03052917\tclout nail, clout\nn03053047\tclove hitch\nn03053976\tclub car, lounge car\nn03054491\tclubroom\nn03054605\tcluster bomb\nn03054901\tclutch\nn03055159\tclutch, clutch pedal\nn03055418\tclutch bag, clutch\nn03055670\tcoach, four-in-hand, coach-and-four\nn03055857\tcoach house, carriage house, remise\nn03056097\tcoal car\nn03056215\tcoal chute\nn03056288\tcoal house\nn03056493\tcoal shovel\nn03056583\tcoaming\nn03056873\tcoaster brake\nn03057021\tcoat\nn03057541\tcoat button\nn03057636\tcoat closet\nn03057724\tcoatdress\nn03057841\tcoatee\nn03057920\tcoat hanger, clothes hanger, dress hanger\nn03058107\tcoating, coat\nn03058603\tcoating\nn03058949\tcoat of paint\nn03059103\tcoatrack, coat rack, hatrack\nn03059236\tcoattail\nn03059366\tcoaxial cable, coax, coax cable\nn03059685\tcobweb\nn03059934\tcobweb\nn03060728\tCockcroft and Walton accelerator, Cockcroft-Walton accelerator, Cockcroft and Walton voltage multiplier, Cockcroft-Walton voltage multiplier\nn03061050\tcocked hat\nn03061211\tcockhorse\nn03061345\tcockleshell\nn03061505\tcockpit\nn03061674\tcockpit\nn03061819\tcockpit\nn03061893\tcockscomb, coxcomb\nn03062015\tcocktail dress, sheath\nn03062122\tcocktail lounge\nn03062245\tcocktail 
shaker\nn03062336\tcocotte\nn03062651\tcodpiece\nn03062798\tcoelostat\nn03062985\tcoffee can\nn03063073\tcoffee cup\nn03063199\tcoffee filter\nn03063338\tcoffee maker\nn03063485\tcoffee mill, coffee grinder\nn03063599\tcoffee mug\nn03063689\tcoffeepot\nn03063834\tcoffee stall\nn03063968\tcoffee table, cocktail table\nn03064250\tcoffee urn\nn03064350\tcoffer\nn03064562\tCoffey still\nn03064758\tcoffin, casket\nn03064935\tcog, sprocket\nn03065243\tcoif\nn03065424\tcoil, spiral, volute, whorl, helix\nn03065708\tcoil\nn03066232\tcoil\nn03066359\tcoil spring, volute spring\nn03066464\tcoin box\nn03066849\tcolander, cullender\nn03067093\tcold cathode\nn03067212\tcold chisel, set chisel\nn03067339\tcold cream, coldcream, face cream, vanishing cream\nn03067518\tcold frame\nn03068181\tcollar, neckband\nn03068998\tcollar\nn03069752\tcollege\nn03070059\tcollet, collet chuck\nn03070193\tcollider\nn03070396\tcolliery, pit\nn03070587\tcollimator\nn03070854\tcollimator\nn03071021\tcologne, cologne water, eau de cologne\nn03071160\tcolonnade\nn03071288\tcolonoscope\nn03071552\tcolorimeter, tintometer\nn03072056\tcolors, colours\nn03072201\tcolor television, colour television, color television system, colour television system, color TV, colour TV\nn03072440\tcolor tube, colour tube, color television tube, colour television tube, color TV tube, colour TV tube\nn03072682\tcolor wash, colour wash\nn03073296\tColt\nn03073384\tcolter, coulter\nn03073545\tcolumbarium\nn03073694\tcolumbarium, cinerarium\nn03073977\tcolumn, pillar\nn03074380\tcolumn, pillar\nn03074855\tcomb\nn03075097\tcomb\nn03075248\tcomber\nn03075370\tcombination lock\nn03075500\tcombination plane\nn03075634\tcombine\nn03075768\tcomforter, pacifier, baby's dummy, teething ring\nn03075946\tcommand module\nn03076411\tcommissary\nn03076623\tcommissary\nn03076708\tcommodity, trade good, good\nn03077442\tcommon ax, common axe, Dayton ax, Dayton axe\nn03077616\tcommon room\nn03077741\tcommunications 
satellite\nn03078287\tcommunication system\nn03078506\tcommunity center, civic center\nn03078670\tcommutator\nn03078802\tcommuter, commuter train\nn03078995\tcompact, powder compact\nn03079136\tcompact, compact car\nn03079230\tcompact disk, compact disc, CD\nn03079494\tcompact-disk burner, CD burner\nn03079616\tcompanionway\nn03079741\tcompartment\nn03080309\tcompartment\nn03080497\tcompass\nn03080633\tcompass\nn03080731\tcompass card, mariner's compass\nn03080904\tcompass saw\nn03081859\tcompound\nn03081986\tcompound lens\nn03082127\tcompound lever\nn03082280\tcompound microscope\nn03082450\tcompress\nn03082656\tcompression bandage, tourniquet\nn03082807\tcompressor\nn03082979\tcomputer, computing machine, computing device, data processor, electronic computer, information processing system\nn03084420\tcomputer circuit\nn03084834\tcomputerized axial tomography scanner, CAT scanner\nn03085013\tcomputer keyboard, keypad\nn03085219\tcomputer monitor\nn03085333\tcomputer network\nn03085602\tcomputer screen, computer display\nn03085781\tcomputer store\nn03085915\tcomputer system, computing system, automatic data processing system, ADP system, ADPS\nn03086183\tconcentration camp, stockade\nn03086457\tconcert grand, concert piano\nn03086580\tconcert hall\nn03086670\tconcertina\nn03086868\tconcertina\nn03087069\tconcrete mixer, cement mixer\nn03087245\tcondensation pump, diffusion pump\nn03087366\tcondenser, optical condenser\nn03087521\tcondenser\nn03087643\tcondenser\nn03087816\tcondenser microphone, capacitor microphone\nn03088389\tcondominium\nn03088580\tcondominium, condo\nn03088707\tconductor\nn03089477\tcone clutch, cone friction clutch\nn03089624\tconfectionery, confectionary, candy store\nn03089753\tconference center, conference house\nn03089879\tconference room\nn03090000\tconference table, council table, council board\nn03090172\tconfessional\nn03090437\tconformal projection, orthomorphic projection\nn03090710\tcongress boot, congress shoe, congress 
gaiter\nn03090856\tconic projection, conical projection\nn03091044\tconnecting rod\nn03091223\tconnecting room\nn03091374\tconnection, connexion, connector, connecter, connective\nn03091907\tconning tower\nn03092053\tconning tower\nn03092166\tconservatory, hothouse, indoor garden\nn03092314\tconservatory, conservatoire\nn03092476\tconsole\nn03092656\tconsole\nn03092883\tconsole table, console\nn03093427\tconsulate\nn03093792\tcontact, tangency\nn03094159\tcontact, contact lens\nn03094503\tcontainer\nn03095699\tcontainer ship, containership, container vessel\nn03095965\tcontainment\nn03096439\tcontrabassoon, contrafagotto, double bassoon\nn03096960\tcontrol, controller\nn03097362\tcontrol center\nn03097535\tcontrol circuit, negative feedback circuit\nn03097673\tcontrol key, command key\nn03098140\tcontrol panel, instrument panel, control board, board, panel\nn03098515\tcontrol rod\nn03098688\tcontrol room\nn03098806\tcontrol system\nn03098959\tcontrol tower\nn03099147\tconvector\nn03099274\tconvenience store\nn03099454\tconvent\nn03099622\tconventicle, meetinghouse\nn03099771\tconverging lens, convex lens\nn03099945\tconverter, convertor\nn03100240\tconvertible\nn03100346\tconvertible, sofa bed\nn03100490\tconveyance, transport\nn03100897\tconveyer belt, conveyor belt, conveyer, conveyor, transporter\nn03101156\tcooker\nn03101302\tcookfire\nn03101375\tcookhouse\nn03101517\tcookie cutter\nn03101664\tcookie jar, cooky jar\nn03101796\tcookie sheet, baking tray\nn03101986\tcooking utensil, cookware\nn03102371\tcookstove\nn03102516\tcoolant system\nn03102654\tcooler, ice chest\nn03102859\tcooling system, cooling\nn03103128\tcooling system, engine cooling system\nn03103396\tcooling tower\nn03103563\tcoonskin cap, coonskin\nn03103904\tcope\nn03104019\tcoping saw\nn03104512\tcopperware\nn03105088\tcopyholder\nn03105214\tcoquille\nn03105306\tcoracle\nn03105467\tcorbel, truss\nn03105645\tcorbel arch\nn03105810\tcorbel step, corbie-step, corbiestep, crow 
step\nn03105974\tcorbie gable\nn03106722\tcord, corduroy\nn03106898\tcord, electric cord\nn03107046\tcordage\nn03107488\tcords, corduroys\nn03107716\tcore\nn03108455\tcore bit\nn03108624\tcore drill\nn03108759\tcorer\nn03108853\tcork, bottle cork\nn03109033\tcorker\nn03109150\tcorkscrew, bottle screw\nn03109253\tcorncrib\nn03109693\tcorner, quoin\nn03109881\tcorner, nook\nn03110202\tcorner post\nn03110669\tcornet, horn, trumpet, trump\nn03111041\tcornice\nn03111177\tcornice\nn03111296\tcornice, valance, valance board, pelmet\nn03111690\tcorrectional institution\nn03112240\tcorrugated fastener, wiggle nail\nn03112719\tcorselet, corslet\nn03112869\tcorset, girdle, stays\nn03113152\tcosmetic\nn03113505\tcosmotron\nn03113657\tcostume\nn03113835\tcostume\nn03114041\tcostume\nn03114236\tcostume\nn03114379\tcosy, tea cosy, cozy, tea cozy\nn03114504\tcot, camp bed\nn03114743\tcottage tent\nn03114839\tcotter, cottar\nn03115014\tcotter pin\nn03115180\tcotton\nn03115400\tcotton flannel, Canton flannel\nn03115663\tcotton mill\nn03115762\tcouch\nn03115897\tcouch\nn03116008\tcouchette\nn03116163\tcoude telescope, coude system\nn03116530\tcounter\nn03116767\tcounter, tabulator\nn03117199\tcounter\nn03117642\tcounterbore, countersink, countersink bit\nn03118346\tcounter tube\nn03118969\tcountry house\nn03119203\tcountry store, general store, trading post\nn03119396\tcoupe\nn03119510\tcoupling, coupler\nn03120198\tcourt, courtyard\nn03120491\tcourt\nn03120778\tcourt, courtroom\nn03121040\tcourt\nn03121190\tCourtelle\nn03121298\tcourthouse\nn03121431\tcourthouse\nn03121897\tcoverall\nn03122073\tcovered bridge\nn03122202\tcovered couch\nn03122295\tcovered wagon, Conestoga wagon, Conestoga, prairie wagon, prairie schooner\nn03122748\tcovering\nn03123553\tcoverlet\nn03123666\tcover plate\nn03123809\tcowbarn, cowshed, cow barn, cowhouse, byre\nn03123917\tcowbell\nn03124043\tcowboy boot\nn03124170\tcowboy hat, ten-gallon hat\nn03124313\tcowhide\nn03124474\tcowl\nn03124590\tcow pen, 
cattle pen, corral\nn03125057\tCPU board, mother board\nn03125588\tcrackle, crackleware, crackle china\nn03125729\tcradle\nn03125870\tcraft\nn03126090\tcramp, cramp iron\nn03126385\tcrampon, crampoon, climbing iron, climber\nn03126580\tcrampon, crampoon\nn03126707\tcrane\nn03126927\tcraniometer\nn03127024\tcrank, starter\nn03127203\tcrankcase\nn03127408\tcrankshaft\nn03127531\tcrash barrier\nn03127747\tcrash helmet\nn03127925\tcrate\nn03128085\tcravat\nn03128248\tcrayon, wax crayon\nn03128427\tcrazy quilt\nn03128519\tcream, ointment, emollient\nn03129001\tcream pitcher, creamer\nn03129471\tcreche, foundling hospital\nn03129636\tcreche\nn03129753\tcredenza, credence\nn03129848\tcreel\nn03130066\tcrematory, crematorium, cremation chamber\nn03130233\tcrematory, crematorium\nn03130563\tcrepe, crape\nn03130761\tcrepe de Chine\nn03130866\tcrescent wrench\nn03131193\tcretonne\nn03131574\tcrib, cot\nn03131669\tcrib\nn03131967\tcricket ball\nn03132076\tcricket bat, bat\nn03132261\tcricket equipment\nn03132438\tcringle, eyelet, loop, grommet, grummet\nn03132666\tcrinoline\nn03132776\tcrinoline\nn03133050\tcrochet needle, crochet hook\nn03133415\tcrock, earthenware jar\nn03133878\tCrock Pot\nn03134118\tcrook, shepherd's crook\nn03134232\tCrookes radiometer\nn03134394\tCrookes tube\nn03134739\tcroquet ball\nn03134853\tcroquet equipment\nn03135030\tcroquet mallet\nn03135532\tcross\nn03135656\tcrossbar\nn03135788\tcrossbar\nn03135917\tcrossbar\nn03136051\tcrossbench\nn03136254\tcross bit\nn03136369\tcrossbow\nn03136504\tcrosscut saw, crosscut handsaw, cutoff saw\nn03137473\tcrossjack, mizzen course\nn03137579\tcrosspiece\nn03138128\tcrotchet\nn03138217\tcroupier's rake\nn03138344\tcrowbar, wrecking bar, pry, pry bar\nn03138669\tcrown, diadem\nn03139089\tcrown, crownwork, jacket, jacket crown, cap\nn03139464\tcrown jewels\nn03139640\tcrown lens\nn03139998\tcrow's nest\nn03140126\tcrucible, melting pot\nn03140292\tcrucifix, rood, rood-tree\nn03140431\tcruet, 
crewet\nn03140546\tcruet-stand\nn03140652\tcruise control\nn03140771\tcruise missile\nn03140900\tcruiser\nn03141065\tcruiser, police cruiser, patrol car, police car, prowl car, squad car\nn03141327\tcruise ship, cruise liner\nn03141455\tcrupper\nn03141612\tcruse\nn03141702\tcrusher\nn03141823\tcrutch\nn03142099\tcryometer\nn03142205\tcryoscope\nn03142325\tcryostat\nn03142431\tcrypt\nn03142679\tcrystal, watch crystal, watch glass\nn03143400\tcrystal detector\nn03143572\tcrystal microphone\nn03143754\tcrystal oscillator, quartz oscillator\nn03144156\tcrystal set\nn03144873\tcubitiere\nn03144982\tcucking stool, ducking stool\nn03145147\tcuckoo clock\nn03145277\tcuddy\nn03145384\tcudgel\nn03145522\tcue, cue stick, pool cue, pool stick\nn03145719\tcue ball\nn03145843\tcuff, turnup\nn03146219\tcuirass\nn03146342\tcuisse\nn03146449\tcul, cul de sac, dead end\nn03146560\tculdoscope\nn03146687\tcullis\nn03146777\tculotte\nn03146846\tcultivator, tiller\nn03147084\tculverin\nn03147156\tculverin\nn03147280\tculvert\nn03147509\tcup\nn03148324\tcupboard, closet\nn03148518\tcup hook\nn03148727\tcupola\nn03148808\tcupola\nn03149135\tcurb, curb bit\nn03149401\tcurb roof\nn03149686\tcurbstone, kerbstone\nn03149810\tcurette, curet\nn03150232\tcurler, hair curler, roller, crimper\nn03150511\tcurling iron\nn03150661\tcurrycomb\nn03150795\tcursor, pointer\nn03151077\tcurtain, drape, drapery, mantle, pall\nn03152303\tcustomhouse, customshouse\nn03152951\tcutaway, cutaway drawing, cutaway model\nn03153246\tcutlas, cutlass\nn03153585\tcutoff\nn03153948\tcutout\nn03154073\tcutter, cutlery, cutting tool\nn03154316\tcutter\nn03154446\tcutting implement\nn03154616\tcutting room\nn03154745\tcutty stool\nn03154895\tcutwork\nn03155178\tcybercafe\nn03155502\tcyclopean masonry\nn03155915\tcyclostyle\nn03156071\tcyclotron\nn03156279\tcylinder\nn03156405\tcylinder, piston chamber\nn03156767\tcylinder lock\nn03157348\tcymbal\nn03158186\tdacha\nn03158414\tDacron, 
Terylene\nn03158668\tdado\nn03158796\tdado plane\nn03158885\tdagger, sticker\nn03159535\tdairy, dairy farm\nn03159640\tdais, podium, pulpit, rostrum, ambo, stump, soapbox\nn03160001\tdaisy print wheel, daisy wheel\nn03160186\tdaisywheel printer\nn03160309\tdam, dike, dyke\nn03160740\tdamask\nn03161016\tdampener, moistener\nn03161450\tdamper, muffler\nn03161893\tdamper block, piano damper\nn03162297\tdark lantern, bull's-eye\nn03162460\tdarkroom\nn03162556\tdarning needle, embroidery needle\nn03162714\tdart\nn03162818\tdart\nn03163222\tdashboard, fascia\nn03163381\tdashiki, daishiki\nn03163488\tdash-pot\nn03163798\tdata converter\nn03163973\tdata input device, input device\nn03164192\tdata multiplexer\nn03164344\tdata system, information system\nn03164605\tdavenport\nn03164722\tdavenport\nn03164929\tdavit\nn03165096\tdaybed, divan bed\nn03165211\tdaybook, ledger\nn03165466\tday nursery, day care center\nn03165616\tday school\nn03165823\tdead axle\nn03165955\tdeadeye\nn03166120\tdeadhead\nn03166514\tdeanery\nn03166600\tdeathbed\nn03166685\tdeath camp\nn03166809\tdeath house, death row\nn03166951\tdeath knell, death bell\nn03167153\tdeath seat\nn03167978\tdeck\nn03168107\tdeck\nn03168217\tdeck chair, beach chair\nn03168543\tdeck-house\nn03168663\tdeckle\nn03168774\tdeckle edge, deckle\nn03168933\tdeclinometer, transit declinometer\nn03169063\tdecoder\nn03169176\tdecolletage\nn03170292\tdecoupage\nn03170459\tdedicated file server\nn03170635\tdeep-freeze, Deepfreeze, deep freezer, freezer\nn03170872\tdeerstalker\nn03171228\tdefense system, defence system\nn03171356\tdefensive structure, defense, defence\nn03171635\tdefibrillator\nn03171910\tdefilade\nn03172038\tdeflector\nn03172738\tdelayed action\nn03172965\tdelay line\nn03173270\tdelft\nn03173387\tdelicatessen, deli, food shop\nn03173929\tdelivery truck, delivery van, panel truck\nn03174079\tdelta wing\nn03174450\tdemijohn\nn03174731\tdemitasse\nn03175081\tden\nn03175189\tdenim, dungaree, jean\nn03175301\tdensimeter, 
densitometer\nn03175457\tdensitometer\nn03175604\tdental appliance\nn03175843\tdental floss, floss\nn03175983\tdental implant\nn03176238\tdentist's drill, burr drill\nn03176386\tdenture, dental plate, plate\nn03176594\tdeodorant, deodourant\nn03176763\tdepartment store, emporium\nn03177059\tdeparture lounge\nn03177165\tdepilatory, depilator, epilator\nn03177708\tdepressor\nn03178000\tdepth finder\nn03178173\tdepth gauge, depth gage\nn03178430\tderrick\nn03178538\tderrick\nn03178674\tderringer\nn03179701\tdesk\nn03179910\tdesk phone\nn03180011\tdesktop computer\nn03180384\tdessert spoon\nn03180504\tdestroyer, guided missile destroyer\nn03180732\tdestroyer escort\nn03180865\tdetached house, single dwelling\nn03180969\tdetector, sensor, sensing element\nn03181293\tdetector\nn03181667\tdetention home, detention house, house of detention, detention camp\nn03182140\tdetonating fuse\nn03182232\tdetonator, detonating device, cap\nn03182912\tdeveloper\nn03183080\tdevice\nn03185868\tDewar flask, Dewar\nn03186199\tdhoti\nn03186285\tdhow\nn03186818\tdial, telephone dial\nn03187037\tdial\nn03187153\tdial\nn03187268\tdialog box, panel\nn03187595\tdial telephone, dial phone\nn03187751\tdialyzer, dialysis machine\nn03188290\tdiamante\nn03188531\tdiaper, nappy, napkin\nn03188725\tdiaper\nn03188871\tdiaphone\nn03189083\tdiaphragm, stop\nn03189311\tdiaphragm\nn03189818\tdiathermy machine\nn03190458\tdibble, dibber\nn03191286\tdice cup, dice box\nn03191451\tdicer\nn03191561\tdickey, dickie, dicky, shirtfront\nn03191776\tdickey, dickie, dicky, dickey-seat, dickie-seat, dicky-seat\nn03192543\tDictaphone\nn03192907\tdie\nn03193107\tdiesel, diesel engine, diesel motor\nn03193260\tdiesel-electric locomotive, diesel-electric\nn03193423\tdiesel-hydraulic locomotive, diesel-hydraulic\nn03193597\tdiesel locomotive\nn03193754\tdiestock\nn03194170\tdifferential analyzer\nn03194297\tdifferential gear, differential\nn03194812\tdiffuser, diffusor\nn03194992\tdiffuser, 
diffusor\nn03195332\tdigester\nn03195485\tdiggings, digs, domiciliation, lodgings, pad\nn03195799\tdigital-analog converter, digital-to-analog converter\nn03195959\tdigital audiotape, DAT\nn03196062\tdigital camera\nn03196217\tdigital clock\nn03196324\tdigital computer\nn03196598\tdigital display, alphanumeric display\nn03196990\tdigital subscriber line, DSL\nn03197201\tdigital voltmeter\nn03197337\tdigital watch\nn03197446\tdigitizer, digitiser, analog-digital converter, analog-to-digital converter\nn03198223\tdilator, dilater\nn03198500\tdildo\nn03199358\tdimity\nn03199488\tdimmer\nn03199647\tdiner\nn03199775\tdinette\nn03199901\tdinghy, dory, rowboat\nn03200231\tdining area\nn03200357\tdining car, diner, dining compartment, buffet car\nn03200539\tdining-hall\nn03200701\tdining room, dining-room\nn03200906\tdining-room furniture\nn03201035\tdining-room table\nn03201208\tdining table, board\nn03201529\tdinner bell\nn03201638\tdinner dress, dinner gown, formal, evening gown\nn03201776\tdinner jacket, tux, tuxedo, black tie\nn03201895\tdinner napkin\nn03201996\tdinner pail, dinner bucket\nn03202354\tdinner table\nn03202481\tdinner theater, dinner theatre\nn03202760\tdiode, semiconductor diode, junction rectifier, crystal rectifier\nn03202940\tdiode, rectifying tube, rectifying valve\nn03203089\tdip\nn03203806\tdiplomatic building\nn03204134\tdipole, dipole antenna\nn03204306\tdipper\nn03204436\tdipstick\nn03204558\tDIP switch, dual inline package switch\nn03204955\tdirectional antenna\nn03205143\tdirectional microphone\nn03205304\tdirection finder\nn03205458\tdirk\nn03205574\tdirndl\nn03205669\tdirndl\nn03205903\tdirty bomb\nn03206023\tdischarge lamp\nn03206158\tdischarge pipe\nn03206282\tdisco, discotheque\nn03206405\tdiscount house, discount store, discounter, wholesale house\nn03206602\tdiscus, saucer\nn03206718\tdisguise\nn03206908\tdish\nn03207305\tdish, dish aerial, dish antenna, saucer\nn03207548\tdishpan\nn03207630\tdish rack\nn03207743\tdishrag, 
dishcloth\nn03207835\tdishtowel, dish towel, tea towel\nn03207941\tdishwasher, dish washer, dishwashing machine\nn03208556\tdisk, disc\nn03208938\tdisk brake, disc brake\nn03209359\tdisk clutch\nn03209477\tdisk controller\nn03209666\tdisk drive, disc drive, hard drive, Winchester drive\nn03209910\tdiskette, floppy, floppy disk\nn03210245\tdisk harrow, disc harrow\nn03210372\tdispatch case, dispatch box\nn03210552\tdispensary\nn03210683\tdispenser\nn03211117\tdisplay, video display\nn03211413\tdisplay adapter, display adaptor\nn03211616\tdisplay panel, display board, board\nn03211789\tdisplay window, shop window, shopwindow, show window\nn03212114\tdisposal, electric pig, garbage disposal\nn03212247\tdisrupting explosive, bursting explosive\nn03212406\tdistaff\nn03212811\tdistillery, still\nn03213014\tdistributor, distributer, electrical distributor\nn03213361\tdistributor cam\nn03213538\tdistributor cap\nn03213715\tdistributor housing\nn03213826\tdistributor point, breaker point, point\nn03214253\tditch\nn03214450\tditch spade, long-handled spade\nn03214582\tditty bag\nn03214966\tdivan\nn03215076\tdivan, diwan\nn03215191\tdive bomber\nn03215337\tdiverging lens, concave lens\nn03215508\tdivided highway, dual carriageway\nn03215749\tdivider\nn03215930\tdiving bell\nn03216199\tdivining rod, dowser, dowsing rod, waterfinder, water finder\nn03216402\tdiving suit, diving dress\nn03216562\tdixie\nn03216710\tDixie cup, paper cup\nn03216828\tdock, dockage, docking facility\nn03217653\tdoeskin\nn03217739\tdogcart\nn03217889\tdoggie bag, doggy bag\nn03218198\tdogsled, dog sled, dog sleigh\nn03218446\tdog wrench\nn03219010\tdoily, doyley, doyly\nn03219135\tdoll, dolly\nn03219483\tdollhouse, doll's house\nn03219612\tdolly\nn03219859\tdolman\nn03219966\tdolman, dolman jacket\nn03220095\tdolman sleeve\nn03220237\tdolmen, cromlech, portal tomb\nn03220513\tdome\nn03220692\tdome, domed stadium, covered stadium\nn03221059\tdomino, half mask, eye 
mask\nn03221351\tdongle\nn03221540\tdonkey jacket\nn03221720\tdoor\nn03222176\tdoor\nn03222318\tdoor\nn03222516\tdoorbell, bell, buzzer\nn03222722\tdoorframe, doorcase\nn03222857\tdoorjamb, doorpost\nn03223162\tdoorlock\nn03223299\tdoormat, welcome mat\nn03223441\tdoornail\nn03223553\tdoorplate\nn03223686\tdoorsill, doorstep, threshold\nn03223923\tdoorstop, doorstopper\nn03224490\tDoppler radar\nn03224603\tdormer, dormer window\nn03224753\tdormer window\nn03224893\tdormitory, dorm, residence hall, hall, student residence\nn03225108\tdormitory, dormitory room, dorm room\nn03225458\tdosemeter, dosimeter\nn03225616\tdossal, dossel\nn03225777\tdot matrix printer, matrix printer, dot printer\nn03225988\tdouble bed\nn03226090\tdouble-bitted ax, double-bitted axe, Western ax, Western axe\nn03226254\tdouble boiler, double saucepan\nn03226375\tdouble-breasted jacket\nn03226538\tdouble-breasted suit\nn03226880\tdouble door\nn03227010\tdouble glazing\nn03227184\tdouble-hung window\nn03227317\tdouble knit\nn03227721\tdoubler\nn03227856\tdouble reed\nn03228016\tdouble-reed instrument, double reed\nn03228254\tdoublet\nn03228365\tdoubletree\nn03228533\tdouche, douche bag\nn03228692\tdovecote, columbarium, columbary\nn03228796\tDover's powder\nn03228967\tdovetail, dovetail joint\nn03229115\tdovetail plane\nn03229244\tdowel, dowel pin, joggle\nn03229526\tdownstage\nn03231160\tdrafting instrument\nn03231368\tdrafting table, drawing table\nn03231819\tDragunov\nn03232309\tdrainage ditch\nn03232417\tdrainage system\nn03232543\tdrain basket\nn03232815\tdrainplug\nn03232923\tdrape\nn03233123\tdrapery\nn03233624\tdrawbar\nn03233744\tdrawbridge, lift bridge\nn03233905\tdrawer\nn03234164\tdrawers, underdrawers, shorts, boxers, boxershorts\nn03234952\tdrawing chalk\nn03235042\tdrawing room, withdrawing room\nn03235180\tdrawing room\nn03235327\tdrawknife, drawshave\nn03235796\tdrawstring bag\nn03235979\tdray, camion\nn03236093\tdreadnought, 
dreadnaught\nn03236217\tdredge\nn03236423\tdredger\nn03236580\tdredging bucket\nn03236735\tdress, frock\nn03237212\tdress blues, dress whites\nn03237340\tdresser\nn03237416\tdress hat, high hat, opera hat, silk hat, stovepipe, top hat, topper, beaver\nn03237639\tdressing, medical dressing\nn03237839\tdressing case\nn03237992\tdressing gown, robe-de-chambre, lounging robe\nn03238131\tdressing room\nn03238286\tdressing sack, dressing sacque\nn03238586\tdressing table, dresser, vanity, toilet table\nn03238762\tdress rack\nn03238879\tdress shirt, evening shirt\nn03239054\tdress suit, full dress, tailcoat, tail coat, tails, white tie, white tie and tails\nn03239259\tdress uniform\nn03239607\tdrift net\nn03239726\tdrill\nn03240140\telectric drill\nn03240683\tdrilling platform, offshore rig\nn03240892\tdrill press\nn03241093\tdrill rig, drilling rig, oilrig, oil rig\nn03241335\tdrinking fountain, water fountain, bubbler\nn03241496\tdrinking vessel\nn03241903\tdrip loop\nn03242120\tdrip mat\nn03242264\tdrip pan\nn03242390\tdripping pan, drip pan\nn03242506\tdrip pot\nn03242995\tdrive\nn03243218\tdrive\nn03243625\tdrive line, drive line system\nn03244047\tdriver, number one wood\nn03244231\tdriveshaft\nn03244388\tdriveway, drive, private road\nn03244775\tdriving iron, one iron\nn03244919\tdriving wheel\nn03245271\tdrogue, drogue chute, drogue parachute\nn03245421\tdrogue parachute\nn03245724\tdrone, drone pipe, bourdon\nn03245889\tdrone, pilotless aircraft, radio-controlled aircraft\nn03246197\tdrop arch\nn03246312\tdrop cloth\nn03246454\tdrop curtain, drop cloth, drop\nn03246653\tdrop forge, drop hammer, drop press\nn03246933\tdrop-leaf table\nn03247083\tdropper, eye dropper\nn03247351\tdroshky, drosky\nn03247495\tdrove, drove chisel\nn03248835\tdrugget\nn03249342\tdrugstore, apothecary's shop, chemist's, chemist's shop, pharmacy\nn03249569\tdrum, membranophone, tympan\nn03249956\tdrum, metal drum\nn03250089\tdrum brake\nn03250279\tdrumhead, head\nn03250405\tdrum 
printer\nn03250588\tdrum sander, electric sander, sander, smoother\nn03250847\tdrumstick\nn03250952\tdry battery\nn03251100\tdry-bulb thermometer\nn03251280\tdry cell\nn03251533\tdry dock, drydock, graving dock\nn03251766\tdryer, drier\nn03251932\tdry fly\nn03252231\tdry kiln\nn03252324\tdry masonry\nn03252422\tdry point\nn03252637\tdry wall, dry-stone wall\nn03252787\tdual scan display\nn03253071\tduck\nn03253187\tduckboard\nn03253279\tduckpin\nn03253714\tdudeen\nn03253796\tduffel, duffle\nn03253886\tduffel bag, duffle bag, duffel, duffle\nn03254046\tduffel coat, duffle coat\nn03254189\tdugout\nn03254374\tdugout canoe, dugout, pirogue\nn03254625\tdulciana\nn03254737\tdulcimer\nn03254862\tdulcimer\nn03255030\tdumbbell\nn03255167\tdumb bomb, gravity bomb\nn03255322\tdumbwaiter, food elevator\nn03255488\tdumdum, dumdum bullet\nn03255899\tdumpcart\nn03256032\tDumpster\nn03256166\tdump truck, dumper, tipper truck, tipper lorry, tip truck, tipper\nn03256472\tDumpy level\nn03256631\tdunce cap, dunce's cap, fool's cap\nn03256788\tdune buggy, beach buggy\nn03256928\tdungeon\nn03257065\tduplex apartment, duplex\nn03257210\tduplex house, duplex, semidetached house\nn03257586\tduplicator, copier\nn03258192\tdust bag, vacuum bag\nn03258330\tdustcloth, dustrag, duster\nn03258456\tdust cover\nn03258577\tdust cover, dust sheet\nn03258905\tdustmop, dust mop, dry mop\nn03259009\tdustpan\nn03259280\tDutch oven\nn03259401\tDutch oven\nn03259505\tdwelling, home, domicile, abode, habitation, dwelling house\nn03260206\tdye-works\nn03260504\tdynamo\nn03260733\tdynamometer, ergometer\nn03260849\tEames chair\nn03261019\tearflap, earlap\nn03261263\tearly warning radar\nn03261395\tearly warning system\nn03261603\tearmuff\nn03261776\tearphone, earpiece, headphone, phone\nn03262072\tearplug\nn03262248\tearplug\nn03262519\tearthenware\nn03262717\tearthwork\nn03262809\teasel\nn03262932\teasy chair, lounge chair, overstuffed chair\nn03263076\teaves\nn03263338\tecclesiastical attire, 
ecclesiastical robe\nn03263640\techinus\nn03263758\techocardiograph\nn03264906\tedger\nn03265032\tedge tool\nn03265754\tefficiency apartment\nn03266195\tegg-and-dart, egg-and-anchor, egg-and-tongue\nn03266371\teggbeater, eggwhisk\nn03266620\tegg timer\nn03266749\teiderdown, duvet, continental quilt\nn03267113\teight ball\nn03267468\tejection seat, ejector seat, capsule\nn03267696\telastic\nn03267821\telastic bandage\nn03268142\tElastoplast\nn03268311\telbow\nn03268645\telbow pad\nn03268790\telectric, electric automobile, electric car\nn03268918\telectrical cable\nn03269073\telectrical contact\nn03269203\telectrical converter\nn03269401\telectrical device\nn03270165\telectrical system\nn03270695\telectric bell\nn03270854\telectric blanket\nn03271030\telectric chair, chair, death chair, hot seat\nn03271260\telectric clock\nn03271376\telectric-discharge lamp, gas-discharge lamp\nn03271574\telectric fan, blower\nn03271765\telectric frying pan\nn03271865\telectric furnace\nn03272010\telectric guitar\nn03272125\telectric hammer\nn03272239\telectric heater, electric fire\nn03272383\telectric lamp\nn03272562\telectric locomotive\nn03272810\telectric meter, power meter\nn03272940\telectric mixer\nn03273061\telectric motor\nn03273551\telectric organ, electronic organ, Hammond organ, organ\nn03273740\telectric range\nn03273913\telectric refrigerator, fridge\nn03274265\telectric toothbrush\nn03274435\telectric typewriter\nn03274561\telectro-acoustic transducer\nn03274796\telectrode\nn03275125\telectrodynamometer\nn03275311\telectroencephalograph\nn03275566\telectrograph\nn03275681\telectrolytic, electrolytic capacitor, electrolytic condenser\nn03275864\telectrolytic cell\nn03276179\telectromagnet\nn03276696\telectrometer\nn03276839\telectromyograph\nn03277004\telectron accelerator\nn03277149\telectron gun\nn03277459\telectronic balance\nn03277602\telectronic converter\nn03277771\telectronic device\nn03278248\telectronic equipment\nn03278914\telectronic fetal monitor, 
electronic foetal monitor, fetal monitor, foetal monitor\nn03279153\telectronic instrument, electronic musical instrument\nn03279364\telectronic voltmeter\nn03279508\telectron microscope\nn03279804\telectron multiplier\nn03279918\telectrophorus\nn03280216\telectroscope\nn03280394\telectrostatic generator, electrostatic machine, Wimshurst machine, Van de Graaff generator\nn03280644\telectrostatic printer\nn03281145\televator, lift\nn03281524\televator\nn03281673\televator shaft\nn03282060\tembankment\nn03282295\tembassy\nn03282401\tembellishment\nn03283221\temergency room, ER\nn03283413\temesis basin\nn03283827\temitter\nn03284308\tempty\nn03284482\temulsion, photographic emulsion\nn03284743\tenamel\nn03284886\tenamel\nn03284981\tenamelware\nn03285578\tencaustic\nn03285730\tencephalogram, pneumoencephalogram\nn03285912\tenclosure\nn03286572\tendoscope\nn03287351\tenergizer, energiser\nn03287733\tengine\nn03288003\tengine\nn03288500\tengineering, engine room\nn03288643\tenginery\nn03288742\tEnglish horn, cor anglais\nn03288886\tEnglish saddle, English cavalry saddle\nn03289660\tenlarger\nn03289985\tensemble\nn03290096\tensign\nn03290195\tentablature\nn03290653\tentertainment center\nn03291413\tentrenching tool, trenching spade\nn03291551\tentrenchment, intrenchment\nn03291741\tenvelope\nn03291819\tenvelope\nn03291963\tenvelope, gasbag\nn03292085\teolith\nn03292362\tepauliere\nn03292475\tepee\nn03292603\tepergne\nn03292736\tepicyclic train, epicyclic gear train\nn03292960\tepidiascope\nn03293095\tepilating wax\nn03293741\tequalizer, equaliser\nn03293863\tequatorial\nn03294048\tequipment\nn03294604\terasable programmable read-only memory, EPROM\nn03294833\teraser\nn03295012\terecting prism\nn03295140\terection\nn03295246\tErlenmeyer flask\nn03295928\tescape hatch\nn03296081\tescapement\nn03296217\tescape wheel\nn03296328\tescarpment, escarp, scarp, protective embankment\nn03296478\tescutcheon, scutcheon\nn03296963\tesophagoscope, 
oesophagoscope\nn03297103\tespadrille\nn03297226\tespalier\nn03297495\tespresso maker\nn03297644\tespresso shop\nn03297735\testablishment\nn03298089\testaminet\nn03298352\testradiol patch\nn03298716\tetagere\nn03298858\tetamine, etamin\nn03299406\tetching\nn03300216\tethernet\nn03300443\tethernet cable\nn03301175\tEton jacket\nn03301291\tetui\nn03301389\teudiometer\nn03301568\teuphonium\nn03301833\tevaporative cooler\nn03301940\tevening bag\nn03302671\texercise bike, exercycle\nn03302790\texercise device\nn03302938\texhaust, exhaust system\nn03303217\texhaust fan\nn03303669\texhaust valve\nn03303831\texhibition hall, exhibition area\nn03304197\tExocet\nn03304323\texpansion bit, expansive bit\nn03304465\texpansion bolt\nn03305300\texplosive detection system, EDS\nn03305522\texplosive device\nn03305953\texplosive trace detection, ETD\nn03306385\texpress, limited\nn03306869\textension, telephone extension, extension phone\nn03307037\textension cord\nn03307573\texternal-combustion engine\nn03307792\texternal drive\nn03308152\textractor\nn03308481\teyebrow pencil\nn03308614\teyecup, eyebath, eye cup\nn03309110\teyeliner\nn03309356\teyepatch, patch\nn03309465\teyepiece, ocular\nn03309687\teyeshadow\nn03309808\tfabric, cloth, material, textile\nn03313333\tfacade, frontage, frontal\nn03314227\tface guard\nn03314378\tface mask\nn03314608\tfaceplate\nn03314780\tface powder\nn03314884\tface veil\nn03315644\tfacing, cladding\nn03315805\tfacing\nn03315990\tfacing, veneer\nn03316105\tfacsimile, facsimile machine, fax\nn03316406\tfactory, mill, manufacturing plant, manufactory\nn03316873\tfactory ship\nn03317233\tfagot, faggot\nn03317510\tfagot stitch, faggot stitch\nn03317673\tFahrenheit thermometer\nn03317788\tfaience\nn03317889\tfaille\nn03318136\tfairlead\nn03318294\tfairy light\nn03318865\tfalchion\nn03318983\tfallboard, fall-board\nn03319167\tfallout shelter\nn03319457\tfalse face\nn03319576\tfalse teeth\nn03319745\tfamily room\nn03320046\tfan\nn03320262\tfan 
belt\nn03320421\tfan blade\nn03320519\tfancy dress, masquerade, masquerade costume\nn03320845\tfanion\nn03320959\tfanlight\nn03321103\tfanjet, fan-jet, fanjet engine, turbojet, turbojet engine, turbofan, turbofan engine\nn03321419\tfanjet, fan-jet, turbofan, turbojet\nn03321563\tfanny pack, butt pack\nn03321843\tfan tracery\nn03321954\tfan vaulting\nn03322570\tfarm building\nn03322704\tfarmer's market, green market, greenmarket\nn03322836\tfarmhouse\nn03322940\tfarm machine\nn03323096\tfarmplace, farm-place, farmstead\nn03323211\tfarmyard\nn03323319\tfarthingale\nn03323703\tfastener, fastening, holdfast, fixing\nn03324629\tfast reactor\nn03324814\tfat farm\nn03324928\tfatigues\nn03325088\tfaucet, spigot\nn03325288\tfauld\nn03325403\tfauteuil\nn03325584\tfeather boa, boa\nn03325691\tfeatheredge\nn03325941\tfedora, felt hat, homburg, Stetson, trilby\nn03326073\tfeedback circuit, feedback loop\nn03326371\tfeedlot\nn03326475\tfell, felled seam\nn03326660\tfelloe, felly\nn03326795\tfelt\nn03326948\tfelt-tip pen, felt-tipped pen, felt tip, Magic Marker\nn03327133\tfelucca\nn03327234\tfence, fencing\nn03327553\tfencing mask, fencer's mask\nn03327691\tfencing sword\nn03327841\tfender, wing\nn03328201\tfender, buffer, cowcatcher, pilot\nn03329302\tFerris wheel\nn03329536\tferrule, collet\nn03329663\tferry, ferryboat\nn03330002\tferule\nn03330665\tfestoon\nn03330792\tfetoscope, foetoscope\nn03330947\tfetter, hobble\nn03331077\tfez, tarboosh\nn03331244\tfiber, fibre, vulcanized fiber\nn03331599\tfiber optic cable, fibre optic cable\nn03332005\tfiberscope\nn03332173\tfichu\nn03332271\tfiddlestick, violin bow\nn03332393\tfield artillery, field gun\nn03332591\tfield coil, field winding\nn03332784\tfield-effect transistor, FET\nn03332989\tfield-emission microscope\nn03333129\tfield glass, glass, spyglass\nn03333252\tfield hockey ball\nn03333349\tfield hospital\nn03333610\tfield house, sports arena\nn03333711\tfield lens\nn03333851\tfield magnet\nn03334017\tfield-sequential color 
television, field-sequential color TV, field-sequential color television system, field-sequential color TV system\nn03334291\tfield tent\nn03334382\tfieldwork\nn03334492\tfife\nn03334912\tfifth wheel, spare\nn03335030\tfighter, fighter aircraft, attack aircraft\nn03335333\tfighting chair\nn03335461\tfig leaf\nn03335846\tfigure eight, figure of eight\nn03336168\tfigure loom, figured-fabric loom\nn03336282\tfigure skate\nn03336575\tfilament\nn03336742\tfilature\nn03336839\tfile\nn03337140\tfile, file cabinet, filing cabinet\nn03337383\tfile folder\nn03337494\tfile server\nn03337822\tfiligree, filagree, fillagree\nn03338287\tfilling\nn03338821\tfilm, photographic film\nn03339296\tfilm, plastic film\nn03339529\tfilm advance\nn03339643\tfilter\nn03340009\tfilter\nn03340723\tfinder, viewfinder, view finder\nn03340923\tfinery\nn03341035\tfine-tooth comb, fine-toothed comb\nn03341153\tfinger\nn03341297\tfingerboard\nn03341606\tfinger bowl\nn03342015\tfinger paint, fingerpaint\nn03342127\tfinger-painting\nn03342262\tfinger plate, escutcheon, scutcheon\nn03342432\tfingerstall, cot\nn03342657\tfinish coat, finishing coat\nn03342863\tfinish coat, finishing coat\nn03342961\tfinisher\nn03343047\tfin keel\nn03343234\tfipple\nn03343354\tfipple flute, fipple pipe, recorder, vertical flute\nn03343560\tfire\nn03343737\tfire alarm, smoke alarm\nn03343853\tfirearm, piece, small-arm\nn03344305\tfire bell\nn03344393\tfireboat\nn03344509\tfirebox\nn03344642\tfirebrick\nn03344784\tfire control radar\nn03344935\tfire control system\nn03345487\tfire engine, fire truck\nn03345837\tfire extinguisher, extinguisher, asphyxiator\nn03346135\tfire iron\nn03346289\tfireman's ax, fireman's axe\nn03346455\tfireplace, hearth, open fireplace\nn03347037\tfire screen, fireguard\nn03347472\tfire tongs, coal tongs\nn03347617\tfire tower\nn03348142\tfirewall\nn03348868\tfiring chamber, gun chamber\nn03349020\tfiring pin\nn03349296\tfirkin\nn03349367\tfirmer chisel\nn03349469\tfirst-aid 
kit\nn03349599\tfirst-aid station\nn03349771\tfirst base\nn03349892\tfirst class\nn03350204\tfishbowl, fish bowl, goldfish bowl\nn03350352\tfisherman's bend\nn03350456\tfisherman's knot, true lover's knot, truelove knot\nn03350602\tfisherman's lure, fish lure\nn03351151\tfishhook\nn03351262\tfishing boat, fishing smack, fishing vessel\nn03351434\tfishing gear, tackle, fishing tackle, fishing rig, rig\nn03351979\tfishing rod, fishing pole\nn03352232\tfish joint\nn03352366\tfish knife\nn03352628\tfishnet, fishing net\nn03352961\tfish slice\nn03353281\tfitment\nn03353951\tfixative\nn03354207\tfixer-upper\nn03354903\tflag\nn03355468\tflageolet, treble recorder, shepherd's pipe\nn03355768\tflagon\nn03355925\tflagpole, flagstaff\nn03356038\tflagship\nn03356279\tflail\nn03356446\tflambeau\nn03356559\tflamethrower\nn03356858\tflange, rim\nn03356982\tflannel\nn03357081\tflannel, gabardine, tweed, white\nn03357267\tflannelette\nn03357716\tflap, flaps\nn03358172\tflash, photoflash, flash lamp, flashgun, flashbulb, flash bulb\nn03358380\tflash\nn03358726\tflash camera\nn03358841\tflasher\nn03359137\tflashlight, torch\nn03359285\tflashlight battery\nn03359436\tflash memory\nn03359566\tflask\nn03360133\tflat arch, straight arch\nn03360300\tflatbed\nn03360431\tflatbed press, cylinder press\nn03360622\tflat bench\nn03360731\tflatcar, flatbed, flat\nn03361109\tflat file\nn03361297\tflatlet\nn03361380\tflat panel display, FPD\nn03361550\tflats\nn03361683\tflat tip screwdriver\nn03362639\tfleece\nn03362771\tfleet ballistic missile submarine\nn03362890\tfleur-de-lis, fleur-de-lys\nn03363363\tflight simulator, trainer\nn03363549\tflintlock\nn03363749\tflintlock, firelock\nn03364008\tflip-flop, thong\nn03364156\tflipper, fin\nn03364599\tfloat, plasterer's float\nn03364937\tfloating dock, floating dry dock\nn03365231\tfloatplane, pontoon plane\nn03365374\tflood, floodlight, flood lamp, photoflood\nn03365592\tfloor, flooring\nn03365991\tfloor, level, storey, 
story\nn03366464\tfloor\nn03366721\tfloorboard\nn03366823\tfloor cover, floor covering\nn03366974\tfloor joist\nn03367059\tfloor lamp\nn03367321\tflophouse, dosshouse\nn03367410\tflorist, florist shop, flower store\nn03367545\tfloss\nn03367875\tflotsam, jetsam\nn03367969\tflour bin\nn03368048\tflour mill\nn03368352\tflowerbed, flower bed, bed of flowers\nn03369276\tflugelhorn, fluegelhorn\nn03369407\tfluid drive\nn03369512\tfluid flywheel\nn03369866\tflume\nn03370387\tfluorescent lamp\nn03370646\tfluoroscope, roentgenoscope\nn03371875\tflush toilet, lavatory\nn03372029\tflute, transverse flute\nn03372549\tflute, flute glass, champagne flute\nn03372822\tflux applicator\nn03372933\tfluxmeter\nn03373237\tfly\nn03373611\tflying boat\nn03373943\tflying buttress, arc-boutant\nn03374102\tflying carpet\nn03374282\tflying jib\nn03374372\tfly rod\nn03374473\tfly tent\nn03374570\tflytrap\nn03374649\tflywheel\nn03374838\tfob, watch chain, watch guard\nn03375171\tfoghorn\nn03375329\tfoglamp\nn03375575\tfoil\nn03376159\tfold, sheepfold, sheep pen, sheepcote\nn03376279\tfolder\nn03376595\tfolding chair\nn03376771\tfolding door, accordion door\nn03376938\tfolding saw\nn03378005\tfood court\nn03378174\tfood processor\nn03378342\tfood hamper\nn03378442\tfoot\nn03378593\tfootage\nn03378765\tfootball\nn03379051\tfootball helmet\nn03379204\tfootball stadium\nn03379343\tfootbath\nn03379719\tfoot brake\nn03379828\tfootbridge, overcrossing, pedestrian bridge\nn03379989\tfoothold, footing\nn03380301\tfootlocker, locker\nn03380647\tfoot rule\nn03380724\tfootstool, footrest, ottoman, tuffet\nn03380867\tfootwear, footgear\nn03381126\tfootwear\nn03381231\tforceps\nn03381450\tforce pump\nn03381565\tfore-and-after\nn03381776\tfore-and-aft sail\nn03382104\tforecastle, fo'c'sle\nn03382292\tforecourt\nn03382413\tforedeck\nn03382533\tfore edge, foredge\nn03382708\tforeground\nn03382856\tforemast\nn03382969\tfore 
plane\nn03383099\tforesail\nn03383211\tforestay\nn03383378\tforetop\nn03383468\tfore-topmast\nn03383562\tfore-topsail\nn03383821\tforge\nn03384167\tfork\nn03384352\tforklift\nn03384891\tformalwear, eveningwear, evening dress, evening clothes\nn03385295\tFormica\nn03385557\tfortification, munition\nn03386011\tfortress, fort\nn03386343\tforty-five\nn03386544\tFoucault pendulum\nn03386726\tfoulard\nn03386870\tfoul-weather gear\nn03387323\tfoundation garment, foundation\nn03387653\tfoundry, metalworks\nn03388043\tfountain\nn03388183\tfountain pen\nn03388323\tfour-in-hand\nn03388549\tfour-poster\nn03388711\tfour-pounder\nn03388990\tfour-stroke engine, four-stroke internal-combustion engine\nn03389611\tfour-wheel drive, 4WD\nn03389761\tfour-wheel drive, 4WD\nn03389889\tfour-wheeler\nn03389983\tfowling piece\nn03390075\tfoxhole, fox hole\nn03390327\tfragmentation bomb, antipersonnel bomb, anti-personnel bomb, daisy cutter\nn03390673\tfrail\nn03390786\tfraise\nn03390983\tframe, framing\nn03391301\tframe\nn03391613\tframe buffer\nn03391770\tframework\nn03392648\tFrancis turbine\nn03392741\tfranking machine\nn03393017\tfree house\nn03393199\tfree-reed\nn03393324\tfree-reed instrument\nn03393761\tfreewheel\nn03393912\tfreight car\nn03394149\tfreight elevator, service elevator\nn03394272\tfreight liner, liner train\nn03394480\tfreight train, rattler\nn03394649\tFrench door\nn03394916\tFrench horn, horn\nn03395256\tFrench polish, French polish shellac\nn03395401\tFrench roof\nn03395514\tFrench window\nn03395859\tFresnel lens\nn03396074\tfret\nn03396580\tfriary\nn03396654\tfriction clutch\nn03396997\tfrieze\nn03397087\tfrieze\nn03397266\tfrigate\nn03397412\tfrigate\nn03397532\tfrill, flounce, ruffle, furbelow\nn03397947\tFrisbee\nn03398153\tfrock\nn03398228\tfrock coat\nn03399579\tfrontlet, frontal\nn03399677\tfront porch\nn03399761\tfront projector\nn03399971\tfruit machine\nn03400231\tfrying pan, frypan, skillet\nn03400972\tfuel filter\nn03401129\tfuel gauge, fuel 
indicator\nn03401279\tfuel injection, fuel injection system\nn03401721\tfuel system\nn03402188\tfull-dress uniform\nn03402369\tfull metal jacket\nn03402511\tfull skirt\nn03402785\tfumigator\nn03402941\tfuneral home, funeral parlor, funeral parlour, funeral chapel, funeral church, funeral-residence\nn03403643\tfunnel\nn03404012\tfunny wagon\nn03404149\tfur\nn03404251\tfur coat\nn03404360\tfur hat\nn03404449\tfurnace\nn03404900\tfurnace lining, refractory\nn03405111\tfurnace room\nn03405265\tfurnishing\nn03405595\tfurnishing, trappings\nn03405725\tfurniture, piece of furniture, article of furniture\nn03406759\tfur-piece\nn03406966\tfurrow\nn03407369\tfuse, electrical fuse, safety fuse\nn03407865\tfusee drive, fusee\nn03408054\tfuselage\nn03408264\tfusil\nn03408340\tfustian\nn03408444\tfuton\nn03409297\tgabardine\nn03409393\tgable, gable end, gable wall\nn03409591\tgable roof, saddle roof, saddleback, saddleback roof\nn03409920\tgadgetry\nn03410022\tgaff\nn03410147\tgaff\nn03410303\tgaff\nn03410423\tgaffsail, gaff-headed sail\nn03410571\tgaff topsail, fore-and-aft topsail\nn03410740\tgag, muzzle\nn03410938\tgaiter\nn03411079\tgaiter\nn03411208\tGalilean telescope\nn03411339\tgalleon\nn03411927\tgallery\nn03412058\tgallery, art gallery, picture gallery\nn03412220\tgalley, ship's galley, caboose, cookhouse\nn03412387\tgalley\nn03412511\tgalley\nn03412906\tgallows\nn03413124\tgallows tree, gallows-tree, gibbet, gallous\nn03413264\tgalvanometer\nn03413428\tgambling house, gambling den, gambling hell, gaming house\nn03413684\tgambrel, gambrel roof\nn03413828\tgame\nn03414029\tgamebag\nn03414162\tgame equipment\nn03414676\tgaming table\nn03415252\tgamp, brolly\nn03415486\tgangplank, gangboard, gangway\nn03415626\tgangsaw\nn03415749\tgangway\nn03415868\tgantlet\nn03416094\tgantry, gauntry\nn03416489\tgarage\nn03416640\tgarage, service department\nn03416775\tGarand rifle, Garand, M-1, M-1 rifle\nn03416900\tgarbage\nn03417042\tgarbage truck, dustcart\nn03417202\tgarboard, 
garboard plank, garboard strake\nn03417345\tgarden\nn03417749\tgarden\nn03417970\tgarden rake\nn03418158\tgarden spade\nn03418242\tgarden tool, lawn tool\nn03418402\tgarden trowel\nn03418618\tgargoyle\nn03418749\tgaribaldi\nn03418915\tgarlic press\nn03419014\tgarment\nn03420345\tgarment bag\nn03420801\tgarrison cap, overseas cap\nn03420935\tgarrote, garotte, garrotte, iron collar\nn03421117\tgarter, supporter\nn03421324\tgarter belt, suspender belt\nn03421485\tgarter stitch\nn03421669\tgas guzzler\nn03421768\tgas shell\nn03421960\tgas bracket\nn03422072\tgas burner, gas jet\nn03422484\tgas-cooled reactor\nn03422589\tgas-discharge tube\nn03422771\tgas engine\nn03423099\tgas fixture\nn03423224\tgas furnace\nn03423306\tgas gun\nn03423479\tgas heater\nn03423568\tgas holder, gasometer\nn03423719\tgasket\nn03423877\tgas lamp\nn03424204\tgas maser\nn03424325\tgasmask, respirator, gas helmet\nn03424489\tgas meter, gasometer\nn03424630\tgasoline engine, petrol engine\nn03424862\tgasoline gauge, gasoline gage, gas gauge, gas gage, petrol gauge, petrol gage\nn03425241\tgas oven\nn03425325\tgas oven\nn03425413\tgas pump, gasoline pump, petrol pump, island dispenser\nn03425595\tgas range, gas stove, gas cooker\nn03425769\tgas ring\nn03426134\tgas tank, gasoline tank, petrol tank\nn03426285\tgas thermometer, air thermometer\nn03426462\tgastroscope\nn03426574\tgas turbine\nn03426871\tgas-turbine ship\nn03427202\tgat, rod\nn03427296\tgate\nn03428090\tgatehouse\nn03428226\tgateleg table\nn03428349\tgatepost\nn03429003\tgathered skirt\nn03429137\tGatling gun\nn03429288\tgauge, gage\nn03429682\tgauntlet, gantlet\nn03429771\tgauntlet, gantlet, metal glove\nn03429914\tgauze, netting, veiling\nn03430091\tgauze, gauze bandage\nn03430313\tgavel\nn03430418\tgazebo, summerhouse\nn03430551\tgear, gear wheel, geared wheel, cogwheel\nn03430959\tgear, paraphernalia, appurtenance\nn03431243\tgear, gear mechanism\nn03431570\tgearbox, gear box, gear case\nn03431745\tgearing, gear, geartrain, power 
train, train\nn03432061\tgearset\nn03432129\tgearshift, gearstick, shifter, gear lever\nn03432360\tGeiger counter, Geiger-Muller counter\nn03432509\tGeiger tube, Geiger-Muller tube\nn03433247\tgene chip, DNA chip\nn03433637\tgeneral-purpose bomb, GP bomb\nn03433877\tgenerator\nn03434188\tgenerator\nn03434285\tgenerator\nn03434830\tGeneva gown\nn03435593\tgeodesic dome\nn03435743\tgeorgette\nn03435991\tgharry\nn03436075\tghat\nn03436182\tghetto blaster, boom box\nn03436417\tgift shop, novelty shop\nn03436549\tgift wrapping\nn03436656\tgig\nn03436772\tgig\nn03436891\tgig\nn03436990\tgig\nn03437184\tgildhall\nn03437295\tgill net\nn03437430\tgilt, gilding\nn03437581\tgimbal\nn03437741\tgingham\nn03437829\tgirandole, girandola\nn03437941\tgirder\nn03438071\tgirdle, cincture, sash, waistband, waistcloth\nn03438257\tglass, drinking glass\nn03438661\tglass\nn03438780\tglass cutter\nn03438863\tglasses case\nn03439348\tglebe house\nn03439631\tGlengarry\nn03439814\tglider, sailplane\nn03440216\tGlobal Positioning System, GPS\nn03440682\tglockenspiel, orchestral bells\nn03440876\tglory hole, lazaretto\nn03441112\tglove\nn03441345\tglove compartment\nn03441465\tglow lamp\nn03441582\tglow tube\nn03442288\tglyptic art, glyptography\nn03442487\tglyptics, lithoglyptics\nn03442597\tgnomon\nn03442756\tgoal\nn03443005\tgoalmouth\nn03443149\tgoalpost\nn03443371\tgoblet\nn03443543\tgodown\nn03443912\tgoggles\nn03444034\tgo-kart\nn03445326\tgold plate\nn03445617\tgolf bag\nn03445777\tgolf ball\nn03445924\tgolfcart, golf cart\nn03446070\tgolf club, golf-club, club\nn03446268\tgolf-club head, club head, club-head, clubhead\nn03446832\tgolf equipment\nn03447075\tgolf glove\nn03447358\tgolliwog, golliwogg\nn03447447\tgondola\nn03447721\tgong, tam-tam\nn03447894\tgoniometer\nn03448031\tGordian knot\nn03448590\tgorget\nn03448696\tgossamer\nn03448956\tGothic arch\nn03449217\tgouache\nn03449309\tgouge\nn03449451\tgourd, calabash\nn03449564\tgovernment building\nn03449858\tgovernment 
office\nn03450230\tgown\nn03450516\tgown, robe\nn03450734\tgown, surgical gown, scrubs\nn03450881\tgrab\nn03450974\tgrab bag\nn03451120\tgrab bar\nn03451253\tgrace cup\nn03451365\tgrade separation\nn03451711\tgraduated cylinder\nn03451798\tgraffito, graffiti\nn03452267\tgramophone, acoustic gramophone\nn03452449\tgranary, garner\nn03452594\tgrandfather clock, longcase clock\nn03452741\tgrand piano, grand\nn03453231\tgraniteware\nn03453320\tgranny knot, granny\nn03453443\tgrape arbor, grape arbour\nn03454110\tgrapnel, grapnel anchor\nn03454211\tgrapnel, grapple, grappler, grappling hook, grappling iron\nn03454442\tgrass skirt\nn03454536\tgrate, grating\nn03454707\tgrate, grating\nn03454885\tgrater\nn03455355\tgraver, graving tool, pointel, pointrel\nn03455488\tgravestone, headstone, tombstone\nn03455642\tgravimeter, gravity meter\nn03455802\tgravure, photogravure, heliogravure\nn03456024\tgravy boat, gravy holder, sauceboat, boat\nn03456186\tgrey, gray\nn03456299\tgrease-gun, gun\nn03456447\tgreasepaint\nn03456548\tgreasy spoon\nn03456665\tgreatcoat, overcoat, topcoat\nn03457008\tgreat hall\nn03457451\tgreave, jambeau\nn03457686\tgreengrocery\nn03457902\tgreenhouse, nursery, glasshouse\nn03458271\tgrenade\nn03458422\tgrid, gridiron\nn03459328\tgriddle\nn03459591\tgrill, grille, grillwork\nn03459775\tgrille, radiator grille\nn03459914\tgrillroom, grill\nn03460040\tgrinder\nn03460147\tgrinding wheel, emery wheel\nn03460297\tgrindstone\nn03460455\tgripsack\nn03460899\tgristmill\nn03461288\tgrocery bag\nn03461385\tgrocery store, grocery, food market, market\nn03461651\tgrogram\nn03461882\tgroined vault\nn03461988\tgroover\nn03462110\tgrosgrain\nn03462315\tgros point\nn03462747\tground, earth\nn03462972\tground bait\nn03463185\tground control\nn03463381\tground floor, first floor, ground level\nn03463666\tgroundsheet, ground cloth\nn03464053\tG-string, thong\nn03464467\tguard, safety, safety device\nn03464628\tguard 
boat\nn03464952\tguardroom\nn03465040\tguardroom\nn03465151\tguard ship\nn03465320\tguard's van\nn03465426\tgueridon\nn03465500\tGuarnerius\nn03465605\tguesthouse\nn03465718\tguestroom\nn03465818\tguidance system, guidance device\nn03466162\tguided missile\nn03466493\tguided missile cruiser\nn03466600\tguided missile frigate\nn03466839\tguildhall\nn03466947\tguilloche\nn03467068\tguillotine\nn03467254\tguimpe\nn03467380\tguimpe\nn03467517\tguitar\nn03467796\tguitar pick\nn03467887\tgulag\nn03467984\tgun\nn03468570\tgunboat\nn03468696\tgun carriage\nn03468821\tgun case\nn03469031\tgun emplacement, weapons emplacement\nn03469175\tgun enclosure, gun turret, turret\nn03469493\tgunlock, firing mechanism\nn03469832\tgunnery\nn03469903\tgunnysack, gunny sack, burlap bag\nn03470005\tgun pendulum\nn03470222\tgun room\nn03470387\tgunsight, gun-sight\nn03470629\tgun trigger, trigger\nn03470948\tgurney\nn03471030\tgusher\nn03471190\tgusset, inset\nn03471347\tgusset, gusset plate\nn03471779\tguy, guy cable, guy wire, guy rope\nn03472232\tgymnastic apparatus, exerciser\nn03472535\tgym shoe, sneaker, tennis shoe\nn03472672\tgym suit\nn03472796\tgymslip\nn03472937\tgypsy cab\nn03473078\tgyrocompass\nn03473227\tgyroscope, gyro\nn03473465\tgyrostabilizer, gyrostabiliser\nn03473817\thabergeon\nn03473966\thabit\nn03474167\thabit, riding habit\nn03474352\thacienda\nn03474779\thacksaw, hack saw, metal saw\nn03474896\thaft, helve\nn03475581\thairbrush\nn03475674\thaircloth, hair\nn03475823\thairdressing, hair tonic, hair oil, hair grease\nn03475961\thairnet\nn03476083\thairpiece, false hair, postiche\nn03476313\thairpin\nn03476542\thair shirt\nn03476684\thair slide\nn03476991\thair spray\nn03477143\thairspring\nn03477303\thair trigger\nn03477410\thalberd\nn03477512\thalf binding\nn03477773\thalf hatchet\nn03477902\thalf hitch\nn03478589\thalf track\nn03478756\thall\nn03478907\thall\nn03479121\thall\nn03479266\tHall of Fame\nn03479397\thall of 
residence\nn03479502\thallstand\nn03480579\thalter\nn03480719\thalter, hackamore\nn03480973\thame\nn03481172\thammer\nn03481521\thammer, power hammer\nn03482001\thammer\nn03482128\thammerhead\nn03482252\thammock, sack\nn03482405\thamper\nn03482523\thand\nn03482877\thandball\nn03483086\thandbarrow\nn03483230\thandbell\nn03483316\thand blower, blow dryer, blow drier, hair dryer, hair drier\nn03483531\thandbow\nn03483637\thand brake, emergency, emergency brake, parking brake\nn03483823\thand calculator, pocket calculator\nn03483971\thandcar\nn03484083\thandcart, pushcart, cart, go-cart\nn03484487\thand cream\nn03484576\thandcuff, cuff, handlock, manacle\nn03484809\thand drill, handheld drill\nn03484931\thand glass, simple microscope, magnifying glass\nn03485198\thand glass, hand mirror\nn03485309\thand grenade\nn03485407\thand-held computer, hand-held microcomputer\nn03485575\thandhold\nn03485794\thandkerchief, hankie, hanky, hankey\nn03487090\thandlebar\nn03487331\thandloom\nn03487444\thand lotion\nn03487533\thand luggage\nn03487642\thand-me-down\nn03487774\thand mower\nn03487886\thand pump\nn03488111\thandrest\nn03488188\thandsaw, hand saw, carpenter's saw\nn03488438\thandset, French telephone\nn03488603\thand shovel\nn03488784\thandspike\nn03488887\thandstamp, rubber stamp\nn03489048\thand throttle\nn03489162\thand tool\nn03490006\thand towel, face towel\nn03490119\thand truck, truck\nn03490324\thandwear, hand wear\nn03490449\thandwheel\nn03490649\thandwheel\nn03490784\thangar queen\nn03490884\thanger\nn03491032\thang glider\nn03491724\thangman's rope, hangman's halter, halter, hemp, hempen necktie\nn03491988\thank\nn03492087\thansom, hansom cab\nn03492250\tharbor, harbour\nn03492542\thard disc, hard disk, fixed disk\nn03492922\thard hat, tin hat, safety hat\nn03493219\thardtop\nn03493792\thardware, ironware\nn03493911\thardware store, ironmonger, ironmonger's shop\nn03494278\tharmonica, mouth organ, harp, mouth harp\nn03494537\tharmonium, organ, reed 
organ\nn03494706\tharness\nn03495039\tharness\nn03495258\tharp\nn03495570\tharp\nn03495671\tharpoon\nn03495941\tharpoon gun\nn03496183\tharpoon log\nn03496296\tharpsichord, cembalo\nn03496486\tHarris Tweed\nn03496612\tharrow\nn03496892\tharvester, reaper\nn03497100\thash house\nn03497352\thasp\nn03497657\that, chapeau, lid\nn03498441\thatbox\nn03498536\thatch\nn03498662\thatchback, hatchback door\nn03498781\thatchback\nn03498866\thatchel, heckle\nn03498962\thatchet\nn03499354\thatpin\nn03499468\thauberk, byrnie\nn03499907\tHawaiian guitar, steel guitar\nn03500090\thawse, hawsehole, hawsepipe\nn03500209\thawser\nn03500295\thawser bend\nn03500389\thay bale\nn03500457\thayfork\nn03500557\thayloft, haymow, mow\nn03500699\thaymaker, hay conditioner\nn03500838\thayrack, hayrig\nn03500971\thayrack\nn03501152\thazard\nn03501288\thead\nn03501520\thead\nn03501614\thead\nn03502200\theadboard\nn03502331\thead covering, veil\nn03502509\theaddress, headgear\nn03502777\theader\nn03502897\theader\nn03503097\theader, coping, cope\nn03503233\theader, lintel\nn03503358\theadfast\nn03503477\thead gasket\nn03503567\thead gate\nn03503718\theadgear\nn03503997\theadlight, headlamp\nn03504205\theadpiece\nn03504293\theadpin, kingpin\nn03504723\theadquarters, central office, main office, home office, home base\nn03505015\theadrace\nn03505133\theadrest\nn03505383\theadsail\nn03505504\theadscarf\nn03505667\theadset\nn03505764\thead shop\nn03506028\theadstall, headpiece\nn03506184\theadstock\nn03506370\thealth spa, spa, health club\nn03506560\thearing aid, ear trumpet\nn03506727\thearing aid, deaf-aid\nn03506880\thearse\nn03507241\thearth, fireside\nn03507458\thearthrug\nn03507658\theart-lung machine\nn03507963\theat engine\nn03508101\theater, warmer\nn03508485\theat exchanger\nn03508881\theating pad, hot pad\nn03509394\theat lamp, infrared lamp\nn03509608\theat pump\nn03509843\theat-seeking missile\nn03510072\theat shield\nn03510244\theat 
sink\nn03510384\theaume\nn03510487\theaver\nn03510583\theavier-than-air craft\nn03510866\theckelphone, basset oboe\nn03510987\thectograph, heliotype\nn03511175\thedge, hedgerow\nn03511333\thedge trimmer\nn03512030\thelicon, bombardon\nn03512147\thelicopter, chopper, whirlybird, eggbeater\nn03512452\theliograph\nn03512624\theliometer\nn03512911\thelm\nn03513137\thelmet\nn03513376\thelmet\nn03514129\thematocrit, haematocrit\nn03514340\themming-stitch\nn03514451\themostat, haemostat\nn03514693\themstitch, hemstitching\nn03514894\thenroost\nn03515338\theraldry\nn03515934\thermitage\nn03516266\therringbone\nn03516367\therringbone, herringbone pattern\nn03516647\tHerschelian telescope, off-axis reflector\nn03516844\tHessian boot, hessian, jackboot, Wellington, Wellington boot\nn03516996\theterodyne receiver, superheterodyne receiver, superhet\nn03517509\thibachi\nn03517647\thideaway, retreat\nn03517760\thi-fi, high fidelity sound system\nn03517899\thigh altar\nn03517982\thigh-angle gun\nn03518135\thighball glass\nn03518230\thighboard\nn03518305\thighboy, tallboy\nn03518445\thighchair, feeding chair\nn03518631\thigh gear, high\nn03518829\thigh-hat cymbal, high hat\nn03518943\thighlighter\nn03519081\thighlighter\nn03519226\thigh-pass filter\nn03519387\thigh-rise, tower block\nn03519674\thigh table\nn03519848\thigh-warp loom\nn03520493\thijab\nn03521076\thinge, flexible joint\nn03521431\thinging post, swinging post\nn03521544\thip boot, thigh boot\nn03521675\thipflask, pocket flask\nn03521771\thip pad\nn03521899\thip pocket\nn03522003\thippodrome\nn03522100\thip roof, hipped roof\nn03522634\thitch\nn03522863\thitch\nn03522990\thitching post\nn03523134\thitchrack, hitching bar\nn03523398\thob\nn03523506\thobble skirt\nn03523987\thockey skate\nn03524150\thockey stick\nn03524287\thod\nn03524425\thodoscope\nn03524574\thoe\nn03524745\thoe handle\nn03524976\thogshead\nn03525074\thoist\nn03525252\thold, keep\nn03525454\tholder\nn03525693\tholding cell\nn03525827\tholding 
device\nn03526062\tholding pen, holding paddock, holding yard\nn03527149\thollowware, holloware\nn03527444\tholster\nn03527565\tholster\nn03527675\tholy of holies, sanctum sanctorum\nn03528100\thome, nursing home, rest home\nn03528263\thome appliance, household appliance\nn03528523\thome computer\nn03528901\thome plate, home base, home, plate\nn03529175\thome room, homeroom\nn03529444\thomespun\nn03529629\thomestead\nn03529860\thome theater, home theatre\nn03530189\thoming torpedo\nn03530511\thone\nn03530642\thoneycomb\nn03530910\thood, bonnet, cowl, cowling\nn03531281\thood\nn03531447\thood\nn03531546\thood, exhaust hood\nn03531691\thood\nn03531982\thood latch\nn03532342\thook\nn03532672\thook, claw\nn03532919\thook\nn03533014\thookah, narghile, nargileh, sheesha, shisha, chicha, calean, kalian, water pipe, hubble-bubble, hubbly-bubbly\nn03533392\thook and eye\nn03533486\thookup, assemblage\nn03533654\thookup\nn03533845\thook wrench, hook spanner\nn03534580\thoopskirt, crinoline\nn03534695\thoosegow, hoosgow\nn03534776\tHoover\nn03535024\thope chest, wedding chest\nn03535284\thopper\nn03535647\thopsacking, hopsack\nn03535780\thorizontal bar, high bar\nn03536122\thorizontal stabilizer, horizontal stabiliser, tailplane\nn03536568\thorizontal tail\nn03536761\thorn\nn03537085\thorn\nn03537241\thorn\nn03537412\thorn button\nn03537550\thornpipe, pibgorn, stockhorn\nn03538037\thorse, gymnastic horse\nn03538179\thorsebox\nn03538300\thorsecar\nn03538406\thorse cart, horse-cart\nn03538542\thorsecloth\nn03538634\thorse-drawn vehicle\nn03538817\thorsehair\nn03538957\thorsehair wig\nn03539103\thorseless carriage\nn03539293\thorse pistol, horse-pistol\nn03539433\thorseshoe, shoe\nn03539546\thorseshoe\nn03539678\thorse-trail\nn03539754\thorsewhip\nn03540090\those\nn03540267\thosiery, hose\nn03540476\thospice\nn03540595\thospital, infirmary\nn03540914\thospital bed\nn03541091\thospital room\nn03541269\thospital ship\nn03541393\thospital train\nn03541537\thostel, youth hostel, 
student lodging\nn03541696\thostel, hostelry, inn, lodge, auberge\nn03541923\thot-air balloon\nn03542333\thotel\nn03542605\thotel-casino, casino-hotel\nn03542727\thotel-casino, casino-hotel\nn03542860\thotel room\nn03543012\thot line\nn03543112\thot pants\nn03543254\thot plate, hotplate\nn03543394\thot rod, hot-rod\nn03543511\thot spot, hotspot\nn03543603\thot tub\nn03543735\thot-water bottle, hot-water bag\nn03543945\thoundstooth check, hound's-tooth check, dogstooth check, dogs-tooth check, dog's-tooth check\nn03544143\thourglass\nn03544238\thour hand, little hand\nn03544360\thouse\nn03545150\thouse\nn03545470\thouseboat\nn03545585\thouselights\nn03545756\thouse of cards, cardhouse, card-house, cardcastle\nn03545961\thouse of correction\nn03546112\thouse paint, housepaint\nn03546235\thousetop\nn03546340\thousing, lodging, living accommodations\nn03547054\thovel, hut, hutch, shack, shanty\nn03547229\thovercraft, ground-effect machine\nn03547397\thowdah, houdah\nn03547530\thuarache, huaraches\nn03547861\thub-and-spoke, hub-and-spoke system\nn03548086\thubcap\nn03548195\thuck, huckaback\nn03548320\thug-me-tight\nn03548402\thula-hoop\nn03548533\thulk\nn03548626\thull\nn03548930\thumeral veil, veil\nn03549199\tHumvee, Hum-Vee\nn03549350\thunter, hunting watch\nn03549473\thunting knife\nn03549589\thurdle\nn03549732\thurricane deck, hurricane roof, promenade deck, awning deck\nn03549897\thurricane lamp, hurricane lantern, tornado lantern, storm lantern, storm lamp\nn03550153\thut, army hut, field hut\nn03550289\thutch\nn03550420\thutment\nn03551084\thydraulic brake, hydraulic brakes\nn03551395\thydraulic press\nn03551582\thydraulic pump, hydraulic ram\nn03551790\thydraulic system\nn03552001\thydraulic transmission, hydraulic transmission system\nn03552449\thydroelectric turbine\nn03552749\thydrofoil, hydroplane\nn03553019\thydrofoil, foil\nn03553248\thydrogen bomb, H-bomb, fusion bomb, thermonuclear bomb\nn03553486\thydrometer, 
gravimeter\nn03554375\thygrodeik\nn03554460\thygrometer\nn03554645\thygroscope\nn03555006\thyperbaric chamber\nn03555217\thypercoaster\nn03555426\thypermarket\nn03555564\thypodermic needle\nn03555662\thypodermic syringe, hypodermic, hypo\nn03555862\thypsometer\nn03555996\thysterosalpingogram\nn03556173\tI-beam\nn03556679\tice ax, ice axe, piolet\nn03556811\ticeboat, ice yacht, scooter\nn03556992\ticebreaker, iceboat\nn03557270\ticed-tea spoon\nn03557360\tice hockey rink, ice-hockey rink\nn03557590\tice machine\nn03557692\tice maker\nn03557840\tice pack, ice bag\nn03558007\ticepick, ice pick\nn03558176\tice rink, ice-skating rink, ice\nn03558404\tice skate\nn03558633\tice tongs\nn03558739\ticetray\nn03559373\ticonoscope\nn03559531\tIdentikit, Identikit picture\nn03559999\tidle pulley, idler pulley, idle wheel\nn03560430\tigloo, iglu\nn03560860\tignition coil\nn03561047\tignition key\nn03561169\tignition switch\nn03561573\timaret\nn03562565\timmovable bandage\nn03563200\timpact printer\nn03563460\timpeller\nn03563710\timplant\nn03563967\timplement\nn03564849\timpression\nn03565288\timprint\nn03565565\timprovised explosive device, I.E.D., IED\nn03565710\timpulse turbine\nn03565830\tin-basket, in-tray\nn03565991\tincendiary bomb, incendiary, firebomb\nn03566193\tincinerator\nn03566329\tinclined plane\nn03566555\tinclinometer, dip circle\nn03566730\tinclinometer\nn03566860\tincrustation, encrustation\nn03567066\tincubator, brooder\nn03567635\tindex register\nn03567788\tIndiaman\nn03567912\tIndian club\nn03568117\tindicator\nn03568818\tinduction coil\nn03569014\tinductor, inductance\nn03569174\tindustrial watercourse\nn03569293\tinertial guidance system, inertial navigation system\nn03569494\tinflater, inflator\nn03571280\tinhaler, inhalator\nn03571439\tinjector\nn03571625\tink bottle, inkpot\nn03571853\tink eraser\nn03571942\tink-jet printer\nn03572107\tinkle\nn03572205\tinkstand\nn03572321\tinkwell, inkstand\nn03572631\tinlay\nn03573574\tinside 
caliper\nn03573848\tinsole, innersole\nn03574243\tinstep\nn03574416\tinstillator\nn03574555\tinstitution\nn03574816\tinstrument\nn03575958\tinstrument of punishment\nn03576215\tinstrument of torture\nn03576443\tintaglio, diaglyph\nn03576955\tintake valve\nn03577090\tintegrated circuit, microcircuit\nn03577312\tintegrator, planimeter\nn03577474\tIntelnet\nn03577672\tinterceptor\nn03577818\tinterchange\nn03578055\tintercommunication system, intercom\nn03578251\tintercontinental ballistic missile, ICBM\nn03578656\tinterface, port\nn03578981\tinterferometer\nn03579538\tinterior door\nn03579982\tinternal-combustion engine, ICE\nn03580518\tinternal drive\nn03580615\tinternet, net, cyberspace\nn03580845\tinterphone\nn03580990\tinterrupter\nn03581125\tintersection, crossroad, crossway, crossing, carrefour\nn03581531\tinterstice\nn03581897\tintraocular lens\nn03582508\tintravenous pyelogram, IVP\nn03582959\tinverter\nn03583419\tion engine\nn03583621\tionization chamber, ionization tube\nn03584254\tiPod\nn03584400\tvideo iPod\nn03584829\tiron, smoothing iron\nn03585073\tiron\nn03585337\tiron, branding iron\nn03585438\tirons, chains\nn03585551\tironclad\nn03585682\tiron foundry\nn03585778\tiron horse\nn03585875\tironing\nn03586219\tiron lung\nn03586631\tironmongery\nn03586911\tironworks\nn03587205\tirrigation ditch\nn03588216\tizar\nn03588841\tjabot\nn03588951\tjack\nn03589313\tjack, jackstones\nn03589513\tjack\nn03589672\tjack\nn03589791\tjacket\nn03590306\tjacket\nn03590475\tjacket\nn03590588\tjack-in-the-box\nn03590841\tjack-o'-lantern\nn03590932\tjack plane\nn03591116\tJacob's ladder, jack ladder, pilot ladder\nn03591313\tjaconet\nn03591592\tJacquard loom, Jacquard\nn03591798\tjacquard\nn03591901\tjag, dag\nn03592245\tjail, jailhouse, gaol, clink, slammer, poky, pokey\nn03592669\tjalousie\nn03592773\tjamb\nn03592931\tjammer\nn03593122\tjampot, jamjar\nn03593222\tjapan\nn03593526\tjar\nn03593862\tJarvik heart, Jarvik artificial heart\nn03594010\tjaunting car, jaunty 
car\nn03594148\tjavelin\nn03594277\tjaw\nn03594523\tJaws of Life\nn03594734\tjean, blue jean, denim\nn03594945\tjeep, landrover\nn03595055\tjellaba\nn03595264\tjerkin\nn03595409\tjeroboam, double-magnum\nn03595523\tjersey\nn03595614\tjersey, T-shirt, tee shirt\nn03595860\tjet, jet plane, jet-propelled plane\nn03596099\tjet bridge\nn03596285\tjet engine\nn03596543\tjetliner\nn03597147\tjeweler's glass\nn03597317\tjewelled headdress, jeweled headdress\nn03597916\tjew's harp, jews' harp, mouth bow\nn03598151\tjib\nn03598299\tjibboom\nn03598385\tjig\nn03598515\tjig\nn03598646\tjiggermast, jigger\nn03598783\tjigsaw, scroll saw, fretsaw\nn03598930\tjigsaw puzzle\nn03599486\tjinrikisha, ricksha, rickshaw\nn03599964\tjobcentre\nn03600285\tjodhpurs, jodhpur breeches, riding breeches\nn03600475\tjodhpur, jodhpur boot, jodhpur shoe\nn03600722\tjoinery\nn03600977\tjoint\nn03601442\tJoint Direct Attack Munition, JDAM\nn03601638\tjointer, jointer plane, jointing plane, long plane\nn03601840\tjoist\nn03602081\tjolly boat, jolly\nn03602194\tjorum\nn03602365\tjoss house\nn03602686\tjournal bearing\nn03602790\tjournal box\nn03602883\tjoystick\nn03603442\tjungle gym\nn03603594\tjunk\nn03603722\tjug\nn03604156\tjukebox, nickelodeon\nn03604311\tjumbojet, jumbo jet\nn03604400\tjumper, pinafore, pinny\nn03604536\tjumper\nn03604629\tjumper\nn03604763\tjumper\nn03604843\tjumper cable, jumper lead, lead, booster cable\nn03605417\tjump seat\nn03605504\tjump suit\nn03605598\tjump suit, jumpsuit\nn03605722\tjunction\nn03605915\tjunction, conjunction\nn03606106\tjunction barrier, barrier strip\nn03606251\tjunk shop\nn03606347\tjury box\nn03606465\tjury mast\nn03607029\tkachina\nn03607186\tkaffiyeh\nn03607527\tkalansuwa\nn03607659\tKalashnikov\nn03607923\tkameez\nn03608504\tkanzu\nn03609147\tkatharometer\nn03609235\tkayak\nn03609397\tkazoo\nn03609542\tkeel\nn03609786\tkeelboat\nn03609959\tkeelson\nn03610098\tkeep, donjon, dungeon\nn03610418\tkeg\nn03610524\tkennel, doghouse, dog 
house\nn03610682\tkepi, peaked cap, service cap, yachting cap\nn03610836\tkeratoscope\nn03610992\tkerchief\nn03612010\tketch\nn03612814\tkettle, boiler\nn03612965\tkettle, kettledrum, tympanum, tympani, timpani\nn03613294\tkey\nn03613592\tkey\nn03614007\tkeyboard\nn03614383\tkeyboard buffer\nn03614532\tkeyboard instrument\nn03614782\tkeyhole\nn03614887\tkeyhole saw\nn03615300\tkhadi, khaddar\nn03615406\tkhaki\nn03615563\tkhakis\nn03615655\tkhimar\nn03615790\tkhukuri\nn03616091\tkick pleat\nn03616225\tkicksorter, pulse height analyzer\nn03616428\tkickstand\nn03616763\tkick starter, kick start\nn03616979\tkid glove, suede glove\nn03617095\tkiln\nn03617312\tkilt\nn03617480\tkimono\nn03617594\tkinescope, picture tube, television tube\nn03617834\tKinetoscope\nn03618101\tking\nn03618339\tking\nn03618546\tkingbolt, kingpin, swivel pin\nn03618678\tking post\nn03618797\tKipp's apparatus\nn03618982\tkirk\nn03619050\tkirpan\nn03619196\tkirtle\nn03619275\tkirtle\nn03619396\tkit, outfit\nn03619650\tkit\nn03619793\tkitbag, kit bag\nn03619890\tkitchen\nn03620052\tkitchen appliance\nn03620353\tkitchenette\nn03620967\tkitchen table\nn03621049\tkitchen utensil\nn03621377\tkitchenware\nn03621694\tkite balloon\nn03622058\tklaxon, claxon\nn03622401\tklieg light\nn03622526\tklystron\nn03622839\tknee brace\nn03622931\tknee-high, knee-hi\nn03623198\tknee pad\nn03623338\tknee piece\nn03623556\tknife\nn03624134\tknife\nn03624400\tknife blade\nn03624767\tknight, horse\nn03625355\tknit\nn03625539\tknitting machine\nn03625646\tknitting needle\nn03625943\tknitwear\nn03626115\tknob, boss\nn03626272\tknob, pommel\nn03626418\tknobble\nn03626502\tknobkerrie, knobkerry\nn03626760\tknocker, doorknocker, rapper\nn03627232\tknot\nn03627954\tknuckle joint, hinge joint\nn03628071\tkohl\nn03628215\tkoto\nn03628421\tkraal\nn03628511\tkremlin\nn03628728\tkris, creese, crease\nn03628831\tkrummhorn, crumhorn, cromorne\nn03628984\tKundt's tube\nn03629100\tKurdistan\nn03629231\tkurta\nn03629520\tkylix, 
cylix\nn03629643\tkymograph, cymograph\nn03630262\tlab bench, laboratory bench\nn03630383\tlab coat, laboratory coat\nn03631177\tlace\nn03631811\tlacquer\nn03631922\tlacquerware\nn03632100\tlacrosse ball\nn03632577\tladder-back\nn03632729\tladder-back, ladder-back chair\nn03632852\tladder truck, aerial ladder truck\nn03632963\tladies' room, powder room\nn03633091\tladle\nn03633341\tlady chapel\nn03633632\tlagerphone\nn03633886\tlag screw, lag bolt\nn03634034\tlake dwelling, pile dwelling\nn03634899\tlally, lally column\nn03635032\tlamasery\nn03635108\tlambrequin\nn03635330\tlame\nn03635516\tlaminar flow clean room\nn03635668\tlaminate\nn03635932\tlamination\nn03636248\tlamp\nn03636649\tlamp\nn03637027\tlamp house, lamphouse, lamp housing\nn03637181\tlamppost\nn03637318\tlampshade, lamp shade\nn03637480\tlanai\nn03637787\tlancet arch, lancet\nn03637898\tlancet window\nn03638014\tlandau\nn03638180\tlander\nn03638623\tlanding craft\nn03638743\tlanding flap\nn03638883\tlanding gear\nn03639077\tlanding net\nn03639230\tlanding skid\nn03639497\tland line, landline\nn03639675\tland mine, ground-emplaced mine, booby trap\nn03639880\tland office\nn03640850\tlanolin\nn03640988\tlantern\nn03641569\tlanyard, laniard\nn03641947\tlap, lap covering\nn03642144\tlaparoscope\nn03642341\tlapboard\nn03642444\tlapel\nn03642573\tlap joint, splice\nn03642806\tlaptop, laptop computer\nn03643149\tlaryngoscope\nn03643253\tlaser, optical maser\nn03643491\tlaser-guided bomb, LGB\nn03643737\tlaser printer\nn03643907\tlash, thong\nn03644073\tlashing\nn03644378\tlasso, lariat, riata, reata\nn03644858\tlatch\nn03645011\tlatch, door latch\nn03645168\tlatchet\nn03645290\tlatchkey\nn03645577\tlateen, lateen sail\nn03646020\tlatex paint, latex, rubber-base paint\nn03646148\tlath\nn03646296\tlathe\nn03646809\tlatrine\nn03646916\tlattice, latticework, fretwork\nn03647423\tlaunch\nn03647520\tlauncher, rocket launcher\nn03648219\tlaundry, wash, washing, washables\nn03648431\tlaundry 
cart\nn03648667\tlaundry truck\nn03649003\tlavalava\nn03649161\tlavaliere, lavalier, lavalliere\nn03649288\tlaver\nn03649674\tlawn chair, garden chair\nn03649797\tlawn furniture\nn03649909\tlawn mower, mower\nn03650551\tlayette\nn03651388\tlead-acid battery, lead-acid accumulator\nn03651605\tlead-in\nn03651843\tleading rein\nn03652100\tlead pencil\nn03652389\tleaf spring\nn03652729\tlean-to\nn03652826\tlean-to tent\nn03652932\tleash, tether, lead\nn03653110\tleatherette, imitation leather\nn03653220\tleather strip\nn03653454\tLeclanche cell\nn03653583\tlectern, reading desk\nn03653740\tlecture room\nn03653833\tlederhosen\nn03653975\tledger board\nn03654576\tleg\nn03654826\tleg\nn03655072\tlegging, leging, leg covering\nn03655470\tLeiden jar, Leyden jar\nn03655720\tleisure wear\nn03656484\tlens, lense, lens system\nn03656957\tlens, electron lens\nn03657121\tlens cap, lens cover\nn03657239\tlens implant, interocular lens implant, IOL\nn03657511\tleotard, unitard, body suit, cat suit\nn03658102\tletter case\nn03658185\tletter opener, paper knife, paperknife\nn03658635\tlevee\nn03658858\tlevel, spirit level\nn03659292\tlever\nn03659686\tlever, lever tumbler\nn03659809\tlever\nn03659950\tlever lock\nn03660124\tLevi's, levis\nn03660562\tLiberty ship\nn03660909\tlibrary\nn03661043\tlibrary\nn03661340\tlid\nn03662301\tLiebig condenser\nn03662452\tlie detector\nn03662601\tlifeboat\nn03662719\tlife buoy, lifesaver, life belt, life ring\nn03662887\tlife jacket, life vest, cork jacket\nn03663433\tlife office\nn03663531\tlife preserver, preserver, flotation device\nn03663910\tlife-support system, life support\nn03664159\tlife-support system, life support\nn03664675\tlifting device\nn03664840\tlift pump\nn03664943\tligament\nn03665232\tligature\nn03665366\tlight, light source\nn03665851\tlight arm\nn03665924\tlight bulb, lightbulb, bulb, incandescent lamp, electric light, electric-light bulb\nn03666238\tlight circuit, lighting circuit\nn03666362\tlight-emitting diode, 
LED\nn03666591\tlighter, light, igniter, ignitor\nn03666917\tlighter-than-air craft\nn03667060\tlight filter, diffusing screen\nn03667235\tlighting\nn03667552\tlight machine gun\nn03667664\tlight meter, exposure meter, photometer\nn03667829\tlight microscope\nn03668067\tlightning rod, lightning conductor\nn03668279\tlight pen, electronic stylus\nn03668488\tlightship\nn03668803\tLilo\nn03669245\tlimber\nn03669534\tlimekiln\nn03669886\tlimiter, clipper\nn03670208\tlimousine, limo\nn03671914\tlinear accelerator, linac\nn03672521\tlinen\nn03672827\tline printer, line-at-a-time printer\nn03673027\tliner, ocean liner\nn03673270\tliner, lining\nn03673450\tlingerie, intimate apparel\nn03673767\tlining, liner\nn03674270\tlink, data link\nn03674440\tlinkage\nn03674731\tLink trainer\nn03674842\tlinocut\nn03675076\tlinoleum knife, linoleum cutter\nn03675235\tLinotype, Linotype machine\nn03675445\tlinsey-woolsey\nn03675558\tlinstock\nn03675907\tlion-jaw forceps\nn03676087\tlip-gloss\nn03676483\tlipstick, lip rouge\nn03676623\tliqueur glass\nn03676759\tliquid crystal display, LCD\nn03677115\tliquid metal reactor\nn03677682\tlisle\nn03677766\tlister, lister plow, lister plough, middlebreaker, middle buster\nn03678558\tlitterbin, litter basket, litter-basket\nn03678729\tlittle theater, little theatre\nn03678879\tlive axle, driving axle\nn03679384\tliving quarters, quarters\nn03679712\tliving room, living-room, sitting room, front room, parlor, parlour\nn03680248\tload\nn03680355\tLoafer\nn03680512\tloaner\nn03680734\tlobe\nn03680858\tlobster pot\nn03680942\tlocal\nn03681477\tlocal area network, LAN\nn03681813\tlocal oscillator, heterodyne oscillator\nn03682380\tLochaber ax\nn03682487\tlock\nn03682877\tlock, ignition lock\nn03683079\tlock, lock chamber\nn03683341\tlock\nn03683457\tlockage\nn03683606\tlocker\nn03683708\tlocker room\nn03683995\tlocket\nn03684143\tlock-gate\nn03684224\tlocking pliers\nn03684489\tlockring, lock ring, lock 
washer\nn03684611\tlockstitch\nn03684740\tlockup\nn03684823\tlocomotive, engine, locomotive engine, railway locomotive\nn03685307\tlodge, indian lodge\nn03685486\tlodge, hunting lodge\nn03685640\tlodge\nn03685820\tlodging house, rooming house\nn03686130\tloft, attic, garret\nn03686363\tloft, pigeon loft\nn03686470\tloft\nn03686924\tlog cabin\nn03687137\tloggia\nn03687928\tlongbow\nn03688066\tlong iron\nn03688192\tlong johns\nn03688405\tlong sleeve\nn03688504\tlong tom\nn03688605\tlong trousers, long pants\nn03688707\tlong underwear, union suit\nn03688832\tlooking glass, glass\nn03688943\tlookout, observation tower, lookout station, observatory\nn03689157\tloom\nn03689570\tloop knot\nn03690168\tlorgnette\nn03690279\tLorraine cross, cross of Lorraine\nn03690473\tlorry, camion\nn03690851\tlota\nn03690938\tlotion\nn03691459\tloudspeaker, speaker, speaker unit, loudspeaker system, speaker system\nn03691817\tlounge, waiting room, waiting area\nn03692004\tlounger\nn03692136\tlounging jacket, smoking jacket\nn03692272\tlounging pajama, lounging pyjama\nn03692379\tloungewear\nn03692522\tloupe, jeweler's loupe\nn03692842\tlouvered window, jalousie\nn03693293\tlove knot, lovers' knot, lover's knot, true lovers' knot, true lover's knot\nn03693474\tlove seat, loveseat, tete-a-tete, vis-a-vis\nn03693707\tloving cup\nn03693860\tlowboy\nn03694196\tlow-pass filter\nn03694356\tlow-warp-loom\nn03694639\tLP, L-P\nn03694761\tL-plate\nn03694949\tlubber's hole\nn03695122\tlubricating system, force-feed lubricating system, force feed, pressure-feed lubricating system, pressure feed\nn03695452\tluff\nn03695616\tlug\nn03695753\tluge\nn03695857\tLuger\nn03695957\tluggage carrier\nn03696065\tluggage compartment, automobile trunk, trunk\nn03696301\tluggage rack, roof rack\nn03696445\tlugger\nn03696568\tlugsail, lug\nn03696746\tlug wrench\nn03696909\tlumberjack, lumber jacket\nn03697007\tlumbermill, sawmill\nn03697366\tlunar excursion module, lunar module, 
LEM\nn03697552\tlunchroom\nn03697812\tlunette\nn03697913\tlungi, lungyi, longyi\nn03698123\tlunula\nn03698226\tlusterware\nn03698360\tlute\nn03698604\tluxury liner, express luxury liner\nn03698723\tlyceum\nn03698815\tlychgate, lichgate\nn03699280\tlyre\nn03699591\tmachete, matchet, panga\nn03699754\tmachicolation\nn03699975\tmachine\nn03700963\tmachine, simple machine\nn03701191\tmachine bolt\nn03701391\tmachine gun\nn03701640\tmachinery\nn03701790\tmachine screw\nn03702248\tmachine tool\nn03702440\tmachinist's vise, metalworking vise\nn03702582\tmachmeter\nn03703075\tmackinaw\nn03703203\tmackinaw, Mackinaw boat\nn03703463\tmackinaw, Mackinaw coat\nn03703590\tmackintosh, macintosh\nn03703730\tmacrame\nn03703862\tmadras\nn03703945\tMae West, air jacket\nn03704549\tmagazine rack\nn03704834\tmagic lantern\nn03705379\tmagnet\nn03705808\tmagnetic bottle\nn03706229\tmagnetic compass\nn03706415\tmagnetic core memory, core memory\nn03706653\tmagnetic disk, magnetic disc, disk, disc\nn03706939\tmagnetic head\nn03707171\tmagnetic mine\nn03707372\tmagnetic needle\nn03707597\tmagnetic recorder\nn03707766\tmagnetic stripe\nn03708036\tmagnetic tape, mag tape, tape\nn03708425\tmagneto, magnetoelectric machine\nn03708843\tmagnetometer, gaussmeter\nn03708962\tmagnetron\nn03709206\tmagnifier\nn03709363\tmagnum\nn03709545\tmagnus hitch\nn03709644\tmail\nn03709823\tmailbag, postbag\nn03709960\tmailbag, mail pouch\nn03710079\tmailboat, mail boat, packet, packet boat\nn03710193\tmailbox, letter box\nn03710294\tmail car\nn03710421\tmaildrop\nn03710528\tmailer\nn03710637\tmaillot\nn03710721\tmaillot, tank suit\nn03710937\tmailsorter\nn03711044\tmail train\nn03711711\tmainframe, mainframe computer\nn03711999\tmainmast\nn03712111\tmain rotor\nn03712337\tmainsail\nn03712444\tmainspring\nn03712887\tmain-topmast\nn03712981\tmain-topsail\nn03713069\tmain yard\nn03713151\tmaisonette, maisonnette\nn03713436\tmajolica, maiolica\nn03714235\tmakeup, make-up, war paint\nn03715114\tMaksutov 
telescope\nn03715275\tmalacca, malacca cane\nn03715386\tmallet, beetle\nn03715669\tmallet, hammer\nn03715892\tmallet\nn03716228\tmammogram\nn03716887\tmandola\nn03716966\tmandolin\nn03717131\tmanger, trough\nn03717285\tmangle\nn03717447\tmanhole\nn03717622\tmanhole cover\nn03718212\tman-of-war, ship of the line\nn03718335\tmanometer\nn03718458\tmanor, manor house\nn03718581\tmanor hall, hall\nn03718699\tMANPAD\nn03718789\tmansard, mansard roof\nn03718935\tmanse\nn03719053\tmansion, mansion house, manse, hall, residence\nn03719343\tmantel, mantelpiece, mantle, mantlepiece, chimneypiece\nn03719560\tmantelet, mantilla\nn03719743\tmantilla\nn03720005\tMao jacket\nn03720163\tmap\nn03720665\tmaquiladora\nn03720891\tmaraca\nn03721047\tmarble\nn03721252\tmarching order\nn03721384\tmarimba, xylophone\nn03721590\tmarina\nn03722007\tmarker\nn03722288\tmarketplace, market place, mart, market\nn03722646\tmarlinespike, marlinspike, marlingspike\nn03722944\tmarocain, crepe marocain\nn03723153\tmarquee, marquise\nn03723267\tmarquetry, marqueterie\nn03723439\tmarriage bed\nn03723781\tmartello tower\nn03723885\tmartingale\nn03724066\tmascara\nn03724176\tmaser\nn03724417\tmasher\nn03724538\tmashie, five iron\nn03724623\tmashie niblick, seven iron\nn03724756\tmasjid, musjid\nn03724870\tmask\nn03725035\tmask\nn03725506\tMasonite\nn03725600\tMason jar\nn03725717\tmasonry\nn03725869\tmason's level\nn03726116\tmassage parlor\nn03726233\tmassage parlor\nn03726371\tmass spectrograph\nn03726516\tmass spectrometer, spectrometer\nn03726760\tmast\nn03726993\tmast\nn03727067\tmastaba, mastabah\nn03727465\tmaster bedroom\nn03727605\tmasterpiece, chef-d'oeuvre\nn03727837\tmat\nn03727946\tmat, gym mat\nn03728437\tmatch, lucifer, friction match\nn03728982\tmatch\nn03729131\tmatchboard\nn03729308\tmatchbook\nn03729402\tmatchbox\nn03729482\tmatchlock\nn03729647\tmatch plane, tonguing and grooving plane\nn03729826\tmatchstick\nn03729951\tmaterial\nn03730153\tmateriel, equipage\nn03730334\tmaternity 
hospital\nn03730494\tmaternity ward\nn03730655\tmatrix\nn03730788\tMatthew Walker, Matthew Walker knot\nn03730893\tmatting\nn03731019\tmattock\nn03731483\tmattress cover\nn03731695\tmaul, sledge, sledgehammer\nn03731882\tmaulstick, mahlstick\nn03732020\tMauser\nn03732114\tmausoleum\nn03732458\tmaxi\nn03732543\tMaxim gun\nn03732658\tmaximum and minimum thermometer\nn03733131\tmaypole\nn03733281\tmaze, labyrinth\nn03733465\tmazer\nn03733547\tmeans\nn03733644\tmeasure\nn03733805\tmeasuring cup\nn03733925\tmeasuring instrument, measuring system, measuring device\nn03735637\tmeasuring stick, measure, measuring rod\nn03735963\tmeat counter\nn03736064\tmeat grinder\nn03736147\tmeat hook\nn03736269\tmeat house\nn03736372\tmeat safe\nn03736470\tmeat thermometer\nn03736970\tmechanical device\nn03738066\tmechanical piano, Pianola, player piano\nn03738241\tmechanical system\nn03738472\tmechanism\nn03739518\tmedical building, health facility, healthcare facility\nn03739693\tmedical instrument\nn03742019\tmedicine ball\nn03742115\tmedicine chest, medicine cabinet\nn03742238\tMEDLINE\nn03743016\tmegalith, megalithic structure\nn03743279\tmegaphone\nn03743902\tmemorial, monument\nn03744276\tmemory, computer memory, storage, computer storage, store, memory board\nn03744684\tmemory chip\nn03744840\tmemory device, storage device\nn03745146\tmenagerie, zoo, zoological garden\nn03745487\tmending\nn03745571\tmenhir, standing stone\nn03746005\tmenorah\nn03746155\tMenorah\nn03746330\tman's clothing\nn03746486\tmen's room, men's\nn03748162\tmercantile establishment, retail store, sales outlet, outlet\nn03749504\tmercury barometer\nn03749634\tmercury cell\nn03749807\tmercury thermometer, mercury-in-glass thermometer\nn03750206\tmercury-vapor lamp\nn03750437\tmercy seat\nn03750614\tmerlon\nn03751065\tmess, mess hall\nn03751269\tmess jacket, monkey jacket, shell jacket\nn03751458\tmess kit\nn03751590\tmessuage\nn03751757\tmetal detector\nn03752071\tmetallic\nn03752185\tmetal 
screw\nn03752398\tmetal wood\nn03752922\tmeteorological balloon\nn03753077\tmeter\nn03753514\tmeterstick, metrestick\nn03757604\tmetronome\nn03758089\tmezzanine, mezzanine floor, entresol\nn03758220\tmezzanine, first balcony\nn03758894\tmicrobalance\nn03758992\tmicrobrewery\nn03759243\tmicrofiche\nn03759432\tmicrofilm\nn03759661\tmicrometer, micrometer gauge, micrometer caliper\nn03759954\tmicrophone, mike\nn03760310\tmicroprocessor\nn03760671\tmicroscope\nn03760944\tmicrotome\nn03761084\tmicrowave, microwave oven\nn03761588\tmicrowave diathermy machine\nn03761731\tmicrowave linear accelerator\nn03762238\tmiddy, middy blouse\nn03762332\tmidiron, two iron\nn03762434\tmihrab\nn03762602\tmihrab\nn03762982\tmilitary hospital\nn03763727\tmilitary quarters\nn03763968\tmilitary uniform\nn03764276\tmilitary vehicle\nn03764606\tmilk bar\nn03764736\tmilk can\nn03764822\tmilk float\nn03764995\tmilking machine\nn03765128\tmilking stool\nn03765467\tmilk wagon, milkwagon\nn03765561\tmill, grinder, milling machinery\nn03765934\tmilldam\nn03766044\tmiller, milling machine\nn03766218\tmilliammeter\nn03766322\tmillinery, woman's hat\nn03766508\tmillinery, hat shop\nn03766600\tmilling\nn03766697\tmillivoltmeter\nn03766935\tmillstone\nn03767112\tmillstone\nn03767203\tmillwheel, mill wheel\nn03767459\tmimeograph, mimeo, mimeograph machine, Roneo, Roneograph\nn03767745\tminaret\nn03767966\tmincer, mincing machine\nn03768132\tmine\nn03768683\tmine detector\nn03768823\tminelayer\nn03768916\tmineshaft\nn03769610\tminibar, cellaret\nn03769722\tminibike, motorbike\nn03769881\tminibus\nn03770085\tminicar\nn03770224\tminicomputer\nn03770316\tministry\nn03770439\tminiskirt, mini\nn03770520\tminisub, minisubmarine\nn03770679\tminivan\nn03770834\tminiver\nn03770954\tmink, mink coat\nn03772077\tminster\nn03772269\tmint\nn03772584\tminute hand, big hand\nn03772674\tMinuteman\nn03773035\tmirror\nn03773504\tmissile\nn03773835\tmissile defense system, missile defence system\nn03774327\tmiter box, 
mitre box\nn03774461\tmiter joint, mitre joint, miter, mitre\nn03775071\tmitten\nn03775199\tmixer\nn03775388\tmixer\nn03775546\tmixing bowl\nn03775636\tmixing faucet\nn03775747\tmizzen, mizen\nn03775847\tmizzenmast, mizenmast, mizzen, mizen\nn03776167\tmobcap\nn03776460\tmobile home, manufactured home\nn03776877\tmoccasin, mocassin\nn03776997\tmock-up\nn03777126\tmod con\nn03777568\tModel T\nn03777754\tmodem\nn03778459\tmodillion\nn03778817\tmodule\nn03779000\tmodule\nn03779128\tmohair\nn03779246\tmoire, watered-silk\nn03779370\tmold, mould, cast\nn03779884\tmoldboard, mouldboard\nn03780047\tmoldboard plow, mouldboard plough\nn03780799\tmoleskin\nn03781055\tMolotov cocktail, petrol bomb, gasoline bomb\nn03781244\tmonastery\nn03781467\tmonastic habit\nn03781594\tmoneybag\nn03781683\tmoney belt\nn03781787\tmonitor\nn03782006\tmonitor\nn03782190\tmonitor, monitoring device\nn03782794\tmonkey-wrench, monkey wrench\nn03782929\tmonk's cloth\nn03783304\tmonochrome\nn03783430\tmonocle, eyeglass\nn03783575\tmonofocal lens implant, monofocal IOL\nn03783873\tmonoplane\nn03784139\tmonotype\nn03784270\tmonstrance, ostensorium\nn03784793\tmooring tower, mooring mast\nn03784896\tMoorish arch, horseshoe arch\nn03785016\tmoped\nn03785142\tmop handle\nn03785237\tmoquette\nn03785499\tmorgue, mortuary, dead room\nn03785721\tmorion, cabasset\nn03786096\tmorning dress\nn03786194\tmorning dress\nn03786313\tmorning room\nn03786621\tMorris chair\nn03786715\tmortar, howitzer, trench mortar\nn03786901\tmortar\nn03787032\tmortarboard\nn03787523\tmortise joint, mortise-and-tenon joint\nn03788047\tmosaic\nn03788195\tmosque\nn03788365\tmosquito net\nn03788498\tmotel\nn03788601\tmotel room\nn03788914\tMother Hubbard, muumuu\nn03789171\tmotion-picture camera, movie camera, cine-camera\nn03789400\tmotion-picture film, movie film, cine-film\nn03789603\tmotley\nn03789794\tmotley\nn03789946\tmotor\nn03790230\tmotorboat, powerboat\nn03790512\tmotorcycle, bike\nn03790755\tmotor hotel, motor inn, motor 
lodge, tourist court, court\nn03790953\tmotorized wheelchair\nn03791053\tmotor scooter, scooter\nn03791235\tmotor vehicle, automotive vehicle\nn03792048\tmound, hill\nn03792334\tmound, hill, pitcher's mound\nn03792526\tmount, setting\nn03792782\tmountain bike, all-terrain bike, off-roader\nn03792972\tmountain tent\nn03793489\tmouse, computer mouse\nn03793850\tmouse button\nn03794056\tmousetrap\nn03794136\tmousse, hair mousse, hair gel\nn03794798\tmouthpiece, embouchure\nn03795123\tmouthpiece\nn03795269\tmouthpiece, gumshield\nn03795758\tmovement\nn03795976\tmovie projector, cine projector, film projector\nn03796181\tmoving-coil galvanometer\nn03796401\tmoving van\nn03796522\tmud brick\nn03796605\tmudguard, splash guard, splash-guard\nn03796848\tmudhif\nn03796974\tmuff\nn03797062\tmuffle\nn03797182\tmuffler\nn03797264\tmufti\nn03797390\tmug\nn03797896\tmulch\nn03798061\tmule, scuff\nn03798442\tmultichannel recorder\nn03798610\tmultiengine airplane, multiengine plane\nn03798982\tmultiplex\nn03799113\tmultiplexer\nn03799240\tmultiprocessor\nn03799375\tmultistage rocket, step rocket\nn03799610\tmunition, ordnance, ordnance store\nn03799876\tMurphy bed\nn03800371\tmusette, shepherd's pipe\nn03800485\tmusette pipe\nn03800563\tmuseum\nn03800772\tmushroom anchor\nn03800933\tmusical instrument, instrument\nn03801353\tmusic box, musical box\nn03801533\tmusic hall, vaudeville theater, vaudeville theatre\nn03801671\tmusic school\nn03801760\tmusic stand, music rack\nn03801880\tmusic stool, piano stool\nn03802007\tmusket\nn03802228\tmusket ball, ball\nn03802393\tmuslin\nn03802643\tmustache cup, moustache cup\nn03802800\tmustard plaster, sinapism\nn03802973\tmute\nn03803116\tmuzzle loader\nn03803284\tmuzzle\nn03803780\tmyelogram\nn03804211\tnacelle\nn03804744\tnail\nn03805180\tnailbrush\nn03805280\tnailfile\nn03805374\tnailhead\nn03805503\tnailhead\nn03805725\tnail polish, nail enamel, nail varnish\nn03805933\tnainsook\nn03807334\tNapier's bones, Napier's rods\nn03809211\tnard, 
spikenard\nn03809312\tnarrowbody aircraft, narrow-body aircraft, narrow-body\nn03809603\tnarrow wale\nn03809686\tnarthex\nn03809802\tnarthex\nn03810412\tnasotracheal tube\nn03810952\tnational monument\nn03811295\tnautilus, nuclear submarine, nuclear-powered submarine\nn03811444\tnavigational system\nn03811847\tnaval equipment\nn03811965\tnaval gun\nn03812263\tnaval missile\nn03812382\tnaval radar\nn03812789\tnaval tactical data system\nn03812924\tnaval weaponry\nn03813078\tnave\nn03813176\tnavigational instrument\nn03813946\tnebuchadnezzar\nn03814528\tneckband\nn03814639\tneck brace\nn03814727\tneckcloth, stock\nn03814817\tneckerchief\nn03814906\tnecklace\nn03815149\tnecklet\nn03815278\tneckline\nn03815482\tneckpiece\nn03815615\tnecktie, tie\nn03816005\tneckwear\nn03816136\tneedle\nn03816394\tneedle\nn03816530\tneedlenose pliers\nn03816849\tneedlework, needlecraft\nn03817191\tnegative\nn03817331\tnegative magnetic pole, negative pole, south-seeking pole\nn03817522\tnegative pole\nn03817647\tnegligee, neglige, peignoir, wrapper, housecoat\nn03818001\tneolith\nn03818343\tneon lamp, neon induction lamp, neon tube\nn03819047\tnephoscope\nn03819336\tnest\nn03819448\tnest egg\nn03819595\tnet, network, mesh, meshing, meshwork\nn03819994\tnet\nn03820154\tnet\nn03820318\tnet\nn03820728\tnetwork, electronic network\nn03820950\tnetwork\nn03821145\tneutron bomb\nn03821424\tnewel\nn03821518\tnewel post, newel\nn03822171\tnewspaper, paper\nn03822361\tnewsroom\nn03822504\tnewsroom\nn03822656\tnewsstand\nn03822767\tNewtonian telescope, Newtonian reflector\nn03823111\tnib, pen nib\nn03823216\tniblick, nine iron\nn03823312\tnicad, nickel-cadmium accumulator\nn03823673\tnickel-iron battery, nickel-iron accumulator\nn03823906\tNicol prism\nn03824197\tnight bell\nn03824284\tnightcap\nn03824381\tnightgown, gown, nightie, night-robe, nightdress\nn03824589\tnight latch\nn03824713\tnight-light\nn03824999\tnightshirt\nn03825080\tnightwear, sleepwear, nightclothes\nn03825271\tninepin, 
skittle, skittle pin\nn03825442\tninepin ball, skittle ball\nn03825673\tninon\nn03825788\tnipple\nn03825913\tnipple shield\nn03826039\tniqab\nn03826186\tNissen hut, Quonset hut\nn03827420\tnogging\nn03827536\tnoisemaker\nn03828020\tnonsmoker, nonsmoking car\nn03829340\tnon-volatile storage, nonvolatile storage\nn03829857\tNorfolk jacket\nn03829954\tnoria\nn03831203\tnosebag, feedbag\nn03831382\tnoseband, nosepiece\nn03831757\tnose flute\nn03832144\tnosewheel\nn03832673\tnotebook, notebook computer\nn03833907\tnuclear-powered ship\nn03834040\tnuclear reactor, reactor\nn03834472\tnuclear rocket\nn03834604\tnuclear weapon, atomic weapon\nn03835197\tnude, nude painting\nn03835729\tnumdah, numdah rug, nammad\nn03835941\tnun's habit\nn03836062\tnursery, baby's room\nn03836451\tnut and bolt\nn03836602\tnutcracker\nn03836906\tnylon\nn03836976\tnylons, nylon stocking, rayons, rayon stocking, silk stocking\nn03837422\toar\nn03837606\toast\nn03837698\toast house\nn03837869\tobelisk\nn03838024\tobject ball\nn03838298\tobjective, objective lens, object lens, object glass\nn03838748\toblique bandage\nn03838899\toboe, hautboy, hautbois\nn03839172\toboe da caccia\nn03839276\toboe d'amore\nn03839424\tobservation dome\nn03839671\tobservatory\nn03839795\tobstacle\nn03840327\tobturator\nn03840681\tocarina, sweet potato\nn03840823\toctant\nn03841011\todd-leg caliper\nn03841143\todometer, hodometer, mileometer, milometer\nn03841290\toeil de boeuf\nn03841666\toffice, business office\nn03842012\toffice building, office block\nn03842156\toffice furniture\nn03842276\tofficer's mess\nn03842377\toff-line equipment, auxiliary equipment\nn03842585\togee, cyma reversa\nn03842754\togee arch, keel arch\nn03842986\tohmmeter\nn03843092\toil, oil color, oil colour\nn03843316\toilcan\nn03843438\toilcloth\nn03843555\toil filter\nn03843883\toil heater, oilstove, kerosene heater, kerosine heater\nn03844045\toil lamp, kerosene lamp, kerosine lamp\nn03844233\toil paint\nn03844550\toil pump\nn03844673\toil 
refinery, petroleum refinery\nn03844815\toilskin, slicker\nn03844965\toil slick\nn03845107\toilstone\nn03845190\toil tanker, oiler, tanker, tank ship\nn03845990\told school tie\nn03846100\tolive drab\nn03846234\tolive drab, olive-drab uniform\nn03846431\tOlympian Zeus\nn03846677\tomelet pan, omelette pan\nn03846772\tomnidirectional antenna, nondirectional antenna\nn03846970\tomnirange, omnidirectional range, omnidirectional radio range\nn03847471\tonion dome\nn03847823\topen-air market, open-air marketplace, market square\nn03848033\topen circuit\nn03848168\topen-end wrench, tappet wrench\nn03848348\topener\nn03848537\topen-hearth furnace\nn03849275\topenside plane, rabbet plane\nn03849412\topen sight\nn03849679\topenwork\nn03849814\topera, opera house\nn03849943\topera cloak, opera hood\nn03850053\toperating microscope\nn03850245\toperating room, OR, operating theater, operating theatre, surgery\nn03850492\toperating table\nn03850613\tophthalmoscope\nn03851341\toptical device\nn03851787\toptical disk, optical disc\nn03852280\toptical instrument\nn03852544\toptical pyrometer, pyroscope\nn03852688\toptical telescope\nn03853291\torchestra pit, pit\nn03853924\tordinary, ordinary bicycle\nn03854065\torgan, pipe organ\nn03854421\torgandy, organdie\nn03854506\torganic light-emitting diode, OLED\nn03854722\torgan loft\nn03854815\torgan pipe, pipe, pipework\nn03855214\torganza\nn03855333\toriel, oriel window\nn03855464\toriflamme\nn03855604\tO ring\nn03855756\tOrlon\nn03855908\torlop deck, orlop, fourth deck\nn03856012\torphanage, orphans' asylum\nn03856335\torphrey\nn03856465\torrery\nn03856728\torthicon, image orthicon\nn03857026\torthochromatic film\nn03857156\torthopter, ornithopter\nn03857291\torthoscope\nn03857687\toscillograph\nn03857828\toscilloscope, scope, cathode-ray oscilloscope, CRO\nn03858085\tossuary\nn03858183\totoscope, auriscope, auroscope\nn03858418\tottoman, pouf, pouffe, puff, hassock\nn03858533\toubliette\nn03858837\tout-basket, 
out-tray\nn03859000\toutboard motor, outboard\nn03859170\toutboard motorboat, outboard\nn03859280\toutbuilding\nn03859495\touterwear, overclothes\nn03859608\toutfall\nn03859958\toutfit, getup, rig, turnout\nn03860234\toutfitter\nn03860404\touthouse, privy, earth-closet, jakes\nn03861048\toutput device\nn03861271\toutrigger\nn03861430\toutrigger canoe\nn03861596\toutside caliper\nn03861842\toutside mirror\nn03862379\toutwork\nn03862676\toven\nn03862862\toven thermometer\nn03863108\toverall\nn03863262\toverall, boilersuit, boilers suit\nn03863657\tovercoat, overcoating\nn03863783\toverdrive\nn03863923\tovergarment, outer garment\nn03864139\toverhand knot\nn03864356\toverhang\nn03864692\toverhead projector\nn03865288\tovermantel\nn03865371\tovernighter, overnight bag, overnight case\nn03865557\toverpass, flyover\nn03865820\toverride\nn03865949\tovershoe\nn03866082\toverskirt\nn03867854\toxbow\nn03868044\tOxbridge\nn03868242\toxcart\nn03868324\toxeye\nn03868406\toxford\nn03868643\toximeter\nn03868763\toxyacetylene torch\nn03868863\toxygen mask\nn03869838\toyster bar\nn03869976\toyster bed, oyster bank, oyster park\nn03870105\tpace car\nn03870290\tpacemaker, artificial pacemaker\nn03870546\tpack\nn03870672\tpack\nn03870980\tpack, face pack\nn03871083\tpackage, parcel\nn03871371\tpackage store, liquor store, off-licence\nn03871524\tpackaging\nn03871628\tpacket\nn03871724\tpacking box, packing case\nn03871860\tpackinghouse, packing plant\nn03872016\tpackinghouse\nn03872167\tpacking needle\nn03872273\tpacksaddle\nn03873416\tpaddle, boat paddle\nn03873699\tpaddle\nn03873848\tpaddle\nn03873996\tpaddle box, paddle-box\nn03874138\tpaddle steamer, paddle-wheeler\nn03874293\tpaddlewheel, paddle wheel\nn03874487\tpaddock\nn03874599\tpadlock\nn03874823\tpage printer, page-at-a-time printer\nn03875218\tpaint, pigment\nn03875806\tpaintball\nn03875955\tpaintball gun\nn03876111\tpaintbox\nn03876231\tpaintbrush\nn03877351\tpaisley\nn03877472\tpajama, pyjama, pj's, 
jammies\nn03877674\tpajama, pyjama\nn03877845\tpalace\nn03878066\tpalace, castle\nn03878211\tpalace\nn03878294\tpalanquin, palankeen\nn03878418\tpaleolith\nn03878511\tpalestra, palaestra\nn03878674\tpalette, pallet\nn03878828\tpalette knife\nn03878963\tpalisade\nn03879456\tpallet\nn03879705\tpallette, palette\nn03880032\tpallium\nn03880129\tpallium\nn03880323\tpan\nn03880531\tpan, cooking pan\nn03881305\tpancake turner\nn03881404\tpanchromatic film\nn03881534\tpanda car\nn03882611\tpaneling, panelling, pane\nn03882960\tpanhandle\nn03883054\tpanic button\nn03883385\tpannier\nn03883524\tpannier\nn03883664\tpannikin\nn03883773\tpanopticon\nn03883944\tpanopticon\nn03884397\tpanpipe, pandean pipe, syrinx\nn03884554\tpantaloon\nn03884639\tpantechnicon\nn03884778\tpantheon\nn03884926\tpantheon\nn03885028\tpantie, panty, scanty, step-in\nn03885194\tpanting, trousering\nn03885293\tpant leg, trouser leg\nn03885410\tpantograph\nn03885535\tpantry, larder, buttery\nn03885669\tpants suit, pantsuit\nn03885788\tpanty girdle\nn03885904\tpantyhose\nn03886053\tpanzer\nn03886641\tpaper chain\nn03886762\tpaper clip, paperclip, gem clip\nn03886940\tpaper cutter\nn03887185\tpaper fastener\nn03887330\tpaper feed\nn03887512\tpaper mill\nn03887697\tpaper towel\nn03887899\tparabolic mirror\nn03888022\tparabolic reflector, paraboloid reflector\nn03888257\tparachute, chute\nn03888605\tparallel bars, bars\nn03888808\tparallel circuit, shunt circuit\nn03888998\tparallel interface, parallel port\nn03889397\tparang\nn03889503\tparapet, breastwork\nn03889626\tparapet\nn03889726\tparasail\nn03889871\tparasol, sunshade\nn03890093\tparer, paring knife\nn03890233\tparfait glass\nn03890358\tpargeting, pargetting, pargetry\nn03890514\tpari-mutuel machine, totalizer, totaliser, totalizator, totalisator\nn03891051\tparka, windbreaker, windcheater, anorak\nn03891251\tpark bench\nn03891332\tparking meter\nn03891538\tparlor, parlour\nn03892178\tparquet, parquet floor\nn03892425\tparquetry, 
parqueterie\nn03892557\tparsonage, vicarage, rectory\nn03892728\tParsons table\nn03893935\tpartial denture\nn03894051\tparticle detector\nn03894379\tpartition, divider\nn03894677\tparts bin\nn03894933\tparty line\nn03895038\tparty wall\nn03895170\tparvis\nn03895866\tpassenger car, coach, carriage\nn03896103\tpassenger ship\nn03896233\tpassenger train\nn03896419\tpassenger van\nn03896526\tpasse-partout\nn03896628\tpassive matrix display\nn03896984\tpasskey, passe-partout, master key, master\nn03897130\tpass-through\nn03897634\tpastry cart\nn03897943\tpatch\nn03898129\tpatchcord\nn03898271\tpatchouli, patchouly, pachouli\nn03898395\tpatch pocket\nn03898633\tpatchwork, patchwork quilt\nn03898787\tpatent log, screw log, taffrail log\nn03899100\tpaternoster\nn03899612\tpatina\nn03899768\tpatio, terrace\nn03899933\tpatisserie\nn03900028\tpatka\nn03900194\tpatrol boat, patrol ship\nn03900301\tpatty-pan\nn03900393\tpave\nn03900979\tpavilion, marquee\nn03901229\tpavior, paviour, paving machine\nn03901338\tpavis, pavise\nn03901750\tpawn\nn03901974\tpawnbroker's shop, pawnshop, loan office\nn03902125\tpay-phone, pay-station\nn03902220\tPC board\nn03902482\tpeach orchard\nn03902756\tpea jacket, peacoat\nn03903133\tpeavey, peavy, cant dog, dog hook\nn03903290\tpectoral, pectoral medallion\nn03903424\tpedal, treadle, foot pedal, foot lever\nn03903733\tpedal pusher, toreador pants\nn03903868\tpedestal, plinth, footstall\nn03904060\tpedestal table\nn03904183\tpedestrian crossing, zebra crossing\nn03904433\tpedicab, cycle rickshaw\nn03904657\tpediment\nn03904782\tpedometer\nn03904909\tpeeler\nn03905361\tpeep sight\nn03905540\tpeg, nog\nn03905730\tpeg, pin, thole, tholepin, rowlock, oarlock\nn03905947\tpeg\nn03906106\tpeg, wooden leg, leg, pegleg\nn03906224\tpegboard\nn03906463\tPelham\nn03906590\tpelican crossing\nn03906789\tpelisse\nn03906894\tpelvimeter\nn03906997\tpen\nn03907475\tpenal colony\nn03907654\tpenal institution, penal facility\nn03907908\tpenalty 
box\nn03908111\tpen-and-ink\nn03908204\tpencil\nn03908456\tpencil\nn03908618\tpencil box, pencil case\nn03908714\tpencil sharpener\nn03909020\tpendant earring, drop earring, eardrop\nn03909160\tpendulum\nn03909406\tpendulum clock\nn03909516\tpendulum watch\nn03909658\tpenetration bomb\nn03911406\tpenile implant\nn03911513\tpenitentiary, pen\nn03911658\tpenknife\nn03911767\tpenlight\nn03911866\tpennant, pennon, streamer, waft\nn03912218\tpennywhistle, tin whistle, whistle\nn03912821\tpenthouse\nn03913343\tpentode\nn03913930\tpeplos, peplus, peplum\nn03914106\tpeplum\nn03914337\tpepper mill, pepper grinder\nn03914438\tpepper shaker, pepper box, pepper pot\nn03914583\tpepper spray\nn03914831\tpercale\nn03915118\tpercolator\nn03915320\tpercussion cap\nn03915437\tpercussion instrument, percussive instrument\nn03915900\tperforation\nn03916031\tperfume, essence\nn03916289\tperfumery\nn03916385\tperfumery\nn03916470\tperfumery\nn03916720\tperipheral, computer peripheral, peripheral device\nn03917048\tperiscope\nn03917198\tperistyle\nn03917327\tperiwig, peruke\nn03917814\tpermanent press, durable press\nn03918074\tperpetual motion machine\nn03918480\tpersonal computer, PC, microcomputer\nn03918737\tpersonal digital assistant, PDA, personal organizer, personal organiser, organizer, organiser\nn03919096\tpersonnel carrier\nn03919289\tpestle\nn03919430\tpestle, muller, pounder\nn03919808\tpetcock\nn03920288\tPetri dish\nn03920384\tpetrolatum gauze\nn03920641\tpet shop\nn03920737\tpetticoat, half-slip, underskirt\nn03920867\tpew, church bench\nn03923379\tphial, vial, ampule, ampul, ampoule\nn03923564\tPhillips screw\nn03923692\tPhillips screwdriver\nn03923918\tphonograph needle, needle\nn03924069\tphonograph record, phonograph recording, record, disk, disc, platter\nn03924407\tphotocathode\nn03924532\tphotocoagulator\nn03924679\tphotocopier\nn03926148\tphotographic equipment\nn03926412\tphotographic paper, photographic 
material\nn03926876\tphotometer\nn03927091\tphotomicrograph\nn03927299\tPhotostat, Photostat machine\nn03927539\tphotostat\nn03927792\tphysical pendulum, compound pendulum\nn03928116\tpiano, pianoforte, forte-piano\nn03928589\tpiano action\nn03928814\tpiano keyboard, fingerboard, clavier\nn03928994\tpiano wire\nn03929091\tpiccolo\nn03929202\tpick, pickax, pickaxe\nn03929443\tpick\nn03929660\tpick, plectrum, plectron\nn03929855\tpickelhaube\nn03930229\tpicket boat\nn03930313\tpicket fence, paling\nn03930431\tpicket ship\nn03930515\tpickle barrel\nn03930630\tpickup, pickup truck\nn03931044\tpicture, image, icon, ikon\nn03931765\tpicture frame\nn03931885\tpicture hat\nn03931980\tpicture rail\nn03932080\tpicture window\nn03932670\tpiece of cloth, piece of material\nn03933391\tpied-a-terre\nn03933933\tpier\nn03934042\tpier\nn03934229\tpier arch\nn03934311\tpier glass, pier mirror\nn03934565\tpier table\nn03934656\tpieta\nn03934890\tpiezometer\nn03935116\tpig bed, pig\nn03935234\tpiggery, pig farm\nn03935335\tpiggy bank, penny bank\nn03935883\tpilaster\nn03936269\tpile, spile, piling, stilt\nn03936466\tpile driver\nn03937543\tpill bottle\nn03937835\tpillbox, toque, turban\nn03937931\tpillion\nn03938037\tpillory\nn03938244\tpillow\nn03938401\tpillow block\nn03938522\tpillow lace, bobbin lace\nn03938725\tpillow sham\nn03939062\tpilot bit\nn03939178\tpilot boat\nn03939281\tpilot burner, pilot light, pilot\nn03939440\tpilot cloth\nn03939565\tpilot engine\nn03939677\tpilothouse, wheelhouse\nn03939844\tpilot light, pilot lamp, indicator lamp\nn03940256\tpin\nn03940894\tpin, flag\nn03941013\tpin, pin tumbler\nn03941231\tpinata\nn03941417\tpinball machine, pin table\nn03941586\tpince-nez\nn03941684\tpincer, pair of pincers, tweezer, pair of tweezers\nn03941887\tpinch bar\nn03942028\tpincurl clip\nn03942600\tpinfold\nn03942813\tping-pong 
ball\nn03942920\tpinhead\nn03943115\tpinion\nn03943266\tpinnacle\nn03943623\tpinprick\nn03943714\tpinstripe\nn03943833\tpinstripe\nn03943920\tpinstripe\nn03944024\tpintle\nn03944138\tpinwheel, pinwheel wind collector\nn03944341\tpinwheel\nn03945459\ttabor pipe\nn03945615\tpipe\nn03945817\tpipe bomb\nn03945928\tpipe cleaner\nn03946076\tpipe cutter\nn03946162\tpipefitting, pipe fitting\nn03947111\tpipet, pipette\nn03947343\tpipe vise, pipe clamp\nn03947466\tpipe wrench, tube wrench\nn03947798\tpique\nn03947888\tpirate, pirate ship\nn03948242\tpiste\nn03948459\tpistol, handgun, side arm, shooting iron\nn03948830\tpistol grip\nn03948950\tpiston, plunger\nn03949145\tpiston ring\nn03949317\tpiston rod\nn03949761\tpit\nn03950228\tpitcher, ewer\nn03950359\tpitchfork\nn03950537\tpitching wedge\nn03950647\tpitch pipe\nn03950899\tpith hat, pith helmet, sun helmet, topee, topi\nn03951068\tpiton\nn03951213\tPitot-static tube, Pitot head, Pitot tube\nn03951453\tPitot tube, Pitot\nn03951800\tpitsaw\nn03951971\tpivot, pin\nn03952150\tpivoting window\nn03952576\tpizzeria, pizza shop, pizza parlor\nn03953020\tplace of business, business establishment\nn03953416\tplace of worship, house of prayer, house of God, house of worship\nn03953901\tplacket\nn03954393\tplanchet, coin blank\nn03954731\tplane, carpenter's plane, woodworking plane\nn03955296\tplane, planer, planing machine\nn03955489\tplane seat\nn03955809\tplanetarium\nn03955941\tplanetarium\nn03956157\tplanetarium\nn03956331\tplanetary gear, epicyclic gear, planet wheel, planet gear\nn03956531\tplank-bed\nn03956623\tplanking\nn03956785\tplanner\nn03956922\tplant, works, industrial plant\nn03957315\tplanter\nn03957420\tplaster, adhesive plaster, sticking plaster\nn03957762\tplasterboard, gypsum board\nn03957991\tplastering trowel\nn03958227\tplastic bag\nn03958338\tplastic bomb\nn03958630\tplastic laminate\nn03958752\tplastic wrap\nn03959014\tplastron\nn03959123\tplastron\nn03959227\tplastron\nn03959701\tplate, scale, 
shell\nn03960374\tplate, collection plate\nn03960490\tplate\nn03961394\tplaten\nn03961630\tplaten\nn03961711\tplate rack\nn03961828\tplate rail\nn03961939\tplatform\nn03962525\tplatform, weapons platform\nn03962685\tplatform\nn03962852\tplatform bed\nn03962932\tplatform rocker\nn03963028\tplating, metal plating\nn03963198\tplatter\nn03963294\tplayback\nn03963483\tplaybox, play-box\nn03963645\tplayground\nn03964495\tplaypen, pen\nn03964611\tplaysuit\nn03965456\tplaza, mall, center, shopping mall, shopping center, shopping centre\nn03965907\tpleat, plait\nn03966206\tplenum\nn03966325\tplethysmograph\nn03966582\tpleximeter, plessimeter\nn03966751\tplexor, plessor, percussor\nn03966976\tpliers, pair of pliers, plyers\nn03967270\tplimsoll\nn03967396\tplotter\nn03967562\tplow, plough\nn03967942\tplug, stopper, stopple\nn03968293\tplug, male plug\nn03968479\tplug fuse\nn03968581\tplughole\nn03968728\tplumb bob, plumb, plummet\nn03969510\tplumb level\nn03970156\tplunger, plumber's helper\nn03970363\tplus fours\nn03970546\tplush\nn03971218\tplywood, plyboard\nn03971321\tpneumatic drill\nn03971960\tp-n junction\nn03972146\tp-n-p transistor\nn03972372\tpoacher\nn03972524\tpocket\nn03973003\tpocket battleship\nn03973285\tpocketcomb, pocket comb\nn03973402\tpocket flap\nn03973520\tpocket-handkerchief\nn03973628\tpocketknife, pocket knife\nn03973839\tpocket watch\nn03973945\tpod, fuel pod\nn03974070\tpogo stick\nn03974915\tpoint-and-shoot camera\nn03975035\tpointed arch\nn03975657\tpointing trowel\nn03975788\tpoint lace, needlepoint\nn03975926\tpoker, stove poker, fire hook, salamander\nn03976105\tpolarimeter, polariscope\nn03976268\tPolaroid\nn03976467\tPolaroid camera, Polaroid Land camera\nn03976657\tpole\nn03977158\tpole\nn03977266\tpoleax, poleaxe\nn03977430\tpoleax, poleaxe\nn03977592\tpolice boat\nn03977966\tpolice van, police wagon, paddy wagon, patrol wagon, wagon, black Maria\nn03978421\tpolling booth\nn03978575\tpolo ball\nn03978686\tpolo mallet, polo 
stick\nn03978815\tpolonaise\nn03978966\tpolo shirt, sport shirt\nn03979377\tpolyester\nn03979492\tpolygraph\nn03980026\tpomade, pomatum\nn03980478\tpommel horse, side horse\nn03980874\tponcho\nn03980986\tpongee\nn03981094\tponiard, bodkin\nn03981340\tpontifical\nn03981566\tpontoon\nn03981760\tpontoon bridge, bateau bridge, floating bridge\nn03981924\tpony cart, ponycart, donkey cart, tub-cart\nn03982232\tpool ball\nn03982331\tpoolroom\nn03982430\tpool table, billiard table, snooker table\nn03982642\tpoop deck\nn03982767\tpoor box, alms box, mite box\nn03982895\tpoorhouse\nn03983396\tpop bottle, soda bottle\nn03983499\tpopgun\nn03983612\tpoplin\nn03983712\tpopper\nn03983928\tpoppet, poppet valve\nn03984125\tpop tent\nn03984234\tporcelain\nn03984381\tporch\nn03984643\tporkpie, porkpie hat\nn03984759\tporringer\nn03985069\tportable\nn03985232\tportable computer\nn03985441\tportable circular saw, portable saw\nn03985881\tportcullis\nn03986071\tporte-cochere\nn03986224\tporte-cochere\nn03986355\tportfolio\nn03986562\tporthole\nn03986704\tportico\nn03986857\tportiere\nn03986949\tportmanteau, Gladstone, Gladstone bag\nn03987266\tportrait camera\nn03987376\tportrait lens\nn03987674\tpositive pole, positive magnetic pole, north-seeking pole\nn03987865\tpositive pole\nn03987990\tpositron emission tomography scanner, PET scanner\nn03988170\tpost\nn03988758\tpostage meter\nn03988926\tpost and lintel\nn03989199\tpost chaise\nn03989349\tpostern\nn03989447\tpost exchange, PX\nn03989665\tposthole digger, post-hole digger\nn03989777\tpost horn\nn03989898\tposthouse, post house\nn03990474\tpot\nn03991062\tpot, flowerpot\nn03991202\tpotbelly, potbelly stove\nn03991321\tPotemkin village\nn03991443\tpotential divider, voltage divider\nn03991646\tpotentiometer, pot\nn03991837\tpotentiometer\nn03992325\tpotpourri\nn03992436\tpotsherd\nn03992509\tpotter's wheel\nn03992703\tpottery, clayware\nn03992975\tpottle\nn03993053\tpotty seat, potty chair\nn03993180\tpouch\nn03993403\tpoultice, 
cataplasm, plaster\nn03993703\tpound, dog pound\nn03993878\tpound net\nn03994008\tpowder\nn03994297\tpowder and shot\nn03994417\tpowdered mustard, dry mustard\nn03994614\tpowder horn, powder flask\nn03994757\tpowder keg\nn03995018\tpower brake\nn03995265\tpower cord\nn03995372\tpower drill\nn03995535\tpower line, power cable\nn03995661\tpower loom\nn03995856\tpower mower, motor mower\nn03996004\tpower pack\nn03996145\tpower saw, saw, sawing machine\nn03996416\tpower shovel, excavator, digger, shovel\nn03996849\tpower steering, power-assisted steering\nn03997274\tpower takeoff, PTO\nn03997484\tpower tool\nn03997875\tpraetorium, pretorium\nn03998194\tprayer rug, prayer mat\nn03998333\tprayer shawl, tallith, tallis\nn03998673\tprecipitator, electrostatic precipitator, Cottrell precipitator\nn03999064\tprefab\nn03999160\tpresbytery\nn03999621\tpresence chamber\nn03999992\tpress, mechanical press\nn04000311\tpress, printing press\nn04000480\tpress\nn04000592\tpress box\nn04000716\tpress gallery\nn04000998\tpress of sail, press of canvas\nn04001132\tpressure cabin\nn04001265\tpressure cooker\nn04001397\tpressure dome\nn04001499\tpressure gauge, pressure gage\nn04001661\tpressurized water reactor, PWR\nn04001845\tpressure suit\nn04002262\tpricket\nn04002371\tprie-dieu\nn04002629\tprimary coil, primary winding, primary\nn04003241\tPrimus stove, Primus\nn04003359\tPrince Albert\nn04003856\tprint\nn04004099\tprint buffer\nn04004210\tprinted circuit\nn04004475\tprinter, printing machine\nn04004767\tprinter\nn04004990\tprinter cable\nn04005197\tpriory\nn04005630\tprison, prison house\nn04005912\tprison camp, internment camp, prisoner of war camp, POW camp\nn04006067\tprivateer\nn04006227\tprivate line\nn04006330\tprivet hedge\nn04006411\tprobe\nn04007415\tproctoscope\nn04007664\tprod, goad\nn04008385\tproduction line, assembly line, line\nn04008634\tprojectile, missile\nn04009552\tprojector\nn04009801\tprojector\nn04009923\tprolonge\nn04010057\tprolonge knot, sailor's 
breastplate\nn04010779\tprompter, autocue\nn04010927\tprong\nn04011827\tpropeller, propellor\nn04012084\tpropeller plane\nn04012482\tpropjet, turboprop, turbo-propeller plane\nn04012665\tproportional counter tube, proportional counter\nn04013060\tpropulsion system\nn04013176\tproscenium, proscenium wall\nn04013600\tproscenium arch\nn04013729\tprosthesis, prosthetic device\nn04014297\tprotective covering, protective cover, protection\nn04015204\tprotective garment\nn04015786\tproton accelerator\nn04015908\tprotractor\nn04016240\tpruner, pruning hook, lopper\nn04016479\tpruning knife\nn04016576\tpruning saw\nn04016684\tpruning shears\nn04016846\tpsaltery\nn04017571\tpsychrometer\nn04017807\tPT boat, mosquito boat, mosquito craft, motor torpedo boat\nn04018155\tpublic address system, P.A. system, PA system, P.A., PA\nn04018399\tpublic house, pub, saloon, pothouse, gin mill, taphouse\nn04018667\tpublic toilet, comfort station, public convenience, convenience, public lavatory, restroom, toilet facility, wash room\nn04019101\tpublic transport\nn04019335\tpublic works\nn04019541\tpuck, hockey puck\nn04019696\tpull\nn04019881\tpullback, tieback\nn04020087\tpull chain\nn04020298\tpulley, pulley-block, pulley block, block\nn04020744\tpull-off, rest area, rest stop, layby, lay-by\nn04020912\tPullman, Pullman car\nn04021028\tpullover, slipover\nn04021164\tpull-through\nn04021362\tpulse counter\nn04021503\tpulse generator\nn04021704\tpulse timing circuit\nn04021798\tpump\nn04022332\tpump\nn04022434\tpump action, slide action\nn04022708\tpump house, pumping station\nn04022866\tpump room\nn04023021\tpump-type pliers\nn04023119\tpump well\nn04023249\tpunch, puncher\nn04023422\tpunchboard\nn04023695\tpunch bowl\nn04023962\tpunching bag, punch bag, punching ball, punchball\nn04024137\tpunch pliers\nn04024274\tpunch press\nn04024862\tpunnet\nn04024983\tpunt\nn04025508\tpup tent, shelter tent\nn04025633\tpurdah\nn04026053\tpurifier\nn04026180\tpurl, purl 
stitch\nn04026417\tpurse\nn04026813\tpush-bike\nn04026918\tpush broom\nn04027023\tpush button, push, button\nn04027367\tpush-button radio\nn04027706\tpusher, zori\nn04027820\tput-put\nn04027935\tputtee\nn04028074\tputter, putting iron\nn04028221\tputty knife\nn04028315\tpuzzle\nn04028581\tpylon, power pylon\nn04028764\tpylon\nn04029416\tpyramidal tent\nn04029647\tpyrograph\nn04029734\tpyrometer\nn04029913\tpyrometric cone\nn04030054\tpyrostat\nn04030161\tpyx, pix\nn04030274\tpyx, pix, pyx chest, pix chest\nn04030414\tpyxis\nn04030518\tquad, quadrangle\nn04030846\tquadrant\nn04030965\tquadraphony, quadraphonic system, quadriphonic system\nn04031884\tquartering\nn04032509\tquarterstaff\nn04032603\tquartz battery, quartz mill\nn04032936\tquartz lamp\nn04033287\tqueen\nn04033425\tqueen\nn04033557\tqueen post\nn04033801\tquern\nn04033901\tquill, quill pen\nn04033995\tquilt, comforter, comfort, puff\nn04034262\tquilted bedspread\nn04034367\tquilting\nn04035231\tquipu\nn04035634\tquirk molding, quirk moulding\nn04035748\tquirt\nn04035836\tquiver\nn04035912\tquoin, coign, coigne\nn04036155\tquoit\nn04036303\tQWERTY keyboard\nn04036776\trabbet, rebate\nn04036963\trabbet joint\nn04037076\trabbit ears\nn04037220\trabbit hutch\nn04037298\traceabout\nn04037443\tracer, race car, racing car\nn04037873\traceway, race\nn04037964\tracing boat\nn04038231\tracing gig\nn04038338\tracing skiff, single shell\nn04038440\track, stand\nn04038727\track\nn04039041\track, wheel\nn04039209\track and pinion\nn04039381\tracket, racquet\nn04039742\tracquetball\nn04039848\tradar, microwave radar, radio detection and ranging, radiolocation\nn04040247\tradial, radial tire, radial-ply tire\nn04040373\tradial engine, rotary engine\nn04040540\tradiation pyrometer\nn04040759\tradiator\nn04041069\tradiator\nn04041243\tradiator cap\nn04041408\tradiator hose\nn04041544\tradio, wireless\nn04041747\tradio antenna, radio aerial\nn04042076\tradio chassis\nn04042204\tradio compass\nn04042358\tradiogram, 
radiograph, shadowgraph, skiagraph, skiagram\nn04042632\tradio interferometer\nn04042795\tradio link, link\nn04042985\tradiometer\nn04043168\tradiomicrometer\nn04043411\tradio-phonograph, radio-gramophone\nn04043733\tradio receiver, receiving set, radio set, radio, tuner, wireless\nn04044307\tradiotelegraph, radiotelegraphy, wireless telegraph, wireless telegraphy\nn04044498\tradiotelephone, radiophone, wireless telephone\nn04044716\tradio telescope, radio reflector\nn04044955\tradiotherapy equipment\nn04045085\tradio transmitter\nn04045255\tradome, radar dome\nn04045397\traft\nn04045644\trafter, balk, baulk\nn04045787\traft foundation\nn04045941\trag, shred, tag, tag end, tatter\nn04046091\tragbag\nn04046277\traglan\nn04046400\traglan sleeve\nn04046590\trail\nn04046974\trail fence\nn04047139\trailhead\nn04047401\trailing, rail\nn04047733\trailing\nn04047834\trailroad bed\nn04048441\trailroad tunnel\nn04049303\train barrel\nn04049405\traincoat, waterproof\nn04049585\train gauge, rain gage, pluviometer, udometer\nn04049753\train stick\nn04050066\trake\nn04050313\trake handle\nn04050600\tRAM disk\nn04050933\tramekin, ramequin\nn04051269\tramjet, ramjet engine, atherodyde, athodyd, flying drainpipe\nn04051439\trammer\nn04051549\tramp, incline\nn04051705\trampant arch\nn04051825\trampart, bulwark, wall\nn04052235\tramrod\nn04052346\tramrod\nn04052442\tranch, spread, cattle ranch, cattle farm\nn04052658\tranch house\nn04052757\trandom-access memory, random access memory, random memory, RAM, read/write memory\nn04053508\trangefinder, range finder\nn04053677\trange hood\nn04053767\trange pole, ranging pole, flagpole\nn04054361\trapier, tuck\nn04054566\trariora\nn04054670\trasp, wood file\nn04055180\tratchet, rachet, ratch\nn04055447\tratchet wheel\nn04055700\trathskeller\nn04055861\tratline, ratlin\nn04056073\trat-tail file\nn04056180\trattan, ratan\nn04056413\trattrap\nn04056932\trayon\nn04057047\trazor\nn04057215\trazorblade\nn04057435\treaction-propulsion engine, 
reaction engine\nn04057673\treaction turbine\nn04057846\treactor\nn04057981\treading lamp\nn04058096\treading room\nn04058239\tread-only memory, ROM, read-only storage, fixed storage\nn04058486\tread-only memory chip\nn04058594\treadout, read-out\nn04058721\tread/write head, head\nn04059157\tready-to-wear\nn04059298\treal storage\nn04059399\treamer\nn04059516\treamer, juicer, juice reamer\nn04059947\trearview mirror\nn04060198\tReaumur thermometer\nn04060448\trebozo\nn04060647\treceiver, receiving system\nn04060904\treceptacle\nn04061681\treception desk\nn04061793\treception room\nn04061969\trecess, niche\nn04062179\treciprocating engine\nn04062428\trecliner, reclining chair, lounger\nn04062644\treconnaissance plane\nn04062807\treconnaissance vehicle, scout car\nn04063154\trecord changer, auto-changer, changer\nn04063373\trecorder, recording equipment, recording machine\nn04063868\trecording\nn04064213\trecording system\nn04064401\trecord player, phonograph\nn04064747\trecord sleeve, record cover\nn04064862\trecovery room\nn04065272\trecreational vehicle, RV, R.V.\nn04065464\trecreation room, rec room\nn04065789\trecycling bin\nn04065909\trecycling plant\nn04066023\tredbrick university\nn04066270\tred carpet\nn04066388\tredoubt\nn04066476\tredoubt\nn04066767\treduction gear\nn04067143\treed pipe\nn04067231\treed stop\nn04067353\treef knot, flat knot\nn04067472\treel\nn04067658\treel\nn04067818\trefectory\nn04067921\trefectory table\nn04068441\trefinery\nn04068601\treflecting telescope, reflector\nn04069166\treflectometer\nn04069276\treflector\nn04069434\treflex camera\nn04069582\treflux condenser\nn04069777\treformatory, reform school, training school\nn04070003\treformer\nn04070207\trefracting telescope\nn04070415\trefractometer\nn04070545\trefrigeration system\nn04070727\trefrigerator, icebox\nn04070964\trefrigerator car\nn04071102\trefuge, sanctuary, asylum\nn04071263\tregalia\nn04071393\tregimentals\nn04072193\tregulator\nn04072551\trein\nn04072960\trelay, 
electrical relay\nn04073425\trelease, button\nn04073948\treligious residence, cloister\nn04074185\treliquary\nn04074963\tremote control, remote\nn04075291\tremote terminal, link-attached terminal, remote station, link-attached station\nn04075468\tremovable disk\nn04075715\trendering\nn04075813\trep, repp\nn04075916\trepair shop, fix-it shop\nn04076052\trepeater\nn04076284\trepeating firearm, repeater\nn04076713\trepository, monument\nn04077430\treproducer\nn04077594\trerebrace, upper cannon\nn04077734\trescue equipment\nn04077889\tresearch center, research facility\nn04078002\treseau\nn04078574\treservoir\nn04078955\treset\nn04079106\treset button\nn04079244\tresidence\nn04079603\tresistance pyrometer\nn04079933\tresistor, resistance\nn04080138\tresonator\nn04080454\tresonator, cavity resonator, resonating chamber\nn04080705\tresort hotel, spa\nn04080833\trespirator, inhalator\nn04081281\trestaurant, eating house, eating place, eatery\nn04081699\trest house\nn04081844\trestraint, constraint\nn04082344\tresuscitator\nn04082562\tretainer\nn04082710\tretaining wall\nn04082886\treticle, reticule, graticule\nn04083113\treticulation\nn04083309\treticule\nn04083649\tretort\nn04083800\tretractor\nn04084517\treturn key, return\nn04084682\treverberatory furnace\nn04084889\trevers, revere\nn04085017\treverse, reverse gear\nn04085574\treversible\nn04085873\trevetment, revetement, stone facing\nn04086066\trevetment\nn04086273\trevolver, six-gun, six-shooter\nn04086446\trevolving door, revolver\nn04086663\trheometer\nn04086794\trheostat, variable resistor\nn04086937\trhinoscope\nn04087126\trib\nn04087432\triband, ribband\nn04087709\tribbed vault\nn04087826\tribbing\nn04088229\tribbon development\nn04088343\trib joint pliers\nn04088441\tricer\nn04088696\triddle\nn04088797\tride\nn04089152\tridge, ridgepole, rooftree\nn04089376\tridge rope\nn04089666\triding boot\nn04089836\triding crop, hunting crop\nn04089976\triding mower\nn04090263\trifle\nn04090548\trifle 
ball\nn04090781\trifle grenade\nn04091097\trig\nn04091466\trigger, rigger brush\nn04091584\trigger\nn04091693\trigging, tackle\nn04092168\trigout\nn04093157\tringlet\nn04093223\trings\nn04093625\trink, skating rink\nn04093775\triot gun\nn04093915\tripcord\nn04094060\tripcord\nn04094250\tripping bar\nn04094438\tripping chisel\nn04094608\tripsaw, splitsaw\nn04094720\triser\nn04094859\triser, riser pipe, riser pipeline, riser main\nn04095109\tRitz\nn04095210\triver boat\nn04095342\trivet\nn04095577\triveting machine, riveter, rivetter\nn04095938\troach clip, roach holder\nn04096066\troad, route\nn04096733\troadbed\nn04096848\troadblock, barricade\nn04097085\troadhouse\nn04097373\troadster, runabout, two-seater\nn04097622\troadway\nn04097760\troaster\nn04097866\trobe\nn04098169\trobotics equipment\nn04098260\tRochon prism, Wollaston prism\nn04098399\trock bit, roller bit\nn04098513\trocker\nn04098795\trocker, cradle\nn04099003\trocker arm, valve rocker\nn04099175\trocket, rocket engine\nn04099429\trocket, projectile\nn04099969\trocking chair, rocker\nn04100174\trod\nn04100519\trodeo\nn04101375\troll\nn04101497\troller\nn04101701\troller\nn04101860\troller bandage\nn04102037\tin-line skate\nn04102162\tRollerblade\nn04102285\troller blind\nn04102406\troller coaster, big dipper, chute-the-chute\nn04102618\troller skate\nn04102760\troller towel\nn04102872\troll film\nn04102962\trolling hitch\nn04103094\trolling mill\nn04103206\trolling pin\nn04103364\trolling stock\nn04103665\troll-on\nn04103769\troll-on\nn04103918\troll-on roll-off\nn04104147\tRolodex\nn04104384\tRoman arch, semicircular arch\nn04104500\tRoman building\nn04104770\tromper, romper suit\nn04104925\trood screen\nn04105068\troof\nn04105438\troof\nn04105704\troofing\nn04105893\troom\nn04107598\troomette\nn04107743\troom light\nn04107984\troost\nn04108268\trope\nn04108822\trope bridge\nn04108999\trope tow\nn04110068\trose water\nn04110178\trose window, rosette\nn04110281\trosin bag\nn04110439\trotary actuator, 
positioner\nn04110654\trotary engine\nn04110841\trotary press\nn04110955\trotating mechanism\nn04111190\trotating shaft, shaft\nn04111414\trotisserie\nn04111531\trotisserie\nn04111668\trotor\nn04111962\trotor, rotor coil\nn04112147\trotor\nn04112252\trotor blade, rotary wing\nn04112430\trotor head, rotor shaft\nn04112579\trotunda\nn04112654\trotunda\nn04112752\trouge, paint, blusher\nn04112921\troughcast\nn04113038\trouleau\nn04113194\troulette, toothed wheel\nn04113316\troulette ball\nn04113406\troulette wheel, wheel\nn04113641\tround, unit of ammunition, one shot\nn04113765\tround arch\nn04113968\tround-bottom flask\nn04114069\troundel\nn04114301\tround file\nn04114428\troundhouse\nn04114719\trouter\nn04114844\trouter\nn04114996\trouter plane\nn04115144\trowel\nn04115256\trow house, town house\nn04115456\trowing boat\nn04115542\trowlock arch\nn04115802\troyal\nn04115996\troyal mast\nn04116098\trubber band, elastic band, elastic\nn04116294\trubber boot, gum boot\nn04116389\trubber bullet\nn04116512\trubber eraser, rubber, pencil eraser\nn04117216\trudder\nn04117464\trudder\nn04117639\trudder blade\nn04118021\trug, carpet, carpeting\nn04118538\trugby ball\nn04118635\truin\nn04118776\trule, ruler\nn04119091\trumble\nn04119230\trumble seat\nn04119360\trummer\nn04119478\trumpus room, playroom, game room\nn04119630\truncible spoon\nn04119751\trundle, spoke, rung\nn04120489\trunning shoe\nn04120695\trunning suit\nn04120842\trunway\nn04121228\trushlight, rush candle\nn04121342\trusset\nn04121426\trya, rya rug\nn04121511\tsaber, sabre\nn04121728\tsaber saw, jigsaw, reciprocating saw\nn04122262\tsable\nn04122349\tsable, sable brush, sable's hair pencil\nn04122492\tsable coat\nn04122578\tsabot, wooden shoe\nn04122685\tsachet\nn04122825\tsack, poke, paper bag, carrier bag\nn04123026\tsack, sacque\nn04123123\tsackbut\nn04123228\tsackcloth\nn04123317\tsackcloth\nn04123448\tsack coat\nn04123567\tsacking, bagging\nn04123740\tsaddle\nn04124098\tsaddlebag\nn04124202\tsaddle 
blanket, saddlecloth, horse blanket\nn04124370\tsaddle oxford, saddle shoe\nn04124488\tsaddlery\nn04124573\tsaddle seat\nn04124887\tsaddle stitch\nn04125021\tsafe\nn04125116\tsafe\nn04125257\tsafe-deposit, safe-deposit box, safety-deposit, safety deposit box, deposit box, lockbox\nn04125541\tsafe house\nn04125692\tsafety arch\nn04125853\tsafety belt, life belt, safety harness\nn04126066\tsafety bicycle, safety bike\nn04126244\tsafety bolt, safety lock\nn04126541\tsafety curtain\nn04126659\tsafety fuse\nn04126852\tsafety lamp, Davy lamp\nn04126980\tsafety match, book matches\nn04127117\tsafety net\nn04127249\tsafety pin\nn04127395\tsafety rail, guardrail\nn04127521\tsafety razor\nn04127633\tsafety valve, relief valve, escape valve, escape cock, escape\nn04127904\tsail, canvas, canvass, sheet\nn04128413\tsail\nn04128499\tsailboat, sailing boat\nn04128710\tsailcloth\nn04128837\tsailing vessel, sailing ship\nn04129490\tsailing warship\nn04129688\tsailor cap\nn04129766\tsailor suit\nn04130143\tsalad bar\nn04130257\tsalad bowl\nn04130566\tsalinometer\nn04130907\tsallet, salade\nn04131015\tsalon\nn04131113\tsalon\nn04131208\tsalon, beauty salon, beauty parlor, beauty parlour, beauty shop\nn04131368\tsaltbox\nn04131499\tsaltcellar\nn04131690\tsaltshaker, salt shaker\nn04131811\tsaltworks\nn04131929\tsalver\nn04132158\tsalwar, shalwar\nn04132465\tSam Browne belt\nn04132603\tsamisen, shamisen\nn04132829\tsamite\nn04132985\tsamovar\nn04133114\tsampan\nn04133789\tsandal\nn04134008\tsandbag\nn04134170\tsandblaster\nn04134523\tsandbox\nn04134632\tsandglass\nn04135024\tsand wedge\nn04135118\tsandwich board\nn04135315\tsanitary napkin, sanitary towel, Kotex\nn04135710\tcling film, clingfilm, Saran Wrap\nn04135933\tsarcenet, sarsenet\nn04136045\tsarcophagus\nn04136161\tsari, saree\nn04136333\tsarong\nn04136510\tsash, window sash\nn04136800\tsash fastener, sash lock, window lock\nn04137089\tsash window\nn04137217\tsatchel\nn04137355\tsateen\nn04137444\tsatellite, artificial 
satellite, orbiter\nn04137773\tsatellite receiver\nn04137897\tsatellite television, satellite TV\nn04138131\tsatellite transmitter\nn04138261\tsatin\nn04138869\tSaturday night special\nn04138977\tsaucepan\nn04139140\tsaucepot\nn04139395\tsauna, sweat room\nn04139859\tsavings bank, coin bank, money box, bank\nn04140064\tsaw\nn04140539\tsawed-off shotgun\nn04140631\tsawhorse, horse, sawbuck, buck\nn04140777\tsawmill\nn04140853\tsaw set\nn04141076\tsax, saxophone\nn04141198\tsaxhorn\nn04141327\tscabbard\nn04141712\tscaffolding, staging\nn04141838\tscale\nn04141975\tscale, weighing machine\nn04142175\tscaler\nn04142327\tscaling ladder\nn04142434\tscalpel\nn04142731\tscanner, electronic scanner\nn04142999\tscanner\nn04143140\tscanner, digital scanner, image scanner\nn04143365\tscantling, stud\nn04143897\tscarf\nn04144241\tscarf joint, scarf\nn04144539\tscatter rug, throw rug\nn04144651\tscauper, scorper\nn04145863\tSchmidt telescope, Schmidt camera\nn04146050\tschool, schoolhouse\nn04146343\tschoolbag\nn04146504\tschool bell\nn04146614\tschool bus\nn04146862\tschool ship, training ship\nn04146976\tschool system\nn04147183\tschooner\nn04147291\tschooner\nn04147495\tscientific instrument\nn04147793\tscimitar\nn04147916\tscintillation counter\nn04148054\tscissors, pair of scissors\nn04148285\tsclerometer\nn04148464\tscoinson arch, sconcheon arch\nn04148579\tsconce\nn04148703\tsconce\nn04149083\tscoop\nn04149374\tscooter\nn04149813\tscoreboard\nn04150153\tscouring pad\nn04150273\tscow\nn04150371\tscow\nn04150980\tscraper\nn04151108\tscratcher\nn04151581\tscreen\nn04151940\tscreen, cover, covert, concealment\nn04152387\tscreen\nn04152593\tscreen, CRT screen\nn04153025\tscreen door, screen\nn04153330\tscreening\nn04153751\tscrew\nn04154152\tscrew, screw propeller\nn04154340\tscrew\nn04154565\tscrewdriver\nn04154753\tscrew eye\nn04154854\tscrew key\nn04154938\tscrew thread, thread\nn04155068\tscrewtop\nn04155177\tscrew wrench\nn04155457\tscriber, scribe, scratch 
awl\nn04155625\tscrim\nn04155735\tscrimshaw\nn04155889\tscriptorium\nn04156040\tscrubber\nn04156140\tscrub brush, scrubbing brush, scrubber\nn04156297\tscrub plane\nn04156411\tscuffer\nn04156591\tscuffle, scuffle hoe, Dutch hoe\nn04156814\tscull\nn04156946\tscull\nn04157099\tscullery\nn04157320\tsculpture\nn04158002\tscuttle, coal scuttle\nn04158138\tscyphus\nn04158250\tscythe\nn04158672\tseabag\nn04158807\tsea boat\nn04158956\tsea chest\nn04160036\tsealing wax, seal\nn04160261\tsealskin\nn04160372\tseam\nn04160586\tseaplane, hydroplane\nn04160847\tsearchlight\nn04161010\tsearing iron\nn04161358\tseat\nn04161981\tseat\nn04162433\tseat\nn04162706\tseat belt, seatbelt\nn04163530\tsecateurs\nn04164002\tsecondary coil, secondary winding, secondary\nn04164199\tsecond balcony, family circle, upper balcony, peanut gallery\nn04164406\tsecond base\nn04164757\tsecond hand\nn04164868\tsecretary, writing table, escritoire, secretaire\nn04165409\tsectional\nn04165675\tsecurity blanket\nn04165945\tsecurity system, security measure, security\nn04166111\tsecurity system\nn04166281\tsedan, saloon\nn04166436\tsedan, sedan chair\nn04167346\tseeder\nn04167489\tseeker\nn04167661\tseersucker\nn04168084\tsegmental arch\nn04168199\tSegway, Segway Human Transporter, Segway HT\nn04168472\tseidel\nn04168541\tseine\nn04168840\tseismograph\nn04169437\tselector, selector switch\nn04169597\tselenium cell\nn04170037\tself-propelled vehicle\nn04170384\tself-registering thermometer\nn04170515\tself-starter\nn04170694\tselsyn, synchro\nn04170933\tselvage, selvedge\nn04171208\tsemaphore\nn04171459\tsemiautomatic firearm\nn04171629\tsemiautomatic pistol, semiautomatic\nn04171831\tsemiconductor device, semiconductor unit, semiconductor\nn04172107\tsemi-detached house\nn04172230\tsemigloss\nn04172342\tsemitrailer, semi\nn04172512\tsennit\nn04172607\tsensitometer\nn04172776\tsentry box\nn04172904\tseparate\nn04173046\tseptic tank\nn04173172\tsequence, episode\nn04173511\tsequencer, 
sequenator\nn04173907\tserape, sarape\nn04174026\tserge\nn04174101\tserger\nn04174234\tserial port\nn04174500\tserpent\nn04174705\tserration\nn04175039\tserver\nn04175147\tserver, host\nn04175574\tservice club\nn04176068\tserving cart\nn04176190\tserving dish\nn04176295\tservo, servomechanism, servosystem\nn04176528\tset\nn04177041\tset gun, spring gun\nn04177329\tsetscrew\nn04177545\tsetscrew\nn04177654\tset square\nn04177755\tsettee\nn04177820\tsettle, settee\nn04177931\tsettlement house\nn04178190\tseventy-eight, 78\nn04178329\tSeven Wonders of the Ancient World, Seven Wonders of the World\nn04178668\tsewage disposal plant, disposal plant\nn04179126\tsewer, sewerage, cloaca\nn04179712\tsewing basket\nn04179824\tsewing kit\nn04179913\tsewing machine\nn04180063\tsewing needle\nn04180229\tsewing room\nn04180888\tsextant\nn04181083\tsgraffito\nn04181228\tshackle, bond, hamper, trammel\nn04181561\tshackle\nn04181718\tshade\nn04182152\tshadow box\nn04182322\tshaft\nn04183217\tshag rug\nn04183329\tshaker\nn04183957\tshank\nn04184095\tshank, stem\nn04184316\tshantung\nn04184435\tshaper, shaping machine\nn04184600\tshaping tool\nn04184880\tsharkskin\nn04185071\tsharpener\nn04185529\tSharpie\nn04185804\tshaver, electric shaver, electric razor\nn04185946\tshaving brush\nn04186051\tshaving cream, shaving soap\nn04186268\tshaving foam\nn04186455\tshawl\nn04186624\tshawm\nn04186848\tshears\nn04187061\tsheath\nn04187233\tsheathing, overlay, overlayer\nn04187547\tshed\nn04187751\tsheep bell\nn04187885\tsheepshank\nn04187970\tsheepskin coat, afghan\nn04188064\tsheepwalk, sheeprun\nn04188179\tsheet, bed sheet\nn04189092\tsheet bend, becket bend, weaver's knot, weaver's hitch\nn04189282\tsheeting\nn04189651\tsheet pile, sheath pile, sheet piling\nn04189816\tSheetrock\nn04190052\tshelf\nn04190376\tshelf bracket\nn04190464\tshell\nn04190747\tshell, case, casing\nn04190997\tshell, racing shell\nn04191150\tshellac, shellac 
varnish\nn04191595\tshelter\nn04191943\tshelter\nn04192238\tshelter\nn04192361\tsheltered workshop\nn04192521\tSheraton\nn04192698\tshield, buckler\nn04192858\tshield\nn04193179\tshielding\nn04193377\tshift key, shift\nn04193742\tshillelagh, shillalah\nn04193883\tshim\nn04194009\tshingle\nn04194127\tshin guard, shinpad\nn04194289\tship\nn04196080\tshipboard system\nn04196502\tshipping, cargo ships, merchant marine, merchant vessels\nn04196803\tshipping room\nn04196925\tship-towed long-range acoustic detection system\nn04197110\tshipwreck\nn04197391\tshirt\nn04197781\tshirt button\nn04197878\tshirtdress\nn04198015\tshirtfront\nn04198233\tshirting\nn04198355\tshirtsleeve\nn04198453\tshirttail\nn04198562\tshirtwaist, shirtwaister\nn04198722\tshiv\nn04198797\tshock absorber, shock, cushion\nn04199027\tshoe\nn04200000\tshoe\nn04200258\tshoebox\nn04200537\tshoehorn\nn04200800\tshoe shop, shoe-shop, shoe store\nn04200908\tshoetree\nn04201064\tshofar, shophar\nn04201297\tshoji\nn04201733\tshooting brake\nn04202142\tshooting lodge, shooting box\nn04202282\tshooting stick\nn04202417\tshop, store\nn04203356\tshop bell\nn04204081\tshopping bag\nn04204238\tshopping basket\nn04204347\tshopping cart\nn04204755\tshort circuit, short\nn04205062\tshort iron\nn04205318\tshort pants, shorts, trunks\nn04205505\tshort sleeve\nn04205613\tshortwave diathermy machine\nn04206070\tshot\nn04206225\tshot glass, jigger, pony\nn04206356\tshotgun, scattergun\nn04206570\tshotgun shell\nn04206790\tshot tower\nn04207151\tshoulder\nn04207343\tshoulder bag\nn04207596\tshouldered arch\nn04207763\tshoulder holster\nn04207903\tshoulder pad\nn04208065\tshoulder patch\nn04208210\tshovel\nn04208427\tshovel\nn04208582\tshovel hat\nn04208760\tshowboat\nn04208936\tshower\nn04209133\tshower cap\nn04209239\tshower curtain\nn04209509\tshower room\nn04209613\tshower stall, shower bath\nn04209811\tshowroom, salesroom, 
saleroom\nn04210012\tshrapnel\nn04210120\tshredder\nn04210288\tshrimper\nn04210390\tshrine\nn04210591\tshrink-wrap\nn04210858\tshunt\nn04211001\tshunt, electrical shunt, bypass\nn04211219\tshunter\nn04211356\tshutter\nn04211528\tshutter\nn04211857\tshuttle\nn04211970\tshuttle\nn04212165\tshuttle bus\nn04212282\tshuttlecock, bird, birdie, shuttle\nn04212467\tshuttle helicopter\nn04212810\tSibley tent\nn04213105\tsickbay, sick berth\nn04213264\tsickbed\nn04213353\tsickle, reaping hook, reap hook\nn04213530\tsickroom\nn04214046\tsideboard\nn04214282\tsidecar\nn04214413\tside chapel\nn04214649\tsidelight, running light\nn04215153\tsidesaddle\nn04215402\tsidewalk, pavement\nn04215588\tsidewall\nn04215800\tside-wheeler\nn04215910\tsidewinder\nn04216634\tsieve, screen\nn04216860\tsifter\nn04216963\tsights\nn04217387\tsigmoidoscope, flexible sigmoidoscope\nn04217546\tsignal box, signal tower\nn04217718\tsignaling device\nn04217882\tsignboard, sign\nn04218564\tsilencer, muffler\nn04218921\tsilent butler\nn04219185\tSilex\nn04219424\tsilk\nn04219580\tsilks\nn04220250\tsilo\nn04220805\tsilver plate\nn04221076\tsilverpoint\nn04221673\tsimple pendulum\nn04221823\tsimulator\nn04222210\tsingle bed\nn04222307\tsingle-breasted jacket\nn04222470\tsingle-breasted suit\nn04222723\tsingle prop, single-propeller plane\nn04222847\tsingle-reed instrument, single-reed woodwind\nn04223066\tsingle-rotor helicopter\nn04223170\tsinglestick, fencing stick, backsword\nn04223299\tsinglet, vest, undershirt\nn04224395\tsiren\nn04224543\tsister ship\nn04224842\tsitar\nn04225031\tsitz bath, hip bath\nn04225222\tsix-pack, six pack, sixpack\nn04225729\tskate\nn04225987\tskateboard\nn04226322\tskeg\nn04226464\tskein\nn04226537\tskeleton, skeletal frame, frame, underframe\nn04226826\tskeleton key\nn04226962\tskep\nn04227050\tskep\nn04227144\tsketch, study\nn04227519\tsketcher\nn04227787\tskew arch\nn04227900\tskewer\nn04228054\tski\nn04228215\tski binding, binding\nn04228422\tskibob\nn04228581\tski 
boot\nn04228693\tski cap, stocking cap, toboggan cap\nn04229007\tskidder\nn04229107\tskid lid\nn04229480\tskiff\nn04229620\tski jump\nn04229737\tski lodge\nn04229816\tski mask\nn04229959\tskimmer\nn04230387\tski parka, ski jacket\nn04230487\tski-plane\nn04230603\tski pole\nn04230707\tski rack\nn04230808\tskirt\nn04231272\tskirt\nn04231693\tski tow, ski lift, lift\nn04231905\tSkivvies\nn04232153\tskullcap\nn04232312\tskybox\nn04232437\tskyhook\nn04232800\tskylight, fanlight\nn04233027\tskysail\nn04233124\tskyscraper\nn04233295\tskywalk\nn04233715\tslacks\nn04233832\tslack suit\nn04234160\tslasher\nn04234260\tslash pocket\nn04234455\tslat, spline\nn04234670\tslate\nn04234763\tslate pencil\nn04234887\tslate roof\nn04235291\tsled, sledge, sleigh\nn04235646\tsleeper\nn04235771\tsleeper\nn04235860\tsleeping bag\nn04236001\tsleeping car, sleeper, wagon-lit\nn04236377\tsleeve, arm\nn04236702\tsleeve\nn04236809\tsleigh bed\nn04236935\tsleigh bell, cascabel\nn04237174\tslice bar\nn04237287\tslicer\nn04237423\tslicer\nn04238128\tslide, playground slide, sliding board\nn04238321\tslide fastener, zip, zipper, zip fastener\nn04238617\tslide projector\nn04238763\tslide rule, slipstick\nn04238953\tslide valve\nn04239074\tsliding door\nn04239218\tsliding seat\nn04239333\tsliding window\nn04239436\tsling, scarf bandage, triangular bandage\nn04239639\tsling\nn04239786\tslingback, sling\nn04239900\tslinger ring\nn04240434\tslip clutch, slip friction clutch\nn04240752\tslipcover\nn04240867\tslip-joint pliers\nn04241042\tslipknot\nn04241249\tslip-on\nn04241394\tslipper, carpet slipper\nn04241573\tslip ring\nn04242084\tslit lamp\nn04242315\tslit trench\nn04242408\tsloop\nn04242587\tsloop of war\nn04242704\tslop basin, slop bowl\nn04243003\tslop pail, slop jar\nn04243142\tslops\nn04243251\tslopshop, slopseller's shop\nn04243546\tslot, one-armed bandit\nn04243941\tslot machine, coin machine\nn04244379\tsluice, sluiceway, penstock\nn04244847\tsmack\nn04244997\tsmall boat\nn04245218\tsmall 
computer system interface, SCSI\nn04245412\tsmall ship\nn04245508\tsmall stores\nn04245847\tsmart bomb\nn04246060\tsmelling bottle\nn04246271\tsmocking\nn04246459\tsmoke bomb, smoke grenade\nn04246731\tsmokehouse, meat house\nn04246855\tsmoker, smoking car, smoking carriage, smoking compartment\nn04247011\tsmoke screen, smokescreen\nn04247440\tsmoking room\nn04247544\tsmoothbore\nn04247630\tsmooth plane, smoothing plane\nn04247736\tsnack bar, snack counter, buffet\nn04247876\tsnaffle, snaffle bit\nn04248209\tsnap, snap fastener, press stud\nn04248396\tsnap brim\nn04248507\tsnap-brim hat\nn04248851\tsnare, gin, noose\nn04249415\tsnare drum, snare, side drum\nn04249582\tsnatch block\nn04249882\tsnifter, brandy snifter, brandy glass\nn04250224\tsniper rifle, precision rifle\nn04250473\tsnips, tinsnips\nn04250599\tSno-cat\nn04250692\tsnood\nn04250850\tsnorkel, schnorkel, schnorchel, snorkel breather, breather\nn04251144\tsnorkel\nn04251701\tsnowbank, snow bank\nn04251791\tsnowboard\nn04252077\tsnowmobile\nn04252225\tsnowplow, snowplough\nn04252331\tsnowshoe\nn04252560\tsnowsuit\nn04252653\tsnow thrower, snow blower\nn04253057\tsnuffbox\nn04253168\tsnuffer\nn04253304\tsnuffers\nn04253931\tsoapbox\nn04254009\tsoap dish\nn04254120\tsoap dispenser\nn04254450\tsoap pad\nn04254680\tsoccer ball\nn04254777\tsock\nn04255163\tsocket\nn04255346\tsocket wrench\nn04255499\tsocle\nn04255586\tsoda can\nn04255670\tsoda fountain\nn04255768\tsoda fountain\nn04255899\tsod house, soddy, adobe house\nn04256318\tsodium-vapor lamp, sodium-vapour lamp\nn04256520\tsofa, couch, lounge\nn04256758\tsoffit\nn04256891\tsoftball, playground ball\nn04257223\tsoft pedal\nn04257684\tsoil pipe\nn04257790\tsolar array, solar battery, solar panel\nn04257986\tsolar cell, photovoltaic cell\nn04258138\tsolar dish, solar collector, solar furnace\nn04258333\tsolar heater\nn04258438\tsolar house\nn04258618\tsolar telescope\nn04258732\tsolar thermal system\nn04258859\tsoldering 
iron\nn04259202\tsolenoid\nn04259468\tsolleret, sabaton\nn04259630\tsombrero\nn04260192\tsonic depth finder, fathometer\nn04260364\tsonogram, echogram\nn04260589\tsonograph\nn04261116\tsorter\nn04261281\tsouk\nn04261369\tsound bow\nn04261506\tsoundbox, body\nn04261638\tsound camera\nn04261767\tsounder\nn04261868\tsound film\nn04262161\tsounding board, soundboard\nn04262530\tsounding rocket\nn04262678\tsound recording, audio recording, audio\nn04262869\tsound spectrograph\nn04263257\tsoup bowl\nn04263336\tsoup ladle\nn04263502\tsoupspoon, soup spoon\nn04263760\tsource of illumination\nn04263950\tsourdine\nn04264134\tsoutache\nn04264233\tsoutane\nn04264361\tsou'wester\nn04264485\tsoybean future\nn04264628\tspace bar\nn04264765\tspace capsule, capsule\nn04264914\tspacecraft, ballistic capsule, space vehicle\nn04265275\tspace heater\nn04265428\tspace helmet\nn04265904\tspace rocket\nn04266014\tspace shuttle\nn04266162\tspace station, space platform, space laboratory\nn04266375\tspacesuit\nn04266486\tspade\nn04266849\tspade bit\nn04266968\tspaghetti junction\nn04267091\tSpandau\nn04267165\tspandex\nn04267246\tspandrel, spandril\nn04267435\tspanker\nn04267577\tspar\nn04267985\tsparge pipe\nn04268142\tspark arrester, sparker\nn04268275\tspark arrester\nn04268418\tspark chamber, spark counter\nn04268565\tspark coil\nn04268799\tspark gap\nn04269086\tspark lever\nn04269270\tspark plug, sparking plug, plug\nn04269502\tsparkplug wrench\nn04269668\tspark transmitter\nn04269822\tspat, gaiter\nn04269944\tspatula\nn04270147\tspatula\nn04270371\tspeakerphone\nn04270576\tspeaking trumpet\nn04270891\tspear, lance, shaft\nn04271148\tspear, gig, fizgig, fishgig, lance\nn04271531\tspecialty store\nn04271793\tspecimen bottle\nn04271891\tspectacle\nn04272054\tspectacles, specs, eyeglasses, glasses\nn04272389\tspectator pump, spectator\nn04272782\tspectrograph\nn04272928\tspectrophotometer\nn04273064\tspectroscope, prism 
spectroscope\nn04273285\tspeculum\nn04273569\tspeedboat\nn04273659\tspeed bump\nn04273796\tspeedometer, speed indicator\nn04273972\tspeed skate, racing skate\nn04274686\tspherometer\nn04274985\tsphygmomanometer\nn04275093\tspicemill\nn04275175\tspice rack\nn04275283\tspider\nn04275548\tspider web, spider's web\nn04275661\tspike\nn04275904\tspike\nn04277352\tspindle\nn04277493\tspindle, mandrel, mandril, arbor\nn04277669\tspindle\nn04277826\tspin dryer, spin drier\nn04278247\tspinet\nn04278353\tspinet\nn04278447\tspinnaker\nn04278605\tspinner\nn04278932\tspinning frame\nn04279063\tspinning jenny\nn04279172\tspinning machine\nn04279353\tspinning rod\nn04279462\tspinning wheel\nn04279858\tspiral bandage\nn04279987\tspiral ratchet screwdriver, ratchet screwdriver\nn04280259\tspiral spring\nn04280373\tspirit lamp\nn04280487\tspirit stove\nn04280845\tspirometer\nn04280970\tspit\nn04281260\tspittoon, cuspidor\nn04281375\tsplashboard, splasher, dashboard\nn04281571\tsplasher\nn04281998\tsplice, splicing\nn04282231\tsplicer\nn04282494\tsplint\nn04282872\tsplit rail, fence rail\nn04282992\tSpode\nn04283096\tspoiler\nn04283255\tspoiler\nn04283378\tspoke, wheel spoke, radius\nn04283585\tspokeshave\nn04283784\tsponge cloth\nn04283905\tsponge mop\nn04284002\tspoon\nn04284341\tspoon\nn04284438\tSpork\nn04284572\tsporran\nn04284869\tsport kite, stunt kite\nn04285008\tsports car, sport car\nn04285146\tsports equipment\nn04285622\tsports implement\nn04285803\tsportswear, athletic wear, activewear\nn04285965\tsport utility, sport utility vehicle, S.U.V., SUV\nn04286128\tspot\nn04286575\tspotlight, spot\nn04286960\tspot weld, spot-weld\nn04287351\tspouter\nn04287451\tsprag\nn04287747\tspray gun\nn04287898\tspray paint\nn04287986\tspreader\nn04288165\tsprig\nn04288272\tspring\nn04288533\tspring balance, spring scale\nn04288673\tspringboard\nn04289027\tsprinkler\nn04289195\tsprinkler system\nn04289449\tsprit\nn04289576\tspritsail\nn04289690\tsprocket, sprocket 
wheel\nn04289827\tsprocket\nn04290079\tspun yarn\nn04290259\tspur, gad\nn04290507\tspur gear, spur wheel\nn04290615\tsputnik\nn04290762\tspy satellite\nn04291069\tsquad room\nn04291242\tsquare\nn04291759\tsquare knot\nn04291992\tsquare-rigger\nn04292080\tsquare sail\nn04292221\tsquash ball\nn04292414\tsquash racket, squash racquet, bat\nn04292572\tsquawk box, squawker, intercom speaker\nn04292921\tsqueegee\nn04293119\tsqueezer\nn04293258\tsquelch circuit, squelch, squelcher\nn04293744\tsquinch\nn04294212\tstabilizer, stabiliser\nn04294426\tstabilizer\nn04294614\tstabilizer bar, anti-sway bar\nn04294879\tstable, stalls, horse barn\nn04295081\tstable gear, saddlery, tack\nn04295353\tstabling\nn04295571\tstacks\nn04295777\tstaddle\nn04295881\tstadium, bowl, arena, sports stadium\nn04296562\tstage\nn04297098\tstagecoach, stage\nn04297750\tstained-glass window\nn04297847\tstair-carpet\nn04298053\tstair-rod\nn04298661\tstairwell\nn04298765\tstake\nn04299215\tstall, stand, sales booth\nn04299370\tstall\nn04299963\tstamp\nn04300358\tstamp mill, stamping mill\nn04300509\tstamping machine, stamper\nn04300643\tstanchion\nn04301000\tstand\nn04301242\tstandard\nn04301474\tstandard cell\nn04301760\tstandard transmission, stick shift\nn04302200\tstanding press\nn04302863\tstanhope\nn04302988\tStanley Steamer\nn04303095\tstaple\nn04303258\tstaple\nn04303357\tstaple gun, staplegun, tacker\nn04303497\tstapler, stapling machine\nn04304215\tstarship, spaceship\nn04304375\tstarter, starter motor, starting motor\nn04304680\tstarting gate, starting stall\nn04305016\tStassano furnace, electric-arc furnace\nn04305210\tStatehouse\nn04305323\tstately home\nn04305471\tstate prison\nn04305572\tstateroom\nn04305947\tstatic tube\nn04306080\tstation\nn04306592\tstator, stator coil\nn04306847\tstatue\nn04307419\tstay\nn04307767\tstaysail\nn04307878\tsteakhouse, chophouse\nn04307986\tsteak knife\nn04308084\tstealth aircraft\nn04308273\tstealth bomber\nn04308397\tstealth fighter\nn04308583\tsteam 
bath, steam room, vapor bath, vapour bath\nn04308807\tsteamboat\nn04308915\tsteam chest\nn04309049\tsteam engine\nn04309348\tsteamer, steamship\nn04309548\tsteamer\nn04309833\tsteam iron\nn04310018\tsteam locomotive\nn04310157\tsteamroller, road roller\nn04310507\tsteam shovel\nn04310604\tsteam turbine\nn04310721\tsteam whistle\nn04310904\tsteel\nn04311004\tsteel arch bridge\nn04311174\tsteel drum\nn04311595\tsteel mill, steelworks, steel plant, steel factory\nn04312020\tsteel-wool pad\nn04312154\tsteelyard, lever scale, beam scale\nn04312432\tsteeple, spire\nn04312654\tsteerage\nn04312756\tsteering gear\nn04312916\tsteering linkage\nn04313220\tsteering system, steering mechanism\nn04313503\tsteering wheel, wheel\nn04313628\tstele, stela\nn04314107\tstem-winder\nn04314216\tstencil\nn04314522\tSten gun\nn04314632\tstenograph\nn04314914\tstep, stair\nn04315342\tstep-down transformer\nn04315713\tstep stool\nn04315828\tstep-up transformer\nn04315948\tstereo, stereophony, stereo system, stereophonic system\nn04316498\tstereoscope\nn04316815\tstern chaser\nn04316924\tsternpost\nn04317063\tsternwheeler\nn04317175\tstethoscope\nn04317325\tstewing pan, stewpan\nn04317420\tstick\nn04317833\tstick\nn04317976\tstick, control stick, joystick\nn04318131\tstick\nn04318787\tstile\nn04318892\tstiletto\nn04318982\tstill\nn04319545\tstillroom, still room\nn04319774\tStillson wrench\nn04319937\tstilt\nn04320405\tStinger\nn04320598\tstink bomb, stench bomb\nn04320871\tstirrer\nn04320973\tstirrup, stirrup iron\nn04321121\tstirrup pump\nn04321453\tstob\nn04322026\tstock, gunstock\nn04322531\tstockade\nn04322692\tstockcar\nn04322801\tstock car\nn04323519\tstockinet, stockinette\nn04323819\tstocking\nn04324120\tstock-in-trade\nn04324297\tstockpot\nn04324387\tstockroom, stock room\nn04324515\tstocks\nn04325041\tstock saddle, Western saddle\nn04325208\tstockyard\nn04325704\tstole\nn04325804\tstomacher\nn04325968\tstomach pump\nn04326547\tstone 
wall\nn04326676\tstoneware\nn04326799\tstonework\nn04326896\tstool\nn04327204\tstoop, stoep\nn04327544\tstop bath, short-stop, short-stop bath\nn04327682\tstopcock, cock, turncock\nn04328054\tstopper knot\nn04328186\tstopwatch, stop watch\nn04328329\tstorage battery, accumulator\nn04328580\tstorage cell, secondary cell\nn04328703\tstorage ring\nn04328946\tstorage space\nn04329477\tstoreroom, storage room, stowage\nn04329681\tstorm cellar, cyclone cellar, tornado cellar\nn04329834\tstorm door\nn04329958\tstorm window, storm sash\nn04330109\tstoup, stoop\nn04330189\tstoup\nn04330267\tstove\nn04330340\tstove, kitchen stove, range, kitchen range, cooking stove\nn04330669\tstove bolt\nn04330746\tstovepipe\nn04330896\tstovepipe iron\nn04330998\tStradavarius, Strad\nn04331277\tstraight chair, side chair\nn04331443\tstraightedge\nn04331639\tstraightener\nn04331765\tstraight flute, straight-fluted drill\nn04331892\tstraight pin\nn04332074\tstraight razor\nn04332243\tstrainer\nn04332580\tstraitjacket, straightjacket\nn04332987\tstrap\nn04333129\tstrap\nn04333869\tstrap hinge, joint hinge\nn04334105\tstrapless\nn04334365\tstreamer fly\nn04334504\tstreamliner\nn04334599\tstreet\nn04335209\tstreet\nn04335435\tstreetcar, tram, tramcar, trolley, trolley car\nn04335693\tstreet clothes\nn04335886\tstreetlight, street lamp\nn04336792\tstretcher\nn04337157\tstretcher\nn04337287\tstretch pants\nn04337503\tstrickle\nn04337650\tstrickle\nn04338517\tstringed instrument\nn04338963\tstringer\nn04339062\tstringer\nn04339191\tstring tie\nn04339638\tstrip\nn04339879\tstrip lighting\nn04340019\tstrip mall\nn04340521\tstroboscope, strobe, strobe light\nn04340750\tstrongbox, deedbox\nn04340935\tstronghold, fastness\nn04341133\tstrongroom\nn04341288\tstrop\nn04341414\tstructural member\nn04341686\tstructure, construction\nn04343511\tstudent center\nn04343630\tstudent lamp\nn04343740\tstudent union\nn04344003\tstud finder\nn04344734\tstudio apartment, studio\nn04344873\tstudio couch, day 
bed\nn04345028\tstudy\nn04345201\tstudy hall\nn04345787\tstuffing nut, packing nut\nn04346003\tstump\nn04346157\tstun gun, stun baton\nn04346328\tstupa, tope\nn04346428\tsty, pigsty, pigpen\nn04346511\tstylus, style\nn04346679\tstylus\nn04346855\tsub-assembly\nn04347119\tsubcompact, subcompact car\nn04347519\tsubmachine gun\nn04347754\tsubmarine, pigboat, sub, U-boat\nn04348070\tsubmarine torpedo\nn04348184\tsubmersible, submersible warship\nn04348359\tsubmersible\nn04348988\tsubtracter\nn04349189\tsubway token\nn04349306\tsubway train\nn04349401\tsubwoofer\nn04349913\tsuction cup\nn04350104\tsuction pump\nn04350235\tsudatorium, sudatory\nn04350458\tsuede cloth, suede\nn04350581\tsugar bowl\nn04350688\tsugar refinery\nn04350769\tsugar spoon, sugar shell\nn04350905\tsuit, suit of clothes\nn04351550\tsuite, rooms\nn04351699\tsuiting\nn04353573\tsulky\nn04354026\tsummer house\nn04354182\tsumo ring\nn04354387\tsump\nn04354487\tsump pump\nn04354589\tsunbonnet\nn04355115\tSunday best, Sunday clothes\nn04355267\tsun deck\nn04355338\tsundial\nn04355511\tsundress\nn04355684\tsundries\nn04355821\tsun gear\nn04355933\tsunglass\nn04356056\tsunglasses, dark glasses, shades\nn04356595\tsunhat, sun hat\nn04356772\tsunlamp, sun lamp, sunray lamp, sun-ray lamp\nn04356925\tsun parlor, sun parlour, sun porch, sunporch, sunroom, sun lounge, solarium\nn04357121\tsunroof, sunshine-roof\nn04357314\tsunscreen, sunblock, sun blocker\nn04357531\tsunsuit\nn04357930\tsupercharger\nn04358117\tsupercomputer\nn04358256\tsuperconducting supercollider\nn04358491\tsuperhighway, information superhighway\nn04358707\tsupermarket\nn04358874\tsuperstructure\nn04359034\tsupertanker\nn04359124\tsupper club\nn04359217\tsupplejack\nn04359335\tsupply chamber\nn04359500\tsupply closet\nn04359589\tsupport\nn04360501\tsupport\nn04360798\tsupport column\nn04360914\tsupport hose, support stocking\nn04361095\tsupporting structure\nn04361260\tsupporting tower\nn04361937\tsurcoat\nn04362624\tsurface gauge, surface 
gage, scribing block\nn04362821\tsurface lift\nn04362972\tsurface search radar\nn04363082\tsurface ship\nn04363210\tsurface-to-air missile, SAM\nn04363412\tsurface-to-air missile system\nn04363671\tsurfboat\nn04363777\tsurcoat\nn04363874\tsurgeon's knot\nn04363991\tsurgery\nn04364160\tsurge suppressor, surge protector, spike suppressor, spike arrester, lightning arrester\nn04364397\tsurgical dressing\nn04364545\tsurgical instrument\nn04364827\tsurgical knife\nn04364994\tsurplice\nn04365112\tsurrey\nn04365229\tsurtout\nn04365328\tsurveillance system\nn04365484\tsurveying instrument, surveyor's instrument\nn04365751\tsurveyor's level\nn04366033\tsushi bar\nn04366116\tsuspension, suspension system\nn04366367\tsuspension bridge\nn04366832\tsuspensory, suspensory bandage\nn04367011\tsustaining pedal, loud pedal\nn04367371\tsuture, surgical seam\nn04367480\tswab, swob, mop\nn04367746\tswab\nn04367950\tswaddling clothes, swaddling bands\nn04368109\tswag\nn04368235\tswage block\nn04368365\tswagger stick\nn04368496\tswallow-tailed coat, swallowtail, morning coat\nn04368695\tswamp buggy, marsh buggy\nn04368840\tswan's down\nn04369025\tswathe, wrapping\nn04369282\tswatter, flyswatter, flyswat\nn04369485\tsweat bag\nn04369618\tsweatband\nn04370048\tsweater, jumper\nn04370288\tsweat pants, sweatpants\nn04370456\tsweatshirt\nn04370600\tsweatshop\nn04370774\tsweat suit, sweatsuit, sweats, workout suit\nn04370955\tsweep, sweep oar\nn04371050\tsweep hand, sweep-second\nn04371430\tswimming trunks, bathing trunks\nn04371563\tswimsuit, swimwear, bathing suit, swimming costume, bathing costume\nn04371774\tswing\nn04371979\tswing door, swinging door\nn04372370\tswitch, electric switch, electrical switch\nn04373089\tswitchblade, switchblade knife, flick-knife, flick knife\nn04373428\tswitch engine, donkey engine\nn04373563\tswivel\nn04373704\tswivel chair\nn04373795\tswizzle stick\nn04373894\tsword, blade, brand, steel\nn04374315\tsword cane, sword stick\nn04374521\tS 
wrench\nn04374735\tsynagogue, temple, tabernacle\nn04374907\tsynchrocyclotron\nn04375080\tsynchroflash\nn04375241\tsynchromesh\nn04375405\tsynchronous converter, rotary, rotary converter\nn04375615\tsynchronous motor\nn04375775\tsynchrotron\nn04375926\tsynchroscope, synchronoscope, synchronizer, synchroniser\nn04376400\tsynthesizer, synthesiser\nn04376876\tsyringe\nn04377057\tsystem\nn04378489\ttabard\nn04378651\tTabernacle\nn04378956\ttabi, tabis\nn04379096\ttab key, tab\nn04379243\ttable\nn04379964\ttable\nn04380255\ttablefork\nn04380346\ttable knife\nn04380533\ttable lamp\nn04380916\ttable saw\nn04381073\ttablespoon\nn04381450\ttablet-armed chair\nn04381587\ttable-tennis table, ping-pong table, pingpong table\nn04381724\ttable-tennis racquet, table-tennis bat, pingpong paddle\nn04381860\ttabletop\nn04381994\ttableware\nn04382334\ttabor, tabour\nn04382438\ttaboret, tabouret\nn04382537\ttachistoscope, t-scope\nn04382695\ttachograph\nn04382880\ttachometer, tach\nn04383015\ttachymeter, tacheometer\nn04383130\ttack\nn04383301\ttack hammer\nn04383839\ttaffeta\nn04383923\ttaffrail\nn04384593\ttailgate, tailboard\nn04384910\ttaillight, tail lamp, rear light, rear lamp\nn04385079\ttailor-made\nn04385157\ttailor's chalk\nn04385536\ttailpipe\nn04385799\ttail rotor, anti-torque rotor\nn04386051\ttailstock\nn04386456\ttake-up\nn04386664\ttalaria\nn04386792\ttalcum, talcum powder\nn04387095\ttam, tam-o'-shanter, tammy\nn04387201\ttambour\nn04387261\ttambour, embroidery frame, embroidery hoop\nn04387400\ttambourine\nn04387531\ttammy\nn04387706\ttamp, tamper, tamping bar\nn04387932\tTampax\nn04388040\ttampion, tompion\nn04388162\ttampon\nn04388473\ttandoor\nn04388574\ttangram\nn04388743\ttank, storage tank\nn04389033\ttank, army tank, armored combat vehicle, armoured combat vehicle\nn04389430\ttankard\nn04389521\ttank car, tank\nn04389718\ttank destroyer\nn04389854\ttank engine, tank locomotive\nn04389999\ttanker plane\nn04390483\ttank shell\nn04390577\ttank 
top\nn04390873\ttannoy\nn04390977\ttap, spigot\nn04391445\ttapa, tappa\nn04391838\ttape, tape recording, taping\nn04392113\ttape, tapeline, tape measure\nn04392526\ttape deck\nn04392764\ttape drive, tape transport, transport\nn04392985\ttape player\nn04393095\ttape recorder, tape machine\nn04393301\ttaper file\nn04393549\ttapestry, tapis\nn04393808\ttappet\nn04393913\ttap wrench\nn04394031\ttare\nn04394261\ttarget, butt\nn04394421\ttarget acquisition system\nn04394630\ttarmacadam, tarmac, macadam\nn04395024\ttarpaulin, tarp\nn04395106\ttartan, plaid\nn04395332\ttasset, tasse\nn04395651\ttattoo\nn04395875\ttavern, tap house\nn04396226\ttawse\nn04396335\ttaximeter\nn04396650\tT-bar lift, T-bar, Alpine lift\nn04396808\ttea bag\nn04396902\ttea ball\nn04397027\ttea cart, teacart, tea trolley, tea wagon\nn04397168\ttea chest\nn04397261\tteaching aid\nn04397452\tteacup\nn04397645\ttea gown\nn04397768\tteakettle\nn04397860\ttea maker\nn04398044\tteapot\nn04398497\tteashop, teahouse, tearoom, tea parlor, tea parlour\nn04398688\tteaspoon\nn04398834\ttea-strainer\nn04398951\ttea table\nn04399046\ttea tray\nn04399158\ttea urn\nn04399382\tteddy, teddy bear\nn04399537\ttee, golf tee\nn04399846\ttee hinge, T hinge\nn04400109\ttelecom hotel, telco building\nn04400289\ttelecommunication system, telecom system, telecommunication equipment, telecom equipment\nn04400499\ttelegraph, telegraphy\nn04400737\ttelegraph key\nn04400899\ttelemeter\nn04401088\ttelephone, phone, telephone set\nn04401578\ttelephone bell\nn04401680\ttelephone booth, phone booth, call box, telephone box, telephone kiosk\nn04401828\ttelephone cord, phone cord\nn04401949\ttelephone jack, phone jack\nn04402057\ttelephone line, phone line, telephone circuit, subscriber line, line\nn04402342\ttelephone plug, phone plug\nn04402449\ttelephone pole, telegraph pole, telegraph post\nn04402580\ttelephone receiver, receiver\nn04402746\ttelephone system, phone system\nn04402984\ttelephone wire, telephone line, telegraph wire, 
telegraph line\nn04403413\ttelephoto lens, zoom lens\nn04403524\tTeleprompter\nn04403638\ttelescope, scope\nn04403925\ttelescopic sight, telescope sight\nn04404072\ttelethermometer\nn04404200\tteletypewriter, teleprinter, teletype machine, telex, telex machine\nn04404412\ttelevision, television system\nn04404817\ttelevision antenna, tv-antenna\nn04404997\ttelevision camera, tv camera, camera\nn04405540\ttelevision equipment, video equipment\nn04405762\ttelevision monitor, tv monitor\nn04405907\ttelevision receiver, television, television set, tv, tv set, idiot box, boob tube, telly, goggle box\nn04406239\ttelevision room, tv room\nn04406552\ttelevision transmitter\nn04406687\ttelpher, telfer\nn04406817\ttelpherage, telferage\nn04407257\ttempera, poster paint, poster color, poster colour\nn04407435\ttemple\nn04407686\ttemple\nn04408871\ttemporary hookup, patch\nn04409011\ttender, supply ship\nn04409128\ttender, ship's boat, pinnace, cutter\nn04409279\ttender\nn04409384\ttenement, tenement house\nn04409515\ttennis ball\nn04409625\ttennis camp\nn04409806\ttennis racket, tennis racquet\nn04409911\ttenon\nn04410086\ttenor drum, tom-tom\nn04410365\ttenoroon\nn04410485\ttenpenny nail\nn04410565\ttenpin\nn04410663\ttensimeter\nn04410760\ttensiometer\nn04410886\ttensiometer\nn04411019\ttensiometer\nn04411264\ttent, collapsible shelter\nn04411835\ttenter\nn04411966\ttenterhook\nn04412097\ttent-fly, rainfly, fly sheet, fly, tent flap\nn04412300\ttent peg\nn04412416\ttepee, tipi, teepee\nn04413151\tterminal, pole\nn04413419\tterminal\nn04413969\tterraced house\nn04414101\tterra cotta\nn04414199\tterrarium\nn04414319\tterra sigillata, Samian ware\nn04414476\tterry, terry cloth, terrycloth\nn04414675\tTesla coil\nn04414909\ttessera\nn04415257\ttest equipment\nn04415663\ttest rocket, research rocket, test instrument vehicle\nn04415815\ttest room, testing room\nn04416005\ttestudo\nn04416901\ttetraskelion, tetraskele\nn04417086\ttetrode\nn04417180\ttextile 
machine\nn04417361\ttextile mill\nn04417672\tthatch, thatched roof\nn04417809\ttheater, theatre, house\nn04418357\ttheater curtain, theatre curtain\nn04418644\ttheater light\nn04419073\ttheodolite, transit\nn04419642\ttheremin\nn04419868\tthermal printer\nn04420024\tthermal reactor\nn04420720\tthermocouple, thermocouple junction\nn04421083\tthermoelectric thermometer, thermel, electric thermometer\nn04421258\tthermograph, thermometrograph\nn04421417\tthermograph\nn04421582\tthermohydrometer, thermogravimeter\nn04421740\tthermojunction\nn04421872\tthermometer\nn04422409\tthermonuclear reactor, fusion reactor\nn04422566\tthermopile\nn04422727\tthermos, thermos bottle, thermos flask\nn04422875\tthermostat, thermoregulator\nn04423552\tthigh pad\nn04423687\tthill\nn04423845\tthimble\nn04424692\tthinning shears\nn04425804\tthird base, third\nn04425977\tthird gear, third\nn04426184\tthird rail\nn04426316\tthong\nn04426427\tthong\nn04427216\tthree-centered arch, basket-handle arch\nn04427473\tthree-decker\nn04427559\tthree-dimensional radar, 3d radar\nn04427715\tthree-piece suit\nn04427857\tthree-quarter binding\nn04428008\tthree-way switch, three-point switch\nn04428191\tthresher, thrasher, threshing machine\nn04428382\tthreshing floor\nn04428634\tthriftshop, second-hand store\nn04429038\tthroat protector\nn04429376\tthrone\nn04430475\tthrust bearing\nn04430605\tthruster\nn04430896\tthumb\nn04431025\tthumbhole\nn04431436\tthumbscrew\nn04431648\tthumbstall\nn04431745\tthumbtack, drawing pin, pushpin\nn04431925\tthunderer\nn04432043\tthwart, cross thwart\nn04432203\ttiara\nn04432662\tticking\nn04432785\ttickler coil\nn04433377\ttie, tie beam\nn04433585\ttie, railroad tie, crosstie, sleeper\nn04434207\ttie rack\nn04434531\ttie rod\nn04434932\ttights, leotards\nn04435180\ttile\nn04435552\ttile cutter\nn04435653\ttile roof\nn04435759\ttiller\nn04435870\ttilter\nn04436012\ttilt-top table, tip-top table, tip table\nn04436185\ttimber\nn04436329\ttimber\nn04436401\ttimber 
hitch\nn04436542\ttimbrel\nn04436832\ttime bomb, infernal machine\nn04436992\ttime capsule\nn04437276\ttime clock\nn04437380\ttime-delay measuring instrument, time-delay measuring system\nn04437670\ttime-fuse\nn04437953\ttimepiece, timekeeper, horologe\nn04438304\ttimer\nn04438507\ttimer\nn04438643\ttime-switch\nn04438897\ttin\nn04439505\ttinderbox\nn04439585\ttine\nn04439712\ttinfoil, tin foil\nn04440597\ttippet\nn04440963\ttire chain, snow chain\nn04441093\ttire iron, tire tool\nn04441528\ttitfer\nn04441662\ttithe barn\nn04441790\ttitrator\nn04442312\ttoaster\nn04442441\ttoaster oven\nn04442582\ttoasting fork\nn04442741\ttoastrack\nn04443164\ttobacco pouch\nn04443257\ttobacco shop, tobacconist shop, tobacconist\nn04443433\ttoboggan\nn04443766\ttoby, toby jug, toby fillpot jug\nn04444121\ttocsin, warning bell\nn04444218\ttoe\nn04444749\ttoecap\nn04444953\ttoehold\nn04445040\ttoga\nn04445154\ttoga virilis\nn04445327\ttoggle\nn04445610\ttoggle bolt\nn04445782\ttoggle joint\nn04445952\ttoggle switch, toggle, on-off switch, on/off switch\nn04446162\ttogs, threads, duds\nn04446276\ttoilet, lavatory, lav, can, john, privy, bathroom\nn04446844\ttoilet bag, sponge bag\nn04447028\ttoilet bowl\nn04447156\ttoilet kit, travel kit\nn04447276\ttoilet powder, bath powder, dusting powder\nn04447443\ttoiletry, toilet articles\nn04447861\ttoilet seat\nn04448070\ttoilet water, eau de toilette\nn04448185\ttokamak\nn04448361\ttoken\nn04449290\ttollbooth, tolbooth, tollhouse\nn04449449\ttoll bridge\nn04449550\ttollgate, tollbar\nn04449700\ttoll line\nn04449966\ttomahawk, hatchet\nn04450133\tTommy gun, Thompson submachine gun\nn04450243\ttomograph\nn04450465\ttone arm, pickup, pickup arm\nn04450640\ttoner\nn04450749\ttongs, pair of tongs\nn04450994\ttongue\nn04451139\ttongue and groove joint\nn04451318\ttongue depressor\nn04451636\ttonometer\nn04451818\ttool\nn04452528\ttool bag\nn04452615\ttoolbox, tool chest, tool cabinet, tool case\nn04452757\ttoolshed, 
toolhouse\nn04452848\ttooth\nn04453037\ttooth\nn04453156\ttoothbrush\nn04453390\ttoothpick\nn04453666\ttop\nn04453910\ttop, cover\nn04454654\ttopgallant, topgallant mast\nn04454792\ttopgallant, topgallant sail\nn04454908\ttopiary\nn04455048\ttopknot\nn04455250\ttopmast\nn04455579\ttopper\nn04455652\ttopsail\nn04456011\ttoque\nn04456115\ttorch\nn04456472\ttorpedo\nn04456734\ttorpedo\nn04457157\ttorpedo\nn04457326\ttorpedo boat\nn04457474\ttorpedo-boat destroyer\nn04457638\ttorpedo tube\nn04457767\ttorque converter\nn04457910\ttorque wrench\nn04458201\ttorture chamber\nn04458633\ttotem pole\nn04458843\ttouch screen, touchscreen\nn04459018\ttoupee, toupe\nn04459122\ttouring car, phaeton, tourer\nn04459243\ttourist class, third class\nn04459362\ttowel\nn04459610\ttoweling, towelling\nn04459773\ttowel rack, towel horse\nn04459909\ttowel rail, towel bar\nn04460130\ttower\nn04461437\ttown hall\nn04461570\ttowpath, towing path\nn04461696\ttow truck, tow car, wrecker\nn04461879\ttoy\nn04462011\ttoy box, toy chest\nn04462240\ttoyshop\nn04462576\ttrace detector\nn04463679\ttrack, rail, rails, runway\nn04464125\ttrack\nn04464615\ttrackball\nn04464852\ttracked vehicle\nn04465050\ttract house\nn04465203\ttract housing\nn04465358\ttraction engine\nn04465501\ttractor\nn04465666\ttractor\nn04466871\ttrail bike, dirt bike, scrambler\nn04467099\ttrailer, house trailer\nn04467307\ttrailer\nn04467506\ttrailer camp, trailer park\nn04467665\ttrailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi\nn04467899\ttrailing edge\nn04468005\ttrain, railroad train\nn04469003\ttramline, tramway, streetcar track\nn04469251\ttrammel\nn04469514\ttrampoline\nn04469684\ttramp steamer, tramp\nn04469813\ttramway, tram, aerial tramway, cable tramway, ropeway\nn04470741\ttransdermal patch, skin patch\nn04471148\ttransept\nn04471315\ttransformer\nn04471632\ttransistor, junction transistor, electronic transistor\nn04471912\ttransit instrument\nn04472243\ttransmission, transmission 
system\nn04472563\ttransmission shaft\nn04472726\ttransmitter, sender\nn04472961\ttransom, traverse\nn04473108\ttransom, transom window, fanlight\nn04473275\ttransponder\nn04473884\ttransporter\nn04474035\ttransporter, car transporter\nn04474187\ttransport ship\nn04474466\ttrap\nn04475309\ttrap door\nn04475411\ttrapeze\nn04475496\ttrave, traverse, crossbeam, crosspiece\nn04475631\ttravel iron\nn04475749\ttrawl, dragnet, trawl net\nn04475900\ttrawl, trawl line, spiller, setline, trotline\nn04476116\ttrawler, dragger\nn04476259\ttray\nn04476526\ttray cloth\nn04476831\ttread\nn04476972\ttread\nn04477219\ttreadmill, treadwheel, tread-wheel\nn04477387\ttreadmill\nn04477548\ttreasure chest\nn04477725\ttreasure ship\nn04478066\ttreenail, trenail, trunnel\nn04478383\ttrefoil arch\nn04478512\ttrellis, treillage\nn04478657\ttrench\nn04479046\ttrench coat\nn04479287\ttrench knife\nn04479405\ttrepan\nn04479526\ttrepan, trephine\nn04479694\ttrestle\nn04479823\ttrestle\nn04479939\ttrestle bridge\nn04480033\ttrestle table\nn04480141\ttrestlework\nn04480303\ttrews\nn04480527\ttrial balloon\nn04480853\ttriangle\nn04480995\ttriangle\nn04481524\ttriclinium\nn04481642\ttriclinium\nn04482177\ttricorn, tricorne\nn04482297\ttricot\nn04482393\ttricycle, trike, velocipede\nn04482975\ttrident\nn04483073\ttrigger\nn04483307\ttrimaran\nn04483925\ttrimmer\nn04484024\ttrimmer arch\nn04484432\ttriode\nn04485082\ttripod\nn04485423\ttriptych\nn04485586\ttrip wire\nn04485750\ttrireme\nn04485884\ttriskelion, triskele\nn04486054\ttriumphal arch\nn04486213\ttrivet\nn04486322\ttrivet\nn04486616\ttroika\nn04486934\ttroll\nn04487081\ttrolleybus, trolley coach, trackless trolley\nn04487394\ttrombone\nn04487724\ttroop carrier, troop transport\nn04487894\ttroopship\nn04488202\ttrophy case\nn04488427\ttrough\nn04488530\ttrouser\nn04488742\ttrouser cuff\nn04488857\ttrouser press, pants presser\nn04489008\ttrouser, pant\nn04489695\ttrousseau\nn04489817\ttrowel\nn04490091\ttruck, motortruck\nn04491312\ttrumpet 
arch\nn04491388\ttruncheon, nightstick, baton, billy, billystick, billy club\nn04491638\ttrundle bed, trundle, truckle bed, truckle\nn04491769\ttrunk\nn04491934\ttrunk hose\nn04492060\ttrunk lid\nn04492157\ttrunk line\nn04492375\ttruss\nn04492749\ttruss bridge\nn04493109\ttry square\nn04493259\tT-square\nn04493381\ttub, vat\nn04494204\ttube, vacuum tube, thermionic vacuum tube, thermionic tube, electron tube, thermionic valve\nn04495051\ttuck box\nn04495183\ttucker\nn04495310\ttucker-bag\nn04495450\ttuck shop\nn04495555\tTudor arch, four-centered arch\nn04495698\ttudung\nn04495843\ttugboat, tug, towboat, tower\nn04496614\ttulle\nn04496726\ttumble-dryer, tumble drier\nn04496872\ttumbler\nn04497249\ttumbrel, tumbril\nn04497442\ttun\nn04497570\ttunic\nn04497801\ttuning fork\nn04498275\ttupik, tupek, sealskin tent\nn04498389\tturban\nn04498523\tturbine\nn04498873\tturbogenerator\nn04499062\ttureen\nn04499300\tTurkish bath\nn04499446\tTurkish towel, terry towel\nn04499554\tTurk's head\nn04499810\tturnbuckle\nn04500060\tturner, food turner\nn04500390\tturnery\nn04501127\tturnpike\nn04501281\tturnspit\nn04501370\tturnstile\nn04501550\tturntable\nn04501837\tturntable, lazy Susan\nn04501947\tturret\nn04502059\tturret clock\nn04502197\tturtleneck, turtle, polo-neck\nn04502502\ttweed\nn04502670\ttweeter\nn04502851\ttwenty-two, .22\nn04502989\ttwenty-two pistol\nn04503073\ttwenty-two rifle\nn04503155\ttwill\nn04503269\ttwill, twill weave\nn04503413\ttwin bed\nn04503499\ttwinjet\nn04503593\ttwist bit, twist drill\nn04503705\ttwo-by-four\nn04504038\ttwo-man tent\nn04504141\ttwo-piece, two-piece suit, lounge suit\nn04504770\ttypesetting machine\nn04505036\ttypewriter\nn04505345\ttypewriter carriage\nn04505470\ttypewriter keyboard\nn04505888\ttyrolean, tirolean\nn04506289\tuke, ukulele\nn04506402\tulster\nn04506506\tultracentrifuge\nn04506688\tultramicroscope, dark-field microscope\nn04506895\tUltrasuede\nn04506994\tultraviolet lamp, ultraviolet 
source\nn04507155\tumbrella\nn04507326\tumbrella tent\nn04507453\tundercarriage\nn04507689\tundercoat, underseal\nn04508163\tundergarment, unmentionable\nn04508489\tunderpants\nn04508949\tunderwear, underclothes, underclothing\nn04509171\tundies\nn04509260\tuneven parallel bars, uneven bars\nn04509417\tunicycle, monocycle\nn04509592\tuniform\nn04510706\tuniversal joint, universal\nn04511002\tuniversity\nn04513827\tupholstery\nn04513998\tupholstery material\nn04514095\tupholstery needle\nn04514241\tuplift\nn04514648\tupper berth, upper\nn04515003\tupright, upright piano\nn04515444\tupset, swage\nn04515729\tupstairs\nn04515890\turceole\nn04516116\turn\nn04516214\turn\nn04516354\tused-car, secondhand car\nn04516672\tutensil\nn04517211\tUzi\nn04517408\tvacation home\nn04517823\tvacuum, vacuum cleaner\nn04517999\tvacuum chamber\nn04518132\tvacuum flask, vacuum bottle\nn04518343\tvacuum gauge, vacuum gage\nn04518643\tValenciennes, Valenciennes lace\nn04518764\tvalise\nn04519153\tvalve\nn04519536\tvalve\nn04519728\tvalve-in-head engine\nn04519887\tvambrace, lower cannon\nn04520170\tvan\nn04520382\tvan, caravan\nn04520784\tvane\nn04520962\tvaporizer, vaporiser\nn04521571\tvariable-pitch propeller\nn04521863\tvariometer\nn04521987\tvarnish\nn04522168\tvase\nn04523525\tvault\nn04523831\tvault, bank vault\nn04524142\tvaulting horse, long horse, buck\nn04524313\tvehicle\nn04524594\tVelcro\nn04524716\tvelocipede\nn04524941\tvelour, velours\nn04525038\tvelvet\nn04525191\tvelveteen\nn04525305\tvending machine\nn04525417\tveneer, veneering\nn04525584\tVenetian blind\nn04525821\tVenn diagram, Venn's diagram\nn04526520\tventilation, ventilation system, ventilating system\nn04526800\tventilation shaft\nn04526964\tventilator\nn04527648\tveranda, verandah, gallery\nn04528079\tverdigris\nn04528968\tvernier caliper, vernier micrometer\nn04529108\tvernier scale, vernier\nn04529681\tvertical file\nn04529962\tvertical stabilizer, vertical stabiliser, vertical fin, tail fin, 
tailfin\nn04530283\tvertical tail\nn04530456\tVery pistol, Verey pistol\nn04530566\tvessel, watercraft\nn04531098\tvessel\nn04531873\tvest, waistcoat\nn04532022\tvestiture\nn04532106\tvestment\nn04532398\tvest pocket\nn04532504\tvestry, sacristy\nn04532670\tviaduct\nn04532831\tvibraphone, vibraharp, vibes\nn04533042\tvibrator\nn04533199\tvibrator\nn04533499\tVictrola\nn04533594\tvicuna\nn04533700\tvideocassette\nn04533802\tvideocassette recorder, VCR\nn04533946\tvideodisk, videodisc, DVD\nn04534127\tvideo recording, video\nn04534359\tvideotape\nn04534520\tvideotape\nn04534895\tvigil light, vigil candle\nn04535252\tvilla\nn04535370\tvilla\nn04535524\tvilla\nn04536153\tviol\nn04536335\tviola\nn04536465\tviola da braccio\nn04536595\tviola da gamba, gamba, bass viol\nn04536765\tviola d'amore\nn04536866\tviolin, fiddle\nn04537436\tvirginal, pair of virginals\nn04538249\tviscometer, viscosimeter\nn04538403\tviscose rayon, viscose\nn04538552\tvise, bench vise\nn04538878\tvisor, vizor\nn04539053\tvisual display unit, VDU\nn04539203\tvivarium\nn04539407\tViyella\nn04539794\tvoile\nn04540053\tvolleyball\nn04540255\tvolleyball net\nn04540397\tvoltage regulator\nn04540761\tvoltaic cell, galvanic cell, primary cell\nn04541136\tvoltaic pile, pile, galvanic pile\nn04541320\tvoltmeter\nn04541662\tvomitory\nn04541777\tvon Neumann machine\nn04541987\tvoting booth\nn04542095\tvoting machine\nn04542329\tvoussoir\nn04542474\tvox angelica, voix celeste\nn04542595\tvox humana\nn04542715\twaders\nn04542858\twading pool\nn04542943\twaffle iron\nn04543158\twagon, waggon\nn04543509\twagon, coaster wagon\nn04543636\twagon tire\nn04543772\twagon wheel\nn04543924\twain\nn04543996\twainscot, wainscoting, wainscotting\nn04544325\twainscoting, wainscotting\nn04544450\twaist pack, belt bag\nn04545305\twalker, baby-walker, go-cart\nn04545471\twalker, Zimmer, Zimmer frame\nn04545748\twalker\nn04545858\twalkie-talkie, walky-talky\nn04545984\twalk-in\nn04546081\twalking shoe\nn04546194\twalking 
stick\nn04546340\tWalkman\nn04546595\twalk-up apartment, walk-up\nn04546855\twall\nn04547592\twall\nn04548280\twall clock\nn04548362\twallet, billfold, notecase, pocketbook\nn04549028\twall tent\nn04549122\twall unit\nn04549629\twand\nn04549721\tWankel engine, Wankel rotary engine, epitrochoidal engine\nn04549919\tward, hospital ward\nn04550184\twardrobe, closet, press\nn04550676\twardroom\nn04551055\twarehouse, storage warehouse\nn04551833\twarming pan\nn04552097\twar paint\nn04552348\twarplane, military plane\nn04552551\twar room\nn04552696\twarship, war vessel, combat ship\nn04553389\twash\nn04553561\twash-and-wear\nn04553703\twashbasin, handbasin, washbowl, lavabo, wash-hand basin\nn04554211\twashboard, splashboard\nn04554406\twashboard\nn04554684\twasher, automatic washer, washing machine\nn04554871\twasher\nn04554998\twashhouse\nn04555291\twashroom\nn04555400\twashstand, wash-hand stand\nn04555600\twashtub\nn04555700\twastepaper basket, waste-paper basket, wastebasket, waste basket, circular file\nn04555897\twatch, ticker\nn04556408\twatch cap\nn04556533\twatch case\nn04556664\twatch glass\nn04556948\twatchtower\nn04557308\twater-base paint\nn04557522\twater bed\nn04557648\twater bottle\nn04557751\twater butt\nn04558059\twater cart\nn04558199\twater chute\nn04558478\twater closet, closet, W.C., loo\nn04558804\twatercolor, water-color, watercolour, water-colour\nn04559023\twater-cooled reactor\nn04559166\twater cooler\nn04559451\twater faucet, water tap, tap, hydrant\nn04559620\twater filter\nn04559730\twater gauge, water gage, water glass\nn04559910\twater glass\nn04559994\twater hazard\nn04560113\twater heater, hot-water heater, hot-water tank\nn04560292\twatering can, watering pot\nn04560502\twatering cart\nn04560619\twater jacket\nn04560804\twater jug\nn04560882\twater jump\nn04561010\twater level\nn04561287\twater meter\nn04561422\twater mill\nn04561734\twaterproof\nn04561857\twaterproofing\nn04561965\twater pump\nn04562122\twater scooter, sea scooter, 
scooter\nn04562262\twater ski\nn04562496\twaterspout\nn04562935\twater tower\nn04563020\twater wagon, water waggon\nn04563204\twaterwheel, water wheel\nn04563413\twaterwheel, water wheel\nn04563560\twater wings\nn04563790\twaterworks\nn04564278\twattmeter\nn04564581\twaxwork, wax figure\nn04565039\tways, shipway, slipway\nn04565375\tweapon, arm, weapon system\nn04566257\tweaponry, arms, implements of war, weapons system, munition\nn04566561\tweapons carrier\nn04566756\tweathercock\nn04567098\tweatherglass\nn04567593\tweather satellite, meteorological satellite\nn04567746\tweather ship\nn04568069\tweathervane, weather vane, vane, wind vane\nn04568557\tweb, entanglement\nn04568713\tweb\nn04568841\twebbing\nn04569063\twebcam\nn04569520\twedge\nn04569822\twedge\nn04570118\twedgie\nn04570214\tWedgwood\nn04570416\tweeder, weed-whacker\nn04570532\tweeds, widow's weeds\nn04570815\tweekender\nn04570958\tweighbridge\nn04571292\tweight, free weight, exercising weight\nn04571566\tweir\nn04571686\tweir\nn04571800\twelcome wagon\nn04571958\tweld\nn04572121\twelder's mask\nn04572235\tweldment\nn04572935\twell\nn04573045\twellhead\nn04573281\twelt\nn04573379\tWeston cell, cadmium cell\nn04573513\twet bar\nn04573625\twet-bulb thermometer\nn04573832\twet cell\nn04573937\twet fly\nn04574067\twet suit\nn04574348\twhaleboat\nn04574471\twhaler, whaling ship\nn04574606\twhaling gun\nn04574999\twheel\nn04575723\twheel\nn04575824\twheel and axle\nn04576002\twheelchair\nn04576211\twheeled vehicle\nn04576971\twheelwork\nn04577139\twherry\nn04577293\twherry, Norfolk wherry\nn04577426\twhetstone\nn04577567\twhiffletree, whippletree, swingletree\nn04577769\twhip\nn04578112\twhipcord\nn04578329\twhipping post\nn04578559\twhipstitch, whipping, whipstitching\nn04578708\twhirler\nn04578801\twhisk, whisk broom\nn04578934\twhisk\nn04579056\twhiskey bottle\nn04579145\twhiskey jug\nn04579230\twhispering gallery, whispering dome\nn04579432\twhistle\nn04579667\twhistle\nn04579986\twhite\nn04580493\twhite 
goods\nn04581102\twhitewash\nn04581595\twhorehouse, brothel, bordello, bagnio, house of prostitution, house of ill repute, bawdyhouse, cathouse, sporting house\nn04581829\twick, taper\nn04582205\twicker, wickerwork, caning\nn04582349\twicker basket\nn04582771\twicket, hoop\nn04582869\twicket\nn04583022\twickiup, wikiup\nn04583212\twide-angle lens, fisheye lens\nn04583620\twidebody aircraft, wide-body aircraft, wide-body, twin-aisle airplane\nn04583888\twide wale\nn04583967\twidow's walk\nn04584056\tWiffle, Wiffle Ball\nn04584207\twig\nn04584373\twigwam\nn04585128\tWilton, Wilton carpet\nn04585318\twimple\nn04585456\twincey\nn04585626\twinceyette\nn04585745\twinch, windlass\nn04585980\tWinchester\nn04586072\twindbreak, shelterbelt\nn04586581\twinder, key\nn04586932\twind instrument, wind\nn04587327\twindjammer\nn04587404\twindmill, aerogenerator, wind generator\nn04587559\twindmill\nn04587648\twindow\nn04588739\twindow\nn04589190\twindow blind\nn04589325\twindow box\nn04589434\twindow envelope\nn04589593\twindow frame\nn04589890\twindow screen\nn04590021\twindow seat\nn04590129\twindow shade\nn04590263\twindowsill\nn04590553\twindshield, windscreen\nn04590746\twindshield wiper, windscreen wiper, wiper, wiper blade\nn04590933\tWindsor chair\nn04591056\tWindsor knot\nn04591157\tWindsor tie\nn04591249\twind tee\nn04591359\twind tunnel\nn04591517\twind turbine\nn04591631\twine bar\nn04591713\twine bottle\nn04591887\twine bucket, wine cooler\nn04592005\twine cask, wine barrel\nn04592099\twineglass\nn04592356\twinepress\nn04592465\twinery, wine maker\nn04592596\twineskin\nn04592741\twing\nn04593077\twing chair\nn04593185\twing nut, wing-nut, wing screw, butterfly nut, thumbnut\nn04593376\twing tip\nn04593524\twing tip\nn04593629\twinker, blinker, blinder\nn04593866\twiper, wiper arm, contact arm\nn04594114\twiper motor\nn04594218\twire\nn04594489\twire, conducting wire\nn04594742\twire cloth\nn04594828\twire cutter\nn04594919\twire gauge, wire gage\nn04595028\twireless 
local area network, WLAN, wireless fidelity, WiFi\nn04595285\twire matrix printer, wire printer, stylus printer\nn04595501\twire recorder\nn04595611\twire stripper\nn04595762\twirework, grillwork\nn04595855\twiring\nn04596116\twishing cap\nn04596492\twitness box, witness stand\nn04596742\twok\nn04596852\twoman's clothing\nn04597066\twood\nn04597309\twoodcarving\nn04597400\twood chisel\nn04597804\twoodenware\nn04597913\twooden spoon\nn04598136\twoodscrew\nn04598318\twoodshed\nn04598416\twood vise, woodworking vise, shoulder vise\nn04598582\twoodwind, woodwind instrument, wood\nn04598965\twoof, weft, filling, pick\nn04599124\twoofer\nn04599235\twool, woolen, woollen\nn04600312\tworkbasket, workbox, workbag\nn04600486\tworkbench, work bench, bench\nn04600912\twork-clothing, work-clothes\nn04601041\tworkhouse\nn04601159\tworkhouse\nn04601938\tworkpiece\nn04602762\tworkroom\nn04602840\tworks, workings\nn04602956\twork-shirt\nn04603399\tworkstation\nn04603729\tworktable, work table\nn04603872\tworkwear\nn04604276\tWorld Wide Web, WWW, web\nn04604644\tworm fence, snake fence, snake-rail fence, Virginia fence\nn04604806\tworm gear\nn04605057\tworm wheel\nn04605163\tworsted\nn04605321\tworsted, worsted yarn\nn04605446\twrap, wrapper\nn04605572\twraparound\nn04605726\twrapping, wrap, wrapper\nn04606251\twreck\nn04606574\twrench, spanner\nn04607035\twrestling mat\nn04607242\twringer\nn04607640\twrist pad\nn04607759\twrist pin, gudgeon pin\nn04607869\twristwatch, wrist watch\nn04607982\twriting arm\nn04608329\twriting desk\nn04608435\twriting desk\nn04608567\twriting implement\nn04608809\txerographic printer\nn04608923\tXerox, xerographic copier, Xerox machine\nn04609531\tX-ray film\nn04609651\tX-ray machine\nn04609811\tX-ray tube\nn04610013\tyacht, racing yacht\nn04610176\tyacht chair\nn04610274\tyagi, Yagi aerial\nn04610503\tyard\nn04610676\tyard\nn04611351\tyardarm\nn04611795\tyard marker\nn04611916\tyardstick, yard measure\nn04612026\tyarmulke, yarmulka, 
yarmelke\nn04612159\tyashmak, yashmac\nn04612257\tyataghan\nn04612373\tyawl, dandy\nn04612504\tyawl\nn04612840\tyoke\nn04613015\tyoke\nn04613158\tyoke, coupling\nn04613696\tyurt\nn04613939\tZamboni\nn04614505\tzero\nn04614655\tziggurat, zikkurat, zikurat\nn04614844\tzill\nn04615149\tzip gun\nn04615226\tzither, cither, zithern\nn04615644\tzoot suit\nn04682018\tshading\nn04950713\tgrain\nn04950952\twood grain, woodgrain, woodiness\nn04951071\tgraining, woodgraining\nn04951186\tmarbleization, marbleisation, marbleizing, marbleising\nn04951373\tlight, lightness\nn04951716\taura, aureole, halo, nimbus, glory, gloriole\nn04951875\tsunniness\nn04953296\tglint\nn04953678\topalescence, iridescence\nn04955160\tpolish, gloss, glossiness, burnish\nn04957356\tprimary color for pigments, primary colour for pigments\nn04957589\tprimary color for light, primary colour for light\nn04958634\tcolorlessness, colourlessness, achromatism, achromaticity\nn04958865\tmottle\nn04959061\tachromia\nn04959230\tshade, tint, tincture, tone\nn04959672\tchromatic color, chromatic colour, spectral color, spectral colour\nn04960277\tblack, blackness, inkiness\nn04960582\tcoal black, ebony, jet black, pitch black, sable, soot black\nn04961062\talabaster\nn04961331\tbone, ivory, pearl, off-white\nn04961691\tgray, grayness, grey, greyness\nn04962062\tash grey, ash gray, silver, silver grey, silver gray\nn04962240\tcharcoal, charcoal grey, charcoal gray, oxford grey, oxford gray\nn04963111\tsanguine\nn04963307\tTurkey red, alizarine red\nn04963588\tcrimson, ruby, deep red\nn04963740\tdark red\nn04964001\tclaret\nn04964799\tfuschia\nn04964878\tmaroon\nn04965179\torange, orangeness\nn04965451\treddish orange\nn04965661\tyellow, yellowness\nn04966543\tgamboge, lemon, lemon yellow, maize\nn04966941\tpale yellow, straw, wheat\nn04967191\tgreen, greenness, viridity\nn04967561\tgreenishness\nn04967674\tsea green\nn04967801\tsage green\nn04967882\tbottle green\nn04968056\temerald\nn04968139\tolive green, 
olive-green\nn04968749\tjade green, jade\nn04968895\tblue, blueness\nn04969242\tazure, cerulean, sapphire, lazuline, sky-blue\nn04969540\tsteel blue\nn04969798\tgreenish blue, aqua, aquamarine, turquoise, cobalt blue, peacock blue\nn04969952\tpurplish blue, royal blue\nn04970059\tpurple, purpleness\nn04970312\tTyrian purple\nn04970398\tindigo\nn04970470\tlavender\nn04970631\treddish purple, royal purple\nn04970916\tpink\nn04971211\tcarnation\nn04971313\trose, rosiness\nn04972350\tchestnut\nn04972451\tchocolate, coffee, deep brown, umber, burnt umber\nn04972801\tlight brown\nn04973020\ttan, topaz\nn04973291\tbeige, ecru\nn04973386\treddish brown, sepia, burnt sienna, Venetian red, mahogany\nn04973585\tbrick red\nn04973669\tcopper, copper color\nn04973816\tIndian red\nn04974145\tpuce\nn04974340\tolive\nn04974859\tultramarine\nn04975739\tcomplementary color, complementary\nn04976319\tpigmentation\nn04976952\tcomplexion, skin color, skin colour\nn04977412\truddiness, rosiness\nn04978561\tnonsolid color, nonsolid colour, dithered color, dithered colour\nn04979002\taposematic coloration, warning coloration\nn04979307\tcryptic coloration\nn04981658\tring\nn05102764\tcenter of curvature, centre of curvature\nn05218119\tcadaver, corpse, stiff, clay, remains\nn05233741\tmandibular notch\nn05235879\trib\nn05238282\tskin, tegument, cutis\nn05239437\tskin graft\nn05241218\tepidermal cell\nn05241485\tmelanocyte\nn05241662\tprickle cell\nn05242070\tcolumnar cell, columnar epithelial cell\nn05242239\tspongioblast\nn05242928\tsquamous cell\nn05244421\tamyloid plaque, amyloid protein plaque\nn05244755\tdental plaque, bacterial plaque\nn05244934\tmacule, macula\nn05245192\tfreckle, lentigo\nn05257476\tbouffant\nn05257967\tsausage curl\nn05258051\tforelock\nn05258627\tspit curl, kiss curl\nn05259914\tpigtail\nn05260127\tpageboy\nn05260240\tpompadour\nn05261310\tthatch\nn05262422\tsoup-strainer, toothbrush\nn05262534\tmustachio, moustachio, handle-bars\nn05262698\twalrus mustache, 
walrus moustache\nn05263183\tstubble\nn05263316\tvandyke beard, vandyke\nn05263448\tsoul patch, Attilio\nn05265736\tesophageal smear\nn05266096\tparaduodenal smear, duodenal smear\nn05266879\tspecimen\nn05278922\tpunctum\nn05279953\tglenoid fossa, glenoid cavity\nn05282652\tdiastema\nn05285623\tmarrow, bone marrow\nn05302499\tmouth, oral cavity, oral fissure, rima oris\nn05314075\tcanthus\nn05399034\tmilk\nn05399243\tmother's milk\nn05399356\tcolostrum, foremilk\nn05418717\tvein, vena, venous blood vessel\nn05427346\tganglion cell, gangliocyte\nn05442594\tX chromosome\nn05447757\tembryonic cell, formative cell\nn05448704\tmyeloblast\nn05448827\tsideroblast\nn05449196\tosteocyte\nn05449661\tmegalocyte, macrocyte\nn05449959\tleukocyte, leucocyte, white blood cell, white cell, white blood corpuscle, white corpuscle, WBC\nn05450617\thistiocyte\nn05451099\tfixed phagocyte\nn05451384\tlymphocyte, lymph cell\nn05453412\tmonoblast\nn05453657\tneutrophil, neutrophile\nn05453815\tmicrophage\nn05454833\tsickle cell\nn05454978\tsiderocyte\nn05455113\tspherocyte\nn05458173\tootid\nn05458576\toocyte\nn05459101\tspermatid\nn05459457\tLeydig cell, Leydig's cell\nn05459769\tstriated muscle cell, striated muscle fiber\nn05460759\tsmooth muscle cell\nn05464534\tRanvier's nodes, nodes of Ranvier\nn05467054\tneuroglia, glia\nn05467758\tastrocyte\nn05468098\tprotoplasmic astrocyte\nn05468739\toligodendrocyte\nn05469664\tproprioceptor\nn05469861\tdendrite\nn05475397\tsensory fiber, afferent fiber\nn05482922\tsubarachnoid space\nn05486510\tcerebral cortex, cerebral mantle, pallium, cortex\nn05491154\trenal cortex\nn05526957\tprepuce, foreskin\nn05538625\thead, caput\nn05539947\tscalp\nn05541509\tfrontal eminence\nn05542893\tsuture, sutura, fibrous joint\nn05545879\tforamen magnum\nn05571341\tesophagogastric junction, oesophagogastric junction\nn05578095\theel\nn05581932\tcuticle\nn05584746\thangnail, agnail\nn05586759\texoskeleton\nn05604434\tabdominal 
wall\nn05716342\tlemon\nn06008896\tcoordinate axis\nn06209940\tlandscape\nn06254669\tmedium\nn06255081\tvehicle\nn06255613\tpaper\nn06259898\tchannel, transmission channel\nn06262567\tfilm, cinema, celluloid\nn06262943\tsilver screen\nn06263202\tfree press\nn06263369\tpress, public press\nn06263609\tprint media\nn06263762\tstorage medium, data-storage medium\nn06263895\tmagnetic storage medium, magnetic medium, magnetic storage\nn06266417\tjournalism, news media\nn06266633\tFleet Street\nn06266710\tphotojournalism\nn06266878\tnews photography\nn06266973\trotogravure\nn06267145\tnewspaper, paper\nn06267564\tdaily\nn06267655\tgazette\nn06267758\tschool newspaper, school paper\nn06267893\ttabloid, rag, sheet\nn06267991\tyellow journalism, tabloid, tab\nn06271778\ttelecommunication, telecom\nn06272290\ttelephone, telephony\nn06272612\tvoice mail, voicemail\nn06272803\tcall, phone call, telephone call\nn06273207\tcall-back\nn06273294\tcollect call\nn06273414\tcall forwarding\nn06273555\tcall-in\nn06273743\tcall waiting\nn06273890\tcrank call\nn06273986\tlocal call\nn06274092\tlong distance, long-distance call, trunk call\nn06274292\ttoll call\nn06274546\twake-up call\nn06274760\tthree-way calling\nn06274921\ttelegraphy\nn06275095\tcable, cablegram, overseas telegram\nn06275353\twireless\nn06275471\tradiotelegraph, radiotelegraphy, wireless telegraphy\nn06276501\tradiotelephone, radiotelephony, wireless telephone\nn06276697\tbroadcasting\nn06276902\tRediffusion\nn06277025\tmultiplex\nn06277135\tradio, radiocommunication, wireless\nn06277280\ttelevision, telecasting, TV, video\nn06278338\tcable television, cable\nn06278475\thigh-definition television, HDTV\nn06281040\treception\nn06281175\tsignal detection, detection\nn06340977\tHakham\nn06359193\tweb site, website, internet site, site\nn06359467\tchat room, chatroom\nn06359657\tportal site, portal\nn06415688\tjotter\nn06417096\tbreviary\nn06418693\twordbook\nn06419354\tdesk dictionary, collegiate 
dictionary\nn06423496\treckoner, ready reckoner\nn06470073\tdocument, written document, papers\nn06591815\talbum, record album\nn06592078\tconcept album\nn06592281\trock opera\nn06592421\ttribute album, benefit album\nn06595351\tmagazine, mag\nn06596179\tcolour supplement\nn06596364\tcomic book\nn06596474\tnews magazine\nn06596607\tpulp, pulp magazine\nn06596727\tslick, slick magazine, glossy\nn06596845\ttrade magazine\nn06613686\tmovie, film, picture, moving picture, moving-picture show, motion picture, motion-picture show, picture show, pic, flick\nn06614901\touttake\nn06616216\tshoot-'em-up\nn06618653\tspaghetti Western\nn06625062\tencyclical, encyclical letter\nn06785654\tcrossword puzzle, crossword\nn06793231\tsign\nn06794110\tstreet sign\nn06874185\ttraffic light, traffic signal, stoplight\nn06883725\tswastika, Hakenkreuz\nn06892775\tconcert\nn06998748\tartwork, art, graphics, nontextual matter\nn07005523\tlobe\nn07248320\tbook jacket, dust cover, dust jacket, dust wrapper\nn07273802\tcairn\nn07461050\tthree-day event\nn07556406\tcomfort food\nn07556637\tcomestible, edible, eatable, pabulum, victual, victuals\nn07556872\ttuck\nn07556970\tcourse\nn07557165\tdainty, delicacy, goody, kickshaw, treat\nn07557434\tdish\nn07560193\tfast food\nn07560331\tfinger food\nn07560422\tingesta\nn07560542\tkosher\nn07560652\tfare\nn07560903\tdiet\nn07561112\tdiet\nn07561590\tdietary\nn07561848\tbalanced diet\nn07562017\tbland diet, ulcer diet\nn07562172\tclear liquid diet\nn07562379\tdiabetic diet\nn07562495\tdietary supplement\nn07562651\tcarbohydrate loading, carbo loading\nn07562881\tfad diet\nn07562984\tgluten-free diet\nn07563207\thigh-protein diet\nn07563366\thigh-vitamin diet, vitamin-deficiency diet\nn07563642\tlight diet\nn07563800\tliquid diet\nn07564008\tlow-calorie diet\nn07564101\tlow-fat diet\nn07564292\tlow-sodium diet, low-salt diet, salt-free diet\nn07564515\tmacrobiotic diet\nn07564629\treducing diet, obesity diet\nn07564796\tsoft diet, pap, spoon 
food\nn07564971\tvegetarianism\nn07565083\tmenu\nn07565161\tchow, chuck, eats, grub\nn07565259\tboard, table\nn07565608\tmess\nn07565725\tration\nn07565945\tfield ration\nn07566092\tK ration\nn07566231\tC-ration\nn07566340\tfoodstuff, food product\nn07566863\tstarches\nn07567039\tbreadstuff\nn07567139\tcoloring, colouring, food coloring, food colouring, food color, food colour\nn07567390\tconcentrate\nn07567611\ttomato concentrate\nn07567707\tmeal\nn07567980\tkibble\nn07568095\tcornmeal, Indian meal\nn07568241\tfarina\nn07568389\tmatzo meal, matzoh meal, matzah meal\nn07568502\toatmeal, rolled oats\nn07568625\tpea flour\nn07568818\troughage, fiber\nn07568991\tbran\nn07569106\tflour\nn07569423\tplain flour\nn07569543\twheat flour\nn07569644\twhole wheat flour, graham flour, graham, whole meal flour\nn07569873\tsoybean meal, soybean flour, soy flour\nn07570021\tsemolina\nn07570530\tcorn gluten feed\nn07570720\tnutriment, nourishment, nutrition, sustenance, aliment, alimentation, victuals\nn07572353\tcommissariat, provisions, provender, viands, victuals\nn07572616\tlarder\nn07572858\tfrozen food, frozen foods\nn07572957\tcanned food, canned foods, canned goods, tinned goods\nn07573103\tcanned meat, tinned meat\nn07573347\tSpam\nn07573453\tdehydrated food, dehydrated foods\nn07573563\tsquare meal\nn07573696\tmeal, repast\nn07574176\tpotluck\nn07574426\trefection\nn07574504\trefreshment\nn07574602\tbreakfast\nn07574780\tcontinental breakfast, petit dejeuner\nn07574923\tbrunch\nn07575076\tlunch, luncheon, tiffin, dejeuner\nn07575226\tbusiness lunch\nn07575392\thigh tea\nn07575510\ttea, afternoon tea, teatime\nn07575726\tdinner\nn07575984\tsupper\nn07576182\tbuffet\nn07576438\tpicnic\nn07576577\tcookout\nn07576781\tbarbecue, barbeque\nn07576969\tclambake\nn07577144\tfish fry\nn07577374\tbite, collation, snack\nn07577538\tnosh\nn07577657\tnosh-up\nn07577772\tploughman's lunch\nn07577918\tcoffee break, tea break\nn07578093\tbanquet, feast, spread\nn07579575\tentree, main 
course\nn07579688\tpiece de resistance\nn07579787\tplate\nn07579917\tadobo\nn07580053\tside dish, side order, entremets\nn07580253\tspecial\nn07580359\tcasserole\nn07580470\tchicken casserole\nn07580592\tchicken cacciatore, chicken cacciatora, hunter's chicken\nn07581249\tantipasto\nn07581346\tappetizer, appetiser, starter\nn07581607\tcanape\nn07581775\tcocktail\nn07581931\tfruit cocktail\nn07582027\tcrab cocktail\nn07582152\tshrimp cocktail\nn07582277\thors d'oeuvre\nn07582441\trelish\nn07582609\tdip\nn07582811\tbean dip\nn07582892\tcheese dip\nn07582970\tclam dip\nn07583066\tguacamole\nn07583197\tsoup\nn07583865\tsoup du jour\nn07583978\talphabet soup\nn07584110\tconsomme\nn07584228\tmadrilene\nn07584332\tbisque\nn07584423\tborsch, borsh, borscht, borsht, borshch, bortsch\nn07584593\tbroth\nn07584859\tbarley water\nn07584938\tbouillon\nn07585015\tbeef broth, beef stock\nn07585107\tchicken broth, chicken stock\nn07585208\tbroth, stock\nn07585474\tstock cube\nn07585557\tchicken soup\nn07585644\tcock-a-leekie, cocky-leeky\nn07585758\tgazpacho\nn07585906\tgumbo\nn07585997\tjulienne\nn07586099\tmarmite\nn07586179\tmock turtle soup\nn07586318\tmulligatawny\nn07586485\toxtail soup\nn07586604\tpea soup\nn07586718\tpepper pot, Philadelphia pepper pot\nn07586894\tpetite marmite, minestrone, vegetable soup\nn07587023\tpotage, pottage\nn07587111\tpottage\nn07587206\tturtle soup, green turtle soup\nn07587331\teggdrop soup\nn07587441\tchowder\nn07587618\tcorn chowder\nn07587700\tclam chowder\nn07587819\tManhattan clam chowder\nn07587962\tNew England clam chowder\nn07588111\tfish chowder\nn07588193\twon ton, wonton, wonton soup\nn07588299\tsplit-pea soup\nn07588419\tgreen pea soup, potage St. 
Germain\nn07588574\tlentil soup\nn07588688\tScotch broth\nn07588817\tvichyssoise\nn07588947\tstew\nn07589458\tbigos\nn07589543\tBrunswick stew\nn07589724\tburgoo\nn07589872\tburgoo\nn07589967\tolla podrida, Spanish burgoo\nn07590068\tmulligan stew, mulligan, Irish burgoo\nn07590177\tpurloo, chicken purloo, poilu\nn07590320\tgoulash, Hungarian goulash, gulyas\nn07590502\thotchpotch\nn07590611\thot pot, hotpot\nn07590752\tbeef goulash\nn07590841\tpork-and-veal goulash\nn07590974\tporkholt\nn07591049\tIrish stew\nn07591162\toyster stew\nn07591236\tlobster stew\nn07591330\tlobscouse, lobscuse, scouse\nn07591473\tfish stew\nn07591586\tbouillabaisse\nn07591813\tmatelote\nn07591961\tpaella\nn07592094\tfricassee\nn07592317\tchicken stew\nn07592400\tturkey stew\nn07592481\tbeef stew\nn07592656\tragout\nn07592768\tratatouille\nn07592922\tsalmi\nn07593004\tpot-au-feu\nn07593107\tslumgullion\nn07593199\tsmorgasbord\nn07593471\tviand\nn07593774\tready-mix\nn07593972\tbrownie mix\nn07594066\tcake mix\nn07594155\tlemonade mix\nn07594250\tself-rising flour, self-raising flour\nn07594737\tchoice morsel, tidbit, titbit\nn07594840\tsavory, savoury\nn07595051\tcalf's-foot jelly\nn07595180\tcaramel, caramelized sugar\nn07595368\tlump sugar\nn07595649\tcane sugar\nn07595751\tcastor sugar, caster sugar\nn07595914\tpowdered sugar\nn07596046\tgranulated sugar\nn07596160\ticing sugar\nn07596362\tcorn sugar\nn07596452\tbrown sugar\nn07596566\tdemerara, demerara sugar\nn07596684\tsweet, confection\nn07596967\tconfectionery\nn07597145\tconfiture\nn07597263\tsweetmeat\nn07597365\tcandy, confect\nn07598256\tcandy bar\nn07598529\tcarob bar\nn07598622\thardbake\nn07598734\thard candy\nn07598928\tbarley-sugar, barley candy\nn07599068\tbrandyball\nn07599161\tjawbreaker\nn07599242\tlemon drop\nn07599383\tsourball\nn07599468\tpatty\nn07599554\tpeppermint patty\nn07599649\tbonbon\nn07599783\tbrittle, toffee, toffy\nn07599911\tpeanut brittle\nn07599998\tchewing gum, gum\nn07600177\tgum 
ball\nn07600285\tbubble gum\nn07600394\tbutterscotch\nn07600506\tcandied fruit, succade, crystallized fruit\nn07600696\tcandied apple, candy apple, taffy apple, caramel apple, toffee apple\nn07600895\tcrystallized ginger\nn07601025\tgrapefruit peel\nn07601175\tlemon peel\nn07601290\torange peel\nn07601407\tcandied citrus peel\nn07601572\tcandy cane\nn07601686\tcandy corn\nn07601809\tcaramel\nn07602650\tcenter, centre\nn07604956\tcomfit\nn07605040\tcotton candy, spun sugar, candyfloss\nn07605198\tdragee\nn07605282\tdragee\nn07605380\tfondant\nn07605474\tfudge\nn07605597\tchocolate fudge\nn07605693\tdivinity, divinity fudge\nn07605804\tpenuche, penoche, panoche, panocha\nn07605944\tgumdrop\nn07606058\tjujube\nn07606191\thoney crisp\nn07606278\tmint, mint candy\nn07606419\thorehound\nn07606538\tpeppermint, peppermint candy\nn07606669\tjelly bean, jelly egg\nn07606764\tkiss, candy kiss\nn07606933\tmolasses kiss\nn07607027\tmeringue kiss\nn07607138\tchocolate kiss\nn07607361\tlicorice, liquorice\nn07607492\tLife Saver\nn07607605\tlollipop, sucker, all-day sucker\nn07607707\tlozenge\nn07607832\tcachou\nn07607967\tcough drop, troche, pastille, pastil\nn07608098\tmarshmallow\nn07608245\tmarzipan, marchpane\nn07608339\tnougat\nn07608429\tnougat bar\nn07608533\tnut bar\nn07608641\tpeanut bar\nn07608721\tpopcorn ball\nn07608866\tpraline\nn07608980\trock candy\nn07609083\trock candy, rock\nn07609215\tsugar candy\nn07609316\tsugarplum\nn07609407\ttaffy\nn07609549\tmolasses taffy\nn07609632\ttruffle, chocolate truffle\nn07609728\tTurkish Delight\nn07609840\tdessert, sweet, afters\nn07610295\tambrosia, nectar\nn07610502\tambrosia\nn07610620\tbaked Alaska\nn07610746\tblancmange\nn07610890\tcharlotte\nn07611046\tcompote, fruit compote\nn07611148\tdumpling\nn07611267\tflan\nn07611358\tfrozen dessert\nn07611733\tjunket\nn07611839\tmousse\nn07611991\tmousse\nn07612137\tpavlova\nn07612273\tpeach melba\nn07612367\twhip\nn07612530\tprune whip\nn07612632\tpudding\nn07612996\tpudding, 
pud\nn07613158\tsyllabub, sillabub\nn07613266\ttiramisu\nn07613480\ttrifle\nn07613671\ttipsy cake\nn07613815\tjello, Jell-O\nn07614103\tapple dumpling\nn07614198\tice, frappe\nn07614348\twater ice, sorbet\nn07614500\tice cream, icecream\nn07614730\tice-cream cone\nn07614825\tchocolate ice cream\nn07615052\tNeapolitan ice cream\nn07615190\tpeach ice cream\nn07615289\tsherbert, sherbet\nn07615460\tstrawberry ice cream\nn07615569\ttutti-frutti\nn07615671\tvanilla ice cream\nn07615774\tice lolly, lolly, lollipop, popsicle\nn07615954\tice milk\nn07616046\tfrozen yogurt\nn07616174\tsnowball\nn07616265\tsnowball\nn07616386\tparfait\nn07616487\tice-cream sundae, sundae\nn07616590\tsplit\nn07616748\tbanana split\nn07616906\tfrozen pudding\nn07617051\tfrozen custard, soft ice cream\nn07617188\tpudding\nn07617344\tflummery\nn07617447\tfish mousse\nn07617526\tchicken mousse\nn07617611\tchocolate mousse\nn07617708\tplum pudding, Christmas pudding\nn07617839\tcarrot pudding\nn07617932\tcorn pudding\nn07618029\tsteamed pudding\nn07618119\tduff, plum duff\nn07618281\tvanilla pudding\nn07618432\tchocolate pudding\nn07618587\tbrown Betty\nn07618684\tNesselrode, Nesselrode pudding\nn07618871\tpease pudding\nn07619004\tcustard\nn07619208\tcreme caramel\nn07619301\tcreme anglais\nn07619409\tcreme brulee\nn07619508\tfruit custard\nn07619881\ttapioca\nn07620047\ttapioca pudding\nn07620145\troly-poly, roly-poly pudding\nn07620327\tsuet pudding\nn07620597\tBavarian cream\nn07620689\tmaraschino, maraschino cherry\nn07621264\tnonpareil\nn07621497\tzabaglione, sabayon\nn07621618\tgarnish\nn07623136\tpastry, pastry dough\nn07624466\tturnover\nn07624666\tapple turnover\nn07624757\tknish\nn07624924\tpirogi, piroshki, pirozhki\nn07625061\tsamosa\nn07625324\ttimbale\nn07627931\tpuff paste, pate feuillete\nn07628068\tphyllo\nn07628181\tpuff batter, pouf paste, pate a choux\nn07631926\tice-cream cake, icebox cake\nn07639069\tdoughnut, donut, sinker\nn07641928\tfish cake, fish ball\nn07642361\tfish 
stick, fish finger\nn07642471\tconserve, preserve, conserves, preserves\nn07642742\tapple butter\nn07642833\tchowchow\nn07642933\tjam\nn07643026\tlemon curd, lemon cheese\nn07643200\tstrawberry jam, strawberry preserves\nn07643306\tjelly\nn07643474\tapple jelly\nn07643577\tcrabapple jelly\nn07643679\tgrape jelly\nn07643764\tmarmalade\nn07643891\torange marmalade\nn07643981\tgelatin, jelly\nn07644244\tgelatin dessert\nn07648913\tbuffalo wing\nn07648997\tbarbecued wing\nn07650792\tmess\nn07650903\tmince\nn07651025\tpuree\nn07654148\tbarbecue, barbeque\nn07654298\tbiryani, biriani\nn07655067\tescalope de veau Orloff\nn07655263\tsaute\nn07663899\tpatty, cake\nn07665438\tveal parmesan, veal parmigiana\nn07666176\tveal cordon bleu\nn07672914\tmargarine, margarin, oleo, oleomargarine, marge\nn07678586\tmincemeat\nn07678729\tstuffing, dressing\nn07678953\tturkey stuffing\nn07679034\toyster stuffing, oyster dressing\nn07679140\tforcemeat, farce\nn07679356\tbread, breadstuff, staff of life\nn07680168\tanadama bread\nn07680313\tbap\nn07680416\tbarmbrack\nn07680517\tbreadstick, bread-stick\nn07680655\tgrissino\nn07680761\tbrown bread, Boston brown bread\nn07680932\tbun, roll\nn07681264\ttea bread\nn07681355\tcaraway seed bread\nn07681450\tchallah, hallah\nn07681691\tcinnamon bread\nn07681805\tcracked-wheat bread\nn07681926\tcracker\nn07682197\tcrouton\nn07682316\tdark bread, whole wheat bread, whole meal bread, brown bread\nn07682477\tEnglish muffin\nn07682624\tflatbread\nn07682808\tgarlic bread\nn07682952\tgluten bread\nn07683039\tgraham bread\nn07683138\tHost\nn07683265\tflatbrod\nn07683360\tbannock\nn07683490\tchapatti, chapati\nn07683617\tpita, pocket bread\nn07683786\tloaf of bread, loaf\nn07684084\tFrench loaf\nn07684164\tmatzo, matzoh, matzah, unleavened bread\nn07684289\tnan, naan\nn07684422\tonion bread\nn07684517\traisin bread\nn07684600\tquick bread\nn07684938\tbanana bread\nn07685031\tdate bread\nn07685118\tdate-nut bread\nn07685218\tnut 
bread\nn07685303\toatcake\nn07685399\tIrish soda bread\nn07685546\tskillet bread, fry bread\nn07685730\trye bread\nn07685918\tblack bread, pumpernickel\nn07686021\tJewish rye bread, Jewish rye\nn07686202\tlimpa\nn07686299\tSwedish rye bread, Swedish rye\nn07686461\tsalt-rising bread\nn07686634\tsimnel\nn07686720\tsour bread, sourdough bread\nn07686873\ttoast\nn07687053\twafer\nn07687211\twhite bread, light bread\nn07687381\tbaguet, baguette\nn07687469\tFrench bread\nn07687626\tItalian bread\nn07687789\tcornbread\nn07688021\tcorn cake\nn07688130\tskillet corn bread\nn07688265\tashcake, ash cake, corn tash\nn07688412\thoecake\nn07688624\tcornpone, pone\nn07688757\tcorn dab, corn dodger, dodger\nn07688898\thush puppy, hushpuppy\nn07689003\tjohnnycake, johnny cake, journey cake\nn07689217\tShawnee cake\nn07689313\tspoon bread, batter bread\nn07689490\tcinnamon toast\nn07689624\torange toast\nn07689757\tMelba toast\nn07689842\tzwieback, rusk, Brussels biscuit, twice-baked bread\nn07690019\tfrankfurter bun, hotdog bun\nn07690152\thamburger bun, hamburger roll\nn07690273\tmuffin, gem\nn07690431\tbran muffin\nn07690511\tcorn muffin\nn07690585\tYorkshire pudding\nn07690739\tpopover\nn07690892\tscone\nn07691091\tdrop scone, griddlecake, Scotch pancake\nn07691237\tcross bun, hot cross bun\nn07691539\tbrioche\nn07691650\tcrescent roll, croissant\nn07691758\thard roll, Vienna roll\nn07691863\tsoft roll\nn07691954\tkaiser roll\nn07692114\tParker House roll\nn07692248\tclover-leaf roll\nn07692405\tonion roll\nn07692517\tbialy, bialystoker\nn07692614\tsweet roll, coffee roll\nn07692887\tbear claw, bear paw\nn07693048\tcinnamon roll, cinnamon bun, cinnamon snail\nn07693223\thoney bun, sticky bun, caramel bun, schnecken\nn07693439\tpinwheel roll\nn07693590\tdanish, danish pastry\nn07693725\tbagel, beigel\nn07693889\tonion bagel\nn07693972\tbiscuit\nn07694169\trolled biscuit\nn07694403\tbaking-powder biscuit\nn07694516\tbuttermilk biscuit, soda 
biscuit\nn07694659\tshortcake\nn07694839\thardtack, pilot biscuit, pilot bread, sea biscuit, ship biscuit\nn07695187\tsaltine\nn07695284\tsoda cracker\nn07695410\toyster cracker\nn07695504\twater biscuit\nn07695652\tgraham cracker\nn07695742\tpretzel\nn07695878\tsoft pretzel\nn07695965\tsandwich\nn07696403\tsandwich plate\nn07696527\tbutty\nn07696625\tham sandwich\nn07696728\tchicken sandwich\nn07696839\tclub sandwich, three-decker, triple-decker\nn07696977\topen-face sandwich, open sandwich\nn07697100\thamburger, beefburger, burger\nn07697313\tcheeseburger\nn07697408\ttunaburger\nn07697537\thotdog, hot dog, red hot\nn07697699\tSloppy Joe\nn07697825\tbomber, grinder, hero, hero sandwich, hoagie, hoagy, Cuban sandwich, Italian sandwich, poor boy, sub, submarine, submarine sandwich, torpedo, wedge, zep\nn07698250\tgyro\nn07698401\tbacon-lettuce-tomato sandwich, BLT\nn07698543\tReuben\nn07698672\twestern, western sandwich\nn07698782\twrap\nn07700003\tspaghetti\nn07703889\thasty pudding\nn07704054\tgruel\nn07704205\tcongee, jook\nn07704305\tskilly\nn07705931\tedible fruit\nn07707451\tvegetable, veggie, veg\nn07708124\tjulienne, julienne vegetable\nn07708398\traw vegetable, rabbit food\nn07708512\tcrudites\nn07708685\tcelery stick\nn07708798\tlegume\nn07709046\tpulse\nn07709172\tpotherb\nn07709333\tgreens, green, leafy vegetable\nn07709701\tchop-suey greens\nn07709881\tbean curd, tofu\nn07710007\tsolanaceous vegetable\nn07710283\troot vegetable\nn07710616\tpotato, white potato, Irish potato, murphy, spud, tater\nn07710952\tbaked potato\nn07711080\tfrench fries, french-fried potatoes, fries, chips\nn07711232\thome fries, home-fried potatoes\nn07711371\tjacket potato\nn07711569\tmashed potato\nn07711683\tpotato skin, potato peel, potato peelings\nn07711799\tUruguay potato\nn07711907\tyam\nn07712063\tsweet potato\nn07712267\tyam\nn07712382\tsnack food\nn07712559\tchip, crisp, potato chip, Saratoga chip\nn07712748\tcorn chip\nn07712856\ttortilla 
chip\nn07712959\tnacho\nn07713074\teggplant, aubergine, mad apple\nn07713267\tpieplant, rhubarb\nn07713395\tcruciferous vegetable\nn07713763\tmustard, mustard greens, leaf mustard, Indian mustard\nn07713895\tcabbage, chou\nn07714078\tkale, kail, cole\nn07714188\tcollards, collard greens\nn07714287\tChinese cabbage, celery cabbage, Chinese celery\nn07714448\tbok choy, bok choi\nn07714571\thead cabbage\nn07714802\tred cabbage\nn07714895\tsavoy cabbage, savoy\nn07714990\tbroccoli\nn07715103\tcauliflower\nn07715221\tbrussels sprouts\nn07715407\tbroccoli rabe, broccoli raab\nn07715561\tsquash\nn07715721\tsummer squash\nn07716034\tyellow squash\nn07716203\tcrookneck, crookneck squash, summer crookneck\nn07716358\tzucchini, courgette\nn07716504\tmarrow, vegetable marrow\nn07716649\tcocozelle\nn07716750\tpattypan squash\nn07716906\tspaghetti squash\nn07717070\twinter squash\nn07717410\tacorn squash\nn07717556\tbutternut squash\nn07717714\thubbard squash\nn07717858\tturban squash\nn07718068\tbuttercup squash\nn07718195\tcushaw\nn07718329\twinter crookneck squash\nn07718472\tcucumber, cuke\nn07718671\tgherkin\nn07718747\tartichoke, globe artichoke\nn07718920\tartichoke heart\nn07719058\tJerusalem artichoke, sunchoke\nn07719213\tasparagus\nn07719330\tbamboo shoot\nn07719437\tsprout\nn07719616\tbean sprout\nn07719756\talfalfa sprout\nn07719839\tbeet, beetroot\nn07719980\tbeet green\nn07720084\tsugar beet\nn07720185\tmangel-wurzel\nn07720277\tchard, Swiss chard, spinach beet, leaf beet\nn07720442\tpepper\nn07720615\tsweet pepper\nn07720875\tbell pepper\nn07721018\tgreen pepper\nn07721118\tglobe pepper\nn07721195\tpimento, pimiento\nn07721325\thot pepper\nn07721456\tchili, chili pepper, chilli, chilly, chile\nn07721678\tjalapeno, jalapeno pepper\nn07721833\tchipotle\nn07721942\tcayenne, cayenne pepper\nn07722052\ttabasco, red pepper\nn07722217\tonion\nn07722390\tBermuda onion\nn07722485\tgreen onion, spring onion, scallion\nn07722666\tVidalia onion\nn07722763\tSpanish 
onion\nn07722888\tpurple onion, red onion\nn07723039\tleek\nn07723177\tshallot\nn07723330\tsalad green, salad greens\nn07723559\tlettuce\nn07723753\tbutterhead lettuce\nn07723968\tbuttercrunch\nn07724078\tBibb lettuce\nn07724173\tBoston lettuce\nn07724269\tcrisphead lettuce, iceberg lettuce, iceberg\nn07724492\tcos, cos lettuce, romaine, romaine lettuce\nn07724654\tleaf lettuce, loose-leaf lettuce\nn07724819\tceltuce\nn07724943\tbean, edible bean\nn07725158\tgoa bean\nn07725255\tlentil\nn07725376\tpea\nn07725531\tgreen pea, garden pea\nn07725663\tmarrowfat pea\nn07725789\tsnow pea, sugar pea\nn07725888\tsugar snap pea\nn07726009\tsplit-pea\nn07726095\tchickpea, garbanzo\nn07726230\tcajan pea, pigeon pea, dahl\nn07726386\tfield pea\nn07726525\tmushy peas\nn07726672\tblack-eyed pea, cowpea\nn07726796\tcommon bean\nn07727048\tkidney bean\nn07727140\tnavy bean, pea bean, white bean\nn07727252\tpinto bean\nn07727377\tfrijole\nn07727458\tblack bean, turtle bean\nn07727578\tfresh bean\nn07727741\tflageolet, haricot\nn07727868\tgreen bean\nn07728053\tsnap bean, snap\nn07728181\tstring bean\nn07728284\tKentucky wonder, Kentucky wonder bean\nn07728391\tscarlet runner, scarlet runner bean, runner bean, English runner bean\nn07728585\tharicot vert, haricots verts, French bean\nn07728708\twax bean, yellow bean\nn07728804\tshell bean\nn07729000\tlima bean\nn07729142\tFordhooks\nn07729225\tsieva bean, butter bean, butterbean, civet bean\nn07729384\tfava bean, broad bean\nn07729485\tsoy, soybean, soya, soya bean\nn07729828\tgreen soybean\nn07729926\tfield soybean\nn07730033\tcardoon\nn07730207\tcarrot\nn07730320\tcarrot stick\nn07730406\tcelery\nn07730562\tpascal celery, Paschal celery\nn07730708\tceleriac, celery root\nn07730855\tchicory, curly endive\nn07731006\tradicchio\nn07731122\tcoffee substitute\nn07731284\tchicory, chicory root\nn07731436\tPostum\nn07731587\tchicory escarole, endive, escarole\nn07731767\tBelgian endive, French endive, witloof\nn07731952\tcorn, edible 
corn\nn07732168\tsweet corn, green corn\nn07732302\thominy\nn07732433\tlye hominy\nn07732525\tpearl hominy\nn07732636\tpopcorn\nn07732747\tcress\nn07732904\twatercress\nn07733005\tgarden cress\nn07733124\twinter cress\nn07733217\tdandelion green\nn07733394\tgumbo, okra\nn07733567\tkohlrabi, turnip cabbage\nn07733712\tlamb's-quarter, pigweed, wild spinach\nn07733847\twild spinach\nn07734017\ttomato\nn07734183\tbeefsteak tomato\nn07734292\tcherry tomato\nn07734417\tplum tomato\nn07734555\ttomatillo, husk tomato, Mexican husk tomato\nn07734744\tmushroom\nn07734879\tstuffed mushroom\nn07735052\tsalsify\nn07735179\toyster plant, vegetable oyster\nn07735294\tscorzonera, black salsify\nn07735404\tparsnip\nn07735510\tpumpkin\nn07735687\tradish\nn07735803\tturnip\nn07735981\twhite turnip\nn07736087\trutabaga, swede, swedish turnip, yellow turnip\nn07736256\tturnip greens\nn07736371\tsorrel, common sorrel\nn07736527\tFrench sorrel\nn07736692\tspinach\nn07736813\ttaro, taro root, cocoyam, dasheen, edda\nn07736971\ttruffle, earthnut\nn07737081\tedible nut\nn07737594\tbunya bunya\nn07737745\tpeanut, earthnut, goober, goober pea, groundnut, monkey nut\nn07738105\tfreestone\nn07738224\tcling, clingstone\nn07739035\twindfall\nn07739125\tapple\nn07739344\tcrab apple, crabapple\nn07739506\teating apple, dessert apple\nn07739923\tBaldwin\nn07740033\tCortland\nn07740115\tCox's Orange Pippin\nn07740220\tDelicious\nn07740342\tGolden Delicious, Yellow Delicious\nn07740461\tRed Delicious\nn07740597\tEmpire\nn07740744\tGrimes' golden\nn07740855\tJonathan\nn07740954\tMcIntosh\nn07741138\tMacoun\nn07741235\tNorthern Spy\nn07741357\tPearmain\nn07741461\tPippin\nn07741623\tPrima\nn07741706\tStayman\nn07741804\tWinesap\nn07741888\tStayman Winesap\nn07742012\tcooking apple\nn07742224\tBramley's Seedling\nn07742313\tGranny Smith\nn07742415\tLane's Prince Albert\nn07742513\tNewtown Wonder\nn07742605\tRome Beauty\nn07742704\tberry\nn07743224\tbilberry, whortleberry, European 
blueberry\nn07743384\thuckleberry\nn07743544\tblueberry\nn07743723\twintergreen, boxberry, checkerberry, teaberry, spiceberry\nn07743902\tcranberry\nn07744057\tlingonberry, mountain cranberry, cowberry, lowbush cranberry\nn07744246\tcurrant\nn07744430\tgooseberry\nn07744559\tblack currant\nn07744682\tred currant\nn07744811\tblackberry\nn07745046\tboysenberry\nn07745197\tdewberry\nn07745357\tloganberry\nn07745466\traspberry\nn07745661\tsaskatoon, serviceberry, shadberry, juneberry\nn07745940\tstrawberry\nn07746038\tsugarberry, hackberry\nn07746186\tpersimmon\nn07746334\tacerola, barbados cherry, surinam cherry, West Indian cherry\nn07746551\tcarambola, star fruit\nn07746749\tceriman, monstera\nn07746910\tcarissa plum, natal plum\nn07747055\tcitrus, citrus fruit, citrous fruit\nn07747607\torange\nn07747811\ttemple orange\nn07747951\tmandarin, mandarin orange\nn07748157\tclementine\nn07748276\tsatsuma\nn07748416\ttangerine\nn07748574\ttangelo, ugli, ugli fruit\nn07748753\tbitter orange, Seville orange, sour orange\nn07748912\tsweet orange\nn07749095\tJaffa orange\nn07749192\tnavel orange\nn07749312\tValencia orange\nn07749446\tkumquat\nn07749582\tlemon\nn07749731\tlime\nn07749870\tkey lime\nn07749969\tgrapefruit\nn07750146\tpomelo, shaddock\nn07750299\tcitrange\nn07750449\tcitron\nn07750586\talmond\nn07750736\tJordan almond\nn07750872\tapricot\nn07751004\tpeach\nn07751148\tnectarine\nn07751280\tpitahaya\nn07751451\tplum\nn07751737\tdamson, damson plum\nn07751858\tgreengage, greengage plum\nn07751977\tbeach plum\nn07752109\tsloe\nn07752264\tVictoria plum\nn07752377\tdried fruit\nn07752514\tdried apricot\nn07752602\tprune\nn07752664\traisin\nn07752782\tseedless raisin, sultana\nn07752874\tseeded raisin\nn07752966\tcurrant\nn07753113\tfig\nn07753275\tpineapple, ananas\nn07753448\tanchovy pear, river pear\nn07753592\tbanana\nn07753743\tpassion fruit\nn07753980\tgranadilla\nn07754155\tsweet calabash\nn07754279\tbell apple, sweet cup, water lemon, yellow 
granadilla\nn07754451\tbreadfruit\nn07754684\tjackfruit, jak, jack\nn07754894\tcacao bean, cocoa bean\nn07755089\tcocoa\nn07755262\tcanistel, eggfruit\nn07755411\tmelon\nn07755619\tmelon ball\nn07755707\tmuskmelon, sweet melon\nn07755929\tcantaloup, cantaloupe\nn07756096\twinter melon\nn07756325\thoneydew, honeydew melon\nn07756499\tPersian melon\nn07756641\tnet melon, netted melon, nutmeg melon\nn07756838\tcasaba, casaba melon\nn07756951\twatermelon\nn07757132\tcherry\nn07757312\tsweet cherry, black cherry\nn07757511\tbing cherry\nn07757602\theart cherry, oxheart, oxheart cherry\nn07757753\tblackheart, blackheart cherry\nn07757874\tcapulin, Mexican black cherry\nn07757990\tsour cherry\nn07758125\tamarelle\nn07758260\tmorello\nn07758407\tcocoa plum, coco plum, icaco\nn07758582\tgherkin\nn07758680\tgrape\nn07758950\tfox grape\nn07759194\tConcord grape\nn07759324\tCatawba\nn07759424\tmuscadine, bullace grape\nn07759576\tscuppernong\nn07759691\tslipskin grape\nn07759816\tvinifera grape\nn07760070\temperor\nn07760153\tmuscat, muscatel, muscat grape\nn07760297\tribier\nn07760395\tsultana\nn07760501\tTokay\nn07760673\tflame tokay\nn07760755\tThompson Seedless\nn07760859\tcustard apple\nn07761141\tcherimoya, cherimolla\nn07761309\tsoursop, guanabana\nn07761611\tsweetsop, annon, sugar apple\nn07761777\tilama\nn07761954\tpond apple\nn07762114\tpapaw, pawpaw\nn07762244\tpapaya\nn07762373\tkai apple\nn07762534\tketembilla, kitembilla, kitambilla\nn07762740\tackee, akee\nn07762913\tdurian\nn07763107\tfeijoa, pineapple guava\nn07763290\tgenip, Spanish lime\nn07763483\tgenipap, genipap fruit\nn07763629\tkiwi, kiwi fruit, Chinese gooseberry\nn07763792\tloquat, Japanese plum\nn07763987\tmangosteen\nn07764155\tmango\nn07764315\tsapodilla, sapodilla plum, sapota\nn07764486\tsapote, mammee, marmalade plum\nn07764630\ttamarind, tamarindo\nn07764847\tavocado, alligator pear, avocado pear, 
aguacate\nn07765073\tdate\nn07765208\telderberry\nn07765361\tguava\nn07765517\tmombin\nn07765612\thog plum, yellow mombin\nn07765728\thog plum, wild plum\nn07765862\tjaboticaba\nn07765999\tjujube, Chinese date, Chinese jujube\nn07766173\tlitchi, litchi nut, litchee, lichi, leechee, lichee, lychee\nn07766409\tlonganberry, dragon's eye\nn07766530\tmamey, mammee, mammee apple\nn07766723\tmarang\nn07766891\tmedlar\nn07767002\tmedlar\nn07767171\tmulberry\nn07767344\tolive\nn07767549\tblack olive, ripe olive\nn07767709\tgreen olive\nn07767847\tpear\nn07768068\tbosc\nn07768139\tanjou\nn07768230\tbartlett, bartlett pear\nn07768318\tseckel, seckel pear\nn07768423\tplantain\nn07768590\tplumcot\nn07768694\tpomegranate\nn07768858\tprickly pear\nn07769102\tBarbados gooseberry, blade apple\nn07769306\tquandong, quandang, quantong, native peach\nn07769465\tquandong nut\nn07769584\tquince\nn07769731\trambutan, rambotan\nn07769886\tpulasan, pulassan\nn07770034\trose apple\nn07770180\tsorb, sorb apple\nn07770439\tsour gourd, monkey bread\nn07770571\tedible seed\nn07770763\tpumpkin seed\nn07770869\tbetel nut, areca nut\nn07771082\tbeechnut\nn07771212\twalnut\nn07771405\tblack walnut\nn07771539\tEnglish walnut\nn07771731\tbrazil nut, brazil\nn07771891\tbutternut\nn07772026\tsouari nut\nn07772147\tcashew, cashew nut\nn07772274\tchestnut\nn07772413\tchincapin, chinkapin, chinquapin\nn07772788\thazelnut, filbert, cobnut, cob\nn07772935\tcoconut, cocoanut\nn07773428\tcoconut milk, coconut water\nn07774182\tgrugru nut\nn07774295\thickory nut\nn07774479\tcola extract\nn07774596\tmacadamia nut\nn07774719\tpecan\nn07774842\tpine nut, pignolia, pinon nut\nn07775050\tpistachio, pistachio nut\nn07775197\tsunflower seed\nn07783827\tanchovy paste\nn07785487\trollmops\nn07800091\tfeed, provender\nn07800487\tcattle cake\nn07800636\tcreep feed\nn07800740\tfodder\nn07801007\tfeed grain\nn07801091\teatage, forage, pasture, pasturage, grass\nn07801342\tsilage, ensilage\nn07801508\toil 
cake\nn07801709\toil meal\nn07801779\talfalfa\nn07801892\tbroad bean, horse bean\nn07802026\thay\nn07802152\ttimothy\nn07802246\tstover\nn07802417\tgrain, food grain, cereal\nn07802767\tgrist\nn07802863\tgroats\nn07802963\tmillet\nn07803093\tbarley, barleycorn\nn07803213\tpearl barley\nn07803310\tbuckwheat\nn07803408\tbulgur, bulghur, bulgur wheat\nn07803545\twheat, wheat berry\nn07803779\tcracked wheat\nn07803895\tstodge\nn07803992\twheat germ\nn07804152\toat\nn07804323\trice\nn07804543\tbrown rice\nn07804657\twhite rice, polished rice\nn07804771\twild rice, Indian rice\nn07804900\tpaddy\nn07805006\tslop, slops, swill, pigswill, pigwash\nn07805254\tmash\nn07805389\tchicken feed, scratch\nn07805478\tcud, rechewed food\nn07805594\tbird feed, bird food, birdseed\nn07805731\tpetfood, pet-food, pet food\nn07805966\tdog food\nn07806043\tcat food\nn07806120\tcanary seed\nn07806221\tsalad\nn07806633\ttossed salad\nn07806774\tgreen salad\nn07806879\tCaesar salad\nn07807002\tsalmagundi\nn07807171\tsalad nicoise\nn07807317\tcombination salad\nn07807472\tchef's salad\nn07807594\tpotato salad\nn07807710\tpasta salad\nn07807834\tmacaroni salad\nn07807922\tfruit salad\nn07808022\tWaldorf salad\nn07808166\tcrab Louis\nn07808268\therring salad\nn07808352\ttuna fish salad, tuna salad\nn07808479\tchicken salad\nn07808587\tcoleslaw, slaw\nn07808675\taspic\nn07808806\tmolded salad\nn07808904\ttabbouleh, tabooli\nn07809096\tingredient, fixings\nn07809368\tflavorer, flavourer, flavoring, flavouring, seasoner, seasoning\nn07810531\tbouillon cube\nn07810907\tcondiment\nn07811416\therb\nn07812046\tfines herbes\nn07812184\tspice\nn07812662\tspearmint oil\nn07812790\tlemon oil\nn07812913\twintergreen oil, oil of wintergreen\nn07813107\tsalt, table salt, common salt\nn07813324\tcelery salt\nn07813495\tonion salt\nn07813579\tseasoned salt\nn07813717\tsour salt\nn07813833\tfive spice powder\nn07814007\tallspice\nn07814203\tcinnamon\nn07814390\tstick cinnamon\nn07814487\tclove\nn07814634\tcumin, 
cumin seed\nn07814790\tfennel\nn07814925\tginger, gingerroot\nn07815163\tginger, powdered ginger\nn07815294\tmace\nn07815424\tnutmeg\nn07815588\tpepper, peppercorn\nn07815839\tblack pepper\nn07815956\twhite pepper\nn07816052\tsassafras\nn07816164\tbasil, sweet basil\nn07816296\tbay leaf\nn07816398\tborage\nn07816575\thyssop\nn07816726\tcaraway\nn07816839\tchervil\nn07817024\tchives\nn07817160\tcomfrey, healing herb\nn07817315\tcoriander, Chinese parsley, cilantro\nn07817465\tcoriander, coriander seed\nn07817599\tcostmary\nn07817758\tfennel, common fennel\nn07817871\tfennel, Florence fennel, finocchio\nn07818029\tfennel seed\nn07818133\tfenugreek, fenugreek seed\nn07818277\tgarlic, ail\nn07818422\tclove, garlic clove\nn07818572\tgarlic chive\nn07818689\tlemon balm\nn07818825\tlovage\nn07818995\tmarjoram, oregano\nn07819166\tmint\nn07819303\tmustard seed\nn07819480\tmustard, table mustard\nn07819682\tChinese mustard\nn07819769\tnasturtium\nn07819896\tparsley\nn07820036\tsalad burnet\nn07820145\trosemary\nn07820297\true\nn07820497\tsage\nn07820683\tclary sage\nn07820814\tsavory, savoury\nn07820960\tsummer savory, summer savoury\nn07821107\twinter savory, winter savoury\nn07821260\tsweet woodruff, waldmeister\nn07821404\tsweet cicely\nn07821610\ttarragon, estragon\nn07821758\tthyme\nn07821919\tturmeric\nn07822053\tcaper\nn07822197\tcatsup, ketchup, cetchup, tomato ketchup\nn07822323\tcardamom, cardamon, cardamum\nn07822518\tcayenne, cayenne pepper, red pepper\nn07822687\tchili powder\nn07822845\tchili sauce\nn07823105\tchutney, Indian relish\nn07823280\tsteak sauce\nn07823369\ttaco sauce\nn07823460\tsalsa\nn07823591\tmint sauce\nn07823698\tcranberry sauce\nn07823814\tcurry powder\nn07823951\tcurry\nn07824191\tlamb curry\nn07824268\tduck sauce, hoisin sauce\nn07824383\thorseradish\nn07824502\tmarinade\nn07824702\tpaprika\nn07824863\tSpanish paprika\nn07824988\tpickle\nn07825194\tdill pickle\nn07825399\tbread and butter pickle\nn07825496\tpickle 
relish\nn07825597\tpiccalilli\nn07825717\tsweet pickle\nn07825850\tapplesauce, apple sauce\nn07825972\tsoy sauce, soy\nn07826091\tTabasco, Tabasco sauce\nn07826250\ttomato paste\nn07826340\tangelica\nn07826453\tangelica\nn07826544\talmond extract\nn07826653\tanise, aniseed, anise seed\nn07826930\tChinese anise, star anise, star aniseed\nn07827130\tjuniper berries\nn07827284\tsaffron\nn07827410\tsesame seed, benniseed\nn07827554\tcaraway seed\nn07827750\tpoppy seed\nn07827896\tdill, dill weed\nn07828041\tdill seed\nn07828156\tcelery seed\nn07828275\tlemon extract\nn07828378\tmonosodium glutamate, MSG\nn07828642\tvanilla bean\nn07828987\tvinegar, acetum\nn07829248\tcider vinegar\nn07829331\twine vinegar\nn07829412\tsauce\nn07830493\tanchovy sauce\nn07830593\thot sauce\nn07830690\thard sauce\nn07830841\thorseradish sauce, sauce Albert\nn07830986\tbolognese pasta sauce\nn07831146\tcarbonara\nn07831267\ttomato sauce\nn07831450\ttartare sauce, tartar sauce\nn07831663\twine sauce\nn07831821\tmarchand de vin, mushroom wine sauce\nn07831955\tbread sauce\nn07832099\tplum sauce\nn07832202\tpeach sauce\nn07832307\tapricot sauce\nn07832416\tpesto\nn07832592\travigote, ravigotte\nn07832741\tremoulade sauce\nn07832902\tdressing, salad dressing\nn07833333\tsauce Louis\nn07833535\tbleu cheese dressing, blue cheese dressing\nn07833672\tblue cheese dressing, Roquefort dressing\nn07833816\tFrench dressing, vinaigrette, sauce vinaigrette\nn07833951\tLorenzo dressing\nn07834065\tanchovy dressing\nn07834160\tItalian dressing\nn07834286\thalf-and-half dressing\nn07834507\tmayonnaise, mayo\nn07834618\tgreen mayonnaise, sauce verte\nn07834774\taioli, aioli sauce, garlic sauce\nn07834872\tRussian dressing, Russian mayonnaise\nn07835051\tsalad cream\nn07835173\tThousand Island dressing\nn07835331\tbarbecue sauce\nn07835457\thollandaise\nn07835547\tbearnaise\nn07835701\tBercy, Bercy butter\nn07835823\tbordelaise\nn07835921\tbourguignon, bourguignon sauce, Burgundy sauce\nn07836077\tbrown 
sauce, sauce Espagnole\nn07836269\tEspagnole, sauce Espagnole\nn07836456\tChinese brown sauce, brown sauce\nn07836600\tblanc\nn07836731\tcheese sauce\nn07836838\tchocolate sauce, chocolate syrup\nn07837002\thot-fudge sauce, fudge sauce\nn07837110\tcocktail sauce, seafood sauce\nn07837234\tColbert, Colbert butter\nn07837362\twhite sauce, bechamel sauce, bechamel\nn07837545\tcream sauce\nn07837630\tMornay sauce\nn07837755\tdemiglace, demi-glaze\nn07837912\tgravy, pan gravy\nn07838073\tgravy\nn07838233\tspaghetti sauce, pasta sauce\nn07838441\tmarinara\nn07838551\tmole\nn07838659\thunter's sauce, sauce chausseur\nn07838811\tmushroom sauce\nn07838905\tmustard sauce\nn07839055\tNantua, shrimp sauce\nn07839172\tHungarian sauce, paprika sauce\nn07839312\tpepper sauce, Poivrade\nn07839478\troux\nn07839593\tSmitane\nn07839730\tSoubise, white onion sauce\nn07839864\tLyonnaise sauce, brown onion sauce\nn07840027\tveloute\nn07840124\tallemande, allemande sauce\nn07840219\tcaper sauce\nn07840304\tpoulette\nn07840395\tcurry sauce\nn07840520\tWorcester sauce, Worcestershire, Worcestershire sauce\nn07840672\tcoconut milk, coconut cream\nn07840804\tegg, eggs\nn07841037\tegg white, white, albumen, ovalbumin\nn07841345\tegg yolk, yolk\nn07841495\tboiled egg, coddled egg\nn07841639\thard-boiled egg, hard-cooked egg\nn07841800\tEaster egg\nn07841907\tEaster egg\nn07842044\tchocolate egg\nn07842130\tcandy egg\nn07842202\tpoached egg, dropped egg\nn07842308\tscrambled eggs\nn07842433\tdeviled egg, stuffed egg\nn07842605\tshirred egg, baked egg, egg en cocotte\nn07842753\tomelet, omelette\nn07842972\tfirm omelet\nn07843117\tFrench omelet\nn07843220\tfluffy omelet\nn07843348\twestern omelet\nn07843464\tsouffle\nn07843636\tfried egg\nn07843775\tdairy product\nn07844042\tmilk\nn07844604\tmilk\nn07844786\tsour milk\nn07844867\tsoya milk, soybean milk, soymilk\nn07845087\tformula\nn07845166\tpasteurized milk\nn07845335\tcows' milk\nn07845421\tyak's milk\nn07845495\tgoats' 
milk\nn07845571\tacidophilus milk\nn07845702\traw milk\nn07845775\tscalded milk\nn07845863\thomogenized milk\nn07846014\tcertified milk\nn07846143\tpowdered milk, dry milk, dried milk, milk powder\nn07846274\tnonfat dry milk\nn07846359\tevaporated milk\nn07846471\tcondensed milk\nn07846557\tskim milk, skimmed milk\nn07846688\tsemi-skimmed milk\nn07846802\twhole milk\nn07846938\tlow-fat milk\nn07847047\tbuttermilk\nn07847198\tcream\nn07847453\tclotted cream, Devonshire cream\nn07847585\tdouble creme, heavy whipping cream\nn07847706\thalf-and-half\nn07847827\theavy cream\nn07847917\tlight cream, coffee cream, single cream\nn07848093\tsour cream, soured cream\nn07848196\twhipping cream, light whipping cream\nn07848338\tbutter\nn07848771\tclarified butter, drawn butter\nn07848936\tghee\nn07849026\tbrown butter, beurre noisette\nn07849186\tMeuniere butter, lemon butter\nn07849336\tyogurt, yoghurt, yoghourt\nn07849506\tblueberry yogurt\nn07849619\traita\nn07849733\twhey\nn07849912\tcurd\nn07850083\tcurd\nn07850219\tclabber\nn07850329\tcheese\nn07851054\tparing\nn07851298\tcream cheese\nn07851443\tdouble cream\nn07851554\tmascarpone\nn07851641\ttriple cream, triple creme\nn07851767\tcottage cheese, pot cheese, farm cheese, farmer's cheese\nn07851926\tprocess cheese, processed cheese\nn07852045\tbleu, blue cheese\nn07852229\tStilton\nn07852302\tRoquefort\nn07852376\tgorgonzola\nn07852452\tDanish blue\nn07852532\tBavarian blue\nn07852614\tBrie\nn07852712\tbrick cheese\nn07852833\tCamembert\nn07852919\tcheddar, cheddar cheese, Armerican cheddar, American cheese\nn07853125\trat cheese, store cheese\nn07853232\tCheshire cheese\nn07853345\tdouble Gloucester\nn07853445\tEdam\nn07853560\tgoat cheese, chevre\nn07853648\tGouda, Gouda cheese\nn07853762\tgrated cheese\nn07853852\thand cheese\nn07853946\tLiederkranz\nn07854066\tLimburger\nn07854184\tmozzarella\nn07854266\tMuenster\nn07854348\tParmesan\nn07854455\tquark cheese, quark\nn07854614\tricotta\nn07854707\tstring 
cheese\nn07854813\tSwiss cheese\nn07854982\tEmmenthal, Emmental, Emmenthaler, Emmentaler\nn07855105\tGruyere\nn07855188\tsapsago\nn07855317\tVelveeta\nn07855413\tnut butter\nn07855510\tpeanut butter\nn07855603\tmarshmallow fluff\nn07855721\tonion butter\nn07855812\tpimento butter\nn07855907\tshrimp butter\nn07856045\tlobster butter\nn07856186\tyak butter\nn07856270\tspread, paste\nn07856756\tcheese spread\nn07856895\tanchovy butter\nn07856992\tfishpaste\nn07857076\tgarlic butter\nn07857170\tmiso\nn07857356\twasabi\nn07857598\tsnail butter\nn07857731\thummus, humus, hommos, hoummos, humous\nn07857959\tpate\nn07858114\tduck pate\nn07858197\tfoie gras, pate de foie gras\nn07858336\ttapenade\nn07858484\ttahini\nn07858595\tsweetening, sweetener\nn07858841\taspartame\nn07858978\thoney\nn07859142\tsaccharin\nn07859284\tsugar, refined sugar\nn07859583\tsyrup, sirup\nn07859796\tsugar syrup\nn07859951\tmolasses\nn07860103\tsorghum, sorghum molasses\nn07860208\ttreacle, golden syrup\nn07860331\tgrenadine\nn07860447\tmaple syrup\nn07860548\tcorn syrup\nn07860629\tmiraculous food, manna, manna from heaven\nn07860805\tbatter\nn07860988\tdough\nn07861158\tbread dough\nn07861247\tpancake batter\nn07861334\tfritter batter\nn07861557\tcoq au vin\nn07861681\tchicken provencale\nn07861813\tchicken and rice\nn07861983\tmoo goo gai pan\nn07862095\tarroz con pollo\nn07862244\tbacon and eggs\nn07862348\tbarbecued spareribs, spareribs\nn07862461\tbeef Bourguignonne, boeuf Bourguignonne\nn07862611\tbeef Wellington, filet de boeuf en croute\nn07862770\tbitok\nn07862946\tboiled dinner, New England boiled dinner\nn07863107\tBoston baked beans\nn07863229\tbubble and squeak\nn07863374\tpasta\nn07863547\tcannelloni\nn07863644\tcarbonnade flamande, Belgian beef stew\nn07863802\tcheese souffle\nn07863935\tchicken Marengo\nn07864065\tchicken cordon bleu\nn07864198\tMaryland chicken\nn07864317\tchicken paprika, chicken paprikash\nn07864475\tchicken 
Tetrazzini\nn07864638\tTetrazzini\nn07864756\tchicken Kiev\nn07864934\tchili, chili con carne\nn07865105\tchili dog\nn07865196\tchop suey\nn07865484\tchow mein\nn07865575\tcodfish ball, codfish cake\nn07865700\tcoquille\nn07865788\tcoquilles Saint-Jacques\nn07866015\tcroquette\nn07866151\tcottage pie\nn07866277\trissole\nn07866409\tdolmas, stuffed grape leaves\nn07866571\tegg foo yong, egg fu yung\nn07866723\tegg roll, spring roll\nn07866868\teggs Benedict\nn07867021\tenchilada\nn07867164\tfalafel, felafel\nn07867324\tfish and chips\nn07867421\tfondue, fondu\nn07867616\tcheese fondue\nn07867751\tchocolate fondue\nn07867883\tfondue, fondu\nn07868045\tbeef fondue, boeuf fondu bourguignon\nn07868200\tFrench toast\nn07868340\tfried rice, Chinese fried rice\nn07868508\tfrittata\nn07868684\tfrog legs\nn07868830\tgalantine\nn07868955\tgefilte fish, fish ball\nn07869111\thaggis\nn07869291\tham and eggs\nn07869391\thash\nn07869522\tcorned beef hash\nn07869611\tjambalaya\nn07869775\tkabob, kebab, shish kebab\nn07869937\tkedgeree\nn07870069\tsouvlaki, souvlakia\nn07870167\tlasagna, lasagne\nn07870313\tseafood Newburg\nn07870478\tlobster Newburg, lobster a la Newburg\nn07870620\tshrimp Newburg\nn07870734\tNewburg sauce\nn07870894\tlobster thermidor\nn07871065\tlutefisk, lutfisk\nn07871234\tmacaroni and cheese\nn07871335\tmacedoine\nn07871436\tmeatball\nn07871588\tporcupine ball, porcupines\nn07871720\tSwedish meatball\nn07871810\tmeat loaf, meatloaf\nn07872593\tmoussaka\nn07872748\tosso buco\nn07873057\tmarrow, bone marrow\nn07873198\tpheasant under glass\nn07873348\tpigs in blankets\nn07873464\tpilaf, pilaff, pilau, pilaw\nn07873679\tbulgur pilaf\nn07873807\tpizza, pizza pie\nn07874063\tsausage pizza\nn07874159\tpepperoni pizza\nn07874259\tcheese pizza\nn07874343\tanchovy pizza\nn07874441\tSicilian pizza\nn07874531\tpoi\nn07874674\tpork and beans\nn07874780\tporridge\nn07874995\toatmeal, burgoo\nn07875086\tloblolly\nn07875152\tpotpie\nn07875267\trijsttaffel, rijstaffel, 
rijstafel\nn07875436\trisotto, Italian rice\nn07875560\troulade\nn07875693\tfish loaf\nn07875835\tsalmon loaf\nn07875926\tSalisbury steak\nn07876026\tsauerbraten\nn07876189\tsauerkraut\nn07876281\tscallopine, scallopini\nn07876460\tveal scallopini\nn07876550\tscampi\nn07876651\tScotch egg\nn07876775\tScotch woodcock\nn07876893\tscrapple\nn07877187\tspaghetti and meatballs\nn07877299\tSpanish rice\nn07877675\tsteak tartare, tartar steak, cannibal mound\nn07877849\tpepper steak\nn07877961\tsteak au poivre, peppered steak, pepper steak\nn07878145\tbeef Stroganoff\nn07878283\tstuffed cabbage\nn07878479\tkishke, stuffed derma\nn07878647\tstuffed peppers\nn07878785\tstuffed tomato, hot stuffed tomato\nn07878926\tstuffed tomato, cold stuffed tomato\nn07879072\tsuccotash\nn07879174\tsukiyaki\nn07879350\tsashimi\nn07879450\tsushi\nn07879560\tSwiss steak\nn07879659\ttamale\nn07879821\ttamale pie\nn07879953\ttempura\nn07880080\tteriyaki\nn07880213\tterrine\nn07880325\tWelsh rarebit, Welsh rabbit, rarebit\nn07880458\tschnitzel, Wiener schnitzel\nn07880751\ttaco\nn07880880\tchicken taco\nn07880968\tburrito\nn07881117\tbeef burrito\nn07881205\tquesadilla\nn07881404\ttostada\nn07881525\tbean tostada\nn07881625\trefried beans, frijoles refritos\nn07881800\tbeverage, drink, drinkable, potable\nn07882420\twish-wash\nn07882497\tconcoction, mixture, intermixture\nn07882886\tmix, premix\nn07883031\tfilling\nn07883156\tlekvar\nn07883251\tpotion\nn07883384\telixir\nn07883510\telixir of life\nn07883661\tphilter, philtre, love-potion, love-philter, love-philtre\nn07884567\talcohol, alcoholic drink, alcoholic beverage, intoxicant, inebriant\nn07885705\tproof spirit\nn07886057\thome brew, homebrew\nn07886176\thooch, hootch\nn07886317\tkava, kavakava\nn07886463\taperitif\nn07886572\tbrew, brewage\nn07886849\tbeer\nn07887099\tdraft beer, draught beer\nn07887192\tsuds\nn07887304\tMunich beer, Munchener\nn07887461\tbock, bock beer\nn07887634\tlager, lager beer\nn07887967\tlight 
beer\nn07888058\tOktoberfest, Octoberfest\nn07888229\tPilsner, Pilsener\nn07888378\tshebeen\nn07888465\tWeissbier, white beer, wheat beer\nn07888816\tWeizenbock\nn07888909\tmalt\nn07889193\twort\nn07889274\tmalt, malt liquor\nn07889510\tale\nn07889814\tbitter\nn07889990\tBurton\nn07890068\tpale ale\nn07890226\tporter, porter's beer\nn07890352\tstout\nn07890540\tGuinness\nn07890617\tkvass\nn07890750\tmead\nn07890890\tmetheglin\nn07890970\thydromel\nn07891095\toenomel\nn07891189\tnear beer\nn07891309\tginger beer\nn07891433\tsake, saki, rice beer\nn07891726\twine, vino\nn07892418\tvintage\nn07892512\tred wine\nn07892813\twhite wine\nn07893253\tblush wine, pink wine, rose, rose wine\nn07893425\taltar wine, sacramental wine\nn07893528\tsparkling wine\nn07893642\tchampagne, bubbly\nn07893792\tcold duck\nn07893891\tBurgundy, Burgundy wine\nn07894102\tBeaujolais\nn07894298\tMedoc\nn07894451\tCanary wine\nn07894551\tChablis, white Burgundy\nn07894703\tMontrachet\nn07894799\tChardonnay, Pinot Chardonnay\nn07894965\tPinot noir\nn07895100\tPinot blanc\nn07895237\tBordeaux, Bordeaux wine\nn07895435\tclaret, red Bordeaux\nn07895595\tChianti\nn07895710\tCabernet, Cabernet Sauvignon\nn07895839\tMerlot\nn07895962\tSauvignon blanc\nn07896060\tCalifornia wine\nn07896165\tCotes de Provence\nn07896287\tdessert wine\nn07896422\tDubonnet\nn07896560\tjug wine\nn07896661\tmacon, maconnais\nn07896765\tMoselle\nn07896893\tMuscadet\nn07896994\tplonk\nn07897116\tretsina\nn07897200\tRhine wine, Rhenish, hock\nn07897438\tRiesling\nn07897600\tliebfraumilch\nn07897750\tRhone wine\nn07897865\tRioja\nn07897975\tsack\nn07898117\tSaint Emilion\nn07898247\tSoave\nn07898333\tzinfandel\nn07898443\tSauterne, Sauternes\nn07898617\tstraw wine\nn07898745\ttable wine\nn07898895\tTokay\nn07899003\tvin ordinaire\nn07899108\tvermouth\nn07899292\tsweet vermouth, Italian vermouth\nn07899434\tdry vermouth, French vermouth\nn07899533\tChenin 
blanc\nn07899660\tVerdicchio\nn07899769\tVouvray\nn07899899\tYquem\nn07899976\tgeneric, generic wine\nn07900225\tvarietal, varietal wine\nn07900406\tfortified wine\nn07900616\tMadeira\nn07900734\tmalmsey\nn07900825\tport, port wine\nn07900958\tsherry\nn07901355\tMarsala\nn07901457\tmuscat, muscatel, muscadel, muscadelle\nn07901587\tliquor, spirits, booze, hard drink, hard liquor, John Barleycorn, strong drink\nn07902121\tneutral spirits, ethyl alcohol\nn07902336\taqua vitae, ardent spirits\nn07902443\teau de vie\nn07902520\tmoonshine, bootleg, corn liquor\nn07902698\tbathtub gin\nn07902799\taquavit, akvavit\nn07902937\tarrack, arak\nn07903101\tbitters\nn07903208\tbrandy\nn07903543\tapplejack\nn07903643\tCalvados\nn07903731\tArmagnac\nn07903841\tCognac\nn07903962\tgrappa\nn07904072\tkirsch\nn07904293\tslivovitz\nn07904395\tgin\nn07904637\tsloe gin\nn07904760\tgeneva, Holland gin, Hollands\nn07904865\tgrog\nn07904934\touzo\nn07905038\trum\nn07905296\tdemerara, demerara rum\nn07905386\tJamaica rum\nn07905474\tschnapps, schnaps\nn07905618\tpulque\nn07905770\tmescal\nn07905979\ttequila\nn07906111\tvodka\nn07906284\twhiskey, whisky\nn07906572\tblended whiskey, blended whisky\nn07906718\tbourbon\nn07906877\tcorn whiskey, corn whisky, corn\nn07907037\tfirewater\nn07907161\tIrish, Irish whiskey, Irish whisky\nn07907342\tpoteen\nn07907429\trye, rye whiskey, rye whisky\nn07907548\tScotch, Scotch whiskey, Scotch whisky, malt whiskey, malt whisky, Scotch malt whiskey, Scotch malt whisky\nn07907831\tsour mash, sour mash whiskey\nn07907943\tliqueur, cordial\nn07908411\tabsinth, absinthe\nn07908567\tamaretto\nn07908647\tanisette, anisette de Bordeaux\nn07908812\tbenedictine\nn07908923\tChartreuse\nn07909129\tcoffee liqueur\nn07909231\tcreme de cacao\nn07909362\tcreme de menthe\nn07909504\tcreme de fraise\nn07909593\tDrambuie\nn07909714\tGalliano\nn07909811\torange liqueur\nn07909954\tcuracao, curacoa\nn07910048\ttriple sec\nn07910152\tGrand 
Marnier\nn07910245\tkummel\nn07910379\tmaraschino, maraschino liqueur\nn07910538\tpastis\nn07910656\tPernod\nn07910799\tpousse-cafe\nn07910970\tKahlua\nn07911061\tratafia, ratafee\nn07911249\tsambuca\nn07911371\tmixed drink\nn07911677\tcocktail\nn07912093\tDom Pedro\nn07912211\thighball\nn07913180\tmixer\nn07913300\tbishop\nn07913393\tBloody Mary\nn07913537\tVirgin Mary, bloody shame\nn07913644\tbullshot\nn07913774\tcobbler\nn07913882\tcollins, Tom Collins\nn07914006\tcooler\nn07914128\trefresher\nn07914271\tsmoothie\nn07914413\tdaiquiri, rum cocktail\nn07914586\tstrawberry daiquiri\nn07914686\tNADA daiquiri\nn07914777\tspritzer\nn07914887\tflip\nn07914995\tgimlet\nn07915094\tgin and tonic\nn07915213\tgrasshopper\nn07915366\tHarvey Wallbanger\nn07915491\tjulep, mint julep\nn07915618\tmanhattan\nn07915800\tRob Roy\nn07915918\tmargarita\nn07916041\tmartini\nn07916183\tgin and it\nn07916319\tvodka martini\nn07916437\told fashioned\nn07916582\tpink lady\nn07917133\tSazerac\nn07917272\tscrewdriver\nn07917392\tsidecar\nn07917507\tScotch and soda\nn07917618\tsling\nn07917791\tbrandy sling\nn07917874\tgin sling\nn07917951\trum sling\nn07918028\tsour\nn07918193\twhiskey sour, whisky sour\nn07918309\tstinger\nn07918706\tswizzle\nn07918879\thot toddy, toddy\nn07919165\tzombie, zombi\nn07919310\tfizz\nn07919441\tIrish coffee\nn07919572\tcafe au lait\nn07919665\tcafe noir, demitasse\nn07919787\tdecaffeinated coffee, decaf\nn07919894\tdrip coffee\nn07920052\tespresso\nn07920222\tcaffe latte, latte\nn07920349\tcappuccino, cappuccino coffee, coffee cappuccino\nn07920540\ticed coffee, ice coffee\nn07920663\tinstant coffee\nn07920872\tmocha, mocha coffee\nn07920989\tmocha\nn07921090\tcassareep\nn07921239\tTurkish coffee\nn07921360\tchocolate milk\nn07921455\tcider, cyder\nn07921615\thard cider\nn07921834\tscrumpy\nn07921948\tsweet cider\nn07922041\tmulled cider\nn07922147\tperry\nn07922512\trotgut\nn07922607\tslug\nn07922764\tcocoa, chocolate, hot chocolate, drinking 
chocolate\nn07922955\tcriollo\nn07923748\tjuice\nn07924033\tfruit juice, fruit crush\nn07924276\tnectar\nn07924366\tapple juice\nn07924443\tcranberry juice\nn07924560\tgrape juice\nn07924655\tmust\nn07924747\tgrapefruit juice\nn07924834\torange juice\nn07924955\tfrozen orange juice, orange-juice concentrate\nn07925116\tpineapple juice\nn07925229\tlemon juice\nn07925327\tlime juice\nn07925423\tpapaya juice\nn07925500\ttomato juice\nn07925608\tcarrot juice\nn07925708\tV-8 juice\nn07925808\tkoumiss, kumis\nn07925966\tfruit drink, ade\nn07926250\tlemonade\nn07926346\tlimeade\nn07926442\torangeade\nn07926540\tmalted milk\nn07926785\tmate\nn07926920\tmulled wine\nn07927070\tnegus\nn07927197\tsoft drink\nn07927512\tpop, soda, soda pop, soda water, tonic\nn07927716\tbirch beer\nn07927836\tbitter lemon\nn07927931\tcola, dope\nn07928163\tcream soda\nn07928264\tegg cream\nn07928367\tginger ale, ginger pop\nn07928488\torange soda\nn07928578\tphosphate\nn07928696\tCoca Cola, Coke\nn07928790\tPepsi, Pepsi Cola\nn07928887\troot beer\nn07928998\tsarsaparilla\nn07929172\ttonic, tonic water, quinine water\nn07929351\tcoffee bean, coffee berry, coffee\nn07929519\tcoffee, java\nn07929940\tcafe royale, coffee royal\nn07930062\tfruit punch\nn07930205\tmilk punch\nn07930315\tmimosa, buck's fizz\nn07930433\tpina colada\nn07930554\tpunch\nn07930864\tcup\nn07931001\tchampagne cup\nn07931096\tclaret cup\nn07931280\twassail\nn07931452\tplanter's punch\nn07931612\tWhite Russian\nn07931733\tfish house punch\nn07931870\tMay wine\nn07932039\teggnog\nn07932323\tcassiri\nn07932454\tspruce beer\nn07932614\trickey\nn07932762\tgin rickey\nn07932841\ttea, tea leaf\nn07933154\ttea bag\nn07933274\ttea\nn07933530\ttea-like drink\nn07933652\tcambric tea\nn07933799\tcuppa, cupper\nn07933891\therb tea, herbal tea, herbal\nn07934032\ttisane\nn07934152\tcamomile tea\nn07934282\tice tea, iced tea\nn07934373\tsun tea\nn07934530\tblack tea\nn07934678\tcongou, congo, congou tea, English breakfast 
tea\nn07934800\tDarjeeling\nn07934908\torange pekoe, pekoe\nn07935043\tsouchong, soochong\nn07935152\tgreen tea\nn07935288\thyson\nn07935379\toolong\nn07935504\twater\nn07935737\tbottled water\nn07935878\tbranch water\nn07936015\tspring water\nn07936093\tsugar water\nn07936263\tdrinking water\nn07936459\tice water\nn07936548\tsoda water, carbonated water, club soda, seltzer, sparkling water\nn07936745\tmineral water\nn07936979\tseltzer\nn07937069\tVichy water\nn07937344\tperishable, spoilable\nn07937461\tcouscous\nn07937621\tramekin, ramequin\nn07938007\tmultivitamin, multivitamin pill\nn07938149\tvitamin pill\nn07938313\tsoul food\nn07938594\tmold, mould\nn07942152\tpeople\nn07951464\tcollection, aggregation, accumulation, assemblage\nn07954211\tbook, rule book\nn07977870\tlibrary\nn08079613\tbaseball club, ball club, club, nine\nn08182379\tcrowd\nn08238463\tclass, form, grade, course\nn08242223\tcore, nucleus, core group\nn08249459\tconcert band, military band\nn08253141\tdance\nn08256735\twedding, wedding party\nn08376250\tchain, concatenation\nn08385989\tpower breakfast\nn08492354\taerie, aery, eyrie, eyry\nn08492461\tagora\nn08494231\tamusement park, funfair, pleasure ground\nn08495908\taphelion\nn08496334\tapron\nn08500819\tinterplanetary space\nn08500989\tinterstellar space\nn08501887\tintergalactic space\nn08505018\tbush\nn08506347\tsemidesert\nn08511017\tbeam-ends\nn08517010\tbridgehead\nn08517676\tbus stop\nn08518171\tcampsite, campground, camping site, camping ground, bivouac, encampment, camping area\nn08519299\tdetention basin\nn08521623\tcemetery, graveyard, burial site, burial ground, burying ground, memorial park, necropolis\nn08523340\ttrichion, crinion\nn08524735\tcity, metropolis, urban center\nn08539072\tbusiness district, downtown\nn08539276\toutskirts\nn08540532\tborough\nn08547468\tcow pasture\nn08547544\tcrest\nn08551296\teparchy, exarchate\nn08554440\tsuburb, suburbia, suburban area\nn08555333\tstockbroker belt\nn08555710\tcrawlspace, crawl 
space\nn08558770\tsheikdom, sheikhdom\nn08558963\tresidence, abode\nn08559155\tdomicile, legal residence\nn08560295\tdude ranch\nn08569482\tfarmland, farming area\nn08571275\tmidfield\nn08571642\tfirebreak, fireguard\nn08571898\tflea market\nn08573674\tbattlefront, front, front line\nn08573842\tgarbage heap, junk heap, rubbish heap, scrapheap, trash heap, junk pile, trash pile, refuse heap\nn08578517\tbenthos, benthic division, benthonic zone\nn08579266\tgoldfield\nn08579352\tgrainfield, grain field\nn08580944\thalf-mast, half-staff\nn08583292\themline\nn08583455\theronry\nn08583554\thipline\nn08583682\thipline\nn08584914\thole-in-the-wall\nn08586978\tjunkyard\nn08589670\tisoclinic line, isoclinal\nn08596076\tlittoral, litoral, littoral zone, sands\nn08597579\tmagnetic pole\nn08598301\tgrassland\nn08598568\tmecca\nn08599174\tobserver's meridian\nn08599292\tprime meridian\nn08611339\tnombril\nn08611421\tno-parking zone\nn08613733\toutdoors, out-of-doors, open air, open\nn08614632\tfairground\nn08616050\tpasture, pastureland, grazing land, lea, ley\nn08618831\tperihelion\nn08619112\tperiselene, perilune\nn08623676\tlocus of infection\nn08628141\tkasbah, casbah\nn08633683\twaterfront\nn08640531\tresort, resort hotel, holiday resort\nn08640739\tresort area, playground, vacation spot\nn08640962\trough\nn08643267\tashram\nn08644045\tharborage, harbourage\nn08645104\tscrubland\nn08645212\tweald\nn08645318\twold\nn08647264\tschoolyard\nn08648917\tshowplace\nn08649711\tbedside\nn08651104\tsideline, out of bounds\nn08652376\tski resort\nn08658309\tsoil horizon\nn08658918\tgeological horizon\nn08659242\tcoal seam\nn08659331\tcoalface\nn08659446\tfield\nn08659861\toilfield\nn08661878\tTemperate Zone\nn08662427\tterreplein\nn08663051\tthree-mile limit\nn08663703\tdesktop\nn08663860\ttop\nn08673039\tkampong, campong\nn08674344\tsubtropics, semitropics\nn08676253\tbarrio\nn08677424\tveld, veldt\nn08677801\tvertex, peak, apex, acme\nn08678783\twaterline, water line, water 
level\nn08679167\thigh-water mark\nn08679269\tlow-water mark\nn08679562\tcontinental divide\nn08685188\tzodiac\nn08782627\tAegean island\nn08896327\tsultanate\nn09032191\tSwiss canton\nn09186592\tabyssal zone\nn09189157\taerie, aery, eyrie, eyry\nn09191635\tair bubble\nn09193551\talluvial flat, alluvial plain\nn09193705\talp\nn09194227\tAlpine glacier, Alpine type of glacier\nn09199101\tanthill, formicary\nn09201998\taquifer\nn09203827\tarchipelago\nn09205509\tarete\nn09206896\tarroyo\nn09206985\tascent, acclivity, rise, raise, climb, upgrade\nn09208496\tasterism\nn09209025\tasthenosphere\nn09210862\tatoll\nn09213434\tbank\nn09213565\tbank\nn09214060\tbar\nn09214269\tbarbecue pit\nn09214916\tbarrier reef\nn09215023\tbaryon, heavy particle\nn09215437\tbasin\nn09217230\tbeach\nn09218315\thoneycomb\nn09218494\tbelay\nn09218641\tben\nn09219233\tberm\nn09223487\tbladder stone, cystolith\nn09224725\tbluff\nn09226869\tborrow pit\nn09228055\tbrae\nn09229709\tbubble\nn09230041\tburrow, tunnel\nn09230202\tbutte\nn09231117\tcaldera\nn09233446\tcanyon, canon\nn09233603\tcanyonside\nn09238926\tcave\nn09239302\tcavern\nn09242389\tchasm\nn09245515\tcirque, corrie, cwm\nn09246464\tcliff, drop, drop-off\nn09247410\tcloud\nn09248153\tcoast\nn09248399\tcoastland\nn09249034\tcol, gap\nn09249155\tcollector\nn09251407\tcomet\nn09255070\tcontinental glacier\nn09256479\tcoral reef\nn09257843\tcove\nn09259025\tcrag\nn09259219\tcrater\nn09260907\tcultivated land, farmland, plowland, ploughland, tilled land, tillage, tilth\nn09262690\tdale\nn09263912\tdefile, gorge\nn09264803\tdelta\nn09265620\tdescent, declivity, fall, decline, declination, declension, downslope\nn09266604\tdiapir\nn09267854\tdivot\nn09268007\tdivot\nn09269341\tdown\nn09269472\tdownhill\nn09269882\tdraw\nn09270160\tdrey\nn09270657\tdrumlin\nn09270735\tdune, sand dune\nn09274152\tescarpment, scarp\nn09274305\tesker\nn09279986\tfireball\nn09281252\tflare star\nn09282208\tfloor\nn09283193\tfomite, 
vehicle\nn09283405\tfoothill\nn09283514\tfootwall\nn09283767\tforeland\nn09283866\tforeshore\nn09287415\tgauge boson\nn09287968\tgeological formation, formation\nn09288635\tgeyser\nn09289331\tglacier\nn09289596\tglen\nn09290350\tgopher hole\nn09290444\tgorge\nn09294877\tgrotto, grot\nn09295210\tgrowler\nn09295946\tgulch, flume\nn09300306\tgully\nn09300905\thail\nn09302616\thighland, upland\nn09303008\thill\nn09303528\thillside\nn09304750\thole, hollow\nn09305031\thollow, holler\nn09305898\thot spring, thermal spring\nn09308572\ticeberg, berg\nn09308743\ticecap, ice cap\nn09309046\tice field\nn09309168\tice floe, floe\nn09309292\tice mass\nn09310616\tinclined fault\nn09315159\tion\nn09319604\tisthmus\nn09325824\tkidney stone, urinary calculus, nephrolith, renal calculus\nn09326662\tknoll, mound, hillock, hummock, hammock\nn09327077\tkopje, koppie\nn09327538\tKuiper belt, Edgeworth-Kuiper belt\nn09330378\tlake bed, lake bottom\nn09331251\tlakefront\nn09332890\tlakeside, lakeshore\nn09335693\tlandfall\nn09335809\tlandfill\nn09336555\tlather\nn09337048\tleak\nn09337253\tledge, shelf\nn09338013\tlepton\nn09339810\tlithosphere, geosphere\nn09344198\tlowland\nn09344324\tlunar crater\nn09344724\tmaar\nn09348460\tmassif\nn09349648\tmeander\nn09351905\tmesa, table\nn09352849\tmeteorite\nn09353815\tmicrofossil\nn09354511\tmidstream\nn09357346\tmolehill\nn09357447\tmonocline\nn09359803\tmountain, mount\nn09361517\tmountainside, versant\nn09362316\tmouth\nn09362945\tmull\nn09366017\tnatural depression, depression\nn09366317\tnatural elevation, elevation\nn09375606\tnullah\nn09376198\tocean\nn09376526\tocean floor, sea floor, ocean bottom, seabed, sea bottom, Davy Jones's locker, Davy Jones\nn09376786\toceanfront\nn09381242\toutcrop, outcropping, rock outcrop\nn09382099\toxbow\nn09384106\tpallasite\nn09389867\tperforation\nn09391386\tphotosphere\nn09391644\tpiedmont\nn09391774\tPiedmont glacier, Piedmont type of glacier\nn09392402\tpinetum\nn09393524\tplage\nn09393605\tplain, 
field, champaign\nn09396465\tpoint\nn09396608\tpolar glacier\nn09398076\tpothole, chuckhole\nn09398677\tprecipice\nn09399592\tpromontory, headland, head, foreland\nn09400584\tptyalith\nn09400987\tpulsar\nn09402944\tquicksand\nn09403086\trabbit burrow, rabbit hole\nn09403211\tradiator\nn09403427\trainbow\nn09403734\trange, mountain range, range of mountains, chain, mountain chain, chain of mountains\nn09405078\trangeland\nn09405787\travine\nn09406793\treef\nn09409512\tridge\nn09409752\tridge, ridgeline\nn09410224\trift valley\nn09411189\triparian forest\nn09411295\tripple mark\nn09415584\triverbank, riverside\nn09415671\triverbed, river bottom\nn09416076\trock, stone\nn09416890\troof\nn09421031\tsaltpan\nn09421799\tsandbank\nn09421951\tsandbar, sand bar\nn09422190\tsandpit\nn09422631\tsanitary landfill\nn09425019\tsawpit\nn09425344\tscablands\nn09428293\tseashore, coast, seacoast, sea-coast\nn09428628\tseaside, seaboard\nn09429630\tseif dune\nn09432283\tshell\nn09432990\tshiner\nn09433312\tshoal\nn09433442\tshore\nn09433839\tshoreline\nn09435739\tsinkhole, sink, swallow hole\nn09436444\tski slope\nn09436708\tsky\nn09437454\tslope, incline, side\nn09438844\tsnowcap\nn09438940\tsnowdrift\nn09439032\tsnowfield\nn09439213\tsoapsuds, suds, lather\nn09442595\tspit, tongue\nn09443281\tspoor\nn09443641\tspume\nn09444783\tstar\nn09445008\tsteep\nn09445289\tsteppe\nn09447666\tstrand\nn09448690\tstreambed, creek bed\nn09450163\tsun, Sun\nn09451237\tsupernova\nn09452291\tswale\nn09452395\tswamp, swampland\nn09452760\tswell\nn09453008\ttableland, plateau\nn09454153\ttalus, scree\nn09454412\ttangle\nn09454744\ttar pit\nn09456207\tterrace, bench\nn09457979\ttidal basin\nn09458269\ttideland\nn09459979\ttor\nn09460046\ttor\nn09461069\tTrapezium\nn09462600\ttroposphere\nn09463226\ttundra\nn09464486\ttwinkler\nn09466678\tuphill\nn09467696\turolith\nn09468604\tvalley, vale\nn09470027\tvehicle-borne transmission\nn09470222\tvein, mineral vein\nn09472413\tvolcanic crater, 
crater\nn09472597\tvolcano\nn09474010\twadi\nn09474412\twall\nn09474765\twarren, rabbit warren\nn09475044\twasp's nest, wasps' nest, hornet's nest, hornets' nest\nn09475179\twatercourse\nn09475925\twaterside\nn09476123\twater table, water level, groundwater level\nn09478210\twhinstone, whin\nn09480959\twormcast\nn09481120\txenolith\nn09493983\tCirce\nn09495962\tgryphon, griffin, griffon\nn09505153\tspiritual leader\nn09537660\tmessiah, christ\nn09556121\tRhea Silvia, Rea Silvia\nn09605110\tnumber one\nn09606009\tadventurer, venturer\nn09606527\tanomaly, unusual person\nn09607630\tappointee, appointment\nn09607782\targonaut\nn09607903\tAshkenazi\nn09608709\tbenefactor, helper\nn09610255\tcolor-blind person\nn09610405\tcommoner, common man, common person\nn09611722\tconservator\nn09612700\tcontrarian\nn09613118\tcontadino\nn09613191\tcontestant\nn09613690\tcosigner, cosignatory\nn09615336\tdiscussant\nn09616573\tenologist, oenologist, fermentologist\nn09616922\tentertainer\nn09617161\teulogist, panegyrist\nn09617435\tex-gambler\nn09617577\texperimenter\nn09617696\texperimenter\nn09618760\texponent\nn09618880\tex-president\nn09618957\tface\nn09619168\tfemale, female person\nn09619452\tfinisher\nn09620078\tinhabitant, habitant, dweller, denizen, indweller\nn09620794\tnative, indigen, indigene, aborigine, aboriginal\nn09621232\tnative\nn09622049\tjuvenile, juvenile person\nn09622302\tlover\nn09624168\tmale, male person\nn09624559\tmediator, go-between, intermediator, intermediary, intercessor\nn09624899\tmediatrix\nn09625401\tnational, subject\nn09626238\tpeer, equal, match, compeer\nn09627807\tprize winner, lottery winner\nn09627906\trecipient, receiver\nn09629065\treligionist\nn09629246\tsensualist\nn09629752\ttraveler, traveller\nn09631129\tunwelcome person, persona non grata\nn09632274\tunskilled person\nn09632518\tworker\nn09633969\twrongdoer, offender\nn09635534\tBlack African\nn09635635\tAfrikaner, Afrikander, Boer\nn09635973\tAryan\nn09636339\tBlack, Black 
person, blackamoor, Negro, Negroid\nn09637339\tBlack woman\nn09638454\tmulatto\nn09638875\tWhite, White person, Caucasian\nn09639382\tCircassian\nn09639919\tSemite\nn09640327\tChaldean, Chaldaean, Chaldee\nn09640715\tElamite\nn09641002\twhite man\nn09641578\tWASP, white Anglo-Saxon Protestant\nn09643799\tgook, slant-eye\nn09644152\tMongol, Mongolian\nn09644657\tTatar, Tartar, Mongol Tatar\nn09648743\tNahuatl\nn09648911\tAztec\nn09649067\tOlmec\nn09650729\tBiloxi\nn09650839\tBlackfoot\nn09650989\tBrule\nn09651123\tCaddo\nn09651968\tCheyenne\nn09652149\tChickasaw\nn09653144\tCocopa, Cocopah\nn09653438\tComanche\nn09654079\tCreek\nn09654518\tDelaware\nn09654898\tDiegueno\nn09655213\tEsselen\nn09655466\tEyeish\nn09656077\tHavasupai\nn09657206\tHunkpapa\nn09657748\tIowa, Ioway\nn09658254\tKalapooia, Kalapuya, Calapooya, Calapuya\nn09658398\tKamia\nn09658815\tKekchi\nn09658921\tKichai\nn09659039\tKickapoo\nn09659188\tKiliwa, Kiliwi\nn09660010\tMalecite\nn09660240\tMaricopa\nn09661873\tMohican, Mahican\nn09662038\tMuskhogean, Muskogean\nn09662661\tNavaho, Navajo\nn09662951\tNootka\nn09663248\tOglala, Ogalala\nn09663786\tOsage\nn09663999\tOneida\nn09664556\tPaiute, Piute\nn09664908\tPassamaquody\nn09665367\tPenobscot\nn09665545\tPenutian\nn09666349\tPotawatomi\nn09666476\tPowhatan\nn09666883\tkachina\nn09667358\tSalish\nn09668199\tShahaptian, Sahaptin, Sahaptino\nn09668437\tShasta\nn09668562\tShawnee\nn09668988\tSihasapa\nn09669631\tTeton, Lakota, Teton Sioux, Teton Dakota\nn09670280\tTaracahitian\nn09670521\tTarahumara\nn09670909\tTuscarora\nn09671089\tTutelo\nn09672590\tYana\nn09672725\tYavapai\nn09672840\tYokuts\nn09673091\tYuma\nn09674412\tGadaba\nn09674786\tKolam\nn09675045\tKui\nn09675673\tToda\nn09675799\tTulu\nn09675922\tGujarati, Gujerati\nn09676021\tKashmiri\nn09676247\tPunjabi, Panjabi\nn09676884\tSlav\nn09677427\tAnabaptist\nn09678747\tAdventist, Second Adventist\nn09679028\tgentile, non-Jew, goy\nn09679170\tgentile\nn09679925\tCatholic\nn09680908\tOld 
Catholic\nn09681107\tUniat, Uniate, Uniate Christian\nn09681234\tCopt\nn09681973\tJewess\nn09683180\tJihadist\nn09683757\tBuddhist\nn09683924\tZen Buddhist\nn09684082\tMahayanist\nn09684901\tswami\nn09685233\tHare Krishna\nn09685806\tShintoist\nn09686262\tEurafrican\nn09686401\tEurasian\nn09688233\tGael\nn09688804\tFrank\nn09689435\tAfghan, Afghanistani\nn09689958\tAlbanian\nn09690083\tAlgerian\nn09690208\tAltaic\nn09690496\tAndorran\nn09690621\tAngolan\nn09690864\tAnguillan\nn09691604\tAustrian\nn09691729\tBahamian\nn09691858\tBahraini, Bahreini\nn09692125\tBasotho\nn09692915\tHerero\nn09693244\tLuba, Chiluba\nn09693982\tBarbadian\nn09694664\tBolivian\nn09694771\tBornean\nn09695019\tCarioca\nn09695132\tTupi\nn09695514\tBruneian\nn09695620\tBulgarian\nn09695979\tByelorussian, Belorussian, White Russian\nn09696456\tCameroonian\nn09696585\tCanadian\nn09696763\tFrench Canadian\nn09697401\tCentral American\nn09697986\tChilean\nn09698644\tCongolese\nn09699020\tCypriot, Cypriote, Cyprian\nn09699642\tDane\nn09700125\tDjiboutian\nn09700964\tBritisher, Briton, Brit\nn09701148\tEnglish person\nn09701833\tEnglishwoman\nn09702134\tAnglo-Saxon\nn09702673\tAngle\nn09703101\tWest Saxon\nn09703344\tLombard, Langobard\nn09703485\tlimey, John Bull\nn09703708\tCantabrigian\nn09703809\tCornishman\nn09703932\tCornishwoman\nn09704057\tLancastrian\nn09704157\tLancastrian\nn09704283\tGeordie\nn09705003\tOxonian\nn09705124\tEthiopian\nn09705671\tAmhara\nn09705784\tEritrean\nn09706029\tFinn\nn09706255\tKomi\nn09707061\tLivonian\nn09707289\tLithuanian\nn09707735\tSelkup, Ostyak-Samoyed\nn09708750\tParisian\nn09708889\tParisienne\nn09709531\tCreole\nn09709673\tCreole\nn09710041\tGabonese\nn09710164\tGreek, Hellene\nn09710886\tDorian\nn09711132\tAthenian\nn09711435\tLaconian\nn09712324\tGuyanese\nn09712448\tHaitian\nn09712696\tMalay, Malayan\nn09712967\tMoro\nn09713108\tNetherlander, Dutchman, Hollander\nn09714120\tIcelander\nn09714694\tIraqi, 
Iraki\nn09715165\tIrishman\nn09715303\tIrishwoman\nn09715427\tDubliner\nn09716047\tItalian\nn09716933\tRoman\nn09717233\tSabine\nn09718217\tJapanese, Nipponese\nn09718811\tJordanian\nn09718936\tKorean\nn09719309\tKenyan\nn09719794\tLao, Laotian\nn09720033\tLapp, Lapplander, Sami, Saami, Same, Saame\nn09720256\tLatin American, Latino\nn09720595\tLebanese\nn09720702\tLevantine\nn09720842\tLiberian\nn09721244\tLuxemburger, Luxembourger\nn09721444\tMacedonian\nn09722064\tSabahan\nn09722658\tMexican\nn09722817\tChicano\nn09723067\tMexican-American, Mexicano\nn09723819\tNamibian\nn09723944\tNauruan\nn09724234\tGurkha\nn09724533\tNew Zealander, Kiwi\nn09724656\tNicaraguan\nn09724785\tNigerian\nn09725000\tHausa, Haussa\nn09725229\tNorth American\nn09725546\tNova Scotian, bluenose\nn09725653\tOmani\nn09725772\tPakistani\nn09725935\tBrahui\nn09726621\tSouth American Indian\nn09726811\tCarib, Carib Indian\nn09727440\tFilipino\nn09727826\tPolynesian\nn09728137\tQatari, Katari\nn09728285\tRomanian, Rumanian\nn09729062\tMuscovite\nn09729156\tGeorgian\nn09730077\tSarawakian\nn09730204\tScandinavian, Norse, Northman\nn09730824\tSenegalese\nn09731343\tSlovene\nn09731436\tSouth African\nn09731571\tSouth American\nn09732170\tSudanese\nn09733459\tSyrian\nn09733793\tTahitian\nn09734185\tTanzanian\nn09734450\tTibetan\nn09734535\tTogolese\nn09734639\tTuareg\nn09735258\tTurki\nn09735654\tChuvash\nn09736485\tTurkoman, Turkmen, Turcoman\nn09736798\tUzbek, Uzbeg, Uzbak, Usbek, Usbeg\nn09736945\tUgandan\nn09737050\tUkranian\nn09737161\tYakut\nn09737453\tTungus, Evenk\nn09738121\tIgbo\nn09738400\tAmerican\nn09740724\tAnglo-American\nn09741074\tAlaska Native, Alaskan Native, Native Alaskan\nn09741331\tArkansan, Arkansawyer\nn09741722\tCarolinian\nn09741816\tColoradan\nn09741904\tConnecticuter\nn09741999\tDelawarean, Delawarian\nn09742101\tFloridian\nn09742315\tGerman American\nn09742927\tIllinoisan\nn09743487\tMainer, Down Easter\nn09743601\tMarylander\nn09743792\tMinnesotan, 
Gopher\nn09744161\tNebraskan, Cornhusker\nn09744346\tNew Hampshirite, Granite Stater\nn09744462\tNew Jerseyan, New Jerseyite, Garden Stater\nn09744679\tNew Yorker\nn09744834\tNorth Carolinian, Tarheel\nn09745229\tOregonian, Beaver\nn09745324\tPennsylvanian, Keystone Stater\nn09745834\tTexan\nn09745933\tUtahan\nn09746936\tUruguayan\nn09747191\tVietnamese, Annamese\nn09747495\tGambian\nn09748101\tEast German\nn09748408\tBerliner\nn09748648\tPrussian\nn09748889\tGhanian\nn09749386\tGuinean\nn09750282\tPapuan\nn09750641\tWalloon\nn09750770\tYemeni\nn09750891\tYugoslav, Jugoslav, Yugoslavian, Jugoslavian\nn09751076\tSerbian, Serb\nn09751496\tXhosa\nn09751622\tZairese, Zairean\nn09751895\tZimbabwean\nn09752023\tZulu\nn09752519\tGemini, Twin\nn09753348\tSagittarius, Archer\nn09753792\tPisces, Fish\nn09754152\tabbe\nn09754217\tabbess, mother superior, prioress\nn09754633\tabnegator\nn09754907\tabridger, abbreviator\nn09755086\tabstractor, abstracter\nn09755241\tabsconder\nn09755555\tabsolver\nn09755788\tabecedarian\nn09755893\taberrant\nn09756049\tabettor, abetter\nn09756195\tabhorrer\nn09756961\tabomination\nn09757449\tabseiler, rappeller\nn09758173\tabstainer, ascetic\nn09758885\tacademic administrator\nn09759501\tacademician\nn09760290\taccessory before the fact\nn09760609\tcompanion\nn09760913\taccompanist, accompanyist\nn09761068\taccomplice, confederate\nn09761753\taccount executive, account representative, registered representative, customer's broker, customer's man\nn09762011\taccused\nn09762385\taccuser\nn09763272\tacid head\nn09763784\tacquaintance, friend\nn09764201\tacquirer\nn09764598\taerialist\nn09764732\taction officer\nn09764900\tactive\nn09765118\tactive citizen\nn09765278\tactor, histrion, player, thespian, role player\nn09767197\tactor, doer, worker\nn09769076\taddict, nut, freak, junkie, junky\nn09769525\tadducer\nn09769929\tadjuster, adjustor, claims adjuster, claims adjustor, claim agent\nn09770179\tadjutant, aide, aide-de-camp\nn09770359\tadjutant 
general\nn09771435\tadmirer, adorer\nn09772330\tadoptee\nn09772746\tadulterer, fornicator\nn09772930\tadulteress, fornicatress, hussy, jade, loose woman, slut, strumpet, trollop\nn09773962\tadvertiser, advertizer, adman\nn09774167\tadvisee\nn09774783\tadvocate, advocator, proponent, exponent\nn09775907\taeronautical engineer\nn09776346\taffiliate\nn09776642\taffluent\nn09776807\taficionado\nn09777870\tbuck sergeant\nn09778266\tagent-in-place\nn09778537\taggravator, annoyance\nn09778783\tagitator, fomenter\nn09778927\tagnostic\nn09779124\tagnostic, doubter\nn09779280\tagonist\nn09779461\tagony aunt\nn09779790\tagriculturist, agriculturalist, cultivator, grower, raiser\nn09780395\tair attache\nn09780828\tair force officer, commander\nn09780984\tairhead\nn09781398\tair traveler, air traveller\nn09781504\talarmist\nn09781650\talbino\nn09782167\talcoholic, alky, dipsomaniac, boozer, lush, soaker, souse\nn09782397\talderman\nn09782855\talexic\nn09783537\talienee, grantee\nn09783776\talienor\nn09783884\taliterate, aliterate person\nn09784043\talgebraist\nn09784160\tallegorizer, allegoriser\nn09784564\talliterator\nn09785236\talmoner, medical social worker\nn09785659\talpinist\nn09785891\taltar boy\nn09786115\talto\nn09787534\tambassador, embassador\nn09787765\tambassador\nn09788073\tambusher\nn09788237\tamicus curiae, friend of the court\nn09789150\tamoralist\nn09789566\tamputee\nn09789898\tanalogist\nn09790047\tanalphabet, analphabetic\nn09790482\tanalyst\nn09791014\tindustry analyst\nn09791419\tmarket strategist\nn09791816\tanarchist, nihilist, syndicalist\nn09792125\tanathema, bete noire\nn09792555\tancestor, ascendant, ascendent, antecedent, root\nn09792969\tanchor, anchorman, anchorperson\nn09793141\tancient\nn09793352\tanecdotist, raconteur\nn09793946\tangler, troller\nn09794550\tanimator\nn09794668\tanimist\nn09795010\tannotator\nn09795124\tannouncer\nn09795334\tannouncer\nn09796809\tanti\nn09796974\tanti-American\nn09797742\tanti-Semite, 
Jew-baiter\nn09797873\tAnzac\nn09797998\tape-man\nn09798096\taphakic\nn09800469\tappellant, plaintiff in error\nn09800964\tappointee\nn09801102\tapprehender\nn09801275\tApril fool\nn09801533\taspirant, aspirer, hopeful, wannabe, wannabee\nn09802445\tappreciator\nn09802641\tappropriator\nn09802951\tArabist\nn09804230\tarchaist\nn09805151\tarchbishop\nn09805324\tarcher, bowman\nn09805475\tarchitect, designer\nn09806944\tarchivist\nn09807075\tarchpriest, hierarch, high priest, prelate, primate\nn09808080\tAristotelian, Aristotelean, Peripatetic\nn09808591\tarmiger\nn09809279\tarmy attache\nn09809538\tarmy engineer, military engineer\nn09809749\tarmy officer\nn09809925\tarranger, adapter, transcriber\nn09810166\tarrival, arriver, comer\nn09811568\tarthritic\nn09811712\tarticulator\nn09811852\tartilleryman, cannoneer, gunner, machine gunner\nn09813219\tartist's model, sitter\nn09814252\tassayer\nn09814381\tassemblyman\nn09814488\tassemblywoman\nn09814567\tassenter\nn09814660\tasserter, declarer, affirmer, asseverator, avower\nn09815455\tassignee\nn09815790\tassistant, helper, help, supporter\nn09816654\tassistant professor\nn09816771\tassociate\nn09817174\tassociate\nn09817386\tassociate professor\nn09818022\tastronaut, spaceman, cosmonaut\nn09819477\tcosmographer, cosmographist\nn09820044\tatheist\nn09820263\tathlete, jock\nn09821831\tattendant, attender, tender\nn09822830\tattorney general\nn09823153\tauditor\nn09823287\taugur, auspex\nn09823502\taunt, auntie, aunty\nn09823832\tau pair girl\nn09824135\tauthoritarian, dictator\nn09824609\tauthority\nn09825096\tauthorizer, authoriser\nn09825750\tautomobile mechanic, auto-mechanic, car-mechanic, mechanic, grease monkey\nn09826204\taviator, aeronaut, airman, flier, flyer\nn09826605\taviatrix, airwoman, aviatress\nn09826821\tayah\nn09827246\tbabu, baboo\nn09827363\tbaby, babe, sister\nn09828216\tbaby\nn09828403\tbaby boomer, boomer\nn09828988\tbaby farmer\nn09830194\tback\nn09830400\tbackbencher\nn09830629\tbackpacker, 
packer\nn09830759\tbackroom boy, brain truster\nn09830926\tbackscratcher\nn09831962\tbad person\nn09832456\tbaggage\nn09832633\tbag lady\nn09832978\tbailee\nn09833111\tbailiff\nn09833275\tbailor\nn09833441\tbairn\nn09833536\tbaker, bread maker\nn09833751\tbalancer\nn09833997\tbalker, baulker, noncompliant\nn09834258\tball-buster, ball-breaker\nn09834378\tball carrier, runner\nn09834699\tballet dancer\nn09834885\tballet master\nn09835017\tballet mistress\nn09835153\tballetomane\nn09835230\tball hawk\nn09835348\tballoonist\nn09835506\tballplayer, baseball player\nn09836160\tbullfighter, toreador\nn09836343\tbanderillero\nn09836519\tmatador\nn09836786\tpicador\nn09837459\tbandsman\nn09837720\tbanker\nn09838295\tbank robber\nn09838370\tbankrupt, insolvent\nn09838621\tbantamweight\nn09839702\tbarmaid\nn09840217\tbaron, big businessman, business leader, king, magnate, mogul, power, top executive, tycoon\nn09840435\tbaron\nn09840520\tbaron\nn09841188\tbartender, barman, barkeep, barkeeper, mixologist\nn09841515\tbaseball coach, baseball manager\nn09841696\tbase runner, runner\nn09842047\tbasketball player, basketeer, cager\nn09842288\tbasketweaver, basketmaker\nn09842395\tBasket Maker\nn09842528\tbass, basso\nn09842823\tbastard, by-blow, love child, illegitimate child, illegitimate, whoreson\nn09843443\tbat boy\nn09843602\tbather\nn09843716\tbatman\nn09843824\tbaton twirler, twirler\nn09844457\tBavarian\nn09844898\tbeadsman, bedesman\nn09845401\tbeard\nn09845849\tbeatnik, beat\nn09846142\tbeauty consultant\nn09846469\tBedouin, Beduin\nn09846586\tbedwetter, bed wetter, wetter\nn09846755\tbeekeeper, apiarist, apiculturist\nn09846894\tbeer drinker, ale drinker\nn09847267\tbeggarman\nn09847344\tbeggarwoman\nn09847543\tbeldam, beldame\nn09848110\ttheist\nn09848489\tbeliever, truster\nn09849167\tbell founder\nn09849990\tbenedick, benedict\nn09850760\tberserker, berserk\nn09850974\tbesieger\nn09851165\tbest, topper\nn09851575\tbetrothed\nn09853541\tBig 
Brother\nn09853645\tbigot\nn09853881\tbig shot, big gun, big wheel, big cheese, big deal, big enchilada, big fish, head honcho\nn09854218\tbig sister\nn09854421\tbilliard player\nn09854915\tbiochemist\nn09855433\tbiographer\nn09856401\tbird fancier\nn09856671\tbirth\nn09856827\tbirth-control campaigner, birth-control reformer\nn09857007\tbisexual, bisexual person\nn09858165\tblack belt\nn09858299\tblackmailer, extortioner, extortionist\nn09858733\tBlack Muslim\nn09859152\tblacksmith\nn09859285\tblade\nn09859684\tbleacher\nn09859975\tblind date\nn09861287\tbluecoat\nn09861599\tbluestocking, bas bleu\nn09861863\tboatbuilder\nn09861946\tboatman, boater, waterman\nn09862183\tboatswain, bos'n, bo's'n, bosun, bo'sun\nn09862621\tbobby\nn09863031\tbodyguard, escort\nn09863339\tboffin\nn09863749\tBolshevik, Marxist, red, bolshie, bolshy\nn09863936\tBolshevik, Bolshevist\nn09864632\tbombshell\nn09864968\tbondman, bondsman\nn09865068\tbondwoman, bondswoman, bondmaid\nn09865162\tbondwoman, bondswoman, bondmaid\nn09865398\tbond servant\nn09865672\tbook agent\nn09865744\tbookbinder\nn09866115\tbookkeeper\nn09866354\tbookmaker\nn09866559\tbookworm\nn09866661\tbooster, shoplifter, lifter\nn09866817\tbootblack, shoeblack\nn09866922\tbootlegger, moonshiner\nn09867069\tbootmaker, boot maker\nn09867154\tborderer\nn09867311\tborder patrolman\nn09868270\tbotanist, phytologist, plant scientist\nn09868782\tbottom feeder\nn09868899\tboulevardier\nn09869317\tbounty hunter\nn09869447\tbounty hunter\nn09869578\tBourbon\nn09870096\tbowler\nn09871095\tslugger, slogger\nn09871229\tcub, lad, laddie, sonny, sonny boy\nn09871681\tBoy Scout\nn09871867\tboy scout\nn09871952\tboy wonder\nn09872066\tbragger, braggart, boaster, blowhard, line-shooter, vaunter\nn09872557\tbrahman, brahmin\nn09873348\tbrawler\nn09873473\tbreadwinner\nn09873769\tbreaststroker\nn09873899\tbreeder, stock breeder\nn09874428\tbrick\nn09874725\tbride\nn09874862\tbridesmaid, maid of honor\nn09875025\tbridge 
agent\nn09875979\tbroadcast journalist\nn09876701\tBrother\nn09877288\tbrother-in-law\nn09877587\tbrowser\nn09877750\tBrummie, Brummy\nn09877951\tbuddy, brother, chum, crony, pal, sidekick\nn09878921\tbull\nn09879552\tbully\nn09880189\tbunny, bunny girl\nn09880741\tburglar\nn09881265\tbursar\nn09881358\tbusboy, waiter's assistant\nn09881895\tbusiness editor\nn09883047\tbusiness traveler\nn09883452\tbuster\nn09883807\tbusybody, nosy-parker, nosey-parker, quidnunc\nn09885059\tbuttinsky\nn09885866\tcabinetmaker, furniture maker\nn09886403\tcaddie, golf caddie\nn09886540\tcadet, plebe\nn09888635\tcaller, caller-out\nn09889065\tcall girl\nn09889170\tcalligrapher, calligraphist\nn09889691\tcampaigner, candidate, nominee\nn09889941\tcamper\nn09890192\tcamp follower\nn09890749\tcandidate, prospect\nn09891730\tcanonist\nn09892262\tcapitalist\nn09892513\tcaptain, headwaiter, maitre d'hotel, maitre d'\nn09892693\tcaptain, senior pilot\nn09893191\tcaptain\nn09893344\tcaptain, chieftain\nn09893502\tcaptive\nn09893600\tcaptive\nn09894143\tcardinal\nn09894445\tcardiologist, heart specialist, heart surgeon\nn09894654\tcard player\nn09894909\tcardsharp, card sharp, cardsharper, card sharper, sharper, sharpie, sharpy, card shark\nn09895222\tcareerist\nn09895480\tcareer man\nn09895561\tcaregiver\nn09895701\tcaretaker\nn09895902\tcaretaker\nn09896170\tcaricaturist\nn09896311\tcarillonneur\nn09896401\tcaroler, caroller\nn09896685\tcarpenter\nn09896826\tcarper, niggler\nn09898020\tCartesian\nn09899289\tcashier\nn09899671\tcasualty, injured party\nn09899782\tcasualty\nn09899929\tcasuist, sophist\nn09901337\tcatechist\nn09901502\tcatechumen, neophyte\nn09901642\tcaterer\nn09901786\tCatholicos\nn09901921\tcat fancier\nn09902128\tCavalier, Royalist\nn09902353\tcavalryman, trooper\nn09902731\tcaveman, cave man, cave dweller, troglodyte\nn09902851\tcelebrant\nn09902954\tcelebrant, celebrator, celebrater\nn09903153\tcelebrity, famous person\nn09903501\tcellist, 
violoncellist\nn09903639\tcensor\nn09903936\tcensor\nn09904208\tcentenarian\nn09904837\tcentrist, middle of the roader, moderate, moderationist\nn09905050\tcenturion\nn09905185\tcertified public accountant, CPA\nn09905530\tchachka, tsatske, tshatshke, tchotchke, tchotchkeleh\nn09906293\tchambermaid, fille de chambre\nn09906449\tchameleon\nn09906704\tchampion, champ, title-holder\nn09907804\tchandler\nn09908769\tprison chaplain\nn09909660\tcharcoal burner\nn09909929\tcharge d'affaires\nn09910222\tcharioteer\nn09910374\tcharmer, beguiler\nn09910556\tchartered accountant\nn09910840\tchartist, technical analyst\nn09911226\tcharwoman, char, cleaning woman, cleaning lady, woman\nn09912431\tmale chauvinist, sexist\nn09912681\tcheapskate, tightwad\nn09912907\tChechen\nn09912995\tchecker\nn09913329\tcheerer\nn09913455\tcheerleader\nn09913593\tcheerleader\nn09915434\tCheops, Khufu\nn09915651\tchess master\nn09916348\tchief executive officer, CEO, chief operating officer\nn09917214\tchief of staff\nn09917345\tchief petty officer\nn09917481\tChief Secretary\nn09917593\tchild, kid, youngster, minor, shaver, nipper, small fry, tiddler, tike, tyke, fry, nestling\nn09918248\tchild, kid\nn09918554\tchild, baby\nn09918867\tchild prodigy, infant prodigy, wonder child\nn09919061\tchimneysweeper, chimneysweep, sweep\nn09919200\tchiropractor\nn09919451\tchit\nn09919899\tchoker\nn09920106\tchoragus\nn09920283\tchoreographer\nn09920901\tchorus girl, showgirl, chorine\nn09921034\tchosen\nn09923003\tcicerone\nn09923186\tcigar smoker\nn09923418\tcipher, cypher, nobody, nonentity\nn09923561\tcircus acrobat\nn09923673\tcitizen\nn09923996\tcity editor\nn09924106\tcity father\nn09924195\tcity man\nn09924313\tcity slicker, city boy\nn09924437\tcivic leader, civil leader\nn09924996\tcivil rights leader, civil rights worker, civil rights activist\nn09927089\tcleaner\nn09927451\tclergyman, reverend, man of the cloth\nn09928136\tcleric, churchman, divine, 
ecclesiastic\nn09928451\tclerk\nn09928845\tclever Dick, clever clogs\nn09929202\tclimatologist\nn09929298\tclimber\nn09929577\tclinician\nn09930257\tcloser, finisher\nn09930628\tcloset queen\nn09930876\tclown, buffoon, goof, goofball, merry andrew\nn09931165\tclown, buffoon\nn09931418\tcoach, private instructor, tutor\nn09931640\tcoach, manager, handler\nn09932098\tpitching coach\nn09932336\tcoachman\nn09932508\tcoal miner, collier, pitman\nn09932788\tcoastguardsman\nn09933020\tcobber\nn09933098\tcobbler, shoemaker\nn09933842\tcodger, old codger\nn09933972\tco-beneficiary\nn09934337\tcog\nn09934488\tcognitive neuroscientist\nn09934774\tcoiffeur\nn09935107\tcoiner\nn09935434\tcollaborator, cooperator, partner, pardner\nn09936825\tcolleen\nn09936892\tcollege student, university student\nn09937056\tcollegian, college man, college boy\nn09937688\tcolonial\nn09937802\tcolonialist\nn09937903\tcolonizer, coloniser\nn09938080\tcoloratura, coloratura soprano\nn09938449\tcolor guard\nn09938991\tcolossus, behemoth, giant, heavyweight, titan\nn09940725\tcomedian\nn09940818\tcomedienne\nn09941089\tcomer\nn09941571\tcommander\nn09941787\tcommander in chief, generalissimo\nn09941964\tcommanding officer, commandant, commander\nn09942697\tcommissar, political commissar\nn09942970\tcommissioned officer\nn09943239\tcommissioned military officer\nn09943811\tcommissioner\nn09944022\tcommissioner\nn09944160\tcommittee member\nn09944430\tcommitteewoman\nn09945021\tcommodore\nn09945223\tcommunicant\nn09945319\tcommunist, commie\nn09945603\tCommunist\nn09945745\tcommuter\nn09946814\tcompere\nn09947127\tcomplexifier\nn09950457\tcompulsive\nn09950728\tcomputational linguist\nn09951070\tcomputer scientist\nn09951274\tcomputer user\nn09951524\tComrade\nn09951616\tconcert-goer, music lover\nn09952163\tconciliator, make-peace, pacifier, peacemaker, reconciler\nn09953052\tconductor\nn09953350\tconfectioner, candymaker\nn09953615\tConfederate\nn09954355\tconfessor\nn09954639\tconfidant, 
intimate\nn09955406\tConfucian, Confucianist\nn09955944\trep\nn09956578\tconqueror, vanquisher\nn09957523\tConservative\nn09958133\tNonconformist, chapelgoer\nn09958292\tAnglican\nn09958447\tconsignee\nn09958569\tconsigner, consignor\nn09959142\tconstable\nn09959658\tconstructivist\nn09960688\tcontractor\nn09961198\tcontralto\nn09961331\tcontributor\nn09961469\tcontrol freak\nn09961605\tconvalescent\nn09961739\tconvener\nn09962966\tconvict, con, inmate, yard bird, yardbird\nn09964202\tcopilot, co-pilot\nn09964411\tcopycat, imitator, emulator, ape, aper\nn09965515\tcoreligionist\nn09965787\tcornerback\nn09966470\tcorporatist\nn09966554\tcorrespondent, letter writer\nn09967063\tcosmetician\nn09967406\tcosmopolitan, cosmopolite\nn09967555\tCossack\nn09967816\tcost accountant\nn09967967\tco-star\nn09968259\tcostumier, costumer, costume designer\nn09968652\tcotter, cottier\nn09968741\tcotter, cottar\nn09968845\tcounselor, counsellor\nn09970088\tcounterterrorist\nn09970192\tcounterspy, mole\nn09970402\tcountess\nn09970822\tcompromiser\nn09971273\tcountrywoman\nn09971385\tcounty agent, agricultural agent, extension agent\nn09971839\tcourtier\nn09972010\tcousin, first cousin, cousin-german, full cousin\nn09972458\tcover girl, pin-up, lovely\nn09972587\tcow\nn09974648\tcraftsman, artisan, journeyman, artificer\nn09975425\tcraftsman, crafter\nn09976024\tcrapshooter\nn09976283\tcrazy, loony, looney, nutcase, weirdo\nn09976429\tcreature, wight\nn09976728\tcreditor\nn09976917\tcreep, weirdo, weirdie, weirdy, spook\nn09978442\tcriminologist\nn09979321\tcritic\nn09979913\tCroesus\nn09980458\tcross-examiner, cross-questioner\nn09980805\tcrossover voter, crossover\nn09980985\tcroupier\nn09981092\tcrown prince\nn09981278\tcrown princess\nn09981540\tcryptanalyst, cryptographer, cryptologist\nn09981939\tCub Scout\nn09982152\tcuckold\nn09982525\tcultist\nn09983314\tcurandera\nn09983572\tcurate, minister of religion, minister, parson, pastor, rector\nn09983889\tcurator, 
conservator\nn09984960\tcustomer agent\nn09985470\tcutter, carver\nn09985809\tcyberpunk\nn09985978\tcyborg, bionic man, bionic woman\nn09986450\tcymbalist\nn09986700\tCynic\nn09986904\tcytogeneticist\nn09987045\tcytologist\nn09987161\tczar\nn09987239\tczar, tsar, tzar\nn09988063\tdad, dada, daddy, pa, papa, pappa, pop\nn09988311\tdairyman\nn09988493\tDalai Lama, Grand Lama\nn09988703\tdallier, dillydallier, dilly-dallier, mope, lounger\nn09989502\tdancer, professional dancer, terpsichorean\nn09990415\tdancer, social dancer\nn09990690\tclog dancer\nn09990777\tdancing-master, dance master\nn09991740\tdark horse\nn09991867\tdarling, favorite, favourite, pet, dearie, deary, ducky\nn09992538\tdate, escort\nn09992837\tdaughter, girl\nn09993252\tdawdler, drone, laggard, lagger, trailer, poke\nn09993651\tday boarder\nn09994400\tday laborer, day labourer\nn09994673\tdeacon, Protestant deacon\nn09994808\tdeaconess\nn09994878\tdeadeye\nn09995829\tdeipnosophist\nn09996039\tdropout\nn09996304\tdeadhead\nn09996481\tdeaf person\nn09997622\tdebtor, debitor\nn09998788\tdeckhand, roustabout\nn09999135\tdefamer, maligner, slanderer, vilifier, libeler, backbiter, traducer\nn10000294\tdefense contractor\nn10000459\tdeist, freethinker\nn10000787\tdelegate\nn10001217\tdeliveryman, delivery boy, deliverer\nn10001481\tdemagogue, demagog, rabble-rouser\nn10001764\tdemigod, superman, Ubermensch\nn10002257\tdemographer, demographist, population scientist\nn10002760\tdemonstrator, protester\nn10003476\tden mother\nn10004718\tdepartment head\nn10005006\tdepositor\nn10005934\tdeputy\nn10006177\tdermatologist, skin doctor\nn10006748\tdescender\nn10007684\tdesignated hitter\nn10007809\tdesigner, intriguer\nn10007995\tdesk clerk, hotel desk clerk, hotel clerk\nn10008123\tdesk officer\nn10008254\tdesk sergeant, deskman, station keeper\nn10009162\tdetainee, political detainee\nn10009276\tdetective, investigator, tec, police detective\nn10009484\tdetective\nn10009671\tdetractor, disparager, 
depreciator, knocker\nn10010062\tdeveloper\nn10010243\tdeviationist\nn10010632\tdevisee\nn10010767\tdevisor\nn10010864\tdevourer\nn10011360\tdialectician\nn10011486\tdiarist, diary keeper, journalist\nn10012484\tdietician, dietitian, nutritionist\nn10013811\tdiocesan\nn10015215\tdirector, theater director, theatre director\nn10015485\tdirector\nn10015792\tdirty old man\nn10015897\tdisbeliever, nonbeliever, unbeliever\nn10017272\tdisk jockey, disc jockey, dj\nn10017422\tdispatcher\nn10018747\tdistortionist\nn10018861\tdistributor, distributer\nn10019072\tdistrict attorney, DA\nn10019187\tdistrict manager\nn10019406\tdiver, plunger\nn10020366\tdivorcee, grass widow\nn10020533\tex-wife, ex\nn10020670\tdivorce lawyer\nn10020807\tdocent\nn10020890\tdoctor, doc, physician, MD, Dr., medico\nn10022908\tdodo, fogy, fogey, fossil\nn10023264\tdoge\nn10023506\tdog in the manger\nn10023656\tdogmatist, doctrinaire\nn10024025\tdolichocephalic\nn10024362\tdomestic partner, significant other, spousal equivalent, spouse equivalent\nn10024937\tDominican\nn10025060\tdominus, dominie, domine, dominee\nn10025295\tdon, father\nn10025391\tDonatist\nn10025635\tdonna\nn10026976\tdosser, street person\nn10027246\tdouble, image, look-alike\nn10027590\tdouble-crosser, double-dealer, two-timer, betrayer, traitor\nn10028402\tdown-and-out\nn10028541\tdoyenne\nn10029068\tdraftsman, drawer\nn10030277\tdramatist, playwright\nn10032987\tdreamer\nn10033412\tdressmaker, modiste, needlewoman, seamstress, sempstress\nn10033572\tdressmaker's model\nn10033663\tdribbler, driveller, slobberer, drooler\nn10033888\tdribbler\nn10034201\tdrinker, imbiber, toper, juicer\nn10034614\tdrinker\nn10035952\tdrug addict, junkie, junky\nn10036266\tdrug user, substance abuser, user\nn10036444\tDruid\nn10036692\tdrum majorette, majorette\nn10036929\tdrummer\nn10037080\tdrunk\nn10037385\tdrunkard, drunk, rummy, sot, inebriate, wino\nn10037588\tDruze, Druse\nn10037922\tdry, prohibitionist\nn10038119\tdry 
nurse\nn10038409\tduchess\nn10038620\tduke\nn10039271\tduffer\nn10039946\tdunker\nn10040240\tDutch uncle\nn10040698\tdyspeptic\nn10040945\teager beaver, busy bee, live wire, sharpie, sharpy\nn10041373\tearl\nn10041887\tearner, wage earner\nn10042690\teavesdropper\nn10042845\teccentric, eccentric person, flake, oddball, geek\nn10043024\teclectic, eclecticist\nn10043491\teconometrician, econometrist\nn10043643\teconomist, economic expert\nn10044682\tectomorph\nn10044879\teditor, editor in chief\nn10047199\tegocentric, egoist\nn10047459\tegotist, egoist, swellhead\nn10048117\tejaculator\nn10048367\telder\nn10048612\telder statesman\nn10048836\telected official\nn10049363\telectrician, lineman, linesman\nn10050043\telegist\nn10050880\telocutionist\nn10051026\temancipator, manumitter\nn10051761\tembryologist\nn10051861\temeritus\nn10051975\temigrant, emigre, emigree, outgoer\nn10052694\temissary, envoy\nn10053439\tempress\nn10053808\temployee\nn10054657\temployer\nn10055297\tenchantress, witch\nn10055410\tenchantress, temptress, siren, Delilah, femme fatale\nn10055566\tencyclopedist, encyclopaedist\nn10055730\tendomorph\nn10055847\tenemy, foe, foeman, opposition\nn10056103\tenergizer, energiser, vitalizer, vitaliser, animator\nn10056611\tend man\nn10056719\tend man, corner man\nn10057271\tendorser, indorser\nn10058411\tenjoyer\nn10058962\tenlisted woman\nn10059067\tenophile, oenophile\nn10060075\tentrant\nn10060175\tentrant\nn10060352\tentrepreneur, enterpriser\nn10061043\tenvoy, envoy extraordinary, minister plenipotentiary\nn10061195\tenzymologist\nn10061431\teparch\nn10061882\tepidemiologist\nn10062042\tepigone, epigon\nn10062176\tepileptic\nn10062275\tEpiscopalian\nn10062492\tequerry\nn10062594\tequerry\nn10062716\terotic\nn10062905\tescapee\nn10062996\tescapist, dreamer, wishful thinker\nn10063635\tEskimo, Esquimau, Inuit\nn10063919\tespionage agent\nn10064831\testhetician, 
aesthetician\nn10064977\tetcher\nn10065758\tethnologist\nn10066206\tEtonian\nn10066314\tetymologist\nn10067011\tevangelist, revivalist, gospeler, gospeller\nn10067305\tEvangelist\nn10067600\tevent planner\nn10067968\texaminer, inspector\nn10068234\texaminer, tester, quizzer\nn10068425\texarch\nn10069296\texecutant\nn10069981\texecutive secretary\nn10070108\texecutive vice president\nn10070377\texecutrix\nn10070449\texegete\nn10070563\texhibitor, exhibitioner, shower\nn10070711\texhibitionist, show-off\nn10071332\texile, expatriate, expat\nn10071557\texistentialist, existentialist philosopher, existential philosopher\nn10072054\texorcist, exorciser\nn10074249\tex-spouse\nn10074578\textern, medical extern\nn10074735\textremist\nn10074841\textrovert, extravert\nn10075299\teyewitness\nn10075693\tfacilitator\nn10076224\tfairy godmother\nn10076483\tfalangist, phalangist\nn10076604\tfalconer, hawker\nn10076957\tfalsifier\nn10077106\tfamiliar\nn10077593\tfan, buff, devotee, lover\nn10077879\tfanatic, fiend\nn10078131\tfancier, enthusiast\nn10078719\tfarm boy\nn10078806\tfarmer, husbandman, granger, sodbuster\nn10079399\tfarmhand, fieldhand, field hand, farm worker\nn10079893\tfascist\nn10080117\tfascista\nn10080508\tfatalist, determinist, predestinarian, predestinationist\nn10080869\tfather, male parent, begetter\nn10081204\tFather, Padre\nn10081842\tfather-figure\nn10082043\tfather-in-law\nn10082299\tFauntleroy, Little Lord Fauntleroy\nn10082423\tFauve, fauvist\nn10082562\tfavorite son\nn10082687\tfeatherweight\nn10082997\tfederalist\nn10083677\tfellow traveler, fellow traveller\nn10083823\tfemale aristocrat\nn10084043\tfemale offspring\nn10084295\tfemale child, girl, little girl\nn10085101\tfence\nn10085869\tfiance, groom-to-be\nn10086383\tfielder, fieldsman\nn10086744\tfield judge\nn10087434\tfighter pilot\nn10087736\tfiler\nn10088200\tfilm director, director\nn10090745\tfinder\nn10091349\tfire chief, fire marshal\nn10091450\tfire-eater, 
fire-swallower\nn10091564\tfire-eater, hothead\nn10091651\tfireman, firefighter, fire fighter, fire-eater\nn10091861\tfire marshall\nn10091997\tfire walker\nn10092488\tfirst baseman, first sacker\nn10092643\tfirstborn, eldest\nn10092794\tfirst lady\nn10092978\tfirst lieutenant, 1st lieutenant\nn10093167\tfirst offender\nn10093475\tfirst sergeant, sergeant first class\nn10093818\tfishmonger, fishwife\nn10094320\tflagellant\nn10094584\tflag officer\nn10094782\tflak catcher, flak, flack catcher, flack\nn10095265\tflanker back, flanker\nn10095420\tflapper\nn10095769\tflatmate\nn10095869\tflatterer, adulator\nn10096126\tflibbertigibbet, foolish woman\nn10096508\tflight surgeon\nn10097262\tfloorwalker, shopwalker\nn10097477\tflop, dud, washout\nn10097590\tFlorentine\nn10097842\tflower girl\nn10097995\tflower girl\nn10098245\tflutist, flautist, flute player\nn10098388\tfly-by-night\nn10098517\tflyweight\nn10098624\tflyweight\nn10098710\tfoe, enemy\nn10098862\tfolk dancer\nn10099002\tfolk poet\nn10099375\tfollower\nn10101308\tfootball hero\nn10101634\tfootball player, footballer\nn10101981\tfootman\nn10102800\tforefather, father, sire\nn10103155\tforemother\nn10103228\tforeign agent\nn10103921\tforeigner, outsider\nn10104064\tboss\nn10104487\tforeman\nn10104756\tforester, tree farmer, arboriculturist\nn10104888\tforewoman\nn10105085\tforger, counterfeiter\nn10105733\tforward\nn10105906\tfoster-brother, foster brother\nn10106387\tfoster-father, foster father\nn10106509\tfoster-mother, foster mother\nn10106995\tfoster-sister, foster sister\nn10107173\tfoster-son, foster son\nn10107303\tfounder, beginner, founding father, father\nn10108018\tfoundress\nn10108089\tfour-minute man\nn10108464\tframer\nn10108832\tFrancophobe\nn10109443\tfreak, monster, monstrosity, lusus naturae\nn10109662\tfree agent, free spirit, freewheeler\nn10109826\tfree agent\nn10110093\tfreedom rider\nn10110731\tfree-liver\nn10110893\tfreeloader\nn10111358\tfree 
trader\nn10111779\tFreudian\nn10111903\tfriar, mendicant\nn10112129\tmonk, monastic\nn10113249\tfrontierswoman\nn10113583\tfront man, front, figurehead, nominal head, straw man, strawman\nn10113869\tfrotteur\nn10114476\tfucker\nn10114550\tfucker\nn10114662\tfuddy-duddy\nn10115430\tfullback\nn10115946\tfunambulist, tightrope walker\nn10116370\tfundamentalist\nn10116478\tfundraiser\nn10116702\tfuturist\nn10117017\tgadgeteer\nn10117267\tgagman, gagster, gagwriter\nn10117415\tgagman, standup comedian\nn10117739\tgainer, weight gainer\nn10117851\tgal\nn10118301\tgaloot\nn10118743\tgambist\nn10118844\tgambler\nn10119609\tgamine\nn10120330\tgarbage man, garbageman, garbage collector, garbage carter, garbage hauler, refuse collector, dustman\nn10120671\tgardener\nn10121026\tgarment cutter\nn10121246\tgarroter, garrotter, strangler, throttler, choker\nn10121714\tgasman\nn10121800\tgastroenterologist\nn10122300\tgatherer\nn10122531\tgawker\nn10123122\tgendarme\nn10123844\tgeneral, full general\nn10126177\tgenerator, source, author\nn10126424\tgeneticist\nn10126708\tgenitor\nn10127186\tgent\nn10127689\tgeologist\nn10128519\tgeophysicist\nn10128748\tghostwriter, ghost\nn10129338\tGibson girl\nn10129825\tgirl, miss, missy, young lady, young woman, fille\nn10130686\tgirlfriend, girl, lady friend\nn10130877\tgirlfriend\nn10131151\tgirl wonder\nn10131268\tGirondist, Girondin\nn10131590\tgitano\nn10131815\tgladiator\nn10132035\tglassblower\nn10132502\tgleaner\nn10134178\tgoat herder, goatherd\nn10134396\tgodchild\nn10134760\tgodfather\nn10134982\tgodparent\nn10135129\tgodson\nn10135197\tgofer\nn10135297\tgoffer, gopher\nn10136615\tgoldsmith, goldworker, gold-worker\nn10136959\tgolfer, golf player, linksman\nn10137825\tgondolier, gondoliere\nn10138369\tgood guy\nn10138472\tgood old boy, good ole boy, good ol' boy\nn10139077\tgood Samaritan\nn10139651\tgossip columnist\nn10140051\tgouger\nn10140597\tgovernor general\nn10140683\tgrabber\nn10140783\tgrader\nn10140929\tgraduate nurse, 
trained nurse\nn10141364\tgrammarian, syntactician\nn10141732\tgranddaughter\nn10142166\tgrande dame\nn10142391\tgrandfather, gramps, granddad, grandad, granddaddy, grandpa\nn10142537\tGrand Inquisitor\nn10142747\tgrandma, grandmother, granny, grannie, gran, nan, nanna\nn10142946\tgrandmaster\nn10143172\tgrandparent\nn10143595\tgrantee\nn10143725\tgranter\nn10144338\tgrass widower, divorced man\nn10145239\tgreat-aunt, grandaunt\nn10145340\tgreat grandchild\nn10145480\tgreat granddaughter\nn10145590\tgreat grandmother\nn10145774\tgreat grandparent\nn10145902\tgreat grandson\nn10146002\tgreat-nephew, grandnephew\nn10146104\tgreat-niece, grandniece\nn10146416\tGreen Beret\nn10146816\tgrenadier, grenade thrower\nn10146927\tgreeter, saluter, welcomer\nn10147121\tgringo\nn10147262\tgrinner\nn10147710\tgrocer\nn10147935\tgroom, bridegroom\nn10148035\tgroom, bridegroom\nn10148305\tgrouch, grump, crank, churl, crosspatch\nn10148825\tgroup captain\nn10149436\tgrunter\nn10149867\tprison guard, jailer, jailor, gaoler, screw, turnkey\nn10150071\tguard\nn10150794\tguesser\nn10150940\tguest, invitee\nn10151133\tguest\nn10151261\tguest of honor\nn10151367\tguest worker, guestworker\nn10151570\tguide\nn10151760\tguitarist, guitar player\nn10152306\tgunnery sergeant\nn10152616\tguru\nn10152763\tguru\nn10153155\tguvnor\nn10153414\tguy, cat, hombre, bozo\nn10153594\tgymnast\nn10153865\tgym rat\nn10154013\tgynecologist, gynaecologist, woman's doctor\nn10154186\tGypsy, Gipsy, Romany, Rommany, Romani, Roma, Bohemian\nn10154601\thack, drudge, hacker\nn10155222\thacker, cyber-terrorist, cyberpunk\nn10155600\thaggler\nn10155849\thairdresser, hairstylist, stylist, styler\nn10156629\thakim, hakeem\nn10156831\tHakka\nn10157016\thalberdier\nn10157128\thalfback\nn10157271\thalf blood\nn10158506\thand\nn10159045\tanimal trainer, handler\nn10159289\thandyman, jack of all trades, odd-job man\nn10159533\thang glider\nn10160188\thardliner\nn10160280\tharlequin\nn10160412\tharmonizer, 
harmoniser\nn10161622\thash head\nn10162016\thatchet man, iceman\nn10162194\thater\nn10162354\thatmaker, hatter, milliner, modiste\nn10164025\theadman, tribal chief, chieftain, chief\nn10164233\theadmaster, schoolmaster, master\nn10164492\thead nurse\nn10165448\thearer, listener, auditor, attender\nn10166189\theartbreaker\nn10166394\theathen, pagan, gentile, infidel\nn10167152\theavyweight\nn10167361\theavy\nn10167565\theckler, badgerer\nn10167838\thedger\nn10168012\thedger, equivocator, tergiversator\nn10168183\thedonist, pagan, pleasure seeker\nn10168584\their, inheritor, heritor\nn10168837\their apparent\nn10169147\theiress, inheritress, inheritrix\nn10169241\their presumptive\nn10169419\thellion, heller, devil\nn10169796\thelmsman, steersman, steerer\nn10170060\thire\nn10170681\thematologist, haematologist\nn10170866\themiplegic\nn10171219\therald, trumpeter\nn10171456\therbalist, herb doctor\nn10171567\therder, herdsman, drover\nn10172080\thermaphrodite, intersex, gynandromorph, androgyne, epicene, epicene person\nn10173410\theroine\nn10173579\theroin addict\nn10173665\thero worshiper, hero worshipper\nn10173771\tHerr\nn10174253\thighbinder\nn10174330\thighbrow\nn10174445\thigh commissioner\nn10174589\thighflier, highflyer\nn10174695\tHighlander, Scottish Highlander, Highland Scot\nn10174971\thigh-muck-a-muck, pooh-bah\nn10175248\thigh priest\nn10175725\thighjacker, hijacker\nn10176913\thireling, pensionary\nn10177150\thistorian, historiographer\nn10178077\thitchhiker\nn10178216\thitter, striker\nn10179069\thobbyist\nn10180580\tholdout\nn10180791\tholdover, hangover\nn10180923\tholdup man, stickup man\nn10181445\thomeboy\nn10181547\thomeboy\nn10181799\thome buyer\nn10181878\thomegirl\nn10182190\thomeless, homeless person\nn10182402\thomeopath, homoeopath\nn10183347\thonest woman\nn10183931\thonor guard, guard of honor\nn10184505\thooker\nn10185148\thoper\nn10185483\thornist\nn10185793\thorseman, equestrian, horseback rider\nn10186068\thorse 
trader\nn10186143\thorsewoman\nn10186216\thorse wrangler, wrangler\nn10186350\thorticulturist, plantsman\nn10186686\thospital chaplain\nn10186774\thost, innkeeper, boniface\nn10187130\thost\nn10187491\thostess\nn10187990\thotelier, hotelkeeper, hotel manager, hotelman, hosteller\nn10188715\thousekeeper\nn10188856\thousemaster\nn10188957\thousemate\nn10189278\thouse physician, resident, resident physician\nn10189597\thouse sitter\nn10190122\thousing commissioner\nn10190516\thuckster, cheap-jack\nn10191001\thugger\nn10191388\thumanist, humanitarian\nn10191613\thumanitarian, do-gooder, improver\nn10192839\thunk\nn10193650\thuntress\nn10194231\tex-husband, ex\nn10194775\thydrologist\nn10195056\thyperope\nn10195155\thypertensive\nn10195261\thypnotist, hypnotizer, hypnotiser, mesmerist, mesmerizer\nn10195593\thypocrite, dissembler, dissimulator, phony, phoney, pretender\nn10196404\ticeman\nn10196725\ticonoclast\nn10197392\tideologist, ideologue\nn10198437\tidol, matinee idol\nn10198832\tidolizer, idoliser\nn10199251\timam, imaum\nn10200246\timperialist\nn10200781\timportant person, influential person, personage\nn10202225\tinamorato\nn10202624\tincumbent, officeholder\nn10202763\tincurable\nn10203949\tinductee\nn10204177\tindustrialist\nn10204833\tinfanticide\nn10205231\tinferior\nn10205344\tinfernal\nn10205457\tinfielder\nn10205714\tinfiltrator\nn10206173\tinformer, betrayer, rat, squealer, blabber\nn10206506\tingenue\nn10206629\tingenue\nn10207077\tpolymath\nn10207169\tin-law, relative-in-law\nn10208189\tinquiry agent\nn10208847\tinspector\nn10208950\tinspector general\nn10209082\tinstigator, initiator\nn10209731\tinsurance broker, insurance agent, general agent, underwriter\nn10210137\tinsurgent, insurrectionist, freedom fighter, rebel\nn10210512\tintelligence analyst\nn10210648\tinterior designer, designer, interior decorator, house decorator, room decorator, decorator\nn10210911\tinterlocutor, conversational partner\nn10211036\tinterlocutor, 
middleman\nn10211666\tInternational Grandmaster\nn10211830\tinternationalist\nn10212231\tinternist\nn10212501\tinterpreter, translator\nn10212780\tinterpreter\nn10213034\tintervenor\nn10213429\tintrovert\nn10214062\tinvader, encroacher\nn10214390\tinvalidator, voider, nullifier\nn10215623\tinvestigator\nn10216106\tinvestor\nn10216403\tinvigilator\nn10217208\tirreligionist\nn10218043\tIvy Leaguer\nn10218164\tJack of all trades\nn10218292\tJacksonian\nn10219240\tJane Doe\nn10219453\tjanissary\nn10219879\tJat\nn10220080\tJavanese, Javan\nn10220924\tJekyll and Hyde\nn10221312\tjester, fool, motley fool\nn10221520\tJesuit\nn10222170\tjezebel\nn10222259\tjilt\nn10222497\tjobber, middleman, wholesaler\nn10222716\tjob candidate\nn10223069\tJob's comforter\nn10223177\tjockey\nn10223606\tJohn Doe\nn10224578\tjournalist\nn10225219\tjudge, justice, jurist\nn10225931\tjudge advocate\nn10226413\tjuggler\nn10227166\tJungian\nn10227266\tjunior\nn10227393\tjunior\nn10227490\tJunior, Jr, Jnr\nn10227698\tjunior lightweight\nn10227793\tjunior middleweight\nn10227985\tjurist, legal expert\nn10228278\tjuror, juryman, jurywoman\nn10228468\tjustice of the peace\nn10228592\tjusticiar, justiciary\nn10228712\tkachina\nn10229883\tkeyboardist\nn10230216\tKhedive\nn10233248\tkingmaker\nn10235024\tking, queen, world-beater\nn10235269\tKing's Counsel\nn10235385\tCounsel to the Crown\nn10236304\tkin, kinsperson, family\nn10236521\tenate, matrikin, matrilineal kin, matrisib, matrilineal sib\nn10236842\tkink\nn10237069\tkinswoman\nn10237196\tkisser, osculator\nn10237464\tkitchen help\nn10237556\tkitchen police, KP\nn10237676\tKlansman, Ku Kluxer, Kluxer\nn10237799\tkleptomaniac\nn10238272\tkneeler\nn10238375\tknight\nn10239928\tknocker\nn10240082\tknower, apprehender\nn10240235\tknow-it-all, know-all\nn10240417\tkolkhoznik\nn10240821\tKshatriya\nn10241024\tlabor coach, birthing coach, doula, monitrice\nn10241300\tlaborer, manual laborer, labourer, 
jack\nn10242328\tLabourite\nn10243137\tlady\nn10243273\tlady-in-waiting\nn10243483\tlady's maid\nn10243664\tlama\nn10243872\tlamb, dear\nn10244108\tlame duck\nn10244359\tlamplighter\nn10244913\tland agent\nn10245029\tlandgrave\nn10245341\tlandlubber, lubber, landsman\nn10245507\tlandlubber, landsman, landman\nn10245639\tlandowner, landholder, property owner\nn10245863\tlandscape architect, landscape gardener, landscaper, landscapist\nn10246317\tlanglaufer\nn10246395\tlanguisher\nn10246703\tlapidary, lapidarist\nn10247358\tlass, lassie, young girl, jeune fille\nn10247880\tLatin\nn10248008\tLatin\nn10248198\tlatitudinarian\nn10248377\tJehovah's Witness\nn10249191\tlaw agent\nn10249270\tlawgiver, lawmaker\nn10249459\tlawman, law officer, peace officer\nn10249869\tlaw student\nn10249950\tlawyer, attorney\nn10250712\tlay reader\nn10251329\tlazybones\nn10251612\tleaker\nn10252075\tleaseholder, lessee\nn10252222\tlector, lecturer, reader\nn10252354\tlector, reader\nn10252547\tlecturer\nn10253122\tleft-hander, lefty, southpaw\nn10253296\tlegal representative\nn10253479\tlegate, official emissary\nn10253611\tlegatee\nn10253703\tlegionnaire, legionary\nn10255459\tletterman\nn10257221\tliberator\nn10258602\tlicenser\nn10258786\tlicentiate\nn10259348\tlieutenant\nn10259780\tlieutenant colonel, light colonel\nn10259997\tlieutenant commander\nn10260473\tlieutenant junior grade, lieutenant JG\nn10260706\tlife\nn10260800\tlifeguard, lifesaver\nn10261211\tlife tenant\nn10261511\tlight flyweight\nn10261624\tlight heavyweight, cruiserweight\nn10261862\tlight heavyweight\nn10262343\tlight-o'-love, light-of-love\nn10262445\tlightweight\nn10262561\tlightweight\nn10262655\tlightweight\nn10262880\tlilliputian\nn10263146\tlimnologist\nn10263411\tlineman\nn10263790\tline officer\nn10265281\tlion-hunter\nn10265801\tlisper\nn10265891\tlister\nn10266016\tliterary critic\nn10266328\tliterate, literate person\nn10266848\tlitigant, litigator\nn10267166\tlitterer, litterbug, litter 
lout\nn10267311\tlittle brother\nn10267865\tlittle sister\nn10268629\tlobbyist\nn10269199\tlocksmith\nn10269289\tlocum tenens, locum\nn10271677\tLord, noble, nobleman\nn10272782\tloser\nn10272913\tloser, also-ran\nn10273064\tfailure, loser, nonstarter, unsuccessful person\nn10274173\tLothario\nn10274318\tloudmouth, blusterer\nn10274815\tlowerclassman, underclassman\nn10275249\tLowlander, Scottish Lowlander, Lowland Scot\nn10275395\tloyalist, stalwart\nn10275848\tLuddite\nn10276045\tlumberman, lumberjack, logger, feller, faller\nn10276477\tlumper\nn10276942\tbedlamite\nn10277027\tpyromaniac\nn10277638\tlutist, lutanist, lutenist\nn10277815\tLutheran\nn10277912\tlyricist, lyrist\nn10278456\tmacebearer, mace, macer\nn10279018\tmachinist, mechanic, shop mechanic\nn10279778\tmadame\nn10280034\tmaenad\nn10280130\tmaestro, master\nn10280598\tmagdalen\nn10280674\tmagician, prestidigitator, conjurer, conjuror, illusionist\nn10281546\tmagus\nn10281770\tmaharani, maharanee\nn10281896\tmahatma\nn10282482\tmaid, maiden\nn10282672\tmaid, maidservant, housemaid, amah\nn10283170\tmajor\nn10283366\tmajor\nn10283546\tmajor-domo, seneschal\nn10284064\tmaker, shaper\nn10284871\tmalahini\nn10284965\tmalcontent\nn10286282\tmalik\nn10286539\tmalingerer, skulker, shammer\nn10286749\tMalthusian\nn10288964\tadonis\nn10289039\tman\nn10289176\tman\nn10289462\tmanageress\nn10289766\tmandarin\nn10290422\tmaneuverer, manoeuvrer\nn10290541\tmaniac\nn10290813\tManichaean, Manichean, Manichee\nn10290919\tmanicurist\nn10291110\tmanipulator\nn10291469\tman-at-arms\nn10291822\tman of action, man of deeds\nn10291942\tman of letters\nn10292316\tmanufacturer, producer\nn10293332\tmarcher, parader\nn10293590\tmarchioness, marquise\nn10293861\tmargrave\nn10294020\tmargrave\nn10294139\tMarine, devil dog, leatherneck, shipboard soldier\nn10295371\tmarquess\nn10295479\tmarquis, marquess\nn10296176\tmarshal, marshall\nn10296444\tmartinet, disciplinarian, 
moralist\nn10297234\tmascot\nn10297367\tmasochist\nn10297531\tmason, stonemason\nn10297841\tmasquerader, masker, masquer\nn10298202\tmasseur\nn10298271\tmasseuse\nn10298647\tmaster\nn10298912\tmaster, captain, sea captain, skipper\nn10299125\tmaster-at-arms\nn10299250\tmaster of ceremonies, emcee, host\nn10299700\tmasturbator, onanist\nn10299875\tmatchmaker, matcher, marriage broker\nn10300041\tmate, first mate\nn10300154\tmate\nn10300303\tmate\nn10300500\tmater\nn10300654\tmaterial\nn10300829\tmaterialist\nn10302576\tmatriarch, materfamilias\nn10302700\tmatriarch\nn10302905\tmatriculate\nn10303037\tmatron\nn10303814\tmayor, city manager\nn10304086\tmayoress\nn10304650\tmechanical engineer\nn10304914\tmedalist, medallist, medal winner\nn10305635\tmedical officer, medic\nn10305802\tmedical practitioner, medical man\nn10306004\tmedical scientist\nn10306279\tmedium, spiritualist, sensitive\nn10306496\tmegalomaniac\nn10306595\tmelancholic, melancholiac\nn10306890\tMelkite, Melchite\nn10307114\tmelter\nn10308066\tnonmember\nn10308168\tboard member\nn10308275\tclansman, clanswoman, clan member\nn10308504\tmemorizer, memoriser\nn10308653\tMendelian\nn10308732\tmender, repairer, fixer\nn10310783\tMesoamerican\nn10311506\tmessmate\nn10311661\tmestiza\nn10312287\tmeteorologist\nn10312491\tmeter maid\nn10312600\tMethodist\nn10313000\tMetis\nn10313239\tmetropolitan\nn10313441\tmezzo-soprano, mezzo\nn10313724\tmicroeconomist, microeconomic expert\nn10314054\tmiddle-aged man\nn10314182\tmiddlebrow\nn10314517\tmiddleweight\nn10314836\tmidwife, accoucheuse\nn10315217\tmikado, tenno\nn10315456\tMilanese\nn10315561\tmiler\nn10315730\tmiles gloriosus\nn10316360\tmilitary attache\nn10316527\tmilitary chaplain, padre, Holy Joe, sky pilot\nn10316862\tmilitary leader\nn10317007\tmilitary officer, officer\nn10317500\tmilitary policeman, MP\nn10317963\tmill agent\nn10318293\tmill-hand, factory worker\nn10318607\tmillionairess\nn10318686\tmillwright\nn10319313\tminder\nn10320484\tmining 
engineer\nn10320863\tminister, government minister\nn10321126\tministrant\nn10321340\tminor leaguer, bush leaguer\nn10321632\tMinuteman\nn10321882\tmisanthrope, misanthropist\nn10322238\tmisfit\nn10323634\tmistress\nn10323752\tmistress, kept woman, fancy woman\nn10323999\tmixed-blood\nn10324560\tmodel, poser\nn10325549\tclass act\nn10325774\tmodeler, modeller\nn10326776\tmodifier\nn10327143\tmolecular biologist\nn10327987\tMonegasque, Monacan\nn10328123\tmonetarist\nn10328328\tmoneygrubber\nn10328437\tmoneymaker\nn10328696\tMongoloid\nn10328941\tmonolingual\nn10329035\tmonologist\nn10330593\tmoonlighter\nn10330931\tmoralist\nn10331098\tmorosoph\nn10331167\tmorris dancer\nn10331258\tmortal enemy\nn10331347\tmortgagee, mortgage holder\nn10331841\tmortician, undertaker, funeral undertaker, funeral director\nn10332110\tmoss-trooper\nn10332385\tmother, female parent\nn10332861\tmother\nn10332953\tmother\nn10333044\tmother figure\nn10333165\tmother hen\nn10333317\tmother-in-law\nn10333439\tmother's boy, mamma's boy, mama's boy\nn10333601\tmother's daughter\nn10333838\tmotorcycle cop, motorcycle policeman, speed cop\nn10334009\tmotorcyclist\nn10334461\tMound Builder\nn10334782\tmountebank, charlatan\nn10335246\tmourner, griever, sorrower, lamenter\nn10335801\tmouthpiece, mouth\nn10335931\tmover\nn10336411\tmoviegoer, motion-picture fan\nn10336904\tmuffin man\nn10337488\tmugwump, independent, fencesitter\nn10338231\tMullah, Mollah, Mulla\nn10338391\tmuncher\nn10339179\tmurderess\nn10339251\tmurder suspect\nn10339717\tmusher\nn10340312\tmusician, instrumentalist, player\nn10341243\tmusicologist\nn10341343\tmusic teacher\nn10341446\tmusketeer\nn10341573\tMuslimah\nn10341955\tmutilator, maimer, mangler\nn10342180\tmutineer\nn10342367\tmute, deaf-mute, deaf-and-dumb person\nn10342543\tmutterer, mumbler, murmurer\nn10342893\tmuzzler\nn10342992\tMycenaen\nn10343088\tmycologist\nn10343355\tmyope\nn10343449\tmyrmidon\nn10343554\tmystic, religious 
mystic\nn10343869\tmythologist\nn10344121\tnaif\nn10344203\tnailer\nn10344319\tnamby-pamby\nn10344656\tname dropper\nn10344774\tnamer\nn10345015\tnan\nn10345100\tnanny, nursemaid, nurse\nn10345302\tnarc, nark, narcotics agent\nn10345422\tnarcissist, narcist\nn10345659\tnark, copper's nark\nn10346015\tnationalist\nn10347204\tnautch girl\nn10347446\tnaval commander\nn10348526\tNavy SEAL, SEAL\nn10349243\tobstructionist, obstructor, obstructer, resister, thwarter\nn10349750\tNazarene\nn10349836\tNazarene, Ebionite\nn10350220\tNazi, German Nazi\nn10350774\tnebbish, nebbech\nn10351064\tnecker\nn10353016\tneonate, newborn, newborn infant, newborn baby\nn10353355\tnephew\nn10353928\tneurobiologist\nn10354265\tneurologist, brain doctor\nn10354754\tneurosurgeon, brain surgeon\nn10355142\tneutral\nn10355306\tneutralist\nn10355449\tnewcomer, fledgling, fledgeling, starter, neophyte, freshman, newbie, entrant\nn10355688\tnewcomer\nn10355806\tNew Dealer\nn10356450\tnewspaper editor\nn10356877\tnewsreader, news reader\nn10357012\tNewtonian\nn10357613\tniece\nn10357737\tniggard, skinflint, scrooge, churl\nn10358032\tnight porter\nn10358124\tnight rider, nightrider\nn10358575\tNIMBY\nn10359117\tniqaabi\nn10359422\tnitpicker\nn10359546\tNobelist, Nobel Laureate\nn10359659\tNOC\nn10360366\tnoncandidate\nn10360747\tnoncommissioned officer, noncom, enlisted officer\nn10361060\tnondescript\nn10361194\tnondriver\nn10361296\tnonparticipant\nn10361525\tnonperson, unperson\nn10362003\tnonresident\nn10362319\tnonsmoker\nn10362557\tNorthern Baptist\nn10363445\tnoticer\nn10363573\tnovelist\nn10364198\tnovitiate, novice\nn10364502\tnuclear chemist, radiochemist\nn10365514\tnudger\nn10366145\tnullipara\nn10366276\tnumber theorist\nn10366966\tnurse\nn10368291\tnursling, nurseling, suckling\nn10368528\tnymph, houri\nn10368624\tnymphet\nn10368711\tnympholept\nn10368798\tnymphomaniac, nympho\nn10369095\toarswoman\nn10369317\toboist\nn10369417\tobscurantist\nn10369528\tobserver, 
commentator\nn10369699\tobstetrician, accoucheur\nn10369955\toccupier\nn10370381\toccultist\nn10370955\twine lover\nn10371052\tofferer, offeror\nn10371221\toffice-bearer\nn10371330\toffice boy\nn10371450\tofficeholder, officer\nn10373390\tofficiant\nn10373525\tFederal, Fed, federal official\nn10374541\toilman\nn10374849\toil tycoon\nn10374943\told-age pensioner\nn10375052\told boy\nn10375314\told lady\nn10375402\told man\nn10376523\toldster, old person, senior citizen, golden ager\nn10376890\told-timer, oldtimer, gaffer, old geezer, antique\nn10377021\told woman\nn10377185\toligarch\nn10377291\tOlympian\nn10377542\tomnivore\nn10377633\toncologist\nn10378026\tonlooker, looker-on\nn10378113\tonomancer\nn10378780\toperator\nn10379376\topportunist, self-seeker\nn10380126\toptimist\nn10380499\tOrangeman\nn10380672\torator, speechmaker, rhetorician, public speaker, speechifier\nn10381804\torderly, hospital attendant\nn10381981\torderly\nn10382157\torderly sergeant\nn10382302\tordinand\nn10382480\tordinary\nn10382710\torgan-grinder\nn10382825\torganist\nn10383094\torganization man\nn10383237\torganizer, organiser, arranger\nn10383505\torganizer, organiser, labor organizer\nn10383816\toriginator, conceiver, mastermind\nn10384214\tornithologist, bird watcher\nn10384392\torphan\nn10384496\torphan\nn10385566\tosteopath, osteopathist\nn10386196\tout-and-outer\nn10386754\toutdoorswoman\nn10386874\toutfielder\nn10386984\toutfielder\nn10387196\tright fielder\nn10387324\tright-handed pitcher, right-hander\nn10387836\toutlier\nn10389865\towner-occupier\nn10389976\toyabun\nn10390600\tpackrat\nn10390698\tpadrone\nn10390807\tpadrone\nn10391416\tpage, pageboy\nn10393909\tpainter\nn10394434\tPaleo-American, Paleo-Amerind, Paleo-Indian\nn10394786\tpaleontologist, palaeontologist, fossilist\nn10395073\tpallbearer, bearer\nn10395209\tpalmist, palmister, chiromancer\nn10395390\tpamperer, spoiler, coddler, mollycoddler\nn10395828\tPanchen Lama\nn10396106\tpanelist, 
panellist\nn10396337\tpanhandler\nn10396727\tpaparazzo\nn10396908\tpaperboy\nn10397001\tpaperhanger, paperer\nn10397142\tpaperhanger\nn10397392\tpapoose, pappoose\nn10399130\tpardoner\nn10400003\tparetic\nn10400108\tparishioner\nn10400205\tpark commissioner\nn10400437\tParliamentarian, Member of Parliament\nn10400618\tparliamentary agent\nn10400998\tparodist, lampooner\nn10401204\tparricide\nn10401331\tparrot\nn10401639\tpartaker, sharer\nn10402709\tpart-timer\nn10402824\tparty\nn10403633\tparty man, party liner\nn10403876\tpassenger, rider\nn10404426\tpasser\nn10404998\tpaster\nn10405540\tpater\nn10405694\tpatient\nn10406266\tpatriarch\nn10406391\tpatriarch\nn10406765\tpatriarch, paterfamilias\nn10407310\tpatriot, nationalist\nn10407954\tpatron, sponsor, supporter\nn10408809\tpatternmaker\nn10409459\tpawnbroker\nn10409752\tpayer, remunerator\nn10410246\tpeacekeeper\nn10410996\tpeasant\nn10411356\tpedant, bookworm, scholastic\nn10411551\tpeddler, pedlar, packman, hawker, pitchman\nn10411867\tpederast, paederast, child molester\nn10414239\tpenologist\nn10414768\tpentathlete\nn10414865\tPentecostal, Pentecostalist\nn10415037\tpercussionist\nn10416567\tperiodontist\nn10417288\tpeshmerga\nn10417424\tpersonality\nn10417551\tpersonal representative\nn10417682\tpersonage\nn10417843\tpersona grata\nn10417969\tpersona non grata\nn10418101\tpersonification\nn10418735\tperspirer, sweater\nn10419047\tpervert, deviant, deviate, degenerate\nn10419472\tpessimist\nn10419630\tpest, blighter, cuss, pesterer, gadfly\nn10419785\tPeter Pan\nn10420031\tpetitioner, suppliant, supplicant, requester\nn10420277\tpetit juror, petty juror\nn10420507\tpet sitter, critter sitter\nn10420649\tpetter, fondler\nn10421016\tPharaoh, Pharaoh of Egypt\nn10421470\tpharmacist, druggist, chemist, apothecary, pill pusher, pill roller\nn10421956\tphilanthropist, altruist\nn10422405\tphilatelist, stamp 
collector\nn10425946\tphilosopher\nn10426454\tphonetician\nn10426630\tphonologist\nn10427223\tphotojournalist\nn10427359\tphotometrist, photometrician\nn10427764\tphysical therapist, physiotherapist\nn10428004\tphysicist\nn10431122\tpiano maker\nn10431625\tpicker, chooser, selector\nn10432189\tpicnicker, picknicker\nn10432441\tpilgrim\nn10432875\tpill\nn10432957\tpillar, mainstay\nn10433077\tpill head\nn10433452\tpilot\nn10433610\tPiltdown man, Piltdown hoax\nn10433737\tpimp, procurer, panderer, pander, pandar, fancy man, ponce\nn10435169\tpipe smoker\nn10435251\tpip-squeak, squirt, small fry\nn10435716\tpisser, urinator\nn10435988\tpitcher, hurler, twirler\nn10436334\tpitchman\nn10437014\tplaceman, placeseeker\nn10437137\tplacer miner\nn10437262\tplagiarist, plagiarizer, plagiariser, literary pirate, pirate\nn10437698\tplainsman\nn10438172\tplanner, contriver, deviser\nn10438619\tplanter, plantation owner\nn10438842\tplasterer\nn10439373\tplatinum blond, platinum blonde\nn10439523\tplatitudinarian\nn10439727\tplayboy, man-about-town, Corinthian\nn10439851\tplayer, participant\nn10441037\tplaymate, playfellow\nn10441124\tpleaser\nn10441694\tpledger\nn10441962\tplenipotentiary\nn10442093\tplier, plyer\nn10442232\tplodder, slowpoke, stick-in-the-mud, slowcoach\nn10442417\tplodder, slogger\nn10442573\tplotter, mapper\nn10443032\tplumber, pipe fitter\nn10443659\tpluralist\nn10443830\tpluralist\nn10444194\tpoet\nn10448322\tpointsman\nn10448455\tpoint woman\nn10449664\tpolicyholder\nn10450038\tpolitical prisoner\nn10450161\tpolitical scientist\nn10450303\tpolitician, politico, pol, political leader\nn10451450\tpolitician\nn10451590\tpollster, poll taker, headcounter, canvasser\nn10451858\tpolluter, defiler\nn10453184\tpool player\nn10455619\tportraitist, portrait painter, portrayer, limner\nn10456070\tposeuse\nn10456138\tpositivist, rationalist\nn10456696\tpostdoc, post doc\nn10457214\tposter girl\nn10457444\tpostulator\nn10457903\tprivate citizen\nn10458111\tproblem 
solver, solver, convergent thinker\nn10458356\tpro-lifer\nn10458596\tprosthetist\nn10459882\tpostulant\nn10460033\tpotboy, potman\nn10461060\tpoultryman, poulterer\nn10462588\tpower user\nn10462751\tpower worker, power-station worker\nn10462860\tpractitioner, practician\nn10464052\tprayer, supplicant\nn10464542\tpreceptor, don\nn10464711\tpredecessor\nn10464870\tpreemptor, pre-emptor\nn10465002\tpreemptor, pre-emptor\nn10465451\tpremature baby, preterm baby, premature infant, preterm infant, preemie, premie\nn10465831\tpresbyter\nn10466198\tpresenter, sponsor\nn10466564\tpresentist\nn10466918\tpreserver\nn10467179\tpresident\nn10467395\tPresident of the United States, United States President, President, Chief Executive\nn10468750\tpresident, prexy\nn10469611\tpress agent, publicity man, public relations man, PR man\nn10469874\tpress photographer\nn10470779\tpriest\nn10471640\tprima ballerina\nn10471732\tprima donna, diva\nn10471859\tprima donna\nn10472129\tprimigravida, gravida I\nn10472447\tprimordial dwarf, hypoplastic dwarf, true dwarf, normal dwarf\nn10473453\tprince charming\nn10473562\tprince consort\nn10473789\tprinceling\nn10473917\tPrince of Wales\nn10474064\tprincess\nn10474343\tprincess royal\nn10474446\tprincipal, dealer\nn10474645\tprincipal, school principal, head teacher, head\nn10475835\tprint seller\nn10475940\tprior\nn10476467\tprivate, buck private, common soldier\nn10477713\tprobationer, student nurse\nn10477955\tprocessor\nn10478118\tprocess-server\nn10478293\tproconsul\nn10478462\tproconsul\nn10478827\tproctologist\nn10478960\tproctor, monitor\nn10479135\tprocurator\nn10479328\tprocurer, securer\nn10481167\tprofit taker\nn10481268\tprogrammer, computer programmer, coder, software engineer\nn10482054\tpromiser, promisor\nn10482220\tpromoter, booster, plugger\nn10482587\tpromulgator\nn10482921\tpropagandist\nn10483138\tpropagator, disseminator\nn10483395\tproperty man, propman, property 
master\nn10483799\tprophetess\nn10483890\tprophet\nn10484858\tprosecutor, public prosecutor, prosecuting officer, prosecuting attorney\nn10485298\tprospector\nn10485883\tprotectionist\nn10486166\tprotegee\nn10486236\tprotozoologist\nn10486561\tprovost marshal\nn10487182\tpruner, trimmer\nn10487363\tpsalmist\nn10487592\tpsephologist\nn10488016\tpsychiatrist, head-shrinker, shrink\nn10488309\tpsychic\nn10488656\tpsycholinguist\nn10489426\tpsychophysicist\nn10490421\tpublican, tavern keeper\nn10491998\tpudge\nn10492086\tpuerpera\nn10492727\tpunching bag\nn10493199\tpunter\nn10493419\tpunter\nn10493685\tpuppeteer\nn10493835\tpuppy, pup\nn10493922\tpurchasing agent\nn10494195\tpuritan\nn10494373\tPuritan\nn10495167\tpursuer\nn10495421\tpusher, shover\nn10495555\tpusher, drug peddler, peddler, drug dealer, drug trafficker\nn10495756\tpusher, thruster\nn10496393\tputz\nn10496489\tPygmy, Pigmy\nn10497135\tqadi\nn10497534\tquadriplegic\nn10497645\tquadruplet, quad\nn10498046\tquaker, trembler\nn10498699\tquarter\nn10498816\tquarterback, signal caller, field general\nn10498986\tquartermaster\nn10499110\tquartermaster general\nn10499232\tQuebecois\nn10499355\tqueen, queen regnant, female monarch\nn10499631\tQueen of England\nn10499857\tqueen\nn10500217\tqueen\nn10500419\tqueen consort\nn10500603\tqueen mother\nn10500824\tQueen's Counsel\nn10500942\tquestion master, quizmaster\nn10501453\tquick study, sponge\nn10501635\tquietist\nn10502046\tquitter\nn10502329\trabbi\nn10502950\tracist, racialist\nn10503818\tradiobiologist\nn10504090\tradiologic technologist\nn10504206\tradiologist, radiotherapist\nn10505347\trainmaker\nn10505613\traiser\nn10505732\traja, rajah\nn10505942\trake, rakehell, profligate, rip, blood, roue\nn10506336\tramrod\nn10506544\tranch hand\nn10506915\tranker\nn10507070\tranter, raver\nn10507380\trape suspect\nn10507482\trapper\nn10507565\trapporteur\nn10507692\trare bird, rara avis\nn10508141\tratepayer\nn10508379\traw 
recruit\nn10508710\treader\nn10509063\treading teacher\nn10509161\trealist\nn10509810\treal estate broker, real estate agent, estate agent, land agent, house agent\nn10510245\trear admiral\nn10510974\treceiver\nn10511771\treciter\nn10512201\trecruit, enlistee\nn10512372\trecruit, military recruit\nn10512708\trecruiter\nn10512859\trecruiting-sergeant\nn10513509\tredcap\nn10513823\tredhead, redheader, red-header, carrottop\nn10513938\tredneck, cracker\nn10514051\treeler\nn10514121\treenactor\nn10514255\treferral\nn10514429\treferee, ref\nn10514784\trefiner\nn10515863\tReform Jew\nn10516527\tregistered nurse, RN\nn10517137\tregistrar\nn10517283\tRegius professor\nn10518349\treliever, allayer, comforter\nn10519126\tanchorite, hermit\nn10519494\treligious leader\nn10519984\tremover\nn10520286\tRenaissance man, generalist\nn10520544\trenegade\nn10520964\trentier\nn10521100\trepairman, maintenance man, service man\nn10521662\treporter, newsman, newsperson\nn10521853\tnewswoman\nn10522035\trepresentative\nn10522324\treprobate, miscreant\nn10522759\trescuer, recoverer, saver\nn10523341\treservist\nn10524076\tresident commissioner\nn10524223\trespecter\nn10524869\trestaurateur, restauranter\nn10525134\trestrainer, controller\nn10525436\tretailer, retail merchant\nn10525617\tretiree, retired person\nn10525878\treturning officer\nn10526534\trevenant\nn10527147\trevisionist\nn10527334\trevolutionist, revolutionary, subversive, subverter\nn10528023\trheumatologist\nn10528148\tRhodesian man, Homo rhodesiensis\nn10528493\trhymer, rhymester, versifier, poetizer, poetiser\nn10529231\trich person, wealthy person, have\nn10530150\trider\nn10530383\triding master\nn10530571\trifleman\nn10530959\tright-hander, right hander, righthander\nn10531109\tright-hand man, chief assistant, man Friday\nn10531445\tringer\nn10531838\tringleader\nn10533874\troadman, road mender\nn10533983\troarer, bawler, bellower, screamer, screecher, shouter, yeller\nn10536134\trocket engineer, rocket 
scientist\nn10536274\trocket scientist\nn10536416\trock star\nn10537708\tRomanov, Romanoff\nn10537906\tromanticist, romantic\nn10538629\tropemaker, rope-maker, roper\nn10538733\troper\nn10538853\troper\nn10539015\tropewalker, ropedancer\nn10539160\trosebud\nn10539278\tRosicrucian\nn10540114\tMountie\nn10540252\tRough Rider\nn10540656\troundhead\nn10541833\tcivil authority, civil officer\nn10542608\trunner\nn10542761\trunner\nn10542888\trunner\nn10543161\trunning back\nn10543937\trusher\nn10544232\trustic\nn10544748\tsaboteur, wrecker, diversionist\nn10545792\tsadist\nn10546428\tsailing master, navigator\nn10546633\tsailor, crewman\nn10548419\tsalesgirl, saleswoman, saleslady\nn10548537\tsalesman\nn10548681\tsalesperson, sales representative, sales rep\nn10549510\tsalvager, salvor\nn10550252\tsandwichman\nn10550369\tsangoma\nn10550468\tsannup\nn10551576\tsapper\nn10552393\tSassenach\nn10553140\tsatrap\nn10553235\tsaunterer, stroller, ambler\nn10554024\tSavoyard\nn10554141\tsawyer\nn10554846\tscalper\nn10555059\tscandalmonger\nn10555430\tscapegrace, black sheep\nn10556033\tscene painter\nn10556518\tschemer, plotter\nn10556704\tschizophrenic\nn10556825\tschlemiel, shlemiel\nn10557246\tschlockmeister, shlockmeister\nn10557854\tscholar, scholarly person, bookman, student\nn10559009\tscholiast\nn10559288\tschoolchild, school-age child, pupil\nn10559508\tschoolfriend\nn10559683\tSchoolman, medieval Schoolman\nn10559996\tschoolmaster\nn10560106\tschoolmate, classmate, schoolfellow, class fellow\nn10560637\tscientist\nn10561222\tscion\nn10561320\tscoffer, flouter, mocker, jeerer\nn10561736\tscofflaw\nn10562135\tscorekeeper, scorer\nn10562283\tscorer\nn10562509\tscourer\nn10562968\tscout, talent scout\nn10563314\tscoutmaster\nn10563403\tscrambler\nn10563711\tscratcher\nn10564098\tscreen actor, movie actor\nn10565502\tscrutineer, canvasser\nn10565667\tscuba diver\nn10566072\tsculptor, sculpturer, carver, statue maker\nn10567613\tSea Scout\nn10567722\tseasonal worker, 
seasonal\nn10567848\tseasoner\nn10568200\tsecond baseman, second sacker\nn10568358\tsecond cousin\nn10568443\tseconder\nn10568608\tsecond fiddle, second banana\nn10568915\tsecond-in-command\nn10569011\tsecond lieutenant, 2nd lieutenant\nn10569179\tsecond-rater, mediocrity\nn10570019\tsecretary\nn10570704\tSecretary of Agriculture, Agriculture Secretary\nn10571907\tSecretary of Health and Human Services\nn10572706\tSecretary of State\nn10572889\tSecretary of the Interior, Interior Secretary\nn10573957\tsectarian, sectary, sectarist\nn10574311\tsection hand\nn10574538\tsecularist\nn10574840\tsecurity consultant\nn10575463\tseeded player, seed\nn10575594\tseeder, cloud seeder\nn10575787\tseeker, searcher, quester\nn10576223\tsegregate\nn10576316\tsegregator, segregationist\nn10576676\tselectman\nn10576818\tselectwoman\nn10576962\tselfish person\nn10577182\tself-starter\nn10577284\tseller, marketer, vender, vendor, trafficker\nn10577710\tselling agent\nn10577820\tsemanticist, semiotician\nn10578021\tsemifinalist\nn10578162\tseminarian, seminarist\nn10578471\tsenator\nn10578656\tsendee\nn10579062\tsenior\nn10579549\tsenior vice president\nn10580030\tseparatist, separationist\nn10580437\tseptuagenarian\nn10580535\tserf, helot, villein\nn10581648\tspree killer\nn10581890\tserjeant-at-law, serjeant, sergeant-at-law, sergeant\nn10582604\tserver\nn10582746\tserviceman, military man, man, military personnel\nn10583387\tsettler, colonist\nn10583790\tsettler\nn10585077\tsex symbol\nn10585217\tsexton, sacristan\nn10585628\tshaheed\nn10586166\tShakespearian, Shakespearean\nn10586265\tshanghaier, seizer\nn10586444\tsharecropper, cropper, sharecrop farmer\nn10586903\tshaver\nn10586998\tShavian\nn10588074\tsheep\nn10588357\tsheik, tribal sheik, sheikh, tribal sheikh, Arab chief\nn10588724\tshelver\nn10588965\tshepherd\nn10589666\tship-breaker\nn10590146\tshipmate\nn10590239\tshipowner\nn10590452\tshipping 
agent\nn10590903\tshirtmaker\nn10591072\tshogun\nn10591811\tshopaholic\nn10592049\tshop girl\nn10592811\tshop steward, steward\nn10593521\tshot putter\nn10594147\tshrew, termagant\nn10594523\tshuffler\nn10594857\tshyster, pettifogger\nn10595164\tsibling, sib\nn10595647\tsick person, diseased person, sufferer\nn10596517\tsightreader\nn10596899\tsignaler, signaller\nn10597505\tsigner\nn10597745\tsignor, signior\nn10597889\tsignora\nn10598013\tsignore\nn10598181\tsignorina\nn10598459\tsilent partner, sleeping partner\nn10598904\taddle-head, addlehead, loon, birdbrain\nn10599215\tsimperer\nn10599806\tsinger, vocalist, vocalizer, vocaliser\nn10601234\tSinologist\nn10601362\tsipper\nn10602119\tsirrah\nn10602470\tSister\nn10602985\tsister, sis\nn10603528\twaverer, vacillator, hesitator, hesitater\nn10603851\tsitar player\nn10604275\tsixth-former\nn10604380\tskateboarder\nn10604634\tskeptic, sceptic, doubter\nn10604880\tsketcher\nn10604979\tskidder\nn10605253\tskier\nn10605737\tskinny-dipper\nn10607291\tskin-diver, aquanaut\nn10607478\tskinhead\nn10609092\tslasher\nn10609198\tslattern, slut, slovenly woman, trollop\nn10610465\tsleeper, slumberer\nn10610850\tsleeper\nn10611267\tsleeping beauty\nn10611613\tsleuth, sleuthhound\nn10612210\tslob, sloven, pig, slovenly person\nn10612373\tsloganeer\nn10612518\tslopseller, slop-seller\nn10613996\tsmasher, stunner, knockout, beauty, ravisher, sweetheart, peach, lulu, looker, mantrap, dish\nn10614507\tsmirker\nn10614629\tsmith, metalworker\nn10615179\tsmoothie, smoothy, sweet talker, charmer\nn10615334\tsmuggler, runner, contrabandist, moon curser, moon-curser\nn10616578\tsneezer\nn10617024\tsnob, prig, snot, snoot\nn10617193\tsnoop, snooper\nn10617397\tsnorer\nn10618234\tsob sister\nn10618342\tsoccer player\nn10618465\tsocial anthropologist, cultural anthropologist\nn10618685\tsocial climber, climber\nn10618848\tsocialist\nn10619492\tsocializer, socialiser\nn10619642\tsocial scientist\nn10619888\tsocial 
secretary\nn10620212\tSocinian\nn10620586\tsociolinguist\nn10620758\tsociologist\nn10621294\tsoda jerk, soda jerker\nn10621400\tsodalist\nn10621514\tsodomite, sodomist, sod, bugger\nn10622053\tsoldier\nn10624074\tson, boy\nn10624310\tsongster\nn10624437\tsongstress\nn10624540\tsongwriter, songster, ballad maker\nn10625860\tsorcerer, magician, wizard, necromancer, thaumaturge, thaumaturgist\nn10626630\tsorehead\nn10627252\tsoul mate\nn10628097\tSouthern Baptist\nn10628644\tsovereign, crowned head, monarch\nn10629329\tspacewalker\nn10629647\tSpanish American, Hispanic American, Hispanic\nn10629939\tsparring partner, sparring mate\nn10630093\tspastic\nn10630188\tspeaker, talker, utterer, verbalizer, verbaliser\nn10631131\tnative speaker\nn10631309\tSpeaker\nn10631654\tspeechwriter\nn10632576\tspecialist, medical specialist\nn10633298\tspecifier\nn10633450\tspectator, witness, viewer, watcher, looker\nn10634464\tspeech therapist\nn10634849\tspeedskater, speed skater\nn10634990\tspellbinder\nn10635788\tsphinx\nn10636488\tspinster, old maid\nn10637483\tsplit end\nn10638922\tsport, sportsman, sportswoman\nn10639238\tsport, summercater\nn10639359\tsporting man, outdoor man\nn10639637\tsports announcer, sportscaster, sports commentator\nn10639817\tsports editor\nn10641223\tsprog\nn10642596\tsquare dancer\nn10642705\tsquare shooter, straight shooter, straight arrow\nn10643095\tsquatter\nn10643837\tsquire\nn10643937\tsquire\nn10644598\tstaff member, staffer\nn10645017\tstaff sergeant\nn10645223\tstage director\nn10646032\tstainer\nn10646140\tstakeholder\nn10646433\tstalker\nn10646641\tstalking-horse\nn10646780\tstammerer, stutterer\nn10646942\tstamper, stomper, tramper, trampler\nn10647745\tstandee\nn10648237\tstand-in, substitute, relief, reliever, backup, backup man, fill-in\nn10648696\tstar, principal, lead\nn10649197\tstarlet\nn10649308\tstarter, dispatcher\nn10650162\tstatesman, solon, national leader\nn10652605\tstate treasurer\nn10652703\tstationer, stationery 
seller\nn10654015\tstenographer, amanuensis, shorthand typist\nn10654211\tstentor\nn10654321\tstepbrother, half-brother, half brother\nn10654827\tstepmother\nn10654932\tstepparent\nn10655169\tstevedore, loader, longshoreman, docker, dockhand, dock worker, dockworker, dock-walloper, lumper\nn10655442\tsteward\nn10655594\tsteward, flight attendant\nn10655730\tsteward\nn10655986\tstickler\nn10656120\tstiff\nn10656223\tstifler, smotherer\nn10656969\tstipendiary, stipendiary magistrate\nn10657306\tstitcher\nn10657556\tstockjobber\nn10657835\tstock trader\nn10658304\tstockist\nn10659042\tstoker, fireman\nn10659762\tstooper\nn10660128\tstore detective\nn10660621\tstrafer\nn10660883\tstraight man, second banana\nn10661002\tstranger, alien, unknown\nn10661216\tstranger\nn10661563\tstrategist, strategian\nn10661732\tstraw boss, assistant foreman\nn10663315\tstreetwalker, street girl, hooker, hustler, floozy, floozie, slattern\nn10663549\tstretcher-bearer, litter-bearer\nn10665302\tstruggler\nn10665587\tstud, he-man, macho-man\nn10665698\tstudent, pupil, educatee\nn10666752\tstumblebum, palooka\nn10667477\tstylist\nn10667709\tsubaltern\nn10667863\tsubcontractor\nn10668450\tsubduer, surmounter, overcomer\nn10668666\tsubject, case, guinea pig\nn10669991\tsubordinate, subsidiary, underling, foot soldier\nn10671042\tsubstitute, reserve, second-stringer\nn10671613\tsuccessor, heir\nn10671736\tsuccessor, replacement\nn10671898\tsuccorer, succourer\nn10672371\tSufi\nn10672540\tsuffragan, suffragan bishop\nn10672662\tsuffragette\nn10673296\tsugar daddy\nn10673776\tsuicide bomber\nn10674130\tsuitor, suer, wooer\nn10674713\tsumo wrestler\nn10675010\tsunbather\nn10675142\tsundowner\nn10675609\tsuper heavyweight\nn10676018\tsuperior, higher-up, superordinate\nn10676434\tsupermom\nn10676569\tsupernumerary, spear carrier, extra\nn10678937\tsupremo\nn10679174\tsurgeon, operating surgeon, sawbones\nn10679503\tSurgeon General\nn10679610\tSurgeon 
General\nn10679723\tsurpriser\nn10680609\tsurveyor\nn10680796\tsurveyor\nn10681194\tsurvivor, subsister\nn10681557\tsutler, victualer, victualler, provisioner\nn10682713\tsweeper\nn10682953\tsweetheart, sweetie, steady, truelove\nn10683675\tswinger, tramp\nn10684146\tswitcher, whipper\nn10684630\tswot, grind, nerd, wonk, dweeb\nn10684827\tsycophant, toady, crawler, lackey, ass-kisser\nn10685398\tsylph\nn10686073\tsympathizer, sympathiser, well-wisher\nn10686517\tsymphonist\nn10686694\tsyncopator\nn10686885\tsyndic\nn10688356\ttactician\nn10688811\ttagger\nn10689306\ttailback\nn10690268\ttallyman, tally clerk\nn10690421\ttallyman\nn10690648\ttanker, tank driver\nn10691318\ttapper, wiretapper, phone tapper\nn10691937\tTartuffe, Tartufe\nn10692090\tTarzan\nn10692482\ttaster, taste tester, taste-tester, sampler\nn10692883\ttax assessor, assessor\nn10693235\ttaxer\nn10693334\ttaxi dancer\nn10693824\ttaxonomist, taxonomer, systematist\nn10694258\tteacher, instructor\nn10694939\tteaching fellow\nn10695450\ttearaway\nn10696101\ttechnical sergeant\nn10696508\ttechnician\nn10697135\tTed, Teddy boy\nn10697282\tteetotaler, teetotaller, teetotalist\nn10698368\ttelevision reporter, television newscaster, TV reporter, TV newsman\nn10699558\ttemporizer, temporiser\nn10699752\ttempter\nn10699981\tterm infant\nn10700105\ttoiler\nn10700201\ttenant, renter\nn10700640\ttenant\nn10700963\ttenderfoot\nn10701180\ttennis player\nn10701644\ttennis pro, professional tennis player\nn10701962\ttenor saxophonist, tenorist\nn10702167\ttermer\nn10702615\tterror, scourge, threat\nn10703221\ttertigravida, gravida III\nn10703336\ttestator, testate\nn10703480\ttestatrix\nn10703692\ttestee, examinee\nn10704238\ttest-tube baby\nn10704712\tTexas Ranger, Ranger\nn10704886\tthane\nn10705448\ttheatrical producer\nn10705615\ttheologian, theologist, theologizer, theologiser\nn10706812\ttheorist, theoretician, theorizer, theoriser, idealogue\nn10707134\ttheosophist\nn10707233\ttherapist, 
healer\nn10707707\tThessalonian\nn10708292\tthinker, creative thinker, mind\nn10708454\tthinker\nn10709529\tthrower\nn10710171\tthurifer\nn10710259\tticket collector, ticket taker\nn10710778\ttight end\nn10710913\ttiler\nn10711483\ttimekeeper, timer\nn10711766\tTimorese\nn10712229\ttinkerer, fiddler\nn10712374\ttinsmith, tinner\nn10712474\ttinter\nn10712690\ttippler, social drinker\nn10712835\ttipster, tout\nn10713254\tT-man\nn10713686\ttoastmaster, symposiarch\nn10713843\ttoast mistress\nn10714195\ttobogganist\nn10715030\ttomboy, romp, hoyden\nn10715347\ttoolmaker\nn10715789\ttorchbearer\nn10716576\tTory\nn10716864\tTory\nn10717055\ttosser\nn10717196\ttosser, jerk-off, wanker\nn10717337\ttotalitarian\nn10718131\ttourist, tourer, holidaymaker\nn10718349\ttout, touter\nn10718509\ttout, ticket tout\nn10718665\ttovarich, tovarisch\nn10718952\ttowhead\nn10719036\ttown clerk\nn10719132\ttown crier, crier\nn10719267\ttownsman, towner\nn10719807\ttoxicologist\nn10720197\ttrack star\nn10720453\ttrader, bargainer, dealer, monger\nn10720964\ttrade unionist, unionist, union member\nn10721124\ttraditionalist, diehard\nn10721321\ttraffic cop\nn10721612\ttragedian\nn10721708\ttragedian\nn10721819\ttragedienne\nn10722029\ttrail boss\nn10722575\ttrainer\nn10722965\ttraitor, treasonist\nn10723230\ttraitress\nn10723597\ttransactor\nn10724132\ttranscriber\nn10724372\ttransfer, transferee\nn10724570\ttransferee\nn10725280\ttranslator, transcriber\nn10726031\ttransvestite, cross-dresser\nn10726786\ttraveling salesman, travelling salesman, commercial traveler, commercial traveller, roadman, bagman\nn10727016\ttraverser\nn10727171\ttrawler\nn10727458\tTreasury, First Lord of the Treasury\nn10728117\ttrencher\nn10728233\ttrend-setter, taste-maker, fashion arbiter\nn10728624\ttribesman\nn10728998\ttrier, attempter, essayer\nn10729330\ttrifler\nn10730542\ttrooper\nn10730728\ttrooper, state trooper\nn10731013\tTrotskyite, Trotskyist, Trot\nn10731732\ttruant, hooky 
player\nn10732010\ttrumpeter, cornetist\nn10732521\ttrusty\nn10732854\tTudor\nn10732967\ttumbler\nn10733820\ttutee\nn10734394\ttwin\nn10734741\ttwo-timer\nn10734891\tTyke\nn10734963\ttympanist, timpanist\nn10735173\ttypist\nn10735298\ttyrant, autocrat, despot\nn10735984\tumpire, ump\nn10737103\tunderstudy, standby\nn10737264\tundesirable\nn10738111\tunicyclist\nn10738215\tunilateralist\nn10738670\tUnitarian\nn10738871\tArminian\nn10739135\tuniversal donor\nn10739297\tUNIX guru\nn10739391\tUnknown Soldier\nn10740594\tupsetter\nn10740732\tupstager\nn10740868\tupstart, parvenu, nouveau-riche, arriviste\nn10741152\tupstart\nn10741367\turchin\nn10741493\turologist\nn10742005\tusherette\nn10742111\tusher, doorkeeper\nn10742546\tusurper, supplanter\nn10742997\tutility man\nn10743124\tutilizer, utiliser\nn10743356\tUtopian\nn10744078\tuxoricide\nn10744164\tvacationer, vacationist\nn10745006\tvaledictorian, valedictory speaker\nn10745770\tvalley girl\nn10746931\tvaulter, pole vaulter, pole jumper\nn10747119\tvegetarian\nn10747424\tvegan\nn10747548\tvenerator\nn10747965\tventure capitalist\nn10748142\tventurer, merchant-venturer\nn10748506\tvermin, varmint\nn10748620\tvery important person, VIP, high-up, dignitary, panjandrum, high muckamuck\nn10749928\tvibist, vibraphonist\nn10750031\tvicar\nn10750188\tvicar\nn10750640\tvicar-general\nn10751026\tvice chancellor\nn10751152\tvicegerent\nn10751265\tvice president, V.P.\nn10751710\tvice-regent\nn10752480\tvictim, dupe\nn10753061\tVictorian\nn10753182\tvictualer, victualler\nn10753339\tvigilante, vigilance man\nn10753442\tvillager\nn10753989\tvintager\nn10754189\tvintner, wine merchant\nn10754281\tviolator, debaucher, ravisher\nn10754449\tviolator, lawbreaker, law offender\nn10755080\tviolist\nn10755164\tvirago\nn10755394\tvirologist\nn10755648\tVisayan, Bisayan\nn10756061\tviscountess\nn10756148\tviscount\nn10756261\tVisigoth\nn10756641\tvisionary\nn10756837\tvisiting fireman\nn10757050\tvisiting 
professor\nn10757492\tvisualizer, visualiser\nn10758337\tvixen, harpy, hellcat\nn10758445\tvizier\nn10758949\tvoicer\nn10759151\tvolunteer, unpaid worker\nn10759331\tvolunteer, military volunteer, voluntary\nn10759982\tvotary\nn10760199\tvotary\nn10760622\tvouchee\nn10760951\tvower\nn10761190\tvoyager\nn10761326\tvoyeur, Peeping Tom, peeper\nn10761519\tvulcanizer, vulcaniser\nn10762212\twaffler\nn10762480\tWagnerian\nn10763075\twaif, street child\nn10763245\twailer\nn10763383\twaiter, server\nn10763620\twaitress\nn10764465\twalking delegate\nn10764622\twalk-on\nn10764719\twallah\nn10765305\twally\nn10765587\twaltzer\nn10765679\twanderer, roamer, rover, bird of passage\nn10765885\tWandering Jew\nn10766260\twanton\nn10768148\twarrantee\nn10768272\twarrantee\nn10768903\twasher\nn10769084\twasherman, laundryman\nn10769188\twashwoman, washerwoman, laundrywoman, laundress\nn10769321\twassailer, carouser\nn10769459\twastrel, waster\nn10771066\tWave\nn10772092\tweatherman, weather forecaster\nn10772580\tweekend warrior\nn10772937\tweeder\nn10773665\twelder\nn10773800\twelfare case, charity case\nn10774329\twesterner\nn10774756\tWest-sider\nn10775003\twetter\nn10775128\twhaler\nn10776052\tWhig\nn10776339\twhiner, complainer, moaner, sniveller, crybaby, bellyacher, grumbler, squawker\nn10776887\twhipper-in\nn10777299\twhisperer\nn10778044\twhiteface\nn10778148\tCarmelite, White Friar\nn10778711\tAugustinian\nn10778999\twhite hope, great white hope\nn10779610\twhite supremacist\nn10779897\twhoremaster, whoremonger\nn10779995\twhoremaster, whoremonger, john, trick\nn10780284\twidow, widow woman\nn10780632\twife, married woman\nn10781236\twiggler, wriggler, squirmer\nn10781817\twimp, chicken, crybaby\nn10782362\twing commander\nn10782471\twinger\nn10782791\twinner\nn10782940\twinner, victor\nn10783240\twindow dresser, window trimmer\nn10783539\twinker\nn10783646\twiper\nn10783734\twireman, wirer\nn10784113\twise guy, smart aleck, wiseacre, wisenheimer, 
weisenheimer\nn10784544\twitch doctor\nn10784922\twithdrawer\nn10785480\twithdrawer\nn10787470\twoman, adult female\nn10788852\twoman\nn10789415\twonder boy, golden boy\nn10789709\twonderer\nn10791115\tworking girl\nn10791221\tworkman, workingman, working man, working person\nn10791820\tworkmate\nn10791890\tworldling\nn10792335\tworshiper, worshipper\nn10792506\tworthy\nn10792856\twrecker\nn10793570\twright\nn10793799\twrite-in candidate, write-in\nn10794014\twriter, author\nn10801561\tWykehamist\nn10801802\tyakuza\nn10802507\tyard bird, yardbird\nn10802621\tyardie\nn10802953\tyardman\nn10803031\tyardmaster, trainmaster, train dispatcher\nn10803282\tyenta\nn10803978\tyogi\nn10804287\tyoung buck, young man\nn10804636\tyoung Turk\nn10804732\tYoung Turk\nn10805501\tZionist\nn10806113\tzoo keeper\nn10994097\tGenet, Edmund Charles Edouard Genet, Citizen Genet\nn11100798\tKennan, George F. Kennan, George Frost Kennan\nn11196627\tMunro, H. H. Munro, Hector Hugh Munro, Saki\nn11242849\tPopper, Karl Popper, Sir Karl Raimund Popper\nn11318824\tStoker, Bram Stoker, Abraham Stoker\nn11346873\tTownes, Charles Townes, Charles Hard Townes\nn11448153\tdust storm, duster, sandstorm, sirocco\nn11487732\tparhelion, mock sun, sundog\nn11508382\tsnow, snowfall\nn11511327\tfacula\nn11524451\twave\nn11530008\tmicroflora\nn11531193\twilding\nn11531334\tsemi-climber\nn11532682\tvolva\nn11533212\tbasidiocarp\nn11533999\tdomatium\nn11536567\tapomict\nn11536673\taquatic\nn11537327\tbryophyte, nonvascular plant\nn11539289\tacrocarp, acrocarpous moss\nn11542137\tsphagnum, sphagnum moss, peat moss, bog moss\nn11542640\tliverwort, hepatic\nn11544015\thepatica, Marchantia polymorpha\nn11545350\tpecopteris\nn11545524\tpteridophyte, nonflowering plant\nn11545714\tfern\nn11547562\tfern ally\nn11547855\tspore\nn11548728\tcarpospore\nn11548870\tchlamydospore\nn11549009\tconidium, 
conidiospore\nn11549245\toospore\nn11549779\ttetraspore\nn11549895\tzoospore\nn11552133\tcryptogam\nn11552386\tspermatophyte, phanerogam, seed plant\nn11552594\tseedling\nn11552806\tannual\nn11552976\tbiennial\nn11553240\tperennial\nn11553522\thygrophyte\nn11596108\tgymnosperm\nn11597657\tgnetum, Gnetum gnemon\nn11598287\tCatha edulis\nn11598686\tephedra, joint fir\nn11598886\tmahuang, Ephedra sinica\nn11599324\twelwitschia, Welwitschia mirabilis\nn11600372\tcycad\nn11601177\tsago palm, Cycas revoluta\nn11601333\tfalse sago, fern palm, Cycas circinalis\nn11601918\tzamia\nn11602091\tcoontie, Florida arrowroot, Seminole bread, Zamia pumila\nn11602478\tceratozamia\nn11602873\tdioon\nn11603246\tencephalartos\nn11603462\tkaffir bread, Encephalartos caffer\nn11603835\tmacrozamia\nn11604046\tburrawong, Macrozamia communis, Macrozamia spiralis\nn11608250\tpine, pine tree, true pine\nn11609475\tpinon, pinyon\nn11609684\tnut pine\nn11609862\tpinon pine, Mexican nut pine, Pinus cembroides\nn11610047\tRocky mountain pinon, Pinus edulis\nn11610215\tsingle-leaf, single-leaf pine, single-leaf pinyon, Pinus monophylla\nn11610437\tbishop pine, bishop's pine, Pinus muricata\nn11610602\tCalifornia single-leaf pinyon, Pinus californiarum\nn11610823\tParry's pinyon, Pinus quadrifolia, Pinus parryana\nn11611087\tspruce pine, Pinus glabra\nn11611233\tblack pine, Pinus nigra\nn11611356\tpitch pine, northern pitch pine, Pinus rigida\nn11611561\tpond pine, Pinus serotina\nn11611758\tstone pine, umbrella pine, European nut pine, Pinus pinea\nn11612018\tSwiss pine, Swiss stone pine, arolla pine, cembra nut tree, Pinus cembra\nn11612235\tcembra nut, cedar nut\nn11612349\tSwiss mountain pine, mountain pine, dwarf mountain pine, mugho pine, mugo pine, Pinus mugo\nn11612575\tancient pine, Pinus longaeva\nn11612923\twhite pine\nn11613219\tAmerican white pine, eastern white pine, weymouth pine, Pinus strobus\nn11613459\twestern white pine, silver pine, mountain pine, Pinus 
monticola\nn11613692\tsouthwestern white pine, Pinus strobiformis\nn11613867\tlimber pine, Pinus flexilis\nn11614039\twhitebark pine, whitebarked pine, Pinus albicaulis\nn11614250\tyellow pine\nn11614420\tponderosa, ponderosa pine, western yellow pine, bull pine, Pinus ponderosa\nn11614713\tJeffrey pine, Jeffrey's pine, black pine, Pinus jeffreyi\nn11615026\tshore pine, lodgepole, lodgepole pine, spruce pine, Pinus contorta\nn11615259\tSierra lodgepole pine, Pinus contorta murrayana\nn11615387\tloblolly pine, frankincense pine, Pinus taeda\nn11615607\tjack pine, Pinus banksiana\nn11615812\tswamp pine\nn11615967\tlongleaf pine, pitch pine, southern yellow pine, Georgia pine, Pinus palustris\nn11616260\tshortleaf pine, short-leaf pine, shortleaf yellow pine, Pinus echinata\nn11616486\tred pine, Canadian red pine, Pinus resinosa\nn11616662\tScotch pine, Scots pine, Scotch fir, Pinus sylvestris\nn11616852\tscrub pine, Virginia pine, Jersey pine, Pinus virginiana\nn11617090\tMonterey pine, Pinus radiata\nn11617272\tbristlecone pine, Rocky Mountain bristlecone pine, Pinus aristata\nn11617631\ttable-mountain pine, prickly pine, hickory pine, Pinus pungens\nn11617878\tknobcone pine, Pinus attenuata\nn11618079\tJapanese red pine, Japanese table pine, Pinus densiflora\nn11618290\tJapanese black pine, black pine, Pinus thunbergii\nn11618525\tTorrey pine, Torrey's pine, soledad pine, grey-leaf pine, sabine pine, Pinus torreyana\nn11618861\tlarch, larch tree\nn11619227\tAmerican larch, tamarack, black larch, Larix laricina\nn11619455\twestern larch, western tamarack, Oregon larch, Larix occidentalis\nn11619687\tsubalpine larch, Larix lyallii\nn11619845\tEuropean larch, Larix decidua\nn11620016\tSiberian larch, Larix siberica, Larix russica\nn11620389\tgolden larch, Pseudolarix amabilis\nn11620673\tfir, fir tree, true fir\nn11621029\tsilver fir\nn11621281\tamabilis fir, white fir, Pacific silver fir, red silver fir, Christmas tree, Abies amabilis\nn11621547\tEuropean silver fir, 
Christmas tree, Abies alba\nn11621727\twhite fir, Colorado fir, California white fir, Abies concolor, Abies lowiana\nn11621950\tbalsam fir, balm of Gilead, Canada balsam, Abies balsamea\nn11622184\tFraser fir, Abies fraseri\nn11622368\tlowland fir, lowland white fir, giant fir, grand fir, Abies grandis\nn11622591\tAlpine fir, subalpine fir, Abies lasiocarpa\nn11622771\tSanta Lucia fir, bristlecone fir, Abies bracteata, Abies venusta\nn11623105\tcedar, cedar tree, true cedar\nn11623815\tcedar of Lebanon, Cedrus libani\nn11623967\tdeodar, deodar cedar, Himalayan cedar, Cedrus deodara\nn11624192\tAtlas cedar, Cedrus atlantica\nn11624531\tspruce\nn11625003\tNorway spruce, Picea abies\nn11625223\tweeping spruce, Brewer's spruce, Picea breweriana\nn11625391\tEngelmann spruce, Engelmann's spruce, Picea engelmannii\nn11625632\twhite spruce, Picea glauca\nn11625804\tblack spruce, Picea mariana, spruce pine\nn11626010\tSiberian spruce, Picea obovata\nn11626152\tSitka spruce, Picea sitchensis\nn11626409\toriental spruce, Picea orientalis\nn11626585\tColorado spruce, Colorado blue spruce, silver spruce, Picea pungens\nn11626826\tred spruce, eastern spruce, yellow spruce, Picea rubens\nn11627168\themlock, hemlock tree\nn11627512\teastern hemlock, Canadian hemlock, spruce pine, Tsuga canadensis\nn11627714\tCarolina hemlock, Tsuga caroliniana\nn11627908\tmountain hemlock, black hemlock, Tsuga mertensiana\nn11628087\twestern hemlock, Pacific hemlock, west coast hemlock, Tsuga heterophylla\nn11628456\tdouglas fir\nn11628793\tgreen douglas fir, douglas spruce, douglas pine, douglas hemlock, Oregon fir, Oregon pine, Pseudotsuga menziesii\nn11629047\tbig-cone spruce, big-cone douglas fir, Pseudotsuga macrocarpa\nn11629354\tCathaya\nn11630017\tcedar, cedar tree\nn11630489\tcypress, cypress tree\nn11631159\tgowen cypress, Cupressus goveniana\nn11631405\tpygmy cypress, Cupressus pigmaea, Cupressus goveniana pigmaea\nn11631619\tSanta Cruz cypress, Cupressus abramsiana, Cupressus goveniana 
abramsiana\nn11631854\tArizona cypress, Cupressus arizonica\nn11631985\tGuadalupe cypress, Cupressus guadalupensis\nn11632167\tMonterey cypress, Cupressus macrocarpa\nn11632376\tMexican cypress, cedar of Goa, Portuguese cypress, Cupressus lusitanica\nn11632619\tItalian cypress, Mediterranean cypress, Cupressus sempervirens\nn11632929\tKing William pine, Athrotaxis selaginoides\nn11633284\tChilean cedar, Austrocedrus chilensis\nn11634736\tincense cedar, red cedar, Calocedrus decurrens, Libocedrus decurrens\nn11635152\tsouthern white cedar, coast white cedar, Atlantic white cedar, white cypress, white cedar, Chamaecyparis thyoides\nn11635433\tOregon cedar, Port Orford cedar, Lawson's cypress, Lawson's cedar, Chamaecyparis lawsoniana\nn11635830\tyellow cypress, yellow cedar, Nootka cypress, Alaska cedar, Chamaecyparis nootkatensis\nn11636204\tJapanese cedar, Japan cedar, sugi, Cryptomeria japonica\nn11636835\tjuniper berry\nn11639084\tincense cedar\nn11639306\tkawaka, Libocedrus plumosa\nn11639445\tpahautea, Libocedrus bidwillii, mountain pine\nn11640132\tmetasequoia, dawn redwood, Metasequoia glyptostrodoides\nn11643835\tarborvitae\nn11644046\twestern red cedar, red cedar, canoe cedar, Thuja plicata\nn11644226\tAmerican arborvitae, northern white cedar, white cedar, Thuja occidentalis\nn11644462\tOriental arborvitae, Thuja orientalis, Platycladus orientalis\nn11644872\thiba arborvitae, Thujopsis dolobrata\nn11645163\tketeleeria\nn11645590\tWollemi pine\nn11645914\taraucaria\nn11646167\tmonkey puzzle, chile pine, Araucaria araucana\nn11646344\tnorfolk island pine, Araucaria heterophylla, Araucaria excelsa\nn11646517\tnew caledonian pine, Araucaria columnaris\nn11646694\tbunya bunya, bunya bunya tree, Araucaria bidwillii\nn11646955\thoop pine, Moreton Bay pine, Araucaria cunninghamii\nn11647306\tkauri pine, dammar pine\nn11647703\tkauri, kaury, Agathis australis\nn11647868\tamboina pine, amboyna pine, Agathis dammara, Agathis alba\nn11648039\tdundathu pine, queensland 
kauri, smooth bark kauri, Agathis robusta\nn11648268\tred kauri, Agathis lanceolata\nn11648776\tplum-yew\nn11649150\tCalifornia nutmeg, nutmeg-yew, Torreya californica\nn11649359\tstinking cedar, stinking yew, Torrey tree, Torreya taxifolia\nn11649878\tcelery pine\nn11650160\tcelery top pine, celery-topped pine, Phyllocladus asplenifolius\nn11650307\ttanekaha, Phyllocladus trichomanoides\nn11650430\tAlpine celery pine, Phyllocladus alpinus\nn11650558\tyellowwood, yellowwood tree\nn11650759\tgymnospermous yellowwood\nn11652039\tpodocarp\nn11652217\tyacca, yacca podocarp, Podocarpus coriaceus\nn11652376\tbrown pine, Rockingham podocarp, Podocarpus elatus\nn11652578\tcape yellowwood, African yellowwood, Podocarpus elongatus\nn11652753\tSouth-African yellowwood, Podocarpus latifolius\nn11652966\talpine totara, Podocarpus nivalis\nn11653126\ttotara, Podocarpus totara\nn11653570\tcommon yellowwood, bastard yellowwood, Afrocarpus falcata\nn11653904\tkahikatea, New Zealand Dacryberry, New Zealand white pine, Dacrycarpus dacrydioides, Podocarpus dacrydioides\nn11654293\trimu, imou pine, red pine, Dacrydium cupressinum\nn11654438\ttarwood, tar-wood, Dacrydium colensoi\nn11654984\tcommon sickle pine, Falcatifolium falciforme\nn11655152\tyellow-leaf sickle pine, Falcatifolium taxoides\nn11655592\ttarwood, tar-wood, New Zealand mountain pine, Halocarpus bidwilli, Dacrydium bidwilli\nn11655974\twestland pine, silver pine, Lagarostrobus colensoi\nn11656123\thuon pine, Lagarostrobus franklinii, Dacrydium franklinii\nn11656549\tChilean rimu, Lepidothamnus fonkii\nn11656771\tmountain rimu, Lepidothamnus laxifolius, Dacridium laxifolius\nn11657585\tnagi, Nageia nagi\nn11658331\tmiro, black pine, Prumnopitys ferruginea, Podocarpus ferruginea\nn11658544\tmatai, black pine, Prumnopitys taxifolia, Podocarpus spicata\nn11658709\tplum-fruited yew, Prumnopitys andina, Prumnopitys elegans\nn11659248\tPrince Albert yew, Prince Albert's yew, Saxe-gothea conspicua\nn11659627\tSundacarpus amara, 
Prumnopitys amara, Podocarpus amara\nn11660300\tJapanese umbrella pine, Sciadopitys verticillata\nn11661372\tyew\nn11661909\tOld World yew, English yew, Taxus baccata\nn11662128\tPacific yew, California yew, western yew, Taxus brevifolia\nn11662371\tJapanese yew, Taxus cuspidata\nn11662585\tFlorida yew, Taxus floridana\nn11662937\tNew Caledonian yew, Austrotaxus spicata\nn11663263\twhite-berry yew, Pseudotaxus chienii\nn11664418\tginkgo, gingko, maidenhair tree, Ginkgo biloba\nn11665372\tangiosperm, flowering plant\nn11666854\tdicot, dicotyledon, magnoliopsid, exogen\nn11668117\tmonocot, monocotyledon, liliopsid, endogen\nn11669786\tfloret, floweret\nn11669921\tflower\nn11672269\tbloomer\nn11672400\twildflower, wild flower\nn11674019\tapetalous flower\nn11674332\tinflorescence\nn11675025\trosebud\nn11675404\tgynostegium\nn11675738\tpollinium\nn11676500\tpistil\nn11676743\tgynobase\nn11676850\tgynophore\nn11677485\tstylopodium\nn11677902\tcarpophore\nn11678010\tcornstalk, corn stalk\nn11678299\tpetiolule\nn11678377\tmericarp\nn11679378\tmicropyle\nn11680457\tgerm tube\nn11680596\tpollen tube\nn11682659\tgemma\nn11683216\tgalbulus\nn11683838\tnectary, honey gland\nn11684264\tpericarp, seed vessel\nn11684499\tepicarp, exocarp\nn11684654\tmesocarp\nn11685091\tpip\nn11685621\tsilique, siliqua\nn11686195\tcataphyll\nn11686652\tperisperm\nn11686780\tmonocarp, monocarpic plant, monocarpous plant\nn11686912\tsporophyte\nn11687071\tgametophyte\nn11687432\tmegasporangium, macrosporangium\nn11687789\tmicrospore\nn11687964\tmicrosporangium\nn11688069\tmicrosporophyll\nn11688378\tarchespore, archesporium\nn11689197\tbonduc nut, nicker nut, nicker seed\nn11689367\tJob's tears\nn11689483\toilseed, oil-rich seed\nn11689678\tcastor bean\nn11689815\tcottonseed\nn11689957\tcandlenut\nn11690088\tpeach pit\nn11690254\thypanthium, floral cup, calyx tube\nn11690455\tpetal, flower petal\nn11691046\tcorolla\nn11691857\tlip\nn11692265\tperianth, chlamys, floral envelope, perigone, 
perigonium\nn11692792\tthistledown\nn11693981\tcustard apple, custard apple tree\nn11694300\tcherimoya, cherimoya tree, Annona cherimola\nn11694469\tilama, ilama tree, Annona diversifolia\nn11694664\tsoursop, prickly custard apple, soursop tree, Annona muricata\nn11694866\tbullock's heart, bullock's heart tree, bullock heart, Annona reticulata\nn11695085\tsweetsop, sweetsop tree, Annona squamosa\nn11695285\tpond apple, pond-apple tree, Annona glabra\nn11695599\tpawpaw, papaw, papaw tree, Asimina triloba\nn11695974\tilang-ilang, ylang-ylang, Cananga odorata\nn11696450\tlancewood, lancewood tree, Oxandra lanceolata\nn11696935\tGuinea pepper, negro pepper, Xylopia aethiopica\nn11697560\tbarberry\nn11697802\tAmerican barberry, Berberis canadensis\nn11698042\tcommon barberry, European barberry, Berberis vulgaris\nn11698245\tJapanese barberry, Berberis thunbergii\nn11699442\tOregon grape, Oregon holly grape, hollygrape, mountain grape, holly-leaves barberry, Mahonia aquifolium\nn11699751\tOregon grape, Mahonia nervosa\nn11700058\tmayapple, May apple, wild mandrake, Podophyllum peltatum\nn11700279\tMay apple\nn11700864\tallspice\nn11701066\tCarolina allspice, strawberry shrub, strawberry bush, sweet shrub, Calycanthus floridus\nn11701302\tspicebush, California allspice, Calycanthus occidentalis\nn11702713\tkatsura tree, Cercidiphyllum japonicum\nn11703669\tlaurel\nn11704093\ttrue laurel, bay, bay laurel, bay tree, Laurus nobilis\nn11704620\tcamphor tree, Cinnamomum camphora\nn11704791\tcinnamon, Ceylon cinnamon, Ceylon cinnamon tree, Cinnamomum zeylanicum\nn11705171\tcassia, cassia-bark tree, Cinnamomum cassia\nn11705387\tcassia bark, Chinese cinnamon\nn11705573\tSaigon cinnamon, Cinnamomum loureirii\nn11705776\tcinnamon bark\nn11706325\tspicebush, spice bush, American spicebush, Benjamin bush, Lindera benzoin, Benzoin odoriferum\nn11706761\tavocado, avocado tree, Persea Americana\nn11706942\tlaurel-tree, red bay, Persea borbonia\nn11707229\tsassafras, sassafras tree, 
Sassafras albidum\nn11707827\tCalifornia laurel, California bay tree, Oregon myrtle, pepperwood, spice tree, sassafras laurel, California olive, mountain laurel, Umbellularia californica\nn11708658\tanise tree\nn11708857\tpurple anise, Illicium floridanum\nn11709045\tstar anise, Illicium anisatum\nn11709205\tstar anise, Chinese anise, Illicium verum\nn11709674\tmagnolia\nn11710136\tsouthern magnolia, evergreen magnolia, large-flowering magnolia, bull bay, Magnolia grandiflora\nn11710393\tumbrella tree, umbrella magnolia, elkwood, elk-wood, Magnolia tripetala\nn11710658\tearleaved umbrella tree, Magnolia fraseri\nn11710827\tcucumber tree, Magnolia acuminata\nn11710987\tlarge-leaved magnolia, large-leaved cucumber tree, great-leaved macrophylla, Magnolia macrophylla\nn11711289\tsaucer magnolia, Chinese magnolia, Magnolia soulangiana\nn11711537\tstar magnolia, Magnolia stellata\nn11711764\tsweet bay, swamp bay, swamp laurel, Magnolia virginiana\nn11711971\tmanglietia, genus Manglietia\nn11712282\ttulip tree, tulip poplar, yellow poplar, canary whitewood, Liriodendron tulipifera\nn11713164\tmoonseed\nn11713370\tcommon moonseed, Canada moonseed, yellow parilla, Menispermum canadense\nn11713763\tCarolina moonseed, Cocculus carolinus\nn11714382\tnutmeg, nutmeg tree, Myristica fragrans\nn11715430\twater nymph, fragrant water lily, pond lily, Nymphaea odorata\nn11715678\tEuropean white lily, Nymphaea alba\nn11716698\tsouthern spatterdock, Nuphar sagittifolium\nn11717399\tlotus, Indian lotus, sacred lotus, Nelumbo nucifera\nn11717577\twater chinquapin, American lotus, yanquapin, Nelumbo lutea\nn11718296\twater-shield, fanwort, Cabomba caroliniana\nn11718681\twater-shield, Brasenia schreberi, water-target\nn11719286\tpeony, paeony\nn11720353\tbuttercup, butterflower, butter-flower, crowfoot, goldcup, kingcup\nn11720643\tmeadow buttercup, tall buttercup, tall crowfoot, tall field buttercup, Ranunculus acris\nn11720891\twater crowfoot, water buttercup, Ranunculus 
aquatilis\nn11721337\tlesser celandine, pilewort, Ranunculus ficaria\nn11721642\tlesser spearwort, Ranunculus flammula\nn11722036\tgreater spearwort, Ranunculus lingua\nn11722342\twestern buttercup, Ranunculus occidentalis\nn11722466\tcreeping buttercup, creeping crowfoot, Ranunculus repens\nn11722621\tcursed crowfoot, celery-leaved buttercup, Ranunculus sceleratus\nn11722982\taconite\nn11723227\tmonkshood, helmetflower, helmet flower, Aconitum napellus\nn11723452\twolfsbane, wolfbane, wolf's bane, Aconitum lycoctonum\nn11723770\tbaneberry, cohosh, herb Christopher\nn11723986\tbaneberry\nn11724109\tred baneberry, redberry, red-berry, snakeberry, Actaea rubra\nn11724660\tpheasant's-eye, Adonis annua\nn11725015\tanemone, windflower\nn11725311\tAlpine anemone, mountain anemone, Anemone tetonensis\nn11725480\tCanada anemone, Anemone Canadensis\nn11725623\tthimbleweed, Anemone cylindrica\nn11725821\twood anemone, Anemone nemorosa\nn11725973\twood anemone, snowdrop, Anemone quinquefolia\nn11726145\tlongheaded thimbleweed, Anemone riparia\nn11726269\tsnowdrop anemone, snowdrop windflower, Anemone sylvestris\nn11726433\tVirginia thimbleweed, Anemone virginiana\nn11726707\true anemone, Anemonella thalictroides\nn11727091\tcolumbine, aquilegia, aquilege\nn11727358\tmeeting house, honeysuckle, Aquilegia canadensis\nn11727540\tblue columbine, Aquilegia caerulea, Aquilegia scopulorum calcarea\nn11727738\tgranny's bonnets, Aquilegia vulgaris\nn11728099\tmarsh marigold, kingcup, meadow bright, May blob, cowslip, water dragon, Caltha palustris\nn11728769\tAmerican bugbane, summer cohosh, Cimicifuga americana\nn11728945\tblack cohosh, black snakeroot, rattle-top, Cimicifuga racemosa\nn11729142\tfetid bugbane, foetid bugbane, Cimicifuga foetida\nn11729478\tclematis\nn11729860\tpine hyacinth, Clematis baldwinii, Viorna baldwinii\nn11730015\tblue jasmine, blue jessamine, curly clematis, marsh clematis, Clematis crispa\nn11730458\tgolden clematis, Clematis tangutica\nn11730602\tscarlet 
clematis, Clematis texensis\nn11730750\tleather flower, Clematis versicolor\nn11730933\tleather flower, vase-fine, vase vine, Clematis viorna\nn11731157\tvirgin's bower, old man's beard, devil's darning needle, Clematis virginiana\nn11731659\tpurple clematis, purple virgin's bower, mountain clematis, Clematis verticillaris\nn11732052\tgoldthread, golden thread, Coptis groenlandica, Coptis trifolia groenlandica\nn11732567\trocket larkspur, Consolida ambigua, Delphinium ajacis\nn11733054\tdelphinium\nn11733312\tlarkspur\nn11733548\twinter aconite, Eranthis hyemalis\nn11734493\tlenten rose, black hellebore, Helleborus orientalis\nn11734698\tgreen hellebore, Helleborus viridis\nn11735053\thepatica, liverleaf\nn11735570\tgoldenseal, golden seal, yellow root, turmeric root, Hydrastis Canadensis\nn11735977\tfalse rue anemone, false rue, Isopyrum biternatum\nn11736362\tgiant buttercup, Laccopetalum giganteum\nn11736694\tnigella\nn11736851\tlove-in-a-mist, Nigella damascena\nn11737009\tfennel flower, Nigella hispanica\nn11737125\tblack caraway, nutmeg flower, Roman coriander, Nigella sativa\nn11737534\tpasqueflower, pasque flower\nn11738547\tmeadow rue\nn11738997\tfalse bugbane, Trautvetteria carolinensis\nn11739365\tglobeflower, globe flower\nn11739978\twinter's bark, winter's bark tree, Drimys winteri\nn11740414\tpepper shrub, Pseudowintera colorata, Wintera colorata\nn11741175\tsweet gale, Scotch gale, Myrica gale\nn11741350\twax myrtle\nn11741575\tbay myrtle, puckerbush, Myrica cerifera\nn11741797\tbayberry, candleberry, swamp candleberry, waxberry, Myrica pensylvanica\nn11742310\tsweet fern, Comptonia peregrina, Comptonia asplenifolia\nn11742878\tcorkwood, corkwood tree, Leitneria floridana\nn11744011\tjointed rush, Juncus articulatus\nn11744108\ttoad rush, Juncus bufonius\nn11744471\tslender rush, Juncus tenuis\nn11745817\tzebrawood, zebrawood tree\nn11746600\tConnarus guianensis\nn11747468\tlegume, leguminous 
plant\nn11748002\tlegume\nn11748811\tpeanut\nn11749112\tgranadilla tree, granadillo, Brya ebenus\nn11749603\tarariba, Centrolobium robustum\nn11750173\ttonka bean, coumara nut\nn11750508\tcourbaril, Hymenaea courbaril\nn11750989\tmelilotus, melilot, sweet clover\nn11751765\tdarling pea, poison bush\nn11751974\tsmooth darling pea, Swainsona galegifolia\nn11752578\tclover, trefoil\nn11752798\talpine clover, Trifolium alpinum\nn11752937\thop clover, shamrock, lesser yellow trefoil, Trifolium dubium\nn11753143\tcrimson clover, Italian clover, Trifolium incarnatum\nn11753355\tred clover, purple clover, Trifolium pratense\nn11753562\tbuffalo clover, Trifolium reflexum, Trifolium stoloniferum\nn11753700\twhite clover, dutch clover, shamrock, Trifolium repens\nn11754893\tmimosa\nn11756092\tacacia\nn11756329\tshittah, shittah tree\nn11756669\twattle\nn11756870\tblack wattle, Acacia auriculiformis\nn11757017\tgidgee, stinking wattle, Acacia cambegei\nn11757190\tcatechu, Jerusalem thorn, Acacia catechu\nn11757653\tsilver wattle, mimosa, Acacia dealbata\nn11757851\thuisache, cassie, mimosa bush, sweet wattle, sweet acacia, scented wattle, flame tree, Acacia farnesiana\nn11758122\tlightwood, Acacia melanoxylon\nn11758276\tgolden wattle, Acacia pycnantha\nn11758483\tfever tree, Acacia xanthophloea\nn11758799\tcoralwood, coral-wood, red sandalwood, Barbados pride, peacock flower fence, Adenanthera pavonina\nn11759224\talbizzia, albizia\nn11759404\tsilk tree, Albizia julibrissin, Albizzia julibrissin\nn11759609\tsiris, siris tree, Albizia lebbeck, Albizzia lebbeck\nn11759853\train tree, saman, monkeypod, monkey pod, zaman, zamang, Albizia saman\nn11760785\tcalliandra\nn11761202\tconacaste, elephant's ear, Enterolobium cyclocarpa\nn11761650\tinga\nn11761836\tice-cream bean, Inga edulis\nn11762018\tguama, Inga laurina\nn11762433\tlead tree, white popinac, Leucaena glauca, Leucaena leucocephala\nn11762927\twild tamarind, Lysiloma latisiliqua, Lysiloma bahamensis\nn11763142\tsabicu, 
Lysiloma sabicu\nn11763625\tnitta tree\nn11763874\tParkia javanica\nn11764478\tmanila tamarind, camachile, huamachil, wild tamarind, Pithecellobium dulce\nn11764814\tcat's-claw, catclaw, black bead, Pithecellodium unguis-cati\nn11765568\thoney mesquite, Western honey mesquite, Prosopis glandulosa\nn11766046\talgarroba, algarrobilla, algarobilla\nn11766189\tscrew bean, screwbean, tornillo, screwbean mesquite, Prosopis pubescens\nn11766432\tscrew bean\nn11767354\tdogbane\nn11767877\tIndian hemp, rheumatism weed, Apocynum cannabinum\nn11768816\tbushman's poison, ordeal tree, Acocanthera oppositifolia, Acocanthera venenata\nn11769176\timpala lily, mock azalia, desert rose, kudu lily, Adenium obesum, Adenium multiflorum\nn11769621\tallamanda\nn11769803\tcommon allamanda, golden trumpet, Allamanda cathartica\nn11770256\tdita, dita bark, devil tree, Alstonia scholaris\nn11771147\tNepal trumpet flower, Easter lily vine, Beaumontia grandiflora\nn11771539\tcarissa\nn11771746\thedge thorn, natal plum, Carissa bispinosa\nn11771924\tnatal plum, amatungulu, Carissa macrocarpa, Carissa grandiflora\nn11772408\tperiwinkle, rose periwinkle, Madagascar periwinkle, old maid, Cape periwinkle, red periwinkle, cayenne jasmine, Catharanthus roseus, Vinca rosea\nn11772879\tivory tree, conessi, kurchi, kurchee, Holarrhena pubescens, Holarrhena antidysenterica\nn11773408\twhite dipladenia, Mandevilla boliviensis, Dipladenia boliviensis\nn11773628\tChilean jasmine, Mandevilla laxa\nn11773987\toleander, rose bay, Nerium oleander\nn11774513\tfrangipani, frangipanni\nn11774972\tWest Indian jasmine, pagoda tree, Plumeria alba\nn11775340\trauwolfia, rauvolfia\nn11775626\tsnakewood, Rauwolfia serpentina\nn11776234\tStrophanthus kombe\nn11777080\tyellow oleander, Thevetia peruviana, Thevetia neriifolia\nn11778092\tmyrtle, Vinca minor\nn11778257\tlarge periwinkle, Vinca major\nn11779300\tarum, aroid\nn11780148\tcuckoopint, lords-and-ladies, jack-in-the-pulpit, Arum maculatum\nn11780424\tblack calla, 
Arum palaestinum\nn11781176\tcalamus\nn11782036\talocasia, elephant's ear, elephant ear\nn11782266\tgiant taro, Alocasia macrorrhiza\nn11782761\tamorphophallus\nn11782878\tpungapung, telingo potato, elephant yam, Amorphophallus paeonifolius, Amorphophallus campanulatus\nn11783162\tdevil's tongue, snake palm, umbrella arum, Amorphophallus rivieri\nn11783920\tanthurium, tailflower, tail-flower\nn11784126\tflamingo flower, flamingo plant, Anthurium andraeanum, Anthurium scherzerianum\nn11784497\tjack-in-the-pulpit, Indian turnip, wake-robin, Arisaema triphyllum, Arisaema atrorubens\nn11785276\tfriar's-cowl, Arisarum vulgare\nn11785668\tcaladium\nn11785875\tCaladium bicolor\nn11786131\twild calla, water arum, Calla palustris\nn11786539\ttaro, taro plant, dalo, dasheen, Colocasia esculenta\nn11786843\ttaro, cocoyam, dasheen, eddo\nn11787190\tcryptocoryne, water trumpet\nn11788039\tdracontium\nn11788727\tgolden pothos, pothos, ivy arum, Epipremnum aureum, Scindapsus aureus\nn11789066\tskunk cabbage, Lysichiton americanum\nn11789438\tmonstera\nn11789589\tceriman, Monstera deliciosa\nn11789962\tnephthytis\nn11790089\tNephthytis afzelii\nn11790788\tarrow arum\nn11790936\tgreen arrow arum, tuckahoe, Peltandra virginica\nn11791341\tphilodendron\nn11791569\tpistia, water lettuce, water cabbage, Pistia stratiotes, Pistia stratoites\nn11792029\tpothos\nn11792341\tspathiphyllum, peace lily, spathe flower\nn11792742\tskunk cabbage, polecat weed, foetid pothos, Symplocarpus foetidus\nn11793403\tyautia, tannia, spoonflower, malanga, Xanthosoma sagittifolium, Xanthosoma atrovirens\nn11793779\tcalla lily, calla, arum lily, Zantedeschia aethiopica\nn11794024\tpink calla, Zantedeschia rehmanii\nn11794139\tgolden calla\nn11794519\tduckweed\nn11795049\tcommon duckweed, lesser duckweed, Lemna minor\nn11795216\tstar-duckweed, Lemna trisulca\nn11795580\tgreat duckweed, water flaxseed, Spirodela polyrrhiza\nn11796005\twatermeal\nn11796188\tcommon wolffia, Wolffia 
columbiana\nn11797321\taralia\nn11797508\tAmerican angelica tree, devil's walking stick, Hercules'-club, Aralia spinosa\nn11797981\tAmerican spikenard, petty morel, life-of-man, Aralia racemosa\nn11798270\tbristly sarsaparilla, bristly sarsparilla, dwarf elder, Aralia hispida\nn11798496\tJapanese angelica tree, Aralia elata\nn11798688\tChinese angelica, Chinese angelica tree, Aralia stipulata\nn11798978\tivy, common ivy, English ivy, Hedera helix\nn11799331\tpuka, Meryta sinclairii\nn11799732\tginseng, nin-sin, Panax ginseng, Panax schinseng, Panax pseudoginseng\nn11800236\tginseng\nn11800565\tumbrella tree, Schefflera actinophylla, Brassaia actinophylla\nn11801392\tbirthwort, Aristolochia clematitis\nn11801665\tDutchman's-pipe, pipe vine, Aristolochia macrophylla, Aristolochia durior\nn11801891\tVirginia snakeroot, Virginia serpentaria, Virginia serpentary, Aristolochia serpentaria\nn11802410\tCanada ginger, black snakeroot, Asarum canadense\nn11802586\theartleaf, heart-leaf, Asarum virginicum\nn11802800\theartleaf, heart-leaf, Asarum shuttleworthii\nn11802995\tasarabacca, Asarum europaeum\nn11805255\tcaryophyllaceous plant\nn11805544\tcorn cockle, corn campion, crown-of-the-field, Agrostemma githago\nn11805956\tsandwort\nn11806219\tmountain sandwort, mountain starwort, mountain daisy, Arenaria groenlandica\nn11806369\tpine-barren sandwort, longroot, Arenaria caroliniana\nn11806521\tseabeach sandwort, Arenaria peploides\nn11806679\trock sandwort, Arenaria stricta\nn11806814\tthyme-leaved sandwort, Arenaria serpyllifolia\nn11807108\tmouse-ear chickweed, mouse eared chickweed, mouse ear, clammy chickweed, chickweed\nn11807525\tsnow-in-summer, love-in-a-mist, Cerastium tomentosum\nn11807696\tAlpine mouse-ear, Arctic mouse-ear, Cerastium alpinum\nn11807979\tpink, garden pink\nn11808299\tsweet William, Dianthus barbatus\nn11808468\tcarnation, clove pink, gillyflower, Dianthus caryophyllus\nn11808721\tchina pink, rainbow pink, Dianthus chinensis\nn11808932\tJapanese 
pink, Dianthus chinensis heddewigii\nn11809094\tmaiden pink, Dianthus deltoides\nn11809271\tcheddar pink, Diangus gratianopolitanus\nn11809437\tbutton pink, Dianthus latifolius\nn11809594\tcottage pink, grass pink, Dianthus plumarius\nn11809754\tfringed pink, Dianthus supurbus\nn11810030\tdrypis\nn11810358\tbaby's breath, babies'-breath, Gypsophila paniculata\nn11811059\tcoral necklace, Illecebrum verticullatum\nn11811473\tlychnis, catchfly\nn11811706\tragged robin, cuckoo flower, Lychnis flos-cuculi, Lychins floscuculi\nn11811921\tscarlet lychnis, maltese cross, Lychins chalcedonica\nn11812094\tmullein pink, rose campion, gardener's delight, dusty miller, Lychnis coronaria\nn11812910\tsandwort, Moehringia lateriflora\nn11813077\tsandwort, Moehringia mucosa\nn11814584\tsoapwort, hedge pink, bouncing Bet, bouncing Bess, Saponaria officinalis\nn11814996\tknawel, knawe, Scleranthus annuus\nn11815491\tsilene, campion, catchfly\nn11815721\tmoss campion, Silene acaulis\nn11815918\twild pink, Silene caroliniana\nn11816121\tred campion, red bird's eye, Silene dioica, Lychnis dioica\nn11816336\twhite campion, evening lychnis, white cockle, bladder campion, Silene latifolia, Lychnis alba\nn11816649\tfire pink, Silene virginica\nn11816829\tbladder campion, Silene uniflora, Silene vulgaris\nn11817160\tcorn spurry, corn spurrey, Spergula arvensis\nn11817501\tsand spurry, sea spurry, Spergularia rubra\nn11817914\tchickweed\nn11818069\tcommon chickweed, Stellaria media\nn11818636\tcowherb, cow cockle, Vaccaria hispanica, Vaccaria pyramidata, Saponaria vaccaria\nn11819509\tHottentot fig, Hottentot's fig, sour fig, Carpobrotus edulis, Mesembryanthemum edule\nn11819912\tlivingstone daisy, Dorotheanthus bellidiformis\nn11820965\tfig marigold, pebble plant\nn11821184\tice plant, icicle plant, Mesembryanthemum crystallinum\nn11822300\tNew Zealand spinach, Tetragonia tetragonioides, Tetragonia expansa\nn11823043\tamaranth\nn11823305\tamaranth\nn11823436\ttumbleweed, Amaranthus albus, 
Amaranthus graecizans\nn11823756\tprince's-feather, gentleman's-cane, prince's-plume, red amaranth, purple amaranth, Amaranthus cruentus, Amaranthus hybridus hypochondriacus, Amaranthus hybridus erythrostachys\nn11824146\tpigweed, Amaranthus hypochondriacus\nn11824344\tthorny amaranth, Amaranthus spinosus\nn11824747\talligator weed, alligator grass, Alternanthera philoxeroides\nn11825351\tcockscomb, common cockscomb, Celosia cristata, Celosia argentea cristata\nn11825749\tcottonweed\nn11826198\tglobe amaranth, bachelor's button, Gomphrena globosa\nn11826569\tbloodleaf\nn11827541\tsaltwort, Batis maritima\nn11828577\tlamb's-quarters, pigweed, wild spinach, Chenopodium album\nn11828973\tgood-king-henry, allgood, fat hen, wild spinach, Chenopodium bonus-henricus\nn11829205\tJerusalem oak, feather geranium, Mexican tea, Chenopodium botrys, Atriplex mexicana\nn11829672\toak-leaved goosefoot, oakleaf goosefoot, Chenopodium glaucum\nn11829922\tsowbane, red goosefoot, Chenopodium hybridum\nn11830045\tnettle-leaved goosefoot, nettleleaf goosefoot, Chenopodium murale\nn11830252\tred goosefoot, French spinach, Chenopodium rubrum\nn11830400\tstinking goosefoot, Chenopodium vulvaria\nn11830714\torach, orache\nn11830906\tsaltbush\nn11831100\tgarden orache, mountain spinach, Atriplex hortensis\nn11831297\tdesert holly, Atriplex hymenelytra\nn11831521\tquail bush, quail brush, white thistle, Atriplex lentiformis\nn11832214\tbeet, common beet, Beta vulgaris\nn11832480\tbeetroot, Beta vulgaris rubra\nn11832671\tchard, Swiss chard, spinach beet, leaf beet, chard plant, Beta vulgaris cicla\nn11832899\tmangel-wurzel, mangold-wurzel, mangold, Beta vulgaris vulgaris\nn11833373\twinged pigweed, tumbleweed, Cycloloma atriplicifolium\nn11833749\thalogeton, Halogeton glomeratus\nn11834272\tglasswort, samphire, Salicornia europaea\nn11834654\tsaltwort, barilla, glasswort, kali, kelpwort, Salsola kali, Salsola soda\nn11834890\tRussian thistle, Russian tumbleweed, Russian cactus, tumbleweed, 
Salsola kali tenuifolia\nn11835251\tgreasewood, black greasewood, Sarcobatus vermiculatus\nn11836327\tscarlet musk flower, Nyctaginia capitata\nn11836722\tsand verbena\nn11837204\tsweet sand verbena, Abronia fragrans\nn11837351\tyellow sand verbena, Abronia latifolia\nn11837562\tbeach pancake, Abronia maritima\nn11837743\tbeach sand verbena, pink sand verbena, Abronia umbellata\nn11837970\tdesert sand verbena, Abronia villosa\nn11838413\ttrailing four o'clock, trailing windmills, Allionia incarnata\nn11838916\tbougainvillea\nn11839460\tumbrellawort\nn11839568\tfour o'clock\nn11839823\tcommon four-o'clock, marvel-of-Peru, Mirabilis jalapa, Mirabilis uniflora\nn11840067\tCalifornia four o'clock, Mirabilis laevis, Mirabilis californica\nn11840246\tsweet four o'clock, maravilla, Mirabilis longiflora\nn11840476\tdesert four o'clock, Colorado four o'clock, maravilla, Mirabilis multiflora\nn11840764\tmountain four o'clock, Mirabilis oblongifolia\nn11841247\tcockspur, Pisonia aculeata\nn11843441\trattail cactus, rat's-tail cactus, Aporocactus flagelliformis\nn11844371\tsaguaro, sahuaro, Carnegiea gigantea\nn11844892\tnight-blooming cereus\nn11845557\techinocactus, barrel cactus\nn11845793\thedgehog cactus\nn11845913\tgolden barrel cactus, Echinocactus grusonii\nn11846312\thedgehog cereus\nn11846425\trainbow cactus\nn11846765\tepiphyllum, orchid cactus\nn11847169\tbarrel cactus\nn11848479\tnight-blooming cereus\nn11848867\tchichipe, Lemaireocereus chichipe\nn11849271\tmescal, mezcal, peyote, Lophophora williamsii\nn11849467\tmescal button, sacred mushroom, magic mushroom\nn11849871\tmammillaria\nn11849983\tfeather ball, Mammillaria plumosa\nn11850521\tgarambulla, garambulla cactus, Myrtillocactus geometrizans\nn11850918\tKnowlton's cactus, Pediocactus knowltonii\nn11851258\tnopal\nn11851578\tprickly pear, prickly pear cactus\nn11851839\tcholla, Opuntia cholla\nn11852028\tnopal, Opuntia lindheimeri\nn11852148\ttuna, Opuntia tuna\nn11852531\tBarbados gooseberry, 
Barbados-gooseberry vine, Pereskia aculeata\nn11853079\tmistletoe cactus\nn11853356\tChristmas cactus, Schlumbergera buckleyi, Schlumbergera baridgesii\nn11853813\tnight-blooming cereus\nn11854479\tcrab cactus, Thanksgiving cactus, Zygocactus truncatus, Schlumbergera truncatus\nn11855274\tpokeweed\nn11855435\tIndian poke, Phytolacca acinosa\nn11855553\tpoke, pigeon berry, garget, scoke, Phytolacca americana\nn11855842\tombu, bella sombra, Phytolacca dioica\nn11856573\tbloodberry, blood berry, rougeberry, rouge plant, Rivina humilis\nn11857696\tportulaca\nn11857875\trose moss, sun plant, Portulaca grandiflora\nn11858077\tcommon purslane, pussley, pussly, verdolagas, Portulaca oleracea\nn11858703\trock purslane\nn11858814\tred maids, redmaids, Calandrinia ciliata\nn11859275\tCarolina spring beauty, Claytonia caroliniana\nn11859472\tspring beauty, Clatonia lanceolata\nn11859737\tVirginia spring beauty, Claytonia virginica\nn11860208\tsiskiyou lewisia, Lewisia cotyledon\nn11860555\tbitterroot, Lewisia rediviva\nn11861238\tbroad-leaved montia, Montia cordifolia\nn11861487\tblinks, blinking chickweed, water chickweed, Montia lamprosperma\nn11861641\ttoad lily, Montia chamissoi\nn11861853\twinter purslane, miner's lettuce, Cuban spinach, Montia perfoliata\nn11862835\tflame flower, flame-flower, flameflower, Talinum aurantiacum\nn11863467\tpigmy talinum, Talinum brevifolium\nn11863877\tjewels-of-opar, Talinum paniculatum\nn11865071\tcaper\nn11865276\tnative pomegranate, Capparis arborea\nn11865429\tcaper tree, Jamaica caper tree, Capparis cynophallophora\nn11865574\tcaper tree, bay-leaved caper, Capparis flexuosa\nn11865874\tcommon caper, Capparis spinosa\nn11866248\tspiderflower, cleome\nn11866706\tRocky Mountain bee plant, stinking clover, Cleome serrulata\nn11867311\tclammyweed, Polanisia graveolens, Polanisia dodecandra\nn11868814\tcrucifer, cruciferous plant\nn11869351\tcress, cress plant\nn11869689\twatercress\nn11870044\tstonecress, stone cress\nn11870418\tgarlic 
mustard, hedge garlic, sauce-alone, jack-by-the-hedge, Alliaria officinalis\nn11870747\talyssum, madwort\nn11871059\trose of Jericho, resurrection plant, Anastatica hierochuntica\nn11871496\tArabidopsis thaliana, mouse-ear cress\nn11871748\tArabidopsis lyrata\nn11872146\trock cress, rockcress\nn11872324\tsicklepod, Arabis Canadensis\nn11872658\ttower mustard, tower cress, Turritis glabra, Arabis glabra\nn11873182\thorseradish, horseradish root\nn11873612\twinter cress, St. Barbara's herb, scurvy grass\nn11874081\tyellow rocket, rockcress, rocket cress, Barbarea vulgaris, Sisymbrium barbarea\nn11874423\thoary alison, hoary alyssum, Berteroa incana\nn11874878\tbuckler mustard, Biscutalla laevigata\nn11875523\twild cabbage, Brassica oleracea\nn11875691\tcabbage, cultivated cabbage, Brassica oleracea\nn11875938\thead cabbage, head cabbage plant, Brassica oleracea capitata\nn11876204\tsavoy cabbage\nn11876432\tbrussels sprout, Brassica oleracea gemmifera\nn11876634\tcauliflower, Brassica oleracea botrytis\nn11876803\tbroccoli, Brassica oleracea italica\nn11877193\tcollard\nn11877283\tkohlrabi, Brassica oleracea gongylodes\nn11877473\tturnip plant\nn11877646\tturnip, white turnip, Brassica rapa\nn11877860\trutabaga, turnip cabbage, swede, Swedish turnip, rutabaga plant, Brassica napus napobrassica\nn11878101\tbroccoli raab, broccoli rabe, Brassica rapa ruvo\nn11878283\tmustard\nn11878633\tchinese mustard, indian mustard, leaf mustard, gai choi, Brassica juncea\nn11879054\tbok choy, bok choi, pakchoi, pak choi, Chinese white cabbage, Brassica rapa chinensis\nn11879722\trape, colza, Brassica napus\nn11879895\trapeseed\nn11881189\tshepherd's purse, shepherd's pouch, Capsella bursa-pastoris\nn11882074\tlady's smock, cuckooflower, cuckoo flower, meadow cress, Cardamine pratensis\nn11882237\tcoral-root bittercress, coralroot, coralwort, Cardamine bulbifera, Dentaria bulbifera\nn11882426\tcrinkleroot, crinkle-root, crinkle root, pepper root, toothwort, Cardamine diphylla, 
Dentaria diphylla\nn11882636\tAmerican watercress, mountain watercress, Cardamine rotundifolia\nn11882821\tspring cress, Cardamine bulbosa\nn11882972\tpurple cress, Cardamine douglasii\nn11883328\twallflower, Cheiranthus cheiri, Erysimum cheiri\nn11883628\tprairie rocket\nn11883945\tscurvy grass, common scurvy grass, Cochlearia officinalis\nn11884384\tsea kale, sea cole, Crambe maritima\nn11884967\ttansy mustard, Descurainia pinnata\nn11885856\tdraba\nn11887119\twallflower\nn11887310\tprairie rocket\nn11887476\tSiberian wall flower, Erysimum allionii, Cheiranthus allionii\nn11887750\twestern wall flower, Erysimum asperum, Cheiranthus asperus, Erysimum arkansanum\nn11888061\twormseed mustard, Erysimum cheiranthoides\nn11888424\theliophila\nn11888800\tdamask violet, Dame's violet, sweet rocket, Hesperis matronalis\nn11889205\ttansy-leaved rocket, Hugueninia tanacetifolia, Sisymbrium tanacetifolia\nn11889619\tcandytuft\nn11890022\twoad\nn11890150\tdyer's woad, Isatis tinctoria\nn11890884\tbladderpod\nn11891175\tsweet alyssum, sweet alison, Lobularia maritima\nn11892029\tMalcolm stock, stock\nn11892181\tVirginian stock, Virginia stock, Malcolmia maritima\nn11892637\tstock, gillyflower\nn11892817\tbrompton stock, Matthiola incana\nn11893640\tbladderpod\nn11893916\tchamois cress, Pritzelago alpina, Lepidium alpina\nn11894327\tradish plant, radish\nn11894558\tjointed charlock, wild radish, wild rape, runch, Raphanus raphanistrum\nn11894770\tradish, Raphanus sativus\nn11895092\tradish, daikon, Japanese radish, Raphanus sativus longipinnatus\nn11895472\tmarsh cress, yellow watercress, Rorippa islandica\nn11895714\tgreat yellowcress, Rorippa amphibia, Nasturtium amphibium\nn11896141\tschizopetalon, Schizopetalon walkeri\nn11896722\tfield mustard, wild mustard, charlock, chadlock, Brassica kaber, Sinapis arvensis\nn11897116\thedge mustard, Sisymbrium officinale\nn11897466\tdesert plume, prince's-plume, Stanleya pinnata, Cleome pinnata\nn11898639\tpennycress\nn11898775\tfield 
pennycress, French weed, fanweed, penny grass, stinkweed, mithridate mustard, Thlaspi arvense\nn11899223\tfringepod, lacepod\nn11899762\tbladderpod\nn11899921\twasabi\nn11900569\tpoppy\nn11901294\tIceland poppy, Papaver alpinum\nn11901452\twestern poppy, Papaver californicum\nn11901597\tprickly poppy, Papaver argemone\nn11901759\tIceland poppy, arctic poppy, Papaver nudicaule\nn11901977\toriental poppy, Papaver orientale\nn11902200\tcorn poppy, field poppy, Flanders poppy, Papaver rhoeas\nn11902389\topium poppy, Papaver somniferum\nn11902709\tprickly poppy, argemone, white thistle, devil's fig\nn11902982\tMexican poppy, Argemone mexicana\nn11903333\tbocconia, tree celandine, Bocconia frutescens\nn11903671\tcelandine, greater celandine, swallowwort, swallow wort, Chelidonium majus\nn11904109\tcorydalis\nn11904274\tclimbing corydalis, Corydalis claviculata, Fumaria claviculata\nn11905392\tCalifornia poppy, Eschscholtzia californica\nn11905749\thorn poppy, horned poppy, yellow horned poppy, sea poppy, Glaucium flavum\nn11906127\tgolden cup, Mexican tulip poppy, Hunnemania fumariifolia\nn11906514\tplume poppy, bocconia, Macleaya cordata\nn11906917\tblue poppy, Meconopsis betonicifolia\nn11907100\tWelsh poppy, Meconopsis cambrica\nn11907405\tcreamcups, Platystemon californicus\nn11907689\tmatilija poppy, California tree poppy, Romneya coulteri\nn11908549\twind poppy, flaming poppy, Stylomecon heterophyllum, Papaver heterophyllum\nn11908846\tcelandine poppy, wood poppy, Stylophorum diphyllum\nn11909864\tclimbing fumitory, Allegheny vine, Adlumia fungosa, Fumaria fungosa\nn11910271\tbleeding heart, lyreflower, lyre-flower, Dicentra spectabilis\nn11910460\tDutchman's breeches, Dicentra cucullaria\nn11910666\tsquirrel corn, Dicentra canadensis\nn11915214\tcomposite, composite plant\nn11915658\tcompass plant, compass flower\nn11915899\teverlasting, everlasting flower\nn11916467\tachillea\nn11916696\tyarrow, milfoil, Achillea millefolium\nn11917407\tpink-and-white 
everlasting, pink paper daisy, Acroclinium roseum\nn11917835\twhite snakeroot, white sanicle, Ageratina altissima, Eupatorium rugosum\nn11918286\tageratum\nn11918473\tcommon ageratum, Ageratum houstonianum\nn11918808\tsweet sultan, Amberboa moschata, Centaurea moschata\nn11919447\tragweed, ambrosia, bitterweed\nn11919761\tcommon ragweed, Ambrosia artemisiifolia\nn11919975\tgreat ragweed, Ambrosia trifida\nn11920133\twestern ragweed, perennial ragweed, Ambrosia psilostachya\nn11920498\tammobium\nn11920663\twinged everlasting, Ammobium alatum\nn11920998\tpellitory, pellitory-of-Spain, Anacyclus pyrethrum\nn11921395\tpearly everlasting, cottonweed, Anaphalis margaritacea\nn11921792\tandryala\nn11922661\tplantain-leaved pussytoes\nn11922755\tfield pussytoes\nn11922839\tsolitary pussytoes\nn11922926\tmountain everlasting\nn11923174\tmayweed, dog fennel, stinking mayweed, stinking chamomile, Anthemis cotula\nn11923397\tyellow chamomile, golden marguerite, dyers' chamomile, Anthemis tinctoria\nn11923637\tcorn chamomile, field chamomile, corn mayweed, Anthemis arvensis\nn11924014\twoolly daisy, dwarf daisy, Antheropeas wallacei, Eriophyllum wallacei\nn11924445\tburdock, clotbur\nn11924849\tgreat burdock, greater burdock, cocklebur, Arctium lappa\nn11925303\tAfrican daisy\nn11925450\tblue-eyed African daisy, Arctotis stoechadifolia, Arctotis venusta\nn11925898\tmarguerite, marguerite daisy, Paris daisy, Chrysanthemum frutescens, Argyranthemum frutescens\nn11926365\tsilversword, Argyroxiphium sandwicense\nn11926833\tarnica\nn11926976\theartleaf arnica, Arnica cordifolia\nn11927215\tArnica montana\nn11927740\tlamb succory, dwarf nipplewort, Arnoseris minima\nn11928352\tartemisia\nn11928858\tmugwort\nn11929743\tsweet wormwood, Artemisia annua\nn11930038\tfield wormwood, Artemisia campestris\nn11930203\ttarragon, estragon, Artemisia dracunculus\nn11930353\tsand sage, silvery wormwood, Artemisia filifolia\nn11930571\twormwood sage, prairie sagewort, Artemisia 
frigida\nn11930788\twestern mugwort, white sage, cudweed, prairie sage, Artemisia ludoviciana, Artemisia gnaphalodes\nn11930994\tRoman wormwood, Artemis pontica\nn11931135\tbud brush, bud sagebrush, Artemis spinescens\nn11931540\tcommon mugwort, Artemisia vulgaris\nn11931918\taster\nn11932745\twood aster\nn11932927\twhorled aster, Aster acuminatus\nn11933099\theath aster, Aster arenosus\nn11933257\theart-leaved aster, Aster cordifolius\nn11933387\twhite wood aster, Aster divaricatus\nn11933546\tbushy aster, Aster dumosus\nn11933728\theath aster, Aster ericoides\nn11933903\twhite prairie aster, Aster falcatus\nn11934041\tstiff aster, Aster linarifolius\nn11934239\tgoldilocks, goldilocks aster, Aster linosyris, Linosyris vulgaris\nn11934463\tlarge-leaved aster, Aster macrophyllus\nn11934616\tNew England aster, Aster novae-angliae\nn11934807\tMichaelmas daisy, New York aster, Aster novi-belgii\nn11935027\tupland white aster, Aster ptarmicoides\nn11935187\tShort's aster, Aster shortii\nn11935330\tsea aster, sea starwort, Aster tripolium\nn11935469\tprairie aster, Aster turbinellis\nn11935627\tannual salt-marsh aster\nn11935715\taromatic aster\nn11935794\tarrow leaved aster\nn11935877\tazure aster\nn11935953\tbog aster\nn11936027\tcrooked-stemmed aster\nn11936113\tEastern silvery aster\nn11936199\tflat-topped white aster\nn11936287\tlate purple aster\nn11936369\tpanicled aster\nn11936448\tperennial salt marsh aster\nn11936539\tpurple-stemmed aster\nn11936624\trough-leaved aster\nn11936707\trush aster\nn11936782\tSchreiber's aster\nn11936864\tsmall white aster\nn11936946\tsmooth aster\nn11937023\tsouthern aster\nn11937102\tstarved aster, calico aster\nn11937195\ttradescant's aster\nn11937278\twavy-leaved aster\nn11937360\tWestern silvery aster\nn11937446\twillow aster\nn11937692\tayapana, Ayapana triplinervis, Eupatorium aya-pana\nn11938556\tmule fat, Baccharis viminea\nn11939180\tbalsamroot\nn11939491\tdaisy\nn11939699\tcommon daisy, English daisy, Bellis 
perennis\nn11940006\tbur marigold, burr marigold, beggar-ticks, beggar's-ticks, sticktight\nn11940349\tSpanish needles, Bidens bipinnata\nn11940599\ttickseed sunflower, Bidens coronata, Bidens trichosperma\nn11940750\tEuropean beggar-ticks, trifid beggar-ticks, trifid bur marigold, Bidens tripartita\nn11941094\tslender knapweed\nn11941478\tfalse chamomile\nn11941924\tSwan River daisy, Brachycome Iberidifolia\nn11942659\twoodland oxeye, Buphthalmum salicifolium\nn11943133\tIndian plantain\nn11943407\tcalendula\nn11943660\tcommon marigold, pot marigold, ruddles, Scotch marigold, Calendula officinalis\nn11943992\tChina aster, Callistephus chinensis\nn11944196\tthistle\nn11944751\twelted thistle, Carduus crispus\nn11944954\tmusk thistle, nodding thistle, Carduus nutans\nn11945367\tcarline thistle\nn11945514\tstemless carline thistle, Carlina acaulis\nn11945783\tcommon carline thistle, Carlina vulgaris\nn11946051\tsafflower, false saffron, Carthamus tinctorius\nn11946313\tsafflower seed\nn11946727\tcatananche\nn11946918\tblue succory, cupid's dart, Catananche caerulea\nn11947251\tcentaury\nn11947629\tdusty miller, Centaurea cineraria, Centaurea gymnocarpa\nn11947802\tcornflower, bachelor's button, bluebottle, Centaurea cyanus\nn11948044\tstar-thistle, caltrop, Centauria calcitrapa\nn11948264\tknapweed\nn11948469\tsweet sultan, Centaurea imperialis\nn11948864\tgreat knapweed, greater knapweed, Centaurea scabiosa\nn11949015\tBarnaby's thistle, yellow star-thistle, Centaurea solstitialis\nn11949402\tchamomile, camomile, Chamaemelum nobilis, Anthemis nobilis\nn11949857\tchaenactis\nn11950345\tchrysanthemum\nn11950686\tcorn marigold, field marigold, Chrysanthemum segetum\nn11950877\tcrown daisy, Chrysanthemum coronarium\nn11951052\tchop-suey greens, tong ho, shun giku, Chrysanthemum coronarium spatiosum\nn11951511\tgolden aster\nn11951820\tMaryland golden aster, Chrysopsis mariana\nn11952346\tgoldenbush\nn11952541\trabbit brush, rabbit bush, Chrysothamnus 
nauseosus\nn11953038\tchicory, succory, chicory plant, Cichorium intybus\nn11953339\tendive, witloof, Cichorium endivia\nn11953610\tchicory, chicory root\nn11953884\tplume thistle, plumed thistle\nn11954161\tCanada thistle, creeping thistle, Cirsium arvense\nn11954345\tfield thistle, Cirsium discolor\nn11954484\twoolly thistle, Cirsium flodmanii\nn11954642\tEuropean woolly thistle, Cirsium eriophorum\nn11954798\tmelancholy thistle, Cirsium heterophylum, Cirsium helenioides\nn11955040\tbrook thistle, Cirsium rivulare\nn11955153\tbull thistle, boar thistle, spear thistle, Cirsium vulgare, Cirsium lanceolatum\nn11955532\tblessed thistle, sweet sultan, Cnicus benedictus\nn11955896\tmistflower, mist-flower, ageratum, Conoclinium coelestinum, Eupatorium coelestinum\nn11956348\thorseweed, Canadian fleabane, fleabane, Conyza canadensis, Erigeron canadensis\nn11956850\tcoreopsis, tickseed, tickweed, tick-weed\nn11957317\tgiant coreopsis, Coreopsis gigantea\nn11957514\tsea dahlia, Coreopsis maritima\nn11957678\tcalliopsis, Coreopsis tinctoria\nn11958080\tcosmos, cosmea\nn11958499\tbrass buttons, Cotula coronopifolia\nn11958888\tbilly buttons\nn11959259\thawk's-beard, hawk's-beards\nn11959632\tartichoke, globe artichoke, artichoke plant, Cynara scolymus\nn11959862\tcardoon, Cynara cardunculus\nn11960245\tdahlia, Dahlia pinnata\nn11960673\tGerman ivy, Delairea odorata, Senecio milkanioides\nn11961100\tflorist's chrysanthemum, florists' chrysanthemum, mum, Dendranthema grandifloruom, Chrysanthemum morifolium\nn11961446\tcape marigold, sun marigold, star of the veldt\nn11961871\tleopard's-bane, leopardbane\nn11962272\tconeflower\nn11962667\tglobe thistle\nn11962994\telephant's-foot\nn11963572\ttassel flower, Emilia sagitta\nn11963932\tbrittlebush, brittle bush, incienso, Encelia farinosa\nn11964446\tsunray, Enceliopsis nudicaulis\nn11964848\tengelmannia\nn11965218\tfireweed, Erechtites hieracifolia\nn11965627\tfleabane\nn11965962\tblue fleabane, Erigeron acer\nn11966083\tdaisy 
fleabane, Erigeron annuus\nn11966215\torange daisy, orange fleabane, Erigeron aurantiacus\nn11966385\tspreading fleabane, Erigeron divergens\nn11966617\tseaside daisy, beach aster, Erigeron glaucous\nn11966896\tPhiladelphia fleabane, Erigeron philadelphicus\nn11967142\trobin's plantain, Erigeron pulchellus\nn11967315\tshowy daisy, Erigeron speciosus\nn11967744\twoolly sunflower\nn11967878\tgolden yarrow, Eriophyllum lanatum\nn11968519\tdog fennel, Eupatorium capillifolium\nn11968704\tJoe-Pye weed, spotted Joe-Pye weed, Eupatorium maculatum\nn11968931\tboneset, agueweed, thoroughwort, Eupatorium perfoliatum\nn11969166\tJoe-Pye weed, purple boneset, trumpet weed, marsh milkweed, Eupatorium purpureum\nn11969607\tblue daisy, blue marguerite, Felicia amelloides\nn11969806\tkingfisher daisy, Felicia bergeriana\nn11970101\tcotton rose, cudweed, filago\nn11970298\therba impia, Filago germanica\nn11970586\tgaillardia\nn11971248\tgazania\nn11971406\ttreasure flower, Gazania rigens\nn11971783\tAfrican daisy\nn11971927\tBarberton daisy, Transvaal daisy, Gerbera jamesonii\nn11972291\tdesert sunflower, Gerea canescens\nn11972759\tcudweed\nn11972959\tchafeweed, wood cudweed, Gnaphalium sylvaticum\nn11973341\tgumweed, gum plant, tarweed, rosinweed\nn11973634\tGrindelia robusta\nn11973749\tcurlycup gumweed, Grindelia squarrosa\nn11974373\tlittle-head snakeweed, Gutierrezia microcephala\nn11974557\trabbitweed, rabbit-weed, snakeweed, broom snakeweed, broom snakeroot, turpentine weed, Gutierrezia sarothrae\nn11974888\tbroomweed, broom-weed, Gutierrezia texana\nn11975254\tvelvet plant, purple velvet plant, royal velvet plant, Gynura aurantiaca\nn11976170\tgoldenbush\nn11976314\tcamphor daisy, Haplopappus phyllocephalus\nn11976511\tyellow spiny daisy, Haplopappus spinulosus\nn11976933\thoary golden bush, Hazardia cana\nn11977303\tsneezeweed\nn11977660\torange sneezeweed, owlclaws, Helenium hoopesii\nn11977887\trosilla, Helenium puberulum\nn11978233\tsunflower, 
helianthus\nn11978551\tswamp sunflower, Helianthus angustifolius\nn11978713\tcommon sunflower, mirasol, Helianthus annuus\nn11978961\tgiant sunflower, tall sunflower, Indian potato, Helianthus giganteus\nn11979187\tshowy sunflower, Helianthus laetiflorus\nn11979354\tMaximilian's sunflower, Helianthus maximilianii\nn11979527\tprairie sunflower, Helianthus petiolaris\nn11979715\tJerusalem artichoke, girasol, Jerusalem artichoke sunflower, Helianthus tuberosus\nn11979964\tJerusalem artichoke\nn11980318\tstrawflower, golden everlasting, yellow paper daisy, Helichrysum bracteatum\nn11980682\theliopsis, oxeye\nn11981192\tstrawflower\nn11981475\thairy golden aster, prairie golden aster, Heterotheca villosa, Chrysopsis villosa\nn11982115\thawkweed\nn11982545\trattlesnake weed, Hieracium venosum\nn11982939\talpine coltsfoot, Homogyne alpina, Tussilago alpina\nn11983375\talpine gold, alpine hulsea, Hulsea algida\nn11983606\tdwarf hulsea, Hulsea nana\nn11984144\tcat's-ear, California dandelion, capeweed, gosmore, Hypochaeris radicata\nn11984542\tinula\nn11985053\tmarsh elder, iva\nn11985321\tburweed marsh elder, false ragweed, Iva xanthifolia\nn11985739\tkrigia\nn11985903\tdwarf dandelion, Krigia dandelion, Krigia bulbosa\nn11986511\tgarden lettuce, common lettuce, Lactuca sativa\nn11986729\tcos lettuce, romaine lettuce, Lactuca sativa longifolia\nn11987126\tleaf lettuce, Lactuca sativa crispa\nn11987349\tceltuce, stem lettuce, Lactuca sativa asparagina\nn11987511\tprickly lettuce, horse thistle, Lactuca serriola, Lactuca scariola\nn11988132\tgoldfields, Lasthenia chrysostoma\nn11988596\ttidytips, tidy tips, Layia platyglossa\nn11988893\thawkbit\nn11989087\tfall dandelion, arnica bud, Leontodon autumnalis\nn11989393\tedelweiss, Leontopodium alpinum\nn11989869\toxeye daisy, ox-eyed daisy, marguerite, moon daisy, white daisy, Leucanthemum vulgare, Chrysanthemum leucanthemum\nn11990167\toxeye daisy, Leucanthemum maximum, Chrysanthemum maximum\nn11990313\tshasta daisy, 
Leucanthemum superbum, Chrysanthemum maximum maximum\nn11990627\tPyrenees daisy, Leucanthemum lacustre, Chrysanthemum lacustre\nn11990920\tnorth island edelweiss, Leucogenes leontopodium\nn11991263\tblazing star, button snakeroot, gayfeather, gay-feather, snakeroot\nn11991549\tdotted gayfeather, Liatris punctata\nn11991777\tdense blazing star, Liatris pycnostachya\nn11992479\tTexas star, Lindheimera texana\nn11992806\tAfrican daisy, yellow ageratum, Lonas inodora, Lonas annua\nn11993203\ttahoka daisy, tansy leaf aster, Machaeranthera tanacetifolia\nn11993444\tsticky aster, Machaeranthera bigelovii\nn11993675\tMojave aster, Machaeranthera tortifoloia\nn11994150\ttarweed\nn11995092\tsweet false chamomile, wild chamomile, German chamomile, Matricaria recutita, Matricaria chamomilla\nn11995396\tpineapple weed, rayless chamomile, Matricaria matricarioides\nn11996251\tclimbing hempweed, climbing boneset, wild climbing hempweed, climbing hemp-vine, Mikania scandens\nn11996677\tmutisia\nn11997032\trattlesnake root\nn11997160\twhite lettuce, cankerweed, Nabalus alba, Prenanthes alba\nn11997969\tdaisybush, daisy-bush, daisy bush\nn11998492\tNew Zealand daisybush, Olearia haastii\nn11998888\tcotton thistle, woolly thistle, Scotch thistle, Onopordum acanthium, Onopordon acanthium\nn11999278\tothonna\nn11999656\tcascade everlasting, Ozothamnus secundiflorus, Helichrysum secundiflorum\nn12000191\tbutterweed\nn12001294\tAmerican feverfew, wild quinine, prairie dock, Parthenium integrifolium\nn12001707\tcineraria, Pericallis cruenta, Senecio cruentus\nn12001924\tflorest's cineraria, Pericallis hybrida\nn12002428\tbutterbur, bog rhubarb, Petasites hybridus, Petasites vulgaris\nn12002651\twinter heliotrope, sweet coltsfoot, Petasites fragrans\nn12002826\tsweet coltsfoot, Petasites sagitattus\nn12003167\toxtongue, bristly oxtongue, bitterweed, bugloss, Picris echioides\nn12003696\thawkweed\nn12004120\tmouse-ear hawkweed, Pilosella officinarum, Hieracium 
pilocella\nn12004547\tstevia\nn12004987\trattlesnake root, Prenanthes purpurea\nn12005656\tfleabane, feabane mullet, Pulicaria dysenterica\nn12006306\tsheep plant, vegetable sheep, Raoulia lutescens, Raoulia australis\nn12006766\tconeflower\nn12006930\tMexican hat, Ratibida columnaris\nn12007196\tlong-head coneflower, prairie coneflower, Ratibida columnifera\nn12007406\tprairie coneflower, Ratibida tagetes\nn12007766\tSwan River everlasting, rhodanthe, Rhodanthe manglesii, Helipterum manglesii\nn12008252\tconeflower\nn12008487\tblack-eyed Susan, Rudbeckia hirta, Rudbeckia serotina\nn12008749\tcutleaved coneflower, Rudbeckia laciniata\nn12009047\tgolden glow, double gold, hortensia, Rudbeckia laciniata hortensia\nn12009420\tlavender cotton, Santolina chamaecyparissus\nn12009792\tcreeping zinnia, Sanvitalia procumbens\nn12010628\tgolden thistle\nn12010815\tSpanish oyster plant, Scolymus hispanicus\nn12011370\tnodding groundsel, Senecio bigelovii\nn12011620\tdusty miller, Senecio cineraria, Cineraria maritima\nn12012111\tbutterweed, ragwort, Senecio glabellus\nn12012253\tragwort, tansy ragwort, ragweed, benweed, Senecio jacobaea\nn12012510\tarrowleaf groundsel, Senecio triangularis\nn12013035\tblack salsify, viper's grass, scorzonera, Scorzonera hispanica\nn12013511\twhite-topped aster\nn12013701\tnarrow-leaved white-topped aster\nn12014085\tsilver sage, silver sagebrush, grey sage, gray sage, Seriphidium canum, Artemisia cana\nn12014355\tsea wormwood, Seriphidium maritimum, Artemisia maritima\nn12014923\tsawwort, Serratula tinctoria\nn12015221\trosinweed, Silphium laciniatum\nn12015525\tmilk thistle, lady's thistle, Our Lady's mild thistle, holy thistle, blessed thistle, Silybum marianum\nn12015959\tgoldenrod\nn12016434\tsilverrod, Solidago bicolor\nn12016567\tmeadow goldenrod, Canadian goldenrod, Solidago canadensis\nn12016777\tMissouri goldenrod, Solidago missouriensis\nn12016914\talpine goldenrod, Solidago multiradiata\nn12017127\tgrey goldenrod, gray goldenrod, 
Solidago nemoralis\nn12017326\tBlue Mountain tea, sweet goldenrod, Solidago odora\nn12017511\tdyer's weed, Solidago rugosa\nn12017664\tseaside goldenrod, beach goldenrod, Solidago sempervirens\nn12017853\tnarrow goldenrod, Solidago spathulata\nn12018014\tBoott's goldenrod\nn12018100\tElliott's goldenrod\nn12018188\tOhio goldenrod\nn12018271\trough-stemmed goldenrod\nn12018363\tshowy goldenrod\nn12018447\ttall goldenrod\nn12018530\tzigzag goldenrod, broad leaved goldenrod\nn12018760\tsow thistle, milk thistle\nn12019035\tmilkweed, Sonchus oleraceus\nn12019827\tstevia\nn12020184\tstokes' aster, cornflower aster, Stokesia laevis\nn12020507\tmarigold\nn12020736\tAfrican marigold, big marigold, Aztec marigold, Tagetes erecta\nn12020941\tFrench marigold, Tagetes patula\nn12022054\tpainted daisy, pyrethrum, Tanacetum coccineum, Chrysanthemum coccineum\nn12022382\tpyrethrum, Dalmatian pyrethrum, Dalmatia pyrethrum, Tanacetum cinerariifolium, Chrysanthemum cinerariifolium\nn12022821\tnorthern dune tansy, Tanacetum douglasii\nn12023108\tfeverfew, Tanacetum parthenium, Chrysanthemum parthenium\nn12023407\tdusty miller, silver-lace, silver lace, Tanacetum ptarmiciflorum, Chrysanthemum ptarmiciflorum\nn12023726\ttansy, golden buttons, scented fern, Tanacetum vulgare\nn12024176\tdandelion, blowball\nn12024445\tcommon dandelion, Taraxacum ruderalia, Taraxacum officinale\nn12024690\tdandelion green\nn12024805\tRussian dandelion, kok-saghyz, kok-sagyz, Taraxacum kok-saghyz\nn12025220\tstemless hymenoxys, Tetraneuris acaulis, Hymenoxys acaulis\nn12026018\tMexican sunflower, tithonia\nn12026476\tEaster daisy, stemless daisy, Townsendia Exscapa\nn12026981\tyellow salsify, Tragopogon dubius\nn12027222\tsalsify, oyster plant, vegetable oyster, Tragopogon porrifolius\nn12027658\tmeadow salsify, goatsbeard, shepherd's clock, Tragopogon pratensis\nn12028424\tscentless camomile, scentless false camomile, scentless mayweed, scentless hayweed, corn mayweed, Tripleurospermum inodorum, 
Matricaria inodorum\nn12029039\tturfing daisy, Tripleurospermum tchihatchewii, Matricaria tchihatchewii\nn12029635\tcoltsfoot, Tussilago farfara\nn12030092\tursinia\nn12030654\tcrownbeard, crown-beard, crown beard\nn12030908\twingstem, golden ironweed, yellow ironweed, golden honey plant, Verbesina alternifolia, Actinomeris alternifolia\nn12031139\tcowpen daisy, golden crownbeard, golden crown beard, butter daisy, Verbesina encelioides, Ximenesia encelioides\nn12031388\tgravelweed, Verbesina helianthoides\nn12031547\tVirginia crownbeard, frostweed, frost-weed, Verbesina virginica\nn12031927\tironweed, vernonia\nn12032429\tmule's ears, Wyethia amplexicaulis\nn12032686\twhite-rayed mule's ears, Wyethia helianthoides\nn12033139\tcocklebur, cockle-bur, cockleburr, cockle-burr\nn12033504\txeranthemum\nn12033709\timmortelle, Xeranthemum annuum\nn12034141\tzinnia, old maid, old maid flower\nn12034384\twhite zinnia, Zinnia acerosa\nn12034594\tlittle golden zinnia, Zinnia grandiflora\nn12035631\tblazing star, Mentzelia livicaulis, Mentzelia laevicaulis\nn12035907\tbartonia, Mentzelia lindleyi\nn12036067\tachene\nn12036226\tsamara, key fruit, key\nn12036939\tcampanula, bellflower\nn12037499\tcreeping bellflower, Campanula rapunculoides\nn12037691\tCanterbury bell, cup and saucer, Campanula medium\nn12038038\ttall bellflower, Campanula americana\nn12038208\tmarsh bellflower, Campanula aparinoides\nn12038406\tclustered bellflower, Campanula glomerata\nn12038585\tpeach bells, peach bell, willow bell, Campanula persicifolia\nn12038760\tchimney plant, chimney bellflower, Campanula pyramidalis\nn12038898\trampion, rampion bellflower, Campanula rapunculus\nn12039317\ttussock bellflower, spreading bellflower, Campanula carpatica\nn12041446\torchid, orchidaceous plant\nn12043444\torchis\nn12043673\tmale orchis, early purple orchid, Orchis mascula\nn12043836\tbutterfly orchid, butterfly orchis, Orchis papilionaceae\nn12044041\tshowy orchis, purple orchis, purple-hooded orchis, Orchis 
spectabilis\nn12044467\taerides\nn12044784\tangrecum\nn12045157\tjewel orchid\nn12045514\tputtyroot, adam-and-eve, Aplectrum hyemale\nn12045860\tarethusa\nn12046028\tbog rose, wild pink, dragon's mouth, Arethusa bulbosa\nn12046428\tbletia\nn12046815\tBletilla striata, Bletia striata\nn12047345\tbrassavola\nn12047884\tspider orchid, Brassia lawrenceana\nn12048056\tspider orchid, Brassia verrucosa\nn12048399\tcaladenia\nn12048928\tcalanthe\nn12049282\tgrass pink, Calopogon pulchellum, Calopogon tuberosum\nn12049562\tcalypso, fairy-slipper, Calypso bulbosa\nn12050533\tcattleya\nn12050959\thelleborine\nn12051103\tred helleborine, Cephalanthera rubra\nn12051514\tspreading pogonia, funnel-crest rosebud orchid, Cleistes divaricata, Pogonia divaricata\nn12051792\trosebud orchid, Cleistes rosea, Pogonia rosea\nn12052267\tsatyr orchid, Coeloglossum bracteatum\nn12052447\tfrog orchid, Coeloglossum viride\nn12052787\tcoelogyne\nn12053405\tcoral root\nn12053690\tspotted coral root, Corallorhiza maculata\nn12053962\tstriped coral root, Corallorhiza striata\nn12054195\tearly coral root, pale coral root, Corallorhiza trifida\nn12055073\tswan orchid, swanflower, swan-flower, swanneck, swan-neck\nn12055516\tcymbid, cymbidium\nn12056099\tcypripedia\nn12056217\tlady's slipper, lady-slipper, ladies' slipper, slipper orchid\nn12056601\tmoccasin flower, nerveroot, Cypripedium acaule\nn12056758\tcommon lady's-slipper, showy lady's-slipper, showy lady slipper, Cypripedium reginae, Cypripedium album\nn12056990\tram's-head, ram's-head lady's slipper, Cypripedium arietinum\nn12057211\tyellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum\nn12057447\tlarge yellow lady's slipper, Cypripedium calceolus pubescens\nn12057660\tCalifornia lady's slipper, Cypripedium californicum\nn12057895\tclustered lady's slipper, Cypripedium fasciculatum\nn12058192\tmountain lady's slipper, Cypripedium montanum\nn12058630\tmarsh orchid\nn12058822\tcommon spotted orchid, 
Dactylorhiza fuchsii, Dactylorhiza maculata fuchsii\nn12059314\tdendrobium\nn12059625\tdisa\nn12060546\tphantom orchid, snow orchid, Eburophyton austinae\nn12061104\ttulip orchid, Encyclia citrina, Cattleya citrina\nn12061380\tbutterfly orchid, Encyclia tampensis, Epidendrum tampense\nn12061614\tbutterfly orchid, butterfly orchis, Epidendrum venosum, Encyclia venosa\nn12062105\tepidendron\nn12062468\thelleborine\nn12062626\tEpipactis helleborine\nn12062781\tstream orchid, chatterbox, giant helleborine, Epipactis gigantea\nn12063211\ttongueflower, tongue-flower\nn12063639\trattlesnake plantain, helleborine\nn12064389\tfragrant orchid, Gymnadenia conopsea\nn12064591\tshort-spurred fragrant orchid, Gymnadenia odoratissima\nn12065316\tfringed orchis, fringed orchid\nn12065649\tfrog orchid\nn12065777\trein orchid, rein orchis\nn12066018\tbog rein orchid, bog candles, Habenaria dilatata\nn12066261\twhite fringed orchis, white fringed orchid, Habenaria albiflora\nn12066451\telegant Habenaria, Habenaria elegans\nn12066630\tpurple-fringed orchid, purple-fringed orchis, Habenaria fimbriata\nn12066821\tcoastal rein orchid, Habenaria greenei\nn12067029\tHooker's orchid, Habenaria hookeri\nn12067193\tragged orchid, ragged orchis, ragged-fringed orchid, green fringed orchis, Habenaria lacera\nn12067433\tprairie orchid, prairie white-fringed orchis, Habenaria leucophaea\nn12067672\tsnowy orchid, Habenaria nivea\nn12067817\tround-leaved rein orchid, Habenaria orbiculata\nn12068138\tpurple fringeless orchid, purple fringeless orchis, Habenaria peramoena\nn12068432\tpurple-fringed orchid, purple-fringed orchis, Habenaria psycodes\nn12068615\tAlaska rein orchid, Habenaria unalascensis\nn12069009\tcrested coral root, Hexalectris spicata\nn12069217\tTexas purple spike, Hexalectris warnockii\nn12069679\tlizard orchid, Himantoglossum hircinum\nn12070016\tlaelia\nn12070381\tliparis\nn12070583\ttwayblade\nn12070712\tfen orchid, fen orchis, Liparis loeselii\nn12071259\tbroad-leaved 
twayblade, Listera convallarioides\nn12071477\tlesser twayblade, Listera cordata\nn12071744\ttwayblade, Listera ovata\nn12072210\tgreen adder's mouth, Malaxis-unifolia, Malaxis ophioglossoides\nn12072722\tmasdevallia\nn12073217\tmaxillaria\nn12073554\tpansy orchid\nn12073991\todontoglossum\nn12074408\toncidium, dancing lady orchid, butterfly plant, butterfly orchid\nn12074867\tbee orchid, Ophrys apifera\nn12075010\tfly orchid, Ophrys insectifera, Ophrys muscifera\nn12075151\tspider orchid\nn12075299\tearly spider orchid, Ophrys sphegodes\nn12075830\tVenus' slipper, Venus's slipper, Venus's shoe\nn12076223\tphaius\nn12076577\tmoth orchid, moth plant\nn12076852\tbutterfly plant, Phalaenopsis amabilis\nn12077244\trattlesnake orchid\nn12077944\tlesser butterfly orchid, Platanthera bifolia, Habenaria bifolia\nn12078172\tgreater butterfly orchid, Platanthera chlorantha, Habenaria chlorantha\nn12078451\tprairie white-fringed orchid, Platanthera leucophea\nn12078747\ttangle orchid\nn12079120\tIndian crocus\nn12079523\tpleurothallis\nn12079963\tpogonia\nn12080395\tbutterfly orchid\nn12080588\tPsychopsis krameriana, Oncidium papilio kramerianum\nn12080820\tPsychopsis papilio, Oncidium papilio\nn12081215\thelmet orchid, greenhood\nn12081649\tfoxtail orchid\nn12082131\torange-blossom orchid, Sarcochilus falcatus\nn12083113\tsobralia\nn12083591\tladies' tresses, lady's tresses\nn12083847\tscrew augur, Spiranthes cernua\nn12084158\thooded ladies' tresses, Spiranthes romanzoffiana\nn12084400\twestern ladies' tresses, Spiranthes porrifolia\nn12084555\tEuropean ladies' tresses, Spiranthes spiralis\nn12084890\tstanhopea\nn12085267\tstelis\nn12085664\tfly orchid\nn12086012\tvanda\nn12086192\tblue orchid, Vanda caerulea\nn12086539\tvanilla\nn12086778\tvanilla orchid, Vanilla planifolia\nn12087961\tyam, yam plant\nn12088223\tyam\nn12088327\twhite yam, water yam, Dioscorea alata\nn12088495\tcinnamon vine, Chinese yam, Dioscorea batata\nn12088909\telephant's-foot, tortoise plant, 
Hottentot bread vine, Hottentot's bread vine, Dioscorea elephantipes\nn12089320\twild yam, Dioscorea paniculata\nn12089496\tcush-cush, Dioscorea trifida\nn12089846\tblack bryony, black bindweed, Tamus communis\nn12090890\tprimrose, primula\nn12091213\tEnglish primrose, Primula vulgaris\nn12091377\tcowslip, paigle, Primula veris\nn12091550\toxlip, paigle, Primula elatior\nn12091697\tChinese primrose, Primula sinensis\nn12091953\tpolyanthus, Primula polyantha\nn12092262\tpimpernel\nn12092417\tscarlet pimpernel, red pimpernel, poor man's weatherglass, Anagallis arvensis\nn12092629\tbog pimpernel, Anagallis tenella\nn12092930\tchaffweed, bastard pimpernel, false pimpernel\nn12093329\tcyclamen, Cyclamen purpurascens\nn12093600\tsowbread, Cyclamen hederifolium, Cyclamen neopolitanum\nn12093885\tsea milkwort, sea trifoly, black saltwort, Glaux maritima\nn12094244\tfeatherfoil, feather-foil\nn12094401\twater gillyflower, American featherfoil, Hottonia inflata\nn12094612\twater violet, Hottonia palustris\nn12095020\tloosestrife\nn12095281\tgooseneck loosestrife, Lysimachia clethroides Duby\nn12095412\tyellow pimpernel, Lysimachia nemorum\nn12095543\tfringed loosestrife, Lysimachia ciliatum\nn12095647\tmoneywort, creeping Jenny, creeping Charlie, Lysimachia nummularia\nn12095934\tswamp candles, Lysimachia terrestris\nn12096089\twhorled loosestrife, Lysimachia quadrifolia\nn12096395\twater pimpernel\nn12096563\tbrookweed, Samolus valerandii\nn12096674\tbrookweed, Samolus parviflorus, Samolus floribundus\nn12097396\tcoralberry, spiceberry, Ardisia crenata\nn12097556\tmarlberry, Ardisia escallonoides, Ardisia paniculata\nn12098403\tplumbago\nn12098524\tleadwort, Plumbago europaea\nn12098827\tthrift\nn12099342\tsea lavender, marsh rosemary, statice\nn12100187\tbarbasco, joewood, Jacquinia keyensis\nn12101870\tgramineous plant, graminaceous plant\nn12102133\tgrass\nn12103680\tmidgrass\nn12103894\tshortgrass, short-grass\nn12104104\tsword grass\nn12104238\ttallgrass, 
tall-grass\nn12104501\therbage, pasturage\nn12104734\tgoat grass, Aegilops triuncalis\nn12105125\twheatgrass, wheat-grass\nn12105353\tcrested wheatgrass, crested wheat grass, fairway crested wheat grass, Agropyron cristatum\nn12105828\tbearded wheatgrass, Agropyron subsecundum\nn12105981\twestern wheatgrass, bluestem wheatgrass, Agropyron smithii\nn12106134\tintermediate wheatgrass, Agropyron intermedium, Elymus hispidus\nn12106323\tslender wheatgrass, Agropyron trachycaulum, Agropyron pauciflorum, Elymus trachycaulos\nn12107002\tvelvet bent, velvet bent grass, brown bent, Rhode Island bent, dog bent, Agrostis canina\nn12107191\tcloud grass, Agrostis nebulosa\nn12107710\tmeadow foxtail, Alopecurus pratensis\nn12107970\tfoxtail, foxtail grass\nn12108432\tbroom grass\nn12108613\tbroom sedge, Andropogon virginicus\nn12108871\ttall oat grass, tall meadow grass, evergreen grass, false oat, French rye, Arrhenatherum elatius\nn12109365\ttoetoe, toitoi, Arundo conspicua, Chionochloa conspicua\nn12109827\toat\nn12110085\tcereal oat, Avena sativa\nn12110236\twild oat, wild oat grass, Avena fatua\nn12110352\tslender wild oat, Avena barbata\nn12110475\twild red oat, animated oat, Avene sterilis\nn12110778\tbrome, bromegrass\nn12111238\tchess, cheat, Bromus secalinus\nn12111627\tfield brome, Bromus arvensis\nn12112008\tgrama, grama grass, gramma, gramma grass\nn12112337\tblack grama, Bouteloua eriopoda\nn12112609\tbuffalo grass, Buchloe dactyloides\nn12112918\treed grass\nn12113195\tfeather reed grass, feathertop, Calamagrostis acutiflora\nn12113323\tAustralian reed grass, Calamagrostic quadriseta\nn12113657\tburgrass, bur grass\nn12114010\tbuffel grass, Cenchrus ciliaris, Pennisetum cenchroides\nn12114590\tRhodes grass, Chloris gayana\nn12115180\tpampas grass, Cortaderia selloana\nn12116058\tgiant star grass, Cynodon plectostachyum\nn12116429\torchard grass, cocksfoot, cockspur, Dactylis glomerata\nn12116734\tEgyptian grass, crowfoot grass, Dactyloctenium 
aegypticum\nn12117017\tcrabgrass, crab grass, finger grass\nn12117235\tsmooth crabgrass, Digitaria ischaemum\nn12117326\tlarge crabgrass, hairy finger grass, Digitaria sanguinalis\nn12117695\tbarnyard grass, barn grass, barn millet, Echinochloa crusgalli\nn12117912\tJapanese millet, billion-dollar grass, Japanese barnyard millet, sanwa millet, Echinochloa frumentacea\nn12118414\tyardgrass, yard grass, wire grass, goose grass, Eleusine indica\nn12118661\tfinger millet, ragi, ragee, African millet, coracan, corakan, kurakkan, Eleusine coracana\nn12119099\tlyme grass\nn12119238\twild rye\nn12119390\tgiant ryegrass, Elymus condensatus, Leymus condensatus\nn12119539\tsea lyme grass, European dune grass, Elymus arenarius, Leymus arenaria\nn12119717\tCanada wild rye, Elymus canadensis\nn12120347\tteff, teff grass, Eragrostis tef, Eragrostic abyssinica\nn12120578\tweeping love grass, African love grass, Eragrostis curvula\nn12121033\tplume grass\nn12121187\tRavenna grass, wool grass, Erianthus ravennae\nn12121610\tfescue, fescue grass, meadow fescue, Festuca elatior\nn12122442\treed meadow grass, Glyceria grandis\nn12122725\tvelvet grass, Yorkshire fog, Holcus lanatus\nn12122918\tcreeping soft grass, Holcus mollis\nn12123648\tbarleycorn\nn12123741\tbarley grass, wall barley, Hordeum murinum\nn12124172\tlittle barley, Hordeum pusillum\nn12124627\trye grass, ryegrass\nn12124818\tperennial ryegrass, English ryegrass, Lolium perenne\nn12125001\tItalian ryegrass, Italian rye, Lolium multiflorum\nn12125183\tdarnel, tare, bearded darnel, cheat, Lolium temulentum\nn12125584\tnimblewill, nimble Will, Muhlenbergia schreberi\nn12126084\tcultivated rice, Oryza sativa\nn12126360\tricegrass, rice grass\nn12126736\tsmilo, smilo grass, Oryzopsis miliacea\nn12127460\tswitch grass, Panicum virgatum\nn12127575\tbroomcorn millet, hog millet, Panicum miliaceum\nn12127768\tgoose grass, Texas millet, Panicum Texanum\nn12128071\tdallisgrass, dallis grass, paspalum, Paspalum 
dilatatum\nn12128306\tBahia grass, Paspalum notatum\nn12128490\tknotgrass, Paspalum distichum\nn12129134\tfountain grass, Pennisetum ruppelii, Pennisetum setaceum\nn12129738\treed canary grass, gardener's garters, lady's laces, ribbon grass, Phalaris arundinacea\nn12129986\tcanary grass, birdseed grass, Phalaris canariensis\nn12130549\ttimothy, herd's grass, Phleum pratense\nn12131405\tbluegrass, blue grass\nn12131550\tmeadowgrass, meadow grass\nn12132092\twood meadowgrass, Poa nemoralis, Agrostis alba\nn12132956\tnoble cane\nn12133151\tmunj, munja, Saccharum bengalense, Saccharum munja\nn12133462\tbroom beard grass, prairie grass, wire grass, Andropogon scoparius, Schizachyrium scoparium\nn12133682\tbluestem, blue stem, Andropogon furcatus, Andropogon gerardii\nn12134025\trye, Secale cereale\nn12134486\tbristlegrass, bristle grass\nn12134695\tgiant foxtail\nn12134836\tyellow bristlegrass, yellow bristle grass, yellow foxtail, glaucous bristlegrass, Setaria glauca\nn12135049\tgreen bristlegrass, green foxtail, rough bristlegrass, bottle-grass, bottle grass, Setaria viridis\nn12135576\tSiberian millet, Setaria italica rubrofructa\nn12135729\tGerman millet, golden wonder millet, Setaria italica stramineofructa\nn12135898\tmillet\nn12136392\trattan, rattan cane\nn12136581\tmalacca\nn12136720\treed\nn12137120\tsorghum\nn12137569\tgrain sorghum\nn12137791\tdurra, doura, dourah, Egyptian corn, Indian millet, Guinea corn\nn12137954\tfeterita, federita, Sorghum vulgare caudatum\nn12138110\thegari\nn12138248\tkaoliang\nn12138444\tmilo, milo maize\nn12138578\tshallu, Sorghum vulgare rosburghii\nn12139196\tbroomcorn, Sorghum vulgare technicum\nn12139575\tcordgrass, cord grass\nn12139793\tsalt reed grass, Spartina cynosuroides\nn12139921\tprairie cordgrass, freshwater cordgrass, slough grass, Spartina pectinmata\nn12140511\tsmut grass, blackseed, carpet grass, Sporobolus poiretii\nn12140759\tsand dropseed, Sporobolus cryptandrus\nn12140903\trush grass, 
rush-grass\nn12141167\tSt. Augustine grass, Stenotaphrum secundatum, buffalo grass\nn12141385\tgrain\nn12141495\tcereal, cereal grass\nn12142085\twheat\nn12142357\twheat berry\nn12142450\tdurum, durum wheat, hard wheat, Triticum durum, Triticum turgidum, macaroni wheat\nn12143065\tspelt, Triticum spelta, Triticum aestivum spelta\nn12143215\temmer, starch wheat, two-grain spelt, Triticum dicoccum\nn12143405\twild wheat, wild emmer, Triticum dicoccum dicoccoides\nn12143676\tcorn, maize, Indian corn, Zea mays\nn12144313\tmealie\nn12144580\tcorn\nn12144987\tdent corn, Zea mays indentata\nn12145148\tflint corn, flint maize, Yankee corn, Zea mays indurata\nn12145477\tpopcorn, Zea mays everta\nn12146311\tzoysia\nn12146488\tManila grass, Japanese carpet grass, Zoysia matrella\nn12146654\tKorean lawn grass, Japanese lawn grass, Zoysia japonica\nn12147226\tbamboo\nn12147835\tcommon bamboo, Bambusa vulgaris\nn12148757\tgiant bamboo, kyo-chiku, Dendrocalamus giganteus\nn12150722\tumbrella plant, umbrella sedge, Cyperus alternifolius\nn12150969\tchufa, yellow nutgrass, earth almond, ground almond, rush nut, Cyperus esculentus\nn12151170\tgalingale, galangal, Cyperus longus\nn12151615\tnutgrass, nut grass, nutsedge, nut sedge, Cyperus rotundus\nn12152031\tsand sedge, sand reed, Carex arenaria\nn12152251\tcypress sedge, Carex pseudocyperus\nn12152532\tcotton grass, cotton rush\nn12152722\tcommon cotton grass, Eriophorum angustifolium\nn12153033\thardstem bulrush, hardstemmed bulrush, Scirpus acutus\nn12153224\twool grass, Scirpus cyperinus\nn12153580\tspike rush\nn12153741\twater chestnut, Chinese water chestnut, Eleocharis dulcis\nn12153914\tneedle spike rush, needle rush, slender spike rush, hair grass, Eleocharis acicularis\nn12154114\tcreeping spike rush, Eleocharis palustris\nn12154773\tpandanus, screw pine\nn12155009\ttextile screw pine, lauhala, Pandanus tectorius\nn12155583\tcattail\nn12155773\tcat's-tail, bullrush, bulrush, nailrod, reed mace, reedmace, Typha 
latifolia\nn12156679\tbur reed\nn12156819\tgrain, caryopsis\nn12157056\tkernel\nn12157179\trye\nn12157769\tgourd, gourd vine\nn12158031\tgourd\nn12158443\tpumpkin, pumpkin vine, autumn pumpkin, Cucurbita pepo\nn12158798\tsquash, squash vine\nn12159055\tsummer squash, summer squash vine, Cucurbita pepo melopepo\nn12159388\tyellow squash\nn12159555\tmarrow, marrow squash, vegetable marrow\nn12159804\tzucchini, courgette\nn12159942\tcocozelle, Italian vegetable marrow\nn12160125\tcymling, pattypan squash\nn12160303\tspaghetti squash\nn12160490\twinter squash, winter squash plant\nn12160857\tacorn squash\nn12161056\thubbard squash, Cucurbita maxima\nn12161285\tturban squash, Cucurbita maxima turbaniformis\nn12161577\tbuttercup squash\nn12161744\tbutternut squash, Cucurbita maxima\nn12161969\twinter crookneck, winter crookneck squash, Cucurbita moschata\nn12162181\tcushaw, Cucurbita mixta, Cucurbita argyrosperma\nn12162425\tprairie gourd, prairie gourd vine, Missouri gourd, wild pumpkin, buffalo gourd, calabazilla, Cucurbita foetidissima\nn12162758\tprairie gourd\nn12163035\tbryony, briony\nn12163279\twhite bryony, devil's turnip, Bryonia alba\nn12164363\tsweet melon, muskmelon, sweet melon vine, Cucumis melo\nn12164656\tcantaloupe, cantaloup, cantaloupe vine, cantaloup vine, Cucumis melo cantalupensis\nn12164881\twinter melon, Persian melon, honeydew melon, winter melon vine, Cucumis melo inodorus\nn12165170\tnet melon, netted melon, nutmeg melon, Cucumis melo reticulatus\nn12165384\tcucumber, cucumber vine, Cucumis sativus\nn12165758\tsquirting cucumber, exploding cucumber, touch-me-not, Ecballium elaterium\nn12166128\tbottle gourd, calabash, Lagenaria siceraria\nn12166424\tluffa, dishcloth gourd, sponge gourd, rag gourd, strainer vine\nn12166793\tloofah, vegetable sponge, Luffa cylindrica\nn12166929\tangled loofah, sing-kwa, Luffa acutangula\nn12167075\tloofa, loofah, luffa, loufah sponge\nn12167436\tbalsam apple, Momordica balsamina\nn12167602\tbalsam pear, 
Momordica charantia\nn12168565\tlobelia\nn12169099\twater lobelia, Lobelia dortmanna\nn12170585\tmallow\nn12171098\tmusk mallow, mus rose, Malva moschata\nn12171316\tcommon mallow, Malva neglecta\nn12171966\tokra, gumbo, okra plant, lady's-finger, Abelmoschus esculentus, Hibiscus esculentus\nn12172364\tokra\nn12172481\tabelmosk, musk mallow, Abelmoschus moschatus, Hibiscus moschatus\nn12172906\tflowering maple\nn12173069\tvelvetleaf, velvet-leaf, velvetweed, Indian mallow, butter-print, China jute, Abutilon theophrasti\nn12173664\thollyhock\nn12173912\trose mallow, Alcea rosea, Althea rosea\nn12174311\talthea, althaea, hollyhock\nn12174521\tmarsh mallow, white mallow, Althea officinalis\nn12174926\tpoppy mallow\nn12175181\tfringed poppy mallow, Callirhoe digitata\nn12175370\tpurple poppy mallow, Callirhoe involucrata\nn12175598\tclustered poppy mallow, Callirhoe triangulata\nn12176453\tsea island cotton, tree cotton, Gossypium barbadense\nn12176709\tLevant cotton, Gossypium herbaceum\nn12176953\tupland cotton, Gossypium hirsutum\nn12177129\tPeruvian cotton, Gossypium peruvianum\nn12177455\twild cotton, Arizona wild cotton, Gossypium thurberi\nn12178129\tkenaf, kanaf, deccan hemp, bimli, bimli hemp, Indian hemp, Bombay hemp, Hibiscus cannabinus\nn12178780\tsorrel tree, Hibiscus heterophyllus\nn12178896\trose mallow, swamp mallow, common rose mallow, swamp rose mallow, Hibiscus moscheutos\nn12179122\tcotton rose, Confederate rose, Confederate rose mallow, Hibiscus mutabilis\nn12179632\troselle, rozelle, sorrel, red sorrel, Jamaica sorrel, Hibiscus sabdariffa\nn12180168\tmahoe, majagua, mahagua, balibago, purau, Hibiscus tiliaceus\nn12180456\tflower-of-an-hour, flowers-of-an-hour, bladder ketmia, black-eyed Susan, Hibiscus trionum\nn12180885\tlacebark, ribbonwood, houhere, Hoheria populnea\nn12181352\twild hollyhock, Iliamna remota, Sphaeralcea remota\nn12181612\tmountain hollyhock, Iliamna ruvularis, Iliamna acerifolia\nn12182049\tseashore mallow\nn12182276\tsalt 
marsh mallow, Kosteletzya virginica\nn12183026\tchaparral mallow, Malacothamnus fasciculatus, Sphaeralcea fasciculata\nn12183452\tmalope, Malope trifida\nn12183816\tfalse mallow\nn12184095\twaxmallow, wax mallow, sleeping hibiscus\nn12184468\tglade mallow, Napaea dioica\nn12184912\tpavonia\nn12185254\tribbon tree, ribbonwood, Plagianthus regius, Plagianthus betulinus\nn12185859\tbush hibiscus, Radyera farragei, Hibiscus farragei\nn12186352\tVirginia mallow, Sida hermaphrodita\nn12186554\tQueensland hemp, jellyleaf, Sida rhombifolia\nn12186839\tIndian mallow, Sida spinosa\nn12187247\tcheckerbloom, wild hollyhock, Sidalcea malviflora\nn12187663\tglobe mallow, false mallow\nn12187891\tprairie mallow, red false mallow, Sphaeralcea coccinea, Malvastrum coccineum\nn12188289\ttulipwood tree\nn12188635\tportia tree, bendy tree, seaside mahoe, Thespesia populnea\nn12189429\tred silk-cotton tree, simal, Bombax ceiba, Bombax malabarica\nn12189779\tcream-of-tartar tree, sour gourd, Adansonia gregorii\nn12189987\tbaobab, monkey-bread tree, Adansonia digitata\nn12190410\tkapok, ceiba tree, silk-cotton tree, white silk-cotton tree, Bombay ceiba, God tree, Ceiba pentandra\nn12190869\tdurian, durion, durian tree, Durio zibethinus\nn12191240\tMontezuma\nn12192132\tshaving-brush tree, Pseudobombax ellipticum\nn12192877\tquandong, quandong tree, Brisbane quandong, silver quandong tree, blue fig, Elaeocarpus grandis\nn12193334\tquandong, blue fig\nn12193665\tmakomako, New Zealand wine berry, wineberry, Aristotelia serrata, Aristotelia racemosa\nn12194147\tJamaican cherry, calabur tree, calabura, silk wood, silkwood, Muntingia calabura\nn12194613\tbreakax, breakaxe, break-axe, Sloanea jamaicensis\nn12195391\tsterculia\nn12195533\tPanama tree, Sterculia apetala\nn12195734\tkalumpang, Java olives, Sterculia foetida\nn12196129\tbottle-tree, bottle tree\nn12196336\tflame tree, flame durrajong, Brachychiton acerifolius, Sterculia acerifolia\nn12196527\tflame tree, broad-leaved bottletree, 
Brachychiton australis\nn12196694\tkurrajong, currajong, Brachychiton populneus\nn12196954\tQueensland bottletree, narrow-leaved bottletree, Brachychiton rupestris, Sterculia rupestris\nn12197359\tkola, kola nut, kola nut tree, goora nut, Cola acuminata\nn12197601\tkola nut, cola nut\nn12198286\tChinese parasol tree, Chinese parasol, Japanese varnish tree, phoenix tree, Firmiana simplex\nn12198793\tflannelbush, flannel bush, California beauty\nn12199266\tscrew tree\nn12199399\tnut-leaved screw tree, Helicteres isora\nn12199790\tred beech, brown oak, booyong, crow's foot, stave wood, silky elm, Heritiera trifoliolata, Terrietia trifoliolata\nn12199982\tlooking glass tree, Heritiera macrophylla\nn12200143\tlooking-glass plant, Heritiera littoralis\nn12200504\thoney bell, honeybells, Hermannia verticillata, Mahernia verticillata\nn12200905\tmayeng, maple-leaved bayur, Pterospermum acerifolium\nn12201331\tsilver tree, Tarrietia argyrodendron\nn12201580\tcacao, cacao tree, chocolate tree, Theobroma cacao\nn12201938\tobeche, obechi, arere, samba, Triplochiton scleroxcylon\nn12202936\tlinden, linden tree, basswood, lime, lime tree\nn12203529\tAmerican basswood, American lime, Tilia americana\nn12203699\tsmall-leaved linden, small-leaved lime, Tilia cordata\nn12203896\twhite basswood, cottonwood, Tilia heterophylla\nn12204032\tJapanese linden, Japanese lime, Tilia japonica\nn12204175\tsilver lime, silver linden, Tilia tomentosa\nn12204730\tcorchorus\nn12205460\tAfrican hemp, Sparmannia africana\nn12205694\therb, herbaceous plant\nn12214789\tprotea\nn12215022\thoneypot, king protea, Protea cynaroides\nn12215210\thoneyflower, honey-flower, Protea mellifera\nn12215579\tbanksia\nn12215824\thoneysuckle, Australian honeysuckle, coast banksia, Banksia integrifolia\nn12216215\tsmoke bush\nn12216628\tChilean firebush, Chilean flameflower, Embothrium coccineum\nn12216968\tChilean nut, Chile nut, Chile hazel, Chilean hazelnut, Guevina heterophylla, Guevina 
avellana\nn12217453\tgrevillea\nn12217851\tred-flowered silky oak, Grevillea banksii\nn12218274\tsilky oak, Grevillea robusta\nn12218490\tbeefwood, Grevillea striata\nn12218868\tcushion flower, pincushion hakea, Hakea laurina\nn12219668\trewa-rewa, New Zealand honeysuckle\nn12220019\thoneyflower, honey-flower, mountain devil, Lambertia formosa\nn12220496\tsilver tree, Leucadendron argenteum\nn12220829\tlomatia\nn12221191\tmacadamia, macadamia tree\nn12221368\tMacadamia integrifolia\nn12221522\tmacadamia nut, macadamia nut tree, Macadamia ternifolia\nn12221801\tQueensland nut, Macadamia tetraphylla\nn12222090\tprickly ash, Orites excelsa\nn12222493\tgeebung\nn12222900\twheel tree, firewheel tree, Stenocarpus sinuatus\nn12223160\tscrub beefwood, beefwood, Stenocarpus salignus\nn12223569\twaratah, Telopea Oreades\nn12223764\twaratah, Telopea speciosissima\nn12224978\tcasuarina\nn12225222\tshe-oak\nn12225349\tbeefwood\nn12225563\tAustralian pine, Casuarina equisetfolia\nn12226932\theath\nn12227658\ttree heath, briar, brier, Erica arborea\nn12227909\tbriarroot\nn12228229\twinter heath, spring heath, Erica carnea\nn12228387\tbell heather, heather bell, fine-leaved heath, Erica cinerea\nn12228689\tCornish heath, Erica vagans\nn12228886\tSpanish heath, Portuguese heath, Erica lusitanica\nn12229111\tPrince-of-Wales'-heath, Prince of Wales heath, Erica perspicua\nn12229651\tbog rosemary, moorwort, Andromeda glaucophylla\nn12229887\tmarsh andromeda, common bog rosemary, Andromeda polifolia\nn12230540\tmadrona, madrono, manzanita, Arbutus menziesii\nn12230794\tstrawberry tree, Irish strawberry, Arbutus unedo\nn12231192\tbearberry\nn12231709\talpine bearberry, black bearberry, Arctostaphylos alpina\nn12232114\theartleaf manzanita, Arctostaphylos andersonii\nn12232280\tParry manzanita, Arctostaphylos manzanita\nn12232851\tspike heath, Bruckenthalia spiculifolia\nn12233249\tbryanthus\nn12234318\tleatherleaf, Chamaedaphne calyculata\nn12234669\tConnemara heath, St. 
Dabeoc's heath, Daboecia cantabrica\nn12235051\ttrailing arbutus, mayflower, Epigaea repens\nn12235479\tcreeping snowberry, moxie plum, maidenhair berry, Gaultheria hispidula\nn12236160\tsalal, shallon, Gaultheria shallon\nn12236546\thuckleberry\nn12236768\tblack huckleberry, Gaylussacia baccata\nn12236977\tdangleberry, dangle-berry, Gaylussacia frondosa\nn12237152\tbox huckleberry, Gaylussacia brachycera\nn12237486\tkalmia\nn12237641\tmountain laurel, wood laurel, American laurel, calico bush, Kalmia latifolia\nn12237855\tswamp laurel, bog laurel, bog kalmia, Kalmia polifolia\nn12238756\ttrapper's tea, glandular Labrador tea\nn12238913\twild rosemary, marsh tea, Ledum palustre\nn12239240\tsand myrtle, Leiophyllum buxifolium\nn12239647\tleucothoe\nn12239880\tdog laurel, dog hobble, switch-ivy, Leucothoe fontanesiana, Leucothoe editorum\nn12240150\tsweet bells, Leucothoe racemosa\nn12240477\talpine azalea, mountain azalea, Loiseleuria procumbens\nn12240965\tstaggerbush, stagger bush, Lyonia mariana\nn12241192\tmaleberry, male berry, privet andromeda, he-huckleberry, Lyonia ligustrina\nn12241426\tfetterbush, fetter bush, shiny lyonia, Lyonia lucida\nn12241880\tfalse azalea, fool's huckleberry, Menziesia ferruginea\nn12242123\tminniebush, minnie bush, Menziesia pilosa\nn12242409\tsorrel tree, sourwood, titi, Oxydendrum arboreum\nn12242850\tmountain heath, Phyllodoce caerulea, Bryanthus taxifolius\nn12243109\tpurple heather, Brewer's mountain heather, Phyllodoce breweri\nn12243693\tfetterbush, mountain fetterbush, mountain andromeda, Pieris floribunda\nn12244153\trhododendron\nn12244458\tcoast rhododendron, Rhododendron californicum\nn12244650\trosebay, Rhododendron maxima\nn12244819\tswamp azalea, swamp honeysuckle, white honeysuckle, Rhododendron viscosum\nn12245319\tazalea\nn12245695\tcranberry\nn12245885\tAmerican cranberry, large cranberry, Vaccinium macrocarpon\nn12246037\tEuropean cranberry, small cranberry, Vaccinium oxycoccus\nn12246232\tblueberry, blueberry 
bush\nn12246773\tfarkleberry, sparkleberry, Vaccinium arboreum\nn12246941\tlow-bush blueberry, low blueberry, Vaccinium angustifolium, Vaccinium pennsylvanicum\nn12247202\trabbiteye blueberry, rabbit-eye blueberry, rabbiteye, Vaccinium ashei\nn12247407\tdwarf bilberry, dwarf blueberry, Vaccinium caespitosum\nn12247963\tevergreen blueberry, Vaccinium myrsinites\nn12248141\tevergreen huckleberry, Vaccinium ovatum\nn12248359\tbilberry, thin-leaved bilberry, mountain blue berry, Viccinium membranaceum\nn12248574\tbilberry, whortleberry, whinberry, blaeberry, Viccinium myrtillus\nn12248780\tbog bilberry, bog whortleberry, moor berry, Vaccinium uliginosum alpinum\nn12248941\tdryland blueberry, dryland berry, Vaccinium pallidum\nn12249122\tgrouseberry, grouse-berry, grouse whortleberry, Vaccinium scoparium\nn12249294\tdeerberry, squaw huckleberry, Vaccinium stamineum\nn12249542\tcowberry, mountain cranberry, lingonberry, lingenberry, lingberry, foxberry, Vaccinium vitis-idaea\nn12251001\tdiapensia\nn12251278\tgalax, galaxy, wandflower, beetleweed, coltsfoot, Galax urceolata\nn12251740\tpyxie, pixie, pixy, Pyxidanthera barbulata\nn12252168\tshortia\nn12252383\toconee bells, Shortia galacifolia\nn12252866\tAustralian heath\nn12253229\tepacris\nn12253487\tcommon heath, Epacris impressa\nn12253664\tcommon heath, blunt-leaf heath, Epacris obtusifolia\nn12253835\tPort Jackson heath, Epacris purpurascens\nn12254168\tnative cranberry, groundberry, ground-berry, cranberry heath, Astroloma humifusum, Styphelia humifusum\nn12255225\tpink fivecorner, Styphelia triflora\nn12256112\twintergreen, pyrola\nn12256325\tfalse wintergreen, Pyrola americana, Pyrola rotundifolia americana\nn12256522\tlesser wintergreen, Pyrola minor\nn12256708\twild lily of the valley, shinleaf, Pyrola elliptica\nn12256920\twild lily of the valley, Pyrola rotundifolia\nn12257570\tpipsissewa, prince's pine\nn12257725\tlove-in-winter, western prince's pine, Chimaphila umbellata, Chimaphila 
corymbosa\nn12258101\tone-flowered wintergreen, one-flowered pyrola, Moneses uniflora, Pyrola uniflora\nn12258885\tIndian pipe, waxflower, Monotropa uniflora\nn12259316\tpinesap, false beachdrops, Monotropa hypopithys\nn12260799\tbeech, beech tree\nn12261359\tcommon beech, European beech, Fagus sylvatica\nn12261571\tcopper beech, purple beech, Fagus sylvatica atropunicea, Fagus purpurea, Fagus sylvatica purpurea\nn12261808\tAmerican beech, white beech, red beech, Fagus grandifolia, Fagus americana\nn12262018\tweeping beech, Fagus pendula, Fagus sylvatica pendula\nn12262185\tJapanese beech\nn12262553\tchestnut, chestnut tree\nn12263038\tAmerican chestnut, American sweet chestnut, Castanea dentata\nn12263204\tEuropean chestnut, sweet chestnut, Spanish chestnut, Castanea sativa\nn12263410\tChinese chestnut, Castanea mollissima\nn12263588\tJapanese chestnut, Castanea crenata\nn12263738\tAllegheny chinkapin, eastern chinquapin, chinquapin, dwarf chestnut, Castanea pumila\nn12263987\tOzark chinkapin, Ozark chinquapin, chinquapin, Castanea ozarkensis\nn12264512\toak chestnut\nn12264786\tgiant chinkapin, golden chinkapin, Chrysolepis chrysophylla, Castanea chrysophylla, Castanopsis chrysophylla\nn12265083\tdwarf golden chinkapin, Chrysolepis sempervirens\nn12265394\ttanbark oak, Lithocarpus densiflorus\nn12265600\tJapanese oak, Lithocarpus glabra, Lithocarpus glaber\nn12266217\tsouthern beech, evergreen beech\nn12266528\tmyrtle beech, Nothofagus cuninghamii\nn12266644\tCoigue, Nothofagus dombeyi\nn12266796\tNew Zealand beech\nn12266984\tsilver beech, Nothofagus menziesii\nn12267133\troble beech, Nothofagus obliqua\nn12267265\trauli beech, Nothofagus procera\nn12267411\tblack beech, Nothofagus solanderi\nn12267534\thard beech, Nothofagus truncata\nn12267677\tacorn\nn12267931\tcupule, acorn cup\nn12268246\toak, oak tree\nn12269241\tlive oak\nn12269406\tcoast live oak, California live oak, Quercus agrifolia\nn12269652\twhite oak\nn12270027\tAmerican white oak, Quercus 
alba\nn12270278\tArizona white oak, Quercus arizonica\nn12270460\tswamp white oak, swamp oak, Quercus bicolor\nn12270741\tEuropean turkey oak, turkey oak, Quercus cerris\nn12270946\tcanyon oak, canyon live oak, maul oak, iron oak, Quercus chrysolepis\nn12271187\tscarlet oak, Quercus coccinea\nn12271451\tjack oak, northern pin oak, Quercus ellipsoidalis\nn12271643\tred oak\nn12271933\tsouthern red oak, swamp red oak, turkey oak, Quercus falcata\nn12272239\tOregon white oak, Oregon oak, Garry oak, Quercus garryana\nn12272432\tholm oak, holm tree, holly-leaved oak, evergreen oak, Quercus ilex\nn12272735\tbear oak, Quercus ilicifolia\nn12272883\tshingle oak, laurel oak, Quercus imbricaria\nn12273114\tbluejack oak, turkey oak, Quercus incana\nn12273344\tCalifornia black oak, Quercus kelloggii\nn12273515\tAmerican turkey oak, turkey oak, Quercus laevis\nn12273768\tlaurel oak, pin oak, Quercus laurifolia\nn12273939\tCalifornia white oak, valley oak, valley white oak, roble, Quercus lobata\nn12274151\tovercup oak, Quercus lyrata\nn12274358\tbur oak, burr oak, mossy-cup oak, mossycup oak, Quercus macrocarpa\nn12274630\tscrub oak\nn12274863\tblackjack oak, blackjack, jack oak, Quercus marilandica\nn12275131\tswamp chestnut oak, Quercus michauxii\nn12275317\tJapanese oak, Quercus mongolica, Quercus grosseserrata\nn12275489\tchestnut oak\nn12275675\tchinquapin oak, chinkapin oak, yellow chestnut oak, Quercus muehlenbergii\nn12275888\tmyrtle oak, seaside scrub oak, Quercus myrtifolia\nn12276110\twater oak, possum oak, Quercus nigra\nn12276314\tNuttall oak, Nuttall's oak, Quercus nuttalli\nn12276477\tdurmast, Quercus petraea, Quercus sessiliflora\nn12276628\tbasket oak, cow oak, Quercus prinus, Quercus montana\nn12276872\tpin oak, swamp oak, Quercus palustris\nn12277150\twillow oak, Quercus phellos\nn12277334\tdwarf chinkapin oak, dwarf chinquapin oak, dwarf oak, Quercus prinoides\nn12277578\tcommon oak, English oak, pedunculate oak, Quercus robur\nn12277800\tnorthern red oak, 
Quercus rubra, Quercus borealis\nn12278107\tShumard oak, Shumard red oak, Quercus shumardii\nn12278371\tpost oak, box white oak, brash oak, iron oak, Quercus stellata\nn12278650\tcork oak, Quercus suber\nn12278865\tSpanish oak, Quercus texana\nn12279060\thuckleberry oak, Quercus vaccinifolia\nn12279293\tChinese cork oak, Quercus variabilis\nn12279458\tblack oak, yellow oak, quercitron, quercitron oak, Quercus velutina\nn12279772\tsouthern live oak, Quercus virginiana\nn12280060\tinterior live oak, Quercus wislizenii, Quercus wizlizenii\nn12280364\tmast\nn12281241\tbirch, birch tree\nn12281788\tyellow birch, Betula alleghaniensis, Betula leutea\nn12281974\tAmerican white birch, paper birch, paperbark birch, canoe birch, Betula cordifolia, Betula papyrifera\nn12282235\tgrey birch, gray birch, American grey birch, American gray birch, Betula populifolia\nn12282527\tsilver birch, common birch, European white birch, Betula pendula\nn12282737\tdowny birch, white birch, Betula pubescens\nn12282933\tblack birch, river birch, red birch, Betula nigra\nn12283147\tsweet birch, cherry birch, black birch, Betula lenta\nn12283395\tYukon white birch, Betula neoalaskana\nn12283542\tswamp birch, water birch, mountain birch, Western paper birch, Western birch, Betula fontinalis\nn12283790\tNewfoundland dwarf birch, American dwarf birch, Betula glandulosa\nn12284262\talder, alder tree\nn12284821\tcommon alder, European black alder, Alnus glutinosa, Alnus vulgaris\nn12285049\tgrey alder, gray alder, Alnus incana\nn12285195\tseaside alder, Alnus maritima\nn12285369\twhite alder, mountain alder, Alnus rhombifolia\nn12285512\tred alder, Oregon alder, Alnus rubra\nn12285705\tspeckled alder, Alnus rugosa\nn12285900\tsmooth alder, hazel alder, Alnus serrulata\nn12286068\tgreen alder, Alnus veridis\nn12286197\tgreen alder, Alnus veridis crispa, Alnus crispa\nn12286826\thornbeam\nn12286988\tEuropean hornbeam, Carpinus betulus\nn12287195\tAmerican hornbeam, Carpinus caroliniana\nn12287642\thop 
hornbeam\nn12287836\tOld World hop hornbeam, Ostrya carpinifolia\nn12288005\tEastern hop hornbeam, ironwood, ironwood tree, Ostrya virginiana\nn12288823\thazelnut, hazel, hazelnut tree\nn12289310\tAmerican hazel, Corylus americana\nn12289433\tcobnut, filbert, Corylus avellana, Corylus avellana grandis\nn12289585\tbeaked hazelnut, Corylus cornuta\nn12290748\tcentaury\nn12290975\trosita, Centaurium calycosum\nn12291143\tlesser centaury, Centaurium minus\nn12291459\tseaside centaury\nn12291671\tslender centaury\nn12291959\tprairie gentian, tulip gentian, bluebell, Eustoma grandiflorum\nn12292463\tPersian violet, Exacum affine\nn12292877\tcolumbo, American columbo, deer's-ear, deer's-ears, pyramid plant, American gentian\nn12293723\tgentian\nn12294124\tgentianella, Gentiana acaulis\nn12294331\tclosed gentian, blind gentian, bottle gentian, Gentiana andrewsii\nn12294542\texplorer's gentian, Gentiana calycosa\nn12294723\tclosed gentian, blind gentian, Gentiana clausa\nn12294871\tgreat yellow gentian, Gentiana lutea\nn12295033\tmarsh gentian, calathian violet, Gentiana pneumonanthe\nn12295237\tsoapwort gentian, Gentiana saponaria\nn12295429\tstriped gentian, Gentiana villosa\nn12295796\tagueweed, ague weed, five-flowered gentian, stiff gentian, Gentianella quinquefolia, Gentiana quinquefolia\nn12296045\tfelwort, gentianella amarella\nn12296432\tfringed gentian\nn12296735\tGentianopsis crinita, Gentiana crinita\nn12296929\tGentianopsis detonsa, Gentiana detonsa\nn12297110\tGentianopsid procera, Gentiana procera\nn12297280\tGentianopsis thermalis, Gentiana thermalis\nn12297507\ttufted gentian, Gentianopsis holopetala, Gentiana holopetala\nn12297846\tspurred gentian\nn12298165\tsabbatia\nn12299640\ttoothbrush tree, mustard tree, Salvadora persica\nn12300840\tolive tree\nn12301180\tolive, European olive tree, Olea europaea\nn12301445\tolive\nn12301613\tblack maire, Olea cunninghamii\nn12301766\twhite maire, Olea lanceolata\nn12302071\tfringe tree\nn12302248\tfringe bush, 
Chionanthus virginicus\nn12302565\tforestiera\nn12303083\tforsythia\nn12303462\tash, ash tree\nn12304115\twhite ash, Fraxinus Americana\nn12304286\tswamp ash, Fraxinus caroliniana\nn12304420\tflowering ash, Fraxinus cuspidata\nn12304703\tEuropean ash, common European ash, Fraxinus excelsior\nn12304899\tOregon ash, Fraxinus latifolia, Fraxinus oregona\nn12305089\tblack ash, basket ash, brown ash, hoop ash, Fraxinus nigra\nn12305293\tmanna ash, flowering ash, Fraxinus ornus\nn12305475\tred ash, downy ash, Fraxinus pennsylvanica\nn12305654\tgreen ash, Fraxinus pennsylvanica subintegerrima\nn12305819\tblue ash, Fraxinus quadrangulata\nn12305986\tmountain ash, Fraxinus texensis\nn12306089\tpumpkin ash, Fraxinus tomentosa\nn12306270\tArizona ash, Fraxinus velutina\nn12306717\tjasmine\nn12306938\tprimrose jasmine, Jasminum mesnyi\nn12307076\twinter jasmine, Jasminum nudiflorum\nn12307240\tcommon jasmine, true jasmine, jessamine, Jasminum officinale\nn12307756\tprivet\nn12308112\tAmur privet, Ligustrum amurense\nn12308447\tJapanese privet, Ligustrum japonicum\nn12308907\tLigustrum obtusifolium\nn12309277\tcommon privet, Ligustrum vulgare\nn12309630\tdevilwood, American olive, Osmanthus americanus\nn12310021\tmock privet\nn12310349\tlilac\nn12310638\tHimalayan lilac, Syringa emodi\nn12311045\tPersian lilac, Syringa persica\nn12311224\tJapanese tree lilac, Syringa reticulata, Syringa amurensis japonica\nn12311413\tJapanese lilac, Syringa villosa\nn12311579\tcommon lilac, Syringa vulgaris\nn12312110\tbloodwort\nn12312728\tkangaroo paw, kangaroo's paw, kangaroo's-foot, kangaroo-foot plant, Australian sword lily, Anigozanthus manglesii\nn12315060\tVirginian witch hazel, Hamamelis virginiana\nn12315245\tvernal witch hazel, Hamamelis vernalis\nn12315598\twinter hazel, flowering hazel\nn12315999\tfothergilla, witch alder\nn12316444\tliquidambar\nn12316572\tsweet gum, sweet gum tree, bilsted, red gum, American sweet gum, Liquidambar styraciflua\nn12317296\tiron tree, iron-tree, 
ironwood, ironwood tree\nn12318378\twalnut, walnut tree\nn12318782\tCalifornia black walnut, Juglans californica\nn12318965\tbutternut, butternut tree, white walnut, Juglans cinerea\nn12319204\tblack walnut, black walnut tree, black hickory, Juglans nigra\nn12319414\tEnglish walnut, English walnut tree, Circassian walnut, Persian walnut, Juglans regia\nn12320010\thickory, hickory tree\nn12320414\twater hickory, bitter pecan, water bitternut, Carya aquatica\nn12320627\tpignut, pignut hickory, brown hickory, black hickory, Carya glabra\nn12320806\tbitternut, bitternut hickory, bitter hickory, bitter pignut, swamp hickory, Carya cordiformis\nn12321077\tpecan, pecan tree, Carya illinoensis, Carya illinoinsis\nn12321395\tbig shellbark, big shellbark hickory, big shagbark, king nut, king nut hickory, Carya laciniosa\nn12321669\tnutmeg hickory, Carya myristicaeformis, Carya myristiciformis\nn12321873\tshagbark, shagbark hickory, shellbark, shellbark hickory, Carya ovata\nn12322099\tmockernut, mockernut hickory, black hickory, white-heart hickory, big-bud hickory, Carya tomentosa\nn12322501\twing nut, wing-nut\nn12322699\tCaucasian walnut, Pterocarya fraxinifolia\nn12323665\tdhawa, dhava\nn12324056\tcombretum\nn12324222\thiccup nut, hiccough nut, Combretum bracteosum\nn12324388\tbush willow, Combretum appiculatum\nn12324558\tbush willow, Combretum erythrophyllum\nn12324906\tbutton tree, button mangrove, Conocarpus erectus\nn12325234\twhite mangrove, Laguncularia racemosa\nn12325787\toleaster\nn12327022\twater milfoil\nn12327528\tanchovy pear, anchovy pear tree, Grias cauliflora\nn12327846\tbrazil nut, brazil-nut tree, Bertholletia excelsa\nn12328398\tloosestrife\nn12328567\tpurple loosestrife, spiked loosestrife, Lythrum salicaria\nn12328801\tgrass poly, hyssop loosestrife, Lythrum hyssopifolia\nn12329260\tcrape myrtle, crepe myrtle, crepe flower, Lagerstroemia indica\nn12329473\tQueen's crape myrtle, pride-of-India, Lagerstroemia speciosa\nn12330239\tmyrtaceous 
tree\nn12330469\tmyrtle\nn12330587\tcommon myrtle, Myrtus communis\nn12330891\tbayberry, bay-rum tree, Jamaica bayberry, wild cinnamon, Pimenta acris\nn12331066\tallspice, allspice tree, pimento tree, Pimenta dioica\nn12331263\tallspice tree, Pimenta officinalis\nn12331655\tsour cherry, Eugenia corynantha\nn12331788\tnakedwood, Eugenia dicrana\nn12332030\tSurinam cherry, pitanga, Eugenia uniflora\nn12332218\trose apple, rose-apple tree, jambosa, Eugenia jambos\nn12332555\tfeijoa, feijoa bush\nn12333053\tjaboticaba, jaboticaba tree, Myrciaria cauliflora\nn12333530\tguava, true guava, guava bush, Psidium guajava\nn12333771\tguava, strawberry guava, yellow cattley guava, Psidium littorale\nn12333961\tcattley guava, purple strawberry guava, Psidium cattleianum, Psidium littorale longipes\nn12334153\tBrazilian guava, Psidium guineense\nn12334293\tgum tree, gum\nn12334891\teucalyptus, eucalypt, eucalyptus tree\nn12335483\tflooded gum\nn12335664\tmallee\nn12335800\tstringybark\nn12335937\tsmoothbark\nn12336092\tred gum, peppermint, peppermint gum, Eucalyptus amygdalina\nn12336224\tred gum, marri, Eucalyptus calophylla\nn12336333\triver red gum, river gum, Eucalyptus camaldulensis, Eucalyptus rostrata\nn12336586\tmountain swamp gum, Eucalyptus camphora\nn12336727\tsnow gum, ghost gum, white ash, Eucalyptus coriacea, Eucalyptus pauciflora\nn12336973\talpine ash, mountain oak, Eucalyptus delegatensis\nn12337131\twhite mallee, congoo mallee, Eucalyptus dumosa\nn12337246\twhite stringybark, thin-leaved stringybark, Eucalyptusd eugenioides\nn12337391\twhite mountain ash, Eucalyptus fraxinoides\nn12337617\tblue gum, fever tree, Eucalyptus globulus\nn12337800\trose gum, Eucalypt grandis\nn12337922\tcider gum, Eucalypt gunnii\nn12338034\tswamp gum, Eucalypt ovata\nn12338146\tspotted gum, Eucalyptus maculata\nn12338258\tlemon-scented gum, Eucalyptus citriodora, Eucalyptus maculata citriodora\nn12338454\tblack mallee, black sally, black gum, Eucalytus stellulata\nn12338655\tforest 
red gum, Eucalypt tereticornis\nn12338796\tmountain ash, Eucalyptus regnans\nn12338979\tmanna gum, Eucalyptus viminalis\nn12339526\tclove, clove tree, Syzygium aromaticum, Eugenia aromaticum, Eugenia caryophyllatum\nn12339831\tclove\nn12340383\ttupelo, tupelo tree\nn12340581\twater gum, Nyssa aquatica\nn12340755\tsour gum, black gum, pepperidge, Nyssa sylvatica\nn12341542\tenchanter's nightshade\nn12341931\tCircaea lutetiana\nn12342299\twillowherb\nn12342498\tfireweed, giant willowherb, rosebay willowherb, wickup, Epilobium angustifolium\nn12342852\tCalifornia fuchsia, humming bird's trumpet, Epilobium canum canum, Zauschneria californica\nn12343480\tfuchsia\nn12343753\tlady's-eardrop, ladies'-eardrop, lady's-eardrops, ladies'-eardrops, Fuchsia coccinea\nn12344283\tevening primrose\nn12344483\tcommon evening primrose, German rampion, Oenothera biennis\nn12344700\tsundrops, Oenothera fruticosa\nn12344837\tMissouri primrose, Ozark sundrops, Oenothera macrocarpa\nn12345280\tpomegranate, pomegranate tree, Punica granatum\nn12345899\tmangrove, Rhizophora mangle\nn12346578\tdaphne\nn12346813\tgarland flower, Daphne cneorum\nn12346986\tspurge laurel, wood laurel, Daphne laureola\nn12347158\tmezereon, February daphne, Daphne mezereum\nn12349315\tIndian rhododendron, Melastoma malabathricum\nn12349711\tMedinilla magnifica\nn12350032\tdeer grass, meadow beauty\nn12350758\tcanna\nn12351091\tachira, indian shot, arrowroot, Canna indica, Canna edulis\nn12351790\tarrowroot, American arrowroot, obedience plant, Maranta arundinaceae\nn12352287\tbanana, banana tree\nn12352639\tdwarf banana, Musa acuminata\nn12352844\tJapanese banana, Musa basjoo\nn12352990\tplantain, plantain tree, Musa paradisiaca\nn12353203\tedible banana, Musa paradisiaca sapientum\nn12353431\tabaca, Manila hemp, Musa textilis\nn12353754\tAbyssinian banana, Ethiopian banana, Ensete ventricosum, Musa ensete\nn12355760\tginger\nn12356023\tcommon ginger, Canton ginger, stem ginger, Zingiber 
officinale\nn12356395\tturmeric, Curcuma longa, Curcuma domestica\nn12356960\tgalangal, Alpinia galanga\nn12357485\tshellflower, shall-flower, shell ginger, Alpinia Zerumbet, Alpinia speciosa, Languas speciosa\nn12357968\tgrains of paradise, Guinea grains, Guinea pepper, melagueta pepper, Aframomum melegueta\nn12358293\tcardamom, cardamon, Elettaria cardamomum\nn12360108\tbegonia\nn12360534\tfibrous-rooted begonia\nn12360684\ttuberous begonia\nn12360817\trhizomatous begonia\nn12360958\tChristmas begonia, blooming-fool begonia, Begonia cheimantha\nn12361135\tangel-wing begonia, Begonia cocchinea\nn12361560\tbeefsteak begonia, kidney begonia, Begonia erythrophylla, Begonia feastii\nn12361754\tstar begonia, star-leaf begonia, Begonia heracleifolia\nn12361946\trex begonia, king begonia, painted-leaf begonia, beefsteak geranium, Begonia rex\nn12362274\twax begonia, Begonia semperflorens\nn12362514\tSocotra begonia, Begonia socotrana\nn12362668\thybrid tuberous begonia, Begonia tuberhybrida\nn12363301\tdillenia\nn12363768\tguinea gold vine, guinea flower\nn12364604\tpoon\nn12364940\tcalaba, Santa Maria tree, Calophyllum calaba\nn12365158\tMaria, Calophyllum longifolium\nn12365285\tlaurelwood, lancewood tree, Calophyllum candidissimum\nn12365462\tAlexandrian laurel, Calophyllum inophyllum\nn12365900\tclusia\nn12366053\twild fig, Clusia flava\nn12366186\twaxflower, Clusia insignis\nn12366313\tpitch apple, strangler fig, Clusia rosea, Clusia major\nn12366675\tmangosteen, mangosteen tree, Garcinia mangostana\nn12366870\tgamboge tree, Garcinia hanburyi, Garcinia cambogia, Garcinia gummi-gutta\nn12367611\tSt John's wort\nn12368028\tcommon St John's wort, tutsan, Hypericum androsaemum\nn12368257\tgreat St John's wort, Hypericum ascyron, Hypericum pyramidatum\nn12368451\tcreeping St John's wort, Hypericum calycinum\nn12369066\tlow St Andrew's cross, Hypericum hypericoides\nn12369309\tklammath weed, Hypericum perforatum\nn12369476\tshrubby St John's wort, Hypericum prolificum, 
Hypericum spathulatum\nn12369665\tSt Peter's wort, Hypericum tetrapterum, Hypericum maculatum\nn12369845\tmarsh St-John's wort, Hypericum virginianum\nn12370174\tmammee apple, mammee, mamey, mammee tree, Mammea americana\nn12370549\trose chestnut, ironwood, ironwood tree, Mesua ferrea\nn12371202\tbower actinidia, tara vine, Actinidia arguta\nn12371439\tChinese gooseberry, kiwi, kiwi vine, Actinidia chinensis, Actinidia deliciosa\nn12371704\tsilvervine, silver vine, Actinidia polygama\nn12372233\twild cinnamon, white cinnamon tree, Canella winterana, Canella-alba\nn12373100\tpapaya, papaia, pawpaw, papaya tree, melon tree, Carica papaya\nn12373739\tsouari, souari nut, souari tree, Caryocar nuciferum\nn12374418\trockrose, rock rose\nn12374705\twhite-leaved rockrose, Cistus albidus\nn12374862\tcommon gum cistus, Cistus ladanifer, Cistus ladanum\nn12375769\tfrostweed, frost-weed, frostwort, Helianthemum canadense, Crocanthemum canadense\nn12377198\tdipterocarp\nn12377494\tred lauan, red lauan tree, Shorea teysmanniana\nn12378249\tgovernor's plum, governor plum, Madagascar plum, ramontchi, batoko palm, Flacourtia indica\nn12378753\tkei apple, kei apple bush, Dovyalis caffra\nn12378963\tketembilla, kitembilla, kitambilla, ketembilla tree, Ceylon gooseberry, Dovyalis hebecarpa\nn12379531\tchaulmoogra, chaulmoogra tree, chaulmugra, Hydnocarpus kurzii, Taraktagenos kurzii, Taraktogenos kurzii\nn12380761\twild peach, Kiggelaria africana\nn12381511\tcandlewood\nn12382233\tboojum tree, cirio, Fouquieria columnaris, Idria columnaris\nn12382875\tbird's-eye bush, Ochna serrulata\nn12383737\tgranadilla, purple granadillo, Passiflora edulis\nn12383894\tgranadilla, sweet granadilla, Passiflora ligularis\nn12384037\tgranadilla, giant granadilla, Passiflora quadrangularis\nn12384227\tmaypop, Passiflora incarnata\nn12384375\tJamaica honeysuckle, yellow granadilla, Passiflora laurifolia\nn12384569\tbanana passion fruit, Passiflora mollissima\nn12384680\tsweet calabash, Passiflora 
maliformis\nn12384839\tlove-in-a-mist, running pop, wild water lemon, Passiflora foetida\nn12385429\treseda\nn12385566\tmignonette, sweet reseda, Reseda odorata\nn12385830\tdyer's rocket, dyer's mignonette, weld, Reseda luteola\nn12386945\tfalse tamarisk, German tamarisk, Myricaria germanica\nn12387103\thalophyte\nn12387633\tviola\nn12387839\tviolet\nn12388143\tfield pansy, heartsease, Viola arvensis\nn12388293\tAmerican dog violet, Viola conspersa\nn12388858\tdog violet, heath violet, Viola canina\nn12388989\thorned violet, tufted pansy, Viola cornuta\nn12389130\ttwo-eyed violet, heartsease, Viola ocellata\nn12389501\tbird's-foot violet, pansy violet, Johnny-jump-up, wood violet, Viola pedata\nn12389727\tdowny yellow violet, Viola pubescens\nn12389932\tlong-spurred violet, Viola rostrata\nn12390099\tpale violet, striped violet, cream violet, Viola striata\nn12390314\thedge violet, wood violet, Viola sylvatica, Viola reichenbachiana\nn12392070\tnettle\nn12392549\tstinging nettle, Urtica dioica\nn12392765\tRoman nettle, Urtica pipulifera\nn12393269\tramie, ramee, Chinese silk plant, China grass, Boehmeria nivea\nn12394118\twood nettle, Laportea canadensis\nn12394328\tAustralian nettle, Australian nettle tree\nn12394638\tpellitory-of-the-wall, wall pellitory, pellitory, Parietaria difussa\nn12395068\trichweed, clearweed, dead nettle, Pilea pumilla\nn12395289\tartillery plant, Pilea microphylla\nn12395463\tfriendship plant, panamica, panamiga, Pilea involucrata\nn12395906\tQueensland grass-cloth plant, Pipturus argenteus\nn12396091\tPipturus albidus\nn12396924\tcannabis, hemp\nn12397431\tIndian hemp, Cannabis indica\nn12399132\tmulberry, mulberry tree\nn12399384\twhite mulberry, Morus alba\nn12399534\tblack mulberry, Morus nigra\nn12399656\tred mulberry, Morus rubra\nn12399899\tosage orange, bow wood, mock orange, Maclura pomifera\nn12400489\tbreadfruit, breadfruit tree, Artocarpus communis, Artocarpus altilis\nn12400720\tjackfruit, jackfruit tree, Artocarpus 
heterophyllus\nn12400924\tmarang, marang tree, Artocarpus odoratissima\nn12401335\tfig tree\nn12401684\tfig, common fig, common fig tree, Ficus carica\nn12401893\tcaprifig, Ficus carica sylvestris\nn12402051\tgolden fig, Florida strangler fig, strangler fig, wild fig, Ficus aurea\nn12402348\tbanyan, banyan tree, banian, banian tree, Indian banyan, East Indian fig tree, Ficus bengalensis\nn12402596\tpipal, pipal tree, pipul, peepul, sacred fig, bo tree, Ficus religiosa\nn12402840\tIndia-rubber tree, India-rubber plant, India-rubber fig, rubber plant, Assam rubber, Ficus elastica\nn12403075\tmistletoe fig, mistletoe rubber plant, Ficus diversifolia, Ficus deltoidea\nn12403276\tPort Jackson fig, rusty rig, little-leaf fig, Botany Bay fig, Ficus rubiginosa\nn12403513\tsycamore, sycamore fig, mulberry fig, Ficus sycomorus\nn12403994\tpaper mulberry, Broussonetia papyrifera\nn12404729\ttrumpetwood, trumpet-wood, trumpet tree, snake wood, imbauba, Cecropia peltata\nn12405714\telm, elm tree\nn12406304\twinged elm, wing elm, Ulmus alata\nn12406488\tAmerican elm, white elm, water elm, rock elm, Ulmus americana\nn12406715\tsmooth-leaved elm, European field elm, Ulmus carpinifolia\nn12406902\tcedar elm, Ulmus crassifolia\nn12407079\twitch elm, wych elm, Ulmus glabra\nn12407222\tDutch elm, Ulmus hollandica\nn12407396\tHuntingdon elm, Ulmus hollandica vegetata\nn12407545\twater elm, Ulmus laevis\nn12407715\tChinese elm, Ulmus parvifolia\nn12407890\tEnglish elm, European elm, Ulmus procera\nn12408077\tSiberian elm, Chinese elm, dwarf elm, Ulmus pumila\nn12408280\tslippery elm, red elm, Ulmus rubra\nn12408466\tJersey elm, guernsey elm, wheately elm, Ulmus sarniensis, Ulmus campestris sarniensis, Ulmus campestris wheatleyi\nn12408717\tSeptember elm, red elm, Ulmus serotina\nn12408873\trock elm, Ulmus thomasii\nn12409231\thackberry, nettle tree\nn12409470\tEuropean hackberry, Mediterranean hackberry, Celtis australis\nn12409651\tAmerican hackberry, Celtis 
occidentalis\nn12409840\tsugarberry, Celtis laevigata\nn12411461\tiridaceous plant\nn12412355\tbearded iris\nn12412606\tbeardless iris\nn12412987\torrisroot, orris\nn12413165\tdwarf iris, Iris cristata\nn12413301\tDutch iris, Iris filifolia\nn12413419\tFlorentine iris, orris, Iris germanica florentina, Iris florentina\nn12413642\tstinking iris, gladdon, gladdon iris, stinking gladwyn, roast beef plant, Iris foetidissima\nn12413880\tGerman iris, Iris germanica\nn12414035\tJapanese iris, Iris kaempferi\nn12414159\tGerman iris, Iris kochii\nn12414329\tDalmatian iris, Iris pallida\nn12414449\tPersian iris, Iris persica\nn12414818\tDutch iris, Iris tingitana\nn12414932\tdwarf iris, vernal iris, Iris verna\nn12415595\tSpanish iris, xiphium iris, Iris xiphium\nn12416073\tblackberry-lily, leopard lily, Belamcanda chinensis\nn12416423\tcrocus\nn12416703\tsaffron, saffron crocus, Crocus sativus\nn12417836\tcorn lily\nn12418221\tblue-eyed grass\nn12418507\twandflower, Sparaxis tricolor\nn12419037\tamaryllis\nn12419878\tsalsilla, Bomarea edulis\nn12420124\tsalsilla, Bomarea salsilla\nn12420535\tblood lily\nn12420722\tCape tulip, Haemanthus coccineus\nn12421137\thippeastrum, Hippeastrum puniceum\nn12421467\tnarcissus\nn12421683\tdaffodil, Narcissus pseudonarcissus\nn12421917\tjonquil, Narcissus jonquilla\nn12422129\tjonquil\nn12422559\tJacobean lily, Aztec lily, Strekelia formosissima\nn12425281\tliliaceous plant\nn12426623\tmountain lily, Lilium auratum\nn12426749\tCanada lily, wild yellow lily, meadow lily, wild meadow lily, Lilium canadense\nn12427184\ttiger lily, leopard lily, pine lily, Lilium catesbaei\nn12427391\tColumbia tiger lily, Oregon lily, Lilium columbianum\nn12427566\ttiger lily, devil lily, kentan, Lilium lancifolium\nn12427757\tEaster lily, Bermuda lily, white trumpet lily, Lilium longiflorum\nn12427946\tcoast lily, Lilium maritinum\nn12428076\tTurk's-cap, martagon, Lilium martagon\nn12428242\tMichigan lily, Lilium michiganense\nn12428412\tleopard lily, 
panther lily, Lilium pardalinum\nn12428747\tTurk's-cap, Turk's cap-lily, Lilium superbum\nn12429352\tAfrican lily, African tulip, blue African lily, Agapanthus africanus\nn12430198\tcolicroot, colic root, crow corn, star grass, unicorn root\nn12430471\tague root, ague grass, Aletris farinosa\nn12430675\tyellow colicroot, Aletris aurea\nn12431434\talliaceous plant\nn12432069\tHooker's onion, Allium acuminatum\nn12432356\twild leek, Levant garlic, kurrat, Allium ampeloprasum\nn12432574\tCanada garlic, meadow leek, rose leek, Allium canadense\nn12432707\tkeeled garlic, Allium carinatum\nn12433081\tonion\nn12433178\tshallot, eschalot, multiplier onion, Allium cepa aggregatum, Allium ascalonicum\nn12433769\tnodding onion, nodding wild onion, lady's leek, Allium cernuum\nn12433952\tWelsh onion, Japanese leek, Allium fistulosum\nn12434106\tred-skinned onion, Allium haematochiton\nn12434483\tdaffodil garlic, flowering onion, Naples garlic, Allium neopolitanum\nn12434634\tfew-flowered leek, Allium paradoxum\nn12434775\tgarlic, Allium sativum\nn12434985\tsand leek, giant garlic, Spanish garlic, rocambole, Allium scorodoprasum\nn12435152\tchives, chive, cive, schnittlaugh, Allium schoenoprasum\nn12435486\tcrow garlic, false garlic, field garlic, stag's garlic, wild garlic, Allium vineale\nn12435649\twild garlic, wood garlic, Ramsons, Allium ursinum\nn12435777\tgarlic chive, Chinese chive, Oriental garlic, Allium tuberosum\nn12435965\tround-headed leek, Allium sphaerocephalum\nn12436090\tthree-cornered leek, triquetrous leek, Allium triquetrum\nn12436907\tcape aloe, Aloe ferox\nn12437513\tkniphofia, tritoma, flame flower, flame-flower, flameflower\nn12437769\tpoker plant, Kniphofia uvaria\nn12437930\tred-hot poker, Kniphofia praecox\nn12439154\tfly poison, Amianthum muscaetoxicum, Amianthum muscitoxicum\nn12439830\tamber lily, Anthericum torreyi\nn12441183\tasparagus, edible asparagus, Asparagus officinales\nn12441390\tasparagus fern, Asparagus setaceous, Asparagus 
plumosus\nn12441552\tsmilax, Asparagus asparagoides\nn12441958\tasphodel\nn12442548\tJacob's rod\nn12443323\taspidistra, cast-iron plant, bar-room plant, Aspidistra elatio\nn12443736\tcoral drops, Bessera elegans\nn12444095\tChristmas bells\nn12444898\tclimbing onion, Bowiea volubilis\nn12446200\tmariposa, mariposa tulip, mariposa lily\nn12446519\tglobe lily, fairy lantern\nn12446737\tcat's-ear\nn12446908\twhite globe lily, white fairy lantern, Calochortus albus\nn12447121\tyellow globe lily, golden fairy lantern, Calochortus amabilis\nn12447346\trose globe lily, Calochortus amoenus\nn12447581\tstar tulip, elegant cat's ears, Calochortus elegans\nn12447891\tdesert mariposa tulip, Calochortus kennedyi\nn12448136\tyellow mariposa tulip, Calochortus luteus\nn12448361\tsagebrush mariposa tulip, Calochortus macrocarpus\nn12448700\tsego lily, Calochortus nuttallii\nn12449296\tcamas, camass, quamash, camosh, camash\nn12449526\tcommon camas, Camassia quamash\nn12449784\tLeichtlin's camas, Camassia leichtlinii\nn12449934\twild hyacinth, indigo squill, Camassia scilloides\nn12450344\tdogtooth violet, dogtooth, dog's-tooth violet\nn12450607\twhite dogtooth violet, white dog's-tooth violet, blonde lilian, Erythronium albidum\nn12450840\tyellow adder's tongue, trout lily, amberbell, Erythronium americanum\nn12451070\tEuropean dogtooth, Erythronium dens-canis\nn12451240\tfawn lily, Erythronium californicum\nn12451399\tglacier lily, snow lily, Erythronium grandiflorum\nn12451566\tavalanche lily, Erythronium montanum\nn12451915\tfritillary, checkered lily\nn12452256\tmission bells, rice-grain fritillary, Fritillaria affinis, Fritillaria lanceolata, Fritillaria mutica\nn12452480\tmission bells, black fritillary, Fritillaria biflora\nn12452673\tstink bell, Fritillaria agrestis\nn12452836\tcrown imperial, Fritillaria imperialis\nn12453018\twhite fritillary, Fritillaria liliaceae\nn12453186\tsnake's head fritillary, guinea-hen flower, checkered daffodil, leper lily, Fritillaria 
meleagris\nn12453714\tadobe lily, pink fritillary, Fritillaria pluriflora\nn12453857\tscarlet fritillary, Fritillaria recurva\nn12454159\ttulip\nn12454436\tdwarf tulip, Tulipa armena, Tulipa suaveolens\nn12454556\tlady tulip, candlestick tulip, Tulipa clusiana\nn12454705\tTulipa gesneriana\nn12454793\tcottage tulip\nn12454949\tDarwin tulip\nn12455950\tgloriosa, glory lily, climbing lily, creeping lily, Gloriosa superba\nn12457091\tlemon lily, Hemerocallis lilio-asphodelus, Hemerocallis flava\nn12458550\tcommon hyacinth, Hyacinthus orientalis\nn12458713\tRoman hyacinth, Hyacinthus orientalis albulus\nn12458874\tsummer hyacinth, cape hyacinth, Hyacinthus candicans, Galtonia candicans\nn12459629\tstar-of-Bethlehem\nn12460146\tbath asparagus, Prussian asparagus, Ornithogalum pyrenaicum\nn12460697\tgrape hyacinth\nn12460957\tcommon grape hyacinth, Muscari neglectum\nn12461109\ttassel hyacinth, Muscari comosum\nn12461466\tscilla, squill\nn12461673\tspring squill, Scilla verna, sea onion\nn12462032\tfalse asphodel\nn12462221\tScotch asphodel, Tofieldia pusilla\nn12462582\tsea squill, sea onion, squill, Urginea maritima\nn12462805\tsquill\nn12463134\tbutcher's broom, Ruscus aculeatus\nn12463743\tbog asphodel\nn12463975\tEuropean bog asphodel, Narthecium ossifragum\nn12464128\tAmerican bog asphodel, Narthecium americanum\nn12464476\thellebore, false hellebore\nn12464649\twhite hellebore, American hellebore, Indian poke, bugbane, Veratrum viride\nn12465557\tsquaw grass, bear grass, Xerophyllum tenax\nn12466727\tdeath camas, zigadene\nn12467018\talkali grass, Zigadenus elegans\nn12467197\twhite camas, Zigadenus glaucus\nn12467433\tpoison camas, Zigadenus nuttalli\nn12467592\tgrassy death camas, Zigadenus venenosus, Zigadenus venenosus gramineus\nn12468545\tprairie wake-robin, prairie trillium, Trillium recurvatum\nn12468719\tdwarf-white trillium, snow trillium, early wake-robin\nn12469517\therb Paris, Paris quadrifolia\nn12470092\tsarsaparilla\nn12470512\tbullbrier, 
greenbrier, catbrier, horse brier, horse-brier, brier, briar, Smilax rotundifolia\nn12470907\trough bindweed, Smilax aspera\nn12472024\tclintonia, Clinton's lily\nn12473608\tfalse lily of the valley, Maianthemum canadense\nn12473840\tfalse lily of the valley, Maianthemum bifolium\nn12474167\tSolomon's-seal\nn12474418\tgreat Solomon's-seal, Polygonatum biflorum, Polygonatum commutatum\nn12475035\tbellwort, merry bells, wild oats\nn12475242\tstrawflower, cornflower, Uvularia grandiflora\nn12475774\tpia, Indian arrowroot, Tacca leontopetaloides, Tacca pinnatifida\nn12476510\tagave, century plant, American aloe\nn12477163\tAmerican agave, Agave americana\nn12477401\tsisal, Agave sisalana\nn12477583\tmaguey, cantala, Agave cantala\nn12477747\tmaguey, Agave atrovirens\nn12477983\tAgave tequilana\nn12478768\tcabbage tree, grass tree, Cordyline australis\nn12479537\tdracaena\nn12480456\ttuberose, Polianthes tuberosa\nn12480895\tsansevieria, bowstring hemp\nn12481150\tAfrican bowstring hemp, African hemp, Sansevieria guineensis\nn12481289\tCeylon bowstring hemp, Sansevieria zeylanica\nn12481458\tmother-in-law's tongue, snake plant, Sansevieria trifasciata\nn12482437\tSpanish bayonet, Yucca aloifolia\nn12482668\tSpanish bayonet, Yucca baccata\nn12482893\tJoshua tree, Yucca brevifolia\nn12483282\tsoapweed, soap-weed, soap tree, Yucca elata\nn12483427\tAdam's needle, Adam's needle-and-thread, spoonleaf yucca, needle palm, Yucca filamentosa\nn12483625\tbear grass, Yucca glauca\nn12483841\tSpanish dagger, Yucca gloriosa\nn12484244\tOur Lord's candle, Yucca whipplei\nn12484784\twater shamrock, buckbean, bogbean, bog myrtle, marsh trefoil, Menyanthes trifoliata\nn12485653\tbutterfly bush, buddleia\nn12485981\tyellow jasmine, yellow jessamine, Carolina jasmine, evening trumpet flower, Gelsemium sempervirens\nn12486574\tflax\nn12487058\tcalabar bean, ordeal bean\nn12488454\tbonduc, bonduc tree, Caesalpinia bonduc, Caesalpinia bonducella\nn12488709\tdivi-divi, Caesalpinia 
coriaria\nn12489046\tMysore thorn, Caesalpinia decapetala, Caesalpinia sepiaria\nn12489676\tbrazilian ironwood, Caesalpinia ferrea\nn12489815\tbird of paradise, poinciana, Caesalpinia gilliesii, Poinciana gilliesii\nn12490490\tshingle tree, Acrocarpus fraxinifolius\nn12491017\tmountain ebony, orchid tree, Bauhinia variegata\nn12491435\tmsasa, Brachystegia speciformis\nn12491826\tcassia\nn12492106\tgolden shower tree, drumstick tree, purging cassia, pudding pipe tree, canafistola, canafistula, Cassia fistula\nn12492460\tpink shower, pink shower tree, horse cassia, Cassia grandis\nn12492682\trainbow shower, Cassia javonica\nn12492900\thorse cassia, Cassia roxburghii, Cassia marginata\nn12493208\tcarob, carob tree, carob bean tree, algarroba, Ceratonia siliqua\nn12493426\tcarob, carob bean, algarroba bean, algarroba, locust bean, locust pod\nn12493868\tpaloverde\nn12494794\troyal poinciana, flamboyant, flame tree, peacock flower, Delonix regia, Poinciana regia\nn12495146\tlocust tree, locust\nn12495670\twater locust, swamp locust, Gleditsia aquatica\nn12495895\thoney locust, Gleditsia triacanthos\nn12496427\tKentucky coffee tree, bonduc, chicot, Gymnocladus dioica\nn12496949\tlogwood, logwood tree, campeachy, bloodwood tree, Haematoxylum campechianum\nn12497669\tJerusalem thorn, horsebean, Parkinsonia aculeata\nn12498055\tpalo verde, Parkinsonia florida, Cercidium floridum\nn12498457\tDalmatian laburnum, Petteria ramentacea, Cytisus ramentaceus\nn12499163\tsenna\nn12499757\tavaram, tanner's cassia, Senna auriculata, Cassia auriculata\nn12499979\tAlexandria senna, Alexandrian senna, true senna, tinnevelly senna, Indian senna, Senna alexandrina, Cassia acutifolia, Cassia augustifolia\nn12500309\twild senna, Senna marilandica, Cassia marilandica\nn12500518\tsicklepod, Senna obtusifolia, Cassia tora\nn12500751\tcoffee senna, mogdad coffee, styptic weed, stinking weed, Senna occidentalis, Cassia occidentalis\nn12501202\ttamarind, tamarind tree, tamarindo, Tamarindus 
indica\nn12504570\tfalse indigo, bastard indigo, Amorpha californica\nn12504783\tfalse indigo, bastard indigo, Amorpha fruticosa\nn12505253\thog peanut, wild peanut, Amphicarpaea bracteata, Amphicarpa bracteata\nn12506181\tangelim, andelmin\nn12506341\tcabbage bark, cabbage-bark tree, cabbage tree, Andira inermis\nn12506991\tkidney vetch, Anthyllis vulneraria\nn12507379\tgroundnut, groundnut vine, Indian potato, potato bean, wild bean, Apios americana, Apios tuberosa\nn12507823\trooibos, Aspalathus linearis, Aspalathus cedcarbergensis\nn12508309\tmilk vetch, milk-vetch\nn12508618\talpine milk vetch, Astragalus alpinus\nn12508762\tpurple milk vetch, Astragalus danicus\nn12509109\tcamwood, African sandalwood, Baphia nitida\nn12509476\twild indigo, false indigo\nn12509665\tblue false indigo, Baptisia australis\nn12509821\twhite false indigo, Baptisia lactea\nn12509993\tindigo broom, horsefly weed, rattle weed, Baptisia tinctoria\nn12510343\tdhak, dak, palas, Butea frondosa, Butea monosperma\nn12510774\tpigeon pea, pigeon-pea plant, cajan pea, catjang pea, red gram, dhal, dahl, Cajanus cajan\nn12511488\tsword bean, Canavalia gladiata\nn12511856\tpea tree, caragana\nn12512095\tSiberian pea tree, Caragana arborescens\nn12512294\tChinese pea tree, Caragana sinica\nn12512674\tMoreton Bay chestnut, Australian chestnut\nn12513172\tbutterfly pea, Centrosema virginianum\nn12513613\tJudas tree, love tree, Circis siliquastrum\nn12513933\tredbud, Cercis canadensis\nn12514138\twestern redbud, California redbud, Cercis occidentalis\nn12514592\ttagasaste, Chamaecytisus palmensis, Cytesis proliferus\nn12514992\tweeping tree broom\nn12515393\tflame pea\nn12515711\tchickpea, chickpea plant, Egyptian pea, Cicer arietinum\nn12515925\tchickpea, garbanzo\nn12516165\tKentucky yellowwood, gopherwood, Cladrastis lutea, Cladrastis kentukea\nn12516584\tglory pea, clianthus\nn12516828\tdesert pea, Sturt pea, Sturt's desert pea, Clianthus formosus, Clianthus speciosus\nn12517077\tparrot's beak, 
parrot's bill, Clianthus puniceus\nn12517445\tbutterfly pea, Clitoria mariana\nn12517642\tblue pea, butterfly pea, Clitoria turnatea\nn12518013\ttelegraph plant, semaphore plant, Codariocalyx motorius, Desmodium motorium, Desmodium gyrans\nn12518481\tbladder senna, Colutea arborescens\nn12519089\taxseed, crown vetch, Coronilla varia\nn12519563\tcrotalaria, rattlebox\nn12520406\tguar, cluster bean, Cyamopsis tetragonolobus, Cyamopsis psoraloides\nn12521186\twhite broom, white Spanish broom, Cytisus albus, Cytisus multiflorus\nn12521394\tcommon broom, Scotch broom, green broom, Cytisus scoparius\nn12522188\trosewood, rosewood tree\nn12522678\tIndian blackwood, East Indian rosewood, East India rosewood, Indian rosewood, Dalbergia latifolia\nn12522894\tsissoo, sissu, sisham, Dalbergia sissoo\nn12523141\tkingwood, kingwood tree, Dalbergia cearensis\nn12523475\tBrazilian rosewood, caviuna wood, jacaranda, Dalbergia nigra\nn12523850\tcocobolo, Dalbergia retusa\nn12524188\tblackwood, blackwood tree\nn12525168\tbitter pea\nn12525513\tderris\nn12525753\tderris root, tuba root, Derris elliptica\nn12526178\tprairie mimosa, prickle-weed, Desmanthus ilinoensis\nn12526516\ttick trefoil, beggar lice, beggar's lice\nn12526754\tbeggarweed, Desmodium tortuosum, Desmodium purpureum\nn12527081\tAustralian pea, Dipogon lignosus, Dolichos lignosus\nn12527738\tcoral tree, erythrina\nn12528109\tkaffir boom, Cape kafferboom, Erythrina caffra\nn12528382\tcoral bean tree, Erythrina corallodendrum\nn12528549\tceibo, crybaby tree, cry-baby tree, common coral tree, Erythrina crista-galli\nn12528768\tkaffir boom, Transvaal kafferboom, Erythrina lysistemon\nn12528974\tIndian coral tree, Erythrina variegata, Erythrina Indica\nn12529220\tcork tree, Erythrina vespertilio\nn12529500\tgoat's rue, goat rue, Galega officinalis\nn12529905\tpoison bush, poison pea, gastrolobium\nn12530629\tSpanish broom, Spanish gorse, Genista hispanica\nn12530818\twoodwaxen, dyer's greenweed, dyer's-broom, dyeweed, 
greenweed, whin, woadwaxen, Genista tinctoria\nn12531328\tchanar, chanal, Geoffroea decorticans\nn12531727\tgliricidia\nn12532564\tsoy, soybean, soya bean\nn12532886\tlicorice, liquorice, Glycyrrhiza glabra\nn12533190\twild licorice, wild liquorice, American licorice, American liquorice, Glycyrrhiza lepidota\nn12533437\tlicorice root\nn12534208\tWestern Australia coral pea, Hardenbergia comnptoniana\nn12534625\tsweet vetch, Hedysarum boreale\nn12534862\tFrench honeysuckle, sulla, Hedysarum coronarium\nn12536291\tanil, Indigofera suffruticosa, Indigofera anil\nn12537253\tscarlet runner, running postman, Kennedia prostrata\nn12537569\thyacinth bean, bonavist, Indian bean, Egyptian bean, Lablab purpureus, Dolichos lablab\nn12538209\tScotch laburnum, Alpine golden chain, Laburnum alpinum\nn12539074\tvetchling\nn12539306\twild pea\nn12539832\teverlasting pea\nn12540250\tbeach pea, sea pea, Lathyrus maritimus, Lathyrus japonicus\nn12540647\tgrass vetch, grass vetchling, Lathyrus nissolia\nn12540966\tmarsh pea, Lathyrus palustris\nn12541157\tcommon vetchling, meadow pea, yellow vetchling, Lathyrus pratensis\nn12541403\tgrass pea, Indian pea, khesari, Lathyrus sativus\nn12542043\tTangier pea, Tangier peavine, Lalthyrus tingitanus\nn12542240\theath pea, earth-nut pea, earthnut pea, tuberous vetch, Lathyrus tuberosus\nn12543186\tbicolor lespediza, ezo-yama-hagi, Lespedeza bicolor\nn12543455\tjapanese clover, japan clover, jap clover, Lespedeza striata\nn12543639\tKorean lespedeza, Lespedeza stipulacea\nn12543826\tsericea lespedeza, Lespedeza sericea, Lespedeza cuneata\nn12544240\tlentil, lentil plant, Lens culinaris\nn12544539\tlentil\nn12545232\tprairie bird's-foot trefoil, compass plant, prairie lotus, prairie trefoil, Lotus americanus\nn12545635\tbird's foot trefoil, bird's foot clover, babies' slippers, bacon and eggs, Lotus corniculatus\nn12545865\twinged pea, asparagus pea, Lotus tetragonolobus\nn12546183\tlupine, lupin\nn12546420\twhite lupine, field lupine, wolf 
bean, Egyptian lupine, Lupinus albus\nn12546617\ttree lupine, Lupinus arboreus\nn12546962\twild lupine, sundial lupine, Indian beet, old-maid's bonnet, Lupinus perennis\nn12547215\tbluebonnet, buffalo clover, Texas bluebonnet, Lupinus subcarnosus\nn12547503\tTexas bluebonnet, Lupinus texensis\nn12548280\tmedic, medick, trefoil\nn12548564\tmoon trefoil, Medicago arborea\nn12548804\tsickle alfalfa, sickle lucerne, sickle medick, Medicago falcata\nn12549005\tCalvary clover, Medicago intertexta, Medicago echinus\nn12549192\tblack medick, hop clover, yellow trefoil, nonesuch clover, Medicago lupulina\nn12549420\talfalfa, lucerne, Medicago sativa\nn12549799\tmillettia\nn12550210\tmucuna\nn12550408\tcowage, velvet bean, Bengal bean, Benghal bean, Florida bean, Mucuna pruriens utilis, Mucuna deeringiana, Mucuna aterrima, Stizolobium deeringiana\nn12551173\ttolu tree, tolu balsam tree, Myroxylon balsamum, Myroxylon toluiferum\nn12551457\tPeruvian balsam, Myroxylon pereirae, Myroxylon balsamum pereirae\nn12552309\tsainfoin, sanfoin, holy clover, esparcet, Onobrychis viciifolia, Onobrychis viciaefolia\nn12552893\trestharrow, rest-harrow, Ononis repens\nn12553742\tbead tree, jumby bean, jumby tree, Ormosia monosperma\nn12554029\tjumby bead, jumbie bead, Ormosia coarctata\nn12554526\tlocoweed, crazyweed, crazy weed\nn12554729\tpurple locoweed, purple loco, Oxytropis lambertii\nn12554911\ttumbleweed\nn12555255\tyam bean, Pachyrhizus erosus\nn12555859\tshamrock pea, Parochetus communis\nn12556656\tpole bean\nn12557064\tkidney bean, frijol, frijole\nn12557438\tharicot\nn12557556\twax bean\nn12557681\tscarlet runner, scarlet runner bean, Dutch case-knife bean, runner bean, Phaseolus coccineus, Phaseolus multiflorus\nn12558230\tlima bean, lima bean plant, Phaseolus limensis\nn12558425\tsieva bean, butter bean, butter-bean plant, lima bean, Phaseolus lunatus\nn12558680\ttepary bean, Phaseolus acutifolius latifolius\nn12559044\tchaparral pea, stingaree-bush, Pickeringia 
montana\nn12559518\tJamaica dogwood, fish fuddle, Piscidia piscipula, Piscidia erythrina\nn12560282\tpea\nn12560621\tgarden pea\nn12560775\tedible-pod pea, edible-podded pea, Pisum sativum macrocarpon\nn12561169\tsugar snap pea, snap pea\nn12561309\tfield pea, field-pea plant, Austrian winter pea, Pisum sativum arvense, Pisum arvense\nn12561594\tfield pea\nn12562141\tcommon flat pea, native holly, Playlobium obtusangulum\nn12562577\tquira\nn12562785\troble, Platymiscium trinitatis\nn12563045\tPanama redwood tree, Panama redwood, Platymiscium pinnatum\nn12563702\tIndian beech, Pongamia glabra\nn12564083\twinged bean, winged pea, goa bean, goa bean vine, Manila bean, Psophocarpus tetragonolobus\nn12564613\tbreadroot, Indian breadroot, pomme blanche, pomme de prairie, Psoralea esculenta\nn12565102\tbloodwood tree, kiaat, Pterocarpus angolensis\nn12565912\tkino, Pterocarpus marsupium\nn12566331\tred sandalwood, red sanders, red sanderswood, red saunders, Pterocarpus santalinus\nn12566954\tkudzu, kudzu vine, Pueraria lobata\nn12567950\tbristly locust, rose acacia, moss locust, Robinia hispida\nn12568186\tblack locust, yellow locust, Robinia pseudoacacia\nn12568649\tclammy locust, Robinia viscosa\nn12569037\tcarib wood, Sabinea carinalis\nn12569616\tColorado River hemp, Sesbania exaltata\nn12569851\tscarlet wisteria tree, vegetable hummingbird, Sesbania grandiflora\nn12570394\tJapanese pagoda tree, Chinese scholartree, Chinese scholar tree, Sophora japonica, Sophora sinensis\nn12570703\tmescal bean, coral bean, frijolito, frijolillo, Sophora secundiflora\nn12570972\tkowhai, Sophora tetraptera\nn12571781\tjade vine, emerald creeper, Strongylodon macrobotrys\nn12572546\thoary pea\nn12572759\tbastard indigo, Tephrosia purpurea\nn12572858\tcatgut, goat's rue, wild sweet pea, Tephrosia virginiana\nn12573256\tbush pea\nn12573474\tfalse lupine, golden pea, yellow pea, Thermopsis macrophylla\nn12573647\tCarolina lupine, Thermopsis villosa\nn12573911\ttipu, tipu tree, yellow 
jacaranda, pride of Bolivia\nn12574320\tbird's foot trefoil, Trigonella ornithopodioides\nn12574470\tfenugreek, Greek clover, Trigonella foenumgraecum\nn12574866\tgorse, furze, whin, Irish gorse, Ulex europaeus\nn12575322\tvetch\nn12575812\ttufted vetch, bird vetch, Calnada pea, Vicia cracca\nn12576323\tbroad bean, fava bean, horsebean\nn12576451\tbitter betch, Vicia orobus\nn12576695\tbush vetch, Vicia sepium\nn12577362\tmoth bean, Vigna aconitifolia, Phaseolus aconitifolius\nn12577895\tsnailflower, snail-flower, snail flower, snail bean, corkscrew flower, Vigna caracalla, Phaseolus caracalla\nn12578255\tmung, mung bean, green gram, golden gram, Vigna radiata, Phaseolus aureus\nn12578626\tcowpea, cowpea plant, black-eyed pea, Vigna unguiculata, Vigna sinensis\nn12578916\tcowpea, black-eyed pea\nn12579038\tasparagus bean, yard-long bean, Vigna unguiculata sesquipedalis, Vigna sesquipedalis\nn12579404\tswamp oak, Viminaria juncea, Viminaria denudata\nn12579822\tkeurboom, Virgilia capensis, Virgilia oroboides\nn12580012\tkeurboom, Virgilia divaricata\nn12580654\tJapanese wistaria, Wisteria floribunda\nn12580786\tChinese wistaria, Wisteria chinensis\nn12580896\tAmerican wistaria, American wisteria, Wisteria frutescens\nn12581110\tsilky wisteria, Wisteria venusta\nn12582231\tpalm, palm tree\nn12582665\tsago palm\nn12582846\tfeather palm\nn12583126\tfan palm\nn12583401\tpalmetto\nn12583681\tcoyol, coyol palm, Acrocomia vinifera\nn12583855\tgrugru, gri-gri, grugru palm, macamba, Acrocomia aculeata\nn12584191\tareca\nn12584365\tbetel palm, Areca catechu\nn12584715\tsugar palm, gomuti, gomuti palm, Arenga pinnata\nn12585137\tpiassava palm, pissaba palm, Bahia piassava, bahia coquilla, Attalea funifera\nn12585373\tcoquilla nut\nn12585629\tpalmyra, palmyra palm, toddy palm, wine palm, lontar, longar palm, Borassus flabellifer\nn12586298\tcalamus\nn12586499\trattan, rattan palm, Calamus rotang\nn12586725\tlawyer cane, Calamus australis\nn12586989\tfishtail 
palm\nn12587132\twine palm, jaggery palm, kitul, kittul, kitul tree, toddy palm, Caryota urens\nn12587487\twax palm, Ceroxylon andicola, Ceroxylon alpinum\nn12587803\tcoconut, coconut palm, coco palm, coco, cocoa palm, coconut tree, Cocos nucifera\nn12588320\tcarnauba, carnauba palm, wax palm, Copernicia prunifera, Copernicia cerifera\nn12588780\tcaranday, caranda, caranda palm, wax palm, Copernicia australis, Copernicia alba\nn12589142\tcorozo, corozo palm\nn12589458\tgebang palm, Corypha utan, Corypha gebanga\nn12589687\tlatanier, latanier palm\nn12589841\ttalipot, talipot palm, Corypha umbraculifera\nn12590232\toil palm\nn12590499\tAfrican oil palm, Elaeis guineensis\nn12590600\tAmerican oil palm, Elaeis oleifera\nn12590715\tpalm nut, palm kernel\nn12591017\tcabbage palm, Euterpe oleracea\nn12591351\tcabbage palm, cabbage tree, Livistona australis\nn12591702\ttrue sago palm, Metroxylon sagu\nn12592058\tnipa palm, Nipa fruticans\nn12592544\tbabassu, babassu palm, coco de macao, Orbignya phalerata, Orbignya spesiosa, Orbignya martiana\nn12592839\tbabassu nut\nn12593122\tcohune palm, Orbignya cohune, cohune\nn12593341\tcohune nut\nn12593994\tdate palm, Phoenix dactylifera\nn12594324\tivory palm, ivory-nut palm, ivory plant, Phytelephas macrocarpa\nn12594989\traffia palm, Raffia farinifera, Raffia ruffia\nn12595699\tbamboo palm, Raffia vinifera\nn12595964\tlady palm\nn12596148\tminiature fan palm, bamboo palm, fern rhapis, Rhapis excelsa\nn12596345\treed rhapis, slender lady palm, Rhapis humilis\nn12596709\troyal palm, Roystonea regia\nn12596849\tcabbage palm, Roystonea oleracea\nn12597134\tcabbage palmetto, cabbage palm, Sabal palmetto\nn12597466\tsaw palmetto, scrub palmetto, Serenoa repens\nn12597798\tthatch palm, thatch tree, silver thatch, broom palm, Thrinax parviflora\nn12598027\tkey palm, silvertop palmetto, silver thatch, Thrinax microcarpa, Thrinax morrisii, Thrinax keyensis\nn12599185\tEnglish plantain, narrow-leaved plantain, ribgrass, ribwort, 
ripple-grass, buckthorn, Plantago lanceolata\nn12599435\tbroad-leaved plantain, common plantain, white-man's foot, whiteman's foot, cart-track plant, Plantago major\nn12599661\thoary plantain, Plantago media\nn12599874\tfleawort, psyllium, Spanish psyllium, Plantago psyllium\nn12600095\trugel's plantain, broad-leaved plantain, Plantago rugelii\nn12600267\thoary plantain, Plantago virginica\nn12601494\tbuckwheat, Polygonum fagopyrum, Fagopyrum esculentum\nn12601805\tprince's-feather, princess feather, kiss-me-over-the-garden-gate, prince's-plume, Polygonum orientale\nn12602262\teriogonum\nn12602434\tumbrella plant, Eriogonum allenii\nn12602612\twild buckwheat, California buckwheat, Erigonum fasciculatum\nn12602980\trhubarb, rhubarb plant\nn12603273\tHimalayan rhubarb, Indian rhubarb, red-veined pie plant, Rheum australe, Rheum emodi\nn12603449\tpie plant, garden rhubarb, Rheum cultorum, Rheum rhabarbarum, Rheum rhaponticum\nn12603672\tChinese rhubarb, Rheum palmatum\nn12604228\tsour dock, garden sorrel, Rumex acetosa\nn12604460\tsheep sorrel, sheep's sorrel, Rumex acetosella\nn12604639\tbitter dock, broad-leaved dock, yellow dock, Rumex obtusifolius\nn12604845\tFrench sorrel, garden sorrel, Rumex scutatus\nn12605683\tyellow-eyed grass\nn12606438\tcommelina\nn12606545\tspiderwort, dayflower\nn12607456\tpineapple, pineapple plant, Ananas comosus\nn12609379\tpipewort, Eriocaulon aquaticum\nn12610328\twater hyacinth, water orchid, Eichhornia crassipes, Eichhornia spesiosa\nn12610740\twater star grass, mud plantain, Heteranthera dubia\nn12611640\tnaiad, water nymph\nn12612170\twater plantain, Alisma plantago-aquatica\nn12612811\tnarrow-leaved water plantain\nn12613706\thydrilla, Hydrilla verticillata\nn12614096\tAmerican frogbit, Limnodium spongia\nn12614477\twaterweed\nn12614625\tCanadian pondweed, Elodea canadensis\nn12615232\ttape grass, eelgrass, wild celery, Vallisneria spiralis\nn12615710\tpondweed\nn12616248\tcurled leaf pondweed, curly pondweed, Potamogeton 
crispus\nn12616630\tloddon pondweed, Potamogeton nodosus, Potamogeton americanus\nn12616996\tfrog's lettuce\nn12617559\tarrow grass, Triglochin maritima\nn12618146\thorned pondweed, Zannichellia palustris\nn12618727\teelgrass, grass wrack, sea wrack, Zostera marina\nn12620196\trose, rosebush\nn12620546\thip, rose hip, rosehip\nn12620969\tbanksia rose, Rosa banksia\nn12621410\tdamask rose, summer damask rose, Rosa damascena\nn12621619\tsweetbrier, sweetbriar, brier, briar, eglantine, Rosa eglanteria\nn12621945\tCherokee rose, Rosa laevigata\nn12622297\tmusk rose, Rosa moschata\nn12622875\tagrimonia, agrimony\nn12623077\tharvest-lice, Agrimonia eupatoria\nn12623211\tfragrant agrimony, Agrimonia procera\nn12623818\talderleaf Juneberry, alder-leaved serviceberry, Amelanchier alnifolia\nn12624381\tflowering quince\nn12624568\tjaponica, maule's quince, Chaenomeles japonica\nn12625003\tcoco plum, coco plum tree, cocoa plum, icaco, Chrysobalanus icaco\nn12625383\tcotoneaster\nn12625670\tCotoneaster dammeri\nn12625823\tCotoneaster horizontalis\nn12626674\tparsley haw, parsley-leaved thorn, Crataegus apiifolia, Crataegus marshallii\nn12626878\tscarlet haw, Crataegus biltmoreana\nn12627119\tblackthorn, pear haw, pear hawthorn, Crataegus calpodendron, Crataegus tomentosa\nn12627347\tcockspur thorn, cockspur hawthorn, Crataegus crus-galli\nn12627526\tmayhaw, summer haw, Crataegus aestivalis\nn12628356\tred haw, downy haw, Crataegus mollis, Crataegus coccinea mollis\nn12628705\tred haw, Crataegus pedicellata, Crataegus coccinea\nn12628986\tquince, quince bush, Cydonia oblonga\nn12629305\tmountain avens, Dryas octopetala\nn12629666\tloquat, loquat tree, Japanese medlar, Japanese plum, Eriobotrya japonica\nn12630763\tbeach strawberry, Chilean strawberry, Fragaria chiloensis\nn12630999\tVirginia strawberry, scarlet strawberry, Fragaria virginiana\nn12631331\tavens\nn12631637\tyellow avens, Geum alleppicum strictum, Geum strictum\nn12631932\tyellow avens, Geum 
macrophyllum\nn12632335\tprairie smoke, purple avens, Geum triflorum\nn12632733\tbennet, white avens, Geum virginianum\nn12633061\ttoyon, tollon, Christmasberry, Christmas berry, Heteromeles arbutifolia, Photinia arbutifolia\nn12633638\tapple tree\nn12633994\tapple, orchard apple tree, Malus pumila\nn12634211\twild apple, crab apple, crabapple\nn12634429\tcrab apple, crabapple, cultivated crab apple\nn12634734\tSiberian crab, Siberian crab apple, cherry apple, cherry crab, Malus baccata\nn12634986\twild crab, Malus sylvestris\nn12635151\tAmerican crab apple, garland crab, Malus coronaria\nn12635359\tOregon crab apple, Malus fusca\nn12635532\tSouthern crab apple, flowering crab, Malus angustifolia\nn12635744\tIowa crab, Iowa crab apple, prairie crab, western crab apple, Malus ioensis\nn12635955\tBechtel crab, flowering crab\nn12636224\tmedlar, medlar tree, Mespilus germanica\nn12636885\tcinquefoil, five-finger\nn12637123\tsilverweed, goose-tansy, goose grass, Potentilla anserina\nn12637485\tsalad burnet, burnet bloodwort, pimpernel, Poterium sanguisorba\nn12638218\tplum, plum tree\nn12638556\twild plum, wild plum tree\nn12638753\tAllegheny plum, Alleghany plum, sloe, Prunus alleghaniensis\nn12638964\tAmerican red plum, August plum, goose plum, Prunus americana\nn12639168\tchickasaw plum, hog plum, hog plum bush, Prunus angustifolia\nn12639376\tbeach plum, beach plum bush, Prunus maritima\nn12639584\tcommon plum, Prunus domestica\nn12639736\tbullace, Prunus insititia\nn12639910\tdamson plum, damson plum tree, Prunus domestica insititia\nn12640081\tbig-tree plum, Prunus mexicana\nn12640284\tCanada plum, Prunus nigra\nn12640435\tplumcot, plumcot tree\nn12640607\tapricot, apricot tree\nn12640839\tJapanese apricot, mei, Prunus mume\nn12641007\tcommon apricot, Prunus armeniaca\nn12641180\tpurple apricot, black apricot, Prunus dasycarpa\nn12641413\tcherry, cherry tree\nn12641931\twild cherry, wild cherry tree\nn12642090\twild cherry\nn12642200\tsweet cherry, Prunus 
avium\nn12642435\theart cherry, oxheart, oxheart cherry\nn12642600\tgean, mazzard, mazzard cherry\nn12642964\tcapulin, capulin tree, Prunus capuli\nn12643113\tcherry laurel, laurel cherry, mock orange, wild orange, Prunus caroliniana\nn12643313\tcherry plum, myrobalan, myrobalan plum, Prunus cerasifera\nn12643473\tsour cherry, sour cherry tree, Prunus cerasus\nn12643688\tamarelle, Prunus cerasus caproniana\nn12643877\tmorello, Prunus cerasus austera\nn12644283\tmarasca\nn12644902\talmond tree\nn12645174\talmond, sweet almond, Prunus dulcis, Prunus amygdalus, Amygdalus communis\nn12645530\tbitter almond, Prunus dulcis amara, Amygdalus communis amara\nn12646072\tjordan almond\nn12646197\tdwarf flowering almond, Prunus glandulosa\nn12646397\tholly-leaved cherry, holly-leaf cherry, evergreen cherry, islay, Prunus ilicifolia\nn12646605\tfuji, fuji cherry, Prunus incisa\nn12646740\tflowering almond, oriental bush cherry, Prunus japonica\nn12646950\tcherry laurel, laurel cherry, Prunus laurocerasus\nn12647231\tCatalina cherry, Prunus lyonii\nn12647376\tbird cherry, bird cherry tree\nn12647560\thagberry tree, European bird cherry, common bird cherry, Prunus padus\nn12647787\thagberry\nn12647893\tpin cherry, Prunus pensylvanica\nn12648045\tpeach, peach tree, Prunus persica\nn12648196\tnectarine, nectarine tree, Prunus persica nectarina\nn12648424\tsand cherry, Prunus pumila, Prunus pumilla susquehanae, Prunus susquehanae, Prunus cuneata\nn12648693\tJapanese plum, Prunus salicina\nn12648888\tblack cherry, black cherry tree, rum cherry, Prunus serotina\nn12649065\tflowering cherry\nn12649317\toriental cherry, Japanese cherry, Japanese flowering cherry, Prunus serrulata\nn12649539\tJapanese flowering cherry, Prunus sieboldii\nn12649866\tSierra plum, Pacific plum, Prunus subcordata\nn12650038\trosebud cherry, winter flowering cherry, Prunus subhirtella\nn12650229\tRussian almond, dwarf Russian almond, Prunus tenella\nn12650379\tflowering almond, Prunus 
triloba\nn12650556\tchokecherry, chokecherry tree, Prunus virginiana\nn12650805\tchokecherry\nn12650915\twestern chokecherry, Prunus virginiana demissa, Prunus demissa\nn12651229\tPyracantha, pyracanth, fire thorn, firethorn\nn12651611\tpear, pear tree, Pyrus communis\nn12651821\tfruit tree\nn12653218\tbramble bush\nn12653436\tlawyerbush, lawyer bush, bush lawyer, Rubus cissoides, Rubus australis\nn12653633\tstone bramble, Rubus saxatilis\nn12654227\tsand blackberry, Rubus cuneifolius\nn12654857\tboysenberry, boysenberry bush\nn12655062\tloganberry, Rubus loganobaccus, Rubus ursinus loganobaccus\nn12655245\tAmerican dewberry, Rubus canadensis\nn12655351\tNorthern dewberry, American dewberry, Rubus flagellaris\nn12655498\tSouthern dewberry, Rubus trivialis\nn12655605\tswamp dewberry, swamp blackberry, Rubus hispidus\nn12655726\tEuropean dewberry, Rubus caesius\nn12655869\traspberry, raspberry bush\nn12656369\twild raspberry, European raspberry, framboise, Rubus idaeus\nn12656528\tAmerican raspberry, Rubus strigosus, Rubus idaeus strigosus\nn12656685\tblack raspberry, blackcap, blackcap raspberry, thimbleberry, Rubus occidentalis\nn12656909\tsalmonberry, Rubus spectabilis\nn12657082\tsalmonberry, salmon berry, thimbleberry, Rubus parviflorus\nn12657755\twineberry, Rubus phoenicolasius\nn12658118\tmountain ash\nn12658308\trowan, rowan tree, European mountain ash, Sorbus aucuparia\nn12658481\trowanberry\nn12658603\tAmerican mountain ash, Sorbus americana\nn12658715\tWestern mountain ash, Sorbus sitchensis\nn12658846\tservice tree, sorb apple, sorb apple tree, Sorbus domestica\nn12659064\twild service tree, Sorbus torminalis\nn12659356\tspirea, spiraea\nn12659539\tbridal wreath, bridal-wreath, Saint Peter's wreath, St. 
Peter's wreath, Spiraea prunifolia\nn12660601\tmadderwort, rubiaceous plant\nn12661045\tIndian madder, munjeet, Rubia cordifolia\nn12661227\tmadder, Rubia tinctorum\nn12661538\twoodruff\nn12662074\tdagame, lemonwood tree, Calycophyllum candidissimum\nn12662379\tblolly, West Indian snowberry, Chiococca alba\nn12662772\tcoffee, coffee tree\nn12663023\tArabian coffee, Coffea arabica\nn12663254\tLiberian coffee, Coffea liberica\nn12663359\trobusta coffee, Rio Nunez coffee, Coffea robusta, Coffea canephora\nn12663804\tcinchona, chinchona\nn12664005\tCartagena bark, Cinchona cordifolia, Cinchona lancifolia\nn12664187\tcalisaya, Cinchona officinalis, Cinchona ledgeriana, Cinchona calisaya\nn12664469\tcinchona tree, Cinchona pubescens\nn12664710\tcinchona, cinchona bark, Peruvian bark, Jesuit's bark\nn12665048\tbedstraw\nn12665271\tsweet woodruff, waldmeister, woodruff, fragrant bedstraw, Galium odoratum, Asperula odorata\nn12665659\tNorthern bedstraw, Northern snow bedstraw, Galium boreale\nn12665857\tyellow bedstraw, yellow cleavers, Our Lady's bedstraw, Galium verum\nn12666050\twild licorice, Galium lanceolatum\nn12666159\tcleavers, clivers, goose grass, catchweed, spring cleavers, Galium aparine\nn12666369\twild madder, white madder, white bedstraw, infant's-breath, false baby's breath, Galium mollugo\nn12666965\tcape jasmine, cape jessamine, Gardenia jasminoides, Gardenia augusta\nn12667406\tgenipa\nn12667582\tgenipap fruit, jagua, marmalade box, Genipa Americana\nn12667964\thamelia\nn12668131\tscarlet bush, scarlet hamelia, coloradillo, Hamelia patens, Hamelia erecta\nn12669803\tlemonwood, lemon-wood, lemonwood tree, lemon-wood tree, Psychotria capensis\nn12670334\tnegro peach, Sarcocephalus latifolius, Sarcocephalus esculentus\nn12670758\twild medlar, wild medlar tree, medlar, Vangueria infausta\nn12670962\tSpanish tamarind, Vangueria madagascariensis\nn12671651\tabelia\nn12672289\tbush honeysuckle, Diervilla sessilifolia\nn12673588\tAmerican twinflower, Linnaea 
borealis americana\nn12674120\thoneysuckle\nn12674685\tAmerican fly honeysuckle, fly honeysuckle, Lonicera canadensis\nn12674895\tItalian honeysuckle, Italian woodbine, Lonicera caprifolium\nn12675299\tyellow honeysuckle, Lonicera flava\nn12675515\thairy honeysuckle, Lonicera hirsuta\nn12675876\tJapanese honeysuckle, Lonicera japonica\nn12676134\tHall's honeysuckle, Lonicera japonica halliana\nn12676370\tMorrow's honeysuckle, Lonicera morrowii\nn12676534\twoodbine, Lonicera periclymenum\nn12676703\ttrumpet honeysuckle, coral honeysuckle, trumpet flower, trumpet vine, Lonicera sempervirens\nn12677120\tEuropean fly honeysuckle, European honeysuckle, Lonicera xylosteum\nn12677331\tswamp fly honeysuckle\nn12677612\tsnowberry, common snowberry, waxberry, Symphoricarpos alba\nn12677841\tcoralberry, Indian currant, Symphoricarpos orbiculatus\nn12678794\tblue elder, blue elderberry, Sambucus caerulea\nn12679023\tdwarf elder, danewort, Sambucus ebulus\nn12679432\tAmerican red elder, red-berried elder, stinking elder, Sambucus pubens\nn12679593\tEuropean red elder, red-berried elder, Sambucus racemosa\nn12679876\tfeverroot, horse gentian, tinker's root, wild coffee, Triostium perfoliatum\nn12680402\tcranberry bush, cranberry tree, American cranberry bush, highbush cranberry, Viburnum trilobum\nn12680652\twayfaring tree, twist wood, twistwood, Viburnum lantana\nn12680864\tguelder rose, European cranberrybush, European cranberry bush, crampbark, cranberry tree, Viburnum opulus\nn12681376\tarrow wood, Viburnum recognitum\nn12681579\tblack haw, Viburnum prunifolium\nn12681893\tweigela, Weigela florida\nn12682411\tteasel, teazel, teasle\nn12682668\tcommon teasel, Dipsacus fullonum\nn12682882\tfuller's teasel, Dipsacus sativus\nn12683096\twild teasel, Dipsacus sylvestris\nn12683407\tscabious, scabiosa\nn12683571\tsweet scabious, pincushion flower, mournful widow, Scabiosa atropurpurea\nn12683791\tfield scabious, Scabiosa arvensis\nn12684379\tjewelweed, lady's earrings, orange 
balsam, celandine, touch-me-not, Impatiens capensis\nn12685431\tgeranium\nn12685831\tcranesbill, crane's bill\nn12686077\twild geranium, spotted cranesbill, Geranium maculatum\nn12686274\tmeadow cranesbill, Geranium pratense\nn12686496\tRichardson's geranium, Geranium richardsonii\nn12686676\therb robert, herbs robert, herb roberts, Geranium robertianum\nn12686877\tsticky geranium, Geranium viscosissimum\nn12687044\tdove's foot geranium, Geranium molle\nn12687462\trose geranium, sweet-scented geranium, Pelargonium graveolens\nn12687698\tfish geranium, bedding geranium, zonal pelargonium, Pelargonium hortorum\nn12687957\tivy geranium, ivy-leaved geranium, hanging geranium, Pelargonium peltatum\nn12688187\tapple geranium, nutmeg geranium, Pelargonium odoratissimum\nn12688372\tlemon geranium, Pelargonium limoneum\nn12688716\tstorksbill, heron's bill\nn12689305\tmusk clover, muskus grass, white-stemmed filaree, Erodium moschatum\nn12690653\tincense tree\nn12691428\telephant tree, Bursera microphylla\nn12691661\tgumbo-limbo, Bursera simaruba\nn12692024\tBoswellia carteri\nn12692160\tsalai, Boswellia serrata\nn12692521\tbalm of gilead, Commiphora meccanensis\nn12692714\tmyrrh tree, Commiphora myrrha\nn12693244\tProtium heptaphyllum\nn12693352\tProtium guianense\nn12693865\twater starwort\nn12694486\tbarbados cherry, acerola, Surinam cherry, West Indian cherry, Malpighia glabra\nn12695144\tmahogany, mahogany tree\nn12695975\tchinaberry, chinaberry tree, China tree, Persian lilac, pride-of-India, azederach, azedarach, Melia azederach, Melia azedarach\nn12696492\tneem, neem tree, nim tree, margosa, arishth, Azadirachta indica, Melia Azadirachta\nn12696830\tneem seed\nn12697152\tSpanish cedar, Spanish cedar tree, Cedrela odorata\nn12697514\tsatinwood, satinwood tree, Chloroxylon swietenia\nn12698027\tAfrican scented mahogany, cedar mahogany, sapele mahogany, Entandrophragma cylindricum\nn12698435\tsilver ash\nn12698598\tnative beech, flindosa, flindosy, Flindersia 
australis\nn12698774\tbunji-bunji, Flindersia schottiana\nn12699031\tAfrican mahogany\nn12699301\tlanseh tree, langsat, langset, Lansium domesticum\nn12699922\ttrue mahogany, Cuban mahogany, Dominican mahogany, Swietinia mahogani\nn12700088\tHonduras mahogany, Swietinia macrophylla\nn12700357\tPhilippine mahogany, Philippine cedar, kalantas, Toona calantas, Cedrela calantas\nn12702124\tcaracolito, Ruptiliocarpon caracolito\nn12703190\tcommon wood sorrel, cuckoo bread, shamrock, Oxalis acetosella\nn12703383\tBermuda buttercup, English-weed, Oxalis pes-caprae, Oxalis cernua\nn12703557\tcreeping oxalis, creeping wood sorrel, Oxalis corniculata\nn12703716\tgoatsfoot, goat's foot, Oxalis caprina\nn12703856\tviolet wood sorrel, Oxalis violacea\nn12704041\toca, oka, Oxalis tuberosa, Oxalis crenata\nn12704343\tcarambola, carambola tree, Averrhoa carambola\nn12704513\tbilimbi, Averrhoa bilimbi\nn12705013\tmilkwort\nn12705220\tsenega, Polygala alba\nn12705458\torange milkwort, yellow milkwort, candyweed, yellow bachelor's button, Polygala lutea\nn12705698\tflowering wintergreen, gaywings, bird-on-the-wing, fringed polygala, Polygala paucifolia\nn12705978\tSeneca snakeroot, Seneka snakeroot, senga root, senega root, senega snakeroot, Polygala senega\nn12706410\tcommon milkwort, gand flower, Polygala vulgaris\nn12707199\true, herb of grace, Ruta graveolens\nn12707781\tcitrus, citrus tree\nn12708293\torange, orange tree\nn12708654\tsour orange, Seville orange, bitter orange, bitter orange tree, bigarade, marmalade orange, Citrus aurantium\nn12708941\tbergamot, bergamot orange, Citrus bergamia\nn12709103\tpomelo, pomelo tree, pummelo, shaddock, Citrus maxima, Citrus grandis, Citrus decumana\nn12709349\tcitron, citron tree, Citrus medica\nn12709688\tgrapefruit, Citrus paradisi\nn12709901\tmandarin, mandarin orange, mandarin orange tree, Citrus reticulata\nn12710295\ttangerine, tangerine tree\nn12710415\tclementine, clementine tree\nn12710577\tsatsuma, satsuma 
tree\nn12710693\tsweet orange, sweet orange tree, Citrus sinensis\nn12710917\ttemple orange, temple orange tree, tangor, king orange, Citrus nobilis\nn12711182\ttangelo, tangelo tree, ugli fruit, Citrus tangelo\nn12711398\trangpur, rangpur lime, lemanderin, Citrus limonia\nn12711596\tlemon, lemon tree, Citrus limon\nn12711817\tsweet lemon, sweet lime, Citrus limetta\nn12711984\tlime, lime tree, Citrus aurantifolia\nn12712320\tcitrange, citrange tree, Citroncirus webberi\nn12712626\tfraxinella, dittany, burning bush, gas plant, Dictamnus alba\nn12713063\tkumquat, cumquat, kumquat tree\nn12713358\tmarumi, marumi kumquat, round kumquat, Fortunella japonica\nn12713521\tnagami, nagami kumquat, oval kumquat, Fortunella margarita\nn12713866\tcork tree, Phellodendron amurense\nn12714254\ttrifoliate orange, trifoliata, wild orange, Poncirus trifoliata\nn12714755\tprickly ash\nn12714949\ttoothache tree, sea ash, Zanthoxylum americanum, Zanthoxylum fraxineum\nn12715195\tHercules'-club, Hercules'-clubs, Hercules-club, Zanthoxylum clava-herculis\nn12715914\tbitterwood tree\nn12716400\tmarupa, Simarouba amara\nn12716594\tparadise tree, bitterwood, Simarouba glauca\nn12717072\tailanthus\nn12717224\ttree of heaven, tree of the gods, Ailanthus altissima\nn12717644\twild mango, dika, wild mango tree, Irvingia gabonensis\nn12718074\tpepper tree, Kirkia wilmsii\nn12718483\tJamaica quassia, bitterwood, Picrasma excelsa, Picrasma excelsum\nn12718995\tquassia, bitterwood, Quassia amara\nn12719684\tnasturtium\nn12719944\tgarden nasturtium, Indian cress, Tropaeolum majus\nn12720200\tbush nasturtium, Tropaeolum minus\nn12720354\tcanarybird flower, canarybird vine, canary creeper, Tropaeolum peregrinum\nn12721122\tbean caper, Syrian bean caper, Zygophyllum fabago\nn12721477\tpalo santo, Bulnesia sarmienti\nn12722071\tlignum vitae, Guaiacum officinale\nn12723062\tcreosote bush, coville, hediondilla, Larrea tridentata\nn12723610\tcaltrop, devil's weed, Tribulus terestris\nn12724942\twillow, 
willow tree\nn12725521\tosier\nn12725738\twhite willow, Huntingdon willow, Salix alba\nn12725940\tsilver willow, silky willow, Salix alba sericea, Salix sericea\nn12726159\tgolden willow, Salix alba vitellina, Salix vitellina\nn12726357\tcricket-bat willow, Salix alba caerulea\nn12726528\tarctic willow, Salix arctica\nn12726670\tweeping willow, Babylonian weeping willow, Salix babylonica\nn12726902\tWisconsin weeping willow, Salix pendulina, Salix blanda, Salix pendulina blanda\nn12727101\tpussy willow, Salix discolor\nn12727301\tsallow\nn12727518\tgoat willow, florist's willow, pussy willow, Salix caprea\nn12727729\tpeachleaf willow, peach-leaved willow, almond-leaves willow, Salix amygdaloides\nn12727960\talmond willow, black Hollander, Salix triandra, Salix amygdalina\nn12728164\thoary willow, sage willow, Salix candida\nn12728322\tcrack willow, brittle willow, snap willow, Salix fragilis\nn12728508\tprairie willow, Salix humilis\nn12728656\tdwarf willow, Salix herbacea\nn12728864\tgrey willow, gray willow, Salix cinerea\nn12729023\tarroyo willow, Salix lasiolepis\nn12729164\tshining willow, Salix lucida\nn12729315\tswamp willow, black willow, Salix nigra\nn12729521\tbay willow, laurel willow, Salix pentandra\nn12729729\tpurple willow, red willow, red osier, basket willow, purple osier, Salix purpurea\nn12729950\tbalsam willow, Salix pyrifolia\nn12730143\tcreeping willow, Salix repens\nn12730370\tSitka willow, silky willow, Salix sitchensis\nn12730544\tdwarf grey willow, dwarf gray willow, sage willow, Salix tristis\nn12730776\tbearberry willow, Salix uva-ursi\nn12731029\tcommon osier, hemp willow, velvet osier, Salix viminalis\nn12731401\tpoplar, poplar tree\nn12731835\tbalsam poplar, hackmatack, tacamahac, Populus balsamifera\nn12732009\twhite poplar, white aspen, abele, aspen poplar, silver-leaved poplar, Populus alba\nn12732252\tgrey poplar, gray poplar, Populus canescens\nn12732491\tblack poplar, Populus nigra\nn12732605\tLombardy poplar, Populus nigra 
italica\nn12732756\tcottonwood\nn12732966\tEastern cottonwood, necklace poplar, Populus deltoides\nn12733218\tblack cottonwood, Western balsam poplar, Populus trichocarpa\nn12733428\tswamp cottonwood, black cottonwood, downy poplar, swamp poplar, Populus heterophylla\nn12733647\taspen\nn12733870\tquaking aspen, European quaking aspen, Populus tremula\nn12734070\tAmerican quaking aspen, American aspen, Populus tremuloides\nn12734215\tCanadian aspen, bigtooth aspen, bigtoothed aspen, big-toothed aspen, large-toothed aspen, large tooth aspen, Populus grandidentata\nn12735160\tsandalwood tree, true sandalwood, Santalum album\nn12736603\tquandong, quandang, quandong tree, Eucarya acuminata, Fusanus acuminatus\nn12736999\trabbitwood, buffalo nut, Pyrularia pubera\nn12737383\tLoranthaceae, family Loranthaceae, mistletoe family\nn12737898\tmistletoe, Loranthus europaeus\nn12738259\tAmerican mistletoe, Arceuthobium pusillum\nn12739332\tmistletoe, Viscum album, Old World mistletoe\nn12739966\tAmerican mistletoe, Phoradendron serotinum, Phoradendron flavescens\nn12740967\taalii\nn12741222\tsoapberry, soapberry tree\nn12741586\twild China tree, Sapindus drumondii, Sapindus marginatus\nn12741792\tChina tree, false dogwood, jaboncillo, chinaberry, Sapindus saponaria\nn12742290\takee, akee tree, Blighia sapida\nn12742741\tsoapberry vine\nn12742878\theartseed, Cardiospermum grandiflorum\nn12743009\tballoon vine, heart pea, Cardiospermum halicacabum\nn12743352\tlongan, lungen, longanberry, Dimocarpus longan, Euphorbia litchi, Nephelium longana\nn12743823\tharpullia\nn12743976\tharpulla, Harpullia cupanioides\nn12744142\tMoreton Bay tulipwood, Harpullia pendula\nn12744387\tlitchi, lichee, litchi tree, Litchi chinensis, Nephelium litchi\nn12744850\tSpanish lime, Spanish lime tree, honey berry, mamoncillo, genip, ginep, Melicocca bijuga, Melicocca bijugatus\nn12745386\trambutan, rambotan, rambutan tree, Nephelium lappaceum\nn12745564\tpulasan, pulassan, pulasan tree, Nephelium 
mutabile\nn12746884\tpachysandra\nn12747120\tAllegheny spurge, Allegheny mountain spurge, Pachysandra procumbens\nn12748248\tbittersweet, American bittersweet, climbing bittersweet, false bittersweet, staff vine, waxwork, shrubby bittersweet, Celastrus scandens\nn12749049\tspindle tree, spindleberry, spindleberry tree\nn12749456\twinged spindle tree, Euonymous alatus\nn12749679\twahoo, burning bush, Euonymus atropurpureus\nn12749852\tstrawberry bush, wahoo, Euonymus americanus\nn12750076\tevergreen bittersweet, Euonymus fortunei radicans, Euonymus radicans vegetus\nn12750767\tcyrilla, leatherwood, white titi, Cyrilla racemiflora\nn12751172\ttiti, buckwheat tree, Cliftonia monophylla\nn12751675\tcrowberry\nn12752205\tmaple\nn12753007\tsilver maple, Acer saccharinum\nn12753245\tsugar maple, rock maple, Acer saccharum\nn12753573\tred maple, scarlet maple, swamp maple, Acer rubrum\nn12753762\tmoosewood, moose-wood, striped maple, striped dogwood, goosefoot maple, Acer pennsylvanicum\nn12754003\tOregon maple, big-leaf maple, Acer macrophyllum\nn12754174\tdwarf maple, Rocky-mountain maple, Acer glabrum\nn12754311\tmountain maple, mountain alder, Acer spicatum\nn12754468\tvine maple, Acer circinatum\nn12754648\thedge maple, field maple, Acer campestre\nn12754781\tNorway maple, Acer platanoides\nn12754981\tsycamore, great maple, scottish maple, Acer pseudoplatanus\nn12755225\tbox elder, ash-leaved maple, Acer negundo\nn12755387\tCalifornia box elder, Acer negundo Californicum\nn12755559\tpointed-leaf maple, Acer argutum\nn12755727\tJapanese maple, full moon maple, Acer japonicum\nn12755876\tJapanese maple, Acer palmatum\nn12756457\tholly\nn12757115\tChinese holly, Ilex cornuta\nn12757303\tbearberry, possum haw, winterberry, Ilex decidua\nn12757458\tinkberry, gallberry, gall-berry, evergreen winterberry, Ilex glabra\nn12757668\tmate, Paraguay tea, Ilex paraguariensis\nn12757816\tAmerican holly, Christmas holly\nn12757930\tlow gallberry holly\nn12758014\ttall gallberry 
holly\nn12758099\tyaupon holly\nn12758176\tdeciduous holly\nn12758250\tjuneberry holly\nn12758325\tlargeleaf holly\nn12758399\tGeogia holly\nn12758471\tcommon winterberry holly\nn12758555\tsmooth winterberry holly\nn12759273\tcashew, cashew tree, Anacardium occidentale\nn12759668\tgoncalo alves, Astronium fraxinifolium\nn12760539\tVenetian sumac, wig tree, Cotinus coggygria\nn12760875\tlaurel sumac, Malosma laurina, Rhus laurina\nn12761284\tmango, mango tree, Mangifera indica\nn12761702\tpistachio, Pistacia vera, pistachio tree\nn12761905\tterebinth, Pistacia terebinthus\nn12762049\tmastic, mastic tree, lentisk, Pistacia lentiscus\nn12762405\tAustralian sumac, Rhodosphaera rhodanthema, Rhus rhodanthema\nn12762896\tsumac, sumach, shumac\nn12763529\tsmooth sumac, scarlet sumac, vinegar tree, Rhus glabra\nn12764008\tsugar-bush, sugar sumac, Rhus ovata\nn12764202\tstaghorn sumac, velvet sumac, Virginian sumac, vinegar tree, Rhus typhina\nn12764507\tsquawbush, squaw-bush, skunkbush, Rhus trilobata\nn12764978\taroeira blanca, Schinus chichita\nn12765115\tpepper tree, molle, Peruvian mastic tree, Schinus molle\nn12765402\tBrazilian pepper tree, Schinus terebinthifolius\nn12765846\thog plum, yellow mombin, yellow mombin tree, Spondias mombin\nn12766043\tmombin, mombin tree, jocote, Spondias purpurea\nn12766595\tpoison ash, poison dogwood, poison sumac, Toxicodendron vernix, Rhus vernix\nn12766869\tpoison ivy, markweed, poison mercury, poison oak, Toxicodendron radicans, Rhus radicans\nn12767208\twestern poison oak, Toxicodendron diversilobum, Rhus diversiloba\nn12767423\teastern poison oak, Toxicodendron quercifolium, Rhus quercifolia, Rhus toxicodenedron\nn12767648\tvarnish tree, lacquer tree, Chinese lacquer tree, Japanese lacquer tree, Japanese varnish tree, Japanese sumac, Toxicodendron vernicifluum, Rhus verniciflua\nn12768369\thorse chestnut, buckeye, Aesculus hippocastanum\nn12768682\tbuckeye, horse chestnut, conker\nn12768809\tsweet buckeye\nn12768933\tOhio 
buckeye\nn12769065\tdwarf buckeye, bottlebrush buckeye\nn12769219\tred buckeye\nn12769318\tparticolored buckeye\nn12770529\tebony, ebony tree, Diospyros ebenum\nn12770892\tmarblewood, marble-wood, Andaman marble, Diospyros kurzii\nn12771085\tmarblewood, marble-wood\nn12771192\tpersimmon, persimmon tree\nn12771390\tJapanese persimmon, kaki, Diospyros kaki\nn12771597\tAmerican persimmon, possumwood, Diospyros virginiana\nn12771890\tdate plum, Diospyros lotus\nn12772753\tbuckthorn\nn12772908\tsouthern buckthorn, shittimwood, shittim, mock orange, Bumelia lycioides\nn12773142\tfalse buckthorn, chittamwood, chittimwood, shittimwood, black haw, Bumelia lanuginosa\nn12773651\tstar apple, caimito, Chrysophyllum cainito\nn12773917\tsatinleaf, satin leaf, caimitillo, damson plum, Chrysophyllum oliviforme\nn12774299\tbalata, balata tree, beefwood, bully tree, Manilkara bidentata\nn12774641\tsapodilla, sapodilla tree, Manilkara zapota, Achras zapota\nn12775070\tgutta-percha tree, Palaquium gutta\nn12775393\tgutta-percha tree\nn12775717\tcanistel, canistel tree, Pouteria campechiana nervosa\nn12775919\tmarmalade tree, mammee, sapote, Pouteria zapota, Calocarpum zapota\nn12776558\tsweetleaf, Symplocus tinctoria\nn12776774\tAsiatic sweetleaf, sapphire berry, Symplocus paniculata\nn12777436\tstyrax\nn12777680\tsnowbell, Styrax obassia\nn12777778\tJapanese snowbell, Styrax japonicum\nn12777892\tTexas snowbell, Texas snowbells, Styrax texana\nn12778398\tsilver-bell tree, silverbell tree, snowdrop tree, opossum wood, Halesia carolina, Halesia tetraptera\nn12778605\tcarnivorous plant\nn12779603\tpitcher plant\nn12779851\tcommon pitcher plant, huntsman's cup, huntsman's cups, Sarracenia purpurea\nn12780325\thooded pitcher plant, Sarracenia minor\nn12780563\thuntsman's horn, huntsman's horns, yellow trumpet, yellow pitcher plant, trumpets, Sarracenia flava\nn12781940\ttropical pitcher plant\nn12782530\tsundew, sundew plant, daily dew\nn12782915\tVenus's flytrap, Venus's flytraps, 
Dionaea muscipula\nn12783316\twaterwheel plant, Aldrovanda vesiculosa\nn12783730\tDrosophyllum lusitanicum\nn12784371\troridula\nn12784889\tAustralian pitcher plant, Cephalotus follicularis\nn12785724\tsedum\nn12785889\tstonecrop\nn12786273\trose-root, midsummer-men, Sedum rosea\nn12786464\torpine, orpin, livelong, live-forever, Sedum telephium\nn12786836\tpinwheel, Aeonium haworthii\nn12787364\tChristmas bush, Christmas tree, Ceratopetalum gummiferum\nn12788854\thortensia, Hydrangea macrophylla hortensis\nn12789054\tfall-blooming hydrangea, Hydrangea paniculata\nn12789554\tcarpenteria, Carpenteria californica\nn12789977\tdecumary, Decumaria barbata, Decumaria barbara\nn12790430\tdeutzia\nn12791064\tphiladelphus\nn12791329\tmock orange, syringa, Philadelphus coronarius\nn12793015\tsaxifrage, breakstone, rockfoil\nn12793284\tyellow mountain saxifrage, Saxifraga aizoides\nn12793494\tmeadow saxifrage, fair-maids-of-France, Saxifraga granulata\nn12793695\tmossy saxifrage, Saxifraga hypnoides\nn12793886\twestern saxifrage, Saxifraga occidentalis\nn12794135\tpurple saxifrage, Saxifraga oppositifolia\nn12794367\tstar saxifrage, starry saxifrage, Saxifraga stellaris\nn12794568\tstrawberry geranium, strawberry saxifrage, mother-of-thousands, Saxifraga stolonifera, Saxifraga sarmentosam\nn12794985\tastilbe\nn12795209\tfalse goatsbeard, Astilbe biternata\nn12795352\tdwarf astilbe, Astilbe chinensis pumila\nn12795555\tspirea, spiraea, Astilbe japonica\nn12796022\tbergenia\nn12796385\tcoast boykinia, Boykinia elata, Boykinia occidentalis\nn12796849\tgolden saxifrage, golden spleen\nn12797368\tumbrella plant, Indian rhubarb, Darmera peltata, Peltiphyllum peltatum\nn12797860\tbridal wreath, bridal-wreath, Francoa ramosa\nn12798284\talumroot, alumbloom\nn12798910\tcoralbells, Heuchera sanguinea\nn12799269\tleatherleaf saxifrage, Leptarrhena pyrolifolia\nn12799776\twoodland star, Lithophragma affine, Lithophragma affinis, Tellima affinis\nn12800049\tprairie star, Lithophragma 
parviflorum\nn12800586\tmiterwort, mitrewort, bishop's cap\nn12801072\tfive-point bishop's cap, Mitella pentandra\nn12801520\tparnassia, grass-of-Parnassus\nn12801781\tbog star, Parnassia palustris\nn12801966\tfringed grass of Parnassus, Parnassia fimbriata\nn12803226\tfalse alumroot, fringe cups, Tellima grandiflora\nn12803754\tfoamflower, coolwart, false miterwort, false mitrewort, Tiarella cordifolia\nn12803958\tfalse miterwort, false mitrewort, Tiarella unifoliata\nn12804352\tpickaback plant, piggyback plant, youth-on-age, Tolmiea menziesii\nn12805146\tcurrant, currant bush\nn12805561\tblack currant, European black currant, Ribes nigrum\nn12805762\twhite currant, Ribes sativum\nn12806015\tgooseberry, gooseberry bush, Ribes uva-crispa, Ribes grossularia\nn12806732\tplane tree, sycamore, platan\nn12807251\tLondon plane, Platanus acerifolia\nn12807409\tAmerican sycamore, American plane, buttonwood, Platanus occidentalis\nn12807624\toriental plane, Platanus orientalis\nn12807773\tCalifornia sycamore, Platanus racemosa\nn12808007\tArizona sycamore, Platanus wrightii\nn12809868\tGreek valerian, Polemonium reptans\nn12810007\tnorthern Jacob's ladder, Polemonium boreale\nn12810151\tskunkweed, skunk-weed, Polemonium viscosum\nn12810595\tphlox\nn12811027\tmoss pink, mountain phlox, moss phlox, dwarf phlox, Phlox subulata\nn12811713\tevening-snow, Linanthus dichotomus\nn12812235\tacanthus\nn12812478\tbear's breech, bear's breeches, sea holly, Acanthus mollis\nn12812801\tcaricature plant, Graptophyllum pictum\nn12813189\tblack-eyed Susan, black-eyed Susan vine, Thunbergia alata\nn12814643\tcatalpa, Indian bean\nn12814857\tCatalpa bignioides\nn12814960\tCatalpa speciosa\nn12815198\tdesert willow, Chilopsis linearis\nn12815668\tcalabash, calabash tree, Crescentia cujete\nn12815838\tcalabash\nn12816508\tborage, tailwort, Borago officinalis\nn12816942\tcommon amsinckia, Amsinckia intermedia\nn12817464\tanchusa\nn12817694\tbugloss, alkanet, Anchusa officinalis\nn12817855\tcape 
forget-me-not, Anchusa capensis\nn12818004\tcape forget-me-not, Anchusa riparia\nn12818346\tSpanish elm, Equador laurel, salmwood, cypre, princewood, Cordia alliodora\nn12818601\tprincewood, Spanish elm, Cordia gerascanthus\nn12818966\tChinese forget-me-not, Cynoglossum amabile\nn12819141\thound's-tongue, Cynoglossum officinale\nn12819354\thound's-tongue, Cynoglossum virginaticum\nn12819728\tblueweed, blue devil, blue thistle, viper's bugloss, Echium vulgare\nn12820113\tbeggar's lice, beggar lice\nn12820669\tgromwell, Lithospermum officinale\nn12820853\tpuccoon, Lithospermum caroliniense\nn12821505\tVirginia bluebell, Virginia cowslip, Mertensia virginica\nn12821895\tgarden forget-me-not, Myosotis sylvatica\nn12822115\tforget-me-not, mouse ear, Myosotis scorpiodes\nn12822466\tfalse gromwell\nn12822769\tcomfrey, cumfrey\nn12822955\tcommon comfrey, boneset, Symphytum officinale\nn12823717\tconvolvulus\nn12823859\tbindweed\nn12824053\tfield bindweed, wild morning-glory, Convolvulus arvensis\nn12824289\tscammony, Convolvulus scammonia\nn12824735\tsilverweed\nn12825497\tdodder\nn12826143\tdichondra, Dichondra micrantha\nn12827270\tcypress vine, star-glory, Indian pink, Ipomoea quamoclit, Quamoclit pennata\nn12827537\tmoonflower, belle de nuit, Ipomoea alba\nn12827907\twild potato vine, wild sweet potato vine, man-of-the-earth, manroot, scammonyroot, Ipomoea panurata, Ipomoea fastigiata\nn12828220\tred morning-glory, star ipomoea, Ipomoea coccinea\nn12828379\tman-of-the-earth, Ipomoea leptophylla\nn12828520\tscammony, Ipomoea orizabensis\nn12828791\tJapanese morning glory, Ipomoea nil\nn12828977\timperial Japanese morning glory, Ipomoea imperialis\nn12829582\tgesneriad\nn12829975\tgesneria\nn12830222\tachimenes, hot water plant\nn12830568\taeschynanthus\nn12831141\tlace-flower vine, Alsobia dianthiflora, Episcia dianthiflora\nn12831535\tcolumnea\nn12831932\tepiscia\nn12832315\tgloxinia\nn12832538\tCanterbury bell, Gloxinia 
perennis\nn12832822\tkohleria\nn12833149\tAfrican violet, Saintpaulia ionantha\nn12833985\tstreptocarpus\nn12834190\tCape primrose\nn12834798\twaterleaf\nn12834938\tVirginia waterleaf, Shawnee salad, shawny, Indian salad, John's cabbage, Hydrophyllum virginianum\nn12835331\tyellow bells, California yellow bells, whispering bells, Emmanthe penduliflora\nn12835766\tyerba santa, Eriodictyon californicum\nn12836212\tnemophila\nn12836337\tbaby blue-eyes, Nemophila menziesii\nn12836508\tfive-spot, Nemophila maculata\nn12836862\tscorpionweed, scorpion weed, phacelia\nn12837052\tCalifornia bluebell, Phacelia campanularia\nn12837259\tCalifornia bluebell, whitlavia, Phacelia minor, Phacelia whitlavia\nn12837466\tfiddleneck, Phacelia tanacetifolia\nn12837803\tfiesta flower, Pholistoma auritum, Nemophila aurita\nn12839574\tbasil thyme, basil balm, mother of thyme, Acinos arvensis, Satureja acinos\nn12839979\tgiant hyssop\nn12840168\tyellow giant hyssop, Agastache nepetoides\nn12840362\tanise hyssop, Agastache foeniculum\nn12840502\tMexican hyssop, Agastache mexicana\nn12840749\tbugle, bugleweed\nn12841007\tcreeping bugle, Ajuga reptans\nn12841193\terect bugle, blue bugle, Ajuga genevensis\nn12841354\tpyramid bugle, Ajuga pyramidalis\nn12842302\twood mint\nn12842519\thairy wood mint, Blephilia hirsuta\nn12842642\tdowny wood mint, Blephilia celiata\nn12842887\tcalamint\nn12843144\tcommon calamint, Calamintha sylvatica, Satureja calamintha officinalis\nn12843316\tlarge-flowered calamint, Calamintha grandiflora, Clinopodium grandiflorum, Satureja grandiflora\nn12843557\tlesser calamint, field balm, Calamintha nepeta, Calamintha nepeta glantulosa, Satureja nepeta, Satureja calamintha glandulosa\nn12843970\twild basil, cushion calamint, Clinopodium vulgare, Satureja vulgaris\nn12844409\thorse balm, horseweed, stoneroot, stone-root, richweed, stone root, Collinsonia canadensis\nn12844939\tcoleus, flame nettle\nn12845187\tcountry borage, Coleus aromaticus, Coleus amboinicus, 
Plectranthus amboinicus\nn12845413\tpainted nettle, Joseph's coat, Coleus blumei, Solenostemon blumei, Solenostemon scutellarioides\nn12845908\tApalachicola rosemary, Conradina glabra\nn12846335\tdragonhead, dragon's head, Dracocephalum parviflorum\nn12846690\telsholtzia\nn12847008\themp nettle, dead nettle, Galeopsis tetrahit\nn12847374\tground ivy, alehoof, field balm, gill-over-the-ground, runaway robin, Glechoma hederaceae, Nepeta hederaceae\nn12847927\tpennyroyal, American pennyroyal, Hedeoma pulegioides\nn12848499\thyssop, Hyssopus officinalis\nn12849061\tdead nettle\nn12849279\twhite dead nettle, Lamium album\nn12849416\thenbit, Lamium amplexicaule\nn12849952\tEnglish lavender, Lavandula angustifolia, Lavandula officinalis\nn12850168\tFrench lavender, Lavandula stoechas\nn12850336\tspike lavender, French lavender, Lavandula latifolia\nn12850906\tdagga, Cape dagga, red dagga, wilde dagga, Leonotis leonurus\nn12851094\tlion's-ear, Leonotis nepetaefolia, Leonotis nepetifolia\nn12851469\tmotherwort, Leonurus cardiaca\nn12851860\tpitcher sage, Lepechinia calycina, Sphacele calycina\nn12852234\tbugleweed, Lycopus virginicus\nn12852428\twater horehound, Lycopus americanus\nn12852570\tgipsywort, gypsywort, Lycopus europaeus\nn12853080\toriganum\nn12853287\toregano, marjoram, pot marjoram, wild marjoram, winter sweet, Origanum vulgare\nn12853482\tsweet marjoram, knotted marjoram, Origanum majorana, Majorana hortensis\nn12854048\thorehound\nn12854193\tcommon horehound, white horehound, Marrubium vulgare\nn12854600\tlemon balm, garden balm, sweet balm, bee balm, beebalm, Melissa officinalis\nn12855365\tcorn mint, field mint, Mentha arvensis\nn12855494\twater-mint, water mint, Mentha aquatica\nn12855710\tbergamot mint, lemon mint, eau de cologne mint, Mentha citrata\nn12855886\thorsemint, Mentha longifolia\nn12856091\tpeppermint, Mentha piperita\nn12856287\tspearmint, Mentha spicata\nn12856479\tapple mint, applemint, Mentha rotundifolia, Mentha 
suaveolens\nn12856680\tpennyroyal, Mentha pulegium\nn12857204\tyerba buena, Micromeria chamissonis, Micromeria douglasii, Satureja douglasii\nn12857779\tmolucca balm, bells of Ireland, Molucella laevis\nn12858150\tmonarda, wild bergamot\nn12858397\tbee balm, beebalm, bergamot mint, oswego tea, Monarda didyma\nn12858618\thorsemint, Monarda punctata\nn12858871\tbee balm, beebalm, Monarda fistulosa\nn12858987\tlemon mint, horsemint, Monarda citriodora\nn12859153\tplains lemon monarda, Monarda pectinata\nn12859272\tbasil balm, Monarda clinopodia\nn12859679\tmustang mint, Monardella lanceolata\nn12859986\tcatmint, catnip, Nepeta cataria\nn12860365\tbasil\nn12860978\tbeefsteak plant, Perilla frutescens crispa\nn12861345\tphlomis\nn12861541\tJerusalem sage, Phlomis fruticosa\nn12861892\tphysostegia\nn12862512\tplectranthus\nn12862828\tpatchouli, patchouly, pachouli, Pogostemon cablin\nn12863234\tself-heal, heal all, Prunella vulgaris\nn12863624\tmountain mint\nn12864160\trosemary, Rosmarinus officinalis\nn12865037\tclary sage, Salvia clarea\nn12865562\tpurple sage, chaparral sage, Salvia leucophylla\nn12865708\tcancerweed, cancer weed, Salvia lyrata\nn12865824\tcommon sage, ramona, Salvia officinalis\nn12866002\tmeadow clary, Salvia pratensis\nn12866162\tclary, Salvia sclarea\nn12866333\tpitcher sage, Salvia spathacea\nn12866459\tMexican mint, Salvia divinorum\nn12866635\twild sage, wild clary, vervain sage, Salvia verbenaca\nn12866968\tsavory\nn12867184\tsummer savory, Satureja hortensis, Satureia hortensis\nn12867449\twinter savory, Satureja montana, Satureia montana\nn12867826\tskullcap, helmetflower\nn12868019\tblue pimpernel, blue skullcap, mad-dog skullcap, mad-dog weed, Scutellaria lateriflora\nn12868880\thedge nettle, dead nettle, Stachys sylvatica\nn12869061\thedge nettle, Stachys palustris\nn12869478\tgermander\nn12869668\tAmerican germander, wood sage, Teucrium canadense\nn12870048\tcat thyme, marum, Teucrium marum\nn12870225\twood sage, Teucrium 
scorodonia\nn12870535\tthyme\nn12870682\tcommon thyme, Thymus vulgaris\nn12870891\twild thyme, creeping thyme, Thymus serpyllum\nn12871272\tblue curls\nn12871696\tturpentine camphor weed, camphorweed, vinegarweed, Trichostema lanceolatum\nn12871859\tbastard pennyroyal, Trichostema dichotomum\nn12872458\tbladderwort\nn12872914\tbutterwort\nn12873341\tgenlisea\nn12873984\tmartynia, Martynia annua\nn12875269\tcommon unicorn plant, devil's claw, common devil's claw, elephant-tusk, proboscis flower, ram's horn, Proboscidea louisianica\nn12875697\tsand devil's claw, Proboscidea arenaria, Martynia arenaria\nn12875861\tsweet unicorn plant, Proboscidea fragrans, Martynia fragrans\nn12876899\tfigwort\nn12877244\tsnapdragon\nn12877493\twhite snapdragon, Antirrhinum coulterianum\nn12877637\tyellow twining snapdragon, Antirrhinum filipes\nn12877838\tMediterranean snapdragon, Antirrhinum majus\nn12878169\tkitten-tails\nn12878325\tAlpine besseya, Besseya alpina\nn12878784\tfalse foxglove, Aureolaria pedicularia, Gerardia pedicularia\nn12879068\tfalse foxglove, Aureolaria virginica, Gerardia virginica\nn12879527\tcalceolaria, slipperwort\nn12879963\tIndian paintbrush, painted cup\nn12880244\tdesert paintbrush, Castilleja chromosa\nn12880462\tgiant red paintbrush, Castilleja miniata\nn12880638\tgreat plains paintbrush, Castilleja sessiliflora\nn12880799\tsulfur paintbrush, Castilleja sulphurea\nn12881105\tshellflower, shell-flower, turtlehead, snakehead, snake-head, Chelone glabra\nn12881913\tmaiden blue-eyed Mary, Collinsia parviflora\nn12882158\tblue-eyed Mary, Collinsia verna\nn12882779\tfoxglove, digitalis\nn12882945\tcommon foxglove, fairy bell, fingerflower, finger-flower, fingerroot, finger-root, Digitalis purpurea\nn12883265\tyellow foxglove, straw foxglove, Digitalis lutea\nn12883628\tgerardia\nn12884100\tblue toadflax, old-field toadflax, Linaria canadensis\nn12884260\ttoadflax, butter-and-eggs, wild snapdragon, devil's flax, Linaria vulgaris\nn12885045\tgolden-beard 
penstemon, Penstemon barbatus\nn12885265\tscarlet bugler, Penstemon centranthifolius\nn12885510\tred shrubby penstemon, redwood penstemon\nn12885754\tPlatte River penstemon, Penstemon cyananthus\nn12886185\thot-rock penstemon, Penstemon deustus\nn12886402\tJones' penstemon, Penstemon dolius\nn12886600\tshrubby penstemon, lowbush penstemon, Penstemon fruticosus\nn12886831\tnarrow-leaf penstemon, Penstemon linarioides\nn12887293\tballoon flower, scented penstemon, Penstemon palmeri\nn12887532\tParry's penstemon, Penstemon parryi\nn12887713\trock penstemon, cliff penstemon, Penstemon rupicola\nn12888016\tRydberg's penstemon, Penstemon rydbergii\nn12888234\tcascade penstemon, Penstemon serrulatus\nn12888457\tWhipple's penstemon, Penstemon whippleanus\nn12889219\tmoth mullein, Verbascum blattaria\nn12889412\twhite mullein, Verbascum lychnitis\nn12889579\tpurple mullein, Verbascum phoeniceum\nn12889713\tcommon mullein, great mullein, Aaron's rod, flannel mullein, woolly mullein, torch, Verbascum thapsus\nn12890265\tveronica, speedwell\nn12890490\tfield speedwell, Veronica agrestis\nn12890685\tbrooklime, American brooklime, Veronica americana\nn12890928\tcorn speedwell, Veronica arvensis\nn12891093\tbrooklime, European brooklime, Veronica beccabunga\nn12891305\tgermander speedwell, bird's eye, Veronica chamaedrys\nn12891469\twater speedwell, Veronica michauxii, Veronica anagallis-aquatica\nn12891643\tcommon speedwell, gypsyweed, Veronica officinalis\nn12891824\tpurslane speedwell, Veronica peregrina\nn12892013\tthyme-leaved speedwell, Veronica serpyllifolia\nn12893463\tnightshade\nn12893993\thorse nettle, ball nettle, bull nettle, ball nightshade, Solanum carolinense\nn12895298\tAfrican holly, Solanum giganteum\nn12895811\tpotato vine, Solanum jasmoides\nn12896615\tgarden huckleberry, wonderberry, sunberry, Solanum nigrum guineese, Solanum melanocerasum, Solanum burbankii\nn12897118\tnaranjilla, Solanum quitoense\nn12897788\tpotato vine, giant potato creeper, Solanum 
wendlandii\nn12897999\tpotato tree, Brazilian potato tree, Solanum wrightii, Solanum macranthum\nn12898342\tbelladonna, belladonna plant, deadly nightshade, Atropa belladonna\nn12898774\tbush violet, browallia\nn12899166\tlady-of-the-night, Brunfelsia americana\nn12899537\tangel's trumpet, maikoa, Brugmansia arborea, Datura arborea\nn12899752\tangel's trumpet, Brugmansia suaveolens, Datura suaveolens\nn12899971\tred angel's trumpet, Brugmansia sanguinea, Datura sanguinea\nn12900783\tcone pepper, Capsicum annuum conoides\nn12901724\tbird pepper, Capsicum frutescens baccatum, Capsicum baccatum\nn12902466\tday jessamine, Cestrum diurnum\nn12902662\tnight jasmine, night jessamine, Cestrum nocturnum\nn12903014\ttree tomato, tamarillo\nn12903367\tthorn apple\nn12903503\tjimsonweed, jimson weed, Jamestown weed, common thorn apple, apple of Peru, Datura stramonium\nn12903964\tpichi, Fabiana imbricata\nn12904314\thenbane, black henbane, stinking nightshade, Hyoscyamus niger\nn12904562\tEgyptian henbane, Hyoscyamus muticus\nn12904938\tmatrimony vine, boxthorn\nn12905135\tcommon matrimony vine, Duke of Argyll's tea tree, Lycium barbarum, Lycium halimifolium\nn12905412\tChristmasberry, Christmas berry, Lycium carolinianum\nn12906214\tplum tomato\nn12906498\tmandrake, devil's apples, Mandragora officinarum\nn12906771\tmandrake root, mandrake\nn12907057\tapple of Peru, shoo fly, Nicandra physaloides\nn12907671\tflowering tobacco, Jasmine tobacco, Nicotiana alata\nn12907857\tcommon tobacco, Nicotiana tabacum\nn12908093\twild tobacco, Indian tobacco, Nicotiana rustica\nn12908645\tcupflower, nierembergia\nn12908854\twhitecup, Nierembergia repens, Nierembergia rivularis\nn12909421\tpetunia\nn12909614\tlarge white petunia, Petunia axillaris\nn12909759\tviolet-flowered petunia, Petunia integrifolia\nn12909917\thybrid petunia, Petunia hybrida\nn12911079\tcape gooseberry, purple ground cherry, Physalis peruviana\nn12911264\tstrawberry tomato, dwarf cape gooseberry, Physalis 
pruinosa\nn12911440\ttomatillo, jamberry, Mexican husk tomato, Physalis ixocarpa\nn12911673\ttomatillo, miltomate, purple ground cherry, jamberry, Physalis philadelphica\nn12911914\tyellow henbane, Physalis viscosa\nn12912274\tcock's eggs, Salpichroa organifolia, Salpichroa rhomboidea\nn12912670\tsalpiglossis\nn12912801\tpainted tongue, Salpiglossis sinuata\nn12913144\tbutterfly flower, poor man's orchid, schizanthus\nn12913524\tScopolia carniolica\nn12913791\tchalice vine, trumpet flower, cupflower, Solandra guttata\nn12914923\tverbena, vervain\nn12915140\tlantana\nn12915568\tblack mangrove, Avicennia marina\nn12915811\twhite mangrove, Avicennia officinalis\nn12916179\tblack mangrove, Aegiceras majus\nn12916511\tteak, Tectona grandis\nn12917901\tspurge\nn12918609\tsun spurge, wartweed, wartwort, devil's milk, Euphorbia helioscopia\nn12918810\tpetty spurge, devil's milk, Euphorbia peplus\nn12918991\tmedusa's head, Euphorbia medusae, Euphorbia caput-medusae\nn12919195\twild spurge, flowering spurge, tramp's spurge, Euphorbia corollata\nn12919403\tsnow-on-the-mountain, snow-in-summer, ghost weed, Euphorbia marginata\nn12919646\tcypress spurge, Euphorbia cyparissias\nn12919847\tleafy spurge, wolf's milk, Euphorbia esula\nn12920043\thairy spurge, Euphorbia hirsuta\nn12920204\tpoinsettia, Christmas star, Christmas flower, lobster plant, Mexican flameleaf, painted leaf, Euphorbia pulcherrima\nn12920521\tJapanese poinsettia, mole plant, paint leaf, Euphorbia heterophylla\nn12920719\tfire-on-the-mountain, painted leaf, Mexican fire plant, Euphorbia cyathophora\nn12920955\twood spurge, Euphorbia amygdaloides\nn12921315\tdwarf spurge, Euphorbia exigua\nn12921499\tscarlet plume, Euphorbia fulgens\nn12921660\tnaboom, cactus euphorbia, Euphorbia ingens\nn12921868\tcrown of thorns, Christ thorn, Christ plant, Euphorbia milii\nn12922119\ttoothed spurge, Euphorbia dentata\nn12922458\tthree-seeded mercury, Acalypha virginica\nn12922763\tcroton, Croton 
tiglium\nn12923108\tcascarilla, Croton eluteria\nn12923257\tcascarilla bark, eleuthera bark, sweetwood bark\nn12924623\tcastor-oil plant, castor bean plant, palma christi, palma christ, Ricinus communis\nn12925179\tspurge nettle, tread-softly, devil nettle, pica-pica, Cnidoscolus urens, Jatropha urens, Jatropha stimulosus\nn12925583\tphysic nut, Jatropha curcus\nn12926039\tPara rubber tree, caoutchouc tree, Hevea brasiliensis\nn12926480\tcassava, casava\nn12926689\tbitter cassava, manioc, mandioc, mandioca, tapioca plant, gari, Manihot esculenta, Manihot utilissima\nn12927013\tcassava, manioc\nn12927194\tsweet cassava, Manihot dulcis\nn12927494\tcandlenut, varnish tree, Aleurites moluccana\nn12927758\ttung tree, tung, tung-oil tree, Aleurites fordii\nn12928071\tslipper spurge, slipper plant\nn12928307\tcandelilla, Pedilanthus bracteatus, Pedilanthus pavonis\nn12928491\tJewbush, Jew-bush, Jew bush, redbird cactus, redbird flower, Pedilanthus tithymaloides\nn12928819\tjumping bean, jumping seed, Mexican jumping bean\nn12929403\tcamellia, camelia\nn12929600\tjaponica, Camellia japonica\nn12930778\tumbellifer, umbelliferous plant\nn12930951\twild parsley\nn12931231\tfool's parsley, lesser hemlock, Aethusa cynapium\nn12931542\tdill, Anethum graveolens\nn12931906\tangelica, angelique\nn12932173\tgarden angelica, archangel, Angelica Archangelica\nn12932365\twild angelica, Angelica sylvestris\nn12932706\tchervil, beaked parsley, Anthriscus cereifolium\nn12932966\tcow parsley, wild chervil, Anthriscus sylvestris\nn12933274\twild celery, Apium graveolens\nn12934036\tastrantia, masterwort\nn12934174\tgreater masterwort, Astrantia major\nn12934479\tcaraway, Carum carvi\nn12934685\twhorled caraway\nn12934985\twater hemlock, Cicuta verosa\nn12935166\tspotted cowbane, spotted hemlock, spotted water hemlock\nn12935609\themlock, poison hemlock, poison parsley, California fern, Nebraska fern, winter fern, Conium maculatum\nn12936155\tearthnut, Conopodium denudatum\nn12936826\tcumin, 
Cuminum cyminum\nn12937130\twild carrot, Queen Anne's lace, Daucus carota\nn12938081\teryngo, eringo\nn12938193\tsea holly, sea holm, sea eryngium, Eryngium maritimum\nn12938445\tbutton snakeroot, Eryngium aquaticum\nn12938667\trattlesnake master, rattlesnake's master, button snakeroot, Eryngium yuccifolium\nn12939104\tfennel\nn12939282\tcommon fennel, Foeniculum vulgare\nn12939479\tFlorence fennel, Foeniculum dulce, Foeniculum vulgare dulce\nn12939874\tcow parsnip, hogweed, Heracleum sphondylium\nn12940226\tlovage, Levisticum officinale\nn12940609\tsweet cicely, Myrrhis odorata\nn12941220\twater fennel, Oenanthe aquatica\nn12941536\tparsnip, Pastinaca sativa\nn12941717\tcultivated parsnip\nn12942025\twild parsnip, madnep\nn12942395\tparsley, Petroselinum crispum\nn12942572\tItalian parsley, flat-leaf parsley, Petroselinum crispum neapolitanum\nn12942729\tHamburg parsley, turnip-rooted parsley, Petroselinum crispum tuberosum\nn12943049\tanise, anise plant, Pimpinella anisum\nn12943443\tsanicle, snakeroot\nn12943912\tpurple sanicle, Sanicula bipinnatifida\nn12944095\tEuropean sanicle, Sanicula Europaea\nn12945177\twater parsnip, Sium suave\nn12945366\tgreater water parsnip, Sium latifolium\nn12945549\tskirret, Sium sisarum\nn12946849\tdogwood, dogwood tree, cornel\nn12947313\tcommon white dogwood, eastern flowering dogwood, Cornus florida\nn12947544\tred osier, red osier dogwood, red dogwood, American dogwood, redbrush, Cornus stolonifera\nn12947756\tsilky dogwood, Cornus obliqua\nn12947895\tsilky cornel, silky dogwood, Cornus amomum\nn12948053\tcommon European dogwood, red dogwood, blood-twig, pedwood, Cornus sanguinea\nn12948251\tbunchberry, dwarf cornel, crackerberry, pudding berry, Cornus canadensis\nn12948495\tcornelian cherry, Cornus mas\nn12949160\tpuka, Griselinia lucida\nn12949361\tkapuka, Griselinia littoralis\nn12950126\tvalerian\nn12950314\tcommon valerian, garden heliotrope, Valeriana officinalis\nn12950796\tcommon corn salad, lamb's lettuce, 
Valerianella olitoria, Valerianella locusta\nn12951146\tred valerian, French honeysuckle, Centranthus ruber\nn12951835\tfilmy fern, film fern\nn12952165\tbristle fern, filmy fern\nn12952469\thare's-foot bristle fern, Trichomanes boschianum\nn12952590\tKillarney fern, Trichomanes speciosum\nn12952717\tkidney fern, Trichomanes reniforme\nn12953206\tflowering fern, osmund\nn12953484\troyal fern, royal osmund, king fern, ditch fern, French bracken, Osmunda regalis\nn12953712\tinterrupted fern, Osmunda clatonia\nn12954353\tcrape fern, Prince-of-Wales fern, Prince-of-Wales feather, Prince-of-Wales plume, Leptopteris superba, Todea superba\nn12954799\tcrepe fern, king fern, Todea barbara\nn12955414\tcurly grass, curly grass fern, Schizaea pusilla\nn12955840\tpine fern, Anemia adiantifolia\nn12956170\tclimbing fern\nn12956367\tcreeping fern, Hartford fern, Lygodium palmatum\nn12956588\tclimbing maidenhair, climbing maidenhair fern, snake fern, Lygodium microphyllum\nn12956922\tscented fern, Mohria caffrorum\nn12957608\tclover fern, pepperwort\nn12957803\tnardoo, nardo, common nardoo, Marsilea drummondii\nn12957924\twater clover, Marsilea quadrifolia\nn12958261\tpillwort, Pilularia globulifera\nn12958615\tregnellidium, Regnellidium diphyllum\nn12959074\tfloating-moss, Salvinia rotundifolia, Salvinia auriculata\nn12959538\tmosquito fern, floating fern, Carolina pond fern, Azolla caroliniana\nn12960378\tadder's tongue, adder's tongue fern\nn12960552\tribbon fern, Ophioglossum pendulum\nn12960863\tgrape fern\nn12961242\tdaisyleaf grape fern, daisy-leaved grape fern, Botrychium matricariifolium\nn12961393\tleathery grape fern, Botrychium multifidum\nn12961536\trattlesnake fern, Botrychium virginianum\nn12961879\tflowering fern, Helminthostachys zeylanica\nn12963628\tpowdery mildew\nn12964920\tDutch elm fungus, Ceratostomella ulmi\nn12965626\tergot, Claviceps purpurea\nn12965951\trye ergot\nn12966804\tblack root rot fungus, Xylaria mali\nn12966945\tdead-man's-fingers, 
dead-men's-fingers, Xylaria polymorpha\nn12968136\tsclerotinia\nn12968309\tbrown cup\nn12969131\tearthball, false truffle, puffball, hard-skinned puffball\nn12969425\tScleroderma citrinum, Scleroderma aurantium\nn12969670\tScleroderma flavidium, star earthball\nn12969927\tScleroderma bovista, smooth earthball\nn12970193\tPodaxaceae\nn12970293\tstalked puffball\nn12970733\tstalked puffball\nn12971400\tfalse truffle\nn12971804\tRhizopogon idahoensis\nn12972136\tTruncocolumella citrina\nn12973443\tmucor\nn12973791\trhizopus\nn12973937\tbread mold, Rhizopus nigricans\nn12974987\tslime mold, slime mould\nn12975804\ttrue slime mold, acellular slime mold, plasmodial slime mold, myxomycete\nn12976198\tcellular slime mold\nn12976554\tdictostylium\nn12978076\tpond-scum parasite\nn12979316\tpotato wart fungus, Synchytrium endobioticum\nn12979829\twhite fungus, Saprolegnia ferax\nn12980080\twater mold\nn12980840\tdowny mildew, false mildew\nn12981086\tblue mold fungus, Peronospora tabacina\nn12981301\tonion mildew, Peronospora destructor\nn12981443\ttobacco mildew, Peronospora hyoscyami\nn12981954\twhite rust\nn12982468\tpythium\nn12982590\tdamping off fungus, Pythium debaryanum\nn12982915\tPhytophthora citrophthora\nn12983048\tPhytophthora infestans\nn12983654\tclubroot fungus, Plasmodiophora brassicae\nn12983873\tGeglossaceae\nn12983961\tSarcosomataceae\nn12984267\tRufous rubber cup\nn12984489\tdevil's cigar\nn12984595\tdevil's urn\nn12985420\ttruffle, earthnut, earth-ball\nn12985773\tclub fungus\nn12985857\tcoral fungus\nn12986227\ttooth fungus\nn12987056\tlichen\nn12987423\tascolichen\nn12987535\tbasidiolichen\nn12988158\tlecanora\nn12988341\tmanna lichen\nn12988572\tarchil, orchil\nn12989007\troccella, Roccella tinctoria\nn12989938\tbeard lichen, beard moss, Usnea barbata\nn12990597\thorsehair lichen, horsetail lichen\nn12991184\treindeer moss, reindeer lichen, arctic moss, Cladonia rangiferina\nn12991837\tcrottle, crottal, crotal\nn12992177\tIceland moss, Iceland lichen, 
Cetraria islandica\nn12992868\tfungus\nn12994892\tpromycelium\nn12995601\ttrue fungus\nn12997654\tbasidiomycete, basidiomycetous fungi\nn12997919\tmushroom\nn12998815\tagaric\nn13000891\tmushroom\nn13001041\tmushroom\nn13001206\ttoadstool\nn13001366\thorse mushroom, Agaricus arvensis\nn13001529\tmeadow mushroom, field mushroom, Agaricus campestris\nn13001930\tshiitake, shiitake mushroom, Chinese black mushroom, golden oak mushroom, Oriental black mushroom, Lentinus edodes\nn13002209\tscaly lentinus, Lentinus lepideus\nn13002750\troyal agaric, Caesar's agaric, Amanita caesarea\nn13002925\tfalse deathcap, Amanita mappa\nn13003061\tfly agaric, Amanita muscaria\nn13003254\tdeath cap, death cup, death angel, destroying angel, Amanita phalloides\nn13003522\tblushing mushroom, blusher, Amanita rubescens\nn13003712\tdestroying angel, Amanita verna\nn13004423\tchanterelle, chantarelle, Cantharellus cibarius\nn13004640\tfloccose chanterelle, Cantharellus floccosus\nn13004826\tpig's ears, Cantharellus clavatus\nn13004992\tcinnabar chanterelle, Cantharellus cinnabarinus\nn13005329\tjack-o-lantern fungus, jack-o-lantern, jack-a-lantern, Omphalotus illudens\nn13005984\tinky cap, inky-cap mushroom, Coprinus atramentarius\nn13006171\tshaggymane, shaggy cap, shaggymane mushroom, Coprinus comatus\nn13006631\tmilkcap, Lactarius delicioso\nn13006894\tfairy-ring mushroom, Marasmius oreades\nn13007034\tfairy ring, fairy circle\nn13007417\toyster mushroom, oyster fungus, oyster agaric, Pleurotus ostreatus\nn13007629\tolive-tree agaric, Pleurotus phosphoreus\nn13008157\tPholiota astragalina\nn13008315\tPholiota aurea, golden pholiota\nn13008485\tPholiota destruens\nn13008689\tPholiota flammans\nn13008839\tPholiota flavida\nn13009085\tnameko, viscid mushroom, Pholiota nameko\nn13009244\tPholiota squarrosa-adiposa\nn13009429\tPholiota squarrosa, scaly pholiota\nn13009656\tPholiota squarrosoides\nn13010694\tStropharia ambigua\nn13010951\tStropharia hornemannii\nn13011221\tStropharia 
rugoso-annulata\nn13011595\tgill fungus\nn13012253\tEntoloma lividum, Entoloma sinuatum\nn13012469\tEntoloma aprile\nn13012973\tChlorophyllum molybdites\nn13013534\tlepiota\nn13013764\tparasol mushroom, Lepiota procera\nn13013965\tpoisonous parasol, Lepiota morgani\nn13014097\tLepiota naucina\nn13014265\tLepiota rhacodes\nn13014409\tAmerican parasol, Lepiota americana\nn13014581\tLepiota rubrotincta\nn13014741\tLepiota clypeolaria\nn13014879\tonion stem, Lepiota cepaestipes\nn13015509\tpink disease fungus, Corticium salmonicolor\nn13015688\tbottom rot fungus, Corticium solani\nn13016076\tpotato fungus, Pellicularia filamentosa, Rhizoctinia solani\nn13016289\tcoffee fungus, Pellicularia koleroga\nn13017102\tblewits, Clitocybe nuda\nn13017240\tsandy mushroom, Tricholoma populinum\nn13017439\tTricholoma pessundatum\nn13017610\tTricholoma sejunctum\nn13017789\tman-on-a-horse, Tricholoma flavovirens\nn13017979\tTricholoma venenata\nn13018088\tTricholoma pardinum\nn13018232\tTricholoma vaccinum\nn13018407\tTricholoma aurantium\nn13018906\tVolvaria bombycina\nn13019496\tPluteus aurantiorugosus\nn13019643\tPluteus magnus, sawdust mushroom\nn13019835\tdeer mushroom, Pluteus cervinus\nn13020191\tstraw mushroom, Chinese mushroom, Volvariella volvacea\nn13020481\tVolvariella bombycina\nn13020964\tClitocybe clavipes\nn13021166\tClitocybe dealbata\nn13021332\tClitocybe inornata\nn13021543\tClitocybe robusta, Clytocybe alba\nn13021689\tClitocybe irina, Tricholoma irinum, Lepista irina\nn13021867\tClitocybe subconnexa\nn13022210\twinter mushroom, Flammulina velutipes\nn13022709\tmycelium\nn13022903\tsclerotium\nn13023134\tsac fungus\nn13024012\tascomycete, ascomycetous fungus\nn13024500\tClavicipitaceae, grainy club mushrooms\nn13024653\tgrainy club\nn13025647\tyeast\nn13025854\tbaker's yeast, brewer's yeast, Saccharomyces cerevisiae\nn13026015\twine-maker's yeast, Saccharomyces ellipsoides\nn13027557\tAspergillus fumigatus\nn13027879\tbrown root rot fungus, Thielavia 
basicola\nn13028611\tdiscomycete, cup fungus\nn13028937\tLeotia lubrica\nn13029122\tMitrula elegans\nn13029326\tSarcoscypha coccinea, scarlet cup\nn13029610\tCaloscypha fulgens\nn13029760\tAleuria aurantia, orange peel fungus\nn13030337\telf cup\nn13030616\tPeziza domicilina\nn13030852\tblood cup, fairy cup, Peziza coccinea\nn13031193\tUrnula craterium, urn fungus\nn13031323\tGaliella rufa\nn13031474\tJafnea semitosta\nn13032115\tmorel\nn13032381\tcommon morel, Morchella esculenta, sponge mushroom, sponge morel\nn13032618\tDisciotis venosa, cup morel\nn13032923\tVerpa, bell morel\nn13033134\tVerpa bohemica, early morel\nn13033396\tVerpa conica, conic Verpa\nn13033577\tblack morel, Morchella conica, conic morel, Morchella angusticeps, narrowhead morel\nn13033879\tMorchella crassipes, thick-footed morel\nn13034062\tMorchella semilibera, half-free morel, cow's head\nn13034555\tWynnea americana\nn13034788\tWynnea sparassoides\nn13035241\tfalse morel\nn13035389\tlorchel\nn13035707\thelvella\nn13035925\tHelvella crispa, miter mushroom\nn13036116\tHelvella acetabulum\nn13036312\tHelvella sulcata\nn13036804\tdiscina\nn13037406\tgyromitra\nn13037585\tGyromitra californica, California false morel\nn13037805\tGyromitra sphaerospora, round-spored gyromitra\nn13038068\tGyromitra esculenta, brain mushroom, beefsteak morel\nn13038376\tGyromitra infula, saddled-shaped false morel\nn13038577\tGyromitra fastigiata, Gyromitra brunnea\nn13038744\tGyromitra gigas\nn13039349\tgasteromycete, gastromycete\nn13040303\tstinkhorn, carrion fungus\nn13040629\tcommon stinkhorn, Phallus impudicus\nn13040796\tPhallus ravenelii\nn13041312\tdog stinkhorn, Mutinus caninus\nn13041943\tCalostoma lutescens\nn13042134\tCalostoma cinnabarina\nn13042316\tCalostoma ravenelii\nn13042982\tstinky squid, Pseudocolus fusiformis\nn13043926\tpuffball, true puffball\nn13044375\tgiant puffball, Calvatia gigantea\nn13044778\tearthstar\nn13045210\tGeastrum coronatum\nn13045594\tRadiigera 
fuscogleba\nn13045975\tAstreus pteridis\nn13046130\tAstreus hygrometricus\nn13046669\tbird's-nest fungus\nn13047862\tGastrocybe lateritia\nn13048447\tMacowanites americanus\nn13049953\tpolypore, pore fungus, pore mushroom\nn13050397\tbracket fungus, shelf fungus\nn13050705\tAlbatrellus dispansus\nn13050940\tAlbatrellus ovinus, sheep polypore\nn13051346\tNeolentinus ponderosus\nn13052014\tOligoporus leucospongia\nn13052248\tPolyporus tenuiculus\nn13052670\then-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa\nn13052931\tPolyporus squamosus, scaly polypore\nn13053608\tbeefsteak fungus, Fistulina hepatica\nn13054073\tagaric, Fomes igniarius\nn13054560\tbolete\nn13055423\tBoletus chrysenteron\nn13055577\tBoletus edulis\nn13055792\tFrost's bolete, Boletus frostii\nn13055949\tBoletus luridus\nn13056135\tBoletus mirabilis\nn13056349\tBoletus pallidus\nn13056607\tBoletus pulcherrimus\nn13056799\tBoletus pulverulentus\nn13057054\tBoletus roxanae\nn13057242\tBoletus subvelutipes\nn13057422\tBoletus variipes\nn13057639\tBoletus zelleri\nn13058037\tFuscoboletinus paluster\nn13058272\tFuscoboletinus serotinus\nn13058608\tLeccinum fibrillosum\nn13059298\tSuillus albivelatus\nn13059657\told-man-of-the-woods, Strobilomyces floccopus\nn13060017\tBoletellus russellii\nn13060190\tjelly fungus\nn13061172\tsnow mushroom, Tremella fuciformis\nn13061348\twitches' butter, Tremella lutescens\nn13061471\tTremella foliacea\nn13061704\tTremella reticulata\nn13062421\tJew's-ear, Jew's-ears, ear fungus, Auricularia auricula\nn13063269\trust, rust fungus\nn13063514\taecium\nn13064111\tflax rust, flax rust fungus, Melampsora lini\nn13064457\tblister rust, Cronartium ribicola\nn13065089\twheat rust, Puccinia graminis\nn13065514\tapple rust, cedar-apple rust, Gymnosporangium juniperi-virginianae\nn13066129\tsmut, smut fungus\nn13066448\tcovered smut\nn13066979\tloose smut\nn13067191\tcornsmut, corn smut\nn13067330\tboil smut, Ustilago maydis\nn13067532\tSphacelotheca, genus 
Sphacelotheca\nn13067672\thead smut, Sphacelotheca reiliana\nn13068255\tbunt, Tilletia caries\nn13068434\tbunt, stinking smut, Tilletia foetida\nn13068735\tonion smut, Urocystis cepulae\nn13068917\tflag smut fungus\nn13069224\twheat flag smut, Urocystis tritici\nn13069773\tfelt fungus, Septobasidium pseudopedicellatum\nn13070308\twaxycap\nn13070875\tHygrocybe acutoconica, conic waxycap\nn13071371\tHygrophorus borealis\nn13071553\tHygrophorus caeruleus\nn13071815\tHygrophorus inocybiformis\nn13072031\tHygrophorus kauffmanii\nn13072209\tHygrophorus marzuolus\nn13072350\tHygrophorus purpurascens\nn13072528\tHygrophorus russula\nn13072706\tHygrophorus sordidus\nn13072863\tHygrophorus tennesseensis\nn13073055\tHygrophorus turundus\nn13073703\tNeohygrophorus angelesianus\nn13074619\tCortinarius armillatus\nn13074814\tCortinarius atkinsonianus\nn13075020\tCortinarius corrugatus\nn13075272\tCortinarius gentilis\nn13075441\tCortinarius mutabilis, purple-staining Cortinarius\nn13075684\tCortinarius semisanguineus\nn13075847\tCortinarius subfoetidus\nn13076041\tCortinarius violaceus\nn13076405\tGymnopilus spectabilis\nn13076643\tGymnopilus validipes\nn13076831\tGymnopilus ventricosus\nn13077033\tmold, mould\nn13077295\tmildew\nn13078021\tverticillium\nn13079073\tmonilia\nn13079419\tcandida\nn13079567\tCandida albicans, Monilia albicans\nn13080306\tblastomycete\nn13080866\tyellow spot fungus, Cercospora kopkei\nn13081229\tgreen smut fungus, Ustilaginoidea virens\nn13081999\tdry rot\nn13082568\trhizoctinia\nn13083023\thouseplant\nn13083461\tbedder, bedding plant\nn13084184\tsucculent\nn13084834\tcultivar\nn13085113\tweed\nn13085747\twort\nn13090018\tbrier\nn13090871\taril\nn13091620\tsporophyll, sporophyl\nn13091774\tsporangium, spore case, spore sac\nn13091982\tsporangiophore\nn13092078\tascus\nn13092240\tascospore\nn13092385\tarthrospore\nn13092987\teusporangium\nn13093275\ttetrasporangium\nn13093629\tgametangium\nn13094145\tsorus\nn13094273\tsorus\nn13095013\tpartial 
veil\nn13096779\tlignum\nn13098515\tvascular ray, medullary ray\nn13098962\tphloem, bast\nn13099833\tevergreen, evergreen plant\nn13099999\tdeciduous plant\nn13100156\tpoisonous plant\nn13100677\tvine\nn13102648\tcreeper\nn13102775\ttendril\nn13103023\troot climber\nn13103660\tlignosae\nn13103750\tarborescent plant\nn13103877\tsnag\nn13104059\ttree\nn13107694\ttimber tree\nn13107807\ttreelet\nn13107891\tarbor\nn13108131\tbean tree\nn13108323\tpollard\nn13108481\tsapling\nn13108545\tshade tree\nn13108662\tgymnospermous tree\nn13108841\tconifer, coniferous tree\nn13109733\tangiospermous tree, flowering tree\nn13110915\tnut tree\nn13111174\tspice tree\nn13111340\tfever tree\nn13111504\tstump, tree stump\nn13111881\tbonsai\nn13112035\tming tree\nn13112201\tming tree\nn13118330\tundershrub\nn13118707\tsubshrub, suffrutex\nn13119870\tbramble\nn13120211\tliana\nn13120958\tgeophyte\nn13121104\tdesert plant, xerophyte, xerophytic plant, xerophile, xerophilous plant\nn13121349\tmesophyte, mesophytic plant\nn13122364\tmarsh plant, bog plant, swamp plant\nn13123309\themiepiphyte, semiepiphyte\nn13123431\tstrangler, strangler tree\nn13123841\tlithophyte, lithophytic plant\nn13124358\tsaprobe\nn13124654\tautophyte, autophytic plant, autotroph, autotrophic organism\nn13125117\troot\nn13126050\ttaproot\nn13126856\tprop root\nn13127001\tprophyll\nn13127303\trootstock\nn13127666\tquickset\nn13127843\tstolon, runner, offset\nn13128278\ttuberous plant\nn13128582\trhizome, rootstock, rootstalk\nn13128976\trachis\nn13129078\tcaudex\nn13130014\tcladode, cladophyll, phylloclad, phylloclade\nn13130161\treceptacle\nn13130726\tscape, flower stalk\nn13131028\tumbel\nn13131618\tpetiole, leafstalk\nn13132034\tpeduncle\nn13132156\tpedicel, pedicle\nn13132338\tflower cluster\nn13132486\traceme\nn13132656\tpanicle\nn13132756\tthyrse, thyrsus\nn13132940\tcyme\nn13133140\tcymule\nn13133233\tglomerule\nn13133316\tscorpioid cyme\nn13133613\tear, spike, capitulum\nn13133932\tspadix\nn13134302\tbulbous 
plant\nn13134531\tbulbil, bulblet\nn13134844\tcormous plant\nn13134947\tfruit\nn13135692\tfruitlet\nn13135832\tseed\nn13136316\tbean\nn13136556\tnut\nn13136781\tnutlet\nn13137010\tkernel, meat\nn13137225\tsyconium\nn13137409\tberry\nn13137672\taggregate fruit, multiple fruit, syncarp\nn13137951\tsimple fruit, bacca\nn13138155\tacinus\nn13138308\tdrupe, stone fruit\nn13138658\tdrupelet\nn13138842\tpome, false fruit\nn13139055\tpod, seedpod\nn13139321\tloment\nn13139482\tpyxidium, pyxis\nn13139647\thusk\nn13139837\tcornhusk\nn13140049\tpod, cod, seedcase\nn13140367\taccessory fruit, pseudocarp\nn13141141\tbuckthorn\nn13141415\tbuckthorn berry, yellow berry\nn13141564\tcascara buckthorn, bearberry, bearwood, chittamwood, chittimwood, Rhamnus purshianus\nn13141797\tcascara, cascara sagrada, chittam bark, chittem bark\nn13141972\tCarolina buckthorn, indian cherry, Rhamnus carolinianus\nn13142182\tcoffeeberry, California buckthorn, California coffee, Rhamnus californicus\nn13142504\tredberry, red-berry, Rhamnus croceus\nn13142907\tnakedwood\nn13143285\tjujube, jujube bush, Christ's-thorn, Jerusalem thorn, Ziziphus jujuba\nn13143758\tChrist's-thorn, Jerusalem thorn, Paliurus spina-christi\nn13144084\thazel, hazel tree, Pomaderris apetala\nn13145040\tfox grape, Vitis labrusca\nn13145250\tmuscadine, Vitis rotundifolia\nn13145444\tvinifera, vinifera grape, common grape vine, Vitis vinifera\nn13146403\tPinot blanc\nn13146583\tSauvignon grape\nn13146928\tSauvignon blanc\nn13147153\tMuscadet\nn13147270\tRiesling\nn13147386\tZinfandel\nn13147532\tChenin blanc\nn13147689\tmalvasia\nn13147918\tVerdicchio\nn13148208\tBoston ivy, Japanese ivy, Parthenocissus tricuspidata\nn13148384\tVirginia creeper, American ivy, woodbine, Parthenocissus quinquefolia\nn13149296\ttrue pepper, pepper vine\nn13149970\tbetel, betel pepper, Piper betel\nn13150378\tcubeb\nn13150592\tschizocarp\nn13150894\tpeperomia\nn13151082\twatermelon begonia, Peperomia argyreia, Peperomia sandersii\nn13152339\tyerba 
mansa, Anemopsis californica\nn13154388\tpinna, pinnule\nn13154494\tfrond\nn13154841\tbract\nn13155095\tbracteole, bractlet\nn13155305\tinvolucre\nn13155611\tglume\nn13156986\tpalmate leaf\nn13157137\tpinnate leaf\nn13157346\tbijugate leaf, bijugous leaf, twice-pinnate\nn13157481\tdecompound leaf\nn13157684\tacuminate leaf\nn13157971\tdeltoid leaf\nn13158167\tensiform leaf\nn13158512\tlinear leaf, elongate leaf\nn13158605\tlyrate leaf\nn13158714\tobtuse leaf\nn13158815\toblanceolate leaf\nn13159357\tpandurate leaf, panduriform leaf\nn13159691\treniform leaf\nn13159890\tspatulate leaf\nn13160116\teven-pinnate leaf, abruptly-pinnate leaf\nn13160254\todd-pinnate leaf\nn13160365\tpedate leaf\nn13160604\tcrenate leaf\nn13160831\tdentate leaf\nn13160938\tdenticulate leaf\nn13161151\terose leaf\nn13161254\truncinate leaf\nn13161904\tprickly-edged leaf\nn13163553\tdeadwood\nn13163649\thaulm, halm\nn13163991\tbranchlet, twig, sprig\nn13164501\tosier\nn13170840\tgiant scrambling fern, Diplopterygium longissimum\nn13171210\tumbrella fern, fan fern, Sticherus flabellatus, Gleichenia flabellata\nn13171797\tfloating fern, water sprite, Ceratopteris pteridioides\nn13172923\tpolypody\nn13173132\tlicorice fern, Polypodium glycyrrhiza\nn13173259\tgrey polypody, gray polypody, resurrection fern, Polypodium polypodioides\nn13173488\tleatherleaf, leathery polypody, coast polypody, Polypodium scouleri\nn13173697\trock polypody, rock brake, American wall fern, Polypodium virgianum\nn13173882\tcommon polypody, adder's fern, wall fern, golden maidenhair, golden polypody, sweet fern, Polypodium vulgare\nn13174354\tbear's-paw fern, Aglaomorpha meyeniana\nn13174670\tstrap fern\nn13174823\tFlorida strap fern, cow-tongue fern, hart's-tongue fern\nn13175682\tbasket fern, Drynaria rigidula\nn13176363\tsnake polypody, Microgramma-piloselloides\nn13176714\tclimbing bird's nest fern, Microsorium punctatum\nn13177048\tgolden polypody, serpent fern, rabbit's-foot fern, Phlebodium aureum, Polypodium 
aureum\nn13177529\tstaghorn fern\nn13177768\tSouth American staghorn, Platycerium andinum\nn13177884\tcommon staghorn fern, elkhorn fern, Platycerium bifurcatum, Platycerium alcicorne\nn13178284\tfelt fern, tongue fern, Pyrrosia lingua, Cyclophorus lingua\nn13178707\tpotato fern, Solanopteris bifrons\nn13179056\tmyrmecophyte\nn13179804\tgrass fern, ribbon fern, Vittaria lineata\nn13180534\tspleenwort\nn13180875\tblack spleenwort, Asplenium adiantum-nigrum\nn13181055\tbird's nest fern, Asplenium nidus\nn13181244\tebony spleenwort, Scott's Spleenwort, Asplenium platyneuron\nn13181406\tblack-stem spleenwort, black-stemmed spleenwort, little ebony spleenwort\nn13181811\twalking fern, walking leaf, Asplenium rhizophyllum, Camptosorus rhizophyllus\nn13182164\tgreen spleenwort, Asplenium viride\nn13182338\tmountain spleenwort, Asplenium montanum\nn13182799\tlobed spleenwort, Asplenium pinnatifidum\nn13182937\tlanceolate spleenwort, Asplenium billotii\nn13183056\thart's-tongue, hart's-tongue fern, Asplenium scolopendrium, Phyllitis scolopendrium\nn13183489\tscale fern, scaly fern, Asplenium ceterach, Ceterach officinarum\nn13184394\tscolopendrium\nn13185269\tdeer fern, Blechnum spicant\nn13185658\tdoodia, rasp fern\nn13186388\tchain fern\nn13186546\tVirginia chain fern, Woodwardia virginica\nn13187367\tsilver tree fern, sago fern, black tree fern, Cyathea medullaris\nn13188096\tdavallia\nn13188268\thare's-foot fern\nn13188462\tCanary Island hare's foot fern, Davallia canariensis\nn13188767\tsquirrel's-foot fern, ball fern, Davalia bullata, Davalia bullata mariesii, Davallia Mariesii\nn13190060\tbracken, Pteridium esculentum\nn13190747\tsoft tree fern, Dicksonia antarctica\nn13191148\tScythian lamb, Cibotium barometz\nn13191620\tfalse bracken, Culcita dubia\nn13191884\tthyrsopteris, Thyrsopteris elegans\nn13192625\tshield fern, buckler fern\nn13193143\tbroad buckler-fern, Dryopteris dilatata\nn13193269\tfragrant cliff fern, fragrant shield fern, fragrant wood fern, 
Dryopteris fragrans\nn13193466\tGoldie's fern, Goldie's shield fern, goldie's wood fern, Dryopteris goldiana\nn13193642\twood fern, wood-fern, woodfern\nn13193856\tmale fern, Dryopteris filix-mas\nn13194036\tmarginal wood fern, evergreen wood fern, leatherleaf wood fern, Dryopteris marginalis\nn13194212\tmountain male fern, Dryopteris oreades\nn13194572\tlady fern, Athyrium filix-femina\nn13194758\tAlpine lady fern, Athyrium distentifolium\nn13194918\tsilvery spleenwort, glade fern, narrow-leaved spleenwort, Athyrium pycnocarpon, Diplazium pycnocarpon\nn13195341\tholly fern, Cyrtomium aculeatum, Polystichum aculeatum\nn13195761\tbladder fern\nn13196003\tbrittle bladder fern, brittle fern, fragile fern, Cystopteris fragilis\nn13196234\tmountain bladder fern, Cystopteris montana\nn13196369\tbulblet fern, bulblet bladder fern, berry fern, Cystopteris bulbifera\nn13196738\tsilvery spleenwort, Deparia acrostichoides, Athyrium thelypteroides\nn13197274\toak fern, Gymnocarpium dryopteris, Thelypteris dryopteris\nn13197507\tlimestone fern, northern oak fern, Gymnocarpium robertianum\nn13198054\tostrich fern, shuttlecock fern, fiddlehead, Matteuccia struthiopteris, Pteretis struthiopteris, Onoclea struthiopteris\nn13198482\thart's-tongue, hart's-tongue fern, Olfersia cervina, Polybotrya cervina, Polybotria cervina\nn13198914\tsensitive fern, bead fern, Onoclea sensibilis\nn13199717\tChristmas fern, canker brake, dagger fern, evergreen wood fern, Polystichum acrostichoides\nn13199970\tholly fern\nn13200193\tBraun's holly fern, prickly shield fern, Polystichum braunii\nn13200542\twestern holly fern, Polystichum scopulinum\nn13200651\tsoft shield fern, Polystichum setiferum\nn13200986\tleather fern, leatherleaf fern, ten-day fern, Rumohra adiantiformis, Polystichum adiantiformis\nn13201423\tbutton fern, Tectaria cicutaria\nn13201566\tIndian button fern, Tectaria macrodonta\nn13201969\twoodsia\nn13202125\trusty woodsia, fragrant woodsia, oblong woodsia, Woodsia 
ilvensis\nn13202355\tAlpine woodsia, northern woodsia, flower-cup fern, Woodsia alpina\nn13202602\tsmooth woodsia, Woodsia glabella\nn13205058\tBoston fern, Nephrolepis exaltata, Nephrolepis exaltata bostoniensis\nn13205249\tbasket fern, toothed sword fern, Nephrolepis pectinata\nn13206178\tgolden fern, leather fern, Acrostichum aureum\nn13206817\tmaidenhair, maidenhair fern\nn13207094\tcommon maidenhair, Venushair, Venus'-hair fern, southern maidenhair, Venus maidenhair, Adiantum capillus-veneris\nn13207335\tAmerican maidenhair fern, five-fingered maidenhair fern, Adiantum pedatum\nn13207572\tBermuda maidenhair, Bermuda maidenhair fern, Adiantum bellum\nn13207736\tbrittle maidenhair, brittle maidenhair fern, Adiantum tenerum\nn13207923\tFarley maidenhair, Farley maidenhair fern, Barbados maidenhair, glory fern, Adiantum tenerum farleyense\nn13208302\tannual fern, Jersey fern, Anogramma leptophylla\nn13208705\tlip fern, lipfern\nn13208965\tsmooth lip fern, Alabama lip fern, Cheilanthes alabamensis\nn13209129\tlace fern, Cheilanthes gracillima\nn13209270\twooly lip fern, hairy lip fern, Cheilanthes lanosa\nn13209460\tsouthwestern lip fern, Cheilanthes eatonii\nn13209808\tbamboo fern, Coniogramme japonica\nn13210350\tAmerican rock brake, American parsley fern, Cryptogramma acrostichoides\nn13210597\tEuropean parsley fern, mountain parsley fern, Cryptogramma crispa\nn13211020\thand fern, Doryopteris pedata\nn13211790\tcliff brake, cliff-brake, rock brake\nn13212025\tcoffee fern, Pellaea andromedifolia\nn13212175\tpurple rock brake, Pellaea atropurpurea\nn13212379\tbird's-foot fern, Pellaea mucronata, Pellaea ornithopus\nn13212559\tbutton fern, Pellaea rotundifolia\nn13213066\tsilver fern, Pityrogramma argentea\nn13213397\tgolden fern, Pityrogramma calomelanos aureoflava\nn13213577\tgold fern, Pityrogramma chrysophylla\nn13214217\tPteris cretica\nn13214340\tspider brake, spider fern, Pteris multifida\nn13214485\tribbon fern, spider fern, Pteris 
serrulata\nn13215258\tpotato fern, Marattia salicina\nn13215586\tangiopteris, giant fern, Angiopteris evecta\nn13217005\tskeleton fork fern, Psilotum nudum\nn13219422\thorsetail\nn13219833\tcommon horsetail, field horsetail, Equisetum arvense\nn13219976\tswamp horsetail, water horsetail, Equisetum fluviatile\nn13220122\tscouring rush, rough horsetail, Equisetum hyemale, Equisetum hyemale robustum, Equisetum robustum\nn13220355\tmarsh horsetail, Equisetum palustre\nn13220525\twood horsetail, Equisetum Sylvaticum\nn13220663\tvariegated horsetail, variegated scouring rush, Equisetum variegatum\nn13221529\tclub moss, club-moss, lycopod\nn13222877\tshining clubmoss, Lycopodium lucidulum\nn13222985\talpine clubmoss, Lycopodium alpinum\nn13223090\tfir clubmoss, mountain clubmoss, little clubmoss, Lycopodium selago\nn13223588\tground cedar, staghorn moss, Lycopodium complanatum\nn13223710\tground fir, princess pine, tree clubmoss, Lycopodium obscurum\nn13223843\tfoxtail grass, Lycopodium alopecuroides\nn13224673\tspikemoss, spike moss, little club moss\nn13224922\tmeadow spikemoss, basket spikemoss, Selaginella apoda\nn13225244\tdesert selaginella, Selaginella eremophila\nn13225365\tresurrection plant, rose of Jericho, Selaginella lepidophylla\nn13225617\tflorida selaginella, Selaginella eatonii\nn13226320\tquillwort\nn13226871\tearthtongue, earth-tongue\nn13228017\tsnuffbox fern, meadow fern, Thelypteris palustris pubescens, Dryopteris thelypteris pubescens\nn13228536\tchristella\nn13229543\tmountain fern, Oreopteris limbosperma, Dryopteris oreopteris\nn13229951\tNew York fern, Parathelypteris novae-boracensis, Dryopteris noveboracensis\nn13230190\tMassachusetts fern, Parathelypteris simulata, Thelypteris simulata\nn13230662\tbeech fern\nn13230843\tbroad beech fern, southern beech fern, Phegopteris hexagonoptera, Dryopteris hexagonoptera, Thelypteris hexagonoptera\nn13231078\tlong beech fern, narrow beech fern, northern beech fern, Phegopteris connectilis, Dryopteris 
phegopteris, Thelypteris phegopteris\nn13231678\tshoestring fungus\nn13231919\tArmillaria caligata, booted armillaria\nn13232106\tArmillaria ponderosa, white matsutake\nn13232363\tArmillaria zelleri\nn13232779\thoney mushroom, honey fungus, Armillariella mellea\nn13233727\tmilkweed, silkweed\nn13234114\twhite milkweed, Asclepias albicans\nn13234519\tpoke milkweed, Asclepias exaltata\nn13234678\tswamp milkweed, Asclepias incarnata\nn13234857\tMead's milkweed, Asclepias meadii, Asclepia meadii\nn13235011\tpurple silkweed, Asclepias purpurascens\nn13235159\tshowy milkweed, Asclepias speciosa\nn13235319\tpoison milkweed, horsetail milkweed, Asclepias subverticillata\nn13235503\tbutterfly weed, orange milkweed, chigger flower, chiggerflower, pleurisy root, tuber root, Indian paintbrush, Asclepias tuberosa\nn13235766\twhorled milkweed, Asclepias verticillata\nn13236100\tcruel plant, Araujia sericofera\nn13237188\twax plant, Hoya carnosa\nn13237508\tsilk vine, Periploca graeca\nn13238375\tstapelia, carrion flower, starfish flower\nn13238654\tStapelias asterias\nn13238988\tstephanotis\nn13239177\tMadagascar jasmine, waxflower, Stephanotis floribunda\nn13239736\tnegro vine, Vincetoxicum hirsutum, Vincetoxicum negrum\nn13239921\tzygospore\nn13240362\ttree of knowledge\nn13252672\torangery\nn13354021\tpocketbook\nn13555775\tshit, dump\nn13579829\tcordage\nn13650447\tyard, pace\nn13653902\textremum, peak\nn13862407\tleaf shape, leaf form\nn13862552\tequilateral\nn13862780\tfigure\nn13863020\tpencil\nn13863186\tplane figure, two-dimensional figure\nn13863473\tsolid figure, three-dimensional figure\nn13863771\tline\nn13864035\tbulb\nn13864153\tconvex shape, convexity\nn13864965\tconcave shape, concavity, incurvation, incurvature\nn13865298\tcylinder\nn13865483\tround shape\nn13865904\theart\nn13866144\tpolygon, polygonal shape\nn13866626\tconvex polygon\nn13866827\tconcave polygon\nn13867005\treentrant polygon, reentering polygon\nn13867492\tamorphous shape\nn13868248\tclosed 
curve\nn13868371\tsimple closed curve, Jordan curve\nn13868515\tS-shape\nn13868944\twave, undulation\nn13869045\textrados\nn13869547\thook, crotchet\nn13869788\tenvelope\nn13869896\tbight\nn13871717\tdiameter\nn13872592\tcone, conoid, cone shape\nn13872822\tfunnel, funnel shape\nn13873361\toblong\nn13873502\tcircle\nn13873917\tcircle\nn13874073\tequator\nn13874558\tscallop, crenation, crenature, crenel, crenelle\nn13875392\tring, halo, annulus, doughnut, anchor ring\nn13875571\tloop\nn13875884\tbight\nn13876561\thelix, spiral\nn13877547\telement of a cone\nn13877667\telement of a cylinder\nn13878306\tellipse, oval\nn13879049\tquadrate\nn13879320\ttriangle, trigon, trilateral\nn13879816\tacute triangle, acute-angled triangle\nn13880199\tisosceles triangle\nn13880415\tobtuse triangle, obtuse-angled triangle\nn13880551\tright triangle, right-angled triangle\nn13880704\tscalene triangle\nn13880994\tparallel\nn13881512\ttrapezoid\nn13881644\tstar\nn13882201\tpentagon\nn13882276\thexagon\nn13882487\theptagon\nn13882563\toctagon\nn13882639\tnonagon\nn13882713\tdecagon\nn13882961\trhombus, rhomb, diamond\nn13883603\tspherical polygon\nn13883763\tspherical triangle\nn13884261\tconvex polyhedron\nn13884384\tconcave polyhedron\nn13884930\tcuboid\nn13885011\tquadrangular prism\nn13886260\tbell, bell shape, campana\nn13888491\tangular distance\nn13889066\ttrue anomaly\nn13889331\tspherical angle\nn13891547\tangle of refraction\nn13891937\tacute angle\nn13893786\tgroove, channel\nn13894154\trut\nn13894434\tbulge, bump, hump, swelling, gibbosity, gibbousness, jut, prominence, protuberance, protrusion, extrusion, excrescence\nn13895262\tbelly\nn13896100\tbow, arc\nn13896217\tcrescent\nn13897198\tellipsoid\nn13897528\thypotenuse\nn13897996\tbalance, equilibrium, equipoise, counterbalance\nn13898207\tconformation\nn13898315\tsymmetry, proportion\nn13898645\tspheroid, ellipsoid of revolution\nn13899735\tspherule\nn13900287\ttoroid\nn13900422\tcolumn, tower, pillar\nn13901211\tbarrel, 
drum\nn13901321\tpipe, tube\nn13901423\tpellet\nn13901490\tbolus\nn13901858\tdewdrop\nn13902048\tridge\nn13902336\trim\nn13902793\ttaper\nn13903079\tboundary, edge, bound\nn13905121\tincisure, incisura\nn13905275\tnotch\nn13905792\twrinkle, furrow, crease, crinkle, seam, line\nn13906484\tdermatoglyphic\nn13906669\tfrown line\nn13906767\tline of life, life line, lifeline\nn13906936\tline of heart, heart line, love line, mensal line\nn13907272\tcrevice, cranny, crack, fissure, chap\nn13908201\tcleft\nn13908580\troulette, line roulette\nn13911045\tnode\nn13912260\ttree, tree diagram\nn13912540\tstemma\nn13914141\tbrachium\nn13914265\tfork, crotch\nn13914608\tblock, cube\nn13915023\tovoid\nn13915113\ttetrahedron\nn13915209\tpentahedron\nn13915305\thexahedron\nn13915999\tregular polyhedron, regular convex solid, regular convex polyhedron, Platonic body, Platonic solid, ideal solid\nn13916363\tpolyhedral angle\nn13916721\tcube, regular hexahedron\nn13917690\ttruncated pyramid\nn13917785\ttruncated cone\nn13918274\ttail, tail end\nn13918387\ttongue, knife\nn13918717\ttrapezohedron\nn13919547\twedge, wedge shape, cuneus\nn13919919\tkeel\nn13926786\tplace, shoes\nn14131950\therpes\nn14175579\tchlamydia\nn14564779\twall\nn14582716\tmicronutrient\nn14583400\tchyme\nn14585392\tragweed pollen\nn14592309\tpina cloth\nn14603798\tchlorobenzylidenemalononitrile, CS gas\nn14633206\tcarbon, C, atomic number 6\nn14685296\tcharcoal, wood coal\nn14696793\trock, stone\nn14698884\tgravel, crushed rock\nn14714645\taflatoxin\nn14720833\talpha-tocopheral\nn14765422\tleopard\nn14785065\tbricks and mortar\nn14786943\tlagging\nn14804958\thydraulic cement, Portland cement\nn14810561\tcholine\nn14820180\tconcrete\nn14821852\tglass wool\nn14844693\tsoil, dirt\nn14853210\thigh explosive\nn14858292\tlitter\nn14867545\tfish meal\nn14891255\tGreek fire\nn14899328\tculture medium, medium\nn14900184\tagar, nutrient agar\nn14900342\tblood agar\nn14908027\thip tile, hipped tile\nn14909584\thyacinth, 
jacinth\nn14914945\thydroxide ion, hydroxyl ion\nn14915184\tice, water ice\nn14919819\tinositol\nn14938389\tlinoleum, lino\nn14941787\tlithia water\nn14942411\tlodestone, loadstone\nn14973585\tpantothenic acid, pantothen\nn14974264\tpaper\nn14975598\tpapyrus\nn14976759\tpantile\nn14976871\tblacktop, blacktopping\nn14977188\ttarmacadam, tarmac\nn14977504\tpaving, pavement, paving material\nn14992287\tplaster\nn14993378\tpoison gas\nn15005577\tridge tile\nn15006012\troughcast\nn15019030\tsand\nn15048888\tspackle, spackling compound\nn15060326\trender\nn15060688\twattle and daub\nn15062057\tstucco\nn15067877\ttear gas, teargas, lacrimator, lachrymator\nn15075141\ttoilet tissue, toilet paper, bathroom tissue\nn15086247\tlinseed, flaxseed\nn15089258\tvitamin\nn15089472\tfat-soluble vitamin\nn15089645\twater-soluble vitamin\nn15089803\tvitamin A, antiophthalmic factor, axerophthol, A\nn15090065\tvitamin A1, retinol\nn15090238\tvitamin A2, dehydroretinol\nn15090742\tB-complex vitamin, B complex, vitamin B complex, vitamin B, B vitamin, B\nn15091129\tvitamin B1, thiamine, thiamin, aneurin, antiberiberi factor\nn15091304\tvitamin B12, cobalamin, cyanocobalamin, antipernicious anemia factor\nn15091473\tvitamin B2, vitamin G, riboflavin, lactoflavin, ovoflavin, hepatoflavin\nn15091669\tvitamin B6, pyridoxine, pyridoxal, pyridoxamine, adermin\nn15091846\tvitamin Bc, vitamin M, folate, folic acid, folacin, pteroylglutamic acid, pteroylmonoglutamic acid\nn15092059\tniacin, nicotinic acid\nn15092227\tvitamin D, calciferol, viosterol, ergocalciferol, cholecalciferol, D\nn15092409\tvitamin E, tocopherol, E\nn15092650\tbiotin, vitamin H\nn15092751\tvitamin K, naphthoquinone, antihemorrhagic factor\nn15092942\tvitamin K1, phylloquinone, phytonadione\nn15093049\tvitamin K3, menadione\nn15093137\tvitamin P, bioflavinoid, citrin\nn15093298\tvitamin C, C, ascorbic acid\nn15102359\tplanking\nn15102455\tchipboard, hardboard\nn15102894\tknothole\n"
  },
  {
    "path": "timm/data/_info/imagenet_synsets.txt",
    "content": "n01440764\nn01443537\nn01484850\nn01491361\nn01494475\nn01496331\nn01498041\nn01514668\nn01514859\nn01518878\nn01530575\nn01531178\nn01532829\nn01534433\nn01537544\nn01558993\nn01560419\nn01580077\nn01582220\nn01592084\nn01601694\nn01608432\nn01614925\nn01616318\nn01622779\nn01629819\nn01630670\nn01631663\nn01632458\nn01632777\nn01641577\nn01644373\nn01644900\nn01664065\nn01665541\nn01667114\nn01667778\nn01669191\nn01675722\nn01677366\nn01682714\nn01685808\nn01687978\nn01688243\nn01689811\nn01692333\nn01693334\nn01694178\nn01695060\nn01697457\nn01698640\nn01704323\nn01728572\nn01728920\nn01729322\nn01729977\nn01734418\nn01735189\nn01737021\nn01739381\nn01740131\nn01742172\nn01744401\nn01748264\nn01749939\nn01751748\nn01753488\nn01755581\nn01756291\nn01768244\nn01770081\nn01770393\nn01773157\nn01773549\nn01773797\nn01774384\nn01774750\nn01775062\nn01776313\nn01784675\nn01795545\nn01796340\nn01797886\nn01798484\nn01806143\nn01806567\nn01807496\nn01817953\nn01818515\nn01819313\nn01820546\nn01824575\nn01828970\nn01829413\nn01833805\nn01843065\nn01843383\nn01847000\nn01855032\nn01855672\nn01860187\nn01871265\nn01872401\nn01873310\nn01877812\nn01882714\nn01883070\nn01910747\nn01914609\nn01917289\nn01924916\nn01930112\nn01943899\nn01944390\nn01945685\nn01950731\nn01955084\nn01968897\nn01978287\nn01978455\nn01980166\nn01981276\nn01983481\nn01984695\nn01985128\nn01986214\nn01990800\nn02002556\nn02002724\nn02006656\nn02007558\nn02009229\nn02009912\nn02011460\nn02012849\nn02013706\nn02017213\nn02018207\nn02018795\nn02025239\nn02027492\nn02028035\nn02033041\nn02037110\nn02051845\nn02056570\nn02058221\nn02066245\nn02071294\nn02074367\nn02077923\nn02085620\nn02085782\nn02085936\nn02086079\nn02086240\nn02086646\nn02086910\nn02087046\nn02087394\nn02088094\nn02088238\nn02088364\nn02088466\nn02088632\nn02089078\nn02089867\nn02089973\nn02090379\nn02090622\nn02090721\nn02091032\nn02091134\nn02091244\nn02091467\nn02091635\nn02091831\nn02092002\nn02092339\nn02093256\nn020
93428\nn02093647\nn02093754\nn02093859\nn02093991\nn02094114\nn02094258\nn02094433\nn02095314\nn02095570\nn02095889\nn02096051\nn02096177\nn02096294\nn02096437\nn02096585\nn02097047\nn02097130\nn02097209\nn02097298\nn02097474\nn02097658\nn02098105\nn02098286\nn02098413\nn02099267\nn02099429\nn02099601\nn02099712\nn02099849\nn02100236\nn02100583\nn02100735\nn02100877\nn02101006\nn02101388\nn02101556\nn02102040\nn02102177\nn02102318\nn02102480\nn02102973\nn02104029\nn02104365\nn02105056\nn02105162\nn02105251\nn02105412\nn02105505\nn02105641\nn02105855\nn02106030\nn02106166\nn02106382\nn02106550\nn02106662\nn02107142\nn02107312\nn02107574\nn02107683\nn02107908\nn02108000\nn02108089\nn02108422\nn02108551\nn02108915\nn02109047\nn02109525\nn02109961\nn02110063\nn02110185\nn02110341\nn02110627\nn02110806\nn02110958\nn02111129\nn02111277\nn02111500\nn02111889\nn02112018\nn02112137\nn02112350\nn02112706\nn02113023\nn02113186\nn02113624\nn02113712\nn02113799\nn02113978\nn02114367\nn02114548\nn02114712\nn02114855\nn02115641\nn02115913\nn02116738\nn02117135\nn02119022\nn02119789\nn02120079\nn02120505\nn02123045\nn02123159\nn02123394\nn02123597\nn02124075\nn02125311\nn02127052\nn02128385\nn02128757\nn02128925\nn02129165\nn02129604\nn02130308\nn02132136\nn02133161\nn02134084\nn02134418\nn02137549\nn02138441\nn02165105\nn02165456\nn02167151\nn02168699\nn02169497\nn02172182\nn02174001\nn02177972\nn02190166\nn02206856\nn02219486\nn02226429\nn02229544\nn02231487\nn02233338\nn02236044\nn02256656\nn02259212\nn02264363\nn02268443\nn02268853\nn02276258\nn02277742\nn02279972\nn02280649\nn02281406\nn02281787\nn02317335\nn02319095\nn02321529\nn02325366\nn02326432\nn02328150\nn02342885\nn02346627\nn02356798\nn02361337\nn02363005\nn02364673\nn02389026\nn02391049\nn02395406\nn02396427\nn02397096\nn02398521\nn02403003\nn02408429\nn02410509\nn02412080\nn02415577\nn02417914\nn02422106\nn02422699\nn02423022\nn02437312\nn02437616\nn02441942\nn02442845\nn02443114\nn02443484\nn02444819\nn02445715\nn0
2447366\nn02454379\nn02457408\nn02480495\nn02480855\nn02481823\nn02483362\nn02483708\nn02484975\nn02486261\nn02486410\nn02487347\nn02488291\nn02488702\nn02489166\nn02490219\nn02492035\nn02492660\nn02493509\nn02493793\nn02494079\nn02497673\nn02500267\nn02504013\nn02504458\nn02509815\nn02510455\nn02514041\nn02526121\nn02536864\nn02606052\nn02607072\nn02640242\nn02641379\nn02643566\nn02655020\nn02666196\nn02667093\nn02669723\nn02672831\nn02676566\nn02687172\nn02690373\nn02692877\nn02699494\nn02701002\nn02704792\nn02708093\nn02727426\nn02730930\nn02747177\nn02749479\nn02769748\nn02776631\nn02777292\nn02782093\nn02783161\nn02786058\nn02787622\nn02788148\nn02790996\nn02791124\nn02791270\nn02793495\nn02794156\nn02795169\nn02797295\nn02799071\nn02802426\nn02804414\nn02804610\nn02807133\nn02808304\nn02808440\nn02814533\nn02814860\nn02815834\nn02817516\nn02823428\nn02823750\nn02825657\nn02834397\nn02835271\nn02837789\nn02840245\nn02841315\nn02843684\nn02859443\nn02860847\nn02865351\nn02869837\nn02870880\nn02871525\nn02877765\nn02879718\nn02883205\nn02892201\nn02892767\nn02894605\nn02895154\nn02906734\nn02909870\nn02910353\nn02916936\nn02917067\nn02927161\nn02930766\nn02939185\nn02948072\nn02950826\nn02951358\nn02951585\nn02963159\nn02965783\nn02966193\nn02966687\nn02971356\nn02974003\nn02977058\nn02978881\nn02979186\nn02980441\nn02981792\nn02988304\nn02992211\nn02992529\nn02999410\nn03000134\nn03000247\nn03000684\nn03014705\nn03016953\nn03017168\nn03018349\nn03026506\nn03028079\nn03032252\nn03041632\nn03042490\nn03045698\nn03047690\nn03062245\nn03063599\nn03063689\nn03065424\nn03075370\nn03085013\nn03089624\nn03095699\nn03100240\nn03109150\nn03110669\nn03124043\nn03124170\nn03125729\nn03126707\nn03127747\nn03127925\nn03131574\nn03133878\nn03134739\nn03141823\nn03146219\nn03160309\nn03179701\nn03180011\nn03187595\nn03188531\nn03196217\nn03197337\nn03201208\nn03207743\nn03207941\nn03208938\nn03216828\nn03218198\nn03220513\nn03223299\nn03240683\nn03249569\nn03250847\nn03255030\n
n03259280\nn03271574\nn03272010\nn03272562\nn03290653\nn03291819\nn03297495\nn03314780\nn03325584\nn03337140\nn03344393\nn03345487\nn03347037\nn03355925\nn03372029\nn03376595\nn03379051\nn03384352\nn03388043\nn03388183\nn03388549\nn03393912\nn03394916\nn03400231\nn03404251\nn03417042\nn03424325\nn03425413\nn03443371\nn03444034\nn03445777\nn03445924\nn03447447\nn03447721\nn03450230\nn03452741\nn03457902\nn03459775\nn03461385\nn03467068\nn03476684\nn03476991\nn03478589\nn03481172\nn03482405\nn03483316\nn03485407\nn03485794\nn03492542\nn03494278\nn03495258\nn03496892\nn03498962\nn03527444\nn03529860\nn03530642\nn03532672\nn03534580\nn03535780\nn03538406\nn03544143\nn03584254\nn03584829\nn03590841\nn03594734\nn03594945\nn03595614\nn03598930\nn03599486\nn03602883\nn03617480\nn03623198\nn03627232\nn03630383\nn03633091\nn03637318\nn03642806\nn03649909\nn03657121\nn03658185\nn03661043\nn03662601\nn03666591\nn03670208\nn03673027\nn03676483\nn03680355\nn03690938\nn03691459\nn03692522\nn03697007\nn03706229\nn03709823\nn03710193\nn03710637\nn03710721\nn03717622\nn03720891\nn03721384\nn03724870\nn03729826\nn03733131\nn03733281\nn03733805\nn03742115\nn03743016\nn03759954\nn03761084\nn03763968\nn03764736\nn03769881\nn03770439\nn03770679\nn03773504\nn03775071\nn03775546\nn03776460\nn03777568\nn03777754\nn03781244\nn03782006\nn03785016\nn03786901\nn03787032\nn03788195\nn03788365\nn03791053\nn03792782\nn03792972\nn03793489\nn03794056\nn03796401\nn03803284\nn03804744\nn03814639\nn03814906\nn03825788\nn03832673\nn03837869\nn03838899\nn03840681\nn03841143\nn03843555\nn03854065\nn03857828\nn03866082\nn03868242\nn03868863\nn03871628\nn03873416\nn03874293\nn03874599\nn03876231\nn03877472\nn03877845\nn03884397\nn03887697\nn03888257\nn03888605\nn03891251\nn03891332\nn03895866\nn03899768\nn03902125\nn03903868\nn03908618\nn03908714\nn03916031\nn03920288\nn03924679\nn03929660\nn03929855\nn03930313\nn03930630\nn03933933\nn03935335\nn03937543\nn03938244\nn03942813\nn03944341\nn03947888\nn03950228
\nn03954731\nn03956157\nn03958227\nn03961711\nn03967562\nn03970156\nn03976467\nn03976657\nn03977966\nn03980874\nn03982430\nn03983396\nn03991062\nn03992509\nn03995372\nn03998194\nn04004767\nn04005630\nn04008634\nn04009552\nn04019541\nn04023962\nn04026417\nn04033901\nn04033995\nn04037443\nn04039381\nn04040759\nn04041544\nn04044716\nn04049303\nn04065272\nn04067472\nn04069434\nn04070727\nn04074963\nn04081281\nn04086273\nn04090263\nn04099969\nn04111531\nn04116512\nn04118538\nn04118776\nn04120489\nn04125021\nn04127249\nn04131690\nn04133789\nn04136333\nn04141076\nn04141327\nn04141975\nn04146614\nn04147183\nn04149813\nn04152593\nn04153751\nn04154565\nn04162706\nn04179913\nn04192698\nn04200800\nn04201297\nn04204238\nn04204347\nn04208210\nn04209133\nn04209239\nn04228054\nn04229816\nn04235860\nn04238763\nn04239074\nn04243546\nn04251144\nn04252077\nn04252225\nn04254120\nn04254680\nn04254777\nn04258138\nn04259630\nn04263257\nn04264628\nn04265275\nn04266014\nn04270147\nn04273569\nn04275548\nn04277352\nn04285008\nn04286575\nn04296562\nn04310018\nn04311004\nn04311174\nn04317175\nn04325704\nn04326547\nn04328186\nn04330267\nn04332243\nn04335435\nn04336792\nn04344873\nn04346328\nn04347754\nn04350905\nn04355338\nn04355933\nn04356056\nn04357314\nn04366367\nn04367480\nn04370456\nn04371430\nn04371774\nn04372370\nn04376876\nn04380533\nn04389033\nn04392985\nn04398044\nn04399382\nn04404412\nn04409515\nn04417672\nn04418357\nn04423845\nn04428191\nn04429376\nn04435653\nn04442312\nn04443257\nn04447861\nn04456115\nn04458633\nn04461696\nn04462240\nn04465501\nn04467665\nn04476259\nn04479046\nn04482393\nn04483307\nn04485082\nn04486054\nn04487081\nn04487394\nn04493381\nn04501370\nn04505470\nn04507155\nn04509417\nn04515003\nn04517823\nn04522168\nn04523525\nn04525038\nn04525305\nn04532106\nn04532670\nn04536866\nn04540053\nn04542943\nn04548280\nn04548362\nn04550184\nn04552348\nn04553703\nn04554684\nn04557648\nn04560804\nn04562935\nn04579145\nn04579432\nn04584207\nn04589890\nn04590129\nn04591157\nn045917
13\nn04592741\nn04596742\nn04597913\nn04599235\nn04604644\nn04606251\nn04612504\nn04613696\nn06359193\nn06596364\nn06785654\nn06794110\nn06874185\nn07248320\nn07565083\nn07579787\nn07583066\nn07584110\nn07590611\nn07613480\nn07614500\nn07615774\nn07684084\nn07693725\nn07695742\nn07697313\nn07697537\nn07711569\nn07714571\nn07714990\nn07715103\nn07716358\nn07716906\nn07717410\nn07717556\nn07718472\nn07718747\nn07720875\nn07730033\nn07734744\nn07742313\nn07745940\nn07747607\nn07749582\nn07753113\nn07753275\nn07753592\nn07754684\nn07760859\nn07768694\nn07802026\nn07831146\nn07836838\nn07860988\nn07871810\nn07873807\nn07875152\nn07880968\nn07892512\nn07920052\nn07930864\nn07932039\nn09193705\nn09229709\nn09246464\nn09256479\nn09288635\nn09332890\nn09399592\nn09421951\nn09428293\nn09468604\nn09472597\nn09835506\nn10148035\nn10565667\nn11879895\nn11939491\nn12057211\nn12144580\nn12267677\nn12620546\nn12768682\nn12985857\nn12998815\nn13037406\nn13040303\nn13044778\nn13052670\nn13054560\nn13133613\nn15075141\n"
  },
  {
    "path": "timm/data/_info/mini_imagenet_indices.txt",
    "content": "12\n15\n51\n64\n70\n96\n99\n107\n111\n121\n149\n166\n173\n176\n207\n214\n228\n242\n244\n245\n249\n251\n256\n266\n270\n275\n279\n291\n299\n301\n306\n310\n359\n364\n392\n403\n412\n427\n440\n454\n471\n476\n478\n484\n494\n502\n503\n507\n519\n524\n533\n538\n546\n553\n556\n567\n569\n584\n597\n602\n604\n605\n629\n655\n657\n659\n683\n687\n702\n709\n713\n735\n741\n758\n779\n781\n800\n801\n807\n815\n819\n847\n854\n858\n860\n880\n881\n883\n909\n912\n914\n919\n925\n927\n934\n950\n972\n973\n997\n998\n"
  },
  {
    "path": "timm/data/_info/mini_imagenet_synsets.txt",
    "content": "n01532829\nn01558993\nn01704323\nn01749939\nn01770081\nn01843383\nn01855672\nn01910747\nn01930112\nn01981276\nn02074367\nn02089867\nn02091244\nn02091831\nn02099601\nn02101006\nn02105505\nn02108089\nn02108551\nn02108915\nn02110063\nn02110341\nn02111277\nn02113712\nn02114548\nn02116738\nn02120079\nn02129165\nn02138441\nn02165456\nn02174001\nn02219486\nn02443484\nn02457408\nn02606052\nn02687172\nn02747177\nn02795169\nn02823428\nn02871525\nn02950826\nn02966193\nn02971356\nn02981792\nn03017168\nn03047690\nn03062245\nn03075370\nn03127925\nn03146219\nn03207743\nn03220513\nn03272010\nn03337140\nn03347037\nn03400231\nn03417042\nn03476684\nn03527444\nn03535780\nn03544143\nn03584254\nn03676483\nn03770439\nn03773504\nn03775546\nn03838899\nn03854065\nn03888605\nn03908618\nn03924679\nn03980874\nn03998194\nn04067472\nn04146614\nn04149813\nn04243546\nn04251144\nn04258138\nn04275548\nn04296562\nn04389033\nn04418357\nn04435653\nn04443257\nn04509417\nn04515003\nn04522168\nn04596742\nn04604644\nn04612504\nn06794110\nn07584110\nn07613480\nn07697537\nn07747607\nn09246464\nn09256479\nn13054560\nn13133613\n"
  },
  {
    "path": "timm/data/auto_augment.py",
    "content": "\"\"\" AutoAugment, RandAugment, AugMix, and 3-Augment for PyTorch\n\nThis code implements the searched ImageNet policies with various tweaks and improvements and\ndoes not include any of the search code.\n\nAA and RA Implementation adapted from:\n    https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/autoaugment.py\n\nAugMix adapted from:\n    https://github.com/google-research/augmix\n\n3-Augment based on: https://github.com/facebookresearch/deit/blob/main/README_revenge.md\n\nPapers:\n    AutoAugment: Learning Augmentation Policies from Data - https://arxiv.org/abs/1805.09501\n    Learning Data Augmentation Strategies for Object Detection - https://arxiv.org/abs/1906.11172\n    RandAugment: Practical automated data augmentation... - https://arxiv.org/abs/1909.13719\n    AugMix: A Simple Data Processing Method to Improve Robustness and Uncertainty - https://arxiv.org/abs/1912.02781\n    3-Augment: DeiT III: Revenge of the ViT - https://arxiv.org/abs/2204.07118\n\nHacked together by / Copyright 2019, Ross Wightman\n\"\"\"\nimport random\nimport math\nimport re\nfrom functools import partial\nfrom typing import Dict, List, Optional, Union\n\nfrom PIL import Image, ImageOps, ImageEnhance, ImageChops, ImageFilter\nimport PIL\nimport numpy as np\n\n\n_PIL_VER = tuple([int(x) for x in PIL.__version__.split('.')[:2]])\n\n_FILL = (128, 128, 128)\n\n_LEVEL_DENOM = 10.  
# denominator for conversion from 'Mx' magnitude scale to fractional aug level for op arguments\n\n_HPARAMS_DEFAULT = dict(\n    translate_const=250,\n    img_mean=_FILL,\n)\n\nif hasattr(Image, \"Resampling\"):\n    _RANDOM_INTERPOLATION = (Image.Resampling.BILINEAR, Image.Resampling.BICUBIC)\n    _DEFAULT_INTERPOLATION = Image.Resampling.BICUBIC\nelse:\n    _RANDOM_INTERPOLATION = (Image.BILINEAR, Image.BICUBIC)\n    _DEFAULT_INTERPOLATION = Image.BICUBIC\n\n\ndef _interpolation(kwargs):\n    interpolation = kwargs.pop('resample', _DEFAULT_INTERPOLATION)\n    if isinstance(interpolation, (list, tuple)):\n        return random.choice(interpolation)\n    return interpolation\n\n\ndef _check_args_tf(kwargs):\n    if 'fillcolor' in kwargs and _PIL_VER < (5, 0):\n        kwargs.pop('fillcolor')\n    kwargs['resample'] = _interpolation(kwargs)\n\n\ndef shear_x(img, factor, **kwargs):\n    _check_args_tf(kwargs)\n    return img.transform(img.size, Image.AFFINE, (1, factor, 0, 0, 1, 0), **kwargs)\n\n\ndef shear_y(img, factor, **kwargs):\n    _check_args_tf(kwargs)\n    return img.transform(img.size, Image.AFFINE, (1, 0, 0, factor, 1, 0), **kwargs)\n\n\ndef translate_x_rel(img, pct, **kwargs):\n    pixels = pct * img.size[0]\n    _check_args_tf(kwargs)\n    return img.transform(img.size, Image.AFFINE, (1, 0, pixels, 0, 1, 0), **kwargs)\n\n\ndef translate_y_rel(img, pct, **kwargs):\n    pixels = pct * img.size[1]\n    _check_args_tf(kwargs)\n    return img.transform(img.size, Image.AFFINE, (1, 0, 0, 0, 1, pixels), **kwargs)\n\n\ndef translate_x_abs(img, pixels, **kwargs):\n    _check_args_tf(kwargs)\n    return img.transform(img.size, Image.AFFINE, (1, 0, pixels, 0, 1, 0), **kwargs)\n\n\ndef translate_y_abs(img, pixels, **kwargs):\n    _check_args_tf(kwargs)\n    return img.transform(img.size, Image.AFFINE, (1, 0, 0, 0, 1, pixels), **kwargs)\n\n\ndef rotate(img, degrees, **kwargs):\n    _check_args_tf(kwargs)\n    if _PIL_VER >= (5, 2):\n        return img.rotate(degrees, 
**kwargs)\n    if _PIL_VER >= (5, 0):\n        w, h = img.size\n        post_trans = (0, 0)\n        rotn_center = (w / 2.0, h / 2.0)\n        angle = -math.radians(degrees)\n        matrix = [\n            round(math.cos(angle), 15),\n            round(math.sin(angle), 15),\n            0.0,\n            round(-math.sin(angle), 15),\n            round(math.cos(angle), 15),\n            0.0,\n        ]\n\n        def transform(x, y, matrix):\n            (a, b, c, d, e, f) = matrix\n            return a * x + b * y + c, d * x + e * y + f\n\n        matrix[2], matrix[5] = transform(\n            -rotn_center[0] - post_trans[0], -rotn_center[1] - post_trans[1], matrix\n        )\n        matrix[2] += rotn_center[0]\n        matrix[5] += rotn_center[1]\n        return img.transform(img.size, Image.AFFINE, matrix, **kwargs)\n    return img.rotate(degrees, resample=kwargs['resample'])\n\n\ndef auto_contrast(img, **__):\n    return ImageOps.autocontrast(img)\n\n\ndef invert(img, **__):\n    return ImageOps.invert(img)\n\n\ndef equalize(img, **__):\n    return ImageOps.equalize(img)\n\n\ndef solarize(img, thresh, **__):\n    return ImageOps.solarize(img, thresh)\n\n\ndef solarize_add(img, add, thresh=128, **__):\n    lut = []\n    for i in range(256):\n        if i < thresh:\n            lut.append(min(255, i + add))\n        else:\n            lut.append(i)\n\n    if img.mode in (\"L\", \"RGB\"):\n        if img.mode == \"RGB\" and len(lut) == 256:\n            lut = lut + lut + lut\n        return img.point(lut)\n\n    return img\n\n\ndef posterize(img, bits_to_keep, **__):\n    if bits_to_keep >= 8:\n        return img\n    return ImageOps.posterize(img, bits_to_keep)\n\n\ndef contrast(img, factor, **__):\n    return ImageEnhance.Contrast(img).enhance(factor)\n\n\ndef color(img, factor, **__):\n    return ImageEnhance.Color(img).enhance(factor)\n\n\ndef brightness(img, factor, **__):\n    return ImageEnhance.Brightness(img).enhance(factor)\n\n\ndef sharpness(img, 
factor, **__):\n    return ImageEnhance.Sharpness(img).enhance(factor)\n\n\ndef gaussian_blur(img, factor, **__):\n    img = img.filter(ImageFilter.GaussianBlur(radius=factor))\n    return img\n\n\ndef gaussian_blur_rand(img, factor, **__):\n    radius_min = 0.1\n    radius_max = 2.0\n    img = img.filter(ImageFilter.GaussianBlur(radius=random.uniform(radius_min, radius_max * factor)))\n    return img\n\n\ndef desaturate(img, factor, **_):\n    factor = min(1., max(0., 1. - factor))\n    # enhance factor 0 = grayscale, 1.0 = no-change\n    return ImageEnhance.Color(img).enhance(factor)\n\n\ndef _randomly_negate(v):\n    \"\"\"With 50% prob, negate the value\"\"\"\n    return -v if random.random() > 0.5 else v\n\n\ndef _rotate_level_to_arg(level, _hparams):\n    # range [-30, 30]\n    level = (level / _LEVEL_DENOM) * 30.\n    level = _randomly_negate(level)\n    return level,\n\n\ndef _enhance_level_to_arg(level, _hparams):\n    # range [0.1, 1.9]\n    return (level / _LEVEL_DENOM) * 1.8 + 0.1,\n\n\ndef _enhance_increasing_level_to_arg(level, _hparams):\n    # the 'no change' level is 1.0, moving away from that towards 0. 
or 2.0 increases the enhancement blend\n    # range [0.1, 1.9] if level <= _LEVEL_DENOM\n    level = (level / _LEVEL_DENOM) * .9\n    level = max(0.1, 1.0 + _randomly_negate(level))  # keep it >= 0.1\n    return level,\n\n\ndef _minmax_level_to_arg(level, _hparams, min_val=0., max_val=1.0, clamp=True):\n    level = (level / _LEVEL_DENOM)\n    level = min_val + (max_val - min_val) * level\n    if clamp:\n        level = max(min_val, min(max_val, level))\n    return level,\n\n\ndef _shear_level_to_arg(level, _hparams):\n    # range [-0.3, 0.3]\n    level = (level / _LEVEL_DENOM) * 0.3\n    level = _randomly_negate(level)\n    return level,\n\n\ndef _translate_abs_level_to_arg(level, hparams):\n    translate_const = hparams['translate_const']\n    level = (level / _LEVEL_DENOM) * float(translate_const)\n    level = _randomly_negate(level)\n    return level,\n\n\ndef _translate_rel_level_to_arg(level, hparams):\n    # default range [-0.45, 0.45]\n    translate_pct = hparams.get('translate_pct', 0.45)\n    level = (level / _LEVEL_DENOM) * translate_pct\n    level = _randomly_negate(level)\n    return level,\n\n\ndef _posterize_level_to_arg(level, _hparams):\n    # As per Tensorflow TPU EfficientNet impl\n    # range [0, 4], 'keep 0 up to 4 MSB of original image'\n    # intensity/severity of augmentation decreases with level\n    return int((level / _LEVEL_DENOM) * 4),\n\n\ndef _posterize_increasing_level_to_arg(level, hparams):\n    # As per Tensorflow models research and UDA impl\n    # range [4, 0], 'keep 4 down to 0 MSB of original image',\n    # intensity/severity of augmentation increases with level\n    return 4 - _posterize_level_to_arg(level, hparams)[0],\n\n\ndef _posterize_original_level_to_arg(level, _hparams):\n    # As per original AutoAugment paper description\n    # range [4, 8], 'keep 4 up to 8 MSB of image'\n    # intensity/severity of augmentation decreases with level\n    return int((level / _LEVEL_DENOM) * 4) + 4,\n\n\ndef 
_solarize_level_to_arg(level, _hparams):\n    # range [0, 256]\n    # intensity/severity of augmentation decreases with level\n    return min(256, int((level / _LEVEL_DENOM) * 256)),\n\n\ndef _solarize_increasing_level_to_arg(level, _hparams):\n    # range [0, 256]\n    # intensity/severity of augmentation increases with level\n    return 256 - _solarize_level_to_arg(level, _hparams)[0],\n\n\ndef _solarize_add_level_to_arg(level, _hparams):\n    # range [0, 110]\n    return min(128, int((level / _LEVEL_DENOM) * 110)),\n\n\nLEVEL_TO_ARG = {\n    'AutoContrast': None,\n    'Equalize': None,\n    'Invert': None,\n    'Rotate': _rotate_level_to_arg,\n    # There are several variations of the posterize level scaling in various Tensorflow/Google repositories/papers\n    'Posterize': _posterize_level_to_arg,\n    'PosterizeIncreasing': _posterize_increasing_level_to_arg,\n    'PosterizeOriginal': _posterize_original_level_to_arg,\n    'Solarize': _solarize_level_to_arg,\n    'SolarizeIncreasing': _solarize_increasing_level_to_arg,\n    'SolarizeAdd': _solarize_add_level_to_arg,\n    'Color': _enhance_level_to_arg,\n    'ColorIncreasing': _enhance_increasing_level_to_arg,\n    'Contrast': _enhance_level_to_arg,\n    'ContrastIncreasing': _enhance_increasing_level_to_arg,\n    'Brightness': _enhance_level_to_arg,\n    'BrightnessIncreasing': _enhance_increasing_level_to_arg,\n    'Sharpness': _enhance_level_to_arg,\n    'SharpnessIncreasing': _enhance_increasing_level_to_arg,\n    'ShearX': _shear_level_to_arg,\n    'ShearY': _shear_level_to_arg,\n    'TranslateX': _translate_abs_level_to_arg,\n    'TranslateY': _translate_abs_level_to_arg,\n    'TranslateXRel': _translate_rel_level_to_arg,\n    'TranslateYRel': _translate_rel_level_to_arg,\n    'Desaturate': partial(_minmax_level_to_arg, min_val=0.5, max_val=1.0),\n    'GaussianBlur': partial(_minmax_level_to_arg, min_val=0.1, max_val=2.0),\n    'GaussianBlurRand': _minmax_level_to_arg,\n}\n\n\nNAME_TO_OP = {\n    
'AutoContrast': auto_contrast,\n    'Equalize': equalize,\n    'Invert': invert,\n    'Rotate': rotate,\n    'Posterize': posterize,\n    'PosterizeIncreasing': posterize,\n    'PosterizeOriginal': posterize,\n    'Solarize': solarize,\n    'SolarizeIncreasing': solarize,\n    'SolarizeAdd': solarize_add,\n    'Color': color,\n    'ColorIncreasing': color,\n    'Contrast': contrast,\n    'ContrastIncreasing': contrast,\n    'Brightness': brightness,\n    'BrightnessIncreasing': brightness,\n    'Sharpness': sharpness,\n    'SharpnessIncreasing': sharpness,\n    'ShearX': shear_x,\n    'ShearY': shear_y,\n    'TranslateX': translate_x_abs,\n    'TranslateY': translate_y_abs,\n    'TranslateXRel': translate_x_rel,\n    'TranslateYRel': translate_y_rel,\n    'Desaturate': desaturate,\n    'GaussianBlur': gaussian_blur,\n    'GaussianBlurRand': gaussian_blur_rand,\n}\n\n\nclass AugmentOp:\n\n    def __init__(self, name, prob=0.5, magnitude=10, hparams=None):\n        hparams = hparams or _HPARAMS_DEFAULT\n        self.name = name\n        self.aug_fn = NAME_TO_OP[name]\n        self.level_fn = LEVEL_TO_ARG[name]\n        self.prob = prob\n        self.magnitude = magnitude\n        self.hparams = hparams.copy()\n        self.kwargs = dict(\n            fillcolor=hparams['img_mean'] if 'img_mean' in hparams else _FILL,\n            resample=hparams['interpolation'] if 'interpolation' in hparams else _RANDOM_INTERPOLATION,\n        )\n\n        # If magnitude_std is > 0, we introduce some randomness\n        # in the usually fixed policy and sample magnitude from a normal distribution\n        # with mean `magnitude` and std-dev of `magnitude_std`.\n        # NOTE This is my own hack, being tested, not in papers or reference impls.\n        # If magnitude_std is inf, we sample magnitude from a uniform distribution\n        self.magnitude_std = self.hparams.get('magnitude_std', 0)\n        self.magnitude_max = self.hparams.get('magnitude_max', None)\n\n    def 
__call__(self, img):\n        if self.prob < 1.0 and random.random() > self.prob:\n            return img\n        magnitude = self.magnitude\n        if self.magnitude_std > 0:\n            # magnitude randomization enabled\n            if self.magnitude_std == float('inf'):\n                # inf == uniform sampling\n                magnitude = random.uniform(0, magnitude)\n            elif self.magnitude_std > 0:\n                magnitude = random.gauss(magnitude, self.magnitude_std)\n        # default upper_bound for the timm RA impl is _LEVEL_DENOM (10)\n        # setting magnitude_max overrides this to allow M > 10 (behaviour closer to Google TF RA impl)\n        upper_bound = self.magnitude_max or _LEVEL_DENOM\n        magnitude = max(0., min(magnitude, upper_bound))\n        level_args = self.level_fn(magnitude, self.hparams) if self.level_fn is not None else tuple()\n        return self.aug_fn(img, *level_args, **self.kwargs)\n\n    def __repr__(self):\n        fs = self.__class__.__name__ + f'(name={self.name}, p={self.prob}'\n        fs += f', m={self.magnitude}, mstd={self.magnitude_std}'\n        if self.magnitude_max is not None:\n            fs += f', mmax={self.magnitude_max}'\n        fs += ')'\n        return fs\n\n\ndef auto_augment_policy_v0(hparams):\n    # ImageNet v0 policy from TPU EfficientNet impl, cannot find a paper reference.\n    policy = [\n        [('Equalize', 0.8, 1), ('ShearY', 0.8, 4)],\n        [('Color', 0.4, 9), ('Equalize', 0.6, 3)],\n        [('Color', 0.4, 1), ('Rotate', 0.6, 8)],\n        [('Solarize', 0.8, 3), ('Equalize', 0.4, 7)],\n        [('Solarize', 0.4, 2), ('Solarize', 0.6, 2)],\n        [('Color', 0.2, 0), ('Equalize', 0.8, 8)],\n        [('Equalize', 0.4, 8), ('SolarizeAdd', 0.8, 3)],\n        [('ShearX', 0.2, 9), ('Rotate', 0.6, 8)],\n        [('Color', 0.6, 1), ('Equalize', 1.0, 2)],\n        [('Invert', 0.4, 9), ('Rotate', 0.6, 0)],\n        [('Equalize', 1.0, 9), ('ShearY', 0.6, 3)],\n        [('Color', 
0.4, 7), ('Equalize', 0.6, 0)],\n        [('Posterize', 0.4, 6), ('AutoContrast', 0.4, 7)],\n        [('Solarize', 0.6, 8), ('Color', 0.6, 9)],\n        [('Solarize', 0.2, 4), ('Rotate', 0.8, 9)],\n        [('Rotate', 1.0, 7), ('TranslateYRel', 0.8, 9)],\n        [('ShearX', 0.0, 0), ('Solarize', 0.8, 4)],\n        [('ShearY', 0.8, 0), ('Color', 0.6, 4)],\n        [('Color', 1.0, 0), ('Rotate', 0.6, 2)],\n        [('Equalize', 0.8, 4), ('Equalize', 0.0, 8)],\n        [('Equalize', 1.0, 4), ('AutoContrast', 0.6, 2)],\n        [('ShearY', 0.4, 7), ('SolarizeAdd', 0.6, 7)],\n        [('Posterize', 0.8, 2), ('Solarize', 0.6, 10)],  # This results in black image with Tpu posterize\n        [('Solarize', 0.6, 8), ('Equalize', 0.6, 1)],\n        [('Color', 0.8, 6), ('Rotate', 0.4, 5)],\n    ]\n    pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy]\n    return pc\n\n\ndef auto_augment_policy_v0r(hparams):\n    # ImageNet v0 policy from TPU EfficientNet impl, with variation of Posterize used\n    # in Google research implementation (number of bits discarded increases with magnitude)\n    policy = [\n        [('Equalize', 0.8, 1), ('ShearY', 0.8, 4)],\n        [('Color', 0.4, 9), ('Equalize', 0.6, 3)],\n        [('Color', 0.4, 1), ('Rotate', 0.6, 8)],\n        [('Solarize', 0.8, 3), ('Equalize', 0.4, 7)],\n        [('Solarize', 0.4, 2), ('Solarize', 0.6, 2)],\n        [('Color', 0.2, 0), ('Equalize', 0.8, 8)],\n        [('Equalize', 0.4, 8), ('SolarizeAdd', 0.8, 3)],\n        [('ShearX', 0.2, 9), ('Rotate', 0.6, 8)],\n        [('Color', 0.6, 1), ('Equalize', 1.0, 2)],\n        [('Invert', 0.4, 9), ('Rotate', 0.6, 0)],\n        [('Equalize', 1.0, 9), ('ShearY', 0.6, 3)],\n        [('Color', 0.4, 7), ('Equalize', 0.6, 0)],\n        [('PosterizeIncreasing', 0.4, 6), ('AutoContrast', 0.4, 7)],\n        [('Solarize', 0.6, 8), ('Color', 0.6, 9)],\n        [('Solarize', 0.2, 4), ('Rotate', 0.8, 9)],\n        [('Rotate', 1.0, 7), ('TranslateYRel', 0.8, 9)],\n       
 [('ShearX', 0.0, 0), ('Solarize', 0.8, 4)],\n        [('ShearY', 0.8, 0), ('Color', 0.6, 4)],\n        [('Color', 1.0, 0), ('Rotate', 0.6, 2)],\n        [('Equalize', 0.8, 4), ('Equalize', 0.0, 8)],\n        [('Equalize', 1.0, 4), ('AutoContrast', 0.6, 2)],\n        [('ShearY', 0.4, 7), ('SolarizeAdd', 0.6, 7)],\n        [('PosterizeIncreasing', 0.8, 2), ('Solarize', 0.6, 10)],\n        [('Solarize', 0.6, 8), ('Equalize', 0.6, 1)],\n        [('Color', 0.8, 6), ('Rotate', 0.4, 5)],\n    ]\n    pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy]\n    return pc\n\n\ndef auto_augment_policy_original(hparams):\n    # ImageNet policy from https://arxiv.org/abs/1805.09501\n    policy = [\n        [('PosterizeOriginal', 0.4, 8), ('Rotate', 0.6, 9)],\n        [('Solarize', 0.6, 5), ('AutoContrast', 0.6, 5)],\n        [('Equalize', 0.8, 8), ('Equalize', 0.6, 3)],\n        [('PosterizeOriginal', 0.6, 7), ('PosterizeOriginal', 0.6, 6)],\n        [('Equalize', 0.4, 7), ('Solarize', 0.2, 4)],\n        [('Equalize', 0.4, 4), ('Rotate', 0.8, 8)],\n        [('Solarize', 0.6, 3), ('Equalize', 0.6, 7)],\n        [('PosterizeOriginal', 0.8, 5), ('Equalize', 1.0, 2)],\n        [('Rotate', 0.2, 3), ('Solarize', 0.6, 8)],\n        [('Equalize', 0.6, 8), ('PosterizeOriginal', 0.4, 6)],\n        [('Rotate', 0.8, 8), ('Color', 0.4, 0)],\n        [('Rotate', 0.4, 9), ('Equalize', 0.6, 2)],\n        [('Equalize', 0.0, 7), ('Equalize', 0.8, 8)],\n        [('Invert', 0.6, 4), ('Equalize', 1.0, 8)],\n        [('Color', 0.6, 4), ('Contrast', 1.0, 8)],\n        [('Rotate', 0.8, 8), ('Color', 1.0, 2)],\n        [('Color', 0.8, 8), ('Solarize', 0.8, 7)],\n        [('Sharpness', 0.4, 7), ('Invert', 0.6, 8)],\n        [('ShearX', 0.6, 5), ('Equalize', 1.0, 9)],\n        [('Color', 0.4, 0), ('Equalize', 0.6, 3)],\n        [('Equalize', 0.4, 7), ('Solarize', 0.2, 4)],\n        [('Solarize', 0.6, 5), ('AutoContrast', 0.6, 5)],\n        [('Invert', 0.6, 4), ('Equalize', 1.0, 8)],\n       
 [('Color', 0.6, 4), ('Contrast', 1.0, 8)],\n        [('Equalize', 0.8, 8), ('Equalize', 0.6, 3)],\n    ]\n    pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy]\n    return pc\n\n\ndef auto_augment_policy_originalr(hparams):\n    # ImageNet policy from https://arxiv.org/abs/1805.09501 with research posterize variation\n    policy = [\n        [('PosterizeIncreasing', 0.4, 8), ('Rotate', 0.6, 9)],\n        [('Solarize', 0.6, 5), ('AutoContrast', 0.6, 5)],\n        [('Equalize', 0.8, 8), ('Equalize', 0.6, 3)],\n        [('PosterizeIncreasing', 0.6, 7), ('PosterizeIncreasing', 0.6, 6)],\n        [('Equalize', 0.4, 7), ('Solarize', 0.2, 4)],\n        [('Equalize', 0.4, 4), ('Rotate', 0.8, 8)],\n        [('Solarize', 0.6, 3), ('Equalize', 0.6, 7)],\n        [('PosterizeIncreasing', 0.8, 5), ('Equalize', 1.0, 2)],\n        [('Rotate', 0.2, 3), ('Solarize', 0.6, 8)],\n        [('Equalize', 0.6, 8), ('PosterizeIncreasing', 0.4, 6)],\n        [('Rotate', 0.8, 8), ('Color', 0.4, 0)],\n        [('Rotate', 0.4, 9), ('Equalize', 0.6, 2)],\n        [('Equalize', 0.0, 7), ('Equalize', 0.8, 8)],\n        [('Invert', 0.6, 4), ('Equalize', 1.0, 8)],\n        [('Color', 0.6, 4), ('Contrast', 1.0, 8)],\n        [('Rotate', 0.8, 8), ('Color', 1.0, 2)],\n        [('Color', 0.8, 8), ('Solarize', 0.8, 7)],\n        [('Sharpness', 0.4, 7), ('Invert', 0.6, 8)],\n        [('ShearX', 0.6, 5), ('Equalize', 1.0, 9)],\n        [('Color', 0.4, 0), ('Equalize', 0.6, 3)],\n        [('Equalize', 0.4, 7), ('Solarize', 0.2, 4)],\n        [('Solarize', 0.6, 5), ('AutoContrast', 0.6, 5)],\n        [('Invert', 0.6, 4), ('Equalize', 1.0, 8)],\n        [('Color', 0.6, 4), ('Contrast', 1.0, 8)],\n        [('Equalize', 0.8, 8), ('Equalize', 0.6, 3)],\n    ]\n    pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy]\n    return pc\n\n\ndef auto_augment_policy_3a(hparams):\n    policy = [\n        [('Solarize', 1.0, 5)],  # 128 solarize threshold @ 5 magnitude\n        
[('Desaturate', 1.0, 10)],  # grayscale at 10 magnitude\n        [('GaussianBlurRand', 1.0, 10)],\n    ]\n    pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy]\n    return pc\n\n\ndef auto_augment_policy(name='v0', hparams=None):\n    hparams = hparams or _HPARAMS_DEFAULT\n    if name == 'original':\n        return auto_augment_policy_original(hparams)\n    if name == 'originalr':\n        return auto_augment_policy_originalr(hparams)\n    if name == 'v0':\n        return auto_augment_policy_v0(hparams)\n    if name == 'v0r':\n        return auto_augment_policy_v0r(hparams)\n    if name == '3a':\n        return auto_augment_policy_3a(hparams)\n    assert False, f'Unknown AA policy {name}'\n\n\nclass AutoAugment:\n\n    def __init__(self, policy):\n        self.policy = policy\n\n    def __call__(self, img):\n        sub_policy = random.choice(self.policy)\n        for op in sub_policy:\n            img = op(img)\n        return img\n\n    def __repr__(self):\n        fs = self.__class__.__name__ + '(policy='\n        for p in self.policy:\n            fs += '\\n\\t['\n            fs += ', '.join([str(op) for op in p])\n            fs += ']'\n        fs += ')'\n        return fs\n\n\ndef auto_augment_transform(config_str: str, hparams: Optional[Dict] = None):\n    \"\"\" Create a AutoAugment transform\n\n    Args:\n        config_str: String defining configuration of auto augmentation. 
Consists of multiple sections separated by\n            dashes ('-').\n            The first section defines the AutoAugment policy (one of 'v0', 'v0r', 'original', 'originalr').\n            While the remaining sections define other arguments\n                * 'mstd' -  float std deviation of magnitude noise applied\n        hparams: Other hparams (kwargs) for the AutoAugmentation scheme\n\n    Returns:\n         A PyTorch compatible Transform\n\n    Examples::\n\n        'original-mstd0.5' results in AutoAugment with original policy, magnitude_std 0.5\n    \"\"\"\n    config = config_str.split('-')\n    policy_name = config[0]\n    config = config[1:]\n    for c in config:\n        cs = re.split(r'(\\d.*)', c)\n        if len(cs) < 2:\n            continue\n        key, val = cs[:2]\n        if key == 'mstd':\n            # noise param injected via hparams for now\n            hparams.setdefault('magnitude_std', float(val))\n        else:\n            assert False, 'Unknown AutoAugment config section'\n    aa_policy = auto_augment_policy(policy_name, hparams=hparams)\n    return AutoAugment(aa_policy)\n\n\n_RAND_TRANSFORMS = [\n    'AutoContrast',\n    'Equalize',\n    'Invert',\n    'Rotate',\n    'Posterize',\n    'Solarize',\n    'SolarizeAdd',\n    'Color',\n    'Contrast',\n    'Brightness',\n    'Sharpness',\n    'ShearX',\n    'ShearY',\n    'TranslateXRel',\n    'TranslateYRel',\n    # 'Cutout'  # NOTE I've implement this as random erasing separately\n]\n\n\n_RAND_INCREASING_TRANSFORMS = [\n    'AutoContrast',\n    'Equalize',\n    'Invert',\n    'Rotate',\n    'PosterizeIncreasing',\n    'SolarizeIncreasing',\n    'SolarizeAdd',\n    'ColorIncreasing',\n    'ContrastIncreasing',\n    'BrightnessIncreasing',\n    'SharpnessIncreasing',\n    'ShearX',\n    'ShearY',\n    'TranslateXRel',\n    'TranslateYRel',\n    # 'Cutout'  # NOTE I've implement this as random erasing separately\n]\n\n\n_RAND_3A = [\n    'SolarizeIncreasing',\n    'Desaturate',\n    
'GaussianBlur',\n]\n\n\n_RAND_WEIGHTED_3A = {\n    'SolarizeIncreasing': 6,\n    'Desaturate': 6,\n    'GaussianBlur': 6,\n    'Rotate': 3,\n    'ShearX': 2,\n    'ShearY': 2,\n    'PosterizeIncreasing': 1,\n    'AutoContrast': 1,\n    'ColorIncreasing': 1,\n    'SharpnessIncreasing': 1,\n    'ContrastIncreasing': 1,\n    'BrightnessIncreasing': 1,\n    'Equalize': 1,\n    'Invert': 1,\n}\n\n\n# These experimental weights are based loosely on the relative improvements mentioned in paper.\n# They may not result in increased performance, but could likely be tuned to so.\n_RAND_WEIGHTED_0 = {\n    'Rotate': 3,\n    'ShearX': 2,\n    'ShearY': 2,\n    'TranslateXRel': 1,\n    'TranslateYRel': 1,\n    'ColorIncreasing': .25,\n    'SharpnessIncreasing': 0.25,\n    'AutoContrast': 0.25,\n    'SolarizeIncreasing': .05,\n    'SolarizeAdd': .05,\n    'ContrastIncreasing': .05,\n    'BrightnessIncreasing': .05,\n    'Equalize': .05,\n    'PosterizeIncreasing': 0.05,\n    'Invert': 0.05,\n}\n\n\ndef _get_weighted_transforms(transforms: Dict):\n    transforms, probs = list(zip(*transforms.items()))\n    probs = np.array(probs)\n    probs = probs / np.sum(probs)\n    return transforms, probs\n\n\ndef rand_augment_choices(name: str, increasing=True):\n    if name == 'weights':\n        return _RAND_WEIGHTED_0\n    if name == '3aw':\n        return _RAND_WEIGHTED_3A\n    if name == '3a':\n        return _RAND_3A\n    return _RAND_INCREASING_TRANSFORMS if increasing else _RAND_TRANSFORMS\n\n\ndef rand_augment_ops(\n        magnitude: Union[int, float] = 10,\n        prob: float = 0.5,\n        hparams: Optional[Dict] = None,\n        transforms: Optional[Union[Dict, List]] = None,\n):\n    hparams = hparams or _HPARAMS_DEFAULT\n    transforms = transforms or _RAND_TRANSFORMS\n    return [AugmentOp(\n        name, prob=prob, magnitude=magnitude, hparams=hparams) for name in transforms]\n\n\nclass RandAugment:\n    def __init__(self, ops, num_layers=2, choice_weights=None):\n        
self.ops = ops\n        self.num_layers = num_layers\n        self.choice_weights = choice_weights\n\n    def __call__(self, img):\n        # no replacement when using weighted choice\n        ops = np.random.choice(\n            self.ops,\n            self.num_layers,\n            replace=self.choice_weights is None,\n            p=self.choice_weights,\n        )\n        for op in ops:\n            img = op(img)\n        return img\n\n    def __repr__(self):\n        fs = self.__class__.__name__ + f'(n={self.num_layers}, ops='\n        for op in self.ops:\n            fs += f'\\n\\t{op}'\n        fs += ')'\n        return fs\n\n\ndef rand_augment_transform(\n        config_str: str,\n        hparams: Optional[Dict] = None,\n        transforms: Optional[Union[str, Dict, List]] = None,\n):\n    \"\"\" Create a RandAugment transform\n\n    Args:\n        config_str (str): String defining configuration of random augmentation. Consists of multiple sections separated\n            by dashes ('-'). 
The first section defines the specific variant of rand augment (currently only 'rand').\n            The remaining sections, not order specific determine\n                * 'm' - integer magnitude of rand augment\n                * 'n' - integer num layers (number of transform ops selected per image)\n                * 'p' - float probability of applying each layer (default 0.5)\n                * 'mstd' -  float std deviation of magnitude noise applied, or uniform sampling if infinity (or > 100)\n                * 'mmax' - set upper bound for magnitude to something other than default of  _LEVEL_DENOM (10)\n                * 'inc' - integer (bool), use augmentations that increase in severity with magnitude (default: 0)\n                * 't' - str name of transform set to use\n        hparams (dict): Other hparams (kwargs) for the RandAugmentation scheme\n\n    Returns:\n         A PyTorch compatible Transform\n\n    Examples::\n\n        'rand-m9-n3-mstd0.5' results in RandAugment with magnitude 9, num_layers 3, magnitude_std 0.5\n\n        'rand-mstd1-tweights' results in mag std 1.0, weighted transforms, default mag of 10 and num_layers 2\n\n    \"\"\"\n    magnitude = _LEVEL_DENOM  # default to _LEVEL_DENOM for magnitude (currently 10)\n    num_layers = 2  # default to 2 ops per image\n    increasing = False\n    prob = 0.5\n    config = config_str.split('-')\n    assert config[0] == 'rand'\n    config = config[1:]\n    for c in config:\n        if c.startswith('t'):\n            # NOTE old 'w' key was removed, 'w0' is not equivalent to 'tweights'\n            val = str(c[1:])\n            if transforms is None:\n                transforms = val\n        else:\n            # numeric options\n            cs = re.split(r'(\\d.*)', c)\n            if len(cs) < 2:\n                continue\n            key, val = cs[:2]\n            if key == 'mstd':\n                # noise param / randomization of magnitude values\n                mstd = float(val)\n              
  if mstd > 100:\n                    # use uniform sampling in 0 to magnitude if mstd is > 100\n                    mstd = float('inf')\n                hparams.setdefault('magnitude_std', mstd)\n            elif key == 'mmax':\n                # clip magnitude between [0, mmax] instead of default [0, _LEVEL_DENOM]\n                hparams.setdefault('magnitude_max', int(val))\n            elif key == 'inc':\n                if bool(val):\n                    increasing = True\n            elif key == 'm':\n                magnitude = int(val)\n            elif key == 'n':\n                num_layers = int(val)\n            elif key == 'p':\n                prob = float(val)\n            else:\n                assert False, 'Unknown RandAugment config section'\n\n    if isinstance(transforms, str):\n        transforms = rand_augment_choices(transforms, increasing=increasing)\n    elif transforms is None:\n        transforms = _RAND_INCREASING_TRANSFORMS if increasing else _RAND_TRANSFORMS\n\n    choice_weights = None\n    if isinstance(transforms, Dict):\n        transforms, choice_weights = _get_weighted_transforms(transforms)\n\n    ra_ops = rand_augment_ops(magnitude=magnitude, prob=prob, hparams=hparams, transforms=transforms)\n    return RandAugment(ra_ops, num_layers, choice_weights=choice_weights)\n\n\n_AUGMIX_TRANSFORMS = [\n    'AutoContrast',\n    'ColorIncreasing',  # not in paper\n    'ContrastIncreasing',  # not in paper\n    'BrightnessIncreasing',  # not in paper\n    'SharpnessIncreasing',  # not in paper\n    'Equalize',\n    'Rotate',\n    'PosterizeIncreasing',\n    'SolarizeIncreasing',\n    'ShearX',\n    'ShearY',\n    'TranslateXRel',\n    'TranslateYRel',\n]\n\n\ndef augmix_ops(\n        magnitude: Union[int, float] = 10,\n        hparams: Optional[Dict] = None,\n        transforms: Optional[Union[str, Dict, List]] = None,\n):\n    hparams = hparams or _HPARAMS_DEFAULT\n    transforms = transforms or _AUGMIX_TRANSFORMS\n    return 
[AugmentOp(\n        name,\n        prob=1.0,\n        magnitude=magnitude,\n        hparams=hparams\n    ) for name in transforms]\n\n\nclass AugMixAugment:\n    \"\"\" AugMix Transform\n    Adapted and improved from impl here: https://github.com/google-research/augmix/blob/master/imagenet.py\n    From paper: 'AugMix: A Simple Data Processing Method to Improve Robustness and Uncertainty -\n    https://arxiv.org/abs/1912.02781\n    \"\"\"\n    def __init__(self, ops, alpha=1., width=3, depth=-1, blended=False):\n        self.ops = ops\n        self.alpha = alpha\n        self.width = width\n        self.depth = depth\n        self.blended = blended  # blended mode is faster but not well tested\n\n    def _calc_blended_weights(self, ws, m):\n        ws = ws * m\n        cump = 1.\n        rws = []\n        for w in ws[::-1]:\n            alpha = w / cump\n            cump *= (1 - alpha)\n            rws.append(alpha)\n        return np.array(rws[::-1], dtype=np.float32)\n\n    def _apply_blended(self, img, mixing_weights, m):\n        # This is my first crack and implementing a slightly faster mixed augmentation. 
Instead\n        # of accumulating the mix for each chain in a Numpy array and then blending with original,\n        # it recomputes the blending coefficients and applies one PIL image blend per chain.\n        # TODO the results appear in the right ballpark but they differ by more than rounding.\n        img_orig = img.copy()\n        ws = self._calc_blended_weights(mixing_weights, m)\n        for w in ws:\n            depth = self.depth if self.depth > 0 else np.random.randint(1, 4)\n            ops = np.random.choice(self.ops, depth, replace=True)\n            img_aug = img_orig  # no ops are in-place, deep copy not necessary\n            for op in ops:\n                img_aug = op(img_aug)\n            img = Image.blend(img, img_aug, w)\n        return img\n\n    def _apply_basic(self, img, mixing_weights, m):\n        # This is a literal adaptation of the paper/official implementation without normalizations and\n        # PIL <-> Numpy conversions between every op. It is still quite CPU compute heavy compared to the\n        # typical augmentation transforms, could use a GPU / Kornia implementation.\n        img_shape = img.size[0], img.size[1], len(img.getbands())\n        mixed = np.zeros(img_shape, dtype=np.float32)\n        for mw in mixing_weights:\n            depth = self.depth if self.depth > 0 else np.random.randint(1, 4)\n            ops = np.random.choice(self.ops, depth, replace=True)\n            img_aug = img  # no ops are in-place, deep copy not necessary\n            for op in ops:\n                img_aug = op(img_aug)\n            mixed += mw * np.asarray(img_aug, dtype=np.float32)\n        np.clip(mixed, 0, 255., out=mixed)\n        mixed = Image.fromarray(mixed.astype(np.uint8))\n        return Image.blend(img, mixed, m)\n\n    def __call__(self, img):\n        mixing_weights = np.float32(np.random.dirichlet([self.alpha] * self.width))\n        m = np.float32(np.random.beta(self.alpha, self.alpha))\n        if self.blended:\n            
mixed = self._apply_blended(img, mixing_weights, m)\n        else:\n            mixed = self._apply_basic(img, mixing_weights, m)\n        return mixed\n\n    def __repr__(self):\n        fs = self.__class__.__name__ + f'(alpha={self.alpha}, width={self.width}, depth={self.depth}, ops='\n        for op in self.ops:\n            fs += f'\\n\\t{op}'\n        fs += ')'\n        return fs\n\n\ndef augment_and_mix_transform(config_str: str, hparams: Optional[Dict] = None):\n    \"\"\" Create AugMix PyTorch transform\n\n    Args:\n        config_str (str): String defining configuration of random augmentation. Consists of multiple sections separated\n            by dashes ('-'). The first section defines the specific variant of rand augment (currently only 'rand').\n            The remaining sections, not order specific determine\n                'm' - integer magnitude (severity) of augmentation mix (default: 3)\n                'w' - integer width of augmentation chain (default: 3)\n                'd' - integer depth of augmentation chain (-1 is random [1, 3], default: -1)\n                'b' - integer (bool), blend each branch of chain into end result without a final blend, less CPU (default: 0)\n                'mstd' -  float std deviation of magnitude noise applied (default: 0)\n            Ex 'augmix-m5-w4-d2' results in AugMix with severity 5, chain width 4, chain depth 2\n\n        hparams: Other hparams (kwargs) for the Augmentation transforms\n\n    Returns:\n         A PyTorch compatible Transform\n    \"\"\"\n    magnitude = 3\n    width = 3\n    depth = -1\n    alpha = 1.\n    blended = False\n    config = config_str.split('-')\n    assert config[0] == 'augmix'\n    config = config[1:]\n    for c in config:\n        cs = re.split(r'(\\d.*)', c)\n        if len(cs) < 2:\n            continue\n        key, val = cs[:2]\n        if key == 'mstd':\n            # noise param injected via hparams for now\n            hparams.setdefault('magnitude_std', 
float(val))\n        elif key == 'm':\n            magnitude = int(val)\n        elif key == 'w':\n            width = int(val)\n        elif key == 'd':\n            depth = int(val)\n        elif key == 'a':\n            alpha = float(val)\n        elif key == 'b':\n            blended = bool(val)\n        else:\n            assert False, 'Unknown AugMix config section'\n    hparams.setdefault('magnitude_std', float('inf'))  # default to uniform sampling (if not set via mstd arg)\n    ops = augmix_ops(magnitude=magnitude, hparams=hparams)\n    return AugMixAugment(ops, alpha=alpha, width=width, depth=depth, blended=blended)\n"
  },
  {
    "path": "timm/data/config.py",
    "content": "import logging\nfrom .constants import *\n\n\n_logger = logging.getLogger(__name__)\n\n\ndef resolve_data_config(\n        args=None,\n        pretrained_cfg=None,\n        model=None,\n        use_test_size=False,\n        verbose=False\n):\n    assert model or args or pretrained_cfg, \"At least one of model, args, or pretrained_cfg required for data config.\"\n    args = args or {}\n    pretrained_cfg = pretrained_cfg or {}\n    if not pretrained_cfg and model is not None and hasattr(model, 'pretrained_cfg'):\n        pretrained_cfg = model.pretrained_cfg\n    data_config = {}\n\n    # Resolve input/image size\n    in_chans = 3\n    if args.get('in_chans', None) is not None:\n        in_chans = args['in_chans']\n    elif args.get('chans', None) is not None:\n        in_chans = args['chans']\n\n    input_size = (in_chans, 224, 224)\n    if args.get('input_size', None) is not None:\n        assert isinstance(args['input_size'], (tuple, list))\n        assert len(args['input_size']) == 3\n        input_size = tuple(args['input_size'])\n        in_chans = input_size[0]  # input_size overrides in_chans\n    elif args.get('img_size', None) is not None:\n        assert isinstance(args['img_size'], int)\n        input_size = (in_chans, args['img_size'], args['img_size'])\n    else:\n        if use_test_size and pretrained_cfg.get('test_input_size', None) is not None:\n            input_size = pretrained_cfg['test_input_size']\n        elif pretrained_cfg.get('input_size', None) is not None:\n            input_size = pretrained_cfg['input_size']\n    data_config['input_size'] = input_size\n\n    # resolve interpolation method\n    data_config['interpolation'] = 'bicubic'\n    if args.get('interpolation', None):\n        data_config['interpolation'] = args['interpolation']\n    elif pretrained_cfg.get('interpolation', None):\n        data_config['interpolation'] = pretrained_cfg['interpolation']\n\n    # resolve dataset + model mean for normalization\n    
data_config['mean'] = IMAGENET_DEFAULT_MEAN\n    if args.get('mean', None) is not None:\n        mean = tuple(args['mean'])\n        if len(mean) == 1:\n            mean = tuple(list(mean) * in_chans)\n        else:\n            assert len(mean) == in_chans\n        data_config['mean'] = mean\n    elif pretrained_cfg.get('mean', None):\n        data_config['mean'] = pretrained_cfg['mean']\n\n    # resolve dataset + model std deviation for normalization\n    data_config['std'] = IMAGENET_DEFAULT_STD\n    if args.get('std', None) is not None:\n        std = tuple(args['std'])\n        if len(std) == 1:\n            std = tuple(list(std) * in_chans)\n        else:\n            assert len(std) == in_chans\n        data_config['std'] = std\n    elif pretrained_cfg.get('std', None):\n        data_config['std'] = pretrained_cfg['std']\n\n    # resolve default inference crop\n    crop_pct = DEFAULT_CROP_PCT\n    if args.get('crop_pct', None):\n        crop_pct = args['crop_pct']\n    else:\n        if use_test_size and pretrained_cfg.get('test_crop_pct', None):\n            crop_pct = pretrained_cfg['test_crop_pct']\n        elif pretrained_cfg.get('crop_pct', None):\n            crop_pct = pretrained_cfg['crop_pct']\n    data_config['crop_pct'] = crop_pct\n\n    # resolve default crop percentage\n    crop_mode = DEFAULT_CROP_MODE\n    if args.get('crop_mode', None):\n        crop_mode = args['crop_mode']\n    elif pretrained_cfg.get('crop_mode', None):\n        crop_mode = pretrained_cfg['crop_mode']\n    data_config['crop_mode'] = crop_mode\n\n    if verbose:\n        _logger.info('Data processing configuration for current model + dataset:')\n        for n, v in data_config.items():\n            _logger.info('\\t%s: %s' % (n, str(v)))\n\n    return data_config\n\n\ndef resolve_model_data_config(\n        model,\n        args=None,\n        pretrained_cfg=None,\n        use_test_size=False,\n        verbose=False,\n):\n    \"\"\" Resolve Model Data Config\n    This is 
equivalent to resolve_data_config() but with arguments re-ordered to put model first.\n\n    Args:\n        model (nn.Module): the model instance\n        args (dict): command line arguments / configuration in dict form (overrides pretrained_cfg)\n        pretrained_cfg (dict): pretrained model config (overrides pretrained_cfg attached to model)\n        use_test_size (bool): use the test time input resolution (if one exists) instead of default train resolution\n        verbose (bool): enable extra logging of resolved values\n\n    Returns:\n        dictionary of config\n    \"\"\"\n    return resolve_data_config(\n        args=args,\n        pretrained_cfg=pretrained_cfg,\n        model=model,\n        use_test_size=use_test_size,\n        verbose=verbose,\n    )\n"
  },
  {
    "path": "timm/data/constants.py",
    "content": "DEFAULT_CROP_PCT = 0.875\nDEFAULT_CROP_MODE = 'center'\nIMAGENET_DEFAULT_MEAN = (0.485, 0.456, 0.406)\nIMAGENET_DEFAULT_STD = (0.229, 0.224, 0.225)\nIMAGENET_INCEPTION_MEAN = (0.5, 0.5, 0.5)\nIMAGENET_INCEPTION_STD = (0.5, 0.5, 0.5)\nIMAGENET_DPN_MEAN = (124 / 255, 117 / 255, 104 / 255)\nIMAGENET_DPN_STD = tuple([1 / (.0167 * 255)] * 3)\nOPENAI_CLIP_MEAN = (0.48145466, 0.4578275, 0.40821073)\nOPENAI_CLIP_STD = (0.26862954, 0.26130258, 0.27577711)\n"
  },
  {
    "path": "timm/data/dataset.py",
    "content": "\"\"\" Quick n Simple Image Folder, Tarfile based DataSet\n\nHacked together by / Copyright 2019, Ross Wightman\n\"\"\"\nimport io\nimport logging\nfrom typing import Optional\n\nimport torch\nimport torch.utils.data as data\nfrom PIL import Image\n\nfrom .readers import create_reader\n\n_logger = logging.getLogger(__name__)\n\n\n_ERROR_RETRY = 20\n\n\nclass ImageDataset(data.Dataset):\n\n    def __init__(\n            self,\n            root,\n            reader=None,\n            split='train',\n            class_map=None,\n            load_bytes=False,\n            input_img_mode='RGB',\n            transform=None,\n            target_transform=None,\n            additional_features=None,\n            **kwargs,\n    ):\n        if reader is None or isinstance(reader, str):\n            reader = create_reader(\n                reader or '',\n                root=root,\n                split=split,\n                class_map=class_map,\n                additional_features=additional_features,\n                **kwargs,\n            )\n        self.reader = reader\n        self.load_bytes = load_bytes\n        self.input_img_mode = input_img_mode\n        self.transform = transform\n        self.target_transform = target_transform\n        self.additional_features = additional_features\n        self._max_retries = _ERROR_RETRY\n\n    def __getitem__(self, index):\n        for attempt in range(self._max_retries):\n            try:\n                img, target, *features = self.reader[index]\n                img = img.read() if self.load_bytes else Image.open(img)\n                break\n            except (IOError, OSError) as e:  # be specific\n                _logger.warning(f'Skipped sample (index {index}). 
{e}')\n                index = (index + 1) % len(self.reader)\n        else:\n            raise RuntimeError(f\"Failed to load {self._max_retries} consecutive samples\")\n\n        if self.input_img_mode and not self.load_bytes:\n            img = img.convert(self.input_img_mode)\n        if self.transform is not None:\n            img = self.transform(img)\n\n        if target is None:\n            target = -1\n        elif self.target_transform is not None:\n            target = self.target_transform(target)\n\n        if self.additional_features is None:\n            return img, target\n        else:\n            return img, target, *features\n\n    def __len__(self):\n        return len(self.reader)\n\n    def filename(self, index, basename=False, absolute=False):\n        return self.reader.filename(index, basename, absolute)\n\n    def filenames(self, basename=False, absolute=False):\n        return self.reader.filenames(basename, absolute)\n\n\nclass IterableImageDataset(data.IterableDataset):\n\n    def __init__(\n            self,\n            root,\n            reader=None,\n            split='train',\n            class_map=None,\n            is_training=False,\n            batch_size=1,\n            num_samples=None,\n            seed=42,\n            repeats=0,\n            download=False,\n            input_img_mode='RGB',\n            input_key=None,\n            target_key=None,\n            transform=None,\n            target_transform=None,\n            max_steps=None,\n            **kwargs,\n    ):\n        assert reader is not None\n        if isinstance(reader, str):\n            self.reader = create_reader(\n                reader,\n                root=root,\n                split=split,\n                class_map=class_map,\n                is_training=is_training,\n                batch_size=batch_size,\n                num_samples=num_samples,\n                seed=seed,\n                repeats=repeats,\n                
download=download,\n                input_img_mode=input_img_mode,\n                input_key=input_key,\n                target_key=target_key,\n                max_steps=max_steps,\n                **kwargs,\n            )\n        else:\n            self.reader = reader\n        self.transform = transform\n        self.target_transform = target_transform\n\n    def __iter__(self):\n        for img, target in self.reader:\n            if self.transform is not None:\n                img = self.transform(img)\n            if self.target_transform is not None:\n                target = self.target_transform(target)\n            yield img, target\n\n    def __len__(self):\n        if hasattr(self.reader, '__len__'):\n            return len(self.reader)\n        else:\n            return 0\n\n    def set_epoch(self, count):\n        # TFDS and WDS need external epoch count for deterministic cross process shuffle\n        if hasattr(self.reader, 'set_epoch'):\n            self.reader.set_epoch(count)\n\n    def set_loader_cfg(\n            self,\n            num_workers: Optional[int] = None,\n    ):\n        # TFDS and WDS readers need # workers for correct # samples estimate before loader processes created\n        if hasattr(self.reader, 'set_loader_cfg'):\n            self.reader.set_loader_cfg(num_workers=num_workers)\n\n    def filename(self, index, basename=False, absolute=False):\n        assert False, 'Filename lookup by index not supported, use filenames().'\n\n    def filenames(self, basename=False, absolute=False):\n        return self.reader.filenames(basename, absolute)\n\n\nclass AugMixDataset(torch.utils.data.Dataset):\n    \"\"\"Dataset wrapper to perform AugMix or other clean/augmentation mixes\"\"\"\n\n    def __init__(self, dataset, num_splits=2):\n        self.augmentation = None\n        self.normalize = None\n        self.dataset = dataset\n        if self.dataset.transform is not None:\n            self._set_transforms(self.dataset.transform)\n  
      self.num_splits = num_splits\n\n    def _set_transforms(self, x):\n        assert isinstance(x, (list, tuple)) and len(x) == 3, 'Expecting a tuple/list of 3 transforms'\n        self.dataset.transform = x[0]\n        self.augmentation = x[1]\n        self.normalize = x[2]\n\n    @property\n    def transform(self):\n        return self.dataset.transform\n\n    @transform.setter\n    def transform(self, x):\n        self._set_transforms(x)\n\n    def _normalize(self, x):\n        return x if self.normalize is None else self.normalize(x)\n\n    def __getitem__(self, i):\n        x, y = self.dataset[i]  # all splits share the same dataset base transform\n        x_list = [self._normalize(x)]  # first split only normalizes (this is the 'clean' split)\n        # run the full augmentation on the remaining splits\n        for _ in range(self.num_splits - 1):\n            x_list.append(self._normalize(self.augmentation(x)))\n        return tuple(x_list), y\n\n    def __len__(self):\n        return len(self.dataset)\n"
  },
  {
    "path": "timm/data/dataset_factory.py",
    "content": "\"\"\" Dataset Factory\n\nHacked together by / Copyright 2021, Ross Wightman\n\"\"\"\nimport os\nfrom typing import Optional\n\nfrom torchvision.datasets import CIFAR100, CIFAR10, MNIST, KMNIST, FashionMNIST, ImageFolder\ntry:\n    from torchvision.datasets import Places365\n    has_places365 = True\nexcept ImportError:\n    has_places365 = False\ntry:\n    from torchvision.datasets import INaturalist\n    has_inaturalist = True\nexcept ImportError:\n    has_inaturalist = False\ntry:\n    from torchvision.datasets import QMNIST\n    has_qmnist = True\nexcept ImportError:\n    has_qmnist = False\ntry:\n    from torchvision.datasets import ImageNet\n    has_imagenet = True\nexcept ImportError:\n    has_imagenet = False\n\nfrom .dataset import IterableImageDataset, ImageDataset\n\n_TORCH_BASIC_DS = dict(\n    cifar10=CIFAR10,\n    cifar100=CIFAR100,\n    mnist=MNIST,\n    kmnist=KMNIST,\n    fashion_mnist=FashionMNIST,\n)\n_TRAIN_SYNONYM = dict(train=None, training=None)\n_EVAL_SYNONYM = dict(val=None, valid=None, validation=None, eval=None, evaluation=None)\n\n\ndef _search_split(root, split):\n    # look for sub-folder with name of split in root and use that if it exists\n    split_name = split.split('[')[0]\n    try_root = os.path.join(root, split_name)\n    if os.path.exists(try_root):\n        return try_root\n\n    def _try(syn):\n        for s in syn:\n            try_root = os.path.join(root, s)\n            if os.path.exists(try_root):\n                return try_root\n        return root\n    if split_name in _TRAIN_SYNONYM:\n        root = _try(_TRAIN_SYNONYM)\n    elif split_name in _EVAL_SYNONYM:\n        root = _try(_EVAL_SYNONYM)\n    return root\n\n\ndef create_dataset(\n        name: str,\n        root: Optional[str] = None,\n        split: str = 'validation',\n        search_split: bool = True,\n        class_map: dict = None,\n        load_bytes: bool = False,\n        is_training: bool = False,\n        download: bool = False,\n     
   batch_size: int = 1,\n        num_samples: Optional[int] = None,\n        seed: int = 42,\n        repeats: int = 0,\n        input_img_mode: str = 'RGB',\n        trust_remote_code: bool = False,\n        **kwargs,\n):\n    \"\"\" Dataset factory method\n\n    In parentheses after each arg are the type of dataset supported for each arg, one of:\n      * Folder - default, timm folder (or tar) based ImageDataset\n      * Torch - torchvision based datasets\n      * HFDS - Hugging Face Datasets\n      * HFIDS - Hugging Face Datasets Iterable (streaming mode, with IterableDataset)\n      * TFDS - Tensorflow-datasets wrapper in IterabeDataset interface via IterableImageDataset\n      * WDS - Webdataset\n      * All - any of the above\n\n    Args:\n        name: Dataset name, empty is okay for folder based datasets\n        root: Root folder of dataset (All)\n        split: Dataset split (All)\n        search_split: Search for split specific child fold from root so one can specify\n            `imagenet/` instead of `/imagenet/val`, etc on cmd line / config. (Folder, Torch)\n        class_map: Specify class -> index mapping via text file or dict (Folder)\n        load_bytes: Load data, return images as undecoded bytes (Folder)\n        download: Download dataset if not present and supported (HFIDS, TFDS, Torch)\n        is_training: Create dataset in train mode, this is different from the split.\n            For Iterable / TDFS it enables shuffle, ignored for other datasets. (TFDS, WDS, HFIDS)\n        batch_size: Batch size hint for iterable datasets (TFDS, WDS, HFIDS)\n        seed: Seed for iterable datasets (TFDS, WDS, HFIDS)\n        repeats: Dataset repeats per iteration i.e. epoch (TFDS, WDS, HFIDS)\n        input_img_mode: Input image color conversion mode e.g. 
'RGB', 'L' (folder, TFDS, WDS, HFDS, HFIDS)\n        trust_remote_code: Trust remote code in Hugging Face Datasets if True (HFDS, HFIDS)\n        **kwargs: Other args to pass through to underlying Dataset and/or Reader classes\n\n    Returns:\n        Dataset object\n    \"\"\"\n    kwargs = {k: v for k, v in kwargs.items() if v is not None}\n    name = name.lower()\n    if name.startswith('torch/'):\n        name = name.split('/', 2)[-1]\n        torch_kwargs = dict(root=root, download=download, **kwargs)\n        if name in _TORCH_BASIC_DS:\n            ds_class = _TORCH_BASIC_DS[name]\n            use_train = split in _TRAIN_SYNONYM\n            ds = ds_class(train=use_train, **torch_kwargs)\n        elif name == 'inaturalist' or name == 'inat':\n            assert has_inaturalist, 'Please update to PyTorch 1.10, torchvision 0.11+ for Inaturalist'\n            target_type = 'full'\n            split_split = split.split('/')\n            if len(split_split) > 1:\n                target_type = split_split[0].split('_')\n                if len(target_type) == 1:\n                    target_type = target_type[0]\n                split = split_split[-1]\n            if split in _TRAIN_SYNONYM:\n                split = '2021_train'\n            elif split in _EVAL_SYNONYM:\n                split = '2021_valid'\n            ds = INaturalist(version=split, target_type=target_type, **torch_kwargs)\n        elif name == 'places365':\n            assert has_places365, 'Please update to a newer PyTorch and torchvision for Places365 dataset.'\n            if split in _TRAIN_SYNONYM:\n                split = 'train-standard'\n            elif split in _EVAL_SYNONYM:\n                split = 'val'\n            ds = Places365(split=split, **torch_kwargs)\n        elif name == 'qmnist':\n            assert has_qmnist, 'Please update to a newer PyTorch and torchvision for QMNIST dataset.'\n            use_train = split in _TRAIN_SYNONYM\n            ds = QMNIST(train=use_train, 
**torch_kwargs)\n        elif name == 'imagenet':\n            torch_kwargs.pop('download')\n            assert has_imagenet, 'Please update to a newer PyTorch and torchvision for ImageNet dataset.'\n            if split in _EVAL_SYNONYM:\n                split = 'val'\n            ds = ImageNet(split=split, **torch_kwargs)\n        elif name == 'image_folder' or name == 'folder':\n            # in case torchvision ImageFolder is preferred over timm ImageDataset for some reason\n            if search_split and os.path.isdir(root):\n                # look for split specific sub-folder in root\n                root = _search_split(root, split)\n            ds = ImageFolder(root, **kwargs)\n        else:\n            assert False, f\"Unknown torchvision dataset {name}\"\n    elif name.startswith('hfds/'):\n        # NOTE right now, HF datasets default arrow format is a random-access Dataset,\n        # There will be a IterableDataset variant too, TBD\n        ds = ImageDataset(\n            root,\n            reader=name,\n            split=split,\n            class_map=class_map,\n            input_img_mode=input_img_mode,\n            trust_remote_code=trust_remote_code,\n            **kwargs,\n        )\n    elif name.startswith('hfids/'):\n        ds = IterableImageDataset(\n            root,\n            reader=name,\n            split=split,\n            class_map=class_map,\n            is_training=is_training,\n            download=download,\n            batch_size=batch_size,\n            num_samples=num_samples,\n            repeats=repeats,\n            seed=seed,\n            input_img_mode=input_img_mode,\n            trust_remote_code=trust_remote_code,\n            **kwargs,\n        )\n    elif name.startswith('tfds/'):\n        ds = IterableImageDataset(\n            root,\n            reader=name,\n            split=split,\n            class_map=class_map,\n            is_training=is_training,\n            download=download,\n            
batch_size=batch_size,\n            num_samples=num_samples,\n            repeats=repeats,\n            seed=seed,\n            input_img_mode=input_img_mode,\n            **kwargs\n        )\n    elif name.startswith('wds/'):\n        ds = IterableImageDataset(\n            root,\n            reader=name,\n            split=split,\n            class_map=class_map,\n            is_training=is_training,\n            batch_size=batch_size,\n            num_samples=num_samples,\n            repeats=repeats,\n            seed=seed,\n            input_img_mode=input_img_mode,\n            **kwargs\n        )\n    else:\n        # FIXME support more advance split cfg for ImageFolder/Tar datasets in the future\n        if search_split and os.path.isdir(root):\n            # look for split specific sub-folder in root\n            root = _search_split(root, split)\n        ds = ImageDataset(\n            root,\n            reader=name,\n            class_map=class_map,\n            load_bytes=load_bytes,\n            input_img_mode=input_img_mode,\n            **kwargs,\n        )\n    return ds\n"
  },
  {
    "path": "timm/data/dataset_info.py",
    "content": "from abc import ABC, abstractmethod\nfrom typing import Dict, List, Optional, Union\n\n\nclass DatasetInfo(ABC):\n\n    def __init__(self):\n        pass\n\n    @abstractmethod\n    def num_classes(self):\n        pass\n\n    @abstractmethod\n    def label_names(self):\n        pass\n\n    @abstractmethod\n    def label_descriptions(self, detailed: bool = False, as_dict: bool = False) -> Union[List[str], Dict[str, str]]:\n        pass\n\n    @abstractmethod\n    def index_to_label_name(self, index) -> str:\n        pass\n\n    @abstractmethod\n    def index_to_description(self, index: int, detailed: bool = False) -> str:\n        pass\n\n    @abstractmethod\n    def label_name_to_description(self, label: str, detailed: bool = False) -> str:\n        pass\n\n\nclass CustomDatasetInfo(DatasetInfo):\n    \"\"\" DatasetInfo that wraps passed values for custom datasets.\"\"\"\n\n    def __init__(\n            self,\n            label_names: Union[List[str], Dict[int, str]],\n            label_descriptions: Optional[Dict[str, str]] = None\n    ):\n        super().__init__()\n        assert len(label_names) > 0\n        self._label_names = label_names  # label index => label name mapping\n        self._label_descriptions = label_descriptions  # label name => label description mapping\n        if self._label_descriptions is not None:\n            # validate descriptions (label names required)\n            assert isinstance(self._label_descriptions, dict)\n            for n in self._label_names:\n                assert n in self._label_descriptions\n\n    def num_classes(self):\n        return len(self._label_names)\n\n    def label_names(self):\n        return self._label_names\n\n    def label_descriptions(self, detailed: bool = False, as_dict: bool = False) -> Union[List[str], Dict[str, str]]:\n        return self._label_descriptions\n\n    def label_name_to_description(self, label: str, detailed: bool = False) -> str:\n        if 
self._label_descriptions:\n            return self._label_descriptions[label]\n        return label  # return label name itself if a descriptions is not present\n\n    def index_to_label_name(self, index) -> str:\n        assert 0 <= index < len(self._label_names)\n        return self._label_names[index]\n\n    def index_to_description(self, index: int, detailed: bool = False) -> str:\n        label = self.index_to_label_name(index)\n        return self.label_name_to_description(label, detailed=detailed)\n"
  },
  {
    "path": "timm/data/distributed_sampler.py",
    "content": "import math\nimport torch\nfrom torch.utils.data import Sampler\nimport torch.distributed as dist\n\n\nclass OrderedDistributedSampler(Sampler):\n    \"\"\"Sampler that restricts data loading to a subset of the dataset.\n    It is especially useful in conjunction with\n    :class:`torch.nn.parallel.DistributedDataParallel`. In such case, each\n    process can pass a DistributedSampler instance as a DataLoader sampler,\n    and load a subset of the original dataset that is exclusive to it.\n    .. note::\n        Dataset is assumed to be of constant size.\n    Arguments:\n        dataset: Dataset used for sampling.\n        num_replicas (optional): Number of processes participating in\n            distributed training.\n        rank (optional): Rank of the current process within num_replicas.\n    \"\"\"\n\n    def __init__(self, dataset, num_replicas=None, rank=None):\n        if num_replicas is None:\n            if not dist.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            num_replicas = dist.get_world_size()\n        if rank is None:\n            if not dist.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            rank = dist.get_rank()\n        self.dataset = dataset\n        self.num_replicas = num_replicas\n        self.rank = rank\n        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))\n        self.total_size = self.num_samples * self.num_replicas\n\n    def __iter__(self):\n        indices = list(range(len(self.dataset)))\n\n        # add extra samples to make it evenly divisible\n        indices += indices[:(self.total_size - len(indices))]\n        assert len(indices) == self.total_size\n\n        # subsample\n        indices = indices[self.rank:self.total_size:self.num_replicas]\n        assert len(indices) == self.num_samples\n\n        return iter(indices)\n\n    def __len__(self):\n 
       return self.num_samples\n\n\nclass RepeatAugSampler(Sampler):\n    \"\"\"Sampler that restricts data loading to a subset of the dataset for distributed,\n    with repeated augmentation.\n    It ensures that different each augmented version of a sample will be visible to a\n    different process (GPU). Heavily based on torch.utils.data.DistributedSampler\n\n    This sampler was taken from https://github.com/facebookresearch/deit/blob/0c4b8f60/samplers.py\n    Used in\n    Copyright (c) 2015-present, Facebook, Inc.\n    \"\"\"\n\n    def __init__(\n            self,\n            dataset,\n            num_replicas=None,\n            rank=None,\n            shuffle=True,\n            num_repeats=3,\n            selected_round=256,\n            selected_ratio=0,\n    ):\n        if num_replicas is None:\n            if not dist.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            num_replicas = dist.get_world_size()\n        if rank is None:\n            if not dist.is_available():\n                raise RuntimeError(\"Requires distributed package to be available\")\n            rank = dist.get_rank()\n        self.dataset = dataset\n        self.num_replicas = num_replicas\n        self.rank = rank\n        self.shuffle = shuffle\n        self.num_repeats = num_repeats\n        self.epoch = 0\n        self.num_samples = int(math.ceil(len(self.dataset) * num_repeats / self.num_replicas))\n        self.total_size = self.num_samples * self.num_replicas\n        # Determine the number of samples to select per epoch for each rank.\n        # num_selected logic defaults to be the same as original RASampler impl, but this one can be tweaked\n        # via selected_ratio and selected_round args.\n        selected_ratio = selected_ratio or num_replicas  # ratio to reduce selected samples by, num_replicas if 0\n        if selected_round:\n            self.num_selected_samples = int(math.floor(\n                 
len(self.dataset) // selected_round * selected_round / selected_ratio))\n        else:\n            self.num_selected_samples = int(math.ceil(len(self.dataset) / selected_ratio))\n\n    def __iter__(self):\n        # deterministically shuffle based on epoch\n        g = torch.Generator()\n        g.manual_seed(self.epoch)\n        if self.shuffle:\n            indices = torch.randperm(len(self.dataset), generator=g)\n        else:\n            indices = torch.arange(start=0, end=len(self.dataset))\n\n        # produce repeats e.g. [0, 0, 0, 1, 1, 1, 2, 2, 2....]\n        if isinstance(self.num_repeats, float) and not self.num_repeats.is_integer():\n            # resample for repeats w/ non-integer ratio\n            repeat_size = math.ceil(self.num_repeats * len(self.dataset))\n            indices = indices[torch.tensor([int(i // self.num_repeats) for i in range(repeat_size)])]\n        else:\n            indices = torch.repeat_interleave(indices, repeats=int(self.num_repeats), dim=0)\n        indices = indices.tolist()  # leaving as tensor thrashes dataloader memory\n        # add extra samples to make it evenly divisible\n        padding_size = self.total_size - len(indices)\n        if padding_size > 0:\n            indices += indices[:padding_size]\n        assert len(indices) == self.total_size\n\n        # subsample per rank\n        indices = indices[self.rank:self.total_size:self.num_replicas]\n        assert len(indices) == self.num_samples\n\n        # return up to num selected samples\n        return iter(indices[:self.num_selected_samples])\n\n    def __len__(self):\n        return self.num_selected_samples\n\n    def set_epoch(self, epoch):\n        self.epoch = epoch\n"
  },
  {
    "path": "timm/data/imagenet_info.py",
    "content": "import csv\nimport os\nimport pkgutil\nimport re\nfrom typing import Dict, List, Optional, Union\n\nfrom .dataset_info import DatasetInfo\n\n\n# NOTE no ambiguity wrt to mapping from # classes to ImageNet subset so far, but likely to change\n_NUM_CLASSES_TO_SUBSET = {\n    1000: 'imagenet-1k',\n    11221: 'imagenet-21k-miil',  # miil subset of fall11\n    11821: 'imagenet-12k',  # timm specific 12k subset of fall11\n    21841: 'imagenet-22k',  # as in fall11.tar\n    21842: 'imagenet-22k-ms',  # a Microsoft (for FocalNet) remapping of 22k w/ moves ImageNet-1k classes to first 1000\n    21843: 'imagenet-21k-goog',  # Google's ImageNet full has two classes not in fall11\n}\n\n_SUBSETS = {\n    'imagenet1k': 'imagenet_synsets.txt',\n    'imagenet12k': 'imagenet12k_synsets.txt',\n    'imagenet22k': 'imagenet22k_synsets.txt',\n    'imagenet21k': 'imagenet21k_goog_synsets.txt',\n    'imagenet21kgoog': 'imagenet21k_goog_synsets.txt',\n    'imagenet21kmiil': 'imagenet21k_miil_synsets.txt',\n    'imagenet22kms': 'imagenet22k_ms_synsets.txt',\n}\n_LEMMA_FILE = 'imagenet_synset_to_lemma.txt'\n_DEFINITION_FILE = 'imagenet_synset_to_definition.txt'\n\n\ndef infer_imagenet_subset(model_or_cfg) -> Optional[str]:\n    if isinstance(model_or_cfg, dict):\n        num_classes = model_or_cfg.get('num_classes', None)\n    else:\n        num_classes = getattr(model_or_cfg, 'num_classes', None)\n        if not num_classes:\n            pretrained_cfg = getattr(model_or_cfg, 'pretrained_cfg', {})\n            # FIXME at some point pretrained_cfg should include dataset-tag,\n            # which will be more robust than a guess based on num_classes\n            num_classes = pretrained_cfg.get('num_classes', None)\n    if not num_classes or num_classes not in _NUM_CLASSES_TO_SUBSET:\n        return None\n    return _NUM_CLASSES_TO_SUBSET[num_classes]\n\n\nclass ImageNetInfo(DatasetInfo):\n\n    def __init__(self, subset: str = 'imagenet-1k'):\n        super().__init__()\n    
    subset = re.sub(r'[-_\\s]', '', subset.lower())\n        assert subset in _SUBSETS, f'Unknown imagenet subset {subset}.'\n\n        # WordNet synsets (part-of-speech + offset) are the unique class label names for ImageNet classifiers\n        synset_file = _SUBSETS[subset]\n        synset_data = pkgutil.get_data(__name__, os.path.join('_info', synset_file))\n        self._synsets = synset_data.decode('utf-8').splitlines()\n\n        # WordNet lemmas (canonical dictionary form of word) and definitions are used to build\n        # the class descriptions. If detailed=True both are used, otherwise just the lemmas.\n        lemma_data = pkgutil.get_data(__name__, os.path.join('_info', _LEMMA_FILE))\n        reader = csv.reader(lemma_data.decode('utf-8').splitlines(), delimiter='\\t')\n        self._lemmas = dict(reader)\n        definition_data = pkgutil.get_data(__name__, os.path.join('_info', _DEFINITION_FILE))\n        reader = csv.reader(definition_data.decode('utf-8').splitlines(), delimiter='\\t')\n        self._definitions = dict(reader)\n\n    def num_classes(self):\n        return len(self._synsets)\n\n    def label_names(self):\n        return self._synsets\n\n    def label_descriptions(self, detailed: bool = False, as_dict: bool = False) -> Union[List[str], Dict[str, str]]:\n        if as_dict:\n            return {label: self.label_name_to_description(label, detailed=detailed) for label in self._synsets}\n        else:\n            return [self.label_name_to_description(label, detailed=detailed) for label in self._synsets]\n\n    def index_to_label_name(self, index) -> str:\n        assert 0 <= index < len(self._synsets), \\\n            f'Index ({index}) out of range for dataset with {len(self._synsets)} classes.'\n        return self._synsets[index]\n\n    def index_to_description(self, index: int, detailed: bool = False) -> str:\n        label = self.index_to_label_name(index)\n        return self.label_name_to_description(label, 
detailed=detailed)\n\n    def label_name_to_description(self, label: str, detailed: bool = False) -> str:\n        if detailed:\n            description = f'{self._lemmas[label]}: {self._definitions[label]}'\n        else:\n            description = f'{self._lemmas[label]}'\n        return description\n"
  },
  {
    "path": "timm/data/loader.py",
    "content": "\"\"\" Loader Factory, Fast Collate, CUDA Prefetcher\n\nPrefetcher and Fast Collate inspired by NVIDIA APEX example at\nhttps://github.com/NVIDIA/apex/commit/d5e2bb4bdeedd27b1dfaf5bb2b24d6c000dee9be#diff-cf86c282ff7fba81fad27a559379d5bf\n\nHacked together by / Copyright 2019, Ross Wightman\n\"\"\"\nimport logging\nimport random\nfrom contextlib import suppress\nfrom functools import partial\nfrom itertools import repeat\nfrom typing import Callable, Optional, Tuple, Union\n\nimport torch\nimport torch.utils.data\nimport numpy as np\n\nfrom .constants import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom .dataset import IterableImageDataset, ImageDataset\nfrom .distributed_sampler import OrderedDistributedSampler, RepeatAugSampler\nfrom .random_erasing import RandomErasing\nfrom .mixup import FastCollateMixup\nfrom .transforms_factory import create_transform\n\n_logger = logging.getLogger(__name__)\n\n\ndef fast_collate(batch):\n    \"\"\" A fast collation function optimized for uint8 images (np array or torch) and int64 targets (labels)\"\"\"\n    assert isinstance(batch[0], tuple)\n    batch_size = len(batch)\n    if isinstance(batch[0][0], tuple):\n        # This branch 'deinterleaves' and flattens tuples of input tensors into one tensor ordered by position\n        # such that all tuple of position n will end up in a torch.split(tensor, batch_size) in nth position\n        is_np = isinstance(batch[0][0], np.ndarray)\n        inner_tuple_size = len(batch[0][0])\n        flattened_batch_size = batch_size * inner_tuple_size\n        targets = torch.zeros(flattened_batch_size, dtype=torch.int64)\n        tensor = torch.zeros((flattened_batch_size, *batch[0][0][0].shape), dtype=torch.uint8)\n        for i in range(batch_size):\n            assert len(batch[i][0]) == inner_tuple_size  # all input tensor tuples must be same length\n            for j in range(inner_tuple_size):\n                targets[i + j * batch_size] = batch[i][1]\n               
 if is_np:\n                    tensor[i + j * batch_size] += torch.from_numpy(batch[i][0][j])\n                else:\n                    tensor[i + j * batch_size] += batch[i][0][j]\n        return tensor, targets\n    elif isinstance(batch[0][0], np.ndarray):\n        targets = torch.tensor([b[1] for b in batch], dtype=torch.int64)\n        assert len(targets) == batch_size\n        tensor = torch.zeros((batch_size, *batch[0][0].shape), dtype=torch.uint8)\n        for i in range(batch_size):\n            tensor[i] += torch.from_numpy(batch[i][0])\n        return tensor, targets\n    elif isinstance(batch[0][0], torch.Tensor):\n        targets = torch.tensor([b[1] for b in batch], dtype=torch.int64)\n        assert len(targets) == batch_size\n        tensor = torch.zeros((batch_size, *batch[0][0].shape), dtype=torch.uint8)\n        for i in range(batch_size):\n            tensor[i].copy_(batch[i][0])\n        return tensor, targets\n    else:\n        assert False\n\n\ndef adapt_to_chs(x, n):\n    if not isinstance(x, (tuple, list)):\n        x = tuple(repeat(x, n))\n    elif len(x) != n:\n        x_mean = np.mean(x).item()\n        x = (x_mean,) * n\n        _logger.warning(f'Pretrained mean/std different shape than model, using avg value {x}.')\n    else:\n        assert len(x) == n, 'normalization stats must match image channels'\n    return x\n\n\nclass PrefetchLoader:\n\n    def __init__(\n            self,\n            loader: torch.utils.data.DataLoader,\n            mean: Tuple[float, ...] = IMAGENET_DEFAULT_MEAN,\n            std: Tuple[float, ...] 
= IMAGENET_DEFAULT_STD,\n            channels: int = 3,\n            device: torch.device = torch.device('cuda'),\n            img_dtype: Optional[torch.dtype] = None,\n            fp16: bool = False,\n            re_prob: float = 0.,\n            re_mode: str = 'const',\n            re_count: int = 1,\n            re_num_splits: int = 0,\n    ):\n        mean = adapt_to_chs(mean, channels)\n        std = adapt_to_chs(std, channels)\n        normalization_shape = (1, channels, 1, 1)\n\n        self.loader = loader\n        self.device = device\n        if fp16:\n            # fp16 arg is deprecated, but will override dtype arg if set for bwd compat\n            img_dtype = torch.float16\n        self.img_dtype = img_dtype or torch.float32\n        self.mean = torch.tensor(\n            [x * 255 for x in mean], device=device, dtype=img_dtype).view(normalization_shape)\n        self.std = torch.tensor(\n            [x * 255 for x in std], device=device, dtype=img_dtype).view(normalization_shape)\n        if re_prob > 0.:\n            self.random_erasing = RandomErasing(\n                probability=re_prob,\n                mode=re_mode,\n                max_count=re_count,\n                num_splits=re_num_splits,\n                device=device,\n            )\n        else:\n            self.random_erasing = None\n        self.is_cuda = device.type == 'cuda' and torch.cuda.is_available()\n        self.is_npu = device.type == 'npu' and torch.npu.is_available()\n\n    def __iter__(self):\n        first = True\n        if self.is_cuda:\n            stream = torch.cuda.Stream(device=self.device)\n            stream_context = partial(torch.cuda.stream, stream=stream)\n        elif self.is_npu:\n            stream = torch.npu.Stream(device=self.device)\n            stream_context = partial(torch.npu.stream, stream=stream)\n        else:\n            stream = None\n            stream_context = suppress\n\n        for next_input, next_target in self.loader:\n\n            
with stream_context():\n                next_input = next_input.to(device=self.device, non_blocking=True)\n                next_target = next_target.to(device=self.device, non_blocking=True)\n                next_input = next_input.to(self.img_dtype).sub_(self.mean).div_(self.std)\n                if self.random_erasing is not None:\n                    next_input = self.random_erasing(next_input)\n\n            if not first:\n                yield input, target\n            else:\n                first = False\n\n            if stream is not None:\n                if self.is_cuda:\n                    torch.cuda.current_stream(device=self.device).wait_stream(stream)\n                elif self.is_npu:\n                    torch.npu.current_stream(device=self.device).wait_stream(stream)\n\n            input = next_input\n            target = next_target\n\n        yield input, target\n\n    def __len__(self):\n        return len(self.loader)\n\n    @property\n    def sampler(self):\n        return self.loader.sampler\n\n    @property\n    def dataset(self):\n        return self.loader.dataset\n\n    @property\n    def mixup_enabled(self):\n        if isinstance(self.loader.collate_fn, FastCollateMixup):\n            return self.loader.collate_fn.mixup_enabled\n        else:\n            return False\n\n    @mixup_enabled.setter\n    def mixup_enabled(self, x):\n        if isinstance(self.loader.collate_fn, FastCollateMixup):\n            self.loader.collate_fn.mixup_enabled = x\n\n\ndef _worker_init(worker_id, worker_seeding='all'):\n    worker_info = torch.utils.data.get_worker_info()\n    assert worker_info.id == worker_id\n    if isinstance(worker_seeding, Callable):\n        seed = worker_seeding(worker_info)\n        random.seed(seed)\n        torch.manual_seed(seed)\n        np.random.seed(seed % (2 ** 32 - 1))\n    else:\n        assert worker_seeding in ('all', 'part')\n        # random / torch seed already called in dataloader iter class w/ 
worker_info.seed\n        # to reproduce some old results (same seed + hparam combo), partial seeding is required (skip numpy re-seed)\n        if worker_seeding == 'all':\n            np.random.seed(worker_info.seed % (2 ** 32 - 1))\n\n\ndef create_loader(\n        dataset: Union[ImageDataset, IterableImageDataset],\n        input_size: Union[int, Tuple[int, int], Tuple[int, int, int]],\n        batch_size: int,\n        is_training: bool = False,\n        no_aug: bool = False,\n        re_prob: float = 0.,\n        re_mode: str = 'const',\n        re_count: int = 1,\n        re_split: bool = False,\n        train_crop_mode: Optional[str] = None,\n        scale: Optional[Tuple[float, float]] = None,\n        ratio: Optional[Tuple[float, float]] = None,\n        hflip: float = 0.5,\n        vflip: float = 0.,\n        color_jitter: float = 0.4,\n        color_jitter_prob: Optional[float] = None,\n        grayscale_prob: float = 0.,\n        gaussian_blur_prob: float = 0.,\n        auto_augment: Optional[str] = None,\n        num_aug_repeats: int = 0,\n        num_aug_splits: int = 0,\n        interpolation: str = 'bilinear',\n        mean: Tuple[float, ...] = IMAGENET_DEFAULT_MEAN,\n        std: Tuple[float, ...] 
= IMAGENET_DEFAULT_STD,\n        num_workers: int = 1,\n        distributed: bool = False,\n        crop_pct: Optional[float] = None,\n        crop_mode: Optional[str] = None,\n        crop_border_pixels: Optional[int] = None,\n        collate_fn: Optional[Callable] = None,\n        pin_memory: bool = False,\n        fp16: bool = False,  # deprecated, use img_dtype\n        img_dtype: torch.dtype = torch.float32,\n        device: torch.device = torch.device('cuda'),\n        use_prefetcher: bool = True,\n        use_multi_epochs_loader: bool = False,\n        persistent_workers: bool = True,\n        worker_seeding: str = 'all',\n        tf_preprocessing: bool = False,\n):\n    \"\"\"\n\n    Args:\n        dataset: The image dataset to load.\n        input_size: Target input size (channels, height, width) tuple or size scalar.\n        batch_size: Number of samples in a batch.\n        is_training: Return training (random) transforms.\n        no_aug: Disable augmentation for training (useful for debug).\n        re_prob: Random erasing probability.\n        re_mode: Random erasing fill mode.\n        re_count: Number of random erasing regions.\n        re_split: Control split of random erasing across batch size.\n        scale: Random resize scale range (crop area, < 1.0 => zoom in).\n        ratio: Random aspect ratio range (crop ratio for RRC, ratio adjustment factor for RKR).\n        hflip: Horizontal flip probability.\n        vflip: Vertical flip probability.\n        color_jitter: Random color jitter component factors (brightness, contrast, saturation, hue).\n            Scalar is applied as (scalar,) * 3 (no hue).\n        color_jitter_prob: Apply color jitter with this probability if not None (for SimlCLR-like aug\n        grayscale_prob: Probability of converting image to grayscale (for SimCLR-like aug).\n        gaussian_blur_prob: Probability of applying gaussian blur (for SimCLR-like aug).\n        auto_augment: Auto augment configuration string (see 
auto_augment.py).\n        num_aug_repeats: Enable special sampler to repeat same augmentation across distributed GPUs.\n        num_aug_splits: Enable mode where augmentations can be split across the batch.\n        interpolation: Image interpolation mode.\n        mean: Image normalization mean.\n        std: Image normalization standard deviation.\n        num_workers: Num worker processes per DataLoader.\n        distributed: Enable dataloading for distributed training.\n        crop_pct: Inference crop percentage (output size / resize size).\n        crop_mode: Inference crop mode. One of ['squash', 'border', 'center']. Defaults to 'center' when None.\n        crop_border_pixels: Inference crop border of specified # pixels around edge of original image.\n        collate_fn: Override default collate_fn.\n        pin_memory: Pin memory for device transfer.\n        fp16: Deprecated argument for half-precision input dtype. Use img_dtype.\n        img_dtype: Data type for input image.\n        device: Device to transfer inputs and targets to.\n        use_prefetcher: Use efficient pre-fetcher to load samples onto device.\n        use_multi_epochs_loader:\n        persistent_workers: Enable persistent worker processes.\n        worker_seeding: Control worker random seeding at init.\n        tf_preprocessing: Use TF 1.0 inference preprocessing for testing model ports.\n\n    Returns:\n        DataLoader\n    \"\"\"\n    re_num_splits = 0\n    if re_split:\n        # apply RE to second half of batch if no aug split otherwise line up with aug split\n        re_num_splits = num_aug_splits or 2\n    dataset.transform = create_transform(\n        input_size,\n        is_training=is_training,\n        no_aug=no_aug,\n        train_crop_mode=train_crop_mode,\n        scale=scale,\n        ratio=ratio,\n        hflip=hflip,\n        vflip=vflip,\n        color_jitter=color_jitter,\n        color_jitter_prob=color_jitter_prob,\n        grayscale_prob=grayscale_prob,\n        
gaussian_blur_prob=gaussian_blur_prob,\n        auto_augment=auto_augment,\n        interpolation=interpolation,\n        mean=mean,\n        std=std,\n        crop_pct=crop_pct,\n        crop_mode=crop_mode,\n        crop_border_pixels=crop_border_pixels,\n        re_prob=re_prob,\n        re_mode=re_mode,\n        re_count=re_count,\n        re_num_splits=re_num_splits,\n        tf_preprocessing=tf_preprocessing,\n        use_prefetcher=use_prefetcher,\n        separate=num_aug_splits > 0,\n    )\n\n    if isinstance(dataset, IterableImageDataset):\n        # give Iterable datasets early knowledge of num_workers so that sample estimates\n        # are correct before worker processes are launched\n        dataset.set_loader_cfg(num_workers=num_workers)\n\n    sampler = None\n    if distributed and not isinstance(dataset, torch.utils.data.IterableDataset):\n        if is_training:\n            if num_aug_repeats:\n                sampler = RepeatAugSampler(dataset, num_repeats=num_aug_repeats)\n            else:\n                sampler = torch.utils.data.distributed.DistributedSampler(dataset)\n        else:\n            # This will add extra duplicate entries to result in equal num\n            # of samples per-process, will slightly alter validation results\n            sampler = OrderedDistributedSampler(dataset)\n    else:\n        assert num_aug_repeats == 0, \"RepeatAugment not currently supported in non-distributed or IterableDataset use\"\n\n    if collate_fn is None:\n        collate_fn = fast_collate if use_prefetcher else torch.utils.data.dataloader.default_collate\n\n    loader_class = torch.utils.data.DataLoader\n    if use_multi_epochs_loader:\n        loader_class = MultiEpochsDataLoader\n\n    loader_args = dict(\n        batch_size=batch_size,\n        shuffle=not isinstance(dataset, torch.utils.data.IterableDataset) and sampler is None and is_training,\n        num_workers=num_workers,\n        sampler=sampler,\n        collate_fn=collate_fn,\n   
     pin_memory=pin_memory,\n        drop_last=is_training,\n        worker_init_fn=partial(_worker_init, worker_seeding=worker_seeding),\n        persistent_workers=persistent_workers\n    )\n    try:\n        loader = loader_class(dataset, **loader_args)\n    except TypeError as e:\n        loader_args.pop('persistent_workers')  # only in Pytorch 1.7+\n        loader = loader_class(dataset, **loader_args)\n    if use_prefetcher:\n        prefetch_re_prob = re_prob if is_training and not no_aug else 0.\n        loader = PrefetchLoader(\n            loader,\n            mean=mean,\n            std=std,\n            channels=input_size[0],\n            device=device,\n            fp16=fp16,  # deprecated, use img_dtype\n            img_dtype=img_dtype,\n            re_prob=prefetch_re_prob,\n            re_mode=re_mode,\n            re_count=re_count,\n            re_num_splits=re_num_splits\n        )\n\n    return loader\n\n\nclass MultiEpochsDataLoader(torch.utils.data.DataLoader):\n\n    def __init__(self, *args, **kwargs):\n        super().__init__(*args, **kwargs)\n        self._DataLoader__initialized = False\n        if self.batch_sampler is None:\n            self.sampler = _RepeatSampler(self.sampler)\n        else:\n            self.batch_sampler = _RepeatSampler(self.batch_sampler)\n        self._DataLoader__initialized = True\n        self.iterator = super().__iter__()\n\n    def __len__(self):\n        return len(self.sampler) if self.batch_sampler is None else len(self.batch_sampler.sampler)\n\n    def __iter__(self):\n        for i in range(len(self)):\n            yield next(self.iterator)\n\n\nclass _RepeatSampler(object):\n    \"\"\" Sampler that repeats forever.\n\n    Args:\n        sampler (Sampler)\n    \"\"\"\n\n    def __init__(self, sampler):\n        self.sampler = sampler\n\n    def __iter__(self):\n        while True:\n            yield from iter(self.sampler)\n"
  },
  {
    "path": "timm/data/mixup.py",
    "content": "\"\"\" Mixup and Cutmix\n\nPapers:\nmixup: Beyond Empirical Risk Minimization (https://arxiv.org/abs/1710.09412)\n\nCutMix: Regularization Strategy to Train Strong Classifiers with Localizable Features (https://arxiv.org/abs/1905.04899)\n\nCode Reference:\nCutMix: https://github.com/clovaai/CutMix-PyTorch\n\nHacked together by / Copyright 2019, Ross Wightman\n\"\"\"\nimport numpy as np\nimport torch\n\n\ndef one_hot(x, num_classes, on_value=1., off_value=0.):\n    x = x.long().view(-1, 1)\n    return torch.full((x.size()[0], num_classes), off_value, device=x.device).scatter_(1, x, on_value)\n\n\ndef mixup_target(target, num_classes, lam=1., smoothing=0.0):\n    off_value = smoothing / num_classes\n    on_value = 1. - smoothing + off_value\n    y1 = one_hot(target, num_classes, on_value=on_value, off_value=off_value)\n    y2 = one_hot(target.flip(0), num_classes, on_value=on_value, off_value=off_value)\n    return y1 * lam + y2 * (1. - lam)\n\n\ndef rand_bbox(img_shape, lam, margin=0., count=None):\n    \"\"\" Standard CutMix bounding-box\n    Generates a random square bbox based on lambda value. 
This impl includes\n    support for enforcing a border margin as percent of bbox dimensions.\n\n    Args:\n        img_shape (tuple): Image shape as tuple\n        lam (float): Cutmix lambda value\n        margin (float): Percentage of bbox dimension to enforce as margin (reduce amount of box outside image)\n        count (int): Number of bbox to generate\n    \"\"\"\n    ratio = np.sqrt(1 - lam)\n    img_h, img_w = img_shape[-2:]\n    cut_h, cut_w = int(img_h * ratio), int(img_w * ratio)\n    margin_y, margin_x = int(margin * cut_h), int(margin * cut_w)\n    cy = np.random.randint(0 + margin_y, img_h - margin_y, size=count)\n    cx = np.random.randint(0 + margin_x, img_w - margin_x, size=count)\n    yl = np.clip(cy - cut_h // 2, 0, img_h)\n    yh = np.clip(cy + cut_h // 2, 0, img_h)\n    xl = np.clip(cx - cut_w // 2, 0, img_w)\n    xh = np.clip(cx + cut_w // 2, 0, img_w)\n    return yl, yh, xl, xh\n\n\ndef rand_bbox_minmax(img_shape, minmax, count=None):\n    \"\"\" Min-Max CutMix bounding-box\n    Inspired by Darknet cutmix impl, generates a random rectangular bbox\n    based on min/max percent values applied to each dimension of the input image.\n\n    Typical defaults for minmax are usually in the  .2-.3 for min and .8-.9 range for max.\n\n    Args:\n        img_shape (tuple): Image shape as tuple\n        minmax (tuple or list): Min and max bbox ratios (as percent of image size)\n        count (int): Number of bbox to generate\n    \"\"\"\n    assert len(minmax) == 2\n    img_h, img_w = img_shape[-2:]\n    cut_h = np.random.randint(int(img_h * minmax[0]), int(img_h * minmax[1]), size=count)\n    cut_w = np.random.randint(int(img_w * minmax[0]), int(img_w * minmax[1]), size=count)\n    yl = np.random.randint(0, img_h - cut_h, size=count)\n    xl = np.random.randint(0, img_w - cut_w, size=count)\n    yu = yl + cut_h\n    xu = xl + cut_w\n    return yl, yu, xl, xu\n\n\ndef cutmix_bbox_and_lam(img_shape, lam, ratio_minmax=None, correct_lam=True, count=None):\n    
\"\"\" Generate bbox and apply lambda correction.\n    \"\"\"\n    if ratio_minmax is not None:\n        yl, yu, xl, xu = rand_bbox_minmax(img_shape, ratio_minmax, count=count)\n    else:\n        yl, yu, xl, xu = rand_bbox(img_shape, lam, count=count)\n    if correct_lam or ratio_minmax is not None:\n        bbox_area = (yu - yl) * (xu - xl)\n        lam = 1. - bbox_area / float(img_shape[-2] * img_shape[-1])\n    return (yl, yu, xl, xu), lam\n\n\nclass Mixup:\n    \"\"\" Mixup/Cutmix that applies different params to each element or whole batch\n\n    Args:\n        mixup_alpha (float): mixup alpha value, mixup is active if > 0.\n        cutmix_alpha (float): cutmix alpha value, cutmix is active if > 0.\n        cutmix_minmax (List[float]): cutmix min/max image ratio, cutmix is active and uses this vs alpha if not None.\n        prob (float): probability of applying mixup or cutmix per batch or element\n        switch_prob (float): probability of switching to cutmix instead of mixup when both are active\n        mode (str): how to apply mixup/cutmix params (per 'batch', 'pair' (pair of elements), 'elem' (element)\n        correct_lam (bool): apply lambda correction when cutmix bbox clipped by image borders\n        label_smoothing (float): apply label smoothing to the mixed target tensor\n        num_classes (int): number of classes for target\n    \"\"\"\n    def __init__(self, mixup_alpha=1., cutmix_alpha=0., cutmix_minmax=None, prob=1.0, switch_prob=0.5,\n                 mode='batch', correct_lam=True, label_smoothing=0.1, num_classes=1000):\n        self.mixup_alpha = mixup_alpha\n        self.cutmix_alpha = cutmix_alpha\n        self.cutmix_minmax = cutmix_minmax\n        if self.cutmix_minmax is not None:\n            assert len(self.cutmix_minmax) == 2\n            # force cutmix alpha == 1.0 when minmax active to keep logic simple & safe\n            self.cutmix_alpha = 1.0\n        self.mix_prob = prob\n        self.switch_prob = switch_prob\n        
self.label_smoothing = label_smoothing\n        self.num_classes = num_classes\n        self.mode = mode\n        self.correct_lam = correct_lam  # correct lambda based on clipped area for cutmix\n        self.mixup_enabled = True  # set to false to disable mixing (intended to be set by train loop)\n\n    def _params_per_elem(self, batch_size):\n        lam = np.ones(batch_size, dtype=np.float32)\n        use_cutmix = np.zeros(batch_size, dtype=bool)\n        if self.mixup_enabled:\n            if self.mixup_alpha > 0. and self.cutmix_alpha > 0.:\n                use_cutmix = np.random.rand(batch_size) < self.switch_prob\n                lam_mix = np.where(\n                    use_cutmix,\n                    np.random.beta(self.cutmix_alpha, self.cutmix_alpha, size=batch_size),\n                    np.random.beta(self.mixup_alpha, self.mixup_alpha, size=batch_size))\n            elif self.mixup_alpha > 0.:\n                lam_mix = np.random.beta(self.mixup_alpha, self.mixup_alpha, size=batch_size)\n            elif self.cutmix_alpha > 0.:\n                use_cutmix = np.ones(batch_size, dtype=bool)\n                lam_mix = np.random.beta(self.cutmix_alpha, self.cutmix_alpha, size=batch_size)\n            else:\n                assert False, \"One of mixup_alpha > 0., cutmix_alpha > 0., cutmix_minmax not None should be true.\"\n            lam = np.where(np.random.rand(batch_size) < self.mix_prob, lam_mix.astype(np.float32), lam)\n        return lam, use_cutmix\n\n    def _params_per_batch(self):\n        lam = 1.\n        use_cutmix = False\n        if self.mixup_enabled and np.random.rand() < self.mix_prob:\n            if self.mixup_alpha > 0. 
and self.cutmix_alpha > 0.:\n                use_cutmix = np.random.rand() < self.switch_prob\n                lam_mix = np.random.beta(self.cutmix_alpha, self.cutmix_alpha) if use_cutmix else \\\n                    np.random.beta(self.mixup_alpha, self.mixup_alpha)\n            elif self.mixup_alpha > 0.:\n                lam_mix = np.random.beta(self.mixup_alpha, self.mixup_alpha)\n            elif self.cutmix_alpha > 0.:\n                use_cutmix = True\n                lam_mix = np.random.beta(self.cutmix_alpha, self.cutmix_alpha)\n            else:\n                assert False, \"One of mixup_alpha > 0., cutmix_alpha > 0., cutmix_minmax not None should be true.\"\n            lam = float(lam_mix)\n        return lam, use_cutmix\n\n    def _mix_elem(self, x):\n        batch_size = len(x)\n        lam_batch, use_cutmix = self._params_per_elem(batch_size)\n        x_orig = x.clone()  # need to keep an unmodified original for mixing source\n        for i in range(batch_size):\n            j = batch_size - i - 1\n            lam = lam_batch[i]\n            if lam != 1.:\n                if use_cutmix[i]:\n                    (yl, yh, xl, xh), lam = cutmix_bbox_and_lam(\n                        x[i].shape, lam, ratio_minmax=self.cutmix_minmax, correct_lam=self.correct_lam)\n                    x[i][:, yl:yh, xl:xh] = x_orig[j][:, yl:yh, xl:xh]\n                    lam_batch[i] = lam\n                else:\n                    x[i] = x[i] * lam + x_orig[j] * (1 - lam)\n        return torch.tensor(lam_batch, device=x.device, dtype=x.dtype).unsqueeze(1)\n\n    def _mix_pair(self, x):\n        batch_size = len(x)\n        lam_batch, use_cutmix = self._params_per_elem(batch_size // 2)\n        x_orig = x.clone()  # need to keep an unmodified original for mixing source\n        for i in range(batch_size // 2):\n            j = batch_size - i - 1\n            lam = lam_batch[i]\n            if lam != 1.:\n                if use_cutmix[i]:\n                    (yl, yh, 
xl, xh), lam = cutmix_bbox_and_lam(\n                        x[i].shape, lam, ratio_minmax=self.cutmix_minmax, correct_lam=self.correct_lam)\n                    x[i][:, yl:yh, xl:xh] = x_orig[j][:, yl:yh, xl:xh]\n                    x[j][:, yl:yh, xl:xh] = x_orig[i][:, yl:yh, xl:xh]\n                    lam_batch[i] = lam\n                else:\n                    x[i] = x[i] * lam + x_orig[j] * (1 - lam)\n                    x[j] = x[j] * lam + x_orig[i] * (1 - lam)\n        lam_batch = np.concatenate((lam_batch, lam_batch[::-1]))\n        return torch.tensor(lam_batch, device=x.device, dtype=x.dtype).unsqueeze(1)\n\n    def _mix_batch(self, x):\n        lam, use_cutmix = self._params_per_batch()\n        if lam == 1.:\n            return 1.\n        if use_cutmix:\n            (yl, yh, xl, xh), lam = cutmix_bbox_and_lam(\n                x.shape, lam, ratio_minmax=self.cutmix_minmax, correct_lam=self.correct_lam)\n            x[:, :, yl:yh, xl:xh] = x.flip(0)[:, :, yl:yh, xl:xh]\n        else:\n            x_flipped = x.flip(0).mul_(1. 
- lam)\n            x.mul_(lam).add_(x_flipped)\n        return lam\n\n    def __call__(self, x, target):\n        assert len(x) % 2 == 0, 'Batch size should be even when using this'\n        if self.mode == 'elem':\n            lam = self._mix_elem(x)\n        elif self.mode == 'pair':\n            lam = self._mix_pair(x)\n        else:\n            lam = self._mix_batch(x)\n        target = mixup_target(target, self.num_classes, lam, self.label_smoothing)\n        return x, target\n\n\nclass FastCollateMixup(Mixup):\n    \"\"\" Fast Collate w/ Mixup/Cutmix that applies different params to each element or whole batch\n\n    A Mixup impl that's performed while collating the batches.\n    \"\"\"\n\n    def _mix_elem_collate(self, output, batch, half=False):\n        batch_size = len(batch)\n        num_elem = batch_size // 2 if half else batch_size\n        assert len(output) == num_elem\n        lam_batch, use_cutmix = self._params_per_elem(num_elem)\n        is_np = isinstance(batch[0][0], np.ndarray)\n\n        for i in range(num_elem):\n            j = batch_size - i - 1\n            lam = lam_batch[i]\n            mixed = batch[i][0]\n            if lam != 1.:\n                if use_cutmix[i]:\n                    if not half:\n                        mixed = mixed.copy() if is_np else mixed.clone()\n                    (yl, yh, xl, xh), lam = cutmix_bbox_and_lam(\n                        output.shape,\n                        lam,\n                        ratio_minmax=self.cutmix_minmax,\n                        correct_lam=self.correct_lam,\n                    )\n                    mixed[:, yl:yh, xl:xh] = batch[j][0][:, yl:yh, xl:xh]\n                    lam_batch[i] = lam\n                else:\n                    if is_np:\n                        mixed = mixed.astype(np.float32) * lam + batch[j][0].astype(np.float32) * (1 - lam)\n                        np.rint(mixed, out=mixed)\n                    else:\n                        mixed = mixed.float() 
* lam + batch[j][0].float() * (1 - lam)\n                        torch.round(mixed, out=mixed)\n            output[i] += torch.from_numpy(mixed.astype(np.uint8)) if is_np else mixed.byte()\n        if half:\n            lam_batch = np.concatenate((lam_batch, np.ones(num_elem)))\n        return torch.tensor(lam_batch).unsqueeze(1)\n\n    def _mix_pair_collate(self, output, batch):\n        batch_size = len(batch)\n        lam_batch, use_cutmix = self._params_per_elem(batch_size // 2)\n        is_np = isinstance(batch[0][0], np.ndarray)\n\n        for i in range(batch_size // 2):\n            j = batch_size - i - 1\n            lam = lam_batch[i]\n            mixed_i = batch[i][0]\n            mixed_j = batch[j][0]\n            assert 0 <= lam <= 1.0\n            if lam < 1.:\n                if use_cutmix[i]:\n                    (yl, yh, xl, xh), lam = cutmix_bbox_and_lam(\n                        output.shape,\n                        lam,\n                        ratio_minmax=self.cutmix_minmax,\n                        correct_lam=self.correct_lam,\n                    )\n                    patch_i = mixed_i[:, yl:yh, xl:xh].copy() if is_np else mixed_i[:, yl:yh, xl:xh].clone()\n                    mixed_i[:, yl:yh, xl:xh] = mixed_j[:, yl:yh, xl:xh]\n                    mixed_j[:, yl:yh, xl:xh] = patch_i\n                    lam_batch[i] = lam\n                else:\n                    if is_np:\n                        mixed_temp = mixed_i.astype(np.float32) * lam + mixed_j.astype(np.float32) * (1 - lam)\n                        mixed_j = mixed_j.astype(np.float32) * lam + mixed_i.astype(np.float32) * (1 - lam)\n                        mixed_i = mixed_temp\n                        np.rint(mixed_j, out=mixed_j)\n                        np.rint(mixed_i, out=mixed_i)\n                    else:\n                        mixed_temp = mixed_i.float() * lam + mixed_j.float() * (1 - lam)\n                        mixed_j = mixed_j.float() * lam + mixed_i.float() * (1 - 
lam)\n                        mixed_i = mixed_temp\n                        torch.round(mixed_j, out=mixed_j)\n                        torch.round(mixed_i, out=mixed_i)\n            output[i] += torch.from_numpy(mixed_i.astype(np.uint8)) if is_np else mixed_i.byte()\n            output[j] += torch.from_numpy(mixed_j.astype(np.uint8)) if is_np else mixed_j.byte()\n        lam_batch = np.concatenate((lam_batch, lam_batch[::-1]))\n        return torch.tensor(lam_batch).unsqueeze(1)\n\n    def _mix_batch_collate(self, output, batch):\n        batch_size = len(batch)\n        lam, use_cutmix = self._params_per_batch()\n        is_np = isinstance(batch[0][0], np.ndarray)\n\n        if use_cutmix:\n            (yl, yh, xl, xh), lam = cutmix_bbox_and_lam(\n                output.shape,\n                lam,\n                ratio_minmax=self.cutmix_minmax,\n                correct_lam=self.correct_lam,\n            )\n        for i in range(batch_size):\n            j = batch_size - i - 1\n            mixed = batch[i][0]\n            if lam != 1.:\n                if use_cutmix:\n                    mixed = mixed.copy() if is_np else mixed.clone()  # don't want to modify the original while iterating\n                    mixed[:, yl:yh, xl:xh] = batch[j][0][:, yl:yh, xl:xh]\n                else:\n                    if is_np:\n                        mixed = mixed.astype(np.float32) * lam + batch[j][0].astype(np.float32) * (1 - lam)\n                        np.rint(mixed, out=mixed)\n                    else:\n                        mixed = mixed.float() * lam + batch[j][0].float() * (1 - lam)\n                        torch.round(mixed, out=mixed)\n            output[i] += torch.from_numpy(mixed.astype(np.uint8)) if is_np else mixed.byte()\n        return lam\n\n    def __call__(self, batch, _=None):\n        batch_size = len(batch)\n        assert batch_size % 2 == 0, 'Batch size should be even when using this'\n        half = 'half' in self.mode\n        if half:\n      
      batch_size //= 2\n        output = torch.zeros((batch_size, *batch[0][0].shape), dtype=torch.uint8)\n        if self.mode == 'elem' or self.mode == 'half':\n            lam = self._mix_elem_collate(output, batch, half=half)\n        elif self.mode == 'pair':\n            lam = self._mix_pair_collate(output, batch)\n        else:\n            lam = self._mix_batch_collate(output, batch)\n        target = torch.tensor([b[1] for b in batch], dtype=torch.int64)\n        target = mixup_target(target, self.num_classes, lam, self.label_smoothing)\n        target = target[:batch_size]\n        return output, target\n\n"
  },
  {
    "path": "timm/data/naflex_dataset.py",
    "content": "\"\"\" Dynamic Sequence Length Datasets for Variable Resolution Image Processing\n\nImplements two dataset wrappers:\n1. NaFlexMapDatasetWrapper - Map-style dataset that returns batches with variable sequence lengths\nTODO: 2. NaFlexIterableDatasetWrapper - Iterable dataset that yields batches with variable sequence lengths\n\nBoth support:\n- Pre-initialized transforms for efficiency\n- Distributed training\n- Multiple workers\n- Variable batch sizes based on sequence length\n\nHacked together by / Copyright 2025, Ross Wightman, Hugging Face\n\"\"\"\n\nimport math\nimport random\nimport warnings\nfrom functools import partial\nfrom typing import Any, Iterator, List, Tuple, Dict, Optional, Union, Callable\n\nimport torch\nfrom torch.utils.data import Dataset, IterableDataset, DataLoader\nfrom PIL import Image\n\nfrom .naflex_transforms import Patchify\nfrom timm.layers import to_2tuple\n\n\ndef calculate_naflex_batch_size(\n        tokens_per_batch: int,\n        seq_len: int,\n        max_size: Optional[int] = None,\n        divisor: int = 1,\n        rounding: str = 'floor',\n) -> int:\n    \"\"\"Calculate batch size based on sequence length with divisibility constraints.\n\n    Args:\n        tokens_per_batch: Target number of tokens per batch.\n        seq_len: Sequence length for this batch.\n        max_size: Optional maximum batch size.\n        divisor: Ensure batch size is divisible by this value.\n        rounding: Rounding method ('floor', 'ceil', 'round').\n\n    Returns:\n        Calculated batch size.\n    \"\"\"\n    # Calculate raw batch size based on sequence length\n    raw_batch_size = tokens_per_batch / seq_len\n\n    # Apply divisibility with specified rounding method\n    if divisor > 1:\n        if rounding == 'floor':\n            batch_size = math.floor(raw_batch_size / divisor) * divisor\n        elif rounding == 'ceil':\n            batch_size = math.ceil(raw_batch_size / divisor) * divisor\n        else:  # 'round' is the 
default\n            batch_size = round(raw_batch_size / divisor) * divisor\n    else:\n        # If no divisor specified, just use integer division\n        batch_size = int(raw_batch_size)\n\n    # Ensure batch size is valid\n    batch_size = max(1, batch_size)  # At least 1\n\n    if max_size is not None:\n        batch_size = min(batch_size, max_size)\n\n    return batch_size\n\n\nclass NaFlexCollator:\n    \"\"\"Custom collator for batching NaFlex-style variable-resolution images.\"\"\"\n\n    def __init__(\n            self,\n            max_seq_len: Optional[int] = None,\n    ) -> None:\n        \"\"\"Initialize NaFlexCollator.\n\n        Args:\n            max_seq_len: Maximum sequence length for batching.\n        \"\"\"\n        self.max_seq_len = max_seq_len or 576  # Default ViT-B/16 sequence length (576 = 24*24)\n\n    def __call__(self, batch: List[Tuple[Dict[str, torch.Tensor], Union[int, torch.Tensor]]]) -> Tuple[Dict[str, torch.Tensor], torch.Tensor]:\n        \"\"\"Collate batch of NaFlex samples.\n\n        Args:\n            batch: List of tuples (patch_dict, target).\n\n        Returns:\n            A tuple of (input_dict, targets) where input_dict contains:\n                - patches: Padded tensor of patches\n                - patch_coord: Coordinates for each patch (y, x)\n                - patch_valid: Valid indicators\n        \"\"\"\n        assert isinstance(batch[0], tuple)\n        batch_size = len(batch)\n\n        # Extract targets\n        targets = [item[1] for item in batch]\n        if isinstance(targets[0], torch.Tensor):\n            targets = torch.stack(targets)\n        else:\n            targets = torch.tensor(targets, dtype=torch.int64)\n\n        # Get patch dictionaries\n        patch_dicts = [item[0] for item in batch]\n\n        # If we have a maximum sequence length constraint, ensure we don't exceed it\n        if self.max_seq_len is not None:\n            max_patches = self.max_seq_len\n        else:\n            # 
Find the maximum number of patches in this batch\n            max_patches = max(item['patches'].shape[0] for item in patch_dicts)\n\n        # Check if patches are flattened or unflattened\n        patches_tensor = patch_dicts[0]['patches']\n        is_unflattened = patches_tensor.ndim == 4  # [N, Ph, Pw, C]\n\n        if is_unflattened:\n            # Patches are [N, Ph, Pw, C] - variable patch size mode\n            _, ph, pw, c = patches_tensor.shape\n            patches = torch.zeros((batch_size, max_patches, ph, pw, c), dtype=torch.float32)\n        else:\n            # Patches are [N, P*P*C] - normal mode\n            patch_dim = patches_tensor.shape[1]\n            patches = torch.zeros((batch_size, max_patches, patch_dim), dtype=torch.float32)\n\n        # Prepare other tensors\n        patch_coord = torch.zeros((batch_size, max_patches, 2), dtype=torch.int64)  # [B, N, 2] for (y, x)\n        patch_valid = torch.zeros((batch_size, max_patches), dtype=torch.bool)\n\n        # Fill in the tensors\n        for i, patch_dict in enumerate(patch_dicts):\n            num_patches = min(patch_dict['patches'].shape[0], max_patches)\n\n            patches[i, :num_patches] = patch_dict['patches'][:num_patches]\n            patch_coord[i, :num_patches] = patch_dict['patch_coord'][:num_patches]\n            patch_valid[i, :num_patches] = patch_dict['patch_valid'][:num_patches]\n\n        result = {\n            'patches': patches,\n            'patch_coord': patch_coord,\n            'patch_valid': patch_valid,\n            'seq_len': max_patches,\n        }\n\n        return result, targets\n\n\ndef _resolve_patch_cfg(\n        patch_size: Optional[Union[int, Tuple[int, int]]],\n        patch_size_choices: Optional[List[int]],\n        patch_size_choice_probs: Optional[List[float]],\n) -> Tuple[List[Tuple[int, int]], List[float], bool]:\n    \"\"\"Resolve patch size configuration.\n\n    Args:\n        patch_size: Single patch size to use.\n        patch_size_choices: 
List of patch sizes to choose from.\n        patch_size_choice_probs: Probabilities for each patch size choice.\n\n    Returns:\n        Tuple of (sizes, probs, variable) where sizes is list of patch size tuples,\n        probs is list of probabilities, and variable indicates if patch size varies.\n    \"\"\"\n    # If both are None, default to patch_size=16\n    if patch_size is None and patch_size_choices is None:\n        patch_size = 16\n\n    if (patch_size is None) == (patch_size_choices is None):\n        raise ValueError(\n            \"Specify exactly one of `patch_size` or `patch_size_choices`.\"\n        )\n\n    if patch_size is not None:\n        sizes = [to_2tuple(patch_size)]\n        probs = [1.0]\n        variable = False\n    else:\n        sizes = [to_2tuple(p) for p in patch_size_choices]\n        if patch_size_choice_probs is None:\n            probs = [1.0 / len(sizes)] * len(sizes)\n        else:\n            if len(patch_size_choice_probs) != len(sizes):\n                raise ValueError(\"`patch_size_choice_probs` length mismatch.\")\n            s = float(sum(patch_size_choice_probs))\n            if s <= 0:\n                raise ValueError(\"`patch_size_choice_probs` sum to zero.\")\n            probs = [p / s for p in patch_size_choice_probs]\n        variable = True\n    return sizes, probs, variable\n\n\nclass NaFlexMapDatasetWrapper(IterableDataset):\n    \"\"\"\n    IterableDataset wrapper for a map-style base dataset.\n\n    Yields batches with variable sequence lengths. It calculates a canonical\n    batch schedule (sequence length, batch size pairs) once based on the\n    total dataset size (padded for distribution). Each epoch, it shuffles\n    the order of this canonical schedule and the dataset indices.\n    This ensures a consistent number of batches and samples per epoch\n    across all ranks. 
Handles distributed training and multiple workers.\n    \"\"\"\n\n    def __init__(\n            self,\n            base_dataset: Dataset,\n            patch_size: Optional[Union[int, Tuple[int, int]]] = None,\n            patch_size_choices: Optional[List[int]] = None,\n            patch_size_choice_probs: Optional[List[float]] = None,\n            seq_lens: Tuple[int, ...] = (128, 256, 576, 784, 1024),\n            max_tokens_per_batch: int = 4096 * 4,\n            transform_factory: Optional[Callable] = None,\n            mixup_fn: Optional[Callable] = None,\n            seed: int = 42,\n            shuffle: bool = True,\n            distributed: bool = False,\n            rank: int = 0,\n            world_size: int = 1,\n            epoch: int = 0,\n            batch_divisor: int = 8,\n    ) -> None:\n        \"\"\"Initialize NaFlexMapDatasetWrapper.\n\n        Args:\n            base_dataset: Map-style dataset to wrap.\n            patch_size: Single patch size to use.\n            patch_size_choices: List of patch sizes to randomly select from.\n            patch_size_choice_probs: Probabilities for each patch size.\n            seq_lens: Sequence lengths to use for batching.\n            max_tokens_per_batch: Target tokens per batch.\n            transform_factory: Factory function for creating transforms.\n            mixup_fn: Optional mixup function.\n            seed: Random seed.\n            shuffle: Whether to shuffle data.\n            distributed: Whether using distributed training.\n            rank: Process rank for distributed training.\n            world_size: Total number of processes.\n            epoch: Starting epoch.\n            batch_divisor: Ensure batch size is divisible by this.\n        \"\"\"\n        super().__init__()\n        if not hasattr(base_dataset, '__len__') or not hasattr(base_dataset, '__getitem__'):\n            raise TypeError(\"base_dataset must be a map-style dataset (implement __len__ and __getitem__)\")\n\n        
self.base_dataset = base_dataset\n        self.seq_lens = sorted(list(set(seq_lens))) # Ensure unique and sorted\n        self.max_tokens_per_batch = max_tokens_per_batch\n        self.seed = seed\n        self.shuffle = shuffle\n        self.distributed = distributed\n        self.rank = rank if distributed else 0\n        self.world_size = world_size if distributed else 1\n        self.epoch = epoch\n        self.batch_divisor = batch_divisor\n\n        # Resolve patch size configuration\n        self.patch_sizes, self.patch_size_probs, self.variable_patch_size = _resolve_patch_cfg(\n            patch_size,\n            patch_size_choices,\n            patch_size_choice_probs\n        )\n\n        # Pre-initialize transforms and collate fns for each (seq_len, patch_idx) combination\n        self.transforms: Dict[Tuple[int, int], Optional[Callable]] = {}\n        self.collate_fns: Dict[int, Callable] = {}\n        self.patchifiers: List[Callable] = []\n\n        for seq_len in self.seq_lens:\n            self.collate_fns[seq_len] = NaFlexCollator(seq_len)\n\n        for patch_idx, patch_size_tuple in enumerate(self.patch_sizes):\n            # Pre-initialize patchifiers for each patch size (indexed by patch_idx)\n            self.patchifiers.append(Patchify(\n                patch_size=patch_size_tuple,\n                flatten_patches=not self.variable_patch_size\n            ))\n\n            # Create transforms for each (seq_len, patch_idx) combination\n            for seq_len in self.seq_lens:\n                key = (seq_len, patch_idx)\n                if transform_factory:\n                    self.transforms[key] = transform_factory(max_seq_len=seq_len, patch_size=patch_size_tuple)\n                else:\n                    self.transforms[key] = None # No transform\n\n        self.mixup_fn = mixup_fn\n\n        # Canonical Schedule Calculation (Done Once)\n        self._canonical_batch_schedule: List[Tuple[int, int]] = []\n        
self._num_batches_per_rank: int = 0\n        self._padded_samples_per_rank: int = 0\n        self._create_canonical_schedule() # Calculate schedule based on padded size\n\n        # Per-Epoch State\n        # Stores (seq_len, list_of_indices) for the current epoch, specific to this rank\n        self._epoch_batches: List[Tuple[int, List[int]]] = []\n        self._prepare_epoch_batches(self.epoch)  # setup for initial epoch\n\n    def _create_canonical_schedule(self):\n        \"\"\"\n        Calculates the canonical batch schedule (seq_len, batch_size pairs)\n        based on the dataset size, padded for distributed training.\n        This schedule is the *same* for all ranks and ensures consistent\n        epoch length. It is calculated once during initialization.\n        \"\"\"\n        total_len = len(self.base_dataset)\n        padded_total_len = total_len\n        num_samples_per_rank = total_len\n\n        if self.distributed and self.world_size > 1:\n            # Calculate padding needed for even distribution\n            if total_len % self.world_size != 0:\n                 pad_size = self.world_size - (total_len % self.world_size)\n                 padded_total_len += pad_size\n                 print(f\"Rank {self.rank}: Padding dataset with {pad_size} samples for distributed training (total size {padded_total_len}).\")\n            else:\n                 pad_size = 0\n\n            if padded_total_len % self.world_size != 0:\n                 # This should not happen with the padding logic, but safeguard\n                 raise RuntimeError(f\"Internal Error: Padded total length {padded_total_len} not divisible by world size {self.world_size}\")\n\n            num_samples_per_rank = padded_total_len // self.world_size\n        elif self.distributed and self.world_size <= 1:\n             # Distributed flag set but world_size is 1, treat as non-distributed\n             pass # num_samples_per_rank remains total_len\n\n        
self._padded_samples_per_rank = num_samples_per_rank\n\n        if num_samples_per_rank == 0:\n             self._canonical_batch_schedule = []\n             self._num_batches_per_rank = 0\n             return\n\n        # Use a fixed seed for generating the canonical schedule structure\n        g = torch.Generator()\n        g.manual_seed(self.seed) # Use base seed, NOT epoch seed\n\n        current_schedule: List[Tuple[int, int]] = []\n        remaining_samples = num_samples_per_rank\n        total_scheduled_samples = 0\n\n        while remaining_samples > 0:\n            # Sample sequence length deterministically based on base seed\n            seq_idx = torch.randint(0, len(self.seq_lens), (1,), generator=g).item()\n            seq_len = self.seq_lens[seq_idx]\n\n            # Calculate batch size\n            batch_size = calculate_naflex_batch_size(\n                tokens_per_batch=self.max_tokens_per_batch,\n                seq_len=seq_len,\n                # max_size should be remaining_samples to avoid overshooting\n                max_size=remaining_samples,\n                divisor=self.batch_divisor,\n                rounding='floor',\n            )\n            # Ensure batch size is positive and doesn't exceed remaining samples\n            batch_size = max(1, batch_size)\n            batch_size = min(batch_size, remaining_samples)\n\n            if batch_size <= 0:\n                 warnings.warn(f\"Calculated batch size <= 0 (seq_len={seq_len}, remaining={remaining_samples}). 
Stopping schedule generation early.\")\n                 break # Avoid infinite loop if something goes wrong\n\n            current_schedule.append((seq_len, batch_size))\n            remaining_samples -= batch_size\n            total_scheduled_samples += batch_size\n\n        # Sanity check: Ensure the schedule covers all samples for the rank\n        if total_scheduled_samples != num_samples_per_rank:\n            warnings.warn(\n                f\"Rank {self.rank}: Canonical schedule accounts for {total_scheduled_samples} samples, \"\n                f\"but expected {num_samples_per_rank} samples per rank. \"\n                f\"This might happen if min_batch_size or batch_divisor constraints prevent utilizing all samples. \"\n                f\"Check parameters. Remaining samples: {remaining_samples}\"\n            )\n            # Adjust if needed? Could add a final small batch, but might violate constraints.\n            # Current behavior: some samples might be dropped if schedule logic fails.\n\n        self._canonical_batch_schedule = current_schedule\n        self._num_batches_per_rank = len(current_schedule)\n        print(f\"Rank {self.rank}: Created canonical schedule with {self._num_batches_per_rank} batches for {self._padded_samples_per_rank} samples/rank.\")\n\n\n    def _prepare_epoch_batches(self, epoch: int):\n        \"\"\"\n        Prepares the batches for the current epoch by:\n        1. Shuffling the full dataset indices (using epoch seed).\n        2. Applying padding if in distributed mode.\n        3. Selecting indices for the current rank.\n        4. Shuffling the *order* of the canonical batch schedule (using epoch seed).\n        5. Assigning the rank's indices to the shuffled batches.\n        \"\"\"\n        g = torch.Generator()\n        g.manual_seed(self.seed + epoch) # Epoch-specific seed for shuffling\n\n        # 1. 
Get shuffled global indices\n        total_len = len(self.base_dataset)\n        if self.shuffle:\n            all_indices_shuffled = torch.randperm(total_len, generator=g).tolist()\n        else:\n            all_indices_shuffled = list(range(total_len))\n\n        # 2. Apply padding for distributed mode\n        indices_for_ranks = all_indices_shuffled\n        if self.distributed and self.world_size > 1:\n            padded_total_len = self._padded_samples_per_rank * self.world_size\n            if padded_total_len > total_len:\n                pad_size = padded_total_len - total_len\n                # Repeat initial elements from the *shuffled* list for padding\n                indices_for_ranks = all_indices_shuffled + all_indices_shuffled[:pad_size]\n            # Ensure length matches expectation\n            if len(indices_for_ranks) != padded_total_len:\n                 raise RuntimeError(f\"Internal Error: Padded index list length {len(indices_for_ranks)} does not match expected {padded_total_len}\")\n\n        # 3. Select indices for the current rank\n        if self.distributed and self.world_size > 1:\n            indices_this_rank = indices_for_ranks[self.rank::self.world_size]\n        else: # Non-distributed or world_size=1\n            indices_this_rank = indices_for_ranks\n\n        # Sanity check length\n        if len(indices_this_rank) != self._padded_samples_per_rank:\n             # This might happen if canonical schedule generation had warnings/issues\n             warnings.warn(\n                 f\"Rank {self.rank}: Number of indices for this rank ({len(indices_this_rank)}) \"\n                 f\"does not match expected padded samples per rank ({self._padded_samples_per_rank}). \"\n                 f\"Epoch generation might be inconsistent.\"\n              )\n             # Adjust expected samples? Or truncate/pad indices? 
Let's proceed but warn.\n             # Using min() prevents IndexError later if indices are fewer than expected.\n             effective_samples_this_rank = min(len(indices_this_rank), self._padded_samples_per_rank)\n             indices_this_rank = indices_this_rank[:effective_samples_this_rank]\n\n        else:\n             effective_samples_this_rank = self._padded_samples_per_rank\n\n        # 4. Shuffle the order of the canonical batch schedule for this epoch\n        if self.shuffle:\n            schedule_perm = torch.randperm(self._num_batches_per_rank, generator=g).tolist()\n            shuffled_schedule = [self._canonical_batch_schedule[i] for i in schedule_perm]\n        else:\n            shuffled_schedule = list(self._canonical_batch_schedule) # Keep original order\n\n        # 5. Assign indices to the shuffled batches\n        self._epoch_batches = []\n        idx_pos = 0\n        scheduled_samples_count = 0\n        for seq_len, bs in shuffled_schedule:\n            # Ensure we don't try to grab more indices than available for the rank\n            actual_bs = min(bs, effective_samples_this_rank - idx_pos)\n            if actual_bs <= 0:\n                 if scheduled_samples_count < effective_samples_this_rank:\n                     # This indicates mismatch between schedule total and actual samples\n                     warnings.warn(f\"Rank {self.rank}: Ran out of samples ({idx_pos}/{effective_samples_this_rank}) before processing entire schedule. 
Check schedule generation.\")\n                 break # Stop if no more indices or batch size is zero\n\n            batch_indices = indices_this_rank[idx_pos : idx_pos + actual_bs]\n            self._epoch_batches.append((seq_len, batch_indices))\n            idx_pos += actual_bs\n            scheduled_samples_count += actual_bs\n\n        # Final check\n        if scheduled_samples_count != effective_samples_this_rank:\n             warnings.warn(\n                f\"Rank {self.rank}: Assigned {scheduled_samples_count} samples to batches, \"\n                f\"but expected {effective_samples_this_rank} effective samples this epoch. \"\n                f\"Indices remaining: {effective_samples_this_rank - scheduled_samples_count}.\"\n             )\n\n    def set_epoch(self, epoch: int) -> None:\n        \"\"\"Updates the epoch, regenerating the epoch-specific batches.\n\n        Args:\n            epoch: New epoch number.\n        \"\"\"\n        # Only regenerate if the epoch actually changes\n        if epoch != self.epoch:\n            self.epoch = epoch\n            self._prepare_epoch_batches(epoch)\n\n    def __len__(self) -> int:\n        \"\"\"Returns the number of batches per worker for the current epoch.\n\n        Returns:\n            Number of batches this worker will process.\n        \"\"\"\n        return self._num_batches_per_rank\n\n    def __iter__(self) -> Iterator[Tuple[Dict[str, torch.Tensor], torch.Tensor]]:\n        \"\"\"Iterates through pre-calculated batches for the current epoch.\n\n        Yields:\n            Tuple of (input_dict, targets) for each batch.\n        \"\"\"\n        worker_info = torch.utils.data.get_worker_info()\n        num_workers = worker_info.num_workers if worker_info else 1\n        worker_id = worker_info.id if worker_info else 0\n\n        # Distribute pre-calculated batches among workers for this rank\n        # Each worker processes a slice of the batches prepared in _prepare_epoch_batches\n        
batches_for_worker = self._epoch_batches[worker_id::num_workers]\n        for seq_len, indices in batches_for_worker:\n            if not indices: # Skip if a batch ended up with no indices (shouldn't happen often)\n                 continue\n\n            # Select patch size for this batch\n            patch_idx = 0\n            if self.variable_patch_size:\n                # Use torch multinomial for weighted random choice\n                patch_idx = torch.multinomial(torch.tensor(self.patch_size_probs), 1).item()\n\n            # Get the pre-initialized transform and patchifier using patch_idx\n            transform_key = (seq_len, patch_idx)\n            transform = self.transforms.get(transform_key)\n            batch_patchifier = self.patchifiers[patch_idx]\n\n            batch_imgs = []\n            batch_targets = []\n            for idx in indices:\n                try:\n                    # Get original image and label from map-style dataset\n                    img, label = self.base_dataset[idx]\n\n                    # Apply transform if available\n                    # Handle cases where transform might return None or fail\n                    processed_img = transform(img) if transform else img\n                    if processed_img is None:\n                        warnings.warn(f\"Transform returned None for index {idx}. Skipping sample.\")\n                        continue\n\n                    batch_imgs.append(processed_img)\n                    batch_targets.append(label)\n\n                except IndexError:\n                     warnings.warn(f\"IndexError encountered for index {idx} (possibly due to padding/repeated indices). Skipping sample.\")\n                     continue\n                except Exception as e:\n                    # Log other potential errors during data loading/processing\n                    warnings.warn(f\"Error processing sample index {idx}. Error: {e}. 
Skipping sample.\")\n                    continue # Skip problematic sample\n\n            if self.mixup_fn is not None:\n                batch_imgs, batch_targets = self.mixup_fn(batch_imgs, batch_targets)\n\n            batch_imgs = [batch_patchifier(img) for img in batch_imgs]\n            batch_samples = list(zip(batch_imgs, batch_targets))\n            if batch_samples: # Only yield if we successfully processed samples\n                # Collate the processed samples into a batch\n                yield self.collate_fns[seq_len](batch_samples)\n\n            # If batch_samples is empty after processing 'indices', an empty batch is skipped.\n"
  },
  {
    "path": "timm/data/naflex_loader.py",
    "content": "\"\"\"NaFlex data loader for dynamic sequence length training.\n\nThis module provides a specialized data loader for Vision Transformer models that supports:\n- Dynamic sequence length sampling during training for improved efficiency\n- Variable patch size training with probabilistic selection\n- Patch-level random erasing augmentation\n- Efficient GPU prefetching with normalization\n\nHacked together by / Copyright 2025, Ross Wightman, Hugging Face\n\"\"\"\n\nimport math\nfrom contextlib import suppress\nfrom functools import partial\nfrom typing import Callable, Dict, Iterator, List, Optional, Tuple, Union\n\n\nimport torch\n\nfrom .constants import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom .loader import _worker_init, adapt_to_chs\nfrom .naflex_dataset import NaFlexMapDatasetWrapper, NaFlexCollator\nfrom .naflex_random_erasing import PatchRandomErasing\nfrom .transforms_factory import create_transform\n\n\nclass NaFlexPrefetchLoader:\n    \"\"\"Data prefetcher for NaFlex format which normalizes patches.\"\"\"\n\n    def __init__(\n            self,\n            loader: torch.utils.data.DataLoader,\n            mean: Tuple[float, ...] = IMAGENET_DEFAULT_MEAN,\n            std: Tuple[float, ...] 
= IMAGENET_DEFAULT_STD,\n            channels: int = 3,\n            device: torch.device = torch.device('cuda'),\n            img_dtype: Optional[torch.dtype] = None,\n            re_prob: float = 0.,\n            re_mode: str = 'const',\n            re_count: int = 1,\n            re_num_splits: int = 0,\n        ) -> None:\n        \"\"\"Initialize NaFlexPrefetchLoader.\n\n        Args:\n            loader: DataLoader to prefetch from.\n            mean: Mean values for normalization.\n            std: Standard deviation values for normalization.\n            channels: Number of image channels.\n            device: Device to move tensors to.\n            img_dtype: Data type for image tensors.\n            re_prob: Random erasing probability.\n            re_mode: Random erasing mode.\n            re_count: Maximum number of erasing rectangles.\n            re_num_splits: Number of augmentation splits.\n        \"\"\"\n        self.loader = loader\n        self.device = device\n        self.img_dtype = img_dtype or torch.float32\n\n        # Create mean/std tensors for normalization (will be applied to patches)\n        mean = adapt_to_chs(mean, channels)\n        std = adapt_to_chs(std, channels)\n        normalization_shape = (1, 1, channels)\n        self.channels = channels\n        self.mean = torch.tensor(\n            [x * 255 for x in mean], device=device, dtype=self.img_dtype).view(normalization_shape)\n        self.std = torch.tensor(\n            [x * 255 for x in std], device=device, dtype=self.img_dtype).view(normalization_shape)\n\n        if re_prob > 0.:\n            self.random_erasing = PatchRandomErasing(\n                erase_prob=re_prob,\n                mode=re_mode,\n                max_count=re_count,\n                num_splits=re_num_splits,\n                device=device,\n            )\n        else:\n            self.random_erasing = None\n\n        # Check for CUDA/NPU availability\n        self.is_cuda = device.type == 'cuda' and 
torch.cuda.is_available()\n        self.is_npu = device.type == 'npu' and torch.npu.is_available()\n\n    def __iter__(self) -> Iterator[Tuple[Dict[str, torch.Tensor], torch.Tensor]]:\n        \"\"\"Iterate through the loader with prefetching and normalization.\n\n        Yields:\n            Tuple of (input_dict, targets) with normalized patches.\n        \"\"\"\n        first = True\n        if self.is_cuda:\n            stream = torch.cuda.Stream(device=self.device)\n            stream_context = partial(torch.cuda.stream, stream=stream)\n        elif self.is_npu:\n            stream = torch.npu.Stream(device=self.device)\n            stream_context = partial(torch.npu.stream, stream=stream)\n        else:\n            stream = None\n            stream_context = suppress\n\n        for next_input_dict, next_target in self.loader:\n            with stream_context():\n                # Move all tensors in input_dict to device\n                for k, v in next_input_dict.items():\n                    if isinstance(v, torch.Tensor):\n                        dtype = self.img_dtype if k == 'patches' else None\n                        next_input_dict[k] = next_input_dict[k].to(\n                            device=self.device,\n                            non_blocking=True,\n                            dtype=dtype,\n                        )\n\n                next_target = next_target.to(device=self.device, non_blocking=True)\n\n                # Normalize patch values - handle both [B, N, P*P*C] and [B, N, Ph, Pw, C] formats\n                patches_tensor = next_input_dict['patches']\n                original_shape = patches_tensor.shape\n\n                if patches_tensor.ndim == 3:\n                    # Format: [B, N, P*P*C] - flattened patches\n                    batch_size, num_patches, patch_pixels = original_shape\n                    # To [B*N, P*P, C] for normalization and erasing\n                    patches = patches_tensor.view(batch_size, num_patches, 
-1, self.channels)\n                elif patches_tensor.ndim == 5:\n                    # Format: [B, N, Ph, Pw, C] - unflattened patches (variable patch size mode)\n                    batch_size, num_patches, patch_h, patch_w, channels = original_shape\n                    assert channels == self.channels, f\"Expected {self.channels} channels, got {channels}\"\n                    # To [B*N, Ph*Pw, C] for normalization and erasing\n                    patches = patches_tensor.view(batch_size, num_patches, -1, self.channels)\n                else:\n                    raise ValueError(f\"Unexpected patches tensor dimensions: {patches_tensor.ndim}. Expected 3 or 5.\")\n\n                # Apply normalization\n                patches = patches.sub(self.mean).div(self.std)\n\n                if self.random_erasing is not None:\n                    patches = self.random_erasing(\n                        patches,\n                        patch_coord=next_input_dict['patch_coord'],\n                        patch_valid=next_input_dict.get('patch_valid', None),\n                    )\n\n                # Reshape back to original format\n                next_input_dict['patches'] = patches.view(original_shape)\n\n            if not first:\n                yield input_dict, target\n            else:\n                first = False\n\n            if stream is not None:\n                if self.is_cuda:\n                    torch.cuda.current_stream(device=self.device).wait_stream(stream)\n                elif self.is_npu:\n                    torch.npu.current_stream(device=self.device).wait_stream(stream)\n\n            input_dict = next_input_dict\n            target = next_target\n\n        yield input_dict, target\n\n    def __len__(self) -> int:\n        \"\"\"Get length of underlying loader.\n\n        Returns:\n            Number of batches in the loader.\n        \"\"\"\n        return len(self.loader)\n\n    @property\n    def sampler(self):\n        \"\"\"Get 
sampler from underlying loader.\n\n        Returns:\n            Sampler from the underlying DataLoader.\n        \"\"\"\n        return self.loader.sampler\n\n    @property\n    def dataset(self):\n        \"\"\"Get dataset from underlying loader.\n\n        Returns:\n            Dataset from the underlying DataLoader.\n        \"\"\"\n        return self.loader.dataset\n\n\ndef create_naflex_loader(\n        dataset,\n        patch_size: Optional[Union[Tuple[int, int], int]] = None,\n        patch_size_choices: Optional[List[int]] = None,\n        patch_size_choice_probs: Optional[List[float]] = None,\n        train_seq_lens: Tuple[int, ...] = (128, 256, 576, 784, 1024),\n        max_seq_len: int = 576,\n        batch_size: int = 32,\n        is_training: bool = False,\n        mixup_fn: Optional[Callable] = None,\n\n        no_aug: bool = False,\n        re_prob: float = 0.,\n        re_mode: str = 'const',\n        re_count: int = 1,\n        re_split: bool = False,\n        train_crop_mode: Optional[str] = None,\n        scale: Optional[Tuple[float, float]] = None,\n        ratio: Optional[Tuple[float, float]] = None,\n        hflip: float = 0.5,\n        vflip: float = 0.,\n        color_jitter: float = 0.4,\n        color_jitter_prob: Optional[float] = None,\n        grayscale_prob: float = 0.,\n        gaussian_blur_prob: float = 0.,\n        auto_augment: Optional[str] = None,\n        num_aug_repeats: int = 0,\n        num_aug_splits: int = 0,\n        interpolation: str = 'bilinear',\n        mean: Tuple[float, ...] = IMAGENET_DEFAULT_MEAN,\n        std: Tuple[float, ...] 
= IMAGENET_DEFAULT_STD,\n        crop_pct: Optional[float] = None,\n        crop_mode: Optional[str] = None,\n        crop_border_pixels: Optional[int] = None,\n\n        num_workers: int = 4,\n        distributed: bool = False,\n        rank: int = 0,\n        world_size: int = 1,\n        seed: int = 42,\n        epoch: int = 0,\n        use_prefetcher: bool = True,\n        pin_memory: bool = True,\n        img_dtype: torch.dtype = torch.float32,\n        device: Union[str, torch.device] = torch.device('cuda'),\n        persistent_workers: bool = True,\n        worker_seeding: str = 'all',\n    ) -> Union[torch.utils.data.DataLoader, NaFlexPrefetchLoader]:\n    \"\"\"Create a data loader with dynamic sequence length sampling for training.\n\n    Args:\n        dataset: Dataset to load from.\n        patch_size: Single patch size to use.\n        patch_size_choices: List of patch sizes for variable patch size training.\n        patch_size_choice_probs: Probabilities for each patch size choice.\n        train_seq_lens: Training sequence lengths for dynamic batching.\n        max_seq_len: Fixed sequence length for validation.\n        batch_size: Batch size for validation and max training sequence length.\n        is_training: Whether this is for training (enables dynamic batching).\n        mixup_fn: Optional mixup function.\n        no_aug: Disable augmentation.\n        re_prob: Random erasing probability.\n        re_mode: Random erasing mode.\n        re_count: Maximum number of erasing rectangles.\n        re_split: Random erasing split flag.\n        train_crop_mode: Training crop mode.\n        scale: Scale range for random resize crop.\n        ratio: Aspect ratio range for random resize crop.\n        hflip: Horizontal flip probability.\n        vflip: Vertical flip probability.\n        color_jitter: Color jitter factor.\n        color_jitter_prob: Color jitter probability.\n        grayscale_prob: Grayscale conversion probability.\n        
gaussian_blur_prob: Gaussian blur probability.\n        auto_augment: AutoAugment policy.\n        num_aug_repeats: Number of augmentation repeats.\n        num_aug_splits: Number of augmentation splits.\n        interpolation: Interpolation method.\n        mean: Normalization mean values.\n        std: Normalization standard deviation values.\n        crop_pct: Crop percentage for validation.\n        crop_mode: Crop mode.\n        crop_border_pixels: Crop border pixels.\n        num_workers: Number of data loading workers.\n        distributed: Whether using distributed training.\n        rank: Process rank for distributed training.\n        world_size: Total number of processes.\n        seed: Random seed.\n        epoch: Starting epoch.\n        use_prefetcher: Whether to use prefetching.\n        pin_memory: Whether to pin memory.\n        img_dtype: Image data type.\n        device: Device to move tensors to.\n        persistent_workers: Whether to use persistent workers.\n        worker_seeding: Worker seeding mode.\n\n    Returns:\n        DataLoader or NaFlexPrefetchLoader instance.\n    \"\"\"\n\n    if is_training:\n        # For training, use the dynamic sequence length mechanism\n        assert num_aug_repeats == 0, 'Augmentation repeats not currently supported in NaFlex loader'\n\n        transform_factory = partial(\n            create_transform,\n            is_training=True,\n            no_aug=no_aug,\n            train_crop_mode=train_crop_mode,\n            scale=scale,\n            ratio=ratio,\n            hflip=hflip,\n            vflip=vflip,\n            color_jitter=color_jitter,\n            color_jitter_prob=color_jitter_prob,\n            grayscale_prob=grayscale_prob,\n            gaussian_blur_prob=gaussian_blur_prob,\n            auto_augment=auto_augment,\n            interpolation=interpolation,\n            mean=mean,\n            std=std,\n            crop_pct=crop_pct,\n            crop_mode=crop_mode,\n            
crop_border_pixels=crop_border_pixels,\n            re_prob=re_prob,\n            re_mode=re_mode,\n            re_count=re_count,\n            use_prefetcher=use_prefetcher,\n            naflex=True,\n        )\n\n        max_train_seq_len = max(train_seq_lens)\n        max_tokens_per_batch = batch_size * max_train_seq_len\n\n        if isinstance(dataset, torch.utils.data.IterableDataset):\n            assert False, \"IterableDataset Wrapper is a WIP\"\n\n        naflex_dataset = NaFlexMapDatasetWrapper(\n            dataset,\n            transform_factory=transform_factory,\n            patch_size=patch_size,\n            patch_size_choices=patch_size_choices,\n            patch_size_choice_probs=patch_size_choice_probs,\n            seq_lens=train_seq_lens,\n            max_tokens_per_batch=max_tokens_per_batch,\n            mixup_fn=mixup_fn,\n            seed=seed,\n            distributed=distributed,\n            rank=rank,\n            world_size=world_size,\n            shuffle=True,\n            epoch=epoch,\n        )\n\n        # NOTE: Collation is handled by the dataset wrapper for training\n        loader = torch.utils.data.DataLoader(\n            naflex_dataset,\n            batch_size=None,\n            shuffle=False,\n            num_workers=num_workers,\n            sampler=None,\n            pin_memory=pin_memory,\n            worker_init_fn=partial(_worker_init, worker_seeding=worker_seeding),\n            persistent_workers=persistent_workers\n        )\n\n        if use_prefetcher:\n            loader = NaFlexPrefetchLoader(\n                loader,\n                mean=mean,\n                std=std,\n                img_dtype=img_dtype,\n                device=device,\n                re_prob=re_prob,\n                re_mode=re_mode,\n                re_count=re_count,\n            )\n\n    else:\n        # For validation, use fixed sequence length (unchanged)\n        dataset.transform = create_transform(\n            
is_training=False,\n            interpolation=interpolation,\n            mean=mean,\n            std=std,\n            # FIXME add crop args when sequence transforms support crop modes\n            use_prefetcher=use_prefetcher,\n            naflex=True,\n            patch_size=patch_size,\n            max_seq_len=max_seq_len,\n            patchify=True,\n        )\n\n        # Create the collator\n        collate_fn = NaFlexCollator(max_seq_len=max_seq_len)\n\n        # Handle distributed training\n        sampler = None\n        if distributed and not isinstance(dataset, torch.utils.data.IterableDataset):\n            # For validation, use OrderedDistributedSampler\n            from timm.data.distributed_sampler import OrderedDistributedSampler\n            sampler = OrderedDistributedSampler(dataset)\n\n        loader = torch.utils.data.DataLoader(\n            dataset,\n            batch_size=batch_size,\n            shuffle=False,\n            num_workers=num_workers,\n            sampler=sampler,\n            collate_fn=collate_fn,\n            pin_memory=pin_memory,\n            drop_last=False,\n        )\n\n        if use_prefetcher:\n            loader = NaFlexPrefetchLoader(\n                loader,\n                mean=mean,\n                std=std,\n                img_dtype=img_dtype,\n                device=device,\n            )\n\n    return loader\n"
  },
  {
    "path": "timm/data/naflex_mixup.py",
    "content": "\"\"\"Variable‑size Mixup / CutMix utilities for NaFlex data loaders.\n\nThis module provides:\n\n* `mix_batch_variable_size` – pixel‑level Mixup/CutMix that operates on a\n  list of images whose spatial sizes differ, mixing only their central overlap\n  so no resizing is required.\n* `pairwise_mixup_target` – builds soft‑label targets that exactly match the\n  per‑sample pixel provenance produced by the mixer.\n* `NaFlexMixup` – a callable functor that wraps the two helpers and stores\n  all augmentation hyper‑parameters in one place, making it easy to plug into\n  different dataset wrappers.\n\nHacked together by / Copyright 2025, Ross Wightman, Hugging Face\n\"\"\"\nimport math\nimport random\nfrom typing import Dict, List, Tuple, Union\n\nimport torch\n\n\ndef mix_batch_variable_size(\n        imgs: List[torch.Tensor],\n        *,\n        mixup_alpha: float = 0.8,\n        cutmix_alpha: float = 1.0,\n        switch_prob: float = 0.5,\n        local_shuffle: int = 4,\n) -> Tuple[List[torch.Tensor], List[float], Dict[int, int]]:\n    \"\"\"Apply Mixup or CutMix on a batch of variable-sized images.\n\n    Sorts images by aspect ratio and pairs neighboring samples. Only the mutual\n    central overlap region of each pair is mixed.\n\n    Args:\n        imgs: List of transformed images shaped (C, H, W).\n        mixup_alpha: Beta distribution alpha for Mixup. Set to 0 to disable.\n        cutmix_alpha: Beta distribution alpha for CutMix. 
Set to 0 to disable.\n        switch_prob: Probability of using CutMix when both modes are enabled.\n        local_shuffle: Size of local windows for shuffling after aspect sorting.\n\n    Returns:\n        Tuple of (mixed_imgs, lam_list, pair_to) where:\n            - mixed_imgs: List of mixed images\n            - lam_list: Per-sample lambda values representing mixing degree\n            - pair_to: Mapping i -> j of which sample was mixed with which\n    \"\"\"\n    if len(imgs) < 2:\n        raise ValueError(\"Need at least two images to perform Mixup/CutMix.\")\n\n    # Decide augmentation mode and raw λ\n    if mixup_alpha > 0.0 and cutmix_alpha > 0.0:\n        use_cutmix = torch.rand(()).item() < switch_prob\n        alpha = cutmix_alpha if use_cutmix else mixup_alpha\n    elif mixup_alpha > 0.0:\n        use_cutmix = False\n        alpha = mixup_alpha\n    elif cutmix_alpha > 0.0:\n        use_cutmix = True\n        alpha = cutmix_alpha\n    else:\n        raise ValueError(\"Both mixup_alpha and cutmix_alpha are zero – nothing to do.\")\n\n    lam_raw = torch.distributions.Beta(alpha, alpha).sample().item()\n    lam_raw = max(0.0, min(1.0, lam_raw))  # numerical safety\n\n    # Pair images by nearest aspect ratio\n    order = sorted(range(len(imgs)), key=lambda i: imgs[i].shape[2] / imgs[i].shape[1])\n    if local_shuffle > 1:\n        for start in range(0, len(order), local_shuffle):\n            random.shuffle(order[start:start + local_shuffle])\n\n    pair_to: Dict[int, int] = {}\n    for a, b in zip(order[::2], order[1::2]):\n        pair_to[a] = b\n        pair_to[b] = a\n\n    odd_one = order[-1] if len(imgs) % 2 else None\n\n    mixed_imgs: List[torch.Tensor] = [None] * len(imgs)\n    lam_list: List[float] = [1.0] * len(imgs)\n\n    for i in range(len(imgs)):\n        if i == odd_one:\n            mixed_imgs[i] = imgs[i]\n            continue\n\n        j = pair_to[i]\n        xi, xj = imgs[i], imgs[j]\n        _, hi, wi = xi.shape\n        _, hj, wj 
= xj.shape\n        dest_area = hi * wi\n\n        # Central overlap common to both images\n        oh, ow = min(hi, hj), min(wi, wj)\n        overlap_area = oh * ow\n        top_i, left_i = (hi - oh) // 2, (wi - ow) // 2\n        top_j, left_j = (hj - oh) // 2, (wj - ow) // 2\n\n        xi = xi.clone()\n        if use_cutmix:\n            # CutMix: random rectangle inside the overlap\n            cut_ratio = math.sqrt(1.0 - lam_raw)\n            ch, cw = int(oh * cut_ratio), int(ow * cut_ratio)\n            cut_area = ch * cw\n            y_off = random.randint(0, oh - ch)\n            x_off = random.randint(0, ow - cw)\n\n            yl_i, xl_i = top_i + y_off, left_i + x_off\n            yl_j, xl_j = top_j + y_off, left_j + x_off\n            xi[:, yl_i: yl_i + ch, xl_i: xl_i + cw] = xj[:, yl_j: yl_j + ch, xl_j: xl_j + cw]\n            mixed_imgs[i] = xi\n\n            corrected_lam = 1.0 - cut_area / float(dest_area)\n            lam_list[i] = corrected_lam\n        else:\n            # Mixup: blend the entire overlap region\n            patch_i = xi[:, top_i:top_i + oh, left_i:left_i + ow]\n            patch_j = xj[:, top_j:top_j + oh, left_j:left_j + ow]\n\n            blended = patch_i.mul(lam_raw).add_(patch_j, alpha=1.0 - lam_raw)\n            xi[:, top_i:top_i + oh, left_i:left_i + ow] = blended\n            mixed_imgs[i] = xi\n\n            corrected_lam = (dest_area - overlap_area) / dest_area + lam_raw * overlap_area / dest_area\n            lam_list[i] = corrected_lam\n\n    return mixed_imgs, lam_list, pair_to\n\n\ndef smoothed_sparse_target(\n        targets: torch.Tensor,\n        *,\n        num_classes: int,\n        smoothing: float = 0.0,\n) -> torch.Tensor:\n    off_val = smoothing / num_classes\n    on_val = 1.0 - smoothing + off_val\n\n    y_onehot = torch.full(\n        (targets.size(0), num_classes),\n        off_val,\n        dtype=torch.float32,\n        device=targets.device\n    )\n    y_onehot.scatter_(1, targets.unsqueeze(1), 
on_val)\n    return y_onehot\n\n\ndef pairwise_mixup_target(\n        targets: torch.Tensor,\n        pair_to: Dict[int, int],\n        lam_list: List[float],\n        *,\n        num_classes: int,\n        smoothing: float = 0.0,\n) -> torch.Tensor:\n    \"\"\"Create soft targets that match the pixel‑level mixing performed.\n\n    Args:\n        targets: (B,) tensor of integer class indices.\n        pair_to: Mapping of sample index to its mixed partner as returned by mix_batch_variable_size().\n        lam_list: Per‑sample fractions of own pixels, also from the mixer.\n        num_classes: Total number of classes in the dataset.\n        smoothing: Label‑smoothing value in the range [0, 1).\n\n    Returns:\n        Tensor of shape (B, num_classes) whose rows sum to 1.\n    \"\"\"\n    y_onehot = smoothed_sparse_target(targets, num_classes=num_classes, smoothing=smoothing)\n    targets = y_onehot.clone()\n    for i, j in pair_to.items():\n        lam = lam_list[i]\n        targets[i].mul_(lam).add_(y_onehot[j], alpha=1.0 - lam)\n\n    return targets\n\n\nclass NaFlexMixup:\n    \"\"\"Callable wrapper that combines mixing and target generation.\"\"\"\n\n    def __init__(\n            self,\n            *,\n            num_classes: int,\n            mixup_alpha: float = 0.8,\n            cutmix_alpha: float = 1.0,\n            switch_prob: float = 0.5,\n            prob: float = 1.0,\n            local_shuffle: int = 4,\n            label_smoothing: float = 0.0,\n    ) -> None:\n        \"\"\"Configure the augmentation.\n\n        Args:\n            num_classes: Total number of classes.\n            mixup_alpha: Beta α for Mixup. 0 disables Mixup.\n            cutmix_alpha: Beta α for CutMix. 
0 disables CutMix.\n            switch_prob: Probability of selecting CutMix when both modes are enabled.\n            prob: Probability of applying any mixing per batch.\n            local_shuffle: Window size used to shuffle images after aspect sorting so pairings vary between epochs.\n            label_smoothing: Label‑smoothing value. 0 disables smoothing.\n        \"\"\"\n        self.num_classes = num_classes\n        self.mixup_alpha = mixup_alpha\n        self.cutmix_alpha = cutmix_alpha\n        self.switch_prob = switch_prob\n        self.prob = prob\n        self.local_shuffle = local_shuffle\n        self.smoothing = label_smoothing\n\n    def __call__(\n            self,\n            imgs: List[torch.Tensor],\n            targets: torch.Tensor,\n    ) -> Tuple[List[torch.Tensor], List[torch.Tensor]]:\n        \"\"\"Apply the augmentation and generate matching targets.\n\n        Args:\n            imgs: List of already transformed images shaped (C, H, W).\n            targets: Hard labels with shape (B,).\n\n        Returns:\n            mixed_imgs: List of mixed images in the same order and shapes as the input.\n            targets: Soft‑label tensor shaped (B, num_classes) suitable for cross‑entropy with soft targets.\n        \"\"\"\n        if not isinstance(targets, torch.Tensor):\n            targets = torch.tensor(targets)\n\n        if random.random() > self.prob:\n            targets = smoothed_sparse_target(targets, num_classes=self.num_classes, smoothing=self.smoothing)\n            return imgs, targets.unbind(0)\n\n        mixed_imgs, lam_list, pair_to = mix_batch_variable_size(\n            imgs,\n            mixup_alpha=self.mixup_alpha,\n            cutmix_alpha=self.cutmix_alpha,\n            switch_prob=self.switch_prob,\n            local_shuffle=self.local_shuffle,\n        )\n\n        targets = pairwise_mixup_target(\n            targets,\n            pair_to,\n            lam_list,\n            num_classes=self.num_classes,\n            
smoothing=self.smoothing,\n        )\n        return mixed_imgs, targets.unbind(0)\n"
  },
  {
    "path": "timm/data/naflex_random_erasing.py",
    "content": "\"\"\"Patch-level random erasing augmentation for NaFlex Vision Transformers.\n\nThis module implements random erasing specifically designed for patchified images,\noperating at the patch granularity rather than pixel level. It supports two modes:\n- 'patch': Randomly erases individual patches (speckle-like noise)\n- 'region': Erases contiguous rectangular regions of patches (similar to original RandomErasing)\n\nThe implementation is coordinate-aware, respecting valid patch boundaries and supporting\nvariable patch sizes in NaFlex training.\n\nHacked together by / Copyright 2025, Ross Wightman, Hugging Face\n\"\"\"\n\nimport random\nimport math\nfrom typing import Optional, Union, Tuple\n\nimport torch\n\n\nclass PatchRandomErasing:\n    \"\"\"Random erasing for patchified images in NaFlex format.\n\n    Supports two modes:\n    1. 'patch': Simple mode that erases randomly selected valid patches\n    2. 'region': Erases rectangular regions at patch granularity\n    \"\"\"\n\n    def __init__(\n            self,\n            erase_prob: float = 0.5,\n            patch_drop_prob: float = 0.0,\n            min_count: int = 1,\n            max_count: Optional[int] = None,\n            min_area: float = 0.02,\n            max_area: float = 1 / 3,\n            min_aspect: float = 0.3,\n            max_aspect: Optional[float] = None,\n            mode: str = 'const',\n            value: float = 0.,\n            spatial_mode: str = 'region',\n            num_splits: int = 0,\n            device: Union[str, torch.device] = 'cuda',\n    ) -> None:\n        \"\"\"Initialize PatchRandomErasing.\n\n        Args:\n            erase_prob: Probability that the Random Erasing operation will be performed.\n            patch_drop_prob: Patch dropout probability. 
Remove random patches instead of erasing.\n            min_count: Minimum number of erasing operations.\n            max_count: Maximum number of erasing operations.\n            min_area: Minimum percentage of valid patches/area to erase.\n            max_area: Maximum percentage of valid patches/area to erase.\n            min_aspect: Minimum aspect ratio of erased area (only used in 'region' mode).\n            max_aspect: Maximum aspect ratio of erased area (only used in 'region' mode).\n            mode: Patch content mode, one of 'const', 'rand', or 'pixel'.\n            value: Constant value for 'const' mode.\n            spatial_mode: Erasing strategy, one of 'patch' or 'region'.\n            num_splits: Number of splits to apply erasing to (0 for all).\n            device: Computation device.\n        \"\"\"\n        self.erase_prob = erase_prob\n        self.patch_drop_prob = patch_drop_prob\n        self.min_count = min_count\n        self.max_count = max_count or min_count\n        self.min_area = min_area\n        self.max_area = max_area\n\n        # Aspect ratio params (for region mode)\n        max_aspect = max_aspect or 1 / min_aspect\n        self.log_aspect_ratio = (math.log(min_aspect), math.log(max_aspect))\n\n        # Number of splits\n        self.num_splits = num_splits\n        self.device = device\n\n        # Strategy mode\n        self.spatial_mode = spatial_mode\n        assert self.spatial_mode in ('patch', 'region')\n\n        # Value generation mode flags\n        self.erase_mode = mode.lower()\n        assert self.erase_mode in ('rand', 'pixel', 'const')\n        self.const_value = value\n        self.unique_noise_per_patch = True\n\n    def _get_values(\n            self,\n            shape: Union[Tuple[int, ...], torch.Size],\n            value: Optional[torch.Tensor] = None,\n            dtype: torch.dtype = torch.float32,\n            device: Optional[Union[str, torch.device]] = None\n    ) -> torch.Tensor:\n        
\"\"\"Generate values for erased patches based on the specified mode.\n\n        Args:\n            shape: Shape of patches to erase.\n            value: Value to use in const (or rand) mode.\n            dtype: Data type to use.\n            device: Device to use.\n\n        Returns:\n            Tensor with values for erasing patches.\n        \"\"\"\n        device = device or self.device\n        if self.erase_mode == 'pixel':\n            # only mode with erase shape that includes pixels\n            return torch.empty(shape, dtype=dtype, device=device).normal_()\n        else:\n            shape = (1, 1, shape[-1]) if len(shape) == 3 else (1, shape[-1])\n            if self.erase_mode == 'const' or value is not None:\n                erase_value = value or self.const_value\n                if isinstance(erase_value, (int, float)):\n                    values = torch.full(shape, erase_value, dtype=dtype, device=device)\n                else:\n                    erase_value = torch.tensor(erase_value, dtype=dtype, device=device)\n                    values = torch.expand_copy(erase_value, shape)\n            else:\n                values = torch.empty(shape, dtype=dtype, device=device).normal_()\n            return values\n\n    def _drop_patches(\n            self,\n            patches: torch.Tensor,\n            patch_coord: torch.Tensor,\n            patch_valid: torch.Tensor,\n    ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:\n        \"\"\"Patch Dropout.\n\n        Fully drops patches from datastream. 
Only mode that saves compute BUT requires support\n        for non-contiguous patches and associated patch coordinate and valid handling.\n\n        Args:\n            patches: Tensor of patches.\n            patch_coord: Tensor of patch coordinates.\n            patch_valid: Tensor indicating which patches are valid.\n\n        Returns:\n            Tuple of (patches, patch_coord, patch_valid) with some patches dropped.\n        \"\"\"\n        # FIXME WIP, not completed. Downstream support in model needed for non-contiguous valid patches\n        if random.random() > self.erase_prob:\n            return\n\n        # Get indices of valid patches\n        valid_indices = torch.nonzero(patch_valid, as_tuple=True)[0].tolist()\n\n        # Skip if no valid patches\n        if not valid_indices:\n            return patches, patch_coord, patch_valid\n\n        num_valid = len(valid_indices)\n        if self.patch_drop_prob:\n            # patch dropout mode, completely remove dropped patches (FIXME needs downstream support in model)\n            num_keep = max(1, int(num_valid * (1. - self.patch_drop_prob)))\n            keep_indices = torch.argsort(torch.randn(1, num_valid, device=self.device), dim=-1)[:, :num_keep]\n            # maintain patch order, possibly useful for debug / visualization\n            keep_indices = keep_indices.sort(dim=-1)[0]\n            patches = patches.gather(1, keep_indices.unsqueeze(-1).expand((-1, -1) + patches.shape[2:]))\n\n        return patches, patch_coord, patch_valid\n\n    def _erase_patches(\n            self,\n            patches: torch.Tensor,\n            patch_coord: torch.Tensor,\n            patch_valid: torch.Tensor,\n            patch_shape: torch.Size,\n            dtype: torch.dtype = torch.float32,\n    ) -> None:\n        \"\"\"Apply erasing by selecting individual patches randomly.\n\n        The simplest mode, aligned on patch boundaries. 
Behaves similarly to speckle or 'sprinkles'\n        noise augmentation at patch size.\n\n        Args:\n            patches: Tensor of patches to modify in-place.\n            patch_coord: Tensor of patch coordinates.\n            patch_valid: Tensor indicating which patches are valid.\n            patch_shape: Shape of individual patches.\n            dtype: Data type for generated values.\n        \"\"\"\n        if random.random() > self.erase_prob:\n            return\n\n        # Get indices of valid patches\n        valid_indices = torch.nonzero(patch_valid, as_tuple=True)[0]\n        num_valid = len(valid_indices)\n        if num_valid == 0:\n            return\n\n        count = random.randint(self.min_count, self.max_count)\n        # Determine how many valid patches to erase from RE min/max count and area args\n        max_erase = min(num_valid, max(1, int(num_valid * count * self.max_area)))\n        min_erase = max(1, int(num_valid * count * self.min_area))\n        num_erase = random.randint(min_erase, max_erase)\n\n        # Randomly select valid patches to erase\n        erase_idx = valid_indices[torch.randperm(num_valid, device=patches.device)[:num_erase]]\n\n        if self.unique_noise_per_patch and self.erase_mode == 'pixel':\n            # generate unique noise for the whole selection of patches\n            fill_shape = (num_erase,) + patch_shape\n        else:\n            fill_shape = patch_shape\n\n        patches[erase_idx] = self._get_values(fill_shape, dtype=dtype)\n\n    def _erase_region(\n            self,\n            patches: torch.Tensor,\n            patch_coord: torch.Tensor,\n            patch_valid: torch.Tensor,\n            patch_shape: torch.Size,\n            dtype: torch.dtype = torch.float32,\n    ) -> None:\n        \"\"\"Apply erasing by selecting rectangular regions of patches randomly.\n\n        Closer to the original RandomErasing implementation. 
Erases\n        spatially contiguous rectangular regions of patches (aligned with patches).\n\n        Args:\n            patches: Tensor of patches to modify in-place.\n            patch_coord: Tensor of patch coordinates.\n            patch_valid: Tensor indicating which patches are valid.\n            patch_shape: Shape of individual patches.\n            dtype: Data type for generated values.\n        \"\"\"\n        if random.random() > self.erase_prob:\n            return\n\n        # Determine grid dimensions from coordinates\n        valid_coord = patch_coord[patch_valid]\n        if len(valid_coord) == 0:\n            return  # No valid patches\n        max_y = valid_coord[:, 0].max().item() + 1\n        max_x = valid_coord[:, 1].max().item() + 1\n        grid_h, grid_w = max_y, max_x\n        total_area = grid_h * grid_w\n        ys, xs = patch_coord[:, 0], patch_coord[:, 1]\n\n        count = random.randint(self.min_count, self.max_count)\n        for _ in range(count):\n            # Try to select a valid region to erase (multiple attempts)\n            for attempt in range(10):\n                # Sample random area and aspect ratio\n                target_area = random.uniform(self.min_area, self.max_area) * total_area\n                aspect_ratio = math.exp(random.uniform(*self.log_aspect_ratio))\n\n                # Calculate region height and width\n                h = int(round(math.sqrt(target_area * aspect_ratio)))\n                w = int(round(math.sqrt(target_area / aspect_ratio)))\n\n                if h > grid_h or w > grid_w:\n                    continue  # try again\n\n                # Calculate region patch bounds\n                top = random.randint(0, grid_h - h)\n                left = random.randint(0, grid_w - w)\n                bottom, right = top + h, left + w\n\n                # Region test\n                region_mask = (\n                        (ys >= top) & (ys < bottom) &\n                        (xs >= left) & (xs < 
right) &\n                        patch_valid\n                )\n                num_selected = int(region_mask.sum().item())\n                if not num_selected:\n                    continue  # no patch actually falls inside – try again\n\n                if self.unique_noise_per_patch and self.erase_mode == 'pixel':\n                    # generate unique noise for the whole region\n                    fill_shape = (num_selected,) + patch_shape\n                else:\n                    fill_shape = patch_shape\n\n                patches[region_mask] = self._get_values(fill_shape, dtype=dtype)\n                # Successfully applied erasing, exit the loop\n                break\n\n    def __call__(\n            self,\n            patches: torch.Tensor,\n            patch_coord: torch.Tensor,\n            patch_valid: Optional[torch.Tensor] = None,\n    ) -> torch.Tensor:\n        \"\"\"Apply random patch erasing.\n\n        Args:\n            patches: Tensor of shape [B, N, P*P, C] or [B, N, Ph, Pw, C].\n            patch_coord: Tensor of shape [B, N, 2] with (y, x) coordinates.\n            patch_valid: Boolean tensor of shape [B, N] indicating which patches are valid.\n\n        Returns:\n            Erased patches tensor of same shape as input.\n        \"\"\"\n        if patches.ndim == 4:\n            batch_size, num_patches, patch_dim, channels = patches.shape\n        elif patches.ndim == 5:\n            batch_size, num_patches, patch_h, patch_w, channels = patches.shape\n        else:\n            assert False\n        patch_shape = patches.shape[2:]\n        # patch_shape ==> shape of patches to fill (h, w, c) or (h * w, c)\n\n        # Create default valid mask if not provided\n        if patch_valid is None:\n            patch_valid = torch.ones((batch_size, num_patches), dtype=torch.bool, device=patches.device)\n\n        # Skip the first part of the batch if num_splits is set\n        batch_start = batch_size // self.num_splits if self.num_splits 
> 1 else 0\n\n        # Apply erasing to each batch element\n        for i in range(batch_start, batch_size):\n            if self.patch_drop_prob:\n                assert False, \"WIP, not completed\"\n                self._drop_patches(\n                    patches[i],\n                    patch_coord[i],\n                    patch_valid[i],\n                )\n            elif self.spatial_mode == 'patch':\n                # FIXME we could vectorize patch mode across batch, worth the effort?\n                self._erase_patches(\n                    patches[i],\n                    patch_coord[i],\n                    patch_valid[i],\n                    patch_shape,\n                    patches.dtype\n                )\n            elif self.spatial_mode == 'region':\n                self._erase_region(\n                    patches[i],\n                    patch_coord[i],\n                    patch_valid[i],\n                    patch_shape,\n                    patches.dtype\n                )\n            else:\n                assert False\n\n        return patches\n\n    def __repr__(self) -> str:\n        \"\"\"Return string representation of PatchRandomErasing.\n\n        Returns:\n            String representation of the object.\n        \"\"\"\n        fs = self.__class__.__name__ + f'(p={self.erase_prob}, mode={self.erase_mode}'\n        fs += f', spatial={self.spatial_mode}, area=({self.min_area}, {self.max_area}))'\n        fs += f', count=({self.min_count}, {self.max_count}))'\n        return fs"
  },
  {
    "path": "timm/data/naflex_transforms.py",
    "content": "\"\"\" NaFlex (NaViT + FlexiViT) Transforms and Collation\n\nImplements PyTorch versions of the transforms described in the NaViT and FlexiViT papers:\n- NaViT: https://arxiv.org/abs/2307.14995\n- FlexiViT: https://arxiv.org/abs/2212.08013\n\nEnables variable resolution/aspect ratio image handling with efficient patching.\n\nHacked together by / Copyright 2025, Ross Wightman, Hugging Face\n\"\"\"\n\nimport math\nimport random\nimport warnings\nfrom typing import Dict, List, Optional, Sequence, Tuple, Union\n\nimport torch\nfrom PIL import Image\nfrom torchvision import transforms\nfrom torchvision.transforms import functional as F\nfrom torchvision.transforms.functional import InterpolationMode\n\nfrom .transforms import str_to_interp_mode, crop_or_pad, center_crop_or_pad\n\n\ndef get_image_size_for_seq(\n        image_hw: Tuple[int, int],\n        patch_size: Union[int, Tuple[int, int]] = 16,\n        max_seq_len: int = 1024,\n        divisible_by_patch: bool = True,\n        max_ratio: Optional[float] = None,\n        eps: float = 1e-5,\n) -> Tuple[float, Tuple[int, int]]:\n    \"\"\"Determine scaling ratio and image size for sequence length constraint.\n\n    Calculates the scaling ratio needed so that when image_hw is scaled,\n    the total number of resulting patches does not exceed max_seq_len.\n\n    Args:\n        image_hw: Original image dimensions (height, width).\n        patch_size: Patch dimensions. 
If int, patches are square.\n        max_seq_len: Maximum allowed sequence length.\n        divisible_by_patch: Whether resulting dimensions must be divisible by patch_size.\n        max_ratio: Optional cap on scaling ratio to prevent excessive upsampling.\n        eps: Convergence threshold for binary search.\n\n    Returns:\n        Tuple of (ratio, target_hw) where ratio is the scaling factor and\n        target_hw is the resulting (height, width) after scaling.\n    \"\"\"\n\n    # Handle patch size input, extract patch_h, patch_w\n    if isinstance(patch_size, int):\n        patch_h, patch_w = patch_size, patch_size\n    else:\n        # Assume it's a tuple/list: (patch_h, patch_w)\n        if len(patch_size) != 2:\n            raise ValueError(\"patch_size tuple must have exactly two elements (patch_h, patch_w).\")\n        patch_h, patch_w = patch_size\n\n    # Safety checks\n    if patch_h <= 0 or patch_w <= 0:\n        raise ValueError(\"patch_size dimensions must be positive.\")\n\n    def prepare_target_hw(ratio):\n        \"\"\"Scale image_hw by ratio and optionally round dimensions to multiples of patch_h, patch_w.\"\"\"\n        scaled_h = image_hw[0] * ratio\n        scaled_w = image_hw[1] * ratio\n\n        # If we need the result to be divisible by patch_size\n        if divisible_by_patch:\n            scaled_h = patch_h * math.ceil(scaled_h / patch_h)\n            scaled_w = patch_w * math.ceil(scaled_w / patch_w)\n\n        # Ensure at least one patch in each dimension\n        scaled_h = int(max(scaled_h, patch_h))\n        scaled_w = int(max(scaled_w, patch_w))\n\n        return scaled_h, scaled_w\n\n    def is_feasible(ratio):\n        \"\"\"Check if scaling by 'ratio' keeps patch count within max_seq_len.\"\"\"\n        t_h, t_w = prepare_target_hw(ratio)\n\n        # Each dimension is already a multiple of patch_h, patch_w if divisible_by_patch=True.\n        # Use integer division to count patches.\n        num_patches_h = t_h // patch_h\n 
       num_patches_w = t_w // patch_w\n        seq_len = num_patches_h * num_patches_w\n\n        return seq_len <= max_seq_len\n\n    # Binary search boundaries\n    lb = eps / 10.0\n    rb = 100.0\n\n    # Standard binary search loop\n    while (rb - lb) >= eps:\n        mid = (lb + rb) / 2.0\n        if is_feasible(mid):\n            lb = mid\n        else:\n            rb = mid\n\n    # The final ratio from the binary search\n    ratio = lb\n\n    # If max_ratio is provided, clamp it to prevent upsampling beyond that threshold\n    if max_ratio is not None:\n        ratio = min(ratio, max_ratio)\n\n    # Final checks\n    if ratio <= eps:\n        raise ValueError(\"Binary search failed - image might be too large?\")\n    if ratio >= 100.0:\n        raise ValueError(\"Binary search failed - image might be too small?\")\n\n    # Prepare the final target dimensions with the possibly clamped ratio\n    target_hw = prepare_target_hw(ratio)\n    return ratio, target_hw\n\n\n_RANDOM_INTERPOLATION = (str_to_interp_mode('bilinear'), str_to_interp_mode('bicubic'))\n\n\nclass ResizeToSequence(torch.nn.Module):\n    \"\"\"Resize image to fit within a maximum sequence length constraint when patchified.\n\n    This maintains aspect ratio while ensuring the resulting image, when divided into patches,\n    will not exceed the specified maximum sequence length.\n    \"\"\"\n    def __init__(\n            self,\n            patch_size: int,\n            max_seq_len: int = 1024,\n            divisible_by_patch: bool = True,\n            max_ratio: Optional[float] = None,\n            interpolation: Union[str, InterpolationMode, Tuple[InterpolationMode, ...]] = 'bicubic',\n        ) -> None:\n        \"\"\"Initialize ResizeToSequence transform.\n\n        Args:\n            patch_size: Size of patches.\n            max_seq_len: Maximum sequence length constraint.\n            divisible_by_patch: Whether dimensions must be divisible by patch_size.\n            max_ratio: Optional 
cap on scaling ratio.\n            interpolation: Interpolation method or methods.\n        \"\"\"\n        super().__init__()\n        self.patch_size = patch_size\n        self.max_seq_len = max_seq_len\n        self.divisible_by_patch = divisible_by_patch\n        self.max_ratio = max_ratio\n        if isinstance(interpolation, str):\n            if interpolation == 'random':\n                self.interpolation = _RANDOM_INTERPOLATION\n            else:\n                self.interpolation = str_to_interp_mode(interpolation)\n        else:\n            self.interpolation = interpolation\n\n\n    def forward(self, img: torch.Tensor) -> torch.Tensor:\n        \"\"\"Resize image to maintain aspect ratio and fit sequence constraint.\n\n        Args:\n            img: Input image tensor.\n\n        Returns:\n            Resized image tensor.\n        \"\"\"\n        _, h, w = transforms.functional.get_dimensions(img)\n\n        _, target_hw = get_image_size_for_seq(\n            (h, w),\n            self.patch_size,\n            self.max_seq_len,\n            divisible_by_patch=self.divisible_by_patch,\n            max_ratio=self.max_ratio,\n        )\n\n        if isinstance(self.interpolation, (tuple, list)):\n            interpolation = random.choice(self.interpolation)\n        else:\n            interpolation = self.interpolation\n\n        resized_img = transforms.functional.resize(img, target_hw, interpolation=interpolation, antialias=True)\n\n        return resized_img\n\n\nclass ResizeKeepRatioToSequence(torch.nn.Module):\n    \"\"\"\n    Resize and Keep Aspect Ratio, adapted to fit sequence length constraints.\n    \"\"\"\n\n    def __init__(\n            self,\n            patch_size=16,\n            max_sequence_len=1024,\n            divisible_by_patch=True,\n            longest=0.,\n            interpolation='bilinear',\n            random_scale_prob=0.,\n            random_scale_range=(0.85, 1.05),\n            random_scale_area=False,\n            
random_aspect_prob=0.,\n            random_aspect_range=(0.9, 1.11),\n            max_ratio=None,\n    ):\n        \"\"\"\n        Args:\n            patch_size: Size of patches (int or tuple of (patch_h, patch_w))\n            max_sequence_len: Maximum allowed sequence length for the resulting image\n            divisible_by_patch: If True, ensure dimensions are divisible by patch_size\n            longest: Float between 0-1 where 0=shortest side, 1=longest side determines scale\n            interpolation: Interpolation method for resizing\n            random_scale_prob: Probability of applying random scaling\n            random_scale_range: Range for random scaling factor (min, max)\n            random_scale_area: If True, scale factors affect area (√ factor)\n            random_aspect_prob: Probability of applying random aspect ratio jittering\n            random_aspect_range: Range for random aspect ratio (min, max)\n            max_ratio: Maximum allowed scaling ratio\n        \"\"\"\n        super().__init__()\n        self.patch_size = patch_size\n        self.max_sequence_len = max_sequence_len\n        self.divisible_by_patch = divisible_by_patch\n        self.longest = float(longest)\n\n        if interpolation == 'random':\n            self.interpolation = _RANDOM_INTERPOLATION\n        else:\n            self.interpolation = str_to_interp_mode(interpolation)\n\n        self.random_scale_prob = random_scale_prob\n        self.random_scale_range = random_scale_range\n        self.random_scale_area = random_scale_area\n        self.random_aspect_prob = random_aspect_prob\n        self.random_aspect_range = random_aspect_range\n        self.max_ratio = max_ratio\n\n    @staticmethod\n    def get_params(\n            img,\n            patch_size,\n            max_sequence_len,\n            divisible_by_patch,\n            longest,\n            random_scale_prob=0.,\n            random_scale_range=(1.0, 1.33),\n            random_scale_area=False,\n           
 random_aspect_prob=0.,\n            random_aspect_range=(0.9, 1.11),\n            max_ratio=None,\n    ):\n        \"\"\"Get parameters for resizing.\"\"\"\n        # Get image dimensions\n        img_h, img_w = F.get_dimensions(img)[1:]\n\n        # Step 1: Get the maximum allowed dimensions from sequence length constraint\n        _, target_hw = get_image_size_for_seq(\n            (img_h, img_w),\n            patch_size,\n            max_sequence_len,\n            divisible_by_patch,\n            max_ratio,\n        )\n        target_h, target_w = target_hw\n\n        # Calculate ratio based on sequence constraint\n        ratio_h = target_h / img_h\n        ratio_w = target_w / img_w\n        # Apply longest blending\n        ratio = max(ratio_h, ratio_w) * longest + min(ratio_h, ratio_w) * (1. - longest)\n\n        # Apply random scaling\n        if random_scale_prob > 0 and random.random() < random_scale_prob:\n            ratio_factor = random.uniform(random_scale_range[0], random_scale_range[1])\n            if random_scale_area:\n                # Make ratio factor equivalent to area change\n                ratio_factor = 1. 
/ math.sqrt(ratio_factor)\n            ratio_factor = (ratio_factor, ratio_factor)\n        else:\n            ratio_factor = (1., 1.)\n\n        # Apply random aspect\n        if random_aspect_prob > 0 and random.random() < random_aspect_prob:\n            log_aspect = (math.log(random_aspect_range[0]), math.log(random_aspect_range[1]))\n            aspect_factor = math.exp(random.uniform(*log_aspect))\n            aspect_factor = math.sqrt(aspect_factor)\n            # Apply aspect ratio jittering\n            ratio_factor = (ratio_factor[0] / aspect_factor, ratio_factor[1] * aspect_factor)\n\n        # Calculate final dimensions\n        size = [round(dim * ratio * f) for dim, f in zip((img_h, img_w), ratio_factor)]\n\n        # Ensure dimensions satisfy sequence constraint and are divisible by patch size\n        if isinstance(patch_size, int):\n            ph, pw = patch_size, patch_size\n        else:\n            ph, pw = patch_size\n\n        # Ensure dimensions are at least one patch\n        size[0] = max(size[0], ph)\n        size[1] = max(size[1], pw)\n\n        # Make divisible by patch size if needed\n        if divisible_by_patch:\n            size[0] = ph * math.ceil(size[0] / ph)\n            size[1] = pw * math.ceil(size[1] / pw)\n\n        # Verify we haven't exceeded sequence length\n        num_patches_h = size[0] // ph\n        num_patches_w = size[1] // pw\n        seq_len = num_patches_h * num_patches_w\n\n        if seq_len > max_sequence_len:\n            # Scale back down to fit sequence constraint\n            scale_back = math.sqrt(max_sequence_len / seq_len)\n            size[0] = int(size[0] * scale_back)\n            size[1] = int(size[1] * scale_back)\n\n            # Ensure divisible by patch size after scaling back\n            if divisible_by_patch:\n                size[0] = ph * math.ceil(size[0] / ph)\n                size[1] = pw * math.ceil(size[1] / pw)\n\n        return size\n\n    def forward(self, img):\n        \"\"\"\n 
       Resize the image with aspect ratio preservation and sequence length constraints.\n        \"\"\"\n        size = self.get_params(\n            img,\n            self.patch_size,\n            self.max_sequence_len,\n            self.divisible_by_patch,\n            self.longest,\n            self.random_scale_prob,\n            self.random_scale_range,\n            self.random_scale_area,\n            self.random_aspect_prob,\n            self.random_aspect_range,\n            self.max_ratio,\n        )\n\n        if isinstance(self.interpolation, (tuple, list)):\n            interpolation = random.choice(self.interpolation)\n        else:\n            interpolation = self.interpolation\n\n        return F.resize(img, size, interpolation)\n\n    def __repr__(self):\n        interpolate_str = \"random\" if isinstance(self.interpolation, (tuple, list)) else str(self.interpolation)\n        return (f\"{self.__class__.__name__}(patch_size={self.patch_size}, \"\n                f\"max_sequence_len={self.max_sequence_len}, \"\n                f\"longest={self.longest:.3f}, \"\n                f\"random_scale_prob={self.random_scale_prob:.3f}, \"\n                f\"random_aspect_prob={self.random_aspect_prob:.3f})\")\n\n\nclass CenterCropToSequence(torch.nn.Module):\n    \"\"\"Center crop the image such that the resulting patch sequence length meets constraints.\"\"\"\n    def __init__(\n            self,\n            patch_size: int,\n            max_seq_len: int,\n            divisible_by_patch: bool = True,\n            fill: Union[int, Tuple[int, int, int]] = 0,\n            padding_mode: str = 'constant'\n        ):\n        super().__init__()\n        self.patch_size = patch_size\n        self.max_seq_len = max_seq_len\n        self.divisible_by_patch = divisible_by_patch\n        self.fill = fill\n        self.padding_mode = padding_mode\n\n\n    def forward(self, img):\n        \"\"\"Center crop the image to maintain aspect ratio and fit sequence 
constraint.\"\"\"\n        _, h, w = transforms.functional.get_dimensions(img)\n        _, target_hw = get_image_size_for_seq(\n            (h, w),\n            self.patch_size,\n            self.max_seq_len,\n            self.divisible_by_patch\n        )\n\n        # Use center crop\n        return center_crop_or_pad(img, target_hw, fill=self.fill, padding_mode=self.padding_mode)\n\n\nclass RandomCropToSequence(torch.nn.Module):\n    \"\"\"Randomly crop and/or pad the image to fit sequence length constraints.\n\n    This maintains aspect ratio while ensuring the resulting image, when divided into patches,\n    will not exceed the specified maximum sequence length. Similar to CentralCropToSequence\n    but with randomized positioning.\n    \"\"\"\n\n    def __init__(\n            self,\n            patch_size: int,\n            max_sequence_len: int,\n            divisible_by_patch: bool = True,\n            fill: Union[int, Tuple[int, int, int]] = 0,\n            padding_mode: str = 'constant'\n    ):\n        \"\"\"\n        Args:\n            patch_size: Size of patches (int or tuple of (patch_h, patch_w))\n            max_sequence_len: Maximum allowed sequence length for the resulting image\n            divisible_by_patch: If True, resulting image dimensions will be multiples of patch_size\n            fill: Fill value for padding\n            padding_mode: Padding mode ('constant', 'edge', 'reflect', 'symmetric')\n        \"\"\"\n        super().__init__()\n        self.patch_size = patch_size\n        self.max_sequence_len = max_sequence_len\n        self.divisible_by_patch = divisible_by_patch\n        self.fill = fill\n        self.padding_mode = padding_mode\n\n    @staticmethod\n    def get_params(img, target_size):\n        \"\"\"Get random position for crop/pad.\"\"\"\n        _, image_height, image_width = transforms.functional.get_dimensions(img)\n        delta_height = image_height - target_size[0]\n        delta_width = image_width - 
target_size[1]\n\n        # Handle both positive (crop) and negative (pad) deltas\n        if delta_height == 0:\n            top = 0\n        else:\n            top = int(math.copysign(random.randint(0, abs(delta_height)), delta_height))\n\n        if delta_width == 0:\n            left = 0\n        else:\n            left = int(math.copysign(random.randint(0, abs(delta_width)), delta_width))\n\n        return top, left\n\n    def forward(self, img):\n        \"\"\"Randomly crop or pad the image to maintain aspect ratio and fit sequence constraint.\"\"\"\n        # Get current dimensions\n        _, img_h, img_w = transforms.functional.get_dimensions(img)\n\n        # Calculate target dimensions that satisfy sequence length\n        # We use max_ratio=1.0 to prevent upscaling - we only want to crop or maintain current size\n        _, target_hw = get_image_size_for_seq(\n            (img_h, img_w),\n            self.patch_size,\n            self.max_sequence_len,\n            self.divisible_by_patch,\n            max_ratio=1.0  # Prevent upscaling\n        )\n\n        # Get random position for crop/pad\n        top, left = self.get_params(img, target_hw)\n\n        # Apply crop or pad\n        return crop_or_pad(\n            img,\n            top=top,\n            left=left,\n            height=target_hw[0],\n            width=target_hw[1],\n            fill=self.fill,\n            padding_mode=self.padding_mode,\n        )\n\n    def __repr__(self) -> str:\n        return (f\"{self.__class__.__name__}(patch_size={self.patch_size}, \"\n                f\"max_sequence_len={self.max_sequence_len}, \"\n                f\"divisible_by_patch={self.divisible_by_patch})\")\n\n\ndef _validate_range(value, name, length=2):\n    # Validate type and length\n    if not isinstance(value, Sequence) or len(value) != length:\n        raise ValueError(f\"{name} should be a sequence of length {length}.\")\n\n    # Validate order\n    if value[0] > value[1]:\n        
warnings.warn(f\"{name.capitalize()} range reversed. Swapping.\")\n        return value[1], value[0]\n\n    return value\n\n\nclass RandomResizedCropToSequence(torch.nn.Module):\n    \"\"\"\n    Randomly crop the input image to a subregion with varying area and aspect ratio\n    (relative to the original), then resize that crop to a target size. The target size\n    is determined such that patchifying the resized image (with `patch_size`)\n    does not exceed `max_seq_len` patches, while maintaining the aspect ratio of the crop.\n\n    This combines aspects of torchvision's RandomResizedCrop with sequence length constraints.\n\n    Args:\n        patch_size (int or tuple[int, int]):\n            Patch dimensions (patch_h, patch_w) for sequence length calculation.\n        max_seq_len (int):\n            Maximum number of patches allowed in the final image.\n        scale (tuple[float, float]):\n            Range (min, max) of area fraction of the original image to crop.\n        ratio (tuple[float, float]):\n            Range (min, max) of aspect ratio *multipliers* for the crop, relative\n            to the original image's aspect ratio. E.g., (0.75, 1.333) means the\n            crop's aspect ratio will be sampled between 0.75*orig_ar and 1.333*orig_ar.\n            Uses log-uniform sampling.\n        interpolation (str or InterpolationMode):\n            Interpolation mode for resizing. Can be 'bilinear', 'bicubic', 'nearest',\n            or 'random' (chooses between bilinear and bicubic).\n            Defaults to 'bicubic'.\n        divisible_by_patch (bool):\n            If True, the final image height and width will be multiples of the\n            respective patch dimensions. Defaults to True.\n        max_ratio (float, optional):\n            An optional upper limit on the scaling ratio applied during resizing.\n            Prevents excessive upsampling of the initial crop. `max_ratio=1.0`\n            prevents any upsampling beyond the cropped size. 
Defaults to None (no limit).\n        final_scale_range (tuple[float, float], optional):\n            If provided, applies an *additional* random scaling factor to the\n            final target size. The factor is sampled uniformly from this range,\n            and multiplied by the size determined by `get_image_size_for_seq`.\n            E.g., (0.8, 1.0) means the final size will be between 80% and 100%\n            of the maximum feasible size. Defaults to None (use maximum feasible size).\n        attempts (int):\n            Number of attempts to sample a valid crop geometry before falling back\n            to a center crop strategy. Defaults to 10.\n    \"\"\"\n\n    def __init__(\n        self,\n        patch_size: Union[int, Tuple[int, int]] = 16,\n        max_seq_len: int = 1024,\n        scale: Tuple[float, float] = (0.08, 1.0),\n        ratio: Tuple[float, float] = (.8, 1.25),\n        interpolation: Union[str, InterpolationMode] = 'bicubic',\n        divisible_by_patch: bool = True,\n        max_ratio: Optional[float] = None,\n        final_scale_range: Optional[Tuple[float, float]] = None,\n        attempts: int = 10,\n    ):\n        super().__init__()\n        if isinstance(patch_size, int):\n            self.patch_h, self.patch_w = patch_size, patch_size\n        else:\n            # Assume it's a tuple/list: (patch_h, patch_w)\n            if len(patch_size) != 2:\n                raise ValueError(\"patch_size tuple must have exactly two elements (patch_h, patch_w).\")\n            self.patch_h, self.patch_w = patch_size\n        self.max_seq_len = max_seq_len\n        self.scale = scale\n        self.ratio = ratio\n        self.divisible_by_patch = divisible_by_patch\n        self.max_ratio = max_ratio\n        self.final_scale_range = final_scale_range\n        self.attempts = attempts\n        if isinstance(interpolation, str):\n            if interpolation == 'random':\n                self.interpolation = _RANDOM_INTERPOLATION\n            
else:\n                self.interpolation = str_to_interp_mode(interpolation)\n        else:\n            self.interpolation = interpolation\n\n        # Validate scale and ratio\n        self.scale = _validate_range(self.scale, \"scale\")\n        self.ratio = _validate_range(self.ratio, \"ratio\")\n\n        # Validate final_scale_range if provided\n        if self.final_scale_range is not None:\n            self.final_scale_range = _validate_range(self.final_scale_range, \"final_scale_range\")\n\n            # Additional validation for final_scale_range values\n            if not (0.0 <= self.final_scale_range[0] <= self.final_scale_range[1] <= 1.0):\n                warnings.warn(\"final_scale_range values should ideally be between 0.0 and 1.0.\")\n\n    @staticmethod\n    def get_params(\n            img: torch.Tensor,\n            scale: Tuple[float, float],\n            ratio: Tuple[float, float],\n            crop_attempts: int = 10,\n            patch_h: int = 16,\n            patch_w: int = 16,\n            max_seq_len: int = 1024,\n            divisible_by_patch: bool = True,\n            max_ratio: Optional[float] = None,\n            final_scale_range: Optional[Tuple[float, float]] = None,\n            interpolation: Union[List[InterpolationMode], InterpolationMode] = _RANDOM_INTERPOLATION,\n    ) -> Tuple[Tuple[int, int, int, int], Tuple[int, int], InterpolationMode]:\n        \"\"\" Get parameters for a random sized crop relative to image aspect ratio.\n        \"\"\"\n        _, height, width = F.get_dimensions(img)\n        if height <= 0 or width <= 0:\n             raise ValueError(f\"Input image must have positive dimensions, got H={height}, W={width}\")\n\n        area = height * width\n        orig_aspect = width / height\n        log_ratio = (math.log(ratio[0]), math.log(ratio[1]))\n\n        for _ in range(crop_attempts):\n            target_area = area * random.uniform(scale[0], scale[1])\n            aspect_ratio_factor = 
math.exp(random.uniform(log_ratio[0], log_ratio[1]))\n            aspect_ratio = orig_aspect * aspect_ratio_factor\n\n            # Calculate target dimensions for the crop\n            # target_area = crop_w * crop_h, aspect_ratio = crop_w / crop_h\n            # => crop_h = sqrt(target_area / aspect_ratio)\n            # => crop_w = sqrt(target_area * aspect_ratio)\n            crop_h = int(round(math.sqrt(target_area / aspect_ratio)))\n            crop_w = int(round(math.sqrt(target_area * aspect_ratio)))\n\n            if 0 < crop_w <= width and 0 < crop_h <= height:\n                top = random.randint(0, height - crop_h)\n                left = random.randint(0, width - crop_w)\n                break\n        else:\n            # Fallback strategy, use center crop trying to respect ratio range\n            min_aspect_ratio = orig_aspect * ratio[0]\n            max_aspect_ratio = orig_aspect * ratio[1]\n\n            if orig_aspect < min_aspect_ratio:\n                # Original is narrower than target min, clamp width\n                crop_w = width\n                crop_h = min(int(round(crop_w / min_aspect_ratio)), height)\n            elif orig_aspect > max_aspect_ratio:\n                # Original is wider than target max, clamp height\n                crop_h = height\n                crop_w = min(int(round(crop_h * max_aspect_ratio)), width)\n            else:\n                # Aspect ratio is within range, take the largest possible crop (full image)\n                crop_w = width\n                crop_h = height\n\n            # Ensure valid dimensions after fallback calculation\n            crop_h = max(1, crop_h)\n            crop_w = max(1, crop_w)\n\n            top = (height - crop_h) // 2\n            left = (width - crop_w) // 2\n\n        # Determine max feasible size for scaling of the *cropped* region\n        feasible_ratio, feasible_size = get_image_size_for_seq(\n            (crop_h, crop_w),\n            patch_size=(patch_h, patch_w), # 
Pass as tuple\n            max_seq_len=max_seq_len,\n            divisible_by_patch=divisible_by_patch,\n            max_ratio=max_ratio,\n        )\n\n        # Optionally apply final scale randomization\n        final_size = feasible_size\n        if final_scale_range is not None:\n            min_sc, max_sc = final_scale_range\n            scale_factor = random.uniform(min_sc, max_sc)\n            scale_factor = min(max(scale_factor, 0.0), 1.0) # Clamp factor just in case\n\n            # Calculate raw scaled size\n            # Note: feasible_ratio already accounts for max_ratio clamp if any\n            raw_h = crop_h * feasible_ratio * scale_factor\n            raw_w = crop_w * feasible_ratio * scale_factor\n\n            # Re-apply divisibility constraint if needed\n            if divisible_by_patch:\n                # Use ceil to avoid going under minimum patch size\n                target_h = patch_h * math.ceil(raw_h / patch_h)\n                target_w = patch_w * math.ceil(raw_w / patch_w)\n            else:\n                target_h = int(round(raw_h))\n                target_w = int(round(raw_w))\n\n            # Ensure final size is at least one patch dimension\n            target_h = max(target_h, patch_h)\n            target_w = max(target_w, patch_w)\n            final_size = (target_h, target_w)\n\n             # Final check: Ensure this randomized size still fits max_seq_len\n             # (It should, as we scaled down, but rounding might theoretically push it over)\n            num_patches_h = final_size[0] // patch_h\n            num_patches_w = final_size[1] // patch_w\n            if (num_patches_h * num_patches_w) > max_seq_len:\n                 # If it exceeds, revert to the original feasible_size (safest)\n                 final_size = feasible_size\n                 warnings.warn(f\"Final scale randomization ({scale_factor:.2f}) resulted in size {final_size} exceeding max_seq_len={max_seq_len} after rounding. 
Reverting to feasible size {feasible_size}.\")\n\n        # Select interpolation mode\n        if isinstance(interpolation, (tuple, list)):\n            interpolation = random.choice(interpolation)\n        else:\n            interpolation = interpolation\n\n        return (top, left, crop_h, crop_w), final_size, interpolation\n\n    def forward(self, img: torch.Tensor) -> torch.Tensor:\n        # Sample crop, resize, and interpolation parameters\n        crop_params, final_size, interpolation = self.get_params(\n            img,\n            scale=self.scale,\n            ratio=self.ratio,\n            crop_attempts=self.attempts,\n            patch_h=self.patch_h,\n            patch_w=self.patch_w,\n            divisible_by_patch=self.divisible_by_patch,\n            max_seq_len=self.max_seq_len,\n            final_scale_range=self.final_scale_range,\n            interpolation=self.interpolation,\n        )\n        top, left, crop_h, crop_w = crop_params\n\n        output = F.resized_crop(\n            img,\n            top=top,\n            left=left,\n            height=crop_h,\n            width=crop_w,\n            size=final_size,\n            interpolation=interpolation,\n            antialias=True,\n        )\n\n        return output\n\n    def __repr__(self) -> str:\n        if isinstance(self.interpolation, (tuple, list)):\n            interpolate_str = ', '.join(str(m).split('.')[-1] for m in self.interpolation)\n        else:\n            interpolate_str = str(self.interpolation)\n        format_string = self.__class__.__name__ + '('\n        format_string += f\"patch_size=({self.patch_h}, {self.patch_w})\"\n        format_string += f\", max_seq_len={self.max_seq_len}\"\n        format_string += f\", scale={self.scale}\"\n        format_string += f\", ratio={self.ratio}\"\n        format_string += f\", interpolation=[{interpolate_str}]\"\n        format_string += f\", divisible_by_patch={self.divisible_by_patch}\"\n        format_string += f\", 
max_ratio={self.max_ratio}\"\n        format_string += f\", final_scale_range={self.final_scale_range}\"\n        format_string += f\", attempts={self.attempts}\"\n        format_string += ')'\n        return format_string\n\n\ndef patchify_image(\n        img: torch.Tensor,\n        patch_size: Tuple[int, int],\n        pad: bool = True,\n        include_info: bool = True,\n        flatten_patches: bool = True,\n) -> Union[torch.Tensor, Tuple[torch.Tensor, torch.Tensor, torch.Tensor]]:\n    c, h, w = img.shape\n    ph, pw = patch_size\n\n    # Ensure the image is divisible by patch size\n    if pad and (h % ph != 0 or w % pw != 0):\n        pad_h = (ph - h % ph) % ph  # amount to add on bottom\n        pad_w = (pw - w % pw) % pw  # amount to add on right\n        img = torch.nn.functional.pad(img, (0, pad_w, 0, pad_h))\n        c, h, w = img.shape\n\n    # Calculate number of patches in each dimension\n    nh, nw = h // ph, w // pw\n    # Reshape image to patches\n    patches = img.view(c, nh, ph, nw, pw).permute(1, 3, 2, 4, 0)\n    # [nh, nw, ph, pw, c] -> [nh * nw, ph * pw * c] or [nh * nw, ph, pw, c]\n    patches = patches.reshape(-1, ph * pw * c) if flatten_patches else patches.reshape(-1, ph, pw, c)\n\n    if include_info:\n        # Create coordinate indices\n        y_idx, x_idx = torch.meshgrid(torch.arange(nh), torch.arange(nw), indexing='ij')\n        # Stack into a single coords tensor [N, 2] with (y, x) order\n        coord = torch.stack([y_idx.reshape(-1), x_idx.reshape(-1)], dim=1)\n        # Create type indicators (all 1s for regular patches)\n        valid = torch.ones(nh * nw, dtype=torch.bool)\n        return patches, coord, valid\n\n    return patches\n\n\nclass Patchify(torch.nn.Module):\n    \"\"\"Transform an image into patches with corresponding coordinates and type indicators.\"\"\"\n\n    def __init__(\n            self,\n            patch_size: Union[int, Tuple[int, int]],\n            flatten_patches: bool = True\n    ):\n        
super().__init__()\n        self.patch_size = patch_size if isinstance(patch_size, tuple) else (patch_size, patch_size)\n        self.flatten_patches = flatten_patches\n\n    def forward(self, img):\n        \"\"\"\n        Args:\n            img: A PIL Image or tensor of shape [C, H, W]\n\n        Returns:\n            A dictionary containing:\n                - patches: Tensor of shape [N, P*P*C] if flatten_patches=True,\n                          or [N, Ph, Pw, C] if flatten_patches=False\n                - patch_coord: Tensor of shape [N, 2] with (y, x) coordinates\n                - patch_valid: Valid indicator (all 1s for non-padding patches)\n        \"\"\"\n        if isinstance(img, Image.Image):\n            # Convert PIL Image to tensor [C, H, W]\n            img = transforms.functional.to_tensor(img)\n\n        patches, coord, valid = patchify_image(img, self.patch_size, flatten_patches=self.flatten_patches)\n\n        return {\n            'patches': patches,\n            'patch_coord': coord,\n            'patch_valid': valid,\n        }\n"
  },
  {
    "path": "timm/data/random_erasing.py",
    "content": "\"\"\" Random Erasing (Cutout)\n\nOriginally inspired by impl at https://github.com/zhunzhong07/Random-Erasing, Apache 2.0\nCopyright Zhun Zhong & Liang Zheng\n\nHacked together by / Copyright 2019, Ross Wightman\n\"\"\"\nimport random\nimport math\n\nimport torch\n\n\ndef _get_pixels(per_pixel, rand_color, patch_size, dtype=torch.float32, device='cuda'):\n    # NOTE I've seen CUDA illegal memory access errors being caused by the normal_()\n    # paths, flip the order so normal is run on CPU if this becomes a problem\n    # Issue has been fixed in master https://github.com/pytorch/pytorch/issues/19508\n    if per_pixel:\n        return torch.empty(patch_size, dtype=dtype, device=device).normal_()\n    elif rand_color:\n        return torch.empty((patch_size[0], 1, 1), dtype=dtype, device=device).normal_()\n    else:\n        return torch.zeros((patch_size[0], 1, 1), dtype=dtype, device=device)\n\n\nclass RandomErasing:\n    \"\"\" Randomly selects a rectangle region in an image and erases its pixels.\n        'Random Erasing Data Augmentation' by Zhong et al.\n        See https://arxiv.org/pdf/1708.04896.pdf\n\n        This variant of RandomErasing is intended to be applied to either a batch\n        or single image tensor after it has been normalized by dataset mean and std.\n    Args:\n         probability: Probability that the Random Erasing operation will be performed.\n         min_area: Minimum percentage of erased area wrt input image area.\n         max_area: Maximum percentage of erased area wrt input image area.\n         min_aspect: Minimum aspect ratio of erased area.\n         mode: pixel color mode, one of 'const', 'rand', or 'pixel'\n            'const' - erase block is constant color of 0 for all channels\n            'rand'  - erase block is same per-channel random (normal) color\n            'pixel' - erase block is per-pixel random (normal) color\n        max_count: maximum number of erasing blocks per image, area per box is 
scaled by count.\n            per-image count is randomly chosen between 1 and this value.\n    \"\"\"\n\n    def __init__(\n            self,\n            probability=0.5,\n            min_area=0.02,\n            max_area=1/3,\n            min_aspect=0.3,\n            max_aspect=None,\n            mode='const',\n            min_count=1,\n            max_count=None,\n            num_splits=0,\n            device='cuda',\n    ):\n        self.probability = probability\n        self.min_area = min_area\n        self.max_area = max_area\n        max_aspect = max_aspect or 1 / min_aspect\n        self.log_aspect_ratio = (math.log(min_aspect), math.log(max_aspect))\n        self.min_count = min_count\n        self.max_count = max_count or min_count\n        self.num_splits = num_splits\n        self.mode = mode.lower()\n        self.rand_color = False\n        self.per_pixel = False\n        if self.mode == 'rand':\n            self.rand_color = True  # per block random normal\n        elif self.mode == 'pixel':\n            self.per_pixel = True  # per pixel random normal\n        else:\n            assert not self.mode or self.mode == 'const'\n        self.device = device\n\n    def _erase(self, img, chan, img_h, img_w, dtype):\n        if random.random() > self.probability:\n            return\n        area = img_h * img_w\n        count = self.min_count if self.min_count == self.max_count else \\\n            random.randint(self.min_count, self.max_count)\n        for _ in range(count):\n            for attempt in range(10):\n                target_area = random.uniform(self.min_area, self.max_area) * area / count\n                aspect_ratio = math.exp(random.uniform(*self.log_aspect_ratio))\n                h = int(round(math.sqrt(target_area * aspect_ratio)))\n                w = int(round(math.sqrt(target_area / aspect_ratio)))\n                if w < img_w and h < img_h:\n                    top = random.randint(0, img_h - h)\n                    left = 
random.randint(0, img_w - w)\n                    img[:, top:top + h, left:left + w] = _get_pixels(\n                        self.per_pixel,\n                        self.rand_color,\n                        (chan, h, w),\n                        dtype=dtype,\n                        device=self.device,\n                    )\n                    break\n\n    def __call__(self, input):\n        if len(input.size()) == 3:\n            self._erase(input, *input.size(), input.dtype)\n        else:\n            batch_size, chan, img_h, img_w = input.size()\n            # skip first slice of batch if num_splits is set (for clean portion of samples)\n            batch_start = batch_size // self.num_splits if self.num_splits > 1 else 0\n            for i in range(batch_start, batch_size):\n                self._erase(input[i], chan, img_h, img_w, input.dtype)\n        return input\n\n    def __repr__(self):\n        # NOTE simplified state for repr\n        fs = self.__class__.__name__ + f'(p={self.probability}, mode={self.mode}'\n        fs += f', count=({self.min_count}, {self.max_count}))'\n        return fs\n"
  },
  {
    "path": "timm/data/readers/__init__.py",
    "content": "from .reader_factory import create_reader\nfrom .img_extensions import *\n"
  },
  {
    "path": "timm/data/readers/class_map.py",
    "content": "import os\nimport pickle\n\n\ndef load_class_map(map_or_filename, root=''):\n    if isinstance(map_or_filename, dict):\n        assert dict, 'class_map dict must be non-empty'\n        return map_or_filename\n    class_map_path = map_or_filename\n    if not os.path.exists(class_map_path):\n        class_map_path = os.path.join(root, class_map_path)\n        assert os.path.exists(class_map_path), 'Cannot locate specified class map file (%s)' % map_or_filename\n    class_map_ext = os.path.splitext(map_or_filename)[-1].lower()\n    if class_map_ext == '.txt':\n        with open(class_map_path) as f:\n            class_to_idx = {v.strip(): k for k, v in enumerate(f)}\n    elif class_map_ext == '.pkl':\n        with open(class_map_path, 'rb') as f:\n            class_to_idx = pickle.load(f)\n    else:\n        assert False, f'Unsupported class map file extension ({class_map_ext}).'\n    return class_to_idx\n\n"
  },
  {
    "path": "timm/data/readers/img_extensions.py",
    "content": "from copy import deepcopy\n\n__all__ = ['get_img_extensions', 'is_img_extension', 'set_img_extensions', 'add_img_extensions', 'del_img_extensions']\n\n\nIMG_EXTENSIONS = ('.png', '.jpg', '.jpeg')  # singleton, kept public for bwd compat use\n_IMG_EXTENSIONS_SET = set(IMG_EXTENSIONS)  # set version, private, kept in sync\n\n\ndef _set_extensions(extensions):\n    global IMG_EXTENSIONS\n    global _IMG_EXTENSIONS_SET\n    dedupe = set()  # NOTE de-duping tuple while keeping original order\n    IMG_EXTENSIONS = tuple(x for x in extensions if x not in dedupe and not dedupe.add(x))\n    _IMG_EXTENSIONS_SET = set(extensions)\n\n\ndef _valid_extension(x: str):\n    return x and isinstance(x, str) and len(x) >= 2 and x.startswith('.')\n\n\ndef is_img_extension(ext):\n    return ext in _IMG_EXTENSIONS_SET\n\n\ndef get_img_extensions(as_set=False):\n    return deepcopy(_IMG_EXTENSIONS_SET if as_set else IMG_EXTENSIONS)\n\n\ndef set_img_extensions(extensions):\n    assert len(extensions)\n    for x in extensions:\n        assert _valid_extension(x)\n    _set_extensions(extensions)\n\n\ndef add_img_extensions(ext):\n    if not isinstance(ext, (list, tuple, set)):\n        ext = (ext,)\n    for x in ext:\n        assert _valid_extension(x)\n    extensions = IMG_EXTENSIONS + tuple(ext)\n    _set_extensions(extensions)\n\n\ndef del_img_extensions(ext):\n    if not isinstance(ext, (list, tuple, set)):\n        ext = (ext,)\n    extensions = tuple(x for x in IMG_EXTENSIONS if x not in ext)\n    _set_extensions(extensions)\n"
  },
  {
    "path": "timm/data/readers/reader.py",
    "content": "from abc import abstractmethod\n\n\nclass Reader:\n    def __init__(self):\n        pass\n\n    @abstractmethod\n    def _filename(self, index, basename=False, absolute=False):\n        pass\n\n    def filename(self, index, basename=False, absolute=False):\n        return self._filename(index, basename=basename, absolute=absolute)\n\n    def filenames(self, basename=False, absolute=False):\n        return [self._filename(index, basename=basename, absolute=absolute) for index in range(len(self))]\n\n"
  },
  {
    "path": "timm/data/readers/reader_factory.py",
    "content": "import os\nfrom typing import Optional\n\nfrom .reader_image_folder import ReaderImageFolder\nfrom .reader_image_in_tar import ReaderImageInTar\n\n\ndef create_reader(\n        name: str,\n        root: Optional[str] = None,\n        split: str = 'train',\n        **kwargs,\n):\n    kwargs = {k: v for k, v in kwargs.items() if v is not None}\n    name = name.lower()\n    name = name.split('/', 1)\n    prefix = ''\n    if len(name) > 1:\n        prefix = name[0]\n    name = name[-1]\n\n    # FIXME the additional features are only supported by ReaderHfds for now.\n    additional_features = kwargs.pop(\"additional_features\", None)\n\n    # FIXME improve the selection right now just tfds prefix or fallback path, will need options to\n    # explicitly select other options shortly\n    if prefix == 'hfds':\n        from .reader_hfds import ReaderHfds  # defer Hf datasets import\n        reader = ReaderHfds(name=name, root=root, split=split, additional_features=additional_features, **kwargs)\n    elif prefix == 'hfids':\n        from .reader_hfids import ReaderHfids  # defer HF datasets import\n        reader = ReaderHfids(name=name, root=root, split=split, **kwargs)\n    elif prefix == 'tfds':\n        from .reader_tfds import ReaderTfds  # defer tensorflow import\n        reader = ReaderTfds(name=name, root=root, split=split, **kwargs)\n    elif prefix == 'wds':\n        from .reader_wds import ReaderWds\n        kwargs.pop('download', False)\n        reader = ReaderWds(root=root, name=name, split=split, **kwargs)\n    else:\n        assert os.path.exists(root)\n        # default fallback path (backwards compat), use image tar if root is a .tar file, otherwise image folder\n        # FIXME support split here or in reader?\n        if os.path.isfile(root) and os.path.splitext(root)[1] == '.tar':\n            reader = ReaderImageInTar(root, **kwargs)\n        else:\n            reader = ReaderImageFolder(root, **kwargs)\n    return reader\n"
  },
  {
    "path": "timm/data/readers/reader_hfds.py",
    "content": "\"\"\" Dataset reader that wraps Hugging Face datasets\n\nHacked together by / Copyright 2022 Ross Wightman\n\"\"\"\nimport io\nimport math\nfrom typing import Optional\n\nimport torch\nimport torch.distributed as dist\nfrom PIL import Image\n\ntry:\n    import datasets\nexcept ImportError as e:\n    print(\"Please install Hugging Face datasets package `pip install datasets`.\")\n    raise e\nfrom .class_map import load_class_map\nfrom .reader import Reader\n\n\ndef get_class_labels(info, label_key='label'):\n    if 'label' not in info.features:\n        return {}\n    class_label = info.features[label_key]\n    class_to_idx = {n: class_label.str2int(n) for n in class_label.names}\n    return class_to_idx\n\n\nclass ReaderHfds(Reader):\n\n    def __init__(\n            self,\n            name: str,\n            root: Optional[str] = None,\n            split: str = 'train',\n            class_map: dict = None,\n            input_key: str = 'image',\n            target_key: str = 'label',\n            additional_features: Optional[list[str]] = None,\n            download: bool = False,\n            trust_remote_code: bool = False\n    ):\n        \"\"\"\n        \"\"\"\n        super().__init__()\n        self.root = root\n        self.split = split\n        self.dataset = datasets.load_dataset(\n            name,  # 'name' maps to path arg in hf datasets\n            split=split,\n            cache_dir=self.root,  # timm doesn't expect hidden cache dir for datasets, specify a path if root set\n            trust_remote_code=trust_remote_code\n        )\n        # leave decode for caller, plus we want easy access to original path names...\n        self.dataset = self.dataset.cast_column(input_key, datasets.Image(decode=False))\n\n        self.image_key = input_key\n        self.label_key = target_key\n        self.remap_class = False\n        if class_map:\n            self.class_to_idx = load_class_map(class_map)\n            self.remap_class = True\n 
       else:\n            self.class_to_idx = get_class_labels(self.dataset.info, self.label_key)\n        self.split_info = self.dataset.info.splits[split]\n        self.num_samples = self.split_info.num_examples\n\n        if additional_features is not None:\n            if isinstance(additional_features, list):\n                self.additional_features = additional_features\n            else:\n                self.additional_features = [additional_features]\n        else:\n            self.additional_features = None\n\n    def __getitem__(self, index):\n        item = self.dataset[index]\n        image = item[self.image_key]\n\n        if 'bytes' in image and image['bytes']:\n            image = io.BytesIO(image['bytes'])\n        else:\n            assert 'path' in image and image['path']\n            image = open(image['path'], 'rb')\n\n        label = item[self.label_key]\n        if self.remap_class:\n            label = self.class_to_idx[label]\n\n        if self.additional_features is not None:\n            features = [item[feat] for feat in self.additional_features]\n            return image, label, *features\n        else:\n            return image, label\n\n    def __len__(self):\n        return len(self.dataset)\n\n    def _filename(self, index, basename=False, absolute=False):\n        item = self.dataset[index]\n        return item[self.image_key]['path']\n"
  },
  {
    "path": "timm/data/readers/reader_hfids.py",
    "content": "\"\"\" Dataset reader for HF IterableDataset\n\"\"\"\nimport math\nimport os\nfrom itertools import repeat, chain\nfrom typing import Optional\n\nimport torch\nimport torch.distributed as dist\nfrom PIL import Image\n\ntry:\n    import datasets\n    from datasets.distributed import split_dataset_by_node\n    from datasets.splits import SplitInfo\nexcept ImportError as e:\n    print(\"Please install Hugging Face datasets package `pip install datasets`.\")\n    raise e\n\n\nfrom .class_map import load_class_map\nfrom .reader import Reader\nfrom .shared_count import SharedCount\n\n\nSHUFFLE_SIZE = int(os.environ.get('HFIDS_SHUFFLE_SIZE', 4096))\n\n\nclass ReaderHfids(Reader):\n    def __init__(\n            self,\n            name: str,\n            root: Optional[str] = None,\n            split: str = 'train',\n            is_training: bool = False,\n            batch_size: int = 1,\n            download: bool = False,\n            repeats: int = 0,\n            seed: int = 42,\n            class_map: Optional[dict] = None,\n            input_key: str = 'image',\n            input_img_mode: str = 'RGB',\n            target_key: str = 'label',\n            target_img_mode: str = '',\n            shuffle_size: Optional[int] = None,\n            num_samples: Optional[int] = None,\n            trust_remote_code: bool = False\n    ):\n        super().__init__()\n        self.root = root\n        self.split = split\n        self.is_training = is_training\n        self.batch_size = batch_size\n        self.download = download\n        self.repeats = repeats\n        self.common_seed = seed  # a seed that's fixed across all worker / distributed instances\n        self.shuffle_size = shuffle_size or SHUFFLE_SIZE\n\n        self.input_key = input_key\n        self.input_img_mode = input_img_mode\n        self.target_key = target_key\n        self.target_img_mode = target_img_mode\n\n        self.builder = datasets.load_dataset_builder(\n            name,\n      
      cache_dir=root,\n            trust_remote_code=trust_remote_code,\n        )\n        if download:\n            self.builder.download_and_prepare()\n\n        split_info: Optional[SplitInfo] = None\n        if self.builder.info.splits and split in self.builder.info.splits:\n            if isinstance(self.builder.info.splits[split], SplitInfo):\n                split_info: Optional[SplitInfo] = self.builder.info.splits[split]\n\n        if num_samples:\n            self.num_samples = num_samples\n        elif split_info and split_info.num_examples:\n            self.num_samples = split_info.num_examples\n        else:\n            raise ValueError(\n                \"Dataset length is unknown, please pass `num_samples` explicitly. \"\n                \"The number of steps needs to be known in advance for the learning rate scheduler.\"\n            )\n\n        self.remap_class = False\n        if class_map:\n            self.class_to_idx = load_class_map(class_map)\n            self.remap_class = True\n        else:\n            self.class_to_idx = {}\n\n        # Distributed world state\n        self.dist_rank = 0\n        self.dist_num_replicas = 1\n        if dist.is_available() and dist.is_initialized() and dist.get_world_size() > 1:\n            self.dist_rank = dist.get_rank()\n            self.dist_num_replicas = dist.get_world_size()\n\n        # Attributes that are updated in _lazy_init\n        self.worker_info = None\n        self.worker_id = 0\n        self.num_workers = 1\n        self.global_worker_id = 0\n        self.global_num_workers = 1\n\n        # Initialized lazily on each dataloader worker process\n        self.ds: Optional[datasets.IterableDataset] = None\n        self.epoch = SharedCount()\n\n    def set_epoch(self, count):\n        # to update the shuffling effective_seed = seed + epoch\n        self.epoch.value = count\n\n    def set_loader_cfg(\n            self,\n            num_workers: Optional[int] = None,\n    ):\n        if 
self.ds is not None:\n            return\n        if num_workers is not None:\n            self.num_workers = num_workers\n            self.global_num_workers = self.dist_num_replicas * self.num_workers\n\n    def _lazy_init(self):\n        \"\"\" Lazily initialize worker (in worker processes)\n        \"\"\"\n        if self.worker_info is None:\n            worker_info = torch.utils.data.get_worker_info()\n            if worker_info is not None:\n                self.worker_info = worker_info\n                self.worker_id = worker_info.id\n                self.num_workers = worker_info.num_workers\n            self.global_num_workers = self.dist_num_replicas * self.num_workers\n            self.global_worker_id = self.dist_rank * self.num_workers + self.worker_id\n\n        if self.download:\n            dataset = self.builder.as_dataset(split=self.split)\n            # to distribute evenly to workers\n            ds = dataset.to_iterable_dataset(num_shards=self.global_num_workers)\n        else:\n            # in this case the number of shard is determined by the number of remote files\n            ds = self.builder.as_streaming_dataset(split=self.split)\n\n        if self.is_training:\n            # will shuffle the list of shards and use a shuffle buffer\n            ds = ds.shuffle(seed=self.common_seed, buffer_size=self.shuffle_size)\n\n        # Distributed:\n        # The dataset has a number of shards that is a factor of `dist_num_replicas` (i.e. 
if `ds.n_shards % dist_num_replicas == 0`),\n        # so the shards are evenly assigned across the nodes.\n        # If it's not the case for dataset streaming, each node keeps 1 example out of `dist_num_replicas`, skipping the other examples.\n\n        # Workers:\n        # In a node, datasets.IterableDataset assigns the shards assigned to the node as evenly as possible to workers.\n        self.ds = split_dataset_by_node(ds, rank=self.dist_rank, world_size=self.dist_num_replicas)\n\n    def _num_samples_per_worker(self):\n        num_worker_samples = \\\n            max(1, self.repeats) * self.num_samples / max(self.global_num_workers, self.dist_num_replicas)\n        if self.is_training or self.dist_num_replicas > 1:\n            num_worker_samples = math.ceil(num_worker_samples)\n        if self.is_training and self.batch_size is not None:\n            num_worker_samples = math.ceil(num_worker_samples / self.batch_size) * self.batch_size\n        return int(num_worker_samples)\n\n    def __iter__(self):\n        if self.ds is None:\n            self._lazy_init()\n        self.ds.set_epoch(self.epoch.value)\n\n        target_sample_count = self._num_samples_per_worker()\n        sample_count = 0\n\n        if self.is_training:\n            ds_iter = chain.from_iterable(repeat(self.ds))\n        else:\n            ds_iter = iter(self.ds)\n        for sample in ds_iter:\n            input_data: Image.Image = sample[self.input_key]\n            if self.input_img_mode and input_data.mode != self.input_img_mode:\n                input_data = input_data.convert(self.input_img_mode)\n            target_data = sample[self.target_key]\n            if self.target_img_mode:\n                assert isinstance(target_data, Image.Image), \"target_img_mode is specified but target is not an image\"\n                if target_data.mode != self.target_img_mode:\n                    target_data = target_data.convert(self.target_img_mode)\n            elif self.remap_class:\n     
           target_data = self.class_to_idx[target_data]\n            yield input_data, target_data\n            sample_count += 1\n            if self.is_training and sample_count >= target_sample_count:\n                break\n\n    def __len__(self):\n        num_samples = self._num_samples_per_worker() * self.num_workers\n        return num_samples\n\n    def _filename(self, index, basename=False, absolute=False):\n        assert False, \"Not supported\"  # no random access to examples\n\n    def filenames(self, basename=False, absolute=False):\n        \"\"\" Return all filenames in dataset, overrides base\"\"\"\n        if self.ds is None:\n            self._lazy_init()\n        names = []\n        for sample in self.ds:\n            if 'file_name' in sample:\n                name = sample['file_name']\n            elif 'filename' in sample:\n                name = sample['filename']\n            elif 'id' in sample:\n                name = sample['id']\n            elif 'image_id' in sample:\n                name = sample['image_id']\n            else:\n                assert False, \"No supported name field present\"\n            names.append(name)\n        return names"
  },
  {
    "path": "timm/data/readers/reader_image_folder.py",
    "content": "\"\"\" A dataset reader that extracts images from folders\n\nFolders are scanned recursively to find image files. Labels are based\non the folder hierarchy, just leaf folders by default.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport os\nfrom typing import Dict, List, Optional, Set, Tuple, Union\n\nfrom timm.utils.misc import natural_key\n\nfrom .class_map import load_class_map\nfrom .img_extensions import get_img_extensions\nfrom .reader import Reader\n\n\ndef find_images_and_targets(\n        folder: str,\n        types: Optional[Union[List, Tuple, Set]] = None,\n        class_to_idx: Optional[Dict] = None,\n        leaf_name_only: bool = True,\n        sort: bool = True\n):\n    \"\"\" Walk folder recursively to discover images and map them to classes by folder names.\n\n    Args:\n        folder: root of folder to recursively search\n        types: types (file extensions) to search for in path\n        class_to_idx: specify mapping for class (folder name) to class index if set\n        leaf_name_only: use only leaf-name of folder walk for class names\n        sort: re-sort found images by name (for consistent ordering)\n\n    Returns:\n        A list of image and target tuples, class_to_idx mapping\n    \"\"\"\n    types = get_img_extensions(as_set=True) if not types else set(types)\n    labels = []\n    filenames = []\n    for root, subdirs, files in os.walk(folder, topdown=False, followlinks=True):\n        rel_path = os.path.relpath(root, folder) if (root != folder) else ''\n        label = os.path.basename(rel_path) if leaf_name_only else rel_path.replace(os.path.sep, '_')\n        for f in files:\n            base, ext = os.path.splitext(f)\n            if ext.lower() in types:\n                filenames.append(os.path.join(root, f))\n                labels.append(label)\n    if class_to_idx is None:\n        # building class index\n        unique_labels = set(labels)\n        sorted_labels = list(sorted(unique_labels, 
key=natural_key))\n        class_to_idx = {c: idx for idx, c in enumerate(sorted_labels)}\n    images_and_targets = [(f, class_to_idx[l]) for f, l in zip(filenames, labels) if l in class_to_idx]\n    if sort:\n        images_and_targets = sorted(images_and_targets, key=lambda k: natural_key(k[0]))\n    return images_and_targets, class_to_idx\n\n\nclass ReaderImageFolder(Reader):\n\n    def __init__(\n            self,\n            root,\n            class_map='',\n            input_key=None,\n    ):\n        super().__init__()\n\n        self.root = root\n        class_to_idx = None\n        if class_map:\n            class_to_idx = load_class_map(class_map, root)\n        find_types = None\n        if input_key:\n            find_types = input_key.split(';')\n        self.samples, self.class_to_idx = find_images_and_targets(\n            root,\n            class_to_idx=class_to_idx,\n            types=find_types,\n        )\n        if len(self.samples) == 0:\n            raise RuntimeError(\n                f'Found 0 images in subfolders of {root}. '\n                f'Supported image extensions are {\", \".join(get_img_extensions())}')\n\n    def __getitem__(self, index):\n        path, target = self.samples[index]\n        return open(path, 'rb'), target\n\n    def __len__(self):\n        return len(self.samples)\n\n    def _filename(self, index, basename=False, absolute=False):\n        filename = self.samples[index][0]\n        if basename:\n            filename = os.path.basename(filename)\n        elif not absolute:\n            filename = os.path.relpath(filename, self.root)\n        return filename\n"
  },
  {
    "path": "timm/data/readers/reader_image_in_tar.py",
    "content": "\"\"\" A dataset reader that reads tarfile based datasets\n\nThis reader can extract image samples from:\n* a single tar of image files\n* a folder of multiple tarfiles containing imagefiles\n* a tar of tars containing image files\n\nLabels are based on the combined folder and/or tar name structure.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport logging\nimport os\nimport pickle\nimport tarfile\nfrom glob import glob\nfrom typing import List, Tuple, Dict, Set, Optional, Union\n\nimport numpy as np\n\nfrom timm.utils.misc import natural_key\n\nfrom .class_map import load_class_map\nfrom .img_extensions import get_img_extensions\nfrom .reader import Reader\n\n_logger = logging.getLogger(__name__)\nCACHE_FILENAME_SUFFIX = '_tarinfos.pickle'\n\n\nclass TarState:\n\n    def __init__(self, tf: tarfile.TarFile = None, ti: tarfile.TarInfo = None):\n        self.tf: tarfile.TarFile = tf\n        self.ti: tarfile.TarInfo = ti\n        self.children: Dict[str, TarState] = {}  # child states (tars within tars)\n\n    def reset(self):\n        self.tf = None\n\n\ndef _extract_tarinfo(tf: tarfile.TarFile, parent_info: Dict, extensions: Set[str]):\n    sample_count = 0\n    for i, ti in enumerate(tf):\n        if not ti.isfile():\n            continue\n        dirname, basename = os.path.split(ti.path)\n        name, ext = os.path.splitext(basename)\n        ext = ext.lower()\n        if ext == '.tar':\n            with tarfile.open(fileobj=tf.extractfile(ti), mode='r|') as ctf:\n                child_info = dict(\n                    name=ti.name, path=os.path.join(parent_info['path'], name), ti=ti, children=[], samples=[])\n                sample_count += _extract_tarinfo(ctf, child_info, extensions=extensions)\n                _logger.debug(f'{i}/?. Extracted child tarinfos from {ti.name}. 
{len(child_info[\"samples\"])} images.')\n                parent_info['children'].append(child_info)\n        elif ext in extensions:\n            parent_info['samples'].append(ti)\n            sample_count += 1\n    return sample_count\n\n\ndef extract_tarinfos(\n        root,\n        class_name_to_idx: Optional[Dict] = None,\n        cache_tarinfo: Optional[bool] = None,\n        extensions: Optional[Union[List, Tuple, Set]] = None,\n        sort: bool = True\n):\n    extensions = get_img_extensions(as_set=True) if not extensions else set(extensions)\n    root_is_tar = False\n    if os.path.isfile(root):\n        assert os.path.splitext(root)[-1].lower() == '.tar'\n        tar_filenames = [root]\n        root, root_name = os.path.split(root)\n        root_name = os.path.splitext(root_name)[0]\n        root_is_tar = True\n    else:\n        root_name = root.strip(os.path.sep).split(os.path.sep)[-1]\n        tar_filenames = glob(os.path.join(root, '*.tar'), recursive=True)\n    num_tars = len(tar_filenames)\n    tar_bytes = sum([os.path.getsize(f) for f in tar_filenames])\n    assert num_tars, f'No .tar files found at specified path ({root}).'\n\n    _logger.info(f'Scanning {tar_bytes/1024**2:.2f}MB of tar files...')\n    info = dict(tartrees=[])\n    cache_path = ''\n    if cache_tarinfo is None:\n        cache_tarinfo = True if tar_bytes > 10*1024**3 else False  # FIXME magic number, 10GB\n    if cache_tarinfo:\n        cache_filename = '_' + root_name + CACHE_FILENAME_SUFFIX\n        cache_path = os.path.join(root, cache_filename)\n    if os.path.exists(cache_path):\n        _logger.info(f'Reading tar info from cache file {cache_path}.')\n        with open(cache_path, 'rb') as pf:\n            info = pickle.load(pf)\n        assert len(info['tartrees']) == num_tars, \"Cached tartree len doesn't match number of tarfiles\"\n    else:\n        for i, fn in enumerate(tar_filenames):\n            path = '' if root_is_tar else 
os.path.splitext(os.path.basename(fn))[0]\n            with tarfile.open(fn, mode='r|') as tf:  # tarinfo scans done in streaming mode\n                parent_info = dict(name=os.path.relpath(fn, root), path=path, ti=None, children=[], samples=[])\n                num_samples = _extract_tarinfo(tf, parent_info, extensions=extensions)\n                num_children = len(parent_info[\"children\"])\n                _logger.debug(\n                    f'{i}/{num_tars}. Extracted tarinfos from {fn}. {num_children} children, {num_samples} samples.')\n            info['tartrees'].append(parent_info)\n        if cache_path:\n            _logger.info(f'Writing tar info to cache file {cache_path}.')\n            with open(cache_path, 'wb') as pf:\n                pickle.dump(info, pf)\n\n    samples = []\n    labels = []\n    build_class_map = False\n    if class_name_to_idx is None:\n        build_class_map = True\n\n    # Flatten tartree info into lists of samples and targets w/ targets based on label id via\n    # class map arg or from unique paths.\n    # NOTE: currently only flattening up to two-levels, filesystem .tars and then one level of sub-tar children\n    # this covers my current use cases and keeps things a little easier to test for now.\n    tarfiles = []\n\n    def _label_from_paths(*path, leaf_only=True):\n        path = os.path.join(*path).strip(os.path.sep)\n        return path.split(os.path.sep)[-1] if leaf_only else path.replace(os.path.sep, '_')\n\n    def _add_samples(info, fn):\n        added = 0\n        for s in info['samples']:\n            label = _label_from_paths(info['path'], os.path.dirname(s.path))\n            if not build_class_map and label not in class_name_to_idx:\n                continue\n            samples.append((s, fn, info['ti']))\n            labels.append(label)\n            added += 1\n        return added\n\n    _logger.info(f'Collecting samples and building tar states.')\n    for parent_info in info['tartrees']:\n        # if 
tartree has children, we assume all samples are at the child level\n        tar_name = None if root_is_tar else parent_info['name']\n        tar_state = TarState()\n        parent_added = 0\n        for child_info in parent_info['children']:\n            child_added = _add_samples(child_info, fn=tar_name)\n            if child_added:\n                tar_state.children[child_info['name']] = TarState(ti=child_info['ti'])\n            parent_added += child_added\n        parent_added += _add_samples(parent_info, fn=tar_name)\n        if parent_added:\n            tarfiles.append((tar_name, tar_state))\n    del info\n\n    if build_class_map:\n        # build class index\n        sorted_labels = list(sorted(set(labels), key=natural_key))\n        class_name_to_idx = {c: idx for idx, c in enumerate(sorted_labels)}\n\n    _logger.info(f'Mapping targets and sorting samples.')\n    samples_and_targets = [(s, class_name_to_idx[l]) for s, l in zip(samples, labels) if l in class_name_to_idx]\n    if sort:\n        samples_and_targets = sorted(samples_and_targets, key=lambda k: natural_key(k[0][0].path))\n    samples, targets = zip(*samples_and_targets)\n    samples = np.array(samples)\n    targets = np.array(targets)\n    _logger.info(f'Finished processing {len(samples)} samples across {len(tarfiles)} tar files.')\n    return samples, targets, class_name_to_idx, tarfiles\n\n\nclass ReaderImageInTar(Reader):\n    \"\"\" Multi-tarfile dataset reader where there is one .tar file per class\n    \"\"\"\n\n    def __init__(self, root, class_map='', cache_tarfiles=True, cache_tarinfo=None):\n        super().__init__()\n\n        class_name_to_idx = None\n        if class_map:\n            class_name_to_idx = load_class_map(class_map, root)\n        self.root = root\n        self.samples, self.targets, self.class_name_to_idx, tarfiles = extract_tarinfos(\n            self.root,\n            class_name_to_idx=class_name_to_idx,\n            cache_tarinfo=cache_tarinfo\n        )\n    
    self.class_idx_to_name = {v: k for k, v in self.class_name_to_idx.items()}\n        if len(tarfiles) == 1 and tarfiles[0][0] is None:\n            self.root_is_tar = True\n            self.tar_state = tarfiles[0][1]\n        else:\n            self.root_is_tar = False\n            self.tar_state = dict(tarfiles)\n        self.cache_tarfiles = cache_tarfiles\n\n    def __len__(self):\n        return len(self.samples)\n\n    def __getitem__(self, index):\n        sample = self.samples[index]\n        target = self.targets[index]\n        sample_ti, parent_fn, child_ti = sample\n        parent_abs = os.path.join(self.root, parent_fn) if parent_fn else self.root\n\n        tf = None\n        cache_state = None\n        if self.cache_tarfiles:\n            cache_state = self.tar_state if self.root_is_tar else self.tar_state[parent_fn]\n            tf = cache_state.tf\n        if tf is None:\n            tf = tarfile.open(parent_abs)\n            if self.cache_tarfiles:\n                cache_state.tf = tf\n        if child_ti is not None:\n            ctf = cache_state.children[child_ti.name].tf if self.cache_tarfiles else None\n            if ctf is None:\n                ctf = tarfile.open(fileobj=tf.extractfile(child_ti))\n                if self.cache_tarfiles:\n                    cache_state.children[child_ti.name].tf = ctf\n            tf = ctf\n\n        return tf.extractfile(sample_ti), target\n\n    def _filename(self, index, basename=False, absolute=False):\n        filename = self.samples[index][0].name\n        if basename:\n            filename = os.path.basename(filename)\n        return filename\n"
  },
  {
    "path": "timm/data/readers/reader_image_tar.py",
    "content": "\"\"\" A dataset reader that reads single tarfile based datasets\n\nThis reader can read datasets consisting if a single tarfile containing images.\nI am planning to deprecated it in favour of ParerImageInTar.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport os\nimport tarfile\n\nfrom timm.utils.misc import natural_key\n\nfrom .class_map import load_class_map\nfrom .img_extensions import get_img_extensions\nfrom .reader import Reader\n\n\ndef extract_tarinfo(tarfile, class_to_idx=None, sort=True):\n    extensions = get_img_extensions(as_set=True)\n    files = []\n    labels = []\n    for ti in tarfile.getmembers():\n        if not ti.isfile():\n            continue\n        dirname, basename = os.path.split(ti.path)\n        label = os.path.basename(dirname)\n        ext = os.path.splitext(basename)[1]\n        if ext.lower() in extensions:\n            files.append(ti)\n            labels.append(label)\n    if class_to_idx is None:\n        unique_labels = set(labels)\n        sorted_labels = list(sorted(unique_labels, key=natural_key))\n        class_to_idx = {c: idx for idx, c in enumerate(sorted_labels)}\n    tarinfo_and_targets = [(f, class_to_idx[l]) for f, l in zip(files, labels) if l in class_to_idx]\n    if sort:\n        tarinfo_and_targets = sorted(tarinfo_and_targets, key=lambda k: natural_key(k[0].path))\n    return tarinfo_and_targets, class_to_idx\n\n\nclass ReaderImageTar(Reader):\n    \"\"\" Single tarfile dataset where classes are mapped to folders within tar\n    NOTE: This class is being deprecated in favour of the more capable ReaderImageInTar that can\n    operate on folders of tars or tars in tars.\n    \"\"\"\n    def __init__(self, root, class_map=''):\n        super().__init__()\n\n        class_to_idx = None\n        if class_map:\n            class_to_idx = load_class_map(class_map, root)\n        assert os.path.isfile(root)\n        self.root = root\n\n        with tarfile.open(root) as tf:  # cannot 
keep this open across processes, reopen later\n            self.samples, self.class_to_idx = extract_tarinfo(tf, class_to_idx)\n        self.imgs = self.samples\n        self.tarfile = None  # lazy init in __getitem__\n\n    def __getitem__(self, index):\n        if self.tarfile is None:\n            self.tarfile = tarfile.open(self.root)\n        tarinfo, target = self.samples[index]\n        fileobj = self.tarfile.extractfile(tarinfo)\n        return fileobj, target\n\n    def __len__(self):\n        return len(self.samples)\n\n    def _filename(self, index, basename=False, absolute=False):\n        filename = self.samples[index][0].name\n        if basename:\n            filename = os.path.basename(filename)\n        return filename\n"
  },
  {
    "path": "timm/data/readers/reader_tfds.py",
    "content": "\"\"\" Dataset reader that wraps TFDS datasets\n\nWraps many (most?) TFDS image-classification datasets\nfrom https://github.com/tensorflow/datasets\nhttps://www.tensorflow.org/datasets/catalog/overview#image_classification\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport math\nimport os\nimport sys\nfrom typing import Optional\n\nimport torch\nimport torch.distributed as dist\nfrom PIL import Image\n\ntry:\n    import tensorflow as tf\n    tf.config.set_visible_devices([], 'GPU')  # Hands off my GPU! (or pip install tensorflow-cpu)\n    import tensorflow_datasets as tfds\n    try:\n        tfds.even_splits('', 1, drop_remainder=False)  # non-buggy even_splits has drop_remainder arg\n        has_buggy_even_splits = False\n    except TypeError:\n        print(\"Warning: This version of tfds doesn't have the latest even_splits impl. \"\n              \"Please update or use tfds-nightly for better fine-grained split behaviour.\")\n        has_buggy_even_splits = True\n    # NOTE uncomment below if having file limit issues on dataset build (or alter your OS defaults)\n    # import resource\n    # low, high = resource.getrlimit(resource.RLIMIT_NOFILE)\n    # resource.setrlimit(resource.RLIMIT_NOFILE, (high, high))\nexcept ImportError as e:\n    print(e)\n    print(\"Please install tensorflow_datasets package `pip install tensorflow-datasets`.\")\n    raise e\n\nfrom .class_map import load_class_map\nfrom .reader import Reader\nfrom .shared_count import SharedCount\n\n\nMAX_TP_SIZE = int(os.environ.get('TFDS_TP_SIZE', 8))  # maximum TF threadpool size, for jpeg decodes and queuing activities\nSHUFFLE_SIZE = int(os.environ.get('TFDS_SHUFFLE_SIZE', 8192))  # samples to shuffle in DS queue\nPREFETCH_SIZE = int(os.environ.get('TFDS_PREFETCH_SIZE', 2048))  # samples to prefetch\n\n\n@tfds.decode.make_decoder()\ndef decode_example(serialized_image, feature, dct_method='INTEGER_ACCURATE', channels=3):\n    return tf.image.decode_jpeg(\n       
 serialized_image,\n        channels=channels,\n        dct_method=dct_method,\n    )\n\n\ndef even_split_indices(split, n, num_samples):\n    partitions = [round(i * num_samples / n) for i in range(n + 1)]\n    return [f\"{split}[{partitions[i]}:{partitions[i + 1]}]\" for i in range(n)]\n\n\ndef get_class_labels(info):\n    if 'label' not in info.features:\n        return {}\n    class_label = info.features['label']\n    class_to_idx = {n: class_label.str2int(n) for n in class_label.names}\n    return class_to_idx\n\n\nclass ReaderTfds(Reader):\n    \"\"\" Wrap Tensorflow Datasets for use in PyTorch\n\n    There several things to be aware of:\n      * To prevent excessive samples being dropped per epoch w/ distributed training or multiplicity of\n         dataloader workers, the train iterator wraps to avoid returning partial batches that trigger drop_last\n         https://github.com/pytorch/pytorch/issues/33413\n      * With PyTorch IterableDatasets, each worker in each replica operates in isolation, the final batch\n        from each worker could be a different size. For training this is worked around by option above, for\n        validation extra samples are inserted iff distributed mode is enabled so that the batches being reduced\n        across replicas are of same size. This will slightly alter the results, distributed validation will not be\n        100% correct. This is similar to common handling in DistributedSampler for normal Datasets but a bit worse\n        since there are up to N * J extra samples with IterableDatasets.\n      * The sharding (splitting of dataset into TFRecord) files imposes limitations on the number of\n        replicas and dataloader workers you can use. For really small datasets that only contain a few shards\n        you may have to train non-distributed w/ 1-2 dataloader workers. 
This is likely not a huge concern as the\n        benefit of distributed training or fast dataloading should be much less for small datasets.\n      * This wrapper is currently configured to return individual, decompressed image samples from the TFDS\n        dataset. The augmentation (transforms) and batching is still done in PyTorch. It would be possible\n        to specify TF augmentation fn and return augmented batches w/ some modifications to other downstream\n        components.\n\n    \"\"\"\n\n    def __init__(\n            self,\n            name,\n            root=None,\n            split='train',\n            class_map=None,\n            is_training=False,\n            batch_size=1,\n            download=False,\n            repeats=0,\n            seed=42,\n            input_key='image',\n            input_img_mode='RGB',\n            target_key='label',\n            target_img_mode='',\n            prefetch_size=None,\n            shuffle_size=None,\n            max_threadpool_size=None\n    ):\n        \"\"\" Tensorflow-datasets Wrapper\n\n        Args:\n            root: root data dir (ie your TFDS_DATA_DIR. 
not dataset specific sub-dir)\n            name: tfds dataset name (eg `imagenet2012`)\n            split: tfds dataset split (can use all TFDS split strings eg `train[:10%]`)\n            is_training: training mode, shuffle enabled, dataset len rounded by batch_size\n            batch_size: batch_size to use to unsure total samples % batch_size == 0 in training across all dis nodes\n            download: download and build TFDS dataset if set, otherwise must use tfds CLI\n            repeats: iterate through (repeat) the dataset this many times per iteration (once if 0 or 1)\n            seed: common seed for shard shuffle across all distributed/worker instances\n            input_key: name of Feature to return as data (input)\n            input_img_mode: image mode if input is an image (currently PIL mode string)\n            target_key: name of Feature to return as target (label)\n            target_img_mode: image mode if target is an image (currently PIL mode string)\n            prefetch_size: override default tf.data prefetch buffer size\n            shuffle_size: override default tf.data shuffle buffer size\n            max_threadpool_size: override default threadpool size for tf.data\n        \"\"\"\n        super().__init__()\n        self.root = root\n        self.split = split\n        self.is_training = is_training\n        self.batch_size = batch_size\n        self.repeats = repeats\n        self.common_seed = seed  # a seed that's fixed across all worker / distributed instances\n\n        # performance settings\n        self.prefetch_size = prefetch_size or PREFETCH_SIZE\n        self.shuffle_size = shuffle_size or SHUFFLE_SIZE\n        self.max_threadpool_size = max_threadpool_size or MAX_TP_SIZE\n\n        # TFDS builder and split information\n        self.input_key = input_key  # FIXME support tuples / lists of inputs and targets and full range of Feature\n        self.input_img_mode = input_img_mode\n        self.target_key = target_key\n        
self.target_img_mode = target_img_mode  # for dense pixel targets\n        self.builder = tfds.builder(name, data_dir=root)\n        # NOTE: the tfds command line app can be used download & prepare datasets if you don't enable download flag\n        if download:\n            self.builder.download_and_prepare()\n        self.remap_class = False\n        if class_map:\n            self.class_to_idx = load_class_map(class_map)\n            self.remap_class = True\n        else:\n            self.class_to_idx = get_class_labels(self.builder.info) if self.target_key == 'label' else {}\n        self.split_info = self.builder.info.splits[split]\n        self.num_samples = self.split_info.num_examples\n\n        # Distributed world state\n        self.dist_rank = 0\n        self.dist_num_replicas = 1\n        if dist.is_available() and dist.is_initialized() and dist.get_world_size() > 1:\n            self.dist_rank = dist.get_rank()\n            self.dist_num_replicas = dist.get_world_size()\n\n        # Attributes that are updated in _lazy_init, including the tf.data pipeline itself\n        self.global_num_workers = 1\n        self.num_workers = 1\n        self.worker_info = None\n        self.worker_seed = 0  # seed unique to each work instance\n        self.subsplit = None  # set when data is distributed across workers using sub-splits\n        self.ds = None  # initialized lazily on each dataloader worker process\n        self.init_count = 0  # number of ds TF data pipeline initializations\n        self.epoch_count = SharedCount()\n        # FIXME need to determine if reinit_each_iter is necessary. 
I'm don't completely trust behaviour\n        #  of `shuffle_reshuffle_each_iteration` when there are multiple workers / nodes across epochs\n        self.reinit_each_iter = self.is_training\n\n    def set_epoch(self, count):\n        self.epoch_count.value = count\n\n    def set_loader_cfg(\n            self,\n            num_workers: Optional[int] = None,\n    ):\n        if self.ds is not None:\n            return\n        if num_workers is not None:\n            self.num_workers = num_workers\n            self.global_num_workers = self.dist_num_replicas * self.num_workers\n\n    def _lazy_init(self):\n        \"\"\" Lazily initialize the dataset.\n\n        This is necessary to init the Tensorflow dataset pipeline in the (dataloader) process that\n        will be using the dataset instance. The __init__ method is called on the main process,\n        this will be called in a dataloader worker process.\n\n        NOTE: There will be problems if you try to re-use this dataset across different loader/worker\n        instances once it has been initialized. 
Do not call any dataset methods that can call _lazy_init\n        before it is passed to dataloader.\n        \"\"\"\n        worker_info = torch.utils.data.get_worker_info()\n\n        # setup input context to split dataset across distributed processes\n        num_workers = 1\n        global_worker_id = 0\n        if worker_info is not None:\n            self.worker_info = worker_info\n            self.worker_seed = worker_info.seed\n            self.num_workers = worker_info.num_workers\n            self.global_num_workers = self.dist_num_replicas * self.num_workers\n            global_worker_id = self.dist_rank * self.num_workers + worker_info.id\n\n            \"\"\" Data sharding\n            InputContext will assign subset of underlying TFRecord files to each 'pipeline' if used.\n            My understanding is that using split, the underling TFRecord files will shuffle (shuffle_files=True)\n            between the splits each iteration, but that understanding could be wrong.\n\n            I am currently using a mix of InputContext shard assignment and fine-grained sub-splits for distributing\n            the data across workers. For training InputContext is used to assign shards to nodes unless num_shards\n            in dataset < total number of workers. 
Otherwise sub-split API is used for datasets without enough shards or\n            for validation where we can't drop samples and need to avoid minimize uneven splits to avoid padding.\n            \"\"\"\n            should_subsplit = self.global_num_workers > 1 and (\n                    self.split_info.num_shards < self.global_num_workers or not self.is_training)\n            if should_subsplit:\n                # split the dataset w/o using sharding for more even samples / worker, can result in less optimal\n                # read patterns for distributed training (overlap across shards) so better to use InputContext there\n                if has_buggy_even_splits:\n                    # my even_split workaround doesn't work on subsplits, upgrade tfds!\n                    if not isinstance(self.split_info, tfds.core.splits.SubSplitInfo):\n                        subsplits = even_split_indices(self.split, self.global_num_workers, self.num_samples)\n                        self.subsplit = subsplits[global_worker_id]\n                else:\n                    subsplits = tfds.even_splits(self.split, self.global_num_workers)\n                    self.subsplit = subsplits[global_worker_id]\n\n        input_context = None\n        if self.global_num_workers > 1 and self.subsplit is None:\n            # set input context to divide shards among distributed replicas\n            input_context = tf.distribute.InputContext(\n                num_input_pipelines=self.global_num_workers,\n                input_pipeline_id=global_worker_id,\n                num_replicas_in_sync=self.dist_num_replicas  # FIXME does this arg have any impact?\n            )\n        read_config = tfds.ReadConfig(\n            shuffle_seed=self.common_seed + self.epoch_count.value,\n            shuffle_reshuffle_each_iteration=True,\n            input_context=input_context,\n        )\n        ds = self.builder.as_dataset(\n            split=self.subsplit or self.split,\n            
shuffle_files=self.is_training,\n            decoders=dict(image=decode_example(channels=1 if self.input_img_mode == 'L' else 3)),\n            read_config=read_config,\n        )\n        # avoid overloading threading w/ combo of TF ds threads + PyTorch workers\n        options = tf.data.Options()\n        thread_member = 'threading' if hasattr(options, 'threading') else 'experimental_threading'\n        getattr(options, thread_member).private_threadpool_size = max(1, self.max_threadpool_size // self.num_workers)\n        getattr(options, thread_member).max_intra_op_parallelism = 1\n        ds = ds.with_options(options)\n        if self.is_training or self.repeats > 1:\n            # to prevent excessive drop_last batch behaviour w/ IterableDatasets\n            # see warnings at https://pytorch.org/docs/stable/data.html#multi-process-data-loading\n            ds = ds.repeat()  # allow wrap around and break iteration manually\n        if self.is_training:\n            ds = ds.shuffle(min(self.num_samples, self.shuffle_size) // self.global_num_workers, seed=self.worker_seed)\n        ds = ds.prefetch(min(self.num_samples // self.global_num_workers, self.prefetch_size))\n        self.ds = tfds.as_numpy(ds)\n        self.init_count += 1\n\n    def _num_samples_per_worker(self):\n        num_worker_samples = \\\n            max(1, self.repeats) * self.num_samples / max(self.global_num_workers, self.dist_num_replicas)\n        if self.is_training or self.dist_num_replicas > 1:\n            num_worker_samples = math.ceil(num_worker_samples)\n        if self.is_training:\n            num_worker_samples = math.ceil(num_worker_samples / self.batch_size) * self.batch_size\n        return int(num_worker_samples)\n\n    def __iter__(self):\n        if self.ds is None or self.reinit_each_iter:\n            self._lazy_init()\n\n        # Compute a rounded up sample count that is used to:\n        #   1. 
make batches even cross workers & replicas in distributed validation.\n        #     This adds extra samples and will slightly alter validation results.\n        #   2. determine loop ending condition in training w/ repeat enabled so that only full batch_size\n        #     batches are produced (underlying tfds iter wraps around)\n        target_sample_count = self._num_samples_per_worker()\n\n        # Iterate until exhausted or sample count hits target when training (ds.repeat enabled)\n        sample_count = 0\n        for sample in self.ds:\n            input_data = sample[self.input_key]\n            if self.input_img_mode:\n                if self.input_img_mode == 'L' and input_data.ndim == 3:\n                    input_data = input_data[:, :, 0]\n                input_data = Image.fromarray(input_data, mode=self.input_img_mode)\n            target_data = sample[self.target_key]\n            if self.target_img_mode:\n                # dense pixel target\n                target_data = Image.fromarray(target_data, mode=self.target_img_mode)\n            elif self.remap_class:\n                target_data = self.class_to_idx[target_data]\n            yield input_data, target_data\n            sample_count += 1\n            if self.is_training and sample_count >= target_sample_count:\n                # Need to break out of loop when repeat() is enabled for training w/ oversampling\n                # this results in extra samples per epoch but seems more desirable than dropping\n                # up to N*J batches per epoch (where N = num distributed processes, and J = num worker processes)\n                break\n\n        # Pad across distributed nodes (make counts equal by adding samples)\n        if not self.is_training and self.dist_num_replicas > 1 and self.subsplit is not None and \\\n                0 < sample_count < target_sample_count:\n            # Validation batch padding only done for distributed training where results are reduced across nodes.\n   
         # For single process case, it won't matter if workers return different batch sizes.\n            # If using input_context or % based splits, sample count can vary significantly across workers and this\n            # approach should not be used (hence disabled if self.subsplit isn't set).\n            while sample_count < target_sample_count:\n                yield input_data, target_data  # yield prev sample again\n                sample_count += 1\n\n    def __len__(self):\n        num_samples = self._num_samples_per_worker() * self.num_workers\n        return num_samples\n\n    def _filename(self, index, basename=False, absolute=False):\n        assert False, \"Not supported\"  # no random access to samples\n\n    def filenames(self, basename=False, absolute=False):\n        \"\"\" Return all filenames in dataset, overrides base\"\"\"\n        if self.ds is None:\n            self._lazy_init()\n        names = []\n        for sample in self.ds:\n            if len(names) > self.num_samples:\n                break  # safety for ds.repeat() case\n            if 'file_name' in sample:\n                name = sample['file_name']\n            elif 'filename' in sample:\n                name = sample['filename']\n            elif 'id' in sample:\n                name = sample['id']\n            else:\n                assert False, \"No supported name field present\"\n            names.append(name)\n        return names\n"
  },
  {
    "path": "timm/data/readers/reader_wds.py",
    "content": "\"\"\" Dataset reader for webdataset\n\nHacked together by / Copyright 2022 Ross Wightman\n\"\"\"\nimport io\nimport json\nimport logging\nimport math\nimport os\nimport random\nimport sys\nfrom dataclasses import dataclass\nfrom functools import partial\nfrom itertools import islice\nfrom typing import Any, Callable, Dict, List, Optional, Tuple\n\nimport torch\nimport torch.distributed as dist\nimport yaml\nfrom PIL import Image\nfrom torch.utils.data import Dataset, IterableDataset, get_worker_info\n\ntry:\n    import webdataset as wds\n    from webdataset.filters import _shuffle, getfirst\n    from webdataset.shardlists import expand_urls\n    from webdataset.tariterators import base_plus_ext, url_opener, tar_file_expander, valid_sample\nexcept ImportError:\n    wds = None\n    expand_urls = None\n\nfrom .class_map import load_class_map\nfrom .reader import Reader\nfrom .shared_count import SharedCount\n\n_logger = logging.getLogger(__name__)\n\nSAMPLE_SHUFFLE_SIZE = int(os.environ.get('WDS_SHUFFLE_SIZE', 8192))\nSAMPLE_INITIAL_SIZE = int(os.environ.get('WDS_INITIAL_SIZE', 2048))\n\n\ndef _load_info(root, names=('_info.json', 'info.json')):\n    if isinstance(names, str):\n        names = (names,)\n    tried = []\n    err_str = ''\n    for n in names:\n        full_path = os.path.join(root, n)\n        try:\n            tried.append(full_path)\n            with wds.gopen(full_path) as f:\n                if n.endswith('.json'):\n                    info_dict = json.load(f)\n                else:\n                    info_dict = yaml.safe_load(f)\n            return info_dict\n        except Exception as e:\n            err_str = str(e)\n\n    _logger.warning(\n        f'Dataset info file not found at {tried}. Error: {err_str}. 
'\n        'Falling back to provided split and size arg.')\n    return {}\n\n\n@dataclass\nclass SplitInfo:\n    num_samples: int\n    filenames: Tuple[str]\n    shard_lengths: Tuple[int] = ()\n    alt_label: str = ''\n    name: str = ''\n\n\ndef _parse_split_info(split: str, info: Dict):\n    def _info_convert(dict_info):\n        return SplitInfo(\n            num_samples=dict_info['num_samples'],\n            filenames=tuple(dict_info['filenames']),\n            shard_lengths=tuple(dict_info['shard_lengths']),\n            alt_label=dict_info.get('alt_label', ''),\n            name=dict_info['name'],\n        )\n\n    if 'tar' in split or '..' in split:\n        # split in WDS string braceexpand format, sample count can be included with a | separator\n        # ex: `dataset-split-{0000..9999}.tar|100000` for 9999 shards, covering 100,000 samples\n        split = split.split('|')\n        num_samples = 0\n        split_name = ''\n        if len(split) > 1:\n            num_samples = int(split[1])\n        split = split[0]\n        if '::' not in split:\n            split_parts = split.split('-', 3)\n            split_idx = len(split_parts) - 1\n            if split_idx and 'splits' in info and split_parts[split_idx] in info['splits']:\n                split_name = split_parts[split_idx]\n\n        split_filenames = expand_urls(split)\n        if split_name:\n            split_info = info['splits'][split_name]\n            if not num_samples:\n                _fc = {f: c for f, c in zip(split_info['filenames'], split_info['shard_lengths'])}\n                num_samples = sum(_fc[f] for f in split_filenames)\n                split_info['filenames'] = tuple(_fc.keys())\n                split_info['shard_lengths'] = tuple(_fc.values())\n                split_info['num_samples'] = num_samples\n            split_info = _info_convert(split_info)\n        else:\n            split_info = SplitInfo(\n                name=split_name,\n                
num_samples=num_samples,\n                filenames=split_filenames,\n            )\n    else:\n        if 'splits' not in info or split not in info['splits']:\n            raise RuntimeError(f\"split {split} not found in info ({info.get('splits', {}).keys()})\")\n        split = split\n        split_info = info['splits'][split]\n        split_info = _info_convert(split_info)\n\n    return split_info\n\n\ndef log_and_continue(exn):\n    \"\"\"Call in an exception handler to ignore exceptions, issue a warning, and continue.\"\"\"\n    _logger.warning(f'Handling webdataset error ({repr(exn)}). Ignoring.')\n    # NOTE: try force an exit on errors that are clearly code / config and not transient\n    if isinstance(exn, TypeError):\n        raise exn\n    return True\n\n\ndef _decode(\n        sample,\n        image_key='jpg',\n        image_mode='RGB',\n        target_key='cls',\n        alt_label=''\n):\n    \"\"\" Custom sample decode\n    * decode and convert PIL Image\n    * cls byte string label to int\n    * pass through JSON byte string (if it exists) without parse\n    \"\"\"\n    # decode class label, skip if alternate label not valid\n    if alt_label:\n        # alternative labels are encoded in json metadata\n        meta = json.loads(sample['json'])\n        class_label = int(meta[alt_label])\n        if class_label < 0:\n            # skipped labels currently encoded as -1, may change to a null/None value\n            return None\n    else:\n        class_label = int(sample[target_key])\n\n    # decode image\n    img = getfirst(sample, image_key)\n    with io.BytesIO(img) as b:\n        img = Image.open(b)\n        img.load()\n    if image_mode:\n        img = img.convert(image_mode)\n\n    # json passed through in undecoded state\n    decoded = dict(jpg=img, cls=class_label, json=sample.get('json', None))\n    return decoded\n\n\ndef pytorch_worker_seed():\n    \"\"\"get dataloader worker seed from pytorch\"\"\"\n    worker_info = get_worker_info()\n    
if worker_info is not None:\n        # favour the seed already created for pytorch dataloader workers if it exists\n        return worker_info.seed\n    # fallback to wds rank based seed\n    return wds.utils.pytorch_worker_seed()\n\n\nif wds is not None:\n    # conditional to avoid mandatory wds import (via inheritance of wds.PipelineStage)\n\n    class detshuffle2(wds.PipelineStage):\n        def __init__(\n                self,\n                bufsize=1000,\n                initial=100,\n                seed=0,\n                epoch=-1,\n        ):\n            self.bufsize = bufsize\n            self.initial = initial\n            self.seed = seed\n            self.epoch = epoch\n\n        def run(self, src):\n            if isinstance(self.epoch, SharedCount):\n                epoch = self.epoch.value\n            else:\n                # NOTE: this is epoch tracking is problematic in a multiprocess (dataloader workers or train)\n                # situation as different workers may wrap at different times (or not at all).\n                self.epoch += 1\n                epoch = self.epoch\n\n            if self.seed < 0:\n                seed = pytorch_worker_seed() + epoch\n            else:\n                seed = self.seed + epoch\n            # _logger.info(f'shuffle seed: {self.seed}, {seed}, epoch: {epoch}')  # FIXME temporary\n            rng = random.Random(seed)\n            return _shuffle(src, self.bufsize, self.initial, rng)\n\nelse:\n    detshuffle2 = None\n\n\nclass ResampledShards2(IterableDataset):\n    \"\"\"An iterable dataset yielding a list of urls.\"\"\"\n\n    def __init__(\n        self,\n        urls,\n        nshards=sys.maxsize,\n        worker_seed=None,\n        deterministic=True,\n        epoch=-1,\n    ):\n        \"\"\"Sample shards from the shard list with replacement.\n\n        :param urls: a list of URLs as a Python list or brace notation string\n        \"\"\"\n        super().__init__()\n        urls = 
wds.shardlists.expand_urls(urls)\n        self.urls = urls\n        assert isinstance(self.urls[0], str)\n        self.nshards = nshards\n        self.rng = random.Random()\n        self.worker_seed = pytorch_worker_seed if worker_seed is None else worker_seed\n        self.deterministic = deterministic\n        self.epoch = epoch\n\n    def __iter__(self):\n        \"\"\"Return an iterator over the shards.\"\"\"\n        if isinstance(self.epoch, SharedCount):\n            epoch = self.epoch.value\n        else:\n            # NOTE: this is epoch tracking is problematic in a multiprocess (dataloader workers or train)\n            # situation as different workers may wrap at different times (or not at all).\n            self.epoch += 1\n            epoch = self.epoch\n\n        if self.deterministic:\n            # reset seed w/ epoch if deterministic, worker seed should be deterministic due to arg.seed\n            self.rng = random.Random(self.worker_seed() + epoch)\n\n        for _ in range(self.nshards):\n            index = self.rng.randint(0, len(self.urls) - 1)\n            yield dict(url=self.urls[index])\n\n\nclass ReaderWds(Reader):\n    def __init__(\n            self,\n            root: str,\n            name: Optional[str] = None,\n            split: str = 'train',\n            is_training: bool = False,\n            num_samples: Optional[int] = None,\n            batch_size: int = 1,\n            repeats: int = 0,\n            seed: int = 42,\n            class_map: Optional[dict] = None,\n            input_key: str = 'jpg;png;webp',\n            input_img_mode: str = 'RGB',\n            target_key: str = 'cls',\n            target_img_mode: str = '',\n            filename_key: str = 'filename',\n            sample_shuffle_size: Optional[int] = None,\n            sample_initial_size: Optional[int] = None,\n    ):\n        super().__init__()\n        if wds is None:\n            raise RuntimeError(\n                'Please install webdataset 0.2.x 
package `pip install git+https://github.com/webdataset/webdataset`.')\n        self.root = root\n        self.is_training = is_training\n        self.batch_size = batch_size\n        self.repeats = repeats\n        self.common_seed = seed  # a seed that's fixed across all worker / distributed instances\n        self.shard_shuffle_size = 500\n        self.sample_shuffle_size = sample_shuffle_size or SAMPLE_SHUFFLE_SIZE\n        self.sample_initial_size = sample_initial_size or SAMPLE_INITIAL_SIZE\n\n        self.input_key = input_key\n        self.input_img_mode = input_img_mode\n        self.target_key = target_key\n        self.filename_key = filename_key\n        self.key_ext = '.JPEG'  # extension to add to key for original filenames (DS specific, default ImageNet)\n\n        self.info = _load_info(self.root)\n        self.split_info = _parse_split_info(split, self.info)\n        if num_samples is not None:\n            self.num_samples = num_samples\n        else:\n            self.num_samples = self.split_info.num_samples\n        if is_training and not self.num_samples:\n            raise RuntimeError(f'Invalid split definition, num_samples not specified in train mode.')\n        self.remap_class = False\n        if class_map:\n            self.class_to_idx = load_class_map(class_map)\n            self.remap_class = True\n        else:\n            self.class_to_idx = {}\n\n        # Distributed world state\n        self.dist_rank = 0\n        self.dist_num_replicas = 1\n        if dist.is_available() and dist.is_initialized() and dist.get_world_size() > 1:\n            self.dist_rank = dist.get_rank()\n            self.dist_num_replicas = dist.get_world_size()\n\n        # Attributes that are updated in _lazy_init\n        self.worker_info = None\n        self.worker_id = 0\n        self.worker_seed = seed  # seed unique to each worker instance\n        self.num_workers = 1\n        self.global_worker_id = 0\n        self.global_num_workers = 1\n        
self.init_count = 0\n        self.epoch_count = SharedCount()\n\n        # DataPipeline is lazy init, the majority of WDS DataPipeline could be init here, BUT, shuffle seed\n        # is not handled in manner where it can be deterministic for each worker AND initialized up front\n        self.ds = None\n\n    def set_epoch(self, count):\n        self.epoch_count.value = count\n\n    def set_loader_cfg(\n            self,\n            num_workers: Optional[int] = None,\n    ):\n        if self.ds is not None:\n            return\n        if num_workers is not None:\n            self.num_workers = num_workers\n            self.global_num_workers = self.dist_num_replicas * self.num_workers\n\n    def _lazy_init(self):\n        \"\"\" Lazily initialize worker (in worker processes)\n        \"\"\"\n        if self.worker_info is None:\n            worker_info = torch.utils.data.get_worker_info()\n            if worker_info is not None:\n                self.worker_info = worker_info\n                self.worker_id = worker_info.id\n                self.worker_seed = worker_info.seed\n                self.num_workers = worker_info.num_workers\n            self.global_num_workers = self.dist_num_replicas * self.num_workers\n            self.global_worker_id = self.dist_rank * self.num_workers + self.worker_id\n\n        # init data pipeline\n        abs_shard_filenames = [os.path.join(self.root, f) for f in self.split_info.filenames]\n        pipeline = [wds.SimpleShardList(abs_shard_filenames)]\n        # at this point we have an iterator over all the shards\n        if self.is_training:\n            pipeline.extend([\n                detshuffle2(\n                    self.shard_shuffle_size,\n                    seed=self.common_seed,\n                    epoch=self.epoch_count,\n                ),\n                self._split_by_node_and_worker,\n                # at this point, we have an iterator over the shards assigned to each worker\n                
wds.tarfile_to_samples(handler=log_and_continue),\n                wds.shuffle(\n                    bufsize=self.sample_shuffle_size,\n                    initial=self.sample_initial_size,\n                    rng=random.Random(self.worker_seed) # this is why we lazy-init whole DataPipeline\n                ),\n            ])\n        else:\n            pipeline.extend([\n                self._split_by_node_and_worker,\n                # at this point, we have an iterator over the shards assigned to each worker\n                wds.tarfile_to_samples(handler=log_and_continue),\n            ])\n        pipeline.extend([\n            wds.map(\n                partial(\n                    _decode,\n                    image_key=self.input_key,\n                    image_mode=self.input_img_mode,\n                    alt_label=self.split_info.alt_label,\n                ),\n                handler=log_and_continue,\n            ),\n            wds.rename(image=self.input_key, target=self.target_key)\n        ])\n        self.ds = wds.DataPipeline(*pipeline)\n\n    def _split_by_node_and_worker(self, src):\n        if self.global_num_workers > 1:\n            for s in islice(src, self.global_worker_id, None, self.global_num_workers):\n                yield s\n        else:\n            for s in src:\n                yield s\n\n    def _num_samples_per_worker(self):\n        num_worker_samples = self.num_samples / max(self.global_num_workers, self.dist_num_replicas)\n        if self.is_training or self.dist_num_replicas > 1:\n            num_worker_samples = math.ceil(num_worker_samples)\n        if self.is_training:\n            num_worker_samples = math.ceil(num_worker_samples / self.batch_size) * self.batch_size\n        return int(num_worker_samples)\n\n    def __iter__(self):\n        if self.ds is None:\n            self._lazy_init()\n\n        num_worker_samples = self._num_samples_per_worker()\n        if self.is_training or self.dist_num_replicas > 1:\n        
    # NOTE: doing distributed validation w/ WDS is messy, hard to meet constraints that\n            # same # of batches needed across all replicas w/ seeing each sample once.\n            # with_epoch() is simple but could miss a shard's worth of samples in some workers,\n            # and duplicate in others. Best to keep num DL workers low and a divisor of #val shards.\n            ds = self.ds.with_epoch(num_worker_samples)\n        else:\n            ds = self.ds\n\n        i = 0\n        # _logger.info(f'start {i}, {self.worker_id}')  # FIXME temporary debug\n        for sample in ds:\n            target = sample['target']\n            if self.remap_class:\n                target = self.class_to_idx[target]\n            yield sample['image'], target\n            i += 1\n        # _logger.info(f'end {i}, {self.worker_id}')  # FIXME temporary debug\n\n    def __len__(self):\n        num_samples = self._num_samples_per_worker() * self.num_workers\n        return num_samples\n\n    def _filename(self, index, basename=False, absolute=False):\n        assert False, \"Not supported\"  # no random access to examples\n\n    def filenames(self, basename=False, absolute=False):\n        \"\"\" Return all filenames in dataset, overrides base\"\"\"\n        if self.ds is None:\n            self._lazy_init()\n\n        names = []\n        for sample in self.ds:\n            if self.filename_key in sample:\n                name = sample[self.filename_key]\n            elif '__key__' in sample:\n                name = sample['__key__'] + self.key_ext\n            else:\n                assert False, \"No supported name field present\"\n            names.append(name)\n            if len(names) >= self.num_samples:\n                break  # safety for ds.repeat() case\n        return names\n"
  },
  {
    "path": "timm/data/readers/shared_count.py",
    "content": "from multiprocessing import Value\n\n\nclass SharedCount:\n    def __init__(self, epoch: int = 0):\n        self.shared_epoch = Value('i', epoch)\n\n    @property\n    def value(self):\n        return self.shared_epoch.value\n\n    @value.setter\n    def value(self, epoch):\n        self.shared_epoch.value = epoch\n"
  },
  {
    "path": "timm/data/real_labels.py",
    "content": "\"\"\" Real labels evaluator for ImageNet\nPaper: `Are we done with ImageNet?` - https://arxiv.org/abs/2006.07159\nBased on Numpy example at https://github.com/google-research/reassessed-imagenet\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport os\nimport json\nimport numpy as np\nimport pkgutil\n\n\nclass RealLabelsImagenet:\n\n    def __init__(self, filenames, real_json=None, topk=(1, 5)):\n        if real_json is not None:\n            with open(real_json) as real_labels:\n                real_labels = json.load(real_labels)\n        else:\n            real_labels = json.loads(\n                pkgutil.get_data(__name__, os.path.join('_info', 'imagenet_real_labels.json')).decode('utf-8'))\n        real_labels = {f'ILSVRC2012_val_{i + 1:08d}.JPEG': labels for i, labels in enumerate(real_labels)}\n        self.real_labels = real_labels\n        self.filenames = filenames\n        assert len(self.filenames) == len(self.real_labels)\n        self.topk = topk\n        self.is_correct = {k: [] for k in topk}\n        self.sample_idx = 0\n\n    def add_result(self, output):\n        maxk = max(self.topk)\n        _, pred_batch = output.topk(maxk, 1, True, True)\n        pred_batch = pred_batch.cpu().numpy()\n        for pred in pred_batch:\n            filename = self.filenames[self.sample_idx]\n            filename = os.path.basename(filename)\n            if self.real_labels[filename]:\n                for k in self.topk:\n                    self.is_correct[k].append(\n                        any([p in self.real_labels[filename] for p in pred[:k]]))\n            self.sample_idx += 1\n\n    def get_accuracy(self, k=None):\n        if k is None:\n            return {k: float(np.mean(self.is_correct[k])) * 100 for k in self.topk}\n        else:\n            return float(np.mean(self.is_correct[k])) * 100\n"
  },
  {
    "path": "timm/data/tf_preprocessing.py",
    "content": "\"\"\" Tensorflow Preprocessing Adapter\n\nAllows use of Tensorflow preprocessing pipeline in PyTorch Transform\n\nCopyright of original Tensorflow code below.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\n\n# Copyright 2018 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"ImageNet preprocessing for MnasNet.\"\"\"\nimport tensorflow.compat.v1 as tf\nimport numpy as np\n\nIMAGE_SIZE = 224\nCROP_PADDING = 32\n\ntf.compat.v1.disable_eager_execution()\n\ndef distorted_bounding_box_crop(image_bytes,\n                                bbox,\n                                min_object_covered=0.1,\n                                aspect_ratio_range=(0.75, 1.33),\n                                area_range=(0.05, 1.0),\n                                max_attempts=100,\n                                scope=None):\n    \"\"\"Generates cropped_image using one of the bboxes randomly distorted.\n\n    See `tf.image.sample_distorted_bounding_box` for more documentation.\n\n    Args:\n      image_bytes: `Tensor` of binary image data.\n      bbox: `Tensor` of bounding boxes arranged `[1, num_boxes, coords]`\n          where each coordinate is [0, 1) and the coordinates are arranged\n          as `[ymin, xmin, ymax, xmax]`. 
If num_boxes is 0 then use the whole\n          image.\n      min_object_covered: An optional `float`. Defaults to `0.1`. The cropped\n          area of the image must contain at least this fraction of any bounding\n          box supplied.\n      aspect_ratio_range: An optional list of `float`s. The cropped area of the\n          image must have an aspect ratio = width / height within this range.\n      area_range: An optional list of `float`s. The cropped area of the image\n          must contain a fraction of the supplied image within in this range.\n      max_attempts: An optional `int`. Number of attempts at generating a cropped\n          region of the image of the specified constraints. After `max_attempts`\n          failures, return the entire image.\n      scope: Optional `str` for name scope.\n    Returns:\n      cropped image `Tensor`\n    \"\"\"\n    with tf.name_scope(scope, 'distorted_bounding_box_crop', [image_bytes, bbox]):\n        shape = tf.image.extract_jpeg_shape(image_bytes)\n        sample_distorted_bounding_box = tf.image.sample_distorted_bounding_box(\n            shape,\n            bounding_boxes=bbox,\n            min_object_covered=min_object_covered,\n            aspect_ratio_range=aspect_ratio_range,\n            area_range=area_range,\n            max_attempts=max_attempts,\n            use_image_if_no_bounding_boxes=True)\n        bbox_begin, bbox_size, _ = sample_distorted_bounding_box\n\n        # Crop the image to the specified bounding box.\n        offset_y, offset_x, _ = tf.unstack(bbox_begin)\n        target_height, target_width, _ = tf.unstack(bbox_size)\n        crop_window = tf.stack([offset_y, offset_x, target_height, target_width])\n        image = tf.image.decode_and_crop_jpeg(image_bytes, crop_window, channels=3)\n\n        return image\n\n\ndef _at_least_x_are_equal(a, b, x):\n    \"\"\"At least `x` of `a` and `b` `Tensors` are equal.\"\"\"\n    match = tf.equal(a, b)\n    match = tf.cast(match, tf.int32)\n    return 
tf.greater_equal(tf.reduce_sum(match), x)\n\n\ndef _decode_and_random_crop(image_bytes, image_size, resize_method):\n    \"\"\"Make a random crop of image_size.\"\"\"\n    bbox = tf.constant([0.0, 0.0, 1.0, 1.0], dtype=tf.float32, shape=[1, 1, 4])\n    image = distorted_bounding_box_crop(\n        image_bytes,\n        bbox,\n        min_object_covered=0.1,\n        aspect_ratio_range=(3. / 4, 4. / 3.),\n        area_range=(0.08, 1.0),\n        max_attempts=10,\n        scope=None)\n    original_shape = tf.image.extract_jpeg_shape(image_bytes)\n    bad = _at_least_x_are_equal(original_shape, tf.shape(image), 3)\n\n    image = tf.cond(\n        bad,\n        lambda: _decode_and_center_crop(image_bytes, image_size),\n        lambda: tf.image.resize([image], [image_size, image_size], resize_method)[0])\n\n    return image\n\n\ndef _decode_and_center_crop(image_bytes, image_size, resize_method):\n    \"\"\"Crops to center of image with padding then scales image_size.\"\"\"\n    shape = tf.image.extract_jpeg_shape(image_bytes)\n    image_height = shape[0]\n    image_width = shape[1]\n\n    padded_center_crop_size = tf.cast(\n        ((image_size / (image_size + CROP_PADDING)) *\n         tf.cast(tf.minimum(image_height, image_width), tf.float32)),\n        tf.int32)\n\n    offset_height = ((image_height - padded_center_crop_size) + 1) // 2\n    offset_width = ((image_width - padded_center_crop_size) + 1) // 2\n    crop_window = tf.stack([offset_height, offset_width,\n                            padded_center_crop_size, padded_center_crop_size])\n    image = tf.image.decode_and_crop_jpeg(image_bytes, crop_window, channels=3)\n    image = tf.image.resize([image], [image_size, image_size], resize_method)[0]\n\n    return image\n\n\ndef _flip(image):\n    \"\"\"Random horizontal image flip.\"\"\"\n    image = tf.image.random_flip_left_right(image)\n    return image\n\n\ndef preprocess_for_train(image_bytes, use_bfloat16, image_size=IMAGE_SIZE, interpolation='bicubic'):\n    
\"\"\"Preprocesses the given image for evaluation.\n\n    Args:\n      image_bytes: `Tensor` representing an image binary of arbitrary size.\n      use_bfloat16: `bool` for whether to use bfloat16.\n      image_size: image size.\n      interpolation: image interpolation method\n\n    Returns:\n      A preprocessed image `Tensor`.\n    \"\"\"\n    resize_method = tf.image.ResizeMethod.BICUBIC if interpolation == 'bicubic' else tf.image.ResizeMethod.BILINEAR\n    image = _decode_and_random_crop(image_bytes, image_size, resize_method)\n    image = _flip(image)\n    image = tf.reshape(image, [image_size, image_size, 3])\n    image = tf.image.convert_image_dtype(\n        image, dtype=tf.bfloat16 if use_bfloat16 else tf.float32)\n    return image\n\n\ndef preprocess_for_eval(image_bytes, use_bfloat16, image_size=IMAGE_SIZE, interpolation='bicubic'):\n    \"\"\"Preprocesses the given image for evaluation.\n\n    Args:\n      image_bytes: `Tensor` representing an image binary of arbitrary size.\n      use_bfloat16: `bool` for whether to use bfloat16.\n      image_size: image size.\n      interpolation: image interpolation method\n\n    Returns:\n      A preprocessed image `Tensor`.\n    \"\"\"\n    resize_method = tf.image.ResizeMethod.BICUBIC if interpolation == 'bicubic' else tf.image.ResizeMethod.BILINEAR\n    image = _decode_and_center_crop(image_bytes, image_size, resize_method)\n    image = tf.reshape(image, [image_size, image_size, 3])\n    image = tf.image.convert_image_dtype(\n        image, dtype=tf.bfloat16 if use_bfloat16 else tf.float32)\n    return image\n\n\ndef preprocess_image(image_bytes,\n                     is_training=False,\n                     use_bfloat16=False,\n                     image_size=IMAGE_SIZE,\n                     interpolation='bicubic'):\n    \"\"\"Preprocesses the given image.\n\n    Args:\n      image_bytes: `Tensor` representing an image binary of arbitrary size.\n      is_training: `bool` for whether the preprocessing is for 
training.\n      use_bfloat16: `bool` for whether to use bfloat16.\n      image_size: image size.\n      interpolation: image interpolation method\n\n    Returns:\n      A preprocessed image `Tensor` with value range of [0, 255].\n    \"\"\"\n    if is_training:\n        return preprocess_for_train(image_bytes, use_bfloat16, image_size, interpolation)\n    else:\n        return preprocess_for_eval(image_bytes, use_bfloat16, image_size, interpolation)\n\n\nclass TfPreprocessTransform:\n\n    def __init__(self, is_training=False, size=224, interpolation='bicubic'):\n        self.is_training = is_training\n        self.size = size[0] if isinstance(size, tuple) else size\n        self.interpolation = interpolation\n        self._image_bytes = None\n        self.process_image = self._build_tf_graph()\n        self.sess = None\n\n    def _build_tf_graph(self):\n        with tf.device('/cpu:0'):\n            self._image_bytes = tf.placeholder(\n                shape=[],\n                dtype=tf.string,\n            )\n            img = preprocess_image(\n                self._image_bytes, self.is_training, False, self.size, self.interpolation)\n        return img\n\n    def __call__(self, image_bytes):\n        if self.sess is None:\n            self.sess = tf.Session()\n        img = self.sess.run(self.process_image, feed_dict={self._image_bytes: image_bytes})\n        img = img.round().clip(0, 255).astype(np.uint8)\n        if img.ndim < 3:\n            img = np.expand_dims(img, axis=-1)\n        img = np.rollaxis(img, 2)  # HWC to CHW\n        return img\n"
  },
  {
    "path": "timm/data/transforms.py",
    "content": "import math\nimport numbers\nimport random\nimport warnings\nfrom typing import List, Sequence, Tuple, Union\n\nimport torch\nimport torchvision.transforms as transforms\nimport torchvision.transforms.functional as F\ntry:\n    from torchvision.transforms.functional import InterpolationMode\n    has_interpolation_mode = True\nexcept ImportError:\n    has_interpolation_mode = False\nfrom PIL import Image\nimport numpy as np\n\n__all__ = [\n    \"ToNumpy\", \"ToTensor\", \"str_to_interp_mode\", \"str_to_pil_interp\", \"interp_mode_to_str\",\n    \"RandomResizedCropAndInterpolation\", \"CenterCropOrPad\", \"center_crop_or_pad\", \"crop_or_pad\",\n    \"RandomCropOrPad\", \"RandomPad\", \"ResizeKeepRatio\", \"TrimBorder\", \"MaybeToTensor\", \"MaybePILToTensor\"\n]\n\n\nclass ToNumpy:\n\n    def __call__(self, pil_img):\n        np_img = np.array(pil_img, dtype=np.uint8)\n        if np_img.ndim < 3:\n            np_img = np.expand_dims(np_img, axis=-1)\n        np_img = np.rollaxis(np_img, 2)  # HWC to CHW\n        return np_img\n\n\nclass ToTensor:\n    \"\"\" ToTensor with no rescaling of values\"\"\"\n    def __init__(self, dtype=torch.float32):\n        self.dtype = dtype\n\n    def __call__(self, pil_img):\n        return F.pil_to_tensor(pil_img).to(dtype=self.dtype)\n\n\nclass MaybeToTensor(transforms.ToTensor):\n    \"\"\"Convert a PIL Image or ndarray to tensor if it's not already one.\n    \"\"\"\n\n    def __init__(self) -> None:\n        super().__init__()\n\n    def __call__(self, pic) -> torch.Tensor:\n        \"\"\"\n        Args:\n            pic (PIL Image or numpy.ndarray): Image to be converted to tensor.\n\n        Returns:\n            Tensor: Converted image.\n        \"\"\"\n        if isinstance(pic, torch.Tensor):\n            return pic\n        return F.to_tensor(pic)\n\n    def __repr__(self) -> str:\n        return f\"{self.__class__.__name__}()\"\n\n\nclass MaybePILToTensor:\n    \"\"\"Convert a PIL Image to a tensor of the 
same type - this does not scale values.\n    \"\"\"\n\n    def __init__(self) -> None:\n        super().__init__()\n\n    def __call__(self, pic):\n        \"\"\"\n        Note: A deep copy of the underlying array is performed.\n\n        Args:\n            pic (PIL Image): Image to be converted to tensor.\n\n        Returns:\n            Tensor: Converted image.\n        \"\"\"\n        if isinstance(pic, torch.Tensor):\n            return pic\n        return F.pil_to_tensor(pic)\n\n    def __repr__(self) -> str:\n        return f\"{self.__class__.__name__}()\"\n\n\n# Pillow is deprecating the top-level resampling attributes (e.g., Image.BILINEAR) in\n# favor of the Image.Resampling enum. The top-level resampling attributes will be\n# removed in Pillow 10.\nif hasattr(Image, \"Resampling\"):\n    _pil_interpolation_to_str = {\n        Image.Resampling.NEAREST: 'nearest',\n        Image.Resampling.BILINEAR: 'bilinear',\n        Image.Resampling.BICUBIC: 'bicubic',\n        Image.Resampling.BOX: 'box',\n        Image.Resampling.HAMMING: 'hamming',\n        Image.Resampling.LANCZOS: 'lanczos',\n    }\nelse:\n    _pil_interpolation_to_str = {\n        Image.NEAREST: 'nearest',\n        Image.BILINEAR: 'bilinear',\n        Image.BICUBIC: 'bicubic',\n        Image.BOX: 'box',\n        Image.HAMMING: 'hamming',\n        Image.LANCZOS: 'lanczos',\n    }\n\n_str_to_pil_interpolation = {b: a for a, b in _pil_interpolation_to_str.items()}\n\n\nif has_interpolation_mode:\n    _torch_interpolation_to_str = {\n        InterpolationMode.NEAREST: 'nearest',\n        InterpolationMode.BILINEAR: 'bilinear',\n        InterpolationMode.BICUBIC: 'bicubic',\n        InterpolationMode.BOX: 'box',\n        InterpolationMode.HAMMING: 'hamming',\n        InterpolationMode.LANCZOS: 'lanczos',\n    }\n    _str_to_torch_interpolation = {b: a for a, b in _torch_interpolation_to_str.items()}\nelse:\n    _pil_interpolation_to_torch = {}\n    _torch_interpolation_to_str = {}\n\n\ndef 
str_to_pil_interp(mode_str):\n    return _str_to_pil_interpolation[mode_str]\n\n\ndef str_to_interp_mode(mode_str):\n    if has_interpolation_mode:\n        return _str_to_torch_interpolation[mode_str]\n    else:\n        return _str_to_pil_interpolation[mode_str]\n\n\ndef interp_mode_to_str(mode):\n    if has_interpolation_mode:\n        return _torch_interpolation_to_str[mode]\n    else:\n        return _pil_interpolation_to_str[mode]\n\n\n_RANDOM_INTERPOLATION = (str_to_interp_mode('bilinear'), str_to_interp_mode('bicubic'))\n\n\ndef _setup_size(size, error_msg=\"Please provide only two dimensions (h, w) for size.\"):\n    if isinstance(size, numbers.Number):\n        return int(size), int(size)\n\n    if isinstance(size, Sequence) and len(size) == 1:\n        return size[0], size[0]\n\n    if len(size) != 2:\n        raise ValueError(error_msg)\n\n    return size\n\n\nclass RandomResizedCropAndInterpolation:\n    \"\"\"Crop the given PIL Image to random size and aspect ratio with random interpolation.\n\n    A crop of random size (default: of 0.08 to 1.0) of the original size and a random\n    aspect ratio (default: of 3/4 to 4/3) of the original aspect ratio is made. This crop\n    is finally resized to given size.\n    This is popularly used to train the Inception networks.\n\n    Args:\n        size: expected output size of each edge\n        scale: range of size of the origin size cropped\n        ratio: range of aspect ratio of the origin aspect ratio cropped\n        interpolation: Default: PIL.Image.BILINEAR\n    \"\"\"\n\n    def __init__(\n            self,\n            size,\n            scale=(0.08, 1.0),\n            ratio=(3. / 4., 4. 
/ 3.),\n            interpolation='bilinear',\n    ):\n        if isinstance(size, (list, tuple)):\n            self.size = tuple(size)\n        else:\n            self.size = (size, size)\n        if (scale[0] > scale[1]) or (ratio[0] > ratio[1]):\n            warnings.warn(\"range should be of kind (min, max)\")\n\n        if interpolation == 'random':\n            self.interpolation = _RANDOM_INTERPOLATION\n        else:\n            self.interpolation = str_to_interp_mode(interpolation)\n        self.scale = scale\n        self.ratio = ratio\n\n    @staticmethod\n    def get_params(img, scale, ratio):\n        \"\"\"Get parameters for ``crop`` for a random sized crop.\n\n        Args:\n            img (PIL Image): Image to be cropped.\n            scale (tuple): range of size of the origin size cropped\n            ratio (tuple): range of aspect ratio of the origin aspect ratio cropped\n\n        Returns:\n            tuple: params (i, j, h, w) to be passed to ``crop`` for a random\n                sized crop.\n        \"\"\"\n        img_w, img_h = F.get_image_size(img)\n        area = img_w * img_h\n\n        for attempt in range(10):\n            target_area = random.uniform(*scale) * area\n            log_ratio = (math.log(ratio[0]), math.log(ratio[1]))\n            aspect_ratio = math.exp(random.uniform(*log_ratio))\n\n            target_w = int(round(math.sqrt(target_area * aspect_ratio)))\n            target_h = int(round(math.sqrt(target_area / aspect_ratio)))\n            if target_w <= img_w and target_h <= img_h:\n                i = random.randint(0, img_h - target_h)\n                j = random.randint(0, img_w - target_w)\n                return i, j, target_h, target_w\n\n        # Fallback to central crop\n        in_ratio = img_w / img_h\n        if in_ratio < min(ratio):\n            target_w = img_w\n            target_h = int(round(target_w / min(ratio)))\n        elif in_ratio > max(ratio):\n            target_h = img_h\n            
target_w = int(round(target_h * max(ratio)))\n        else:  # whole image\n            target_w = img_w\n            target_h = img_h\n        i = (img_h - target_h) // 2\n        j = (img_w - target_w) // 2\n        return i, j, target_h, target_w\n\n    def __call__(self, img):\n        \"\"\"\n        Args:\n            img (PIL Image): Image to be cropped and resized.\n\n        Returns:\n            PIL Image: Randomly cropped and resized image.\n        \"\"\"\n        i, j, h, w = self.get_params(img, self.scale, self.ratio)\n        if isinstance(self.interpolation, (tuple, list)):\n            interpolation = random.choice(self.interpolation)\n        else:\n            interpolation = self.interpolation\n        return F.resized_crop(img, i, j, h, w, self.size, interpolation)\n\n    def __repr__(self):\n        if isinstance(self.interpolation, (tuple, list)):\n            interpolate_str = ' '.join([interp_mode_to_str(x) for x in self.interpolation])\n        else:\n            interpolate_str = interp_mode_to_str(self.interpolation)\n        format_string = self.__class__.__name__ + '(size={0}'.format(self.size)\n        format_string += ', scale={0}'.format(tuple(round(s, 4) for s in self.scale))\n        format_string += ', ratio={0}'.format(tuple(round(r, 4) for r in self.ratio))\n        format_string += ', interpolation={0})'.format(interpolate_str)\n        return format_string\n\n\ndef center_crop_or_pad(\n        img: torch.Tensor,\n        output_size: Union[int, List[int]],\n        fill: Union[int, Tuple[int, int, int]] = 0,\n        padding_mode: str = 'constant',\n) -> torch.Tensor:\n    \"\"\"Center crops and/or pads the given image.\n\n    If the image is torch Tensor, it is expected\n    to have [..., H, W] shape, where ... 
means an arbitrary number of leading dimensions.\n    If image size is smaller than output size along any edge, image is padded with 0 and then center cropped.\n\n    Args:\n        img (PIL Image or Tensor): Image to be cropped.\n        output_size (sequence or int): (height, width) of the crop box. If int or sequence with single int,\n            it is used for both directions.\n        fill (int, Tuple[int]): Padding color\n\n    Returns:\n        PIL Image or Tensor: Cropped image.\n    \"\"\"\n    output_size = _setup_size(output_size)\n    crop_height, crop_width = output_size\n    _, image_height, image_width = F.get_dimensions(img)\n\n    if crop_width > image_width or crop_height > image_height:\n        padding_ltrb = [\n            (crop_width - image_width) // 2 if crop_width > image_width else 0,\n            (crop_height - image_height) // 2 if crop_height > image_height else 0,\n            (crop_width - image_width + 1) // 2 if crop_width > image_width else 0,\n            (crop_height - image_height + 1) // 2 if crop_height > image_height else 0,\n        ]\n        img = F.pad(img, padding_ltrb, fill=fill, padding_mode=padding_mode)\n        _, image_height, image_width = F.get_dimensions(img)\n        if crop_width == image_width and crop_height == image_height:\n            return img\n\n    crop_top = int(round((image_height - crop_height) / 2.0))\n    crop_left = int(round((image_width - crop_width) / 2.0))\n    return F.crop(img, crop_top, crop_left, crop_height, crop_width)\n\n\nclass CenterCropOrPad(torch.nn.Module):\n    \"\"\"Crops the given image at the center.\n    If the image is torch Tensor, it is expected\n    to have [..., H, W] shape, where ... means an arbitrary number of leading dimensions.\n    If image size is smaller than output size along any edge, image is padded with 0 and then center cropped.\n\n    Args:\n        size (sequence or int): Desired output size of the crop. 
If size is an\n            int instead of sequence like (h, w), a square crop (size, size) is\n            made. If provided a sequence of length 1, it will be interpreted as (size[0], size[0]).\n    \"\"\"\n\n    def __init__(\n            self,\n            size: Union[int, List[int]],\n            fill: Union[int, Tuple[int, int, int]] = 0,\n            padding_mode: str = 'constant',\n    ):\n        super().__init__()\n        self.size = _setup_size(size)\n        self.fill = fill\n        self.padding_mode = padding_mode\n\n    def forward(self, img):\n        \"\"\"\n        Args:\n            img (PIL Image or Tensor): Image to be cropped.\n\n        Returns:\n            PIL Image or Tensor: Cropped image.\n        \"\"\"\n        return center_crop_or_pad(img, self.size, fill=self.fill, padding_mode=self.padding_mode)\n\n    def __repr__(self) -> str:\n        return f\"{self.__class__.__name__}(size={self.size})\"\n\n\ndef crop_or_pad(\n        img: torch.Tensor,\n        top: int,\n        left: int,\n        height: int,\n        width: int,\n        fill: Union[int, Tuple[int, int, int]] = 0,\n        padding_mode: str = 'constant',\n) -> torch.Tensor:\n    \"\"\" Crops and/or pads image to meet target size, with control over fill and padding_mode.\n    \"\"\"\n    _, image_height, image_width = F.get_dimensions(img)\n    right = left + width\n    bottom = top + height\n    if left < 0 or top < 0 or right > image_width or bottom > image_height:\n        padding_ltrb = [\n            max(-left + min(0, right), 0),\n            max(-top + min(0, bottom), 0),\n            max(right - max(image_width, left), 0),\n            max(bottom - max(image_height, top), 0),\n        ]\n        img = F.pad(img, padding_ltrb, fill=fill, padding_mode=padding_mode)\n\n    top = max(top, 0)\n    left = max(left, 0)\n    return F.crop(img, top, left, height, width)\n\n\nclass RandomCropOrPad(torch.nn.Module):\n    \"\"\" Crop and/or pad image with random placement 
within the crop or pad margin.\n    \"\"\"\n\n    def __init__(\n            self,\n            size: Union[int, List[int]],\n            fill: Union[int, Tuple[int, int, int]] = 0,\n            padding_mode: str = 'constant',\n    ):\n        super().__init__()\n        self.size = _setup_size(size)\n        self.fill = fill\n        self.padding_mode = padding_mode\n\n    @staticmethod\n    def get_params(img, size):\n        _, image_height, image_width = F.get_dimensions(img)\n        delta_height = image_height - size[0]\n        delta_width = image_width - size[1]\n        top = int(math.copysign(random.randint(0, abs(delta_height)), delta_height))\n        left = int(math.copysign(random.randint(0, abs(delta_width)), delta_width))\n        return top, left\n\n    def forward(self, img):\n        \"\"\"\n        Args:\n            img (PIL Image or Tensor): Image to be cropped.\n\n        Returns:\n            PIL Image or Tensor: Cropped image.\n        \"\"\"\n        top, left = self.get_params(img, self.size)\n        return crop_or_pad(\n            img,\n            top=top,\n            left=left,\n            height=self.size[0],\n            width=self.size[1],\n            fill=self.fill,\n            padding_mode=self.padding_mode,\n        )\n\n    def __repr__(self) -> str:\n        return f\"{self.__class__.__name__}(size={self.size})\"\n\n\nclass RandomPad:\n    def __init__(self, input_size, fill=0):\n        self.input_size = input_size\n        self.fill = fill\n\n    @staticmethod\n    def get_params(img, input_size):\n        width, height = F.get_image_size(img)\n        delta_width = max(input_size[1] - width, 0)\n        delta_height = max(input_size[0] - height, 0)\n        pad_left = random.randint(0, delta_width)\n        pad_top = random.randint(0, delta_height)\n        pad_right = delta_width - pad_left\n        pad_bottom = delta_height - pad_top\n        return pad_left, pad_top, pad_right, pad_bottom\n\n    def __call__(self, 
img):\n        padding = self.get_params(img, self.input_size)\n        img = F.pad(img, padding, self.fill)\n        return img\n\n\nclass ResizeKeepRatio:\n    \"\"\" Resize and Keep Aspect Ratio\n    \"\"\"\n\n    def __init__(\n            self,\n            size,\n            longest=0.,\n            interpolation='bilinear',\n            random_scale_prob=0.,\n            random_scale_range=(0.85, 1.05),\n            random_scale_area=False,\n            random_aspect_prob=0.,\n            random_aspect_range=(0.9, 1.11),\n    ):\n        \"\"\"\n\n        Args:\n            size:\n            longest:\n            interpolation:\n            random_scale_prob:\n            random_scale_range:\n            random_scale_area:\n            random_aspect_prob:\n            random_aspect_range:\n        \"\"\"\n        if isinstance(size, (list, tuple)):\n            self.size = tuple(size)\n        else:\n            self.size = (size, size)\n        if interpolation == 'random':\n            self.interpolation = _RANDOM_INTERPOLATION\n        else:\n            self.interpolation = str_to_interp_mode(interpolation)\n        self.longest = float(longest)\n        self.random_scale_prob = random_scale_prob\n        self.random_scale_range = random_scale_range\n        self.random_scale_area = random_scale_area\n        self.random_aspect_prob = random_aspect_prob\n        self.random_aspect_range = random_aspect_range\n\n    @staticmethod\n    def get_params(\n            img,\n            target_size,\n            longest,\n            random_scale_prob=0.,\n            random_scale_range=(1.0, 1.33),\n            random_scale_area=False,\n            random_aspect_prob=0.,\n            random_aspect_range=(0.9, 1.11)\n    ):\n        \"\"\"Get parameters\n        \"\"\"\n        img_h, img_w = img_size = F.get_dimensions(img)[1:]\n        target_h, target_w = target_size\n        ratio_h = img_h / target_h\n        ratio_w = img_w / target_w\n        ratio = 
max(ratio_h, ratio_w) * longest + min(ratio_h, ratio_w) * (1. - longest)\n\n        if random_scale_prob > 0 and random.random() < random_scale_prob:\n            ratio_factor = random.uniform(random_scale_range[0], random_scale_range[1])\n            if random_scale_area:\n                # make ratio factor equivalent to RRC area crop where < 1.0 = area zoom,\n                # otherwise like affine scale where < 1.0 = linear zoom out\n                ratio_factor = 1. / math.sqrt(ratio_factor)\n            ratio_factor = (ratio_factor, ratio_factor)\n        else:\n            ratio_factor = (1., 1.)\n\n        if random_aspect_prob > 0 and random.random() < random_aspect_prob:\n            log_aspect = (math.log(random_aspect_range[0]), math.log(random_aspect_range[1]))\n            aspect_factor = math.exp(random.uniform(*log_aspect))\n            aspect_factor = math.sqrt(aspect_factor)\n            # currently applying random aspect adjustment equally to both dims,\n            # could change to keep output sizes above their target where possible\n            ratio_factor = (ratio_factor[0] / aspect_factor, ratio_factor[1] * aspect_factor)\n\n        size = [round(x * f / ratio) for x, f in zip(img_size, ratio_factor)]\n        return size\n\n    def __call__(self, img):\n        \"\"\"\n        Args:\n            img (PIL Image): Image to be cropped and resized.\n\n        Returns:\n            PIL Image: Resized, padded to at least target size, possibly cropped to exactly target size\n        \"\"\"\n        size = self.get_params(\n            img, self.size, self.longest,\n            self.random_scale_prob, self.random_scale_range, self.random_scale_area,\n            self.random_aspect_prob, self.random_aspect_range\n        )\n        if isinstance(self.interpolation, (tuple, list)):\n            interpolation = random.choice(self.interpolation)\n        else:\n            interpolation = self.interpolation\n        img = F.resize(img, size, 
interpolation)\n        return img\n\n    def __repr__(self):\n        if isinstance(self.interpolation, (tuple, list)):\n            interpolate_str = ' '.join([interp_mode_to_str(x) for x in self.interpolation])\n        else:\n            interpolate_str = interp_mode_to_str(self.interpolation)\n        format_string = self.__class__.__name__ + '(size={0}'.format(self.size)\n        format_string += f', interpolation={interpolate_str}'\n        format_string += f', longest={self.longest:.3f}'\n        format_string += f', random_scale_prob={self.random_scale_prob:.3f}'\n        format_string += f', random_scale_range=(' \\\n                         f'{self.random_scale_range[0]:.3f}, {self.random_scale_range[1]:.3f})'\n        format_string += f', random_aspect_prob={self.random_aspect_prob:.3f}'\n        format_string += f', random_aspect_range=(' \\\n                         f'{self.random_aspect_range[0]:.3f}, {self.random_aspect_range[1]:.3f}))'\n        return format_string\n\n\nclass TrimBorder(torch.nn.Module):\n\n    def __init__(\n            self,\n            border_size: int,\n    ):\n        super().__init__()\n        self.border_size = border_size\n\n    def forward(self, img):\n        w, h = F.get_image_size(img)\n        top = left = self.border_size\n        top = min(top, h)\n        left = min(left, w)\n        height = max(0, h - 2 * self.border_size)\n        width = max(0, w - 2 * self.border_size)\n        return F.crop(img, top, left, height, width)\n"
  },
  {
    "path": "timm/data/transforms_factory.py",
    "content": "\"\"\" Transforms Factory\nFactory methods for building image transforms for use with TIMM (PyTorch Image Models)\n\nHacked together by / Copyright 2019, Ross Wightman\n\"\"\"\nimport math\nfrom typing import Optional, Tuple, Union\n\nimport torch\nfrom torchvision import transforms\n\nfrom timm.data.constants import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, DEFAULT_CROP_PCT\nfrom timm.data.auto_augment import rand_augment_transform, augment_and_mix_transform, auto_augment_transform\nfrom timm.data.transforms import str_to_interp_mode, str_to_pil_interp, RandomResizedCropAndInterpolation, \\\n    ResizeKeepRatio, CenterCropOrPad, RandomCropOrPad, TrimBorder, MaybeToTensor, MaybePILToTensor\nfrom timm.data.naflex_transforms import RandomResizedCropToSequence, ResizeToSequence, Patchify\nfrom timm.data.random_erasing import RandomErasing\n\n\ndef transforms_noaug_train(\n        img_size: Union[int, Tuple[int, int]] = 224,\n        interpolation: str = 'bilinear',\n        mean: Tuple[float, ...] = IMAGENET_DEFAULT_MEAN,\n        std: Tuple[float, ...] = IMAGENET_DEFAULT_STD,\n        use_prefetcher: bool = False,\n        normalize: bool = True,\n):\n    \"\"\" No-augmentation image transforms for training.\n\n    Args:\n        img_size: Target image size.\n        interpolation: Image interpolation mode.\n        mean: Image normalization mean.\n        std: Image normalization standard deviation.\n        use_prefetcher: Prefetcher enabled. 
Do not convert image to tensor or normalize.\n        normalize: Normalization tensor output w/ provided mean/std (if prefetcher not used).\n\n    Returns:\n\n    \"\"\"\n    if interpolation == 'random':\n        # random interpolation not supported with no-aug\n        interpolation = 'bilinear'\n    tfl = [\n        transforms.Resize(img_size, interpolation=str_to_interp_mode(interpolation)),\n        transforms.CenterCrop(img_size)\n    ]\n    if use_prefetcher:\n        # prefetcher and collate will handle tensor conversion and norm\n        tfl += [MaybePILToTensor()]\n    elif not normalize:\n        # when normalize disabled, converted to tensor without scaling, keep original dtype\n        tfl += [MaybePILToTensor()]\n    else:\n        tfl += [\n            MaybeToTensor(),\n            transforms.Normalize(\n                mean=torch.tensor(mean),\n                std=torch.tensor(std)\n            )\n        ]\n    return transforms.Compose(tfl)\n\n\ndef transforms_imagenet_train(\n        img_size: Union[int, Tuple[int, int]] = 224,\n        scale: Optional[Tuple[float, float]] = None,\n        ratio: Optional[Tuple[float, float]] = None,\n        train_crop_mode: Optional[str] = None,\n        hflip: float = 0.5,\n        vflip: float = 0.,\n        color_jitter: Union[float, Tuple[float, ...]] = 0.4,\n        color_jitter_prob: Optional[float] = None,\n        force_color_jitter: bool = False,\n        grayscale_prob: float = 0.,\n        gaussian_blur_prob: float = 0.,\n        auto_augment: Optional[str] = None,\n        interpolation: str = 'random',\n        mean: Tuple[float, ...] = IMAGENET_DEFAULT_MEAN,\n        std: Tuple[float, ...] 
= IMAGENET_DEFAULT_STD,\n        re_prob: float = 0.,\n        re_mode: str = 'const',\n        re_count: int = 1,\n        re_num_splits: int = 0,\n        use_prefetcher: bool = False,\n        normalize: bool = True,\n        separate: bool = False,\n        naflex: bool = False,\n        patch_size: Union[int, Tuple[int, int]] = 16,\n        max_seq_len: int = 576,  # 24x24 for 16x16 patch\n        patchify: bool = False,\n):\n    \"\"\" ImageNet-oriented image transforms for training.\n\n    Args:\n        img_size: Target image size.\n        train_crop_mode: Training random crop mode ('rrc', 'rkrc', 'rkrr').\n        scale: Random resize scale range (crop area, < 1.0 => zoom in).\n        ratio: Random aspect ratio range (crop ratio for RRC, ratio adjustment factor for RKR).\n        hflip: Horizontal flip probability.\n        vflip: Vertical flip probability.\n        color_jitter: Random color jitter component factors (brightness, contrast, saturation, hue).\n            Scalar is applied as (scalar,) * 3 (no hue).\n        color_jitter_prob: Apply color jitter with this probability if not None (for SimCLR-like aug).\n        force_color_jitter: Force color jitter where it is normally disabled (ie with RandAugment on).\n        grayscale_prob: Probability of converting image to grayscale (for SimCLR-like aug).\n        gaussian_blur_prob: Probability of applying gaussian blur (for SimCLR-like aug).\n        auto_augment: Auto augment configuration string (see auto_augment.py).\n        interpolation: Image interpolation mode.\n        mean: Image normalization mean.\n        std: Image normalization standard deviation.\n        re_prob: Random erasing probability.\n        re_mode: Random erasing fill mode.\n        re_count: Number of random erasing regions.\n        re_num_splits: Control split of random erasing across batch size.\n        use_prefetcher: Prefetcher enabled. 
Do not convert image to tensor or normalize.\n        normalize: Normalize tensor output w/ provided mean/std (if prefetcher not used).\n        separate: Output transforms in 3-stage tuple.\n        naflex: Enable NaFlex mode, sequence constrained patch output\n        patch_size: Patch size for NaFlex mode.\n        max_seq_len: Max sequence length for NaFlex mode.\n\n    Returns:\n        If separate==True, the transforms are returned as a tuple of 3 separate transforms\n          for use in a mixing dataset that passes\n            * all data through the first (primary) transform, called the 'clean' data\n            * a portion of the data through the secondary transform\n            * normalizes and converts the branches above with the third, final transform\n    \"\"\"\n    train_crop_mode = train_crop_mode or 'rrc'\n    assert train_crop_mode in {'rrc', 'rkrc', 'rkrr'}\n\n    primary_tfl = []\n    if naflex:\n        scale = tuple(scale or (0.08, 1.0))  # default imagenet scale range\n        ratio = tuple(ratio or (3. / 4., 4. 
/ 3.))  # default imagenet ratio range\n        primary_tfl += [RandomResizedCropToSequence(\n            patch_size=patch_size,\n            max_seq_len=max_seq_len,\n            scale=scale,\n            ratio=ratio,\n            interpolation=interpolation\n        )]\n    else:\n        if train_crop_mode in ('rkrc', 'rkrr'):\n            # FIXME integration of RKR is a WIP\n            scale = tuple(scale or (0.8, 1.00))\n            ratio = tuple(ratio or (0.9, 1/.9))\n            primary_tfl += [\n                ResizeKeepRatio(\n                    img_size,\n                    interpolation=interpolation,\n                    random_scale_prob=0.5,\n                    random_scale_range=scale,\n                    random_scale_area=True,  # scale compatible with RRC\n                    random_aspect_prob=0.5,\n                    random_aspect_range=ratio,\n                ),\n                CenterCropOrPad(img_size, padding_mode='reflect')\n                if train_crop_mode == 'rkrc' else\n                RandomCropOrPad(img_size, padding_mode='reflect')\n            ]\n        else:\n            scale = tuple(scale or (0.08, 1.0))  # default imagenet scale range\n            ratio = tuple(ratio or (3. / 4., 4. 
/ 3.))  # default imagenet ratio range\n            primary_tfl += [\n                RandomResizedCropAndInterpolation(\n                    img_size,\n                    scale=scale,\n                    ratio=ratio,\n                    interpolation=interpolation,\n                )\n            ]\n\n    if hflip > 0.:\n        primary_tfl += [transforms.RandomHorizontalFlip(p=hflip)]\n    if vflip > 0.:\n        primary_tfl += [transforms.RandomVerticalFlip(p=vflip)]\n\n    secondary_tfl = []\n    disable_color_jitter = False\n    if auto_augment:\n        assert isinstance(auto_augment, str)\n        # color jitter is typically disabled if AA/RA on,\n        # this allows override without breaking old hparam cfgs\n        disable_color_jitter = not (force_color_jitter or '3a' in auto_augment)\n        if isinstance(img_size, (tuple, list)):\n            img_size_min = min(img_size)\n        else:\n            img_size_min = img_size\n        aa_params = dict(\n            translate_const=int(img_size_min * 0.45),\n            img_mean=tuple([min(255, round(255 * x)) for x in mean]),\n        )\n        if interpolation and interpolation != 'random':\n            aa_params['interpolation'] = str_to_pil_interp(interpolation)\n        if auto_augment.startswith('rand'):\n            secondary_tfl += [rand_augment_transform(auto_augment, aa_params)]\n        elif auto_augment.startswith('augmix'):\n            aa_params['translate_pct'] = 0.3\n            secondary_tfl += [augment_and_mix_transform(auto_augment, aa_params)]\n        else:\n            secondary_tfl += [auto_augment_transform(auto_augment, aa_params)]\n\n    if color_jitter is not None and not disable_color_jitter:\n        # color jitter is enabled when not using AA or when forced\n        if isinstance(color_jitter, (list, tuple)):\n            # color jitter should be a 3-tuple/list if spec brightness/contrast/saturation\n            # or 4 if also augmenting hue\n            assert 
len(color_jitter) in (3, 4)\n        else:\n            # if it's a scalar, duplicate for brightness, contrast, and saturation, no hue\n            color_jitter = (float(color_jitter),) * 3\n        if color_jitter_prob is not None:\n            secondary_tfl += [\n                transforms.RandomApply([\n                        transforms.ColorJitter(*color_jitter),\n                    ],\n                    p=color_jitter_prob\n                )\n            ]\n        else:\n            secondary_tfl += [transforms.ColorJitter(*color_jitter)]\n\n    if grayscale_prob:\n        secondary_tfl += [transforms.RandomGrayscale(p=grayscale_prob)]\n\n    if gaussian_blur_prob:\n        secondary_tfl += [\n            transforms.RandomApply([\n                    transforms.GaussianBlur(kernel_size=23),  # hardcoded for now\n                ],\n                p=gaussian_blur_prob,\n            )\n        ]\n\n    final_tfl = []\n    if use_prefetcher:\n        # prefetcher and collate will handle tensor conversion and norm\n        final_tfl += [MaybePILToTensor()]\n    elif not normalize:\n        # when normalize disable, converted to tensor without scaling, keeps original dtype\n        final_tfl += [MaybePILToTensor()]\n    else:\n        final_tfl += [\n            MaybeToTensor(),\n            transforms.Normalize(\n                mean=torch.tensor(mean),\n                std=torch.tensor(std),\n            ),\n        ]\n        if re_prob > 0.:\n            final_tfl += [\n                RandomErasing(\n                    re_prob,\n                    mode=re_mode,\n                    max_count=re_count,\n                    num_splits=re_num_splits,\n                    device='cpu',\n                )\n            ]\n\n    if patchify:\n        final_tfl += [Patchify(patch_size=patch_size)]\n\n    if separate:\n        return transforms.Compose(primary_tfl), transforms.Compose(secondary_tfl), transforms.Compose(final_tfl)\n    else:\n        return 
transforms.Compose(primary_tfl + secondary_tfl + final_tfl)\n\n\ndef transforms_imagenet_eval(\n        img_size: Union[int, Tuple[int, int]] = 224,\n        crop_pct: Optional[float] = None,\n        crop_mode: Optional[str] = None,\n        crop_border_pixels: Optional[int] = None,\n        interpolation: str = 'bilinear',\n        mean: Tuple[float, ...] = IMAGENET_DEFAULT_MEAN,\n        std: Tuple[float, ...] = IMAGENET_DEFAULT_STD,\n        use_prefetcher: bool = False,\n        normalize: bool = True,\n        naflex: bool = False,\n        patch_size: Union[int, Tuple[int, int]] = 16,\n        max_seq_len: int = 576,  # 24x24 for 16x16 patch\n        patchify: bool = False,\n):\n    \"\"\" ImageNet-oriented image transform for evaluation and inference.\n\n    Args:\n        img_size: Target image size.\n        crop_pct: Crop percentage. Defaults to 0.875 when None.\n        crop_mode: Crop mode. One of ['squash', 'border', 'center']. Defaults to 'center' when None.\n        crop_border_pixels: Trim a border of specified # pixels around edge of original image.\n        interpolation: Image interpolation mode.\n        mean: Image normalization mean.\n        std: Image normalization standard deviation.\n        use_prefetcher: Prefetcher enabled. 
Do not convert image to tensor or normalize.\n        normalize: Normalize tensor output w/ provided mean/std (if prefetcher not used).\n        naflex: Enable NaFlex mode, sequence constrained patch output\n        patch_size: Patch size for NaFlex mode.\n        max_seq_len: Max sequence length for NaFlex mode.\n        patchify: Patchify the output instead of relying on prefetcher\n\n    Returns:\n        Composed transform pipeline\n    \"\"\"\n    crop_pct = crop_pct or DEFAULT_CROP_PCT\n\n    if isinstance(img_size, (tuple, list)):\n        assert len(img_size) == 2\n        scale_size = tuple([math.floor(x / crop_pct) for x in img_size])\n    else:\n        scale_size = math.floor(img_size / crop_pct)\n        scale_size = (scale_size, scale_size)\n\n    tfl = []\n\n    if crop_border_pixels:\n        tfl += [TrimBorder(crop_border_pixels)]\n\n    if naflex:\n        tfl += [ResizeToSequence(\n            patch_size=patch_size,\n            max_seq_len=max_seq_len,\n            interpolation=interpolation,\n        )]\n    else:\n        if crop_mode == 'squash':\n            # squash mode scales each edge to 1/pct of target, then crops\n            # aspect ratio is not preserved, no img lost if crop_pct == 1.0\n            tfl += [\n                transforms.Resize(scale_size, interpolation=str_to_interp_mode(interpolation)),\n                transforms.CenterCrop(img_size),\n            ]\n        elif crop_mode == 'border':\n            # scale the longest edge of image to 1/pct of target edge, add borders to pad, then crop\n            # no image lost if crop_pct == 1.0\n            fill = [round(255 * v) for v in mean]\n            tfl += [\n                ResizeKeepRatio(scale_size, interpolation=interpolation, longest=1.0),\n                CenterCropOrPad(img_size, fill=fill),\n            ]\n        else:\n            # default crop model is center\n            # aspect ratio is preserved, crops center within image, no borders are added, image is 
lost\n            if scale_size[0] == scale_size[1]:\n                # simple case, use torchvision built-in Resize w/ shortest edge mode (scalar size arg)\n                tfl += [\n                    transforms.Resize(scale_size[0], interpolation=str_to_interp_mode(interpolation))\n                ]\n            else:\n                # resize the shortest edge to matching target dim for non-square target\n                tfl += [ResizeKeepRatio(scale_size)]\n            tfl += [transforms.CenterCrop(img_size)]\n\n    if use_prefetcher:\n        # prefetcher and collate will handle tensor conversion and norm\n        tfl += [MaybePILToTensor()]\n    elif not normalize:\n        # when normalize disabled, converted to tensor without scaling, keeps original dtype\n        tfl += [MaybePILToTensor()]\n    else:\n        tfl += [\n            MaybeToTensor(),\n            transforms.Normalize(\n                mean=torch.tensor(mean),\n                std=torch.tensor(std),\n            ),\n        ]\n\n    if patchify:\n        tfl += [Patchify(patch_size=patch_size)]\n\n    return transforms.Compose(tfl)\n\n\ndef create_transform(\n        input_size: Union[int, Tuple[int, int], Tuple[int, int, int]] = 224,\n        is_training: bool = False,\n        no_aug: bool = False,\n        train_crop_mode: Optional[str] = None,\n        scale: Optional[Tuple[float, float]] = None,\n        ratio: Optional[Tuple[float, float]] = None,\n        hflip: float = 0.5,\n        vflip: float = 0.,\n        color_jitter: Union[float, Tuple[float, ...]] = 0.4,\n        color_jitter_prob: Optional[float] = None,\n        grayscale_prob: float = 0.,\n        gaussian_blur_prob: float = 0.,\n        auto_augment: Optional[str] = None,\n        interpolation: str = 'bilinear',\n        mean: Tuple[float, ...] = IMAGENET_DEFAULT_MEAN,\n        std: Tuple[float, ...] 
= IMAGENET_DEFAULT_STD,\n        re_prob: float = 0.,\n        re_mode: str = 'const',\n        re_count: int = 1,\n        re_num_splits: int = 0,\n        crop_pct: Optional[float] = None,\n        crop_mode: Optional[str] = None,\n        crop_border_pixels: Optional[int] = None,\n        tf_preprocessing: bool = False,\n        use_prefetcher: bool = False,\n        normalize: bool = True,\n        separate: bool = False,\n        naflex: bool = False,\n        patch_size: Union[int, Tuple[int, int]] = 16,\n        max_seq_len: int = 576,  # 24x24 for 16x16 patch\n        patchify: bool = False\n):\n    \"\"\"\n\n    Args:\n        input_size: Target input size (channels, height, width) tuple or size scalar.\n        is_training: Return training (random) transforms.\n        no_aug: Disable augmentation for training (useful for debug).\n        train_crop_mode: Training random crop mode ('rrc', 'rkrc', 'rkrr').\n        scale: Random resize scale range (crop area, < 1.0 => zoom in).\n        ratio: Random aspect ratio range (crop ratio for RRC, ratio adjustment factor for RKR).\n        hflip: Horizontal flip probability.\n        vflip: Vertical flip probability.\n        color_jitter: Random color jitter component factors (brightness, contrast, saturation, hue).\n            Scalar is applied as (scalar,) * 3 (no hue).\n        color_jitter_prob: Apply color jitter with this probability if not None (for SimCLR-like aug).\n        grayscale_prob: Probability of converting image to grayscale (for SimCLR-like aug).\n        gaussian_blur_prob: Probability of applying gaussian blur (for SimCLR-like aug).\n        auto_augment: Auto augment configuration string (see auto_augment.py).\n        interpolation: Image interpolation mode.\n        mean: Image normalization mean.\n        std: Image normalization standard deviation.\n        re_prob: Random erasing probability.\n        re_mode: Random erasing fill mode.\n        re_count: Number of random erasing 
regions.\n        re_num_splits: Control split of random erasing across batch size.\n        crop_pct: Inference crop percentage (output size / resize size).\n        crop_mode: Inference crop mode. One of ['squash', 'border', 'center']. Defaults to 'center' when None.\n        crop_border_pixels: Inference crop border of specified # pixels around edge of original image.\n        tf_preprocessing: Use TF 1.0 inference preprocessing for testing model ports\n        use_prefetcher: Pre-fetcher enabled. Do not convert image to tensor or normalize.\n        normalize: Normalization tensor output w/ provided mean/std (if prefetcher not used).\n        separate: Output transforms in 3-stage tuple.\n\n    Returns:\n        Composed transforms or tuple thereof\n    \"\"\"\n    if isinstance(input_size, (tuple, list)):\n        img_size = input_size[-2:]\n    else:\n        img_size = input_size\n\n    if tf_preprocessing and use_prefetcher:\n        assert not separate, \"Separate transforms not supported for TF preprocessing\"\n        from timm.data.tf_preprocessing import TfPreprocessTransform\n        transform = TfPreprocessTransform(\n            is_training=is_training,\n            size=img_size,\n            interpolation=interpolation,\n        )\n    else:\n        if is_training and no_aug:\n            assert not separate, \"Cannot perform split augmentation with no_aug\"\n            transform = transforms_noaug_train(\n                img_size,\n                interpolation=interpolation,\n                mean=mean,\n                std=std,\n                use_prefetcher=use_prefetcher,\n                normalize=normalize,\n            )\n        elif is_training:\n            transform = transforms_imagenet_train(\n                img_size,\n                train_crop_mode=train_crop_mode,\n                scale=scale,\n                ratio=ratio,\n                hflip=hflip,\n                vflip=vflip,\n                color_jitter=color_jitter,\n  
              color_jitter_prob=color_jitter_prob,\n                grayscale_prob=grayscale_prob,\n                gaussian_blur_prob=gaussian_blur_prob,\n                auto_augment=auto_augment,\n                interpolation=interpolation,\n                mean=mean,\n                std=std,\n                re_prob=re_prob,\n                re_mode=re_mode,\n                re_count=re_count,\n                re_num_splits=re_num_splits,\n                use_prefetcher=use_prefetcher,\n                normalize=normalize,\n                separate=separate,\n                naflex=naflex,\n                patch_size=patch_size,\n                max_seq_len=max_seq_len,\n                patchify=patchify,\n            )\n        else:\n            assert not separate, \"Separate transforms not supported for validation preprocessing\"\n            transform = transforms_imagenet_eval(\n                img_size,\n                interpolation=interpolation,\n                mean=mean,\n                std=std,\n                crop_pct=crop_pct,\n                crop_mode=crop_mode,\n                crop_border_pixels=crop_border_pixels,\n                use_prefetcher=use_prefetcher,\n                normalize=normalize,\n                naflex=naflex,\n                patch_size=patch_size,\n                max_seq_len=max_seq_len,\n                patchify=patchify,\n            )\n\n    return transform\n"
  },
  {
    "path": "timm/layers/__init__.py",
    "content": "from ._fx import (\n    create_feature_extractor,\n    get_graph_node_names,\n    register_notrace_function,\n    register_notrace_module,\n    is_notrace_module,\n    is_notrace_function,\n    get_notrace_modules,\n    get_notrace_functions,\n)\nfrom .activations import *\nfrom .adaptive_avgmax_pool import (\n    adaptive_avgmax_pool2d,\n    select_adaptive_pool2d,\n    AdaptiveAvgMaxPool2d,\n    SelectAdaptivePool2d,\n)\nfrom .attention import Attention, AttentionRope, maybe_add_mask, resolve_self_attn_mask\nfrom .attention2d import MultiQueryAttention2d, Attention2d, MultiQueryAttentionV2\nfrom .attention_pool import AttentionPoolLatent, AttentionPoolPrr\nfrom .attention_pool2d import AttentionPool2d, RotAttentionPool2d\nfrom .blur_pool import BlurPool2d, create_aa\nfrom .classifier import create_classifier, ClassifierHead, NormMlpClassifierHead, ClNormMlpClassifierHead\nfrom .cond_conv2d import CondConv2d, get_condconv_initializer\nfrom .config import (\n    is_exportable,\n    is_scriptable,\n    is_no_jit,\n    use_fused_attn,\n    set_exportable,\n    set_scriptable,\n    set_no_jit,\n    set_layer_config,\n    set_fused_attn,\n    set_reentrant_ckpt,\n    use_reentrant_ckpt,\n)\nfrom .conv2d_same import Conv2dSame, conv2d_same\nfrom .conv_bn_act import ConvNormAct, ConvNormActAa, ConvBnAct\nfrom .create_act import create_act_layer, get_act_layer, get_act_fn\nfrom .create_attn import get_attn, create_attn\nfrom .create_conv2d import create_conv2d\nfrom .create_norm import get_norm_layer, create_norm_layer\nfrom .create_norm_act import get_norm_act_layer, create_norm_act_layer, get_norm_act_layer\nfrom .diff_attention import DiffAttention\nfrom .drop import DropBlock2d, DropPath, drop_block_2d, drop_path, calculate_drop_path_rates\nfrom .eca import EcaModule, CecaModule, EfficientChannelAttn, CircularEfficientChannelAttn\nfrom .evo_norm import (\n    EvoNorm2dB0,\n    EvoNorm2dB1,\n    EvoNorm2dB2,\n    EvoNorm2dS0,\n    EvoNorm2dS0a,\n    
EvoNorm2dS1,\n    EvoNorm2dS1a,\n    EvoNorm2dS2,\n    EvoNorm2dS2a,\n)\nfrom .fast_norm import is_fast_norm, set_fast_norm, fast_group_norm, fast_layer_norm\nfrom .filter_response_norm import FilterResponseNormTlu2d, FilterResponseNormAct2d\nfrom .format import Format, get_channel_dim, get_spatial_dim, nchw_to, nhwc_to\nfrom .gather_excite import GatherExcite\nfrom .global_context import GlobalContext\nfrom .grid import ndgrid, meshgrid\nfrom .helpers import to_ntuple, to_2tuple, to_3tuple, to_4tuple, make_divisible, extend_tuple\nfrom .hybrid_embed import HybridEmbed, HybridEmbedWithSize\nfrom .inplace_abn import InplaceAbn\nfrom .layer_scale import LayerScale, LayerScale2d\nfrom .linear import Linear\nfrom .mixed_conv2d import MixedConv2d\nfrom .mlp import Mlp, GluMlp, GatedMlp, SwiGLU, SwiGLUPacked, ConvMlp, GlobalResponseNormMlp\nfrom .non_local_attn import NonLocalAttn, BatNonLocalAttn\nfrom .norm import (\n    GroupNorm,\n    GroupNorm1,\n    LayerNorm,\n    LayerNorm2d,\n    LayerNormFp32,\n    LayerNorm2dFp32,\n    RmsNorm,\n    RmsNorm2d,\n    RmsNormFp32,\n    RmsNorm2dFp32,\n    SimpleNorm,\n    SimpleNorm2d,\n    SimpleNormFp32,\n    SimpleNorm2dFp32,\n)\nfrom .norm_act import (\n    BatchNormAct2d,\n    GroupNormAct,\n    GroupNorm1Act,\n    LayerNormAct,\n    LayerNormAct2d,\n    LayerNormActFp32,\n    LayerNormAct2dFp32,\n    RmsNormAct,\n    RmsNormAct2d,\n    RmsNormActFp32,\n    RmsNormAct2dFp32,\n    SyncBatchNormAct,\n    convert_sync_batchnorm,\n    FrozenBatchNormAct2d,\n    freeze_batch_norm_2d,\n    unfreeze_batch_norm_2d,\n)\nfrom .padding import get_padding, get_same_padding, pad_same\nfrom .patch_dropout import PatchDropout, PatchDropoutWithIndices, patch_dropout_forward\nfrom .patch_embed import PatchEmbed, PatchEmbedWithSize, PatchEmbedInterpolator, resample_patch_embed\nfrom .pool1d import global_pool_nlc\nfrom .other_pool import LsePlus2d, LsePlus1d, SimPool2d, SimPool1d\nfrom .pool2d_same import AvgPool2dSame, create_pool2d\nfrom 
.pos_embed import resample_abs_pos_embed, resample_abs_pos_embed_nhwc\nfrom .pos_embed_rel import (\n    RelPosMlp,\n    RelPosBias,\n    RelPosBiasTf,\n    gen_relative_position_index,\n    gen_relative_log_coords,\n    resize_rel_pos_bias_table,\n    resize_rel_pos_bias_table_simple,\n    resize_rel_pos_bias_table_levit,\n)\nfrom .pos_embed_sincos import (\n    pixel_freq_bands,\n    freq_bands,\n    build_sincos2d_pos_embed,\n    build_fourier_pos_embed,\n    build_rotary_pos_embed,\n    apply_rot_embed,\n    apply_rot_embed_cat,\n    apply_rot_embed_list,\n    apply_keep_indices_nlc,\n    FourierEmbed,\n    RotaryEmbedding,\n    RotaryEmbeddingCat,\n    RotaryEmbeddingMixed,\n    RotaryEmbeddingDinoV3,\n    get_mixed_freqs,\n    create_rope_embed,\n)\nfrom .squeeze_excite import SEModule, SqueezeExcite, EffectiveSEModule, EffectiveSqueezeExcite\nfrom .selective_kernel import SelectiveKernel\nfrom .separable_conv import SeparableConv2d, SeparableConvNormAct\nfrom .space_to_depth import SpaceToDepth, DepthToSpace\nfrom .split_attn import SplitAttn\nfrom .split_batchnorm import SplitBatchNorm2d, convert_splitbn_model\nfrom .std_conv import StdConv2d, StdConv2dSame, ScaledStdConv2d, ScaledStdConv2dSame\nfrom .test_time_pool import TestTimePoolHead, apply_test_time_pool\nfrom .trace_utils import _assert, _float_to_int\nfrom .typing import LayerType, PadType, disable_compiler\nfrom .weight_init import (\n    is_meta_device,\n    trunc_normal_,\n    trunc_normal_tf_,\n    variance_scaling_,\n    lecun_normal_,\n    init_weight_jax,\n    init_weight_vit,\n)\n"
  },
  {
    "path": "timm/layers/_fx.py",
    "content": "from typing import Callable, Dict, List, Optional, Union, Tuple, Type\n\nimport torch\nfrom torch import nn\n\ntry:\n    # NOTE we wrap torchvision fns to use timm leaf / no trace definitions\n    from torchvision.models.feature_extraction import create_feature_extractor as _create_feature_extractor\n    from torchvision.models.feature_extraction import get_graph_node_names as _get_graph_node_names\n    has_fx_feature_extraction = True\nexcept ImportError:\n    has_fx_feature_extraction = False\n\n\n__all__ = [\n    'register_notrace_module',\n    'is_notrace_module',\n    'get_notrace_modules',\n    'register_notrace_function',\n    'is_notrace_function',\n    'get_notrace_functions',\n    'create_feature_extractor',\n    'get_graph_node_names',\n]\n\n# modules to treat as leafs when tracing\n_leaf_modules = set()\n\n\ndef register_notrace_module(module: Type[nn.Module]):\n    \"\"\"\n    Any module not under timm.models.layers should get this decorator if we don't want to trace through it.\n    \"\"\"\n    _leaf_modules.add(module)\n    return module\n\n\ndef is_notrace_module(module: Type[nn.Module]):\n    return module in _leaf_modules\n\n\ndef get_notrace_modules():\n    return list(_leaf_modules)\n\n\n# Functions we want to autowrap (treat them as leaves)\n_autowrap_functions = set()\n\n\ndef register_notrace_function(name_or_fn):\n    _autowrap_functions.add(name_or_fn)\n    return name_or_fn\n\n\ndef is_notrace_function(func: Callable):\n    return func in _autowrap_functions\n\n\ndef get_notrace_functions():\n    return list(_autowrap_functions)\n\n\ndef get_graph_node_names(model: nn.Module) -> Tuple[List[str], List[str]]:\n    return _get_graph_node_names(\n        model,\n        tracer_kwargs={\n            'leaf_modules': list(_leaf_modules),\n            'autowrap_functions': list(_autowrap_functions)\n        }\n    )\n\n\ndef create_feature_extractor(model: nn.Module, return_nodes: Union[Dict[str, str], List[str]]):\n    assert 
has_fx_feature_extraction, 'Please update to PyTorch 1.10+, torchvision 0.11+ for FX feature extraction'\n    return _create_feature_extractor(\n        model, return_nodes,\n        tracer_kwargs={\n            'leaf_modules': list(_leaf_modules),\n            'autowrap_functions': list(_autowrap_functions)\n        }\n    )"
  },
  {
    "path": "timm/layers/activations.py",
    "content": "\"\"\" Activations\n\nA collection of activations fn and modules with a common interface so that they can\neasily be swapped. All have an `inplace` arg even if not used.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\n\nimport torch\nfrom torch import nn as nn\nfrom torch.nn import functional as F\n\n\ndef swish(x, inplace: bool = False):\n    \"\"\"Swish - Described in: https://arxiv.org/abs/1710.05941\n    \"\"\"\n    return x.mul_(x.sigmoid()) if inplace else x.mul(x.sigmoid())\n\n\nclass Swish(nn.Module):\n    def __init__(self, inplace: bool = False):\n        super().__init__()\n        self.inplace = inplace\n\n    def forward(self, x):\n        return swish(x, self.inplace)\n\n\ndef mish(x, inplace: bool = False):\n    \"\"\"Mish: A Self Regularized Non-Monotonic Neural Activation Function - https://arxiv.org/abs/1908.08681\n    NOTE: I don't have a working inplace variant\n    \"\"\"\n    return x.mul(F.softplus(x).tanh())\n\n\nclass Mish(nn.Module):\n    \"\"\"Mish: A Self Regularized Non-Monotonic Neural Activation Function - https://arxiv.org/abs/1908.08681\n    \"\"\"\n    def __init__(self, inplace: bool = False):\n        super().__init__()\n\n    def forward(self, x):\n        return mish(x)\n\n\ndef sigmoid(x, inplace: bool = False):\n    return x.sigmoid_() if inplace else x.sigmoid()\n\n\n# PyTorch has this, but not with a consistent inplace argument interface\nclass Sigmoid(nn.Module):\n    def __init__(self, inplace: bool = False):\n        super().__init__()\n        self.inplace = inplace\n\n    def forward(self, x):\n        return x.sigmoid_() if self.inplace else x.sigmoid()\n\n\ndef tanh(x, inplace: bool = False):\n    return x.tanh_() if inplace else x.tanh()\n\n\n# PyTorch has this, but not with a consistent inplace argument interface\nclass Tanh(nn.Module):\n    def __init__(self, inplace: bool = False):\n        super().__init__()\n        self.inplace = inplace\n\n    def forward(self, x):\n        
return x.tanh_() if self.inplace else x.tanh()\n\n\ndef hard_swish(x, inplace: bool = False):\n    inner = F.relu6(x + 3.).div_(6.)\n    return x.mul_(inner) if inplace else x.mul(inner)\n\n\nclass HardSwish(nn.Module):\n    def __init__(self, inplace: bool = False):\n        super().__init__()\n        self.inplace = inplace\n\n    def forward(self, x):\n        return hard_swish(x, self.inplace)\n\n\ndef hard_sigmoid(x, inplace: bool = False):\n    if inplace:\n        return x.add_(3.).clamp_(0., 6.).div_(6.)\n    else:\n        return F.relu6(x + 3.) / 6.\n\n\nclass HardSigmoid(nn.Module):\n    def __init__(self, inplace: bool = False):\n        super().__init__()\n        self.inplace = inplace\n\n    def forward(self, x):\n        return hard_sigmoid(x, self.inplace)\n\n\ndef hard_mish(x, inplace: bool = False):\n    \"\"\" Hard Mish\n    Experimental, based on notes by Mish author Diganta Misra at\n      https://github.com/digantamisra98/H-Mish/blob/0da20d4bc58e696b6803f2523c58d3c8a82782d0/README.md\n    \"\"\"\n    if inplace:\n        return x.mul_(0.5 * (x + 2).clamp(min=0, max=2))\n    else:\n        return 0.5 * x * (x + 2).clamp(min=0, max=2)\n\n\nclass HardMish(nn.Module):\n    def __init__(self, inplace: bool = False):\n        super().__init__()\n        self.inplace = inplace\n\n    def forward(self, x):\n        return hard_mish(x, self.inplace)\n\n\nclass PReLU(nn.PReLU):\n    \"\"\"Applies PReLU (w/ dummy inplace arg)\n    \"\"\"\n    def __init__(self, num_parameters: int = 1, init: float = 0.25, inplace: bool = False) -> None:\n        super().__init__(num_parameters=num_parameters, init=init)\n\n    def forward(self, input: torch.Tensor) -> torch.Tensor:\n        return F.prelu(input, self.weight)\n\n\ndef gelu(x: torch.Tensor, inplace: bool = False) -> torch.Tensor:\n    return F.gelu(x)\n\n\nclass GELU(nn.Module):\n    \"\"\"Applies the Gaussian Error Linear Units function (w/ dummy inplace arg)\n    \"\"\"\n    def __init__(self, inplace: 
bool = False):\n        super().__init__()\n\n    def forward(self, input: torch.Tensor) -> torch.Tensor:\n        return F.gelu(input)\n\n\ndef gelu_tanh(x: torch.Tensor, inplace: bool = False) -> torch.Tensor:\n    return F.gelu(x, approximate='tanh')\n\n\nclass GELUTanh(nn.Module):\n    \"\"\"Applies the Gaussian Error Linear Units function (w/ dummy inplace arg)\n    \"\"\"\n    def __init__(self, inplace: bool = False):\n        super().__init__()\n\n    def forward(self, input: torch.Tensor) -> torch.Tensor:\n        return F.gelu(input, approximate='tanh')\n\n\ndef quick_gelu(x: torch.Tensor, inplace: bool = False) -> torch.Tensor:\n    return x * torch.sigmoid(1.702 * x)\n\n\nclass QuickGELU(nn.Module):\n    \"\"\"Applies the Gaussian Error Linear Units function (w/ dummy inplace arg)\n    \"\"\"\n    def __init__(self, inplace: bool = False):\n        super().__init__()\n\n    def forward(self, input: torch.Tensor) -> torch.Tensor:\n        return quick_gelu(input)\n"
  },
  {
    "path": "timm/layers/activations_me.py",
    "content": "\"\"\" Activations (memory-efficient w/ custom autograd)\n\nA collection of activations fn and modules with a common interface so that they can\neasily be swapped. All have an `inplace` arg even if not used.\n\nThese activations are not compatible with jit scripting or ONNX export of the model, please use\nbasic versions of the activations.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\n\nimport torch\nfrom torch import nn as nn\nfrom torch.nn import functional as F\n\n\ndef swish_fwd(x):\n    return x.mul(torch.sigmoid(x))\n\n\ndef swish_bwd(x, grad_output):\n    x_sigmoid = torch.sigmoid(x)\n    return grad_output * (x_sigmoid * (1 + x * (1 - x_sigmoid)))\n\n\nclass SwishAutoFn(torch.autograd.Function):\n    \"\"\" optimised Swish w/ memory-efficient checkpoint\n    Inspired by conversation btw Jeremy Howard & Adam Pazske\n    https://twitter.com/jeremyphoward/status/1188251041835315200\n    \"\"\"\n    @staticmethod\n    def symbolic(g, x):\n        return g.op(\"Mul\", x, g.op(\"Sigmoid\", x))\n\n    @staticmethod\n    def forward(ctx, x):\n        ctx.save_for_backward(x)\n        return swish_fwd(x)\n\n    @staticmethod\n    def backward(ctx, grad_output):\n        x = ctx.saved_tensors[0]\n        return swish_bwd(x, grad_output)\n\n\ndef swish_me(x, inplace=False):\n    return SwishAutoFn.apply(x)\n\n\nclass SwishMe(nn.Module):\n    def __init__(self, inplace: bool = False):\n        super().__init__()\n\n    def forward(self, x):\n        return SwishAutoFn.apply(x)\n\n\ndef mish_fwd(x):\n    return x.mul(torch.tanh(F.softplus(x)))\n\n\ndef mish_bwd(x, grad_output):\n    x_sigmoid = torch.sigmoid(x)\n    x_tanh_sp = F.softplus(x).tanh()\n    return grad_output.mul(x_tanh_sp + x * x_sigmoid * (1 - x_tanh_sp * x_tanh_sp))\n\n\nclass MishAutoFn(torch.autograd.Function):\n    \"\"\" Mish: A Self Regularized Non-Monotonic Neural Activation Function - https://arxiv.org/abs/1908.08681\n    A memory efficient variant of Mish\n    
\"\"\"\n    @staticmethod\n    def forward(ctx, x):\n        ctx.save_for_backward(x)\n        return mish_fwd(x)\n\n    @staticmethod\n    def backward(ctx, grad_output):\n        x = ctx.saved_tensors[0]\n        return mish_bwd(x, grad_output)\n\n\ndef mish_me(x, inplace=False):\n    return MishAutoFn.apply(x)\n\n\nclass MishMe(nn.Module):\n    def __init__(self, inplace: bool = False):\n        super().__init__()\n\n    def forward(self, x):\n        return MishAutoFn.apply(x)\n\n\ndef hard_sigmoid_fwd(x, inplace: bool = False):\n    return (x + 3).clamp(min=0, max=6).div(6.)\n\n\ndef hard_sigmoid_bwd(x, grad_output):\n    m = torch.ones_like(x) * ((x >= -3.) & (x <= 3.)) / 6.\n    return grad_output * m\n\n\nclass HardSigmoidAutoFn(torch.autograd.Function):\n    @staticmethod\n    def forward(ctx, x):\n        ctx.save_for_backward(x)\n        return hard_sigmoid_fwd(x)\n\n    @staticmethod\n    def backward(ctx, grad_output):\n        x = ctx.saved_tensors[0]\n        return hard_sigmoid_bwd(x, grad_output)\n\n\ndef hard_sigmoid_me(x, inplace: bool = False):\n    return HardSigmoidAutoFn.apply(x)\n\n\nclass HardSigmoidMe(nn.Module):\n    def __init__(self, inplace: bool = False):\n        super().__init__()\n\n    def forward(self, x):\n        return HardSigmoidAutoFn.apply(x)\n\n\ndef hard_swish_fwd(x):\n    return x * (x + 3).clamp(min=0, max=6).div(6.)\n\n\ndef hard_swish_bwd(x, grad_output):\n    m = torch.ones_like(x) * (x >= 3.)\n    m = torch.where((x >= -3.) & (x <= 3.),  x / 3. 
+ .5, m)\n    return grad_output * m\n\n\nclass HardSwishAutoFn(torch.autograd.Function):\n    \"\"\"A memory efficient HardSwish activation\"\"\"\n    @staticmethod\n    def forward(ctx, x):\n        ctx.save_for_backward(x)\n        return hard_swish_fwd(x)\n\n    @staticmethod\n    def backward(ctx, grad_output):\n        x = ctx.saved_tensors[0]\n        return hard_swish_bwd(x, grad_output)\n\n    @staticmethod\n    def symbolic(g, self):\n        input = g.op(\"Add\", self, g.op('Constant', value_t=torch.tensor(3, dtype=torch.float)))\n        hardtanh_ = g.op(\"Clip\", input, g.op('Constant', value_t=torch.tensor(0, dtype=torch.float)), g.op('Constant', value_t=torch.tensor(6, dtype=torch.float)))\n        hardtanh_ = g.op(\"Div\", hardtanh_, g.op('Constant', value_t=torch.tensor(6, dtype=torch.float)))\n        return g.op(\"Mul\", self, hardtanh_)\n\n\ndef hard_swish_me(x, inplace=False):\n    return HardSwishAutoFn.apply(x)\n\n\nclass HardSwishMe(nn.Module):\n    def __init__(self, inplace: bool = False):\n        super().__init__()\n\n    def forward(self, x):\n        return HardSwishAutoFn.apply(x)\n\n\ndef hard_mish_fwd(x):\n    return 0.5 * x * (x + 2).clamp(min=0, max=2)\n\n\ndef hard_mish_bwd(x, grad_output):\n    m = torch.ones_like(x) * (x >= -2.)\n    m = torch.where((x >= -2.) 
& (x <= 0.), x + 1., m)\n    return grad_output * m\n\n\nclass HardMishAutoFn(torch.autograd.Function):\n    \"\"\" A memory efficient variant of Hard Mish\n    Experimental, based on notes by Mish author Diganta Misra at\n      https://github.com/digantamisra98/H-Mish/blob/0da20d4bc58e696b6803f2523c58d3c8a82782d0/README.md\n    \"\"\"\n    @staticmethod\n    def forward(ctx, x):\n        ctx.save_for_backward(x)\n        return hard_mish_fwd(x)\n\n    @staticmethod\n    def backward(ctx, grad_output):\n        x = ctx.saved_tensors[0]\n        return hard_mish_bwd(x, grad_output)\n\n\ndef hard_mish_me(x, inplace: bool = False):\n    return HardMishAutoFn.apply(x)\n\n\nclass HardMishMe(nn.Module):\n    def __init__(self, inplace: bool = False):\n        super().__init__()\n\n    def forward(self, x):\n        return HardMishAutoFn.apply(x)\n\n\n\n"
  },
  {
    "path": "timm/layers/adaptive_avgmax_pool.py",
    "content": "\"\"\" PyTorch selectable adaptive pooling\nAdaptive pooling with the ability to select the type of pooling from:\n    * 'avg' - Average pooling\n    * 'max' - Max pooling\n    * 'avgmax' - Sum of average and max pooling re-scaled by 0.5\n    * 'avgmaxc' - Concatenation of average and max pooling along feature dim, doubles feature dim\n\nBoth a functional and a nn.Module version of the pooling is provided.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom typing import Optional, Tuple, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom .format import get_spatial_dim, get_channel_dim\n\n_int_tuple_2_t = Union[int, Tuple[int, int]]\n\n\ndef adaptive_pool_feat_mult(pool_type='avg'):\n    if pool_type.endswith('catavgmax'):\n        return 2\n    else:\n        return 1\n\n\ndef adaptive_avgmax_pool2d(x, output_size: _int_tuple_2_t = 1):\n    x_avg = F.adaptive_avg_pool2d(x, output_size)\n    x_max = F.adaptive_max_pool2d(x, output_size)\n    return 0.5 * (x_avg + x_max)\n\n\ndef adaptive_catavgmax_pool2d(x, output_size: _int_tuple_2_t = 1):\n    x_avg = F.adaptive_avg_pool2d(x, output_size)\n    x_max = F.adaptive_max_pool2d(x, output_size)\n    return torch.cat((x_avg, x_max), 1)\n\n\ndef select_adaptive_pool2d(x, pool_type='avg', output_size: _int_tuple_2_t = 1):\n    \"\"\"Selectable global pooling function with dynamic input kernel size\n    \"\"\"\n    if pool_type == 'avg':\n        x = F.adaptive_avg_pool2d(x, output_size)\n    elif pool_type == 'avgmax':\n        x = adaptive_avgmax_pool2d(x, output_size)\n    elif pool_type == 'catavgmax':\n        x = adaptive_catavgmax_pool2d(x, output_size)\n    elif pool_type == 'max':\n        x = F.adaptive_max_pool2d(x, output_size)\n    else:\n        assert False, 'Invalid pool type: %s' % pool_type\n    return x\n\n\nclass FastAdaptiveAvgPool(nn.Module):\n    def __init__(self, flatten: bool = False, input_fmt: F = 'NCHW'):\n        
super().__init__()\n        self.flatten = flatten\n        self.dim = get_spatial_dim(input_fmt)\n\n    def forward(self, x):\n        return x.mean(self.dim, keepdim=not self.flatten)\n\n\nclass FastAdaptiveMaxPool(nn.Module):\n    def __init__(self, flatten: bool = False, input_fmt: str = 'NCHW'):\n        super().__init__()\n        self.flatten = flatten\n        self.dim = get_spatial_dim(input_fmt)\n\n    def forward(self, x):\n        return x.amax(self.dim, keepdim=not self.flatten)\n\n\nclass FastAdaptiveAvgMaxPool(nn.Module):\n    def __init__(self, flatten: bool = False, input_fmt: str = 'NCHW'):\n        super().__init__()\n        self.flatten = flatten\n        self.dim = get_spatial_dim(input_fmt)\n\n    def forward(self, x):\n        x_avg = x.mean(self.dim, keepdim=not self.flatten)\n        x_max = x.amax(self.dim, keepdim=not self.flatten)\n        return 0.5 * x_avg + 0.5 * x_max\n\n\nclass FastAdaptiveCatAvgMaxPool(nn.Module):\n    def __init__(self, flatten: bool = False, input_fmt: str = 'NCHW'):\n        super().__init__()\n        self.flatten = flatten\n        self.dim_reduce = get_spatial_dim(input_fmt)\n        if flatten:\n            self.dim_cat = 1\n        else:\n            self.dim_cat = get_channel_dim(input_fmt)\n\n    def forward(self, x):\n        x_avg = x.mean(self.dim_reduce, keepdim=not self.flatten)\n        x_max = x.amax(self.dim_reduce, keepdim=not self.flatten)\n        return torch.cat((x_avg, x_max), self.dim_cat)\n\n\nclass AdaptiveAvgMaxPool2d(nn.Module):\n    def __init__(self, output_size: _int_tuple_2_t = 1):\n        super().__init__()\n        self.output_size = output_size\n\n    def forward(self, x):\n        return adaptive_avgmax_pool2d(x, self.output_size)\n\n\nclass AdaptiveCatAvgMaxPool2d(nn.Module):\n    def __init__(self, output_size: _int_tuple_2_t = 1):\n        super().__init__()\n        self.output_size = output_size\n\n    def forward(self, x):\n        return adaptive_catavgmax_pool2d(x, 
self.output_size)\n\n\nclass SelectAdaptivePool2d(nn.Module):\n    \"\"\"Selectable global pooling layer with dynamic input kernel size\n    \"\"\"\n    def __init__(\n            self,\n            output_size: _int_tuple_2_t = 1,\n            pool_type: str = 'fast',\n            flatten: bool = False,\n            input_fmt: str = 'NCHW',\n    ):\n        super().__init__()\n        assert input_fmt in ('NCHW', 'NHWC')\n        self.pool_type = pool_type or ''  # convert other falsy values to empty string for consistent TS typing\n        pool_type = pool_type.lower()\n        if not pool_type:\n            self.pool = nn.Identity()  # pass through\n            self.flatten = nn.Flatten(1) if flatten else nn.Identity()\n        elif pool_type.startswith('fast') or input_fmt != 'NCHW':\n            assert output_size == 1, 'Fast pooling and non NCHW input formats require output_size == 1.'\n            if pool_type.endswith('catavgmax'):\n                self.pool = FastAdaptiveCatAvgMaxPool(flatten, input_fmt=input_fmt)\n            elif pool_type.endswith('avgmax'):\n                self.pool = FastAdaptiveAvgMaxPool(flatten, input_fmt=input_fmt)\n            elif pool_type.endswith('max'):\n                self.pool = FastAdaptiveMaxPool(flatten, input_fmt=input_fmt)\n            elif pool_type == 'fast' or pool_type.endswith('avg'):\n                self.pool = FastAdaptiveAvgPool(flatten, input_fmt=input_fmt)\n            else:\n                assert False, 'Invalid pool type: %s' % pool_type\n            self.flatten = nn.Identity()\n        else:\n            assert input_fmt == 'NCHW'\n            if pool_type == 'avgmax':\n                self.pool = AdaptiveAvgMaxPool2d(output_size)\n            elif pool_type == 'catavgmax':\n                self.pool = AdaptiveCatAvgMaxPool2d(output_size)\n            elif pool_type == 'max':\n                self.pool = nn.AdaptiveMaxPool2d(output_size)\n            elif pool_type == 'avg':\n                
self.pool = nn.AdaptiveAvgPool2d(output_size)\n            else:\n                assert False, 'Invalid pool type: %s' % pool_type\n            self.flatten = nn.Flatten(1) if flatten else nn.Identity()\n\n    def is_identity(self):\n        return not self.pool_type\n\n    def forward(self, x):\n        x = self.pool(x)\n        x = self.flatten(x)\n        return x\n\n    def feat_mult(self):\n        return adaptive_pool_feat_mult(self.pool_type)\n\n    def __repr__(self):\n        return self.__class__.__name__ + '(' \\\n               + 'pool_type=' + self.pool_type \\\n               + ', flatten=' + str(self.flatten) + ')'\n\n"
  },
  {
    "path": "timm/layers/attention.py",
    "content": "from typing import Final, Optional, Type\n\nimport torch\nfrom torch import nn as nn\nfrom torch.nn import functional as F\n\nfrom ._fx import register_notrace_function\nfrom .config import use_fused_attn\nfrom .pos_embed_sincos import apply_rot_embed_cat\n\n\n__all__ = ['Attention', 'AttentionRope', 'maybe_add_mask', 'resolve_self_attn_mask']\n\n\n@torch.fx.wrap\n@register_notrace_function\ndef maybe_add_mask(scores: torch.Tensor, attn_mask: Optional[torch.Tensor] = None):\n    return scores if attn_mask is None else scores + attn_mask\n\n\n@torch.fx.wrap\n@register_notrace_function\ndef resolve_self_attn_mask(\n        seq_len: int,\n        attn: torch.Tensor,\n        attn_mask: Optional[torch.Tensor] = None,\n        is_causal: bool = False,\n) -> Optional[torch.Tensor]:\n    # Build additive bias matching SDPA semantics for self-attention\n    # is_causal and attn_mask are mutually exclusive (is_causal takes precedence)\n    if is_causal:\n        attn_bias = attn.new_full((seq_len, seq_len), float('-inf')).triu_(1)\n    elif attn_mask is None:\n        attn_bias = None\n    elif attn_mask.dtype == torch.bool:\n        attn_bias = torch.zeros_like(attn_mask, dtype=attn.dtype)\n        attn_bias.masked_fill_(~attn_mask, float('-inf'))\n    else:\n        attn_bias = attn_mask\n    return attn_bias\n\n\nclass Attention(nn.Module):\n    \"\"\"Standard Multi-head Self Attention module with QKV projection.\n\n    This module implements the standard multi-head attention mechanism used in transformers.\n    It supports both the fused attention implementation (scaled_dot_product_attention) for\n    efficiency when available, and a manual implementation otherwise. 
The module includes\n    options for QK normalization, attention dropout, and projection dropout.\n    \"\"\"\n    fused_attn: Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            attn_head_dim: Optional[int] = None,\n            dim_out: Optional[int] = None,\n            qkv_bias: bool = False,\n            qk_norm: bool = False,\n            scale_norm: bool = False,\n            proj_bias: bool = True,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize the Attention module.\n\n        Args:\n            dim: Input dimension of the token embeddings.\n            num_heads: Number of attention heads.\n            attn_head_dim: Dimension of each attention head. If None, computed as dim // num_heads.\n            dim_out: Output dimension. If None, same as dim.\n            qkv_bias: Whether to use bias in the query, key, value projections.\n            qk_norm: Whether to apply normalization to query and key vectors.\n            scale_norm: Whether to apply normalization to attention output before projection.\n            proj_bias: Whether to use bias in the output projection.\n            attn_drop: Dropout rate applied to the attention weights.\n            proj_drop: Dropout rate applied after the output projection.\n            norm_layer: Normalization layer constructor for QK normalization if enabled.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        dim_out = dim_out or dim\n        head_dim = attn_head_dim\n        if head_dim is None:\n            assert dim % num_heads == 0, 'dim should be divisible by num_heads'\n            head_dim = dim // num_heads\n        if qk_norm or scale_norm:\n            assert norm_layer is not None, 'norm_layer must be provided if 
qk_norm or scale_norm is True'\n\n        self.num_heads = num_heads\n        self.head_dim = head_dim\n        self.attn_dim = num_heads * head_dim\n        self.scale = head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n\n        self.qkv = nn.Linear(dim, self.attn_dim * 3, bias=qkv_bias, **dd)\n        self.q_norm = norm_layer(head_dim, **dd) if qk_norm else nn.Identity()\n        self.k_norm = norm_layer(head_dim, **dd) if qk_norm else nn.Identity()\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.norm = norm_layer(self.attn_dim, **dd) if scale_norm else nn.Identity()\n        self.proj = nn.Linear(self.attn_dim, dim_out, bias=proj_bias, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n    def forward(\n            self,\n            x: torch.Tensor,\n            attn_mask: Optional[torch.Tensor] = None,\n            is_causal: bool = False,\n    ) -> torch.Tensor:\n        B, N, C = x.shape\n        qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)\n        q, k, v = qkv.unbind(0)\n        q, k = self.q_norm(q), self.k_norm(k)\n\n        if self.fused_attn:\n            x = F.scaled_dot_product_attention(\n                q, k, v,\n                attn_mask=attn_mask,\n                dropout_p=self.attn_drop.p if self.training else 0.,\n                is_causal=is_causal,\n            )\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            attn_bias = resolve_self_attn_mask(N, attn, attn_mask, is_causal)\n            attn = maybe_add_mask(attn, attn_bias)\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x = attn @ v\n\n        x = x.transpose(1, 2).reshape(B, N, self.attn_dim)\n        x = self.norm(x)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n\nclass AttentionRope(nn.Module):\n    \"\"\" A Self Attention module with ROPE support.\n\n    Includes options 
for:\n     * QK normalization option\n     * Attention output (scale) normalization\n     * Fused or unfused QKV projection support\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            dim_out: Optional[int] = None,\n            qkv_bias: bool = True,\n            qkv_fused: bool = True,\n            num_prefix_tokens: int = 1,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            attn_head_dim: Optional[int] = None,\n            norm_layer: Type[nn.Module] = None,\n            qk_norm: bool = False,\n            scale_norm: bool = False,\n            proj_bias: bool = True,\n            rotate_half: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize the Attention module.\n\n        Args:\n            dim: Input dimension of the token embeddings\n            num_heads: Number of attention heads\n            dim_out: Output dimension. If None, same as dim.\n            qkv_bias: Whether to add a bias term to the query, key, and value projections\n            qkv_fused: Whether to use fused QKV projection (single linear) or separate projections\n            num_prefix_tokens: Number of reg/cls tokens at the beginning of the sequence that\n                should not have position embeddings applied\n            attn_drop: Dropout rate for attention weights\n            proj_drop: Dropout rate for the output projection\n            attn_head_dim: Dimension of each attention head. 
If None, computed as dim // num_heads.\n            norm_layer: Normalization layer constructor to use for QK and scale normalization\n            qk_norm: Enable normalization of query (Q) and key (K) vectors with norm_layer\n            scale_norm: Enable normalization (scaling) of attention output with norm_layer\n            proj_bias: Whether to use bias in the output projection\n            rotate_half: Use 'half' ROPE layout instead of default 'interleaved'\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        dim_out = dim_out or dim\n        head_dim = attn_head_dim\n        if head_dim is None:\n            assert dim % num_heads == 0, 'dim should be divisible by num_heads'\n            head_dim = dim // num_heads\n        if scale_norm or qk_norm:\n            assert norm_layer is not None, 'norm_layer must be provided if qk_norm or scale_norm is True'\n\n        self.num_heads = num_heads\n        self.head_dim = head_dim\n        self.attn_dim = head_dim * num_heads\n        self.scale = head_dim ** -0.5\n        self.num_prefix_tokens = num_prefix_tokens\n        self.fused_attn = use_fused_attn()\n        self.rotate_half = rotate_half\n\n        if qkv_fused:\n            self.qkv = nn.Linear(dim, self.attn_dim * 3, bias=qkv_bias, **dd)\n            self.q_proj = self.k_proj = self.v_proj = None\n        else:\n            self.qkv = None\n            self.q_proj = nn.Linear(dim, self.attn_dim, bias=qkv_bias, **dd)\n            self.k_proj = nn.Linear(dim, self.attn_dim, bias=qkv_bias, **dd)\n            self.v_proj = nn.Linear(dim, self.attn_dim, bias=qkv_bias, **dd)\n\n        self.q_norm = norm_layer(head_dim, **dd) if qk_norm else nn.Identity()\n        self.k_norm = norm_layer(head_dim, **dd) if qk_norm else nn.Identity()\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.norm = norm_layer(self.attn_dim, **dd) if scale_norm else nn.Identity()\n        self.proj = 
nn.Linear(self.attn_dim, dim_out, bias=proj_bias, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n    def forward(\n            self,\n            x,\n            rope: Optional[torch.Tensor] = None,\n            attn_mask: Optional[torch.Tensor] = None,\n            is_causal: bool = False,\n    ):\n        \"\"\"Forward pass for the attention module.\n\n        Args:\n            x: Input tensor of shape (batch_size, sequence_length, embedding_dim)\n            rope: Rotary position embeddings tensor for position-aware attention\n            attn_mask: Optional attention mask to apply during attention computation\n            is_causal: If True, use causal (autoregressive) masking\n\n        Returns:\n            Tensor of shape (batch_size, sequence_length, dim_out)\n        \"\"\"\n        B, N, C = x.shape\n\n        if self.qkv is not None:\n            qkv = self.qkv(x)\n            qkv = qkv.reshape(B, N, 3, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)\n            q, k, v = qkv.unbind(0)  # B, num_heads, N, head_dim\n        else:\n            q = self.q_proj(x).reshape(B, N, self.num_heads, self.head_dim).transpose(1, 2)\n            k = self.k_proj(x).reshape(B, N, self.num_heads, self.head_dim).transpose(1, 2)\n            v = self.v_proj(x).reshape(B, N, self.num_heads, self.head_dim).transpose(1, 2)\n\n        q, k = self.q_norm(q), self.k_norm(k)\n\n        if rope is not None:\n            npt = self.num_prefix_tokens\n            half = getattr(self, 'rotate_half', False)\n            q = torch.cat([q[:, :, :npt, :], apply_rot_embed_cat(q[:, :, npt:, :], rope, half=half)], dim=2).type_as(v)\n            k = torch.cat([k[:, :, :npt, :], apply_rot_embed_cat(k[:, :, npt:, :], rope, half=half)], dim=2).type_as(v)\n\n        if self.fused_attn:\n            x = F.scaled_dot_product_attention(\n                q, k, v,\n                attn_mask=attn_mask,\n                dropout_p=self.attn_drop.p if self.training else 0.,\n           
     is_causal=is_causal,\n            )\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            attn_bias = resolve_self_attn_mask(N, attn, attn_mask, is_causal)\n            attn = maybe_add_mask(attn, attn_bias)\n            attn = attn.softmax(dim=-1)\n\n            attn = self.attn_drop(attn)\n            x = attn @ v\n\n        x = x.transpose(1, 2).reshape(B, N, self.attn_dim)\n        x = self.norm(x)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n"
  },
  {
    "path": "timm/layers/attention2d.py",
    "content": "from typing import List, Optional, Type, Union\n\nimport torch\nfrom torch import nn as nn\nfrom torch.nn import functional as F\n\nfrom .config import use_fused_attn\nfrom .create_conv2d import create_conv2d\nfrom .helpers import to_2tuple\nfrom .pool2d_same import create_pool2d\n\n\nclass MultiQueryAttentionV2(nn.Module):\n    \"\"\"Multi Query Attention.\n\n    Fast Transformer Decoding: One Write-Head is All You Need\n    https://arxiv.org/pdf/1911.02150.pdf\n\n    This is an acceletor optimized version - removing multiple unnecessary\n    tensor transpose by re-arranging indices according to the following rules: 1)\n    contracted indices are at the end, 2) other indices have the same order in the\n    input and output tensores.\n\n    Compared to V1, this gives 3x speed up.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            dim_out: Optional[int] = None,\n            num_heads: int = 8,\n            key_dim: int = 64,\n            value_dim: int = 64,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initializer.\"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        dim_out = dim_out or dim\n        self.num_heads = num_heads\n        self.key_dim = key_dim\n        self.value_dim = value_dim\n        self.scale = key_dim ** -0.5\n\n        self.query_proj = nn.Parameter(torch.empty((self.num_heads, self.key_dim, dim), **dd))\n        self.key_proj = nn.Parameter(torch.empty((dim, self.key_dim), **dd))\n        self.value_proj = nn.Parameter(torch.empty((dim, self.value_dim), **dd))\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.out_proj = nn.Parameter(torch.empty((dim_out, self.num_heads, self.value_dim), **dd))\n        self.proj_drop = nn.Dropout(proj_drop)\n\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        scale = 
self.key_proj.shape[0] ** -0.5\n        nn.init.normal_(self.query_proj, std=scale)\n        nn.init.normal_(self.key_proj, std=scale)\n        nn.init.normal_(self.value_proj, std=scale)\n        nn.init.normal_(self.out_proj, std=self.out_proj.shape[0] ** -0.5)\n\n    def _reshape_input(self, t):\n        \"\"\"Reshapes a tensor to three dimensions, keeping the first and last.\"\"\"\n        s = t.shape\n        # Propagate the shape statically where possible.\n        #num = t.shape[1:-1].numel()\n        #return t.reshape(s[0], num, s[-1])\n        return t.reshape(s[0], s[1], -1).transpose(1, 2)\n\n    def forward(self, x, m: Optional[torch.Tensor] = None):\n        \"\"\"Run layer computation.\"\"\"\n        b, _, h, w = x.shape\n        m = m if m is not None else x\n\n        reshaped_x = self._reshape_input(x)\n        reshaped_m = self._reshape_input(m)\n\n        q = torch.einsum('bnd,hkd->bnhk', reshaped_x, self.query_proj)\n        k = torch.einsum('bmd,dk->bmk', reshaped_m, self.key_proj)\n\n        attn = torch.einsum('bnhk,bmk->bnhm', q, k) * self.scale\n        attn = attn.softmax(dim=-1)\n        attn = self.attn_drop(attn)\n\n        v = torch.einsum('bmd,dv->bmv', reshaped_m, self.value_proj)\n        o = torch.einsum('bnhm,bmv->bnhv', attn, v)\n        result = torch.einsum('bnhv,dhv->bdn', o, self.out_proj)\n        result = self.proj_drop(result)\n        return result.reshape(b, -1, h, w)\n\n\nclass MultiQueryAttention2d(nn.Module):\n    \"\"\"Multi Query Attention with spatial downsampling.\n\n     3 parameters are introduced for the spatial downsampling:\n     1. kv_stride: downsampling factor on Key and Values only.\n     2. query_strides: horizontal & vertical strides on Query only.\n\n    This is an optimized version.\n    1. Projections in Attention is explicit written out as 1x1 Conv2D.\n    2. 
Additional reshapes are introduced to bring a up to 3x speed up.\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            dim_out: Optional[int] = None,\n            num_heads: int = 8,\n            key_dim: Optional[int] = None,\n            value_dim: Optional[int] = None,\n            query_strides: int = 1,\n            kv_stride: int = 1,\n            dw_kernel_size: int = 3,\n            dilation: int = 1,\n            padding: Union[str, int, List[int]] = '',\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            use_bias: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initializer.\n\n        Args:\n          num_heads: Number of attention heads.\n          key_dim: Size of the attention key dimension.\n          value_dim: Size of the attention value dimension.\n          query_strides: Vertical stride size for query only.\n          kv_stride: Key and value stride size.\n          dw_kernel_size: Spatial dimension of the depthwise kernel.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        dim_out = dim_out or dim\n        self.num_heads = num_heads\n        self.key_dim = key_dim or dim // num_heads\n        self.value_dim = value_dim or dim // num_heads\n        self.query_strides = to_2tuple(query_strides)\n        self.kv_stride = kv_stride\n        self.has_query_strides = any([s > 1 for s in self.query_strides])\n        self.scale = self.key_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n        self.drop = attn_drop\n\n        self.query = nn.Sequential()\n        if self.has_query_strides:\n            # FIXME dilation\n            if padding == 'same':\n                self.query.add_module('down_pool', create_pool2d(\n                    'avg',\n                    
kernel_size=self.query_strides,\n                    padding='same',\n                ))\n            else:\n                # no pad if not 'same' as kern=stride=even\n                self.query.add_module('down_pool', nn.AvgPool2d(kernel_size=query_strides))\n            self.query.add_module('norm', norm_layer(dim, **dd))\n        self.query.add_module('proj', create_conv2d(\n            dim,\n            self.num_heads * self.key_dim,\n            kernel_size=1,\n            bias=use_bias,\n            **dd,\n        ))\n\n        self.key = nn.Sequential()\n        if kv_stride > 1:\n            self.key.add_module('down_conv', create_conv2d(\n                dim,\n                dim,\n                kernel_size=dw_kernel_size,\n                stride=kv_stride,\n                dilation=dilation,\n                padding=padding,\n                depthwise=True,\n                **dd,\n            ))\n            self.key.add_module('norm', norm_layer(dim, **dd))\n        self.key.add_module('proj', create_conv2d(\n            dim,\n            self.key_dim,\n            kernel_size=1,\n            padding=padding,\n            bias=use_bias,\n            **dd,\n        ))\n\n        self.value = nn.Sequential()\n        if kv_stride > 1:\n            self.value.add_module('down_conv', create_conv2d(\n                dim,\n                dim,\n                kernel_size=dw_kernel_size,\n                stride=kv_stride,\n                dilation=dilation,\n                padding=padding,\n                depthwise=True,\n                **dd,\n            ))\n            self.value.add_module('norm', norm_layer(dim, **dd))\n        self.value.add_module('proj', create_conv2d(\n            dim,\n            self.value_dim,\n            kernel_size=1,\n            bias=use_bias,\n            **dd,\n        ))\n\n        self.attn_drop = nn.Dropout(attn_drop)\n\n        self.output = nn.Sequential()\n        if self.has_query_strides:\n            
self.output.add_module('upsample', nn.Upsample(\n                scale_factor=self.query_strides,\n                mode='bilinear',\n                align_corners=False\n            ))\n        self.output.add_module('proj', create_conv2d(\n            self.value_dim * self.num_heads,\n            dim_out,\n            kernel_size=1,\n            bias=use_bias,\n            **dd,\n        ))\n        self.output.add_module('drop', nn.Dropout(proj_drop))\n\n        self.einsum = False\n        self.init_weights()\n\n    def init_weights(self):\n        # using xavier appeared to improve stability for mobilenetv4 hybrid w/ this layer\n        nn.init.xavier_uniform_(self.query.proj.weight)\n        nn.init.xavier_uniform_(self.key.proj.weight)\n        nn.init.xavier_uniform_(self.value.proj.weight)\n        if self.kv_stride > 1:\n            nn.init.xavier_uniform_(self.key.down_conv.weight)\n            nn.init.xavier_uniform_(self.value.down_conv.weight)\n        nn.init.xavier_uniform_(self.output.proj.weight)\n\n    def _reshape_input(self, t: torch.Tensor):\n        \"\"\"Reshapes a tensor to three dimensions, keeping the batch and channels.\"\"\"\n        s = t.shape\n        t = t.reshape(s[0], s[1], -1).transpose(1, 2)\n        if self.einsum:\n            return t\n        else:\n            return t.unsqueeze(1).contiguous()\n\n    def _reshape_projected_query(self, t: torch.Tensor, num_heads: int, key_dim: int):\n        \"\"\"Reshapes projected query: [b, n, n, h x k] -> [b, n x n, h, k].\"\"\"\n        s = t.shape\n        t = t.reshape(s[0], num_heads, key_dim, -1)\n        if self.einsum:\n            return t.permute(0, 3, 1, 2).contiguous()\n        else:\n            return t.transpose(-1, -2).contiguous()\n\n    def _reshape_output(self, t: torch.Tensor, num_heads: int, h_px: int, w_px: int):\n        \"\"\"Reshape output:[b, n x n x h, k] -> [b, n, n, hk].\"\"\"\n        s = t.shape\n        feat_dim = s[-1] * num_heads\n        if not 
self.einsum:\n            t = t.transpose(1, 2)\n        return t.reshape(s[0], h_px, w_px, feat_dim).permute(0, 3, 1, 2).contiguous()\n\n    def forward(self, x, attn_mask: Optional[torch.Tensor] = None):\n        \"\"\"Run layer computation.\"\"\"\n        B, C, H, W = s = x.shape\n\n        q = self.query(x)\n        # desired q shape: [b, h, k, n x n] - [b, l, h, k]\n        q = self._reshape_projected_query(q, self.num_heads, self.key_dim)\n\n        k = self.key(x)\n        # output shape of k: [b, k, p], p = m x m\n        k = self._reshape_input(k)\n\n        v = self.value(x)\n        # output shape of v: [ b, p, k], p = m x m\n        v = self._reshape_input(v)\n\n        # desired q shape: [b, n x n, h, k]\n        # desired k shape: [b, m x m, k]\n        # desired logits shape: [b, n x n, h, m x m]\n        if self.einsum:\n            attn = torch.einsum('blhk,bpk->blhp', q, k) * self.scale\n            if attn_mask is not None:\n                # NOTE: assumes mask is float and in correct shape\n                attn = attn + attn_mask\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            o = torch.einsum('blhp,bpk->blhk', attn, v)\n        else:\n            if self.fused_attn:\n                o = F.scaled_dot_product_attention(\n                    q, k, v,\n                    attn_mask=attn_mask,\n                    dropout_p=self.attn_drop.p if self.training else 0.\n                )\n            else:\n                q = q * self.scale\n                attn = q @ k.transpose(-1, -2)\n                if attn_mask is not None:\n                    # NOTE: assumes mask is float and in correct shape\n                    attn = attn + attn_mask\n                attn = attn.softmax(dim=-1)\n                attn = self.attn_drop(attn)\n                o = attn @ v\n\n        # reshape o into [b, hk, n, n,]\n        o = self._reshape_output(o, self.num_heads, H // self.query_strides[0], W // 
self.query_strides[1])\n        x = self.output(o)\n        return x\n\n\nclass Attention2d(nn.Module):\n    fused_attn: torch.jit.Final[bool]\n\n    \"\"\" multi-head attention for 2D NCHW tensors\"\"\"\n    def __init__(\n            self,\n            dim: int,\n            dim_out: Optional[int] = None,\n            num_heads: int = 32,\n            bias: bool = True,\n            expand_first: bool = False,\n            head_first: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        dim_out = dim_out or dim\n        dim_attn = dim_out if expand_first else dim\n        self.num_heads = num_heads\n        self.dim_head = dim_attn // num_heads\n        self.head_first = head_first\n        self.fused_attn = use_fused_attn()\n\n        self.qkv = nn.Conv2d(dim, dim_attn * 3, 1, bias=bias, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Conv2d(dim_attn, dim_out, 1, bias=bias, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n    def forward(self, x, attn_mask: Optional[torch.Tensor] = None):\n        B, C, H, W = x.shape\n\n        if self.head_first:\n            q, k, v = self.qkv(x).view(B, self.num_heads, self.dim_head * 3, -1).chunk(3, dim=2)\n        else:\n            q, k, v = self.qkv(x).reshape(B, 3, self.num_heads, self.dim_head, -1).unbind(1)\n\n        if self.fused_attn:\n            x = torch.nn.functional.scaled_dot_product_attention(\n                q.transpose(-1, -2).contiguous(),\n                k.transpose(-1, -2).contiguous(),\n                v.transpose(-1, -2).contiguous(),\n                attn_mask=attn_mask,\n                dropout_p=self.attn_drop.p if self.training else 0.,\n            ).transpose(-1, -2).reshape(B, -1, H, W)\n        else:\n            q = q.transpose(-1, -2)\n            v = v.transpose(-1, -2)\n      
      attn = q @ k * q.size(-1) ** -0.5\n            if attn_mask is not None:\n                # NOTE: assumes mask is float and in correct shape\n                attn = attn + attn_mask\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x = (attn @ v).transpose(-1, -2).reshape(B, -1, H, W)\n\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n"
  },
  {
    "path": "timm/layers/attention_pool.py",
    "content": "from typing import Optional, Type\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom .attention import maybe_add_mask\nfrom .config import use_fused_attn\nfrom .mlp import Mlp\nfrom .weight_init import trunc_normal_tf_\n\n\nclass AttentionPoolLatent(nn.Module):\n    \"\"\" Attention pooling w/ latent query\n\n    Setting out_features=0 disables the output projection, norm, and MLP layers (pre_logits mode).\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            in_features: int,\n            out_features: int = None,\n            embed_dim: int = None,\n            num_heads: int = 8,\n            feat_size: Optional[int] = None,\n            mlp_ratio: float = 4.0,\n            qkv_bias: bool = True,\n            qk_norm: bool = False,\n            latent_len: int = 1,\n            latent_dim: int = None,\n            pos_embed: str = '',\n            pool_type: str = 'token',\n            norm_layer: Optional[Type[nn.Module]] = None,\n            act_layer: Optional[Type[nn.Module]] = nn.GELU,\n            drop: float = 0.0,\n            device = None,\n            dtype = None\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        embed_dim = embed_dim or in_features\n        if out_features is None:\n            out_features = in_features\n        assert embed_dim % num_heads == 0\n        self.num_heads = num_heads\n        self.head_dim = embed_dim // num_heads\n        self.feat_size = feat_size\n        self.scale = self.head_dim ** -0.5\n        self.pool = pool_type\n        self.fused_attn = use_fused_attn()\n\n        if pos_embed == 'abs':\n            assert feat_size is not None\n            self.pos_embed = nn.Parameter(torch.zeros(feat_size, in_features, **dd))\n        else:\n            self.pos_embed = None\n\n        self.latent_dim = latent_dim or embed_dim\n        self.latent_len = latent_len\n        
self.latent = nn.Parameter(torch.zeros(1, self.latent_len, embed_dim, **dd))\n\n        self.q = nn.Linear(embed_dim, embed_dim, bias=qkv_bias, **dd)\n        self.kv = nn.Linear(embed_dim, embed_dim * 2, bias=qkv_bias, **dd)\n        if qk_norm:\n            qk_norm_layer = norm_layer or nn.LayerNorm\n            self.q_norm = qk_norm_layer(self.head_dim, **dd)\n            self.k_norm = qk_norm_layer(self.head_dim, **dd)\n        else:\n            self.q_norm = nn.Identity()\n            self.k_norm = nn.Identity()\n\n        if out_features > 0:\n            self.proj = nn.Linear(embed_dim, out_features, **dd)\n            self.proj_drop = nn.Dropout(drop)\n            self.norm = norm_layer(out_features, **dd) if norm_layer is not None else nn.Identity()\n            self.mlp = Mlp(out_features, int(out_features * mlp_ratio), out_features=out_features, act_layer=act_layer, **dd)\n        else:\n            self.proj = nn.Identity()\n            self.proj_drop = nn.Dropout(drop)\n            self.norm = nn.Identity()\n            self.mlp = None\n            out_features = embed_dim\n\n        self.out_features = out_features\n\n        self.init_weights()\n\n    def init_weights(self):\n        if self.pos_embed is not None:\n            trunc_normal_tf_(self.pos_embed, std=self.pos_embed.shape[1] ** -0.5)\n        trunc_normal_tf_(self.latent, std=self.latent_dim ** -0.5)\n\n    def forward(self, x, attn_mask: Optional[torch.Tensor] = None):\n        B, N, C = x.shape\n\n        if self.pos_embed is not None:\n            # FIXME interpolate\n            x = x + self.pos_embed.unsqueeze(0).to(x.dtype)\n\n        q_latent = self.latent.expand(B, -1, -1)\n        q = self.q(q_latent).reshape(B, self.latent_len, self.num_heads, self.head_dim).transpose(1, 2)\n\n        kv = self.kv(x).reshape(B, N, 2, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)\n        k, v = kv.unbind(0)\n\n        q, k = self.q_norm(q), self.k_norm(k)\n\n        if 
self.fused_attn:\n            x = F.scaled_dot_product_attention(q, k, v, attn_mask=attn_mask)\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            attn = maybe_add_mask(attn, attn_mask)\n            attn = attn.softmax(dim=-1)\n            x = attn @ v\n        x = x.transpose(1, 2).reshape(B, self.latent_len, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n\n        if self.mlp is not None:\n            x = x + self.mlp(self.norm(x))\n\n        # optional pool if latent seq_len > 1 and pooled output is desired\n        if self.pool == 'token':\n            x = x[:, 0]\n        elif self.pool == 'avg':\n            x = x.mean(1)\n        return x\n\n\nclass AttentionPoolPrr(nn.Module):\n    \"\"\" Patch Representation Refinement (PRR) attention pool.\n\n    From \"Locality-Attending Vision Transformer\" (ICLR 2026).\n\n    Parameter-free multi-head self-attention that refines all patch representations\n    before pooling. No Q/K/V projections — input is reshaped directly into multi-head\n    format for self-attention.\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            pool_type: str = 'token',\n            pre_norm: bool = False,\n            post_norm: bool = False,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert pool_type in ('token', 'avg'), f\"pool_type must be 'token' or 'avg', got '{pool_type}'\"\n        assert dim % num_heads == 0, f\"dim ({dim}) must be divisible by num_heads ({num_heads})\"\n\n        if norm_layer is None and (pre_norm or post_norm):\n            norm_layer = nn.LayerNorm\n\n        self.num_heads = num_heads\n        self.head_dim = dim // num_heads\n        self.scale = self.head_dim ** -0.5\n        
self.pool = pool_type\n        self.fused_attn = use_fused_attn()\n        self.out_features = dim\n\n        self.pre_norm = norm_layer(dim, **dd) if pre_norm else nn.Identity()\n        self.post_norm = norm_layer(dim, **dd) if post_norm else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        B, N, C = x.shape\n\n        x = self.pre_norm(x)\n\n        # Parameter-free self-attention: reshape into multi-head format\n        qkv = x.reshape(B, N, self.num_heads, self.head_dim).transpose(1, 2)  # (B, H, N, D)\n        if self.fused_attn:\n            x = F.scaled_dot_product_attention(qkv, qkv, qkv)\n        else:\n            attn = (qkv * self.scale) @ qkv.transpose(-2, -1)\n            attn = attn.softmax(dim=-1)\n            x = attn @ qkv\n        x = x.transpose(1, 2).reshape(B, N, C)\n\n        x = self.post_norm(x)\n\n        # Pool\n        if self.pool == 'token':\n            x = x[:, 0]\n        elif self.pool == 'avg':\n            x = x.mean(1)\n\n        return x"
  },
  {
    "path": "timm/layers/attention_pool2d.py",
    "content": "\"\"\" Attention Pool 2D\n\nImplementations of 2D spatial feature pooling using multi-head attention instead of average pool.\n\nBased on idea in CLIP by OpenAI, licensed Apache 2.0\nhttps://github.com/openai/CLIP/blob/3b473b0e682c091a9e53623eebc1ca1657385717/clip/model.py\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nfrom typing import Optional, Union, Tuple\n\nimport torch\nimport torch.nn as nn\n\nfrom .config import use_fused_attn\nfrom .helpers import to_2tuple\nfrom .pos_embed import resample_abs_pos_embed\nfrom .pos_embed_sincos import apply_rot_embed_cat, create_rope_embed\nfrom .weight_init import trunc_normal_\n\n\nclass RotAttentionPool2d(nn.Module):\n    \"\"\" Attention based 2D feature pooling w/ rotary (relative) pos embedding.\n    This is a multi-head attention based replacement for (spatial) average pooling in NN architectures.\n\n    Adapted from the AttentionPool2d in CLIP w/ rotary embedding instead of learned embed.\n    https://github.com/openai/CLIP/blob/3b473b0e682c091a9e53623eebc1ca1657385717/clip/model.py\n\n    NOTE: While this impl does not require a fixed feature size, performance at differeing resolutions from\n    train varies widely and falls off dramatically. I'm not sure if there is a way around this... 
-RW\n\n    Setting out_features=0 disables the output projection (pre_logits mode).\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            in_features: int,\n            out_features: Optional[int] = None,\n            ref_feat_size: Union[int, Tuple[int, int]] = 7,\n            embed_dim: Optional[int] = None,\n            head_dim: Optional[int] = 64,\n            num_heads: Optional[int] = None,\n            qkv_bias: bool = True,\n            qkv_separate: bool = False,\n            pool_type: str = 'token',\n            class_token: bool = False,\n            drop_rate: float = 0.,\n            rope_type: str = 'cat',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert pool_type in ('', 'token')\n        self.embed_dim = embed_dim = embed_dim or in_features\n        self.in_features = in_features\n        if out_features is None:\n            self.out_features = in_features\n        elif out_features > 0:\n            self.out_features = out_features\n        else:\n            self.out_features = embed_dim  # out_features=0 disables projection\n        ref_feat_size = to_2tuple(ref_feat_size)\n        if num_heads is not None:\n            assert embed_dim % num_heads == 0\n            head_dim = embed_dim // num_heads\n        else:\n            assert embed_dim % head_dim == 0\n            num_heads = embed_dim // head_dim\n        self.num_heads = num_heads\n        self.head_dim = head_dim\n        self.pool_type = pool_type.lower()\n        self.scale = self.head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n        self.rope_type = rope_type\n\n        if class_token:\n            self.cls_token = nn.Parameter(torch.zeros(1, embed_dim, **dd))\n        else:\n            self.cls_token = None\n\n        if qkv_separate:\n            self.q = nn.Linear(in_features, embed_dim, bias=qkv_bias, **dd)\n 
           self.k = nn.Linear(in_features, embed_dim, bias=qkv_bias, **dd)\n            self.v = nn.Linear(in_features, embed_dim, bias=qkv_bias, **dd)\n            self.qkv = None\n        else:\n            self.qkv = nn.Linear(in_features, embed_dim * 3, bias=qkv_bias, **dd)\n        self.drop = nn.Dropout(drop_rate)\n        self.proj = nn.Linear(embed_dim, self.out_features, **dd) if out_features != 0 else nn.Identity()\n\n        self.pos_embed = create_rope_embed(\n            rope_type=rope_type,\n            dim=embed_dim,\n            num_heads=num_heads,\n            in_pixels=False,\n            ref_feat_shape=ref_feat_size,\n            rotate_half=False,\n            **dd,\n        )\n\n    def init_weights(self, zero_init_last: bool = False):\n        if self.qkv is None:\n            in_features = self.q.in_features\n            trunc_normal_(self.q.weight, std=in_features ** -0.5)\n            nn.init.zeros_(self.q.bias)\n            trunc_normal_(self.k.weight, std=in_features ** -0.5)\n            nn.init.zeros_(self.k.bias)\n            trunc_normal_(self.v.weight, std=in_features ** -0.5)\n            nn.init.zeros_(self.v.bias)\n        else:\n            in_features = self.qkv.in_features\n            trunc_normal_(self.qkv.weight, std=in_features ** -0.5)\n            nn.init.zeros_(self.qkv.bias)\n\n    def reset(self, num_classes: Optional[int] = None, pool_type: Optional[str] = None):\n        # NOTE: this module is being used as a head, so need compatible reset()\n        if pool_type is not None:\n            assert pool_type in ('', 'token')\n            self.pool_type = pool_type\n        if num_classes is not None:\n            self.proj = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity()\n            self.out_features = num_classes if num_classes > 0 else self.embed_dim\n\n    def _pool(self, x: torch.Tensor, H: int, W: int) -> torch.Tensor:\n        if self.pool_type == 'token':\n            x = x[:, 0]\n 
       else:\n            # if not pooled, return spatial output without token\n            x = x[:, 1:].reshape(x.shape[0], H, W, -1).permute(0, 3, 1, 2)\n        return x\n\n    def forward(self, x, pre_logits: bool = False):\n        B, _, H, W = x.shape\n        N = H * W\n        x = x.flatten(2).transpose(1, 2)\n        if self.cls_token is None:\n            x = torch.cat([x.mean(1, keepdim=True), x], dim=1)\n        else:\n            x = torch.cat([self.cls_token.expand(x.shape[0], -1, -1), x], dim=1)\n        if self.qkv is None:\n            q = self.q(x).reshape(B, N + 1, self.num_heads, self.head_dim).transpose(1, 2)\n            k = self.k(x).reshape(B, N + 1, self.num_heads, self.head_dim).transpose(1, 2)\n            v = self.v(x).reshape(B, N + 1, self.num_heads, self.head_dim).transpose(1, 2)\n        else:\n            x = self.qkv(x).reshape(B, N + 1, 3, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)\n            q, k, v = x.unbind(0)\n\n        rope = self.pos_embed.get_embed((H, W))\n        if isinstance(rope, tuple):\n            # RotaryEmbedding returns (sin, cos) tuple - concatenate for apply_rot_embed_cat\n            rope = torch.cat(rope, dim=-1)\n        q = torch.cat([q[:, :, :1, :], apply_rot_embed_cat(q[:, :, 1:, :], rope)], dim=2).type_as(v)\n        k = torch.cat([k[:, :, :1, :], apply_rot_embed_cat(k[:, :, 1:, :], rope)], dim=2).type_as(v)\n\n        if self.fused_attn:\n            x = nn.functional.scaled_dot_product_attention(q, k, v)\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            attn = attn.softmax(dim=-1)\n            x = attn @ v\n        x = x.transpose(1, 2).reshape(B, N + 1, -1)\n        x = self.drop(x)\n        if pre_logits:\n            x = self._pool(x, H, W)\n            return x\n        x = self.proj(x)\n        x = self._pool(x, H, W)\n        return x\n\n\nclass AttentionPool2d(nn.Module):\n    \"\"\" Attention based 2D feature pooling w/ learned 
(absolute) pos embedding.\n    This is a multi-head attention based replacement for (spatial) average pooling in NN architectures.\n\n    It was based on impl in CLIP by OpenAI\n    https://github.com/openai/CLIP/blob/3b473b0e682c091a9e53623eebc1ca1657385717/clip/model.py\n\n    NOTE: This requires feature size upon construction and well prevent adaptive sizing of the network.\n\n    Setting out_features=0 disables the output projection (pre_logits mode).\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            in_features: int,\n            feat_size: Union[int, Tuple[int, int]] = 7,\n            out_features: Optional[int] = None,\n            embed_dim: Optional[int] = None,\n            head_dim: Optional[int] = 64,\n            num_heads: Optional[int] = None,\n            qkv_bias: bool = True,\n            qkv_separate: bool = False,\n            pool_type: str = 'token',\n            class_token: bool = False,\n            drop_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert pool_type in ('', 'token')\n        self.embed_dim = embed_dim = embed_dim or in_features\n        self.in_features = in_features\n        if out_features is None:\n            self.out_features = in_features\n        elif out_features > 0:\n            self.out_features = out_features\n        else:\n            self.out_features = embed_dim  # out_features=0 disables projection\n        if num_heads is not None:\n            assert embed_dim % num_heads == 0\n            head_dim = embed_dim // num_heads\n        else:\n            assert embed_dim % head_dim == 0\n            num_heads = embed_dim // head_dim\n        self.feat_size = to_2tuple(feat_size)\n        self.seq_len = self.feat_size[0] * self.feat_size[1]\n        self.num_heads = num_heads\n        self.head_dim = head_dim\n        self.pool_type = 
pool_type\n        self.scale = self.head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n\n        if class_token:\n            self.cls_token = nn.Parameter(torch.zeros(1, embed_dim, **dd))\n        else:\n            self.cls_token = None\n\n        if qkv_separate:\n            self.q = nn.Linear(in_features, embed_dim, bias=qkv_bias, **dd)\n            self.k = nn.Linear(in_features, embed_dim, bias=qkv_bias, **dd)\n            self.v = nn.Linear(in_features, embed_dim, bias=qkv_bias, **dd)\n            self.qkv = None\n        else:\n            self.q = self.k = self.v = None\n            self.qkv = nn.Linear(in_features, embed_dim * 3, bias=qkv_bias, **dd)\n        self.drop = nn.Dropout(drop_rate)\n        self.proj = nn.Linear(embed_dim, self.out_features, **dd) if out_features != 0 else nn.Identity()\n        self.pos_embed = nn.Parameter(torch.zeros(self.seq_len + 1, in_features, **dd))\n\n        self.init_weights()\n\n    def init_weights(self, zero_init_last: bool = False):\n        if self.qkv is None:\n            in_features = self.q.in_features\n            trunc_normal_(self.q.weight, std=in_features ** -0.5)\n            nn.init.zeros_(self.q.bias)\n            trunc_normal_(self.k.weight, std=in_features ** -0.5)\n            nn.init.zeros_(self.k.bias)\n            trunc_normal_(self.v.weight, std=in_features ** -0.5)\n            nn.init.zeros_(self.v.bias)\n        else:\n            in_features = self.qkv.in_features\n            trunc_normal_(self.qkv.weight, std=in_features ** -0.5)\n            nn.init.zeros_(self.qkv.bias)\n        trunc_normal_(self.pos_embed, std=in_features ** -0.5)\n\n    def reset(self, num_classes: Optional[int] = None, pool_type: Optional[str] = None):\n        # NOTE: this module is being used as a head, so need compatible reset()\n        if pool_type is not None:\n            assert pool_type in ('', 'token')\n            self.pool_type = pool_type\n        if num_classes is not None:\n            
self.proj = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity()\n            self.out_features = num_classes if num_classes > 0 else self.embed_dim\n\n    def _pool(self, x: torch.Tensor, H: int, W: int) -> torch.Tensor:\n        if self.pool_type == 'token':\n            x = x[:, 0]\n        else:\n            # if not pooled, return spatial output without token\n            x = x[:, 1:].reshape(x.shape[0], H, W, -1).permute(0, 3, 1, 2)\n        return x\n\n    def forward(self, x, pre_logits: bool = False):\n        B, _, H, W = x.shape\n        N = H * W\n        x = x.flatten(2).transpose(1, 2)\n        if self.cls_token is None:\n            x = torch.cat([x.mean(1, keepdim=True), x], dim=1)\n        else:\n            x = torch.cat([self.cls_token.expand(x.shape[0], -1, -1), x], dim=1)\n        pos_embed = resample_abs_pos_embed(self.pos_embed.unsqueeze(0), (H, W), num_prefix_tokens=1)\n        x = x + pos_embed\n\n        if self.qkv is None:\n            q = self.q(x).reshape(B, N + 1, self.num_heads, self.head_dim).transpose(1, 2)\n            k = self.k(x).reshape(B, N + 1, self.num_heads, self.head_dim).transpose(1, 2)\n            v = self.v(x).reshape(B, N + 1, self.num_heads, self.head_dim).transpose(1, 2)\n        else:\n            x = self.qkv(x).reshape(B, -1, 3, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)\n            q, k, v = x.unbind(0)\n\n        if self.fused_attn:\n            x = nn.functional.scaled_dot_product_attention(q, k, v)\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            attn = attn.softmax(dim=-1)\n            x = attn @ v\n        x = x.transpose(1, 2).reshape(B, N + 1, -1)\n        x = self.drop(x)\n        if pre_logits:\n            x = self._pool(x, H, W)\n            return x\n        x = self.proj(x)\n        x = self._pool(x, H, W)\n        return x\n"
  },
  {
    "path": "timm/layers/blur_pool.py",
    "content": "\"\"\"\nBlurPool layer inspired by\n - Kornia's Max_BlurPool2d\n - Making Convolutional Networks Shift-Invariant Again :cite:`zhang2019shiftinvar`\n\nHacked together by Chris Ha and Ross Wightman\n\"\"\"\nfrom functools import partial\nfrom math import comb  # Python 3.8\nfrom typing import Callable, Optional, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom .padding import get_padding\nfrom .typing import LayerType\n\n\nclass BlurPool2d(nn.Module):\n    r\"\"\"Creates a module that computes blurs and downsample a given feature map.\n    See :cite:`zhang2019shiftinvar` for more details.\n    Corresponds to the Downsample class, which does blurring and subsampling\n\n    Args:\n        channels = Number of input channels\n        filt_size (int): binomial filter size for blurring. currently supports 3 (default) and 5.\n        stride (int): downsampling filter stride\n\n    Returns:\n        torch.Tensor: the transformed tensor.\n    \"\"\"\n    def __init__(\n            self,\n            channels: Optional[int] = None,\n            filt_size: int = 3,\n            stride: int = 2,\n            pad_mode: str = 'reflect',\n            device=None,\n            dtype=None\n    ) -> None:\n        super().__init__()\n        assert filt_size > 1\n        self.channels = channels\n        self.filt_size = filt_size\n        self.stride = stride\n        self.pad_mode = pad_mode\n        self.padding = [get_padding(filt_size, stride, dilation=1)] * 4\n\n        # Register empty buffer with correct shape\n        filt_shape = (channels or 1, 1, filt_size, filt_size)\n        self.register_buffer('filt', torch.empty(filt_shape, device=device, dtype=dtype), persistent=False)\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize buffers.\"\"\"\n        self._init_buffers()\n\n    def 
_init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        # (0.5 + 0.5 x)^N => coefficients = C(N,k) / 2^N,  k = 0..N\n        coeffs = torch.tensor(\n            [comb(self.filt_size - 1, k) for k in range(self.filt_size)],\n            device='cpu',\n            dtype=torch.float32,\n        ) / (2 ** (self.filt_size - 1))  # normalise so coefficients sum to 1\n        blur_filter = (coeffs[:, None] * coeffs[None, :])[None, None, :, :]\n        if self.channels is not None:\n            blur_filter = blur_filter.repeat(self.channels, 1, 1, 1)\n        self.filt.copy_(blur_filter)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = F.pad(x, self.padding, mode=self.pad_mode)\n        if self.channels is None:\n            channels = x.shape[1]\n            weight = self.filt.expand(channels, 1, self.filt_size, self.filt_size)\n        else:\n            channels = self.channels\n            weight = self.filt\n        return F.conv2d(x, weight, stride=self.stride, groups=channels)\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n\ndef _normalize_aa_layer(aa_layer: LayerType) -> Callable[..., nn.Module]:\n    \"\"\"Map string shorthands to callables (class or partial).\"\"\"\n    if isinstance(aa_layer, str):\n        key = aa_layer.lower().replace('_', '').replace('-', '')\n        if key in ('avg', 'avgpool'):\n            return nn.AvgPool2d\n        if key in ('blur', 'blurpool'):\n            return BlurPool2d\n        if key == 'blurpc':\n            # preconfigure a constant-pad BlurPool2d\n            return partial(BlurPool2d, pad_mode='constant')\n        raise AssertionError(f\"Unknown anti-aliasing layer ({aa_layer}).\")\n    return aa_layer\n\n\ndef _underlying_cls(layer_callable: Callable[..., nn.Module]):\n    \"\"\"Return the class behind a callable (unwrap partial), else None.\"\"\"\n    if 
isinstance(layer_callable, partial):\n        return layer_callable.func\n    return layer_callable if isinstance(layer_callable, type) else None\n\n\ndef _is_blurpool(layer_callable: Callable[..., nn.Module]) -> bool:\n    \"\"\"True if callable is BlurPool2d or a partial of it.\"\"\"\n    cls = _underlying_cls(layer_callable)\n    try:\n        return issubclass(cls, BlurPool2d)  # cls may be None, protect below\n    except TypeError:\n        return False\n    except Exception:\n        return False\n\n\ndef create_aa(\n        aa_layer: LayerType,\n        channels: Optional[int] = None,\n        stride: int = 2,\n        enable: bool = True,\n        noop: Optional[Type[nn.Module]] = nn.Identity,\n        device=None,\n        dtype=None,\n) -> Optional[nn.Module]:\n    \"\"\" Anti-aliasing factory that supports strings, classes, and partials. \"\"\"\n    if not aa_layer or not enable:\n        return noop() if noop is not None else None\n\n    # Resolve strings to callables\n    aa_layer = _normalize_aa_layer(aa_layer)\n\n    # Build kwargs we *intend* to pass\n    call_kwargs = {\"channels\": channels, \"stride\": stride}\n\n    # Only add device/dtype for BlurPool2d (or partial of it) and don't override if already provided in the partial.\n    if _is_blurpool(aa_layer):\n        # Check if aa_layer is a partial and already has device/dtype set\n        existing_kw = aa_layer.keywords if isinstance(aa_layer, partial) and aa_layer.keywords else {}\n        if \"device\" not in existing_kw and device is not None:\n            call_kwargs[\"device\"] = device\n        if \"dtype\" not in existing_kw and dtype is not None:\n            call_kwargs[\"dtype\"] = dtype\n\n    # Try (channels, stride, [device, dtype]) first; fall back to (stride) only\n    try:\n        return aa_layer(**call_kwargs)\n    except TypeError:\n        # Some layers (e.g., AvgPool2d) may not accept 'channels' and need stride passed as kernel\n        return aa_layer(stride)\n"
  },
  {
    "path": "timm/layers/bottleneck_attn.py",
    "content": "\"\"\" Bottleneck Self Attention (Bottleneck Transformers)\n\nPaper: `Bottleneck Transformers for Visual Recognition` - https://arxiv.org/abs/2101.11605\n\n@misc{2101.11605,\nAuthor = {Aravind Srinivas and Tsung-Yi Lin and Niki Parmar and Jonathon Shlens and Pieter Abbeel and Ashish Vaswani},\nTitle = {Bottleneck Transformers for Visual Recognition},\nYear = {2021},\n}\n\nBased on ref gist at: https://gist.github.com/aravindsrinivas/56359b79f0ce4449bcb04ab4b56a57a2\n\nThis impl is a WIP but given that it is based on the ref gist likely not too far off.\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nfrom typing import List, Optional, Tuple\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom .helpers import to_2tuple, make_divisible\nfrom .weight_init import trunc_normal_\nfrom .trace_utils import _assert\n\n\ndef rel_logits_1d(q, rel_k, permute_mask: List[int]):\n    \"\"\" Compute relative logits along one dimension\n\n    As per: https://gist.github.com/aravindsrinivas/56359b79f0ce4449bcb04ab4b56a57a2\n    Originally from: `Attention Augmented Convolutional Networks` - https://arxiv.org/abs/1904.09925\n\n    Args:\n        q: (batch, heads, height, width, dim)\n        rel_k: (2 * width - 1, dim)\n        permute_mask: permute output dim according to this\n    \"\"\"\n    B, H, W, dim = q.shape\n    x = (q @ rel_k.transpose(-1, -2))\n    x = x.reshape(-1, W, 2 * W -1)\n\n    # pad to shift from relative to absolute indexing\n    x_pad = F.pad(x, [0, 1]).flatten(1)\n    x_pad = F.pad(x_pad, [0, W - 1])\n\n    # reshape and slice out the padded elements\n    x_pad = x_pad.reshape(-1, W + 1, 2 * W - 1)\n    x = x_pad[:, :W, W - 1:]\n\n    # reshape and tile\n    x = x.reshape(B, H, 1, W, W).expand(-1, -1, H, -1, -1)\n    return x.permute(permute_mask)\n\n\nclass PosEmbedRel(nn.Module):\n    \"\"\" Relative Position Embedding\n    As per: 
https://gist.github.com/aravindsrinivas/56359b79f0ce4449bcb04ab4b56a57a2\n    Originally from: `Attention Augmented Convolutional Networks` - https://arxiv.org/abs/1904.09925\n    \"\"\"\n    def __init__(\n            self,\n            feat_size: Tuple[int, int],\n            dim_head: int,\n            scale: float,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.height, self.width = to_2tuple(feat_size)\n        self.dim_head = dim_head\n        self.scale = scale\n\n        self.height_rel = nn.Parameter(torch.empty(self.height * 2 - 1, dim_head, **dd))\n        self.width_rel = nn.Parameter(torch.empty(self.width * 2 - 1, dim_head, **dd))\n\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        torch.nn.init.normal_(self.height_rel, std=self.scale)\n        torch.nn.init.normal_(self.width_rel, std=self.scale)\n\n    def forward(self, q):\n        B, HW, _ = q.shape\n\n        # relative logits in width dimension.\n        q = q.reshape(B, self.height, self.width, -1)\n        rel_logits_w = rel_logits_1d(q, self.width_rel, permute_mask=(0, 1, 3, 2, 4))\n\n        # relative logits in height dimension.\n        q = q.transpose(1, 2)\n        rel_logits_h = rel_logits_1d(q, self.height_rel, permute_mask=(0, 3, 1, 4, 2))\n\n        rel_logits = rel_logits_h + rel_logits_w\n        rel_logits = rel_logits.reshape(B, HW, HW)\n        return rel_logits\n\n\nclass BottleneckAttn(nn.Module):\n    \"\"\" Bottleneck Attention\n    Paper: `Bottleneck Transformers for Visual Recognition` - https://arxiv.org/abs/2101.11605\n\n    The internal dimensions of the attention module are controlled by the interaction of several arguments.\n      * the output dimension of the module is specified by dim_out, which falls back to input dim if not set\n      * the value (v) dimension is set to dim_out // num_heads, the v projection determines the output dim\n   
   * the query and key (qk) dimensions are determined by\n        * num_heads * dim_head if dim_head is not None\n        * num_heads * (dim_out * attn_ratio // num_heads) if dim_head is None\n      * as seen above, attn_ratio determines the ratio of q and k relative to the output if dim_head not used\n\n    Args:\n        dim (int): input dimension to the module\n        dim_out (int): output dimension of the module, same as dim if not set\n        stride (int): output stride of the module, avg pool used if stride == 2 (default: 1).\n        num_heads (int): parallel attention heads (default: 4)\n        dim_head (int): dimension of query and key heads, calculated from dim_out * attn_ratio // num_heads if not set\n        qk_ratio (float): ratio of q and k dimensions to output dimension when dim_head not set. (default: 1.0)\n        qkv_bias (bool): add bias to q, k, and v projections\n        scale_pos_embed (bool): scale the position embedding as well as Q @ K\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            dim_out: Optional[int] = None,\n            feat_size: Optional[Tuple[int, int]] = None,\n            stride: int = 1,\n            num_heads: int = 4,\n            dim_head: Optional[int] = None,\n            qk_ratio: float = 1.0,\n            qkv_bias: bool = False,\n            scale_pos_embed: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert feat_size is not None, 'A concrete feature size matching expected input (H, W) is required'\n        dim_out = dim_out or dim\n        assert dim_out % num_heads == 0\n        self.num_heads = num_heads\n        self.dim_head_qk = dim_head or make_divisible(dim_out * qk_ratio, divisor=8) // num_heads\n        self.dim_head_v = dim_out // self.num_heads\n        self.dim_out_qk = num_heads * self.dim_head_qk\n        self.dim_out_v = num_heads * self.dim_head_v\n    
    self.scale = self.dim_head_qk ** -0.5\n        self.scale_pos_embed = scale_pos_embed\n\n        self.qkv = nn.Conv2d(dim, self.dim_out_qk * 2 + self.dim_out_v, 1, bias=qkv_bias, **dd)\n\n        # NOTE I'm only supporting relative pos embedding for now\n        self.pos_embed = PosEmbedRel(feat_size, dim_head=self.dim_head_qk, scale=self.scale, **dd)\n\n        self.pool = nn.AvgPool2d(2, 2) if stride == 2 else nn.Identity()\n\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        trunc_normal_(self.qkv.weight, std=self.qkv.weight.shape[1] ** -0.5)  # fan-in\n        trunc_normal_(self.pos_embed.height_rel, std=self.scale)\n        trunc_normal_(self.pos_embed.width_rel, std=self.scale)\n\n    def forward(self, x):\n        B, C, H, W = x.shape\n        _assert(H == self.pos_embed.height, '')\n        _assert(W == self.pos_embed.width, '')\n\n        x = self.qkv(x)  # B, (2 * dim_head_qk + dim_head_v) * num_heads, H, W\n\n        # NOTE head vs channel split ordering in qkv projection was decided before I allowed qk to differ from v\n        # So, this is more verbose than if heads were before qkv splits, but throughput is not impacted.\n        q, k, v = torch.split(x, [self.dim_out_qk, self.dim_out_qk, self.dim_out_v], dim=1)\n        q = q.reshape(B * self.num_heads, self.dim_head_qk, -1).transpose(-1, -2)\n        k = k.reshape(B * self.num_heads, self.dim_head_qk, -1)  # no transpose, for q @ k\n        v = v.reshape(B * self.num_heads, self.dim_head_v, -1).transpose(-1, -2)\n\n        if self.scale_pos_embed:\n            attn = (q @ k + self.pos_embed(q)) * self.scale  # B * num_heads, H * W, H * W\n        else:\n            attn = (q @ k) * self.scale + self.pos_embed(q)\n        attn = attn.softmax(dim=-1)\n\n        out = (attn @ v).transpose(-1, -2).reshape(B, self.dim_out_v, H, W)  # B, dim_out, H, W\n        out = self.pool(out)\n        return out\n"
  },
  {
    "path": "timm/layers/cbam.py",
    "content": "\"\"\" CBAM (sort-of) Attention\n\nExperimental impl of CBAM: Convolutional Block Attention Module: https://arxiv.org/abs/1807.06521\n\nWARNING: Results with these attention layers have been mixed. They can significantly reduce performance on\nsome tasks, especially fine-grained it seems. I may end up removing this impl.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom typing import Optional, Tuple, Type, Union\n\nimport torch\nfrom torch import nn as nn\nimport torch.nn.functional as F\n\nfrom .conv_bn_act import ConvNormAct\nfrom .create_act import create_act_layer, get_act_layer\nfrom .helpers import make_divisible\n\n\nclass ChannelAttn(nn.Module):\n    \"\"\" Original CBAM channel attention module, currently avg + max pool variant only.\n    \"\"\"\n    def __init__(\n            self,\n            channels: int,\n            rd_ratio: float = 1. / 16,\n            rd_channels: Optional[int] = None,\n            rd_divisor: int = 1,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            gate_layer: Union[str, Type[nn.Module]] = 'sigmoid',\n            mlp_bias=False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if not rd_channels:\n            rd_channels = make_divisible(channels * rd_ratio, rd_divisor, round_limit=0.)\n        self.fc1 = nn.Conv2d(channels, rd_channels, 1, bias=mlp_bias, **dd)\n        self.act = act_layer(inplace=True)\n        self.fc2 = nn.Conv2d(rd_channels, channels, 1, bias=mlp_bias, **dd)\n        self.gate = create_act_layer(gate_layer)\n\n    def forward(self, x):\n        x_avg = self.fc2(self.act(self.fc1(x.mean((2, 3), keepdim=True))))\n        x_max = self.fc2(self.act(self.fc1(x.amax((2, 3), keepdim=True))))\n        return x * self.gate(x_avg + x_max)\n\n\nclass LightChannelAttn(ChannelAttn):\n    \"\"\"An experimental 'lightweight' that sums avg + max pool first\n    \"\"\"\n    def 
__init__(\n            self,\n            channels: int,\n            rd_ratio: float = 1./16,\n            rd_channels: Optional[int] = None,\n            rd_divisor: int = 1,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            gate_layer: Union[str, Type[nn.Module]] = 'sigmoid',\n            mlp_bias: bool = False,\n            device=None,\n            dtype=None\n    ):\n        super().__init__(\n            channels, rd_ratio, rd_channels, rd_divisor, act_layer, gate_layer, mlp_bias, device=device, dtype=dtype)\n\n    def forward(self, x):\n        x_pool = 0.5 * x.mean((2, 3), keepdim=True) + 0.5 * x.amax((2, 3), keepdim=True)\n        x_attn = self.fc2(self.act(self.fc1(x_pool)))\n        return x * F.sigmoid(x_attn)\n\n\nclass SpatialAttn(nn.Module):\n    \"\"\" Original CBAM spatial attention module\n    \"\"\"\n    def __init__(\n            self,\n            kernel_size: int = 7,\n            gate_layer: Union[str, Type[nn.Module]] = 'sigmoid',\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        self.conv = ConvNormAct(2, 1, kernel_size, apply_act=False, device=device, dtype=dtype)\n        self.gate = create_act_layer(gate_layer)\n\n    def forward(self, x):\n        x_attn = torch.cat([x.mean(dim=1, keepdim=True), x.amax(dim=1, keepdim=True)], dim=1)\n        x_attn = self.conv(x_attn)\n        return x * self.gate(x_attn)\n\n\nclass LightSpatialAttn(nn.Module):\n    \"\"\"An experimental 'lightweight' variant that sums avg_pool and max_pool results.\n    \"\"\"\n    def __init__(\n            self,\n            kernel_size: int = 7,\n            gate_layer: Union[str, Type[nn.Module]] = 'sigmoid',\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        self.conv = ConvNormAct(1, 1, kernel_size, apply_act=False, device=device, dtype=dtype)\n        self.gate = create_act_layer(gate_layer)\n\n    def forward(self, x):\n        x_attn = 0.5 * x.mean(dim=1, 
keepdim=True) + 0.5 * x.amax(dim=1, keepdim=True)\n        x_attn = self.conv(x_attn)\n        return x * self.gate(x_attn)\n\n\nclass CbamModule(nn.Module):\n    def __init__(\n            self,\n            channels: int,\n            rd_ratio: float = 1./16,\n            rd_channels: Optional[int] = None,\n            rd_divisor: int = 1,\n            spatial_kernel_size: int = 7,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            gate_layer: Union[str, Type[nn.Module]] = 'sigmoid',\n            mlp_bias: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.channel = ChannelAttn(\n            channels,\n            rd_ratio=rd_ratio,\n            rd_channels=rd_channels,\n            rd_divisor=rd_divisor,\n            act_layer=act_layer,\n            gate_layer=gate_layer,\n            mlp_bias=mlp_bias,\n            **dd,\n        )\n        self.spatial = SpatialAttn(spatial_kernel_size, gate_layer=gate_layer, **dd)\n\n    def forward(self, x):\n        x = self.channel(x)\n        x = self.spatial(x)\n        return x\n\n\nclass LightCbamModule(nn.Module):\n    def __init__(\n            self,\n            channels: int,\n            rd_ratio: float = 1./16,\n            rd_channels: Optional[int] = None,\n            rd_divisor: int = 1,\n            spatial_kernel_size: int = 7,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            gate_layer: Union[str, Type[nn.Module]] = 'sigmoid',\n            mlp_bias: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.channel = LightChannelAttn(\n            channels,\n            rd_ratio=rd_ratio,\n            rd_channels=rd_channels,\n            rd_divisor=rd_divisor,\n            act_layer=act_layer,\n            gate_layer=gate_layer,\n            mlp_bias=mlp_bias,\n  
          **dd,\n        )\n        self.spatial = LightSpatialAttn(spatial_kernel_size, **dd)\n\n    def forward(self, x):\n        x = self.channel(x)\n        x = self.spatial(x)\n        return x\n\n"
  },
  {
    "path": "timm/layers/classifier.py",
    "content": "\"\"\" Classifier head and layer factory\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom collections import OrderedDict\nfrom functools import partial\nfrom typing import Optional, Union, Callable\n\nimport torch\nimport torch.nn as nn\nfrom torch.nn import functional as F\n\nfrom .adaptive_avgmax_pool import SelectAdaptivePool2d\nfrom .create_act import get_act_layer\nfrom .create_norm import get_norm_layer\n\n\ndef _create_pool(\n        num_features: int,\n        num_classes: int,\n        pool_type: str = 'avg',\n        use_conv: bool = False,\n        input_fmt: Optional[str] = None,\n):\n    flatten_in_pool = not use_conv  # flatten when we use a Linear layer after pooling\n    if not pool_type:\n        flatten_in_pool = False  # disable flattening if pooling is pass-through (no pooling)\n    global_pool = SelectAdaptivePool2d(\n        pool_type=pool_type,\n        flatten=flatten_in_pool,\n        input_fmt=input_fmt,\n    )\n    num_pooled_features = num_features * global_pool.feat_mult()\n    return global_pool, num_pooled_features\n\n\ndef _create_fc(num_features, num_classes, use_conv=False, device=None, dtype=None):\n    if num_classes <= 0:\n        fc = nn.Identity()  # pass-through (no classifier)\n    elif use_conv:\n        fc = nn.Conv2d(num_features, num_classes, 1, bias=True, device=device, dtype=dtype)\n    else:\n        fc = nn.Linear(num_features, num_classes, bias=True, device=device, dtype=dtype)\n    return fc\n\n\ndef create_classifier(\n        num_features: int,\n        num_classes: int,\n        pool_type: str = 'avg',\n        use_conv: bool = False,\n        input_fmt: str = 'NCHW',\n        drop_rate: Optional[float] = None,\n        device=None,\n        dtype=None,\n):\n    global_pool, num_pooled_features = _create_pool(\n        num_features,\n        num_classes,\n        pool_type,\n        use_conv=use_conv,\n        input_fmt=input_fmt,\n    )\n    fc = _create_fc(\n        
num_pooled_features,\n        num_classes,\n        use_conv=use_conv,\n        device=device,\n        dtype=dtype,\n    )\n    if drop_rate is not None:\n        dropout = nn.Dropout(drop_rate)\n        return global_pool, dropout, fc\n    return global_pool, fc\n\n\nclass ClassifierHead(nn.Module):\n    \"\"\"Classifier head w/ configurable global pooling and dropout.\"\"\"\n\n    def __init__(\n            self,\n            in_features: int,\n            num_classes: int,\n            pool_type: str = 'avg',\n            drop_rate: float = 0.,\n            use_conv: bool = False,\n            input_fmt: str = 'NCHW',\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            in_features: The number of input features.\n            num_classes:  The number of classes for the final classifier layer (output).\n            pool_type: Global pooling type, pooling disabled if empty string ('').\n            drop_rate: Pre-classifier dropout rate.\n        \"\"\"\n        super().__init__()\n        self.in_features = in_features\n        self.use_conv = use_conv\n        self.input_fmt = input_fmt\n\n        global_pool, fc = create_classifier(\n            in_features,\n            num_classes,\n            pool_type,\n            use_conv=use_conv,\n            input_fmt=input_fmt,\n            device=device,\n            dtype=dtype,\n        )\n        self.global_pool = global_pool\n        self.drop = nn.Dropout(drop_rate)\n        self.fc = fc\n        self.flatten = nn.Flatten(1) if use_conv and pool_type else nn.Identity()\n\n    def reset(self, num_classes: int, pool_type: Optional[str] = None):\n        # FIXME get current device/dtype for reset?\n        if pool_type is not None and pool_type != self.global_pool.pool_type:\n            self.global_pool, self.fc = create_classifier(\n                self.in_features,\n                num_classes,\n                pool_type=pool_type,\n                
use_conv=self.use_conv,\n                input_fmt=self.input_fmt,\n            )\n            self.flatten = nn.Flatten(1) if self.use_conv and pool_type else nn.Identity()\n        else:\n            num_pooled_features = self.in_features * self.global_pool.feat_mult()\n            self.fc = _create_fc(\n                num_pooled_features,\n                num_classes,\n                use_conv=self.use_conv,\n            )\n\n    def forward(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        x = self.drop(x)\n        if pre_logits:\n            return self.flatten(x)\n        x = self.fc(x)\n        return self.flatten(x)\n\n\nclass NormMlpClassifierHead(nn.Module):\n    \"\"\" A Pool -> Norm -> Mlp Classifier Head for '2D' NCHW tensors\n    \"\"\"\n    def __init__(\n            self,\n            in_features: int,\n            num_classes: int,\n            hidden_size: Optional[int] = None,\n            pool_type: str = 'avg',\n            drop_rate: float = 0.,\n            norm_layer: Union[str, Callable] = 'layernorm2d',\n            act_layer: Union[str, Callable] = 'tanh',\n            device=None,\n            dtype=None\n    ):\n        \"\"\"\n        Args:\n            in_features: The number of input features.\n            num_classes:  The number of classes for the final classifier layer (output).\n            hidden_size: The hidden size of the MLP (pre-logits FC layer) if not None.\n            pool_type: Global pooling type, pooling disabled if empty string ('').\n            drop_rate: Pre-classifier dropout rate.\n            norm_layer: Normalization layer type.\n            act_layer: MLP activation layer type (only used if hidden_size is not None).\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.in_features = in_features\n        self.hidden_size = hidden_size\n        self.num_features = in_features\n        self.use_conv = not pool_type\n        
norm_layer = get_norm_layer(norm_layer)\n        act_layer = get_act_layer(act_layer)\n        linear_layer = partial(nn.Conv2d, kernel_size=1) if self.use_conv else nn.Linear\n\n        self.global_pool = SelectAdaptivePool2d(pool_type=pool_type)\n        self.norm = norm_layer(in_features, **dd)\n        self.flatten = nn.Flatten(1) if pool_type else nn.Identity()\n        if hidden_size:\n            self.pre_logits = nn.Sequential(OrderedDict([\n                ('fc', linear_layer(in_features, hidden_size, **dd)),\n                ('act', act_layer()),\n            ]))\n            self.num_features = hidden_size\n        else:\n            self.pre_logits = nn.Identity()\n        self.drop = nn.Dropout(drop_rate)\n        self.fc = linear_layer(self.num_features, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n    def reset(self, num_classes: int, pool_type: Optional[str] = None):\n        # FIXME handle device/dtype on reset\n        if pool_type is not None:\n            self.global_pool = SelectAdaptivePool2d(pool_type=pool_type)\n            self.flatten = nn.Flatten(1) if pool_type else nn.Identity()\n        self.use_conv = self.global_pool.is_identity()\n        linear_layer = partial(nn.Conv2d, kernel_size=1) if self.use_conv else nn.Linear\n        if self.hidden_size:\n            if ((isinstance(self.pre_logits.fc, nn.Conv2d) and not self.use_conv) or\n                    (isinstance(self.pre_logits.fc, nn.Linear) and self.use_conv)):\n                with torch.no_grad():\n                    new_fc = linear_layer(self.in_features, self.hidden_size)\n                    new_fc.weight.copy_(self.pre_logits.fc.weight.reshape(new_fc.weight.shape))\n                    new_fc.bias.copy_(self.pre_logits.fc.bias)\n                    self.pre_logits.fc = new_fc\n        self.fc = linear_layer(self.num_features, num_classes) if num_classes > 0 else nn.Identity()\n\n    def forward(self, x, pre_logits: bool = False):\n        x = 
self.global_pool(x)\n        x = self.norm(x)\n        x = self.flatten(x)\n        x = self.pre_logits(x)\n        x = self.drop(x)\n        if pre_logits:\n            return x\n        x = self.fc(x)\n        return x\n\n\nclass ClNormMlpClassifierHead(nn.Module):\n    \"\"\" A Pool -> Norm -> Mlp Classifier Head for n-D NxxC tensors\n    \"\"\"\n    def __init__(\n            self,\n            in_features: int,\n            num_classes: int,\n            hidden_size: Optional[int] = None,\n            pool_type: str = 'avg',\n            drop_rate: float = 0.,\n            norm_layer: Union[str, Callable] = 'layernorm',\n            act_layer: Union[str, Callable] = 'gelu',\n            input_fmt: str = 'NHWC',\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            in_features: The number of input features.\n            num_classes:  The number of classes for the final classifier layer (output).\n            hidden_size: The hidden size of the MLP (pre-logits FC layer) if not None.\n            pool_type: Global pooling type, pooling disabled if empty string ('').\n            drop_rate: Pre-classifier dropout rate.\n            norm_layer: Normalization layer type.\n            act_layer: MLP activation layer type (only used if hidden_size is not None).\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.in_features = in_features\n        self.hidden_size = hidden_size\n        self.num_features = in_features\n        assert pool_type in ('', 'avg', 'max', 'avgmax')\n        self.pool_type = pool_type\n        assert input_fmt in ('NHWC', 'NLC')\n        self.pool_dim = 1 if input_fmt == 'NLC' else (1, 2)\n        norm_layer = get_norm_layer(norm_layer)\n        act_layer = get_act_layer(act_layer)\n\n        self.norm = norm_layer(in_features, **dd)\n        if hidden_size:\n            self.pre_logits = nn.Sequential(OrderedDict([\n                ('fc', 
nn.Linear(in_features, hidden_size, **dd)),\n                ('act', act_layer()),\n            ]))\n            self.num_features = hidden_size\n        else:\n            self.pre_logits = nn.Identity()\n        self.drop = nn.Dropout(drop_rate)\n        self.fc = nn.Linear(self.num_features, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n    def reset(self, num_classes: int, pool_type: Optional[str] = None, reset_other: bool = False):\n        # FIXME extract dd on reset\n        if pool_type is not None:\n            self.pool_type = pool_type\n        if reset_other:\n            self.pre_logits = nn.Identity()\n            self.norm = nn.Identity()\n        self.fc = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity()\n\n    def _global_pool(self, x):\n        if self.pool_type:\n            if self.pool_type == 'avg':\n                x = x.mean(dim=self.pool_dim)\n            elif self.pool_type == 'max':\n                x = x.amax(dim=self.pool_dim)\n            elif self.pool_type == 'avgmax':\n                x = 0.5 * (x.amax(dim=self.pool_dim) + x.mean(dim=self.pool_dim))\n        return x\n\n    def forward(self, x, pre_logits: bool = False):\n        x = self._global_pool(x)\n        x = self.norm(x)\n        x = self.pre_logits(x)\n        x = self.drop(x)\n        if pre_logits:\n            return x\n        x = self.fc(x)\n        return x\n"
  },
  {
    "path": "timm/layers/cond_conv2d.py",
    "content": "\"\"\" PyTorch Conditionally Parameterized Convolution (CondConv)\n\nPaper: CondConv: Conditionally Parameterized Convolutions for Efficient Inference\n(https://arxiv.org/abs/1904.04971)\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\n\nimport math\nfrom functools import partial\nfrom typing import Union, Tuple\n\nimport torch\nfrom torch import nn as nn\nfrom torch.nn import functional as F\n\nfrom ._fx import register_notrace_module\nfrom .helpers import to_2tuple\nfrom .conv2d_same import conv2d_same\nfrom .padding import get_padding_value\n\n\ndef get_condconv_initializer(initializer, num_experts, expert_shape):\n    def condconv_initializer(weight):\n        \"\"\"CondConv initializer function.\"\"\"\n        num_params = math.prod(expert_shape)\n        if (len(weight.shape) != 2 or weight.shape[0] != num_experts or\n                weight.shape[1] != num_params):\n            raise (ValueError(\n                'CondConv variables must have shape [num_experts, num_params]'))\n        for i in range(num_experts):\n            initializer(weight[i].view(expert_shape))\n    return condconv_initializer\n\n\n@register_notrace_module\nclass CondConv2d(nn.Module):\n    \"\"\" Conditionally Parameterized Convolution\n    Inspired by: https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/condconv/condconv_layers.py\n\n    Grouped convolution hackery for parallel execution of the per-sample kernel filters inspired by this discussion:\n    https://github.com/pytorch/pytorch/issues/17983\n    \"\"\"\n    __constants__ = ['in_channels', 'out_channels', 'dynamic_padding']\n\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: Union[int, Tuple[int, int]] = 3,\n            stride: Union[int, Tuple[int, int]] = 1,\n            padding: Union[int, Tuple[int, int], str] = '',\n            dilation: Union[int, Tuple[int, int]] = 1,\n            groups: 
int = 1,\n            bias: bool = False,\n            num_experts: int = 4,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.in_channels = in_channels\n        self.out_channels = out_channels\n        self.kernel_size = to_2tuple(kernel_size)\n        self.stride = to_2tuple(stride)\n        padding_val, is_padding_dynamic = get_padding_value(\n            padding, kernel_size, stride=stride, dilation=dilation)\n        self.dynamic_padding = is_padding_dynamic  # if in forward to work with torchscript\n        self.padding = to_2tuple(padding_val)\n        self.dilation = to_2tuple(dilation)\n        self.groups = groups\n        self.num_experts = num_experts\n\n        self.weight_shape = (self.out_channels, self.in_channels // self.groups) + self.kernel_size\n        weight_num_param = 1\n        for wd in self.weight_shape:\n            weight_num_param *= wd\n        self.weight = torch.nn.Parameter(torch.empty(self.num_experts, weight_num_param, **dd))\n\n        if bias:\n            self.bias_shape = (self.out_channels,)\n            self.bias = torch.nn.Parameter(torch.empty(self.num_experts, self.out_channels, **dd))\n        else:\n            self.register_parameter('bias', None)\n\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        init_weight = get_condconv_initializer(\n            partial(nn.init.kaiming_uniform_, a=math.sqrt(5)), self.num_experts, self.weight_shape)\n        init_weight(self.weight)\n        if self.bias is not None:\n            fan_in = math.prod(self.weight_shape[1:])\n            bound = 1 / math.sqrt(fan_in)\n            init_bias = get_condconv_initializer(\n                partial(nn.init.uniform_, a=-bound, b=bound), self.num_experts, self.bias_shape)\n            init_bias(self.bias)\n\n    def forward(self, x, routing_weights):\n        B, C, H, W = x.shape\n        weight = 
torch.matmul(routing_weights, self.weight)\n        new_weight_shape = (B * self.out_channels, self.in_channels // self.groups) + self.kernel_size\n        weight = weight.view(new_weight_shape)\n        bias = None\n        if self.bias is not None:\n            bias = torch.matmul(routing_weights, self.bias)\n            bias = bias.view(B * self.out_channels)\n        # move batch elements with channels so each batch element can be efficiently convolved with separate kernel\n        # reshape instead of view to work with channels_last input\n        x = x.reshape(1, B * C, H, W)\n        if self.dynamic_padding:\n            out = conv2d_same(\n                x, weight, bias, stride=self.stride, padding=self.padding,\n                dilation=self.dilation, groups=self.groups * B)\n        else:\n            out = F.conv2d(\n                x, weight, bias, stride=self.stride, padding=self.padding,\n                dilation=self.dilation, groups=self.groups * B)\n        out = out.permute([1, 0, 2, 3]).view(B, self.out_channels, out.shape[-2], out.shape[-1])\n\n        # Literal port (from TF definition)\n        # x = torch.split(x, 1, 0)\n        # weight = torch.split(weight, 1, 0)\n        # if self.bias is not None:\n        #     bias = torch.matmul(routing_weights, self.bias)\n        #     bias = torch.split(bias, 1, 0)\n        # else:\n        #     bias = [None] * B\n        # out = []\n        # for xi, wi, bi in zip(x, weight, bias):\n        #     wi = wi.view(*self.weight_shape)\n        #     if bi is not None:\n        #         bi = bi.view(*self.bias_shape)\n        #     out.append(self.conv_fn(\n        #         xi, wi, bi, stride=self.stride, padding=self.padding,\n        #         dilation=self.dilation, groups=self.groups))\n        # out = torch.cat(out, 0)\n        return out\n"
  },
  {
    "path": "timm/layers/config.py",
    "content": "\"\"\" Model / Layer Config singleton state\n\"\"\"\nimport os\nimport warnings\nfrom typing import Any, Optional\n\nimport torch\n\n__all__ = [\n    'is_exportable', 'is_scriptable', 'is_no_jit', 'use_fused_attn',\n    'set_exportable', 'set_scriptable', 'set_no_jit', 'set_layer_config', 'set_fused_attn',\n    'set_reentrant_ckpt', 'use_reentrant_ckpt'\n]\n\n# Set to True if prefer to have layers with no jit optimization (includes activations)\n_NO_JIT = False\n\n# Set to True if prefer to have activation layers with no jit optimization\n# NOTE not currently used as no difference between no_jit and no_activation jit as only layers obeying\n# the jit flags so far are activations. This will change as more layers are updated and/or added.\n_NO_ACTIVATION_JIT = False\n\n# Set to True if exporting a model with Same padding via ONNX\n_EXPORTABLE = False\n\n# Set to True if wanting to use torch.jit.script on a model\n_SCRIPTABLE = False\n\n\n# use torch.scaled_dot_product_attention where possible\n_HAS_FUSED_ATTN = hasattr(torch.nn.functional, 'scaled_dot_product_attention')\nif 'TIMM_FUSED_ATTN' in os.environ:\n    _USE_FUSED_ATTN = int(os.environ['TIMM_FUSED_ATTN'])\nelse:\n    _USE_FUSED_ATTN = 1  # 0 == off, 1 == on (for tested use), 2 == on (for experimental use)\n\n\nif 'TIMM_REENTRANT_CKPT' in os.environ:\n    _USE_REENTRANT_CKPT = bool(os.environ['TIMM_REENTRANT_CKPT'])\nelse:\n    _USE_REENTRANT_CKPT = False  # defaults to disabled (off)\n\n\ndef is_no_jit():\n    return _NO_JIT\n\n\nclass set_no_jit:\n    def __init__(self, mode: bool) -> None:\n        global _NO_JIT\n        self.prev = _NO_JIT\n        _NO_JIT = mode\n\n    def __enter__(self) -> None:\n        pass\n\n    def __exit__(self, *args: Any) -> bool:\n        global _NO_JIT\n        _NO_JIT = self.prev\n        return False\n\n\ndef is_exportable():\n    return _EXPORTABLE\n\n\nclass set_exportable:\n    def __init__(self, mode: bool) -> None:\n        global _EXPORTABLE\n        
self.prev = _EXPORTABLE\n        _EXPORTABLE = mode\n\n    def __enter__(self) -> None:\n        pass\n\n    def __exit__(self, *args: Any) -> bool:\n        global _EXPORTABLE\n        _EXPORTABLE = self.prev\n        return False\n\n\ndef is_scriptable():\n    return _SCRIPTABLE\n\n\nclass set_scriptable:\n    def __init__(self, mode: bool) -> None:\n        global _SCRIPTABLE\n        self.prev = _SCRIPTABLE\n        _SCRIPTABLE = mode\n\n    def __enter__(self) -> None:\n        pass\n\n    def __exit__(self, *args: Any) -> bool:\n        global _SCRIPTABLE\n        _SCRIPTABLE = self.prev\n        return False\n\n\nclass set_layer_config:\n    \"\"\" Layer config context manager that allows setting all layer config flags at once.\n    If a flag arg is None, it will not change the current value.\n    \"\"\"\n    def __init__(\n            self,\n            scriptable: Optional[bool] = None,\n            exportable: Optional[bool] = None,\n            no_jit: Optional[bool] = None,\n            no_activation_jit: Optional[bool] = None):\n        global _SCRIPTABLE\n        global _EXPORTABLE\n        global _NO_JIT\n        global _NO_ACTIVATION_JIT\n        self.prev = _SCRIPTABLE, _EXPORTABLE, _NO_JIT, _NO_ACTIVATION_JIT\n        if scriptable is not None:\n            _SCRIPTABLE = scriptable\n        if exportable is not None:\n            _EXPORTABLE = exportable\n        if no_jit is not None:\n            _NO_JIT = no_jit\n        if no_activation_jit is not None:\n            _NO_ACTIVATION_JIT = no_activation_jit\n\n    def __enter__(self) -> None:\n        pass\n\n    def __exit__(self, *args: Any) -> bool:\n        global _SCRIPTABLE\n        global _EXPORTABLE\n        global _NO_JIT\n        global _NO_ACTIVATION_JIT\n        _SCRIPTABLE, _EXPORTABLE, _NO_JIT, _NO_ACTIVATION_JIT = self.prev\n        return False\n\n\ndef use_fused_attn(experimental: bool = False) -> bool:\n    # NOTE: ONNX export cannot handle F.scaled_dot_product_attention as of 
pytorch 2.0\n    if not _HAS_FUSED_ATTN or _EXPORTABLE:\n        return False\n    if experimental:\n        return _USE_FUSED_ATTN > 1\n    return _USE_FUSED_ATTN > 0\n\n\ndef set_fused_attn(enable: bool = True, experimental: bool = False):\n    global _USE_FUSED_ATTN\n    if not _HAS_FUSED_ATTN:\n        warnings.warn('This version of pytorch does not have F.scaled_dot_product_attention, fused_attn flag ignored.')\n        return\n    if experimental and enable:\n        _USE_FUSED_ATTN = 2\n    elif enable:\n        _USE_FUSED_ATTN = 1\n    else:\n        _USE_FUSED_ATTN = 0\n\n\ndef use_reentrant_ckpt() -> bool:\n    return _USE_REENTRANT_CKPT\n\n\ndef set_reentrant_ckpt(enable: bool = True):\n    global _USE_REENTRANT_CKPT\n    _USE_REENTRANT_CKPT = enable\n"
  },
  {
    "path": "timm/layers/conv2d_same.py",
    "content": "\"\"\" Conv2d w/ Same Padding\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom typing import Tuple, Optional, Union\n\nfrom ._fx import register_notrace_module\nfrom .config import is_exportable, is_scriptable\nfrom .padding import pad_same, pad_same_arg, get_padding_value\n\n\n_USE_EXPORT_CONV = False\n\n\ndef conv2d_same(\n        x,\n        weight: torch.Tensor,\n        bias: Optional[torch.Tensor] = None,\n        stride: Tuple[int, int] = (1, 1),\n        padding: Tuple[int, int] = (0, 0),\n        dilation: Tuple[int, int] = (1, 1),\n        groups: int = 1,\n):\n    x = pad_same(x, weight.shape[-2:], stride, dilation)\n    return F.conv2d(x, weight, bias, stride, (0, 0), dilation, groups)\n\n\n@register_notrace_module\nclass Conv2dSame(nn.Conv2d):\n    \"\"\" Tensorflow like 'SAME' convolution wrapper for 2D convolutions\n    \"\"\"\n\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: Union[int, Tuple[int, int]],\n            stride: Union[int, Tuple[int, int]] = 1,\n            padding: Union[int, Tuple[int, int], str] = 0,\n            dilation: Union[int, Tuple[int, int]] = 1,\n            groups: int = 1,\n            bias: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__(\n            in_channels,\n            out_channels,\n            kernel_size,\n            stride,\n            0,  # padding\n            dilation,\n            groups,\n            bias,\n            device=device,\n            dtype=dtype,\n        )\n\n    def forward(self, x):\n        return conv2d_same(\n            x,\n            self.weight,\n            self.bias,\n            self.stride,\n            self.padding,\n            self.dilation,\n            self.groups,\n        )\n\n\nclass Conv2dSameExport(nn.Conv2d):\n    \"\"\" ONNX export 
friendly Tensorflow like 'SAME' convolution wrapper for 2D convolutions\n\n    NOTE: This does not currently work with torch.jit.script\n    \"\"\"\n\n    # pylint: disable=unused-argument\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: Union[int, Tuple[int, int]],\n            stride: Union[int, Tuple[int, int]] = 1,\n            padding: Union[int, Tuple[int, int], str] = 0,\n            dilation: Union[int, Tuple[int, int]] = 1,\n            groups: int = 1,\n            bias: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__(\n            in_channels,\n            out_channels,\n            kernel_size,\n            stride,\n            0,  # padding\n            dilation,\n            groups,\n            bias,\n            device=device,\n            dtype=dtype,\n        )\n        self.pad = None\n        self.pad_input_size = (0, 0)\n\n    def forward(self, x):\n        input_size = x.size()[-2:]\n        if self.pad is None:\n            pad_arg = pad_same_arg(input_size, self.weight.size()[-2:], self.stride, self.dilation)\n            self.pad = nn.ZeroPad2d(pad_arg)\n            self.pad_input_size = input_size\n\n        x = self.pad(x)\n        return F.conv2d(\n            x,\n            self.weight,\n            self.bias,\n            self.stride,\n            self.padding,\n            self.dilation,\n            self.groups,\n        )\n\n\ndef create_conv2d_pad(in_chs, out_chs, kernel_size, **kwargs):\n    padding = kwargs.pop('padding', '')\n    kwargs.setdefault('bias', False)\n    padding, is_dynamic = get_padding_value(padding, kernel_size, **kwargs)\n    if is_dynamic:\n        if _USE_EXPORT_CONV and is_exportable():\n            # older PyTorch ver needed this to export same padding reasonably\n            assert not is_scriptable()  # Conv2DSameExport does not work with jit\n            return 
Conv2dSameExport(in_chs, out_chs, kernel_size, **kwargs)\n        else:\n            return Conv2dSame(in_chs, out_chs, kernel_size, **kwargs)\n    else:\n        return nn.Conv2d(in_chs, out_chs, kernel_size, padding=padding, **kwargs)\n\n\n"
  },
  {
    "path": "timm/layers/conv_bn_act.py",
    "content": "\"\"\" Conv2d + BN + Act\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom typing import Any, Dict, Optional, Type\n\nfrom torch import nn as nn\n\nfrom .typing import LayerType, PadType\nfrom .blur_pool import create_aa\nfrom .create_conv2d import create_conv2d\nfrom .create_norm_act import get_norm_act_layer\n\n\nclass ConvNormAct(nn.Module):\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int = 1,\n            stride: int = 1,\n            padding: PadType = '',\n            dilation: int = 1,\n            groups: int = 1,\n            bias: bool = False,\n            apply_norm: bool = True,\n            apply_act: bool = True,\n            norm_layer: LayerType = nn.BatchNorm2d,\n            act_layer: Optional[LayerType] = nn.ReLU,\n            aa_layer: Optional[LayerType] = None,\n            drop_layer: Optional[Type[nn.Module]] = None,\n            conv_kwargs: Optional[Dict[str, Any]] = None,\n            norm_kwargs: Optional[Dict[str, Any]] = None,\n            act_kwargs: Optional[Dict[str, Any]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        conv_kwargs = {**dd, **(conv_kwargs or {})}\n        norm_kwargs = {**dd, **(norm_kwargs or {})}\n        act_kwargs = act_kwargs or {}\n        use_aa = aa_layer is not None and stride > 1\n\n        self.conv = create_conv2d(\n            in_channels,\n            out_channels,\n            kernel_size,\n            stride=1 if use_aa else stride,\n            padding=padding,\n            dilation=dilation,\n            groups=groups,\n            bias=bias,\n            **conv_kwargs,\n        )\n\n        if apply_norm:\n            # NOTE for backwards compatibility with models that use separate norm and act layer definitions\n            norm_act_layer = get_norm_act_layer(norm_layer, 
act_layer)\n            # NOTE for backwards (weight) compatibility, norm layer name remains `.bn`\n            if drop_layer:\n                norm_kwargs['drop_layer'] = drop_layer\n            self.bn = norm_act_layer(\n                out_channels,\n                apply_act=apply_act,\n                act_kwargs=act_kwargs,\n                **norm_kwargs,\n            )\n        else:\n            self.bn = nn.Sequential()\n            if drop_layer:\n                norm_kwargs['drop_layer'] = drop_layer\n                self.bn.add_module('drop', drop_layer())\n\n        self.aa = create_aa(\n            aa_layer,\n            out_channels,\n            stride=stride,\n            enable=use_aa,\n            noop=None,\n            **dd,\n        )\n\n    @property\n    def in_channels(self):\n        return self.conv.in_channels\n\n    @property\n    def out_channels(self):\n        return self.conv.out_channels\n\n    def forward(self, x):\n        x = self.conv(x)\n        x = self.bn(x)\n        aa = getattr(self, 'aa', None)\n        if aa is not None:\n            x = self.aa(x)\n        return x\n\n\nConvBnAct = ConvNormAct\nConvNormActAa = ConvNormAct   # backwards compat, when they were separate\n"
  },
  {
    "path": "timm/layers/coord_attn.py",
    "content": "\"\"\" Coordinate Attention and Variants\n\nCoordinate Attention decomposes channel attention into two 1D feature encoding processes\nto capture long-range dependencies with precise positional information. This module includes\nthe original implementation along with simplified and other variants.\n\nPapers / References:\n- Coordinate Attention: `Coordinate Attention for Efficient Mobile Network Design` - https://arxiv.org/abs/2103.02907\n- Efficient Local Attention: `Rethinking Local Perception in Lightweight Vision Transformer` - https://arxiv.org/abs/2403.01123\n\nHacked together by / Copyright 2025 Ross Wightman\n\"\"\"\nfrom typing import Optional, Type, Union\n\nimport torch\nfrom torch import nn\n\nfrom .create_act import create_act_layer\nfrom .helpers import make_divisible\nfrom .norm import GroupNorm1\n\n\nclass CoordAttn(nn.Module):\n    def __init__(\n            self,\n            channels: int,\n            rd_ratio: float = 1. / 16,\n            rd_channels: Optional[int] = None,\n            rd_divisor: int = 8,\n            se_factor: float = 2/3,\n            bias: bool = False,\n            act_layer: Type[nn.Module] = nn.Hardswish,\n            norm_layer: Optional[Type[nn.Module]] = nn.BatchNorm2d,\n            gate_layer: Union[str, Type[nn.Module]] = 'sigmoid',\n            has_skip: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Coordinate Attention module for spatial feature recalibration.\n\n        Introduced in \"Coordinate Attention for Efficient Mobile Network Design\" (CVPR 2021).\n        Decomposes channel attention into two 1D feature encoding processes along the height and\n        width axes to capture long-range dependencies with precise positional information.\n\n        Args:\n            channels: Number of input channels.\n            rd_ratio: Reduction ratio for bottleneck channel calculation.\n            rd_channels: Explicit number of bottleneck channels, overrides 
rd_ratio if set.\n            rd_divisor: Divisor for making bottleneck channels divisible.\n            se_factor: Applied to rd_ratio for final channel count (keeps params similar to SE).\n            bias: Whether to use bias in convolution layers.\n            act_layer: Activation module class for bottleneck.\n            norm_layer: Normalization module class, None for no normalization.\n            gate_layer: Gate activation, either 'sigmoid', 'hardsigmoid', or a module class.\n            has_skip: Whether to add residual skip connection to output.\n            device: Device to place tensors on.\n            dtype: Data type for tensors.\n        \"\"\"\n\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.has_skip = has_skip\n        if not rd_channels:\n            rd_channels = make_divisible(channels * rd_ratio * se_factor, rd_divisor, round_limit=0.)\n\n        self.conv1 = nn.Conv2d(channels, rd_channels, kernel_size=1, stride=1, padding=0, bias=bias, **dd)\n        self.bn1 = norm_layer(rd_channels, **dd) if norm_layer is not None else nn.Identity()\n        self.act = act_layer()\n\n        self.conv_h = nn.Conv2d(rd_channels, channels, kernel_size=1, stride=1, padding=0, bias=bias, **dd)\n        self.conv_w = nn.Conv2d(rd_channels, channels, kernel_size=1, stride=1, padding=0, bias=bias, **dd)\n        self.gate = create_act_layer(gate_layer)\n\n    def forward(self, x):\n        identity = x\n\n        N, C, H, W = x.size()\n\n        # Strip pooling\n        x_h = x.mean(3, keepdim=True)\n        x_w = x.mean(2, keepdim=True)\n\n        x_w = x_w.transpose(-1, -2)\n        y = torch.cat([x_h, x_w], dim=2)\n        y = self.conv1(y)\n        y = self.bn1(y)\n        y = self.act(y)\n        x_h, x_w = torch.split(y, [H, W], dim=2)\n        x_w = x_w.transpose(-1, -2)\n\n        a_h = self.gate(self.conv_h(x_h))\n        a_w = self.gate(self.conv_w(x_w))\n\n        out = identity * a_w * a_h\n        if 
self.has_skip:\n            out = out + identity\n\n        return out\n\n\nclass SimpleCoordAttn(nn.Module):\n    \"\"\"Simplified Coordinate Attention variant.\n\n    Uses\n     * linear layers instead of convolutions\n     * no norm\n     * additive pre-gating re-combination\n    for reduced complexity while maintaining the core coordinate attention mechanism\n    of separate height and width attention.\n    \"\"\"\n\n    def __init__(\n            self,\n            channels: int,\n            rd_ratio: float = 0.25,\n            rd_channels: Optional[int] = None,\n            rd_divisor: int = 8,\n            se_factor: float = 2 / 3,\n            bias: bool = True,\n            act_layer: Type[nn.Module] = nn.SiLU,\n            gate_layer: Union[str, Type[nn.Module]] = 'sigmoid',\n            has_skip: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            channels: Number of input channels.\n            rd_ratio: Reduction ratio for bottleneck channel calculation.\n            rd_channels: Explicit number of bottleneck channels, overrides rd_ratio if set.\n            rd_divisor: Divisor for making bottleneck channels divisible.\n            se_factor: Applied to rd_ratio for final channel count (keeps param similar to SE)\n            bias: Whether to use bias in linear layers.\n            act_layer: Activation module class for bottleneck.\n            gate_layer: Gate activation, either 'sigmoid', 'hardsigmoid', or a module class.\n            has_skip: Whether to add residual skip connection to output.\n            device: Device to place tensors on.\n            dtype: Data type for tensors.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.has_skip = has_skip\n\n        if not rd_channels:\n            rd_channels = make_divisible(channels * rd_ratio * se_factor, rd_divisor, round_limit=0.)\n\n        self.fc1 = nn.Linear(channels, 
rd_channels, bias=bias, **dd)\n        self.act = act_layer()\n        self.fc_h = nn.Linear(rd_channels, channels, bias=bias, **dd)\n        self.fc_w = nn.Linear(rd_channels, channels, bias=bias, **dd)\n\n        self.gate = create_act_layer(gate_layer)\n\n    def forward(self, x):\n        identity = x\n\n        # Strip pooling\n        x_h = x.mean(dim=3)  # (N, C, H)\n        x_w = x.mean(dim=2)  # (N, C, W)\n\n        # Shared bottleneck projection\n        x_h = self.act(self.fc1(x_h.transpose(1, 2)))  # (N, H, rd_c)\n        x_w = self.act(self.fc1(x_w.transpose(1, 2)))  # (N, W, rd_c)\n\n        # Separate attention heads\n        a_h = self.fc_h(x_h).transpose(1, 2).unsqueeze(-1)  # (N, C, H, 1)\n        a_w = self.fc_w(x_w).transpose(1, 2).unsqueeze(-2)  # (N, C, 1, W)\n\n        out = identity * self.gate(a_h + a_w)\n        if self.has_skip:\n            out = out + identity\n\n        return out\n\n\nclass EfficientLocalAttn(nn.Module):\n    \"\"\"Efficient Local Attention.\n\n    Lightweight alternative to Coordinate Attention that preserves spatial\n    information without channel reduction. 
Uses 1D depthwise convolutions\n    and GroupNorm for better generalization.\n\n    Paper: https://arxiv.org/abs/2403.01123\n    \"\"\"\n\n    def __init__(\n            self,\n            channels: int,\n            kernel_size: int = 7,\n            bias: bool = False,\n            act_layer: Type[nn.Module] = nn.SiLU,\n            gate_layer: Union[str, Type[nn.Module]] = 'sigmoid',\n            norm_layer: Optional[Type[nn.Module]] = GroupNorm1,\n            has_skip: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            channels: Number of input channels.\n            kernel_size: Kernel size for 1D depthwise convolutions.\n            bias: Whether to use bias in convolution layers.\n            act_layer: Activation module class applied after normalization.\n            gate_layer: Gate activation, either 'sigmoid', 'hardsigmoid', or a module class.\n            norm_layer: Normalization module class, None for no normalization.\n            has_skip: Whether to add residual skip connection to output.\n            device: Device to place tensors on.\n            dtype: Data type for tensors.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.has_skip = has_skip\n\n        self.conv_h = nn.Conv2d(\n            channels, channels,\n            kernel_size=(kernel_size, 1),\n            stride=1,\n            padding=(kernel_size // 2, 0),\n            groups=channels,\n            bias=bias,\n            **dd\n        )\n        self.conv_w = nn.Conv2d(\n            channels, channels,\n            kernel_size=(1, kernel_size),\n            stride=1,\n            padding=(0, kernel_size // 2),\n            groups=channels,\n            bias=bias,\n            **dd\n        )\n        if norm_layer is not None:\n            self.norm_h = norm_layer(channels, **dd)\n            self.norm_w = norm_layer(channels, **dd)\n        else:\n            
self.norm_h = nn.Identity()\n            self.norm_w = nn.Identity()\n        self.act = act_layer()\n        self.gate = create_act_layer(gate_layer)\n\n    def forward(self, x):\n        identity = x\n\n        # Strip pooling: (N, C, H, W) -> (N, C, H) and (N, C, W)\n        x_h = x.mean(dim=3, keepdim=True)\n        x_w = x.mean(dim=2, keepdim=True)\n\n        # 1D conv + norm + act\n        x_h = self.act(self.norm_h(self.conv_h(x_h)))  # (N, C, H, 1)\n        x_w = self.act(self.norm_w(self.conv_w(x_w)))  # (N, C, 1, W)\n\n        # Generate attention maps\n        a_h = self.gate(x_h)  # (N, C, H, 1)\n        a_w = self.gate(x_w)  # (N, C, 1, W)\n\n        out = identity * a_h * a_w\n        if self.has_skip:\n            out = out + identity\n\n        return out\n\n\nclass StripAttn(nn.Module):\n    \"\"\"Minimal Strip Attention.\n\n    Lightweight spatial attention using strip pooling with optional learned refinement.\n    \"\"\"\n\n    def __init__(\n            self,\n            channels: int,\n            use_conv: bool = True,\n            kernel_size: int = 3,\n            bias: bool = False,\n            gate_layer: Union[str, Type[nn.Module]] = 'sigmoid',\n            has_skip: bool = False,\n            device=None,\n            dtype=None,\n            **_,\n    ):\n        \"\"\"\n        Args:\n            channels: Number of input channels.\n            use_conv: Whether to apply depthwise convolutions for learned spatial refinement.\n            kernel_size: Kernel size for 1D depthwise convolutions when use_conv is True.\n            bias: Whether to use bias in convolution layers.\n            gate_layer: Gate activation, either 'sigmoid', 'hardsigmoid', or a module class.\n            has_skip: Whether to add residual skip connection to output.\n            device: Device to place tensors on.\n            dtype: Data type for tensors.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        
self.has_skip = has_skip\n        self.use_conv = use_conv\n\n        if use_conv:\n            self.conv_h = nn.Conv2d(\n                channels, channels,\n                kernel_size=(kernel_size, 1),\n                stride=1,\n                padding=(kernel_size // 2, 0),\n                groups=channels,\n                bias=bias,\n                **dd\n            )\n            self.conv_w = nn.Conv2d(\n                channels, channels,\n                kernel_size=(1, kernel_size),\n                stride=1,\n                padding=(0, kernel_size // 2),\n                groups=channels,\n                bias=bias,\n                **dd\n            )\n        else:\n            self.conv_h = nn.Identity()\n            self.conv_w = nn.Identity()\n\n        self.gate = create_act_layer(gate_layer)\n\n    def forward(self, x):\n        identity = x\n\n        # Strip pooling\n        x_h = x.mean(dim=3, keepdim=True)  # (N, C, H, 1)\n        x_w = x.mean(dim=2, keepdim=True)  # (N, C, 1, W)\n\n        # Optional learned refinement\n        x_h = self.conv_h(x_h)\n        x_w = self.conv_w(x_w)\n\n        # Combine and gate\n        a_hw = self.gate(x_h + x_w)  # broadcasts to (N, C, H, W)\n\n        out = identity * a_hw\n        if self.has_skip:\n            out = out + identity\n\n        return out\n\n"
  },
  {
    "path": "timm/layers/create_act.py",
    "content": "\"\"\" Activation Factory\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom typing import Callable, Optional, Type, Union\n\nfrom .activations import *\nfrom .activations_me import *\nfrom .config import is_exportable, is_scriptable\nfrom .typing import LayerType\n\n# PyTorch has an optimized, native 'silu' (aka 'swish') operator as of PyTorch 1.7.\n# Also hardsigmoid, hardswish, and soon mish. This code will use native version if present.\n# Eventually, the custom SiLU, Mish, Hard*, layers will be removed and only native variants will be used.\n_has_silu = 'silu' in dir(torch.nn.functional)\n_has_hardswish = 'hardswish' in dir(torch.nn.functional)\n_has_hardsigmoid = 'hardsigmoid' in dir(torch.nn.functional)\n_has_mish = 'mish' in dir(torch.nn.functional)\n\n\n_ACT_FN_DEFAULT = dict(\n    silu=F.silu if _has_silu else swish,\n    swish=F.silu if _has_silu else swish,\n    mish=F.mish if _has_mish else mish,\n    relu=F.relu,\n    relu6=F.relu6,\n    leaky_relu=F.leaky_relu,\n    elu=F.elu,\n    celu=F.celu,\n    selu=F.selu,\n    gelu=gelu,\n    gelu_tanh=gelu_tanh,\n    quick_gelu=quick_gelu,\n    sigmoid=sigmoid,\n    tanh=tanh,\n    hard_sigmoid=F.hardsigmoid if _has_hardsigmoid else hard_sigmoid,\n    hard_swish=F.hardswish if _has_hardswish else hard_swish,\n    hard_mish=hard_mish,\n)\n\n_ACT_FN_ME = dict(\n    silu=F.silu if _has_silu else swish_me,\n    swish=F.silu if _has_silu else swish_me,\n    mish=F.mish if _has_mish else mish_me,\n    hard_sigmoid=F.hardsigmoid if _has_hardsigmoid else hard_sigmoid_me,\n    hard_swish=F.hardswish if _has_hardswish else hard_swish_me,\n    hard_mish=hard_mish_me,\n)\n\n_ACT_FNS = (_ACT_FN_ME, _ACT_FN_DEFAULT)\nfor a in _ACT_FNS:\n    a.setdefault('hardsigmoid', a.get('hard_sigmoid'))\n    a.setdefault('hardswish', a.get('hard_swish'))\n\n\n_ACT_LAYER_DEFAULT = dict(\n    silu=nn.SiLU if _has_silu else Swish,\n    swish=nn.SiLU if _has_silu else Swish,\n    mish=nn.Mish if _has_mish else 
Mish,\n    relu=nn.ReLU,\n    relu6=nn.ReLU6,\n    leaky_relu=nn.LeakyReLU,\n    elu=nn.ELU,\n    prelu=PReLU,\n    celu=nn.CELU,\n    selu=nn.SELU,\n    gelu=GELU,\n    gelu_tanh=GELUTanh,\n    quick_gelu=QuickGELU,\n    sigmoid=Sigmoid,\n    tanh=Tanh,\n    hard_sigmoid=nn.Hardsigmoid if _has_hardsigmoid else HardSigmoid,\n    hard_swish=nn.Hardswish if _has_hardswish else HardSwish,\n    hard_mish=HardMish,\n    identity=nn.Identity,\n)\n\n_ACT_LAYER_ME = dict(\n    silu=nn.SiLU if _has_silu else SwishMe,\n    swish=nn.SiLU if _has_silu else SwishMe,\n    mish=nn.Mish if _has_mish else MishMe,\n    hard_sigmoid=nn.Hardsigmoid if _has_hardsigmoid else HardSigmoidMe,\n    hard_swish=nn.Hardswish if _has_hardswish else HardSwishMe,\n    hard_mish=HardMishMe,\n)\n\n_ACT_LAYERS = (_ACT_LAYER_ME, _ACT_LAYER_DEFAULT)\nfor a in _ACT_LAYERS:\n    a.setdefault('hardsigmoid', a.get('hard_sigmoid'))\n    a.setdefault('hardswish', a.get('hard_swish'))\n\n\ndef get_act_fn(name: Optional[LayerType] = 'relu'):\n    \"\"\" Activation Function Factory\n    Fetching activation fns by name with this function allows export or torch script friendly\n    functions to be returned dynamically based on current config.\n    \"\"\"\n    if not name:\n        return None\n    if isinstance(name, Callable):\n        return name\n    name = name.lower()\n    if not (is_exportable() or is_scriptable()):\n        # If not exporting or scripting the model, first look for a memory-efficient version with\n        # custom autograd, then fallback\n        if name in _ACT_FN_ME:\n            return _ACT_FN_ME[name]\n    return _ACT_FN_DEFAULT[name]\n\n\ndef get_act_layer(name: Optional[LayerType] = 'relu'):\n    \"\"\" Activation Layer Factory\n    Fetching activation layers by name with this function allows export or torch script friendly\n    functions to be returned dynamically based on current config.\n    \"\"\"\n    if name is None:\n        return None\n    if not isinstance(name, str):\n     
   # callable, module, etc\n        return name\n    if not name:\n        return None\n    name = name.lower()\n    if not (is_exportable() or is_scriptable()):\n        if name in _ACT_LAYER_ME:\n            return _ACT_LAYER_ME[name]\n    return _ACT_LAYER_DEFAULT[name]\n\n\ndef create_act_layer(\n        name: Optional[LayerType],\n        inplace: Optional[bool] = None,\n        **kwargs\n):\n    act_layer = get_act_layer(name)\n    if act_layer is None:\n        return None\n    if inplace is None:\n        return act_layer(**kwargs)\n    try:\n        return act_layer(inplace=inplace, **kwargs)\n    except TypeError:\n        # recover if act layer doesn't have inplace arg\n        return act_layer(**kwargs)\n"
  },
  {
    "path": "timm/layers/create_attn.py",
    "content": "\"\"\" Attention Factory\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nimport torch\nfrom functools import partial\n\nfrom .bottleneck_attn import BottleneckAttn\nfrom .cbam import CbamModule, LightCbamModule\nfrom .coord_attn import CoordAttn, EfficientLocalAttn, StripAttn, SimpleCoordAttn\nfrom .eca import EcaModule, CecaModule\nfrom .gather_excite import GatherExcite\nfrom .global_context import GlobalContext\nfrom .halo_attn import HaloAttn\nfrom .lambda_layer import LambdaLayer\nfrom .non_local_attn import NonLocalAttn, BatNonLocalAttn\nfrom .selective_kernel import SelectiveKernel\nfrom .split_attn import SplitAttn\nfrom .squeeze_excite import SEModule, EffectiveSEModule\n\n\ndef get_attn(attn_type):\n    if isinstance(attn_type, torch.nn.Module):\n        return attn_type\n    module_cls = None\n    if attn_type:\n        if isinstance(attn_type, str):\n            attn_type = attn_type.lower()\n            # Lightweight attention modules (channel and/or coarse spatial).\n            # Typically added to existing network architecture blocks in addition to existing convolutions.\n            if attn_type == 'se':\n                module_cls = SEModule\n            elif attn_type == 'ese':\n                module_cls = EffectiveSEModule\n            elif attn_type == 'eca':\n                module_cls = EcaModule\n            elif attn_type == 'ecam':\n                module_cls = partial(EcaModule, use_mlp=True)\n            elif attn_type == 'ceca':\n                module_cls = CecaModule\n            elif attn_type == 'ge':\n                module_cls = GatherExcite\n            elif attn_type == 'gc':\n                module_cls = GlobalContext\n            elif attn_type == 'gca':\n                module_cls = partial(GlobalContext, fuse_add=True, fuse_scale=False)\n            elif attn_type == 'cbam':\n                module_cls = CbamModule\n            elif attn_type == 'lcbam':\n                module_cls = 
LightCbamModule\n            elif attn_type == 'coord':\n                module_cls = CoordAttn\n            elif attn_type == 'scoord':\n                module_cls = SimpleCoordAttn\n            elif attn_type == 'ela':\n                module_cls = EfficientLocalAttn\n            elif attn_type == 'strip':\n                module_cls = StripAttn\n\n            # Attention / attention-like modules w/ significant params\n            # Typically replace some of the existing workhorse convs in a network architecture.\n            # All of these accept a stride argument and can spatially downsample the input.\n            elif attn_type == 'sk':\n                module_cls = SelectiveKernel\n            elif attn_type == 'splat':\n                module_cls = SplitAttn\n\n            # Self-attention / attention-like modules w/ significant compute and/or params\n            # Typically replace some of the existing workhorse convs in a network architecture.\n            # All of these accept a stride argument and can spatially downsample the input.\n            elif attn_type == 'lambda':\n                return LambdaLayer\n            elif attn_type == 'bottleneck':\n                return BottleneckAttn\n            elif attn_type == 'halo':\n                return HaloAttn\n            elif attn_type == 'nl':\n                module_cls = NonLocalAttn\n            elif attn_type == 'bat':\n                module_cls = BatNonLocalAttn\n\n            # Woops!\n            else:\n                assert False, \"Invalid attn module (%s)\" % attn_type\n        elif isinstance(attn_type, bool):\n            if attn_type:\n                module_cls = SEModule\n        else:\n            module_cls = attn_type\n    return module_cls\n\n\ndef create_attn(attn_type, channels, **kwargs):\n    module_cls = get_attn(attn_type)\n    if module_cls is not None:\n        # NOTE: it's expected the first (positional) argument of all attention layers is the # input channels\n        
return module_cls(channels, **kwargs)\n    return None\n"
  },
  {
    "path": "timm/layers/create_conv2d.py",
    "content": "\"\"\" Create Conv2d Factory Method\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\n\nfrom .mixed_conv2d import MixedConv2d\nfrom .cond_conv2d import CondConv2d\nfrom .conv2d_same import create_conv2d_pad\n\n\ndef create_conv2d(in_channels, out_channels, kernel_size, **kwargs):\n    \"\"\" Select a 2d convolution implementation based on arguments\n    Creates and returns one of torch.nn.Conv2d, Conv2dSame, MixedConv2d, or CondConv2d.\n\n    Used extensively by EfficientNet, MobileNetv3 and related networks.\n    \"\"\"\n    if isinstance(kernel_size, list):\n        assert 'num_experts' not in kwargs  # MixNet + CondConv combo not supported currently\n        if 'groups' in kwargs:\n            groups = kwargs.pop('groups')\n            if groups == in_channels:\n                kwargs['depthwise'] = True\n            else:\n                assert groups == 1\n        # We're going to use only lists for defining the MixedConv2d kernel groups,\n        # ints, tuples, other iterables will continue to pass to normal conv and specify h, w.\n        m = MixedConv2d(in_channels, out_channels, kernel_size, **kwargs)\n    else:\n        depthwise = kwargs.pop('depthwise', False)\n        # for DW out_channels must be multiple of in_channels as must have out_channels % groups == 0\n        groups = in_channels if depthwise else kwargs.pop('groups', 1)\n        if 'num_experts' in kwargs and kwargs['num_experts'] > 0:\n            m = CondConv2d(in_channels, out_channels, kernel_size, groups=groups, **kwargs)\n        else:\n            m = create_conv2d_pad(in_channels, out_channels, kernel_size, groups=groups, **kwargs)\n    return m\n"
  },
  {
    "path": "timm/layers/create_norm.py",
    "content": "\"\"\" Norm Layer Factory\n\nCreate norm modules by string (to mirror create_act and creat_norm-act fns)\n\nCopyright 2022 Ross Wightman\n\"\"\"\nimport functools\nimport types\nfrom typing import Type\n\nimport torch.nn as nn\n\nfrom .norm import (\n    GroupNorm,\n    GroupNorm1,\n    LayerNorm,\n    LayerNorm2d,\n    LayerNormFp32,\n    LayerNorm2dFp32,\n    RmsNorm,\n    RmsNorm2d,\n    RmsNormFp32,\n    RmsNorm2dFp32,\n    SimpleNorm,\n    SimpleNorm2d,\n    SimpleNormFp32,\n    SimpleNorm2dFp32,\n)\nfrom torchvision.ops.misc import FrozenBatchNorm2d\n\n_NORM_MAP = dict(\n    batchnorm=nn.BatchNorm2d,\n    batchnorm2d=nn.BatchNorm2d,\n    batchnorm1d=nn.BatchNorm1d,\n    groupnorm=GroupNorm,\n    groupnorm1=GroupNorm1,\n    layernorm=LayerNorm,\n    layernorm2d=LayerNorm2d,\n    layernormfp32=LayerNormFp32,\n    layernorm2dfp32=LayerNorm2dFp32,\n    rmsnorm=RmsNorm,\n    rmsnorm2d=RmsNorm2d,\n    rmsnormfp32=RmsNormFp32,\n    rmsnorm2dfp32=RmsNorm2dFp32,\n    simplenorm=SimpleNorm,\n    simplenorm2d=SimpleNorm2d,\n    simplenormfp32=SimpleNormFp32,\n    simplenorm2dfp32=SimpleNorm2dFp32,\n    frozenbatchnorm2d=FrozenBatchNorm2d,\n)\n_NORM_TYPES = {m for n, m in _NORM_MAP.items()}\n\n\ndef create_norm_layer(layer_name, num_features, **kwargs):\n    layer = get_norm_layer(layer_name)\n    layer_instance = layer(num_features, **kwargs)\n    return layer_instance\n\n\ndef get_norm_layer(norm_layer):\n    if norm_layer is None:\n        return None\n    assert isinstance(norm_layer, (type, str, types.FunctionType, functools.partial))\n    norm_kwargs = {}\n\n    # unbind partial fn, so args can be rebound later\n    if isinstance(norm_layer, functools.partial):\n        norm_kwargs.update(norm_layer.keywords)\n        norm_layer = norm_layer.func\n\n    if isinstance(norm_layer, str):\n        if not norm_layer:\n            return None\n        layer_name = norm_layer.replace('_', '').lower()\n        norm_layer = _NORM_MAP[layer_name]\n    else:\n 
       norm_layer = norm_layer\n\n    if norm_kwargs:\n        norm_layer = functools.partial(norm_layer, **norm_kwargs)  # bind/rebind args\n    return norm_layer\n"
  },
  {
    "path": "timm/layers/create_norm_act.py",
    "content": "\"\"\" NormAct (Normalization + Activation Layer) Factory\n\nCreate norm + act combo modules that attempt to be backwards compatible with separate norm + act\ninstances in models. Where these are used it will be possible to swap separate BN + act layers with\ncombined modules like IABN or EvoNorms.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport types\nimport functools\nfrom typing import Optional\n\nfrom .evo_norm import *\nfrom .filter_response_norm import FilterResponseNormAct2d, FilterResponseNormTlu2d\nfrom .norm_act import (\n    BatchNormAct2d,\n    GroupNormAct,\n    GroupNorm1Act,\n    LayerNormAct,\n    LayerNormActFp32,\n    LayerNormAct2d,\n    LayerNormAct2dFp32,\n    RmsNormAct,\n    RmsNormActFp32,\n    RmsNormAct2d,\n    RmsNormAct2dFp32,\n)\nfrom .inplace_abn import InplaceAbn\nfrom .typing import LayerType\n\n_NORM_ACT_MAP = dict(\n    batchnorm=BatchNormAct2d,\n    batchnorm2d=BatchNormAct2d,\n    groupnorm=GroupNormAct,\n    groupnorm1=GroupNorm1Act,\n    layernorm=LayerNormAct,\n    layernorm2d=LayerNormAct2d,\n    layernormfp32=LayerNormActFp32,\n    layernorm2dfp32=LayerNormAct2dFp32,\n    evonormb0=EvoNorm2dB0,\n    evonormb1=EvoNorm2dB1,\n    evonormb2=EvoNorm2dB2,\n    evonorms0=EvoNorm2dS0,\n    evonorms0a=EvoNorm2dS0a,\n    evonorms1=EvoNorm2dS1,\n    evonorms1a=EvoNorm2dS1a,\n    evonorms2=EvoNorm2dS2,\n    evonorms2a=EvoNorm2dS2a,\n    frn=FilterResponseNormAct2d,\n    frntlu=FilterResponseNormTlu2d,\n    inplaceabn=InplaceAbn,\n    iabn=InplaceAbn,\n    rmsnorm=RmsNormAct,\n    rmsnorm2d=RmsNormAct2d,\n    rmsnormfp32=RmsNormActFp32,\n    rmsnorm2dfp32=RmsNormAct2dFp32,\n)\n_NORM_ACT_TYPES = {m for n, m in _NORM_ACT_MAP.items()}\n# Reverse map from base norm layer names to norm+act layer classes\n_NORM_TO_NORM_ACT_MAP = dict(\n    batchnorm=BatchNormAct2d,\n    batchnorm2d=BatchNormAct2d,\n    groupnorm=GroupNormAct,\n    groupnorm1=GroupNorm1Act,\n    layernorm=LayerNormAct,\n    
layernorm2d=LayerNormAct2d,\n    layernormfp32=LayerNormActFp32,\n    layernorm2dfp32=LayerNormAct2dFp32,\n    rmsnorm=RmsNormAct,\n    rmsnorm2d=RmsNormAct2d,\n    rmsnormfp32=RmsNormActFp32,\n    rmsnorm2dfp32=RmsNormAct2dFp32,\n)\n# has act_layer arg to define act type\n_NORM_ACT_REQUIRES_ARG = {\n    BatchNormAct2d,\n    GroupNormAct,\n    GroupNorm1Act,\n    LayerNormAct,\n    LayerNormAct2d,\n    LayerNormActFp32,\n    LayerNormAct2dFp32,\n    FilterResponseNormAct2d,\n    InplaceAbn,\n    RmsNormAct,\n    RmsNormAct2d,\n    RmsNormActFp32,\n    RmsNormAct2dFp32,\n}\n\n\ndef create_norm_act_layer(\n        layer_name: LayerType,\n        num_features: int,\n        act_layer: Optional[LayerType] = None,\n        apply_act: bool = True,\n        jit: bool = False,\n        **kwargs,\n):\n    layer = get_norm_act_layer(layer_name, act_layer=act_layer)\n    layer_instance = layer(num_features, apply_act=apply_act, **kwargs)\n    if jit:\n        layer_instance = torch.jit.script(layer_instance)\n    return layer_instance\n\n\ndef get_norm_act_layer(\n        norm_layer: LayerType,\n        act_layer: Optional[LayerType] = None,\n):\n    if norm_layer is None:\n        return None\n    assert isinstance(norm_layer, (type, str,  types.FunctionType, functools.partial))\n    assert act_layer is None or isinstance(act_layer, (type, str, types.FunctionType, functools.partial))\n    norm_act_kwargs = {}\n\n    # unbind partial fn, so args can be rebound later\n    if isinstance(norm_layer, functools.partial):\n        norm_act_kwargs.update(norm_layer.keywords)\n        norm_layer = norm_layer.func\n\n    if isinstance(norm_layer, str):\n        if not norm_layer:\n            return None\n        layer_name = norm_layer.replace('_', '').lower().split('-')[0]\n        norm_act_layer = _NORM_ACT_MAP[layer_name]\n    elif norm_layer in _NORM_ACT_TYPES:\n        norm_act_layer = norm_layer\n    elif isinstance(norm_layer,  types.FunctionType):\n        # if function type, 
must be a lambda/fn that creates a norm_act layer\n        norm_act_layer = norm_layer\n    else:\n        # Use reverse map to find the corresponding norm+act layer\n        type_name = norm_layer.__name__.lower()\n        norm_act_layer = _NORM_TO_NORM_ACT_MAP.get(type_name, None)\n        assert norm_act_layer is not None, f\"No equivalent norm_act layer for {type_name}\"\n\n    if norm_act_layer in _NORM_ACT_REQUIRES_ARG:\n        # pass `act_layer` through for backwards compat where `act_layer=None` implies no activation.\n        # In the future, may force use of `apply_act` with `act_layer` arg bound to relevant NormAct types\n        norm_act_kwargs.setdefault('act_layer', act_layer)\n    if norm_act_kwargs:\n        norm_act_layer = functools.partial(norm_act_layer, **norm_act_kwargs)  # bind/rebind args\n\n    return norm_act_layer\n"
  },
  {
    "path": "timm/layers/diff_attention.py",
    "content": "\"\"\"Differential Attention\n\nPaper: 'Differential Transformer' - https://arxiv.org/abs/2410.05258\n\nReference impl: https://github.com/microsoft/unilm/tree/master/Diff-Transformer\n\nHacked together by / Copyright 2024, Ross Wightman\n\"\"\"\nimport math\nfrom typing import Optional, Type\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom .attention import maybe_add_mask, resolve_self_attn_mask\nfrom .config import use_fused_attn\nfrom .norm import RmsNorm\n\n\nclass DiffAttention(nn.Module):\n    \"\"\"Differential Attention module.\n\n    Computes attention as the difference between two softmax attention maps, which helps\n    cancel out noise and promotes sparse attention patterns. The module splits Q and K\n    into two groups, computes separate attention maps, and subtracts one from the other\n    scaled by a learnable lambda parameter.\n\n    The attention output is computed as:\n        Attn = softmax(Q1 @ K1^T) - lambda * softmax(Q2 @ K2^T)\n        Output = Attn @ V\n\n    Supports both fused (scaled_dot_product_attention) and manual implementations.\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = False,\n            qk_norm: bool = False,\n            scale_norm: bool = False,\n            proj_bias: bool = True,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            depth: int = 0,\n            dual_lambda: bool = False,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize the DiffAttention module.\n\n        Args:\n            dim: Input dimension of the token embeddings.\n            num_heads: Number of attention heads.\n            qkv_bias: Whether to use bias in the query, key, value projections.\n            qk_norm: Whether to apply 
normalization to query and key vectors.\n            scale_norm: Whether to apply normalization before the output projection.\n            proj_bias: Whether to use bias in the output projection.\n            attn_drop: Dropout rate applied to the attention weights.\n            proj_drop: Dropout rate applied after the output projection.\n            norm_layer: Normalization layer constructor (defaults to RmsNorm).\n            depth: Block depth index, used to compute depth-dependent lambda_init.\n            dual_lambda: If True, use simplified dual scalar lambda parameterization\n                (2 params). If False, use the paper's original formulation with\n                lambda_q/k vectors (4 * head_dim params).\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert dim % num_heads == 0, 'dim should be divisible by num_heads'\n        if norm_layer is None:\n            norm_layer = RmsNorm\n        self.num_heads = num_heads\n        self.head_dim = dim // num_heads // 2\n        self.scale = self.head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n\n        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias, **dd)\n        self.q_norm = norm_layer(self.head_dim, **dd) if qk_norm else nn.Identity()\n        self.k_norm = norm_layer(self.head_dim, **dd) if qk_norm else nn.Identity()\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.attn_drop_p = attn_drop\n        self.norm = norm_layer(dim, **dd) if scale_norm else nn.Identity()\n        self.proj = nn.Linear(dim, dim, bias=proj_bias, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n        self.dual_lambda = dual_lambda\n        if dual_lambda:\n            self.lambda_a = nn.Parameter(torch.empty((), dtype=torch.float32, device=device))\n            self.lambda_b = nn.Parameter(torch.empty((), dtype=torch.float32, device=device))\n            self.lambda_q1 = self.lambda_k1 = self.lambda_q2 = self.lambda_k2 = None\n        
else:\n            self.lambda_a = self.lambda_b = None\n            self.lambda_q1 = nn.Parameter(torch.empty(self.head_dim, dtype=torch.float32, device=device))\n            self.lambda_k1 = nn.Parameter(torch.empty(self.head_dim, dtype=torch.float32, device=device))\n            self.lambda_q2 = nn.Parameter(torch.empty(self.head_dim, dtype=torch.float32, device=device))\n            self.lambda_k2 = nn.Parameter(torch.empty(self.head_dim, dtype=torch.float32, device=device))\n\n        self.sub_norm = RmsNorm(2 * self.head_dim, eps=1e-5, **dd)\n\n        self.lambda_init = 0.8\n        self.set_lambda_init(depth)\n        self.reset_parameters()\n\n    def set_lambda_init(self, depth: int):\n        self.lambda_init = 0.8 - 0.6 * math.exp(-0.3 * depth)\n\n    def reset_parameters(self):\n        if self.dual_lambda:\n            nn.init.zeros_(self.lambda_a)\n            nn.init.zeros_(self.lambda_b)\n        else:\n            nn.init.normal_(self.lambda_q1, mean=0, std=0.1)\n            nn.init.normal_(self.lambda_k1, mean=0, std=0.1)\n            nn.init.normal_(self.lambda_q2, mean=0, std=0.1)\n            nn.init.normal_(self.lambda_k2, mean=0, std=0.1)\n\n    def _compute_lambda(self) -> torch.Tensor:\n        if self.lambda_a is not None:\n            lambda_1 = torch.exp(self.lambda_a)\n            lambda_2 = torch.exp(self.lambda_b)\n        else:\n            lambda_1 = torch.exp(torch.sum(self.lambda_q1 * self.lambda_k1, dim=-1).float())\n            lambda_2 = torch.exp(torch.sum(self.lambda_q2 * self.lambda_k2, dim=-1).float())\n        return lambda_1 - lambda_2 + self.lambda_init\n\n    def forward(\n            self,\n            x: torch.Tensor,\n            attn_mask: Optional[torch.Tensor] = None,\n            is_causal: bool = False,\n    ) -> torch.Tensor:\n        B, N, C = x.shape\n\n        q, k, v = self.qkv(x).chunk(3, dim=2)\n        q = q.reshape(B, N, 2 * self.num_heads, self.head_dim).transpose(1, 2)\n        k = k.reshape(B, N, 2 
* self.num_heads, self.head_dim).transpose(1, 2)\n        v = v.reshape(B, N, self.num_heads, 2 * self.head_dim).transpose(1, 2)\n\n        q, k = self.q_norm(q), self.k_norm(k)\n\n        lambda_full = self._compute_lambda().type_as(q)\n\n        if self.fused_attn:\n            q = q.reshape(B, self.num_heads, 2, N, self.head_dim)\n            k = k.reshape(B, self.num_heads, 2, N, self.head_dim)\n            q1, q2 = q.unbind(2)\n            k1, k2 = k.unbind(2)\n\n            dropout_p = self.attn_drop_p if self.training else 0.0\n            attn1 = F.scaled_dot_product_attention(\n                q1, k1, v, attn_mask=attn_mask, dropout_p=dropout_p, is_causal=is_causal)\n            attn2 = F.scaled_dot_product_attention(\n                q2, k2, v, attn_mask=attn_mask, dropout_p=dropout_p, is_causal=is_causal)\n\n            x = attn1 - lambda_full * attn2\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            attn_bias = resolve_self_attn_mask(N, attn, attn_mask, is_causal=is_causal)\n            attn = maybe_add_mask(attn, attn_bias)\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n\n            attn = attn.view(B, self.num_heads, 2, N, N)\n            attn = attn[:, :, 0] - lambda_full * attn[:, :, 1]\n            x = attn @ v\n\n        x = self.sub_norm(x)\n        x = x * (1 - self.lambda_init)\n        x = x.transpose(1, 2).reshape(B, N, C)\n\n        x = self.norm(x)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n\n        return x\n"
  },
  {
    "path": "timm/layers/drop.py",
    "content": "\"\"\" DropBlock, DropPath\n\nPyTorch implementations of DropBlock and DropPath (Stochastic Depth) regularization layers.\n\nPapers:\nDropBlock: A regularization method for convolutional networks (https://arxiv.org/abs/1810.12890)\n\nDeep Networks with Stochastic Depth (https://arxiv.org/abs/1603.09382)\n\nCode:\nDropBlock impl inspired by two Tensorflow impl that I liked:\n - https://github.com/tensorflow/tpu/blob/master/models/official/resnet/resnet_model.py#L74\n - https://github.com/clovaai/assembled-cnn/blob/master/nets/blocks.py\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom typing import List, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\n\ndef drop_block_2d(\n        x: torch.Tensor,\n        drop_prob: float = 0.1,\n        block_size: int = 7,\n        gamma_scale: float = 1.0,\n        with_noise: bool = False,\n        inplace: bool = False,\n        couple_channels: bool = True,\n        scale_by_keep: bool = True,\n):\n    \"\"\" DropBlock. 
See https://arxiv.org/pdf/1810.12890.pdf\n\n    DropBlock with an experimental gaussian noise option.\n\n    Args:\n        x: Input tensor of shape (B, C, H, W).\n        drop_prob: Probability of dropping a block.\n        block_size: Size of the block to drop.\n        gamma_scale: Scale factor for the drop probability.\n        with_noise: If True, add gaussian noise to dropped regions instead of zeros.\n        inplace: If True, perform operation in-place.\n        couple_channels: If True, all channels share the same drop mask (per the original paper).\n            If False, each channel gets an independent mask.\n        scale_by_keep: If True, scale kept activations to maintain expected values.\n\n    Returns:\n        Tensor with dropped blocks, same shape as input.\n    \"\"\"\n    B, C, H, W = x.shape\n    kh, kw = min(block_size, H), min(block_size, W)\n\n    # Compute gamma (seed drop rate) - probability of dropping each spatial location\n    gamma = float(gamma_scale * drop_prob * H * W) / float(kh * kw) / float((H - kh + 1) * (W - kw + 1))\n\n    # Generate drop mask: 1 at block centers to drop, 0 elsewhere\n    # couple_channels=True means all channels share same spatial mask (matches paper)\n    noise_shape = (B, 1 if couple_channels else C, H, W)\n    with torch.no_grad():\n        block_mask = torch.empty(noise_shape, dtype=x.dtype, device=x.device).bernoulli_(gamma)\n\n        # Expand block centers to full blocks using max pooling\n        block_mask = F.max_pool2d(\n            block_mask,\n            kernel_size=(kh, kw),\n            stride=1,\n            padding=(kh // 2, kw // 2),\n        )\n        # Handle even kernel sizes - max_pool2d output is 1 larger in each even dimension\n        if kh % 2 == 0 or kw % 2 == 0:\n            # Fix for even kernels proposed by https://github.com/crutcher\n            block_mask = block_mask[..., (kh + 1) % 2:, (kw + 1) % 2:]\n\n        keep_mask = 1. 
- block_mask\n\n    if with_noise:\n        with torch.no_grad():\n            noise = torch.empty_like(keep_mask).normal_()\n            noise.mul_(block_mask)\n\n        if inplace:\n            x.mul_(keep_mask).add_(noise)\n        else:\n            x = x * keep_mask + noise\n    else:\n        if scale_by_keep:\n            with torch.no_grad():\n                # Normalize to maintain expected values (scale up kept activations)\n                normalize_scale = keep_mask.numel() / keep_mask.to(dtype=torch.float32).sum().add(1e-7)\n                keep_mask.mul_(normalize_scale.to(x.dtype))\n\n        if inplace:\n            x.mul_(keep_mask)\n        else:\n            x = x * keep_mask\n\n    return x\n\n\nclass DropBlock2d(nn.Module):\n    \"\"\" DropBlock. See https://arxiv.org/pdf/1810.12890.pdf\n\n    Args:\n        drop_prob: Probability of dropping a block.\n        block_size: Size of the block to drop.\n        gamma_scale: Scale factor for the drop probability.\n        with_noise: If True, add gaussian noise to dropped regions instead of zeros.\n        inplace: If True, perform operation in-place.\n        couple_channels: If True, all channels share the same drop mask (per the original paper).\n            If False, each channel gets an independent mask.\n        scale_by_keep: If True, scale kept activations to maintain expected values.\n    \"\"\"\n\n    def __init__(\n            self,\n            drop_prob: float = 0.1,\n            block_size: int = 7,\n            gamma_scale: float = 1.0,\n            with_noise: bool = False,\n            inplace: bool = False,\n            couple_channels: bool = True,\n            scale_by_keep: bool = True,\n            **kwargs,\n    ):\n        super().__init__()\n        self.drop_prob = drop_prob\n        self.gamma_scale = gamma_scale\n        self.block_size = block_size\n        self.with_noise = with_noise\n        self.inplace = inplace\n        self.couple_channels = couple_channels\n     
   self.scale_by_keep = scale_by_keep\n\n        # Backwards compatibility: silently consume args removed in v1.0.23, warn on unknown\n        deprecated_args = {'batchwise', 'fast'}\n        for k in kwargs:\n            if k not in deprecated_args:\n                import warnings\n                warnings.warn(f\"DropBlock2d() got unexpected keyword argument '{k}'\")\n\n    def forward(self, x):\n        if not self.training or not self.drop_prob:\n            return x\n        return drop_block_2d(\n            x,\n            drop_prob=self.drop_prob,\n            block_size=self.block_size,\n            gamma_scale=self.gamma_scale,\n            with_noise=self.with_noise,\n            inplace=self.inplace,\n            couple_channels=self.couple_channels,\n            scale_by_keep=self.scale_by_keep,\n        )\n\n\ndef drop_path(x, drop_prob: float = 0., training: bool = False, scale_by_keep: bool = True):\n    \"\"\"Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).\n\n    This is the same as the DropConnect impl I created for EfficientNet, etc networks, however,\n    the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper...\n    See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for\n    changing the layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use\n    'survival rate' as the argument.\n\n    \"\"\"\n    if drop_prob == 0. 
or not training:\n        return x\n    keep_prob = 1 - drop_prob\n    shape = (x.shape[0],) + (1,) * (x.ndim - 1)  # work with diff dim tensors, not just 2D ConvNets\n    random_tensor = x.new_empty(shape).bernoulli_(keep_prob)\n    if keep_prob > 0.0 and scale_by_keep:\n        random_tensor.div_(keep_prob)\n    return x * random_tensor\n\n\nclass DropPath(nn.Module):\n    \"\"\"Drop paths (Stochastic Depth) per sample  (when applied in main path of residual blocks).\n    \"\"\"\n    def __init__(self, drop_prob: float = 0., scale_by_keep: bool = True):\n        super().__init__()\n        self.drop_prob = drop_prob\n        self.scale_by_keep = scale_by_keep\n\n    def forward(self, x):\n        return drop_path(x, self.drop_prob, self.training, self.scale_by_keep)\n\n    def extra_repr(self):\n        return f'drop_prob={round(self.drop_prob,3):0.3f}'\n\n\ndef calculate_drop_path_rates(\n        drop_path_rate: float,\n        depths: Union[int, List[int]],\n        stagewise: bool = False,\n) -> Union[List[float], List[List[float]]]:\n    \"\"\"Generate drop path rates for stochastic depth.\n\n    This function handles two common patterns for drop path rate scheduling:\n    1. Per-block: Linear increase from 0 to drop_path_rate across all blocks\n    2. Stage-wise: Linear increase across stages, with same rate within each stage\n\n    Args:\n        drop_path_rate: Maximum drop path rate (at the end).\n        depths: Either a single int for total depth (per-block mode) or\n                list of ints for depths per stage (stage-wise mode).\n        stagewise: If True, use stage-wise pattern. 
If False, use per-block pattern.\n                   When depths is a list, stagewise defaults to True.\n\n    Returns:\n        For per-block mode: List of drop rates, one per block.\n        For stage-wise mode: List of lists, drop rates per stage.\n    \"\"\"\n    if isinstance(depths, int):\n        # Single depth value - per-block pattern\n        if stagewise:\n            raise ValueError(\"stagewise=True requires depths to be a list of stage depths\")\n        dpr = [x.item() for x in torch.linspace(0, drop_path_rate, depths, device='cpu')]\n        return dpr\n    else:\n        # List of depths - can be either pattern\n        total_depth = sum(depths)\n        if stagewise:\n            # Stage-wise pattern: same drop rate within each stage\n            dpr = [x.tolist() for x in torch.linspace(0, drop_path_rate, total_depth, device='cpu').split(depths)]\n            return dpr\n        else:\n            # Per-block pattern across all stages\n            dpr = [x.item() for x in torch.linspace(0, drop_path_rate, total_depth, device='cpu')]\n            return dpr\n"
  },
  {
    "path": "timm/layers/eca.py",
    "content": "\"\"\"\nECA module from ECAnet\n\npaper: ECA-Net: Efficient Channel Attention for Deep Convolutional Neural Networks\nhttps://arxiv.org/abs/1910.03151\n\nOriginal ECA model borrowed from https://github.com/BangguWu/ECANet\n\nModified circular ECA implementation and adaption for use in timm package\nby Chris Ha https://github.com/VRandme\n\nOriginal License:\n\nMIT License\n\nCopyright (c) 2019 BangguWu, Qilong Wang\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n\"\"\"\nfrom typing import Optional, Tuple, Type, Union\nimport math\n\nfrom torch import nn\nimport torch.nn.functional as F\n\nfrom .create_act import create_act_layer\nfrom .helpers import make_divisible\n\n\nclass EcaModule(nn.Module):\n    \"\"\"Constructs an ECA module.\n\n    Args:\n        channels: Number of channels of the input feature map for use in adaptive kernel sizes\n            for actual calculations according to channel.\n            gamma, beta: when channel is given parameters of mapping function\n            refer to original paper https://arxiv.org/pdf/1910.03151.pdf\n            (default=None. if channel size not given, use k_size given for kernel size.)\n        kernel_size: Adaptive selection of kernel size (default=3)\n        gamm: used in kernel_size calc, see above\n        beta: used in kernel_size calc, see above\n        act_layer: optional non-linearity after conv, enables conv bias, this is an experiment\n        gate_layer: gating non-linearity to use\n    \"\"\"\n    def __init__(\n            self,\n            channels: Optional[int] = None,\n            kernel_size: int = 3,\n            gamma: float = 2,\n            beta: float = 1,\n            act_layer: Optional[Type[nn.Module]] = None,\n            gate_layer: Union[str, Type[nn.Module]] = 'sigmoid',\n            rd_ratio: float = 1/8,\n            rd_channels: Optional[int] = None,\n            rd_divisor: int = 8,\n            use_mlp: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if channels is not None:\n            t = int(abs(math.log(channels, 2) + beta) / gamma)\n            kernel_size = 
max(t if t % 2 else t + 1, 3)\n        assert kernel_size % 2 == 1\n        padding = (kernel_size - 1) // 2\n        if use_mlp:\n            # NOTE 'mlp' mode is a timm experiment, not in paper\n            assert channels is not None\n            if rd_channels is None:\n                rd_channels = make_divisible(channels * rd_ratio, divisor=rd_divisor)\n            act_layer = act_layer or nn.ReLU\n            self.conv = nn.Conv1d(1, rd_channels, kernel_size=1, padding=0, bias=True, **dd)\n            self.act = create_act_layer(act_layer)\n            self.conv2 = nn.Conv1d(rd_channels, 1, kernel_size=kernel_size, padding=padding, bias=True, **dd)\n        else:\n            self.conv = nn.Conv1d(1, 1, kernel_size=kernel_size, padding=padding, bias=False, **dd)\n            self.act = None\n            self.conv2 = None\n        self.gate = create_act_layer(gate_layer)\n\n    def forward(self, x):\n        y = x.mean((2, 3)).view(x.shape[0], 1, -1)  # view for 1d conv\n        y = self.conv(y)\n        if self.conv2 is not None:\n            y = self.act(y)\n            y = self.conv2(y)\n        y = self.gate(y).view(x.shape[0], -1, 1, 1)\n        return x * y.expand_as(x)\n\n\nEfficientChannelAttn = EcaModule  # alias\n\n\nclass CecaModule(nn.Module):\n    \"\"\"Constructs a circular ECA module.\n\n    ECA module where the conv uses circular padding rather than zero padding.\n    Unlike the spatial dimension, the channels do not have inherent ordering nor\n    locality. 
Although this module in essence, applies such an assumption, it is unnecessary\n    to limit the channels on either \"edge\" from being circularly adapted to each other.\n    This will fundamentally increase connectivity and possibly increase performance metrics\n    (accuracy, robustness), without significantly impacting resource metrics\n    (parameter size, throughput,latency, etc)\n\n    Args:\n        channels: Number of channels of the input feature map for use in adaptive kernel sizes\n            for actual calculations according to channel.\n            gamma, beta: when channel is given parameters of mapping function\n            refer to original paper https://arxiv.org/pdf/1910.03151.pdf\n            (default=None. if channel size not given, use k_size given for kernel size.)\n        kernel_size: Adaptive selection of kernel size (default=3)\n        gamm: used in kernel_size calc, see above\n        beta: used in kernel_size calc, see above\n        act_layer: optional non-linearity after conv, enables conv bias, this is an experiment\n        gate_layer: gating non-linearity to use\n    \"\"\"\n\n    def __init__(\n            self,\n            channels: Optional[int] = None,\n            kernel_size: int = 3,\n            gamma: float = 2,\n            beta: float = 1,\n            act_layer: Optional[nn.Module] = None,\n            gate_layer: Union[str, Type[nn.Module]] = 'sigmoid',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if channels is not None:\n            t = int(abs(math.log(channels, 2) + beta) / gamma)\n            kernel_size = max(t if t % 2 else t + 1, 3)\n        has_act = act_layer is not None\n        assert kernel_size % 2 == 1\n\n        # PyTorch circular padding mode is buggy as of pytorch 1.4\n        # see https://github.com/pytorch/pytorch/pull/17240\n        # implement manual circular padding\n        self.padding = 
(kernel_size - 1) // 2\n        self.conv = nn.Conv1d(1, 1, kernel_size=kernel_size, padding=0, bias=has_act, **dd)\n        self.gate = create_act_layer(gate_layer)\n\n    def forward(self, x):\n        y = x.mean((2, 3)).view(x.shape[0], 1, -1)\n        # Manually implement circular padding, F.pad does not seemed to be bugged\n        y = F.pad(y, (self.padding, self.padding), mode='circular')\n        y = self.conv(y)\n        y = self.gate(y).view(x.shape[0], -1, 1, 1)\n        return x * y.expand_as(x)\n\n\nCircularEfficientChannelAttn = CecaModule\n"
  },
  {
    "path": "timm/layers/evo_norm.py",
    "content": "\"\"\" EvoNorm in PyTorch\n\nBased on `Evolving Normalization-Activation Layers` - https://arxiv.org/abs/2004.02967\n@inproceedings{NEURIPS2020,\n author = {Liu, Hanxiao and Brock, Andy and Simonyan, Karen and Le, Quoc},\n booktitle = {Advances in Neural Information Processing Systems},\n editor = {H. Larochelle and M. Ranzato and R. Hadsell and M. F. Balcan and H. Lin},\n pages = {13539--13550},\n publisher = {Curran Associates, Inc.},\n title = {Evolving Normalization-Activation Layers},\n url = {https://proceedings.neurips.cc/paper/2020/file/9d4c03631b8b0c85ae08bf05eda37d0f-Paper.pdf},\n volume = {33},\n year = {2020}\n}\n\nAn attempt at getting decent performing EvoNorms running in PyTorch.\nWhile faster than other PyTorch impl, still quite a ways off the built-in BatchNorm\nin terms of memory usage and throughput on GPUs.\n\nI'm testing these modules on TPU w/ PyTorch XLA. Promising start but\ncurrently working around some issues with builtin torch/tensor.var/std. Unlike\nGPU, similar train speeds for EvoNormS variants and BatchNorm.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom typing import Optional, Sequence, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom .create_act import create_act_layer\nfrom .trace_utils import _assert\n\n\ndef instance_std(x, eps: float = 1e-5):\n    std = x.float().var(dim=(2, 3), unbiased=False, keepdim=True).add(eps).sqrt().to(x.dtype)\n    return std.expand(x.shape)\n\n\ndef instance_std_tpu(x, eps: float = 1e-5):\n    std = manual_var(x, dim=(2, 3)).add(eps).sqrt()\n    return std.expand(x.shape)\n# instance_std = instance_std_tpu\n\n\ndef instance_rms(x, eps: float = 1e-5):\n    rms = x.float().square().mean(dim=(2, 3), keepdim=True).add(eps).sqrt().to(x.dtype)\n    return rms.expand(x.shape)\n\n\ndef manual_var(x, dim: Union[int, Sequence[int]], diff_sqm: bool = False):\n    xm = x.mean(dim=dim, keepdim=True)\n    if diff_sqm:\n        # 
difference of squared mean and mean squared, faster on TPU can be less stable\n        var = ((x * x).mean(dim=dim, keepdim=True) - (xm * xm)).clamp(0)\n    else:\n        var = ((x - xm) * (x - xm)).mean(dim=dim, keepdim=True)\n    return var\n\n\ndef group_std(x, groups: int = 32, eps: float = 1e-5, flatten: bool = False):\n    B, C, H, W = x.shape\n    x_dtype = x.dtype\n    _assert(C % groups == 0, '')\n    if flatten:\n        x = x.reshape(B, groups, -1)  # FIXME simpler shape causing TPU / XLA issues\n        std = x.float().var(dim=2, unbiased=False, keepdim=True).add(eps).sqrt().to(x_dtype)\n    else:\n        x = x.reshape(B, groups, C // groups, H, W)\n        std = x.float().var(dim=(2, 3, 4), unbiased=False, keepdim=True).add(eps).sqrt().to(x_dtype)\n    return std.expand(x.shape).reshape(B, C, H, W)\n\n\ndef group_std_tpu(x, groups: int = 32, eps: float = 1e-5, diff_sqm: bool = False, flatten: bool = False):\n    # This is a workaround for some stability / odd behaviour of .var and .std\n    # running on PyTorch XLA w/ TPUs. 
These manual var impl are producing much better results\n    B, C, H, W = x.shape\n    _assert(C % groups == 0, '')\n    if flatten:\n        x = x.reshape(B, groups, -1)  # FIXME simpler shape causing TPU / XLA issues\n        var = manual_var(x, dim=-1, diff_sqm=diff_sqm)\n    else:\n        x = x.reshape(B, groups, C // groups, H, W)\n        var = manual_var(x, dim=(2, 3, 4), diff_sqm=diff_sqm)\n    return var.add(eps).sqrt().expand(x.shape).reshape(B, C, H, W)\n#group_std = group_std_tpu  # FIXME TPU temporary\n\n\ndef group_rms(x, groups: int = 32, eps: float = 1e-5):\n    B, C, H, W = x.shape\n    _assert(C % groups == 0, '')\n    x_dtype = x.dtype\n    x = x.reshape(B, groups, C // groups, H, W)\n    rms = x.float().square().mean(dim=(2, 3, 4), keepdim=True).add(eps).sqrt_().to(x_dtype)\n    return rms.expand(x.shape).reshape(B, C, H, W)\n\n\nclass EvoNorm2dB0(nn.Module):\n    def __init__(\n            self,\n            num_features: int,\n            apply_act: bool = True,\n            momentum: float = 0.1,\n            eps: float = 1e-3,\n            device=None,\n            dtype=None,\n            **_\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.apply_act = apply_act  # apply activation (non-linearity)\n        self.momentum = momentum\n        self.eps = eps\n        self.weight = nn.Parameter(torch.empty(num_features, **dd))\n        self.bias = nn.Parameter(torch.empty(num_features, **dd))\n        self.v = nn.Parameter(torch.empty(num_features, **dd)) if apply_act else None\n        self.register_buffer('running_var', torch.ones(num_features, **dd))\n\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        nn.init.ones_(self.weight)\n        nn.init.zeros_(self.bias)\n        if self.v is not None:\n            nn.init.ones_(self.v)\n\n    def forward(self, x):\n        _assert(x.dim() == 4, 'expected 4D input')\n        x_dtype = x.dtype\n        v_shape = (1, -1, 1, 
1)\n        if self.v is not None:\n            if self.training:\n                var = x.float().var(dim=(0, 2, 3), unbiased=False)\n                # var = manual_var(x, dim=(0, 2, 3)).squeeze()\n                n = x.numel() / x.shape[1]\n                self.running_var.copy_(\n                    self.running_var * (1 - self.momentum) +\n                    var.detach() * self.momentum * (n / (n - 1)))\n            else:\n                var = self.running_var\n            left = var.add(self.eps).sqrt_().to(x_dtype).view(v_shape).expand_as(x)\n            v = self.v.to(x_dtype).view(v_shape)\n            right = x * v + instance_std(x, self.eps)\n            x = x / left.max(right)\n        return x * self.weight.to(x_dtype).view(v_shape) + self.bias.to(x_dtype).view(v_shape)\n\n\nclass EvoNorm2dB1(nn.Module):\n    def __init__(\n            self,\n            num_features: int,\n            apply_act: bool = True,\n            momentum: float = 0.1,\n            eps: float = 1e-5,\n            device=None,\n            dtype=None,\n            **_\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.apply_act = apply_act  # apply activation (non-linearity)\n        self.momentum = momentum\n        self.eps = eps\n        self.weight = nn.Parameter(torch.empty(num_features, **dd))\n        self.bias = nn.Parameter(torch.empty(num_features, **dd))\n        self.register_buffer('running_var', torch.ones(num_features, **dd))\n\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        nn.init.ones_(self.weight)\n        nn.init.zeros_(self.bias)\n\n    def forward(self, x):\n        _assert(x.dim() == 4, 'expected 4D input')\n        x_dtype = x.dtype\n        v_shape = (1, -1, 1, 1)\n        if self.apply_act:\n            if self.training:\n                var = x.float().var(dim=(0, 2, 3), unbiased=False)\n                n = x.numel() / x.shape[1]\n                self.running_var.copy_(\n     
               self.running_var * (1 - self.momentum) +\n                    var.detach().to(self.running_var.dtype) * self.momentum * (n / (n - 1)))\n            else:\n                var = self.running_var\n            var = var.to(x_dtype).view(v_shape)\n            left = var.add(self.eps).sqrt_()\n            right = (x + 1) * instance_rms(x, self.eps)\n            x = x / left.max(right)\n        return x * self.weight.view(v_shape).to(x_dtype) + self.bias.view(v_shape).to(x_dtype)\n\n\nclass EvoNorm2dB2(nn.Module):\n    def __init__(\n            self,\n            num_features: int,\n            apply_act: bool = True,\n            momentum: float = 0.1,\n            eps: float = 1e-5,\n            device=None,\n            dtype=None,\n            **_\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.apply_act = apply_act  # apply activation (non-linearity)\n        self.momentum = momentum\n        self.eps = eps\n        self.weight = nn.Parameter(torch.empty(num_features, **dd))\n        self.bias = nn.Parameter(torch.empty(num_features, **dd))\n        self.register_buffer('running_var', torch.ones(num_features, **dd))\n\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        nn.init.ones_(self.weight)\n        nn.init.zeros_(self.bias)\n\n    def forward(self, x):\n        _assert(x.dim() == 4, 'expected 4D input')\n        x_dtype = x.dtype\n        v_shape = (1, -1, 1, 1)\n        if self.apply_act:\n            if self.training:\n                var = x.float().var(dim=(0, 2, 3), unbiased=False)\n                n = x.numel() / x.shape[1]\n                self.running_var.copy_(\n                    self.running_var * (1 - self.momentum) +\n                    var.detach().to(self.running_var.dtype) * self.momentum * (n / (n - 1)))\n            else:\n                var = self.running_var\n            var = var.to(x_dtype).view(v_shape)\n            left = 
var.add(self.eps).sqrt_()\n            right = instance_rms(x, self.eps) - x\n            x = x / left.max(right)\n        return x * self.weight.view(v_shape).to(x_dtype) + self.bias.view(v_shape).to(x_dtype)\n\n\nclass EvoNorm2dS0(nn.Module):\n    def __init__(\n            self,\n            num_features: int,\n            groups: int = 32,\n            group_size: Optional[int] = None,\n            apply_act: bool = True,\n            eps: float = 1e-5,\n            device=None,\n            dtype=None,\n            **_\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.apply_act = apply_act  # apply activation (non-linearity)\n        if group_size:\n            assert num_features % group_size == 0\n            self.groups = num_features // group_size\n        else:\n            self.groups = groups\n        self.eps = eps\n        self.weight = nn.Parameter(torch.empty(num_features, **dd))\n        self.bias = nn.Parameter(torch.empty(num_features, **dd))\n        self.v = nn.Parameter(torch.empty(num_features, **dd)) if apply_act else None\n\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        nn.init.ones_(self.weight)\n        nn.init.zeros_(self.bias)\n        if self.v is not None:\n            nn.init.ones_(self.v)\n\n    def forward(self, x):\n        _assert(x.dim() == 4, 'expected 4D input')\n        x_dtype = x.dtype\n        v_shape = (1, -1, 1, 1)\n        if self.v is not None:\n            v = self.v.view(v_shape).to(x_dtype)\n            x = x * (x * v).sigmoid() / group_std(x, self.groups, self.eps)\n        return x * self.weight.view(v_shape).to(x_dtype) + self.bias.view(v_shape).to(x_dtype)\n\n\nclass EvoNorm2dS0a(EvoNorm2dS0):\n    def __init__(\n            self,\n            num_features: int,\n            groups: int = 32,\n            group_size: Optional[int] = None,\n            apply_act: bool = True,\n            eps: float = 1e-3,\n            device=None,\n  
          dtype=None,\n            **_\n    ):\n        super().__init__(\n            num_features,\n            groups=groups,\n            group_size=group_size,\n            apply_act=apply_act,\n            eps=eps,\n            device=device,\n            dtype=dtype,\n        )\n\n    def forward(self, x):\n        _assert(x.dim() == 4, 'expected 4D input')\n        x_dtype = x.dtype\n        v_shape = (1, -1, 1, 1)\n        d = group_std(x, self.groups, self.eps)\n        if self.v is not None:\n            v = self.v.view(v_shape).to(x_dtype)\n            x = x * (x * v).sigmoid()\n        x = x / d\n        return x * self.weight.view(v_shape).to(x_dtype) + self.bias.view(v_shape).to(x_dtype)\n\n\nclass EvoNorm2dS1(nn.Module):\n    def __init__(\n            self,\n            num_features: int,\n            groups: int = 32,\n            group_size: Optional[int] = None,\n            apply_act: bool = True,\n            act_layer: Optional[Type[nn.Module]] = None,\n            eps: float = 1e-5,\n            device=None,\n            dtype=None,\n            **_\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        act_layer = act_layer or nn.SiLU\n        self.apply_act = apply_act  # apply activation (non-linearity)\n        if act_layer is not None and apply_act:\n            self.act = create_act_layer(act_layer)\n        else:\n            self.act = nn.Identity()\n        if group_size:\n            assert num_features % group_size == 0\n            self.groups = num_features // group_size\n        else:\n            self.groups = groups\n        self.eps = eps\n        self.pre_act_norm = False\n        self.weight = nn.Parameter(torch.empty(num_features, **dd))\n        self.bias = nn.Parameter(torch.empty(num_features, **dd))\n\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        nn.init.ones_(self.weight)\n        nn.init.zeros_(self.bias)\n\n    def forward(self, x):\n        
_assert(x.dim() == 4, 'expected 4D input')\n        x_dtype = x.dtype\n        v_shape = (1, -1, 1, 1)\n        if self.apply_act:\n            x = self.act(x) / group_std(x, self.groups, self.eps)\n        return x * self.weight.view(v_shape).to(x_dtype) + self.bias.view(v_shape).to(x_dtype)\n\n\nclass EvoNorm2dS1a(EvoNorm2dS1):\n    def __init__(\n            self,\n            num_features: int,\n            groups: int = 32,\n            group_size: Optional[int] = None,\n            apply_act: bool = True,\n            act_layer: Optional[Type[nn.Module]] = None,\n            eps: float = 1e-3,\n            device=None,\n            dtype=None,\n            **_\n    ):\n        super().__init__(\n            num_features,\n            groups=groups,\n            group_size=group_size,\n            apply_act=apply_act,\n            act_layer=act_layer,\n            eps=eps,\n            device=device,\n            dtype=dtype,\n        )\n\n    def forward(self, x):\n        _assert(x.dim() == 4, 'expected 4D input')\n        x_dtype = x.dtype\n        v_shape = (1, -1, 1, 1)\n        x = self.act(x) / group_std(x, self.groups, self.eps)\n        return x * self.weight.view(v_shape).to(x_dtype) + self.bias.view(v_shape).to(x_dtype)\n\n\nclass EvoNorm2dS2(nn.Module):\n    def __init__(\n            self,\n            num_features: int,\n            groups: int = 32,\n            group_size: Optional[int] = None,\n            apply_act: bool = True,\n            act_layer: Optional[Type[nn.Module]] = None,\n            eps: float = 1e-5,\n            device=None,\n            dtype=None,\n            **_\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        act_layer = act_layer or nn.SiLU\n        self.apply_act = apply_act  # apply activation (non-linearity)\n        if act_layer is not None and apply_act:\n            self.act = create_act_layer(act_layer)\n        else:\n            self.act = nn.Identity()\n        if 
group_size:\n            assert num_features % group_size == 0\n            self.groups = num_features // group_size\n        else:\n            self.groups = groups\n        self.eps = eps\n        self.weight = nn.Parameter(torch.empty(num_features, **dd))\n        self.bias = nn.Parameter(torch.empty(num_features, **dd))\n\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        nn.init.ones_(self.weight)\n        nn.init.zeros_(self.bias)\n\n    def forward(self, x):\n        _assert(x.dim() == 4, 'expected 4D input')\n        x_dtype = x.dtype\n        v_shape = (1, -1, 1, 1)\n        if self.apply_act:\n            x = self.act(x) / group_rms(x, self.groups, self.eps)\n        return x * self.weight.view(v_shape).to(x_dtype) + self.bias.view(v_shape).to(x_dtype)\n\n\nclass EvoNorm2dS2a(EvoNorm2dS2):\n    def __init__(\n            self,\n            num_features: int,\n            groups: int = 32,\n            group_size: Optional[int] = None,\n            apply_act: bool = True,\n            act_layer: Optional[Type[nn.Module]] = None,\n            eps: float = 1e-3,\n            device=None,\n            dtype=None,\n            **_\n    ):\n        super().__init__(\n            num_features,\n            groups=groups,\n            group_size=group_size,\n            apply_act=apply_act,\n            act_layer=act_layer,\n            eps=eps,\n            device=device,\n            dtype=dtype,\n        )\n\n    def forward(self, x):\n        _assert(x.dim() == 4, 'expected 4D input')\n        x_dtype = x.dtype\n        v_shape = (1, -1, 1, 1)\n        x = self.act(x) / group_rms(x, self.groups, self.eps)\n        return x * self.weight.view(v_shape).to(x_dtype) + self.bias.view(v_shape).to(x_dtype)\n"
  },
  {
    "path": "timm/layers/fast_norm.py",
    "content": "\"\"\" 'Fast' Normalization Functions\n\nFor GroupNorm and LayerNorm these functions bypass typical AMP upcast to float32.\n\nAdditionally, for LayerNorm, the APEX fused LN is used if available (which also does not upcast)\n\nHacked together by / Copyright 2022 Ross Wightman\n\"\"\"\nfrom typing import List, Optional\n\nimport torch\nfrom torch.nn import functional as F\n\ntry:\n    from apex.normalization.fused_layer_norm import fused_layer_norm_affine\n    has_apex = True\nexcept ImportError:\n    has_apex = False\n\ntry:\n    from apex.normalization.fused_layer_norm import fused_rms_norm_affine, fused_rms_norm\n    has_apex_rmsnorm = True\nexcept ImportError:\n    has_apex_rmsnorm = False\n\n\nhas_torch_rms_norm = hasattr(F, 'rms_norm')\n\n# fast (ie lower precision LN) can be disabled with this flag if issues crop up\n_USE_FAST_NORM = False  # defaulting to False for now\n\n\ndef get_autocast_dtype(device: str = 'cuda'):\n    try:\n        return torch.get_autocast_dtype(device)\n    except (AttributeError, TypeError):\n        # dispatch to older device specific fns, only covering cuda/cpu devices here\n        if device == 'cpu':\n            return torch.get_autocast_cpu_dtype()\n        else:\n            assert device == 'cuda'\n            return torch.get_autocast_gpu_dtype()\n\n\ndef is_autocast_enabled(device: str = 'cuda'):\n    try:\n        return torch.is_autocast_enabled(device)\n    except TypeError:\n        # dispatch to older device specific fns, only covering cuda/cpu devices here\n        if device == 'cpu':\n            return torch.is_autocast_cpu_enabled()\n        else:\n            assert device == 'cuda'\n            return torch.is_autocast_enabled()  # defaults cuda (only cuda on older pytorch)\n\n\ndef is_fast_norm():\n    return _USE_FAST_NORM\n\n\ndef set_fast_norm(enable=True):\n    global _USE_FAST_NORM\n    _USE_FAST_NORM = enable\n\n\ndef fast_group_norm(\n    x: torch.Tensor,\n    num_groups: int,\n    weight: 
Optional[torch.Tensor] = None,\n    bias: Optional[torch.Tensor] = None,\n    eps: float = 1e-5\n) -> torch.Tensor:\n    if torch.jit.is_scripting():\n        # currently cannot use is_autocast_enabled within torchscript\n        return F.group_norm(x, num_groups, weight, bias, eps)\n\n    if is_autocast_enabled(x.device.type):\n        # normally native AMP casts GN inputs to float32\n        # here we use the low precision autocast dtype\n        dt = get_autocast_dtype(x.device.type)\n        x, weight, bias = (\n            x.to(dt),\n            weight.to(dt) if weight is not None else None,\n            bias.to(dt) if bias is not None else None,\n        )\n\n    with torch.amp.autocast(device_type=x.device.type, enabled=False):\n        return F.group_norm(x, num_groups, weight, bias, eps)\n\n\ndef fast_layer_norm(\n    x: torch.Tensor,\n    normalized_shape: List[int],\n    weight: Optional[torch.Tensor] = None,\n    bias: Optional[torch.Tensor] = None,\n    eps: float = 1e-5\n) -> torch.Tensor:\n    if torch.jit.is_scripting():\n        # currently cannot use is_autocast_enabled within torchscript\n        return F.layer_norm(x, normalized_shape, weight, bias, eps)\n\n    if has_apex:\n        return fused_layer_norm_affine(x, weight, bias, normalized_shape, eps)\n\n    if is_autocast_enabled(x.device.type):\n        # normally native AMP casts LN inputs to float32\n        # apex LN does not, this is behaving like Apex\n        dt = get_autocast_dtype(x.device.type)\n        x, weight, bias = (\n            x.to(dt),\n            weight.to(dt) if weight is not None else None,\n            bias.to(dt) if bias is not None else None,\n        )\n\n    with torch.amp.autocast(device_type=x.device.type, enabled=False):\n        return F.layer_norm(x, normalized_shape, weight, bias, eps)\n\n\ndef rms_norm(\n    x: torch.Tensor,\n    normalized_shape: List[int],\n    weight: Optional[torch.Tensor] = None,\n    eps: float = 1e-5,\n):\n    norm_ndim = 
len(normalized_shape)\n    v = x.pow(2)\n    if torch.jit.is_scripting():\n        # ndim = len(x.shape)\n        # dims = list(range(ndim - norm_ndim, ndim))  # this doesn't work on pytorch <= 1.13.x\n        # NOTE -ve dims cause torchscript to crash in some cases, out of options to work around\n        assert norm_ndim == 1\n        v = torch.mean(v, dim=-1).unsqueeze(-1)  # ts crashes with -ve dim + keepdim=True\n    else:\n        dims = tuple(range(-1, -norm_ndim - 1, -1))\n        v = torch.mean(v, dim=dims, keepdim=True)\n    x = x * torch.rsqrt(v + eps)\n    if weight is not None:\n        x = x * weight\n    return x\n\n\ndef fast_rms_norm(\n    x: torch.Tensor,\n    normalized_shape: List[int],\n    weight: Optional[torch.Tensor] = None,\n    eps: float = 1e-5,\n) -> torch.Tensor:\n    if torch.jit.is_scripting():\n        # this must be by itself, cannot merge with has_apex_rmsnorm\n        return rms_norm(x, normalized_shape, weight, eps)\n\n    if has_apex_rmsnorm:\n        if weight is None:\n            return fused_rms_norm(x, normalized_shape, eps)\n        else:\n            return fused_rms_norm_affine(x, weight, normalized_shape, eps)\n\n    if is_autocast_enabled(x.device.type):\n        # normally native AMP casts LN inputs to float32 and leaves the output as float32\n        # apex LN does not, this is behaving like Apex\n        dt = get_autocast_dtype(x.device.type)\n        x, weight = x.to(dt), weight.to(dt) if weight is not None else None\n\n    with torch.amp.autocast(device_type=x.device.type, enabled=False):\n        if has_torch_rms_norm:\n            x = F.rms_norm(x, normalized_shape, weight, eps)\n        else:\n            x = rms_norm(x, normalized_shape, weight, eps)\n\n    return x\n\n\ndef rms_norm2d(\n    x: torch.Tensor,\n    normalized_shape: List[int],\n    weight: Optional[torch.Tensor] = None,\n    eps: float = 1e-5,\n):\n    assert len(normalized_shape) == 1\n    v = x.pow(2)\n    v = torch.mean(v, dim=1, 
keepdim=True)\n    x = x * torch.rsqrt(v + eps)\n    if weight is not None:\n        x = x * weight.reshape(1, -1, 1, 1)\n    return x\n\n\ndef fast_rms_norm2d(\n    x: torch.Tensor,\n    normalized_shape: List[int],\n    weight: Optional[torch.Tensor] = None,\n    eps: float = 1e-5,\n) -> torch.Tensor:\n    if torch.jit.is_scripting():\n        # this must be by itself, cannot merge with has_apex_rmsnorm\n        return rms_norm2d(x, normalized_shape, weight, eps)\n\n    if has_apex_rmsnorm:\n        x = x.permute(0, 2, 3, 1)\n        if weight is None:\n            x = fused_rms_norm(x, normalized_shape, eps)\n        else:\n            x = fused_rms_norm_affine(x, weight, normalized_shape, eps)\n        x = x.permute(0, 3, 1, 2)\n\n    if is_autocast_enabled(x.device.type):\n        # normally native AMP casts norm inputs to float32 and leaves the output as float32\n        # apex does not, this is behaving like Apex\n        dt = get_autocast_dtype(x.device.type)\n        x, weight = x.to(dt), weight.to(dt) if weight is not None else None\n\n    with torch.amp.autocast(device_type=x.device.type, enabled=False):\n        x = rms_norm2d(x, normalized_shape, weight, eps)\n\n    return x\n\n\ndef simple_norm(\n    x: torch.Tensor,\n    normalized_shape: List[int],\n    weight: Optional[torch.Tensor] = None,\n    eps: float = 1e-5,\n):\n    norm_ndim = len(normalized_shape)\n    if torch.jit.is_scripting():\n        # ndim = len(x.shape)\n        # dims = list(range(ndim - norm_ndim, ndim))  # this doesn't work on pytorch <= 1.13.x\n        # NOTE -ve dims cause torchscript to crash in some cases, out of options to work around\n        assert norm_ndim == 1\n        v = torch.var(x, dim=-1).unsqueeze(-1)  # ts crashes with -ve dim + keepdim=True\n    else:\n        dims = tuple(range(-1, -norm_ndim - 1, -1))\n        v = torch.var(x, dim=dims, keepdim=True)\n    x = x * torch.rsqrt(v + eps)\n    if weight is not None:\n        x = x * weight\n    return x\n\n\ndef 
fast_simple_norm(\n    x: torch.Tensor,\n    normalized_shape: List[int],\n    weight: Optional[torch.Tensor] = None,\n    eps: float = 1e-5,\n) -> torch.Tensor:\n    if torch.jit.is_scripting():\n        # this must be by itself, cannot merge with has_apex_rmsnorm\n        return simple_norm(x, normalized_shape, weight, eps)\n\n    if is_autocast_enabled(x.device.type):\n        # normally native AMP casts LN inputs to float32\n        # apex LN does not, this is behaving like Apex\n        dt = get_autocast_dtype(x.device.type)\n        x, weight = x.to(dt), weight.to(dt) if weight is not None else None\n\n    with torch.amp.autocast(device_type=x.device.type, enabled=False):\n        x = simple_norm(x, normalized_shape, weight, eps)\n    return x\n\n"
  },
  {
    "path": "timm/layers/filter_response_norm.py",
    "content": "\"\"\" Filter Response Norm in PyTorch\n\nBased on `Filter Response Normalization Layer` - https://arxiv.org/abs/1911.09737\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nfrom typing import Optional, Type\n\nimport torch\nimport torch.nn as nn\n\nfrom .create_act import create_act_layer\nfrom .trace_utils import _assert\n\n\ndef inv_instance_rms(x, eps: float = 1e-5):\n    rms = x.square().float().mean(dim=(2, 3), keepdim=True).add(eps).rsqrt().to(x.dtype)\n    return rms.expand(x.shape)\n\n\nclass FilterResponseNormTlu2d(nn.Module):\n    def __init__(\n            self,\n            num_features: int,\n            apply_act: bool = True,\n            eps: float = 1e-5,\n            rms: bool = True,\n            device=None,\n            dtype=None,\n            **_,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.apply_act = apply_act  # apply activation (non-linearity)\n        self.rms = rms\n        self.eps = eps\n        self.weight = nn.Parameter(torch.empty(num_features, **dd))\n        self.bias = nn.Parameter(torch.empty(num_features, **dd))\n        self.tau = nn.Parameter(torch.empty(num_features, **dd)) if apply_act else None\n\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        nn.init.ones_(self.weight)\n        nn.init.zeros_(self.bias)\n        if self.tau is not None:\n            nn.init.zeros_(self.tau)\n\n    def forward(self, x):\n        _assert(x.dim() == 4, 'expected 4D input')\n        x_dtype = x.dtype\n        v_shape = (1, -1, 1, 1)\n        x = x * inv_instance_rms(x, self.eps)\n        x = x * self.weight.view(v_shape).to(dtype=x_dtype) + self.bias.view(v_shape).to(dtype=x_dtype)\n        return torch.maximum(x, self.tau.reshape(v_shape).to(dtype=x_dtype)) if self.tau is not None else x\n\n\nclass FilterResponseNormAct2d(nn.Module):\n    def __init__(\n            self,\n            num_features: int,\n            apply_act: 
bool = True,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            inplace: Optional[bool] = None,\n            rms: bool = True,\n            eps: float = 1e-5,\n            device=None,\n            dtype=None,\n            **_,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if act_layer is not None and apply_act:\n            self.act = create_act_layer(act_layer, inplace=inplace)\n        else:\n            self.act = nn.Identity()\n        self.rms = rms\n        self.eps = eps\n        self.weight = nn.Parameter(torch.empty(num_features, **dd))\n        self.bias = nn.Parameter(torch.empty(num_features, **dd))\n\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        nn.init.ones_(self.weight)\n        nn.init.zeros_(self.bias)\n\n    def forward(self, x):\n        _assert(x.dim() == 4, 'expected 4D input')\n        x_dtype = x.dtype\n        v_shape = (1, -1, 1, 1)\n        x = x * inv_instance_rms(x, self.eps)\n        x = x * self.weight.view(v_shape).to(dtype=x_dtype) + self.bias.view(v_shape).to(dtype=x_dtype)\n        return self.act(x)\n"
  },
  {
    "path": "timm/layers/format.py",
    "content": "from enum import Enum\nfrom typing import Union\n\nimport torch\n\n\nclass Format(str, Enum):\n    NCHW = 'NCHW'\n    NHWC = 'NHWC'\n    NCL = 'NCL'\n    NLC = 'NLC'\n\n\nFormatT = Union[str, Format]\n\n\ndef get_spatial_dim(fmt: FormatT):\n    \"\"\"Return spatial dimension indices for a given tensor format.\n\n    Args:\n        fmt: Tensor format (NCHW, NHWC, NCL, or NLC).\n\n    Returns:\n        Tuple of spatial dimension indices.\n    \"\"\"\n    fmt = Format(fmt)\n    if fmt is Format.NLC:\n        dim = (1,)\n    elif fmt is Format.NCL:\n        dim = (2,)\n    elif fmt is Format.NHWC:\n        dim = (1, 2)\n    else:\n        dim = (2, 3)\n    return dim\n\n\ndef get_channel_dim(fmt: FormatT):\n    \"\"\"Return channel dimension index for a given tensor format.\n\n    Args:\n        fmt: Tensor format (NCHW, NHWC, NCL, or NLC).\n\n    Returns:\n        Channel dimension index.\n    \"\"\"\n    fmt = Format(fmt)\n    if fmt is Format.NHWC:\n        dim = 3\n    elif fmt is Format.NLC:\n        dim = 2\n    else:\n        dim = 1\n    return dim\n\n\ndef nchw_to(x: torch.Tensor, fmt: Format):\n    \"\"\"Convert tensor from NCHW format to specified format.\n\n    Args:\n        x: Input tensor in NCHW format.\n        fmt: Target format.\n\n    Returns:\n        Tensor in target format.\n    \"\"\"\n    if fmt == Format.NHWC:\n        x = x.permute(0, 2, 3, 1)\n    elif fmt == Format.NLC:\n        x = x.flatten(2).transpose(1, 2)\n    elif fmt == Format.NCL:\n        x = x.flatten(2)\n    return x\n\n\ndef nhwc_to(x: torch.Tensor, fmt: Format):\n    \"\"\"Convert tensor from NHWC format to specified format.\n\n    Args:\n        x: Input tensor in NHWC format.\n        fmt: Target format.\n\n    Returns:\n        Tensor in target format.\n    \"\"\"\n    if fmt == Format.NCHW:\n        x = x.permute(0, 3, 1, 2)\n    elif fmt == Format.NLC:\n        x = x.flatten(1, 2)\n    elif fmt == Format.NCL:\n        x = x.flatten(1, 2).transpose(1, 2)\n  
  return x\n"
  },
  {
    "path": "timm/layers/gather_excite.py",
    "content": "\"\"\" Gather-Excite Attention Block\n\nPaper: `Gather-Excite: Exploiting Feature Context in CNNs` - https://arxiv.org/abs/1810.12348\n\nOfficial code here, but it's only partial impl in Caffe: https://github.com/hujie-frank/GENet\n\nI've tried to support all of the extent both w/ and w/o params. I don't believe I've seen another\nimpl that covers all of the cases.\n\nNOTE: extent=0 + extra_params=False is equivalent to Squeeze-and-Excitation\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nfrom typing import Optional, Tuple, Type, Union\nimport math\n\nfrom torch import nn as nn\nimport torch.nn.functional as F\n\nfrom .create_act import create_act_layer, get_act_layer\nfrom .create_conv2d import create_conv2d\nfrom .helpers import make_divisible\nfrom .mlp import ConvMlp\n\n\nclass GatherExcite(nn.Module):\n    \"\"\" Gather-Excite Attention Module\n    \"\"\"\n    def __init__(\n            self,\n            channels: int,\n            feat_size: Optional[Tuple[int, int]] = None,\n            extra_params: bool = False,\n            extent: int = 0,\n            use_mlp: bool = True,\n            rd_ratio: float = 1./16,\n            rd_channels: Optional[int] = None,\n            rd_divisor: int = 1,\n            add_maxpool: bool = False,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            gate_layer: Union[str, Type[nn.Module]] = 'sigmoid',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.add_maxpool = add_maxpool\n        act_layer = get_act_layer(act_layer)\n        self.extent = extent\n        if extra_params:\n            self.gather = nn.Sequential()\n            if extent == 0:\n                assert feat_size is not None, 'spatial feature size must be specified for global extent w/ params'\n                self.gather.add_module(\n                    
'conv1', create_conv2d(channels, channels, kernel_size=feat_size, stride=1, depthwise=True, *dd))\n                if norm_layer:\n                    self.gather.add_module(f'norm1', nn.BatchNorm2d(channels, *dd))\n            else:\n                assert extent % 2 == 0\n                num_conv = int(math.log2(extent))\n                for i in range(num_conv):\n                    self.gather.add_module(\n                        f'conv{i + 1}',\n                        create_conv2d(channels, channels, kernel_size=3, stride=2, depthwise=True, *dd))\n                    if norm_layer:\n                        self.gather.add_module(f'norm{i + 1}', nn.BatchNorm2d(channels, *dd))\n                    if i != num_conv - 1:\n                        self.gather.add_module(f'act{i + 1}', act_layer(inplace=True))\n        else:\n            self.gather = None\n            if self.extent == 0:\n                self.gk = 0\n                self.gs = 0\n            else:\n                assert extent % 2 == 0\n                self.gk = self.extent * 2 - 1\n                self.gs = self.extent\n\n        if not rd_channels:\n            rd_channels = make_divisible(channels * rd_ratio, rd_divisor, round_limit=0.)\n        self.mlp = ConvMlp(channels, rd_channels, act_layer=act_layer, *dd) if use_mlp else nn.Identity()\n        self.gate = create_act_layer(gate_layer)\n\n    def forward(self, x):\n        size = x.shape[-2:]\n        if self.gather is not None:\n            x_ge = self.gather(x)\n        else:\n            if self.extent == 0:\n                # global extent\n                x_ge = x.mean(dim=(2, 3), keepdims=True)\n                if self.add_maxpool:\n                    # experimental codepath, may remove or change\n                    x_ge = 0.5 * x_ge + 0.5 * x.amax((2, 3), keepdim=True)\n            else:\n                x_ge = F.avg_pool2d(\n                    x, kernel_size=self.gk, stride=self.gs, padding=self.gk // 2, 
count_include_pad=False)\n                if self.add_maxpool:\n                    # experimental codepath, may remove or change\n                    x_ge = 0.5 * x_ge + 0.5 * F.max_pool2d(x, kernel_size=self.gk, stride=self.gs, padding=self.gk // 2)\n        x_ge = self.mlp(x_ge)\n        if x_ge.shape[-1] != 1 or x_ge.shape[-2] != 1:\n            x_ge = F.interpolate(x_ge, size=size)\n        return x * self.gate(x_ge)\n"
  },
  {
    "path": "timm/layers/global_context.py",
    "content": "\"\"\" Global Context Attention Block\n\nPaper: `GCNet: Non-local Networks Meet Squeeze-Excitation Networks and Beyond`\n    - https://arxiv.org/abs/1904.11492\n\nOfficial code consulted as reference: https://github.com/xvjiarui/GCNet\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nfrom typing import Optional, Tuple, Type, Union\n\nfrom torch import nn as nn\nimport torch.nn.functional as F\n\nfrom .create_act import create_act_layer, get_act_layer\nfrom .helpers import make_divisible\nfrom .mlp import ConvMlp\nfrom .norm import LayerNorm2d\n\n\nclass GlobalContext(nn.Module):\n\n    def __init__(\n            self,\n            channels: int,\n            use_attn: bool = True,\n            fuse_add: bool = False,\n            fuse_scale: bool = True,\n            init_last_zero: bool = False,\n            rd_ratio: float = 1./8,\n            rd_channels: Optional[int] = None,\n            rd_divisor: int = 1,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            gate_layer: Union[str, Type[nn.Module]] = 'sigmoid',\n            device=None,\n            dtype=None\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        act_layer = get_act_layer(act_layer)\n\n        self.conv_attn = nn.Conv2d(channels, 1, kernel_size=1, bias=True, **dd) if use_attn else None\n\n        if rd_channels is None:\n            rd_channels = make_divisible(channels * rd_ratio, rd_divisor, round_limit=0.)\n        if fuse_add:\n            self.mlp_add = ConvMlp(channels, rd_channels, act_layer=act_layer, norm_layer=LayerNorm2d, **dd)\n        else:\n            self.mlp_add = None\n        if fuse_scale:\n            self.mlp_scale = ConvMlp(channels, rd_channels, act_layer=act_layer, norm_layer=LayerNorm2d, **dd)\n        else:\n            self.mlp_scale = None\n\n        self.gate = create_act_layer(gate_layer)\n        self.init_last_zero = init_last_zero\n\n        self.reset_parameters()\n\n    def 
reset_parameters(self):\n        if self.conv_attn is not None:\n            nn.init.kaiming_normal_(self.conv_attn.weight, mode='fan_in', nonlinearity='relu')\n        if self.mlp_add is not None:\n            nn.init.zeros_(self.mlp_add.fc2.weight)\n\n    def forward(self, x):\n        B, C, H, W = x.shape\n\n        if self.conv_attn is not None:\n            attn = self.conv_attn(x).reshape(B, 1, H * W)  # (B, 1, H * W)\n            attn = F.softmax(attn, dim=-1).unsqueeze(3)  # (B, 1, H * W, 1)\n            context = x.reshape(B, C, H * W).unsqueeze(1) @ attn\n            context = context.view(B, C, 1, 1)\n        else:\n            context = x.mean(dim=(2, 3), keepdim=True)\n\n        if self.mlp_scale is not None:\n            mlp_x = self.mlp_scale(context)\n            x = x * self.gate(mlp_x)\n        if self.mlp_add is not None:\n            mlp_x = self.mlp_add(context)\n            x = x + mlp_x\n\n        return x\n"
  },
  {
    "path": "timm/layers/grid.py",
    "content": "from typing import Tuple\n\nimport torch\n\n\ndef ndgrid(*tensors) -> Tuple[torch.Tensor, ...]:\n    \"\"\"generate N-D grid in dimension order.\n\n    The ndgrid function is like meshgrid except that the order of the first two input arguments are switched.\n\n    That is, the statement\n    [X1,X2,X3] = ndgrid(x1,x2,x3)\n\n    produces the same result as\n\n    [X2,X1,X3] = meshgrid(x2,x1,x3)\n\n    This naming is based on MATLAB, the purpose is to avoid confusion due to torch's change to make\n    torch.meshgrid behaviour move from matching ndgrid ('ij') indexing to numpy meshgrid defaults of ('xy').\n\n    \"\"\"\n    try:\n        return torch.meshgrid(*tensors, indexing='ij')\n    except TypeError:\n        # old PyTorch < 1.10 will follow this path as it does not have indexing arg,\n        # the old behaviour of meshgrid was 'ij'\n        return torch.meshgrid(*tensors)\n\n\ndef meshgrid(*tensors) -> Tuple[torch.Tensor, ...]:\n    \"\"\"generate N-D grid in spatial dim order.\n\n    The meshgrid function is similar to ndgrid except that the order of the\n    first two input and output arguments is switched.\n\n    That is, the statement\n\n    [X,Y,Z] = meshgrid(x,y,z)\n    produces the same result as\n\n    [Y,X,Z] = ndgrid(y,x,z)\n    Because of this, meshgrid is better suited to problems in two- or three-dimensional Cartesian space,\n    while ndgrid is better suited to multidimensional problems that aren't spatially based.\n    \"\"\"\n\n    # NOTE: this will throw in PyTorch < 1.10 as meshgrid did not support indexing arg or have\n    # capability of generating grid in xy order before then.\n    return torch.meshgrid(*tensors, indexing='xy')\n\n"
  },
  {
    "path": "timm/layers/grn.py",
    "content": "\"\"\" Global Response Normalization Module\n\nBased on the GRN layer presented in\n`ConvNeXt-V2 - Co-designing and Scaling ConvNets with Masked Autoencoders` - https://arxiv.org/abs/2301.00808\n\nThis implementation\n* works for both NCHW and NHWC tensor layouts\n* uses affine param names matching existing torch norm layers\n* slightly improves eager mode performance via fused addcmul\n\nHacked together by / Copyright 2023 Ross Wightman\n\"\"\"\n\nimport torch\nfrom torch import nn as nn\n\n\nclass GlobalResponseNorm(nn.Module):\n    \"\"\" Global Response Normalization layer\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            eps: float = 1e-6,\n            channels_last: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.eps = eps\n        if channels_last:\n            self.spatial_dim = (1, 2)\n            self.channel_dim = -1\n            self.wb_shape = (1, 1, 1, -1)\n        else:\n            self.spatial_dim = (2, 3)\n            self.channel_dim = 1\n            self.wb_shape = (1, -1, 1, 1)\n\n        self.weight = nn.Parameter(torch.zeros(dim, **dd))\n        self.bias = nn.Parameter(torch.zeros(dim, **dd))\n\n    def forward(self, x):\n        x_g = x.norm(p=2, dim=self.spatial_dim, keepdim=True)\n        x_n = x_g / (x_g.mean(dim=self.channel_dim, keepdim=True) + self.eps)\n        return x + torch.addcmul(self.bias.view(self.wb_shape), self.weight.view(self.wb_shape), x * x_n)\n"
  },
  {
    "path": "timm/layers/halo_attn.py",
    "content": "\"\"\" Halo Self Attention\n\nPaper: `Scaling Local Self-Attention for Parameter Efficient Visual Backbones`\n    - https://arxiv.org/abs/2103.12731\n\n@misc{2103.12731,\nAuthor = {Ashish Vaswani and Prajit Ramachandran and Aravind Srinivas and Niki Parmar and Blake Hechtman and\n    Jonathon Shlens},\nTitle = {Scaling Local Self-Attention for Parameter Efficient Visual Backbones},\nYear = {2021},\n}\n\nStatus:\nThis impl is a WIP, there is no official ref impl and some details in paper weren't clear to me.\nThe attention mechanism works but it's slow as implemented.\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nfrom typing import List, Optional, Tuple, Union\n\nimport torch\nfrom torch import nn\nimport torch.nn.functional as F\n\nfrom .helpers import make_divisible\nfrom .weight_init import trunc_normal_\nfrom .trace_utils import _assert\n\n\ndef rel_logits_1d(q, rel_k, permute_mask: List[int]):\n    \"\"\" Compute relative logits along one dimension\n\n    As per: https://gist.github.com/aravindsrinivas/56359b79f0ce4449bcb04ab4b56a57a2\n    Originally from: `Attention Augmented Convolutional Networks` - https://arxiv.org/abs/1904.09925\n\n    Args:\n        q: (batch, height, width, dim)\n        rel_k: (2 * window - 1, dim)\n        permute_mask: permute output dim according to this\n    \"\"\"\n    B, H, W, dim = q.shape\n    rel_size = rel_k.shape[0]\n    win_size = (rel_size + 1) // 2\n\n    x = (q @ rel_k.transpose(-1, -2))\n    x = x.reshape(-1, W, rel_size)\n\n    # pad to shift from relative to absolute indexing\n    x_pad = F.pad(x, [0, 1]).flatten(1)\n    x_pad = F.pad(x_pad, [0, rel_size - W])\n\n    # reshape and slice out the padded elements\n    x_pad = x_pad.reshape(-1, W + 1, rel_size)\n    x = x_pad[:, :W, win_size - 1:]\n\n    # reshape and tile\n    x = x.reshape(B, H, 1, W, win_size).expand(-1, -1, win_size, -1, -1)\n    return x.permute(permute_mask)\n\n\nclass PosEmbedRel(nn.Module):\n    \"\"\" Relative 
Position Embedding\n    As per: https://gist.github.com/aravindsrinivas/56359b79f0ce4449bcb04ab4b56a57a2\n    Originally from: `Attention Augmented Convolutional Networks` - https://arxiv.org/abs/1904.09925\n\n    \"\"\"\n    def __init__(\n            self,\n            block_size: int,\n            win_size: int,\n            dim_head: int,\n            scale: float,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            block_size: block size\n            win_size: neighbourhood window size\n            dim_head: attention head dim\n            scale: scale factor (for init)\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.block_size = block_size\n        self.dim_head = dim_head\n        self.scale = scale\n\n        self.height_rel = nn.Parameter(torch.empty(win_size * 2 - 1, dim_head, **dd))\n        self.width_rel = nn.Parameter(torch.empty(win_size * 2 - 1, dim_head, **dd))\n\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        torch.nn.init.normal_(self.height_rel, std=self.scale)\n        torch.nn.init.normal_(self.width_rel, std=self.scale)\n\n    def forward(self, q):\n        B, BB, HW, _ = q.shape\n\n        # relative logits in width dimension.\n        q = q.reshape(-1, self.block_size, self.block_size, self.dim_head)\n        rel_logits_w = rel_logits_1d(q, self.width_rel, permute_mask=(0, 1, 3, 2, 4))\n\n        # relative logits in height dimension.\n        q = q.transpose(1, 2)\n        rel_logits_h = rel_logits_1d(q, self.height_rel, permute_mask=(0, 3, 1, 4, 2))\n\n        rel_logits = rel_logits_h + rel_logits_w\n        rel_logits = rel_logits.reshape(B, BB, HW, -1)\n        return rel_logits\n\n\nclass HaloAttn(nn.Module):\n    \"\"\" Halo Attention\n\n    Paper: `Scaling Local Self-Attention for Parameter Efficient Visual Backbones`\n        - https://arxiv.org/abs/2103.12731\n\n    The internal dimensions of 
the attention module are controlled by the interaction of several arguments.\n      * the output dimension of the module is specified by dim_out, which falls back to input dim if not set\n      * the value (v) dimension is set to dim_out // num_heads, the v projection determines the output dim\n      * the query and key (qk) dimensions are determined by\n        * num_heads * dim_head if dim_head is not None\n        * num_heads * (dim_out * attn_ratio // num_heads) if dim_head is None\n      * as seen above, attn_ratio determines the ratio of q and k relative to the output if dim_head not used\n\n    Args:\n        dim (int): input dimension to the module\n        dim_out (int): output dimension of the module, same as dim if not set\n        feat_size (Tuple[int, int]): size of input feature_map (not used, for arg compat with bottle/lambda)\n        stride: output stride of the module, query downscaled if > 1 (default: 1).\n        num_heads: parallel attention heads (default: 8).\n        dim_head: dimension of query and key heads, calculated from dim_out * attn_ratio // num_heads if not set\n        block_size (int): size of blocks. (default: 8)\n        halo_size (int): size of halo overlap. (default: 3)\n        qk_ratio (float): ratio of q and k dimensions to output dimension when dim_head not set. 
(default: 1.0)\n        qkv_bias (bool) : add bias to q, k, and v projections\n        avg_down (bool): use average pool downsample instead of strided query blocks\n        scale_pos_embed (bool): scale the position embedding as well as Q @ K\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            dim_out: Optional[int] = None,\n            feat_size: Optional[Tuple[int, int]] = None,\n            stride: int = 1,\n            num_heads: int = 8,\n            dim_head: Optional[int] = None,\n            block_size: int = 8,\n            halo_size: int = 3,\n            qk_ratio: float = 1.0,\n            qkv_bias: bool = False,\n            avg_down: bool = False,\n            scale_pos_embed: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        dim_out = dim_out or dim\n        assert dim_out % num_heads == 0\n        assert stride in (1, 2)\n        self.num_heads = num_heads\n        self.dim_head_qk = dim_head or make_divisible(dim_out * qk_ratio, divisor=8) // num_heads\n        self.dim_head_v = dim_out // self.num_heads\n        self.dim_out_qk = num_heads * self.dim_head_qk\n        self.dim_out_v = num_heads * self.dim_head_v\n        self.scale = self.dim_head_qk ** -0.5\n        self.scale_pos_embed = scale_pos_embed\n        self.block_size = self.block_size_ds = block_size\n        self.halo_size = halo_size\n        self.win_size = block_size + halo_size * 2  # neighbourhood window size\n        self.block_stride = 1\n        use_avg_pool = False\n        if stride > 1:\n            use_avg_pool = avg_down or block_size % stride != 0\n            self.block_stride = 1 if use_avg_pool else stride\n            self.block_size_ds = self.block_size // self.block_stride\n\n        # FIXME not clear if this stride behaviour is what the paper intended\n        # Also, the paper mentions using a 3D conv for dealing with the 
blocking/gather, and leaving\n        # data in unfolded block form. I haven't wrapped my head around how that'd look.\n        self.q = nn.Conv2d(dim, self.dim_out_qk, 1, stride=self.block_stride, bias=qkv_bias, **dd)\n        self.kv = nn.Conv2d(dim, self.dim_out_qk + self.dim_out_v, 1, bias=qkv_bias, **dd)\n\n        self.pos_embed = PosEmbedRel(\n            block_size=self.block_size_ds,\n            win_size=self.win_size,\n            dim_head=self.dim_head_qk,\n            scale=self.scale,\n            **dd,\n        )\n\n        self.pool = nn.AvgPool2d(2, 2) if use_avg_pool else nn.Identity()\n\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        std = self.q.weight.shape[1] ** -0.5  # fan-in\n        trunc_normal_(self.q.weight, std=std)\n        trunc_normal_(self.kv.weight, std=std)\n        trunc_normal_(self.pos_embed.height_rel, std=self.scale)\n        trunc_normal_(self.pos_embed.width_rel, std=self.scale)\n\n    def forward(self, x):\n        B, C, H, W = x.shape\n        _assert(H % self.block_size == 0, '')\n        _assert(W % self.block_size == 0, '')\n        num_h_blocks = H // self.block_size\n        num_w_blocks = W // self.block_size\n        num_blocks = num_h_blocks * num_w_blocks\n\n        q = self.q(x)\n        # unfold\n        q = q.reshape(\n            -1, self.dim_head_qk,\n            num_h_blocks, self.block_size_ds, num_w_blocks, self.block_size_ds).permute(0, 1, 3, 5, 2, 4)\n        # B, num_heads * dim_head * block_size ** 2, num_blocks\n        q = q.reshape(B * self.num_heads, self.dim_head_qk, -1, num_blocks).transpose(1, 3)\n        # B * num_heads, num_blocks, block_size ** 2, dim_head\n\n        kv = self.kv(x)\n        # Generate overlapping windows for kv. This approach is good for GPU and CPU. However, unfold() is not\n        # lowered for PyTorch XLA so it will be very slow. 
See code at bottom of file for XLA friendly approach.\n        # FIXME figure out how to switch impl between this and conv2d if XLA being used.\n        kv = F.pad(kv, [self.halo_size, self.halo_size, self.halo_size, self.halo_size])\n        kv = kv.unfold(2, self.win_size, self.block_size).unfold(3, self.win_size, self.block_size).reshape(\n            B * self.num_heads, self.dim_head_qk + self.dim_head_v, num_blocks, -1).permute(0, 2, 3, 1)\n        k, v = torch.split(kv, [self.dim_head_qk, self.dim_head_v], dim=-1)\n        # B * num_heads, num_blocks, win_size ** 2, dim_head_qk or dim_head_v\n\n        if self.scale_pos_embed:\n            attn = (q @ k.transpose(-1, -2) + self.pos_embed(q)) * self.scale\n        else:\n            attn = (q @ k.transpose(-1, -2)) * self.scale + self.pos_embed(q)\n        # B * num_heads, num_blocks, block_size ** 2, win_size ** 2\n        attn = attn.softmax(dim=-1)\n\n        out = (attn @ v).transpose(1, 3)  # B * num_heads, dim_head_v, block_size ** 2, num_blocks\n        # fold\n        out = out.reshape(-1, self.block_size_ds, self.block_size_ds, num_h_blocks, num_w_blocks)\n        out = out.permute(0, 3, 1, 4, 2).contiguous().view(\n            B, self.dim_out_v, H // self.block_stride, W // self.block_stride)\n        # B, dim_out, H // block_stride, W // block_stride\n        out = self.pool(out)\n        return out\n\n\n\"\"\" Three alternatives for overlapping windows.\n\n`.unfold().unfold()` is same speed as stride tricks with similar clarity as F.unfold()\n\n    if is_xla:\n        # This code achieves haloing on PyTorch XLA with reasonable runtime trade-off, it is\n        # EXTREMELY slow for backward on a GPU though so I need a way of selecting based on environment.\n        WW = self.win_size ** 2\n        pw = torch.eye(WW, dtype=x.dtype, device=x.device).reshape(WW, 1, self.win_size, self.win_size)\n        kv = F.conv2d(kv.reshape(-1, 1, H, W), pw, stride=self.block_size, padding=self.halo_size)\n    elif 
self.stride_tricks:\n        kv = F.pad(kv, [self.halo_size, self.halo_size, self.halo_size, self.halo_size]).contiguous()\n        kv = kv.as_strided((\n            B, self.dim_out_qk + self.dim_out_v, self.win_size, self.win_size, num_h_blocks, num_w_blocks),\n            stride=(kv.stride(0), kv.stride(1), kv.shape[-1], 1, self.block_size * kv.shape[-1], self.block_size))\n    else:\n        kv = F.unfold(kv, kernel_size=self.win_size, stride=self.block_size, padding=self.halo_size)\n\n    kv = kv.reshape(\n       B * self.num_heads, self.dim_head_qk + self.dim_head_v, -1, num_blocks).transpose(1, 3)\n\"\"\"\n"
  },
  {
    "path": "timm/layers/helpers.py",
    "content": "\"\"\" Layer/Module Helpers\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom itertools import repeat\nimport collections.abc\n\n\n# From PyTorch internals\ndef _ntuple(n):\n    \"\"\"Return a function that converts input to an n-tuple.\n\n    Scalar values are repeated n times, while iterables are converted to tuples.\n    Strings are treated as scalars to avoid character-level splitting.\n\n    Args:\n        n: Target tuple length.\n\n    Returns:\n        Function that converts input to n-tuple.\n    \"\"\"\n    def parse(x):\n        if isinstance(x, collections.abc.Iterable) and not isinstance(x, str):\n            return tuple(x)\n        return tuple(repeat(x, n))\n    return parse\n\n\nto_1tuple = _ntuple(1)\nto_2tuple = _ntuple(2)\nto_3tuple = _ntuple(3)\nto_4tuple = _ntuple(4)\nto_ntuple = _ntuple\n\n\ndef make_divisible(v, divisor=8, min_value=None, round_limit=.9):\n    \"\"\"Adjust value to be divisible by a divisor, typically for channel counts.\n\n    Rounds to the nearest multiple of divisor while ensuring the result doesn't\n    fall below min_value or decrease by more than (1 - round_limit).\n\n    Args:\n        v: Value to adjust.\n        divisor: Target divisor.\n        min_value: Minimum acceptable value.\n        round_limit: Prevent decrease beyond this fraction of original value.\n\n    Returns:\n        Adjusted value divisible by divisor.\n    \"\"\"\n    min_value = min_value or divisor\n    new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)\n    # Make sure that round down does not go down by more than 10%.\n    if new_v < round_limit * v:\n        new_v += divisor\n    return new_v\n\n\ndef extend_tuple(x, n):\n    \"\"\"Pad a tuple to length n by repeating the last value.\n\n    If input is shorter than n, extends by repeating the last element.\n    If input is longer than n, truncates to n.\n\n    Args:\n        x: Input value, tuple, or list.\n        n: Target length.\n\n    
Returns:\n        Tuple of length n.\n    \"\"\"\n    if not isinstance(x, (tuple, list)):\n        x = (x,)\n    else:\n        x = tuple(x)\n    pad_n = n - len(x)\n    if pad_n <= 0:\n        return x[:n]\n    return x + (x[-1],) * pad_n\n"
  },
  {
    "path": "timm/layers/hybrid_embed.py",
    "content": "\"\"\" Image to Patch Hybird Embedding Layer\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport logging\nimport math\nfrom typing import List, Optional, Tuple, Union\n\nimport torch\nfrom torch import nn as nn\nimport torch.nn.functional as F\n\nfrom .format import Format, nchw_to\nfrom .helpers import to_2tuple\nfrom .patch_embed import resample_patch_embed\n\n\n_logger = logging.getLogger(__name__)\n\n\nclass HybridEmbed(nn.Module):\n    \"\"\" CNN Feature Map Embedding\n    Extract feature map from CNN, flatten, project to embedding dim.\n    \"\"\"\n    output_fmt: Format\n    dynamic_img_pad: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            backbone: nn.Module,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            patch_size: Union[int, Tuple[int, int]] = 1,\n            feature_size: Optional[Union[int, Tuple[int, int]]] = None,\n            feature_ratio: Optional[Union[int, Tuple[int, int]]] = None,\n            in_chans: int = 3,\n            embed_dim: int = 768,\n            bias: bool = True,\n            proj: bool = True,\n            flatten: bool = True,\n            output_fmt: Optional[str] = None,\n            strict_img_size: bool = True,\n            dynamic_img_pad: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert isinstance(backbone, nn.Module)\n        self.backbone = backbone\n        self.in_chans = in_chans\n        (\n            self.img_size,\n            self.patch_size,\n            self.feature_size,\n            self.feature_ratio,\n            self.feature_dim,\n            self.grid_size,\n            self.num_patches,\n        ) = self._init_backbone(\n            img_size=img_size,\n            patch_size=patch_size,\n            feature_size=feature_size,\n            feature_ratio=feature_ratio,\n            **dd,\n        )\n\n        
if output_fmt is not None:\n            self.flatten = False\n            self.output_fmt = Format(output_fmt)\n        else:\n            # flatten spatial dim and transpose to channels last, kept for bwd compat\n            self.flatten = flatten\n            self.output_fmt = Format.NCHW\n        self.strict_img_size = strict_img_size\n        self.dynamic_img_pad = dynamic_img_pad\n        if not dynamic_img_pad:\n            assert self.feature_size[0] % self.patch_size[0] == 0 and self.feature_size[1] % self.patch_size[1] == 0\n\n        if proj:\n            self.proj = nn.Conv2d(\n                self.feature_dim,\n                embed_dim,\n                kernel_size=patch_size,\n                stride=patch_size,\n                bias=bias,\n                **dd,\n            )\n        else:\n            assert self.feature_dim == embed_dim, \\\n                f'The feature dim ({self.feature_dim}) must match embed dim ({embed_dim}) when projection disabled.'\n            self.proj = nn.Identity()\n\n    def _init_backbone(\n            self,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            patch_size: Union[int, Tuple[int, int]] = 1,\n            feature_size: Optional[Union[int, Tuple[int, int]]] = None,\n            feature_ratio: Optional[Union[int, Tuple[int, int]]] = None,\n            feature_dim: Optional[int] = None,\n            device=None,\n            dtype=None,\n    ):\n        img_size = to_2tuple(img_size)\n        patch_size = to_2tuple(patch_size)\n        if feature_size is None:\n            with torch.no_grad():\n                # NOTE Most reliable way of determining output dims is to run forward pass\n                training = self.backbone.training\n                if training:\n                    self.backbone.eval()\n                # FIXME whatif meta device?\n                o = self.backbone(torch.zeros(1, self.in_chans, img_size[0], img_size[1], device=device, dtype=dtype))\n                if 
isinstance(o, (list, tuple)):\n                    o = o[-1]  # last feature if backbone outputs list/tuple of features\n                feature_size = o.shape[-2:]\n                feature_dim = o.shape[1]\n                self.backbone.train(training)\n            feature_ratio = tuple([s // f for s, f in zip(img_size, feature_size)])\n        else:\n            feature_size = to_2tuple(feature_size)\n            feature_ratio = to_2tuple(feature_ratio or 16)\n            if feature_dim is None:\n                if hasattr(self.backbone, 'feature_info'):\n                    feature_dim = self.backbone.feature_info.channels()[-1]\n                else:\n                    feature_dim = self.backbone.num_features\n        grid_size = tuple([f // p for f, p in zip(feature_size, patch_size)])\n        num_patches = grid_size[0] * grid_size[1]\n        return img_size, patch_size, feature_size, feature_ratio, feature_dim, grid_size, num_patches\n\n    def set_input_size(\n            self,\n            img_size: Optional[Union[int, Tuple[int, int]]] = None,\n            patch_size: Optional[Union[int, Tuple[int, int]]] = None,\n            feature_size: Optional[Union[int, Tuple[int, int]]] = None,\n            feature_ratio: Optional[Union[int, Tuple[int, int]]] = None,\n            feature_dim: Optional[int] = None,\n    ):\n        assert img_size is not None or patch_size is not None\n        img_size = img_size or self.img_size\n        new_patch_size = None\n        if patch_size is not None:\n            new_patch_size = to_2tuple(patch_size)\n        if new_patch_size is not None and new_patch_size != self.patch_size:\n            assert isinstance(self.proj, nn.Conv2d), 'HybridEmbed must have a projection layer to change patch size.'\n            with torch.no_grad():\n                new_proj = nn.Conv2d(\n                    self.proj.in_channels,\n                    self.proj.out_channels,\n                    kernel_size=new_patch_size,\n               
     stride=new_patch_size,\n                    bias=self.proj.bias is not None,\n                    device=self.proj.device,\n                    dtype=self.proj.dtype,\n                )\n                new_proj.weight.copy_(resample_patch_embed(self.proj.weight, new_patch_size, verbose=True))\n                if self.proj.bias is not None:\n                    new_proj.bias.copy_(self.proj.bias)\n                self.proj = new_proj\n            patch_size = new_patch_size\n        patch_size = patch_size or self.patch_size\n\n        if img_size != self.img_size or patch_size != self.patch_size:\n            (\n                self.img_size,\n                self.patch_size,\n                self.feature_size,\n                self.feature_ratio,\n                self.feature_dim,\n                self.grid_size,\n                self.num_patches,\n            ) = self._init_backbone(\n                img_size=img_size,\n                patch_size=patch_size,\n                feature_size=feature_size,\n                feature_ratio=feature_ratio,\n                feature_dim=feature_dim,\n                # FIXME device/dtype?\n            )\n\n    def feat_ratio(self, as_scalar=True) -> Union[Tuple[int, int], int]:\n        total_reduction = (\n            self.feature_ratio[0] * self.patch_size[0],\n            self.feature_ratio[1] * self.patch_size[1]\n        )\n        if as_scalar:\n            return max(total_reduction)\n        else:\n            return total_reduction\n\n    def dynamic_feat_size(self, img_size: Tuple[int, int]) -> Tuple[int, int]:\n        \"\"\" Get feature grid size taking account dynamic padding and backbone network feat reduction\n        \"\"\"\n        feat_size = (img_size[0] // self.feature_ratio[0], img_size[1] // self.feature_ratio[1])\n        if self.dynamic_img_pad:\n            return math.ceil(feat_size[0] / self.patch_size[0]), math.ceil(feat_size[1] / self.patch_size[1])\n        else:\n            return 
feat_size[0] // self.patch_size[0], feat_size[1] // self.patch_size[1]\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True):\n        if hasattr(self.backbone, 'set_grad_checkpointing'):\n            self.backbone.set_grad_checkpointing(enable=enable)\n        elif hasattr(self.backbone, 'grad_checkpointing'):\n            self.backbone.grad_checkpointing = enable\n\n    def forward(self, x):\n        x = self.backbone(x)\n        if isinstance(x, (list, tuple)):\n            x = x[-1]  # last feature if backbone outputs list/tuple of features\n        _, _, H, W = x.shape\n        if self.dynamic_img_pad:\n            pad_h = (self.patch_size[0] - H % self.patch_size[0]) % self.patch_size[0]\n            pad_w = (self.patch_size[1] - W % self.patch_size[1]) % self.patch_size[1]\n            x = F.pad(x, (0, pad_w, 0, pad_h))\n        x = self.proj(x)\n        if self.flatten:\n            x = x.flatten(2).transpose(1, 2)  # NCHW -> NLC\n        elif self.output_fmt != Format.NCHW:\n            x = nchw_to(x, self.output_fmt)\n        return x\n\n\nclass HybridEmbedWithSize(HybridEmbed):\n    \"\"\" CNN Feature Map Embedding\n    Extract feature map from CNN, flatten, project to embedding dim.\n    \"\"\"\n    def __init__(\n            self,\n            backbone: nn.Module,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            patch_size: Union[int, Tuple[int, int]] = 1,\n            feature_size: Optional[Union[int, Tuple[int, int]]] = None,\n            feature_ratio: Optional[Union[int, Tuple[int, int]]] = None,\n            in_chans: int = 3,\n            embed_dim: int = 768,\n            bias=True,\n            proj=True,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__(\n            backbone=backbone,\n            img_size=img_size,\n            patch_size=patch_size,\n            feature_size=feature_size,\n            feature_ratio=feature_ratio,\n            
in_chans=in_chans,\n            embed_dim=embed_dim,\n            bias=bias,\n            proj=proj,\n            device=device,\n            dtype=dtype,\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True):\n        if hasattr(self.backbone, 'set_grad_checkpointing'):\n            self.backbone.set_grad_checkpointing(enable=enable)\n        elif hasattr(self.backbone, 'grad_checkpointing'):\n            self.backbone.grad_checkpointing = enable\n\n    def forward(self, x) -> Tuple[torch.Tensor, List[int]]:\n        x = self.backbone(x)\n        if isinstance(x, (list, tuple)):\n            x = x[-1]  # last feature if backbone outputs list/tuple of features\n        x = self.proj(x)\n        return x.flatten(2).transpose(1, 2), x.shape[-2:]"
  },
  {
    "path": "timm/layers/inplace_abn.py",
    "content": "import torch\nfrom torch import nn as nn\n\ntry:\n    from inplace_abn.functions import inplace_abn, inplace_abn_sync\n    has_iabn = True\nexcept ImportError:\n    has_iabn = False\n\n    def inplace_abn(x, weight, bias, running_mean, running_var,\n                    training=True, momentum=0.1, eps=1e-05, activation=\"leaky_relu\", activation_param=0.01):\n        raise ImportError(\n            \"Please install InplaceABN:'pip install git+https://github.com/mapillary/inplace_abn.git@v1.0.12'\")\n\n    def inplace_abn_sync(**kwargs):\n        inplace_abn(**kwargs)\n\nfrom ._fx import register_notrace_module\n\n\n@register_notrace_module\nclass InplaceAbn(nn.Module):\n    \"\"\"Activated Batch Normalization\n\n    This gathers a BatchNorm and an activation function in a single module\n\n    Parameters\n    ----------\n    num_features : int\n        Number of feature channels in the input and output.\n    eps : float\n        Small constant to prevent numerical issues.\n    momentum : float\n        Momentum factor applied to compute running statistics.\n    affine : bool\n        If `True` apply learned scale and shift transformation after normalization.\n    act_layer : str or nn.Module type\n        Name or type of the activation functions, one of: `leaky_relu`, `elu`\n    act_param : float\n        Negative slope for the `leaky_relu` activation.\n    \"\"\"\n\n    def __init__(\n            self,\n            num_features,\n            eps=1e-5,\n            momentum=0.1,\n            affine=True,\n            apply_act=True,\n            act_layer=\"leaky_relu\",\n            act_param=0.01,\n            drop_layer=None,\n    ):\n        super().__init__()\n        self.num_features = num_features\n        self.affine = affine\n        self.eps = eps\n        self.momentum = momentum\n        if apply_act:\n            if isinstance(act_layer, str):\n                assert act_layer in ('leaky_relu', 'elu', 'identity', '')\n                
self.act_name = act_layer if act_layer else 'identity'\n            else:\n                # convert act layer passed as type to string\n                if act_layer == nn.ELU:\n                    self.act_name = 'elu'\n                elif act_layer == nn.LeakyReLU:\n                    self.act_name = 'leaky_relu'\n                elif act_layer is None or act_layer == nn.Identity:\n                    self.act_name = 'identity'\n                else:\n                    assert False, f'Invalid act layer {act_layer.__name__} for IABN'\n        else:\n            self.act_name = 'identity'\n        self.act_param = act_param\n        if self.affine:\n            self.weight = nn.Parameter(torch.ones(num_features))\n            self.bias = nn.Parameter(torch.zeros(num_features))\n        else:\n            self.register_parameter('weight', None)\n            self.register_parameter('bias', None)\n        self.register_buffer('running_mean', torch.zeros(num_features))\n        self.register_buffer('running_var', torch.ones(num_features))\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        nn.init.constant_(self.running_mean, 0)\n        nn.init.constant_(self.running_var, 1)\n        if self.affine:\n            nn.init.constant_(self.weight, 1)\n            nn.init.constant_(self.bias, 0)\n\n    def forward(self, x):\n        output = inplace_abn(\n            x, self.weight, self.bias, self.running_mean, self.running_var,\n            self.training, self.momentum, self.eps, self.act_name, self.act_param)\n        if isinstance(output, tuple):\n            output = output[0]\n        return output\n"
  },
  {
    "path": "timm/layers/interpolate.py",
    "content": "\"\"\" Interpolation helpers for timm layers\n\nRegularGridInterpolator from https://github.com/sbarratt/torch_interpolations\nCopyright Shane Barratt, Apache 2.0 license\n\"\"\"\nimport torch\nfrom itertools import product\n\n\nclass RegularGridInterpolator:\n    \"\"\" Interpolate data defined on a rectilinear grid with even or uneven spacing.\n    Produces similar results to scipy RegularGridInterpolator or interp2d\n    in 'linear' mode.\n\n    Taken from https://github.com/sbarratt/torch_interpolations\n    \"\"\"\n\n    def __init__(self, points, values):\n        self.points = points\n        self.values = values\n\n        assert isinstance(self.points, tuple) or isinstance(self.points, list)\n        assert isinstance(self.values, torch.Tensor)\n\n        self.ms = list(self.values.shape)\n        self.n = len(self.points)\n\n        assert len(self.ms) == self.n\n\n        for i, p in enumerate(self.points):\n            assert isinstance(p, torch.Tensor)\n            assert p.shape[0] == self.values.shape[i]\n\n    def __call__(self, points_to_interp):\n        assert self.points is not None\n        assert self.values is not None\n\n        assert len(points_to_interp) == len(self.points)\n        K = points_to_interp[0].shape[0]\n        for x in points_to_interp:\n            assert x.shape[0] == K\n\n        idxs = []\n        dists = []\n        overalls = []\n        for p, x in zip(self.points, points_to_interp):\n            idx_right = torch.bucketize(x, p)\n            idx_right[idx_right >= p.shape[0]] = p.shape[0] - 1\n            idx_left = (idx_right - 1).clamp(0, p.shape[0] - 1)\n            dist_left = x - p[idx_left]\n            dist_right = p[idx_right] - x\n            dist_left[dist_left < 0] = 0.\n            dist_right[dist_right < 0] = 0.\n            both_zero = (dist_left == 0) & (dist_right == 0)\n            dist_left[both_zero] = dist_right[both_zero] = 1.\n\n            idxs.append((idx_left, idx_right))\n    
        dists.append((dist_left, dist_right))\n            overalls.append(dist_left + dist_right)\n\n        numerator = 0.\n        for indexer in product([0, 1], repeat=self.n):\n            as_s = [idx[onoff] for onoff, idx in zip(indexer, idxs)]\n            bs_s = [dist[1 - onoff] for onoff, dist in zip(indexer, dists)]\n            numerator += self.values[as_s] * \\\n                torch.prod(torch.stack(bs_s), dim=0)\n        denominator = torch.prod(torch.stack(overalls), dim=0)\n        return numerator / denominator\n"
  },
  {
    "path": "timm/layers/lambda_layer.py",
    "content": "\"\"\" Lambda Layer\n\nPaper: `LambdaNetworks: Modeling Long-Range Interactions Without Attention`\n    - https://arxiv.org/abs/2102.08602\n\n@misc{2102.08602,\nAuthor = {Irwan Bello},\nTitle = {LambdaNetworks: Modeling Long-Range Interactions Without Attention},\nYear = {2021},\n}\n\nStatus:\nThis impl is a WIP. Code snippets in the paper were used as reference but\ngood chance some details are missing/wrong.\n\nI've only implemented local lambda conv based pos embeddings.\n\nFor a PyTorch impl that includes other embedding options checkout\nhttps://github.com/lucidrains/lambda-networks\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nfrom typing import Optional, Tuple\n\nimport torch\nfrom torch import nn\nimport torch.nn.functional as F\n\nfrom .grid import ndgrid\nfrom .helpers import to_2tuple, make_divisible\nfrom .weight_init import trunc_normal_\n\n\ndef rel_pos_indices(size, device=None):\n    size = to_2tuple(size)\n    pos = torch.stack(ndgrid(\n        torch.arange(size[0], device=device, dtype=torch.long),\n        torch.arange(size[1], device=device, dtype=torch.long),\n    )).flatten(1)\n    rel_pos = pos[:, None, :] - pos[:, :, None]\n    rel_pos[0] += size[0] - 1\n    rel_pos[1] += size[1] - 1\n    return rel_pos  # 2, H * W, H * W\n\n\nclass LambdaLayer(nn.Module):\n    \"\"\"Lambda Layer\n\n    Paper: `LambdaNetworks: Modeling Long-Range Interactions Without Attention`\n        - https://arxiv.org/abs/2102.08602\n\n    NOTE: intra-depth parameter 'u' is fixed at 1. 
It did not appear worth the complexity to add.\n\n    The internal dimensions of the lambda module are controlled via the interaction of several arguments.\n      * the output dimension of the module is specified by dim_out, which falls back to input dim if not set\n      * the value (v) dimension is set to dim_out // num_heads, the v projection determines the output dim\n      * the query (q) and key (k) dimension are determined by\n        * dim_head = (dim_out * attn_ratio // num_heads) if dim_head is None\n        * q = num_heads * dim_head, k = dim_head\n      * as seen above, attn_ratio determines the ratio of q and k relative to the output if dim_head not set\n\n    Args:\n        dim: input dimension to the module\n        dim_out: output dimension of the module, same as dim if not set\n        feat_size: size of input feature_map for relative pos variant H, W\n        stride: output stride of the module, avg pool used if stride == 2\n        num_heads: parallel attention heads.\n        dim_head: dimension of query and key heads, calculated from dim_out * attn_ratio // num_heads if not set\n        r: local lambda convolution radius. Use lambda conv if set, else relative pos if not. (default: 9)\n        qk_ratio: ratio of q and k dimensions to output dimension when dim_head not set. 
(default: 1.0)\n        qkv_bias: add bias to q, k, and v projections\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            dim_out: Optional[int] = None,\n            feat_size: Optional[Tuple[int, int]] = None,\n            stride: int = 1,\n            num_heads: int = 4,\n            dim_head: int = 16,\n            r: int = 9,\n            qk_ratio: float = 1.0,\n            qkv_bias: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        dim_out = dim_out or dim\n        assert dim_out % num_heads == 0, ' should be divided by num_heads'\n        self.dim_qk = dim_head or make_divisible(dim_out * qk_ratio, divisor=8) // num_heads\n        self.num_heads = num_heads\n        self.dim_v = dim_out // num_heads\n\n        self.qkv = nn.Conv2d(\n            dim,\n            num_heads * self.dim_qk + self.dim_qk + self.dim_v,\n            kernel_size=1,\n            bias=qkv_bias,\n            **dd,\n        )\n        self.norm_q = nn.BatchNorm2d(num_heads * self.dim_qk, **dd)\n        self.norm_v = nn.BatchNorm2d(self.dim_v, **dd)\n\n        if r is not None:\n            # local lambda convolution for pos\n            self.conv_lambda = nn.Conv3d(1, self.dim_qk, (r, r, 1), padding=(r // 2, r // 2, 0), **dd)\n            self.pos_emb = None\n            self.rel_pos_indices = None\n            self.feat_size = None\n        else:\n            # relative pos embedding\n            assert feat_size is not None\n            feat_size = to_2tuple(feat_size)\n            self.feat_size = feat_size\n            rel_size = [2 * s - 1 for s in feat_size]\n            M = feat_size[0] * feat_size[1]\n            self.conv_lambda = None\n            self.pos_emb = nn.Parameter(torch.empty(rel_size[0], rel_size[1], self.dim_qk, **dd))\n            self.register_buffer(\n                'rel_pos_indices',\n                torch.empty((2, M, 
M), device=device, dtype=torch.long),\n                persistent=False,\n            )\n\n        self.pool = nn.AvgPool2d(2, 2) if stride == 2 else nn.Identity()\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        trunc_normal_(self.qkv.weight, std=self.qkv.weight.shape[1] ** -0.5)  # fan-in\n        if self.conv_lambda is not None:\n            trunc_normal_(self.conv_lambda.weight, std=self.dim_qk ** -0.5)\n        if self.pos_emb is not None:\n            trunc_normal_(self.pos_emb, std=.02)\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        if self.rel_pos_indices is not None:\n            self.rel_pos_indices.copy_(\n                rel_pos_indices(self.feat_size, device=self.rel_pos_indices.device)\n            )\n\n    def forward(self, x):\n        B, C, H, W = x.shape\n        M = H * W\n        qkv = self.qkv(x)\n        q, k, v = torch.split(qkv, [\n            self.num_heads * self.dim_qk, self.dim_qk, self.dim_v], dim=1)\n        q = self.norm_q(q).reshape(B, self.num_heads, self.dim_qk, M).transpose(-1, -2)  # B, num_heads, M, K\n        v = self.norm_v(v).reshape(B, self.dim_v, M).transpose(-1, -2)  # B, M, V\n        k = F.softmax(k.reshape(B, self.dim_qk, M), dim=-1)  # B, K, M\n\n        content_lam = k @ v  # B, K, V\n        content_out = q @ content_lam.unsqueeze(1)  # B, num_heads, M, V\n\n        if self.pos_emb is None:\n            position_lam = self.conv_lambda(v.reshape(B, 1, H, W, self.dim_v))  # B, H, W, V, K\n            position_lam = position_lam.reshape(B, 1, self.dim_qk, H * W, self.dim_v).transpose(2, 3)  # B, 1, M, K, V\n        else:\n            # FIXME relative pos embedding path not fully verified\n            pos_emb = self.pos_emb[self.rel_pos_indices[0], 
self.rel_pos_indices[1]].expand(B, -1, -1, -1)\n            position_lam = (pos_emb.transpose(-1, -2) @ v.unsqueeze(1)).unsqueeze(1)  # B, 1, M, K, V\n        position_out = (q.unsqueeze(-2) @ position_lam).squeeze(-2)  # B, num_heads, M, V\n\n        out = (content_out + position_out).transpose(-1, -2).reshape(B, C, H, W)  # B, C (num_heads * V), H, W\n        out = self.pool(out)\n        return out\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n"
  },
  {
    "path": "timm/layers/layer_scale.py",
    "content": "import torch\nfrom torch import nn\n\n\nclass LayerScale(nn.Module):\n    \"\"\" LayerScale on tensors with channels in last-dim.\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            init_values: float = 1e-5,\n            inplace: bool = False,\n            device=None,\n            dtype=None,\n    ) -> None:\n        super().__init__()\n        self.init_values = init_values\n        self.inplace = inplace\n        self.gamma = nn.Parameter(torch.empty(dim, device=device, dtype=dtype))\n\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        torch.nn.init.constant_(self.gamma, self.init_values)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        return x.mul_(self.gamma) if self.inplace else x * self.gamma\n\n\nclass LayerScale2d(nn.Module):\n    \"\"\" LayerScale for tensors with torch 2D NCHW layout.\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            init_values: float = 1e-5,\n            inplace: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        self.init_values = init_values\n        self.inplace = inplace\n        self.gamma = nn.Parameter(torch.empty(dim, device=device, dtype=dtype))\n\n        self.reset_parameters()\n\n    def reset_parameters(self):\n        torch.nn.init.constant_(self.gamma, self.init_values)\n\n    def forward(self, x):\n        gamma = self.gamma.view(1, -1, 1, 1)\n        return x.mul_(gamma) if self.inplace else x * gamma\n\n"
  },
  {
    "path": "timm/layers/linear.py",
    "content": "\"\"\" Linear layer (alternate definition)\n\"\"\"\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn as nn\n\n\nclass Linear(nn.Linear):\n    r\"\"\"Applies a linear transformation to the incoming data: :math:`y = xA^T + b`\n\n    Wraps torch.nn.Linear to support AMP + torchscript usage by manually casting\n    weight & bias to input.dtype to work around an issue w/ torch.addmm in this use case.\n    \"\"\"\n    def forward(self, input: torch.Tensor) -> torch.Tensor:\n        if torch.jit.is_scripting():\n            bias = self.bias.to(dtype=input.dtype) if self.bias is not None else None\n            return F.linear(input, self.weight.to(dtype=input.dtype), bias=bias)\n        else:\n            return F.linear(input, self.weight, self.bias)\n"
  },
  {
    "path": "timm/layers/median_pool.py",
    "content": "\"\"\" Median Pool\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom .helpers import to_2tuple, to_4tuple\n\n\nclass MedianPool2d(nn.Module):\n    \"\"\" Median pool (usable as median filter when stride=1) module.\n\n    Args:\n         kernel_size: size of pooling kernel, int or 2-tuple\n         stride: pool stride, int or 2-tuple\n         padding: pool padding, int or 4-tuple (l, r, t, b) as in pytorch F.pad\n         same: override padding and enforce same padding, boolean\n    \"\"\"\n    def __init__(self, kernel_size=3, stride=1, padding=0, same=False):\n        super().__init__()\n        self.k = to_2tuple(kernel_size)\n        self.stride = to_2tuple(stride)\n        self.padding = to_4tuple(padding)  # convert to l, r, t, b\n        self.same = same\n\n    def _padding(self, x):\n        if self.same:\n            ih, iw = x.size()[2:]\n            if ih % self.stride[0] == 0:\n                ph = max(self.k[0] - self.stride[0], 0)\n            else:\n                ph = max(self.k[0] - (ih % self.stride[0]), 0)\n            if iw % self.stride[1] == 0:\n                pw = max(self.k[1] - self.stride[1], 0)\n            else:\n                pw = max(self.k[1] - (iw % self.stride[1]), 0)\n            pl = pw // 2\n            pr = pw - pl\n            pt = ph // 2\n            pb = ph - pt\n            padding = (pl, pr, pt, pb)\n        else:\n            padding = self.padding\n        return padding\n\n    def forward(self, x):\n        x = F.pad(x, self._padding(x), mode='reflect')\n        x = x.unfold(2, self.k[0], self.stride[0]).unfold(3, self.k[1], self.stride[1])\n        x = x.contiguous().view(x.size()[:4] + (-1,)).median(dim=-1)[0]\n        return x\n"
  },
  {
    "path": "timm/layers/mixed_conv2d.py",
    "content": "\"\"\" PyTorch Mixed Convolution\n\nPaper: MixConv: Mixed Depthwise Convolutional Kernels (https://arxiv.org/abs/1907.09595)\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom typing import List, Union\n\nimport torch\nfrom torch import nn as nn\n\nfrom .conv2d_same import create_conv2d_pad\n\n\ndef _split_channels(num_chan, num_groups):\n    split = [num_chan // num_groups for _ in range(num_groups)]\n    split[0] += num_chan - sum(split)\n    return split\n\n\nclass MixedConv2d(nn.ModuleDict):\n    \"\"\" Mixed Grouped Convolution\n\n    Based on MDConv and GroupedConv in MixNet impl:\n      https://github.com/tensorflow/tpu/blob/master/models/official/mnasnet/mixnet/custom_layers.py\n    \"\"\"\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: Union[int, List[int]] = 3,\n            stride: int = 1,\n            padding: str = '',\n            dilation: int = 1,\n            depthwise: bool = False,\n            **kwargs\n    ):\n        super().__init__()\n\n        kernel_size = kernel_size if isinstance(kernel_size, list) else [kernel_size]\n        num_groups = len(kernel_size)\n        in_splits = _split_channels(in_channels, num_groups)\n        out_splits = _split_channels(out_channels, num_groups)\n        self.in_channels = sum(in_splits)\n        self.out_channels = sum(out_splits)\n        for idx, (k, in_ch, out_ch) in enumerate(zip(kernel_size, in_splits, out_splits)):\n            conv_groups = in_ch if depthwise else 1\n            # use add_module to keep key space clean\n            self.add_module(\n                str(idx),\n                create_conv2d_pad(\n                    in_ch,\n                    out_ch,\n                    k,\n                    stride=stride,\n                    padding=padding,\n                    dilation=dilation,\n                    groups=conv_groups,\n                    **kwargs,\n               
 )\n            )\n        self.splits = in_splits\n\n    def forward(self, x):\n        x_split = torch.split(x, self.splits, 1)\n        x_out = [c(x_split[i]) for i, c in enumerate(self.values())]\n        x = torch.cat(x_out, 1)\n        return x\n"
  },
  {
    "path": "timm/layers/ml_decoder.py",
    "content": "from typing import Optional\n\nimport torch\nfrom torch import nn\nfrom torch import nn, Tensor\nfrom torch.nn.modules.transformer import _get_activation_fn\n\n\ndef add_ml_decoder_head(model):\n    if hasattr(model, 'global_pool') and hasattr(model, 'fc'):  # most CNN models, like Resnet50\n        model.global_pool = nn.Identity()\n        del model.fc\n        num_classes = model.num_classes\n        num_features = model.num_features\n        model.fc = MLDecoder(num_classes=num_classes, initial_num_features=num_features)\n    elif hasattr(model, 'global_pool') and hasattr(model, 'classifier'):  # EfficientNet\n        model.global_pool = nn.Identity()\n        del model.classifier\n        num_classes = model.num_classes\n        num_features = model.num_features\n        model.classifier = MLDecoder(num_classes=num_classes, initial_num_features=num_features)\n    elif 'RegNet' in model._get_name() or 'TResNet' in model._get_name():  # hasattr(model, 'head')\n        del model.head\n        num_classes = model.num_classes\n        num_features = model.num_features\n        model.head = MLDecoder(num_classes=num_classes, initial_num_features=num_features)\n    else:\n        print(\"Model code-writing is not aligned currently with ml-decoder\")\n        exit(-1)\n    if hasattr(model, 'drop_rate'):  # Ml-Decoder has inner dropout\n        model.drop_rate = 0\n    return model\n\n\nclass TransformerDecoderLayerOptimal(nn.Module):\n    def __init__(self, d_model, nhead=8, dim_feedforward=2048, dropout=0.1, activation=\"relu\",\n                 layer_norm_eps=1e-5) -> None:\n        super().__init__()\n        self.norm1 = nn.LayerNorm(d_model, eps=layer_norm_eps)\n        self.dropout = nn.Dropout(dropout)\n        self.dropout1 = nn.Dropout(dropout)\n        self.dropout2 = nn.Dropout(dropout)\n        self.dropout3 = nn.Dropout(dropout)\n\n        self.multihead_attn = nn.MultiheadAttention(d_model, nhead, dropout=dropout)\n\n        # 
Implementation of Feedforward model\n        self.linear1 = nn.Linear(d_model, dim_feedforward)\n        self.linear2 = nn.Linear(dim_feedforward, d_model)\n\n        self.norm2 = nn.LayerNorm(d_model, eps=layer_norm_eps)\n        self.norm3 = nn.LayerNorm(d_model, eps=layer_norm_eps)\n\n        self.activation = _get_activation_fn(activation)\n\n    def __setstate__(self, state):\n        if 'activation' not in state:\n            state['activation'] = torch.nn.functional.relu\n        super(TransformerDecoderLayerOptimal, self).__setstate__(state)\n\n    def forward(self, tgt: Tensor, memory: Tensor, tgt_mask: Optional[Tensor] = None,\n                memory_mask: Optional[Tensor] = None,\n                tgt_key_padding_mask: Optional[Tensor] = None,\n                memory_key_padding_mask: Optional[Tensor] = None) -> Tensor:\n        tgt = tgt + self.dropout1(tgt)\n        tgt = self.norm1(tgt)\n        tgt2 = self.multihead_attn(tgt, memory, memory)[0]\n        tgt = tgt + self.dropout2(tgt2)\n        tgt = self.norm2(tgt)\n        tgt2 = self.linear2(self.dropout(self.activation(self.linear1(tgt))))\n        tgt = tgt + self.dropout3(tgt2)\n        tgt = self.norm3(tgt)\n        return tgt\n\n\n# class ExtrapClasses(object):\n#     def __init__(self, num_queries: int, group_size: int):\n#         self.num_queries = num_queries\n#         self.group_size = group_size\n#\n#     def __call__(self, h: torch.Tensor, class_embed_w: torch.Tensor, class_embed_b: torch.Tensor, out_extrap:\n#     torch.Tensor):\n#         # h = h.unsqueeze(-1).expand(-1, -1, -1, self.group_size)\n#         h = h[..., None].repeat(1, 1, 1, self.group_size) # torch.Size([bs, 5, 768, groups])\n#         w = class_embed_w.view((self.num_queries, h.shape[2], self.group_size))\n#         out = (h * w).sum(dim=2) + class_embed_b\n#         out = out.view((h.shape[0], self.group_size * self.num_queries))\n#         return out\n\nclass MLDecoder(nn.Module):\n    def __init__(self, num_classes, 
num_of_groups=-1, decoder_embedding=768, initial_num_features=2048):\n        super().__init__()\n        embed_len_decoder = 100 if num_of_groups < 0 else num_of_groups\n        if embed_len_decoder > num_classes:\n            embed_len_decoder = num_classes\n        self.embed_len_decoder = embed_len_decoder\n\n        # switching to 768 initial embeddings\n        decoder_embedding = 768 if decoder_embedding < 0 else decoder_embedding\n        self.embed_standart = nn.Linear(initial_num_features, decoder_embedding)\n\n        # decoder\n        decoder_dropout = 0.1\n        num_layers_decoder = 1\n        dim_feedforward = 2048\n        layer_decode = TransformerDecoderLayerOptimal(d_model=decoder_embedding,\n                                                      dim_feedforward=dim_feedforward, dropout=decoder_dropout)\n        self.decoder = nn.TransformerDecoder(layer_decode, num_layers=num_layers_decoder)\n\n        # non-learnable queries\n        self.query_embed = nn.Embedding(embed_len_decoder, decoder_embedding)\n        self.query_embed.requires_grad_(False)\n\n        # group fully-connected\n        self.num_classes = num_classes\n        self.duplicate_factor = int(num_classes / embed_len_decoder + 0.999)\n        self.duplicate_pooling = torch.nn.Parameter(\n            torch.Tensor(embed_len_decoder, decoder_embedding, self.duplicate_factor))\n        self.duplicate_pooling_bias = torch.nn.Parameter(torch.Tensor(num_classes))\n        torch.nn.init.xavier_normal_(self.duplicate_pooling)\n        torch.nn.init.constant_(self.duplicate_pooling_bias, 0)\n\n    def forward(self, x):\n        if len(x.shape) == 4:  # [bs,2048, 7,7]\n            embedding_spatial = x.flatten(2).transpose(1, 2)\n        else:  # [bs, 197,468]\n            embedding_spatial = x\n        embedding_spatial_786 = self.embed_standart(embedding_spatial)\n        embedding_spatial_786 = torch.nn.functional.relu(embedding_spatial_786, inplace=True)\n\n        bs = 
embedding_spatial_786.shape[0]\n        query_embed = self.query_embed.weight\n        # tgt = query_embed.unsqueeze(1).repeat(1, bs, 1)\n        tgt = query_embed.unsqueeze(1).expand(-1, bs, -1)  # no allocation of memory with expand\n        h = self.decoder(tgt, embedding_spatial_786.transpose(0, 1))  # [embed_len_decoder, batch, 768]\n        h = h.transpose(0, 1)\n\n        out_extrap = torch.zeros(h.shape[0], h.shape[1], self.duplicate_factor, device=h.device, dtype=h.dtype)\n        for i in range(self.embed_len_decoder):  # group FC\n            h_i = h[:, i, :]\n            w_i = self.duplicate_pooling[i, :, :]\n            out_extrap[:, i, :] = torch.matmul(h_i, w_i)\n        h_out = out_extrap.flatten(1)[:, :self.num_classes]\n        h_out += self.duplicate_pooling_bias\n        logits = h_out\n        return logits\n"
  },
  {
    "path": "timm/layers/mlp.py",
    "content": "\"\"\" MLP module w/ dropout and configurable activation layer\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom functools import partial\nfrom typing import Optional, Type, Union, Tuple\n\nfrom torch import nn as nn\n\nfrom .grn import GlobalResponseNorm\nfrom .helpers import to_2tuple\n\n\nclass Mlp(nn.Module):\n    \"\"\" MLP as used in Vision Transformer, MLP-Mixer and related networks\n\n    NOTE: When use_conv=True, expects 2D NCHW tensors, otherwise N*C expected.\n    \"\"\"\n    def __init__(\n            self,\n            in_features: int,\n            hidden_features: Optional[int] = None,\n            out_features: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            bias: Union[bool, Tuple[bool, bool]] = True,\n            drop: Union[float, Tuple[float, float]] = 0.,\n            use_conv: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_features = out_features or in_features\n        hidden_features = hidden_features or in_features\n        bias = to_2tuple(bias)\n        drop_probs = to_2tuple(drop)\n        linear_layer = partial(nn.Conv2d, kernel_size=1) if use_conv else nn.Linear\n\n        self.fc1 = linear_layer(in_features, hidden_features, bias=bias[0], **dd)\n        self.act = act_layer()\n        self.drop1 = nn.Dropout(drop_probs[0])\n        self.norm = norm_layer(hidden_features, **dd) if norm_layer is not None else nn.Identity()\n        self.fc2 = linear_layer(hidden_features, out_features, bias=bias[1], **dd)\n        self.drop2 = nn.Dropout(drop_probs[1])\n\n    def forward(self, x):\n        x = self.fc1(x)\n        x = self.act(x)\n        x = self.drop1(x)\n        x = self.norm(x)\n        x = self.fc2(x)\n        x = self.drop2(x)\n        return x\n\n\nclass GluMlp(nn.Module):\n    \"\"\" MLP 
w/ GLU style gating\n    See: https://arxiv.org/abs/1612.08083, https://arxiv.org/abs/2002.05202\n\n    NOTE: When use_conv=True, expects 2D NCHW tensors, otherwise N*C expected.\n    \"\"\"\n    def __init__(\n            self,\n            in_features: int,\n            hidden_features: Optional[int] = None,\n            out_features: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.Sigmoid,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            bias: Union[bool, Tuple[bool, bool]] = True,\n            drop: Union[float, Tuple[float, float]] = 0.,\n            use_conv: bool = False,\n            gate_last: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_features = out_features or in_features\n        hidden_features = hidden_features or in_features\n        assert hidden_features % 2 == 0\n        bias = to_2tuple(bias)\n        drop_probs = to_2tuple(drop)\n        linear_layer = partial(nn.Conv2d, kernel_size=1) if use_conv else nn.Linear\n        self.chunk_dim = 1 if use_conv else -1\n        self.gate_last = gate_last  # use second half of width for gate\n\n        self.fc1 = linear_layer(in_features, hidden_features, bias=bias[0], **dd)\n        self.act = act_layer()\n        self.drop1 = nn.Dropout(drop_probs[0])\n        self.norm = norm_layer(hidden_features // 2, **dd) if norm_layer is not None else nn.Identity()\n        self.fc2 = linear_layer(hidden_features // 2, out_features, bias=bias[1], **dd)\n        self.drop2 = nn.Dropout(drop_probs[1])\n\n    def init_weights(self):\n        # override init of fc1 w/ gate portion set to weight near zero, bias=1\n        if self.fc1.bias is not None:\n            nn.init.ones_(self.fc1.bias[self.fc1.bias.shape[0] // 2:])\n        nn.init.normal_(self.fc1.weight[self.fc1.weight.shape[0] // 2:], std=1e-6)\n\n    def forward(self, x):\n        x = self.fc1(x)\n    
    x1, x2 = x.chunk(2, dim=self.chunk_dim)\n        x = x1 * self.act(x2) if self.gate_last else self.act(x1) * x2\n        x = self.drop1(x)\n        x = self.norm(x)\n        x = self.fc2(x)\n        x = self.drop2(x)\n        return x\n\n\nSwiGLUPacked = partial(GluMlp, act_layer=nn.SiLU, gate_last=False)\n\n\nclass SwiGLU(nn.Module):\n    \"\"\" SwiGLU\n    NOTE: GluMLP above can implement SwiGLU, but this impl has split fc1 and\n    better matches some other common impl which makes mapping checkpoints simpler.\n    \"\"\"\n    def __init__(\n            self,\n            in_features: int,\n            hidden_features: Optional[int] = None,\n            out_features: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.SiLU,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            bias: Union[bool, Tuple[bool, bool]] = True,\n            drop: Union[float, Tuple[float, float]] = 0.,\n            align_to: int = 0,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_features = out_features or in_features\n        hidden_features = hidden_features or in_features\n        bias = to_2tuple(bias)\n        drop_probs = to_2tuple(drop)\n\n        if align_to:\n            hidden_features = hidden_features + (-hidden_features % align_to)\n\n        self.fc1_g = nn.Linear(in_features, hidden_features, bias=bias[0], **dd)\n        self.fc1_x = nn.Linear(in_features, hidden_features, bias=bias[0], **dd)\n        self.act = act_layer()\n        self.drop1 = nn.Dropout(drop_probs[0])\n        self.norm = norm_layer(hidden_features, **dd) if norm_layer is not None else nn.Identity()\n        self.fc2 = nn.Linear(hidden_features, out_features, bias=bias[1], **dd)\n        self.drop2 = nn.Dropout(drop_probs[1])\n\n    def init_weights(self):\n        # override init of fc1 w/ gate portion set to weight near zero, bias=1\n        if self.fc1_g.bias is 
not None:\n            nn.init.ones_(self.fc1_g.bias)\n        nn.init.normal_(self.fc1_g.weight, std=1e-6)\n\n    def forward(self, x):\n        x_gate = self.fc1_g(x)\n        x = self.fc1_x(x)\n        x = self.act(x_gate) * x\n        x = self.drop1(x)\n        x = self.norm(x)\n        x = self.fc2(x)\n        x = self.drop2(x)\n        return x\n\n\nclass GatedMlp(nn.Module):\n    \"\"\" MLP as used in gMLP\n    \"\"\"\n    def __init__(\n            self,\n            in_features: int,\n            hidden_features: Optional[int] = None,\n            out_features: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            gate_layer: Optional[Type[nn.Module]] = None,\n            bias: Union[bool, Tuple[bool, bool]] = True,\n            drop: Union[float, Tuple[float, float]] = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_features = out_features or in_features\n        hidden_features = hidden_features or in_features\n        bias = to_2tuple(bias)\n        drop_probs = to_2tuple(drop)\n\n        self.fc1 = nn.Linear(in_features, hidden_features, bias=bias[0], **dd)\n        self.act = act_layer()\n        self.drop1 = nn.Dropout(drop_probs[0])\n        if gate_layer is not None:\n            assert hidden_features % 2 == 0\n            self.gate = gate_layer(hidden_features, **dd)\n            hidden_features = hidden_features // 2  # FIXME base reduction on gate property?\n        else:\n            self.gate = nn.Identity()\n        self.norm = norm_layer(hidden_features, **dd) if norm_layer is not None else nn.Identity()\n        self.fc2 = nn.Linear(hidden_features, out_features, bias=bias[1], **dd)\n        self.drop2 = nn.Dropout(drop_probs[1])\n\n    def forward(self, x):\n        x = self.fc1(x)\n        x = self.act(x)\n        x = self.drop1(x)\n        x = 
self.gate(x)\n        x = self.norm(x)\n        x = self.fc2(x)\n        x = self.drop2(x)\n        return x\n\n\nclass ConvMlp(nn.Module):\n    \"\"\" MLP using 1x1 convs that keeps spatial dims (for 2D NCHW tensors)\n    \"\"\"\n    def __init__(\n            self,\n            in_features: int,\n            hidden_features: Optional[int] = None,\n            out_features: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            bias: Union[bool, Tuple[bool, bool]] = True,\n            drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_features = out_features or in_features\n        hidden_features = hidden_features or in_features\n        bias = to_2tuple(bias)\n\n        self.fc1 = nn.Conv2d(in_features, hidden_features, kernel_size=1, bias=bias[0], **dd)\n        self.norm = norm_layer(hidden_features, **dd) if norm_layer else nn.Identity()\n        self.act = act_layer()\n        self.drop = nn.Dropout(drop)\n        self.fc2 = nn.Conv2d(hidden_features, out_features, kernel_size=1, bias=bias[1], **dd)\n\n    def forward(self, x):\n        x = self.fc1(x)\n        x = self.norm(x)\n        x = self.act(x)\n        x = self.drop(x)\n        x = self.fc2(x)\n        return x\n\n\nclass GlobalResponseNormMlp(nn.Module):\n    \"\"\" MLP w/ Global Response Norm (see grn.py), nn.Linear or 1x1 Conv2d\n\n    NOTE: Intended for '2D' NCHW (use_conv=True) or NHWC (use_conv=False, channels-last) tensor layouts\n    \"\"\"\n    def __init__(\n            self,\n            in_features: int,\n            hidden_features: Optional[int] = None,\n            out_features: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.GELU,\n            bias: Union[bool, Tuple[bool, bool]] = True,\n            drop: Union[float, Tuple[float, float]] = 0.,\n            
use_conv: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_features = out_features or in_features\n        hidden_features = hidden_features or in_features\n        bias = to_2tuple(bias)\n        drop_probs = to_2tuple(drop)\n        linear_layer = partial(nn.Conv2d, kernel_size=1) if use_conv else nn.Linear\n\n        self.fc1 = linear_layer(in_features, hidden_features, bias=bias[0], **dd)\n        self.act = act_layer()\n        self.drop1 = nn.Dropout(drop_probs[0])\n        self.grn = GlobalResponseNorm(hidden_features, channels_last=not use_conv, **dd)\n        self.fc2 = linear_layer(hidden_features, out_features, bias=bias[1], **dd)\n        self.drop2 = nn.Dropout(drop_probs[1])\n\n    def forward(self, x):\n        x = self.fc1(x)\n        x = self.act(x)\n        x = self.drop1(x)\n        x = self.grn(x)\n        x = self.fc2(x)\n        x = self.drop2(x)\n        return x\n"
  },
  {
    "path": "timm/layers/non_local_attn.py",
    "content": "\"\"\" Bilinear-Attention-Transform and Non-Local Attention\n\nPaper: `Non-Local Neural Networks With Grouped Bilinear Attentional Transforms`\n    - https://openaccess.thecvf.com/content_CVPR_2020/html/Chi_Non-Local_Neural_Networks_With_Grouped_Bilinear_Attentional_Transforms_CVPR_2020_paper.html\nAdapted from original code: https://github.com/BA-Transform/BAT-Image-Classification\n\"\"\"\nfrom typing import Optional, Type\n\nimport torch\nfrom torch import nn\nfrom torch.nn import functional as F\n\nfrom ._fx import register_notrace_module\nfrom .conv_bn_act import ConvNormAct\nfrom .helpers import make_divisible\nfrom .trace_utils import _assert\n\n\nclass NonLocalAttn(nn.Module):\n    \"\"\"Spatial NL block for image classification.\n\n    This was adapted from https://github.com/BA-Transform/BAT-Image-Classification\n    Their NonLocal impl inspired by https://github.com/facebookresearch/video-nonlocal-net.\n    \"\"\"\n\n    def __init__(\n            self,\n            in_channels,\n            use_scale=True,\n            rd_ratio=1/8,\n            rd_channels=None,\n            rd_divisor=8,\n            device=None,\n            dtype=None,\n            **_,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if rd_channels is None:\n            rd_channels = make_divisible(in_channels * rd_ratio, divisor=rd_divisor)\n        self.scale = in_channels ** -0.5 if use_scale else 1.0\n        self.t = nn.Conv2d(in_channels, rd_channels, kernel_size=1, stride=1, bias=True, **dd)\n        self.p = nn.Conv2d(in_channels, rd_channels, kernel_size=1, stride=1, bias=True, **dd)\n        self.g = nn.Conv2d(in_channels, rd_channels, kernel_size=1, stride=1, bias=True, **dd)\n        self.z = nn.Conv2d(rd_channels, in_channels, kernel_size=1, stride=1, bias=True, **dd)\n        self.norm = nn.BatchNorm2d(in_channels, **dd)\n        self.reset_parameters()\n\n    def forward(self, x):\n        shortcut = x\n\n   
     t = self.t(x)\n        p = self.p(x)\n        g = self.g(x)\n\n        B, C, H, W = t.size()\n        t = t.view(B, C, -1).permute(0, 2, 1)\n        p = p.view(B, C, -1)\n        g = g.view(B, C, -1).permute(0, 2, 1)\n\n        att = torch.bmm(t, p) * self.scale\n        att = F.softmax(att, dim=2)\n        x = torch.bmm(att, g)\n\n        x = x.permute(0, 2, 1).reshape(B, C, H, W)\n        x = self.z(x)\n        x = self.norm(x) + shortcut\n\n        return x\n\n    def reset_parameters(self):\n        for name, m in self.named_modules():\n            if isinstance(m, nn.Conv2d):\n                nn.init.kaiming_normal_(\n                    m.weight, mode='fan_out', nonlinearity='relu')\n                if len(list(m.parameters())) > 1:\n                    nn.init.constant_(m.bias, 0.0)\n            elif isinstance(m, nn.BatchNorm2d):\n                nn.init.constant_(m.weight, 0)\n                nn.init.constant_(m.bias, 0)\n            elif isinstance(m, nn.GroupNorm):\n                nn.init.constant_(m.weight, 0)\n                nn.init.constant_(m.bias, 0)\n\n\n@register_notrace_module\nclass BilinearAttnTransform(nn.Module):\n\n    def __init__(\n            self,\n            in_channels: int,\n            block_size: int,\n            groups: int,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv1 = ConvNormAct(in_channels, groups, 1, act_layer=act_layer, norm_layer=norm_layer, **dd)\n        self.conv_p = nn.Conv2d(groups, block_size * block_size * groups, kernel_size=(block_size, 1), **dd)\n        self.conv_q = nn.Conv2d(groups, block_size * block_size * groups, kernel_size=(1, block_size), **dd)\n        self.conv2 = ConvNormAct(in_channels, in_channels, 1, act_layer=act_layer, norm_layer=norm_layer, **dd)\n        self.block_size 
= block_size\n        self.groups = groups\n        self.in_channels = in_channels\n\n    def resize_mat(self, x, t: int):\n        B, C, block_size, block_size1 = x.shape\n        _assert(block_size == block_size1, '')\n        if t <= 1:\n            return x\n        x = x.view(B * C, -1, 1, 1)\n        x = x * torch.eye(t, t, dtype=x.dtype, device=x.device)\n        x = x.view(B * C, block_size, block_size, t, t)\n        x = torch.cat(torch.split(x, 1, dim=1), dim=3)\n        x = torch.cat(torch.split(x, 1, dim=2), dim=4)\n        x = x.view(B, C, block_size * t, block_size * t)\n        return x\n\n    def forward(self, x):\n        _assert(x.shape[-1] % self.block_size == 0, '')\n        _assert(x.shape[-2] % self.block_size == 0, '')\n        B, C, H, W = x.shape\n        out = self.conv1(x)\n        rp = F.adaptive_max_pool2d(out, (self.block_size, 1))\n        cp = F.adaptive_max_pool2d(out, (1, self.block_size))\n        p = self.conv_p(rp).view(B, self.groups, self.block_size, self.block_size).sigmoid()\n        q = self.conv_q(cp).view(B, self.groups, self.block_size, self.block_size).sigmoid()\n        p = p / p.sum(dim=3, keepdim=True)\n        q = q / q.sum(dim=2, keepdim=True)\n        p = p.view(B, self.groups, 1, self.block_size, self.block_size).expand(x.size(\n            0), self.groups, C // self.groups, self.block_size, self.block_size).contiguous()\n        p = p.view(B, C, self.block_size, self.block_size)\n        q = q.view(B, self.groups, 1, self.block_size, self.block_size).expand(x.size(\n            0), self.groups, C // self.groups, self.block_size, self.block_size).contiguous()\n        q = q.view(B, C, self.block_size, self.block_size)\n        p = self.resize_mat(p, H // self.block_size)\n        q = self.resize_mat(q, W // self.block_size)\n        y = p.matmul(x)\n        y = y.matmul(q)\n\n        y = self.conv2(y)\n        return y\n\n\nclass BatNonLocalAttn(nn.Module):\n    \"\"\" BAT\n    Adapted from: 
https://github.com/BA-Transform/BAT-Image-Classification\n    \"\"\"\n\n    def __init__(\n            self,\n            in_channels: int,\n            block_size: int = 7,\n            groups: int = 2,\n            rd_ratio: float = 0.25,\n            rd_channels: Optional[int] = None,\n            rd_divisor: int = 8,\n            drop_rate: float = 0.2,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            device=None,\n            dtype=None,\n            **_,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if rd_channels is None:\n            rd_channels = make_divisible(in_channels * rd_ratio, divisor=rd_divisor)\n        self.conv1 = ConvNormAct(in_channels, rd_channels, 1, act_layer=act_layer, norm_layer=norm_layer, **dd)\n        self.ba = BilinearAttnTransform(\n            rd_channels,\n            block_size,\n            groups,\n            act_layer=act_layer,\n            norm_layer=norm_layer,\n            **dd,\n        )\n        self.conv2 = ConvNormAct(rd_channels, in_channels, 1, act_layer=act_layer, norm_layer=norm_layer, **dd)\n        self.dropout = nn.Dropout2d(p=drop_rate)\n\n    def forward(self, x):\n        xl = self.conv1(x)\n        y = self.ba(xl)\n        y = self.conv2(y)\n        y = self.dropout(y)\n        return y + x\n"
  },
  {
    "path": "timm/layers/norm.py",
    "content": "\"\"\" Normalization layers and wrappers\n\nNorm layer definitions that support fast norm and consistent channel arg order (always first arg).\n\nHacked together by / Copyright 2022 Ross Wightman\n\"\"\"\nimport numbers\nfrom typing import Tuple\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom .fast_norm import (\n    is_fast_norm,\n    fast_group_norm,\n    fast_layer_norm,\n    fast_rms_norm,\n    rms_norm2d,\n    fast_rms_norm2d,\n    fast_simple_norm,\n    simple_norm,\n)\n\ntry:\n    from torch.nn.functional import rms_norm\nexcept ImportError:\n    from .fast_norm import rms_norm\n\n\nclass GroupNorm(nn.GroupNorm):\n    _fast_norm: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            num_channels: int,\n            num_groups: int = 32,\n            eps: float = 1e-5,\n            affine: bool = True,\n            **kwargs,\n    ):\n        # NOTE num_channels is swapped to first arg for consistency in swapping norm layers with BN\n        super().__init__(num_groups, num_channels, eps=eps, affine=affine, **kwargs)\n        self._fast_norm = is_fast_norm()  # can't script unless we have these flags here (no globals)\n\n    def forward(self, x):\n        if self._fast_norm:\n            return fast_group_norm(x, self.num_groups, self.weight, self.bias, self.eps)\n        else:\n            return F.group_norm(x, self.num_groups, self.weight, self.bias, self.eps)\n\n\nclass GroupNorm1(nn.GroupNorm):\n    \"\"\" Group Normalization with 1 group.\n    Input: tensor in shape [B, C, *]\n    \"\"\"\n    _fast_norm: torch.jit.Final[bool]\n\n    def __init__(self, num_channels: int, **kwargs):\n        super().__init__(1, num_channels, **kwargs)\n        self._fast_norm = is_fast_norm()  # can't script unless we have these flags here (no globals)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        if self._fast_norm:\n            return fast_group_norm(x, self.num_groups, self.weight, 
self.bias, self.eps)\n        else:\n            return F.group_norm(x, self.num_groups, self.weight, self.bias, self.eps)\n\n\nclass LayerNorm(nn.LayerNorm):\n    \"\"\" LayerNorm w/ fast norm option\n    \"\"\"\n    _fast_norm: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            num_channels: int,\n            eps: float = 1e-6,\n            affine: bool = True,\n            **kwargs,\n    ):\n        super().__init__(num_channels, eps=eps, elementwise_affine=affine, **kwargs)\n        self._fast_norm = is_fast_norm()  # can't script unless we have these flags here (no globals)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        if self._fast_norm:\n            x = fast_layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)\n        else:\n            x = F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)\n        return x\n\n\nclass LayerNormFp32(nn.LayerNorm):\n    \"\"\" LayerNorm\n    \"\"\"\n\n    def __init__(\n            self,\n            num_channels: int,\n            eps: float = 1e-6,\n            affine: bool = True,\n            **kwargs,\n    ):\n        super().__init__(num_channels, eps=eps, elementwise_affine=affine, **kwargs)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        weight = self.weight.float() if self.weight is not None else None\n        bias = self.bias.float() if self.bias is not None else None\n        x = F.layer_norm(x.float(), self.normalized_shape, weight, bias, self.eps).to(x.dtype)\n        return x\n\n\nclass LayerNorm2d(nn.LayerNorm):\n    \"\"\" LayerNorm for channels of '2D' spatial NCHW tensors \"\"\"\n    _fast_norm: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            num_channels: int,\n            eps: float = 1e-6,\n            affine: bool = True,\n            **kwargs,\n    ):\n        super().__init__(num_channels, eps=eps, elementwise_affine=affine, **kwargs)\n        self._fast_norm = 
is_fast_norm()  # can't script unless we have these flags here (no globals)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = x.permute(0, 2, 3, 1)\n        if self._fast_norm:\n            x = fast_layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)\n        else:\n            x = F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)\n        x = x.permute(0, 3, 1, 2)\n        return x\n\n\nclass LayerNorm2dFp32(nn.LayerNorm):\n    \"\"\" LayerNorm for channels of '2D' spatial NCHW tensors \"\"\"\n\n    def __init__(\n            self,\n            num_channels: int,\n            eps: float = 1e-6,\n            affine: bool = True,\n            **kwargs,\n    ):\n        super().__init__(num_channels, eps=eps, elementwise_affine=affine, **kwargs)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = x.permute(0, 2, 3, 1)\n        weight = self.weight.float() if self.weight is not None else None\n        bias = self.bias.float() if self.bias is not None else None\n        x = F.layer_norm(x.float(), self.normalized_shape, weight, bias, self.eps).to(x.dtype)\n        x = x.permute(0, 3, 1, 2)\n        return x\n\n\ndef _is_contiguous(tensor: torch.Tensor) -> bool:\n    # jit is oh so lovely :/\n    if torch.jit.is_scripting():\n        return tensor.is_contiguous()\n    else:\n        return tensor.is_contiguous(memory_format=torch.contiguous_format)\n\n\ndef _layer_norm_cf(x: torch.Tensor, weight: torch.Tensor, bias: torch.Tensor, eps: float):\n    s, u = torch.var_mean(x, dim=1, unbiased=False, keepdim=True)\n    x = (x - u) * torch.rsqrt(s + eps)\n    x = x * weight[:, None, None] + bias[:, None, None]\n    return x\n\n\ndef _layer_norm_cf_sqm(x: torch.Tensor, weight: torch.Tensor, bias: torch.Tensor, eps: float):\n    u = x.mean(dim=1, keepdim=True)\n    s = ((x * x).mean(dim=1, keepdim=True) - (u * u)).clamp(0)\n    x = (x - u) * torch.rsqrt(s + eps)\n    x = x * weight.view(1, -1, 1, 
1) + bias.view(1, -1, 1, 1)\n    return x\n\n\nclass LayerNormExp2d(nn.LayerNorm):\n    \"\"\" LayerNorm for channels_first tensors with 2d spatial dimensions (ie N, C, H, W).\n\n    Experimental implementation w/ manual norm for tensors non-contiguous tensors.\n\n    This improves throughput in some scenarios (tested on Ampere GPU), esp w/ channels_last\n    layout. However, benefits are not always clear and can perform worse on other GPUs.\n    \"\"\"\n\n    def __init__(self, num_channels: int, eps: float = 1e-6):\n        super().__init__(num_channels, eps=eps)\n\n    def forward(self, x) -> torch.Tensor:\n        if _is_contiguous(x):\n            x = F.layer_norm(\n                x.permute(0, 2, 3, 1), self.normalized_shape, self.weight, self.bias, self.eps).permute(0, 3, 1, 2)\n        else:\n            x = _layer_norm_cf(x, self.weight, self.bias, self.eps)\n        return x\n\n\nclass RmsNorm(nn.Module):\n    \"\"\" RmsNorm w/ fast (apex) norm if available\n    \"\"\"\n    __constants__ = ['normalized_shape', 'eps', 'elementwise_affine', '_fast_norm']\n    normalized_shape: Tuple[int, ...]\n    eps: float\n    elementwise_affine: bool\n    _fast_norm: bool\n\n    def __init__(\n            self,\n            channels: int,\n            eps: float = 1e-6,\n            affine: bool = True,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        normalized_shape = channels\n        if isinstance(normalized_shape, numbers.Integral):\n            # mypy error: incompatible types in assignment\n            normalized_shape = (normalized_shape,)  # type: ignore[assignment]\n        self.normalized_shape = tuple(normalized_shape)  # type: ignore[arg-type]\n        self.eps = eps\n        self.elementwise_affine = affine\n        self._fast_norm = is_fast_norm()  # can't script unless we have these flags here (no globals)\n\n        if self.elementwise_affine:\n       
     self.weight = nn.Parameter(torch.empty(self.normalized_shape, **dd))\n        else:\n            self.register_parameter('weight', None)\n\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        if self.elementwise_affine:\n            nn.init.ones_(self.weight)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        # NOTE fast norm fallback needs our rms norm impl, so both paths through here.\n        # Since there is no built-in PyTorch impl, always uses APEX RmsNorm if installed.\n        if self._fast_norm:\n            x = fast_rms_norm(x, self.normalized_shape, self.weight, self.eps)\n        else:\n            x = rms_norm(x, self.normalized_shape, self.weight, self.eps)\n        return x\n\n\nclass RmsNormFp32(nn.Module):\n    \"\"\" RmsNorm w/ fast (apex) norm if available\n    \"\"\"\n    __constants__ = ['normalized_shape', 'eps', 'elementwise_affine']\n    normalized_shape: Tuple[int, ...]\n    eps: float\n    elementwise_affine: bool\n\n    def __init__(\n            self,\n            channels: int,\n            eps: float = 1e-6,\n            affine: bool = True,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        normalized_shape = channels\n        if isinstance(normalized_shape, numbers.Integral):\n            # mypy error: incompatible types in assignment\n            normalized_shape = (normalized_shape,)  # type: ignore[assignment]\n        self.normalized_shape = tuple(normalized_shape)  # type: ignore[arg-type]\n        self.eps = eps\n        self.elementwise_affine = affine\n\n        if self.elementwise_affine:\n            self.weight = nn.Parameter(torch.empty(self.normalized_shape, **dd))\n        else:\n            self.register_parameter('weight', None)\n\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        if self.elementwise_affine:\n            
nn.init.ones_(self.weight)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        weight = self.weight.float() if self.weight is not None else None\n        x = rms_norm(x.float(), self.normalized_shape, weight, self.eps).to(x.dtype)\n        return x\n\n\nclass RmsNorm2d(nn.Module):\n    \"\"\" RmsNorm2D for NCHW tensors, w/ fast apex or cast norm if available\n\n    NOTE: It's currently (2025-05-10) faster to use an eager 2d kernel that does reduction\n    on dim=1 than to permute and use internal PyTorch F.rms_norm, this may change if something\n    like https://github.com/pytorch/pytorch/pull/150576 lands.\n    \"\"\"\n    __constants__ = ['normalized_shape', 'eps', 'elementwise_affine', '_fast_norm']\n    normalized_shape: Tuple[int, ...]\n    eps: float\n    elementwise_affine: bool\n    _fast_norm: bool\n\n    def __init__(\n            self,\n            channels: int,\n            eps: float = 1e-6,\n            affine: bool = True,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        normalized_shape = channels\n        if isinstance(normalized_shape, numbers.Integral):\n            # mypy error: incompatible types in assignment\n            normalized_shape = (normalized_shape,)  # type: ignore[assignment]\n        self.normalized_shape = tuple(normalized_shape)  # type: ignore[arg-type]\n        self.eps = eps\n        self.elementwise_affine = affine\n        self._fast_norm = is_fast_norm()  # can't script unless we have these flags here (no globals)\n\n        if self.elementwise_affine:\n            self.weight = nn.Parameter(torch.empty(self.normalized_shape, **dd))\n        else:\n            self.register_parameter('weight', None)\n\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        if self.elementwise_affine:\n            nn.init.ones_(self.weight)\n\n    def forward(self, x: torch.Tensor) -> 
torch.Tensor:\n        # NOTE fast norm fallback needs our rms norm impl, so both paths through here.\n        # Since there is no built-in PyTorch impl, always use APEX RmsNorm if is installed.\n        if self._fast_norm:\n            x = fast_rms_norm2d(x, self.normalized_shape, self.weight, self.eps)\n        else:\n            x = rms_norm2d(x, self.normalized_shape, self.weight, self.eps)\n        return x\n\n\nclass RmsNorm2dFp32(nn.Module):\n    \"\"\" RmsNorm2D for NCHW tensors, w/ fast apex or cast norm if available\n\n    NOTE: It's currently (2025-05-10) faster to use an eager 2d kernel that does reduction\n    on dim=1 than to permute and use internal PyTorch F.rms_norm, this may change if something\n    like https://github.com/pytorch/pytorch/pull/150576 lands.\n    \"\"\"\n    __constants__ = ['normalized_shape', 'eps', 'elementwise_affine']\n    normalized_shape: Tuple[int, ...]\n    eps: float\n    elementwise_affine: bool\n\n    def __init__(\n            self,\n            channels: int,\n            eps: float = 1e-6,\n            affine: bool = True,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        normalized_shape = channels\n        if isinstance(normalized_shape, numbers.Integral):\n            # mypy error: incompatible types in assignment\n            normalized_shape = (normalized_shape,)  # type: ignore[assignment]\n        self.normalized_shape = tuple(normalized_shape)  # type: ignore[arg-type]\n        self.eps = eps\n        self.elementwise_affine = affine\n\n        if self.elementwise_affine:\n            self.weight = nn.Parameter(torch.empty(self.normalized_shape, **dd))\n        else:\n            self.register_parameter('weight', None)\n\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        if self.elementwise_affine:\n            nn.init.ones_(self.weight)\n\n    def forward(self, x: 
torch.Tensor) -> torch.Tensor:\n        weight = self.weight.float() if self.weight is not None else None\n        x = rms_norm2d(x.float(), self.normalized_shape, weight, self.eps).to(x.dtype)\n        return x\n\n\nclass SimpleNorm(nn.Module):\n    \"\"\" SimpleNorm (x / std(x))\n    \"\"\"\n    __constants__ = ['normalized_shape', 'eps', 'elementwise_affine', '_fast_norm']\n    normalized_shape: Tuple[int, ...]\n    eps: float\n    elementwise_affine: bool\n    _fast_norm: bool\n\n    def __init__(\n            self,\n            channels: int,\n            eps: float = 1e-6,\n            affine: bool = True,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        normalized_shape = channels\n        if isinstance(normalized_shape, numbers.Integral):\n            # mypy error: incompatible types in assignment\n            normalized_shape = (normalized_shape,)  # type: ignore[assignment]\n        self.normalized_shape = tuple(normalized_shape)  # type: ignore[arg-type]\n        self.eps = eps\n        self.elementwise_affine = affine\n        self._fast_norm = is_fast_norm()  # can't script unless we have these flags here (no globals)\n\n        if self.elementwise_affine:\n            self.weight = nn.Parameter(torch.empty(self.normalized_shape, **dd))\n        else:\n            self.register_parameter('weight', None)\n\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        if self.elementwise_affine:\n            nn.init.ones_(self.weight)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        if self._fast_norm:\n            x = fast_simple_norm(x, self.normalized_shape, self.weight, self.eps)\n        else:\n            x = simple_norm(x, self.normalized_shape, self.weight, self.eps)\n        return x\n\n\nclass SimpleNormFp32(nn.Module):\n    \"\"\" SimpleNorm (x / std(x))\n    \"\"\"\n    __constants__ = 
['normalized_shape', 'eps', 'elementwise_affine']\n    normalized_shape: Tuple[int, ...]\n    eps: float\n    elementwise_affine: bool\n\n    def __init__(\n            self,\n            channels: int,\n            eps: float = 1e-6,\n            affine: bool = True,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        normalized_shape = channels\n        if isinstance(normalized_shape, numbers.Integral):\n            # mypy error: incompatible types in assignment\n            normalized_shape = (normalized_shape,)  # type: ignore[assignment]\n        self.normalized_shape = tuple(normalized_shape)  # type: ignore[arg-type]\n        self.eps = eps\n        self.elementwise_affine = affine\n\n        if self.elementwise_affine:\n            self.weight = nn.Parameter(torch.empty(self.normalized_shape, **dd))\n        else:\n            self.register_parameter('weight', None)\n\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        if self.elementwise_affine:\n            nn.init.ones_(self.weight)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        weight = self.weight.float() if self.weight is not None else None\n        x = simple_norm(x.float(), self.normalized_shape, weight, self.eps).to(x.dtype)\n        return x\n\n\nclass SimpleNorm2d(nn.Module):\n    \"\"\" SimpleNorm for NCHW tensors\n    \"\"\"\n    __constants__ = ['normalized_shape', 'eps', 'elementwise_affine', '_fast_norm']\n    normalized_shape: Tuple[int, ...]\n    eps: float\n    elementwise_affine: bool\n    _fast_norm: bool\n\n    def __init__(\n            self,\n            channels: int,\n            eps: float = 1e-6,\n            affine: bool = True,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        normalized_shape = channels\n        if 
isinstance(normalized_shape, numbers.Integral):\n            # mypy error: incompatible types in assignment\n            normalized_shape = (normalized_shape,)  # type: ignore[assignment]\n        self.normalized_shape = tuple(normalized_shape)  # type: ignore[arg-type]\n        self.eps = eps\n        self.elementwise_affine = affine\n        self._fast_norm = is_fast_norm()  # can't script unless we have these flags here (no globals)\n\n        if self.elementwise_affine:\n            self.weight = nn.Parameter(torch.empty(self.normalized_shape, **dd))\n        else:\n            self.register_parameter('weight', None)\n\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        if self.elementwise_affine:\n            nn.init.ones_(self.weight)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = x.permute(0, 2, 3, 1)\n        if self._fast_norm:\n            x = fast_simple_norm(x, self.normalized_shape, self.weight, self.eps)\n        else:\n            x = simple_norm(x, self.normalized_shape, self.weight, self.eps)\n        x = x.permute(0, 3, 1, 2)\n        return x\n\n\nclass SimpleNorm2dFp32(nn.Module):\n    \"\"\" SimpleNorm for NCHW tensors\n    \"\"\"\n    __constants__ = ['normalized_shape', 'eps', 'elementwise_affine']\n    normalized_shape: Tuple[int, ...]\n    eps: float\n    elementwise_affine: bool\n\n    def __init__(\n            self,\n            channels: int,\n            eps: float = 1e-6,\n            affine: bool = True,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        normalized_shape = channels\n        if isinstance(normalized_shape, numbers.Integral):\n            # mypy error: incompatible types in assignment\n            normalized_shape = (normalized_shape,)  # type: ignore[assignment]\n        self.normalized_shape = tuple(normalized_shape)  # type: ignore[arg-type]\n        self.eps 
= eps\n        self.elementwise_affine = affine\n\n        if self.elementwise_affine:\n            self.weight = nn.Parameter(torch.empty(self.normalized_shape, **dd))\n        else:\n            self.register_parameter('weight', None)\n\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        if self.elementwise_affine:\n            nn.init.ones_(self.weight)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = x.permute(0, 2, 3, 1)\n        weight = self.weight.float() if self.weight is not None else None\n        x = simple_norm(x.float(), self.normalized_shape, weight, self.eps).to(x.dtype)\n        x = x.permute(0, 3, 1, 2)\n        return x\n"
  },
  {
    "path": "timm/layers/norm_act.py",
    "content": "\"\"\" Normalization + Activation Layers\n\nProvides Norm+Act fns for standard PyTorch norm layers such as\n* BatchNorm\n* GroupNorm\n* LayerNorm\n\nThis allows swapping with alternative layers that are natively both norm + act such as\n* EvoNorm (evo_norm.py)\n* FilterResponseNorm (filter_response_norm.py)\n* InplaceABN (inplace_abn.py)\n\nHacked together by / Copyright 2022 Ross Wightman\n\"\"\"\nfrom typing import Any, Dict, List, Optional, Type, Union\n\nimport torch\nfrom torch import nn as nn\nfrom torch.nn import functional as F\nfrom torchvision.ops.misc import FrozenBatchNorm2d\n\nfrom ._fx import register_notrace_module\nfrom .create_act import create_act_layer\nfrom .fast_norm import (\n    is_fast_norm,\n    fast_group_norm,\n    fast_layer_norm,\n    fast_rms_norm,\n    rms_norm2d,\n    fast_rms_norm2d,\n)\nfrom .norm import RmsNorm, RmsNorm2d\nfrom .trace_utils import _assert\nfrom .typing import LayerType\n\ntry:\n    from torch.nn.functional import rms_norm\nexcept ImportError:\n    from .fast_norm import rms_norm\n\n\ndef _create_act(\n        act_layer: LayerType,\n        act_kwargs: Dict[str, Any] = None,\n        inplace: Optional[bool] = False,\n        apply_act: bool = True,\n) -> nn.Module:\n    act_kwargs = act_kwargs or {}\n    act_kwargs.setdefault('inplace', inplace)\n    act = None\n    if apply_act:\n        act = create_act_layer(act_layer, **act_kwargs)\n    return nn.Identity() if act is None else act\n\n\n@register_notrace_module\nclass BatchNormAct2d(nn.BatchNorm2d):\n    \"\"\"BatchNorm + Activation\n\n    This module performs BatchNorm + Activation in a manner that will remain backwards\n    compatible with weights trained with separate bn, act. 
This is why we inherit from BN\n    instead of composing it as a .bn member.\n    \"\"\"\n    def __init__(\n            self,\n            num_features: int,\n            eps: float = 1e-5,\n            momentum: float = 0.1,\n            affine: bool = True,\n            track_running_stats: bool = True,\n            apply_act: bool = True,\n            act_layer: LayerType = nn.ReLU,\n            act_kwargs: Dict[str, Any] = None,\n            inplace: bool = True,\n            drop_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        try:\n            factory_kwargs = {'device': device, 'dtype': dtype}\n            super().__init__(\n                num_features,\n                eps=eps,\n                momentum=momentum,\n                affine=affine,\n                track_running_stats=track_running_stats,\n                **factory_kwargs,\n            )\n        except TypeError:\n            # NOTE for backwards compat with old PyTorch w/o factory device/dtype support\n            super().__init__(\n                num_features,\n                eps=eps,\n                momentum=momentum,\n                affine=affine,\n                track_running_stats=track_running_stats,\n            )\n        self.drop = drop_layer() if drop_layer is not None else nn.Identity()\n        self.act = _create_act(act_layer, act_kwargs=act_kwargs, inplace=inplace, apply_act=apply_act)\n\n    def forward(self, x):\n        # cut & paste of torch.nn.BatchNorm2d.forward impl to avoid issues with torchscript and tracing\n        _assert(x.ndim == 4, f'expected 4D input (got {x.ndim}D input)')\n\n        # exponential_average_factor is set to self.momentum\n        # (when it is available) only so that it gets updated\n        # in ONNX graph when this node is exported to ONNX.\n        if self.momentum is None:\n            exponential_average_factor = 0.0\n        else:\n            exponential_average_factor = 
self.momentum\n\n        if self.training and self.track_running_stats:\n            # TODO: if statement only here to tell the jit to skip emitting this when it is None\n            if self.num_batches_tracked is not None:  # type: ignore[has-type]\n                self.num_batches_tracked.add_(1)  # type: ignore[has-type]\n                if self.momentum is None:  # use cumulative moving average\n                    exponential_average_factor = 1.0 / float(self.num_batches_tracked)\n                else:  # use exponential moving average\n                    exponential_average_factor = self.momentum\n\n        r\"\"\"\n        Decide whether the mini-batch stats should be used for normalization rather than the buffers.\n        Mini-batch stats are used in training mode, and in eval mode when buffers are None.\n        \"\"\"\n        if self.training:\n            bn_training = True\n        else:\n            bn_training = (self.running_mean is None) and (self.running_var is None)\n\n        r\"\"\"\n        Buffers are only updated if they are to be tracked and we are in training mode. Thus they only need to be\n        passed when the update should occur (i.e. in training mode when they are tracked), or when buffer stats are\n        used for normalization (i.e. 
in eval mode when buffers are not None).\n        \"\"\"\n        x = F.batch_norm(\n            x,\n            # If buffers are not to be tracked, ensure that they won't be updated\n            self.running_mean if not self.training or self.track_running_stats else None,\n            self.running_var if not self.training or self.track_running_stats else None,\n            self.weight,\n            self.bias,\n            bn_training,\n            exponential_average_factor,\n            self.eps,\n        )\n        x = self.drop(x)\n        x = self.act(x)\n        return x\n\n\n@register_notrace_module\nclass SyncBatchNormAct(nn.SyncBatchNorm):\n    # Thanks to Selim Seferbekov (https://github.com/rwightman/pytorch-image-models/issues/1254)\n    # This is a quick workaround to support SyncBatchNorm for timm BatchNormAct2d layers\n    # but ONLY when used in conjunction with the timm conversion function below.\n    # Do not create this module directly or use the PyTorch conversion function.\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = super().forward(x)  # SyncBN doesn't work with torchscript anyways, so this is fine\n        if hasattr(self, \"drop\"):\n            x = self.drop(x)\n        if hasattr(self, \"act\"):\n            x = self.act(x)\n        return x\n\n\ndef convert_sync_batchnorm(module, process_group=None):\n    # convert both BatchNorm and BatchNormAct layers to Synchronized variants\n    module_output = module\n    if isinstance(module, torch.nn.modules.batchnorm._BatchNorm):\n        if isinstance(module, BatchNormAct2d):\n            # convert timm norm + act layer\n            module_output = SyncBatchNormAct(\n                module.num_features,\n                module.eps,\n                module.momentum,\n                module.affine,\n                module.track_running_stats,\n                process_group=process_group,\n            )\n            # set act and drop attr from the original module\n          
  module_output.act = module.act\n            module_output.drop = module.drop\n        else:\n            # convert standard BatchNorm layers\n            module_output = torch.nn.SyncBatchNorm(\n                module.num_features,\n                module.eps,\n                module.momentum,\n                module.affine,\n                module.track_running_stats,\n                process_group,\n            )\n        if module.affine:\n            with torch.no_grad():\n                module_output.weight = module.weight\n                module_output.bias = module.bias\n        module_output.running_mean = module.running_mean\n        module_output.running_var = module.running_var\n        module_output.num_batches_tracked = module.num_batches_tracked\n        module_output.training = module.training\n        if hasattr(module, \"qconfig\"):\n            module_output.qconfig = module.qconfig\n    for name, child in module.named_children():\n        module_output.add_module(name, convert_sync_batchnorm(child, process_group))\n    del module\n    return module_output\n\n\n@register_notrace_module\nclass FrozenBatchNormAct2d(torch.nn.Module):\n    \"\"\"\n    BatchNormAct2d where the batch statistics and the affine parameters are fixed\n\n    Args:\n        num_features (int): Number of features ``C`` from an expected input of size ``(N, C, H, W)``\n        eps (float): a value added to the denominator for numerical stability. 
Default: 1e-5\n    \"\"\"\n\n    def __init__(\n            self,\n            num_features: int,\n            eps: float = 1e-5,\n            apply_act: bool = True,\n            act_layer: LayerType = nn.ReLU,\n            act_kwargs: Dict[str, Any] = None,\n            inplace: bool = True,\n            drop_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.eps = eps\n        self.register_buffer(\"weight\", torch.ones(num_features, **dd))\n        self.register_buffer(\"bias\", torch.zeros(num_features, **dd))\n        self.register_buffer(\"running_mean\", torch.zeros(num_features, **dd))\n        self.register_buffer(\"running_var\", torch.ones(num_features, **dd))\n\n        self.drop = drop_layer() if drop_layer is not None else nn.Identity()\n        self.act = _create_act(act_layer, act_kwargs=act_kwargs, inplace=inplace, apply_act=apply_act)\n\n    def _load_from_state_dict(\n        self,\n        state_dict: dict,\n        prefix: str,\n        local_metadata: dict,\n        strict: bool,\n        missing_keys: List[str],\n        unexpected_keys: List[str],\n        error_msgs: List[str],\n    ):\n        num_batches_tracked_key = prefix + \"num_batches_tracked\"\n        if num_batches_tracked_key in state_dict:\n            del state_dict[num_batches_tracked_key]\n\n        super()._load_from_state_dict(\n            state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs\n        )\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        # move reshapes to the beginning\n        # to make it fuser-friendly\n        w = self.weight.reshape(1, -1, 1, 1)\n        b = self.bias.reshape(1, -1, 1, 1)\n        rv = self.running_var.reshape(1, -1, 1, 1)\n        rm = self.running_mean.reshape(1, -1, 1, 1)\n        scale = w * (rv + self.eps).rsqrt()\n        bias = b - rm * 
scale\n        x = x * scale + bias\n        x = self.act(self.drop(x))\n        return x\n\n    def __repr__(self) -> str:\n        return f\"{self.__class__.__name__}({self.weight.shape[0]}, eps={self.eps}, act={self.act})\"\n\n\ndef freeze_batch_norm_2d(module):\n    \"\"\"\n    Converts all `BatchNorm2d` and `SyncBatchNorm` or `BatchNormAct2d` and `SyncBatchNormAct2d` layers\n    of provided module into `FrozenBatchNorm2d` or `FrozenBatchNormAct2d` respectively.\n\n    Args:\n        module (torch.nn.Module): Any PyTorch module.\n\n    Returns:\n        torch.nn.Module: Resulting module\n\n    Inspired by https://github.com/pytorch/pytorch/blob/a5895f85be0f10212791145bfedc0261d364f103/torch/nn/modules/batchnorm.py#L762\n    \"\"\"\n    res = module\n    if isinstance(module, (BatchNormAct2d, SyncBatchNormAct)):\n        res = FrozenBatchNormAct2d(module.num_features)\n        res.num_features = module.num_features\n        res.affine = module.affine\n        if module.affine:\n            res.weight.data = module.weight.data.clone().detach()\n            res.bias.data = module.bias.data.clone().detach()\n        res.running_mean.data = module.running_mean.data\n        res.running_var.data = module.running_var.data\n        res.eps = module.eps\n        res.drop = module.drop\n        res.act = module.act\n    elif isinstance(module, (torch.nn.modules.batchnorm.BatchNorm2d, torch.nn.modules.batchnorm.SyncBatchNorm)):\n        res = FrozenBatchNorm2d(module.num_features)\n        res.num_features = module.num_features\n        res.affine = module.affine\n        if module.affine:\n            res.weight.data = module.weight.data.clone().detach()\n            res.bias.data = module.bias.data.clone().detach()\n        res.running_mean.data = module.running_mean.data\n        res.running_var.data = module.running_var.data\n        res.eps = module.eps\n    else:\n        for name, child in module.named_children():\n            new_child = 
freeze_batch_norm_2d(child)\n            if new_child is not child:\n                res.add_module(name, new_child)\n    return res\n\n\ndef unfreeze_batch_norm_2d(module):\n    \"\"\"\n    Converts all `FrozenBatchNorm2d` layers of provided module into `BatchNorm2d`. If `module` is itself and instance\n    of `FrozenBatchNorm2d`, it is converted into `BatchNorm2d` and returned. Otherwise, the module is walked\n    recursively and submodules are converted in place.\n\n    Args:\n        module (torch.nn.Module): Any PyTorch module.\n\n    Returns:\n        torch.nn.Module: Resulting module\n\n    Inspired by https://github.com/pytorch/pytorch/blob/a5895f85be0f10212791145bfedc0261d364f103/torch/nn/modules/batchnorm.py#L762\n    \"\"\"\n    res = module\n    if isinstance(module, FrozenBatchNormAct2d):\n        res = BatchNormAct2d(module.num_features)\n        if module.affine:\n            res.weight.data = module.weight.data.clone().detach()\n            res.bias.data = module.bias.data.clone().detach()\n        res.running_mean.data = module.running_mean.data\n        res.running_var.data = module.running_var.data\n        res.eps = module.eps\n        res.drop = module.drop\n        res.act = module.act\n    elif isinstance(module, FrozenBatchNorm2d):\n        res = torch.nn.BatchNorm2d(module.num_features)\n        if module.affine:\n            res.weight.data = module.weight.data.clone().detach()\n            res.bias.data = module.bias.data.clone().detach()\n        res.running_mean.data = module.running_mean.data\n        res.running_var.data = module.running_var.data\n        res.eps = module.eps\n    else:\n        for name, child in module.named_children():\n            new_child = unfreeze_batch_norm_2d(child)\n            if new_child is not child:\n                res.add_module(name, new_child)\n    return res\n\n\ndef _num_groups(num_channels: int, num_groups: int, group_size: int):\n    if group_size:\n        assert num_channels % group_size == 
0\n        return num_channels // group_size\n    return num_groups\n\n\nclass GroupNormAct(nn.GroupNorm):\n    _fast_norm: torch.jit.Final[bool]\n\n    # NOTE num_channel and num_groups order flipped for easier layer swaps / binding of fixed args\n    def __init__(\n            self,\n            num_channels: int,\n            num_groups: int = 32,\n            eps: float = 1e-5,\n            affine: bool = True,\n            group_size: Optional[int] = None,\n            apply_act: bool = True,\n            act_layer: LayerType = nn.ReLU,\n            act_kwargs: Dict[str, Any] = None,\n            inplace: bool = True,\n            drop_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__(\n            _num_groups(num_channels, num_groups, group_size),\n            num_channels,\n            eps=eps,\n            affine=affine,\n            device=device,\n            dtype=dtype,\n        )\n        self.drop = drop_layer() if drop_layer is not None else nn.Identity()\n        self.act = _create_act(act_layer, act_kwargs=act_kwargs, inplace=inplace, apply_act=apply_act)\n\n        self._fast_norm = is_fast_norm()\n\n    def forward(self, x):\n        if self._fast_norm:\n            x = fast_group_norm(x, self.num_groups, self.weight, self.bias, self.eps)\n        else:\n            x = F.group_norm(x, self.num_groups, self.weight, self.bias, self.eps)\n        x = self.drop(x)\n        x = self.act(x)\n        return x\n\n\nclass GroupNorm1Act(nn.GroupNorm):\n    _fast_norm: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            num_channels: int,\n            eps: float = 1e-5,\n            affine: bool = True,\n            apply_act: bool = True,\n            act_layer: LayerType = nn.ReLU,\n            act_kwargs: Dict[str, Any] = None,\n            inplace: bool = True,\n            drop_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n          
  dtype=None,\n    ):\n        super().__init__(1, num_channels, eps=eps, affine=affine, device=device, dtype=dtype)\n        self.drop = drop_layer() if drop_layer is not None else nn.Identity()\n        self.act = _create_act(act_layer, act_kwargs=act_kwargs, inplace=inplace, apply_act=apply_act)\n\n        self._fast_norm = is_fast_norm()\n\n    def forward(self, x):\n        if self._fast_norm:\n            x = fast_group_norm(x, self.num_groups, self.weight, self.bias, self.eps)\n        else:\n            x = F.group_norm(x, self.num_groups, self.weight, self.bias, self.eps)\n        x = self.drop(x)\n        x = self.act(x)\n        return x\n\n\nclass LayerNormAct(nn.LayerNorm):\n    _fast_norm: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            normalization_shape: Union[int, List[int], torch.Size],\n            eps: float = 1e-5,\n            affine: bool = True,\n            apply_act: bool = True,\n            act_layer: LayerType = nn.ReLU,\n            act_kwargs: Dict[str, Any] = None,\n            inplace: bool = True,\n            drop_layer: Optional[Type[nn.Module]] = None,\n            **kwargs,\n    ):\n        super().__init__(normalization_shape, eps=eps, elementwise_affine=affine, **kwargs)\n        self.drop = drop_layer() if drop_layer is not None else nn.Identity()\n        self.act = _create_act(act_layer, act_kwargs=act_kwargs, inplace=inplace, apply_act=apply_act)\n\n        self._fast_norm = is_fast_norm()\n\n    def forward(self, x):\n        if self._fast_norm:\n            x = fast_layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)\n        else:\n            x = F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)\n        x = self.drop(x)\n        x = self.act(x)\n        return x\n\n\nclass LayerNormActFp32(nn.LayerNorm):\n\n    def __init__(\n            self,\n            normalization_shape: Union[int, List[int], torch.Size],\n            eps: float = 1e-5,\n     
       affine: bool = True,\n            apply_act: bool = True,\n            act_layer: LayerType = nn.ReLU,\n            act_kwargs: Dict[str, Any] = None,\n            inplace: bool = True,\n            drop_layer: Optional[Type[nn.Module]] = None,\n            **kwargs,\n    ):\n        super().__init__(normalization_shape, eps=eps, elementwise_affine=affine, **kwargs)\n        self.drop = drop_layer() if drop_layer is not None else nn.Identity()\n        self.act = _create_act(act_layer, act_kwargs=act_kwargs, inplace=inplace, apply_act=apply_act)\n\n    def forward(self, x):\n        weight = self.weight.float() if self.weight is not None else None\n        bias = self.bias.float() if self.bias is not None else None\n        x = F.layer_norm(x.float(), self.normalized_shape, weight, bias, self.eps).to(x.dtype)\n        x = self.drop(x)\n        x = self.act(x)\n        return x\n\n\nclass LayerNormAct2d(nn.LayerNorm):\n    _fast_norm: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            num_channels: int,\n            eps: float = 1e-5,\n            affine: bool = True,\n            apply_act: bool = True,\n            act_layer: LayerType = nn.ReLU,\n            act_kwargs: Dict[str, Any] = None,\n            inplace: bool = True,\n            drop_layer: Optional[Type[nn.Module]] = None,\n            **kwargs,\n    ):\n        super().__init__(num_channels, eps=eps, elementwise_affine=affine, **kwargs)\n        self.drop = drop_layer() if drop_layer is not None else nn.Identity()\n        self.act = _create_act(act_layer, act_kwargs=act_kwargs, inplace=inplace, apply_act=apply_act)\n        self._fast_norm = is_fast_norm()\n\n    def forward(self, x):\n        x = x.permute(0, 2, 3, 1)\n        if self._fast_norm:\n            x = fast_layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)\n        else:\n            x = F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)\n        x = x.permute(0, 
3, 1, 2)\n        x = self.drop(x)\n        x = self.act(x)\n        return x\n\n\nclass LayerNormAct2dFp32(nn.LayerNorm):\n\n    def __init__(\n            self,\n            num_channels: int,\n            eps: float = 1e-5,\n            affine: bool = True,\n            apply_act: bool = True,\n            act_layer: LayerType = nn.ReLU,\n            act_kwargs: Dict[str, Any] = None,\n            inplace: bool = True,\n            drop_layer: Optional[Type[nn.Module]] = None,\n            **kwargs,\n    ):\n        super().__init__(num_channels, eps=eps, elementwise_affine=affine, **kwargs)\n        self.drop = drop_layer() if drop_layer is not None else nn.Identity()\n        self.act = _create_act(act_layer, act_kwargs=act_kwargs, inplace=inplace, apply_act=apply_act)\n\n    def forward(self, x):\n        x = x.permute(0, 2, 3, 1)\n        weight = self.weight.float() if self.weight is not None else None\n        bias = self.bias.float() if self.bias is not None else None\n        x = F.layer_norm(x.float(), self.normalized_shape, weight, bias, self.eps).to(x.dtype)\n        x = x.permute(0, 3, 1, 2)\n        x = self.drop(x)\n        x = self.act(x)\n        return x\n\n\nclass RmsNormAct(RmsNorm):\n    \"\"\" RMSNorm + Activation for '2D' NCHW tensors\n\n    NOTE: It's currently (2025-05-10) faster to use an eager 2d kernel that does reduction\n    on dim=1 than to permute and use internal PyTorch F.rms_norm, this may change if something\n    like https://github.com/pytorch/pytorch/pull/150576 lands.\n    \"\"\"\n    def __init__(\n            self,\n            num_channels: int,\n            eps: float = 1e-6,\n            affine: bool = True,\n            apply_act: bool = True,\n            act_layer: LayerType = nn.ReLU,\n            act_kwargs: Dict[str, Any] = None,\n            inplace: bool = True,\n            drop_layer: Optional[Type[nn.Module]] = None,\n            **kwargs,\n    ):\n        super().__init__(channels=num_channels, eps=eps, 
affine=affine, **kwargs)\n        self.drop = drop_layer() if drop_layer is not None else nn.Identity()\n        self.act = _create_act(act_layer, act_kwargs=act_kwargs, inplace=inplace, apply_act=apply_act)\n        self._fast_norm = is_fast_norm()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        if self._fast_norm:\n            x = fast_rms_norm(x, self.normalized_shape, self.weight, self.eps)\n        else:\n            x = rms_norm(x, self.normalized_shape, self.weight, self.eps)\n        x = self.drop(x)\n        x = self.act(x)\n        return x\n\n\nclass RmsNormActFp32(RmsNorm):\n    \"\"\" RMSNorm + Activation for '2D' NCHW tensors\n\n    NOTE: It's currently (2025-05-10) faster to use an eager 2d kernel that does reduction\n    on dim=1 than to permute and use internal PyTorch F.rms_norm, this may change if something\n    like https://github.com/pytorch/pytorch/pull/150576 lands.\n    \"\"\"\n    def __init__(\n            self,\n            num_channels: int,\n            eps: float = 1e-6,\n            affine: bool = True,\n            apply_act: bool = True,\n            act_layer: LayerType = nn.ReLU,\n            act_kwargs: Dict[str, Any] = None,\n            inplace: bool = True,\n            drop_layer: Optional[Type[nn.Module]] = None,\n            **kwargs,\n    ):\n        super().__init__(channels=num_channels, eps=eps, affine=affine, **kwargs)\n        self.drop = drop_layer() if drop_layer is not None else nn.Identity()\n        self.act = _create_act(act_layer, act_kwargs=act_kwargs, inplace=inplace, apply_act=apply_act)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        weight = self.weight.float() if self.weight is not None else None\n        x = rms_norm(x.float(), self.normalized_shape, weight, self.eps).to(x.dtype)\n        x = self.drop(x)\n        x = self.act(x)\n        return x\n\n\nclass RmsNormAct2d(RmsNorm2d):\n    \"\"\" RMSNorm + Activation for '2D' NCHW tensors\n\n    NOTE: It's currently 
(2025-05-10) faster to use an eager 2d kernel that does reduction\n    on dim=1 than to permute and use internal PyTorch F.rms_norm, this may change if something\n    like https://github.com/pytorch/pytorch/pull/150576 lands.\n    \"\"\"\n    def __init__(\n            self,\n            num_channels: int,\n            eps: float = 1e-6,\n            affine: bool = True,\n            apply_act: bool = True,\n            act_layer: LayerType = nn.ReLU,\n            act_kwargs: Dict[str, Any] = None,\n            inplace: bool = True,\n            drop_layer: Optional[Type[nn.Module]] = None,\n            **kwargs,\n    ):\n        super().__init__(channels=num_channels, eps=eps, affine=affine, **kwargs)\n        self.drop = drop_layer() if drop_layer is not None else nn.Identity()\n        self.act = _create_act(act_layer, act_kwargs=act_kwargs, inplace=inplace, apply_act=apply_act)\n        self._fast_norm = is_fast_norm()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        if self._fast_norm:\n            x = fast_rms_norm2d(x, self.normalized_shape, self.weight, self.eps)\n        else:\n            x = rms_norm2d(x, self.normalized_shape, self.weight, self.eps)\n        x = self.drop(x)\n        x = self.act(x)\n        return x\n\n\nclass RmsNormAct2dFp32(RmsNorm2d):\n    \"\"\" RMSNorm + Activation for '2D' NCHW tensors\n\n    NOTE: It's currently (2025-05-10) faster to use an eager 2d kernel that does reduction\n    on dim=1 than to permute and use internal PyTorch F.rms_norm, this may change if something\n    like https://github.com/pytorch/pytorch/pull/150576 lands.\n    \"\"\"\n    def __init__(\n            self,\n            num_channels: int,\n            eps: float = 1e-6,\n            affine: bool = True,\n            apply_act: bool = True,\n            act_layer: LayerType = nn.ReLU,\n            act_kwargs: Dict[str, Any] = None,\n            inplace: bool = True,\n            drop_layer: Optional[Type[nn.Module]] = None,\n          
  **kwargs,\n    ):\n        super().__init__(channels=num_channels, eps=eps, affine=affine, **kwargs)\n        self.drop = drop_layer() if drop_layer is not None else nn.Identity()\n        self.act = _create_act(act_layer, act_kwargs=act_kwargs, inplace=inplace, apply_act=apply_act)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        weight = self.weight.float() if self.weight is not None else None\n        x = rms_norm2d(x.float(), self.normalized_shape, weight, self.eps).to(x.dtype)\n        x = self.drop(x)\n        x = self.act(x)\n        return x\n"
  },
  {
    "path": "timm/layers/other_pool.py",
    "content": "\"\"\" Non-Local Attention Pooling Layers\n\nA collection of global pooling layers that go beyond simple avg/max pooling.\n\nLSEPool - LogSumExp pooling, a smooth approximation between avg and max pooling\nSimPool - Attention-based pooling from 'Keep It SimPool' (ICCV 2023)\n\nBased on implementations from:\n* LSE Pooling: custom implementation by Bill Psomas\n* SimPool: https://arxiv.org/abs/2309.06891 - 'Keep It SimPool: Who Said Supervised Transformers\n    Suffer from Attention Deficit?' by Bill Psomas et al.\n\nHacked together by / Copyright 2024 Ross Wightman, original code by Bill Psomas\n\"\"\"\nfrom typing import Optional, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom .config import use_fused_attn\n\n\nclass LsePlus2d(nn.Module):\n    \"\"\"LogSumExp (LSE) Pooling for 2D inputs.\n\n    A smooth approximation to max pooling that provides a learnable interpolation between\n    average and max pooling. When r is large, LSE approaches max pooling; when r is small,\n    it approaches average pooling.\n\n    Implements: (1/r) * log((1/n) * sum(exp(r * (x - x_max)))) + x_max\n\n    The x_max subtraction provides numerical stability.\n    \"\"\"\n\n    def __init__(\n            self,\n            r: float = 10.0,\n            r_learnable: bool = True,\n            flatten: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            r: Initial value of the pooling parameter. 
Higher = closer to max pooling.\n            r_learnable: If True, r is a learnable parameter.\n            flatten: If True, flatten spatial dims in output.\n        \"\"\"\n        super().__init__()\n        if r_learnable:\n            self.r = nn.Parameter(torch.tensor(r, device=device, dtype=dtype))\n        else:\n            self.register_buffer('r', torch.tensor(r, device=device, dtype=dtype))\n        self.flatten = flatten\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x_max = F.adaptive_max_pool2d(x, 1)\n        exp_x = torch.exp(self.r * (x - x_max))\n        sum_exp = exp_x.mean(dim=(2, 3), keepdim=True)\n        out = x_max + (1.0 / self.r) * torch.log(sum_exp)\n        if self.flatten:\n            out = out.flatten(1)\n        return out\n\n\nclass LsePlus1d(nn.Module):\n    \"\"\"LogSumExp (LSE) Pooling for sequence (NLC) inputs.\n\n    A smooth approximation to max pooling that provides a learnable interpolation between\n    average and max pooling. When r is large, LSE approaches max pooling; when r is small,\n    it approaches average pooling.\n    \"\"\"\n\n    def __init__(\n            self,\n            r: float = 10.0,\n            r_learnable: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            r: Initial value of the pooling parameter. 
Higher = closer to max pooling.\n            r_learnable: If True, r is a learnable parameter.\n        \"\"\"\n        super().__init__()\n        if r_learnable:\n            self.r = nn.Parameter(torch.tensor(r, device=device, dtype=dtype))\n        else:\n            self.register_buffer('r', torch.tensor(r, device=device, dtype=dtype))\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        # x: (B, N, C)\n        x_max = x.max(dim=1, keepdim=True).values\n        exp_x = torch.exp(self.r * (x - x_max))\n        sum_exp = exp_x.mean(dim=1, keepdim=True)\n        out = x_max + (1.0 / self.r) * torch.log(sum_exp)\n        return out.squeeze(1)  # (B, C)\n\n\nclass SimPool2d(nn.Module):\n    \"\"\"SimPool: Simple Attention-Based Pooling for 2D (NCHW) inputs.\n\n    From 'Keep It SimPool: Who Said Supervised Transformers Suffer from Attention Deficit?'\n    https://arxiv.org/abs/2309.06891\n\n    Uses GAP as query initialization and applies cross-attention between the GAP query\n    and spatial features to produce a weighted pooled representation.\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 1,\n            qkv_bias: bool = False,\n            qk_norm: bool = False,\n            gamma: Optional[float] = None,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            dim: Input feature dimension (number of channels).\n            num_heads: Number of attention heads.\n            qkv_bias: If True, add bias to query and key projections.\n            qk_norm: If True, apply normalization to queries and keys.\n            gamma: If provided, apply power normalization to values with this exponent.\n            norm_layer: Normalization layer for patches and optionally qk_norm.\n            flatten: If True, flatten output to (B, C).\n        \"\"\"\n        
super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert dim % num_heads == 0, 'dim must be divisible by num_heads'\n        self.num_heads = num_heads\n        self.head_dim = dim // num_heads\n        self.scale = self.head_dim ** -0.5\n        self.gamma = gamma\n        self.fused_attn = use_fused_attn()\n\n        norm_layer = norm_layer or nn.LayerNorm\n        self.norm = norm_layer(dim, **dd)\n        self.q = nn.Linear(dim, dim, bias=qkv_bias, **dd)\n        self.k = nn.Linear(dim, dim, bias=qkv_bias, **dd)\n        if qk_norm:\n            self.q_norm = norm_layer(self.head_dim, **dd)\n            self.k_norm = norm_layer(self.head_dim, **dd)\n        else:\n            self.q_norm = nn.Identity()\n            self.k_norm = nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        B, C, H, W = x.shape\n        N = H * W\n\n        # Reshape to (B, N, C) for attention\n        x = x.flatten(2).transpose(1, 2)  # (B, N, C)\n\n        # GAP as query initialization\n        q = x.mean(dim=1, keepdim=True)  # (B, 1, C)\n\n        # Normalize patches for keys and values\n        x_norm = self.norm(x)\n\n        # Project query and keys\n        q = self.q(q).reshape(B, 1, self.num_heads, self.head_dim).transpose(1, 2)\n        k = self.k(x_norm).reshape(B, N, self.num_heads, self.head_dim).transpose(1, 2)\n        v = x_norm.reshape(B, N, self.num_heads, self.head_dim).transpose(1, 2)\n\n        q, k = self.q_norm(q), self.k_norm(k)\n\n        if self.gamma is not None:\n            # Power normalization on values\n            v_min = v.amin(dim=-2, keepdim=True)\n            v_shifted = v - v_min + 1e-6\n            if self.fused_attn:\n                attn_out = F.scaled_dot_product_attention(q, k, v_shifted.pow(self.gamma))\n            else:\n                attn = (q * self.scale) @ k.transpose(-2, -1)\n                attn = attn.softmax(dim=-1)\n                attn_out = attn @ 
v_shifted.pow(self.gamma)\n            out = attn_out.pow(1.0 / self.gamma)\n        else:\n            if self.fused_attn:\n                out = F.scaled_dot_product_attention(q, k, v)\n            else:\n                attn = (q * self.scale) @ k.transpose(-2, -1)\n                attn = attn.softmax(dim=-1)\n                out = attn @ v\n\n        # (B, num_heads, 1, head_dim) -> (B, C) or (B, C)\n        out = out.transpose(1, 2).reshape(B, C)\n        return out\n\n\nclass SimPool1d(nn.Module):\n    \"\"\"SimPool: Simple Attention-Based Pooling for sequence (NLC) inputs.\n\n    From 'Keep It SimPool: Who Said Supervised Transformers Suffer from Attention Deficit?'\n    https://arxiv.org/abs/2309.06891\n\n    Uses GAP as query initialization and applies cross-attention between the GAP query\n    and sequence tokens to produce a weighted pooled representation.\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 1,\n            qkv_bias: bool = False,\n            qk_norm: bool = False,\n            gamma: Optional[float] = None,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            dim: Input feature dimension.\n            num_heads: Number of attention heads.\n            qkv_bias: If True, add bias to query and key projections.\n            qk_norm: If True, apply normalization to queries and keys.\n            gamma: If provided, apply power normalization to values with this exponent.\n            norm_layer: Normalization layer for tokens and optionally qk_norm.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert dim % num_heads == 0, 'dim must be divisible by num_heads'\n        self.num_heads = num_heads\n        self.head_dim = dim // num_heads\n        self.scale = self.head_dim ** -0.5\n        
self.gamma = gamma\n        self.fused_attn = use_fused_attn()\n\n        norm_layer = norm_layer or nn.LayerNorm\n        self.norm = norm_layer(dim, **dd)\n        self.q = nn.Linear(dim, dim, bias=qkv_bias, **dd)\n        self.k = nn.Linear(dim, dim, bias=qkv_bias, **dd)\n        if qk_norm:\n            self.q_norm = norm_layer(self.head_dim, **dd)\n            self.k_norm = norm_layer(self.head_dim, **dd)\n        else:\n            self.q_norm = nn.Identity()\n            self.k_norm = nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        B, N, C = x.shape\n\n        # GAP as query initialization\n        q = x.mean(dim=1, keepdim=True)  # (B, 1, C)\n\n        # Normalize tokens for keys and values\n        x_norm = self.norm(x)\n\n        # Project query and keys\n        q = self.q(q).reshape(B, 1, self.num_heads, self.head_dim).transpose(1, 2)\n        k = self.k(x_norm).reshape(B, N, self.num_heads, self.head_dim).transpose(1, 2)\n        v = x_norm.reshape(B, N, self.num_heads, self.head_dim).transpose(1, 2)\n\n        q, k = self.q_norm(q), self.k_norm(k)\n\n        if self.gamma is not None:\n            # Power normalization on values\n            v_min = v.amin(dim=-2, keepdim=True)\n            v_shifted = v - v_min + 1e-6\n            if self.fused_attn:\n                attn_out = F.scaled_dot_product_attention(q, k, v_shifted.pow(self.gamma))\n            else:\n                attn = (q * self.scale) @ k.transpose(-2, -1)\n                attn = attn.softmax(dim=-1)\n                attn_out = attn @ v_shifted.pow(self.gamma)\n            out = attn_out.pow(1.0 / self.gamma)\n        else:\n            if self.fused_attn:\n                out = F.scaled_dot_product_attention(q, k, v)\n            else:\n                attn = (q * self.scale) @ k.transpose(-2, -1)\n                attn = attn.softmax(dim=-1)\n                out = attn @ v\n\n        # (B, num_heads, 1, head_dim) -> (B, C)\n        out = 
out.transpose(1, 2).reshape(B, C)\n        return out\n"
  },
  {
    "path": "timm/layers/padding.py",
    "content": "\"\"\" Padding Helpers\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport math\nfrom typing import List, Tuple, Union\n\nimport torch\nimport torch.nn.functional as F\n\nfrom .helpers import to_2tuple\n\n\n# Calculate symmetric padding for a convolution\ndef get_padding(kernel_size: int, stride: int = 1, dilation: int = 1, **_) -> Union[int, List[int]]:\n    if any([isinstance(v, (tuple, list)) for v in [kernel_size, stride, dilation]]):\n        kernel_size, stride, dilation = to_2tuple(kernel_size), to_2tuple(stride), to_2tuple(dilation)\n        return [get_padding(*a) for a in zip(kernel_size, stride, dilation)]\n    padding = ((stride - 1) + dilation * (kernel_size - 1)) // 2\n    return padding\n\n\n# Calculate asymmetric TensorFlow-like 'SAME' padding for a convolution\ndef get_same_padding(x: int, kernel_size: int, stride: int, dilation: int):\n    if isinstance(x, torch.Tensor):\n        return torch.clamp(((x / stride).ceil() - 1) * stride + (kernel_size - 1) * dilation + 1 - x, min=0)\n    else:\n        return max((math.ceil(x / stride) - 1) * stride + (kernel_size - 1) * dilation + 1 - x, 0)\n\n\n# Can SAME padding for given args be done statically?\ndef is_static_pad(kernel_size: int, stride: int = 1, dilation: int = 1, **_):\n    if any([isinstance(v, (tuple, list)) for v in [kernel_size, stride, dilation]]):\n        kernel_size, stride, dilation = to_2tuple(kernel_size), to_2tuple(stride), to_2tuple(dilation)\n        return all([is_static_pad(*a) for a in zip(kernel_size, stride, dilation)])\n    return stride == 1 and (dilation * (kernel_size - 1)) % 2 == 0\n\n\ndef pad_same_arg(\n        input_size: List[int],\n        kernel_size: List[int],\n        stride: List[int],\n        dilation: List[int] = (1, 1),\n) -> List[int]:\n    ih, iw = input_size\n    kh, kw = kernel_size\n    pad_h = get_same_padding(ih, kh, stride[0], dilation[0])\n    pad_w = get_same_padding(iw, kw, stride[1], dilation[1])\n    return 
[pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2]\n\n\n# Dynamically pad input x with 'SAME' padding for conv with specified args\ndef pad_same(\n        x,\n        kernel_size: List[int],\n        stride: List[int],\n        dilation: List[int] = (1, 1),\n        value: float = 0,\n):\n    ih, iw = x.size()[-2:]\n    pad_h = get_same_padding(ih, kernel_size[0], stride[0], dilation[0])\n    pad_w = get_same_padding(iw, kernel_size[1], stride[1], dilation[1])\n    x = F.pad(x, (pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2), value=value)\n    return x\n\n\ndef get_padding_value(padding, kernel_size, **kwargs) -> Tuple[Tuple, bool]:\n    dynamic = False\n    if isinstance(padding, str):\n        # for any string padding, the padding will be calculated for you, one of three ways\n        padding = padding.lower()\n        if padding == 'same':\n            # TF compatible 'SAME' padding, has a performance and GPU memory allocation impact\n            if is_static_pad(kernel_size, **kwargs):\n                # static case, no extra overhead\n                padding = get_padding(kernel_size, **kwargs)\n            else:\n                # dynamic 'SAME' padding, has runtime/GPU memory overhead\n                padding = 0\n                dynamic = True\n        elif padding == 'valid':\n            # 'VALID' padding, same as padding=0\n            padding = 0\n        else:\n            # Default to PyTorch style 'same'-ish symmetric padding\n            padding = get_padding(kernel_size, **kwargs)\n    return padding, dynamic\n"
  },
  {
    "path": "timm/layers/patch_dropout.py",
    "content": "from typing import Optional, Tuple, Union\n\nimport torch\nimport torch.nn as nn\n\n\ndef patch_dropout_forward(\n        x: torch.Tensor,\n        prob: float,\n        num_prefix_tokens: int,\n        ordered: bool,\n        training: bool,\n) -> Tuple[torch.Tensor, Optional[torch.Tensor]]:\n    \"\"\"\n    Common forward logic for patch dropout.\n\n    Args:\n        x: Input tensor of shape (B, L, D)\n        prob: Dropout probability\n        num_prefix_tokens: Number of prefix tokens to preserve\n        ordered: Whether to maintain patch order\n        training: Whether in training mode\n\n    Returns:\n        Tuple of (output tensor, keep_indices or None)\n    \"\"\"\n    if not training or prob == 0.:\n        return x, None\n\n    if num_prefix_tokens:\n        prefix_tokens, x = x[:, :num_prefix_tokens], x[:, num_prefix_tokens:]\n    else:\n        prefix_tokens = None\n\n    B = x.shape[0]\n    L = x.shape[1]\n    num_keep = max(1, int(L * (1. - prob)))\n    keep_indices = torch.argsort(torch.randn(B, L, device=x.device), dim=-1)[:, :num_keep]\n\n    if ordered:\n        # NOTE does not need to maintain patch order in typical transformer use,\n        # but possibly useful for debug / visualization\n        keep_indices = keep_indices.sort(dim=-1)[0]\n\n    x = x.gather(1, keep_indices.unsqueeze(-1).expand((-1, -1) + x.shape[2:]))\n\n    if prefix_tokens is not None:\n        x = torch.cat((prefix_tokens, x), dim=1)\n\n    return x, keep_indices\n\n\nclass PatchDropout(nn.Module):\n    \"\"\"\n    Patch Dropout without returning indices.\n    https://arxiv.org/abs/2212.00794 and https://arxiv.org/pdf/2208.07220\n    \"\"\"\n\n    def __init__(\n            self,\n            prob: float = 0.5,\n            num_prefix_tokens: int = 1,\n            ordered: bool = False,\n    ):\n        super().__init__()\n        assert 0 <= prob < 1.\n        self.prob = prob\n        self.num_prefix_tokens = num_prefix_tokens  # exclude CLS token (or 
other prefix tokens)\n        self.ordered = ordered\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        output, _ = patch_dropout_forward(\n            x,\n            self.prob,\n            self.num_prefix_tokens,\n            self.ordered,\n            self.training\n        )\n        return output\n\n\nclass PatchDropoutWithIndices(nn.Module):\n    \"\"\"\n    Patch Dropout that returns both output and keep indices.\n    https://arxiv.org/abs/2212.00794 and https://arxiv.org/pdf/2208.07220\n    \"\"\"\n\n    def __init__(\n            self,\n            prob: float = 0.5,\n            num_prefix_tokens: int = 1,\n            ordered: bool = False,\n    ):\n        super().__init__()\n        assert 0 <= prob < 1.\n        self.prob = prob\n        self.num_prefix_tokens = num_prefix_tokens  # exclude CLS token (or other prefix tokens)\n        self.ordered = ordered\n\n    def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor, Optional[torch.Tensor]]:\n        return patch_dropout_forward(\n            x,\n            self.prob,\n            self.num_prefix_tokens,\n            self.ordered,\n            self.training\n        )\n"
  },
  {
    "path": "timm/layers/patch_embed.py",
    "content": "\"\"\" Image to Patch Embedding using Conv2d\n\nA convolution based approach to patchifying a 2D image w/ embedding projection.\n\nBased on code in:\n  * https://github.com/google-research/vision_transformer\n  * https://github.com/google-research/big_vision/tree/main/big_vision\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport logging\nimport math\nfrom typing import Callable, Dict, List, Optional, Tuple, Union\n\nimport torch\nfrom torch import nn as nn\nimport torch.nn.functional as F\n\nfrom .format import Format, nchw_to\nfrom .helpers import to_2tuple\nfrom .trace_utils import _assert\n\n_logger = logging.getLogger(__name__)\n\n\nclass PatchEmbed(nn.Module):\n    \"\"\" 2D Image to Patch Embedding\n    \"\"\"\n    output_fmt: Format\n    dynamic_img_pad: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            img_size: Optional[Union[int, Tuple[int, int]]] = 224,\n            patch_size: int = 16,\n            in_chans: int = 3,\n            embed_dim: int = 768,\n            norm_layer: Optional[Callable] = None,\n            flatten: bool = True,\n            output_fmt: Optional[str] = None,\n            bias: bool = True,\n            strict_img_size: bool = True,\n            dynamic_img_pad: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.patch_size = to_2tuple(patch_size)\n        self.img_size, self.grid_size, self.num_patches = self._init_img_size(img_size)\n\n        if output_fmt is not None:\n            self.flatten = False\n            self.output_fmt = Format(output_fmt)\n        else:\n            # flatten spatial dim and transpose to channels last, kept for bwd compat\n            self.flatten = flatten\n            self.output_fmt = Format.NCHW\n        self.strict_img_size = strict_img_size\n        self.dynamic_img_pad = dynamic_img_pad\n\n        self.proj = 
nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size, bias=bias, **dd)\n        self.norm = norm_layer(embed_dim, **dd) if norm_layer else nn.Identity()\n\n    def _init_img_size(self, img_size: Union[int, Tuple[int, int]]):\n        assert self.patch_size\n        if img_size is None:\n            return None, None, None\n        img_size = to_2tuple(img_size)\n        grid_size = tuple([s // p for s, p in zip(img_size, self.patch_size)])\n        num_patches = grid_size[0] * grid_size[1]\n        return img_size, grid_size, num_patches\n\n    def set_input_size(\n            self,\n            img_size: Optional[Union[int, Tuple[int, int]]] = None,\n            patch_size: Optional[Union[int, Tuple[int, int]]] = None,\n    ):\n        new_patch_size = None\n        if patch_size is not None:\n            new_patch_size = to_2tuple(patch_size)\n        if new_patch_size is not None and new_patch_size != self.patch_size:\n            with torch.no_grad():\n                new_proj = nn.Conv2d(\n                    self.proj.in_channels,\n                    self.proj.out_channels,\n                    kernel_size=new_patch_size,\n                    stride=new_patch_size,\n                    bias=self.proj.bias is not None,\n                    device=self.proj.weight.device,\n                    dtype=self.proj.weight.dtype,\n                )\n                new_proj.weight.copy_(resample_patch_embed(self.proj.weight, new_patch_size, verbose=True))\n                if self.proj.bias is not None:\n                    new_proj.bias.copy_(self.proj.bias)\n                self.proj = new_proj\n            self.patch_size = new_patch_size\n        img_size = img_size or self.img_size\n        if img_size != self.img_size or new_patch_size is not None:\n            self.img_size, self.grid_size, self.num_patches = self._init_img_size(img_size)\n\n    def feat_ratio(self, as_scalar=True) -> Union[Tuple[int, int], int]:\n        if as_scalar:\n      
      return max(self.patch_size)\n        else:\n            return self.patch_size\n\n    def dynamic_feat_size(self, img_size: Tuple[int, int]) -> Tuple[int, int]:\n        \"\"\" Get grid (feature) size for given image size taking account of dynamic padding.\n        NOTE: must be torchscript compatible so using fixed tuple indexing\n        \"\"\"\n        if self.dynamic_img_pad:\n            return math.ceil(img_size[0] / self.patch_size[0]), math.ceil(img_size[1] / self.patch_size[1])\n        else:\n            return img_size[0] // self.patch_size[0], img_size[1] // self.patch_size[1]\n\n    def forward(self, x):\n        B, C, H, W = x.shape\n        if self.img_size is not None:\n            if self.strict_img_size:\n                _assert(H == self.img_size[0], f\"Input height ({H}) doesn't match model ({self.img_size[0]}).\")\n                _assert(W == self.img_size[1], f\"Input width ({W}) doesn't match model ({self.img_size[1]}).\")\n            elif not self.dynamic_img_pad:\n                _assert(\n                    H % self.patch_size[0] == 0,\n                    f\"Input height ({H}) should be divisible by patch size ({self.patch_size[0]}).\"\n                )\n                _assert(\n                    W % self.patch_size[1] == 0,\n                    f\"Input width ({W}) should be divisible by patch size ({self.patch_size[1]}).\"\n                )\n        if self.dynamic_img_pad:\n            pad_h = (self.patch_size[0] - H % self.patch_size[0]) % self.patch_size[0]\n            pad_w = (self.patch_size[1] - W % self.patch_size[1]) % self.patch_size[1]\n            x = F.pad(x, (0, pad_w, 0, pad_h))\n        x = self.proj(x)\n        if self.flatten:\n            x = x.flatten(2).transpose(1, 2)  # NCHW -> NLC\n        elif self.output_fmt != Format.NCHW:\n            x = nchw_to(x, self.output_fmt)\n        x = self.norm(x)\n        return x\n\n\nclass PatchEmbedWithSize(PatchEmbed):\n    \"\"\" 2D Image to Patch Embedding\n    
\"\"\"\n    output_fmt: Format\n\n    def __init__(\n            self,\n            img_size: Optional[Union[int, Tuple[int, int]]] = 224,\n            patch_size: int = 16,\n            in_chans: int = 3,\n            embed_dim: int = 768,\n            norm_layer: Optional[Callable] = None,\n            flatten: bool = True,\n            output_fmt: Optional[str] = None,\n            bias: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__(\n            img_size=img_size,\n            patch_size=patch_size,\n            in_chans=in_chans,\n            embed_dim=embed_dim,\n            norm_layer=norm_layer,\n            flatten=flatten,\n            output_fmt=output_fmt,\n            bias=bias,\n            device=device,\n            dtype=dtype,\n        )\n\n    def forward(self, x) -> Tuple[torch.Tensor, List[int]]:\n        B, C, H, W = x.shape\n        if self.img_size is not None:\n            _assert(H % self.patch_size[0] == 0, f\"Input image height ({H}) must be divisible by patch size ({self.patch_size[0]}).\")\n            _assert(W % self.patch_size[1] == 0, f\"Input image width ({W}) must be divisible by patch size ({self.patch_size[1]}).\")\n\n        x = self.proj(x)\n        feat_size = x.shape[-2:]\n        if self.flatten:\n            x = x.flatten(2).transpose(1, 2)  # NCHW -> NLC\n        elif self.output_fmt != Format.NCHW:\n            x = nchw_to(x, self.output_fmt)\n        x = self.norm(x)\n        return x, feat_size\n\n\n# FIXME to remove, keeping for comparison for now\ndef resample_patch_embed_old(\n        patch_embed,\n        new_size: List[int],\n        interpolation: str = 'bicubic',\n        antialias: bool = True,\n        verbose: bool = False,\n):\n    \"\"\"Resample the weights of the patch embedding kernel to target resolution.\n    We resample the patch embedding kernel by approximately inverting the effect\n    of patch resizing.\n\n    Code based on:\n      
https://github.com/google-research/big_vision/blob/b00544b81f8694488d5f36295aeb7972f3755ffe/big_vision/models/proj/flexi/vit.py\n\n    With this resizing, we can for example load a B/8 filter into a B/16 model\n    and, on 2x larger input image, the result will match.\n\n    Args:\n        patch_embed: original parameter to be resized.\n        new_size (tuple(int, int): target shape (height, width)-only.\n        interpolation (str): interpolation for resize\n        antialias (bool): use anti-aliasing filter in resize\n        verbose (bool): log operation\n    Returns:\n        Resized patch embedding kernel.\n    \"\"\"\n    import numpy as np\n    try:\n        from torch import vmap\n    except ImportError:\n        from functorch import vmap\n\n    assert len(patch_embed.shape) == 4, \"Four dimensions expected\"\n    assert len(new_size) == 2, \"New shape should only be hw\"\n    old_size = patch_embed.shape[-2:]\n    if tuple(old_size) == tuple(new_size):\n        return patch_embed\n\n    if verbose:\n        _logger.info(f\"Resize patch embedding {patch_embed.shape} to {new_size}, w/ {interpolation} interpolation.\")\n\n    def resize(x_np, _new_size):\n        x_tf = torch.Tensor(x_np)[None, None, ...]\n        x_upsampled = F.interpolate(\n            x_tf, size=_new_size, mode=interpolation, antialias=antialias)[0, 0, ...].numpy()\n        return x_upsampled\n\n    def get_resize_mat(_old_size, _new_size):\n        mat = []\n        for i in range(np.prod(_old_size)):\n            basis_vec = np.zeros(_old_size)\n            basis_vec[np.unravel_index(i, _old_size)] = 1.\n            mat.append(resize(basis_vec, _new_size).reshape(-1))\n        return np.stack(mat).T\n\n    resize_mat = get_resize_mat(old_size, new_size)\n    resize_mat_pinv = torch.tensor(np.linalg.pinv(resize_mat.T), device=patch_embed.device)\n\n    def resample_kernel(kernel):\n        resampled_kernel = resize_mat_pinv @ kernel.reshape(-1)\n        return 
resampled_kernel.reshape(new_size)\n\n    v_resample_kernel = vmap(vmap(resample_kernel, 0, 0), 1, 1)\n    orig_dtype = patch_embed.dtype\n    patch_embed = patch_embed.float()\n    patch_embed = v_resample_kernel(patch_embed)\n    patch_embed = patch_embed.to(orig_dtype)\n    return patch_embed\n\n\nDTYPE_INTERMEDIATE = torch.float32\n\n\ndef _compute_resize_matrix(\n        old_size: Tuple[int, int],\n        new_size: Tuple[int, int],\n        interpolation: str,\n        antialias: bool,\n        device: torch.device,\n        dtype: torch.dtype = DTYPE_INTERMEDIATE\n) -> torch.Tensor:\n    \"\"\"Computes the resize matrix basis vectors and interpolates them to new_size.\"\"\"\n    old_h, old_w = old_size\n    new_h, new_w = new_size\n    old_total = old_h * old_w\n    new_total = new_h * new_w\n\n    eye_matrix = torch.eye(old_total, device=device, dtype=dtype)\n    basis_vectors_batch = eye_matrix.reshape(old_total, 1, old_h, old_w)\n    resized_basis_vectors_batch = F.interpolate(\n        basis_vectors_batch,\n        size=new_size,\n        mode=interpolation,\n        antialias=antialias,\n        align_corners=False\n    ) # Output shape: (old_total, 1, new_h, new_w)\n    resize_matrix = resized_basis_vectors_batch.squeeze(1).permute(1, 2, 0).reshape(new_total, old_total)\n    return resize_matrix # Shape: (new_total, old_total)\n\n\ndef _apply_resampling(\n        patch_embed: torch.Tensor,\n        pinv_matrix: torch.Tensor,\n        new_size_tuple: Tuple[int, int],\n        orig_dtype: torch.dtype,\n        intermediate_dtype: torch.dtype = DTYPE_INTERMEDIATE\n) -> torch.Tensor:\n    \"\"\" Simplified resampling w/o vmap use.\n    As proposed by https://github.com/stas-sl\n    \"\"\"\n    c_out, c_in, *_ = patch_embed.shape\n    patch_embed = patch_embed.reshape(c_out, c_in, -1).to(dtype=intermediate_dtype)\n    pinv_matrix = pinv_matrix.to(dtype=intermediate_dtype)\n    resampled_patch_embed = patch_embed @ pinv_matrix  # (C_out, C_in, P_old * P_old) 
@ (P_old * P_old, P_new * P_new)\n    resampled_patch_embed = resampled_patch_embed.reshape(c_out, c_in, *new_size_tuple).to(dtype=orig_dtype)\n    return resampled_patch_embed\n\n\ndef resample_patch_embed(\n        patch_embed: torch.Tensor,\n        new_size: List[int],\n        interpolation: str = 'bicubic',\n        antialias: bool = True,\n        verbose: bool = False,\n):\n    \"\"\" Standalone function (computes matrix on each call). \"\"\"\n    assert len(patch_embed.shape) == 4, \"Input tensor should be 4D (out_ch, in_ch, h, w)\"\n    assert len(new_size) == 2, \"New shape should only be hw (height, width)\"\n\n    old_size_tuple: Tuple[int, int] = tuple(patch_embed.shape[-2:])\n    new_size_tuple: Tuple[int, int] = tuple(new_size)\n\n    if old_size_tuple == new_size_tuple:\n        return patch_embed\n\n    device = patch_embed.device\n    orig_dtype = patch_embed.dtype\n\n    resize_mat = _compute_resize_matrix(\n        old_size_tuple, new_size_tuple, interpolation, antialias, device, DTYPE_INTERMEDIATE\n    )\n    pinv_matrix = torch.linalg.pinv(resize_mat)  # Calculates the pseudoinverse matrix used for resampling\n    resampled_patch_embed = _apply_resampling(\n        patch_embed, pinv_matrix, new_size_tuple, orig_dtype, DTYPE_INTERMEDIATE\n    )\n    return resampled_patch_embed\n\n\nclass PatchEmbedResamplerFixedOrigSize(nn.Module):\n    \"\"\"\n    Resample patch embedding weights from a fixed original size,\n    caching the pseudoinverse matrix based on the target size.\n    \"\"\"\n    def __init__(\n            self,\n            orig_size: Tuple[int, int],\n            interpolation: str = 'bicubic',\n            antialias: bool = True\n    ):\n        \"\"\"\n        Args:\n            orig_size (Tuple[int, int]): The expected original (height, width) of input patch_embed tensors.\n            interpolation (str): Interpolation mode.\n            antialias (bool): Use anti-aliasing filter in resize.\n        \"\"\"\n        
super().__init__()\n        assert isinstance(orig_size, tuple) and len(orig_size) == 2, \\\n            \"`orig_size` must be a tuple of (height, width)\"\n        self.orig_size = orig_size # expected original size\n        self.interpolation = interpolation\n        self.antialias = antialias\n        # Cache map key is the target new_size tuple\n        self._pinv_cache_map: Dict[Tuple[int, int], str] = {}\n\n    def _get_or_create_pinv_matrix(\n            self,\n            new_size: Tuple[int, int],\n            device: torch.device,\n            dtype: torch.dtype = DTYPE_INTERMEDIATE\n    ) -> torch.Tensor:\n        \"\"\"Retrieves the cached pinv matrix or computes and caches it for the given new_size.\"\"\"\n        cache_key = new_size\n        buffer_name = self._pinv_cache_map.get(cache_key)\n\n        if buffer_name and hasattr(self, buffer_name):\n            pinv_matrix = getattr(self, buffer_name)\n            if pinv_matrix.device == device and pinv_matrix.dtype == dtype:\n                 return pinv_matrix\n\n        # Calculate the matrix if not cached or needs update\n        resize_mat = _compute_resize_matrix(\n            self.orig_size, new_size, self.interpolation, self.antialias, device, dtype\n        )\n        pinv_matrix = torch.linalg.pinv(resize_mat)  # Calculates the pseudoinverse matrix used for resampling\n\n        # Cache using register_buffer\n        buffer_name = f\"pinv_{new_size[0]}x{new_size[1]}\"\n        if hasattr(self, buffer_name):\n             delattr(self, buffer_name)\n        self.register_buffer(buffer_name, pinv_matrix)\n        self._pinv_cache_map[cache_key] = buffer_name # Map new_size key to buffer name\n\n        return pinv_matrix\n\n    def forward(self, patch_embed: torch.Tensor, new_size: List[int]) -> torch.Tensor:\n        \"\"\" Resamples the patch embedding weights to new_size.\n\n        Args:\n            patch_embed (torch.Tensor): Original weights (out_ch, in_ch, H_orig, W_orig).\n           
 new_size (List[int]): Target [height, width].\n\n        Returns:\n            torch.Tensor: Resampled weights.\n        \"\"\"\n        assert len(patch_embed.shape) == 4\n        assert len(new_size) == 2\n\n        # Input Validation\n        input_size = tuple(patch_embed.shape[-2:])\n        assert input_size == self.orig_size, \\\n            f\"Input patch_embed spatial size {input_size} does not match \" \\\n            f\"module's expected original size {self.orig_size}\"\n\n        new_size_tuple: Tuple[int, int] = tuple(new_size)\n\n        # Check no-op case against self.orig_size\n        if self.orig_size == new_size_tuple:\n            return patch_embed\n\n        device = patch_embed.device\n        orig_dtype = patch_embed.dtype\n\n        # Get or compute the required pseudoinverse matrix\n        pinv_matrix = self._get_or_create_pinv_matrix(new_size_tuple, device)\n\n        # Apply the resampling\n        resampled_patch_embed = _apply_resampling(patch_embed, pinv_matrix, new_size_tuple, orig_dtype)\n\n        return resampled_patch_embed\n\n\nclass PatchEmbedInterpolator(nn.Module):\n    \"\"\"Dynamically interpolates patch embedding weights for variable patch sizes.\n\n    This module wraps patch embedding weight resampling functionality to support\n    on-the-fly patch size variation during training. 
It handles both Conv2d and\n    Linear patch embeddings.\n\n    Args:\n        base_patch_size: The original patch size the model was initialized with\n        in_chans: Number of input channels\n        embed_dim: Embedding dimension\n        interpolation: Interpolation mode for resampling\n        antialias: Whether to use antialiasing during interpolation\n    \"\"\"\n\n    def __init__(\n            self,\n            base_patch_size: Tuple[int, int],\n            in_chans: int = 3,\n            embed_dim: int = 768,\n            interpolation: str = 'bicubic',\n            antialias: bool = True,\n    ):\n        super().__init__()\n        self.base_patch_size = base_patch_size\n        self.in_chans = in_chans\n        self.embed_dim = embed_dim\n        self.interpolation = interpolation\n        self.antialias = antialias\n\n    def resample_linear_weight(\n            self,\n            weight: torch.Tensor,\n            target_patch_size: Tuple[int, int],\n    ) -> torch.Tensor:\n        \"\"\"Resample linear patch embedding weights for a new patch size.\n\n        Args:\n            weight: Linear weight tensor of shape [embed_dim, patch_h * patch_w * in_chans]\n            target_patch_size: Target (patch_h, patch_w) to resample to\n\n        Returns:\n            Resampled weight tensor\n        \"\"\"\n        if target_patch_size == self.base_patch_size:\n            return weight\n\n        embed_dim = weight.shape[0]\n        base_ph, base_pw = self.base_patch_size\n        target_ph, target_pw = target_patch_size\n\n        # Reshape linear weight to conv2d format\n        # [embed_dim, ph*pw*C] -> [embed_dim, C, ph, pw]\n        weight_conv = weight.reshape(embed_dim, base_ph, base_pw, self.in_chans)\n        weight_conv = weight_conv.permute(0, 3, 1, 2)\n\n        # Resample using existing function\n        weight_conv_resampled = resample_patch_embed(\n            weight_conv,\n            new_size=[target_ph, target_pw],\n            
interpolation=self.interpolation,\n            antialias=self.antialias,\n            verbose=False,\n        )\n\n        # Reshape back to linear format\n        # [embed_dim, C, ph, pw] -> [embed_dim, ph*pw*C]\n        weight_resampled = weight_conv_resampled.permute(0, 2, 3, 1)\n        weight_resampled = weight_resampled.reshape(embed_dim, -1)\n\n        return weight_resampled\n\n    def resample_conv_weight(\n            self,\n            weight: torch.Tensor,\n            target_patch_size: Tuple[int, int],\n    ) -> torch.Tensor:\n        \"\"\"Resample conv2d patch embedding weights for a new patch size.\n\n        Args:\n            weight: Conv2d weight tensor of shape [embed_dim, in_chans, patch_h, patch_w]\n            target_patch_size: Target (patch_h, patch_w) to resample to\n\n        Returns:\n            Resampled weight tensor\n        \"\"\"\n        if target_patch_size == self.base_patch_size:\n            return weight\n\n        # Resample using existing function\n        weight_resampled = resample_patch_embed(\n            weight,\n            new_size=list(target_patch_size),\n            interpolation=self.interpolation,\n            antialias=self.antialias,\n            verbose=False,\n        )\n\n        return weight_resampled\n\n    def forward(\n            self,\n            patches: torch.Tensor,\n            proj_weight: torch.Tensor,\n            proj_bias: Optional[torch.Tensor] = None,\n            patch_size: Optional[Tuple[int, int]] = None,\n            is_linear: bool = True,\n    ) -> torch.Tensor:\n        \"\"\"Apply patch embedding with dynamic weight resampling.\n\n        Args:\n            patches: Input patches\n                - For linear mode with resampling: [B, N, Ph, Pw, C]\n                - For linear mode without resampling: [B, N, Ph*Pw*C]\n                - For conv mode: [B, C, H, W]\n            proj_weight: Original projection weight\n            proj_bias: Optional projection bias\n            
patch_size: Current patch size (if None, uses base_patch_size)\n            is_linear: Whether using linear (True) or conv2d (False) projection\n\n        Returns:\n            Embedded patches\n        \"\"\"\n        if patch_size is None:\n            patch_size = self.base_patch_size\n\n        if is_linear:\n            if patch_size != self.base_patch_size:\n                # Need to resample - expects unflattened patches\n                assert patches.ndim == 5, \"Patches must be [B, N, Ph, Pw, C] for resampling\"\n                B, N, Ph, Pw, C = patches.shape\n\n                # Resample the weight\n                weight_resampled = self.resample_linear_weight(proj_weight, patch_size)\n\n                # Flatten patches and apply linear projection\n                patches_flat = patches.reshape(B, N, -1)\n                output = torch.nn.functional.linear(patches_flat, weight_resampled, proj_bias)\n            else:\n                # No resampling needed, patches can be pre-flattened\n                if patches.ndim == 5:\n                    B, N, Ph, Pw, C = patches.shape\n                    patches = patches.reshape(B, N, -1)\n                output = torch.nn.functional.linear(patches, proj_weight, proj_bias)\n        else:\n            # Conv mode\n            if patch_size != self.base_patch_size:\n                weight_resampled = self.resample_conv_weight(proj_weight, patch_size)\n                output = torch.nn.functional.conv2d(\n                    patches, weight_resampled, proj_bias,\n                    stride=patch_size, padding=0\n                )\n            else:\n                output = torch.nn.functional.conv2d(\n                    patches, proj_weight, proj_bias,\n                    stride=patch_size, padding=0\n                )\n\n        return output\n\n# def divs(n, m=None):\n#     m = m or n // 2\n#     if m == 1:\n#         return [1]\n#     if n % m == 0:\n#         return [m] + divs(n, m - 1)\n#     return 
divs(n, m - 1)\n#\n#\n# class FlexiPatchEmbed(nn.Module):\n#     \"\"\" 2D Image to Patch Embedding w/ Flexible Patch sizes (FlexiViT)\n#     FIXME WIP\n#     \"\"\"\n#     def __init__(\n#             self,\n#             img_size=240,\n#             patch_size=16,\n#             in_chans=3,\n#             embed_dim=768,\n#             base_img_size=240,\n#             base_patch_size=32,\n#             norm_layer=None,\n#             flatten=True,\n#             bias=True,\n#     ):\n#         super().__init__()\n#         self.img_size = to_2tuple(img_size)\n#         self.patch_size = to_2tuple(patch_size)\n#         self.num_patches = 0\n#\n#         # full range for 240 = (5, 6, 8, 10, 12, 14, 15, 16, 20, 24, 30, 40, 48)\n#         self.seqhw = (6, 8, 10, 12, 14, 15, 16, 20, 24, 30)\n#\n#         self.base_img_size = to_2tuple(base_img_size)\n#         self.base_patch_size = to_2tuple(base_patch_size)\n#         self.base_grid_size = tuple([i // p for i, p in zip(self.base_img_size, self.base_patch_size)])\n#         self.base_num_patches = self.base_grid_size[0] * self.base_grid_size[1]\n#\n#         self.flatten = flatten\n#         self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=self.patch_size, stride=self.patch_size, bias=bias)\n#         self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity()\n#\n#     def forward(self, x):\n#         B, C, H, W = x.shape\n#\n#         if self.patch_size == self.base_patch_size:\n#             weight = self.proj.weight\n#         else:\n#             weight = resample_patch_embed(self.proj.weight, self.patch_size)\n#         patch_size = self.patch_size\n#         x = F.conv2d(x, weight, bias=self.proj.bias, stride=patch_size)\n#         if self.flatten:\n#             x = x.flatten(2).transpose(1, 2)  # BCHW -> BNC\n#         x = self.norm(x)\n#         return x\n"
  },
  {
    "path": "timm/layers/pool1d.py",
    "content": "import torch\n\n\ndef global_pool_nlc(\n        x: torch.Tensor,\n        pool_type: str = 'token',\n        num_prefix_tokens: int = 1,\n        reduce_include_prefix: bool = False,\n):\n    \"\"\"Apply global pooling to tensor in NLC format.\n\n    Args:\n        x: Input tensor in (batch, length, channels) format.\n        pool_type: Pooling type - 'token', 'avg', 'max', 'avgmax', or empty string.\n        num_prefix_tokens: Number of prefix tokens (e.g., class token) to exclude from pooling.\n        reduce_include_prefix: Whether to include prefix tokens in reduction.\n\n    Returns:\n        Pooled tensor.\n    \"\"\"\n    if not pool_type:\n        return x\n\n    if pool_type == 'token':\n        x = x[:, 0]  # class token\n    else:\n        x = x if reduce_include_prefix else x[:, num_prefix_tokens:]\n        if pool_type == 'avg':\n            x = x.mean(dim=1)\n        elif pool_type == 'avgmax':\n            x = 0.5 * (x.amax(dim=1) + x.mean(dim=1))\n        elif pool_type == 'max':\n            x = x.amax(dim=1)\n        else:\n            assert not pool_type, f'Unknown pool type {pool_type}'\n\n    return x"
  },
  {
    "path": "timm/layers/pool2d_same.py",
    "content": "\"\"\" AvgPool2d w/ Same Padding\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom typing import List, Tuple, Optional, Union\n\nfrom ._fx import register_notrace_module\nfrom .helpers import to_2tuple\nfrom .padding import pad_same, get_padding_value\n\n\ndef avg_pool2d_same(\n        x: torch.Tensor,\n        kernel_size: List[int],\n        stride: List[int],\n        padding: List[int] = (0, 0),\n        ceil_mode: bool = False,\n        count_include_pad: bool = True,\n):\n    # FIXME how to deal with count_include_pad vs not for external padding?\n    x = pad_same(x, kernel_size, stride)\n    return F.avg_pool2d(x, kernel_size, stride, (0, 0), ceil_mode, count_include_pad)\n\n\n@register_notrace_module\nclass AvgPool2dSame(nn.AvgPool2d):\n    \"\"\" Tensorflow like 'SAME' wrapper for 2D average pooling\n    \"\"\"\n    def __init__(\n            self,\n            kernel_size: Union[int, Tuple[int, int]],\n            stride: Optional[Union[int, Tuple[int, int]]] = None,\n            padding: Union[int, Tuple[int, int], str] = 0,\n            ceil_mode: bool = False,\n            count_include_pad: bool = True,\n    ):\n        kernel_size = to_2tuple(kernel_size)\n        stride = to_2tuple(stride)\n        super().__init__(kernel_size, stride, (0, 0), ceil_mode, count_include_pad)\n\n    def forward(self, x):\n        x = pad_same(x, self.kernel_size, self.stride)\n        return F.avg_pool2d(\n            x, self.kernel_size, self.stride, self.padding, self.ceil_mode, self.count_include_pad)\n\n\ndef max_pool2d_same(\n        x: torch.Tensor,\n        kernel_size: List[int],\n        stride: List[int],\n        padding: List[int] = (0, 0),\n        dilation: List[int] = (1, 1),\n        ceil_mode: bool = False,\n):\n    x = pad_same(x, kernel_size, stride, value=-float('inf'))\n    return F.max_pool2d(x, kernel_size, stride, (0, 0), dilation, 
ceil_mode)\n\n\n@register_notrace_module\nclass MaxPool2dSame(nn.MaxPool2d):\n    \"\"\" Tensorflow like 'SAME' wrapper for 2D max pooling\n    \"\"\"\n    def __init__(\n            self,\n            kernel_size: Union[int, Tuple[int, int]],\n            stride: Optional[Union[int, Tuple[int, int]]] = None,\n            padding: Union[int, Tuple[int, int], str] = 0,\n            dilation: Union[int, Tuple[int, int]] = 1,\n            ceil_mode: bool = False,\n    ):\n        kernel_size = to_2tuple(kernel_size)\n        stride = to_2tuple(stride)\n        dilation = to_2tuple(dilation)\n        super().__init__(kernel_size, stride, (0, 0), dilation, ceil_mode)\n\n    def forward(self, x):\n        x = pad_same(x, self.kernel_size, self.stride, value=-float('inf'))\n        return F.max_pool2d(x, self.kernel_size, self.stride, (0, 0), self.dilation, self.ceil_mode)\n\n\ndef create_pool2d(pool_type, kernel_size, stride=None, **kwargs):\n    stride = stride or kernel_size\n    padding = kwargs.pop('padding', '')\n    padding, is_dynamic = get_padding_value(padding, kernel_size, stride=stride, **kwargs)\n    if is_dynamic:\n        if pool_type == 'avg':\n            return AvgPool2dSame(kernel_size, stride=stride, **kwargs)\n        elif pool_type == 'max':\n            return MaxPool2dSame(kernel_size, stride=stride, **kwargs)\n        else:\n            assert False, f'Unsupported pool type {pool_type}'\n    else:\n        if pool_type == 'avg':\n            return nn.AvgPool2d(kernel_size, stride=stride, padding=padding, **kwargs)\n        elif pool_type == 'max':\n            return nn.MaxPool2d(kernel_size, stride=stride, padding=padding, **kwargs)\n        else:\n            assert False, f'Unsupported pool type {pool_type}'\n"
  },
  {
    "path": "timm/layers/pos_embed.py",
    "content": "\"\"\" Position Embedding Utilities\n\nHacked together by / Copyright 2022 Ross Wightman\n\"\"\"\nimport logging\nimport math\nfrom typing import List, Tuple, Optional, Union\n\nimport torch\nimport torch.nn.functional as F\n\nfrom ._fx import register_notrace_function\n\n_logger = logging.getLogger(__name__)\n\n\n@torch.fx.wrap\n@register_notrace_function\ndef resample_abs_pos_embed(\n        posemb: torch.Tensor,\n        new_size: List[int],\n        old_size: Optional[List[int]] = None,\n        num_prefix_tokens: int = 1,\n        interpolation: str = 'bicubic',\n        antialias: bool = True,\n        verbose: bool = False,\n):\n    # sort out sizes, assume square if old size not provided\n    num_pos_tokens = posemb.shape[1]\n    num_new_tokens = new_size[0] * new_size[1] + num_prefix_tokens\n    if num_new_tokens == num_pos_tokens and new_size[0] == new_size[1]:\n        return posemb\n\n    if old_size is None:\n        hw = int(math.sqrt(num_pos_tokens - num_prefix_tokens))\n        old_size = hw, hw\n\n    if num_prefix_tokens:\n        posemb_prefix, posemb = posemb[:, :num_prefix_tokens], posemb[:, num_prefix_tokens:]\n    else:\n        posemb_prefix, posemb = None, posemb\n\n    # do the interpolation\n    embed_dim = posemb.shape[-1]\n    orig_dtype = posemb.dtype\n    posemb = posemb.float()  # interpolate needs float32\n    posemb = posemb.reshape(1, old_size[0], old_size[1], -1).permute(0, 3, 1, 2)\n    posemb = F.interpolate(posemb, size=new_size, mode=interpolation, antialias=antialias)\n    posemb = posemb.permute(0, 2, 3, 1).reshape(1, -1, embed_dim)\n    posemb = posemb.to(orig_dtype)\n\n    # add back extra (class, etc) prefix tokens\n    if posemb_prefix is not None:\n        posemb = torch.cat([posemb_prefix, posemb], dim=1)\n\n    if not torch.jit.is_scripting() and verbose:\n        _logger.info(f'Resized position embedding: {old_size} to {new_size}.')\n\n    return 
posemb\n\n\n@torch.fx.wrap\n@register_notrace_function\ndef resample_abs_pos_embed_nhwc(\n        posemb: torch.Tensor,\n        new_size: List[int],\n        interpolation: str = 'bicubic',\n        antialias: bool = True,\n        verbose: bool = False,\n):\n    if new_size[0] == posemb.shape[-3] and new_size[1] == posemb.shape[-2]:\n        return posemb\n\n    orig_dtype = posemb.dtype\n    posemb = posemb.float()\n    posemb = posemb.reshape(1, posemb.shape[-3], posemb.shape[-2], posemb.shape[-1]).permute(0, 3, 1, 2)\n    posemb = F.interpolate(posemb, size=new_size, mode=interpolation, antialias=antialias)\n    posemb = posemb.permute(0, 2, 3, 1).to(orig_dtype)\n\n    if not torch.jit.is_scripting() and verbose:\n        _logger.info(f'Resized position embedding: {posemb.shape[-3:-1]} to {new_size}.')\n\n    return posemb\n"
  },
  {
    "path": "timm/layers/pos_embed_rel.py",
    "content": "\"\"\" Relative position embedding modules and functions\n\nHacked together by / Copyright 2022 Ross Wightman\n\"\"\"\nimport math\nimport os\nfrom typing import Optional, Tuple\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom .grid import ndgrid\nfrom .interpolate import RegularGridInterpolator\nfrom .mlp import Mlp\nfrom .weight_init import trunc_normal_\n\n_USE_SCIPY = int(os.environ.get('TIMM_USE_SCIPY_INTERP', 0)) > 0\n\n\ndef gen_relative_position_index(\n        q_size: Tuple[int, int],\n        k_size: Optional[Tuple[int, int]] = None,\n        class_token: bool = False,\n        device=None,\n) -> torch.Tensor:\n    # Adapted with significant modifications from Swin / BeiT codebases\n    # get pair-wise relative position index for each token inside the window\n    assert k_size is None, 'Different q & k sizes not currently supported'  # FIXME\n\n    coords = torch.stack(ndgrid(\n        torch.arange(q_size[0], device=device),\n        torch.arange(q_size[1], device=device),\n    )).flatten(1)  # 2, Wh, Ww\n    relative_coords = coords[:, :, None] - coords[:, None, :]  # 2, Wh*Ww, Wh*Ww\n    relative_coords = relative_coords.permute(1, 2, 0)  # Qh*Qw, Kh*Kw, 2\n    relative_coords[:, :, 0] += q_size[0] - 1  # shift to start from 0\n    relative_coords[:, :, 1] += q_size[1] - 1\n    relative_coords[:, :, 0] *= 2 * q_size[1] - 1\n    num_relative_distance = (2 * q_size[0] - 1) * (2 * q_size[1] - 1)\n\n    # else:\n    #     # FIXME different q vs k sizes is a WIP, need to better offset the two grids?\n    #     q_coords = torch.stack(\n    #         ndgrid(\n    #             torch.arange(q_size[0]),\n    #             torch.arange(q_size[1])\n    #         )\n    #     ).flatten(1)  # 2, Wh, Ww\n    #     k_coords = torch.stack(\n    #         ndgrid(\n    #             torch.arange(k_size[0]),\n    #             torch.arange(k_size[1])\n    #         )\n    #     ).flatten(1)\n    #     relative_coords = 
q_coords[:, :, None] - k_coords[:, None, :]  # 2, Wh*Ww, Wh*Ww\n    #     relative_coords = relative_coords.permute(1, 2, 0)  # Qh*Qw, Kh*Kw, 2\n    #     relative_coords[:, :, 0] += max(q_size[0], k_size[0]) - 1  # shift to start from 0\n    #     relative_coords[:, :, 1] += max(q_size[1], k_size[1]) - 1\n    #     relative_coords[:, :, 0] *= k_size[1] + q_size[1] - 1\n    #     relative_position_index = relative_coords.sum(-1)  # Qh*Qw, Kh*Kw\n    #     num_relative_distance = (q_size[0] + k_size[0] - 1) * (q_size[1] + k_size[1] - 1) + 3\n\n    relative_position_index = relative_coords.sum(-1)  # Wh*Ww, Wh*Ww\n\n    if class_token:\n        # handle cls to token & token 2 cls & cls to cls as per beit for rel pos bias\n        # NOTE not intended or tested with MLP log-coords\n        relative_position_index = F.pad(relative_position_index, [1, 0, 1, 0])\n        relative_position_index[0, 0:] = num_relative_distance\n        relative_position_index[0:, 0] = num_relative_distance + 1\n        relative_position_index[0, 0] = num_relative_distance + 2\n\n    return relative_position_index.contiguous()\n\n\ndef resize_rel_pos_bias_table_simple(\n        rel_pos_bias,\n        new_window_size: Tuple[int, int],\n        new_bias_shape: Tuple[int, ...],\n):\n    dst_size = (new_window_size[0] * 2 - 1, new_window_size[1] * 2 - 1)\n    if rel_pos_bias.ndim == 3:\n        # TF maxvit style (num_heads, H, W) bias shape, no extra tokens currently supported\n        _, dst_h, dst_w = new_bias_shape\n        num_attn_heads, src_h, src_w = rel_pos_bias.shape\n        assert dst_h == dst_size[0] and dst_w == dst_size[1]\n        if src_h != dst_h or src_w != dst_w:\n            rel_pos_bias = torch.nn.functional.interpolate(\n                rel_pos_bias.unsqueeze(0),\n                size=dst_size,\n                mode=\"bicubic\",\n                align_corners=False,\n            ).squeeze(0)\n    else:\n        assert rel_pos_bias.ndim == 2\n        # (num_pos, num_heads) 
(aka flat) bias shape\n        dst_num_pos, _ = new_bias_shape\n        src_num_pos, num_attn_heads = rel_pos_bias.shape\n        num_extra_tokens = dst_num_pos - (dst_size[0] * dst_size[1])\n        src_size = int((src_num_pos - num_extra_tokens) ** 0.5)\n        src_size = (src_size, src_size)  # FIXME could support non-equal src if argument passed\n\n        if src_size[0] != dst_size[0] or src_size[1] != dst_size[1]:\n            if num_extra_tokens:\n                extra_tokens = rel_pos_bias[-num_extra_tokens:, :]\n                rel_pos_bias = rel_pos_bias[:-num_extra_tokens, :]\n            else:\n                extra_tokens = None\n\n            rel_pos_bias = torch.nn.functional.interpolate(\n                rel_pos_bias.transpose(1, 0).reshape((1, -1, src_size[0], src_size[1])),\n                size=dst_size,\n                mode=\"bicubic\",\n                align_corners=False,\n            ).view(-1, dst_num_pos - num_extra_tokens).transpose(0, 1)\n\n            if extra_tokens is not None:\n                rel_pos_bias = torch.cat((rel_pos_bias, extra_tokens), dim=0)\n\n    return rel_pos_bias\n\n\ndef resize_rel_pos_bias_table_levit(\n        position_bias_table,\n        new_size,\n        interpolation: str = 'bicubic',\n        antialias: bool = True,\n):\n    \"\"\"\n    Resample relative position bias table suggested in LeVit\n    Adapted from: https://github.com/microsoft/Cream/blob/main/TinyViT/utils.py\n    \"\"\"\n    L1, nH1 = position_bias_table.size()\n    L2, nH2 = new_size\n    assert nH1 == nH2\n    if L1 != L2:\n        orig_dtype = position_bias_table.dtype\n        position_bias_table = position_bias_table.float()\n        # bicubic interpolate relative_position_bias_table if not match\n        S1 = int(L1 ** 0.5)\n        S2 = int(L2 ** 0.5)\n        relative_position_bias_table_resized = F.interpolate(\n            position_bias_table.permute(1, 0).view(1, nH1, S1, S1),\n            size=(S2, S2),\n            
mode=interpolation,\n            antialias=antialias,\n        )\n        relative_position_bias_table_resized = relative_position_bias_table_resized.view(nH2, L2).permute(1, 0)\n        relative_position_bias_table_resized.to(orig_dtype)\n        return relative_position_bias_table_resized\n    else:\n        return position_bias_table\n\n\ndef resize_rel_pos_bias_table(\n        rel_pos_bias,\n        new_window_size: Tuple[int, int],\n        new_bias_shape: Tuple[int, ...],\n):\n    \"\"\" Resize relative position bias table using more advanced interpolation.\n\n    Modified from code in Microsoft Unilm (https://github.com/microsoft/unilm) repo (BeiT, BeiT-v2, etc).\n\n    https://github.com/microsoft/unilm/blob/5255d52de86dad642810f5849dd357769346c1d7/beit/run_class_finetuning.py#L351\n\n    Args:\n        rel_pos_bias:\n        new_window_size:\n        new_bias_shape:\n\n    Returns:\n\n    \"\"\"\n    if _USE_SCIPY:\n        from scipy import interpolate\n\n    dst_size = (new_window_size[0] * 2 - 1, new_window_size[1] * 2 - 1)\n    if rel_pos_bias.ndim == 3:\n        # TF maxvit style (num_heads, H, W) bias shape, no extra tokens currently supported\n        num_extra_tokens = 0\n        _, dst_h, dst_w = new_bias_shape\n        assert dst_h == dst_size[0] and dst_w == dst_size[1]\n        num_attn_heads, src_h, src_w = rel_pos_bias.shape\n        src_size = (src_h, src_w)\n        has_flat_shape = False\n    else:\n        assert rel_pos_bias.ndim == 2\n        # (num_pos, num_heads) (aka flat) bias shape\n        dst_num_pos, _ = new_bias_shape\n        src_num_pos, num_attn_heads = rel_pos_bias.shape\n        num_extra_tokens = dst_num_pos - (dst_size[0] * dst_size[1])\n        src_size = int((src_num_pos - num_extra_tokens) ** 0.5)\n        src_size = (src_size, src_size)\n        has_flat_shape = True\n\n    if src_size[0] != dst_size[0] or src_size[1] != dst_size[1]:\n        # print(\"Interpolating position from %dx%d to %dx%d\" % (src_size[0], 
src_size[1], dst_size[0], dst_size[1]))\n        if num_extra_tokens:\n            extra_tokens = rel_pos_bias[-num_extra_tokens:, :]\n            rel_pos_bias = rel_pos_bias[:-num_extra_tokens, :]\n        else:\n            extra_tokens = None\n\n        def geometric_progression(a, r, n):\n            return a * (1.0 - r ** n) / (1.0 - r)\n\n        def _calc(src, dst):\n            left, right = 1.01, 1.5\n            while right - left > 1e-6:\n                q = (left + right) / 2.0\n                gp = geometric_progression(1, q, src // 2)\n                if gp > dst // 2:\n                    right = q\n                else:\n                    left = q\n\n            dis = []\n            cur = 1\n            for i in range(src // 2):\n                dis.append(cur)\n                cur += q ** (i + 1)\n            r_ids = [-_ for _ in reversed(dis)]\n            return r_ids + [0] + dis\n\n        y = _calc(src_size[0], dst_size[0])\n        x = _calc(src_size[1], dst_size[1])\n        yx = [torch.tensor(y), torch.tensor(x)]\n        # print(\"Original positions = %s\" % str(x))\n\n        ty = dst_size[0] // 2.0\n        tx = dst_size[1] // 2.0\n        dy = torch.arange(-ty, ty + 0.1, 1.0)\n        dx = torch.arange(-tx, tx + 0.1, 1.0)\n        dyx = ndgrid(dy, dx)\n        # print(\"Target positions = %s\" % str(dx))\n\n        all_rel_pos_bias = []\n        for i in range(num_attn_heads):\n            if has_flat_shape:\n                z = rel_pos_bias[:, i].view(src_size[0], src_size[1]).float()\n            else:\n                z = rel_pos_bias[i, :, :].float()\n\n            if _USE_SCIPY:\n                # Original beit code uses scipy w/ cubic interpolation\n                f = interpolate.interp2d(x, y, z.numpy(), kind='cubic')\n                r = torch.Tensor(f(dx, dy)).contiguous().to(rel_pos_bias.device)\n            else:\n                # Without scipy dependency, I've found a reasonably simple impl\n                # that 
supports uneven spaced interpolation pts with 'linear' interp.\n                # Results are comparable to scipy for model accuracy in most cases.\n                f = RegularGridInterpolator(yx, z)\n                r = f(dyx).contiguous().to(rel_pos_bias.device)\n\n            if has_flat_shape:\n                r = r.view(-1, 1)\n            all_rel_pos_bias.append(r)\n\n        if has_flat_shape:\n            rel_pos_bias = torch.cat(all_rel_pos_bias, dim=-1)\n        else:\n            rel_pos_bias = torch.cat(all_rel_pos_bias, dim=0)\n\n        if extra_tokens is not None:\n            assert has_flat_shape\n            rel_pos_bias = torch.cat((rel_pos_bias, extra_tokens), dim=0)\n\n    return rel_pos_bias\n\n\nclass RelPosBias(nn.Module):\n    \"\"\" Relative Position Bias\n    Adapted from Swin-V1 relative position bias impl, modularized.\n    \"\"\"\n\n    def __init__(\n            self,\n            window_size: Tuple[int, int],\n            num_heads: int,\n            prefix_tokens: int = 0,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert prefix_tokens <= 1\n        self.window_size = window_size\n        self.window_area = window_size[0] * window_size[1]\n        self.prefix_tokens = prefix_tokens\n        self.bias_shape = (self.window_area + prefix_tokens,) * 2 + (num_heads,)\n\n        num_relative_distance = (2 * window_size[0] - 1) * (2 * window_size[1] - 1) + 3 * prefix_tokens\n        self.relative_position_bias_table = nn.Parameter(torch.empty(num_relative_distance, num_heads, **dd))\n        index_size = (self.window_area + prefix_tokens) ** 2\n        self.register_buffer(\n            \"relative_position_index\",\n            torch.empty(index_size, device=device, dtype=torch.long),\n            persistent=False,\n        )\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def 
reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        trunc_normal_(self.relative_position_bias_table, std=.02)\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        self.relative_position_index.copy_(\n            gen_relative_position_index(\n                self.window_size,\n                class_token=self.prefix_tokens > 0,\n                device=self.relative_position_index.device,\n            ).view(-1)\n        )\n\n    def get_bias(self) -> torch.Tensor:\n        relative_position_bias = self.relative_position_bias_table[self.relative_position_index]\n        # win_h * win_w, win_h * win_w, num_heads\n        relative_position_bias = relative_position_bias.view(self.bias_shape).permute(2, 0, 1)\n        return relative_position_bias.unsqueeze(0).contiguous()\n\n    def forward(self, attn, shared_rel_pos: Optional[torch.Tensor] = None):\n        return attn + self.get_bias()\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n\ndef gen_relative_log_coords(\n        win_size: Tuple[int, int],\n        pretrained_win_size: Tuple[int, int] = (0, 0),\n        mode='swin',\n        device=None,\n        dtype=None,\n):\n    assert mode in ('swin', 'cr')\n    # as per official swin-v2 impl, supporting timm specific 'cr' log coords as well\n    relative_coords_h = torch.arange(-(win_size[0] - 1), win_size[0], device=device).to(torch.float32)\n    relative_coords_w = torch.arange(-(win_size[1] - 1), win_size[1], device=device).to(torch.float32)\n    relative_coords_table = torch.stack(ndgrid(relative_coords_h, relative_coords_w))\n    relative_coords_table = relative_coords_table.permute(1, 2, 0).contiguous()  # 2*Wh-1, 2*Ww-1, 2\n    if mode == 'swin':\n        if pretrained_win_size[0] > 0:\n            relative_coords_table[:, :, 0] /= 
(pretrained_win_size[0] - 1)\n            relative_coords_table[:, :, 1] /= (pretrained_win_size[1] - 1)\n        else:\n            relative_coords_table[:, :, 0] /= (win_size[0] - 1)\n            relative_coords_table[:, :, 1] /= (win_size[1] - 1)\n        relative_coords_table *= 8  # normalize to -8, 8\n        relative_coords_table = torch.sign(relative_coords_table) * torch.log2(\n            1.0 + relative_coords_table.abs()) / math.log2(8)\n    else:\n        # mode == 'cr'\n        relative_coords_table = torch.sign(relative_coords_table) * torch.log(\n            1.0 + relative_coords_table.abs())\n\n    return relative_coords_table.to(dtype)\n\n\nclass RelPosMlp(nn.Module):\n    \"\"\" Log-Coordinate Relative Position MLP\n    Based on ideas presented in Swin-V2 paper (https://arxiv.org/abs/2111.09883)\n\n    This impl covers the 'swin' implementation as well as two timm specific modes ('cr', and 'rw')\n    \"\"\"\n    def __init__(\n            self,\n            window_size: Tuple[int, int],\n            num_heads: int = 8,\n            hidden_dim: int = 128,\n            prefix_tokens: int = 0,\n            mode: str = 'cr',\n            pretrained_window_size: Tuple[int, int] = (0, 0),\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.window_size = window_size\n        self.window_area = self.window_size[0] * self.window_size[1]\n        self.prefix_tokens = prefix_tokens\n        self.num_heads = num_heads\n        self.bias_shape = (self.window_area,) * 2 + (num_heads,)\n        self.mode = mode\n        self.pretrained_window_size = pretrained_window_size\n        if mode == 'swin':\n            self.bias_act = nn.Sigmoid()\n            self.bias_gain = 16\n            mlp_bias = (True, False)\n        else:\n            self.bias_act = nn.Identity()\n            self.bias_gain = None\n            mlp_bias = True\n\n        self.mlp = Mlp(\n         
   2,  # x, y\n            hidden_features=hidden_dim,\n            out_features=num_heads,\n            act_layer=nn.ReLU,\n            bias=mlp_bias,\n            drop=(0.125, 0.),\n            **dd,\n        )\n\n        index_size = self.window_area ** 2\n        rel_coords_shape = (2 * window_size[0] - 1, 2 * window_size[1] - 1, 2)\n        self.register_buffer(\n            \"relative_position_index\",\n            torch.empty(index_size, device=device, dtype=torch.long),\n            persistent=False,\n        )\n        self.register_buffer(\n            \"rel_coords_log\",\n            torch.empty(rel_coords_shape, **dd),\n            persistent=False,\n        )\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def get_bias(self) -> torch.Tensor:\n        relative_position_bias = self.mlp(self.rel_coords_log)\n        if self.relative_position_index is not None:\n            relative_position_bias = relative_position_bias.view(-1, self.num_heads)[self.relative_position_index]\n            relative_position_bias = relative_position_bias.view(self.bias_shape)\n        relative_position_bias = relative_position_bias.permute(2, 0, 1)\n        relative_position_bias = self.bias_act(relative_position_bias)\n        if self.bias_gain is not None:\n            relative_position_bias = self.bias_gain * relative_position_bias\n        if self.prefix_tokens:\n            relative_position_bias = F.pad(relative_position_bias, [self.prefix_tokens, 0, self.prefix_tokens, 0])\n        return relative_position_bias.unsqueeze(0).contiguous()\n\n    def forward(self, attn, shared_rel_pos: Optional[torch.Tensor] = None):\n        return attn + self.get_bias()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        device = 
self.relative_position_index.device\n        dtype = self.rel_coords_log.dtype\n        self.relative_position_index.copy_(\n            gen_relative_position_index(self.window_size, device=device).view(-1)\n        )\n        self.rel_coords_log.copy_(\n            gen_relative_log_coords(\n                self.window_size,\n                self.pretrained_window_size,\n                mode=self.mode,\n                device=device,\n                dtype=dtype,\n            )\n        )\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n\ndef generate_lookup_tensor(\n        length: int,\n        max_relative_position: Optional[int] = None,\n        device=None,\n        dtype=None,\n):\n    \"\"\"Generate a one_hot lookup tensor to reindex embeddings along one dimension.\n\n    Args:\n        length: the length to reindex to.\n        max_relative_position: the maximum relative position to consider.\n            Relative position embeddings for distances above this threshold\n            are zeroed out.\n    Returns:\n        a lookup Tensor of size [length, length, vocab_size] that satisfies\n            ret[n,m,v] = 1{m - n + max_relative_position = v}.\n    \"\"\"\n    if max_relative_position is None:\n        max_relative_position = length - 1\n    # Return the cached lookup tensor, otherwise compute it and cache it.\n    vocab_size = 2 * max_relative_position + 1\n    ret = torch.zeros(length, length, vocab_size, device=device, dtype=dtype)\n    for i in range(length):\n        for x in range(length):\n            v = x - i + max_relative_position\n            if abs(x - i) > max_relative_position:\n                continue\n            ret[i, x, v] = 1\n    return ret\n\n\ndef reindex_2d_einsum_lookup(\n        relative_position_tensor,\n        height: int,\n        width: int,\n        height_lookup: torch.Tensor,\n        width_lookup: torch.Tensor,\n) -> 
torch.Tensor:\n    \"\"\"Reindex 2d relative position bias with 2 independent einsum lookups.\n\n    Adapted from:\n     https://github.com/google-research/maxvit/blob/2e06a7f1f70c76e64cd3dabe5cd1b8c1a23c9fb7/maxvit/models/attention_utils.py\n\n    Args:\n        relative_position_tensor: tensor of shape\n            [..., vocab_height, vocab_width, ...].\n        height: height to reindex to.\n        width: width to reindex to.\n        height_lookup: one-hot height lookup\n        width_lookup: one-hot width lookup\n    Returns:\n        reindexed_tensor: a Tensor of shape\n            [..., height * width, height * width, ...]\n    \"\"\"\n    reindexed_tensor = torch.einsum('nhw,ixh->nixw', relative_position_tensor, height_lookup)\n    reindexed_tensor = torch.einsum('nixw,jyw->nijxy', reindexed_tensor, width_lookup)\n    area = height * width\n    return reindexed_tensor.reshape(relative_position_tensor.shape[0], area, area)\n\n\nclass RelPosBiasTf(nn.Module):\n    \"\"\" Relative Position Bias Impl (Compatible with Tensorflow MaxViT models)\n    Adapted from:\n     https://github.com/google-research/maxvit/blob/2e06a7f1f70c76e64cd3dabe5cd1b8c1a23c9fb7/maxvit/models/attention_utils.py\n    \"\"\"\n    def __init__(\n            self,\n            window_size: Tuple[int, int],\n            num_heads: int,\n            prefix_tokens: int = 0,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert prefix_tokens <= 1\n        self.window_size = window_size\n        self.window_area = window_size[0] * window_size[1]\n        self.num_heads = num_heads\n\n        vocab_height = 2 * window_size[0] - 1\n        vocab_width = 2 * window_size[1] - 1\n        self.bias_shape = (self.num_heads, vocab_height, vocab_width)\n        self.relative_position_bias_table = nn.Parameter(torch.empty(self.bias_shape, **dd))\n        height_lookup_shape = (window_size[0], window_size[0], 
vocab_height)\n        width_lookup_shape = (window_size[1], window_size[1], vocab_width)\n        self.register_buffer('height_lookup', torch.empty(height_lookup_shape, **dd), persistent=False)\n        self.register_buffer('width_lookup', torch.empty(width_lookup_shape, **dd), persistent=False)\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        nn.init.normal_(self.relative_position_bias_table, std=.02)\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        device = self.height_lookup.device\n        dtype = self.height_lookup.dtype\n        self.height_lookup.copy_(generate_lookup_tensor(self.window_size[0], device=device, dtype=dtype))\n        self.width_lookup.copy_(generate_lookup_tensor(self.window_size[1], device=device, dtype=dtype))\n\n    def get_bias(self) -> torch.Tensor:\n        # FIXME change to not use one-hot/einsum?\n        return reindex_2d_einsum_lookup(\n            self.relative_position_bias_table,\n            self.window_size[0],\n            self.window_size[1],\n            self.height_lookup,\n            self.width_lookup\n        )\n\n    def forward(self, attn, shared_rel_pos: Optional[torch.Tensor] = None):\n        return attn + self.get_bias()\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n"
  },
  {
    "path": "timm/layers/pos_embed_sincos.py",
    "content": "\"\"\" Sin-cos, fourier, rotary position embedding modules and functions\n\nHacked together by / Copyright 2022 Ross Wightman\n\"\"\"\nimport math\nfrom typing import List, Tuple, Optional, Union\n\nimport torch\nfrom torch import nn as nn\n\nfrom ._fx import register_notrace_function\nfrom .grid import ndgrid\nfrom .trace_utils import _assert\n\ndef pixel_freq_bands(\n        num_bands: int,\n        max_freq: float = 224.,\n        linear_bands: bool = True,\n        device: Optional[torch.device] = None,\n):\n    if linear_bands:\n        bands = torch.linspace(1.0, max_freq / 2, num_bands, dtype=torch.float32, device=device)\n    else:\n        bands = 2 ** torch.linspace(0, math.log(max_freq, 2) - 1, num_bands, dtype=torch.float32, device=device)\n    return bands * torch.pi\n\n\ndef freq_bands(\n        num_bands: int,\n        temperature: float = 10000.,\n        step: int = 2,\n        device: Optional[torch.device] = None,\n) -> torch.Tensor:\n    exp = torch.arange(0, num_bands, step, dtype=torch.int64, device=device).to(torch.float32) / num_bands\n    bands = 1. 
/ (temperature ** exp)\n    return bands\n\n\ndef build_sincos2d_pos_embed(\n        feat_shape: List[int],\n        dim: int = 64,\n        temperature: float = 10000.,\n        reverse_coord: bool = False,\n        interleave_sin_cos: bool = False,\n        device: Optional[torch.device] = None,\n        dtype: torch.dtype = torch.float32,\n) -> torch.Tensor:\n    \"\"\"\n\n    Args:\n        feat_shape:\n        dim:\n        temperature:\n        reverse_coord: stack grid order W, H instead of H, W\n        interleave_sin_cos: sin, cos, sin, cos stack instead of sin, sin, cos, cos\n        dtype:\n        device:\n\n    Returns:\n\n    \"\"\"\n    assert dim % 4 == 0, 'Embed dimension must be divisible by 4 for sin-cos 2D position embedding'\n    pos_dim = dim // 4\n    bands = freq_bands(pos_dim, temperature=temperature, step=1, device=device)\n\n    if reverse_coord:\n        feat_shape = feat_shape[::-1]  # stack W, H instead of H, W\n    grid = torch.stack(ndgrid([\n        torch.arange(s, device=device, dtype=torch.int64).to(torch.float32)\n        for s in feat_shape\n    ])).flatten(1).transpose(0, 1)\n    pos2 = grid.unsqueeze(-1) * bands.unsqueeze(0)\n    # FIXME add support for unflattened spatial dim?\n\n    stack_dim = 2 if interleave_sin_cos else 1  # stack sin, cos, sin, cos  instead of sin sin cos cos\n    pos_emb = torch.stack([torch.sin(pos2), torch.cos(pos2)], dim=stack_dim).flatten(1)\n    return pos_emb.to(dtype=dtype)\n\n\ndef swap_shape_xy(seq: List[int]) -> List[int]:\n    if len(seq) < 2:\n        return seq\n    return [seq[1], seq[0]] + list(seq[2:])\n\n\ndef build_fourier_pos_embed(\n        feat_shape: List[int],\n        bands: Optional[torch.Tensor] = None,\n        num_bands: int = 64,\n        max_res: int = 224,\n        temperature: float = 10000.,\n        linear_bands: bool = False,\n        include_grid: bool = False,\n        in_pixels: bool = True,\n        ref_feat_shape: Optional[List[int]] = None,\n        grid_offset: 
float = 0.,\n        grid_indexing: str = 'ij',\n        device: Optional[torch.device] = None,\n        dtype: torch.dtype = torch.float32,\n) -> List[torch.Tensor]:\n    \"\"\"\n\n    Args:\n        feat_shape: Feature shape for embedding.\n        bands: Pre-calculated frequency bands.\n        num_bands: Number of frequency bands (determines output dim).\n        max_res: Maximum resolution for pixel based freq.\n        temperature: Temperature for non-pixel freq.\n        linear_bands: Linear band spacing for pixel based freq.\n        include_grid: Include the spatial grid in output.\n        in_pixels: Output in pixel freq.\n        ref_feat_shape: Reference feature shape for resize / fine-tune.\n        grid_offset: Constant offset to add to grid for non-pixel freq.\n        grid_indexing: Indexing mode for meshgrid ('ij' or 'xy')\n        dtype: Output dtype.\n        device: Output device.\n\n    Returns:\n\n    \"\"\"\n    if bands is None:\n        if in_pixels:\n            bands = pixel_freq_bands(\n                num_bands,\n                float(max_res),\n                linear_bands=linear_bands,\n                device=device,\n            )\n        else:\n            bands = freq_bands(\n                num_bands,\n                temperature=temperature,\n                step=1,\n                device=device,\n            )\n    else:\n        if device is None:\n            device = bands.device\n        if dtype is None:\n            dtype = bands.dtype\n\n    if grid_indexing == 'xy':\n        feat_shape = swap_shape_xy(feat_shape)\n        if ref_feat_shape is not None:\n            ref_feat_shape = swap_shape_xy(ref_feat_shape)\n\n    if in_pixels:\n        t = [\n            torch.linspace(-1., 1., steps=s, device=device, dtype=torch.float32)\n            for s in feat_shape\n        ]\n    else:\n        t = [\n            torch.arange(s, device=device, dtype=torch.int64).to(torch.float32) + grid_offset\n            for s in 
feat_shape\n        ]\n\n    if ref_feat_shape is not None:\n        # eva's scheme for resizing rope embeddings (ref shape = pretrain)\n        t = [x / f * r for x, f, r in zip(t, feat_shape, ref_feat_shape)]\n\n    grid = torch.stack(torch.meshgrid(t, indexing=grid_indexing), dim=-1)\n    grid = grid.unsqueeze(-1)\n    pos = grid * bands\n\n    pos_sin, pos_cos = pos.sin().to(dtype=dtype), pos.cos().to(dtype=dtype)\n    out = [grid, pos_sin, pos_cos] if include_grid else [pos_sin, pos_cos]\n    return out\n\n\nclass FourierEmbed(nn.Module):\n\n    def __init__(\n            self,\n            max_res: int = 224,\n            num_bands: int = 64,\n            concat_grid=True,\n            keep_spatial=False,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        self.max_res = max_res\n        self.num_bands = num_bands\n        self.concat_grid = concat_grid\n        self.keep_spatial = keep_spatial\n        self.register_buffer('bands', torch.empty(num_bands, device=device, dtype=dtype), persistent=False)\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        self.bands.copy_(pixel_freq_bands(self.num_bands, self.max_res))\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n    def forward(self, x):\n        B, C = x.shape[:2]\n        feat_shape = x.shape[2:]\n        emb = build_fourier_pos_embed(\n            feat_shape,\n            self.bands,\n            include_grid=self.concat_grid,\n            dtype=x.dtype,\n            device=x.device,\n        )\n        emb = torch.cat(emb, dim=-1)\n        emb = emb.transpose(-1, 
-2).flatten(len(feat_shape))\n        batch_expand = (B,) + (-1,) * (x.ndim - 1)\n\n        # FIXME support nD\n        if self.keep_spatial:\n            x = torch.cat([x, emb.unsqueeze(0).expand(batch_expand).permute(0, 3, 1, 2)], dim=1)\n        else:\n            x = torch.cat([x.permute(0, 2, 3, 1), emb.unsqueeze(0).expand(batch_expand)], dim=-1)\n            x = x.reshape(B, feat_shape.numel(), -1)\n\n        return x\n\n\ndef rot(x):\n    # x:   [ x0  x1  x2  x3  x4  x5]\n    # out: [-x1  x0 -x3  x2 -x5  x4]\n    return torch.stack([-x[..., 1::2], x[..., ::2]], -1).reshape(x.shape)\n\n\ndef rope_rotate_half(x: torch.Tensor) -> torch.Tensor:\n    # x:   [ x0  x1  x2  x3  x4  x5]\n    # out: [-x3 -x4 -x5  x0  x1  x2]\n    x1, x2 = x.chunk(2, dim=-1)\n    return torch.cat([-x2, x1], dim=-1)\n\n\ndef apply_rot_embed(\n        x: torch.Tensor,\n        sin_emb: torch.Tensor,\n        cos_emb: torch.Tensor,\n        half: bool = False,\n) -> torch.Tensor:\n    # x: [..., D], eg [x0, x1, x2, x3, x4, x5]\n    if half:\n        # sin: [..., D], eg [sin0, sin1, sin2, sin0, sin1, sin2]\n        # cos: [..., D], eg [cos0, cos1, cos2, cos0, cos1, cos2\n        # rope_rotate_half(x): eg [-x3, -x4, -x5, x0, x1, x2]\n        return x * cos_emb + rope_rotate_half(x) * sin_emb\n    else:\n        # sin: [..., D], eg [sin0, sin0, sin1, sin1, sin2, sin2]\n        # cos: [..., D], eg [cos0, cos0, cos1, cos1, cos2, cos2]\n        # rot(x): eg [-x1, x0, -x3, x2, -x5, x4]\n        return x * cos_emb + rot(x) * sin_emb\n\n\ndef apply_rot_embed_list(\n        x: List[torch.Tensor],\n        sin_emb: torch.Tensor,\n        cos_emb: torch.Tensor,\n        half: bool = False\n) -> List[torch.Tensor]:\n    if isinstance(x, torch.Tensor):\n        x = [x]\n    # x: [..., D], eg [x0, x1, x2, x3, x4, x5]\n    if half:\n        # sin: [..., D], eg [sin0, sin1, sin2, sin0, sin1, sin2]\n        # cos: [..., D], eg [cos0, cos1, cos2, cos0, cos1, cos2\n        # rope_rotate_half(x): eg [-x3, 
-x4, -x5, x0, x1, x2]\n        return [t * cos_emb + rope_rotate_half(t) * sin_emb for t in x]\n    else:\n        # sin: [..., D], eg [sin0, sin0, sin1, sin1, sin2, sin2]\n        # cos: [..., D], eg [cos0, cos0, cos1, cos1, cos2, cos2]\n        # rot(x): eg [-x1, x0, -x3, x2, -x5, x4]\n        return [t * cos_emb + rot(t) * sin_emb for t in x]\n\n\ndef apply_rot_embed_cat(\n        x: torch.Tensor,\n        emb: torch.Tensor,\n        half: bool = False\n) -> torch.Tensor:\n    sin_emb, cos_emb = emb.chunk(2, -1)\n    # x: [..., D], eg [x0, x1, x2, x3, x4, x5]\n    if half:\n        # sin: [..., D], eg [sin0, sin1, sin2, sin0, sin1, sin2]\n        # cos: [..., D], eg [cos0, cos1, cos2, cos0, cos1, cos2\n        # rope_rotate_half(x), eg [-x3, -x4, -x5, x0, x1, x2]\n        return x * cos_emb + rope_rotate_half(x) * sin_emb\n    else:\n        # sin: [..., D], eg [sin0, sin0, sin1, sin1, sin2, sin2]\n        # cos: [..., D], eg [cos0, cos0, cos1, cos1, cos2, cos2]\n        # rot(x), eg [-x1, x0, -x3, x2, -x5, x4]\n        return x * cos_emb + rot(x) * sin_emb\n\n\ndef apply_keep_indices_nlc(\n        x: torch.Tensor,\n        pos_embed: torch.Tensor,\n        keep_indices: torch.Tensor,\n        pos_embed_has_batch: bool = False,\n) -> torch.Tensor:\n    \"\"\" Apply keep indices to different ROPE shapes\n\n    Expected pos_embed shapes:\n    * [seq_len, pos_embed_dim] --> output [batch_size, seq_len, pos_embed_dim]\n    * [num_heads, seq_len, pos_embed_dim] --> output [batch_size, num_heads, seq_len, pos_embed_dim]\n    * [depth, num_heads, seq_len, pos_embed_dim] --> output [batch_size, depth, num_heads, seq_len, pos_embed_dim]\n\n    And all of the above with leading batch dimension already present if `pos_embed_has_batch == True`\n\n    \"\"\"\n    if pos_embed_has_batch:\n        # Pos embed already includes batch dim\n        _assert(pos_embed.ndim >= 3, 'Incorrect number of dimensions')  # At least [batch, seq_len, pos_embed_dim]\n    else:\n        # Add 
batch dimension and expand to batch size\n        _assert(pos_embed.ndim >= 2, 'Incorrect number of dimensions')  # At least [seq_len, pos_embed_dim]\n        expand_shape = (x.shape[0],) + (-1,) * pos_embed.ndim\n        pos_embed = pos_embed.unsqueeze(0).expand(expand_shape)\n\n    # Reshape keep_indices to add singleton dims\n    keep_shape = (keep_indices.shape[0],) + (1,) * (pos_embed.ndim - 3) + (keep_indices.shape[1], 1)\n    keep_indices = keep_indices.view(keep_shape)\n\n    # Expand all dims to match position embedding except the gather dim (second-last)\n    keep_expand = list(pos_embed.shape)\n    keep_expand[-2] = -1\n    keep_indices = keep_indices.expand(keep_expand)\n\n    return pos_embed.gather(-2, keep_indices)\n\n\ndef build_rotary_pos_embed(\n        feat_shape: List[int],\n        bands: Optional[torch.Tensor] = None,\n        dim: int = 64,\n        max_res: int = 224,\n        temperature: float = 10000.,\n        linear_bands: bool = False,\n        in_pixels: bool = True,\n        ref_feat_shape: Optional[List[int]] = None,\n        grid_offset: float = 0.,\n        grid_indexing: str = 'ij',\n        device: Optional[torch.device] = None,\n        dtype: torch.dtype = torch.float32,\n):\n    \"\"\"\n\n    Args:\n        feat_shape: Spatial shape of the target tensor for embedding.\n        bands: Optional pre-generated frequency bands\n        dim: Output dimension of embedding tensor.\n        max_res: Maximum resolution for pixel mode.\n        temperature: Temperature (inv freq) for non-pixel mode\n        linear_bands: Linearly (instead of log) spaced bands for pixel mode\n        in_pixels: Pixel vs language (inv freq) mode.\n        ref_feat_shape: Reference feature shape for resize / fine-tune.\n        grid_offset: Constant offset to add to grid for non-pixel freq.\n        grid_indexing: Indexing mode for meshgrid ('ij' or 'xy')\n        device: Output device.\n        dtype: Output dtype.\n\n    Returns:\n\n    \"\"\"\n    
sin_emb, cos_emb = build_fourier_pos_embed(\n        feat_shape,\n        bands=bands,\n        num_bands=dim // 4,\n        max_res=max_res,\n        temperature=temperature,\n        linear_bands=linear_bands,\n        in_pixels=in_pixels,\n        ref_feat_shape=ref_feat_shape,\n        grid_offset=grid_offset,\n        grid_indexing=grid_indexing,\n        device=device,\n        dtype=dtype,\n    )\n    num_spatial_dim = 1\n    # this would be much nicer as a .numel() call to torch.Size(), but torchscript sucks\n    for x in feat_shape:\n        num_spatial_dim *= x\n    sin_emb = sin_emb.reshape(num_spatial_dim, -1).repeat_interleave(2, -1)\n    cos_emb = cos_emb.reshape(num_spatial_dim, -1).repeat_interleave(2, -1)\n    return sin_emb, cos_emb\n\n\nclass RotaryEmbedding(nn.Module):\n    \"\"\" Rotary position embedding\n\n    NOTE: This is my initial attempt at impl rotary embedding for spatial use, it has not\n    been well tested, and will likely change. It will be moved to its own file.\n\n    The following impl/resources were referenced for this impl:\n    * https://github.com/lucidrains/vit-pytorch/blob/6f3a5fcf0bca1c5ec33a35ef48d97213709df4ba/vit_pytorch/rvt.py\n    * https://blog.eleuther.ai/rotary-embeddings/\n    \"\"\"\n\n    def __init__(\n            self,\n            dim,\n            max_res=224,\n            temperature=10000,\n            in_pixels=True,\n            linear_bands: bool = False,\n            feat_shape: Optional[List[int]] = None,\n            ref_feat_shape: Optional[List[int]] = None,\n            grid_offset: float = 0.,\n            grid_indexing: str = 'ij',\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        self.dim = dim\n        self.max_res = max_res\n        self.temperature = temperature\n        self.linear_bands = linear_bands\n        self.in_pixels = in_pixels\n        self.feat_shape = feat_shape\n        self.ref_feat_shape = ref_feat_shape\n        
self.grid_offset = grid_offset\n        self.grid_indexing = grid_indexing\n\n        # Track which mode we're in\n        self._use_cached_embed = feat_shape is not None\n\n        if feat_shape is None:\n            # bands mode: cache bands, rebuild embeddings on each get_embed call\n            bands_shape = (dim // 4,)\n            self.register_buffer('bands', torch.empty(bands_shape, device=device, dtype=dtype), persistent=False)\n            self.pos_embed_sin = None\n            self.pos_embed_cos = None\n        else:\n            # embed mode: cache full sin/cos embeddings\n            self.bands = None\n            num_pos = 1\n            for s in feat_shape:\n                num_pos *= s\n            emb_shape = (num_pos, dim)\n            self.register_buffer('pos_embed_sin', torch.empty(emb_shape, device=device, dtype=dtype), persistent=False)\n            self.register_buffer('pos_embed_cos', torch.empty(emb_shape, device=device, dtype=dtype), persistent=False)\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        if not self._use_cached_embed:\n            self.bands.copy_(self._compute_bands())\n        else:\n            emb_sin, emb_cos = self._get_pos_embed_values(self.feat_shape)\n            self.pos_embed_sin.copy_(emb_sin)\n            self.pos_embed_cos.copy_(emb_cos)\n\n    def _compute_bands(self, device=None, dtype=None):\n        \"\"\"Compute frequency bands.\"\"\"\n        if self.in_pixels:\n            bands = pixel_freq_bands(\n                self.dim // 4,\n                float(self.max_res),\n                linear_bands=self.linear_bands,\n            )\n        else:\n            bands = freq_bands(\n                self.dim // 4,\n     
           temperature=self.temperature,\n                step=1,\n            )\n        return bands.to(device=device, dtype=dtype)\n\n    def _get_pos_embed_values(self, feat_shape: List[int], device=None, dtype=torch.float32):\n        emb_sin, emb_cos = build_rotary_pos_embed(\n            feat_shape=feat_shape,\n            dim=self.dim,\n            max_res=self.max_res,\n            temperature=self.temperature,\n            linear_bands=self.linear_bands,\n            in_pixels=self.in_pixels,\n            ref_feat_shape=self.ref_feat_shape,\n            grid_offset=self.grid_offset,\n            grid_indexing=self.grid_indexing,\n            device=device,\n            dtype=dtype,\n        )\n        return emb_sin, emb_cos\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n    def update_feat_shape(self, feat_shape: List[int]):\n        if self.feat_shape is not None and feat_shape != self.feat_shape:\n            # only update if feat_shape was set and different from previous value\n            assert self.pos_embed_sin is not None\n            assert self.pos_embed_cos is not None\n            self.pos_embed_sin, self.pos_embed_cos = self._get_pos_embed_values(\n                feat_shape,\n                device=self.pos_embed_sin.device,\n                dtype=self.pos_embed_sin.dtype,\n            )\n            self.feat_shape = feat_shape\n\n    def get_embed(self, shape: Optional[List[int]] = None):\n        if shape is not None and self.bands is not None:\n            # rebuild embeddings every call, use if target shape changes\n            return build_rotary_pos_embed(\n                shape,\n                self.bands,\n                in_pixels=self.in_pixels,\n                ref_feat_shape=self.ref_feat_shape,\n                grid_offset=self.grid_offset,\n                grid_indexing=self.grid_indexing,\n            )\n        elif 
self.pos_embed_sin is not None and self.pos_embed_cos is not None:\n            return self.pos_embed_sin, self.pos_embed_cos\n        else:\n            assert False, \"get_embed() requires pre-computed pos embeds or valid shape w/ pre-computed bands\"\n\n    def forward(self, x):\n        # assuming channel-first tensor where spatial dim are >= 2\n        sin_emb, cos_emb = self.get_embed(x.shape[2:])\n        return apply_rot_embed(x, sin_emb, cos_emb)\n\n\nclass RotaryEmbeddingCat(nn.Module):\n    \"\"\" Rotary position embedding w/ concatenatd sin & cos\n\n    The following impl/resources were referenced for this impl:\n    * https://github.com/lucidrains/vit-pytorch/blob/6f3a5fcf0bca1c5ec33a35ef48d97213709df4ba/vit_pytorch/rvt.py\n    * https://blog.eleuther.ai/rotary-embeddings/\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            max_res: int = 224,\n            temperature: float = 10000,\n            in_pixels: bool = True,\n            linear_bands: bool = False,\n            feat_shape: Optional[List[int]] = None,\n            ref_feat_shape: Optional[List[int]] = None,\n            grid_offset: float = 0.,\n            grid_indexing: str = 'ij',\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        self.dim = dim\n        self.max_res = max_res\n        self.temperature = temperature\n        self.in_pixels = in_pixels\n        self.linear_bands = linear_bands\n        self.feat_shape = feat_shape\n        self.ref_feat_shape = ref_feat_shape\n        self.grid_offset = grid_offset\n        self.grid_indexing = grid_indexing\n\n        # Track which mode we're in\n        self._use_cached_embed = feat_shape is not None\n\n        if feat_shape is None:\n            # bands mode: cache bands, rebuild embeddings on each get_embed call\n            bands_shape = (dim // 4,)\n            self.register_buffer('bands', torch.empty(bands_shape, device=device, dtype=dtype), 
persistent=False)\n            self.pos_embed = None\n        else:\n            # embed mode: cache full embeddings\n            self.bands = None\n            num_pos = 1\n            for s in feat_shape:\n                num_pos *= s\n            emb_shape = (num_pos, dim * 2)  # concatenated sin & cos\n            self.register_buffer('pos_embed', torch.empty(emb_shape, device=device, dtype=dtype), persistent=False)\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        if not self._use_cached_embed:\n            self.bands.copy_(self._compute_bands())\n        else:\n            self.pos_embed.copy_(self._get_pos_embed_values(self.feat_shape))\n\n    def _compute_bands(self, device=None, dtype=None):\n        \"\"\"Compute frequency bands.\"\"\"\n        if self.in_pixels:\n            bands = pixel_freq_bands(\n                self.dim // 4,\n                float(self.max_res),\n                linear_bands=self.linear_bands,\n            )\n        else:\n            bands = freq_bands(\n                self.dim // 4,\n                temperature=self.temperature,\n                step=1,\n            )\n        return bands.to(device=device, dtype=dtype)\n\n    def _get_pos_embed_values(self, feat_shape: List[int], device=None, dtype=torch.float32):\n        embeds = build_rotary_pos_embed(\n            feat_shape=feat_shape,\n            dim=self.dim,\n            max_res=self.max_res,\n            temperature=self.temperature,\n            linear_bands=self.linear_bands,\n            in_pixels=self.in_pixels,\n            ref_feat_shape=self.ref_feat_shape,\n            grid_offset=self.grid_offset,\n            grid_indexing=self.grid_indexing,\n            
device=device,\n            dtype=dtype,\n        )\n        return torch.cat(embeds, -1)\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n    def update_feat_shape(self, feat_shape: List[int]):\n        if self.feat_shape is not None and feat_shape != self.feat_shape:\n            # only update if feat_shape was set and different from previous value\n            assert self.pos_embed is not None\n            self.pos_embed = self._get_pos_embed_values(\n                feat_shape,\n                device=self.pos_embed.device,\n                dtype=self.pos_embed.dtype,\n            )\n            self.feat_shape = feat_shape\n\n    def get_embed(self, shape: Optional[List[int]] = None):\n        if shape is not None and self.bands is not None:\n            # rebuild embeddings from cached bands every call, use if target shape changes\n            embeds = build_rotary_pos_embed(\n                shape,\n                self.bands,\n                in_pixels=self.in_pixels,\n                ref_feat_shape=self.ref_feat_shape,\n                grid_offset=self.grid_offset,\n                grid_indexing=self.grid_indexing,\n            )\n            return torch.cat(embeds, -1)\n        elif self.pos_embed is not None:\n            return self.pos_embed\n        else:\n            assert False, \"get_embed() requires pre-computed pos embed or valid shape w/ pre-computed bands\"\n\n    def get_batch_embeds(\n            self,\n            shapes: List[Tuple[int, int]],\n            seq_len: Optional[int] = None,\n    ) -> Union[torch.Tensor, List[torch.Tensor]]:\n        \"\"\"Generate ROPE embeddings for multiple grid shapes efficiently.\n\n        Computes embeddings for the maximum grid size once, then extracts\n        and flattens the relevant portions for each requested shape.\n\n        Args:\n            shapes: List of (H, W) tuples representing different grid 
sizes\n\n        Returns:\n            List of concatenated sin/cos embeddings for each shape,\n            where each tensor has shape (H*W, dim)\n        \"\"\"\n        if not shapes:\n            return []\n\n        # Check if we have pre-computed bands\n        if self.bands is None:\n            # If we have pre-computed pos_embed for a fixed shape, we can't do batch generation\n            raise RuntimeError(\"Batch embedding generation requires cached bands, not pre-computed embeddings\")\n\n        # Find max dimensions across all shapes\n        max_h = max(h for h, w in shapes)\n        max_w = max(w for h, w in shapes)\n\n        # Generate embeddings for max size ONCE\n        sin_emb, cos_emb = build_rotary_pos_embed(\n            feat_shape=(max_h, max_w),\n            bands=self.bands,\n            in_pixels=self.in_pixels,\n            ref_feat_shape=self.ref_feat_shape,\n            grid_offset=self.grid_offset,\n            grid_indexing=self.grid_indexing,\n        )\n\n        # sin_emb and cos_emb are (max_h * max_w, dim//2)\n        # concat and reshape to 2D for slicing\n        rope_embed_2d = torch.cat([sin_emb, cos_emb], dim=-1).view(max_h, max_w, -1)\n\n        if seq_len is not None:\n            flat_embeds = torch.zeros(len(shapes), seq_len, rope_embed_2d.shape[-1]).type_as(sin_emb)\n            for i, (h, w) in enumerate(shapes):\n                src_len = h * w\n                flat_embeds[i, :src_len] = rope_embed_2d[:h, :w].reshape(src_len, -1)\n            return flat_embeds\n        else:\n            flat_embeds_list = [rope_embed_2d[:h, :w].reshape(h * w, -1) for h, w in shapes]\n            return flat_embeds_list\n\n    def forward(self, x):\n        # assuming channel-first tensor where spatial dim are >= 2\n        pos_embed = self.get_embed(x.shape[2:])\n        return apply_rot_embed_cat(x, pos_embed)\n\n\ndef init_random_2d_freqs(\n        head_dim: int,\n        depth: int,\n        num_heads: int,\n        
temperature: float = 10.0,\n        rotate: bool = True,\n        *,\n        device=None,\n        dtype=torch.float32,\n) -> torch.Tensor:\n    \"\"\" Vectorised 2D ROPE frequencies with random rotation for mixed mode ROPE.\n    Returns:\n         Tensor (2, depth, num_heads, head_dim//2)\n    \"\"\"\n    # base magnitudes, shape: (head_dim//4,)\n    mag = 1.0 / (temperature ** (torch.arange(0, head_dim, 4, device=device, dtype=dtype) / head_dim))\n\n    # (1,1,L) so it broadcasts over both depth and heads\n    mag = mag.unsqueeze(0).unsqueeze(0)  # (1,1,L)\n\n    # random (or zero) rotation per head *and* per block\n    if rotate:\n        angles = torch.rand(depth, num_heads, 1, device=device, dtype=dtype) * 2 * torch.pi\n    else:\n        angles = torch.zeros(depth, num_heads, 1, device=device, dtype=dtype)\n\n    # build (depth, num_heads, 2·L) == head_dim//2 on the last axis\n    fx = torch.cat([mag * torch.cos(angles), mag * torch.cos(angles + torch.pi / 2)], dim=-1)\n    fy = torch.cat([mag * torch.sin(angles), mag * torch.sin(angles + torch.pi / 2)], dim=-1)\n\n    # (2, depth, num_heads, head_dim//2)\n    return torch.stack([fx, fy], dim=0)\n\n\n@torch.fx.wrap\n@register_notrace_function\ndef get_mixed_grid(\n        shape: List[int],\n        grid_indexing: str = 'ij',\n        device: Optional[torch.device] = None,\n        dtype: torch.dtype = torch.float32,\n) -> Tuple[torch.Tensor, torch.Tensor]:\n    if grid_indexing == 'xy':\n        shape = swap_shape_xy(shape)\n    x_pos, y_pos = torch.meshgrid(\n        torch.arange(shape[0], device=device, dtype=torch.float32),\n        torch.arange(shape[1], device=device, dtype=torch.float32),\n        indexing=grid_indexing,\n    )\n    t_x = x_pos.to(dtype).flatten()\n    t_y = y_pos.to(dtype).flatten()\n    return t_x, t_y\n\n\ndef get_mixed_freqs(\n        freqs: torch.Tensor,\n        t_x: torch.Tensor,\n        t_y: torch.Tensor,\n) -> torch.Tensor:\n    \"\"\"Compute mixed (learnable) 
frequencies.\"\"\"\n    # Create position indices\n    dtype = freqs.dtype\n    freqs = freqs.float()\n    freqs_x = (t_x.unsqueeze(-1) @ freqs[0].unsqueeze(-2))\n    freqs_y = (t_y.unsqueeze(-1) @ freqs[1].unsqueeze(-2))\n    combined = freqs_x + freqs_y  # shape: (num_heads, N, dim//4)\n    sin_emb = torch.sin(combined).repeat_interleave(2, -1)  # (N, dim//2)\n    cos_emb = torch.cos(combined).repeat_interleave(2, -1)  # (N, dim//2)\n    rope_embeds = torch.cat([sin_emb, cos_emb], dim=-1)  # (num_heads, H*W, head_dim)\n    return rope_embeds.to(dtype)\n\n\nclass RotaryEmbeddingMixed(nn.Module):\n    \"\"\"Rotary position embedding with depth-dependent learnable frequencies.\n\n    This implementation supports mixed (learnable) ROPE. In mixed mode,\n    each transformer block has its own set of learnable frequency parameters.\n\n    Based on 'Rotary Position Embedding for Vision: https://arxiv.org/abs/2403.13298)'\n    Compatible with original at https://github.com/naver-ai/rope-vit\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            depth: int,\n            num_heads: int,\n            temperature: float = 10.0,\n            feat_shape: Optional[List[int]] = None,\n            grid_indexing: str = 'xy',\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize rotary embeddings.\n\n        Args:\n            dim: Embedding dimension (should be divisible by 4)\n            depth: Number of transformer blocks\n            num_heads: Number of attention heads\n            temperature: Base for frequency computation\n            feat_shape: Spatial dimensions [H, W] if known in advance\n            grid_indexing: How to index grid positions ('xy' or 'ij')\n        \"\"\"\n        super().__init__()\n        self.dim = dim\n        self.depth = depth\n        self.num_heads = num_heads\n        self.temperature = temperature\n        self.feat_shape = feat_shape\n        self.grid_indexing = 
grid_indexing\n\n        head_dim = dim // num_heads\n        assert head_dim % 4 == 0, f\"head_dim must be divisible by 4, got {head_dim}\"\n\n        freqs = init_random_2d_freqs(\n            head_dim,\n            depth,\n            num_heads,\n            temperature=temperature,\n            rotate=True,\n            device=device,\n            dtype=dtype,\n        )  # (2, depth, num_heads, head_dim//2)\n        self.freqs = nn.Parameter(freqs)\n\n        if feat_shape is not None:\n            # cache pre-computed grid\n            num_pos = 1\n            for s in feat_shape:\n                num_pos *= s\n            self.register_buffer('t_x', torch.empty(num_pos, device=device, dtype=dtype), persistent=False)\n            self.register_buffer('t_y', torch.empty(num_pos, device=device, dtype=dtype), persistent=False)\n            # TODO: skip init when on meta device when safe to do so\n            self._init_buffers()\n        else:\n            self.t_x = self.t_y = None\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        if self.feat_shape is not None:\n            t_x, t_y = self._get_grid_values(self.feat_shape)\n            self.t_x.copy_(t_x)\n            self.t_y.copy_(t_y)\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        self._init_buffers()\n\n    def _get_grid_values(self, feat_shape: Optional[List[int]]):\n        t_x, t_y = get_mixed_grid(\n            feat_shape,\n            grid_indexing=self.grid_indexing,\n            device=self.freqs.device,\n        )\n        return t_x, t_y\n\n    def update_feat_shape(self, feat_shape: Optional[List[int]]):\n        if self.feat_shape is not None and feat_shape != self.feat_shape:\n            assert self.t_x is not None\n            assert self.t_y is not None\n            t_x, t_y = self._get_grid_values(feat_shape)\n            self.t_x = t_x.to(self.t_x.device, 
self.t_x.dtype)\n            self.t_y = t_y.to(self.t_y.device, self.t_y.dtype)\n            self.feat_shape = feat_shape\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n    def get_embed(self, shape: Optional[List[int]] = None) -> torch.Tensor:\n        \"\"\"Generate rotary embeddings for the given spatial shape.\n\n        Args:\n            shape: Spatial dimensions [H, W]\n\n        Returns:\n            Tensor of shape (depth, H*W, dim) containing concatenated sin/cos embeddings\n        \"\"\"\n        if shape is not None:\n            t_x, t_y = get_mixed_grid(\n                shape,\n                grid_indexing=self.grid_indexing,\n                device=self.freqs.device\n            )\n        elif self.t_x is not None and self.t_y is not None:\n            t_x, t_y = self.t_x, self.t_y\n        else:\n            assert False, \"get_embed() requires pre-computed t_x/t_y or valid shape\"\n\n        return get_mixed_freqs(self.freqs, t_x, t_y)\n\n    def get_batch_embeds(\n            self,\n            shapes: List[Tuple[int, int]],\n            seq_len: Optional[int] = None,\n    ) -> Union[torch.Tensor, List[torch.Tensor]]:\n        \"\"\"Generate ROPE embeddings for multiple grid shapes efficiently.\n\n        Computes embeddings for the maximum grid size once, then extracts\n        and flattens the relevant portions for each requested shape.\n\n        Args:\n            shapes: List of (H, W) tuples representing different grid sizes\n            seq_len: If provided, return padded tensor of this length. 
Otherwise return list.\n\n        Returns:\n            If seq_len is provided: Padded tensor of shape (len(shapes), depth, num_heads, seq_len, dim)\n            Otherwise: List of tensors with shape (depth, num_heads, H*W, dim) for each shape\n        \"\"\"\n        if not shapes:\n            return []\n\n        # Find max dimensions\n        max_h = max(h for h, w in shapes)\n        max_w = max(w for h, w in shapes)\n\n        # Generate embeddings for max size ONCE\n        t_x, t_y = get_mixed_grid(\n            [max_h, max_w],\n            grid_indexing=self.grid_indexing,\n            device=self.freqs.device\n        )\n        max_embed = get_mixed_freqs(self.freqs, t_x, t_y)  # (depth, num_heads, max_h*max_w, dim)\n\n        # Reshape to 2D grid for easy slicing\n        depth, num_heads, _, dim = max_embed.shape\n        max_embed_2d = max_embed.view(depth, num_heads, max_h, max_w, dim)\n\n        if seq_len is not None:\n            # Return padded tensor\n            B = len(shapes)\n            padded = torch.zeros(B, depth, num_heads, seq_len, dim, device=self.freqs.device, dtype=self.freqs.dtype)\n            for i, (h, w) in enumerate(shapes):\n                # Slice and flatten\n                embed_slice = max_embed_2d[:, :, :h, :w].reshape(depth, num_heads, h * w, dim)\n                actual_len = h * w\n                padded[i, :, :, :actual_len] = embed_slice\n            return padded\n        else:\n            # Return list\n            results = []\n            for h, w in shapes:\n                # Slice and flatten\n                embed_slice = max_embed_2d[:, :, :h, :w].reshape(depth, num_heads, h * w, dim)\n                results.append(embed_slice)\n            return results\n\n    def forward(self, x):\n        # assuming channel-first tensor where spatial dim are >= 2\n        pos_embed = self.get_embed(x.shape[2:])\n        return apply_rot_embed_cat(x, pos_embed)\n\n    def no_weight_decay(self):\n        \"\"\"Exclude 
frequency parameters from weight decay.\"\"\"\n        return {'freqs'}\n\n\n@torch.fx.wrap\n@register_notrace_function\ndef make_coords_dinov3(\n        height: int,\n        width: int,\n        normalize_coords: str = 'separate',\n        grid_indexing: str = 'ij',\n        grid_offset: float = 0.,\n        device: torch.device = 'cpu',\n        dtype: torch.dtype = torch.float32,\n) -> torch.Tensor:\n    \"\"\"Make coordinate grid matching offset and normalization of original.\n    Returns: coords with shape (HW, 2) in [-1, 1].\n    \"\"\"\n    # 0.5-centered indices with optional offset\n    coords_h = torch.arange(0.5, height, device=device, dtype=torch.float32) + grid_offset\n    coords_w = torch.arange(0.5, width, device=device, dtype=torch.float32) + grid_offset\n\n    # Normalization denominators\n    if normalize_coords == \"max\":\n        denom = float(max(height, width))\n        h_denom = denom\n        w_denom = denom\n    elif normalize_coords == \"min\":\n        denom = float(min(height, width))\n        h_denom = denom\n        w_denom = denom\n    elif normalize_coords == \"separate\":\n        h_denom = float(height)\n        w_denom = float(width)\n    else:\n        raise ValueError(f\"Unknown normalize_coords: {normalize_coords}\")\n\n    # Normalize to [0, 1]\n    coords_h = coords_h / h_denom\n    coords_w = coords_w / w_denom\n    coords_h = coords_h.to(dtype)\n    coords_w = coords_w.to(dtype)\n\n    # Create grid then map to [-1, 1]\n    if grid_indexing == \"xy\":\n        grid_w, grid_h = torch.meshgrid(coords_w, coords_h, indexing=\"xy\")\n        coords = torch.stack([grid_h, grid_w], dim=-1)  # (H, W, 2) -> (h, w order)\n    else:\n        coords = torch.stack(torch.meshgrid(coords_h, coords_w, indexing=\"ij\"), dim=-1)  # (H, W, 2)\n    coords = coords.flatten(0, 1)  # (HW, 2)\n    coords = 2.0 * coords - 1.0  # (H, W, 2) in [-1, 1]\n    return coords\n\n\nclass RotaryEmbeddingDinoV3(nn.Module):\n    \"\"\"RoPE for timm DinoV3 
port, numerically matching original.\n\n    Math is aligned to original DinoV3 RopePositionEmbedding at https://github.com/facebookresearch/dinov3:\n      - 0.5-centered coords normalized by H/W (or min/max), mapped to [-1,1]\n      - training-time augmentations (shift/jitter/rescale)\n      - periods schedule equals Rope's temperature (base) or min/max period\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            temperature: Optional[float] = 100.0,\n            min_period: Optional[float] = None,\n            max_period: Optional[float] = None,\n            feat_shape: Optional[List[int]] = None,\n            normalize_coords: str = \"separate\",  # 'min', 'max', 'separate'\n            grid_offset: float = 0.0,\n            grid_indexing: str = \"ij\",\n            rotate_half: bool = True,\n            shift_coords: Optional[float] = None,\n            jitter_coords: Optional[float] = None,  # interpreted as factor J >= 1\n            rescale_coords: Optional[float] = None,  # interpreted as factor R >= 1\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n\n        # Dimensions / output format\n        self.dim = dim  # equal to head_dim for most vit applications\n        self.rotate_half = rotate_half\n\n        # Period schedule parameters\n        self.temperature = float(temperature)\n        self.min_period = min_period\n        self.max_period = max_period\n\n        # Coord processing + augs\n        self.normalize_coords = normalize_coords\n        self.shift_coords = shift_coords\n        self.jitter_coords = jitter_coords\n        self.rescale_coords = rescale_coords\n        self.aug_active = any([a is not None for a in [self.shift_coords, self.jitter_coords, self.rescale_coords]])\n\n        # Grid config\n        self.feat_shape = feat_shape\n        self.grid_offset = grid_offset\n        self.grid_indexing = grid_indexing\n\n        # Register empty buffer for periods\n        
periods_shape = (dim // 4,)\n        self.register_buffer(\"periods\", torch.empty(periods_shape, device=device, dtype=dtype), persistent=False)\n\n        if feat_shape is not None:\n            # Register empty buffer for cached embeddings\n            num_pos = feat_shape[0] * feat_shape[1]\n            emb_shape = (num_pos, dim * 2)  # concatenated sin & cos\n            self.register_buffer(\"pos_embed_cached\", torch.empty(emb_shape, device=device, dtype=dtype), persistent=False)\n        else:\n            self.pos_embed_cached = None\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        self.periods.copy_(self._compute_periods())\n        if self.feat_shape is not None and self.pos_embed_cached is not None:\n            rope_embed = self._create_embed(self.feat_shape, no_aug=True)\n            self.pos_embed_cached.copy_(rope_embed)\n\n    def _compute_periods(self, device: torch.device = 'cpu', dtype: torch.dtype = torch.float32) -> torch.Tensor:\n        \"\"\"Construct periods from either min/max or temperature.\"\"\"\n        dim = self.dim // 4\n\n        if self.min_period is not None and self.max_period is not None:\n            exponents = torch.linspace(0, 1, dim, device='cpu', dtype=torch.float32)\n            periods = self.min_period * ((self.max_period / self.min_period) ** exponents)\n        else:\n            if self.temperature is None:\n                raise ValueError(\"Provide either min/max periods or `temperature`.\")\n            exponents = 2.0 * torch.arange(dim, device='cpu', dtype=torch.float32) / (self.dim // 2)\n            periods = self.temperature ** exponents\n\n        # NOTE: The original dinov3 model weights have periods downcast to 
bfloat16 in persistent buffers,\n        # loaded models will differ a bit vs timm as periods is not persistent and generated in float32 by default\n        return periods.to(device=device, dtype=dtype)\n\n    def _apply_coord_augs(self, coords: torch.Tensor) -> torch.Tensor:\n        \"\"\"Apply shift/jitter/rescale train time augmentations.\"\"\"\n        if not self.training or not self.aug_active:\n            return coords\n\n        device = coords.device\n        dtype = coords.dtype\n\n        # Shift per-axis in [-s, +s]\n        if self.shift_coords is not None:\n            shift = float(self.shift_coords)\n            shift_hw = torch.empty(2, device=device, dtype=dtype).uniform_(-shift, shift)\n            coords = coords + shift_hw[None, :]\n\n        # Jitter: per-axis log-uniform factor in [1/J, J]\n        if self.jitter_coords is not None:\n            jitter_factor = float(self.jitter_coords)\n            if jitter_factor <= 0:\n                raise ValueError(\"jitter_coords must be > 0 (interpreted as multiplicative factor).\")\n            jitter_max = math.log(jitter_factor)\n            jitter_hw = torch.empty(2, device=device, dtype=dtype).uniform_(-jitter_max, jitter_max).exp()\n            coords = coords * jitter_hw[None, :]\n\n        # Rescale: shared scalar log-uniform factor in [1/R, R]\n        if self.rescale_coords is not None:\n            rescale_factor = float(self.rescale_coords)\n            if rescale_factor <= 0:\n                raise ValueError(\"rescale_coords must be > 0 (interpreted as multiplicative factor).\")\n            rescale_max = math.log(rescale_factor)\n            rescale = torch.empty(1, device=device, dtype=dtype).uniform_(-rescale_max, rescale_max).exp()\n            coords = coords * rescale\n\n        return coords\n\n    def _get_pos_embed_from_coords(self, coords: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:\n        \"\"\"Return sin/cos embeddings with either 'half' or 'interleaved' 
layout.\"\"\"\n        # coords: (HW, 2); periods: (dim)\n        dim = self.dim // 4\n        device = self.periods.device\n        dtype = self.periods.dtype\n        assert self.periods.numel() == dim\n\n        # NOTE this is a slightly later device/dtype switch than original\n        coords = coords[:, :, None].to(device=device, dtype=dtype)\n        angles = 2 * math.pi * coords / self.periods[None, None, :]\n        angles = angles.flatten(1)  # (HW, dim // 2)\n\n        if self.rotate_half:\n            # Tile (half layout) (HW, dim // 2) -> (HW, dim)\n            angles = angles.tile(2)\n        else:\n            # Interleaved layout (HW, dim // 2) -> (HW, dim)\n            angles = angles.repeat_interleave(2, dim=-1)\n\n        sin = torch.sin(angles)\n        cos = torch.cos(angles)\n        return sin, cos\n\n    def _create_embed(\n            self,\n            feat_shape: List[int],\n            no_aug: bool = False,\n    ) -> torch.Tensor:\n        H, W = feat_shape\n        coords = make_coords_dinov3(\n            H, W,\n            normalize_coords=self.normalize_coords,\n            grid_indexing=self.grid_indexing,\n            grid_offset=self.grid_offset,\n        )  # (HW, 2)\n        if not no_aug:\n            coords = self._apply_coord_augs(coords)\n        sin, cos = self._get_pos_embed_from_coords(coords)  # 2 * (HW, dim)\n        rope_embed = torch.cat([sin, cos], dim=-1)  # (HW, 2*dim)\n        return rope_embed\n\n    def _cache_embed(self, feat_shape: List[int]):\n        # create non-augmented embeds for cache\n        rope_embed = self._create_embed(feat_shape, no_aug=True)\n        self.register_buffer(\"pos_embed_cached\", rope_embed, persistent=False)\n        self.feat_shape = feat_shape\n\n    def update_feat_shape(self, feat_shape: List[int]):\n        if self.feat_shape is not None and feat_shape != self.feat_shape:\n            # only update if feat_shape was set (valid cache) and different from previous value\n           
 self._cache_embed(feat_shape)\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n    def get_embed(self, shape: Optional[List[int]] = None) -> torch.Tensor:\n        \"\"\"Generate rope_embed matching DINOv3 RopePositionEmbedding numerics.\n\n        Returns: (HW, num_heads, 2 * head_dim) with last dim = [sin, cos] cat.\n        \"\"\"\n        if shape is not None:\n            rope_embed = self._create_embed(shape)\n        else:\n            need_create = self.pos_embed_cached is None or (self.training and self.aug_active)\n            if need_create:\n                assert self.feat_shape is not None, 'feature shape must be cached on create'\n                rope_embed = self._create_embed(self.feat_shape)\n            else:\n                assert self.pos_embed_cached is not None\n                rope_embed = self.pos_embed_cached\n\n        return rope_embed\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Get and apply rotary embeddings to x\"\"\"\n        # assuming channel-first tensor where spatial dim are >= 2\n        pos_embed = self.get_embed(x.shape[2:])\n        return apply_rot_embed_cat(x, pos_embed, half=self.rotate_half)\n\n\ndef create_rope_embed(\n        rope_type: str = 'cat',\n        dim: int = 768,\n        num_heads: int = 12,\n        **kwargs\n) -> nn.Module:\n    \"\"\"Factory function for creating rotary position embeddings.\n\n    Args:\n        rope_type: Type of RoPE to create. 
Options:\n            - 'base': Basic RotaryEmbedding\n            - 'cat': RotaryEmbeddingCat (concatenated sin/cos)\n            - 'mixed': RotaryEmbeddingMixed (learnable per-depth frequencies)\n            - 'dinov3': RotaryEmbeddingDinoV3 (with coordinate transforms)\n        dim: Total embedding dimension\n        num_heads: Number of attention heads\n        **kwargs: Additional arguments passed to the specific RoPE class\n\n    Returns:\n        Rotary embedding module\n    \"\"\"\n    if rope_type == 'base':\n        kwargs.pop('rotate_half', None)  # doesn't support\n        return RotaryEmbedding(dim=dim // num_heads, **kwargs)\n    elif rope_type == 'cat':\n        kwargs.pop('rotate_half', None)  # doesn't support\n        return RotaryEmbeddingCat(dim=dim // num_heads, **kwargs)\n    elif rope_type == 'mixed':\n        # Mixed requires depth parameter, generates differing embeddings per layer and head\n        kwargs.pop('in_pixels', None)  # doesn't support\n        kwargs.pop('ref_feat_shape', None)  # doesn't support\n        return RotaryEmbeddingMixed(dim=dim, num_heads=num_heads, **kwargs)\n    elif rope_type == 'dinov3':\n        kwargs.pop('in_pixels', None)  # doesn't support\n        kwargs.pop('ref_feat_shape', None)  # doesn't support\n        return RotaryEmbeddingDinoV3(dim=dim // num_heads, **kwargs)\n    else:\n        raise ValueError(f\"Unknown RoPE type: {rope_type}\")\n"
  },
  {
    "path": "timm/layers/selective_kernel.py",
    "content": "\"\"\" Selective Kernel Convolution/Attention\n\nPaper: Selective Kernel Networks (https://arxiv.org/abs/1903.06586)\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom typing import List, Optional, Tuple, Type, Union\n\nimport torch\nfrom torch import nn as nn\n\nfrom .conv_bn_act import ConvNormAct\nfrom .helpers import make_divisible\nfrom .trace_utils import _assert\n\n\ndef _kernel_valid(k):\n    if isinstance(k, (list, tuple)):\n        for ki in k:\n            return _kernel_valid(ki)\n    assert k >= 3 and k % 2\n\n\nclass SelectiveKernelAttn(nn.Module):\n    def __init__(\n            self,\n            channels: int,\n            num_paths: int = 2,\n            attn_channels: int = 32,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\" Selective Kernel Attention Module\n\n        Selective Kernel attention mechanism factored out into its own module.\n\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_paths = num_paths\n        self.fc_reduce = nn.Conv2d(channels, attn_channels, kernel_size=1, bias=False, **dd)\n        self.bn = norm_layer(attn_channels, **dd)\n        self.act = act_layer(inplace=True)\n        self.fc_select = nn.Conv2d(attn_channels, channels * num_paths, kernel_size=1, bias=False, **dd)\n\n    def forward(self, x):\n        _assert(x.shape[1] == self.num_paths, '')\n        x = x.sum(1).mean((2, 3), keepdim=True)\n        x = self.fc_reduce(x)\n        x = self.bn(x)\n        x = self.act(x)\n        x = self.fc_select(x)\n        B, C, H, W = x.shape\n        x = x.view(B, self.num_paths, C // self.num_paths, H, W)\n        x = torch.softmax(x, dim=1)\n        return x\n\n\nclass SelectiveKernel(nn.Module):\n\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: 
Optional[int] = None,\n            kernel_size: Optional[Union[int, List[int]]] = None,\n            stride: int = 1,\n            dilation: int = 1,\n            groups: int = 1,\n            rd_ratio: float = 1./16,\n            rd_channels: Optional[int] = None,\n            rd_divisor: int = 8,\n            keep_3x3: bool = True,\n            split_input: bool = True,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Type[nn.Module]= nn.BatchNorm2d,\n            aa_layer: Optional[Type[nn.Module]] = None,\n            drop_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\" Selective Kernel Convolution Module\n\n        As described in Selective Kernel Networks (https://arxiv.org/abs/1903.06586) with some modifications.\n\n        Largest change is the input split, which divides the input channels across each convolution path, this can\n        be viewed as a grouping of sorts, but the output channel counts expand to the module level value. This keeps\n        the parameter count from ballooning when the convolutions themselves don't have groups, but still provides\n        a noteworthy increase in performance over similar param count models without this attention layer. 
-Ross W\n\n        Args:\n            in_channels:  module input (feature) channel count\n            out_channels:  module output (feature) channel count\n            kernel_size: kernel size for each convolution branch\n            stride: stride for convolutions\n            dilation: dilation for module as a whole, impacts dilation of each branch\n            groups: number of groups for each branch\n            rd_ratio: reduction factor for attention features\n            keep_3x3: keep all branch convolution kernels as 3x3, changing larger kernels for dilations\n            split_input: split input channels evenly across each convolution branch, keeps param count lower,\n                can be viewed as grouping by path, output expands to module out_channels count\n            act_layer: activation layer to use\n            norm_layer: batchnorm/norm layer to use\n            aa_layer: anti-aliasing module\n            drop_layer: spatial drop module in convs (drop block, etc)\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_channels = out_channels or in_channels\n        kernel_size = kernel_size or [3, 5]  # default to one 3x3 and one 5x5 branch. 
5x5 -> 3x3 + dilation\n        _kernel_valid(kernel_size)\n        if not isinstance(kernel_size, list):\n            kernel_size = [kernel_size] * 2\n        if keep_3x3:\n            dilation = [dilation * (k - 1) // 2 for k in kernel_size]\n            kernel_size = [3] * len(kernel_size)\n        else:\n            dilation = [dilation] * len(kernel_size)\n        self.num_paths = len(kernel_size)\n        self.in_channels = in_channels\n        self.out_channels = out_channels\n        self.split_input = split_input\n        if self.split_input:\n            assert in_channels % self.num_paths == 0\n            in_channels = in_channels // self.num_paths\n        groups = min(out_channels, groups)\n\n        conv_kwargs = dict(\n            stride=stride, groups=groups, act_layer=act_layer, norm_layer=norm_layer,\n            aa_layer=aa_layer, drop_layer=drop_layer, **dd)\n        self.paths = nn.ModuleList([\n            ConvNormAct(in_channels, out_channels, kernel_size=k, dilation=d, **conv_kwargs)\n            for k, d in zip(kernel_size, dilation)])\n\n        attn_channels = rd_channels or make_divisible(out_channels * rd_ratio, divisor=rd_divisor)\n        self.attn = SelectiveKernelAttn(out_channels, self.num_paths, attn_channels, **dd)\n\n    def forward(self, x):\n        if self.split_input:\n            x_split = torch.split(x, self.in_channels // self.num_paths, 1)\n            x_paths = [op(x_split[i]) for i, op in enumerate(self.paths)]\n        else:\n            x_paths = [op(x) for op in self.paths]\n        x = torch.stack(x_paths, dim=1)\n        x_attn = self.attn(x)\n        x = x * x_attn\n        x = torch.sum(x, dim=1)\n        return x\n"
  },
  {
    "path": "timm/layers/separable_conv.py",
    "content": "\"\"\" Depthwise Separable Conv Modules\n\nBasic DWS convs. Other variations of DWS exist with batch norm or activations between the\nDW and PW convs such as the Depthwise modules in MobileNetV2 / EfficientNet and Xception.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom typing import Optional, Type, Union\n\nfrom torch import nn as nn\n\nfrom .create_conv2d import create_conv2d\nfrom .create_norm_act import get_norm_act_layer\n\n\nclass SeparableConvNormAct(nn.Module):\n    \"\"\" Separable Conv w/ trailing Norm and Activation\n    \"\"\"\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int = 3,\n            stride: int = 1,\n            dilation: int = 1,\n            padding: str = '',\n            bias: bool = False,\n            channel_multiplier: float = 1.0,\n            pw_kernel_size: int = 1,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            apply_act: bool = True,\n            drop_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.conv_dw = create_conv2d(\n            in_channels,\n            int(in_channels * channel_multiplier),\n            kernel_size,\n            stride=stride,\n            dilation=dilation,\n            padding=padding,\n            depthwise=True,\n            **dd,\n        )\n\n        self.conv_pw = create_conv2d(\n            int(in_channels * channel_multiplier),\n            out_channels,\n            pw_kernel_size,\n            padding=padding,\n            bias=bias,\n            **dd,\n        )\n\n        norm_act_layer = get_norm_act_layer(norm_layer, act_layer)\n        norm_kwargs = dict(drop_layer=drop_layer) if drop_layer is not None else {}\n        self.bn = norm_act_layer(out_channels, 
apply_act=apply_act, **norm_kwargs, **dd)\n\n    @property\n    def in_channels(self):\n        return self.conv_dw.in_channels\n\n    @property\n    def out_channels(self):\n        return self.conv_pw.out_channels\n\n    def forward(self, x):\n        x = self.conv_dw(x)\n        x = self.conv_pw(x)\n        x = self.bn(x)\n        return x\n\n\nSeparableConvBnAct = SeparableConvNormAct\n\n\nclass SeparableConv2d(nn.Module):\n    \"\"\" Separable Conv\n    \"\"\"\n    def __init__(\n            self,\n            in_channels,\n            out_channels,\n            kernel_size=3,\n            stride=1,\n            dilation=1,\n            padding='',\n            bias=False,\n            channel_multiplier=1.0,\n            pw_kernel_size=1,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.conv_dw = create_conv2d(\n            in_channels,\n            int(in_channels * channel_multiplier),\n            kernel_size,\n            stride=stride,\n            dilation=dilation,\n            padding=padding,\n            depthwise=True,\n            **dd,\n        )\n\n        self.conv_pw = create_conv2d(\n            int(in_channels * channel_multiplier),\n            out_channels,\n            pw_kernel_size,\n            padding=padding,\n            bias=bias,\n            **dd,\n        )\n\n    @property\n    def in_channels(self):\n        return self.conv_dw.in_channels\n\n    @property\n    def out_channels(self):\n        return self.conv_pw.out_channels\n\n    def forward(self, x):\n        x = self.conv_dw(x)\n        x = self.conv_pw(x)\n        return x\n"
  },
  {
    "path": "timm/layers/space_to_depth.py",
    "content": "import torch\nimport torch.nn as nn\n\n\nclass SpaceToDepth(nn.Module):\n    \"\"\"Rearrange spatial dimensions into channel dimension.\n\n    Divides spatial dimensions by block_size and multiplies channels by block_size^2.\n    Used in TResNet as an efficient stem operation.\n\n    Args:\n        block_size: Spatial reduction factor.\n    \"\"\"\n    bs: torch.jit.Final[int]\n\n    def __init__(self, block_size: int = 4):\n        super().__init__()\n        assert block_size == 4\n        self.bs = block_size\n\n    def forward(self, x):\n        N, C, H, W = x.size()\n        x = x.view(N, C, H // self.bs, self.bs, W // self.bs, self.bs)  # (N, C, H//bs, bs, W//bs, bs)\n        x = x.permute(0, 3, 5, 1, 2, 4).contiguous()  # (N, bs, bs, C, H//bs, W//bs)\n        x = x.view(N, C * self.bs * self.bs, H // self.bs, W // self.bs)  # (N, C*bs^2, H//bs, W//bs)\n        return x\n\n\nclass DepthToSpace(nn.Module):\n    \"\"\"Rearrange channel dimension into spatial dimensions.\n\n    Inverse of SpaceToDepth. Divides channels by block_size^2 and multiplies\n    spatial dimensions by block_size.\n\n    Args:\n        block_size: Spatial expansion factor.\n    \"\"\"\n\n    def __init__(self, block_size):\n        super().__init__()\n        self.bs = block_size\n\n    def forward(self, x):\n        N, C, H, W = x.size()\n        x = x.view(N, self.bs, self.bs, C // (self.bs ** 2), H, W)  # (N, bs, bs, C//bs^2, H, W)\n        x = x.permute(0, 3, 4, 1, 5, 2).contiguous()  # (N, C//bs^2, H, bs, W, bs)\n        x = x.view(N, C // (self.bs ** 2), H * self.bs, W * self.bs)  # (N, C//bs^2, H * bs, W * bs)\n        return x\n"
  },
  {
    "path": "timm/layers/split_attn.py",
    "content": "\"\"\" Split Attention Conv2d (for ResNeSt Models)\n\nPaper: `ResNeSt: Split-Attention Networks` - /https://arxiv.org/abs/2004.08955\n\nAdapted from original PyTorch impl at https://github.com/zhanghang1989/ResNeSt\n\nModified for torchscript compat, performance, and consistency with timm by Ross Wightman\n\"\"\"\nfrom typing import Optional, Type, Union\n\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\n\nfrom .helpers import make_divisible\n\n\nclass RadixSoftmax(nn.Module):\n    def __init__(self, radix: int, cardinality: int):\n        super().__init__()\n        self.radix = radix\n        self.cardinality = cardinality\n\n    def forward(self, x):\n        batch = x.size(0)\n        if self.radix > 1:\n            x = x.view(batch, self.cardinality, self.radix, -1).transpose(1, 2)\n            x = F.softmax(x, dim=1)\n            x = x.reshape(batch, -1)\n        else:\n            x = torch.sigmoid(x)\n        return x\n\n\nclass SplitAttn(nn.Module):\n    \"\"\"Split-Attention (aka Splat)\n    \"\"\"\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: Optional[int] = None,\n            kernel_size: int = 3,\n            stride: int = 1,\n            padding: Optional[int] = None,\n            dilation: int = 1,\n            groups: int = 1,\n            bias: bool = False,\n            radix: int = 2,\n            rd_ratio: float = 0.25,\n            rd_channels: Optional[int] = None,\n            rd_divisor: int = 8,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            drop_layer: Optional[Type[nn.Module]] = None,\n            **kwargs,\n    ):\n        dd = {'device': kwargs.pop('device', None), 'dtype': kwargs.pop('dtype', None)}\n        super().__init__()\n        out_channels = out_channels or in_channels\n        self.radix = radix\n        mid_chs = out_channels * radix\n        if rd_channels is 
None:\n            attn_chs = make_divisible(in_channels * radix * rd_ratio, min_value=32, divisor=rd_divisor)\n        else:\n            attn_chs = rd_channels * radix\n\n        padding = kernel_size // 2 if padding is None else padding\n        self.conv = nn.Conv2d(\n            in_channels,\n            mid_chs,\n            kernel_size,\n            stride,\n            padding,\n            dilation,\n            groups=groups * radix,\n            bias=bias,\n            **kwargs,\n            **dd,\n        )\n        self.bn0 = norm_layer(mid_chs, **dd) if norm_layer else nn.Identity()\n        self.drop = drop_layer() if drop_layer is not None else nn.Identity()\n        self.act0 = act_layer(inplace=True)\n        self.fc1 = nn.Conv2d(out_channels, attn_chs, 1, groups=groups, **dd)\n        self.bn1 = norm_layer(attn_chs, **dd) if norm_layer else nn.Identity()\n        self.act1 = act_layer(inplace=True)\n        self.fc2 = nn.Conv2d(attn_chs, mid_chs, 1, groups=groups, **dd)\n        self.rsoftmax = RadixSoftmax(radix, groups)\n\n    def forward(self, x):\n        x = self.conv(x)\n        x = self.bn0(x)\n        x = self.drop(x)\n        x = self.act0(x)\n\n        B, RC, H, W = x.shape\n        if self.radix > 1:\n            x = x.reshape((B, self.radix, RC // self.radix, H, W))\n            x_gap = x.sum(dim=1)\n        else:\n            x_gap = x\n        x_gap = x_gap.mean((2, 3), keepdim=True)\n        x_gap = self.fc1(x_gap)\n        x_gap = self.bn1(x_gap)\n        x_gap = self.act1(x_gap)\n        x_attn = self.fc2(x_gap)\n\n        x_attn = self.rsoftmax(x_attn).view(B, -1, 1, 1)\n        if self.radix > 1:\n            out = (x * x_attn.reshape((B, self.radix, RC // self.radix, 1, 1))).sum(dim=1)\n        else:\n            out = x * x_attn\n        return out.contiguous()\n"
  },
  {
    "path": "timm/layers/split_batchnorm.py",
    "content": "\"\"\" Split BatchNorm\n\nA PyTorch BatchNorm layer that splits input batch into N equal parts and passes each through\na separate BN layer. The first split is passed through the parent BN layers with weight/bias\nkeys the same as the original BN. All other splits pass through BN sub-layers under the '.aux_bn'\nnamespace.\n\nThis allows easily removing the auxiliary BN layers after training to efficiently\nachieve the 'Auxiliary BatchNorm' as described in the AdvProp Paper, section 4.2,\n'Disentangled Learning via An Auxiliary BN'\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport torch\nimport torch.nn as nn\n\n\nclass SplitBatchNorm2d(torch.nn.BatchNorm2d):\n\n    def __init__(\n            self,\n            num_features,\n            eps=1e-5,\n            momentum=0.1,\n            affine=True,\n            track_running_stats=True,\n            num_splits=2,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__(num_features, eps, momentum, affine, track_running_stats)\n        assert num_splits > 1, 'Should have at least one aux BN layer (num_splits at least 2)'\n        self.num_splits = num_splits\n        self.aux_bn = nn.ModuleList([\n            nn.BatchNorm2d(num_features, eps, momentum, affine, track_running_stats, **dd)\n            for _ in range(num_splits - 1)\n        ])\n\n    def forward(self, input: torch.Tensor):\n        if self.training:  # aux BN only relevant while training\n            split_size = input.shape[0] // self.num_splits\n            assert input.shape[0] == split_size * self.num_splits, \"batch size must be evenly divisible by num_splits\"\n            split_input = input.split(split_size)\n            x = [super().forward(split_input[0])]\n            for i, a in enumerate(self.aux_bn):\n                x.append(a(split_input[i + 1]))\n            return torch.cat(x, dim=0)\n        else:\n            return 
super().forward(input)\n\n\ndef convert_splitbn_model(module, num_splits=2):\n    \"\"\"\n    Recursively traverse module and its children to replace all instances of\n    ``torch.nn.modules.batchnorm._BatchNorm`` with `SplitBatchnorm2d`.\n    Args:\n        module (torch.nn.Module): input module\n        num_splits: number of separate batchnorm layers to split input across\n    Example::\n        >>> # model is an instance of torch.nn.Module\n        >>> model = timm.models.convert_splitbn_model(model, num_splits=2)\n    \"\"\"\n    mod = module\n    if isinstance(module, torch.nn.modules.instancenorm._InstanceNorm):\n        return module\n    if isinstance(module, torch.nn.modules.batchnorm._BatchNorm):\n        mod = SplitBatchNorm2d(\n            module.num_features, module.eps, module.momentum, module.affine,\n            module.track_running_stats, num_splits=num_splits)\n        mod.running_mean = module.running_mean\n        mod.running_var = module.running_var\n        mod.num_batches_tracked = module.num_batches_tracked\n        if module.affine:\n            mod.weight.data = module.weight.data.clone().detach()\n            mod.bias.data = module.bias.data.clone().detach()\n        for aux in mod.aux_bn:\n            aux.running_mean = module.running_mean.clone()\n            aux.running_var = module.running_var.clone()\n            aux.num_batches_tracked = module.num_batches_tracked.clone()\n            if module.affine:\n                aux.weight.data = module.weight.data.clone().detach()\n                aux.bias.data = module.bias.data.clone().detach()\n    for name, child in module.named_children():\n        mod.add_module(name, convert_splitbn_model(child, num_splits=num_splits))\n    del module\n    return mod\n"
  },
  {
    "path": "timm/layers/squeeze_excite.py",
    "content": "\"\"\" Squeeze-and-Excitation Channel Attention\n\nAn SE implementation originally based on PyTorch SE-Net impl.\nHas since evolved with additional functionality / configuration.\n\nPaper: `Squeeze-and-Excitation Networks` - https://arxiv.org/abs/1709.01507\n\nAlso included is Effective Squeeze-Excitation (ESE).\nPaper: `CenterMask : Real-Time Anchor-Free Instance Segmentation` - https://arxiv.org/abs/1911.06667\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nfrom typing import Optional, Tuple, Type, Union\n\nfrom torch import nn as nn\n\nfrom .create_act import create_act_layer\nfrom .helpers import make_divisible\n\n\nclass SEModule(nn.Module):\n    \"\"\" SE Module as defined in original SE-Nets with a few additions\n    Additions include:\n        * divisor can be specified to keep channels % div == 0 (default: 8)\n        * reduction channels can be specified directly by arg (if rd_channels is set)\n        * reduction channels can be specified by float rd_ratio (default: 1/16)\n        * global max pooling can be added to the squeeze aggregation\n        * customizable activation, normalization, and gate layer\n    \"\"\"\n    def __init__(\n            self,\n            channels: int,\n            rd_ratio: float = 1. 
/ 16,\n            rd_channels: Optional[int] = None,\n            rd_divisor: int = 8,\n            add_maxpool: bool = False,\n            bias: bool = True,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            gate_layer: Union[str, Type[nn.Module]] = 'sigmoid',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.add_maxpool = add_maxpool\n        if not rd_channels:\n            rd_channels = make_divisible(channels * rd_ratio, rd_divisor, round_limit=0.)\n        self.fc1 = nn.Conv2d(channels, rd_channels, kernel_size=1, bias=bias, **dd)\n        self.bn = norm_layer(rd_channels, **dd) if norm_layer else nn.Identity()\n        self.act = create_act_layer(act_layer, inplace=True)\n        self.fc2 = nn.Conv2d(rd_channels, channels, kernel_size=1, bias=bias, **dd)\n        self.gate = create_act_layer(gate_layer)\n\n    def forward(self, x):\n        x_se = x.mean((2, 3), keepdim=True)\n        if self.add_maxpool:\n            # experimental codepath, may remove or change\n            x_se = 0.5 * x_se + 0.5 * x.amax((2, 3), keepdim=True)\n        x_se = self.fc1(x_se)\n        x_se = self.act(self.bn(x_se))\n        x_se = self.fc2(x_se)\n        return x * self.gate(x_se)\n\n\nSqueezeExcite = SEModule  # alias\n\n\nclass EffectiveSEModule(nn.Module):\n    \"\"\" 'Effective Squeeze-Excitation\n    From `CenterMask : Real-Time Anchor-Free Instance Segmentation` - https://arxiv.org/abs/1911.06667\n    \"\"\"\n    def __init__(\n            self,\n            channels: int,\n            add_maxpool: bool = False,\n            gate_layer: Union[str, Type[nn.Module]] = 'hard_sigmoid',\n            device=None,\n            dtype=None,\n            **_,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.add_maxpool = add_maxpool\n        self.fc = 
nn.Conv2d(channels, channels, kernel_size=1, padding=0, device=device, dtype=dtype)\n        self.gate = create_act_layer(gate_layer)\n\n    def forward(self, x):\n        x_se = x.mean((2, 3), keepdim=True)\n        if self.add_maxpool:\n            # experimental codepath, may remove or change\n            x_se = 0.5 * x_se + 0.5 * x.amax((2, 3), keepdim=True)\n        x_se = self.fc(x_se)\n        return x * self.gate(x_se)\n\n\nEffectiveSqueezeExcite = EffectiveSEModule  # alias\n\n\nclass SqueezeExciteCl(nn.Module):\n    \"\"\" SE Module as defined in original SE-Nets with a few additions\n    Additions include:\n        * divisor can be specified to keep channels % div == 0 (default: 8)\n        * reduction channels can be specified directly by arg (if rd_channels is set)\n        * reduction channels can be specified by float rd_ratio (default: 1/16)\n        * global max pooling can be added to the squeeze aggregation\n        * customizable activation, normalization, and gate layer\n    \"\"\"\n    def __init__(\n            self,\n            channels: int,\n            rd_ratio: float = 1. 
/ 16,\n            rd_channels: Optional[int] = None,\n            rd_divisor: int = 8,\n            bias: bool = True,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            gate_layer: Union[str, Type[nn.Module]] = 'sigmoid',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if not rd_channels:\n            rd_channels = make_divisible(channels * rd_ratio, rd_divisor, round_limit=0.)\n        self.fc1 = nn.Linear(channels, rd_channels, bias=bias, **dd)\n        self.act = create_act_layer(act_layer, inplace=True)\n        self.fc2 = nn.Linear(rd_channels, channels, bias=bias, **dd)\n        self.gate = create_act_layer(gate_layer)\n\n    def forward(self, x):\n        x_se = x.mean((1, 2), keepdims=True)  # FIXME avg dim [1:n-1], don't assume 2D NHWC\n        x_se = self.fc1(x_se)\n        x_se = self.act(x_se)\n        x_se = self.fc2(x_se)\n        return x * self.gate(x_se)"
  },
  {
    "path": "timm/layers/std_conv.py",
    "content": "\"\"\" Convolution with Weight Standardization (StdConv and ScaledStdConv)\n\nStdConv:\n@article{weightstandardization,\n  author    = {Siyuan Qiao and Huiyu Wang and Chenxi Liu and Wei Shen and Alan Yuille},\n  title     = {Weight Standardization},\n  journal   = {arXiv preprint arXiv:1903.10520},\n  year      = {2019},\n}\nCode: https://github.com/joe-siyuan-qiao/WeightStandardization\n\nScaledStdConv:\nPaper: `Characterizing signal propagation to close the performance gap in unnormalized ResNets`\n    - https://arxiv.org/abs/2101.08692\nOfficial Deepmind JAX code: https://github.com/deepmind/deepmind-research/tree/master/nfnets\n\nHacked together by / copyright Ross Wightman, 2021.\n\"\"\"\nfrom typing import Optional, Tuple, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom ._fx import register_notrace_module\nfrom .padding import get_padding, get_padding_value, pad_same\n\n\nclass StdConv2d(nn.Conv2d):\n    \"\"\"Conv2d with Weight Standardization. 
Used for BiT ResNet-V2 models.\n\n    Paper: `Micro-Batch Training with Batch-Channel Normalization and Weight Standardization` -\n        https://arxiv.org/abs/1903.10520v2\n    \"\"\"\n    def __init__(\n            self,\n            in_channel: int,\n            out_channels: int,\n            kernel_size: Union[int, Tuple[int, int]],\n            stride: Union[int, Tuple[int, int]] = 1,\n            padding: Optional[Union[int, Tuple[int, int]]] = None,\n            dilation: Union[int, Tuple[int, int]] = 1,\n            groups: int = 1,\n            bias: bool = False,\n            eps: float = 1e-6,\n            device=None,\n            dtype=None,\n    ):\n        if padding is None:\n            padding = get_padding(kernel_size, stride, dilation)\n        super().__init__(\n            in_channel, out_channels, kernel_size, stride=stride,\n            padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype)\n        self.eps = eps\n\n    def forward(self, x):\n        weight = F.batch_norm(\n            self.weight.reshape(1, self.out_channels, -1),\n            None,  # running_mean\n            None,  # running_var\n            training=True,\n            momentum=0.,\n            eps=self.eps,\n        ).reshape_as(self.weight)\n        x = F.conv2d(x, weight, self.bias, self.stride, self.padding, self.dilation, self.groups)\n        return x\n\n\n@register_notrace_module\nclass StdConv2dSame(nn.Conv2d):\n    \"\"\"Conv2d with Weight Standardization. TF compatible SAME padding. 
Used for ViT Hybrid model.\n\n    Paper: `Micro-Batch Training with Batch-Channel Normalization and Weight Standardization` -\n        https://arxiv.org/abs/1903.10520v2\n    \"\"\"\n    def __init__(\n            self,\n            in_channel: int,\n            out_channels: int,\n            kernel_size: Union[int, Tuple[int, int]],\n            stride: Union[int, Tuple[int, int]] = 1,\n            padding: str = 'SAME',\n            dilation: Union[int, Tuple[int, int]] = 1,\n            groups: int = 1,\n            bias: bool = False,\n            eps: float = 1e-6,\n            device=None,\n            dtype=None,\n    ):\n        padding, is_dynamic = get_padding_value(padding, kernel_size, stride=stride, dilation=dilation)\n        super().__init__(\n            in_channel, out_channels, kernel_size, stride=stride, padding=padding, dilation=dilation,\n            groups=groups, bias=bias, device=device, dtype=dtype)\n        self.same_pad = is_dynamic\n        self.eps = eps\n\n    def forward(self, x):\n        if self.same_pad:\n            x = pad_same(x, self.kernel_size, self.stride, self.dilation)\n        weight = F.batch_norm(\n            self.weight.reshape(1, self.out_channels, -1),\n            None,  # running_mean\n            None,  # running_var\n            training=True,\n            momentum=0.,\n            eps=self.eps,\n        ).reshape_as(self.weight)\n        x = F.conv2d(x, weight, self.bias, self.stride, self.padding, self.dilation, self.groups)\n        return x\n\n\nclass ScaledStdConv2d(nn.Conv2d):\n    \"\"\"Conv2d layer with Scaled Weight Standardization.\n\n    Paper: `Characterizing signal propagation to close the performance gap in unnormalized ResNets` -\n        https://arxiv.org/abs/2101.08692\n\n    NOTE: the operations used in this impl differ slightly from the DeepMind Haiku impl. 
The impact is minor.\n    \"\"\"\n\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: Union[int, Tuple[int, int]],\n            stride: Union[int, Tuple[int, int]] = 1,\n            padding: Optional[Union[int, Tuple[int, int], str]] = None,\n            dilation: Union[int, Tuple[int, int]] = 1,\n            groups: int = 1,\n            bias: bool = True,\n            gamma: float = 1.0,\n            eps: float = 1e-6,\n            gain_init: float = 1.0,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        if padding is None:\n            padding = get_padding(kernel_size, stride, dilation)\n        super().__init__(\n            in_channels, out_channels, kernel_size, stride=stride, padding=padding, dilation=dilation,\n            groups=groups, bias=bias, **dd)\n        self.scale = gamma * self.weight[0].numel() ** -0.5  # gamma * 1 / sqrt(fan-in)\n        self.eps = eps\n        self.gain_init = gain_init\n\n        self.gain = nn.Parameter(torch.empty((self.out_channels, 1, 1, 1), **dd))\n\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        # Only initialize gain if it exists (for the second call)\n        if hasattr(self, 'gain'):\n            torch.nn.init.constant_(self.gain, self.gain_init)\n            # Also reset parent parameters if needed\n            super().reset_parameters()\n        # On first call (from super().__init__), do nothing\n\n    def forward(self, x):\n        weight = F.batch_norm(\n            self.weight.reshape(1, self.out_channels, -1),\n            None,  # running_mean\n            None,  # running_var\n            weight=(self.gain * self.scale).view(-1),\n            training=True,\n            momentum=0.,\n            eps=self.eps,\n        ).reshape_as(self.weight)\n        return F.conv2d(x, weight, self.bias, self.stride, self.padding, self.dilation, 
self.groups)\n\n\n@register_notrace_module\nclass ScaledStdConv2dSame(nn.Conv2d):\n    \"\"\"Conv2d layer with Scaled Weight Standardization and Tensorflow-like SAME padding support\n\n    Paper: `Characterizing signal propagation to close the performance gap in unnormalized ResNets` -\n        https://arxiv.org/abs/2101.08692\n\n    NOTE: the operations used in this impl differ slightly from the DeepMind Haiku impl. The impact is minor.\n    \"\"\"\n\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: Union[int, Tuple[int, int]],\n            stride: Union[int, Tuple[int, int]] = 1,\n            padding: str = 'SAME',\n            dilation: Union[int, Tuple[int, int]] = 1,\n            groups: int = 1,\n            bias: bool = True,\n            gamma: float = 1.0,\n            eps: float = 1e-6,\n            gain_init: float = 1.0,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        padding, is_dynamic = get_padding_value(padding, kernel_size, stride=stride, dilation=dilation)\n        super().__init__(\n            in_channels, out_channels, kernel_size, stride=stride, padding=padding, dilation=dilation,\n            groups=groups, bias=bias, **dd)\n        self.scale = gamma * self.weight[0].numel() ** -0.5\n        self.same_pad = is_dynamic\n        self.eps = eps\n        self.gain_init = gain_init\n\n        self.gain = nn.Parameter(torch.empty((self.out_channels, 1, 1, 1), **dd))\n\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        # Only initialize gain if it exists (for the second call)\n        if hasattr(self, 'gain'):\n            torch.nn.init.constant_(self.gain, self.gain_init)\n            # Also reset parent parameters if needed\n            super().reset_parameters()\n        # On first call (from super().__init__), do nothing\n\n    def forward(self, x):\n        if 
self.same_pad:\n            x = pad_same(x, self.kernel_size, self.stride, self.dilation)\n        weight = F.batch_norm(\n            self.weight.reshape(1, self.out_channels, -1),\n            None,  # running_mean\n            None,  # running_var\n            weight=(self.gain * self.scale).view(-1),\n            training=True,\n            momentum=0.,\n            eps=self.eps,\n        ).reshape_as(self.weight)\n        return F.conv2d(x, weight, self.bias, self.stride, self.padding, self.dilation, self.groups)\n"
  },
  {
    "path": "timm/layers/test_time_pool.py",
    "content": "\"\"\" Test Time Pooling (Average-Max Pool)\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\n\nimport logging\nfrom torch import nn\nimport torch.nn.functional as F\n\nfrom .adaptive_avgmax_pool import adaptive_avgmax_pool2d\n\n\n_logger = logging.getLogger(__name__)\n\n\nclass TestTimePoolHead(nn.Module):\n    def __init__(self, base, original_pool=7):\n        super().__init__()\n        self.base = base\n        self.original_pool = original_pool\n        base_fc = self.base.get_classifier()\n        if isinstance(base_fc, nn.Conv2d):\n            self.fc = base_fc\n        else:\n            self.fc = nn.Conv2d(\n                self.base.num_features, self.base.num_classes, kernel_size=1, bias=True)\n            self.fc.weight.data.copy_(base_fc.weight.data.view(self.fc.weight.size()))\n            self.fc.bias.data.copy_(base_fc.bias.data.view(self.fc.bias.size()))\n        self.base.reset_classifier(0)  # delete original fc layer\n\n    def forward(self, x):\n        x = self.base.forward_features(x)\n        x = F.avg_pool2d(x, kernel_size=self.original_pool, stride=1)\n        x = self.fc(x)\n        x = adaptive_avgmax_pool2d(x, 1)\n        return x.view(x.size(0), -1)\n\n\ndef apply_test_time_pool(model, config, use_test_size=False):\n    test_time_pool = False\n    if not hasattr(model, 'default_cfg') or not model.default_cfg:\n        return model, False\n    if use_test_size and 'test_input_size' in model.default_cfg:\n        df_input_size = model.default_cfg['test_input_size']\n    else:\n        df_input_size = model.default_cfg['input_size']\n    if config['input_size'][-1] > df_input_size[-1] and config['input_size'][-2] > df_input_size[-2]:\n        _logger.info('Target input size %s > pretrained default %s, using test time pooling' %\n                     (str(config['input_size'][-2:]), str(df_input_size[-2:])))\n        model = TestTimePoolHead(model, original_pool=model.default_cfg['pool_size'])\n        
test_time_pool = True\n    return model, test_time_pool\n"
  },
  {
    "path": "timm/layers/trace_utils.py",
    "content": "try:\n    from torch import _assert\nexcept ImportError:\n    def _assert(condition: bool, message: str):\n        assert condition, message\n\n\ndef _float_to_int(x: float) -> int:\n    \"\"\"\n    Symbolic tracing helper to substitute for inbuilt `int`.\n    Hint: Inbuilt `int` can't accept an argument of type `Proxy`\n    \"\"\"\n    return int(x)\n"
  },
  {
    "path": "timm/layers/typing.py",
    "content": "from contextlib import nullcontext\nfrom functools import wraps\nfrom typing import Callable, Optional, Tuple, Type, TypeVar, Union, overload, ContextManager\n\nimport torch\n\n__all__ = [\"LayerType\", \"PadType\", \"nullwrap\", \"disable_compiler\"]\n\n\nLayerType = Union[str, Callable, Type[torch.nn.Module]]\nPadType = Union[str, int, Tuple[int, int]]\n\nF = TypeVar(\"F\", bound=Callable[..., object])\n\n\n@overload\ndef nullwrap(fn: F) -> F: ...  # decorator form\n\n@overload\ndef nullwrap(fn: None = ...) -> ContextManager: ...  # context‑manager form\n\ndef nullwrap(fn: Optional[F] = None):\n    # as a context manager\n    if fn is None:\n        return nullcontext()  # `with nullwrap():`\n\n    # as a decorator\n    @wraps(fn)\n    def wrapper(*args, **kwargs):\n        return fn(*args, **kwargs)\n    return wrapper  # `@nullwrap`\n\n\ndisable_compiler = getattr(getattr(torch, \"compiler\", None), \"disable\", None) or nullwrap\n"
  },
  {
    "path": "timm/layers/weight_init.py",
    "content": "import torch\nimport math\nimport warnings\nfrom torch import nn\nfrom torch.nn.init import _calculate_fan_in_and_fan_out\n\n\ndef is_meta_device(device) -> bool:\n    \"\"\"Check if targeting meta device (explicit arg or context manager).\"\"\"\n    if device is not None:\n        return str(device) == 'meta'\n    # Check context manager (PyTorch 2.0+)\n    if hasattr(torch, 'get_default_device'):\n        default_device = torch.get_default_device()\n        return default_device is not None and default_device.type == 'meta'\n    return False\n\n\ndef _trunc_normal_(tensor, mean, std, a, b):\n    # Cut & paste from PyTorch official master until it's in a few official releases - RW\n    # Method based on https://people.sc.fsu.edu/~jburkardt/presentations/truncated_normal.pdf\n    def norm_cdf(x):\n        # Computes standard normal cumulative distribution function\n        return (1. + math.erf(x / math.sqrt(2.))) / 2.\n\n    if (mean < a - 2 * std) or (mean > b + 2 * std):\n        warnings.warn(\"mean is more than 2 std from [a, b] in nn.init.trunc_normal_. 
\"\n                      \"The distribution of values may be incorrect.\",\n                      stacklevel=2)\n\n    # Values are generated by using a truncated uniform distribution and\n    # then using the inverse CDF for the normal distribution.\n    # Get upper and lower cdf values\n    l = norm_cdf((a - mean) / std)\n    u = norm_cdf((b - mean) / std)\n\n    # Uniformly fill tensor with values from [l, u], then translate to\n    # [2l-1, 2u-1].\n    tensor.uniform_(2 * l - 1, 2 * u - 1)\n\n    # Use inverse cdf transform for normal distribution to get truncated\n    # standard normal\n    tensor.erfinv_()\n\n    # Transform to proper mean, std\n    tensor.mul_(std * math.sqrt(2.))\n    tensor.add_(mean)\n\n    # Clamp to ensure it's in the proper range\n    tensor.clamp_(min=a, max=b)\n    return tensor\n\n\ndef trunc_normal_(tensor, mean=0., std=1., a=-2., b=2.):\n    # type: (Tensor, float, float, float, float) -> Tensor\n    r\"\"\"Fills the input Tensor with values drawn from a truncated\n    normal distribution. The values are effectively drawn from the\n    normal distribution :math:`\\mathcal{N}(\\text{mean}, \\text{std}^2)`\n    with values outside :math:`[a, b]` redrawn until they are within\n    the bounds. 
The method used for generating the random values works\n    best when :math:`a \\leq \\text{mean} \\leq b`.\n\n    NOTE: this impl is similar to the PyTorch trunc_normal_, the bounds [a, b] are\n    applied while sampling the normal with mean/std applied, therefore a, b args\n    should be adjusted to match the range of mean, std args.\n\n    Args:\n        tensor: an n-dimensional `torch.Tensor`\n        mean: the mean of the normal distribution\n        std: the standard deviation of the normal distribution\n        a: the minimum cutoff value\n        b: the maximum cutoff value\n    Examples:\n        >>> w = torch.empty(3, 5)\n        >>> nn.init.trunc_normal_(w)\n    \"\"\"\n    with torch.no_grad():\n        return _trunc_normal_(tensor, mean, std, a, b)\n\n\ndef trunc_normal_tf_(tensor, mean=0., std=1., a=-2., b=2.):\n    # type: (Tensor, float, float, float, float) -> Tensor\n    r\"\"\"Fills the input Tensor with values drawn from a truncated\n    normal distribution. The values are effectively drawn from the\n    normal distribution :math:`\\mathcal{N}(\\text{mean}, \\text{std}^2)`\n    with values outside :math:`[a, b]` redrawn until they are within\n    the bounds. 
The method used for generating the random values works\n    best when :math:`a \\leq \\text{mean} \\leq b`.\n\n    NOTE: this 'tf' variant behaves closer to Tensorflow / JAX impl where the\n    bounds [a, b] are applied when sampling the normal distribution with mean=0, std=1.0\n    and the result is subsequently scaled and shifted by the mean and std args.\n\n    Args:\n        tensor: an n-dimensional `torch.Tensor`\n        mean: the mean of the normal distribution\n        std: the standard deviation of the normal distribution\n        a: the minimum cutoff value\n        b: the maximum cutoff value\n    Examples:\n        >>> w = torch.empty(3, 5)\n        >>> nn.init.trunc_normal_(w)\n    \"\"\"\n    with torch.no_grad():\n        _trunc_normal_(tensor, 0, 1.0, a, b)\n        tensor.mul_(std).add_(mean)\n    return tensor\n\n\ndef variance_scaling_(tensor, scale=1.0, mode='fan_in', distribution='normal'):\n    fan_in, fan_out = _calculate_fan_in_and_fan_out(tensor)\n    if mode == 'fan_in':\n        denom = fan_in\n    elif mode == 'fan_out':\n        denom = fan_out\n    elif mode == 'fan_avg':\n        denom = (fan_in + fan_out) / 2\n\n    variance = scale / denom\n\n    if distribution == \"truncated_normal\":\n        # constant is stddev of standard normal truncated to (-2, 2)\n        trunc_normal_tf_(tensor, std=math.sqrt(variance) / .87962566103423978)\n    elif distribution == \"normal\":\n        with torch.no_grad():\n            tensor.normal_(std=math.sqrt(variance))\n    elif distribution == \"uniform\":\n        bound = math.sqrt(3 * variance)\n        with torch.no_grad():\n            tensor.uniform_(-bound, bound)\n    else:\n        raise ValueError(f\"invalid distribution {distribution}\")\n\n\ndef lecun_normal_(tensor):\n    variance_scaling_(tensor, mode='fan_in', distribution='truncated_normal')\n\n\ndef init_weight_vit(\n        module: nn.Module,\n        name: str,\n        init_bias: float = 0.02,\n        head_bias: float = 0.,\n   
     classifier_name: str = 'head'\n):\n    if isinstance(module, (nn.Linear, nn.Conv1d, nn.Conv2d, nn.Conv3d)):\n        if name.startswith(classifier_name):\n            nn.init.zeros_(module.weight)\n            nn.init.constant_(module.bias, head_bias)\n        else:\n            nn.init.trunc_normal_(module.weight, std=0.02)\n            if isinstance(module, nn.Linear) and module.bias is not None:\n                nn.init.constant_(module.bias, init_bias)\n    elif hasattr(module, 'init_weights'):\n        module.init_weights()\n\n\ndef init_weight_jax(\n        module: nn.Module,\n        name: str,\n        head_bias: float = 0.,\n        classifier_name: str = 'head',\n):\n    if isinstance(module, nn.Linear):\n        if name.startswith(classifier_name):\n            nn.init.zeros_(module.weight)\n            nn.init.constant_(module.bias, head_bias)\n        else:\n            nn.init.xavier_uniform_(module.weight)\n            if module.bias is not None:\n                nn.init.normal_(module.bias, std=1e-6) if 'mlp' in name else nn.init.zeros_(module.bias)\n    elif isinstance(module, nn.Conv2d):\n        lecun_normal_(module.weight)\n        if module.bias is not None:\n            nn.init.zeros_(module.bias)\n    elif hasattr(module, 'init_weights'):\n        module.init_weights()\n\n"
  },
  {
    "path": "timm/loss/__init__.py",
    "content": "from .asymmetric_loss import AsymmetricLossMultiLabel, AsymmetricLossSingleLabel\nfrom .binary_cross_entropy import BinaryCrossEntropy\nfrom .cross_entropy import LabelSmoothingCrossEntropy, SoftTargetCrossEntropy\nfrom .jsd import JsdCrossEntropy\n"
  },
  {
    "path": "timm/loss/asymmetric_loss.py",
    "content": "import torch\nimport torch.nn as nn\n\n\nclass AsymmetricLossMultiLabel(nn.Module):\n    def __init__(self, gamma_neg=4, gamma_pos=1, clip=0.05, eps=1e-8, disable_torch_grad_focal_loss=False):\n        super(AsymmetricLossMultiLabel, self).__init__()\n\n        self.gamma_neg = gamma_neg\n        self.gamma_pos = gamma_pos\n        self.clip = clip\n        self.disable_torch_grad_focal_loss = disable_torch_grad_focal_loss\n        self.eps = eps\n\n    def forward(self, x, y):\n        \"\"\"\"\n        Parameters\n        ----------\n        x: input logits\n        y: targets (multi-label binarized vector)\n        \"\"\"\n\n        # Calculating Probabilities\n        x_sigmoid = torch.sigmoid(x)\n        xs_pos = x_sigmoid\n        xs_neg = 1 - x_sigmoid\n\n        # Asymmetric Clipping\n        if self.clip is not None and self.clip > 0:\n            xs_neg = (xs_neg + self.clip).clamp(max=1)\n\n        # Basic CE calculation\n        los_pos = y * torch.log(xs_pos.clamp(min=self.eps))\n        los_neg = (1 - y) * torch.log(xs_neg.clamp(min=self.eps))\n        loss = los_pos + los_neg\n\n        # Asymmetric Focusing\n        if self.gamma_neg > 0 or self.gamma_pos > 0:\n            if self.disable_torch_grad_focal_loss:\n                torch.set_grad_enabled(False)\n            pt0 = xs_pos * y\n            pt1 = xs_neg * (1 - y)  # pt = p if t > 0 else 1-p\n            pt = pt0 + pt1\n            one_sided_gamma = self.gamma_pos * y + self.gamma_neg * (1 - y)\n            one_sided_w = torch.pow(1 - pt, one_sided_gamma)\n            if self.disable_torch_grad_focal_loss:\n                torch.set_grad_enabled(True)\n            loss *= one_sided_w\n\n        return -loss.sum()\n\n\nclass AsymmetricLossSingleLabel(nn.Module):\n    def __init__(self, gamma_pos=1, gamma_neg=4, eps: float = 0.1, reduction='mean'):\n        super(AsymmetricLossSingleLabel, self).__init__()\n\n        self.eps = eps\n        self.logsoftmax = 
nn.LogSoftmax(dim=-1)\n        self.targets_classes = []  # prevent gpu repeated memory allocation\n        self.gamma_pos = gamma_pos\n        self.gamma_neg = gamma_neg\n        self.reduction = reduction\n\n    def forward(self, inputs, target, reduction=None):\n        \"\"\"\"\n        Parameters\n        ----------\n        x: input logits\n        y: targets (1-hot vector)\n        \"\"\"\n\n        num_classes = inputs.size()[-1]\n        log_preds = self.logsoftmax(inputs)\n        self.targets_classes = torch.zeros_like(inputs).scatter_(1, target.long().unsqueeze(1), 1)\n\n        # ASL weights\n        targets = self.targets_classes\n        anti_targets = 1 - targets\n        xs_pos = torch.exp(log_preds)\n        xs_neg = 1 - xs_pos\n        xs_pos = xs_pos * targets\n        xs_neg = xs_neg * anti_targets\n        asymmetric_w = torch.pow(1 - xs_pos - xs_neg,\n                                 self.gamma_pos * targets + self.gamma_neg * anti_targets)\n        log_preds = log_preds * asymmetric_w\n\n        if self.eps > 0:  # label smoothing\n            self.targets_classes = self.targets_classes.mul(1 - self.eps).add(self.eps / num_classes)\n\n        # loss calculation\n        loss = - self.targets_classes.mul(log_preds)\n\n        loss = loss.sum(dim=-1)\n        if self.reduction == 'mean':\n            loss = loss.mean()\n\n        return loss\n"
  },
  {
    "path": "timm/loss/binary_cross_entropy.py",
    "content": "\"\"\" Binary Cross Entropy w/ a few extras\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nfrom typing import Optional, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\n\nclass BinaryCrossEntropy(nn.Module):\n    \"\"\" BCE with optional one-hot from dense targets, label smoothing, thresholding\n    NOTE for experiments comparing CE to BCE /w label smoothing, may remove\n    \"\"\"\n    def __init__(\n            self,\n            smoothing=0.1,\n            target_threshold: Optional[float] = None,\n            weight: Optional[torch.Tensor] = None,\n            reduction: str = 'mean',\n            sum_classes: bool = False,\n            pos_weight: Optional[Union[torch.Tensor, float]] = None,\n    ):\n        super(BinaryCrossEntropy, self).__init__()\n        assert 0. <= smoothing < 1.0\n        if pos_weight is not None:\n            if not isinstance(pos_weight, torch.Tensor):\n                pos_weight = torch.tensor(pos_weight)\n        self.smoothing = smoothing\n        self.target_threshold = target_threshold\n        self.reduction = 'none' if sum_classes else reduction\n        self.sum_classes = sum_classes\n        self.register_buffer('weight', weight)\n        self.register_buffer('pos_weight', pos_weight)\n\n    def forward(self, x: torch.Tensor, target: torch.Tensor) -> torch.Tensor:\n        batch_size = x.shape[0]\n        assert batch_size == target.shape[0]\n\n        if target.shape != x.shape:\n            # NOTE currently assume smoothing or other label softening is applied upstream if targets are already sparse\n            num_classes = x.shape[-1]\n            # FIXME should off/on be different for smoothing w/ BCE? Other impl out there differ\n            off_value = self.smoothing / num_classes\n            on_value = 1. 
- self.smoothing + off_value\n            target = target.long().view(-1, 1)\n            target = torch.full(\n                (batch_size, num_classes),\n                off_value,\n                device=x.device, dtype=x.dtype).scatter_(1, target, on_value)\n\n        if self.target_threshold is not None:\n            # Make target 0, or 1 if threshold set\n            target = target.gt(self.target_threshold).to(dtype=target.dtype)\n\n        loss = F.binary_cross_entropy_with_logits(\n            x, target,\n            self.weight,\n            pos_weight=self.pos_weight,\n            reduction=self.reduction,\n        )\n        if self.sum_classes:\n            loss = loss.sum(-1).mean()\n        return loss\n"
  },
  {
    "path": "timm/loss/cross_entropy.py",
    "content": "\"\"\" Cross Entropy w/ smoothing or soft targets\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\n\nclass LabelSmoothingCrossEntropy(nn.Module):\n    \"\"\" NLL loss with label smoothing.\n    \"\"\"\n    def __init__(self, smoothing=0.1):\n        super(LabelSmoothingCrossEntropy, self).__init__()\n        assert smoothing < 1.0\n        self.smoothing = smoothing\n        self.confidence = 1. - smoothing\n\n    def forward(self, x: torch.Tensor, target: torch.Tensor) -> torch.Tensor:\n        logprobs = F.log_softmax(x, dim=-1)\n        nll_loss = -logprobs.gather(dim=-1, index=target.unsqueeze(1))\n        nll_loss = nll_loss.squeeze(1)\n        smooth_loss = -logprobs.mean(dim=-1)\n        loss = self.confidence * nll_loss + self.smoothing * smooth_loss\n        return loss.mean()\n\n\nclass SoftTargetCrossEntropy(nn.Module):\n\n    def __init__(self):\n        super(SoftTargetCrossEntropy, self).__init__()\n\n    def forward(self, x: torch.Tensor, target: torch.Tensor) -> torch.Tensor:\n        loss = torch.sum(-target * F.log_softmax(x, dim=-1), dim=-1)\n        return loss.mean()\n"
  },
  {
    "path": "timm/loss/jsd.py",
    "content": "import torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom .cross_entropy import LabelSmoothingCrossEntropy\n\n\nclass JsdCrossEntropy(nn.Module):\n    \"\"\" Jensen-Shannon Divergence + Cross-Entropy Loss\n\n    Based on impl here: https://github.com/google-research/augmix/blob/master/imagenet.py\n    From paper: 'AugMix: A Simple Data Processing Method to Improve Robustness and Uncertainty -\n    https://arxiv.org/abs/1912.02781\n\n    Hacked together by / Copyright 2020 Ross Wightman\n    \"\"\"\n    def __init__(self, num_splits=3, alpha=12, smoothing=0.1):\n        super().__init__()\n        self.num_splits = num_splits\n        self.alpha = alpha\n        if smoothing is not None and smoothing > 0:\n            self.cross_entropy_loss = LabelSmoothingCrossEntropy(smoothing)\n        else:\n            self.cross_entropy_loss = torch.nn.CrossEntropyLoss()\n\n    def __call__(self, output, target):\n        split_size = output.shape[0] // self.num_splits\n        assert split_size * self.num_splits == output.shape[0]\n        logits_split = torch.split(output, split_size)\n\n        # Cross-entropy is only computed on clean images\n        loss = self.cross_entropy_loss(logits_split[0], target[:split_size])\n        probs = [F.softmax(logits, dim=1) for logits in logits_split]\n\n        # Clamp mixture distribution to avoid exploding KL divergence\n        logp_mixture = torch.clamp(torch.stack(probs).mean(axis=0), 1e-7, 1).log()\n        loss += self.alpha * sum([F.kl_div(\n            logp_mixture, p_split, reduction='batchmean') for p_split in probs]) / len(probs)\n        return loss\n"
  },
  {
    "path": "timm/models/__init__.py",
    "content": "from .beit import *\nfrom .byoanet import *\nfrom .byobnet import *\nfrom .cait import *\nfrom .coat import *\nfrom .convit import *\nfrom .convmixer import *\nfrom .convnext import *\nfrom .crossvit import *\nfrom .csatv2 import *\nfrom .cspnet import *\nfrom .davit import *\nfrom .deit import *\nfrom .densenet import *\nfrom .dla import *\nfrom .dpn import *\nfrom .edgenext import *\nfrom .efficientformer import *\nfrom .efficientformer_v2 import *\nfrom .efficientnet import *\nfrom .efficientvit_mit import *\nfrom .efficientvit_msra import *\nfrom .eva import *\nfrom .fasternet import *\nfrom .fastvit import *\nfrom .focalnet import *\nfrom .gcvit import *\nfrom .ghostnet import *\nfrom .hardcorenas import *\nfrom .hgnet import *\nfrom .hiera import *\nfrom .hieradet_sam2 import *\nfrom .hrnet import *\nfrom .inception_next import *\nfrom .inception_resnet_v2 import *\nfrom .inception_v3 import *\nfrom .inception_v4 import *\nfrom .levit import *\nfrom .maxxvit import *\nfrom .mambaout import *\nfrom .metaformer import *\nfrom .mlp_mixer import *\nfrom .mobilenetv3 import *\nfrom .mobilenetv5 import *\nfrom .mobilevit import *\nfrom .mvitv2 import *\nfrom .naflexvit import *\nfrom .nasnet import *\nfrom .nest import *\nfrom .nextvit import *\nfrom .nfnet import *\nfrom .pit import *\nfrom .pnasnet import *\nfrom .pvt_v2 import *\nfrom .rdnet import *\nfrom .regnet import *\nfrom .repghost import *\nfrom .repvit import *\nfrom .res2net import *\nfrom .resnest import *\nfrom .resnet import *\nfrom .resnetv2 import *\nfrom .rexnet import *\nfrom .selecsls import *\nfrom .senet import *\nfrom .sequencer import *\nfrom .shvit import *\nfrom .sknet import *\nfrom .starnet import *\nfrom .swiftformer import *\nfrom .swin_transformer import *\nfrom .swin_transformer_v2 import *\nfrom .swin_transformer_v2_cr import *\nfrom .tiny_vit import *\nfrom .tnt import *\nfrom .tresnet import *\nfrom .twins import *\nfrom .vgg import *\nfrom .visformer import 
*\nfrom .vision_transformer import *\nfrom .vision_transformer_hybrid import *\nfrom .vision_transformer_relpos import *\nfrom .vision_transformer_sam import *\nfrom .vitamin import *\nfrom .volo import *\nfrom .vovnet import *\nfrom .xception import *\nfrom .xception_aligned import *\nfrom .xcit import *\n\nfrom ._builder import (\n    build_model_with_cfg as build_model_with_cfg,\n    load_pretrained as load_pretrained,\n    load_custom_pretrained as load_custom_pretrained,\n    resolve_pretrained_cfg as resolve_pretrained_cfg,\n    set_pretrained_download_progress as set_pretrained_download_progress,\n    set_pretrained_check_hash as set_pretrained_check_hash,\n)\nfrom ._factory import (\n    create_model as create_model,\n    parse_model_name as parse_model_name,\n    safe_model_name as safe_model_name,\n)\nfrom ._features import (\n    FeatureInfo as FeatureInfo,\n    FeatureHooks as FeatureHooks,\n    FeatureHookNet as FeatureHookNet,\n    FeatureListNet as FeatureListNet,\n    FeatureDictNet as FeatureDictNet,\n)\nfrom ._features_fx import (\n    FeatureGraphNet as FeatureGraphNet,\n    GraphExtractNet as GraphExtractNet,\n    create_feature_extractor as create_feature_extractor,\n    get_graph_node_names as get_graph_node_names,\n    register_notrace_module as register_notrace_module,\n    is_notrace_module as is_notrace_module,\n    get_notrace_modules as get_notrace_modules,\n    register_notrace_function as register_notrace_function,\n    is_notrace_function as is_notrace_function,\n    get_notrace_functions as get_notrace_functions,\n)\nfrom ._helpers import (\n    clean_state_dict as clean_state_dict,\n    load_state_dict as load_state_dict,\n    load_checkpoint as load_checkpoint,\n    remap_state_dict as remap_state_dict,\n    resume_checkpoint as resume_checkpoint,\n)\nfrom ._hub import (\n    load_model_config_from_hf as load_model_config_from_hf,\n    load_state_dict_from_hf as load_state_dict_from_hf,\n    push_to_hf_hub as push_to_hf_hub,\n    
save_for_hf as save_for_hf,\n)\nfrom ._manipulate import (\n    model_parameters as model_parameters,\n    named_apply as named_apply,\n    named_modules as named_modules,\n    named_modules_with_params as named_modules_with_params,\n    group_modules as group_modules,\n    group_parameters as group_parameters,\n    checkpoint_seq as checkpoint_seq,\n    checkpoint as checkpoint,\n    adapt_input_conv as adapt_input_conv,\n)\nfrom ._pretrained import (\n    PretrainedCfg as PretrainedCfg,\n    DefaultCfg as DefaultCfg,\n    filter_pretrained_cfg as filter_pretrained_cfg,\n)\nfrom ._prune import adapt_model_from_string as adapt_model_from_string\nfrom ._registry import (\n    split_model_name_tag as split_model_name_tag,\n    get_arch_name as get_arch_name,\n    generate_default_cfgs as generate_default_cfgs,\n    register_model as register_model,\n    register_model_deprecations as register_model_deprecations,\n    model_entrypoint as model_entrypoint,\n    list_models as list_models,\n    list_pretrained as list_pretrained,\n    get_deprecated_models as get_deprecated_models,\n    is_model as is_model,\n    list_modules as list_modules,\n    is_model_in_modules as is_model_in_modules,\n    is_model_pretrained as is_model_pretrained,\n    get_pretrained_cfg as get_pretrained_cfg,\n    get_pretrained_cfg_value as get_pretrained_cfg_value,\n    get_arch_pretrained_cfgs as get_arch_pretrained_cfgs,\n)\n"
  },
  {
    "path": "timm/models/_builder.py",
    "content": "import dataclasses\nimport logging\nimport os\nfrom copy import deepcopy\nfrom pathlib import Path\nfrom typing import Any, Callable, Dict, List, Optional, Tuple, Type, TypeVar, Union\n\nfrom torch import nn as nn\nfrom torch.hub import load_state_dict_from_url\n\nfrom timm.models._features import FeatureListNet, FeatureDictNet, FeatureHookNet, FeatureGetterNet\nfrom timm.models._features_fx import FeatureGraphNet\nfrom timm.models._helpers import load_state_dict\nfrom timm.models._hub import has_hf_hub, download_cached_file, check_cached_file, load_state_dict_from_hf, \\\n    load_state_dict_from_path, load_custom_from_hf\nfrom timm.models._manipulate import adapt_input_conv\nfrom timm.models._pretrained import PretrainedCfg\nfrom timm.models._prune import adapt_model_from_file\nfrom timm.models._registry import get_pretrained_cfg\n\n_logger = logging.getLogger(__name__)\n\n# Global variables for rarely used pretrained checkpoint download progress and hash check.\n# Use set_pretrained_download_progress / set_pretrained_check_hash functions to toggle.\n_DOWNLOAD_PROGRESS = False\n_CHECK_HASH = False\n_USE_OLD_CACHE = int(os.environ.get('TIMM_USE_OLD_CACHE', 0)) > 0\n\n__all__ = [\n    'set_pretrained_download_progress',\n    'set_pretrained_check_hash',\n    'load_custom_pretrained',\n    'load_pretrained',\n    'pretrained_cfg_for_features',\n    'resolve_pretrained_cfg',\n    'build_model_with_cfg',\n]\n\n\nModelT = TypeVar(\"ModelT\", bound=nn.Module)              # any subclass of nn.Module\n\n\ndef _resolve_pretrained_source(pretrained_cfg: Dict[str, Any]) -> Tuple[str, str]:\n    cfg_source = pretrained_cfg.get('source', '')\n    pretrained_url = pretrained_cfg.get('url', None)\n    pretrained_file = pretrained_cfg.get('file', None)\n    pretrained_sd = pretrained_cfg.get('state_dict', None)\n    hf_hub_id = pretrained_cfg.get('hf_hub_id', None)\n\n    # resolve where to load pretrained weights from\n    load_from = ''\n    pretrained_loc = 
''\n    if cfg_source == 'hf-hub' and has_hf_hub(necessary=True):\n        # hf-hub specified as source via model identifier\n        load_from = 'hf-hub'\n        assert hf_hub_id\n        pretrained_loc = hf_hub_id\n    elif cfg_source == 'local-dir':\n        load_from = 'local-dir'\n        pretrained_loc = pretrained_file\n    else:\n        # default source == timm or unspecified\n        if pretrained_sd:\n            # direct state_dict pass through is the highest priority\n            load_from = 'state_dict'\n            pretrained_loc = pretrained_sd\n            assert isinstance(pretrained_loc, dict)\n        elif pretrained_file:\n            # file load override is the second-highest priority if set\n            load_from = 'file'\n            pretrained_loc = pretrained_file\n        else:\n            old_cache_valid = False\n            if _USE_OLD_CACHE:\n                # prioritized old cached weights if exists and env var enabled\n                old_cache_valid = check_cached_file(pretrained_url) if pretrained_url else False\n            if not old_cache_valid and hf_hub_id and has_hf_hub(necessary=True):\n                # hf-hub available as alternate weight source in default_cfg\n                load_from = 'hf-hub'\n                pretrained_loc = hf_hub_id\n            elif pretrained_url:\n                load_from = 'url'\n                pretrained_loc = pretrained_url\n\n    if load_from == 'hf-hub' and pretrained_cfg.get('hf_hub_filename', None):\n        # if a filename override is set, return tuple for location w/ (hub_id, filename)\n        pretrained_loc = pretrained_loc, pretrained_cfg['hf_hub_filename']\n    return load_from, pretrained_loc\n\n\ndef set_pretrained_download_progress(enable: bool = True) -> None:\n    \"\"\" Set download progress for pretrained weights on/off (globally). 
\"\"\"\n    global _DOWNLOAD_PROGRESS\n    _DOWNLOAD_PROGRESS = enable\n\n\ndef set_pretrained_check_hash(enable: bool = True) -> None:\n    \"\"\" Set hash checking for pretrained weights on/off (globally). \"\"\"\n    global _CHECK_HASH\n    _CHECK_HASH = enable\n\n\ndef load_custom_pretrained(\n        model: nn.Module,\n        pretrained_cfg: Optional[Dict[str, Any]] = None,\n        load_fn: Optional[Callable] = None,\n        cache_dir: Optional[Union[str, Path]] = None,\n) -> None:\n    \"\"\"Loads a custom (read non .pth) weight file\n\n    Downloads checkpoint file into cache-dir like torch.hub based loaders, but calls\n    a passed in custom load fun, or the `load_pretrained` model member fn.\n\n    If the object is already present in `model_dir`, it's deserialized and returned.\n    The default value of `model_dir` is ``<hub_dir>/checkpoints`` where\n    `hub_dir` is the directory returned by :func:`~torch.hub.get_dir`.\n\n    Args:\n        model: The instantiated model to load weights into\n        pretrained_cfg: Default pretrained model cfg\n        load_fn: An external standalone fn that loads weights into provided model, otherwise a fn named\n            'load_pretrained' on the model will be called if it exists\n        cache_dir: Override model checkpoint cache dir for this load\n    \"\"\"\n    pretrained_cfg = pretrained_cfg or getattr(model, 'pretrained_cfg', None)\n    if not pretrained_cfg:\n        _logger.warning(\"Invalid pretrained config, cannot load weights.\")\n        return\n\n    load_from, pretrained_loc = _resolve_pretrained_source(pretrained_cfg)\n    if not load_from:\n        _logger.warning(\"No pretrained weights exist for this model. 
Using random initialization.\")\n        return\n    if load_from == 'hf-hub':\n        _logger.warning(\"Hugging Face hub not currently supported for custom load pretrained models.\")\n    elif load_from == 'url':\n        pretrained_loc = download_cached_file(\n            pretrained_loc,\n            check_hash=_CHECK_HASH,\n            progress=_DOWNLOAD_PROGRESS,\n            cache_dir=cache_dir,\n        )\n\n    if load_fn is not None:\n        load_fn(model, pretrained_loc)\n    elif hasattr(model, 'load_pretrained'):\n        model.load_pretrained(pretrained_loc)\n    else:\n        _logger.warning(\"Valid function to load pretrained weights is not available, using random initialization.\")\n\n\ndef load_pretrained(\n        model: nn.Module,\n        pretrained_cfg: Optional[Dict[str, Any]] = None,\n        num_classes: int = 1000,\n        in_chans: int = 3,\n        filter_fn: Optional[Callable] = None,\n        strict: bool = True,\n        cache_dir: Optional[Union[str, Path]] = None,\n) -> None:\n    \"\"\" Load pretrained checkpoint\n\n    Args:\n        model: PyTorch module\n        pretrained_cfg: Configuration for pretrained weights / target dataset\n        num_classes: Number of classes for target model. Will adapt pretrained if different.\n        in_chans: Number of input chans for target model. Will adapt pretrained if different.\n        filter_fn: state_dict filter fn for load (takes state_dict, model as args)\n        strict: Strict load of checkpoint\n        cache_dir: Override model checkpoint cache dir for this load\n    \"\"\"\n    pretrained_cfg = pretrained_cfg or getattr(model, 'pretrained_cfg', None)\n    if not pretrained_cfg:\n        raise RuntimeError(\"Invalid pretrained config, cannot load weights. 
Use `pretrained=False` for random init.\")\n\n    load_from, pretrained_loc = _resolve_pretrained_source(pretrained_cfg)\n    if load_from == 'state_dict':\n        _logger.info(f'Loading pretrained weights from state dict')\n        state_dict = pretrained_loc  # pretrained_loc is the actual state dict for this override\n    elif load_from == 'file':\n        _logger.info(f'Loading pretrained weights from file ({pretrained_loc})')\n        if pretrained_cfg.get('custom_load', False):\n            model.load_pretrained(pretrained_loc)\n            return\n        else:\n            state_dict = load_state_dict(pretrained_loc)\n    elif load_from == 'url':\n        _logger.info(f'Loading pretrained weights from url ({pretrained_loc})')\n        if pretrained_cfg.get('custom_load', False):\n            pretrained_loc = download_cached_file(\n                pretrained_loc,\n                progress=_DOWNLOAD_PROGRESS,\n                check_hash=_CHECK_HASH,\n                cache_dir=cache_dir,\n            )\n            model.load_pretrained(pretrained_loc)\n            return\n        else:\n            try:\n                state_dict = load_state_dict_from_url(\n                    pretrained_loc,\n                    map_location='cpu',\n                    progress=_DOWNLOAD_PROGRESS,\n                    check_hash=_CHECK_HASH,\n                    weights_only=True,\n                    model_dir=cache_dir,\n                )\n            except TypeError:\n                state_dict = load_state_dict_from_url(\n                    pretrained_loc,\n                    map_location='cpu',\n                    progress=_DOWNLOAD_PROGRESS,\n                    check_hash=_CHECK_HASH,\n                    model_dir=cache_dir,\n                )\n    elif load_from == 'hf-hub':\n        _logger.info(f'Loading pretrained weights from Hugging Face hub ({pretrained_loc})')\n        if isinstance(pretrained_loc, (list, tuple)):\n            custom_load = 
pretrained_cfg.get('custom_load', False)\n            if isinstance(custom_load, str) and custom_load == 'hf':\n                load_custom_from_hf(*pretrained_loc, model, cache_dir=cache_dir)\n                return\n            else:\n                state_dict = load_state_dict_from_hf(*pretrained_loc, cache_dir=cache_dir)\n        else:\n            state_dict = load_state_dict_from_hf(pretrained_loc, weights_only=True, cache_dir=cache_dir)\n    elif load_from == 'local-dir':\n        _logger.info(f'Loading pretrained weights from local directory ({pretrained_loc})')\n        pretrained_path = Path(pretrained_loc)\n        if pretrained_path.is_dir():\n            state_dict = load_state_dict_from_path(pretrained_path)\n        else:\n            raise RuntimeError(f\"Specified path is not a directory: {pretrained_loc}\")\n    else:\n        model_name = pretrained_cfg.get('architecture', 'this model')\n        raise RuntimeError(f\"No pretrained weights exist for {model_name}. Use `pretrained=False` for random init.\")\n\n    if filter_fn is not None:\n        try:\n            state_dict = filter_fn(state_dict, model)\n        except TypeError as e:\n            # for backwards compat with filter fn that take one arg\n            state_dict = filter_fn(state_dict)\n\n    input_convs = pretrained_cfg.get('first_conv', None)\n    if input_convs is not None and in_chans != 3:\n        if isinstance(input_convs, str):\n            input_convs = (input_convs,)\n        for input_conv_name in input_convs:\n            weight_name = input_conv_name + '.weight'\n            try:\n                state_dict[weight_name] = adapt_input_conv(in_chans, state_dict[weight_name])\n                _logger.info(\n                    f'Converted input conv {input_conv_name} pretrained weights from 3 to {in_chans} channel(s)')\n            except NotImplementedError as e:\n                del state_dict[weight_name]\n                strict = False\n                
_logger.warning(\n                    f'Unable to convert pretrained {input_conv_name} weights, using random init for this layer.')\n\n    classifiers = pretrained_cfg.get('classifier', None)\n    label_offset = pretrained_cfg.get('label_offset', 0)\n    if classifiers is not None:\n        if isinstance(classifiers, str):\n            classifiers = (classifiers,)\n        if num_classes != pretrained_cfg['num_classes']:\n            for classifier_name in classifiers:\n                # completely discard fully connected if model num_classes doesn't match pretrained weights\n                state_dict.pop(classifier_name + '.weight', None)\n                state_dict.pop(classifier_name + '.bias', None)\n            strict = False\n        elif label_offset > 0:\n            for classifier_name in classifiers:\n                # special case for pretrained weights with an extra background class in pretrained weights\n                classifier_weight = state_dict[classifier_name + '.weight']\n                state_dict[classifier_name + '.weight'] = classifier_weight[label_offset:]\n                classifier_bias = state_dict[classifier_name + '.bias']\n                state_dict[classifier_name + '.bias'] = classifier_bias[label_offset:]\n\n    load_result = model.load_state_dict(state_dict, strict=strict)\n    if load_result.missing_keys:\n        _logger.info(\n            f'Missing keys ({\", \".join(load_result.missing_keys)}) discovered while loading pretrained weights.'\n            f' This is expected if model is being adapted.')\n    if load_result.unexpected_keys:\n        _logger.warning(\n            f'Unexpected keys ({\", \".join(load_result.unexpected_keys)}) found while loading pretrained weights.'\n            f' This may be expected if model is being adapted.')\n\n\ndef pretrained_cfg_for_features(pretrained_cfg: Dict[str, Any]) -> Dict[str, Any]:\n    pretrained_cfg = deepcopy(pretrained_cfg)\n    # remove default pretrained cfg fields that 
don't have much relevance for feature backbone\n    to_remove = ('num_classes', 'classifier', 'global_pool')  # add default final pool size?\n    for tr in to_remove:\n        pretrained_cfg.pop(tr, None)\n    return pretrained_cfg\n\n\ndef _filter_kwargs(kwargs: Dict[str, Any], names: List[str]) -> None:\n    if not kwargs or not names:\n        return\n    for n in names:\n        kwargs.pop(n, None)\n\n\ndef _update_default_model_kwargs(pretrained_cfg, kwargs, kwargs_filter) -> None:\n    \"\"\" Update the default_cfg and kwargs before passing to model\n\n    Args:\n        pretrained_cfg: input pretrained cfg (updated in-place)\n        kwargs: keyword args passed to model build fn (updated in-place)\n        kwargs_filter: keyword arg keys that must be removed before model __init__\n    \"\"\"\n    # Set model __init__ args that can be determined by default_cfg (if not already passed as kwargs)\n    default_kwarg_names = ('num_classes', 'global_pool', 'in_chans')\n    if pretrained_cfg.get('fixed_input_size', False):\n        # if fixed_input_size exists and is True, model takes an img_size arg that fixes its input size\n        default_kwarg_names += ('img_size',)\n\n    for n in default_kwarg_names:\n        # for legacy reasons, model __init__args uses img_size + in_chans as separate args while\n        # pretrained_cfg has one input_size=(C, H ,W) entry\n        if n == 'img_size':\n            input_size = pretrained_cfg.get('input_size', None)\n            if input_size is not None:\n                assert len(input_size) == 3\n                kwargs.setdefault(n, input_size[-2:])\n        elif n == 'in_chans':\n            input_size = pretrained_cfg.get('input_size', None)\n            if input_size is not None:\n                assert len(input_size) == 3\n                kwargs.setdefault(n, input_size[0])\n        elif n == 'num_classes':\n            default_val = pretrained_cfg.get(n, None)\n            # if default is < 0, don't pass through to 
model\n            if default_val is not None and default_val >= 0:\n                kwargs.setdefault(n, pretrained_cfg[n])\n        else:\n            default_val = pretrained_cfg.get(n, None)\n            if default_val is not None:\n                kwargs.setdefault(n, pretrained_cfg[n])\n\n    # Filter keyword args for task specific model variants (some 'features only' models, etc.)\n    _filter_kwargs(kwargs, names=kwargs_filter)\n\n\ndef resolve_pretrained_cfg(\n        variant: str,\n        pretrained_cfg: Optional[Union[str, Dict[str, Any]]] = None,\n        pretrained_cfg_overlay: Optional[Dict[str, Any]] = None,\n) -> PretrainedCfg:\n    \"\"\"Resolve pretrained configuration from various sources.\"\"\"\n    model_with_tag = variant\n    pretrained_tag = None\n    if pretrained_cfg:\n        if isinstance(pretrained_cfg, dict):\n            # pretrained_cfg dict passed as arg, validate by converting to PretrainedCfg\n            pretrained_cfg = PretrainedCfg(**pretrained_cfg)\n        elif isinstance(pretrained_cfg, str):\n            pretrained_tag = pretrained_cfg\n            pretrained_cfg = None\n\n    # fallback to looking up pretrained cfg in model registry by variant identifier\n    if not pretrained_cfg:\n        if pretrained_tag:\n            model_with_tag = '.'.join([variant, pretrained_tag])\n        pretrained_cfg = get_pretrained_cfg(model_with_tag)\n\n    if not pretrained_cfg:\n        _logger.warning(\n            f\"No pretrained configuration specified for {model_with_tag} model. 
Using a default.\"\n            f\" Please add a config to the model pretrained_cfg registry or pass explicitly.\")\n        pretrained_cfg = PretrainedCfg()  # instance with defaults\n\n    pretrained_cfg_overlay = pretrained_cfg_overlay or {}\n    if not pretrained_cfg.architecture:\n        pretrained_cfg_overlay.setdefault('architecture', variant)\n    pretrained_cfg = dataclasses.replace(pretrained_cfg, **pretrained_cfg_overlay)\n\n    return pretrained_cfg\n\n\ndef build_model_with_cfg(\n        model_cls: Union[Type[ModelT], Callable[..., ModelT]],\n        variant: str,\n        pretrained: bool,\n        pretrained_cfg: Optional[Dict] = None,\n        pretrained_cfg_overlay: Optional[Dict] = None,\n        model_cfg: Optional[Any] = None,\n        feature_cfg: Optional[Dict] = None,\n        pretrained_strict: bool = True,\n        pretrained_filter_fn: Optional[Callable] = None,\n        cache_dir: Optional[Union[str, Path]] = None,\n        kwargs_filter: Optional[Tuple[str]] = None,\n        **kwargs,\n) -> ModelT:\n    \"\"\" Build model with specified default_cfg and optional model_cfg\n\n    This helper fn aids in the construction of a model including:\n      * handling default_cfg and associated pretrained weight loading\n      * passing through optional model_cfg for models with config based arch spec\n      * features_only model adaptation\n      * pruning config / model adaptation\n\n    Args:\n        model_cls: Model class\n        variant: Model variant name\n        pretrained: Load the pretrained weights\n        pretrained_cfg: Model's pretrained weight/task config\n        pretrained_cfg_overlay: Entries that will override those in pretrained_cfg\n        model_cfg: Model's architecture config\n        feature_cfg: Feature extraction adapter config\n        pretrained_strict: Load pretrained weights strictly\n        pretrained_filter_fn: Filter callable for pretrained weights\n        cache_dir: Override model cache dir for Hugging Face 
Hub and Torch checkpoints\n        kwargs_filter: Kwargs keys to filter (remove) before passing to model\n        **kwargs: Model args passed through to model __init__\n    \"\"\"\n    pruned = kwargs.pop('pruned', False)\n    features = False\n    feature_cfg = feature_cfg or {}\n\n    # resolve and update model pretrained config and model kwargs\n    pretrained_cfg = resolve_pretrained_cfg(\n        variant,\n        pretrained_cfg=pretrained_cfg,\n        pretrained_cfg_overlay=pretrained_cfg_overlay\n    )\n    pretrained_cfg = pretrained_cfg.to_dict()\n\n    _update_default_model_kwargs(pretrained_cfg, kwargs, kwargs_filter)\n\n    # Setup for feature extraction wrapper done at end of this fn\n    if kwargs.pop('features_only', False):\n        features = True\n        feature_cfg.setdefault('out_indices', (0, 1, 2, 3, 4))\n        if 'out_indices' in kwargs:\n            feature_cfg['out_indices'] = kwargs.pop('out_indices')\n        if 'feature_cls' in kwargs:\n            feature_cfg['feature_cls'] = kwargs.pop('feature_cls')\n\n    # Instantiate the model\n    if model_cfg is None:\n        model = model_cls(**kwargs)\n    else:\n        model = model_cls(cfg=model_cfg, **kwargs)\n    model.pretrained_cfg = pretrained_cfg\n    model.default_cfg = model.pretrained_cfg  # alias for backwards compat\n\n    if pruned:\n        model = adapt_model_from_file(model, variant)\n\n    # For classification models, check class attr, then kwargs, then default to 1k, otherwise 0 for feats\n    num_classes_pretrained = 0 if features else getattr(model, 'num_classes', kwargs.get('num_classes', 1000))\n    if pretrained:\n        load_pretrained(\n            model,\n            pretrained_cfg=pretrained_cfg,\n            num_classes=num_classes_pretrained,\n            in_chans=kwargs.get('in_chans', 3),\n            filter_fn=pretrained_filter_fn,\n            strict=pretrained_strict,\n            cache_dir=cache_dir,\n        )\n\n    # Wrap the model in a feature 
extraction module if enabled\n    if features:\n        use_getter = False\n        if 'feature_cls' in feature_cfg:\n            feature_cls = feature_cfg.pop('feature_cls')\n            if isinstance(feature_cls, str):\n                feature_cls = feature_cls.lower()\n\n                # flatten_sequential only valid for some feature extractors\n                if feature_cls not in ('dict', 'list', 'hook'):\n                    feature_cfg.pop('flatten_sequential', None)\n\n                if 'hook' in feature_cls:\n                    feature_cls = FeatureHookNet\n                elif feature_cls == 'list':\n                    feature_cls = FeatureListNet\n                elif feature_cls == 'dict':\n                    feature_cls = FeatureDictNet\n                elif feature_cls == 'fx':\n                    feature_cls = FeatureGraphNet\n                elif feature_cls == 'getter':\n                    use_getter = True\n                    feature_cls = FeatureGetterNet\n                else:\n                    assert False, f'Unknown feature class {feature_cls}'\n        else:\n            feature_cls = FeatureListNet\n\n        output_fmt = getattr(model, 'output_fmt', None)\n        if output_fmt is not None and not use_getter:  # don't set default for intermediate feat getter\n            feature_cfg.setdefault('output_fmt', output_fmt)\n\n        model = feature_cls(model, **feature_cfg)\n        model.pretrained_cfg = pretrained_cfg_for_features(pretrained_cfg)  # add back pretrained cfg\n        model.default_cfg = model.pretrained_cfg  # alias for rename backwards compat (default_cfg -> pretrained_cfg)\n\n    return model\n"
  },
  {
    "path": "timm/models/_efficientnet_blocks.py",
    "content": "\"\"\" EfficientNet, MobileNetV3, etc Blocks\n\nHacked together by / Copyright 2019, Ross Wightman\n\"\"\"\nfrom typing import Callable, Dict, Optional, Type, Union\n\nimport torch\nimport torch.nn as nn\nfrom torch.nn import functional as F\n\nfrom timm.layers import (\n    create_conv2d,\n    DropPath,\n    make_divisible,\n    create_act_layer,\n    create_aa,\n    to_2tuple,\n    LayerType,\n    ConvNormAct,\n    get_norm_act_layer,\n    MultiQueryAttention2d,\n    Attention2d,\n    LayerScale2d,\n)\n\n__all__ = [\n    'SqueezeExcite', 'ConvBnAct', 'DepthwiseSeparableConv', 'InvertedResidual', 'CondConvResidual', 'EdgeResidual',\n    'UniversalInvertedResidual', 'MobileAttention'\n]\n\nModuleType = Type[nn.Module]\n\n\ndef num_groups(group_size: Optional[int], channels: int):\n    if not group_size:  # 0 or None\n        return 1  # normal conv with 1 group\n    else:\n        # NOTE group_size == 1 -> depthwise conv\n        assert channels % group_size == 0\n        return channels // group_size\n\n\nclass SqueezeExcite(nn.Module):\n    \"\"\" Squeeze-and-Excitation w/ specific features for EfficientNet/MobileNet family\n\n    Args:\n        in_chs (int): input channels to layer\n        rd_ratio (float): ratio of squeeze reduction\n        act_layer (nn.Module): activation layer of containing block\n        gate_layer (Callable): attention gate function\n        force_act_layer (nn.Module): override block's activation fn if this is set/bound\n        rd_round_fn (Callable): specify a fn to calculate rounding of reduced chs\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            rd_ratio: float = 0.25,\n            rd_channels: Optional[int] = None,\n            act_layer: LayerType = nn.ReLU,\n            gate_layer: LayerType = nn.Sigmoid,\n            force_act_layer: Optional[LayerType] = None,\n            rd_round_fn: Optional[Callable] = None,\n            device=None,\n            dtype=None,\n    
):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if rd_channels is None:\n            rd_round_fn = rd_round_fn or round\n            rd_channels = rd_round_fn(in_chs * rd_ratio)\n        act_layer = force_act_layer or act_layer\n        self.conv_reduce = nn.Conv2d(in_chs, rd_channels, 1, bias=True, **dd)\n        self.act1 = create_act_layer(act_layer, inplace=True)\n        self.conv_expand = nn.Conv2d(rd_channels, in_chs, 1, bias=True, **dd)\n        self.gate = create_act_layer(gate_layer)\n\n    def forward(self, x):\n        x_se = x.mean((2, 3), keepdim=True)\n        x_se = self.conv_reduce(x_se)\n        x_se = self.act1(x_se)\n        x_se = self.conv_expand(x_se)\n        return x * self.gate(x_se)\n\n\nclass ConvBnAct(nn.Module):\n    \"\"\" Conv + Norm Layer + Activation w/ optional skip connection\n    \"\"\"\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int,\n            stride: int = 1,\n            dilation: int = 1,\n            group_size: int = 0,\n            pad_type: Union[int, str] = '',\n            skip: bool = False,\n            act_layer: Optional[LayerType] = nn.ReLU,\n            norm_layer: LayerType = nn.BatchNorm2d,\n            aa_layer: Optional[LayerType] = None,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        norm_act_layer = get_norm_act_layer(norm_layer, act_layer)\n        groups = num_groups(group_size, in_chs)\n        self.has_skip = skip and stride == 1 and in_chs == out_chs\n        use_aa = aa_layer is not None and stride > 1  # FIXME handle dilation\n\n        self.conv = create_conv2d(\n            in_chs,\n            out_chs,\n            kernel_size,\n            stride=1 if use_aa else stride,\n            dilation=dilation,\n            groups=groups,\n            
padding=pad_type,\n            **dd,\n        )\n        self.bn1 = norm_act_layer(out_chs, inplace=True, **dd)\n        self.aa = create_aa(aa_layer, channels=out_chs, stride=stride, enable=use_aa, **dd)\n        self.drop_path = DropPath(drop_path_rate) if drop_path_rate else nn.Identity()\n\n    def feature_info(self, location):\n        if location == 'expansion':  # output of conv after act, same as block coutput\n            return dict(module='bn1', hook_type='forward', num_chs=self.conv.out_channels)\n        else:  # location == 'bottleneck', block output\n            return dict(module='', num_chs=self.conv.out_channels)\n\n    def forward(self, x):\n        shortcut = x\n        x = self.conv(x)\n        x = self.bn1(x)\n        x = self.aa(x)\n        if self.has_skip:\n            x = self.drop_path(x) + shortcut\n        return x\n\n\nclass DepthwiseSeparableConv(nn.Module):\n    \"\"\" Depthwise-separable block\n    Used for DS convs in MobileNet-V1 and in the place of IR blocks that have no expansion\n    (factor of 1.0). 
This is an alternative to having a IR with an optional first pw conv.\n    \"\"\"\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            dw_kernel_size: int = 3,\n            stride: int = 1,\n            dilation: int = 1,\n            group_size: int = 1,\n            pad_type: str = '',\n            noskip: bool = False,\n            pw_kernel_size: int = 1,\n            pw_act: bool = False,\n            s2d: int = 0,\n            act_layer: LayerType = nn.ReLU,\n            norm_layer: LayerType = nn.BatchNorm2d,\n            aa_layer: Optional[LayerType] = None,\n            se_layer: Optional[ModuleType] = None,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        norm_act_layer = get_norm_act_layer(norm_layer, act_layer)\n        self.has_skip = (stride == 1 and in_chs == out_chs) and not noskip\n        self.has_pw_act = pw_act  # activation after point-wise conv\n        use_aa = aa_layer is not None and stride > 1  # FIXME handle dilation\n\n        # Space to depth\n        if s2d == 1:\n            sd_chs = int(in_chs * 4)\n            self.conv_s2d = create_conv2d(in_chs, sd_chs, kernel_size=2, stride=2, padding='same', **dd)\n            self.bn_s2d = norm_act_layer(sd_chs, **dd)\n            dw_kernel_size = (dw_kernel_size + 1) // 2\n            dw_pad_type = 'same' if dw_kernel_size == 2 else pad_type\n            in_chs = sd_chs\n            use_aa = False  # disable AA\n        else:\n            self.conv_s2d = None\n            self.bn_s2d = None\n            dw_pad_type = pad_type\n\n        groups = num_groups(group_size, in_chs)\n\n        self.conv_dw = create_conv2d(\n            in_chs,\n            in_chs,\n            dw_kernel_size,\n            stride=1 if use_aa else stride,\n            dilation=dilation,\n            padding=dw_pad_type,\n            
groups=groups,\n            **dd,\n        )\n        self.bn1 = norm_act_layer(in_chs, inplace=True, **dd)\n        self.aa = create_aa(aa_layer, channels=out_chs, stride=stride, enable=use_aa, **dd)\n\n        # Squeeze-and-excitation\n        self.se = se_layer(in_chs, act_layer=act_layer, **dd) if se_layer else nn.Identity()\n\n        self.conv_pw = create_conv2d(in_chs, out_chs, pw_kernel_size, padding=pad_type, **dd)\n        self.bn2 = norm_act_layer(out_chs, inplace=True, apply_act=self.has_pw_act, **dd)\n        self.drop_path = DropPath(drop_path_rate) if drop_path_rate else nn.Identity()\n\n    def feature_info(self, location):\n        if location == 'expansion':  # after SE, input to PW\n            return dict(module='conv_pw', hook_type='forward_pre', num_chs=self.conv_pw.in_channels)\n        else:  # location == 'bottleneck', block output\n            return dict(module='', num_chs=self.conv_pw.out_channels)\n\n    def forward(self, x):\n        shortcut = x\n        if self.conv_s2d is not None:\n            x = self.conv_s2d(x)\n            x = self.bn_s2d(x)\n        x = self.conv_dw(x)\n        x = self.bn1(x)\n        x = self.aa(x)\n        x = self.se(x)\n        x = self.conv_pw(x)\n        x = self.bn2(x)\n        if self.has_skip:\n            x = self.drop_path(x) + shortcut\n        return x\n\n\nclass InvertedResidual(nn.Module):\n    \"\"\" Inverted residual block w/ optional SE\n\n    Originally used in MobileNet-V2 - https://arxiv.org/abs/1801.04381v4, this layer is often\n    referred to as 'MBConv' for (Mobile inverted bottleneck conv) and is also used in\n      * MNasNet - https://arxiv.org/abs/1807.11626\n      * EfficientNet - https://arxiv.org/abs/1905.11946\n      * MobileNet-V3 - https://arxiv.org/abs/1905.02244\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            dw_kernel_size: int = 3,\n            stride: int = 1,\n            dilation: int = 1,\n          
  group_size: int = 1,\n            pad_type: str = '',\n            noskip: bool = False,\n            exp_ratio: float = 1.0,\n            exp_kernel_size: int = 1,\n            pw_kernel_size: int = 1,\n            s2d: int = 0,\n            act_layer: LayerType = nn.ReLU,\n            norm_layer: LayerType = nn.BatchNorm2d,\n            aa_layer: Optional[LayerType] = None,\n            se_layer: Optional[ModuleType] = None,\n            conv_kwargs: Optional[Dict] = None,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        norm_act_layer = get_norm_act_layer(norm_layer, act_layer)\n        conv_kwargs = conv_kwargs or {}\n        self.has_skip = (in_chs == out_chs and stride == 1) and not noskip\n        use_aa = aa_layer is not None and stride > 1  # FIXME handle dilation\n\n        # Space to depth\n        if s2d == 1:\n            sd_chs = int(in_chs * 4)\n            self.conv_s2d = create_conv2d(in_chs, sd_chs, kernel_size=2, stride=2, padding='same', **dd)\n            self.bn_s2d = norm_act_layer(sd_chs, **dd)\n            dw_kernel_size = (dw_kernel_size + 1) // 2\n            dw_pad_type = 'same' if dw_kernel_size == 2 else pad_type\n            in_chs = sd_chs\n            use_aa = False  # disable AA\n        else:\n            self.conv_s2d = None\n            self.bn_s2d = None\n            dw_pad_type = pad_type\n\n        mid_chs = make_divisible(in_chs * exp_ratio)\n        groups = num_groups(group_size, mid_chs)\n\n        # Point-wise expansion\n        self.conv_pw = create_conv2d(in_chs, mid_chs, exp_kernel_size, padding=pad_type, **conv_kwargs, **dd)\n        self.bn1 = norm_act_layer(mid_chs, inplace=True, **dd)\n\n        # Depth-wise convolution\n        self.conv_dw = create_conv2d(\n            mid_chs,\n            mid_chs,\n            dw_kernel_size,\n            stride=1 if use_aa else stride,\n   
         dilation=dilation,\n            groups=groups,\n            padding=dw_pad_type,\n            **conv_kwargs,\n            **dd,\n        )\n        self.bn2 = norm_act_layer(mid_chs, inplace=True, **dd)\n        self.aa = create_aa(aa_layer, channels=mid_chs, stride=stride, enable=use_aa, **dd)\n\n        # Squeeze-and-excitation\n        self.se = se_layer(mid_chs, act_layer=act_layer, **dd) if se_layer else nn.Identity()\n\n        # Point-wise linear projection\n        self.conv_pwl = create_conv2d(mid_chs, out_chs, pw_kernel_size, padding=pad_type, **conv_kwargs, **dd)\n        self.bn3 = norm_act_layer(out_chs, apply_act=False, **dd)\n        self.drop_path = DropPath(drop_path_rate) if drop_path_rate else nn.Identity()\n\n    def feature_info(self, location):\n        if location == 'expansion':  # after SE, input to PWL\n            return dict(module='conv_pwl', hook_type='forward_pre', num_chs=self.conv_pwl.in_channels)\n        else:  # location == 'bottleneck', block output\n            return dict(module='', num_chs=self.conv_pwl.out_channels)\n\n    def forward(self, x):\n        shortcut = x\n        if self.conv_s2d is not None:\n            x = self.conv_s2d(x)\n            x = self.bn_s2d(x)\n        x = self.conv_pw(x)\n        x = self.bn1(x)\n        x = self.conv_dw(x)\n        x = self.bn2(x)\n        x = self.aa(x)\n        x = self.se(x)\n        x = self.conv_pwl(x)\n        x = self.bn3(x)\n        if self.has_skip:\n            x = self.drop_path(x) + shortcut\n        return x\n\n\nclass UniversalInvertedResidual(nn.Module):\n    \"\"\" Universal Inverted Residual Block (aka Universal Inverted Bottleneck, UIB)\n\n    For MobileNetV4 - https://arxiv.org/abs/, referenced from\n    https://github.com/tensorflow/models/blob/d93c7e932de27522b2fa3b115f58d06d6f640537/official/vision/modeling/layers/nn_blocks.py#L778\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            
dw_kernel_size_start: int = 0,\n            dw_kernel_size_mid: int = 3,\n            dw_kernel_size_end: int = 0,\n            stride: int = 1,\n            dilation: int = 1,\n            group_size: int = 1,\n            pad_type: str = '',\n            noskip: bool = False,\n            exp_ratio: float = 1.0,\n            act_layer: LayerType = nn.ReLU,\n            norm_layer: LayerType = nn.BatchNorm2d,\n            aa_layer: Optional[LayerType] = None,\n            se_layer: Optional[ModuleType] = None,\n            conv_kwargs: Optional[Dict] = None,\n            drop_path_rate: float = 0.,\n            layer_scale_init_value: Optional[float] = 1e-5,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        conv_kwargs = conv_kwargs or {}\n        self.has_skip = (in_chs == out_chs and stride == 1) and not noskip\n        if stride > 1:\n            assert dw_kernel_size_start or dw_kernel_size_mid or dw_kernel_size_end\n\n        # FIXME dilation isn't right w/ extra ks > 1 convs\n        if dw_kernel_size_start:\n            dw_start_stride = stride if not dw_kernel_size_mid else 1\n            dw_start_groups = num_groups(group_size, in_chs)\n            self.dw_start = ConvNormAct(\n                in_chs, in_chs, dw_kernel_size_start,\n                stride=dw_start_stride,\n                dilation=dilation,  # FIXME\n                groups=dw_start_groups,\n                padding=pad_type,\n                apply_act=False,\n                act_layer=act_layer,\n                norm_layer=norm_layer,\n                aa_layer=aa_layer,\n                **conv_kwargs,\n                **dd,\n            )\n        else:\n            self.dw_start = nn.Identity()\n\n        # Point-wise expansion\n        mid_chs = make_divisible(in_chs * exp_ratio)\n        self.pw_exp = ConvNormAct(\n            in_chs, mid_chs, 1,\n            padding=pad_type,\n            
act_layer=act_layer,\n            norm_layer=norm_layer,\n            **conv_kwargs,\n            **dd,\n        )\n\n        # Middle depth-wise convolution\n        if dw_kernel_size_mid:\n            groups = num_groups(group_size, mid_chs)\n            self.dw_mid = ConvNormAct(\n                mid_chs, mid_chs, dw_kernel_size_mid,\n                stride=stride,\n                dilation=dilation,  # FIXME\n                groups=groups,\n                padding=pad_type,\n                act_layer=act_layer,\n                norm_layer=norm_layer,\n                aa_layer=aa_layer,\n                **conv_kwargs,\n                **dd,\n            )\n        else:\n            # keeping mid as identity so it can be hooked more easily for features\n            self.dw_mid = nn.Identity()\n\n        # Squeeze-and-excitation\n        self.se = se_layer(mid_chs, act_layer=act_layer, **dd) if se_layer else nn.Identity()\n\n        # Point-wise linear projection\n        self.pw_proj = ConvNormAct(\n            mid_chs, out_chs, 1,\n            padding=pad_type,\n            apply_act=False,\n            act_layer=act_layer,\n            norm_layer=norm_layer,\n            **conv_kwargs,\n            **dd,\n        )\n\n        if dw_kernel_size_end:\n            dw_end_stride = stride if not dw_kernel_size_start and not dw_kernel_size_mid else 1\n            dw_end_groups = num_groups(group_size, out_chs)\n            if dw_end_stride > 1:\n                assert not aa_layer\n            self.dw_end = ConvNormAct(\n                out_chs, out_chs, dw_kernel_size_end,\n                stride=dw_end_stride,\n                dilation=dilation,\n                groups=dw_end_groups,\n                padding=pad_type,\n                apply_act=False,\n                act_layer=act_layer,\n                norm_layer=norm_layer,\n                **conv_kwargs,\n                **dd,\n            )\n        else:\n            self.dw_end = nn.Identity()\n\n        
if layer_scale_init_value is not None:\n            self.layer_scale = LayerScale2d(out_chs, layer_scale_init_value, **dd)\n        else:\n            self.layer_scale = nn.Identity()\n        self.drop_path = DropPath(drop_path_rate) if drop_path_rate else nn.Identity()\n\n    def feature_info(self, location):\n        if location == 'expansion':  # after SE, input to PWL\n            return dict(module='pw_proj.conv', hook_type='forward_pre', num_chs=self.pw_proj.conv.in_channels)\n        else:  # location == 'bottleneck', block output\n            return dict(module='', num_chs=self.pw_proj.conv.out_channels)\n\n    def forward(self, x):\n        shortcut = x\n        x = self.dw_start(x)\n        x = self.pw_exp(x)\n        x = self.dw_mid(x)\n        x = self.se(x)\n        x = self.pw_proj(x)\n        x = self.dw_end(x)\n        x = self.layer_scale(x)\n        if self.has_skip:\n            x = self.drop_path(x) + shortcut\n        return x\n\n\nclass MobileAttention(nn.Module):\n    \"\"\" Mobile Attention Block\n\n    For MobileNetV4 - https://arxiv.org/abs/, referenced from\n    https://github.com/tensorflow/models/blob/d93c7e932de27522b2fa3b115f58d06d6f640537/official/vision/modeling/layers/nn_blocks.py#L1504\n    \"\"\"\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            stride: int = 1,\n            dw_kernel_size: int = 3,\n            dilation: int = 1,\n            group_size: int = 1,\n            pad_type: str = '',\n            num_heads: int = 8,\n            key_dim: int = 64,\n            value_dim: int = 64,\n            use_multi_query: bool = False,\n            query_strides: int = (1, 1),\n            kv_stride: int = 1,\n            cpe_dw_kernel_size: int = 3,\n            noskip: bool = False,\n            act_layer: LayerType = nn.ReLU,\n            norm_layer: LayerType = nn.BatchNorm2d,\n            aa_layer: Optional[LayerType] = None,\n            drop_path_rate: float = 0.,\n   
         attn_drop: float = 0.0,\n            proj_drop: float = 0.0,\n            layer_scale_init_value: Optional[float] = 1e-5,\n            use_bias: bool = False,\n            use_cpe: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        norm_act_layer = get_norm_act_layer(norm_layer, act_layer)\n        self.has_skip = (stride == 1 and in_chs == out_chs) and not noskip\n        self.query_strides = to_2tuple(query_strides)\n        self.kv_stride = kv_stride\n        self.has_query_stride = any([s > 1 for s in self.query_strides])\n\n        # This CPE is different than the one suggested in the original paper.\n        # https://arxiv.org/abs/2102.10882\n        # 1. Rather than adding one CPE before the attention blocks, we add a CPE\n        #    into every attention block.\n        # 2. We replace the expensive Conv2D by a Separable DW Conv.\n        if use_cpe:\n            self.conv_cpe_dw = create_conv2d(\n                in_chs, in_chs,\n                kernel_size=cpe_dw_kernel_size,\n                dilation=dilation,\n                depthwise=True,\n                bias=True,\n                **dd,\n            )\n        else:\n            self.conv_cpe_dw = None\n\n        self.norm = norm_act_layer(in_chs, apply_act=False, **dd)\n\n        if num_heads is None:\n            assert in_chs % key_dim == 0\n            num_heads = in_chs // key_dim\n\n        if use_multi_query:\n            self.attn = MultiQueryAttention2d(\n                in_chs,\n                dim_out=out_chs,\n                num_heads=num_heads,\n                key_dim=key_dim,\n                value_dim=value_dim,\n                query_strides=query_strides,\n                kv_stride=kv_stride,\n                dw_kernel_size=dw_kernel_size,\n                dilation=dilation,\n                padding=pad_type,\n                attn_drop=attn_drop,\n                
proj_drop=proj_drop,\n                norm_layer=norm_layer,\n                # use_bias=use_bias, # why not here if used w/ mhsa?\n                **dd,\n            )\n        else:\n            self.attn = Attention2d(\n                in_chs,\n                dim_out=out_chs,\n                num_heads=num_heads,\n                attn_drop=attn_drop,\n                proj_drop=proj_drop,\n                bias=use_bias,\n                **dd,\n            )\n\n        if layer_scale_init_value is not None:\n            self.layer_scale = LayerScale2d(out_chs, layer_scale_init_value, **dd)\n        else:\n            self.layer_scale = nn.Identity()\n\n        self.drop_path = DropPath(drop_path_rate) if drop_path_rate else nn.Identity()\n\n    def feature_info(self, location):\n        if location == 'expansion':  # after SE, input to PW\n            return dict(module='conv_pw', hook_type='forward_pre', num_chs=self.conv_pw.in_channels)\n        else:  # location == 'bottleneck', block output\n            return dict(module='', num_chs=self.conv_pw.out_channels)\n\n    def forward(self, x):\n        if self.conv_cpe_dw is not None:\n            x_cpe = self.conv_cpe_dw(x)\n            x = x + x_cpe\n\n        shortcut = x\n        x = self.norm(x)\n        x = self.attn(x)\n        x = self.layer_scale(x)\n        if self.has_skip:\n            x = self.drop_path(x) + shortcut\n\n        return x\n\n\nclass CondConvResidual(InvertedResidual):\n    \"\"\" Inverted residual block w/ CondConv routing\"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            dw_kernel_size: int = 3,\n            stride: int = 1,\n            dilation: int = 1,\n            group_size: int = 1,\n            pad_type: str = '',\n            noskip: bool = False,\n            exp_ratio: float = 1.0,\n            exp_kernel_size: int = 1,\n            pw_kernel_size: int = 1,\n            act_layer: LayerType = nn.ReLU,\n            
norm_layer: LayerType = nn.BatchNorm2d,\n            aa_layer: Optional[LayerType] = None,\n            se_layer: Optional[ModuleType] = None,\n            num_experts: int = 0,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        self.num_experts = num_experts\n        conv_kwargs = dict(num_experts=self.num_experts)\n        super().__init__(\n            in_chs,\n            out_chs,\n            dw_kernel_size=dw_kernel_size,\n            stride=stride,\n            dilation=dilation,\n            group_size=group_size,\n            pad_type=pad_type,\n            noskip=noskip,\n            exp_ratio=exp_ratio,\n            exp_kernel_size=exp_kernel_size,\n            pw_kernel_size=pw_kernel_size,\n            act_layer=act_layer,\n            norm_layer=norm_layer,\n            aa_layer=aa_layer,\n            se_layer=se_layer,\n            conv_kwargs=conv_kwargs,\n            drop_path_rate=drop_path_rate,\n            **dd,\n        )\n        self.routing_fn = nn.Linear(in_chs, self.num_experts, **dd)\n\n    def forward(self, x):\n        shortcut = x\n        pooled_inputs = F.adaptive_avg_pool2d(x, 1).flatten(1)  # CondConv routing\n        routing_weights = torch.sigmoid(self.routing_fn(pooled_inputs))\n        x = self.conv_pw(x, routing_weights)\n        x = self.bn1(x)\n        x = self.conv_dw(x, routing_weights)\n        x = self.bn2(x)\n        x = self.se(x)\n        x = self.conv_pwl(x, routing_weights)\n        x = self.bn3(x)\n        if self.has_skip:\n            x = self.drop_path(x) + shortcut\n        return x\n\n\nclass EdgeResidual(nn.Module):\n    \"\"\" Residual block with expansion convolution followed by pointwise-linear w/ stride\n\n    Originally introduced in `EfficientNet-EdgeTPU: Creating Accelerator-Optimized Neural Networks with AutoML`\n        - https://ai.googleblog.com/2019/08/efficientnet-edgetpu-creating.html\n\n  
  This layer is also called FusedMBConv in the MobileDet, EfficientNet-X, and EfficientNet-V2 papers\n      * MobileDet - https://arxiv.org/abs/2004.14525\n      * EfficientNet-X - https://arxiv.org/abs/2102.05610\n      * EfficientNet-V2 - https://arxiv.org/abs/2104.00298\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            exp_kernel_size: int = 3,\n            stride: int = 1,\n            dilation: int = 1,\n            group_size: int = 0,\n            pad_type: str = '',\n            force_in_chs: int = 0,\n            noskip: bool = False,\n            exp_ratio: float = 1.0,\n            pw_kernel_size:  int = 1,\n            act_layer: LayerType = nn.ReLU,\n            norm_layer: LayerType = nn.BatchNorm2d,\n            aa_layer: Optional[LayerType] = None,\n            se_layer: Optional[ModuleType] = None,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        norm_act_layer = get_norm_act_layer(norm_layer, act_layer)\n        if force_in_chs > 0:\n            mid_chs = make_divisible(force_in_chs * exp_ratio)\n        else:\n            mid_chs = make_divisible(in_chs * exp_ratio)\n        groups = num_groups(group_size, mid_chs)  # NOTE: Using out_chs of conv_exp for groups calc\n        self.has_skip = (in_chs == out_chs and stride == 1) and not noskip\n        use_aa = aa_layer is not None and stride > 1  # FIXME handle dilation\n\n        # Expansion convolution\n        self.conv_exp = create_conv2d(\n            in_chs,\n            mid_chs,\n            exp_kernel_size,\n            stride=1 if use_aa else stride,\n            dilation=dilation,\n            groups=groups,\n            padding=pad_type,\n            **dd,\n        )\n        self.bn1 = norm_act_layer(mid_chs, inplace=True, **dd)\n\n        self.aa = create_aa(aa_layer, channels=mid_chs, 
stride=stride, enable=use_aa, **dd)\n\n        # Squeeze-and-excitation\n        self.se = se_layer(mid_chs, act_layer=act_layer, **dd) if se_layer else nn.Identity()\n\n        # Point-wise linear projection\n        self.conv_pwl = create_conv2d(mid_chs, out_chs, pw_kernel_size, padding=pad_type, **dd)\n        self.bn2 = norm_act_layer(out_chs, apply_act=False, **dd)\n        self.drop_path = DropPath(drop_path_rate) if drop_path_rate else nn.Identity()\n\n    def feature_info(self, location):\n        if location == 'expansion':  # after SE, before PWL\n            return dict(module='conv_pwl', hook_type='forward_pre', num_chs=self.conv_pwl.in_channels)\n        else:  # location == 'bottleneck', block output\n            return dict(module='', num_chs=self.conv_pwl.out_channels)\n\n    def forward(self, x):\n        shortcut = x\n        x = self.conv_exp(x)\n        x = self.bn1(x)\n        x = self.aa(x)\n        x = self.se(x)\n        x = self.conv_pwl(x)\n        x = self.bn2(x)\n        if self.has_skip:\n            x = self.drop_path(x) + shortcut\n        return x\n"
  },
  {
    "path": "timm/models/_efficientnet_builder.py",
    "content": "\"\"\" EfficientNet, MobileNetV3, etc Builder\n\nAssembles EfficieNet and related network feature blocks from string definitions.\nHandles stride, dilation calculations, and selects feature extraction points.\n\nHacked together by / Copyright 2019, Ross Wightman\n\"\"\"\nfrom typing import Callable, Optional\n\nimport logging\nimport math\nimport re\nfrom copy import deepcopy\nfrom functools import partial\nfrom typing import Any, Dict, List\n\nimport torch.nn as nn\n\nfrom timm.layers import CondConv2d, get_condconv_initializer, get_act_layer, get_attn, make_divisible, LayerType\nfrom ._efficientnet_blocks import *\nfrom ._manipulate import named_modules\n\n__all__ = [\"EfficientNetBuilder\", \"BlockArgs\", \"decode_arch_def\", \"efficientnet_init_weights\",\n           'resolve_bn_args', 'resolve_act_layer', 'round_channels', 'BN_MOMENTUM_TF_DEFAULT', 'BN_EPS_TF_DEFAULT']\n\n_logger = logging.getLogger(__name__)\n\n\n_DEBUG_BUILDER = False\n\n# Defaults used for Google/Tensorflow training of mobile networks /w RMSprop as per\n# papers and TF reference implementations. 
PT momentum equiv for TF decay is (1 - TF decay)\n# NOTE: momentum varies btw .99 and .9997 depending on source\n# .99 in official TF TPU impl\n# .9997 (/w .999 in search space) for paper\nBN_MOMENTUM_TF_DEFAULT = 1 - 0.99\nBN_EPS_TF_DEFAULT = 1e-3\n_BN_ARGS_TF = dict(momentum=BN_MOMENTUM_TF_DEFAULT, eps=BN_EPS_TF_DEFAULT)\n\nBlockArgs = List[List[Dict[str, Any]]]\n\n\ndef get_bn_args_tf():\n    return _BN_ARGS_TF.copy()\n\n\ndef resolve_bn_args(kwargs):\n    bn_args = {}\n    bn_momentum = kwargs.pop('bn_momentum', None)\n    if bn_momentum is not None:\n        bn_args['momentum'] = bn_momentum\n    bn_eps = kwargs.pop('bn_eps', None)\n    if bn_eps is not None:\n        bn_args['eps'] = bn_eps\n    return bn_args\n\n\ndef resolve_act_layer(kwargs, default='relu'):\n    return get_act_layer(kwargs.pop('act_layer', default))\n\n\ndef round_channels(channels, multiplier=1.0, divisor=8, channel_min=None, round_limit=0.9):\n    \"\"\"Round number of filters based on depth multiplier.\"\"\"\n    if not multiplier:\n        return channels\n    return make_divisible(channels * multiplier, divisor, channel_min, round_limit=round_limit)\n\n\ndef _log_info_if(msg, condition):\n    if condition:\n        _logger.info(msg)\n\n\ndef _parse_ksize(ss):\n    if ss.isdigit():\n        return int(ss)\n    else:\n        return [int(k) for k in ss.split('.')]\n\n\ndef _decode_block_str(block_str):\n    \"\"\" Decode block definition string\n\n    Gets a list of block arg (dicts) through a string notation of arguments.\n    E.g. 
ir_r2_k3_s2_e1_i32_o16_se0.25_noskip\n\n    All args can exist in any order with the exception of the leading string which\n    is assumed to indicate the block type.\n\n    leading string - block type (\n      ir = InvertedResidual, ds = DepthwiseSep, dsa = DepthwiseSep with pw act, cn = ConvBnAct)\n    r - number of repeat blocks,\n    k - kernel size,\n    s - strides (1-9),\n    e - expansion ratio,\n    c - output channels,\n    se - squeeze/excitation ratio\n    n - activation fn ('re', 'r6', 'hs', or 'sw')\n    Args:\n        block_str: a string representation of block arguments.\n    Returns:\n        A list of block args (dicts)\n    Raises:\n        ValueError: if the string def is not properly specified (TODO)\n    \"\"\"\n    assert isinstance(block_str, str)\n    ops = block_str.split('_')\n    block_type = ops[0]  # take the block type off the front\n    ops = ops[1:]\n    options = {}\n    skip = None\n    for op in ops:\n        # string options being checked on individual basis, combine if they grow\n        if op == 'noskip':\n            skip = False  # force no skip connection\n        elif op == 'skip':\n            skip = True  # force a skip connection\n        elif op.startswith('n'):\n            # activation fn\n            key = op[0]\n            v = op[1:]\n            if v == 're':\n                value = get_act_layer('relu')\n            elif v == 'r6':\n                value = get_act_layer('relu6')\n            elif v == 'hs':\n                value = get_act_layer('hard_swish')\n            elif v == 'sw':\n                value = get_act_layer('swish')  # aka SiLU\n            elif v == 'mi':\n                value = get_act_layer('mish')\n            else:\n                continue\n            options[key] = value\n        else:\n            # all numeric options\n            splits = re.split(r'(\\d.*)', op)\n            if len(splits) >= 2:\n                key, value = splits[:2]\n                options[key] = value\n\n    # 
if act_layer is None, the model default (passed to model init) will be used\n    act_layer = options['n'] if 'n' in options else None\n    start_kernel_size = _parse_ksize(options['a']) if 'a' in options else 1\n    end_kernel_size = _parse_ksize(options['p']) if 'p' in options else 1\n    force_in_chs = int(options['fc']) if 'fc' in options else 0  # FIXME hack to deal with in_chs issue in TPU def\n    num_repeat = int(options['r'])\n\n    # each type of block has different valid arguments, fill accordingly\n    block_args = dict(\n        block_type=block_type,\n        out_chs=int(options['c']),\n        stride=int(options['s']),\n        act_layer=act_layer,\n    )\n    if block_type == 'ir':\n        block_args.update(dict(\n            dw_kernel_size=_parse_ksize(options['k']),\n            exp_kernel_size=start_kernel_size,\n            pw_kernel_size=end_kernel_size,\n            exp_ratio=float(options['e']),\n            se_ratio=float(options.get('se', 0.)),\n            noskip=skip is False,\n            s2d=int(options.get('d', 0)) > 0,\n        ))\n        if 'cc' in options:\n            block_args['num_experts'] = int(options['cc'])\n    elif block_type == 'ds' or block_type == 'dsa':\n        block_args.update(dict(\n            dw_kernel_size=_parse_ksize(options['k']),\n            pw_kernel_size=end_kernel_size,\n            se_ratio=float(options.get('se', 0.)),\n            pw_act=block_type == 'dsa',\n            noskip=block_type == 'dsa' or skip is False,\n            s2d=int(options.get('d', 0)) > 0,\n        ))\n    elif block_type == 'er':\n        block_args.update(dict(\n            exp_kernel_size=_parse_ksize(options['k']),\n            pw_kernel_size=end_kernel_size,\n            exp_ratio=float(options['e']),\n            force_in_chs=force_in_chs,\n            se_ratio=float(options.get('se', 0.)),\n            noskip=skip is False,\n        ))\n    elif block_type == 'cn':\n        block_args.update(dict(\n            
kernel_size=int(options['k']),\n            skip=skip is True,\n        ))\n    elif block_type == 'uir':\n        # override exp / proj kernels for start/end in uir block\n        start_kernel_size = _parse_ksize(options['a']) if 'a' in options else 0\n        end_kernel_size = _parse_ksize(options['p']) if 'p' in options else 0\n        block_args.update(dict(\n            dw_kernel_size_start=start_kernel_size,  # overload exp ks arg for dw start\n            dw_kernel_size_mid=_parse_ksize(options['k']),\n            dw_kernel_size_end=end_kernel_size,  # overload pw ks arg for dw end\n            exp_ratio=float(options['e']),\n            se_ratio=float(options.get('se', 0.)),\n            noskip=skip is False,\n        ))\n    elif block_type == 'mha':\n        kv_dim = int(options['d'])\n        block_args.update(dict(\n            dw_kernel_size=_parse_ksize(options['k']),\n            num_heads=int(options['h']),\n            key_dim=kv_dim,\n            value_dim=kv_dim,\n            kv_stride=int(options.get('v', 1)),\n            noskip=skip is False,\n        ))\n    elif block_type == 'mqa':\n        kv_dim = int(options['d'])\n        block_args.update(dict(\n            dw_kernel_size=_parse_ksize(options['k']),\n            num_heads=int(options['h']),\n            key_dim=kv_dim,\n            value_dim=kv_dim,\n            kv_stride=int(options.get('v', 1)),\n            noskip=skip is False,\n        ))\n    else:\n        assert False, 'Unknown block type (%s)' % block_type\n\n    if 'gs' in options:\n        block_args['group_size'] = int(options['gs'])\n\n    return block_args, num_repeat\n\n\ndef _scale_stage_depth(stack_args, repeats, depth_multiplier=1.0, depth_trunc='ceil'):\n    \"\"\" Per-stage depth scaling\n    Scales the block repeats in each stage. 
This depth scaling impl maintains\n    compatibility with the EfficientNet scaling method, while allowing sensible\n    scaling for other models that may have multiple block arg definitions in each stage.\n    \"\"\"\n\n    # We scale the total repeat count for each stage, there may be multiple\n    # block arg defs per stage so we need to sum.\n    num_repeat = sum(repeats)\n    if depth_trunc == 'round':\n        # Truncating to int by rounding allows stages with few repeats to remain\n        # proportionally smaller for longer. This is a good choice when stage definitions\n        # include single repeat stages that we'd prefer to keep that way as long as possible\n        num_repeat_scaled = max(1, round(num_repeat * depth_multiplier))\n    else:\n        # The default for EfficientNet truncates repeats to int via 'ceil'.\n        # Any multiplier > 1.0 will result in an increased depth for every stage.\n        num_repeat_scaled = int(math.ceil(num_repeat * depth_multiplier))\n\n    # Proportionally distribute repeat count scaling to each block definition in the stage.\n    # Allocation is done in reverse as it results in the first block being less likely to be scaled.\n    # The first block makes less sense to repeat in most of the arch definitions.\n    repeats_scaled = []\n    for r in repeats[::-1]:\n        rs = max(1, round((r / num_repeat * num_repeat_scaled)))\n        repeats_scaled.append(rs)\n        num_repeat -= r\n        num_repeat_scaled -= rs\n    repeats_scaled = repeats_scaled[::-1]\n\n    # Apply the calculated scaling to each block arg in the stage\n    sa_scaled = []\n    for ba, rep in zip(stack_args, repeats_scaled):\n        sa_scaled.extend([deepcopy(ba) for _ in range(rep)])\n    return sa_scaled\n\n\ndef decode_arch_def(\n        arch_def,\n        depth_multiplier=1.0,\n        depth_trunc='ceil',\n        experts_multiplier=1,\n        fix_first_last=False,\n        group_size=None,\n):\n    \"\"\" Decode block architecture 
definition strings -> block kwargs\n\n    Args:\n        arch_def: architecture definition strings, list of list of strings\n        depth_multiplier: network depth multiplier\n        depth_trunc: network depth truncation mode when applying multiplier\n        experts_multiplier: CondConv experts multiplier\n        fix_first_last: fix first and last block depths when multiplier is applied\n        group_size: group size override for all blocks that weren't explicitly set in arch string\n\n    Returns:\n        list of list of block kwargs\n    \"\"\"\n    arch_args = []\n    if isinstance(depth_multiplier, tuple):\n        assert len(depth_multiplier) == len(arch_def)\n    else:\n        depth_multiplier = (depth_multiplier,) * len(arch_def)\n    for stack_idx, (block_strings, multiplier) in enumerate(zip(arch_def, depth_multiplier)):\n        assert isinstance(block_strings, list)\n        stack_args = []\n        repeats = []\n        for block_str in block_strings:\n            assert isinstance(block_str, str)\n            ba, rep = _decode_block_str(block_str)\n            if ba.get('num_experts', 0) > 0 and experts_multiplier > 1:\n                ba['num_experts'] *= experts_multiplier\n            if group_size is not None:\n                ba.setdefault('group_size', group_size)\n            stack_args.append(ba)\n            repeats.append(rep)\n        if fix_first_last and (stack_idx == 0 or stack_idx == len(arch_def) - 1):\n            arch_args.append(_scale_stage_depth(stack_args, repeats, 1.0, depth_trunc))\n        else:\n            arch_args.append(_scale_stage_depth(stack_args, repeats, multiplier, depth_trunc))\n    return arch_args\n\n\nclass EfficientNetBuilder:\n    \"\"\" Build Trunk Blocks\n\n    This ended up being somewhat of a cross between\n    https://github.com/tensorflow/tpu/blob/master/models/official/mnasnet/mnasnet_models.py\n    and\n    
https://github.com/facebookresearch/maskrcnn-benchmark/blob/master/maskrcnn_benchmark/modeling/backbone/fbnet_builder.py\n\n    \"\"\"\n    def __init__(\n            self,\n            output_stride: int = 32,\n            pad_type: str = '',\n            round_chs_fn: Callable = round_channels,\n            se_from_exp: bool = False,\n            act_layer: Optional[LayerType] = None,\n            norm_layer: Optional[LayerType] = None,\n            aa_layer: Optional[LayerType] = None,\n            se_layer: Optional[LayerType] = None,\n            drop_path_rate: float = 0.,\n            layer_scale_init_value: Optional[float] = None,\n            feature_location: str = '',\n            device=None,\n            dtype=None,\n    ):\n        self.output_stride = output_stride\n        self.pad_type = pad_type\n        self.round_chs_fn = round_chs_fn\n        self.se_from_exp = se_from_exp  # calculate se channel reduction from expanded (mid) chs\n        self.act_layer = act_layer\n        self.norm_layer = norm_layer\n        self.aa_layer = aa_layer\n        self.se_layer = get_attn(se_layer)\n        try:\n            self.se_layer(8, rd_ratio=1.0)  # test if attn layer accepts rd_ratio arg\n            self.se_has_ratio = True\n        except TypeError:\n            self.se_has_ratio = False\n        self.drop_path_rate = drop_path_rate\n        self.layer_scale_init_value = layer_scale_init_value\n        if feature_location == 'depthwise':\n            # old 'depthwise' mode renamed 'expansion' to match TF impl, old expansion mode didn't make sense\n            _logger.warning(\"feature_location=='depthwise' is deprecated, using 'expansion'\")\n            feature_location = 'expansion'\n        self.feature_location = feature_location\n        assert feature_location in ('bottleneck', 'expansion', '')\n        self.dd = {'device': device, 'dtype': dtype}  # device/dtype factory kwargs\n        self.verbose = _DEBUG_BUILDER\n\n        # state updated 
during build, consumed by model\n        self.in_chs = None\n        self.features = []\n\n    def _make_block(self, ba, block_idx, block_count):\n        drop_path_rate = self.drop_path_rate * block_idx / block_count\n        bt = ba.pop('block_type')\n        ba['in_chs'] = self.in_chs\n        ba['out_chs'] = self.round_chs_fn(ba['out_chs'])\n        s2d = ba.get('s2d', 0)\n        if s2d > 0:\n            # adjust while space2depth active\n            ba['out_chs'] *= 4\n        if 'force_in_chs' in ba and ba['force_in_chs']:\n            # NOTE this is a hack to work around mismatch in TF EdgeEffNet impl\n            ba['force_in_chs'] = self.round_chs_fn(ba['force_in_chs'])\n        ba['pad_type'] = self.pad_type\n        # block act fn overrides the model default\n        ba['act_layer'] = ba['act_layer'] if ba['act_layer'] is not None else self.act_layer\n        assert ba['act_layer'] is not None\n        ba['norm_layer'] = self.norm_layer\n        ba['drop_path_rate'] = drop_path_rate\n\n        if self.aa_layer is not None:\n            ba['aa_layer'] = self.aa_layer\n\n        se_ratio = ba.pop('se_ratio', None)\n        if se_ratio and self.se_layer is not None:\n            if not self.se_from_exp:\n                # adjust se_ratio by expansion ratio if calculating se channels from block input\n                se_ratio /= ba.get('exp_ratio', 1.0)\n            if s2d == 1:\n                # adjust for start of space2depth\n                se_ratio /= 4\n            if self.se_has_ratio:\n                ba['se_layer'] = partial(self.se_layer, rd_ratio=se_ratio)\n            else:\n                ba['se_layer'] = self.se_layer\n\n        ba.update(self.dd)  # device/type factory kwargs\n\n        if bt == 'ir':\n            _log_info_if('  InvertedResidual {}, Args: {}'.format(block_idx, str(ba)), self.verbose)\n            block = CondConvResidual(**ba) if ba.get('num_experts', 0) else InvertedResidual(**ba)\n        elif bt == 'ds' or bt == 
'dsa':\n            _log_info_if('  DepthwiseSeparable {}, Args: {}'.format(block_idx, str(ba)), self.verbose)\n            block = DepthwiseSeparableConv(**ba)\n        elif bt == 'er':\n            _log_info_if('  EdgeResidual {}, Args: {}'.format(block_idx, str(ba)), self.verbose)\n            block = EdgeResidual(**ba)\n        elif bt == 'cn':\n            _log_info_if('  ConvBnAct {}, Args: {}'.format(block_idx, str(ba)), self.verbose)\n            block = ConvBnAct(**ba)\n        elif bt == 'uir':\n            _log_info_if('  UniversalInvertedResidual {}, Args: {}'.format(block_idx, str(ba)), self.verbose)\n            block = UniversalInvertedResidual(**ba, layer_scale_init_value=self.layer_scale_init_value)\n        elif bt == 'mqa':\n            _log_info_if('  MobileMultiQueryAttention {}, Args: {}'.format(block_idx, str(ba)), self.verbose)\n            block = MobileAttention(**ba, use_multi_query=True, layer_scale_init_value=self.layer_scale_init_value)\n        elif bt == 'mha':\n            _log_info_if('  MobileMultiHeadAttention {}, Args: {}'.format(block_idx, str(ba)), self.verbose)\n            block = MobileAttention(**ba, layer_scale_init_value=self.layer_scale_init_value)\n        else:\n            assert False, 'Unknown block type (%s) while building model.' % bt\n\n        self.in_chs = ba['out_chs']  # update in_chs for arg of next block\n        return block\n\n    def __call__(self, in_chs, model_block_args):\n        \"\"\" Build the blocks\n        Args:\n            in_chs: Number of input-channels passed to first block\n            model_block_args: A list of lists, outer list defines stages, inner\n                list contains strings defining block configuration(s)\n        Return:\n             List of block stacks (each stack wrapped in nn.Sequential)\n        \"\"\"\n        _log_info_if('Building model trunk with %d stages...' 
% len(model_block_args), self.verbose)\n        self.in_chs = in_chs\n        total_block_count = sum([len(x) for x in model_block_args])\n        total_block_idx = 0\n        current_stride = 2\n        current_dilation = 1\n        stages = []\n        if model_block_args[0][0]['stride'] > 1:\n            # if the first block starts with a stride, we need to extract first level feat from stem\n            feature_info = dict(module='bn1', num_chs=in_chs, stage=0, reduction=current_stride)\n            self.features.append(feature_info)\n\n        # outer list of block_args defines the stacks\n        space2depth = 0\n        for stack_idx, stack_args in enumerate(model_block_args):\n            last_stack = stack_idx + 1 == len(model_block_args)\n            _log_info_if('Stack: {}'.format(stack_idx), self.verbose)\n            assert isinstance(stack_args, list)\n\n            blocks = []\n            # each stack (stage of blocks) contains a list of block arguments\n            for block_idx, block_args in enumerate(stack_args):\n                last_block = block_idx + 1 == len(stack_args)\n                _log_info_if(' Block: {}'.format(block_idx), self.verbose)\n\n                assert block_args['stride'] in (1, 2)\n                if block_idx >= 1:   # only the first block in any stack can have a stride > 1\n                    block_args['stride'] = 1\n\n                if not space2depth and block_args.pop('s2d', False):\n                    assert block_args['stride'] == 1\n                    space2depth = 1\n\n                if space2depth > 0:\n                    # FIXME s2d is a WIP\n                    if space2depth == 2 and block_args['stride'] == 2:\n                        block_args['stride'] = 1\n                        # to end s2d region, need to correct expansion and se ratio relative to input\n                        block_args['exp_ratio'] /= 4\n                        space2depth = 0\n                    else:\n                     
   block_args['s2d'] = space2depth\n\n                extract_features = False\n                if last_block:\n                    next_stack_idx = stack_idx + 1\n                    extract_features = next_stack_idx >= len(model_block_args) or \\\n                        model_block_args[next_stack_idx][0]['stride'] > 1\n\n                next_dilation = current_dilation\n                if block_args['stride'] > 1:\n                    next_output_stride = current_stride * block_args['stride']\n                    if next_output_stride > self.output_stride:\n                        next_dilation = current_dilation * block_args['stride']\n                        block_args['stride'] = 1\n                        _log_info_if('  Converting stride to dilation to maintain output_stride=={}'.format(\n                            self.output_stride), self.verbose)\n                    else:\n                        current_stride = next_output_stride\n                block_args['dilation'] = current_dilation\n                if next_dilation != current_dilation:\n                    current_dilation = next_dilation\n\n                # create the block\n                block = self._make_block(block_args, total_block_idx, total_block_count)\n                blocks.append(block)\n\n                if space2depth == 1:\n                    space2depth = 2\n\n                # stash feature module name and channel info for model feature extraction\n                if extract_features:\n                    feature_info = dict(\n                        stage=stack_idx + 1,\n                        reduction=current_stride,\n                        **block.feature_info(self.feature_location),\n                    )\n                    leaf_name = feature_info.get('module', '')\n                    if leaf_name:\n                        feature_info['module'] = '.'.join([f'blocks.{stack_idx}.{block_idx}', leaf_name])\n                    else:\n                        assert 
last_block\n                        feature_info['module'] = f'blocks.{stack_idx}'\n                    self.features.append(feature_info)\n\n                total_block_idx += 1  # incr global block idx (across all stacks)\n            stages.append(nn.Sequential(*blocks))\n        return stages\n\n\ndef _init_weight_goog(m, n='', fix_group_fanout=True):\n    \"\"\" Weight initialization as per Tensorflow official implementations.\n\n    Args:\n        m (nn.Module): module to init\n        n (str): module name\n        fix_group_fanout (bool): enable correct (matching Tensorflow TPU impl) fanout calculation w/ group convs\n\n    Handles layers in EfficientNet, EfficientNet-CondConv, MixNet, MnasNet, MobileNetV3, etc:\n    * https://github.com/tensorflow/tpu/blob/master/models/official/mnasnet/mnasnet_model.py\n    * https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/efficientnet_model.py\n    \"\"\"\n    if isinstance(m, CondConv2d):\n        fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n        if fix_group_fanout:\n            fan_out //= m.groups\n        init_weight_fn = get_condconv_initializer(\n            lambda w: nn.init.normal_(w, 0, math.sqrt(2.0 / fan_out)), m.num_experts, m.weight_shape)\n        init_weight_fn(m.weight)\n        if m.bias is not None:\n            nn.init.zeros_(m.bias)\n    elif isinstance(m, nn.Conv2d):\n        fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n        if fix_group_fanout:\n            fan_out //= m.groups\n        nn.init.normal_(m.weight, 0, math.sqrt(2.0 / fan_out))\n        if m.bias is not None:\n            nn.init.zeros_(m.bias)\n    elif isinstance(m, nn.BatchNorm2d):\n        nn.init.ones_(m.weight)\n        nn.init.zeros_(m.bias)\n    elif isinstance(m, nn.Linear):\n        fan_out = m.weight.size(0)  # fan-out\n        fan_in = 0\n        if 'routing_fn' in n:\n            fan_in = m.weight.size(1)\n        init_range = 1.0 / math.sqrt(fan_in + 
fan_out)\n        nn.init.uniform_(m.weight, -init_range, init_range)\n        nn.init.zeros_(m.bias)\n\n\ndef efficientnet_init_weights(model: nn.Module, init_fn=None):\n    init_fn = init_fn or _init_weight_goog\n    for n, m in model.named_modules():\n        init_fn(m, n)\n\n    # iterate and call any module.init_weights() fn, children first\n    for n, m in named_modules(model):\n        if hasattr(m, 'init_weights'):\n            m.init_weights()\n"
  },
  {
    "path": "timm/models/_factory.py",
    "content": "import os\nfrom pathlib import Path\nfrom typing import Any, Dict, Optional, Tuple, Union\nfrom urllib.parse import urlsplit\n\nfrom torch import nn\n\nfrom timm.layers import set_layer_config\nfrom ._helpers import load_checkpoint\nfrom ._hub import load_model_config_from_hf, load_model_config_from_path\nfrom ._pretrained import PretrainedCfg\nfrom ._registry import is_model, model_entrypoint, split_model_name_tag\n\n\n__all__ = ['parse_model_name', 'safe_model_name', 'create_model']\n\n\ndef parse_model_name(model_name: str) -> Tuple[Optional[str], str]:\n    \"\"\"Parse source and name from potentially prefixed model name.\"\"\"\n    if model_name.startswith('hf_hub'):\n        # NOTE for backwards compat, deprecate hf_hub use\n        model_name = model_name.replace('hf_hub', 'hf-hub')\n    parsed = urlsplit(model_name)\n    assert parsed.scheme in ('', 'hf-hub', 'local-dir')\n    if parsed.scheme == 'hf-hub':\n        # FIXME may use fragment as revision, currently `@` in URI path\n        return parsed.scheme, parsed.path\n    elif parsed.scheme == 'local-dir':\n        return parsed.scheme, parsed.path\n    else:\n        model_name = os.path.split(parsed.path)[-1]\n        return None, model_name\n\n\ndef safe_model_name(model_name: str, remove_source: bool = True) -> str:\n    \"\"\"Return a filename / path safe model name.\"\"\"\n    def make_safe(name: str) -> str:\n        return ''.join(c if c.isalnum() else '_' for c in name).rstrip('_')\n    if remove_source:\n        model_name = parse_model_name(model_name)[-1]\n    return make_safe(model_name)\n\n\ndef create_model(\n        model_name: str,\n        pretrained: bool = False,\n        pretrained_cfg: Optional[Union[str, Dict[str, Any], PretrainedCfg]] = None,\n        pretrained_cfg_overlay: Optional[Dict[str, Any]] = None,\n        checkpoint_path: Optional[Union[str, Path]] = None,\n        cache_dir: Optional[Union[str, Path]] = None,\n        scriptable: Optional[bool] = 
None,\n        exportable: Optional[bool] = None,\n        no_jit: Optional[bool] = None,\n        **kwargs: Any,\n) -> nn.Module:\n    \"\"\"Create a model.\n\n    Lookup model's entrypoint function and pass relevant args to create a new model.\n\n    Tip:\n        **kwargs will be passed through entrypoint fn to ``timm.models.build_model_with_cfg()``\n        and then the model class __init__(). kwargs values set to None are pruned before passing.\n\n    Args:\n        model_name: Name of model to instantiate.\n        pretrained: If set to `True`, load pretrained ImageNet-1k weights.\n        pretrained_cfg: Pass in an external pretrained_cfg for model.\n        pretrained_cfg_overlay: Replace key-values in base pretrained_cfg with these.\n        checkpoint_path: Path of checkpoint to load _after_ the model is initialized.\n        cache_dir: Override model cache dir for Hugging Face Hub and Torch checkpoints.\n        scriptable: Set layer config so that model is jit scriptable (not working for all models yet).\n        exportable: Set layer config so that model is traceable / ONNX exportable (not fully impl/obeyed yet).\n        no_jit: Set layer config so that model doesn't utilize jit scripted layers (so far activations only).\n\n    Keyword Args:\n        drop_rate (float): Classifier dropout rate for training.\n        drop_path_rate (float): Stochastic depth drop rate for training.\n        global_pool (str): Classifier global pooling type.\n\n    Example:\n\n    ```py\n    >>> from timm import create_model\n\n    >>> # Create a MobileNetV3-Large model with no pretrained weights.\n    >>> model = create_model('mobilenetv3_large_100')\n\n    >>> # Create a MobileNetV3-Large model with pretrained weights.\n    >>> model = create_model('mobilenetv3_large_100', pretrained=True)\n    >>> model.num_classes\n    1000\n\n    >>> # Create a MobileNetV3-Large model with pretrained weights and a new head with 10 classes.\n    >>> model = 
create_model('mobilenetv3_large_100', pretrained=True, num_classes=10)\n    >>> model.num_classes\n    10\n\n    >>> # Create a Dinov2 small model with pretrained weights and save weights in a custom directory.\n    >>> model = create_model('vit_small_patch14_dinov2.lvd142m', pretrained=True, cache_dir=\"/data/my-models\")\n    >>> # Data will be stored at `/data/my-models/models--timm--vit_small_patch14_dinov2.lvd142m/`\n    ```\n    \"\"\"\n    # Parameters that aren't supported by all models or are intended to only override model defaults if set\n    # should default to None in command line args/cfg. Remove them if they are present and not set so that\n    # non-supporting models don't break and default args remain in effect.\n    kwargs = {k: v for k, v in kwargs.items() if v is not None}\n\n    model_source, model_id = parse_model_name(model_name)\n    if model_source:\n        assert not pretrained_cfg, 'pretrained_cfg should not be set when sourcing model from Hugging Face Hub.'\n        if model_source == 'hf-hub':\n            # For model names specified in the form `hf-hub:path/architecture_name@revision`,\n            # load model weights + pretrained_cfg from Hugging Face hub.\n            pretrained_cfg, model_name, model_args = load_model_config_from_hf(\n                model_id,\n                cache_dir=cache_dir,\n            )\n        elif model_source == 'local-dir':\n            pretrained_cfg, model_name, model_args = load_model_config_from_path(\n                model_id,\n            )\n        else:\n            assert False, f'Unknown model_source {model_source}'\n        if model_args:\n            for k, v in model_args.items():\n                kwargs.setdefault(k, v)\n    else:\n        model_name, pretrained_tag = split_model_name_tag(model_id)\n        if pretrained_tag and not pretrained_cfg:\n            # a valid pretrained_cfg argument takes priority over tag in model name\n            pretrained_cfg = pretrained_tag\n\n    if 
not is_model(model_name):\n        raise RuntimeError('Unknown model (%s)' % model_name)\n\n    create_fn = model_entrypoint(model_name)\n    with set_layer_config(scriptable=scriptable, exportable=exportable, no_jit=no_jit):\n        model = create_fn(\n            pretrained=pretrained,\n            pretrained_cfg=pretrained_cfg,\n            pretrained_cfg_overlay=pretrained_cfg_overlay,\n            cache_dir=cache_dir,\n            **kwargs,\n        )\n\n    if checkpoint_path:\n        load_checkpoint(model, checkpoint_path)\n\n    return model\n"
  },
  {
    "path": "timm/models/_features.py",
    "content": "\"\"\" PyTorch Feature Extraction Helpers\n\nA collection of classes, functions, modules to help extract features from models\nand provide a common interface for describing them.\n\nThe return_layers, module re-writing idea inspired by torchvision IntermediateLayerGetter\nhttps://github.com/pytorch/vision/blob/d88d8961ae51507d0cb680329d985b1488b1b76b/torchvision/models/_utils.py\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom collections import OrderedDict, defaultdict\nfrom copy import deepcopy\nfrom functools import partial\nfrom typing import Dict, List, Optional, Sequence, Tuple, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.layers import Format, _assert\nfrom ._manipulate import checkpoint\n\n__all__ = [\n    'FeatureInfo', 'FeatureHooks', 'FeatureDictNet', 'FeatureListNet', 'FeatureHookNet', 'FeatureGetterNet',\n    'feature_take_indices'\n]\n\n\ndef feature_take_indices(\n        num_features: int,\n        indices: Optional[Union[int, List[int]]] = None,\n        as_set: bool = False,\n) -> Tuple[List[int], int]:\n    \"\"\" Determine the absolute feature indices to 'take' from.\n\n    Note: This function can be called in forward() so must be torchscript compatible,\n    which requires some incomplete typing and workaround hacks.\n\n    Args:\n        num_features: total number of features to select from\n        indices: indices to select,\n          None -> select all\n          int -> select last n\n          list/tuple of int -> return specified (-ve indices specify from end)\n        as_set: return as a set\n\n    Returns:\n        List (or set) of absolute (from beginning) indices, Maximum index\n    \"\"\"\n    if indices is None:\n        indices = num_features  # all features if None\n\n    if isinstance(indices, int):\n        # convert int -> last n indices\n        _assert(0 < indices <= num_features, f'last-n ({indices}) is out of range (1 to {num_features})')\n        take_indices = [num_features 
- indices + i for i in range(indices)]\n    else:\n        take_indices: List[int] = []\n        for i in indices:\n            idx = num_features + i if i < 0 else i\n            _assert(0 <= idx < num_features, f'feature index {idx} is out of range (0 to {num_features - 1})')\n            take_indices.append(idx)\n\n    if not torch.jit.is_scripting() and as_set:\n        return set(take_indices), max(take_indices)\n\n    return take_indices, max(take_indices)\n\n\ndef _out_indices_as_tuple(x: Union[int, Tuple[int, ...]]) -> Tuple[int, ...]:\n    if isinstance(x, int):\n        # if indices is an int, take last N features\n        return tuple(range(-x, 0))\n    return tuple(x)\n\n\nOutIndicesT = Union[int, Tuple[int, ...]]\n\n\nclass FeatureInfo:\n\n    def __init__(\n            self,\n            feature_info: List[Dict],\n            out_indices: OutIndicesT,\n    ):\n        out_indices = _out_indices_as_tuple(out_indices)\n        prev_reduction = 1\n        for i, fi in enumerate(feature_info):\n            # sanity check the mandatory fields, there may be additional fields depending on the model\n            assert 'num_chs' in fi and fi['num_chs'] > 0\n            assert 'reduction' in fi and fi['reduction'] >= prev_reduction\n            prev_reduction = fi['reduction']\n            assert 'module' in fi\n            fi.setdefault('index', i)\n        self.out_indices = out_indices\n        self.info = feature_info\n\n    def from_other(self, out_indices: OutIndicesT):\n        out_indices = _out_indices_as_tuple(out_indices)\n        return FeatureInfo(deepcopy(self.info), out_indices)\n\n    def get(self, key: str, idx: Optional[Union[int, List[int]]] = None):\n        \"\"\" Get value by key at specified index (indices)\n        if idx == None, returns value for key at each output index\n        if idx is an integer, return value for that feature module index (ignoring output indices)\n        if idx is a list/tuple, return value for each module 
index (ignoring output indices)\n        \"\"\"\n        if idx is None:\n            return [self.info[i][key] for i in self.out_indices]\n        if isinstance(idx, (tuple, list)):\n            return [self.info[i][key] for i in idx]\n        else:\n            return self.info[idx][key]\n\n    def get_dicts(self, keys: Optional[List[str]] = None, idx: Optional[Union[int, List[int]]] = None):\n        \"\"\" return info dicts for specified keys (or all if None) at specified indices (or out_indices if None)\n        \"\"\"\n        if idx is None:\n            if keys is None:\n                return [self.info[i] for i in self.out_indices]\n            else:\n                return [{k: self.info[i][k] for k in keys} for i in self.out_indices]\n        if isinstance(idx, (tuple, list)):\n            return [self.info[i] if keys is None else {k: self.info[i][k] for k in keys} for i in idx]\n        else:\n            return self.info[idx] if keys is None else {k: self.info[idx][k] for k in keys}\n\n    def channels(self, idx: Optional[Union[int, List[int]]] = None):\n        \"\"\" feature channels accessor\n        \"\"\"\n        return self.get('num_chs', idx)\n\n    def reduction(self, idx: Optional[Union[int, List[int]]] = None):\n        \"\"\" feature reduction (output stride) accessor\n        \"\"\"\n        return self.get('reduction', idx)\n\n    def module_name(self, idx: Optional[Union[int, List[int]]] = None):\n        \"\"\" feature module name accessor\n        \"\"\"\n        return self.get('module', idx)\n\n    def __getitem__(self, item):\n        return self.info[item]\n\n    def __len__(self):\n        return len(self.info)\n\n\nclass FeatureHooks:\n    \"\"\" Feature Hook Helper\n\n    This module helps with the setup and extraction of hooks for extracting features from\n    internal nodes in a model by node name.\n\n    FIXME This works well in eager Python but needs redesign for torchscript.\n    \"\"\"\n\n    def __init__(\n            
self,\n            hooks: Sequence[Union[str, Dict]],\n            named_modules: dict,\n            out_map: Sequence[Union[int, str]] = None,\n            default_hook_type: str = 'forward',\n    ):\n        # setup feature hooks\n        self._feature_outputs = defaultdict(OrderedDict)\n        self._handles = []\n        modules = {k: v for k, v in named_modules}\n        for i, h in enumerate(hooks):\n            hook_name = h if isinstance(h, str) else h['module']\n            m = modules[hook_name]\n            hook_id = out_map[i] if out_map else hook_name\n            hook_fn = partial(self._collect_output_hook, hook_id)\n            hook_type = default_hook_type\n            if isinstance(h, dict):\n                hook_type = h.get('hook_type', default_hook_type)\n            if hook_type == 'forward_pre':\n                handle = m.register_forward_pre_hook(hook_fn)\n            elif hook_type == 'forward':\n                handle = m.register_forward_hook(hook_fn)\n            else:\n                assert False, \"Unsupported hook type\"\n            self._handles.append(handle)\n\n    def _collect_output_hook(self, hook_id, *args):\n        x = args[-1]  # tensor we want is last argument, output for fwd, input for fwd_pre\n        if isinstance(x, tuple):\n            x = x[0]  # unwrap input tuple\n        self._feature_outputs[x.device][hook_id] = x\n\n    def get_output(self, device) -> Dict[str, torch.tensor]:\n        output = self._feature_outputs[device]\n        self._feature_outputs[device] = OrderedDict()  # clear after reading\n        return output\n\n\ndef _module_list(module, flatten_sequential=False):\n    # a yield/iter would be better for this but wouldn't be compatible with torchscript\n    ml = []\n    for name, module in module.named_children():\n        if flatten_sequential and isinstance(module, nn.Sequential):\n            # first level of Sequential containers is flattened into containing model\n            for child_name, 
child_module in module.named_children():\n                combined = [name, child_name]\n                ml.append(('_'.join(combined), '.'.join(combined), child_module))\n        else:\n            ml.append((name, name, module))\n    return ml\n\n\ndef _get_feature_info(net, out_indices: OutIndicesT):\n    feature_info = getattr(net, 'feature_info')\n    if isinstance(feature_info, FeatureInfo):\n        return feature_info.from_other(out_indices)\n    elif isinstance(feature_info, (list, tuple)):\n        return FeatureInfo(net.feature_info, out_indices)\n    else:\n        assert False, \"Provided feature_info is not valid\"\n\n\ndef _get_return_layers(feature_info, out_map):\n    module_names = feature_info.module_name()\n    return_layers = {}\n    for i, name in enumerate(module_names):\n        return_layers[name] = out_map[i] if out_map is not None else feature_info.out_indices[i]\n    return return_layers\n\n\nclass FeatureDictNet(nn.ModuleDict):\n    \"\"\" Feature extractor with OrderedDict return\n\n    Wrap a model and extract features as specified by the out indices, the network is\n    partially re-built from contained modules.\n\n    There is a strong assumption that the modules have been registered into the model in the same\n    order as they are used. 
There should be no reuse of the same nn.Module more than once, including\n    trivial modules like `self.relu = nn.ReLU`.\n\n    Only submodules that are directly assigned to the model class (`model.feature1`) or at most\n    one Sequential container deep (`model.features.1`, with flatten_sequent=True) can be captured.\n    All Sequential containers that are directly assigned to the original model will have their\n    modules assigned to this module with the name `model.features.1` being changed to `model.features_1`\n    \"\"\"\n    def __init__(\n            self,\n            model: nn.Module,\n            out_indices: OutIndicesT = (0, 1, 2, 3, 4),\n            out_map: Sequence[Union[int, str]] = None,\n            output_fmt: str = 'NCHW',\n            feature_concat: bool = False,\n            flatten_sequential: bool = False,\n    ):\n        \"\"\"\n        Args:\n            model: Model from which to extract features.\n            out_indices: Output indices of the model features to extract.\n            out_map: Return id mapping for each output index, otherwise str(index) is used.\n            feature_concat: Concatenate intermediate features that are lists or tuples instead of selecting\n                first element e.g. 
`x[0]`\n            flatten_sequential: Flatten first two-levels of sequential modules in model (re-writes model modules)\n        \"\"\"\n        super().__init__()\n        self.feature_info = _get_feature_info(model, out_indices)\n        self.output_fmt = Format(output_fmt)\n        self.concat = feature_concat\n        self.grad_checkpointing = False\n        self.return_layers = {}\n\n        return_layers = _get_return_layers(self.feature_info, out_map)\n        modules = _module_list(model, flatten_sequential=flatten_sequential)\n        remaining = set(return_layers.keys())\n        layers = OrderedDict()\n        for new_name, old_name, module in modules:\n            layers[new_name] = module\n            if old_name in remaining:\n                # return id has to be consistently str type for torchscript\n                self.return_layers[new_name] = str(return_layers[old_name])\n                remaining.remove(old_name)\n            if not remaining:\n                break\n        assert not remaining and len(self.return_layers) == len(return_layers), \\\n            f'Return layers ({remaining}) are not present in model'\n        self.update(layers)\n\n    def set_grad_checkpointing(self, enable: bool = True):\n        self.grad_checkpointing = enable\n\n    def _collect(self, x) -> (Dict[str, torch.Tensor]):\n        out = OrderedDict()\n        for i, (name, module) in enumerate(self.items()):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                # Skipping checkpoint of first module because need a gradient at input\n                # Skipping last because networks with in-place ops might fail w/ checkpointing enabled\n                # NOTE: first_or_last module could be static, but recalc in is_scripting guard to avoid jit issues\n                first_or_last_module = i == 0 or i == max(len(self) - 1, 0)\n                x = module(x) if first_or_last_module else checkpoint(module, x)\n            else:\n  
              x = module(x)\n\n            if name in self.return_layers:\n                out_id = self.return_layers[name]\n                if isinstance(x, (tuple, list)):\n                    # If model tap is a tuple or list, concat or select first element\n                    # FIXME this may need to be more generic / flexible for some nets\n                    out[out_id] = torch.cat(x, 1) if self.concat else x[0]\n                else:\n                    out[out_id] = x\n        return out\n\n    def forward(self, x) -> Dict[str, torch.Tensor]:\n        return self._collect(x)\n\n\nclass FeatureListNet(FeatureDictNet):\n    \"\"\" Feature extractor with list return\n\n    A specialization of FeatureDictNet that always returns features as a list (values() of dict).\n    \"\"\"\n    def __init__(\n            self,\n            model: nn.Module,\n            out_indices: OutIndicesT = (0, 1, 2, 3, 4),\n            output_fmt: str = 'NCHW',\n            feature_concat: bool = False,\n            flatten_sequential: bool = False,\n    ):\n        \"\"\"\n        Args:\n            model: Model from which to extract features.\n            out_indices: Output indices of the model features to extract.\n            feature_concat: Concatenate intermediate features that are lists or tuples instead of selecting\n                first element e.g. 
`x[0]`\n            flatten_sequential: Flatten first two-levels of sequential modules in model (re-writes model modules)\n        \"\"\"\n        super().__init__(\n            model,\n            out_indices=out_indices,\n            output_fmt=output_fmt,\n            feature_concat=feature_concat,\n            flatten_sequential=flatten_sequential,\n        )\n\n    def forward(self, x) -> (List[torch.Tensor]):\n        return list(self._collect(x).values())\n\n\nclass FeatureHookNet(nn.ModuleDict):\n    \"\"\" FeatureHookNet\n\n    Wrap a model and extract features specified by the out indices using forward/forward-pre hooks.\n\n    If `no_rewrite` is True, features are extracted via hooks without modifying the underlying\n    network in any way.\n\n    If `no_rewrite` is False, the model will be re-written as in the\n    FeatureList/FeatureDict case by folding first to second (Sequential only) level modules into this one.\n\n    FIXME this does not currently work with Torchscript, see FeatureHooks class\n    \"\"\"\n    def __init__(\n            self,\n            model: nn.Module,\n            out_indices: OutIndicesT = (0, 1, 2, 3, 4),\n            out_map: Optional[Sequence[Union[int, str]]] = None,\n            return_dict: bool = False,\n            output_fmt: str = 'NCHW',\n            no_rewrite: Optional[bool] = None,\n            flatten_sequential: bool = False,\n            default_hook_type: str = 'forward',\n    ):\n        \"\"\"\n\n        Args:\n            model: Model from which to extract features.\n            out_indices: Output indices of the model features to extract.\n            out_map: Return id mapping for each output index, otherwise str(index) is used.\n            return_dict: Output features as a dict.\n            no_rewrite: Enforce that model is not re-written if True, ie no modules are removed / changed.\n                flatten_sequential arg must also be False if this is set True.\n            flatten_sequential: 
Re-write modules by flattening first two levels of nn.Sequential containers.\n            default_hook_type: The default hook type to use if not specified in model.feature_info.\n        \"\"\"\n        super().__init__()\n        assert not torch.jit.is_scripting()\n        self.feature_info = _get_feature_info(model, out_indices)\n        self.return_dict = return_dict\n        self.output_fmt = Format(output_fmt)\n        self.grad_checkpointing = False\n        if no_rewrite is None:\n            no_rewrite = not flatten_sequential\n        layers = OrderedDict()\n        hooks = []\n        if no_rewrite:\n            assert not flatten_sequential\n            if hasattr(model, 'reset_classifier'):  # make sure classifier is removed?\n                model.reset_classifier(0)\n            layers['body'] = model\n            hooks.extend(self.feature_info.get_dicts())\n        else:\n            modules = _module_list(model, flatten_sequential=flatten_sequential)\n            remaining = {\n                f['module']: f['hook_type'] if 'hook_type' in f else default_hook_type\n                for f in self.feature_info.get_dicts()\n            }\n            for new_name, old_name, module in modules:\n                layers[new_name] = module\n                for fn, fm in module.named_modules(prefix=old_name):\n                    if fn in remaining:\n                        hooks.append(dict(module=fn, hook_type=remaining[fn]))\n                        del remaining[fn]\n                if not remaining:\n                    break\n            assert not remaining, f'Return layers ({remaining}) are not present in model'\n        self.update(layers)\n        self.hooks = FeatureHooks(hooks, model.named_modules(), out_map=out_map)\n\n    def set_grad_checkpointing(self, enable: bool = True):\n        self.grad_checkpointing = enable\n\n    def forward(self, x):\n        for i, (name, module) in enumerate(self.items()):\n            if self.grad_checkpointing 
and not torch.jit.is_scripting():\n                # Skipping checkpoint of first module because need a gradient at input\n                # Skipping last because networks with in-place ops might fail w/ checkpointing enabled\n                # NOTE: first_or_last module could be static, but recalc in is_scripting guard to avoid jit issues\n                first_or_last_module = i == 0 or i == max(len(self) - 1, 0)\n                x = module(x) if first_or_last_module else checkpoint(module, x)\n            else:\n                x = module(x)\n        out = self.hooks.get_output(x.device)\n        return out if self.return_dict else list(out.values())\n\n\nclass FeatureGetterNet(nn.ModuleDict):\n    \"\"\" FeatureGetterNet\n\n    Wrap models with a feature getter method, like 'get_intermediate_layers'\n\n    \"\"\"\n    def __init__(\n            self,\n            model: nn.Module,\n            out_indices: OutIndicesT = 4,\n            out_map: Optional[Sequence[Union[int, str]]] = None,\n            return_dict: bool = False,\n            output_fmt: str = 'NCHW',\n            norm: bool = False,\n            prune: bool = True,\n    ):\n        \"\"\"\n\n        Args:\n            model: Model to wrap.\n            out_indices: Indices of features to extract.\n            out_map: Remap feature names for dict output (WIP, not supported).\n            return_dict: Return features as dictionary instead of list (WIP, not supported).\n            norm: Apply final model norm to all output features (if possible).\n        \"\"\"\n        super().__init__()\n        if prune and hasattr(model, 'prune_intermediate_layers'):\n            # replace out_indices after they've been normalized, -ve indices will be invalid after prune\n            out_indices = model.prune_intermediate_layers(\n                out_indices,\n                prune_norm=not norm,\n            )\n        self.feature_info = _get_feature_info(model, out_indices)\n        self.model = model\n    
    self.out_indices = out_indices\n        self.out_map = out_map\n        self.return_dict = return_dict\n        self.output_fmt = Format(output_fmt)\n        self.norm = norm\n\n    def forward(self, x):\n        features = self.model.forward_intermediates(\n            x,\n            indices=self.out_indices,\n            norm=self.norm,\n            output_fmt=self.output_fmt,\n            intermediates_only=True,\n        )\n        return features\n"
  },
  {
    "path": "timm/models/_features_fx.py",
    "content": "\"\"\" PyTorch FX Based Feature Extraction Helpers\nUsing https://pytorch.org/vision/stable/feature_extraction.html\n\"\"\"\nfrom typing import Callable, Dict, List, Optional, Union, Tuple, Type\n\nimport torch\nfrom torch import nn\n\nfrom timm.layers import (\n    create_feature_extractor,\n    get_graph_node_names,\n    register_notrace_module,\n    register_notrace_function,\n    is_notrace_module,\n    is_notrace_function,\n    get_notrace_functions,\n    get_notrace_modules,\n    Format,\n )\nfrom ._features import _get_feature_info, _get_return_layers\n\n\n\n__all__ = [\n    'register_notrace_module',\n    'is_notrace_module',\n    'get_notrace_modules',\n    'register_notrace_function',\n    'is_notrace_function',\n    'get_notrace_functions',\n    'create_feature_extractor',\n    'get_graph_node_names',\n    'FeatureGraphNet',\n    'GraphExtractNet',\n]\n\n\nclass FeatureGraphNet(nn.Module):\n    \"\"\" A FX Graph based feature extractor that works with the model feature_info metadata\n    \"\"\"\n    return_dict: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            model: nn.Module,\n            out_indices: Tuple[int, ...],\n            out_map: Optional[Dict] = None,\n            output_fmt: str = 'NCHW',\n            return_dict: bool = False,\n    ):\n        super().__init__()\n        self.feature_info = _get_feature_info(model, out_indices)\n        if out_map is not None:\n            assert len(out_map) == len(out_indices)\n        self.output_fmt = Format(output_fmt)\n        return_nodes = _get_return_layers(self.feature_info, out_map)\n        self.graph_module = create_feature_extractor(model, return_nodes)\n        self.return_dict = return_dict\n\n    def forward(self, x):\n        out = self.graph_module(x)\n        if self.return_dict:\n            return out\n        return list(out.values())\n\n\nclass GraphExtractNet(nn.Module):\n    \"\"\" A standalone feature extraction wrapper that maps dict -> 
list or single tensor\n    NOTE:\n      * one can use feature_extractor directly if dictionary output is desired\n      * unlike FeatureGraphNet, this is intended to be used standalone and not with model feature_info\n      metadata for builtin feature extraction mode\n      * create_feature_extractor can be used directly if dictionary output is acceptable\n\n    Args:\n        model: model to extract features from\n        return_nodes: node names to return features from (dict or list)\n        squeeze_out: if only one output, and output in list format, flatten to single tensor\n        return_dict: return as dictionary from extractor with node names as keys, ignores squeeze_out arg\n    \"\"\"\n    return_dict: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            model: nn.Module,\n            return_nodes: Union[Dict[str, str], List[str]],\n            squeeze_out: bool = True,\n            return_dict: bool = False,\n    ):\n        super().__init__()\n        self.squeeze_out = squeeze_out\n        self.graph_module = create_feature_extractor(model, return_nodes)\n        self.return_dict = return_dict\n\n    def forward(self, x) -> Union[List[torch.Tensor], torch.Tensor]:\n        out = self.graph_module(x)\n        if self.return_dict:\n            return out\n        out = list(out.values())\n        return out[0] if self.squeeze_out and len(out) == 1 else out\n"
  },
  {
    "path": "timm/models/_helpers.py",
    "content": "\"\"\" Model creation / weight loading / state_dict helpers\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport argparse\nimport logging\nimport os\nimport pickle\nfrom typing import Any, Callable, Dict, Optional, Union\n\nimport torch\n\ntry:\n    import safetensors.torch\n\n    _has_safetensors = True\nexcept ImportError:\n    _has_safetensors = False\n\n_logger = logging.getLogger(__name__)\n\n__all__ = [\n    'clean_state_dict',\n    'load_checkpoint',\n    'load_state_dict',\n    'remap_state_dict',\n    'resume_checkpoint',\n]\n\n\ndef _checkpoint_unsafe_globals(checkpoint_path: str) -> str:\n    if not hasattr(torch.serialization, 'get_unsafe_globals_in_checkpoint'):\n        return ''\n    try:\n        unsafe_globals = torch.serialization.get_unsafe_globals_in_checkpoint(str(checkpoint_path))\n    except Exception:\n        unsafe_globals = []\n    return f\" Unsupported globals: {', '.join(unsafe_globals)}.\" if unsafe_globals else ''\n\n\ndef _torch_load(\n        checkpoint_path: str,\n        map_location: Union[str, torch.device] = 'cpu',\n        weights_only: bool = True,\n):\n    use_safe_globals = weights_only and hasattr(torch.serialization, 'safe_globals')\n    try:\n        if use_safe_globals:\n            # Compatibility: timm training checkpoints often include argparse.Namespace in `args`.\n            with torch.serialization.safe_globals([argparse.Namespace]):\n                return torch.load(checkpoint_path, map_location=map_location, weights_only=weights_only)\n        return torch.load(checkpoint_path, map_location=map_location, weights_only=weights_only)\n    except TypeError as e:\n        if not weights_only:\n            return torch.load(checkpoint_path, map_location=map_location)\n        raise RuntimeError(\n            f\"weights_only=True is not supported by this PyTorch build (torch=={torch.__version__}). \"\n            \"No automatic unsafe pickle fallback is performed. 
\"\n            \"Upgrade PyTorch, or explicitly set weights_only=False only for trusted local checkpoints.\"\n        ) from e\n    except pickle.UnpicklingError as e:\n        if not weights_only:\n            raise\n        raise RuntimeError(\n            \"weights_only=True blocked loading this checkpoint because it requires non-allowlisted pickle globals.\"\n            f\"{_checkpoint_unsafe_globals(checkpoint_path)} \"\n            \"No automatic unsafe pickle fallback is performed. \"\n            \"If this checkpoint is trusted, retry with weights_only=False.\"\n        ) from e\n\n\ndef _remove_prefix(text: str, prefix: str) -> str:\n    # FIXME replace with 3.9 stdlib fn when min at 3.9\n    if text.startswith(prefix):\n        return text[len(prefix):]\n    return text\n\n\ndef clean_state_dict(state_dict: Dict[str, Any]) -> Dict[str, Any]:\n    # 'clean' checkpoint by removing .module prefix from state dict if it exists from parallel training\n    cleaned_state_dict = {}\n    to_remove = (\n        'module.',  # DDP wrapper\n        '_orig_mod.',  # torchcompile dynamo wrapper\n    )\n    for k, v in state_dict.items():\n        for r in to_remove:\n            k = _remove_prefix(k, r)\n        cleaned_state_dict[k] = v\n    return cleaned_state_dict\n\n\ndef load_state_dict(\n        checkpoint_path: str,\n        use_ema: bool = True,\n        device: Union[str, torch.device] = 'cpu',\n        weights_only: bool = True,\n) -> Dict[str, Any]:\n    \"\"\"Load state dictionary from checkpoint file.\n\n    Args:\n        checkpoint_path: Path to checkpoint file.\n        use_ema: Whether to use EMA weights if available.\n        device: Device to load checkpoint to.\n        weights_only: Whether to load only weights (torch.load parameter).\n\n    Returns:\n        State dictionary loaded from checkpoint.\n    \"\"\"\n    if checkpoint_path and os.path.isfile(checkpoint_path):\n        # Check if safetensors or not and load weights accordingly\n        
if str(checkpoint_path).endswith(\".safetensors\"):\n            assert _has_safetensors, \"`pip install safetensors` to use .safetensors\"\n            checkpoint = safetensors.torch.load_file(checkpoint_path, device=device)\n        else:\n            checkpoint = _torch_load(checkpoint_path, map_location=device, weights_only=weights_only)\n\n        state_dict_key = ''\n        if isinstance(checkpoint, dict):\n            if use_ema and checkpoint.get('state_dict_ema', None) is not None:\n                state_dict_key = 'state_dict_ema'\n            elif use_ema and checkpoint.get('model_ema', None) is not None:\n                state_dict_key = 'model_ema'\n            elif 'state_dict' in checkpoint:\n                state_dict_key = 'state_dict'\n            elif 'model' in checkpoint:\n                state_dict_key = 'model'\n        state_dict = clean_state_dict(checkpoint[state_dict_key] if state_dict_key else checkpoint)\n        _logger.info(\"Loaded {} from checkpoint '{}'\".format(state_dict_key, checkpoint_path))\n        return state_dict\n    else:\n        _logger.error(\"No checkpoint found at '{}'\".format(checkpoint_path))\n        raise FileNotFoundError()\n\n\ndef load_checkpoint(\n        model: torch.nn.Module,\n        checkpoint_path: str,\n        use_ema: bool = True,\n        device: Union[str, torch.device] = 'cpu',\n        strict: bool = True,\n        remap: bool = False,\n        filter_fn: Optional[Callable] = None,\n        weights_only: bool = True,\n) -> Any:\n    \"\"\"Load checkpoint into model.\n\n    Args:\n        model: Model to load checkpoint into.\n        checkpoint_path: Path to checkpoint file.\n        use_ema: Whether to use EMA weights if available.\n        device: Device to load checkpoint to.\n        strict: Whether to strictly enforce state_dict keys match.\n        remap: Whether to remap state dict keys by order.\n        filter_fn: Optional function to filter state dict.\n        weights_only: Whether 
to load only weights (torch.load parameter).\n\n    Returns:\n        Incompatible keys from model.load_state_dict().\n    \"\"\"\n    if os.path.splitext(checkpoint_path)[-1].lower() in ('.npz', '.npy'):\n        # numpy checkpoint, try to load via model specific load_pretrained fn\n        if hasattr(model, 'load_pretrained'):\n            model.load_pretrained(checkpoint_path)\n        else:\n            raise NotImplementedError('Model cannot load numpy checkpoint')\n        return\n\n    state_dict = load_state_dict(checkpoint_path, use_ema, device=device, weights_only=weights_only)\n    if remap:\n        state_dict = remap_state_dict(state_dict, model)\n    elif filter_fn:\n        state_dict = filter_fn(state_dict, model)\n    incompatible_keys = model.load_state_dict(state_dict, strict=strict)\n    return incompatible_keys\n\n\ndef remap_state_dict(\n        state_dict: Dict[str, Any],\n        model: torch.nn.Module,\n        allow_reshape: bool = True\n) -> Dict[str, Any]:\n    \"\"\"Remap checkpoint by iterating over state dicts in order (ignoring original keys).\n\n    This assumes models (and originating state dict) were created with params registered in same order.\n\n    Args:\n        state_dict: State dict to remap.\n        model: Model whose state dict keys to use.\n        allow_reshape: Whether to allow reshaping tensors to match.\n\n    Returns:\n        Remapped state dictionary.\n    \"\"\"\n    out_dict = {}\n    for (ka, va), (kb, vb) in zip(model.state_dict().items(), state_dict.items()):\n        assert va.numel() == vb.numel(), f'Tensor size mismatch {ka}: {va.shape} vs {kb}: {vb.shape}. Remap failed.'\n        if va.shape != vb.shape:\n            if allow_reshape:\n                vb = vb.reshape(va.shape)\n            else:\n                assert False, f'Tensor shape mismatch {ka}: {va.shape} vs {kb}: {vb.shape}. 
Remap failed.'\n        out_dict[ka] = vb\n    return out_dict\n\n\ndef resume_checkpoint(\n        model: torch.nn.Module,\n        checkpoint_path: str,\n        optimizer: Optional[torch.optim.Optimizer] = None,\n        loss_scaler: Optional[Any] = None,\n        log_info: bool = True,\n        weights_only: bool = True,\n) -> Optional[int]:\n    \"\"\"Resume training from checkpoint.\n\n    Args:\n        model: Model to load checkpoint into.\n        checkpoint_path: Path to checkpoint file.\n        optimizer: Optional optimizer to restore state.\n        loss_scaler: Optional AMP loss scaler to restore state.\n        log_info: Whether to log loading info.\n        weights_only: Whether to load only weights via torch.load.\n\n    Returns:\n        Resume epoch number if available, else None.\n    \"\"\"\n    resume_epoch = None\n    if os.path.isfile(checkpoint_path):\n        checkpoint = _torch_load(checkpoint_path, map_location='cpu', weights_only=weights_only)\n        if isinstance(checkpoint, dict) and 'state_dict' in checkpoint:\n            if log_info:\n                _logger.info('Restoring model state from checkpoint...')\n            state_dict = clean_state_dict(checkpoint['state_dict'])\n            model.load_state_dict(state_dict)\n\n            if optimizer is not None and 'optimizer' in checkpoint:\n                if log_info:\n                    _logger.info('Restoring optimizer state from checkpoint...')\n                optimizer.load_state_dict(checkpoint['optimizer'])\n\n            if loss_scaler is not None and loss_scaler.state_dict_key in checkpoint:\n                if log_info:\n                    _logger.info('Restoring AMP loss scaler state from checkpoint...')\n                loss_scaler.load_state_dict(checkpoint[loss_scaler.state_dict_key])\n\n            if 'epoch' in checkpoint:\n                resume_epoch = checkpoint['epoch']\n                if 'version' in checkpoint and checkpoint['version'] > 1:\n             
       resume_epoch += 1  # start at the next epoch, old checkpoints incremented before save\n\n                if log_info:\n                    _logger.info(\"Loaded checkpoint '{}' (epoch {})\".format(checkpoint_path, checkpoint['epoch']))\n        else:\n            model.load_state_dict(checkpoint)\n            if log_info:\n                _logger.info(\"Loaded checkpoint '{}'\".format(checkpoint_path))\n        return resume_epoch\n    else:\n        _logger.error(\"No checkpoint found at '{}'\".format(checkpoint_path))\n        raise FileNotFoundError()\n"
  },
  {
    "path": "timm/models/_hub.py",
    "content": "import hashlib\nimport json\nimport logging\nimport os\nfrom functools import partial\nfrom pathlib import Path\nfrom tempfile import TemporaryDirectory\nfrom typing import Any, Dict, Iterable, List, Optional, Tuple, Union\n\nimport torch\nfrom torch.hub import HASH_REGEX, download_url_to_file, urlparse\n\ntry:\n    from torch.hub import get_dir\nexcept ImportError:\n    from torch.hub import _get_torch_home as get_dir\n\ntry:\n    import safetensors.torch\n    _has_safetensors = True\nexcept ImportError:\n    _has_safetensors = False\n\ntry:\n    from typing import Literal\nexcept ImportError:\n    from typing_extensions import Literal\n\nfrom timm import __version__\nfrom ._helpers import _torch_load, load_state_dict\nfrom ._pretrained import filter_pretrained_cfg\n\ntry:\n    from huggingface_hub import HfApi, hf_hub_download, model_info\n    from huggingface_hub.utils import EntryNotFoundError, RepositoryNotFoundError\n    hf_hub_download = partial(hf_hub_download, library_name=\"timm\", library_version=__version__)\n    _has_hf_hub = True\nexcept ImportError:\n    hf_hub_download = None\n    _has_hf_hub = False\n\n_logger = logging.getLogger(__name__)\n\n__all__ = ['get_cache_dir', 'download_cached_file', 'has_hf_hub', 'hf_split', 'load_model_config_from_hf',\n           'load_state_dict_from_hf', 'save_for_hf', 'push_to_hf_hub']\n\n# Default name for a weights file hosted on the Huggingface Hub.\nHF_WEIGHTS_NAME = \"pytorch_model.bin\"  # default pytorch pkl\nHF_SAFE_WEIGHTS_NAME = \"model.safetensors\"  # safetensors version\nHF_OPEN_CLIP_WEIGHTS_NAME = \"open_clip_pytorch_model.bin\"  # default pytorch pkl\nHF_OPEN_CLIP_SAFE_WEIGHTS_NAME = \"open_clip_model.safetensors\"  # safetensors version\n\n\ndef get_cache_dir(child_dir: str = ''):\n    \"\"\"\n    Returns the location of the directory where models are cached (and creates it if necessary).\n    \"\"\"\n    # Issue warning to move data if old env is set\n    if 
os.getenv('TORCH_MODEL_ZOO'):\n        _logger.warning('TORCH_MODEL_ZOO is deprecated, please use env TORCH_HOME instead')\n\n    hub_dir = get_dir()\n    child_dir = () if not child_dir else (child_dir,)\n    model_dir = os.path.join(hub_dir, 'checkpoints', *child_dir)\n    os.makedirs(model_dir, exist_ok=True)\n    return model_dir\n\n\ndef download_cached_file(\n        url: Union[str, List[str], Tuple[str, str]],\n        check_hash: bool = True,\n        progress: bool = False,\n        cache_dir: Optional[Union[str, Path]] = None,\n):\n    if isinstance(url, (list, tuple)):\n        url, filename = url\n    else:\n        parts = urlparse(url)\n        filename = os.path.basename(parts.path)\n    if cache_dir:\n        os.makedirs(cache_dir, exist_ok=True)\n    else:\n        cache_dir = get_cache_dir()\n    cached_file = os.path.join(cache_dir, filename)\n    if not os.path.exists(cached_file):\n        _logger.info('Downloading: \"{}\" to {}\\n'.format(url, cached_file))\n        hash_prefix = None\n        if check_hash:\n            r = HASH_REGEX.search(filename)  # r is Optional[Match[str]]\n            hash_prefix = r.group(1) if r else None\n        download_url_to_file(url, cached_file, hash_prefix, progress=progress)\n    return cached_file\n\n\ndef check_cached_file(\n        url: Union[str, List[str], Tuple[str, str]],\n        check_hash: bool = True,\n        cache_dir: Optional[Union[str, Path]] = None,\n):\n    if isinstance(url, (list, tuple)):\n        url, filename = url\n    else:\n        parts = urlparse(url)\n        filename = os.path.basename(parts.path)\n    if not cache_dir:\n        cache_dir = get_cache_dir()\n    cached_file = os.path.join(cache_dir, filename)\n    if os.path.exists(cached_file):\n        if check_hash:\n            r = HASH_REGEX.search(filename)  # r is Optional[Match[str]]\n            hash_prefix = r.group(1) if r else None\n            if hash_prefix:\n                with open(cached_file, 'rb') as f:\n     
               hd = hashlib.sha256(f.read()).hexdigest()\n                    if hd[:len(hash_prefix)] != hash_prefix:\n                        return False\n        return True\n    return False\n\n\ndef has_hf_hub(necessary: bool = False):\n    if not _has_hf_hub and necessary:\n        # if no HF Hub module installed, and it is necessary to continue, raise error\n        raise RuntimeError(\n            'Hugging Face hub model specified but package not installed. Run `pip install huggingface_hub`.')\n    return _has_hf_hub\n\n\ndef hf_split(hf_id: str):\n    # FIXME I may change @ -> # and be parsed as fragment in a URI model name scheme\n    rev_split = hf_id.split('@')\n    assert 0 < len(rev_split) <= 2, 'hf_hub id should only contain one @ character to identify revision.'\n    hf_model_id = rev_split[0]\n    hf_revision = rev_split[-1] if len(rev_split) > 1 else None\n    return hf_model_id, hf_revision\n\n\ndef load_cfg_from_json(json_file: Union[str, Path]):\n    with open(json_file, \"r\", encoding=\"utf-8\") as reader:\n        text = reader.read()\n    return json.loads(text)\n\n\ndef download_from_hf(\n        model_id: str,\n        filename: str,\n        cache_dir: Optional[Union[str, Path]] = None,\n):\n    hf_model_id, hf_revision = hf_split(model_id)\n    return hf_hub_download(\n        hf_model_id,\n        filename,\n        revision=hf_revision,\n        cache_dir=cache_dir,\n    )\n\n\ndef _parse_model_cfg(\n        cfg: Dict[str, Any],\n        extra_fields: Dict[str, Any],\n) -> Tuple[Dict[str, Any], str, Dict[str, Any]]:\n    \"\"\"\"\"\"\n    # legacy \"single‑dict\" → split\n    if \"pretrained_cfg\" not in cfg:\n        pretrained_cfg = cfg\n        cfg = {\n            \"architecture\": pretrained_cfg.pop(\"architecture\"),\n            \"num_features\": pretrained_cfg.pop(\"num_features\", None),\n            \"pretrained_cfg\": pretrained_cfg,\n        }\n        if \"labels\" in pretrained_cfg:                  # rename ‑‑> 
label_names\n            pretrained_cfg[\"label_names\"] = pretrained_cfg.pop(\"labels\")\n\n    pretrained_cfg = cfg[\"pretrained_cfg\"]\n    pretrained_cfg.update(extra_fields)\n\n    # top‑level overrides\n    if \"num_classes\" in cfg:\n        pretrained_cfg[\"num_classes\"] = cfg[\"num_classes\"]\n    if \"label_names\" in cfg:\n        pretrained_cfg[\"label_names\"] = cfg.pop(\"label_names\")\n    if \"label_descriptions\" in cfg:\n        pretrained_cfg[\"label_descriptions\"] = cfg.pop(\"label_descriptions\")\n\n    model_args = cfg.get(\"model_args\", {})\n    model_name = cfg[\"architecture\"]\n    return pretrained_cfg, model_name, model_args\n\n\ndef load_model_config_from_hf(\n        model_id: str,\n        cache_dir: Optional[Union[str, Path]] = None,\n):\n    \"\"\"Original HF‑Hub loader (unchanged download, shared parsing).\"\"\"\n    assert has_hf_hub(True)\n    cfg_path = download_from_hf(model_id, \"config.json\", cache_dir=cache_dir)\n    cfg = load_cfg_from_json(cfg_path)\n    return _parse_model_cfg(cfg, {\"hf_hub_id\": model_id, \"source\": \"hf-hub\"})\n\n\ndef load_model_config_from_path(\n        model_path: Union[str, Path],\n):\n    \"\"\"Load from ``<model_path>/config.json`` on the local filesystem.\"\"\"\n    model_path = Path(model_path)\n    cfg_file = model_path / \"config.json\"\n    if not cfg_file.is_file():\n        raise FileNotFoundError(f\"Config file not found: {cfg_file}\")\n    cfg = load_cfg_from_json(cfg_file)\n    extra_fields = {\"file\": str(model_path), \"source\": \"local-dir\"}\n    return _parse_model_cfg(cfg, extra_fields=extra_fields)\n\n\ndef load_state_dict_from_hf(\n        model_id: str,\n        filename: str = HF_WEIGHTS_NAME,\n        weights_only: bool = True,\n        cache_dir: Optional[Union[str, Path]] = None,\n):\n    assert has_hf_hub(True)\n    hf_model_id, hf_revision = hf_split(model_id)\n\n    # Look for .safetensors alternatives and load from it if it exists\n    if _has_safetensors:\n     
   for safe_filename in _get_safe_alternatives(filename):\n            try:\n                cached_safe_file = hf_hub_download(\n                    repo_id=hf_model_id,\n                    filename=safe_filename,\n                    revision=hf_revision,\n                    cache_dir=cache_dir,\n                )\n                _logger.info(\n                    f\"[{model_id}] Safe alternative available for '{filename}' \"\n                    f\"(as '{safe_filename}'). Loading weights using safetensors.\")\n                return safetensors.torch.load_file(cached_safe_file, device=\"cpu\")\n            except EntryNotFoundError:\n                pass\n\n    # Otherwise, load using pytorch.load\n    cached_file = hf_hub_download(\n        hf_model_id,\n        filename=filename,\n        revision=hf_revision,\n        cache_dir=cache_dir,\n    )\n    _logger.debug(f\"[{model_id}] Safe alternative not found for '{filename}'. Loading weights using default pytorch.\")\n    state_dict = _torch_load(cached_file, map_location='cpu', weights_only=weights_only)\n\n    return state_dict\n\n\n_PREFERRED_FILES = (\n    \"model.safetensors\",\n    \"pytorch_model.bin\",\n    \"pytorch_model.pth\",\n    \"model.pth\",\n    \"open_clip_model.safetensors\",\n    \"open_clip_pytorch_model.safetensors\",\n    \"open_clip_pytorch_model.bin\",\n    \"open_clip_pytorch_model.pth\",\n)\n_EXT_PRIORITY = ('.safetensors', '.pth', '.pth.tar', '.bin')\n\ndef load_state_dict_from_path(\n        path: str,\n        weights_only: bool = True,\n):\n    found_file = None\n    for fname in _PREFERRED_FILES:\n        p = path / fname\n        if p.exists():\n            _logger.info(f\"Found preferred checkpoint: {p.name}\")\n            found_file = p\n            break\n\n    # fallback: first match per‑extension class\n    for ext in _EXT_PRIORITY:\n        files = sorted(path.glob(f\"*{ext}\"))\n        if files:\n            if len(files) > 1:\n                _logger.warning(\n      
              f\"Multiple {ext} checkpoints in {path}: {[f.name for f in files]}. \"\n                    f\"Using '{files[0].name}'.\"\n                )\n            found_file = files[0]\n            break\n\n    if not found_file:\n        raise RuntimeError(f\"No suitable checkpoints found in {path}.\")\n\n    state_dict = load_state_dict(found_file, weights_only=weights_only)\n\n    return state_dict\n\n\ndef load_custom_from_hf(\n        model_id: str,\n        filename: str,\n        model: torch.nn.Module,\n        cache_dir: Optional[Union[str, Path]] = None,\n):\n    assert has_hf_hub(True)\n    hf_model_id, hf_revision = hf_split(model_id)\n    cached_file = hf_hub_download(\n        hf_model_id,\n        filename=filename,\n        revision=hf_revision,\n        cache_dir=cache_dir,\n    )\n    return model.load_pretrained(cached_file)\n\n\ndef save_config_for_hf(\n        model: torch.nn.Module,\n        config_path: str,\n        model_config: Optional[dict] = None,\n        model_args: Optional[dict] = None\n):\n    model_config = model_config or {}\n    hf_config = {}\n    pretrained_cfg = filter_pretrained_cfg(model.pretrained_cfg, remove_source=True, remove_null=True)\n    # set some values at root config level\n    hf_config['architecture'] = pretrained_cfg.pop('architecture')\n    hf_config['num_classes'] = model_config.pop('num_classes', model.num_classes)\n\n    # NOTE these attr saved for informational purposes, do not impact model build\n    hf_config['num_features'] = model_config.pop('num_features', model.num_features)\n    global_pool_type = model_config.pop('global_pool', getattr(model, 'global_pool', None))\n    if isinstance(global_pool_type, str) and global_pool_type:\n        hf_config['global_pool'] = global_pool_type\n\n    # Save class label info\n    if 'labels' in model_config:\n        _logger.warning(\n            \"'labels' as a config field for is deprecated. 
Please use 'label_names' and 'label_descriptions'.\"\n            \" Renaming provided 'labels' field to 'label_names'.\")\n        model_config.setdefault('label_names', model_config.pop('labels'))\n\n    label_names = model_config.pop('label_names', None)\n    if label_names:\n        assert isinstance(label_names, (dict, list, tuple))\n        # map label id (classifier index) -> unique label name (ie synset for ImageNet, MID for OpenImages)\n        # can be a dict id: name if there are id gaps, or tuple/list if no gaps.\n        hf_config['label_names'] = label_names\n\n    label_descriptions = model_config.pop('label_descriptions', None)\n    if label_descriptions:\n        assert isinstance(label_descriptions, dict)\n        # maps label names -> descriptions\n        hf_config['label_descriptions'] = label_descriptions\n\n    if model_args:\n        hf_config['model_args'] = model_args\n\n    hf_config['pretrained_cfg'] = pretrained_cfg\n    hf_config.update(model_config)\n\n    with config_path.open('w') as f:\n        json.dump(hf_config, f, indent=2)\n\n\ndef save_for_hf(\n        model: torch.nn.Module,\n        save_directory: str,\n        model_config: Optional[dict] = None,\n        model_args: Optional[dict] = None,\n        safe_serialization: Union[bool, Literal[\"both\"]] = False,\n):\n    assert has_hf_hub(True)\n    save_directory = Path(save_directory)\n    save_directory.mkdir(exist_ok=True, parents=True)\n\n    # Save model weights, either safely (using safetensors), or using legacy pytorch approach or both.\n    tensors = model.state_dict()\n    if safe_serialization is True or safe_serialization == \"both\":\n        assert _has_safetensors, \"`pip install safetensors` to use .safetensors\"\n        safetensors.torch.save_file(tensors, save_directory / HF_SAFE_WEIGHTS_NAME)\n    if safe_serialization is False or safe_serialization == \"both\":\n        torch.save(tensors, save_directory / HF_WEIGHTS_NAME)\n\n    config_path = 
save_directory / 'config.json'\n    save_config_for_hf(\n        model,\n        config_path,\n        model_config=model_config,\n        model_args=model_args,\n    )\n\n\ndef push_to_hf_hub(\n        model: torch.nn.Module,\n        repo_id: str,\n        commit_message: str = 'Add model',\n        token: Optional[str] = None,\n        revision: Optional[str] = None,\n        private: bool = False,\n        create_pr: bool = False,\n        model_config: Optional[dict] = None,\n        model_card: Optional[dict] = None,\n        model_args: Optional[dict] = None,\n        task_name: str = 'image-classification',\n        safe_serialization: Union[bool, Literal[\"both\"]] = 'both',\n):\n    \"\"\"\n    Arguments:\n        (...)\n        safe_serialization (`bool` or `\"both\"`, *optional*, defaults to `False`):\n            Whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`).\n            Can be set to `\"both\"` in order to push both safe and unsafe weights.\n    \"\"\"\n    api = HfApi(token=token, library_name=\"timm\", library_version=__version__)\n\n    # Create repo if it doesn't exist yet\n    repo_url = api.create_repo(repo_id, private=private, exist_ok=True)\n\n    # Can be different from the input `repo_id` if repo_owner was implicit\n    repo_id = repo_url.repo_id\n\n    # Check if README file already exist in repo\n    has_readme = api.file_exists(repo_id=repo_id, filename=\"README.md\", revision=revision)\n\n    # Dump model and push to Hub\n    with TemporaryDirectory() as tmpdir:\n        # Save model weights and config.\n        save_for_hf(\n            model,\n            tmpdir,\n            model_config=model_config,\n            model_args=model_args,\n            safe_serialization=safe_serialization,\n        )\n\n        # Add readme if it does not exist\n        if not has_readme:\n            model_card = model_card or {}\n            model_name = repo_id.split('/')[-1]\n            
readme_path = Path(tmpdir) / \"README.md\"\n            readme_text = generate_readme(model_card, model_name, task_name=task_name)\n            readme_path.write_text(readme_text)\n\n        # Upload model and return\n        return api.upload_folder(\n            repo_id=repo_id,\n            folder_path=tmpdir,\n            revision=revision,\n            create_pr=create_pr,\n            commit_message=commit_message,\n        )\n\n\ndef generate_readme(\n        model_card: dict,\n        model_name: str,\n        task_name: str = 'image-classification',\n):\n    tags = model_card.get('tags', None) or [task_name, 'timm', 'transformers']\n    readme_text = \"---\\n\"\n    if tags:\n        readme_text += \"tags:\\n\"\n        for t in tags:\n            readme_text += f\"- {t}\\n\"\n    readme_text += f\"pipeline_tag: {task_name}\\n\"\n    readme_text += f\"library_name: {model_card.get('library_name', 'timm')}\\n\"\n    readme_text += f\"license: {model_card.get('license', 'apache-2.0')}\\n\"\n    if 'license_name' in model_card:\n        readme_text += f\"license_name: {model_card.get('license_name')}\\n\"\n    if 'license_link' in model_card:\n        readme_text += f\"license_link: {model_card.get('license_link')}\\n\"\n    if 'details' in model_card and 'Dataset' in model_card['details']:\n        readme_text += 'datasets:\\n'\n        if isinstance(model_card['details']['Dataset'], (tuple, list)):\n            for d in model_card['details']['Dataset']:\n                readme_text += f\"- {d.lower()}\\n\"\n        else:\n            readme_text += f\"- {model_card['details']['Dataset'].lower()}\\n\"\n        if 'Pretrain Dataset' in model_card['details']:\n            if isinstance(model_card['details']['Pretrain Dataset'], (tuple, list)):\n                for d in model_card['details']['Pretrain Dataset']:\n                    readme_text += f\"- {d.lower()}\\n\"\n            else:\n                readme_text += f\"- {model_card['details']['Pretrain 
Dataset'].lower()}\\n\"\n    readme_text += \"---\\n\"\n    readme_text += f\"# Model card for {model_name}\\n\"\n    if 'description' in model_card:\n        readme_text += f\"\\n{model_card['description']}\\n\"\n    if 'details' in model_card:\n        readme_text += f\"\\n## Model Details\\n\"\n        for k, v in model_card['details'].items():\n            if isinstance(v, (list, tuple)):\n                readme_text += f\"- **{k}:**\\n\"\n                for vi in v:\n                    readme_text += f\"  - {vi}\\n\"\n            elif isinstance(v, dict):\n                readme_text += f\"- **{k}:**\\n\"\n                for ki, vi in v.items():\n                    readme_text += f\"  - {ki}: {vi}\\n\"\n            else:\n                readme_text += f\"- **{k}:** {v}\\n\"\n    if 'usage' in model_card:\n        readme_text += f\"\\n## Model Usage\\n\"\n        readme_text += model_card['usage']\n        readme_text += '\\n'\n\n    if 'comparison' in model_card:\n        readme_text += f\"\\n## Model Comparison\\n\"\n        readme_text += model_card['comparison']\n        readme_text += '\\n'\n\n    if 'citation' in model_card:\n        readme_text += f\"\\n## Citation\\n\"\n        if not isinstance(model_card['citation'], (list, tuple)):\n            citations = [model_card['citation']]\n        else:\n            citations = model_card['citation']\n        for c in citations:\n            readme_text += f\"```bibtex\\n{c}\\n```\\n\"\n    return readme_text\n\n\ndef _get_safe_alternatives(filename: str) -> Iterable[str]:\n    \"\"\"Returns potential safetensors alternatives for a given filename.\n\n    Use case:\n        When downloading a model from the Huggingface Hub, we first look if a .safetensors file exists and if yes, we use it.\n        Main use case is filename \"pytorch_model.bin\" => check for \"model.safetensors\" or \"pytorch_model.safetensors\".\n    \"\"\"\n    if filename == HF_WEIGHTS_NAME:\n        yield HF_SAFE_WEIGHTS_NAME\n    if 
filename == HF_OPEN_CLIP_WEIGHTS_NAME:\n        yield HF_OPEN_CLIP_SAFE_WEIGHTS_NAME\n    if filename not in (HF_WEIGHTS_NAME, HF_OPEN_CLIP_WEIGHTS_NAME) and filename.endswith(\".bin\"):\n        yield filename[:-4] + \".safetensors\"\n\n\ndef _get_license_from_hf_hub(model_id: Optional[str], hf_hub_id: Optional[str]) -> Optional[str]:\n    \"\"\"Retrieve license information for a model from Hugging Face Hub.\n\n    Fetches the license field from the model card metadata on Hugging Face Hub\n    for the specified model. Returns None if the model is not found, if\n    huggingface_hub is not installed, or if the model is marked as \"untrained\".\n\n    Args:\n        model_id: The model identifier/name. In the case of None we assume an untrained model.\n        hf_hub_id: The Hugging Face Hub organization/user ID. If it is None,\n            we will return None as we cannot infer the license terms.\n\n    Returns:\n        The license string in lowercase if found, None otherwise.\n\n    Note:\n        Requires huggingface_hub package to be installed. Will log a warning\n        and return None if the package is not available.\n    \"\"\"\n    if not has_hf_hub(False):\n        msg = \"For updated license information run `pip install huggingface_hub`.\"\n        _logger.warning(msg=msg)\n        return None\n\n    if not (model_id and hf_hub_id):\n        return None\n\n    repo_id: str = hf_hub_id + model_id\n\n    try:\n        info = model_info(repo_id=repo_id)\n\n    except RepositoryNotFoundError:\n        msg = f\"Repository {repo_id} was not found. Manual inspection of license needed.\"\n        _logger.warning(msg=msg)\n        return None\n\n    except Exception as _:\n        msg = f\"Error for {repo_id}. 
Manual inspection of license needed.\"\n        _logger.warning(msg=msg)\n        return None\n\n    license = info.card_data.get(\"license\") if info.card_data else None\n    if isinstance(license, str):\n        license = license.lower()\n\n    if license == 'other':\n        name = info.card_data.get(\"license_name\", None)\n\n        if name is not None:\n            return name\n\n    return license\n"
  },
  {
    "path": "timm/models/_manipulate.py",
    "content": "import collections.abc\nimport math\nimport re\nfrom collections import defaultdict\nfrom itertools import chain\nfrom typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.utils.checkpoint\nfrom torch import nn as nn\nfrom torch import Tensor\n\nfrom timm.layers import use_reentrant_ckpt\n\n\n__all__ = ['model_parameters', 'named_apply', 'named_modules', 'named_modules_with_params', 'adapt_input_conv',\n           'group_with_matcher', 'group_modules', 'group_parameters', 'flatten_modules', 'checkpoint_seq', 'checkpoint',\n           'reinit_non_persistent_buffers']\n\n\ndef model_parameters(model: nn.Module, exclude_head: bool = False):\n    if exclude_head:\n        # FIXME this a bit of a quick and dirty hack to skip classifier head params based on ordering\n        return [p for p in model.parameters()][:-2]\n    else:\n        return model.parameters()\n\n\ndef named_apply(\n        fn: Callable,\n        module: nn.Module, name='',\n        depth_first: bool = True,\n        include_root: bool = False,\n) -> nn.Module:\n    if not depth_first and include_root:\n        fn(module=module, name=name)\n    for child_name, child_module in module.named_children():\n        child_name = '.'.join((name, child_name)) if name else child_name\n        named_apply(fn=fn, module=child_module, name=child_name, depth_first=depth_first, include_root=True)\n    if depth_first and include_root:\n        fn(module=module, name=name)\n    return module\n\n\ndef named_modules(\n        module: nn.Module,\n        name: str = '',\n        depth_first: bool = True,\n        include_root: bool = False,\n):\n    if not depth_first and include_root:\n        yield name, module\n    for child_name, child_module in module.named_children():\n        child_name = '.'.join((name, child_name)) if name else child_name\n        yield from named_modules(\n            module=child_module, name=child_name, 
depth_first=depth_first, include_root=True)\n    if depth_first and include_root:\n        yield name, module\n\n\ndef named_modules_with_params(\n        module: nn.Module,\n        name: str = '',\n        depth_first: bool = True,\n        include_root: bool = False,\n):\n    if module._parameters and not depth_first and include_root:\n        yield name, module\n    for child_name, child_module in module.named_children():\n        child_name = '.'.join((name, child_name)) if name else child_name\n        yield from named_modules_with_params(\n            module=child_module, name=child_name, depth_first=depth_first, include_root=True)\n    if module._parameters and depth_first and include_root:\n        yield name, module\n\n\nMATCH_PREV_GROUP = (99999,)\n\n\ndef group_with_matcher(\n        named_objects: Iterator[Tuple[str, Any]],\n        group_matcher: Union[Dict, Callable],\n        return_values: bool = False,\n        reverse: bool = False\n):\n    if isinstance(group_matcher, dict):\n        # dictionary matcher contains a dict of raw-string regex expr that must be compiled\n        compiled = []\n        for group_ordinal, (group_name, mspec) in enumerate(group_matcher.items()):\n            if mspec is None:\n                continue\n            # map all matching specifications into 3-tuple (compiled re, prefix, suffix)\n            if isinstance(mspec, (tuple, list)):\n                # multi-entry match specifications require each sub-spec to be a 2-tuple (re, suffix)\n                for sspec in mspec:\n                    compiled += [(re.compile(sspec[0]), (group_ordinal,), sspec[1])]\n            else:\n                compiled += [(re.compile(mspec), (group_ordinal,), None)]\n        group_matcher = compiled\n\n    def _get_grouping(name):\n        if isinstance(group_matcher, (list, tuple)):\n            for match_fn, prefix, suffix in group_matcher:\n                r = match_fn.match(name)\n                if r:\n                    parts 
= (prefix, r.groups(), suffix)\n                    # map all tuple elem to int for numeric sort, filter out None entries\n                    return tuple(map(float, chain.from_iterable(filter(None, parts))))\n            return float('inf'),  # un-matched layers (neck, head) mapped to largest ordinal\n        else:\n            ord = group_matcher(name)\n            if not isinstance(ord, collections.abc.Iterable):\n                return ord,\n            return tuple(ord)\n\n    # map layers into groups via ordinals (ints or tuples of ints) from matcher\n    grouping = defaultdict(list)\n    for k, v in named_objects:\n        grouping[_get_grouping(k)].append(v if return_values else k)\n\n    # remap to integers\n    layer_id_to_param = defaultdict(list)\n    lid = -1\n    for k in sorted(filter(lambda x: x is not None, grouping.keys())):\n        if lid < 0 or k[-1] != MATCH_PREV_GROUP[0]:\n            lid += 1\n        layer_id_to_param[lid].extend(grouping[k])\n\n    if reverse:\n        assert not return_values, \"reverse mapping only sensible for name output\"\n        # output reverse mapping\n        param_to_layer_id = {}\n        for lid, lm in layer_id_to_param.items():\n            for n in lm:\n                param_to_layer_id[n] = lid\n        return param_to_layer_id\n\n    return layer_id_to_param\n\n\ndef group_parameters(\n        module: nn.Module,\n        group_matcher,\n        return_values: bool = False,\n        reverse: bool = False,\n):\n    return group_with_matcher(\n        module.named_parameters(), group_matcher, return_values=return_values, reverse=reverse)\n\n\ndef group_modules(\n        module: nn.Module,\n        group_matcher,\n        return_values: bool = False,\n        reverse: bool = False,\n):\n    return group_with_matcher(\n        named_modules_with_params(module), group_matcher, return_values=return_values, reverse=reverse)\n\n\ndef flatten_modules(\n        named_modules: Iterator[Tuple[str, nn.Module]],\n       
 depth: int = 1,\n        prefix: Union[str, Tuple[str, ...]] = '',\n        module_types: Union[str, Tuple[Type[nn.Module]]] = 'sequential',\n):\n    prefix_is_tuple = isinstance(prefix, tuple)\n    if isinstance(module_types, str):\n        if module_types == 'container':\n            module_types = (nn.Sequential, nn.ModuleList, nn.ModuleDict)\n        else:\n            module_types = (nn.Sequential,)\n    for name, module in named_modules:\n        if depth and isinstance(module, module_types):\n            yield from flatten_modules(\n                module.named_children(),\n                depth - 1,\n                prefix=(name,) if prefix_is_tuple else name,\n                module_types=module_types,\n            )\n        else:\n            if prefix_is_tuple:\n                name = prefix + (name,)\n                yield name, module\n            else:\n                if prefix:\n                    name = '.'.join([prefix, name])\n                yield name, module\n\n\ndef checkpoint(\n    function,\n    *args,\n    use_reentrant: Optional[bool] = None,\n    **kwargs,\n):\n    \"\"\" checkpoint wrapper fn\n\n    A thin wrapper around torch.utils.checkpoint.checkpoint to default\n    use_reentrant to False\n    \"\"\"\n    if use_reentrant is None:\n        use_reentrant = use_reentrant_ckpt()\n\n    return torch.utils.checkpoint.checkpoint(\n        function,\n        *args,\n        use_reentrant=use_reentrant,\n        **kwargs,\n    )\n\n\ndef checkpoint_seq(\n        functions,\n        x,\n        every: int = 1,\n        flatten: bool = False,\n        skip_last: bool = False,\n        use_reentrant: Optional[bool] = None,\n):\n    r\"\"\"A helper function for checkpointing sequential models.\n\n    Sequential models execute a list of modules/functions in order\n    (sequentially). Therefore, we can divide such a sequence into segments\n    and checkpoint each segment. 
All checkpointed segments run in :func:`torch.no_grad`\n    manner, i.e., not storing the intermediate activations. The inputs of each\n    checkpointed segment will be saved for re-running the segment in the backward pass.\n\n    See :func:`~torch.utils.checkpoint.checkpoint` on how checkpointing works.\n\n    .. warning::\n        Checkpointing currently only supports :func:`torch.autograd.backward`\n        and only if its `inputs` argument is not passed. :func:`torch.autograd.grad`\n        is not supported.\n\n    .. warning::\n        At least one of the inputs needs to have :code:`requires_grad=True` if\n        grads are needed for model inputs, otherwise the checkpointed part of the\n        model won't have gradients.\n\n    Args:\n        functions: A :class:`torch.nn.Sequential` or the list of modules or functions to run sequentially.\n        x: A Tensor that is input to :attr:`functions`\n        every: checkpoint every-n functions (default: 1)\n        flatten: flatten nn.Sequential of nn.Sequentials\n        skip_last: skip checkpointing the last function in the sequence if True\n        use_reentrant: Use re-entrant checkpointing\n\n    Returns:\n        Output of running :attr:`functions` sequentially on :attr:`x`\n\n    Example:\n        >>> model = nn.Sequential(...)\n        >>> input_var = checkpoint_seq(model, input_var, every=2)\n    \"\"\"\n    if use_reentrant is None:\n        use_reentrant = use_reentrant_ckpt()\n\n    def run_function(start, end, functions):\n        def forward(_x):\n            for j in range(start, end + 1):\n                _x = functions[j](_x)\n            return _x\n        return forward\n\n    if isinstance(functions, torch.nn.Sequential):\n        functions = functions.children()\n    if flatten:\n        functions = chain.from_iterable(functions)\n    if not isinstance(functions, (tuple, list)):\n        functions = tuple(functions)\n\n    num_checkpointed = len(functions)\n    if skip_last:\n        
num_checkpointed -= 1\n    end = -1\n    for start in range(0, num_checkpointed, every):\n        end = min(start + every - 1, num_checkpointed - 1)\n        x = torch.utils.checkpoint.checkpoint(\n            run_function(start, end, functions),\n            x,\n            use_reentrant=use_reentrant,\n        )\n    if skip_last:\n        return run_function(end + 1, len(functions) - 1, functions)(x)\n    return x\n\n\ndef adapt_input_conv(in_chans: int, conv_weight: Tensor) -> Tensor:\n    conv_type = conv_weight.dtype\n    conv_weight = conv_weight.float()  # Some weights are in torch.half, ensure it's float for sum on CPU\n    O, I, J, K = conv_weight.shape\n    if in_chans == 1:\n        if I > 3:\n            assert conv_weight.shape[1] % 3 == 0\n            # For models with space2depth stems\n            conv_weight = conv_weight.reshape(O, I // 3, 3, J, K)\n            conv_weight = conv_weight.sum(dim=2, keepdim=False)\n        else:\n            conv_weight = conv_weight.sum(dim=1, keepdim=True)\n    elif in_chans != 3:\n        if I != 3:\n            raise NotImplementedError('Weight format not supported by conversion.')\n        else:\n            # NOTE this strategy should be better than random init, but there could be other combinations of\n            # the original RGB input layer weights that'd work better for specific cases.\n            repeat = int(math.ceil(in_chans / 3))\n            conv_weight = conv_weight.repeat(1, repeat, 1, 1)[:, :in_chans, :, :]\n            conv_weight *= (3 / float(in_chans))\n    conv_weight = conv_weight.to(conv_type)\n    return conv_weight\n\n\ndef reinit_non_persistent_buffers(model: nn.Module) -> List[str]:\n    \"\"\"Walk model and call init_non_persistent_buffers() on modules that have it.\n\n    This reinitializes computed buffers (like RoPE frequencies, attention bias indices)\n    that are marked as non-persistent and thus not saved in checkpoints. 
These buffers\n    are typically computed from module configuration and need to be reinitialized after\n    loading a checkpoint.\n\n    Args:\n        model: Model to reinitialize buffers for\n\n    Returns:\n        List of module names that were reinitialized\n\n    Example:\n        >>> model = create_model('vit_base', pretrained=True)\n        >>> # After loading checkpoint or moving to new device\n        >>> reinitialized = reinit_non_persistent_buffers(model)\n        >>> print(f\"Reinitialized {len(reinitialized)} modules\")\n    \"\"\"\n    reinitialized = []\n    for name, module in model.named_modules():\n        if hasattr(module, 'init_non_persistent_buffers'):\n            module.init_non_persistent_buffers()\n            reinitialized.append(name if name else '(root)')\n    return reinitialized\n"
  },
  {
    "path": "timm/models/_pretrained.py",
    "content": "import copy\nfrom collections import deque, defaultdict\nfrom dataclasses import dataclass, field, replace, asdict\nfrom typing import Any, Deque, Dict, Tuple, Optional, Union\n\n\n__all__ = ['PretrainedCfg', 'filter_pretrained_cfg', 'DefaultCfg']\n\n\n@dataclass\nclass PretrainedCfg:\n    \"\"\"\n    \"\"\"\n    # weight source locations\n    url: Optional[Union[str, Tuple[str, str]]] = None  # remote URL\n    file: Optional[str] = None  # local / shared filesystem path\n    state_dict: Optional[Dict[str, Any]] = None  # in-memory state dict\n    hf_hub_id: Optional[str] = None  # Hugging Face Hub model id ('organization/model')\n    hf_hub_filename: Optional[str] = None  # Hugging Face Hub filename (overrides default)\n\n    source: Optional[str] = None  # source of cfg / weight location used (url, file, hf-hub)\n    architecture: Optional[str] = None  # architecture variant can be set when not implicit\n    tag: Optional[str] = None  # pretrained tag of source\n    custom_load: bool = False  # use custom model specific model.load_pretrained() (ie for npz files)\n\n    # input / data config\n    input_size: Tuple[int, int, int] = (3, 224, 224)\n    test_input_size: Optional[Tuple[int, int, int]] = None\n    min_input_size: Optional[Tuple[int, int, int]] = None\n    fixed_input_size: bool = False\n    interpolation: str = 'bicubic'\n    crop_pct: float = 0.875\n    test_crop_pct: Optional[float] = None\n    crop_mode: str = 'center'\n    mean: Tuple[float, ...] = (0.485, 0.456, 0.406)\n    std: Tuple[float, ...] 
= (0.229, 0.224, 0.225)\n\n    # head / classifier config and meta-data\n    num_classes: int = 1000\n    label_offset: Optional[int] = None\n    label_names: Optional[Tuple[str]] = None\n    label_descriptions: Optional[Dict[str, str]] = None\n\n    # model attributes that vary with above or required for pretrained adaptation\n    pool_size: Optional[Tuple[int, ...]] = None\n    test_pool_size: Optional[Tuple[int, ...]] = None\n    first_conv: Optional[str] = None\n    classifier: Optional[str] = None\n\n    license: Optional[str] = None\n    description: Optional[str] = None\n    origin_url: Optional[str] = None\n    paper_name: Optional[str] = None\n    paper_ids: Optional[Union[str, Tuple[str]]] = None\n    notes: Optional[Tuple[str]] = None\n\n    @property\n    def has_weights(self):\n        return self.url or self.file or self.hf_hub_id\n\n    def to_dict(self, remove_source=False, remove_null=True):\n        return filter_pretrained_cfg(\n            asdict(self),\n            remove_source=remove_source,\n            remove_null=remove_null\n        )\n\n\ndef filter_pretrained_cfg(cfg, remove_source=False, remove_null=True):\n    filtered_cfg = {}\n    keep_null = {'pool_size', 'first_conv', 'classifier'}  # always keep these keys, even if none\n    for k, v in cfg.items():\n        if remove_source and k in {'url', 'file', 'hf_hub_id', 'hf_hub_id', 'hf_hub_filename', 'source'}:\n            continue\n        if remove_null and v is None and k not in keep_null:\n            continue\n        filtered_cfg[k] = v\n    return filtered_cfg\n\n\n@dataclass\nclass DefaultCfg:\n    tags: Deque[str] = field(default_factory=deque)  # priority queue of tags (first is default)\n    cfgs: Dict[str, PretrainedCfg] = field(default_factory=dict)  # pretrained cfgs by tag\n    is_pretrained: bool = False  # at least one of the configs has a pretrained source set\n\n    @property\n    def default(self):\n        return self.cfgs[self.tags[0]]\n\n    @property\n    def 
default_with_tag(self):\n        tag = self.tags[0]\n        return tag, self.cfgs[tag]\n"
  },
  {
    "path": "timm/models/_prune.py",
    "content": "import os\nimport pkgutil\nfrom copy import deepcopy\n\nfrom torch import nn as nn\n\nfrom timm.layers import Conv2dSame, BatchNormAct2d, Linear\n\n__all__ = ['extract_layer', 'set_layer', 'adapt_model_from_string', 'adapt_model_from_file']\n\n\ndef extract_layer(model, layer):\n    \"\"\"Extract a layer from a model using dot-separated path.\n\n    Args:\n        model: PyTorch model.\n        layer: Dot-separated layer path (e.g., 'layer1.0.conv1').\n\n    Returns:\n        Extracted module.\n    \"\"\"\n    layer = layer.split('.')\n    module = model\n    if hasattr(model, 'module') and layer[0] != 'module':\n        module = model.module\n    if not hasattr(model, 'module') and layer[0] == 'module':\n        layer = layer[1:]\n    for l in layer:\n        if hasattr(module, l):\n            if not l.isdigit():\n                module = getattr(module, l)\n            else:\n                module = module[int(l)]\n        else:\n            return module\n    return module\n\n\ndef set_layer(model, layer, val):\n    \"\"\"Set a layer in a model using dot-separated path.\n\n    Args:\n        model: PyTorch model.\n        layer: Dot-separated layer path.\n        val: New value for the layer.\n    \"\"\"\n    layer = layer.split('.')\n    module = model\n    if hasattr(model, 'module') and layer[0] != 'module':\n        module = model.module\n    lst_index = 0\n    module2 = module\n    for l in layer:\n        if hasattr(module2, l):\n            if not l.isdigit():\n                module2 = getattr(module2, l)\n            else:\n                module2 = module2[int(l)]\n            lst_index += 1\n    lst_index -= 1\n    for l in layer[:lst_index]:\n        if not l.isdigit():\n            module = getattr(module, l)\n        else:\n            module = module[int(l)]\n    l = layer[lst_index]\n    setattr(module, l, val)\n\n\ndef adapt_model_from_string(parent_module, model_string):\n    \"\"\"Adapt a model to pruned structure from string 
specification.\n\n    Args:\n        parent_module: Original model to adapt.\n        model_string: String containing layer shapes for pruned model.\n\n    Returns:\n        Adapted model with pruned layer dimensions.\n    \"\"\"\n    separator = '***'\n    state_dict = {}\n    lst_shape = model_string.split(separator)\n    for k in lst_shape:\n        k = k.split(':')\n        key = k[0]\n        shape = k[1][1:-1].split(',')\n        if shape[0] != '':\n            state_dict[key] = [int(i) for i in shape]\n\n    # Extract device and dtype from the parent module\n    device = next(parent_module.parameters()).device\n    dtype = next(parent_module.parameters()).dtype\n    dd = {'device': device, 'dtype': dtype}\n\n    new_module = deepcopy(parent_module)\n    for n, m in parent_module.named_modules():\n        old_module = extract_layer(parent_module, n)\n        if isinstance(old_module, nn.Conv2d) or isinstance(old_module, Conv2dSame):\n            if isinstance(old_module, Conv2dSame):\n                conv = Conv2dSame\n            else:\n                conv = nn.Conv2d\n            s = state_dict[n + '.weight']\n            in_channels = s[1]\n            out_channels = s[0]\n            g = 1\n            if old_module.groups > 1:\n                in_channels = out_channels\n                g = in_channels\n            new_conv = conv(\n                in_channels=in_channels,\n                out_channels=out_channels,\n                kernel_size=old_module.kernel_size,\n                bias=old_module.bias is not None,\n                padding=old_module.padding,\n                dilation=old_module.dilation,\n                groups=g,\n                stride=old_module.stride,\n                **dd,\n            )\n            set_layer(new_module, n, new_conv)\n        elif isinstance(old_module, BatchNormAct2d):\n            new_bn = BatchNormAct2d(\n                state_dict[n + '.weight'][0],\n                eps=old_module.eps,\n                
momentum=old_module.momentum,\n                affine=old_module.affine,\n                track_running_stats=True,\n                **dd,\n            )\n            new_bn.drop = old_module.drop\n            new_bn.act = old_module.act\n            set_layer(new_module, n, new_bn)\n        elif isinstance(old_module, nn.BatchNorm2d):\n            new_bn = nn.BatchNorm2d(\n                num_features=state_dict[n + '.weight'][0],\n                eps=old_module.eps,\n                momentum=old_module.momentum,\n                affine=old_module.affine,\n                track_running_stats=True,\n                **dd,\n            )\n            set_layer(new_module, n, new_bn)\n        elif isinstance(old_module, nn.Linear):\n            # FIXME extra checks to ensure this is actually the FC classifier layer and not a diff Linear layer?\n            num_features = state_dict[n + '.weight'][1]\n            new_fc = Linear(\n                in_features=num_features,\n                out_features=old_module.out_features,\n                bias=old_module.bias is not None,\n                **dd,\n            )\n            set_layer(new_module, n, new_fc)\n            if hasattr(new_module, 'num_features'):\n                if getattr(new_module, 'head_hidden_size', 0) == new_module.num_features:\n                    new_module.head_hidden_size = num_features\n                new_module.num_features = num_features\n\n    new_module.eval()\n    parent_module.eval()\n\n    return new_module\n\n\ndef adapt_model_from_file(parent_module, model_variant):\n    \"\"\"Adapt a model to pruned structure from file specification.\n\n    Args:\n        parent_module: Original model to adapt.\n        model_variant: Name of pruned model variant file.\n\n    Returns:\n        Adapted model with pruned layer dimensions.\n    \"\"\"\n    adapt_data = pkgutil.get_data(__name__, os.path.join('_pruned', model_variant + '.txt'))\n    return adapt_model_from_string(parent_module, 
adapt_data.decode('utf-8').strip())\n"
  },
  {
    "path": "timm/models/_pruned/ecaresnet101d_pruned.txt",
    "content": "conv1.0.weight:[32, 3, 3, 3]***conv1.1.weight:[32]***conv1.3.weight:[32, 32, 3, 3]***conv1.4.weight:[32]***conv1.6.weight:[64, 32, 3, 3]***bn1.weight:[64]***layer1.0.conv1.weight:[45, 64, 1, 1]***layer1.0.bn1.weight:[45]***layer1.0.conv2.weight:[25, 45, 3, 3]***layer1.0.bn2.weight:[25]***layer1.0.conv3.weight:[26, 25, 1, 1]***layer1.0.bn3.weight:[26]***layer1.0.se.conv.weight:[1, 1, 5]***layer1.0.downsample.1.weight:[26, 64, 1, 1]***layer1.0.downsample.2.weight:[26]***layer1.1.conv1.weight:[53, 26, 1, 1]***layer1.1.bn1.weight:[53]***layer1.1.conv2.weight:[20, 53, 3, 3]***layer1.1.bn2.weight:[20]***layer1.1.conv3.weight:[26, 20, 1, 1]***layer1.1.bn3.weight:[26]***layer1.1.se.conv.weight:[1, 1, 5]***layer1.2.conv1.weight:[60, 26, 1, 1]***layer1.2.bn1.weight:[60]***layer1.2.conv2.weight:[27, 60, 3, 3]***layer1.2.bn2.weight:[27]***layer1.2.conv3.weight:[26, 27, 1, 1]***layer1.2.bn3.weight:[26]***layer1.2.se.conv.weight:[1, 1, 5]***layer2.0.conv1.weight:[81, 26, 1, 1]***layer2.0.bn1.weight:[81]***layer2.0.conv2.weight:[24, 81, 3, 3]***layer2.0.bn2.weight:[24]***layer2.0.conv3.weight:[142, 24, 1, 1]***layer2.0.bn3.weight:[142]***layer2.0.se.conv.weight:[1, 1, 5]***layer2.0.downsample.1.weight:[142, 26, 1, 1]***layer2.0.downsample.2.weight:[142]***layer2.1.conv1.weight:[93, 142, 1, 1]***layer2.1.bn1.weight:[93]***layer2.1.conv2.weight:[49, 93, 3, 3]***layer2.1.bn2.weight:[49]***layer2.1.conv3.weight:[142, 49, 1, 1]***layer2.1.bn3.weight:[142]***layer2.1.se.conv.weight:[1, 1, 5]***layer2.2.conv1.weight:[102, 142, 1, 1]***layer2.2.bn1.weight:[102]***layer2.2.conv2.weight:[54, 102, 3, 3]***layer2.2.bn2.weight:[54]***layer2.2.conv3.weight:[142, 54, 1, 1]***layer2.2.bn3.weight:[142]***layer2.2.se.conv.weight:[1, 1, 5]***layer2.3.conv1.weight:[122, 142, 1, 1]***layer2.3.bn1.weight:[122]***layer2.3.conv2.weight:[78, 122, 3, 3]***layer2.3.bn2.weight:[78]***layer2.3.conv3.weight:[142, 78, 1, 1]***layer2.3.bn3.weight:[142]***layer2.3.se.conv.weight:[1, 1, 
5]***layer3.0.conv1.weight:[101, 142, 1, 1]***layer3.0.bn1.weight:[101]***layer3.0.conv2.weight:[25, 101, 3, 3]***layer3.0.bn2.weight:[25]***layer3.0.conv3.weight:[278, 25, 1, 1]***layer3.0.bn3.weight:[278]***layer3.0.se.conv.weight:[1, 1, 5]***layer3.0.downsample.1.weight:[278, 142, 1, 1]***layer3.0.downsample.2.weight:[278]***layer3.1.conv1.weight:[239, 278, 1, 1]***layer3.1.bn1.weight:[239]***layer3.1.conv2.weight:[160, 239, 3, 3]***layer3.1.bn2.weight:[160]***layer3.1.conv3.weight:[278, 160, 1, 1]***layer3.1.bn3.weight:[278]***layer3.1.se.conv.weight:[1, 1, 5]***layer3.2.conv1.weight:[234, 278, 1, 1]***layer3.2.bn1.weight:[234]***layer3.2.conv2.weight:[156, 234, 3, 3]***layer3.2.bn2.weight:[156]***layer3.2.conv3.weight:[278, 156, 1, 1]***layer3.2.bn3.weight:[278]***layer3.2.se.conv.weight:[1, 1, 5]***layer3.3.conv1.weight:[250, 278, 1, 1]***layer3.3.bn1.weight:[250]***layer3.3.conv2.weight:[176, 250, 3, 3]***layer3.3.bn2.weight:[176]***layer3.3.conv3.weight:[278, 176, 1, 1]***layer3.3.bn3.weight:[278]***layer3.3.se.conv.weight:[1, 1, 5]***layer3.4.conv1.weight:[253, 278, 1, 1]***layer3.4.bn1.weight:[253]***layer3.4.conv2.weight:[191, 253, 3, 3]***layer3.4.bn2.weight:[191]***layer3.4.conv3.weight:[278, 191, 1, 1]***layer3.4.bn3.weight:[278]***layer3.4.se.conv.weight:[1, 1, 5]***layer3.5.conv1.weight:[251, 278, 1, 1]***layer3.5.bn1.weight:[251]***layer3.5.conv2.weight:[175, 251, 3, 3]***layer3.5.bn2.weight:[175]***layer3.5.conv3.weight:[278, 175, 1, 1]***layer3.5.bn3.weight:[278]***layer3.5.se.conv.weight:[1, 1, 5]***layer3.6.conv1.weight:[230, 278, 1, 1]***layer3.6.bn1.weight:[230]***layer3.6.conv2.weight:[128, 230, 3, 3]***layer3.6.bn2.weight:[128]***layer3.6.conv3.weight:[278, 128, 1, 1]***layer3.6.bn3.weight:[278]***layer3.6.se.conv.weight:[1, 1, 5]***layer3.7.conv1.weight:[244, 278, 1, 1]***layer3.7.bn1.weight:[244]***layer3.7.conv2.weight:[154, 244, 3, 3]***layer3.7.bn2.weight:[154]***layer3.7.conv3.weight:[278, 154, 1, 
1]***layer3.7.bn3.weight:[278]***layer3.7.se.conv.weight:[1, 1, 5]***layer3.8.conv1.weight:[244, 278, 1, 1]***layer3.8.bn1.weight:[244]***layer3.8.conv2.weight:[159, 244, 3, 3]***layer3.8.bn2.weight:[159]***layer3.8.conv3.weight:[278, 159, 1, 1]***layer3.8.bn3.weight:[278]***layer3.8.se.conv.weight:[1, 1, 5]***layer3.9.conv1.weight:[238, 278, 1, 1]***layer3.9.bn1.weight:[238]***layer3.9.conv2.weight:[97, 238, 3, 3]***layer3.9.bn2.weight:[97]***layer3.9.conv3.weight:[278, 97, 1, 1]***layer3.9.bn3.weight:[278]***layer3.9.se.conv.weight:[1, 1, 5]***layer3.10.conv1.weight:[244, 278, 1, 1]***layer3.10.bn1.weight:[244]***layer3.10.conv2.weight:[149, 244, 3, 3]***layer3.10.bn2.weight:[149]***layer3.10.conv3.weight:[278, 149, 1, 1]***layer3.10.bn3.weight:[278]***layer3.10.se.conv.weight:[1, 1, 5]***layer3.11.conv1.weight:[253, 278, 1, 1]***layer3.11.bn1.weight:[253]***layer3.11.conv2.weight:[181, 253, 3, 3]***layer3.11.bn2.weight:[181]***layer3.11.conv3.weight:[278, 181, 1, 1]***layer3.11.bn3.weight:[278]***layer3.11.se.conv.weight:[1, 1, 5]***layer3.12.conv1.weight:[245, 278, 1, 1]***layer3.12.bn1.weight:[245]***layer3.12.conv2.weight:[119, 245, 3, 3]***layer3.12.bn2.weight:[119]***layer3.12.conv3.weight:[278, 119, 1, 1]***layer3.12.bn3.weight:[278]***layer3.12.se.conv.weight:[1, 1, 5]***layer3.13.conv1.weight:[255, 278, 1, 1]***layer3.13.bn1.weight:[255]***layer3.13.conv2.weight:[216, 255, 3, 3]***layer3.13.bn2.weight:[216]***layer3.13.conv3.weight:[278, 216, 1, 1]***layer3.13.bn3.weight:[278]***layer3.13.se.conv.weight:[1, 1, 5]***layer3.14.conv1.weight:[256, 278, 1, 1]***layer3.14.bn1.weight:[256]***layer3.14.conv2.weight:[201, 256, 3, 3]***layer3.14.bn2.weight:[201]***layer3.14.conv3.weight:[278, 201, 1, 1]***layer3.14.bn3.weight:[278]***layer3.14.se.conv.weight:[1, 1, 5]***layer3.15.conv1.weight:[253, 278, 1, 1]***layer3.15.bn1.weight:[253]***layer3.15.conv2.weight:[149, 253, 3, 3]***layer3.15.bn2.weight:[149]***layer3.15.conv3.weight:[278, 149, 1, 
1]***layer3.15.bn3.weight:[278]***layer3.15.se.conv.weight:[1, 1, 5]***layer3.16.conv1.weight:[254, 278, 1, 1]***layer3.16.bn1.weight:[254]***layer3.16.conv2.weight:[141, 254, 3, 3]***layer3.16.bn2.weight:[141]***layer3.16.conv3.weight:[278, 141, 1, 1]***layer3.16.bn3.weight:[278]***layer3.16.se.conv.weight:[1, 1, 5]***layer3.17.conv1.weight:[256, 278, 1, 1]***layer3.17.bn1.weight:[256]***layer3.17.conv2.weight:[190, 256, 3, 3]***layer3.17.bn2.weight:[190]***layer3.17.conv3.weight:[278, 190, 1, 1]***layer3.17.bn3.weight:[278]***layer3.17.se.conv.weight:[1, 1, 5]***layer3.18.conv1.weight:[256, 278, 1, 1]***layer3.18.bn1.weight:[256]***layer3.18.conv2.weight:[217, 256, 3, 3]***layer3.18.bn2.weight:[217]***layer3.18.conv3.weight:[278, 217, 1, 1]***layer3.18.bn3.weight:[278]***layer3.18.se.conv.weight:[1, 1, 5]***layer3.19.conv1.weight:[255, 278, 1, 1]***layer3.19.bn1.weight:[255]***layer3.19.conv2.weight:[156, 255, 3, 3]***layer3.19.bn2.weight:[156]***layer3.19.conv3.weight:[278, 156, 1, 1]***layer3.19.bn3.weight:[278]***layer3.19.se.conv.weight:[1, 1, 5]***layer3.20.conv1.weight:[256, 278, 1, 1]***layer3.20.bn1.weight:[256]***layer3.20.conv2.weight:[155, 256, 3, 3]***layer3.20.bn2.weight:[155]***layer3.20.conv3.weight:[278, 155, 1, 1]***layer3.20.bn3.weight:[278]***layer3.20.se.conv.weight:[1, 1, 5]***layer3.21.conv1.weight:[256, 278, 1, 1]***layer3.21.bn1.weight:[256]***layer3.21.conv2.weight:[232, 256, 3, 3]***layer3.21.bn2.weight:[232]***layer3.21.conv3.weight:[278, 232, 1, 1]***layer3.21.bn3.weight:[278]***layer3.21.se.conv.weight:[1, 1, 5]***layer3.22.conv1.weight:[256, 278, 1, 1]***layer3.22.bn1.weight:[256]***layer3.22.conv2.weight:[214, 256, 3, 3]***layer3.22.bn2.weight:[214]***layer3.22.conv3.weight:[278, 214, 1, 1]***layer3.22.bn3.weight:[278]***layer3.22.se.conv.weight:[1, 1, 5]***layer4.0.conv1.weight:[499, 278, 1, 1]***layer4.0.bn1.weight:[499]***layer4.0.conv2.weight:[289, 499, 3, 3]***layer4.0.bn2.weight:[289]***layer4.0.conv3.weight:[2042, 289, 1, 
1]***layer4.0.bn3.weight:[2042]***layer4.0.se.conv.weight:[1, 1, 7]***layer4.0.downsample.1.weight:[2042, 278, 1, 1]***layer4.0.downsample.2.weight:[2042]***layer4.1.conv1.weight:[512, 2042, 1, 1]***layer4.1.bn1.weight:[512]***layer4.1.conv2.weight:[512, 512, 3, 3]***layer4.1.bn2.weight:[512]***layer4.1.conv3.weight:[2042, 512, 1, 1]***layer4.1.bn3.weight:[2042]***layer4.1.se.conv.weight:[1, 1, 7]***layer4.2.conv1.weight:[512, 2042, 1, 1]***layer4.2.bn1.weight:[512]***layer4.2.conv2.weight:[502, 512, 3, 3]***layer4.2.bn2.weight:[502]***layer4.2.conv3.weight:[2042, 502, 1, 1]***layer4.2.bn3.weight:[2042]***layer4.2.se.conv.weight:[1, 1, 7]***fc.weight:[1000, 2042]***layer1_2_conv3_M.weight:[256, 26]***layer2_3_conv3_M.weight:[512, 142]***layer3_22_conv3_M.weight:[1024, 278]***layer4_2_conv3_M.weight:[2048, 2042]"
  },
  {
    "path": "timm/models/_pruned/ecaresnet50d_pruned.txt",
    "content": "conv1.0.weight:[32, 3, 3, 3]***conv1.1.weight:[32]***conv1.3.weight:[32, 32, 3, 3]***conv1.4.weight:[32]***conv1.6.weight:[64, 32, 3, 3]***bn1.weight:[64]***layer1.0.conv1.weight:[47, 64, 1, 1]***layer1.0.bn1.weight:[47]***layer1.0.conv2.weight:[18, 47, 3, 3]***layer1.0.bn2.weight:[18]***layer1.0.conv3.weight:[19, 18, 1, 1]***layer1.0.bn3.weight:[19]***layer1.0.se.conv.weight:[1, 1, 5]***layer1.0.downsample.1.weight:[19, 64, 1, 1]***layer1.0.downsample.2.weight:[19]***layer1.1.conv1.weight:[52, 19, 1, 1]***layer1.1.bn1.weight:[52]***layer1.1.conv2.weight:[22, 52, 3, 3]***layer1.1.bn2.weight:[22]***layer1.1.conv3.weight:[19, 22, 1, 1]***layer1.1.bn3.weight:[19]***layer1.1.se.conv.weight:[1, 1, 5]***layer1.2.conv1.weight:[64, 19, 1, 1]***layer1.2.bn1.weight:[64]***layer1.2.conv2.weight:[35, 64, 3, 3]***layer1.2.bn2.weight:[35]***layer1.2.conv3.weight:[19, 35, 1, 1]***layer1.2.bn3.weight:[19]***layer1.2.se.conv.weight:[1, 1, 5]***layer2.0.conv1.weight:[85, 19, 1, 1]***layer2.0.bn1.weight:[85]***layer2.0.conv2.weight:[37, 85, 3, 3]***layer2.0.bn2.weight:[37]***layer2.0.conv3.weight:[171, 37, 1, 1]***layer2.0.bn3.weight:[171]***layer2.0.se.conv.weight:[1, 1, 5]***layer2.0.downsample.1.weight:[171, 19, 1, 1]***layer2.0.downsample.2.weight:[171]***layer2.1.conv1.weight:[107, 171, 1, 1]***layer2.1.bn1.weight:[107]***layer2.1.conv2.weight:[80, 107, 3, 3]***layer2.1.bn2.weight:[80]***layer2.1.conv3.weight:[171, 80, 1, 1]***layer2.1.bn3.weight:[171]***layer2.1.se.conv.weight:[1, 1, 5]***layer2.2.conv1.weight:[120, 171, 1, 1]***layer2.2.bn1.weight:[120]***layer2.2.conv2.weight:[85, 120, 3, 3]***layer2.2.bn2.weight:[85]***layer2.2.conv3.weight:[171, 85, 1, 1]***layer2.2.bn3.weight:[171]***layer2.2.se.conv.weight:[1, 1, 5]***layer2.3.conv1.weight:[125, 171, 1, 1]***layer2.3.bn1.weight:[125]***layer2.3.conv2.weight:[87, 125, 3, 3]***layer2.3.bn2.weight:[87]***layer2.3.conv3.weight:[171, 87, 1, 1]***layer2.3.bn3.weight:[171]***layer2.3.se.conv.weight:[1, 1, 
5]***layer3.0.conv1.weight:[198, 171, 1, 1]***layer3.0.bn1.weight:[198]***layer3.0.conv2.weight:[126, 198, 3, 3]***layer3.0.bn2.weight:[126]***layer3.0.conv3.weight:[818, 126, 1, 1]***layer3.0.bn3.weight:[818]***layer3.0.se.conv.weight:[1, 1, 5]***layer3.0.downsample.1.weight:[818, 171, 1, 1]***layer3.0.downsample.2.weight:[818]***layer3.1.conv1.weight:[255, 818, 1, 1]***layer3.1.bn1.weight:[255]***layer3.1.conv2.weight:[232, 255, 3, 3]***layer3.1.bn2.weight:[232]***layer3.1.conv3.weight:[818, 232, 1, 1]***layer3.1.bn3.weight:[818]***layer3.1.se.conv.weight:[1, 1, 5]***layer3.2.conv1.weight:[256, 818, 1, 1]***layer3.2.bn1.weight:[256]***layer3.2.conv2.weight:[233, 256, 3, 3]***layer3.2.bn2.weight:[233]***layer3.2.conv3.weight:[818, 233, 1, 1]***layer3.2.bn3.weight:[818]***layer3.2.se.conv.weight:[1, 1, 5]***layer3.3.conv1.weight:[253, 818, 1, 1]***layer3.3.bn1.weight:[253]***layer3.3.conv2.weight:[235, 253, 3, 3]***layer3.3.bn2.weight:[235]***layer3.3.conv3.weight:[818, 235, 1, 1]***layer3.3.bn3.weight:[818]***layer3.3.se.conv.weight:[1, 1, 5]***layer3.4.conv1.weight:[256, 818, 1, 1]***layer3.4.bn1.weight:[256]***layer3.4.conv2.weight:[225, 256, 3, 3]***layer3.4.bn2.weight:[225]***layer3.4.conv3.weight:[818, 225, 1, 1]***layer3.4.bn3.weight:[818]***layer3.4.se.conv.weight:[1, 1, 5]***layer3.5.conv1.weight:[256, 818, 1, 1]***layer3.5.bn1.weight:[256]***layer3.5.conv2.weight:[239, 256, 3, 3]***layer3.5.bn2.weight:[239]***layer3.5.conv3.weight:[818, 239, 1, 1]***layer3.5.bn3.weight:[818]***layer3.5.se.conv.weight:[1, 1, 5]***layer4.0.conv1.weight:[492, 818, 1, 1]***layer4.0.bn1.weight:[492]***layer4.0.conv2.weight:[237, 492, 3, 3]***layer4.0.bn2.weight:[237]***layer4.0.conv3.weight:[2022, 237, 1, 1]***layer4.0.bn3.weight:[2022]***layer4.0.se.conv.weight:[1, 1, 7]***layer4.0.downsample.1.weight:[2022, 818, 1, 1]***layer4.0.downsample.2.weight:[2022]***layer4.1.conv1.weight:[512, 2022, 1, 1]***layer4.1.bn1.weight:[512]***layer4.1.conv2.weight:[500, 512, 3, 
3]***layer4.1.bn2.weight:[500]***layer4.1.conv3.weight:[2022, 500, 1, 1]***layer4.1.bn3.weight:[2022]***layer4.1.se.conv.weight:[1, 1, 7]***layer4.2.conv1.weight:[512, 2022, 1, 1]***layer4.2.bn1.weight:[512]***layer4.2.conv2.weight:[490, 512, 3, 3]***layer4.2.bn2.weight:[490]***layer4.2.conv3.weight:[2022, 490, 1, 1]***layer4.2.bn3.weight:[2022]***layer4.2.se.conv.weight:[1, 1, 7]***fc.weight:[1000, 2022]***layer1_2_conv3_M.weight:[256, 19]***layer2_3_conv3_M.weight:[512, 171]***layer3_5_conv3_M.weight:[1024, 818]***layer4_2_conv3_M.weight:[2048, 2022]"
  },
  {
    "path": "timm/models/_pruned/efficientnet_b1_pruned.txt",
    "content": "conv_stem.weight:[32, 3, 3, 3]***bn1.weight:[32]***bn1.bias:[32]***bn1.running_mean:[32]***bn1.running_var:[32]***bn1.num_batches_tracked:[]***blocks.0.0.conv_dw.weight:[32, 1, 3, 3]***blocks.0.0.bn1.weight:[32]***blocks.0.0.bn1.bias:[32]***blocks.0.0.bn1.running_mean:[32]***blocks.0.0.bn1.running_var:[32]***blocks.0.0.bn1.num_batches_tracked:[]***blocks.0.0.se.conv_reduce.weight:[8, 32, 1, 1]***blocks.0.0.se.conv_reduce.bias:[8]***blocks.0.0.se.conv_expand.weight:[32, 8, 1, 1]***blocks.0.0.se.conv_expand.bias:[32]***blocks.0.0.conv_pw.weight:[16, 32, 1, 1]***blocks.0.0.bn2.weight:[16]***blocks.0.0.bn2.bias:[16]***blocks.0.0.bn2.running_mean:[16]***blocks.0.0.bn2.running_var:[16]***blocks.0.0.bn2.num_batches_tracked:[]***blocks.0.1.conv_dw.weight:[16, 1, 3, 3]***blocks.0.1.bn1.weight:[16]***blocks.0.1.bn1.bias:[16]***blocks.0.1.bn1.running_mean:[16]***blocks.0.1.bn1.running_var:[16]***blocks.0.1.bn1.num_batches_tracked:[]***blocks.0.1.se.conv_reduce.weight:[4, 16, 1, 1]***blocks.0.1.se.conv_reduce.bias:[4]***blocks.0.1.se.conv_expand.weight:[16, 4, 1, 1]***blocks.0.1.se.conv_expand.bias:[16]***blocks.0.1.conv_pw.weight:[16, 16, 1, 1]***blocks.0.1.bn2.weight:[16]***blocks.0.1.bn2.bias:[16]***blocks.0.1.bn2.running_mean:[16]***blocks.0.1.bn2.running_var:[16]***blocks.0.1.bn2.num_batches_tracked:[]***blocks.1.0.conv_pw.weight:[48, 16, 1, 1]***blocks.1.0.bn1.weight:[48]***blocks.1.0.bn1.bias:[48]***blocks.1.0.bn1.running_mean:[48]***blocks.1.0.bn1.running_var:[48]***blocks.1.0.bn1.num_batches_tracked:[]***blocks.1.0.conv_dw.weight:[48, 1, 3, 3]***blocks.1.0.bn2.weight:[48]***blocks.1.0.bn2.bias:[48]***blocks.1.0.bn2.running_mean:[48]***blocks.1.0.bn2.running_var:[48]***blocks.1.0.bn2.num_batches_tracked:[]***blocks.1.0.se.conv_reduce.weight:[4, 48, 1, 1]***blocks.1.0.se.conv_reduce.bias:[4]***blocks.1.0.se.conv_expand.weight:[48, 4, 1, 1]***blocks.1.0.se.conv_expand.bias:[48]***blocks.1.0.conv_pwl.weight:[12, 48, 1, 
1]***blocks.1.0.bn3.weight:[12]***blocks.1.0.bn3.bias:[12]***blocks.1.0.bn3.running_mean:[12]***blocks.1.0.bn3.running_var:[12]***blocks.1.0.bn3.num_batches_tracked:[]***blocks.1.1.conv_pw.weight:[62, 12, 1, 1]***blocks.1.1.bn1.weight:[62]***blocks.1.1.bn1.bias:[62]***blocks.1.1.bn1.running_mean:[62]***blocks.1.1.bn1.running_var:[62]***blocks.1.1.bn1.num_batches_tracked:[]***blocks.1.1.conv_dw.weight:[62, 1, 3, 3]***blocks.1.1.bn2.weight:[62]***blocks.1.1.bn2.bias:[62]***blocks.1.1.bn2.running_mean:[62]***blocks.1.1.bn2.running_var:[62]***blocks.1.1.bn2.num_batches_tracked:[]***blocks.1.1.se.conv_reduce.weight:[6, 62, 1, 1]***blocks.1.1.se.conv_reduce.bias:[6]***blocks.1.1.se.conv_expand.weight:[62, 6, 1, 1]***blocks.1.1.se.conv_expand.bias:[62]***blocks.1.1.conv_pwl.weight:[12, 62, 1, 1]***blocks.1.1.bn3.weight:[12]***blocks.1.1.bn3.bias:[12]***blocks.1.1.bn3.running_mean:[12]***blocks.1.1.bn3.running_var:[12]***blocks.1.1.bn3.num_batches_tracked:[]***blocks.1.2.conv_pw.weight:[48, 12, 1, 1]***blocks.1.2.bn1.weight:[48]***blocks.1.2.bn1.bias:[48]***blocks.1.2.bn1.running_mean:[48]***blocks.1.2.bn1.running_var:[48]***blocks.1.2.bn1.num_batches_tracked:[]***blocks.1.2.conv_dw.weight:[48, 1, 3, 3]***blocks.1.2.bn2.weight:[48]***blocks.1.2.bn2.bias:[48]***blocks.1.2.bn2.running_mean:[48]***blocks.1.2.bn2.running_var:[48]***blocks.1.2.bn2.num_batches_tracked:[]***blocks.1.2.se.conv_reduce.weight:[6, 48, 1, 1]***blocks.1.2.se.conv_reduce.bias:[6]***blocks.1.2.se.conv_expand.weight:[48, 6, 1, 1]***blocks.1.2.se.conv_expand.bias:[48]***blocks.1.2.conv_pwl.weight:[12, 48, 1, 1]***blocks.1.2.bn3.weight:[12]***blocks.1.2.bn3.bias:[12]***blocks.1.2.bn3.running_mean:[12]***blocks.1.2.bn3.running_var:[12]***blocks.1.2.bn3.num_batches_tracked:[]***blocks.2.0.conv_pw.weight:[70, 12, 1, 
1]***blocks.2.0.bn1.weight:[70]***blocks.2.0.bn1.bias:[70]***blocks.2.0.bn1.running_mean:[70]***blocks.2.0.bn1.running_var:[70]***blocks.2.0.bn1.num_batches_tracked:[]***blocks.2.0.conv_dw.weight:[70, 1, 5, 5]***blocks.2.0.bn2.weight:[70]***blocks.2.0.bn2.bias:[70]***blocks.2.0.bn2.running_mean:[70]***blocks.2.0.bn2.running_var:[70]***blocks.2.0.bn2.num_batches_tracked:[]***blocks.2.0.se.conv_reduce.weight:[6, 70, 1, 1]***blocks.2.0.se.conv_reduce.bias:[6]***blocks.2.0.se.conv_expand.weight:[70, 6, 1, 1]***blocks.2.0.se.conv_expand.bias:[70]***blocks.2.0.conv_pwl.weight:[35, 70, 1, 1]***blocks.2.0.bn3.weight:[35]***blocks.2.0.bn3.bias:[35]***blocks.2.0.bn3.running_mean:[35]***blocks.2.0.bn3.running_var:[35]***blocks.2.0.bn3.num_batches_tracked:[]***blocks.2.1.conv_pw.weight:[61, 35, 1, 1]***blocks.2.1.bn1.weight:[61]***blocks.2.1.bn1.bias:[61]***blocks.2.1.bn1.running_mean:[61]***blocks.2.1.bn1.running_var:[61]***blocks.2.1.bn1.num_batches_tracked:[]***blocks.2.1.conv_dw.weight:[61, 1, 5, 5]***blocks.2.1.bn2.weight:[61]***blocks.2.1.bn2.bias:[61]***blocks.2.1.bn2.running_mean:[61]***blocks.2.1.bn2.running_var:[61]***blocks.2.1.bn2.num_batches_tracked:[]***blocks.2.1.se.conv_reduce.weight:[10, 61, 1, 1]***blocks.2.1.se.conv_reduce.bias:[10]***blocks.2.1.se.conv_expand.weight:[61, 10, 1, 1]***blocks.2.1.se.conv_expand.bias:[61]***blocks.2.1.conv_pwl.weight:[35, 61, 1, 1]***blocks.2.1.bn3.weight:[35]***blocks.2.1.bn3.bias:[35]***blocks.2.1.bn3.running_mean:[35]***blocks.2.1.bn3.running_var:[35]***blocks.2.1.bn3.num_batches_tracked:[]***blocks.2.2.conv_pw.weight:[51, 35, 1, 1]***blocks.2.2.bn1.weight:[51]***blocks.2.2.bn1.bias:[51]***blocks.2.2.bn1.running_mean:[51]***blocks.2.2.bn1.running_var:[51]***blocks.2.2.bn1.num_batches_tracked:[]***blocks.2.2.conv_dw.weight:[51, 1, 5, 
5]***blocks.2.2.bn2.weight:[51]***blocks.2.2.bn2.bias:[51]***blocks.2.2.bn2.running_mean:[51]***blocks.2.2.bn2.running_var:[51]***blocks.2.2.bn2.num_batches_tracked:[]***blocks.2.2.se.conv_reduce.weight:[10, 51, 1, 1]***blocks.2.2.se.conv_reduce.bias:[10]***blocks.2.2.se.conv_expand.weight:[51, 10, 1, 1]***blocks.2.2.se.conv_expand.bias:[51]***blocks.2.2.conv_pwl.weight:[35, 51, 1, 1]***blocks.2.2.bn3.weight:[35]***blocks.2.2.bn3.bias:[35]***blocks.2.2.bn3.running_mean:[35]***blocks.2.2.bn3.running_var:[35]***blocks.2.2.bn3.num_batches_tracked:[]***blocks.3.0.conv_pw.weight:[175, 35, 1, 1]***blocks.3.0.bn1.weight:[175]***blocks.3.0.bn1.bias:[175]***blocks.3.0.bn1.running_mean:[175]***blocks.3.0.bn1.running_var:[175]***blocks.3.0.bn1.num_batches_tracked:[]***blocks.3.0.conv_dw.weight:[175, 1, 3, 3]***blocks.3.0.bn2.weight:[175]***blocks.3.0.bn2.bias:[175]***blocks.3.0.bn2.running_mean:[175]***blocks.3.0.bn2.running_var:[175]***blocks.3.0.bn2.num_batches_tracked:[]***blocks.3.0.se.conv_reduce.weight:[10, 175, 1, 1]***blocks.3.0.se.conv_reduce.bias:[10]***blocks.3.0.se.conv_expand.weight:[175, 10, 1, 1]***blocks.3.0.se.conv_expand.bias:[175]***blocks.3.0.conv_pwl.weight:[74, 175, 1, 1]***blocks.3.0.bn3.weight:[74]***blocks.3.0.bn3.bias:[74]***blocks.3.0.bn3.running_mean:[74]***blocks.3.0.bn3.running_var:[74]***blocks.3.0.bn3.num_batches_tracked:[]***blocks.3.1.conv_pw.weight:[188, 74, 1, 1]***blocks.3.1.bn1.weight:[188]***blocks.3.1.bn1.bias:[188]***blocks.3.1.bn1.running_mean:[188]***blocks.3.1.bn1.running_var:[188]***blocks.3.1.bn1.num_batches_tracked:[]***blocks.3.1.conv_dw.weight:[188, 1, 3, 3]***blocks.3.1.bn2.weight:[188]***blocks.3.1.bn2.bias:[188]***blocks.3.1.bn2.running_mean:[188]***blocks.3.1.bn2.running_var:[188]***blocks.3.1.bn2.num_batches_tracked:[]***blocks.3.1.se.conv_reduce.weight:[20, 188, 1, 1]***blocks.3.1.se.conv_reduce.bias:[20]***blocks.3.1.se.conv_expand.weight:[188, 20, 1, 
1]***blocks.3.1.se.conv_expand.bias:[188]***blocks.3.1.conv_pwl.weight:[74, 188, 1, 1]***blocks.3.1.bn3.weight:[74]***blocks.3.1.bn3.bias:[74]***blocks.3.1.bn3.running_mean:[74]***blocks.3.1.bn3.running_var:[74]***blocks.3.1.bn3.num_batches_tracked:[]***blocks.3.2.conv_pw.weight:[137, 74, 1, 1]***blocks.3.2.bn1.weight:[137]***blocks.3.2.bn1.bias:[137]***blocks.3.2.bn1.running_mean:[137]***blocks.3.2.bn1.running_var:[137]***blocks.3.2.bn1.num_batches_tracked:[]***blocks.3.2.conv_dw.weight:[137, 1, 3, 3]***blocks.3.2.bn2.weight:[137]***blocks.3.2.bn2.bias:[137]***blocks.3.2.bn2.running_mean:[137]***blocks.3.2.bn2.running_var:[137]***blocks.3.2.bn2.num_batches_tracked:[]***blocks.3.2.se.conv_reduce.weight:[20, 137, 1, 1]***blocks.3.2.se.conv_reduce.bias:[20]***blocks.3.2.se.conv_expand.weight:[137, 20, 1, 1]***blocks.3.2.se.conv_expand.bias:[137]***blocks.3.2.conv_pwl.weight:[74, 137, 1, 1]***blocks.3.2.bn3.weight:[74]***blocks.3.2.bn3.bias:[74]***blocks.3.2.bn3.running_mean:[74]***blocks.3.2.bn3.running_var:[74]***blocks.3.2.bn3.num_batches_tracked:[]***blocks.3.3.conv_pw.weight:[164, 74, 1, 1]***blocks.3.3.bn1.weight:[164]***blocks.3.3.bn1.bias:[164]***blocks.3.3.bn1.running_mean:[164]***blocks.3.3.bn1.running_var:[164]***blocks.3.3.bn1.num_batches_tracked:[]***blocks.3.3.conv_dw.weight:[164, 1, 3, 3]***blocks.3.3.bn2.weight:[164]***blocks.3.3.bn2.bias:[164]***blocks.3.3.bn2.running_mean:[164]***blocks.3.3.bn2.running_var:[164]***blocks.3.3.bn2.num_batches_tracked:[]***blocks.3.3.se.conv_reduce.weight:[20, 164, 1, 1]***blocks.3.3.se.conv_reduce.bias:[20]***blocks.3.3.se.conv_expand.weight:[164, 20, 1, 1]***blocks.3.3.se.conv_expand.bias:[164]***blocks.3.3.conv_pwl.weight:[74, 164, 1, 1]***blocks.3.3.bn3.weight:[74]***blocks.3.3.bn3.bias:[74]***blocks.3.3.bn3.running_mean:[74]***blocks.3.3.bn3.running_var:[74]***blocks.3.3.bn3.num_batches_tracked:[]***blocks.4.0.conv_pw.weight:[399, 74, 1, 
1]***blocks.4.0.bn1.weight:[399]***blocks.4.0.bn1.bias:[399]***blocks.4.0.bn1.running_mean:[399]***blocks.4.0.bn1.running_var:[399]***blocks.4.0.bn1.num_batches_tracked:[]***blocks.4.0.conv_dw.weight:[399, 1, 5, 5]***blocks.4.0.bn2.weight:[399]***blocks.4.0.bn2.bias:[399]***blocks.4.0.bn2.running_mean:[399]***blocks.4.0.bn2.running_var:[399]***blocks.4.0.bn2.num_batches_tracked:[]***blocks.4.0.se.conv_reduce.weight:[20, 399, 1, 1]***blocks.4.0.se.conv_reduce.bias:[20]***blocks.4.0.se.conv_expand.weight:[399, 20, 1, 1]***blocks.4.0.se.conv_expand.bias:[399]***blocks.4.0.conv_pwl.weight:[67, 399, 1, 1]***blocks.4.0.bn3.weight:[67]***blocks.4.0.bn3.bias:[67]***blocks.4.0.bn3.running_mean:[67]***blocks.4.0.bn3.running_var:[67]***blocks.4.0.bn3.num_batches_tracked:[]***blocks.4.1.conv_pw.weight:[201, 67, 1, 1]***blocks.4.1.bn1.weight:[201]***blocks.4.1.bn1.bias:[201]***blocks.4.1.bn1.running_mean:[201]***blocks.4.1.bn1.running_var:[201]***blocks.4.1.bn1.num_batches_tracked:[]***blocks.4.1.conv_dw.weight:[201, 1, 5, 5]***blocks.4.1.bn2.weight:[201]***blocks.4.1.bn2.bias:[201]***blocks.4.1.bn2.running_mean:[201]***blocks.4.1.bn2.running_var:[201]***blocks.4.1.bn2.num_batches_tracked:[]***blocks.4.1.se.conv_reduce.weight:[28, 201, 1, 1]***blocks.4.1.se.conv_reduce.bias:[28]***blocks.4.1.se.conv_expand.weight:[201, 28, 1, 1]***blocks.4.1.se.conv_expand.bias:[201]***blocks.4.1.conv_pwl.weight:[67, 201, 1, 1]***blocks.4.1.bn3.weight:[67]***blocks.4.1.bn3.bias:[67]***blocks.4.1.bn3.running_mean:[67]***blocks.4.1.bn3.running_var:[67]***blocks.4.1.bn3.num_batches_tracked:[]***blocks.4.2.conv_pw.weight:[160, 67, 1, 1]***blocks.4.2.bn1.weight:[160]***blocks.4.2.bn1.bias:[160]***blocks.4.2.bn1.running_mean:[160]***blocks.4.2.bn1.running_var:[160]***blocks.4.2.bn1.num_batches_tracked:[]***blocks.4.2.conv_dw.weight:[160, 1, 5, 
5]***blocks.4.2.bn2.weight:[160]***blocks.4.2.bn2.bias:[160]***blocks.4.2.bn2.running_mean:[160]***blocks.4.2.bn2.running_var:[160]***blocks.4.2.bn2.num_batches_tracked:[]***blocks.4.2.se.conv_reduce.weight:[28, 160, 1, 1]***blocks.4.2.se.conv_reduce.bias:[28]***blocks.4.2.se.conv_expand.weight:[160, 28, 1, 1]***blocks.4.2.se.conv_expand.bias:[160]***blocks.4.2.conv_pwl.weight:[67, 160, 1, 1]***blocks.4.2.bn3.weight:[67]***blocks.4.2.bn3.bias:[67]***blocks.4.2.bn3.running_mean:[67]***blocks.4.2.bn3.running_var:[67]***blocks.4.2.bn3.num_batches_tracked:[]***blocks.4.3.conv_pw.weight:[213, 67, 1, 1]***blocks.4.3.bn1.weight:[213]***blocks.4.3.bn1.bias:[213]***blocks.4.3.bn1.running_mean:[213]***blocks.4.3.bn1.running_var:[213]***blocks.4.3.bn1.num_batches_tracked:[]***blocks.4.3.conv_dw.weight:[213, 1, 5, 5]***blocks.4.3.bn2.weight:[213]***blocks.4.3.bn2.bias:[213]***blocks.4.3.bn2.running_mean:[213]***blocks.4.3.bn2.running_var:[213]***blocks.4.3.bn2.num_batches_tracked:[]***blocks.4.3.se.conv_reduce.weight:[28, 213, 1, 1]***blocks.4.3.se.conv_reduce.bias:[28]***blocks.4.3.se.conv_expand.weight:[213, 28, 1, 1]***blocks.4.3.se.conv_expand.bias:[213]***blocks.4.3.conv_pwl.weight:[67, 213, 1, 1]***blocks.4.3.bn3.weight:[67]***blocks.4.3.bn3.bias:[67]***blocks.4.3.bn3.running_mean:[67]***blocks.4.3.bn3.running_var:[67]***blocks.4.3.bn3.num_batches_tracked:[]***blocks.5.0.conv_pw.weight:[637, 67, 1, 1]***blocks.5.0.bn1.weight:[637]***blocks.5.0.bn1.bias:[637]***blocks.5.0.bn1.running_mean:[637]***blocks.5.0.bn1.running_var:[637]***blocks.5.0.bn1.num_batches_tracked:[]***blocks.5.0.conv_dw.weight:[637, 1, 5, 5]***blocks.5.0.bn2.weight:[637]***blocks.5.0.bn2.bias:[637]***blocks.5.0.bn2.running_mean:[637]***blocks.5.0.bn2.running_var:[637]***blocks.5.0.bn2.num_batches_tracked:[]***blocks.5.0.se.conv_reduce.weight:[27, 637, 1, 1]***blocks.5.0.se.conv_reduce.bias:[27]***blocks.5.0.se.conv_expand.weight:[637, 27, 1, 
1]***blocks.5.0.se.conv_expand.bias:[637]***blocks.5.0.conv_pwl.weight:[192, 637, 1, 1]***blocks.5.0.bn3.weight:[192]***blocks.5.0.bn3.bias:[192]***blocks.5.0.bn3.running_mean:[192]***blocks.5.0.bn3.running_var:[192]***blocks.5.0.bn3.num_batches_tracked:[]***blocks.5.1.conv_pw.weight:[806, 192, 1, 1]***blocks.5.1.bn1.weight:[806]***blocks.5.1.bn1.bias:[806]***blocks.5.1.bn1.running_mean:[806]***blocks.5.1.bn1.running_var:[806]***blocks.5.1.bn1.num_batches_tracked:[]***blocks.5.1.conv_dw.weight:[806, 1, 5, 5]***blocks.5.1.bn2.weight:[806]***blocks.5.1.bn2.bias:[806]***blocks.5.1.bn2.running_mean:[806]***blocks.5.1.bn2.running_var:[806]***blocks.5.1.bn2.num_batches_tracked:[]***blocks.5.1.se.conv_reduce.weight:[48, 806, 1, 1]***blocks.5.1.se.conv_reduce.bias:[48]***blocks.5.1.se.conv_expand.weight:[806, 48, 1, 1]***blocks.5.1.se.conv_expand.bias:[806]***blocks.5.1.conv_pwl.weight:[192, 806, 1, 1]***blocks.5.1.bn3.weight:[192]***blocks.5.1.bn3.bias:[192]***blocks.5.1.bn3.running_mean:[192]***blocks.5.1.bn3.running_var:[192]***blocks.5.1.bn3.num_batches_tracked:[]***blocks.5.2.conv_pw.weight:[798, 192, 1, 1]***blocks.5.2.bn1.weight:[798]***blocks.5.2.bn1.bias:[798]***blocks.5.2.bn1.running_mean:[798]***blocks.5.2.bn1.running_var:[798]***blocks.5.2.bn1.num_batches_tracked:[]***blocks.5.2.conv_dw.weight:[798, 1, 5, 5]***blocks.5.2.bn2.weight:[798]***blocks.5.2.bn2.bias:[798]***blocks.5.2.bn2.running_mean:[798]***blocks.5.2.bn2.running_var:[798]***blocks.5.2.bn2.num_batches_tracked:[]***blocks.5.2.se.conv_reduce.weight:[48, 798, 1, 1]***blocks.5.2.se.conv_reduce.bias:[48]***blocks.5.2.se.conv_expand.weight:[798, 48, 1, 1]***blocks.5.2.se.conv_expand.bias:[798]***blocks.5.2.conv_pwl.weight:[192, 798, 1, 1]***blocks.5.2.bn3.weight:[192]***blocks.5.2.bn3.bias:[192]***blocks.5.2.bn3.running_mean:[192]***blocks.5.2.bn3.running_var:[192]***blocks.5.2.bn3.num_batches_tracked:[]***blocks.5.3.conv_pw.weight:[891, 192, 1, 
1]***blocks.5.3.bn1.weight:[891]***blocks.5.3.bn1.bias:[891]***blocks.5.3.bn1.running_mean:[891]***blocks.5.3.bn1.running_var:[891]***blocks.5.3.bn1.num_batches_tracked:[]***blocks.5.3.conv_dw.weight:[891, 1, 5, 5]***blocks.5.3.bn2.weight:[891]***blocks.5.3.bn2.bias:[891]***blocks.5.3.bn2.running_mean:[891]***blocks.5.3.bn2.running_var:[891]***blocks.5.3.bn2.num_batches_tracked:[]***blocks.5.3.se.conv_reduce.weight:[48, 891, 1, 1]***blocks.5.3.se.conv_reduce.bias:[48]***blocks.5.3.se.conv_expand.weight:[891, 48, 1, 1]***blocks.5.3.se.conv_expand.bias:[891]***blocks.5.3.conv_pwl.weight:[192, 891, 1, 1]***blocks.5.3.bn3.weight:[192]***blocks.5.3.bn3.bias:[192]***blocks.5.3.bn3.running_mean:[192]***blocks.5.3.bn3.running_var:[192]***blocks.5.3.bn3.num_batches_tracked:[]***blocks.5.4.conv_pw.weight:[990, 192, 1, 1]***blocks.5.4.bn1.weight:[990]***blocks.5.4.bn1.bias:[990]***blocks.5.4.bn1.running_mean:[990]***blocks.5.4.bn1.running_var:[990]***blocks.5.4.bn1.num_batches_tracked:[]***blocks.5.4.conv_dw.weight:[990, 1, 5, 5]***blocks.5.4.bn2.weight:[990]***blocks.5.4.bn2.bias:[990]***blocks.5.4.bn2.running_mean:[990]***blocks.5.4.bn2.running_var:[990]***blocks.5.4.bn2.num_batches_tracked:[]***blocks.5.4.se.conv_reduce.weight:[48, 990, 1, 1]***blocks.5.4.se.conv_reduce.bias:[48]***blocks.5.4.se.conv_expand.weight:[990, 48, 1, 1]***blocks.5.4.se.conv_expand.bias:[990]***blocks.5.4.conv_pwl.weight:[192, 990, 1, 1]***blocks.5.4.bn3.weight:[192]***blocks.5.4.bn3.bias:[192]***blocks.5.4.bn3.running_mean:[192]***blocks.5.4.bn3.running_var:[192]***blocks.5.4.bn3.num_batches_tracked:[]***blocks.6.0.conv_pw.weight:[1152, 192, 1, 1]***blocks.6.0.bn1.weight:[1152]***blocks.6.0.bn1.bias:[1152]***blocks.6.0.bn1.running_mean:[1152]***blocks.6.0.bn1.running_var:[1152]***blocks.6.0.bn1.num_batches_tracked:[]***blocks.6.0.conv_dw.weight:[1152, 1, 3, 
3]***blocks.6.0.bn2.weight:[1152]***blocks.6.0.bn2.bias:[1152]***blocks.6.0.bn2.running_mean:[1152]***blocks.6.0.bn2.running_var:[1152]***blocks.6.0.bn2.num_batches_tracked:[]***blocks.6.0.se.conv_reduce.weight:[48, 1152, 1, 1]***blocks.6.0.se.conv_reduce.bias:[48]***blocks.6.0.se.conv_expand.weight:[1152, 48, 1, 1]***blocks.6.0.se.conv_expand.bias:[1152]***blocks.6.0.conv_pwl.weight:[320, 1152, 1, 1]***blocks.6.0.bn3.weight:[320]***blocks.6.0.bn3.bias:[320]***blocks.6.0.bn3.running_mean:[320]***blocks.6.0.bn3.running_var:[320]***blocks.6.0.bn3.num_batches_tracked:[]***blocks.6.1.conv_pw.weight:[1912, 320, 1, 1]***blocks.6.1.bn1.weight:[1912]***blocks.6.1.bn1.bias:[1912]***blocks.6.1.bn1.running_mean:[1912]***blocks.6.1.bn1.running_var:[1912]***blocks.6.1.bn1.num_batches_tracked:[]***blocks.6.1.conv_dw.weight:[1912, 1, 3, 3]***blocks.6.1.bn2.weight:[1912]***blocks.6.1.bn2.bias:[1912]***blocks.6.1.bn2.running_mean:[1912]***blocks.6.1.bn2.running_var:[1912]***blocks.6.1.bn2.num_batches_tracked:[]***blocks.6.1.se.conv_reduce.weight:[80, 1912, 1, 1]***blocks.6.1.se.conv_reduce.bias:[80]***blocks.6.1.se.conv_expand.weight:[1912, 80, 1, 1]***blocks.6.1.se.conv_expand.bias:[1912]***blocks.6.1.conv_pwl.weight:[320, 1912, 1, 1]***blocks.6.1.bn3.weight:[320]***blocks.6.1.bn3.bias:[320]***blocks.6.1.bn3.running_mean:[320]***blocks.6.1.bn3.running_var:[320]***blocks.6.1.bn3.num_batches_tracked:[]***conv_head.weight:[1280, 320, 1, 1]***bn2.weight:[1280]***bn2.bias:[1280]***bn2.running_mean:[1280]***bn2.running_var:[1280]***bn2.num_batches_tracked:[]***classifier.weight:[1000, 1280]***classifier.bias:[1000]"
  },
  {
    "path": "timm/models/_pruned/efficientnet_b2_pruned.txt",
    "content": "conv_stem.weight:[32, 3, 3, 3]***bn1.weight:[32]***bn1.bias:[32]***bn1.running_mean:[32]***bn1.running_var:[32]***bn1.num_batches_tracked:[]***blocks.0.0.conv_dw.weight:[32, 1, 3, 3]***blocks.0.0.bn1.weight:[32]***blocks.0.0.bn1.bias:[32]***blocks.0.0.bn1.running_mean:[32]***blocks.0.0.bn1.running_var:[32]***blocks.0.0.bn1.num_batches_tracked:[]***blocks.0.0.se.conv_reduce.weight:[8, 32, 1, 1]***blocks.0.0.se.conv_reduce.bias:[8]***blocks.0.0.se.conv_expand.weight:[32, 8, 1, 1]***blocks.0.0.se.conv_expand.bias:[32]***blocks.0.0.conv_pw.weight:[16, 32, 1, 1]***blocks.0.0.bn2.weight:[16]***blocks.0.0.bn2.bias:[16]***blocks.0.0.bn2.running_mean:[16]***blocks.0.0.bn2.running_var:[16]***blocks.0.0.bn2.num_batches_tracked:[]***blocks.0.1.conv_dw.weight:[16, 1, 3, 3]***blocks.0.1.bn1.weight:[16]***blocks.0.1.bn1.bias:[16]***blocks.0.1.bn1.running_mean:[16]***blocks.0.1.bn1.running_var:[16]***blocks.0.1.bn1.num_batches_tracked:[]***blocks.0.1.se.conv_reduce.weight:[4, 16, 1, 1]***blocks.0.1.se.conv_reduce.bias:[4]***blocks.0.1.se.conv_expand.weight:[16, 4, 1, 1]***blocks.0.1.se.conv_expand.bias:[16]***blocks.0.1.conv_pw.weight:[16, 16, 1, 1]***blocks.0.1.bn2.weight:[16]***blocks.0.1.bn2.bias:[16]***blocks.0.1.bn2.running_mean:[16]***blocks.0.1.bn2.running_var:[16]***blocks.0.1.bn2.num_batches_tracked:[]***blocks.1.0.conv_pw.weight:[54, 16, 1, 1]***blocks.1.0.bn1.weight:[54]***blocks.1.0.bn1.bias:[54]***blocks.1.0.bn1.running_mean:[54]***blocks.1.0.bn1.running_var:[54]***blocks.1.0.bn1.num_batches_tracked:[]***blocks.1.0.conv_dw.weight:[54, 1, 3, 3]***blocks.1.0.bn2.weight:[54]***blocks.1.0.bn2.bias:[54]***blocks.1.0.bn2.running_mean:[54]***blocks.1.0.bn2.running_var:[54]***blocks.1.0.bn2.num_batches_tracked:[]***blocks.1.0.se.conv_reduce.weight:[4, 54, 1, 1]***blocks.1.0.se.conv_reduce.bias:[4]***blocks.1.0.se.conv_expand.weight:[54, 4, 1, 1]***blocks.1.0.se.conv_expand.bias:[54]***blocks.1.0.conv_pwl.weight:[17, 54, 1, 
1]***blocks.1.0.bn3.weight:[17]***blocks.1.0.bn3.bias:[17]***blocks.1.0.bn3.running_mean:[17]***blocks.1.0.bn3.running_var:[17]***blocks.1.0.bn3.num_batches_tracked:[]***blocks.1.1.conv_pw.weight:[69, 17, 1, 1]***blocks.1.1.bn1.weight:[69]***blocks.1.1.bn1.bias:[69]***blocks.1.1.bn1.running_mean:[69]***blocks.1.1.bn1.running_var:[69]***blocks.1.1.bn1.num_batches_tracked:[]***blocks.1.1.conv_dw.weight:[69, 1, 3, 3]***blocks.1.1.bn2.weight:[69]***blocks.1.1.bn2.bias:[69]***blocks.1.1.bn2.running_mean:[69]***blocks.1.1.bn2.running_var:[69]***blocks.1.1.bn2.num_batches_tracked:[]***blocks.1.1.se.conv_reduce.weight:[6, 69, 1, 1]***blocks.1.1.se.conv_reduce.bias:[6]***blocks.1.1.se.conv_expand.weight:[69, 6, 1, 1]***blocks.1.1.se.conv_expand.bias:[69]***blocks.1.1.conv_pwl.weight:[17, 69, 1, 1]***blocks.1.1.bn3.weight:[17]***blocks.1.1.bn3.bias:[17]***blocks.1.1.bn3.running_mean:[17]***blocks.1.1.bn3.running_var:[17]***blocks.1.1.bn3.num_batches_tracked:[]***blocks.1.2.conv_pw.weight:[61, 17, 1, 1]***blocks.1.2.bn1.weight:[61]***blocks.1.2.bn1.bias:[61]***blocks.1.2.bn1.running_mean:[61]***blocks.1.2.bn1.running_var:[61]***blocks.1.2.bn1.num_batches_tracked:[]***blocks.1.2.conv_dw.weight:[61, 1, 3, 3]***blocks.1.2.bn2.weight:[61]***blocks.1.2.bn2.bias:[61]***blocks.1.2.bn2.running_mean:[61]***blocks.1.2.bn2.running_var:[61]***blocks.1.2.bn2.num_batches_tracked:[]***blocks.1.2.se.conv_reduce.weight:[6, 61, 1, 1]***blocks.1.2.se.conv_reduce.bias:[6]***blocks.1.2.se.conv_expand.weight:[61, 6, 1, 1]***blocks.1.2.se.conv_expand.bias:[61]***blocks.1.2.conv_pwl.weight:[17, 61, 1, 1]***blocks.1.2.bn3.weight:[17]***blocks.1.2.bn3.bias:[17]***blocks.1.2.bn3.running_mean:[17]***blocks.1.2.bn3.running_var:[17]***blocks.1.2.bn3.num_batches_tracked:[]***blocks.2.0.conv_pw.weight:[86, 17, 1, 
1]***blocks.2.0.bn1.weight:[86]***blocks.2.0.bn1.bias:[86]***blocks.2.0.bn1.running_mean:[86]***blocks.2.0.bn1.running_var:[86]***blocks.2.0.bn1.num_batches_tracked:[]***blocks.2.0.conv_dw.weight:[86, 1, 5, 5]***blocks.2.0.bn2.weight:[86]***blocks.2.0.bn2.bias:[86]***blocks.2.0.bn2.running_mean:[86]***blocks.2.0.bn2.running_var:[86]***blocks.2.0.bn2.num_batches_tracked:[]***blocks.2.0.se.conv_reduce.weight:[6, 86, 1, 1]***blocks.2.0.se.conv_reduce.bias:[6]***blocks.2.0.se.conv_expand.weight:[86, 6, 1, 1]***blocks.2.0.se.conv_expand.bias:[86]***blocks.2.0.conv_pwl.weight:[42, 86, 1, 1]***blocks.2.0.bn3.weight:[42]***blocks.2.0.bn3.bias:[42]***blocks.2.0.bn3.running_mean:[42]***blocks.2.0.bn3.running_var:[42]***blocks.2.0.bn3.num_batches_tracked:[]***blocks.2.1.conv_pw.weight:[72, 42, 1, 1]***blocks.2.1.bn1.weight:[72]***blocks.2.1.bn1.bias:[72]***blocks.2.1.bn1.running_mean:[72]***blocks.2.1.bn1.running_var:[72]***blocks.2.1.bn1.num_batches_tracked:[]***blocks.2.1.conv_dw.weight:[72, 1, 5, 5]***blocks.2.1.bn2.weight:[72]***blocks.2.1.bn2.bias:[72]***blocks.2.1.bn2.running_mean:[72]***blocks.2.1.bn2.running_var:[72]***blocks.2.1.bn2.num_batches_tracked:[]***blocks.2.1.se.conv_reduce.weight:[12, 72, 1, 1]***blocks.2.1.se.conv_reduce.bias:[12]***blocks.2.1.se.conv_expand.weight:[72, 12, 1, 1]***blocks.2.1.se.conv_expand.bias:[72]***blocks.2.1.conv_pwl.weight:[42, 72, 1, 1]***blocks.2.1.bn3.weight:[42]***blocks.2.1.bn3.bias:[42]***blocks.2.1.bn3.running_mean:[42]***blocks.2.1.bn3.running_var:[42]***blocks.2.1.bn3.num_batches_tracked:[]***blocks.2.2.conv_pw.weight:[98, 42, 1, 1]***blocks.2.2.bn1.weight:[98]***blocks.2.2.bn1.bias:[98]***blocks.2.2.bn1.running_mean:[98]***blocks.2.2.bn1.running_var:[98]***blocks.2.2.bn1.num_batches_tracked:[]***blocks.2.2.conv_dw.weight:[98, 1, 5, 
5]***blocks.2.2.bn2.weight:[98]***blocks.2.2.bn2.bias:[98]***blocks.2.2.bn2.running_mean:[98]***blocks.2.2.bn2.running_var:[98]***blocks.2.2.bn2.num_batches_tracked:[]***blocks.2.2.se.conv_reduce.weight:[12, 98, 1, 1]***blocks.2.2.se.conv_reduce.bias:[12]***blocks.2.2.se.conv_expand.weight:[98, 12, 1, 1]***blocks.2.2.se.conv_expand.bias:[98]***blocks.2.2.conv_pwl.weight:[42, 98, 1, 1]***blocks.2.2.bn3.weight:[42]***blocks.2.2.bn3.bias:[42]***blocks.2.2.bn3.running_mean:[42]***blocks.2.2.bn3.running_var:[42]***blocks.2.2.bn3.num_batches_tracked:[]***blocks.3.0.conv_pw.weight:[245, 42, 1, 1]***blocks.3.0.bn1.weight:[245]***blocks.3.0.bn1.bias:[245]***blocks.3.0.bn1.running_mean:[245]***blocks.3.0.bn1.running_var:[245]***blocks.3.0.bn1.num_batches_tracked:[]***blocks.3.0.conv_dw.weight:[245, 1, 3, 3]***blocks.3.0.bn2.weight:[245]***blocks.3.0.bn2.bias:[245]***blocks.3.0.bn2.running_mean:[245]***blocks.3.0.bn2.running_var:[245]***blocks.3.0.bn2.num_batches_tracked:[]***blocks.3.0.se.conv_reduce.weight:[12, 245, 1, 1]***blocks.3.0.se.conv_reduce.bias:[12]***blocks.3.0.se.conv_expand.weight:[245, 12, 1, 1]***blocks.3.0.se.conv_expand.bias:[245]***blocks.3.0.conv_pwl.weight:[85, 245, 1, 1]***blocks.3.0.bn3.weight:[85]***blocks.3.0.bn3.bias:[85]***blocks.3.0.bn3.running_mean:[85]***blocks.3.0.bn3.running_var:[85]***blocks.3.0.bn3.num_batches_tracked:[]***blocks.3.1.conv_pw.weight:[274, 85, 1, 1]***blocks.3.1.bn1.weight:[274]***blocks.3.1.bn1.bias:[274]***blocks.3.1.bn1.running_mean:[274]***blocks.3.1.bn1.running_var:[274]***blocks.3.1.bn1.num_batches_tracked:[]***blocks.3.1.conv_dw.weight:[274, 1, 3, 3]***blocks.3.1.bn2.weight:[274]***blocks.3.1.bn2.bias:[274]***blocks.3.1.bn2.running_mean:[274]***blocks.3.1.bn2.running_var:[274]***blocks.3.1.bn2.num_batches_tracked:[]***blocks.3.1.se.conv_reduce.weight:[22, 274, 1, 1]***blocks.3.1.se.conv_reduce.bias:[22]***blocks.3.1.se.conv_expand.weight:[274, 22, 1, 
1]***blocks.3.1.se.conv_expand.bias:[274]***blocks.3.1.conv_pwl.weight:[85, 274, 1, 1]***blocks.3.1.bn3.weight:[85]***blocks.3.1.bn3.bias:[85]***blocks.3.1.bn3.running_mean:[85]***blocks.3.1.bn3.running_var:[85]***blocks.3.1.bn3.num_batches_tracked:[]***blocks.3.2.conv_pw.weight:[254, 85, 1, 1]***blocks.3.2.bn1.weight:[254]***blocks.3.2.bn1.bias:[254]***blocks.3.2.bn1.running_mean:[254]***blocks.3.2.bn1.running_var:[254]***blocks.3.2.bn1.num_batches_tracked:[]***blocks.3.2.conv_dw.weight:[254, 1, 3, 3]***blocks.3.2.bn2.weight:[254]***blocks.3.2.bn2.bias:[254]***blocks.3.2.bn2.running_mean:[254]***blocks.3.2.bn2.running_var:[254]***blocks.3.2.bn2.num_batches_tracked:[]***blocks.3.2.se.conv_reduce.weight:[22, 254, 1, 1]***blocks.3.2.se.conv_reduce.bias:[22]***blocks.3.2.se.conv_expand.weight:[254, 22, 1, 1]***blocks.3.2.se.conv_expand.bias:[254]***blocks.3.2.conv_pwl.weight:[85, 254, 1, 1]***blocks.3.2.bn3.weight:[85]***blocks.3.2.bn3.bias:[85]***blocks.3.2.bn3.running_mean:[85]***blocks.3.2.bn3.running_var:[85]***blocks.3.2.bn3.num_batches_tracked:[]***blocks.3.3.conv_pw.weight:[292, 85, 1, 1]***blocks.3.3.bn1.weight:[292]***blocks.3.3.bn1.bias:[292]***blocks.3.3.bn1.running_mean:[292]***blocks.3.3.bn1.running_var:[292]***blocks.3.3.bn1.num_batches_tracked:[]***blocks.3.3.conv_dw.weight:[292, 1, 3, 3]***blocks.3.3.bn2.weight:[292]***blocks.3.3.bn2.bias:[292]***blocks.3.3.bn2.running_mean:[292]***blocks.3.3.bn2.running_var:[292]***blocks.3.3.bn2.num_batches_tracked:[]***blocks.3.3.se.conv_reduce.weight:[22, 292, 1, 1]***blocks.3.3.se.conv_reduce.bias:[22]***blocks.3.3.se.conv_expand.weight:[292, 22, 1, 1]***blocks.3.3.se.conv_expand.bias:[292]***blocks.3.3.conv_pwl.weight:[85, 292, 1, 1]***blocks.3.3.bn3.weight:[85]***blocks.3.3.bn3.bias:[85]***blocks.3.3.bn3.running_mean:[85]***blocks.3.3.bn3.running_var:[85]***blocks.3.3.bn3.num_batches_tracked:[]***blocks.4.0.conv_pw.weight:[502, 85, 1, 
1]***blocks.4.0.bn1.weight:[502]***blocks.4.0.bn1.bias:[502]***blocks.4.0.bn1.running_mean:[502]***blocks.4.0.bn1.running_var:[502]***blocks.4.0.bn1.num_batches_tracked:[]***blocks.4.0.conv_dw.weight:[502, 1, 5, 5]***blocks.4.0.bn2.weight:[502]***blocks.4.0.bn2.bias:[502]***blocks.4.0.bn2.running_mean:[502]***blocks.4.0.bn2.running_var:[502]***blocks.4.0.bn2.num_batches_tracked:[]***blocks.4.0.se.conv_reduce.weight:[22, 502, 1, 1]***blocks.4.0.se.conv_reduce.bias:[22]***blocks.4.0.se.conv_expand.weight:[502, 22, 1, 1]***blocks.4.0.se.conv_expand.bias:[502]***blocks.4.0.conv_pwl.weight:[116, 502, 1, 1]***blocks.4.0.bn3.weight:[116]***blocks.4.0.bn3.bias:[116]***blocks.4.0.bn3.running_mean:[116]***blocks.4.0.bn3.running_var:[116]***blocks.4.0.bn3.num_batches_tracked:[]***blocks.4.1.conv_pw.weight:[315, 116, 1, 1]***blocks.4.1.bn1.weight:[315]***blocks.4.1.bn1.bias:[315]***blocks.4.1.bn1.running_mean:[315]***blocks.4.1.bn1.running_var:[315]***blocks.4.1.bn1.num_batches_tracked:[]***blocks.4.1.conv_dw.weight:[315, 1, 5, 5]***blocks.4.1.bn2.weight:[315]***blocks.4.1.bn2.bias:[315]***blocks.4.1.bn2.running_mean:[315]***blocks.4.1.bn2.running_var:[315]***blocks.4.1.bn2.num_batches_tracked:[]***blocks.4.1.se.conv_reduce.weight:[30, 315, 1, 1]***blocks.4.1.se.conv_reduce.bias:[30]***blocks.4.1.se.conv_expand.weight:[315, 30, 1, 1]***blocks.4.1.se.conv_expand.bias:[315]***blocks.4.1.conv_pwl.weight:[116, 315, 1, 1]***blocks.4.1.bn3.weight:[116]***blocks.4.1.bn3.bias:[116]***blocks.4.1.bn3.running_mean:[116]***blocks.4.1.bn3.running_var:[116]***blocks.4.1.bn3.num_batches_tracked:[]***blocks.4.2.conv_pw.weight:[354, 116, 1, 1]***blocks.4.2.bn1.weight:[354]***blocks.4.2.bn1.bias:[354]***blocks.4.2.bn1.running_mean:[354]***blocks.4.2.bn1.running_var:[354]***blocks.4.2.bn1.num_batches_tracked:[]***blocks.4.2.conv_dw.weight:[354, 1, 5, 
5]***blocks.4.2.bn2.weight:[354]***blocks.4.2.bn2.bias:[354]***blocks.4.2.bn2.running_mean:[354]***blocks.4.2.bn2.running_var:[354]***blocks.4.2.bn2.num_batches_tracked:[]***blocks.4.2.se.conv_reduce.weight:[30, 354, 1, 1]***blocks.4.2.se.conv_reduce.bias:[30]***blocks.4.2.se.conv_expand.weight:[354, 30, 1, 1]***blocks.4.2.se.conv_expand.bias:[354]***blocks.4.2.conv_pwl.weight:[116, 354, 1, 1]***blocks.4.2.bn3.weight:[116]***blocks.4.2.bn3.bias:[116]***blocks.4.2.bn3.running_mean:[116]***blocks.4.2.bn3.running_var:[116]***blocks.4.2.bn3.num_batches_tracked:[]***blocks.4.3.conv_pw.weight:[443, 116, 1, 1]***blocks.4.3.bn1.weight:[443]***blocks.4.3.bn1.bias:[443]***blocks.4.3.bn1.running_mean:[443]***blocks.4.3.bn1.running_var:[443]***blocks.4.3.bn1.num_batches_tracked:[]***blocks.4.3.conv_dw.weight:[443, 1, 5, 5]***blocks.4.3.bn2.weight:[443]***blocks.4.3.bn2.bias:[443]***blocks.4.3.bn2.running_mean:[443]***blocks.4.3.bn2.running_var:[443]***blocks.4.3.bn2.num_batches_tracked:[]***blocks.4.3.se.conv_reduce.weight:[30, 443, 1, 1]***blocks.4.3.se.conv_reduce.bias:[30]***blocks.4.3.se.conv_expand.weight:[443, 30, 1, 1]***blocks.4.3.se.conv_expand.bias:[443]***blocks.4.3.conv_pwl.weight:[116, 443, 1, 1]***blocks.4.3.bn3.weight:[116]***blocks.4.3.bn3.bias:[116]***blocks.4.3.bn3.running_mean:[116]***blocks.4.3.bn3.running_var:[116]***blocks.4.3.bn3.num_batches_tracked:[]***blocks.5.0.conv_pw.weight:[719, 116, 1, 1]***blocks.5.0.bn1.weight:[719]***blocks.5.0.bn1.bias:[719]***blocks.5.0.bn1.running_mean:[719]***blocks.5.0.bn1.running_var:[719]***blocks.5.0.bn1.num_batches_tracked:[]***blocks.5.0.conv_dw.weight:[719, 1, 5, 5]***blocks.5.0.bn2.weight:[719]***blocks.5.0.bn2.bias:[719]***blocks.5.0.bn2.running_mean:[719]***blocks.5.0.bn2.running_var:[719]***blocks.5.0.bn2.num_batches_tracked:[]***blocks.5.0.se.conv_reduce.weight:[30, 719, 1, 1]***blocks.5.0.se.conv_reduce.bias:[30]***blocks.5.0.se.conv_expand.weight:[719, 30, 1, 
1]***blocks.5.0.se.conv_expand.bias:[719]***blocks.5.0.conv_pwl.weight:[208, 719, 1, 1]***blocks.5.0.bn3.weight:[208]***blocks.5.0.bn3.bias:[208]***blocks.5.0.bn3.running_mean:[208]***blocks.5.0.bn3.running_var:[208]***blocks.5.0.bn3.num_batches_tracked:[]***blocks.5.1.conv_pw.weight:[1148, 208, 1, 1]***blocks.5.1.bn1.weight:[1148]***blocks.5.1.bn1.bias:[1148]***blocks.5.1.bn1.running_mean:[1148]***blocks.5.1.bn1.running_var:[1148]***blocks.5.1.bn1.num_batches_tracked:[]***blocks.5.1.conv_dw.weight:[1148, 1, 5, 5]***blocks.5.1.bn2.weight:[1148]***blocks.5.1.bn2.bias:[1148]***blocks.5.1.bn2.running_mean:[1148]***blocks.5.1.bn2.running_var:[1148]***blocks.5.1.bn2.num_batches_tracked:[]***blocks.5.1.se.conv_reduce.weight:[52, 1148, 1, 1]***blocks.5.1.se.conv_reduce.bias:[52]***blocks.5.1.se.conv_expand.weight:[1148, 52, 1, 1]***blocks.5.1.se.conv_expand.bias:[1148]***blocks.5.1.conv_pwl.weight:[208, 1148, 1, 1]***blocks.5.1.bn3.weight:[208]***blocks.5.1.bn3.bias:[208]***blocks.5.1.bn3.running_mean:[208]***blocks.5.1.bn3.running_var:[208]***blocks.5.1.bn3.num_batches_tracked:[]***blocks.5.2.conv_pw.weight:[1160, 208, 1, 1]***blocks.5.2.bn1.weight:[1160]***blocks.5.2.bn1.bias:[1160]***blocks.5.2.bn1.running_mean:[1160]***blocks.5.2.bn1.running_var:[1160]***blocks.5.2.bn1.num_batches_tracked:[]***blocks.5.2.conv_dw.weight:[1160, 1, 5, 5]***blocks.5.2.bn2.weight:[1160]***blocks.5.2.bn2.bias:[1160]***blocks.5.2.bn2.running_mean:[1160]***blocks.5.2.bn2.running_var:[1160]***blocks.5.2.bn2.num_batches_tracked:[]***blocks.5.2.se.conv_reduce.weight:[52, 1160, 1, 1]***blocks.5.2.se.conv_reduce.bias:[52]***blocks.5.2.se.conv_expand.weight:[1160, 52, 1, 1]***blocks.5.2.se.conv_expand.bias:[1160]***blocks.5.2.conv_pwl.weight:[208, 1160, 1, 1]***blocks.5.2.bn3.weight:[208]***blocks.5.2.bn3.bias:[208]***blocks.5.2.bn3.running_mean:[208]***blocks.5.2.bn3.running_var:[208]***blocks.5.2.bn3.num_batches_tracked:[]***blocks.5.3.conv_pw.weight:[1182, 208, 1, 
1]***blocks.5.3.bn1.weight:[1182]***blocks.5.3.bn1.bias:[1182]***blocks.5.3.bn1.running_mean:[1182]***blocks.5.3.bn1.running_var:[1182]***blocks.5.3.bn1.num_batches_tracked:[]***blocks.5.3.conv_dw.weight:[1182, 1, 5, 5]***blocks.5.3.bn2.weight:[1182]***blocks.5.3.bn2.bias:[1182]***blocks.5.3.bn2.running_mean:[1182]***blocks.5.3.bn2.running_var:[1182]***blocks.5.3.bn2.num_batches_tracked:[]***blocks.5.3.se.conv_reduce.weight:[52, 1182, 1, 1]***blocks.5.3.se.conv_reduce.bias:[52]***blocks.5.3.se.conv_expand.weight:[1182, 52, 1, 1]***blocks.5.3.se.conv_expand.bias:[1182]***blocks.5.3.conv_pwl.weight:[208, 1182, 1, 1]***blocks.5.3.bn3.weight:[208]***blocks.5.3.bn3.bias:[208]***blocks.5.3.bn3.running_mean:[208]***blocks.5.3.bn3.running_var:[208]***blocks.5.3.bn3.num_batches_tracked:[]***blocks.5.4.conv_pw.weight:[1228, 208, 1, 1]***blocks.5.4.bn1.weight:[1228]***blocks.5.4.bn1.bias:[1228]***blocks.5.4.bn1.running_mean:[1228]***blocks.5.4.bn1.running_var:[1228]***blocks.5.4.bn1.num_batches_tracked:[]***blocks.5.4.conv_dw.weight:[1228, 1, 5, 5]***blocks.5.4.bn2.weight:[1228]***blocks.5.4.bn2.bias:[1228]***blocks.5.4.bn2.running_mean:[1228]***blocks.5.4.bn2.running_var:[1228]***blocks.5.4.bn2.num_batches_tracked:[]***blocks.5.4.se.conv_reduce.weight:[52, 1228, 1, 1]***blocks.5.4.se.conv_reduce.bias:[52]***blocks.5.4.se.conv_expand.weight:[1228, 52, 1, 1]***blocks.5.4.se.conv_expand.bias:[1228]***blocks.5.4.conv_pwl.weight:[208, 1228, 1, 1]***blocks.5.4.bn3.weight:[208]***blocks.5.4.bn3.bias:[208]***blocks.5.4.bn3.running_mean:[208]***blocks.5.4.bn3.running_var:[208]***blocks.5.4.bn3.num_batches_tracked:[]***blocks.6.0.conv_pw.weight:[1248, 208, 1, 1]***blocks.6.0.bn1.weight:[1248]***blocks.6.0.bn1.bias:[1248]***blocks.6.0.bn1.running_mean:[1248]***blocks.6.0.bn1.running_var:[1248]***blocks.6.0.bn1.num_batches_tracked:[]***blocks.6.0.conv_dw.weight:[1248, 1, 3, 
3]***blocks.6.0.bn2.weight:[1248]***blocks.6.0.bn2.bias:[1248]***blocks.6.0.bn2.running_mean:[1248]***blocks.6.0.bn2.running_var:[1248]***blocks.6.0.bn2.num_batches_tracked:[]***blocks.6.0.se.conv_reduce.weight:[52, 1248, 1, 1]***blocks.6.0.se.conv_reduce.bias:[52]***blocks.6.0.se.conv_expand.weight:[1248, 52, 1, 1]***blocks.6.0.se.conv_expand.bias:[1248]***blocks.6.0.conv_pwl.weight:[352, 1248, 1, 1]***blocks.6.0.bn3.weight:[352]***blocks.6.0.bn3.bias:[352]***blocks.6.0.bn3.running_mean:[352]***blocks.6.0.bn3.running_var:[352]***blocks.6.0.bn3.num_batches_tracked:[]***blocks.6.1.conv_pw.weight:[2112, 352, 1, 1]***blocks.6.1.bn1.weight:[2112]***blocks.6.1.bn1.bias:[2112]***blocks.6.1.bn1.running_mean:[2112]***blocks.6.1.bn1.running_var:[2112]***blocks.6.1.bn1.num_batches_tracked:[]***blocks.6.1.conv_dw.weight:[2112, 1, 3, 3]***blocks.6.1.bn2.weight:[2112]***blocks.6.1.bn2.bias:[2112]***blocks.6.1.bn2.running_mean:[2112]***blocks.6.1.bn2.running_var:[2112]***blocks.6.1.bn2.num_batches_tracked:[]***blocks.6.1.se.conv_reduce.weight:[88, 2112, 1, 1]***blocks.6.1.se.conv_reduce.bias:[88]***blocks.6.1.se.conv_expand.weight:[2112, 88, 1, 1]***blocks.6.1.se.conv_expand.bias:[2112]***blocks.6.1.conv_pwl.weight:[352, 2112, 1, 1]***blocks.6.1.bn3.weight:[352]***blocks.6.1.bn3.bias:[352]***blocks.6.1.bn3.running_mean:[352]***blocks.6.1.bn3.running_var:[352]***blocks.6.1.bn3.num_batches_tracked:[]***conv_head.weight:[1408, 352, 1, 1]***bn2.weight:[1408]***bn2.bias:[1408]***bn2.running_mean:[1408]***bn2.running_var:[1408]***bn2.num_batches_tracked:[]***classifier.weight:[1000, 1408]***classifier.bias:[1000]"
  },
  {
    "path": "timm/models/_pruned/efficientnet_b3_pruned.txt",
    "content": "conv_stem.weight:[40, 3, 3, 3]***bn1.weight:[40]***bn1.bias:[40]***bn1.running_mean:[40]***bn1.running_var:[40]***bn1.num_batches_tracked:[]***blocks.0.0.conv_dw.weight:[40, 1, 3, 3]***blocks.0.0.bn1.weight:[40]***blocks.0.0.bn1.bias:[40]***blocks.0.0.bn1.running_mean:[40]***blocks.0.0.bn1.running_var:[40]***blocks.0.0.bn1.num_batches_tracked:[]***blocks.0.0.se.conv_reduce.weight:[10, 40, 1, 1]***blocks.0.0.se.conv_reduce.bias:[10]***blocks.0.0.se.conv_expand.weight:[40, 10, 1, 1]***blocks.0.0.se.conv_expand.bias:[40]***blocks.0.0.conv_pw.weight:[24, 40, 1, 1]***blocks.0.0.bn2.weight:[24]***blocks.0.0.bn2.bias:[24]***blocks.0.0.bn2.running_mean:[24]***blocks.0.0.bn2.running_var:[24]***blocks.0.0.bn2.num_batches_tracked:[]***blocks.0.1.conv_dw.weight:[24, 1, 3, 3]***blocks.0.1.bn1.weight:[24]***blocks.0.1.bn1.bias:[24]***blocks.0.1.bn1.running_mean:[24]***blocks.0.1.bn1.running_var:[24]***blocks.0.1.bn1.num_batches_tracked:[]***blocks.0.1.se.conv_reduce.weight:[6, 24, 1, 1]***blocks.0.1.se.conv_reduce.bias:[6]***blocks.0.1.se.conv_expand.weight:[24, 6, 1, 1]***blocks.0.1.se.conv_expand.bias:[24]***blocks.0.1.conv_pw.weight:[24, 24, 1, 1]***blocks.0.1.bn2.weight:[24]***blocks.0.1.bn2.bias:[24]***blocks.0.1.bn2.running_mean:[24]***blocks.0.1.bn2.running_var:[24]***blocks.0.1.bn2.num_batches_tracked:[]***blocks.1.0.conv_pw.weight:[27, 24, 1, 1]***blocks.1.0.bn1.weight:[27]***blocks.1.0.bn1.bias:[27]***blocks.1.0.bn1.running_mean:[27]***blocks.1.0.bn1.running_var:[27]***blocks.1.0.bn1.num_batches_tracked:[]***blocks.1.0.conv_dw.weight:[27, 1, 3, 3]***blocks.1.0.bn2.weight:[27]***blocks.1.0.bn2.bias:[27]***blocks.1.0.bn2.running_mean:[27]***blocks.1.0.bn2.running_var:[27]***blocks.1.0.bn2.num_batches_tracked:[]***blocks.1.0.se.conv_reduce.weight:[6, 27, 1, 1]***blocks.1.0.se.conv_reduce.bias:[6]***blocks.1.0.se.conv_expand.weight:[27, 6, 1, 1]***blocks.1.0.se.conv_expand.bias:[27]***blocks.1.0.conv_pwl.weight:[12, 27, 1, 
1]***blocks.1.0.bn3.weight:[12]***blocks.1.0.bn3.bias:[12]***blocks.1.0.bn3.running_mean:[12]***blocks.1.0.bn3.running_var:[12]***blocks.1.0.bn3.num_batches_tracked:[]***blocks.1.1.conv_pw.weight:[49, 12, 1, 1]***blocks.1.1.bn1.weight:[49]***blocks.1.1.bn1.bias:[49]***blocks.1.1.bn1.running_mean:[49]***blocks.1.1.bn1.running_var:[49]***blocks.1.1.bn1.num_batches_tracked:[]***blocks.1.1.conv_dw.weight:[49, 1, 3, 3]***blocks.1.1.bn2.weight:[49]***blocks.1.1.bn2.bias:[49]***blocks.1.1.bn2.running_mean:[49]***blocks.1.1.bn2.running_var:[49]***blocks.1.1.bn2.num_batches_tracked:[]***blocks.1.1.se.conv_reduce.weight:[8, 49, 1, 1]***blocks.1.1.se.conv_reduce.bias:[8]***blocks.1.1.se.conv_expand.weight:[49, 8, 1, 1]***blocks.1.1.se.conv_expand.bias:[49]***blocks.1.1.conv_pwl.weight:[12, 49, 1, 1]***blocks.1.1.bn3.weight:[12]***blocks.1.1.bn3.bias:[12]***blocks.1.1.bn3.running_mean:[12]***blocks.1.1.bn3.running_var:[12]***blocks.1.1.bn3.num_batches_tracked:[]***blocks.1.2.conv_pw.weight:[48, 12, 1, 1]***blocks.1.2.bn1.weight:[48]***blocks.1.2.bn1.bias:[48]***blocks.1.2.bn1.running_mean:[48]***blocks.1.2.bn1.running_var:[48]***blocks.1.2.bn1.num_batches_tracked:[]***blocks.1.2.conv_dw.weight:[48, 1, 3, 3]***blocks.1.2.bn2.weight:[48]***blocks.1.2.bn2.bias:[48]***blocks.1.2.bn2.running_mean:[48]***blocks.1.2.bn2.running_var:[48]***blocks.1.2.bn2.num_batches_tracked:[]***blocks.1.2.se.conv_reduce.weight:[8, 48, 1, 1]***blocks.1.2.se.conv_reduce.bias:[8]***blocks.1.2.se.conv_expand.weight:[48, 8, 1, 1]***blocks.1.2.se.conv_expand.bias:[48]***blocks.1.2.conv_pwl.weight:[12, 48, 1, 1]***blocks.1.2.bn3.weight:[12]***blocks.1.2.bn3.bias:[12]***blocks.1.2.bn3.running_mean:[12]***blocks.1.2.bn3.running_var:[12]***blocks.1.2.bn3.num_batches_tracked:[]***blocks.2.0.conv_pw.weight:[83, 12, 1, 
1]***blocks.2.0.bn1.weight:[83]***blocks.2.0.bn1.bias:[83]***blocks.2.0.bn1.running_mean:[83]***blocks.2.0.bn1.running_var:[83]***blocks.2.0.bn1.num_batches_tracked:[]***blocks.2.0.conv_dw.weight:[83, 1, 5, 5]***blocks.2.0.bn2.weight:[83]***blocks.2.0.bn2.bias:[83]***blocks.2.0.bn2.running_mean:[83]***blocks.2.0.bn2.running_var:[83]***blocks.2.0.bn2.num_batches_tracked:[]***blocks.2.0.se.conv_reduce.weight:[8, 83, 1, 1]***blocks.2.0.se.conv_reduce.bias:[8]***blocks.2.0.se.conv_expand.weight:[83, 8, 1, 1]***blocks.2.0.se.conv_expand.bias:[83]***blocks.2.0.conv_pwl.weight:[40, 83, 1, 1]***blocks.2.0.bn3.weight:[40]***blocks.2.0.bn3.bias:[40]***blocks.2.0.bn3.running_mean:[40]***blocks.2.0.bn3.running_var:[40]***blocks.2.0.bn3.num_batches_tracked:[]***blocks.2.1.conv_pw.weight:[90, 40, 1, 1]***blocks.2.1.bn1.weight:[90]***blocks.2.1.bn1.bias:[90]***blocks.2.1.bn1.running_mean:[90]***blocks.2.1.bn1.running_var:[90]***blocks.2.1.bn1.num_batches_tracked:[]***blocks.2.1.conv_dw.weight:[90, 1, 5, 5]***blocks.2.1.bn2.weight:[90]***blocks.2.1.bn2.bias:[90]***blocks.2.1.bn2.running_mean:[90]***blocks.2.1.bn2.running_var:[90]***blocks.2.1.bn2.num_batches_tracked:[]***blocks.2.1.se.conv_reduce.weight:[12, 90, 1, 1]***blocks.2.1.se.conv_reduce.bias:[12]***blocks.2.1.se.conv_expand.weight:[90, 12, 1, 1]***blocks.2.1.se.conv_expand.bias:[90]***blocks.2.1.conv_pwl.weight:[40, 90, 1, 1]***blocks.2.1.bn3.weight:[40]***blocks.2.1.bn3.bias:[40]***blocks.2.1.bn3.running_mean:[40]***blocks.2.1.bn3.running_var:[40]***blocks.2.1.bn3.num_batches_tracked:[]***blocks.2.2.conv_pw.weight:[85, 40, 1, 1]***blocks.2.2.bn1.weight:[85]***blocks.2.2.bn1.bias:[85]***blocks.2.2.bn1.running_mean:[85]***blocks.2.2.bn1.running_var:[85]***blocks.2.2.bn1.num_batches_tracked:[]***blocks.2.2.conv_dw.weight:[85, 1, 5, 
5]***blocks.2.2.bn2.weight:[85]***blocks.2.2.bn2.bias:[85]***blocks.2.2.bn2.running_mean:[85]***blocks.2.2.bn2.running_var:[85]***blocks.2.2.bn2.num_batches_tracked:[]***blocks.2.2.se.conv_reduce.weight:[12, 85, 1, 1]***blocks.2.2.se.conv_reduce.bias:[12]***blocks.2.2.se.conv_expand.weight:[85, 12, 1, 1]***blocks.2.2.se.conv_expand.bias:[85]***blocks.2.2.conv_pwl.weight:[40, 85, 1, 1]***blocks.2.2.bn3.weight:[40]***blocks.2.2.bn3.bias:[40]***blocks.2.2.bn3.running_mean:[40]***blocks.2.2.bn3.running_var:[40]***blocks.2.2.bn3.num_batches_tracked:[]***blocks.3.0.conv_pw.weight:[215, 40, 1, 1]***blocks.3.0.bn1.weight:[215]***blocks.3.0.bn1.bias:[215]***blocks.3.0.bn1.running_mean:[215]***blocks.3.0.bn1.running_var:[215]***blocks.3.0.bn1.num_batches_tracked:[]***blocks.3.0.conv_dw.weight:[215, 1, 3, 3]***blocks.3.0.bn2.weight:[215]***blocks.3.0.bn2.bias:[215]***blocks.3.0.bn2.running_mean:[215]***blocks.3.0.bn2.running_var:[215]***blocks.3.0.bn2.num_batches_tracked:[]***blocks.3.0.se.conv_reduce.weight:[12, 215, 1, 1]***blocks.3.0.se.conv_reduce.bias:[12]***blocks.3.0.se.conv_expand.weight:[215, 12, 1, 1]***blocks.3.0.se.conv_expand.bias:[215]***blocks.3.0.conv_pwl.weight:[93, 215, 1, 1]***blocks.3.0.bn3.weight:[93]***blocks.3.0.bn3.bias:[93]***blocks.3.0.bn3.running_mean:[93]***blocks.3.0.bn3.running_var:[93]***blocks.3.0.bn3.num_batches_tracked:[]***blocks.3.1.conv_pw.weight:[261, 93, 1, 1]***blocks.3.1.bn1.weight:[261]***blocks.3.1.bn1.bias:[261]***blocks.3.1.bn1.running_mean:[261]***blocks.3.1.bn1.running_var:[261]***blocks.3.1.bn1.num_batches_tracked:[]***blocks.3.1.conv_dw.weight:[261, 1, 3, 3]***blocks.3.1.bn2.weight:[261]***blocks.3.1.bn2.bias:[261]***blocks.3.1.bn2.running_mean:[261]***blocks.3.1.bn2.running_var:[261]***blocks.3.1.bn2.num_batches_tracked:[]***blocks.3.1.se.conv_reduce.weight:[24, 261, 1, 1]***blocks.3.1.se.conv_reduce.bias:[24]***blocks.3.1.se.conv_expand.weight:[261, 24, 1, 
1]***blocks.3.1.se.conv_expand.bias:[261]***blocks.3.1.conv_pwl.weight:[93, 261, 1, 1]***blocks.3.1.bn3.weight:[93]***blocks.3.1.bn3.bias:[93]***blocks.3.1.bn3.running_mean:[93]***blocks.3.1.bn3.running_var:[93]***blocks.3.1.bn3.num_batches_tracked:[]***blocks.3.2.conv_pw.weight:[219, 93, 1, 1]***blocks.3.2.bn1.weight:[219]***blocks.3.2.bn1.bias:[219]***blocks.3.2.bn1.running_mean:[219]***blocks.3.2.bn1.running_var:[219]***blocks.3.2.bn1.num_batches_tracked:[]***blocks.3.2.conv_dw.weight:[219, 1, 3, 3]***blocks.3.2.bn2.weight:[219]***blocks.3.2.bn2.bias:[219]***blocks.3.2.bn2.running_mean:[219]***blocks.3.2.bn2.running_var:[219]***blocks.3.2.bn2.num_batches_tracked:[]***blocks.3.2.se.conv_reduce.weight:[24, 219, 1, 1]***blocks.3.2.se.conv_reduce.bias:[24]***blocks.3.2.se.conv_expand.weight:[219, 24, 1, 1]***blocks.3.2.se.conv_expand.bias:[219]***blocks.3.2.conv_pwl.weight:[93, 219, 1, 1]***blocks.3.2.bn3.weight:[93]***blocks.3.2.bn3.bias:[93]***blocks.3.2.bn3.running_mean:[93]***blocks.3.2.bn3.running_var:[93]***blocks.3.2.bn3.num_batches_tracked:[]***blocks.3.3.conv_pw.weight:[254, 93, 1, 1]***blocks.3.3.bn1.weight:[254]***blocks.3.3.bn1.bias:[254]***blocks.3.3.bn1.running_mean:[254]***blocks.3.3.bn1.running_var:[254]***blocks.3.3.bn1.num_batches_tracked:[]***blocks.3.3.conv_dw.weight:[254, 1, 3, 3]***blocks.3.3.bn2.weight:[254]***blocks.3.3.bn2.bias:[254]***blocks.3.3.bn2.running_mean:[254]***blocks.3.3.bn2.running_var:[254]***blocks.3.3.bn2.num_batches_tracked:[]***blocks.3.3.se.conv_reduce.weight:[24, 254, 1, 1]***blocks.3.3.se.conv_reduce.bias:[24]***blocks.3.3.se.conv_expand.weight:[254, 24, 1, 1]***blocks.3.3.se.conv_expand.bias:[254]***blocks.3.3.conv_pwl.weight:[93, 254, 1, 1]***blocks.3.3.bn3.weight:[93]***blocks.3.3.bn3.bias:[93]***blocks.3.3.bn3.running_mean:[93]***blocks.3.3.bn3.running_var:[93]***blocks.3.3.bn3.num_batches_tracked:[]***blocks.3.4.conv_pw.weight:[236, 93, 1, 
1]***blocks.3.4.bn1.weight:[236]***blocks.3.4.bn1.bias:[236]***blocks.3.4.bn1.running_mean:[236]***blocks.3.4.bn1.running_var:[236]***blocks.3.4.bn1.num_batches_tracked:[]***blocks.3.4.conv_dw.weight:[236, 1, 3, 3]***blocks.3.4.bn2.weight:[236]***blocks.3.4.bn2.bias:[236]***blocks.3.4.bn2.running_mean:[236]***blocks.3.4.bn2.running_var:[236]***blocks.3.4.bn2.num_batches_tracked:[]***blocks.3.4.se.conv_reduce.weight:[24, 236, 1, 1]***blocks.3.4.se.conv_reduce.bias:[24]***blocks.3.4.se.conv_expand.weight:[236, 24, 1, 1]***blocks.3.4.se.conv_expand.bias:[236]***blocks.3.4.conv_pwl.weight:[93, 236, 1, 1]***blocks.3.4.bn3.weight:[93]***blocks.3.4.bn3.bias:[93]***blocks.3.4.bn3.running_mean:[93]***blocks.3.4.bn3.running_var:[93]***blocks.3.4.bn3.num_batches_tracked:[]***blocks.4.0.conv_pw.weight:[480, 93, 1, 1]***blocks.4.0.bn1.weight:[480]***blocks.4.0.bn1.bias:[480]***blocks.4.0.bn1.running_mean:[480]***blocks.4.0.bn1.running_var:[480]***blocks.4.0.bn1.num_batches_tracked:[]***blocks.4.0.conv_dw.weight:[480, 1, 5, 5]***blocks.4.0.bn2.weight:[480]***blocks.4.0.bn2.bias:[480]***blocks.4.0.bn2.running_mean:[480]***blocks.4.0.bn2.running_var:[480]***blocks.4.0.bn2.num_batches_tracked:[]***blocks.4.0.se.conv_reduce.weight:[24, 480, 1, 1]***blocks.4.0.se.conv_reduce.bias:[24]***blocks.4.0.se.conv_expand.weight:[480, 24, 1, 1]***blocks.4.0.se.conv_expand.bias:[480]***blocks.4.0.conv_pwl.weight:[120, 480, 1, 1]***blocks.4.0.bn3.weight:[120]***blocks.4.0.bn3.bias:[120]***blocks.4.0.bn3.running_mean:[120]***blocks.4.0.bn3.running_var:[120]***blocks.4.0.bn3.num_batches_tracked:[]***blocks.4.1.conv_pw.weight:[235, 120, 1, 1]***blocks.4.1.bn1.weight:[235]***blocks.4.1.bn1.bias:[235]***blocks.4.1.bn1.running_mean:[235]***blocks.4.1.bn1.running_var:[235]***blocks.4.1.bn1.num_batches_tracked:[]***blocks.4.1.conv_dw.weight:[235, 1, 5, 
5]***blocks.4.1.bn2.weight:[235]***blocks.4.1.bn2.bias:[235]***blocks.4.1.bn2.running_mean:[235]***blocks.4.1.bn2.running_var:[235]***blocks.4.1.bn2.num_batches_tracked:[]***blocks.4.1.se.conv_reduce.weight:[34, 235, 1, 1]***blocks.4.1.se.conv_reduce.bias:[34]***blocks.4.1.se.conv_expand.weight:[235, 34, 1, 1]***blocks.4.1.se.conv_expand.bias:[235]***blocks.4.1.conv_pwl.weight:[120, 235, 1, 1]***blocks.4.1.bn3.weight:[120]***blocks.4.1.bn3.bias:[120]***blocks.4.1.bn3.running_mean:[120]***blocks.4.1.bn3.running_var:[120]***blocks.4.1.bn3.num_batches_tracked:[]***blocks.4.2.conv_pw.weight:[217, 120, 1, 1]***blocks.4.2.bn1.weight:[217]***blocks.4.2.bn1.bias:[217]***blocks.4.2.bn1.running_mean:[217]***blocks.4.2.bn1.running_var:[217]***blocks.4.2.bn1.num_batches_tracked:[]***blocks.4.2.conv_dw.weight:[217, 1, 5, 5]***blocks.4.2.bn2.weight:[217]***blocks.4.2.bn2.bias:[217]***blocks.4.2.bn2.running_mean:[217]***blocks.4.2.bn2.running_var:[217]***blocks.4.2.bn2.num_batches_tracked:[]***blocks.4.2.se.conv_reduce.weight:[34, 217, 1, 1]***blocks.4.2.se.conv_reduce.bias:[34]***blocks.4.2.se.conv_expand.weight:[217, 34, 1, 1]***blocks.4.2.se.conv_expand.bias:[217]***blocks.4.2.conv_pwl.weight:[120, 217, 1, 1]***blocks.4.2.bn3.weight:[120]***blocks.4.2.bn3.bias:[120]***blocks.4.2.bn3.running_mean:[120]***blocks.4.2.bn3.running_var:[120]***blocks.4.2.bn3.num_batches_tracked:[]***blocks.4.3.conv_pw.weight:[226, 120, 1, 1]***blocks.4.3.bn1.weight:[226]***blocks.4.3.bn1.bias:[226]***blocks.4.3.bn1.running_mean:[226]***blocks.4.3.bn1.running_var:[226]***blocks.4.3.bn1.num_batches_tracked:[]***blocks.4.3.conv_dw.weight:[226, 1, 5, 5]***blocks.4.3.bn2.weight:[226]***blocks.4.3.bn2.bias:[226]***blocks.4.3.bn2.running_mean:[226]***blocks.4.3.bn2.running_var:[226]***blocks.4.3.bn2.num_batches_tracked:[]***blocks.4.3.se.conv_reduce.weight:[33, 226, 1, 1]***blocks.4.3.se.conv_reduce.bias:[33]***blocks.4.3.se.conv_expand.weight:[226, 33, 1, 
1]***blocks.4.3.se.conv_expand.bias:[226]***blocks.4.3.conv_pwl.weight:[120, 226, 1, 1]***blocks.4.3.bn3.weight:[120]***blocks.4.3.bn3.bias:[120]***blocks.4.3.bn3.running_mean:[120]***blocks.4.3.bn3.running_var:[120]***blocks.4.3.bn3.num_batches_tracked:[]***blocks.4.4.conv_pw.weight:[340, 120, 1, 1]***blocks.4.4.bn1.weight:[340]***blocks.4.4.bn1.bias:[340]***blocks.4.4.bn1.running_mean:[340]***blocks.4.4.bn1.running_var:[340]***blocks.4.4.bn1.num_batches_tracked:[]***blocks.4.4.conv_dw.weight:[340, 1, 5, 5]***blocks.4.4.bn2.weight:[340]***blocks.4.4.bn2.bias:[340]***blocks.4.4.bn2.running_mean:[340]***blocks.4.4.bn2.running_var:[340]***blocks.4.4.bn2.num_batches_tracked:[]***blocks.4.4.se.conv_reduce.weight:[34, 340, 1, 1]***blocks.4.4.se.conv_reduce.bias:[34]***blocks.4.4.se.conv_expand.weight:[340, 34, 1, 1]***blocks.4.4.se.conv_expand.bias:[340]***blocks.4.4.conv_pwl.weight:[120, 340, 1, 1]***blocks.4.4.bn3.weight:[120]***blocks.4.4.bn3.bias:[120]***blocks.4.4.bn3.running_mean:[120]***blocks.4.4.bn3.running_var:[120]***blocks.4.4.bn3.num_batches_tracked:[]***blocks.5.0.conv_pw.weight:[802, 120, 1, 1]***blocks.5.0.bn1.weight:[802]***blocks.5.0.bn1.bias:[802]***blocks.5.0.bn1.running_mean:[802]***blocks.5.0.bn1.running_var:[802]***blocks.5.0.bn1.num_batches_tracked:[]***blocks.5.0.conv_dw.weight:[802, 1, 5, 5]***blocks.5.0.bn2.weight:[802]***blocks.5.0.bn2.bias:[802]***blocks.5.0.bn2.running_mean:[802]***blocks.5.0.bn2.running_var:[802]***blocks.5.0.bn2.num_batches_tracked:[]***blocks.5.0.se.conv_reduce.weight:[34, 802, 1, 1]***blocks.5.0.se.conv_reduce.bias:[34]***blocks.5.0.se.conv_expand.weight:[802, 34, 1, 1]***blocks.5.0.se.conv_expand.bias:[802]***blocks.5.0.conv_pwl.weight:[232, 802, 1, 1]***blocks.5.0.bn3.weight:[232]***blocks.5.0.bn3.bias:[232]***blocks.5.0.bn3.running_mean:[232]***blocks.5.0.bn3.running_var:[232]***blocks.5.0.bn3.num_batches_tracked:[]***blocks.5.1.conv_pw.weight:[1030, 232, 1, 
1]***blocks.5.1.bn1.weight:[1030]***blocks.5.1.bn1.bias:[1030]***blocks.5.1.bn1.running_mean:[1030]***blocks.5.1.bn1.running_var:[1030]***blocks.5.1.bn1.num_batches_tracked:[]***blocks.5.1.conv_dw.weight:[1030, 1, 5, 5]***blocks.5.1.bn2.weight:[1030]***blocks.5.1.bn2.bias:[1030]***blocks.5.1.bn2.running_mean:[1030]***blocks.5.1.bn2.running_var:[1030]***blocks.5.1.bn2.num_batches_tracked:[]***blocks.5.1.se.conv_reduce.weight:[58, 1030, 1, 1]***blocks.5.1.se.conv_reduce.bias:[58]***blocks.5.1.se.conv_expand.weight:[1030, 58, 1, 1]***blocks.5.1.se.conv_expand.bias:[1030]***blocks.5.1.conv_pwl.weight:[232, 1030, 1, 1]***blocks.5.1.bn3.weight:[232]***blocks.5.1.bn3.bias:[232]***blocks.5.1.bn3.running_mean:[232]***blocks.5.1.bn3.running_var:[232]***blocks.5.1.bn3.num_batches_tracked:[]***blocks.5.2.conv_pw.weight:[924, 232, 1, 1]***blocks.5.2.bn1.weight:[924]***blocks.5.2.bn1.bias:[924]***blocks.5.2.bn1.running_mean:[924]***blocks.5.2.bn1.running_var:[924]***blocks.5.2.bn1.num_batches_tracked:[]***blocks.5.2.conv_dw.weight:[924, 1, 5, 5]***blocks.5.2.bn2.weight:[924]***blocks.5.2.bn2.bias:[924]***blocks.5.2.bn2.running_mean:[924]***blocks.5.2.bn2.running_var:[924]***blocks.5.2.bn2.num_batches_tracked:[]***blocks.5.2.se.conv_reduce.weight:[58, 924, 1, 1]***blocks.5.2.se.conv_reduce.bias:[58]***blocks.5.2.se.conv_expand.weight:[924, 58, 1, 1]***blocks.5.2.se.conv_expand.bias:[924]***blocks.5.2.conv_pwl.weight:[232, 924, 1, 1]***blocks.5.2.bn3.weight:[232]***blocks.5.2.bn3.bias:[232]***blocks.5.2.bn3.running_mean:[232]***blocks.5.2.bn3.running_var:[232]***blocks.5.2.bn3.num_batches_tracked:[]***blocks.5.3.conv_pw.weight:[1016, 232, 1, 1]***blocks.5.3.bn1.weight:[1016]***blocks.5.3.bn1.bias:[1016]***blocks.5.3.bn1.running_mean:[1016]***blocks.5.3.bn1.running_var:[1016]***blocks.5.3.bn1.num_batches_tracked:[]***blocks.5.3.conv_dw.weight:[1016, 1, 5, 
5]***blocks.5.3.bn2.weight:[1016]***blocks.5.3.bn2.bias:[1016]***blocks.5.3.bn2.running_mean:[1016]***blocks.5.3.bn2.running_var:[1016]***blocks.5.3.bn2.num_batches_tracked:[]***blocks.5.3.se.conv_reduce.weight:[58, 1016, 1, 1]***blocks.5.3.se.conv_reduce.bias:[58]***blocks.5.3.se.conv_expand.weight:[1016, 58, 1, 1]***blocks.5.3.se.conv_expand.bias:[1016]***blocks.5.3.conv_pwl.weight:[232, 1016, 1, 1]***blocks.5.3.bn3.weight:[232]***blocks.5.3.bn3.bias:[232]***blocks.5.3.bn3.running_mean:[232]***blocks.5.3.bn3.running_var:[232]***blocks.5.3.bn3.num_batches_tracked:[]***blocks.5.4.conv_pw.weight:[1130, 232, 1, 1]***blocks.5.4.bn1.weight:[1130]***blocks.5.4.bn1.bias:[1130]***blocks.5.4.bn1.running_mean:[1130]***blocks.5.4.bn1.running_var:[1130]***blocks.5.4.bn1.num_batches_tracked:[]***blocks.5.4.conv_dw.weight:[1130, 1, 5, 5]***blocks.5.4.bn2.weight:[1130]***blocks.5.4.bn2.bias:[1130]***blocks.5.4.bn2.running_mean:[1130]***blocks.5.4.bn2.running_var:[1130]***blocks.5.4.bn2.num_batches_tracked:[]***blocks.5.4.se.conv_reduce.weight:[58, 1130, 1, 1]***blocks.5.4.se.conv_reduce.bias:[58]***blocks.5.4.se.conv_expand.weight:[1130, 58, 1, 1]***blocks.5.4.se.conv_expand.bias:[1130]***blocks.5.4.conv_pwl.weight:[232, 1130, 1, 1]***blocks.5.4.bn3.weight:[232]***blocks.5.4.bn3.bias:[232]***blocks.5.4.bn3.running_mean:[232]***blocks.5.4.bn3.running_var:[232]***blocks.5.4.bn3.num_batches_tracked:[]***blocks.5.5.conv_pw.weight:[1266, 232, 1, 1]***blocks.5.5.bn1.weight:[1266]***blocks.5.5.bn1.bias:[1266]***blocks.5.5.bn1.running_mean:[1266]***blocks.5.5.bn1.running_var:[1266]***blocks.5.5.bn1.num_batches_tracked:[]***blocks.5.5.conv_dw.weight:[1266, 1, 5, 5]***blocks.5.5.bn2.weight:[1266]***blocks.5.5.bn2.bias:[1266]***blocks.5.5.bn2.running_mean:[1266]***blocks.5.5.bn2.running_var:[1266]***blocks.5.5.bn2.num_batches_tracked:[]***blocks.5.5.se.conv_reduce.weight:[58, 1266, 1, 1]***blocks.5.5.se.conv_reduce.bias:[58]***blocks.5.5.se.conv_expand.weight:[1266, 58, 1, 
1]***blocks.5.5.se.conv_expand.bias:[1266]***blocks.5.5.conv_pwl.weight:[232, 1266, 1, 1]***blocks.5.5.bn3.weight:[232]***blocks.5.5.bn3.bias:[232]***blocks.5.5.bn3.running_mean:[232]***blocks.5.5.bn3.running_var:[232]***blocks.5.5.bn3.num_batches_tracked:[]***blocks.6.0.conv_pw.weight:[1392, 232, 1, 1]***blocks.6.0.bn1.weight:[1392]***blocks.6.0.bn1.bias:[1392]***blocks.6.0.bn1.running_mean:[1392]***blocks.6.0.bn1.running_var:[1392]***blocks.6.0.bn1.num_batches_tracked:[]***blocks.6.0.conv_dw.weight:[1392, 1, 3, 3]***blocks.6.0.bn2.weight:[1392]***blocks.6.0.bn2.bias:[1392]***blocks.6.0.bn2.running_mean:[1392]***blocks.6.0.bn2.running_var:[1392]***blocks.6.0.bn2.num_batches_tracked:[]***blocks.6.0.se.conv_reduce.weight:[58, 1392, 1, 1]***blocks.6.0.se.conv_reduce.bias:[58]***blocks.6.0.se.conv_expand.weight:[1392, 58, 1, 1]***blocks.6.0.se.conv_expand.bias:[1392]***blocks.6.0.conv_pwl.weight:[384, 1392, 1, 1]***blocks.6.0.bn3.weight:[384]***blocks.6.0.bn3.bias:[384]***blocks.6.0.bn3.running_mean:[384]***blocks.6.0.bn3.running_var:[384]***blocks.6.0.bn3.num_batches_tracked:[]***blocks.6.1.conv_pw.weight:[2301, 384, 1, 1]***blocks.6.1.bn1.weight:[2301]***blocks.6.1.bn1.bias:[2301]***blocks.6.1.bn1.running_mean:[2301]***blocks.6.1.bn1.running_var:[2301]***blocks.6.1.bn1.num_batches_tracked:[]***blocks.6.1.conv_dw.weight:[2301, 1, 3, 3]***blocks.6.1.bn2.weight:[2301]***blocks.6.1.bn2.bias:[2301]***blocks.6.1.bn2.running_mean:[2301]***blocks.6.1.bn2.running_var:[2301]***blocks.6.1.bn2.num_batches_tracked:[]***blocks.6.1.se.conv_reduce.weight:[96, 2301, 1, 1]***blocks.6.1.se.conv_reduce.bias:[96]***blocks.6.1.se.conv_expand.weight:[2301, 96, 1, 1]***blocks.6.1.se.conv_expand.bias:[2301]***blocks.6.1.conv_pwl.weight:[384, 2301, 1, 1]***blocks.6.1.bn3.weight:[384]***blocks.6.1.bn3.bias:[384]***blocks.6.1.bn3.running_mean:[384]***blocks.6.1.bn3.running_var:[384]***blocks.6.1.bn3.num_batches_tracked:[]***conv_head.weight:[1536, 384, 1, 
1]***bn2.weight:[1536]***bn2.bias:[1536]***bn2.running_mean:[1536]***bn2.running_var:[1536]***bn2.num_batches_tracked:[]***classifier.weight:[1000, 1536]***classifier.bias:[1000]"
  },
  {
    "path": "timm/models/_registry.py",
    "content": "\"\"\" Model Registry\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\n\nimport fnmatch\nimport re\nimport sys\nimport warnings\nfrom collections import defaultdict, deque\nfrom copy import deepcopy\nfrom dataclasses import replace\nfrom typing import Any, Callable, Dict, Iterable, List, Optional, Set, Sequence, Union, Tuple\n\nfrom ._pretrained import PretrainedCfg, DefaultCfg\n\n__all__ = [\n    'split_model_name_tag', 'get_arch_name', 'register_model', 'generate_default_cfgs',\n    'list_models', 'list_pretrained', 'is_model', 'model_entrypoint', 'list_modules', 'is_model_in_modules',\n    'get_pretrained_cfg_value', 'is_model_pretrained', 'get_arch_pretrained_cfgs'\n]\n\n_module_to_models: Dict[str, Set[str]] = defaultdict(set)  # dict of sets to check membership of model in module\n_model_to_module: Dict[str, str] = {}  # mapping of model names to module names\n_model_entrypoints: Dict[str, Callable[..., Any]] = {}  # mapping of model names to architecture entrypoint fns\n_model_has_pretrained: Set[str] = set()  # set of model names that have pretrained weight url present\n_model_default_cfgs: Dict[str, PretrainedCfg] = {}  # central repo for model arch -> default cfg objects\n_model_pretrained_cfgs: Dict[str, PretrainedCfg] = {}  # central repo for model arch.tag -> pretrained cfgs\n_model_with_tags: Dict[str, List[str]] = defaultdict(list)  # shortcut to map each model arch to all model + tag names\n_module_to_deprecated_models: Dict[str, Dict[str, Optional[str]]] = defaultdict(dict)\n_deprecated_models: Dict[str, Optional[str]] = {}\n\n\ndef split_model_name_tag(model_name: str, no_tag: str = '') -> Tuple[str, str]:\n    model_name, *tag_list = model_name.split('.', 1)\n    tag = tag_list[0] if tag_list else no_tag\n    return model_name, tag\n\n\ndef get_arch_name(model_name: str) -> str:\n    return split_model_name_tag(model_name)[0]\n\n\ndef generate_default_cfgs(cfgs: Dict[str, Union[Dict[str, Any], PretrainedCfg]]):\n    out 
= defaultdict(DefaultCfg)\n    default_set = set()  # no tag and tags ending with * are prioritized as default\n\n    for k, v in cfgs.items():\n        if isinstance(v, dict):\n            v = PretrainedCfg(**v)\n        has_weights = v.has_weights\n\n        model, tag = split_model_name_tag(k)\n        is_default_set = model in default_set\n        priority = (has_weights and not tag) or (tag.endswith('*') and not is_default_set)\n        tag = tag.strip('*')\n\n        default_cfg = out[model]\n\n        if priority:\n            default_cfg.tags.appendleft(tag)\n            default_set.add(model)\n        elif has_weights and not default_cfg.is_pretrained:\n            default_cfg.tags.appendleft(tag)\n        else:\n            default_cfg.tags.append(tag)\n\n        if has_weights:\n            default_cfg.is_pretrained = True\n\n        default_cfg.cfgs[tag] = v\n\n    return out\n\n\ndef register_model(fn: Callable[..., Any]) -> Callable[..., Any]:\n    # lookup containing module\n    mod = sys.modules[fn.__module__]\n    module_name_split = fn.__module__.split('.')\n    module_name = module_name_split[-1] if len(module_name_split) else ''\n\n    # add model to __all__ in module\n    model_name = fn.__name__\n    if hasattr(mod, '__all__'):\n        mod.__all__.append(model_name)\n    else:\n        mod.__all__ = [model_name]  # type: ignore\n\n    # add entries to registry dict/sets\n    if model_name in _model_entrypoints:\n        warnings.warn(\n            f'Overwriting {model_name} in registry with {fn.__module__}.{model_name}. This is because the name being '\n            'registered conflicts with an existing name. 
Please check if this is not expected.',\n            stacklevel=2,\n        )\n    _model_entrypoints[model_name] = fn\n    _model_to_module[model_name] = module_name\n    _module_to_models[module_name].add(model_name)\n    if hasattr(mod, 'default_cfgs') and model_name in mod.default_cfgs:\n        # this will catch all models that have entrypoint matching cfg key, but miss any aliasing\n        # entrypoints or non-matching combos\n        default_cfg = mod.default_cfgs[model_name]\n        if not isinstance(default_cfg, DefaultCfg):\n            # new style default cfg dataclass w/ multiple entries per model-arch\n            assert isinstance(default_cfg, dict)\n            # old style cfg dict per model-arch\n            pretrained_cfg = PretrainedCfg(**default_cfg)\n            default_cfg = DefaultCfg(tags=deque(['']), cfgs={'': pretrained_cfg})\n\n        for tag_idx, tag in enumerate(default_cfg.tags):\n            is_default = tag_idx == 0\n            pretrained_cfg = default_cfg.cfgs[tag]\n            model_name_tag = '.'.join([model_name, tag]) if tag else model_name\n            replace_items = dict(architecture=model_name, tag=tag if tag else None)\n            if pretrained_cfg.hf_hub_id and pretrained_cfg.hf_hub_id == 'timm/':\n                # auto-complete hub name w/ architecture.tag\n                replace_items['hf_hub_id'] = pretrained_cfg.hf_hub_id + model_name_tag\n            pretrained_cfg = replace(pretrained_cfg, **replace_items)\n\n            if is_default:\n                _model_pretrained_cfgs[model_name] = pretrained_cfg\n                if pretrained_cfg.has_weights:\n                    # add tagless entry if it's default and has weights\n                    _model_has_pretrained.add(model_name)\n\n            if tag:\n                _model_pretrained_cfgs[model_name_tag] = pretrained_cfg\n                if pretrained_cfg.has_weights:\n                    # add model w/ tag if tag is valid\n                    
_model_has_pretrained.add(model_name_tag)\n                _model_with_tags[model_name].append(model_name_tag)\n            else:\n                _model_with_tags[model_name].append(model_name)  # has empty tag (to slowly remove these instances)\n\n        _model_default_cfgs[model_name] = default_cfg\n\n    return fn\n\n\ndef _deprecated_model_shim(deprecated_name: str, current_fn: Callable = None, current_tag: str = ''):\n    def _fn(pretrained=False, **kwargs):\n        assert current_fn is not None,  f'Model {deprecated_name} has been removed with no replacement.'\n        current_name = '.'.join([current_fn.__name__, current_tag]) if current_tag else current_fn.__name__\n        warnings.warn(f'Mapping deprecated model name {deprecated_name} to current {current_name}.', stacklevel=2)\n        pretrained_cfg = kwargs.pop('pretrained_cfg', None)\n        return current_fn(pretrained=pretrained, pretrained_cfg=pretrained_cfg or current_tag, **kwargs)\n    return _fn\n\n\ndef register_model_deprecations(module_name: str, deprecation_map: Dict[str, Optional[str]]):\n    mod = sys.modules[module_name]\n    module_name_split = module_name.split('.')\n    module_name = module_name_split[-1] if len(module_name_split) else ''\n\n    for deprecated, current in deprecation_map.items():\n        if hasattr(mod, '__all__'):\n            mod.__all__.append(deprecated)\n        current_fn = None\n        current_tag = ''\n        if current:\n            current_name, current_tag = split_model_name_tag(current)\n            current_fn = getattr(mod, current_name)\n        deprecated_entrypoint_fn = _deprecated_model_shim(deprecated, current_fn, current_tag)\n        setattr(mod, deprecated, deprecated_entrypoint_fn)\n        _model_entrypoints[deprecated] = deprecated_entrypoint_fn\n        _model_to_module[deprecated] = module_name\n        _module_to_models[module_name].add(deprecated)\n        _deprecated_models[deprecated] = current\n        
_module_to_deprecated_models[module_name][deprecated] = current\n\n\ndef _natural_key(string_: str) -> List[Union[int, str]]:\n    \"\"\"See https://blog.codinghorror.com/sorting-for-humans-natural-sort-order/\"\"\"\n    return [int(s) if s.isdigit() else s for s in re.split(r'(\\d+)', string_.lower())]\n\n\ndef _expand_filter(filter: str):\n    \"\"\" expand a 'base_filter' to 'base_filter.*' if no tag portion\"\"\"\n    filter_base, filter_tag = split_model_name_tag(filter)\n    if not filter_tag:\n        return ['.'.join([filter_base, '*']), filter]\n    else:\n        return [filter]\n\n\ndef list_models(\n        filter: Union[str, List[str]] = '',\n        module: Union[str, List[str]] = '',\n        pretrained: bool = False,\n        exclude_filters: Union[str, List[str]] = '',\n        name_matches_cfg: bool = False,\n        include_tags: Optional[bool] = None,\n) -> List[str]:\n    \"\"\" Return list of available model names, sorted alphabetically\n\n    Args:\n        filter - Wildcard filter string that works with fnmatch\n        module - Limit model selection to a specific submodule (ie 'vision_transformer')\n        pretrained - Include only models with valid pretrained weights if True\n        exclude_filters - Wildcard filters to exclude models after including them with filter\n        name_matches_cfg - Include only models w/ model_name matching default_cfg name (excludes some aliases)\n        include_tags - Include pretrained tags in model names (model.tag). 
If None, defaults\n            set to True when pretrained=True else False (default: None)\n\n    Returns:\n        models - The sorted list of models\n\n    Example:\n        model_list('gluon_resnet*') -- returns all models starting with 'gluon_resnet'\n        model_list('*resnext*, 'resnet') -- returns all models with 'resnext' in 'resnet' module\n    \"\"\"\n    if filter:\n        include_filters = filter if isinstance(filter, (tuple, list)) else [filter]\n    else:\n        include_filters = []\n\n    if include_tags is None:\n        # FIXME should this be default behaviour? or default to include_tags=True?\n        include_tags = pretrained\n\n    if not module:\n        all_models: Set[str] = set(_model_entrypoints.keys())\n    else:\n        if isinstance(module, str):\n            all_models: Set[str] = _module_to_models[module]\n        else:\n            assert isinstance(module, Sequence)\n            all_models: Set[str] = set()\n            for m in module:\n                all_models.update(_module_to_models[m])\n    all_models = all_models - _deprecated_models.keys()  # remove deprecated models from listings\n\n    if include_tags:\n        # expand model names to include names w/ pretrained tags\n        models_with_tags: Set[str] = set()\n        for m in all_models:\n            models_with_tags.update(_model_with_tags[m])\n        all_models = models_with_tags\n        # expand include and exclude filters to include a '.*' for proper match if no tags in filter\n        include_filters = [ef for f in include_filters for ef in _expand_filter(f)]\n        exclude_filters = [ef for f in exclude_filters for ef in _expand_filter(f)]\n\n    if include_filters:\n        models: Set[str] = set()\n        for f in include_filters:\n            include_models = fnmatch.filter(all_models, f)  # include these models\n            if len(include_models):\n                models = models.union(include_models)\n    else:\n        models = all_models\n\n    if 
exclude_filters:\n        if not isinstance(exclude_filters, (tuple, list)):\n            exclude_filters = [exclude_filters]\n        for xf in exclude_filters:\n            exclude_models = fnmatch.filter(models, xf)  # exclude these models\n            if len(exclude_models):\n                models = models.difference(exclude_models)\n\n    if pretrained:\n        models = _model_has_pretrained.intersection(models)\n\n    if name_matches_cfg:\n        models = set(_model_pretrained_cfgs).intersection(models)\n\n    return sorted(models, key=_natural_key)\n\n\ndef list_pretrained(\n        filter: Union[str, List[str]] = '',\n        exclude_filters: str = '',\n) -> List[str]:\n    return list_models(\n        filter=filter,\n        pretrained=True,\n        exclude_filters=exclude_filters,\n        include_tags=True,\n    )\n\n\ndef get_deprecated_models(module: str = '') -> Dict[str, str]:\n    all_deprecated = _module_to_deprecated_models[module] if module else _deprecated_models\n    return deepcopy(all_deprecated)\n\n\ndef is_model(model_name: str) -> bool:\n    \"\"\" Check if a model name exists\n    \"\"\"\n    arch_name = get_arch_name(model_name)\n    return arch_name in _model_entrypoints\n\n\ndef model_entrypoint(model_name: str, module_filter: Optional[str] = None) -> Callable[..., Any]:\n    \"\"\"Fetch a model entrypoint for specified model name\n    \"\"\"\n    arch_name = get_arch_name(model_name)\n    if module_filter and arch_name not in _module_to_models.get(module_filter, {}):\n        raise RuntimeError(f'Model ({model_name} not found in module {module_filter}.')\n    return _model_entrypoints[arch_name]\n\n\ndef list_modules() -> List[str]:\n    \"\"\" Return list of module names that contain models / model entrypoints\n    \"\"\"\n    modules = _module_to_models.keys()\n    return sorted(modules)\n\n\ndef is_model_in_modules(\n        model_name: str, module_names: Union[Tuple[str, ...], List[str], Set[str]]\n) -> bool:\n    \"\"\"Check 
if a model exists within a subset of modules\n\n    Args:\n        model_name - name of model to check\n        module_names - names of modules to search in\n    \"\"\"\n    arch_name = get_arch_name(model_name)\n    assert isinstance(module_names, (tuple, list, set))\n    return any(arch_name in _module_to_models[n] for n in module_names)\n\n\ndef is_model_pretrained(model_name: str) -> bool:\n    return model_name in _model_has_pretrained\n\n\ndef get_pretrained_cfg(model_name: str, allow_unregistered: bool = True) -> Optional[PretrainedCfg]:\n    if model_name in _model_pretrained_cfgs:\n        return deepcopy(_model_pretrained_cfgs[model_name])\n    arch_name, tag = split_model_name_tag(model_name)\n    if arch_name in _model_default_cfgs:\n        # if model arch exists, but the tag is wrong, error out\n        raise RuntimeError(f'Invalid pretrained tag ({tag}) for {arch_name}.')\n    if allow_unregistered:\n        # if model arch doesn't exist, it has no pretrained_cfg registered, allow a default to be created\n        return None\n    raise RuntimeError(f'Model architecture ({arch_name}) has no pretrained cfg registered.')\n\n\ndef get_pretrained_cfg_value(model_name: str, cfg_key: str) -> Optional[Any]:\n    \"\"\" Get a specific model default_cfg value by key. None if key doesn't exist.\n    \"\"\"\n    cfg = get_pretrained_cfg(model_name, allow_unregistered=False)\n    return getattr(cfg, cfg_key, None)\n\n\ndef get_arch_pretrained_cfgs(model_name: str) -> Dict[str, PretrainedCfg]:\n    \"\"\" Get all pretrained cfgs for a given architecture.\n    \"\"\"\n    arch_name, _ = split_model_name_tag(model_name)\n    model_names = _model_with_tags[arch_name]\n    cfgs = {m: _model_pretrained_cfgs[m] for m in model_names}\n    return cfgs\n"
  },
  {
    "path": "timm/models/beit.py",
    "content": "\"\"\" BEiT: BERT Pre-Training of Image Transformers (https://arxiv.org/abs/2106.08254)\n\nModel from official source: https://github.com/microsoft/unilm/tree/master/beit\n\n@inproceedings{beit,\ntitle={{BEiT}: {BERT} Pre-Training of Image Transformers},\nauthor={Hangbo Bao and Li Dong and Songhao Piao and Furu Wei},\nbooktitle={International Conference on Learning Representations},\nyear={2022},\nurl={https://openreview.net/forum?id=p-BhZSz59o4}\n}\n\nBEiT-v2 from https://github.com/microsoft/unilm/tree/master/beit2\n\n@article{beitv2,\ntitle={{BEiT v2}: Masked Image Modeling with Vector-Quantized Visual Tokenizers},\nauthor={Zhiliang Peng and Li Dong and Hangbo Bao and Qixiang Ye and Furu Wei},\nyear={2022},\neprint={2208.06366},\narchivePrefix={arXiv},\nprimaryClass={cs.CV}\n}\n\nAt this point only the 1k fine-tuned classification weights and model configs have been added,\nsee original source above for pre-training models and procedure.\n\nModifications by / Copyright 2021 Ross Wightman, original copyrights below\n\"\"\"\n# --------------------------------------------------------\n# BEIT: BERT Pre-Training of Image Transformers (https://arxiv.org/abs/2106.08254)\n# Github source: https://github.com/microsoft/unilm/tree/master/beit\n# Copyright (c) 2021 Microsoft\n# Licensed under The MIT License [see LICENSE for details]\n# By Hangbo Bao\n# Based on timm and DeiT code bases\n# https://github.com/rwightman/pytorch-image-models/tree/master/timm\n# https://github.com/facebookresearch/deit/\n# https://github.com/facebookresearch/dino\n# --------------------------------------------------------'\n\nimport math\nfrom functools import partial\nfrom typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import (\n    PatchEmbed,\n    Mlp,\n    SwiGLU,\n    LayerNorm,\n    
DropPath,\n    calculate_drop_path_rates,\n    trunc_normal_,\n    use_fused_attn,\n    resample_patch_embed,\n    resample_abs_pos_embed,\n    resize_rel_pos_bias_table,\n    ndgrid,\n)\n\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint\nfrom ._registry import generate_default_cfgs, register_model\n\n__all__ = ['Beit']\n\n\ndef gen_relative_position_index(window_size: Tuple[int, int], device=None) -> torch.Tensor:\n    \"\"\"Generate relative position index for window-based attention.\n\n    Creates a lookup table for relative position indices between all pairs of positions\n    within a window, including special handling for cls token interactions.\n\n    Args:\n        window_size: Height and width of the attention window.\n\n    Returns:\n        Relative position index tensor of shape (window_area+1, window_area+1)\n        where +1 accounts for the cls token.\n    \"\"\"\n    num_relative_distance = (2 * window_size[0] - 1) * (2 * window_size[1] - 1) + 3\n    # cls to token & token 2 cls & cls to cls\n    # get pair-wise relative position index for each token inside the window\n    window_area = window_size[0] * window_size[1]\n    coords = torch.stack(ndgrid(\n        torch.arange(window_size[0], device=device, dtype=torch.long),\n        torch.arange(window_size[1], device=device, dtype=torch.long),\n    ))  # 2, Wh, Ww\n    coords_flatten = torch.flatten(coords, 1)  # 2, Wh*Ww\n    relative_coords = coords_flatten[:, :, None] - coords_flatten[:, None, :]  # 2, Wh*Ww, Wh*Ww\n    relative_coords = relative_coords.permute(1, 2, 0).contiguous()  # Wh*Ww, Wh*Ww, 2\n    relative_coords[:, :, 0] += window_size[0] - 1  # shift to start from 0\n    relative_coords[:, :, 1] += window_size[1] - 1\n    relative_coords[:, :, 0] *= 2 * window_size[1] - 1\n    relative_position_index = torch.zeros(size=(window_area + 1,) * 2, device=device, dtype=relative_coords.dtype)\n    
relative_position_index[1:, 1:] = relative_coords.sum(-1)  # Wh*Ww, Wh*Ww\n    relative_position_index[0, 0:] = num_relative_distance - 3\n    relative_position_index[0:, 0] = num_relative_distance - 2\n    relative_position_index[0, 0] = num_relative_distance - 1\n    return relative_position_index\n\n\nclass Attention(nn.Module):\n    \"\"\"Multi-head attention module with optional relative position bias.\n\n    Implements multi-head self-attention with support for relative position bias\n    and fused attention operations. Can use either standard or custom head dimensions.\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = False,\n            qkv_bias_separate: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            window_size: Optional[Tuple[int, int]] = None,\n            attn_head_dim: Optional[int] = None,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize attention module.\n\n        Args:\n            dim: Input feature dimension.\n            num_heads: Number of attention heads.\n            qkv_bias: If True, add learnable bias to query, key, value projections.\n            qkv_bias_separate: If True, use separate bias for q, k, v projections.\n            attn_drop: Dropout rate for attention weights.\n            proj_drop: Dropout rate for output projection.\n            window_size: Window size for relative position bias. If None, no relative position bias.\n            attn_head_dim: Dimension per attention head. 
If None, uses dim // num_heads.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_heads = num_heads\n        head_dim = dim // num_heads\n        if attn_head_dim is not None:\n            head_dim = attn_head_dim\n        all_head_dim = head_dim * self.num_heads\n        self.scale = head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n        self.qkv_bias_separate = qkv_bias_separate\n\n        self.qkv = nn.Linear(dim, all_head_dim * 3, bias=False, **dd)\n        if qkv_bias:\n            self.q_bias = nn.Parameter(torch.empty(all_head_dim, **dd))\n            self.register_buffer('k_bias', torch.empty(all_head_dim, **dd), persistent=False)\n            self.v_bias = nn.Parameter(torch.empty(all_head_dim, **dd))\n        else:\n            self.q_bias = None\n            self.k_bias = None\n            self.v_bias = None\n\n        if window_size:\n            self.window_size = window_size\n            self.num_relative_distance = (2 * window_size[0] - 1) * (2 * window_size[1] - 1) + 3\n            window_area = window_size[0] * window_size[1]\n            self.relative_position_bias_table = nn.Parameter(\n                torch.empty(self.num_relative_distance, num_heads, **dd))  # 2*Wh-1 * 2*Ww-1, nH\n            self.register_buffer(\n                \"relative_position_index\",\n                torch.empty((window_area + 1, window_area + 1), device=device, dtype=torch.long),\n                persistent=False,\n            )\n        else:\n            self.window_size = None\n            self.relative_position_bias_table = None\n            self.relative_position_index = None\n\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(all_head_dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def _get_rel_pos_bias(self) -> torch.Tensor:\n        
\"\"\"Get relative position bias for the attention window.\n\n        Returns:\n            Relative position bias tensor of shape (1, num_heads, window_area+1, window_area+1).\n        \"\"\"\n        relative_position_bias = self.relative_position_bias_table[\n            self.relative_position_index.view(-1)].view(\n            self.window_size[0] * self.window_size[1] + 1,\n            self.window_size[0] * self.window_size[1] + 1, -1)  # Wh*Ww,Wh*Ww,nH\n        relative_position_bias = relative_position_bias.permute(2, 0, 1).contiguous()  # nH, Wh*Ww, Wh*Ww\n        return relative_position_bias.unsqueeze(0)\n\n    def forward(self, x: torch.Tensor, shared_rel_pos_bias: Optional[torch.Tensor] = None) -> torch.Tensor:\n        \"\"\"Forward pass of attention module.\n\n        Args:\n            x: Input tensor of shape (batch_size, num_tokens, dim).\n            shared_rel_pos_bias: Optional shared relative position bias from parent module.\n\n        Returns:\n            Output tensor of shape (batch_size, num_tokens, dim).\n        \"\"\"\n        B, N, C = x.shape\n\n        if self.q_bias is None:\n            qkv = self.qkv(x)\n        else:\n            qkv_bias = torch.cat((self.q_bias, self.k_bias, self.v_bias))\n            if self.qkv_bias_separate:\n                qkv = self.qkv(x)\n                qkv += qkv_bias\n            else:\n                qkv = F.linear(x, weight=self.qkv.weight, bias=qkv_bias)\n        qkv = qkv.reshape(B, N, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4)\n        q, k, v = qkv.unbind(0)  # B, num_heads, N, head_dim\n\n        if self.fused_attn:\n            rel_pos_bias = None\n            if self.relative_position_bias_table is not None:\n                rel_pos_bias = self._get_rel_pos_bias()\n                if shared_rel_pos_bias is not None:\n                    rel_pos_bias = rel_pos_bias + shared_rel_pos_bias\n            elif shared_rel_pos_bias is not None:\n                rel_pos_bias = 
shared_rel_pos_bias\n\n            x = F.scaled_dot_product_attention(\n                q, k, v,\n                attn_mask=rel_pos_bias,\n                dropout_p=self.attn_drop.p if self.training else 0.,\n            )\n        else:\n            q = q * self.scale\n            attn = (q @ k.transpose(-2, -1))\n\n            if self.relative_position_bias_table is not None:\n                attn = attn + self._get_rel_pos_bias()\n            if shared_rel_pos_bias is not None:\n                attn = attn + shared_rel_pos_bias\n\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x = attn @ v\n\n        x = x.transpose(1, 2).reshape(B, N, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        if self.q_bias is not None:\n            nn.init.zeros_(self.q_bias)\n            nn.init.zeros_(self.v_bias)\n        if self.relative_position_bias_table is not None:\n            nn.init.zeros_(self.relative_position_bias_table)\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        if self.k_bias is not None:\n            self.k_bias.zero_()\n        if self.relative_position_index is not None:\n            self.relative_position_index.copy_(\n                gen_relative_position_index(self.window_size, device=self.relative_position_index.device)\n            )\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n\nclass Block(nn.Module):\n    \"\"\"Transformer block with attention and MLP.\n\n    Standard transformer block consisting of multi-head self-attention and MLP\n    with residual connections and layer normalization. 
Supports layer scale and\n    stochastic depth regularization.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            qkv_bias: bool = False,\n            mlp_ratio: float = 4.,\n            scale_mlp: bool = False,\n            swiglu_mlp: bool = False,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            init_values: Optional[float] = None,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = LayerNorm,\n            window_size: Optional[Tuple[int, int]] = None,\n            attn_head_dim: Optional[int] = None,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize transformer block.\n\n        Args:\n            dim: Input feature dimension.\n            num_heads: Number of attention heads.\n            qkv_bias: If True, add learnable bias to query, key, value projections.\n            mlp_ratio: Ratio of MLP hidden dimension to input dimension.\n            scale_mlp: If True, apply layer normalization in MLP.\n            swiglu_mlp: If True, use SwiGLU activation in MLP.\n            proj_drop: Dropout rate for projections.\n            attn_drop: Dropout rate for attention.\n            drop_path: Drop path rate for stochastic depth.\n            init_values: Initial values for layer scale. 
If None, no layer scale.\n            act_layer: Activation function class.\n            norm_layer: Normalization layer class.\n            window_size: Window size for relative position bias in attention.\n            attn_head_dim: Dimension per attention head.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = Attention(\n            dim,\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            window_size=window_size,\n            attn_head_dim=attn_head_dim,\n            **dd,\n        )\n        # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm2 = norm_layer(dim, **dd)\n        if swiglu_mlp:\n            self.mlp = SwiGLU(\n                in_features=dim,\n                hidden_features=int(dim * mlp_ratio),\n                norm_layer=norm_layer if scale_mlp else None,\n                drop=proj_drop,\n                **dd,\n            )\n        else:\n            self.mlp = Mlp(\n                in_features=dim,\n                hidden_features=int(dim * mlp_ratio),\n                act_layer=act_layer,\n                norm_layer=norm_layer if scale_mlp else None,\n                drop=proj_drop,\n                **dd,\n            )\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n        self.init_values = init_values\n        if init_values:\n            self.gamma_1 = nn.Parameter(torch.empty(dim, **dd))\n            self.gamma_2 = nn.Parameter(torch.empty(dim, **dd))\n        else:\n            self.gamma_1, self.gamma_2 = None, None\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters.\"\"\"\n        if self.gamma_1 is not None:\n            nn.init.constant_(self.gamma_1, self.init_values)\n            nn.init.constant_(self.gamma_2, self.init_values)\n\n    def forward(self, x: torch.Tensor, shared_rel_pos_bias: Optional[torch.Tensor] = None) -> torch.Tensor:\n        \"\"\"Forward pass of transformer block.\n\n        Args:\n            x: Input tensor of shape (batch_size, num_tokens, dim).\n            shared_rel_pos_bias: Optional shared relative position bias.\n\n        Returns:\n            Output tensor of shape (batch_size, num_tokens, dim).\n        \"\"\"\n        if self.gamma_1 is None:\n            x = x + self.drop_path1(self.attn(self.norm1(x), shared_rel_pos_bias=shared_rel_pos_bias))\n            x = x + self.drop_path2(self.mlp(self.norm2(x)))\n        else:\n            x = x + self.drop_path1(self.gamma_1 * self.attn(self.norm1(x), shared_rel_pos_bias=shared_rel_pos_bias))\n            x = x + self.drop_path2(self.gamma_2 * self.mlp(self.norm2(x)))\n        return x\n\n\nclass RelativePositionBias(nn.Module):\n    \"\"\"Relative position bias module for window-based attention.\n\n    Generates learnable relative position biases for all pairs of positions\n    within a window, including special handling for cls token.\n    \"\"\"\n\n    def __init__(self, window_size: Tuple[int, int], num_heads: int, device=None, dtype=None):\n        \"\"\"Initialize relative position bias module.\n\n        Args:\n            window_size: Height and width of the attention window.\n  
          num_heads: Number of attention heads.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.window_size = window_size\n        self.window_area = window_size[0] * window_size[1]\n        num_relative_distance = (2 * window_size[0] - 1) * (2 * window_size[1] - 1) + 3\n        self.relative_position_bias_table = nn.Parameter(torch.empty(num_relative_distance, num_heads, **dd))\n        self.register_buffer(\n            \"relative_position_index\",\n            torch.empty((self.window_area + 1, self.window_area + 1), device=device, dtype=torch.long),\n            persistent=False,\n        )\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        nn.init.zeros_(self.relative_position_bias_table)\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        self.relative_position_index.copy_(\n            gen_relative_position_index(self.window_size, device=self.relative_position_index.device)\n        )\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n    def forward(self) -> torch.Tensor:\n        \"\"\"Generate relative position bias.\n\n        Returns:\n            Relative position bias tensor of shape (num_heads, window_area+1, window_area+1).\n        \"\"\"\n        relative_position_bias = self.relative_position_bias_table[self.relative_position_index.view(-1)].view(\n            self.window_area + 1, self.window_area + 1, -1)  # Wh*Ww,Wh*Ww,nH\n        return relative_position_bias.permute(2, 0, 1).contiguous()  # nH, Wh*Ww, Wh*Ww\n\n\nclass Beit(nn.Module):\n    \"\"\"BEiT: BERT Pre-Training of Image Transformers.\n\n    Vision Transformer model with support for 
relative position bias and\n    shared relative position bias across layers. Implements both BEiT v1 and v2\n    architectures with flexible configuration options.\n    \"\"\"\n\n    def __init__(\n            self,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            patch_size: Union[int, Tuple[int, int]] = 16,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            embed_dim: int = 768,\n            depth: int = 12,\n            num_heads: int = 12,\n            qkv_bias: bool = True,\n            mlp_ratio: float = 4.,\n            swiglu_mlp: bool = False,\n            scale_mlp: bool = False,\n            drop_rate: float = 0.,\n            pos_drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            norm_layer: Type[nn.Module] = LayerNorm,\n            init_values: Optional[float] = None,\n            use_abs_pos_emb: bool = True,\n            use_rel_pos_bias: bool = False,\n            use_shared_rel_pos_bias: bool = False,\n            head_init_scale: float = 0.001,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize BEiT model.\n\n        Args:\n            img_size: Input image size.\n            patch_size: Patch size for patch embedding.\n            in_chans: Number of input image channels.\n            num_classes: Number of classes for classification head.\n            global_pool: Type of global pooling ('avg' or '').\n            embed_dim: Embedding dimension.\n            depth: Number of transformer blocks.\n            num_heads: Number of attention heads.\n            qkv_bias: If True, add learnable bias to query, key, value projections.\n            mlp_ratio: Ratio of MLP hidden dimension to embedding dimension.\n            swiglu_mlp: If True, use SwiGLU activation in MLP.\n            scale_mlp: If True, apply layer 
normalization in MLP.\n            drop_rate: Dropout rate.\n            pos_drop_rate: Dropout rate for position embeddings.\n            proj_drop_rate: Dropout rate for projections.\n            attn_drop_rate: Dropout rate for attention.\n            drop_path_rate: Stochastic depth rate.\n            norm_layer: Normalization layer class.\n            init_values: Initial values for layer scale.\n            use_abs_pos_emb: If True, use absolute position embeddings.\n            use_rel_pos_bias: If True, use relative position bias in attention.\n            use_shared_rel_pos_bias: If True, share relative position bias across layers.\n            head_init_scale: Scale factor for head initialization.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.num_features = self.head_hidden_size = self.embed_dim = embed_dim  # for consistency with other models\n        self.num_prefix_tokens = 1\n        self.grad_checkpointing = False\n\n        self.patch_embed = PatchEmbed(\n            img_size=img_size,\n            patch_size=patch_size,\n            in_chans=in_chans,\n            embed_dim=embed_dim,\n            **dd,\n        )\n        num_patches = self.patch_embed.num_patches\n        r = self.patch_embed.feat_ratio() if hasattr(self.patch_embed, 'feat_ratio') else patch_size\n\n        self.cls_token = nn.Parameter(torch.empty(1, 1, embed_dim, **dd))\n        # self.mask_token = nn.Parameter(torch.empty(1, 1, embed_dim))\n        self.pos_embed = nn.Parameter(torch.empty(1, num_patches + 1, embed_dim, **dd)) if use_abs_pos_emb else None\n        self.pos_drop = nn.Dropout(p=pos_drop_rate)\n\n        if use_shared_rel_pos_bias:\n            self.rel_pos_bias = RelativePositionBias(\n                window_size=self.patch_embed.grid_size,\n                num_heads=num_heads,\n            
    **dd,\n            )\n        else:\n            self.rel_pos_bias = None\n\n        dpr = calculate_drop_path_rates(drop_path_rate, depth)  # stochastic depth decay rule\n        self.blocks = nn.ModuleList([\n            Block(\n                dim=embed_dim,\n                num_heads=num_heads,\n                qkv_bias=qkv_bias,\n                mlp_ratio=mlp_ratio,\n                scale_mlp=scale_mlp,\n                swiglu_mlp=swiglu_mlp,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dpr[i],\n                norm_layer=norm_layer,\n                init_values=init_values,\n                window_size=self.patch_embed.grid_size if use_rel_pos_bias else None,\n                **dd,\n            )\n            for i in range(depth)])\n        self.feature_info = [\n            dict(module=f'blocks.{i}', num_chs=embed_dim, reduction=r) for i in range(depth)]\n\n        use_fc_norm = self.global_pool == 'avg'\n        self.norm = nn.Identity() if use_fc_norm else norm_layer(embed_dim, **dd)\n        self.fc_norm = norm_layer(embed_dim, **dd) if use_fc_norm else nn.Identity()\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = nn.Linear(embed_dim, num_classes, **dd) if num_classes > 0 else nn.Identity()\n        self.head_init_scale = head_init_scale\n\n        # TODO: skip init when on meta device when safe to do so\n        self.init_weights(needs_reset=False)\n\n    def init_weights(self, needs_reset: bool = True) -> None:\n        \"\"\"Initialize model weights.\n\n        Args:\n            needs_reset: If True, call reset_parameters() on modules that have it.\n                Set to False when modules have already self-initialized in __init__.\n        \"\"\"\n        self.apply(partial(self._init_weights, needs_reset=needs_reset))\n        if self.pos_embed is not None:\n            trunc_normal_(self.pos_embed, std=.02)\n        trunc_normal_(self.cls_token, 
std=.02)\n\n        self.fix_init_weight()\n\n        if self.head_init_scale and isinstance(self.head, nn.Linear):\n            trunc_normal_(self.head.weight, std=.02)\n            with torch.no_grad():\n                self.head.weight.mul_(self.head_init_scale)\n                self.head.bias.mul_(self.head_init_scale)\n\n    def fix_init_weight(self) -> None:\n        \"\"\"Fix initialization weights according to BEiT paper.\n\n        Rescales attention and MLP weights based on layer depth to improve\n        training stability.\n        \"\"\"\n        with torch.no_grad():\n            for layer_id, layer in enumerate(self.blocks):\n                scale = math.sqrt(2.0 * (layer_id + 1))\n                layer.attn.proj.weight.div_(scale)\n                layer.mlp.fc2.weight.div_(scale)\n\n    def _init_weights(self, m: nn.Module, needs_reset: bool = True):\n        \"\"\"Initialize model weights.\n\n        Args:\n            m: Module to initialize.\n            needs_reset: If True, call reset_parameters() on modules that have it.\n        \"\"\"\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=.02)\n            if m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n        elif needs_reset and hasattr(m, 'reset_parameters'):\n            m.reset_parameters()\n\n    @torch.jit.ignore\n    def no_weight_decay(self) -> Set[str]:\n        \"\"\"Get parameter names that should not use weight decay.\n\n        Returns:\n            Set of parameter names to exclude from weight decay.\n        \"\"\"\n        nwd = {'pos_embed', 'cls_token'}\n        for n, _ in self.named_parameters():\n            if 'relative_position_bias_table' in n:\n                nwd.add(n)\n        return nwd\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True):\n        \"\"\"Enable or disable gradient checkpointing.\n\n        Args:\n            enable: If True, enable gradient checkpointing.\n        
\"\"\"\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        \"\"\"Create parameter group matcher for optimizer parameter groups.\n\n        Args:\n            coarse: If True, use coarse grouping.\n\n        Returns:\n            Dictionary mapping group names to regex patterns.\n        \"\"\"\n        matcher = dict(\n            stem=r'^cls_token|pos_embed|patch_embed|rel_pos_bias',  # stem and embed\n            blocks=[(r'^blocks\\.(\\d+)', None), (r'^norm', (99999,))],\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        \"\"\"Get the classifier head.\n\n        Returns:\n            The classification head module.\n        \"\"\"\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        \"\"\"Reset the classification head.\n\n        Args:\n            num_classes: Number of classes for new head.\n            global_pool: Global pooling type.\n        \"\"\"\n        self.num_classes = num_classes\n        if global_pool is not None:\n            self.global_pool = global_pool\n        self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            return_prefix_tokens: bool = False,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\"Forward pass that returns intermediate feature maps.\n\n        Args:\n            x: Input image tensor of shape (batch_size, channels, height, width).\n            indices: Block indices to return features from. 
If int, returns last n blocks.\n            return_prefix_tokens: If True, return both prefix and spatial tokens.\n            norm: If True, apply normalization to intermediate features.\n            stop_early: If True, stop at last selected intermediate.\n            output_fmt: Output format ('NCHW' or 'NLC').\n            intermediates_only: If True, only return intermediate features.\n\n        Returns:\n            If intermediates_only is True, returns list of intermediate tensors.\n            Otherwise, returns tuple of (final_features, intermediates).\n        \"\"\"\n        assert output_fmt in ('NCHW', 'NLC'), 'Output format must be one of NCHW or NLC.'\n        reshape = output_fmt == 'NCHW'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n\n        # forward pass\n        B, _, height, width = x.shape\n        x = self.patch_embed(x)\n        x = torch.cat((self.cls_token.expand(x.shape[0], -1, -1), x), dim=1)\n        if self.pos_embed is not None:\n            x = x + self.pos_embed\n        x = self.pos_drop(x)\n\n        rel_pos_bias = self.rel_pos_bias() if self.rel_pos_bias is not None else None\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            blocks = self.blocks\n        else:\n            blocks = self.blocks[:max_index + 1]\n        for i, blk in enumerate(blocks):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x, shared_rel_pos_bias=rel_pos_bias)\n            else:\n                x = blk(x, shared_rel_pos_bias=rel_pos_bias)\n            if i in take_indices:\n                # normalize intermediates with final norm layer if enabled\n                intermediates.append(self.norm(x) if norm else x)\n\n        # process intermediates\n        if self.num_prefix_tokens:\n            # split prefix (e.g. 
class, distill) and spatial feature tokens\n            prefix_tokens = [y[:, 0:self.num_prefix_tokens] for y in intermediates]\n            intermediates = [y[:, self.num_prefix_tokens:] for y in intermediates]\n        if reshape:\n            # reshape to BCHW output format\n            H, W = self.patch_embed.dynamic_feat_size((height, width))\n            intermediates = [y.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous() for y in intermediates]\n        if not torch.jit.is_scripting() and return_prefix_tokens:\n            # return_prefix not support in torchscript due to poor type handling\n            intermediates = list(zip(intermediates, prefix_tokens))\n\n        if intermediates_only:\n            return intermediates\n\n        x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ) -> List[int]:\n        \"\"\"Prune layers not required for specified intermediate outputs.\n\n        Args:\n            indices: Indices of blocks to keep.\n            prune_norm: If True, remove final normalization.\n            prune_head: If True, remove classification head.\n\n        Returns:\n            List of indices that were kept.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n        self.blocks = self.blocks[:max_index + 1]  # truncate blocks\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.fc_norm = nn.Identity()\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through feature extraction layers.\n\n        Args:\n            x: Input tensor of shape (batch_size, channels, height, width).\n\n        Returns:\n            Feature tensor of shape 
(batch_size, num_tokens, embed_dim).\n        \"\"\"\n        x = self.patch_embed(x)\n        x = torch.cat((self.cls_token.expand(x.shape[0], -1, -1), x), dim=1)\n        if self.pos_embed is not None:\n            x = x + self.pos_embed\n        x = self.pos_drop(x)\n\n        rel_pos_bias = self.rel_pos_bias() if self.rel_pos_bias is not None else None\n        for blk in self.blocks:\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x, shared_rel_pos_bias=rel_pos_bias)\n            else:\n                x = blk(x, shared_rel_pos_bias=rel_pos_bias)\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        \"\"\"Forward pass through classification head.\n\n        Args:\n            x: Feature tensor of shape (batch_size, num_tokens, embed_dim).\n            pre_logits: If True, return features before final linear layer.\n\n        Returns:\n            Logits tensor of shape (batch_size, num_classes) or pre-logits.\n        \"\"\"\n        if self.global_pool:\n            x = x[:, self.num_prefix_tokens:].mean(dim=1) if self.global_pool == 'avg' else x[:, 0]\n        x = self.fc_norm(x)\n        x = self.head_drop(x)\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through the model.\n\n        Args:\n            x: Input tensor of shape (batch_size, channels, height, width).\n\n        Returns:\n            Logits tensor of shape (batch_size, num_classes).\n        \"\"\"\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _cfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Create a default configuration dictionary for BEiT models.\n\n    Args:\n        url: Model weights URL.\n        **kwargs: Additional configuration parameters.\n\n    Returns:\n        Configuration 
dictionary.\n    \"\"\"\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': (0.5, 0.5, 0.5), 'std': (0.5, 0.5, 0.5),\n        'first_conv': 'patch_embed.proj', 'classifier': 'head',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'beit_base_patch16_224.in22k_ft_in22k_in1k': _cfg(\n        #url='https://conversationhub.blob.core.windows.net/beit-share-public/beit/beit_base_patch16_224_pt22k_ft22kto1k.pth',\n        hf_hub_id='timm/'),\n    'beit_base_patch16_384.in22k_ft_in22k_in1k': _cfg(\n        #url='https://conversationhub.blob.core.windows.net/beit-share-public/beit/beit_base_patch16_384_pt22k_ft22kto1k.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), crop_pct=1.0,\n    ),\n    'beit_base_patch16_224.in22k_ft_in22k': _cfg(\n        #url='https://conversationhub.blob.core.windows.net/beit-share-public/beit/beit_base_patch16_224_pt22k_ft22k.pth',\n        hf_hub_id='timm/',\n        num_classes=21841,\n    ),\n    'beit_large_patch16_224.in22k_ft_in22k_in1k': _cfg(\n        #url='https://conversationhub.blob.core.windows.net/beit-share-public/beit/beit_large_patch16_224_pt22k_ft22kto1k.pth',\n        hf_hub_id='timm/'),\n    'beit_large_patch16_384.in22k_ft_in22k_in1k': _cfg(\n        #url='https://conversationhub.blob.core.windows.net/beit-share-public/beit/beit_large_patch16_384_pt22k_ft22kto1k.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), crop_pct=1.0,\n    ),\n    'beit_large_patch16_512.in22k_ft_in22k_in1k': _cfg(\n        #url='https://conversationhub.blob.core.windows.net/beit-share-public/beit/beit_large_patch16_512_pt22k_ft22kto1k.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512), crop_pct=1.0,\n    ),\n    'beit_large_patch16_224.in22k_ft_in22k': _cfg(\n        
#url='https://conversationhub.blob.core.windows.net/beit-share-public/beit/beit_large_patch16_224_pt22k_ft22k.pth',\n        hf_hub_id='timm/',\n        num_classes=21841,\n    ),\n\n    'beitv2_base_patch16_224.in1k_ft_in22k_in1k': _cfg(\n        #url='https://conversationhub.blob.core.windows.net/beit-share-public/beitv2/beitv2_base_patch16_224_pt1k_ft21kto1k.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD\n    ),\n    'beitv2_base_patch16_224.in1k_ft_in1k': _cfg(\n        #url='https://conversationhub.blob.core.windows.net/beit-share-public/beitv2/beitv2_base_patch16_224_pt1k_ft1k.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD\n    ),\n    'beitv2_base_patch16_224.in1k_ft_in22k': _cfg(\n        #url='https://conversationhub.blob.core.windows.net/beit-share-public/beitv2/beitv2_base_patch16_224_pt1k_ft21k.pth',\n        hf_hub_id='timm/',\n        num_classes=21841, mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD\n    ),\n    'beitv2_large_patch16_224.in1k_ft_in22k_in1k': _cfg(\n        #url='https://conversationhub.blob.core.windows.net/beit-share-public/beitv2/beitv2_large_patch16_224_pt1k_ft21kto1k.pth',\n        hf_hub_id='timm/',\n        crop_pct=0.95, mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD\n    ),\n    'beitv2_large_patch16_224.in1k_ft_in1k': _cfg(\n        #url='https://conversationhub.blob.core.windows.net/beit-share-public/beitv2/beitv2_large_patch16_224_pt1k_ft1k.pth',\n        hf_hub_id='timm/',\n        crop_pct=0.95, mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD\n    ),\n    'beitv2_large_patch16_224.in1k_ft_in22k': _cfg(\n        #url='https://conversationhub.blob.core.windows.net/beit-share-public/beitv2/beitv2_large_patch16_224_pt1k_ft21k.pth',\n        hf_hub_id='timm/',\n        num_classes=21841, mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD\n    ),\n})\n\n\ndef checkpoint_filter_fn(state_dict: Dict[str, 
torch.Tensor], model: nn.Module, interpolation: str = 'bicubic', antialias: bool = True) -> Dict[str, torch.Tensor]:\n    \"\"\"Filter and process checkpoint state dict for loading.\n\n    Handles resizing of patch embeddings, position embeddings, and relative position\n    bias tables when model size differs from checkpoint.\n\n    Args:\n        state_dict: Checkpoint state dictionary.\n        model: Target model to load weights into.\n        interpolation: Interpolation method for resizing.\n        antialias: If True, use antialiasing when resizing.\n\n    Returns:\n        Filtered state dictionary.\n    \"\"\"\n    state_dict = state_dict.get('model', state_dict)\n    state_dict = state_dict.get('module', state_dict)\n    # beit v2 didn't strip module\n\n    out_dict = {}\n    for k, v in state_dict.items():\n        if 'relative_position_index' in k:\n            continue\n        if 'patch_embed.proj.weight' in k:\n            O, I, H, W = model.patch_embed.proj.weight.shape\n            if v.shape[-1] != W or v.shape[-2] != H:\n                v = resample_patch_embed(\n                    v,\n                    (H, W),\n                    interpolation=interpolation,\n                    antialias=antialias,\n                    verbose=True,\n                )\n        elif k == 'pos_embed' and v.shape[1] != model.pos_embed.shape[1]:\n            # To resize pos embedding when using model at different size from pretrained weights\n            num_prefix_tokens = 1\n            v = resample_abs_pos_embed(\n                v,\n                new_size=model.patch_embed.grid_size,\n                num_prefix_tokens=num_prefix_tokens,\n                interpolation=interpolation,\n                antialias=antialias,\n                verbose=True,\n            )\n        elif k.endswith('relative_position_bias_table'):\n            m = model.get_submodule(k[:-29])\n            if v.shape != m.relative_position_bias_table.shape or m.window_size[0] != 
m.window_size[1]:\n                v = resize_rel_pos_bias_table(\n                    v,\n                    new_window_size=m.window_size,\n                    new_bias_shape=m.relative_position_bias_table.shape,\n                )\n        out_dict[k] = v\n    return out_dict\n\n\ndef _create_beit(variant: str, pretrained: bool = False, **kwargs) -> Beit:\n    \"\"\"Create a BEiT model.\n\n    Args:\n        variant: Model variant name.\n        pretrained: If True, load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    Returns:\n        BEiT model instance.\n    \"\"\"\n    out_indices = kwargs.pop('out_indices', 3)\n    model = build_model_with_cfg(\n        Beit, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n    return model\n\n\n@register_model\ndef beit_base_patch16_224(pretrained: bool = False, **kwargs) -> Beit:\n    \"\"\"BEiT base model @ 224x224 with patch size 16x16.\"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4,\n        use_abs_pos_emb=False, use_rel_pos_bias=True, init_values=0.1)\n    model = _create_beit('beit_base_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef beit_base_patch16_384(pretrained: bool = False, **kwargs) -> Beit:\n    \"\"\"BEiT base model @ 384x384 with patch size 16x16.\"\"\"\n    model_args = dict(\n        img_size=384, patch_size=16, embed_dim=768, depth=12, num_heads=12,\n        use_abs_pos_emb=False, use_rel_pos_bias=True, init_values=0.1)\n    model = _create_beit('beit_base_patch16_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef beit_large_patch16_224(pretrained: bool = False, **kwargs) -> Beit:\n    \"\"\"BEiT large model @ 224x224 with patch size 16x16.\"\"\"\n    model_args = dict(\n        
patch_size=16, embed_dim=1024, depth=24, num_heads=16,\n        use_abs_pos_emb=False, use_rel_pos_bias=True, init_values=1e-5)\n    model = _create_beit('beit_large_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef beit_large_patch16_384(pretrained: bool = False, **kwargs) -> Beit:\n    \"\"\"BEiT large model @ 384x384 with patch size 16x16.\"\"\"\n    model_args = dict(\n        img_size=384, patch_size=16, embed_dim=1024, depth=24, num_heads=16,\n        use_abs_pos_emb=False, use_rel_pos_bias=True, init_values=1e-5)\n    model = _create_beit('beit_large_patch16_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef beit_large_patch16_512(pretrained: bool = False, **kwargs) -> Beit:\n    \"\"\"BEiT large model @ 512x512 with patch size 16x16.\"\"\"\n    model_args = dict(\n        img_size=512, patch_size=16, embed_dim=1024, depth=24, num_heads=16,\n        use_abs_pos_emb=False, use_rel_pos_bias=True, init_values=1e-5)\n    model = _create_beit('beit_large_patch16_512', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef beitv2_base_patch16_224(pretrained: bool = False, **kwargs) -> Beit:\n    \"\"\"BEiT v2 base model @ 224x224 with patch size 16x16.\"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4,\n        use_abs_pos_emb=False, use_rel_pos_bias=True, init_values=1e-5)\n    model = _create_beit('beitv2_base_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef beitv2_large_patch16_224(pretrained: bool = False, **kwargs) -> Beit:\n    \"\"\"BEiT v2 large model @ 224x224 with patch size 16x16.\"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=1024, depth=24, num_heads=16,\n        use_abs_pos_emb=False, use_rel_pos_bias=True, init_values=1e-5)\n    model = 
_create_beit('beitv2_large_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n"
  },
  {
    "path": "timm/models/byoanet.py",
    "content": "\"\"\" Bring-Your-Own-Attention Network\n\nA flexible network w/ dataclass based config for stacking NN blocks including\nself-attention (or similar) layers.\n\nCurrently used to implement experimental variants of:\n  * Bottleneck Transformers\n  * Lambda ResNets\n  * HaloNets\n\nConsider all of the models definitions here as experimental WIP and likely to change.\n\nHacked together by / copyright Ross Wightman, 2021.\n\"\"\"\nfrom typing import Any, Dict, Optional\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom ._builder import build_model_with_cfg\nfrom ._registry import register_model, generate_default_cfgs\nfrom .byobnet import ByoBlockCfg, ByoModelCfg, ByobNet, interleave_blocks\n\n__all__ = []\n\n\nmodel_cfgs = dict(\n\n    botnet26t=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=512, s=2, gs=0, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), d=2, c=1024, s=2, gs=0, br=0.25),\n            ByoBlockCfg(type='self_attn', d=2, c=2048, s=2, gs=0, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='maxpool',\n        fixed_input_size=True,\n        self_attn_layer='bottleneck',\n        self_attn_kwargs=dict()\n    ),\n    sebotnet33ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=0, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), every=[2], d=3, c=512, s=2, gs=0, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), every=[2], d=3, c=1024, s=2, gs=0, br=0.25),\n            ByoBlockCfg('self_attn', d=2, c=1536, s=2, gs=0, br=0.333),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='',\n        act_layer='silu',\n        num_features=1280,\n        attn_layer='se',\n        self_attn_layer='bottleneck',\n        self_attn_kwargs=dict()\n    
),\n    botnet50ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=3, c=256, s=1, gs=0, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), every=4, d=4, c=512, s=2, gs=0, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), d=6, c=1024, s=2, gs=0, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), d=3, c=2048, s=2, gs=0, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='maxpool',\n        act_layer='silu',\n        fixed_input_size=True,\n        self_attn_layer='bottleneck',\n        self_attn_kwargs=dict()\n    ),\n    eca_botnext26ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=16, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=512, s=2, gs=16, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), d=2, c=1024, s=2, gs=16, br=0.25),\n            ByoBlockCfg(type='self_attn', d=2, c=2048, s=2, gs=16, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='maxpool',\n        fixed_input_size=True,\n        act_layer='silu',\n        attn_layer='eca',\n        self_attn_layer='bottleneck',\n        self_attn_kwargs=dict(dim_head=16)\n    ),\n\n    halonet_h1=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='self_attn', d=3, c=64, s=1, gs=0, br=1.0),\n            ByoBlockCfg(type='self_attn', d=3, c=128, s=2, gs=0, br=1.0),\n            ByoBlockCfg(type='self_attn', d=10, c=256, s=2, gs=0, br=1.0),\n            ByoBlockCfg(type='self_attn', d=3, c=512, s=2, gs=0, br=1.0),\n        ),\n        stem_chs=64,\n        stem_type='7x7',\n        stem_pool='maxpool',\n\n        self_attn_layer='halo',\n        self_attn_kwargs=dict(block_size=8, halo_size=3),\n    ),\n    halonet26t=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', 
d=2, c=512, s=2, gs=0, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), d=2, c=1024, s=2, gs=0, br=0.25),\n            ByoBlockCfg(type='self_attn', d=2, c=2048, s=2, gs=0, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='maxpool',\n        self_attn_layer='halo',\n        self_attn_kwargs=dict(block_size=8, halo_size=2)\n    ),\n    sehalonet33ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=0, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), every=[2], d=3, c=512, s=2, gs=0, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), every=[2], d=3, c=1024, s=2, gs=0, br=0.25),\n            ByoBlockCfg('self_attn', d=2, c=1536, s=2, gs=0, br=0.333),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='',\n        act_layer='silu',\n        num_features=1280,\n        attn_layer='se',\n        self_attn_layer='halo',\n        self_attn_kwargs=dict(block_size=8, halo_size=3)\n    ),\n    halonet50ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=3, c=256, s=1, gs=0, br=0.25),\n            interleave_blocks(\n                types=('bottle', 'self_attn'), every=4, d=4, c=512, s=2, gs=0, br=0.25,\n                self_attn_layer='halo', self_attn_kwargs=dict(block_size=8, halo_size=3, num_heads=4)),\n            interleave_blocks(types=('bottle', 'self_attn'), d=6, c=1024, s=2, gs=0, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), d=3, c=2048, s=2, gs=0, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='maxpool',\n        act_layer='silu',\n        self_attn_layer='halo',\n        self_attn_kwargs=dict(block_size=8, halo_size=3)\n    ),\n    eca_halonext26ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=16, br=0.25),\n            ByoBlockCfg(type='bottle', 
d=2, c=512, s=2, gs=16, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), d=2, c=1024, s=2, gs=16, br=0.25),\n            ByoBlockCfg(type='self_attn', d=2, c=2048, s=2, gs=16, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='maxpool',\n        act_layer='silu',\n        attn_layer='eca',\n        self_attn_layer='halo',\n        self_attn_kwargs=dict(block_size=8, halo_size=2, dim_head=16)\n    ),\n\n    lambda_resnet26t=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=512, s=2, gs=0, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), d=2, c=1024, s=2, gs=0, br=0.25),\n            ByoBlockCfg(type='self_attn', d=2, c=2048, s=2, gs=0, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='maxpool',\n        self_attn_layer='lambda',\n        self_attn_kwargs=dict(r=9)\n    ),\n    lambda_resnet50ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=3, c=256, s=1, gs=0, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), every=4, d=4, c=512, s=2, gs=0, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), d=6, c=1024, s=2, gs=0, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), d=3, c=2048, s=2, gs=0, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='maxpool',\n        act_layer='silu',\n        self_attn_layer='lambda',\n        self_attn_kwargs=dict(r=9)\n    ),\n    lambda_resnet26rpt_256=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=512, s=2, gs=0, br=0.25),\n            interleave_blocks(types=('bottle', 'self_attn'), d=2, c=1024, s=2, gs=0, br=0.25),\n            ByoBlockCfg(type='self_attn', d=2, c=2048, s=2, 
gs=0, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='maxpool',\n        self_attn_layer='lambda',\n        self_attn_kwargs=dict(r=None)\n    ),\n\n    # experimental\n    haloregnetz_b=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=48, s=2, gs=16, br=3),\n            ByoBlockCfg(type='bottle', d=6, c=96, s=2, gs=16, br=3),\n            interleave_blocks(types=('bottle', 'self_attn'), every=3, d=12, c=192, s=2, gs=16, br=3),\n            ByoBlockCfg('self_attn', d=2, c=288, s=2, gs=16, br=3),\n        ),\n        stem_chs=32,\n        stem_pool='',\n        downsample='',\n        num_features=1536,\n        act_layer='silu',\n        attn_layer='se',\n        attn_kwargs=dict(rd_ratio=0.25),\n        block_kwargs=dict(bottle_in=True, linear_out=True),\n        self_attn_layer='halo',\n        self_attn_kwargs=dict(block_size=7, halo_size=2, qk_ratio=0.33)\n    ),\n\n    # experimental\n    lamhalobotnet50ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=3, c=256, s=1, gs=0, br=0.25),\n            interleave_blocks(\n                types=('bottle', 'self_attn'), d=4, c=512, s=2, gs=0, br=0.25,\n                self_attn_layer='lambda', self_attn_kwargs=dict(r=13)),\n            interleave_blocks(\n                types=('bottle', 'self_attn'), d=6, c=1024, s=2, gs=0, br=0.25,\n                self_attn_layer='halo', self_attn_kwargs=dict(halo_size=3)),\n            interleave_blocks(\n                types=('bottle', 'self_attn'), d=3, c=2048, s=2, gs=0, br=0.25,\n                self_attn_layer='bottleneck', self_attn_kwargs=dict()),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='',\n        act_layer='silu',\n    ),\n    halo2botnet50ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=3, c=256, s=1, gs=0, br=0.25),\n            interleave_blocks(\n                types=('bottle', 'self_attn'), d=4, 
c=512, s=2, gs=0, br=0.25,\n                self_attn_layer='halo', self_attn_kwargs=dict(halo_size=3)),\n            interleave_blocks(\n                types=('bottle', 'self_attn'), d=6, c=1024, s=2, gs=0, br=0.25,\n                self_attn_layer='halo', self_attn_kwargs=dict(halo_size=3)),\n            interleave_blocks(\n                types=('bottle', 'self_attn'), d=3, c=2048, s=2, gs=0, br=0.25,\n                self_attn_layer='bottleneck', self_attn_kwargs=dict()),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='',\n        act_layer='silu',\n    ),\n)\n\n\ndef _create_byoanet(variant: str, cfg_variant: Optional[str] = None, pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\"Create a Bring-Your-Own-Attention network model.\n\n    Args:\n        variant: Model variant name.\n        cfg_variant: Config variant name if different from model variant.\n        pretrained: Load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    Returns:\n        Instantiated ByobNet model.\n    \"\"\"\n    return build_model_with_cfg(\n        ByobNet, variant, pretrained,\n        model_cfg=model_cfgs[variant] if not cfg_variant else model_cfgs[cfg_variant],\n        feature_cfg=dict(flatten_sequential=True),\n        **kwargs,\n    )\n\n\ndef _cfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Generate default model configuration.\n\n    Args:\n        url: URL for pretrained weights.\n        **kwargs: Override default configuration values.\n\n    Returns:\n        Model configuration dictionary.\n    \"\"\"\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.95, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.conv1.conv', 'classifier': 'head.fc',\n        'fixed_input_size': False, 'min_input_size': (3, 224, 224), 'license': 'apache-2.0',\n        
**kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    # GPU-Efficient (ResNet) weights\n    'botnet26t_256.c1_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/botnet26t_c1_256-167a0e9f.pth',\n        hf_hub_id='timm/',\n        fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),\n    'sebotnet33ts_256.a1h_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/sebotnet33ts_a1h2_256-957e3c3e.pth',\n        hf_hub_id='timm/',\n        fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=0.94),\n    'botnet50ts_256.untrained': _cfg(\n        fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),\n    'eca_botnext26ts_256.c1_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/eca_botnext26ts_c_256-95a898f6.pth',\n        hf_hub_id='timm/',\n        fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),\n\n    'halonet_h1.untrained': _cfg(input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),\n    'halonet26t.a1h_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/halonet26t_a1h_256-3083328c.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),\n    'sehalonet33ts.ra2_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/sehalonet33ts_256-87e053f9.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256), crop_pct=0.94),\n    'halonet50ts.a1h_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/halonet50ts_a1h2_256-f3a3daee.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), 
pool_size=(8, 8), min_input_size=(3, 256, 256), crop_pct=0.94),\n    'eca_halonext26ts.c1_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/eca_halonext26ts_c_256-06906299.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256), crop_pct=0.94),\n\n    'lambda_resnet26t.c1_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/lambda_resnet26t_c_256-e5a5c857.pth',\n        hf_hub_id='timm/',\n        min_input_size=(3, 128, 128), input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=0.94),\n    'lambda_resnet50ts.a1h_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/lambda_resnet50ts_a1h_256-b87370f7.pth',\n        hf_hub_id='timm/',\n        min_input_size=(3, 128, 128), input_size=(3, 256, 256), pool_size=(8, 8)),\n    'lambda_resnet26rpt_256.c1_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/lambda_resnet26rpt_c_256-ab00292d.pth',\n        hf_hub_id='timm/',\n        fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=0.94),\n\n    'haloregnetz_b.ra3_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/haloregnetz_c_raa_256-c8ad7616.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        first_conv='stem.conv', input_size=(3, 224, 224), pool_size=(7, 7), min_input_size=(3, 224, 224), crop_pct=0.94),\n\n    'lamhalobotnet50ts_256.a1h_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/lamhalobotnet50ts_a1h2_256-fe3d9445.pth',\n        hf_hub_id='timm/',\n        fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),\n    'halo2botnet50ts_256.a1h_in1k': _cfg(\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/halo2botnet50ts_a1h2_256-fd9c11a3.pth',\n        hf_hub_id='timm/',\n        fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),\n})\n\n\n@register_model\ndef botnet26t_256(pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\" Bottleneck Transformer w/ ResNet26-T backbone.\n    \"\"\"\n    kwargs.setdefault('img_size', 256)\n    return _create_byoanet('botnet26t_256', 'botnet26t', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef sebotnet33ts_256(pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\" Bottleneck Transformer w/ a ResNet33-t backbone, SE attn for non Halo blocks, SiLU,\n    \"\"\"\n    return _create_byoanet('sebotnet33ts_256', 'sebotnet33ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef botnet50ts_256(pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\" Bottleneck Transformer w/ ResNet50-T backbone, silu act.\n    \"\"\"\n    kwargs.setdefault('img_size', 256)\n    return _create_byoanet('botnet50ts_256', 'botnet50ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef eca_botnext26ts_256(pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\" Bottleneck Transformer w/ ResNet26-T backbone, silu act.\n    \"\"\"\n    kwargs.setdefault('img_size', 256)\n    return _create_byoanet('eca_botnext26ts_256', 'eca_botnext26ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef halonet_h1(pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\" HaloNet-H1. Halo attention in all stages as per the paper.\n    NOTE: This runs very slowly!\n    \"\"\"\n    return _create_byoanet('halonet_h1', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef halonet26t(pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\" HaloNet w/ a ResNet26-t backbone. 
Halo attention in final two stages\n    \"\"\"\n    return _create_byoanet('halonet26t', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef sehalonet33ts(pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\" HaloNet w/ a ResNet33-t backbone, SE attn for non Halo blocks, SiLU, 1-2 Halo in stage 2,3,4.\n    \"\"\"\n    return _create_byoanet('sehalonet33ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef halonet50ts(pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\" HaloNet w/ a ResNet50-t backbone, silu act. Halo attention in final two stages\n    \"\"\"\n    return _create_byoanet('halonet50ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef eca_halonext26ts(pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\" HaloNet w/ a ResNet26-t backbone, silu act. Halo attention in final two stages\n    \"\"\"\n    return _create_byoanet('eca_halonext26ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef lambda_resnet26t(pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\" Lambda-ResNet-26-T. Lambda layers w/ conv pos in last two stages.\n    \"\"\"\n    return _create_byoanet('lambda_resnet26t', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef lambda_resnet50ts(pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\" Lambda-ResNet-50-TS. SiLU act. Lambda layers w/ conv pos in last two stages.\n    \"\"\"\n    return _create_byoanet('lambda_resnet50ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef lambda_resnet26rpt_256(pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\" Lambda-ResNet-26-R-T. 
Lambda layers w/ rel pos embed in last two stages.\n    \"\"\"\n    kwargs.setdefault('img_size', 256)\n    return _create_byoanet('lambda_resnet26rpt_256', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef haloregnetz_b(pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\" Halo + RegNetZ\n    \"\"\"\n    return _create_byoanet('haloregnetz_b', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef lamhalobotnet50ts_256(pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\" Combo Attention (Lambda + Halo + Bot) Network\n    \"\"\"\n    return _create_byoanet('lamhalobotnet50ts_256', 'lamhalobotnet50ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef halo2botnet50ts_256(pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\" Combo Attention (Halo + Halo + Bot) Network\n    \"\"\"\n    return _create_byoanet('halo2botnet50ts_256', 'halo2botnet50ts', pretrained=pretrained, **kwargs)\n"
  },
  {
    "path": "timm/models/byobnet.py",
    "content": "\"\"\" Bring-Your-Own-Blocks Network\n\nA flexible network w/ dataclass based config for stacking those NN blocks.\n\nThis model is currently used to implement the following networks:\n\nGPU Efficient (ResNets) - gernet_l/m/s (original versions called genet, but this was already used (by SENet author)).\nPaper: `Neural Architecture Design for GPU-Efficient Networks` - https://arxiv.org/abs/2006.14090\nCode and weights: https://github.com/idstcv/GPU-Efficient-Networks, licensed Apache 2.0\n\nRepVGG - repvgg_*\nPaper: `Making VGG-style ConvNets Great Again` - https://arxiv.org/abs/2101.03697\nCode and weights: https://github.com/DingXiaoH/RepVGG, licensed MIT\n\nMobileOne - mobileone_*\nPaper: `MobileOne: An Improved One millisecond Mobile Backbone` - https://arxiv.org/abs/2206.04040\nCode and weights: https://github.com/apple/ml-mobileone, licensed MIT\n\nIn all cases the models have been modified to fit within the design of ByobNet. I've remapped\nthe original weights and verified accuracies.\n\nFor GPU Efficient nets, I used the original names for the blocks since they were for the most part\nthe same as original residual blocks in ResNe(X)t, DarkNet, and other existing models. 
Note also some\nchanges introduced in RegNet were also present in the stem and bottleneck blocks for this model.\n\nA significant number of different network archs can be implemented here, including variants of the\nabove nets that include attention.\n\nHacked together by / copyright Ross Wightman, 2021.\n\"\"\"\nimport math\nfrom dataclasses import dataclass, field, replace\nfrom functools import partial\nfrom typing import Tuple, List, Dict, Optional, Union, Any, Callable, Sequence, Type\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, OPENAI_CLIP_MEAN, OPENAI_CLIP_STD\nfrom timm.layers import (\n    ClassifierHead,\n    NormMlpClassifierHead,\n    ConvNormAct,\n    BatchNormAct2d,\n    DropBlock2d,\n    EvoNorm2dS0a,\n    AttentionPool2d,\n    RotAttentionPool2d,\n    DropPath,\n    calculate_drop_path_rates,\n    AvgPool2dSame,\n    create_conv2d,\n    get_act_layer,\n    get_norm_act_layer,\n    get_attn,\n    make_divisible,\n    to_2tuple,\n)\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import named_apply, checkpoint_seq\nfrom ._registry import generate_default_cfgs, register_model\n\n__all__ = ['ByobNet', 'ByoModelCfg', 'ByoBlockCfg', 'create_byob_stem', 'create_block']\n\n\n@dataclass\nclass ByoBlockCfg:\n    \"\"\"Block configuration for Bring-Your-Own-Blocks.\n\n    Defines configuration for a single block or stage of blocks.\n    \"\"\"\n    type: Union[str, nn.Module]\n    d: int  # block depth (number of block repeats in stage)\n    c: int  # number of output channels for each block in stage\n    s: int = 2  # stride of stage (first block)\n    gs: Optional[Union[int, Callable]] = None  # group-size of blocks in stage, conv is depthwise if gs == 1\n    br: float = 1.  
# bottleneck-ratio of blocks in stage\n\n    # NOTE: these config items override the model cfgs that are applied to all blocks by default\n    attn_layer: Optional[str] = None\n    attn_kwargs: Optional[Dict[str, Any]] = None\n    self_attn_layer: Optional[str] = None\n    self_attn_kwargs: Optional[Dict[str, Any]] = None\n    block_kwargs: Optional[Dict[str, Any]] = None\n\n\n@dataclass\nclass ByoModelCfg:\n    \"\"\"Model configuration for Bring-Your-Own-Blocks network.\n\n    Defines overall architecture configuration.\n    \"\"\"\n    blocks: Tuple[Union[ByoBlockCfg, Tuple[ByoBlockCfg, ...]], ...]\n    downsample: str = 'conv1x1'\n    stem_type: str = '3x3'\n    stem_pool: Optional[str] = 'maxpool'\n    stem_chs: Union[int, List[int], Tuple[int, ...]] = 32\n    width_factor: float = 1.0\n    num_features: int = 0  # num out_channels for final conv, no final 1x1 conv if 0\n    zero_init_last: bool = True  # zero init last weight (usually bn) in residual path\n    fixed_input_size: bool = False  # model constrained to a fixed-input size / img_size must be provided on creation\n\n    # layer config\n    act_layer: str = 'relu'\n    norm_layer: str = 'batchnorm'\n    aa_layer: str = ''\n\n    # Head config\n    head_hidden_size: Optional[int] = None  # feat dim of MLP head or AttentionPool output\n    head_type: str = 'classifier'\n\n    # Block config\n    # NOTE: these config items will be overridden by the block cfg (per-block) if they are set there\n    attn_layer: Optional[str] = None\n    attn_kwargs: dict = field(default_factory=lambda: dict())\n    self_attn_layer: Optional[str] = None\n    self_attn_kwargs: dict = field(default_factory=lambda: dict())\n    block_kwargs: Dict[str, Any] = field(default_factory=lambda: dict())\n\n\ndef _rep_vgg_bcfg(d: Tuple[int, ...] = (4, 6, 16, 1), wf: Tuple[float, ...] 
= (1., 1., 1., 1.), groups: int = 0) -> \\\nTuple[ByoBlockCfg, ...]:\n    \"\"\"Create RepVGG block configuration.\n\n    Args:\n        d: Depth (number of blocks) per stage.\n        wf: Width factor per stage.\n        groups: Number of groups for grouped convolution.\n\n    Returns:\n        Tuple of block configurations.\n    \"\"\"\n    c = (64, 128, 256, 512)\n    group_size = 0\n    if groups > 0:\n        group_size = lambda chs, idx: chs // groups if (idx + 1) % 2 == 0 else 0\n    bcfg = tuple([ByoBlockCfg(type='rep', d=d, c=c * wf, gs=group_size) for d, c, wf in zip(d, c, wf)])\n    return bcfg\n\n\ndef _mobileone_bcfg(\n        d: Tuple[int, ...] = (2, 8, 10, 1),\n        wf: Tuple[float, ...] = (1., 1., 1., 1.),\n        se_blocks: Tuple[int, ...] = (),\n        num_conv_branches: int = 1\n) -> List[List[ByoBlockCfg]]:\n    \"\"\"Create MobileOne block configuration.\n\n    Args:\n        d: Depth (number of blocks) per stage.\n        wf: Width factor per stage.\n        se_blocks: Number of SE blocks per stage.\n        num_conv_branches: Number of conv branches.\n\n    Returns:\n        List of block configurations per stage.\n    \"\"\"\n    c = (64, 128, 256, 512)\n    prev_c = min(64, c[0] * wf[0])\n    se_blocks = se_blocks or (0,) * len(d)\n    bcfg = []\n    for d, c, w, se in zip(d, c, wf, se_blocks):\n        scfg = []\n        for i in range(d):\n            out_c = c * w\n            bk = dict(num_conv_branches=num_conv_branches)\n            ak = {}\n            if i >= d - se:\n                ak['attn_layer'] = 'se'\n            scfg += [ByoBlockCfg(type='one', d=1, c=prev_c, gs=1, block_kwargs=bk, **ak)]  # depthwise block\n            scfg += [ByoBlockCfg(\n                type='one', d=1, c=out_c, gs=0, block_kwargs=dict(kernel_size=1, **bk), **ak)]  # pointwise block\n            prev_c = out_c\n        bcfg += [scfg]\n    return bcfg\n\n\ndef interleave_blocks(\n        types: Tuple[str, str],\n        d: int,\n        every: 
Union[int, List[int]] = 1,\n        first: bool = False,\n        **kwargs,\n) -> Tuple[ByoBlockCfg, ...]:\n    \"\"\"Interleave 2 block types in stack.\n\n    Args:\n        types: Two block type names to interleave.\n        d: Total depth of blocks.\n        every: Interval for alternating blocks.\n        first: Whether to start with alternate block.\n        **kwargs: Additional block arguments.\n\n    Returns:\n        Tuple of interleaved block configurations.\n    \"\"\"\n    assert len(types) == 2\n    if isinstance(every, int):\n        every = list(range(0 if first else every, d, every + 1))\n        if not every:\n            every = [d - 1]\n    set(every)\n    blocks = []\n    for i in range(d):\n        block_type = types[1] if i in every else types[0]\n        blocks += [ByoBlockCfg(type=block_type, d=1, **kwargs)]\n    return tuple(blocks)\n\n\ndef expand_blocks_cfg(stage_blocks_cfg: Union[ByoBlockCfg, Sequence[ByoBlockCfg]]) -> List[ByoBlockCfg]:\n    \"\"\"Expand block config into individual block instances.\n\n    Args:\n        stage_blocks_cfg: Block configuration(s) for a stage.\n\n    Returns:\n        List of individual block configurations.\n    \"\"\"\n    if not isinstance(stage_blocks_cfg, Sequence):\n        stage_blocks_cfg = (stage_blocks_cfg,)\n    block_cfgs = []\n    for i, cfg in enumerate(stage_blocks_cfg):\n        block_cfgs += [replace(cfg, d=1) for _ in range(cfg.d)]\n    return block_cfgs\n\n\ndef num_groups(group_size: Optional[int], channels: int) -> int:\n    \"\"\"Calculate number of groups for grouped convolution.\n\n    Args:\n        group_size: Size of each group (1 for depthwise).\n        channels: Number of channels.\n\n    Returns:\n        Number of groups.\n    \"\"\"\n    if not group_size:  # 0 or None\n        return 1  # normal conv with 1 group\n    else:\n        # NOTE group_size == 1 -> depthwise conv\n        assert channels % group_size == 0\n        return channels // 
group_size\n\n\n@dataclass\nclass LayerFn:\n    \"\"\"Container for layer factory functions.\"\"\"\n    conv_norm_act: Type[nn.Module] = ConvNormAct\n    norm_act: Type[nn.Module] = BatchNormAct2d\n    act: Type[nn.Module] = nn.ReLU\n    attn: Optional[Type[nn.Module]] = None\n    self_attn: Optional[Type[nn.Module]] = None\n\n\nclass DownsampleAvg(nn.Module):\n    \"\"\"Average pool downsampling module.\n\n    AvgPool Downsampling as in 'D' ResNet variants.\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            stride: int = 1,\n            dilation: int = 1,\n            apply_act: bool = False,\n            layers: Optional[LayerFn] = None,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize DownsampleAvg.\n\n        Args:\n            in_chs: Number of input channels.\n            out_chs: Number of output channels.\n            stride: Stride for downsampling.\n            dilation: Dilation rate.\n            apply_act: Whether to apply activation.\n            layers: Layer factory functions.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        layers = layers or LayerFn()\n        avg_stride = stride if dilation == 1 else 1\n        if stride > 1 or dilation > 1:\n            avg_pool_fn = AvgPool2dSame if avg_stride == 1 and dilation > 1 else nn.AvgPool2d\n            self.pool = avg_pool_fn(2, avg_stride, ceil_mode=True, count_include_pad=False)\n        else:\n            self.pool = nn.Identity()\n        self.conv = layers.conv_norm_act(in_chs, out_chs, 1, apply_act=apply_act, **dd)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Output tensor.\n        \"\"\"\n        return self.conv(self.pool(x))\n\n\ndef create_shortcut(\n        downsample_type: str,\n        in_chs: int,\n        
out_chs: int,\n        stride: int,\n        dilation: Tuple[int, int],\n        layers: LayerFn,\n        **kwargs,\n) -> Optional[nn.Module]:\n    \"\"\"Create shortcut connection for residual blocks.\n\n    Args:\n        downsample_type: Type of downsampling ('avg', 'conv1x1', or '').\n        in_chs: Input channels.\n        out_chs: Output channels.\n        stride: Stride for downsampling.\n        dilation: Dilation rates.\n        layers: Layer factory functions.\n        **kwargs: Additional arguments.\n\n    Returns:\n        Shortcut module or None.\n    \"\"\"\n    assert downsample_type in ('avg', 'conv1x1', '')\n    if in_chs != out_chs or stride != 1 or dilation[0] != dilation[1]:\n        if not downsample_type:\n            return None  # no shortcut\n        elif downsample_type == 'avg':\n            return DownsampleAvg(in_chs, out_chs, stride=stride, dilation=dilation[0], **kwargs)\n        else:\n            return layers.conv_norm_act(in_chs, out_chs, kernel_size=1, stride=stride, dilation=dilation[0], **kwargs)\n    else:\n        return nn.Identity()  # identity shortcut\n\n\nclass BasicBlock(nn.Module):\n    \"\"\" ResNet Basic Block - kxk + kxk\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 3,\n            stride: int = 1,\n            dilation: Tuple[int, int] = (1, 1),\n            group_size: Optional[int] = None,\n            bottle_ratio: float = 1.0,\n            downsample: str = 'avg',\n            attn_last: bool = True,\n            linear_out: bool = False,\n            layers: LayerFn = None,\n            drop_block: Callable = None,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        layers = layers or LayerFn()\n        mid_chs = make_divisible(out_chs * bottle_ratio)\n        groups = 
num_groups(group_size, mid_chs)\n\n        self.shortcut = create_shortcut(\n            downsample,\n            in_chs,\n            out_chs,\n            stride=stride,\n            dilation=dilation,\n            apply_act=False,\n            layers=layers,\n            **dd,\n        )\n\n        self.conv1_kxk = layers.conv_norm_act(in_chs, mid_chs, kernel_size, stride=stride, dilation=dilation[0], **dd)\n        self.attn = nn.Identity() if attn_last or layers.attn is None else layers.attn(mid_chs)\n        self.conv2_kxk = layers.conv_norm_act(\n            mid_chs,\n            out_chs,\n            kernel_size,\n            dilation=dilation[1],\n            groups=groups,\n            drop_layer=drop_block,\n            apply_act=False,\n            **dd,\n        )\n        self.attn_last = nn.Identity() if not attn_last or layers.attn is None else layers.attn(out_chs, **dd)\n        self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0. else nn.Identity()\n        self.act = nn.Identity() if linear_out else layers.act(inplace=True)\n\n    def init_weights(self, zero_init_last: bool = False):\n        if zero_init_last and self.shortcut is not None and getattr(self.conv2_kxk.bn, 'weight', None) is not None:\n            nn.init.zeros_(self.conv2_kxk.bn.weight)\n        for attn in (self.attn, self.attn_last):\n            if hasattr(attn, 'reset_parameters'):\n                attn.reset_parameters()\n\n    def forward(self, x):\n        shortcut = x\n        x = self.conv1_kxk(x)\n        x = self.attn(x)\n        x = self.conv2_kxk(x)\n        x = self.attn_last(x)\n        x = self.drop_path(x)\n        if self.shortcut is not None:\n            x = x + self.shortcut(shortcut)\n        return self.act(x)\n\n\nclass BottleneckBlock(nn.Module):\n    \"\"\" ResNet-like Bottleneck Block - 1x1 - kxk - 1x1\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 3,\n   
         stride: int = 1,\n            dilation: Tuple[int, int] = (1, 1),\n            bottle_ratio: float = 1.,\n            group_size: Optional[int] = None,\n            downsample: str = 'avg',\n            attn_last: bool = False,\n            linear_out: bool = False,\n            extra_conv: bool = False,\n            bottle_in: bool = False,\n            layers: LayerFn = None,\n            drop_block: Callable = None,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        layers = layers or LayerFn()\n        mid_chs = make_divisible((in_chs if bottle_in else out_chs) * bottle_ratio)\n        groups = num_groups(group_size, mid_chs)\n\n        self.shortcut = create_shortcut(\n            downsample,\n            in_chs,\n            out_chs,\n            stride=stride,\n            dilation=dilation,\n            apply_act=False,\n            layers=layers,\n            **dd,\n        )\n\n        self.conv1_1x1 = layers.conv_norm_act(in_chs, mid_chs, 1, **dd)\n        self.conv2_kxk = layers.conv_norm_act(\n            mid_chs,\n            mid_chs,\n            kernel_size,\n            stride=stride,\n            dilation=dilation[0],\n            groups=groups,\n            drop_layer=drop_block,\n            **dd,\n        )\n        if extra_conv:\n            self.conv2b_kxk = layers.conv_norm_act(\n                mid_chs,\n                mid_chs,\n                kernel_size,\n                dilation=dilation[1],\n                groups=groups,\n                **dd,\n            )\n        else:\n            self.conv2b_kxk = nn.Identity()\n        self.attn = nn.Identity() if attn_last or layers.attn is None else layers.attn(mid_chs, **dd)\n        self.conv3_1x1 = layers.conv_norm_act(mid_chs, out_chs, 1, apply_act=False, **dd)\n        self.attn_last = nn.Identity() if not attn_last or layers.attn is None else 
layers.attn(out_chs, **dd)\n        self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0. else nn.Identity()\n        self.act = nn.Identity() if linear_out else layers.act(inplace=True)\n\n    def init_weights(self, zero_init_last: bool = False):\n        if zero_init_last and self.shortcut is not None and getattr(self.conv3_1x1.bn, 'weight', None) is not None:\n            nn.init.zeros_(self.conv3_1x1.bn.weight)\n        for attn in (self.attn, self.attn_last):\n            if hasattr(attn, 'reset_parameters'):\n                attn.reset_parameters()\n\n    def forward(self, x):\n        shortcut = x\n        x = self.conv1_1x1(x)\n        x = self.conv2_kxk(x)\n        x = self.conv2b_kxk(x)\n        x = self.attn(x)\n        x = self.conv3_1x1(x)\n        x = self.attn_last(x)\n        x = self.drop_path(x)\n        if self.shortcut is not None:\n            x = x + self.shortcut(shortcut)\n        return self.act(x)\n\n\nclass DarkBlock(nn.Module):\n    \"\"\" DarkNet-like (1x1 + 3x3 w/ stride) block\n\n    The GE-Net impl included a 1x1 + 3x3 block in their search space. It was not used in the feature models.\n    This block is pretty much a DarkNet block (also DenseNet) hence the name. 
Neither DarkNet or DenseNet\n    uses strides within the block (external 3x3 or maxpool downsampling is done in front of the block repeats).\n\n    If one does want to use a lot of these blocks w/ stride, I'd recommend using the EdgeBlock (3x3 /w stride + 1x1)\n    for more optimal compute.\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 3,\n            stride: int = 1,\n            dilation: Tuple[int, int] = (1, 1),\n            bottle_ratio: float = 1.0,\n            group_size: Optional[int] = None,\n            downsample: str = 'avg',\n            attn_last: bool = True,\n            linear_out: bool = False,\n            layers: LayerFn = None,\n            drop_block: Callable = None,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        layers = layers or LayerFn()\n        mid_chs = make_divisible(out_chs * bottle_ratio)\n        groups = num_groups(group_size, mid_chs)\n\n        self.shortcut = create_shortcut(\n            downsample,\n            in_chs,\n            out_chs,\n            stride=stride,\n            dilation=dilation,\n            apply_act=False,\n            layers=layers,\n            **dd,\n        )\n\n        self.conv1_1x1 = layers.conv_norm_act(in_chs, mid_chs, 1, **dd)\n        self.attn = nn.Identity() if attn_last or layers.attn is None else layers.attn(mid_chs, **dd)\n        self.conv2_kxk = layers.conv_norm_act(\n            mid_chs,\n            out_chs,\n            kernel_size,\n            stride=stride,\n            dilation=dilation[0],\n            groups=groups,\n            drop_layer=drop_block,\n            apply_act=False,\n            **dd,\n        )\n        self.attn_last = nn.Identity() if not attn_last or layers.attn is None else layers.attn(out_chs, **dd)\n        self.drop_path = 
DropPath(drop_path_rate) if drop_path_rate > 0. else nn.Identity()\n        self.act = nn.Identity() if linear_out else layers.act(inplace=True)\n\n    def init_weights(self, zero_init_last: bool = False):\n        if zero_init_last and self.shortcut is not None and getattr(self.conv2_kxk.bn, 'weight', None) is not None:\n            nn.init.zeros_(self.conv2_kxk.bn.weight)\n        for attn in (self.attn, self.attn_last):\n            if hasattr(attn, 'reset_parameters'):\n                attn.reset_parameters()\n\n    def forward(self, x):\n        shortcut = x\n        x = self.conv1_1x1(x)\n        x = self.attn(x)\n        x = self.conv2_kxk(x)\n        x = self.attn_last(x)\n        x = self.drop_path(x)\n        if self.shortcut is not None:\n            x = x + self.shortcut(shortcut)\n        return self.act(x)\n\n\nclass EdgeBlock(nn.Module):\n    \"\"\" EdgeResidual-like (3x3 + 1x1) block\n\n    A two layer block like DarkBlock, but with the order of the 3x3 and 1x1 convs reversed.\n    Very similar to the EfficientNet Edge-Residual block but this block it ends with activations, is\n    intended to be used with either expansion or bottleneck contraction, and can use DW/group/non-grouped convs.\n\n    FIXME is there a more common 3x3 + 1x1 conv block to name this after?\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 3,\n            stride: int = 1,\n            dilation: Tuple[int, int] = (1, 1),\n            bottle_ratio: float = 1.0,\n            group_size: Optional[int] = None,\n            downsample: str = 'avg',\n            attn_last: bool = False,\n            linear_out: bool = False,\n            layers: LayerFn = None,\n            drop_block: Callable = None,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        layers = 
layers or LayerFn()\n        mid_chs = make_divisible(out_chs * bottle_ratio)\n        groups = num_groups(group_size, mid_chs)\n\n        self.shortcut = create_shortcut(\n            downsample,\n            in_chs,\n            out_chs,\n            stride=stride,\n            dilation=dilation,\n            apply_act=False,\n            layers=layers,\n            **dd,\n        )\n        self.conv1_kxk = layers.conv_norm_act(\n            in_chs,\n            mid_chs,\n            kernel_size,\n            stride=stride,\n            dilation=dilation[0],\n            groups=groups,\n            drop_layer=drop_block,\n            **dd,\n        )\n        self.attn = nn.Identity() if attn_last or layers.attn is None else layers.attn(mid_chs, **dd)\n        self.conv2_1x1 = layers.conv_norm_act(mid_chs, out_chs, 1, apply_act=False, **dd)\n        self.attn_last = nn.Identity() if not attn_last or layers.attn is None else layers.attn(out_chs, **dd)\n        self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0. 
else nn.Identity()\n        self.act = nn.Identity() if linear_out else layers.act(inplace=True)\n\n    def init_weights(self, zero_init_last: bool = False):\n        if zero_init_last and self.shortcut is not None and getattr(self.conv2_1x1.bn, 'weight', None) is not None:\n            nn.init.zeros_(self.conv2_1x1.bn.weight)\n        for attn in (self.attn, self.attn_last):\n            if hasattr(attn, 'reset_parameters'):\n                attn.reset_parameters()\n\n    def forward(self, x):\n        shortcut = x\n        x = self.conv1_kxk(x)\n        x = self.attn(x)\n        x = self.conv2_1x1(x)\n        x = self.attn_last(x)\n        x = self.drop_path(x)\n        if self.shortcut is not None:\n            x = x + self.shortcut(shortcut)\n        return self.act(x)\n\n\nclass RepVggBlock(nn.Module):\n    \"\"\" RepVGG Block.\n\n    Adapted from impl at https://github.com/DingXiaoH/RepVGG\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 3,\n            stride: int = 1,\n            dilation: Tuple[int, int] = (1, 1),\n            bottle_ratio: float = 1.0,\n            group_size: Optional[int] = None,\n            downsample: str = '',\n            layers: LayerFn = None,\n            drop_block: Callable = None,\n            drop_path_rate: float = 0.,\n            inference_mode: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.groups = groups = num_groups(group_size, in_chs)\n        layers = layers or LayerFn()\n\n        if inference_mode:\n            self.reparam_conv = nn.Conv2d(\n                in_channels=in_chs,\n                out_channels=out_chs,\n                kernel_size=kernel_size,\n                stride=stride,\n                dilation=dilation,\n                groups=groups,\n                bias=True,\n                **dd,\n      
      )\n        else:\n            self.reparam_conv = None\n            use_ident = in_chs == out_chs and stride == 1 and dilation[0] == dilation[1]\n            self.identity = layers.norm_act(out_chs, apply_act=False, **dd) if use_ident else None\n            self.conv_kxk = layers.conv_norm_act(\n                in_chs,\n                out_chs,\n                kernel_size,\n                stride=stride,\n                dilation=dilation[0],\n                groups=groups,\n                drop_layer=drop_block,\n                apply_act=False,\n                **dd,\n            )\n            self.conv_1x1 = layers.conv_norm_act(\n                in_chs,\n                out_chs,\n                1,\n                stride=stride,\n                groups=groups,\n                apply_act=False,\n                **dd,\n            )\n            self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0. and use_ident else nn.Identity()\n\n        self.attn = nn.Identity() if layers.attn is None else layers.attn(out_chs, **dd)\n        self.act = layers.act(inplace=True)\n\n    def init_weights(self, zero_init_last: bool = False):\n        # NOTE this init overrides that base model init with specific changes for the block type\n        for m in self.modules():\n            if isinstance(m, nn.BatchNorm2d):\n                nn.init.normal_(m.weight, .1, .1)\n                nn.init.normal_(m.bias, 0, .1)\n        if hasattr(self.attn, 'reset_parameters'):\n            self.attn.reset_parameters()\n\n    def forward(self, x):\n        if self.reparam_conv is not None:\n            return self.act(self.attn(self.reparam_conv(x)))\n\n        if self.identity is None:\n            x = self.conv_1x1(x) + self.conv_kxk(x)\n        else:\n            identity = self.identity(x)\n            x = self.conv_1x1(x) + self.conv_kxk(x)\n            x = self.drop_path(x)  # not in the paper / official impl, experimental\n            x += identity\n        x = 
self.attn(x)  # no attn in the paper / official impl, experimental\n        return self.act(x)\n\n    def reparameterize(self):\n        \"\"\" Following works like `RepVGG: Making VGG-style ConvNets Great Again` -\n        https://arxiv.org/pdf/2101.03697.pdf. We re-parameterize multi-branched\n        architecture used at training time to obtain a plain CNN-like structure\n        for inference.\n        \"\"\"\n        if self.reparam_conv is not None:\n            return\n\n        kernel, bias = self._get_kernel_bias()\n        self.reparam_conv = nn.Conv2d(\n            in_channels=self.conv_kxk.conv.in_channels,\n            out_channels=self.conv_kxk.conv.out_channels,\n            kernel_size=self.conv_kxk.conv.kernel_size,\n            stride=self.conv_kxk.conv.stride,\n            padding=self.conv_kxk.conv.padding,\n            dilation=self.conv_kxk.conv.dilation,\n            groups=self.conv_kxk.conv.groups,\n            bias=True,\n        )\n        self.reparam_conv.weight.data = kernel\n        self.reparam_conv.bias.data = bias\n\n        # Delete un-used branches\n        for name, para in self.named_parameters():\n            if 'reparam_conv' in name:\n                continue\n            para.detach_()\n        self.__delattr__('conv_kxk')\n        self.__delattr__('conv_1x1')\n        self.__delattr__('identity')\n        self.__delattr__('drop_path')\n\n    def _get_kernel_bias(self) -> Tuple[torch.Tensor, torch.Tensor]:\n        \"\"\" Method to obtain re-parameterized kernel and bias.\n        Reference: https://github.com/DingXiaoH/RepVGG/blob/main/repvgg.py#L83\n        \"\"\"\n        # get weights and bias of scale branch\n        kernel_1x1 = 0\n        bias_1x1 = 0\n        if self.conv_1x1 is not None:\n            kernel_1x1, bias_1x1 = self._fuse_bn_tensor(self.conv_1x1)\n            # Pad scale branch kernel to match conv branch kernel size.\n            pad = self.conv_kxk.conv.kernel_size[0] // 2\n            kernel_1x1 = 
torch.nn.functional.pad(kernel_1x1, [pad, pad, pad, pad])\n\n        # get weights and bias of skip branch\n        kernel_identity = 0\n        bias_identity = 0\n        if self.identity is not None:\n            kernel_identity, bias_identity = self._fuse_bn_tensor(self.identity)\n\n        # get weights and bias of conv branches\n        kernel_conv, bias_conv = self._fuse_bn_tensor(self.conv_kxk)\n\n        kernel_final = kernel_conv + kernel_1x1 + kernel_identity\n        bias_final = bias_conv + bias_1x1 + bias_identity\n        return kernel_final, bias_final\n\n    def _fuse_bn_tensor(self, branch) -> Tuple[torch.Tensor, torch.Tensor]:\n        \"\"\" Method to fuse batchnorm layer with preceding conv layer.\n        Reference: https://github.com/DingXiaoH/RepVGG/blob/main/repvgg.py#L95\n        \"\"\"\n        if isinstance(branch, ConvNormAct):\n            kernel = branch.conv.weight\n            running_mean = branch.bn.running_mean\n            running_var = branch.bn.running_var\n            gamma = branch.bn.weight\n            beta = branch.bn.bias\n            eps = branch.bn.eps\n        else:\n            assert isinstance(branch, nn.BatchNorm2d)\n            if not hasattr(self, 'id_tensor'):\n                in_chs = self.conv_kxk.conv.in_channels\n                input_dim = in_chs // self.groups\n                kernel_size = self.conv_kxk.conv.kernel_size\n                kernel_value = torch.zeros_like(self.conv_kxk.conv.weight)\n                for i in range(in_chs):\n                    kernel_value[i, i % input_dim, kernel_size[0] // 2, kernel_size[1] // 2] = 1\n                self.id_tensor = kernel_value\n            kernel = self.id_tensor\n            running_mean = branch.running_mean\n            running_var = branch.running_var\n            gamma = branch.weight\n            beta = branch.bias\n            eps = branch.eps\n        std = (running_var + eps).sqrt()\n        t = (gamma / std).reshape(-1, 1, 1, 1)\n        return 
kernel * t, beta - running_mean * gamma / std\n\n\nclass MobileOneBlock(nn.Module):\n    \"\"\" MobileOne building block.\n\n        This block has a multi-branched architecture at train-time\n        and plain-CNN style architecture at inference time\n        For more details, please refer to our paper:\n        `An Improved One millisecond Mobile Backbone` -\n        https://arxiv.org/pdf/2206.04040.pdf\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 3,\n            stride: int = 1,\n            dilation: Tuple[int, int] = (1, 1),\n            bottle_ratio: float = 1.0,  # unused\n            group_size: Optional[int] = None,\n            downsample: str = '',  # unused\n            inference_mode: bool = False,\n            num_conv_branches: int = 1,\n            layers: LayerFn = None,\n            drop_block: Callable = None,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\" Construct a MobileOneBlock module.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_conv_branches = num_conv_branches\n        self.groups = groups = num_groups(group_size, in_chs)\n        layers = layers or LayerFn()\n\n        if inference_mode:\n            self.reparam_conv = nn.Conv2d(\n                in_channels=in_chs,\n                out_channels=out_chs,\n                kernel_size=kernel_size,\n                stride=stride,\n                dilation=dilation,\n                groups=groups,\n                bias=True,\n                **dd,\n            )\n        else:\n            self.reparam_conv = None\n\n            # Re-parameterizable skip connection\n            use_ident = in_chs == out_chs and stride == 1 and dilation[0] == dilation[1]\n            self.identity = layers.norm_act(out_chs, apply_act=False, **dd) if use_ident else None\n\n      
      # Re-parameterizable conv branches\n            convs = []\n            for _ in range(self.num_conv_branches):\n                convs.append(layers.conv_norm_act(\n                    in_chs,\n                    out_chs,\n                    kernel_size=kernel_size,\n                    stride=stride,\n                    groups=groups,\n                    apply_act=False,\n                    **dd,\n                ))\n            self.conv_kxk = nn.ModuleList(convs)\n\n            # Re-parameterizable scale branch\n            self.conv_scale = None\n            if kernel_size > 1:\n                self.conv_scale = layers.conv_norm_act(\n                    in_chs,\n                    out_chs,\n                    kernel_size=1,\n                    stride=stride,\n                    groups=groups,\n                    apply_act=False,\n                    **dd,\n                )\n            self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0. and use_ident else nn.Identity()\n\n        self.attn = nn.Identity() if layers.attn is None else layers.attn(out_chs, **dd)\n        self.act = layers.act(inplace=True)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\" Apply forward pass. 
\"\"\"\n        # Inference mode forward pass.\n        if self.reparam_conv is not None:\n            return self.act(self.attn(self.reparam_conv(x)))\n\n        # Multi-branched train-time forward pass.\n        # Skip branch output\n        identity_out = 0\n        if self.identity is not None:\n            identity_out = self.identity(x)\n\n        # Scale branch output\n        scale_out = 0\n        if self.conv_scale is not None:\n            scale_out = self.conv_scale(x)\n\n        # Other branches\n        out = scale_out\n        for ck in self.conv_kxk:\n            out += ck(x)\n        out = self.drop_path(out)\n        out += identity_out\n\n        return self.act(self.attn(out))\n\n    def reparameterize(self):\n        \"\"\" Following works like `RepVGG: Making VGG-style ConvNets Great Again` -\n        https://arxiv.org/pdf/2101.03697.pdf. We re-parameterize multi-branched\n        architecture used at training time to obtain a plain CNN-like structure\n        for inference.\n        \"\"\"\n        if self.reparam_conv is not None:\n            return\n\n        kernel, bias = self._get_kernel_bias()\n        self.reparam_conv = nn.Conv2d(\n            in_channels=self.conv_kxk[0].conv.in_channels,\n            out_channels=self.conv_kxk[0].conv.out_channels,\n            kernel_size=self.conv_kxk[0].conv.kernel_size,\n            stride=self.conv_kxk[0].conv.stride,\n            padding=self.conv_kxk[0].conv.padding,\n            dilation=self.conv_kxk[0].conv.dilation,\n            groups=self.conv_kxk[0].conv.groups,\n            bias=True)\n        self.reparam_conv.weight.data = kernel\n        self.reparam_conv.bias.data = bias\n\n        # Delete un-used branches\n        for name, para in self.named_parameters():\n            if 'reparam_conv' in name:\n                continue\n            para.detach_()\n        self.__delattr__('conv_kxk')\n        self.__delattr__('conv_scale')\n        self.__delattr__('identity')\n        
self.__delattr__('drop_path')\n\n    def _get_kernel_bias(self) -> Tuple[torch.Tensor, torch.Tensor]:\n        \"\"\" Method to obtain re-parameterized kernel and bias.\n        Reference: https://github.com/DingXiaoH/RepVGG/blob/main/repvgg.py#L83\n        \"\"\"\n        # get weights and bias of scale branch\n        kernel_scale = 0\n        bias_scale = 0\n        if self.conv_scale is not None:\n            kernel_scale, bias_scale = self._fuse_bn_tensor(self.conv_scale)\n            # Pad scale branch kernel to match conv branch kernel size.\n            pad = self.conv_kxk[0].conv.kernel_size[0] // 2\n            kernel_scale = torch.nn.functional.pad(kernel_scale, [pad, pad, pad, pad])\n\n        # get weights and bias of skip branch\n        kernel_identity = 0\n        bias_identity = 0\n        if self.identity is not None:\n            kernel_identity, bias_identity = self._fuse_bn_tensor(self.identity)\n\n        # get weights and bias of conv branches\n        kernel_conv = 0\n        bias_conv = 0\n        for ix in range(self.num_conv_branches):\n            _kernel, _bias = self._fuse_bn_tensor(self.conv_kxk[ix])\n            kernel_conv += _kernel\n            bias_conv += _bias\n\n        kernel_final = kernel_conv + kernel_scale + kernel_identity\n        bias_final = bias_conv + bias_scale + bias_identity\n        return kernel_final, bias_final\n\n    def _fuse_bn_tensor(self, branch) -> Tuple[torch.Tensor, torch.Tensor]:\n        \"\"\" Method to fuse batchnorm layer with preceding conv layer.\n        Reference: https://github.com/DingXiaoH/RepVGG/blob/main/repvgg.py#L95\n        \"\"\"\n        if isinstance(branch, ConvNormAct):\n            kernel = branch.conv.weight\n            running_mean = branch.bn.running_mean\n            running_var = branch.bn.running_var\n            gamma = branch.bn.weight\n            beta = branch.bn.bias\n            eps = branch.bn.eps\n        else:\n            assert isinstance(branch, 
nn.BatchNorm2d)\n            if not hasattr(self, 'id_tensor'):\n                in_chs = self.conv_kxk[0].conv.in_channels\n                input_dim = in_chs // self.groups\n                kernel_size = self.conv_kxk[0].conv.kernel_size\n                kernel_value = torch.zeros_like(self.conv_kxk[0].conv.weight)\n                for i in range(in_chs):\n                    kernel_value[i, i % input_dim, kernel_size[0] // 2, kernel_size[1] // 2] = 1\n                self.id_tensor = kernel_value\n            kernel = self.id_tensor\n            running_mean = branch.running_mean\n            running_var = branch.running_var\n            gamma = branch.weight\n            beta = branch.bias\n            eps = branch.eps\n        std = (running_var + eps).sqrt()\n        t = (gamma / std).reshape(-1, 1, 1, 1)\n        return kernel * t, beta - running_mean * gamma / std\n\n\nclass SelfAttnBlock(nn.Module):\n    \"\"\" ResNet-like Bottleneck Block - 1x1 - optional kxk - self attn - 1x1\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 3,\n            stride: int = 1,\n            dilation: Tuple[int, int] = (1, 1),\n            bottle_ratio: float = 1.,\n            group_size: Optional[int] = None,\n            downsample: str = 'avg',\n            extra_conv: bool = False,\n            linear_out: bool = False,\n            bottle_in: bool = False,\n            post_attn_na: bool = True,\n            feat_size: Optional[Tuple[int, int]] = None,\n            layers: LayerFn = None,\n            drop_block: Callable = None,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert layers is not None\n        mid_chs = make_divisible((in_chs if bottle_in else out_chs) * bottle_ratio)\n        groups = num_groups(group_size, mid_chs)\n\n        
self.shortcut = create_shortcut(\n            downsample,\n            in_chs,\n            out_chs,\n            stride=stride,\n            dilation=dilation,\n            apply_act=False,\n            layers=layers,\n            **dd,\n        )\n\n        self.conv1_1x1 = layers.conv_norm_act(in_chs, mid_chs, 1, **dd)\n        if extra_conv:\n            self.conv2_kxk = layers.conv_norm_act(\n                mid_chs,\n                mid_chs,\n                kernel_size,\n                stride=stride,\n                dilation=dilation[0],\n                groups=groups,\n                drop_layer=drop_block,\n                **dd,\n            )\n            stride = 1  # striding done via conv if enabled\n        else:\n            self.conv2_kxk = nn.Identity()\n        opt_kwargs = {} if feat_size is None else dict(feat_size=feat_size)\n        # FIXME need to dilate self attn to have dilated network support, moop moop\n        self.self_attn = layers.self_attn(mid_chs, stride=stride, **opt_kwargs, **dd)\n        self.post_attn = layers.norm_act(mid_chs, **dd) if post_attn_na else nn.Identity()\n        self.conv3_1x1 = layers.conv_norm_act(mid_chs, out_chs, 1, apply_act=False, **dd)\n        self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0. 
else nn.Identity()\n        self.act = nn.Identity() if linear_out else layers.act(inplace=True)\n\n    def init_weights(self, zero_init_last: bool = False):\n        if zero_init_last and self.shortcut is not None and getattr(self.conv3_1x1.bn, 'weight', None) is not None:\n            nn.init.zeros_(self.conv3_1x1.bn.weight)\n        if hasattr(self.self_attn, 'reset_parameters'):\n            self.self_attn.reset_parameters()\n\n    def forward(self, x):\n        shortcut = x\n        x = self.conv1_1x1(x)\n        x = self.conv2_kxk(x)\n        x = self.self_attn(x)\n        x = self.post_attn(x)\n        x = self.conv3_1x1(x)\n        x = self.drop_path(x)\n        if self.shortcut is not None:\n            x = x + self.shortcut(shortcut)\n        return self.act(x)\n\n\n_block_registry = dict(\n    basic=BasicBlock,\n    bottle=BottleneckBlock,\n    dark=DarkBlock,\n    edge=EdgeBlock,\n    rep=RepVggBlock,\n    one=MobileOneBlock,\n    self_attn=SelfAttnBlock,\n)\n\n\ndef register_block(block_type: str, block_fn: nn.Module):\n    _block_registry[block_type] = block_fn\n\n\ndef create_block(block: Union[str, nn.Module], **kwargs):\n    if isinstance(block, (nn.Module, partial)):\n        return block(**kwargs)\n    assert block in _block_registry, f'Unknown block type ({block}'\n    return _block_registry[block](**kwargs)\n\n\nclass Stem(nn.Sequential):\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: Union[int, List[int], Tuple[int, ...]],\n            kernel_size: int = 3,\n            stride: int = 4,\n            pool: str = 'maxpool',\n            num_rep: int = 3,\n            num_act: Optional[int] = None,\n            chs_decay: float = 0.5,\n            layers: LayerFn = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert stride in (2, 4)\n        layers = layers or LayerFn()\n\n        if 
isinstance(out_chs, (list, tuple)):\n            num_rep = len(out_chs)\n            stem_chs = out_chs\n        else:\n            stem_chs = [round(out_chs * chs_decay ** i) for i in range(num_rep)][::-1]\n\n        self.stride = stride\n        self.feature_info = []  # track intermediate features\n        prev_feat = ''\n        stem_strides = [2] + [1] * (num_rep - 1)\n        if stride == 4 and not pool:\n            # set last conv in stack to be strided if stride == 4 and no pooling layer\n            stem_strides[-1] = 2\n\n        num_act = num_rep if num_act is None else num_act\n        # if num_act < num_rep, first convs in stack won't have bn + act\n        stem_norm_acts = [False] * (num_rep - num_act) + [True] * num_act\n        prev_chs = in_chs\n        curr_stride = 1\n        last_feat_idx = -1\n        for i, (ch, s, na) in enumerate(zip(stem_chs, stem_strides, stem_norm_acts)):\n            layer_fn = layers.conv_norm_act if na else create_conv2d\n            conv_name = f'conv{i + 1}'\n            if i > 0 and s > 1:\n                last_feat_idx = i - 1\n                self.feature_info.append(dict(num_chs=prev_chs, reduction=curr_stride, module=prev_feat, stage=0))\n            self.add_module(conv_name, layer_fn(prev_chs, ch, kernel_size=kernel_size, stride=s, **dd))\n            prev_chs = ch\n            curr_stride *= s\n            prev_feat = conv_name\n\n        if pool:\n            pool = pool.lower()\n            assert pool in ('max', 'maxpool', 'avg', 'avgpool', 'max2', 'avg2')\n            last_feat_idx = i\n            self.feature_info.append(dict(num_chs=prev_chs, reduction=curr_stride, module=prev_feat, stage=0))\n            if pool == 'max2':\n                self.add_module('pool', nn.MaxPool2d(2))\n            elif pool == 'avg2':\n                self.add_module('pool', nn.AvgPool2d(2))\n            elif 'max' in pool:\n                self.add_module('pool', nn.MaxPool2d(kernel_size=3, stride=2, padding=1))\n        
    elif 'avg' in pool:\n                self.add_module('pool', nn.AvgPool2d(kernel_size=3, stride=2, padding=1, count_include_pad=False))\n            curr_stride *= 2\n            prev_feat = 'pool'\n\n        self.last_feat_idx = last_feat_idx if last_feat_idx >= 0 else None\n        self.feature_info.append(dict(num_chs=prev_chs, reduction=curr_stride, module=prev_feat, stage=0))\n        assert curr_stride == stride\n\n    def forward_intermediates(self, x) -> Tuple[torch.Tensor, Optional[torch.Tensor]]:\n        intermediate: Optional[torch.Tensor] = None\n        for i, m in enumerate(self):\n            x = m(x)\n            if self.last_feat_idx is not None and i == self.last_feat_idx:\n                intermediate = x\n        return x, intermediate\n\n\ndef create_byob_stem(\n        in_chs: int,\n        out_chs: int,\n        stem_type: str = '',\n        pool_type: str = '',\n        feat_prefix: str = 'stem',\n        layers: LayerFn = None,\n        device=None,\n        dtype=None,\n):\n    dd = {'device': device, 'dtype': dtype}\n    layers = layers or LayerFn()\n    assert stem_type in ('', 'quad', 'quad2', 'tiered', 'deep', 'rep', 'one', '7x7', '3x3')\n    if 'quad' in stem_type:\n        # based on NFNet stem, stack of 4 3x3 convs\n        num_act = 2 if 'quad2' in stem_type else None\n        stem = Stem(in_chs, out_chs, num_rep=4, num_act=num_act, pool=pool_type, layers=layers, **dd)\n    elif 'tiered' in stem_type:\n        # 3x3 stack of 3 convs as in my ResNet-T\n        stem = Stem(in_chs, (3 * out_chs // 8, out_chs // 2, out_chs), pool=pool_type, layers=layers, **dd)\n    elif 'deep' in stem_type:\n        # 3x3 stack of 3 convs as in ResNet-D\n        stem = Stem(in_chs, out_chs, num_rep=3, chs_decay=1.0, pool=pool_type, layers=layers, **dd)\n    elif 'rep' in stem_type:\n        stem = RepVggBlock(in_chs, out_chs, stride=2, layers=layers, **dd)\n    elif 'one' in stem_type:\n        stem = MobileOneBlock(in_chs, out_chs, 
kernel_size=3, stride=2, layers=layers, **dd)\n    elif '7x7' in stem_type:\n        # 7x7 stem conv as in ResNet\n        if pool_type:\n            stem = Stem(in_chs, out_chs, 7, num_rep=1, pool=pool_type, layers=layers, **dd)\n        else:\n            stem = layers.conv_norm_act(in_chs, out_chs, 7, stride=2, **dd)\n    else:\n        if isinstance(out_chs, (tuple, list)):\n            stem = Stem(in_chs, out_chs, 3, pool=pool_type, layers=layers, **dd)\n        else:\n            # 3x3 stem conv as in RegNet is the default\n            if pool_type:\n                stem = Stem(in_chs, out_chs, 3, num_rep=1, pool=pool_type, layers=layers, **dd)\n            else:\n                stem = layers.conv_norm_act(in_chs, out_chs, 3, stride=2, **dd)\n\n    if isinstance(stem, Stem):\n        feature_info = [dict(f, module='.'.join([feat_prefix, f['module']])) for f in stem.feature_info]\n    else:\n        feature_info = [dict(num_chs=out_chs, reduction=2, module=feat_prefix, stage=0)]\n    return stem, feature_info\n\n\ndef reduce_feat_size(feat_size, stride=2):\n    return None if feat_size is None else tuple([s // stride for s in feat_size])\n\n\ndef override_kwargs(block_kwargs, model_kwargs):\n    \"\"\" Override model level attn/self-attn/block kwargs w/ block level\n\n    NOTE: kwargs are NOT merged across levels, block_kwargs will fully replace model_kwargs\n    for the block if set to anything that isn't None.\n\n    i.e. 
an empty block_kwargs dict will remove kwargs set at model level for that block\n    \"\"\"\n    out_kwargs = block_kwargs if block_kwargs is not None else model_kwargs\n    return out_kwargs or {}  # make sure None isn't returned\n\n\ndef update_block_kwargs(block_kwargs: Dict[str, Any], block_cfg: ByoBlockCfg, model_cfg: ByoModelCfg, ):\n    layer_fns = block_kwargs['layers']\n\n    # override attn layer / args with block local config\n    attn_set = block_cfg.attn_layer is not None\n    if attn_set or block_cfg.attn_kwargs is not None:\n        # override attn layer config\n        if attn_set and not block_cfg.attn_layer:\n            # empty string for attn_layer type will disable attn for this block\n            attn_layer = None\n        else:\n            attn_kwargs = override_kwargs(block_cfg.attn_kwargs, model_cfg.attn_kwargs)\n            attn_layer = block_cfg.attn_layer or model_cfg.attn_layer\n            attn_layer = partial(get_attn(attn_layer), **attn_kwargs) if attn_layer is not None else None\n        layer_fns = replace(layer_fns, attn=attn_layer)\n\n    # override self-attn layer / args with block local cfg\n    self_attn_set = block_cfg.self_attn_layer is not None\n    if self_attn_set or block_cfg.self_attn_kwargs is not None:\n        # override attn layer config\n        if self_attn_set and not block_cfg.self_attn_layer:  # attn_layer == ''\n            # empty string for self_attn_layer type will disable attn for this block\n            self_attn_layer = None\n        else:\n            self_attn_kwargs = override_kwargs(block_cfg.self_attn_kwargs, model_cfg.self_attn_kwargs)\n            self_attn_layer = block_cfg.self_attn_layer or model_cfg.self_attn_layer\n            self_attn_layer = partial(get_attn(self_attn_layer), **self_attn_kwargs) \\\n                if self_attn_layer is not None else None\n        layer_fns = replace(layer_fns, self_attn=self_attn_layer)\n\n    block_kwargs['layers'] = layer_fns\n\n    # add additional 
block_kwargs specified in block_cfg or model_cfg, precedence to block if set\n    block_kwargs.update(override_kwargs(block_cfg.block_kwargs, model_cfg.block_kwargs))\n\n\ndef drop_blocks(\n        drop_prob: float = 0.,\n        block_size: int = 3,\n        num_stages: int = 4,\n) -> List[Optional[partial]]:\n    \"\"\"Create DropBlock layer partials for each stage.\n\n    DropBlock is applied to the last two stages only, following common practice.\n    The block_size specifies the size for the final stage; the second-to-last\n    stage uses a larger block size scaled to account for 2x larger feature maps.\n\n    Args:\n        drop_prob: Drop probability for DropBlock.\n        block_size: Block size for the final stage. Second-to-last stage\n            uses `block_size * 2 - 1` to scale with feature map size.\n        num_stages: Number of stages in the model.\n\n    Returns:\n        List of DropBlock partial instances or None for each stage.\n    \"\"\"\n    assert num_stages >= 2\n    dbs = [None] * num_stages\n    if drop_prob:\n        # Scale block size for second-to-last stage (2x larger feature maps)\n        dbs[-2] = partial(DropBlock2d, drop_prob=drop_prob, block_size=block_size * 2 - 1, gamma_scale=0.25)\n        dbs[-1] = partial(DropBlock2d, drop_prob=drop_prob, block_size=block_size, gamma_scale=1.00)\n    return dbs\n\n\ndef create_byob_stages(\n        cfg: ByoModelCfg,\n        drop_path_rate: float,\n        output_stride: int,\n        stem_feat: Dict[str, Any],\n        drop_block_rate: float = 0.,\n        drop_block_size: int = 3,\n        feat_size: Optional[int] = None,\n        layers: Optional[LayerFn] = None,\n        block_kwargs_fn: Optional[Callable] = update_block_kwargs,\n        device=None,\n        dtype=None,\n):\n    layers = layers or LayerFn()\n    feature_info = []\n    block_cfgs = [expand_blocks_cfg(s) for s in cfg.blocks]\n    num_stages = len(block_cfgs)\n    depths = [sum([bc.d for bc in stage_bcs]) for stage_bcs 
in block_cfgs]\n    dpr = calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)\n    dbs = drop_blocks(drop_block_rate, drop_block_size, num_stages)\n    dilation = 1\n    net_stride = stem_feat['reduction']\n    prev_chs = stem_feat['num_chs']\n    prev_feat = stem_feat\n    stages = []\n    for stage_idx, stage_block_cfgs in enumerate(block_cfgs):\n        stride = stage_block_cfgs[0].s\n        if stride != 1 and prev_feat:\n            feature_info.append(prev_feat)\n        if net_stride >= output_stride and stride > 1:\n            dilation *= stride\n            stride = 1\n        net_stride *= stride\n        first_dilation = 1 if dilation in (1, 2) else 2\n\n        blocks = []\n        for block_idx, block_cfg in enumerate(stage_block_cfgs):\n            out_chs = make_divisible(block_cfg.c * cfg.width_factor)\n            group_size = block_cfg.gs\n            if isinstance(group_size, Callable):\n                group_size = group_size(out_chs, block_idx)\n            block_kwargs = dict(  # Blocks used in this model must accept these arguments\n                in_chs=prev_chs,\n                out_chs=out_chs,\n                stride=stride if block_idx == 0 else 1,\n                dilation=(first_dilation, dilation),\n                group_size=group_size,\n                bottle_ratio=block_cfg.br,\n                downsample=cfg.downsample,\n                drop_block=dbs[stage_idx],\n                drop_path_rate=dpr[stage_idx][block_idx],\n                layers=layers,\n                device=device,\n                dtype=dtype,\n            )\n            if block_cfg.type in ('self_attn',):\n                # add feat_size arg for blocks that support/need it\n                block_kwargs['feat_size'] = feat_size\n            block_kwargs_fn(block_kwargs, block_cfg=block_cfg, model_cfg=cfg)\n            blocks += [create_block(block_cfg.type, **block_kwargs)]\n            first_dilation = dilation\n            prev_chs = 
out_chs\n            if stride > 1 and block_idx == 0:\n                feat_size = reduce_feat_size(feat_size, stride)\n\n        stages += [nn.Sequential(*blocks)]\n        prev_feat = dict(num_chs=prev_chs, reduction=net_stride, module=f'stages.{stage_idx}', stage=stage_idx + 1)\n\n    feature_info.append(prev_feat)\n    return nn.Sequential(*stages), feature_info, feat_size\n\n\ndef get_layer_fns(cfg: ByoModelCfg, allow_aa: bool = True):\n    act = get_act_layer(cfg.act_layer)\n    norm_act = get_norm_act_layer(norm_layer=cfg.norm_layer, act_layer=act)\n    if cfg.aa_layer and allow_aa:\n        conv_norm_act = partial(ConvNormAct, norm_layer=cfg.norm_layer, act_layer=act, aa_layer=cfg.aa_layer)\n    else:\n        conv_norm_act = partial(ConvNormAct, norm_layer=cfg.norm_layer, act_layer=act)\n    attn = partial(get_attn(cfg.attn_layer), **cfg.attn_kwargs) if cfg.attn_layer else None\n    self_attn = partial(get_attn(cfg.self_attn_layer), **cfg.self_attn_kwargs) if cfg.self_attn_layer else None\n    layer_fn = LayerFn(conv_norm_act=conv_norm_act, norm_act=norm_act, act=act, attn=attn, self_attn=self_attn)\n    return layer_fn\n\n\nclass ByobNet(nn.Module):\n    \"\"\"Bring-your-own-blocks Network.\n\n    A flexible network backbone that allows building model stem + blocks via\n    dataclass cfg definition w/ factory functions for module instantiation.\n\n    Current assumption is that both stem and blocks are in conv-bn-act order (w/ block ending in act).\n    \"\"\"\n\n    def __init__(\n            self,\n            cfg: ByoModelCfg,\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            global_pool: Optional[str] = None,\n            output_stride: int = 32,\n            img_size: Optional[Union[int, Tuple[int, int]]] = None,\n            drop_rate: float = 0.,\n            drop_block_rate: float = 0.,\n            drop_block_size: int = 3,\n            drop_path_rate: float = 0.,\n            zero_init_last: bool = True,\n       
     device=None,\n            dtype=None,\n            **kwargs,\n    ):\n        \"\"\"\n        Args:\n            cfg: Model architecture configuration.\n            num_classes: Number of classifier classes.\n            in_chans: Number of input channels.\n            global_pool: Global pooling type.\n            output_stride: Output stride of network, one of (8, 16, 32).\n            img_size: Image size for fixed image size models (i.e. self-attn).\n            drop_rate: Classifier dropout rate.\n            drop_block_rate: DropBlock drop rate.\n            drop_block_size: DropBlock block size for final stage (scales up for earlier stages).\n            drop_path_rate: Stochastic depth drop-path rate.\n            zero_init_last: Zero-init last weight of residual path.\n            **kwargs: Extra kwargs overlayed onto cfg.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n\n        cfg = replace(cfg, **kwargs)  # overlay kwargs onto cfg\n        stem_layers = get_layer_fns(cfg, allow_aa=False)  # keep aa off for stem-layers\n        stage_layers = get_layer_fns(cfg)\n        if cfg.fixed_input_size:\n            assert img_size is not None, 'img_size argument is required for fixed input size model'\n        feat_size = to_2tuple(img_size) if img_size is not None else None\n\n        self.feature_info = []\n        if isinstance(cfg.stem_chs, (list, tuple)):\n            stem_chs = [int(round(c * cfg.width_factor)) for c in cfg.stem_chs]\n        else:\n            stem_chs = int(round((cfg.stem_chs or cfg.blocks[0].c) * cfg.width_factor))\n        self.stem, stem_feat = create_byob_stem(\n            in_chs=in_chans,\n            out_chs=stem_chs,\n            stem_type=cfg.stem_type,\n            pool_type=cfg.stem_pool,\n            layers=stem_layers,\n     
       **dd,\n        )\n        self.feature_info.extend(stem_feat[:-1])\n        feat_size = reduce_feat_size(feat_size, stride=stem_feat[-1]['reduction'])\n\n        self.stages, stage_feat, feat_size = create_byob_stages(\n            cfg,\n            drop_path_rate,\n            output_stride,\n            stem_feat[-1],\n            drop_block_rate=drop_block_rate,\n            drop_block_size=drop_block_size,\n            layers=stage_layers,\n            feat_size=feat_size,\n            **dd,\n        )\n        self.feature_info.extend(stage_feat[:-1])\n        reduction = stage_feat[-1]['reduction']\n\n        prev_chs = stage_feat[-1]['num_chs']\n        if cfg.num_features:\n            self.num_features = int(round(cfg.width_factor * cfg.num_features))\n            self.final_conv = stage_layers.conv_norm_act(prev_chs, self.num_features, 1, **dd)\n        else:\n            self.num_features = prev_chs\n            self.final_conv = nn.Identity()\n        self.feature_info += [\n            dict(num_chs=self.num_features, reduction=reduction, module='final_conv', stage=len(self.stages))]\n        self.stage_ends = [f['stage'] for f in self.feature_info]\n\n        self.head_hidden_size = self.num_features\n        assert cfg.head_type in ('', 'classifier', 'mlp', 'attn_abs', 'attn_rot')\n        if cfg.head_type == 'mlp':\n            if global_pool is None:\n                global_pool = 'avg'\n            self.head = NormMlpClassifierHead(\n                self.num_features,\n                num_classes,\n                hidden_size=cfg.head_hidden_size,\n                pool_type=global_pool,\n                norm_layer=cfg.norm_layer,\n                act_layer=cfg.act_layer,\n                drop_rate=self.drop_rate,\n                **dd,\n            )\n            self.head_hidden_size = self.head.hidden_size\n        elif cfg.head_type == 'attn_abs':\n            if global_pool is None:\n                global_pool = 'token'\n            
assert global_pool in ('', 'token')\n            self.head = AttentionPool2d(\n                self.num_features,\n                embed_dim=cfg.head_hidden_size,\n                out_features=num_classes,\n                feat_size=feat_size,\n                pool_type=global_pool,\n                drop_rate=self.drop_rate,\n                qkv_separate=True,\n                **dd,\n            )\n            self.head_hidden_size = self.head.embed_dim\n        elif cfg.head_type == 'attn_rot':\n            if global_pool is None:\n                global_pool = 'token'\n            assert global_pool in ('', 'token')\n            self.head = RotAttentionPool2d(\n                self.num_features,\n                embed_dim=cfg.head_hidden_size,\n                out_features=num_classes,\n                ref_feat_size=feat_size,\n                pool_type=global_pool,\n                drop_rate=self.drop_rate,\n                qkv_separate=True,\n                **dd,\n            )\n            self.head_hidden_size = self.head.embed_dim\n        else:\n            if global_pool is None:\n                global_pool = 'avg'\n            assert cfg.head_hidden_size is None\n            self.head = ClassifierHead(\n                self.num_features,\n                num_classes,\n                pool_type=global_pool,\n                drop_rate=self.drop_rate,\n                **dd,\n            )\n        self.global_pool = global_pool\n\n        # init weights\n        named_apply(partial(_init_weights, zero_init_last=zero_init_last), self)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        \"\"\"Group matcher for parameter groups.\n\n        Args:\n            coarse: Whether to use coarse grouping.\n\n        Returns:\n            Dictionary mapping group names to patterns.\n        \"\"\"\n        matcher = dict(\n            stem=r'^stem',\n            blocks=[\n                (r'^stages\\.(\\d+)' if 
coarse else r'^stages\\.(\\d+)\\.(\\d+)', None),\n                (r'^final_conv', (99999,))\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Enable or disable gradient checkpointing.\n\n        Args:\n            enable: Whether to enable gradient checkpointing.\n        \"\"\"\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        \"\"\"Get classifier module.\n\n        Returns:\n            Classifier module.\n        \"\"\"\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None) -> None:\n        \"\"\"Reset classifier.\n\n        Args:\n            num_classes: Number of classes for new classifier.\n            global_pool: Global pooling type.\n        \"\"\"\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n            exclude_final_conv: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n            exclude_final_conv: Exclude final_conv from last intermediate\n     
   Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stage_ends), indices)\n        take_indices = [self.stage_ends[i] for i in take_indices]\n        max_index = self.stage_ends[max_index]\n        # forward pass\n        feat_idx = 0  # stem is index 0\n        if hasattr(self.stem, 'forward_intermediates'):\n            # returns last intermediate features in stem (before final stride in stride > 2 stems)\n            x, x_inter = self.stem.forward_intermediates(x)\n        else:\n            x, x_inter = self.stem(x), None\n        if feat_idx in take_indices:\n            intermediates.append(x if x_inter is None else x_inter)\n        last_idx = self.stage_ends[-1]\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index]\n        for stage in stages:\n            feat_idx += 1\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint_seq(stage, x)\n            else:\n                x = stage(x)\n            if not exclude_final_conv and feat_idx == last_idx:\n                # default feature_info for this model uses final_conv as the last feature output (if present)\n                x = self.final_conv(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        if exclude_final_conv and feat_idx == last_idx:\n            x = self.final_conv(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ) -> List[int]:\n        \"\"\"Prune layers not required for specified 
intermediates.\n\n        Args:\n            indices: Indices of intermediate layers to keep.\n            prune_norm: Whether to prune normalization layer.\n            prune_head: Whether to prune the classifier head.\n\n        Returns:\n            List of indices that were kept.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stage_ends), indices)\n        max_index = self.stage_ends[max_index]\n        self.stages = self.stages[:max_index]  # truncate blocks w/ stem as idx 0\n        if max_index < self.stage_ends[-1]:\n            self.final_conv = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through feature extraction.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Feature tensor.\n        \"\"\"\n        x = self.stem(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stages, x)\n        else:\n            x = self.stages(x)\n        x = self.final_conv(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        \"\"\"Forward pass through head.\n\n        Args:\n            x: Input features.\n            pre_logits: Return features before final linear layer.\n\n        Returns:\n            Classification logits or features.\n        \"\"\"\n        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Output logits.\n        \"\"\"\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _init_weights(module: nn.Module, name: str = '', zero_init_last: bool = False) -> None:\n    
\"\"\"Initialize weights.\n\n    Args:\n        module: Module to initialize.\n        name: Module name.\n        zero_init_last: Zero-initialize last layer.\n    \"\"\"\n    if isinstance(module, nn.Conv2d):\n        fan_out = module.kernel_size[0] * module.kernel_size[1] * module.out_channels\n        fan_out //= module.groups\n        module.weight.data.normal_(0, math.sqrt(2.0 / fan_out))\n        if module.bias is not None:\n            module.bias.data.zero_()\n    elif isinstance(module, nn.Linear):\n        nn.init.normal_(module.weight, mean=0.0, std=0.01)\n        if module.bias is not None:\n            nn.init.zeros_(module.bias)\n    elif isinstance(module, nn.BatchNorm2d):\n        nn.init.ones_(module.weight)\n        nn.init.zeros_(module.bias)\n    elif hasattr(module, 'init_weights'):\n        module.init_weights(zero_init_last=zero_init_last)\n\n\nmodel_cfgs = dict(\n    gernet_l=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='basic', d=1, c=128, s=2, gs=0, br=1.),\n            ByoBlockCfg(type='basic', d=2, c=192, s=2, gs=0, br=1.),\n            ByoBlockCfg(type='bottle', d=6, c=640, s=2, gs=0, br=1 / 4),\n            ByoBlockCfg(type='bottle', d=5, c=640, s=2, gs=1, br=3.),\n            ByoBlockCfg(type='bottle', d=4, c=640, s=1, gs=1, br=3.),\n        ),\n        stem_chs=32,\n        stem_pool=None,\n        num_features=2560,\n    ),\n    gernet_m=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='basic', d=1, c=128, s=2, gs=0, br=1.),\n            ByoBlockCfg(type='basic', d=2, c=192, s=2, gs=0, br=1.),\n            ByoBlockCfg(type='bottle', d=6, c=640, s=2, gs=0, br=1 / 4),\n            ByoBlockCfg(type='bottle', d=4, c=640, s=2, gs=1, br=3.),\n            ByoBlockCfg(type='bottle', d=1, c=640, s=1, gs=1, br=3.),\n        ),\n        stem_chs=32,\n        stem_pool=None,\n        num_features=2560,\n    ),\n    gernet_s=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='basic', d=1, c=48, s=2, gs=0, 
br=1.),\n            ByoBlockCfg(type='basic', d=3, c=48, s=2, gs=0, br=1.),\n            ByoBlockCfg(type='bottle', d=7, c=384, s=2, gs=0, br=1 / 4),\n            ByoBlockCfg(type='bottle', d=2, c=560, s=2, gs=1, br=3.),\n            ByoBlockCfg(type='bottle', d=1, c=256, s=1, gs=1, br=3.),\n        ),\n        stem_chs=13,\n        stem_pool=None,\n        num_features=1920,\n    ),\n\n    repvgg_a0=ByoModelCfg(\n        blocks=_rep_vgg_bcfg(d=(2, 4, 14, 1), wf=(0.75, 0.75, 0.75, 2.5)),\n        stem_type='rep',\n        stem_chs=48,\n    ),\n    repvgg_a1=ByoModelCfg(\n        blocks=_rep_vgg_bcfg(d=(2, 4, 14, 1), wf=(1, 1, 1, 2.5)),\n        stem_type='rep',\n        stem_chs=64,\n    ),\n    repvgg_a2=ByoModelCfg(\n        blocks=_rep_vgg_bcfg(d=(2, 4, 14, 1), wf=(1.5, 1.5, 1.5, 2.75)),\n        stem_type='rep',\n        stem_chs=64,\n    ),\n    repvgg_b0=ByoModelCfg(\n        blocks=_rep_vgg_bcfg(wf=(1., 1., 1., 2.5)),\n        stem_type='rep',\n        stem_chs=64,\n    ),\n    repvgg_b1=ByoModelCfg(\n        blocks=_rep_vgg_bcfg(wf=(2., 2., 2., 4.)),\n        stem_type='rep',\n        stem_chs=64,\n    ),\n    repvgg_b1g4=ByoModelCfg(\n        blocks=_rep_vgg_bcfg(wf=(2., 2., 2., 4.), groups=4),\n        stem_type='rep',\n        stem_chs=64,\n    ),\n    repvgg_b2=ByoModelCfg(\n        blocks=_rep_vgg_bcfg(wf=(2.5, 2.5, 2.5, 5.)),\n        stem_type='rep',\n        stem_chs=64,\n    ),\n    repvgg_b2g4=ByoModelCfg(\n        blocks=_rep_vgg_bcfg(wf=(2.5, 2.5, 2.5, 5.), groups=4),\n        stem_type='rep',\n        stem_chs=64,\n    ),\n    repvgg_b3=ByoModelCfg(\n        blocks=_rep_vgg_bcfg(wf=(3., 3., 3., 5.)),\n        stem_type='rep',\n        stem_chs=64,\n    ),\n    repvgg_b3g4=ByoModelCfg(\n        blocks=_rep_vgg_bcfg(wf=(3., 3., 3., 5.), groups=4),\n        stem_type='rep',\n        stem_chs=64,\n    ),\n    repvgg_d2se=ByoModelCfg(\n        blocks=_rep_vgg_bcfg(d=(8, 14, 24, 1), wf=(2.5, 2.5, 2.5, 5.)),\n        stem_type='rep',\n        
stem_chs=64,\n        attn_layer='se',\n        attn_kwargs=dict(rd_ratio=0.0625, rd_divisor=1),\n    ),\n\n    # 4 x conv stem w/ 2 act, no maxpool, 2,4,6,4 repeats, group size 32 in first 3 blocks\n    # DW convs in last block, 2048 pre-FC, silu act\n    resnet51q=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=4, c=512, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=6, c=1536, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=4, c=1536, s=2, gs=1, br=1.0),\n        ),\n        stem_chs=128,\n        stem_type='quad2',\n        stem_pool=None,\n        num_features=2048,\n        act_layer='silu',\n    ),\n\n    # 4 x conv stem w/ 4 act, no maxpool, 1,4,6,4 repeats, edge block first, group size 32 in next 2 blocks\n    # DW convs in last block, 4 conv for each bottle block, 2048 pre-FC, silu act\n    resnet61q=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='edge', d=1, c=256, s=1, gs=0, br=1.0, block_kwargs=dict()),\n            ByoBlockCfg(type='bottle', d=4, c=512, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=6, c=1536, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=4, c=1536, s=2, gs=1, br=1.0),\n        ),\n        stem_chs=128,\n        stem_type='quad',\n        stem_pool=None,\n        num_features=2048,\n        act_layer='silu',\n        block_kwargs=dict(extra_conv=True),\n    ),\n\n    # A series of ResNeXt-26 models w/ one of none, GC, SE, ECA, BAT attn, group size 32, SiLU act,\n    # and a tiered stem w/ maxpool\n    resnext26ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=512, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=1024, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=2048, s=2, gs=32, br=0.25),\n        ),\n        
stem_chs=64,\n        stem_type='tiered',\n        stem_pool='maxpool',\n        act_layer='silu',\n    ),\n    gcresnext26ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=512, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=1024, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=2048, s=2, gs=32, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='maxpool',\n        act_layer='silu',\n        attn_layer='gca',\n    ),\n    seresnext26ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=512, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=1024, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=2048, s=2, gs=32, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='maxpool',\n        act_layer='silu',\n        attn_layer='se',\n    ),\n    eca_resnext26ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=512, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=1024, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=2048, s=2, gs=32, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='maxpool',\n        act_layer='silu',\n        attn_layer='eca',\n    ),\n    bat_resnext26ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=512, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=1024, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=2048, s=2, gs=32, br=0.25),\n        ),\n        
stem_chs=64,\n        stem_type='tiered',\n        stem_pool='maxpool',\n        act_layer='silu',\n        attn_layer='bat',\n        attn_kwargs=dict(block_size=8)\n    ),\n\n    # ResNet-32 (2, 3, 3, 2) models w/ no attn, no groups, SiLU act, no pre-fc feat layer, tiered stem w/o maxpool\n    resnet32ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=3, c=512, s=2, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=3, c=1536, s=2, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=1536, s=2, gs=0, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='',\n        num_features=0,\n        act_layer='silu',\n    ),\n\n    # ResNet-33 (2, 3, 3, 2) models w/ no attn, no groups, SiLU act, 1280 pre-FC feat, tiered stem w/o maxpool\n    resnet33ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=3, c=512, s=2, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=3, c=1536, s=2, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=1536, s=2, gs=0, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='',\n        num_features=1280,\n        act_layer='silu',\n    ),\n\n    # A series of ResNet-33 (2, 3, 3, 2) models w/ one of GC, SE, ECA attn, no groups, SiLU act, 1280 pre-FC feat\n    # and a tiered stem w/ no maxpool\n    gcresnet33ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=3, c=512, s=2, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=3, c=1536, s=2, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=1536, s=2, gs=0, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='',\n        num_features=1280,\n   
     act_layer='silu',\n        attn_layer='gca',\n    ),\n    seresnet33ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=3, c=512, s=2, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=3, c=1536, s=2, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=1536, s=2, gs=0, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='',\n        num_features=1280,\n        act_layer='silu',\n        attn_layer='se',\n    ),\n    eca_resnet33ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=3, c=512, s=2, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=3, c=1536, s=2, gs=0, br=0.25),\n            ByoBlockCfg(type='bottle', d=2, c=1536, s=2, gs=0, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='',\n        num_features=1280,\n        act_layer='silu',\n        attn_layer='eca',\n    ),\n\n    gcresnet50t=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=3, c=256, s=1, br=0.25),\n            ByoBlockCfg(type='bottle', d=4, c=512, s=2, br=0.25),\n            ByoBlockCfg(type='bottle', d=6, c=1024, s=2, br=0.25),\n            ByoBlockCfg(type='bottle', d=3, c=2048, s=2, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='',\n        attn_layer='gca',\n    ),\n\n    gcresnext50ts=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=3, c=256, s=1, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=4, c=512, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=6, c=1024, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=3, c=2048, s=2, gs=32, br=0.25),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='maxpool',\n        
act_layer='silu',\n        attn_layer='gca',\n    ),\n\n    # experimental models, closer to a RegNetZ than a ResNet. Similar to EfficientNets but w/ groups instead of DW\n    regnetz_b16=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=48, s=2, gs=16, br=3),\n            ByoBlockCfg(type='bottle', d=6, c=96, s=2, gs=16, br=3),\n            ByoBlockCfg(type='bottle', d=12, c=192, s=2, gs=16, br=3),\n            ByoBlockCfg(type='bottle', d=2, c=288, s=2, gs=16, br=3),\n        ),\n        stem_chs=32,\n        stem_pool='',\n        downsample='',\n        num_features=1536,\n        act_layer='silu',\n        attn_layer='se',\n        attn_kwargs=dict(rd_ratio=0.25),\n        block_kwargs=dict(bottle_in=True, linear_out=True),\n    ),\n    regnetz_c16=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=48, s=2, gs=16, br=4),\n            ByoBlockCfg(type='bottle', d=6, c=96, s=2, gs=16, br=4),\n            ByoBlockCfg(type='bottle', d=12, c=192, s=2, gs=16, br=4),\n            ByoBlockCfg(type='bottle', d=2, c=288, s=2, gs=16, br=4),\n        ),\n        stem_chs=32,\n        stem_pool='',\n        downsample='',\n        num_features=1536,\n        act_layer='silu',\n        attn_layer='se',\n        attn_kwargs=dict(rd_ratio=0.25),\n        block_kwargs=dict(bottle_in=True, linear_out=True),\n    ),\n    regnetz_d32=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=3, c=64, s=1, gs=32, br=4),\n            ByoBlockCfg(type='bottle', d=6, c=128, s=2, gs=32, br=4),\n            ByoBlockCfg(type='bottle', d=12, c=256, s=2, gs=32, br=4),\n            ByoBlockCfg(type='bottle', d=3, c=384, s=2, gs=32, br=4),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='',\n        downsample='',\n        num_features=1792,\n        act_layer='silu',\n        attn_layer='se',\n        attn_kwargs=dict(rd_ratio=0.25),\n        block_kwargs=dict(bottle_in=True, 
linear_out=True),\n    ),\n    regnetz_d8=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=3, c=64, s=1, gs=8, br=4),\n            ByoBlockCfg(type='bottle', d=6, c=128, s=2, gs=8, br=4),\n            ByoBlockCfg(type='bottle', d=12, c=256, s=2, gs=8, br=4),\n            ByoBlockCfg(type='bottle', d=3, c=384, s=2, gs=8, br=4),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='',\n        downsample='',\n        num_features=1792,\n        act_layer='silu',\n        attn_layer='se',\n        attn_kwargs=dict(rd_ratio=0.25),\n        block_kwargs=dict(bottle_in=True, linear_out=True),\n    ),\n    regnetz_e8=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=3, c=96, s=1, gs=8, br=4),\n            ByoBlockCfg(type='bottle', d=8, c=192, s=2, gs=8, br=4),\n            ByoBlockCfg(type='bottle', d=16, c=384, s=2, gs=8, br=4),\n            ByoBlockCfg(type='bottle', d=3, c=512, s=2, gs=8, br=4),\n        ),\n        stem_chs=64,\n        stem_type='tiered',\n        stem_pool='',\n        downsample='',\n        num_features=2048,\n        act_layer='silu',\n        attn_layer='se',\n        attn_kwargs=dict(rd_ratio=0.25),\n        block_kwargs=dict(bottle_in=True, linear_out=True),\n    ),\n\n    # experimental EvoNorm configs\n    regnetz_b16_evos=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=48, s=2, gs=16, br=3),\n            ByoBlockCfg(type='bottle', d=6, c=96, s=2, gs=16, br=3),\n            ByoBlockCfg(type='bottle', d=12, c=192, s=2, gs=16, br=3),\n            ByoBlockCfg(type='bottle', d=2, c=288, s=2, gs=16, br=3),\n        ),\n        stem_chs=32,\n        stem_pool='',\n        downsample='',\n        num_features=1536,\n        act_layer='silu',\n        norm_layer=partial(EvoNorm2dS0a, group_size=16),\n        attn_layer='se',\n        attn_kwargs=dict(rd_ratio=0.25),\n        block_kwargs=dict(bottle_in=True, linear_out=True),\n    ),\n    
regnetz_c16_evos=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=2, c=48, s=2, gs=16, br=4),\n            ByoBlockCfg(type='bottle', d=6, c=96, s=2, gs=16, br=4),\n            ByoBlockCfg(type='bottle', d=12, c=192, s=2, gs=16, br=4),\n            ByoBlockCfg(type='bottle', d=2, c=288, s=2, gs=16, br=4),\n        ),\n        stem_chs=32,\n        stem_pool='',\n        downsample='',\n        num_features=1536,\n        act_layer='silu',\n        norm_layer=partial(EvoNorm2dS0a, group_size=16),\n        attn_layer='se',\n        attn_kwargs=dict(rd_ratio=0.25),\n        block_kwargs=dict(bottle_in=True, linear_out=True),\n    ),\n    regnetz_d8_evos=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=3, c=64, s=1, gs=8, br=4),\n            ByoBlockCfg(type='bottle', d=6, c=128, s=2, gs=8, br=4),\n            ByoBlockCfg(type='bottle', d=12, c=256, s=2, gs=8, br=4),\n            ByoBlockCfg(type='bottle', d=3, c=384, s=2, gs=8, br=4),\n        ),\n        stem_chs=64,\n        stem_type='deep',\n        stem_pool='',\n        downsample='',\n        num_features=1792,\n        act_layer='silu',\n        norm_layer=partial(EvoNorm2dS0a, group_size=16),\n        attn_layer='se',\n        attn_kwargs=dict(rd_ratio=0.25),\n        block_kwargs=dict(bottle_in=True, linear_out=True),\n    ),\n\n    mobileone_s0=ByoModelCfg(\n        blocks=_mobileone_bcfg(wf=(0.75, 1.0, 1.0, 2.), num_conv_branches=4),\n        stem_type='one',\n        stem_chs=48,\n    ),\n    mobileone_s1=ByoModelCfg(\n        blocks=_mobileone_bcfg(wf=(1.5, 1.5, 2.0, 2.5)),\n        stem_type='one',\n        stem_chs=64,\n    ),\n    mobileone_s2=ByoModelCfg(\n        blocks=_mobileone_bcfg(wf=(1.5, 2.0, 2.5, 4.0)),\n        stem_type='one',\n        stem_chs=64,\n    ),\n    mobileone_s3=ByoModelCfg(\n        blocks=_mobileone_bcfg(wf=(2.0, 2.5, 3.0, 4.0)),\n        stem_type='one',\n        stem_chs=64,\n    ),\n    mobileone_s4=ByoModelCfg(\n        
blocks=_mobileone_bcfg(wf=(3.0, 3.5, 3.5, 4.0), se_blocks=(0, 0, 5, 1)),\n        stem_type='one',\n        stem_chs=64,\n    ),\n\n    resnet50_clip=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=3, c=256, s=1, br=0.25),\n            ByoBlockCfg(type='bottle', d=4, c=512, s=2, br=0.25),\n            ByoBlockCfg(type='bottle', d=6, c=1024, s=2, br=0.25),\n            ByoBlockCfg(type='bottle', d=3, c=2048, s=2, br=0.25),\n        ),\n        stem_chs=(32, 32, 64),\n        stem_type='',\n        stem_pool='avg2',\n        downsample='avg',\n        aa_layer='avg',\n        head_type='attn_abs',\n    ),\n    resnet101_clip=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=3, c=256, s=1, br=0.25),\n            ByoBlockCfg(type='bottle', d=4, c=512, s=2, br=0.25),\n            ByoBlockCfg(type='bottle', d=23, c=1024, s=2, br=0.25),\n            ByoBlockCfg(type='bottle', d=3, c=2048, s=2, br=0.25),\n        ),\n        stem_chs=(32, 32, 64),\n        stem_type='',\n        stem_pool='avg2',\n        downsample='avg',\n        aa_layer='avg',\n        head_type='attn_abs',\n    ),\n    resnet50x4_clip=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=4, c=256, s=1, br=0.25),\n            ByoBlockCfg(type='bottle', d=6, c=512, s=2, br=0.25),\n            ByoBlockCfg(type='bottle', d=10, c=1024, s=2, br=0.25),\n            ByoBlockCfg(type='bottle', d=6, c=2048, s=2, br=0.25),\n        ),\n        width_factor=1.25,\n        stem_chs=(32, 32, 64),\n        stem_type='',\n        stem_pool='avg2',\n        downsample='avg',\n        aa_layer='avg',\n        head_type='attn_abs',\n    ),\n    resnet50x16_clip=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=6, c=256, s=1, br=0.25),\n            ByoBlockCfg(type='bottle', d=8, c=512, s=2, br=0.25),\n            ByoBlockCfg(type='bottle', d=18, c=1024, s=2, br=0.25),\n            ByoBlockCfg(type='bottle', d=8, c=2048, s=2, 
br=0.25),\n        ),\n        width_factor=1.5,\n        stem_chs=(32, 32, 64),\n        stem_type='',\n        stem_pool='avg2',\n        downsample='avg',\n        aa_layer='avg',\n        head_type='attn_abs',\n    ),\n    resnet50x64_clip=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=3, c=256, s=1, br=0.25),\n            ByoBlockCfg(type='bottle', d=15, c=512, s=2, br=0.25),\n            ByoBlockCfg(type='bottle', d=36, c=1024, s=2, br=0.25),\n            ByoBlockCfg(type='bottle', d=10, c=2048, s=2, br=0.25),\n        ),\n        width_factor=2.0,\n        stem_chs=(32, 32, 64),\n        stem_type='',\n        stem_pool='avg2',\n        downsample='avg',\n        aa_layer='avg',\n        head_type='attn_abs',\n    ),\n\n    resnet50_mlp=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='bottle', d=3, c=256, s=1, br=0.25),\n            ByoBlockCfg(type='bottle', d=4, c=512, s=2, br=0.25),\n            ByoBlockCfg(type='bottle', d=6, c=1024, s=2, br=0.25),\n            ByoBlockCfg(type='bottle', d=3, c=2048, s=2, br=0.25),\n        ),\n        stem_chs=(32, 32, 64),\n        stem_type='',\n        stem_pool='avg2',\n        downsample='avg',\n        aa_layer='avg',\n        head_hidden_size=1024,\n        head_type='mlp',\n    ),\n\n    test_byobnet=ByoModelCfg(\n        blocks=(\n            ByoBlockCfg(type='edge', d=1, c=32, s=2, gs=0, br=0.5),\n            ByoBlockCfg(type='dark', d=1, c=64, s=2, gs=0, br=0.5),\n            ByoBlockCfg(type='basic', d=1, c=128, s=2, gs=32, br=0.25),\n            ByoBlockCfg(type='bottle', d=1, c=256, s=2, gs=64, br=0.25),\n        ),\n        stem_chs=24,\n        downsample='avg',\n        stem_pool='',\n        act_layer='relu',\n        attn_layer='se',\n        attn_kwargs=dict(rd_ratio=0.25),\n    ),\n)\nfor k in ('resnet50_clip', 'resnet101_clip', 'resnet50x4_clip', 'resnet50x16_clip', 'resnet50x64_clip'):\n    model_cfgs[k + '_gap'] = replace(model_cfgs[k], 
head_type='classifier')\n\n\ndef _convert_openai_clip(\n        state_dict: Dict[str, torch.Tensor],\n        model: ByobNet,\n        prefix: str = 'visual.',\n) -> Dict[str, torch.Tensor]:\n    model_has_attn_pool = isinstance(model.head, (RotAttentionPool2d, AttentionPool2d))\n    import re\n\n    def _stage_sub(m):\n        stage_idx = int(m.group(1)) - 1\n        layer_idx, layer_type, layer_id = int(m.group(2)), m.group(3), int(m.group(4))\n        prefix_str = f'stages.{stage_idx}.{layer_idx}.'\n        id_map = {1: 'conv1_1x1.', 2: 'conv2_kxk.', 3: 'conv3_1x1.'}\n        suffix_str = id_map[layer_id] + layer_type\n        return prefix_str + suffix_str\n\n    def _down_sub(m):\n        stage_idx = int(m.group(1)) - 1\n        layer_idx, layer_id = int(m.group(2)), int(m.group(3))\n        return f'stages.{stage_idx}.{layer_idx}.shortcut.' + ('conv.conv' if layer_id == 0 else 'conv.bn')\n\n    out_dict = {}\n    for k, v in state_dict.items():\n        if not k.startswith(prefix):\n            continue\n        k = re.sub(rf'{prefix}conv([0-9])', r'stem.conv\\1.conv', k)\n        k = re.sub(rf'{prefix}bn([0-9])', r'stem.conv\\1.bn', k)\n        k = re.sub(rf'{prefix}layer([0-9])\\.([0-9]+)\\.([a-z]+)([0-9])', _stage_sub, k)\n        k = re.sub(rf'{prefix}layer([0-9])\\.([0-9]+)\\.downsample\\.([0-9])', _down_sub, k)\n        if k.startswith(f'{prefix}attnpool'):\n            if not model_has_attn_pool:\n                continue\n            k = k.replace(prefix + 'attnpool', 'head')  # 'attn_pool')\n            k = k.replace('positional_embedding', 'pos_embed')\n            k = k.replace('q_proj', 'q')\n            k = k.replace('k_proj', 'k')\n            k = k.replace('v_proj', 'v')\n            k = k.replace('c_proj', 'proj')\n        out_dict[k] = v\n\n    return out_dict\n\n\ndef checkpoint_filter_fn(\n        state_dict: Dict[str, torch.Tensor],\n        model: ByobNet\n):\n    if 'visual.conv1.weight' in state_dict:\n        state_dict = 
_convert_openai_clip(state_dict, model)\n    return state_dict\n\n\ndef _create_byobnet(variant: str, pretrained: bool = False, **kwargs) -> ByobNet:\n    \"\"\"Create a ByobNet model.\n\n    Args:\n        variant: Model variant name.\n        pretrained: Load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    Returns:\n        ByobNet model instance.\n    \"\"\"\n    return build_model_with_cfg(\n        ByobNet, variant, pretrained,\n        model_cfg=model_cfgs[variant],\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(flatten_sequential=True),\n        **kwargs,\n    )\n\n\ndef _cfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Create default configuration dictionary.\n\n    Args:\n        url: Model weight URL.\n        **kwargs: Additional configuration options.\n\n    Returns:\n        Configuration dictionary.\n    \"\"\"\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bilinear',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.conv', 'classifier': 'head.fc',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndef _cfgr(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Create RepVGG configuration dictionary.\n\n    Args:\n        url: Model weight URL.\n        **kwargs: Additional configuration options.\n\n    Returns:\n        Configuration dictionary.\n    \"\"\"\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 256, 256), 'pool_size': (8, 8),\n        'crop_pct': 0.9, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.conv1.conv', 'classifier': 'head.fc',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    # GPU-Efficient (ResNet) weights\n    'gernet_s.idstcv_in1k': 
_cfg(hf_hub_id='timm/'),\n    'gernet_m.idstcv_in1k': _cfg(hf_hub_id='timm/'),\n    'gernet_l.idstcv_in1k': _cfg(hf_hub_id='timm/', input_size=(3, 256, 256), pool_size=(8, 8)),\n\n    # RepVGG weights\n    'repvgg_a0.rvgg_in1k': _cfg(\n        hf_hub_id='timm/',\n        first_conv=('stem.conv_kxk.conv', 'stem.conv_1x1.conv'), license='mit'),\n    'repvgg_a1.rvgg_in1k': _cfg(\n        hf_hub_id='timm/',\n        first_conv=('stem.conv_kxk.conv', 'stem.conv_1x1.conv'), license='mit'),\n    'repvgg_a2.rvgg_in1k': _cfg(\n        hf_hub_id='timm/',\n        first_conv=('stem.conv_kxk.conv', 'stem.conv_1x1.conv'), license='mit'),\n    'repvgg_b0.rvgg_in1k': _cfg(\n        hf_hub_id='timm/',\n        first_conv=('stem.conv_kxk.conv', 'stem.conv_1x1.conv'), license='mit'),\n    'repvgg_b1.rvgg_in1k': _cfg(\n        hf_hub_id='timm/',\n        first_conv=('stem.conv_kxk.conv', 'stem.conv_1x1.conv'), license='mit'),\n    'repvgg_b1g4.rvgg_in1k': _cfg(\n        hf_hub_id='timm/',\n        first_conv=('stem.conv_kxk.conv', 'stem.conv_1x1.conv'), license='mit'),\n    'repvgg_b2.rvgg_in1k': _cfg(\n        hf_hub_id='timm/',\n        first_conv=('stem.conv_kxk.conv', 'stem.conv_1x1.conv'), license='mit'),\n    'repvgg_b2g4.rvgg_in1k': _cfg(\n        hf_hub_id='timm/',\n        first_conv=('stem.conv_kxk.conv', 'stem.conv_1x1.conv'), license='mit'),\n    'repvgg_b3.rvgg_in1k': _cfg(\n        hf_hub_id='timm/',\n        first_conv=('stem.conv_kxk.conv', 'stem.conv_1x1.conv'), license='mit'),\n    'repvgg_b3g4.rvgg_in1k': _cfg(\n        hf_hub_id='timm/',\n        first_conv=('stem.conv_kxk.conv', 'stem.conv_1x1.conv'), license='mit'),\n    'repvgg_d2se.rvgg_in1k': _cfg(\n        hf_hub_id='timm/',\n        first_conv=('stem.conv_kxk.conv', 'stem.conv_1x1.conv'), license='mit',\n        input_size=(3, 320, 320), pool_size=(10, 10), crop_pct=1.0,\n    ),\n\n    # experimental ResNet configs\n    'resnet51q.ra2_in1k': _cfg(\n        hf_hub_id='timm/',\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet51q_ra2-d47dcc76.pth',\n        first_conv='stem.conv1', input_size=(3, 256, 256), pool_size=(8, 8),\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'resnet61q.ra2_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet61q_ra2-6afc536c.pth',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n\n    # ResNeXt-26 models with different attention in Bottleneck blocks\n    'resnext26ts.ra2_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/resnext26ts_256_ra2-8bbd9106.pth',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'seresnext26ts.ch_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/seresnext26ts_256-6f0d74a3.pth',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'gcresnext26ts.ch_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/gcresnext26ts_256-e414378b.pth',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'eca_resnext26ts.ch_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/eca_resnext26ts_256-5a1d030f.pth',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'bat_resnext26ts.ch_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/bat_resnext26ts_256-fa6fd595.pth',\n        min_input_size=(3, 256, 256)),\n\n    # ResNet-32 / 33 models with different attention in Bottleneck blocks\n    'resnet32ts.ra2_in1k': _cfgr(\n        hf_hub_id='timm/',\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/resnet32ts_256-aacf5250.pth',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'resnet33ts.ra2_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/resnet33ts_256-e91b09a4.pth',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'gcresnet33ts.ra2_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/gcresnet33ts_256-0e0cd345.pth',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'seresnet33ts.ra2_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/seresnet33ts_256-f8ad44d9.pth',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'eca_resnet33ts.ra2_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/eca_resnet33ts_256-8f98face.pth',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n\n    'gcresnet50t.ra2_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/gcresnet50t_256-96374d1c.pth',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n\n    'gcresnext50ts.ch_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/gcresnext50ts_256-3e0f515e.pth',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n\n    # custom `timm` specific RegNetZ inspired models w/ different sizing from paper\n    'regnetz_b16.ra3_in1k': _cfgr(\n        hf_hub_id='timm/',\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/regnetz_b_raa-677d9606.pth',\n        first_conv='stem.conv', mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 224, 224), pool_size=(7, 7), crop_pct=0.94, test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'regnetz_c16.ra3_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/regnetz_c_rab2_256-a54bf36a.pth',\n        first_conv='stem.conv', mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        crop_pct=0.94, test_input_size=(3, 320, 320), test_crop_pct=1.0),\n    'regnetz_d32.ra3_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/regnetz_d_rab_256-b8073a89.pth',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=0.95, test_input_size=(3, 320, 320)),\n    'regnetz_d8.ra3_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/regnetz_d8_bh-afc03c55.pth',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=0.94, test_input_size=(3, 320, 320), test_crop_pct=1.0),\n    'regnetz_e8.ra3_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/regnetz_e8_bh-aace8e6e.pth',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=0.94, test_input_size=(3, 320, 320), test_crop_pct=1.0),\n\n    'regnetz_b16_evos.untrained': _cfgr(\n        first_conv='stem.conv', mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 224, 224), pool_size=(7, 7), crop_pct=0.95, test_input_size=(3, 288, 288)),\n    'regnetz_c16_evos.ch_in1k': _cfgr(\n        hf_hub_id='timm/',\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/regnetz_c16_evos_ch-d8311942.pth',\n        first_conv='stem.conv', mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        crop_pct=0.95, test_input_size=(3, 320, 320)),\n    'regnetz_d8_evos.ch_in1k': _cfgr(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/regnetz_d8_evos_ch-2bc12646.pth',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=0.95, test_input_size=(3, 320, 320), test_crop_pct=1.0),\n\n    'mobileone_s0.apple_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.875,\n        first_conv=('stem.conv_kxk.0.conv', 'stem.conv_scale.conv'),\n        license='mobileone-license',\n    ),\n    'mobileone_s1.apple_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.9,\n        first_conv=('stem.conv_kxk.0.conv', 'stem.conv_scale.conv'),\n        license='mobileone-license',\n    ),\n    'mobileone_s2.apple_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.9,\n        first_conv=('stem.conv_kxk.0.conv', 'stem.conv_scale.conv'),\n        license='mobileone-license',\n    ),\n    'mobileone_s3.apple_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.9,\n        first_conv=('stem.conv_kxk.0.conv', 'stem.conv_scale.conv'),\n        license='mobileone-license',\n    ),\n    'mobileone_s4.apple_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.9,\n        first_conv=('stem.conv_kxk.0.conv', 'stem.conv_scale.conv'),\n        license='mobileone-license',\n    ),\n\n    # original attention pool head variants\n    'resnet50_clip.openai': _cfgr(\n        hf_hub_id='timm/',\n        num_classes=1024, mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        fixed_input_size=True, input_size=(3, 224, 224), pool_size=(7, 7),\n        classifier='head.proj',\n        license='mit',\n    ),\n    'resnet101_clip.openai': _cfgr(\n        hf_hub_id='timm/',\n       
 num_classes=512, mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        fixed_input_size=True, input_size=(3, 224, 224), pool_size=(7, 7),\n        classifier='head.proj',\n        license='mit',\n    ),\n    'resnet50x4_clip.openai': _cfgr(\n        hf_hub_id='timm/',\n        num_classes=640, mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        fixed_input_size=True, input_size=(3, 288, 288), pool_size=(9, 9),\n        classifier='head.proj',\n        license='mit',\n    ),\n    'resnet50x16_clip.openai': _cfgr(\n        hf_hub_id='timm/',\n        num_classes=768, mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        fixed_input_size=True, input_size=(3, 384, 384), pool_size=(12, 12),\n        classifier='head.proj',\n        license='mit',\n    ),\n    'resnet50x64_clip.openai': _cfgr(\n        hf_hub_id='timm/',\n        num_classes=1024, mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        fixed_input_size=True, input_size=(3, 448, 448), pool_size=(14, 14),\n        classifier='head.proj',\n        license='mit',\n    ),\n    'resnet50_clip.cc12m': _cfgr(\n        hf_hub_id='timm/',\n        num_classes=1024, mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        fixed_input_size=True, input_size=(3, 224, 224), pool_size=(7, 7),\n        classifier='head.proj',\n        license='mit',\n    ),\n    'resnet50_clip.yfcc15m': _cfgr(\n        hf_hub_id='timm/',\n        num_classes=1024, mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        fixed_input_size=True, input_size=(3, 224, 224), pool_size=(7, 7),\n        classifier='head.proj',\n        license='mit',\n    ),\n    'resnet101_clip.yfcc15m': _cfgr(\n        hf_hub_id='timm/',\n        num_classes=512, mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        fixed_input_size=True, input_size=(3, 224, 224), pool_size=(7, 7),\n        classifier='head.proj',\n        license='mit',\n    ),\n\n    # avg-pool w/ optional standard classifier head variants\n    'resnet50_clip_gap.openai': _cfgr(\n        
hf_hub_id='timm/',\n        num_classes=0, mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 224, 224), pool_size=(7, 7),\n        license='mit',\n    ),\n    'resnet101_clip_gap.openai': _cfgr(\n        hf_hub_id='timm/',\n        num_classes=0, mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 224, 224), pool_size=(7, 7),\n        license='mit',\n    ),\n    'resnet50x4_clip_gap.openai': _cfgr(\n        hf_hub_id='timm/',\n        num_classes=0, mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 288, 288), pool_size=(9, 9),\n        license='mit',\n    ),\n    'resnet50x16_clip_gap.openai': _cfgr(\n        hf_hub_id='timm/',\n        num_classes=0, mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 384, 384), pool_size=(12, 12),\n        license='mit',\n    ),\n    'resnet50x64_clip_gap.openai': _cfgr(\n        hf_hub_id='timm/',\n        num_classes=0, mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 448, 448), pool_size=(14, 14),\n        license='mit',\n    ),\n    'resnet50_clip_gap.cc12m': _cfgr(\n        hf_hub_id='timm/',\n        num_classes=0, mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 224, 224), pool_size=(7, 7),\n        license='mit',\n    ),\n    'resnet50_clip_gap.yfcc15m': _cfgr(\n        hf_hub_id='timm/',\n        num_classes=0, mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 224, 224), pool_size=(7, 7),\n        license='mit',\n    ),\n    'resnet101_clip_gap.yfcc15m': _cfgr(\n        hf_hub_id='timm/',\n        num_classes=0, mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 224, 224), pool_size=(7, 7),\n        license='mit',\n    ),\n\n    'resnet50_mlp.untrained': _cfgr(\n        input_size=(3, 256, 256), pool_size=(8, 8),\n    ),\n\n    'test_byobnet.r160_in1k': _cfgr(\n        hf_hub_id='timm/',\n        first_conv='stem.conv',\n        input_size=(3, 160, 160), crop_pct=0.95, pool_size=(5, 
5),\n    ),\n})\n\n\n@register_model\ndef gernet_l(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" GEResNet-Large (GENet-Large from official impl)\n    `Neural Architecture Design for GPU-Efficient Networks` - https://arxiv.org/abs/2006.14090\n    \"\"\"\n    return _create_byobnet('gernet_l', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef gernet_m(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" GEResNet-Medium (GENet-Normal from official impl)\n    `Neural Architecture Design for GPU-Efficient Networks` - https://arxiv.org/abs/2006.14090\n    \"\"\"\n    return _create_byobnet('gernet_m', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef gernet_s(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" EResNet-Small (GENet-Small from official impl)\n    `Neural Architecture Design for GPU-Efficient Networks` - https://arxiv.org/abs/2006.14090\n    \"\"\"\n    return _create_byobnet('gernet_s', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef repvgg_a0(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" RepVGG-A0\n    `Making VGG-style ConvNets Great Again` - https://arxiv.org/abs/2101.03697\n    \"\"\"\n    return _create_byobnet('repvgg_a0', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef repvgg_a1(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" RepVGG-A1\n    `Making VGG-style ConvNets Great Again` - https://arxiv.org/abs/2101.03697\n    \"\"\"\n    return _create_byobnet('repvgg_a1', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef repvgg_a2(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" RepVGG-A2\n    `Making VGG-style ConvNets Great Again` - https://arxiv.org/abs/2101.03697\n    \"\"\"\n    return _create_byobnet('repvgg_a2', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef repvgg_b0(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" RepVGG-B0\n    `Making VGG-style ConvNets Great Again` - https://arxiv.org/abs/2101.03697\n    \"\"\"\n    return _create_byobnet('repvgg_b0', 
pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef repvgg_b1(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" RepVGG-B1\n    `Making VGG-style ConvNets Great Again` - https://arxiv.org/abs/2101.03697\n    \"\"\"\n    return _create_byobnet('repvgg_b1', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef repvgg_b1g4(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" RepVGG-B1g4\n    `Making VGG-style ConvNets Great Again` - https://arxiv.org/abs/2101.03697\n    \"\"\"\n    return _create_byobnet('repvgg_b1g4', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef repvgg_b2(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" RepVGG-B2\n    `Making VGG-style ConvNets Great Again` - https://arxiv.org/abs/2101.03697\n    \"\"\"\n    return _create_byobnet('repvgg_b2', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef repvgg_b2g4(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" RepVGG-B2g4\n    `Making VGG-style ConvNets Great Again` - https://arxiv.org/abs/2101.03697\n    \"\"\"\n    return _create_byobnet('repvgg_b2g4', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef repvgg_b3(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" RepVGG-B3\n    `Making VGG-style ConvNets Great Again` - https://arxiv.org/abs/2101.03697\n    \"\"\"\n    return _create_byobnet('repvgg_b3', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef repvgg_b3g4(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" RepVGG-B3g4\n    `Making VGG-style ConvNets Great Again` - https://arxiv.org/abs/2101.03697\n    \"\"\"\n    return _create_byobnet('repvgg_b3g4', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef repvgg_d2se(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" RepVGG-D2se\n    `Making VGG-style ConvNets Great Again` - https://arxiv.org/abs/2101.03697\n    \"\"\"\n    return _create_byobnet('repvgg_d2se', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef resnet51q(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    
\"\"\"\n    return _create_byobnet('resnet51q', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef resnet61q(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('resnet61q', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef resnext26ts(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('resnext26ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef gcresnext26ts(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('gcresnext26ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef seresnext26ts(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('seresnext26ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef eca_resnext26ts(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('eca_resnext26ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef bat_resnext26ts(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('bat_resnext26ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef resnet32ts(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('resnet32ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef resnet33ts(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('resnet33ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef gcresnet33ts(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('gcresnet33ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef seresnet33ts(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('seresnet33ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef eca_resnet33ts(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return 
_create_byobnet('eca_resnet33ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef gcresnet50t(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('gcresnet50t', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef gcresnext50ts(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('gcresnext50ts', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef regnetz_b16(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('regnetz_b16', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef regnetz_c16(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('regnetz_c16', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef regnetz_d32(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('regnetz_d32', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef regnetz_d8(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('regnetz_d8', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef regnetz_e8(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('regnetz_e8', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef regnetz_b16_evos(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('regnetz_b16_evos', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef regnetz_c16_evos(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('regnetz_c16_evos', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef regnetz_d8_evos(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('regnetz_d8_evos', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mobileone_s0(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return 
_create_byobnet('mobileone_s0', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mobileone_s1(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('mobileone_s1', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mobileone_s2(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('mobileone_s2', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mobileone_s3(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('mobileone_s3', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mobileone_s4(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('mobileone_s4', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef resnet50_clip(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" OpenAI Modified ResNet-50 CLIP image tower\n    \"\"\"\n    return _create_byobnet('resnet50_clip', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef resnet101_clip(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" OpenAI Modified ResNet-101 CLIP image tower\n    \"\"\"\n    return _create_byobnet('resnet101_clip', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef resnet50x4_clip(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" OpenAI Modified ResNet-50x4 CLIP image tower\n    \"\"\"\n    return _create_byobnet('resnet50x4_clip', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef resnet50x16_clip(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" OpenAI Modified ResNet-50x16 CLIP image tower\n    \"\"\"\n    return _create_byobnet('resnet50x16_clip', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef resnet50x64_clip(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" OpenAI Modified ResNet-50x64 CLIP image tower\n    \"\"\"\n    return _create_byobnet('resnet50x64_clip', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef resnet50_clip_gap(pretrained=False, 
**kwargs) -> ByobNet:\n    \"\"\" OpenAI Modified ResNet-50 CLIP image tower w/ avg pool (no attention pool)\n    \"\"\"\n    return _create_byobnet('resnet50_clip_gap', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef resnet101_clip_gap(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" OpenAI Modified ResNet-101 CLIP image tower w/ avg pool (no attention pool)\n    \"\"\"\n    return _create_byobnet('resnet101_clip_gap', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef resnet50x4_clip_gap(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" OpenAI Modified ResNet-50x4 CLIP image tower w/ avg pool (no attention pool)\n    \"\"\"\n    return _create_byobnet('resnet50x4_clip_gap', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef resnet50x16_clip_gap(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" OpenAI Modified ResNet-50x16 CLIP image tower w/ avg pool (no attention pool)\n    \"\"\"\n    return _create_byobnet('resnet50x16_clip_gap', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef resnet50x64_clip_gap(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" OpenAI Modified ResNet-50x64 CLIP image tower w/ avg pool (no attention pool)\n    \"\"\"\n    return _create_byobnet('resnet50x64_clip_gap', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef resnet50_mlp(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\"\n    \"\"\"\n    return _create_byobnet('resnet50_mlp', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef test_byobnet(pretrained=False, **kwargs) -> ByobNet:\n    \"\"\" Minimal test ResNet (BYOB based) model.\n    \"\"\"\n    return _create_byobnet('test_byobnet', pretrained=pretrained, **kwargs)\n"
  },
  {
    "path": "timm/models/cait.py",
    "content": "\"\"\" Class-Attention in Image Transformers (CaiT)\n\nPaper: 'Going deeper with Image Transformers' - https://arxiv.org/abs/2103.17239\n\nOriginal code and weights from https://github.com/facebookresearch/deit, copyright below\n\nModifications and additions for timm hacked together by / Copyright 2021, Ross Wightman\n\"\"\"\n# Copyright (c) 2015-present, Facebook, Inc.\n# All rights reserved.\nfrom functools import partial\nfrom typing import List, Optional, Tuple, Union, Type, Any\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import PatchEmbed, Mlp, DropPath, trunc_normal_, use_fused_attn\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint, checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['Cait', 'ClassAttn', 'LayerScaleBlockClassAttn', 'LayerScaleBlock', 'TalkingHeadAttn']\n\n\nclass ClassAttn(nn.Module):\n    # taken from https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vision_transformer.py\n    # with slight modifications to do CA\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_heads = num_heads\n        head_dim = dim // num_heads\n        self.scale = head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n\n        self.q = nn.Linear(dim, dim, bias=qkv_bias, **dd)\n        self.k = nn.Linear(dim, dim, bias=qkv_bias, **dd)\n        self.v = nn.Linear(dim, dim, bias=qkv_bias, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(dim, dim, **dd)\n 
       self.proj_drop = nn.Dropout(proj_drop)\n\n    def forward(self, x):\n        B, N, C = x.shape\n        q = self.q(x[:, 0]).unsqueeze(1).reshape(B, 1, self.num_heads, C // self.num_heads).permute(0, 2, 1, 3)\n        k = self.k(x).reshape(B, N, self.num_heads, C // self.num_heads).permute(0, 2, 1, 3)\n        v = self.v(x).reshape(B, N, self.num_heads, C // self.num_heads).permute(0, 2, 1, 3)\n\n        if self.fused_attn:\n            x_cls = torch.nn.functional.scaled_dot_product_attention(\n                q, k, v,\n                dropout_p=self.attn_drop.p if self.training else 0.,\n            )\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x_cls = attn @ v\n\n        x_cls = x_cls.transpose(1, 2).reshape(B, 1, C)\n        x_cls = self.proj(x_cls)\n        x_cls = self.proj_drop(x_cls)\n\n        return x_cls\n\n\nclass LayerScaleBlockClassAttn(nn.Module):\n    # taken from https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vision_transformer.py\n    # with slight modifications to add CA and LayerScale\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            attn_block: Type[nn.Module] = ClassAttn,\n            mlp_block: Type[nn.Module] = Mlp,\n            init_values: float = 1e-4,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = attn_block(\n            dim,\n            num_heads=num_heads,\n            
qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            **dd,\n        )\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n        self.norm2 = norm_layer(dim, **dd)\n        mlp_hidden_dim = int(dim * mlp_ratio)\n        self.mlp = mlp_block(\n            in_features=dim,\n            hidden_features=mlp_hidden_dim,\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n        self.gamma_1 = nn.Parameter(init_values * torch.ones(dim, **dd))\n        self.gamma_2 = nn.Parameter(init_values * torch.ones(dim, **dd))\n\n    def forward(self, x, x_cls):\n        u = torch.cat((x_cls, x), dim=1)\n        x_cls = x_cls + self.drop_path(self.gamma_1 * self.attn(self.norm1(u)))\n        x_cls = x_cls + self.drop_path(self.gamma_2 * self.mlp(self.norm2(x_cls)))\n        return x_cls\n\n\nclass TalkingHeadAttn(nn.Module):\n    # taken from https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vision_transformer.py\n    # with slight modifications to add Talking Heads Attention (https://arxiv.org/pdf/2003.02436v1.pdf)\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n\n        self.num_heads = num_heads\n\n        head_dim = dim // num_heads\n\n        self.scale = head_dim ** -0.5\n\n        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n\n        self.proj = nn.Linear(dim, dim, **dd)\n\n        self.proj_l = nn.Linear(num_heads, num_heads, **dd)\n        self.proj_w = nn.Linear(num_heads, num_heads, **dd)\n\n        self.proj_drop = nn.Dropout(proj_drop)\n\n    def forward(self, x):\n        B, N, 
C = x.shape\n        qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4)\n        q, k, v = qkv[0] * self.scale, qkv[1], qkv[2]\n\n        attn = q @ k.transpose(-2, -1)\n\n        attn = self.proj_l(attn.permute(0, 2, 3, 1)).permute(0, 3, 1, 2)\n\n        attn = attn.softmax(dim=-1)\n\n        attn = self.proj_w(attn.permute(0, 2, 3, 1)).permute(0, 3, 1, 2)\n        attn = self.attn_drop(attn)\n\n        x = (attn @ v).transpose(1, 2).reshape(B, N, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n\nclass LayerScaleBlock(nn.Module):\n    # taken from https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vision_transformer.py\n    # with slight modifications to add layerScale\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            attn_block: Type[nn.Module] = TalkingHeadAttn,\n            mlp_block: Type[nn.Module] = Mlp,\n            init_values: float = 1e-4,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = attn_block(\n            dim,\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            **dd,\n        )\n        self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n        self.norm2 = norm_layer(dim, **dd)\n        mlp_hidden_dim = int(dim * mlp_ratio)\n        self.mlp = mlp_block(\n            in_features=dim,\n            hidden_features=mlp_hidden_dim,\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n        self.gamma_1 = nn.Parameter(init_values * torch.ones(dim, **dd))\n        self.gamma_2 = nn.Parameter(init_values * torch.ones(dim, **dd))\n\n    def forward(self, x):\n        x = x + self.drop_path(self.gamma_1 * self.attn(self.norm1(x)))\n        x = x + self.drop_path(self.gamma_2 * self.mlp(self.norm2(x)))\n        return x\n\n\nclass Cait(nn.Module):\n    # taken from https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vision_transformer.py\n    # with slight modifications to adapt to our cait models\n    def __init__(\n            self,\n            img_size: int = 224,\n            patch_size: int = 16,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'token',\n            embed_dim: int = 768,\n            depth: int = 12,\n            num_heads: int = 12,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            drop_rate: float = 0.,\n            pos_drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            block_layers: Type[nn.Module] = LayerScaleBlock,\n            block_layers_token: Type[nn.Module] = LayerScaleBlockClassAttn,\n            patch_layer: Type[nn.Module] = PatchEmbed,\n            norm_layer: Type[nn.Module] = partial(nn.LayerNorm, eps=1e-6),\n            act_layer: Type[nn.Module] = nn.GELU,\n            attn_block: Type[nn.Module] = TalkingHeadAttn,\n            mlp_block: Type[nn.Module] = Mlp,\n            init_values: float = 1e-4,\n            attn_block_token_only: Type[nn.Module] = ClassAttn,\n            
mlp_block_token_only: Type[nn.Module] = Mlp,\n            depth_token_only: int = 2,\n            mlp_ratio_token_only: float = 4.0,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert global_pool in ('', 'token', 'avg')\n\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.num_features = self.head_hidden_size = self.embed_dim = embed_dim\n        self.grad_checkpointing = False\n\n        self.patch_embed = patch_layer(\n            img_size=img_size,\n            patch_size=patch_size,\n            in_chans=in_chans,\n            embed_dim=embed_dim,\n            **dd,\n        )\n        num_patches = self.patch_embed.num_patches\n        r = self.patch_embed.feat_ratio() if hasattr(self.patch_embed, 'feat_ratio') else patch_size\n\n        self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim, **dd))\n        self.pos_embed = nn.Parameter(torch.zeros(1, num_patches, embed_dim, **dd))\n        self.pos_drop = nn.Dropout(p=pos_drop_rate)\n\n        dpr = [drop_path_rate for i in range(depth)]\n        self.blocks = nn.Sequential(*[block_layers(\n            dim=embed_dim,\n            num_heads=num_heads,\n            mlp_ratio=mlp_ratio,\n            qkv_bias=qkv_bias,\n            proj_drop=proj_drop_rate,\n            attn_drop=attn_drop_rate,\n            drop_path=dpr[i],\n            norm_layer=norm_layer,\n            act_layer=act_layer,\n            attn_block=attn_block,\n            mlp_block=mlp_block,\n            init_values=init_values,\n            **dd,\n        ) for i in range(depth)])\n        self.feature_info = [dict(num_chs=embed_dim, reduction=r, module=f'blocks.{i}') for i in range(depth)]\n\n        self.blocks_token_only = nn.ModuleList([block_layers_token(\n            dim=embed_dim,\n            num_heads=num_heads,\n            
mlp_ratio=mlp_ratio_token_only,\n            qkv_bias=qkv_bias,\n            norm_layer=norm_layer,\n            act_layer=act_layer,\n            attn_block=attn_block_token_only,\n            mlp_block=mlp_block_token_only,\n            init_values=init_values,\n            **dd,\n        ) for _ in range(depth_token_only)])\n\n        self.norm = norm_layer(embed_dim, **dd)\n\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = nn.Linear(embed_dim, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n        trunc_normal_(self.pos_embed, std=.02)\n        trunc_normal_(self.cls_token, std=.02)\n        self.apply(self._init_weights)\n\n    def _init_weights(self, m):\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=.02)\n            if isinstance(m, nn.Linear) and m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n        elif isinstance(m, nn.LayerNorm):\n            nn.init.constant_(m.bias, 0)\n            nn.init.constant_(m.weight, 1.0)\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {'pos_embed', 'cls_token'}\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        def _matcher(name):\n            if any([name.startswith(n) for n in ('cls_token', 'pos_embed', 'patch_embed')]):\n                return 0\n            elif name.startswith('blocks.'):\n                return int(name.split('.')[1]) + 1\n            elif name.startswith('blocks_token_only.'):\n                # overlap token only blocks with last blocks\n                to_offset = len(self.blocks) - len(self.blocks_token_only) + 1\n                return int(name.split('.')[1]) + to_offset\n            elif name.startswith('norm.'):\n                return len(self.blocks)\n            else:\n                return float('inf')\n        return _matcher\n\n    
@torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            assert global_pool in ('', 'token', 'avg')\n            self.global_pool = global_pool\n        self.head = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to all intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        \"\"\"\n        assert output_fmt in ('NCHW', 'NLC'), 'Output format must be one of NCHW or NLC.'\n        reshape = output_fmt == 'NCHW'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n\n        # forward pass\n        B, _, height, width = x.shape\n        x = self.patch_embed(x)\n        x = x + self.pos_embed\n        x = self.pos_drop(x)\n\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            blocks = self.blocks\n        else:\n            blocks = self.blocks[:max_index + 1]\n        for i, blk in enumerate(blocks):\n            if 
self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x)\n            else:\n                x = blk(x)\n            if i in take_indices:\n                # normalize intermediates with final norm layer if enabled\n                intermediates.append(self.norm(x) if norm else x)\n\n        # process intermediates\n        if reshape:\n            # reshape to BCHW output format\n            H, W = self.patch_embed.dynamic_feat_size((height, width))\n            intermediates = [y.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous() for y in intermediates]\n\n        if intermediates_only:\n            return intermediates\n\n        # NOTE not supporting return of class tokens\n        cls_tokens = self.cls_token.expand(x.shape[0], -1, -1)\n        for i, blk in enumerate(self.blocks_token_only):\n            cls_tokens = blk(x, cls_tokens)\n        x = torch.cat((cls_tokens, x), dim=1)\n        x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n        self.blocks = self.blocks[:max_index + 1]  # truncate blocks\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.blocks_token_only = nn.ModuleList()  # prune token blocks with head\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.patch_embed(x)\n        x = x + self.pos_embed\n        x = self.pos_drop(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        cls_tokens = 
self.cls_token.expand(x.shape[0], -1, -1)\n        for i, blk in enumerate(self.blocks_token_only):\n            cls_tokens = blk(x, cls_tokens)\n        x = torch.cat((cls_tokens, x), dim=1)\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        if self.global_pool:\n            x = x[:, 1:].mean(dim=1) if self.global_pool == 'avg' else x[:, 0]\n        x = self.head_drop(x)\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(state_dict, model=None):\n    if 'model' in state_dict:\n        state_dict = state_dict['model']\n    checkpoint_no_module = {}\n    for k, v in state_dict.items():\n        checkpoint_no_module[k.replace('module.', '')] = v\n    return checkpoint_no_module\n\n\ndef _create_cait(variant, pretrained=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', 3)\n    model = build_model_with_cfg(\n        Cait,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 384, 384), 'pool_size': None,\n        'crop_pct': 1.0, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.proj', 'classifier': 'head',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'cait_xxs24_224.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/XXS24_224.pth',\n        input_size=(3, 224, 224),\n    ),\n    'cait_xxs24_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        
url='https://dl.fbaipublicfiles.com/deit/XXS24_384.pth',\n    ),\n    'cait_xxs36_224.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/XXS36_224.pth',\n        input_size=(3, 224, 224),\n    ),\n    'cait_xxs36_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/XXS36_384.pth',\n    ),\n    'cait_xs24_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/XS24_384.pth',\n    ),\n    'cait_s24_224.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/S24_224.pth',\n        input_size=(3, 224, 224),\n    ),\n    'cait_s24_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/S24_384.pth',\n    ),\n    'cait_s36_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/S36_384.pth',\n    ),\n    'cait_m36_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/M36_384.pth',\n    ),\n    'cait_m48_448.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/M48_448.pth',\n        input_size=(3, 448, 448),\n    ),\n})\n\n\n@register_model\ndef cait_xxs24_224(pretrained=False, **kwargs) -> Cait:\n    model_args = dict(patch_size=16, embed_dim=192, depth=24, num_heads=4, init_values=1e-5)\n    model = _create_cait('cait_xxs24_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef cait_xxs24_384(pretrained=False, **kwargs) -> Cait:\n    model_args = dict(patch_size=16, embed_dim=192, depth=24, num_heads=4, init_values=1e-5)\n    model = _create_cait('cait_xxs24_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef cait_xxs36_224(pretrained=False, **kwargs) -> Cait:\n    model_args = dict(patch_size=16, 
embed_dim=192, depth=36, num_heads=4, init_values=1e-5)\n    model = _create_cait('cait_xxs36_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef cait_xxs36_384(pretrained=False, **kwargs) -> Cait:\n    model_args = dict(patch_size=16, embed_dim=192, depth=36, num_heads=4, init_values=1e-5)\n    model = _create_cait('cait_xxs36_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef cait_xs24_384(pretrained=False, **kwargs) -> Cait:\n    model_args = dict(patch_size=16, embed_dim=288, depth=24, num_heads=6, init_values=1e-5)\n    model = _create_cait('cait_xs24_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef cait_s24_224(pretrained=False, **kwargs) -> Cait:\n    model_args = dict(patch_size=16, embed_dim=384, depth=24, num_heads=8, init_values=1e-5)\n    model = _create_cait('cait_s24_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef cait_s24_384(pretrained=False, **kwargs) -> Cait:\n    model_args = dict(patch_size=16, embed_dim=384, depth=24, num_heads=8, init_values=1e-5)\n    model = _create_cait('cait_s24_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef cait_s36_384(pretrained=False, **kwargs) -> Cait:\n    model_args = dict(patch_size=16, embed_dim=384, depth=36, num_heads=8, init_values=1e-6)\n    model = _create_cait('cait_s36_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef cait_m36_384(pretrained=False, **kwargs) -> Cait:\n    model_args = dict(patch_size=16, embed_dim=768, depth=36, num_heads=16, init_values=1e-6)\n    model = _create_cait('cait_m36_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef cait_m48_448(pretrained=False, **kwargs) -> Cait:\n    model_args = dict(patch_size=16, 
embed_dim=768, depth=48, num_heads=16, init_values=1e-6)\n    model = _create_cait('cait_m48_448', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n"
  },
  {
    "path": "timm/models/coat.py",
    "content": "\"\"\"\nCoaT architecture.\n\nPaper: Co-Scale Conv-Attentional Image Transformers - https://arxiv.org/abs/2104.06399\n\nOfficial CoaT code at: https://github.com/mlpc-ucsd/CoaT\n\nModified from timm/models/vision_transformer.py\n\"\"\"\nfrom typing import List, Optional, Tuple, Union, Type, Any\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import PatchEmbed, Mlp, DropPath, to_2tuple, trunc_normal_, _assert, LayerNorm\nfrom ._builder import build_model_with_cfg\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['CoaT']\n\n\nclass ConvRelPosEnc(nn.Module):\n    \"\"\" Convolutional relative position encoding. \"\"\"\n    def __init__(\n            self,\n            head_chs: int,\n            num_heads: int,\n            window: Union[int, dict],\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Initialization.\n            Ch: Channels per head.\n            h: Number of heads.\n            window: Window size(s) in convolutional relative positional encoding. It can have two forms:\n                1. An integer of window size, which assigns all attention heads with the same window s\n                    size in ConvRelPosEnc.\n                2. A dict mapping window size to #attention head splits (\n                    e.g. 
{window size 1: #attention head split 1, window size 2: #attention head split 2})\n                    It will apply different window size to the attention head splits.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        if isinstance(window, int):\n            # Set the same window size for all attention heads.\n            window = {window: num_heads}\n            self.window = window\n        elif isinstance(window, dict):\n            self.window = window\n        else:\n            raise ValueError()\n\n        self.conv_list = nn.ModuleList()\n        self.head_splits = []\n        for cur_window, cur_head_split in window.items():\n            dilation = 1\n            # Determine padding size.\n            # Ref: https://discuss.pytorch.org/t/how-to-keep-the-shape-of-input-and-output-same-when-dilation-conv/14338\n            padding_size = (cur_window + (cur_window - 1) * (dilation - 1)) // 2\n            cur_conv = nn.Conv2d(\n                cur_head_split * head_chs,\n                cur_head_split * head_chs,\n                kernel_size=(cur_window, cur_window),\n                padding=(padding_size, padding_size),\n                dilation=(dilation, dilation),\n                groups=cur_head_split * head_chs,\n                **dd,\n            )\n            self.conv_list.append(cur_conv)\n            self.head_splits.append(cur_head_split)\n        self.channel_splits = [x * head_chs for x in self.head_splits]\n\n    def forward(self, q, v, size: Tuple[int, int]):\n        B, num_heads, N, C = q.shape\n        H, W = size\n        _assert(N == 1 + H * W, '')\n\n        # Convolutional relative position encoding.\n        q_img = q[:, :, 1:, :]  # [B, h, H*W, Ch]\n        v_img = v[:, :, 1:, :]  # [B, h, H*W, Ch]\n\n        v_img = v_img.transpose(-1, -2).reshape(B, num_heads * C, H, W)\n        v_img_list = torch.split(v_img, self.channel_splits, dim=1)  # Split according to channels\n        
conv_v_img_list = []\n        for i, conv in enumerate(self.conv_list):\n            conv_v_img_list.append(conv(v_img_list[i]))\n        conv_v_img = torch.cat(conv_v_img_list, dim=1)\n        conv_v_img = conv_v_img.reshape(B, num_heads, C, H * W).transpose(-1, -2)\n\n        EV_hat = q_img * conv_v_img\n        EV_hat = F.pad(EV_hat, (0, 0, 1, 0, 0, 0))  # [B, h, N, Ch].\n        return EV_hat\n\n\nclass FactorAttnConvRelPosEnc(nn.Module):\n    \"\"\" Factorized attention with convolutional relative position encoding class. \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            shared_crpe: Optional[Any] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_heads = num_heads\n        head_dim = dim // num_heads\n        self.scale = head_dim ** -0.5\n\n        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)  # Note: attn_drop is actually not used.\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n        # Shared convolutional relative position encoding.\n        self.crpe = shared_crpe\n\n    def forward(self, x, size: Tuple[int, int]):\n        B, N, C = x.shape\n\n        # Generate Q, K, V.\n        qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4)\n        q, k, v = qkv.unbind(0)  # [B, h, N, Ch]\n\n        # Factorized attention.\n        k_softmax = k.softmax(dim=2)\n        factor_att = k_softmax.transpose(-1, -2) @ v\n        factor_att = q @ factor_att\n\n        # Convolutional relative position encoding.\n        crpe = self.crpe(q, v, size=size)  # [B, h, N, Ch]\n\n        # Merge and reshape.\n        x = self.scale * factor_att + 
crpe\n        x = x.transpose(1, 2).reshape(B, N, C)  # [B, h, N, Ch] -> [B, N, h, Ch] -> [B, N, C]\n\n        # Output projection.\n        x = self.proj(x)\n        x = self.proj_drop(x)\n\n        return x\n\n\nclass ConvPosEnc(nn.Module):\n    \"\"\" Convolutional Position Encoding.\n        Note: This module is similar to the conditional position encoding in CPVT.\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            k: int = 3,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.proj = nn.Conv2d(dim, dim, k, 1, k//2, groups=dim, **dd)\n\n    def forward(self, x, size: Tuple[int, int]):\n        B, N, C = x.shape\n        H, W = size\n        _assert(N == 1 + H * W, '')\n\n        # Extract CLS token and image tokens.\n        cls_token, img_tokens = x[:, :1], x[:, 1:]  # [B, 1, C], [B, H*W, C]\n\n        # Depthwise convolution.\n        feat = img_tokens.transpose(1, 2).view(B, C, H, W)\n        x = self.proj(feat) + feat\n        x = x.flatten(2).transpose(1, 2)\n\n        # Combine with CLS token.\n        x = torch.cat((cls_token, x), dim=1)\n\n        return x\n\n\nclass SerialBlock(nn.Module):\n    \"\"\" Serial block class.\n        Note: In this implementation, each serial block only contains a conv-attention and a FFN (MLP) module. 
\"\"\"\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            shared_cpe: Optional[Any] = None,\n            shared_crpe: Optional[Any] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        # Conv-Attention.\n        self.cpe = shared_cpe\n\n        self.norm1 = norm_layer(dim, **dd)\n        self.factoratt_crpe = FactorAttnConvRelPosEnc(\n            dim,\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            shared_crpe=shared_crpe,\n            **dd,\n        )\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        # MLP.\n        self.norm2 = norm_layer(dim, **dd)\n        mlp_hidden_dim = int(dim * mlp_ratio)\n        self.mlp = Mlp(\n            in_features=dim,\n            hidden_features=mlp_hidden_dim,\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n\n    def forward(self, x, size: Tuple[int, int]):\n        # Conv-Attention.\n        x = self.cpe(x, size)\n        cur = self.norm1(x)\n        cur = self.factoratt_crpe(cur, size)\n        x = x + self.drop_path(cur)\n\n        # MLP.\n        cur = self.norm2(x)\n        cur = self.mlp(cur)\n        x = x + self.drop_path(cur)\n\n        return x\n\n\nclass ParallelBlock(nn.Module):\n    \"\"\" Parallel block class. 
\"\"\"\n    def __init__(\n            self,\n            dims: List[int],\n            num_heads: int,\n            mlp_ratios: List[float] = None,\n            qkv_bias: bool = False,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            shared_crpes: Optional[List[Any]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if mlp_ratios is None:\n            mlp_ratios = []\n\n        # Conv-Attention.\n        self.norm12 = norm_layer(dims[1], **dd)\n        self.norm13 = norm_layer(dims[2], **dd)\n        self.norm14 = norm_layer(dims[3], **dd)\n        self.factoratt_crpe2 = FactorAttnConvRelPosEnc(\n            dims[1],\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            shared_crpe=shared_crpes[1],\n            **dd,\n        )\n        self.factoratt_crpe3 = FactorAttnConvRelPosEnc(\n            dims[2],\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            shared_crpe=shared_crpes[2],\n            **dd,\n        )\n        self.factoratt_crpe4 = FactorAttnConvRelPosEnc(\n            dims[3],\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            shared_crpe=shared_crpes[3],\n            **dd,\n        )\n        self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n        # MLP.\n        self.norm22 = norm_layer(dims[1], **dd)\n        self.norm23 = norm_layer(dims[2], **dd)\n        self.norm24 = norm_layer(dims[3], **dd)\n        # In parallel block, we assume dimensions are the same and share the linear transformation.\n        assert dims[1] == dims[2] == dims[3]\n        assert mlp_ratios[1] == mlp_ratios[2] == mlp_ratios[3]\n        mlp_hidden_dim = int(dims[1] * mlp_ratios[1])\n        self.mlp2 = self.mlp3 = self.mlp4 = Mlp(\n            in_features=dims[1],\n            hidden_features=mlp_hidden_dim,\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n\n    def upsample(self, x, factor: float, size: Tuple[int, int]):\n        \"\"\" Feature map up-sampling. \"\"\"\n        return self.interpolate(x, scale_factor=factor, size=size)\n\n    def downsample(self, x, factor: float, size: Tuple[int, int]):\n        \"\"\" Feature map down-sampling. \"\"\"\n        return self.interpolate(x, scale_factor=1.0/factor, size=size)\n\n    def interpolate(self, x, scale_factor: float, size: Tuple[int, int]):\n        \"\"\" Feature map interpolation. 
\"\"\"\n        B, N, C = x.shape\n        H, W = size\n        _assert(N == 1 + H * W, '')\n\n        cls_token = x[:, :1, :]\n        img_tokens = x[:, 1:, :]\n\n        img_tokens = img_tokens.transpose(1, 2).reshape(B, C, H, W)\n        img_tokens = F.interpolate(\n            img_tokens,\n            scale_factor=scale_factor,\n            recompute_scale_factor=False,\n            mode='bilinear',\n            align_corners=False,\n        )\n        img_tokens = img_tokens.reshape(B, C, -1).transpose(1, 2)\n\n        out = torch.cat((cls_token, img_tokens), dim=1)\n\n        return out\n\n    def forward(self, x1, x2, x3, x4, sizes: List[Tuple[int, int]]):\n        _, S2, S3, S4 = sizes\n        cur2 = self.norm12(x2)\n        cur3 = self.norm13(x3)\n        cur4 = self.norm14(x4)\n        cur2 = self.factoratt_crpe2(cur2, size=S2)\n        cur3 = self.factoratt_crpe3(cur3, size=S3)\n        cur4 = self.factoratt_crpe4(cur4, size=S4)\n        upsample3_2 = self.upsample(cur3, factor=2., size=S3)\n        upsample4_3 = self.upsample(cur4, factor=2., size=S4)\n        upsample4_2 = self.upsample(cur4, factor=4., size=S4)\n        downsample2_3 = self.downsample(cur2, factor=2., size=S2)\n        downsample3_4 = self.downsample(cur3, factor=2., size=S3)\n        downsample2_4 = self.downsample(cur2, factor=4., size=S2)\n        cur2 = cur2 + upsample3_2 + upsample4_2\n        cur3 = cur3 + upsample4_3 + downsample2_3\n        cur4 = cur4 + downsample3_4 + downsample2_4\n        x2 = x2 + self.drop_path(cur2)\n        x3 = x3 + self.drop_path(cur3)\n        x4 = x4 + self.drop_path(cur4)\n\n        # MLP.\n        cur2 = self.norm22(x2)\n        cur3 = self.norm23(x3)\n        cur4 = self.norm24(x4)\n        cur2 = self.mlp2(cur2)\n        cur3 = self.mlp3(cur3)\n        cur4 = self.mlp4(cur4)\n        x2 = x2 + self.drop_path(cur2)\n        x3 = x3 + self.drop_path(cur3)\n        x4 = x4 + self.drop_path(cur4)\n\n        return x1, x2, x3, x4\n\n\nclass 
CoaT(nn.Module):\n    \"\"\" CoaT class. \"\"\"\n    def __init__(\n            self,\n            img_size: int = 224,\n            patch_size: int = 16,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            embed_dims: Tuple[int, int, int, int] = (64, 128, 320, 512),\n            serial_depths: Tuple[int, int, int, int] = (3, 4, 6, 3),\n            parallel_depth: int = 0,\n            num_heads: int = 8,\n            mlp_ratios: Tuple[float, float, float, float] = (4, 4, 4, 4),\n            qkv_bias: bool = True,\n            drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            norm_layer: Type[nn.Module] = LayerNorm,\n            return_interm_layers: bool = False,\n            out_features: Optional[List[str]] = None,\n            crpe_window: Optional[dict] = None,\n            global_pool: str = 'token',\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert global_pool in ('token', 'avg')\n        crpe_window = crpe_window or {3: 2, 5: 3, 7: 3}\n        self.return_interm_layers = return_interm_layers\n        self.out_features = out_features\n        self.embed_dims = embed_dims\n        self.num_features = self.head_hidden_size = embed_dims[-1]\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n\n        # Patch embeddings.\n        img_size = to_2tuple(img_size)\n        self.patch_embed1 = PatchEmbed(\n            img_size=img_size, patch_size=patch_size, in_chans=in_chans,\n            embed_dim=embed_dims[0], norm_layer=nn.LayerNorm, **dd)\n        self.patch_embed2 = PatchEmbed(\n            img_size=[x // 4 for x in img_size], patch_size=2, in_chans=embed_dims[0],\n            embed_dim=embed_dims[1], norm_layer=nn.LayerNorm, **dd)\n        self.patch_embed3 
= PatchEmbed(\n            img_size=[x // 8 for x in img_size], patch_size=2, in_chans=embed_dims[1],\n            embed_dim=embed_dims[2], norm_layer=nn.LayerNorm, **dd)\n        self.patch_embed4 = PatchEmbed(\n            img_size=[x // 16 for x in img_size], patch_size=2, in_chans=embed_dims[2],\n            embed_dim=embed_dims[3], norm_layer=nn.LayerNorm, **dd)\n\n        # Class tokens.\n        self.cls_token1 = nn.Parameter(torch.zeros(1, 1, embed_dims[0], **dd))\n        self.cls_token2 = nn.Parameter(torch.zeros(1, 1, embed_dims[1], **dd))\n        self.cls_token3 = nn.Parameter(torch.zeros(1, 1, embed_dims[2], **dd))\n        self.cls_token4 = nn.Parameter(torch.zeros(1, 1, embed_dims[3], **dd))\n\n        # Convolutional position encodings.\n        self.cpe1 = ConvPosEnc(dim=embed_dims[0], k=3, **dd)\n        self.cpe2 = ConvPosEnc(dim=embed_dims[1], k=3, **dd)\n        self.cpe3 = ConvPosEnc(dim=embed_dims[2], k=3, **dd)\n        self.cpe4 = ConvPosEnc(dim=embed_dims[3], k=3, **dd)\n\n        # Convolutional relative position encodings.\n        self.crpe1 = ConvRelPosEnc(head_chs=embed_dims[0] // num_heads, num_heads=num_heads, window=crpe_window, **dd)\n        self.crpe2 = ConvRelPosEnc(head_chs=embed_dims[1] // num_heads, num_heads=num_heads, window=crpe_window, **dd)\n        self.crpe3 = ConvRelPosEnc(head_chs=embed_dims[2] // num_heads, num_heads=num_heads, window=crpe_window, **dd)\n        self.crpe4 = ConvRelPosEnc(head_chs=embed_dims[3] // num_heads, num_heads=num_heads, window=crpe_window, **dd)\n\n        dpr = drop_path_rate\n        skwargs = dict(\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            proj_drop=proj_drop_rate,\n            attn_drop=attn_drop_rate,\n            drop_path=dpr,\n            norm_layer=norm_layer,\n        )\n\n        # Serial blocks 1.\n        self.serial_blocks1 = nn.ModuleList([\n            SerialBlock(\n                dim=embed_dims[0],\n                
mlp_ratio=mlp_ratios[0],\n                shared_cpe=self.cpe1,\n                shared_crpe=self.crpe1,\n                **skwargs,\n                **dd,\n            )\n            for _ in range(serial_depths[0])]\n        )\n\n        # Serial blocks 2.\n        self.serial_blocks2 = nn.ModuleList([\n            SerialBlock(\n                dim=embed_dims[1],\n                mlp_ratio=mlp_ratios[1],\n                shared_cpe=self.cpe2,\n                shared_crpe=self.crpe2,\n                **skwargs,\n                **dd,\n            )\n            for _ in range(serial_depths[1])]\n        )\n\n        # Serial blocks 3.\n        self.serial_blocks3 = nn.ModuleList([\n            SerialBlock(\n                dim=embed_dims[2],\n                mlp_ratio=mlp_ratios[2],\n                shared_cpe=self.cpe3,\n                shared_crpe=self.crpe3,\n                **skwargs,\n                **dd,\n            )\n            for _ in range(serial_depths[2])]\n        )\n\n        # Serial blocks 4.\n        self.serial_blocks4 = nn.ModuleList([\n            SerialBlock(\n                dim=embed_dims[3],\n                mlp_ratio=mlp_ratios[3],\n                shared_cpe=self.cpe4,\n                shared_crpe=self.crpe4,\n                **skwargs,\n                **dd,\n            )\n            for _ in range(serial_depths[3])]\n        )\n\n        # Parallel blocks.\n        self.parallel_depth = parallel_depth\n        if self.parallel_depth > 0:\n            self.parallel_blocks = nn.ModuleList([\n                ParallelBlock(\n                    dims=embed_dims,\n                    mlp_ratios=mlp_ratios,\n                    shared_crpes=(self.crpe1, self.crpe2, self.crpe3, self.crpe4),\n                    **skwargs,\n                    **dd,\n                )\n                for _ in range(parallel_depth)]\n            )\n        else:\n            self.parallel_blocks = None\n\n        # Classification head(s).\n        if not 
self.return_interm_layers:\n            if self.parallel_blocks is not None:\n                self.norm2 = norm_layer(embed_dims[1], **dd)\n                self.norm3 = norm_layer(embed_dims[2], **dd)\n            else:\n                self.norm2 = self.norm3 = None\n            self.norm4 = norm_layer(embed_dims[3], **dd)\n\n            if self.parallel_depth > 0:\n                # CoaT series: Aggregate features of last three scales for classification.\n                assert embed_dims[1] == embed_dims[2] == embed_dims[3]\n                self.aggregate = torch.nn.Conv1d(in_channels=3, out_channels=1, kernel_size=1, **dd)\n                self.head_drop = nn.Dropout(drop_rate)\n                self.head = nn.Linear(self.num_features, num_classes, **dd) if num_classes > 0 else nn.Identity()\n            else:\n                # CoaT-Lite series: Use feature of last scale for classification.\n                self.aggregate = None\n                self.head_drop = nn.Dropout(drop_rate)\n                self.head = nn.Linear(self.num_features, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n        # Initialize weights.\n        trunc_normal_(self.cls_token1, std=.02)\n        trunc_normal_(self.cls_token2, std=.02)\n        trunc_normal_(self.cls_token3, std=.02)\n        trunc_normal_(self.cls_token4, std=.02)\n        self.apply(self._init_weights)\n\n    def _init_weights(self, m):\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=.02)\n            if isinstance(m, nn.Linear) and m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n        elif isinstance(m, nn.LayerNorm):\n            nn.init.constant_(m.bias, 0)\n            nn.init.constant_(m.weight, 1.0)\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {'cls_token1', 'cls_token2', 'cls_token3', 'cls_token4'}\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, 'gradient 
checkpointing not supported'\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem1=r'^cls_token1|patch_embed1|crpe1|cpe1',\n            serial_blocks1=r'^serial_blocks1\\.(\\d+)',\n            stem2=r'^cls_token2|patch_embed2|crpe2|cpe2',\n            serial_blocks2=r'^serial_blocks2\\.(\\d+)',\n            stem3=r'^cls_token3|patch_embed3|crpe3|cpe3',\n            serial_blocks3=r'^serial_blocks3\\.(\\d+)',\n            stem4=r'^cls_token4|patch_embed4|crpe4|cpe4',\n            serial_blocks4=r'^serial_blocks4\\.(\\d+)',\n            parallel_blocks=[  # FIXME (partially?) overlap parallel w/ serial blocks??\n                (r'^parallel_blocks\\.(\\d+)', None),\n                (r'^norm|aggregate', (99999,)),\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            assert global_pool in ('token', 'avg')\n            self.global_pool = global_pool\n        self.head = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity()\n\n    def forward_features(self, x0):\n        B = x0.shape[0]\n\n        # Serial blocks 1.\n        x1 = self.patch_embed1(x0)\n        H1, W1 = self.patch_embed1.grid_size\n        x1 = insert_cls(x1, self.cls_token1)\n        for blk in self.serial_blocks1:\n            x1 = blk(x1, size=(H1, W1))\n        x1_nocls = remove_cls(x1).reshape(B, H1, W1, -1).permute(0, 3, 1, 2).contiguous()\n\n        # Serial blocks 2.\n        x2 = self.patch_embed2(x1_nocls)\n        H2, W2 = self.patch_embed2.grid_size\n        x2 = insert_cls(x2, self.cls_token2)\n        for blk in self.serial_blocks2:\n            x2 = blk(x2, size=(H2, W2))\n        x2_nocls = remove_cls(x2).reshape(B, H2, W2, -1).permute(0, 3, 
1, 2).contiguous()\n\n        # Serial blocks 3.\n        x3 = self.patch_embed3(x2_nocls)\n        H3, W3 = self.patch_embed3.grid_size\n        x3 = insert_cls(x3, self.cls_token3)\n        for blk in self.serial_blocks3:\n            x3 = blk(x3, size=(H3, W3))\n        x3_nocls = remove_cls(x3).reshape(B, H3, W3, -1).permute(0, 3, 1, 2).contiguous()\n\n        # Serial blocks 4.\n        x4 = self.patch_embed4(x3_nocls)\n        H4, W4 = self.patch_embed4.grid_size\n        x4 = insert_cls(x4, self.cls_token4)\n        for blk in self.serial_blocks4:\n            x4 = blk(x4, size=(H4, W4))\n        x4_nocls = remove_cls(x4).reshape(B, H4, W4, -1).permute(0, 3, 1, 2).contiguous()\n\n        # Only serial blocks: Early return.\n        if self.parallel_blocks is None:\n            if not torch.jit.is_scripting() and self.return_interm_layers:\n                # Return intermediate features for down-stream tasks (e.g. Deformable DETR and Detectron2).\n                feat_out = {}\n                if 'x1_nocls' in self.out_features:\n                    feat_out['x1_nocls'] = x1_nocls\n                if 'x2_nocls' in self.out_features:\n                    feat_out['x2_nocls'] = x2_nocls\n                if 'x3_nocls' in self.out_features:\n                    feat_out['x3_nocls'] = x3_nocls\n                if 'x4_nocls' in self.out_features:\n                    feat_out['x4_nocls'] = x4_nocls\n                return feat_out\n            else:\n                # Return features for classification.\n                x4 = self.norm4(x4)\n                return x4\n\n        # Parallel blocks.\n        for blk in self.parallel_blocks:\n            x2, x3, x4 = self.cpe2(x2, (H2, W2)), self.cpe3(x3, (H3, W3)), self.cpe4(x4, (H4, W4))\n            x1, x2, x3, x4 = blk(x1, x2, x3, x4, sizes=[(H1, W1), (H2, W2), (H3, W3), (H4, W4)])\n\n        if not torch.jit.is_scripting() and self.return_interm_layers:\n            # Return intermediate features for down-stream 
tasks (e.g. Deformable DETR and Detectron2).\n            feat_out = {}\n            if 'x1_nocls' in self.out_features:\n                x1_nocls = remove_cls(x1).reshape(B, H1, W1, -1).permute(0, 3, 1, 2).contiguous()\n                feat_out['x1_nocls'] = x1_nocls\n            if 'x2_nocls' in self.out_features:\n                x2_nocls = remove_cls(x2).reshape(B, H2, W2, -1).permute(0, 3, 1, 2).contiguous()\n                feat_out['x2_nocls'] = x2_nocls\n            if 'x3_nocls' in self.out_features:\n                x3_nocls = remove_cls(x3).reshape(B, H3, W3, -1).permute(0, 3, 1, 2).contiguous()\n                feat_out['x3_nocls'] = x3_nocls\n            if 'x4_nocls' in self.out_features:\n                x4_nocls = remove_cls(x4).reshape(B, H4, W4, -1).permute(0, 3, 1, 2).contiguous()\n                feat_out['x4_nocls'] = x4_nocls\n            return feat_out\n        else:\n            x2 = self.norm2(x2)\n            x3 = self.norm3(x3)\n            x4 = self.norm4(x4)\n            return [x2, x3, x4]\n\n    def forward_head(self, x_feat: Union[torch.Tensor, List[torch.Tensor]], pre_logits: bool = False):\n        if isinstance(x_feat, list):\n            assert self.aggregate is not None\n            if self.global_pool == 'avg':\n                x = torch.cat([xl[:, 1:].mean(dim=1, keepdim=True) for xl in x_feat], dim=1)  # [B, 3, C]\n            else:\n                x = torch.stack([xl[:, 0] for xl in x_feat], dim=1)  # [B, 3, C]\n            x = self.aggregate(x).squeeze(dim=1)  # Shape: [B, C]\n        else:\n            x = x_feat[:, 1:].mean(dim=1) if self.global_pool == 'avg' else x_feat[:, 0]\n        x = self.head_drop(x)\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x) -> torch.Tensor:\n        if not torch.jit.is_scripting() and self.return_interm_layers:\n            # Return intermediate features (for down-stream tasks).\n            return self.forward_features(x)\n        else:\n            # Return 
features for classification.\n            x_feat = self.forward_features(x)\n            x = self.forward_head(x_feat)\n            return x\n\n\ndef insert_cls(x, cls_token):\n    \"\"\" Insert CLS token. \"\"\"\n    cls_tokens = cls_token.expand(x.shape[0], -1, -1)\n    x = torch.cat((cls_tokens, x), dim=1)\n    return x\n\n\ndef remove_cls(x):\n    \"\"\" Remove CLS token. \"\"\"\n    return x[:, 1:, :]\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    out_dict = {}\n    state_dict = state_dict.get('model', state_dict)\n    for k, v in state_dict.items():\n        # original model had unused norm layers, removing them requires filtering pretrained checkpoints\n        if k.startswith('norm1') or \\\n                (k.startswith('norm2') and getattr(model, 'norm2', None) is None) or \\\n                (k.startswith('norm3') and getattr(model, 'norm3', None) is None) or \\\n                (k.startswith('norm4') and getattr(model, 'norm4', None) is None) or \\\n                (k.startswith('aggregate') and getattr(model, 'aggregate', None) is None) or \\\n                (k.startswith('head') and getattr(model, 'head', None) is None):\n            continue\n        out_dict[k] = v\n    return out_dict\n\n\ndef _create_coat(variant, pretrained=False, default_cfg=None, **kwargs):\n    if kwargs.get('features_only', None):\n        raise RuntimeError('features_only not implemented for Vision Transformer models.')\n\n    model = build_model_with_cfg(\n        CoaT,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        **kwargs,\n    )\n    return model\n\n\ndef _cfg_coat(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed1.proj', 'classifier': 'head',\n    
    'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'coat_tiny.in1k': _cfg_coat(hf_hub_id='timm/'),\n    'coat_mini.in1k': _cfg_coat(hf_hub_id='timm/'),\n    'coat_small.in1k': _cfg_coat(hf_hub_id='timm/'),\n    'coat_lite_tiny.in1k': _cfg_coat(hf_hub_id='timm/'),\n    'coat_lite_mini.in1k': _cfg_coat(hf_hub_id='timm/'),\n    'coat_lite_small.in1k': _cfg_coat(hf_hub_id='timm/'),\n    'coat_lite_medium.in1k': _cfg_coat(hf_hub_id='timm/'),\n    'coat_lite_medium_384.in1k': _cfg_coat(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), crop_pct=1.0, crop_mode='squash',\n    ),\n})\n\n\n@register_model\ndef coat_tiny(pretrained=False, **kwargs) -> CoaT:\n    model_cfg = dict(\n        patch_size=4, embed_dims=[152, 152, 152, 152], serial_depths=[2, 2, 2, 2], parallel_depth=6)\n    model = _create_coat('coat_tiny', pretrained=pretrained, **dict(model_cfg, **kwargs))\n    return model\n\n\n@register_model\ndef coat_mini(pretrained=False, **kwargs) -> CoaT:\n    model_cfg = dict(\n        patch_size=4, embed_dims=[152, 216, 216, 216], serial_depths=[2, 2, 2, 2], parallel_depth=6)\n    model = _create_coat('coat_mini', pretrained=pretrained, **dict(model_cfg, **kwargs))\n    return model\n\n\n@register_model\ndef coat_small(pretrained=False, **kwargs) -> CoaT:\n    model_cfg = dict(\n        patch_size=4, embed_dims=[152, 320, 320, 320], serial_depths=[2, 2, 2, 2], parallel_depth=6, **kwargs)\n    model = _create_coat('coat_small', pretrained=pretrained, **dict(model_cfg, **kwargs))\n    return model\n\n\n@register_model\ndef coat_lite_tiny(pretrained=False, **kwargs) -> CoaT:\n    model_cfg = dict(\n        patch_size=4, embed_dims=[64, 128, 256, 320], serial_depths=[2, 2, 2, 2], mlp_ratios=[8, 8, 4, 4])\n    model = _create_coat('coat_lite_tiny', pretrained=pretrained, **dict(model_cfg, **kwargs))\n    return model\n\n\n@register_model\ndef coat_lite_mini(pretrained=False, **kwargs) -> CoaT:\n    model_cfg = 
dict(\n        patch_size=4, embed_dims=[64, 128, 320, 512], serial_depths=[2, 2, 2, 2], mlp_ratios=[8, 8, 4, 4])\n    model = _create_coat('coat_lite_mini', pretrained=pretrained, **dict(model_cfg, **kwargs))\n    return model\n\n\n@register_model\ndef coat_lite_small(pretrained=False, **kwargs) -> CoaT:\n    model_cfg = dict(\n        patch_size=4, embed_dims=[64, 128, 320, 512], serial_depths=[3, 4, 6, 3], mlp_ratios=[8, 8, 4, 4])\n    model = _create_coat('coat_lite_small', pretrained=pretrained, **dict(model_cfg, **kwargs))\n    return model\n\n\n@register_model\ndef coat_lite_medium(pretrained=False, **kwargs) -> CoaT:\n    model_cfg = dict(\n        patch_size=4, embed_dims=[128, 256, 320, 512], serial_depths=[3, 6, 10, 8])\n    model = _create_coat('coat_lite_medium', pretrained=pretrained, **dict(model_cfg, **kwargs))\n    return model\n\n\n@register_model\ndef coat_lite_medium_384(pretrained=False, **kwargs) -> CoaT:\n    model_cfg = dict(\n        img_size=384, patch_size=4, embed_dims=[128, 256, 320, 512], serial_depths=[3, 6, 10, 8])\n    model = _create_coat('coat_lite_medium_384', pretrained=pretrained, **dict(model_cfg, **kwargs))\n    return model\n"
  },
  {
    "path": "timm/models/convit.py",
    "content": "\"\"\" ConViT Model\n\n@article{d2021convit,\n  title={ConViT: Improving Vision Transformers with Soft Convolutional Inductive Biases},\n  author={d'Ascoli, St{\\'e}phane and Touvron, Hugo and Leavitt, Matthew and Morcos, Ari and Biroli, Giulio and Sagun, Levent},\n  journal={arXiv preprint arXiv:2103.10697},\n  year={2021}\n}\n\nPaper link: https://arxiv.org/abs/2103.10697\nOriginal code: https://github.com/facebookresearch/convit, original copyright below\n\nModifications and additions for timm hacked together by / Copyright 2021, Ross Wightman\n\"\"\"\n# Copyright (c) 2015-present, Facebook, Inc.\n# All rights reserved.\n#\n# This source code is licensed under the CC-by-NC license found in the\n# LICENSE file in the root directory of this source tree.\n#\n'''These modules are adapted from those of timm, see\nhttps://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vision_transformer.py\n'''\nfrom typing import Optional, Union, Type, Any\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import DropPath, calculate_drop_path_rates, trunc_normal_, PatchEmbed, Mlp, LayerNorm, HybridEmbed\nfrom ._builder import build_model_with_cfg\nfrom ._features_fx import register_notrace_module\nfrom ._registry import register_model, generate_default_cfgs\n\n\n__all__ = ['ConVit']\n\n\n@register_notrace_module  # reason: FX can't symbolically trace control flow in forward method\nclass GPSA(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            locality_strength: float = 1.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_heads = num_heads\n        self.dim = dim\n        head_dim = dim // 
num_heads\n        self.scale = head_dim ** -0.5\n        self.locality_strength = locality_strength\n\n        self.qk = nn.Linear(dim, dim * 2, bias=qkv_bias, **dd)\n        self.v = nn.Linear(dim, dim, bias=qkv_bias, **dd)\n\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.pos_proj = nn.Linear(3, num_heads, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n        self.gating_param = nn.Parameter(torch.ones(self.num_heads, **dd))\n        self.rel_indices: torch.Tensor = torch.zeros(1, 1, 1, 3, **dd)  # silly torchscript hack, won't work with None\n\n    def forward(self, x):\n        B, N, C = x.shape\n        if self.rel_indices is None or self.rel_indices.shape[1] != N:\n            self.rel_indices = self.get_rel_indices(N)\n        attn = self.get_attention(x)\n        v = self.v(x).reshape(B, N, self.num_heads, C // self.num_heads).permute(0, 2, 1, 3)\n        x = (attn @ v).transpose(1, 2).reshape(B, N, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n    def get_attention(self, x):\n        B, N, C = x.shape\n        qk = self.qk(x).reshape(B, N, 2, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4)\n        q, k = qk[0], qk[1]\n        pos_score = self.rel_indices.expand(B, -1, -1, -1)\n        pos_score = self.pos_proj(pos_score).permute(0, 3, 1, 2)\n        patch_score = (q @ k.transpose(-2, -1)) * self.scale\n        patch_score = patch_score.softmax(dim=-1)\n        pos_score = pos_score.softmax(dim=-1)\n\n        gating = self.gating_param.view(1, -1, 1, 1)\n        attn = (1. 
- torch.sigmoid(gating)) * patch_score + torch.sigmoid(gating) * pos_score\n        attn /= attn.sum(dim=-1).unsqueeze(-1)\n        attn = self.attn_drop(attn)\n        return attn\n\n    def get_attention_map(self, x, return_map=False):\n        attn_map = self.get_attention(x).mean(0)  # average over batch\n        distances = self.rel_indices.squeeze()[:, :, -1] ** .5\n        dist = torch.einsum('nm,hnm->h', (distances, attn_map)) / distances.size(0)\n        if return_map:\n            return dist, attn_map\n        else:\n            return dist\n\n    def local_init(self):\n        self.v.weight.data.copy_(torch.eye(self.dim))\n        locality_distance = 1  # max(1,1/locality_strength**.5)\n\n        kernel_size = int(self.num_heads ** .5)\n        center = (kernel_size - 1) / 2 if kernel_size % 2 == 0 else kernel_size // 2\n        for h1 in range(kernel_size):\n            for h2 in range(kernel_size):\n                position = h1 + kernel_size * h2\n                self.pos_proj.weight.data[position, 2] = -1\n                self.pos_proj.weight.data[position, 1] = 2 * (h1 - center) * locality_distance\n                self.pos_proj.weight.data[position, 0] = 2 * (h2 - center) * locality_distance\n        self.pos_proj.weight.data *= self.locality_strength\n\n    def get_rel_indices(self, num_patches: int) -> torch.Tensor:\n        img_size = int(num_patches ** .5)\n        rel_indices = torch.zeros(1, num_patches, num_patches, 3)\n        ind = (\n                torch.arange(img_size, dtype=torch.float32).view(1, -1)\n                - torch.arange(img_size, dtype=torch.float32).view(-1, 1)\n        )\n        indx = ind.repeat(img_size, img_size)\n        indy = ind.repeat_interleave(img_size, dim=0).repeat_interleave(img_size, dim=1)\n        indd = indx ** 2 + indy ** 2\n        rel_indices[:, :, :, 2] = indd.unsqueeze(0)\n        rel_indices[:, :, :, 1] = indy.unsqueeze(0)\n        rel_indices[:, :, :, 0] = indx.unsqueeze(0)\n        device = 
self.qk.weight.device\n        dtype = self.qk.weight.dtype\n        return rel_indices.to(device=device, dtype=dtype)\n\n\nclass MHSA(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_heads = num_heads\n        head_dim = dim // num_heads\n        self.scale = head_dim ** -0.5\n\n        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n    def get_attention_map(self, x, return_map=False):\n        B, N, C = x.shape\n        qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4)\n        q, k, v = qkv[0], qkv[1], qkv[2]\n        attn_map = (q @ k.transpose(-2, -1)) * self.scale\n        attn_map = attn_map.softmax(dim=-1).mean(0)\n\n        img_size = int(N ** .5)\n        ind = (\n                torch.arange(img_size, dtype=torch.float32).view(1, -1)\n                - torch.arange(img_size, dtype=torch.float32).view(-1, 1)\n        )\n        indx = ind.repeat(img_size, img_size)\n        indy = ind.repeat_interleave(img_size, dim=0).repeat_interleave(img_size, dim=1)\n        indd = indx ** 2 + indy ** 2\n        distances = indd ** .5\n        distances = distances.to(attn_map.device, attn_map.dtype)\n\n        dist = torch.einsum('nm,hnm->h', (distances, attn_map)) / N\n        if return_map:\n            return dist, attn_map\n        else:\n            return dist\n\n    def forward(self, x):\n        B, N, C = x.shape\n        qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4)\n        q, k, v = 
qkv.unbind(0)\n\n        attn = (q @ k.transpose(-2, -1)) * self.scale\n        attn = attn.softmax(dim=-1)\n        attn = self.attn_drop(attn)\n\n        x = (attn @ v).transpose(1, 2).reshape(B, N, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n\nclass Block(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = LayerNorm,\n            use_gpsa: bool = True,\n            locality_strength: float = 1.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.norm1 = norm_layer(dim, **dd)\n        self.use_gpsa = use_gpsa\n        if self.use_gpsa:\n            self.attn = GPSA(\n                dim,\n                num_heads=num_heads,\n                qkv_bias=qkv_bias,\n                attn_drop=attn_drop,\n                proj_drop=proj_drop,\n                locality_strength=locality_strength,\n                **dd,\n            )\n        else:\n            self.attn = MHSA(\n                dim,\n                num_heads=num_heads,\n                qkv_bias=qkv_bias,\n                attn_drop=attn_drop,\n                proj_drop=proj_drop,\n                **dd,\n            )\n        self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n        self.norm2 = norm_layer(dim, **dd)\n        mlp_hidden_dim = int(dim * mlp_ratio)\n        self.mlp = Mlp(\n            in_features=dim,\n            hidden_features=mlp_hidden_dim,\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n\n    def forward(self, x):\n        x = x + self.drop_path(self.attn(self.norm1(x)))\n        x = x + self.drop_path(self.mlp(self.norm2(x)))\n        return x\n\n\nclass ConVit(nn.Module):\n    \"\"\" Vision Transformer with support for patch or hybrid CNN input stage\n    \"\"\"\n\n    def __init__(\n            self,\n            img_size: int = 224,\n            patch_size: int = 16,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'token',\n            embed_dim: int = 768,\n            depth: int = 12,\n            num_heads: int = 12,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            drop_rate: float = 0.,\n            pos_drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            hybrid_backbone: Optional[Any] = None,\n            norm_layer: Type[nn.Module] = LayerNorm,\n            local_up_to_layer: int = 3,\n            locality_strength: float = 1.,\n            use_pos_embed: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert global_pool in ('', 'avg', 'token')\n        embed_dim *= num_heads\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.local_up_to_layer = local_up_to_layer\n        self.num_features = self.head_hidden_size = self.embed_dim = embed_dim  # for consistency with other models\n        self.locality_strength = locality_strength\n        self.use_pos_embed = 
use_pos_embed\n\n        if hybrid_backbone is not None:\n            self.patch_embed = HybridEmbed(\n                hybrid_backbone,\n                img_size=img_size,\n                in_chans=in_chans,\n                embed_dim=embed_dim,\n                **dd,\n            )\n        else:\n            self.patch_embed = PatchEmbed(\n                img_size=img_size,\n                patch_size=patch_size,\n                in_chans=in_chans,\n                embed_dim=embed_dim,\n                **dd,\n            )\n        num_patches = self.patch_embed.num_patches\n        self.num_patches = num_patches\n\n        self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim, **dd))\n        self.pos_drop = nn.Dropout(p=pos_drop_rate)\n\n        if self.use_pos_embed:\n            self.pos_embed = nn.Parameter(torch.zeros(1, num_patches, embed_dim, **dd))\n            trunc_normal_(self.pos_embed, std=.02)\n\n        dpr = calculate_drop_path_rates(drop_path_rate, depth)  # stochastic depth decay rule\n        self.blocks = nn.ModuleList([\n            Block(\n                dim=embed_dim,\n                num_heads=num_heads,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dpr[i],\n                norm_layer=norm_layer,\n                use_gpsa=i < local_up_to_layer,\n                locality_strength=locality_strength,\n                **dd,\n            ) for i in range(depth)])\n        self.norm = norm_layer(embed_dim, **dd)\n\n        # Classifier head\n        self.feature_info = [dict(num_chs=embed_dim, reduction=0, module='head')]\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = nn.Linear(embed_dim, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n        trunc_normal_(self.cls_token, std=.02)\n        self.apply(self._init_weights)\n        for n, m in self.named_modules():\n    
        if hasattr(m, 'local_init'):\n                m.local_init()\n\n    def _init_weights(self, m):\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=.02)\n            if isinstance(m, nn.Linear) and m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n        elif isinstance(m, nn.LayerNorm):\n            nn.init.constant_(m.bias, 0)\n            nn.init.constant_(m.weight, 1.0)\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {'pos_embed', 'cls_token'}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^cls_token|pos_embed|patch_embed',  # stem and embed\n            blocks=[(r'^blocks\\.(\\d+)', None), (r'^norm', (99999,))]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, 'gradient checkpointing not supported'\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            assert global_pool in ('', 'token', 'avg')\n            self.global_pool = global_pool\n        self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity()\n\n    def forward_features(self, x):\n        x = self.patch_embed(x)\n        if self.use_pos_embed:\n            x = x + self.pos_embed\n        x = self.pos_drop(x)\n        cls_tokens = self.cls_token.expand(x.shape[0], -1, -1)\n        for u, blk in enumerate(self.blocks):\n            if u == self.local_up_to_layer:\n                x = torch.cat((cls_tokens, x), dim=1)\n            x = blk(x)\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        if self.global_pool:\n            x = x[:, 1:].mean(dim=1) if self.global_pool == 'avg' else x[:, 0]\n  
      x = self.head_drop(x)\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_convit(variant, pretrained=False, **kwargs):\n    if kwargs.get('features_only', None):\n        raise RuntimeError('features_only not implemented for Vision Transformer models.')\n\n    return build_model_with_cfg(ConVit, variant, pretrained, **kwargs)\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD, 'fixed_input_size': True,\n        'first_conv': 'patch_embed.proj', 'classifier': 'head', 'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    # ConViT\n    'convit_tiny.fb_in1k': _cfg(hf_hub_id='timm/'),\n    'convit_small.fb_in1k': _cfg(hf_hub_id='timm/'),\n    'convit_base.fb_in1k': _cfg(hf_hub_id='timm/')\n})\n\n\n@register_model\ndef convit_tiny(pretrained=False, **kwargs) -> ConVit:\n    model_args = dict(\n        local_up_to_layer=10, locality_strength=1.0, embed_dim=48, num_heads=4)\n    model = _create_convit(variant='convit_tiny', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convit_small(pretrained=False, **kwargs) -> ConVit:\n    model_args = dict(\n        local_up_to_layer=10, locality_strength=1.0, embed_dim=48, num_heads=9)\n    model = _create_convit(variant='convit_small', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convit_base(pretrained=False, **kwargs) -> ConVit:\n    model_args = dict(\n        local_up_to_layer=10, locality_strength=1.0, embed_dim=48, num_heads=16)\n    model = _create_convit(variant='convit_base', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n"
  },
  {
    "path": "timm/models/convmixer.py",
    "content": "\"\"\" ConvMixer\n\n\"\"\"\nfrom typing import Optional, Type\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import SelectAdaptivePool2d\nfrom ._registry import register_model, generate_default_cfgs\nfrom ._builder import build_model_with_cfg\nfrom ._manipulate import checkpoint_seq\n\n__all__ = ['ConvMixer']\n\n\nclass Residual(nn.Module):\n    def __init__(self, fn: nn.Module):\n        super().__init__()\n        self.fn = fn\n\n    def forward(self, x):\n        return self.fn(x) + x\n\n\nclass ConvMixer(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            depth: int,\n            kernel_size: int = 9,\n            patch_size: int = 7,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            drop_rate: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            device=None,\n            dtype=None,\n            **kwargs,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.num_features = self.head_hidden_size = dim\n        self.grad_checkpointing = False\n\n        self.stem = nn.Sequential(\n            nn.Conv2d(in_chans, dim, kernel_size=patch_size, stride=patch_size, **dd),\n            act_layer(),\n            nn.BatchNorm2d(dim, **dd)\n        )\n        self.blocks = nn.Sequential(\n            *[nn.Sequential(\n                    Residual(nn.Sequential(\n                        nn.Conv2d(dim, dim, kernel_size, groups=dim, padding=\"same\", **dd),\n                        act_layer(),\n                        nn.BatchNorm2d(dim, **dd)\n                    )),\n                    nn.Conv2d(dim, dim, kernel_size=1, **dd),\n                    act_layer(),\n                    nn.BatchNorm2d(dim, **dd)\n            ) for i in 
range(depth)]\n        )\n        self.pooling = SelectAdaptivePool2d(pool_type=global_pool, flatten=True)\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = nn.Linear(dim, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(stem=r'^stem', blocks=r'^blocks\\.(\\d+)')\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            self.pooling = SelectAdaptivePool2d(pool_type=global_pool, flatten=True)\n        self.head = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity()\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.pooling(x)\n        x = self.head_drop(x)\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_convmixer(variant, pretrained=False, **kwargs):\n    if kwargs.get('features_only', None):\n        raise RuntimeError('features_only not implemented for ConvMixer models.')\n\n    return build_model_with_cfg(ConvMixer, variant, pretrained, **kwargs)\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': .96, 'interpolation': 'bicubic',\n        'mean': 
IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD, 'classifier': 'head',\n        'first_conv': 'stem.0', 'license': 'mit',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'convmixer_1536_20.in1k': _cfg(hf_hub_id='timm/'),\n    'convmixer_768_32.in1k': _cfg(hf_hub_id='timm/'),\n    'convmixer_1024_20_ks9_p14.in1k': _cfg(hf_hub_id='timm/')\n})\n\n\n\n@register_model\ndef convmixer_1536_20(pretrained=False, **kwargs) -> ConvMixer:\n    model_args = dict(dim=1536, depth=20, kernel_size=9, patch_size=7, **kwargs)\n    return _create_convmixer('convmixer_1536_20', pretrained, **model_args)\n\n\n@register_model\ndef convmixer_768_32(pretrained=False, **kwargs) -> ConvMixer:\n    model_args = dict(dim=768, depth=32, kernel_size=7, patch_size=7, act_layer=nn.ReLU, **kwargs)\n    return _create_convmixer('convmixer_768_32', pretrained, **model_args)\n\n\n@register_model\ndef convmixer_1024_20_ks9_p14(pretrained=False, **kwargs) -> ConvMixer:\n    model_args = dict(dim=1024, depth=20, kernel_size=9, patch_size=14, **kwargs)\n    return _create_convmixer('convmixer_1024_20_ks9_p14', pretrained, **model_args)\n"
  },
  {
    "path": "timm/models/convnext.py",
    "content": "\"\"\" ConvNeXt\n\nPapers:\n* `A ConvNet for the 2020s` - https://arxiv.org/pdf/2201.03545.pdf\n@Article{liu2022convnet,\n  author  = {Zhuang Liu and Hanzi Mao and Chao-Yuan Wu and Christoph Feichtenhofer and Trevor Darrell and Saining Xie},\n  title   = {A ConvNet for the 2020s},\n  journal = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)},\n  year    = {2022},\n}\n\n* `ConvNeXt-V2 - Co-designing and Scaling ConvNets with Masked Autoencoders` - https://arxiv.org/abs/2301.00808\n@article{Woo2023ConvNeXtV2,\n  title={ConvNeXt V2: Co-designing and Scaling ConvNets with Masked Autoencoders},\n  author={Sanghyun Woo, Shoubhik Debnath, Ronghang Hu, Xinlei Chen, Zhuang Liu, In So Kweon and Saining Xie},\n  year={2023},\n  journal={arXiv preprint arXiv:2301.00808},\n}\n\nOriginal code and weights from:\n* https://github.com/facebookresearch/ConvNeXt, original copyright below\n* https://github.com/facebookresearch/ConvNeXt-V2, original copyright below\n\nModel defs atto, femto, pico, nano and _ols / _hnf variants are timm originals.\n\nModifications and additions for timm hacked together by / Copyright 2022, Ross Wightman\n\"\"\"\n# ConvNeXt\n# Copyright (c) Meta Platforms, Inc. and affiliates.\n# All rights reserved.\n# This source code is licensed under the MIT license\n\n# ConvNeXt-V2\n# Copyright (c) Meta Platforms, Inc. 
and affiliates.\n# All rights reserved.\n# This source code is licensed under the license found in the\n# LICENSE file in the root directory of this source tree (Attribution-NonCommercial 4.0 International (CC BY-NC 4.0))\n# No code was used directly from ConvNeXt-V2, however the weights are CC BY-NC 4.0 so beware if using commercially.\n\nfrom functools import partial\nfrom typing import Callable, Dict, List, Optional, Tuple, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, OPENAI_CLIP_MEAN, OPENAI_CLIP_STD\nfrom timm.layers import (\n    trunc_normal_,\n    AvgPool2dSame,\n    DropPath,\n    calculate_drop_path_rates,\n    Mlp,\n    GlobalResponseNormMlp,\n    LayerNorm2d,\n    LayerNorm,\n    RmsNorm2d,\n    RmsNorm,\n    SimpleNorm2d,\n    SimpleNorm,\n    create_conv2d,\n    get_act_layer,\n    get_norm_layer,\n    make_divisible,\n    to_ntuple,\n    NormMlpClassifierHead,\n    ClassifierHead,\n)\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import named_apply, checkpoint_seq\nfrom ._registry import generate_default_cfgs, register_model, register_model_deprecations\n\n__all__ = ['ConvNeXt']  # model_registry will add each entrypoint fn to this\n\n\nclass Downsample(nn.Module):\n    \"\"\"Downsample module for ConvNeXt.\"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            stride: int = 1,\n            dilation: int = 1,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize Downsample module.\n\n        Args:\n            in_chs: Number of input channels.\n            out_chs: Number of output channels.\n            stride: Stride for downsampling.\n            dilation: Dilation rate.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        avg_stride = stride if dilation == 1 else 1\n        
if stride > 1 or dilation > 1:\n            avg_pool_fn = AvgPool2dSame if avg_stride == 1 and dilation > 1 else nn.AvgPool2d\n            self.pool = avg_pool_fn(2, avg_stride, ceil_mode=True, count_include_pad=False)\n        else:\n            self.pool = nn.Identity()\n\n        if in_chs != out_chs:\n            self.conv = create_conv2d(in_chs, out_chs, 1, stride=1, **dd)\n        else:\n            self.conv = nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\"\"\"\n        x = self.pool(x)\n        x = self.conv(x)\n        return x\n\n\nclass ConvNeXtBlock(nn.Module):\n    \"\"\"ConvNeXt Block.\n\n    There are two equivalent implementations:\n      (1) DwConv -> LayerNorm (channels_first) -> 1x1 Conv -> GELU -> 1x1 Conv; all in (N, C, H, W)\n      (2) DwConv -> Permute to (N, H, W, C); LayerNorm (channels_last) -> Linear -> GELU -> Linear; Permute back\n\n    Unlike the official impl, this one allows choice of 1 or 2, 1x1 conv can be faster with appropriate\n    choice of LayerNorm impl, however as model size increases the tradeoffs appear to change and nn.Linear\n    is a better choice. 
This was observed with PyTorch 1.10 on 3090 GPU, it could change over time & w/ different HW.\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: Optional[int] = None,\n            kernel_size: int = 7,\n            stride: int = 1,\n            dilation: Union[int, Tuple[int, int]] = (1, 1),\n            mlp_ratio: float = 4,\n            conv_mlp: bool = False,\n            conv_bias: bool = True,\n            use_grn: bool = False,\n            ls_init_value: Optional[float] = 1e-6,\n            act_layer: Union[str, Callable] = 'gelu',\n            norm_layer: Optional[Callable] = None,\n            drop_path: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n\n        Args:\n            in_chs: Block input channels.\n            out_chs: Block output channels (same as in_chs if None).\n            kernel_size: Depthwise convolution kernel size.\n            stride: Stride of depthwise convolution.\n            dilation: Tuple specifying input and output dilation of block.\n            mlp_ratio: MLP expansion ratio.\n            conv_mlp: Use 1x1 convolutions for MLP and a NCHW compatible norm layer if True.\n            conv_bias: Apply bias for all convolution (linear) layers.\n            use_grn: Use GlobalResponseNorm in MLP (from ConvNeXt-V2)\n            ls_init_value: Layer-scale init values, layer-scale applied if not None.\n            act_layer: Activation layer.\n            norm_layer: Normalization layer (defaults to LN if not specified).\n            drop_path: Stochastic depth probability.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_chs = out_chs or in_chs\n        dilation = to_ntuple(2)(dilation)\n        act_layer = get_act_layer(act_layer)\n        if not norm_layer:\n            norm_layer = LayerNorm2d if conv_mlp else LayerNorm\n        mlp_layer = partial(GlobalResponseNormMlp if use_grn 
else Mlp, use_conv=conv_mlp)\n        self.use_conv_mlp = conv_mlp\n        self.conv_dw = create_conv2d(\n            in_chs,\n            out_chs,\n            kernel_size=kernel_size,\n            stride=stride,\n            dilation=dilation[0],\n            depthwise=True,\n            bias=conv_bias,\n            **dd,\n        )\n        self.norm = norm_layer(out_chs, **dd)\n        self.mlp = mlp_layer(\n            out_chs,\n            int(mlp_ratio * out_chs),\n            act_layer=act_layer,\n            **dd,\n        )\n        self.gamma = nn.Parameter(ls_init_value * torch.ones(out_chs, **dd)) if ls_init_value is not None else None\n        if in_chs != out_chs or stride != 1 or dilation[0] != dilation[1]:\n            self.shortcut = Downsample(in_chs, out_chs, stride=stride, dilation=dilation[0], **dd)\n        else:\n            self.shortcut = nn.Identity()\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\"\"\"\n        shortcut = x\n        x = self.conv_dw(x)\n        if self.use_conv_mlp:\n            x = self.norm(x)\n            x = self.mlp(x)\n        else:\n            x = x.permute(0, 2, 3, 1)\n            x = self.norm(x)\n            x = self.mlp(x)\n            x = x.permute(0, 3, 1, 2)\n        if self.gamma is not None:\n            x = x.mul(self.gamma.reshape(1, -1, 1, 1))\n\n        x = self.drop_path(x) + self.shortcut(shortcut)\n        return x\n\n\nclass ConvNeXtStage(nn.Module):\n    \"\"\"ConvNeXt stage (multiple blocks).\"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 7,\n            stride: int = 2,\n            depth: int = 2,\n            dilation: Tuple[int, int] = (1, 1),\n            drop_path_rates: Optional[List[float]] = None,\n            ls_init_value: float = 1.0,\n            conv_mlp: bool = False,\n      
      conv_bias: bool = True,\n            use_grn: bool = False,\n            act_layer: Union[str, Callable] = 'gelu',\n            norm_layer: Optional[Callable] = None,\n            norm_layer_cl: Optional[Callable] = None,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize ConvNeXt stage.\n\n        Args:\n            in_chs: Number of input channels.\n            out_chs: Number of output channels.\n            kernel_size: Kernel size for depthwise convolution.\n            stride: Stride for downsampling.\n            depth: Number of blocks in stage.\n            dilation: Dilation rates.\n            drop_path_rates: Drop path rates for each block.\n            ls_init_value: Initial value for layer scale.\n            conv_mlp: Use convolutional MLP.\n            conv_bias: Use bias in convolutions.\n            use_grn: Use global response normalization.\n            act_layer: Activation layer.\n            norm_layer: Normalization layer.\n            norm_layer_cl: Normalization layer for channels last.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.grad_checkpointing = False\n\n        if in_chs != out_chs or stride > 1 or dilation[0] != dilation[1]:\n            ds_ks = 2 if stride > 1 or dilation[0] != dilation[1] else 1\n            pad = 'same' if dilation[1] > 1 else 0  # same padding needed if dilation used\n            self.downsample = nn.Sequential(\n                norm_layer(in_chs, **dd),\n                create_conv2d(\n                    in_chs,\n                    out_chs,\n                    kernel_size=ds_ks,\n                    stride=stride,\n                    dilation=dilation[0],\n                    padding=pad,\n                    bias=conv_bias,\n                    **dd,\n                ),\n            )\n            in_chs = out_chs\n        else:\n            self.downsample = nn.Identity()\n\n        
drop_path_rates = drop_path_rates or [0.] * depth\n        stage_blocks = []\n        for i in range(depth):\n            stage_blocks.append(ConvNeXtBlock(\n                in_chs=in_chs,\n                out_chs=out_chs,\n                kernel_size=kernel_size,\n                dilation=dilation[1],\n                drop_path=drop_path_rates[i],\n                ls_init_value=ls_init_value,\n                conv_mlp=conv_mlp,\n                conv_bias=conv_bias,\n                use_grn=use_grn,\n                act_layer=act_layer,\n                norm_layer=norm_layer if conv_mlp else norm_layer_cl,\n                **dd,\n            ))\n            in_chs = out_chs\n        self.blocks = nn.Sequential(*stage_blocks)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\"\"\"\n        x = self.downsample(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        return x\n\n# map of norm layers with NCHW (2D) and channels last variants\n_NORM_MAP = {\n    'layernorm': (LayerNorm2d, LayerNorm),\n    'layernorm2d': (LayerNorm2d, LayerNorm),\n    'simplenorm': (SimpleNorm2d, SimpleNorm),\n    'simplenorm2d': (SimpleNorm2d, SimpleNorm),\n    'rmsnorm': (RmsNorm2d, RmsNorm),\n    'rmsnorm2d': (RmsNorm2d, RmsNorm),\n}\n\n\ndef _get_norm_layers(norm_layer: Union[Callable, str], conv_mlp: bool, norm_eps: float):\n    norm_layer = norm_layer or 'layernorm'\n    if norm_layer in _NORM_MAP:\n        norm_layer_cl = _NORM_MAP[norm_layer][0] if conv_mlp else _NORM_MAP[norm_layer][1]\n        norm_layer = _NORM_MAP[norm_layer][0]\n        if norm_eps is not None:\n            norm_layer = partial(norm_layer, eps=norm_eps)\n            norm_layer_cl = partial(norm_layer_cl, eps=norm_eps)\n    else:\n        assert conv_mlp, \\\n            'If a norm_layer is specified, conv MLP must be used so all norm expect rank-4, 
channels-first input'\n        norm_layer = get_norm_layer(norm_layer)\n        norm_layer_cl = norm_layer\n        if norm_eps is not None:\n            norm_layer_cl = partial(norm_layer_cl, eps=norm_eps)\n    return norm_layer, norm_layer_cl\n\n\nclass ConvNeXt(nn.Module):\n    \"\"\"ConvNeXt model architecture.\n\n    A PyTorch impl of : `A ConvNet for the 2020s`  - https://arxiv.org/pdf/2201.03545.pdf\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            output_stride: int = 32,\n            depths: Tuple[int, ...] = (3, 3, 9, 3),\n            dims: Tuple[int, ...] = (96, 192, 384, 768),\n            kernel_sizes: Union[int, Tuple[int, ...]] = 7,\n            ls_init_value: Optional[float] = 1e-6,\n            stem_type: str = 'patch',\n            patch_size: int = 4,\n            head_init_scale: float = 1.,\n            head_norm_first: bool = False,\n            head_hidden_size: Optional[int] = None,\n            conv_mlp: bool = False,\n            conv_bias: bool = True,\n            use_grn: bool = False,\n            act_layer: Union[str, Callable] = 'gelu',\n            norm_layer: Optional[Union[str, Callable]] = None,\n            norm_eps: Optional[float] = None,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            in_chans: Number of input image channels.\n            num_classes: Number of classes for classification head.\n            global_pool: Global pooling type.\n            output_stride: Output stride of network, one of (8, 16, 32).\n            depths: Number of blocks at each stage.\n            dims: Feature dimension at each stage.\n            kernel_sizes: Depthwise convolution kernel-sizes for each stage.\n            ls_init_value: Init value for Layer Scale, disabled if None.\n          
  stem_type: Type of stem.\n            patch_size: Stem patch size for patch stem.\n            head_init_scale: Init scaling value for classifier weights and biases.\n            head_norm_first: Apply normalization before global pool + head.\n            head_hidden_size: Size of MLP hidden layer in head if not None and head_norm_first == False.\n            conv_mlp: Use 1x1 conv in MLP, improves speed for small networks w/ chan last.\n            conv_bias: Use bias layers w/ all convolutions.\n            use_grn: Use Global Response Norm (ConvNeXt-V2) in MLP.\n            act_layer: Activation layer type.\n            norm_layer: Normalization layer type.\n            drop_rate: Head pre-classifier dropout rate.\n            drop_path_rate: Stochastic depth drop rate.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert output_stride in (8, 16, 32)\n        kernel_sizes = to_ntuple(4)(kernel_sizes)\n        norm_layer, norm_layer_cl = _get_norm_layers(norm_layer, conv_mlp, norm_eps)\n        act_layer = get_act_layer(act_layer)\n\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.feature_info = []\n\n        assert stem_type in ('patch', 'overlap', 'overlap_tiered', 'overlap_act')\n        if stem_type == 'patch':\n            # NOTE: this stem is a minimal form of ViT PatchEmbed, as used in SwinTransformer w/ patch_size = 4\n            self.stem = nn.Sequential(\n                nn.Conv2d(in_chans, dims[0], kernel_size=patch_size, stride=patch_size, bias=conv_bias, **dd),\n                norm_layer(dims[0], **dd),\n            )\n            stem_stride = patch_size\n        else:\n            mid_chs = make_divisible(dims[0] // 2) if 'tiered' in stem_type else dims[0]\n            self.stem = nn.Sequential(*filter(None, [\n                nn.Conv2d(in_chans, mid_chs, kernel_size=3, stride=2, padding=1, bias=conv_bias, **dd),\n  
              act_layer() if 'act' in stem_type else None,\n                nn.Conv2d(mid_chs, dims[0], kernel_size=3, stride=2, padding=1, bias=conv_bias, **dd),\n                norm_layer(dims[0], **dd),\n            ]))\n            stem_stride = 4\n\n        self.stages = nn.Sequential()\n        dp_rates = calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)\n        stages = []\n        prev_chs = dims[0]\n        curr_stride = stem_stride\n        dilation = 1\n        # 4 feature resolution stages, each consisting of multiple residual blocks\n        for i in range(4):\n            stride = 2 if curr_stride == 2 or i > 0 else 1\n            if curr_stride >= output_stride and stride > 1:\n                dilation *= stride\n                stride = 1\n            curr_stride *= stride\n            first_dilation = 1 if dilation in (1, 2) else 2\n            out_chs = dims[i]\n            stages.append(ConvNeXtStage(\n                prev_chs,\n                out_chs,\n                kernel_size=kernel_sizes[i],\n                stride=stride,\n                dilation=(first_dilation, dilation),\n                depth=depths[i],\n                drop_path_rates=dp_rates[i],\n                ls_init_value=ls_init_value,\n                conv_mlp=conv_mlp,\n                conv_bias=conv_bias,\n                use_grn=use_grn,\n                act_layer=act_layer,\n                norm_layer=norm_layer,\n                norm_layer_cl=norm_layer_cl,\n                **dd,\n            ))\n            prev_chs = out_chs\n            # NOTE feature_info use currently assumes stage 0 == stride 1, rest are stride 2\n            self.feature_info += [dict(num_chs=prev_chs, reduction=curr_stride, module=f'stages.{i}')]\n        self.stages = nn.Sequential(*stages)\n        self.num_features = self.head_hidden_size = prev_chs\n\n        # if head_norm_first == true, norm -> global pool -> fc ordering, like most other nets\n        # otherwise pool -> 
norm -> fc, the default ConvNeXt ordering (pretrained FB weights)\n        if head_norm_first:\n            assert not head_hidden_size\n            self.norm_pre = norm_layer(self.num_features, **dd)\n            self.head = ClassifierHead(\n                self.num_features,\n                num_classes,\n                pool_type=global_pool,\n                drop_rate=self.drop_rate,\n                **dd,\n            )\n        else:\n            self.norm_pre = nn.Identity()\n            self.head = NormMlpClassifierHead(\n                self.num_features,\n                num_classes,\n                hidden_size=head_hidden_size,\n                pool_type=global_pool,\n                drop_rate=self.drop_rate,\n                norm_layer=norm_layer,\n                act_layer='gelu',\n                **dd,\n            )\n            self.head_hidden_size = self.head.num_features\n        named_apply(partial(_init_weights, head_init_scale=head_init_scale), self)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Union[str, List]]:\n        \"\"\"Create regex patterns for parameter grouping.\n\n        Args:\n            coarse: Use coarse grouping.\n\n        Returns:\n            Dictionary mapping group names to regex patterns.\n        \"\"\"\n        return dict(\n            stem=r'^stem',\n            blocks=r'^stages\\.(\\d+)' if coarse else [\n                (r'^stages\\.(\\d+)\\.downsample', (0,)),  # blocks\n                (r'^stages\\.(\\d+)\\.blocks\\.(\\d+)', None),\n                (r'^norm_pre', (99999,))\n            ]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Enable or disable gradient checkpointing.\n\n        Args:\n            enable: Whether to enable gradient checkpointing.\n        \"\"\"\n        for s in self.stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) 
-> nn.Module:\n        \"\"\"Get the classifier module.\"\"\"\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None) -> None:\n        \"\"\"Reset the classifier head.\n\n        Args:\n            num_classes: Number of classes for new classifier.\n            global_pool: Global pooling type.\n        \"\"\"\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\"Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor.\n            indices: Take last n blocks if int, all if None, select matching indices if sequence.\n            norm: Apply norm layer to compatible intermediates.\n            stop_early: Stop iterating over blocks when last desired intermediate hit.\n            output_fmt: Shape of intermediate feature outputs.\n            intermediates_only: Only return intermediate features.\n\n        Returns:\n            List of intermediate features or tuple of (final features, intermediates).\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.stem(x)\n\n        last_idx = len(self.stages) - 1\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n        for feat_idx, stage in enumerate(stages):\n            x 
= stage(x)\n            if feat_idx in take_indices:\n                if norm and feat_idx == last_idx:\n                    intermediates.append(self.norm_pre(x))\n                else:\n                    intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        if feat_idx == last_idx:\n            x = self.norm_pre(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ) -> List[int]:\n        \"\"\"Prune layers not required for specified intermediates.\n\n        Args:\n            indices: Indices of intermediate layers to keep.\n            prune_norm: Whether to prune normalization layer.\n            prune_head: Whether to prune the classifier head.\n\n        Returns:\n            List of indices that were kept.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_norm:\n            self.norm_pre = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through feature extraction layers.\"\"\"\n        x = self.stem(x)\n        x = self.stages(x)\n        x = self.norm_pre(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        \"\"\"Forward pass through classifier head.\n\n        Args:\n            x: Feature tensor.\n            pre_logits: Return features before final classifier.\n\n        Returns:\n            Output tensor.\n        \"\"\"\n        return self.head(x, pre_logits=True) if pre_logits else self.head(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n    
    \"\"\"Forward pass.\"\"\"\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _init_weights(module: nn.Module, name: Optional[str] = None, head_init_scale: float = 1.0) -> None:\n    \"\"\"Initialize model weights.\n\n    Args:\n        module: Module to initialize.\n        name: Module name.\n        head_init_scale: Scale factor for head initialization.\n    \"\"\"\n    if isinstance(module, nn.Conv2d):\n        trunc_normal_(module.weight, std=.02)\n        if module.bias is not None:\n            nn.init.zeros_(module.bias)\n    elif isinstance(module, nn.Linear):\n        trunc_normal_(module.weight, std=.02)\n        nn.init.zeros_(module.bias)\n        if name and 'head.' in name:\n            module.weight.data.mul_(head_init_scale)\n            module.bias.data.mul_(head_init_scale)\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    \"\"\" Remap FB checkpoints -> timm \"\"\"\n    if 'head.norm.weight' in state_dict or 'norm_pre.weight' in state_dict:\n        return state_dict  # non-FB checkpoint\n    if 'model' in state_dict:\n        state_dict = state_dict['model']\n\n    out_dict = {}\n    if 'visual.trunk.stem.0.weight' in state_dict:\n        out_dict = {k.replace('visual.trunk.', ''): v for k, v in state_dict.items() if k.startswith('visual.trunk.')}\n        if 'visual.head.proj.weight' in state_dict:\n            out_dict['head.fc.weight'] = state_dict['visual.head.proj.weight']\n            out_dict['head.fc.bias'] = torch.zeros(state_dict['visual.head.proj.weight'].shape[0])\n        elif 'visual.head.mlp.fc1.weight' in state_dict:\n            out_dict['head.pre_logits.fc.weight'] = state_dict['visual.head.mlp.fc1.weight']\n            out_dict['head.pre_logits.fc.bias'] = state_dict['visual.head.mlp.fc1.bias']\n            out_dict['head.fc.weight'] = state_dict['visual.head.mlp.fc2.weight']\n            out_dict['head.fc.bias'] = 
torch.zeros(state_dict['visual.head.mlp.fc2.weight'].shape[0])\n        return out_dict\n\n    import re\n    for k, v in state_dict.items():\n        k = k.replace('downsample_layers.0.', 'stem.')\n        k = re.sub(r'stages.([0-9]+).([0-9]+)', r'stages.\\1.blocks.\\2', k)\n        k = re.sub(r'downsample_layers.([0-9]+).([0-9]+)', r'stages.\\1.downsample.\\2', k)\n        k = k.replace('dwconv', 'conv_dw')\n        k = k.replace('pwconv', 'mlp.fc')\n        if 'grn' in k:\n            k = k.replace('grn.beta', 'mlp.grn.bias')\n            k = k.replace('grn.gamma', 'mlp.grn.weight')\n            v = v.reshape(v.shape[-1])\n        k = k.replace('head.', 'head.fc.')\n        if k.startswith('norm.'):\n            k = k.replace('norm', 'head.norm')\n        if v.ndim == 2 and 'head' not in k:\n            model_shape = model.state_dict()[k].shape\n            v = v.reshape(model_shape)\n        out_dict[k] = v\n\n    return out_dict\n\n\ndef _create_convnext(variant, pretrained=False, **kwargs):\n    if kwargs.get('pretrained_cfg', '') == 'fcmae':\n        # NOTE fcmae pretrained weights have no classifier or final norm-layer (`head.norm`)\n        # This is workaround loading with num_classes=0 w/o removing norm-layer.\n        kwargs.setdefault('pretrained_strict', False)\n\n    model = build_model_with_cfg(\n        ConvNeXt, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=(0, 1, 2, 3), flatten_sequential=True),\n        **kwargs)\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.0', 'classifier': 'head.fc',\n        'license': 'apache-2.0', **kwargs\n    }\n\n\ndef _cfgv2(url='', **kwargs):\n    return {\n        'url': url,\n        
'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.0', 'classifier': 'head.fc',\n        'license': 'cc-by-nc-4.0', 'paper_ids': 'arXiv:2301.00808',\n        'paper_name': 'ConvNeXt-V2: Co-designing and Scaling ConvNets with Masked Autoencoders',\n        'origin_url': 'https://github.com/facebookresearch/ConvNeXt-V2',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    # timm specific variants\n    'convnext_tiny.in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnext_small.in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0),\n\n    'convnext_zepto_rms.ra4_e3600_r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)),\n    'convnext_zepto_rms_ols.ra4_e3600_r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        crop_pct=0.9),\n    'convnext_atto.d2_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_atto_d2-01bb0f51.pth',\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=0.95),\n    'convnext_atto_ols.a2_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_atto_ols_a2-78d1c8f3.pth',\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=0.95),\n    'convnext_atto_rms.untrained': _cfg(\n        #hf_hub_id='timm/',\n        test_input_size=(3, 256, 256), test_crop_pct=0.95),\n    'convnext_femto.d1_in1k': _cfg(\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_femto_d1-d71d5b4c.pth',\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=0.95),\n    'convnext_femto_ols.d1_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_femto_ols_d1-246bf2ed.pth',\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=0.95),\n    'convnext_pico.d1_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_pico_d1-10ad7f0d.pth',\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=0.95),\n    'convnext_pico_ols.d1_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_pico_ols_d1-611f0ca7.pth',\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnext_nano.in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnext_nano.d1h_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_nano_d1h-7eb4bdea.pth',\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnext_nano_ols.d1h_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_nano_ols_d1h-ae424a9a.pth',\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnext_tiny_hnf.a2h_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_tiny_hnf_a2h-ab7e9df2.pth',\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    
'convnext_nano.r384_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0),\n\n    'convnext_tiny.in12k_ft_in1k_384': _cfg(\n        hf_hub_id='timm/',\n       input_size=(3, 384, 384), pool_size=(12, 12),  crop_pct=1.0, crop_mode='squash'),\n    'convnext_small.in12k_ft_in1k_384': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0,  crop_mode='squash'),\n\n    'convnext_nano.in12k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, num_classes=11821),\n    'convnext_nano.r384_in12k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, num_classes=11821),\n    'convnext_nano.r384_ad_in12k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, num_classes=11821),\n    'convnext_tiny.in12k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, num_classes=11821),\n    'convnext_small.in12k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, num_classes=11821),\n\n    'convnext_tiny.fb_in22k_ft_in1k': _cfg(\n        url='https://dl.fbaipublicfiles.com/convnext/convnext_tiny_22k_1k_224.pth',\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnext_small.fb_in22k_ft_in1k': _cfg(\n        url='https://dl.fbaipublicfiles.com/convnext/convnext_small_22k_1k_224.pth',\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnext_base.fb_in22k_ft_in1k': _cfg(\n        url='https://dl.fbaipublicfiles.com/convnext/convnext_base_22k_1k_224.pth',\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnext_large.fb_in22k_ft_in1k': _cfg(\n        url='https://dl.fbaipublicfiles.com/convnext/convnext_large_22k_1k_224.pth',\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    
'convnext_xlarge.fb_in22k_ft_in1k': _cfg(\n        url='https://dl.fbaipublicfiles.com/convnext/convnext_xlarge_22k_1k_224_ema.pth',\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n\n    'convnext_tiny.fb_in1k': _cfg(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnext_tiny_1k_224_ema.pth\",\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnext_small.fb_in1k': _cfg(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnext_small_1k_224_ema.pth\",\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnext_base.fb_in1k': _cfg(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnext_base_1k_224_ema.pth\",\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnext_large.fb_in1k': _cfg(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnext_large_1k_224_ema.pth\",\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n\n    'convnext_tiny.fb_in22k_ft_in1k_384': _cfg(\n        url='https://dl.fbaipublicfiles.com/convnext/convnext_tiny_22k_1k_384.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'convnext_small.fb_in22k_ft_in1k_384': _cfg(\n        url='https://dl.fbaipublicfiles.com/convnext/convnext_small_22k_1k_384.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'convnext_base.fb_in22k_ft_in1k_384': _cfg(\n        url='https://dl.fbaipublicfiles.com/convnext/convnext_base_22k_1k_384.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'convnext_large.fb_in22k_ft_in1k_384': _cfg(\n        url='https://dl.fbaipublicfiles.com/convnext/convnext_large_22k_1k_384.pth',\n        hf_hub_id='timm/',\n    
    input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'convnext_xlarge.fb_in22k_ft_in1k_384': _cfg(\n        url='https://dl.fbaipublicfiles.com/convnext/convnext_xlarge_22k_1k_384_ema.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n\n    'convnext_tiny.fb_in22k': _cfg(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnext_tiny_22k_224.pth\",\n        hf_hub_id='timm/',\n        num_classes=21841),\n    'convnext_small.fb_in22k': _cfg(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnext_small_22k_224.pth\",\n        hf_hub_id='timm/',\n        num_classes=21841),\n    'convnext_base.fb_in22k': _cfg(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnext_base_22k_224.pth\",\n        hf_hub_id='timm/',\n        num_classes=21841),\n    'convnext_large.fb_in22k': _cfg(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnext_large_22k_224.pth\",\n        hf_hub_id='timm/',\n        num_classes=21841),\n    'convnext_xlarge.fb_in22k': _cfg(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnext_xlarge_22k_224.pth\",\n        hf_hub_id='timm/',\n        num_classes=21841),\n\n    'convnextv2_nano.fcmae_ft_in22k_in1k': _cfgv2(\n        url='https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_nano_22k_224_ema.pt',\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnextv2_nano.fcmae_ft_in22k_in1k_384': _cfgv2(\n        url='https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_nano_22k_384_ema.pt',\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'convnextv2_tiny.fcmae_ft_in22k_in1k': _cfgv2(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_tiny_22k_224_ema.pt\",\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), 
test_crop_pct=1.0),\n    'convnextv2_tiny.fcmae_ft_in22k_in1k_384': _cfgv2(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_tiny_22k_384_ema.pt\",\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'convnextv2_base.fcmae_ft_in22k_in1k': _cfgv2(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_base_22k_224_ema.pt\",\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnextv2_base.fcmae_ft_in22k_in1k_384': _cfgv2(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_base_22k_384_ema.pt\",\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'convnextv2_large.fcmae_ft_in22k_in1k': _cfgv2(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_large_22k_224_ema.pt\",\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnextv2_large.fcmae_ft_in22k_in1k_384': _cfgv2(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_large_22k_384_ema.pt\",\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'convnextv2_huge.fcmae_ft_in22k_in1k_384': _cfgv2(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_huge_22k_384_ema.pt\",\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'convnextv2_huge.fcmae_ft_in22k_in1k_512': _cfgv2(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_huge_22k_512_ema.pt\",\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512), pool_size=(15, 15), crop_pct=1.0, crop_mode='squash'),\n\n    'convnextv2_atto.fcmae_ft_in1k': _cfgv2(\n        
url='https://dl.fbaipublicfiles.com/convnext/convnextv2/im1k/convnextv2_atto_1k_224_ema.pt',\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=0.95),\n    'convnextv2_femto.fcmae_ft_in1k': _cfgv2(\n        url='https://dl.fbaipublicfiles.com/convnext/convnextv2/im1k/convnextv2_femto_1k_224_ema.pt',\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=0.95),\n    'convnextv2_pico.fcmae_ft_in1k': _cfgv2(\n        url='https://dl.fbaipublicfiles.com/convnext/convnextv2/im1k/convnextv2_pico_1k_224_ema.pt',\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=0.95),\n    'convnextv2_nano.fcmae_ft_in1k': _cfgv2(\n        url='https://dl.fbaipublicfiles.com/convnext/convnextv2/im1k/convnextv2_nano_1k_224_ema.pt',\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnextv2_tiny.fcmae_ft_in1k': _cfgv2(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnextv2/im1k/convnextv2_tiny_1k_224_ema.pt\",\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnextv2_base.fcmae_ft_in1k': _cfgv2(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnextv2/im1k/convnextv2_base_1k_224_ema.pt\",\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnextv2_large.fcmae_ft_in1k': _cfgv2(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnextv2/im1k/convnextv2_large_1k_224_ema.pt\",\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'convnextv2_huge.fcmae_ft_in1k': _cfgv2(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnextv2/im1k/convnextv2_huge_1k_224_ema.pt\",\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n\n    'convnextv2_atto.fcmae': _cfgv2(\n        url='https://dl.fbaipublicfiles.com/convnext/convnextv2/pt_only/convnextv2_atto_1k_224_fcmae.pt',\n    
    hf_hub_id='timm/',\n        num_classes=0),\n    'convnextv2_femto.fcmae': _cfgv2(\n        url='https://dl.fbaipublicfiles.com/convnext/convnextv2/pt_only/convnextv2_femto_1k_224_fcmae.pt',\n        hf_hub_id='timm/',\n        num_classes=0),\n    'convnextv2_pico.fcmae': _cfgv2(\n        url='https://dl.fbaipublicfiles.com/convnext/convnextv2/pt_only/convnextv2_pico_1k_224_fcmae.pt',\n        hf_hub_id='timm/',\n        num_classes=0),\n    'convnextv2_nano.fcmae': _cfgv2(\n        url='https://dl.fbaipublicfiles.com/convnext/convnextv2/pt_only/convnextv2_nano_1k_224_fcmae.pt',\n        hf_hub_id='timm/',\n        num_classes=0),\n    'convnextv2_tiny.fcmae': _cfgv2(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnextv2/pt_only/convnextv2_tiny_1k_224_fcmae.pt\",\n        hf_hub_id='timm/',\n        num_classes=0),\n    'convnextv2_base.fcmae': _cfgv2(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnextv2/pt_only/convnextv2_base_1k_224_fcmae.pt\",\n        hf_hub_id='timm/',\n        num_classes=0),\n    'convnextv2_large.fcmae': _cfgv2(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnextv2/pt_only/convnextv2_large_1k_224_fcmae.pt\",\n        hf_hub_id='timm/',\n        num_classes=0),\n    'convnextv2_huge.fcmae': _cfgv2(\n        url=\"https://dl.fbaipublicfiles.com/convnext/convnextv2/pt_only/convnextv2_huge_1k_224_fcmae.pt\",\n        hf_hub_id='timm/',\n        num_classes=0),\n\n    'convnextv2_small.untrained': _cfg(),\n\n    # CLIP weights, fine-tuned on in1k or in12k + in1k\n    'convnext_base.clip_laion2b_augreg_ft_in12k_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0),\n    'convnext_base.clip_laion2b_augreg_ft_in12k_in1k_384': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n  
  'convnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_320': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 320, 320), pool_size=(10, 10), crop_pct=1.0),\n    'convnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_384': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n\n    'convnext_base.clip_laion2b_augreg_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0),\n    'convnext_base.clip_laiona_augreg_ft_in1k_384': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0),\n    'convnext_large_mlp.clip_laion2b_augreg_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0\n    ),\n    'convnext_large_mlp.clip_laion2b_augreg_ft_in1k_384': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'\n    ),\n    'convnext_xxlarge.clip_laion2b_soup_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0),\n\n    'convnext_base.clip_laion2b_augreg_ft_in12k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821,\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0),\n    'convnext_large_mlp.clip_laion2b_soup_ft_in12k_320': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821,\n        input_size=(3, 320, 320), pool_size=(10, 10), 
crop_pct=1.0),\n    'convnext_large_mlp.clip_laion2b_augreg_ft_in12k_384': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821,\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'convnext_large_mlp.clip_laion2b_soup_ft_in12k_384': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821,\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'convnext_xxlarge.clip_laion2b_soup_ft_in12k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821,\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0),\n\n    # CLIP original image tower weights\n    'convnext_base.clip_laion2b': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0, num_classes=640),\n    'convnext_base.clip_laion2b_augreg': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0, num_classes=640),\n    'convnext_base.clip_laiona': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0, num_classes=640),\n    'convnext_base.clip_laiona_320': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 320, 320), pool_size=(10, 10), crop_pct=1.0, num_classes=640),\n    'convnext_base.clip_laiona_augreg_320': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 320, 320), pool_size=(10, 10), crop_pct=1.0, num_classes=640),\n    'convnext_large_mlp.clip_laion2b_augreg': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        
input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0, num_classes=768),\n    'convnext_large_mlp.clip_laion2b_ft_320': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 320, 320), pool_size=(10, 10), crop_pct=1.0, num_classes=768),\n    'convnext_large_mlp.clip_laion2b_ft_soup_320': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 320, 320), pool_size=(10, 10), crop_pct=1.0, num_classes=768),\n    'convnext_xxlarge.clip_laion2b_soup': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0, num_classes=1024),\n    'convnext_xxlarge.clip_laion2b_rewind': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0, num_classes=1024),\n\n    # NOTE dinov3 convnext weights are under a specific license, and downstream outputs must be shared with this\n    # https://ai.meta.com/resources/models-and-libraries/dinov3-license/\n    'convnext_tiny.dinov3_lvd1689m': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=1.0,\n        num_classes=0,\n        license='dinov3-license',\n    ),\n    'convnext_small.dinov3_lvd1689m': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=1.0,\n        num_classes=0,\n        license='dinov3-license',\n    ),\n    'convnext_base.dinov3_lvd1689m': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=1.0,\n        num_classes=0,\n        license='dinov3-license',\n    ),\n    'convnext_large.dinov3_lvd1689m': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=1.0,\n        num_classes=0,\n        license='dinov3-license',\n    ),\n\n    \"test_convnext.r160_in1k\": _cfg(\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 160, 160), pool_size=(5, 5), crop_pct=0.95),\n    
\"test_convnext2.r160_in1k\": _cfg(\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 160, 160), pool_size=(5, 5), crop_pct=0.95),\n    \"test_convnext3.r160_in1k\": _cfg(\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 160, 160), pool_size=(5, 5), crop_pct=0.95),\n\n})\n\n\n@register_model\ndef convnext_zepto_rms(pretrained=False, **kwargs) -> ConvNeXt:\n    # timm femto variant (NOTE: still tweaking depths, will vary between 3-4M param, current is 3.7M\n    model_args = dict(depths=(2, 2, 4, 2), dims=(32, 64, 128, 256), conv_mlp=True, norm_layer='simplenorm')\n    model = _create_convnext('convnext_zepto_rms', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_zepto_rms_ols(pretrained=False, **kwargs) -> ConvNeXt:\n    # timm femto variant (NOTE: still tweaking depths, will vary between 3-4M param, current is 3.7M\n    model_args = dict(\n        depths=(2, 2, 4, 2), dims=(32, 64, 128, 256), conv_mlp=True, norm_layer='simplenorm', stem_type='overlap_act')\n    model = _create_convnext('convnext_zepto_rms_ols', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_atto(pretrained=False, **kwargs) -> ConvNeXt:\n    # timm femto variant (NOTE: still tweaking depths, will vary between 3-4M param, current is 3.7M\n    model_args = dict(depths=(2, 2, 6, 2), dims=(40, 80, 160, 320), conv_mlp=True)\n    model = _create_convnext('convnext_atto', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_atto_ols(pretrained=False, **kwargs) -> ConvNeXt:\n    # timm femto variant with overlapping 3x3 conv stem, wider than non-ols femto above, current param count 3.7M\n    model_args = dict(depths=(2, 2, 6, 2), dims=(40, 80, 160, 320), conv_mlp=True, stem_type='overlap_tiered')\n    model = 
_create_convnext('convnext_atto_ols', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_atto_rms(pretrained=False, **kwargs) -> ConvNeXt:\n    # timm femto variant (NOTE: still tweaking depths, will vary between 3-4M param, current is 3.7M\n    model_args = dict(depths=(2, 2, 6, 2), dims=(40, 80, 160, 320), conv_mlp=True, norm_layer='rmsnorm2d')\n    model = _create_convnext('convnext_atto_rms', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_femto(pretrained=False, **kwargs) -> ConvNeXt:\n    # timm femto variant\n    model_args = dict(depths=(2, 2, 6, 2), dims=(48, 96, 192, 384), conv_mlp=True)\n    model = _create_convnext('convnext_femto', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_femto_ols(pretrained=False, **kwargs) -> ConvNeXt:\n    # timm femto variant\n    model_args = dict(depths=(2, 2, 6, 2), dims=(48, 96, 192, 384), conv_mlp=True, stem_type='overlap_tiered')\n    model = _create_convnext('convnext_femto_ols', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_pico(pretrained=False, **kwargs) -> ConvNeXt:\n    # timm pico variant\n    model_args = dict(depths=(2, 2, 6, 2), dims=(64, 128, 256, 512), conv_mlp=True)\n    model = _create_convnext('convnext_pico', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_pico_ols(pretrained=False, **kwargs) -> ConvNeXt:\n    # timm nano variant with overlapping 3x3 conv stem\n    model_args = dict(depths=(2, 2, 6, 2), dims=(64, 128, 256, 512), conv_mlp=True,  stem_type='overlap_tiered')\n    model = _create_convnext('convnext_pico_ols', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_nano(pretrained=False, **kwargs) -> ConvNeXt:\n    # timm nano variant with standard stem and 
head\n    model_args = dict(depths=(2, 2, 8, 2), dims=(80, 160, 320, 640), conv_mlp=True)\n    model = _create_convnext('convnext_nano', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_nano_ols(pretrained=False, **kwargs) -> ConvNeXt:\n    # experimental nano variant with overlapping conv stem\n    model_args = dict(depths=(2, 2, 8, 2), dims=(80, 160, 320, 640), conv_mlp=True, stem_type='overlap')\n    model = _create_convnext('convnext_nano_ols', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_tiny_hnf(pretrained=False, **kwargs) -> ConvNeXt:\n    # experimental tiny variant with norm before pooling in head (head norm first)\n    model_args = dict(depths=(3, 3, 9, 3), dims=(96, 192, 384, 768), head_norm_first=True, conv_mlp=True)\n    model = _create_convnext('convnext_tiny_hnf', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_tiny(pretrained=False, **kwargs) -> ConvNeXt:\n    model_args = dict(depths=(3, 3, 9, 3), dims=(96, 192, 384, 768))\n    model = _create_convnext('convnext_tiny', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_small(pretrained=False, **kwargs) -> ConvNeXt:\n    model_args = dict(depths=[3, 3, 27, 3], dims=[96, 192, 384, 768])\n    model = _create_convnext('convnext_small', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_base(pretrained=False, **kwargs) -> ConvNeXt:\n    model_args = dict(depths=[3, 3, 27, 3], dims=[128, 256, 512, 1024])\n    model = _create_convnext('convnext_base', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_large(pretrained=False, **kwargs) -> ConvNeXt:\n    model_args = dict(depths=[3, 3, 27, 3], dims=[192, 384, 768, 1536])\n    model = _create_convnext('convnext_large', 
pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_large_mlp(pretrained=False, **kwargs) -> ConvNeXt:\n    model_args = dict(depths=[3, 3, 27, 3], dims=[192, 384, 768, 1536], head_hidden_size=1536)\n    model = _create_convnext('convnext_large_mlp', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_xlarge(pretrained=False, **kwargs) -> ConvNeXt:\n    model_args = dict(depths=[3, 3, 27, 3], dims=[256, 512, 1024, 2048])\n    model = _create_convnext('convnext_xlarge', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnext_xxlarge(pretrained=False, **kwargs) -> ConvNeXt:\n    model_args = dict(depths=[3, 4, 30, 3], dims=[384, 768, 1536, 3072], norm_eps=kwargs.pop('norm_eps', 1e-5))\n    model = _create_convnext('convnext_xxlarge', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnextv2_atto(pretrained=False, **kwargs) -> ConvNeXt:\n    # timm femto variant (NOTE: still tweaking depths, will vary between 3-4M param, current is 3.7M\n    model_args = dict(\n        depths=(2, 2, 6, 2), dims=(40, 80, 160, 320), use_grn=True, ls_init_value=None, conv_mlp=True)\n    model = _create_convnext('convnextv2_atto', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnextv2_femto(pretrained=False, **kwargs) -> ConvNeXt:\n    # timm femto variant\n    model_args = dict(\n        depths=(2, 2, 6, 2), dims=(48, 96, 192, 384), use_grn=True, ls_init_value=None, conv_mlp=True)\n    model = _create_convnext('convnextv2_femto', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnextv2_pico(pretrained=False, **kwargs) -> ConvNeXt:\n    # timm pico variant\n    model_args = dict(\n        depths=(2, 2, 6, 2), dims=(64, 128, 256, 512), use_grn=True, ls_init_value=None, 
conv_mlp=True)\n    model = _create_convnext('convnextv2_pico', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnextv2_nano(pretrained=False, **kwargs) -> ConvNeXt:\n    # timm nano variant with standard stem and head\n    model_args = dict(\n        depths=(2, 2, 8, 2), dims=(80, 160, 320, 640), use_grn=True, ls_init_value=None, conv_mlp=True)\n    model = _create_convnext('convnextv2_nano', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnextv2_tiny(pretrained=False, **kwargs) -> ConvNeXt:\n    model_args = dict(depths=(3, 3, 9, 3), dims=(96, 192, 384, 768), use_grn=True, ls_init_value=None)\n    model = _create_convnext('convnextv2_tiny', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnextv2_small(pretrained=False, **kwargs) -> ConvNeXt:\n    model_args = dict(depths=[3, 3, 27, 3], dims=[96, 192, 384, 768], use_grn=True, ls_init_value=None)\n    model = _create_convnext('convnextv2_small', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnextv2_base(pretrained=False, **kwargs) -> ConvNeXt:\n    model_args = dict(depths=[3, 3, 27, 3], dims=[128, 256, 512, 1024], use_grn=True, ls_init_value=None)\n    model = _create_convnext('convnextv2_base', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnextv2_large(pretrained=False, **kwargs) -> ConvNeXt:\n    model_args = dict(depths=[3, 3, 27, 3], dims=[192, 384, 768, 1536], use_grn=True, ls_init_value=None)\n    model = _create_convnext('convnextv2_large', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef convnextv2_huge(pretrained=False, **kwargs) -> ConvNeXt:\n    model_args = dict(depths=[3, 3, 27, 3], dims=[352, 704, 1408, 2816], use_grn=True, ls_init_value=None)\n    model = _create_convnext('convnextv2_huge', 
pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef test_convnext(pretrained=False, **kwargs) -> ConvNeXt:\n    model_args = dict(depths=[1, 2, 4, 2], dims=[24, 32, 48, 64], norm_eps=kwargs.pop('norm_eps', 1e-5), act_layer='gelu_tanh')\n    model = _create_convnext('test_convnext', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef test_convnext2(pretrained=False, **kwargs) -> ConvNeXt:\n    model_args = dict(depths=[1, 1, 1, 1], dims=[32, 64, 96, 128], norm_eps=kwargs.pop('norm_eps', 1e-5), act_layer='gelu_tanh')\n    model = _create_convnext('test_convnext2', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef test_convnext3(pretrained=False, **kwargs) -> ConvNeXt:\n    model_args = dict(\n        depths=[1, 1, 1, 1], dims=[32, 64, 96, 128], norm_eps=kwargs.pop('norm_eps', 1e-5), kernel_sizes=(7, 5, 5, 3), act_layer='silu')\n    model = _create_convnext('test_convnext3', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n\nregister_model_deprecations(__name__, {\n    'convnext_tiny_in22ft1k': 'convnext_tiny.fb_in22k_ft_in1k',\n    'convnext_small_in22ft1k': 'convnext_small.fb_in22k_ft_in1k',\n    'convnext_base_in22ft1k': 'convnext_base.fb_in22k_ft_in1k',\n    'convnext_large_in22ft1k': 'convnext_large.fb_in22k_ft_in1k',\n    'convnext_xlarge_in22ft1k': 'convnext_xlarge.fb_in22k_ft_in1k',\n    'convnext_tiny_384_in22ft1k': 'convnext_tiny.fb_in22k_ft_in1k_384',\n    'convnext_small_384_in22ft1k': 'convnext_small.fb_in22k_ft_in1k_384',\n    'convnext_base_384_in22ft1k': 'convnext_base.fb_in22k_ft_in1k_384',\n    'convnext_large_384_in22ft1k': 'convnext_large.fb_in22k_ft_in1k_384',\n    'convnext_xlarge_384_in22ft1k': 'convnext_xlarge.fb_in22k_ft_in1k_384',\n    'convnext_tiny_in22k': 'convnext_tiny.fb_in22k',\n    'convnext_small_in22k': 'convnext_small.fb_in22k',\n    'convnext_base_in22k': 
'convnext_base.fb_in22k',\n    'convnext_large_in22k': 'convnext_large.fb_in22k',\n    'convnext_xlarge_in22k': 'convnext_xlarge.fb_in22k',\n})\n"
  },
  {
    "path": "timm/models/crossvit.py",
    "content": "\"\"\" CrossViT Model\n\n@inproceedings{\n    chen2021crossvit,\n    title={{CrossViT: Cross-Attention Multi-Scale Vision Transformer for Image Classification}},\n    author={Chun-Fu (Richard) Chen and Quanfu Fan and Rameswar Panda},\n    booktitle={International Conference on Computer Vision (ICCV)},\n    year={2021}\n}\n\nPaper link: https://arxiv.org/abs/2103.14899\nOriginal code: https://github.com/IBM/CrossViT/blob/main/models/crossvit.py\n\nNOTE: model names have been renamed from originals to represent actual input res all *_224 -> *_240 and *_384 -> *_408\n\nModifications and additions for timm hacked together by / Copyright 2021, Ross Wightman\nModified from Timm. https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vision_transformer.py\n\"\"\"\n\n# Copyright IBM All Rights Reserved.\n# SPDX-License-Identifier: Apache-2.0\n\nfrom functools import partial\nfrom typing import List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import DropPath, calculate_drop_path_rates, to_2tuple, trunc_normal_, _assert\nfrom ._builder import build_model_with_cfg\nfrom ._features_fx import register_notrace_function\nfrom ._registry import register_model, generate_default_cfgs\nfrom .vision_transformer import Block\n\n__all__ = ['CrossVit']  # model_registry will add each entrypoint fn to this\n\n\nclass PatchEmbed(nn.Module):\n    \"\"\" Image to Patch Embedding\n    \"\"\"\n\n    def __init__(\n            self,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            patch_size: int = 16,\n            in_chans: int = 3,\n            embed_dim: int = 768,\n            multi_conv: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        img_size = to_2tuple(img_size)\n        patch_size = to_2tuple(patch_size)\n      
  num_patches = (img_size[1] // patch_size[1]) * (img_size[0] // patch_size[0])\n        self.img_size = img_size\n        self.patch_size = patch_size\n        self.num_patches = num_patches\n        if multi_conv:\n            if patch_size[0] == 12:\n                self.proj = nn.Sequential(\n                    nn.Conv2d(in_chans, embed_dim // 4, kernel_size=7, stride=4, padding=3, **dd),\n                    nn.ReLU(inplace=True),\n                    nn.Conv2d(embed_dim // 4, embed_dim // 2, kernel_size=3, stride=3, padding=0, **dd),\n                    nn.ReLU(inplace=True),\n                    nn.Conv2d(embed_dim // 2, embed_dim, kernel_size=3, stride=1, padding=1, **dd),\n                )\n            elif patch_size[0] == 16:\n                self.proj = nn.Sequential(\n                    nn.Conv2d(in_chans, embed_dim // 4, kernel_size=7, stride=4, padding=3, **dd),\n                    nn.ReLU(inplace=True),\n                    nn.Conv2d(embed_dim // 4, embed_dim // 2, kernel_size=3, stride=2, padding=1, **dd),\n                    nn.ReLU(inplace=True),\n                    nn.Conv2d(embed_dim // 2, embed_dim, kernel_size=3, stride=2, padding=1, **dd),\n                )\n        else:\n            self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size, **dd)\n\n    def forward(self, x):\n        B, C, H, W = x.shape\n        # FIXME look at relaxing size constraints\n        _assert(H == self.img_size[0],\n                f\"Input image size ({H}*{W}) doesn't match model ({self.img_size[0]}*{self.img_size[1]}).\")\n        _assert(W == self.img_size[1],\n                f\"Input image size ({H}*{W}) doesn't match model ({self.img_size[0]}*{self.img_size[1]}).\")\n        x = self.proj(x).flatten(2).transpose(1, 2)\n        return x\n\n\nclass CrossAttention(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = False,\n            
attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_heads = num_heads\n        head_dim = dim // num_heads\n        # NOTE scale factor was wrong in my original version, can set manually to be compat with prev weights\n        self.scale = head_dim ** -0.5\n\n        self.wq = nn.Linear(dim, dim, bias=qkv_bias, **dd)\n        self.wk = nn.Linear(dim, dim, bias=qkv_bias, **dd)\n        self.wv = nn.Linear(dim, dim, bias=qkv_bias, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n    def forward(self, x):\n        B, N, C = x.shape\n        # B1C -> B1H(C/H) -> BH1(C/H)\n        q = self.wq(x[:, 0:1, ...]).reshape(B, 1, self.num_heads, C // self.num_heads).permute(0, 2, 1, 3)\n        # BNC -> BNH(C/H) -> BHN(C/H)\n        k = self.wk(x).reshape(B, N, self.num_heads, C // self.num_heads).permute(0, 2, 1, 3)\n        # BNC -> BNH(C/H) -> BHN(C/H)\n        v = self.wv(x).reshape(B, N, self.num_heads, C // self.num_heads).permute(0, 2, 1, 3)\n\n        attn = (q @ k.transpose(-2, -1)) * self.scale  # BH1(C/H) @ BH(C/H)N -> BH1N\n        attn = attn.softmax(dim=-1)\n        attn = self.attn_drop(attn)\n\n        x = (attn @ v).transpose(1, 2).reshape(B, 1, C)  # (BH1N @ BHN(C/H)) -> BH1(C/H) -> B1H(C/H) -> B1C\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n\nclass CrossAttentionBlock(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            
device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = CrossAttention(\n            dim,\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            **dd,\n        )\n        # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def forward(self, x):\n        x = x[:, 0:1, ...] + self.drop_path(self.attn(self.norm1(x)))\n        return x\n\n\nclass MultiScaleBlock(nn.Module):\n\n    def __init__(\n            self,\n            dim: Tuple[int, ...],\n            patches: Tuple[int, ...],\n            depth: Tuple[int, ...],\n            num_heads: Tuple[int, ...],\n            mlp_ratio: Tuple[float, ...],\n            qkv_bias: bool = False,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: Union[List[float], float] = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        num_branches = len(dim)\n        self.num_branches = num_branches\n        # different branch could have different embedding size, the first one is the base\n        self.blocks = nn.ModuleList()\n        for d in range(num_branches):\n            tmp = []\n            for i in range(depth[d]):\n                tmp.append(Block(\n                    dim=dim[d],\n                    num_heads=num_heads[d],\n                    mlp_ratio=mlp_ratio[d],\n                    qkv_bias=qkv_bias,\n                    proj_drop=proj_drop,\n                    attn_drop=attn_drop,\n                    
drop_path=drop_path[i],\n                    norm_layer=norm_layer,\n                    **dd,\n                ))\n            if len(tmp) != 0:\n                self.blocks.append(nn.Sequential(*tmp))\n\n        if len(self.blocks) == 0:\n            self.blocks = None\n\n        self.projs = nn.ModuleList()\n        for d in range(num_branches):\n            if dim[d] == dim[(d + 1) % num_branches] and False:\n                tmp = [nn.Identity()]\n            else:\n                tmp = [norm_layer(dim[d], **dd), act_layer(), nn.Linear(dim[d], dim[(d + 1) % num_branches], **dd)]\n            self.projs.append(nn.Sequential(*tmp))\n\n        self.fusion = nn.ModuleList()\n        for d in range(num_branches):\n            d_ = (d + 1) % num_branches\n            nh = num_heads[d_]\n            if depth[-1] == 0:  # backward capability:\n                self.fusion.append(\n                    CrossAttentionBlock(\n                        dim=dim[d_],\n                        num_heads=nh,\n                        mlp_ratio=mlp_ratio[d],\n                        qkv_bias=qkv_bias,\n                        proj_drop=proj_drop,\n                        attn_drop=attn_drop,\n                        drop_path=drop_path[-1],\n                        norm_layer=norm_layer,\n                        **dd,\n                    ))\n            else:\n                tmp = []\n                for _ in range(depth[-1]):\n                    tmp.append(CrossAttentionBlock(\n                        dim=dim[d_],\n                        num_heads=nh,\n                        mlp_ratio=mlp_ratio[d],\n                        qkv_bias=qkv_bias,\n                        proj_drop=proj_drop,\n                        attn_drop=attn_drop,\n                        drop_path=drop_path[-1],\n                        norm_layer=norm_layer,\n                        **dd,\n                    ))\n                self.fusion.append(nn.Sequential(*tmp))\n\n        self.revert_projs = 
nn.ModuleList()\n        for d in range(num_branches):\n            if dim[(d + 1) % num_branches] == dim[d] and False:\n                tmp = [nn.Identity()]\n            else:\n                tmp = [norm_layer(dim[(d + 1) % num_branches], **dd), act_layer(),\n                       nn.Linear(dim[(d + 1) % num_branches], dim[d], **dd)]\n            self.revert_projs.append(nn.Sequential(*tmp))\n\n    def forward(self, x: List[torch.Tensor]) -> List[torch.Tensor]:\n\n        outs_b = []\n        for i, block in enumerate(self.blocks):\n            outs_b.append(block(x[i]))\n\n        # only take the cls token out\n        proj_cls_token = torch.jit.annotate(List[torch.Tensor], [])\n        for i, proj in enumerate(self.projs):\n            proj_cls_token.append(proj(outs_b[i][:, 0:1, ...]))\n\n        # cross attention\n        outs = []\n        for i, (fusion, revert_proj) in enumerate(zip(self.fusion, self.revert_projs)):\n            tmp = torch.cat((proj_cls_token[i], outs_b[(i + 1) % self.num_branches][:, 1:, ...]), dim=1)\n            tmp = fusion(tmp)\n            reverted_proj_cls_token = revert_proj(tmp[:, 0:1, ...])\n            tmp = torch.cat((reverted_proj_cls_token, outs_b[i][:, 1:, ...]), dim=1)\n            outs.append(tmp)\n        return outs\n\n\ndef _compute_num_patches(img_size, patches):\n    return [i[0] // p * i[1] // p for i, p in zip(img_size, patches)]\n\n\n@register_notrace_function\ndef scale_image(x, ss: Tuple[int, int], crop_scale: bool = False):  # annotations for torchscript\n    \"\"\"\n    Pulled out of CrossViT.forward_features to bury conditional logic in a leaf node for FX tracing.\n    Args:\n        x (Tensor): input image\n        ss (tuple[int, int]): height and width to scale to\n        crop_scale (bool): whether to crop instead of interpolate to achieve the desired scale. 
Defaults to False\n    Returns:\n        Tensor: the \"scaled\" image batch tensor\n    \"\"\"\n    H, W = x.shape[-2:]\n    if H != ss[0] or W != ss[1]:\n        if crop_scale and ss[0] <= H and ss[1] <= W:\n            cu, cl = int(round((H - ss[0]) / 2.)), int(round((W - ss[1]) / 2.))\n            x = x[:, :, cu:cu + ss[0], cl:cl + ss[1]]\n        else:\n            x = torch.nn.functional.interpolate(x, size=ss, mode='bicubic', align_corners=False)\n    return x\n\n\nclass CrossVit(nn.Module):\n    \"\"\" Vision Transformer with support for patch or hybrid CNN input stage\n    \"\"\"\n\n    def __init__(\n            self,\n            img_size: int = 224,\n            img_scale: Tuple[float, ...] = (1.0, 1.0),\n            patch_size: Tuple[int, ...] = (8, 16),\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            embed_dim: Tuple[int, ...] = (192, 384),\n            depth: Tuple[Tuple[int, ...], ...] = ((1, 3, 1), (1, 3, 1), (1, 3, 1)),\n            num_heads: Tuple[int, ...] = (6, 12),\n            mlp_ratio: Tuple[float, ...] 
= (2., 2., 4.),\n            multi_conv: bool = False,\n            crop_scale: bool = False,\n            qkv_bias: bool = True,\n            drop_rate: float = 0.,\n            pos_drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            norm_layer: Type[nn.Module] = partial(nn.LayerNorm, eps=1e-6),\n            global_pool: str = 'token',\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert global_pool in ('token', 'avg')\n\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.img_size = to_2tuple(img_size)\n        img_scale = to_2tuple(img_scale)\n        self.img_size_scaled = [tuple([int(sj * si) for sj in self.img_size]) for si in img_scale]\n        self.crop_scale = crop_scale  # crop instead of interpolate for scale\n        num_patches = _compute_num_patches(self.img_size_scaled, patch_size)\n        self.num_branches = len(patch_size)\n        self.embed_dim = embed_dim\n        self.num_features = self.head_hidden_size = sum(embed_dim)\n        self.patch_embed = nn.ModuleList()\n\n        # hard-coded for torch jit script\n        for i in range(self.num_branches):\n            setattr(self, f'pos_embed_{i}', nn.Parameter(torch.zeros(1, 1 + num_patches[i], embed_dim[i], **dd)))\n            setattr(self, f'cls_token_{i}', nn.Parameter(torch.zeros(1, 1, embed_dim[i], **dd)))\n\n        for im_s, p, d in zip(self.img_size_scaled, patch_size, embed_dim):\n            self.patch_embed.append(\n                PatchEmbed(\n                    img_size=im_s,\n                    patch_size=p,\n                    in_chans=in_chans,\n                    embed_dim=d,\n                    multi_conv=multi_conv,\n                    **dd,\n                ))\n\n        self.pos_drop = 
nn.Dropout(p=pos_drop_rate)\n\n        total_depth = sum([sum(x[-2:]) for x in depth])\n        dpr = calculate_drop_path_rates(drop_path_rate, total_depth)  # stochastic depth decay rule\n        dpr_ptr = 0\n        self.blocks = nn.ModuleList()\n        for idx, block_cfg in enumerate(depth):\n            curr_depth = max(block_cfg[:-1]) + block_cfg[-1]\n            dpr_ = dpr[dpr_ptr:dpr_ptr + curr_depth]\n            blk = MultiScaleBlock(\n                embed_dim,\n                num_patches,\n                block_cfg,\n                num_heads=num_heads,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dpr_,\n                norm_layer=norm_layer,\n                **dd,\n            )\n            dpr_ptr += curr_depth\n            self.blocks.append(blk)\n\n        self.norm = nn.ModuleList([norm_layer(embed_dim[i], **dd) for i in range(self.num_branches)])\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = nn.ModuleList([\n            nn.Linear(embed_dim[i], num_classes, **dd) if num_classes > 0 else nn.Identity()\n            for i in range(self.num_branches)])\n\n        for i in range(self.num_branches):\n            trunc_normal_(getattr(self, f'pos_embed_{i}'), std=.02)\n            trunc_normal_(getattr(self, f'cls_token_{i}'), std=.02)\n\n        self.apply(self._init_weights)\n\n    def _init_weights(self, m):\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=.02)\n            if isinstance(m, nn.Linear) and m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n        elif isinstance(m, nn.LayerNorm):\n            nn.init.constant_(m.bias, 0)\n            nn.init.constant_(m.weight, 1.0)\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        out = set()\n        for i in range(self.num_branches):\n            out.add(f'cls_token_{i}')\n  
          pe = getattr(self, f'pos_embed_{i}', None)\n            if pe is not None and pe.requires_grad:\n                out.add(f'pos_embed_{i}')\n        return out\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^cls_token|pos_embed|patch_embed',  # stem and embed\n            blocks=[(r'^blocks\\.(\\d+)', None), (r'^norm', (99999,))]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, 'gradient checkpointing not supported'\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            assert global_pool in ('token', 'avg')\n            self.global_pool = global_pool\n        device = self.head[0].weight.device if hasattr(self.head[0], 'weight') else None\n        dtype = self.head[0].weight.dtype if hasattr(self.head[0], 'weight') else None\n        dd = {'device': device, 'dtype': dtype}\n        self.head = nn.ModuleList([\n            nn.Linear(self.embed_dim[i], num_classes, **dd) if num_classes > 0 else nn.Identity()\n            for i in range(self.num_branches)\n        ])\n\n    def forward_features(self, x) -> List[torch.Tensor]:\n        B = x.shape[0]\n        xs = []\n        for i, patch_embed in enumerate(self.patch_embed):\n            x_ = x\n            ss = self.img_size_scaled[i]\n            x_ = scale_image(x_, ss, self.crop_scale)\n            x_ = patch_embed(x_)\n            cls_tokens = self.cls_token_0 if i == 0 else self.cls_token_1  # hard-coded for torch jit script\n            cls_tokens = cls_tokens.expand(B, -1, -1)\n            x_ = torch.cat((cls_tokens, x_), dim=1)\n            pos_embed = self.pos_embed_0 if i == 0 else self.pos_embed_1  # hard-coded for torch jit script\n            x_ = x_ + 
pos_embed\n            x_ = self.pos_drop(x_)\n            xs.append(x_)\n\n        for i, blk in enumerate(self.blocks):\n            xs = blk(xs)\n\n        # NOTE: was before branch token section, move to here to assure all branch token are before layer norm\n        xs = [norm(xs[i]) for i, norm in enumerate(self.norm)]\n        return xs\n\n    def forward_head(self, xs: List[torch.Tensor], pre_logits: bool = False) -> torch.Tensor:\n        xs = [x[:, 1:].mean(dim=1) for x in xs] if self.global_pool == 'avg' else [x[:, 0] for x in xs]\n        xs = [self.head_drop(x) for x in xs]\n        if pre_logits or isinstance(self.head[0], nn.Identity):\n            return torch.cat([x for x in xs], dim=1)\n        return torch.mean(torch.stack([head(xs[i]) for i, head in enumerate(self.head)], dim=0), dim=0)\n\n    def forward(self, x):\n        xs = self.forward_features(x)\n        x = self.forward_head(xs)\n        return x\n\n\ndef _create_crossvit(variant, pretrained=False, **kwargs):\n    if kwargs.get('features_only', None):\n        raise RuntimeError('features_only not implemented for Vision Transformer models.')\n\n    def pretrained_filter_fn(state_dict):\n        new_state_dict = {}\n        for key in state_dict.keys():\n            if 'pos_embed' in key or 'cls_token' in key:\n                new_key = key.replace(\".\", \"_\")\n            else:\n                new_key = key\n            new_state_dict[new_key] = state_dict[key]\n        return new_state_dict\n\n    return build_model_with_cfg(\n        CrossVit,\n        variant,\n        pretrained,\n        pretrained_filter_fn=pretrained_filter_fn,\n        **kwargs,\n    )\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 240, 240), 'pool_size': None, 'crop_pct': 0.875,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD, 'fixed_input_size': True,\n        'first_conv': ('patch_embed.0.proj', 'patch_embed.1.proj'),\n 
       'classifier': ('head.0', 'head.1'),\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'crossvit_15_240.in1k': _cfg(hf_hub_id='timm/'),\n    'crossvit_15_dagger_240.in1k': _cfg(\n        hf_hub_id='timm/',\n        first_conv=('patch_embed.0.proj.0', 'patch_embed.1.proj.0'),\n    ),\n    'crossvit_15_dagger_408.in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 408, 408), first_conv=('patch_embed.0.proj.0', 'patch_embed.1.proj.0'), crop_pct=1.0,\n    ),\n    'crossvit_18_240.in1k': _cfg(hf_hub_id='timm/'),\n    'crossvit_18_dagger_240.in1k': _cfg(\n        hf_hub_id='timm/',\n        first_conv=('patch_embed.0.proj.0', 'patch_embed.1.proj.0'),\n    ),\n    'crossvit_18_dagger_408.in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 408, 408), first_conv=('patch_embed.0.proj.0', 'patch_embed.1.proj.0'), crop_pct=1.0,\n    ),\n    'crossvit_9_240.in1k': _cfg(hf_hub_id='timm/'),\n    'crossvit_9_dagger_240.in1k': _cfg(\n        hf_hub_id='timm/',\n        first_conv=('patch_embed.0.proj.0', 'patch_embed.1.proj.0'),\n    ),\n    'crossvit_base_240.in1k': _cfg(hf_hub_id='timm/'),\n    'crossvit_small_240.in1k': _cfg(hf_hub_id='timm/'),\n    'crossvit_tiny_240.in1k': _cfg(hf_hub_id='timm/'),\n})\n\n\n@register_model\ndef crossvit_tiny_240(pretrained=False, **kwargs) -> CrossVit:\n    model_args = dict(\n        img_scale=(1.0, 224/240), patch_size=[12, 16], embed_dim=[96, 192], depth=[[1, 4, 0], [1, 4, 0], [1, 4, 0]],\n        num_heads=[3, 3], mlp_ratio=[4, 4, 1])\n    model = _create_crossvit(variant='crossvit_tiny_240', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef crossvit_small_240(pretrained=False, **kwargs) -> CrossVit:\n    model_args = dict(\n        img_scale=(1.0, 224/240), patch_size=[12, 16], embed_dim=[192, 384], depth=[[1, 4, 0], [1, 4, 0], [1, 4, 0]],\n        num_heads=[6, 6], mlp_ratio=[4, 4, 1])\n    model = 
_create_crossvit(variant='crossvit_small_240', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef crossvit_base_240(pretrained=False, **kwargs) -> CrossVit:\n    model_args = dict(\n        img_scale=(1.0, 224/240), patch_size=[12, 16], embed_dim=[384, 768], depth=[[1, 4, 0], [1, 4, 0], [1, 4, 0]],\n        num_heads=[12, 12], mlp_ratio=[4, 4, 1])\n    model = _create_crossvit(variant='crossvit_base_240', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef crossvit_9_240(pretrained=False, **kwargs) -> CrossVit:\n    model_args = dict(\n        img_scale=(1.0, 224/240), patch_size=[12, 16], embed_dim=[128, 256], depth=[[1, 3, 0], [1, 3, 0], [1, 3, 0]],\n        num_heads=[4, 4], mlp_ratio=[3, 3, 1])\n    model = _create_crossvit(variant='crossvit_9_240', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef crossvit_15_240(pretrained=False, **kwargs) -> CrossVit:\n    model_args = dict(\n        img_scale=(1.0, 224/240), patch_size=[12, 16], embed_dim=[192, 384], depth=[[1, 5, 0], [1, 5, 0], [1, 5, 0]],\n        num_heads=[6, 6], mlp_ratio=[3, 3, 1])\n    model = _create_crossvit(variant='crossvit_15_240', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef crossvit_18_240(pretrained=False, **kwargs) -> CrossVit:\n    model_args = dict(\n        img_scale=(1.0, 224 / 240), patch_size=[12, 16], embed_dim=[224, 448], depth=[[1, 6, 0], [1, 6, 0], [1, 6, 0]],\n        num_heads=[7, 7], mlp_ratio=[3, 3, 1], **kwargs)\n    model = _create_crossvit(variant='crossvit_18_240', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef crossvit_9_dagger_240(pretrained=False, **kwargs) -> CrossVit:\n    model_args = dict(\n        img_scale=(1.0, 224 / 240), patch_size=[12, 16], embed_dim=[128, 256], depth=[[1, 3, 0], [1, 3, 0], [1, 3, 0]],\n        num_heads=[4, 
4], mlp_ratio=[3, 3, 1], multi_conv=True)\n    model = _create_crossvit(variant='crossvit_9_dagger_240', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef crossvit_15_dagger_240(pretrained=False, **kwargs) -> CrossVit:\n    model_args = dict(\n        img_scale=(1.0, 224/240), patch_size=[12, 16], embed_dim=[192, 384], depth=[[1, 5, 0], [1, 5, 0], [1, 5, 0]],\n        num_heads=[6, 6], mlp_ratio=[3, 3, 1], multi_conv=True)\n    model = _create_crossvit(variant='crossvit_15_dagger_240', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef crossvit_15_dagger_408(pretrained=False, **kwargs) -> CrossVit:\n    model_args = dict(\n        img_scale=(1.0, 384/408), patch_size=[12, 16], embed_dim=[192, 384], depth=[[1, 5, 0], [1, 5, 0], [1, 5, 0]],\n        num_heads=[6, 6], mlp_ratio=[3, 3, 1], multi_conv=True)\n    model = _create_crossvit(variant='crossvit_15_dagger_408', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef crossvit_18_dagger_240(pretrained=False, **kwargs) -> CrossVit:\n    model_args = dict(\n        img_scale=(1.0, 224/240), patch_size=[12, 16], embed_dim=[224, 448], depth=[[1, 6, 0], [1, 6, 0], [1, 6, 0]],\n        num_heads=[7, 7], mlp_ratio=[3, 3, 1], multi_conv=True)\n    model = _create_crossvit(variant='crossvit_18_dagger_240', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef crossvit_18_dagger_408(pretrained=False, **kwargs) -> CrossVit:\n    model_args = dict(\n        img_scale=(1.0, 384/408), patch_size=[12, 16], embed_dim=[224, 448], depth=[[1, 6, 0], [1, 6, 0], [1, 6, 0]],\n        num_heads=[7, 7], mlp_ratio=[3, 3, 1], multi_conv=True)\n    model = _create_crossvit(variant='crossvit_18_dagger_408', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n"
  },
  {
    "path": "timm/models/csatv2.py",
    "content": "\"\"\"CSATv2\n\nA frequency-domain vision model using DCT transforms with spatial attention.\n\nPaper: TBD\n\nThis model created by members of MLPA Lab. Welcome feedback and suggestion, questions.\ngusdlf93@naver.com\njuno.demie.oh@gmail.com\n\nRefined for timm by Ross Wightman\n\"\"\"\nimport math\nimport warnings\nfrom functools import partial, reduce\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.layers import trunc_normal_, DropPath, Mlp, LayerNorm2d, Attention, NormMlpClassifierHead, LayerScale, LayerScale2d\nfrom timm.layers.grn import GlobalResponseNorm\nfrom timm.models._builder import build_model_with_cfg\nfrom timm.models._features import feature_take_indices\nfrom timm.models._manipulate import checkpoint, checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['CSATv2', 'csatv2']\n\n# DCT frequency normalization statistics (Y, Cb, Cr channels x 64 coefficients)\n_DCT_MEAN = (\n    (932.42657, -0.00260, 0.33415, -0.02840, 0.00003, -0.02792, -0.00183, 0.00006,\n     0.00032, 0.03402, -0.00571, 0.00020, 0.00006, -0.00038, -0.00558, -0.00116,\n     -0.00000, -0.00047, -0.00008, -0.00030, 0.00942, 0.00161, -0.00009, -0.00006,\n     -0.00014, -0.00035, 0.00001, -0.00220, 0.00033, -0.00002, -0.00003, -0.00020,\n     0.00007, -0.00000, 0.00005, 0.00293, -0.00004, 0.00006, 0.00019, 0.00004,\n     0.00006, -0.00015, -0.00002, 0.00007, 0.00010, -0.00004, 0.00008, 0.00000,\n     0.00008, -0.00001, 0.00015, 0.00002, 0.00007, 0.00003, 0.00004, -0.00001,\n     0.00004, -0.00000, 0.00002, -0.00000, -0.00008, -0.00000, -0.00003, 0.00003),\n    (962.34735, -0.00428, 0.09835, 0.00152, -0.00009, 0.00312, -0.00141, -0.00001,\n     -0.00013, 0.01050, 0.00065, 0.00006, -0.00000, 0.00003, 0.00264, 0.00000,\n     0.00001, 0.00007, -0.00006, 0.00003, 0.00341, 0.00163, 0.00004, 0.00003,\n     -0.00001, 0.00008, -0.00000, 
0.00090, 0.00018, -0.00006, -0.00001, 0.00007,\n     -0.00003, -0.00001, 0.00006, 0.00084, -0.00000, -0.00001, 0.00000, 0.00004,\n     -0.00001, -0.00002, 0.00000, 0.00001, 0.00002, 0.00001, 0.00004, 0.00011,\n     0.00000, -0.00003, 0.00011, -0.00002, 0.00001, 0.00001, 0.00001, 0.00001,\n     -0.00007, -0.00003, 0.00001, 0.00000, 0.00001, 0.00002, 0.00001, 0.00000),\n    (1053.16101, -0.00213, -0.09207, 0.00186, 0.00013, 0.00034, -0.00119, 0.00002,\n     0.00011, -0.00984, 0.00046, -0.00007, -0.00001, -0.00005, 0.00180, 0.00042,\n     0.00002, -0.00010, 0.00004, 0.00003, -0.00301, 0.00125, -0.00002, -0.00003,\n     -0.00001, -0.00001, -0.00001, 0.00056, 0.00021, 0.00001, -0.00001, 0.00002,\n     -0.00001, -0.00001, 0.00005, -0.00070, -0.00002, -0.00002, 0.00005, -0.00004,\n     -0.00000, 0.00002, -0.00002, 0.00001, 0.00000, -0.00003, 0.00004, 0.00007,\n     0.00001, 0.00000, 0.00013, -0.00000, 0.00000, 0.00002, -0.00000, -0.00001,\n     -0.00004, -0.00003, 0.00000, 0.00001, -0.00001, 0.00001, -0.00000, 0.00000),\n)\n\n_DCT_VAR = (\n    (270372.37500, 6287.10645, 5974.94043, 1653.10889, 1463.91748, 1832.58997, 755.92468, 692.41528,\n     648.57184, 641.46881, 285.79288, 301.62100, 380.43405, 349.84027, 374.15891, 190.30960,\n     190.76746, 221.64578, 200.82646, 145.87979, 126.92046, 62.14622, 67.75562, 102.42001,\n     129.74922, 130.04631, 103.12189, 97.76417, 53.17402, 54.81048, 73.48712, 81.04342,\n     69.35100, 49.06024, 33.96053, 37.03279, 20.48858, 24.94830, 33.90822, 44.54912,\n     47.56363, 40.03160, 30.43313, 22.63899, 26.53739, 26.57114, 21.84404, 17.41557,\n     15.18253, 10.69678, 11.24111, 12.97229, 15.08971, 15.31646, 8.90409, 7.44213,\n     6.66096, 6.97719, 4.17834, 3.83882, 4.51073, 2.36646, 2.41363, 1.48266),\n    (18839.21094, 321.70932, 300.15259, 77.47830, 76.02293, 89.04748, 33.99642, 34.74807,\n     32.12333, 28.19588, 12.04675, 14.26871, 18.45779, 16.59588, 15.67892, 7.37718,\n     8.56312, 10.28946, 9.41013, 6.69090, 5.16453, 2.55186, 
3.03073, 4.66765,\n     5.85418, 5.74644, 4.33702, 3.66948, 1.95107, 2.26034, 3.06380, 3.50705,\n     3.06359, 2.19284, 1.54454, 1.57860, 0.97078, 1.13941, 1.48653, 1.89996,\n     1.95544, 1.64950, 1.24754, 0.93677, 1.09267, 1.09516, 0.94163, 0.78966,\n     0.72489, 0.50841, 0.50909, 0.55664, 0.63111, 0.64125, 0.38847, 0.33378,\n     0.30918, 0.33463, 0.20875, 0.19298, 0.21903, 0.13380, 0.13444, 0.09554),\n    (17127.39844, 292.81421, 271.45209, 66.64056, 63.60253, 76.35437, 28.06587, 27.84831,\n     25.96656, 23.60370, 9.99173, 11.34992, 14.46955, 12.92553, 12.69353, 5.91537,\n     6.60187, 7.90891, 7.32825, 5.32785, 4.29660, 2.13459, 2.44135, 3.66021,\n     4.50335, 4.38959, 3.34888, 2.97181, 1.60633, 1.77010, 2.35118, 2.69018,\n     2.38189, 1.74596, 1.26014, 1.31684, 0.79327, 0.92046, 1.17670, 1.47609,\n     1.50914, 1.28725, 0.99898, 0.74832, 0.85736, 0.85800, 0.74663, 0.63508,\n     0.58748, 0.41098, 0.41121, 0.44663, 0.50277, 0.51519, 0.31729, 0.27336,\n     0.25399, 0.27241, 0.17353, 0.16255, 0.18440, 0.11602, 0.11511, 0.08450),\n)\n\n\ndef _zigzag_permutation(rows: int, cols: int) -> List[int]:\n    \"\"\"Generate zigzag scan order for DCT coefficients.\"\"\"\n    idx_matrix = np.arange(0, rows * cols, 1).reshape(rows, cols).tolist()\n    dia = [[] for _ in range(rows + cols - 1)]\n    zigzag = []\n    for i in range(rows):\n        for j in range(cols):\n            s = i + j\n            if s % 2 == 0:\n                dia[s].insert(0, idx_matrix[i][j])\n            else:\n                dia[s].append(idx_matrix[i][j])\n    for d in dia:\n        zigzag.extend(d)\n    return zigzag\n\n\ndef _dct_kernel_type_2(\n        kernel_size: int,\n        orthonormal: bool,\n        device=None,\n        dtype=None,\n) -> torch.Tensor:\n    \"\"\"Generate Type-II DCT kernel matrix.\"\"\"\n    dd = dict(device=device, dtype=dtype)\n    x = torch.eye(kernel_size, **dd)\n    v = x.clone().contiguous().view(-1, kernel_size)\n    v = torch.cat([v, v.flip([1])], 
dim=-1)\n    v = torch.fft.fft(v, dim=-1)[:, :kernel_size]\n    k = (\n        torch.tensor(-1j, device=device, dtype=torch.complex64) * torch.pi\n        * torch.arange(kernel_size, device=device, dtype=torch.long)[None, :]\n    )\n    k = torch.exp(k / (kernel_size * 2))\n    v = v * k\n    v = v.real\n    if orthonormal:\n        v[:, 0] = v[:, 0] * torch.sqrt(torch.tensor(1 / (kernel_size * 4), **dd))\n        v[:, 1:] = v[:, 1:] * torch.sqrt(torch.tensor(1 / (kernel_size * 2), **dd))\n    v = v.contiguous().view(*x.shape)\n    return v\n\n\ndef _dct_kernel_type_3(\n        kernel_size: int,\n        orthonormal: bool,\n        device=None,\n        dtype=None,\n) -> torch.Tensor:\n    \"\"\"Generate Type-III DCT kernel matrix (inverse of Type-II).\"\"\"\n    return torch.linalg.inv(_dct_kernel_type_2(kernel_size, orthonormal, device, dtype))\n\n\nclass Dct1d(nn.Module):\n    \"\"\"1D Discrete Cosine Transform layer.\"\"\"\n\n    def __init__(\n            self,\n            kernel_size: int,\n            kernel_type: int = 2,\n            orthonormal: bool = True,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = dict(device=device, dtype=dtype)\n        super().__init__()\n        kernel = {'2': _dct_kernel_type_2, '3': _dct_kernel_type_3}\n        dct_weights = kernel[f'{kernel_type}'](kernel_size, orthonormal, **dd).T\n        self.register_buffer('weights', dct_weights.contiguous())\n        self.register_parameter('bias', None)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        return F.linear(x, self.weights, self.bias)\n\n\nclass Dct2d(nn.Module):\n    \"\"\"2D Discrete Cosine Transform layer.\"\"\"\n\n    def __init__(\n            self,\n            kernel_size: int,\n            kernel_type: int = 2,\n            orthonormal: bool = True,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = dict(device=device, dtype=dtype)\n        super().__init__()\n        self.transform = 
Dct1d(kernel_size, kernel_type, orthonormal, **dd)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        return self.transform(self.transform(x).transpose(-1, -2)).transpose(-1, -2)\n\n\ndef _split_out_chs(out_chs: int, ratio=(24, 4, 4)):\n    # reduce ratio to smallest integers (24,4,4) -> (6,1,1)\n    g = reduce(math.gcd, ratio)\n    r = tuple(x // g for x in ratio)\n    denom = sum(r)\n\n    assert out_chs % denom == 0 and out_chs >= denom, (\n        f\"out_chs={out_chs} can't be split into Y/Cb/Cr with ratio {ratio} \"\n        f\"(reduced {r}); out_chs must be a multiple of {denom}.\"\n    )\n\n    unit = out_chs // denom\n    y, cb, cr = (ri * unit for ri in r)\n    assert y + cb + cr == out_chs and min(y, cb, cr) > 0\n    return y, cb, cr\n\n\nclass LearnableDct2d(nn.Module):\n    \"\"\"Learnable 2D DCT stem with RGB to YCbCr conversion and frequency selection.\"\"\"\n\n    def __init__(\n            self,\n            kernel_size: int,\n            kernel_type: int = 2,\n            orthonormal: bool = True,\n            out_chs: int = 32,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = dict(device=device, dtype=dtype)\n        super().__init__()\n        self.k = kernel_size\n        self.transform = Dct2d(kernel_size, kernel_type, orthonormal, **dd)\n        self.permutation = _zigzag_permutation(kernel_size, kernel_size)\n\n        y_ch, cb_ch, cr_ch = _split_out_chs(out_chs, ratio=(24, 4, 4))\n        self.conv_y  = nn.Conv2d(kernel_size ** 2, y_ch,  kernel_size=1, padding=0, **dd)\n        self.conv_cb = nn.Conv2d(kernel_size ** 2, cb_ch, kernel_size=1, padding=0, **dd)\n        self.conv_cr = nn.Conv2d(kernel_size ** 2, cr_ch, kernel_size=1, padding=0, **dd)\n\n        # Register empty buffers for DCT normalization statistics\n        self.register_buffer('mean', torch.empty(3, 64, device=device, dtype=dtype), persistent=False)\n        self.register_buffer('var', torch.empty(3, 64, device=device, 
dtype=dtype), persistent=False)\n        # Shape (3, 1, 1) for BCHW broadcasting\n        self.register_buffer('imagenet_mean', torch.empty(3, 1, 1, device=device, dtype=dtype), persistent=False)\n        self.register_buffer('imagenet_std', torch.empty(3, 1, 1, device=device, dtype=dtype), persistent=False)\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize buffers.\"\"\"\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        self.mean.copy_(torch.tensor(_DCT_MEAN))\n        self.var.copy_(torch.tensor(_DCT_VAR))\n        self.imagenet_mean.copy_(torch.tensor([0.485, 0.456, 0.406]).view(3, 1, 1))\n        self.imagenet_std.copy_(torch.tensor([0.229, 0.224, 0.225]).view(3, 1, 1))\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n    def _denormalize(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Convert from ImageNet normalized to [0, 255] range.\"\"\"\n        return x.mul(self.imagenet_std).add_(self.imagenet_mean) * 255\n\n    def _rgb_to_ycbcr(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Convert RGB to YCbCr color space (BCHW input/output).\"\"\"\n        r, g, b = x[:, 0], x[:, 1], x[:, 2]\n        y = r * 0.299 + g * 0.587 + b * 0.114\n        cb = 0.564 * (b - y) + 128\n        cr = 0.713 * (r - y) + 128\n        return torch.stack([y, cb, cr], dim=1)\n\n    def _frequency_normalize(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Normalize DCT coefficients using precomputed statistics.\"\"\"\n        std = self.var ** 0.5 + 1e-8\n        return (x - self.mean) / std\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        b, c, h, w = x.shape\n        x = self._denormalize(x)\n        x = self._rgb_to_ycbcr(x)\n        # 
Extract non-overlapping k x k patches\n        x = x.reshape(b, c, h // self.k, self.k, w // self.k, self.k)  # (B, C, H//k, k, W//k, k)\n        x = x.permute(0, 2, 4, 1, 3, 5)  # (B, H//k, W//k, C, k, k)\n        x = self.transform(x)\n        x = x.reshape(-1, c, self.k * self.k)\n        x = x[:, :, self.permutation]\n        x = self._frequency_normalize(x)\n        x = x.reshape(b, h // self.k, w // self.k, c, -1)\n        x = x.permute(0, 3, 4, 1, 2).contiguous()\n        x_y = self.conv_y(x[:, 0])\n        x_cb = self.conv_cb(x[:, 1])\n        x_cr = self.conv_cr(x[:, 2])\n        return torch.cat([x_y, x_cb, x_cr], dim=1)\n\n\nclass Dct2dStats(nn.Module):\n    \"\"\"Utility module to compute DCT coefficient statistics.\"\"\"\n\n    def __init__(\n            self,\n            kernel_size: int,\n            kernel_type: int = 2,\n            orthonormal: bool = True,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = dict(device=device, dtype=dtype)\n        super().__init__()\n        self.k = kernel_size\n        self.transform = Dct2d(kernel_size, kernel_type, orthonormal, **dd)\n        self.permutation = _zigzag_permutation(kernel_size, kernel_size)\n\n    def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:\n        b, c, h, w = x.shape\n        # Extract non-overlapping k x k patches\n        x = x.reshape(b, c, h // self.k, self.k, w // self.k, self.k)  # (B, C, H//k, k, W//k, k)\n        x = x.permute(0, 2, 4, 1, 3, 5)  # (B, H//k, W//k, C, k, k)\n        x = self.transform(x)\n        x = x.reshape(-1, c, self.k * self.k)\n        x = x[:, :, self.permutation]\n        x = x.reshape(b * (h // self.k) * (w // self.k), c, -1)\n\n        mean_list = torch.zeros([3, 64])\n        var_list = torch.zeros([3, 64])\n        for i in range(3):\n            mean_list[i] = torch.mean(x[:, i], dim=0)\n            var_list[i] = torch.var(x[:, i], dim=0)\n        return mean_list, var_list\n\n\nclass 
Block(nn.Module):\n    \"\"\"ConvNeXt-style block with spatial attention.\"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            drop_path: float = 0.,\n            ls_init_value: Optional[float] = None,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = dict(device=device, dtype=dtype)\n        super().__init__()\n        self.dwconv = nn.Conv2d(dim, dim, kernel_size=7, padding=3, groups=dim, **dd)\n        self.norm = nn.LayerNorm(dim, eps=1e-6, **dd)\n        self.pwconv1 = nn.Linear(dim, 4 * dim, **dd)\n        self.act = nn.GELU()\n        self.grn = GlobalResponseNorm(4 * dim, channels_last=True, **dd)\n        self.pwconv2 = nn.Linear(4 * dim, dim, **dd)\n        self.ls = LayerScale2d(dim, init_values=ls_init_value, **dd) if ls_init_value else nn.Identity()\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n        self.attn = SpatialAttention(**dd)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        shortcut = x\n        x = self.dwconv(x)\n        x = x.permute(0, 2, 3, 1)\n        x = self.norm(x)\n        x = self.pwconv1(x)\n        x = self.act(x)\n        x = self.grn(x)\n        x = self.pwconv2(x)\n        x = x.permute(0, 3, 1, 2)\n\n        attn = self.attn(x)\n        attn = F.interpolate(attn, size=x.shape[2:], mode='bilinear', align_corners=True)\n        x = x * attn\n        x = self.ls(x)\n\n        return shortcut + self.drop_path(x)\n\n\nclass SpatialTransformerBlock(nn.Module):\n    \"\"\"Lightweight transformer block for spatial attention (1-channel, 7x7 grid).\n\n    This is a simplified transformer with single-head, 1-dim attention over spatial\n    positions. 
Used inside SpatialAttention where input is 1 channel at 7x7 resolution.\n    \"\"\"\n\n    def __init__(\n            self,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = dict(device=device, dtype=dtype)\n        super().__init__()\n        # Single-head attention with 1-dim q/k/v (no output projection needed)\n        self.pos_embed = PosConv(in_chans=1, **dd)\n        self.norm1 = nn.LayerNorm(1, **dd)\n        self.qkv = nn.Linear(1, 3, bias=False, **dd)\n\n        # Feedforward: 1 -> 4 -> 1\n        self.norm2 = nn.LayerNorm(1, **dd)\n        self.mlp = Mlp(1, 4, 1, act_layer=nn.GELU, **dd)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        B, C, H, W = x.shape\n\n        # Attention block\n        shortcut = x\n        x_t = x.flatten(2).transpose(1, 2)  # (B, N, 1)\n        x_t = self.norm1(x_t)\n        x_t = self.pos_embed(x_t, (H, W))\n\n        # Simple single-head attention with scalar q/k/v\n        qkv = self.qkv(x_t)  # (B, N, 3)\n        q, k, v = qkv.unbind(-1)  # each (B, N)\n        attn = (q @ k.transpose(-1, -2)).softmax(dim=-1)  # (B, N, N)\n        x_t = (attn @ v).unsqueeze(-1)  # (B, N, 1)\n\n        x_t = x_t.transpose(1, 2).reshape(B, C, H, W)\n        x = shortcut + x_t\n\n        # Feedforward block\n        shortcut = x\n        x_t = x.flatten(2).transpose(1, 2)\n        x_t = self.mlp(self.norm2(x_t))\n        x_t = x_t.transpose(1, 2).reshape(B, C, H, W)\n        x = shortcut + x_t\n\n        return x\n\n\nclass SpatialAttention(nn.Module):\n    \"\"\"Spatial attention module using channel statistics and transformer.\"\"\"\n\n    def __init__(\n            self,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = dict(device=device, dtype=dtype)\n        super().__init__()\n        self.avgpool = nn.AdaptiveAvgPool2d((7, 7))\n        self.conv = nn.Conv2d(2, 1, kernel_size=7, padding=3, **dd)\n        self.attn = SpatialTransformerBlock(**dd)\n\n    def 
forward(self, x: torch.Tensor) -> torch.Tensor:\n        x_avg = x.mean(dim=1, keepdim=True)\n        x_max = x.amax(dim=1, keepdim=True)\n        x = torch.cat([x_avg, x_max], dim=1)\n        x = self.avgpool(x)\n        x = self.conv(x)\n        x = self.attn(x)\n        return x\n\n\nclass TransformerBlock(nn.Module):\n    \"\"\"Transformer block with optional downsampling and convolutional position encoding.\"\"\"\n\n    def __init__(\n            self,\n            inp: int,\n            oup: int,\n            num_heads: int = 8,\n            attn_head_dim: int = 32,\n            downsample: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            drop_path: float = 0.,\n            ls_init_value: Optional[float] = None,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = dict(device=device, dtype=dtype)\n        super().__init__()\n        hidden_dim = int(inp * 4)\n        self.downsample = downsample\n\n        if self.downsample:\n            self.pool1 = nn.MaxPool2d(3, 2, 1)\n            self.pool2 = nn.MaxPool2d(3, 2, 1)\n            self.proj = nn.Conv2d(inp, oup, 1, 1, 0, bias=False, **dd)\n        else:\n            self.pool1 = nn.Identity()\n            self.pool2 = nn.Identity()\n            self.proj = nn.Identity()\n\n        self.pos_embed = PosConv(in_chans=inp, **dd)\n        self.norm1 = nn.LayerNorm(inp, **dd)\n        self.attn = Attention(\n            dim=inp,\n            num_heads=num_heads,\n            attn_head_dim=attn_head_dim,\n            dim_out=oup,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            **dd,\n        )\n        self.ls1 = LayerScale(oup, init_values=ls_init_value, **dd) if ls_init_value else nn.Identity()\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n        self.norm2 = nn.LayerNorm(oup, **dd)\n        self.mlp = Mlp(oup, hidden_dim, oup, act_layer=nn.GELU, drop=proj_drop, **dd)\n        self.ls2 = LayerScale(oup, init_values=ls_init_value, **dd) if ls_init_value else nn.Identity()\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        if self.downsample:\n            shortcut = self.proj(self.pool1(x))\n            x_t = self.pool2(x)\n            B, C, H, W = x_t.shape\n            x_t = x_t.flatten(2).transpose(1, 2)\n            x_t = self.norm1(x_t)\n            x_t = self.pos_embed(x_t, (H, W))\n            x_t = self.ls1(self.attn(x_t))\n            x_t = x_t.transpose(1, 2).reshape(B, -1, H, W)\n            x = shortcut + self.drop_path1(x_t)\n        else:\n            B, C, H, W = x.shape\n            shortcut = x\n            x_t = x.flatten(2).transpose(1, 2)\n            x_t = self.norm1(x_t)\n            x_t = self.pos_embed(x_t, (H, W))\n            x_t = self.ls1(self.attn(x_t))\n            x_t = x_t.transpose(1, 2).reshape(B, -1, H, W)\n            x = shortcut + self.drop_path1(x_t)\n\n        # MLP block\n        B, C, H, W = x.shape\n        shortcut = x\n        x_t = x.flatten(2).transpose(1, 2)\n        x_t = self.ls2(self.mlp(self.norm2(x_t)))\n        x_t = x_t.transpose(1, 2).reshape(B, C, H, W)\n        x = shortcut + self.drop_path2(x_t)\n\n        return x\n\n\nclass PosConv(nn.Module):\n    \"\"\"Convolutional position encoding.\"\"\"\n\n    def __init__(\n            self,\n            in_chans: int,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = dict(device=device, dtype=dtype)\n        super().__init__()\n        self.proj = nn.Conv2d(in_chans, in_chans, kernel_size=3, stride=1, padding=1, bias=True, groups=in_chans, **dd)\n\n    def forward(self, x: torch.Tensor, size: Tuple[int, int]) -> torch.Tensor:\n        B, N, C = 
x.shape\n        H, W = size\n        cnn_feat = x.transpose(1, 2).view(B, C, H, W)\n        x = self.proj(cnn_feat) + cnn_feat\n        return x.flatten(2).transpose(1, 2)\n\n\nclass CSATv2(nn.Module):\n    \"\"\"CSATv2: Frequency-domain vision model with spatial attention.\n\n    A hybrid architecture that processes images in the DCT frequency domain\n    with ConvNeXt-style blocks and transformer attention.\n    \"\"\"\n\n    def __init__(\n            self,\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            dims: Tuple[int, ...] = (32, 72, 168, 386),\n            depths: Tuple[int, ...] = (2, 2, 8, 6),\n            transformer_depths: Tuple[int, ...] = (0, 0, 2, 2),\n            drop_path_rate: float = 0.0,\n            transformer_drop_path: bool = False,\n            ls_init_value: Optional[float] = None,\n            global_pool: str = 'avg',\n            device=None,\n            dtype=None,\n            **kwargs,\n    ) -> None:\n        dd = dict(device=device, dtype=dtype)\n        super().__init__()\n        if in_chans != 3:\n            warnings.warn(\n                f'CSATv2 is designed for 3-channel RGB input. 
'\n                f'in_chans={in_chans} may not work correctly with the DCT stem.'\n            )\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.grad_checkpointing = False\n\n        self.num_features = dims[-1]\n        self.head_hidden_size = self.num_features\n\n        # Build feature_info dynamically\n        self.feature_info = [dict(num_chs=dims[0], reduction=8, module='stem_dct')]\n        reduction = 8\n        for i, dim in enumerate(dims):\n            if i > 0:\n                reduction *= 2\n            self.feature_info.append(dict(num_chs=dim, reduction=reduction, module=f'stages.{i}'))\n\n        # Build drop path rates for all blocks (0 for transformer blocks when transformer_drop_path=False)\n        total_blocks = sum(depths) if transformer_drop_path else sum(d - t for d, t in zip(depths, transformer_depths))\n        dp_iter = iter(torch.linspace(0, drop_path_rate, total_blocks).tolist())\n        dp_rates = []\n        for depth, t_depth in zip(depths, transformer_depths):\n            dp_rates += [next(dp_iter) for _ in range(depth - t_depth)]\n            dp_rates += [next(dp_iter) if transformer_drop_path else 0. 
for _ in range(t_depth)]\n\n        self.stem_dct = LearnableDct2d(8, out_chs=dims[0], **dd)\n\n        # Build stages dynamically\n        dp_iter = iter(dp_rates)\n        stages = []\n        for i, (dim, depth, t_depth) in enumerate(zip(dims, depths, transformer_depths)):\n            layers = (\n                # Downsample at start of stage (except first stage)\n                ([nn.Conv2d(dims[i - 1], dim, kernel_size=2, stride=2, **dd)] if i > 0 else []) +\n                # Conv blocks\n                [Block(dim=dim, drop_path=next(dp_iter), ls_init_value=ls_init_value, **dd) for _ in range(depth - t_depth)] +\n                # Transformer blocks at end of stage\n                [TransformerBlock(inp=dim, oup=dim, drop_path=next(dp_iter), ls_init_value=ls_init_value, **dd) for _ in range(t_depth)] +\n                # Trailing LayerNorm (except last stage)\n                ([LayerNorm2d(dim, eps=1e-6, **dd)] if i < len(depths) - 1 else [])\n            )\n            stages.append(nn.Sequential(*layers))\n        self.stages = nn.Sequential(*stages)\n\n        self.head = NormMlpClassifierHead(dims[-1], num_classes, pool_type=global_pool, **dd)\n\n        # TODO: skip init when on meta device when safe to do so\n        self.init_weights(needs_reset=False)\n\n    def init_weights(self, needs_reset: bool = True):\n        self.apply(partial(self._init_weights, needs_reset=needs_reset))\n\n    def _init_weights(self, m: nn.Module, needs_reset: bool = True) -> None:\n        if isinstance(m, (nn.Conv2d, nn.Linear)):\n            trunc_normal_(m.weight, std=0.02)\n            if m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n        elif needs_reset and hasattr(m, 'reset_parameters'):\n            m.reset_parameters()\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None) -> None:\n        self.num_classes = 
num_classes\n        if global_pool is not None:\n            self.global_pool = global_pool\n        self.head.reset(num_classes, pool_type=global_pool)\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        self.grad_checkpointing = enable\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.stem_dct(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stages, x)\n        else:\n            x = self.stages(x)\n        return x\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\"Forward pass returning intermediate features.\n\n        Args:\n            x: Input image tensor.\n            indices: Indices of features to return (0=stem_dct, 1-4=stages). 
None returns all.\n            norm: Apply norm layer to final intermediate (unused, for API compat).\n            stop_early: Stop iterating when last desired intermediate is reached.\n            output_fmt: Output format, must be 'NCHW'.\n            intermediates_only: Only return intermediate features.\n\n        Returns:\n            List of intermediate features or tuple of (final features, intermediates).\n        \"\"\"\n        assert output_fmt == 'NCHW', 'Output format must be NCHW.'\n        intermediates = []\n        # 5 feature levels: stem_dct (0) + stages 0-3 (1-4)\n        take_indices, max_index = feature_take_indices(len(self.stages) + 1, indices)\n\n        x = self.stem_dct(x)\n        if 0 in take_indices:\n            intermediates.append(x)\n\n        if torch.jit.is_scripting() or not stop_early:\n            stages = self.stages\n        else:\n            # max_index is 0-4, stages are 1-4, so we need max_index stages\n            stages = self.stages[:max_index] if max_index > 0 else []\n\n        for feat_idx, stage in enumerate(stages):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(stage, x)\n            else:\n                x = stage(x)\n            if feat_idx + 1 in take_indices:  # +1 because stem is index 0\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ) -> List[int]:\n        \"\"\"Prune layers not required for specified intermediates.\n\n        Args:\n            indices: Indices of intermediate layers to keep (0=stem_dct, 1-4=stages).\n            prune_norm: Whether to prune the final norm layer.\n            prune_head: Whether to prune the classifier head.\n\n        Returns:\n    
        List of indices that were kept.\n        \"\"\"\n        # 5 feature levels: stem_dct (0) + stages 0-3 (1-4)\n        take_indices, max_index = feature_take_indices(len(self.stages) + 1, indices)\n        # max_index is 0-4, stages are 1-4, so we keep max_index stages\n        self.stages = self.stages[:max_index] if max_index > 0 else nn.Sequential()\n\n        if prune_norm:\n            self.head.norm = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n\n        return take_indices\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        return self.head(x, pre_logits=pre_logits)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.forward_features(x)\n        return self.forward_head(x)\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 512, 512), 'pool_size': (8, 8),\n        'mean': (0.485, 0.456, 0.406), 'std': (0.229, 0.224, 0.225),\n        'interpolation': 'bilinear', 'crop_pct': 1.0,\n        'classifier': 'head.fc', 'first_conv': [],\n        **kwargs,\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'csatv2.r512_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'csatv2_21m.sw_r640_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 640, 640),\n        interpolation='bicubic',\n    ),\n    'csatv2_21m.sw_r512_in1k': _cfg(\n        hf_hub_id='timm/',\n        pool_size=(10, 10),\n        interpolation='bicubic',\n    ),\n})\n\n\ndef checkpoint_filter_fn(state_dict: dict, model: nn.Module) -> dict:\n    \"\"\"Remap original CSATv2 checkpoint to timm format.\n\n    Handles two key structural changes:\n    1) Stage naming: stages1/2/3/4 -> stages.0/1/2/3\n    2) Downsample position: moved from end of stage N to start of stage N+1\n    \"\"\"\n    if \"stages.0.0.grn.weight\" in state_dict:\n        return state_dict  # already in timm format\n\n    import re\n\n    # 
FIXME this downsample idx is wired to the original 'csatv2' model size\n    downsample_idx = {1: 3, 2: 3, 3: 9}  # original stage -> downsample index\n\n    dct_re   = re.compile(r\"^dct\\.\")\n    stage_re = re.compile(r\"^stages([1-4])\\.(\\d+)\\.(.*)$\")\n    head_re  = re.compile(r\"^head\\.\")\n    norm_re  = re.compile(r\"^norm\\.\")\n\n    def remap_stage(m: re.Match) -> str:\n        stage, idx, rest = int(m.group(1)), int(m.group(2)), m.group(3)\n        if stage in downsample_idx and idx == downsample_idx[stage]:\n            return f\"stages.{stage}.0.{rest}\"                 # move downsample to next stage @0\n        if stage == 1:\n            return f\"stages.0.{idx}.{rest}\"                  # stage1 -> stages.0\n        return f\"stages.{stage - 1}.{idx + 1}.{rest}\"        # stage2-4 -> stages.1-3, shift +1\n\n    out = {}\n    for k, v in state_dict.items():\n        # dct -> stem_dct, and Y/Cb/Cr conv names\n        k = dct_re.sub(\"stem_dct.\", k)\n        k = (k.replace(\".Y_Conv.\",  \".conv_y.\")\n               .replace(\".Cb_Conv.\", \".conv_cb.\")\n               .replace(\".Cr_Conv.\", \".conv_cr.\"))\n\n        # stage remap + downsample relocation\n        k = stage_re.sub(remap_stage, k)\n\n        # GRN: gamma/beta -> weight/bias (reshape)\n        if \"grn.gamma\" in k:\n            k, v = k.replace(\"grn.gamma\", \"grn.weight\"), v.reshape(-1)\n        elif \"grn.beta\" in k:\n            k, v = k.replace(\"grn.beta\", \"grn.bias\"), v.reshape(-1)\n\n        # FeedForward(nn.Sequential) -> Mlp + norm renames\n        if \".ff.net.0.\" in k:\n            k = k.replace(\".ff.net.0.\", \".mlp.fc1.\")\n        elif \".ff.net.3.\" in k:\n            k = k.replace(\".ff.net.3.\", \".mlp.fc2.\")\n        elif \".ff_norm.\" in k:\n            k = k.replace(\".ff_norm.\", \".norm2.\")\n        elif \".attn_norm.\" in k:\n            k = k.replace(\".attn_norm.\", \".norm1.\")\n\n        # attention -> attn (handle nested first)\n        if 
\".attention.attention.\" in k:\n            k = (k.replace(\".attention.attention.attn.to_qkv.\", \".attn.attn.qkv.\")\n                   .replace(\".attention.attention.attn.\",        \".attn.attn.\")\n                   .replace(\".attention.attention.\",             \".attn.attn.\"))\n        elif \".attention.\" in k:\n            k = k.replace(\".attention.\", \".attn.\")\n\n        # TransformerBlock attention name remaps\n        if \".attn.to_qkv.\" in k:\n            k = k.replace(\".attn.to_qkv.\", \".attn.qkv.\")\n        elif \".attn.to_out.0.\" in k:\n            k = k.replace(\".attn.to_out.0.\", \".attn.proj.\")\n\n        # .attn.pos_embed -> .pos_embed (but not SpatialTransformerBlock's .attn.attn.pos_embed)\n        if \".attn.pos_embed.\" in k and \".attn.attn.\" not in k:\n            k = k.replace(\".attn.pos_embed.\", \".pos_embed.\")\n\n        # head -> head.fc, norm -> head.norm (order matters)\n        k = head_re.sub(\"head.fc.\", k)\n        k = norm_re.sub(\"head.norm.\", k)\n\n        out[k] = v\n\n    return out\n\n\ndef _create_csatv2(variant: str, pretrained: bool = False, **kwargs) -> CSATv2:\n    out_indices = kwargs.pop('out_indices', (1, 2, 3, 4))\n    return build_model_with_cfg(\n        CSATv2,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=out_indices, flatten_sequential=True),\n        default_cfg=default_cfgs[variant],\n        **kwargs,\n    )\n\n\n@register_model\ndef csatv2(pretrained: bool = False, **kwargs) -> CSATv2:\n    return _create_csatv2('csatv2', pretrained, **kwargs)\n\n\n@register_model\ndef csatv2_21m(pretrained: bool = False, **kwargs) -> CSATv2:\n    # experimental ~20-21M param larger model to validate flexible arch spec\n    model_args = dict(\n        dims = (48, 96, 224, 448),\n        depths = (3, 3, 10, 8),\n        transformer_depths = (0, 0, 4, 3)\n\n    )\n    return _create_csatv2('csatv2_21m', pretrained, 
**dict(model_args, **kwargs))"
  },
  {
    "path": "timm/models/cspnet.py",
    "content": "\"\"\"PyTorch CspNet\n\nA PyTorch implementation of Cross Stage Partial Networks including:\n* CSPResNet50\n* CSPResNeXt50\n* CSPDarkNet53\n* and DarkNet53 for good measure\n\nBased on paper `CSPNet: A New Backbone that can Enhance Learning Capability of CNN` - https://arxiv.org/abs/1911.11929\n\nReference impl via darknet cfg files at https://github.com/WongKinYiu/CrossStagePartialNetworks\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom dataclasses import dataclass, asdict, replace\nfrom functools import partial\nfrom typing import Any, Dict, List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import ClassifierHead, ConvNormAct, DropPath, calculate_drop_path_rates, get_attn, create_act_layer, make_divisible\nfrom ._builder import build_model_with_cfg\nfrom ._manipulate import named_apply, MATCH_PREV_GROUP\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['CspNet']  # model_registry will add each entrypoint fn to this\n\n\n@dataclass\nclass CspStemCfg:\n    out_chs: Union[int, Tuple[int, ...]] = 32\n    stride: Union[int, Tuple[int, ...]] = 2\n    kernel_size: int = 3\n    padding: Union[int, str] = ''\n    pool: Optional[str] = ''\n\n\ndef _pad_arg(x, n):\n    # pads an argument tuple to specified n by padding with last value\n    if not isinstance(x, (tuple, list)):\n        x = (x,)\n    curr_n = len(x)\n    pad_n = n - curr_n\n    if pad_n <= 0:\n        return x[:n]\n    return tuple(x + (x[-1],) * pad_n)\n\n\n@dataclass\nclass CspStagesCfg:\n    depth: Tuple[int, ...] = (3, 3, 5, 2)  # block depth (number of block repeats in stages)\n    out_chs: Tuple[int, ...] 
= (128, 256, 512, 1024)  # number of output channels for blocks in stage\n    stride: Union[int, Tuple[int, ...]] = 2  # stride of stage\n    groups: Union[int, Tuple[int, ...]] = 1  # num kxk conv groups\n    block_ratio: Union[float, Tuple[float, ...]] = 1.0\n    bottle_ratio: Union[float, Tuple[float, ...]] = 1.  # bottleneck-ratio of blocks in stage\n    avg_down: Union[bool, Tuple[bool, ...]] = False\n    attn_layer: Optional[Union[str, Tuple[str, ...]]] = None\n    attn_kwargs: Optional[Union[Dict, Tuple[Dict]]] = None\n    stage_type: Union[str, Tuple[str]] = 'csp'  # stage type ('csp', 'cs2', 'dark')\n    block_type: Union[str, Tuple[str]] = 'bottle'  # blocks type for stages ('bottle', 'dark')\n\n    # cross-stage only\n    expand_ratio: Union[float, Tuple[float, ...]] = 1.0\n    cross_linear: Union[bool, Tuple[bool, ...]] = False\n    down_growth: Union[bool, Tuple[bool, ...]] = False\n\n    def __post_init__(self):\n        n = len(self.depth)\n        assert len(self.out_chs) == n\n        self.stride = _pad_arg(self.stride, n)\n        self.groups = _pad_arg(self.groups, n)\n        self.block_ratio = _pad_arg(self.block_ratio, n)\n        self.bottle_ratio = _pad_arg(self.bottle_ratio, n)\n        self.avg_down = _pad_arg(self.avg_down, n)\n        self.attn_layer = _pad_arg(self.attn_layer, n)\n        self.attn_kwargs = _pad_arg(self.attn_kwargs, n)\n        self.stage_type = _pad_arg(self.stage_type, n)\n        self.block_type = _pad_arg(self.block_type, n)\n\n        self.expand_ratio = _pad_arg(self.expand_ratio, n)\n        self.cross_linear = _pad_arg(self.cross_linear, n)\n        self.down_growth = _pad_arg(self.down_growth, n)\n\n\n@dataclass\nclass CspModelCfg:\n    stem: CspStemCfg\n    stages: CspStagesCfg\n    zero_init_last: bool = True  # zero init last weight (usually bn) in residual path\n    act_layer: str = 'leaky_relu'\n    norm_layer: str = 'batchnorm'\n    aa_layer: Optional[str] = None  # FIXME support string factory for 
def _cs3_cfg(
        width_multiplier=1.0,
        depth_multiplier=1.0,
        avg_down=False,
        act_layer='silu',
        focus=False,
        attn_layer=None,
        attn_kwargs=None,
        bottle_ratio=1.0,
        block_type='dark',
):
    """Build a CspModelCfg for the 'cs3' model family.

    Channel widths and stage depths are scaled by the two multipliers and
    rounded via ``make_divisible`` / ``int``. ``focus=True`` selects a single
    6x6/stride-2 stem conv instead of the default two-conv stem.
    """
    if focus:
        # single large-kernel stem conv (6x6, stride 2, padding 2)
        stem_cfg = CspStemCfg(
            out_chs=make_divisible(64 * width_multiplier),
            kernel_size=6, stride=2, padding=2, pool='')
    else:
        # two 3x3 stem convs (32 -> 64 base widths, scaled)
        stem_cfg = CspStemCfg(
            out_chs=tuple([make_divisible(c * width_multiplier) for c in (32, 64)]),
            kernel_size=3, stride=2, pool='')
    return CspModelCfg(
        stem=stem_cfg,
        stages=CspStagesCfg(
            out_chs=tuple([make_divisible(c * width_multiplier) for c in (128, 256, 512, 1024)]),
            depth=tuple([int(d * depth_multiplier) for d in (3, 6, 9, 3)]),
            stride=2,
            bottle_ratio=bottle_ratio,
            block_ratio=0.5,
            avg_down=avg_down,
            attn_layer=attn_layer,
            attn_kwargs=attn_kwargs,
            stage_type='cs3',
            block_type=block_type,
        ),
        act_layer=act_layer,
    )


class BottleneckBlock(nn.Module):
    """ ResNe(X)t Bottleneck Block

    1x1 reduce -> 3x3 (grouped) -> 1x1 expand, with a residual shortcut and an
    optional attention module placed either before or after the final 1x1 conv.
    """

    def __init__(
            self,
            in_chs: int,
            out_chs: int,
            dilation: int = 1,
            bottle_ratio: float = 0.25,
            groups: int = 1,
            act_layer: Type[nn.Module] = nn.ReLU,
            norm_layer: Type[nn.Module] = nn.BatchNorm2d,
            attn_last: bool = False,
            attn_layer: Optional[Type[nn.Module]] = None,
            drop_block: Optional[Type[nn.Module]] = None,
            drop_path: float = 0.,
            device=None,
            dtype=None,
    ):
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        # hidden width of the bottleneck
        mid_chs = int(round(out_chs * bottle_ratio))
        ckwargs = dict(act_layer=act_layer, norm_layer=norm_layer)
        # attention is applied at exactly one position: after conv2 (attn_first)
        # or after conv3 (attn_last); never both
        attn_last = attn_layer is not None and attn_last
        attn_first = attn_layer is not None and not attn_last

        self.conv1 = ConvNormAct(in_chs, mid_chs, kernel_size=1, **ckwargs, **dd)
        self.conv2 = ConvNormAct(
            mid_chs,
            mid_chs,
            kernel_size=3,
            dilation=dilation,
            groups=groups,
            drop_layer=drop_block,
            **ckwargs,
            **dd,
        )
        self.attn2 = attn_layer(mid_chs, act_layer=act_layer, **dd) if attn_first else nn.Identity()
        # conv3 is linear (apply_act=False); activation happens after the residual add
        self.conv3 = ConvNormAct(mid_chs, out_chs, kernel_size=1, apply_act=False, **ckwargs, **dd)
        self.attn3 = attn_layer(out_chs, act_layer=act_layer, **dd) if attn_last else nn.Identity()
        self.drop_path = DropPath(drop_path) if drop_path else nn.Identity()
        self.act3 = create_act_layer(act_layer)

    def zero_init_last(self):
        # zero the last BN gamma so the block starts as identity (residual init)
        nn.init.zeros_(self.conv3.bn.weight)

    def forward(self, x):
        shortcut = x
        x = self.conv1(x)
        x = self.conv2(x)
        x = self.attn2(x)
        x = self.conv3(x)
        x = self.attn3(x)
        x = self.drop_path(x) + shortcut
        # FIXME partial shortcut needed if first block handled as per original, not used for my current impl
        #x[:, :shortcut.size(1)] += shortcut
        x = self.act3(x)
        return x
class DarkBlock(nn.Module):
    """ DarkNet Block

    1x1 reduce -> optional attention -> 3x3 (grouped) expand, with a residual
    shortcut. Used as the basic residual unit of the DarkNet/CSP-DarkNet models.
    """

    def __init__(
            self,
            in_chs: int,
            out_chs: int,
            dilation: int = 1,
            bottle_ratio: float = 0.5,
            groups: int = 1,
            act_layer: Type[nn.Module] = nn.ReLU,
            norm_layer: Type[nn.Module] = nn.BatchNorm2d,
            attn_layer: Optional[Type[nn.Module]] = None,
            drop_block: Optional[Type[nn.Module]] = None,
            drop_path: float = 0.,
            device=None,
            dtype=None,
    ):
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        # hidden width between the two convs
        mid_chs = int(round(out_chs * bottle_ratio))
        ckwargs = dict(act_layer=act_layer, norm_layer=norm_layer)

        self.conv1 = ConvNormAct(in_chs, mid_chs, kernel_size=1, **ckwargs, **dd)
        # attention (if any) sits between the 1x1 and the 3x3 conv
        self.attn = attn_layer(mid_chs, act_layer=act_layer, **dd) if attn_layer is not None else nn.Identity()
        self.conv2 = ConvNormAct(
            mid_chs,
            out_chs,
            kernel_size=3,
            dilation=dilation,
            groups=groups,
            drop_layer=drop_block,
            **ckwargs,
            **dd,
        )
        self.drop_path = DropPath(drop_path) if drop_path else nn.Identity()

    def zero_init_last(self):
        # zero the last BN gamma so the block starts as identity (residual init)
        nn.init.zeros_(self.conv2.bn.weight)

    def forward(self, x):
        shortcut = x
        x = self.conv1(x)
        x = self.attn(x)
        x = self.conv2(x)
        x = self.drop_path(x) + shortcut
        return x
class EdgeBlock(nn.Module):
    """ EdgeResidual / Fused-MBConv / MobileNetV1-like 3x3 + 1x1 block (w/ activated output)

    Inverse conv ordering vs DarkBlock: 3x3 (grouped) first, then optional
    attention, then a 1x1 projection; residual shortcut around the whole block.
    """

    def __init__(
            self,
            in_chs: int,
            out_chs: int,
            dilation: int = 1,
            bottle_ratio: float = 0.5,
            groups: int = 1,
            act_layer: Type[nn.Module] = nn.ReLU,
            norm_layer: Type[nn.Module] = nn.BatchNorm2d,
            attn_layer: Optional[Type[nn.Module]] = None,
            drop_block: Optional[Type[nn.Module]] = None,
            drop_path: float = 0.,
            device=None,
            dtype=None,
    ):
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        # hidden width between the two convs
        mid_chs = int(round(out_chs * bottle_ratio))
        ckwargs = dict(act_layer=act_layer, norm_layer=norm_layer)

        self.conv1 = ConvNormAct(
            in_chs,
            mid_chs,
            kernel_size=3,
            dilation=dilation,
            groups=groups,
            drop_layer=drop_block,
            **ckwargs,
            **dd,
        )
        self.attn = attn_layer(mid_chs, act_layer=act_layer, **dd) if attn_layer is not None else nn.Identity()
        self.conv2 = ConvNormAct(mid_chs, out_chs, kernel_size=1, **ckwargs, **dd)
        self.drop_path = DropPath(drop_path) if drop_path else nn.Identity()

    def zero_init_last(self):
        # zero the last BN gamma so the block starts as identity (residual init)
        nn.init.zeros_(self.conv2.bn.weight)

    def forward(self, x):
        shortcut = x
        x = self.conv1(x)
        x = self.attn(x)
        x = self.conv2(x)
        x = self.drop_path(x) + shortcut
        return x
class CrossStage(nn.Module):
    """Cross Stage.

    CSP stage: optional downsample, 1x1 expansion, channel split into a
    'cross' path and a block path, per-path transition convs, then a fused
    transition over the re-concatenated channels.
    """
    def __init__(
            self,
            in_chs: int,
            out_chs: int,
            stride: int,
            dilation: int,
            depth: int,
            block_ratio: float = 1.,
            bottle_ratio: float = 1.,
            expand_ratio: float = 1.,
            groups: int = 1,
            first_dilation: Optional[int] = None,
            avg_down: bool = False,
            down_growth: bool = False,
            cross_linear: bool = False,
            block_dpr: Optional[List[float]] = None,
            block_fn: Type[nn.Module] = BottleneckBlock,
            device=None,
            dtype=None,
            **block_kwargs,
    ):
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        first_dilation = first_dilation or dilation
        down_chs = out_chs if down_growth else in_chs  # grow downsample channels to output channels
        self.expand_chs = exp_chs = int(round(out_chs * expand_ratio))
        block_out_chs = int(round(out_chs * block_ratio))
        # act/norm are read (not popped) from block_kwargs so the blocks also get them;
        # aa_layer is popped since blocks don't accept it
        conv_kwargs = dict(act_layer=block_kwargs.get('act_layer'), norm_layer=block_kwargs.get('norm_layer'))
        aa_layer = block_kwargs.pop('aa_layer', None)

        if stride != 1 or first_dilation != dilation:
            if avg_down:
                self.conv_down = nn.Sequential(
                    nn.AvgPool2d(2) if stride == 2 else nn.Identity(),  # FIXME dilation handling
                    ConvNormAct(in_chs, out_chs, kernel_size=1, stride=1, groups=groups, **conv_kwargs, **dd)
                )
            else:
                self.conv_down = ConvNormAct(
                    in_chs,
                    down_chs,
                    kernel_size=3,
                    stride=stride,
                    dilation=first_dilation,
                    groups=groups,
                    aa_layer=aa_layer,
                    **conv_kwargs,
                    **dd,
                )
            prev_chs = down_chs
        else:
            self.conv_down = nn.Identity()
            prev_chs = in_chs

        # FIXME this 1x1 expansion is pushed down into the cross and block paths in the darknet cfgs. Also,
        # there is also special case for the first stage for some of the model that results in uneven split
        # across the two paths. I did it this way for simplicity for now.
        self.conv_exp = ConvNormAct(
            prev_chs,
            exp_chs,
            kernel_size=1,
            apply_act=not cross_linear,
            **conv_kwargs,
            **dd,
        )
        prev_chs = exp_chs // 2  # output of conv_exp is always split in two

        self.blocks = nn.Sequential()
        for i in range(depth):
            self.blocks.add_module(str(i), block_fn(
                in_chs=prev_chs,
                out_chs=block_out_chs,
                dilation=dilation,
                bottle_ratio=bottle_ratio,
                groups=groups,
                drop_path=block_dpr[i] if block_dpr is not None else 0.,
                **block_kwargs,
                **dd,
            ))
            prev_chs = block_out_chs

        # transition convs
        self.conv_transition_b = ConvNormAct(prev_chs, exp_chs // 2, kernel_size=1, **conv_kwargs, **dd)
        self.conv_transition = ConvNormAct(exp_chs, out_chs, kernel_size=1, **conv_kwargs, **dd)

    def forward(self, x):
        x = self.conv_down(x)
        x = self.conv_exp(x)
        # split expanded channels: xs = cross (shortcut) path, xb = block path
        xs, xb = x.split(self.expand_chs // 2, dim=1)
        xb = self.blocks(xb)
        xb = self.conv_transition_b(xb).contiguous()
        out = self.conv_transition(torch.cat([xs, xb], dim=1))
        return out
class CrossStage3(nn.Module):
    """Cross Stage 3.
    Similar to CrossStage, but with only one transition conv for the output.
    """
    def __init__(
            self,
            in_chs: int,
            out_chs: int,
            stride: int,
            dilation: int,
            depth: int,
            block_ratio: float = 1.,
            bottle_ratio: float = 1.,
            expand_ratio: float = 1.,
            groups: int = 1,
            first_dilation: Optional[int] = None,
            avg_down: bool = False,
            down_growth: bool = False,
            cross_linear: bool = False,
            block_dpr: Optional[List[float]] = None,
            block_fn: Type[nn.Module] = BottleneckBlock,
            device=None,
            dtype=None,
            **block_kwargs,
    ):
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        first_dilation = first_dilation or dilation
        down_chs = out_chs if down_growth else in_chs  # grow downsample channels to output channels
        self.expand_chs = exp_chs = int(round(out_chs * expand_ratio))
        block_out_chs = int(round(out_chs * block_ratio))
        # act/norm are read (not popped) from block_kwargs so the blocks also get them;
        # aa_layer is popped since blocks don't accept it
        conv_kwargs = dict(act_layer=block_kwargs.get('act_layer'), norm_layer=block_kwargs.get('norm_layer'))
        aa_layer = block_kwargs.pop('aa_layer', None)

        if stride != 1 or first_dilation != dilation:
            if avg_down:
                self.conv_down = nn.Sequential(
                    nn.AvgPool2d(2) if stride == 2 else nn.Identity(),  # FIXME dilation handling
                    ConvNormAct(in_chs, out_chs, kernel_size=1, stride=1, groups=groups, **conv_kwargs, **dd)
                )
            else:
                self.conv_down = ConvNormAct(
                    in_chs,
                    down_chs,
                    kernel_size=3,
                    stride=stride,
                    dilation=first_dilation,
                    groups=groups,
                    aa_layer=aa_layer,
                    **conv_kwargs,
                    **dd,
                )
            prev_chs = down_chs
        else:
            # BUG FIX: was `self.conv_down = None`, but forward() calls
            # self.conv_down(x) unconditionally, raising TypeError whenever a
            # stage had stride == 1 and unchanged dilation. Use nn.Identity()
            # to match CrossStage; nn.Identity has no params so checkpoints
            # are unaffected.
            self.conv_down = nn.Identity()
            prev_chs = in_chs

        # expansion conv
        self.conv_exp = ConvNormAct(
            prev_chs,
            exp_chs,
            kernel_size=1,
            apply_act=not cross_linear,
            **conv_kwargs,
            **dd,
        )
        prev_chs = exp_chs // 2  # expanded output is split in 2 for blocks and cross stage

        self.blocks = nn.Sequential()
        for i in range(depth):
            self.blocks.add_module(str(i), block_fn(
                in_chs=prev_chs,
                out_chs=block_out_chs,
                dilation=dilation,
                bottle_ratio=bottle_ratio,
                groups=groups,
                drop_path=block_dpr[i] if block_dpr is not None else 0.,
                **block_kwargs,
                **dd,
            ))
            prev_chs = block_out_chs

        # transition convs
        self.conv_transition = ConvNormAct(exp_chs, out_chs, kernel_size=1, **conv_kwargs, **dd)

    def forward(self, x):
        x = self.conv_down(x)
        x = self.conv_exp(x)
        # x1 = block path, x2 = cross (shortcut) path
        x1, x2 = x.split(self.expand_chs // 2, dim=1)
        x1 = self.blocks(x1)
        out = self.conv_transition(torch.cat([x1, x2], dim=1))
        return out
class DarkStage(nn.Module):
    """DarkNet stage.

    Plain (non-cross) stage: a strided downsample conv (or avg-pool + 1x1)
    followed by a sequence of residual blocks; no channel split.
    """

    def __init__(
            self,
            in_chs: int,
            out_chs: int,
            stride: int,
            dilation: int,
            depth: int,
            block_ratio: float = 1.,
            bottle_ratio: float = 1.,
            groups: int = 1,
            first_dilation: Optional[int] = None,
            avg_down: bool = False,
            block_fn: Type[nn.Module] = BottleneckBlock,
            block_dpr: Optional[List[float]] = None,
            device=None,
            dtype=None,
            **block_kwargs,
    ):
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        first_dilation = first_dilation or dilation
        # act/norm are read (not popped) from block_kwargs so the blocks also get them;
        # aa_layer is popped since blocks don't accept it
        conv_kwargs = dict(act_layer=block_kwargs.get('act_layer'), norm_layer=block_kwargs.get('norm_layer'))
        aa_layer = block_kwargs.pop('aa_layer', None)

        # NOTE(review): unlike CrossStage, conv_down is created even when
        # stride == 1 (it is then a stride-1 3x3 conv, or Identity-pool + 1x1)
        if avg_down:
            self.conv_down = nn.Sequential(
                nn.AvgPool2d(2) if stride == 2 else nn.Identity(),   # FIXME dilation handling
                ConvNormAct(in_chs, out_chs, kernel_size=1, stride=1, groups=groups, **conv_kwargs, **dd)
            )
        else:
            self.conv_down = ConvNormAct(
                in_chs,
                out_chs,
                kernel_size=3,
                stride=stride,
                dilation=first_dilation,
                groups=groups,
                aa_layer=aa_layer,
                **conv_kwargs,
                **dd,
            )

        prev_chs = out_chs
        block_out_chs = int(round(out_chs * block_ratio))
        self.blocks = nn.Sequential()
        for i in range(depth):
            self.blocks.add_module(str(i), block_fn(
                in_chs=prev_chs,
                out_chs=block_out_chs,
                dilation=dilation,
                bottle_ratio=bottle_ratio,
                groups=groups,
                drop_path=block_dpr[i] if block_dpr is not None else 0.,
                **block_kwargs,
                **dd,
            ))
            prev_chs = block_out_chs

    def forward(self, x):
        x = self.conv_down(x)
        x = self.blocks(x)
        return x
def create_csp_stem(
        in_chans: int = 3,
        out_chs: int = 32,
        kernel_size: int = 3,
        stride: int = 2,
        pool: str = '',
        padding: str = '',
        act_layer: Type[nn.Module] = nn.ReLU,
        norm_layer: Type[nn.Module] = nn.BatchNorm2d,
        aa_layer: Optional[Type[nn.Module]] = None,
        device=None,
        dtype=None,
):
    """Build the stem (one or more convs, optional pool) and its feature metadata.

    Returns:
        Tuple of (stem nn.Sequential, list of feature-info dicts with keys
        'num_chs', 'reduction', 'module'). The last entry describes the stem
        output; earlier entries mark intermediate strided points.
    """
    dd = {'device': device, 'dtype': dtype}
    stem = nn.Sequential()
    feature_info = []
    if not isinstance(out_chs, (tuple, list)):
        out_chs = [out_chs]
    stem_depth = len(out_chs)
    assert stem_depth
    assert stride in (1, 2, 4)
    prev_feat = None
    prev_chs = in_chans
    last_idx = stem_depth - 1
    stem_stride = 1
    for i, chs in enumerate(out_chs):
        conv_name = f'conv{i + 1}'
        # first conv takes a stride-2 step; the last conv takes another one
        # when total stride > 2 and no pool layer will provide it
        conv_stride = 2 if (i == 0 and stride > 1) or (i == last_idx and stride > 2 and not pool) else 1
        if conv_stride > 1 and prev_feat is not None:
            feature_info.append(prev_feat)
        stem.add_module(conv_name, ConvNormAct(
            prev_chs, chs, kernel_size,
            stride=conv_stride,
            padding=padding if i == 0 else '',
            act_layer=act_layer,
            norm_layer=norm_layer,
            **dd,
        ))
        stem_stride *= conv_stride
        prev_chs = chs
        prev_feat = dict(num_chs=prev_chs, reduction=stem_stride, module='.'.join(['stem', conv_name]))
    if pool:
        assert stride > 2
        if prev_feat is not None:
            feature_info.append(prev_feat)
        if aa_layer is not None:
            # anti-aliased downsample: stride-1 max pool followed by the aa layer
            stem.add_module('pool', nn.MaxPool2d(kernel_size=3, stride=1, padding=1))
            stem.add_module('aa', aa_layer(channels=prev_chs, stride=2, **dd))
            pool_name = 'aa'
        else:
            stem.add_module('pool', nn.MaxPool2d(kernel_size=3, stride=2, padding=1))
            pool_name = 'pool'
        stem_stride *= 2
        prev_feat = dict(num_chs=prev_chs, reduction=stem_stride, module='.'.join(['stem', pool_name]))
    feature_info.append(prev_feat)
    return stem, feature_info


def _get_stage_fn(stage_args):
    """Pop 'stage_type' from stage_args and return (stage class, remaining args)."""
    stage_type = stage_args.pop('stage_type')
    assert stage_type in ('dark', 'csp', 'cs3')
    if stage_type == 'dark':
        # DarkStage does not accept the cross-stage-only options
        stage_args.pop('expand_ratio', None)
        stage_args.pop('cross_linear', None)
        stage_args.pop('down_growth', None)
        stage_fn = DarkStage
    elif stage_type == 'csp':
        stage_fn = CrossStage
    else:
        stage_fn = CrossStage3
    return stage_fn, stage_args
def _get_block_fn(stage_args):
    """Pop 'block_type' from stage_args and return (block class, remaining args)."""
    block_type = stage_args.pop('block_type')
    assert block_type in ('dark', 'edge', 'bottle')
    if block_type == 'dark':
        return DarkBlock, stage_args
    elif block_type == 'edge':
        return EdgeBlock, stage_args
    else:
        return BottleneckBlock, stage_args


def _get_attn_fn(stage_args):
    """Pop attn config from stage_args and return (attn ctor or None, remaining args)."""
    attn_layer = stage_args.pop('attn_layer')
    attn_kwargs = stage_args.pop('attn_kwargs', None) or {}
    if attn_layer is not None:
        attn_layer = get_attn(attn_layer)
        if attn_kwargs:
            # bind kwargs so the stage can call attn_layer(channels, ...)
            attn_layer = partial(attn_layer, **attn_kwargs)
    return attn_layer, stage_args


def create_csp_stages(
        cfg: CspModelCfg,
        drop_path_rate: float,
        output_stride: int,
        stem_feat: Dict[str, Any],
        device=None,
        dtype=None,
):
    """Build all stages from cfg and return (nn.Sequential of stages, feature_info list).

    Converts strides to dilation once the network stride reaches
    ``output_stride``, and distributes stochastic-depth rates per stage.
    """
    dd = {'device': device, 'dtype': dtype}
    cfg_dict = asdict(cfg.stages)
    num_stages = len(cfg.stages.depth)
    cfg_dict['block_dpr'] = [None] * num_stages if not drop_path_rate else \
        calculate_drop_path_rates(drop_path_rate, cfg.stages.depth, stagewise=True)
    # transpose dict-of-tuples into one kwargs dict per stage
    stage_args = [dict(zip(cfg_dict.keys(), values)) for values in zip(*cfg_dict.values())]
    block_kwargs = dict(
        act_layer=cfg.act_layer,
        norm_layer=cfg.norm_layer,
    )

    dilation = 1
    net_stride = stem_feat['reduction']
    prev_chs = stem_feat['num_chs']
    prev_feat = stem_feat
    feature_info = []
    stages = []
    # NOTE(review): the loop variable deliberately shadows the stage_args list;
    # works because the list is fully materialized before iteration
    for stage_idx, stage_args in enumerate(stage_args):
        stage_fn, stage_args = _get_stage_fn(stage_args)
        block_fn, stage_args = _get_block_fn(stage_args)
        attn_fn, stage_args = _get_attn_fn(stage_args)
        stride = stage_args.pop('stride')
        if stride != 1 and prev_feat:
            feature_info.append(prev_feat)
        if net_stride >= output_stride and stride > 1:
            # swap stride for dilation once target output stride is reached
            dilation *= stride
            stride = 1
        net_stride *= stride
        first_dilation = 1 if dilation in (1, 2) else 2

        stages += [stage_fn(
            prev_chs,
            **stage_args,
            stride=stride,
            first_dilation=first_dilation,
            dilation=dilation,
            block_fn=block_fn,
            aa_layer=cfg.aa_layer,
            attn_layer=attn_fn,  # will be passed through stage as block_kwargs
            **block_kwargs,
            **dd,
        )]
        prev_chs = stage_args['out_chs']
        prev_feat = dict(num_chs=prev_chs, reduction=net_stride, module=f'stages.{stage_idx}')

    feature_info.append(prev_feat)
    return nn.Sequential(*stages), feature_info
class CspNet(nn.Module):
    """Cross Stage Partial base model.

    Paper: `CSPNet: A New Backbone that can Enhance Learning Capability of CNN` - https://arxiv.org/abs/1911.11929
    Ref Impl: https://github.com/WongKinYiu/CrossStagePartialNetworks

    NOTE: There are differences in the way I handle the 1x1 'expansion' conv in this impl vs the
    darknet impl. I did it this way for simplicity and less special cases.
    """

    def __init__(
            self,
            cfg: CspModelCfg,
            in_chans: int = 3,
            num_classes: int = 1000,
            output_stride: int = 32,
            global_pool: str = 'avg',
            drop_rate: float = 0.,
            drop_path_rate: float = 0.,
            zero_init_last: bool = True,
            device=None,
            dtype=None,
            **kwargs,
    ):
        """
        Args:
            cfg (CspModelCfg): Model architecture configuration
            in_chans (int): Number of input channels (default: 3)
            num_classes (int): Number of classifier classes (default: 1000)
            output_stride (int): Output stride of network, one of (8, 16, 32) (default: 32)
            global_pool (str): Global pooling type (default: 'avg')
            drop_rate (float): Dropout rate (default: 0.)
            drop_path_rate (float): Stochastic depth drop-path rate (default: 0.)
            zero_init_last (bool): Zero-init last weight of residual path
            kwargs (dict): Extra kwargs overlayed onto cfg
        """
        super().__init__()
        dd = {'device': device, 'dtype': dtype}
        self.num_classes = num_classes
        self.in_chans = in_chans
        self.drop_rate = drop_rate
        assert output_stride in (8, 16, 32)

        cfg = replace(cfg, **kwargs)  # overlay kwargs onto cfg
        layer_args = dict(
            act_layer=cfg.act_layer,
            norm_layer=cfg.norm_layer,
            aa_layer=cfg.aa_layer
        )
        self.feature_info = []

        # Construct the stem
        self.stem, stem_feat_info = create_csp_stem(in_chans, **asdict(cfg.stem), **layer_args, **dd)
        # all but the final stem feature; the last one is consumed by the stages
        self.feature_info.extend(stem_feat_info[:-1])

        # Construct the stages
        self.stages, stage_feat_info = create_csp_stages(
            cfg,
            drop_path_rate=drop_path_rate,
            output_stride=output_stride,
            stem_feat=stem_feat_info[-1],
            **dd,
        )
        prev_chs = stage_feat_info[-1]['num_chs']
        self.feature_info.extend(stage_feat_info)

        # Construct the head
        self.num_features = self.head_hidden_size = prev_chs
        self.head = ClassifierHead(
            in_features=prev_chs,
            num_classes=num_classes,
            pool_type=global_pool,
            drop_rate=drop_rate,
            **dd,
        )

        named_apply(partial(_init_weights, zero_init_last=zero_init_last), self)

    @torch.jit.ignore
    def group_matcher(self, coarse=False):
        # regex groups for layer-wise lr decay / freezing utilities
        matcher = dict(
            stem=r'^stem',
            blocks=r'^stages\.(\d+)' if coarse else [
                (r'^stages\.(\d+)\.blocks\.(\d+)', None),
                (r'^stages\.(\d+)\..*transition', MATCH_PREV_GROUP),  # map to last block in stage
                (r'^stages\.(\d+)', (0,)),
            ]
        )
        return matcher

    @torch.jit.ignore
    def set_grad_checkpointing(self, enable=True):
        assert not enable, 'gradient checkpointing not supported'

    @torch.jit.ignore
    def get_classifier(self) -> nn.Module:
        return self.head.fc

    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):
        self.num_classes = num_classes
        self.head.reset(num_classes, global_pool)

    def forward_features(self, x):
        x = self.stem(x)
        x = self.stages(x)
        return x

    def forward_head(self, x, pre_logits: bool = False):
        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)

    def forward(self, x):
        x = self.forward_features(x)
        x = self.forward_head(x)
        return x


def _init_weights(module, name, zero_init_last=False):
    """Per-module weight init applied via named_apply (name arg unused here)."""
    if isinstance(module, nn.Conv2d):
        nn.init.kaiming_normal_(module.weight, mode='fan_out', nonlinearity='relu')
        if module.bias is not None:
            nn.init.zeros_(module.bias)
    elif isinstance(module, nn.Linear):
        nn.init.normal_(module.weight, mean=0.0, std=0.01)
        if module.bias is not None:
            nn.init.zeros_(module.bias)
    elif zero_init_last and hasattr(module, 'zero_init_last'):
        # residual-path zero init delegated to the block itself
        module.zero_init_last()
model_cfgs = dict(
    # CSP ResNe(X)t variants
    cspresnet50=CspModelCfg(
        stem=CspStemCfg(out_chs=64, kernel_size=7, stride=4, pool='max'),
        stages=CspStagesCfg(
            depth=(3, 3, 5, 2),
            out_chs=(128, 256, 512, 1024),
            stride=(1, 2),
            expand_ratio=2.,
            bottle_ratio=0.5,
            cross_linear=True,
        ),
    ),
    cspresnet50d=CspModelCfg(
        stem=CspStemCfg(out_chs=(32, 32, 64), kernel_size=3, stride=4, pool='max'),
        stages=CspStagesCfg(
            depth=(3, 3, 5, 2),
            out_chs=(128, 256, 512, 1024),
            stride=(1, 2),
            expand_ratio=2.,
            bottle_ratio=0.5,
            block_ratio=1.,
            cross_linear=True,
        ),
    ),
    cspresnet50w=CspModelCfg(
        stem=CspStemCfg(out_chs=(32, 32, 64), kernel_size=3, stride=4, pool='max'),
        stages=CspStagesCfg(
            depth=(3, 3, 5, 2),
            out_chs=(256, 512, 1024, 2048),
            stride=(1, 2),
            expand_ratio=1.,
            bottle_ratio=0.25,
            block_ratio=0.5,
            cross_linear=True,
        ),
    ),
    cspresnext50=CspModelCfg(
        stem=CspStemCfg(out_chs=64, kernel_size=7, stride=4, pool='max'),
        stages=CspStagesCfg(
            depth=(3, 3, 5, 2),
            out_chs=(256, 512, 1024, 2048),
            stride=(1, 2),
            groups=32,
            expand_ratio=1.,
            bottle_ratio=1.,
            block_ratio=0.5,
            cross_linear=True,
        ),
    ),
    # CSP / plain DarkNet variants (first-stage values broadcast via _pad_arg)
    cspdarknet53=CspModelCfg(
        stem=CspStemCfg(out_chs=32, kernel_size=3, stride=1, pool=''),
        stages=CspStagesCfg(
            depth=(1, 2, 8, 8, 4),
            out_chs=(64, 128, 256, 512, 1024),
            stride=2,
            expand_ratio=(2., 1.),
            bottle_ratio=(0.5, 1.),
            block_ratio=(1., 0.5),
            down_growth=True,
            block_type='dark',
        ),
    ),
    darknet17=CspModelCfg(
        stem=CspStemCfg(out_chs=32, kernel_size=3, stride=1, pool=''),
        stages=CspStagesCfg(
            depth=(1,) * 5,
            out_chs=(64, 128, 256, 512, 1024),
            stride=(2,),
            bottle_ratio=(0.5,),
            block_ratio=(1.,),
            stage_type='dark',
            block_type='dark',
        ),
    ),
    darknet21=CspModelCfg(
        stem=CspStemCfg(out_chs=32, kernel_size=3, stride=1, pool=''),
        stages=CspStagesCfg(
            depth=(1, 1, 1, 2, 2),
            out_chs=(64, 128, 256, 512, 1024),
            stride=(2,),
            bottle_ratio=(0.5,),
            block_ratio=(1.,),
            stage_type='dark',
            block_type='dark',
        ),
    ),
    sedarknet21=CspModelCfg(
        stem=CspStemCfg(out_chs=32, kernel_size=3, stride=1, pool=''),
        stages=CspStagesCfg(
            depth=(1, 1, 1, 2, 2),
            out_chs=(64, 128, 256, 512, 1024),
            stride=2,
            bottle_ratio=0.5,
            block_ratio=1.,
            attn_layer='se',
            stage_type='dark',
            block_type='dark',
        ),
    ),
    darknet53=CspModelCfg(
        stem=CspStemCfg(out_chs=32, kernel_size=3, stride=1, pool=''),
        stages=CspStagesCfg(
            depth=(1, 2, 8, 8, 4),
            out_chs=(64, 128, 256, 512, 1024),
            stride=2,
            bottle_ratio=0.5,
            block_ratio=1.,
            stage_type='dark',
            block_type='dark',
        ),
    ),
    darknetaa53=CspModelCfg(
        stem=CspStemCfg(out_chs=32, kernel_size=3, stride=1, pool=''),
        stages=CspStagesCfg(
            depth=(1, 2, 8, 8, 4),
            out_chs=(64, 128, 256, 512, 1024),
            stride=2,
            bottle_ratio=0.5,
            block_ratio=1.,
            avg_down=True,
            stage_type='dark',
            block_type='dark',
        ),
    ),

    # cs3 family (YOLOv5-style width/depth scaling via _cs3_cfg)
    cs3darknet_s=_cs3_cfg(width_multiplier=0.5, depth_multiplier=0.5),
    cs3darknet_m=_cs3_cfg(width_multiplier=0.75, depth_multiplier=0.67),
    cs3darknet_l=_cs3_cfg(),
    cs3darknet_x=_cs3_cfg(width_multiplier=1.25, depth_multiplier=1.33),

    cs3darknet_focus_s=_cs3_cfg(width_multiplier=0.5, depth_multiplier=0.5, focus=True),
    cs3darknet_focus_m=_cs3_cfg(width_multiplier=0.75, depth_multiplier=0.67, focus=True),
    cs3darknet_focus_l=_cs3_cfg(focus=True),
    cs3darknet_focus_x=_cs3_cfg(width_multiplier=1.25, depth_multiplier=1.33, focus=True),

    cs3sedarknet_l=_cs3_cfg(attn_layer='se', attn_kwargs=dict(rd_ratio=.25)),
    cs3sedarknet_x=_cs3_cfg(attn_layer='se', width_multiplier=1.25, depth_multiplier=1.33),

    cs3sedarknet_xdw=CspModelCfg(
        stem=CspStemCfg(out_chs=(32, 64), kernel_size=3, stride=2, pool=''),
        stages=CspStagesCfg(
            depth=(3, 6, 12, 4),
            out_chs=(256, 512, 1024, 2048),
            stride=2,
            groups=(1, 1, 256, 512),
            bottle_ratio=0.5,
            block_ratio=0.5,
            attn_layer='se',
        ),
        act_layer='silu',
    ),

    cs3edgenet_x=_cs3_cfg(width_multiplier=1.25, depth_multiplier=1.33, bottle_ratio=1.5, block_type='edge'),
    cs3se_edgenet_x=_cs3_cfg(
        width_multiplier=1.25, depth_multiplier=1.33, bottle_ratio=1.5, block_type='edge',
        attn_layer='se', attn_kwargs=dict(rd_ratio=.25)),
)


def _create_cspnet(variant, pretrained=False, **kwargs):
    """Instantiate a CspNet variant via build_model_with_cfg."""
    if variant.startswith(('darknet', 'cspdarknet')):
        # NOTE: DarkNet is one of few models with stride==1 features w/ 6 out_indices [0..5]
        default_out_indices = (0, 1, 2, 3, 4, 5)
    else:
        default_out_indices = (0, 1, 2, 3, 4)
    out_indices = kwargs.pop('out_indices', default_out_indices)
    return build_model_with_cfg(
        CspNet, variant, pretrained,
        model_cfg=model_cfgs[variant],
        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),
        **kwargs)


def _cfg(url='', **kwargs):
    """Shared default pretrained-cfg dict; kwargs override the defaults."""
    return {
        'url': url,
        'num_classes': 1000, 'input_size': (3, 256, 256), 'pool_size': (8, 8),
        'crop_pct': 0.887, 'interpolation': 'bilinear',
        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,
        'first_conv': 'stem.conv1.conv', 'classifier': 'head.fc', 'license': 'apache-2.0',
        **kwargs
    }
'cspresnet50w.untrained': _cfg(),\n    'cspresnext50.ra_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/cspresnext50_ra_224-648b4713.pth',\n    ),\n    'cspdarknet53.ra_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/cspdarknet53_ra_256-d05c7c21.pth'),\n\n    'darknet17.untrained': _cfg(),\n    'darknet21.untrained': _cfg(),\n    'sedarknet21.untrained': _cfg(),\n    'darknet53.c2ns_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/darknet53_256_c2ns-3aeff817.pth',\n        interpolation='bicubic', test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'darknetaa53.c2ns_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/darknetaa53_c2ns-5c28ec8a.pth',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n\n    'cs3darknet_s.untrained': _cfg(interpolation='bicubic'),\n    'cs3darknet_m.c2ns_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/cs3darknet_m_c2ns-43f06604.pth',\n        interpolation='bicubic', test_input_size=(3, 288, 288), test_crop_pct=0.95,\n    ),\n    'cs3darknet_l.c2ns_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/cs3darknet_l_c2ns-16220c5d.pth',\n        interpolation='bicubic', test_input_size=(3, 288, 288), test_crop_pct=0.95),\n    'cs3darknet_x.c2ns_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/cs3darknet_x_c2ns-4e4490aa.pth',\n        interpolation='bicubic', crop_pct=0.95, test_input_size=(3, 288, 288), 
test_crop_pct=1.0),\n\n    'cs3darknet_focus_s.ra4_e3600_r256_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        interpolation='bicubic', test_input_size=(3, 320, 320), test_crop_pct=1.0),\n    'cs3darknet_focus_m.c2ns_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/cs3darknet_focus_m_c2ns-e23bed41.pth',\n        interpolation='bicubic', test_input_size=(3, 288, 288), test_crop_pct=0.95),\n    'cs3darknet_focus_l.c2ns_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/cs3darknet_focus_l_c2ns-65ef8888.pth',\n        interpolation='bicubic', test_input_size=(3, 288, 288), test_crop_pct=0.95),\n    'cs3darknet_focus_x.untrained': _cfg(interpolation='bicubic'),\n\n    'cs3sedarknet_l.c2ns_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/cs3sedarknet_l_c2ns-e8d1dc13.pth',\n        interpolation='bicubic', test_input_size=(3, 288, 288), test_crop_pct=0.95),\n    'cs3sedarknet_x.c2ns_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/cs3sedarknet_x_c2ns-b4d0abc0.pth',\n        interpolation='bicubic', test_input_size=(3, 288, 288), test_crop_pct=1.0),\n\n    'cs3sedarknet_xdw.untrained': _cfg(interpolation='bicubic'),\n\n    'cs3edgenet_x.c2_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/cs3edgenet_x_c2-2e1610a9.pth',\n        interpolation='bicubic', test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'cs3se_edgenet_x.c2ns_in1k': _cfg(\n        hf_hub_id='timm/',\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/cs3se_edgenet_x_c2ns-76f8e3ac.pth',\n        interpolation='bicubic', crop_pct=0.95, test_input_size=(3, 320, 320), test_crop_pct=1.0),\n})\n\n\n@register_model\ndef cspresnet50(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cspresnet50', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cspresnet50d(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cspresnet50d', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cspresnet50w(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cspresnet50w', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cspresnext50(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cspresnext50', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cspdarknet53(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cspdarknet53', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef darknet17(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('darknet17', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef darknet21(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('darknet21', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef sedarknet21(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('sedarknet21', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef darknet53(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('darknet53', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef darknetaa53(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('darknetaa53', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cs3darknet_s(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cs3darknet_s', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cs3darknet_m(pretrained=False, **kwargs) -> CspNet:\n    return 
_create_cspnet('cs3darknet_m', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cs3darknet_l(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cs3darknet_l', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cs3darknet_x(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cs3darknet_x', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cs3darknet_focus_s(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cs3darknet_focus_s', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cs3darknet_focus_m(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cs3darknet_focus_m', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cs3darknet_focus_l(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cs3darknet_focus_l', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cs3darknet_focus_x(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cs3darknet_focus_x', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cs3sedarknet_l(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cs3sedarknet_l', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cs3sedarknet_x(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cs3sedarknet_x', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cs3sedarknet_xdw(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cs3sedarknet_xdw', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cs3edgenet_x(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cs3edgenet_x', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef cs3se_edgenet_x(pretrained=False, **kwargs) -> CspNet:\n    return _create_cspnet('cs3se_edgenet_x', pretrained=pretrained, **kwargs)\n"
  },
  {
    "path": "timm/models/davit.py",
    "content": "\"\"\" DaViT: Dual Attention Vision Transformers\n\nAs described in https://arxiv.org/abs/2204.03645\n\nInput size invariant transformer architecture that combines channel and spacial\nattention in each block. The attention mechanisms used are linear in complexity.\n\nDaViT model defs and weights adapted from https://github.com/dingmyu/davit, original copyright below\n\n\"\"\"\n# Copyright (c) 2022 Mingyu Ding\n# All rights reserved.\n# This source code is licensed under the MIT license\nfrom functools import partial\nfrom typing import List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom torch import Tensor\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import DropPath, calculate_drop_path_rates, to_2tuple, trunc_normal_, Mlp, LayerNorm2d, get_norm_layer, use_fused_attn\nfrom timm.layers import NormMlpClassifierHead, ClassifierHead\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_function\nfrom ._manipulate import checkpoint, checkpoint_seq\nfrom ._registry import generate_default_cfgs, register_model\n\n__all__ = ['DaVit']\n\n\nclass ConvPosEnc(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            k: int = 3,\n            act: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.proj = nn.Conv2d(\n            dim,\n            dim,\n            kernel_size=k,\n            stride=1,\n            padding=k // 2,\n            groups=dim,\n            **dd,\n        )\n        self.act = nn.GELU() if act else nn.Identity()\n\n    def forward(self, x: Tensor):\n        feat = self.proj(x)\n        x = x + self.act(feat)\n        return x\n\n\nclass Stem(nn.Module):\n    \"\"\" Size-agnostic implementation of 2D image to patch 
embedding,\n        allowing input size to be adjusted during model forward operation\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int = 3,\n            out_chs: int = 96,\n            stride: int = 4,\n            norm_layer: Type[nn.Module] = LayerNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        stride = to_2tuple(stride)\n        self.stride = stride\n        self.in_chs = in_chs\n        self.out_chs = out_chs\n        assert stride[0] == 4  # only setup for stride==4\n        self.conv = nn.Conv2d(\n            in_chs,\n            out_chs,\n            kernel_size=7,\n            stride=stride,\n            padding=3,\n            **dd,\n        )\n        self.norm = norm_layer(out_chs, **dd)\n\n    def forward(self, x: Tensor):\n        B, C, H, W = x.shape\n        pad_r = (self.stride[1] - W % self.stride[1]) % self.stride[1]\n        pad_b = (self.stride[0] - H % self.stride[0]) % self.stride[0]\n        x = F.pad(x, (0, pad_r, 0, pad_b))\n        x = self.conv(x)\n        x = self.norm(x)\n        return x\n\n\nclass Downsample(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 3,\n            norm_layer: Type[nn.Module] = LayerNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.in_chs = in_chs\n        self.out_chs = out_chs\n\n        self.norm = norm_layer(in_chs, **dd)\n        self.even_k = kernel_size % 2 == 0\n        self.conv = nn.Conv2d(\n            in_chs,\n            out_chs,\n            kernel_size=kernel_size,\n            stride=2,\n            padding=0 if self.even_k else kernel_size // 2,\n            **dd,\n        )\n\n    def forward(self, x: Tensor):\n        B, C, H, W = x.shape\n        x = self.norm(x)\n       
 if self.even_k:\n            k_h, k_w = self.conv.kernel_size\n            pad_r = (k_w - W % k_w) % k_w\n            pad_b = (k_h - H % k_h) % k_h\n            x = F.pad(x, (0, pad_r , 0, pad_b))\n        x = self.conv(x)\n        return x\n\n\nclass ChannelAttentionV2(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = True,\n            dynamic_scale: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.groups = num_heads\n        self.head_dim = dim // num_heads\n        self.dynamic_scale = dynamic_scale\n\n        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias, **dd)\n        self.proj = nn.Linear(dim, dim, **dd)\n\n    def forward(self, x):\n        B, N, C = x.shape\n\n        qkv = self.qkv(x).reshape(B, N, 3, self.groups, C // self.groups).permute(2, 0, 3, 1, 4)\n        q, k, v = qkv.unbind(0)\n\n        if self.dynamic_scale:\n            q = q * N ** -0.5\n        else:\n            q = q * self.head_dim ** -0.5\n        attn = q.transpose(-1, -2) @ k\n        attn = attn.softmax(dim=-1)\n        x = (attn @ v.transpose(-1, -2)).transpose(-1, -2)\n\n        x = x.transpose(1, 2).reshape(B, N, C)\n        x = self.proj(x)\n        return x\n\n\n\nclass ChannelAttention(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_heads = num_heads\n        head_dim = dim // num_heads\n        self.scale = head_dim ** -0.5\n\n        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias, **dd)\n        self.proj = nn.Linear(dim, dim, **dd)\n\n    def forward(self, x: Tensor):\n        B, N, C = x.shape\n\n        qkv = 
self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4)\n        q, k, v = qkv.unbind(0)\n\n        k = k * self.scale\n        attn = k.transpose(-1, -2) @ v\n        attn = attn.softmax(dim=-1)\n        x = (attn @ q.transpose(-1, -2)).transpose(-1, -2)\n        x = x.transpose(1, 2).reshape(B, N, C)\n        x = self.proj(x)\n        return x\n\n\nclass ChannelBlock(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            ffn: bool = True,\n            cpe_act: bool = False,\n            v2: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.cpe1 = ConvPosEnc(dim=dim, k=3, act=cpe_act, **dd)\n        self.ffn = ffn\n        self.norm1 = norm_layer(dim, **dd)\n        attn_layer = ChannelAttentionV2 if v2 else ChannelAttention\n        self.attn = attn_layer(\n            dim,\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            **dd,\n        )\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n        self.cpe2 = ConvPosEnc(dim=dim, k=3, act=cpe_act, **dd)\n\n        if self.ffn:\n            self.norm2 = norm_layer(dim, **dd)\n            self.mlp = Mlp(\n                in_features=dim,\n                hidden_features=int(dim * mlp_ratio),\n                act_layer=act_layer,\n                **dd,\n            )\n            self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n        else:\n            self.norm2 = None\n            self.mlp = None\n            self.drop_path2 = None\n\n    def forward(self, x: Tensor):\n        B, C, H, W = x.shape\n\n        x = self.cpe1(x).flatten(2).transpose(1, 2)\n\n        cur = self.norm1(x)\n        cur = self.attn(cur)\n        x = x + self.drop_path1(cur)\n\n        x = self.cpe2(x.transpose(1, 2).view(B, C, H, W))\n\n        if self.mlp is not None:\n            x = x.flatten(2).transpose(1, 2)\n            x = x + self.drop_path2(self.mlp(self.norm2(x)))\n            x = x.transpose(1, 2).view(B, C, H, W)\n\n        return x\n\n\ndef window_partition(x: Tensor, window_size: Tuple[int, int]):\n    \"\"\"\n    Args:\n        x: (B, H, W, C)\n        window_size (int): window size\n    Returns:\n        windows: (num_windows*B, window_size, window_size, C)\n    \"\"\"\n    B, H, W, C = x.shape\n    x = x.view(B, H // window_size[0], window_size[0], W // window_size[1], window_size[1], C)\n    windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size[0], window_size[1], C)\n    return windows\n\n\n@register_notrace_function  # reason: int argument is a Proxy\ndef window_reverse(windows: Tensor, window_size: Tuple[int, int], H: int, W: int):\n    \"\"\"\n    Args:\n        windows: (num_windows*B, window_size, window_size, C)\n        window_size (int): Window size\n        H (int): Height of image\n        W (int): Width of image\n    Returns:\n        x: (B, H, W, C)\n    \"\"\"\n    C = windows.shape[-1]\n    x = windows.view(-1, H // window_size[0], W // window_size[1], window_size[0], window_size[1], C)\n    x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, H, W, C)\n    return x\n\n\nclass WindowAttention(nn.Module):\n    r\"\"\" Window based multi-head self attention (W-MSA) module with relative position bias.\n    It supports both of shifted and non-shifted window.\n    Args:\n        dim (int): Number of input channels.\n        window_size 
(tuple[int]): The height and width of the window.\n        num_heads (int): Number of attention heads.\n        qkv_bias (bool, optional):  If True, add a learnable bias to query, key, value. Default: True\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            window_size: Tuple[int, int],\n            num_heads: int,\n            qkv_bias: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.window_size = window_size\n        self.num_heads = num_heads\n        head_dim = dim // num_heads\n        self.scale = head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n\n        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias, **dd)\n        self.proj = nn.Linear(dim, dim, **dd)\n\n        self.softmax = nn.Softmax(dim=-1)\n\n    def forward(self, x: Tensor):\n        B_, N, C = x.shape\n\n        qkv = self.qkv(x).reshape(B_, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4)\n        q, k, v = qkv.unbind(0)\n\n        if self.fused_attn:\n            x = F.scaled_dot_product_attention(q, k, v)\n        else:\n            q = q * self.scale\n            attn = (q @ k.transpose(-2, -1))\n            attn = self.softmax(attn)\n            x = attn @ v\n\n        x = x.transpose(1, 2).reshape(B_, N, C)\n        x = self.proj(x)\n        return x\n\n\nclass SpatialBlock(nn.Module):\n    r\"\"\" Windows Block.\n    Args:\n        dim (int): Number of input channels.\n        num_heads (int): Number of attention heads.\n        window_size (int): Window size.\n        mlp_ratio (float): Ratio of mlp hidden dim to embedding dim.\n        qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True\n        drop_path (float, optional): Stochastic depth rate. 
Default: 0.0\n        act_layer (nn.Module, optional): Activation layer. Default: nn.GELU\n        norm_layer (nn.Module, optional): Normalization layer.  Default: nn.LayerNorm\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            window_size: int = 7,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            ffn: bool = True,\n            cpe_act: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.ffn = ffn\n        self.num_heads = num_heads\n        self.window_size = to_2tuple(window_size)\n        self.mlp_ratio = mlp_ratio\n\n        self.cpe1 = ConvPosEnc(dim=dim, k=3, act=cpe_act, **dd)\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = WindowAttention(\n            dim,\n            self.window_size,\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            **dd,\n        )\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.cpe2 = ConvPosEnc(dim=dim, k=3, act=cpe_act, **dd)\n        if self.ffn:\n            self.norm2 = norm_layer(dim, **dd)\n            mlp_hidden_dim = int(dim * mlp_ratio)\n            self.mlp = Mlp(\n                in_features=dim,\n                hidden_features=mlp_hidden_dim,\n                act_layer=act_layer,\n                **dd,\n            )\n            self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n        else:\n            self.norm2 = None\n            self.mlp = None\n            self.drop_path1 = None\n\n    def forward(self, x: Tensor):\n        B, C, H, W = x.shape\n\n        shortcut = self.cpe1(x).flatten(2).transpose(1, 2)\n\n        x = self.norm1(shortcut)\n        x = x.view(B, H, W, C)\n\n        pad_l = pad_t = 0\n        pad_r = (self.window_size[1] - W % self.window_size[1]) % self.window_size[1]\n        pad_b = (self.window_size[0] - H % self.window_size[0]) % self.window_size[0]\n        x = F.pad(x, (0, 0, pad_l, pad_r, pad_t, pad_b))\n        _, Hp, Wp, _ = x.shape\n\n        x_windows = window_partition(x, self.window_size)\n        x_windows = x_windows.view(-1, self.window_size[0] * self.window_size[1], C)\n\n        # W-MSA/SW-MSA\n        attn_windows = self.attn(x_windows)\n\n        # merge windows\n        attn_windows = attn_windows.view(-1, self.window_size[0], self.window_size[1], C)\n        x = window_reverse(attn_windows, self.window_size, Hp, Wp)\n\n        # if pad_r > 0 or pad_b > 0:\n        x = x[:, :H, :W, :].contiguous()\n\n        x = x.view(B, H * W, C)\n        x = shortcut + self.drop_path1(x)\n\n        x = self.cpe2(x.transpose(1, 2).view(B, C, H, W))\n\n        if self.mlp is not None:\n            x = x.flatten(2).transpose(1, 2)\n            x = x + self.drop_path2(self.mlp(self.norm2(x)))\n            x = x.transpose(1, 2).view(B, C, H, W)\n\n        return x\n\n\nclass DaVitStage(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            depth:int = 1,\n            downsample: bool = True,\n            attn_types: Tuple[str, ...] = ('spatial', 'channel'),\n            num_heads: int = 3,\n            window_size: int = 7,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            drop_path_rates: Tuple[float, ...] 
= (0, 0),\n            norm_layer: Type[nn.Module] = LayerNorm2d,\n            norm_layer_cl: Type[nn.Module] = nn.LayerNorm,\n            ffn: bool = True,\n            cpe_act: bool = False,\n            down_kernel_size: int = 2,\n            named_blocks: bool = False,\n            channel_attn_v2: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.grad_checkpointing = False\n\n        # downsample embedding layer at the beginning of each stage\n        if downsample:\n            self.downsample = Downsample(in_chs, out_chs, kernel_size=down_kernel_size, norm_layer=norm_layer, **dd)\n        else:\n            self.downsample = nn.Identity()\n\n        '''\n         repeating alternating attention blocks in each stage\n         default: (spatial -> channel) x depth\n\n         potential opportunity to integrate with a more general version of ByobNet/ByoaNet\n         since the logic is similar\n        '''\n        stage_blocks = []\n        for block_idx in range(depth):\n            from collections import OrderedDict\n            dual_attention_block = []\n            for attn_idx, attn_type in enumerate(attn_types):\n                if attn_type == 'spatial':\n                    dual_attention_block.append(('spatial_block', SpatialBlock(\n                        dim=out_chs,\n                        num_heads=num_heads,\n                        mlp_ratio=mlp_ratio,\n                        qkv_bias=qkv_bias,\n                        drop_path=drop_path_rates[block_idx],\n                        norm_layer=norm_layer_cl,\n                        ffn=ffn,\n                        cpe_act=cpe_act,\n                        window_size=window_size,\n                        **dd,\n                    )))\n                elif attn_type == 'channel':\n                    dual_attention_block.append(('channel_block', ChannelBlock(\n                   
     dim=out_chs,\n                        num_heads=num_heads,\n                        mlp_ratio=mlp_ratio,\n                        qkv_bias=qkv_bias,\n                        drop_path=drop_path_rates[block_idx],\n                        norm_layer=norm_layer_cl,\n                        ffn=ffn,\n                        cpe_act=cpe_act,\n                        v2=channel_attn_v2,\n                        **dd,\n                    )))\n            if named_blocks:\n                stage_blocks.append(nn.Sequential(OrderedDict(dual_attention_block)))\n            else:\n                stage_blocks.append(nn.Sequential(*[b[1] for b in dual_attention_block]))\n        self.blocks = nn.Sequential(*stage_blocks)\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    def forward(self, x: Tensor):\n        x = self.downsample(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        return x\n\n\nclass DaVit(nn.Module):\n    r\"\"\" DaViT\n        A PyTorch implementation of `DaViT: Dual Attention Vision Transformers`  - https://arxiv.org/abs/2204.03645\n        Supports arbitrary input sizes and pyramid feature extraction\n\n    Args:\n        in_chans (int): Number of input image channels. Default: 3\n        num_classes (int): Number of classes for classification head. Default: 1000\n        depths (tuple(int)): Number of blocks in each stage. Default: (1, 1, 3, 1)\n        embed_dims (tuple(int)): Patch embedding dimension. Default: (96, 192, 384, 768)\n        num_heads (tuple(int)): Number of attention heads in different layers. Default: (3, 6, 12, 24)\n        window_size (int): Window size. Default: 7\n        mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4\n        qkv_bias (bool): If True, add a learnable bias to query, key, value. 
Default: True\n        drop_path_rate (float): Stochastic depth rate. Default: 0.1\n        norm_layer (nn.Module): Normalization layer. Default: nn.LayerNorm.\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chans: int = 3,\n            depths: Tuple[int, ...] = (1, 1, 3, 1),\n            embed_dims: Tuple[int, ...] = (96, 192, 384, 768),\n            num_heads: Tuple[int, ...] = (3, 6, 12, 24),\n            window_size: int = 7,\n            mlp_ratio: float = 4,\n            qkv_bias: bool = True,\n            norm_layer: str = 'layernorm2d',\n            norm_layer_cl: str = 'layernorm',\n            norm_eps: float = 1e-5,\n            attn_types: Tuple[str, ...] = ('spatial', 'channel'),\n            ffn: bool = True,\n            cpe_act: bool = False,\n            down_kernel_size: int = 2,\n            channel_attn_v2: bool = False,\n            named_blocks: bool = False,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            head_norm_first: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        num_stages = len(embed_dims)\n        assert num_stages == len(num_heads) == len(depths)\n        norm_layer = partial(get_norm_layer(norm_layer), eps=norm_eps)\n        norm_layer_cl = partial(get_norm_layer(norm_layer_cl), eps=norm_eps)\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.num_features = self.head_hidden_size = embed_dims[-1]\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n        self.feature_info = []\n\n        self.stem = Stem(in_chans, embed_dims[0], norm_layer=norm_layer, **dd)\n        in_chs = embed_dims[0]\n\n        dpr = calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)\n        stages = []\n        for i in 
range(num_stages):\n            out_chs = embed_dims[i]\n            stage = DaVitStage(\n                in_chs,\n                out_chs,\n                depth=depths[i],\n                downsample=i > 0,\n                attn_types=attn_types,\n                num_heads=num_heads[i],\n                window_size=window_size,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                drop_path_rates=dpr[i],\n                norm_layer=norm_layer,\n                norm_layer_cl=norm_layer_cl,\n                ffn=ffn,\n                cpe_act=cpe_act,\n                down_kernel_size=down_kernel_size,\n                channel_attn_v2=channel_attn_v2,\n                named_blocks=named_blocks,\n                **dd,\n            )\n            in_chs = out_chs\n            stages.append(stage)\n            self.feature_info += [dict(num_chs=out_chs, reduction=2**(i+2), module=f'stages.{i}')]\n\n        self.stages = nn.Sequential(*stages)\n\n        # if head_norm_first == true, norm -> global pool -> fc ordering, like most other nets\n        # otherwise pool -> norm -> fc, the default DaViT order, similar to ConvNeXt\n        # FIXME generalize this structure to ClassifierHead\n        if head_norm_first:\n            self.norm_pre = norm_layer(self.num_features, **dd)\n            self.head = ClassifierHead(\n                self.num_features,\n                num_classes,\n                pool_type=global_pool,\n                drop_rate=self.drop_rate,\n                **dd,\n            )\n        else:\n            self.norm_pre = nn.Identity()\n            self.head = NormMlpClassifierHead(\n                self.num_features,\n                num_classes,\n                pool_type=global_pool,\n                drop_rate=self.drop_rate,\n                norm_layer=norm_layer,\n                **dd,\n            )\n        self.apply(self._init_weights)\n\n    def _init_weights(self, m):\n        if isinstance(m, 
nn.Linear):\n            trunc_normal_(m.weight, std=.02)\n            if isinstance(m, nn.Linear) and m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^stem',  # stem and embed\n            blocks=r'^stages\\.(\\d+)' if coarse else [\n                (r'^stages\\.(\\d+).downsample', (0,)),\n                (r'^stages\\.(\\d+)\\.blocks\\.(\\d+)', None),\n                (r'^norm_pre', (99999,)),\n            ]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n        for stage in self.stages:\n            stage.set_grad_checkpointing(enable=enable)\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output 
shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.stem(x)\n        last_idx = len(self.stages) - 1\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(stage, x)\n            else:\n                x = stage(x)\n            if feat_idx in take_indices:\n                if norm and feat_idx == last_idx:\n                    x_inter = self.norm_pre(x)  # applying final norm to last intermediate\n                else:\n                    x_inter = x\n                intermediates.append(x_inter)\n\n        if intermediates_only:\n            return intermediates\n\n        if feat_idx == last_idx:\n            x = self.norm_pre(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_norm:\n            self.norm_pre = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stages, x)\n        else:\n            x = self.stages(x)\n        x = self.norm_pre(x)\n        return x\n\n    def 
forward_head(self, x, pre_logits: bool = False):\n        return self.head(x, pre_logits=True) if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _convert_florence2(state_dict, model, prefix='vision_tower.'):\n    import re\n    out_dict = {}\n\n    for k, v in state_dict.items():\n        if k.startswith(prefix):\n            k = k.replace(prefix, '')\n        else:\n            continue\n        k = re.sub(r'convs.([0-9]+)', r'stages.\\1.downsample', k)\n        k = re.sub(r'blocks.([0-9]+)', r'stages.\\1.blocks', k)\n        k = k.replace('downsample.proj', 'downsample.conv')\n        k = k.replace('stages.0.downsample', 'stem')\n        #k = k.replace('head.', 'head.fc.')\n        #k = k.replace('norms.', 'head.norm.')\n        k = k.replace('window_attn.norm.', 'norm1.')\n        k = k.replace('window_attn.fn.', 'attn.')\n        k = k.replace('channel_attn.norm.', 'norm1.')\n        k = k.replace('channel_attn.fn.', 'attn.')\n        k = k.replace('ffn.norm.', 'norm2.')\n        k = k.replace('ffn.fn.net.', 'mlp.')\n        k = k.replace('conv1.fn.dw', 'cpe1.proj')\n        k = k.replace('conv2.fn.dw', 'cpe2.proj')\n        out_dict[k] = v\n\n    return out_dict\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    \"\"\" Remap MSFT checkpoints -> timm \"\"\"\n    if 'head.fc.weight' in state_dict:\n        return state_dict  # non-MSFT checkpoint\n\n    if 'state_dict' in state_dict:\n        state_dict = state_dict['state_dict']\n\n    if 'vision_tower.convs.0.proj.weight' in state_dict:\n        return _convert_florence2(state_dict, model)\n\n    import re\n    out_dict = {}\n    for k, v in state_dict.items():\n        k = re.sub(r'patch_embeds.([0-9]+)', r'stages.\\1.downsample', k)\n        k = re.sub(r'main_blocks.([0-9]+)', r'stages.\\1.blocks', k)\n        k = k.replace('downsample.proj', 'downsample.conv')\n        k = 
k.replace('stages.0.downsample', 'stem')\n        k = k.replace('head.', 'head.fc.')\n        k = k.replace('norms.', 'head.norm.')\n        k = k.replace('cpe.0', 'cpe1')\n        k = k.replace('cpe.1', 'cpe2')\n        out_dict[k] = v\n    return out_dict\n\n\ndef _create_davit(variant, pretrained=False, **kwargs):\n    default_out_indices = tuple(i for i, _ in enumerate(kwargs.get('depths', (1, 1, 3, 1))))\n    out_indices = kwargs.pop('out_indices', default_out_indices)\n\n    strict = kwargs.pop('pretrained_strict', True)\n    if variant.endswith('_fl'):\n        # FIXME cleaner approach to missing head norm?\n        strict = False\n\n    model = build_model_with_cfg(\n        DaVit,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        pretrained_strict=strict,\n        **kwargs)\n\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.95, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.conv', 'classifier': 'head.fc',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\n# TODO contact authors to get larger pretrained models\ndefault_cfgs = generate_default_cfgs({\n    # official microsoft weights from https://github.com/dingmyu/davit\n    'davit_tiny.msft_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'davit_small.msft_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'davit_base.msft_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'davit_large': _cfg(),\n    'davit_huge': _cfg(),\n    'davit_giant': _cfg(),\n    'davit_base_fl.msft_florence2': _cfg(\n        hf_hub_id='microsoft/Florence-2-base',\n        num_classes=0, input_size=(3, 768, 768)),\n    'davit_huge_fl.msft_florence2': _cfg(\n        
hf_hub_id='microsoft/Florence-2-large',\n        num_classes=0, input_size=(3, 768, 768)),\n})\n\n\n@register_model\ndef davit_tiny(pretrained=False, **kwargs) -> DaVit:\n    model_args = dict(depths=(1, 1, 3, 1), embed_dims=(96, 192, 384, 768), num_heads=(3, 6, 12, 24))\n    return _create_davit('davit_tiny', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef davit_small(pretrained=False, **kwargs) -> DaVit:\n    model_args = dict(depths=(1, 1, 9, 1), embed_dims=(96, 192, 384, 768), num_heads=(3, 6, 12, 24))\n    return _create_davit('davit_small', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef davit_base(pretrained=False, **kwargs) -> DaVit:\n    model_args = dict(depths=(1, 1, 9, 1), embed_dims=(128, 256, 512, 1024), num_heads=(4, 8, 16, 32))\n    return _create_davit('davit_base', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef davit_large(pretrained=False, **kwargs) -> DaVit:\n    model_args = dict(depths=(1, 1, 9, 1), embed_dims=(192, 384, 768, 1536), num_heads=(6, 12, 24, 48))\n    return _create_davit('davit_large', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef davit_huge(pretrained=False, **kwargs) -> DaVit:\n    model_args = dict(depths=(1, 1, 9, 1), embed_dims=(256, 512, 1024, 2048), num_heads=(8, 16, 32, 64))\n    return _create_davit('davit_huge', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef davit_giant(pretrained=False, **kwargs) -> DaVit:\n    model_args = dict(depths=(1, 1, 12, 3), embed_dims=(384, 768, 1536, 3072), num_heads=(12, 24, 48, 96))\n    return _create_davit('davit_giant', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n\n@register_model\ndef davit_base_fl(pretrained=False, **kwargs) -> DaVit:\n    model_args = dict(\n        depths=(1, 1, 9, 1), embed_dims=(128, 256, 512, 1024), num_heads=(4, 8, 16, 32),\n        window_size=12, down_kernel_size=3, channel_attn_v2=True, 
named_blocks=True,\n    )\n    return _create_davit('davit_base_fl', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef davit_huge_fl(pretrained=False, **kwargs) -> DaVit:\n    # NOTE: huge image tower used in 'large' Florence2 model\n    model_args = dict(\n        depths=(1, 1, 9, 1), embed_dims=(256, 512, 1024, 2048), num_heads=(8, 16, 32, 64),\n        window_size=12, down_kernel_size=3, channel_attn_v2=True, named_blocks=True,\n    )\n    return _create_davit('davit_huge_fl', pretrained=pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/deit.py",
    "content": "\"\"\" DeiT - Data-efficient Image Transformers\n\nDeiT model defs and weights from https://github.com/facebookresearch/deit, original copyright below\n\npaper: `DeiT: Data-efficient Image Transformers` - https://arxiv.org/abs/2012.12877\n\npaper: `DeiT III: Revenge of the ViT` - https://arxiv.org/abs/2204.07118\n\nModifications copyright 2021, Ross Wightman\n\"\"\"\n# Copyright (c) 2015-present, Facebook, Inc.\n# All rights reserved.\nfrom functools import partial\nfrom typing import Optional, Type\n\nimport torch\nfrom torch import nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import resample_abs_pos_embed\nfrom timm.models.vision_transformer import VisionTransformer, trunc_normal_, checkpoint_filter_fn\nfrom ._builder import build_model_with_cfg\nfrom ._registry import generate_default_cfgs, register_model, register_model_deprecations\n\n__all__ = ['VisionTransformerDistilled']  # model_registry will add each entrypoint fn to this\n\n\nclass VisionTransformerDistilled(VisionTransformer):\n    \"\"\" Vision Transformer w/ Distillation Token and Head\n\n    Distillation token & head support for `DeiT: Data-efficient Image Transformers`\n        - https://arxiv.org/abs/2012.12877\n    \"\"\"\n\n    def __init__(self, *args, **kwargs):\n        weight_init = kwargs.pop('weight_init', '')\n        super().__init__(*args, **kwargs, weight_init='skip')\n        assert self.global_pool in ('token',)\n        dd = {'device': kwargs.get('device', None), 'dtype': kwargs.get('dtype', None)}\n\n        self.num_prefix_tokens = 2\n        self.dist_token = nn.Parameter(torch.empty(1, 1, self.embed_dim, **dd))\n        self.pos_embed = nn.Parameter(\n            torch.empty(1, self.patch_embed.num_patches + self.num_prefix_tokens, self.embed_dim, **dd))\n        self.head_dist = nn.Linear(self.embed_dim, self.num_classes, **dd) if self.num_classes > 0 else nn.Identity()\n        self.distilled_training = False  
# must set this True to train w/ distillation token\n\n        self.weight_init_mode = 'reset' if weight_init == 'skip' else weight_init\n        # TODO: skip init when on meta device when safe to do so\n        if weight_init != 'skip':\n            self.init_weights(needs_reset=False)\n\n    def init_weights(self, mode='', needs_reset=True):\n        mode = mode or self.weight_init_mode\n        trunc_normal_(self.dist_token, std=.02)\n        super().init_weights(mode=mode, needs_reset=needs_reset)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^cls_token|pos_embed|patch_embed|dist_token',\n            blocks=[\n                (r'^blocks\\.(\\d+)', None),\n                (r'^norm', (99999,))]  # final norm w/ last block\n        )\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head, self.head_dist\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity()\n        self.head_dist = nn.Linear(self.embed_dim, self.num_classes) if num_classes > 0 else nn.Identity()\n\n    @torch.jit.ignore\n    def set_distilled_training(self, enable=True):\n        self.distilled_training = enable\n\n    def _pos_embed(self, x):\n        if self.dynamic_img_size:\n            B, H, W, C = x.shape\n            prev_grid_size = self.patch_embed.grid_size\n            pos_embed = resample_abs_pos_embed(\n                self.pos_embed,\n                new_size=(H, W),\n                old_size=prev_grid_size,\n                num_prefix_tokens=0 if self.no_embed_class else self.num_prefix_tokens,\n            )\n            x = x.view(B, -1, C)\n        else:\n            pos_embed = self.pos_embed\n        if self.no_embed_class:\n            # deit-3, updated JAX (big vision)\n            # position embedding 
does not overlap with class token, add then concat\n            x = x + pos_embed\n            x = torch.cat((\n                self.cls_token.expand(x.shape[0], -1, -1),\n                self.dist_token.expand(x.shape[0], -1, -1),\n                x),\n                dim=1)\n        else:\n            # original timm, JAX, and deit vit impl\n            # pos_embed has entry for class token, concat then add\n            x = torch.cat((\n                self.cls_token.expand(x.shape[0], -1, -1),\n                self.dist_token.expand(x.shape[0], -1, -1),\n                x),\n                dim=1)\n            x = x + pos_embed\n        return self.pos_drop(x)\n\n    def forward_head(self, x, pre_logits: bool = False) -> torch.Tensor:\n        x, x_dist = x[:, 0], x[:, 1]\n        if pre_logits:\n            return (x + x_dist) / 2\n        x = self.head(x)\n        x_dist = self.head_dist(x_dist)\n        if self.distilled_training and self.training and not torch.jit.is_scripting():\n            # only return separate classification predictions when training in distilled mode\n            return x, x_dist\n        else:\n            # during standard train / finetune, inference average the classifier predictions\n            return (x + x_dist) / 2\n\n\ndef _create_deit(variant, pretrained=False, distilled=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', 3)\n    model_cls = VisionTransformerDistilled if distilled else VisionTransformer\n    model = build_model_with_cfg(\n        model_cls,\n        variant,\n        pretrained,\n        pretrained_filter_fn=partial(checkpoint_filter_fn, adapt_layer_scale=True),\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': 
True,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.proj', 'classifier': 'head',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    # deit models (FB weights)\n    'deit_tiny_patch16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_tiny_patch16_224-a1311bcf.pth'),\n    'deit_small_patch16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_small_patch16_224-cd65a155.pth'),\n    'deit_base_patch16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_base_patch16_224-b5f2ef4d.pth'),\n    'deit_base_patch16_384.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_base_patch16_384-8de9b5d1.pth',\n        input_size=(3, 384, 384), crop_pct=1.0),\n\n    'deit_tiny_distilled_patch16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_tiny_distilled_patch16_224-b40b3cf7.pth',\n        classifier=('head', 'head_dist')),\n    'deit_small_distilled_patch16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_small_distilled_patch16_224-649709d9.pth',\n        classifier=('head', 'head_dist')),\n    'deit_base_distilled_patch16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_base_distilled_patch16_224-df68dfff.pth',\n        classifier=('head', 'head_dist')),\n    'deit_base_distilled_patch16_384.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_base_distilled_patch16_384-d0272ac0.pth',\n        input_size=(3, 384, 384), crop_pct=1.0,\n        classifier=('head', 'head_dist')),\n\n    'deit3_small_patch16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        
url='https://dl.fbaipublicfiles.com/deit/deit_3_small_224_1k.pth'),\n    'deit3_small_patch16_384.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_3_small_384_1k.pth',\n        input_size=(3, 384, 384), crop_pct=1.0),\n    'deit3_medium_patch16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_3_medium_224_1k.pth'),\n    'deit3_base_patch16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_3_base_224_1k.pth'),\n    'deit3_base_patch16_384.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_3_base_384_1k.pth',\n        input_size=(3, 384, 384), crop_pct=1.0),\n    'deit3_large_patch16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_3_large_224_1k.pth'),\n    'deit3_large_patch16_384.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_3_large_384_1k.pth',\n        input_size=(3, 384, 384), crop_pct=1.0),\n    'deit3_huge_patch14_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_3_huge_224_1k.pth'),\n\n    'deit3_small_patch16_224.fb_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_3_small_224_21k.pth',\n        crop_pct=1.0),\n    'deit3_small_patch16_384.fb_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_3_small_384_21k.pth',\n        input_size=(3, 384, 384), crop_pct=1.0),\n    'deit3_medium_patch16_224.fb_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_3_medium_224_21k.pth',\n        crop_pct=1.0),\n    'deit3_base_patch16_224.fb_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        
url='https://dl.fbaipublicfiles.com/deit/deit_3_base_224_21k.pth',\n        crop_pct=1.0),\n    'deit3_base_patch16_384.fb_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_3_base_384_21k.pth',\n        input_size=(3, 384, 384), crop_pct=1.0),\n    'deit3_large_patch16_224.fb_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_3_large_224_21k.pth',\n        crop_pct=1.0),\n    'deit3_large_patch16_384.fb_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_3_large_384_21k.pth',\n        input_size=(3, 384, 384), crop_pct=1.0),\n    'deit3_huge_patch14_224.fb_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/deit_3_huge_224_21k_v1.pth',\n        crop_pct=1.0),\n})\n\n\n@register_model\ndef deit_tiny_patch16_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" DeiT-tiny model @ 224x224 from paper (https://arxiv.org/abs/2012.12877).\n    ImageNet-1k weights from https://github.com/facebookresearch/deit.\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3)\n    model = _create_deit('deit_tiny_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef deit_small_patch16_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" DeiT-small model @ 224x224 from paper (https://arxiv.org/abs/2012.12877).\n    ImageNet-1k weights from https://github.com/facebookresearch/deit.\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6)\n    model = _create_deit('deit_small_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef deit_base_patch16_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" DeiT base model @ 224x224 from paper (https://arxiv.org/abs/2012.12877).\n 
   ImageNet-1k weights from https://github.com/facebookresearch/deit.\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12)\n    model = _create_deit('deit_base_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef deit_base_patch16_384(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" DeiT base model @ 384x384 from paper (https://arxiv.org/abs/2012.12877).\n    ImageNet-1k weights from https://github.com/facebookresearch/deit.\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12)\n    model = _create_deit('deit_base_patch16_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef deit_tiny_distilled_patch16_224(pretrained=False, **kwargs) -> VisionTransformerDistilled:\n    \"\"\" DeiT-tiny distilled model @ 224x224 from paper (https://arxiv.org/abs/2012.12877).\n    ImageNet-1k weights from https://github.com/facebookresearch/deit.\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3)\n    model = _create_deit(\n        'deit_tiny_distilled_patch16_224', pretrained=pretrained, distilled=True, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef deit_small_distilled_patch16_224(pretrained=False, **kwargs) -> VisionTransformerDistilled:\n    \"\"\" DeiT-small distilled model @ 224x224 from paper (https://arxiv.org/abs/2012.12877).\n    ImageNet-1k weights from https://github.com/facebookresearch/deit.\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6)\n    model = _create_deit(\n        'deit_small_distilled_patch16_224', pretrained=pretrained, distilled=True, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef deit_base_distilled_patch16_224(pretrained=False, **kwargs) -> VisionTransformerDistilled:\n    \"\"\" DeiT-base distilled model @ 224x224 from paper 
(https://arxiv.org/abs/2012.12877).\n    ImageNet-1k weights from https://github.com/facebookresearch/deit.\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12)\n    model = _create_deit(\n        'deit_base_distilled_patch16_224', pretrained=pretrained, distilled=True, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef deit_base_distilled_patch16_384(pretrained=False, **kwargs) -> VisionTransformerDistilled:\n    \"\"\" DeiT-base distilled model @ 384x384 from paper (https://arxiv.org/abs/2012.12877).\n    ImageNet-1k weights from https://github.com/facebookresearch/deit.\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12)\n    model = _create_deit(\n        'deit_base_distilled_patch16_384', pretrained=pretrained, distilled=True, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef deit3_small_patch16_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" DeiT-3 small model @ 224x224 from paper (https://arxiv.org/abs/2204.07118).\n    ImageNet-1k weights from https://github.com/facebookresearch/deit.\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, no_embed_class=True, init_values=1e-6)\n    model = _create_deit('deit3_small_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef deit3_small_patch16_384(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" DeiT-3 small model @ 384x384 from paper (https://arxiv.org/abs/2204.07118).\n    ImageNet-1k weights from https://github.com/facebookresearch/deit.\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, no_embed_class=True, init_values=1e-6)\n    model = _create_deit('deit3_small_patch16_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef deit3_medium_patch16_224(pretrained=False, **kwargs) -> 
VisionTransformer:\n    \"\"\" DeiT-3 medium model @ 224x224 (https://arxiv.org/abs/2012.12877).\n    ImageNet-1k weights from https://github.com/facebookresearch/deit.\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=512, depth=12, num_heads=8, no_embed_class=True, init_values=1e-6)\n    model = _create_deit('deit3_medium_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef deit3_base_patch16_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" DeiT-3 base model @ 224x224 from paper (https://arxiv.org/abs/2204.07118).\n    ImageNet-1k weights from https://github.com/facebookresearch/deit.\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, no_embed_class=True, init_values=1e-6)\n    model = _create_deit('deit3_base_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef deit3_base_patch16_384(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" DeiT-3 base model @ 384x384 from paper (https://arxiv.org/abs/2204.07118).\n    ImageNet-1k weights from https://github.com/facebookresearch/deit.\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, no_embed_class=True, init_values=1e-6)\n    model = _create_deit('deit3_base_patch16_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef deit3_large_patch16_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" DeiT-3 large model @ 224x224 from paper (https://arxiv.org/abs/2204.07118).\n    ImageNet-1k weights from https://github.com/facebookresearch/deit.\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, no_embed_class=True, init_values=1e-6)\n    model = _create_deit('deit3_large_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef 
deit3_large_patch16_384(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" DeiT-3 large model @ 384x384 from paper (https://arxiv.org/abs/2204.07118).\n    ImageNet-1k weights from https://github.com/facebookresearch/deit.\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, no_embed_class=True, init_values=1e-6)\n    model = _create_deit('deit3_large_patch16_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef deit3_huge_patch14_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" DeiT-3 base model @ 384x384 from paper (https://arxiv.org/abs/2204.07118).\n    ImageNet-1k weights from https://github.com/facebookresearch/deit.\n    \"\"\"\n    model_args = dict(patch_size=14, embed_dim=1280, depth=32, num_heads=16, no_embed_class=True, init_values=1e-6)\n    model = _create_deit('deit3_huge_patch14_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\nregister_model_deprecations(__name__, {\n    'deit3_small_patch16_224_in21ft1k': 'deit3_small_patch16_224.fb_in22k_ft_in1k',\n    'deit3_small_patch16_384_in21ft1k': 'deit3_small_patch16_384.fb_in22k_ft_in1k',\n    'deit3_medium_patch16_224_in21ft1k': 'deit3_medium_patch16_224.fb_in22k_ft_in1k',\n    'deit3_base_patch16_224_in21ft1k': 'deit3_base_patch16_224.fb_in22k_ft_in1k',\n    'deit3_base_patch16_384_in21ft1k': 'deit3_base_patch16_384.fb_in22k_ft_in1k',\n    'deit3_large_patch16_224_in21ft1k': 'deit3_large_patch16_224.fb_in22k_ft_in1k',\n    'deit3_large_patch16_384_in21ft1k': 'deit3_large_patch16_384.fb_in22k_ft_in1k',\n    'deit3_huge_patch14_224_in21ft1k': 'deit3_huge_patch14_224.fb_in22k_ft_in1k'\n})\n"
  },
  {
    "path": "timm/models/densenet.py",
    "content": "\"\"\"Pytorch Densenet implementation w/ tweaks\nThis file is a copy of https://github.com/pytorch/vision 'densenet.py' (BSD-3-Clause) with\nfixed kwargs passthrough and addition of dynamic global avg/max pool.\n\"\"\"\nimport re\nfrom collections import OrderedDict\nfrom typing import Any, Dict, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom torch.jit.annotations import List\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import BatchNormAct2d, get_norm_act_layer, BlurPool2d, create_classifier\nfrom ._builder import build_model_with_cfg\nfrom ._manipulate import MATCH_PREV_GROUP, checkpoint\nfrom ._registry import register_model, generate_default_cfgs, register_model_deprecations\n\n__all__ = ['DenseNet']\n\n\nclass DenseLayer(nn.Module):\n    \"\"\"Dense layer for DenseNet.\n\n    Implements the bottleneck layer with 1x1 and 3x3 convolutions.\n    \"\"\"\n\n    def __init__(\n            self,\n            num_input_features: int,\n            growth_rate: int,\n            bn_size: int,\n            norm_layer: Type[nn.Module] = BatchNormAct2d,\n            drop_rate: float = 0.,\n            grad_checkpointing: bool = False,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize DenseLayer.\n\n        Args:\n            num_input_features: Number of input features.\n            growth_rate: Growth rate (k) of the layer.\n            bn_size: Bottleneck size multiplier.\n            norm_layer: Normalization layer class.\n            drop_rate: Dropout rate.\n            grad_checkpointing: Use gradient checkpointing.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.add_module('norm1', norm_layer(num_input_features, **dd)),\n        self.add_module('conv1', nn.Conv2d(\n            num_input_features, bn_size * growth_rate, kernel_size=1, stride=1, 
bias=False, **dd)),\n        self.add_module('norm2', norm_layer(bn_size * growth_rate, **dd)),\n        self.add_module('conv2', nn.Conv2d(\n            bn_size * growth_rate, growth_rate, kernel_size=3, stride=1, padding=1, bias=False, **dd)),\n        self.drop_rate = float(drop_rate)\n        self.grad_checkpointing = grad_checkpointing\n\n    def bottleneck_fn(self, xs: List[torch.Tensor]) -> torch.Tensor:\n        \"\"\"Bottleneck function for concatenated features.\"\"\"\n        concated_features = torch.cat(xs, 1)\n        bottleneck_output = self.conv1(self.norm1(concated_features))  # noqa: T484\n        return bottleneck_output\n\n    # todo: rewrite when torchscript supports any\n    def any_requires_grad(self, x: List[torch.Tensor]) -> bool:\n        \"\"\"Check if any tensor in list requires gradient.\"\"\"\n        for tensor in x:\n            if tensor.requires_grad:\n                return True\n        return False\n\n    def call_checkpoint_bottleneck(self, x: List[torch.Tensor]) -> torch.Tensor:\n        \"\"\"Call bottleneck function with gradient checkpointing.\"\"\"\n        def closure(*xs):\n            return self.bottleneck_fn(xs)\n\n        return checkpoint(closure, *x)\n\n    # torchscript does not yet support *args, so we overload method\n    # allowing it to take either a List[Tensor] or single Tensor\n    def forward(self, x: Union[torch.Tensor, List[torch.Tensor]]) -> torch.Tensor:  # noqa: F811\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input features (single tensor or list of tensors).\n\n        Returns:\n            New features to be concatenated.\n        \"\"\"\n        if isinstance(x, torch.Tensor):\n            prev_features = [x]\n        else:\n            prev_features = x\n\n        if self.grad_checkpointing and self.any_requires_grad(prev_features):\n            if torch.jit.is_scripting():\n                raise Exception(\"Memory Efficient not supported in JIT\")\n            bottleneck_output 
= self.call_checkpoint_bottleneck(prev_features)\n        else:\n            bottleneck_output = self.bottleneck_fn(prev_features)\n\n        new_features = self.conv2(self.norm2(bottleneck_output))\n        if self.drop_rate > 0:\n            new_features = F.dropout(new_features, p=self.drop_rate, training=self.training)\n        return new_features\n\n\nclass DenseBlock(nn.ModuleDict):\n    \"\"\"DenseNet Block.\n\n    Contains multiple dense layers with concatenated features.\n    \"\"\"\n    _version = 2\n\n    def __init__(\n            self,\n            num_layers: int,\n            num_input_features: int,\n            bn_size: int,\n            growth_rate: int,\n            norm_layer: Type[nn.Module] = BatchNormAct2d,\n            drop_rate: float = 0.,\n            grad_checkpointing: bool = False,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize DenseBlock.\n\n        Args:\n            num_layers: Number of layers in the block.\n            num_input_features: Number of input features.\n            bn_size: Bottleneck size multiplier.\n            growth_rate: Growth rate (k) for each layer.\n            norm_layer: Normalization layer class.\n            drop_rate: Dropout rate.\n            grad_checkpointing: Use gradient checkpointing.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        for i in range(num_layers):\n            layer = DenseLayer(\n                num_input_features + i * growth_rate,\n                growth_rate=growth_rate,\n                bn_size=bn_size,\n                norm_layer=norm_layer,\n                drop_rate=drop_rate,\n                grad_checkpointing=grad_checkpointing,\n                **dd,\n            )\n            self.add_module('denselayer%d' % (i + 1), layer)\n\n    def forward(self, init_features: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through all layers in the block.\n\n        Args:\n   
         init_features: Initial features from previous layer.\n\n        Returns:\n            Concatenated features from all layers.\n        \"\"\"\n        features = [init_features]\n        for name, layer in self.items():\n            new_features = layer(features)\n            features.append(new_features)\n        return torch.cat(features, 1)\n\n\nclass DenseTransition(nn.Sequential):\n    \"\"\"Transition layer between DenseNet blocks.\n\n    Reduces feature dimensions and spatial resolution.\n    \"\"\"\n\n    def __init__(\n            self,\n            num_input_features: int,\n            num_output_features: int,\n            norm_layer: Type[nn.Module] = BatchNormAct2d,\n            aa_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize DenseTransition.\n\n        Args:\n            num_input_features: Number of input features.\n            num_output_features: Number of output features.\n            norm_layer: Normalization layer class.\n            aa_layer: Anti-aliasing layer class.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.add_module('norm', norm_layer(num_input_features, **dd))\n        self.add_module('conv', nn.Conv2d(\n            num_input_features, num_output_features, kernel_size=1, stride=1, bias=False, **dd))\n        if aa_layer is not None:\n            self.add_module('pool', aa_layer(num_output_features, stride=2, **dd))\n        else:\n            self.add_module('pool', nn.AvgPool2d(kernel_size=2, stride=2))\n\n\nclass DenseNet(nn.Module):\n    \"\"\"Densenet-BC model class.\n\n    Based on `\"Densely Connected Convolutional Networks\" <https://arxiv.org/pdf/1608.06993.pdf>`_\n\n    Args:\n        growth_rate: How many filters to add each layer (`k` in paper).\n        block_config: How many layers in each pooling block.\n        bn_size: Multiplicative factor for number of bottle 
neck layers\n          (i.e. bn_size * k features in the bottleneck layer).\n        drop_rate: Dropout rate before classifier layer.\n        proj_drop_rate: Dropout rate after each dense layer.\n        num_classes: Number of classification classes.\n        memory_efficient: If True, uses checkpointing. Much more memory efficient,\n          but slower. Default: *False*. See `\"paper\" <https://arxiv.org/pdf/1707.06990.pdf>`_.\n    \"\"\"\n\n    def __init__(\n            self,\n            growth_rate: int = 32,\n            block_config: Tuple[int, ...] = (6, 12, 24, 16),\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            global_pool: str = 'avg',\n            bn_size: int = 4,\n            stem_type: str = '',\n            act_layer: str = 'relu',\n            norm_layer: str = 'batchnorm2d',\n            aa_layer: Optional[Type[nn.Module]] = None,\n            drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            memory_efficient: bool = False,\n            aa_stem_only: bool = True,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize DenseNet.\n\n        Args:\n            growth_rate: How many filters to add each layer (k in paper).\n            block_config: How many layers in each pooling block.\n            num_classes: Number of classification classes.\n            in_chans: Number of input channels.\n            global_pool: Global pooling type.\n            bn_size: Multiplicative factor for number of bottle neck layers.\n            stem_type: Type of stem ('', 'deep', 'deep_tiered').\n            act_layer: Activation layer.\n            norm_layer: Normalization layer.\n            aa_layer: Anti-aliasing layer.\n            drop_rate: Dropout rate before classifier layer.\n            proj_drop_rate: Dropout rate after each dense layer.\n            memory_efficient: If True, uses checkpointing for memory efficiency.\n            aa_stem_only: Apply 
anti-aliasing only to stem.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        super().__init__()\n        norm_layer = get_norm_act_layer(norm_layer, act_layer=act_layer)\n\n        # Stem\n        deep_stem = 'deep' in stem_type  # 3x3 deep stem\n        num_init_features = growth_rate * 2\n        if aa_layer is None:\n            stem_pool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)\n        else:\n            stem_pool = nn.Sequential(*[\n                nn.MaxPool2d(kernel_size=3, stride=1, padding=1),\n                aa_layer(channels=num_init_features, stride=2, **dd)])\n        if deep_stem:\n            stem_chs_1 = stem_chs_2 = growth_rate\n            if 'tiered' in stem_type:\n                stem_chs_1 = 3 * (growth_rate // 4)\n                stem_chs_2 = num_init_features if 'narrow' in stem_type else 6 * (growth_rate // 4)\n            self.features = nn.Sequential(OrderedDict([\n                ('conv0', nn.Conv2d(in_chans, stem_chs_1, 3, stride=2, padding=1, bias=False, **dd)),\n                ('norm0', norm_layer(stem_chs_1, **dd)),\n                ('conv1', nn.Conv2d(stem_chs_1, stem_chs_2, 3, stride=1, padding=1, bias=False, **dd)),\n                ('norm1', norm_layer(stem_chs_2, **dd)),\n                ('conv2', nn.Conv2d(stem_chs_2, num_init_features, 3, stride=1, padding=1, bias=False, **dd)),\n                ('norm2', norm_layer(num_init_features, **dd)),\n                ('pool0', stem_pool),\n            ]))\n        else:\n            self.features = nn.Sequential(OrderedDict([\n                ('conv0', nn.Conv2d(in_chans, num_init_features, kernel_size=7, stride=2, padding=3, bias=False, **dd)),\n                ('norm0', norm_layer(num_init_features, **dd)),\n                ('pool0', stem_pool),\n            ]))\n        self.feature_info = [\n            dict(num_chs=num_init_features, reduction=2, module=f'features.norm{2 
if deep_stem else 0}')]\n        current_stride = 4\n\n        # DenseBlocks\n        num_features = num_init_features\n        for i, num_layers in enumerate(block_config):\n            block = DenseBlock(\n                num_layers=num_layers,\n                num_input_features=num_features,\n                bn_size=bn_size,\n                growth_rate=growth_rate,\n                norm_layer=norm_layer,\n                drop_rate=proj_drop_rate,\n                grad_checkpointing=memory_efficient,\n                **dd,\n            )\n            module_name = f'denseblock{(i + 1)}'\n            self.features.add_module(module_name, block)\n            num_features = num_features + num_layers * growth_rate\n            transition_aa_layer = None if aa_stem_only else aa_layer\n            if i != len(block_config) - 1:\n                self.feature_info += [\n                    dict(num_chs=num_features, reduction=current_stride, module='features.' + module_name)]\n                current_stride *= 2\n                trans = DenseTransition(\n                    num_input_features=num_features,\n                    num_output_features=num_features // 2,\n                    norm_layer=norm_layer,\n                    aa_layer=transition_aa_layer,\n                    **dd,\n                )\n                self.features.add_module(f'transition{i + 1}', trans)\n                num_features = num_features // 2\n\n        # Final batch norm\n        self.features.add_module('norm5', norm_layer(num_features, **dd))\n\n        self.feature_info += [dict(num_chs=num_features, reduction=current_stride, module='features.norm5')]\n        self.num_features = self.head_hidden_size = num_features\n\n        # Linear layer\n        global_pool, classifier = create_classifier(\n            self.num_features,\n            self.num_classes,\n            pool_type=global_pool,\n            **dd,\n        )\n        self.global_pool = global_pool\n        self.head_drop = 
nn.Dropout(drop_rate)\n        self.classifier = classifier\n\n        # Official init from torch repo.\n        for m in self.modules():\n            if isinstance(m, nn.Conv2d):\n                nn.init.kaiming_normal_(m.weight)\n            elif isinstance(m, nn.BatchNorm2d):\n                nn.init.constant_(m.weight, 1)\n                nn.init.constant_(m.bias, 0)\n            elif isinstance(m, nn.Linear):\n                nn.init.constant_(m.bias, 0)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        \"\"\"Group parameters for optimization.\"\"\"\n        matcher = dict(\n            stem=r'^features\\.conv[012]|features\\.norm[012]|features\\.pool[012]',\n            blocks=r'^features\\.(?:denseblock|transition)(\\d+)' if coarse else [\n                (r'^features\\.denseblock(\\d+)\\.denselayer(\\d+)', None),\n                (r'^features\\.transition(\\d+)', MATCH_PREV_GROUP)  # FIXME combine with previous denselayer\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Enable or disable gradient checkpointing.\"\"\"\n        for b in self.features.modules():\n            if isinstance(b, DenseLayer):\n                b.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        \"\"\"Get the classifier head.\"\"\"\n        return self.classifier\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg') -> None:\n        \"\"\"Reset the classifier head.\n\n        Args:\n            num_classes: Number of classes for new classifier.\n            global_pool: Global pooling type.\n        \"\"\"\n        self.num_classes = num_classes\n        self.global_pool, self.classifier = create_classifier(\n            self.num_features, self.num_classes, pool_type=global_pool)\n\n    def forward_features(self, x: torch.Tensor) -> 
torch.Tensor:\n        \"\"\"Forward pass through feature extraction layers.\"\"\"\n        return self.features(x)\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        \"\"\"Forward pass through classifier head.\n\n        Args:\n            x: Feature tensor.\n            pre_logits: Return features before final classifier.\n\n        Returns:\n            Output tensor.\n        \"\"\"\n        x = self.global_pool(x)\n        x = self.head_drop(x)\n        return x if pre_logits else self.classifier(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Output logits.\n        \"\"\"\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _filter_torchvision_pretrained(state_dict: dict) -> Dict[str, torch.Tensor]:\n    \"\"\"Filter torchvision pretrained state dict for compatibility.\n\n    Args:\n        state_dict: State dictionary from torchvision checkpoint.\n\n    Returns:\n        Filtered state dictionary.\n    \"\"\"\n    pattern = re.compile(\n        r'^(.*denselayer\\d+\\.(?:norm|relu|conv))\\.((?:[12])\\.(?:weight|bias|running_mean|running_var))$')\n\n    for key in list(state_dict.keys()):\n        res = pattern.match(key)\n        if res:\n            new_key = res.group(1) + res.group(2)\n            state_dict[new_key] = state_dict[key]\n            del state_dict[key]\n    return state_dict\n\n\ndef _create_densenet(\n        variant: str,\n        growth_rate: int,\n        block_config: Tuple[int, ...],\n        pretrained: bool,\n        **kwargs,\n) -> DenseNet:\n    \"\"\"Create a DenseNet model.\n\n    Args:\n        variant: Model variant name.\n        growth_rate: Growth rate parameter.\n        block_config: Block configuration.\n        pretrained: Load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    
Returns:\n        DenseNet model instance.\n    \"\"\"\n    kwargs['growth_rate'] = growth_rate\n    kwargs['block_config'] = block_config\n    return build_model_with_cfg(\n        DenseNet,\n        variant,\n        pretrained,\n        feature_cfg=dict(flatten_sequential=True),\n        pretrained_filter_fn=_filter_torchvision_pretrained,\n        **kwargs,\n    )\n\n\ndef _cfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Create default configuration for DenseNet models.\"\"\"\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'features.conv0', 'classifier': 'classifier', 'license': 'apache-2.0',\n        **kwargs,\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'densenet121.ra_in1k': _cfg(\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=0.95),\n    'densenetblur121d.ra_in1k': _cfg(\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=0.95),\n    'densenet264d.untrained': _cfg(),\n    'densenet121.tv_in1k': _cfg(hf_hub_id='timm/'),\n    'densenet169.tv_in1k': _cfg(hf_hub_id='timm/'),\n    'densenet201.tv_in1k': _cfg(hf_hub_id='timm/'),\n    'densenet161.tv_in1k': _cfg(hf_hub_id='timm/'),\n})\n\n\n@register_model\ndef densenet121(pretrained=False, **kwargs) -> DenseNet:\n    r\"\"\"Densenet-121 model from\n    `\"Densely Connected Convolutional Networks\" <https://arxiv.org/pdf/1608.06993.pdf>`\n    \"\"\"\n    model_args = dict(growth_rate=32, block_config=(6, 12, 24, 16))\n    model = _create_densenet('densenet121', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef densenetblur121d(pretrained=False, **kwargs) -> DenseNet:\n    r\"\"\"Densenet-121 w/ blur-pooling & 3-layer 3x3 stem\n    `\"Densely Connected Convolutional Networks\" 
<https://arxiv.org/pdf/1608.06993.pdf>`\n    \"\"\"\n    model_args = dict(growth_rate=32, block_config=(6, 12, 24, 16), stem_type='deep', aa_layer=BlurPool2d)\n    model = _create_densenet('densenetblur121d', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef densenet169(pretrained=False, **kwargs) -> DenseNet:\n    r\"\"\"Densenet-169 model from\n    `\"Densely Connected Convolutional Networks\" <https://arxiv.org/pdf/1608.06993.pdf>`\n    \"\"\"\n    model_args = dict(growth_rate=32, block_config=(6, 12, 32, 32))\n    model = _create_densenet('densenet169', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef densenet201(pretrained=False, **kwargs) -> DenseNet:\n    r\"\"\"Densenet-201 model from\n    `\"Densely Connected Convolutional Networks\" <https://arxiv.org/pdf/1608.06993.pdf>`\n    \"\"\"\n    model_args = dict(growth_rate=32, block_config=(6, 12, 48, 32))\n    model = _create_densenet('densenet201', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef densenet161(pretrained=False, **kwargs) -> DenseNet:\n    r\"\"\"Densenet-161 model from\n    `\"Densely Connected Convolutional Networks\" <https://arxiv.org/pdf/1608.06993.pdf>`\n    \"\"\"\n    model_args = dict(growth_rate=48, block_config=(6, 12, 36, 24))\n    model = _create_densenet('densenet161', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef densenet264d(pretrained=False, **kwargs) -> DenseNet:\n    r\"\"\"Densenet-264 model from\n    `\"Densely Connected Convolutional Networks\" <https://arxiv.org/pdf/1608.06993.pdf>`\n    \"\"\"\n    model_args = dict(growth_rate=48, block_config=(6, 12, 64, 48), stem_type='deep')\n    model = _create_densenet('densenet264d', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\nregister_model_deprecations(__name__, {\n    'tv_densenet121': 
'densenet121.tv_in1k',\n})\n"
  },
  {
    "path": "timm/models/dla.py",
    "content": "\"\"\" Deep Layer Aggregation and DLA w/ Res2Net\nDLA original adapted from Official Pytorch impl at: https://github.com/ucbdrive/dla\nDLA Paper: `Deep Layer Aggregation` - https://arxiv.org/abs/1707.06484\n\nRes2Net additions from: https://github.com/gasvn/Res2Net/\nRes2Net Paper: `Res2Net: A New Multi-scale Backbone Architecture` - https://arxiv.org/abs/1904.01169\n\"\"\"\nimport math\nfrom typing import List, Optional, Tuple, Type\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import create_classifier\nfrom ._builder import build_model_with_cfg\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['DLA']\n\n\nclass DlaBasic(nn.Module):\n    \"\"\"DLA Basic\"\"\"\n\n    def __init__(\n            self,\n            inplanes: int,\n            planes: int,\n            stride: int = 1,\n            dilation: int = 1,\n            device=None,\n            dtype=None,\n            **_,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv1 = nn.Conv2d(\n            inplanes,\n            planes,\n            kernel_size=3,\n            stride=stride,\n            padding=dilation,\n            bias=False,\n            dilation=dilation,\n            **dd,\n        )\n        self.bn1 = nn.BatchNorm2d(planes, **dd)\n        self.relu = nn.ReLU(inplace=True)\n        self.conv2 = nn.Conv2d(\n            planes,\n            planes,\n            kernel_size=3,\n            stride=1,\n            padding=dilation,\n            bias=False,\n            dilation=dilation,\n            **dd,\n        )\n        self.bn2 = nn.BatchNorm2d(planes, **dd)\n        self.stride = stride\n\n    def forward(self, x, shortcut: Optional[torch.Tensor] = None, children: Optional[List[torch.Tensor]] = None):\n        if shortcut is None:\n            shortcut = x\n\n        out = self.conv1(x)\n        out = 
self.bn1(out)\n        out = self.relu(out)\n\n        out = self.conv2(out)\n        out = self.bn2(out)\n\n        out += shortcut\n        out = self.relu(out)\n\n        return out\n\n\nclass DlaBottleneck(nn.Module):\n    \"\"\"DLA/DLA-X Bottleneck\"\"\"\n    expansion = 2\n\n    def __init__(\n        self,\n        inplanes: int,\n        outplanes: int,\n        stride: int = 1,\n        dilation: int = 1,\n        cardinality: int = 1,\n        base_width: int = 64,\n        device=None,\n        dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.stride = stride\n        mid_planes = int(math.floor(outplanes * (base_width / 64)) * cardinality)\n        mid_planes = mid_planes // self.expansion\n\n        self.conv1 = nn.Conv2d(inplanes, mid_planes, kernel_size=1, bias=False, **dd)\n        self.bn1 = nn.BatchNorm2d(mid_planes, **dd)\n        self.conv2 = nn.Conv2d(\n            mid_planes,\n            mid_planes,\n            kernel_size=3,\n            stride=stride,\n            padding=dilation,\n            bias=False,\n            dilation=dilation,\n            groups=cardinality,\n            **dd,\n        )\n        self.bn2 = nn.BatchNorm2d(mid_planes, **dd)\n        self.conv3 = nn.Conv2d(mid_planes, outplanes, kernel_size=1, bias=False, **dd)\n        self.bn3 = nn.BatchNorm2d(outplanes, **dd)\n        self.relu = nn.ReLU(inplace=True)\n\n    def forward(self, x, shortcut: Optional[torch.Tensor] = None, children: Optional[List[torch.Tensor]] = None):\n        if shortcut is None:\n            shortcut = x\n\n        out = self.conv1(x)\n        out = self.bn1(out)\n        out = self.relu(out)\n\n        out = self.conv2(out)\n        out = self.bn2(out)\n        out = self.relu(out)\n\n        out = self.conv3(out)\n        out = self.bn3(out)\n\n        out += shortcut\n        out = self.relu(out)\n\n        return out\n\n\nclass DlaBottle2neck(nn.Module):\n    \"\"\" 
Res2Net/Res2NeXT DLA Bottleneck\n    Adapted from https://github.com/gasvn/Res2Net/blob/master/dla.py\n    \"\"\"\n    expansion = 2\n\n    def __init__(\n            self,\n            inplanes: int,\n            outplanes: int,\n            stride: int = 1,\n            dilation: int = 1,\n            scale: int = 4,\n            cardinality: int = 8,\n            base_width: int = 4,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.is_first = stride > 1\n        self.scale = scale\n        mid_planes = int(math.floor(outplanes * (base_width / 64)) * cardinality)\n        mid_planes = mid_planes // self.expansion\n        self.width = mid_planes\n\n        self.conv1 = nn.Conv2d(inplanes, mid_planes * scale, kernel_size=1, bias=False, **dd)\n        self.bn1 = nn.BatchNorm2d(mid_planes * scale, **dd)\n\n        num_scale_convs = max(1, scale - 1)\n        convs = []\n        bns = []\n        for _ in range(num_scale_convs):\n            convs.append(nn.Conv2d(\n                mid_planes,\n                mid_planes,\n                kernel_size=3,\n                stride=stride,\n                padding=dilation,\n                dilation=dilation,\n                groups=cardinality,\n                bias=False,\n                **dd,\n            ))\n            bns.append(nn.BatchNorm2d(mid_planes, **dd))\n        self.convs = nn.ModuleList(convs)\n        self.bns = nn.ModuleList(bns)\n        self.pool = nn.AvgPool2d(kernel_size=3, stride=stride, padding=1) if self.is_first else None\n\n        self.conv3 = nn.Conv2d(mid_planes * scale, outplanes, kernel_size=1, bias=False, **dd)\n        self.bn3 = nn.BatchNorm2d(outplanes, **dd)\n        self.relu = nn.ReLU(inplace=True)\n\n    def forward(self, x, shortcut: Optional[torch.Tensor] = None, children: Optional[List[torch.Tensor]] = None):\n        if shortcut is None:\n            shortcut = x\n\n        out 
= self.conv1(x)\n        out = self.bn1(out)\n        out = self.relu(out)\n\n        spx = torch.split(out, self.width, 1)\n        spo = []\n        sp = spx[0]  # redundant, for torchscript\n        for i, (conv, bn) in enumerate(zip(self.convs, self.bns)):\n            if i == 0 or self.is_first:\n                sp = spx[i]\n            else:\n                sp = sp + spx[i]\n            sp = conv(sp)\n            sp = bn(sp)\n            sp = self.relu(sp)\n            spo.append(sp)\n        if self.scale > 1:\n            if self.pool is not None:  # self.is_first == True, None check for torchscript\n                spo.append(self.pool(spx[-1]))\n            else:\n                spo.append(spx[-1])\n        out = torch.cat(spo, 1)\n\n        out = self.conv3(out)\n        out = self.bn3(out)\n\n        out += shortcut\n        out = self.relu(out)\n\n        return out\n\n\nclass DlaRoot(nn.Module):\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int,\n            shortcut: bool,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv = nn.Conv2d(\n            in_channels,\n            out_channels,\n            1,\n            stride=1,\n            bias=False,\n            padding=(kernel_size - 1) // 2,\n            **dd,\n        )\n        self.bn = nn.BatchNorm2d(out_channels, **dd)\n        self.relu = nn.ReLU(inplace=True)\n        self.shortcut = shortcut\n\n    def forward(self, x_children: List[torch.Tensor]):\n        x = self.conv(torch.cat(x_children, 1))\n        x = self.bn(x)\n        if self.shortcut:\n            x += x_children[0]\n        x = self.relu(x)\n\n        return x\n\n\nclass DlaTree(nn.Module):\n    def __init__(\n            self,\n            levels: int,\n            block: Type[nn.Module],\n            in_channels: int,\n            
out_channels: int,\n            stride: int = 1,\n            dilation: int = 1,\n            cardinality: int = 1,\n            base_width: int = 64,\n            level_root: bool = False,\n            root_dim: int = 0,\n            root_kernel_size: int = 1,\n            root_shortcut: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if root_dim == 0:\n            root_dim = 2 * out_channels\n        if level_root:\n            root_dim += in_channels\n        self.downsample = nn.MaxPool2d(stride, stride=stride) if stride > 1 else nn.Identity()\n        self.project = nn.Identity()\n        cargs = dict(dilation=dilation, cardinality=cardinality, base_width=base_width, **dd)\n        if levels == 1:\n            self.tree1 = block(in_channels, out_channels, stride, **cargs)\n            self.tree2 = block(out_channels, out_channels, 1, **cargs)\n            if in_channels != out_channels:\n                # NOTE the official impl/weights have  project layers in levels > 1 case that are never\n                # used, I've moved the project layer here to avoid wasted params but old checkpoints will\n                # need strict=False while loading.\n                self.project = nn.Sequential(\n                    nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=1, bias=False, **dd),\n                    nn.BatchNorm2d(out_channels, **dd))\n            self.root = DlaRoot(root_dim, out_channels, root_kernel_size, root_shortcut, **dd)\n        else:\n            cargs.update(dict(root_kernel_size=root_kernel_size, root_shortcut=root_shortcut))\n            self.tree1 = DlaTree(\n                levels - 1,\n                block,\n                in_channels,\n                out_channels,\n                stride,\n                root_dim=0,\n                **cargs,\n            )\n            self.tree2 = DlaTree(\n                levels - 
1,\n                block,\n                out_channels,\n                out_channels,\n                root_dim=root_dim + out_channels,\n                **cargs,\n            )\n            self.root = None\n        self.level_root = level_root\n        self.root_dim = root_dim\n        self.levels = levels\n\n    def forward(self, x, shortcut: Optional[torch.Tensor] = None, children: Optional[List[torch.Tensor]] = None):\n        if children is None:\n            children = []\n        bottom = self.downsample(x)\n        shortcut = self.project(bottom)\n        if self.level_root:\n            children.append(bottom)\n        x1 = self.tree1(x, shortcut)\n        if self.root is not None:  # levels == 1\n            x2 = self.tree2(x1)\n            x = self.root([x2, x1] + children)\n        else:\n            children.append(x1)\n            x = self.tree2(x1, None, children)\n        return x\n\n\nclass DLA(nn.Module):\n    def __init__(\n            self,\n            levels: Tuple[int, ...],\n            channels: Tuple[int, ...],\n            output_stride: int = 32,\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            global_pool: str = 'avg',\n            cardinality: int = 1,\n            base_width: int = 64,\n            block: Type[nn.Module] = DlaBottle2neck,\n            shortcut_root: bool = False,\n            drop_rate: float = 0.0,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.channels = channels\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.cardinality = cardinality\n        self.base_width = base_width\n        assert output_stride == 32  # FIXME support dilation\n\n        self.base_layer = nn.Sequential(\n            nn.Conv2d(in_chans, channels[0], kernel_size=7, stride=1, padding=3, bias=False, **dd),\n            nn.BatchNorm2d(channels[0], **dd),\n            
nn.ReLU(inplace=True),\n        )\n        self.level0 = self._make_conv_level(channels[0], channels[0], levels[0], **dd)\n        self.level1 = self._make_conv_level(channels[0], channels[1], levels[1], stride=2, **dd)\n        cargs = dict(cardinality=cardinality, base_width=base_width, root_shortcut=shortcut_root, **dd)\n        self.level2 = DlaTree(levels[2], block, channels[1], channels[2], 2, level_root=False, **cargs)\n        self.level3 = DlaTree(levels[3], block, channels[2], channels[3], 2, level_root=True, **cargs)\n        self.level4 = DlaTree(levels[4], block, channels[3], channels[4], 2, level_root=True, **cargs)\n        self.level5 = DlaTree(levels[5], block, channels[4], channels[5], 2, level_root=True, **cargs)\n        self.feature_info = [\n            dict(num_chs=channels[0], reduction=1, module='level0'),  # rare to have a meaningful stride 1 level\n            dict(num_chs=channels[1], reduction=2, module='level1'),\n            dict(num_chs=channels[2], reduction=4, module='level2'),\n            dict(num_chs=channels[3], reduction=8, module='level3'),\n            dict(num_chs=channels[4], reduction=16, module='level4'),\n            dict(num_chs=channels[5], reduction=32, module='level5'),\n        ]\n\n        self.num_features = self.head_hidden_size = channels[-1]\n        self.global_pool, self.head_drop, self.fc = create_classifier(\n            self.num_features,\n            self.num_classes,\n            pool_type=global_pool,\n            use_conv=True,\n            drop_rate=drop_rate,\n            **dd,\n        )\n        self.flatten = nn.Flatten(1) if global_pool else nn.Identity()\n\n        for m in self.modules():\n            if isinstance(m, nn.Conv2d):\n                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n                m.weight.data.normal_(0, math.sqrt(2. 
/ n))\n            elif isinstance(m, nn.BatchNorm2d):\n                m.weight.data.fill_(1)\n                m.bias.data.zero_()\n\n    def _make_conv_level(self, inplanes: int, planes: int, convs: int, stride: int = 1, dilation: int = 1, device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        modules = []\n        for i in range(convs):\n            modules.extend([\n                nn.Conv2d(\n                    inplanes,\n                    planes,\n                    kernel_size=3,\n                    stride=stride if i == 0 else 1,\n                    padding=dilation,\n                    bias=False,\n                    dilation=dilation,\n                    **dd,\n                ),\n                nn.BatchNorm2d(planes, **dd),\n                nn.ReLU(inplace=True)])\n            inplanes = planes\n        return nn.Sequential(*modules)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^base_layer',\n            blocks=r'^level(\\d+)' if coarse else [\n                # an unusual arch, this achieves somewhat more granularity without getting super messy\n                (r'^level(\\d+)\\.tree(\\d+)', None),\n                (r'^level(\\d+)\\.root', (2,)),\n                (r'^level(\\d+)', (1,))\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, 'gradient checkpointing not supported'\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg'):\n        self.num_classes = num_classes\n        self.global_pool, self.fc = create_classifier(\n            self.num_features, self.num_classes, pool_type=global_pool, use_conv=True)\n        self.flatten = nn.Flatten(1) if global_pool else nn.Identity()\n\n    def forward_features(self, x):\n        x = 
self.base_layer(x)\n        x = self.level0(x)\n        x = self.level1(x)\n        x = self.level2(x)\n        x = self.level3(x)\n        x = self.level4(x)\n        x = self.level5(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        x = self.head_drop(x)\n        if pre_logits:\n            return self.flatten(x)\n        x = self.fc(x)\n        return self.flatten(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_dla(variant, pretrained=False, **kwargs):\n    return build_model_with_cfg(\n        DLA,\n        variant,\n        pretrained,\n        pretrained_strict=False,\n        feature_cfg=dict(out_indices=(1, 2, 3, 4, 5)),\n        **kwargs,\n    )\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bilinear',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'base_layer.0', 'classifier': 'fc', 'license': 'bsd-3-clause',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'dla34.in1k': _cfg(hf_hub_id='timm/'),\n    'dla46_c.in1k': _cfg(hf_hub_id='timm/'),\n    'dla46x_c.in1k': _cfg(hf_hub_id='timm/'),\n    'dla60x_c.in1k': _cfg(hf_hub_id='timm/'),\n    'dla60.in1k': _cfg(hf_hub_id='timm/'),\n    'dla60x.in1k': _cfg(hf_hub_id='timm/'),\n    'dla102.in1k': _cfg(hf_hub_id='timm/'),\n    'dla102x.in1k': _cfg(hf_hub_id='timm/'),\n    'dla102x2.in1k': _cfg(hf_hub_id='timm/'),\n    'dla169.in1k': _cfg(hf_hub_id='timm/'),\n    'dla60_res2net.in1k': _cfg(hf_hub_id='timm/', license='unknown'),\n    'dla60_res2next.in1k': _cfg(hf_hub_id='timm/', license='unknown'),\n})\n\n\n@register_model\ndef dla60_res2net(pretrained=False, **kwargs) -> DLA:\n    model_args = dict(\n        levels=(1, 1, 1, 2, 3, 1), 
channels=(16, 32, 128, 256, 512, 1024),\n        block=DlaBottle2neck, cardinality=1, base_width=28)\n    return _create_dla('dla60_res2net', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef dla60_res2next(pretrained=False, **kwargs) -> DLA:\n    model_args = dict(\n        levels=(1, 1, 1, 2, 3, 1), channels=(16, 32, 128, 256, 512, 1024),\n        block=DlaBottle2neck, cardinality=8, base_width=4)\n    return _create_dla('dla60_res2next', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef dla34(pretrained=False, **kwargs) -> DLA:  # DLA-34\n    model_args = dict(\n        levels=[1, 1, 1, 2, 2, 1], channels=[16, 32, 64, 128, 256, 512], block=DlaBasic)\n    return _create_dla('dla34', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef dla46_c(pretrained=False, **kwargs) -> DLA:  # DLA-46-C\n    model_args = dict(\n        levels=[1, 1, 1, 2, 2, 1], channels=[16, 32, 64, 64, 128, 256], block=DlaBottleneck)\n    return _create_dla('dla46_c', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef dla46x_c(pretrained=False, **kwargs) -> DLA:  # DLA-X-46-C\n    model_args = dict(\n        levels=[1, 1, 1, 2, 2, 1], channels=[16, 32, 64, 64, 128, 256],\n        block=DlaBottleneck, cardinality=32, base_width=4)\n    return _create_dla('dla46x_c', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef dla60x_c(pretrained=False, **kwargs) -> DLA:  # DLA-X-60-C\n    model_args = dict(\n        levels=[1, 1, 1, 2, 3, 1], channels=[16, 32, 64, 64, 128, 256],\n        block=DlaBottleneck, cardinality=32, base_width=4)\n    return _create_dla('dla60x_c', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef dla60(pretrained=False, **kwargs) -> DLA:  # DLA-60\n    model_args = dict(\n        levels=[1, 1, 1, 2, 3, 1], channels=[16, 32, 128, 256, 512, 1024],\n        block=DlaBottleneck)\n    return _create_dla('dla60', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef 
dla60x(pretrained=False, **kwargs) -> DLA:  # DLA-X-60\n    model_args = dict(\n        levels=[1, 1, 1, 2, 3, 1], channels=[16, 32, 128, 256, 512, 1024],\n        block=DlaBottleneck, cardinality=32, base_width=4)\n    return _create_dla('dla60x', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef dla102(pretrained=False, **kwargs) -> DLA:  # DLA-102\n    model_args = dict(\n        levels=[1, 1, 1, 3, 4, 1], channels=[16, 32, 128, 256, 512, 1024],\n        block=DlaBottleneck, shortcut_root=True)\n    return _create_dla('dla102', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef dla102x(pretrained=False, **kwargs) -> DLA:  # DLA-X-102\n    model_args = dict(\n        levels=[1, 1, 1, 3, 4, 1], channels=[16, 32, 128, 256, 512, 1024],\n        block=DlaBottleneck, cardinality=32, base_width=4, shortcut_root=True)\n    return _create_dla('dla102x', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef dla102x2(pretrained=False, **kwargs) -> DLA:  # DLA-X-102 64\n    model_args = dict(\n        levels=[1, 1, 1, 3, 4, 1], channels=[16, 32, 128, 256, 512, 1024],\n        block=DlaBottleneck, cardinality=64, base_width=4, shortcut_root=True)\n    return _create_dla('dla102x2', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef dla169(pretrained=False, **kwargs) -> DLA:  # DLA-169\n    model_args = dict(\n        levels=[1, 1, 2, 3, 5, 1], channels=[16, 32, 128, 256, 512, 1024],\n        block=DlaBottleneck, shortcut_root=True)\n    return _create_dla('dla169', pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/dpn.py",
    "content": "\"\"\" PyTorch implementation of DualPathNetworks\nBased on original MXNet implementation https://github.com/cypw/DPNs with\nmany ideas from another PyTorch implementation https://github.com/oyam/pytorch-DPNs.\n\nThis implementation is compatible with the pretrained weights from cypw's MXNet implementation.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom collections import OrderedDict\nfrom functools import partial\nfrom typing import Tuple, Type, Optional\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DPN_MEAN, IMAGENET_DPN_STD, IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import BatchNormAct2d, ConvNormAct, create_conv2d, create_classifier, get_norm_act_layer\nfrom ._builder import build_model_with_cfg\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['DPN']\n\n\nclass CatBnAct(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            norm_layer: Type[nn.Module] = BatchNormAct2d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.bn = norm_layer(in_chs, eps=0.001, **dd)\n\n    def forward(self, x):\n        if isinstance(x, tuple):\n            x = torch.cat(x, dim=1)\n        return self.bn(x)\n\n\nclass BnActConv2d(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int,\n            stride: int,\n            groups: int = 1,\n            norm_layer: Type[nn.Module] = BatchNormAct2d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.bn = norm_layer(in_chs, eps=0.001, **dd)\n        self.conv = create_conv2d(in_chs, out_chs, kernel_size, stride=stride, groups=groups, **dd)\n\n    def forward(self, x):\n        return 
self.conv(self.bn(x))\n\n\nclass DualPathBlock(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            num_1x1_a: int,\n            num_3x3_b: int,\n            num_1x1_c: int,\n            inc: int,\n            groups: int,\n            block_type: str = 'normal',\n            b: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_1x1_c = num_1x1_c\n        self.inc = inc\n        self.b = b\n        if block_type == 'proj':\n            self.key_stride = 1\n            self.has_proj = True\n        elif block_type == 'down':\n            self.key_stride = 2\n            self.has_proj = True\n        else:\n            assert block_type == 'normal'\n            self.key_stride = 1\n            self.has_proj = False\n\n        self.c1x1_w_s1 = None\n        self.c1x1_w_s2 = None\n        if self.has_proj:\n            # Using different member names here to allow easier parameter key matching for conversion\n            if self.key_stride == 2:\n                self.c1x1_w_s2 = BnActConv2d(\n                    in_chs=in_chs, out_chs=num_1x1_c + 2 * inc, kernel_size=1, stride=2, **dd)\n            else:\n                self.c1x1_w_s1 = BnActConv2d(\n                    in_chs=in_chs, out_chs=num_1x1_c + 2 * inc, kernel_size=1, stride=1, **dd)\n\n        self.c1x1_a = BnActConv2d(in_chs=in_chs, out_chs=num_1x1_a, kernel_size=1, stride=1, **dd)\n        self.c3x3_b = BnActConv2d(\n            in_chs=num_1x1_a, out_chs=num_3x3_b, kernel_size=3, stride=self.key_stride, groups=groups, **dd)\n        if b:\n            self.c1x1_c = CatBnAct(in_chs=num_3x3_b, **dd)\n            self.c1x1_c1 = create_conv2d(num_3x3_b, num_1x1_c, kernel_size=1, **dd)\n            self.c1x1_c2 = create_conv2d(num_3x3_b, inc, kernel_size=1, **dd)\n        else:\n            self.c1x1_c = BnActConv2d(in_chs=num_3x3_b, out_chs=num_1x1_c + inc, 
kernel_size=1, stride=1, **dd)\n            self.c1x1_c1 = None\n            self.c1x1_c2 = None\n\n    def forward(self, x) -> Tuple[torch.Tensor, torch.Tensor]:\n        if isinstance(x, tuple):\n            x_in = torch.cat(x, dim=1)\n        else:\n            x_in = x\n        if self.c1x1_w_s1 is None and self.c1x1_w_s2 is None:\n            # self.has_proj == False, torchscript requires condition on module == None\n            x_s1 = x[0]\n            x_s2 = x[1]\n        else:\n            # self.has_proj == True\n            if self.c1x1_w_s1 is not None:\n                # self.key_stride = 1\n                x_s = self.c1x1_w_s1(x_in)\n            else:\n                # self.key_stride = 2\n                x_s = self.c1x1_w_s2(x_in)\n            x_s1 = x_s[:, :self.num_1x1_c, :, :]\n            x_s2 = x_s[:, self.num_1x1_c:, :, :]\n        x_in = self.c1x1_a(x_in)\n        x_in = self.c3x3_b(x_in)\n        x_in = self.c1x1_c(x_in)\n        if self.c1x1_c1 is not None:\n            # self.b == True, using None check for torchscript compat\n            out1 = self.c1x1_c1(x_in)\n            out2 = self.c1x1_c2(x_in)\n        else:\n            out1 = x_in[:, :self.num_1x1_c, :, :]\n            out2 = x_in[:, self.num_1x1_c:, :, :]\n        resid = x_s1 + out1\n        dense = torch.cat([x_s2, out2], dim=1)\n        return resid, dense\n\n\nclass DPN(nn.Module):\n    def __init__(\n            self,\n            k_sec: Tuple[int, ...] = (3, 4, 20, 3),\n            inc_sec: Tuple[int, ...] 
= (16, 32, 24, 128),\n            k_r: int = 96,\n            groups: int = 32,\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            output_stride: int = 32,\n            global_pool: str = 'avg',\n            small: bool = False,\n            num_init_features: int = 64,\n            b: bool = False,\n            drop_rate: float = 0.,\n            norm_layer: str = 'batchnorm2d',\n            act_layer: str = 'relu',\n            fc_act_layer: str = 'elu',\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.b = b\n        assert output_stride == 32  # FIXME look into dilation support\n\n        norm_layer = partial(get_norm_act_layer(norm_layer, act_layer=act_layer), eps=.001)\n        fc_norm_layer = partial(get_norm_act_layer(norm_layer, act_layer=fc_act_layer), eps=.001, inplace=False)\n        bw_factor = 1 if small else 4\n        blocks = OrderedDict()\n\n        # conv1\n        blocks['conv1_1'] = ConvNormAct(\n            in_chans,\n            num_init_features,\n            kernel_size=3 if small else 7,\n            stride=2,\n            norm_layer=norm_layer,\n            **dd,\n        )\n        blocks['conv1_pool'] = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)\n        self.feature_info = [dict(num_chs=num_init_features, reduction=2, module='features.conv1_1')]\n\n        # conv2\n        bw = 64 * bw_factor\n        inc = inc_sec[0]\n        r = (k_r * bw) // (64 * bw_factor)\n        blocks['conv2_1'] = DualPathBlock(num_init_features, r, r, bw, inc, groups, 'proj', b, **dd)\n        in_chs = bw + 3 * inc\n        for i in range(2, k_sec[0] + 1):\n            blocks['conv2_' + str(i)] = DualPathBlock(in_chs, r, r, bw, inc, groups, 'normal', b, **dd)\n            in_chs += inc\n        self.feature_info += 
[dict(num_chs=in_chs, reduction=4, module=f'features.conv2_{k_sec[0]}')]\n\n        # conv3\n        bw = 128 * bw_factor\n        inc = inc_sec[1]\n        r = (k_r * bw) // (64 * bw_factor)\n        blocks['conv3_1'] = DualPathBlock(in_chs, r, r, bw, inc, groups, 'down', b, **dd)\n        in_chs = bw + 3 * inc\n        for i in range(2, k_sec[1] + 1):\n            blocks['conv3_' + str(i)] = DualPathBlock(in_chs, r, r, bw, inc, groups, 'normal', b, **dd)\n            in_chs += inc\n        self.feature_info += [dict(num_chs=in_chs, reduction=8, module=f'features.conv3_{k_sec[1]}')]\n\n        # conv4\n        bw = 256 * bw_factor\n        inc = inc_sec[2]\n        r = (k_r * bw) // (64 * bw_factor)\n        blocks['conv4_1'] = DualPathBlock(in_chs, r, r, bw, inc, groups, 'down', b, **dd)\n        in_chs = bw + 3 * inc\n        for i in range(2, k_sec[2] + 1):\n            blocks['conv4_' + str(i)] = DualPathBlock(in_chs, r, r, bw, inc, groups, 'normal', b, **dd)\n            in_chs += inc\n        self.feature_info += [dict(num_chs=in_chs, reduction=16, module=f'features.conv4_{k_sec[2]}')]\n\n        # conv5\n        bw = 512 * bw_factor\n        inc = inc_sec[3]\n        r = (k_r * bw) // (64 * bw_factor)\n        blocks['conv5_1'] = DualPathBlock(in_chs, r, r, bw, inc, groups, 'down', b, **dd)\n        in_chs = bw + 3 * inc\n        for i in range(2, k_sec[3] + 1):\n            blocks['conv5_' + str(i)] = DualPathBlock(in_chs, r, r, bw, inc, groups, 'normal', b, **dd)\n            in_chs += inc\n        self.feature_info += [dict(num_chs=in_chs, reduction=32, module=f'features.conv5_{k_sec[3]}')]\n\n        blocks['conv5_bn_ac'] = CatBnAct(in_chs, norm_layer=fc_norm_layer, **dd)\n\n        self.num_features = self.head_hidden_size = in_chs\n        self.features = nn.Sequential(blocks)\n\n        # Using 1x1 conv for the FC layer to allow the extra pooling scheme\n        self.global_pool, self.classifier = create_classifier(\n            self.num_features,\n  
          self.num_classes,\n            pool_type=global_pool,\n            use_conv=True,\n            **dd,\n        )\n        self.flatten = nn.Flatten(1) if global_pool else nn.Identity()\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^features\\.conv1',\n            blocks=[\n                (r'^features\\.conv(\\d+)' if coarse else r'^features\\.conv(\\d+)_(\\d+)', None),\n                (r'^features\\.conv5_bn_ac', (99999,))\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, 'gradient checkpointing not supported'\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.classifier\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg'):\n        self.num_classes = num_classes\n        self.global_pool, self.classifier = create_classifier(\n            self.num_features, self.num_classes, pool_type=global_pool, use_conv=True)\n        self.flatten = nn.Flatten(1) if global_pool else nn.Identity()\n\n    def forward_features(self, x):\n        return self.features(x)\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        if self.drop_rate > 0.:\n            x = F.dropout(x, p=self.drop_rate, training=self.training)\n        if pre_logits:\n            return self.flatten(x)\n        x = self.classifier(x)\n        return self.flatten(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_dpn(variant, pretrained=False, **kwargs):\n    return build_model_with_cfg(\n        DPN,\n        variant,\n        pretrained,\n        feature_cfg=dict(feature_concat=True, flatten_sequential=True),\n        **kwargs,\n    )\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url, 'num_classes': 1000, 
'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DPN_MEAN, 'std': IMAGENET_DPN_STD,\n        'first_conv': 'features.conv1_1.conv', 'classifier': 'classifier', 'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'dpn48b.untrained': _cfg(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n    'dpn68.mx_in1k': _cfg(hf_hub_id='timm/'),\n    'dpn68b.ra_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'dpn68b.mx_in1k': _cfg(hf_hub_id='timm/'),\n    'dpn92.mx_in1k': _cfg(hf_hub_id='timm/'),\n    'dpn98.mx_in1k': _cfg(hf_hub_id='timm/'),\n    'dpn131.mx_in1k': _cfg(hf_hub_id='timm/'),\n    'dpn107.mx_in1k': _cfg(hf_hub_id='timm/')\n})\n\n\n@register_model\ndef dpn48b(pretrained=False, **kwargs) -> DPN:\n    model_args = dict(\n        small=True, num_init_features=10, k_r=128, groups=32,\n        b=True, k_sec=(3, 4, 6, 3), inc_sec=(16, 32, 32, 64), act_layer='silu')\n    return _create_dpn('dpn48b', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef dpn68(pretrained=False, **kwargs) -> DPN:\n    model_args = dict(\n        small=True, num_init_features=10, k_r=128, groups=32,\n        k_sec=(3, 4, 12, 3), inc_sec=(16, 32, 32, 64))\n    return _create_dpn('dpn68', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef dpn68b(pretrained=False, **kwargs) -> DPN:\n    model_args = dict(\n        small=True, num_init_features=10, k_r=128, groups=32,\n        b=True, k_sec=(3, 4, 12, 3), inc_sec=(16, 32, 32, 64))\n    return _create_dpn('dpn68b', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef dpn92(pretrained=False, **kwargs) -> DPN:\n    model_args = dict(\n        num_init_features=64, k_r=96, groups=32,\n        k_sec=(3, 4, 
20, 3), inc_sec=(16, 32, 24, 128))\n    return _create_dpn('dpn92', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef dpn98(pretrained=False, **kwargs) -> DPN:\n    model_args = dict(\n        num_init_features=96, k_r=160, groups=40,\n        k_sec=(3, 6, 20, 3), inc_sec=(16, 32, 32, 128))\n    return _create_dpn('dpn98', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef dpn131(pretrained=False, **kwargs) -> DPN:\n    model_args = dict(\n        num_init_features=128, k_r=160, groups=40,\n        k_sec=(4, 8, 28, 3), inc_sec=(16, 32, 32, 128))\n    return _create_dpn('dpn131', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef dpn107(pretrained=False, **kwargs) -> DPN:\n    model_args = dict(\n        num_init_features=128, k_r=200, groups=50,\n        k_sec=(4, 8, 20, 3), inc_sec=(20, 64, 64, 128))\n    return _create_dpn('dpn107', pretrained=pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/edgenext.py",
    "content": "\"\"\" EdgeNeXt\n\nPaper: `EdgeNeXt: Efficiently Amalgamated CNN-Transformer Architecture for Mobile Vision Applications`\n - https://arxiv.org/abs/2206.10589\n\nOriginal code and weights from https://github.com/mmaaz60/EdgeNeXt\n\nModifications and additions for timm by / Copyright 2022, Ross Wightman\n\"\"\"\nimport math\nfrom functools import partial\nfrom typing import List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import (\n    DropPath,\n    calculate_drop_path_rates,\n    LayerNorm2d,\n    Mlp,\n    create_conv2d,\n    NormMlpClassifierHead,\n    ClassifierHead,\n    trunc_normal_tf_,\n)\n\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_module\nfrom ._manipulate import named_apply, checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['EdgeNeXt']  # model_registry will add each entrypoint fn to this\n\n\n@register_notrace_module  # reason: FX can't symbolically trace torch.arange in forward method\nclass PositionalEncodingFourier(nn.Module):\n    def __init__(\n            self,\n            hidden_dim: int = 32,\n            dim: int = 768,\n            temperature: float = 10000.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.token_projection = nn.Conv2d(hidden_dim * 2, dim, kernel_size=1, **dd)\n        self.scale = 2 * math.pi\n        self.temperature = temperature\n        self.hidden_dim = hidden_dim\n        self.dim = dim\n\n    def forward(self, shape: Tuple[int, int, int]):\n        device = self.token_projection.weight.device\n        dtype = self.token_projection.weight.dtype\n        inv_mask = ~torch.zeros(shape).to(device=device, dtype=torch.bool)\n        
y_embed = inv_mask.cumsum(1, dtype=torch.float32)\n        x_embed = inv_mask.cumsum(2, dtype=torch.float32)\n        eps = 1e-6\n        y_embed = y_embed / (y_embed[:, -1:, :] + eps) * self.scale\n        x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale\n\n        dim_t = torch.arange(self.hidden_dim, dtype=torch.int64, device=device).to(torch.float32)\n        dim_t = self.temperature ** (2 * torch.div(dim_t, 2, rounding_mode='floor') / self.hidden_dim)\n\n        pos_x = x_embed[:, :, :, None] / dim_t\n        pos_y = y_embed[:, :, :, None] / dim_t\n        pos_x = torch.stack(\n            (pos_x[:, :, :, 0::2].sin(),\n             pos_x[:, :, :, 1::2].cos()), dim=4).flatten(3)\n        pos_y = torch.stack(\n            (pos_y[:, :, :, 0::2].sin(),\n             pos_y[:, :, :, 1::2].cos()), dim=4).flatten(3)\n        pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2)\n        pos = self.token_projection(pos.to(dtype))\n\n        return pos\n\n\nclass ConvBlock(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            dim_out: Optional[int] = None,\n            kernel_size: int = 7,\n            stride: int = 1,\n            conv_bias: bool = True,\n            expand_ratio: float = 4,\n            ls_init_value: float = 1e-6,\n            norm_layer: Type[nn.Module] = partial(nn.LayerNorm, eps=1e-6),\n            act_layer: Type[nn.Module] = nn.GELU,\n            drop_path: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        dim_out = dim_out or dim\n        self.shortcut_after_dw = stride > 1 or dim != dim_out\n\n        self.conv_dw = create_conv2d(\n            dim,\n            dim_out,\n            kernel_size=kernel_size,\n            stride=stride,\n            depthwise=True,\n            bias=conv_bias,\n            **dd,\n        )\n        self.norm = norm_layer(dim_out, **dd)\n        self.mlp = 
Mlp(\n            dim_out,\n            int(expand_ratio * dim_out),\n            act_layer=act_layer,\n            **dd,\n        )\n        self.gamma = nn.Parameter(ls_init_value * torch.ones(dim_out, **dd)) if ls_init_value > 0 else None\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def forward(self, x):\n        shortcut = x\n        x = self.conv_dw(x)\n        if self.shortcut_after_dw:\n            shortcut = x\n\n        x = x.permute(0, 2, 3, 1)  # (N, C, H, W) -> (N, H, W, C)\n        x = self.norm(x)\n        x = self.mlp(x)\n        if self.gamma is not None:\n            x = self.gamma * x\n        x = x.permute(0, 3, 1, 2)  # (N, H, W, C) -> (N, C, H, W)\n\n        x = shortcut + self.drop_path(x)\n        return x\n\n\nclass CrossCovarianceAttn(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_heads = num_heads\n        self.temperature = nn.Parameter(torch.ones(num_heads, 1, 1, **dd))\n\n        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n    def forward(self, x):\n        B, N, C = x.shape\n        qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, -1).permute(2, 0, 3, 4, 1)\n        q, k, v = qkv.unbind(0)\n\n        # NOTE, this is NOT spatial attn, q, k, v are B, num_heads, C, L -->  C x C attn map\n        attn = (F.normalize(q, dim=-1) @ F.normalize(k, dim=-1).transpose(-2, -1)) * self.temperature\n        attn = attn.softmax(dim=-1)\n        attn = self.attn_drop(attn)\n        x = (attn @ v)\n\n        x = x.permute(0, 3, 1, 
2).reshape(B, N, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {'temperature'}\n\n\nclass SplitTransposeBlock(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            num_scales: int = 1,\n            num_heads: int = 8,\n            expand_ratio: float = 4,\n            use_pos_emb: bool = True,\n            conv_bias: bool = True,\n            qkv_bias: bool = True,\n            ls_init_value: float = 1e-6,\n            norm_layer: Type[nn.Module] = partial(nn.LayerNorm, eps=1e-6),\n            act_layer: Type[nn.Module] = nn.GELU,\n            drop_path: float = 0.,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        width = max(int(math.ceil(dim / num_scales)), int(math.floor(dim // num_scales)))\n        self.width = width\n        self.num_scales = max(1, num_scales - 1)\n\n        convs = []\n        for i in range(self.num_scales):\n            convs.append(create_conv2d(width, width, kernel_size=3, depthwise=True, bias=conv_bias, **dd))\n        self.convs = nn.ModuleList(convs)\n\n        self.pos_embd = None\n        if use_pos_emb:\n            self.pos_embd = PositionalEncodingFourier(dim=dim, **dd)\n        self.norm_xca = norm_layer(dim, **dd)\n        self.gamma_xca = nn.Parameter(ls_init_value * torch.ones(dim, **dd)) if ls_init_value > 0 else None\n        self.xca = CrossCovarianceAttn(\n            dim,\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            **dd,\n        )\n\n        self.norm = norm_layer(dim, eps=1e-6, **dd)\n        self.mlp = Mlp(\n            dim,\n            int(expand_ratio * dim),\n            act_layer=act_layer,\n            **dd,\n  
      )\n        self.gamma = nn.Parameter(ls_init_value * torch.ones(dim, **dd)) if ls_init_value > 0 else None\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def forward(self, x):\n        shortcut = x\n\n        # scales code re-written for torchscript as per my res2net fixes -rw\n        # NOTE torch.split(x, self.width, 1) causing issues with ONNX export\n        spx = x.chunk(len(self.convs) + 1, dim=1)\n        spo = []\n        sp = spx[0]\n        for i, conv in enumerate(self.convs):\n            if i > 0:\n                sp = sp + spx[i]\n            sp = conv(sp)\n            spo.append(sp)\n        spo.append(spx[-1])\n        x = torch.cat(spo, 1)\n\n        # XCA\n        B, C, H, W = x.shape\n        x = x.reshape(B, C, H * W).permute(0, 2, 1)\n        if self.pos_embd is not None:\n            pos_encoding = self.pos_embd((B, H, W)).reshape(B, -1, x.shape[1]).permute(0, 2, 1)\n            x = x + pos_encoding\n        x = x + self.drop_path(self.gamma_xca * self.xca(self.norm_xca(x)))\n        x = x.reshape(B, H, W, C)\n\n        # Inverted Bottleneck\n        x = self.norm(x)\n        x = self.mlp(x)\n        if self.gamma is not None:\n            x = self.gamma * x\n        x = x.permute(0, 3, 1, 2)  # (N, H, W, C) -> (N, C, H, W)\n\n        x = shortcut + self.drop_path(x)\n        return x\n\n\nclass EdgeNeXtStage(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            stride: int = 2,\n            depth: int = 2,\n            num_global_blocks: int = 1,\n            num_heads: int = 4,\n            scales: int = 2,\n            kernel_size: int = 7,\n            expand_ratio: float = 4,\n            use_pos_emb: bool = False,\n            downsample_block: bool = False,\n            conv_bias: bool = True,\n            ls_init_value: float = 1.0,\n            drop_path_rates: Optional[List[float]] = None,\n            norm_layer: 
Type[nn.Module] = LayerNorm2d,\n            norm_layer_cl: Type[nn.Module] = partial(nn.LayerNorm, eps=1e-6),\n            act_layer: Type[nn.Module] = nn.GELU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.grad_checkpointing = False\n\n        if downsample_block or stride == 1:\n            self.downsample = nn.Identity()\n        else:\n            self.downsample = nn.Sequential(\n                norm_layer(in_chs, **dd),\n                nn.Conv2d(in_chs, out_chs, kernel_size=2, stride=2, bias=conv_bias, **dd)\n            )\n            in_chs = out_chs\n\n        stage_blocks = []\n        for i in range(depth):\n            if i < depth - num_global_blocks:\n                stage_blocks.append(\n                    ConvBlock(\n                        dim=in_chs,\n                        dim_out=out_chs,\n                        stride=stride if downsample_block and i == 0 else 1,\n                        conv_bias=conv_bias,\n                        kernel_size=kernel_size,\n                        expand_ratio=expand_ratio,\n                        ls_init_value=ls_init_value,\n                        drop_path=drop_path_rates[i],\n                        norm_layer=norm_layer_cl,\n                        act_layer=act_layer,\n                        **dd,\n                    )\n                )\n            else:\n                stage_blocks.append(\n                    SplitTransposeBlock(\n                        dim=in_chs,\n                        num_scales=scales,\n                        num_heads=num_heads,\n                        expand_ratio=expand_ratio,\n                        use_pos_emb=use_pos_emb,\n                        conv_bias=conv_bias,\n                        ls_init_value=ls_init_value,\n                        drop_path=drop_path_rates[i],\n                        norm_layer=norm_layer_cl,\n                        
act_layer=act_layer,\n                        **dd,\n                    )\n                )\n            in_chs = out_chs\n        self.blocks = nn.Sequential(*stage_blocks)\n\n    def forward(self, x):\n        x = self.downsample(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        return x\n\n\nclass EdgeNeXt(nn.Module):\n    def __init__(\n            self,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            dims: Tuple[int, ...] = (24, 48, 88, 168),\n            depths: Tuple[int, ...] = (3, 3, 9, 3),\n            global_block_counts: Tuple[int, ...] = (0, 1, 1, 1),\n            kernel_sizes: Tuple[int, ...] = (3, 5, 7, 9),\n            heads: Tuple[int, ...] = (8, 8, 8, 8),\n            d2_scales: Tuple[int, ...] = (2, 2, 3, 4),\n            use_pos_emb: Tuple[bool, ...] = (False, True, False, False),\n            ls_init_value: float = 1e-6,\n            head_init_scale: float = 1.,\n            expand_ratio: float = 4,\n            downsample_block: bool = False,\n            conv_bias: bool = True,\n            stem_type: str = 'patch',\n            head_norm_first: bool = False,\n            act_layer: Type[nn.Module] = nn.GELU,\n            drop_path_rate: float = 0.,\n            drop_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.drop_rate = drop_rate\n        norm_layer = partial(LayerNorm2d, eps=1e-6)\n        norm_layer_cl = partial(nn.LayerNorm, eps=1e-6)\n        self.feature_info = []\n\n        assert stem_type in ('patch', 'overlap')\n        if stem_type == 'patch':\n            self.stem = nn.Sequential(\n               
 nn.Conv2d(in_chans, dims[0], kernel_size=4, stride=4, bias=conv_bias, **dd,),\n                norm_layer(dims[0], **dd),\n            )\n        else:\n            self.stem = nn.Sequential(\n                nn.Conv2d(in_chans, dims[0], kernel_size=9, stride=4, padding=9 // 2, bias=conv_bias, **dd),\n                norm_layer(dims[0], **dd),\n            )\n\n        curr_stride = 4\n        stages = []\n        dp_rates = calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)\n        in_chs = dims[0]\n        for i in range(4):\n            stride = 2 if curr_stride == 2 or i > 0 else 1\n            # FIXME support dilation / output_stride\n            curr_stride *= stride\n            stages.append(EdgeNeXtStage(\n                in_chs=in_chs,\n                out_chs=dims[i],\n                stride=stride,\n                depth=depths[i],\n                num_global_blocks=global_block_counts[i],\n                num_heads=heads[i],\n                drop_path_rates=dp_rates[i],\n                scales=d2_scales[i],\n                expand_ratio=expand_ratio,\n                kernel_size=kernel_sizes[i],\n                use_pos_emb=use_pos_emb[i],\n                ls_init_value=ls_init_value,\n                downsample_block=downsample_block,\n                conv_bias=conv_bias,\n                norm_layer=norm_layer,\n                norm_layer_cl=norm_layer_cl,\n                act_layer=act_layer,\n                **dd,\n            ))\n            # NOTE feature_info use currently assumes stage 0 == stride 1, rest are stride 2\n            in_chs = dims[i]\n            self.feature_info += [dict(num_chs=in_chs, reduction=curr_stride, module=f'stages.{i}')]\n\n        self.stages = nn.Sequential(*stages)\n\n        self.num_features = self.head_hidden_size = dims[-1]\n        if head_norm_first:\n            self.norm_pre = norm_layer(self.num_features, **dd)\n            self.head = ClassifierHead(\n                self.num_features,\n   
             num_classes,\n                pool_type=global_pool,\n                drop_rate=self.drop_rate,\n                **dd,\n            )\n        else:\n            self.norm_pre = nn.Identity()\n            self.head = NormMlpClassifierHead(\n                self.num_features,\n                num_classes,\n                pool_type=global_pool,\n                drop_rate=self.drop_rate,\n                norm_layer=norm_layer,\n                **dd,\n            )\n\n        named_apply(partial(_init_weights, head_init_scale=head_init_scale), self)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^stem',\n            blocks=r'^stages\\.(\\d+)' if coarse else [\n                (r'^stages\\.(\\d+)\\.downsample', (0,)),  # blocks\n                (r'^stages\\.(\\d+)\\.blocks\\.(\\d+)', None),\n                (r'^norm_pre', (99999,))\n            ]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        for s in self.stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer 
to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.stem(x)\n        last_idx = len(self.stages) - 1\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            x = stage(x)\n            if feat_idx in take_indices:\n                if norm and feat_idx == last_idx:\n                    x_inter = self.norm_pre(x)  # applying final norm to last intermediate\n                else:\n                    x_inter = x\n                intermediates.append(x_inter)\n\n        if intermediates_only:\n            return intermediates\n\n        if feat_idx == last_idx:\n            x = self.norm_pre(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_norm:\n            self.norm_pre = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        x = self.stages(x)\n        x 
= self.norm_pre(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        return self.head(x, pre_logits=True) if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _init_weights(module, name=None, head_init_scale=1.0):\n    if isinstance(module, nn.Conv2d):\n        trunc_normal_tf_(module.weight, std=.02)\n        if module.bias is not None:\n            nn.init.zeros_(module.bias)\n    elif isinstance(module, nn.Linear):\n        trunc_normal_tf_(module.weight, std=.02)\n        nn.init.zeros_(module.bias)\n        if name and 'head.' in name:\n            module.weight.data.mul_(head_init_scale)\n            module.bias.data.mul_(head_init_scale)\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    \"\"\" Remap FB checkpoints -> timm \"\"\"\n    if 'head.norm.weight' in state_dict or 'norm_pre.weight' in state_dict:\n        return state_dict  # non-FB checkpoint\n\n    # models were released as train checkpoints... 
:/\n    if 'model_ema' in state_dict:\n        state_dict = state_dict['model_ema']\n    elif 'model' in state_dict:\n        state_dict = state_dict['model']\n    elif 'state_dict' in state_dict:\n        state_dict = state_dict['state_dict']\n\n    out_dict = {}\n    import re\n    for k, v in state_dict.items():\n        k = k.replace('downsample_layers.0.', 'stem.')\n        k = re.sub(r'stages.([0-9]+).([0-9]+)', r'stages.\\1.blocks.\\2', k)\n        k = re.sub(r'downsample_layers.([0-9]+).([0-9]+)', r'stages.\\1.downsample.\\2', k)\n        k = k.replace('dwconv', 'conv_dw')\n        k = k.replace('pwconv', 'mlp.fc')\n        k = k.replace('head.', 'head.fc.')\n        if k.startswith('norm.'):\n            k = k.replace('norm', 'head.norm')\n        if v.ndim == 2 and 'head' not in k:\n            model_shape = model.state_dict()[k].shape\n            v = v.reshape(model_shape)\n        out_dict[k] = v\n    return out_dict\n\n\ndef _create_edgenext(variant, pretrained=False, **kwargs):\n    model = build_model_with_cfg(\n        EdgeNeXt, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=(0, 1, 2, 3), flatten_sequential=True),\n        **kwargs)\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 256, 256), 'pool_size': (8, 8),\n        'crop_pct': 0.9, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.0', 'classifier': 'head.fc',\n        'license': 'mit',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'edgenext_xx_small.in1k': _cfg(\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'edgenext_x_small.in1k': _cfg(\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'edgenext_small.usi_in1k': _cfg(  # USI weights\n        
hf_hub_id='timm/',\n        crop_pct=0.95, test_input_size=(3, 320, 320), test_crop_pct=1.0,\n    ),\n    'edgenext_base.usi_in1k': _cfg(  # USI weights\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_input_size=(3, 320, 320), test_crop_pct=1.0,\n    ),\n    'edgenext_base.in21k_ft_in1k': _cfg(  # USI weights\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_input_size=(3, 320, 320), test_crop_pct=1.0,\n    ),\n    'edgenext_small_rw.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        test_input_size=(3, 320, 320), test_crop_pct=1.0,\n    ),\n})\n\n\n@register_model\ndef edgenext_xx_small(pretrained=False, **kwargs) -> EdgeNeXt:\n    # 1.33M & 260.58M @ 256 resolution\n    # 71.23% Top-1 accuracy\n    # No AA, Color Jitter=0.4, No Mixup & Cutmix, DropPath=0.0, BS=4096, lr=0.006, multi-scale-sampler\n    # Jetson FPS=51.66 versus 47.67 for MobileViT_XXS\n    # For A100: FPS @ BS=1: 212.13 & @ BS=256: 7042.06 versus FPS @ BS=1: 96.68 & @ BS=256: 4624.71 for MobileViT_XXS\n    model_args = dict(depths=(2, 2, 6, 2), dims=(24, 48, 88, 168), heads=(4, 4, 4, 4))\n    return _create_edgenext('edgenext_xx_small', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef edgenext_x_small(pretrained=False, **kwargs) -> EdgeNeXt:\n    # 2.34M & 538.0M @ 256 resolution\n    # 75.00% Top-1 accuracy\n    # No AA, No Mixup & Cutmix, DropPath=0.0, BS=4096, lr=0.006, multi-scale-sampler\n    # Jetson FPS=31.61 versus 28.49 for MobileViT_XS\n    # For A100: FPS @ BS=1: 179.55 & @ BS=256: 4404.95 versus FPS @ BS=1: 94.55 & @ BS=256: 2361.53 for MobileViT_XS\n    model_args = dict(depths=(3, 3, 9, 3), dims=(32, 64, 100, 192), heads=(4, 4, 4, 4))\n    return _create_edgenext('edgenext_x_small', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef edgenext_small(pretrained=False, **kwargs) -> EdgeNeXt:\n    # 5.59M & 1260.59M @ 256 resolution\n    # 79.43% Top-1 accuracy\n    # AA=True, No Mixup & Cutmix, DropPath=0.1, 
BS=4096, lr=0.006, multi-scale-sampler\n    # Jetson FPS=20.47 versus 18.86 for MobileViT_S\n    # For A100: FPS @ BS=1: 172.33 & @ BS=256: 3010.25 versus FPS @ BS=1: 93.84 & @ BS=256: 1785.92 for MobileViT_S\n    model_args = dict(depths=(3, 3, 9, 3), dims=(48, 96, 160, 304))\n    return _create_edgenext('edgenext_small', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef edgenext_base(pretrained=False, **kwargs) -> EdgeNeXt:\n    # 18.51M & 3840.93M @ 256 resolution\n    # 82.5% (normal) 83.7% (USI) Top-1 accuracy\n    # AA=True, Mixup & Cutmix, DropPath=0.1, BS=4096, lr=0.006, multi-scale-sampler\n    # Jetson FPS=xx.xx versus xx.xx for MobileViT_S\n    # For A100: FPS @ BS=1: xxx.xx & @ BS=256: xxxx.xx\n    model_args = dict(depths=[3, 3, 9, 3], dims=[80, 160, 288, 584])\n    return _create_edgenext('edgenext_base', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef edgenext_small_rw(pretrained=False, **kwargs) -> EdgeNeXt:\n    model_args = dict(\n        depths=(3, 3, 9, 3), dims=(48, 96, 192, 384),\n        downsample_block=True, conv_bias=False, stem_type='overlap')\n    return _create_edgenext('edgenext_small_rw', pretrained=pretrained, **dict(model_args, **kwargs))\n\n"
  },
  {
    "path": "timm/models/efficientformer.py",
    "content": "\"\"\" EfficientFormer\n\n@article{li2022efficientformer,\n  title={EfficientFormer: Vision Transformers at MobileNet Speed},\n  author={Li, Yanyu and Yuan, Geng and Wen, Yang and Hu, Eric and Evangelidis, Georgios and Tulyakov,\n   Sergey and Wang, Yanzhi and Ren, Jian},\n  journal={arXiv preprint arXiv:2206.01191},\n  year={2022}\n}\n\nBased on Apache 2.0 licensed code at https://github.com/snap-research/EfficientFormer, Copyright (c) 2022 Snap Inc.\n\nModifications and timm support by / Copyright 2022, Ross Wightman\n\"\"\"\nfrom typing import Dict, List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import (\n    DropPath,\n    LayerScale,\n    LayerScale2d,\n    Mlp,\n    calculate_drop_path_rates,\n    trunc_normal_,\n    to_2tuple,\n    ndgrid,\n)\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import generate_default_cfgs, register_model\n\n__all__ = ['EfficientFormer']  # model_registry will add each entrypoint fn to this\n\n\nEfficientFormer_width = {\n    'l1': (48, 96, 224, 448),\n    'l3': (64, 128, 320, 512),\n    'l7': (96, 192, 384, 768),\n}\n\nEfficientFormer_depth = {\n    'l1': (3, 2, 6, 4),\n    'l3': (4, 4, 12, 6),\n    'l7': (6, 6, 18, 8),\n}\n\n\nclass Attention(torch.nn.Module):\n    attention_bias_cache: Dict[str, torch.Tensor]\n\n    def __init__(\n            self,\n            dim: int = 384,\n            key_dim: int = 32,\n            num_heads: int = 8,\n            attn_ratio: float = 4,\n            resolution: int = 7,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_heads = num_heads\n        self.scale = key_dim ** -0.5\n        self.key_dim = key_dim\n        self.key_attn_dim = key_dim * num_heads\n       
 self.val_dim = int(attn_ratio * key_dim)\n        self.val_attn_dim = self.val_dim * num_heads\n        self.attn_ratio = attn_ratio\n\n        self.qkv = nn.Linear(dim, self.key_attn_dim * 2 + self.val_attn_dim, **dd)\n        self.proj = nn.Linear(self.val_attn_dim, dim, **dd)\n\n        resolution = to_2tuple(resolution)\n        pos = torch.stack(ndgrid(\n            torch.arange(resolution[0], device=device, dtype=torch.long),\n            torch.arange(resolution[1], device=device, dtype=torch.long)\n        )).flatten(1)\n        rel_pos = (pos[..., :, None] - pos[..., None, :]).abs()\n        rel_pos = (rel_pos[0] * resolution[1]) + rel_pos[1]\n        self.attention_biases = torch.nn.Parameter(torch.zeros(num_heads, resolution[0] * resolution[1], **dd))\n        self.register_buffer('attention_bias_idxs', rel_pos)\n        self.attention_bias_cache = {}  # per-device attention_biases cache (data-parallel compat)\n\n    @torch.no_grad()\n    def train(self, mode=True):\n        super().train(mode)\n        if mode and self.attention_bias_cache:\n            self.attention_bias_cache = {}  # clear ab cache\n\n    def get_attention_biases(self, device: torch.device) -> torch.Tensor:\n        if torch.jit.is_tracing() or self.training:\n            return self.attention_biases[:, self.attention_bias_idxs]\n        else:\n            device_key = str(device)\n            if device_key not in self.attention_bias_cache:\n                self.attention_bias_cache[device_key] = self.attention_biases[:, self.attention_bias_idxs]\n            return self.attention_bias_cache[device_key]\n\n    def forward(self, x):  # x (B,N,C)\n        B, N, C = x.shape\n        qkv = self.qkv(x)\n        qkv = qkv.reshape(B, N, self.num_heads, -1).permute(0, 2, 1, 3)\n        q, k, v = qkv.split([self.key_dim, self.key_dim, self.val_dim], dim=3)\n\n        attn = (q @ k.transpose(-2, -1)) * self.scale\n        attn = attn + self.get_attention_biases(x.device)\n\n        attn = 
attn.softmax(dim=-1)\n        x = (attn @ v).transpose(1, 2).reshape(B, N, self.val_attn_dim)\n        x = self.proj(x)\n        return x\n\n\nclass Stem4(nn.Sequential):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.stride = 4\n\n        self.add_module('conv1', nn.Conv2d(in_chs, out_chs // 2, kernel_size=3, stride=2, padding=1, **dd))\n        self.add_module('norm1', norm_layer(out_chs // 2, **dd))\n        self.add_module('act1', act_layer())\n        self.add_module('conv2', nn.Conv2d(out_chs // 2, out_chs, kernel_size=3, stride=2, padding=1, **dd))\n        self.add_module('norm2', norm_layer(out_chs, **dd))\n        self.add_module('act2', act_layer())\n\n\nclass Downsample(nn.Module):\n    \"\"\"\n    Downsampling via strided conv w/ norm\n    Input: tensor in shape [B, C, H, W]\n    Output: tensor in shape [B, C, H/stride, W/stride]\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 3,\n            stride: int = 2,\n            padding: Optional[int] = None,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if padding is None:\n            padding = kernel_size // 2\n        self.conv = nn.Conv2d(in_chs, out_chs, kernel_size=kernel_size, stride=stride, padding=padding, **dd)\n        self.norm = norm_layer(out_chs, **dd)\n\n    def forward(self, x):\n        x = self.conv(x)\n        x = self.norm(x)\n        return x\n\n\nclass Flat(nn.Module):\n\n    def __init__(self, ):\n        super().__init__()\n\n    def 
forward(self, x):\n        x = x.flatten(2).transpose(1, 2)\n        return x\n\n\nclass Pooling(nn.Module):\n    \"\"\"\n    Implementation of pooling for PoolFormer\n    --pool_size: pooling size\n    \"\"\"\n\n    def __init__(self, pool_size: int = 3):\n        super().__init__()\n        self.pool = nn.AvgPool2d(pool_size, stride=1, padding=pool_size // 2, count_include_pad=False)\n\n    def forward(self, x):\n        return self.pool(x) - x\n\n\nclass ConvMlpWithNorm(nn.Module):\n    \"\"\"\n    Implementation of MLP with 1*1 convolutions.\n    Input: tensor with shape [B, C, H, W]\n    \"\"\"\n\n    def __init__(\n            self,\n            in_features: int,\n            hidden_features: Optional[int] = None,\n            out_features: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_features = out_features or in_features\n        hidden_features = hidden_features or in_features\n        self.fc1 = nn.Conv2d(in_features, hidden_features, 1, **dd)\n        self.norm1 = norm_layer(hidden_features, **dd) if norm_layer is not None else nn.Identity()\n        self.act = act_layer()\n        self.fc2 = nn.Conv2d(hidden_features, out_features, 1, **dd)\n        self.norm2 = norm_layer(out_features, **dd) if norm_layer is not None else nn.Identity()\n        self.drop = nn.Dropout(drop)\n\n    def forward(self, x):\n        x = self.fc1(x)\n        x = self.norm1(x)\n        x = self.act(x)\n        x = self.drop(x)\n        x = self.fc2(x)\n        x = self.norm2(x)\n        x = self.drop(x)\n        return x\n\n\nclass MetaBlock1d(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            mlp_ratio: float = 4.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            
norm_layer: Type[nn.Module] = nn.LayerNorm,\n            proj_drop: float = 0.,\n            drop_path: float = 0.,\n            layer_scale_init_value: float = 1e-5,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.norm1 = norm_layer(dim, **dd)\n        self.token_mixer = Attention(dim, **dd)\n        self.ls1 = LayerScale(dim, layer_scale_init_value, **dd)\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp = Mlp(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n        self.ls2 = LayerScale(dim, layer_scale_init_value, **dd)\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def forward(self, x):\n        x = x + self.drop_path1(self.ls1(self.token_mixer(self.norm1(x))))\n        x = x + self.drop_path2(self.ls2(self.mlp(self.norm2(x))))\n        return x\n\n\nclass MetaBlock2d(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            pool_size: int = 3,\n            mlp_ratio: float = 4.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            proj_drop: float = 0.,\n            drop_path: float = 0.,\n            layer_scale_init_value: float = 1e-5,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.token_mixer = Pooling(pool_size=pool_size)\n        self.ls1 = LayerScale2d(dim, layer_scale_init_value, **dd)\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n        self.mlp = ConvMlpWithNorm(\n            dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            norm_layer=norm_layer,\n            drop=proj_drop,\n            **dd,\n        )\n        self.ls2 = LayerScale2d(dim, layer_scale_init_value, **dd)\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def forward(self, x):\n        x = x + self.drop_path1(self.ls1(self.token_mixer(x)))\n        x = x + self.drop_path2(self.ls2(self.mlp(x)))\n        return x\n\n\nclass EfficientFormerStage(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            dim_out: int,\n            depth: int ,\n            downsample: bool = True,\n            num_vit: int = 1,\n            pool_size: int = 3,\n            mlp_ratio: float = 4.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            norm_layer_cl: Type[nn.Module] = nn.LayerNorm,\n            proj_drop: float = .0,\n            drop_path: float = 0.,\n            layer_scale_init_value: float = 1e-5,\n            device=None,\n            dtype=None,\n):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.grad_checkpointing = False\n\n        if downsample:\n            self.downsample = Downsample(in_chs=dim, out_chs=dim_out, norm_layer=norm_layer, **dd)\n            dim = dim_out\n        else:\n            assert dim == dim_out\n            self.downsample = nn.Identity()\n\n        blocks = []\n        if num_vit and num_vit >= depth:\n            blocks.append(Flat())\n\n        for block_idx in range(depth):\n            remain_idx = depth - block_idx - 1\n            if num_vit and num_vit > remain_idx:\n                blocks.append(\n                    MetaBlock1d(\n                        dim,\n                        mlp_ratio=mlp_ratio,\n                        
act_layer=act_layer,\n                        norm_layer=norm_layer_cl,\n                        proj_drop=proj_drop,\n                        drop_path=drop_path[block_idx],\n                        layer_scale_init_value=layer_scale_init_value,\n                        **dd,\n                    ))\n            else:\n                blocks.append(\n                    MetaBlock2d(\n                        dim,\n                        pool_size=pool_size,\n                        mlp_ratio=mlp_ratio,\n                        act_layer=act_layer,\n                        norm_layer=norm_layer,\n                        proj_drop=proj_drop,\n                        drop_path=drop_path[block_idx],\n                        layer_scale_init_value=layer_scale_init_value,\n                        **dd,\n                    ))\n                if num_vit and num_vit == remain_idx:\n                    blocks.append(Flat())\n\n        self.blocks = nn.Sequential(*blocks)\n\n    def forward(self, x):\n        x = self.downsample(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        return x\n\n\nclass EfficientFormer(nn.Module):\n\n    def __init__(\n            self,\n            depths: Tuple[int, ...] = (3, 2, 6, 4),\n            embed_dims: Tuple[int, ...] 
= (48, 96, 224, 448),\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            downsamples: Optional[Tuple[bool, ...]] = None,\n            num_vit: int = 0,\n            mlp_ratios: float = 4,\n            pool_size: int = 3,\n            layer_scale_init_value: float = 1e-5,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            norm_layer_cl: Type[nn.Module] = nn.LayerNorm,\n            drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n            **kwargs\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n\n        self.stem = Stem4(in_chans, embed_dims[0], norm_layer=norm_layer, **dd)\n        prev_dim = embed_dims[0]\n\n        # stochastic depth decay rule\n        self.num_stages = len(depths)\n        last_stage = self.num_stages - 1\n        dpr = calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)\n        downsamples = downsamples or (False,) + (True,) * (self.num_stages - 1)\n        stages = []\n        self.feature_info = []\n        for i in range(self.num_stages):\n            stage = EfficientFormerStage(\n                prev_dim,\n                embed_dims[i],\n                depths[i],\n                downsample=downsamples[i],\n                num_vit=num_vit if i == last_stage else 0,\n                pool_size=pool_size,\n                mlp_ratio=mlp_ratios,\n                act_layer=act_layer,\n                norm_layer_cl=norm_layer_cl,\n                norm_layer=norm_layer,\n                proj_drop=proj_drop_rate,\n                drop_path=dpr[i],\n                layer_scale_init_value=layer_scale_init_value,\n                
**dd,\n            )\n            prev_dim = embed_dims[i]\n            stages.append(stage)\n            self.feature_info += [dict(num_chs=embed_dims[i], reduction=2**(i+2), module=f'stages.{i}')]\n        self.stages = nn.Sequential(*stages)\n\n        # Classifier head\n        self.num_features = self.head_hidden_size = embed_dims[-1]\n        self.norm = norm_layer_cl(self.num_features, **dd)\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = nn.Linear(self.num_features, num_classes, **dd) if num_classes > 0 else nn.Identity()\n        # assuming model is always distilled (valid for current checkpoints, will split def if that changes)\n        self.head_dist = nn.Linear(embed_dims[-1], num_classes, **dd) if num_classes > 0 else nn.Identity()\n        self.distilled_training = False  # must set this True to train w/ distillation token\n\n        self.apply(self._init_weights)\n\n    # init for classification\n    def _init_weights(self, m):\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=.02)\n            if isinstance(m, nn.Linear) and m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {k for k, _ in self.named_parameters() if 'attention_biases' in k}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^stem',  # stem and embed\n            blocks=[(r'^stages\\.(\\d+)', None), (r'^norm', (99999,))]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        for s in self.stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head, self.head_dist\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            
self.global_pool = global_pool\n        self.head = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity()\n        self.head_dist = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity()\n\n    @torch.jit.ignore\n    def set_distilled_training(self, enable=True):\n        self.distilled_training = enable\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.stem(x)\n        B, C, H, W = x.shape\n\n        last_idx = self.num_stages - 1\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n        feat_idx = 0\n        for feat_idx, stage in enumerate(stages):\n            x = stage(x)\n            if feat_idx < last_idx:\n                B, C, H, W = x.shape\n            if feat_idx in take_indices:\n                if 
feat_idx == last_idx:\n                    x_inter = self.norm(x) if norm else x\n                    intermediates.append(x_inter.reshape(B, H // 2, W // 2, -1).permute(0, 3, 1, 2))\n                else:\n                    intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        if feat_idx == last_idx:\n            x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        x = self.stages(x)\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        if self.global_pool == 'avg':\n            x = x.mean(dim=1)\n        x = self.head_drop(x)\n        if pre_logits:\n            return x\n        x, x_dist = self.head(x), self.head_dist(x)\n        if self.distilled_training and self.training and not torch.jit.is_scripting():\n            # only return separate classification predictions when training in distilled mode\n            return x, x_dist\n        else:\n            # during standard train/finetune, inference average the classifier predictions\n            return (x + x_dist) / 2\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    \"\"\" Remap original checkpoints -> timm 
\"\"\"\n    if 'stem.0.weight' in state_dict:\n        return state_dict  # non-original checkpoint, no remapping needed\n\n    out_dict = {}\n    import re\n    stage_idx = 0\n    for k, v in state_dict.items():\n        if k.startswith('patch_embed'):\n            k = k.replace('patch_embed.0', 'stem.conv1')\n            k = k.replace('patch_embed.1', 'stem.norm1')\n            k = k.replace('patch_embed.3', 'stem.conv2')\n            k = k.replace('patch_embed.4', 'stem.norm2')\n\n        if re.match(r'network\\.(\\d+)\\.proj\\.weight', k):\n            stage_idx += 1\n        k = re.sub(r'network.(\\d+).(\\d+)', f'stages.{stage_idx}.blocks.\\\\2', k)\n        k = re.sub(r'network.(\\d+).proj', f'stages.{stage_idx}.downsample.conv', k)\n        k = re.sub(r'network.(\\d+).norm', f'stages.{stage_idx}.downsample.norm', k)\n\n        k = re.sub(r'layer_scale_([0-9])', r'ls\\1.gamma', k)\n        k = k.replace('dist_head', 'head_dist')\n        out_dict[k] = v\n    return out_dict\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None, 'fixed_input_size': True,\n        'crop_pct': .95, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.conv1', 'classifier': ('head', 'head_dist'),\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'efficientformer_l1.snap_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'efficientformer_l3.snap_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'efficientformer_l7.snap_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n})\n\n\ndef _create_efficientformer(variant, pretrained=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', 4)\n    model = build_model_with_cfg(\n        EfficientFormer, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        
feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n    return model\n\n\n@register_model\ndef efficientformer_l1(pretrained=False, **kwargs) -> EfficientFormer:\n    model_args = dict(\n        depths=EfficientFormer_depth['l1'],\n        embed_dims=EfficientFormer_width['l1'],\n        num_vit=1,\n    )\n    return _create_efficientformer('efficientformer_l1', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef efficientformer_l3(pretrained=False, **kwargs) -> EfficientFormer:\n    model_args = dict(\n        depths=EfficientFormer_depth['l3'],\n        embed_dims=EfficientFormer_width['l3'],\n        num_vit=4,\n    )\n    return _create_efficientformer('efficientformer_l3', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef efficientformer_l7(pretrained=False, **kwargs) -> EfficientFormer:\n    model_args = dict(\n        depths=EfficientFormer_depth['l7'],\n        embed_dims=EfficientFormer_width['l7'],\n        num_vit=8,\n    )\n    return _create_efficientformer('efficientformer_l7', pretrained=pretrained, **dict(model_args, **kwargs))\n\n"
  },
  {
    "path": "timm/models/efficientformer_v2.py",
    "content": "\"\"\" EfficientFormer-V2\n\n@article{\n    li2022rethinking,\n    title={Rethinking Vision Transformers for MobileNet Size and Speed},\n    author={Li, Yanyu and Hu, Ju and Wen, Yang and Evangelidis, Georgios and Salahi, Kamyar and Wang, Yanzhi and Tulyakov, Sergey and Ren, Jian},\n    journal={arXiv preprint arXiv:2212.08059},\n    year={2022}\n}\n\nSignificantly refactored and cleaned up for timm from original at: https://github.com/snap-research/EfficientFormer\n\nOriginal code licensed Apache 2.0, Copyright (c) 2022 Snap Inc.\n\nModifications and timm support by / Copyright 2023, Ross Wightman\n\"\"\"\nimport math\nfrom functools import partial\nfrom typing import Dict, List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import (\n    create_conv2d,\n    create_norm_layer,\n    get_act_layer,\n    get_norm_layer,\n    ConvNormAct,\n    LayerScale2d,\n    DropPath,\n    calculate_drop_path_rates,\n    trunc_normal_,\n    to_2tuple,\n    to_ntuple,\n    ndgrid,\n)\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import generate_default_cfgs, register_model\n\n\n__all__ = ['EfficientFormerV2']\n\nEfficientFormer_width = {\n    'L': (40, 80, 192, 384),  # 26m 83.3% 6attn\n    'S2': (32, 64, 144, 288),  # 12m 81.6% 4attn dp0.02\n    'S1': (32, 48, 120, 224),  # 6.1m 79.0\n    'S0': (32, 48, 96, 176),  # 75.0 75.7\n}\n\nEfficientFormer_depth = {\n    'L': (5, 5, 15, 10),  # 26m 83.3%\n    'S2': (4, 4, 12, 8),  # 12m\n    'S1': (3, 3, 9, 6),  # 79.0\n    'S0': (2, 2, 6, 4),  # 75.7\n}\n\nEfficientFormer_expansion_ratios = {\n    'L': (4, 4, (4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4), (4, 4, 4, 3, 3, 3, 3, 4, 4, 4)),\n    'S2': (4, 4, (4, 4, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4), (4, 4, 3, 3, 3, 3, 4, 4)),\n    'S1': (4, 4, (4, 4, 3, 3, 3, 3, 4, 4, 4), 
(4, 4, 3, 3, 4, 4)),\n    'S0': (4, 4, (4, 3, 3, 3, 4, 4), (4, 3, 3, 4)),\n}\n\n\nclass ConvNorm(nn.Module):\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int = 1,\n            stride: int = 1,\n            padding: Union[int, str] = '',\n            dilation: int = 1,\n            groups: int = 1,\n            bias: bool = True,\n            norm_layer: str = 'batchnorm2d',\n            norm_kwargs: Optional[Dict] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        norm_kwargs = norm_kwargs or {}\n        super().__init__()\n        self.conv = create_conv2d(\n            in_channels,\n            out_channels,\n            kernel_size,\n            stride=stride,\n            padding=padding,\n            dilation=dilation,\n            groups=groups,\n            bias=bias,\n            **dd,\n        )\n        self.bn = create_norm_layer(norm_layer, out_channels, **norm_kwargs, **dd)\n\n    def forward(self, x):\n        x = self.conv(x)\n        x = self.bn(x)\n        return x\n\n\nclass Attention2d(torch.nn.Module):\n    attention_bias_cache: Dict[str, torch.Tensor]\n\n    def __init__(\n            self,\n            dim: int = 384,\n            key_dim: int = 32,\n            num_heads: int = 8,\n            attn_ratio: int = 4,\n            resolution: Union[int, Tuple[int, int]] = 7,\n            act_layer: Type[nn.Module] = nn.GELU,\n            stride: Optional[int] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_heads = num_heads\n        self.scale = key_dim ** -0.5\n        self.key_dim = key_dim\n\n        resolution = to_2tuple(resolution)\n        if stride is not None:\n            resolution = tuple([math.ceil(r / stride) for r in resolution])\n            self.stride_conv = 
ConvNorm(dim, dim, kernel_size=3, stride=stride, groups=dim, **dd)\n            self.upsample = nn.Upsample(scale_factor=stride, mode='bilinear')\n        else:\n            self.stride_conv = None\n            self.upsample = None\n\n        self.resolution = resolution\n        self.N = self.resolution[0] * self.resolution[1]\n        self.d = int(attn_ratio * key_dim)\n        self.dh = int(attn_ratio * key_dim) * num_heads\n        self.attn_ratio = attn_ratio\n        kh = self.key_dim * self.num_heads\n\n        self.q = ConvNorm(dim, kh, **dd)\n        self.k = ConvNorm(dim, kh, **dd)\n        self.v = ConvNorm(dim, self.dh, **dd)\n        self.v_local = ConvNorm(self.dh, self.dh, kernel_size=3, groups=self.dh, **dd)\n        self.talking_head1 = nn.Conv2d(self.num_heads, self.num_heads, kernel_size=1, **dd)\n        self.talking_head2 = nn.Conv2d(self.num_heads, self.num_heads, kernel_size=1, **dd)\n\n        self.act = act_layer()\n        self.proj = ConvNorm(self.dh, dim, 1, **dd)\n\n        self.attention_biases = torch.nn.Parameter(torch.empty(num_heads, self.N, **dd))\n        self.register_buffer(\n            'attention_bias_idxs',\n            torch.empty((self.N, self.N), device=device, dtype=torch.long),\n            persistent=False,\n        )\n        self.attention_bias_cache = {}\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    @torch.no_grad()\n    def train(self, mode=True):\n        super().train(mode)\n        if mode and self.attention_bias_cache:\n            self.attention_bias_cache = {}  # clear ab cache\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        nn.init.zeros_(self.attention_biases)\n        self._init_buffers()\n\n    def _compute_attention_bias_idxs(self, device=None):\n        \"\"\"Compute relative position indices for attention bias.\"\"\"\n        pos = torch.stack(ndgrid(\n            
torch.arange(self.resolution[0], device=device, dtype=torch.long),\n            torch.arange(self.resolution[1], device=device, dtype=torch.long),\n        )).flatten(1)\n        rel_pos = (pos[..., :, None] - pos[..., None, :]).abs()\n        rel_pos = (rel_pos[0] * self.resolution[1]) + rel_pos[1]\n        return rel_pos\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        self.attention_bias_idxs.copy_(\n            self._compute_attention_bias_idxs(device=self.attention_bias_idxs.device)\n        )\n        self.attention_bias_cache = {}\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n    def get_attention_biases(self, device: torch.device) -> torch.Tensor:\n        if torch.jit.is_tracing() or self.training:\n            return self.attention_biases[:, self.attention_bias_idxs]\n        else:\n            device_key = str(device)\n            if device_key not in self.attention_bias_cache:\n                self.attention_bias_cache[device_key] = self.attention_biases[:, self.attention_bias_idxs]\n            return self.attention_bias_cache[device_key]\n\n    def forward(self, x):\n        B, C, H, W = x.shape\n        if self.stride_conv is not None:\n            x = self.stride_conv(x)\n\n        q = self.q(x).reshape(B, self.num_heads, -1, self.N).permute(0, 1, 3, 2)\n        k = self.k(x).reshape(B, self.num_heads, -1, self.N).permute(0, 1, 2, 3)\n        v = self.v(x)\n        v_local = self.v_local(v)\n        v = v.reshape(B, self.num_heads, -1, self.N).permute(0, 1, 3, 2)\n\n        attn = (q @ k) * self.scale\n        attn = attn + self.get_attention_biases(x.device)\n        attn = self.talking_head1(attn)\n        attn = attn.softmax(dim=-1)\n        attn = self.talking_head2(attn)\n\n        x = (attn @ v).transpose(2, 3)\n        x = x.reshape(B, self.dh, self.resolution[0], 
self.resolution[1]) + v_local\n        if self.upsample is not None:\n            x = self.upsample(x)\n\n        x = self.act(x)\n        x = self.proj(x)\n        return x\n\n\nclass LocalGlobalQuery(torch.nn.Module):\n    def __init__(\n            self,\n            in_dim: int,\n            out_dim: int,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.pool = nn.AvgPool2d(1, 2, 0)\n        self.local = nn.Conv2d(in_dim, in_dim, kernel_size=3, stride=2, padding=1, groups=in_dim, **dd)\n        self.proj = ConvNorm(in_dim, out_dim, 1, **dd)\n\n    def forward(self, x):\n        local_q = self.local(x)\n        pool_q = self.pool(x)\n        q = local_q + pool_q\n        q = self.proj(q)\n        return q\n\n\nclass Attention2dDownsample(torch.nn.Module):\n    attention_bias_cache: Dict[str, torch.Tensor]\n\n    def __init__(\n            self,\n            dim: int = 384,\n            key_dim: int = 16,\n            num_heads: int = 8,\n            attn_ratio: int = 4,\n            resolution: Union[int, Tuple[int, int]] = 7,\n            out_dim: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.GELU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.num_heads = num_heads\n        self.scale = key_dim ** -0.5\n        self.key_dim = key_dim\n        self.resolution = to_2tuple(resolution)\n        self.resolution2 = tuple([math.ceil(r / 2) for r in self.resolution])\n        self.N = self.resolution[0] * self.resolution[1]\n        self.N2 = self.resolution2[0] * self.resolution2[1]\n\n        self.d = int(attn_ratio * key_dim)\n        self.dh = int(attn_ratio * key_dim) * num_heads\n        self.attn_ratio = attn_ratio\n        self.out_dim = out_dim or dim\n        kh = self.key_dim * self.num_heads\n\n        self.q = LocalGlobalQuery(dim, 
kh, **dd)\n        self.k = ConvNorm(dim, kh, 1, **dd)\n        self.v = ConvNorm(dim, self.dh, 1, **dd)\n        self.v_local = ConvNorm(self.dh, self.dh, kernel_size=3, stride=2, groups=self.dh, **dd)\n\n        self.act = act_layer()\n        self.proj = ConvNorm(self.dh, self.out_dim, 1, **dd)\n\n        self.attention_biases = nn.Parameter(torch.empty(num_heads, self.N, **dd))\n        self.register_buffer(\n            'attention_bias_idxs',\n            torch.empty((self.N2, self.N), device=device, dtype=torch.long),\n            persistent=False,\n        )\n        self.attention_bias_cache = {}\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    @torch.no_grad()\n    def train(self, mode=True):\n        super().train(mode)\n        if mode and self.attention_bias_cache:\n            self.attention_bias_cache = {}  # clear ab cache\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        nn.init.zeros_(self.attention_biases)\n        self._init_buffers()\n\n    def _compute_attention_bias_idxs(self, device=None):\n        \"\"\"Compute relative position indices for attention bias.\"\"\"\n        k_pos = torch.stack(ndgrid(\n            torch.arange(self.resolution[0], device=device, dtype=torch.long),\n            torch.arange(self.resolution[1], device=device, dtype=torch.long),\n        )).flatten(1)\n        q_pos = torch.stack(ndgrid(\n            torch.arange(0, self.resolution[0], step=2, device=device, dtype=torch.long),\n            torch.arange(0, self.resolution[1], step=2, device=device, dtype=torch.long),\n        )).flatten(1)\n        rel_pos = (q_pos[..., :, None] - k_pos[..., None, :]).abs()\n        rel_pos = (rel_pos[0] * self.resolution[1]) + rel_pos[1]\n        return rel_pos\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        self.attention_bias_idxs.copy_(\n         
   self._compute_attention_bias_idxs(device=self.attention_bias_idxs.device)\n        )\n        self.attention_bias_cache = {}\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n    def get_attention_biases(self, device: torch.device) -> torch.Tensor:\n        if torch.jit.is_tracing() or self.training:\n            return self.attention_biases[:, self.attention_bias_idxs]\n        else:\n            device_key = str(device)\n            if device_key not in self.attention_bias_cache:\n                self.attention_bias_cache[device_key] = self.attention_biases[:, self.attention_bias_idxs]\n            return self.attention_bias_cache[device_key]\n\n    def forward(self, x):\n        B, C, H, W = x.shape\n\n        q = self.q(x).reshape(B, self.num_heads, -1, self.N2).permute(0, 1, 3, 2)\n        k = self.k(x).reshape(B, self.num_heads, -1, self.N).permute(0, 1, 2, 3)\n        v = self.v(x)\n        v_local = self.v_local(v)\n        v = v.reshape(B, self.num_heads, -1, self.N).permute(0, 1, 3, 2)\n\n        attn = (q @ k) * self.scale\n        attn = attn + self.get_attention_biases(x.device)\n        attn = attn.softmax(dim=-1)\n\n        x = (attn @ v).transpose(2, 3)\n        x = x.reshape(B, self.dh, self.resolution2[0], self.resolution2[1]) + v_local\n        x = self.act(x)\n        x = self.proj(x)\n        return x\n\n\nclass Downsample(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: Union[int, Tuple[int, int]] = 3,\n            stride: Union[int, Tuple[int, int]] = 2,\n            padding: Union[int, Tuple[int, int]] = 1,\n            resolution: Union[int, Tuple[int, int]] = 7,\n            use_attn: bool = False,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Optional[Type[nn.Module]] = nn.BatchNorm2d,\n            device=None,\n            
dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        kernel_size = to_2tuple(kernel_size)\n        stride = to_2tuple(stride)\n        padding = to_2tuple(padding)\n        norm_layer = norm_layer or nn.Identity()\n        self.conv = ConvNorm(\n            in_chs,\n            out_chs,\n            kernel_size=kernel_size,\n            stride=stride,\n            padding=padding,\n            norm_layer=norm_layer,\n            **dd,\n        )\n\n        if use_attn:\n            self.attn = Attention2dDownsample(\n                dim=in_chs,\n                out_dim=out_chs,\n                resolution=resolution,\n                act_layer=act_layer,\n                **dd,\n            )\n        else:\n            self.attn = None\n\n    def forward(self, x):\n        out = self.conv(x)\n        if self.attn is not None:\n            return self.attn(x) + out\n        return out\n\n\nclass ConvMlpWithNorm(nn.Module):\n    \"\"\"\n    Implementation of MLP with 1*1 convolutions.\n    Input: tensor with shape [B, C, H, W]\n    \"\"\"\n\n    def __init__(\n            self,\n            in_features: int,\n            hidden_features: Optional[int] = None,\n            out_features: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            drop: float = 0.,\n            mid_conv: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_features = out_features or in_features\n        hidden_features = hidden_features or in_features\n        self.fc1 = ConvNormAct(\n            in_features,\n            hidden_features,\n            1,\n            bias=True,\n            norm_layer=norm_layer,\n            act_layer=act_layer,\n            **dd,\n        )\n        if mid_conv:\n            self.mid = ConvNormAct(\n       
         hidden_features,\n                hidden_features,\n                3,\n                groups=hidden_features,\n                bias=True,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                **dd,\n            )\n        else:\n            self.mid = nn.Identity()\n        self.drop1 = nn.Dropout(drop)\n        self.fc2 = ConvNorm(hidden_features, out_features, 1, norm_layer=norm_layer, **dd)\n        self.drop2 = nn.Dropout(drop)\n\n    def forward(self, x):\n        x = self.fc1(x)\n        x = self.mid(x)\n        x = self.drop1(x)\n        x = self.fc2(x)\n        x = self.drop2(x)\n        return x\n\n\nclass EfficientFormerV2Block(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            mlp_ratio: float = 4.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            proj_drop: float = 0.,\n            drop_path: float = 0.,\n            layer_scale_init_value: Optional[float] = 1e-5,\n            resolution: Union[int, Tuple[int, int]] = 7,\n            stride: Optional[int] = None,\n            use_attn: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        if use_attn:\n            self.token_mixer = Attention2d(\n                dim,\n                resolution=resolution,\n                act_layer=act_layer,\n                stride=stride,\n                **dd,\n            )\n            self.ls1 = LayerScale2d(\n                dim, layer_scale_init_value, **dd) if layer_scale_init_value is not None else nn.Identity()\n            self.drop_path1 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n        else:\n            self.token_mixer = None\n            self.ls1 = None\n            self.drop_path1 = None\n\n        self.mlp = ConvMlpWithNorm(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            norm_layer=norm_layer,\n            drop=proj_drop,\n            mid_conv=True,\n            **dd,\n        )\n        self.ls2 = LayerScale2d(\n            dim, layer_scale_init_value, **dd) if layer_scale_init_value is not None else nn.Identity()\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def forward(self, x):\n        if self.token_mixer is not None:\n            x = x + self.drop_path1(self.ls1(self.token_mixer(x)))\n        x = x + self.drop_path2(self.ls2(self.mlp(x)))\n        return x\n\n\nclass Stem4(nn.Sequential):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.stride = 4\n        self.conv1 = ConvNormAct(\n            in_chs,\n            out_chs // 2,\n            kernel_size=3,\n            stride=2, padding=1,\n            bias=True,\n            norm_layer=norm_layer,\n            act_layer=act_layer,\n            **dd,\n        )\n        self.conv2 = ConvNormAct(\n            out_chs // 2,\n            out_chs,\n            kernel_size=3,\n            stride=2,\n            padding=1,\n            bias=True,\n            norm_layer=norm_layer,\n            act_layer=act_layer,\n            **dd,\n        )\n\n\nclass EfficientFormerV2Stage(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            dim_out: int,\n            depth: int,\n            resolution: Union[int, 
Tuple[int, int]] = 7,\n            downsample: bool = True,\n            block_stride: Optional[int] = None,\n            downsample_use_attn: bool = False,\n            block_use_attn: bool = False,\n            num_vit: int = 1,\n            mlp_ratio: Union[float, Tuple[float, ...]] = 4.,\n            proj_drop: float = .0,\n            drop_path: Union[float, List[float]] = 0.,\n            layer_scale_init_value: Optional[float] = 1e-5,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.grad_checkpointing = False\n        mlp_ratio = to_ntuple(depth)(mlp_ratio)\n        resolution = to_2tuple(resolution)\n\n        if downsample:\n            self.downsample = Downsample(\n                dim,\n                dim_out,\n                use_attn=downsample_use_attn,\n                resolution=resolution,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                **dd,\n            )\n            dim = dim_out\n            resolution = tuple([math.ceil(r / 2) for r in resolution])\n        else:\n            assert dim == dim_out\n            self.downsample = nn.Identity()\n\n        blocks = []\n        for block_idx in range(depth):\n            remain_idx = depth - num_vit - 1\n            b = EfficientFormerV2Block(\n                dim,\n                resolution=resolution,\n                stride=block_stride,\n                mlp_ratio=mlp_ratio[block_idx],\n                use_attn=block_use_attn and block_idx > remain_idx,\n                proj_drop=proj_drop,\n                drop_path=drop_path[block_idx],\n                layer_scale_init_value=layer_scale_init_value,\n                act_layer=act_layer,\n                norm_layer=norm_layer,\n                **dd,\n            )\n            blocks += 
[b]\n        self.blocks = nn.Sequential(*blocks)\n\n    def forward(self, x):\n        x = self.downsample(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        return x\n\n\nclass EfficientFormerV2(nn.Module):\n    def __init__(\n            self,\n            depths: Tuple[int, ...],\n            in_chans: int = 3,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            global_pool: str = 'avg',\n            embed_dims: Optional[Tuple[int, ...]] = None,\n            downsamples: Optional[Tuple[bool, ...]] = None,\n            mlp_ratios: Union[float, Tuple[float, ...], Tuple[Tuple[float, ...], ...]] = 4,\n            norm_layer: str = 'batchnorm2d',\n            norm_eps: float = 1e-5,\n            act_layer: str = 'gelu',\n            num_classes: int = 1000,\n            drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            layer_scale_init_value: Optional[float] = 1e-5,\n            num_vit: int = 0,\n            distillation: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert global_pool in ('avg', '')\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.feature_info = []\n        img_size = to_2tuple(img_size)\n        norm_layer = partial(get_norm_layer(norm_layer), eps=norm_eps)\n        act_layer = get_act_layer(act_layer)\n\n        self.stem = Stem4(in_chans, embed_dims[0], act_layer=act_layer, norm_layer=norm_layer, **dd)\n        prev_dim = embed_dims[0]\n        stride = 4\n\n        num_stages = len(depths)\n        dpr = calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)\n        downsamples = downsamples or (False,) + (True,) * (len(depths) - 1)\n  
      mlp_ratios = to_ntuple(num_stages)(mlp_ratios)\n        stages = []\n        for i in range(num_stages):\n            curr_resolution = tuple([math.ceil(s / stride) for s in img_size])\n            stage = EfficientFormerV2Stage(\n                prev_dim,\n                embed_dims[i],\n                depth=depths[i],\n                resolution=curr_resolution,\n                downsample=downsamples[i],\n                block_stride=2 if i == 2 else None,\n                downsample_use_attn=i >= 3,\n                block_use_attn=i >= 2,\n                num_vit=num_vit,\n                mlp_ratio=mlp_ratios[i],\n                proj_drop=proj_drop_rate,\n                drop_path=dpr[i],\n                layer_scale_init_value=layer_scale_init_value,\n                act_layer=act_layer,\n                norm_layer=norm_layer,\n                **dd,\n            )\n            if downsamples[i]:\n                stride *= 2\n            prev_dim = embed_dims[i]\n            self.feature_info += [dict(num_chs=prev_dim, reduction=stride, module=f'stages.{i}')]\n            stages.append(stage)\n        self.stages = nn.Sequential(*stages)\n\n        # Classifier head\n        self.num_features = self.head_hidden_size = embed_dims[-1]\n        self.norm = norm_layer(embed_dims[-1], **dd)\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = nn.Linear(embed_dims[-1], num_classes, **dd) if num_classes > 0 else nn.Identity()\n        self.dist = distillation\n        if self.dist:\n            self.head_dist = nn.Linear(embed_dims[-1], num_classes, **dd) if num_classes > 0 else nn.Identity()\n        else:\n            self.head_dist = None\n\n        # TODO: skip init when on meta device when safe to do so\n        self.init_weights(needs_reset=False)\n\n        self.distilled_training = False\n\n    def _init_weights(self, m, needs_reset: bool = True):\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=.02)\n      
      if m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n        elif needs_reset and hasattr(m, 'reset_parameters'):\n            m.reset_parameters()\n\n    def init_weights(self, needs_reset: bool = True):\n        self.apply(partial(self._init_weights, needs_reset=needs_reset))\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {k for k, _ in self.named_parameters() if 'attention_biases' in k}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^stem',  # stem and embed\n            blocks=[(r'^stages\\.(\\d+)', None), (r'^norm', (99999,))]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        for s in self.stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head, self.head_dist\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            self.global_pool = global_pool\n        self.head = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity()\n        self.head_dist = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity()\n\n    @torch.jit.ignore\n    def set_distilled_training(self, enable=True):\n        self.distilled_training = enable\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            
indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.stem(x)\n\n        last_idx = len(self.stages) - 1\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            x = stage(x)\n            if feat_idx in take_indices:\n                if feat_idx == last_idx:\n                    x_inter = self.norm(x) if norm else x\n                    intermediates.append(x_inter)\n                else:\n                    intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        if feat_idx == last_idx:\n            x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def 
forward_features(self, x):\n        x = self.stem(x)\n        x = self.stages(x)\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        if self.global_pool == 'avg':\n            x = x.mean(dim=(2, 3))\n        x = self.head_drop(x)\n        if pre_logits:\n            return x\n        x, x_dist = self.head(x), self.head_dist(x)\n        if self.distilled_training and self.training and not torch.jit.is_scripting():\n            # only return separate classification predictions when training in distilled mode\n            return x, x_dist\n        else:\n            # during standard train/finetune, inference average the classifier predictions\n            return (x + x_dist) / 2\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None, 'fixed_input_size': True,\n        'crop_pct': .95, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'classifier': ('head', 'head_dist'), 'first_conv': 'stem.conv1.conv',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'efficientformerv2_s0.snap_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'efficientformerv2_s1.snap_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'efficientformerv2_s2.snap_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'efficientformerv2_l.snap_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n})\n\n\ndef _create_efficientformerv2(variant, pretrained=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', (0, 1, 2, 3))\n    model = build_model_with_cfg(\n        EfficientFormerV2, variant, pretrained,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        
**kwargs)\n    return model\n\n\n@register_model\ndef efficientformerv2_s0(pretrained=False, **kwargs) -> EfficientFormerV2:\n    model_args = dict(\n        depths=EfficientFormer_depth['S0'],\n        embed_dims=EfficientFormer_width['S0'],\n        num_vit=2,\n        drop_path_rate=0.0,\n        mlp_ratios=EfficientFormer_expansion_ratios['S0'],\n    )\n    return _create_efficientformerv2('efficientformerv2_s0', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef efficientformerv2_s1(pretrained=False, **kwargs) -> EfficientFormerV2:\n    model_args = dict(\n        depths=EfficientFormer_depth['S1'],\n        embed_dims=EfficientFormer_width['S1'],\n        num_vit=2,\n        drop_path_rate=0.0,\n        mlp_ratios=EfficientFormer_expansion_ratios['S1'],\n    )\n    return _create_efficientformerv2('efficientformerv2_s1', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef efficientformerv2_s2(pretrained=False, **kwargs) -> EfficientFormerV2:\n    model_args = dict(\n        depths=EfficientFormer_depth['S2'],\n        embed_dims=EfficientFormer_width['S2'],\n        num_vit=4,\n        drop_path_rate=0.02,\n        mlp_ratios=EfficientFormer_expansion_ratios['S2'],\n    )\n    return _create_efficientformerv2('efficientformerv2_s2', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef efficientformerv2_l(pretrained=False, **kwargs) -> EfficientFormerV2:\n    model_args = dict(\n        depths=EfficientFormer_depth['L'],\n        embed_dims=EfficientFormer_width['L'],\n        num_vit=6,\n        drop_path_rate=0.1,\n        mlp_ratios=EfficientFormer_expansion_ratios['L'],\n    )\n    return _create_efficientformerv2('efficientformerv2_l', pretrained=pretrained, **dict(model_args, **kwargs))\n\n"
  },
  {
    "path": "timm/models/efficientnet.py",
    "content": "\"\"\" The EfficientNet Family in PyTorch\n\nAn implementation of EfficienNet that covers variety of related models with efficient architectures:\n\n* EfficientNet-V2\n  - `EfficientNetV2: Smaller Models and Faster Training` - https://arxiv.org/abs/2104.00298\n\n* EfficientNet (B0-B8, L2 + Tensorflow pretrained AutoAug/RandAug/AdvProp/NoisyStudent weight ports)\n  - EfficientNet: Rethinking Model Scaling for CNNs - https://arxiv.org/abs/1905.11946\n  - CondConv: Conditionally Parameterized Convolutions for Efficient Inference - https://arxiv.org/abs/1904.04971\n  - Adversarial Examples Improve Image Recognition - https://arxiv.org/abs/1911.09665\n  - Self-training with Noisy Student improves ImageNet classification - https://arxiv.org/abs/1911.04252\n\n* MixNet (Small, Medium, and Large)\n  - MixConv: Mixed Depthwise Convolutional Kernels - https://arxiv.org/abs/1907.09595\n\n* MNasNet B1, A1 (SE), Small\n  - MnasNet: Platform-Aware Neural Architecture Search for Mobile - https://arxiv.org/abs/1807.11626\n\n* FBNet-C\n  - FBNet: Hardware-Aware Efficient ConvNet Design via Differentiable NAS - https://arxiv.org/abs/1812.03443\n\n* Single-Path NAS Pixel1\n  - Single-Path NAS: Designing Hardware-Efficient ConvNets - https://arxiv.org/abs/1904.02877\n\n* TinyNet\n    - Model Rubik's Cube: Twisting Resolution, Depth and Width for TinyNets - https://arxiv.org/abs/2010.14819\n    - Definitions & weights borrowed from https://github.com/huawei-noah/CV-Backbones/tree/master/tinynet_pytorch\n\n* And likely more...\n\nThe majority of the above models (EfficientNet*, MixNet, MnasNet) and original weights were made available\nby Mingxing Tan, Quoc Le, and other members of their Google Brain team. 
Thanks for consistently releasing\nthe models and weights open source!\n\nHacked together by / Copyright 2019, Ross Wightman\n\"\"\"\nfrom functools import partial\nfrom typing import Callable, Dict, List, Optional, Tuple, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD\nfrom timm.layers import create_conv2d, create_classifier, get_norm_act_layer, LayerType, \\\n    GroupNormAct, LayerNormAct2d, EvoNorm2dS0\nfrom ._builder import build_model_with_cfg, pretrained_cfg_for_features\nfrom ._efficientnet_blocks import SqueezeExcite\nfrom ._efficientnet_builder import BlockArgs, EfficientNetBuilder, decode_arch_def, efficientnet_init_weights, \\\n    round_channels, resolve_bn_args, resolve_act_layer, BN_EPS_TF_DEFAULT\nfrom ._features import FeatureInfo, FeatureHooks, feature_take_indices\nfrom ._manipulate import checkpoint_seq, checkpoint\nfrom ._registry import generate_default_cfgs, register_model, register_model_deprecations\n\n__all__ = ['EfficientNet', 'EfficientNetFeatures']\n\n\nclass EfficientNet(nn.Module):\n    \"\"\"EfficientNet model architecture.\n\n    A flexible and performant PyTorch implementation of efficient network architectures, including:\n      * EfficientNet-V2 Small, Medium, Large, XL & B0-B3\n      * EfficientNet B0-B8, L2\n      * EfficientNet-EdgeTPU\n      * EfficientNet-CondConv\n      * MixNet S, M, L, XL\n      * MnasNet A1, B1, and small\n      * MobileNet-V2\n      * FBNet C\n      * Single-Path NAS Pixel1\n      * TinyNet\n\n    References:\n      - EfficientNet: https://arxiv.org/abs/1905.11946\n      - EfficientNetV2: https://arxiv.org/abs/2104.00298\n      - MixNet: https://arxiv.org/abs/1907.09595\n      - MnasNet: https://arxiv.org/abs/1807.11626\n    \"\"\"\n\n    def __init__(\n            self,\n            block_args: BlockArgs,\n            num_classes: int = 1000,\n            
num_features: int = 1280,\n            in_chans: int = 3,\n            stem_size: int = 32,\n            stem_kernel_size: int = 3,\n            fix_stem: bool = False,\n            output_stride: int = 32,\n            pad_type: str = '',\n            act_layer: Optional[LayerType] = None,\n            norm_layer: Optional[LayerType] = None,\n            aa_layer: Optional[LayerType] = None,\n            se_layer: Optional[LayerType] = None,\n            round_chs_fn: Callable = round_channels,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            global_pool: str = 'avg',\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize EfficientNet model.\n\n        Args:\n            block_args: Arguments for building blocks.\n            num_classes: Number of classifier classes.\n            num_features: Number of features for penultimate layer.\n            in_chans: Number of input channels.\n            stem_size: Number of output channels in stem.\n            stem_kernel_size: Kernel size for stem convolution.\n            fix_stem: If True, don't scale stem channels.\n            output_stride: Output stride of network.\n            pad_type: Padding type.\n            act_layer: Activation layer class.\n            norm_layer: Normalization layer class.\n            aa_layer: Anti-aliasing layer class.\n            se_layer: Squeeze-and-excitation layer class.\n            round_chs_fn: Channel rounding function.\n            drop_rate: Dropout rate for classifier.\n            drop_path_rate: Drop path rate for stochastic depth.\n            global_pool: Global pooling type.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        act_layer = act_layer or nn.ReLU\n        norm_layer = norm_layer or nn.BatchNorm2d\n        norm_act_layer = get_norm_act_layer(norm_layer, act_layer)\n        se_layer = se_layer or SqueezeExcite\n        
self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n\n        # Stem\n        if not fix_stem:\n            stem_size = round_chs_fn(stem_size)\n        self.conv_stem = create_conv2d(in_chans, stem_size, stem_kernel_size, stride=2, padding=pad_type, **dd)\n        self.bn1 = norm_act_layer(stem_size, inplace=True, **dd)\n\n        # Middle stages (IR/ER/DS Blocks)\n        builder = EfficientNetBuilder(\n            output_stride=output_stride,\n            pad_type=pad_type,\n            round_chs_fn=round_chs_fn,\n            act_layer=act_layer,\n            norm_layer=norm_layer,\n            aa_layer=aa_layer,\n            se_layer=se_layer,\n            drop_path_rate=drop_path_rate,\n            **dd,\n        )\n        self.blocks = nn.Sequential(*builder(stem_size, block_args))\n        self.feature_info = builder.features\n        self.stage_ends = [f['stage'] for f in self.feature_info]\n        head_chs = builder.in_chs\n\n        # Head + Pooling\n        if num_features > 0:\n            self.conv_head = create_conv2d(head_chs, num_features, 1, padding=pad_type, **dd)\n            self.bn2 = norm_act_layer(num_features, inplace=True, **dd)\n            self.num_features = self.head_hidden_size = num_features\n        else:\n            self.conv_head = nn.Identity()\n            self.bn2 = nn.Identity()\n            self.num_features = self.head_hidden_size = head_chs\n\n        self.global_pool, self.classifier = create_classifier(\n            self.num_features,\n            self.num_classes,\n            pool_type=global_pool,\n            **dd,\n        )\n\n        efficientnet_init_weights(self)\n\n    def as_sequential(self) -> nn.Sequential:\n        \"\"\"Convert model to sequential for feature extraction.\"\"\"\n        layers = [self.conv_stem, self.bn1]\n        layers.extend(self.blocks)\n        layers.extend([self.conv_head, self.bn2, 
self.global_pool])\n        layers.extend([nn.Dropout(self.drop_rate), self.classifier])\n        return nn.Sequential(*layers)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Union[str, List]]:\n        \"\"\"Create regex patterns for parameter groups.\n\n        Args:\n            coarse: Use coarse (stage-level) grouping.\n\n        Returns:\n            Dictionary mapping group names to regex patterns.\n        \"\"\"\n        return dict(\n            stem=r'^conv_stem|bn1',\n            blocks=[\n                (r'^blocks\\.(\\d+)' if coarse else r'^blocks\\.(\\d+)\\.(\\d+)', None),\n                (r'conv_head|bn2', (99999,))\n            ]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Enable or disable gradient checkpointing.\n\n        Args:\n            enable: Whether to enable gradient checkpointing.\n        \"\"\"\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        \"\"\"Get the classifier module.\"\"\"\n        return self.classifier\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg') -> None:\n        \"\"\"Reset the classifier head.\n\n        Args:\n            num_classes: Number of classes for new classifier.\n            global_pool: Global pooling type.\n        \"\"\"\n        self.num_classes = num_classes\n        self.global_pool, self.classifier = create_classifier(\n            self.num_features, self.num_classes, pool_type=global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n            extra_blocks: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, 
List[torch.Tensor]]]:\n        \"\"\"Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor.\n            indices: Take last n blocks if int, all if None, select matching indices if sequence.\n            norm: Apply norm layer to compatible intermediates.\n            stop_early: Stop iterating over blocks when last desired intermediate hit.\n            output_fmt: Shape of intermediate feature outputs.\n            intermediates_only: Only return intermediate features.\n            extra_blocks: Include outputs of all blocks and head conv in output, does not align with feature_info.\n\n        Returns:\n            List of intermediate features or tuple of (final features, intermediates).\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        if extra_blocks:\n            take_indices, max_index = feature_take_indices(len(self.blocks) + 1, indices)\n        else:\n            take_indices, max_index = feature_take_indices(len(self.stage_ends), indices)\n            take_indices = [self.stage_ends[i] for i in take_indices]\n            max_index = self.stage_ends[max_index]\n        # forward pass\n        feat_idx = 0  # stem is index 0\n        x = self.conv_stem(x)\n        x = self.bn1(x)\n        if feat_idx in take_indices:\n            intermediates.append(x)\n\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            blocks = self.blocks\n        else:\n            blocks = self.blocks[:max_index]\n        for feat_idx, blk in enumerate(blocks, start=1):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint_seq(blk, x)\n            else:\n                x = blk(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        if feat_idx == 
self.stage_ends[-1]:\n            x = self.conv_head(x)\n            x = self.bn2(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n            extra_blocks: bool = False,\n    ) -> List[int]:\n        \"\"\"Prune layers not required for specified intermediates.\n\n        Args:\n            indices: Indices of intermediate layers to keep.\n            prune_norm: Whether to prune normalization layers.\n            prune_head: Whether to prune the classifier head.\n            extra_blocks: Include all blocks in indexing.\n\n        Returns:\n            List of indices that were kept.\n        \"\"\"\n        if extra_blocks:\n            take_indices, max_index = feature_take_indices(len(self.blocks) + 1, indices)\n        else:\n            take_indices, max_index = feature_take_indices(len(self.stage_ends), indices)\n            max_index = self.stage_ends[max_index]\n        self.blocks = self.blocks[:max_index]  # truncate blocks w/ stem as idx 0\n        if prune_norm or max_index < len(self.blocks):\n            self.conv_head = nn.Identity()\n            self.bn2 = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through feature extraction layers.\"\"\"\n        x = self.conv_stem(x)\n        x = self.bn1(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x, flatten=True)\n        else:\n            x = self.blocks(x)\n        x = self.conv_head(x)\n        x = self.bn2(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        \"\"\"Forward pass through classifier head.\n\n        Args:\n            x: 
Feature tensor.\n            pre_logits: Return features before final classifier.\n\n        Returns:\n            Output tensor.\n        \"\"\"\n        x = self.global_pool(x)\n        if self.drop_rate > 0.:\n            x = F.dropout(x, p=self.drop_rate, training=self.training)\n        return x if pre_logits else self.classifier(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\"\"\"\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\nclass EfficientNetFeatures(nn.Module):\n    \"\"\" EfficientNet Feature Extractor\n\n    A work-in-progress feature extraction module for EfficientNet, to use as a backbone for segmentation\n    and object detection models.\n    \"\"\"\n\n    def __init__(\n            self,\n            block_args: BlockArgs,\n            out_indices: Tuple[int, ...] = (0, 1, 2, 3, 4),\n            feature_location: str = 'bottleneck',\n            in_chans: int = 3,\n            stem_size: int = 32,\n            stem_kernel_size: int = 3,\n            fix_stem: bool = False,\n            output_stride: int = 32,\n            pad_type: str = '',\n            act_layer: Optional[LayerType] = None,\n            norm_layer: Optional[LayerType] = None,\n            aa_layer: Optional[LayerType] = None,\n            se_layer: Optional[LayerType] = None,\n            round_chs_fn: Callable = round_channels,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        act_layer = act_layer or nn.ReLU\n        norm_layer = norm_layer or nn.BatchNorm2d\n        norm_act_layer = get_norm_act_layer(norm_layer, act_layer)\n        se_layer = se_layer or SqueezeExcite\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n\n        # Stem\n        if not 
fix_stem:\n            stem_size = round_chs_fn(stem_size)\n        self.conv_stem = create_conv2d(in_chans, stem_size, stem_kernel_size, stride=2, padding=pad_type, **dd)\n        self.bn1 = norm_act_layer(stem_size, inplace=True, **dd)\n\n        # Middle stages (IR/ER/DS Blocks)\n        builder = EfficientNetBuilder(\n            output_stride=output_stride,\n            pad_type=pad_type,\n            round_chs_fn=round_chs_fn,\n            act_layer=act_layer,\n            norm_layer=norm_layer,\n            aa_layer=aa_layer,\n            se_layer=se_layer,\n            drop_path_rate=drop_path_rate,\n            feature_location=feature_location,\n            **dd,\n        )\n        self.blocks = nn.Sequential(*builder(stem_size, block_args))\n        self.feature_info = FeatureInfo(builder.features, out_indices)\n        self._stage_out_idx = {f['stage']: f['index'] for f in self.feature_info.get_dicts()}\n\n        efficientnet_init_weights(self)\n\n        # Register feature extraction hooks with FeatureHooks helper\n        self.feature_hooks = None\n        if feature_location != 'bottleneck':\n            hooks = self.feature_info.get_dicts(keys=('module', 'hook_type'))\n            self.feature_hooks = FeatureHooks(hooks, self.named_modules())\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Enable or disable gradient checkpointing.\n\n        Args:\n            enable: Whether to enable gradient checkpointing.\n        \"\"\"\n        self.grad_checkpointing = enable\n\n    def forward(self, x) -> List[torch.Tensor]:\n        x = self.conv_stem(x)\n        x = self.bn1(x)\n        if self.feature_hooks is None:\n            features = []\n            if 0 in self._stage_out_idx:\n                features.append(x)  # add stem out\n            for i, b in enumerate(self.blocks):\n                if self.grad_checkpointing and not torch.jit.is_scripting():\n                    x = 
checkpoint(b, x)\n                else:\n                    x = b(x)\n                if i + 1 in self._stage_out_idx:\n                    features.append(x)\n            return features\n        else:\n            self.blocks(x)\n            out = self.feature_hooks.get_output(x.device)\n            return list(out.values())\n\n\ndef _create_effnet(variant, pretrained=False, **kwargs):\n    features_mode = ''\n    model_cls = EfficientNet\n    kwargs_filter = None\n    if kwargs.pop('features_only', False):\n        if 'feature_cfg' in kwargs or 'feature_cls' in kwargs:\n            features_mode = 'cfg'\n        else:\n            kwargs_filter = ('num_classes', 'num_features', 'head_conv', 'global_pool')\n            model_cls = EfficientNetFeatures\n            features_mode = 'cls'\n    pretrained_strict = kwargs.pop('pretrained_strict', True)\n\n    model = build_model_with_cfg(\n        model_cls,\n        variant,\n        pretrained,\n        features_only=features_mode == 'cfg',\n        pretrained_strict=pretrained_strict and features_mode != 'cls',\n        kwargs_filter=kwargs_filter,\n        **kwargs,\n    )\n    if features_mode == 'cls':\n        model.pretrained_cfg = model.default_cfg = pretrained_cfg_for_features(model.pretrained_cfg)\n    return model\n\n\ndef _gen_mnasnet_a1(variant, channel_multiplier=1.0, pretrained=False, **kwargs):\n    \"\"\"Creates a mnasnet-a1 model.\n\n    Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/mnasnet\n    Paper: https://arxiv.org/pdf/1807.11626.pdf.\n\n    Args:\n      channel_multiplier: multiplier to number of channels per layer.\n    \"\"\"\n    arch_def = [\n        # stage 0, 112x112 in\n        ['ds_r1_k3_s1_e1_c16_noskip'],\n        # stage 1, 112x112 in\n        ['ir_r2_k3_s2_e6_c24'],\n        # stage 2, 56x56 in\n        ['ir_r3_k5_s2_e3_c40_se0.25'],\n        # stage 3, 28x28 in\n        ['ir_r4_k3_s2_e6_c80'],\n        # stage 4, 14x14in\n        
['ir_r2_k3_s1_e6_c112_se0.25'],\n        # stage 5, 14x14in\n        ['ir_r3_k5_s2_e6_c160_se0.25'],\n        # stage 6, 7x7 in\n        ['ir_r1_k3_s1_e6_c320'],\n    ]\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def),\n        stem_size=32,\n        round_chs_fn=partial(round_channels, multiplier=channel_multiplier),\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        **kwargs\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_mnasnet_b1(variant, channel_multiplier=1.0, pretrained=False, **kwargs):\n    \"\"\"Creates a mnasnet-b1 model.\n\n    Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/mnasnet\n    Paper: https://arxiv.org/pdf/1807.11626.pdf.\n\n    Args:\n      channel_multiplier: multiplier to number of channels per layer.\n    \"\"\"\n    arch_def = [\n        # stage 0, 112x112 in\n        ['ds_r1_k3_s1_c16_noskip'],\n        # stage 1, 112x112 in\n        ['ir_r3_k3_s2_e3_c24'],\n        # stage 2, 56x56 in\n        ['ir_r3_k5_s2_e3_c40'],\n        # stage 3, 28x28 in\n        ['ir_r3_k5_s2_e6_c80'],\n        # stage 4, 14x14in\n        ['ir_r2_k3_s1_e6_c96'],\n        # stage 5, 14x14in\n        ['ir_r4_k5_s2_e6_c192'],\n        # stage 6, 7x7 in\n        ['ir_r1_k3_s1_e6_c320_noskip']\n    ]\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def),\n        stem_size=32,\n        round_chs_fn=partial(round_channels, multiplier=channel_multiplier),\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        **kwargs\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_mnasnet_small(variant, channel_multiplier=1.0, pretrained=False, **kwargs):\n    \"\"\"Creates a mnasnet-b1 model.\n\n    Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/mnasnet\n    
Paper: https://arxiv.org/pdf/1807.11626.pdf.\n\n    Args:\n      channel_multiplier: multiplier to number of channels per layer.\n    \"\"\"\n    arch_def = [\n        ['ds_r1_k3_s1_c8'],\n        ['ir_r1_k3_s2_e3_c16'],\n        ['ir_r2_k3_s2_e6_c16'],\n        ['ir_r4_k5_s2_e6_c32_se0.25'],\n        ['ir_r3_k3_s1_e6_c32_se0.25'],\n        ['ir_r3_k5_s2_e6_c88_se0.25'],\n        ['ir_r1_k3_s1_e6_c144']\n    ]\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def),\n        stem_size=8,\n        round_chs_fn=partial(round_channels, multiplier=channel_multiplier),\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        **kwargs\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_mobilenet_v1(\n        variant, channel_multiplier=1.0, depth_multiplier=1.0,\n        group_size=None, fix_stem_head=False, head_conv=False, pretrained=False, **kwargs\n):\n    \"\"\"\n    Ref impl: https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet_v2.py\n    Paper: https://arxiv.org/abs/1801.04381\n    \"\"\"\n    arch_def = [\n        ['dsa_r1_k3_s1_c64'],\n        ['dsa_r2_k3_s2_c128'],\n        ['dsa_r2_k3_s2_c256'],\n        ['dsa_r6_k3_s2_c512'],\n        ['dsa_r2_k3_s2_c1024'],\n    ]\n    round_chs_fn = partial(round_channels, multiplier=channel_multiplier)\n    head_features = (1024 if fix_stem_head else max(1024, round_chs_fn(1024))) if head_conv else 0\n    model_kwargs = dict(\n        block_args=decode_arch_def(\n            arch_def,\n            depth_multiplier=depth_multiplier,\n            fix_first_last=fix_stem_head,\n            group_size=group_size,\n        ),\n        num_features=head_features,\n        stem_size=32,\n        fix_stem=fix_stem_head,\n        round_chs_fn=round_chs_fn,\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        
act_layer=resolve_act_layer(kwargs, 'relu6'),\n        **kwargs\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_mobilenet_v2(\n        variant, channel_multiplier=1.0, depth_multiplier=1.0,\n        group_size=None, fix_stem_head=False, pretrained=False, **kwargs\n):\n    \"\"\" Generate MobileNet-V2 network\n    Ref impl: https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet_v2.py\n    Paper: https://arxiv.org/abs/1801.04381\n    \"\"\"\n    arch_def = [\n        ['ds_r1_k3_s1_c16'],\n        ['ir_r2_k3_s2_e6_c24'],\n        ['ir_r3_k3_s2_e6_c32'],\n        ['ir_r4_k3_s2_e6_c64'],\n        ['ir_r3_k3_s1_e6_c96'],\n        ['ir_r3_k3_s2_e6_c160'],\n        ['ir_r1_k3_s1_e6_c320'],\n    ]\n    round_chs_fn = partial(round_channels, multiplier=channel_multiplier)\n    model_kwargs = dict(\n        block_args=decode_arch_def(\n            arch_def,\n            depth_multiplier=depth_multiplier,\n            fix_first_last=fix_stem_head,\n            group_size=group_size,\n        ),\n        num_features=1280 if fix_stem_head else max(1280, round_chs_fn(1280)),\n        stem_size=32,\n        fix_stem=fix_stem_head,\n        round_chs_fn=round_chs_fn,\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        act_layer=resolve_act_layer(kwargs, 'relu6'),\n        **kwargs\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_fbnetc(variant, channel_multiplier=1.0, pretrained=False, **kwargs):\n    \"\"\" FBNet-C\n\n        Paper: https://arxiv.org/abs/1812.03443\n        Ref Impl: https://github.com/facebookresearch/maskrcnn-benchmark/blob/master/maskrcnn_benchmark/modeling/backbone/fbnet_modeldef.py\n\n        NOTE: the impl above does not relate to the 'C' variant here, that was derived from paper,\n        it was used to confirm some building block details\n    \"\"\"\n  
  arch_def = [\n        ['ir_r1_k3_s1_e1_c16'],\n        ['ir_r1_k3_s2_e6_c24', 'ir_r2_k3_s1_e1_c24'],\n        ['ir_r1_k5_s2_e6_c32', 'ir_r1_k5_s1_e3_c32', 'ir_r1_k5_s1_e6_c32', 'ir_r1_k3_s1_e6_c32'],\n        ['ir_r1_k5_s2_e6_c64', 'ir_r1_k5_s1_e3_c64', 'ir_r2_k5_s1_e6_c64'],\n        ['ir_r3_k5_s1_e6_c112', 'ir_r1_k5_s1_e3_c112'],\n        ['ir_r4_k5_s2_e6_c184'],\n        ['ir_r1_k3_s1_e6_c352'],\n    ]\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def),\n        stem_size=16,\n        num_features=1984,  # paper suggests this, but is not 100% clear\n        round_chs_fn=partial(round_channels, multiplier=channel_multiplier),\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        **kwargs\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_spnasnet(variant, channel_multiplier=1.0, pretrained=False, **kwargs):\n    \"\"\"Creates the Single-Path NAS model from search targeted for Pixel1 phone.\n\n    Paper: https://arxiv.org/abs/1904.02877\n\n    Args:\n      channel_multiplier: multiplier to number of channels per layer.\n    \"\"\"\n    arch_def = [\n        # stage 0, 112x112 in\n        ['ds_r1_k3_s1_c16_noskip'],\n        # stage 1, 112x112 in\n        ['ir_r3_k3_s2_e3_c24'],\n        # stage 2, 56x56 in\n        ['ir_r1_k5_s2_e6_c40', 'ir_r3_k3_s1_e3_c40'],\n        # stage 3, 28x28 in\n        ['ir_r1_k5_s2_e6_c80', 'ir_r3_k3_s1_e3_c80'],\n        # stage 4, 14x14in\n        ['ir_r1_k5_s1_e6_c96', 'ir_r3_k5_s1_e3_c96'],\n        # stage 5, 14x14in\n        ['ir_r4_k5_s2_e6_c192'],\n        # stage 6, 7x7 in\n        ['ir_r1_k3_s1_e6_c320_noskip']\n    ]\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def),\n        stem_size=32,\n        round_chs_fn=partial(round_channels, multiplier=channel_multiplier),\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, 
**resolve_bn_args(kwargs)),\n        **kwargs\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_efficientnet(\n        variant, channel_multiplier=1.0, depth_multiplier=1.0, channel_divisor=8,\n        group_size=None, pretrained=False, **kwargs\n):\n    \"\"\"Creates an EfficientNet model.\n\n    Ref impl: https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/efficientnet_model.py\n    Paper: https://arxiv.org/abs/1905.11946\n\n    EfficientNet params\n    name: (channel_multiplier, depth_multiplier, resolution, dropout_rate)\n    'efficientnet-b0': (1.0, 1.0, 224, 0.2),\n    'efficientnet-b1': (1.0, 1.1, 240, 0.2),\n    'efficientnet-b2': (1.1, 1.2, 260, 0.3),\n    'efficientnet-b3': (1.2, 1.4, 300, 0.3),\n    'efficientnet-b4': (1.4, 1.8, 380, 0.4),\n    'efficientnet-b5': (1.6, 2.2, 456, 0.4),\n    'efficientnet-b6': (1.8, 2.6, 528, 0.5),\n    'efficientnet-b7': (2.0, 3.1, 600, 0.5),\n    'efficientnet-b8': (2.2, 3.6, 672, 0.5),\n    'efficientnet-l2': (4.3, 5.3, 800, 0.5),\n\n    Args:\n      channel_multiplier: multiplier to number of channels per layer\n      depth_multiplier: multiplier to number of repeats per stage\n\n    \"\"\"\n    arch_def = [\n        ['ds_r1_k3_s1_e1_c16_se0.25'],\n        ['ir_r2_k3_s2_e6_c24_se0.25'],\n        ['ir_r2_k5_s2_e6_c40_se0.25'],\n        ['ir_r3_k3_s2_e6_c80_se0.25'],\n        ['ir_r3_k5_s1_e6_c112_se0.25'],\n        ['ir_r4_k5_s2_e6_c192_se0.25'],\n        ['ir_r1_k3_s1_e6_c320_se0.25'],\n    ]\n    round_chs_fn = partial(round_channels, multiplier=channel_multiplier, divisor=channel_divisor)\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def, depth_multiplier, group_size=group_size),\n        num_features=round_chs_fn(1280),\n        stem_size=32,\n        round_chs_fn=round_chs_fn,\n        act_layer=resolve_act_layer(kwargs, 'swish'),\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, 
**resolve_bn_args(kwargs)),\n        **kwargs,\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_efficientnet_edge(\n        variant, channel_multiplier=1.0, depth_multiplier=1.0, group_size=None, pretrained=False, **kwargs\n):\n    \"\"\" Creates an EfficientNet-EdgeTPU model\n\n    Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/efficientnet/edgetpu\n    \"\"\"\n\n    arch_def = [\n        # NOTE `fc` is present to override a mismatch between stem channels and in chs not\n        # present in other models\n        ['er_r1_k3_s1_e4_c24_fc24_noskip'],\n        ['er_r2_k3_s2_e8_c32'],\n        ['er_r4_k3_s2_e8_c48'],\n        ['ir_r5_k5_s2_e8_c96'],\n        ['ir_r4_k5_s1_e8_c144'],\n        ['ir_r2_k5_s2_e8_c192'],\n    ]\n    round_chs_fn = partial(round_channels, multiplier=channel_multiplier)\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def, depth_multiplier, group_size=group_size),\n        num_features=round_chs_fn(1280),\n        stem_size=32,\n        round_chs_fn=round_chs_fn,\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        act_layer=resolve_act_layer(kwargs, 'relu'),\n        **kwargs,\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_efficientnet_condconv(\n        variant, channel_multiplier=1.0, depth_multiplier=1.0, experts_multiplier=1, pretrained=False, **kwargs\n):\n    \"\"\"Creates an EfficientNet-CondConv model.\n\n    Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/efficientnet/condconv\n    \"\"\"\n    arch_def = [\n        ['ds_r1_k3_s1_e1_c16_se0.25'],\n        ['ir_r2_k3_s2_e6_c24_se0.25'],\n        ['ir_r2_k5_s2_e6_c40_se0.25'],\n        ['ir_r3_k3_s2_e6_c80_se0.25'],\n        ['ir_r3_k5_s1_e6_c112_se0.25_cc4'],\n        ['ir_r4_k5_s2_e6_c192_se0.25_cc4'],\n        
['ir_r1_k3_s1_e6_c320_se0.25_cc4'],\n    ]\n    # NOTE unlike official impl, this one uses `cc<x>` option where x is the base number of experts for each stage and\n    # the expert_multiplier increases that on a per-model basis as with depth/channel multipliers\n    round_chs_fn = partial(round_channels, multiplier=channel_multiplier)\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def, depth_multiplier, experts_multiplier=experts_multiplier),\n        num_features=round_chs_fn(1280),\n        stem_size=32,\n        round_chs_fn=round_chs_fn,\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        act_layer=resolve_act_layer(kwargs, 'swish'),\n        **kwargs,\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_efficientnet_lite(variant, channel_multiplier=1.0, depth_multiplier=1.0, pretrained=False, **kwargs):\n    \"\"\"Creates an EfficientNet-Lite model.\n\n    Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/efficientnet/lite\n    Paper: https://arxiv.org/abs/1905.11946\n\n    EfficientNet params\n    name: (channel_multiplier, depth_multiplier, resolution, dropout_rate)\n      'efficientnet-lite0': (1.0, 1.0, 224, 0.2),\n      'efficientnet-lite1': (1.0, 1.1, 240, 0.2),\n      'efficientnet-lite2': (1.1, 1.2, 260, 0.3),\n      'efficientnet-lite3': (1.2, 1.4, 280, 0.3),\n      'efficientnet-lite4': (1.4, 1.8, 300, 0.3),\n\n    Args:\n      channel_multiplier: multiplier to number of channels per layer\n      depth_multiplier: multiplier to number of repeats per stage\n    \"\"\"\n    arch_def = [\n        ['ds_r1_k3_s1_e1_c16'],\n        ['ir_r2_k3_s2_e6_c24'],\n        ['ir_r2_k5_s2_e6_c40'],\n        ['ir_r3_k3_s2_e6_c80'],\n        ['ir_r3_k5_s1_e6_c112'],\n        ['ir_r4_k5_s2_e6_c192'],\n        ['ir_r1_k3_s1_e6_c320'],\n    ]\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def, 
depth_multiplier, fix_first_last=True),\n        num_features=1280,\n        stem_size=32,\n        fix_stem=True,\n        round_chs_fn=partial(round_channels, multiplier=channel_multiplier),\n        act_layer=resolve_act_layer(kwargs, 'relu6'),\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        **kwargs,\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_efficientnetv2_base(\n        variant, channel_multiplier=1.0, depth_multiplier=1.0, group_size=None, pretrained=False, **kwargs\n):\n    \"\"\" Creates an EfficientNet-V2 base model\n\n    Ref impl: https://github.com/google/automl/tree/master/efficientnetv2\n    Paper: `EfficientNetV2: Smaller Models and Faster Training` - https://arxiv.org/abs/2104.00298\n    \"\"\"\n    arch_def = [\n        ['cn_r1_k3_s1_e1_c16_skip'],\n        ['er_r2_k3_s2_e4_c32'],\n        ['er_r2_k3_s2_e4_c48'],\n        ['ir_r3_k3_s2_e4_c96_se0.25'],\n        ['ir_r5_k3_s1_e6_c112_se0.25'],\n        ['ir_r8_k3_s2_e6_c192_se0.25'],\n    ]\n    round_chs_fn = partial(round_channels, multiplier=channel_multiplier, round_limit=0.)\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def, depth_multiplier, group_size=group_size),\n        num_features=round_chs_fn(1280),\n        stem_size=32,\n        round_chs_fn=round_chs_fn,\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        act_layer=resolve_act_layer(kwargs, 'silu'),\n        **kwargs,\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_efficientnetv2_s(\n        variant, channel_multiplier=1.0, depth_multiplier=1.0, group_size=None, rw=False, pretrained=False, **kwargs\n):\n    \"\"\" Creates an EfficientNet-V2 Small model\n\n    Ref impl: https://github.com/google/automl/tree/master/efficientnetv2\n    Paper: `EfficientNetV2: Smaller 
Models and Faster Training` - https://arxiv.org/abs/2104.00298\n\n    NOTE: `rw` flag sets up 'small' variant to behave like my initial v2 small model,\n        before ref the impl was released.\n    \"\"\"\n    arch_def = [\n        ['cn_r2_k3_s1_e1_c24_skip'],\n        ['er_r4_k3_s2_e4_c48'],\n        ['er_r4_k3_s2_e4_c64'],\n        ['ir_r6_k3_s2_e4_c128_se0.25'],\n        ['ir_r9_k3_s1_e6_c160_se0.25'],\n        ['ir_r15_k3_s2_e6_c256_se0.25'],\n    ]\n    num_features = 1280\n    if rw:\n        # my original variant, based on paper figure differs from the official release\n        arch_def[0] = ['er_r2_k3_s1_e1_c24']\n        arch_def[-1] = ['ir_r15_k3_s2_e6_c272_se0.25']\n        num_features = 1792\n\n    round_chs_fn = partial(round_channels, multiplier=channel_multiplier)\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def, depth_multiplier, group_size=group_size),\n        num_features=round_chs_fn(num_features),\n        stem_size=24,\n        round_chs_fn=round_chs_fn,\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        act_layer=resolve_act_layer(kwargs, 'silu'),\n        **kwargs,\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_efficientnetv2_m(\n        variant, channel_multiplier=1.0, depth_multiplier=1.0, group_size=None, pretrained=False, **kwargs\n):\n    \"\"\" Creates an EfficientNet-V2 Medium model\n\n    Ref impl: https://github.com/google/automl/tree/master/efficientnetv2\n    Paper: `EfficientNetV2: Smaller Models and Faster Training` - https://arxiv.org/abs/2104.00298\n    \"\"\"\n\n    arch_def = [\n        ['cn_r3_k3_s1_e1_c24_skip'],\n        ['er_r5_k3_s2_e4_c48'],\n        ['er_r5_k3_s2_e4_c80'],\n        ['ir_r7_k3_s2_e4_c160_se0.25'],\n        ['ir_r14_k3_s1_e6_c176_se0.25'],\n        ['ir_r18_k3_s2_e6_c304_se0.25'],\n        ['ir_r5_k3_s1_e6_c512_se0.25'],\n    ]\n\n    model_kwargs = dict(\n    
    block_args=decode_arch_def(arch_def, depth_multiplier, group_size=group_size),\n        num_features=1280,\n        stem_size=24,\n        round_chs_fn=partial(round_channels, multiplier=channel_multiplier),\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        act_layer=resolve_act_layer(kwargs, 'silu'),\n        **kwargs,\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_efficientnetv2_l(\n        variant, channel_multiplier=1.0, depth_multiplier=1.0, group_size=None, pretrained=False, **kwargs\n):\n    \"\"\" Creates an EfficientNet-V2 Large model\n\n    Ref impl: https://github.com/google/automl/tree/master/efficientnetv2\n    Paper: `EfficientNetV2: Smaller Models and Faster Training` - https://arxiv.org/abs/2104.00298\n    \"\"\"\n\n    arch_def = [\n        ['cn_r4_k3_s1_e1_c32_skip'],\n        ['er_r7_k3_s2_e4_c64'],\n        ['er_r7_k3_s2_e4_c96'],\n        ['ir_r10_k3_s2_e4_c192_se0.25'],\n        ['ir_r19_k3_s1_e6_c224_se0.25'],\n        ['ir_r25_k3_s2_e6_c384_se0.25'],\n        ['ir_r7_k3_s1_e6_c640_se0.25'],\n    ]\n\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def, depth_multiplier, group_size=group_size),\n        num_features=1280,\n        stem_size=32,\n        round_chs_fn=partial(round_channels, multiplier=channel_multiplier),\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        act_layer=resolve_act_layer(kwargs, 'silu'),\n        **kwargs,\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_efficientnetv2_xl(\n        variant, channel_multiplier=1.0, depth_multiplier=1.0, group_size=None, pretrained=False, **kwargs\n):\n    \"\"\" Creates an EfficientNet-V2 Xtra-Large model\n\n    Ref impl: https://github.com/google/automl/tree/master/efficientnetv2\n    Paper: `EfficientNetV2: Smaller Models 
and Faster Training` - https://arxiv.org/abs/2104.00298\n    \"\"\"\n\n    arch_def = [\n        ['cn_r4_k3_s1_e1_c32_skip'],\n        ['er_r8_k3_s2_e4_c64'],\n        ['er_r8_k3_s2_e4_c96'],\n        ['ir_r16_k3_s2_e4_c192_se0.25'],\n        ['ir_r24_k3_s1_e6_c256_se0.25'],\n        ['ir_r32_k3_s2_e6_c512_se0.25'],\n        ['ir_r8_k3_s1_e6_c640_se0.25'],\n    ]\n\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def, depth_multiplier, group_size=group_size),\n        num_features=1280,\n        stem_size=32,\n        round_chs_fn=partial(round_channels, multiplier=channel_multiplier),\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        act_layer=resolve_act_layer(kwargs, 'silu'),\n        **kwargs,\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_efficientnet_x(\n        variant, channel_multiplier=1.0, depth_multiplier=1.0, channel_divisor=8,\n        group_size=None, version=1, pretrained=False, **kwargs\n):\n    \"\"\"Creates an EfficientNet model.\n\n    Ref impl: https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/efficientnet_model.py\n    Paper: https://arxiv.org/abs/1905.11946\n\n    EfficientNet params\n    name: (channel_multiplier, depth_multiplier, resolution, dropout_rate)\n    'efficientnet-x-b0': (1.0, 1.0, 224, 0.2),\n    'efficientnet-x-b1': (1.0, 1.1, 240, 0.2),\n    'efficientnet-x-b2': (1.1, 1.2, 260, 0.3),\n    'efficientnet-x-b3': (1.2, 1.4, 300, 0.3),\n    'efficientnet-x-b4': (1.4, 1.8, 380, 0.4),\n    'efficientnet-x-b5': (1.6, 2.2, 456, 0.4),\n    'efficientnet-x-b6': (1.8, 2.6, 528, 0.5),\n    'efficientnet-x-b7': (2.0, 3.1, 600, 0.5),\n    'efficientnet-x-b8': (2.2, 3.6, 672, 0.5),\n    'efficientnet-l2': (4.3, 5.3, 800, 0.5),\n\n    Args:\n      channel_multiplier: multiplier to number of channels per layer\n      depth_multiplier: multiplier to number of repeats per stage\n\n    
\"\"\"\n    \"\"\"\n      if version == 1:\n    blocks_args = [\n        'r1_k3_s11_e1_i32_o16_se0.25_d1_a0',\n        'r2_k3_s22_e6_i16_o24_se0.25_f1_d2_a1',\n        'r2_k5_s22_e6_i24_o40_se0.25_f1_a1',\n        'r3_k3_s22_e6_i40_o80_se0.25_a0',\n        'r3_k5_s11_e6_i80_o112_se0.25_a0',\n        'r4_k5_s22_e6_i112_o192_se0.25_a0',\n        'r1_k3_s11_e6_i192_o320_se0.25_a0',\n    ]\n  elif version == 2:\n    blocks_args = [\n        'r1_k3_s11_e1_i32_o16_se0.25_d1_a0',\n        'r2_k3_s22_e4_i16_o24_se0.25_f1_d2_a1',\n        'r2_k5_s22_e4_i24_o40_se0.25_f1_a1',\n        'r3_k3_s22_e4_i40_o80_se0.25_a0',\n        'r3_k5_s11_e6_i80_o112_se0.25_a0',\n        'r4_k5_s22_e6_i112_o192_se0.25_a0',\n        'r1_k3_s11_e6_i192_o320_se0.25_a0',\n    ]\n    \"\"\"\n    if version == 1:\n        arch_def = [\n            ['ds_r1_k3_s1_e1_c16_se0.25_d1'],\n            ['er_r2_k3_s2_e6_c24_se0.25_nre'],\n            ['er_r2_k5_s2_e6_c40_se0.25_nre'],\n            ['ir_r3_k3_s2_e6_c80_se0.25'],\n            ['ir_r3_k5_s1_e6_c112_se0.25'],\n            ['ir_r4_k5_s2_e6_c192_se0.25'],\n            ['ir_r1_k3_s1_e6_c320_se0.25'],\n        ]\n    else:\n        arch_def = [\n            ['ds_r1_k3_s1_e1_c16_se0.25_d1'],\n            ['er_r2_k3_s2_e4_c24_se0.25_nre'],\n            ['er_r2_k5_s2_e4_c40_se0.25_nre'],\n            ['ir_r3_k3_s2_e4_c80_se0.25'],\n            ['ir_r3_k5_s1_e6_c112_se0.25'],\n            ['ir_r4_k5_s2_e6_c192_se0.25'],\n            ['ir_r1_k3_s1_e6_c320_se0.25'],\n        ]\n    round_chs_fn = partial(round_channels, multiplier=channel_multiplier, divisor=channel_divisor)\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def, depth_multiplier, group_size=group_size),\n        num_features=round_chs_fn(1280),\n        stem_size=32,\n        round_chs_fn=round_chs_fn,\n        act_layer=resolve_act_layer(kwargs, 'silu'),\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        
**kwargs,\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_mixnet_s(variant, channel_multiplier=1.0, pretrained=False, **kwargs):\n    \"\"\"Creates a MixNet Small model.\n\n    Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/mnasnet/mixnet\n    Paper: https://arxiv.org/abs/1907.09595\n    \"\"\"\n    arch_def = [\n        # stage 0, 112x112 in\n        ['ds_r1_k3_s1_e1_c16'],  # relu\n        # stage 1, 112x112 in\n        ['ir_r1_k3_a1.1_p1.1_s2_e6_c24', 'ir_r1_k3_a1.1_p1.1_s1_e3_c24'],  # relu\n        # stage 2, 56x56 in\n        ['ir_r1_k3.5.7_s2_e6_c40_se0.5_nsw', 'ir_r3_k3.5_a1.1_p1.1_s1_e6_c40_se0.5_nsw'],  # swish\n        # stage 3, 28x28 in\n        ['ir_r1_k3.5.7_p1.1_s2_e6_c80_se0.25_nsw', 'ir_r2_k3.5_p1.1_s1_e6_c80_se0.25_nsw'],  # swish\n        # stage 4, 14x14in\n        ['ir_r1_k3.5.7_a1.1_p1.1_s1_e6_c120_se0.5_nsw', 'ir_r2_k3.5.7.9_a1.1_p1.1_s1_e3_c120_se0.5_nsw'],  # swish\n        # stage 5, 14x14in\n        ['ir_r1_k3.5.7.9.11_s2_e6_c200_se0.5_nsw', 'ir_r2_k3.5.7.9_p1.1_s1_e6_c200_se0.5_nsw'],  # swish\n        # 7x7\n    ]\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def),\n        num_features=1536,\n        stem_size=16,\n        round_chs_fn=partial(round_channels, multiplier=channel_multiplier),\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        **kwargs\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_mixnet_m(variant, channel_multiplier=1.0, depth_multiplier=1.0, pretrained=False, **kwargs):\n    \"\"\"Creates a MixNet Medium-Large model.\n\n    Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/mnasnet/mixnet\n    Paper: https://arxiv.org/abs/1907.09595\n    \"\"\"\n    arch_def = [\n        # stage 0, 112x112 in\n        ['ds_r1_k3_s1_e1_c24'],  # relu\n        # stage 1, 112x112 in\n        
['ir_r1_k3.5.7_a1.1_p1.1_s2_e6_c32', 'ir_r1_k3_a1.1_p1.1_s1_e3_c32'],  # relu\n        # stage 2, 56x56 in\n        ['ir_r1_k3.5.7.9_s2_e6_c40_se0.5_nsw', 'ir_r3_k3.5_a1.1_p1.1_s1_e6_c40_se0.5_nsw'],  # swish\n        # stage 3, 28x28 in\n        ['ir_r1_k3.5.7_s2_e6_c80_se0.25_nsw', 'ir_r3_k3.5.7.9_a1.1_p1.1_s1_e6_c80_se0.25_nsw'],  # swish\n        # stage 4, 14x14in\n        ['ir_r1_k3_s1_e6_c120_se0.5_nsw', 'ir_r3_k3.5.7.9_a1.1_p1.1_s1_e3_c120_se0.5_nsw'],  # swish\n        # stage 5, 14x14in\n        ['ir_r1_k3.5.7.9_s2_e6_c200_se0.5_nsw', 'ir_r3_k3.5.7.9_p1.1_s1_e6_c200_se0.5_nsw'],  # swish\n        # 7x7\n    ]\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def, depth_multiplier, depth_trunc='round'),\n        num_features=1536,\n        stem_size=24,\n        round_chs_fn=partial(round_channels, multiplier=channel_multiplier),\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        **kwargs\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_tinynet(variant, model_width=1.0, depth_multiplier=1.0, pretrained=False, **kwargs):\n    \"\"\"Creates a TinyNet model.\n    \"\"\"\n    arch_def = [\n        ['ds_r1_k3_s1_e1_c16_se0.25'], ['ir_r2_k3_s2_e6_c24_se0.25'],\n        ['ir_r2_k5_s2_e6_c40_se0.25'], ['ir_r3_k3_s2_e6_c80_se0.25'],\n        ['ir_r3_k5_s1_e6_c112_se0.25'], ['ir_r4_k5_s2_e6_c192_se0.25'],\n        ['ir_r1_k3_s1_e6_c320_se0.25'],\n    ]\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def, depth_multiplier, depth_trunc='round'),\n        num_features=max(1280, round_channels(1280, model_width, 8, None)),\n        stem_size=32,\n        fix_stem=True,\n        round_chs_fn=partial(round_channels, multiplier=model_width),\n        act_layer=resolve_act_layer(kwargs, 'swish'),\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        **kwargs,\n  
  )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_mobilenet_edgetpu(variant, channel_multiplier=1.0, depth_multiplier=1.0, pretrained=False, **kwargs):\n    \"\"\"\n    Based on definitions in: https://github.com/tensorflow/models/tree/d2427a562f401c9af118e47af2f030a0a5599f55/official/projects/edgetpu/vision\n    \"\"\"\n    if 'edgetpu_v2' in variant:\n        stem_size = 64\n        stem_kernel_size = 5\n        group_size = 64\n        num_features = 1280\n        act_layer = resolve_act_layer(kwargs, 'relu')\n\n        def _arch_def(chs: List[int], group_size: int):\n            return [\n                # stage 0, 112x112 in\n                [f'cn_r1_k1_s1_c{chs[0]}'],  # NOTE with expansion==1, official impl block ends just 1x1 pwl\n                # stage 1, 112x112 in\n                [f'er_r1_k3_s2_e8_c{chs[1]}', f'er_r1_k3_s1_e4_gs{group_size}_c{chs[1]}'],\n                # stage 2, 56x56 in\n                [\n                    f'er_r1_k3_s2_e8_c{chs[2]}',\n                    f'er_r1_k3_s1_e4_gs{group_size}_c{chs[2]}',\n                    f'er_r1_k3_s1_e4_c{chs[2]}',\n                    f'er_r1_k3_s1_e4_gs{group_size}_c{chs[2]}',\n                ],\n                # stage 3, 28x28 in\n                [f'er_r1_k3_s2_e8_c{chs[3]}', f'ir_r3_k3_s1_e4_c{chs[3]}'],\n                # stage 4, 14x14in\n                [f'ir_r1_k3_s1_e8_c{chs[4]}', f'ir_r3_k3_s1_e4_c{chs[4]}'],\n                # stage 5, 14x14in\n                [f'ir_r1_k3_s2_e8_c{chs[5]}', f'ir_r3_k3_s1_e4_c{chs[5]}'],\n                # stage 6, 7x7 in\n                [f'ir_r1_k3_s1_e8_c{chs[6]}'],\n            ]\n\n        if 'edgetpu_v2_xs' in variant:\n            stem_size = 32\n            stem_kernel_size = 3\n            channels = [16, 32, 48, 96, 144, 160, 192]\n        elif 'edgetpu_v2_s' in variant:\n            channels = [24, 48, 64, 128, 160, 192, 256]\n        elif 'edgetpu_v2_m' in variant:\n            channels = 
[32, 64, 80, 160, 192, 240, 320]\n            num_features = 1344\n        elif 'edgetpu_v2_l' in variant:\n            stem_kernel_size = 7\n            group_size = 128\n            channels = [32, 64, 96, 192, 240, 256, 384]\n            num_features = 1408\n        else:\n            assert False\n\n        arch_def = _arch_def(channels, group_size)\n    else:\n        # v1\n        stem_size = 32\n        stem_kernel_size = 3\n        num_features = 1280\n        act_layer = resolve_act_layer(kwargs, 'relu')\n        arch_def = [\n            # stage 0, 112x112 in\n            ['cn_r1_k1_s1_c16'],\n            # stage 1, 112x112 in\n            ['er_r1_k3_s2_e8_c32', 'er_r3_k3_s1_e4_c32'],\n            # stage 2, 56x56 in\n            ['er_r1_k3_s2_e8_c48', 'er_r3_k3_s1_e4_c48'],\n            # stage 3, 28x28 in\n            ['ir_r1_k3_s2_e8_c96', 'ir_r3_k3_s1_e4_c96'],\n            # stage 4, 14x14in\n            ['ir_r1_k3_s1_e8_c96_noskip', 'ir_r3_k3_s1_e4_c96'],\n            # stage 5, 14x14in\n            ['ir_r1_k5_s2_e8_c160', 'ir_r3_k5_s1_e4_c160'],\n            # stage 6, 7x7 in\n            ['ir_r1_k3_s1_e8_c192'],\n        ]\n\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def, depth_multiplier),\n        num_features=num_features,\n        stem_size=stem_size,\n        stem_kernel_size=stem_kernel_size,\n        round_chs_fn=partial(round_channels, multiplier=channel_multiplier),\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        act_layer=act_layer,\n        **kwargs,\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_test_efficientnet(variant, channel_multiplier=1.0, depth_multiplier=1.0, pretrained=False, **kwargs):\n    \"\"\" Minimal test EfficientNet generator.\n    \"\"\"\n    arch_def = [\n        ['cn_r1_k3_s1_e1_c16_skip'],\n        ['er_r1_k3_s2_e4_c24'],\n        ['er_r1_k3_s2_e4_c32'],\n        
['ir_r1_k3_s2_e4_c48_se0.25'],\n        ['ir_r1_k3_s2_e4_c64_se0.25'],\n    ]\n    round_chs_fn = partial(round_channels, multiplier=channel_multiplier, round_limit=0.)\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def, depth_multiplier),\n        num_features=round_chs_fn(256),\n        stem_size=24,\n        round_chs_fn=round_chs_fn,\n        norm_layer=kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        act_layer=resolve_act_layer(kwargs, 'silu'),\n        **kwargs,\n    )\n    model = _create_effnet(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'conv_stem', 'classifier': 'classifier',\n        'license': 'apache-2.0', **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'mnasnet_050.untrained': _cfg(),\n    'mnasnet_075.untrained': _cfg(),\n    'mnasnet_100.rmsp_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mnasnet_b1-74cb7081.pth',\n        hf_hub_id='timm/'),\n    'mnasnet_140.untrained': _cfg(),\n\n    'semnasnet_050.untrained': _cfg(),\n    'semnasnet_075.rmsp_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/semnasnet_075-18710866.pth',\n        hf_hub_id='timm/'),\n    'semnasnet_100.rmsp_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mnasnet_a1-d9418771.pth',\n        hf_hub_id='timm/'),\n    'semnasnet_140.untrained': _cfg(),\n    'mnasnet_small.lamb_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mnasnet_small_lamb-aff75073.pth',\n        
hf_hub_id='timm/'),\n\n    'mobilenetv1_100.ra4_e3600_r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        test_input_size=(3, 256, 256), test_crop_pct=0.95,\n    ),\n    'mobilenetv1_100h.ra4_e3600_r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        test_input_size=(3, 256, 256), test_crop_pct=0.95,\n    ),\n    'mobilenetv1_125.ra4_e3600_r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        crop_pct=0.9, test_input_size=(3, 256, 256), test_crop_pct=1.0,\n    ),\n\n    'mobilenetv2_035.untrained': _cfg(),\n    'mobilenetv2_050.lamb_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_050-3d30d450.pth',\n        hf_hub_id='timm/',\n        interpolation='bicubic',\n    ),\n    'mobilenetv2_075.untrained': _cfg(),\n    'mobilenetv2_100.ra_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_100_ra-b33bc2c4.pth',\n        hf_hub_id='timm/'),\n    'mobilenetv2_110d.ra_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_110d_ra-77090ade.pth',\n        hf_hub_id='timm/'),\n    'mobilenetv2_120d.ra_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_120d_ra-5987e2ed.pth',\n        hf_hub_id='timm/'),\n    'mobilenetv2_140.ra_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_140_ra-21a4e913.pth',\n        hf_hub_id='timm/'),\n\n    'fbnetc_100.rmsp_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/fbnetc_100-c345b898.pth',\n        hf_hub_id='timm/',\n        
interpolation='bilinear'),\n    'spnasnet_100.rmsp_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/spnasnet_100-048bc3f4.pth',\n        hf_hub_id='timm/',\n        interpolation='bilinear'),\n\n    # NOTE experimenting with alternate attention\n    'efficientnet_b0.ra_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b0_ra-3dd342df.pth',\n        hf_hub_id='timm/'),\n    'efficientnet_b0.ra4_e3600_r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        crop_pct=0.9, test_input_size=(3, 256, 256), test_crop_pct=1.0),\n    'efficientnet_b1.ra4_e3600_r240_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        input_size=(3, 240, 240), crop_pct=0.9, pool_size=(8, 8),\n        test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'efficientnet_b1.ft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b1-533bc792.pth',\n        hf_hub_id='timm/',\n        test_input_size=(3, 256, 256), test_crop_pct=1.0),\n    'efficientnet_b2.ra_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b2_ra-bcdf34b7.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8), test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'efficientnet_b3.ra2_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b3_ra2-cf984f9c.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 288, 288), pool_size=(9, 9), test_input_size=(3, 320, 320), test_crop_pct=1.0),\n    'efficientnet_b4.ra2_in1k': _cfg(\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b4_ra2_320-7eb33cd5.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 320, 320), pool_size=(10, 10), test_input_size=(3, 384, 384), test_crop_pct=1.0),\n    'efficientnet_b5.sw_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), pool_size=(14, 14), crop_pct=1.0, crop_mode='squash'),\n    'efficientnet_b5.sw_in12k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 416, 416), pool_size=(13, 13), crop_pct=0.95, num_classes=11821),\n    'efficientnet_b6.untrained': _cfg(\n        url='', input_size=(3, 528, 528), pool_size=(17, 17), crop_pct=0.942),\n    'efficientnet_b7.untrained': _cfg(\n        url='', input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949),\n    'efficientnet_b8.untrained': _cfg(\n        url='', input_size=(3, 672, 672), pool_size=(21, 21), crop_pct=0.954),\n    'efficientnet_l2.untrained': _cfg(\n        url='', input_size=(3, 800, 800), pool_size=(25, 25), crop_pct=0.961),\n\n    # FIXME experimental\n    'efficientnet_b0_gn.untrained': _cfg(),\n    'efficientnet_b0_g8_gn.untrained': _cfg(),\n    'efficientnet_b0_g16_evos.untrained': _cfg(),\n    'efficientnet_b3_gn.untrained': _cfg(\n        input_size=(3, 288, 288), pool_size=(9, 9), test_input_size=(3, 320, 320), crop_pct=1.0),\n    'efficientnet_b3_g8_gn.untrained': _cfg(\n        input_size=(3, 288, 288), pool_size=(9, 9), test_input_size=(3, 320, 320), crop_pct=1.0),\n    'efficientnet_blur_b0.untrained': _cfg(),\n    'efficientnet_h_b5.sw_r448_e450_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), pool_size=(14, 14), crop_pct=1.0,\n        crop_mode='squash', test_input_size=(3, 576, 576)),\n    'efficientnet_x_b3.untrained': _cfg(\n        url='', input_size=(3, 288, 288), pool_size=(9, 9), crop_pct=0.95),\n    'efficientnet_x_b5.sw_r448_e450_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 
448, 448), pool_size=(14, 14), crop_pct=1.0,\n        crop_mode='squash', test_input_size=(3, 576, 576)),\n\n    'efficientnet_es.ra_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_es_ra-f111e99c.pth',\n        hf_hub_id='timm/'),\n    'efficientnet_em.ra2_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_em_ra2-66250f76.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),\n    'efficientnet_el.ra_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_el-3b455510.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904),\n\n    'efficientnet_es_pruned.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_es_pruned75-1b7248cf.pth',\n        hf_hub_id='timm/'),\n    'efficientnet_el_pruned.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_el_pruned70-ef2a2ccf.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904),\n\n    'efficientnet_cc_b0_4e.untrained': _cfg(),\n    'efficientnet_cc_b0_8e.untrained': _cfg(),\n    'efficientnet_cc_b1_8e.untrained': _cfg(input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),\n\n    'efficientnet_lite0.ra_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_lite0_ra-37913777.pth',\n        hf_hub_id='timm/'),\n    'efficientnet_lite1.untrained': _cfg(\n        input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),\n    'efficientnet_lite2.untrained': _cfg(\n        input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890),\n    'efficientnet_lite3.untrained': _cfg(\n  
      input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904),\n    'efficientnet_lite4.untrained': _cfg(\n        input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922),\n\n    'efficientnet_b1_pruned.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/effnetb1_pruned-bea43a3a.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), pool_size=(8, 8),\n        crop_pct=0.882, mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),\n    'efficientnet_b2_pruned.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/effnetb2_pruned-08c1b27c.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 260, 260), pool_size=(9, 9),\n        crop_pct=0.890, mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),\n    'efficientnet_b3_pruned.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/effnetb3_pruned-59ecf72d.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 300, 300), pool_size=(10, 10),\n        crop_pct=0.904, mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),\n\n    'efficientnetv2_rw_t.ra2_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnetv2_t_agc-3620981a.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 224, 224), test_input_size=(3, 288, 288), pool_size=(7, 7), crop_pct=1.0),\n    'gc_efficientnetv2_rw_t.agc_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/gc_efficientnetv2_rw_t_agc-927a0bde.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 224, 224), test_input_size=(3, 288, 288), pool_size=(7, 7), crop_pct=1.0),\n    'efficientnetv2_rw_s.ra2_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_v2s_ra2_288-a6477665.pth',\n        
hf_hub_id='timm/',\n        input_size=(3, 288, 288), test_input_size=(3, 384, 384), pool_size=(9, 9), crop_pct=1.0),\n    'efficientnetv2_rw_m.agc_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnetv2_rw_m_agc-3d90cb1e.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 320, 320), test_input_size=(3, 416, 416), pool_size=(10, 10), crop_pct=1.0),\n\n    'efficientnetv2_s.untrained': _cfg(\n        input_size=(3, 288, 288), test_input_size=(3, 384, 384), pool_size=(9, 9), crop_pct=1.0),\n    'efficientnetv2_m.untrained': _cfg(\n        input_size=(3, 320, 320), test_input_size=(3, 416, 416), pool_size=(10, 10), crop_pct=1.0),\n    'efficientnetv2_l.untrained': _cfg(\n        input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0),\n    'efficientnetv2_xl.untrained': _cfg(\n        input_size=(3, 384, 384), test_input_size=(3, 512, 512), pool_size=(12, 12), crop_pct=1.0),\n\n    'tf_efficientnet_b0.ns_jft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b0_ns-c0e6a31c.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 224, 224)),\n    'tf_efficientnet_b1.ns_jft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b1_ns-99dd0c41.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),\n    'tf_efficientnet_b2.ns_jft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b2_ns-00306e48.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890),\n    'tf_efficientnet_b3.ns_jft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b3_ns-9d44bf68.pth',\n        hf_hub_id='timm/',\n        
input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904),\n    'tf_efficientnet_b4.ns_jft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b4_ns-d6313a46.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922),\n    'tf_efficientnet_b5.ns_jft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5_ns-6f26d0cf.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 456, 456), pool_size=(15, 15), crop_pct=0.934),\n    'tf_efficientnet_b6.ns_jft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b6_ns-51548356.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 528, 528), pool_size=(17, 17), crop_pct=0.942),\n    'tf_efficientnet_b7.ns_jft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b7_ns-1dbc32de.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949),\n    'tf_efficientnet_l2.ns_jft_in1k_475': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_l2_ns_475-bebbd00a.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 475, 475), pool_size=(15, 15), crop_pct=0.936),\n    'tf_efficientnet_l2.ns_jft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_l2_ns-df73bb44.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 800, 800), pool_size=(25, 25), crop_pct=0.96),\n\n    'tf_efficientnet_b0.ap_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b0_ap-f262efe1.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, 
std=IMAGENET_INCEPTION_STD, input_size=(3, 224, 224)),\n    'tf_efficientnet_b1.ap_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b1_ap-44ef0a3d.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),\n    'tf_efficientnet_b2.ap_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b2_ap-2f8e7636.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890),\n    'tf_efficientnet_b3.ap_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b3_ap-aad25bdd.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904),\n    'tf_efficientnet_b4.ap_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b4_ap-dedb23e6.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922),\n    'tf_efficientnet_b5.ap_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5_ap-9e82fae8.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        input_size=(3, 456, 456), pool_size=(15, 15), crop_pct=0.934),\n    'tf_efficientnet_b6.ap_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b6_ap-4ffb161f.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, 
std=IMAGENET_INCEPTION_STD,\n        input_size=(3, 528, 528), pool_size=(17, 17), crop_pct=0.942),\n    'tf_efficientnet_b7.ap_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b7_ap-ddb28fec.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949),\n    'tf_efficientnet_b8.ap_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b8_ap-00e169fa.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        input_size=(3, 672, 672), pool_size=(21, 21), crop_pct=0.954),\n\n    'tf_efficientnet_b5.ra_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5_ra-9a3e5369.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 456, 456), pool_size=(15, 15), crop_pct=0.934),\n    'tf_efficientnet_b7.ra_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b7_ra-6c08e654.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949),\n    'tf_efficientnet_b8.ra_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b8_ra-572d5dd9.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 672, 672), pool_size=(21, 21), crop_pct=0.954),\n\n    'tf_efficientnet_b0.aa_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b0_aa-827b6e33.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 224, 224)),\n    'tf_efficientnet_b1.aa_in1k': _cfg(\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b1_aa-ea7a6ee0.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),\n    'tf_efficientnet_b2.aa_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b2_aa-60c94f97.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890),\n    'tf_efficientnet_b3.aa_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b3_aa-84b4657e.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904),\n    'tf_efficientnet_b4.aa_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b4_aa-818f208c.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922),\n    'tf_efficientnet_b5.aa_in1k': _cfg(\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5_aa-99018a74.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 456, 456), pool_size=(15, 15), crop_pct=0.934),\n    'tf_efficientnet_b6.aa_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b6_aa-80ba17e4.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 528, 528), pool_size=(17, 17), crop_pct=0.942),\n    'tf_efficientnet_b7.aa_in1k': _cfg(\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b7_aa-076e3472.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949),\n\n    'tf_efficientnet_b0.in1k': _cfg(\n        
url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b0-0af12548.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 224, 224)),\n    'tf_efficientnet_b1.in1k': _cfg(\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b1-5c1377c4.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),\n    'tf_efficientnet_b2.in1k': _cfg(\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b2-e393ef04.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890),\n    'tf_efficientnet_b3.in1k': _cfg(\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b3-e3bd6955.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904),\n    'tf_efficientnet_b4.in1k': _cfg(\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b4-74ee3bed.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922),\n    'tf_efficientnet_b5.in1k': _cfg(\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5-c6949ce9.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 456, 456), pool_size=(15, 15), crop_pct=0.934),\n\n    'tf_efficientnet_es.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_es-ca1afbfe.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 224, 224), ),\n    'tf_efficientnet_em.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_em-e78cfe58.pth',\n        
hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),\n    'tf_efficientnet_el.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_el-5143854e.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904),\n\n    'tf_efficientnet_cc_b0_4e.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b0_4e-4362b6b2.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),\n    'tf_efficientnet_cc_b0_8e.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b0_8e-66184a25.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),\n    'tf_efficientnet_cc_b1_8e.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b1_8e-f7c79ae1.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),\n\n    'tf_efficientnet_lite0.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite0-0aa007d2.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        interpolation='bicubic',  # should be bilinear but bicubic better match for TF bilinear at low res\n    ),\n    'tf_efficientnet_lite1.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite1-bde8b488.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        
input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882,\n        interpolation='bicubic',  # should be bilinear but bicubic better match for TF bilinear at low res\n    ),\n    'tf_efficientnet_lite2.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite2-dcccb7df.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890,\n        interpolation='bicubic',  # should be bilinear but bicubic better match for TF bilinear at low res\n    ),\n    'tf_efficientnet_lite3.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite3-b733e338.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904, interpolation='bilinear'),\n    'tf_efficientnet_lite4.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite4-741542c3.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.920, interpolation='bilinear'),\n\n    'tf_efficientnetv2_s.in21k_ft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_s_21ft1k-d7dafa41.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 300, 300), test_input_size=(3, 384, 384), pool_size=(10, 10), crop_pct=1.0),\n    'tf_efficientnetv2_m.in21k_ft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_m_21ft1k-bf41664a.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 384, 384), 
test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'tf_efficientnetv2_l.in21k_ft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_l_21ft1k-60127a9d.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'tf_efficientnetv2_xl.in21k_ft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_xl_in21ft1k-06c35c48.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 384, 384), test_input_size=(3, 512, 512), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n\n    'tf_efficientnetv2_s.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_s-eb54923e.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 300, 300), test_input_size=(3, 384, 384), pool_size=(10, 10), crop_pct=1.0),\n    'tf_efficientnetv2_m.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_m-cc09e0cd.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'tf_efficientnetv2_l.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_l-d664b728.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n\n    
'tf_efficientnetv2_s.in21k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_s_21k-6337ad01.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), num_classes=21843,\n        input_size=(3, 300, 300), test_input_size=(3, 384, 384), pool_size=(10, 10), crop_pct=1.0),\n    'tf_efficientnetv2_m.in21k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_m_21k-361418a2.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), num_classes=21843,\n        input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'tf_efficientnetv2_l.in21k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_l_21k-91a19ec9.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), num_classes=21843,\n        input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'tf_efficientnetv2_xl.in21k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_xl_in21k-fd7e8abf.pth',\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), num_classes=21843,\n        input_size=(3, 384, 384), test_input_size=(3, 512, 512), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n\n    'tf_efficientnetv2_b0.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_b0-c7cc451f.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 192, 192), test_input_size=(3, 224, 224), pool_size=(6, 6)),\n    'tf_efficientnetv2_b1.in1k': _cfg(\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_b1-be6e41b0.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 192, 192), test_input_size=(3, 240, 240), pool_size=(6, 6), crop_pct=0.882),\n    'tf_efficientnetv2_b2.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_b2-847de54e.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 208, 208), test_input_size=(3, 260, 260), pool_size=(7, 7), crop_pct=0.890),\n    'tf_efficientnetv2_b3.in21k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        input_size=(3, 240, 240), test_input_size=(3, 300, 300), pool_size=(8, 8), crop_pct=0.9, crop_mode='squash'),\n    'tf_efficientnetv2_b3.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_b3-57773f13.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), test_input_size=(3, 300, 300), pool_size=(8, 8), crop_pct=0.904),\n    'tf_efficientnetv2_b3.in21k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, num_classes=21843,\n        input_size=(3, 240, 240), test_input_size=(3, 300, 300), pool_size=(8, 8), crop_pct=0.904),\n\n    'mixnet_s.ft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_s-a907afbc.pth',\n        hf_hub_id='timm/'),\n    'mixnet_m.ft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_m-4647fc68.pth',\n        hf_hub_id='timm/'),\n    'mixnet_l.ft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_l-5a9a2ed8.pth',\n        hf_hub_id='timm/'),\n    'mixnet_xl.ra_in1k': _cfg(\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_xl_ra-aac3c00c.pth',\n        hf_hub_id='timm/'),\n    'mixnet_xxl.untrained': _cfg(),\n\n    'tf_mixnet_s.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_s-89d3354b.pth',\n        hf_hub_id='timm/'),\n    'tf_mixnet_m.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_m-0f4d8805.pth',\n        hf_hub_id='timm/'),\n    'tf_mixnet_l.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_l-6c92e0c8.pth',\n        hf_hub_id='timm/'),\n\n    \"tinynet_a.in1k\": _cfg(\n        input_size=(3, 192, 192), pool_size=(6, 6),  # int(224 * 0.86)\n        url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_a.pth',\n        hf_hub_id='timm/'),\n    \"tinynet_b.in1k\": _cfg(\n        input_size=(3, 188, 188), pool_size=(6, 6),  # int(224 * 0.84)\n        url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_b.pth',\n        hf_hub_id='timm/'),\n    \"tinynet_c.in1k\": _cfg(\n        input_size=(3, 184, 184), pool_size=(6, 6),  # int(224 * 0.825)\n        url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_c.pth',\n        hf_hub_id='timm/'),\n    \"tinynet_d.in1k\": _cfg(\n        input_size=(3, 152, 152), pool_size=(5, 5),  # int(224 * 0.68)\n        url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_d.pth',\n        hf_hub_id='timm/'),\n    \"tinynet_e.in1k\": _cfg(\n        input_size=(3, 106, 106), pool_size=(4, 4),  # int(224 * 0.475)\n        url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_e.pth',\n        hf_hub_id='timm/'),\n\n    'mobilenet_edgetpu_100.untrained': _cfg(\n        # hf_hub_id='timm/',\n        input_size=(3, 224, 224), 
crop_pct=0.9),
    # MobileNet-EdgeTPU v2 family; entries tagged '.untrained' have no published
    # weights yet (their hf_hub_id lines are commented out accordingly).
    'mobilenet_edgetpu_v2_xs.untrained': _cfg(
        # hf_hub_id='timm/',
        input_size=(3, 224, 224), crop_pct=0.9),
    'mobilenet_edgetpu_v2_s.untrained': _cfg(
        #hf_hub_id='timm/',
        input_size=(3, 224, 224), crop_pct=0.9),
    'mobilenet_edgetpu_v2_m.ra4_e3600_r224_in1k': _cfg(
        hf_hub_id='timm/',
        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,
        crop_pct=0.9, test_input_size=(3, 256, 256), test_crop_pct=0.95,
    ),
    'mobilenet_edgetpu_v2_l.untrained': _cfg(
        #hf_hub_id='timm/',
        input_size=(3, 224, 224), crop_pct=0.9),

    # 'test_*' configs at 160x160 — presumably small models intended for
    # unit/smoke testing. NOTE(review): confirm intended use with maintainers.
    "test_efficientnet.r160_in1k": _cfg(
        hf_hub_id='timm/',
        input_size=(3, 160, 160), pool_size=(5, 5), crop_pct=0.95),
    "test_efficientnet_ln.r160_in1k": _cfg(
        hf_hub_id='timm/',
        input_size=(3, 160, 160), pool_size=(5, 5), crop_pct=0.95),
    "test_efficientnet_gn.r160_in1k": _cfg(
        hf_hub_id='timm/',
        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),
        input_size=(3, 160, 160), pool_size=(5, 5), crop_pct=0.95),
    "test_efficientnet_evos.r160_in1k": _cfg(
        hf_hub_id='timm/',
        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),
        input_size=(3, 160, 160), pool_size=(5, 5), crop_pct=0.95),
})


# Model factory functions. Each one is registered in timm's model registry via
# @register_model and returns a constructed EfficientNet; `pretrained=True`
# loads the weights referenced by the matching default_cfgs entry above.
@register_model
def mnasnet_050(pretrained=False, **kwargs) -> EfficientNet:
    """ MNASNet B1, depth multiplier of 0.5. """
    model = _gen_mnasnet_b1('mnasnet_050', 0.5, pretrained=pretrained, **kwargs)
    return model


@register_model
def mnasnet_075(pretrained=False, **kwargs) -> EfficientNet:
    """ MNASNet B1, depth multiplier of 0.75. """
    model = _gen_mnasnet_b1('mnasnet_075', 0.75, pretrained=pretrained, **kwargs)
    return model


@register_model
def mnasnet_100(pretrained=False, **kwargs) -> EfficientNet:
    """ MNASNet B1, depth multiplier of 1.0. """
    model = _gen_mnasnet_b1('mnasnet_100', 1.0, pretrained=pretrained, **kwargs)
    return model


@register_model
def mnasnet_140(pretrained=False, **kwargs) -> EfficientNet:
    """ MNASNet B1,  depth multiplier of 1.4 """
    model = _gen_mnasnet_b1('mnasnet_140', 1.4, pretrained=pretrained, **kwargs)
    return model


# MNASNet A1 variants — same family but built via _gen_mnasnet_a1 (w/ SE, per docstrings).
@register_model
def semnasnet_050(pretrained=False, **kwargs) -> EfficientNet:
    """ MNASNet A1 (w/ SE), depth multiplier of 0.5 """
    model = _gen_mnasnet_a1('semnasnet_050', 0.5, pretrained=pretrained, **kwargs)
    return model


@register_model
def semnasnet_075(pretrained=False, **kwargs) -> EfficientNet:
    """ MNASNet A1 (w/ SE),  depth multiplier of 0.75. """
    model = _gen_mnasnet_a1('semnasnet_075', 0.75, pretrained=pretrained, **kwargs)
    return model


@register_model
def semnasnet_100(pretrained=False, **kwargs) -> EfficientNet:
    """ MNASNet A1 (w/ SE), depth multiplier of 1.0. """
    model = _gen_mnasnet_a1('semnasnet_100', 1.0, pretrained=pretrained, **kwargs)
    return model


@register_model
def semnasnet_140(pretrained=False, **kwargs) -> EfficientNet:
    """ MNASNet A1 (w/ SE), depth multiplier of 1.4. """
    model = _gen_mnasnet_a1('semnasnet_140', 1.4, pretrained=pretrained, **kwargs)
    return model


@register_model
def mnasnet_small(pretrained=False, **kwargs) -> EfficientNet:
    """ MNASNet Small,  depth multiplier of 1.0. """
    model = _gen_mnasnet_small('mnasnet_small', 1.0, pretrained=pretrained, **kwargs)
    return model


@register_model
def mobilenetv1_100(pretrained=False, **kwargs) -> EfficientNet:
    """ MobileNet V1 """
    model = _gen_mobilenet_v1('mobilenetv1_100', 1.0, pretrained=pretrained, **kwargs)
    return model


@register_model
def mobilenetv1_100h(pretrained=False, **kwargs) -> EfficientNet:
    """ MobileNet V1 """
    # head_conv=True is the only difference from mobilenetv1_100
    model = _gen_mobilenet_v1('mobilenetv1_100h', 1.0, head_conv=True, pretrained=pretrained, **kwargs)
    return model


@register_model
def mobilenetv1_125(pretrained=False, **kwargs) -> EfficientNet:
    """ MobileNet V1 """
    model = _gen_mobilenet_v1('mobilenetv1_125', 1.25, pretrained=pretrained, **kwargs)
    return model


# MobileNet V2 family — the numeric suffix encodes the channel multiplier x100.
@register_model
def mobilenetv2_035(pretrained=False, **kwargs) -> EfficientNet:
    """ MobileNet V2 w/ 0.35 channel multiplier """
    model = _gen_mobilenet_v2('mobilenetv2_035', 0.35, pretrained=pretrained, **kwargs)
    return model


@register_model
def mobilenetv2_050(pretrained=False, **kwargs) -> EfficientNet:
    """ MobileNet V2 w/ 0.5 channel multiplier """
    model = _gen_mobilenet_v2('mobilenetv2_050', 0.5, pretrained=pretrained, **kwargs)
    return model


@register_model
def mobilenetv2_075(pretrained=False, **kwargs) -> EfficientNet:
    """ MobileNet V2 w/ 0.75 channel multiplier """
    model = _gen_mobilenet_v2('mobilenetv2_075', 0.75, pretrained=pretrained, **kwargs)
    return model


@register_model
def mobilenetv2_100(pretrained=False, **kwargs) -> EfficientNet:
    """ MobileNet V2 w/ 1.0 channel multiplier """
    model = _gen_mobilenet_v2('mobilenetv2_100', 1.0, pretrained=pretrained, **kwargs)
    return model


@register_model
def mobilenetv2_140(pretrained=False, **kwargs) -> EfficientNet:
    """ MobileNet V2 w/ 1.4 channel multiplier """
    model = _gen_mobilenet_v2('mobilenetv2_140', 1.4, pretrained=pretrained, **kwargs)
    return model


@register_model
def mobilenetv2_110d(pretrained=False, **kwargs) -> EfficientNet:
    """ MobileNet V2 w/ 1.1 channel, 1.2 depth multipliers"""
    # fix_stem_head keeps stem/head widths fixed while scaling the rest
    model = _gen_mobilenet_v2(
        'mobilenetv2_110d', 1.1, depth_multiplier=1.2, fix_stem_head=True, pretrained=pretrained, **kwargs)
    return model


@register_model
def mobilenetv2_120d(pretrained=False, **kwargs) -> EfficientNet:
    """ MobileNet V2 w/ 1.2 channel, 1.4 depth multipliers """
    model = _gen_mobilenet_v2(
        'mobilenetv2_120d', 1.2, depth_multiplier=1.4, fix_stem_head=True, pretrained=pretrained, **kwargs)
    return model


@register_model
def fbnetc_100(pretrained=False, **kwargs) -> EfficientNet:
    """ FBNet-C """
    if pretrained:
        # pretrained model trained with non-default BN epsilon
        kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)
    model = _gen_fbnetc('fbnetc_100', 1.0, pretrained=pretrained, **kwargs)
    return model


@register_model
def spnasnet_100(pretrained=False, **kwargs) -> EfficientNet:
    """ Single-Path NAS Pixel1"""
    model = _gen_spnasnet('spnasnet_100', 1.0, pretrained=pretrained, **kwargs)
    return model


# EfficientNet B0..B8/L2 — compound scaling expressed here purely through
# channel_multiplier / depth_multiplier passed to _gen_efficientnet; the
# per-variant train-time drop rates are noted in each function's NOTE comment.
@register_model
def efficientnet_b0(pretrained=False, **kwargs) -> EfficientNet:
    """ EfficientNet-B0 """
    # NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2
    model = _gen_efficientnet(
        'efficientnet_b0', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)
    return model


@register_model
def efficientnet_b1(pretrained=False, **kwargs) -> EfficientNet:
    """ EfficientNet-B1 """
    # NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2
    model = _gen_efficientnet(
        'efficientnet_b1', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)
    return model


@register_model
def efficientnet_b2(pretrained=False, **kwargs) -> EfficientNet:
    """ EfficientNet-B2 """
    # NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2
    model = _gen_efficientnet(
        'efficientnet_b2', channel_multiplier=1.1, depth_multiplier=1.2, pretrained=pretrained, **kwargs)
    return model


@register_model
def efficientnet_b3(pretrained=False, **kwargs) -> EfficientNet:
    """ EfficientNet-B3 """
    # NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2
    model = _gen_efficientnet(
        'efficientnet_b3', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)
    return model


@register_model
def efficientnet_b4(pretrained=False, **kwargs) -> EfficientNet:
    """ EfficientNet-B4 """
    # NOTE for train, drop_rate should be 0.4, drop_path_rate should be 0.2
    model = _gen_efficientnet(
        'efficientnet_b4', channel_multiplier=1.4, depth_multiplier=1.8, pretrained=pretrained, **kwargs)
    return model


@register_model
def efficientnet_b5(pretrained=False, **kwargs) -> EfficientNet:
    """ EfficientNet-B5 """
    # NOTE for train, drop_rate should be 0.4, drop_path_rate should be 0.2
    model = _gen_efficientnet(
        'efficientnet_b5', channel_multiplier=1.6, depth_multiplier=2.2, pretrained=pretrained, **kwargs)
    return model


@register_model
def efficientnet_b6(pretrained=False, **kwargs) -> EfficientNet:
    """ EfficientNet-B6 """
    # NOTE for train, drop_rate should be 0.5, drop_path_rate should be 0.2
    model = _gen_efficientnet(
        'efficientnet_b6', channel_multiplier=1.8, depth_multiplier=2.6, pretrained=pretrained, **kwargs)
    return model


@register_model
def efficientnet_b7(pretrained=False, **kwargs) -> EfficientNet:
    """ EfficientNet-B7 """
    # NOTE for train, drop_rate should be 0.5, drop_path_rate should be 0.2
    model = _gen_efficientnet(
        'efficientnet_b7', channel_multiplier=2.0, depth_multiplier=3.1, pretrained=pretrained, **kwargs)
    return model


@register_model
def efficientnet_b8(pretrained=False, **kwargs) -> EfficientNet:
    """ EfficientNet-B8 """
    # NOTE for train, drop_rate should be 0.5, drop_path_rate should be 0.2
    model = _gen_efficientnet(
        'efficientnet_b8', channel_multiplier=2.2, depth_multiplier=3.6, pretrained=pretrained, **kwargs)
    return model


@register_model
def efficientnet_l2(pretrained=False, **kwargs) -> EfficientNet:
    """ EfficientNet-L2."""
    # NOTE for train, drop_rate should be 0.5, drop_path_rate should be 0.2
    model = _gen_efficientnet(
        'efficientnet_l2', channel_multiplier=4.3, depth_multiplier=5.3, pretrained=pretrained, **kwargs)
    return model


# FIXME experimental group conv / GroupNorm / EvoNorm experiments
@register_model
def efficientnet_b0_gn(pretrained=False, **kwargs) -> EfficientNet:
    """ EfficientNet-B0 + GroupNorm"""
    model = _gen_efficientnet(
        'efficientnet_b0_gn', norm_layer=partial(GroupNormAct, group_size=8), pretrained=pretrained, **kwargs)
    return model


@register_model
def efficientnet_b0_g8_gn(pretrained=False, **kwargs) -> EfficientNet:
    """ EfficientNet-B0 w/ group conv + GroupNorm"""
    model = _gen_efficientnet(
        'efficientnet_b0_g8_gn', group_size=8, norm_layer=partial(GroupNormAct, group_size=8),
        pretrained=pretrained, **kwargs)
    return model


@register_model
def efficientnet_b0_g16_evos(pretrained=False, **kwargs) -> EfficientNet:
    """ EfficientNet-B0 w/ group 16 conv + EvoNorm"""
    # EvoNorm layer arg currently disabled (see trailing commented-out norm_layer)
    model = _gen_efficientnet(
        'efficientnet_b0_g16_evos', group_size=16, channel_divisor=16,
        pretrained=pretrained, **kwargs) #norm_layer=partial(EvoNorm2dS0, group_size=16),
    return model


@register_model
def efficientnet_b3_gn(pretrained=False, 
**kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B3 w/ GroupNorm \"\"\"\n    # NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2\n    model = _gen_efficientnet(\n        'efficientnet_b3_gn', channel_multiplier=1.2, depth_multiplier=1.4, channel_divisor=16,\n        norm_layer=partial(GroupNormAct, group_size=16), pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnet_b3_g8_gn(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B3 w/ grouped conv + BN\"\"\"\n    # NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2\n    model = _gen_efficientnet(\n        'efficientnet_b3_g8_gn', channel_multiplier=1.2, depth_multiplier=1.4, group_size=8, channel_divisor=16,\n        norm_layer=partial(GroupNormAct, group_size=16), pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnet_blur_b0(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B0 w/ BlurPool \"\"\"\n    # NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2\n    model = _gen_efficientnet(\n        'efficientnet_blur_b0', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained,\n        aa_layer='blurpc', **kwargs\n    )\n    return model\n\n\n@register_model\ndef efficientnet_es(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Edge Small. \"\"\"\n    model = _gen_efficientnet_edge(\n        'efficientnet_es', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnet_es_pruned(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Edge Small Pruned. 
For more info: https://github.com/DeGirum/pruned-models/releases/tag/efficientnet_v1.0\"\"\"\n    model = _gen_efficientnet_edge(\n        'efficientnet_es_pruned', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)\n    return model\n\n@register_model\ndef efficientnet_em(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Edge-Medium. \"\"\"\n    model = _gen_efficientnet_edge(\n        'efficientnet_em', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnet_el(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Edge-Large. \"\"\"\n    model = _gen_efficientnet_edge(\n        'efficientnet_el', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)\n    return model\n\n@register_model\ndef efficientnet_el_pruned(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Edge-Large pruned. For more info: https://github.com/DeGirum/pruned-models/releases/tag/efficientnet_v1.0\"\"\"\n    model = _gen_efficientnet_edge(\n        'efficientnet_el_pruned', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)\n    return model\n\n@register_model\ndef efficientnet_cc_b0_4e(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-CondConv-B0 w/ 8 Experts \"\"\"\n    # NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2\n    model = _gen_efficientnet_condconv(\n        'efficientnet_cc_b0_4e', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnet_cc_b0_8e(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-CondConv-B0 w/ 8 Experts \"\"\"\n    # NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2\n    model = _gen_efficientnet_condconv(\n        'efficientnet_cc_b0_8e', channel_multiplier=1.0, depth_multiplier=1.0, 
experts_multiplier=2,\n        pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnet_cc_b1_8e(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-CondConv-B1 w/ 8 Experts \"\"\"\n    # NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2\n    model = _gen_efficientnet_condconv(\n        'efficientnet_cc_b1_8e', channel_multiplier=1.0, depth_multiplier=1.1, experts_multiplier=2,\n        pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnet_lite0(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Lite0 \"\"\"\n    # NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2\n    model = _gen_efficientnet_lite(\n        'efficientnet_lite0', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnet_lite1(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Lite1 \"\"\"\n    # NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2\n    model = _gen_efficientnet_lite(\n        'efficientnet_lite1', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnet_lite2(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Lite2 \"\"\"\n    # NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2\n    model = _gen_efficientnet_lite(\n        'efficientnet_lite2', channel_multiplier=1.1, depth_multiplier=1.2, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnet_lite3(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Lite3 \"\"\"\n    # NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2\n    model = _gen_efficientnet_lite(\n        'efficientnet_lite3', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)\n    return 
model\n\n\n@register_model\ndef efficientnet_lite4(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Lite4 \"\"\"\n    # NOTE for train, drop_rate should be 0.4, drop_path_rate should be 0.2\n    model = _gen_efficientnet_lite(\n        'efficientnet_lite4', channel_multiplier=1.4, depth_multiplier=1.8, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnet_b1_pruned(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B1 Pruned. The pruning has been obtained using https://arxiv.org/pdf/2002.08258.pdf  \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    variant = 'efficientnet_b1_pruned'\n    model = _gen_efficientnet(\n        variant, channel_multiplier=1.0, depth_multiplier=1.1, pruned=True, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnet_b2_pruned(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B2 Pruned. The pruning has been obtained using https://arxiv.org/pdf/2002.08258.pdf \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet(\n        'efficientnet_b2_pruned', channel_multiplier=1.1, depth_multiplier=1.2, pruned=True,\n        pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnet_b3_pruned(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B3 Pruned. 
The pruning has been obtained using https://arxiv.org/pdf/2002.08258.pdf \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet(\n        'efficientnet_b3_pruned', channel_multiplier=1.2, depth_multiplier=1.4, pruned=True,\n        pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnetv2_rw_t(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-V2 Tiny (Custom variant, tiny not in paper). \"\"\"\n    model = _gen_efficientnetv2_s(\n        'efficientnetv2_rw_t', channel_multiplier=0.8, depth_multiplier=0.9, rw=False, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef gc_efficientnetv2_rw_t(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-V2 Tiny w/ Global Context Attn (Custom variant, tiny not in paper). \"\"\"\n    model = _gen_efficientnetv2_s(\n        'gc_efficientnetv2_rw_t', channel_multiplier=0.8, depth_multiplier=0.9,\n        rw=False, se_layer='gc', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnetv2_rw_s(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-V2 Small (RW variant).\n    NOTE: This is my initial (pre official code release) w/ some differences.\n    See efficientnetv2_s and tf_efficientnetv2_s for versions that match the official w/ PyTorch vs TF padding\n    \"\"\"\n    model = _gen_efficientnetv2_s('efficientnetv2_rw_s', rw=True, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnetv2_rw_m(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-V2 Medium (RW variant).\n    \"\"\"\n    model = _gen_efficientnetv2_s(\n        'efficientnetv2_rw_m', channel_multiplier=1.2, depth_multiplier=(1.2,) * 4 + (1.6,) * 2, rw=True,\n        pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnetv2_s(pretrained=False, **kwargs) -> 
EfficientNet:\n    \"\"\" EfficientNet-V2 Small. \"\"\"\n    model = _gen_efficientnetv2_s('efficientnetv2_s', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnetv2_m(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-V2 Medium. \"\"\"\n    model = _gen_efficientnetv2_m('efficientnetv2_m', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnetv2_l(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-V2 Large. \"\"\"\n    model = _gen_efficientnetv2_l('efficientnetv2_l', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnetv2_xl(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-V2 Xtra-Large. \"\"\"\n    model = _gen_efficientnetv2_xl('efficientnetv2_xl', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_b0(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B0. Tensorflow compatible variant  \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet(\n        'tf_efficientnet_b0', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_b1(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B1. Tensorflow compatible variant  \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet(\n        'tf_efficientnet_b1', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_b2(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B2. 
Tensorflow compatible variant  \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet(\n        'tf_efficientnet_b2', channel_multiplier=1.1, depth_multiplier=1.2, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_b3(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B3. Tensorflow compatible variant \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet(\n        'tf_efficientnet_b3', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_b4(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B4. Tensorflow compatible variant \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet(\n        'tf_efficientnet_b4', channel_multiplier=1.4, depth_multiplier=1.8, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_b5(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B5. Tensorflow compatible variant \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet(\n        'tf_efficientnet_b5', channel_multiplier=1.6, depth_multiplier=2.2, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_b6(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B6. 
Tensorflow compatible variant \"\"\"\n    # NOTE for train, drop_rate should be 0.5\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet(\n        'tf_efficientnet_b6', channel_multiplier=1.8, depth_multiplier=2.6, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_b7(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B7. Tensorflow compatible variant \"\"\"\n    # NOTE for train, drop_rate should be 0.5\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet(\n        'tf_efficientnet_b7', channel_multiplier=2.0, depth_multiplier=3.1, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_b8(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B8. Tensorflow compatible variant \"\"\"\n    # NOTE for train, drop_rate should be 0.5\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet(\n        'tf_efficientnet_b8', channel_multiplier=2.2, depth_multiplier=3.6, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_l2(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-L2 NoisyStudent. Tensorflow compatible variant \"\"\"\n    # NOTE for train, drop_rate should be 0.5\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet(\n        'tf_efficientnet_l2', channel_multiplier=4.3, depth_multiplier=5.3, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_es(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Edge Small. 
Tensorflow compatible variant  \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet_edge(\n        'tf_efficientnet_es', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_em(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Edge-Medium. Tensorflow compatible variant  \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet_edge(\n        'tf_efficientnet_em', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_el(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Edge-Large. Tensorflow compatible variant  \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet_edge(\n        'tf_efficientnet_el', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_cc_b0_4e(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-CondConv-B0 w/ 4 Experts. Tensorflow compatible variant \"\"\"\n    # NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet_condconv(\n        'tf_efficientnet_cc_b0_4e', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_cc_b0_8e(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-CondConv-B0 w/ 8 Experts. 
Tensorflow compatible variant \"\"\"\n    # NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet_condconv(\n        'tf_efficientnet_cc_b0_8e', channel_multiplier=1.0, depth_multiplier=1.0, experts_multiplier=2,\n        pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_cc_b1_8e(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-CondConv-B1 w/ 8 Experts. Tensorflow compatible variant \"\"\"\n    # NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet_condconv(\n        'tf_efficientnet_cc_b1_8e', channel_multiplier=1.0, depth_multiplier=1.1, experts_multiplier=2,\n        pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_lite0(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Lite0 \"\"\"\n    # NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet_lite(\n        'tf_efficientnet_lite0', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_lite1(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Lite1 \"\"\"\n    # NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet_lite(\n        'tf_efficientnet_lite1', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_lite2(pretrained=False, **kwargs) -> 
EfficientNet:\n    \"\"\" EfficientNet-Lite2 \"\"\"\n    # NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet_lite(\n        'tf_efficientnet_lite2', channel_multiplier=1.1, depth_multiplier=1.2, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_lite3(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Lite3 \"\"\"\n    # NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet_lite(\n        'tf_efficientnet_lite3', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnet_lite4(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-Lite4 \"\"\"\n    # NOTE for train, drop_rate should be 0.4, drop_path_rate should be 0.2\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnet_lite(\n        'tf_efficientnet_lite4', channel_multiplier=1.4, depth_multiplier=1.8, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnetv2_s(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-V2 Small. Tensorflow compatible variant  \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnetv2_s('tf_efficientnetv2_s', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnetv2_m(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-V2 Medium. 
Tensorflow compatible variant  \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnetv2_m('tf_efficientnetv2_m', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnetv2_l(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-V2 Large. Tensorflow compatible variant  \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnetv2_l('tf_efficientnetv2_l', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnetv2_xl(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-V2 Xtra-Large. Tensorflow compatible variant\n    \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnetv2_xl('tf_efficientnetv2_xl', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnetv2_b0(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-V2-B0. Tensorflow compatible variant  \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnetv2_base('tf_efficientnetv2_b0', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnetv2_b1(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-V2-B1. Tensorflow compatible variant  \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnetv2_base(\n        'tf_efficientnetv2_b1', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnetv2_b2(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-V2-B2. 
Tensorflow compatible variant  \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnetv2_base(\n        'tf_efficientnetv2_b2', channel_multiplier=1.1, depth_multiplier=1.2, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_efficientnetv2_b3(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-V2-B3. Tensorflow compatible variant \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_efficientnetv2_base(\n        'tf_efficientnetv2_b3', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnet_x_b3(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B3 \"\"\"\n    # NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2\n    model = _gen_efficientnet_x(\n        'efficientnet_x_b3', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnet_x_b5(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B5 \"\"\"\n    model = _gen_efficientnet_x(\n        'efficientnet_x_b5', channel_multiplier=1.6, depth_multiplier=2.2, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef efficientnet_h_b5(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" EfficientNet-B5 \"\"\"\n    model = _gen_efficientnet_x(\n        'efficientnet_h_b5', channel_multiplier=1.92, depth_multiplier=2.2, version=2, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mixnet_s(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\"Creates a MixNet Small model.\n    \"\"\"\n    model = _gen_mixnet_s(\n        'mixnet_s', channel_multiplier=1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mixnet_m(pretrained=False, **kwargs) -> 
EfficientNet:\n    \"\"\"Creates a MixNet Medium model.\n    \"\"\"\n    model = _gen_mixnet_m(\n        'mixnet_m', channel_multiplier=1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mixnet_l(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\"Creates a MixNet Large model.\n    \"\"\"\n    model = _gen_mixnet_m(\n        'mixnet_l', channel_multiplier=1.3, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mixnet_xl(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\"Creates a MixNet Extra-Large model.\n    Not a paper spec, experimental def by RW w/ depth scaling.\n    \"\"\"\n    model = _gen_mixnet_m(\n        'mixnet_xl', channel_multiplier=1.6, depth_multiplier=1.2, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mixnet_xxl(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\"Creates a MixNet Double Extra Large model.\n    Not a paper spec, experimental def by RW w/ depth scaling.\n    \"\"\"\n    model = _gen_mixnet_m(\n        'mixnet_xxl', channel_multiplier=2.4, depth_multiplier=1.3, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_mixnet_s(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\"Creates a MixNet Small model. Tensorflow compatible variant\n    \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_mixnet_s(\n        'tf_mixnet_s', channel_multiplier=1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_mixnet_m(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\"Creates a MixNet Medium model. 
Tensorflow compatible variant\n    \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_mixnet_m(\n        'tf_mixnet_m', channel_multiplier=1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_mixnet_l(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\"Creates a MixNet Large model. Tensorflow compatible variant\n    \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_mixnet_m(\n        'tf_mixnet_l', channel_multiplier=1.3, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tinynet_a(pretrained=False, **kwargs) -> EfficientNet:\n    model = _gen_tinynet('tinynet_a', 1.0, 1.2, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tinynet_b(pretrained=False, **kwargs) -> EfficientNet:\n    model = _gen_tinynet('tinynet_b', 0.75, 1.1, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tinynet_c(pretrained=False, **kwargs) -> EfficientNet:\n    model = _gen_tinynet('tinynet_c', 0.54, 0.85, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tinynet_d(pretrained=False, **kwargs) -> EfficientNet:\n    model = _gen_tinynet('tinynet_d', 0.54, 0.695, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tinynet_e(pretrained=False, **kwargs) -> EfficientNet:\n    model = _gen_tinynet('tinynet_e', 0.51, 0.6, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenet_edgetpu_100(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" MobileNet-EdgeTPU-v1 100. \"\"\"\n    model = _gen_mobilenet_edgetpu('mobilenet_edgetpu_100', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenet_edgetpu_v2_xs(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" MobileNet-EdgeTPU-v2 Extra Small. 
\"\"\"\n    model = _gen_mobilenet_edgetpu('mobilenet_edgetpu_v2_xs', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenet_edgetpu_v2_s(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" MobileNet-EdgeTPU-v2 Small. \"\"\"\n    model = _gen_mobilenet_edgetpu('mobilenet_edgetpu_v2_s', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenet_edgetpu_v2_m(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" MobileNet-EdgeTPU-v2 Medium. \"\"\"\n    model = _gen_mobilenet_edgetpu('mobilenet_edgetpu_v2_m', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenet_edgetpu_v2_l(pretrained=False, **kwargs) -> EfficientNet:\n    \"\"\" MobileNet-EdgeTPU-v2 Large. \"\"\"\n    model = _gen_mobilenet_edgetpu('mobilenet_edgetpu_v2_l', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef test_efficientnet(pretrained=False, **kwargs) -> EfficientNet:\n    model = _gen_test_efficientnet('test_efficientnet', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef test_efficientnet_gn(pretrained=False, **kwargs) -> EfficientNet:\n\n    model = _gen_test_efficientnet(\n        'test_efficientnet_gn',\n        pretrained=pretrained,\n        norm_layer=kwargs.pop('norm_layer', partial(GroupNormAct, group_size=8)),\n        **kwargs\n    )\n    return model\n\n\n@register_model\ndef test_efficientnet_ln(pretrained=False, **kwargs) -> EfficientNet:\n    model = _gen_test_efficientnet(\n        'test_efficientnet_ln',\n        pretrained=pretrained,\n        norm_layer=kwargs.pop('norm_layer', LayerNormAct2d),\n        **kwargs\n    )\n    return model\n\n\n@register_model\ndef test_efficientnet_evos(pretrained=False, **kwargs) -> EfficientNet:\n    model = _gen_test_efficientnet(\n        'test_efficientnet_evos',\n        pretrained=pretrained,\n        norm_layer=kwargs.pop('norm_layer', partial(EvoNorm2dS0, group_size=8)),\n        
**kwargs\n    )\n    return model\n\n\nregister_model_deprecations(__name__, {\n    'tf_efficientnet_b0_ap': 'tf_efficientnet_b0.ap_in1k',\n    'tf_efficientnet_b1_ap': 'tf_efficientnet_b1.ap_in1k',\n    'tf_efficientnet_b2_ap': 'tf_efficientnet_b2.ap_in1k',\n    'tf_efficientnet_b3_ap': 'tf_efficientnet_b3.ap_in1k',\n    'tf_efficientnet_b4_ap': 'tf_efficientnet_b4.ap_in1k',\n    'tf_efficientnet_b5_ap': 'tf_efficientnet_b5.ap_in1k',\n    'tf_efficientnet_b6_ap': 'tf_efficientnet_b6.ap_in1k',\n    'tf_efficientnet_b7_ap': 'tf_efficientnet_b7.ap_in1k',\n    'tf_efficientnet_b8_ap': 'tf_efficientnet_b8.ap_in1k',\n    'tf_efficientnet_b0_ns': 'tf_efficientnet_b0.ns_jft_in1k',\n    'tf_efficientnet_b1_ns': 'tf_efficientnet_b1.ns_jft_in1k',\n    'tf_efficientnet_b2_ns': 'tf_efficientnet_b2.ns_jft_in1k',\n    'tf_efficientnet_b3_ns': 'tf_efficientnet_b3.ns_jft_in1k',\n    'tf_efficientnet_b4_ns': 'tf_efficientnet_b4.ns_jft_in1k',\n    'tf_efficientnet_b5_ns': 'tf_efficientnet_b5.ns_jft_in1k',\n    'tf_efficientnet_b6_ns': 'tf_efficientnet_b6.ns_jft_in1k',\n    'tf_efficientnet_b7_ns': 'tf_efficientnet_b7.ns_jft_in1k',\n    'tf_efficientnet_l2_ns_475': 'tf_efficientnet_l2.ns_jft_in1k_475',\n    'tf_efficientnet_l2_ns': 'tf_efficientnet_l2.ns_jft_in1k',\n    'tf_efficientnetv2_s_in21ft1k': 'tf_efficientnetv2_s.in21k_ft_in1k',\n    'tf_efficientnetv2_m_in21ft1k': 'tf_efficientnetv2_m.in21k_ft_in1k',\n    'tf_efficientnetv2_l_in21ft1k': 'tf_efficientnetv2_l.in21k_ft_in1k',\n    'tf_efficientnetv2_xl_in21ft1k': 'tf_efficientnetv2_xl.in21k_ft_in1k',\n    'tf_efficientnetv2_s_in21k': 'tf_efficientnetv2_s.in21k',\n    'tf_efficientnetv2_m_in21k': 'tf_efficientnetv2_m.in21k',\n    'tf_efficientnetv2_l_in21k': 'tf_efficientnetv2_l.in21k',\n    'tf_efficientnetv2_xl_in21k': 'tf_efficientnetv2_xl.in21k',\n    'efficientnet_b2a': 'efficientnet_b2',\n    'efficientnet_b3a': 'efficientnet_b3',\n    'mnasnet_a1': 'semnasnet_100',\n    'mnasnet_b1': 'mnasnet_100',\n})\n"
  },
  {
    "path": "timm/models/efficientvit_mit.py",
    "content": "\"\"\" EfficientViT (by MIT Song Han's Lab)\n\nPaper: `Efficientvit: Enhanced linear attention for high-resolution low-computation visual recognition`\n    - https://arxiv.org/abs/2205.14756\n\nAdapted from official impl at https://github.com/mit-han-lab/efficientvit\n\"\"\"\n\n__all__ = ['EfficientVit', 'EfficientVitLarge']\nfrom typing import List, Optional, Tuple, Type, Union\nfrom functools import partial\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import SelectAdaptivePool2d, create_conv2d, GELUTanh\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_module\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n\ndef val2list(x: list or tuple or any, repeat_time=1):\n    if isinstance(x, (list, tuple)):\n        return list(x)\n    return [x for _ in range(repeat_time)]\n\n\ndef val2tuple(x: list or tuple or any, min_len: int = 1, idx_repeat: int = -1):\n    # repeat elements if necessary\n    x = val2list(x)\n    if len(x) > 0:\n        x[idx_repeat:idx_repeat] = [x[idx_repeat] for _ in range(min_len - len(x))]\n\n    return tuple(x)\n\n\ndef get_same_padding(kernel_size: int or tuple[int, ...]) -> int or tuple[int, ...]:\n    if isinstance(kernel_size, tuple):\n        return tuple([get_same_padding(ks) for ks in kernel_size])\n    else:\n        assert kernel_size % 2 > 0, \"kernel size should be odd number\"\n        return kernel_size // 2\n\n\nclass ConvNormAct(nn.Module):\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: Union[int, Tuple[int, int]] = 3,\n            stride: int = 1,\n            dilation: int = 1,\n            groups: int = 1,\n            bias: bool = False,\n            dropout: float = 0.,\n          
  norm_layer: Optional[Type[nn.Module]] = nn.BatchNorm2d,\n            act_layer: Optional[Type[nn.Module]] = nn.ReLU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dropout = nn.Dropout(dropout, inplace=False)\n        self.conv = create_conv2d(\n            in_channels,\n            out_channels,\n            kernel_size=kernel_size,\n            stride=stride,\n            dilation=dilation,\n            groups=groups,\n            bias=bias,\n            **dd,\n        )\n        self.norm = norm_layer(num_features=out_channels, **dd) if norm_layer else nn.Identity()\n        self.act = act_layer(inplace=True) if act_layer is not None else nn.Identity()\n\n    def forward(self, x):\n        x = self.dropout(x)\n        x = self.conv(x)\n        x = self.norm(x)\n        x = self.act(x)\n        return x\n\n\nclass DSConv(nn.Module):\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int = 3,\n            stride: int = 1,\n            use_bias: Union[bool, Tuple[bool, bool]] = False,\n            norm_layer: Union[Type[nn.Module], Tuple[Optional[Type[nn.Module]], ...]] = nn.BatchNorm2d,\n            act_layer: Union[Type[nn.Module], Tuple[Optional[Type[nn.Module]], ...]] = (nn.ReLU6, None),\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        use_bias = val2tuple(use_bias, 2)\n        norm_layer = val2tuple(norm_layer, 2)\n        act_layer = val2tuple(act_layer, 2)\n\n        self.depth_conv = ConvNormAct(\n            in_channels,\n            in_channels,\n            kernel_size,\n            stride,\n            groups=in_channels,\n            norm_layer=norm_layer[0],\n            act_layer=act_layer[0],\n            bias=use_bias[0],\n            **dd,\n        )\n        self.point_conv 
= ConvNormAct(\n            in_channels,\n            out_channels,\n            1,\n            norm_layer=norm_layer[1],\n            act_layer=act_layer[1],\n            bias=use_bias[1],\n            **dd,\n        )\n\n    def forward(self, x):\n        x = self.depth_conv(x)\n        x = self.point_conv(x)\n        return x\n\n\nclass ConvBlock(nn.Module):\n    def __init__(\n        self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int = 3,\n            stride: int = 1,\n            mid_channels: Optional[int] = None,\n            expand_ratio: float = 1,\n            use_bias: Union[bool, Tuple[bool, bool]] = False,\n            norm_layer: Union[Type[nn.Module], Tuple[Optional[Type[nn.Module]], ...]] = nn.BatchNorm2d,\n            act_layer: Union[Type[nn.Module], Tuple[Optional[Type[nn.Module]], ...]] = (nn.ReLU6, None),\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        use_bias = val2tuple(use_bias, 2)\n        norm_layer = val2tuple(norm_layer, 2)\n        act_layer = val2tuple(act_layer, 2)\n        mid_channels = mid_channels or round(in_channels * expand_ratio)\n\n        self.conv1 = ConvNormAct(\n            in_channels,\n            mid_channels,\n            kernel_size,\n            stride,\n            norm_layer=norm_layer[0],\n            act_layer=act_layer[0],\n            bias=use_bias[0],\n            **dd,\n        )\n        self.conv2 = ConvNormAct(\n            mid_channels,\n            out_channels,\n            kernel_size,\n            1,\n            norm_layer=norm_layer[1],\n            act_layer=act_layer[1],\n            bias=use_bias[1],\n            **dd,\n        )\n\n    def forward(self, x):\n        x = self.conv1(x)\n        x = self.conv2(x)\n        return x\n\n\nclass MBConv(nn.Module):\n    def __init__(\n            self,\n            in_channels: int,\n            
out_channels: int,\n            kernel_size: int = 3,\n            stride: int = 1,\n            mid_channels: Optional[int] = None,\n            expand_ratio: float = 6,\n            use_bias: Union[bool, Tuple[bool, ...]] = False,\n            norm_layer: Union[Type[nn.Module], Tuple[Optional[Type[nn.Module]], ...]] = nn.BatchNorm2d,\n            act_layer: Union[Type[nn.Module], Tuple[Optional[Type[nn.Module]], ...]] = (nn.ReLU6, nn.ReLU6, None),\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        use_bias = val2tuple(use_bias, 3)\n        norm_layer = val2tuple(norm_layer, 3)\n        act_layer = val2tuple(act_layer, 3)\n        mid_channels = mid_channels or round(in_channels * expand_ratio)\n\n        self.inverted_conv = ConvNormAct(\n            in_channels,\n            mid_channels,\n            1,\n            stride=1,\n            norm_layer=norm_layer[0],\n            act_layer=act_layer[0],\n            bias=use_bias[0],\n            **dd,\n        )\n        self.depth_conv = ConvNormAct(\n            mid_channels,\n            mid_channels,\n            kernel_size,\n            stride=stride,\n            groups=mid_channels,\n            norm_layer=norm_layer[1],\n            act_layer=act_layer[1],\n            bias=use_bias[1],\n            **dd,\n        )\n        self.point_conv = ConvNormAct(\n            mid_channels,\n            out_channels,\n            1,\n            norm_layer=norm_layer[2],\n            act_layer=act_layer[2],\n            bias=use_bias[2],\n            **dd,\n        )\n\n    def forward(self, x):\n        x = self.inverted_conv(x)\n        x = self.depth_conv(x)\n        x = self.point_conv(x)\n        return x\n\n\nclass FusedMBConv(nn.Module):\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int = 3,\n            stride: int = 1,\n            
mid_channels: Optional[int] = None,\n            expand_ratio: float = 6,\n            groups: int = 1,\n            use_bias: Union[bool, Tuple[bool, ...]] = False,\n            norm_layer: Union[Type[nn.Module], Tuple[Optional[Type[nn.Module]], ...]] = nn.BatchNorm2d,\n            act_layer: Union[Type[nn.Module], Tuple[Optional[Type[nn.Module]], ...]] = (nn.ReLU6, None),\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        use_bias = val2tuple(use_bias, 2)\n        norm_layer = val2tuple(norm_layer, 2)\n        act_layer = val2tuple(act_layer, 2)\n        mid_channels = mid_channels or round(in_channels * expand_ratio)\n\n        self.spatial_conv = ConvNormAct(\n            in_channels,\n            mid_channels,\n            kernel_size,\n            stride=stride,\n            groups=groups,\n            norm_layer=norm_layer[0],\n            act_layer=act_layer[0],\n            bias=use_bias[0],\n            **dd,\n        )\n        self.point_conv = ConvNormAct(\n            mid_channels,\n            out_channels,\n            1,\n            norm_layer=norm_layer[1],\n            act_layer=act_layer[1],\n            bias=use_bias[1],\n            **dd,\n        )\n\n    def forward(self, x):\n        x = self.spatial_conv(x)\n        x = self.point_conv(x)\n        return x\n\n\nclass LiteMLA(nn.Module):\n    \"\"\"Lightweight multi-scale linear attention\"\"\"\n\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            heads: Optional[int] = None,\n            heads_ratio: float = 1.0,\n            dim: int = 8,\n            use_bias: Union[bool, Tuple[bool, ...]] = False,\n            norm_layer: Union[Type[nn.Module], Tuple[Optional[Type[nn.Module]], ...]] = (None, nn.BatchNorm2d),\n            act_layer: Union[Type[nn.Module], Tuple[Optional[Type[nn.Module]], ...]] = (None, None),\n            
kernel_func: Type[nn.Module] = nn.ReLU,\n            scales: Tuple[int, ...] = (5,),\n            eps: float = 1e-5,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.eps = eps\n        heads = heads or int(in_channels // dim * heads_ratio)\n        total_dim = heads * dim\n        use_bias = val2tuple(use_bias, 2)\n        norm_layer = val2tuple(norm_layer, 2)\n        act_layer = val2tuple(act_layer, 2)\n\n        self.dim = dim\n        self.qkv = ConvNormAct(\n            in_channels,\n            3 * total_dim,\n            1,\n            bias=use_bias[0],\n            norm_layer=norm_layer[0],\n            act_layer=act_layer[0],\n            **dd,\n        )\n        self.aggreg = nn.ModuleList([\n            nn.Sequential(\n                nn.Conv2d(\n                    3 * total_dim,\n                    3 * total_dim,\n                    scale,\n                    padding=get_same_padding(scale),\n                    groups=3 * total_dim,\n                    bias=use_bias[0],\n                    **dd,\n                ),\n                nn.Conv2d(3 * total_dim, 3 * total_dim, 1, groups=3 * heads, bias=use_bias[0], **dd),\n            )\n            for scale in scales\n        ])\n        self.kernel_func = kernel_func(inplace=False)\n\n        self.proj = ConvNormAct(\n            total_dim * (1 + len(scales)),\n            out_channels,\n            1,\n            bias=use_bias[1],\n            norm_layer=norm_layer[1],\n            act_layer=act_layer[1],\n            **dd,\n        )\n\n    def _attn(self, q, k, v):\n        dtype = v.dtype\n        q, k, v = q.float(), k.float(), v.float()\n        kv = k.transpose(-1, -2) @ v\n        out = q @ kv\n        out = out[..., :-1] / (out[..., -1:] + self.eps)\n        return out.to(dtype)\n\n    def forward(self, x):\n        B, _, H, W = x.shape\n\n        # generate multi-scale q, k, v\n        
qkv = self.qkv(x)\n        multi_scale_qkv = [qkv]\n        for op in self.aggreg:\n            multi_scale_qkv.append(op(qkv))\n        multi_scale_qkv = torch.cat(multi_scale_qkv, dim=1)\n        multi_scale_qkv = multi_scale_qkv.reshape(B, -1, 3 * self.dim, H * W).transpose(-1, -2)\n        q, k, v = multi_scale_qkv.chunk(3, dim=-1)\n\n        # lightweight global attention\n        q = self.kernel_func(q)\n        k = self.kernel_func(k)\n        v = F.pad(v, (0, 1), mode=\"constant\", value=1.)\n\n        if not torch.jit.is_scripting():\n            with torch.autocast(device_type=v.device.type, enabled=False):\n                out = self._attn(q, k, v)\n        else:\n            out = self._attn(q, k, v)\n\n        # final projection\n        out = out.transpose(-1, -2).reshape(B, -1, H, W)\n        out = self.proj(out)\n        return out\n\n\nregister_notrace_module(LiteMLA)\n\n\nclass EfficientVitBlock(nn.Module):\n    def __init__(\n            self,\n            in_channels: int,\n            heads_ratio: float = 1.0,\n            head_dim: int = 32,\n            expand_ratio: float = 4,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            act_layer: Type[nn.Module] = nn.Hardswish,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.context_module = ResidualBlock(\n            LiteMLA(\n                in_channels=in_channels,\n                out_channels=in_channels,\n                heads_ratio=heads_ratio,\n                dim=head_dim,\n                norm_layer=(None, norm_layer),\n                **dd,\n            ),\n            nn.Identity(),\n        )\n        self.local_module = ResidualBlock(\n            MBConv(\n                in_channels=in_channels,\n                out_channels=in_channels,\n                expand_ratio=expand_ratio,\n                use_bias=(True, True, False),\n                norm_layer=(None, 
None, norm_layer),\n                act_layer=(act_layer, act_layer, None),\n                **dd,\n            ),\n            nn.Identity(),\n        )\n\n    def forward(self, x):\n        x = self.context_module(x)\n        x = self.local_module(x)\n        return x\n\n\nclass ResidualBlock(nn.Module):\n    def __init__(\n            self,\n            main: Optional[nn.Module],\n            shortcut: Optional[nn.Module] = None,\n            pre_norm: Optional[nn.Module] = None,\n    ):\n        super().__init__()\n        self.pre_norm = pre_norm if pre_norm is not None else nn.Identity()\n        self.main = main\n        self.shortcut = shortcut\n\n    def forward(self, x):\n        res = self.main(self.pre_norm(x))\n        if self.shortcut is not None:\n            res = res + self.shortcut(x)\n        return res\n\n\ndef build_local_block(\n        in_channels: int,\n        out_channels: int,\n        stride: int,\n        expand_ratio: float,\n        norm_layer: str,\n        act_layer: str,\n        fewer_norm: bool = False,\n        block_type: str = \"default\",\n        device=None,\n        dtype=None,\n):\n    dd = {'device': device, 'dtype': dtype}\n    assert block_type in [\"default\", \"large\", \"fused\"]\n    if expand_ratio == 1:\n        if block_type == \"default\":\n            block = DSConv(\n                in_channels=in_channels,\n                out_channels=out_channels,\n                stride=stride,\n                use_bias=(True, False) if fewer_norm else False,\n                norm_layer=(None, norm_layer) if fewer_norm else norm_layer,\n                act_layer=(act_layer, None),\n                **dd,\n            )\n        else:\n            block = ConvBlock(\n                in_channels=in_channels,\n                out_channels=out_channels,\n                stride=stride,\n                use_bias=(True, False) if fewer_norm else False,\n                norm_layer=(None, norm_layer) if fewer_norm else 
norm_layer,\n                act_layer=(act_layer, None),\n                **dd,\n            )\n    else:\n        if block_type == \"default\":\n            block = MBConv(\n                in_channels=in_channels,\n                out_channels=out_channels,\n                stride=stride,\n                expand_ratio=expand_ratio,\n                use_bias=(True, True, False) if fewer_norm else False,\n                norm_layer=(None, None, norm_layer) if fewer_norm else norm_layer,\n                act_layer=(act_layer, act_layer, None),\n                **dd,\n            )\n        else:\n            block = FusedMBConv(\n                in_channels=in_channels,\n                out_channels=out_channels,\n                stride=stride,\n                expand_ratio=expand_ratio,\n                use_bias=(True, False) if fewer_norm else False,\n                norm_layer=(None, norm_layer) if fewer_norm else norm_layer,\n                act_layer=(act_layer, None),\n                **dd,\n            )\n    return block\n\n\nclass Stem(nn.Sequential):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            depth: int,\n            norm_layer: Type[nn.Module],\n            act_layer: Type[nn.Module],\n            block_type: str = 'default',\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.stride = 2\n\n        self.add_module(\n            'in_conv',\n            ConvNormAct(\n                in_chs,\n                out_chs,\n                kernel_size=3,\n                stride=2,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                **dd,\n            )\n        )\n        stem_block = 0\n        for _ in range(depth):\n            self.add_module(f'res{stem_block}', ResidualBlock(\n                build_local_block(\n                    in_channels=out_chs,\n       
             out_channels=out_chs,\n                    stride=1,\n                    expand_ratio=1,\n                    norm_layer=norm_layer,\n                    act_layer=act_layer,\n                    block_type=block_type,\n                    **dd,\n                ),\n                nn.Identity(),\n            ))\n            stem_block += 1\n\n\nclass EfficientVitStage(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            depth: int,\n            norm_layer: Type[nn.Module],\n            act_layer: Type[nn.Module],\n            expand_ratio: float,\n            head_dim: int,\n            vit_stage: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        blocks = [ResidualBlock(\n            build_local_block(\n                in_channels=in_chs,\n                out_channels=out_chs,\n                stride=2,\n                expand_ratio=expand_ratio,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                fewer_norm=vit_stage,\n                **dd,\n            ),\n            None,\n        )]\n        in_chs = out_chs\n\n        if vit_stage:\n            # for stage 3, 4\n            for _ in range(depth):\n                blocks.append(\n                    EfficientVitBlock(\n                        in_channels=in_chs,\n                        head_dim=head_dim,\n                        expand_ratio=expand_ratio,\n                        norm_layer=norm_layer,\n                        act_layer=act_layer,\n                        **dd,\n                    )\n                )\n        else:\n            # for stage 1, 2\n            for i in range(1, depth):\n                blocks.append(ResidualBlock(\n                    build_local_block(\n                        in_channels=in_chs,\n                        out_channels=out_chs,\n       
                 stride=1,\n                        expand_ratio=expand_ratio,\n                        norm_layer=norm_layer,\n                        act_layer=act_layer,\n                        **dd,\n                    ),\n                    nn.Identity(),\n                ))\n\n        self.blocks = nn.Sequential(*blocks)\n\n    def forward(self, x):\n        return self.blocks(x)\n\n\nclass EfficientVitLargeStage(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            depth: int,\n            norm_layer: Type[nn.Module],\n            act_layer: Type[nn.Module],\n            head_dim: int,\n            vit_stage: bool = False,\n            fewer_norm: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        blocks = [ResidualBlock(\n            build_local_block(\n                in_channels=in_chs,\n                out_channels=out_chs,\n                stride=2,\n                expand_ratio=24 if vit_stage else 16,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                fewer_norm=vit_stage or fewer_norm,\n                block_type='default' if fewer_norm else 'fused',\n                **dd,\n            ),\n            None,\n        )]\n        in_chs = out_chs\n\n        if vit_stage:\n            # for stage 4\n            for _ in range(depth):\n                blocks.append(\n                    EfficientVitBlock(\n                        in_channels=in_chs,\n                        head_dim=head_dim,\n                        expand_ratio=6,\n                        norm_layer=norm_layer,\n                        act_layer=act_layer,\n                        **dd,\n                    )\n                )\n        else:\n            # for stage 1, 2, 3\n            for i in range(depth):\n                blocks.append(ResidualBlock(\n                 
   build_local_block(\n                        in_channels=in_chs,\n                        out_channels=out_chs,\n                        stride=1,\n                        expand_ratio=4,\n                        norm_layer=norm_layer,\n                        act_layer=act_layer,\n                        fewer_norm=fewer_norm,\n                        block_type='default' if fewer_norm else 'fused',\n                        **dd,\n                    ),\n                    nn.Identity(),\n                ))\n\n        self.blocks = nn.Sequential(*blocks)\n\n    def forward(self, x):\n        return self.blocks(x)\n\n\nclass ClassifierHead(nn.Module):\n    def __init__(\n            self,\n            in_channels: int,\n            widths: List[int],\n            num_classes: int = 1000,\n            dropout: float = 0.,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            act_layer: Optional[Type[nn.Module]] = nn.Hardswish,\n            pool_type: str = 'avg',\n            norm_eps: float = 1e-5,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.widths = widths\n        self.num_features = widths[-1]\n\n        assert pool_type, 'Cannot disable pooling'\n        self.in_conv = ConvNormAct(in_channels, widths[0], 1, norm_layer=norm_layer, act_layer=act_layer, **dd)\n        self.global_pool = SelectAdaptivePool2d(pool_type=pool_type, flatten=True)\n        self.classifier = nn.Sequential(\n            nn.Linear(widths[0], widths[1], bias=False, **dd),\n            nn.LayerNorm(widths[1], eps=norm_eps, **dd),\n            act_layer(inplace=True) if act_layer is not None else nn.Identity(),\n            nn.Dropout(dropout, inplace=False),\n            nn.Linear(widths[1], num_classes, bias=True, **dd) if num_classes > 0 else nn.Identity(),\n        )\n\n    def reset(self, num_classes: int, pool_type: Optional[str] = None):\n        if pool_type 
is not None:\n            assert pool_type, 'Cannot disable pooling'\n            self.global_pool = SelectAdaptivePool2d(pool_type=pool_type, flatten=True,)\n        if num_classes > 0:\n            self.classifier[-1] = nn.Linear(self.num_features, num_classes, bias=True)\n        else:\n            self.classifier[-1] = nn.Identity()\n\n    def forward(self, x, pre_logits: bool = False):\n        x = self.in_conv(x)\n        x = self.global_pool(x)\n        if pre_logits:\n            # cannot slice or iterate with torchscript so, this\n            x = self.classifier[0](x)\n            x = self.classifier[1](x)\n            x = self.classifier[2](x)\n            x = self.classifier[3](x)\n        else:\n            x = self.classifier(x)\n        return x\n\n\nclass EfficientVit(nn.Module):\n    def __init__(\n            self,\n            in_chans: int = 3,\n            widths: Tuple[int, ...] = (),\n            depths: Tuple[int, ...] = (),\n            head_dim: int = 32,\n            expand_ratio: float = 4,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            act_layer: Type[nn.Module] = nn.Hardswish,\n            global_pool: str = 'avg',\n            head_widths: Tuple[int, ...] 
= (),\n            drop_rate: float = 0.0,\n            num_classes: int = 1000,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.grad_checkpointing = False\n        self.global_pool = global_pool\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n\n        # input stem\n        self.stem = Stem(in_chans, widths[0], depths[0], norm_layer, act_layer, **dd)\n        stride = self.stem.stride\n\n        # stages\n        self.feature_info = []\n        self.stages = nn.Sequential()\n        in_channels = widths[0]\n        for i, (w, d) in enumerate(zip(widths[1:], depths[1:])):\n            self.stages.append(EfficientVitStage(\n                in_channels,\n                w,\n                depth=d,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                expand_ratio=expand_ratio,\n                head_dim=head_dim,\n                vit_stage=i >= 2,\n                **dd,\n            ))\n            stride *= 2\n            in_channels = w\n            self.feature_info += [dict(num_chs=in_channels, reduction=stride, module=f'stages.{i}')]\n\n        self.num_features = in_channels\n        self.head = ClassifierHead(\n            self.num_features,\n            widths=head_widths,\n            num_classes=num_classes,\n            dropout=drop_rate,\n            pool_type=self.global_pool,\n            **dd,\n        )\n        self.head_hidden_size = self.head.num_features\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^stem',\n            blocks=r'^stages\\.(\\d+)' if coarse else [\n                (r'^stages\\.(\\d+).downsample', (0,)),\n                (r'^stages\\.(\\d+)\\.\\w+\\.(\\d+)', None),\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        
self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.classifier[-1]\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.stem(x)\n\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint_seq(stages, x)\n            else:\n                x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n 
           return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stages, x)\n        else:\n            x = self.stages(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\nclass EfficientVitLarge(nn.Module):\n    def __init__(\n        self,\n        in_chans: int = 3,\n        widths: Tuple[int, ...] = (),\n        depths: Tuple[int, ...] = (),\n        head_dim: int = 32,\n        norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n        act_layer: Type[nn.Module] = GELUTanh,\n        global_pool: str = 'avg',\n        head_widths: Tuple[int, ...] 
= (),\n        drop_rate: float = 0.0,\n        num_classes: int = 1000,\n        norm_eps: float = 1e-7,\n        device=None,\n        dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.grad_checkpointing = False\n        self.global_pool = global_pool\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.norm_eps = norm_eps\n        norm_layer = partial(norm_layer, eps=self.norm_eps)\n\n        # input stem\n        self.stem = Stem(in_chans, widths[0], depths[0], norm_layer, act_layer, block_type='large', **dd)\n        stride = self.stem.stride\n\n        # stages\n        self.feature_info = []\n        self.stages = nn.Sequential()\n        in_channels = widths[0]\n        for i, (w, d) in enumerate(zip(widths[1:], depths[1:])):\n            self.stages.append(EfficientVitLargeStage(\n                in_channels,\n                w,\n                depth=d,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                head_dim=head_dim,\n                vit_stage=i >= 3,\n                fewer_norm=i >= 2,\n                **dd,\n            ))\n            stride *= 2\n            in_channels = w\n            self.feature_info += [dict(num_chs=in_channels, reduction=stride, module=f'stages.{i}')]\n\n        self.num_features = in_channels\n        self.head = ClassifierHead(\n            self.num_features,\n            widths=head_widths,\n            num_classes=num_classes,\n            dropout=drop_rate,\n            pool_type=self.global_pool,\n            act_layer=act_layer,\n            norm_eps=self.norm_eps,\n            **dd,\n        )\n        self.head_hidden_size = self.head.num_features\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^stem',\n            blocks=r'^stages\\.(\\d+)' if coarse else [\n                (r'^stages\\.(\\d+).downsample', 
(0,)),\n                (r'^stages\\.(\\d+)\\.\\w+\\.(\\d+)', None),\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.classifier[-1]\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.stem(x)\n\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n         
       x = checkpoint(stage, x)  # checkpoint current stage only, not the whole (sliced) sequence\n            else:\n                x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stages, x)\n        else:\n            x = self.stages(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000,\n        'mean': IMAGENET_DEFAULT_MEAN,\n        'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.in_conv.conv',\n        'classifier': 'head.classifier.4',\n        'crop_pct': 0.95,\n        'license': 'apache-2.0',\n        'input_size': (3, 224, 224),\n        'pool_size': (7, 7),\n        **kwargs,\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'efficientvit_b0.r224_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'efficientvit_b1.r224_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'efficientvit_b1.r256_in1k': _cfg(\n        
hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0,\n    ),\n    'efficientvit_b1.r288_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 288, 288), pool_size=(9, 9), crop_pct=1.0,\n    ),\n    'efficientvit_b2.r224_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'efficientvit_b2.r256_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0,\n    ),\n    'efficientvit_b2.r288_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 288, 288), pool_size=(9, 9), crop_pct=1.0,\n    ),\n    'efficientvit_b3.r224_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'efficientvit_b3.r256_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0,\n    ),\n    'efficientvit_b3.r288_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 288, 288), pool_size=(9, 9), crop_pct=1.0,\n    ),\n    'efficientvit_l1.r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=1.0,\n    ),\n    'efficientvit_l2.r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=1.0,\n    ),\n    'efficientvit_l2.r256_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0,\n    ),\n    'efficientvit_l2.r288_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 288, 288), pool_size=(9, 9), crop_pct=1.0,\n    ),\n    'efficientvit_l2.r384_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0,\n    ),\n    'efficientvit_l3.r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=1.0,\n    ),\n    'efficientvit_l3.r256_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0,\n    ),\n    'efficientvit_l3.r320_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 320, 320), pool_size=(10, 10), crop_pct=1.0,\n    ),\n    
'efficientvit_l3.r384_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0,\n    ),\n    # 'efficientvit_l0_sam.sam': _cfg(\n    #     # hf_hub_id='timm/',\n    #     input_size=(3, 512, 512), crop_pct=1.0,\n    #     num_classes=0,\n    # ),\n    # 'efficientvit_l1_sam.sam': _cfg(\n    #     # hf_hub_id='timm/',\n    #     input_size=(3, 512, 512), crop_pct=1.0,\n    #     num_classes=0,\n    # ),\n    # 'efficientvit_l2_sam.sam': _cfg(\n    #     # hf_hub_id='timm/',f\n    #     input_size=(3, 512, 512), crop_pct=1.0,\n    #     num_classes=0,\n    # ),\n})\n\n\ndef _create_efficientvit(variant, pretrained=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', (0, 1, 2, 3))\n    model = build_model_with_cfg(\n        EfficientVit,\n        variant,\n        pretrained,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        **kwargs\n    )\n    return model\n\n\ndef _create_efficientvit_large(variant, pretrained=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', (0, 1, 2, 3))\n    model = build_model_with_cfg(\n        EfficientVitLarge,\n        variant,\n        pretrained,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        **kwargs\n    )\n    return model\n\n\n@register_model\ndef efficientvit_b0(pretrained=False, **kwargs):\n    model_args = dict(\n        widths=(8, 16, 32, 64, 128), depths=(1, 2, 2, 2, 2), head_dim=16, head_widths=(1024, 1280))\n    return _create_efficientvit('efficientvit_b0', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef efficientvit_b1(pretrained=False, **kwargs):\n    model_args = dict(\n        widths=(16, 32, 64, 128, 256), depths=(1, 2, 3, 3, 4), head_dim=16, head_widths=(1536, 1600))\n    return _create_efficientvit('efficientvit_b1', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef efficientvit_b2(pretrained=False, **kwargs):\n   
 model_args = dict(\n        widths=(24, 48, 96, 192, 384), depths=(1, 3, 4, 4, 6), head_dim=32, head_widths=(2304, 2560))\n    return _create_efficientvit('efficientvit_b2', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef efficientvit_b3(pretrained=False, **kwargs):\n    model_args = dict(\n        widths=(32, 64, 128, 256, 512), depths=(1, 4, 6, 6, 9), head_dim=32, head_widths=(2304, 2560))\n    return _create_efficientvit('efficientvit_b3', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef efficientvit_l1(pretrained=False, **kwargs):\n    model_args = dict(\n        widths=(32, 64, 128, 256, 512), depths=(1, 1, 1, 6, 6), head_dim=32, head_widths=(3072, 3200))\n    return _create_efficientvit_large('efficientvit_l1', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef efficientvit_l2(pretrained=False, **kwargs):\n    model_args = dict(\n        widths=(32, 64, 128, 256, 512), depths=(1, 2, 2, 8, 8), head_dim=32, head_widths=(3072, 3200))\n    return _create_efficientvit_large('efficientvit_l2', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef efficientvit_l3(pretrained=False, **kwargs):\n    model_args = dict(\n        widths=(64, 128, 256, 512, 1024), depths=(1, 2, 2, 8, 8), head_dim=32, head_widths=(6144, 6400))\n    return _create_efficientvit_large('efficientvit_l3', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n# FIXME will wait for v2 SAM models which are pending\n# @register_model\n# def efficientvit_l0_sam(pretrained=False, **kwargs):\n#     # only backbone for segment-anything-model weights\n#     model_args = dict(\n#         widths=(32, 64, 128, 256, 512), depths=(1, 1, 1, 4, 4), head_dim=32, num_classes=0, norm_eps=1e-6)\n#     return _create_efficientvit_large('efficientvit_l0_sam', pretrained=pretrained, **dict(model_args, **kwargs))\n#\n#\n# @register_model\n# def efficientvit_l1_sam(pretrained=False, **kwargs):\n#     # 
only backbone for segment-anything-model weights\n#     model_args = dict(\n#         widths=(32, 64, 128, 256, 512), depths=(1, 1, 1, 6, 6), head_dim=32, num_classes=0, norm_eps=1e-6)\n#     return _create_efficientvit_large('efficientvit_l1_sam', pretrained=pretrained, **dict(model_args, **kwargs))\n#\n#\n# @register_model\n# def efficientvit_l2_sam(pretrained=False, **kwargs):\n#     # only backbone for segment-anything-model weights\n#     model_args = dict(\n#         widths=(32, 64, 128, 256, 512), depths=(1, 2, 2, 8, 8), head_dim=32, num_classes=0, norm_eps=1e-6)\n#     return _create_efficientvit_large('efficientvit_l2_sam', pretrained=pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/efficientvit_msra.py",
    "content": "\"\"\" EfficientViT (by MSRA)\n\nPaper: `EfficientViT: Memory Efficient Vision Transformer with Cascaded Group Attention`\n    - https://arxiv.org/abs/2305.07027\n\nAdapted from official impl at https://github.com/microsoft/Cream/tree/main/EfficientViT\n\"\"\"\n\n__all__ = ['EfficientVitMsra']\nimport itertools\nfrom collections import OrderedDict\nfrom functools import partial\nfrom typing import Dict, List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import SqueezeExcite, SelectAdaptivePool2d, trunc_normal_, _assert\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint, checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n\nclass ConvNorm(torch.nn.Sequential):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            ks: int = 1,\n            stride: int = 1,\n            pad: int = 0,\n            dilation: int = 1,\n            groups: int = 1,\n            bn_weight_init: float = 1,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv = nn.Conv2d(in_chs, out_chs, ks, stride, pad, dilation, groups, bias=False, **dd)\n        self.bn = nn.BatchNorm2d(out_chs, **dd)\n        torch.nn.init.constant_(self.bn.weight, bn_weight_init)\n\n    @torch.no_grad()\n    def fuse(self):\n        c, bn = self.conv, self.bn\n        w = bn.weight / (bn.running_var + bn.eps)**0.5\n        w = c.weight * w[:, None, None, None]\n        b = bn.bias - bn.running_mean * bn.weight / \\\n            (bn.running_var + bn.eps)**0.5\n        m = torch.nn.Conv2d(\n            w.size(1) * self.conv.groups, w.size(0), w.shape[2:],\n            stride=self.conv.stride, padding=self.conv.padding, dilation=self.conv.dilation, 
groups=self.conv.groups)\n        m.weight.data.copy_(w)\n        m.bias.data.copy_(b)\n        return m\n\n\nclass NormLinear(torch.nn.Sequential):\n    def __init__(\n            self,\n            in_features: int,\n            out_features: int,\n            bias: bool = True,\n            std: float = 0.02,\n            drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.bn = nn.BatchNorm1d(in_features, **dd)\n        self.drop = nn.Dropout(drop)\n        self.linear = nn.Linear(in_features, out_features, bias=bias, **dd)\n\n        trunc_normal_(self.linear.weight, std=std)\n        if self.linear.bias is not None:\n            nn.init.constant_(self.linear.bias, 0)\n\n    @torch.no_grad()\n    def fuse(self):\n        bn, linear = self.bn, self.linear\n        w = bn.weight / (bn.running_var + bn.eps)**0.5\n        b = bn.bias - self.bn.running_mean * \\\n            self.bn.weight / (bn.running_var + bn.eps)**0.5\n        w = linear.weight * w[None, :]\n        if linear.bias is None:\n            b = b @ self.linear.weight.T\n        else:\n            b = (linear.weight @ b[:, None]).view(-1) + self.linear.bias\n        m = torch.nn.Linear(w.size(1), w.size(0))\n        m.weight.data.copy_(w)\n        m.bias.data.copy_(b)\n        return m\n\n\nclass PatchMerging(torch.nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            out_dim: int,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        hid_dim = int(dim * 4)\n        self.conv1 = ConvNorm(dim, hid_dim, 1, 1, 0, **dd)\n        self.act = torch.nn.ReLU()\n        self.conv2 = ConvNorm(hid_dim, hid_dim, 3, 2, 1, groups=hid_dim, **dd)\n        self.se = SqueezeExcite(hid_dim, .25, **dd)\n        self.conv3 = ConvNorm(hid_dim, out_dim, 1, 1, 0, **dd)\n\n    def 
forward(self, x):\n        x = self.conv3(self.se(self.act(self.conv2(self.act(self.conv1(x))))))\n        return x\n\n\nclass ResidualDrop(torch.nn.Module):\n    def __init__(self, m: nn.Module, drop: float = 0.):\n        super().__init__()\n        self.m = m\n        self.drop = drop\n\n    def forward(self, x):\n        if self.training and self.drop > 0:\n            return x + self.m(x) * torch.rand(\n                x.size(0), 1, 1, 1, device=x.device).ge_(self.drop).div(1 - self.drop).detach()\n        else:\n            return x + self.m(x)\n\n\nclass ConvMlp(torch.nn.Module):\n    def __init__(\n        self,\n        ed: int,\n        h: int,\n        device=None,\n        dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.pw1 = ConvNorm(ed, h, **dd)\n        self.act = torch.nn.ReLU()\n        self.pw2 = ConvNorm(h, ed, bn_weight_init=0, **dd)\n\n    def forward(self, x):\n        x = self.pw2(self.act(self.pw1(x)))\n        return x\n\n\nclass CascadedGroupAttention(torch.nn.Module):\n    attention_bias_cache: Dict[str, torch.Tensor]\n\n    r\"\"\" Cascaded Group Attention.\n\n    Args:\n        dim (int): Number of input channels.\n        key_dim (int): The dimension for query and key.\n        num_heads (int): Number of attention heads.\n        attn_ratio (int): Multiplier for the query dim for value dimension.\n        resolution (int): Input resolution, correspond to the window size.\n        kernels (List[int]): The kernel size of the dw conv on query.\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            key_dim: int,\n            num_heads: int = 8,\n            attn_ratio: int = 4,\n            resolution: int = 14,\n            kernels: Tuple[int, ...] 
= (5, 5, 5, 5),\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_heads = num_heads\n        self.scale = key_dim ** -0.5\n        self.key_dim = key_dim\n        self.val_dim = int(attn_ratio * key_dim)\n        self.attn_ratio = attn_ratio\n\n        qkvs = []\n        dws = []\n        for i in range(num_heads):\n            qkvs.append(ConvNorm(dim // num_heads, self.key_dim * 2 + self.val_dim, **dd))\n            dws.append(ConvNorm(self.key_dim, self.key_dim, kernels[i], 1, kernels[i] // 2, groups=self.key_dim, **dd))\n        self.qkvs = torch.nn.ModuleList(qkvs)\n        self.dws = torch.nn.ModuleList(dws)\n        self.proj = torch.nn.Sequential(\n            torch.nn.ReLU(),\n            ConvNorm(self.val_dim * num_heads, dim, bn_weight_init=0, **dd)\n        )\n\n        self.resolution = resolution\n        N = resolution * resolution\n        # Number of unique offsets: abs differences range from 0 to resolution-1 for each dim\n        num_offsets = resolution * resolution\n        self.attention_biases = torch.nn.Parameter(torch.empty(num_heads, num_offsets, **dd))\n        self.register_buffer(\n            'attention_bias_idxs',\n            torch.empty((N, N), device=device, dtype=torch.long),\n            persistent=False,\n        )\n        self.attention_bias_cache = {}\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        torch.nn.init.zeros_(self.attention_biases)\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        points = list(itertools.product(range(self.resolution), range(self.resolution)))\n        attention_offsets = {}\n        idxs = []\n        for p1 in points:\n            for p2 in 
points:\n                offset = (abs(p1[0] - p2[0]), abs(p1[1] - p2[1]))\n                if offset not in attention_offsets:\n                    attention_offsets[offset] = len(attention_offsets)\n                idxs.append(attention_offsets[offset])\n        self.attention_bias_idxs.copy_(torch.tensor(idxs, dtype=torch.long).view(len(points), len(points)))\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n    @torch.no_grad()\n    def train(self, mode=True):\n        super().train(mode)\n        if mode and self.attention_bias_cache:\n            self.attention_bias_cache = {}  # clear ab cache\n\n    def get_attention_biases(self, device: torch.device) -> torch.Tensor:\n        if torch.jit.is_tracing() or self.training:\n            return self.attention_biases[:, self.attention_bias_idxs]\n        else:\n            device_key = str(device)\n            if device_key not in self.attention_bias_cache:\n                self.attention_bias_cache[device_key] = self.attention_biases[:, self.attention_bias_idxs]\n            return self.attention_bias_cache[device_key]\n\n    def forward(self, x):\n        B, C, H, W = x.shape\n        feats_in = x.chunk(len(self.qkvs), dim=1)\n        feats_out = []\n        feat = feats_in[0]\n        attn_bias = self.get_attention_biases(x.device)\n        for head_idx, (qkv, dws) in enumerate(zip(self.qkvs, self.dws)):\n            if head_idx > 0:\n                feat = feat + feats_in[head_idx]\n            feat = qkv(feat)\n            q, k, v = feat.view(B, -1, H, W).split([self.key_dim, self.key_dim, self.val_dim], dim=1)\n            q = dws(q)\n            q, k, v = q.flatten(2), k.flatten(2), v.flatten(2)\n            q = q * self.scale\n            attn = q.transpose(-2, -1) @ k\n            attn = attn + attn_bias[head_idx]\n            attn = attn.softmax(dim=-1)\n            feat = v @ attn.transpose(-2, -1)\n            
feat = feat.view(B, self.val_dim, H, W)\n            feats_out.append(feat)\n        x = self.proj(torch.cat(feats_out, 1))\n        return x\n\n\nclass LocalWindowAttention(torch.nn.Module):\n    r\"\"\" Local Window Attention.\n\n    Args:\n        dim (int): Number of input channels.\n        key_dim (int): The dimension for query and key.\n        num_heads (int): Number of attention heads.\n        attn_ratio (int): Multiplier for the query dim for value dimension.\n        resolution (int): Input resolution.\n        window_resolution (int): Local window resolution.\n        kernels (List[int]): The kernel size of the dw conv on query.\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            key_dim: int,\n            num_heads: int = 8,\n            attn_ratio: int = 4,\n            resolution: int = 14,\n            window_resolution: int = 7,\n            kernels: Tuple[int, ...] = (5, 5, 5, 5),\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.num_heads = num_heads\n        self.resolution = resolution\n        assert window_resolution > 0, 'window_size must be greater than 0'\n        self.window_resolution = window_resolution\n        window_resolution = min(window_resolution, resolution)\n        self.attn = CascadedGroupAttention(\n            dim, key_dim, num_heads,\n            attn_ratio=attn_ratio,\n            resolution=window_resolution,\n            kernels=kernels,\n            **dd,\n        )\n\n    def forward(self, x):\n        H = W = self.resolution\n        B, C, H_, W_ = x.shape\n        # Only check this for classification models\n        _assert(H == H_, f'input feature has wrong size, expect {(H, W)}, got {(H_, W_)}')\n        _assert(W == W_, f'input feature has wrong size, expect {(H, W)}, got {(H_, W_)}')\n        if H <= self.window_resolution and W <= 
self.window_resolution:\n            x = self.attn(x)\n        else:\n            x = x.permute(0, 2, 3, 1)\n            pad_b = (self.window_resolution - H % self.window_resolution) % self.window_resolution\n            pad_r = (self.window_resolution - W % self.window_resolution) % self.window_resolution\n            x = torch.nn.functional.pad(x, (0, 0, 0, pad_r, 0, pad_b))\n\n            pH, pW = H + pad_b, W + pad_r\n            nH = pH // self.window_resolution\n            nW = pW // self.window_resolution\n            # window partition, BHWC -> B(nHh)(nWw)C -> BnHnWhwC -> (BnHnW)hwC -> (BnHnW)Chw\n            x = x.view(B, nH, self.window_resolution, nW, self.window_resolution, C).transpose(2, 3)\n            x = x.reshape(B * nH * nW, self.window_resolution, self.window_resolution, C).permute(0, 3, 1, 2)\n            x = self.attn(x)\n            # window reverse, (BnHnW)Chw -> (BnHnW)hwC -> BnHnWhwC -> B(nHh)(nWw)C -> BHWC\n            x = x.permute(0, 2, 3, 1).view(B, nH, nW, self.window_resolution, self.window_resolution, C)\n            x = x.transpose(2, 3).reshape(B, pH, pW, C)\n            x = x[:, :H, :W].contiguous()\n            x = x.permute(0, 3, 1, 2)\n        return x\n\n\nclass EfficientVitBlock(torch.nn.Module):\n    \"\"\" A basic EfficientVit building block.\n\n    Args:\n        dim (int): Number of input channels.\n        key_dim (int): Dimension for query and key in the token mixer.\n        num_heads (int): Number of attention heads.\n        attn_ratio (int): Multiplier for the query dim for value dimension.\n        resolution (int): Input resolution.\n        window_resolution (int): Local window resolution.\n        kernels (List[int]): The kernel size of the dw conv on query.\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            key_dim: int,\n            num_heads: int = 8,\n            attn_ratio: int = 4,\n            resolution: int = 14,\n            window_resolution: int = 7,\n            
kernels: List[int] = [5, 5, 5, 5],\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.dw0 = ResidualDrop(ConvNorm(dim, dim, 3, 1, 1, groups=dim, bn_weight_init=0., **dd))\n        self.ffn0 = ResidualDrop(ConvMlp(dim, int(dim * 2), **dd))\n\n        self.mixer = ResidualDrop(\n            LocalWindowAttention(\n                dim, key_dim, num_heads,\n                attn_ratio=attn_ratio,\n                resolution=resolution,\n                window_resolution=window_resolution,\n                kernels=kernels,\n                **dd,\n            ),\n        )\n\n        self.dw1 = ResidualDrop(ConvNorm(dim, dim, 3, 1, 1, groups=dim, bn_weight_init=0., **dd))\n        self.ffn1 = ResidualDrop(ConvMlp(dim, int(dim * 2), **dd))\n\n    def forward(self, x):\n        return self.ffn1(self.dw1(self.mixer(self.ffn0(self.dw0(x)))))\n\n\nclass EfficientVitStage(torch.nn.Module):\n    def __init__(\n            self,\n            in_dim: int,\n            out_dim: int,\n            key_dim: int,\n            downsample: Tuple[str, int] = ('', 1),\n            num_heads: int = 8,\n            attn_ratio: int = 4,\n            resolution: int = 14,\n            window_resolution: int = 7,\n            kernels: List[int] = [5, 5, 5, 5],\n            depth: int = 1,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if downsample[0] == 'subsample':\n            self.resolution = (resolution - 1) // downsample[1] + 1\n            down_blocks = []\n            down_blocks.append((\n                'res1',\n                torch.nn.Sequential(\n                    ResidualDrop(ConvNorm(in_dim, in_dim, 3, 1, 1, groups=in_dim, **dd)),\n                    ResidualDrop(ConvMlp(in_dim, int(in_dim * 2), **dd)),\n                )\n            ))\n            
down_blocks.append(('patchmerge', PatchMerging(in_dim, out_dim, **dd)))\n            down_blocks.append((\n                'res2',\n                torch.nn.Sequential(\n                    ResidualDrop(ConvNorm(out_dim, out_dim, 3, 1, 1, groups=out_dim, **dd)),\n                    ResidualDrop(ConvMlp(out_dim, int(out_dim * 2), **dd)),\n                )\n            ))\n            self.downsample = nn.Sequential(OrderedDict(down_blocks))\n        else:\n            assert in_dim == out_dim\n            self.downsample = nn.Identity()\n            self.resolution = resolution\n\n        blocks = []\n        for d in range(depth):\n            blocks.append(EfficientVitBlock(\n                out_dim,\n                key_dim,\n                num_heads,\n                attn_ratio,\n                self.resolution,\n                window_resolution,\n                kernels,\n                **dd,\n            ))\n        self.blocks = nn.Sequential(*blocks)\n\n    def forward(self, x):\n        x = self.downsample(x)\n        x = self.blocks(x)\n        return x\n\n\nclass PatchEmbedding(torch.nn.Sequential):\n    def __init__(\n            self,\n            in_chans: int,\n            dim: int,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.add_module('conv1', ConvNorm(in_chans, dim // 8, 3, 2, 1, **dd))\n        self.add_module('relu1', torch.nn.ReLU())\n        self.add_module('conv2', ConvNorm(dim // 8, dim // 4, 3, 2, 1, **dd))\n        self.add_module('relu2', torch.nn.ReLU())\n        self.add_module('conv3', ConvNorm(dim // 4, dim // 2, 3, 2, 1, **dd))\n        self.add_module('relu3', torch.nn.ReLU())\n        self.add_module('conv4', ConvNorm(dim // 2, dim, 3, 2, 1, **dd))\n        self.patch_size = 16\n\n\nclass EfficientVitMsra(nn.Module):\n    def __init__(\n            self,\n            img_size: int = 224,\n            in_chans: int = 3,\n    
        num_classes: int = 1000,\n            embed_dim: Tuple[int, ...] = (64, 128, 192),\n            key_dim: Tuple[int, ...] = (16, 16, 16),\n            depth: Tuple[int, ...] = (1, 2, 3),\n            num_heads: Tuple[int, ...] = (4, 4, 4),\n            window_size: Tuple[int, ...] = (7, 7, 7),\n            kernels: Tuple[int, ...] = (5, 5, 5, 5),\n            down_ops: Tuple[Tuple[str, int], ...] = (('', 1), ('subsample', 2), ('subsample', 2)),\n            global_pool: str = 'avg',\n            drop_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.grad_checkpointing = False\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n\n        # Patch embedding\n        self.patch_embed = PatchEmbedding(in_chans, embed_dim[0], **dd)\n        stride = self.patch_embed.patch_size\n        resolution = img_size // self.patch_embed.patch_size\n        attn_ratio = [embed_dim[i] / (key_dim[i] * num_heads[i]) for i in range(len(embed_dim))]\n\n        # Build EfficientVit blocks\n        self.feature_info = []\n        stages = []\n        pre_ed = embed_dim[0]\n        for i, (ed, kd, dpth, nh, ar, wd, do) in enumerate(\n                zip(embed_dim, key_dim, depth, num_heads, attn_ratio, window_size, down_ops)):\n            stage = EfficientVitStage(\n                in_dim=pre_ed,\n                out_dim=ed,\n                key_dim=kd,\n                downsample=do,\n                num_heads=nh,\n                attn_ratio=ar,\n                resolution=resolution,\n                window_resolution=wd,\n                kernels=kernels,\n                depth=dpth,\n                **dd,\n            )\n            pre_ed = ed\n            if do[0] == 'subsample' and i != 0:\n                stride *= do[1]\n            resolution = stage.resolution\n            stages.append(stage)\n    
        self.feature_info += [dict(num_chs=ed, reduction=stride, module=f'stages.{i}')]\n        self.stages = nn.Sequential(*stages)\n\n        if global_pool == 'avg':\n            self.global_pool = SelectAdaptivePool2d(pool_type=global_pool, flatten=True)\n        else:\n            assert num_classes == 0\n            self.global_pool = nn.Identity()\n        self.num_features = self.head_hidden_size = embed_dim[-1]\n        self.head = NormLinear(\n            self.num_features, num_classes, drop=self.drop_rate, **dd) if num_classes > 0 else torch.nn.Identity()\n\n        # TODO: skip init when on meta device when safe to do so\n        self.init_weights(needs_reset=False)\n\n    def init_weights(self, needs_reset: bool = True):\n        self.apply(partial(self._init_weights, needs_reset=needs_reset))\n\n    def _init_weights(self, m: nn.Module, needs_reset: bool = True) -> None:\n        if needs_reset and hasattr(m, 'reset_parameters'):\n            m.reset_parameters()\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {x for x in self.state_dict().keys() if 'attention_biases' in x}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^patch_embed',\n            blocks=r'^stages\\.(\\d+)' if coarse else [\n                (r'^stages\\.(\\d+).downsample', (0,)),\n                (r'^stages\\.(\\d+)\\.\\w+\\.(\\d+)', None),\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.linear\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            if global_pool == 'avg':\n                self.global_pool = SelectAdaptivePool2d(pool_type=global_pool, 
flatten=True)\n            else:\n                assert num_classes == 0\n                self.global_pool = nn.Identity()\n        self.head = NormLinear(\n            self.num_features, num_classes, drop=self.drop_rate) if num_classes > 0 else torch.nn.Identity()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.patch_embed(x)\n\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(stage, x)\n            else:\n                x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, 
intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.patch_embed(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stages, x)\n        else:\n            x = self.stages(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\n# def checkpoint_filter_fn(state_dict, model):\n#     if 'model' in state_dict.keys():\n#         state_dict = state_dict['model']\n#     tmp_dict = {}\n#     out_dict = {}\n#     target_keys = model.state_dict().keys()\n#     target_keys = [k for k in target_keys if k.startswith('stages.')]\n#\n#     for k, v in state_dict.items():\n#         if 'attention_bias_idxs' in k:\n#             continue\n#         k = k.split('.')\n#         if k[-2] == 'c':\n#             k[-2] = 'conv'\n#         if k[-2] == 'l':\n#             k[-2] = 'linear'\n#         k = '.'.join(k)\n#         tmp_dict[k] = v\n#\n#     for k, v in tmp_dict.items():\n#         if k.startswith('patch_embed'):\n#             k = k.split('.')\n#             k[1] = 'conv' + str(int(k[1]) // 2 + 1)\n#             k = '.'.join(k)\n#         elif k.startswith('blocks'):\n#             kw = '.'.join(k.split('.')[2:])\n#             find_kw = [a for 
a in list(sorted(tmp_dict.keys())) if kw in a]\n#             idx = find_kw.index(k)\n#             k = [a for a in target_keys if kw in a][idx]\n#         out_dict[k] = v\n#\n#     return out_dict\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000,\n        'mean': IMAGENET_DEFAULT_MEAN,\n        'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.conv1.conv',\n        'classifier': 'head.linear',\n        'fixed_input_size': True,\n        'pool_size': (4, 4),\n        'license': 'mit',\n        **kwargs,\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'efficientvit_m0.r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/xinyuliu-jeffrey/EfficientVit_Model_Zoo/releases/download/v1.0/efficientvit_m0.pth'\n    ),\n    'efficientvit_m1.r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/xinyuliu-jeffrey/EfficientVit_Model_Zoo/releases/download/v1.0/efficientvit_m1.pth'\n    ),\n    'efficientvit_m2.r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/xinyuliu-jeffrey/EfficientVit_Model_Zoo/releases/download/v1.0/efficientvit_m2.pth'\n    ),\n    'efficientvit_m3.r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/xinyuliu-jeffrey/EfficientVit_Model_Zoo/releases/download/v1.0/efficientvit_m3.pth'\n    ),\n    'efficientvit_m4.r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/xinyuliu-jeffrey/EfficientVit_Model_Zoo/releases/download/v1.0/efficientvit_m4.pth'\n    ),\n    'efficientvit_m5.r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/xinyuliu-jeffrey/EfficientVit_Model_Zoo/releases/download/v1.0/efficientvit_m5.pth'\n    ),\n})\n\n\ndef _create_efficientvit_msra(variant, pretrained=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', (0, 1, 2))\n    model = build_model_with_cfg(\n        EfficientVitMsra,\n        
variant,\n        pretrained,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        **kwargs\n    )\n    return model\n\n\n@register_model\ndef efficientvit_m0(pretrained=False, **kwargs):\n    model_args = dict(\n        img_size=224,\n        embed_dim=[64, 128, 192],\n        depth=[1, 2, 3],\n        num_heads=[4, 4, 4],\n        window_size=[7, 7, 7],\n        kernels=[5, 5, 5, 5]\n    )\n    return _create_efficientvit_msra('efficientvit_m0', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef efficientvit_m1(pretrained=False, **kwargs):\n    model_args = dict(\n        img_size=224,\n        embed_dim=[128, 144, 192],\n        depth=[1, 2, 3],\n        num_heads=[2, 3, 3],\n        window_size=[7, 7, 7],\n        kernels=[7, 5, 3, 3]\n    )\n    return _create_efficientvit_msra('efficientvit_m1', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef efficientvit_m2(pretrained=False, **kwargs):\n    model_args = dict(\n        img_size=224,\n        embed_dim=[128, 192, 224],\n        depth=[1, 2, 3],\n        num_heads=[4, 3, 2],\n        window_size=[7, 7, 7],\n        kernels=[7, 5, 3, 3]\n    )\n    return _create_efficientvit_msra('efficientvit_m2', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef efficientvit_m3(pretrained=False, **kwargs):\n    model_args = dict(\n        img_size=224,\n        embed_dim=[128, 240, 320],\n        depth=[1, 2, 3],\n        num_heads=[4, 3, 4],\n        window_size=[7, 7, 7],\n        kernels=[5, 5, 5, 5]\n    )\n    return _create_efficientvit_msra('efficientvit_m3', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef efficientvit_m4(pretrained=False, **kwargs):\n    model_args = dict(\n        img_size=224,\n        embed_dim=[128, 256, 384],\n        depth=[1, 2, 3],\n        num_heads=[4, 4, 4],\n        window_size=[7, 7, 7],\n        kernels=[7, 5, 3, 3]\n    )\n    return 
_create_efficientvit_msra('efficientvit_m4', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef efficientvit_m5(pretrained=False, **kwargs):\n    model_args = dict(\n        img_size=224,\n        embed_dim=[192, 288, 384],\n        depth=[1, 3, 4],\n        num_heads=[3, 3, 4],\n        window_size=[7, 7, 7],\n        kernels=[7, 5, 3, 3]\n    )\n    return _create_efficientvit_msra('efficientvit_m5', pretrained=pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/eva.py",
    "content": "\"\"\" EVA\n\nEVA ViT from https://github.com/baaivision/EVA , paper: https://arxiv.org/abs/2211.07636\n\nThis file contains a number of ViT variants the utilise ROPE position embeddings, SwiGLU and other additions:\n * EVA & EVA02 model implementations that evolved from BEiT, additional models in vision_transformer.py.\n * `timm` original SBB ViT w/ ROPE position embeddings\n * Perception Encoder (PE) ViT from Meta (https://arxiv.org/abs/2504.13181)\n * ROPE-ViT from Naver AI (https://arxiv.org/abs/2403.13298)\n * DINOv3 from META AI Research (https://arxiv.org/abs/2508.10104)\n\n@article{EVA,\n  title={EVA: Exploring the Limits of Masked Visual Representation Learning at Scale},\n  author={Fang, Yuxin and Wang, Wen and Xie, Binhui and Sun, Quan and Wu, Ledell and Wang, Xinggang and Huang,\n  Tiejun and Wang, Xinlong and Cao, Yue},\n  journal={arXiv preprint arXiv:2211.07636},\n  year={2022}\n}\n\nEVA-02: A Visual Representation for Neon Genesis - https://arxiv.org/abs/2303.11331\n@article{EVA02,\n  title={EVA-02: A Visual Representation for Neon Genesis},\n  author={Fang, Yuxin and Sun, Quan and Wang, Xinggang and Huang, Tiejun and Wang, Xinlong and Cao, Yue},\n  journal={arXiv preprint arXiv:2303.11331},\n  year={2023}\n}\n\n@article{bolya2025perception,\n  title={Perception encoder: The best visual embeddings are not at the output of the network},\n  author={Bolya, Daniel and Huang, Po-Yao and Sun, Peize and Cho, Jang Hyun and Madotto, Andrea and Wei, Chen and Ma,\n    Tengyu and Zhi, Jiale and Rajasegaran, Jathushan and Rasheed, Hanoona and others},\n  journal={arXiv preprint arXiv:2504.13181},\n  year={2025}\n}\n\n@inproceedings{heo2024rotary,\n  title={Rotary position embedding for vision transformer},\n  author={Heo, Byeongho and Park, Song and Han, Dongyoon and Yun, Sangdoo},\n  booktitle={European Conference on Computer Vision},\n  pages={289--305},\n  year={2024},\n  organization={Springer}\n}\n\n@article{simeoni2025dinov3,\n  
title={{DINOv3}},\n  author={Sim{\\'e}oni, Oriane and Vo, Huy V. and Seitzer, Maximilian and Baldassarre, Federico and Oquab, Maxime\n    and Jose, Cijo and Khalidov, Vasil and Szafraniec, Marc and Yi, Seungeun and Ramamonjisoa, Micha{\\\"e}l\n    and Massa, Francisco and Haziza, Daniel and Wehrstedt, Luca and Wang, Jianyuan and Darcet, Timoth{\\'e}e\n    and Moutakanni, Th{\\'e}o and Sentana, Leonel and Roberts, Claire and Vedaldi, Andrea and Tolan, Jamie\n    and Brandt, John and Couprie, Camille and Mairal, Julien and J{\\'e}gou, Herv{\\'e} and Labatut, Patrick\n    and Bojanowski, Piotr},\n  year={2025},\n  eprint={2508.10104},\n  url={https://arxiv.org/abs/2508.10104},\n}\n\nDINOv3 code was a modification of existing EVA model and support modules, so licensed under Apache-2.0 like timm.\nWeights from META remain under DINOv3 License (https://ai.meta.com/resources/models-and-libraries/dinov3-license/).\n\nModifications by / Copyright 2023 Ross Wightman, original copyrights below\n\"\"\"\n# EVA models Copyright (c) 2022 BAAI-Vision\n# EVA02 models Copyright (c) 2023 BAAI-Vision\nimport math\nimport os\nfrom functools import partial\nfrom typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, OPENAI_CLIP_MEAN, OPENAI_CLIP_STD\nfrom timm.layers import (\n    PatchEmbed,\n    Mlp,\n    GluMlp,\n    SwiGLU,\n    LayerNorm,\n    DropPath, calculate_drop_path_rates,\n    PatchDropoutWithIndices,\n    create_rope_embed,\n    apply_rot_embed_cat,\n    apply_keep_indices_nlc,\n    trunc_normal_,\n    resample_patch_embed,\n    resample_abs_pos_embed,\n    global_pool_nlc,\n    to_2tuple,\n    use_fused_attn,\n    maybe_add_mask,\n    resolve_self_attn_mask,\n    AttentionRope,\n    AttentionPoolLatent,\n)\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import 
checkpoint\nfrom ._registry import generate_default_cfgs, register_model\n\n__all__ = ['Eva']\n\n\nclass EvaAttention(nn.Module):\n    \"\"\" EVA Attention with ROPE, no k-bias, and fused/unfused qkv options\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = True,\n            qkv_fused: bool = True,\n            qkv_bias_separate: bool = False,\n            num_prefix_tokens: int = 1,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            attn_head_dim: Optional[int] = None,\n            norm_layer: Optional[Callable] = None,\n            qk_norm: bool = False,\n            scale_norm: bool = True,\n            rotate_half: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            dim: Input dimension of the token embeddings\n            num_heads: Number of attention heads\n            qkv_bias: Whether to add a bias term to the query, key, and value projections\n            qkv_fused: Whether qkv projections are fused into one projection or separate\n            qkv_bias_separate: Whether to apply bias to qkv as a separate addition or part of F.linear() call\n            num_prefix_tokens: Number of reg/cls tokens at the beginning of the sequence that\n                should not have position embeddings applied\n            attn_drop: Dropout rate for attention weights\n            proj_drop: Dropout rate for the output projection\n            attn_head_dim: Dimension of each attention head (if None, computed as dim // num_heads)\n            norm_layer: Normalization layer constructor to use for QK and scale normalization\n            qk_norm: Enable normalization of query (Q) and key (K) vectors with norm_layer\n            scale_norm: Enable normalization (scaling) of attention output with norm_layer\n            rotate_half: Use half rotation 
layout instead of interleaved\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if scale_norm or qk_norm:\n            assert norm_layer is not None, 'norm_layer must be provided if qk_norm or scale_norm is True'\n        self.num_heads = num_heads\n        self.head_dim = dim // num_heads\n        if attn_head_dim is not None:\n            self.head_dim = attn_head_dim\n        attn_dim = self.head_dim * self.num_heads\n        self.scale = self.head_dim ** -0.5\n        self.num_prefix_tokens = num_prefix_tokens\n        self.fused_attn = use_fused_attn()\n        self.qkv_bias_separate = qkv_bias_separate\n        self.rotate_half = rotate_half\n\n        if qkv_fused:\n            self.qkv = nn.Linear(dim, attn_dim * 3, bias=False, **dd)\n            self.q_proj = self.k_proj = self.v_proj = None\n            if qkv_bias:\n                self.q_bias = nn.Parameter(torch.empty(attn_dim, **dd))\n                self.register_buffer('k_bias', torch.empty(attn_dim, **dd), persistent=False)\n                self.v_bias = nn.Parameter(torch.empty(attn_dim, **dd))\n            else:\n                self.q_bias = self.k_bias = self.v_bias = None\n        else:\n            self.q_proj = nn.Linear(dim, attn_dim, bias=qkv_bias, **dd)\n            self.k_proj = nn.Linear(dim, attn_dim, bias=False, **dd)\n            self.v_proj = nn.Linear(dim, attn_dim, bias=qkv_bias, **dd)\n            self.qkv = None\n            self.q_bias = self.k_bias = self.v_bias = None\n        self.q_norm = norm_layer(self.head_dim, **dd) if qk_norm else nn.Identity()\n        self.k_norm = norm_layer(self.head_dim, **dd) if qk_norm else nn.Identity()\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.norm = norm_layer(attn_dim, **dd) if scale_norm else nn.Identity()\n        self.proj = nn.Linear(attn_dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n        # TODO: skip init when on meta device when safe to do 
so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        if self.q_bias is not None:\n            nn.init.zeros_(self.q_bias)\n            nn.init.zeros_(self.v_bias)\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        if self.k_bias is not None:\n            self.k_bias.zero_()\n\n    def forward(\n            self,\n            x,\n            rope: Optional[torch.Tensor] = None,\n            attn_mask: Optional[torch.Tensor] = None,\n            is_causal: bool = False,\n    ):\n        \"\"\"Forward pass for the attention module.\n\n        Args:\n            x: Input tensor of shape (batch_size, sequence_length, embedding_dim)\n            rope: Rotary position embeddings tensor for position-aware attention\n            attn_mask: Optional attention mask to apply during attention computation\n            is_causal: If True, use causal (autoregressive) masking\n\n        Returns:\n            Tensor of shape (batch_size, sequence_length, embedding_dim)\n        \"\"\"\n        B, N, C = x.shape\n\n        if self.qkv is not None:\n            if self.q_bias is None:\n                qkv = self.qkv(x)\n            else:\n                qkv_bias = torch.cat((self.q_bias, self.k_bias, self.v_bias))\n                if self.qkv_bias_separate:\n                    qkv = self.qkv(x)\n                    qkv += qkv_bias\n                else:\n                    qkv = F.linear(x, weight=self.qkv.weight, bias=qkv_bias)\n            qkv = qkv.reshape(B, N, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4)\n            q, k, v = qkv.unbind(0)  # B, num_heads, N, head_dim\n        else:\n            q = self.q_proj(x).reshape(B, N, self.num_heads, -1).transpose(1, 2)  # B, num_heads, N, C\n            k = self.k_proj(x).reshape(B, N, self.num_heads, -1).transpose(1, 2)\n            v = 
self.v_proj(x).reshape(B, N, self.num_heads, -1).transpose(1, 2)\n\n        q, k = self.q_norm(q), self.k_norm(k)\n\n        if rope is not None:\n            npt = self.num_prefix_tokens\n            half = getattr(self, 'rotate_half', False)\n            q = torch.cat([q[:, :, :npt, :], apply_rot_embed_cat(q[:, :, npt:, :], rope, half=half)], dim=2).type_as(v)\n            k = torch.cat([k[:, :, :npt, :], apply_rot_embed_cat(k[:, :, npt:, :], rope, half=half)], dim=2).type_as(v)\n\n        if self.fused_attn:\n            x = F.scaled_dot_product_attention(\n                q, k, v,\n                attn_mask=attn_mask,\n                dropout_p=self.attn_drop.p if self.training else 0.,\n                is_causal=is_causal,\n            )\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            attn_bias = resolve_self_attn_mask(N, attn, attn_mask, is_causal=is_causal)\n            attn = maybe_add_mask(attn, attn_bias)\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x = attn @ v\n\n        x = x.transpose(1, 2).reshape(B, N, C)\n        x = self.norm(x)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n\nclass EvaBlock(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            qkv_bias: bool = True,\n            qkv_fused: bool = True,\n            mlp_ratio: float = 4.,\n            swiglu_mlp: bool = False,\n            swiglu_align_to: int = 0,\n            scale_mlp: bool = False,\n            scale_attn_inner: bool = False,\n            num_prefix_tokens: int = 1,\n            attn_type: str = 'eva',\n            rotate_half: bool = False,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 
0.,\n            init_values: Optional[float] = None,\n            act_layer: Callable = nn.GELU,\n            norm_layer: Callable = LayerNorm,\n            attn_head_dim: Optional[int] = None,\n            device=None,\n            dtype=None,\n            **kwargs,\n    ):\n        \"\"\" Initialize the EVA transformer block.\n\n        Args:\n          dim: Input dimension of the token embeddings\n            num_heads: Number of attention heads\n            qkv_bias: Whether to use bias terms in query, key, value projections\n            qkv_fused: Whether to use a single projection for query, key, value\n            mlp_ratio: Ratio of MLP hidden dimension to input dimension\n            swiglu_mlp: Whether to use SwiGLU activation in the MLP\n            scale_mlp: Whether to use normalization in the MLP\n            scale_attn_inner: Whether to use normalization within the attention mechanism\n            num_prefix_tokens: Number of tokens at the beginning of the sequence (class tokens, etc.)\n            attn_type: Type of attention module to use ('eva' or 'rope')\n            proj_drop: Dropout rate for projection layers\n            attn_drop: Dropout rate for attention matrix\n            drop_path: Stochastic depth rate\n            init_values: Initial value for LayerScale, None = no LayerScale\n            act_layer: Activation layer constructor\n            norm_layer: Normalization layer constructor\n            attn_head_dim: Dimension of each attention head (if None, computed as dim // num_heads)\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.norm1 = norm_layer(dim, **dd)\n        attn_cls = AttentionRope if attn_type == 'rope' else EvaAttention\n        self.attn = attn_cls(\n            dim,\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            qkv_fused=qkv_fused,\n            num_prefix_tokens=num_prefix_tokens,\n            attn_drop=attn_drop,\n          
  proj_drop=proj_drop,\n            attn_head_dim=attn_head_dim,\n            norm_layer=norm_layer,\n            scale_norm=scale_attn_inner,\n            rotate_half=rotate_half,\n            **dd,\n        )\n        self.init_values = init_values\n        self.gamma_1 = nn.Parameter(torch.empty(dim, **dd)) if init_values is not None else None\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm2 = norm_layer(dim, **dd)\n        hidden_features = int(dim * mlp_ratio)\n        if swiglu_mlp:\n            if scale_mlp or swiglu_align_to:\n                # when norm in SwiGLU used or alignment enabled, an impl with separate fc for gate & x is used\n                self.mlp = SwiGLU(\n                    in_features=dim,\n                    hidden_features=hidden_features,\n                    norm_layer=norm_layer if scale_mlp else None,\n                    drop=proj_drop,\n                    align_to=swiglu_align_to,\n                    **dd,\n                )\n            else:\n                # w/o any extra norm, an impl with packed weights is used\n                self.mlp = GluMlp(\n                    in_features=dim,\n                    hidden_features=hidden_features * 2,\n                    norm_layer=norm_layer if scale_mlp else None,\n                    act_layer=nn.SiLU,\n                    gate_last=False,\n                    drop=proj_drop,\n                    **dd,\n                )\n        else:\n            self.mlp = Mlp(\n                in_features=dim,\n                hidden_features=hidden_features,\n                act_layer=act_layer,\n                norm_layer=norm_layer if scale_mlp else None,\n                drop=proj_drop,\n                **dd,\n            )\n        self.gamma_2 = nn.Parameter(torch.empty(dim, **dd)) if init_values is not None else None\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters.\"\"\"\n        if self.gamma_1 is not None:\n            nn.init.constant_(self.gamma_1, self.init_values)\n            nn.init.constant_(self.gamma_2, self.init_values)\n\n    def forward(\n            self,\n            x: torch.Tensor,\n            rope: Optional[torch.Tensor] = None,\n            attn_mask: Optional[torch.Tensor] = None,\n            is_causal: bool = False,\n    ) -> torch.Tensor:\n        if self.gamma_1 is None:\n            x = x + self.drop_path1(self.attn(self.norm1(x), rope=rope, attn_mask=attn_mask, is_causal=is_causal))\n            x = x + self.drop_path2(self.mlp(self.norm2(x)))\n        else:\n            x = x + self.drop_path1(self.gamma_1 * self.attn(self.norm1(x), rope=rope, attn_mask=attn_mask, is_causal=is_causal))\n            x = x + self.drop_path2(self.gamma_2 * self.mlp(self.norm2(x)))\n        return x\n\n\nclass EvaBlockPostNorm(nn.Module):\n    \"\"\" EVA block w/ post-norm and support for swiglu, MLP norm scale, ROPE. 
\"\"\"\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            qkv_bias: bool = True,\n            qkv_fused: bool = True,\n            mlp_ratio: float = 4.,\n            attn_type: str = 'eva',\n            rotate_half: bool = False,\n            swiglu_mlp: bool = False,\n            swiglu_align_to: int = 0,\n            scale_mlp: bool = False,\n            scale_attn_inner: bool = False,\n            num_prefix_tokens: int = 1,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            init_values: Optional[float] = None,  # ignore for post-norm\n            act_layer: Callable = nn.GELU,\n            norm_layer: Callable = nn.LayerNorm,\n            attn_head_dim: Optional[int] = None,\n            device=None,\n            dtype=None,\n            **kwargs,\n    ):\n        \"\"\" Initialize the post-norm EVA transformer block.\n\n        Args:\n          dim: Input dimension of the token embeddings\n            num_heads: Number of attention heads\n            qkv_bias: Whether to use bias terms in query, key, value projections\n            qkv_fused: Whether to use a single projection for query, key, value\n            mlp_ratio: Ratio of MLP hidden dimension to input dimension\n            swiglu_mlp: Whether to use SwiGLU activation in the MLP\n            scale_mlp: Whether to use normalization in the MLP\n            scale_attn_inner: Whether to use normalization within the attention mechanism\n            num_prefix_tokens: Number of tokens at the beginning of the sequence (class tokens, etc.)\n            attn_type: Type of attention module to use ('eva' or 'rope')\n            proj_drop: Dropout rate for projection layers\n            attn_drop: Dropout rate for attention matrix\n            drop_path: Stochastic depth rate\n            init_values: Initial value for LayerScale, None = no LayerScale (NOTE: ignored for post-norm block)\n      
      act_layer: Activation layer constructor\n            norm_layer: Normalization layer constructor\n            attn_head_dim: Dimension of each attention head (if None, computed as dim // num_heads)\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        attn_cls = AttentionRope if attn_type == 'rope' else EvaAttention\n        self.attn = attn_cls(\n            dim,\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            qkv_fused=qkv_fused,\n            num_prefix_tokens=num_prefix_tokens,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            attn_head_dim=attn_head_dim,\n            norm_layer=norm_layer,\n            scale_norm=scale_attn_inner,\n            rotate_half=rotate_half,\n            **dd,\n        )\n        self.norm1 = norm_layer(dim, **dd)\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        hidden_features = int(dim * mlp_ratio)\n        if swiglu_mlp:\n            if scale_mlp:\n                # when norm in SwiGLU used, an impl with separate fc for gate & x is used\n                self.mlp = SwiGLU(\n                    in_features=dim,\n                    hidden_features=hidden_features,\n                    norm_layer=norm_layer if scale_mlp else None,\n                    drop=proj_drop,\n                    align_to=swiglu_align_to,\n                    **dd,\n                )\n            else:\n                # w/o any extra norm, an impl with packed fc1 weights is used, matches existing GluMLP\n                self.mlp = GluMlp(\n                    in_features=dim,\n                    hidden_features=hidden_features * 2,\n                    norm_layer=norm_layer if scale_mlp else None,\n                    act_layer=nn.SiLU,\n                    gate_last=False,\n                    drop=proj_drop,\n                    **dd,\n                )\n        else:\n            self.mlp = 
Mlp(\n                in_features=dim,\n                hidden_features=hidden_features,\n                act_layer=act_layer,\n                norm_layer=norm_layer if scale_mlp else None,\n                drop=proj_drop,\n                **dd,\n            )\n        self.norm2 = norm_layer(dim, **dd)\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def forward(\n            self,\n            x: torch.Tensor,\n            rope: Optional[torch.Tensor] = None,\n            attn_mask: Optional[torch.Tensor] = None,\n            is_causal: bool = False,\n    ) -> torch.Tensor:\n        x = x + self.drop_path1(self.norm1(self.attn(x, rope=rope, attn_mask=attn_mask, is_causal=is_causal)))\n        x = x + self.drop_path2(self.norm2(self.mlp(x)))\n        return x\n\n\nclass Eva(nn.Module):\n    \"\"\" Eva Vision Transformer w/ Abs & Rotary Pos Embed\n\n    This class implements the EVA and EVA02 models that were based on the BEiT ViT variant\n      * EVA - abs pos embed, global avg pool\n      * EVA02 - abs + rope pos embed, global avg pool, SwiGLU, scale Norm in MLP (ala normformer)\n    \"\"\"\n\n    def __init__(\n            self,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            patch_size: Union[int, Tuple[int, int]] = 16,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            embed_dim: int = 768,\n            depth: int = 12,\n            num_heads: int = 12,\n            qkv_bias: bool = True,\n            qkv_fused: bool = True,\n            mlp_ratio: float = 4.,\n            swiglu_mlp: bool = False,\n            swiglu_align_to: int = 0,\n            scale_mlp: bool = False,\n            scale_attn_inner: bool = False,\n            attn_type: str = 'eva',\n            drop_rate: float = 0.,\n            pos_drop_rate: float = 0.,\n            patch_drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            
attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            norm_layer: Callable = LayerNorm,\n            init_values: Optional[float] = None,\n            class_token: bool = True,\n            num_reg_tokens: int = 0,\n            no_embed_class: bool = False,\n            use_abs_pos_emb: bool = True,\n            use_rot_pos_emb: bool = False,\n            rope_type: Optional[str] = 'cat',\n            rope_grid_offset: float = 0.,\n            rope_grid_indexing: str = 'ij',\n            rope_temperature: float = 10000.,\n            rope_rotate_half: bool = False,\n            use_post_norm: bool = False,\n            use_pre_transformer_norm: bool = False,\n            use_post_transformer_norm: Optional[bool] = None,\n            use_fc_norm: Optional[bool] = None,\n            attn_pool_num_heads: Optional[int] = None,\n            attn_pool_mlp_ratio: Optional[float] = None,\n            dynamic_img_size: bool = False,\n            dynamic_img_pad: bool = False,\n            ref_feat_shape: Optional[Union[Tuple[int, int], int]] = None,\n            head_init_scale: float = 0.001,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize the EVA Vision Transformer model.\n\n        Args:\n            img_size: Input image size (single int for square, or tuple for rectangular)\n            patch_size: Patch size to divide image into tokens (single int for square, or tuple)\n            in_chans: Number of input image channels\n            num_classes: Number of classes (output dim) for classification head (final projection), 0 for pass-through\n            global_pool: Type of global pooling for final sequence ('avg', 'token', 'map', etc.)\n            embed_dim: Embedding dimension for tokens\n            depth: Number of transformer blocks\n            num_heads: Number of attention heads\n            qkv_bias: Enable bias for query, key, value projections\n            qkv_fused: Use a single projection 
for query, key, value\n            mlp_ratio: Ratio of mlp hidden dim to embedding dim\n            swiglu_mlp: Use SwiGLU activation in MLP\n            scale_mlp: Apply scaling normalization in MLP (normformer style)\n            scale_attn_inner: Apply scaling normalization inside attention\n            attn_type: Type of attention module to use\n            drop_rate: Dropout rate after final projection and pooling\n            pos_drop_rate: Dropout rate for positional embeddings\n            patch_drop_rate: Rate of dropping patches during training\n            proj_drop_rate: Dropout rate for projections\n            attn_drop_rate: Dropout rate for attention\n            drop_path_rate: Stochastic depth rate\n            norm_layer: Normalization layer constructor\n            init_values: Initial layer-scale values\n            class_token: Use class token\n            num_reg_tokens: Number of additional learnable 'register' tokens to add to the sequence\n            no_embed_class: Don't include position embeddings for class (or reg) tokens\n            use_abs_pos_emb: Use absolute (learned) positional embeddings\n            use_rot_pos_emb: Use rotary position embeddings\n            rope_type: Type of RoPE to use ('cat', 'mixed', 'dinov3', etc.).\n            rope_grid_offset: Offset for rotary position embedding grid\n            rope_grid_indexing: Indexing mode for rotary position embeddings ('ij' or 'xy')\n            rope_temperature: Temperature parameter for ROPE frequency computation\n            rope_rotate_half: Use half rotation layout (rotate D/2 dims), else use interleaved rotation layout\n            use_post_norm: Use post-norm transformer block type\n            use_pre_transformer_norm: Use normalization layer before transformer blocks\n            use_post_transformer_norm: Use normalization layer after transformer blocks\n            use_fc_norm: Use normalization layer after pooling, before final classifier\n            
attn_pool_num_heads: Number of heads in attention pooling\n            attn_pool_mlp_ratio: MLP ratio in attention pooling\n            dynamic_img_size: Support dynamic image sizes in forward pass\n            dynamic_img_pad: Apply dynamic padding for irregular image sizes\n            ref_feat_shape: Reference feature shape for rotary position embedding scale\n            head_init_scale: Initialization scale for classification head weights\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert global_pool in ('', 'avg', 'avgmax', 'max', 'token', 'map')\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.num_features = self.head_hidden_size = self.embed_dim = embed_dim  # for consistency with other models\n        self.num_prefix_tokens = (1 if class_token else 0) + num_reg_tokens\n        self.no_embed_class = no_embed_class\n        self.dynamic_img_size = dynamic_img_size\n        self.grad_checkpointing = False\n\n        # resolve norm / pool usage\n        activate_pre_norm = use_pre_transformer_norm\n        if use_fc_norm is not None:\n            activate_fc_norm = use_fc_norm  # pass through if explicit\n        else:\n            activate_fc_norm = global_pool == 'avg'  # default on if avg pool used\n        if use_post_transformer_norm is not None:\n            activate_post_norm = use_post_transformer_norm  # pass through if explicit\n        else:\n            activate_post_norm = not activate_fc_norm  # default on if fc_norm isn't active\n\n        embed_args = {}\n        if dynamic_img_size:\n            # flatten deferred until after pos embed\n            embed_args.update(dict(strict_img_size=False, output_fmt='NHWC'))\n        self.patch_embed = PatchEmbed(\n            img_size=img_size,\n            patch_size=patch_size,\n            in_chans=in_chans,\n            embed_dim=embed_dim,\n            
dynamic_img_pad=dynamic_img_pad,\n            bias=not use_pre_transformer_norm,\n            **embed_args,\n            **dd,\n        )\n        num_patches = self.patch_embed.num_patches\n        r = self.patch_embed.feat_ratio() if hasattr(self.patch_embed, 'feat_ratio') else patch_size\n\n        self.cls_token = nn.Parameter(torch.empty(1, 1, embed_dim, **dd)) if class_token else None\n        self.reg_token = nn.Parameter(torch.empty(1, num_reg_tokens, embed_dim, **dd)) if num_reg_tokens else None\n        self.cls_embed = class_token and self.reg_token is None\n\n        num_pos_tokens = num_patches if no_embed_class else num_patches + self.num_prefix_tokens\n        self.pos_embed = nn.Parameter(torch.empty(1, num_pos_tokens, embed_dim, **dd)) if use_abs_pos_emb else None\n        self.pos_drop = nn.Dropout(p=pos_drop_rate)\n        if patch_drop_rate > 0:\n            self.patch_drop = PatchDropoutWithIndices(patch_drop_rate, num_prefix_tokens=self.num_prefix_tokens)\n        else:\n            self.patch_drop = None\n\n        self.rope_mixed = False\n        if use_rot_pos_emb:\n            ref_feat_shape = to_2tuple(ref_feat_shape) if ref_feat_shape is not None else None\n\n            # Setup RoPE kwargs\n            rope_kwargs = dict(\n                dim=embed_dim,\n                num_heads=num_heads,\n                feat_shape=None if dynamic_img_size else self.patch_embed.grid_size,\n                temperature=rope_temperature,\n                grid_indexing=rope_grid_indexing,\n                **dd,\n            )\n            if rope_type == 'mixed':\n                rope_kwargs.update(dict(depth=depth))\n                self.rope_mixed = True\n            elif rope_type == 'cat':\n                rope_kwargs.update(dict(\n                    in_pixels=False,\n                    grid_offset=rope_grid_offset,\n                    ref_feat_shape=ref_feat_shape,\n                ))\n\n            self.rope = 
create_rope_embed(rope_type=rope_type, **rope_kwargs)\n        else:\n            self.rope = None\n\n        self.norm_pre = norm_layer(embed_dim, **dd) if activate_pre_norm else nn.Identity()\n\n        dpr = calculate_drop_path_rates(drop_path_rate, depth)  # stochastic depth decay rule\n        block_fn = EvaBlockPostNorm if use_post_norm else EvaBlock\n        self.blocks = nn.ModuleList([\n            block_fn(\n                dim=embed_dim,\n                num_heads=num_heads,\n                qkv_bias=qkv_bias,\n                qkv_fused=qkv_fused,\n                mlp_ratio=mlp_ratio,\n                swiglu_mlp=swiglu_mlp,\n                swiglu_align_to=swiglu_align_to,\n                scale_mlp=scale_mlp,\n                scale_attn_inner=scale_attn_inner,\n                attn_type=attn_type,\n                rotate_half=rope_rotate_half,\n                num_prefix_tokens=self.num_prefix_tokens,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dpr[i],\n                norm_layer=norm_layer,\n                init_values=init_values,\n                **dd,\n            )\n            for i in range(depth)])\n        self.feature_info = [\n            dict(module=f'blocks.{i}', num_chs=embed_dim, reduction=r) for i in range(depth)]\n\n        self.norm = norm_layer(embed_dim, **dd) if activate_post_norm else nn.Identity()\n\n        if global_pool == 'map':\n            self.attn_pool = AttentionPoolLatent(\n                self.embed_dim,\n                num_heads=attn_pool_num_heads or num_heads,\n                mlp_ratio=attn_pool_mlp_ratio or mlp_ratio,\n                norm_layer=norm_layer,\n                act_layer=nn.GELU,\n                **dd,\n            )\n        else:\n            self.attn_pool = None\n        self.fc_norm = norm_layer(embed_dim, **dd) if activate_fc_norm else nn.Identity()\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = 
nn.Linear(embed_dim, num_classes, **dd) if num_classes > 0 else nn.Identity()\n        self.head_init_scale = head_init_scale\n\n        # TODO: skip init when on meta device when safe to do so\n        self.init_weights(needs_reset=False)\n\n    def init_weights(self, needs_reset: bool = True):\n        self.apply(partial(self._init_weights, needs_reset=needs_reset))\n        if self.pos_embed is not None:\n            trunc_normal_(self.pos_embed, std=.02)\n        if self.cls_token is not None:\n            trunc_normal_(self.cls_token, std=.02)\n        if self.reg_token is not None:\n            trunc_normal_(self.reg_token, std=.02)\n\n        self.fix_init_weight()\n\n        if self.head_init_scale and isinstance(self.head, nn.Linear):\n            trunc_normal_(self.head.weight, std=.02)\n            with torch.no_grad():\n                self.head.weight.mul_(self.head_init_scale)\n                self.head.bias.mul_(self.head_init_scale)\n\n    def fix_init_weight(self) -> None:\n        \"\"\"Fix initialization weights by rescaling based on layer depth.\"\"\"\n        with torch.no_grad():\n            for layer_id, layer in enumerate(self.blocks):\n                scale = math.sqrt(2.0 * (layer_id + 1))\n                layer.attn.proj.weight.div_(scale)\n                layer.mlp.fc2.weight.div_(scale)\n\n    def _init_weights(self, m: nn.Module, needs_reset: bool = True) -> None:\n        \"\"\"Initialize weights for Linear layers and call reset_parameters on modules.\n\n        Args:\n            m: Module to initialize.\n            needs_reset: Whether to call reset_parameters() on modules.\n        \"\"\"\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=.02)\n            if m.bias is not None:\n                nn.init.zeros_(m.bias)\n        elif needs_reset and hasattr(m, 'reset_parameters') and m is not self:\n            m.reset_parameters()\n\n    @torch.jit.ignore\n    def no_weight_decay(self) -> Set[str]:\n    
    \"\"\"Parameters to exclude from weight decay.\"\"\"\n        nwd = {'pos_embed', 'cls_token'}\n        if (rope := getattr(self, \"rope\", None)) and hasattr(rope, \"no_weight_decay\"):\n            return nwd | {f\"rope.{p}\" for p in rope.no_weight_decay()}\n        return nwd\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Enable or disable gradient checkpointing.\"\"\"\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        \"\"\"Create layer groupings for optimization.\"\"\"\n        matcher = dict(\n            stem=r'^cls_token|pos_embed|patch_embed',  # stem and embed\n            blocks=[(r'^blocks\\.(\\d+)', None), (r'^norm', (99999,))],\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None) -> None:\n        \"\"\"Reset the classifier head.\n\n        Args:\n            num_classes: Number of output classes.\n            global_pool: Global pooling type.\n        \"\"\"\n        self.num_classes = num_classes\n        if global_pool is not None:\n            self.global_pool = global_pool\n        self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity()\n\n    def set_input_size(\n            self,\n            img_size: Optional[Tuple[int, int]] = None,\n            patch_size: Optional[Tuple[int, int]] = None,\n    ) -> None:\n        \"\"\"Update the input image resolution and patch size.\n\n        Args:\n            img_size: New input resolution, if None current resolution is used.\n            patch_size: New patch size, if None existing patch size is used.\n        \"\"\"\n        prev_grid_size = self.patch_embed.grid_size\n        self.patch_embed.set_input_size(img_size=img_size, 
patch_size=patch_size)\n\n        if self.pos_embed is not None:\n            num_prefix_tokens = 0 if self.no_embed_class else self.num_prefix_tokens\n            num_new_tokens = self.patch_embed.num_patches + num_prefix_tokens\n            if num_new_tokens != self.pos_embed.shape[1]:\n                self.pos_embed = nn.Parameter(resample_abs_pos_embed(\n                    self.pos_embed,\n                    new_size=self.patch_embed.grid_size,\n                    old_size=prev_grid_size,\n                    num_prefix_tokens=num_prefix_tokens,\n                    verbose=True,\n                ))\n\n        if self.rope is not None:\n            self.rope.update_feat_shape(self.patch_embed.grid_size)\n\n    def _pos_embed(self, x) -> Tuple[torch.Tensor, Optional[torch.Tensor]]:\n        if self.dynamic_img_size:\n            B, H, W, C = x.shape\n            if self.pos_embed is not None:\n                prev_grid_size = self.patch_embed.grid_size\n                pos_embed = resample_abs_pos_embed(\n                    self.pos_embed,\n                    new_size=(H, W),\n                    old_size=prev_grid_size,\n                    num_prefix_tokens=0 if self.no_embed_class else self.num_prefix_tokens,\n                )\n            else:\n                pos_embed = None\n            x = x.view(B, -1, C)\n            rot_pos_embed = self.rope.get_embed(shape=(H, W)) if self.rope is not None else None\n        else:\n            pos_embed = self.pos_embed\n            rot_pos_embed = self.rope.get_embed() if self.rope is not None else None\n\n        to_cat = []\n        if self.cls_token is not None:\n            to_cat.append(self.cls_token.expand(x.shape[0], -1, -1))\n        if self.reg_token is not None:\n            to_cat.append(self.reg_token.expand(x.shape[0], -1, -1))\n\n        if self.no_embed_class:\n            # position embedding does not overlap with class / reg token\n            if pos_embed is not None:\n                x = x 
+ pos_embed\n            if to_cat:\n                x = torch.cat(to_cat + [x], dim=1)\n        else:\n            # pos_embed has entry for class / reg token, concat then add\n            if to_cat:\n                x = torch.cat(to_cat + [x], dim=1)\n            if pos_embed is not None:\n                x = x + pos_embed\n\n        x = self.pos_drop(x)\n\n        # apply patch dropout to patches and rotary position embedding\n        if self.patch_drop is not None:\n            x, keep_indices = self.patch_drop(x)\n            if rot_pos_embed is not None and keep_indices is not None:\n                rot_pos_embed = apply_keep_indices_nlc(x, rot_pos_embed, keep_indices)\n                # After applying keep indices to rope embeds, batch dim is added\n                if getattr(self, 'rope_mixed', False):\n                    # B, D, nH, N, dim -> D, B, nH, N, dim. For consistent iteration over depth at index 0.\n                    rot_pos_embed = rot_pos_embed.transpose(0, 1)\n                else:\n                    # B, N, dim -> B, 1, N, dim.  
Need head dim singleton for correct dim alignment in axial mode.\n                    rot_pos_embed = rot_pos_embed.unsqueeze(1)\n\n        return x, rot_pos_embed\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            return_prefix_tokens: bool = False,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n            attn_mask: Optional[torch.Tensor] = None,\n            is_causal: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if an int, if is a sequence, select by matching indices\n            return_prefix_tokens: Return both prefix and spatial intermediate tokens\n            norm: Apply norm layer to all intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n            attn_mask: Optional attention mask for masked attention\n            is_causal: If True, use causal (autoregressive) masking in attention\n        \"\"\"\n        assert output_fmt in ('NCHW', 'NLC'), 'Output format for EVA-ViT features must be one of NCHW or NLC.'\n        reshape = output_fmt == 'NCHW'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n\n        # forward pass\n        B, _, height, width = x.shape\n        x = self.patch_embed(x)\n        x, rot_pos_embed = self._pos_embed(x)\n        x = self.norm_pre(x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            blocks = self.blocks\n        
else:\n            blocks = self.blocks[:max_index + 1]\n\n        # Handle depth-dependent embeddings for mixed mode\n        if getattr(self, 'rope_mixed', False) and rot_pos_embed is not None:\n            for i, blk in enumerate(blocks):\n                if self.grad_checkpointing and not torch.jit.is_scripting():\n                    x = checkpoint(blk, x, rope=rot_pos_embed[i], attn_mask=attn_mask, is_causal=is_causal)\n                else:\n                    x = blk(x, rope=rot_pos_embed[i], attn_mask=attn_mask, is_causal=is_causal)\n                if i in take_indices:\n                    intermediates.append(self.norm(x) if norm else x)\n        else:\n            for i, blk in enumerate(blocks):\n                if self.grad_checkpointing and not torch.jit.is_scripting():\n                    x = checkpoint(blk, x, rope=rot_pos_embed, attn_mask=attn_mask, is_causal=is_causal)\n                else:\n                    x = blk(x, rope=rot_pos_embed, attn_mask=attn_mask, is_causal=is_causal)\n                if i in take_indices:\n                    intermediates.append(self.norm(x) if norm else x)\n\n        # process intermediates\n        if self.num_prefix_tokens:\n            # split prefix (e.g. 
class, distill) and spatial feature tokens\n            prefix_tokens = [y[:, 0:self.num_prefix_tokens] for y in intermediates]\n            intermediates = [y[:, self.num_prefix_tokens:] for y in intermediates]\n        if reshape:\n            # reshape to BCHW output format\n            H, W = self.patch_embed.dynamic_feat_size((height, width))\n            intermediates = [y.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous() for y in intermediates]\n        if not torch.jit.is_scripting() and return_prefix_tokens:\n            # return_prefix not support in torchscript due to poor type handling\n            intermediates = list(zip(intermediates, prefix_tokens))\n\n        if intermediates_only:\n            return intermediates\n\n        x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n        self.blocks = self.blocks[:max_index + 1]  # truncate blocks\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.attn_pool = None\n            self.fc_norm = nn.Identity()\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def pool(self, x: torch.Tensor, pool_type: Optional[str] = None) -> torch.Tensor:\n        if self.attn_pool is not None:\n            x = self.attn_pool(x)\n            return x\n        pool_type = self.global_pool if pool_type is None else pool_type\n        x = global_pool_nlc(x, pool_type=pool_type, num_prefix_tokens=self.num_prefix_tokens)\n        return x\n\n    def forward_features(\n            self,\n            x: torch.Tensor,\n            attn_mask: Optional[torch.Tensor] = None,\n            
is_causal: bool = False,\n    ) -> torch.Tensor:\n        \"\"\"Forward pass through feature extraction layers.\n\n        Args:\n            x: Input tensor.\n            attn_mask: Optional attention mask for masked attention\n            is_causal: If True, use causal (autoregressive) masking in attention.\n\n        Returns:\n            Feature tensor.\n        \"\"\"\n        x = self.patch_embed(x)\n        x, rot_pos_embed = self._pos_embed(x)\n        x = self.norm_pre(x)\n\n        if getattr(self, 'rope_mixed', False) and rot_pos_embed is not None:\n            # Handle depth-dependent embeddings for mixed mode\n            # pos embed has shape (depth, num_heads, H*W, dim) or (depth, batch_size, num_heads, H*W, dim)\n            for i, blk in enumerate(self.blocks):\n                if self.grad_checkpointing and not torch.jit.is_scripting():\n                    x = checkpoint(blk, x, rope=rot_pos_embed[i], attn_mask=attn_mask, is_causal=is_causal)\n                else:\n                    x = blk(x, rope=rot_pos_embed[i], attn_mask=attn_mask, is_causal=is_causal)\n        else:\n            # Standard path for non-mixed mode\n            for blk in self.blocks:\n                if self.grad_checkpointing and not torch.jit.is_scripting():\n                    x = checkpoint(blk, x, rope=rot_pos_embed, attn_mask=attn_mask, is_causal=is_causal)\n                else:\n                    x = blk(x, rope=rot_pos_embed, attn_mask=attn_mask, is_causal=is_causal)\n\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        \"\"\"Forward pass through classifier head.\n\n        Args:\n            x: Feature tensor.\n            pre_logits: Return pre-logits if True.\n\n        Returns:\n            Output tensor.\n        \"\"\"\n        x = self.pool(x)\n        x = self.fc_norm(x)\n        x = self.head_drop(x)\n        return x if pre_logits else self.head(x)\n\n    def 
forward(\n            self,\n            x: torch.Tensor,\n            attn_mask: Optional[torch.Tensor] = None,\n            is_causal: bool = False,\n    ) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n            attn_mask: Optional attention mask for masked attention\n            is_causal: If True, use causal (autoregressive) masking in attention.\n\n        Returns:\n            Output tensor.\n        \"\"\"\n        x = self.forward_features(x, attn_mask=attn_mask, is_causal=is_causal)\n        x = self.forward_head(x)\n        return x\n\n\ndef _convert_pe(\n    state_dict: Dict[str, torch.Tensor],\n    model: nn.Module,\n    prefix: str = 'visual.',\n) -> Dict[str, torch.Tensor]:\n    \"\"\"Convert Perception Encoder weights.\n\n    Args:\n        state_dict: State dictionary to convert.\n        model: Target model instance.\n        prefix: Prefix to strip from keys.\n\n    Returns:\n        Converted state dictionary.\n    \"\"\"\n    state_dict = state_dict.get('model', state_dict)\n    state_dict = {k.replace(\"module.\", \"\"): v for k, v in state_dict.items()}\n\n    out_dict = {}\n    swaps = [\n        ('conv1', 'patch_embed.proj'),\n        ('positional_embedding', 'pos_embed'),\n        ('transformer.resblocks.', 'blocks.'),\n        ('ln_pre', 'norm_pre'),\n        ('ln_post', 'norm'),\n        ('ln_', 'norm'),\n        ('ls_1.gamma', 'gamma_1'),\n        ('ls_2.gamma', 'gamma_2'),\n        ('in_proj_', 'qkv.'),\n        ('out_proj', 'proj'),\n        ('mlp.c_fc', 'mlp.fc1'),\n        ('mlp.c_proj', 'mlp.fc2'),\n    ]\n    len_prefix = len(prefix)\n    for k, v in state_dict.items():\n        if prefix:\n            if not k.startswith(prefix):\n                continue\n            k = k[len_prefix:]\n\n        for sp in swaps:\n            k = k.replace(sp[0], sp[1])\n\n        if k.startswith('attn_pool'):\n            k = k.replace('attn_pool.attn', 'attn_pool')\n            k = 
k.replace('attn_pool.layernorm', 'attn_pool.norm')\n            k = k.replace('attn_pool.probe', 'attn_pool.latent')\n            if k.startswith('attn_pool.qkv'):\n                dim = v.shape[0] // 3\n                if k.endswith('weight'):\n                    out_dict['attn_pool.q.weight'] = v[:dim]\n                    out_dict['attn_pool.kv.weight'] = v[dim:]\n                elif k.endswith('bias'):\n                    out_dict['attn_pool.q.bias'] = v[:dim]\n                    out_dict['attn_pool.kv.bias'] = v[dim:]\n                continue\n        elif k == 'proj':\n            k = 'head.weight'\n            v = v.transpose(0, 1)\n            out_dict['head.bias'] = torch.zeros(v.shape[0])\n        elif k == 'class_embedding':\n            k = 'cls_token'\n            v = v.unsqueeze(0).unsqueeze(1)\n        elif k == 'pos_embed':\n            v = v.unsqueeze(0)\n        out_dict[k] = v\n\n    return out_dict\n\n\ndef checkpoint_filter_fn(\n        state_dict: Dict[str, torch.Tensor],\n        model: nn.Module,\n        interpolation: str = 'bicubic',\n        antialias: bool = True,\n) -> Dict[str, torch.Tensor]:\n    \"\"\"Convert patch embedding weight from manual patchify + linear proj to conv.\n\n    Args:\n        state_dict: Checkpoint state dictionary.\n        model: Target model instance.\n        interpolation: Interpolation method for resizing.\n        antialias: Whether to use antialiasing when resizing.\n\n    Returns:\n        Filtered state dictionary.\n    \"\"\"\n    out_dict = {}\n    # Standard EVA checkpoint processing\n    state_dict = state_dict.get('model_ema', state_dict)\n    state_dict = state_dict.get('model', state_dict)\n    state_dict = state_dict.get('module', state_dict)\n    state_dict = state_dict.get('state_dict', state_dict)\n\n    # Loading Meta PE (Perception Encoder) weights\n    if 'visual.conv1.weight' in state_dict:\n        return _convert_pe(state_dict, model)\n    elif 'conv1.weight' in state_dict:\n      
  return _convert_pe(state_dict, model, prefix='')\n\n    # prefix for loading OpenCLIP compatible weights\n    if 'visual.trunk.pos_embed' in state_dict:\n        prefix = 'visual.trunk.'\n    elif 'visual.pos_embed' in state_dict:\n        prefix = 'visual.'\n    else:\n        prefix = ''\n\n    dinov3_weights = 'storage_tokens' in state_dict\n    mim_weights = not dinov3_weights and prefix + 'mask_token' in state_dict\n    no_qkv = prefix + 'blocks.0.attn.q_proj.weight' in state_dict\n\n    len_prefix = len(prefix)\n    for k, v in state_dict.items():\n        if prefix:\n            if not k.startswith(prefix):\n                continue\n            k = k[len_prefix:]\n\n        if 'rope' in k and not k == 'rope.freqs':\n            # fixed embedding no need to load buffer from checkpoint\n            continue\n\n        if dinov3_weights:\n            if any([k.endswith(f) for f in ['.periods', '.bias_mask', 'mask_token']]):\n                # discard unused/non-persistent/pretrain only params\n                continue\n            if k.startswith('local_cls_norm'):\n                # discard, only used for 7b dinov3 pretrain w/ local crops\n                continue\n            if k.endswith('qkv.bias'):\n                q_bias_k = k.replace('qkv.bias', 'q_bias')\n                try:\n                    # the distilled b,l,h models ended up with all zero biases, so timm\n                    # has both qkv_bias=True and qkv_bias=False impl, test which\n                    model.get_parameter(q_bias_k)\n                except Exception as e:\n                    print(e)\n                    # skip as target model has no bias parameter\n                    continue\n                # split bias into components and skip the k as its supposed to be fixed at 0\n                qv, kv, vv = v.chunk(3, dim=-1)\n                out_dict[q_bias_k] = qv\n                out_dict[k.replace('qkv.bias', 'v_bias')] = vv\n                continue\n            k = 
k.replace('ls1.gamma', 'gamma_1')  # match EVA ls naming\n            k = k.replace('ls2.gamma', 'gamma_2')  # match EVA ls naming\n            k = k.replace('storage_tokens', 'reg_token')  # rename storage to existing register naming\n\n        elif mim_weights and k in ('mask_token', 'lm_head.weight', 'lm_head.bias', 'norm.weight', 'norm.bias'):\n            if k == 'norm.weight' or k == 'norm.bias':\n                # try moving norm -> fc norm on fine-tune, probably a better starting point than new init\n                k = k.replace('norm', 'fc_norm')\n            else:\n                # skip pretrain mask token & head weights\n                continue\n\n        if 'patch_embed.proj.weight' in k:\n            _, _, H, W = model.patch_embed.proj.weight.shape\n            if v.shape[-1] != W or v.shape[-2] != H:\n                v = resample_patch_embed(\n                    v,\n                    (H, W),\n                    interpolation=interpolation,\n                    antialias=antialias,\n                    verbose=True,\n                )\n        elif k == 'pos_embed' and v.shape[1] != model.pos_embed.shape[1]:\n            # To resize pos embedding when using model at different size from pretrained weights\n            num_prefix_tokens = 0 if getattr(model, 'no_embed_class', False) else getattr(model, 'num_prefix_tokens', 1)\n            v = resample_abs_pos_embed(\n                v,\n                new_size=model.patch_embed.grid_size,\n                num_prefix_tokens=num_prefix_tokens,\n                interpolation=interpolation,\n                antialias=antialias,\n                verbose=True,\n            )\n\n        k = k.replace('mlp.ffn_ln', 'mlp.norm')\n        k = k.replace('attn.inner_attn_ln', 'attn.norm')\n        k = k.replace('mlp.w12', 'mlp.fc1')\n        k = k.replace('mlp.w1', 'mlp.fc1_g')\n        k = k.replace('mlp.w2', 'mlp.fc1_x')\n        k = k.replace('mlp.w3', 'mlp.fc2')\n        if no_qkv:\n            k = 
k.replace('q_bias', 'q_proj.bias')\n            k = k.replace('v_bias', 'v_proj.bias')\n\n        out_dict[k] = v\n\n    return out_dict\n\n\ndef _create_eva(variant: str, pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"Create an EVA model.\n\n    Args:\n        variant: Model variant name.\n        pretrained: Load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    Returns:\n        Instantiated Eva model.\n    \"\"\"\n    # Check if we should use NaFlexVit implementation\n    use_naflex = kwargs.pop('use_naflex', None)\n    _USE_NAFLEX_DEFAULT = os.environ.get('TIMM_USE_NAFLEX', '0') == '1'\n    if use_naflex is None:\n        use_naflex = _USE_NAFLEX_DEFAULT\n    if use_naflex:\n        # Import here to avoid circular import\n        from .naflexvit import _create_naflexvit_from_eva\n        return _create_naflexvit_from_eva(variant, pretrained, **kwargs)\n\n    out_indices = kwargs.pop('out_indices', 3)\n    model = build_model_with_cfg(\n        Eva, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n    return model\n\n\ndef _cfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Generate default configuration for EVA models.\n\n    Args:\n        url: Model weights URL.\n        **kwargs: Additional configuration parameters.\n\n    Returns:\n        Model configuration dictionary.\n    \"\"\"\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': OPENAI_CLIP_MEAN, 'std': OPENAI_CLIP_STD,\n        'first_conv': 'patch_embed.proj', 'classifier': 'head',\n        'license': 'mit', **kwargs\n    }\n\n\ndef _pe_cfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Generate default configuration for Perception Encoder models.\n\n    Args:\n        url: 
Model weights URL.\n        **kwargs: Additional configuration parameters.\n\n    Returns:\n        Model configuration dictionary.\n    \"\"\"\n    return {\n        'url': url,\n        'num_classes': 0, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': 1.0, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': (0.5, 0.5, 0.5), 'std': (0.5, 0.5, 0.5),\n        'first_conv': 'patch_embed.proj', 'classifier': 'head',\n        'license': 'apache-2.0', **kwargs\n    }\n\n\ndef _dinov3_cfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Generate default configuration for DINOv3 models.\n\n    Note: Original DINOv3 uses CLS-token pooling for representations. timm defaults to avg\n    pooling for the Eva architecture. Pass global_pool='token' at model creation to match\n    upstream behavior, which may be preferred for tasks like retrieval and few-shot classification.\n\n    Args:\n        url: Model weights URL.\n        **kwargs: Additional configuration parameters.\n\n    Returns:\n        Model configuration dictionary.\n    \"\"\"\n    return {\n        'url': url,\n        'num_classes': 0, 'input_size': (3, 256, 256), 'pool_size': None,\n        'crop_pct': 1.0, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.proj', 'classifier': 'head',\n        'license': 'dinov3-license', **kwargs\n    }\n\ndefault_cfgs = generate_default_cfgs({\n\n    # EVA 01 CLIP fine-tuned on imagenet-1k\n    'eva_giant_patch14_224.clip_ft_in1k': _cfg(\n        # hf_hub_id='BAAI/EVA', hf_hub_filename='eva_clip_vis_enc_sz224_ftcls_89p1.pt',\n        hf_hub_id='timm/',\n    ),\n    'eva_giant_patch14_336.clip_ft_in1k': _cfg(\n        # hf_hub_id='BAAI/EVA', hf_hub_filename='eva_clip_vis_enc_sz336_ftcls_89p4.pt',\n        hf_hub_id='timm/',\n        input_size=(3, 336, 336), crop_pct=1.0, crop_mode='squash'),\n\n    # MIM EVA 01 pretrain, ft 
on in22k -> in1k\n    'eva_giant_patch14_336.m30m_ft_in22k_in1k': _cfg(\n        # hf_hub_id='BAAI/EVA', hf_hub_filename='eva_21k_1k_336px_psz14_ema_89p6.pt',\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        input_size=(3, 336, 336), crop_pct=1.0, crop_mode='squash'),\n    'eva_giant_patch14_560.m30m_ft_in22k_in1k': _cfg(\n        # hf_hub_id='BAAI/EVA', hf_hub_filename='eva_21k_1k_560px_psz14_ema_89p7.pt',\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        input_size=(3, 560, 560), crop_pct=1.0, crop_mode='squash'),\n\n    # in22k or m38m MIM pretrain w/ intermediate in22k fine-tune and final in1k fine-tune\n    'eva02_base_patch14_448.mim_in22k_ft_in22k_in1k': _cfg(\n        # hf_hub_id='Yuxin-CV/EVA-02', hf_hub_filename='eva02/cls/in21k_to_in1k/eva02_B_pt_in21k_medft_in21k_ft_in1k_p14.pt',\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0, crop_mode='squash',\n    ),\n    'eva02_large_patch14_448.mim_in22k_ft_in22k_in1k': _cfg(\n        # hf_hub_id='Yuxin-CV/EVA-02', hf_hub_filename='eva02/cls/in21k_to_in1k/eva02_L_pt_in21k_medft_in21k_ft_in1k_p14.pt',\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0, crop_mode='squash',\n    ),\n    'eva02_large_patch14_448.mim_m38m_ft_in22k_in1k': _cfg(\n        hf_hub_id='timm/',\n        #hf_hub_id='Yuxin-CV/EVA-02', hf_hub_filename='eva02/cls/in21k_to_in1k/eva02_L_pt_m38m_medft_in21k_ft_in1k_p14.pt',\n        input_size=(3, 448, 448), crop_pct=1.0, crop_mode='squash',\n    ),\n\n    # in22k or m3m MIM pretrain w/ in1k fine-tune\n    'eva02_tiny_patch14_336.mim_in22k_ft_in1k': _cfg(\n        #hf_hub_id='Yuxin-CV/EVA-02', hf_hub_filename='eva02/cls/in1k/eva02_Ti_pt_in21k_ft_in1k_p14.pt',\n        hf_hub_id='timm/',\n        input_size=(3, 336, 336), crop_pct=1.0,\n    ),\n    'eva02_small_patch14_336.mim_in22k_ft_in1k': _cfg(\n        #hf_hub_id='Yuxin-CV/EVA-02', 
hf_hub_filename='eva02/cls/in1k/eva02_S_pt_in21k_ft_in1k_p14.pt',\n        hf_hub_id='timm/',\n        input_size=(3, 336, 336), crop_pct=1.0,\n    ),\n    'eva02_base_patch14_448.mim_in22k_ft_in1k': _cfg(\n        #hf_hub_id='Yuxin-CV/EVA-02', hf_hub_filename='eva02/cls/in1k/eva02_B_pt_in21k_ft_in1k_p14.pt',\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0,\n    ),\n    'eva02_large_patch14_448.mim_in22k_ft_in1k': _cfg(\n        #hf_hub_id='Yuxin-CV/EVA-02', hf_hub_filename='eva02/cls/in1k/eva02_L_pt_in21k_ft_in1k_p14.pt',\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0,\n    ),\n    'eva02_large_patch14_448.mim_m38m_ft_in1k': _cfg(\n        #hf_hub_id='Yuxin-CV/EVA-02', hf_hub_filename='eva02/cls/in1k/eva02_L_pt_m38m_ft_in1k_p14.pt',\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0,\n    ),\n\n    # in22k or m3m MIM pretrain w/ in22k fine-tune\n    'eva02_base_patch14_448.mim_in22k_ft_in22k': _cfg(\n        #hf_hub_id='Yuxin-CV/EVA-02', hf_hub_filename='eva02/cls/in21k/eva02_B_pt_in21k_medft_in21k_p14.pt',\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0, crop_mode='squash', num_classes=21841,\n    ),\n    'eva02_large_patch14_448.mim_in22k_ft_in22k': _cfg(\n        #hf_hub_id='Yuxin-CV/EVA-02', hf_hub_filename='eva02/cls/in21k/eva02_L_pt_in21k_medft_in21k_p14.pt',\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0, crop_mode='squash', num_classes=21841,\n    ),\n    'eva02_large_patch14_448.mim_m38m_ft_in22k': _cfg(\n        #hf_hub_id='Yuxin-CV/EVA-02', hf_hub_filename='eva02/cls/in21k/eva02_L_pt_m38m_medft_in21k_p14.pt',\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0, crop_mode='squash', num_classes=21841,\n    ),\n\n    # in22k or m38m MIM pretrain\n    'eva02_tiny_patch14_224.mim_in22k': _cfg(\n        # hf_hub_id='Yuxin-CV/EVA-02', hf_hub_filename='eva02/pt/eva02_Ti_pt_in21k_p14.pt',\n        
hf_hub_id='timm/',\n        num_classes=0,\n    ),\n    'eva02_small_patch14_224.mim_in22k': _cfg(\n        #hf_hub_id='Yuxin-CV/EVA-02', hf_hub_filename='eva02/pt/eva02_S_pt_in21k_p14.pt',\n        hf_hub_id='timm/',\n        num_classes=0,\n    ),\n    'eva02_base_patch14_224.mim_in22k': _cfg(\n        #hf_hub_id='Yuxin-CV/EVA-02', hf_hub_filename='eva02/pt/eva02_B_pt_in21k_p14.pt',\n        hf_hub_id='timm/',\n        num_classes=0,\n    ),\n    'eva02_large_patch14_224.mim_in22k': _cfg(\n        #hf_hub_id='Yuxin-CV/EVA-02', hf_hub_filename='eva02/pt/eva02_L_pt_in21k_p14.pt',\n        hf_hub_id='timm/',\n        num_classes=0,\n    ),\n    'eva02_large_patch14_224.mim_m38m': _cfg(\n        #hf_hub_id='Yuxin-CV/EVA-02', hf_hub_filename='eva02/pt/eva02_L_pt_m38m_p14.pt',\n        hf_hub_id='timm/',\n        num_classes=0,\n    ),\n\n    # EVA01 and EVA02 CLIP image towers\n    'eva_giant_patch14_clip_224.laion400m': _cfg(\n        # hf_hub_id='QuanSun/EVA-CLIP', hf_hub_filename='EVA01_CLIP_g_14_plus_psz14_s11B.pt',\n        # hf_hub_id='timm/eva_giant_patch14_clip_224.laion400m_s11b_b41k',  # float16 weights\n        # hf_hub_filename='open_clip_pytorch_model.bin',\n        hf_hub_id='timm/',\n        num_classes=1024,\n    ),\n    'eva_giant_patch14_clip_224.merged2b': _cfg(\n        # hf_hub_id='QuanSun/EVA-CLIP', hf_hub_filename='EVA01_CLIP_g_14_plus_psz14_s11B.pt',\n        # hf_hub_id='timm/eva_giant_patch14_plus_clip_224.merged2b_s11b_b114k',  # float16 weights\n        # hf_hub_filename='open_clip_pytorch_model.bin',\n        hf_hub_id='timm/',\n        num_classes=1024,\n    ),\n    'eva02_base_patch16_clip_224.merged2b': _cfg(\n        # hf_hub_id='QuanSun/EVA-CLIP', hf_hub_filename='EVA02_CLIP_L_psz14_s4B.pt',\n        # hf_hub_id='timm/eva02_base_patch16_clip_224.merged2b_s8b_b131k',  # float16 weights\n        # hf_hub_filename='open_clip_pytorch_model.bin',\n        hf_hub_id='timm/',\n        num_classes=512,\n    ),\n    
'eva02_large_patch14_clip_224.merged2b': _cfg(\n        # hf_hub_id='QuanSun/EVA-CLIP', hf_hub_filename='EVA02_CLIP_L_psz14_s4B.pt',\n        # hf_hub_id='timm/eva02_large_patch14_clip_224.merged2b_s4b_b131k',  # float16 weights\n        # hf_hub_filename='open_clip_pytorch_model.bin',\n        hf_hub_id='timm/',\n        num_classes=768,\n    ),\n    'eva02_large_patch14_clip_336.merged2b': _cfg(\n        # hf_hub_id='QuanSun/EVA-CLIP', hf_hub_filename='EVA02_CLIP_L_psz14_s4B.pt',\n        # hf_hub_id='timm/eva02_large_patch14_clip_336.merged2b_s6b_b61k',  # float16 weights\n        # hf_hub_filename='open_clip_pytorch_model.bin',\n        hf_hub_id='timm/',\n        input_size=(3, 336, 336), crop_pct=1.0,\n        num_classes=768,\n    ),\n    'eva02_enormous_patch14_clip_224.laion2b': _cfg(\n        # hf_hub_id='QuanSun/EVA-CLIP', hf_hub_filename='EVA02_CLIP_E_psz14_plus_s9B.pt',\n        # hf_hub_id='timm/eva02_enormous_patch14_clip_224.laion2b_s4b_b115k',  # float16 weights\n        # hf_hub_filename='open_clip_pytorch_model.bin',\n        hf_hub_id='timm/',\n        num_classes=1024,\n    ),\n    'eva02_enormous_patch14_clip_224.laion2b_plus': _cfg(\n        # hf_hub_id='QuanSun/EVA-CLIP', hf_hub_filename='EVA02_CLIP_E_psz14_plus_s9B.pt',\n        # hf_hub_id='timm/eva02_enormous_patch14_plus_clip_224.laion2b_s9b_b144k',  # bfloat16 weights\n        # hf_hub_filename='open_clip_pytorch_model.bin',\n        hf_hub_id='timm/',\n        num_classes=1024,\n    ),\n    'eva02_enormous_patch14_clip_224.pretrain': _cfg(\n        # hf_hub_id='QuanSun/EVA-CLIP', hf_hub_filename='EVA02_E_psz14.pt',\n        num_classes=0,\n    ),\n\n    'vit_medium_patch16_rope_reg1_gap_256.sbb_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95,\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)\n    ),\n    'vit_mediumd_patch16_rope_reg1_gap_256.sbb_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95,\n       
 mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)\n    ),\n    'vit_betwixt_patch16_rope_reg4_gap_256.sbb_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95,\n    ),\n    'vit_base_patch16_rope_reg1_gap_256.sbb_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95,\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)\n    ),\n\n    # Perception Encoder weights\n    'vit_pe_core_tiny_patch16_384.fb': _pe_cfg(\n        hf_hub_id='timm/',\n        #hf_hub_id='facebook/PE-Core-T16-384',\n        #hf_hub_filename='PE-Core-T16-384.pt',\n        input_size=(3, 384, 384),\n        num_classes=512,  # output proj dim\n    ),\n    'vit_pe_core_small_patch16_384.fb': _pe_cfg(\n        hf_hub_id='timm/',\n        #hf_hub_id='facebook/PE-Core-S16-384',\n        #hf_hub_filename='PE-Core-S16-384.pt',\n        input_size=(3, 384, 384),\n        num_classes=512,  # output proj dim\n    ),\n    'vit_pe_core_base_patch16_224.fb': _pe_cfg(\n        hf_hub_id='timm/',\n        #hf_hub_id='facebook/PE-Core-B16-224',\n        #hf_hub_filename='PE-Core-B16-224.pt',\n        input_size=(3, 224, 224),\n        num_classes=1024,  # output proj dim\n    ),\n    'vit_pe_core_large_patch14_336.fb': _pe_cfg(\n        hf_hub_id='timm/',\n        #hf_hub_id='facebook/PE-Core-L14-336',\n        #hf_hub_filename='PE-Core-L14-336.pt',\n        input_size=(3, 336, 336),\n        num_classes=1024,  # output proj dim\n    ),\n    'vit_pe_core_gigantic_patch14_448.fb': _pe_cfg(\n        hf_hub_id='timm/',\n        #hf_hub_id='facebook/PE-Core-G14-448',\n        #hf_hub_filename='PE-Core-G14-448.pt',\n        input_size=(3, 448, 448),\n        num_classes=1280,  # output proj dim\n    ),\n\n    'vit_pe_lang_large_patch14_448.fb': _pe_cfg(\n        hf_hub_id='timm/',\n        #hf_hub_id='facebook/PE-Lang-L14-448',\n        #hf_hub_filename='PE-Lang-L14-448.pt',\n        input_size=(3, 448, 448),\n        num_classes=0,\n    ),\n    
'vit_pe_lang_large_patch14_448.fb_tiling': _pe_cfg(\n        hf_hub_id='timm/',\n        #hf_hub_id='facebook/PE-Lang-L14-448-Tiling',\n        #hf_hub_filename='PE-Lang-L14-448-Tiling.pt',\n        input_size=(3, 448, 448),\n        num_classes=0,\n    ),\n    'vit_pe_lang_gigantic_patch14_448.fb': _pe_cfg(\n        hf_hub_id='timm/',\n        #hf_hub_id='facebook/PE-Lang-G14-448',\n        #hf_hub_filename='PE-Lang-G14-448.pt',\n        input_size=(3, 448, 448),\n        num_classes=0,\n    ),\n    'vit_pe_lang_gigantic_patch14_448.fb_tiling': _pe_cfg(\n        hf_hub_id='timm/',\n        #hf_hub_id='facebook/PE-Lang-G14-448-Tiling',\n        #hf_hub_filename='PE-Lang-G14-448-Tiling.pt',\n        input_size=(3, 448, 448),\n        num_classes=0,\n    ),\n\n    'vit_pe_spatial_tiny_patch16_512.fb': _pe_cfg(\n        hf_hub_id='timm/',\n        #hf_hub_id='facebook/PE-Spatial-T16-512',\n        #hf_hub_filename='PE-Spatial-T16-512.pt',\n        input_size=(3, 512, 512),\n        num_classes=0,\n    ),\n    'vit_pe_spatial_small_patch16_512.fb': _pe_cfg(\n        hf_hub_id='timm/',\n        #hf_hub_id='facebook/PE-Spatial-S16-512',\n        #hf_hub_filename='PE-Spatial-S16-512.pt',\n        input_size=(3, 512, 512),\n        num_classes=0,\n    ),\n    'vit_pe_spatial_base_patch16_512.fb': _pe_cfg(\n        hf_hub_id='timm/',\n        #hf_hub_id='facebook/PE-Spatial-B16-512',\n        #hf_hub_filename='PE-Spatial-B16-512.pt',\n        input_size=(3, 512, 512),\n        num_classes=0,\n    ),\n    'vit_pe_spatial_large_patch14_448.fb': _pe_cfg(\n        hf_hub_id='timm/',\n        #hf_hub_id='facebook/PE-Spatial-L14-448',\n        #hf_hub_filename='PE-Spatial-L14-448.pt',\n        input_size=(3, 448, 448),\n        num_classes=0,\n    ),\n    'vit_pe_spatial_gigantic_patch14_448.fb': _pe_cfg(\n        hf_hub_id='timm/',\n        #hf_hub_id='facebook/PE-Spatial-G14-448',\n        #hf_hub_filename='PE-Spatial-G14-448.pt',\n        input_size=(3, 448, 448),\n        
num_classes=0,\n    ),\n\n    # RoPE-ViT models from Naver\n    'vit_small_patch16_rope_224.naver_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        license='apache-2.0',\n    ),\n    'vit_base_patch16_rope_224.naver_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        license='apache-2.0',\n    ),\n    'vit_large_patch16_rope_224.naver_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        license='apache-2.0',\n    ),\n    'vit_small_patch16_rope_mixed_224.naver_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        license='apache-2.0',\n    ),\n    'vit_base_patch16_rope_mixed_224.naver_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        license='apache-2.0',\n    ),\n    'vit_large_patch16_rope_mixed_224.naver_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        license='apache-2.0',\n    ),\n    'vit_small_patch16_rope_ape_224.naver_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        license='apache-2.0',\n    ),\n    'vit_base_patch16_rope_ape_224.naver_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        license='apache-2.0',\n    ),\n    'vit_large_patch16_rope_ape_224.naver_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        license='apache-2.0',\n    ),\n    'vit_small_patch16_rope_mixed_ape_224.naver_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        license='apache-2.0',\n    ),\n    'vit_base_patch16_rope_mixed_ape_224.naver_in1k': _cfg(\n        
hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        license='apache-2.0',\n    ),\n    'vit_large_patch16_rope_mixed_ape_224.naver_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        license='apache-2.0',\n    ),\n\n    # DINOv3 weights are under a specific license with redistribution terms, please see\n    # https://github.com/facebookresearch/dinov3/blob/main/LICENSE.md\n    # NOTE: Original DINOv3 uses CLS-token pooling (global_pool='token') which may be better\n    # for some tasks. Default here is avg pooling inherited from the Eva base class.\n    'vit_small_patch16_dinov3.lvd1689m': _dinov3_cfg(\n        hf_hub_id='timm/',\n    ),\n    'vit_small_patch16_dinov3_qkvb.lvd1689m': _dinov3_cfg(\n        hf_hub_id='timm/',\n    ),\n    'vit_small_plus_patch16_dinov3.lvd1689m': _dinov3_cfg(\n        hf_hub_id='timm/',\n    ),\n    'vit_small_plus_patch16_dinov3_qkvb.lvd1689m': _dinov3_cfg(\n        hf_hub_id='timm/',\n    ),\n    'vit_base_patch16_dinov3.lvd1689m': _dinov3_cfg(\n        hf_hub_id='timm/',\n    ),\n    'vit_base_patch16_dinov3_qkvb.lvd1689m': _dinov3_cfg(\n        hf_hub_id='timm/',\n    ),\n    'vit_large_patch16_dinov3.lvd1689m': _dinov3_cfg(\n        hf_hub_id='timm/',\n    ),\n    'vit_large_patch16_dinov3_qkvb.lvd1689m': _dinov3_cfg(\n        hf_hub_id='timm/',\n    ),\n    'vit_large_patch16_dinov3.sat493m': _dinov3_cfg(\n        hf_hub_id='timm/',\n        mean=(0.430, 0.411, 0.296), std=(0.213, 0.156, 0.143),\n    ),\n    'vit_large_patch16_dinov3_qkvb.sat493m': _dinov3_cfg(\n        hf_hub_id='timm/',\n        mean=(0.430, 0.411, 0.296), std=(0.213, 0.156, 0.143),\n    ),\n    'vit_huge_plus_patch16_dinov3.lvd1689m': _dinov3_cfg(\n        hf_hub_id='timm/',\n    ),\n    'vit_huge_plus_patch16_dinov3_qkvb.lvd1689m': _dinov3_cfg(\n        hf_hub_id='timm/',\n    ),\n    'vit_7b_patch16_dinov3.lvd1689m': _dinov3_cfg(\n        
hf_hub_id='timm/',\n    ),\n    'vit_7b_patch16_dinov3.sat493m': _dinov3_cfg(\n        hf_hub_id='timm/',\n        mean=(0.430, 0.411, 0.296), std=(0.213, 0.156, 0.143),\n    ),\n\n})\n\n\n@register_model\ndef eva_giant_patch14_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"EVA-g model https://arxiv.org/abs/2211.07636\"\"\"\n    model_args = dict(patch_size=14, embed_dim=1408, depth=40, num_heads=16, mlp_ratio=6144 / 1408)\n    model = _create_eva('eva_giant_patch14_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef eva_giant_patch14_336(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"EVA-g model https://arxiv.org/abs/2211.07636\"\"\"\n    model_args = dict(patch_size=14, embed_dim=1408, depth=40, num_heads=16, mlp_ratio=6144 / 1408)\n    model = _create_eva('eva_giant_patch14_336', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef eva_giant_patch14_560(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"EVA-g model https://arxiv.org/abs/2211.07636\"\"\"\n    model_args = dict(patch_size=14, embed_dim=1408, depth=40, num_heads=16, mlp_ratio=6144 / 1408)\n    model = _create_eva('eva_giant_patch14_560', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef eva02_tiny_patch14_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"EVA02 Tiny https://arxiv.org/abs/2303.11331\"\"\"\n    model_args = dict(\n        img_size=224,\n        patch_size=14,\n        embed_dim=192,\n        depth=12,\n        num_heads=3,\n        mlp_ratio=4 * 2 / 3,\n        swiglu_mlp=True,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(16, 16),  # 224/14\n    )\n    model = _create_eva('eva02_tiny_patch14_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef eva02_small_patch14_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"EVA02 Small 
https://arxiv.org/abs/2303.11331\"\"\"\n    model_args = dict(\n        img_size=224,\n        patch_size=14,\n        embed_dim=384,\n        depth=12,\n        num_heads=6,\n        mlp_ratio=4 * 2 / 3,\n        swiglu_mlp=True,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(16, 16),  # 224/14\n    )\n    model = _create_eva('eva02_small_patch14_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef eva02_base_patch14_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"EVA02 Base https://arxiv.org/abs/2303.11331\"\"\"\n    model_args = dict(\n        img_size=224,\n        patch_size=14,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        qkv_fused=False,\n        mlp_ratio=4 * 2 / 3,\n        swiglu_mlp=True,\n        scale_mlp=True,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(16, 16),  # 224/14\n    )\n    model = _create_eva('eva02_base_patch14_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef eva02_large_patch14_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"EVA02 Large https://arxiv.org/abs/2303.11331\"\"\"\n    model_args = dict(\n        img_size=224,\n        patch_size=14,\n        embed_dim=1024,\n        depth=24,\n        num_heads=16,\n        mlp_ratio=4 * 2 / 3,\n        qkv_fused=False,\n        swiglu_mlp=True,\n        scale_mlp=True,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(16, 16),  # 224/14\n    )\n    model = _create_eva('eva02_large_patch14_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef eva02_tiny_patch14_336(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"EVA02 Tiny https://arxiv.org/abs/2303.11331\"\"\"\n    model_args = dict(\n        img_size=336,\n        patch_size=14,\n        embed_dim=192,\n        depth=12,\n        num_heads=3,\n        mlp_ratio=4 * 2 / 3,\n        swiglu_mlp=True,\n        
use_rot_pos_emb=True,\n        ref_feat_shape=(16, 16),  # 224/14\n    )\n    model = _create_eva('eva02_tiny_patch14_336', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef eva02_small_patch14_336(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"EVA02 Small https://arxiv.org/abs/2303.11331\"\"\"\n    model_args = dict(\n        img_size=336,\n        patch_size=14,\n        embed_dim=384,\n        depth=12,\n        num_heads=6,\n        mlp_ratio=4 * 2 / 3,\n        swiglu_mlp=True,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(16, 16),  # 224/14\n    )\n    model = _create_eva('eva02_small_patch14_336', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef eva02_base_patch14_448(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"EVA02 Base https://arxiv.org/abs/2303.11331\"\"\"\n    model_args = dict(\n        img_size=448,\n        patch_size=14,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        qkv_fused=False,\n        mlp_ratio=4 * 2 / 3,\n        swiglu_mlp=True,\n        scale_mlp=True,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(16, 16),  # 224/14\n    )\n    model = _create_eva('eva02_base_patch14_448', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef eva02_large_patch14_448(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"EVA02 Large https://arxiv.org/abs/2303.11331\"\"\"\n    model_args = dict(\n        img_size=448,\n        patch_size=14,\n        embed_dim=1024,\n        depth=24,\n        num_heads=16,\n        mlp_ratio=4 * 2 / 3,\n        qkv_fused=False,\n        swiglu_mlp=True,\n        scale_mlp=True,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(16, 16),  # 224/14\n    )\n    model = _create_eva('eva02_large_patch14_448', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef 
eva_giant_patch14_clip_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"EVA-g CLIP model (only difference from non-CLIP is the pooling)\"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1408, depth=40, num_heads=16, mlp_ratio=6144 / 1408,\n        global_pool=kwargs.pop('global_pool', 'token'))\n    model = _create_eva('eva_giant_patch14_clip_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef eva02_base_patch16_clip_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"An EVA-CLIP specific variant that adds additional attn scale layer-norm to eva02_base\"\"\"\n    model_args = dict(\n        img_size=224,\n        patch_size=16,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        qkv_fused=False,\n        mlp_ratio=4 * 2 / 3,\n        swiglu_mlp=True,\n        scale_mlp=True,\n        scale_attn_inner=True,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(16, 16),  # 224/14\n        global_pool=kwargs.pop('global_pool', 'token'),\n    )\n    model = _create_eva('eva02_base_patch16_clip_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef eva02_large_patch14_clip_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"An EVA-CLIP specific variant that adds additional attn scale layer-norm to eva02_large\"\"\"\n    model_args = dict(\n        img_size=224,\n        patch_size=14,\n        embed_dim=1024,\n        depth=24,\n        num_heads=16,\n        mlp_ratio=4 * 2 / 3,\n        qkv_fused=False,\n        swiglu_mlp=True,\n        scale_mlp=True,\n        scale_attn_inner=True,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(16, 16),  # 224/14\n        global_pool=kwargs.pop('global_pool', 'token'),\n    )\n    model = _create_eva('eva02_large_patch14_clip_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef 
eva02_large_patch14_clip_336(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"An EVA-CLIP specific variant that adds additional attn scale layer-norm to eva02_large\"\"\"\n    model_args = dict(\n        img_size=336,\n        patch_size=14,\n        embed_dim=1024,\n        depth=24,\n        num_heads=16,\n        mlp_ratio=4 * 2 / 3,\n        qkv_fused=False,\n        swiglu_mlp=True,\n        scale_mlp=True,\n        scale_attn_inner=True,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(16, 16),  # 224/14\n        global_pool=kwargs.pop('global_pool', 'token'),\n    )\n    model = _create_eva('eva02_large_patch14_clip_336', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef eva02_enormous_patch14_clip_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"An EVA-CLIP specific variant that uses residual post-norm in blocks\"\"\"\n    model_args = dict(\n        img_size=224,\n        patch_size=14,\n        embed_dim=1792,\n        depth=64,\n        num_heads=16,\n        mlp_ratio=15360 / 1792,\n        use_post_norm=True,\n        global_pool=kwargs.pop('global_pool', 'token'),\n    )\n    model = _create_eva('eva02_enormous_patch14_clip_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_medium_patch16_rope_reg1_gap_256(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"timm SBB ViT with ROPE\"\"\"\n    model_args = dict(\n        img_size=256,\n        patch_size=16,\n        embed_dim=512,\n        depth=12,\n        num_heads=8,\n        qkv_fused=True,\n        qkv_bias=True,\n        init_values=1e-5,\n        class_token=False,\n        num_reg_tokens=1,\n        use_rot_pos_emb=True,\n        use_abs_pos_emb=False,\n        ref_feat_shape=(16, 16),  # 224/14\n    )\n    model = _create_eva('vit_medium_patch16_rope_reg1_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef 
vit_mediumd_patch16_rope_reg1_gap_256(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"timm SBB ViT with ROPE\"\"\"\n    model_args = dict(\n        img_size=256,\n        patch_size=16,\n        embed_dim=512,\n        depth=20,\n        num_heads=8,\n        qkv_fused=True,\n        qkv_bias=False,\n        init_values=1e-5,\n        class_token=False,\n        num_reg_tokens=1,\n        use_rot_pos_emb=True,\n        use_abs_pos_emb=False,\n        ref_feat_shape=(16, 16),  # 224/14\n    )\n    model = _create_eva('vit_mediumd_patch16_rope_reg1_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_betwixt_patch16_rope_reg4_gap_256(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"timm SBB ViT with ROPE\"\"\"\n    model_args = dict(\n        img_size=256,\n        patch_size=16,\n        embed_dim=640,\n        depth=12,\n        num_heads=10,\n        qkv_fused=True,\n        qkv_bias=True,\n        init_values=1e-5,\n        class_token=False,\n        num_reg_tokens=4,\n        use_rot_pos_emb=True,\n        use_abs_pos_emb=False,\n        ref_feat_shape=(16, 16),  # 224/14\n    )\n    model = _create_eva('vit_betwixt_patch16_rope_reg4_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_rope_reg1_gap_256(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"timm SBB ViT with ROPE\"\"\"\n    model_args = dict(\n        img_size=256,\n        patch_size=16,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        qkv_fused=True,\n        qkv_bias=True,\n        init_values=1e-5,\n        class_token=False,\n        num_reg_tokens=1,\n        use_rot_pos_emb=True,\n        use_abs_pos_emb=False,\n        ref_feat_shape=(16, 16),  # 224/14\n    )\n    model = _create_eva('vit_base_patch16_rope_reg1_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef 
vit_pe_core_tiny_patch16_384(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"Perception Encoder (PE) ViT from Meta (https://arxiv.org/abs/2504.13181)\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=192,\n        depth=12,\n        num_heads=3,\n        mlp_ratio=4.0,\n        global_pool='map',\n        attn_type='rope',\n        use_pre_transformer_norm=True,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(24, 24),\n        rope_grid_offset=1.,\n        rope_grid_indexing='xy',\n        attn_pool_num_heads=8,\n        attn_pool_mlp_ratio=4.,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n        #dynamic_img_size=True\n    )\n    return _create_eva('vit_pe_core_tiny_patch16_384', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n\n@register_model\ndef vit_pe_core_small_patch16_384(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"Perception Encoder (PE) ViT from Meta (https://arxiv.org/abs/2504.13181)\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=384,\n        depth=12,\n        num_heads=6,\n        mlp_ratio=4.0,\n        global_pool='map',\n        attn_type='rope',\n        use_pre_transformer_norm=True,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(24, 24),\n        rope_grid_offset=1.,\n        rope_grid_indexing='xy',\n        attn_pool_num_heads=8,\n        attn_pool_mlp_ratio=4.,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n        #dynamic_img_size=True\n    )\n    return _create_eva('vit_pe_core_small_patch16_384', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef vit_pe_core_base_patch16_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"Perception Encoder (PE) ViT from Meta (https://arxiv.org/abs/2504.13181)\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        mlp_ratio=4.0,\n        global_pool='map',\n        attn_type='rope',\n        
use_pre_transformer_norm=True,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(14, 14),\n        rope_grid_offset=1.,\n        rope_grid_indexing='xy',\n        attn_pool_num_heads=8,\n        attn_pool_mlp_ratio=4.,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n        #dynamic_img_size=True\n    )\n    return _create_eva('vit_pe_core_base_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef vit_pe_core_large_patch14_336(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"Perception Encoder (PE) ViT from Meta (https://arxiv.org/abs/2504.13181)\"\"\"\n    model_args = dict(\n        patch_size=14,\n        embed_dim=1024,\n        depth=24,\n        num_heads=16,\n        mlp_ratio=4.0,\n        global_pool='map',\n        attn_type='rope',\n        use_pre_transformer_norm=True,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(24, 24),\n        rope_grid_offset=1.,\n        rope_grid_indexing='xy',\n        attn_pool_num_heads=8,\n        attn_pool_mlp_ratio=4.,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n        #dynamic_img_size=True,\n    )\n    return _create_eva('vit_pe_core_large_patch14_336', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef vit_pe_core_gigantic_patch14_448(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"Perception Encoder (PE) ViT from Meta (https://arxiv.org/abs/2504.13181)\"\"\"\n    model_args = dict(\n        patch_size=14,\n        embed_dim=1536,\n        depth=50,\n        num_heads=16,\n        mlp_ratio=8960 / 1536,\n        global_pool='map',\n        attn_type='rope',\n        class_token=False,\n        use_pre_transformer_norm=True,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(32, 32),\n        rope_grid_indexing='xy',\n        attn_pool_num_heads=8,\n        attn_pool_mlp_ratio=4.,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n        #dynamic_img_size=True,\n    )\n    return 
_create_eva('vit_pe_core_gigantic_patch14_448', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef vit_pe_lang_large_patch14_448(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"Perception Encoder (PE) ViT from Meta (https://arxiv.org/abs/2504.13181)\"\"\"\n    model_args = dict(\n        patch_size=14,\n        embed_dim=1024,\n        depth=23,\n        num_heads=16,\n        mlp_ratio=4.0,\n        attn_type='rope',\n        class_token=True,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(32, 32),\n        rope_grid_offset=1.,\n        rope_grid_indexing='xy',\n        use_pre_transformer_norm=True,\n        use_post_transformer_norm=False,\n        use_fc_norm=False,  # explicitly disable\n        init_values=0.1,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n        #dynamic_img_size=True,\n    )\n    return _create_eva('vit_pe_lang_large_patch14_448', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef vit_pe_lang_gigantic_patch14_448(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"Perception Encoder (PE) ViT from Meta (https://arxiv.org/abs/2504.13181)\"\"\"\n    model_args = dict(\n        patch_size=14,\n        embed_dim=1536,\n        depth=47,\n        num_heads=16,\n        mlp_ratio=8960 / 1536,\n        attn_type='rope',\n        class_token=False,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(32, 32),\n        rope_grid_indexing='xy',\n        use_pre_transformer_norm=True,\n        use_post_transformer_norm=False,\n        use_fc_norm=False,  # explicitly disable\n        init_values=0.1,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n        #dynamic_img_size=True,\n    )\n    return _create_eva('vit_pe_lang_gigantic_patch14_448', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef vit_pe_spatial_tiny_patch16_512(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"Perception Encoder (PE) ViT from Meta 
(https://arxiv.org/abs/2504.13181)\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=192,\n        depth=12,\n        num_heads=3,\n        mlp_ratio=4.0,\n        attn_type='rope',\n        use_pre_transformer_norm=True,\n        use_post_transformer_norm=False,\n        use_fc_norm=False,  # explicitly disable\n        use_rot_pos_emb=True,\n        ref_feat_shape=(32, 32),\n        rope_grid_offset=1.,\n        rope_grid_indexing='xy',\n        norm_layer=partial(LayerNorm, eps=1e-5),\n        #dynamic_img_size=True\n    )\n    return _create_eva('vit_pe_spatial_tiny_patch16_512', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef vit_pe_spatial_small_patch16_512(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"Perception Encoder (PE) ViT from Meta (https://arxiv.org/abs/2504.13181)\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=384,\n        depth=12,\n        num_heads=6,\n        mlp_ratio=4.0,\n        attn_type='rope',\n        use_pre_transformer_norm=True,\n        use_post_transformer_norm=False,\n        use_fc_norm=False,  # explicitly disable\n        use_rot_pos_emb=True,\n        ref_feat_shape=(32, 32),\n        rope_grid_offset=1.,\n        rope_grid_indexing='xy',\n        norm_layer=partial(LayerNorm, eps=1e-5),\n        #dynamic_img_size=True\n    )\n    return _create_eva('vit_pe_spatial_small_patch16_512', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef vit_pe_spatial_base_patch16_512(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"Perception Encoder (PE) ViT from Meta (https://arxiv.org/abs/2504.13181)\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        mlp_ratio=4.0,\n        attn_type='rope',\n        use_pre_transformer_norm=True,\n        use_post_transformer_norm=False,\n        use_fc_norm=False,  # explicitly disable\n        
use_rot_pos_emb=True,\n        ref_feat_shape=(32, 32),\n        rope_grid_offset=1.,\n        rope_grid_indexing='xy',\n        norm_layer=partial(LayerNorm, eps=1e-5),\n        #dynamic_img_size=True\n    )\n    return _create_eva('vit_pe_spatial_base_patch16_512', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef vit_pe_spatial_large_patch14_448(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"Perception Encoder (PE) ViT from Meta (https://arxiv.org/abs/2504.13181)\"\"\"\n    model_args = dict(\n        patch_size=14,\n        embed_dim=1024,\n        depth=24,\n        num_heads=16,\n        mlp_ratio=4.0,\n        attn_type='rope',\n        use_pre_transformer_norm=True,\n        use_post_transformer_norm=False,\n        use_fc_norm=False,  # explicitly disable\n        use_rot_pos_emb=True,\n        ref_feat_shape=(32, 32),\n        rope_grid_offset=1.,\n        rope_grid_indexing='xy',\n        norm_layer=partial(LayerNorm, eps=1e-5),\n        #dynamic_img_size=True,\n    )\n    return _create_eva('vit_pe_spatial_large_patch14_448', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef vit_pe_spatial_gigantic_patch14_448(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"Perception Encoder (PE) ViT from Meta (https://arxiv.org/abs/2504.13181)\"\"\"\n    model_args = dict(\n        patch_size=14,\n        embed_dim=1536,\n        depth=50,\n        num_heads=16,\n        mlp_ratio=8960 / 1536,\n        attn_type='rope',\n        class_token=False,\n        use_rot_pos_emb=True,\n        ref_feat_shape=(32, 32),\n        rope_grid_indexing='xy',\n        use_pre_transformer_norm=True,\n        use_post_transformer_norm=False,\n        use_fc_norm=False,  # explicitly disable\n        init_values=0.1,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n        #dynamic_img_size=True,\n    )\n    return _create_eva('vit_pe_spatial_gigantic_patch14_448', pretrained=pretrained, **dict(model_args, 
**kwargs))\n\n\n# RoPE-ViT models from https://github.com/naver-ai/rope-vit\n@register_model\ndef vit_small_patch16_rope_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"RoPE-Axial ViT-S/16 from https://github.com/naver-ai/rope-vit\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=384,\n        depth=12,\n        num_heads=6,\n        mlp_ratio=4,\n        attn_type='rope',\n        qkv_bias=True,\n        init_values=1e-5,\n        class_token=True,\n        global_pool='token',\n        use_abs_pos_emb=False,\n        use_rot_pos_emb=True,\n        rope_grid_indexing='xy',\n        rope_temperature=100.0,\n    )\n    model = _create_eva('vit_small_patch16_rope_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_rope_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"RoPE-Axial ViT-B/16 from https://github.com/naver-ai/rope-vit\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        mlp_ratio=4,\n        attn_type='rope',\n        use_fc_norm=False,\n        qkv_bias=True,\n        init_values=1e-5,\n        class_token=True,\n        global_pool='token',\n        use_abs_pos_emb=False,\n        use_rot_pos_emb=True,\n        rope_grid_indexing='xy',\n        rope_temperature=100.0,\n    )\n    model = _create_eva('vit_base_patch16_rope_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch16_rope_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"RoPE-Axial ViT-L/16 from https://github.com/naver-ai/rope-vit\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=1024,\n        depth=24,\n        num_heads=16,\n        mlp_ratio=4,\n        attn_type='rope',\n        qkv_bias=True,\n        init_values=1e-5,\n        class_token=True,\n        global_pool='token',\n        use_abs_pos_emb=False,\n  
      use_rot_pos_emb=True,\n        rope_grid_indexing='xy',\n        rope_temperature=100.0,\n    )\n    model = _create_eva('vit_large_patch16_rope_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_small_patch16_rope_mixed_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"RoPE-Mixed ViT-S/16 from https://github.com/naver-ai/rope-vit\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=384,\n        depth=12,\n        num_heads=6,\n        mlp_ratio=4,\n        attn_type='rope',\n        qkv_bias=True,\n        init_values=1e-5,\n        class_token=True,\n        global_pool='token',\n        use_abs_pos_emb=False,\n        use_rot_pos_emb=True,\n        rope_grid_indexing='xy',\n        rope_temperature=10.0,\n        rope_type='mixed'\n    )\n    model = _create_eva('vit_small_patch16_rope_mixed_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_rope_mixed_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"RoPE-Mixed ViT-B/16 from https://github.com/naver-ai/rope-vit\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        mlp_ratio=4,\n        qkv_bias=True,\n        attn_type='rope',\n        init_values=1e-5,\n        class_token=True,\n        global_pool='token',\n        use_abs_pos_emb=False,\n        use_rot_pos_emb=True,\n        rope_grid_indexing='xy',\n        rope_temperature=10.0,\n        rope_type='mixed'\n    )\n    model = _create_eva('vit_base_patch16_rope_mixed_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch16_rope_mixed_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"RoPE-Mixed ViT-L/16 from https://github.com/naver-ai/rope-vit\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=1024,\n        depth=24,\n    
    num_heads=16,\n        mlp_ratio=4,\n        attn_type='rope',\n        qkv_bias=True,\n        init_values=1e-5,\n        class_token=True,\n        global_pool='token',\n        use_abs_pos_emb=False,\n        use_rot_pos_emb=True,\n        rope_grid_indexing='xy',\n        rope_temperature=10.0,\n        rope_type='mixed'\n    )\n    model = _create_eva('vit_large_patch16_rope_mixed_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n# APE variants (with absolute position embeddings)\n@register_model\ndef vit_small_patch16_rope_ape_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"RoPE-Axial + APE ViT-S/16 from https://github.com/naver-ai/rope-vit\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=384,\n        depth=12,\n        num_heads=6,\n        mlp_ratio=4,\n        attn_type='rope',\n        qkv_bias=True,\n        init_values=1e-5,\n        class_token=True,\n        global_pool='token',\n        no_embed_class=True,\n        use_abs_pos_emb=True,\n        use_rot_pos_emb=True,\n        rope_grid_indexing='xy',\n        rope_temperature=100.0,\n    )\n    model = _create_eva('vit_small_patch16_rope_ape_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_rope_ape_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"RoPE-Axial + APE ViT-B/16 from https://github.com/naver-ai/rope-vit\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        mlp_ratio=4,\n        attn_type='rope',\n        qkv_bias=True,\n        init_values=1e-5,\n        class_token=True,\n        global_pool='token',\n        no_embed_class=True,\n        use_abs_pos_emb=True,\n        use_rot_pos_emb=True,\n        rope_grid_indexing='xy',\n        rope_temperature=100.0,\n    )\n\n    model = _create_eva('vit_base_patch16_rope_ape_224', pretrained=pretrained, **dict(model_args, 
**kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch16_rope_ape_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"RoPE-Axial + APE ViT-L/16 from https://github.com/naver-ai/rope-vit\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=1024,\n        depth=24,\n        num_heads=16,\n        mlp_ratio=4,\n        attn_type='rope',\n        qkv_bias=True,\n        init_values=1e-5,\n        class_token=True,\n        global_pool='token',\n        no_embed_class=True,\n        use_abs_pos_emb=True,\n        use_rot_pos_emb=True,\n        rope_grid_indexing='xy',\n        rope_temperature=100.0,\n    )\n\n    model = _create_eva('vit_large_patch16_rope_ape_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_small_patch16_rope_mixed_ape_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"RoPE-Mixed + APE ViT-S/16 from https://github.com/naver-ai/rope-vit\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=384,\n        depth=12,\n        num_heads=6,\n        mlp_ratio=4,\n        attn_type='rope',\n        qkv_bias=True,\n        init_values=1e-5,\n        class_token=True,\n        global_pool='token',\n        no_embed_class=True,\n        use_abs_pos_emb=True,\n        use_rot_pos_emb=True,\n        rope_grid_indexing='xy',\n        rope_temperature=10.0,\n        rope_type='mixed'\n    )\n\n    model = _create_eva('vit_small_patch16_rope_mixed_ape_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_rope_mixed_ape_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"RoPE-Mixed + APE ViT-B/16 from https://github.com/naver-ai/rope-vit\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        mlp_ratio=4,\n        attn_type='rope',\n        qkv_bias=True,\n        init_values=1e-5,\n        
class_token=True,\n        global_pool='token',\n        no_embed_class=True,\n        use_abs_pos_emb=True,\n        use_rot_pos_emb=True,\n        rope_grid_indexing='xy',\n        rope_temperature=10.0,\n        rope_type='mixed'\n    )\n    model = _create_eva('vit_base_patch16_rope_mixed_ape_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch16_rope_mixed_ape_224(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"RoPE-Mixed + APE ViT-L/16 from https://github.com/naver-ai/rope-vit\"\"\"\n    model_args = dict(\n        patch_size=16,\n        embed_dim=1024,\n        depth=24,\n        num_heads=16,\n        mlp_ratio=4,\n        attn_type='rope',\n        qkv_bias=True,\n        init_values=1e-5,\n        class_token=True,\n        global_pool='token',\n        no_embed_class=True,\n        use_abs_pos_emb=True,\n        use_rot_pos_emb=True,\n        rope_grid_indexing='xy',\n        rope_temperature=10.0,\n        rope_type='mixed'\n    )\n    model = _create_eva('vit_large_patch16_rope_mixed_ape_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_small_patch16_dinov3(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"DINOv3 S/16 https://arxiv.org/abs/2508.10104\n    NOTE: Pass global_pool='token' to use CLS-token pooling (matches upstream DINOv3).\n    \"\"\"\n    model_args = dict(\n        patch_size=16,\n        dynamic_img_size=True,\n        embed_dim=384,\n        depth=12,\n        num_heads=6,\n        qkv_bias=False,\n        init_values=1.0e-05, # layer-scale\n        rope_type='dinov3',\n        rope_temperature=100,\n        #rope_rescale_coords=2,  # haven't added to interface\n        rope_rotate_half=True,\n        use_rot_pos_emb=True,\n        use_abs_pos_emb=False,\n        num_reg_tokens=4,\n        use_fc_norm=False,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n    model = 
_create_eva('vit_small_patch16_dinov3', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_small_patch16_dinov3_qkvb(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"DINOv3 S/16 w/ QKV bias enabled (but zero) https://arxiv.org/abs/2508.10104\n    NOTE: Pass global_pool='token' to use CLS-token pooling (matches upstream DINOv3).\n    \"\"\"\n    model_args = dict(\n        patch_size=16,\n        dynamic_img_size=True,\n        embed_dim=384,\n        depth=12,\n        num_heads=6,\n        qkv_bias=True,\n        init_values=1.0e-05, # layer-scale\n        rope_type='dinov3',\n        rope_temperature=100,\n        #rope_rescale_coords=2,  # haven't added to interface\n        rope_rotate_half=True,\n        use_rot_pos_emb=True,\n        use_abs_pos_emb=False,\n        num_reg_tokens=4,\n        use_fc_norm=False,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n    model = _create_eva('vit_small_patch16_dinov3_qkvb', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_small_plus_patch16_dinov3(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"DINOv3 S/16 Plus https://arxiv.org/abs/2508.10104\n    NOTE: Pass global_pool='token' to use CLS-token pooling (matches upstream DINOv3).\n    \"\"\"\n    model_args = dict(\n        patch_size=16,\n        dynamic_img_size=True,\n        embed_dim=384,\n        depth=12,\n        num_heads=6,\n        qkv_bias=False,\n        init_values=1.0e-05, # layer-scale\n        rope_type='dinov3',\n        rope_temperature=100,\n        #rope_rescale_coords=2,  # haven't added to interface\n        rope_rotate_half=True,\n        use_rot_pos_emb=True,\n        use_abs_pos_emb=False,\n        swiglu_mlp=True,\n        swiglu_align_to=8,\n        num_reg_tokens=4,\n        use_fc_norm=False,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n    model = _create_eva('vit_small_plus_patch16_dinov3', 
pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_small_plus_patch16_dinov3_qkvb(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"DINOv3 S/16 Plus w/ QKV bias enabled (but 0) https://arxiv.org/abs/2508.10104\n    NOTE: Pass global_pool='token' to use CLS-token pooling (matches upstream DINOv3).\n    \"\"\"\n    model_args = dict(\n        patch_size=16,\n        dynamic_img_size=True,\n        embed_dim=384,\n        depth=12,\n        num_heads=6,\n        qkv_bias=True,\n        init_values=1.0e-05, # layer-scale\n        rope_type='dinov3',\n        rope_temperature=100,\n        #rope_rescale_coords=2,  # haven't added to interface\n        rope_rotate_half=True,\n        use_rot_pos_emb=True,\n        use_abs_pos_emb=False,\n        swiglu_mlp=True,\n        swiglu_align_to=8,\n        num_reg_tokens=4,\n        use_fc_norm=False,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n    model = _create_eva('vit_small_plus_patch16_dinov3_qkvb', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_dinov3(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"DINOv3 B/16 https://arxiv.org/abs/2508.10104\n    NOTE: Pass global_pool='token' to use CLS-token pooling (matches upstream DINOv3).\n    \"\"\"\n    model_args = dict(\n        patch_size=16,\n        dynamic_img_size=True,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        qkv_bias=False,\n        init_values=1.0e-05, # layer-scale\n        rope_type='dinov3',\n        rope_temperature=100,\n        #rope_rescale_coords=2,  # haven't added to interface\n        rope_rotate_half=True,\n        use_rot_pos_emb=True,\n        use_abs_pos_emb=False,\n        num_reg_tokens=4,\n        use_fc_norm=False,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n    model = _create_eva('vit_base_patch16_dinov3', pretrained=pretrained, **dict(model_args, **kwargs))\n   
 return model\n\n\n@register_model\ndef vit_base_patch16_dinov3_qkvb(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"DINOv3 B/16 w/ QKV bias enabled (but zero) https://arxiv.org/abs/2508.10104\n    NOTE: Pass global_pool='token' to use CLS-token pooling (matches upstream DINOv3).\n    \"\"\"\n    model_args = dict(\n        patch_size=16,\n        dynamic_img_size=True,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        qkv_bias=True,\n        init_values=1.0e-05, # layer-scale\n        rope_type='dinov3',\n        rope_temperature=100,\n        #rope_rescale_coords=2,  # haven't added to interface\n        rope_rotate_half=True,\n        use_rot_pos_emb=True,\n        use_abs_pos_emb=False,\n        num_reg_tokens=4,\n        use_fc_norm=False,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n    model = _create_eva('vit_base_patch16_dinov3_qkvb', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch16_dinov3(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"DINOv3 L/16 https://arxiv.org/abs/2508.10104\n    NOTE: Pass global_pool='token' to use CLS-token pooling (matches upstream DINOv3).\n    \"\"\"\n    model_args = dict(\n        patch_size=16,\n        dynamic_img_size=True,\n        embed_dim=1024,\n        depth=24,\n        num_heads=16,\n        qkv_bias=False,\n        init_values=1.0e-5, # layer-scale\n        rope_type='dinov3',\n        rope_temperature=100,\n        use_rot_pos_emb=True,\n        use_abs_pos_emb=False,\n        rope_rotate_half=True,\n        #rope_rescale_coords=2,  # haven't added to interface\n        num_reg_tokens=4,\n        use_fc_norm=False,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n    model = _create_eva('vit_large_patch16_dinov3', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch16_dinov3_qkvb(pretrained: bool = False, **kwargs) -> Eva:\n    
\"\"\"DINOv3 w/ QKV bias enabled (but zero) https://arxiv.org/abs/2508.10104\n    NOTE: Pass global_pool='token' to use CLS-token pooling (matches upstream DINOv3).\n    \"\"\"\n    model_args = dict(\n        patch_size=16,\n        dynamic_img_size=True,\n        embed_dim=1024,\n        depth=24,\n        num_heads=16,\n        qkv_bias=True,\n        init_values=1.0e-5, # layer-scale\n        rope_type='dinov3',\n        rope_temperature=100,\n        use_rot_pos_emb=True,\n        use_abs_pos_emb=False,\n        rope_rotate_half=True,\n        #rope_rescale_coords=2,  # haven't added to interface\n        num_reg_tokens=4,\n        use_fc_norm=False,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n    model = _create_eva('vit_large_patch16_dinov3_qkvb', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_huge_plus_patch16_dinov3(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"DINOv3 H/16 Plus https://arxiv.org/abs/2508.10104\n    NOTE: Pass global_pool='token' to use CLS-token pooling (matches upstream DINOv3).\n    \"\"\"\n    model_args = dict(\n        patch_size=16,\n        dynamic_img_size=True,\n        embed_dim=1280,\n        depth=32,\n        num_heads=20,\n        qkv_bias=False,\n        init_values=1.0e-5, # layer-scale\n        rope_type='dinov3',\n        rope_temperature=100,\n        use_rot_pos_emb=True,\n        use_abs_pos_emb=False,\n        rope_rotate_half=True,\n        swiglu_mlp=True,\n        swiglu_align_to=8,\n        #rope_rescale_coords=2,  # haven't added to interface\n        num_reg_tokens=4,\n        use_fc_norm=False,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n\n    model = _create_eva('vit_huge_plus_patch16_dinov3', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_huge_plus_patch16_dinov3_qkvb(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"DINOv3 H/16 Plus w/ QKV bias enabled (but 
zero) https://arxiv.org/abs/2508.10104\n    NOTE: Pass global_pool='token' to use CLS-token pooling (matches upstream DINOv3).\n    \"\"\"\n    model_args = dict(\n        patch_size=16,\n        dynamic_img_size=True,\n        embed_dim=1280,\n        depth=32,\n        num_heads=20,\n        qkv_bias=True,\n        init_values=1.0e-5, # layer-scale\n        rope_type='dinov3',\n        rope_temperature=100,\n        use_rot_pos_emb=True,\n        use_abs_pos_emb=False,\n        rope_rotate_half=True,\n        swiglu_mlp=True,\n        swiglu_align_to=8,\n        #rope_rescale_coords=2,  # haven't added to interface\n        num_reg_tokens=4,\n        use_fc_norm=False,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n\n    model = _create_eva('vit_huge_plus_patch16_dinov3_qkvb', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n@register_model\ndef vit_7b_patch16_dinov3(pretrained: bool = False, **kwargs) -> Eva:\n    \"\"\"DINOv3 7B/16 https://arxiv.org/abs/2508.10104\n    NOTE: Pass global_pool='token' to use CLS-token pooling (matches upstream DINOv3).\n    \"\"\"\n    model_args = dict(\n        patch_size=16,\n        dynamic_img_size=True,\n        embed_dim=4096,\n        depth=40,\n        num_heads=32,\n        qkv_bias=False,\n        mlp_ratio=2,\n        init_values=1.0e-5, # layer-scale\n        rope_type='dinov3',\n        rope_temperature=100,\n        use_rot_pos_emb=True,\n        use_abs_pos_emb=False,\n        rope_rotate_half=True,\n        swiglu_mlp=True,\n        swiglu_align_to=64,\n        #rope_rescale_coords=2,  # haven't added to interface\n        num_reg_tokens=4,\n        use_fc_norm=False,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n\n    model = _create_eva('vit_7b_patch16_dinov3', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n"
  },
  {
    "path": "timm/models/factory.py",
    "content": "from ._factory import *\n\nimport warnings\nwarnings.warn(f\"Importing from {__name__} is deprecated, please import via timm.models\", FutureWarning)\n"
  },
  {
    "path": "timm/models/fasternet.py",
    "content": "\"\"\"FasterNet\nRun, Don't Walk: Chasing Higher FLOPS for Faster Neural Networks\n- paper: https://arxiv.org/abs/2303.03667\n- code: https://github.com/JierunChen/FasterNet\n\n@article{chen2023run,\n  title={Run, Don't Walk: Chasing Higher FLOPS for Faster Neural Networks},\n  author={Chen, Jierun and Kao, Shiu-hong and He, Hao and Zhuo, Weipeng and Wen, Song and Lee, Chul-Ho and Chan, S-H Gary},\n  journal={arXiv preprint arXiv:2303.03667},\n  year={2023}\n}\n\nModifications by / Copyright 2025 Ryan Hou & Ross Wightman, original copyrights below\n\"\"\"\n# Copyright (c) Microsoft Corporation.\n# Licensed under the MIT License.\n\nfrom functools import partial\nfrom typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import SelectAdaptivePool2d, Linear, DropPath, trunc_normal_, LayerType, calculate_drop_path_rates\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['FasterNet']\n\n\nclass Partial_conv3(nn.Module):\n    def __init__(self, dim: int, n_div: int, forward: str, device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim_conv3 = dim // n_div\n        self.dim_untouched = dim - self.dim_conv3\n        self.partial_conv3 = nn.Conv2d(self.dim_conv3, self.dim_conv3, 3, 1, 1, bias=False, **dd)\n\n        if forward == 'slicing':\n            self.forward = self.forward_slicing\n        elif forward == 'split_cat':\n            self.forward = self.forward_split_cat\n        else:\n            raise NotImplementedError\n\n    def forward_slicing(self, x: torch.Tensor) -> torch.Tensor:\n        # only for inference\n        x = x.clone()   # !!! 
Keep the original input intact for the residual connection later\n        x[:, :self.dim_conv3, :, :] = self.partial_conv3(x[:, :self.dim_conv3, :, :])\n        return x\n\n    def forward_split_cat(self, x: torch.Tensor) -> torch.Tensor:\n        # for training/inference\n        x1, x2 = torch.split(x, [self.dim_conv3, self.dim_untouched], dim=1)\n        x1 = self.partial_conv3(x1)\n        x = torch.cat((x1, x2), 1)\n        return x\n\n\nclass MLPBlock(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            n_div: int,\n            mlp_ratio: float,\n            drop_path: float,\n            layer_scale_init_value: float,\n            act_layer: Type[nn.Module] = partial(nn.ReLU, inplace=True),\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            pconv_fw_type: str = 'split_cat',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        mlp_hidden_dim = int(dim * mlp_ratio)\n\n        self.mlp = nn.Sequential(*[\n            nn.Conv2d(dim, mlp_hidden_dim, 1, bias=False, **dd),\n            norm_layer(mlp_hidden_dim, **dd),\n            act_layer(),\n            nn.Conv2d(mlp_hidden_dim, dim, 1, bias=False, **dd),\n        ])\n\n        self.spatial_mixing = Partial_conv3(dim, n_div, pconv_fw_type, **dd)\n\n        if layer_scale_init_value > 0:\n            self.layer_scale = nn.Parameter(\n                layer_scale_init_value * torch.ones((dim), **dd), requires_grad=True)\n        else:\n            self.layer_scale = None\n\n        self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        shortcut = x\n        x = self.spatial_mixing(x)\n        if self.layer_scale is not None:\n            x = shortcut + self.drop_path(\n                self.layer_scale.unsqueeze(-1).unsqueeze(-1) * self.mlp(x))\n        else:\n            x = shortcut + self.drop_path(self.mlp(x))\n        return x\n\n\nclass Block(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            depth: int,\n            n_div: int,\n            mlp_ratio: float,\n            drop_path: float,\n            layer_scale_init_value: float,\n            act_layer: Type[nn.Module] = partial(nn.ReLU, inplace=True),\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            pconv_fw_type: str = 'split_cat',\n            use_merge: bool = True,\n            merge_size: Union[int, Tuple[int, int]] = 2,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.grad_checkpointing = False\n        self.blocks = nn.Sequential(*[\n            MLPBlock(\n                dim=dim,\n                n_div=n_div,\n                mlp_ratio=mlp_ratio,\n                drop_path=drop_path[i],\n                layer_scale_init_value=layer_scale_init_value,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                pconv_fw_type=pconv_fw_type,\n                **dd,\n            )\n            for i in range(depth)\n        ])\n        self.downsample = PatchMerging(\n            dim=dim // 2,\n            patch_size=merge_size,\n            norm_layer=norm_layer,\n            **dd,\n        ) if use_merge else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.downsample(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = 
self.blocks(x)\n        return x\n\n\nclass PatchEmbed(nn.Module):\n    def __init__(\n            self,\n            in_chans: int,\n            embed_dim: int,\n            patch_size: Union[int, Tuple[int, int]] = 4,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.proj = nn.Conv2d(in_chans, embed_dim, patch_size, patch_size, bias=False, **dd)\n        self.norm = norm_layer(embed_dim, **dd)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        return self.norm(self.proj(x))\n\n\nclass PatchMerging(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            patch_size: Union[int, Tuple[int, int]] = 2,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.reduction = nn.Conv2d(dim, 2 * dim, patch_size, patch_size, bias=False, **dd)\n        self.norm = norm_layer(2 * dim, **dd)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        return self.norm(self.reduction(x))\n\n\nclass FasterNet(nn.Module):\n    def __init__(\n            self,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            embed_dim: int = 96,\n            depths: Union[int, Tuple[int, ...]] = (1, 2, 8, 2),\n            mlp_ratio: float = 2.,\n            n_div: int = 4,\n            patch_size: Union[int, Tuple[int, int]] = 4,\n            merge_size: Union[int, Tuple[int, int]] = 2,\n            patch_norm: bool = True,\n            feature_dim: int = 1280,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.1,\n            layer_scale_init_value: float = 0.,\n            act_layer: Type[nn.Module] = partial(nn.ReLU, inplace=True),\n            
norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            pconv_fw_type: str = 'split_cat',\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert pconv_fw_type in ('split_cat', 'slicing',)\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        if not isinstance(depths, (list, tuple)):\n            depths = (depths)  # it means the model has only one stage\n        self.num_stages = len(depths)\n        self.feature_info = []\n\n        self.patch_embed = PatchEmbed(\n            in_chans=in_chans,\n            embed_dim=embed_dim,\n            patch_size=patch_size,\n            norm_layer=norm_layer if patch_norm else nn.Identity,\n            **dd,\n        )\n        # stochastic depth decay rule\n        dpr = calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)\n\n        # build layers\n        stages_list = []\n        for i in range(self.num_stages):\n            dim = int(embed_dim * 2 ** i)\n            stage = Block(\n                dim=dim,\n                depth=depths[i],\n                n_div=n_div,\n                mlp_ratio=mlp_ratio,\n                drop_path=dpr[i],\n                layer_scale_init_value=layer_scale_init_value,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                pconv_fw_type=pconv_fw_type,\n                use_merge=False if i == 0 else True,\n                merge_size=merge_size,\n                **dd,\n            )\n            stages_list.append(stage)\n            self.feature_info += [dict(num_chs=dim, reduction=2**(i+2), module=f'stages.{i}')]\n        self.stages = nn.Sequential(*stages_list)\n\n        # building last several layers\n        self.num_features = prev_chs = int(embed_dim * 2 ** (self.num_stages - 1))\n        self.head_hidden_size = out_chs = feature_dim # 1280\n        
self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)\n        self.conv_head = nn.Conv2d(prev_chs, out_chs, 1, 1, 0, bias=False, **dd)\n        self.act = act_layer()\n        self.flatten = nn.Flatten(1) if global_pool else nn.Identity()  # don't flatten if pooling disabled\n        self.classifier = Linear(out_chs, num_classes, bias=True, **dd) if num_classes > 0 else nn.Identity()\n        self._initialize_weights()\n\n    def _initialize_weights(self):\n        for name, m in self.named_modules():\n            if isinstance(m, nn.Linear):\n                trunc_normal_(m.weight, std=.02)\n                if isinstance(m, nn.Linear) and m.bias is not None:\n                    nn.init.constant_(m.bias, 0)\n            elif isinstance(m, nn.Conv2d):\n                trunc_normal_(m.weight, std=.02)\n                if m.bias is not None:\n                    nn.init.constant_(m.bias, 0)\n\n    @torch.jit.ignore\n    def no_weight_decay(self) -> Set:\n        return set()\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        matcher = dict(\n            stem=r'^patch_embed',  # stem and embed\n            blocks=r'^stages\\.(\\d+)' if coarse else [\n                (r'^stages\\.(\\d+).downsample', (0,)),\n                (r'^stages\\.(\\d+)\\.blocks\\.(\\d+)', None),\n                (r'^conv_head', (99999,)),\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        for s in self.stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.classifier\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg', device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        # cannot meaningfully change pooling of efficient head after creation\n        self.global_pool = 
SelectAdaptivePool2d(pool_type=global_pool)\n        self.flatten = nn.Flatten(1) if global_pool else nn.Identity()  # don't flatten if pooling disabled\n        self.classifier = Linear(self.head_hidden_size, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.patch_embed(x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            
prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.patch_embed(x)\n        x = self.stages(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        x = self.global_pool(x)\n        x = self.conv_head(x)\n        x = self.act(x)\n        x = self.flatten(x)\n        if self.drop_rate > 0.:\n            x = F.dropout(x, p=self.drop_rate, training=self.training)\n        return x if pre_logits else self.classifier(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(state_dict: Dict[str, torch.Tensor], model: nn.Module) -> Dict[str, torch.Tensor]:\n    # if 'avgpool_pre_head' in state_dict:\n    #     return state_dict\n    #\n    # out_dict = {\n    #     'conv_head.weight': state_dict.pop('avgpool_pre_head.1.weight'),\n    #     'classifier.weight': state_dict.pop('head.weight'),\n    #     'classifier.bias': state_dict.pop('head.bias')\n    # }\n    #\n    # stage_mapping = {\n    #     'stages.1.': 'stages.1.downsample.',\n    #     'stages.2.': 'stages.1.',\n    #     'stages.3.': 'stages.2.downsample.',\n    #     'stages.4.': 'stages.2.',\n    #     'stages.5.': 'stages.3.downsample.',\n    #     'stages.6.': 'stages.3.'\n    # }\n    #\n    # for k, v in state_dict.items():\n    #     for old_prefix, new_prefix in stage_mapping.items():\n    #         if k.startswith(old_prefix):\n    #             k = 
k.replace(old_prefix, new_prefix)\n    #             break\n    #     out_dict[k] = v\n    return state_dict\n\n\ndef _cfg(url: str = '', **kwargs: Any) -> Dict[str, Any]:\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 1.0, 'interpolation': 'bicubic', 'test_crop_pct': 0.9,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.proj', 'classifier': 'classifier',\n        'paper_ids': 'arXiv:2303.03667',\n        'paper_name': \"Run, Don't Walk: Chasing Higher FLOPS for Faster Neural Networks\",\n        'origin_url': 'https://github.com/JierunChen/FasterNet',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'fasternet_t0.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/JierunChen/FasterNet/releases/download/v1.0/fasternet_t0-epoch.281-val_acc1.71.9180.pth',\n    ),\n    'fasternet_t1.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/JierunChen/FasterNet/releases/download/v1.0/fasternet_t1-epoch.291-val_acc1.76.2180.pth',\n    ),\n    'fasternet_t2.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/JierunChen/FasterNet/releases/download/v1.0/fasternet_t2-epoch.289-val_acc1.78.8860.pth',\n    ),\n    'fasternet_s.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/JierunChen/FasterNet/releases/download/v1.0/fasternet_s-epoch.299-val_acc1.81.2840.pth',\n    ),\n    'fasternet_m.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/JierunChen/FasterNet/releases/download/v1.0/fasternet_m-epoch.291-val_acc1.82.9620.pth',\n    ),\n    'fasternet_l.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/JierunChen/FasterNet/releases/download/v1.0/fasternet_l-epoch.299-val_acc1.83.5060.pth',\n    ),\n})\n\n\ndef _create_fasternet(variant: str, 
pretrained: bool = False, **kwargs: Any) -> FasterNet:\n    model = build_model_with_cfg(\n        FasterNet, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=(0, 1, 2, 3), flatten_sequential=True),\n        **kwargs,\n    )\n    return model\n\n\n@register_model\ndef fasternet_t0(pretrained: bool = False, **kwargs: Any) -> FasterNet:\n    model_args = dict(embed_dim=40, depths=(1, 2, 8, 2), drop_path_rate=0.0, act_layer=nn.GELU)\n    return _create_fasternet('fasternet_t0', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef fasternet_t1(pretrained: bool = False, **kwargs: Any) -> FasterNet:\n    model_args = dict(embed_dim=64, depths=(1, 2, 8, 2), drop_path_rate=0.02, act_layer=nn.GELU)\n    return _create_fasternet('fasternet_t1', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef fasternet_t2(pretrained: bool = False, **kwargs: Any) -> FasterNet:\n    model_args = dict(embed_dim=96, depths=(1, 2, 8, 2), drop_path_rate=0.05)\n    return _create_fasternet('fasternet_t2', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef fasternet_s(pretrained: bool = False, **kwargs: Any) -> FasterNet:\n    model_args = dict(embed_dim=128, depths=(1, 2, 13, 2), drop_path_rate=0.1)\n    return _create_fasternet('fasternet_s', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef fasternet_m(pretrained: bool = False, **kwargs: Any) -> FasterNet:\n    model_args = dict(embed_dim=144, depths=(3, 4, 18, 3), drop_path_rate=0.2)\n    return _create_fasternet('fasternet_m', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef fasternet_l(pretrained: bool = False, **kwargs: Any) -> FasterNet:\n    model_args = dict(embed_dim=192, depths=(3, 4, 18, 3), drop_path_rate=0.3)\n    return _create_fasternet('fasternet_l', pretrained=pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/fastvit.py",
    "content": "# FastViT for PyTorch\n#\n# Original implementation and weights from https://github.com/apple/ml-fastvit\n#\n# For licensing see accompanying LICENSE file at https://github.com/apple/ml-fastvit/tree/main\n# Original work is copyright (C) 2023 Apple Inc. All Rights Reserved.\n#\nimport os\nfrom functools import partial\nfrom typing import List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, OPENAI_CLIP_MEAN, OPENAI_CLIP_STD\nfrom timm.layers import (\n    DropPath,\n    calculate_drop_path_rates,\n    trunc_normal_,\n    create_conv2d,\n    ConvNormAct,\n    SqueezeExcite,\n    use_fused_attn,\n    ClassifierHead,\n    LayerNorm2d,\n)\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['FastVit']\n\ndef num_groups(group_size, channels):\n    if not group_size:  # 0 or None\n        return 1  # normal conv with 1 group\n    else:\n        # NOTE group_size == 1 -> depthwise conv\n        assert channels % group_size == 0\n        return channels // group_size\n\n\nclass MobileOneBlock(nn.Module):\n    \"\"\"MobileOne building block.\n\n    This block has a multi-branched architecture at train-time\n    and plain-CNN style architecture at inference time\n    For more details, please refer to our paper:\n    `An Improved One millisecond Mobile Backbone` -\n    https://arxiv.org/pdf/2206.04040.pdf\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int,\n            stride: int = 1,\n            dilation: int = 1,\n            group_size: int = 0,\n            inference_mode: bool = False,\n            use_se: bool = False,\n            use_act: bool = True,\n            use_scale_branch: bool = True,\n            num_conv_branches: int 
= 1,\n            act_layer: Type[nn.Module] = nn.GELU,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Construct a MobileOneBlock module.\n\n        Args:\n            in_chs: Number of channels in the input.\n            out_chs: Number of channels produced by the block.\n            kernel_size: Size of the convolution kernel.\n            stride: Stride size.\n            dilation: Kernel dilation factor.\n            group_size: Convolution group size.\n            inference_mode: If True, instantiates model in inference mode.\n            use_se: Whether to use SE-ReLU activations.\n            use_act: Whether to use activation. Default: ``True``\n            use_scale_branch: Whether to use scale branch. Default: ``True``\n            num_conv_branches: Number of linear conv branches.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.inference_mode = inference_mode\n        self.groups = num_groups(group_size, in_chs)\n        self.stride = stride\n        self.dilation = dilation\n        self.kernel_size = kernel_size\n        self.in_chs = in_chs\n        self.out_chs = out_chs\n        self.num_conv_branches = num_conv_branches\n\n        # Check if SE-ReLU is requested\n        self.se = SqueezeExcite(out_chs, rd_divisor=1, **dd) if use_se else nn.Identity()\n\n        if inference_mode:\n            self.reparam_conv = create_conv2d(\n                in_chs,\n                out_chs,\n                kernel_size=kernel_size,\n                stride=stride,\n                dilation=dilation,\n                groups=self.groups,\n                bias=True,\n                **dd,\n            )\n        else:\n            # Re-parameterizable skip connection\n            self.reparam_conv = None\n\n            self.identity = (\n                nn.BatchNorm2d(num_features=in_chs, **dd)\n                if out_chs == in_chs and stride == 1\n                else 
None\n            )\n\n            # Re-parameterizable conv branches\n            if num_conv_branches > 0:\n                self.conv_kxk = nn.ModuleList([\n                    ConvNormAct(\n                        self.in_chs,\n                        self.out_chs,\n                        kernel_size=kernel_size,\n                        stride=self.stride,\n                        groups=self.groups,\n                        apply_act=False,\n                        **dd,\n                    ) for _ in range(self.num_conv_branches)\n                ])\n            else:\n                self.conv_kxk = None\n\n            # Re-parameterizable scale branch\n            self.conv_scale = None\n            if kernel_size > 1 and use_scale_branch:\n                self.conv_scale = ConvNormAct(\n                    self.in_chs,\n                    self.out_chs,\n                    kernel_size=1,\n                    stride=self.stride,\n                    groups=self.groups,\n                    apply_act=False,\n                    **dd,\n                )\n\n        self.act = act_layer() if use_act else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Apply forward pass.\"\"\"\n        # Inference mode forward pass.\n        if self.reparam_conv is not None:\n            return self.act(self.se(self.reparam_conv(x)))\n\n        # Multi-branched train-time forward pass.\n        # Identity branch output\n        identity_out = 0\n        if self.identity is not None:\n            identity_out = self.identity(x)\n\n        # Scale branch output\n        scale_out = 0\n        if self.conv_scale is not None:\n            scale_out = self.conv_scale(x)\n\n        # Other kxk conv branches\n        out = scale_out + identity_out\n        if self.conv_kxk is not None:\n            for rc in self.conv_kxk:\n                out += rc(x)\n\n        return self.act(self.se(out))\n\n    def reparameterize(self):\n        
\"\"\"Following works like `RepVGG: Making VGG-style ConvNets Great Again` -\n        https://arxiv.org/pdf/2101.03697.pdf. We re-parameterize multi-branched\n        architecture used at training time to obtain a plain CNN-like structure\n        for inference.\n        \"\"\"\n        if self.reparam_conv is not None:\n            return\n\n        kernel, bias = self._get_kernel_bias()\n        self.reparam_conv = create_conv2d(\n            in_channels=self.in_chs,\n            out_channels=self.out_chs,\n            kernel_size=self.kernel_size,\n            stride=self.stride,\n            dilation=self.dilation,\n            groups=self.groups,\n            bias=True,\n        )\n        self.reparam_conv.weight.data = kernel\n        self.reparam_conv.bias.data = bias\n\n        # Delete un-used branches\n        for name, para in self.named_parameters():\n            if 'reparam_conv' in name:\n                continue\n            para.detach_()\n\n        self.__delattr__(\"conv_kxk\")\n        self.__delattr__(\"conv_scale\")\n        if hasattr(self, \"identity\"):\n            self.__delattr__(\"identity\")\n\n        self.inference_mode = True\n\n    def _get_kernel_bias(self) -> Tuple[torch.Tensor, torch.Tensor]:\n        \"\"\"Method to obtain re-parameterized kernel and bias.\n        Reference: https://github.com/DingXiaoH/RepVGG/blob/main/repvgg.py#L83\n\n        Returns:\n            Tuple of (kernel, bias) after fusing branches.\n        \"\"\"\n        # get weights and bias of scale branch\n        kernel_scale = 0\n        bias_scale = 0\n        if self.conv_scale is not None:\n            kernel_scale, bias_scale = self._fuse_bn_tensor(self.conv_scale)\n            # Pad scale branch kernel to match conv branch kernel size.\n            pad = self.kernel_size // 2\n            kernel_scale = torch.nn.functional.pad(kernel_scale, [pad, pad, pad, pad])\n\n        # get weights and bias of skip branch\n        kernel_identity = 0\n        
bias_identity = 0\n        if self.identity is not None:\n            kernel_identity, bias_identity = self._fuse_bn_tensor(self.identity)\n\n        # get weights and bias of conv branches\n        kernel_conv = 0\n        bias_conv = 0\n        if self.conv_kxk is not None:\n            for ix in range(self.num_conv_branches):\n                _kernel, _bias = self._fuse_bn_tensor(self.conv_kxk[ix])\n                kernel_conv += _kernel\n                bias_conv += _bias\n\n        kernel_final = kernel_conv + kernel_scale + kernel_identity\n        bias_final = bias_conv + bias_scale + bias_identity\n        return kernel_final, bias_final\n\n    def _fuse_bn_tensor(\n            self,\n            branch: Union[nn.Sequential, nn.BatchNorm2d]\n    ) -> Tuple[torch.Tensor, torch.Tensor]:\n        \"\"\"Method to fuse batchnorm layer with preceding conv layer.\n        Reference: https://github.com/DingXiaoH/RepVGG/blob/main/repvgg.py#L95\n\n        Args:\n            branch: Sequence of ops to be fused.\n\n        Returns:\n            Tuple of (kernel, bias) after fusing batchnorm.\n        \"\"\"\n        if isinstance(branch, ConvNormAct):\n            kernel = branch.conv.weight\n            running_mean = branch.bn.running_mean\n            running_var = branch.bn.running_var\n            gamma = branch.bn.weight\n            beta = branch.bn.bias\n            eps = branch.bn.eps\n        else:\n            assert isinstance(branch, nn.BatchNorm2d)\n            if not hasattr(self, \"id_tensor\"):\n                input_dim = self.in_chs // self.groups\n                kernel_value = torch.zeros(\n                    (self.in_chs, input_dim, self.kernel_size, self.kernel_size),\n                    dtype=branch.weight.dtype,\n                    device=branch.weight.device,\n                )\n                for i in range(self.in_chs):\n                    kernel_value[\n                        i, i % input_dim, self.kernel_size // 2, self.kernel_size 
// 2\n                    ] = 1\n                self.id_tensor = kernel_value\n            kernel = self.id_tensor\n            running_mean = branch.running_mean\n            running_var = branch.running_var\n            gamma = branch.weight\n            beta = branch.bias\n            eps = branch.eps\n        std = (running_var + eps).sqrt()\n        t = (gamma / std).reshape(-1, 1, 1, 1)\n        return kernel * t, beta - running_mean * gamma / std\n\n\nclass ReparamLargeKernelConv(nn.Module):\n    \"\"\"Building Block of RepLKNet\n\n    This class defines overparameterized large kernel conv block\n    introduced in `RepLKNet <https://arxiv.org/abs/2203.06717>`_\n\n    Reference: https://github.com/DingXiaoH/RepLKNet-pytorch\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int,\n            stride: int,\n            group_size: int,\n            small_kernel: Optional[int] = None,\n            use_se: bool = False,\n            act_layer: Optional[nn.Module] = None,\n            inference_mode: bool = False,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Construct a ReparamLargeKernelConv module.\n\n        Args:\n            in_chs: Number of input channels.\n            out_chs: Number of output channels.\n            kernel_size: Kernel size of the large kernel conv branch.\n            stride: Stride size. Default: 1\n            group_size: Group size. Default: 1\n            small_kernel: Kernel size of small kernel conv branch.\n            act_layer: Activation module. Default: ``nn.GELU``\n            inference_mode: If True, instantiates model in inference mode. 
Default: ``False``\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.stride = stride\n        self.groups = num_groups(group_size, in_chs)\n        self.in_chs = in_chs\n        self.out_chs = out_chs\n\n        self.kernel_size = kernel_size\n        self.small_kernel = small_kernel\n        if inference_mode:\n            self.reparam_conv = create_conv2d(\n                in_chs,\n                out_chs,\n                kernel_size=kernel_size,\n                stride=stride,\n                dilation=1,\n                groups=self.groups,\n                bias=True,\n                **dd,\n            )\n        else:\n            self.reparam_conv = None\n            self.large_conv = ConvNormAct(\n                in_chs,\n                out_chs,\n                kernel_size=kernel_size,\n                stride=self.stride,\n                groups=self.groups,\n                apply_act=False,\n                **dd,\n            )\n            if small_kernel is not None:\n                assert (\n                    small_kernel <= kernel_size\n                ), \"The kernel size for re-param cannot be larger than the large kernel!\"\n                self.small_conv = ConvNormAct(\n                    in_chs,\n                    out_chs,\n                    kernel_size=small_kernel,\n                    stride=self.stride,\n                    groups=self.groups,\n                    apply_act=False,\n                    **dd,\n                )\n        self.se = SqueezeExcite(out_chs, rd_ratio=0.25, **dd) if use_se else nn.Identity()\n        # FIXME output of this act was not used in original impl, likely due to bug\n        self.act = act_layer() if act_layer is not None else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        if self.reparam_conv is not None:\n            out = self.reparam_conv(x)\n        else:\n            out = self.large_conv(x)\n          
  if self.small_conv is not None:\n                out = out + self.small_conv(x)\n        out = self.se(out)\n        out = self.act(out)\n        return out\n\n    def get_kernel_bias(self) -> Tuple[torch.Tensor, torch.Tensor]:\n        \"\"\"Method to obtain re-parameterized kernel and bias.\n        Reference: https://github.com/DingXiaoH/RepLKNet-pytorch\n\n        Returns:\n            Tuple of (kernel, bias) after fusing branches.\n        \"\"\"\n        eq_k, eq_b = self._fuse_bn(self.large_conv.conv, self.large_conv.bn)\n        if hasattr(self, \"small_conv\"):\n            small_k, small_b = self._fuse_bn(self.small_conv.conv, self.small_conv.bn)\n            eq_b += small_b\n            eq_k += nn.functional.pad(\n                small_k, [(self.kernel_size - self.small_kernel) // 2] * 4\n            )\n        return eq_k, eq_b\n\n    def reparameterize(self) -> None:\n        \"\"\"\n        Following works like `RepVGG: Making VGG-style ConvNets Great Again` -\n        https://arxiv.org/pdf/2101.03697.pdf. 
We re-parameterize multi-branched\n        architecture used at training time to obtain a plain CNN-like structure\n        for inference.\n        \"\"\"\n        eq_k, eq_b = self.get_kernel_bias()\n        self.reparam_conv = create_conv2d(\n            self.in_chs,\n            self.out_chs,\n            kernel_size=self.kernel_size,\n            stride=self.stride,\n            groups=self.groups,\n            bias=True,\n        )\n\n        self.reparam_conv.weight.data = eq_k\n        self.reparam_conv.bias.data = eq_b\n        self.__delattr__(\"large_conv\")\n        if hasattr(self, \"small_conv\"):\n            self.__delattr__(\"small_conv\")\n\n    @staticmethod\n    def _fuse_bn(\n            conv: nn.Conv2d,\n            bn: nn.BatchNorm2d\n    ) -> Tuple[torch.Tensor, torch.Tensor]:\n        \"\"\"Method to fuse batchnorm layer with conv layer.\n\n        Args:\n            conv: Convolutional kernel weights.\n            bn: Batchnorm 2d layer.\n\n        Returns:\n            Tuple of (kernel, bias) after fusing batchnorm.\n        \"\"\"\n        kernel = conv.weight\n        running_mean = bn.running_mean\n        running_var = bn.running_var\n        gamma = bn.weight\n        beta = bn.bias\n        eps = bn.eps\n        std = (running_var + eps).sqrt()\n        t = (gamma / std).reshape(-1, 1, 1, 1)\n        return kernel * t, beta - running_mean * gamma / std\n\n\ndef convolutional_stem(\n        in_chs: int,\n        out_chs: int,\n        act_layer: Type[nn.Module] = nn.GELU,\n        inference_mode: bool = False,\n        use_scale_branch: bool = True,\n        device=None,\n        dtype=None,\n) -> nn.Sequential:\n    \"\"\"Build convolutional stem with MobileOne blocks.\n\n    Args:\n        in_chs: Number of input channels.\n        out_chs: Number of output channels.\n        inference_mode: Flag to instantiate model in inference mode. 
Default: ``False``\n\n    Returns:\n        nn.Sequential object with stem elements.\n    \"\"\"\n    dd = {'device': device, 'dtype': dtype}\n    return nn.Sequential(\n        MobileOneBlock(\n            in_chs=in_chs,\n            out_chs=out_chs,\n            kernel_size=3,\n            stride=2,\n            act_layer=act_layer,\n            inference_mode=inference_mode,\n            use_scale_branch=use_scale_branch,\n            **dd,\n        ),\n        MobileOneBlock(\n            in_chs=out_chs,\n            out_chs=out_chs,\n            kernel_size=3,\n            stride=2,\n            group_size=1,\n            act_layer=act_layer,\n            inference_mode=inference_mode,\n            use_scale_branch=use_scale_branch,\n            **dd,\n        ),\n        MobileOneBlock(\n            in_chs=out_chs,\n            out_chs=out_chs,\n            kernel_size=1,\n            stride=1,\n            act_layer=act_layer,\n            inference_mode=inference_mode,\n            use_scale_branch=use_scale_branch,\n            **dd,\n        ),\n    )\n\n\nclass Attention(nn.Module):\n    \"\"\"Multi-headed Self Attention module.\n\n    Source modified from:\n    https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vision_transformer.py\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            head_dim: int = 32,\n            qkv_bias: bool = False,\n            attn_drop: float = 0.0,\n            proj_drop: float = 0.0,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Build MHSA module that can handle 3D or 4D input tensors.\n\n        Args:\n            dim: Number of embedding dimensions.\n            head_dim: Number of hidden dimensions per head. Default: ``32``\n            qkv_bias: Use bias or not. 
Default: ``False``\n            attn_drop: Dropout rate for attention tensor.\n            proj_drop: Dropout rate for projection tensor.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert dim % head_dim == 0, \"dim should be divisible by head_dim\"\n        self.head_dim = head_dim\n        self.num_heads = dim // head_dim\n        self.scale = head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n\n        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        B, C, H, W = x.shape\n        N = H * W\n        x = x.flatten(2).transpose(-2, -1)  # (B, N, C)\n        qkv = (\n            self.qkv(x)\n            .reshape(B, N, 3, self.num_heads, self.head_dim)\n            .permute(2, 0, 3, 1, 4)\n        )\n        q, k, v = qkv.unbind(0)  # make torchscript happy (cannot use tensor as tuple)\n\n        if self.fused_attn:\n            x = torch.nn.functional.scaled_dot_product_attention(\n                q, k, v,\n                dropout_p=self.attn_drop.p if self.training else 0.,\n            )\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x = attn @ v\n\n        x = x.transpose(1, 2).reshape(B, N, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        x = x.transpose(-2, -1).reshape(B, C, H, W)\n\n        return x\n\n\nclass PatchEmbed(nn.Module):\n    \"\"\"Convolutional patch embedding layer.\"\"\"\n\n    def __init__(\n            self,\n            patch_size: int,\n            stride: int,\n            in_chs: int,\n            embed_dim: int,\n            act_layer: Type[nn.Module] = nn.GELU,\n            lkc_use_act: bool = 
False,\n            use_se: bool = False,\n            inference_mode: bool = False,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Build patch embedding layer.\n\n        Args:\n            patch_size: Patch size for embedding computation.\n            stride: Stride for convolutional embedding layer.\n            in_chs: Number of channels of input tensor.\n            embed_dim: Number of embedding dimensions.\n            inference_mode: Flag to instantiate model in inference mode. Default: ``False``\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.proj = nn.Sequential(\n            ReparamLargeKernelConv(\n                in_chs=in_chs,\n                out_chs=embed_dim,\n                kernel_size=patch_size,\n                stride=stride,\n                group_size=1,\n                small_kernel=3,\n                use_se=use_se,\n                act_layer=act_layer if lkc_use_act else None,  # NOTE original weights didn't use this act\n                inference_mode=inference_mode,\n                **dd,\n            ),\n            MobileOneBlock(\n                in_chs=embed_dim,\n                out_chs=embed_dim,\n                kernel_size=1,\n                stride=1,\n                use_se=False,\n                act_layer=act_layer,\n                inference_mode=inference_mode,\n                **dd,\n            )\n        )\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.proj(x)\n        return x\n\n\nclass LayerScale2d(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            init_values: float = 1e-5,\n            inplace: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        self.inplace = inplace\n        self.gamma = nn.Parameter(init_values * torch.ones(dim, 1, 1, device=device, dtype=dtype))\n\n    def forward(self, x):\n       
 return x.mul_(self.gamma) if self.inplace else x * self.gamma\n\n\nclass RepMixer(nn.Module):\n    \"\"\"Reparameterizable token mixer.\n\n    For more details, please refer to our paper:\n    `FastViT: A Fast Hybrid Vision Transformer using Structural Reparameterization <https://arxiv.org/pdf/2303.14189.pdf>`_\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            kernel_size: int = 3,\n            layer_scale_init_value: Optional[float] = 1e-5,\n            inference_mode: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Build RepMixer Module.\n\n        Args:\n            dim: Input feature map dimension. :math:`C_{in}` from an expected input of size :math:`(B, C_{in}, H, W)`.\n            kernel_size: Kernel size for spatial mixing. Default: 3\n            layer_scale_init_value: Initial value for layer scale. Default: 1e-5\n            inference_mode: If True, instantiates model in inference mode. Default: ``False``\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.kernel_size = kernel_size\n        self.inference_mode = inference_mode\n\n        if inference_mode:\n            self.reparam_conv = nn.Conv2d(\n                self.dim,\n                self.dim,\n                kernel_size=self.kernel_size,\n                stride=1,\n                padding=self.kernel_size // 2,\n                groups=self.dim,\n                bias=True,\n                **dd,\n            )\n        else:\n            self.reparam_conv = None\n            self.norm = MobileOneBlock(\n                dim,\n                dim,\n                kernel_size,\n                group_size=1,\n                use_act=False,\n                use_scale_branch=False,\n                num_conv_branches=0,\n                **dd,\n            )\n            self.mixer = MobileOneBlock(\n                dim,\n                dim,\n    
            kernel_size,\n                group_size=1,\n                use_act=False,\n                **dd,\n            )\n            if layer_scale_init_value is not None:\n                self.layer_scale = LayerScale2d(dim, layer_scale_init_value, **dd)\n            else:\n                self.layer_scale = nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        if self.reparam_conv is not None:\n            x = self.reparam_conv(x)\n        else:\n            x = x + self.layer_scale(self.mixer(x) - self.norm(x))\n        return x\n\n    def reparameterize(self) -> None:\n        \"\"\"Reparameterize mixer and norm into a single\n        convolutional layer for efficient inference.\n        \"\"\"\n        if self.inference_mode:\n            return\n\n        self.mixer.reparameterize()\n        self.norm.reparameterize()\n\n        if isinstance(self.layer_scale, LayerScale2d):\n            w = self.mixer.id_tensor + self.layer_scale.gamma.unsqueeze(-1) * (\n                self.mixer.reparam_conv.weight - self.norm.reparam_conv.weight\n            )\n            b = torch.squeeze(self.layer_scale.gamma) * (\n                self.mixer.reparam_conv.bias - self.norm.reparam_conv.bias\n            )\n        else:\n            w = (\n                self.mixer.id_tensor\n                + self.mixer.reparam_conv.weight\n                - self.norm.reparam_conv.weight\n            )\n            b = self.mixer.reparam_conv.bias - self.norm.reparam_conv.bias\n\n        self.reparam_conv = create_conv2d(\n            self.dim,\n            self.dim,\n            kernel_size=self.kernel_size,\n            stride=1,\n            groups=self.dim,\n            bias=True,\n        )\n        self.reparam_conv.weight.data = w\n        self.reparam_conv.bias.data = b\n\n        for name, para in self.named_parameters():\n            if 'reparam_conv' in name:\n                continue\n            para.detach_()\n        
self.__delattr__(\"mixer\")\n        self.__delattr__(\"norm\")\n        self.__delattr__(\"layer_scale\")\n\n\nclass ConvMlp(nn.Module):\n    \"\"\"Convolutional FFN Module.\"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            hidden_channels: Optional[int] = None,\n            out_chs: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.GELU,\n            drop: float = 0.0,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Build convolutional FFN module.\n\n        Args:\n            in_chs: Number of input channels.\n            hidden_channels: Number of channels after expansion. Default: None\n            out_chs: Number of output channels. Default: None\n            act_layer: Activation layer. Default: ``GELU``\n            drop: Dropout rate. Default: ``0.0``.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_chs = out_chs or in_chs\n        hidden_channels = hidden_channels or in_chs\n        self.conv = ConvNormAct(\n            in_chs,\n            out_chs,\n            kernel_size=7,\n            groups=in_chs,\n            apply_act=False,\n            **dd,\n        )\n        self.fc1 = nn.Conv2d(in_chs, hidden_channels, kernel_size=1, **dd)\n        self.act = act_layer()\n        self.fc2 = nn.Conv2d(hidden_channels, out_chs, kernel_size=1, **dd)\n        self.drop = nn.Dropout(drop)\n        self.apply(self._init_weights)\n\n    def _init_weights(self, m: nn.Module) -> None:\n        if isinstance(m, nn.Conv2d):\n            trunc_normal_(m.weight, std=0.02)\n            if m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.conv(x)\n        x = self.fc1(x)\n        x = self.act(x)\n        x = self.drop(x)\n        x = self.fc2(x)\n        x = self.drop(x)\n        return x\n\n\nclass RepConditionalPosEnc(nn.Module):\n    
\"\"\"Implementation of conditional positional encoding.\n\n    For more details refer to paper:\n    `Conditional Positional Encodings for Vision Transformers <https://arxiv.org/pdf/2102.10882.pdf>`_\n\n    In our implementation, we can reparameterize this module to eliminate a skip connection.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            dim_out: Optional[int] = None,\n            spatial_shape: Union[int, Tuple[int, int]] = (7, 7),\n            inference_mode: bool = False,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Build reparameterizable conditional positional encoding\n\n        Args:\n            dim: Number of input channels.\n            dim_out: Number of embedding dimensions. Default: 768\n            spatial_shape: Spatial shape of kernel for positional encoding. Default: (7, 7)\n            inference_mode: Flag to instantiate block in inference mode. Default: ``False``\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if isinstance(spatial_shape, int):\n            spatial_shape = tuple([spatial_shape] * 2)\n        assert isinstance(spatial_shape, Tuple), (\n            f'\"spatial_shape\" must by a sequence or int, '\n            f\"get {type(spatial_shape)} instead.\"\n        )\n        assert len(spatial_shape) == 2, (\n            f'Length of \"spatial_shape\" should be 2, '\n            f\"got {len(spatial_shape)} instead.\"\n        )\n\n        self.spatial_shape = spatial_shape\n        self.dim = dim\n        self.dim_out = dim_out or dim\n        self.groups = dim\n\n        if inference_mode:\n            self.reparam_conv = nn.Conv2d(\n                self.dim,\n                self.dim_out,\n                kernel_size=self.spatial_shape,\n                stride=1,\n                padding=spatial_shape[0] // 2,\n                groups=self.groups,\n                bias=True,\n                **dd,\n       
     )\n        else:\n            self.reparam_conv = None\n            self.pos_enc = nn.Conv2d(\n                self.dim,\n                self.dim_out,\n                spatial_shape,\n                1,\n                int(spatial_shape[0] // 2),\n                groups=self.groups,\n                bias=True,\n                **dd,\n            )\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        if self.reparam_conv is not None:\n            x = self.reparam_conv(x)\n        else:\n            x = self.pos_enc(x) + x\n        return x\n\n    def reparameterize(self) -> None:\n        # Build equivalent Id tensor\n        input_dim = self.dim // self.groups\n        kernel_value = torch.zeros(\n            (\n                self.dim,\n                input_dim,\n                self.spatial_shape[0],\n                self.spatial_shape[1],\n            ),\n            dtype=self.pos_enc.weight.dtype,\n            device=self.pos_enc.weight.device,\n        )\n        for i in range(self.dim):\n            kernel_value[\n                i,\n                i % input_dim,\n                self.spatial_shape[0] // 2,\n                self.spatial_shape[1] // 2,\n            ] = 1\n        id_tensor = kernel_value\n\n        # Reparameterize Id tensor and conv\n        w_final = id_tensor + self.pos_enc.weight\n        b_final = self.pos_enc.bias\n\n        # Introduce reparam conv\n        self.reparam_conv = nn.Conv2d(\n            self.dim,\n            self.dim_out,\n            kernel_size=self.spatial_shape,\n            stride=1,\n            padding=int(self.spatial_shape[0] // 2),\n            groups=self.groups,\n            bias=True,\n        )\n        self.reparam_conv.weight.data = w_final\n        self.reparam_conv.bias.data = b_final\n\n        for name, para in self.named_parameters():\n            if 'reparam_conv' in name:\n                continue\n            para.detach_()\n        
self.__delattr__(\"pos_enc\")\n\n\nclass RepMixerBlock(nn.Module):\n    \"\"\"Implementation of Metaformer block with RepMixer as token mixer.\n\n    For more details on Metaformer structure, please refer to:\n    `MetaFormer Is Actually What You Need for Vision <https://arxiv.org/pdf/2111.11418.pdf>`_\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            kernel_size: int = 3,\n            mlp_ratio: float = 4.0,\n            act_layer: Type[nn.Module] = nn.GELU,\n            proj_drop: float = 0.0,\n            drop_path: float = 0.0,\n            layer_scale_init_value: float = 1e-5,\n            inference_mode: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Build RepMixer Block.\n\n        Args:\n            dim: Number of embedding dimensions.\n            kernel_size: Kernel size for repmixer. Default: 3\n            mlp_ratio: MLP expansion ratio. Default: 4.0\n            act_layer: Activation layer. Default: ``nn.GELU``\n            proj_drop: Dropout rate. Default: 0.0\n            drop_path: Drop path rate. Default: 0.0\n            layer_scale_init_value: Layer scale value at initialization. Default: 1e-5\n            inference_mode: Flag to instantiate block in inference mode. 
Default: ``False``\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.token_mixer = RepMixer(\n            dim,\n            kernel_size=kernel_size,\n            layer_scale_init_value=layer_scale_init_value,\n            inference_mode=inference_mode,\n            **dd,\n        )\n\n        self.mlp = ConvMlp(\n            in_chs=dim,\n            hidden_channels=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n        if layer_scale_init_value is not None:\n            self.layer_scale = LayerScale2d(dim, layer_scale_init_value, **dd)\n        else:\n            self.layer_scale = nn.Identity()\n        self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()\n\n    def forward(self, x):\n        x = self.token_mixer(x)\n        x = x + self.drop_path(self.layer_scale(self.mlp(x)))\n        return x\n\n\nclass AttentionBlock(nn.Module):\n    \"\"\"Implementation of metaformer block with MHSA as token mixer.\n\n    For more details on Metaformer structure, please refer to:\n    `MetaFormer Is Actually What You Need for Vision <https://arxiv.org/pdf/2111.11418.pdf>`_\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            mlp_ratio: float = 4.0,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            proj_drop: float = 0.0,\n            drop_path: float = 0.0,\n            layer_scale_init_value: float = 1e-5,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Build Attention Block.\n\n        Args:\n            dim: Number of embedding dimensions.\n            mlp_ratio: MLP expansion ratio. Default: 4.0\n            act_layer: Activation layer. Default: ``nn.GELU``\n            norm_layer: Normalization layer. Default: ``nn.BatchNorm2d``\n            proj_drop: Dropout rate. 
Default: 0.0\n            drop_path: Drop path rate. Default: 0.0\n            layer_scale_init_value: Layer scale value at initialization. Default: 1e-5\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.norm = norm_layer(dim, **dd)\n        self.token_mixer = Attention(dim=dim, **dd)\n        if layer_scale_init_value is not None:\n            self.layer_scale_1 = LayerScale2d(dim, layer_scale_init_value, **dd)\n        else:\n            self.layer_scale_1 = nn.Identity()\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()\n\n        self.mlp = ConvMlp(\n            in_chs=dim,\n            hidden_channels=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n        if layer_scale_init_value is not None:\n            self.layer_scale_2 = LayerScale2d(dim, layer_scale_init_value, **dd)\n        else:\n            self.layer_scale_2 = nn.Identity()\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()\n\n    def forward(self, x):\n        x = x + self.drop_path1(self.layer_scale_1(self.token_mixer(self.norm(x))))\n        x = x + self.drop_path2(self.layer_scale_2(self.mlp(x)))\n        return x\n\n\nclass FastVitStage(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            dim_out: int,\n            depth: int,\n            token_mixer_type: str,\n            downsample: bool = True,\n            se_downsample: bool = False,\n            down_patch_size: int = 7,\n            down_stride: int = 2,\n            pos_emb_layer: Optional[nn.Module] = None,\n            kernel_size: int = 3,\n            mlp_ratio: float = 4.0,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            proj_drop_rate: float = 0.0,\n            drop_path_rate: Union[List[float], float] = 0.0,\n            
layer_scale_init_value: Optional[float] = 1e-5,\n            lkc_use_act: bool = False,\n            inference_mode: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"FastViT stage.\n\n        Args:\n            dim: Number of embedding dimensions.\n            depth: Number of blocks in stage\n            token_mixer_type: Token mixer type.\n            kernel_size: Kernel size for repmixer.\n            mlp_ratio: MLP expansion ratio.\n            act_layer: Activation layer.\n            norm_layer: Normalization layer.\n            proj_drop_rate: Dropout rate.\n            drop_path_rate: Drop path rate.\n            layer_scale_init_value: Layer scale value at initialization.\n            inference_mode: Flag to instantiate block in inference mode.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.grad_checkpointing = False\n\n        if downsample:\n            self.downsample = PatchEmbed(\n                patch_size=down_patch_size,\n                stride=down_stride,\n                in_chs=dim,\n                embed_dim=dim_out,\n                use_se=se_downsample,\n                act_layer=act_layer,\n                lkc_use_act=lkc_use_act,\n                inference_mode=inference_mode,\n                **dd,\n            )\n        else:\n            assert dim == dim_out\n            self.downsample = nn.Identity()\n\n        if pos_emb_layer is not None:\n            self.pos_emb = pos_emb_layer(dim_out, inference_mode=inference_mode, **dd)\n        else:\n            self.pos_emb = nn.Identity()\n\n        blocks = []\n        for block_idx in range(depth):\n            if token_mixer_type == \"repmixer\":\n                blocks.append(RepMixerBlock(\n                    dim_out,\n                    kernel_size=kernel_size,\n                    mlp_ratio=mlp_ratio,\n                    act_layer=act_layer,\n                    
proj_drop=proj_drop_rate,\n                    drop_path=drop_path_rate[block_idx],\n                    layer_scale_init_value=layer_scale_init_value,\n                    inference_mode=inference_mode,\n                    **dd,\n                ))\n            elif token_mixer_type == \"attention\":\n                blocks.append(AttentionBlock(\n                    dim_out,\n                    mlp_ratio=mlp_ratio,\n                    act_layer=act_layer,\n                    norm_layer=norm_layer,\n                    proj_drop=proj_drop_rate,\n                    drop_path=drop_path_rate[block_idx],\n                    layer_scale_init_value=layer_scale_init_value,\n                    **dd,\n                ))\n            else:\n                raise ValueError(\n                    \"Token mixer type: {} not supported\".format(token_mixer_type)\n                )\n        self.blocks = nn.Sequential(*blocks)\n\n    def forward(self, x):\n        x = self.downsample(x)\n        x = self.pos_emb(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        return x\n\n\nclass FastVit(nn.Module):\n    fork_feat: torch.jit.Final[bool]\n\n    \"\"\"\n    This class implements `FastViT architecture <https://arxiv.org/pdf/2303.14189.pdf>`_\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chans: int = 3,\n            layers: Tuple[int, ...] = (2, 2, 6, 2),\n            token_mixers: Tuple[str, ...] = (\"repmixer\", \"repmixer\", \"repmixer\", \"repmixer\"),\n            embed_dims: Tuple[int, ...] = (64, 128, 256, 512),\n            mlp_ratios: Tuple[float, ...] = (4,) * 4,\n            downsamples: Tuple[bool, ...] = (False, True, True, True),\n            se_downsamples: Tuple[bool, ...] 
= (False, False, False, False),\n            repmixer_kernel_size: int = 3,\n            num_classes: int = 1000,\n            pos_embs: Tuple[Optional[nn.Module], ...] = (None,) * 4,\n            down_patch_size: int = 7,\n            down_stride: int = 2,\n            drop_rate: float = 0.0,\n            proj_drop_rate: float = 0.0,\n            drop_path_rate: float = 0.0,\n            layer_scale_init_value: float = 1e-5,\n            lkc_use_act: bool = False,\n            stem_use_scale_branch: bool = True,\n            fork_feat: bool = False,\n            cls_ratio: float = 2.0,\n            global_pool: str = 'avg',\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            act_layer: Type[nn.Module] = nn.GELU,\n            inference_mode: bool = False,\n            device=None,\n            dtype=None,\n    ) -> None:\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = 0 if fork_feat else num_classes\n        self.fork_feat = fork_feat\n        self.global_pool = global_pool\n        self.feature_info = []\n\n        # Convolutional stem\n        self.stem = convolutional_stem(\n            in_chans,\n            embed_dims[0],\n            act_layer,\n            inference_mode,\n            use_scale_branch=stem_use_scale_branch,\n            **dd,\n        )\n\n        # Build the main stages of the network architecture\n        prev_dim = embed_dims[0]\n        scale = 1\n        dpr = calculate_drop_path_rates(drop_path_rate, layers, stagewise=True)\n        stages = []\n        for i in range(len(layers)):\n            downsample = downsamples[i] or prev_dim != embed_dims[i]\n            stage = FastVitStage(\n                dim=prev_dim,\n                dim_out=embed_dims[i],\n                depth=layers[i],\n                downsample=downsample,\n                se_downsample=se_downsamples[i],\n                down_patch_size=down_patch_size,\n                
down_stride=down_stride,\n                pos_emb_layer=pos_embs[i],\n                token_mixer_type=token_mixers[i],\n                kernel_size=repmixer_kernel_size,\n                mlp_ratio=mlp_ratios[i],\n                act_layer=act_layer,\n                norm_layer=norm_layer,\n                proj_drop_rate=proj_drop_rate,\n                drop_path_rate=dpr[i],\n                layer_scale_init_value=layer_scale_init_value,\n                lkc_use_act=lkc_use_act,\n                inference_mode=inference_mode,\n                **dd,\n            )\n            stages.append(stage)\n            prev_dim = embed_dims[i]\n            if downsample:\n                scale *= 2\n            self.feature_info += [dict(num_chs=prev_dim, reduction=4 * scale, module=f'stages.{i}')]\n        self.stages = nn.Sequential(*stages)\n        self.num_stages = len(self.stages)\n        self.num_features = self.head_hidden_size = prev_dim\n\n        # For segmentation and detection, extract intermediate output\n        if self.fork_feat:\n            # Add a norm layer for each output. self.stages is slightly different than self.network\n            # in the original code, the PatchEmbed layer is part of self.stages in this code where\n            # it was part of self.network in the original code. So we do not need to skip out indices.\n            self.out_indices = [0, 1, 2, 3]\n            for i_emb, i_layer in enumerate(self.out_indices):\n                if i_emb == 0 and os.environ.get(\"FORK_LAST3\", None):\n                    \"\"\"For RetinaNet, `start_level=1`. 
The first norm layer will not used.\n                    cmd: `FORK_LAST3=1 python -m torch.distributed.launch ...`\n                    \"\"\"\n                    layer = nn.Identity()\n                else:\n                    layer = norm_layer(embed_dims[i_emb], **dd)\n                layer_name = f\"norm{i_layer}\"\n                self.add_module(layer_name, layer)\n        else:\n            # Classifier head\n            self.num_features = self.head_hidden_size = final_features = int(embed_dims[-1] * cls_ratio)\n            self.final_conv = MobileOneBlock(\n                in_chs=embed_dims[-1],\n                out_chs=final_features,\n                kernel_size=3,\n                stride=1,\n                group_size=1,\n                inference_mode=inference_mode,\n                use_se=True,\n                act_layer=act_layer,\n                num_conv_branches=1,\n                **dd,\n            )\n            self.head = ClassifierHead(\n                final_features,\n                num_classes,\n                pool_type=global_pool,\n                drop_rate=drop_rate,\n                **dd,\n            )\n\n        self.apply(self._init_weights)\n\n    def _init_weights(self, m: nn.Module) -> None:\n        \"\"\"Init. 
for classification\"\"\"\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=0.02)\n            if isinstance(m, nn.Linear) and m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return set()\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^stem',  # stem and embed\n            blocks=r'^stages\\.(\\d+)' if coarse else [\n                (r'^stages\\.(\\d+).downsample', (0,)),\n                (r'^stages\\.(\\d+).pos_emb', (0,)),\n                (r'^stages\\.(\\d+)\\.\\w+\\.(\\d+)', None),\n            ]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        for s in self.stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n     
   Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.stem(x)\n        last_idx = self.num_stages - 1\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n        feat_idx = 0\n        for feat_idx, stage in enumerate(stages):\n            x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        if feat_idx == last_idx:\n            x = self.final_conv(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        # input embedding\n        x = self.stem(x)\n        outs = []\n        for idx, block in enumerate(self.stages):\n            x = block(x)\n            if self.fork_feat:\n                if idx in self.out_indices:\n                    norm_layer = getattr(self, f\"norm{idx}\")\n                    x_out = norm_layer(x)\n                    outs.append(x_out)\n        if self.fork_feat:\n            # output the features of four stages for dense prediction\n            return outs\n        x = self.final_conv(x)\n       
 return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False):\n        return self.head(x, pre_logits=True) if pre_logits else self.head(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.forward_features(x)\n        if self.fork_feat:\n            return x\n        x = self.forward_head(x)\n        return x\n\n\ndef _cfg(url=\"\", **kwargs):\n    return {\n        \"url\": url,\n        \"num_classes\": 1000,\n        \"input_size\": (3, 256, 256),\n        \"pool_size\": (8, 8),\n        \"crop_pct\": 0.9,\n        \"interpolation\": \"bicubic\",\n        \"mean\": IMAGENET_DEFAULT_MEAN,\n        \"license\": \"fastvit-license\",\n        \"std\": IMAGENET_DEFAULT_STD,\n        'first_conv': ('stem.0.conv_kxk.0.conv', 'stem.0.conv_scale.conv'),\n        \"classifier\": \"head.fc\",\n        **kwargs,\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    \"fastvit_t8.apple_in1k\": _cfg(\n        hf_hub_id='timm/'),\n    \"fastvit_t12.apple_in1k\": _cfg(\n        hf_hub_id='timm/'),\n\n    \"fastvit_s12.apple_in1k\": _cfg(\n        hf_hub_id='timm/'),\n    \"fastvit_sa12.apple_in1k\": _cfg(\n        hf_hub_id='timm/'),\n    \"fastvit_sa24.apple_in1k\": _cfg(\n        hf_hub_id='timm/'),\n    \"fastvit_sa36.apple_in1k\": _cfg(\n        hf_hub_id='timm/'),\n\n    \"fastvit_ma36.apple_in1k\": _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95),\n\n    \"fastvit_t8.apple_dist_in1k\": _cfg(\n        hf_hub_id='timm/'),\n    \"fastvit_t12.apple_dist_in1k\": _cfg(\n        hf_hub_id='timm/'),\n\n    \"fastvit_s12.apple_dist_in1k\": _cfg(\n        hf_hub_id='timm/',),\n    \"fastvit_sa12.apple_dist_in1k\": _cfg(\n        hf_hub_id='timm/',),\n    \"fastvit_sa24.apple_dist_in1k\": _cfg(\n        hf_hub_id='timm/',),\n    \"fastvit_sa36.apple_dist_in1k\": _cfg(\n        hf_hub_id='timm/',),\n\n    \"fastvit_ma36.apple_dist_in1k\": _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95\n    ),\n\n    
\"fastvit_mci0.apple_mclip\": _cfg(\n        hf_hub_id='apple/mobileclip_s0_timm',\n        url='https://docs-assets.developer.apple.com/ml-research/datasets/mobileclip/mobileclip_s0.pt',\n        crop_pct=0.95,\n        num_classes=512,  # CLIP proj dim\n        mean=(0., 0., 0.), std=(1., 1., 1.), license='apple-amlr'\n    ),\n    \"fastvit_mci1.apple_mclip\": _cfg(\n        hf_hub_id='apple/mobileclip_s1_timm',\n        url='https://docs-assets.developer.apple.com/ml-research/datasets/mobileclip/mobileclip_s1.pt',\n        crop_pct=0.95,\n        num_classes=512,  # CLIP proj dim\n        mean=(0., 0., 0.), std=(1., 1., 1.), license='apple-amlr'\n    ),\n    \"fastvit_mci2.apple_mclip\": _cfg(\n        hf_hub_id='apple/mobileclip_s2_timm',\n        url='https://docs-assets.developer.apple.com/ml-research/datasets/mobileclip/mobileclip_s2.pt',\n        crop_pct=0.95,\n        num_classes=512,  # CLIP proj dim\n        mean=(0., 0., 0.), std=(1., 1., 1.), license='apple-amlr'\n    ),\n\n    \"fastvit_mci0.apple_mclip2_dfndr2b\": _cfg(\n        hf_hub_id='timm/',\n        crop_pct=1.0,\n        num_classes=512,  # CLIP proj dim\n        mean=(0., 0., 0.), std=(1., 1., 1.),\n        license='apple-amlr'\n    ),\n    \"fastvit_mci2.apple_mclip2_dfndr2b\": _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95,\n        num_classes=512,  # CLIP proj dim\n        mean=(0., 0., 0.), std=(1., 1., 1.),\n        license='apple-amlr'\n    ),\n    \"fastvit_mci3.apple_mclip2_dfndr2b\": _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95,\n        num_classes=768,  # CLIP proj dim\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        pool_size=(4, 4),\n        first_conv='stem.0.conv_kxk.0.conv',\n        license='apple-amlr'\n    ),\n    \"fastvit_mci4.apple_mclip2_dfndr2b\": _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95,\n        num_classes=768,  # CLIP proj dim\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        pool_size=(4, 4),\n      
  first_conv='stem.0.conv_kxk.0.conv',\n        license='apple-amlr'\n    ),\n})\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    \"\"\" Remap original checkpoints -> timm \"\"\"\n    if 'stem.0.conv_kxk.0.conv.weight' in state_dict:\n        return state_dict  # non-original checkpoint, no remapping needed\n\n    if 'module.visual.trunk.stem.0.conv_kxk.0.conv.weight' in state_dict:\n        return {k.replace('module.visual.trunk.', ''): v for k, v in state_dict.items() if k.startswith('module.visual.trunk')}\n\n    state_dict = state_dict.get('state_dict', state_dict)\n    if 'image_encoder.model.patch_embed.0.rbr_conv.0.conv.weight' in state_dict:\n        # remap MobileCLIP checkpoints\n        prefix = 'image_encoder.model.'\n    else:\n        prefix = ''\n\n    import re\n    import bisect\n\n    # find stage ends by locating downsample layers\n    stage_ends = []\n    for k, v in state_dict.items():\n        match = re.match(r'^(.*?)network\\.(\\d+)\\.proj.*', k)\n        if match:\n            stage_ends.append(int(match.group(2)))\n    stage_ends = list(sorted(set(stage_ends)))\n\n    out_dict = {}\n    for k, v in state_dict.items():\n        if prefix:\n            if prefix not in k:\n                continue\n            k = k.replace(prefix, '')\n\n        # remap renamed layers\n        k = k.replace('patch_embed', 'stem')\n        k = k.replace('rbr_conv', 'conv_kxk')\n        k = k.replace('rbr_scale', 'conv_scale')\n        k = k.replace('rbr_skip', 'identity')\n        k = k.replace('conv_exp', 'final_conv')  # to match byobnet, regnet, nfnet\n        k = k.replace('lkb_origin', 'large_conv')\n        k = k.replace('convffn', 'mlp')\n        k = k.replace('se.reduce', 'se.fc1')\n        k = k.replace('se.expand', 'se.fc2')\n        k = re.sub(r'layer_scale_([0-9])', r'layer_scale_\\1.gamma', k)\n        if k.endswith('layer_scale'):\n            k = k.replace('layer_scale', 'layer_scale.gamma')\n        k = k.replace('dist_head', 
'head_dist')\n        if k.startswith('head.'):\n            if k == 'head.proj' and hasattr(model.head, 'fc') and isinstance(model.head.fc, nn.Linear):\n                # if CLIP projection, map to head.fc w/ bias = zeros\n                k = k.replace('head.proj', 'head.fc.weight')\n                v = v.T\n                out_dict['head.fc.bias'] = torch.zeros(v.shape[0])\n            else:\n                k = k.replace('head.', 'head.fc.')\n\n        # remap flat sequential network to stages\n        match = re.match(r'^network\\.(\\d+)', k)\n        stage_idx, net_idx = None, None\n        if match:\n            net_idx = int(match.group(1))\n            stage_idx = bisect.bisect_right(stage_ends, net_idx)\n        if stage_idx is not None:\n            net_prefix = f'network.{net_idx}'\n            stage_prefix = f'stages.{stage_idx}'\n            if net_prefix + '.proj' in k:\n                k = k.replace(net_prefix + '.proj', stage_prefix + '.downsample.proj')\n            elif net_prefix + '.pe' in k:\n                k = k.replace(net_prefix + '.pe', stage_prefix + '.pos_emb.pos_enc')\n            else:\n                k = k.replace(net_prefix, stage_prefix + '.blocks')\n\n        out_dict[k] = v\n    return out_dict\n\n\ndef _create_fastvit(variant, pretrained=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', (0, 1, 2, 3))\n    model = build_model_with_cfg(\n        FastVit,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        **kwargs\n    )\n    return model\n\n\n@register_model\ndef fastvit_t8(pretrained=False, **kwargs):\n    \"\"\"Instantiate FastViT-T8 model variant.\"\"\"\n    model_args = dict(\n        layers=(2, 2, 4, 2),\n        embed_dims=(48, 96, 192, 384),\n        mlp_ratios=(3, 3, 3, 3),\n        token_mixers=(\"repmixer\", \"repmixer\", \"repmixer\", \"repmixer\")\n    )\n    return 
_create_fastvit('fastvit_t8', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef fastvit_t12(pretrained=False, **kwargs):\n    \"\"\"Instantiate FastViT-T12 model variant.\"\"\"\n    model_args = dict(\n        layers=(2, 2, 6, 2),\n        embed_dims=(64, 128, 256, 512),\n        mlp_ratios=(3, 3, 3, 3),\n        token_mixers=(\"repmixer\", \"repmixer\", \"repmixer\", \"repmixer\"),\n    )\n    return _create_fastvit('fastvit_t12', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef fastvit_s12(pretrained=False, **kwargs):\n    \"\"\"Instantiate FastViT-S12 model variant.\"\"\"\n    model_args = dict(\n        layers=(2, 2, 6, 2),\n        embed_dims=(64, 128, 256, 512),\n        mlp_ratios=(4, 4, 4, 4),\n        token_mixers=(\"repmixer\", \"repmixer\", \"repmixer\", \"repmixer\"),\n    )\n    return _create_fastvit('fastvit_s12', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef fastvit_sa12(pretrained=False, **kwargs):\n    \"\"\"Instantiate FastViT-SA12 model variant.\"\"\"\n    model_args = dict(\n        layers=(2, 2, 6, 2),\n        embed_dims=(64, 128, 256, 512),\n        mlp_ratios=(4, 4, 4, 4),\n        pos_embs=(None, None, None, partial(RepConditionalPosEnc, spatial_shape=(7, 7))),\n        token_mixers=(\"repmixer\", \"repmixer\", \"repmixer\", \"attention\"),\n    )\n    return _create_fastvit('fastvit_sa12', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef fastvit_sa24(pretrained=False, **kwargs):\n    \"\"\"Instantiate FastViT-SA24 model variant.\"\"\"\n    model_args = dict(\n        layers=(4, 4, 12, 4),\n        embed_dims=(64, 128, 256, 512),\n        mlp_ratios=(4, 4, 4, 4),\n        pos_embs=(None, None, None, partial(RepConditionalPosEnc, spatial_shape=(7, 7))),\n        token_mixers=(\"repmixer\", \"repmixer\", \"repmixer\", \"attention\"),\n    )\n    return _create_fastvit('fastvit_sa24', pretrained=pretrained, 
**dict(model_args, **kwargs))\n\n\n@register_model\ndef fastvit_sa36(pretrained=False, **kwargs):\n    \"\"\"Instantiate FastViT-SA36 model variant.\"\"\"\n    model_args = dict(\n        layers=(6, 6, 18, 6),\n        embed_dims=(64, 128, 256, 512),\n        mlp_ratios=(4, 4, 4, 4),\n        pos_embs=(None, None, None, partial(RepConditionalPosEnc, spatial_shape=(7, 7))),\n        token_mixers=(\"repmixer\", \"repmixer\", \"repmixer\", \"attention\"),\n    )\n    return _create_fastvit('fastvit_sa36', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef fastvit_ma36(pretrained=False, **kwargs):\n    \"\"\"Instantiate FastViT-MA36 model variant.\"\"\"\n    model_args = dict(\n        layers=(6, 6, 18, 6),\n        embed_dims=(76, 152, 304, 608),\n        mlp_ratios=(4, 4, 4, 4),\n        pos_embs=(None, None, None, partial(RepConditionalPosEnc, spatial_shape=(7, 7))),\n        token_mixers=(\"repmixer\", \"repmixer\", \"repmixer\", \"attention\")\n    )\n    return _create_fastvit('fastvit_ma36', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef fastvit_mci0(pretrained=False, **kwargs):\n    \"\"\"Instantiate MCi0 model variant.\"\"\"\n    model_args = dict(\n        layers=(2, 6, 10, 2),\n        embed_dims=(64, 128, 256, 512),\n        mlp_ratios=(3, 3, 3, 3),\n        se_downsamples=(False, False, True, True),\n        pos_embs=(None, None, None, partial(RepConditionalPosEnc, spatial_shape=(7, 7))),\n        token_mixers=(\"repmixer\", \"repmixer\", \"repmixer\", \"attention\"),\n        lkc_use_act=True,\n    )\n    return _create_fastvit('fastvit_mci0', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef fastvit_mci1(pretrained=False, **kwargs):\n    \"\"\"Instantiate MCi1 model variant.\"\"\"\n    model_args = dict(\n        layers=(4, 12, 20, 4),\n        embed_dims=(64, 128, 256, 512),\n        mlp_ratios=(3, 3, 3, 3),\n        se_downsamples=(False, False, True, True),\n      
  pos_embs=(None, None, None, partial(RepConditionalPosEnc, spatial_shape=(7, 7))),\n        token_mixers=(\"repmixer\", \"repmixer\", \"repmixer\", \"attention\"),\n        lkc_use_act=True,\n    )\n    return _create_fastvit('fastvit_mci1', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef fastvit_mci2(pretrained=False, **kwargs):\n    \"\"\"Instantiate MCi2 model variant.\"\"\"\n    model_args = dict(\n        layers=(4, 12, 24, 4),\n        embed_dims=(80, 160, 320, 640),\n        mlp_ratios=(3, 3, 3, 3),\n        se_downsamples=(False, False, True, True),\n        pos_embs=(None, None, None, partial(RepConditionalPosEnc, spatial_shape=(7, 7))),\n        token_mixers=(\"repmixer\", \"repmixer\", \"repmixer\", \"attention\"),\n        lkc_use_act=True,\n    )\n    return _create_fastvit('fastvit_mci2', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef fastvit_mci3(pretrained=False, **kwargs):\n    \"\"\"Instantiate L model variant.\"\"\"\n    model_args = dict(\n        layers=(2, 12, 24, 4, 2),\n        embed_dims=(96, 192, 384, 768, 1536),\n        mlp_ratios=(4, 4, 4, 4, 4),\n        se_downsamples=(False, False, False, False, False),\n        downsamples=(False, True, True, True, True),\n        pos_embs=(\n            None,\n            None,\n            None,\n            partial(RepConditionalPosEnc, spatial_shape=(7, 7)),\n            partial(RepConditionalPosEnc, spatial_shape=(7, 7))\n        ),\n        token_mixers=(\"repmixer\", \"repmixer\", \"repmixer\", \"attention\", \"attention\"),\n        lkc_use_act=True,\n        norm_layer=partial(LayerNorm2d, eps=1e-5),\n        stem_use_scale_branch=False,\n    )\n    model = _create_fastvit('fastvit_mci3', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef fastvit_mci4(pretrained=False, **kwargs):\n    \"\"\"Instantiate XL model variant.\"\"\"\n    model_args = dict(\n        layers=(2, 12, 24, 4, 
4),\n        embed_dims=(128, 256, 512, 1024, 2048),\n        mlp_ratios=(4, 4, 4, 4, 4),\n        se_downsamples=(False, False, False, False, False),\n        downsamples=(False, True, True, True, True),\n        pos_embs=(\n            None,\n            None,\n            None,\n            partial(RepConditionalPosEnc, spatial_shape=(7, 7)),\n            partial(RepConditionalPosEnc, spatial_shape=(7, 7))\n        ),\n        token_mixers=(\"repmixer\", \"repmixer\", \"repmixer\", \"attention\", \"attention\"),\n        lkc_use_act=True,\n        norm_layer=partial(LayerNorm2d, eps=1e-5),\n        stem_use_scale_branch=False,\n    )\n\n    model = _create_fastvit('fastvit_mci4', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n"
  },
  {
    "path": "timm/models/features.py",
    "content": "from ._features import *\n\nimport warnings\nwarnings.warn(f\"Importing from {__name__} is deprecated, please import via timm.models\", FutureWarning)\n"
  },
  {
    "path": "timm/models/focalnet.py",
    "content": "\"\"\" FocalNet\n\nAs described in `Focal Modulation Networks` - https://arxiv.org/abs/2203.11926\n\nSignificant modifications and refactoring from the original impl at https://github.com/microsoft/FocalNet\n\nThis impl is/has:\n* fully convolutional, NCHW tensor layout throughout, seemed to have minimal performance impact but more flexible\n* re-ordered downsample / layer so that striding always at beginning of layer (stage)\n* no input size constraints or input resolution/H/W tracking through the model\n* torchscript fixed and a number of quirks cleaned up\n* feature extraction support via `features_only=True`\n\"\"\"\n# --------------------------------------------------------\n# FocalNets -- Focal Modulation Networks\n# Copyright (c) 2022 Microsoft\n# Licensed under The MIT License [see LICENSE for details]\n# Written by Jianwei Yang (jianwyan@microsoft.com)\n# --------------------------------------------------------\nfrom functools import partial\nfrom typing import Callable, List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import (\n    Mlp,\n    DropPath,\n    LayerNorm2d,\n    LayerScale2d,\n    trunc_normal_,\n    ClassifierHead,\n    NormMlpClassifierHead,\n    calculate_drop_path_rates,\n)\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import named_apply, checkpoint\nfrom ._registry import generate_default_cfgs, register_model\n\n__all__ = ['FocalNet']\n\n\nclass FocalModulation(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            focal_window: int,\n            focal_level: int,\n            focal_factor: int = 2,\n            bias: bool = True,\n            use_post_norm: bool = False,\n            normalize_modulator: bool = False,\n            proj_drop: float = 0.,\n            norm_layer: Type[nn.Module] = LayerNorm2d,\n            
device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.dim = dim\n        self.focal_window = focal_window\n        self.focal_level = focal_level\n        self.focal_factor = focal_factor\n        self.use_post_norm = use_post_norm\n        self.normalize_modulator = normalize_modulator\n        self.input_split = [dim, dim, self.focal_level + 1]\n\n        self.f = nn.Conv2d(dim, 2 * dim + (self.focal_level + 1), kernel_size=1, bias=bias, **dd)\n        self.h = nn.Conv2d(dim, dim, kernel_size=1, bias=bias, **dd)\n\n        self.act = nn.GELU()\n        self.proj = nn.Conv2d(dim, dim, kernel_size=1, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n        self.focal_layers = nn.ModuleList()\n\n        self.kernel_sizes = []\n        for k in range(self.focal_level):\n            kernel_size = self.focal_factor * k + self.focal_window\n            self.focal_layers.append(nn.Sequential(\n                nn.Conv2d(dim, dim, kernel_size=kernel_size, groups=dim, padding=kernel_size // 2, bias=False, **dd),\n                nn.GELU(),\n            ))\n            self.kernel_sizes.append(kernel_size)\n        self.norm = norm_layer(dim, **dd) if self.use_post_norm else nn.Identity()\n\n    def forward(self, x):\n        # pre linear projection\n        x = self.f(x)\n        q, ctx, gates = torch.split(x, self.input_split, 1)\n\n        # context aggregation\n        ctx_all = 0\n        for l, focal_layer in enumerate(self.focal_layers):\n            ctx = focal_layer(ctx)\n            ctx_all = ctx_all + ctx * gates[:, l:l + 1]\n        ctx_global = self.act(ctx.mean((2, 3), keepdim=True))\n        ctx_all = ctx_all + ctx_global * gates[:, self.focal_level:]\n\n        # normalize context\n        if self.normalize_modulator:\n            ctx_all = ctx_all / (self.focal_level + 1)\n\n        # focal modulation\n        x_out = q * self.h(ctx_all)\n        x_out = 
self.norm(x_out)\n\n        # post linear projection\n        x_out = self.proj(x_out)\n        x_out = self.proj_drop(x_out)\n        return x_out\n\n\nclass FocalNetBlock(nn.Module):\n    \"\"\" Focal Modulation Network Block.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            mlp_ratio: float = 4.,\n            focal_level: int = 1,\n            focal_window: int = 3,\n            use_post_norm: bool = False,\n            use_post_norm_in_modulation: bool = False,\n            normalize_modulator: bool = False,\n            layerscale_value: Optional[float] = 1e-4,\n            proj_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = LayerNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            dim: Number of input channels.\n            mlp_ratio: Ratio of mlp hidden dim to embedding dim.\n            focal_level: Number of focal levels.\n            focal_window: Focal window size at first focal level.\n            use_post_norm: Whether to use layer norm after modulation.\n            use_post_norm_in_modulation: Whether to use layer norm in modulation.\n            layerscale_value: Initial layerscale value.\n            proj_drop: Dropout rate.\n            drop_path: Stochastic depth rate.\n            act_layer: Activation layer.\n            norm_layer: Normalization layer.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.mlp_ratio = mlp_ratio\n\n        self.focal_window = focal_window\n        self.focal_level = focal_level\n        self.use_post_norm = use_post_norm\n\n        self.norm1 = norm_layer(dim, **dd) if not use_post_norm else nn.Identity()\n        self.modulation = FocalModulation(\n            dim,\n            focal_window=focal_window,\n            
focal_level=self.focal_level,\n            use_post_norm=use_post_norm_in_modulation,\n            normalize_modulator=normalize_modulator,\n            proj_drop=proj_drop,\n            norm_layer=norm_layer,\n            **dd,\n        )\n        self.norm1_post = norm_layer(dim, **dd) if use_post_norm else nn.Identity()\n        self.ls1 = LayerScale2d(dim, layerscale_value, **dd) if layerscale_value is not None else nn.Identity()\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm2 = norm_layer(dim, **dd) if not use_post_norm else nn.Identity()\n        self.mlp = Mlp(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=proj_drop,\n            use_conv=True,\n            **dd,\n        )\n        self.norm2_post = norm_layer(dim, **dd) if use_post_norm else nn.Identity()\n        self.ls2 = LayerScale2d(dim, layerscale_value, **dd) if layerscale_value is not None else nn.Identity()\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n    def forward(self, x):\n        shortcut = x\n\n        # Focal Modulation\n        x = self.norm1(x)\n        x = self.modulation(x)\n        x = self.norm1_post(x)\n        x = shortcut + self.drop_path1(self.ls1(x))\n\n        # FFN\n        x = x + self.drop_path2(self.ls2(self.norm2_post(self.mlp(self.norm2(x)))))\n\n        return x\n\n\nclass FocalNetStage(nn.Module):\n    \"\"\" A basic Focal Transformer layer for one stage.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            out_dim: int,\n            depth: int,\n            mlp_ratio: float = 4.,\n            downsample: bool = True,\n            focal_level: int = 1,\n            focal_window: int = 1,\n            use_overlap_down: bool = False,\n            use_post_norm: bool = False,\n            use_post_norm_in_modulation: bool = False,\n            normalize_modulator: bool = False,\n            layerscale_value: Optional[float] = 1e-4,\n            proj_drop: float = 0.,\n            drop_path: Union[float, List[float]] = 0.,\n            norm_layer: Type[nn.Module] = LayerNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            dim: Number of input channels.\n            out_dim: Number of output channels.\n            depth: Number of blocks.\n            mlp_ratio: Ratio of mlp hidden dim to embedding dim.\n            downsample: Downsample layer at start of the layer.\n            focal_level: Number of focal levels\n            focal_window: Focal window size at first focal level\n            use_overlap_down: User overlapped convolution in downsample layer.\n            use_post_norm: Whether to use layer norm after modulation.\n            use_post_norm_in_modulation: Whether to use layer norm in modulation.\n            layerscale_value: Initial layerscale value\n            proj_drop: Dropout rate for projections.\n            drop_path: Stochastic depth rate.\n            
norm_layer: Normalization layer.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.depth = depth\n        self.grad_checkpointing = False\n\n        if downsample:\n            self.downsample = Downsample(\n                in_chs=dim,\n                out_chs=out_dim,\n                stride=2,\n                overlap=use_overlap_down,\n                norm_layer=norm_layer,\n                **dd,\n            )\n        else:\n            self.downsample = nn.Identity()\n\n        # build blocks\n        self.blocks = nn.ModuleList([\n            FocalNetBlock(\n                dim=out_dim,\n                mlp_ratio=mlp_ratio,\n                focal_level=focal_level,\n                focal_window=focal_window,\n                use_post_norm=use_post_norm,\n                use_post_norm_in_modulation=use_post_norm_in_modulation,\n                normalize_modulator=normalize_modulator,\n                layerscale_value=layerscale_value,\n                proj_drop=proj_drop,\n                drop_path=drop_path[i] if isinstance(drop_path, list) else drop_path,\n                norm_layer=norm_layer,\n                **dd,\n            )\n            for i in range(depth)])\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    def forward(self, x):\n        x = self.downsample(x)\n        for blk in self.blocks:\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x)\n            else:\n                x = blk(x)\n        return x\n\n\nclass Downsample(nn.Module):\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            stride: int = 4,\n            overlap: bool = False,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n\n  
      Args:\n            in_chs: Number of input image channels.\n            out_chs: Number of linear projection output channels.\n            stride: Downsample stride.\n            overlap: Use overlapping convolutions if True.\n            norm_layer: Normalization layer.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.stride = stride\n        padding = 0\n        kernel_size = stride\n        if overlap:\n            assert stride in (2, 4)\n            if stride == 4:\n                kernel_size, padding = 7, 2\n            elif stride == 2:\n                kernel_size, padding = 3, 1\n        self.proj = nn.Conv2d(in_chs, out_chs, kernel_size=kernel_size, stride=stride, padding=padding, **dd)\n        self.norm = norm_layer(out_chs, **dd) if norm_layer is not None else nn.Identity()\n\n    def forward(self, x):\n        x = self.proj(x)\n        x = self.norm(x)\n        return x\n\n\nclass FocalNet(nn.Module):\n    \"\"\"\" Focal Modulation Networks (FocalNets)\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            embed_dim: int = 96,\n            depths: Tuple[int, ...] = (2, 2, 6, 2),\n            mlp_ratio: float = 4.,\n            focal_levels: Tuple[int, ...] = (2, 2, 2, 2),\n            focal_windows: Tuple[int, ...] 
= (3, 3, 3, 3),\n            use_overlap_down: bool = False,\n            use_post_norm: bool = False,\n            use_post_norm_in_modulation: bool = False,\n            normalize_modulator: bool = False,\n            head_hidden_size: Optional[int] = None,\n            head_init_scale: float = 1.0,\n            layerscale_value: Optional[float] = None,\n            drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            drop_path_rate: float = 0.1,\n            norm_layer: Type[nn.Module] = partial(LayerNorm2d, eps=1e-5),\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            in_chans: Number of input image channels.\n            num_classes: Number of classes for classification head.\n            embed_dim: Patch embedding dimension.\n            depths: Depth of each Focal Transformer layer.\n            mlp_ratio: Ratio of mlp hidden dim to embedding dim.\n            focal_levels: How many focal levels at all stages. 
Note that this excludes the finest-grain level.\n            focal_windows: The focal window size at all stages.\n            use_overlap_down: Whether to use convolutional embedding.\n            use_post_norm: Whether to use layernorm after modulation (it helps stabilize training of large models)\n            layerscale_value: Value for layer scale.\n            drop_rate: Dropout rate.\n            drop_path_rate: Stochastic depth rate.\n            norm_layer: Normalization layer.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_layers = len(depths)\n        embed_dim = [embed_dim * (2 ** i) for i in range(self.num_layers)]\n\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.embed_dim = embed_dim\n        self.num_features = self.head_hidden_size = embed_dim[-1]\n        self.feature_info = []\n\n        self.stem = Downsample(\n            in_chs=in_chans,\n            out_chs=embed_dim[0],\n            overlap=use_overlap_down,\n            norm_layer=norm_layer,\n            **dd,\n        )\n        in_dim = embed_dim[0]\n\n        dpr = calculate_drop_path_rates(drop_path_rate, sum(depths))  # stochastic depth decay rule\n        layers = []\n        for i_layer in range(self.num_layers):\n            out_dim = embed_dim[i_layer]\n            layer = FocalNetStage(\n                dim=in_dim,\n                out_dim=out_dim,\n                depth=depths[i_layer],\n                mlp_ratio=mlp_ratio,\n                downsample=i_layer > 0,\n                focal_level=focal_levels[i_layer],\n                focal_window=focal_windows[i_layer],\n                use_overlap_down=use_overlap_down,\n                use_post_norm=use_post_norm,\n                use_post_norm_in_modulation=use_post_norm_in_modulation,\n                normalize_modulator=normalize_modulator,\n                layerscale_value=layerscale_value,\n                
proj_drop=proj_drop_rate,\n                drop_path=dpr[sum(depths[:i_layer]):sum(depths[:i_layer + 1])],\n                norm_layer=norm_layer,\n                **dd,\n            )\n            in_dim = out_dim\n            layers += [layer]\n            self.feature_info += [dict(num_chs=out_dim, reduction=4 * 2 ** i_layer, module=f'layers.{i_layer}')]\n\n        self.layers = nn.Sequential(*layers)\n\n        if head_hidden_size:\n            self.norm = nn.Identity()\n            self.head_hidden_size = head_hidden_size\n            self.head = NormMlpClassifierHead(\n                self.num_features,\n                num_classes,\n                hidden_size=head_hidden_size,\n                pool_type=global_pool,\n                drop_rate=drop_rate,\n                norm_layer=norm_layer,\n                **dd,\n            )\n        else:\n            self.norm = norm_layer(self.num_features, **dd)\n            self.head = ClassifierHead(\n                self.num_features,\n                num_classes,\n                pool_type=global_pool,\n                drop_rate=drop_rate,\n                **dd,\n            )\n\n        named_apply(partial(_init_weights, head_init_scale=head_init_scale), self)\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {''}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^stem',\n            blocks=[\n                (r'^layers\\.(\\d+)', None),\n                (r'^norm', (99999,))\n            ] if coarse else [\n                (r'^layers\\.(\\d+).downsample', (0,)),\n                (r'^layers\\.(\\d+)\\.\\w+\\.(\\d+)', None),\n                (r'^norm', (99999,)),\n            ]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n        for l in self.layers:\n            l.set_grad_checkpointing(enable=enable)\n\n    @torch.jit.ignore\n    def 
get_classifier(self) -> nn.Module:\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, pool_type=global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.layers), indices)\n\n        # forward pass\n        x = self.stem(x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.layers\n        else:\n            stages = self.layers[:max_index + 1]\n\n        last_idx = len(self.layers) - 1\n        for feat_idx, stage in enumerate(stages):\n            x = stage(x)\n            if feat_idx in take_indices:\n                if norm and feat_idx == last_idx:\n                    x_inter = self.norm(x)  # applying final norm to last intermediate\n                else:\n                    x_inter = x\n                intermediates.append(x_inter)\n\n      
  if intermediates_only:\n            return intermediates\n\n        if feat_idx == last_idx:\n            x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.layers), indices)\n        self.layers = self.layers[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        x = self.layers(x)\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _init_weights(module, name=None, head_init_scale=1.0):\n    if isinstance(module, nn.Conv2d):\n        trunc_normal_(module.weight, std=.02)\n        if module.bias is not None:\n            nn.init.zeros_(module.bias)\n    elif isinstance(module, nn.Linear):\n        trunc_normal_(module.weight, std=.02)\n        if module.bias is not None:\n            nn.init.zeros_(module.bias)\n        if name and 'head.fc' in name:\n            module.weight.data.mul_(head_init_scale)\n            module.bias.data.mul_(head_init_scale)\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': .9, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 
'stem.proj', 'classifier': 'head.fc',\n        'license': 'mit', **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    \"focalnet_tiny_srf.ms_in1k\": _cfg(\n        hf_hub_id='timm/'),\n    \"focalnet_small_srf.ms_in1k\": _cfg(\n        hf_hub_id='timm/'),\n    \"focalnet_base_srf.ms_in1k\": _cfg(\n        hf_hub_id='timm/'),\n    \"focalnet_tiny_lrf.ms_in1k\": _cfg(\n        hf_hub_id='timm/'),\n    \"focalnet_small_lrf.ms_in1k\": _cfg(\n        hf_hub_id='timm/'),\n    \"focalnet_base_lrf.ms_in1k\": _cfg(\n        hf_hub_id='timm/'),\n\n    \"focalnet_large_fl3.ms_in22k\": _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, num_classes=21842),\n    \"focalnet_large_fl4.ms_in22k\": _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, num_classes=21842),\n    \"focalnet_xlarge_fl3.ms_in22k\": _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, num_classes=21842),\n    \"focalnet_xlarge_fl4.ms_in22k\": _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, num_classes=21842),\n    \"focalnet_huge_fl3.ms_in22k\": _cfg(\n        hf_hub_id='timm/',\n        num_classes=21842),\n    \"focalnet_huge_fl4.ms_in22k\": _cfg(\n        hf_hub_id='timm/',\n        num_classes=0),\n})\n\n\ndef checkpoint_filter_fn(state_dict, model: FocalNet):\n    state_dict = state_dict.get('model', state_dict)\n    if 'stem.proj.weight' in state_dict:\n        return state_dict\n    import re\n    out_dict = {}\n    dest_dict = model.state_dict()\n    for k, v in state_dict.items():\n        k = re.sub(r'gamma_([0-9])', r'ls\\1.gamma', k)\n        k = k.replace('patch_embed', 'stem')\n        k = re.sub(r'layers.(\\d+).downsample', lambda x: f'layers.{int(x.group(1)) + 1}.downsample', k)\n        if 'norm' in k and k not in dest_dict:\n            k = re.sub(r'norm([0-9])', 
r'norm\\1_post', k)\n        k = k.replace('ln.', 'norm.')\n        k = k.replace('head', 'head.fc')\n        if k in dest_dict and dest_dict[k].numel() == v.numel() and dest_dict[k].shape != v.shape:\n            v = v.reshape(dest_dict[k].shape)\n        out_dict[k] = v\n    return out_dict\n\n\ndef _create_focalnet(variant, pretrained=False, **kwargs):\n    default_out_indices = tuple(i for i, _ in enumerate(kwargs.get('depths', (1, 1, 3, 1))))\n    out_indices = kwargs.pop('out_indices', default_out_indices)\n\n    model = build_model_with_cfg(\n        FocalNet, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        **kwargs)\n    return model\n\n\n@register_model\ndef focalnet_tiny_srf(pretrained=False, **kwargs) -> FocalNet:\n    model_kwargs = dict(depths=[2, 2, 6, 2], embed_dim=96, **kwargs)\n    return _create_focalnet('focalnet_tiny_srf', pretrained=pretrained, **model_kwargs)\n\n\n@register_model\ndef focalnet_small_srf(pretrained=False, **kwargs) -> FocalNet:\n    model_kwargs = dict(depths=[2, 2, 18, 2], embed_dim=96, **kwargs)\n    return _create_focalnet('focalnet_small_srf', pretrained=pretrained, **model_kwargs)\n\n\n@register_model\ndef focalnet_base_srf(pretrained=False, **kwargs) -> FocalNet:\n    model_kwargs = dict(depths=[2, 2, 18, 2], embed_dim=128, **kwargs)\n    return _create_focalnet('focalnet_base_srf', pretrained=pretrained, **model_kwargs)\n\n\n@register_model\ndef focalnet_tiny_lrf(pretrained=False, **kwargs) -> FocalNet:\n    model_kwargs = dict(depths=[2, 2, 6, 2], embed_dim=96, focal_levels=[3, 3, 3, 3], **kwargs)\n    return _create_focalnet('focalnet_tiny_lrf', pretrained=pretrained, **model_kwargs)\n\n\n@register_model\ndef focalnet_small_lrf(pretrained=False, **kwargs) -> FocalNet:\n    model_kwargs = dict(depths=[2, 2, 18, 2], embed_dim=96, focal_levels=[3, 3, 3, 3], **kwargs)\n    return 
_create_focalnet('focalnet_small_lrf', pretrained=pretrained, **model_kwargs)\n\n\n@register_model\ndef focalnet_base_lrf(pretrained=False, **kwargs) -> FocalNet:\n    model_kwargs = dict(depths=[2, 2, 18, 2], embed_dim=128, focal_levels=[3, 3, 3, 3], **kwargs)\n    return _create_focalnet('focalnet_base_lrf', pretrained=pretrained, **model_kwargs)\n\n\n# FocalNet large+ models\n@register_model\ndef focalnet_large_fl3(pretrained=False, **kwargs) -> FocalNet:\n    model_kwargs = dict(\n        depths=[2, 2, 18, 2], embed_dim=192, focal_levels=[3, 3, 3, 3], focal_windows=[5] * 4,\n        use_post_norm=True, use_overlap_down=True, layerscale_value=1e-4, **kwargs)\n    return _create_focalnet('focalnet_large_fl3', pretrained=pretrained, **model_kwargs)\n\n\n@register_model\ndef focalnet_large_fl4(pretrained=False, **kwargs) -> FocalNet:\n    model_kwargs = dict(\n        depths=[2, 2, 18, 2], embed_dim=192, focal_levels=[4, 4, 4, 4],\n        use_post_norm=True, use_overlap_down=True, layerscale_value=1e-4, **kwargs)\n    return _create_focalnet('focalnet_large_fl4', pretrained=pretrained, **model_kwargs)\n\n\n@register_model\ndef focalnet_xlarge_fl3(pretrained=False, **kwargs) -> FocalNet:\n    model_kwargs = dict(\n        depths=[2, 2, 18, 2], embed_dim=256, focal_levels=[3, 3, 3, 3], focal_windows=[5] * 4,\n        use_post_norm=True, use_overlap_down=True, layerscale_value=1e-4, **kwargs)\n    return _create_focalnet('focalnet_xlarge_fl3', pretrained=pretrained, **model_kwargs)\n\n\n@register_model\ndef focalnet_xlarge_fl4(pretrained=False, **kwargs) -> FocalNet:\n    model_kwargs = dict(\n        depths=[2, 2, 18, 2], embed_dim=256, focal_levels=[4, 4, 4, 4],\n        use_post_norm=True, use_overlap_down=True, layerscale_value=1e-4, **kwargs)\n    return _create_focalnet('focalnet_xlarge_fl4', pretrained=pretrained, **model_kwargs)\n\n\n@register_model\ndef focalnet_huge_fl3(pretrained=False, **kwargs) -> FocalNet:\n    model_kwargs = dict(\n        depths=[2, 
2, 18, 2], embed_dim=352, focal_levels=[3, 3, 3, 3], focal_windows=[3] * 4,\n        use_post_norm=True, use_post_norm_in_modulation=True, use_overlap_down=True, layerscale_value=1e-4, **kwargs)\n    return _create_focalnet('focalnet_huge_fl3', pretrained=pretrained, **model_kwargs)\n\n\n@register_model\ndef focalnet_huge_fl4(pretrained=False, **kwargs) -> FocalNet:\n    model_kwargs = dict(\n        depths=[2, 2, 18, 2], embed_dim=352, focal_levels=[4, 4, 4, 4],\n        use_post_norm=True, use_post_norm_in_modulation=True, use_overlap_down=True, layerscale_value=1e-4, **kwargs)\n    return _create_focalnet('focalnet_huge_fl4', pretrained=pretrained, **model_kwargs)\n\n"
  },
  {
    "path": "timm/models/fx_features.py",
    "content": "from ._features_fx import *\n\nimport warnings\nwarnings.warn(f\"Importing from {__name__} is deprecated, please import via timm.models\", FutureWarning)\n"
  },
  {
    "path": "timm/models/gcvit.py",
    "content": "\"\"\" Global Context ViT\n\nFrom scratch implementation of GCViT in the style of timm swin_transformer_v2_cr.py\n\nGlobal Context Vision Transformers -https://arxiv.org/abs/2206.09959\n\n@article{hatamizadeh2022global,\n  title={Global Context Vision Transformers},\n  author={Hatamizadeh, Ali and Yin, Hongxu and Kautz, Jan and Molchanov, Pavlo},\n  journal={arXiv preprint arXiv:2206.09959},\n  year={2022}\n}\n\nFree of any code related to NVIDIA GCVit impl at https://github.com/NVlabs/GCVit.\nThe license for this code release is Apache 2.0 with no commercial restrictions.\n\nHowever, weight files adapted from NVIDIA GCVit impl ARE under a non-commercial share-alike license\n(https://creativecommons.org/licenses/by-nc-sa/4.0/) until I have a chance to train new ones...\n\nHacked together by / Copyright 2022, Ross Wightman\n\"\"\"\nimport math\nfrom functools import partial\nfrom typing import Callable, List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import (\n    DropPath,\n    calculate_drop_path_rates,\n    to_2tuple,\n    to_ntuple,\n    Mlp,\n    ClassifierHead,\n    LayerNorm2d,\n    LayerScale,\n    get_attn,\n    get_act_layer,\n    get_norm_layer,\n    RelPosBias,\n    _assert,\n)\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_function\nfrom ._manipulate import named_apply, checkpoint\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['GlobalContextVit']\n\n\nclass MbConvBlock(nn.Module):\n    \"\"\" A depthwise separable / fused mbconv style residual block with SE, `no norm.\n    \"\"\"\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: Optional[int] = None,\n            expand_ratio: float = 1.0,\n            attn_layer: str = 'se',\n            bias: bool = False,\n            
act_layer: Type[nn.Module] = nn.GELU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        attn_kwargs = dict(act_layer=act_layer, **dd)\n        if isinstance(attn_layer, str) and attn_layer == 'se' or attn_layer == 'eca':\n            attn_kwargs['rd_ratio'] = 0.25\n            attn_kwargs['bias'] = False\n        attn_layer = get_attn(attn_layer)\n        out_chs = out_chs or in_chs\n        mid_chs = int(expand_ratio * in_chs)\n\n        self.conv_dw = nn.Conv2d(in_chs, mid_chs, 3, 1, 1, groups=in_chs, bias=bias, **dd)\n        self.act = act_layer()\n        self.se = attn_layer(mid_chs, **attn_kwargs)\n        self.conv_pw = nn.Conv2d(mid_chs, out_chs, 1, 1, 0, bias=bias, **dd)\n\n    def forward(self, x):\n        shortcut = x\n        x = self.conv_dw(x)\n        x = self.act(x)\n        x = self.se(x)\n        x = self.conv_pw(x)\n        x = x + shortcut\n        return x\n\n\nclass Downsample2d(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            dim_out: Optional[int] = None,\n            reduction: str = 'conv',\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = LayerNorm2d,  # NOTE in NCHW\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        dim_out = dim_out or dim\n\n        self.norm1 = norm_layer(dim, **dd) if norm_layer is not None else nn.Identity()\n        self.conv_block = MbConvBlock(dim, act_layer=act_layer, **dd)\n        assert reduction in ('conv', 'max', 'avg')\n        if reduction == 'conv':\n            self.reduction = nn.Conv2d(dim, dim_out, 3, 2, 1, bias=False, **dd)\n        elif reduction == 'max':\n            assert dim == dim_out\n            self.reduction = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)\n        else:\n            assert dim == dim_out\n            
self.reduction = nn.AvgPool2d(kernel_size=2)\n        self.norm2 = norm_layer(dim_out, **dd) if norm_layer is not None else nn.Identity()\n\n    def forward(self, x):\n        x = self.norm1(x)\n        x = self.conv_block(x)\n        x = self.reduction(x)\n        x = self.norm2(x)\n        return x\n\n\nclass FeatureBlock(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            levels: int = 0,\n            reduction: str = 'max',\n            act_layer: Type[nn.Module] = nn.GELU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        reductions = levels\n        levels = max(1, levels)\n        if reduction == 'avg':\n            pool_fn = partial(nn.AvgPool2d, kernel_size=2)\n        else:\n            pool_fn = partial(nn.MaxPool2d, kernel_size=3, stride=2, padding=1)\n        self.blocks = nn.Sequential()\n        for i in range(levels):\n            self.blocks.add_module(f'conv{i+1}', MbConvBlock(dim, act_layer=act_layer, **dd))\n            if reductions:\n                self.blocks.add_module(f'pool{i+1}', pool_fn())\n                reductions -= 1\n\n    def forward(self, x):\n        return self.blocks(x)\n\n\nclass Stem(nn.Module):\n    def __init__(\n            self,\n            in_chs: int = 3,\n            out_chs: int = 96,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = LayerNorm2d,  # NOTE stem in NCHW\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.conv1 = nn.Conv2d(in_chs, out_chs, kernel_size=3, stride=2, padding=1, **dd)\n        self.down = Downsample2d(out_chs, act_layer=act_layer, norm_layer=norm_layer, **dd)\n\n    def forward(self, x):\n        x = self.conv1(x)\n        x = self.down(x)\n        return x\n\n\nclass WindowAttentionGlobal(nn.Module):\n\n    def __init__(\n 
           self,\n            dim: int,\n            num_heads: int,\n            window_size: Tuple[int, int],\n            use_global: bool = True,\n            qkv_bias: bool = True,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        window_size = to_2tuple(window_size)\n        self.window_size = window_size\n        self.num_heads = num_heads\n        self.head_dim = dim // num_heads\n        self.scale = self.head_dim ** -0.5\n        self.use_global = use_global\n\n        self.rel_pos = RelPosBias(window_size=window_size, num_heads=num_heads, **dd)\n        if self.use_global:\n            self.qkv = nn.Linear(dim, dim * 2, bias=qkv_bias, **dd)\n        else:\n            self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n    def forward(self, x, q_global: Optional[torch.Tensor] = None):\n        B, N, C = x.shape\n        if self.use_global and q_global is not None:\n            _assert(x.shape[-1] == q_global.shape[-1], 'x and q_global seq lengths should be equal')\n\n            kv = self.qkv(x)\n            kv = kv.reshape(B, N, 2, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)\n            k, v = kv.unbind(0)\n\n            q = q_global.repeat(B // q_global.shape[0], 1, 1, 1)\n            q = q.reshape(B, N, self.num_heads, self.head_dim).permute(0, 2, 1, 3)\n        else:\n            qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)\n            q, k, v = qkv.unbind(0)\n        q = q * self.scale\n\n        attn = q @ k.transpose(-2, -1).contiguous()  # NOTE contiguous() fixes an odd jit bug in PyTorch 2.0\n        attn = self.rel_pos(attn)\n        attn = attn.softmax(dim=-1)\n        attn = 
self.attn_drop(attn)\n\n        x = (attn @ v).transpose(1, 2).reshape(B, N, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n\ndef window_partition(x, window_size: Tuple[int, int]):\n    B, H, W, C = x.shape\n    x = x.view(B, H // window_size[0], window_size[0], W // window_size[1], window_size[1], C)\n    windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size[0], window_size[1], C)\n    return windows\n\n\n@register_notrace_function  # reason: int argument is a Proxy\ndef window_reverse(windows, window_size: Tuple[int, int], img_size: Tuple[int, int]):\n    H, W = img_size\n    C = windows.shape[-1]\n    x = windows.view(-1, H // window_size[0], W // window_size[1], window_size[0], window_size[1], C)\n    x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, H, W, C)\n    return x\n\n\nclass GlobalContextVitBlock(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            feat_size: Tuple[int, int],\n            num_heads: int,\n            window_size: int = 7,\n            mlp_ratio: float = 4.,\n            use_global: bool = True,\n            qkv_bias: bool = True,\n            layer_scale: Optional[float] = None,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            attn_layer: Callable = WindowAttentionGlobal,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        feat_size = to_2tuple(feat_size)\n        window_size = to_2tuple(window_size)\n        self.window_size = window_size\n        self.num_windows = int((feat_size[0] // window_size[0]) * (feat_size[1] // window_size[1]))\n\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = attn_layer(\n            dim,\n            num_heads=num_heads,\n            
window_size=window_size,\n            use_global=use_global,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            **dd,\n        )\n        self.ls1 = LayerScale(dim, layer_scale, **dd) if layer_scale is not None else nn.Identity()\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp = Mlp(in_features=dim, hidden_features=int(dim * mlp_ratio), act_layer=act_layer, drop=proj_drop, **dd)\n        self.ls2 = LayerScale(dim, layer_scale, **dd) if layer_scale is not None else nn.Identity()\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def _window_attn(self, x, q_global: Optional[torch.Tensor] = None):\n        B, H, W, C = x.shape\n        x_win = window_partition(x, self.window_size)\n        x_win = x_win.view(-1, self.window_size[0] * self.window_size[1], C)\n        attn_win = self.attn(x_win, q_global)\n        x = window_reverse(attn_win, self.window_size, (H, W))\n        return x\n\n    def forward(self, x, q_global: Optional[torch.Tensor] = None):\n        x = x + self.drop_path1(self.ls1(self._window_attn(self.norm1(x), q_global)))\n        x = x + self.drop_path2(self.ls2(self.mlp(self.norm2(x))))\n        return x\n\n\nclass GlobalContextVitStage(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            depth: int,\n            num_heads: int,\n            feat_size: Tuple[int, int],\n            window_size: Tuple[int, int],\n            downsample: bool = True,\n            global_norm: bool = False,\n            stage_norm: bool = False,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            layer_scale: Optional[float] = None,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: Union[List[float], float] = 0.0,\n            act_layer: Type[nn.Module] = 
nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            norm_layer_cl: Type[nn.Module] = LayerNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if downsample:\n            self.downsample = Downsample2d(\n                dim=dim,\n                dim_out=dim * 2,\n                norm_layer=norm_layer,\n                **dd,\n            )\n            dim = dim * 2\n            feat_size = (feat_size[0] // 2, feat_size[1] // 2)\n        else:\n            self.downsample = nn.Identity()\n        self.feat_size = feat_size\n        window_size = to_2tuple(window_size)\n\n        feat_levels = int(math.log2(min(feat_size) / min(window_size)))\n        self.global_block = FeatureBlock(dim, feat_levels, **dd)\n        self.global_norm = norm_layer_cl(dim, **dd) if global_norm else nn.Identity()\n\n        self.blocks = nn.ModuleList([\n            GlobalContextVitBlock(\n                dim=dim,\n                num_heads=num_heads,\n                feat_size=feat_size,\n                window_size=window_size,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                use_global=(i % 2 != 0),\n                layer_scale=layer_scale,\n                proj_drop=proj_drop,\n                attn_drop=attn_drop,\n                drop_path=drop_path[i] if isinstance(drop_path, list) else drop_path,\n                act_layer=act_layer,\n                norm_layer=norm_layer_cl,\n                **dd,\n            )\n            for i in range(depth)\n        ])\n        self.norm = norm_layer_cl(dim, **dd) if stage_norm else nn.Identity()\n        self.dim = dim\n        self.feat_size = feat_size\n        self.grad_checkpointing = False\n\n    def forward(self, x):\n        # input NCHW, downsample & global block are 2d conv + pooling\n        x = self.downsample(x)\n        global_query = self.global_block(x)\n\n   
     # reshape NCHW --> NHWC for transformer blocks\n        x = x.permute(0, 2, 3, 1)\n        global_query = self.global_norm(global_query.permute(0, 2, 3, 1))\n        for blk in self.blocks:\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x, global_query)\n            else:\n                x = blk(x, global_query)\n        x = self.norm(x)\n        x = x.permute(0, 3, 1, 2).contiguous()  # back to NCHW\n        return x\n\n\nclass GlobalContextVit(nn.Module):\n    def __init__(\n            self,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            img_size: Union[int, Tuple[int, int]] = 224,\n            window_ratio: Tuple[int, ...] = (32, 32, 16, 32),\n            window_size: Optional[Union[int, Tuple[int, ...]]] = None,\n            embed_dim: int = 64,\n            depths: Tuple[int, ...] = (3, 4, 19, 5),\n            num_heads: Tuple[int, ...] = (2, 4, 8, 16),\n            mlp_ratio: float = 3.0,\n            qkv_bias: bool = True,\n            layer_scale: Optional[float] = None,\n            drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            weight_init: str = '',\n            act_layer: str = 'gelu',\n            norm_layer: str = 'layernorm2d',\n            norm_layer_cl: str = 'layernorm',\n            norm_eps: float = 1e-5,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        act_layer = get_act_layer(act_layer)\n        norm_layer = partial(get_norm_layer(norm_layer), eps=norm_eps)\n        norm_layer_cl = partial(get_norm_layer(norm_layer_cl), eps=norm_eps)\n        self.feature_info = []\n        img_size = to_2tuple(img_size)\n        feat_size = tuple(d // 4 for d in img_size)  # stem reduction by 4\n        
self.global_pool = global_pool\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        num_stages = len(depths)\n        self.num_features = self.head_hidden_size = int(embed_dim * 2 ** (num_stages - 1))\n        if window_size is not None:\n            window_size = to_ntuple(num_stages)(window_size)\n        else:\n            assert window_ratio is not None\n            window_size = tuple([(img_size[0] // r, img_size[1] // r) for r in to_ntuple(num_stages)(window_ratio)])\n\n        self.stem = Stem(\n            in_chs=in_chans,\n            out_chs=embed_dim,\n            act_layer=act_layer,\n            norm_layer=norm_layer,\n            **dd,\n        )\n\n        dpr = calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)\n        stages = []\n        for i in range(num_stages):\n            last_stage = i == num_stages - 1\n            stage_scale = 2 ** max(i - 1, 0)\n            stages.append(GlobalContextVitStage(\n                dim=embed_dim * stage_scale,\n                depth=depths[i],\n                num_heads=num_heads[i],\n                feat_size=(feat_size[0] // stage_scale, feat_size[1] // stage_scale),\n                window_size=window_size[i],\n                downsample=i != 0,\n                stage_norm=last_stage,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                layer_scale=layer_scale,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dpr[i],\n                act_layer=act_layer,\n                norm_layer=norm_layer,\n                norm_layer_cl=norm_layer_cl,\n                **dd,\n            ))\n            self.feature_info += [dict(num_chs=stages[-1].dim, reduction=2**(i+2), module=f'stages.{i}')]\n        self.stages = nn.Sequential(*stages)\n\n        # Classifier head\n        self.head = ClassifierHead(self.num_features, num_classes, 
pool_type=global_pool, drop_rate=drop_rate, **dd)\n\n        if weight_init:\n            named_apply(partial(self._init_weights, scheme=weight_init), self)\n\n    def _init_weights(self, module, name, scheme='vit'):\n        # note Conv2d left as default init\n        if scheme == 'vit':\n            if isinstance(module, nn.Linear):\n                nn.init.xavier_uniform_(module.weight)\n                if module.bias is not None:\n                    if 'mlp' in name:\n                        nn.init.normal_(module.bias, std=1e-6)\n                    else:\n                        nn.init.zeros_(module.bias)\n        else:\n            if isinstance(module, nn.Linear):\n                nn.init.normal_(module.weight, std=.02)\n                if module.bias is not None:\n                    nn.init.zeros_(module.bias)\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {\n            k for k, _ in self.named_parameters()\n            if any(n in k for n in [\"relative_position_bias_table\", \"rel_pos.mlp\"])}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^stem',  # stem and embed\n            blocks=r'^stages\\.(\\d+)'\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        for s in self.stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None, device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        if global_pool is None:\n            global_pool = self.head.global_pool.pool_type\n        self.head = ClassifierHead(self.num_features, num_classes, pool_type=global_pool, drop_rate=self.drop_rate, **dd)\n\n    def forward_intermediates(\n            self,\n            x: 
torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.stem(x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if 
prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.stem(x)\n        x = self.stages(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_gcvit(variant, pretrained=False, **kwargs):\n    model = build_model_with_cfg(\n        GlobalContextVit, variant, pretrained,\n        feature_cfg=dict(out_indices=(0, 1, 2, 3), flatten_sequential=True),\n        **kwargs\n    )\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.conv1', 'classifier': 'head.fc',\n        'fixed_input_size': True,\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'gcvit_xxtiny.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-morevit/gcvit_xxtiny_224_nvidia-d1d86009.pth'),\n    'gcvit_xtiny.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-morevit/gcvit_xtiny_224_nvidia-274b92b7.pth'),\n    'gcvit_tiny.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-morevit/gcvit_tiny_224_nvidia-ac783954.pth'),\n    'gcvit_small.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-morevit/gcvit_small_224_nvidia-4e98afa2.pth'),\n    'gcvit_base.in1k': _cfg(\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-morevit/gcvit_base_224_nvidia-f009139b.pth'),\n})\n\n\n@register_model\ndef gcvit_xxtiny(pretrained=False, **kwargs) -> GlobalContextVit:\n    model_kwargs = dict(\n        depths=(2, 2, 6, 2),\n        num_heads=(2, 4, 8, 16),\n        **kwargs)\n    return _create_gcvit('gcvit_xxtiny', pretrained=pretrained, **model_kwargs)\n\n\n@register_model\ndef gcvit_xtiny(pretrained=False, **kwargs) -> GlobalContextVit:\n    model_kwargs = dict(\n        depths=(3, 4, 6, 5),\n        num_heads=(2, 4, 8, 16),\n        **kwargs)\n    return _create_gcvit('gcvit_xtiny', pretrained=pretrained, **model_kwargs)\n\n\n@register_model\ndef gcvit_tiny(pretrained=False, **kwargs) -> GlobalContextVit:\n    model_kwargs = dict(\n        depths=(3, 4, 19, 5),\n        num_heads=(2, 4, 8, 16),\n        **kwargs)\n    return _create_gcvit('gcvit_tiny', pretrained=pretrained, **model_kwargs)\n\n\n@register_model\ndef gcvit_small(pretrained=False, **kwargs) -> GlobalContextVit:\n    model_kwargs = dict(\n        depths=(3, 4, 19, 5),\n        num_heads=(3, 6, 12, 24),\n        embed_dim=96,\n        mlp_ratio=2,\n        layer_scale=1e-5,\n        **kwargs)\n    return _create_gcvit('gcvit_small', pretrained=pretrained, **model_kwargs)\n\n\n@register_model\ndef gcvit_base(pretrained=False, **kwargs) -> GlobalContextVit:\n    model_kwargs = dict(\n        depths=(3, 4, 19, 5),\n        num_heads=(4, 8, 16, 32),\n        embed_dim=128,\n        mlp_ratio=2,\n        layer_scale=1e-5,\n        **kwargs)\n    return _create_gcvit('gcvit_base', pretrained=pretrained, **model_kwargs)\n"
  },
  {
    "path": "timm/models/ghostnet.py",
    "content": "\"\"\"\nAn implementation of GhostNet & GhostNetV2 Models as defined in:\nGhostNet: More Features from Cheap Operations. https://arxiv.org/abs/1911.11907\nGhostNetV2: Enhance Cheap Operation with Long-Range Attention. https://proceedings.neurips.cc/paper_files/paper/2022/file/40b60852a4abdaa696b5a1a78da34635-Paper-Conference.pdf\nGhostNetV3: Exploring the Training Strategies for Compact Models. https://arxiv.org/abs/2404.11202\n\nThe train script & code of models at:\nOriginal model: https://github.com/huawei-noah/CV-backbones/tree/master/ghostnet_pytorch\nOriginal model: https://github.com/huawei-noah/Efficient-AI-Backbones/blob/master/ghostnetv2_pytorch/model/ghostnetv2_torch.py\nOriginal model: https://github.com/huawei-noah/Efficient-AI-Backbones/blob/master/ghostnetv3_pytorch/ghostnetv3.py\n\"\"\"\nimport math\nfrom functools import partial\nfrom typing import Any, Dict, List, Set, Optional, Tuple, Union, Type\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import SelectAdaptivePool2d, Linear, make_divisible\nfrom timm.utils.model import reparameterize_model\n\nfrom ._builder import build_model_with_cfg\nfrom ._efficientnet_blocks import SqueezeExcite, ConvBnAct\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['GhostNet']\n\n\n_SE_LAYER = partial(SqueezeExcite, gate_layer='hard_sigmoid', rd_round_fn=partial(make_divisible, divisor=4))\n\n\nclass GhostModule(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 1,\n            ratio: int = 2,\n            dw_size: int = 3,\n            stride: int = 1,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 
'dtype': dtype}\n        super().__init__()\n        self.out_chs = out_chs\n        init_chs = math.ceil(out_chs / ratio)\n        new_chs = init_chs * (ratio - 1)\n\n        self.primary_conv = nn.Sequential(\n            nn.Conv2d(in_chs, init_chs, kernel_size, stride, kernel_size // 2, bias=False, **dd),\n            nn.BatchNorm2d(init_chs, **dd),\n            act_layer(inplace=True),\n        )\n\n        self.cheap_operation = nn.Sequential(\n            nn.Conv2d(init_chs, new_chs, dw_size, 1, dw_size//2, groups=init_chs, bias=False, **dd),\n            nn.BatchNorm2d(new_chs, **dd),\n            act_layer(inplace=True),\n        )\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x1 = self.primary_conv(x)\n        x2 = self.cheap_operation(x1)\n        out = torch.cat([x1, x2], dim=1)\n        return out[:, :self.out_chs, :, :]\n\n\nclass GhostModuleV2(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 1,\n            ratio: int = 2,\n            dw_size: int = 3,\n            stride: int = 1,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.gate_fn = nn.Sigmoid()\n        self.out_chs = out_chs\n        init_chs = math.ceil(out_chs / ratio)\n        new_chs = init_chs * (ratio - 1)\n        self.primary_conv = nn.Sequential(\n            nn.Conv2d(in_chs, init_chs, kernel_size, stride, kernel_size // 2, bias=False, **dd),\n            nn.BatchNorm2d(init_chs, **dd),\n            act_layer(inplace=True),\n        )\n        self.cheap_operation = nn.Sequential(\n            nn.Conv2d(init_chs, new_chs, dw_size, 1, dw_size // 2, groups=init_chs, bias=False, **dd),\n            nn.BatchNorm2d(new_chs, **dd),\n            act_layer(inplace=True),\n        )\n        self.short_conv = nn.Sequential(\n           
 nn.Conv2d(in_chs, out_chs, kernel_size, stride, kernel_size // 2, bias=False, **dd),\n            nn.BatchNorm2d(out_chs, **dd),\n            nn.Conv2d(out_chs, out_chs, kernel_size=(1, 5), stride=1, padding=(0, 2), groups=out_chs, bias=False, **dd),\n            nn.BatchNorm2d(out_chs, **dd),\n            nn.Conv2d(out_chs, out_chs, kernel_size=(5, 1), stride=1, padding=(2, 0), groups=out_chs, bias=False, **dd),\n            nn.BatchNorm2d(out_chs, **dd),\n        )\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        res = self.short_conv(F.avg_pool2d(x, kernel_size=2, stride=2))\n        x1 = self.primary_conv(x)\n        x2 = self.cheap_operation(x1)\n        out = torch.cat([x1, x2], dim=1)\n        return out[:, :self.out_chs, :, :] * F.interpolate(\n            self.gate_fn(res), size=(out.shape[-2], out.shape[-1]), mode='nearest')\n\n\nclass GhostModuleV3(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 1,\n            ratio: int = 2,\n            dw_size: int = 3,\n            stride: int = 1,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            mode: str = 'original',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.gate_fn = nn.Sigmoid()\n        self.out_chs = out_chs\n        init_chs = math.ceil(out_chs / ratio)\n        new_chs = init_chs * (ratio - 1)\n        self.mode = mode\n        self.num_conv_branches = 3\n        self.infer_mode = False\n        if not self.infer_mode:\n            self.primary_conv = nn.Identity()\n            self.cheap_operation = nn.Identity()\n\n        self.primary_rpr_skip = None\n        self.primary_rpr_scale = None\n        self.primary_rpr_conv = nn.ModuleList([\n            ConvBnAct(\n                in_chs,\n                init_chs,\n                kernel_size,\n                stride,\n             
   pad_type=kernel_size // 2,\n                act_layer=None,\n                **dd,\n            ) for _ in range(self.num_conv_branches)\n        ])\n        # Re-parameterizable scale branch\n        self.primary_activation = act_layer(inplace=True)\n        self.cheap_rpr_skip = nn.BatchNorm2d(init_chs, **dd)\n        self.cheap_rpr_conv = nn.ModuleList([\n            ConvBnAct(\n                init_chs,\n                new_chs,\n                dw_size,\n                1,\n                pad_type=dw_size // 2,\n                group_size=1,\n                act_layer=None,\n                **dd,\n            ) for _ in range(self.num_conv_branches)\n        ])\n        # Re-parameterizable scale branch\n        self.cheap_rpr_scale = ConvBnAct(init_chs, new_chs, 1, 1, pad_type=0, group_size=1, act_layer=None, **dd)\n        self.cheap_activation = act_layer(inplace=True)\n\n        self.short_conv = nn.Sequential(\n            nn.Conv2d(in_chs, out_chs, kernel_size, stride, kernel_size//2, bias=False, **dd),\n            nn.BatchNorm2d(out_chs, **dd),\n            nn.Conv2d(out_chs, out_chs, kernel_size=(1,5), stride=1, padding=(0,2), groups=out_chs, bias=False, **dd),\n            nn.BatchNorm2d(out_chs, **dd),\n            nn.Conv2d(out_chs, out_chs, kernel_size=(5,1), stride=1, padding=(2,0), groups=out_chs, bias=False, **dd),\n            nn.BatchNorm2d(out_chs, **dd),\n        ) if self.mode in ['shortcut'] else nn.Identity()\n\n        self.in_channels = init_chs\n        self.groups = init_chs\n        self.kernel_size = dw_size\n\n    def forward(self, x):\n        if self.infer_mode:\n            x1 = self.primary_conv(x)\n            x2 = self.cheap_operation(x1)\n        else:\n            x1 = 0\n            for primary_rpr_conv in self.primary_rpr_conv:\n                x1 += primary_rpr_conv(x)\n            x1 = self.primary_activation(x1)\n\n            x2 = self.cheap_rpr_scale(x1) + self.cheap_rpr_skip(x1)\n            for cheap_rpr_conv 
in self.cheap_rpr_conv:\n                x2 += cheap_rpr_conv(x1)\n            x2 = self.cheap_activation(x2)\n\n        out = torch.cat([x1,x2], dim=1)\n        if self.mode not in ['shortcut']:\n            return out\n        else:\n            res = self.short_conv(F.avg_pool2d(x, kernel_size=2, stride=2))\n            return out[:,:self.out_chs,:,:] * F.interpolate(\n                self.gate_fn(res), size=(out.shape[-2], out.shape[-1]), mode='nearest')\n\n    def _get_kernel_bias_primary(self):\n        kernel_scale = 0\n        bias_scale = 0\n        if self.primary_rpr_scale is not None:\n            kernel_scale, bias_scale = self._fuse_bn_tensor(self.primary_rpr_scale)\n            pad = self.kernel_size // 2\n            kernel_scale = F.pad(kernel_scale, [pad, pad, pad, pad])\n\n        kernel_identity = 0\n        bias_identity = 0\n        if self.primary_rpr_skip is not None:\n            kernel_identity, bias_identity = self._fuse_bn_tensor(self.primary_rpr_skip)\n\n        kernel_conv = 0\n        bias_conv = 0\n        for ix in range(self.num_conv_branches):\n            _kernel, _bias = self._fuse_bn_tensor(self.primary_rpr_conv[ix])\n            kernel_conv += _kernel\n            bias_conv += _bias\n\n        kernel_final = kernel_conv + kernel_scale + kernel_identity\n        bias_final = bias_conv + bias_scale + bias_identity\n        return kernel_final, bias_final\n\n    def _get_kernel_bias_cheap(self):\n        kernel_scale = 0\n        bias_scale = 0\n        if self.cheap_rpr_scale is not None:\n            kernel_scale, bias_scale = self._fuse_bn_tensor(self.cheap_rpr_scale)\n            pad = self.kernel_size // 2\n            kernel_scale = F.pad(kernel_scale, [pad, pad, pad, pad])\n\n        kernel_identity = 0\n        bias_identity = 0\n        if self.cheap_rpr_skip is not None:\n            kernel_identity, bias_identity = self._fuse_bn_tensor(self.cheap_rpr_skip)\n\n        kernel_conv = 0\n        bias_conv = 0\n        for 
ix in range(self.num_conv_branches):\n            _kernel, _bias = self._fuse_bn_tensor(self.cheap_rpr_conv[ix])\n            kernel_conv += _kernel\n            bias_conv += _bias\n\n        kernel_final = kernel_conv + kernel_scale + kernel_identity\n        bias_final = bias_conv + bias_scale + bias_identity\n        return kernel_final, bias_final\n\n    def _fuse_bn_tensor(self, branch):\n        if isinstance(branch, ConvBnAct):\n            kernel = branch.conv.weight\n            running_mean = branch.bn1.running_mean\n            running_var = branch.bn1.running_var\n            gamma = branch.bn1.weight\n            beta = branch.bn1.bias\n            eps = branch.bn1.eps\n        else:\n            assert isinstance(branch, nn.BatchNorm2d)\n            if not hasattr(self, 'id_tensor'):\n                input_dim = self.in_channels // self.groups\n                kernel_value = torch.zeros(\n                    (self.in_channels, input_dim, self.kernel_size, self.kernel_size),\n                    dtype=branch.weight.dtype,\n                    device=branch.weight.device\n                )\n                for i in range(self.in_channels):\n                    kernel_value[i, i % input_dim, self.kernel_size // 2, self.kernel_size // 2] = 1\n                self.id_tensor = kernel_value\n            kernel = self.id_tensor\n            running_mean = branch.running_mean\n            running_var = branch.running_var\n            gamma = branch.weight\n            beta = branch.bias\n            eps = branch.eps\n        std = (running_var + eps).sqrt()\n        t = (gamma / std).reshape(-1, 1, 1, 1)\n        return kernel * t, beta - running_mean * gamma / std\n\n    def switch_to_deploy(self):\n        if self.infer_mode:\n            return\n        primary_kernel, primary_bias = self._get_kernel_bias_primary()\n        self.primary_conv = nn.Conv2d(\n            in_channels=self.primary_rpr_conv[0].conv.in_channels,\n            
out_channels=self.primary_rpr_conv[0].conv.out_channels,\n            kernel_size=self.primary_rpr_conv[0].conv.kernel_size,\n            stride=self.primary_rpr_conv[0].conv.stride,\n            padding=self.primary_rpr_conv[0].conv.padding,\n            dilation=self.primary_rpr_conv[0].conv.dilation,\n            groups=self.primary_rpr_conv[0].conv.groups,\n            bias=True\n        )\n        self.primary_conv.weight.data = primary_kernel\n        self.primary_conv.bias.data = primary_bias\n        self.primary_conv = nn.Sequential(\n            self.primary_conv,\n            self.primary_activation if self.primary_activation is not None else nn.Sequential()\n        )\n\n        cheap_kernel, cheap_bias = self._get_kernel_bias_cheap()\n        self.cheap_operation = nn.Conv2d(\n            in_channels=self.cheap_rpr_conv[0].conv.in_channels,\n            out_channels=self.cheap_rpr_conv[0].conv.out_channels,\n            kernel_size=self.cheap_rpr_conv[0].conv.kernel_size,\n            stride=self.cheap_rpr_conv[0].conv.stride,\n            padding=self.cheap_rpr_conv[0].conv.padding,\n            dilation=self.cheap_rpr_conv[0].conv.dilation,\n            groups=self.cheap_rpr_conv[0].conv.groups,\n            bias=True\n        )\n        self.cheap_operation.weight.data = cheap_kernel\n        self.cheap_operation.bias.data = cheap_bias\n\n        self.cheap_operation = nn.Sequential(\n            self.cheap_operation,\n            self.cheap_activation if self.cheap_activation is not None else nn.Sequential()\n        )\n\n        # Delete un-used branches\n        for para in self.parameters():\n            para.detach_()\n        if hasattr(self, 'primary_rpr_conv'):\n            self.__delattr__('primary_rpr_conv')\n        if hasattr(self, 'primary_rpr_scale'):\n            self.__delattr__('primary_rpr_scale')\n        if hasattr(self, 'primary_rpr_skip'):\n            self.__delattr__('primary_rpr_skip')\n\n        if hasattr(self, 
'cheap_rpr_conv'):\n            self.__delattr__('cheap_rpr_conv')\n        if hasattr(self, 'cheap_rpr_scale'):\n            self.__delattr__('cheap_rpr_scale')\n        if hasattr(self, 'cheap_rpr_skip'):\n            self.__delattr__('cheap_rpr_skip')\n\n        self.infer_mode = True\n\n    def reparameterize(self):\n        self.switch_to_deploy()\n\n\nclass GhostBottleneck(nn.Module):\n    \"\"\" GhostV1/V2 bottleneck w/ optional SE\"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            mid_chs: int,\n            out_chs: int,\n            dw_kernel_size: int = 3,\n            stride: int = 1,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            se_ratio: float = 0.,\n            mode: str = 'original',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        has_se = se_ratio is not None and se_ratio > 0.\n        self.stride = stride\n\n        # Point-wise expansion\n        if mode == 'original':\n            self.ghost1 = GhostModule(in_chs, mid_chs, act_layer=act_layer, **dd)\n        else:\n            self.ghost1 = GhostModuleV2(in_chs, mid_chs, act_layer=act_layer, **dd)\n\n        # Depth-wise convolution\n        if self.stride > 1:\n            self.conv_dw = nn.Conv2d(\n                mid_chs,\n                mid_chs,\n                dw_kernel_size,\n                stride=stride,\n                padding=(dw_kernel_size-1)//2,\n                groups=mid_chs,\n                bias=False,\n                **dd,\n            )\n            self.bn_dw = nn.BatchNorm2d(mid_chs, **dd)\n        else:\n            self.conv_dw = None\n            self.bn_dw = None\n\n        # Squeeze-and-excitation\n        self.se = _SE_LAYER(mid_chs, rd_ratio=se_ratio, **dd) if has_se else None\n\n        # Point-wise linear projection\n        self.ghost2 = GhostModule(mid_chs, out_chs, act_layer=nn.Identity, **dd)\n\n        # 
shortcut\n        if in_chs == out_chs and self.stride == 1:\n            self.shortcut = nn.Sequential()\n        else:\n            self.shortcut = nn.Sequential(\n                nn.Conv2d(\n                    in_chs,\n                    in_chs,\n                    dw_kernel_size,\n                    stride=stride,\n                    padding=(dw_kernel_size-1)//2,\n                    groups=in_chs,\n                    bias=False,\n                    **dd,\n                ),\n                nn.BatchNorm2d(in_chs, **dd),\n                nn.Conv2d(in_chs, out_chs, 1, stride=1, padding=0, bias=False, **dd),\n                nn.BatchNorm2d(out_chs, **dd),\n            )\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        shortcut = x\n\n        # 1st ghost bottleneck\n        x = self.ghost1(x)\n\n        # Depth-wise convolution\n        if self.conv_dw is not None:\n            x = self.conv_dw(x)\n            x = self.bn_dw(x)\n\n        # Squeeze-and-excitation\n        if self.se is not None:\n            x = self.se(x)\n\n        # 2nd ghost bottleneck\n        x = self.ghost2(x)\n\n        x += self.shortcut(shortcut)\n        return x\n\n\nclass GhostBottleneckV3(nn.Module):\n    \"\"\" GhostV3 bottleneck w/ optional SE\"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            mid_chs: int,\n            out_chs: int,\n            dw_kernel_size: int = 3,\n            stride: int = 1,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            se_ratio: float = 0.,\n            mode: str = 'original',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        has_se = se_ratio is not None and se_ratio > 0.\n        self.stride = stride\n\n        self.num_conv_branches = 3\n        self.infer_mode = False\n        if not self.infer_mode:\n            self.conv_dw = nn.Identity()\n            self.bn_dw = 
nn.Identity()\n\n        # Point-wise expansion\n        self.ghost1 = GhostModuleV3(in_chs, mid_chs, act_layer=act_layer, mode=mode, **dd)\n\n        # Depth-wise convolution\n        if self.stride > 1:\n            self.dw_rpr_conv = nn.ModuleList([ConvBnAct(\n                mid_chs,\n                mid_chs,\n                dw_kernel_size,\n                stride,\n                pad_type=(dw_kernel_size - 1) // 2,\n                group_size=1,\n                act_layer=None,\n                **dd,\n            ) for _ in range(self.num_conv_branches)\n            ])\n            # Re-parameterizable scale branch\n            self.dw_rpr_scale = ConvBnAct(mid_chs, mid_chs, 1, 2, pad_type=0, group_size=1, act_layer=None, **dd)\n            self.kernel_size = dw_kernel_size\n            self.in_channels = mid_chs\n        else:\n            self.dw_rpr_conv = nn.ModuleList()\n            self.dw_rpr_scale = nn.Identity()\n        self.dw_rpr_skip = None\n\n        # Squeeze-and-excitation\n        self.se = _SE_LAYER(mid_chs, rd_ratio=se_ratio, **dd) if has_se else nn.Identity()\n\n        # Point-wise linear projection\n        self.ghost2 = GhostModuleV3(mid_chs, out_chs, act_layer=nn.Identity, mode='original', **dd)\n\n        # shortcut\n        if in_chs == out_chs and self.stride == 1:\n            self.shortcut = nn.Identity()\n        else:\n            self.shortcut = nn.Sequential(\n                nn.Conv2d(\n                    in_chs,\n                    in_chs,\n                    dw_kernel_size,\n                    stride=stride,\n                    padding=(dw_kernel_size-1)//2,\n                    groups=in_chs,\n                    bias=False,\n                    **dd,\n                ),\n                nn.BatchNorm2d(in_chs, **dd),\n                nn.Conv2d(in_chs, out_chs, 1, stride=1, padding=0, bias=False, **dd),\n                nn.BatchNorm2d(out_chs, **dd),\n            )\n\n    def forward(self, x: torch.Tensor) -> 
torch.Tensor:\n        shortcut = x\n\n        # 1st ghost bottleneck\n        x = self.ghost1(x)\n\n        # Depth-wise convolution\n        if self.stride > 1:\n            if self.infer_mode:\n                x = self.conv_dw(x)\n                x = self.bn_dw(x)\n            else:\n                x1 = self.dw_rpr_scale(x)\n                for dw_rpr_conv in self.dw_rpr_conv:\n                    x1 += dw_rpr_conv(x)\n                x = x1\n\n        # Squeeze-and-excitation\n        x = self.se(x)\n\n        # 2nd ghost bottleneck\n        x = self.ghost2(x)\n\n        x += self.shortcut(shortcut)\n        return x\n\n    def _get_kernel_bias_dw(self):\n        kernel_scale = 0\n        bias_scale = 0\n        if self.dw_rpr_scale is not None:\n            kernel_scale, bias_scale = self._fuse_bn_tensor(self.dw_rpr_scale)\n            pad = self.kernel_size // 2\n            kernel_scale = F.pad(kernel_scale, [pad, pad, pad, pad])\n\n        kernel_identity = 0\n        bias_identity = 0\n        if self.dw_rpr_skip is not None:\n            kernel_identity, bias_identity = self._fuse_bn_tensor(self.dw_rpr_skip)\n\n        kernel_conv = 0\n        bias_conv = 0\n        for ix in range(self.num_conv_branches):\n            _kernel, _bias = self._fuse_bn_tensor(self.dw_rpr_conv[ix])\n            kernel_conv += _kernel\n            bias_conv += _bias\n\n        kernel_final = kernel_conv + kernel_scale + kernel_identity\n        bias_final = bias_conv + bias_scale + bias_identity\n        return kernel_final, bias_final\n\n    def _fuse_bn_tensor(self, branch):\n        if isinstance(branch, ConvBnAct):\n            kernel = branch.conv.weight\n            running_mean = branch.bn1.running_mean\n            running_var = branch.bn1.running_var\n            gamma = branch.bn1.weight\n            beta = branch.bn1.bias\n            eps = branch.bn1.eps\n        else:\n            assert isinstance(branch, nn.BatchNorm2d)\n            if not hasattr(self, 
'id_tensor'):\n                input_dim = self.in_channels // self.groups\n                kernel_value = torch.zeros(\n                    (self.in_channels, input_dim, self.kernel_size, self.kernel_size),\n                    dtype=branch.weight.dtype,\n                    device=branch.weight.device\n                )\n                for i in range(self.in_channels):\n                    kernel_value[i, i % input_dim, self.kernel_size // 2, self.kernel_size // 2] = 1\n                self.id_tensor = kernel_value\n            kernel = self.id_tensor\n            running_mean = branch.running_mean\n            running_var = branch.running_var\n            gamma = branch.weight\n            beta = branch.bias\n            eps = branch.eps\n        std = (running_var + eps).sqrt()\n        t = (gamma / std).reshape(-1, 1, 1, 1)\n        return kernel * t, beta - running_mean * gamma / std\n\n    def switch_to_deploy(self):\n        if self.infer_mode or self.stride == 1:\n            return\n        dw_kernel, dw_bias = self._get_kernel_bias_dw()\n        self.conv_dw = nn.Conv2d(\n            in_channels=self.dw_rpr_conv[0].conv.in_channels,\n            out_channels=self.dw_rpr_conv[0].conv.out_channels,\n            kernel_size=self.dw_rpr_conv[0].conv.kernel_size,\n            stride=self.dw_rpr_conv[0].conv.stride,\n            padding=self.dw_rpr_conv[0].conv.padding,\n            dilation=self.dw_rpr_conv[0].conv.dilation,\n            groups=self.dw_rpr_conv[0].conv.groups,\n            bias=True\n        )\n        self.conv_dw.weight.data = dw_kernel\n        self.conv_dw.bias.data = dw_bias\n        self.bn_dw = nn.Identity()\n\n        # Delete un-used branches\n        for para in self.parameters():\n            para.detach_()\n        if hasattr(self, 'dw_rpr_conv'):\n            self.__delattr__('dw_rpr_conv')\n        if hasattr(self, 'dw_rpr_scale'):\n            self.__delattr__('dw_rpr_scale')\n        if hasattr(self, 'dw_rpr_skip'):\n         
   self.__delattr__('dw_rpr_skip')\n\n        self.infer_mode = True\n\n    def reparameterize(self):\n        self.switch_to_deploy()\n\n\nclass GhostNet(nn.Module):\n    def __init__(\n            self,\n            cfgs: List[List[List[Union[int, float]]]],\n            num_classes: int = 1000,\n            width: float = 1.0,\n            in_chans: int = 3,\n            output_stride: int = 32,\n            global_pool: str = 'avg',\n            drop_rate: float = 0.2,\n            version: str = 'v1',\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        # setting of inverted residual blocks\n        assert output_stride == 32, 'only output_stride==32 is valid, dilation not supported'\n        self.cfgs = cfgs\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n        self.feature_info = []\n        Bottleneck = GhostBottleneckV3 if version == 'v3' else GhostBottleneck\n\n        # building first layer\n        stem_chs = make_divisible(16 * width, 4)\n        self.conv_stem = nn.Conv2d(in_chans, stem_chs, 3, 2, 1, bias=False, **dd)\n        self.feature_info.append(dict(num_chs=stem_chs, reduction=2, module=f'conv_stem'))\n        self.bn1 = nn.BatchNorm2d(stem_chs, **dd)\n        self.act1 = nn.ReLU(inplace=True)\n        prev_chs = stem_chs\n\n        # building inverted residual blocks\n        stages = nn.ModuleList([])\n        stage_idx = 0\n        layer_idx = 0\n        net_stride = 2\n        for cfg in self.cfgs:\n            layers = []\n            s = 1\n            for k, exp_size, c, se_ratio, s in cfg:\n                out_chs = make_divisible(c * width, 4)\n                mid_chs = make_divisible(exp_size * width, 4)\n                layer_kwargs = dict(**dd)\n                if version == 'v2' and layer_idx > 1:\n                    
layer_kwargs['mode'] = 'attn'\n                if version == 'v3' and layer_idx > 1:\n                    layer_kwargs['mode'] = 'shortcut'\n                layers.append(Bottleneck(prev_chs, mid_chs, out_chs, k, s, se_ratio=se_ratio, **layer_kwargs))\n                prev_chs = out_chs\n                layer_idx += 1\n            if s > 1:\n                net_stride *= 2\n                self.feature_info.append(dict(\n                    num_chs=prev_chs, reduction=net_stride, module=f'blocks.{stage_idx}'))\n            stages.append(nn.Sequential(*layers))\n            stage_idx += 1\n\n        out_chs = make_divisible(exp_size * width, 4)\n        stages.append(nn.Sequential(ConvBnAct(prev_chs, out_chs, 1, **dd)))\n        self.pool_dim = prev_chs = out_chs\n\n        self.blocks = nn.Sequential(*stages)\n\n        # building last several layers\n        self.num_features = prev_chs\n        self.head_hidden_size = out_chs = 1280\n        self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)\n        self.conv_head = nn.Conv2d(prev_chs, out_chs, 1, 1, 0, bias=True, **dd)\n        self.act2 = nn.ReLU(inplace=True)\n        self.flatten = nn.Flatten(1) if global_pool else nn.Identity()  # don't flatten if pooling disabled\n        self.classifier = Linear(out_chs, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n        # FIXME init\n\n    @torch.jit.ignore\n    def no_weight_decay(self) -> Set:\n        return set()\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        matcher = dict(\n            stem=r'^conv_stem|bn1',\n            blocks=[\n                (r'^blocks\\.(\\d+)' if coarse else r'^blocks\\.(\\d+)\\.(\\d+)', None),\n                (r'conv_head', (99999,))\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def 
get_classifier(self) -> nn.Module:\n        return self.classifier\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg'):\n        self.num_classes = num_classes\n        # cannot meaningfully change pooling of efficient head after creation\n        self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)\n        self.flatten = nn.Flatten(1) if global_pool else nn.Identity()  # don't flatten if pooling disabled\n        self.classifier = Linear(\n            self.head_hidden_size, num_classes,\n            device=self.conv_head.weight.device, dtype=self.conv_head.weight.dtype\n        ) if num_classes > 0 else nn.Identity()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        stage_ends = [-1] + [int(info['module'].split('.')[-1]) for info in self.feature_info[1:]]\n        take_indices, max_index = feature_take_indices(len(stage_ends), indices)\n        take_indices = [stage_ends[i]+1 for i in take_indices]\n        max_index = stage_ends[max_index]\n\n        # forward pass\n        
feat_idx = 0\n        x = self.conv_stem(x)\n        if feat_idx in take_indices:\n            intermediates.append(x)\n        x = self.bn1(x)\n        x = self.act1(x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.blocks\n        else:\n            stages = self.blocks[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages, start=1):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint_seq(stage, x)\n            else:\n                x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        stage_ends = [-1] + [int(info['module'].split('.')[-1]) for info in self.feature_info[1:]]\n        take_indices, max_index = feature_take_indices(len(stage_ends), indices)\n        max_index = stage_ends[max_index]\n        self.blocks = self.blocks[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.conv_stem(x)\n        x = self.bn1(x)\n        x = self.act1(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x, flatten=True)\n        else:\n            x = self.blocks(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        x = self.global_pool(x)\n        x = self.conv_head(x)\n        x = self.act2(x)\n        
x = self.flatten(x)\n        if self.drop_rate > 0.:\n            x = F.dropout(x, p=self.drop_rate, training=self.training)\n        return x if pre_logits else self.classifier(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n    def convert_to_deploy(self):\n        reparameterize_model(self, inplace=False)\n\n\ndef checkpoint_filter_fn(state_dict: Dict[str, torch.Tensor], model: nn.Module) -> Dict[str, torch.Tensor]:\n    if 'state_dict' in state_dict:\n        state_dict = state_dict['state_dict']\n\n    out_dict = {}\n    for k, v in state_dict.items():\n        if 'bn.' in k and '.ghost' in k:\n            k = k.replace('bn.', 'bn1.')\n        if 'bn.' in k and '.dw_rpr_' in k:\n            k = k.replace('bn.', 'bn1.')\n        if 'total' in k:\n            continue\n        out_dict[k] = v\n    return out_dict\n\n\ndef _create_ghostnet(variant: str, width: float = 1.0, pretrained: bool = False, **kwargs: Any) -> GhostNet:\n    \"\"\"\n    Constructs a GhostNet model\n    \"\"\"\n    cfgs = [\n        # k, t, c, SE, s\n        # stage1\n        [[3,  16,  16, 0, 1]],\n        # stage2\n        [[3,  48,  24, 0, 2]],\n        [[3,  72,  24, 0, 1]],\n        # stage3\n        [[5,  72,  40, 0.25, 2]],\n        [[5, 120,  40, 0.25, 1]],\n        # stage4\n        [[3, 240,  80, 0, 2]],\n        [[3, 200,  80, 0, 1],\n         [3, 184,  80, 0, 1],\n         [3, 184,  80, 0, 1],\n         [3, 480, 112, 0.25, 1],\n         [3, 672, 112, 0.25, 1]\n        ],\n        # stage5\n        [[5, 672, 160, 0.25, 2]],\n        [[5, 960, 160, 0, 1],\n         [5, 960, 160, 0.25, 1],\n         [5, 960, 160, 0, 1],\n         [5, 960, 160, 0.25, 1]\n        ]\n    ]\n    model_kwargs = dict(\n        cfgs=cfgs,\n        width=width,\n        **kwargs,\n    )\n    return build_model_with_cfg(\n        GhostNet,\n        variant,\n        pretrained,\n        
pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(flatten_sequential=True),\n        **model_kwargs,\n    )\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'conv_stem', 'classifier': 'classifier',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'ghostnet_050.untrained': _cfg(),\n    'ghostnet_100.in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/huawei-noah/CV-backbones/releases/download/ghostnet_pth/ghostnet_1x.pth'\n    ),\n    'ghostnet_130.untrained': _cfg(),\n    'ghostnetv2_100.in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/huawei-noah/Efficient-AI-Backbones/releases/download/GhostNetV2/ck_ghostnetv2_10.pth.tar'\n    ),\n    'ghostnetv2_130.in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/huawei-noah/Efficient-AI-Backbones/releases/download/GhostNetV2/ck_ghostnetv2_13.pth.tar'\n    ),\n    'ghostnetv2_160.in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/huawei-noah/Efficient-AI-Backbones/releases/download/GhostNetV2/ck_ghostnetv2_16.pth.tar'\n    ),\n    'ghostnetv3_050.untrained': _cfg(),\n    'ghostnetv3_100.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/huawei-noah/Efficient-AI-Backbones/releases/download/GhostNetV3/ghostnetv3-1.0.pth.tar'\n    ),\n    'ghostnetv3_130.untrained': _cfg(),\n    'ghostnetv3_160.untrained': _cfg(),\n})\n\n\n@register_model\ndef ghostnet_050(pretrained=False, **kwargs) -> GhostNet:\n    \"\"\" GhostNet-0.5x \"\"\"\n    model = _create_ghostnet('ghostnet_050', width=0.5, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef ghostnet_100(pretrained=False, 
**kwargs) -> GhostNet:\n    \"\"\" GhostNet-1.0x \"\"\"\n    model = _create_ghostnet('ghostnet_100', width=1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef ghostnet_130(pretrained=False, **kwargs) -> GhostNet:\n    \"\"\" GhostNet-1.3x \"\"\"\n    model = _create_ghostnet('ghostnet_130', width=1.3, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef ghostnetv2_100(pretrained=False, **kwargs) -> GhostNet:\n    \"\"\" GhostNetV2-1.0x \"\"\"\n    model = _create_ghostnet('ghostnetv2_100', width=1.0, pretrained=pretrained, version='v2', **kwargs)\n    return model\n\n\n@register_model\ndef ghostnetv2_130(pretrained=False, **kwargs) -> GhostNet:\n    \"\"\" GhostNetV2-1.3x \"\"\"\n    model = _create_ghostnet('ghostnetv2_130', width=1.3, pretrained=pretrained, version='v2', **kwargs)\n    return model\n\n\n@register_model\ndef ghostnetv2_160(pretrained=False, **kwargs) -> GhostNet:\n    \"\"\" GhostNetV2-1.6x \"\"\"\n    model = _create_ghostnet('ghostnetv2_160', width=1.6, pretrained=pretrained, version='v2', **kwargs)\n    return model\n\n\n@register_model\ndef ghostnetv3_050(pretrained: bool = False, **kwargs: Any) -> GhostNet:\n    \"\"\" GhostNetV3-0.5x \"\"\"\n    model = _create_ghostnet('ghostnetv3_050', width=0.5, pretrained=pretrained, version='v3', **kwargs)\n    return model\n\n\n@register_model\ndef ghostnetv3_100(pretrained: bool = False, **kwargs: Any) -> GhostNet:\n    \"\"\" GhostNetV3-1.0x \"\"\"\n    model = _create_ghostnet('ghostnetv3_100', width=1.0, pretrained=pretrained, version='v3', **kwargs)\n    return model\n\n\n@register_model\ndef ghostnetv3_130(pretrained: bool = False, **kwargs: Any) -> GhostNet:\n    \"\"\" GhostNetV3-1.3x \"\"\"\n    model = _create_ghostnet('ghostnetv3_130', width=1.3, pretrained=pretrained, version='v3', **kwargs)\n    return model\n\n\n@register_model\ndef ghostnetv3_160(pretrained: bool = False, **kwargs: Any) -> GhostNet:\n    \"\"\" GhostNetV3-1.6x 
\"\"\"\n    model = _create_ghostnet('ghostnetv3_160', width=1.6, pretrained=pretrained, version='v3', **kwargs)\n    return model"
  },
  {
    "path": "timm/models/hardcorenas.py",
    "content": "from functools import partial\n\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom ._builder import build_model_with_cfg\nfrom ._builder import pretrained_cfg_for_features\nfrom ._efficientnet_blocks import SqueezeExcite\nfrom ._efficientnet_builder import decode_arch_def, resolve_act_layer, resolve_bn_args, round_channels\nfrom ._registry import register_model, generate_default_cfgs\nfrom .mobilenetv3 import MobileNetV3, MobileNetV3Features\n\n__all__ = []  # model_registry will add each entrypoint fn to this\n\n\ndef _gen_hardcorenas(pretrained, variant, arch_def, **kwargs):\n    \"\"\"Creates a hardcorenas model\n\n    Ref impl: https://github.com/Alibaba-MIIL/HardCoReNAS\n    Paper: https://arxiv.org/abs/2102.11646\n\n    \"\"\"\n    num_features = 1280\n    se_layer = partial(SqueezeExcite, gate_layer='hard_sigmoid', force_act_layer=nn.ReLU, rd_round_fn=round_channels)\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def),\n        num_features=num_features,\n        stem_size=32,\n        norm_layer=partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        act_layer=resolve_act_layer(kwargs, 'hard_swish'),\n        se_layer=se_layer,\n        **kwargs,\n    )\n\n    features_only = False\n    model_cls = MobileNetV3\n    kwargs_filter = None\n    if model_kwargs.pop('features_only', False):\n        features_only = True\n        kwargs_filter = ('num_classes', 'num_features', 'global_pool', 'head_conv', 'head_bias', 'global_pool')\n        model_cls = MobileNetV3Features\n    model = build_model_with_cfg(\n        model_cls,\n        variant,\n        pretrained,\n        pretrained_strict=not features_only,\n        kwargs_filter=kwargs_filter,\n        **model_kwargs,\n    )\n    if features_only:\n        model.default_cfg = pretrained_cfg_for_features(model.default_cfg)\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url, 'num_classes': 
1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bilinear',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'conv_stem', 'classifier': 'classifier',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'hardcorenas_a.miil_green_in1k': _cfg(hf_hub_id='timm/'),\n    'hardcorenas_b.miil_green_in1k': _cfg(hf_hub_id='timm/'),\n    'hardcorenas_c.miil_green_in1k': _cfg(hf_hub_id='timm/'),\n    'hardcorenas_d.miil_green_in1k': _cfg(hf_hub_id='timm/'),\n    'hardcorenas_e.miil_green_in1k': _cfg(hf_hub_id='timm/'),\n    'hardcorenas_f.miil_green_in1k': _cfg(hf_hub_id='timm/'),\n})\n\n\n@register_model\ndef hardcorenas_a(pretrained=False, **kwargs) -> MobileNetV3:\n    \"\"\" hardcorenas_A \"\"\"\n    arch_def = [['ds_r1_k3_s1_e1_c16_nre'], ['ir_r1_k5_s2_e3_c24_nre', 'ir_r1_k5_s1_e3_c24_nre_se0.25'],\n                ['ir_r1_k5_s2_e3_c40_nre', 'ir_r1_k5_s1_e6_c40_nre_se0.25'],\n                ['ir_r1_k5_s2_e6_c80_se0.25', 'ir_r1_k5_s1_e6_c80_se0.25'],\n                ['ir_r1_k5_s1_e6_c112_se0.25', 'ir_r1_k5_s1_e6_c112_se0.25'],\n                ['ir_r1_k5_s2_e6_c192_se0.25', 'ir_r1_k5_s1_e6_c192_se0.25'], ['cn_r1_k1_s1_c960']]\n    model = _gen_hardcorenas(pretrained=pretrained, variant='hardcorenas_a', arch_def=arch_def, **kwargs)\n    return model\n\n\n@register_model\ndef hardcorenas_b(pretrained=False, **kwargs) -> MobileNetV3:\n    \"\"\" hardcorenas_B \"\"\"\n    arch_def = [['ds_r1_k3_s1_e1_c16_nre'],\n                ['ir_r1_k5_s2_e3_c24_nre', 'ir_r1_k5_s1_e3_c24_nre_se0.25', 'ir_r1_k3_s1_e3_c24_nre'],\n                ['ir_r1_k5_s2_e3_c40_nre', 'ir_r1_k5_s1_e3_c40_nre', 'ir_r1_k5_s1_e3_c40_nre'],\n                ['ir_r1_k5_s2_e3_c80', 'ir_r1_k5_s1_e3_c80', 'ir_r1_k3_s1_e3_c80', 'ir_r1_k3_s1_e3_c80'],\n                ['ir_r1_k5_s1_e3_c112', 'ir_r1_k3_s1_e3_c112', 'ir_r1_k3_s1_e3_c112', 'ir_r1_k3_s1_e3_c112'],\n    
            ['ir_r1_k5_s2_e6_c192_se0.25', 'ir_r1_k5_s1_e6_c192_se0.25', 'ir_r1_k3_s1_e3_c192_se0.25'],\n                ['cn_r1_k1_s1_c960']]\n    model = _gen_hardcorenas(pretrained=pretrained, variant='hardcorenas_b', arch_def=arch_def, **kwargs)\n    return model\n\n\n@register_model\ndef hardcorenas_c(pretrained=False, **kwargs) -> MobileNetV3:\n    \"\"\" hardcorenas_C \"\"\"\n    arch_def = [['ds_r1_k3_s1_e1_c16_nre'], ['ir_r1_k5_s2_e3_c24_nre', 'ir_r1_k5_s1_e3_c24_nre_se0.25'],\n                ['ir_r1_k5_s2_e3_c40_nre', 'ir_r1_k5_s1_e3_c40_nre', 'ir_r1_k5_s1_e3_c40_nre',\n                 'ir_r1_k5_s1_e3_c40_nre'],\n                ['ir_r1_k5_s2_e4_c80', 'ir_r1_k5_s1_e6_c80_se0.25', 'ir_r1_k3_s1_e3_c80', 'ir_r1_k3_s1_e3_c80'],\n                ['ir_r1_k5_s1_e6_c112_se0.25', 'ir_r1_k3_s1_e3_c112', 'ir_r1_k3_s1_e3_c112', 'ir_r1_k3_s1_e3_c112'],\n                ['ir_r1_k5_s2_e6_c192_se0.25', 'ir_r1_k5_s1_e6_c192_se0.25', 'ir_r1_k3_s1_e3_c192_se0.25'],\n                ['cn_r1_k1_s1_c960']]\n    model = _gen_hardcorenas(pretrained=pretrained, variant='hardcorenas_c', arch_def=arch_def, **kwargs)\n    return model\n\n\n@register_model\ndef hardcorenas_d(pretrained=False, **kwargs) -> MobileNetV3:\n    \"\"\" hardcorenas_D \"\"\"\n    arch_def = [['ds_r1_k3_s1_e1_c16_nre'], ['ir_r1_k5_s2_e3_c24_nre_se0.25', 'ir_r1_k5_s1_e3_c24_nre_se0.25'],\n                ['ir_r1_k5_s2_e3_c40_nre_se0.25', 'ir_r1_k5_s1_e4_c40_nre_se0.25', 'ir_r1_k3_s1_e3_c40_nre_se0.25'],\n                ['ir_r1_k5_s2_e4_c80_se0.25', 'ir_r1_k3_s1_e3_c80_se0.25', 'ir_r1_k3_s1_e3_c80_se0.25',\n                 'ir_r1_k3_s1_e3_c80_se0.25'],\n                ['ir_r1_k3_s1_e4_c112_se0.25', 'ir_r1_k5_s1_e4_c112_se0.25', 'ir_r1_k3_s1_e3_c112_se0.25',\n                 'ir_r1_k5_s1_e3_c112_se0.25'],\n                ['ir_r1_k5_s2_e6_c192_se0.25', 'ir_r1_k5_s1_e6_c192_se0.25', 'ir_r1_k5_s1_e6_c192_se0.25',\n                 'ir_r1_k3_s1_e6_c192_se0.25'], ['cn_r1_k1_s1_c960']]\n    model = 
_gen_hardcorenas(pretrained=pretrained, variant='hardcorenas_d', arch_def=arch_def, **kwargs)\n    return model\n\n\n@register_model\ndef hardcorenas_e(pretrained=False, **kwargs) -> MobileNetV3:\n    \"\"\" hardcorenas_E \"\"\"\n    arch_def = [['ds_r1_k3_s1_e1_c16_nre'], ['ir_r1_k5_s2_e3_c24_nre_se0.25', 'ir_r1_k5_s1_e3_c24_nre_se0.25'],\n                ['ir_r1_k5_s2_e6_c40_nre_se0.25', 'ir_r1_k5_s1_e4_c40_nre_se0.25', 'ir_r1_k5_s1_e4_c40_nre_se0.25',\n                 'ir_r1_k3_s1_e3_c40_nre_se0.25'], ['ir_r1_k5_s2_e4_c80_se0.25', 'ir_r1_k3_s1_e6_c80_se0.25'],\n                ['ir_r1_k5_s1_e6_c112_se0.25', 'ir_r1_k5_s1_e6_c112_se0.25', 'ir_r1_k5_s1_e6_c112_se0.25',\n                 'ir_r1_k5_s1_e3_c112_se0.25'],\n                ['ir_r1_k5_s2_e6_c192_se0.25', 'ir_r1_k5_s1_e6_c192_se0.25', 'ir_r1_k5_s1_e6_c192_se0.25',\n                 'ir_r1_k3_s1_e6_c192_se0.25'], ['cn_r1_k1_s1_c960']]\n    model = _gen_hardcorenas(pretrained=pretrained, variant='hardcorenas_e', arch_def=arch_def, **kwargs)\n    return model\n\n\n@register_model\ndef hardcorenas_f(pretrained=False, **kwargs) -> MobileNetV3:\n    \"\"\" hardcorenas_F \"\"\"\n    arch_def = [['ds_r1_k3_s1_e1_c16_nre'], ['ir_r1_k5_s2_e3_c24_nre_se0.25', 'ir_r1_k5_s1_e3_c24_nre_se0.25'],\n                ['ir_r1_k5_s2_e6_c40_nre_se0.25', 'ir_r1_k5_s1_e6_c40_nre_se0.25'],\n                ['ir_r1_k5_s2_e6_c80_se0.25', 'ir_r1_k5_s1_e6_c80_se0.25', 'ir_r1_k3_s1_e3_c80_se0.25',\n                 'ir_r1_k3_s1_e3_c80_se0.25'],\n                ['ir_r1_k3_s1_e6_c112_se0.25', 'ir_r1_k5_s1_e6_c112_se0.25', 'ir_r1_k5_s1_e6_c112_se0.25',\n                 'ir_r1_k3_s1_e3_c112_se0.25'],\n                ['ir_r1_k5_s2_e6_c192_se0.25', 'ir_r1_k5_s1_e6_c192_se0.25', 'ir_r1_k3_s1_e6_c192_se0.25',\n                 'ir_r1_k3_s1_e6_c192_se0.25'], ['cn_r1_k1_s1_c960']]\n    model = _gen_hardcorenas(pretrained=pretrained, variant='hardcorenas_f', arch_def=arch_def, **kwargs)\n    return model\n"
  },
  {
    "path": "timm/models/helpers.py",
    "content": "from ._builder import *\nfrom ._helpers import *\nfrom ._manipulate import *\nfrom ._prune import *\n\nimport warnings\nwarnings.warn(f\"Importing from {__name__} is deprecated, please import via timm.models\", FutureWarning)\n"
  },
  {
    "path": "timm/models/hgnet.py",
    "content": "\"\"\" PP-HGNet (V1 & V2)\n\nReference:\nhttps://github.com/PaddlePaddle/PaddleClas/blob/develop/docs/zh_CN/models/ImageNet1k/PP-HGNetV2.md\nThe Paddle Implement of PP-HGNet (https://github.com/PaddlePaddle/PaddleClas/blob/release/2.5.1/docs/en/models/PP-HGNet_en.md)\nPP-HGNet: https://github.com/PaddlePaddle/PaddleClas/blob/release/2.5.1/ppcls/arch/backbone/legendary_models/pp_hgnet.py\nPP-HGNetv2: https://github.com/PaddlePaddle/PaddleClas/blob/release/2.5.1/ppcls/arch/backbone/legendary_models/pp_hgnet_v2.py\n\"\"\"\nfrom typing import Dict, List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import SelectAdaptivePool2d, DropPath, calculate_drop_path_rates, create_conv2d\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._registry import register_model, generate_default_cfgs\nfrom ._manipulate import checkpoint_seq\n\n__all__ = ['HighPerfGpuNet']\n\n\nclass LearnableAffineBlock(nn.Module):\n    def __init__(\n            self,\n            scale_value: float = 1.0,\n            bias_value: float = 0.0,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.scale = nn.Parameter(torch.tensor([scale_value], **dd), requires_grad=True)\n        self.bias = nn.Parameter(torch.tensor([bias_value], **dd), requires_grad=True)\n\n    def forward(self, x):\n        return self.scale * x + self.bias\n\n\nclass ConvBNAct(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int,\n            stride: int = 1,\n            groups: int = 1,\n            padding: str = '',\n            use_act: bool = True,\n            use_lab: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd 
= {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.use_act = use_act\n        self.use_lab = use_lab\n        self.conv = create_conv2d(\n            in_chs,\n            out_chs,\n            kernel_size,\n            stride=stride,\n            padding=padding,\n            groups=groups,\n            **dd,\n        )\n        self.bn = nn.BatchNorm2d(out_chs, **dd)\n        if self.use_act:\n            self.act = nn.ReLU()\n        else:\n            self.act = nn.Identity()\n        if self.use_act and self.use_lab:\n            self.lab = LearnableAffineBlock(**dd)\n        else:\n            self.lab = nn.Identity()\n\n    def forward(self, x):\n        x = self.conv(x)\n        x = self.bn(x)\n        x = self.act(x)\n        x = self.lab(x)\n        return x\n\n\nclass LightConvBNAct(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int,\n            groups: int = 1,\n            use_lab: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv1 = ConvBNAct(\n            in_chs,\n            out_chs,\n            kernel_size=1,\n            use_act=False,\n            use_lab=use_lab,\n            **dd,\n        )\n        self.conv2 = ConvBNAct(\n            out_chs,\n            out_chs,\n            kernel_size=kernel_size,\n            groups=out_chs,\n            use_act=True,\n            use_lab=use_lab,\n            **dd,\n        )\n\n    def forward(self, x):\n        x = self.conv1(x)\n        x = self.conv2(x)\n        return x\n\n\nclass EseModule(nn.Module):\n    def __init__(self, chs: int, device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv = nn.Conv2d(\n            chs,\n            chs,\n            kernel_size=1,\n            stride=1,\n            
padding=0,\n            **dd,\n        )\n        self.sigmoid = nn.Sigmoid()\n\n    def forward(self, x):\n        identity = x\n        x = x.mean((2, 3), keepdim=True)\n        x = self.conv(x)\n        x = self.sigmoid(x)\n        return torch.mul(identity, x)\n\n\nclass StemV1(nn.Module):\n    # for PP-HGNet\n    def __init__(self, stem_chs: List[int], device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.stem = nn.Sequential(*[\n            ConvBNAct(\n                stem_chs[i],\n                stem_chs[i + 1],\n                kernel_size=3,\n                stride=2 if i == 0 else 1,\n                **dd) for i in range(\n                len(stem_chs) - 1)\n        ])\n        self.pool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)\n\n    def forward(self, x):\n        x = self.stem(x)\n        x = self.pool(x)\n        return x\n\n\nclass StemV2(nn.Module):\n    # for PP-HGNetv2\n    def __init__(\n            self,\n            in_chs: int,\n            mid_chs: int,\n            out_chs: int,\n            use_lab: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.stem1 = ConvBNAct(\n            in_chs,\n            mid_chs,\n            kernel_size=3,\n            stride=2,\n            use_lab=use_lab,\n            **dd,\n        )\n        self.stem2a = ConvBNAct(\n            mid_chs,\n            mid_chs // 2,\n            kernel_size=2,\n            stride=1,\n            use_lab=use_lab,\n            **dd,\n        )\n        self.stem2b = ConvBNAct(\n            mid_chs // 2,\n            mid_chs,\n            kernel_size=2,\n            stride=1,\n            use_lab=use_lab,\n            **dd,\n        )\n        self.stem3 = ConvBNAct(\n            mid_chs * 2,\n            mid_chs,\n            kernel_size=3,\n            stride=2,\n            
use_lab=use_lab,\n            **dd,\n        )\n        self.stem4 = ConvBNAct(\n            mid_chs,\n            out_chs,\n            kernel_size=1,\n            stride=1,\n            use_lab=use_lab,\n            **dd,\n        )\n        self.pool = nn.MaxPool2d(kernel_size=2, stride=1, ceil_mode=True)\n\n    def forward(self, x):\n        x = self.stem1(x)\n        x = F.pad(x, (0, 1, 0, 1))\n        x2 = self.stem2a(x)\n        x2 = F.pad(x2, (0, 1, 0, 1))\n        x2 = self.stem2b(x2)\n        x1 = self.pool(x)\n        x = torch.cat([x1, x2], dim=1)\n        x = self.stem3(x)\n        x = self.stem4(x)\n        return x\n\n\nclass HighPerfGpuBlock(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            mid_chs: int,\n            out_chs: int,\n            layer_num: int,\n            kernel_size: int = 3,\n            residual: bool = False,\n            light_block: bool = False,\n            use_lab: bool = False,\n            agg: str = 'ese',\n            drop_path: Union[List[float], float] = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.residual = residual\n\n        self.layers = nn.ModuleList()\n        for i in range(layer_num):\n            if light_block:\n                self.layers.append(\n                    LightConvBNAct(\n                        in_chs if i == 0 else mid_chs,\n                        mid_chs,\n                        kernel_size=kernel_size,\n                        use_lab=use_lab,\n                        **dd,\n                    )\n                )\n            else:\n                self.layers.append(\n                    ConvBNAct(\n                        in_chs if i == 0 else mid_chs,\n                        mid_chs,\n                        kernel_size=kernel_size,\n                        stride=1,\n                        use_lab=use_lab,\n                        
**dd,\n                    )\n                )\n\n        # feature aggregation\n        total_chs = in_chs + layer_num * mid_chs\n        if agg == 'se':\n            aggregation_squeeze_conv = ConvBNAct(\n                total_chs,\n                out_chs // 2,\n                kernel_size=1,\n                stride=1,\n                use_lab=use_lab,\n                **dd,\n            )\n            aggregation_excitation_conv = ConvBNAct(\n                out_chs // 2,\n                out_chs,\n                kernel_size=1,\n                stride=1,\n                use_lab=use_lab,\n                **dd,\n            )\n            self.aggregation = nn.Sequential(\n                aggregation_squeeze_conv,\n                aggregation_excitation_conv,\n            )\n        else:\n            aggregation_conv = ConvBNAct(\n                total_chs,\n                out_chs,\n                kernel_size=1,\n                stride=1,\n                use_lab=use_lab,\n                **dd,\n            )\n            att = EseModule(out_chs, **dd)\n            self.aggregation = nn.Sequential(\n                aggregation_conv,\n                att,\n            )\n\n        self.drop_path = DropPath(drop_path) if drop_path else nn.Identity()\n\n    def forward(self, x):\n        identity = x\n        output = [x]\n        for layer in self.layers:\n            x = layer(x)\n            output.append(x)\n        x = torch.cat(output, dim=1)\n        x = self.aggregation(x)\n        if self.residual:\n            x = self.drop_path(x) + identity\n        return x\n\n\nclass HighPerfGpuStage(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            mid_chs: int,\n            out_chs: int,\n            block_num: int,\n            layer_num: int,\n            downsample: bool = True,\n            stride: int = 2,\n            light_block: bool = False,\n            kernel_size: int = 3,\n            use_lab: bool = False,\n  
          agg: str = 'ese',\n            drop_path: Union[List[float], float] = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.downsample = downsample\n        if downsample:\n            self.downsample = ConvBNAct(\n                in_chs,\n                in_chs,\n                kernel_size=3,\n                stride=stride,\n                groups=in_chs,\n                use_act=False,\n                use_lab=use_lab,\n                **dd,\n            )\n        else:\n            self.downsample = nn.Identity()\n\n        blocks_list = []\n        for i in range(block_num):\n            blocks_list.append(\n                HighPerfGpuBlock(\n                    in_chs if i == 0 else out_chs,\n                    mid_chs,\n                    out_chs,\n                    layer_num,\n                    residual=False if i == 0 else True,\n                    kernel_size=kernel_size,\n                    light_block=light_block,\n                    use_lab=use_lab,\n                    agg=agg,\n                    drop_path=drop_path[i] if isinstance(drop_path, (list, tuple)) else drop_path,\n                    **dd,\n                )\n            )\n        self.blocks = nn.Sequential(*blocks_list)\n        self.grad_checkpointing= False\n\n    def forward(self, x):\n        x = self.downsample(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        return x\n\n\nclass ClassifierHead(nn.Module):\n    def __init__(\n            self,\n            in_features: int,\n            num_classes: int,\n            pool_type: str = 'avg',\n            drop_rate: float = 0.,\n            hidden_size: Optional[int] = 2048,\n            use_lab: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = 
{'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_features = in_features\n        if pool_type is not None:\n            if not pool_type:\n                assert num_classes == 0, 'Classifier head must be removed if pooling is disabled'\n\n        self.global_pool = SelectAdaptivePool2d(pool_type=pool_type)\n        if hidden_size is not None:\n            self.num_features = hidden_size\n            last_conv = nn.Conv2d(\n                in_features,\n                hidden_size,\n                kernel_size=1,\n                stride=1,\n                padding=0,\n                bias=False,\n                **dd,\n            )\n            act = nn.ReLU()\n            if use_lab:\n                lab = LearnableAffineBlock(**dd)\n                self.last_conv = nn.Sequential(last_conv, act, lab)\n            else:\n                self.last_conv = nn.Sequential(last_conv, act)\n        else:\n            self.last_conv = nn.Identity()\n\n        self.dropout = nn.Dropout(drop_rate)\n        self.flatten = nn.Flatten(1) if pool_type else nn.Identity()  # don't flatten if pooling disabled\n        self.fc = nn.Linear(self.num_features, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n    def reset(self, num_classes: int, pool_type: Optional[str] = None, device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        if pool_type is not None:\n            if not pool_type:\n                assert num_classes == 0, 'Classifier head must be removed if pooling is disabled'\n            self.global_pool = SelectAdaptivePool2d(pool_type=pool_type)\n            self.flatten = nn.Flatten(1) if pool_type else nn.Identity()  # don't flatten if pooling disabled\n\n        self.fc = nn.Linear(self.num_features, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n    def forward(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        x = self.last_conv(x)\n        x = 
self.dropout(x)\n        x = self.flatten(x)\n        if pre_logits:\n            return x\n        x = self.fc(x)\n        return x\n\n\nclass HighPerfGpuNet(nn.Module):\n\n    def __init__(\n            self,\n            cfg: Dict,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            head_hidden_size: Optional[int] = 2048,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            use_lab: bool = False,\n            device=None,\n            dtype=None,\n            **kwargs,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        stem_type = cfg[\"stem_type\"]\n        stem_chs = cfg[\"stem_chs\"]\n        stages_cfg = [cfg[\"stage1\"], cfg[\"stage2\"], cfg[\"stage3\"], cfg[\"stage4\"]]\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.use_lab = use_lab\n\n        assert stem_type in ['v1', 'v2']\n        if stem_type == 'v2':\n            self.stem = StemV2(\n                in_chs=in_chans,\n                mid_chs=stem_chs[0],\n                out_chs=stem_chs[1],\n                use_lab=use_lab,\n                **dd,\n            )\n        else:\n            self.stem = StemV1([in_chans] + stem_chs, **dd)\n\n        current_stride = 4\n\n        stages = []\n        self.feature_info = []\n        block_depths = [c[3] for c in stages_cfg]\n        dpr = calculate_drop_path_rates(drop_path_rate, block_depths, stagewise=True)\n        for i, stage_config in enumerate(stages_cfg):\n            in_chs, mid_chs, out_chs, block_num, downsample, light_block, kernel_size, layer_num = stage_config\n            stages += [HighPerfGpuStage(\n                in_chs=in_chs,\n                mid_chs=mid_chs,\n                out_chs=out_chs,\n                block_num=block_num,\n                layer_num=layer_num,\n                downsample=downsample,\n    
            light_block=light_block,\n                kernel_size=kernel_size,\n                use_lab=use_lab,\n                agg='ese' if stem_type == 'v1' else 'se',\n                drop_path=dpr[i],\n                **dd,\n            )]\n            self.num_features = out_chs\n            if downsample:\n                current_stride *= 2\n            self.feature_info += [dict(num_chs=self.num_features, reduction=current_stride, module=f'stages.{i}')]\n        self.stages = nn.Sequential(*stages)\n\n        self.head = ClassifierHead(\n            self.num_features,\n            num_classes=num_classes,\n            pool_type=global_pool,\n            drop_rate=drop_rate,\n            hidden_size=head_hidden_size,\n            use_lab=use_lab,\n            **dd,\n        )\n        self.head_hidden_size = self.head.num_features\n\n        for n, m in self.named_modules():\n            if isinstance(m, nn.Conv2d):\n                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')\n            elif isinstance(m, nn.BatchNorm2d):\n                nn.init.ones_(m.weight)\n                nn.init.zeros_(m.bias)\n            elif isinstance(m, nn.Linear):\n                nn.init.zeros_(m.bias)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^stem',\n            blocks=r'^stages\\.(\\d+)' if coarse else r'^stages\\.(\\d+).blocks\\.(\\d+)',\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        for s in self.stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None, device=None, dtype=None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool, device=device, dtype=dtype)\n\n    def forward_intermediates(\n            self,\n  
          x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.stem(x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        
if prune_head:\n            self.reset_classifier(0, 'avg')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        return self.stages(x)\n\n    def forward_head(self, x, pre_logits: bool = False):\n        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\nmodel_cfgs = dict(\n    # PP-HGNet\n    hgnet_tiny={\n        \"stem_type\": 'v1',\n        \"stem_chs\": [48, 48, 96],\n        # in_chs, mid_chs, out_chs, blocks, downsample, light_block, kernel_size, layer_num\n        \"stage1\": [96, 96, 224, 1, False, False, 3, 5],\n        \"stage2\": [224, 128, 448, 1, True, False, 3, 5],\n        \"stage3\": [448, 160, 512, 2, True, False, 3, 5],\n        \"stage4\": [512, 192, 768, 1, True, False, 3, 5],\n    },\n    hgnet_small={\n        \"stem_type\": 'v1',\n        \"stem_chs\": [64, 64, 128],\n        # in_chs, mid_chs, out_chs, blocks, downsample, light_block, kernel_size, layer_num\n        \"stage1\": [128, 128, 256, 1, False, False, 3, 6],\n        \"stage2\": [256, 160, 512, 1, True, False, 3, 6],\n        \"stage3\": [512, 192, 768, 2, True, False, 3, 6],\n        \"stage4\": [768, 224, 1024, 1, True, False, 3, 6],\n    },\n    hgnet_base={\n        \"stem_type\": 'v1',\n        \"stem_chs\": [96, 96, 160],\n        # in_chs, mid_chs, out_chs, blocks, downsample, light_block, kernel_size, layer_num\n        \"stage1\": [160, 192, 320, 1, False, False, 3, 7],\n        \"stage2\": [320, 224, 640, 2, True, False, 3, 7],\n        \"stage3\": [640, 256, 960, 3, True, False, 3, 7],\n        \"stage4\": [960, 288, 1280, 2, True, False, 3, 7],\n    },\n    # PP-HGNetv2\n    hgnetv2_b0={\n        \"stem_type\": 'v2',\n        \"stem_chs\": [16, 16],\n        # in_chs, mid_chs, out_chs, blocks, downsample, light_block, kernel_size, layer_num\n        \"stage1\": [16, 16, 64, 1, 
False, False, 3, 3],\n        \"stage2\": [64, 32, 256, 1, True, False, 3, 3],\n        \"stage3\": [256, 64, 512, 2, True, True, 5, 3],\n        \"stage4\": [512, 128, 1024, 1, True, True, 5, 3],\n    },\n    hgnetv2_b1={\n        \"stem_type\": 'v2',\n        \"stem_chs\": [24, 32],\n        # in_chs, mid_chs, out_chs, blocks, downsample, light_block, kernel_size, layer_num\n        \"stage1\": [32, 32, 64, 1, False, False, 3, 3],\n        \"stage2\": [64, 48, 256, 1, True, False, 3, 3],\n        \"stage3\": [256, 96, 512, 2, True, True, 5, 3],\n        \"stage4\": [512, 192, 1024, 1, True, True, 5, 3],\n    },\n    hgnetv2_b2={\n        \"stem_type\": 'v2',\n        \"stem_chs\": [24, 32],\n        # in_chs, mid_chs, out_chs, blocks, downsample, light_block, kernel_size, layer_num\n        \"stage1\": [32, 32, 96, 1, False, False, 3, 4],\n        \"stage2\": [96, 64, 384, 1, True, False, 3, 4],\n        \"stage3\": [384, 128, 768, 3, True, True, 5, 4],\n        \"stage4\": [768, 256, 1536, 1, True, True, 5, 4],\n    },\n    hgnetv2_b3={\n        \"stem_type\": 'v2',\n        \"stem_chs\": [24, 32],\n        # in_chs, mid_chs, out_chs, blocks, downsample, light_block, kernel_size, layer_num\n        \"stage1\": [32, 32, 128, 1, False, False, 3, 5],\n        \"stage2\": [128, 64, 512, 1, True, False, 3, 5],\n        \"stage3\": [512, 128, 1024, 3, True, True, 5, 5],\n        \"stage4\": [1024, 256, 2048, 1, True, True, 5, 5],\n    },\n    hgnetv2_b4={\n        \"stem_type\": 'v2',\n        \"stem_chs\": [32, 48],\n        # in_chs, mid_chs, out_chs, blocks, downsample, light_block, kernel_size, layer_num\n        \"stage1\": [48, 48, 128, 1, False, False, 3, 6],\n        \"stage2\": [128, 96, 512, 1, True, False, 3, 6],\n        \"stage3\": [512, 192, 1024, 3, True, True, 5, 6],\n        \"stage4\": [1024, 384, 2048, 1, True, True, 5, 6],\n    },\n    hgnetv2_b5={\n        \"stem_type\": 'v2',\n        \"stem_chs\": [32, 64],\n        # in_chs, mid_chs, out_chs, 
blocks, downsample, light_block, kernel_size, layer_num\n        \"stage1\": [64, 64, 128, 1, False, False, 3, 6],\n        \"stage2\": [128, 128, 512, 2, True, False, 3, 6],\n        \"stage3\": [512, 256, 1024, 5, True, True, 5, 6],\n        \"stage4\": [1024, 512, 2048, 2, True, True, 5, 6],\n    },\n    hgnetv2_b6={\n        \"stem_type\": 'v2',\n        \"stem_chs\": [48, 96],\n        # in_chs, mid_chs, out_chs, blocks, downsample, light_block, kernel_size, layer_num\n        \"stage1\": [96, 96, 192, 2, False, False, 3, 6],\n        \"stage2\": [192, 192, 512, 3, True, False, 3, 6],\n        \"stage3\": [512, 384, 1024, 6, True, True, 5, 6],\n        \"stage4\": [1024, 768, 2048, 3, True, True, 5, 6],\n    },\n)\n\n\ndef _create_hgnet(variant, pretrained=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', (0, 1, 2, 3))\n    return build_model_with_cfg(\n        HighPerfGpuNet,\n        variant,\n        pretrained,\n        model_cfg=model_cfgs[variant],\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        **kwargs,\n    )\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.965, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'classifier': 'head.fc', 'first_conv': 'stem.stem1.conv',\n        'test_crop_pct': 1.0, 'test_input_size': (3, 288, 288),\n        'license': 'apache-2.0',\n        **kwargs,\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'hgnet_tiny.paddle_in1k': _cfg(\n        first_conv='stem.stem.0.conv',\n        hf_hub_id='timm/'),\n    'hgnet_tiny.ssld_in1k': _cfg(\n        first_conv='stem.stem.0.conv',\n        hf_hub_id='timm/'),\n    'hgnet_small.paddle_in1k': _cfg(\n        first_conv='stem.stem.0.conv',\n        hf_hub_id='timm/'),\n    'hgnet_small.ssld_in1k': _cfg(\n        first_conv='stem.stem.0.conv',\n       
 hf_hub_id='timm/'),\n    'hgnet_base.ssld_in1k': _cfg(\n        first_conv='stem.stem.0.conv',\n        hf_hub_id='timm/'),\n    'hgnetv2_b0.ssld_stage2_ft_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'hgnetv2_b0.ssld_stage1_in22k_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'hgnetv2_b1.ssld_stage2_ft_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'hgnetv2_b1.ssld_stage1_in22k_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'hgnetv2_b2.ssld_stage2_ft_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'hgnetv2_b2.ssld_stage1_in22k_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'hgnetv2_b3.ssld_stage2_ft_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'hgnetv2_b3.ssld_stage1_in22k_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'hgnetv2_b4.ssld_stage2_ft_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'hgnetv2_b4.ssld_stage1_in22k_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'hgnetv2_b5.ssld_stage2_ft_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'hgnetv2_b5.ssld_stage1_in22k_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'hgnetv2_b6.ssld_stage2_ft_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'hgnetv2_b6.ssld_stage1_in22k_in1k': _cfg(\n        hf_hub_id='timm/'),\n})\n\n\n@register_model\ndef hgnet_tiny(pretrained=False, **kwargs) -> HighPerfGpuNet:\n    return _create_hgnet('hgnet_tiny', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef hgnet_small(pretrained=False, **kwargs) -> HighPerfGpuNet:\n    return _create_hgnet('hgnet_small', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef hgnet_base(pretrained=False, **kwargs) -> HighPerfGpuNet:\n    return _create_hgnet('hgnet_base', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef hgnetv2_b0(pretrained=False, **kwargs) -> HighPerfGpuNet:\n    return _create_hgnet('hgnetv2_b0', pretrained=pretrained, use_lab=True, **kwargs)\n\n\n@register_model\ndef hgnetv2_b1(pretrained=False, **kwargs) -> HighPerfGpuNet:\n    return _create_hgnet('hgnetv2_b1', pretrained=pretrained, use_lab=True, 
**kwargs)\n\n\n@register_model\ndef hgnetv2_b2(pretrained=False, **kwargs) -> HighPerfGpuNet:\n    return _create_hgnet('hgnetv2_b2', pretrained=pretrained, use_lab=True, **kwargs)\n\n\n@register_model\ndef hgnetv2_b3(pretrained=False, **kwargs) -> HighPerfGpuNet:\n    return _create_hgnet('hgnetv2_b3', pretrained=pretrained, use_lab=True, **kwargs)\n\n\n@register_model\ndef hgnetv2_b4(pretrained=False, **kwargs) -> HighPerfGpuNet:\n    return _create_hgnet('hgnetv2_b4', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef hgnetv2_b5(pretrained=False, **kwargs) -> HighPerfGpuNet:\n    return _create_hgnet('hgnetv2_b5', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef hgnetv2_b6(pretrained=False, **kwargs) -> HighPerfGpuNet:\n    return _create_hgnet('hgnetv2_b6', pretrained=pretrained, **kwargs)\n"
  },
  {
    "path": "timm/models/hiera.py",
    "content": "\"\"\" An PyTorch implementation of Hiera\n\nAdapted for timm from originals at https://github.com/facebookresearch/hiera\n\"\"\"\n\n# Copyright (c) Meta Platforms, Inc. and affiliates.\n# All rights reserved.\n\n# This source code is licensed under the license found in the\n# LICENSE file in the root directory of this source tree.\n# --------------------------------------------------------\n#\n# Hiera: A Hierarchical Vision Transformer without the Bells-and-Whistles\n#\n# Chaitanya Ryali, Yuan-Ting Hu, Daniel Bolya, Chen Wei, Haoqi Fan,\n# Po-Yao Huang, Vaibhav Aggarwal, Arkabandhu Chowdhury, Omid Poursaeed,\n# Judy Hoffman, Jitendra Malik, Yanghao Li, Christoph Feichtenhofer.\n#\n# Paper: https://arxiv.org/abs/2306.00989/\n#\n# References:\n# slowfast: https://github.com/facebookresearch/SlowFast\n# timm: https://github.com/rwightman/pytorch-image-models/tree/master/timm\n# --------------------------------------------------------\nimport math\nfrom functools import partial\nfrom typing import Dict, List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import (\n    DropPath,\n    calculate_drop_path_rates,\n    Mlp,\n    LayerScale,\n    ClNormMlpClassifierHead,\n    use_fused_attn,\n    _assert,\n    get_norm_layer,\n    to_2tuple,\n    init_weight_vit,\n    init_weight_jax,\n)\n\nfrom ._registry import generate_default_cfgs, register_model\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_function\nfrom ._manipulate import named_apply, checkpoint\n\n\n__all__ = ['Hiera']\n\n\ndef conv_nd(n: int) -> Type[nn.Module]:\n    \"\"\"\n    Returns a conv with nd (e.g., Conv2d for n=2). Work up to n=3.\n    If you wanted a 4d Hiera, you could probably just implement this for n=4. 
(no promises)\n    \"\"\"\n    return [nn.Identity, nn.Conv1d, nn.Conv2d, nn.Conv3d][n]\n\n\n@register_notrace_function\ndef get_resized_mask(target_size: List[int], mask: torch.Tensor) -> torch.Tensor:\n    # target_size: [(T), (H), W]\n    # (spatial) mask: [B, C, (t), (h), w]\n    if mask is None:\n        return mask\n\n    _assert(len(mask.shape[2:]) == len(target_size), \"mask spatial shape and target_size must match.\")\n    if mask.shape[2:] != target_size:\n        return F.interpolate(mask.float(), size=target_size)\n    return mask\n\n\ndef undo_windowing(\n        x: torch.Tensor,\n        shape: List[int],\n        mu_shape: List[int],\n) -> torch.Tensor:\n    \"\"\"\n    Restore spatial organization by undoing windowed organization of mask units.\n\n    Args:\n        x: organized by mask units windows, e.g. in 2d [B, #MUy*#MUx, MUy, MUx, C]\n        shape: current spatial shape, if it were not organized into mask unit\n            windows, e.g. in 2d [B, #MUy*MUy, #MUx*MUx, C].\n        mu_shape: current mask unit shape, e.g. in 2d [MUy, MUx]\n    Returns:\n        x: e.g. 
in 2d, [B, #MUy*MUy, #MUx*MUx, C]\n    \"\"\"\n    D = len(shape)\n    B, C = x.shape[0], x.shape[-1]\n    # [B, #MUy*#MUx, MUy, MUx, C] -> [B, #MUy, #MUx, MUy, MUx, C]\n    num_MUs = [s // mu for s, mu in zip(shape, mu_shape)]\n    x = x.view(B, *num_MUs, *mu_shape, C)\n\n    # [B, #MUy, #MUx, MUy, MUx, C] -> [B, #MUy*MUy, #MUx*MUx, C]\n    permute = (\n        [0]\n        + sum([list(p) for p in zip(range(1, 1 + D), range(1 + D, 1 + 2 * D))], [])\n        + [len(x.shape) - 1]\n    )\n    x = x.permute(permute).reshape(B, *shape, C)\n\n    return x\n\n\nclass Unroll(nn.Module):\n    \"\"\"\n    Reorders the tokens such that patches are contiguous in memory.\n    E.g., given [B, (H, W), C] and stride of (Sy, Sx), this will re-order the tokens as\n                           [B, (Sy, Sx, H // Sy, W // Sx), C]\n\n    This allows operations like Max2d to be computed as x.view(B, Sx*Sy, -1, C).max(dim=1).\n    Not only is this faster, but it also makes it easy to support inputs of arbitrary\n    dimensions in addition to patch-wise sparsity.\n\n    Performing this operation multiple times in sequence puts entire windows as contiguous\n    in memory. 
For instance, if you applied the stride (2, 2) 3 times, entire windows of\n    size 8x8 would be contiguous in memory, allowing operations like mask unit attention\n    computed easily and efficiently, while also allowing max to be applied sequentially.\n\n    Note: This means that intermediate values of the model are not in HxW order, so they\n    need to be re-rolled if you want to use the intermediate values as a HxW feature map.\n    The last block of the network is fine though, since by then the strides are all consumed.\n    \"\"\"\n\n    def __init__(\n            self,\n            input_size: Tuple[int, ...],\n            patch_stride: Tuple[int, ...],\n            unroll_schedule: List[Tuple[int, ...]],\n    ):\n        super().__init__()\n        self.size = [i // s for i, s in zip(input_size, patch_stride)]\n        self.schedule = unroll_schedule\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"\n        Input: Flattened patch embeddings [B, N, C]\n        Output: Patch embeddings [B, N, C] permuted such that [B, 4, N//4, C].max(1) etc. 
performs MaxPoolNd\n        \"\"\"\n        B, _, C = x.shape\n        cur_size = self.size\n        x = x.view(*([B] + cur_size + [C]))\n\n        for strides in self.schedule:\n            # Move patches with the given strides to the batch dimension\n\n            # Create a view of the tensor with the patch stride as separate dims\n            # For example in 2d: [B, H // Sy, Sy, W // Sx, Sx, C]\n            cur_size = [i // s for i, s in zip(cur_size, strides)]\n            new_shape = [B] + sum([[i, s] for i, s in zip(cur_size, strides)], []) + [C]\n            x = x.view(new_shape)\n\n            # Move the patch stride into the batch dimension\n            # For example in 2d: [B, Sy, Sx, H // Sy, W // Sx, C]\n            L = len(new_shape)\n            permute = [0] + list(range(2, L - 1, 2)) + list(range(1, L - 1, 2)) + [L - 1]\n            x = x.permute(permute)\n\n            # Now finally flatten the relevant dims into the batch dimension\n            x = x.flatten(0, len(strides))\n            B *= math.prod(strides)\n\n        x = x.reshape(-1, math.prod(self.size), C)\n        return x\n\n\nclass Reroll(nn.Module):\n    \"\"\"\n    Undos the \"unroll\" operation so that you can use intermediate features.\n    \"\"\"\n\n    def __init__(\n            self,\n            input_size: Tuple[int, ...],\n            patch_stride: Tuple[int, ...],\n            unroll_schedule: List[Tuple[int, ...]],\n            stage_ends: List[int],\n            q_pool: int,\n    ):\n        super().__init__()\n        self.size = [i // s for i, s in zip(input_size, patch_stride)]\n\n        # The first stage has to reverse everything\n        # The next stage has to reverse all but the first unroll, etc.\n        self.schedule = {}\n        size = self.size\n        for i in range(stage_ends[-1] + 1):\n            self.schedule[i] = unroll_schedule, size\n            # schedule unchanged if no pooling at a stage end\n            if i in stage_ends[:q_pool]:\n             
   if len(unroll_schedule) > 0:\n                    size = [n // s for n, s in zip(size, unroll_schedule[0])]\n                unroll_schedule = unroll_schedule[1:]\n\n    def forward(\n            self,\n            x: torch.Tensor,\n            block_idx: int,\n            mask: torch.Tensor = None\n    ) -> torch.Tensor:\n        \"\"\"\n        Roll the given tensor back up to spatial order assuming it's from the given block.\n\n        If no mask is provided:\n            - Returns [B, H, W, C] for 2d, [B, T, H, W, C] for 3d, etc.\n        If a mask is provided:\n            - Returns [B, #MUs, MUy, MUx, C] for 2d, etc.\n        \"\"\"\n        schedule, size = self.schedule[block_idx]\n        B, N, C = x.shape\n\n        D = len(size)\n        cur_mu_shape = [1] * D\n\n        for strides in schedule:\n            # Extract the current patch from N\n            x = x.view(B, *strides, N // math.prod(strides), *cur_mu_shape, C)\n\n            # Move that patch into the current MU\n            # Example in 2d: [B, Sy, Sx, N//(Sy*Sx), MUy, MUx, C] -> [B, N//(Sy*Sx), Sy, MUy, Sx, MUx, C]\n            L = len(x.shape)\n            permute = (\n                [0, 1 + D]\n                + sum([list(p) for p in zip(range(1, 1 + D), range(1 + D + 1, L - 1))], [])\n                + [L - 1]\n            )\n            x = x.permute(permute)\n\n            # Reshape to [B, N//(Sy*Sx), *MU, C]\n            for i in range(D):\n                cur_mu_shape[i] *= strides[i]\n            x = x.reshape(B, -1, *cur_mu_shape, C)\n            N = x.shape[1]\n\n        # Current shape (e.g., 2d: [B, #MUy*#MUx, MUy, MUx, C])\n        x = x.view(B, N, *cur_mu_shape, C)\n\n        # If masked, return [B, #MUs, MUy, MUx, C]\n        if mask is not None:\n            return x\n\n        # If not masked, we can return [B, H, W, C]\n        x = undo_windowing(x, size, cur_mu_shape)\n\n        return x\n\n\nclass MaskUnitAttention(nn.Module):\n    \"\"\"\n    Computes either Mask 
Unit or Global Attention. Also is able to perform q pooling.\n\n    Note: this assumes the tokens have already been flattened and unrolled into mask units.\n    See `Unroll` for more details.\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            dim_out: int,\n            heads: int,\n            q_stride: int = 1,\n            window_size: int = 0,\n            use_mask_unit_attn: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n        - dim, dim_out: The input and output feature dimensions.\n        - heads: The number of attention heads.\n        - q_stride: If greater than 1, pool q with this stride. The stride should be flattened (e.g., 2x2 = 4).\n        - window_size: The current (flattened) size of a mask unit *after* pooling (if any).\n        - use_mask_unit_attn: Use Mask Unit or Global Attention.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.dim_out = dim_out\n        self.heads = heads\n        self.q_stride = q_stride\n        self.head_dim = dim_out // heads\n        self.scale = self.head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n\n        self.qkv = nn.Linear(dim, 3 * dim_out, **dd)\n        self.proj = nn.Linear(dim_out, dim_out, **dd)\n\n        self.window_size = window_size\n        self.use_mask_unit_attn = use_mask_unit_attn\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\" Input should be of shape [batch, tokens, channels]. 
\"\"\"\n        B, N, _ = x.shape\n\n        if self.use_mask_unit_attn:\n            # Windowed attention: 5D path [B, heads, num_windows, tokens_per_window, head_dim]\n            num_windows = N // (self.q_stride * self.window_size)\n            qkv = self.qkv(x).reshape(\n                B, -1, num_windows, 3, self.heads, self.head_dim,\n            ).permute(3, 0, 4, 2, 1, 5)\n            q, k, v = qkv.unbind(0)\n\n            if self.q_stride > 1:\n                # Refer to Unroll to see how this performs a maxpool-Nd\n                q = q.view(B, self.heads, num_windows, self.q_stride, -1, self.head_dim).amax(dim=3)\n        else:\n            # Global attention: 4D path [B, heads, N, head_dim]\n            # Avoids the dummy num_windows=1 dimension that prevents FlashAttention dispatch.\n            qkv = self.qkv(x).reshape(B, N, 3, self.heads, self.head_dim).permute(2, 0, 3, 1, 4)\n            q, k, v = qkv.unbind(0)\n\n            if self.q_stride > 1:\n                # dim=2 instead of dim=3 because num_windows dimension is absent\n                q = q.view(B, self.heads, self.q_stride, -1, self.head_dim).amax(dim=2)\n\n            # Enforce contiguous memory layout so SDPA dispatches to FlashAttention\n            # instead of silently falling back to the O(N^2) math backend.\n            q, k, v = q.contiguous(), k.contiguous(), v.contiguous()\n\n        if self.fused_attn:\n            # Note: the original paper did *not* use SDPA, it's a free boost!\n            x = F.scaled_dot_product_attention(q, k, v)\n        else:\n            attn = (q * self.scale) @ k.transpose(-1, -2)\n            attn = attn.softmax(dim=-1)\n            x = attn @ v\n\n        # Output transpose adapts to 5D (windowed) vs 4D (global) layout\n        if self.use_mask_unit_attn:\n            x = x.transpose(1, 3).reshape(B, -1, self.dim_out)\n        else:\n            x = x.transpose(1, 2).reshape(B, -1, self.dim_out)\n\n        x = self.proj(x)\n        return 
x\n\n\nclass HieraBlock(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            dim_out: int,\n            heads: int,\n            mlp_ratio: float = 4.0,\n            drop_path: float = 0.0,\n            init_values: Optional[float] = None,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            act_layer: Type[nn.Module] = nn.GELU,\n            q_stride: int = 1,\n            window_size: int = 0,\n            use_expand_proj: bool = True,\n            use_mask_unit_attn: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.dim_out = dim_out\n\n        self.norm1 = norm_layer(dim, **dd)\n        if dim != dim_out:\n            self.do_expand = True\n            if use_expand_proj:\n                self.proj = nn.Linear(dim, dim_out, **dd)\n            else:\n                assert dim_out == dim * 2\n                self.proj = None\n        else:\n            self.do_expand = False\n            self.proj = None\n        self.attn = MaskUnitAttention(\n            dim,\n            dim_out,\n            heads,\n            q_stride,\n            window_size,\n            use_mask_unit_attn,\n            **dd\n        )\n        self.ls1 = LayerScale(dim_out, init_values=init_values, **dd) if init_values is not None else nn.Identity()\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0 else nn.Identity()\n\n        self.norm2 = norm_layer(dim_out, **dd)\n        self.mlp = Mlp(dim_out, int(dim_out * mlp_ratio), act_layer=act_layer, **dd)\n        self.ls2 = LayerScale(dim_out, init_values=init_values, **dd) if init_values is not None else nn.Identity()\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0 else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        # Attention + Q Pooling\n        x_norm = self.norm1(x)\n        if 
self.do_expand:\n            if self.proj is not None:\n                x = self.proj(x_norm)\n                x = x.view(x.shape[0], self.attn.q_stride, -1, x.shape[-1]).amax(dim=1)  # max-pool\n            else:\n                x = torch.cat([\n                    x.view(x.shape[0], self.attn.q_stride, -1, x.shape[-1]).amax(dim=1),  # max-pool\n                    x.view(x.shape[0], self.attn.q_stride, -1, x.shape[-1]).mean(dim=1),  # avg-pool\n                    ],\n                    dim=-1,\n                )\n        x = x + self.drop_path1(self.ls1(self.attn(x_norm)))\n\n        # MLP\n        x = x + self.drop_path2(self.ls2(self.mlp(self.norm2(x))))\n        return x\n\n\nclass PatchEmbed(nn.Module):\n    \"\"\"Patch embed that supports any number of spatial dimensions (1d, 2d, 3d).\"\"\"\n\n    def __init__(\n            self,\n            dim_in: int,\n            dim_out: int,\n            kernel: Tuple[int, ...],\n            stride: Tuple[int, ...],\n            padding: Tuple[int, ...],\n            reshape: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        # Support any number of spatial dimensions\n        self.spatial_dims = len(kernel)\n        self.reshape = reshape\n        self.proj = conv_nd(self.spatial_dims)(\n            dim_in,\n            dim_out,\n            kernel_size=kernel,\n            stride=stride,\n            padding=padding,\n            **dd,\n        )\n\n    def forward(\n            self,\n            x: torch.Tensor,\n            mask: Optional[torch.Tensor] = None,\n    ) -> torch.Tensor:\n        if mask is not None:\n            mask = get_resized_mask(target_size=x.shape[2:], mask=mask)\n            x = self.proj(x * mask.to(torch.bool))\n        else:\n            x = self.proj(x)\n        if self.reshape:\n            x = x.reshape(x.shape[0], x.shape[1], -1).transpose(2, 1)\n        return x\n\n\nclass 
Hiera(nn.Module):\n\n    def __init__(\n            self,\n            img_size: Tuple[int, ...] = (224, 224),\n            in_chans: int = 3,\n            embed_dim: int = 96,  # initial embed dim\n            num_heads: int = 1,  # initial number of heads\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            stages: Tuple[int, ...] = (2, 3, 16, 3),\n            q_pool: int = 3,  # number of q_pool stages\n            q_stride: Tuple[int, ...] = (2, 2),\n            mask_unit_size: Tuple[int, ...] = (8, 8),  # must divide q_stride ** (#stages-1)\n            # mask_unit_attn: which stages use mask unit attention?\n            mask_unit_attn: Tuple[bool, ...] = (True, True, False, False),\n            use_expand_proj: bool = True,\n            dim_mul: float = 2.0,\n            head_mul: float = 2.0,\n            patch_kernel: Tuple[int, ...] = (7, 7),\n            patch_stride: Tuple[int, ...] = (4, 4),\n            patch_padding: Tuple[int, ...] = (3, 3),\n            mlp_ratio: float = 4.0,\n            drop_path_rate: float = 0.0,\n            init_values: Optional[float] = None,\n            fix_init: bool = True,\n            weight_init: str = '',\n            norm_layer: Union[str, Type[nn.Module]] = \"LayerNorm\",\n            drop_rate: float = 0.0,\n            patch_drop_rate: float = 0.0,\n            head_init_scale: float = 0.001,\n            sep_pos_embed: bool = False,\n            abs_win_pos_embed: bool = False,\n            global_pos_size: Tuple[int, int] = (14, 14),\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.grad_checkpointing = False\n        norm_layer = get_norm_layer(norm_layer)\n        if isinstance(img_size, int):\n            img_size = to_2tuple(img_size)\n\n        self.patch_stride = patch_stride\n        
self.tokens_spatial_shape = [i // s for i, s in zip(img_size, patch_stride)]\n        num_tokens = math.prod(self.tokens_spatial_shape)\n        flat_mu_size = math.prod(mask_unit_size)\n        flat_q_stride = math.prod(q_stride)\n        assert q_pool < len(stages)\n        self.q_pool, self.q_stride = q_pool, q_stride\n        self.mu_size, self.mask_unit_size = flat_mu_size, mask_unit_size\n        self.mask_spatial_shape = [i // s for i, s in zip(self.tokens_spatial_shape, self.mask_unit_size)]\n        self.stage_ends = [sum(stages[:i]) - 1 for i in range(1, len(stages) + 1)]\n        self.patch_drop_rate = patch_drop_rate\n\n        self.patch_embed = PatchEmbed(\n            in_chans,\n            embed_dim,\n            patch_kernel,\n            patch_stride,\n            patch_padding,\n            **dd,\n        )\n\n        self.pos_embed: Optional[nn.Parameter] = None\n        self.pos_embed_win: Optional[nn.Parameter] = None\n        self.pos_embed_spatial: Optional[nn.Parameter] = None\n        self.pos_embed_temporal: Optional[nn.Parameter] = None\n        if sep_pos_embed:\n            self.pos_embed_spatial = nn.Parameter(\n                torch.zeros(1, self.tokens_spatial_shape[1] * self.tokens_spatial_shape[2], embed_dim, **dd)\n            )\n            self.pos_embed_temporal = nn.Parameter(\n                torch.zeros(1, self.tokens_spatial_shape[0], embed_dim, **dd)\n            )\n        else:\n            if abs_win_pos_embed:\n                # absolute win, params NCHW to make tile & interpolate more natural before add & reshape\n                self.pos_embed = nn.Parameter(torch.zeros(1, embed_dim, *global_pos_size, **dd))\n                self.pos_embed_win = nn.Parameter(torch.zeros(1, embed_dim, *mask_unit_size, **dd))\n            else:\n                self.pos_embed = nn.Parameter(torch.zeros(1, num_tokens, embed_dim, **dd))\n\n        # Setup roll and reroll modules\n        self.unroll = Unroll(\n            img_size,\n    
        patch_stride,\n            [q_stride] * len(self.stage_ends[:-1])\n        )\n        self.reroll = Reroll(\n            img_size,\n            patch_stride,\n            [q_stride] * len(self.stage_ends[:-1]),\n            self.stage_ends,\n            q_pool,\n        )\n        # q_pool locations\n        q_pool_blocks = [x + 1 for x in self.stage_ends[:q_pool]]\n\n        # Transformer blocks\n        cur_stage = 0\n        depth = sum(stages)\n        dpr = calculate_drop_path_rates(drop_path_rate, depth)  # stochastic depth decay rule\n        self.blocks = nn.ModuleList()\n        self.feature_info = []\n        for i in range(depth):\n            dim_out = embed_dim\n            # Mask unit or global attention.\n            # Lag by 1 block, so that global attention,\n            # applied post pooling on lower resolution\n            use_mask_unit_attn = mask_unit_attn[cur_stage]\n\n            if i - 1 in self.stage_ends:\n                dim_out = int(embed_dim * dim_mul)\n                num_heads = int(num_heads * head_mul)\n                cur_stage += 1\n                if i in q_pool_blocks:\n                    flat_mu_size //= flat_q_stride\n\n            block = HieraBlock(\n                dim=embed_dim,\n                dim_out=dim_out,\n                heads=num_heads,\n                mlp_ratio=mlp_ratio,\n                drop_path=dpr[i],\n                init_values=init_values,\n                norm_layer=norm_layer,\n                q_stride=(flat_q_stride if i in q_pool_blocks else 1),\n                window_size=flat_mu_size,\n                use_expand_proj=use_expand_proj,\n                use_mask_unit_attn=use_mask_unit_attn,\n                **dd,\n            )\n            embed_dim = dim_out\n            if i in self.stage_ends:\n                self.feature_info += [\n                    dict(num_chs=dim_out, reduction=2**(cur_stage+2), module=f'blocks.{self.stage_ends[cur_stage]}')]\n            
self.blocks.append(block)\n\n        self.num_features = self.head_hidden_size = embed_dim\n        self.head = ClNormMlpClassifierHead(\n            embed_dim,\n            num_classes,\n            pool_type=global_pool,\n            drop_rate=drop_rate,\n            norm_layer=norm_layer,\n            input_fmt='NLC',\n            **dd,\n        )\n\n        # Initialize everything\n        if sep_pos_embed:\n            nn.init.trunc_normal_(self.pos_embed_spatial, std=0.02)\n            nn.init.trunc_normal_(self.pos_embed_temporal, std=0.02)\n        else:\n            if self.pos_embed is not None:\n                nn.init.trunc_normal_(self.pos_embed, std=0.02)\n            if self.pos_embed_win is not None:\n                nn.init.trunc_normal_(self.pos_embed_win, std=0.02)\n\n        if weight_init != 'skip':\n            init_fn = init_weight_jax if weight_init == 'jax' else init_weight_vit\n            init_fn = partial(init_fn, classifier_name='head.fc')\n            named_apply(init_fn, self)\n        if fix_init:\n            self.fix_init_weight()\n        if isinstance(self.head.fc, nn.Linear):\n            self.head.fc.weight.data.mul_(head_init_scale)\n            self.head.fc.bias.data.mul_(head_init_scale)\n\n    def fix_init_weight(self):\n        def rescale(param, _layer_id):\n            param.div_(math.sqrt(2.0 * _layer_id))\n\n        for layer_id, layer in enumerate(self.blocks):\n            rescale(layer.attn.proj.weight.data, layer_id + 1)\n            rescale(layer.mlp.fc2.weight.data, layer_id + 1)\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        if self.pos_embed is not None:\n            return [\"pos_embed\"]\n        elif self.pos_embed_abs is not None:\n            return ['pos_embed_abs', 'pos_embed_win']\n        else:\n            return [\"pos_embed_spatial\", \"pos_embed_temporal\"]\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict:\n        return dict(\n            
stem=r'^pos_embed|pos_embed_spatial|pos_embed_temporal|pos_embed_abs|pos_embed_win|patch_embed',\n            blocks=[(r'^blocks\\.(\\d+)', None), (r'^norm', (99999,))]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self):\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None, reset_other: bool = False):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool, reset_other=reset_other)\n\n    def get_random_mask(self, x: torch.Tensor, mask_ratio: float) -> torch.Tensor:\n        \"\"\"\n        Generates a random mask, mask_ratio fraction are dropped.\n        1 is *keep*, 0 is *remove*. Useful for MAE, FLIP, etc.\n        \"\"\"\n        B = x.shape[0]\n        # Tokens selected for masking at mask unit level\n        num_windows = math.prod(self.mask_spatial_shape)  # num_mask_units\n        len_keep = int(num_windows * (1 - mask_ratio))\n        noise = torch.rand(B, num_windows, device=x.device)\n\n        # Sort noise for each sample\n        ids_shuffle = torch.argsort(noise, dim=1)  # ascend: small is keep, large is remove\n        ids_restore = torch.argsort(ids_shuffle, dim=1)\n\n        # Generate the binary mask: 1 is *keep*, 0 is *remove*\n        # Note this is opposite to original MAE\n        mask = torch.zeros([B, num_windows], device=x.device)\n        mask[:, :len_keep] = 1\n        # Unshuffle to get the binary mask\n        mask = torch.gather(mask, dim=1, index=ids_restore)\n\n        return mask.bool()\n\n    def _pos_embed(self, x) -> torch.Tensor:\n        if self.pos_embed_win is not None:\n            # absolute win position embedding, from\n            # Window Attention is Bugged: How not to Interpolate Position Embeddings (https://arxiv.org/abs/2311.05613)\n            pos_embed_win = 
self.pos_embed_win.tile(self.mask_spatial_shape)\n            pos_embed = F.interpolate(\n                self.pos_embed,\n                size=pos_embed_win.shape[-2:],\n                mode='bicubic',\n                antialias=True,\n            )\n            pos_embed = pos_embed + pos_embed_win\n            pos_embed = pos_embed.flatten(2).transpose(1, 2)\n        elif self.pos_embed is not None:\n            pos_embed = self.pos_embed\n        else:\n            pos_embed = (\n                self.pos_embed_spatial.repeat(1, self.tokens_spatial_shape[0], 1)\n                +\n                torch.repeat_interleave(\n                    self.pos_embed_temporal,\n                    self.tokens_spatial_shape[1] * self.tokens_spatial_shape[2],\n                    dim=1,\n                )\n            )\n        x = x + pos_embed\n        return x\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            mask: Optional[torch.Tensor] = None,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = True,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n            coarse: bool = True,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to all intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert not norm, 'normalization of features not supported'\n        assert output_fmt in ('NCHW', 'NHWC'), 'Output format must be one of 
NCHW, NHWC.'\n        if coarse:\n            take_indices, max_index = feature_take_indices(len(self.stage_ends), indices)\n            take_indices = [self.stage_ends[i] for i in take_indices]\n            max_index = self.stage_ends[max_index]\n        else:\n            take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n\n        if mask is not None:\n            patch_mask = mask.view(x.shape[0], 1, *self.mask_spatial_shape)  # B, C, *mask_spatial_shape\n        else:\n            patch_mask = None\n        x = self.patch_embed(x, mask=patch_mask)\n        x = self._pos_embed(x)\n        x = self.unroll(x)\n\n        # Discard masked tokens\n        if mask is not None:\n            x = x[mask[..., None].tile(1, self.mu_size, x.shape[2])].view(x.shape[0], -1, x.shape[-1])\n\n        intermediates = []\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            blocks = self.blocks\n        else:\n            blocks = self.blocks[:max_index + 1]\n        for i, blk in enumerate(blocks):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x)\n            else:\n                x = blk(x)\n            if i in take_indices:\n                x_int = self.reroll(x, i, mask=mask)\n                intermediates.append(x_int.permute(0, 3, 1, 2) if output_fmt == 'NCHW' else x_int)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n            coarse: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        if coarse:\n            take_indices, max_index = feature_take_indices(len(self.stage_ends), indices)\n            max_index = 
self.stage_ends[max_index]\n        else:\n            take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n        self.blocks = self.blocks[:max_index + 1]  # truncate blocks\n        if prune_head:\n            self.head.reset(0, reset_other=True)\n        return take_indices\n\n    def forward_features(\n            self,\n            x: torch.Tensor,\n            mask: Optional[torch.Tensor] = None,\n            return_intermediates: bool = False,\n    ) -> torch.Tensor:\n        \"\"\"\n        mask should be a boolean tensor of shape [B, #MUt*#MUy*#MUx] where #MU are the number of mask units in that dim.\n        Note: 1 in mask is *keep*, 0 is *remove*; mask.sum(dim=-1) should be the same across the batch.\n        \"\"\"\n        if self.training and self.patch_drop_rate > 0:\n            # using mask for something like 'patch dropout' via mask-units in supervised train / fine-tune\n            assert mask is None\n            mask = self.get_random_mask(x, mask_ratio=self.patch_drop_rate)\n\n        if mask is not None:\n            patch_mask = mask.view(x.shape[0], 1, *self.mask_spatial_shape)  # B, C, *mask_spatial_shape\n        else:\n            patch_mask = None\n        x = self.patch_embed(x, mask=patch_mask)\n        x = self._pos_embed(x)\n        x = self.unroll(x)\n\n        # Discard masked tokens\n        if mask is not None:\n            x = x[mask[..., None].tile(1, self.mu_size, x.shape[2])].view(x.shape[0], -1, x.shape[-1])\n\n        intermediates = []\n        for i, blk in enumerate(self.blocks):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x)\n            else:\n                x = blk(x)\n            if return_intermediates and i in self.stage_ends:\n                intermediates.append(self.reroll(x, i, mask=mask))\n\n        # x may not always be in spatial order here.\n        # e.g. 
if q_pool = 2, mask_unit_size = (8, 8), and\n        # q_stride = (2, 2), not all unrolls were consumed,\n        # intermediates[-1] is x in spatial order\n        if return_intermediates:\n            return x, intermediates\n\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False) -> torch.Tensor:\n        x = self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n        return x\n\n    def forward(\n            self,\n            x: torch.Tensor,\n            mask: Optional[torch.Tensor] = None,\n    ) -> torch.Tensor:\n        x = self.forward_features(x, mask=mask)\n        if mask is None:\n            x = self.forward_head(x)\n        return x\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.proj', 'classifier': 'head.fc',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    \"hiera_tiny_224.mae_in1k_ft_in1k\": _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n    ),\n    \"hiera_tiny_224.mae\": _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        num_classes=0,\n    ),\n\n    \"hiera_small_224.mae_in1k_ft_in1k\": _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n    ),\n    \"hiera_small_224.mae\": _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        num_classes=0,\n    ),\n\n    \"hiera_base_224.mae_in1k_ft_in1k\": _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n    ),\n    \"hiera_base_224.mae\": _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        num_classes=0,\n    ),\n\n    \"hiera_base_plus_224.mae_in1k_ft_in1k\": _cfg(\n        hf_hub_id='timm/',\n        
license='cc-by-nc-4.0',\n    ),\n    \"hiera_base_plus_224.mae\": _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        num_classes=0,\n    ),\n\n    \"hiera_large_224.mae_in1k_ft_in1k\": _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n    ),\n    \"hiera_large_224.mae\": _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        num_classes=0,\n    ),\n\n    \"hiera_huge_224.mae_in1k_ft_in1k\": _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n    ),\n    \"hiera_huge_224.mae\": _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        num_classes=0,\n    ),\n\n    \"hiera_small_abswin_256.sbb2_e200_in12k_ft_in1k\": _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95,\n    ),\n    \"hiera_small_abswin_256.sbb2_pd_e200_in12k_ft_in1k\": _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95,\n    ),\n    \"hiera_small_abswin_256.sbb2_e200_in12k\": _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        input_size=(3, 256, 256), crop_pct=0.95,\n    ),\n    \"hiera_small_abswin_256.sbb2_pd_e200_in12k\": _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        input_size=(3, 256, 256), crop_pct=0.95,\n    ),\n    \"hiera_base_abswin_256.untrained\": _cfg(\n        # hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95,\n    ),\n})\n\n\ndef checkpoint_filter_fn(state_dict, model=None):\n    state_dict = state_dict.get('model_state', state_dict)\n    output = {}\n    for k, v in state_dict.items():\n        # if k == 'pos_embed' and  v.shape[1] != model.pos_embed.shape[1]:\n        #     # To resize pos embedding when using model at different size from pretrained weights\n        #     from timm.layers import resample_abs_pos_embed\n        #     v = resample_abs_pos_embed(\n        #         v,\n        #         new_size=(64, 64),\n        #         num_prefix_tokens=0,\n    
    #         verbose=True,\n        #     )\n        if 'head.projection.' in k:\n            k = k.replace('head.projection.', 'head.fc.')\n        if k.startswith('encoder_norm.'):\n            k = k.replace('encoder_norm.', 'head.norm.')\n        elif k.startswith('norm.'):\n            k = k.replace('norm.', 'head.norm.')\n        if k == 'pos_embed_abs':\n            k = 'pos_embed'\n        output[k] = v\n    return output\n\n\ndef _create_hiera(variant: str, pretrained: bool = False, **kwargs) -> Hiera:\n    out_indices = kwargs.pop('out_indices', 4)\n\n    return build_model_with_cfg(\n        Hiera,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n\n\n@register_model\ndef hiera_tiny_224(pretrained=False, **kwargs):\n    model_args = dict(embed_dim=96, num_heads=1, stages=(1, 2, 7, 2))\n    return _create_hiera('hiera_tiny_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef hiera_small_224(pretrained=False, **kwargs):\n    model_args = dict(embed_dim=96, num_heads=1, stages=(1, 2, 11, 2))\n    return _create_hiera('hiera_small_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef hiera_base_224(pretrained=False, **kwargs):\n    model_args = dict(embed_dim=96, num_heads=1, stages=(2, 3, 16, 3))\n    return _create_hiera('hiera_base_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef hiera_base_plus_224(pretrained=False, **kwargs):\n    model_args = dict(embed_dim=112, num_heads=2, stages=(2, 3, 16, 3))\n    return _create_hiera('hiera_base_plus_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef hiera_large_224(pretrained=False, **kwargs):\n    model_args = dict(embed_dim=144, num_heads=2, stages=(2, 6, 36, 4))\n    return _create_hiera('hiera_large_224', pretrained=pretrained, 
**dict(model_args, **kwargs))\n\n\n@register_model\ndef hiera_huge_224(pretrained=False, **kwargs):\n    model_args = dict(embed_dim=256, num_heads=4, stages=(2, 6, 36, 4))\n    return _create_hiera('hiera_huge_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef hiera_small_abswin_256(pretrained=False, **kwargs):\n    model_args = dict(\n        embed_dim=96, num_heads=1, stages=(1, 2, 11, 2), abs_win_pos_embed=True, global_pos_size=(16, 16),\n        init_values=1e-5, weight_init='jax', use_expand_proj=False,\n    )\n    return _create_hiera('hiera_small_abswin_256', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef hiera_base_abswin_256(pretrained=False, **kwargs):\n    model_args = dict(\n        embed_dim=96, num_heads=1, stages=(2, 3, 16, 3), abs_win_pos_embed=True, init_values=1e-5, weight_init='jax')\n    return _create_hiera('hiera_base_abswin_256', pretrained=pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/hieradet_sam2.py",
    "content": "import math\nfrom copy import deepcopy\nfrom functools import partial\nfrom typing import Dict, List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import (\n    PatchEmbed,\n    Mlp,\n    DropPath,\n    calculate_drop_path_rates,\n    ClNormMlpClassifierHead,\n    LayerScale,\n    get_norm_layer,\n    get_act_layer,\n    init_weight_jax,\n    init_weight_vit,\n    to_2tuple,\n    use_fused_attn,\n)\n\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import named_apply, checkpoint\nfrom ._registry import generate_default_cfgs, register_model\n\n\ndef window_partition(x, window_size: Tuple[int, int]):\n    \"\"\"\n    Partition into non-overlapping windows with padding if needed.\n    Args:\n        x (tensor): input tokens with [B, H, W, C].\n        window_size (int): window size.\n    Returns:\n        windows: windows after partition with [B * num_windows, window_size, window_size, C].\n        (Hp, Wp): padded height and width before partition\n    \"\"\"\n    B, H, W, C = x.shape\n    x = x.view(B, H // window_size[0], window_size[0], W // window_size[1], window_size[1], C)\n    windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size[0], window_size[1], C)\n    return windows\n\n\ndef window_unpartition(windows: torch.Tensor, window_size: Tuple[int, int], hw: Tuple[int, int]):\n    \"\"\"\n    Window unpartition into original sequences and removing padding.\n    Args:\n        x (tensor): input tokens with [B * num_windows, window_size, window_size, C].\n        window_size (int): window size.\n        hw (Tuple): original height and width (H, W) before padding.\n    Returns:\n        x: unpartitioned sequences with [B, H, W, C].\n    \"\"\"\n    H, W = hw\n    B = windows.shape[0] // (H * W // window_size[0] // window_size[1])\n    
x = windows.view(B, H // window_size[0], W // window_size[1], window_size[0], window_size[1], -1)\n    x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)\n    return x\n\n\ndef _calc_pad(H: int, W: int, window_size: Tuple[int, int]) -> Tuple[int, int, int, int]:\n    pad_h = (window_size[0] - H % window_size[0]) % window_size[0]\n    pad_w = (window_size[1] - W % window_size[1]) % window_size[1]\n    Hp, Wp = H + pad_h, W + pad_w\n    return Hp, Wp, pad_h, pad_w\n\n\nclass MultiScaleAttention(nn.Module):\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            dim_out: int,\n            num_heads: int,\n            q_pool: nn.Module = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.dim_out = dim_out\n        self.num_heads = num_heads\n        head_dim = dim_out // num_heads\n        self.scale = head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n\n        self.q_pool = q_pool\n        self.qkv = nn.Linear(dim, dim_out * 3, **dd)\n        self.proj = nn.Linear(dim_out, dim_out, **dd)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        B, H, W, _ = x.shape\n\n        # qkv with shape (B, H * W, 3, nHead, C)\n        qkv = self.qkv(x).reshape(B, H * W, 3, self.num_heads, -1)\n\n        # q, k, v with shape (B, H * W, nheads, C)\n        q, k, v = torch.unbind(qkv, 2)\n\n        # Q pooling (for downsample at stage changes)\n        if self.q_pool is not None:\n            q = q.reshape(B, H, W, -1).permute(0, 3, 1, 2)  # to BCHW for pool\n            q = self.q_pool(q).permute(0, 2, 3, 1)\n            H, W = q.shape[1:3]  # downsampled shape\n            q = q.reshape(B, H * W, self.num_heads, -1)\n\n        # Torch's SDPA expects [B, nheads, H*W, C] so we transpose\n        q = q.transpose(1, 2)\n        k = k.transpose(1, 2)\n        v = 
v.transpose(1, 2)\n        if self.fused_attn:\n            x = F.scaled_dot_product_attention(q, k, v)\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-1, -2)\n            attn = attn.softmax(dim=-1)\n            x = attn @ v\n\n        # Transpose back\n        x = x.transpose(1, 2).reshape(B, H, W, -1)\n\n        x = self.proj(x)\n        return x\n\n\nclass MultiScaleBlock(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            dim_out: int,\n            num_heads: int,\n            mlp_ratio: float = 4.0,\n            q_stride: Optional[Tuple[int, int]] = None,\n            norm_layer: Union[Type[nn.Module], str] = \"LayerNorm\",\n            act_layer: Union[Type[nn.Module], str] = \"GELU\",\n            window_size: int = 0,\n            init_values: Optional[float] = None,\n            drop_path: float = 0.0,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        norm_layer = get_norm_layer(norm_layer)\n        act_layer = get_act_layer(act_layer)\n        self.window_size = to_2tuple(window_size)\n        self.is_windowed = any(self.window_size)\n        self.dim = dim\n        self.dim_out = dim_out\n        self.q_stride = q_stride\n\n        if dim != dim_out:\n            self.proj = nn.Linear(dim, dim_out, **dd)\n        else:\n            self.proj = nn.Identity()\n        self.pool = None\n        if self.q_stride:\n            # note make a different instance for this Module so that it's not shared with attn module\n            self.pool = nn.MaxPool2d(\n                kernel_size=q_stride,\n                stride=q_stride,\n                ceil_mode=False,\n            )\n\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = MultiScaleAttention(\n            dim,\n            dim_out,\n            num_heads=num_heads,\n            q_pool=deepcopy(self.pool),\n            **dd,\n     
   )\n        self.ls1 = LayerScale(dim_out, init_values, **dd) if init_values is not None else nn.Identity()\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()\n\n        self.norm2 = norm_layer(dim_out, **dd)\n        self.mlp = Mlp(\n            dim_out,\n            int(dim_out * mlp_ratio),\n            act_layer=act_layer,\n            **dd,\n        )\n        self.ls2 = LayerScale(dim_out, init_values, **dd) if init_values is not None else nn.Identity()\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        shortcut = x  # B, H, W, C\n        x = self.norm1(x)\n\n        # Skip connection\n        if self.dim != self.dim_out:\n            shortcut = self.proj(x)\n            if self.pool is not None:\n                shortcut = shortcut.permute(0, 3, 1, 2)\n                shortcut = self.pool(shortcut).permute(0, 2, 3, 1)\n\n        # Window partition\n        window_size = self.window_size\n        H, W = x.shape[1:3]\n        Hp, Wp = H, W  # keep torchscript happy\n        if self.is_windowed:\n            Hp, Wp, pad_h, pad_w = _calc_pad(H, W, window_size)\n            x = F.pad(x, (0, 0, 0, pad_w, 0, pad_h))\n            x = window_partition(x, window_size)\n\n        # Window Attention + Q Pooling (if stage change)\n        x = self.attn(x)\n        if self.q_stride is not None:\n            # Shapes have changed due to Q pooling\n            window_size = (self.window_size[0] // self.q_stride[0], self.window_size[1] // self.q_stride[1])\n            H, W = shortcut.shape[1:3]\n            Hp, Wp, pad_h, pad_w = _calc_pad(H, W, window_size)\n\n        # Reverse window partition\n        if self.is_windowed:\n            x = window_unpartition(x, window_size, (Hp, Wp))\n            x = x[:, :H, :W, :].contiguous()  # unpad\n\n        x = shortcut + self.drop_path1(self.ls1(x))\n        x = x + 
self.drop_path2(self.ls2(self.mlp(self.norm2(x))))\n        return x\n\n\nclass HieraPatchEmbed(nn.Module):\n    \"\"\"\n    Image to Patch Embedding.\n    \"\"\"\n\n    def __init__(\n        self,\n        kernel_size: Union[int, Tuple[int, int]] = (7, 7),\n        stride: Union[int, Tuple[int, int]] = (4, 4),\n        padding: Union[str, int, Tuple[int, int]] = (3, 3),\n        in_chans: int = 3,\n        embed_dim: int = 768,\n        device=None,\n        dtype=None,\n    ):\n        \"\"\"\n        Args:\n            kernel_size: kernel size of the projection layer.\n            stride: stride of the projection layer.\n            padding: padding size of the projection layer.\n            in_chans: Number of input image channels.\n            embed_dim: Patch embedding dimension.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.proj = nn.Conv2d(\n            in_chans,\n            embed_dim,\n            kernel_size=kernel_size,\n            stride=stride,\n            padding=padding,\n            **dd,\n        )\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.proj(x)\n        # B C H W -> B H W C\n        x = x.permute(0, 2, 3, 1)\n        return x\n\n\nclass HieraDet(nn.Module):\n    \"\"\"\n    Reference: https://arxiv.org/abs/2306.00989\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            embed_dim: int = 96,  # initial embed dim\n            num_heads: int = 1,  # initial number of heads\n            patch_kernel: Tuple[int, int] = (7, 7),\n            patch_stride: Tuple[int, int] = (4, 4),\n            patch_padding: Tuple[int, int] = (3, 3),\n            patch_size: Optional[Tuple[int, int]] = None,\n            q_pool: int = 3,  # number of q_pool stages\n            q_stride: Tuple[int, int] = (2, 2),  # downsample stride bet. 
stages\n            stages: Tuple[int, ...] = (2, 3, 16, 3),  # blocks per stage\n            dim_mul: float = 2.0,  # dim_mul factor at stage shift\n            head_mul: float = 2.0,  # head_mul factor at stage shift\n            global_pos_size: Tuple[int, int] = (7, 7),\n            # window size per stage, when not using global att.\n            window_spec: Tuple[int, ...] = (\n                8,\n                4,\n                14,\n                7,\n            ),\n            # global attn in these blocks\n            global_att_blocks: Tuple[int, ...] = (\n                12,\n                16,\n                20,\n            ),\n            init_values: Optional[float] = None,\n            weight_init: str = '',\n            fix_init: bool = True,\n            head_init_scale: float = 0.001,\n            drop_rate: float = 0.0,\n            drop_path_rate: float = 0.0,  # stochastic depth\n            norm_layer: Union[Type[nn.Module], str] = \"LayerNorm\",\n            act_layer: Union[Type[nn.Module], str] = \"GELU\",\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        norm_layer = get_norm_layer(norm_layer)\n        act_layer = get_act_layer(act_layer)\n        assert len(stages) == len(window_spec)\n        self.grad_checkpointing = False\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.window_spec = window_spec\n        self.output_fmt = 'NHWC'\n\n        depth = sum(stages)\n        self.q_stride = q_stride\n        self.stage_ends = [sum(stages[:i]) - 1 for i in range(1, len(stages) + 1)]\n        assert 0 <= q_pool <= len(self.stage_ends[:-1])\n        self.q_pool_blocks = [x + 1 for x in self.stage_ends[:-1]][:q_pool]\n\n        if patch_size is not None:\n            # use a non-overlapping vit style patch embed\n            self.patch_embed = PatchEmbed(\n                img_size=None,\n                
patch_size=patch_size,\n                in_chans=in_chans,\n                embed_dim=embed_dim,\n                output_fmt='NHWC',\n                dynamic_img_pad=True,\n                **dd,\n            )\n        else:\n            self.patch_embed = HieraPatchEmbed(\n                kernel_size=patch_kernel,\n                stride=patch_stride,\n                padding=patch_padding,\n                in_chans=in_chans,\n                embed_dim=embed_dim,\n                **dd,\n            )\n        # Which blocks have global att?\n        self.global_att_blocks = global_att_blocks\n\n        # Windowed positional embedding (https://arxiv.org/abs/2311.05613)\n        self.global_pos_size = global_pos_size\n        self.pos_embed = nn.Parameter(torch.zeros(1, embed_dim, *self.global_pos_size, **dd))\n        self.pos_embed_window = nn.Parameter(torch.zeros(1, embed_dim, self.window_spec[0], self.window_spec[0], **dd))\n\n        dpr = calculate_drop_path_rates(drop_path_rate, depth)  # stochastic depth decay rule\n        cur_stage = 0\n        self.blocks = nn.Sequential()\n        self.feature_info = []\n        for i in range(depth):\n            dim_out = embed_dim\n            # lags by a block, so first block of\n            # next stage uses an initial window size\n            # of previous stage and final window size of current stage\n            window_size = self.window_spec[cur_stage]\n\n            if self.global_att_blocks is not None:\n                window_size = 0 if i in self.global_att_blocks else window_size\n\n            if i - 1 in self.stage_ends:\n                dim_out = int(embed_dim * dim_mul)\n                num_heads = int(num_heads * head_mul)\n                cur_stage += 1\n\n            block = MultiScaleBlock(\n                dim=embed_dim,\n                dim_out=dim_out,\n                num_heads=num_heads,\n                drop_path=dpr[i],\n                q_stride=self.q_stride if i in self.q_pool_blocks else 
None,\n                window_size=window_size,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                init_values=init_values,\n                **dd,\n            )\n\n            embed_dim = dim_out\n            self.blocks.append(block)\n            if i in self.stage_ends:\n                self.feature_info += [\n                    dict(num_chs=dim_out, reduction=2**(cur_stage+2), module=f'blocks.{self.stage_ends[cur_stage]}')]\n\n        self.num_features = self.head_hidden_size = embed_dim\n        self.head = ClNormMlpClassifierHead(\n            embed_dim,\n            num_classes,\n            pool_type=global_pool,\n            drop_rate=drop_rate,\n            norm_layer=norm_layer,\n            **dd,\n        )\n\n        # Initialize everything\n        if self.pos_embed is not None:\n            nn.init.trunc_normal_(self.pos_embed, std=0.02)\n\n        if self.pos_embed_window is not None:\n            nn.init.trunc_normal_(self.pos_embed_window, std=0.02)\n\n        if weight_init != 'skip':\n            init_fn = init_weight_jax if weight_init == 'jax' else init_weight_vit\n            init_fn = partial(init_fn, classifier_name='head.fc')\n            named_apply(init_fn, self)\n\n        if fix_init:\n            self.fix_init_weight()\n\n        if isinstance(self.head, ClNormMlpClassifierHead) and isinstance(self.head.fc, nn.Linear):\n            self.head.fc.weight.data.mul_(head_init_scale)\n            self.head.fc.bias.data.mul_(head_init_scale)\n\n    def _pos_embed(self, x: torch.Tensor) -> torch.Tensor:\n        h, w = x.shape[1:3]\n        window_embed = self.pos_embed_window\n        pos_embed = F.interpolate(self.pos_embed, size=(h, w), mode=\"bicubic\")\n        tile_h = pos_embed.shape[-2] // window_embed.shape[-2]\n        tile_w = pos_embed.shape[-1] // window_embed.shape[-1]\n        pos_embed = pos_embed + window_embed.tile((tile_h, tile_w))\n        pos_embed = pos_embed.permute(0, 2, 3, 
1)\n        return x + pos_embed\n\n    def fix_init_weight(self):\n        def rescale(param, _layer_id):\n            param.div_(math.sqrt(2.0 * _layer_id))\n\n        for layer_id, layer in enumerate(self.blocks):\n            rescale(layer.attn.proj.weight.data, layer_id + 1)\n            rescale(layer.mlp.fc2.weight.data, layer_id + 1)\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return ['pos_embed', 'pos_embed_window']\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict:\n        return dict(\n            stem=r'^pos_embed|pos_embed_window|patch_embed',\n            blocks=[(r'^blocks\\.(\\d+)', None)]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self):\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None, reset_other: bool = False):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, pool_type=global_pool, reset_other=reset_other)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = True,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n            coarse: bool = True,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to all intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            
intermediates_only: Only return intermediate features\n            coarse: Take coarse features (stage ends) if true, otherwise all block featrures\n        Returns:\n\n        \"\"\"\n        assert not norm, 'normalization of features not supported'\n        assert output_fmt in ('NCHW', 'NHWC'), 'Output format must be one of NCHW, NHWC.'\n        if coarse:\n            take_indices, max_index = feature_take_indices(len(self.stage_ends), indices)\n            take_indices = [self.stage_ends[i] for i in take_indices]\n            max_index = self.stage_ends[max_index]\n        else:\n            take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n\n        x = self.patch_embed(x)\n        x = self._pos_embed(x)\n\n        intermediates = []\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            blocks = self.blocks\n        else:\n            blocks = self.blocks[:max_index + 1]\n        for i, blk in enumerate(blocks):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x)\n            else:\n                x = blk(x)\n            if i in take_indices:\n                x_out = x.permute(0, 3, 1, 2) if output_fmt == 'NCHW' else x\n                intermediates.append(x_out)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n            coarse: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        if coarse:\n            take_indices, max_index = feature_take_indices(len(self.stage_ends), indices)\n            max_index = self.stage_ends[max_index]\n        else:\n            take_indices, max_index = feature_take_indices(len(self.blocks), 
indices)\n        self.blocks = self.blocks[:max_index + 1]  # truncate blocks\n        if prune_head:\n            self.head.reset(0, reset_other=prune_norm)\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.patch_embed(x)  # BHWC\n        x = self._pos_embed(x)\n        for blk in self.blocks:\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x)\n            else:\n                x = blk(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False) -> torch.Tensor:\n        x = self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n        return x\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\n# NOTE sam2 appears to use 1024x1024 for all models, but T, S, & B+ have windows that fit multiples of 224.\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 0, 'input_size': (3, 896, 896), 'pool_size': (28, 28),\n        'crop_pct': 1.0, 'interpolation': 'bicubic', 'min_input_size': (3, 224, 224),\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.proj', 'classifier': 'head.fc',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    \"sam2_hiera_tiny.fb_r896\": _cfg(\n        # hf_hub_id='facebook/sam2-hiera-tiny',\n        # hf_hub_filename='sam2_hiera_tiny.pt',\n        hf_hub_id='timm/',\n    ),\n    \"sam2_hiera_tiny.fb_r896_2pt1\": _cfg(\n        # hf_hub_id='facebook/sam2.1-hiera-tiny',\n        # hf_hub_filename='sam2.1_hiera_tiny.pt',\n        hf_hub_id='timm/',\n    ),\n    \"sam2_hiera_small.fb_r896\": _cfg(\n        # hf_hub_id='facebook/sam2-hiera-small',\n        # hf_hub_filename='sam2_hiera_small.pt',\n        hf_hub_id='timm/',\n    ),\n    
\"sam2_hiera_small.fb_r896_2pt1\": _cfg(\n        # hf_hub_id='facebook/sam2.1-hiera-small',\n        # hf_hub_filename='sam2.1_hiera_small.pt',\n        hf_hub_id='timm/',\n    ),\n    \"sam2_hiera_base_plus.fb_r896\": _cfg(\n        # hf_hub_id='facebook/sam2-hiera-base-plus',\n        # hf_hub_filename='sam2_hiera_base_plus.pt',\n        hf_hub_id='timm/',\n    ),\n    \"sam2_hiera_base_plus.fb_r896_2pt1\": _cfg(\n        # hf_hub_id='facebook/sam2.1-hiera-base-plus',\n        # hf_hub_filename='sam2.1_hiera_base_plus.pt',\n        hf_hub_id='timm/',\n    ),\n    \"sam2_hiera_large.fb_r1024\": _cfg(\n        # hf_hub_id='facebook/sam2-hiera-large',\n        # hf_hub_filename='sam2_hiera_large.pt',\n        hf_hub_id='timm/',\n        min_input_size=(3, 256, 256),\n        input_size=(3, 1024, 1024), pool_size=(32, 32),\n    ),\n    \"sam2_hiera_large.fb_r1024_2pt1\": _cfg(\n        # hf_hub_id='facebook/sam2.1-hiera-large',\n        # hf_hub_filename='sam2.1_hiera_large.pt',\n        hf_hub_id='timm/',\n        min_input_size=(3, 256, 256),\n        input_size=(3, 1024, 1024), pool_size=(32, 32),\n    ),\n    \"hieradet_small.untrained\": _cfg(\n        num_classes=1000,\n        input_size=(3, 256, 256), pool_size=(8, 8),\n    ),\n})\n\n\ndef checkpoint_filter_fn(state_dict, model=None, prefix=''):\n    state_dict = state_dict.get('model', state_dict)\n\n    output = {}\n    for k, v in state_dict.items():\n        if k.startswith(prefix):\n            k = k.replace(prefix, '')\n        else:\n            continue\n        k = k.replace('mlp.layers.0', 'mlp.fc1')\n        k = k.replace('mlp.layers.1', 'mlp.fc2')\n        output[k] = v\n    return output\n\n\ndef _create_hiera_det(variant: str, pretrained: bool = False, **kwargs) -> HieraDet:\n    out_indices = kwargs.pop('out_indices', 4)\n    checkpoint_prefix = ''\n    # if 'sam2' in variant:\n    #     # SAM2 pretrained weights have no classifier or final norm-layer (`head.norm`)\n    #     # This is 
workaround loading with num_classes=0 w/o removing norm-layer.\n    #     kwargs.setdefault('pretrained_strict', False)\n    #     checkpoint_prefix = 'image_encoder.trunk.'\n    return build_model_with_cfg(\n        HieraDet,\n        variant,\n        pretrained,\n        pretrained_filter_fn=partial(checkpoint_filter_fn, prefix=checkpoint_prefix),\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n\n\n@register_model\ndef sam2_hiera_tiny(pretrained=False, **kwargs):\n    model_args = dict(stages=(1, 2, 7, 2), global_att_blocks=(5, 7, 9))\n    return _create_hiera_det('sam2_hiera_tiny', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef sam2_hiera_small(pretrained=False, **kwargs):\n    model_args = dict(stages=(1, 2, 11, 2), global_att_blocks=(7, 10, 13))\n    return _create_hiera_det('sam2_hiera_small', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef sam2_hiera_base_plus(pretrained=False, **kwargs):\n    model_args = dict(embed_dim=112, num_heads=2, global_pos_size=(14, 14))\n    return _create_hiera_det('sam2_hiera_base_plus', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef sam2_hiera_large(pretrained=False, **kwargs):\n    model_args = dict(\n        embed_dim=144,\n        num_heads=2,\n        stages=(2, 6, 36, 4),\n        global_att_blocks=(23, 33, 43),\n        window_spec=(8, 4, 16, 8),\n    )\n    return _create_hiera_det('sam2_hiera_large', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef hieradet_small(pretrained=False, **kwargs):\n    model_args = dict(stages=(1, 2, 11, 2), global_att_blocks=(7, 10, 13), window_spec=(8, 4, 16, 8), init_values=1e-5)\n    return _create_hiera_det('hieradet_small', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n# @register_model\n# def hieradet_base(pretrained=False, **kwargs):\n#     model_args = dict(window_spec=(8, 4, 16, 8))\n#     
return _create_hiera_det('hieradet_base', pretrained=pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/hrnet.py",
    "content": "\"\"\" HRNet\n\nCopied from https://github.com/HRNet/HRNet-Image-Classification\n\nOriginal header:\n  Copyright (c) Microsoft\n  Licensed under the MIT License.\n  Written by Bin Xiao (Bin.Xiao@microsoft.com)\n  Modified by Ke Sun (sunk@mail.ustc.edu.cn)\n\"\"\"\nimport logging\nfrom typing import Dict, List, Type, Optional, Tuple\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import create_classifier\nfrom ._builder import build_model_with_cfg, pretrained_cfg_for_features\nfrom ._features import FeatureInfo\nfrom ._registry import register_model, generate_default_cfgs\nfrom .resnet import BasicBlock, Bottleneck  # leveraging ResNet block_types w/ additional features like SE\n\n__all__ = ['HighResolutionNet', 'HighResolutionNetFeatures']  # model_registry will add each entrypoint fn to this\n\n_BN_MOMENTUM = 0.1\n_logger = logging.getLogger(__name__)\n\n\ncfg_cls = dict(\n    hrnet_w18_small=dict(\n        stem_width=64,\n        stage1=dict(\n            num_modules=1,\n            num_branches=1,\n            block_type='BOTTLENECK',\n            num_blocks=(1,),\n            num_channels=(32,),\n            fuse_method='SUM',\n        ),\n        stage2=dict(\n            num_modules=1,\n            num_branches=2,\n            block_type='BASIC',\n            num_blocks=(2, 2),\n            num_channels=(16, 32),\n            fuse_method='SUM'\n        ),\n        stage3=dict(\n            num_modules=1,\n            num_branches=3,\n            block_type='BASIC',\n            num_blocks=(2, 2, 2),\n            num_channels=(16, 32, 64),\n            fuse_method='SUM'\n        ),\n        stage4=dict(\n            num_modules=1,\n            num_branches=4,\n            block_type='BASIC',\n            num_blocks=(2, 2, 2, 2),\n            num_channels=(16, 32, 64, 128),\n            fuse_method='SUM',\n        ),\n    ),\n\n    hrnet_w18_small_v2=dict(\n        
stem_width=64,\n        stage1=dict(\n            num_modules=1,\n            num_branches=1,\n            block_type='BOTTLENECK',\n            num_blocks=(2,),\n            num_channels=(64,),\n            fuse_method='SUM',\n        ),\n        stage2=dict(\n            num_modules=1,\n            num_branches=2,\n            block_type='BASIC',\n            num_blocks=(2, 2),\n            num_channels=(18, 36),\n            fuse_method='SUM'\n        ),\n        stage3=dict(\n            num_modules=3,\n            num_branches=3,\n            block_type='BASIC',\n            num_blocks=(2, 2, 2),\n            num_channels=(18, 36, 72),\n            fuse_method='SUM'\n        ),\n        stage4=dict(\n            num_modules=2,\n            num_branches=4,\n            block_type='BASIC',\n            num_blocks=(2, 2, 2, 2),\n            num_channels=(18, 36, 72, 144),\n            fuse_method='SUM',\n        ),\n    ),\n\n    hrnet_w18=dict(\n        stem_width=64,\n        stage1=dict(\n            num_modules=1,\n            num_branches=1,\n            block_type='BOTTLENECK',\n            num_blocks=(4,),\n            num_channels=(64,),\n            fuse_method='SUM',\n        ),\n        stage2=dict(\n            num_modules=1,\n            num_branches=2,\n            block_type='BASIC',\n            num_blocks=(4, 4),\n            num_channels=(18, 36),\n            fuse_method='SUM'\n        ),\n        stage3=dict(\n            num_modules=4,\n            num_branches=3,\n            block_type='BASIC',\n            num_blocks=(4, 4, 4),\n            num_channels=(18, 36, 72),\n            fuse_method='SUM'\n        ),\n        stage4=dict(\n            num_modules=3,\n            num_branches=4,\n            block_type='BASIC',\n            num_blocks=(4, 4, 4, 4),\n            num_channels=(18, 36, 72, 144),\n            fuse_method='SUM',\n        ),\n    ),\n\n    hrnet_w30=dict(\n        stem_width=64,\n        stage1=dict(\n            
num_modules=1,\n            num_branches=1,\n            block_type='BOTTLENECK',\n            num_blocks=(4,),\n            num_channels=(64,),\n            fuse_method='SUM',\n        ),\n        stage2=dict(\n            num_modules=1,\n            num_branches=2,\n            block_type='BASIC',\n            num_blocks=(4, 4),\n            num_channels=(30, 60),\n            fuse_method='SUM'\n        ),\n        stage3=dict(\n            num_modules=4,\n            num_branches=3,\n            block_type='BASIC',\n            num_blocks=(4, 4, 4),\n            num_channels=(30, 60, 120),\n            fuse_method='SUM'\n        ),\n        stage4=dict(\n            num_modules=3,\n            num_branches=4,\n            block_type='BASIC',\n            num_blocks=(4, 4, 4, 4),\n            num_channels=(30, 60, 120, 240),\n            fuse_method='SUM',\n        ),\n    ),\n\n    hrnet_w32=dict(\n        stem_width=64,\n        stage1=dict(\n            num_modules=1,\n            num_branches=1,\n            block_type='BOTTLENECK',\n            num_blocks=(4,),\n            num_channels=(64,),\n            fuse_method='SUM',\n        ),\n        stage2=dict(\n            num_modules=1,\n            num_branches=2,\n            block_type='BASIC',\n            num_blocks=(4, 4),\n            num_channels=(32, 64),\n            fuse_method='SUM'\n        ),\n        stage3=dict(\n            num_modules=4,\n            num_branches=3,\n            block_type='BASIC',\n            num_blocks=(4, 4, 4),\n            num_channels=(32, 64, 128),\n            fuse_method='SUM'\n        ),\n        stage4=dict(\n            num_modules=3,\n            num_branches=4,\n            block_type='BASIC',\n            num_blocks=(4, 4, 4, 4),\n            num_channels=(32, 64, 128, 256),\n            fuse_method='SUM',\n        ),\n    ),\n\n    hrnet_w40=dict(\n        stem_width=64,\n        stage1=dict(\n            num_modules=1,\n            num_branches=1,\n         
   block_type='BOTTLENECK',\n            num_blocks=(4,),\n            num_channels=(64,),\n            fuse_method='SUM',\n        ),\n        stage2=dict(\n            num_modules=1,\n            num_branches=2,\n            block_type='BASIC',\n            num_blocks=(4, 4),\n            num_channels=(40, 80),\n            fuse_method='SUM'\n        ),\n        stage3=dict(\n            num_modules=4,\n            num_branches=3,\n            block_type='BASIC',\n            num_blocks=(4, 4, 4),\n            num_channels=(40, 80, 160),\n            fuse_method='SUM'\n        ),\n        stage4=dict(\n            num_modules=3,\n            num_branches=4,\n            block_type='BASIC',\n            num_blocks=(4, 4, 4, 4),\n            num_channels=(40, 80, 160, 320),\n            fuse_method='SUM',\n        ),\n    ),\n\n    hrnet_w44=dict(\n        stem_width=64,\n        stage1=dict(\n            num_modules=1,\n            num_branches=1,\n            block_type='BOTTLENECK',\n            num_blocks=(4,),\n            num_channels=(64,),\n            fuse_method='SUM',\n        ),\n        stage2=dict(\n            num_modules=1,\n            num_branches=2,\n            block_type='BASIC',\n            num_blocks=(4, 4),\n            num_channels=(44, 88),\n            fuse_method='SUM'\n        ),\n        stage3=dict(\n            num_modules=4,\n            num_branches=3,\n            block_type='BASIC',\n            num_blocks=(4, 4, 4),\n            num_channels=(44, 88, 176),\n            fuse_method='SUM'\n        ),\n        stage4=dict(\n            num_modules=3,\n            num_branches=4,\n            block_type='BASIC',\n            num_blocks=(4, 4, 4, 4),\n            num_channels=(44, 88, 176, 352),\n            fuse_method='SUM',\n        ),\n    ),\n\n    hrnet_w48=dict(\n        stem_width=64,\n        stage1=dict(\n            num_modules=1,\n            num_branches=1,\n            block_type='BOTTLENECK',\n            
num_blocks=(4,),\n            num_channels=(64,),\n            fuse_method='SUM',\n        ),\n        stage2=dict(\n            num_modules=1,\n            num_branches=2,\n            block_type='BASIC',\n            num_blocks=(4, 4),\n            num_channels=(48, 96),\n            fuse_method='SUM'\n        ),\n        stage3=dict(\n            num_modules=4,\n            num_branches=3,\n            block_type='BASIC',\n            num_blocks=(4, 4, 4),\n            num_channels=(48, 96, 192),\n            fuse_method='SUM'\n        ),\n        stage4=dict(\n            num_modules=3,\n            num_branches=4,\n            block_type='BASIC',\n            num_blocks=(4, 4, 4, 4),\n            num_channels=(48, 96, 192, 384),\n            fuse_method='SUM',\n        ),\n    ),\n\n    hrnet_w64=dict(\n        stem_width=64,\n        stage1=dict(\n            num_modules=1,\n            num_branches=1,\n            block_type='BOTTLENECK',\n            num_blocks=(4,),\n            num_channels=(64,),\n            fuse_method='SUM',\n        ),\n        stage2=dict(\n            num_modules=1,\n            num_branches=2,\n            block_type='BASIC',\n            num_blocks=(4, 4),\n            num_channels=(64, 128),\n            fuse_method='SUM'\n        ),\n        stage3=dict(\n            num_modules=4,\n            num_branches=3,\n            block_type='BASIC',\n            num_blocks=(4, 4, 4),\n            num_channels=(64, 128, 256),\n            fuse_method='SUM'\n        ),\n        stage4=dict(\n            num_modules=3,\n            num_branches=4,\n            block_type='BASIC',\n            num_blocks=(4, 4, 4, 4),\n            num_channels=(64, 128, 256, 512),\n            fuse_method='SUM',\n        ),\n    )\n)\n\n\nclass HighResolutionModule(nn.Module):\n    def __init__(\n            self,\n            num_branches: int,\n            block_types: Type[nn.Module],\n            num_blocks: Tuple[int, ...],\n            num_in_chs: 
List[int],\n            num_channels: Tuple[int, ...],\n            fuse_method: str,\n            multi_scale_output: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self._check_branches(\n            num_branches,\n            block_types,\n            num_blocks,\n            num_in_chs,\n            num_channels,\n        )\n\n        self.num_in_chs = num_in_chs\n        self.fuse_method = fuse_method\n        self.num_branches = num_branches\n\n        self.multi_scale_output = multi_scale_output\n\n        self.branches = self._make_branches(\n            num_branches,\n            block_types,\n            num_blocks,\n            num_channels,\n            **dd,\n        )\n        self.fuse_layers = self._make_fuse_layers(**dd)\n        self.fuse_act = nn.ReLU(False)\n\n    def _check_branches(self, num_branches, block_types, num_blocks, num_in_chs, num_channels):\n        error_msg = ''\n        if num_branches != len(num_blocks):\n            error_msg = 'num_branches({}) <> num_blocks({})'.format(num_branches, len(num_blocks))\n        elif num_branches != len(num_channels):\n            error_msg = 'num_branches({}) <> num_channels({})'.format(num_branches, len(num_channels))\n        elif num_branches != len(num_in_chs):\n            error_msg = 'num_branches({}) <> num_in_chs({})'.format(num_branches, len(num_in_chs))\n        if error_msg:\n            _logger.error(error_msg)\n            raise ValueError(error_msg)\n\n    def _make_one_branch(self, branch_index, block_type, num_blocks, num_channels, stride=1, device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        downsample = None\n        if stride != 1 or self.num_in_chs[branch_index] != num_channels[branch_index] * block_type.expansion:\n            downsample = nn.Sequential(\n                nn.Conv2d(\n                    self.num_in_chs[branch_index],\n 
                   num_channels[branch_index] * block_type.expansion,\n                    kernel_size=1,\n                    stride=stride,\n                    bias=False,\n                    **dd,\n                ),\n                nn.BatchNorm2d(num_channels[branch_index] * block_type.expansion, momentum=_BN_MOMENTUM, **dd),\n            )\n\n        layers = [block_type(self.num_in_chs[branch_index], num_channels[branch_index], stride, downsample, **dd)]\n        self.num_in_chs[branch_index] = num_channels[branch_index] * block_type.expansion\n        for i in range(1, num_blocks[branch_index]):\n            layers.append(block_type(self.num_in_chs[branch_index], num_channels[branch_index], **dd))\n\n        return nn.Sequential(*layers)\n\n    def _make_branches(self, num_branches, block_type, num_blocks, num_channels, device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        branches = []\n        for i in range(num_branches):\n            branches.append(self._make_one_branch(i, block_type, num_blocks, num_channels, **dd))\n\n        return nn.ModuleList(branches)\n\n    def _make_fuse_layers(self, device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        if self.num_branches == 1:\n            return nn.Identity()\n\n        num_branches = self.num_branches\n        num_in_chs = self.num_in_chs\n        fuse_layers = []\n        for i in range(num_branches if self.multi_scale_output else 1):\n            fuse_layer = []\n            for j in range(num_branches):\n                if j > i:\n                    fuse_layer.append(nn.Sequential(\n                        nn.Conv2d(num_in_chs[j], num_in_chs[i], 1, 1, 0, bias=False, **dd),\n                        nn.BatchNorm2d(num_in_chs[i], momentum=_BN_MOMENTUM, **dd),\n                        nn.Upsample(scale_factor=2 ** (j - i), mode='nearest')))\n                elif j == i:\n                    fuse_layer.append(nn.Identity())\n                
else:\n                    conv3x3s = []\n                    for k in range(i - j):\n                        if k == i - j - 1:\n                            num_out_chs_conv3x3 = num_in_chs[i]\n                            conv3x3s.append(nn.Sequential(\n                                nn.Conv2d(num_in_chs[j], num_out_chs_conv3x3, 3, 2, 1, bias=False, **dd),\n                                nn.BatchNorm2d(num_out_chs_conv3x3, momentum=_BN_MOMENTUM, **dd)\n                            ))\n                        else:\n                            num_out_chs_conv3x3 = num_in_chs[j]\n                            conv3x3s.append(nn.Sequential(\n                                nn.Conv2d(num_in_chs[j], num_out_chs_conv3x3, 3, 2, 1, bias=False, **dd),\n                                nn.BatchNorm2d(num_out_chs_conv3x3, momentum=_BN_MOMENTUM, **dd),\n                                nn.ReLU(False)\n                            ))\n                    fuse_layer.append(nn.Sequential(*conv3x3s))\n            fuse_layers.append(nn.ModuleList(fuse_layer))\n\n        return nn.ModuleList(fuse_layers)\n\n    def get_num_in_chs(self):\n        return self.num_in_chs\n\n    def forward(self, x: List[torch.Tensor]) -> List[torch.Tensor]:\n        if self.num_branches == 1:\n            return [self.branches[0](x[0])]\n\n        for i, branch in enumerate(self.branches):\n            x[i] = branch(x[i])\n\n        x_fuse = []\n        for i, fuse_outer in enumerate(self.fuse_layers):\n            y = None\n            for j, f in enumerate(fuse_outer):\n                if y is None:\n                    y = f(x[j])\n                else:\n                    y = y + f(x[j])\n            x_fuse.append(self.fuse_act(y))\n        return x_fuse\n\n\nclass SequentialList(nn.Sequential):\n\n    def __init__(self, *args):\n        super().__init__(*args)\n\n    def forward(self, x) -> List[torch.Tensor]:\n        for module in self:\n            x = module(x)\n        return 
x\n\n\nblock_types_dict = {\n    'BASIC': BasicBlock,\n    'BOTTLENECK': Bottleneck\n}\n\n\nclass HighResolutionNet(nn.Module):\n\n    def __init__(\n            self,\n            cfg: Dict,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            output_stride: int = 32,\n            global_pool: str = 'avg',\n            drop_rate: float = 0.0,\n            head: str = 'classification',\n            device=None,\n            dtype=None,\n            **kwargs,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        assert output_stride == 32  # FIXME support dilation\n\n        cfg.update(**kwargs)\n        stem_width = cfg['stem_width']\n        self.conv1 = nn.Conv2d(in_chans, stem_width, kernel_size=3, stride=2, padding=1, bias=False, **dd)\n        self.bn1 = nn.BatchNorm2d(stem_width, momentum=_BN_MOMENTUM, **dd)\n        self.act1 = nn.ReLU(inplace=True)\n        self.conv2 = nn.Conv2d(stem_width, 64, kernel_size=3, stride=2, padding=1, bias=False, **dd)\n        self.bn2 = nn.BatchNorm2d(64, momentum=_BN_MOMENTUM, **dd)\n        self.act2 = nn.ReLU(inplace=True)\n\n        self.stage1_cfg = cfg['stage1']\n        num_channels = self.stage1_cfg['num_channels'][0]\n        block_type = block_types_dict[self.stage1_cfg['block_type']]\n        num_blocks = self.stage1_cfg['num_blocks'][0]\n        self.layer1 = self._make_layer(block_type, 64, num_channels, num_blocks, **dd)\n        stage1_out_channel = block_type.expansion * num_channels\n\n        self.stage2_cfg = cfg['stage2']\n        num_channels = self.stage2_cfg['num_channels']\n        block_type = block_types_dict[self.stage2_cfg['block_type']]\n        num_channels = [num_channels[i] * block_type.expansion for i in range(len(num_channels))]\n        self.transition1 = self._make_transition_layer([stage1_out_channel], num_channels, **dd)\n        self.stage2, 
pre_stage_channels = self._make_stage(self.stage2_cfg, num_channels, **dd)\n\n        self.stage3_cfg = cfg['stage3']\n        num_channels = self.stage3_cfg['num_channels']\n        block_type = block_types_dict[self.stage3_cfg['block_type']]\n        num_channels = [num_channels[i] * block_type.expansion for i in range(len(num_channels))]\n        self.transition2 = self._make_transition_layer(pre_stage_channels, num_channels, **dd)\n        self.stage3, pre_stage_channels = self._make_stage(self.stage3_cfg, num_channels, **dd)\n\n        self.stage4_cfg = cfg['stage4']\n        num_channels = self.stage4_cfg['num_channels']\n        block_type = block_types_dict[self.stage4_cfg['block_type']]\n        num_channels = [num_channels[i] * block_type.expansion for i in range(len(num_channels))]\n        self.transition3 = self._make_transition_layer(pre_stage_channels, num_channels, **dd)\n        self.stage4, pre_stage_channels = self._make_stage(self.stage4_cfg, num_channels, multi_scale_output=True, **dd)\n\n        self.head = head\n        self.head_channels = None  # set if _make_head called\n        head_conv_bias = cfg.pop('head_conv_bias', True)\n        if head == 'classification':\n            # Classification Head\n            self.num_features = self.head_hidden_size = 2048\n            self.incre_modules, self.downsamp_modules, self.final_layer = self._make_head(\n                pre_stage_channels,\n                conv_bias=head_conv_bias,\n                **dd,\n            )\n            self.global_pool, self.head_drop, self.classifier = create_classifier(\n                self.num_features,\n                self.num_classes,\n                pool_type=global_pool,\n                drop_rate=drop_rate,\n                **dd,\n            )\n        else:\n            if head == 'incre':\n                self.num_features = self.head_hidden_size = 2048\n                self.incre_modules, _, _ = self._make_head(pre_stage_channels, incre_only=True, 
**dd)\n            else:\n                self.num_features = self.head_hidden_size = 256\n                self.incre_modules = None\n            self.global_pool = nn.Identity()\n            self.head_drop = nn.Identity()\n            self.classifier = nn.Identity()\n\n        curr_stride = 2\n        # module names aren't actually valid here, hook or FeatureNet based extraction would not work\n        self.feature_info = [dict(num_chs=64, reduction=curr_stride, module='stem')]\n        for i, c in enumerate(self.head_channels if self.head_channels else num_channels):\n            curr_stride *= 2\n            c = c * 4 if self.head_channels else c  # head block_type expansion factor of 4\n            self.feature_info += [dict(num_chs=c, reduction=curr_stride, module=f'stage{i + 1}')]\n\n        self.init_weights()\n\n    def _make_head(self, pre_stage_channels, incre_only=False, conv_bias=True, device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        head_block_type = Bottleneck\n        self.head_channels = [32, 64, 128, 256]\n\n        # Increasing the #channels on each resolution\n        # from C, 2C, 4C, 8C to 128, 256, 512, 1024\n        incre_modules = []\n        for i, channels in enumerate(pre_stage_channels):\n            incre_modules.append(self._make_layer(head_block_type, channels, self.head_channels[i], 1, stride=1, **dd))\n        incre_modules = nn.ModuleList(incre_modules)\n        if incre_only:\n            return incre_modules, None, None\n\n        # downsampling modules\n        downsamp_modules = []\n        for i in range(len(pre_stage_channels) - 1):\n            in_channels = self.head_channels[i] * head_block_type.expansion\n            out_channels = self.head_channels[i + 1] * head_block_type.expansion\n            downsamp_module = nn.Sequential(\n                nn.Conv2d(\n                    in_channels=in_channels,\n                    out_channels=out_channels,\n                    kernel_size=3,\n  
                  stride=2,\n                    padding=1,\n                    bias=conv_bias,\n                    **dd,\n                ),\n                nn.BatchNorm2d(out_channels, momentum=_BN_MOMENTUM, **dd),\n                nn.ReLU(inplace=True)\n            )\n            downsamp_modules.append(downsamp_module)\n        downsamp_modules = nn.ModuleList(downsamp_modules)\n\n        final_layer = nn.Sequential(\n            nn.Conv2d(\n                in_channels=self.head_channels[3] * head_block_type.expansion,\n                out_channels=self.num_features,\n                kernel_size=1,\n                stride=1,\n                padding=0,\n                bias=conv_bias,\n                **dd,\n            ),\n            nn.BatchNorm2d(self.num_features, momentum=_BN_MOMENTUM, **dd),\n            nn.ReLU(inplace=True)\n        )\n\n        return incre_modules, downsamp_modules, final_layer\n\n    def _make_transition_layer(self, num_channels_pre_layer, num_channels_cur_layer, device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        num_branches_cur = len(num_channels_cur_layer)\n        num_branches_pre = len(num_channels_pre_layer)\n\n        transition_layers = []\n        for i in range(num_branches_cur):\n            if i < num_branches_pre:\n                if num_channels_cur_layer[i] != num_channels_pre_layer[i]:\n                    transition_layers.append(nn.Sequential(\n                        nn.Conv2d(num_channels_pre_layer[i], num_channels_cur_layer[i], 3, 1, 1, bias=False, **dd),\n                        nn.BatchNorm2d(num_channels_cur_layer[i], momentum=_BN_MOMENTUM, **dd),\n                        nn.ReLU(inplace=True)))\n                else:\n                    transition_layers.append(nn.Identity())\n            else:\n                conv3x3s = []\n                for j in range(i + 1 - num_branches_pre):\n                    _in_chs = num_channels_pre_layer[-1]\n                    _out_chs = 
num_channels_cur_layer[i] if j == i - num_branches_pre else _in_chs\n                    conv3x3s.append(nn.Sequential(\n                        nn.Conv2d(_in_chs, _out_chs, 3, 2, 1, bias=False, **dd),\n                        nn.BatchNorm2d(_out_chs, momentum=_BN_MOMENTUM, **dd),\n                        nn.ReLU(inplace=True)))\n                transition_layers.append(nn.Sequential(*conv3x3s))\n\n        return nn.ModuleList(transition_layers)\n\n    def _make_layer(self, block_type, inplanes, planes, block_types, stride=1, device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        downsample = None\n        if stride != 1 or inplanes != planes * block_type.expansion:\n            downsample = nn.Sequential(\n                nn.Conv2d(inplanes, planes * block_type.expansion, kernel_size=1, stride=stride, bias=False, **dd),\n                nn.BatchNorm2d(planes * block_type.expansion, momentum=_BN_MOMENTUM, **dd),\n            )\n\n        layers = [block_type(inplanes, planes, stride, downsample, **dd)]\n        inplanes = planes * block_type.expansion\n        for i in range(1, block_types):\n            layers.append(block_type(inplanes, planes, **dd))\n\n        return nn.Sequential(*layers)\n\n    def _make_stage(self, layer_config, num_in_chs, multi_scale_output=True, device=None, dtype=None):\n        num_modules = layer_config['num_modules']\n        num_branches = layer_config['num_branches']\n        num_blocks = layer_config['num_blocks']\n        num_channels = layer_config['num_channels']\n        block_type = block_types_dict[layer_config['block_type']]\n        fuse_method = layer_config['fuse_method']\n\n        modules = []\n        for i in range(num_modules):\n            # multi_scale_output is only used last module\n            reset_multi_scale_output = multi_scale_output or i < num_modules - 1\n            modules.append(HighResolutionModule(\n                num_branches,\n                block_type,\n              
  num_blocks,\n                num_in_chs,\n                num_channels,\n                fuse_method,\n                reset_multi_scale_output,\n                device=device,\n                dtype=dtype,\n            ))\n            num_in_chs = modules[-1].get_num_in_chs()\n\n        return SequentialList(*modules), num_in_chs\n\n    @torch.jit.ignore\n    def init_weights(self):\n        for m in self.modules():\n            if isinstance(m, nn.Conv2d):\n                nn.init.kaiming_normal_(\n                    m.weight, mode='fan_out', nonlinearity='relu')\n            elif isinstance(m, nn.BatchNorm2d):\n                nn.init.constant_(m.weight, 1)\n                nn.init.constant_(m.bias, 0)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^conv[12]|bn[12]',\n            block_types=r'^(?:layer|stage|transition)(\\d+)' if coarse else [\n                (r'^layer(\\d+)\\.(\\d+)', None),\n                (r'^stage(\\d+)\\.(\\d+)', None),\n                (r'^transition(\\d+)', (99999,)),\n            ],\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, \"gradient checkpointing not supported\"\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.classifier\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg'):\n        self.num_classes = num_classes\n        self.global_pool, self.classifier = create_classifier(\n            self.num_features, self.num_classes, pool_type=global_pool)\n\n    def stages(self, x) -> List[torch.Tensor]:\n        x = self.layer1(x)\n\n        xl = [t(x) for i, t in enumerate(self.transition1)]\n        yl = self.stage2(xl)\n\n        xl = [t(yl[-1]) if not isinstance(t, nn.Identity) else yl[i] for i, t in enumerate(self.transition2)]\n        yl = self.stage3(xl)\n\n        xl = [t(yl[-1]) if not isinstance(t, 
nn.Identity) else yl[i] for i, t in enumerate(self.transition3)]\n        yl = self.stage4(xl)\n        return yl\n\n    def forward_features(self, x):\n        # Stem\n        x = self.conv1(x)\n        x = self.bn1(x)\n        x = self.act1(x)\n        x = self.conv2(x)\n        x = self.bn2(x)\n        x = self.act2(x)\n\n        # Stages\n        yl = self.stages(x)\n        if self.incre_modules is None or self.downsamp_modules is None:\n            return yl\n\n        y = None\n        for i, incre in enumerate(self.incre_modules):\n            if y is None:\n                y = incre(yl[i])\n            else:\n                down = self.downsamp_modules[i - 1]\n                y = incre(yl[i]) + down.forward(y)\n\n        y = self.final_layer(y)\n        return y\n\n    def forward_head(self, x, pre_logits: bool = False):\n        # Classification Head\n        x = self.global_pool(x)\n        x = self.head_drop(x)\n        return x if pre_logits else self.classifier(x)\n\n    def forward(self, x):\n        y = self.forward_features(x)\n        x = self.forward_head(y)\n        return x\n\n\nclass HighResolutionNetFeatures(HighResolutionNet):\n    \"\"\"HighResolutionNet feature extraction\n\n    The design of HRNet makes it easy to grab feature maps, this class provides a simple wrapper to do so.\n    It would be more complicated to use the FeatureNet helpers.\n\n    The `feature_location=incre` allows grabbing increased channel count features using part of the\n    classification head. If `feature_location=''` the default HRNet features are returned. 
First stem\n    conv is used for stride 2 features.\n    \"\"\"\n\n    def __init__(\n            self,\n            cfg,\n            in_chans=3,\n            num_classes=1000,\n            output_stride=32,\n            global_pool='avg',\n            drop_rate=0.0,\n            feature_location='incre',\n            out_indices=(0, 1, 2, 3, 4),\n            **kwargs,\n    ):\n        assert feature_location in ('incre', '')\n        super().__init__(\n            cfg,\n            in_chans=in_chans,\n            num_classes=num_classes,\n            output_stride=output_stride,\n            global_pool=global_pool,\n            drop_rate=drop_rate,\n            head=feature_location,\n            **kwargs,\n        )\n        self.feature_info = FeatureInfo(self.feature_info, out_indices)\n        self._out_idx = {f['index'] for f in self.feature_info.get_dicts()}\n\n    def forward_features(self, x):\n        assert False, 'Not supported'\n\n    def forward(self, x) -> List[torch.Tensor]:\n        out = []\n        x = self.conv1(x)\n        x = self.bn1(x)\n        x = self.act1(x)\n        if 0 in self._out_idx:\n            out.append(x)\n        x = self.conv2(x)\n        x = self.bn2(x)\n        x = self.act2(x)\n        x = self.stages(x)\n        if self.incre_modules is not None:\n            x = [incre(f) for f, incre in zip(x, self.incre_modules)]\n        for i, f in enumerate(x):\n            if i + 1 in self._out_idx:\n                out.append(f)\n        return out\n\n\ndef _create_hrnet(variant, pretrained=False, cfg_variant=None, **model_kwargs):\n    model_cls = HighResolutionNet\n    features_only = False\n    kwargs_filter = None\n    if model_kwargs.pop('features_only', False):\n        model_cls = HighResolutionNetFeatures\n        kwargs_filter = ('num_classes', 'global_pool')\n        features_only = True\n    cfg_variant = cfg_variant or variant\n\n    pretrained_strict = model_kwargs.pop(\n        'pretrained_strict',\n        not 
features_only and model_kwargs.get('head', 'classification') == 'classification'\n    )\n    model = build_model_with_cfg(\n        model_cls,\n        variant,\n        pretrained,\n        model_cfg=cfg_cls[cfg_variant],\n        pretrained_strict=pretrained_strict,\n        kwargs_filter=kwargs_filter,\n        **model_kwargs,\n    )\n    if features_only:\n        model.pretrained_cfg = pretrained_cfg_for_features(model.default_cfg)\n        model.default_cfg = model.pretrained_cfg  # backwards compat\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bilinear',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'conv1', 'classifier': 'classifier',\n        'license': 'mit',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'hrnet_w18_small.gluon_in1k': _cfg(hf_hub_id='timm/', interpolation='bicubic', license='apache-2.0'),\n    'hrnet_w18_small.ms_in1k': _cfg(hf_hub_id='timm/'),\n    'hrnet_w18_small_v2.gluon_in1k': _cfg(hf_hub_id='timm/', interpolation='bicubic', license='apache-2.0'),\n    'hrnet_w18_small_v2.ms_in1k': _cfg(hf_hub_id='timm/'),\n    'hrnet_w18.ms_aug_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95,\n    ),\n    'hrnet_w18.ms_in1k': _cfg(hf_hub_id='timm/'),\n    'hrnet_w30.ms_in1k': _cfg(hf_hub_id='timm/'),\n    'hrnet_w32.ms_in1k': _cfg(hf_hub_id='timm/'),\n    'hrnet_w40.ms_in1k': _cfg(hf_hub_id='timm/'),\n    'hrnet_w44.ms_in1k': _cfg(hf_hub_id='timm/'),\n    'hrnet_w48.ms_in1k': _cfg(hf_hub_id='timm/'),\n    'hrnet_w64.ms_in1k': _cfg(hf_hub_id='timm/'),\n\n    'hrnet_w18_ssld.paddle_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_crop_pct=1.0, test_input_size=(3, 288, 288)\n    ),\n    'hrnet_w48_ssld.paddle_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, 
test_crop_pct=1.0, test_input_size=(3, 288, 288)\n    ),\n})\n\n\n@register_model\ndef hrnet_w18_small(pretrained=False, **kwargs) -> HighResolutionNet:\n    return _create_hrnet('hrnet_w18_small', pretrained, **kwargs)\n\n\n@register_model\ndef hrnet_w18_small_v2(pretrained=False, **kwargs) -> HighResolutionNet:\n    return _create_hrnet('hrnet_w18_small_v2', pretrained, **kwargs)\n\n\n@register_model\ndef hrnet_w18(pretrained=False, **kwargs) -> HighResolutionNet:\n    return _create_hrnet('hrnet_w18', pretrained, **kwargs)\n\n\n@register_model\ndef hrnet_w30(pretrained=False, **kwargs) -> HighResolutionNet:\n    return _create_hrnet('hrnet_w30', pretrained, **kwargs)\n\n\n@register_model\ndef hrnet_w32(pretrained=False, **kwargs) -> HighResolutionNet:\n    return _create_hrnet('hrnet_w32', pretrained, **kwargs)\n\n\n@register_model\ndef hrnet_w40(pretrained=False, **kwargs) -> HighResolutionNet:\n    return _create_hrnet('hrnet_w40', pretrained, **kwargs)\n\n\n@register_model\ndef hrnet_w44(pretrained=False, **kwargs) -> HighResolutionNet:\n    return _create_hrnet('hrnet_w44', pretrained, **kwargs)\n\n\n@register_model\ndef hrnet_w48(pretrained=False, **kwargs) -> HighResolutionNet:\n    return _create_hrnet('hrnet_w48', pretrained, **kwargs)\n\n\n@register_model\ndef hrnet_w64(pretrained=False, **kwargs) -> HighResolutionNet:\n    return _create_hrnet('hrnet_w64', pretrained, **kwargs)\n\n\n@register_model\ndef hrnet_w18_ssld(pretrained=False, **kwargs) -> HighResolutionNet:\n    kwargs.setdefault('head_conv_bias', False)\n    return _create_hrnet('hrnet_w18_ssld', cfg_variant='hrnet_w18', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef hrnet_w48_ssld(pretrained=False, **kwargs) -> HighResolutionNet:\n    kwargs.setdefault('head_conv_bias', False)\n    return _create_hrnet('hrnet_w48_ssld', cfg_variant='hrnet_w48', pretrained=pretrained, **kwargs)\n\n"
  },
  {
    "path": "timm/models/hub.py",
    "content": "from ._hub import *\n\nimport warnings\nwarnings.warn(f\"Importing from {__name__} is deprecated, please import via timm.models\", FutureWarning)\n"
  },
  {
    "path": "timm/models/inception_next.py",
    "content": "\"\"\"\nInceptionNeXt paper: https://arxiv.org/abs/2303.16900\nOriginal implementation & weights from: https://github.com/sail-sg/inceptionnext\n\"\"\"\n\nfrom functools import partial\nfrom typing import List, Optional, Tuple, Union, Type\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import trunc_normal_, DropPath, calculate_drop_path_rates, to_2tuple, get_padding, SelectAdaptivePool2d\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['MetaNeXt']\n\n\nclass InceptionDWConv2d(nn.Module):\n    \"\"\" Inception depthwise convolution\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            square_kernel_size: int = 3,\n            band_kernel_size: int = 11,\n            branch_ratio: float = 0.125,\n            dilation: int = 1,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        gc = int(in_chs * branch_ratio)  # channel numbers of a convolution branch\n        square_padding = get_padding(square_kernel_size, dilation=dilation)\n        band_padding = get_padding(band_kernel_size, dilation=dilation)\n        self.dwconv_hw = nn.Conv2d(\n            gc, gc, square_kernel_size,\n            padding=square_padding, dilation=dilation, groups=gc, **dd)\n        self.dwconv_w = nn.Conv2d(\n            gc, gc, (1, band_kernel_size),\n            padding=(0, band_padding), dilation=(1, dilation), groups=gc, **dd)\n        self.dwconv_h = nn.Conv2d(\n            gc, gc, (band_kernel_size, 1),\n            padding=(band_padding, 0), dilation=(dilation, 1), groups=gc, **dd)\n        self.split_indexes = (in_chs - 3 * gc, gc, gc, gc)\n\n    def forward(self, x):\n        x_id, x_hw, x_w, x_h = 
torch.split(x, self.split_indexes, dim=1)\n        return torch.cat((\n            x_id,\n            self.dwconv_hw(x_hw),\n            self.dwconv_w(x_w),\n            self.dwconv_h(x_h)\n            ), dim=1,\n        )\n\n\nclass ConvMlp(nn.Module):\n    \"\"\" MLP using 1x1 convs that keeps spatial dims\n    copied from timm: https://github.com/huggingface/pytorch-image-models/blob/v0.6.11/timm/models/layers/mlp.py\n    \"\"\"\n\n    def __init__(\n            self,\n            in_features: int,\n            hidden_features: Optional[int] = None,\n            out_features: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            bias: bool = True,\n            drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_features = out_features or in_features\n        hidden_features = hidden_features or in_features\n        bias = to_2tuple(bias)\n\n        self.fc1 = nn.Conv2d(in_features, hidden_features, kernel_size=1, bias=bias[0], **dd)\n        self.norm = norm_layer(hidden_features, **dd) if norm_layer else nn.Identity()\n        self.act = act_layer()\n        self.drop = nn.Dropout(drop)\n        self.fc2 = nn.Conv2d(hidden_features, out_features, kernel_size=1, bias=bias[1], **dd)\n\n    def forward(self, x):\n        x = self.fc1(x)\n        x = self.norm(x)\n        x = self.act(x)\n        x = self.drop(x)\n        x = self.fc2(x)\n        return x\n\n\nclass MlpClassifierHead(nn.Module):\n    \"\"\" MLP classification head\n    \"\"\"\n\n    def __init__(\n            self,\n            in_features: int,\n            num_classes: int = 1000,\n            pool_type: str = 'avg',\n            mlp_ratio: float = 3,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = partial(nn.LayerNorm, eps=1e-6),\n            drop: 
float = 0.,\n            bias: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.use_conv = False\n        self.in_features = in_features\n        self.num_features = hidden_features = int(mlp_ratio * in_features)\n\n        assert pool_type, 'Cannot disable pooling'\n        self.global_pool = SelectAdaptivePool2d(pool_type=pool_type, flatten=True)\n\n        self.fc1 = nn.Linear(in_features * self.global_pool.feat_mult(), hidden_features, bias=bias, **dd)\n        self.act = act_layer()\n        self.norm = norm_layer(hidden_features, **dd)\n        self.fc2 = nn.Linear(hidden_features, num_classes, bias=bias, **dd)\n        self.drop = nn.Dropout(drop)\n\n    def reset(self, num_classes: int, pool_type: Optional[str] = None):\n        if pool_type is not None:\n            assert pool_type, 'Cannot disable pooling'\n            self.global_pool = SelectAdaptivePool2d(pool_type=pool_type, flatten=True)\n\n        self.fc2 = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity()\n\n    def forward(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        x = self.fc1(x)\n        x = self.act(x)\n        x = self.norm(x)\n        x = self.drop(x)\n        return x if pre_logits else self.fc2(x)\n\n\nclass MetaNeXtBlock(nn.Module):\n    \"\"\" MetaNeXtBlock Block\n    Args:\n        dim (int): Number of input channels.\n        drop_path (float): Stochastic depth rate. Default: 0.0\n        ls_init_value (float): Init value for Layer Scale. 
Default: 1e-6.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            dilation: int = 1,\n            token_mixer: Type[nn.Module] = InceptionDWConv2d,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            mlp_layer: Type[nn.Module] = ConvMlp,\n            mlp_ratio: float = 4,\n            act_layer: Type[nn.Module] = nn.GELU,\n            ls_init_value: float = 1e-6,\n            drop_path: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.token_mixer = token_mixer(dim, dilation=dilation, **dd)\n        self.norm = norm_layer(dim, **dd)\n        self.mlp = mlp_layer(dim, int(mlp_ratio * dim), act_layer=act_layer, **dd)\n        self.gamma = nn.Parameter(ls_init_value * torch.ones(dim, **dd)) if ls_init_value else None\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def forward(self, x):\n        shortcut = x\n        x = self.token_mixer(x)\n        x = self.norm(x)\n        x = self.mlp(x)\n        if self.gamma is not None:\n            x = x.mul(self.gamma.reshape(1, -1, 1, 1))\n        x = self.drop_path(x) + shortcut\n        return x\n\n\nclass MetaNeXtStage(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            stride: int = 2,\n            depth: int = 2,\n            dilation: Tuple[int, int] = (1, 1),\n            drop_path_rates: Optional[List[float]] = None,\n            ls_init_value: float = 1.0,\n            token_mixer: Type[nn.Module] = InceptionDWConv2d,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            mlp_ratio: float = 4,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.grad_checkpointing = False\n        if 
stride > 1 or dilation[0] != dilation[1]:\n            self.downsample = nn.Sequential(\n                norm_layer(in_chs, **dd),\n                nn.Conv2d(\n                    in_chs,\n                    out_chs,\n                    kernel_size=2,\n                    stride=stride,\n                    dilation=dilation[0],\n                    **dd,\n                ),\n            )\n        else:\n            self.downsample = nn.Identity()\n\n        drop_path_rates = drop_path_rates or [0.] * depth\n        stage_blocks = []\n        for i in range(depth):\n            stage_blocks.append(MetaNeXtBlock(\n                dim=out_chs,\n                dilation=dilation[1],\n                drop_path=drop_path_rates[i],\n                ls_init_value=ls_init_value,\n                token_mixer=token_mixer,\n                act_layer=act_layer,\n                norm_layer=norm_layer,\n                mlp_ratio=mlp_ratio,\n                **dd,\n            ))\n        self.blocks = nn.Sequential(*stage_blocks)\n\n    def forward(self, x):\n        x = self.downsample(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        return x\n\n\nclass MetaNeXt(nn.Module):\n    r\"\"\" MetaNeXt\n        A PyTorch impl of : `InceptionNeXt: When Inception Meets ConvNeXt` - https://arxiv.org/abs/2303.16900\n\n    Args:\n        in_chans (int): Number of input image channels. Default: 3\n        num_classes (int): Number of classes for classification head. Default: 1000\n        depths (tuple(int)): Number of blocks at each stage. Default: (3, 3, 9, 3)\n        dims (tuple(int)): Feature dimension at each stage. Default: (96, 192, 384, 768)\n        token_mixers: Token mixer function. Default: nn.Identity\n        norm_layer: Normalization layer. Default: nn.BatchNorm2d\n        act_layer: Activation function for MLP. 
Default: nn.GELU\n        mlp_ratios (int or tuple(int)): MLP ratios. Default: (4, 4, 4, 3)\n        drop_rate (float): Head dropout rate\n        drop_path_rate (float): Stochastic depth rate. Default: 0.\n        ls_init_value (float): Init value for Layer Scale. Default: 1e-6.\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            output_stride: int = 32,\n            depths: Tuple[int, ...] = (3, 3, 9, 3),\n            dims: Tuple[int, ...] = (96, 192, 384, 768),\n            token_mixers: Union[Type[nn.Module], List[Type[nn.Module]]] = InceptionDWConv2d,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            act_layer: Type[nn.Module] = nn.GELU,\n            mlp_ratios: Union[int, Tuple[int, ...]] = (4, 4, 4, 3),\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            ls_init_value: float = 1e-6,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        num_stage = len(depths)\n        if not isinstance(token_mixers, (list, tuple)):\n            token_mixers = [token_mixers] * num_stage\n        if not isinstance(mlp_ratios, (list, tuple)):\n            mlp_ratios = [mlp_ratios] * num_stage\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.drop_rate = drop_rate\n        self.feature_info = []\n\n        self.stem = nn.Sequential(\n            nn.Conv2d(in_chans, dims[0], kernel_size=4, stride=4, **dd),\n            norm_layer(dims[0], **dd)\n        )\n\n        dp_rates = calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)\n        prev_chs = dims[0]\n        curr_stride = 4\n        dilation = 1\n        # feature resolution stages, each consisting of multiple residual blocks\n        self.stages = nn.Sequential()\n 
       for i in range(num_stage):\n            stride = 2 if curr_stride == 2 or i > 0 else 1\n            if curr_stride >= output_stride and stride > 1:\n                dilation *= stride\n                stride = 1\n            curr_stride *= stride\n            first_dilation = 1 if dilation in (1, 2) else 2\n            out_chs = dims[i]\n            self.stages.append(MetaNeXtStage(\n                prev_chs,\n                out_chs,\n                stride=stride if i > 0 else 1,\n                dilation=(first_dilation, dilation),\n                depth=depths[i],\n                drop_path_rates=dp_rates[i],\n                ls_init_value=ls_init_value,\n                act_layer=act_layer,\n                token_mixer=token_mixers[i],\n                norm_layer=norm_layer,\n                mlp_ratio=mlp_ratios[i],\n                **dd,\n            ))\n            prev_chs = out_chs\n            self.feature_info += [dict(num_chs=prev_chs, reduction=curr_stride, module=f'stages.{i}')]\n        self.num_features = prev_chs\n        self.head = MlpClassifierHead(self.num_features, num_classes, pool_type=self.global_pool, drop=drop_rate, **dd)\n        self.head_hidden_size = self.head.num_features\n        self.apply(self._init_weights)\n\n    def _init_weights(self, m):\n        if isinstance(m, (nn.Conv2d, nn.Linear)):\n            trunc_normal_(m.weight, std=.02)\n            if m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^stem',\n            blocks=r'^stages\\.(\\d+)' if coarse else [\n                (r'^stages\\.(\\d+)\\.downsample', (0,)),  # blocks\n                (r'^stages\\.(\\d+)\\.blocks\\.(\\d+)', None),\n            ]\n        )\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.fc2\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = 
None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        for s in self.stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return set()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.stem(x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            
indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, 'avg')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        x = self.stages(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.0', 'classifier': 'head.fc2',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'inception_next_atto.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/sail-sg/inceptionnext/releases/download/model/inceptionnext_atto.pth',\n    ),\n    'inception_next_tiny.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/sail-sg/inceptionnext/releases/download/model/inceptionnext_tiny.pth',\n    ),\n    'inception_next_small.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/sail-sg/inceptionnext/releases/download/model/inceptionnext_small.pth',\n    ),\n    'inception_next_base.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        # 
url='https://github.com/sail-sg/inceptionnext/releases/download/model/inceptionnext_base.pth',\n        crop_pct=0.95,\n    ),\n    'inception_next_base.sail_in1k_384': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/sail-sg/inceptionnext/releases/download/model/inceptionnext_base_384.pth',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0,\n    ),\n})\n\n\ndef _create_inception_next(variant, pretrained=False, **kwargs):\n    model = build_model_with_cfg(\n        MetaNeXt, variant, pretrained,\n        feature_cfg=dict(out_indices=(0, 1, 2, 3), flatten_sequential=True),\n        **kwargs,\n    )\n    return model\n\n\n@register_model\ndef inception_next_atto(pretrained=False, **kwargs):\n    model_args = dict(\n        depths=(2, 2, 6, 2), dims=(40, 80, 160, 320),\n        token_mixers=partial(InceptionDWConv2d, band_kernel_size=9, branch_ratio=0.25)\n    )\n    return _create_inception_next('inception_next_atto', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef inception_next_tiny(pretrained=False, **kwargs):\n    model_args = dict(\n        depths=(3, 3, 9, 3), dims=(96, 192, 384, 768),\n        token_mixers=InceptionDWConv2d,\n    )\n    return _create_inception_next('inception_next_tiny', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef inception_next_small(pretrained=False, **kwargs):\n    model_args = dict(\n        depths=(3, 3, 27, 3), dims=(96, 192, 384, 768),\n        token_mixers=InceptionDWConv2d,\n    )\n    return _create_inception_next('inception_next_small', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef inception_next_base(pretrained=False, **kwargs):\n    model_args = dict(\n        depths=(3, 3, 27, 3), dims=(128, 256, 512, 1024),\n        token_mixers=InceptionDWConv2d,\n    )\n    return _create_inception_next('inception_next_base', pretrained=pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/inception_resnet_v2.py",
    "content": "\"\"\" Pytorch Inception-Resnet-V2 implementation\nSourced from https://github.com/Cadene/tensorflow-model-zoo.torch (MIT License) which is\nbased upon Google's Tensorflow implementation and pretrained weights (Apache 2.0 License)\n\"\"\"\nfrom functools import partial\nfrom typing import Type, Optional\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD\nfrom timm.layers import create_classifier, ConvNormAct\nfrom ._builder import build_model_with_cfg\nfrom ._manipulate import flatten_modules\nfrom ._registry import register_model, generate_default_cfgs, register_model_deprecations\n\n__all__ = ['InceptionResnetV2']\n\n\nclass Mixed_5b(nn.Module):\n    def __init__(\n            self,\n            conv_block: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        conv_block = conv_block or ConvNormAct\n\n        self.branch0 = conv_block(192, 96, kernel_size=1, stride=1, **dd)\n\n        self.branch1 = nn.Sequential(\n            conv_block(192, 48, kernel_size=1, stride=1, **dd),\n            conv_block(48, 64, kernel_size=5, stride=1, padding=2, **dd)\n        )\n\n        self.branch2 = nn.Sequential(\n            conv_block(192, 64, kernel_size=1, stride=1, **dd),\n            conv_block(64, 96, kernel_size=3, stride=1, padding=1, **dd),\n            conv_block(96, 96, kernel_size=3, stride=1, padding=1, **dd)\n        )\n\n        self.branch3 = nn.Sequential(\n            nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False),\n            conv_block(192, 64, kernel_size=1, stride=1, **dd)\n        )\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        x2 = self.branch2(x)\n        x3 = self.branch3(x)\n        out = torch.cat((x0, x1, x2, x3), 1)\n        return out\n\n\nclass Block35(nn.Module):\n    def 
__init__(\n            self,\n            scale: float = 1.0,\n            conv_block: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.scale = scale\n        conv_block = conv_block or ConvNormAct\n\n        self.branch0 = conv_block(320, 32, kernel_size=1, stride=1, **dd)\n\n        self.branch1 = nn.Sequential(\n            conv_block(320, 32, kernel_size=1, stride=1, **dd),\n            conv_block(32, 32, kernel_size=3, stride=1, padding=1, **dd)\n        )\n\n        self.branch2 = nn.Sequential(\n            conv_block(320, 32, kernel_size=1, stride=1, **dd),\n            conv_block(32, 48, kernel_size=3, stride=1, padding=1, **dd),\n            conv_block(48, 64, kernel_size=3, stride=1, padding=1, **dd)\n        )\n\n        self.conv2d = nn.Conv2d(128, 320, kernel_size=1, stride=1, **dd)\n        self.act = nn.ReLU()\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        x2 = self.branch2(x)\n        out = torch.cat((x0, x1, x2), 1)\n        out = self.conv2d(out)\n        out = out * self.scale + x\n        out = self.act(out)\n        return out\n\n\nclass Mixed_6a(nn.Module):\n    def __init__(\n            self,\n            conv_block: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        conv_block = conv_block or ConvNormAct\n\n        self.branch0 = conv_block(320, 384, kernel_size=3, stride=2, **dd)\n\n        self.branch1 = nn.Sequential(\n            conv_block(320, 256, kernel_size=1, stride=1, **dd),\n            conv_block(256, 256, kernel_size=3, stride=1, padding=1, **dd),\n            conv_block(256, 384, kernel_size=3, stride=2, **dd)\n        )\n\n        self.branch2 = nn.MaxPool2d(3, stride=2)\n\n    def forward(self, x):\n        x0 = 
self.branch0(x)\n        x1 = self.branch1(x)\n        x2 = self.branch2(x)\n        out = torch.cat((x0, x1, x2), 1)\n        return out\n\n\nclass Block17(nn.Module):\n    def __init__(\n            self,\n            scale: float = 1.0,\n            conv_block: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.scale = scale\n        conv_block = conv_block or ConvNormAct\n\n        self.branch0 = conv_block(1088, 192, kernel_size=1, stride=1, **dd)\n\n        self.branch1 = nn.Sequential(\n            conv_block(1088, 128, kernel_size=1, stride=1, **dd),\n            conv_block(128, 160, kernel_size=(1, 7), stride=1, padding=(0, 3), **dd),\n            conv_block(160, 192, kernel_size=(7, 1), stride=1, padding=(3, 0), **dd)\n        )\n\n        self.conv2d = nn.Conv2d(384, 1088, kernel_size=1, stride=1, **dd)\n        self.act = nn.ReLU()\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        out = torch.cat((x0, x1), 1)\n        out = self.conv2d(out)\n        out = out * self.scale + x\n        out = self.act(out)\n        return out\n\n\nclass Mixed_7a(nn.Module):\n    def __init__(\n            self,\n            conv_block: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        conv_block = conv_block or ConvNormAct\n\n        self.branch0 = nn.Sequential(\n            conv_block(1088, 256, kernel_size=1, stride=1, **dd),\n            conv_block(256, 384, kernel_size=3, stride=2, **dd)\n        )\n\n        self.branch1 = nn.Sequential(\n            conv_block(1088, 256, kernel_size=1, stride=1, **dd),\n            conv_block(256, 288, kernel_size=3, stride=2, **dd)\n        )\n\n        self.branch2 = nn.Sequential(\n            conv_block(1088, 256, 
kernel_size=1, stride=1, **dd),\n            conv_block(256, 288, kernel_size=3, stride=1, padding=1, **dd),\n            conv_block(288, 320, kernel_size=3, stride=2, **dd)\n        )\n\n        self.branch3 = nn.MaxPool2d(3, stride=2)\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        x2 = self.branch2(x)\n        x3 = self.branch3(x)\n        out = torch.cat((x0, x1, x2, x3), 1)\n        return out\n\n\nclass Block8(nn.Module):\n\n    def __init__(\n            self,\n            scale: float = 1.0,\n            no_relu: bool = False,\n            conv_block: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.scale = scale\n        conv_block = conv_block or ConvNormAct\n\n        self.branch0 = conv_block(2080, 192, kernel_size=1, stride=1, **dd)\n\n        self.branch1 = nn.Sequential(\n            conv_block(2080, 192, kernel_size=1, stride=1, **dd),\n            conv_block(192, 224, kernel_size=(1, 3), stride=1, padding=(0, 1), **dd),\n            conv_block(224, 256, kernel_size=(3, 1), stride=1, padding=(1, 0), **dd)\n        )\n\n        self.conv2d = nn.Conv2d(448, 2080, kernel_size=1, stride=1, **dd)\n        self.relu = None if no_relu else nn.ReLU()\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        out = torch.cat((x0, x1), 1)\n        out = self.conv2d(out)\n        out = out * self.scale + x\n        if self.relu is not None:\n            out = self.relu(out)\n        return out\n\n\nclass InceptionResnetV2(nn.Module):\n    def __init__(\n            self,\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            drop_rate: float = 0.,\n            output_stride: int = 32,\n            global_pool: str = 'avg',\n            norm_layer: str = 'batchnorm2d',\n            norm_eps: float = 1e-3,\n        
    act_layer: str = 'relu',\n            device=None,\n            dtype=None,\n    ) -> None:\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.num_features = self.head_hidden_size = 1536\n        assert output_stride == 32\n        conv_block = partial(\n            ConvNormAct,\n            padding=0,\n            norm_layer=norm_layer,\n            act_layer=act_layer,\n            norm_kwargs=dict(eps=norm_eps),\n            act_kwargs=dict(inplace=True),\n        )\n\n        self.conv2d_1a = conv_block(in_chans, 32, kernel_size=3, stride=2, **dd)\n        self.conv2d_2a = conv_block(32, 32, kernel_size=3, stride=1, **dd)\n        self.conv2d_2b = conv_block(32, 64, kernel_size=3, stride=1, padding=1, **dd)\n        self.feature_info = [dict(num_chs=64, reduction=2, module='conv2d_2b')]\n\n        self.maxpool_3a = nn.MaxPool2d(3, stride=2)\n        self.conv2d_3b = conv_block(64, 80, kernel_size=1, stride=1, **dd)\n        self.conv2d_4a = conv_block(80, 192, kernel_size=3, stride=1, **dd)\n        self.feature_info += [dict(num_chs=192, reduction=4, module='conv2d_4a')]\n\n        self.maxpool_5a = nn.MaxPool2d(3, stride=2)\n        self.mixed_5b = Mixed_5b(conv_block=conv_block, **dd)\n        self.repeat = nn.Sequential(*[Block35(scale=0.17, conv_block=conv_block, **dd) for _ in range(10)])\n        self.feature_info += [dict(num_chs=320, reduction=8, module='repeat')]\n\n        self.mixed_6a = Mixed_6a(conv_block=conv_block, **dd)\n        self.repeat_1 = nn.Sequential(*[Block17(scale=0.10, conv_block=conv_block, **dd) for _ in range(20)])\n        self.feature_info += [dict(num_chs=1088, reduction=16, module='repeat_1')]\n\n        self.mixed_7a = Mixed_7a(conv_block=conv_block, **dd)\n        self.repeat_2 = nn.Sequential(*[Block8(scale=0.20, conv_block=conv_block, **dd) for _ in range(9)])\n\n        self.block8 = Block8(no_relu=True, 
conv_block=conv_block, **dd)\n        self.conv2d_7b = conv_block(2080, self.num_features, kernel_size=1, stride=1, **dd)\n        self.feature_info += [dict(num_chs=self.num_features, reduction=32, module='conv2d_7b')]\n\n        self.global_pool, self.head_drop, self.classif = create_classifier(\n            self.num_features,\n            self.num_classes,\n            pool_type=global_pool,\n            drop_rate=drop_rate,\n            **dd,\n        )\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        module_map = {k: i for i, (k, _) in enumerate(flatten_modules(self.named_children(), prefix=()))}\n        module_map.pop(('classif',))\n\n        def _matcher(name):\n            if any([name.startswith(n) for n in ('conv2d_1', 'conv2d_2')]):\n                return 0\n            elif any([name.startswith(n) for n in ('conv2d_3', 'conv2d_4')]):\n                return 1\n            elif any([name.startswith(n) for n in ('block8', 'conv2d_7')]):\n                return len(module_map) + 1\n            else:\n                for k in module_map.keys():\n                    if k == tuple(name.split('.')[:len(k)]):\n                        return module_map[k]\n                return float('inf')\n        return _matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, \"checkpointing not supported\"\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.classif\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg'):\n        self.num_classes = num_classes\n        self.global_pool, self.classif = create_classifier(self.num_features, self.num_classes, pool_type=global_pool)\n\n    def forward_features(self, x):\n        x = self.conv2d_1a(x)\n        x = self.conv2d_2a(x)\n        x = self.conv2d_2b(x)\n        x = self.maxpool_3a(x)\n        x = self.conv2d_3b(x)\n        x = self.conv2d_4a(x)\n        x = 
self.maxpool_5a(x)\n        x = self.mixed_5b(x)\n        x = self.repeat(x)\n        x = self.mixed_6a(x)\n        x = self.repeat_1(x)\n        x = self.mixed_7a(x)\n        x = self.repeat_2(x)\n        x = self.block8(x)\n        x = self.conv2d_7b(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        x = self.head_drop(x)\n        return x if pre_logits else self.classif(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_inception_resnet_v2(variant, pretrained=False, **kwargs):\n    return build_model_with_cfg(InceptionResnetV2, variant, pretrained, **kwargs)\n\n\ndefault_cfgs = generate_default_cfgs({\n    # ported from http://download.tensorflow.org/models/inception_resnet_v2_2016_08_30.tar.gz\n    'inception_resnet_v2.tf_in1k': {\n        'hf_hub_id': 'timm/',\n        'num_classes': 1000, 'input_size': (3, 299, 299), 'pool_size': (8, 8),\n        'crop_pct': 0.8975, 'interpolation': 'bicubic',\n        'mean': IMAGENET_INCEPTION_MEAN, 'std': IMAGENET_INCEPTION_STD,\n        'first_conv': 'conv2d_1a.conv', 'classifier': 'classif',\n        'license': 'apache-2.0',\n    },\n    # As per https://arxiv.org/abs/1705.07204 and\n    # ported from http://download.tensorflow.org/models/ens_adv_inception_resnet_v2_2017_08_18.tar.gz\n    'inception_resnet_v2.tf_ens_adv_in1k': {\n        'hf_hub_id': 'timm/',\n        'num_classes': 1000, 'input_size': (3, 299, 299), 'pool_size': (8, 8),\n        'crop_pct': 0.8975, 'interpolation': 'bicubic',\n        'mean': IMAGENET_INCEPTION_MEAN, 'std': IMAGENET_INCEPTION_STD,\n        'first_conv': 'conv2d_1a.conv', 'classifier': 'classif',\n        'license': 'apache-2.0',\n    }\n})\n\n\n@register_model\ndef inception_resnet_v2(pretrained=False, **kwargs) -> InceptionResnetV2:\n    return _create_inception_resnet_v2('inception_resnet_v2', pretrained=pretrained, 
**kwargs)\n\n\nregister_model_deprecations(__name__, {\n    'ens_adv_inception_resnet_v2': 'inception_resnet_v2.tf_ens_adv_in1k',\n})"
  },
  {
    "path": "timm/models/inception_v3.py",
    "content": "\"\"\" Inception-V3\n\nOriginally from torchvision Inception3 model\nLicensed BSD-Clause 3 https://github.com/pytorch/vision/blob/master/LICENSE\n\"\"\"\nfrom functools import partial\nfrom typing import Optional, Type\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_STD, IMAGENET_DEFAULT_MEAN, IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD\nfrom timm.layers import trunc_normal_, create_classifier, Linear, ConvNormAct\nfrom ._builder import build_model_with_cfg\nfrom ._builder import resolve_pretrained_cfg\nfrom ._manipulate import flatten_modules\nfrom ._registry import register_model, generate_default_cfgs, register_model_deprecations\n\n__all__ = ['InceptionV3']  # model_registry will add each entrypoint fn to this\n\n\nclass InceptionA(nn.Module):\n\n    def __init__(\n            self,\n            in_channels: int,\n            pool_features: int,\n            conv_block: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        conv_block = conv_block or ConvNormAct\n        self.branch1x1 = conv_block(in_channels, 64, kernel_size=1, **dd)\n\n        self.branch5x5_1 = conv_block(in_channels, 48, kernel_size=1, **dd)\n        self.branch5x5_2 = conv_block(48, 64, kernel_size=5, padding=2, **dd)\n\n        self.branch3x3dbl_1 = conv_block(in_channels, 64, kernel_size=1, **dd)\n        self.branch3x3dbl_2 = conv_block(64, 96, kernel_size=3, padding=1, **dd)\n        self.branch3x3dbl_3 = conv_block(96, 96, kernel_size=3, padding=1, **dd)\n\n        self.branch_pool = conv_block(in_channels, pool_features, kernel_size=1, **dd)\n\n    def _forward(self, x):\n        branch1x1 = self.branch1x1(x)\n\n        branch5x5 = self.branch5x5_1(x)\n        branch5x5 = self.branch5x5_2(branch5x5)\n\n        branch3x3dbl = self.branch3x3dbl_1(x)\n        branch3x3dbl = 
self.branch3x3dbl_2(branch3x3dbl)\n        branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl)\n\n        branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1)\n        branch_pool = self.branch_pool(branch_pool)\n\n        outputs = [branch1x1, branch5x5, branch3x3dbl, branch_pool]\n        return outputs\n\n    def forward(self, x):\n        outputs = self._forward(x)\n        return torch.cat(outputs, 1)\n\n\nclass InceptionB(nn.Module):\n\n    def __init__(\n            self,\n            in_channels: int,\n            conv_block: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        conv_block = conv_block or ConvNormAct\n        self.branch3x3 = conv_block(in_channels, 384, kernel_size=3, stride=2, **dd)\n\n        self.branch3x3dbl_1 = conv_block(in_channels, 64, kernel_size=1, **dd)\n        self.branch3x3dbl_2 = conv_block(64, 96, kernel_size=3, padding=1, **dd)\n        self.branch3x3dbl_3 = conv_block(96, 96, kernel_size=3, stride=2, **dd)\n\n    def _forward(self, x):\n        branch3x3 = self.branch3x3(x)\n\n        branch3x3dbl = self.branch3x3dbl_1(x)\n        branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)\n        branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl)\n\n        branch_pool = F.max_pool2d(x, kernel_size=3, stride=2)\n\n        outputs = [branch3x3, branch3x3dbl, branch_pool]\n        return outputs\n\n    def forward(self, x):\n        outputs = self._forward(x)\n        return torch.cat(outputs, 1)\n\n\nclass InceptionC(nn.Module):\n\n    def __init__(\n            self,\n            in_channels: int,\n            channels_7x7: int,\n            conv_block: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        conv_block = conv_block or ConvNormAct\n        self.branch1x1 = 
conv_block(in_channels, 192, kernel_size=1, **dd)\n\n        c7 = channels_7x7\n        self.branch7x7_1 = conv_block(in_channels, c7, kernel_size=1, **dd)\n        self.branch7x7_2 = conv_block(c7, c7, kernel_size=(1, 7), padding=(0, 3), **dd)\n        self.branch7x7_3 = conv_block(c7, 192, kernel_size=(7, 1), padding=(3, 0), **dd)\n\n        self.branch7x7dbl_1 = conv_block(in_channels, c7, kernel_size=1, **dd)\n        self.branch7x7dbl_2 = conv_block(c7, c7, kernel_size=(7, 1), padding=(3, 0), **dd)\n        self.branch7x7dbl_3 = conv_block(c7, c7, kernel_size=(1, 7), padding=(0, 3), **dd)\n        self.branch7x7dbl_4 = conv_block(c7, c7, kernel_size=(7, 1), padding=(3, 0), **dd)\n        self.branch7x7dbl_5 = conv_block(c7, 192, kernel_size=(1, 7), padding=(0, 3), **dd)\n\n        self.branch_pool = conv_block(in_channels, 192, kernel_size=1, **dd)\n\n    def _forward(self, x):\n        branch1x1 = self.branch1x1(x)\n\n        branch7x7 = self.branch7x7_1(x)\n        branch7x7 = self.branch7x7_2(branch7x7)\n        branch7x7 = self.branch7x7_3(branch7x7)\n\n        branch7x7dbl = self.branch7x7dbl_1(x)\n        branch7x7dbl = self.branch7x7dbl_2(branch7x7dbl)\n        branch7x7dbl = self.branch7x7dbl_3(branch7x7dbl)\n        branch7x7dbl = self.branch7x7dbl_4(branch7x7dbl)\n        branch7x7dbl = self.branch7x7dbl_5(branch7x7dbl)\n\n        branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1)\n        branch_pool = self.branch_pool(branch_pool)\n\n        outputs = [branch1x1, branch7x7, branch7x7dbl, branch_pool]\n        return outputs\n\n    def forward(self, x):\n        outputs = self._forward(x)\n        return torch.cat(outputs, 1)\n\n\nclass InceptionD(nn.Module):\n\n    def __init__(\n            self,\n            in_channels: int,\n            conv_block: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        
conv_block = conv_block or ConvNormAct\n        self.branch3x3_1 = conv_block(in_channels, 192, kernel_size=1, **dd)\n        self.branch3x3_2 = conv_block(192, 320, kernel_size=3, stride=2, **dd)\n\n        self.branch7x7x3_1 = conv_block(in_channels, 192, kernel_size=1, **dd)\n        self.branch7x7x3_2 = conv_block(192, 192, kernel_size=(1, 7), padding=(0, 3), **dd)\n        self.branch7x7x3_3 = conv_block(192, 192, kernel_size=(7, 1), padding=(3, 0), **dd)\n        self.branch7x7x3_4 = conv_block(192, 192, kernel_size=3, stride=2, **dd)\n\n    def _forward(self, x):\n        branch3x3 = self.branch3x3_1(x)\n        branch3x3 = self.branch3x3_2(branch3x3)\n\n        branch7x7x3 = self.branch7x7x3_1(x)\n        branch7x7x3 = self.branch7x7x3_2(branch7x7x3)\n        branch7x7x3 = self.branch7x7x3_3(branch7x7x3)\n        branch7x7x3 = self.branch7x7x3_4(branch7x7x3)\n\n        branch_pool = F.max_pool2d(x, kernel_size=3, stride=2)\n        outputs = [branch3x3, branch7x7x3, branch_pool]\n        return outputs\n\n    def forward(self, x):\n        outputs = self._forward(x)\n        return torch.cat(outputs, 1)\n\n\nclass InceptionE(nn.Module):\n\n    def __init__(\n            self,\n            in_channels: int,\n            conv_block: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        conv_block = conv_block or ConvNormAct\n        self.branch1x1 = conv_block(in_channels, 320, kernel_size=1, **dd)\n\n        self.branch3x3_1 = conv_block(in_channels, 384, kernel_size=1, **dd)\n        self.branch3x3_2a = conv_block(384, 384, kernel_size=(1, 3), padding=(0, 1), **dd)\n        self.branch3x3_2b = conv_block(384, 384, kernel_size=(3, 1), padding=(1, 0), **dd)\n\n        self.branch3x3dbl_1 = conv_block(in_channels, 448, kernel_size=1, **dd)\n        self.branch3x3dbl_2 = conv_block(448, 384, kernel_size=3, padding=1, **dd)\n        
self.branch3x3dbl_3a = conv_block(384, 384, kernel_size=(1, 3), padding=(0, 1), **dd)\n        self.branch3x3dbl_3b = conv_block(384, 384, kernel_size=(3, 1), padding=(1, 0), **dd)\n\n        self.branch_pool = conv_block(in_channels, 192, kernel_size=1, **dd)\n\n    def _forward(self, x):\n        branch1x1 = self.branch1x1(x)\n\n        branch3x3 = self.branch3x3_1(x)\n        branch3x3 = [\n            self.branch3x3_2a(branch3x3),\n            self.branch3x3_2b(branch3x3),\n        ]\n        branch3x3 = torch.cat(branch3x3, 1)\n\n        branch3x3dbl = self.branch3x3dbl_1(x)\n        branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)\n        branch3x3dbl = [\n            self.branch3x3dbl_3a(branch3x3dbl),\n            self.branch3x3dbl_3b(branch3x3dbl),\n        ]\n        branch3x3dbl = torch.cat(branch3x3dbl, 1)\n\n        branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1)\n        branch_pool = self.branch_pool(branch_pool)\n\n        outputs = [branch1x1, branch3x3, branch3x3dbl, branch_pool]\n        return outputs\n\n    def forward(self, x):\n        outputs = self._forward(x)\n        return torch.cat(outputs, 1)\n\n\nclass InceptionAux(nn.Module):\n\n    def __init__(\n            self,\n            in_channels: int,\n            num_classes: int,\n            conv_block: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        conv_block = conv_block or ConvNormAct\n        self.conv0 = conv_block(in_channels, 128, kernel_size=1, **dd)\n        self.conv1 = conv_block(128, 768, kernel_size=5, **dd)\n        self.conv1.stddev = 0.01\n        self.fc = Linear(768, num_classes, **dd)\n        self.fc.stddev = 0.001\n\n    def forward(self, x):\n        # N x 768 x 17 x 17\n        x = F.avg_pool2d(x, kernel_size=5, stride=3)\n        # N x 768 x 5 x 5\n        x = self.conv0(x)\n        # N x 128 x 5 x 5\n        x = 
self.conv1(x)\n        # N x 768 x 1 x 1\n        # Adaptive average pooling\n        x = F.adaptive_avg_pool2d(x, (1, 1))\n        # N x 768 x 1 x 1\n        x = torch.flatten(x, 1)\n        # N x 768\n        x = self.fc(x)\n        # N x 1000\n        return x\n\n\nclass InceptionV3(nn.Module):\n    \"\"\"Inception-V3\n    \"\"\"\n    aux_logits: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            drop_rate: float = 0.,\n            global_pool: str = 'avg',\n            aux_logits: bool = False,\n            norm_layer: str = 'batchnorm2d',\n            norm_eps: float = 1e-3,\n            act_layer: str = 'relu',\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.aux_logits = aux_logits\n        conv_block = partial(\n            ConvNormAct,\n            padding=0,\n            norm_layer=norm_layer,\n            act_layer=act_layer,\n            norm_kwargs=dict(eps=norm_eps),\n            act_kwargs=dict(inplace=True),\n        )\n\n        self.Conv2d_1a_3x3 = conv_block(in_chans, 32, kernel_size=3, stride=2, **dd)\n        self.Conv2d_2a_3x3 = conv_block(32, 32, kernel_size=3, **dd)\n        self.Conv2d_2b_3x3 = conv_block(32, 64, kernel_size=3, padding=1, **dd)\n        self.Pool1 = nn.MaxPool2d(kernel_size=3, stride=2)\n        self.Conv2d_3b_1x1 = conv_block(64, 80, kernel_size=1, **dd)\n        self.Conv2d_4a_3x3 = conv_block(80, 192, kernel_size=3, **dd)\n        self.Pool2 = nn.MaxPool2d(kernel_size=3, stride=2)\n        self.Mixed_5b = InceptionA(192, pool_features=32, conv_block=conv_block, **dd)\n        self.Mixed_5c = InceptionA(256, pool_features=64, conv_block=conv_block, **dd)\n        self.Mixed_5d = InceptionA(288, pool_features=64, conv_block=conv_block, **dd)\n        
self.Mixed_6a = InceptionB(288, conv_block=conv_block, **dd)\n        self.Mixed_6b = InceptionC(768, channels_7x7=128, conv_block=conv_block, **dd)\n        self.Mixed_6c = InceptionC(768, channels_7x7=160, conv_block=conv_block, **dd)\n        self.Mixed_6d = InceptionC(768, channels_7x7=160, conv_block=conv_block, **dd)\n        self.Mixed_6e = InceptionC(768, channels_7x7=192, conv_block=conv_block, **dd)\n        if aux_logits:\n            self.AuxLogits = InceptionAux(768, num_classes, conv_block=conv_block, **dd)\n        else:\n            self.AuxLogits = None\n        self.Mixed_7a = InceptionD(768, conv_block=conv_block, **dd)\n        self.Mixed_7b = InceptionE(1280, conv_block=conv_block, **dd)\n        self.Mixed_7c = InceptionE(2048, conv_block=conv_block, **dd)\n        self.feature_info = [\n            dict(num_chs=64, reduction=2, module='Conv2d_2b_3x3'),\n            dict(num_chs=192, reduction=4, module='Conv2d_4a_3x3'),\n            dict(num_chs=288, reduction=8, module='Mixed_5d'),\n            dict(num_chs=768, reduction=16, module='Mixed_6e'),\n            dict(num_chs=2048, reduction=32, module='Mixed_7c'),\n        ]\n\n        self.num_features = self.head_hidden_size = 2048\n        self.global_pool, self.head_drop, self.fc = create_classifier(\n            self.num_features,\n            self.num_classes,\n            pool_type=global_pool,\n            drop_rate=drop_rate,\n            **dd,\n        )\n\n        for m in self.modules():\n            if isinstance(m, nn.Conv2d) or isinstance(m, nn.Linear):\n                stddev = m.stddev if hasattr(m, 'stddev') else 0.1\n                trunc_normal_(m.weight, std=stddev)\n            elif isinstance(m, nn.BatchNorm2d):\n                nn.init.constant_(m.weight, 1)\n                nn.init.constant_(m.bias, 0)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        module_map = {k: i for i, (k, _) in enumerate(flatten_modules(self.named_children(), 
prefix=()))}\n        module_map.pop(('fc',))\n\n        def _matcher(name):\n            if any([name.startswith(n) for n in ('Conv2d_1', 'Conv2d_2')]):\n                return 0\n            elif any([name.startswith(n) for n in ('Conv2d_3', 'Conv2d_4')]):\n                return 1\n            else:\n                for k in module_map.keys():\n                    if k == tuple(name.split('.')[:len(k)]):\n                        return module_map[k]\n                return float('inf')\n        return _matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, 'gradient checkpointing not supported'\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg'):\n        self.num_classes = num_classes\n        self.global_pool, self.fc = create_classifier(self.num_features, self.num_classes, pool_type=global_pool)\n\n    def forward_preaux(self, x):\n        x = self.Conv2d_1a_3x3(x)  # N x 32 x 149 x 149\n        x = self.Conv2d_2a_3x3(x)  # N x 32 x 147 x 147\n        x = self.Conv2d_2b_3x3(x)  # N x 64 x 147 x 147\n        x = self.Pool1(x)  # N x 64 x 73 x 73\n        x = self.Conv2d_3b_1x1(x)  # N x 80 x 73 x 73\n        x = self.Conv2d_4a_3x3(x)  # N x 192 x 71 x 71\n        x = self.Pool2(x)  # N x 192 x 35 x 35\n        x = self.Mixed_5b(x)  # N x 256 x 35 x 35\n        x = self.Mixed_5c(x)  # N x 288 x 35 x 35\n        x = self.Mixed_5d(x)  # N x 288 x 35 x 35\n        x = self.Mixed_6a(x)  # N x 768 x 17 x 17\n        x = self.Mixed_6b(x)  # N x 768 x 17 x 17\n        x = self.Mixed_6c(x)  # N x 768 x 17 x 17\n        x = self.Mixed_6d(x)  # N x 768 x 17 x 17\n        x = self.Mixed_6e(x)  # N x 768 x 17 x 17\n        return x\n\n    def forward_postaux(self, x):\n        x = self.Mixed_7a(x)  # N x 1280 x 8 x 8\n        x = self.Mixed_7b(x)  # N x 2048 x 8 x 8\n        x = self.Mixed_7c(x)  # 
N x 2048 x 8 x 8\n        return x\n\n    def forward_features(self, x):\n        x = self.forward_preaux(x)\n        if self.aux_logits:\n            aux = self.AuxLogits(x)\n            x = self.forward_postaux(x)\n            return x, aux\n        x = self.forward_postaux(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        x = self.head_drop(x)\n        if pre_logits:\n            return x\n        x = self.fc(x)\n        return x\n\n    def forward(self, x):\n        if self.aux_logits:\n            x, aux = self.forward_features(x)\n            x = self.forward_head(x)\n            return x, aux\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_inception_v3(variant, pretrained=False, **kwargs):\n    pretrained_cfg = resolve_pretrained_cfg(variant, pretrained_cfg=kwargs.pop('pretrained_cfg', None))\n    aux_logits = kwargs.get('aux_logits', False)\n    has_aux_logits = False\n    if pretrained_cfg:\n        # only torchvision pretrained weights have aux logits\n        has_aux_logits = pretrained_cfg.tag == 'tv_in1k'\n    if aux_logits:\n        assert not kwargs.pop('features_only', False)\n        load_strict = has_aux_logits\n    else:\n        load_strict = not has_aux_logits\n\n    return build_model_with_cfg(\n        InceptionV3,\n        variant,\n        pretrained,\n        pretrained_cfg=pretrained_cfg,\n        pretrained_strict=load_strict,\n        **kwargs,\n    )\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 299, 299), 'pool_size': (8, 8),\n        'crop_pct': 0.875, 'interpolation': 'bicubic',\n        'mean': IMAGENET_INCEPTION_MEAN, 'std': IMAGENET_INCEPTION_STD,\n        'first_conv': 'Conv2d_1a_3x3.conv', 'classifier': 'fc', 'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    # original PyTorch 
weights, ported from Tensorflow but modified\n    'inception_v3.tv_in1k': _cfg(\n        # NOTE checkpoint has aux logit layer weights\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/inception_v3_google-1a9a5a14.pth'),\n    # my port of Tensorflow SLIM weights (http://download.tensorflow.org/models/inception_v3_2016_08_28.tar.gz)\n    'inception_v3.tf_in1k': _cfg(hf_hub_id='timm/'),\n    # my port of Tensorflow adversarially trained Inception V3 from\n    # http://download.tensorflow.org/models/adv_inception_v3_2017_08_18.tar.gz\n    'inception_v3.tf_adv_in1k': _cfg(hf_hub_id='timm/'),\n    # from gluon pretrained models, best performing in terms of accuracy/loss metrics\n    # https://gluon-cv.mxnet.io/model_zoo/classification.html\n    'inception_v3.gluon_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN,  # also works well with inception defaults\n        std=IMAGENET_DEFAULT_STD,  # also works well with inception defaults\n    )\n})\n\n\n@register_model\ndef inception_v3(pretrained=False, **kwargs) -> InceptionV3:\n    model = _create_inception_v3('inception_v3', pretrained=pretrained, **kwargs)\n    return model\n\n\nregister_model_deprecations(__name__, {\n    'tf_inception_v3': 'inception_v3.tf_in1k',\n    'adv_inception_v3': 'inception_v3.tf_adv_in1k',\n    'gluon_inception_v3': 'inception_v3.gluon_in1k',\n})\n"
  },
  {
    "path": "timm/models/inception_v4.py",
    "content": "\"\"\" Pytorch Inception-V4 implementation\nSourced from https://github.com/Cadene/tensorflow-model-zoo.torch (MIT License) which is\nbased upon Google's Tensorflow implementation and pretrained weights (Apache 2.0 License)\n\"\"\"\nfrom functools import partial\nfrom typing import List, Optional, Tuple, Union, Type\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD\nfrom timm.layers import create_classifier, ConvNormAct\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['InceptionV4']\n\n\nclass Mixed3a(nn.Module):\n    def __init__(\n            self,\n            conv_block: Type[nn.Module] = ConvNormAct,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.maxpool = nn.MaxPool2d(3, stride=2)\n        self.conv = conv_block(64, 96, kernel_size=3, stride=2, **dd)\n\n    def forward(self, x):\n        x0 = self.maxpool(x)\n        x1 = self.conv(x)\n        out = torch.cat((x0, x1), 1)\n        return out\n\n\nclass Mixed4a(nn.Module):\n    def __init__(\n            self,\n            conv_block: Type[nn.Module] = ConvNormAct,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.branch0 = nn.Sequential(\n            conv_block(160, 64, kernel_size=1, stride=1, **dd),\n            conv_block(64, 96, kernel_size=3, stride=1, **dd)\n        )\n\n        self.branch1 = nn.Sequential(\n            conv_block(160, 64, kernel_size=1, stride=1, **dd),\n            conv_block(64, 64, kernel_size=(1, 7), stride=1, padding=(0, 3), **dd),\n            conv_block(64, 64, kernel_size=(7, 1), stride=1, padding=(3, 0), **dd),\n            conv_block(64, 96, kernel_size=(3, 3), stride=1, **dd)\n      
  )\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        out = torch.cat((x0, x1), 1)\n        return out\n\n\nclass Mixed5a(nn.Module):\n    def __init__(\n            self,\n            conv_block: Type[nn.Module] = ConvNormAct,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv = conv_block(192, 192, kernel_size=3, stride=2, **dd)\n        self.maxpool = nn.MaxPool2d(3, stride=2)\n\n    def forward(self, x):\n        x0 = self.conv(x)\n        x1 = self.maxpool(x)\n        out = torch.cat((x0, x1), 1)\n        return out\n\n\nclass InceptionA(nn.Module):\n    def __init__(\n            self,\n            conv_block: Type[nn.Module] = ConvNormAct,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.branch0 = conv_block(384, 96, kernel_size=1, stride=1, **dd)\n\n        self.branch1 = nn.Sequential(\n            conv_block(384, 64, kernel_size=1, stride=1, **dd),\n            conv_block(64, 96, kernel_size=3, stride=1, padding=1, **dd)\n        )\n\n        self.branch2 = nn.Sequential(\n            conv_block(384, 64, kernel_size=1, stride=1, **dd),\n            conv_block(64, 96, kernel_size=3, stride=1, padding=1, **dd),\n            conv_block(96, 96, kernel_size=3, stride=1, padding=1, **dd)\n        )\n\n        self.branch3 = nn.Sequential(\n            nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False),\n            conv_block(384, 96, kernel_size=1, stride=1, **dd)\n        )\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        x2 = self.branch2(x)\n        x3 = self.branch3(x)\n        out = torch.cat((x0, x1, x2, x3), 1)\n        return out\n\n\nclass ReductionA(nn.Module):\n    def __init__(\n            self,\n            conv_block: Type[nn.Module] = 
ConvNormAct,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.branch0 = conv_block(384, 384, kernel_size=3, stride=2, **dd)\n\n        self.branch1 = nn.Sequential(\n            conv_block(384, 192, kernel_size=1, stride=1, **dd),\n            conv_block(192, 224, kernel_size=3, stride=1, padding=1, **dd),\n            conv_block(224, 256, kernel_size=3, stride=2, **dd)\n        )\n\n        self.branch2 = nn.MaxPool2d(3, stride=2)\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        x2 = self.branch2(x)\n        out = torch.cat((x0, x1, x2), 1)\n        return out\n\n\nclass InceptionB(nn.Module):\n    def __init__(\n            self,\n            conv_block: Type[nn.Module] = ConvNormAct,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.branch0 = conv_block(1024, 384, kernel_size=1, stride=1, **dd)\n\n        self.branch1 = nn.Sequential(\n            conv_block(1024, 192, kernel_size=1, stride=1, **dd),\n            conv_block(192, 224, kernel_size=(1, 7), stride=1, padding=(0, 3), **dd),\n            conv_block(224, 256, kernel_size=(7, 1), stride=1, padding=(3, 0), **dd)\n        )\n\n        self.branch2 = nn.Sequential(\n            conv_block(1024, 192, kernel_size=1, stride=1, **dd),\n            conv_block(192, 192, kernel_size=(7, 1), stride=1, padding=(3, 0), **dd),\n            conv_block(192, 224, kernel_size=(1, 7), stride=1, padding=(0, 3), **dd),\n            conv_block(224, 224, kernel_size=(7, 1), stride=1, padding=(3, 0), **dd),\n            conv_block(224, 256, kernel_size=(1, 7), stride=1, padding=(0, 3), **dd)\n        )\n\n        self.branch3 = nn.Sequential(\n            nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False),\n            conv_block(1024, 128, kernel_size=1, stride=1, 
**dd)\n        )\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        x2 = self.branch2(x)\n        x3 = self.branch3(x)\n        out = torch.cat((x0, x1, x2, x3), 1)\n        return out\n\n\nclass ReductionB(nn.Module):\n    def __init__(\n            self,\n            conv_block: Type[nn.Module] = ConvNormAct,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.branch0 = nn.Sequential(\n            conv_block(1024, 192, kernel_size=1, stride=1, **dd),\n            conv_block(192, 192, kernel_size=3, stride=2, **dd)\n        )\n\n        self.branch1 = nn.Sequential(\n            conv_block(1024, 256, kernel_size=1, stride=1, **dd),\n            conv_block(256, 256, kernel_size=(1, 7), stride=1, padding=(0, 3), **dd),\n            conv_block(256, 320, kernel_size=(7, 1), stride=1, padding=(3, 0), **dd),\n            conv_block(320, 320, kernel_size=3, stride=2, **dd)\n        )\n\n        self.branch2 = nn.MaxPool2d(3, stride=2)\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        x2 = self.branch2(x)\n        out = torch.cat((x0, x1, x2), 1)\n        return out\n\n\nclass InceptionC(nn.Module):\n    def __init__(\n            self,\n            conv_block: Type[nn.Module] = ConvNormAct,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.branch0 = conv_block(1536, 256, kernel_size=1, stride=1, **dd)\n\n        self.branch1_0 = conv_block(1536, 384, kernel_size=1, stride=1, **dd)\n        self.branch1_1a = conv_block(384, 256, kernel_size=(1, 3), stride=1, padding=(0, 1), **dd)\n        self.branch1_1b = conv_block(384, 256, kernel_size=(3, 1), stride=1, padding=(1, 0), **dd)\n\n        self.branch2_0 = conv_block(1536, 384, kernel_size=1, stride=1, **dd)\n        self.branch2_1 
= conv_block(384, 448, kernel_size=(3, 1), stride=1, padding=(1, 0), **dd)\n        self.branch2_2 = conv_block(448, 512, kernel_size=(1, 3), stride=1, padding=(0, 1), **dd)\n        self.branch2_3a = conv_block(512, 256, kernel_size=(1, 3), stride=1, padding=(0, 1), **dd)\n        self.branch2_3b = conv_block(512, 256, kernel_size=(3, 1), stride=1, padding=(1, 0), **dd)\n\n        self.branch3 = nn.Sequential(\n            nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False),\n            conv_block(1536, 256, kernel_size=1, stride=1, **dd)\n        )\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n\n        x1_0 = self.branch1_0(x)\n        x1_1a = self.branch1_1a(x1_0)\n        x1_1b = self.branch1_1b(x1_0)\n        x1 = torch.cat((x1_1a, x1_1b), 1)\n\n        x2_0 = self.branch2_0(x)\n        x2_1 = self.branch2_1(x2_0)\n        x2_2 = self.branch2_2(x2_1)\n        x2_3a = self.branch2_3a(x2_2)\n        x2_3b = self.branch2_3b(x2_2)\n        x2 = torch.cat((x2_3a, x2_3b), 1)\n\n        x3 = self.branch3(x)\n\n        out = torch.cat((x0, x1, x2, x3), 1)\n        return out\n\n\nclass InceptionV4(nn.Module):\n    def __init__(\n            self,\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            output_stride: int = 32,\n            drop_rate: float = 0.,\n            global_pool: str = 'avg',\n            norm_layer: str = 'batchnorm2d',\n            norm_eps: float = 1e-3,\n            act_layer: str = 'relu',\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert output_stride == 32\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.num_features = self.head_hidden_size = 1536\n        conv_block = partial(\n            ConvNormAct,\n            padding=0,\n            norm_layer=norm_layer,\n            act_layer=act_layer,\n            
norm_kwargs=dict(eps=norm_eps),\n            act_kwargs=dict(inplace=True),\n        )\n\n        features = [\n            conv_block(in_chans, 32, kernel_size=3, stride=2, **dd),\n            conv_block(32, 32, kernel_size=3, stride=1, **dd),\n            conv_block(32, 64, kernel_size=3, stride=1, padding=1, **dd),\n            Mixed3a(conv_block, **dd),\n            Mixed4a(conv_block, **dd),\n            Mixed5a(conv_block, **dd),\n        ]\n        features += [InceptionA(conv_block, **dd) for _ in range(4)]\n        features += [ReductionA(conv_block, **dd)]  # Mixed6a\n        features += [InceptionB(conv_block, **dd) for _ in range(7)]\n        features += [ReductionB(conv_block, **dd)]  # Mixed7a\n        features += [InceptionC(conv_block, **dd) for _ in range(3)]\n        self.features = nn.Sequential(*features)\n        self.feature_info = [\n            dict(num_chs=64, reduction=2, module='features.2'),\n            dict(num_chs=160, reduction=4, module='features.3'),\n            dict(num_chs=384, reduction=8, module='features.9'),\n            dict(num_chs=1024, reduction=16, module='features.17'),\n            dict(num_chs=1536, reduction=32, module='features.21'),\n        ]\n        self.global_pool, self.head_drop, self.last_linear = create_classifier(\n            self.num_features,\n            self.num_classes,\n            pool_type=global_pool,\n            drop_rate=drop_rate,\n            **dd,\n        )\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^features\\.[012]\\.',\n            blocks=r'^features\\.(\\d+)'\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, 'gradient checkpointing not supported'\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.last_linear\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg'):\n        self.num_classes = 
num_classes\n        self.global_pool, self.last_linear = create_classifier(\n            self.num_features, self.num_classes, pool_type=global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        stage_ends = [int(info['module'].split('.')[-1]) for info in self.feature_info]\n        take_indices, max_index = feature_take_indices(len(stage_ends), indices)\n        take_indices = [stage_ends[i] for i in take_indices]\n        max_index = stage_ends[max_index]\n\n        # forward pass\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.features\n        else:\n            stages = self.features[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: 
Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        stage_ends = [int(info['module'].split('.')[-1]) for info in self.feature_info]\n        take_indices, max_index = feature_take_indices(len(stage_ends), indices)\n        max_index = stage_ends[max_index]\n        self.features = self.features[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        return self.features(x)\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        x = self.head_drop(x)\n        return x if pre_logits else self.last_linear(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_inception_v4(variant, pretrained=False, **kwargs) -> InceptionV4:\n    return build_model_with_cfg(\n        InceptionV4,\n        variant,\n        pretrained,\n        feature_cfg=dict(flatten_sequential=True),\n        **kwargs,\n    )\n\n\ndefault_cfgs = generate_default_cfgs({\n    'inception_v4.tf_in1k': {\n        'hf_hub_id': 'timm/',\n        'num_classes': 1000, 'input_size': (3, 299, 299), 'pool_size': (8, 8),\n        'crop_pct': 0.875, 'interpolation': 'bicubic',\n        'mean': IMAGENET_INCEPTION_MEAN, 'std': IMAGENET_INCEPTION_STD,\n        'first_conv': 'features.0.conv', 'classifier': 'last_linear',\n        'license': 'apache-2.0',\n    }\n})\n\n\n@register_model\ndef inception_v4(pretrained=False, **kwargs):\n    return _create_inception_v4('inception_v4', pretrained, **kwargs)\n"
  },
  {
    "path": "timm/models/layers/__init__.py",
    "content": "# NOTE timm.models.layers is DEPRECATED, please use timm.layers, this is here to reduce breakages in transition\nfrom timm.layers.activations import *\nfrom timm.layers.adaptive_avgmax_pool import \\\n    adaptive_avgmax_pool2d, select_adaptive_pool2d, AdaptiveAvgMaxPool2d, SelectAdaptivePool2d\nfrom timm.layers.attention_pool2d import AttentionPool2d, RotAttentionPool2d\nfrom timm.layers.blur_pool import BlurPool2d\nfrom timm.layers.classifier import ClassifierHead, create_classifier\nfrom timm.layers.cond_conv2d import CondConv2d, get_condconv_initializer\nfrom timm.layers.config import is_exportable, is_scriptable, is_no_jit, set_exportable, set_scriptable, set_no_jit,\\\n    set_layer_config\nfrom timm.layers.conv2d_same import Conv2dSame, conv2d_same\nfrom timm.layers.conv_bn_act import ConvNormAct, ConvNormActAa, ConvBnAct\nfrom timm.layers.create_act import create_act_layer, get_act_layer, get_act_fn\nfrom timm.layers.create_attn import get_attn, create_attn\nfrom timm.layers.create_conv2d import create_conv2d\nfrom timm.layers.create_norm import get_norm_layer, create_norm_layer\nfrom timm.layers.create_norm_act import get_norm_act_layer, create_norm_act_layer, get_norm_act_layer\nfrom timm.layers.drop import DropBlock2d, DropPath, drop_block_2d, drop_path\nfrom timm.layers.eca import EcaModule, CecaModule, EfficientChannelAttn, CircularEfficientChannelAttn\nfrom timm.layers.evo_norm import EvoNorm2dB0, EvoNorm2dB1, EvoNorm2dB2,\\\n    EvoNorm2dS0, EvoNorm2dS0a, EvoNorm2dS1, EvoNorm2dS1a, EvoNorm2dS2, EvoNorm2dS2a\nfrom timm.layers.fast_norm import is_fast_norm, set_fast_norm, fast_group_norm, fast_layer_norm\nfrom timm.layers.filter_response_norm import FilterResponseNormTlu2d, FilterResponseNormAct2d\nfrom timm.layers.gather_excite import GatherExcite\nfrom timm.layers.global_context import GlobalContext\nfrom timm.layers.helpers import to_ntuple, to_2tuple, to_3tuple, to_4tuple, make_divisible, extend_tuple\nfrom timm.layers.inplace_abn 
import InplaceAbn\nfrom timm.layers.linear import Linear\nfrom timm.layers.mixed_conv2d import MixedConv2d\nfrom timm.layers.mlp import Mlp, GluMlp, GatedMlp, ConvMlp\nfrom timm.layers.non_local_attn import NonLocalAttn, BatNonLocalAttn\nfrom timm.layers.norm import GroupNorm, GroupNorm1, LayerNorm, LayerNorm2d\nfrom timm.layers.norm_act import BatchNormAct2d, GroupNormAct, convert_sync_batchnorm\nfrom timm.layers.padding import get_padding, get_same_padding, pad_same\nfrom timm.layers.patch_embed import PatchEmbed\nfrom timm.layers.pool2d_same import AvgPool2dSame, create_pool2d\nfrom timm.layers.pos_embed_sincos import RotaryEmbedding\nfrom timm.layers.squeeze_excite import SEModule, SqueezeExcite, EffectiveSEModule, EffectiveSqueezeExcite\nfrom timm.layers.selective_kernel import SelectiveKernel\nfrom timm.layers.separable_conv import SeparableConv2d, SeparableConvNormAct\nfrom timm.layers.split_attn import SplitAttn\nfrom timm.layers.split_batchnorm import SplitBatchNorm2d, convert_splitbn_model\nfrom timm.layers.std_conv import StdConv2d, StdConv2dSame, ScaledStdConv2d, ScaledStdConv2dSame\nfrom timm.layers.test_time_pool import TestTimePoolHead, apply_test_time_pool\nfrom timm.layers.trace_utils import _assert, _float_to_int\nfrom timm.layers.weight_init import trunc_normal_, trunc_normal_tf_, variance_scaling_, lecun_normal_\n\nimport warnings\nwarnings.warn(f\"Importing from {__name__} is deprecated, please import via timm.layers\", FutureWarning)\n"
  },
  {
    "path": "timm/models/levit.py",
    "content": "\"\"\" LeViT\n\nPaper: `LeViT: a Vision Transformer in ConvNet's Clothing for Faster Inference`\n    - https://arxiv.org/abs/2104.01136\n\n@article{graham2021levit,\n  title={LeViT: a Vision Transformer in ConvNet's Clothing for Faster Inference},\n  author={Benjamin Graham and Alaaeldin El-Nouby and Hugo Touvron and Pierre Stock and Armand Joulin and Herv\\'e J\\'egou and Matthijs Douze},\n  journal={arXiv preprint arXiv:22104.01136},\n  year={2021}\n}\n\nAdapted from official impl at https://github.com/facebookresearch/LeViT, original copyright bellow.\n\nThis version combines both conv/linear models and fixes torchscript compatibility.\n\nModifications and additions for timm hacked together by / Copyright 2021, Ross Wightman\n\"\"\"\n\n# Copyright (c) 2015-present, Facebook, Inc.\n# All rights reserved.\n\n# Modified from\n# https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vision_transformer.py\n# Copyright 2020 Ross Wightman, Apache-2.0 License\nfrom collections import OrderedDict\nfrom functools import partial\nfrom typing import Dict, List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_STD, IMAGENET_DEFAULT_MEAN\nfrom timm.layers import to_ntuple, to_2tuple, get_act_layer, DropPath, trunc_normal_, ndgrid\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint, checkpoint_seq\nfrom ._registry import generate_default_cfgs, register_model\n\n__all__ = ['Levit']\n\n\nclass ConvNorm(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 1,\n            stride: int = 1,\n            padding: int = 0,\n            dilation: int = 1,\n            groups: int = 1,\n            bn_weight_init: float = 1,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        
super().__init__()\n        self.linear = nn.Conv2d(in_chs, out_chs, kernel_size, stride, padding, dilation, groups, bias=False, **dd)\n        self.bn = nn.BatchNorm2d(out_chs, **dd)\n\n        nn.init.constant_(self.bn.weight, bn_weight_init)\n\n    @torch.no_grad()\n    def fuse(self):\n        c, bn = self.linear, self.bn\n        w = bn.weight / (bn.running_var + bn.eps) ** 0.5\n        w = c.weight * w[:, None, None, None]\n        b = bn.bias - bn.running_mean * bn.weight / (bn.running_var + bn.eps) ** 0.5\n        m = nn.Conv2d(\n            w.size(1), w.size(0), w.shape[2:], stride=self.linear.stride,\n            padding=self.linear.padding, dilation=self.linear.dilation, groups=self.linear.groups)\n        m.weight.data.copy_(w)\n        m.bias.data.copy_(b)\n        return m\n\n    def forward(self, x):\n        return self.bn(self.linear(x))\n\n\nclass LinearNorm(nn.Module):\n    def __init__(\n            self,\n            in_features: int,\n            out_features: int,\n            bn_weight_init: float = 1,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.linear = nn.Linear(in_features, out_features, bias=False, **dd)\n        self.bn = nn.BatchNorm1d(out_features, **dd)\n\n        nn.init.constant_(self.bn.weight, bn_weight_init)\n\n    @torch.no_grad()\n    def fuse(self):\n        l, bn = self.linear, self.bn\n        w = bn.weight / (bn.running_var + bn.eps) ** 0.5\n        w = l.weight * w[:, None]\n        b = bn.bias - bn.running_mean * bn.weight / (bn.running_var + bn.eps) ** 0.5\n        m = nn.Linear(w.size(1), w.size(0))\n        m.weight.data.copy_(w)\n        m.bias.data.copy_(b)\n        return m\n\n    def forward(self, x):\n        x = self.linear(x)\n        return self.bn(x.flatten(0, 1)).reshape_as(x)\n\n\nclass NormLinear(nn.Module):\n    def __init__(\n            self,\n            in_features: int,\n            out_features: 
int,\n            bias: bool = True,\n            std: float = 0.02,\n            drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.bn = nn.BatchNorm1d(in_features, **dd)\n        self.drop = nn.Dropout(drop)\n        self.linear = nn.Linear(in_features, out_features, bias=bias, **dd)\n\n        trunc_normal_(self.linear.weight, std=std)\n        if self.linear.bias is not None:\n            nn.init.constant_(self.linear.bias, 0)\n\n    @torch.no_grad()\n    def fuse(self):\n        bn, l = self.bn, self.linear\n        w = bn.weight / (bn.running_var + bn.eps) ** 0.5\n        b = bn.bias - self.bn.running_mean * self.bn.weight / (bn.running_var + bn.eps) ** 0.5\n        w = l.weight * w[None, :]\n        if l.bias is None:\n            b = b @ self.linear.weight.T\n        else:\n            b = (l.weight @ b[:, None]).view(-1) + self.linear.bias\n        m = nn.Linear(w.size(1), w.size(0))\n        m.weight.data.copy_(w)\n        m.bias.data.copy_(b)\n        return m\n\n    def forward(self, x):\n        return self.linear(self.drop(self.bn(x)))\n\n\nclass Stem8(nn.Sequential):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            act_layer: Type[nn.Module],\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.stride = 8\n\n        self.add_module('conv1', ConvNorm(in_chs, out_chs // 4, 3, stride=2, padding=1, **dd))\n        self.add_module('act1', act_layer())\n        self.add_module('conv2', ConvNorm(out_chs // 4, out_chs // 2, 3, stride=2, padding=1, **dd))\n        self.add_module('act2', act_layer())\n        self.add_module('conv3', ConvNorm(out_chs // 2, out_chs, 3, stride=2, padding=1, **dd))\n\n\nclass Stem16(nn.Sequential):\n    def __init__(\n            self,\n            in_chs: int,\n   
         out_chs: int,\n            act_layer: Type[nn.Module],\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.stride = 16\n\n        self.add_module('conv1', ConvNorm(in_chs, out_chs // 8, 3, stride=2, padding=1, **dd))\n        self.add_module('act1', act_layer())\n        self.add_module('conv2', ConvNorm(out_chs // 8, out_chs // 4, 3, stride=2, padding=1, **dd))\n        self.add_module('act2', act_layer())\n        self.add_module('conv3', ConvNorm(out_chs // 4, out_chs // 2, 3, stride=2, padding=1, **dd))\n        self.add_module('act3', act_layer())\n        self.add_module('conv4', ConvNorm(out_chs // 2, out_chs, 3, stride=2, padding=1, **dd))\n\n\nclass Downsample(nn.Module):\n    def __init__(\n            self,\n            stride: int,\n            resolution: Union[int, Tuple[int, int]],\n            use_pool: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        self.stride = stride\n        self.resolution = to_2tuple(resolution)\n        self.pool = nn.AvgPool2d(3, stride=stride, padding=1, count_include_pad=False) if use_pool else None\n\n    def forward(self, x):\n        B, N, C = x.shape\n        x = x.view(B, self.resolution[0], self.resolution[1], C)\n        if self.pool is not None:\n            x = self.pool(x.permute(0, 3, 1, 2)).permute(0, 2, 3, 1)\n        else:\n            x = x[:, ::self.stride, ::self.stride]\n        return x.reshape(B, -1, C)\n\n\nclass Attention(nn.Module):\n    attention_bias_cache: Dict[str, torch.Tensor]\n\n    def __init__(\n            self,\n            dim: int,\n            key_dim: int,\n            num_heads: int = 8,\n            attn_ratio: float = 4.,\n            resolution: Union[int, Tuple[int, int]] = 14,\n            use_conv: bool = False,\n            act_layer: Type[nn.Module] = nn.SiLU,\n            device=None,\n            
dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        ln_layer = ConvNorm if use_conv else LinearNorm\n        resolution = to_2tuple(resolution)\n\n        self.use_conv = use_conv\n        self.num_heads = num_heads\n        self.scale = key_dim ** -0.5\n        self.key_dim = key_dim\n        self.key_attn_dim = key_dim * num_heads\n        self.val_dim = int(attn_ratio * key_dim)\n        self.val_attn_dim = int(attn_ratio * key_dim) * num_heads\n        self.resolution = resolution\n\n        self.qkv = ln_layer(dim, self.val_attn_dim + self.key_attn_dim * 2, **dd)\n        self.proj = nn.Sequential(OrderedDict([\n            ('act', act_layer()),\n            ('ln', ln_layer(self.val_attn_dim, dim, bn_weight_init=0, **dd))\n        ]))\n\n        N = resolution[0] * resolution[1]\n        self.attention_biases = nn.Parameter(torch.empty(num_heads, N, **dd))\n        self.register_buffer(\n            'attention_bias_idxs', torch.empty((N, N), device=device, dtype=torch.long), persistent=False)\n        self.attention_bias_cache = {}\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    @torch.no_grad()\n    def train(self, mode=True):\n        super().train(mode)\n        if mode and self.attention_bias_cache:\n            self.attention_bias_cache = {}  # clear ab cache\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        nn.init.zeros_(self.attention_biases)\n        self._init_buffers()\n\n    def _compute_attention_bias_idxs(self, device=None):\n        \"\"\"Compute relative position indices for attention bias.\"\"\"\n        pos = torch.stack(ndgrid(\n            torch.arange(self.resolution[0], device=device, dtype=torch.long),\n            torch.arange(self.resolution[1], device=device, dtype=torch.long),\n        )).flatten(1)\n        rel_pos = (pos[..., :, None] - pos[..., None, :]).abs()\n 
       rel_pos = (rel_pos[0] * self.resolution[1]) + rel_pos[1]\n        return rel_pos\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        self.attention_bias_idxs.copy_(\n            self._compute_attention_bias_idxs(device=self.attention_bias_idxs.device)\n        )\n        self.attention_bias_cache = {}\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n    def get_attention_biases(self, device: torch.device) -> torch.Tensor:\n        if torch.jit.is_tracing() or self.training:\n            return self.attention_biases[:, self.attention_bias_idxs]\n        else:\n            device_key = str(device)\n            if device_key not in self.attention_bias_cache:\n                self.attention_bias_cache[device_key] = self.attention_biases[:, self.attention_bias_idxs]\n            return self.attention_bias_cache[device_key]\n\n    def forward(self, x):  # x (B,C,H,W)\n        if self.use_conv:\n            B, C, H, W = x.shape\n            q, k, v = self.qkv(x).view(\n                B, self.num_heads, -1, H * W).split([self.key_dim, self.key_dim, self.val_dim], dim=2)\n\n            attn = (q.transpose(-2, -1) @ k) * self.scale + self.get_attention_biases(x.device)\n            attn = attn.softmax(dim=-1)\n\n            x = (v @ attn.transpose(-2, -1)).view(B, -1, H, W)\n        else:\n            B, N, C = x.shape\n            q, k, v = self.qkv(x).view(\n                B, N, self.num_heads, -1).split([self.key_dim, self.key_dim, self.val_dim], dim=3)\n            q = q.permute(0, 2, 1, 3)\n            k = k.permute(0, 2, 3, 1)\n            v = v.permute(0, 2, 1, 3)\n\n            attn = q @ k * self.scale + self.get_attention_biases(x.device)\n            attn = attn.softmax(dim=-1)\n\n            x = (attn @ v).transpose(1, 2).reshape(B, N, self.val_attn_dim)\n        x = self.proj(x)\n        return 
x\n\n\nclass AttentionDownsample(nn.Module):\n    attention_bias_cache: Dict[str, torch.Tensor]\n\n    def __init__(\n            self,\n            in_dim: int,\n            out_dim: int,\n            key_dim: int,\n            num_heads: int = 8,\n            attn_ratio: float = 2.0,\n            stride: int = 2,\n            resolution: Union[int, Tuple[int, int]] = 14,\n            use_conv: bool = False,\n            use_pool: bool = False,\n            act_layer: Type[nn.Module] = nn.SiLU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        resolution = to_2tuple(resolution)\n\n        self.stride = stride\n        self.resolution = resolution\n        self.num_heads = num_heads\n        self.key_dim = key_dim\n        self.key_attn_dim = key_dim * num_heads\n        self.val_dim = int(attn_ratio * key_dim)\n        self.val_attn_dim = self.val_dim * self.num_heads\n        self.scale = key_dim ** -0.5\n        self.use_conv = use_conv\n\n        if self.use_conv:\n            ln_layer = ConvNorm\n            sub_layer = partial(\n                nn.AvgPool2d,\n                kernel_size=3 if use_pool else 1, padding=1 if use_pool else 0, count_include_pad=False)\n        else:\n            ln_layer = LinearNorm\n            sub_layer = partial(Downsample, resolution=resolution, use_pool=use_pool, **dd)\n\n        self.kv = ln_layer(in_dim, self.val_attn_dim + self.key_attn_dim, **dd)\n        self.q = nn.Sequential(OrderedDict([\n            ('down', sub_layer(stride=stride)),\n            ('ln', ln_layer(in_dim, self.key_attn_dim, **dd))\n        ]))\n        self.proj = nn.Sequential(OrderedDict([\n            ('act', act_layer()),\n            ('ln', ln_layer(self.val_attn_dim, out_dim, **dd))\n        ]))\n\n        N_k = resolution[0] * resolution[1]\n        N_q = -(-resolution[0] // stride) * -(-resolution[1] // stride)  # ceiling division\n        
self.attention_biases = nn.Parameter(torch.empty(num_heads, N_k, **dd))\n        self.register_buffer('attention_bias_idxs', torch.empty((N_q, N_k), device=device, dtype=torch.long), persistent=False)\n        self.attention_bias_cache = {}\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    @torch.no_grad()\n    def train(self, mode=True):\n        super().train(mode)\n        if mode and self.attention_bias_cache:\n            self.attention_bias_cache = {}  # clear ab cache\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        nn.init.zeros_(self.attention_biases)\n        self._init_buffers()\n\n    def _compute_attention_bias_idxs(self, device=None):\n        \"\"\"Compute relative position indices for attention bias.\"\"\"\n        k_pos = torch.stack(ndgrid(\n            torch.arange(self.resolution[0], device=device, dtype=torch.long),\n            torch.arange(self.resolution[1], device=device, dtype=torch.long),\n        )).flatten(1)\n        q_pos = torch.stack(ndgrid(\n            torch.arange(0, self.resolution[0], step=self.stride, device=device, dtype=torch.long),\n            torch.arange(0, self.resolution[1], step=self.stride, device=device, dtype=torch.long),\n        )).flatten(1)\n        rel_pos = (q_pos[..., :, None] - k_pos[..., None, :]).abs()\n        rel_pos = (rel_pos[0] * self.resolution[1]) + rel_pos[1]\n        return rel_pos\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        self.attention_bias_idxs.copy_(\n            self._compute_attention_bias_idxs(device=self.attention_bias_idxs.device)\n        )\n        self.attention_bias_cache = {}\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n    def get_attention_biases(self, device: torch.device) -> torch.Tensor:\n     
   if torch.jit.is_tracing() or self.training:\n            return self.attention_biases[:, self.attention_bias_idxs]\n        else:\n            device_key = str(device)\n            if device_key not in self.attention_bias_cache:\n                self.attention_bias_cache[device_key] = self.attention_biases[:, self.attention_bias_idxs]\n            return self.attention_bias_cache[device_key]\n\n    def forward(self, x):\n        if self.use_conv:\n            B, C, H, W = x.shape\n            HH, WW = (H - 1) // self.stride + 1, (W - 1) // self.stride + 1\n            k, v = self.kv(x).view(B, self.num_heads, -1, H * W).split([self.key_dim, self.val_dim], dim=2)\n            q = self.q(x).view(B, self.num_heads, self.key_dim, -1)\n\n            attn = (q.transpose(-2, -1) @ k) * self.scale + self.get_attention_biases(x.device)\n            attn = attn.softmax(dim=-1)\n\n            x = (v @ attn.transpose(-2, -1)).reshape(B, self.val_attn_dim, HH, WW)\n        else:\n            B, N, C = x.shape\n            k, v = self.kv(x).view(B, N, self.num_heads, -1).split([self.key_dim, self.val_dim], dim=3)\n            k = k.permute(0, 2, 3, 1)  # BHCN\n            v = v.permute(0, 2, 1, 3)  # BHNC\n            q = self.q(x).view(B, -1, self.num_heads, self.key_dim).permute(0, 2, 1, 3)\n\n            attn = q @ k * self.scale + self.get_attention_biases(x.device)\n            attn = attn.softmax(dim=-1)\n\n            x = (attn @ v).transpose(1, 2).reshape(B, -1, self.val_attn_dim)\n        x = self.proj(x)\n        return x\n\n\nclass LevitMlp(nn.Module):\n    \"\"\" MLP for Levit w/ normalization + ability to switch btw conv and linear\n    \"\"\"\n    def __init__(\n            self,\n            in_features: int,\n            hidden_features: Optional[int] = None,\n            out_features: Optional[int] = None,\n            use_conv: bool = False,\n            act_layer: Type[nn.Module] = nn.SiLU,\n            drop: float = 0.,\n            device=None,\n          
  dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_features = out_features or in_features\n        hidden_features = hidden_features or in_features\n        ln_layer = ConvNorm if use_conv else LinearNorm\n\n        self.ln1 = ln_layer(in_features, hidden_features, **dd)\n        self.act = act_layer()\n        self.drop = nn.Dropout(drop)\n        self.ln2 = ln_layer(hidden_features, out_features, bn_weight_init=0, **dd)\n\n    def forward(self, x):\n        x = self.ln1(x)\n        x = self.act(x)\n        x = self.drop(x)\n        x = self.ln2(x)\n        return x\n\n\nclass LevitDownsample(nn.Module):\n    def __init__(\n            self,\n            in_dim: int,\n            out_dim: int,\n            key_dim: int,\n            num_heads: int = 8,\n            attn_ratio: float = 4.,\n            mlp_ratio: float = 2.,\n            act_layer: Type[nn.Module] = nn.SiLU,\n            attn_act_layer: Optional[Type[nn.Module]] = None,\n            resolution: Union[int, Tuple[int, int]] = 14,\n            use_conv: bool = False,\n            use_pool: bool = False,\n            drop_path: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        attn_act_layer = attn_act_layer or act_layer\n\n        self.attn_downsample = AttentionDownsample(\n            in_dim=in_dim,\n            out_dim=out_dim,\n            key_dim=key_dim,\n            num_heads=num_heads,\n            attn_ratio=attn_ratio,\n            act_layer=attn_act_layer,\n            resolution=resolution,\n            use_conv=use_conv,\n            use_pool=use_pool,\n            **dd,\n        )\n\n        self.mlp = LevitMlp(\n            out_dim,\n            int(out_dim * mlp_ratio),\n            use_conv=use_conv,\n            act_layer=act_layer,\n            **dd,\n        )\n        self.drop_path = DropPath(drop_path) if 
drop_path > 0. else nn.Identity()\n\n    def forward(self, x):\n        x = self.attn_downsample(x)\n        x = x + self.drop_path(self.mlp(x))\n        return x\n\n\nclass LevitBlock(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            key_dim: int,\n            num_heads: int = 8,\n            attn_ratio: float = 4.,\n            mlp_ratio: float = 2.,\n            resolution: Union[int, Tuple[int, int]] = 14,\n            use_conv: bool = False,\n            act_layer: Type[nn.Module] = nn.SiLU,\n            attn_act_layer: Optional[Type[nn.Module]] = None,\n            drop_path: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        attn_act_layer = attn_act_layer or act_layer\n\n        self.attn = Attention(\n            dim=dim,\n            key_dim=key_dim,\n            num_heads=num_heads,\n            attn_ratio=attn_ratio,\n            resolution=resolution,\n            use_conv=use_conv,\n            act_layer=attn_act_layer,\n            **dd,\n            )\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.mlp = LevitMlp(\n            dim,\n            int(dim * mlp_ratio),\n            use_conv=use_conv,\n            act_layer=act_layer,\n            **dd,\n        )\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n    def forward(self, x):\n        x = x + self.drop_path1(self.attn(x))\n        x = x + self.drop_path2(self.mlp(x))\n        return x\n\n\nclass LevitStage(nn.Module):\n    def __init__(\n            self,\n            in_dim: int,\n            out_dim: int,\n            key_dim: int,\n            depth: int = 4,\n            num_heads: int = 8,\n            attn_ratio: float = 4.0,\n            mlp_ratio: float = 4.0,\n            act_layer: Type[nn.Module] = nn.SiLU,\n            attn_act_layer: Optional[Type[nn.Module]] = None,\n            resolution: Union[int, Tuple[int, int]] = 14,\n            downsample: str = '',\n            use_conv: bool = False,\n            drop_path: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        resolution = to_2tuple(resolution)\n\n        if downsample:\n            self.downsample = LevitDownsample(\n                in_dim,\n                out_dim,\n                key_dim=key_dim,\n                num_heads=in_dim // key_dim,\n                attn_ratio=4.,\n                mlp_ratio=2.,\n                act_layer=act_layer,\n                attn_act_layer=attn_act_layer,\n                resolution=resolution,\n                use_conv=use_conv,\n                drop_path=drop_path,\n                **dd,\n            )\n            resolution = [(r - 1) // 2 + 1 for r in resolution]\n        else:\n            assert in_dim == out_dim\n            self.downsample = nn.Identity()\n\n        blocks = []\n        for _ in range(depth):\n            blocks += [LevitBlock(\n                out_dim,\n                key_dim,\n                num_heads=num_heads,\n                attn_ratio=attn_ratio,\n                mlp_ratio=mlp_ratio,\n                act_layer=act_layer,\n                attn_act_layer=attn_act_layer,\n                resolution=resolution,\n                
use_conv=use_conv,\n                drop_path=drop_path,\n                **dd,\n            )]\n        self.blocks = nn.Sequential(*blocks)\n\n    def forward(self, x):\n        x = self.downsample(x)\n        x = self.blocks(x)\n        return x\n\n\nclass Levit(nn.Module):\n    \"\"\" Vision Transformer with support for patch or hybrid CNN input stage\n\n    NOTE: distillation is defaulted to True since pretrained weights use it, will cause problems\n    w/ train scripts that don't take tuple outputs,\n    \"\"\"\n\n    def __init__(\n            self,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            embed_dim: Tuple[int, ...] = (192,),\n            key_dim: int = 64,\n            depth: Tuple[int, ...] = (12,),\n            num_heads: Union[int, Tuple[int, ...]] = (3,),\n            attn_ratio: Union[float, Tuple[float, ...]] = 2.,\n            mlp_ratio: Union[float, Tuple[float, ...]] = 2.,\n            stem_backbone: Optional[nn.Module] = None,\n            stem_stride: Optional[int] = None,\n            stem_type: str = 's16',\n            down_op: str = 'subsample',\n            act_layer: str = 'hard_swish',\n            attn_act_layer: Optional[str] = None,\n            use_conv: bool = False,\n            global_pool: str = 'avg',\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        act_layer = get_act_layer(act_layer)\n        attn_act_layer = get_act_layer(attn_act_layer or act_layer)\n        self.use_conv = use_conv\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.num_features = self.head_hidden_size = embed_dim[-1]\n        self.embed_dim = embed_dim\n        self.drop_rate = drop_rate\n        
self.grad_checkpointing = False\n        self.feature_info = []\n\n        num_stages = len(embed_dim)\n        assert len(depth) == num_stages\n        num_heads = to_ntuple(num_stages)(num_heads)\n        attn_ratio = to_ntuple(num_stages)(attn_ratio)\n        mlp_ratio = to_ntuple(num_stages)(mlp_ratio)\n\n        if stem_backbone is not None:\n            assert stem_stride >= 2\n            self.stem = stem_backbone\n            stride = stem_stride\n        else:\n            assert stem_type in ('s16', 's8')\n            if stem_type == 's16':\n                self.stem = Stem16(in_chans, embed_dim[0], act_layer=act_layer, **dd)\n            else:\n                self.stem = Stem8(in_chans, embed_dim[0], act_layer=act_layer, **dd)\n            stride = self.stem.stride\n        resolution = tuple([i // p for i, p in zip(to_2tuple(img_size), to_2tuple(stride))])\n\n        in_dim = embed_dim[0]\n        stages = []\n        for i in range(num_stages):\n            stage_stride = 2 if i > 0 else 1\n            stages += [LevitStage(\n                in_dim,\n                embed_dim[i],\n                key_dim,\n                depth=depth[i],\n                num_heads=num_heads[i],\n                attn_ratio=attn_ratio[i],\n                mlp_ratio=mlp_ratio[i],\n                act_layer=act_layer,\n                attn_act_layer=attn_act_layer,\n                resolution=resolution,\n                use_conv=use_conv,\n                downsample=down_op if stage_stride == 2 else '',\n                drop_path=drop_path_rate,\n                **dd,\n            )]\n            stride *= stage_stride\n            resolution = tuple([(r - 1) // stage_stride + 1 for r in resolution])\n            self.feature_info += [dict(num_chs=embed_dim[i], reduction=stride, module=f'stages.{i}')]\n            in_dim = embed_dim[i]\n        self.stages = nn.Sequential(*stages)\n\n        # Classifier head\n        self.head = NormLinear(embed_dim[-1], num_classes, 
drop=drop_rate, **dd) if num_classes > 0 else nn.Identity()\n\n        # TODO: skip init when on meta device when safe to do so\n        self.init_weights(needs_reset=False)\n\n    def init_weights(self, needs_reset: bool = True):\n        self.apply(partial(self._init_weights, needs_reset=needs_reset))\n\n    def _init_weights(self, m: nn.Module, needs_reset: bool = True) -> None:\n        if needs_reset and hasattr(m, 'reset_parameters'):\n            m.reset_parameters()\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {x for x in self.state_dict().keys() if 'attention_biases' in x}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^cls_token|pos_embed|patch_embed',  # stem and embed\n            blocks=[(r'^blocks\\.(\\d+)', None), (r'^norm', (99999,))]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int , global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            self.global_pool = global_pool\n        self.head = NormLinear(\n            self.num_features, num_classes, drop=self.drop_rate) if num_classes > 0 else nn.Identity()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all 
if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.stem(x)\n        B, C, H, W = x.shape\n        if not self.use_conv:\n            x = x.flatten(2).transpose(1, 2)\n\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n        for feat_idx, stage in enumerate(stages):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(stage, x)\n            else:\n                x = stage(x)\n            if feat_idx in take_indices:\n                if self.use_conv:\n                    intermediates.append(x)\n                else:\n                    intermediates.append(x.reshape(B, H, W, -1).permute(0, 3, 1, 2))\n            H = (H + 2 - 1) // 2\n            W = (W + 2 - 1) // 2\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            
self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        if not self.use_conv:\n            x = x.flatten(2).transpose(1, 2)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stages, x)\n        else:\n            x = self.stages(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        if self.global_pool == 'avg':\n            x = x.mean(dim=(-2, -1)) if self.use_conv else x.mean(dim=1)\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\nclass LevitDistilled(Levit):\n    def __init__(self, *args, **kwargs):\n        super().__init__(*args, **kwargs)\n        dd = {'device': kwargs.get('device', None), 'dtype': kwargs.get('dtype', None)}\n        self.head_dist = NormLinear(self.num_features, self.num_classes, **dd) if self.num_classes > 0 else nn.Identity()\n        self.distilled_training = False  # must set this True to train w/ distillation token\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head, self.head_dist\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            self.global_pool = global_pool\n        self.head = NormLinear(\n            self.num_features, num_classes, drop=self.drop_rate) if num_classes > 0 else nn.Identity()\n        self.head_dist = NormLinear(self.num_features, num_classes) if num_classes > 0 else nn.Identity()\n\n    @torch.jit.ignore\n    def set_distilled_training(self, enable=True):\n        self.distilled_training = enable\n\n    def forward_head(self, x, pre_logits: bool = False):\n        if self.global_pool == 'avg':\n            x = x.mean(dim=(-2, -1)) if self.use_conv else 
x.mean(dim=1)\n        if pre_logits:\n            return x\n        x, x_dist = self.head(x), self.head_dist(x)\n        if self.distilled_training and self.training and not torch.jit.is_scripting():\n            # only return separate classification predictions when training in distilled mode\n            return x, x_dist\n        else:\n            # during standard train/finetune, inference average the classifier predictions\n            return (x + x_dist) / 2\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    if 'model' in state_dict:\n        state_dict = state_dict['model']\n\n    # filter out attn biases, should not have been persistent\n    state_dict = {k: v for k, v in state_dict.items() if 'attention_bias_idxs' not in k}\n\n    # NOTE: old weight conversion code, disabled\n    # D = model.state_dict()\n    # out_dict = {}\n    # for ka, kb, va, vb in zip(D.keys(), state_dict.keys(), D.values(), state_dict.values()):\n    #     if va.ndim == 4 and vb.ndim == 2:\n    #         vb = vb[:, :, None, None]\n    #     if va.shape != vb.shape:\n    #         # head or first-conv shapes may change for fine-tune\n    #         assert 'head' in ka or 'stem.conv1.linear' in ka\n    #     out_dict[ka] = vb\n\n    return state_dict\n\n\nmodel_cfgs = dict(\n    levit_128s=dict(\n        embed_dim=(128, 256, 384), key_dim=16, num_heads=(4, 6, 8), depth=(2, 3, 4)),\n    levit_128=dict(\n        embed_dim=(128, 256, 384), key_dim=16, num_heads=(4, 8, 12), depth=(4, 4, 4)),\n    levit_192=dict(\n        embed_dim=(192, 288, 384), key_dim=32, num_heads=(3, 5, 6), depth=(4, 4, 4)),\n    levit_256=dict(\n        embed_dim=(256, 384, 512), key_dim=32, num_heads=(4, 6, 8), depth=(4, 4, 4)),\n    levit_384=dict(\n        embed_dim=(384, 512, 768), key_dim=32, num_heads=(6, 9, 12), depth=(4, 4, 4)),\n\n    # stride-8 stem experiments\n    levit_384_s8=dict(\n        embed_dim=(384, 512, 768), key_dim=32, num_heads=(6, 9, 12), depth=(4, 4, 4),\n        act_layer='silu', 
stem_type='s8'),\n    levit_512_s8=dict(\n        embed_dim=(512, 640, 896), key_dim=64, num_heads=(8, 10, 14), depth=(4, 4, 4),\n        act_layer='silu', stem_type='s8'),\n\n    # wider experiments\n    levit_512=dict(\n        embed_dim=(512, 768, 1024), key_dim=64, num_heads=(8, 12, 16), depth=(4, 4, 4), act_layer='silu'),\n\n    # deeper experiments\n    levit_256d=dict(\n        embed_dim=(256, 384, 512), key_dim=32, num_heads=(4, 6, 8), depth=(4, 8, 6), act_layer='silu'),\n    levit_512d=dict(\n        embed_dim=(512, 640, 768), key_dim=64, num_heads=(8, 10, 12), depth=(4, 8, 6), act_layer='silu'),\n)\n\n\ndef create_levit(variant, cfg_variant=None, pretrained=False, distilled=True, **kwargs):\n    is_conv = '_conv' in variant\n    out_indices = kwargs.pop('out_indices', (0, 1, 2))\n    if kwargs.get('features_only', False) and not is_conv:\n        kwargs.setdefault('feature_cls', 'getter')\n    if cfg_variant is None:\n        if variant in model_cfgs:\n            cfg_variant = variant\n        elif is_conv:\n            cfg_variant = variant.replace('_conv', '')\n\n    model_cfg = dict(model_cfgs[cfg_variant], **kwargs)\n    model = build_model_with_cfg(\n        LevitDistilled if distilled else Levit,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        **model_cfg,\n    )\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.conv1.linear', 'classifier': ('head.linear', 'head_dist.linear'),\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    # weights in nn.Linear mode\n    
'levit_128s.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'levit_128.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'levit_192.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'levit_256.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'levit_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n\n    # weights in nn.Conv2d mode\n    'levit_conv_128s.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        pool_size=(4, 4),\n    ),\n    'levit_conv_128.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        pool_size=(4, 4),\n    ),\n    'levit_conv_192.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        pool_size=(4, 4),\n    ),\n    'levit_conv_256.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        pool_size=(4, 4),\n    ),\n    'levit_conv_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        pool_size=(4, 4),\n    ),\n\n    'levit_384_s8.untrained': _cfg(classifier='head.linear'),\n    'levit_512_s8.untrained': _cfg(classifier='head.linear'),\n    'levit_512.untrained': _cfg(classifier='head.linear'),\n    'levit_256d.untrained': _cfg(classifier='head.linear'),\n    'levit_512d.untrained': _cfg(classifier='head.linear'),\n\n    'levit_conv_384_s8.untrained': _cfg(classifier='head.linear'),\n    'levit_conv_512_s8.untrained': _cfg(classifier='head.linear'),\n    'levit_conv_512.untrained': _cfg(classifier='head.linear'),\n    'levit_conv_256d.untrained': _cfg(classifier='head.linear'),\n    'levit_conv_512d.untrained': _cfg(classifier='head.linear'),\n})\n\n\n@register_model\ndef levit_128s(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_128s', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef levit_128(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_128', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef levit_192(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_192', pretrained=pretrained, 
**kwargs)\n\n\n@register_model\ndef levit_256(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_256', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef levit_384(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_384', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef levit_384_s8(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_384_s8', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef levit_512_s8(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_512_s8', pretrained=pretrained, distilled=False, **kwargs)\n\n\n@register_model\ndef levit_512(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_512', pretrained=pretrained, distilled=False, **kwargs)\n\n\n@register_model\ndef levit_256d(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_256d', pretrained=pretrained, distilled=False, **kwargs)\n\n\n@register_model\ndef levit_512d(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_512d', pretrained=pretrained, distilled=False, **kwargs)\n\n\n@register_model\ndef levit_conv_128s(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_conv_128s', pretrained=pretrained, use_conv=True, **kwargs)\n\n\n@register_model\ndef levit_conv_128(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_conv_128', pretrained=pretrained, use_conv=True, **kwargs)\n\n\n@register_model\ndef levit_conv_192(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_conv_192', pretrained=pretrained, use_conv=True, **kwargs)\n\n\n@register_model\ndef levit_conv_256(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_conv_256', pretrained=pretrained, use_conv=True, **kwargs)\n\n\n@register_model\ndef levit_conv_384(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_conv_384', pretrained=pretrained, use_conv=True, **kwargs)\n\n\n@register_model\ndef 
levit_conv_384_s8(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_conv_384_s8', pretrained=pretrained, use_conv=True, **kwargs)\n\n\n@register_model\ndef levit_conv_512_s8(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_conv_512_s8', pretrained=pretrained, use_conv=True, distilled=False, **kwargs)\n\n\n@register_model\ndef levit_conv_512(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_conv_512', pretrained=pretrained, use_conv=True, distilled=False, **kwargs)\n\n\n@register_model\ndef levit_conv_256d(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_conv_256d', pretrained=pretrained, use_conv=True, distilled=False, **kwargs)\n\n\n@register_model\ndef levit_conv_512d(pretrained=False, **kwargs) -> Levit:\n    return create_levit('levit_conv_512d', pretrained=pretrained, use_conv=True, distilled=False, **kwargs)\n\n"
  },
  {
    "path": "timm/models/mambaout.py",
    "content": "\"\"\"\nMambaOut models for image classification.\nSome implementations are modified from:\ntimm (https://github.com/rwightman/pytorch-image-models),\nMetaFormer (https://github.com/sail-sg/metaformer),\nInceptionNeXt (https://github.com/sail-sg/inceptionnext)\n\"\"\"\nfrom collections import OrderedDict\nfrom typing import List, Optional, Tuple, Type, Union\n\nimport torch\nfrom torch import nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import trunc_normal_, DropPath, calculate_drop_path_rates, LayerNorm, LayerScale, ClNormMlpClassifierHead, get_act_layer\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n\nclass Stem(nn.Module):\n    r\"\"\" Code modified from InternImage:\n        https://github.com/OpenGVLab/InternImage\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int = 3,\n            out_chs: int = 96,\n            mid_norm: bool = True,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv1 = nn.Conv2d(\n            in_chs,\n            out_chs // 2,\n            kernel_size=3,\n            stride=2,\n            padding=1,\n            **dd,\n        )\n        self.norm1 = norm_layer(out_chs // 2, **dd) if mid_norm else None\n        self.act = act_layer()\n        self.conv2 = nn.Conv2d(\n            out_chs // 2,\n            out_chs,\n            kernel_size=3,\n            stride=2,\n            padding=1,\n            **dd,\n        )\n        self.norm2 = norm_layer(out_chs, **dd)\n\n    def forward(self, x):\n        x = self.conv1(x)\n        if self.norm1 is not None:\n            x = x.permute(0, 2, 3, 1)\n           
 x = self.norm1(x)\n            x = x.permute(0, 3, 1, 2)\n        x = self.act(x)\n        x = self.conv2(x)\n        x = x.permute(0, 2, 3, 1)\n        x = self.norm2(x)\n        return x\n\n\nclass DownsampleNormFirst(nn.Module):\n\n    def __init__(\n            self,\n            in_chs: int = 96,\n            out_chs: int = 198,\n            norm_layer: Type[nn.Module] = LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.norm = norm_layer(in_chs, **dd)\n        self.conv = nn.Conv2d(\n            in_chs,\n            out_chs,\n            kernel_size=3,\n            stride=2,\n            padding=1,\n            **dd,\n        )\n\n    def forward(self, x):\n        x = self.norm(x)\n        x = x.permute(0, 3, 1, 2)\n        x = self.conv(x)\n        x = x.permute(0, 2, 3, 1)\n        return x\n\n\nclass Downsample(nn.Module):\n\n    def __init__(\n            self,\n            in_chs: int = 96,\n            out_chs: int = 198,\n            norm_layer: Type[nn.Module] = LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv = nn.Conv2d(\n            in_chs,\n            out_chs,\n            kernel_size=3,\n            stride=2,\n            padding=1,\n            **dd,\n        )\n        self.norm = norm_layer(out_chs, **dd)\n\n    def forward(self, x):\n        x = x.permute(0, 3, 1, 2)\n        x = self.conv(x)\n        x = x.permute(0, 2, 3, 1)\n        x = self.norm(x)\n        return x\n\n\nclass MlpHead(nn.Module):\n    \"\"\" MLP classification head\n    \"\"\"\n\n    def __init__(\n            self,\n            in_features: int,\n            num_classes: int = 1000,\n            pool_type: str = 'avg',\n            act_layer: Type[nn.Module] = nn.GELU,\n            mlp_ratio: Optional[int] = 4,\n            norm_layer: 
Type[nn.Module] = LayerNorm,\n            drop_rate: float = 0.,\n            bias: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if mlp_ratio is not None:\n            hidden_size = int(mlp_ratio * in_features)\n        else:\n            hidden_size = None\n        self.pool_type = pool_type\n        self.in_features = in_features\n        self.hidden_size = hidden_size or in_features\n\n        self.norm = norm_layer(in_features, **dd)\n        if hidden_size:\n            self.pre_logits = nn.Sequential(OrderedDict([\n                ('fc', nn.Linear(in_features, hidden_size, **dd)),\n                ('act', act_layer()),\n                ('norm', norm_layer(hidden_size, **dd))\n            ]))\n            self.num_features = hidden_size\n        else:\n            self.num_features = in_features\n            self.pre_logits = nn.Identity()\n\n        self.fc = nn.Linear(self.num_features, num_classes, bias=bias, **dd) if num_classes > 0 else nn.Identity()\n        self.head_dropout = nn.Dropout(drop_rate)\n\n    def reset(self, num_classes: int, pool_type: Optional[str] = None, reset_other: bool = False):\n        if pool_type is not None:\n            self.pool_type = pool_type\n        if reset_other:\n            self.norm = nn.Identity()\n            self.pre_logits = nn.Identity()\n            self.num_features = self.in_features\n        self.fc = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity()\n\n    def forward(self, x, pre_logits: bool = False):\n        if self.pool_type == 'avg':\n            x = x.mean((1, 2))\n        x = self.norm(x)\n        x = self.pre_logits(x)\n        x = self.head_dropout(x)\n        if pre_logits:\n            return x\n        x = self.fc(x)\n        return x\n\n\nclass GatedConvBlock(nn.Module):\n    r\"\"\" Our implementation of Gated CNN Block: 
https://arxiv.org/pdf/1612.08083\n    Args:\n        conv_ratio: control the number of channels to conduct depthwise convolution.\n            Conduct convolution on partial channels can improve paraitcal efficiency.\n            The idea of partial channels is from ShuffleNet V2 (https://arxiv.org/abs/1807.11164) and\n            also used by InceptionNeXt (https://arxiv.org/abs/2303.16900) and FasterNet (https://arxiv.org/abs/2303.03667)\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            expansion_ratio: float = 8 / 3,\n            kernel_size: int = 7,\n            conv_ratio: float = 1.0,\n            ls_init_value: Optional[float] = None,\n            norm_layer: Type[nn.Module] = LayerNorm,\n            act_layer: Type[nn.Module] = nn.GELU,\n            drop_path: float = 0.,\n            device=None,\n            dtype=None,\n            **kwargs\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.norm = norm_layer(dim, **dd)\n        hidden = int(expansion_ratio * dim)\n        self.fc1 = nn.Linear(dim, hidden * 2, **dd)\n        self.act = act_layer()\n        conv_channels = int(conv_ratio * dim)\n        self.split_indices = (hidden, hidden - conv_channels, conv_channels)\n        self.conv = nn.Conv2d(\n            conv_channels,\n            conv_channels,\n            kernel_size=kernel_size,\n            padding=kernel_size // 2,\n            groups=conv_channels,\n            **dd,\n        )\n        self.fc2 = nn.Linear(hidden, dim, **dd)\n        self.ls = LayerScale(dim, **dd) if ls_init_value is not None else nn.Identity()\n        self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n    def forward(self, x):\n        shortcut = x  # [B, H, W, C]\n        x = self.norm(x)\n        x = self.fc1(x)\n        g, i, c = torch.split(x, self.split_indices, dim=-1)\n        c = c.permute(0, 3, 1, 2)  # [B, H, W, C] -> [B, C, H, W]\n        c = self.conv(c)\n        c = c.permute(0, 2, 3, 1)  # [B, C, H, W] -> [B, H, W, C]\n        x = self.fc2(self.act(g) * torch.cat((i, c), dim=-1))\n        x = self.ls(x)\n        x = self.drop_path(x)\n        return x + shortcut\n\n\nclass MambaOutStage(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            dim_out: Optional[int] = None,\n            depth: int = 4,\n            expansion_ratio: float = 8 / 3,\n            kernel_size: int = 7,\n            conv_ratio: float = 1.0,\n            downsample: str = '',\n            ls_init_value: Optional[float] = None,\n            norm_layer: Type[nn.Module] = LayerNorm,\n            act_layer: Type[nn.Module] = nn.GELU,\n            drop_path: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        dim_out = dim_out or dim\n        self.grad_checkpointing = False\n\n        if downsample == 'conv':\n            self.downsample = Downsample(dim, dim_out, norm_layer=norm_layer, **dd)\n        elif downsample == 'conv_nf':\n            self.downsample = DownsampleNormFirst(dim, dim_out, norm_layer=norm_layer, **dd)\n        else:\n            assert dim == dim_out\n            self.downsample = nn.Identity()\n\n        self.blocks = nn.Sequential(*[\n            GatedConvBlock(\n                dim=dim_out,\n                expansion_ratio=expansion_ratio,\n                kernel_size=kernel_size,\n                conv_ratio=conv_ratio,\n                ls_init_value=ls_init_value,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                drop_path=drop_path[j] if 
isinstance(drop_path, (list, tuple)) else drop_path,\n                **dd,\n            )\n            for j in range(depth)\n        ])\n\n    def forward(self, x):\n        x = self.downsample(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        return x\n\n\nclass MambaOut(nn.Module):\n    r\"\"\" MetaFormer\n        A PyTorch impl of : `MetaFormer Baselines for Vision`  -\n          https://arxiv.org/abs/2210.13452\n\n    Args:\n        in_chans (int): Number of input image channels. Default: 3.\n        num_classes (int): Number of classes for classification head. Default: 1000.\n        depths (list or tuple): Number of blocks at each stage. Default: [3, 3, 9, 3].\n        dims (int): Feature dimension at each stage. Default: [96, 192, 384, 576].\n        downsample_layers: (list or tuple): Downsampling layers before each stage.\n        drop_path_rate (float): Stochastic depth rate. Default: 0.\n        output_norm: norm before classifier head. Default: partial(nn.LayerNorm, eps=1e-6).\n        head_fn: classification head. Default: nn.Linear.\n        head_dropout (float): dropout for MLP classifier. Default: 0.\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            depths: Tuple[int, ...] = (3, 3, 9, 3),\n            dims: Tuple[int, ...] 
= (96, 192, 384, 576),\n            norm_layer: Type[nn.Module] = LayerNorm,\n            act_layer: Type[nn.Module] = nn.GELU,\n            conv_ratio: float = 1.0,\n            expansion_ratio: float = 8/3,\n            kernel_size: int = 7,\n            stem_mid_norm: bool = True,\n            ls_init_value: Optional[float] = None,\n            downsample: str = 'conv',\n            drop_path_rate: float = 0.,\n            drop_rate: float = 0.,\n            head_fn: str = 'default',\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.output_fmt = 'NHWC'\n        if not isinstance(depths, (list, tuple)):\n            depths = [depths]  # it means the model has only one stage\n        if not isinstance(dims, (list, tuple)):\n            dims = [dims]\n        act_layer = get_act_layer(act_layer)\n\n        num_stage = len(depths)\n        self.num_stage = num_stage\n        self.feature_info = []\n\n        self.stem = Stem(\n            in_chans,\n            dims[0],\n            mid_norm=stem_mid_norm,\n            act_layer=act_layer,\n            norm_layer=norm_layer,\n            **dd,\n        )\n        prev_dim = dims[0]\n        dp_rates = calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)\n        cur = 0\n        curr_stride = 4\n        self.stages = nn.Sequential()\n        for i in range(num_stage):\n            dim = dims[i]\n            stride = 2 if curr_stride == 2 or i > 0 else 1\n            curr_stride *= stride\n            stage = MambaOutStage(\n                dim=prev_dim,\n                dim_out=dim,\n                depth=depths[i],\n                kernel_size=kernel_size,\n                conv_ratio=conv_ratio,\n                expansion_ratio=expansion_ratio,\n                downsample=downsample if i > 0 else 
'',\n                ls_init_value=ls_init_value,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                drop_path=dp_rates[i],\n                **dd,\n            )\n            self.stages.append(stage)\n            prev_dim = dim\n            # NOTE feature_info use currently assumes stage 0 == stride 1, rest are stride 2\n            self.feature_info += [dict(num_chs=prev_dim, reduction=curr_stride, module=f'stages.{i}')]\n            cur += depths[i]\n\n        if head_fn == 'default':\n            # specific to this model, unusual norm -> pool -> fc -> act -> norm -> fc combo\n            self.head = MlpHead(\n                prev_dim,\n                num_classes,\n                pool_type=global_pool,\n                drop_rate=drop_rate,\n                norm_layer=norm_layer,\n                **dd,\n            )\n        else:\n            # more typical norm -> pool -> fc -> act -> fc\n            self.head = ClNormMlpClassifierHead(\n                prev_dim,\n                num_classes,\n                hidden_size=int(prev_dim * 4),\n                pool_type=global_pool,\n                norm_layer=norm_layer,\n                drop_rate=drop_rate,\n                **dd,\n            )\n        self.num_features = prev_dim\n        self.head_hidden_size = self.head.num_features\n\n        self.apply(self._init_weights)\n\n    def _init_weights(self, m):\n        if isinstance(m, (nn.Conv2d, nn.Linear)):\n            trunc_normal_(m.weight, std=.02)\n            if m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^stem',\n            blocks=r'^stages\\.(\\d+)' if coarse else [\n                (r'^stages\\.(\\d+)\\.downsample', (0,)),  # blocks\n                (r'^stages\\.(\\d+)\\.blocks\\.(\\d+)', None),\n            ]\n        )\n\n    @torch.jit.ignore\n    def 
set_grad_checkpointing(self, enable=True):\n        for s in self.stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW', 'NHWC'), 'Output format must be one of NCHW or NHWC.'\n        channel_first = output_fmt == 'NCHW'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.stem(x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if channel_first:\n           
 # reshape to BCHW output format\n            intermediates = [y.permute(0, 3, 1, 2).contiguous() for y in intermediates]\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        x = self.stages(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n        return x\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    if 'model' in state_dict:\n        state_dict = state_dict['model']\n    if 'stem.conv1.weight' in state_dict:\n        return state_dict\n\n    import re\n    out_dict = {}\n    for k, v in state_dict.items():\n        k = k.replace('downsample_layers.0.', 'stem.')\n        k = re.sub(r'stages.([0-9]+).([0-9]+)', r'stages.\\1.blocks.\\2', k)\n        k = re.sub(r'downsample_layers.([0-9]+)', r'stages.\\1.downsample', k)\n        # remap head names\n        if k.startswith('norm.'):\n            # this is moving to head since it's after the pooling\n            k = k.replace('norm.', 'head.norm.')\n        elif k.startswith('head.'):\n            k = k.replace('head.fc1.', 'head.pre_logits.fc.')\n            k = k.replace('head.norm.', 'head.pre_logits.norm.')\n            k = 
k.replace('head.fc2.', 'head.fc.')\n        out_dict[k] = v\n\n    return out_dict\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'test_input_size': (3, 288, 288),\n        'pool_size': (7, 7), 'crop_pct': 1.0, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.conv1', 'classifier': 'head.fc',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    # original weights\n    'mambaout_femto.in1k': _cfg(\n        hf_hub_id='timm/'),\n    'mambaout_kobe.in1k': _cfg(\n        hf_hub_id='timm/'),\n    'mambaout_tiny.in1k': _cfg(\n        hf_hub_id='timm/'),\n    'mambaout_small.in1k': _cfg(\n        hf_hub_id='timm/'),\n    'mambaout_base.in1k': _cfg(\n        hf_hub_id='timm/'),\n\n    # timm experiments below\n    'mambaout_small_rw.sw_e450_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'mambaout_base_short_rw.sw_e500_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_crop_pct=1.0,\n    ),\n    'mambaout_base_tall_rw.sw_e500_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_crop_pct=1.0,\n    ),\n    'mambaout_base_wide_rw.sw_e500_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_crop_pct=1.0,\n    ),\n    'mambaout_base_plus_rw.sw_e150_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'mambaout_base_plus_rw.sw_e150_r384_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), test_input_size=(3, 384, 384), crop_mode='squash', pool_size=(12, 12),\n    ),\n    'mambaout_base_plus_rw.sw_e150_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n    ),\n    'test_mambaout': _cfg(input_size=(3, 160, 160), test_input_size=(3, 192, 192), pool_size=(5, 5)),\n})\n\n\ndef _create_mambaout(variant, pretrained=False, **kwargs):\n    model = 
build_model_with_cfg(\n        MambaOut, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=(0, 1, 2, 3), flatten_sequential=True),\n        **kwargs,\n    )\n    return model\n\n\n# a series of MambaOut models\n@register_model\ndef mambaout_femto(pretrained=False, **kwargs):\n    model_args = dict(depths=(3, 3, 9, 3), dims=(48, 96, 192, 288))\n    return _create_mambaout('mambaout_femto', pretrained=pretrained, **dict(model_args, **kwargs))\n\n# Kobe Memorial Version with 24 Gated CNN blocks\n@register_model\ndef mambaout_kobe(pretrained=False, **kwargs):\n    model_args = dict(depths=[3, 3, 15, 3], dims=[48, 96, 192, 288])\n    return _create_mambaout('mambaout_kobe', pretrained=pretrained, **dict(model_args, **kwargs))\n\n@register_model\ndef mambaout_tiny(pretrained=False, **kwargs):\n    model_args = dict(depths=[3, 3, 9, 3], dims=[96, 192, 384, 576])\n    return _create_mambaout('mambaout_tiny', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef mambaout_small(pretrained=False, **kwargs):\n    model_args = dict(depths=[3, 4, 27, 3], dims=[96, 192, 384, 576])\n    return _create_mambaout('mambaout_small', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef mambaout_base(pretrained=False, **kwargs):\n    model_args = dict(depths=[3, 4, 27, 3], dims=[128, 256, 512, 768])\n    return _create_mambaout('mambaout_base', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef mambaout_small_rw(pretrained=False, **kwargs):\n    model_args = dict(\n        depths=[3, 4, 27, 3],\n        dims=[96, 192, 384, 576],\n        stem_mid_norm=False,\n        downsample='conv_nf',\n        ls_init_value=1e-6,\n        head_fn='norm_mlp',\n    )\n    return _create_mambaout('mambaout_small_rw', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef mambaout_base_short_rw(pretrained=False, **kwargs):\n    model_args = 
dict(\n        depths=(3, 3, 25, 3),\n        dims=(128, 256, 512, 768),\n        expansion_ratio=3.0,\n        conv_ratio=1.25,\n        stem_mid_norm=False,\n        downsample='conv_nf',\n        ls_init_value=1e-6,\n        head_fn='norm_mlp',\n    )\n    return _create_mambaout('mambaout_base_short_rw', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef mambaout_base_tall_rw(pretrained=False, **kwargs):\n    model_args = dict(\n        depths=(3, 4, 30, 3),\n        dims=(128, 256, 512, 768),\n        expansion_ratio=2.5,\n        conv_ratio=1.25,\n        stem_mid_norm=False,\n        downsample='conv_nf',\n        ls_init_value=1e-6,\n        head_fn='norm_mlp',\n    )\n    return _create_mambaout('mambaout_base_tall_rw', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef mambaout_base_wide_rw(pretrained=False, **kwargs):\n    model_args = dict(\n        depths=(3, 4, 27, 3),\n        dims=(128, 256, 512, 768),\n        expansion_ratio=3.0,\n        conv_ratio=1.5,\n        stem_mid_norm=False,\n        downsample='conv_nf',\n        ls_init_value=1e-6,\n        act_layer='silu',\n        head_fn='norm_mlp',\n    )\n    return _create_mambaout('mambaout_base_wide_rw', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef mambaout_base_plus_rw(pretrained=False, **kwargs):\n    model_args = dict(\n        depths=(3, 4, 30, 3),\n        dims=(128, 256, 512, 768),\n        expansion_ratio=3.0,\n        conv_ratio=1.5,\n        stem_mid_norm=False,\n        downsample='conv_nf',\n        ls_init_value=1e-6,\n        act_layer='silu',\n        head_fn='norm_mlp',\n    )\n    return _create_mambaout('mambaout_base_plus_rw', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef test_mambaout(pretrained=False, **kwargs):\n    model_args = dict(\n        depths=(1, 1, 3, 1),\n        dims=(16, 32, 48, 64),\n        expansion_ratio=3,\n        
stem_mid_norm=False,\n        downsample='conv_nf',\n        ls_init_value=1e-4,\n        act_layer='silu',\n        head_fn='norm_mlp',\n    )\n    return _create_mambaout('test_mambaout', pretrained=pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/maxxvit.py",
    "content": "\"\"\" MaxVit and CoAtNet Vision Transformer - CNN Hybrids in PyTorch\n\nThis is a from-scratch implementation of both CoAtNet and MaxVit in PyTorch.\n\n99% of the implementation was done from papers, however last minute some adjustments were made\nbased on the (as yet unfinished?) public code release https://github.com/google-research/maxvit\n\nThere are multiple sets of models defined for both architectures. Typically, names with a\n `_rw` suffix are my own original configs prior to referencing https://github.com/google-research/maxvit.\nThese configs work well and appear to be a bit faster / lower resource than the paper.\n\nThe models without extra prefix / suffix' (coatnet_0_224, maxvit_tiny_224, etc), are intended to\nmatch paper, BUT, without any official pretrained weights it's difficult to confirm a 100% match.\n\nPapers:\n\nMaxViT: Multi-Axis Vision Transformer - https://arxiv.org/abs/2204.01697\n@article{tu2022maxvit,\n  title={MaxViT: Multi-Axis Vision Transformer},\n  author={Tu, Zhengzhong and Talebi, Hossein and Zhang, Han and Yang, Feng and Milanfar, Peyman and Bovik, Alan and Li, Yinxiao},\n  journal={ECCV},\n  year={2022},\n}\n\nCoAtNet: Marrying Convolution and Attention for All Data Sizes - https://arxiv.org/abs/2106.04803\n@article{DBLP:journals/corr/abs-2106-04803,\n  author    = {Zihang Dai and Hanxiao Liu and Quoc V. 
Le and Mingxing Tan},\n  title     = {CoAtNet: Marrying Convolution and Attention for All Data Sizes},\n  journal   = {CoRR},\n  volume    = {abs/2106.04803},\n  year      = {2021}\n}\n\nHacked together by / Copyright 2022, Ross Wightman\n\"\"\"\n\nimport math\nfrom collections import OrderedDict\nfrom dataclasses import dataclass, replace, field\nfrom functools import partial\nfrom typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union\n\nimport torch\nfrom torch import nn\nfrom torch.jit import Final\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import (\n    Mlp,\n    ConvMlp,\n    DropPath,\n    calculate_drop_path_rates,\n    LayerNorm,\n    LayerScale,\n    LayerScale2d,\n    ClassifierHead,\n    NormMlpClassifierHead,\n    create_attn,\n    get_act_layer,\n    get_norm_layer,\n    get_norm_act_layer,\n    create_conv2d,\n    create_pool2d,\n    trunc_normal_tf_,\n    to_2tuple,\n    extend_tuple,\n    make_divisible,\n    _assert,\n    RelPosMlp,\n    RelPosBias,\n    RelPosBiasTf,\n    use_fused_attn,\n    resize_rel_pos_bias_table,\n)\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_function\nfrom ._manipulate import named_apply, checkpoint_seq\nfrom ._registry import generate_default_cfgs, register_model\n\n__all__ = ['MaxxVitCfg', 'MaxxVitConvCfg', 'MaxxVitTransformerCfg', 'MaxxVit']\n\n\n@dataclass\nclass MaxxVitTransformerCfg:\n    \"\"\"Configuration for MaxxVit transformer blocks.\"\"\"\n    dim_head: int = 32\n    head_first: bool = True  # head ordering in qkv channel dim\n    expand_ratio: float = 4.0\n    expand_first: bool = True\n    shortcut_bias: bool = True\n    attn_bias: bool = True\n    attn_drop: float = 0.\n    proj_drop: float = 0.\n    pool_type: str = 'avg2'\n    rel_pos_type: str = 'bias'\n    rel_pos_dim: int = 512  # for relative position types w/ MLP\n    partition_ratio: int = 32\n    
window_size: Optional[Tuple[int, int]] = None\n    grid_size: Optional[Tuple[int, int]] = None\n    no_block_attn: bool = False  # disable window block attention for maxvit (ie only grid)\n    use_nchw_attn: bool = False  # for MaxViT variants (not used for CoAt), keep tensors in NCHW order\n    init_values: Optional[float] = None\n    act_layer: str = 'gelu'\n    norm_layer: str = 'layernorm2d'\n    norm_layer_cl: str = 'layernorm'\n    norm_eps: float = 1e-6\n\n    def __post_init__(self):\n        if self.grid_size is not None:\n            self.grid_size = to_2tuple(self.grid_size)\n        if self.window_size is not None:\n            self.window_size = to_2tuple(self.window_size)\n            if self.grid_size is None:\n                self.grid_size = self.window_size\n\n\n@dataclass\nclass MaxxVitConvCfg:\n    \"\"\"Configuration for MaxxVit convolution blocks.\"\"\"\n    block_type: str = 'mbconv'\n    expand_ratio: float = 4.0\n    expand_output: bool = True  # calculate expansion channels from output (vs input chs)\n    kernel_size: int = 3\n    group_size: int = 1  # 1 == depthwise\n    pre_norm_act: bool = False  # activation after pre-norm\n    output_bias: bool = True  # bias for shortcut + final 1x1 projection conv\n    stride_mode: str = 'dw'  # stride done via one of 'pool', '1x1', 'dw'\n    pool_type: str = 'avg2'\n    downsample_pool_type: str = 'avg2'\n    padding: str = ''\n    attn_early: bool = False  # apply attn between conv2 and norm2, instead of after norm2\n    attn_layer: str = 'se'\n    attn_act_layer: str = 'silu'\n    attn_ratio: float = 0.25\n    init_values: Optional[float] = 1e-6  # for ConvNeXt block, ignored by MBConv\n    act_layer: str = 'gelu'\n    norm_layer: str = ''\n    norm_layer_cl: str = ''\n    norm_eps: Optional[float] = None\n\n    def __post_init__(self):\n        # mbconv vs convnext blocks have different defaults, set in post_init to avoid explicit config args\n        assert self.block_type in ('mbconv', 
'convnext')\n        use_mbconv = self.block_type == 'mbconv'\n        if not self.norm_layer:\n            self.norm_layer = 'batchnorm2d' if use_mbconv else 'layernorm2d'\n        if not self.norm_layer_cl and not use_mbconv:\n            self.norm_layer_cl = 'layernorm'\n        if self.norm_eps is None:\n            self.norm_eps = 1e-5 if use_mbconv else 1e-6\n        self.downsample_pool_type = self.downsample_pool_type or self.pool_type\n\n\n@dataclass\nclass MaxxVitCfg:\n    \"\"\"Configuration for MaxxVit models.\"\"\"\n    embed_dim: Tuple[int, ...] = (96, 192, 384, 768)\n    depths: Tuple[int, ...] = (2, 3, 5, 2)\n    block_type: Tuple[Union[str, Tuple[str, ...]], ...] = ('C', 'C', 'T', 'T')\n    stem_width: Union[int, Tuple[int, int]] = 64\n    stem_bias: bool = False\n    conv_cfg: MaxxVitConvCfg = field(default_factory=MaxxVitConvCfg)\n    transformer_cfg: MaxxVitTransformerCfg = field(default_factory=MaxxVitTransformerCfg)\n    head_hidden_size: Optional[int] = None\n    weight_init: str = 'vit_eff'\n\n\nclass Attention2d(nn.Module):\n    \"\"\"Multi-head attention for 2D NCHW tensors.\"\"\"\n    fused_attn: Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            dim_out: Optional[int] = None,\n            dim_head: int = 32,\n            bias: bool = True,\n            expand_first: bool = True,\n            head_first: bool = True,\n            rel_pos_cls: Optional[Callable] = None,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            dim: Input dimension.\n            dim_out: Output dimension (defaults to input dimension).\n            dim_head: Dimension per attention head.\n            bias: Whether to use bias in qkv and projection.\n            expand_first: Whether to expand channels before or after qkv.\n            head_first: Whether heads are first in tensor layout.\n            
rel_pos_cls: Relative position class to use.
            attn_drop: Attention dropout rate.
            proj_drop: Projection dropout rate.
        """
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        dim_out = dim_out or dim
        # NOTE(review): unlike AttentionCl below there is no divisibility assert here,
        # so num_heads is floor division of dim_attn by dim_head
        dim_attn = dim_out if expand_first else dim
        self.num_heads = dim_attn // dim_head
        self.dim_head = dim_head
        self.head_first = head_first
        self.scale = dim_head ** -0.5
        self.fused_attn = use_fused_attn()

        # 1x1 convs act as per-position linear projections in NCHW layout
        self.qkv = nn.Conv2d(dim, dim_attn * 3, 1, bias=bias, **dd)
        self.rel_pos = rel_pos_cls(num_heads=self.num_heads, **dd) if rel_pos_cls else None
        self.attn_drop = nn.Dropout(attn_drop)
        self.proj = nn.Conv2d(dim_attn, dim_out, 1, bias=bias, **dd)
        self.proj_drop = nn.Dropout(proj_drop)

    def forward(self, x: torch.Tensor, shared_rel_pos: Optional[torch.Tensor] = None) -> torch.Tensor:
        """Multi-head self-attention over the flattened H*W positions of an NCHW tensor.

        Args:
            x: Input of shape (B, C, H, W).
            shared_rel_pos: Optional relative position bias shared across blocks.

        Returns:
            Tensor of shape (B, dim_out, H, W).
        """
        B, C, H, W = x.shape

        if self.head_first:
            # heads-first packing: (B, heads, 3 * dim_head, N) -> chunk into q, k, v
            q, k, v = self.qkv(x).view(B, self.num_heads, self.dim_head * 3, -1).chunk(3, dim=2)
        else:
            # qkv-first packing: (B, 3, heads, dim_head, N) -> unbind q, k, v
            q, k, v = self.qkv(x).reshape(B, 3, self.num_heads, self.dim_head, -1).unbind(1)

        if self.fused_attn:
            attn_bias = None
            if self.rel_pos is not None:
                attn_bias = self.rel_pos.get_bias()
            elif shared_rel_pos is not None:
                attn_bias = shared_rel_pos

            # SDPA wants (..., seq, dim_head); transpose in, transpose back out to NCHW
            x = torch.nn.functional.scaled_dot_product_attention(
                q.transpose(-1, -2).contiguous(),
                k.transpose(-1, -2).contiguous(),
                v.transpose(-1, -2).contiguous(),
                attn_mask=attn_bias,
                dropout_p=self.attn_drop.p if self.training else 0.,
            ).transpose(-1, -2).reshape(B, -1, H, W)
        else:
            q = q * self.scale
            attn = q.transpose(-2, -1) @ k  # (B, heads, N, N)
            if self.rel_pos is not None:
                attn = self.rel_pos(attn)
            elif shared_rel_pos is not None:
                attn = attn + shared_rel_pos
            attn = attn.softmax(dim=-1)
            attn = self.attn_drop(attn)
            x = (v @ attn.transpose(-2, -1)).view(B, -1, H, W)

        x = self.proj(x)
        x = self.proj_drop(x)
        return x


class AttentionCl(nn.Module):
    """Channels-last multi-head attention (B, ..., C)."""
    fused_attn: Final[bool]

    def __init__(
            self,
            dim: int,
            dim_out: Optional[int] = None,
            dim_head: int = 32,
            bias: bool = True,
            expand_first: bool = True,
            head_first: bool = True,
            rel_pos_cls: Optional[Callable] = None,
            attn_drop: float = 0.,
            proj_drop: float = 0.,
            device=None,
            dtype=None,
    ):
        """
        Args:
            dim: Input dimension.
            dim_out: Output dimension (defaults to input dimension).
            dim_head: Dimension per attention head.
            bias: Whether to use bias in qkv and projection.
            expand_first: Whether to expand channels before or after qkv.
            head_first: Whether heads are first in tensor layout.
            rel_pos_cls: Relative position class to use.
            attn_drop: Attention dropout rate.
            proj_drop: Projection dropout rate.
        """
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        dim_out = dim_out or dim
        # only expand early when it would actually widen the attention dim
        dim_attn = dim_out if expand_first and dim_out > dim else dim
        assert dim_attn % dim_head == 0, 'attn dim should be divisible by head_dim'
        self.num_heads = dim_attn // dim_head
        self.dim_head = dim_head
        self.head_first = head_first
        self.scale = dim_head ** -0.5
        self.fused_attn = use_fused_attn()

        self.qkv = nn.Linear(dim, dim_attn * 3, 
bias=bias, **dd)
        self.rel_pos = rel_pos_cls(num_heads=self.num_heads, **dd) if rel_pos_cls else None
        self.attn_drop = nn.Dropout(attn_drop)
        self.proj = nn.Linear(dim_attn, dim_out, bias=bias, **dd)
        self.proj_drop = nn.Dropout(proj_drop)

    def forward(self, x: torch.Tensor, shared_rel_pos: Optional[torch.Tensor] = None) -> torch.Tensor:
        """Multi-head self-attention over a channels-last tensor (B, ..., C).

        Args:
            x: Input with channels in the last dim; leading dims are restored on output.
            shared_rel_pos: Optional relative position bias shared across blocks.

        Returns:
            Tensor with the same leading shape as input and dim_out channels.
        """
        B = x.shape[0]
        restore_shape = x.shape[:-1]

        if self.head_first:
            # heads-first packing: (B, heads, N, 3 * dim_head) -> chunk into q, k, v
            q, k, v = self.qkv(x).view(B, -1, self.num_heads, self.dim_head * 3).transpose(1, 2).chunk(3, dim=3)
        else:
            # qkv-first packing: (B, heads, 3, N, dim_head) -> unbind q, k, v
            q, k, v = self.qkv(x).reshape(B, -1, 3, self.num_heads, self.dim_head).transpose(1, 3).unbind(2)

        if self.fused_attn:
            attn_bias = None
            if self.rel_pos is not None:
                attn_bias = self.rel_pos.get_bias()
            elif shared_rel_pos is not None:
                attn_bias = shared_rel_pos

            x = torch.nn.functional.scaled_dot_product_attention(
                q, k, v,
                attn_mask=attn_bias,
                dropout_p=self.attn_drop.p if self.training else 0.,
            )
        else:
            q = q * self.scale
            attn = q @ k.transpose(-2, -1)
            if self.rel_pos is not None:
                attn = self.rel_pos(attn, shared_rel_pos=shared_rel_pos)
            elif shared_rel_pos is not None:
                attn = attn + shared_rel_pos
            attn = attn.softmax(dim=-1)
            attn = self.attn_drop(attn)
            x = attn @ v

        x = x.transpose(1, 2).reshape(restore_shape + (-1,))
        x = self.proj(x)
        x = self.proj_drop(x)
        return x


class Downsample2d(nn.Module):
    """A downsample pooling module supporting several maxpool and avgpool modes.

    * 'max' - MaxPool2d w/ kernel_size 3, stride 2, padding 1
    * 'max2' - MaxPool2d w/ kernel_size = stride = 2
    * 'avg' - AvgPool2d w/ kernel_size 3, stride 2, padding 1
    * 'avg2' - AvgPool2d w/ kernel_size = stride = 2
    """

    def __init__(
            self,
            dim: int,
            dim_out: int,
            pool_type: str = 'avg2',
            padding: str = '',
            bias: bool = True,
            device=None,
            dtype=None,
    ):
        """
        Args:
            dim: Input dimension.
            dim_out: Output dimension.
            pool_type: Type of pooling operation.
            padding: Padding mode; empty string selects the per-pool default below.
            bias: Whether to use bias in expansion conv.
        """
        super().__init__()
        assert pool_type in ('max', 'max2', 'avg', 'avg2')
        if pool_type == 'max':
            self.pool = create_pool2d('max', kernel_size=3, stride=2, padding=padding or 1)
        elif pool_type == 'max2':
            self.pool = create_pool2d('max', 2, padding=padding or 0)  # kernel_size == stride == 2
        elif pool_type == 'avg':
            self.pool = create_pool2d(
                'avg', kernel_size=3, stride=2, count_include_pad=False, padding=padding or 1)
        else:
            self.pool = create_pool2d('avg', 2, padding=padding or 0)

        # 1x1 conv only when a channel change is needed, otherwise pass-through
        if dim != dim_out:
            self.expand = nn.Conv2d(dim, dim_out, 1, bias=bias, device=device, dtype=dtype)
        else:
            self.expand = nn.Identity()

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = self.pool(x)  # spatial downsample
        x = self.expand(x)  # expand chs
        return x


def _init_transformer(module: nn.Module, name: str, scheme: str = '') -> None:
    """Initialize transformer module weights.

    Applies to Conv2d/Linear only; other module types are left untouched.
    Schemes: 'normal', 'trunc_normal', 'xavier_normal', default = ViT-like xavier_uniform.
    """
    if isinstance(module, (nn.Conv2d, nn.Linear)):
        if scheme == 'normal':
            nn.init.normal_(module.weight, std=.02)
            if module.bias is not None:
                nn.init.zeros_(module.bias)
        elif scheme == 'trunc_normal':
            trunc_normal_tf_(module.weight, std=.02)
            if module.bias is not None:
                nn.init.zeros_(module.bias)
        elif scheme == 'xavier_normal':
            nn.init.xavier_normal_(module.weight)
            if module.bias is not None:
                nn.init.zeros_(module.bias)
        else:
            # vit like
            nn.init.xavier_uniform_(module.weight)
            if module.bias is not None:
                if 'mlp' in name:
                    nn.init.normal_(module.bias, std=1e-6)
                else:
                    nn.init.zeros_(module.bias)


class TransformerBlock2d(nn.Module):
    """Transformer block with 2D downsampling.

    '2D' NCHW tensor layout

    Some gains can be seen on GPU using a 1D / CL block, BUT w/ the need to switch back/forth to NCHW
    for spatial pooling, the benefit is minimal so ended up using just this variant for CoAt configs.

    This impl was faster on TPU w/ PT XLA than the 1D experiment.
    """

    def __init__(
            self,
            dim: int,
            dim_out: int,
            stride: int = 1,
            rel_pos_cls: Optional[Callable] = None,
            cfg: MaxxVitTransformerCfg = MaxxVitTransformerCfg(),
            drop_path: float = 0.,
            device=None,
            dtype=None,
    ):
        """
        Args:
            dim: Input dimension.
            dim_out: Output dimension.
            stride: Stride for downsampling.
            rel_pos_cls: Relative position class.
            cfg: Transformer block configuration.
            drop_path: Drop path rate.
        """
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        norm_layer = partial(get_norm_layer(cfg.norm_layer), eps=cfg.norm_eps)
        act_layer = get_act_layer(cfg.act_layer)

        if stride == 2:
            # strided block: pooled shortcut, and norm1 also pools before attention
            self.shortcut = Downsample2d(dim, dim_out, pool_type=cfg.pool_type, 
bias=cfg.shortcut_bias, **dd)
            self.norm1 = nn.Sequential(OrderedDict([
                ('norm', norm_layer(dim, **dd)),
                ('down', Downsample2d(dim, dim, pool_type=cfg.pool_type, **dd)),
            ]))
        else:
            assert dim == dim_out
            self.shortcut = nn.Identity()
            self.norm1 = norm_layer(dim, **dd)

        self.attn = Attention2d(
            dim,
            dim_out,
            dim_head=cfg.dim_head,
            expand_first=cfg.expand_first,
            bias=cfg.attn_bias,
            rel_pos_cls=rel_pos_cls,
            attn_drop=cfg.attn_drop,
            proj_drop=cfg.proj_drop,
            **dd,
        )
        self.ls1 = LayerScale2d(dim_out, init_values=cfg.init_values, **dd) if cfg.init_values else nn.Identity()
        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()

        self.norm2 = norm_layer(dim_out, **dd)
        self.mlp = ConvMlp(
            in_features=dim_out,
            hidden_features=int(dim_out * cfg.expand_ratio),
            act_layer=act_layer,
            drop=cfg.proj_drop,
            **dd,
        )
        self.ls2 = LayerScale2d(dim_out, init_values=cfg.init_values, **dd) if cfg.init_values else nn.Identity()
        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()

    def init_weights(self, scheme: str = '') -> None:
        named_apply(partial(_init_transformer, scheme=scheme), self)

    def forward(self, x: torch.Tensor, shared_rel_pos: Optional[torch.Tensor] = None) -> torch.Tensor:
        # pre-norm residual attention then pre-norm residual MLP
        x = self.shortcut(x) + self.drop_path1(self.ls1(self.attn(self.norm1(x), shared_rel_pos=shared_rel_pos)))
        x = x + self.drop_path2(self.ls2(self.mlp(self.norm2(x))))
        return x


def _init_conv(module: nn.Module, name: str, scheme: str = '') -> None:
    """Initialize convolution module weights.

    Same schemes as _init_transformer; default is EfficientNet-style fan-out normal.
    """
    if isinstance(module, nn.Conv2d):
        if scheme == 'normal':
            nn.init.normal_(module.weight, std=.02)
            if module.bias is not None:
                nn.init.zeros_(module.bias)
        elif scheme == 'trunc_normal':
            trunc_normal_tf_(module.weight, std=.02)
            if module.bias is not None:
                nn.init.zeros_(module.bias)
        elif scheme == 'xavier_normal':
            nn.init.xavier_normal_(module.weight)
            if module.bias is not None:
                nn.init.zeros_(module.bias)
        else:
            # efficientnet like
            fan_out = module.kernel_size[0] * module.kernel_size[1] * module.out_channels
            fan_out //= module.groups
            nn.init.normal_(module.weight, 0, math.sqrt(2.0 / fan_out))
            if module.bias is not None:
                nn.init.zeros_(module.bias)


def num_groups(group_size: Optional[int], channels: int) -> int:
    """Calculate number of groups for grouped convolution.

    Args:
        group_size: Channels per group; 0/None selects a normal (single-group) conv.
        channels: Total channels; must be divisible by group_size when given.
    """
    if not group_size:  # 0 or None
        return 1  # normal conv with 1 group
    else:
        # NOTE group_size == 1 -> depthwise conv
        assert channels % group_size == 0
        return channels // group_size


class MbConvBlock(nn.Module):
    """Pre-Norm Conv Block - 1x1 - kxk - 1x1, w/ inverted bottleneck (expand)."""

    def __init__(
            self,
            in_chs: int,
            out_chs: int,
            stride: int = 1,
            dilation: Tuple[int, int] = (1, 1),
            cfg: MaxxVitConvCfg = MaxxVitConvCfg(),
            drop_path: float = 0.,
            device=None,
            dtype=None,
    ):
        """
        Args:
            in_chs: Input channels.
            out_chs: Output channels.
            stride: Stride for conv.
            dilation: Dilation for conv.
            cfg: Convolution block configuration.
            drop_path: Drop path rate.
        """
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        norm_act_layer = partial(get_norm_act_layer(cfg.norm_layer, cfg.act_layer), eps=cfg.norm_eps)
        mid_chs = make_divisible((out_chs if cfg.expand_output else in_chs) * cfg.expand_ratio)
        groups = num_groups(cfg.group_size, mid_chs)

        if stride == 2:
            self.shortcut = Downsample2d(
                in_chs, out_chs, pool_type=cfg.pool_type, bias=cfg.output_bias, padding=cfg.padding, **dd)
        else:
            self.shortcut = nn.Identity()

        # where the spatial stride is applied: pooling, the 1x1, or the kxk (dw) conv
        assert cfg.stride_mode in ('pool', '1x1', 'dw')
        stride_pool, stride_1, stride_2 = 1, 1, 1
        if cfg.stride_mode == 'pool':
            # NOTE this is not described in paper, experiment to find faster option that doesn't stride in 1x1
            stride_pool, dilation_2 = stride, dilation[1]
            # FIXME handle dilation of avg pool
        elif cfg.stride_mode == '1x1':
            # NOTE I don't like this option described in paper, 1x1 w/ stride throws info away
            stride_1, dilation_2 = stride, dilation[1]
        else:
            stride_2, dilation_2 = stride, dilation[0]

        self.pre_norm = norm_act_layer(in_chs, apply_act=cfg.pre_norm_act, **dd)
        if stride_pool > 1:
            self.down = Downsample2d(in_chs, in_chs, 
pool_type=cfg.downsample_pool_type, padding=cfg.padding, **dd)
        else:
            self.down = nn.Identity()
        self.conv1_1x1 = create_conv2d(in_chs, mid_chs, 1, stride=stride_1, **dd)
        self.norm1 = norm_act_layer(mid_chs, **dd)

        self.conv2_kxk = create_conv2d(
            mid_chs,
            mid_chs,
            cfg.kernel_size,
            stride=stride_2,
            dilation=dilation_2,
            groups=groups,
            padding=cfg.padding,
            **dd,
        )

        attn_kwargs = {}
        if isinstance(cfg.attn_layer, str):
            if cfg.attn_layer == 'se' or cfg.attn_layer == 'eca':
                attn_kwargs['act_layer'] = cfg.attn_act_layer
                attn_kwargs['rd_channels'] = int(cfg.attn_ratio * (out_chs if cfg.expand_output else mid_chs))

        # two different orderings for SE and norm2 (due to some weights and trials using SE before norm2)
        if cfg.attn_early:
            self.se_early = create_attn(cfg.attn_layer, mid_chs, **attn_kwargs, **dd)
            self.norm2 = norm_act_layer(mid_chs, **dd)
            self.se = None
        else:
            self.se_early = None
            self.norm2 = norm_act_layer(mid_chs, **dd)
            self.se = create_attn(cfg.attn_layer, mid_chs, **attn_kwargs, **dd)

        self.conv3_1x1 = create_conv2d(mid_chs, out_chs, 1, bias=cfg.output_bias, **dd)
        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()

    def init_weights(self, scheme: str = '') -> None:
        named_apply(partial(_init_conv, scheme=scheme), self)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        shortcut = self.shortcut(x)
        x = self.pre_norm(x)
        x = self.down(x)

        # 1x1 expansion conv & norm-act
        x = self.conv1_1x1(x)
        x = self.norm1(x)

        # depthwise / grouped 3x3 conv w/ SE (or other) channel attention & norm-act
        x = self.conv2_kxk(x)
        if self.se_early is not None:
            x = self.se_early(x)
        x = self.norm2(x)
        if self.se is not None:
            x = self.se(x)

        # 1x1 linear projection to output width
        x = self.conv3_1x1(x)
        x = self.drop_path(x) + shortcut
        return x


class ConvNeXtBlock(nn.Module):
    """ConvNeXt Block."""

    def __init__(
            self,
            in_chs: int,
            out_chs: Optional[int] = None,
            kernel_size: int = 7,
            stride: int = 1,
            dilation: Tuple[int, int] = (1, 1),
            cfg: MaxxVitConvCfg = MaxxVitConvCfg(),
            conv_mlp: bool = True,
            drop_path: float = 0.,
            device=None,
            dtype=None,
    ):
        """
        Args:
            in_chs: Input channels.
            out_chs: Output channels.
            kernel_size: Kernel size for depthwise conv.
            stride: Stride for conv.
            dilation: Dilation for conv.
            cfg: Convolution block configuration.
            conv_mlp: Whether to use convolutional MLP.
            drop_path: Drop path rate.
        """
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        out_chs = out_chs or in_chs
        act_layer = get_act_layer(cfg.act_layer)
        # conv_mlp keeps NCHW throughout; otherwise norm/mlp run channels-last (see forward)
        if conv_mlp:
            norm_layer = partial(get_norm_layer(cfg.norm_layer), eps=cfg.norm_eps)
            mlp_layer = ConvMlp
        else:
            assert 'layernorm' in cfg.norm_layer
            norm_layer = LayerNorm
            mlp_layer = Mlp
        self.use_conv_mlp = conv_mlp

        if stride == 2:
            self.shortcut = Downsample2d(in_chs, out_chs, **dd)
        elif in_chs != out_chs:
            self.shortcut = nn.Conv2d(in_chs, out_chs, kernel_size=1, bias=cfg.output_bias, **dd)
        else:
            self.shortcut = nn.Identity()

        assert cfg.stride_mode in ('pool', 'dw')
        stride_pool, stride_dw = 1, 1
        # FIXME handle dilation?
        if cfg.stride_mode == 'pool':
            stride_pool = stride
        else:
            stride_dw = stride

        if stride_pool == 2:
            self.down = Downsample2d(in_chs, in_chs, pool_type=cfg.downsample_pool_type, **dd)
        else:
            self.down = nn.Identity()

        self.conv_dw = create_conv2d(
            in_chs,
            out_chs,
            kernel_size=kernel_size,
            stride=stride_dw,
            dilation=dilation[1],
            depthwise=True,
            bias=cfg.output_bias,
            **dd,
        )
        self.norm = norm_layer(out_chs, **dd)
        self.mlp = mlp_layer(
            out_chs,
            int(cfg.expand_ratio * out_chs),
            bias=cfg.output_bias,
            act_layer=act_layer,
            **dd,
        )
        if conv_mlp:
            self.ls = LayerScale2d(out_chs, cfg.init_values, **dd) if cfg.init_values else nn.Identity()
        else:
            self.ls = LayerScale(out_chs, cfg.init_values, **dd) if cfg.init_values else nn.Identity()
        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        shortcut = self.shortcut(x)
        x = self.down(x)
        x = self.conv_dw(x)
        if self.use_conv_mlp:
            x = self.norm(x)
            x = self.mlp(x)
            x = self.ls(x)
        else:
            # channels-last path: NCHW -> NHWC for norm/mlp, back to NCHW after
            x = x.permute(0, 2, 3, 1)
            x = self.norm(x)
            x = self.mlp(x)
            x = self.ls(x)
            x = x.permute(0, 3, 1, 2)

        x = self.drop_path(x) + shortcut
        return x


def window_partition(x: torch.Tensor, window_size: List[int]) -> torch.Tensor:
    """Partition into non-overlapping windows.

    (B, H, W, C) -> (B * num_windows, window_size[0], window_size[1], C)
    """
    B, H, W, C = x.shape
    _assert(H % window_size[0] == 0, f'height ({H}) must be divisible by window ({window_size[0]})')
    _assert(W % window_size[1] == 0, f'width ({W}) must be divisible by window ({window_size[1]})')
    x = x.view(B, H // window_size[0], window_size[0], W // window_size[1], window_size[1], C)
    windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size[0], window_size[1], C)
    return windows


@register_notrace_function  # reason: int argument is a Proxy
def window_reverse(windows: torch.Tensor, window_size: List[int], img_size: List[int]) -> torch.Tensor:
    """Reverse window partition back to (B, H, W, C)."""
    H, W = img_size
    C = windows.shape[-1]
    x = windows.view(-1, H // window_size[0], W // window_size[1], window_size[0], window_size[1], C)
    x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, H, W, C)
    return x


def grid_partition(x: torch.Tensor, grid_size: List[int]) -> torch.Tensor:
    """Partition into overlapping windows with grid striding.

    Unlike window_partition, positions in each output tile are strided across
    the full image rather than spatially contiguous.
    """
    B, H, W, C = x.shape
    _assert(H % grid_size[0] == 0, f'height {H} must be divisible by grid {grid_size[0]}')
    _assert(W % grid_size[1] == 0, f'width {W} must be divisible by grid {grid_size[1]}')
    x = x.view(B, grid_size[0], H // grid_size[0], grid_size[1], W // 
grid_size[1], C)\n    windows = x.permute(0, 2, 4, 1, 3, 5).contiguous().view(-1, grid_size[0], grid_size[1], C)\n    return windows\n\n\n@register_notrace_function  # reason: int argument is a Proxy\ndef grid_reverse(windows: torch.Tensor, grid_size: List[int], img_size: List[int]) -> torch.Tensor:\n    \"\"\"Reverse grid partition.\"\"\"\n    H, W = img_size\n    C = windows.shape[-1]\n    x = windows.view(-1, H // grid_size[0], W // grid_size[1], grid_size[0], grid_size[1], C)\n    x = x.permute(0, 3, 1, 4, 2, 5).contiguous().view(-1, H, W, C)\n    return x\n\n\ndef get_rel_pos_cls(cfg: MaxxVitTransformerCfg, window_size: Tuple[int, int]) -> Optional[Callable]:\n    \"\"\"Get relative position class based on config.\"\"\"\n    rel_pos_cls = None\n    if cfg.rel_pos_type == 'mlp':\n        rel_pos_cls = partial(RelPosMlp, window_size=window_size, hidden_dim=cfg.rel_pos_dim)\n    elif cfg.rel_pos_type == 'bias':\n        rel_pos_cls = partial(RelPosBias, window_size=window_size)\n    elif cfg.rel_pos_type == 'bias_tf':\n        rel_pos_cls = partial(RelPosBiasTf, window_size=window_size)\n    return rel_pos_cls\n\n\nclass PartitionAttentionCl(nn.Module):\n    \"\"\"Grid or Block partition + Attn + FFN.\n\n    NxC 'channels last' tensor layout.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            partition_type: str = 'block',\n            cfg: MaxxVitTransformerCfg = MaxxVitTransformerCfg(),\n            drop_path: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        norm_layer = partial(get_norm_layer(cfg.norm_layer_cl), eps=cfg.norm_eps)  # NOTE this block is channels-last\n        act_layer = get_act_layer(cfg.act_layer)\n\n        self.partition_block = partition_type == 'block'\n        self.partition_size = to_2tuple(cfg.window_size if self.partition_block else cfg.grid_size)\n        rel_pos_cls = get_rel_pos_cls(cfg, 
self.partition_size)

        self.norm1 = norm_layer(dim, **dd)
        self.attn = AttentionCl(
            dim,
            dim,
            dim_head=cfg.dim_head,
            bias=cfg.attn_bias,
            head_first=cfg.head_first,
            rel_pos_cls=rel_pos_cls,
            attn_drop=cfg.attn_drop,
            proj_drop=cfg.proj_drop,
            **dd,
        )
        self.ls1 = LayerScale(dim, init_values=cfg.init_values, **dd) if cfg.init_values else nn.Identity()
        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()

        self.norm2 = norm_layer(dim, **dd)
        self.mlp = Mlp(
            in_features=dim,
            hidden_features=int(dim * cfg.expand_ratio),
            act_layer=act_layer,
            drop=cfg.proj_drop,
            **dd,
        )
        self.ls2 = LayerScale(dim, init_values=cfg.init_values, **dd) if cfg.init_values else nn.Identity()
        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()

    def _partition_attn(self, x):
        # partition (window or grid) -> attention over each partition -> un-partition
        img_size = x.shape[1:3]
        if self.partition_block:
            partitioned = window_partition(x, self.partition_size)
        else:
            partitioned = grid_partition(x, self.partition_size)

        partitioned = self.attn(partitioned)

        if self.partition_block:
            x = window_reverse(partitioned, self.partition_size, img_size)
        else:
            x = grid_reverse(partitioned, self.partition_size, img_size)
        return x

    def forward(self, x):
        # pre-norm residual partitioned attention then pre-norm residual MLP
        x = x + self.drop_path1(self.ls1(self._partition_attn(self.norm1(x))))
        x = x + self.drop_path2(self.ls2(self.mlp(self.norm2(x))))
        return x


class ParallelPartitionAttention(nn.Module):
    """Experimental. 
Grid and Block partition + single FFN.

    NxC tensor layout.
    """

    def __init__(
            self,
            dim: int,
            cfg: MaxxVitTransformerCfg = MaxxVitTransformerCfg(),
            drop_path: float = 0.,
            device=None,
            dtype=None,
    ):
        """
        Args:
            dim: Input dimension (must be even; split across block/grid branches).
            cfg: Transformer block configuration.
            drop_path: Drop path rate.
        """
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        # each of the two attention branches outputs dim // 2 channels
        assert dim % 2 == 0
        norm_layer = partial(get_norm_layer(cfg.norm_layer_cl), eps=cfg.norm_eps)  # NOTE this block is channels-last
        act_layer = get_act_layer(cfg.act_layer)

        # a single partition size is shared by both branches
        assert cfg.window_size == cfg.grid_size
        self.partition_size = to_2tuple(cfg.window_size)
        rel_pos_cls = get_rel_pos_cls(cfg, self.partition_size)

        self.norm1 = norm_layer(dim, **dd)
        self.attn_block = AttentionCl(
            dim,
            dim // 2,
            dim_head=cfg.dim_head,
            bias=cfg.attn_bias,
            head_first=cfg.head_first,
            rel_pos_cls=rel_pos_cls,
            attn_drop=cfg.attn_drop,
            proj_drop=cfg.proj_drop,
            **dd,
        )
        self.attn_grid = AttentionCl(
            dim,
            dim // 2,
            dim_head=cfg.dim_head,
            bias=cfg.attn_bias,
            head_first=cfg.head_first,
            rel_pos_cls=rel_pos_cls,
            attn_drop=cfg.attn_drop,
            proj_drop=cfg.proj_drop,
            **dd,
        )
        self.ls1 = LayerScale(dim, init_values=cfg.init_values, **dd) if cfg.init_values else nn.Identity()
        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()

        self.norm2 = norm_layer(dim, **dd)
        self.mlp = Mlp(
            in_features=dim,
            hidden_features=int(dim * cfg.expand_ratio),
            out_features=dim,
            act_layer=act_layer,
            drop=cfg.proj_drop,
            **dd,
        )
        self.ls2 = LayerScale(dim, init_values=cfg.init_values, **dd) if cfg.init_values else nn.Identity()
        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()

    def _partition_attn(self, x: torch.Tensor) -> torch.Tensor:
        # run block (window) and grid attention in parallel, concat on channels
        img_size = x.shape[1:3]

        partitioned_block = window_partition(x, self.partition_size)
        partitioned_block = self.attn_block(partitioned_block)
        x_window = window_reverse(partitioned_block, self.partition_size, img_size)

        partitioned_grid = grid_partition(x, self.partition_size)
        partitioned_grid = self.attn_grid(partitioned_grid)
        x_grid = grid_reverse(partitioned_grid, self.partition_size, img_size)

        return torch.cat([x_window, x_grid], dim=-1)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = x + self.drop_path1(self.ls1(self._partition_attn(self.norm1(x))))
        x = x + self.drop_path2(self.ls2(self.mlp(self.norm2(x))))
        return x


def window_partition_nchw(x: torch.Tensor, window_size: List[int]) -> torch.Tensor:
    """Partition windows for NCHW tensors.

    (B, C, H, W) -> (B * num_windows, C, window_size[0], window_size[1])
    """
    B, C, H, W = x.shape
    _assert(H % window_size[0] == 0, f'height ({H}) must be divisible by window ({window_size[0]})')
    _assert(W % window_size[1] == 0, f'width ({W}) must be divisible by window ({window_size[1]})')
    x = x.view(B, C, H // window_size[0], window_size[0], W // window_size[1], window_size[1])
    windows = x.permute(0, 2, 4, 1, 3, 5).contiguous().view(-1, C, window_size[0], window_size[1])
    return windows


@register_notrace_function  # reason: int argument is a Proxy
def window_reverse_nchw(windows: 
torch.Tensor, window_size: List[int], img_size: List[int]) -> torch.Tensor:\n    \"\"\"Reverse window partition for NCHW tensors.\"\"\"\n    H, W = img_size\n    C = windows.shape[1]\n    x = windows.view(-1, H // window_size[0], W // window_size[1], C, window_size[0], window_size[1])\n    x = x.permute(0, 3, 1, 4, 2, 5).contiguous().view(-1, C, H, W)\n    return x\n\n\ndef grid_partition_nchw(x: torch.Tensor, grid_size: List[int]) -> torch.Tensor:\n    \"\"\"Grid partition for NCHW tensors.\"\"\"\n    B, C, H, W = x.shape\n    _assert(H % grid_size[0] == 0, f'height {H} must be divisible by grid {grid_size[0]}')\n    _assert(W % grid_size[1] == 0, f'width {W} must be divisible by grid {grid_size[1]}')\n    x = x.view(B, C, grid_size[0], H // grid_size[0], grid_size[1], W // grid_size[1])\n    windows = x.permute(0, 3, 5, 1, 2, 4).contiguous().view(-1, C, grid_size[0], grid_size[1])\n    return windows\n\n\n@register_notrace_function  # reason: int argument is a Proxy\ndef grid_reverse_nchw(windows: torch.Tensor, grid_size: List[int], img_size: List[int]) -> torch.Tensor:\n    \"\"\"Reverse grid partition for NCHW tensors.\"\"\"\n    H, W = img_size\n    C = windows.shape[1]\n    x = windows.view(-1, H // grid_size[0], W // grid_size[1], C, grid_size[0], grid_size[1])\n    x = x.permute(0, 3, 4, 1, 5, 2).contiguous().view(-1, C, H, W)\n    return x\n\n\nclass PartitionAttention2d(nn.Module):\n    \"\"\"Grid or Block partition + Attn + FFN.\n\n    '2D' NCHW tensor layout.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            partition_type: str = 'block',\n            cfg: MaxxVitTransformerCfg = MaxxVitTransformerCfg(),\n            drop_path: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            dim: Input dimension.\n            partition_type: Partition type ('block' or 'grid').\n            cfg: Transformer block configuration.\n            drop_path: Drop path 
rate.
        """
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        norm_layer = partial(get_norm_layer(cfg.norm_layer), eps=cfg.norm_eps)  # NOTE this block operates on NCHW tensors (cf. PartitionAttentionCl)
        act_layer = get_act_layer(cfg.act_layer)

        self.partition_block = partition_type == 'block'
        self.partition_size = to_2tuple(cfg.window_size if self.partition_block else cfg.grid_size)
        rel_pos_cls = get_rel_pos_cls(cfg, self.partition_size)

        self.norm1 = norm_layer(dim, **dd)
        self.attn = Attention2d(
            dim,
            dim,
            dim_head=cfg.dim_head,
            bias=cfg.attn_bias,
            head_first=cfg.head_first,
            rel_pos_cls=rel_pos_cls,
            attn_drop=cfg.attn_drop,
            proj_drop=cfg.proj_drop,
            **dd,
        )
        self.ls1 = LayerScale2d(dim, init_values=cfg.init_values, **dd) if cfg.init_values else nn.Identity()
        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()

        self.norm2 = norm_layer(dim, **dd)
        self.mlp = ConvMlp(
            in_features=dim,
            hidden_features=int(dim * cfg.expand_ratio),
            act_layer=act_layer,
            drop=cfg.proj_drop,
            **dd,
        )
        self.ls2 = LayerScale2d(dim, init_values=cfg.init_values, **dd) if cfg.init_values else nn.Identity()
        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()

    def _partition_attn(self, x: torch.Tensor) -> torch.Tensor:
        # partition (window or grid) -> attention over each partition -> un-partition
        img_size = x.shape[-2:]
        if self.partition_block:
            partitioned = window_partition_nchw(x, self.partition_size)
        else:
            partitioned = grid_partition_nchw(x, self.partition_size)

        partitioned = self.attn(partitioned)

        if self.partition_block:
            x = window_reverse_nchw(partitioned, self.partition_size, img_size)
        else:
            x = grid_reverse_nchw(partitioned, self.partition_size, img_size)
        return x

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = x + self.drop_path1(self.ls1(self._partition_attn(self.norm1(x))))
        x = x + self.drop_path2(self.ls2(self.mlp(self.norm2(x))))
        return x


class MaxxVitBlock(nn.Module):
    """MaxVit conv, window partition + FFN , grid partition + FFN."""

    def __init__(
            self,
            dim: int,
            dim_out: int,
            stride: int = 1,
            conv_cfg: MaxxVitConvCfg = MaxxVitConvCfg(),
            transformer_cfg: MaxxVitTransformerCfg = MaxxVitTransformerCfg(),
            drop_path: float = 0.,
            device=None,
            dtype=None,
    ):
        """Initialize MaxxVitBlock.

        Args:
            dim: Input channel dimension.
            dim_out: Output channel dimension.
            stride: Stride for downsampling.
            conv_cfg: Configuration for convolutional blocks.
            transformer_cfg: Configuration for transformer blocks.
            drop_path: Drop path rate.
        """
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        self.nchw_attn = transformer_cfg.use_nchw_attn

        conv_cls = ConvNeXtBlock if conv_cfg.block_type == 'convnext' else MbConvBlock
        self.conv = conv_cls(dim, dim_out, stride=stride, cfg=conv_cfg, drop_path=drop_path, **dd)

        # choose NCHW or channels-last attention variant; block attention is optional
        attn_kwargs = dict(dim=dim_out, cfg=transformer_cfg, drop_path=drop_path, **dd)
        partition_layer = PartitionAttention2d if self.nchw_attn else PartitionAttentionCl
        self.attn_block = None if transformer_cfg.no_block_attn else partition_layer(**attn_kwargs)
        self.attn_grid = partition_layer(partition_type='grid', **attn_kwargs)

    def init_weights(self, scheme=''):
        if self.attn_block is not None:
            named_apply(partial(_init_transformer, scheme=scheme), self.attn_block)
        named_apply(partial(_init_transformer, scheme=scheme), self.attn_grid)
        named_apply(partial(_init_conv, scheme=scheme), self.conv)

    def forward(self, x):
        # NCHW format
        x = self.conv(x)

        if not self.nchw_attn:
            x = x.permute(0, 2, 3, 1)  # to NHWC (channels-last)
        if self.attn_block is not None:
            x = self.attn_block(x)
        x = self.attn_grid(x)
        if not self.nchw_attn:
            x = x.permute(0, 3, 1, 2)  # back to NCHW
        return x


class ParallelMaxxVitBlock(nn.Module):
    """MaxVit block with parallel cat(window + grid), one FF.

    Experimental timm block.
    """

    def __init__(
            self,
            dim: int,
            dim_out: int,
            stride: int = 1,
            num_conv: int = 2,
            conv_cfg: MaxxVitConvCfg = MaxxVitConvCfg(),
            transformer_cfg: MaxxVitTransformerCfg = MaxxVitTransformerCfg(),
            drop_path: float = 0.,
            device=None,
            dtype=None,
    ):
        """
        Args:
            dim: Input dimension.
            dim_out: Output dimension.
            stride: Stride for first conv block.
            num_conv: Number of convolution blocks.
            conv_cfg: Convolution block configuration.
            transformer_cfg: Transformer block configuration.
            drop_path: Drop path rate.
        """
        dd = {'device': device, 'dtype': dtype}
        super().__init__()

        conv_cls = ConvNeXtBlock if conv_cfg.block_type == 'convnext' else MbConvBlock
        if num_conv > 1:
            convs = [conv_cls(dim, dim_out, stride=stride, cfg=conv_cfg, drop_path=drop_path, **dd)]
            # NOTE(review): list multiplication repeats the SAME module instance, so for
            # num_conv > 2 the extra conv blocks share one set of weights — confirm intended
            convs += [conv_cls(dim_out, dim_out, cfg=conv_cfg, drop_path=drop_path, **dd)] * (num_conv - 1)
            self.conv = nn.Sequential(*convs)
        else:
            self.conv = conv_cls(dim, dim_out, stride=stride, cfg=conv_cfg, drop_path=drop_path, **dd)
        self.attn = ParallelPartitionAttention(dim=dim_out, cfg=transformer_cfg, drop_path=drop_path, **dd)

    def init_weights(self, scheme: str = '') -> None:
        named_apply(partial(_init_transformer, scheme=scheme), self.attn)
        named_apply(partial(_init_conv, scheme=scheme), self.conv)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # conv stack in NCHW, attention in channels-last, back to NCHW
        x = self.conv(x)
        x = x.permute(0, 2, 3, 1)
        x = self.attn(x)
        x = x.permute(0, 3, 1, 2)
        return x


class MaxxVitStage(nn.Module):
    """MaxxVit stage consisting of mixed convolution and transformer blocks."""

    def __init__(
            self,
            in_chs: int,
            out_chs: int,
            stride: int = 2,
            depth: int = 4,
            feat_size: Tuple[int, int] = (14, 14),
            block_types: Union[str, Tuple[str]] = 'C',
            transformer_cfg: MaxxVitTransformerCfg = MaxxVitTransformerCfg(),
            conv_cfg: MaxxVitConvCfg = MaxxVitConvCfg(),
            drop_path: Union[float, List[float]] = 0.,
            device=None,
            dtype=None,
    ):
        """
        Args:
            in_chs: Input channels.
            out_chs: Output channels.
            stride: Stride for first block.
            depth: Number of blocks in stage.
            feat_size: Feature map size.
            block_types: Block types ('C' for conv, 'T' 
for transformer, etc).\n            transformer_cfg: Transformer block configuration.\n            conv_cfg: Convolution block configuration.\n            drop_path: Drop path rate(s).\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.grad_checkpointing = False\n\n        block_types = extend_tuple(block_types, depth)\n        blocks = []\n        for i, t in enumerate(block_types):\n            block_stride = stride if i == 0 else 1\n            assert t in ('C', 'T', 'M', 'PM')\n            if t == 'C':\n                conv_cls = ConvNeXtBlock if conv_cfg.block_type == 'convnext' else MbConvBlock\n                blocks += [conv_cls(\n                    in_chs,\n                    out_chs,\n                    stride=block_stride,\n                    cfg=conv_cfg,\n                    drop_path=drop_path[i],\n                    **dd,\n                )]\n            elif t == 'T':\n                rel_pos_cls = get_rel_pos_cls(transformer_cfg, feat_size)\n                blocks += [TransformerBlock2d(\n                    in_chs,\n                    out_chs,\n                    stride=block_stride,\n                    rel_pos_cls=rel_pos_cls,\n                    cfg=transformer_cfg,\n                    drop_path=drop_path[i],\n                    **dd,\n                )]\n            elif t == 'M':\n                blocks += [MaxxVitBlock(\n                    in_chs,\n                    out_chs,\n                    stride=block_stride,\n                    conv_cfg=conv_cfg,\n                    transformer_cfg=transformer_cfg,\n                    drop_path=drop_path[i],\n                    **dd,\n                )]\n            elif t == 'PM':\n                blocks += [ParallelMaxxVitBlock(\n                    in_chs,\n                    out_chs,\n                    stride=block_stride,\n                    conv_cfg=conv_cfg,\n                    transformer_cfg=transformer_cfg,\n      
              drop_path=drop_path[i],\n                    **dd,\n                )]\n            in_chs = out_chs\n        self.blocks = nn.Sequential(*blocks)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        return x\n\n\nclass Stem(nn.Module):\n    \"\"\"Stem layer for feature extraction.\"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 3,\n            padding: str = '',\n            bias: bool = False,\n            act_layer: str = 'gelu',\n            norm_layer: str = 'batchnorm2d',\n            norm_eps: float = 1e-5,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            in_chs: Input channels.\n            out_chs: Output channels.\n            kernel_size: Kernel size for convolutions.\n            padding: Padding mode.\n            bias: Whether to use bias.\n            act_layer: Activation layer.\n            norm_layer: Normalization layer.\n            norm_eps: Normalization epsilon.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if not isinstance(out_chs, (list, tuple)):\n            out_chs = to_2tuple(out_chs)\n\n        norm_act_layer = partial(get_norm_act_layer(norm_layer, act_layer), eps=norm_eps)\n        self.out_chs = out_chs[-1]\n        self.stride = 2\n\n        self.conv1 = create_conv2d(in_chs, out_chs[0], kernel_size, stride=2, padding=padding, bias=bias, **dd)\n        self.norm1 = norm_act_layer(out_chs[0], **dd)\n        self.conv2 = create_conv2d(out_chs[0], out_chs[1], kernel_size, stride=1, padding=padding, bias=bias, **dd)\n\n    def init_weights(self, scheme: str = '') -> None:\n        named_apply(partial(_init_conv, scheme=scheme), self)\n\n    def 
forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.conv1(x)\n        x = self.norm1(x)\n        x = self.conv2(x)\n        return x\n\n\ndef cfg_window_size(cfg: MaxxVitTransformerCfg, img_size: Tuple[int, int]) -> MaxxVitTransformerCfg:\n    \"\"\"Configure window size based on image size and partition ratio.\"\"\"\n    if cfg.window_size is not None:\n        assert cfg.grid_size\n        return cfg\n    partition_size = img_size[0] // cfg.partition_ratio, img_size[1] // cfg.partition_ratio\n    cfg = replace(cfg, window_size=partition_size, grid_size=partition_size)\n    return cfg\n\n\ndef _overlay_kwargs(cfg: MaxxVitCfg, **kwargs: Any) -> MaxxVitCfg:\n    \"\"\"Overlay keyword arguments onto configuration.\"\"\"\n    transformer_kwargs = {}\n    conv_kwargs = {}\n    base_kwargs = {}\n    for k, v in kwargs.items():\n        if k.startswith('transformer_'):\n            transformer_kwargs[k.replace('transformer_', '')] = v\n        elif k.startswith('conv_'):\n            conv_kwargs[k.replace('conv_', '')] = v\n        else:\n            base_kwargs[k] = v\n    cfg = replace(\n        cfg,\n        transformer_cfg=replace(cfg.transformer_cfg, **transformer_kwargs),\n        conv_cfg=replace(cfg.conv_cfg, **conv_kwargs),\n        **base_kwargs\n    )\n    return cfg\n\n\nclass MaxxVit(nn.Module):\n    \"\"\"CoaTNet + MaxVit base model.\n\n    Highly configurable for different block compositions, tensor layouts, pooling types.\n    \"\"\"\n\n    def __init__(\n            self,\n            cfg: MaxxVitCfg,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n            **kwargs: Any,\n    ):\n        \"\"\"\n        Args:\n            cfg: Model configuration.\n            img_size: Input image size.\n     
       in_chans: Number of input channels.\n            num_classes: Number of classification classes.\n            global_pool: Global pooling type.\n            drop_rate: Dropout rate.\n            drop_path_rate: Drop path rate.\n            **kwargs: Additional keyword arguments to overlay on config.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        img_size = to_2tuple(img_size)\n        if kwargs:\n            cfg = _overlay_kwargs(cfg, **kwargs)\n        transformer_cfg = cfg_window_size(cfg.transformer_cfg, img_size)\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.num_features = self.embed_dim = cfg.embed_dim[-1]\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n        self.feature_info = []\n\n        self.stem = Stem(\n            in_chs=in_chans,\n            out_chs=cfg.stem_width,\n            padding=cfg.conv_cfg.padding,\n            bias=cfg.stem_bias,\n            act_layer=cfg.conv_cfg.act_layer,\n            norm_layer=cfg.conv_cfg.norm_layer,\n            norm_eps=cfg.conv_cfg.norm_eps,\n            **dd,\n        )\n        stride = self.stem.stride\n        self.feature_info += [dict(num_chs=self.stem.out_chs, reduction=2, module='stem')]\n        feat_size = tuple([i // s for i, s in zip(img_size, to_2tuple(stride))])\n\n        num_stages = len(cfg.embed_dim)\n        assert len(cfg.depths) == num_stages\n        dpr = calculate_drop_path_rates(drop_path_rate, cfg.depths, stagewise=True)\n        in_chs = self.stem.out_chs\n        stages = []\n        for i in range(num_stages):\n            stage_stride = 2\n            out_chs = cfg.embed_dim[i]\n            feat_size = tuple([(r - 1) // stage_stride + 1 for r in feat_size])\n            stages += [MaxxVitStage(\n                in_chs,\n                out_chs,\n                depth=cfg.depths[i],\n                
block_types=cfg.block_type[i],\n                conv_cfg=cfg.conv_cfg,\n                transformer_cfg=transformer_cfg,\n                feat_size=feat_size,\n                drop_path=dpr[i],\n                **dd,\n            )]\n            stride *= stage_stride\n            in_chs = out_chs\n            self.feature_info += [dict(num_chs=out_chs, reduction=stride, module=f'stages.{i}')]\n        self.stages = nn.Sequential(*stages)\n\n        final_norm_layer = partial(get_norm_layer(cfg.transformer_cfg.norm_layer), eps=cfg.transformer_cfg.norm_eps)\n        if cfg.head_hidden_size:\n            self.norm = nn.Identity()\n            self.head_hidden_size = cfg.head_hidden_size\n            self.head = NormMlpClassifierHead(\n                self.num_features,\n                num_classes,\n                hidden_size=self.head_hidden_size,\n                pool_type=global_pool,\n                drop_rate=drop_rate,\n                norm_layer=final_norm_layer,\n                **dd,\n            )\n        else:\n            # standard classifier head w/ norm, pooling, fc classifier\n            self.head_hidden_size = self.num_features\n            self.norm = final_norm_layer(self.num_features, **dd)\n            self.head = ClassifierHead(\n                self.num_features,\n                num_classes,\n                pool_type=global_pool,\n                drop_rate=drop_rate,\n                **dd,\n            )\n\n        # Weight init (default PyTorch init works well for AdamW if scheme not set)\n        assert cfg.weight_init in ('', 'normal', 'trunc_normal', 'xavier_normal', 'vit_eff')\n        if cfg.weight_init:\n            named_apply(partial(self._init_weights, scheme=cfg.weight_init), self)\n\n    def _init_weights(self, module: nn.Module, name: str, scheme: str = '') -> None:\n        if hasattr(module, 'init_weights'):\n            try:\n                module.init_weights(scheme=scheme)\n            except TypeError:\n                
module.init_weights()\n\n    @torch.jit.ignore\n    def no_weight_decay(self) -> Set[str]:\n        return {\n            k for k, _ in self.named_parameters()\n            if any(n in k for n in [\"relative_position_bias_table\", \"rel_pos.mlp\"])}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        matcher = dict(\n            stem=r'^stem',  # stem and embed\n            blocks=[(r'^stages\\.(\\d+)', None), (r'^norm', (99999,))]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        for s in self.stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None) -> None:\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        
take_indices, max_index = feature_take_indices(len(self.stages) + 1, indices)\n\n        # forward pass\n        feat_idx = 0  # stem is index 0\n        x = self.stem(x)\n        if feat_idx in take_indices:\n            intermediates.append(x)\n\n        last_idx = len(self.stages)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index]\n        for stage in stages:\n            feat_idx += 1\n            x = stage(x)\n            if feat_idx in take_indices:\n                if norm and feat_idx == last_idx:\n                    x_inter = self.norm(x)  # applying final norm to last intermediate\n                else:\n                    x_inter = x\n                intermediates.append(x_inter)\n\n        if intermediates_only:\n            return intermediates\n\n        if feat_idx == last_idx:\n            x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ) -> Tuple[int, ...]:\n        \"\"\"Prune layers not required for specified intermediates.\"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages) + 1, indices)\n        self.stages = self.stages[:max_index]  # truncate blocks w/ stem as idx 0\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.head = self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.stem(x)\n        x = self.stages(x)\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n\n    def forward(self, 
x: torch.Tensor) -> torch.Tensor:\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _rw_coat_cfg(\n        stride_mode: str = 'pool',\n        pool_type: str = 'avg2',\n        conv_output_bias: bool = False,\n        conv_attn_early: bool = False,\n        conv_attn_act_layer: str = 'relu',\n        conv_norm_layer: str = '',\n        transformer_shortcut_bias: bool = True,\n        transformer_norm_layer: str = 'layernorm2d',\n        transformer_norm_layer_cl: str = 'layernorm',\n        init_values: Optional[float] = None,\n        rel_pos_type: str = 'bias',\n        rel_pos_dim: int = 512,\n) -> Dict[str, Any]:\n    \"\"\"RW variant configuration for CoAtNet models.\n\n    These models were created and trained before seeing https://github.com/google-research/maxvit\n\n    Common differences for initial timm models:\n      - pre-norm layer in MZBConv included an activation after norm\n      - mbconv expansion calculated from input instead of output chs\n      - mbconv shortcut and final 1x1 conv did not have a bias\n      - SE act layer was relu, not silu\n      - mbconv uses silu in timm, not gelu\n      - expansion in attention block done via output proj, not input proj\n\n    Variable differences (evolved over training initial models):\n      - avg pool with kernel_size=2 favoured downsampling (instead of maxpool for coat)\n      - SE attention was between conv2 and norm/act\n      - default to avg pool for mbconv downsample instead of 1x1 or dw conv\n      - transformer block shortcut has no bias\n    \"\"\"\n    return dict(\n        conv_cfg=MaxxVitConvCfg(\n            stride_mode=stride_mode,\n            pool_type=pool_type,\n            pre_norm_act=True,\n            expand_output=False,\n            output_bias=conv_output_bias,\n            attn_early=conv_attn_early,\n            attn_act_layer=conv_attn_act_layer,\n            act_layer='silu',\n            norm_layer=conv_norm_layer,\n        
),\n        transformer_cfg=MaxxVitTransformerCfg(\n            expand_first=False,\n            shortcut_bias=transformer_shortcut_bias,\n            pool_type=pool_type,\n            init_values=init_values,\n            norm_layer=transformer_norm_layer,\n            norm_layer_cl=transformer_norm_layer_cl,\n            rel_pos_type=rel_pos_type,\n            rel_pos_dim=rel_pos_dim,\n        ),\n    )\n\n\ndef _rw_max_cfg(\n        stride_mode: str = 'dw',\n        pool_type: str = 'avg2',\n        conv_output_bias: bool = False,\n        conv_attn_ratio: float = 1 / 16,\n        conv_norm_layer: str = '',\n        transformer_norm_layer: str = 'layernorm2d',\n        transformer_norm_layer_cl: str = 'layernorm',\n        window_size: Optional[Tuple[int, int]] = None,\n        dim_head: int = 32,\n        init_values: Optional[float] = None,\n        rel_pos_type: str = 'bias',\n        rel_pos_dim: int = 512,\n) -> Dict[str, Any]:\n    \"\"\"RW variant configuration for MaxViT models.\n\n    These models were created and trained before seeing https://github.com/google-research/maxvit\n\n    Differences of initial timm models:\n      - mbconv expansion calculated from input instead of output chs\n      - mbconv shortcut and final 1x1 conv did not have a bias\n      - mbconv uses silu in timm, not gelu\n      - expansion in attention block done via output proj, not input proj\n    \"\"\"\n    return dict(\n        conv_cfg=MaxxVitConvCfg(\n            stride_mode=stride_mode,\n            pool_type=pool_type,\n            expand_output=False,\n            output_bias=conv_output_bias,\n            attn_ratio=conv_attn_ratio,\n            act_layer='silu',\n            norm_layer=conv_norm_layer,\n        ),\n        transformer_cfg=MaxxVitTransformerCfg(\n            expand_first=False,\n            pool_type=pool_type,\n            dim_head=dim_head,\n            window_size=window_size,\n            init_values=init_values,\n            
norm_layer=transformer_norm_layer,\n            norm_layer_cl=transformer_norm_layer_cl,\n            rel_pos_type=rel_pos_type,\n            rel_pos_dim=rel_pos_dim,\n        ),\n    )\n\n\ndef _next_cfg(\n        stride_mode: str = 'dw',\n        pool_type: str = 'avg2',\n        conv_norm_layer: str = 'layernorm2d',\n        conv_norm_layer_cl: str = 'layernorm',\n        transformer_norm_layer: str = 'layernorm2d',\n        transformer_norm_layer_cl: str = 'layernorm',\n        window_size: Optional[Tuple[int, int]] = None,\n        no_block_attn: bool = False,\n        init_values: Union[float, Tuple[float, float]] = 1e-6,\n        rel_pos_type: str = 'mlp',  # MLP by default for maxxvit\n        rel_pos_dim: int = 512,\n) -> Dict[str, Any]:\n    \"\"\"Configuration for experimental ConvNeXt-based MaxxViT models.\"\"\"\n    init_values = to_2tuple(init_values)\n    return dict(\n        conv_cfg=MaxxVitConvCfg(\n            block_type='convnext',\n            stride_mode=stride_mode,\n            pool_type=pool_type,\n            expand_output=False,\n            init_values=init_values[0],\n            norm_layer=conv_norm_layer,\n            norm_layer_cl=conv_norm_layer_cl,\n        ),\n        transformer_cfg=MaxxVitTransformerCfg(\n            expand_first=False,\n            pool_type=pool_type,\n            window_size=window_size,\n            no_block_attn=no_block_attn,  # enabled for MaxxViT-V2\n            init_values=init_values[1],\n            norm_layer=transformer_norm_layer,\n            norm_layer_cl=transformer_norm_layer_cl,\n            rel_pos_type=rel_pos_type,\n            rel_pos_dim=rel_pos_dim,\n        ),\n    )\n\n\ndef _tf_cfg() -> Dict[str, Any]:\n    \"\"\"Configuration matching TensorFlow MaxViT models.\"\"\"\n    return dict(\n        conv_cfg=MaxxVitConvCfg(\n            norm_eps=1e-3,\n            act_layer='gelu_tanh',\n            padding='same',\n        ),\n        transformer_cfg=MaxxVitTransformerCfg(\n            
norm_eps=1e-5,\n            act_layer='gelu_tanh',\n            head_first=False,  # heads are interleaved (q_nh, q_hdim, k_nh, q_hdim, ....)\n            rel_pos_type='bias_tf',\n        ),\n    )\n\n\nmodel_cfgs = dict(\n    # timm specific CoAtNet configs\n    coatnet_pico_rw=MaxxVitCfg(\n        embed_dim=(64, 128, 256, 512),\n        depths=(2, 3, 5, 2),\n        stem_width=(32, 64),\n        **_rw_max_cfg(  # using newer max defaults here\n            conv_output_bias=True,\n            conv_attn_ratio=0.25,\n        ),\n    ),\n    coatnet_nano_rw=MaxxVitCfg(\n        embed_dim=(64, 128, 256, 512),\n        depths=(3, 4, 6, 3),\n        stem_width=(32, 64),\n        **_rw_max_cfg(  # using newer max defaults here\n            stride_mode='pool',\n            conv_output_bias=True,\n            conv_attn_ratio=0.25,\n        ),\n    ),\n    coatnet_0_rw=MaxxVitCfg(\n        embed_dim=(96, 192, 384, 768),\n        depths=(2, 3, 7, 2),  # deeper than paper '0' model\n        stem_width=(32, 64),\n        **_rw_coat_cfg(\n            conv_attn_early=True,\n            transformer_shortcut_bias=False,\n        ),\n    ),\n    coatnet_1_rw=MaxxVitCfg(\n        embed_dim=(96, 192, 384, 768),\n        depths=(2, 6, 14, 2),\n        stem_width=(32, 64),\n        **_rw_coat_cfg(\n            stride_mode='dw',\n            conv_attn_early=True,\n            transformer_shortcut_bias=False,\n        )\n    ),\n    coatnet_2_rw=MaxxVitCfg(\n        embed_dim=(128, 256, 512, 1024),\n        depths=(2, 6, 14, 2),\n        stem_width=(64, 128),\n        **_rw_coat_cfg(\n            stride_mode='dw',\n            conv_attn_act_layer='silu',\n            #init_values=1e-6,\n        ),\n    ),\n    coatnet_3_rw=MaxxVitCfg(\n        embed_dim=(192, 384, 768, 1536),\n        depths=(2, 6, 14, 2),\n        stem_width=(96, 192),\n        **_rw_coat_cfg(\n            stride_mode='dw',\n            conv_attn_act_layer='silu',\n            init_values=1e-6,\n        ),\n    ),\n\n    
# Experimental CoAtNet configs w/ ImageNet-1k train (different norm layers, MLP rel-pos)\n    coatnet_bn_0_rw=MaxxVitCfg(\n        embed_dim=(96, 192, 384, 768),\n        depths=(2, 3, 7, 2),  # deeper than paper '0' model\n        stem_width=(32, 64),\n        **_rw_coat_cfg(\n            stride_mode='dw',\n            conv_attn_early=True,\n            transformer_shortcut_bias=False,\n            transformer_norm_layer='batchnorm2d',\n        )\n    ),\n    coatnet_rmlp_nano_rw=MaxxVitCfg(\n        embed_dim=(64, 128, 256, 512),\n        depths=(3, 4, 6, 3),\n        stem_width=(32, 64),\n        **_rw_max_cfg(\n            conv_output_bias=True,\n            conv_attn_ratio=0.25,\n            rel_pos_type='mlp',\n            rel_pos_dim=384,\n        ),\n    ),\n    coatnet_rmlp_0_rw=MaxxVitCfg(\n        embed_dim=(96, 192, 384, 768),\n        depths=(2, 3, 7, 2),  # deeper than paper '0' model\n        stem_width=(32, 64),\n        **_rw_coat_cfg(\n            stride_mode='dw',\n            rel_pos_type='mlp',\n        ),\n    ),\n    coatnet_rmlp_1_rw=MaxxVitCfg(\n        embed_dim=(96, 192, 384, 768),\n        depths=(2, 6, 14, 2),\n        stem_width=(32, 64),\n        **_rw_coat_cfg(\n            pool_type='max',\n            conv_attn_early=True,\n            transformer_shortcut_bias=False,\n            rel_pos_type='mlp',\n            rel_pos_dim=384,  # was supposed to be 512, woops\n        ),\n    ),\n    coatnet_rmlp_1_rw2=MaxxVitCfg(\n        embed_dim=(96, 192, 384, 768),\n        depths=(2, 6, 14, 2),\n        stem_width=(32, 64),\n        **_rw_coat_cfg(\n            stride_mode='dw',\n            rel_pos_type='mlp',\n            rel_pos_dim=512,  # was supposed to be 512, woops\n        ),\n    ),\n    coatnet_rmlp_2_rw=MaxxVitCfg(\n        embed_dim=(128, 256, 512, 1024),\n        depths=(2, 6, 14, 2),\n        stem_width=(64, 128),\n        **_rw_coat_cfg(\n            stride_mode='dw',\n            conv_attn_act_layer='silu',\n            
init_values=1e-6,\n            rel_pos_type='mlp'\n        ),\n    ),\n    coatnet_rmlp_3_rw=MaxxVitCfg(\n        embed_dim=(192, 384, 768, 1536),\n        depths=(2, 6, 14, 2),\n        stem_width=(96, 192),\n        **_rw_coat_cfg(\n            stride_mode='dw',\n            conv_attn_act_layer='silu',\n            init_values=1e-6,\n            rel_pos_type='mlp'\n        ),\n    ),\n\n    coatnet_nano_cc=MaxxVitCfg(\n        embed_dim=(64, 128, 256, 512),\n        depths=(3, 4, 6, 3),\n        stem_width=(32, 64),\n        block_type=('C', 'C', ('C', 'T'), ('C', 'T')),\n        **_rw_coat_cfg(),\n    ),\n    coatnext_nano_rw=MaxxVitCfg(\n        embed_dim=(64, 128, 256, 512),\n        depths=(3, 4, 6, 3),\n        stem_width=(32, 64),\n        weight_init='normal',\n        **_next_cfg(\n            rel_pos_type='bias',\n            init_values=(1e-5, None)\n        ),\n    ),\n\n    # Trying to be like the CoAtNet paper configs\n    coatnet_0=MaxxVitCfg(\n        embed_dim=(96, 192, 384, 768),\n        depths=(2, 3, 5, 2),\n        stem_width=64,\n        head_hidden_size=768,\n    ),\n    coatnet_1=MaxxVitCfg(\n        embed_dim=(96, 192, 384, 768),\n        depths=(2, 6, 14, 2),\n        stem_width=64,\n        head_hidden_size=768,\n    ),\n    coatnet_2=MaxxVitCfg(\n        embed_dim=(128, 256, 512, 1024),\n        depths=(2, 6, 14, 2),\n        stem_width=128,\n        head_hidden_size=1024,\n    ),\n    coatnet_3=MaxxVitCfg(\n        embed_dim=(192, 384, 768, 1536),\n        depths=(2, 6, 14, 2),\n        stem_width=192,\n        head_hidden_size=1536,\n    ),\n    coatnet_4=MaxxVitCfg(\n        embed_dim=(192, 384, 768, 1536),\n        depths=(2, 12, 28, 2),\n        stem_width=192,\n        head_hidden_size=1536,\n    ),\n    coatnet_5=MaxxVitCfg(\n        embed_dim=(256, 512, 1280, 2048),\n        depths=(2, 12, 28, 2),\n        stem_width=192,\n        head_hidden_size=2048,\n    ),\n\n    # Experimental MaxVit configs\n    
maxvit_pico_rw=MaxxVitCfg(\n        embed_dim=(32, 64, 128, 256),\n        depths=(2, 2, 5, 2),\n        block_type=('M',) * 4,\n        stem_width=(24, 32),\n        **_rw_max_cfg(),\n    ),\n    maxvit_nano_rw=MaxxVitCfg(\n        embed_dim=(64, 128, 256, 512),\n        depths=(1, 2, 3, 1),\n        block_type=('M',) * 4,\n        stem_width=(32, 64),\n        **_rw_max_cfg(),\n    ),\n    maxvit_tiny_rw=MaxxVitCfg(\n        embed_dim=(64, 128, 256, 512),\n        depths=(2, 2, 5, 2),\n        block_type=('M',) * 4,\n        stem_width=(32, 64),\n        **_rw_max_cfg(),\n    ),\n    maxvit_tiny_pm=MaxxVitCfg(\n        embed_dim=(64, 128, 256, 512),\n        depths=(2, 2, 5, 2),\n        block_type=('PM',) * 4,\n        stem_width=(32, 64),\n        **_rw_max_cfg(),\n    ),\n\n    maxvit_rmlp_pico_rw=MaxxVitCfg(\n        embed_dim=(32, 64, 128, 256),\n        depths=(2, 2, 5, 2),\n        block_type=('M',) * 4,\n        stem_width=(24, 32),\n        **_rw_max_cfg(rel_pos_type='mlp'),\n    ),\n    maxvit_rmlp_nano_rw=MaxxVitCfg(\n        embed_dim=(64, 128, 256, 512),\n        depths=(1, 2, 3, 1),\n        block_type=('M',) * 4,\n        stem_width=(32, 64),\n        **_rw_max_cfg(rel_pos_type='mlp'),\n    ),\n    maxvit_rmlp_tiny_rw=MaxxVitCfg(\n        embed_dim=(64, 128, 256, 512),\n        depths=(2, 2, 5, 2),\n        block_type=('M',) * 4,\n        stem_width=(32, 64),\n        **_rw_max_cfg(rel_pos_type='mlp'),\n    ),\n    maxvit_rmlp_small_rw=MaxxVitCfg(\n        embed_dim=(96, 192, 384, 768),\n        depths=(2, 2, 5, 2),\n        block_type=('M',) * 4,\n        stem_width=(32, 64),\n        **_rw_max_cfg(\n            rel_pos_type='mlp',\n            init_values=1e-6,\n        ),\n    ),\n    maxvit_rmlp_base_rw=MaxxVitCfg(\n        embed_dim=(96, 192, 384, 768),\n        depths=(2, 6, 14, 2),\n        block_type=('M',) * 4,\n        stem_width=(32, 64),\n        head_hidden_size=768,\n        **_rw_max_cfg(\n            rel_pos_type='mlp',\n        
),\n    ),\n\n    maxxvit_rmlp_nano_rw=MaxxVitCfg(\n        embed_dim=(64, 128, 256, 512),\n        depths=(1, 2, 3, 1),\n        block_type=('M',) * 4,\n        stem_width=(32, 64),\n        weight_init='normal',\n        **_next_cfg(),\n    ),\n    maxxvit_rmlp_tiny_rw=MaxxVitCfg(\n        embed_dim=(64, 128, 256, 512),\n        depths=(2, 2, 5, 2),\n        block_type=('M',) * 4,\n        stem_width=(32, 64),\n        **_next_cfg(),\n    ),\n    maxxvit_rmlp_small_rw=MaxxVitCfg(\n        embed_dim=(96, 192, 384, 768),\n        depths=(2, 2, 5, 2),\n        block_type=('M',) * 4,\n        stem_width=(48, 96),\n        **_next_cfg(),\n    ),\n\n    maxxvitv2_nano_rw=MaxxVitCfg(\n        embed_dim=(96, 192, 384, 768),\n        depths=(1, 2, 3, 1),\n        block_type=('M',) * 4,\n        stem_width=(48, 96),\n        weight_init='normal',\n        **_next_cfg(\n            no_block_attn=True,\n            rel_pos_type='bias',\n        ),\n    ),\n    maxxvitv2_rmlp_base_rw=MaxxVitCfg(\n        embed_dim=(128, 256, 512, 1024),\n        depths=(2, 6, 12, 2),\n        block_type=('M',) * 4,\n        stem_width=(64, 128),\n        **_next_cfg(\n            no_block_attn=True,\n        ),\n    ),\n    maxxvitv2_rmlp_large_rw=MaxxVitCfg(\n        embed_dim=(160, 320, 640, 1280),\n        depths=(2, 6, 16, 2),\n        block_type=('M',) * 4,\n        stem_width=(80, 160),\n        head_hidden_size=1280,\n        **_next_cfg(\n            no_block_attn=True,\n        ),\n    ),\n\n    # Trying to be like the MaxViT paper configs\n    maxvit_tiny_tf=MaxxVitCfg(\n        embed_dim=(64, 128, 256, 512),\n        depths=(2, 2, 5, 2),\n        block_type=('M',) * 4,\n        stem_width=64,\n        stem_bias=True,\n        head_hidden_size=512,\n        **_tf_cfg(),\n    ),\n    maxvit_small_tf=MaxxVitCfg(\n        embed_dim=(96, 192, 384, 768),\n        depths=(2, 2, 5, 2),\n        block_type=('M',) * 4,\n        stem_width=64,\n        stem_bias=True,\n        
head_hidden_size=768,\n        **_tf_cfg(),\n    ),\n    maxvit_base_tf=MaxxVitCfg(\n        embed_dim=(96, 192, 384, 768),\n        depths=(2, 6, 14, 2),\n        block_type=('M',) * 4,\n        stem_width=64,\n        stem_bias=True,\n        head_hidden_size=768,\n        **_tf_cfg(),\n    ),\n    maxvit_large_tf=MaxxVitCfg(\n        embed_dim=(128, 256, 512, 1024),\n        depths=(2, 6, 14, 2),\n        block_type=('M',) * 4,\n        stem_width=128,\n        stem_bias=True,\n        head_hidden_size=1024,\n        **_tf_cfg(),\n    ),\n    maxvit_xlarge_tf=MaxxVitCfg(\n        embed_dim=(192, 384, 768, 1536),\n        depths=(2, 6, 14, 2),\n        block_type=('M',) * 4,\n        stem_width=192,\n        stem_bias=True,\n        head_hidden_size=1536,\n        **_tf_cfg(),\n    ),\n)\n\n\ndef checkpoint_filter_fn(state_dict: Dict[str, torch.Tensor], model: nn.Module) -> Dict[str, torch.Tensor]:\n    \"\"\"Filter checkpoint state dict for compatibility.\"\"\"\n    model_state_dict = model.state_dict()\n    out_dict = {}\n    for k, v in state_dict.items():\n        if k.endswith('relative_position_bias_table'):\n            m = model.get_submodule(k[:-29])\n            if v.shape != m.relative_position_bias_table.shape or m.window_size[0] != m.window_size[1]:\n                v = resize_rel_pos_bias_table(\n                    v,\n                    new_window_size=m.window_size,\n                    new_bias_shape=m.relative_position_bias_table.shape,\n                )\n\n        if k in model_state_dict and v.ndim != model_state_dict[k].ndim and v.numel() == model_state_dict[k].numel():\n            # adapt between conv2d / linear layers\n            assert v.ndim in (2, 4)\n            v = v.reshape(model_state_dict[k].shape)\n        out_dict[k] = v\n    return out_dict\n\n\ndef _create_maxxvit(variant: str, cfg_variant: Optional[str] = None, pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"Create a MaxxVit model variant.\"\"\"\n    if 
cfg_variant is None:\n        if variant in model_cfgs:\n            cfg_variant = variant\n        else:\n            cfg_variant = '_'.join(variant.split('_')[:-1])\n    return build_model_with_cfg(\n        MaxxVit, variant, pretrained,\n        model_cfg=model_cfgs[cfg_variant],\n        feature_cfg=dict(flatten_sequential=True),\n        pretrained_filter_fn=checkpoint_filter_fn,\n        **kwargs)\n\n\ndef _cfg(url: str = '', **kwargs: Any) -> Dict[str, Any]:\n    \"\"\"Create a default configuration dict.\"\"\"\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.95, 'interpolation': 'bicubic',\n        'mean': (0.5, 0.5, 0.5), 'std': (0.5, 0.5, 0.5),\n        'first_conv': 'stem.conv1', 'classifier': 'head.fc',\n        'fixed_input_size': True,\n        'license': 'apache-2.0', **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    # timm specific CoAtNet configs, ImageNet-1k pretrain, fixed rel-pos\n    'coatnet_pico_rw_224.untrained': _cfg(url=''),\n    'coatnet_nano_rw_224.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/coatnet_nano_rw_224_sw-f53093b4.pth',\n        crop_pct=0.9),\n    'coatnet_0_rw_224.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/coatnet_0_rw_224_sw-a6439706.pth'),\n    'coatnet_1_rw_224.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/coatnet_1_rw_224_sw-5cae1ea8.pth'\n    ),\n\n    # timm specific CoAtNet configs, ImageNet-12k pretrain w/ 1k fine-tune, fixed rel-pos\n    'coatnet_2_rw_224.sw_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/'),\n    #'coatnet_3_rw_224.untrained': _cfg(url=''),\n\n    # Experimental CoAtNet configs w/ ImageNet-12k pretrain -> 1k 
fine-tune (different norm layers, MLP rel-pos)\n    'coatnet_rmlp_1_rw2_224.sw_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'coatnet_rmlp_2_rw_224.sw_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'coatnet_rmlp_2_rw_384.sw_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n\n    # Experimental CoAtNet configs w/ ImageNet-1k train (different norm layers, MLP rel-pos)\n    'coatnet_bn_0_rw_224.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/coatnet_bn_0_rw_224_sw-c228e218.pth',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        crop_pct=0.95),\n    'coatnet_rmlp_nano_rw_224.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/coatnet_rmlp_nano_rw_224_sw-bd1d51b3.pth',\n        crop_pct=0.9),\n    'coatnet_rmlp_0_rw_224.untrained': _cfg(url=''),\n    'coatnet_rmlp_1_rw_224.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/coatnet_rmlp_1_rw_224_sw-9051e6c3.pth'),\n    'coatnet_rmlp_2_rw_224.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/coatnet_rmlp_2_rw_224_sw-5ccfac55.pth'),\n    'coatnet_rmlp_3_rw_224.untrained': _cfg(url=''),\n    'coatnet_nano_cc_224.untrained': _cfg(url=''),\n    'coatnext_nano_rw_224.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/coatnext_nano_rw_224_ad-22cb71c2.pth',\n        crop_pct=0.9),\n\n    # ImagenNet-12k pretrain CoAtNet\n    'coatnet_2_rw_224.sw_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821),\n    
'coatnet_3_rw_224.sw_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821),\n    'coatnet_rmlp_1_rw2_224.sw_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821),\n    'coatnet_rmlp_2_rw_224.sw_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821),\n\n    # Trying to be like the CoAtNet paper configs (will adapt if 'tf' weights are ever released)\n    'coatnet_0_224.untrained': _cfg(url=''),\n    'coatnet_1_224.untrained': _cfg(url=''),\n    'coatnet_2_224.untrained': _cfg(url=''),\n    'coatnet_3_224.untrained': _cfg(url=''),\n    'coatnet_4_224.untrained': _cfg(url=''),\n    'coatnet_5_224.untrained': _cfg(url=''),\n\n    # timm specific MaxVit configs, ImageNet-1k pretrain or untrained\n    'maxvit_pico_rw_256.untrained': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),\n    'maxvit_nano_rw_256.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxvit_nano_rw_256_sw-fb127241.pth',\n        input_size=(3, 256, 256), pool_size=(8, 8)),\n    'maxvit_tiny_rw_224.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxvit_tiny_rw_224_sw-7d0dffeb.pth'),\n    'maxvit_tiny_rw_256.untrained': _cfg(\n        url='',\n        input_size=(3, 256, 256), pool_size=(8, 8)),\n    'maxvit_tiny_pm_256.untrained': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),\n\n    # timm specific MaxVit w/ MLP rel-pos, ImageNet-1k pretrain\n    'maxvit_rmlp_pico_rw_256.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxvit_rmlp_pico_rw_256_sw-8d82f2c6.pth',\n        input_size=(3, 256, 256), pool_size=(8, 8)),\n    'maxvit_rmlp_nano_rw_256.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxvit_rmlp_nano_rw_256_sw-c17bb0d6.pth',\n        input_size=(3, 256, 256), pool_size=(8, 8)),\n    'maxvit_rmlp_tiny_rw_256.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxvit_rmlp_tiny_rw_256_sw-bbef0ff5.pth',\n        input_size=(3, 256, 256), pool_size=(8, 8)),\n    'maxvit_rmlp_small_rw_224.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxvit_rmlp_small_rw_224_sw-6ef0ae4f.pth',\n        crop_pct=0.9,\n    ),\n    'maxvit_rmlp_small_rw_256.untrained': _cfg(\n        url='',\n        input_size=(3, 256, 256), pool_size=(8, 8)),\n\n    # timm specific MaxVit w/ ImageNet-12k pretrain and 1k fine-tune\n    'maxvit_rmlp_base_rw_224.sw_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'maxvit_rmlp_base_rw_384.sw_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n\n    # timm specific MaxVit w/ ImageNet-12k pretrain\n    'maxvit_rmlp_base_rw_224.sw_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n    ),\n\n    # timm MaxxViT configs (ConvNeXt conv blocks mixed with MaxVit transformer blocks)\n    'maxxvit_rmlp_nano_rw_256.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxxvit_rmlp_nano_rw_256_sw-0325d459.pth',\n        input_size=(3, 256, 256), pool_size=(8, 8)),\n    'maxxvit_rmlp_tiny_rw_256.untrained': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),\n    'maxxvit_rmlp_small_rw_256.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxxvit_rmlp_small_rw_256_sw-37e217ff.pth',\n        input_size=(3, 256, 256), pool_size=(8, 8)),\n\n    # timm MaxxViT-V2 configs (ConvNeXt conv blocks mixed with MaxVit transformer blocks, more width, no block attn)\n    'maxxvitv2_nano_rw_256.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8)),\n    'maxxvitv2_rmlp_base_rw_224.sw_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'maxxvitv2_rmlp_base_rw_384.sw_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'maxxvitv2_rmlp_large_rw_224.untrained': _cfg(url=''),\n\n    'maxxvitv2_rmlp_base_rw_224.sw_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821),\n\n    # MaxViT models ported from official Tensorflow impl\n    'maxvit_tiny_tf_224.in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n    'maxvit_tiny_tf_384.in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'maxvit_tiny_tf_512.in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512), pool_size=(16, 16), crop_pct=1.0, crop_mode='squash'),\n    'maxvit_small_tf_224.in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n    'maxvit_small_tf_384.in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'maxvit_small_tf_512.in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512), pool_size=(16, 16), crop_pct=1.0, crop_mode='squash'),\n    'maxvit_base_tf_224.in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n    'maxvit_base_tf_384.in1k': _cfg(\n        
hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'maxvit_base_tf_512.in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512), pool_size=(16, 16), crop_pct=1.0, crop_mode='squash'),\n    'maxvit_large_tf_224.in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n    'maxvit_large_tf_384.in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'maxvit_large_tf_512.in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512), pool_size=(16, 16), crop_pct=1.0, crop_mode='squash'),\n\n    'maxvit_base_tf_224.in21k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=21843),\n    'maxvit_base_tf_384.in21k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'maxvit_base_tf_512.in21k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512), pool_size=(16, 16), crop_pct=1.0, crop_mode='squash'),\n    'maxvit_large_tf_224.in21k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=21843),\n    'maxvit_large_tf_384.in21k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'maxvit_large_tf_512.in21k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512), crop_pct=1.0, crop_mode='squash'),\n    'maxvit_xlarge_tf_224.in21k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=21843),\n    'maxvit_xlarge_tf_384.in21k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),\n    'maxvit_xlarge_tf_512.in21k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512), pool_size=(16, 16), crop_pct=1.0, 
crop_mode='squash'),\n})\n\n\n@register_model\ndef coatnet_pico_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet Pico model with RW configuration.\"\"\"\n    return _create_maxxvit('coatnet_pico_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_nano_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet Nano model with RW configuration.\"\"\"\n    return _create_maxxvit('coatnet_nano_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_0_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-0 model with RW configuration.\"\"\"\n    return _create_maxxvit('coatnet_0_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_1_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-1 model with RW configuration.\"\"\"\n    return _create_maxxvit('coatnet_1_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_2_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-2 model with RW configuration.\"\"\"\n    return _create_maxxvit('coatnet_2_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_3_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-3 model with RW configuration.\"\"\"\n    return _create_maxxvit('coatnet_3_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_bn_0_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-0 model with BatchNorm and RW configuration.\"\"\"\n    return _create_maxxvit('coatnet_bn_0_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_rmlp_nano_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet Nano model with Relative Position MLP.\"\"\"\n    return _create_maxxvit('coatnet_rmlp_nano_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef 
coatnet_rmlp_0_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-0 model with Relative Position MLP.\"\"\"\n    return _create_maxxvit('coatnet_rmlp_0_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_rmlp_1_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-1 model with Relative Position MLP.\"\"\"\n    return _create_maxxvit('coatnet_rmlp_1_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_rmlp_1_rw2_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-1 model with Relative Position MLP v2.\"\"\"\n    return _create_maxxvit('coatnet_rmlp_1_rw2_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_rmlp_2_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-2 model with Relative Position MLP.\"\"\"\n    return _create_maxxvit('coatnet_rmlp_2_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_rmlp_2_rw_384(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-2 model with Relative Position MLP at 384x384.\"\"\"\n    return _create_maxxvit('coatnet_rmlp_2_rw_384', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_rmlp_3_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-3 model with Relative Position MLP.\"\"\"\n    return _create_maxxvit('coatnet_rmlp_3_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_nano_cc_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet Nano model with ConvNeXt blocks.\"\"\"\n    return _create_maxxvit('coatnet_nano_cc_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnext_nano_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoAtNeXt Nano model with RW configuration.\"\"\"\n    return _create_maxxvit('coatnext_nano_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef 
coatnet_0_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-0 model.\"\"\"\n    return _create_maxxvit('coatnet_0_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_1_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-1 model.\"\"\"\n    return _create_maxxvit('coatnet_1_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_2_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-2 model.\"\"\"\n    return _create_maxxvit('coatnet_2_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_3_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-3 model.\"\"\"\n    return _create_maxxvit('coatnet_3_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_4_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-4 model.\"\"\"\n    return _create_maxxvit('coatnet_4_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef coatnet_5_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"CoatNet-5 model.\"\"\"\n    return _create_maxxvit('coatnet_5_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_pico_rw_256(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Pico model with RW configuration.\"\"\"\n    return _create_maxxvit('maxvit_pico_rw_256', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_nano_rw_256(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Nano model with RW configuration.\"\"\"\n    return _create_maxxvit('maxvit_nano_rw_256', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_tiny_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Tiny model with RW configuration.\"\"\"\n    return _create_maxxvit('maxvit_tiny_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_tiny_rw_256(pretrained: bool = False, 
**kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Tiny model with RW configuration at 256x256.\"\"\"\n    return _create_maxxvit('maxvit_tiny_rw_256', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_rmlp_pico_rw_256(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Relative Position MLP Pico RW 256x256 model.\"\"\"\n    return _create_maxxvit('maxvit_rmlp_pico_rw_256', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_rmlp_nano_rw_256(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Relative Position MLP Nano RW 256x256 model.\"\"\"\n    return _create_maxxvit('maxvit_rmlp_nano_rw_256', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_rmlp_tiny_rw_256(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Relative Position MLP Tiny RW 256x256 model.\"\"\"\n    return _create_maxxvit('maxvit_rmlp_tiny_rw_256', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_rmlp_small_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Relative Position MLP Small RW 224x224 model.\"\"\"\n    return _create_maxxvit('maxvit_rmlp_small_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_rmlp_small_rw_256(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Small model with Relative Position MLP at 256x256.\"\"\"\n    return _create_maxxvit('maxvit_rmlp_small_rw_256', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_rmlp_base_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Base model with Relative Position MLP.\"\"\"\n    return _create_maxxvit('maxvit_rmlp_base_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_rmlp_base_rw_384(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Base model with Relative Position MLP at 384x384.\"\"\"\n    return _create_maxxvit('maxvit_rmlp_base_rw_384', pretrained=pretrained, 
**kwargs)\n\n\n@register_model\ndef maxvit_tiny_pm_256(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Tiny model with parallel blocks.\"\"\"\n    return _create_maxxvit('maxvit_tiny_pm_256', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxxvit_rmlp_nano_rw_256(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxxViT Relative Position MLP Nano RW 256x256 model.\"\"\"\n    return _create_maxxvit('maxxvit_rmlp_nano_rw_256', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxxvit_rmlp_tiny_rw_256(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxxViT Tiny model with Relative Position MLP.\"\"\"\n    return _create_maxxvit('maxxvit_rmlp_tiny_rw_256', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxxvit_rmlp_small_rw_256(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxxViT Small model with Relative Position MLP.\"\"\"\n    return _create_maxxvit('maxxvit_rmlp_small_rw_256', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxxvitv2_nano_rw_256(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxxViT-V2 Nano model.\"\"\"\n    return _create_maxxvit('maxxvitv2_nano_rw_256', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxxvitv2_rmlp_base_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxxViT-V2 Base model with Relative Position MLP.\"\"\"\n    return _create_maxxvit('maxxvitv2_rmlp_base_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxxvitv2_rmlp_base_rw_384(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxxViT-V2 Base model with Relative Position MLP at 384x384.\"\"\"\n    return _create_maxxvit('maxxvitv2_rmlp_base_rw_384', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxxvitv2_rmlp_large_rw_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxxViT-V2 Large model with Relative Position MLP.\"\"\"\n    return 
_create_maxxvit('maxxvitv2_rmlp_large_rw_224', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_tiny_tf_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Tiny model from TensorFlow.\"\"\"\n    return _create_maxxvit('maxvit_tiny_tf_224', 'maxvit_tiny_tf', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_tiny_tf_384(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Tiny model from TensorFlow at 384x384.\"\"\"\n    return _create_maxxvit('maxvit_tiny_tf_384', 'maxvit_tiny_tf', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_tiny_tf_512(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Tiny model from TensorFlow at 512x512.\"\"\"\n    return _create_maxxvit('maxvit_tiny_tf_512', 'maxvit_tiny_tf', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_small_tf_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Small model from TensorFlow.\"\"\"\n    return _create_maxxvit('maxvit_small_tf_224', 'maxvit_small_tf', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_small_tf_384(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Small model from TensorFlow at 384x384.\"\"\"\n    return _create_maxxvit('maxvit_small_tf_384', 'maxvit_small_tf', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_small_tf_512(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Small model from TensorFlow at 512x512.\"\"\"\n    return _create_maxxvit('maxvit_small_tf_512', 'maxvit_small_tf', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_base_tf_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Base model from TensorFlow.\"\"\"\n    return _create_maxxvit('maxvit_base_tf_224', 'maxvit_base_tf', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_base_tf_384(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    
\"\"\"MaxViT Base model from TensorFlow at 384x384.\"\"\"\n    return _create_maxxvit('maxvit_base_tf_384', 'maxvit_base_tf', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_base_tf_512(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Base model from TensorFlow at 512x512.\"\"\"\n    return _create_maxxvit('maxvit_base_tf_512', 'maxvit_base_tf', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_large_tf_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Large model from TensorFlow.\"\"\"\n    return _create_maxxvit('maxvit_large_tf_224', 'maxvit_large_tf', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_large_tf_384(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Large model from TensorFlow at 384x384.\"\"\"\n    return _create_maxxvit('maxvit_large_tf_384', 'maxvit_large_tf', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_large_tf_512(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT Large model from TensorFlow at 512x512.\"\"\"\n    return _create_maxxvit('maxvit_large_tf_512', 'maxvit_large_tf', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_xlarge_tf_224(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT XLarge model from TensorFlow.\"\"\"\n    return _create_maxxvit('maxvit_xlarge_tf_224', 'maxvit_xlarge_tf', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_xlarge_tf_384(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT XLarge model from TensorFlow at 384x384.\"\"\"\n    return _create_maxxvit('maxvit_xlarge_tf_384', 'maxvit_xlarge_tf', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef maxvit_xlarge_tf_512(pretrained: bool = False, **kwargs: Any) -> MaxxVit:\n    \"\"\"MaxViT XLarge model from TensorFlow at 512x512.\"\"\"\n    return _create_maxxvit('maxvit_xlarge_tf_512', 'maxvit_xlarge_tf', pretrained=pretrained, 
**kwargs)\n"
  },
  {
    "path": "timm/models/metaformer.py",
    "content": "\"\"\"\nPoolformer from MetaFormer is Actually What You Need for Vision https://arxiv.org/abs/2111.11418\n\nIdentityFormer, RandFormer, PoolFormerV2, ConvFormer, and CAFormer\nfrom MetaFormer Baselines for Vision https://arxiv.org/abs/2210.13452\n\nAll implemented models support feature extraction and variable input resolution.\n\nOriginal implementation by Weihao Yu et al.,\nadapted for timm by Fredo Guan and Ross Wightman.\n\nAdapted from https://github.com/sail-sg/metaformer, original copyright below\n\"\"\"\n\n# Copyright 2022 Garena Online Private Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom collections import OrderedDict\nfrom functools import partial\nfrom typing import List, Optional, Tuple, Union, Type\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom torch import Tensor\nfrom torch.jit import Final\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import (\n    trunc_normal_,\n    DropPath,\n    calculate_drop_path_rates,\n    SelectAdaptivePool2d,\n    GroupNorm1,\n    LayerNorm,\n    LayerNorm2d,\n    Mlp,\n    use_fused_attn,\n)\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint, checkpoint_seq\nfrom ._registry import generate_default_cfgs, register_model\n\n__all__ = ['MetaFormer']\n\n\nclass Stem(nn.Module):\n    \"\"\"\n    Stem implemented by a layer of convolution.\n    
Conv2d params constant across all models.\n    \"\"\"\n\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv = nn.Conv2d(\n            in_channels,\n            out_channels,\n            kernel_size=7,\n            stride=4,\n            padding=2,\n            **dd,\n        )\n        self.norm = norm_layer(out_channels, **dd) if norm_layer else nn.Identity()\n\n    def forward(self, x):\n        x = self.conv(x)\n        x = self.norm(x)\n        return x\n\n\nclass Downsampling(nn.Module):\n    \"\"\"\n    Downsampling implemented by a layer of convolution.\n    \"\"\"\n\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int,\n            stride: int = 1,\n            padding: int = 0,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.norm = norm_layer(in_channels, **dd) if norm_layer else nn.Identity()\n        self.conv = nn.Conv2d(\n            in_channels,\n            out_channels,\n            kernel_size=kernel_size,\n            stride=stride,\n            padding=padding,\n            **dd\n        )\n\n    def forward(self, x):\n        x = self.norm(x)\n        x = self.conv(x)\n        return x\n\n\nclass Scale(nn.Module):\n    \"\"\"\n    Scale vector by element multiplications.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            init_value: float = 1.0,\n            trainable: bool = True,\n            use_nchw: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': 
dtype}\n        super().__init__()\n        self.shape = (dim, 1, 1) if use_nchw else (dim,)\n        self.scale = nn.Parameter(init_value * torch.ones(dim, **dd), requires_grad=trainable)\n\n    def forward(self, x):\n        return x * self.scale.view(self.shape)\n\n\nclass SquaredReLU(nn.Module):\n    \"\"\"\n        Squared ReLU: https://arxiv.org/abs/2109.08668\n    \"\"\"\n\n    def __init__(self, inplace: bool = False):\n        super().__init__()\n        self.relu = nn.ReLU(inplace=inplace)\n\n    def forward(self, x):\n        return torch.square(self.relu(x))\n\n\nclass StarReLU(nn.Module):\n    \"\"\"\n    StarReLU: s * relu(x) ** 2 + b\n    \"\"\"\n\n    def __init__(\n            self,\n            scale_value: float = 1.0,\n            bias_value: float = 0.0,\n            scale_learnable: bool = True,\n            bias_learnable: bool = True,\n            mode: Optional[str] = None,\n            inplace: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.inplace = inplace\n        self.relu = nn.ReLU(inplace=inplace)\n        self.scale = nn.Parameter(scale_value * torch.ones(1, **dd), requires_grad=scale_learnable)\n        self.bias = nn.Parameter(bias_value * torch.ones(1, **dd), requires_grad=bias_learnable)\n\n    def forward(self, x):\n        return self.scale * self.relu(x) ** 2 + self.bias\n\n\nclass Attention(nn.Module):\n    \"\"\"\n    Vanilla self-attention from Transformer: https://arxiv.org/abs/1706.03762.\n    Modified from timm.\n    \"\"\"\n    fused_attn: Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            head_dim: int = 32,\n            num_heads: Optional[int] = None,\n            qkv_bias: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            proj_bias: bool = False,\n            device=None,\n            dtype=None,\n            
**kwargs
    ):
        dd = {'device': device, 'dtype': dtype}
        super().__init__()

        self.head_dim = head_dim
        self.scale = head_dim ** -0.5
        self.fused_attn = use_fused_attn()

        # heads derived from head_dim unless explicitly given; floor to at least 1
        self.num_heads = num_heads if num_heads else dim // head_dim
        if self.num_heads == 0:
            self.num_heads = 1

        self.attention_dim = self.num_heads * self.head_dim

        self.qkv = nn.Linear(dim, self.attention_dim * 3, bias=qkv_bias, **dd)
        self.attn_drop = nn.Dropout(attn_drop)
        self.proj = nn.Linear(self.attention_dim, dim, bias=proj_bias, **dd)
        self.proj_drop = nn.Dropout(proj_drop)

    def forward(self, x):
        B, N, C = x.shape
        qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)
        q, k, v = qkv.unbind(0)

        if self.fused_attn:
            x = F.scaled_dot_product_attention(
                q, k, v,
                dropout_p=self.attn_drop.p if self.training else 0.,
            )
        else:
            attn = (q @ k.transpose(-2, -1)) * self.scale
            attn = attn.softmax(dim=-1)
            attn = self.attn_drop(attn)
            x = attn @ v

        # NOTE(review): reshape uses C (input dim) not self.attention_dim — assumes
        # attention_dim == dim, which holds when head_dim evenly divides dim; verify
        # for configs where it does not.
        x = x.transpose(1, 2).reshape(B, N, C)
        x = self.proj(x)
        x = self.proj_drop(x)
        return x


# custom norm modules that disable the bias term, since the original models defs
# used a custom norm with a weight term but no bias term.

class GroupNorm1NoBias(GroupNorm1):
    def __init__(self, num_channels: int, **kwargs):
        super().__init__(num_channels, **kwargs)
        # eps re-assigned after super() so the effective default here is 1e-6,
        # overriding the parent class default unless caller passes eps explicitly
        self.eps = kwargs.get('eps', 1e-6)
        self.bias = None


class LayerNorm2dNoBias(LayerNorm2d):
    def __init__(self, num_channels: int, **kwargs):
        super().__init__(num_channels, **kwargs)
        self.eps = kwargs.get('eps', 1e-6)
        self.bias = None


class LayerNormNoBias(nn.LayerNorm):
    def __init__(self, 
num_channels: int, **kwargs):
        super().__init__(num_channels, **kwargs)
        self.eps = kwargs.get('eps', 1e-6)
        self.bias = None


class SepConv(nn.Module):
    r"""
    Inverted separable convolution from MobileNetV2: https://arxiv.org/abs/1801.04381.

    pointwise expand -> act -> depthwise conv -> act -> pointwise project.
    """

    def __init__(
            self,
            dim: int,
            expansion_ratio: float = 2,
            act1_layer: Type[nn.Module] = StarReLU,
            act2_layer: Type[nn.Module] = nn.Identity,
            bias: bool = False,
            kernel_size: int = 7,
            padding: int = 3,
            device=None,
            dtype=None,
            **kwargs
    ):
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        mid_channels = int(expansion_ratio * dim)
        self.pwconv1 = nn.Conv2d(dim, mid_channels, kernel_size=1, bias=bias, **dd)
        # StarReLU holds parameters, so it (alone) receives device/dtype kwargs
        self.act1 = act1_layer(**dd) if issubclass(act1_layer, StarReLU) else act1_layer()
        self.dwconv = nn.Conv2d(
            mid_channels,
            mid_channels,
            kernel_size=kernel_size,
            padding=padding,
            groups=mid_channels,
            bias=bias,
            **dd,
        )  # depthwise conv
        self.act2 = act2_layer(**dd) if issubclass(act2_layer, StarReLU) else act2_layer()
        self.pwconv2 = nn.Conv2d(mid_channels, dim, kernel_size=1, bias=bias, **dd)

    def forward(self, x):
        x = self.pwconv1(x)
        x = self.act1(x)
        x = self.dwconv(x)
        x = self.act2(x)
        x = self.pwconv2(x)
        return x


class Pooling(nn.Module):
    """
    Implementation of pooling for PoolFormer: https://arxiv.org/abs/2111.11418
    """

    def __init__(self, pool_size: int = 3, **kwargs):
        super().__init__()
        # stride 1 + same-padding average pool; count_include_pad=False keeps edge
        # averages unbiased
        self.pool = nn.AvgPool2d(pool_size, stride=1, padding=pool_size // 2, count_include_pad=False)

    def forward(self, x):
        y = self.pool(x)
    
    # pooled minus identity: the residual add in the enclosing block then yields
    # pure pooling as the token mixer
        return y - x


class MlpHead(nn.Module):
    """ MLP classification head

    fc1 -> act -> norm -> dropout -> fc2 (logits).
    """

    def __init__(
            self,
            dim: int,
            num_classes: int = 1000,
            mlp_ratio: float = 4,
            act_layer: Type[nn.Module] = SquaredReLU,
            norm_layer: Type[nn.Module] = LayerNorm,
            drop_rate: float = 0.,
            bias: bool = True,
            device=None,
            dtype=None,
    ):
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        hidden_features = int(mlp_ratio * dim)
        self.fc1 = nn.Linear(dim, hidden_features, bias=bias, **dd)
        self.act = act_layer()
        self.norm = norm_layer(hidden_features, **dd)
        self.fc2 = nn.Linear(hidden_features, num_classes, bias=bias, **dd)
        self.head_drop = nn.Dropout(drop_rate)

    def forward(self, x):
        x = self.fc1(x)
        x = self.act(x)
        x = self.norm(x)
        x = self.head_drop(x)
        x = self.fc2(x)
        return x


class MetaFormerBlock(nn.Module):
    """
    Implementation of one MetaFormer block.

    Two residual sub-blocks: (norm -> token_mixer) and (norm -> mlp), each with
    optional layer-scale on the branch and res-scale on the skip path.
    """

    def __init__(
            self,
            dim: int,
            token_mixer: Type[nn.Module] = Pooling,
            mlp_act: Type[nn.Module] = StarReLU,
            mlp_bias: bool = False,
            norm_layer: Type[nn.Module] = LayerNorm2d,
            proj_drop: float = 0.,
            drop_path: float = 0.,
            use_nchw: bool = True,
            layer_scale_init_value: Optional[float] = None,
            res_scale_init_value: Optional[float] = None,
            device=None,
            dtype=None,
            **kwargs
    ):
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        # partials so both sub-blocks build identically-configured Scale modules
        ls_layer = partial(Scale, dim=dim, init_value=layer_scale_init_value, use_nchw=use_nchw, **dd)
        rs_layer = partial(Scale, dim=dim, init_value=res_scale_init_value, 
use_nchw=use_nchw, **dd)

        self.norm1 = norm_layer(dim, **dd)
        self.token_mixer = token_mixer(dim=dim, proj_drop=proj_drop, **dd, **kwargs)
        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()
        self.layer_scale1 = ls_layer() if layer_scale_init_value is not None else nn.Identity()
        self.res_scale1 = rs_layer() if res_scale_init_value is not None else nn.Identity()

        self.norm2 = norm_layer(dim, **dd)
        # MLP hidden dim fixed at 4 * dim
        self.mlp = Mlp(
            dim,
            int(4 * dim),
            act_layer=mlp_act,
            bias=mlp_bias,
            drop=proj_drop,
            use_conv=use_nchw,
            **dd
        )
        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()
        self.layer_scale2 = ls_layer() if layer_scale_init_value is not None else nn.Identity()
        self.res_scale2 = rs_layer() if res_scale_init_value is not None else nn.Identity()

    def forward(self, x):
        # x = res_scale(x) + layer_scale(drop_path(mixer(norm(x)))), twice
        x = self.res_scale1(x) + \
            self.layer_scale1(
                self.drop_path1(
                    self.token_mixer(self.norm1(x))
                )
            )
        x = self.res_scale2(x) + \
            self.layer_scale2(
                self.drop_path2(
                    self.mlp(self.norm2(x))
                )
            )
        return x


class MetaFormerStage(nn.Module):
    # One stage: optional stride-2 downsample followed by `depth` MetaFormerBlocks.

    def __init__(
            self,
            in_chs: int,
            out_chs: int,
            depth: int = 2,
            token_mixer: Type[nn.Module] = nn.Identity,
            mlp_act: Type[nn.Module] = StarReLU,
            mlp_bias: bool = False,
            downsample_norm: Optional[Type[nn.Module]] = LayerNorm2d,
            norm_layer: Type[nn.Module] = LayerNorm2d,
            proj_drop: float = 0.,
            # NOTE(review): mutable default argument; harmless here since it is only
            # indexed, never mutated, but a tuple default would be safer.
            dp_rates: List[float] = [0.] 
* 2,
            layer_scale_init_value: Optional[float] = None,
            res_scale_init_value: Optional[float] = None,
            device=None,
            dtype=None,
            **kwargs,
    ):
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        self.grad_checkpointing = False
        # Attention mixers operate on (B, N, C) tokens; everything else stays NCHW
        self.use_nchw = not issubclass(token_mixer, Attention)

        # don't downsample if in_chs and out_chs are the same
        self.downsample = nn.Identity() if in_chs == out_chs else Downsampling(
            in_chs,
            out_chs,
            kernel_size=3,
            stride=2,
            padding=1,
            norm_layer=downsample_norm,
            **dd,
        )

        self.blocks = nn.Sequential(*[MetaFormerBlock(
            dim=out_chs,
            token_mixer=token_mixer,
            mlp_act=mlp_act,
            mlp_bias=mlp_bias,
            norm_layer=norm_layer,
            proj_drop=proj_drop,
            drop_path=dp_rates[i],
            layer_scale_init_value=layer_scale_init_value,
            res_scale_init_value=res_scale_init_value,
            use_nchw=self.use_nchw,
            **dd,
            **kwargs,
        ) for i in range(depth)])

    @torch.jit.ignore
    def set_grad_checkpointing(self, enable=True):
        self.grad_checkpointing = enable

    def forward(self, x: Tensor):
        x = self.downsample(x)
        B, C, H, W = x.shape

        # flatten NCHW -> (B, N, C) tokens for attention stages
        if not self.use_nchw:
            x = x.reshape(B, C, -1).transpose(1, 2)

        if self.grad_checkpointing and not torch.jit.is_scripting():
            x = checkpoint_seq(self.blocks, x)
        else:
            x = self.blocks(x)

        # restore NCHW for downstream stages / feature extraction
        if not self.use_nchw:
            x = x.transpose(1, 2).reshape(B, C, H, W)

        return x


class MetaFormer(nn.Module):
    r""" MetaFormer
        A PyTorch impl of : `MetaFormer Baselines for Vision`  -
          https://arxiv.org/abs/2210.13452

    
Args:
        in_chans (int): Number of input image channels.
        num_classes (int): Number of classes for classification head.
        global_pool: Pooling for classifier head.
        depths (list or tuple): Number of blocks at each stage.
        dims (list or tuple): Feature dimension at each stage.
        token_mixers (list, tuple or token_fcn): Token mixer for each stage.
        mlp_act: Activation layer for MLP.
        mlp_bias (boolean): Enable or disable mlp bias term.
        drop_path_rate (float): Stochastic depth rate.
        drop_rate (float): Dropout rate.
        layer_scale_init_values (list, tuple, float or None): Init value for Layer Scale.
            None means not use the layer scale. From: https://arxiv.org/abs/2103.17239.
        res_scale_init_values (list, tuple, float or None): Init value for res Scale on residual connections.
            None means not use the res scale. From: https://arxiv.org/abs/2110.09456.
        downsample_norm (nn.Module): Norm layer used in stem and downsampling layers.
        norm_layers (list, tuple or norm_fcn): Norm layers for each stage.
        output_norm: Norm layer before classifier head.
        use_mlp_head: Use MLP classification head.
    """

    def __init__(
            self,
            in_chans: int = 3,
            num_classes: int = 1000,
            global_pool: str = 'avg',
            depths: Tuple[int, ...] = (2, 2, 6, 2),
            dims: Tuple[int, ...] 
= (64, 128, 320, 512),
            token_mixers: Union[Type[nn.Module], List[Type[nn.Module]]] = Pooling,
            mlp_act: Type[nn.Module] = StarReLU,
            mlp_bias: bool = False,
            drop_path_rate: float = 0.,
            proj_drop_rate: float = 0.,
            drop_rate: float = 0.0,
            layer_scale_init_values: Optional[Union[float, List[float]]] = None,
            res_scale_init_values: Union[Tuple[Optional[float], ...], List[Optional[float]]] = (None, None, 1.0, 1.0),
            downsample_norm: Optional[Type[nn.Module]] = LayerNorm2dNoBias,
            norm_layers: Union[Type[nn.Module], List[Type[nn.Module]]] = LayerNorm2dNoBias,
            output_norm: Type[nn.Module] = LayerNorm2d,
            use_mlp_head: bool = True,
            device=None,
            dtype=None,
            **kwargs,
    ):
        super().__init__()
        dd = {'device': device, 'dtype': dtype}
        # Bind dd kwargs to activation layers that need them
        if mlp_act in (StarReLU,):
            mlp_act = partial(mlp_act, **dd)

        self.num_classes = num_classes
        self.in_chans = in_chans
        self.num_features = dims[-1]
        self.drop_rate = drop_rate
        self.use_mlp_head = use_mlp_head
        # NOTE(review): len(depths) is taken BEFORE the scalar->list conversion below,
        # so a bare-int `depths` would raise TypeError here — confirm whether scalar
        # depths is actually a supported input.
        self.num_stages = len(depths)

        # convert everything to lists if they aren't indexable
        if not isinstance(depths, (list, tuple)):
            depths = [depths]  # it means the model has only one stage
        if not isinstance(dims, (list, tuple)):
            dims = [dims]
        if not isinstance(token_mixers, (list, tuple)):
            token_mixers = [token_mixers] * self.num_stages
        if not isinstance(norm_layers, (list, tuple)):
            norm_layers = [norm_layers] * self.num_stages
        if not isinstance(layer_scale_init_values, (list, tuple)):
            layer_scale_init_values = [layer_scale_init_values] * self.num_stages
        if not 
isinstance(res_scale_init_values, (list, tuple)):
            res_scale_init_values = [res_scale_init_values] * self.num_stages

        self.grad_checkpointing = False
        self.feature_info = []

        self.stem = Stem(
            in_chans,
            dims[0],
            norm_layer=downsample_norm,
            **dd,
        )

        stages = []
        prev_dim = dims[0]
        # per-stage stochastic depth schedule
        dp_rates = calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)
        for i in range(self.num_stages):
            stages += [MetaFormerStage(
                prev_dim,
                dims[i],
                depth=depths[i],
                token_mixer=token_mixers[i],
                mlp_act=mlp_act,
                mlp_bias=mlp_bias,
                proj_drop=proj_drop_rate,
                dp_rates=dp_rates[i],
                layer_scale_init_value=layer_scale_init_values[i],
                res_scale_init_value=res_scale_init_values[i],
                downsample_norm=downsample_norm,
                norm_layer=norm_layers[i],
                **dd,
                **kwargs,
            )]
            prev_dim = dims[i]
            # stem reduces by 4, each stage by 2 -> reduction 2**(i+2)
            self.feature_info += [dict(num_chs=dims[i], reduction=2**(i+2), module=f'stages.{i}')]

        self.stages = nn.Sequential(*stages)

        # if using MlpHead, dropout is handled by MlpHead
        if num_classes > 0:
            if self.use_mlp_head:
                # FIXME not actually returning mlp hidden state right now as pre-logits.
                final = MlpHead(self.num_features, num_classes, drop_rate=self.drop_rate, **dd)
                self.head_hidden_size = self.num_features
            else:
                final = nn.Linear(self.num_features, num_classes, **dd)
                self.head_hidden_size = self.num_features
        else:
            # NOTE(review): head_hidden_size is left unset on this path — verify no
            # caller reads it on a headless model.
            final = nn.Identity()

        self.head = nn.Sequential(OrderedDict([
            ('global_pool', 
SelectAdaptivePool2d(pool_type=global_pool)),
            ('norm', output_norm(self.num_features, **dd)),
            ('flatten', nn.Flatten(1) if global_pool else nn.Identity()),
            ('drop', nn.Dropout(drop_rate) if self.use_mlp_head else nn.Identity()),
            ('fc', final)
        ]))

        self.apply(self._init_weights)

    def _init_weights(self, m):
        # truncated normal for conv/linear weights, zero bias
        if isinstance(m, (nn.Conv2d, nn.Linear)):
            trunc_normal_(m.weight, std=.02)
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)

    @torch.jit.ignore
    def set_grad_checkpointing(self, enable=True):
        self.grad_checkpointing = enable
        for stage in self.stages:
            stage.set_grad_checkpointing(enable=enable)

    @torch.jit.ignore
    def get_classifier(self) -> nn.Module:
        return self.head.fc

    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None, device=None, dtype=None):
        # Rebuild classifier (and optionally pooling) in place; mirrors the
        # head-construction logic in __init__.
        dd = {'device': device, 'dtype': dtype}
        self.num_classes = num_classes
        if global_pool is not None:
            self.head.global_pool = SelectAdaptivePool2d(pool_type=global_pool)
            self.head.flatten = nn.Flatten(1) if global_pool else nn.Identity()
        if num_classes > 0:
            if self.use_mlp_head:
                final = MlpHead(self.num_features, num_classes, drop_rate=self.drop_rate, **dd)
            else:
                final = nn.Linear(self.num_features, num_classes, **dd)
        else:
            final = nn.Identity()
        self.head.fc = final

    def forward_intermediates(
            self,
            x: torch.Tensor,
            indices: Optional[Union[int, List[int]]] = None,
            norm: bool = False,
            stop_early: bool = False,
            output_fmt: str = 'NCHW',
            intermediates_only: bool = False,
    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:
        
""" Forward features that returns intermediates.

        Args:
            x: Input image tensor
            indices: Take last n blocks if int, all if None, select matching indices if sequence
            norm: Apply norm layer to compatible intermediates
            stop_early: Stop iterating over blocks when last desired intermediate hit
            output_fmt: Shape of intermediate feature outputs
            intermediates_only: Only return intermediate features
        Returns:

        """
        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'
        # NOTE(review): the `norm` argument is accepted but not applied anywhere in
        # this body — confirm intended behavior against other timm models.
        intermediates = []
        take_indices, max_index = feature_take_indices(len(self.stages), indices)

        # forward pass
        x = self.stem(x)
        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript
            stages = self.stages
        else:
            stages = self.stages[:max_index + 1]

        for feat_idx, stage in enumerate(stages):
            if self.grad_checkpointing and not torch.jit.is_scripting():
                x = checkpoint(stage, x)
            else:
                x = stage(x)
            if feat_idx in take_indices:
                intermediates.append(x)

        if intermediates_only:
            return intermediates

        return x, intermediates

    def prune_intermediate_layers(
            self,
            indices: Union[int, List[int]] = 1,
            prune_norm: bool = False,
            prune_head: bool = True,
    ):
        """ Prune layers not required for specified intermediates.
        """
        # NOTE(review): prune_norm is accepted but not used in this body.
        take_indices, max_index = feature_take_indices(len(self.stages), indices)
        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0
        if prune_head:
            self.reset_classifier(0, '')
        return take_indices

    def forward_head(self, x: Tensor, pre_logits: bool = False):
        # NOTE nn.Sequential in head 
broken down since can't call head[:-1](x) in torchscript :(\n        x = self.head.global_pool(x)\n        x = self.head.norm(x)\n        x = self.head.flatten(x)\n        x = self.head.drop(x)\n        return x if pre_logits else self.head.fc(x)\n\n    def forward_features(self, x: Tensor):\n        x = self.stem(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stages, x)\n        else:\n            x = self.stages(x)\n        return x\n\n    def forward(self, x: Tensor):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\n# this works but it's long and breaks backwards compatibility with weights from the poolformer-only impl\ndef checkpoint_filter_fn(state_dict, model):\n    if 'stem.conv.weight' in state_dict:\n        return state_dict\n\n    import re\n    out_dict = {}\n    is_poolformerv1 = 'network.0.0.mlp.fc1.weight' in state_dict\n    model_state_dict = model.state_dict()\n    for k, v in state_dict.items():\n        if is_poolformerv1:\n            k = re.sub(r'layer_scale_([0-9]+)', r'layer_scale\\1.scale', k)\n            k = k.replace('network.1', 'downsample_layers.1')\n            k = k.replace('network.3', 'downsample_layers.2')\n            k = k.replace('network.5', 'downsample_layers.3')\n            k = k.replace('network.2', 'network.1')\n            k = k.replace('network.4', 'network.2')\n            k = k.replace('network.6', 'network.3')\n            k = k.replace('network', 'stages')\n\n        k = re.sub(r'downsample_layers.([0-9]+)', r'stages.\\1.downsample', k)\n        k = k.replace('downsample.proj', 'downsample.conv')\n        k = k.replace('patch_embed.proj', 'patch_embed.conv')\n        k = re.sub(r'([0-9]+).([0-9]+)', r'\\1.blocks.\\2', k)\n        k = k.replace('stages.0.downsample', 'patch_embed')\n        k = k.replace('patch_embed', 'stem')\n        k = k.replace('post_norm', 'norm')\n        k = k.replace('pre_norm', 
'norm')\n        k = re.sub(r'^head', 'head.fc', k)\n        k = re.sub(r'^norm', 'head.norm', k)\n\n        if v.shape != model_state_dict[k] and v.numel() == model_state_dict[k].numel():\n            v = v.reshape(model_state_dict[k].shape)\n\n        out_dict[k] = v\n    return out_dict\n\n\ndef _create_metaformer(variant, pretrained=False, **kwargs):\n    default_out_indices = tuple(i for i, _ in enumerate(kwargs.get('depths', (2, 2, 6, 2))))\n    out_indices = kwargs.pop('out_indices', default_out_indices)\n\n    model = build_model_with_cfg(\n        MetaFormer,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        **kwargs,\n    )\n\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 1.0, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'classifier': 'head.fc', 'first_conv': 'stem.conv',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'poolformer_s12.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.9),\n    'poolformer_s24.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.9),\n    'poolformer_s36.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.9),\n    'poolformer_m36.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95),\n    'poolformer_m48.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95),\n\n    'poolformerv2_s12.sail_in1k': _cfg(hf_hub_id='timm/'),\n    'poolformerv2_s24.sail_in1k': _cfg(hf_hub_id='timm/'),\n    'poolformerv2_s36.sail_in1k': _cfg(hf_hub_id='timm/'),\n    'poolformerv2_m36.sail_in1k': _cfg(hf_hub_id='timm/'),\n    'poolformerv2_m48.sail_in1k': _cfg(hf_hub_id='timm/'),\n\n    
# convformer/caformer variants use an MlpHead, so the classifier key is head.fc.fc2
    'convformer_s18.sail_in1k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2'),
    'convformer_s18.sail_in1k_384': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', input_size=(3, 384, 384), pool_size=(12, 12)),
    'convformer_s18.sail_in22k_ft_in1k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2'),
    'convformer_s18.sail_in22k_ft_in1k_384': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', input_size=(3, 384, 384), pool_size=(12, 12)),
    'convformer_s18.sail_in22k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', num_classes=21841),

    'convformer_s36.sail_in1k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2'),
    'convformer_s36.sail_in1k_384': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', input_size=(3, 384, 384), pool_size=(12, 12)),
    'convformer_s36.sail_in22k_ft_in1k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2'),
    'convformer_s36.sail_in22k_ft_in1k_384': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', input_size=(3, 384, 384), pool_size=(12, 12)),
    'convformer_s36.sail_in22k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', num_classes=21841),

    'convformer_m36.sail_in1k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2'),
    'convformer_m36.sail_in1k_384': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', input_size=(3, 384, 384), pool_size=(12, 12)),
    'convformer_m36.sail_in22k_ft_in1k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2'),
    'convformer_m36.sail_in22k_ft_in1k_384': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', input_size=(3, 384, 384), pool_size=(12, 12)),
    'convformer_m36.sail_in22k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', num_classes=21841),

    'convformer_b36.sail_in1k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2'),
    'convformer_b36.sail_in1k_384': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', input_size=(3, 384, 384), pool_size=(12, 12)),
    'convformer_b36.sail_in22k_ft_in1k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2'),
    'convformer_b36.sail_in22k_ft_in1k_384': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', input_size=(3, 384, 384), pool_size=(12, 12)),
    'convformer_b36.sail_in22k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', num_classes=21841),

    'caformer_s18.sail_in1k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2'),
    'caformer_s18.sail_in1k_384': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', input_size=(3, 384, 384), pool_size=(12, 12)),
    'caformer_s18.sail_in22k_ft_in1k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2'),
    'caformer_s18.sail_in22k_ft_in1k_384': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', input_size=(3, 384, 384), pool_size=(12, 12)),
    'caformer_s18.sail_in22k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', num_classes=21841),

    'caformer_s36.sail_in1k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2'),
    'caformer_s36.sail_in1k_384': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', input_size=(3, 384, 384), pool_size=(12, 12)),
    'caformer_s36.sail_in22k_ft_in1k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2'),
    'caformer_s36.sail_in22k_ft_in1k_384': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', input_size=(3, 384, 384), pool_size=(12, 12)),
    'caformer_s36.sail_in22k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', num_classes=21841),

    'caformer_m36.sail_in1k': _cfg(
        hf_hub_id='timm/',
        
classifier='head.fc.fc2'),
    'caformer_m36.sail_in1k_384': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', input_size=(3, 384, 384), pool_size=(12, 12)),
    'caformer_m36.sail_in22k_ft_in1k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2'),
    'caformer_m36.sail_in22k_ft_in1k_384': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', input_size=(3, 384, 384), pool_size=(12, 12)),
    'caformer_m36.sail_in22k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', num_classes=21841),

    'caformer_b36.sail_in1k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2'),
    'caformer_b36.sail_in1k_384': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', input_size=(3, 384, 384), pool_size=(12, 12)),
    'caformer_b36.sail_in22k_ft_in1k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2'),
    'caformer_b36.sail_in22k_ft_in1k_384': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', input_size=(3, 384, 384), pool_size=(12, 12)),
    'caformer_b36.sail_in22k': _cfg(
        hf_hub_id='timm/',
        classifier='head.fc.fc2', num_classes=21841),
})


# poolformer v1 models: GELU MLP w/ bias, GroupNorm1, layer scale, no norm in
# downsample layers, plain linear head
@register_model
def poolformer_s12(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[2, 2, 6, 2],
        dims=[64, 128, 320, 512],
        downsample_norm=None,
        mlp_act=nn.GELU,
        mlp_bias=True,
        norm_layers=GroupNorm1,
        layer_scale_init_values=1e-5,
        res_scale_init_values=None,
        use_mlp_head=False,
        **kwargs)
    return _create_metaformer('poolformer_s12', pretrained=pretrained, **model_kwargs)


@register_model
def poolformer_s24(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[4, 4, 12, 4],
        dims=[64, 128, 320, 512],
        downsample_norm=None,
        mlp_act=nn.GELU,
        mlp_bias=True,
        norm_layers=GroupNorm1,
        layer_scale_init_values=1e-5,
        res_scale_init_values=None,
        use_mlp_head=False,
        **kwargs)
    return _create_metaformer('poolformer_s24', pretrained=pretrained, **model_kwargs)


@register_model
def poolformer_s36(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[6, 6, 18, 6],
        dims=[64, 128, 320, 512],
        downsample_norm=None,
        mlp_act=nn.GELU,
        mlp_bias=True,
        norm_layers=GroupNorm1,
        layer_scale_init_values=1e-6,
        res_scale_init_values=None,
        use_mlp_head=False,
        **kwargs)
    return _create_metaformer('poolformer_s36', pretrained=pretrained, **model_kwargs)


@register_model
def poolformer_m36(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[6, 6, 18, 6],
        dims=[96, 192, 384, 768],
        downsample_norm=None,
        mlp_act=nn.GELU,
        mlp_bias=True,
        norm_layers=GroupNorm1,
        layer_scale_init_values=1e-6,
        res_scale_init_values=None,
        use_mlp_head=False,
        **kwargs)
    return _create_metaformer('poolformer_m36', pretrained=pretrained, **model_kwargs)


@register_model
def poolformer_m48(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[8, 8, 24, 8],
        dims=[96, 192, 384, 768],
        downsample_norm=None,
        mlp_act=nn.GELU,
        mlp_bias=True,
        norm_layers=GroupNorm1,
        layer_scale_init_values=1e-6,
        res_scale_init_values=None,
        use_mlp_head=False,
        **kwargs)
    return _create_metaformer('poolformer_m48', pretrained=pretrained, **model_kwargs)


# poolformer v2 models: StarReLU defaults + bias-free GroupNorm
@register_model
def poolformerv2_s12(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[2, 2, 6, 2],
        dims=[64, 128, 320, 512],
        norm_layers=GroupNorm1NoBias,
        use_mlp_head=False,
        
**kwargs)
    return _create_metaformer('poolformerv2_s12', pretrained=pretrained, **model_kwargs)


@register_model
def poolformerv2_s24(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[4, 4, 12, 4],
        dims=[64, 128, 320, 512],
        norm_layers=GroupNorm1NoBias,
        use_mlp_head=False,
        **kwargs)
    return _create_metaformer('poolformerv2_s24', pretrained=pretrained, **model_kwargs)


@register_model
def poolformerv2_s36(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[6, 6, 18, 6],
        dims=[64, 128, 320, 512],
        norm_layers=GroupNorm1NoBias,
        use_mlp_head=False,
        **kwargs)
    return _create_metaformer('poolformerv2_s36', pretrained=pretrained, **model_kwargs)


@register_model
def poolformerv2_m36(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[6, 6, 18, 6],
        dims=[96, 192, 384, 768],
        norm_layers=GroupNorm1NoBias,
        use_mlp_head=False,
        **kwargs)
    return _create_metaformer('poolformerv2_m36', pretrained=pretrained, **model_kwargs)


@register_model
def poolformerv2_m48(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[8, 8, 24, 8],
        dims=[96, 192, 384, 768],
        norm_layers=GroupNorm1NoBias,
        use_mlp_head=False,
        **kwargs)
    return _create_metaformer('poolformerv2_m48', pretrained=pretrained, **model_kwargs)


# convformer models: SepConv token mixer in every stage
@register_model
def convformer_s18(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[3, 3, 9, 3],
        dims=[64, 128, 320, 512],
        token_mixers=SepConv,
        norm_layers=LayerNorm2dNoBias,
        **kwargs)
    return _create_metaformer('convformer_s18', pretrained=pretrained, **model_kwargs)


@register_model
def convformer_s36(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[3, 12, 18, 3],
        dims=[64, 128, 320, 512],
        token_mixers=SepConv,
        norm_layers=LayerNorm2dNoBias,
        **kwargs)
    return _create_metaformer('convformer_s36', pretrained=pretrained, **model_kwargs)


@register_model
def convformer_m36(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[3, 12, 18, 3],
        dims=[96, 192, 384, 576],
        token_mixers=SepConv,
        norm_layers=LayerNorm2dNoBias,
        **kwargs)
    return _create_metaformer('convformer_m36', pretrained=pretrained, **model_kwargs)


@register_model
def convformer_b36(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[3, 12, 18, 3],
        dims=[128, 256, 512, 768],
        token_mixers=SepConv,
        norm_layers=LayerNorm2dNoBias,
        **kwargs)
    return _create_metaformer('convformer_b36', pretrained=pretrained, **model_kwargs)


# caformer models: SepConv in the first two stages, Attention in the last two
@register_model
def caformer_s18(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[3, 3, 9, 3],
        dims=[64, 128, 320, 512],
        token_mixers=[SepConv, SepConv, Attention, Attention],
        norm_layers=[LayerNorm2dNoBias] * 2 + [LayerNormNoBias] * 2,
        **kwargs)
    return _create_metaformer('caformer_s18', pretrained=pretrained, **model_kwargs)


@register_model
def caformer_s36(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[3, 12, 18, 3],
        dims=[64, 128, 320, 512],
        token_mixers=[SepConv, SepConv, Attention, Attention],
        norm_layers=[LayerNorm2dNoBias] * 2 + [LayerNormNoBias] * 2,
        **kwargs)
    return _create_metaformer('caformer_s36', pretrained=pretrained, **model_kwargs)


@register_model
def caformer_m36(pretrained=False, **kwargs) -> MetaFormer:
    model_kwargs = dict(
        depths=[3, 12, 18, 3],
        dims=[96, 192, 384, 576],
        token_mixers=[SepConv, SepConv, Attention, 
Attention],\n        norm_layers=[LayerNorm2dNoBias] * 2 + [LayerNormNoBias] * 2,\n        **kwargs)\n    return _create_metaformer('caformer_m36', pretrained=pretrained, **model_kwargs)\n\n\n@register_model\ndef caformer_b36(pretrained=False, **kwargs) -> MetaFormer:\n    model_kwargs = dict(\n        depths=[3, 12, 18, 3],\n        dims=[128, 256, 512, 768],\n        token_mixers=[SepConv, SepConv, Attention, Attention],\n        norm_layers=[LayerNorm2dNoBias] * 2 + [LayerNormNoBias] * 2,\n        **kwargs)\n    return _create_metaformer('caformer_b36', pretrained=pretrained, **model_kwargs)\n"
  },
  {
    "path": "timm/models/mlp_mixer.py",
    "content": "\"\"\" MLP-Mixer, ResMLP, and gMLP in PyTorch\n\nThis impl originally based on MLP-Mixer paper.\n\nOfficial JAX impl: https://github.com/google-research/vision_transformer/blob/linen/vit_jax/models_mixer.py\n\nPaper: 'MLP-Mixer: An all-MLP Architecture for Vision' - https://arxiv.org/abs/2105.01601\n\n@article{tolstikhin2021,\n  title={MLP-Mixer: An all-MLP Architecture for Vision},\n  author={Tolstikhin, Ilya and Houlsby, Neil and Kolesnikov, Alexander and Beyer, Lucas and Zhai, Xiaohua and Unterthiner,\n        Thomas and Yung, Jessica and Keysers, Daniel and Uszkoreit, Jakob and Lucic, Mario and Dosovitskiy, Alexey},\n  journal={arXiv preprint arXiv:2105.01601},\n  year={2021}\n}\n\nAlso supporting ResMlp, and a preliminary (not verified) implementations of gMLP\n\nCode: https://github.com/facebookresearch/deit\nPaper: `ResMLP: Feedforward networks for image classification...` - https://arxiv.org/abs/2105.03404\n@misc{touvron2021resmlp,\n      title={ResMLP: Feedforward networks for image classification with data-efficient training},\n      author={Hugo Touvron and Piotr Bojanowski and Mathilde Caron and Matthieu Cord and Alaaeldin El-Nouby and\n        Edouard Grave and Armand Joulin and Gabriel Synnaeve and Jakob Verbeek and Hervé Jégou},\n      year={2021},\n      eprint={2105.03404},\n}\n\nPaper: `Pay Attention to MLPs` - https://arxiv.org/abs/2105.08050\n@misc{liu2021pay,\n      title={Pay Attention to MLPs},\n      author={Hanxiao Liu and Zihang Dai and David R. So and Quoc V. 
Le},\n      year={2021},\n      eprint={2105.08050},\n}\n\nA thank you to paper authors for releasing code and weights.\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nimport math\nfrom functools import partial\nfrom typing import Any, Dict, List, Optional, Type, Union, Tuple\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import PatchEmbed, Mlp, GluMlp, GatedMlp, DropPath, lecun_normal_, to_2tuple\n\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import named_apply, checkpoint, checkpoint_seq\nfrom ._registry import generate_default_cfgs, register_model, register_model_deprecations\n\n__all__ = ['MixerBlock', 'MlpMixer']  # model_registry will add each entrypoint fn to this\n\n\nclass MixerBlock(nn.Module):\n    \"\"\"Residual Block w/ token mixing and channel MLPs.\n\n    Based on: 'MLP-Mixer: An all-MLP Architecture for Vision' - https://arxiv.org/abs/2105.01601\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            seq_len: int,\n            mlp_ratio: Union[float, Tuple[float, float]] = (0.5, 4.0),\n            mlp_layer: Type[nn.Module] = Mlp,\n            norm_layer: Type[nn.Module] = partial(nn.LayerNorm, eps=1e-6),\n            act_layer: Type[nn.Module] = nn.GELU,\n            drop: float = 0.,\n            drop_path: float = 0.,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize MixerBlock.\n\n        Args:\n            dim: Dimension of input features.\n            seq_len: Sequence length.\n            mlp_ratio: Expansion ratios for token mixing and channel MLPs.\n            mlp_layer: MLP layer class.\n            norm_layer: Normalization layer.\n            act_layer: Activation layer.\n            drop: Dropout rate.\n            drop_path: Drop path rate.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n     
   super().__init__()\n        tokens_dim, channels_dim = [int(x * dim) for x in to_2tuple(mlp_ratio)]\n        self.norm1 = norm_layer(dim, **dd)\n        self.mlp_tokens = mlp_layer(seq_len, tokens_dim, act_layer=act_layer, drop=drop, **dd)\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp_channels = mlp_layer(dim, channels_dim, act_layer=act_layer, drop=drop, **dd)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\"\"\"\n        x = x + self.drop_path(self.mlp_tokens(self.norm1(x).transpose(1, 2)).transpose(1, 2))\n        x = x + self.drop_path(self.mlp_channels(self.norm2(x)))\n        return x\n\n\nclass Affine(nn.Module):\n    \"\"\"Affine transformation layer.\"\"\"\n\n    def __init__(self, dim: int, device=None, dtype=None) -> None:\n        \"\"\"Initialize Affine layer.\n\n        Args:\n            dim: Dimension of features.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.alpha = nn.Parameter(torch.ones((1, 1, dim), **dd))\n        self.beta = nn.Parameter(torch.zeros((1, 1, dim), **dd))\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Apply affine transformation.\"\"\"\n        return torch.addcmul(self.beta, self.alpha, x)\n\n\nclass ResBlock(nn.Module):\n    \"\"\"Residual MLP block w/ LayerScale and Affine 'norm'.\n\n    Based on: `ResMLP: Feedforward networks for image classification...` - https://arxiv.org/abs/2105.03404\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            seq_len: int,\n            mlp_ratio: float = 4,\n            mlp_layer: Type[nn.Module] = Mlp,\n            norm_layer: Type[nn.Module] = Affine,\n            act_layer: Type[nn.Module] = nn.GELU,\n            init_values: float = 1e-4,\n            drop: float = 0.,\n            drop_path: float = 0.,\n            device=None,\n  
          dtype=None,\n    ) -> None:\n        \"\"\"Initialize ResBlock.\n\n        Args:\n            dim: Dimension of input features.\n            seq_len: Sequence length.\n            mlp_ratio: Channel MLP expansion ratio.\n            mlp_layer: MLP layer class.\n            norm_layer: Normalization layer.\n            act_layer: Activation layer.\n            init_values: Initial values for layer scale.\n            drop: Dropout rate.\n            drop_path: Drop path rate.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        channel_dim = int(dim * mlp_ratio)\n        self.norm1 = norm_layer(dim, **dd)\n        self.linear_tokens = nn.Linear(seq_len, seq_len, **dd)\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp_channels = mlp_layer(dim, channel_dim, act_layer=act_layer, drop=drop, **dd)\n        self.ls1 = nn.Parameter(init_values * torch.ones(dim, **dd))\n        self.ls2 = nn.Parameter(init_values * torch.ones(dim, **dd))\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\"\"\"\n        x = x + self.drop_path(self.ls1 * self.linear_tokens(self.norm1(x).transpose(1, 2)).transpose(1, 2))\n        x = x + self.drop_path(self.ls2 * self.mlp_channels(self.norm2(x)))\n        return x\n\n\nclass SpatialGatingUnit(nn.Module):\n    \"\"\"Spatial Gating Unit.\n\n    Based on: `Pay Attention to MLPs` - https://arxiv.org/abs/2105.08050\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            seq_len: int,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize Spatial Gating Unit.\n\n        Args:\n            dim: Dimension of input features.\n            seq_len: Sequence length.\n            norm_layer: Normalization layer.\n        \"\"\"\n        dd = 
{'device': device, 'dtype': dtype}\n        super().__init__()\n        gate_dim = dim // 2\n        self.norm = norm_layer(gate_dim, **dd)\n        self.proj = nn.Linear(seq_len, seq_len, **dd)\n\n    def init_weights(self) -> None:\n        \"\"\"Initialize weights for projection gate.\"\"\"\n        # special init for the projection gate, called as override by base model init\n        nn.init.normal_(self.proj.weight, std=1e-6)\n        nn.init.ones_(self.proj.bias)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Apply spatial gating.\"\"\"\n        u, v = x.chunk(2, dim=-1)\n        v = self.norm(v)\n        v = self.proj(v.transpose(-1, -2))\n        return u * v.transpose(-1, -2)\n\n\nclass SpatialGatingBlock(nn.Module):\n    \"\"\"Residual Block w/ Spatial Gating.\n\n    Based on: `Pay Attention to MLPs` - https://arxiv.org/abs/2105.08050\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            seq_len: int,\n            mlp_ratio: float = 4,\n            mlp_layer: Type[nn.Module] = GatedMlp,\n            norm_layer: Type[nn.Module] = partial(nn.LayerNorm, eps=1e-6),\n            act_layer: Type[nn.Module] = nn.GELU,\n            drop: float = 0.,\n            drop_path: float = 0.,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize SpatialGatingBlock.\n\n        Args:\n            dim: Dimension of input features.\n            seq_len: Sequence length.\n            mlp_ratio: Channel MLP expansion ratio.\n            mlp_layer: MLP layer class.\n            norm_layer: Normalization layer.\n            act_layer: Activation layer.\n            drop: Dropout rate.\n            drop_path: Drop path rate.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        channel_dim = int(dim * mlp_ratio)\n        self.norm = norm_layer(dim, **dd)\n        sgu = partial(SpatialGatingUnit, seq_len=seq_len, **dd)\n        
self.mlp_channels = mlp_layer(\n            dim,\n            channel_dim,\n            act_layer=act_layer,\n            gate_layer=sgu,\n            drop=drop,\n            **dd,\n        )\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\"\"\"\n        x = x + self.drop_path(self.mlp_channels(self.norm(x)))\n        return x\n\n\nclass MlpMixer(nn.Module):\n    \"\"\"MLP-Mixer model architecture.\n\n    Based on: 'MLP-Mixer: An all-MLP Architecture for Vision' - https://arxiv.org/abs/2105.01601\n    \"\"\"\n\n    def __init__(\n            self,\n            num_classes: int = 1000,\n            img_size: int = 224,\n            in_chans: int = 3,\n            patch_size: int = 16,\n            num_blocks: int = 8,\n            embed_dim: int = 512,\n            mlp_ratio: Union[float, Tuple[float, float]] = (0.5, 4.0),\n            block_layer: Type[nn.Module] = MixerBlock,\n            mlp_layer: Type[nn.Module] = Mlp,\n            norm_layer: Type[nn.Module] = partial(nn.LayerNorm, eps=1e-6),\n            act_layer: Type[nn.Module] = nn.GELU,\n            drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            nlhb: bool = False,\n            stem_norm: bool = False,\n            global_pool: str = 'avg',\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize MLP-Mixer.\n\n        Args:\n            num_classes: Number of classes for classification.\n            img_size: Input image size.\n            in_chans: Number of input channels.\n            patch_size: Patch size.\n            num_blocks: Number of mixer blocks.\n            embed_dim: Embedding dimension.\n            mlp_ratio: MLP expansion ratio(s).\n            block_layer: Block layer class.\n            mlp_layer: MLP layer class.\n            norm_layer: Normalization 
layer.\n            act_layer: Activation layer.\n            drop_rate: Head dropout rate.\n            proj_drop_rate: Projection dropout rate.\n            drop_path_rate: Drop path rate.\n            nlhb: Use negative log bias initialization.\n            stem_norm: Apply normalization to stem.\n            global_pool: Global pooling type.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.num_features = self.head_hidden_size = self.embed_dim = embed_dim  # for consistency with other models\n        self.grad_checkpointing = False\n\n        self.stem = PatchEmbed(\n            img_size=img_size,\n            patch_size=patch_size,\n            in_chans=in_chans,\n            embed_dim=embed_dim,\n            norm_layer=norm_layer if stem_norm else None,\n            **dd,\n        )\n        reduction = self.stem.feat_ratio() if hasattr(self.stem, 'feat_ratio') else patch_size\n        # FIXME drop_path (stochastic depth scaling rule or all the same?)\n        self.blocks = nn.Sequential(*[\n            block_layer(\n                embed_dim,\n                self.stem.num_patches,\n                mlp_ratio,\n                mlp_layer=mlp_layer,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                drop=proj_drop_rate,\n                drop_path=drop_path_rate,\n                **dd,\n            )\n            for _ in range(num_blocks)])\n        self.feature_info = [\n            dict(module=f'blocks.{i}', num_chs=embed_dim, reduction=reduction) for i in range(num_blocks)]\n        self.norm = norm_layer(embed_dim, **dd)\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = nn.Linear(embed_dim, self.num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n        self.init_weights(nlhb=nlhb)\n\n    @torch.jit.ignore\n    def 
init_weights(self, nlhb: bool = False) -> None:\n        \"\"\"Initialize model weights.\n\n        Args:\n            nlhb: Use negative log bias initialization for head.\n        \"\"\"\n        head_bias = -math.log(self.num_classes) if nlhb else 0.\n        named_apply(partial(_init_weights, head_bias=head_bias), module=self)  # depth-first\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        \"\"\"Create regex patterns for parameter grouping.\n\n        Args:\n            coarse: Use coarse grouping.\n\n        Returns:\n            Dictionary mapping group names to regex patterns.\n        \"\"\"\n        return dict(\n            stem=r'^stem',  # stem and embed\n            blocks=[(r'^blocks\\.(\\d+)', None), (r'^norm', (99999,))]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Enable or disable gradient checkpointing.\n\n        Args:\n            enable: Whether to enable gradient checkpointing.\n        \"\"\"\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        \"\"\"Get the classifier module.\"\"\"\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None) -> None:\n        \"\"\"Reset the classifier head.\n\n        Args:\n            num_classes: Number of classes for new classifier.\n            global_pool: Global pooling type.\n        \"\"\"\n        self.num_classes = num_classes\n        if global_pool is not None:\n            assert global_pool in ('', 'avg')\n            self.global_pool = global_pool\n        device, dtype = self.head.weight.device, self.head.weight.dtype if hasattr(self.head, 'weight') else (None, None)\n        self.head = nn.Linear(self.embed_dim, num_classes, device=device, dtype=dtype) if num_classes > 0 else nn.Identity()\n\n    def forward_intermediates(\n            self,\n    
        x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\"Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor.\n            indices: Take last n blocks if int, all if None, select matching indices if sequence.\n            norm: Apply norm layer to all intermediates.\n            stop_early: Stop iterating over blocks when last desired intermediate hit.\n            output_fmt: Shape of intermediate feature outputs ('NCHW' or 'NLC').\n            intermediates_only: Only return intermediate features.\n\n        Returns:\n            List of intermediate features or tuple of (final features, intermediates).\n        \"\"\"\n        assert output_fmt in ('NCHW', 'NLC'), 'Output format must be one of NCHW or NLC.'\n        reshape = output_fmt == 'NCHW'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n\n        # forward pass\n        B, _, height, width = x.shape\n        x = self.stem(x)\n\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            blocks = self.blocks\n        else:\n            blocks = self.blocks[:max_index + 1]\n        for i, blk in enumerate(blocks):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x)\n            else:\n                x = blk(x)\n            if i in take_indices:\n                # normalize intermediates with final norm layer if enabled\n                intermediates.append(self.norm(x) if norm else x)\n\n        # process intermediates\n        if reshape:\n            # reshape to BCHW output format\n            H, W = 
self.stem.dynamic_feat_size((height, width))\n            intermediates = [y.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous() for y in intermediates]\n\n        if intermediates_only:\n            return intermediates\n\n        x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ) -> List[int]:\n        \"\"\"Prune layers not required for specified intermediates.\n\n        Args:\n            indices: Indices of intermediate layers to keep.\n            prune_norm: Whether to prune normalization layer.\n            prune_head: Whether to prune the classifier head.\n\n        Returns:\n            List of indices that were kept.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n        self.blocks = self.blocks[:max_index + 1]  # truncate blocks\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through feature extraction layers.\"\"\"\n        x = self.stem(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        \"\"\"Forward pass through classifier head.\n\n        Args:\n            x: Feature tensor.\n            pre_logits: Return features before final classifier.\n\n        Returns:\n            Output tensor.\n        \"\"\"\n        if self.global_pool == 'avg':\n            x = x.mean(dim=1)\n        x = self.head_drop(x)\n        return x if pre_logits else 
self.head(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\"\"\"\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _init_weights(module: nn.Module, name: str, head_bias: float = 0., flax: bool = False) -> None:\n    \"\"\"Mixer weight initialization (trying to match Flax defaults).\n\n    Args:\n        module: Module to initialize.\n        name: Module name.\n        head_bias: Bias value for head layer.\n        flax: Use Flax-style initialization.\n    \"\"\"\n    if isinstance(module, nn.Linear):\n        if name.startswith('head'):\n            nn.init.zeros_(module.weight)\n            nn.init.constant_(module.bias, head_bias)\n        else:\n            if flax:\n                # Flax defaults\n                lecun_normal_(module.weight)\n                if module.bias is not None:\n                    nn.init.zeros_(module.bias)\n            else:\n                # like MLP init in vit (my original init)\n                nn.init.xavier_uniform_(module.weight)\n                if module.bias is not None:\n                    if 'mlp' in name:\n                        nn.init.normal_(module.bias, std=1e-6)\n                    else:\n                        nn.init.zeros_(module.bias)\n    elif isinstance(module, nn.Conv2d):\n        lecun_normal_(module.weight)\n        if module.bias is not None:\n            nn.init.zeros_(module.bias)\n    elif isinstance(module, (nn.LayerNorm, nn.BatchNorm2d, nn.GroupNorm)):\n        nn.init.ones_(module.weight)\n        nn.init.zeros_(module.bias)\n    elif hasattr(module, 'init_weights'):\n        # NOTE if a parent module contains init_weights method, it can override the init of the\n        # child modules as this will be called in depth-first order.\n        module.init_weights()\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    \"\"\" Remap checkpoints if needed \"\"\"\n    if 'patch_embed.proj.weight' in 
state_dict:\n        # Remap FB ResMlp models -> timm\n        out_dict = {}\n        for k, v in state_dict.items():\n            k = k.replace('patch_embed.', 'stem.')\n            k = k.replace('attn.', 'linear_tokens.')\n            k = k.replace('mlp.', 'mlp_channels.')\n            k = k.replace('gamma_', 'ls')\n            if k.endswith('.alpha') or k.endswith('.beta'):\n                v = v.reshape(1, 1, -1)\n            out_dict[k] = v\n        return out_dict\n    return state_dict\n\n\ndef _create_mixer(variant, pretrained=False, **kwargs) -> MlpMixer:\n    out_indices = kwargs.pop('out_indices', 3)\n    model = build_model_with_cfg(\n        MlpMixer,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n    return model\n\n\ndef _cfg(url='', **kwargs) -> Dict[str, Any]:\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': 0.875, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': (0.5, 0.5, 0.5), 'std': (0.5, 0.5, 0.5),\n        'first_conv': 'stem.proj', 'classifier': 'head',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'mixer_s32_224.untrained': _cfg(),\n    'mixer_s16_224.untrained': _cfg(),\n    'mixer_b32_224.untrained': _cfg(),\n    'mixer_b16_224.goog_in21k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_mixer_b16_224-76587d61.pth',\n    ),\n    'mixer_b16_224.goog_in21k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_mixer_b16_224_in21k-617b3de2.pth',\n        num_classes=21843\n    ),\n    'mixer_l32_224.untrained': _cfg(),\n    'mixer_l16_224.goog_in21k_ft_in1k': _cfg(\n    
    hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_mixer_l16_224-92f9adc4.pth',\n    ),\n    'mixer_l16_224.goog_in21k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_mixer_l16_224_in21k-846aa33c.pth',\n        num_classes=21843\n    ),\n\n    # Mixer ImageNet-21K-P pretraining\n    'mixer_b16_224.miil_in21k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/mixer_b16_224_miil_in21k-2a558a71.pth',\n        mean=(0., 0., 0.), std=(1., 1., 1.), crop_pct=0.875, interpolation='bilinear', num_classes=11221,\n    ),\n    'mixer_b16_224.miil_in21k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/mixer_b16_224_miil-9229a591.pth',\n        mean=(0., 0., 0.), std=(1., 1., 1.), crop_pct=0.875, interpolation='bilinear',\n    ),\n\n    'gmixer_12_224.untrained': _cfg(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n    'gmixer_24_224.ra3_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/gmixer_24_224_raa-7daf7ae6.pth',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n\n    'resmlp_12_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/resmlp_12_no_dist.pth',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n    'resmlp_24_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/resmlp_24_no_dist.pth',\n        #url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resmlp_24_224_raa-a8256759.pth',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n    'resmlp_36_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n 
       url='https://dl.fbaipublicfiles.com/deit/resmlp_36_no_dist.pth',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n    'resmlp_big_24_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/resmlpB_24_no_dist.pth',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n\n    'resmlp_12_224.fb_distilled_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/resmlp_12_dist.pth',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n    'resmlp_24_224.fb_distilled_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/resmlp_24_dist.pth',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n    'resmlp_36_224.fb_distilled_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/resmlp_36_dist.pth',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n    'resmlp_big_24_224.fb_distilled_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/resmlpB_24_dist.pth',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n\n    'resmlp_big_24_224.fb_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/resmlpB_24_22k.pth',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n\n    'resmlp_12_224.fb_dino': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/resmlp_12_dino.pth',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n    'resmlp_24_224.fb_dino': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/deit/resmlp_24_dino.pth',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),\n\n    'gmlp_ti16_224.untrained': _cfg(),\n    'gmlp_s16_224.ra3_in1k': _cfg(\n        hf_hub_id='timm/',\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/gmlp_s16_224_raa-10536d42.pth',\n    ),\n    'gmlp_b16_224.untrained': _cfg(),\n})\n\n\n@register_model\ndef mixer_s32_224(pretrained=False, **kwargs) -> MlpMixer:\n    \"\"\" Mixer-S/32 224x224\n    Paper: 'MLP-Mixer: An all-MLP Architecture for Vision' - https://arxiv.org/abs/2105.01601\n    \"\"\"\n    model_args = dict(patch_size=32, num_blocks=8, embed_dim=512, **kwargs)\n    model = _create_mixer('mixer_s32_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef mixer_s16_224(pretrained=False, **kwargs) -> MlpMixer:\n    \"\"\" Mixer-S/16 224x224\n    Paper:  'MLP-Mixer: An all-MLP Architecture for Vision' - https://arxiv.org/abs/2105.01601\n    \"\"\"\n    model_args = dict(patch_size=16, num_blocks=8, embed_dim=512, **kwargs)\n    model = _create_mixer('mixer_s16_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef mixer_b32_224(pretrained=False, **kwargs) -> MlpMixer:\n    \"\"\" Mixer-B/32 224x224\n    Paper:  'MLP-Mixer: An all-MLP Architecture for Vision' - https://arxiv.org/abs/2105.01601\n    \"\"\"\n    model_args = dict(patch_size=32, num_blocks=12, embed_dim=768, **kwargs)\n    model = _create_mixer('mixer_b32_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef mixer_b16_224(pretrained=False, **kwargs) -> MlpMixer:\n    \"\"\" Mixer-B/16 224x224. 
ImageNet-1k pretrained weights.\n    Paper:  'MLP-Mixer: An all-MLP Architecture for Vision' - https://arxiv.org/abs/2105.01601\n    \"\"\"\n    model_args = dict(patch_size=16, num_blocks=12, embed_dim=768, **kwargs)\n    model = _create_mixer('mixer_b16_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef mixer_l32_224(pretrained=False, **kwargs) -> MlpMixer:\n    \"\"\" Mixer-L/32 224x224.\n    Paper:  'MLP-Mixer: An all-MLP Architecture for Vision' - https://arxiv.org/abs/2105.01601\n    \"\"\"\n    model_args = dict(patch_size=32, num_blocks=24, embed_dim=1024, **kwargs)\n    model = _create_mixer('mixer_l32_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef mixer_l16_224(pretrained=False, **kwargs) -> MlpMixer:\n    \"\"\" Mixer-L/16 224x224. ImageNet-1k pretrained weights.\n    Paper:  'MLP-Mixer: An all-MLP Architecture for Vision' - https://arxiv.org/abs/2105.01601\n    \"\"\"\n    model_args = dict(patch_size=16, num_blocks=24, embed_dim=1024, **kwargs)\n    model = _create_mixer('mixer_l16_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef gmixer_12_224(pretrained=False, **kwargs) -> MlpMixer:\n    \"\"\" Glu-Mixer-12 224x224\n    Experiment by Ross Wightman, adding SwiGLU to MLP-Mixer\n    \"\"\"\n    model_args = dict(\n        patch_size=16, num_blocks=12, embed_dim=384, mlp_ratio=(1.0, 4.0),\n        mlp_layer=GluMlp, act_layer=nn.SiLU, **kwargs)\n    model = _create_mixer('gmixer_12_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef gmixer_24_224(pretrained=False, **kwargs) -> MlpMixer:\n    \"\"\" Glu-Mixer-24 224x224\n    Experiment by Ross Wightman, adding SwiGLU to MLP-Mixer\n    \"\"\"\n    model_args = dict(\n        patch_size=16, num_blocks=24, embed_dim=384, mlp_ratio=(1.0, 4.0),\n        mlp_layer=GluMlp, act_layer=nn.SiLU, **kwargs)\n    model = _create_mixer('gmixer_24_224', pretrained=pretrained, 
**model_args)\n    return model\n\n\n@register_model\ndef resmlp_12_224(pretrained=False, **kwargs) -> MlpMixer:\n    \"\"\" ResMLP-12\n    Paper: `ResMLP: Feedforward networks for image classification...` - https://arxiv.org/abs/2105.03404\n    \"\"\"\n    model_args = dict(\n        patch_size=16, num_blocks=12, embed_dim=384, mlp_ratio=4, block_layer=ResBlock, norm_layer=Affine, **kwargs)\n    model = _create_mixer('resmlp_12_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef resmlp_24_224(pretrained=False, **kwargs) -> MlpMixer:\n    \"\"\" ResMLP-24\n    Paper: `ResMLP: Feedforward networks for image classification...` - https://arxiv.org/abs/2105.03404\n    \"\"\"\n    model_args = dict(\n        patch_size=16, num_blocks=24, embed_dim=384, mlp_ratio=4,\n        block_layer=partial(ResBlock, init_values=1e-5), norm_layer=Affine, **kwargs)\n    model = _create_mixer('resmlp_24_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef resmlp_36_224(pretrained=False, **kwargs) -> MlpMixer:\n    \"\"\" ResMLP-36\n    Paper: `ResMLP: Feedforward networks for image classification...` - https://arxiv.org/abs/2105.03404\n    \"\"\"\n    model_args = dict(\n        patch_size=16, num_blocks=36, embed_dim=384, mlp_ratio=4,\n        block_layer=partial(ResBlock, init_values=1e-6), norm_layer=Affine, **kwargs)\n    model = _create_mixer('resmlp_36_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef resmlp_big_24_224(pretrained=False, **kwargs) -> MlpMixer:\n    \"\"\" ResMLP-B-24\n    Paper: `ResMLP: Feedforward networks for image classification...` - https://arxiv.org/abs/2105.03404\n    \"\"\"\n    model_args = dict(\n        patch_size=8, num_blocks=24, embed_dim=768, mlp_ratio=4,\n        block_layer=partial(ResBlock, init_values=1e-6), norm_layer=Affine, **kwargs)\n    model = _create_mixer('resmlp_big_24_224', pretrained=pretrained, **model_args)\n    return 
model\n\n\n@register_model\ndef gmlp_ti16_224(pretrained=False, **kwargs) -> MlpMixer:\n    \"\"\" gMLP-Tiny\n    Paper: `Pay Attention to MLPs` - https://arxiv.org/abs/2105.08050\n    \"\"\"\n    model_args = dict(\n        patch_size=16, num_blocks=30, embed_dim=128, mlp_ratio=6, block_layer=SpatialGatingBlock,\n        mlp_layer=GatedMlp, **kwargs)\n    model = _create_mixer('gmlp_ti16_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef gmlp_s16_224(pretrained=False, **kwargs) -> MlpMixer:\n    \"\"\" gMLP-Small\n    Paper: `Pay Attention to MLPs` - https://arxiv.org/abs/2105.08050\n    \"\"\"\n    model_args = dict(\n        patch_size=16, num_blocks=30, embed_dim=256, mlp_ratio=6, block_layer=SpatialGatingBlock,\n        mlp_layer=GatedMlp, **kwargs)\n    model = _create_mixer('gmlp_s16_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef gmlp_b16_224(pretrained=False, **kwargs) -> MlpMixer:\n    \"\"\" gMLP-Base\n    Paper: `Pay Attention to MLPs` - https://arxiv.org/abs/2105.08050\n    \"\"\"\n    model_args = dict(\n        patch_size=16, num_blocks=30, embed_dim=512, mlp_ratio=6, block_layer=SpatialGatingBlock,\n        mlp_layer=GatedMlp, **kwargs)\n    model = _create_mixer('gmlp_b16_224', pretrained=pretrained, **model_args)\n    return model\n\n\nregister_model_deprecations(__name__, {\n    'mixer_b16_224_in21k': 'mixer_b16_224.goog_in21k_ft_in1k',\n    'mixer_l16_224_in21k': 'mixer_l16_224.goog_in21k_ft_in1k',\n    'mixer_b16_224_miil': 'mixer_b16_224.miil_in21k_ft_in1k',\n    'mixer_b16_224_miil_in21k': 'mixer_b16_224.miil_in21k',\n    'resmlp_12_distilled_224': 'resmlp_12_224.fb_distilled_in1k',\n    'resmlp_24_distilled_224': 'resmlp_24_224.fb_distilled_in1k',\n    'resmlp_36_distilled_224': 'resmlp_36_224.fb_distilled_in1k',\n    'resmlp_big_24_distilled_224': 'resmlp_big_24_224.fb_distilled_in1k',\n    'resmlp_big_24_224_in22ft1k': 'resmlp_big_24_224.fb_in22k_ft_in1k',\n    
'resmlp_12_224_dino': 'resmlp_12_224',\n    'resmlp_24_224_dino': 'resmlp_24_224',\n})\n"
  },
  {
    "path": "timm/models/mobilenetv3.py",
    "content": "\"\"\" MobileNet V3\n\nA PyTorch impl of MobileNet-V3, compatible with TF weights from official impl.\n\nPaper: Searching for MobileNetV3 - https://arxiv.org/abs/1905.02244\n\nHacked together by / Copyright 2019, Ross Wightman\n\"\"\"\nfrom functools import partial\nfrom typing import Any, Dict, Callable, List, Optional, Tuple, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD\nfrom timm.layers import SelectAdaptivePool2d, Linear, LayerType, PadType, create_conv2d, get_norm_act_layer\nfrom ._builder import build_model_with_cfg, pretrained_cfg_for_features\nfrom ._efficientnet_blocks import SqueezeExcite\nfrom ._efficientnet_builder import BlockArgs, EfficientNetBuilder, decode_arch_def, efficientnet_init_weights, \\\n    round_channels, resolve_bn_args, resolve_act_layer, BN_EPS_TF_DEFAULT\nfrom ._features import FeatureInfo, FeatureHooks, feature_take_indices\nfrom ._manipulate import checkpoint_seq, checkpoint\nfrom ._registry import generate_default_cfgs, register_model, register_model_deprecations\n\n__all__ = ['MobileNetV3', 'MobileNetV3Features']\n\n\nclass MobileNetV3(nn.Module):\n    \"\"\"MobileNetV3.\n\n    Based on my EfficientNet implementation and building blocks, this model utilizes the MobileNet-v3 specific\n    'efficient head', where global pooling is done before the head convolution without a final batch-norm\n    layer before the classifier.\n\n    Paper: `Searching for MobileNetV3` - https://arxiv.org/abs/1905.02244\n\n    Other architectures utilizing MobileNet-V3 efficient head that are supported by this impl include:\n      * HardCoRe-NAS - https://arxiv.org/abs/2102.11646 (defn in hardcorenas.py uses this class)\n      * FBNet-V3 - https://arxiv.org/abs/2006.02049\n      * LCNet - https://arxiv.org/abs/2109.15099\n      * MobileNet-V4 - https://arxiv.org/abs/2404.10518\n    
\"\"\"\n\n    def __init__(\n            self,\n            block_args: BlockArgs,\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            stem_size: int = 16,\n            fix_stem: bool = False,\n            num_features: int = 1280,\n            head_bias: bool = True,\n            head_norm: bool = False,\n            pad_type: str = '',\n            act_layer: Optional[LayerType] = None,\n            norm_layer: Optional[LayerType] = None,\n            aa_layer: Optional[LayerType] = None,\n            se_layer: Optional[LayerType] = None,\n            se_from_exp: bool = True,\n            round_chs_fn: Callable = round_channels,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            layer_scale_init_value: Optional[float] = None,\n            global_pool: str = 'avg',\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize MobileNetV3.\n\n        Args:\n            block_args: Arguments for blocks of the network.\n            num_classes: Number of classes for classification head.\n            in_chans: Number of input image channels.\n            stem_size: Number of output channels of the initial stem convolution.\n            fix_stem: If True, don't scale stem by round_chs_fn.\n            num_features: Number of output channels of the conv head layer.\n            head_bias: If True, add a learnable bias to the conv head layer.\n            head_norm: If True, add normalization to the head layer.\n            pad_type: Type of padding to use for convolution layers.\n            act_layer: Type of activation layer.\n            norm_layer: Type of normalization layer.\n            aa_layer: Type of anti-aliasing layer.\n            se_layer: Type of Squeeze-and-Excite layer.\n            se_from_exp: If True, calculate SE channel reduction from expanded mid channels.\n            round_chs_fn: Callable to round number of filters based on depth multiplier.\n         
   drop_rate: Dropout rate.\n            drop_path_rate: Stochastic depth rate.\n            layer_scale_init_value: Enable layer scale on compatible blocks if not None.\n            global_pool: Type of pooling to use for global pooling features of the FC head.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        act_layer = act_layer or nn.ReLU\n        norm_layer = norm_layer or nn.BatchNorm2d\n        norm_act_layer = get_norm_act_layer(norm_layer, act_layer)\n        se_layer = se_layer or SqueezeExcite\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n\n        # Stem\n        if not fix_stem:\n            stem_size = round_chs_fn(stem_size)\n        self.conv_stem = create_conv2d(in_chans, stem_size, 3, stride=2, padding=pad_type, **dd)\n        self.bn1 = norm_act_layer(stem_size, inplace=True, **dd)\n\n        # Middle stages (IR/ER/DS Blocks)\n        builder = EfficientNetBuilder(\n            output_stride=32,\n            pad_type=pad_type,\n            round_chs_fn=round_chs_fn,\n            se_from_exp=se_from_exp,\n            act_layer=act_layer,\n            norm_layer=norm_layer,\n            aa_layer=aa_layer,\n            se_layer=se_layer,\n            drop_path_rate=drop_path_rate,\n            layer_scale_init_value=layer_scale_init_value,\n            **dd,\n        )\n        self.blocks = nn.Sequential(*builder(stem_size, block_args))\n        self.feature_info = builder.features\n        self.stage_ends = [f['stage'] for f in self.feature_info]\n        self.num_features = builder.in_chs  # features of last stage, output of forward_features()\n        self.head_hidden_size = num_features  # features of conv_head, pre_logits output\n\n        # Head + Pooling\n        self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)\n        num_pooled_chs = self.num_features * 
self.global_pool.feat_mult()\n        if head_norm:\n            # mobilenet-v4 post-pooling PW conv is followed by a norm+act layer\n            self.conv_head = create_conv2d(\n                num_pooled_chs,\n                self.head_hidden_size,\n                1,\n                padding=pad_type,\n                bias=False,  #  never a bias\n                **dd,\n            )\n            self.norm_head = norm_act_layer(self.head_hidden_size, **dd)\n            self.act2 = nn.Identity()\n        else:\n            # mobilenet-v3 and others only have an activation after final PW conv\n            self.conv_head = create_conv2d(\n                num_pooled_chs,\n                self.head_hidden_size,\n                1,\n                padding=pad_type,\n                bias=head_bias,\n                **dd,\n            )\n            self.norm_head = nn.Identity()\n            self.act2 = act_layer(inplace=True)\n        self.flatten = nn.Flatten(1) if global_pool else nn.Identity()  # don't flatten if pooling disabled\n        self.classifier = Linear(self.head_hidden_size, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n        efficientnet_init_weights(self)\n\n    def as_sequential(self) -> nn.Sequential:\n        \"\"\"Convert model to sequential form.\n\n        Returns:\n            Sequential module containing all layers.\n        \"\"\"\n        layers = [self.conv_stem, self.bn1]\n        layers.extend(self.blocks)\n        layers.extend([self.global_pool, self.conv_head, self.norm_head, self.act2])\n        layers.extend([nn.Flatten(), nn.Dropout(self.drop_rate), self.classifier])\n        return nn.Sequential(*layers)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        \"\"\"Group parameters for optimization.\"\"\"\n        return dict(\n            stem=r'^conv_stem|bn1',\n            blocks=r'^blocks\\.(\\d+)' if coarse else r'^blocks\\.(\\d+)\\.(\\d+)'\n        )\n\n    
@torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Enable or disable gradient checkpointing.\"\"\"\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        \"\"\"Get the classifier head.\"\"\"\n        return self.classifier\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg') -> None:\n        \"\"\"Reset the classifier head.\n\n        Args:\n            num_classes: Number of classes for new classifier.\n            global_pool: Global pooling type.\n        \"\"\"\n        self.num_classes = num_classes\n        # NOTE: cannot meaningfully change pooling of efficient head after creation\n        self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)\n        self.flatten = nn.Flatten(1) if global_pool else nn.Identity()  # don't flatten if pooling disabled\n        self.classifier = Linear(self.head_hidden_size, num_classes) if num_classes > 0 else nn.Identity()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n            extra_blocks: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n            extra_blocks: Include outputs of all blocks and 
head conv in output, does not align with feature_info\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        if stop_early:\n            assert intermediates_only, 'Must use intermediates_only for early stopping.'\n        intermediates = []\n        if extra_blocks:\n            take_indices, max_index = feature_take_indices(len(self.blocks) + 1, indices)\n        else:\n            take_indices, max_index = feature_take_indices(len(self.stage_ends), indices)\n            take_indices = [self.stage_ends[i] for i in take_indices]\n            max_index = self.stage_ends[max_index]\n\n        # forward pass\n        feat_idx = 0  # stem is index 0\n        x = self.conv_stem(x)\n        x = self.bn1(x)\n        if feat_idx in take_indices:\n            intermediates.append(x)\n\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            blocks = self.blocks\n        else:\n            blocks = self.blocks[:max_index]\n        for feat_idx, blk in enumerate(blocks, start=1):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint_seq(blk, x)\n            else:\n                x = blk(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n            extra_blocks: bool = False,\n    ) -> List[int]:\n        \"\"\"Prune layers not required for specified intermediates.\n\n        Args:\n            indices: Indices of intermediate layers to keep.\n            prune_norm: Whether to prune normalization layer.\n            prune_head: Whether to prune the classifier head.\n            extra_blocks: Include 
outputs of all blocks.\n\n        Returns:\n            List of indices that were kept.\n        \"\"\"\n        if extra_blocks:\n            take_indices, max_index = feature_take_indices(len(self.blocks) + 1, indices)\n        else:\n            take_indices, max_index = feature_take_indices(len(self.stage_ends), indices)\n            max_index = self.stage_ends[max_index]\n        self.blocks = self.blocks[:max_index]  # truncate blocks w/ stem as idx 0\n        if max_index < len(self.blocks):\n            self.conv_head = nn.Identity()\n            self.norm_head = nn.Identity()\n        if prune_head:\n            self.conv_head = nn.Identity()\n            self.norm_head = nn.Identity()\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through feature extraction layers.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Feature tensor.\n        \"\"\"\n        x = self.conv_stem(x)\n        x = self.bn1(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x, flatten=True)\n        else:\n            x = self.blocks(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        \"\"\"Forward pass through classifier head.\n\n        Args:\n            x: Input features.\n            pre_logits: Return features before final linear layer.\n\n        Returns:\n            Classification logits or features.\n        \"\"\"\n        x = self.global_pool(x)\n        x = self.conv_head(x)\n        x = self.norm_head(x)\n        x = self.act2(x)\n        x = self.flatten(x)\n        if self.drop_rate > 0.:\n            x = F.dropout(x, p=self.drop_rate, training=self.training)\n        if pre_logits:\n            return x\n        return self.classifier(x)\n\n    def forward(self, x: torch.Tensor) -> 
torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Output logits.\n        \"\"\"\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\nclass MobileNetV3Features(nn.Module):\n    \"\"\"MobileNetV3 Feature Extractor.\n\n    A work-in-progress feature extraction module for MobileNet-V3 to use as a backbone for segmentation\n    and object detection models.\n    \"\"\"\n\n    def __init__(\n            self,\n            block_args: BlockArgs,\n            out_indices: Tuple[int, ...] = (0, 1, 2, 3, 4),\n            feature_location: str = 'bottleneck',\n            in_chans: int = 3,\n            stem_size: int = 16,\n            fix_stem: bool = False,\n            output_stride: int = 32,\n            pad_type: PadType = '',\n            round_chs_fn: Callable = round_channels,\n            se_from_exp: bool = True,\n            act_layer: Optional[LayerType] = None,\n            norm_layer: Optional[LayerType] = None,\n            aa_layer: Optional[LayerType] = None,\n            se_layer: Optional[LayerType] = None,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            layer_scale_init_value: Optional[float] = None,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize MobileNetV3Features.\n\n        Args:\n            block_args: Arguments for blocks of the network.\n            out_indices: Output from stages at indices.\n            feature_location: Location of feature before/after each block, must be in ['bottleneck', 'expansion'].\n            in_chans: Number of input image channels.\n            stem_size: Number of output channels of the initial stem convolution.\n            fix_stem: If True, don't scale stem by round_chs_fn.\n            output_stride: Output stride of the network.\n            pad_type: Type of padding to use for convolution layers.\n            
round_chs_fn: Callable to round number of filters based on depth multiplier.\n            se_from_exp: If True, calculate SE channel reduction from expanded mid channels.\n            act_layer: Type of activation layer.\n            norm_layer: Type of normalization layer.\n            aa_layer: Type of anti-aliasing layer.\n            se_layer: Type of Squeeze-and-Excite layer.\n            drop_rate: Dropout rate.\n            drop_path_rate: Stochastic depth rate.\n            layer_scale_init_value: Enable layer scale on compatible blocks if not None.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        act_layer = act_layer or nn.ReLU\n        norm_layer = norm_layer or nn.BatchNorm2d\n        se_layer = se_layer or SqueezeExcite\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n\n        # Stem\n        if not fix_stem:\n            stem_size = round_chs_fn(stem_size)\n        self.conv_stem = create_conv2d(in_chans, stem_size, 3, stride=2, padding=pad_type, **dd)\n        self.bn1 = norm_layer(stem_size, **dd)\n        self.act1 = act_layer(inplace=True)\n\n        # Middle stages (IR/ER/DS Blocks)\n        builder = EfficientNetBuilder(\n            output_stride=output_stride,\n            pad_type=pad_type,\n            round_chs_fn=round_chs_fn,\n            se_from_exp=se_from_exp,\n            act_layer=act_layer,\n            norm_layer=norm_layer,\n            aa_layer=aa_layer,\n            se_layer=se_layer,\n            drop_path_rate=drop_path_rate,\n            layer_scale_init_value=layer_scale_init_value,\n            feature_location=feature_location,\n            **dd,\n        )\n        self.blocks = nn.Sequential(*builder(stem_size, block_args))\n        self.feature_info = FeatureInfo(builder.features, out_indices)\n        self._stage_out_idx = {f['stage']: f['index'] for f in self.feature_info.get_dicts()}\n\n        
efficientnet_init_weights(self)\n\n        # Register feature extraction hooks with FeatureHooks helper\n        self.feature_hooks = None\n        if feature_location != 'bottleneck':\n            hooks = self.feature_info.get_dicts(keys=('module', 'hook_type'))\n            self.feature_hooks = FeatureHooks(hooks, self.named_modules())\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Enable or disable gradient checkpointing.\"\"\"\n        self.grad_checkpointing = enable\n\n    def forward(self, x: torch.Tensor) -> List[torch.Tensor]:\n        \"\"\"Forward pass through feature extraction.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            List of feature tensors.\n        \"\"\"\n        x = self.conv_stem(x)\n        x = self.bn1(x)\n        x = self.act1(x)\n        if self.feature_hooks is None:\n            features = []\n            if 0 in self._stage_out_idx:\n                features.append(x)  # add stem out\n            for i, b in enumerate(self.blocks):\n                if self.grad_checkpointing and not torch.jit.is_scripting():\n                    x = checkpoint(b, x)\n                else:\n                    x = b(x)\n                if i + 1 in self._stage_out_idx:\n                    features.append(x)\n            return features\n        else:\n            self.blocks(x)\n            out = self.feature_hooks.get_output(x.device)\n            return list(out.values())\n\n\ndef _create_mnv3(variant: str, pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\"Create a MobileNetV3 model.\n\n    Args:\n        variant: Model variant name.\n        pretrained: Load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    Returns:\n        MobileNetV3 model instance.\n    \"\"\"\n    features_mode = ''\n    model_cls = MobileNetV3\n    kwargs_filter = None\n    if kwargs.pop('features_only', False):\n        if 'feature_cfg' in kwargs 
or 'feature_cls' in kwargs:\n            features_mode = 'cfg'\n        else:\n            kwargs_filter = ('num_classes', 'num_features', 'head_conv', 'head_bias', 'head_norm', 'global_pool')\n            model_cls = MobileNetV3Features\n            features_mode = 'cls'\n\n    model = build_model_with_cfg(\n        model_cls,\n        variant,\n        pretrained,\n        features_only=features_mode == 'cfg',\n        pretrained_strict=features_mode != 'cls',\n        kwargs_filter=kwargs_filter,\n        **kwargs,\n    )\n    if features_mode == 'cls':\n        model.default_cfg = pretrained_cfg_for_features(model.default_cfg)\n    return model\n\n\ndef _gen_mobilenet_v3_rw(\n        variant: str, channel_multiplier: float = 1.0, pretrained: bool = False, **kwargs\n) -> MobileNetV3:\n    \"\"\"Creates a MobileNet-V3 model.\n\n    Ref impl: ?\n    Paper: https://arxiv.org/abs/1905.02244\n\n    Args:\n        variant: Model variant name.\n        channel_multiplier: Multiplier to number of channels per layer.\n        pretrained: Load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    Returns:\n        MobileNetV3 model instance.\n    \"\"\"\n    arch_def = [\n        # stage 0, 112x112 in\n        ['ds_r1_k3_s1_e1_c16_nre_noskip'],  # relu\n        # stage 1, 112x112 in\n        ['ir_r1_k3_s2_e4_c24_nre', 'ir_r1_k3_s1_e3_c24_nre'],  # relu\n        # stage 2, 56x56 in\n        ['ir_r3_k5_s2_e3_c40_se0.25_nre'],  # relu\n        # stage 3, 28x28 in\n        ['ir_r1_k3_s2_e6_c80', 'ir_r1_k3_s1_e2.5_c80', 'ir_r2_k3_s1_e2.3_c80'],  # hard-swish\n        # stage 4, 14x14in\n        ['ir_r2_k3_s1_e6_c112_se0.25'],  # hard-swish\n        # stage 5, 14x14in\n        ['ir_r3_k5_s2_e6_c160_se0.25'],  # hard-swish\n        # stage 6, 7x7 in\n        ['cn_r1_k1_s1_c960'],  # hard-swish\n    ]\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def),\n        head_bias=False,\n        round_chs_fn=partial(round_channels, 
multiplier=channel_multiplier),\n        norm_layer=partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        act_layer=resolve_act_layer(kwargs, 'hard_swish'),\n        se_layer=partial(SqueezeExcite, gate_layer='hard_sigmoid'),\n        **kwargs,\n    )\n    model = _create_mnv3(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_mobilenet_v3(\n        variant: str,\n        channel_multiplier: float = 1.0,\n        depth_multiplier: float = 1.0,\n        group_size: Optional[int] = None,\n        pretrained: bool = False,\n        **kwargs\n) -> MobileNetV3:\n    \"\"\"Creates a MobileNet-V3 model.\n\n    Ref impl: ?\n    Paper: https://arxiv.org/abs/1905.02244\n\n    Args:\n        variant: Model variant name.\n        channel_multiplier: Multiplier to number of channels per layer.\n        depth_multiplier: Depth multiplier for model scaling.\n        group_size: Group size for grouped convolutions.\n        pretrained: Load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    Returns:\n        MobileNetV3 model instance.\n    \"\"\"\n    if 'small' in variant:\n        num_features = 1024\n        if 'minimal' in variant:\n            act_layer = resolve_act_layer(kwargs, 'relu')\n            arch_def = [\n                # stage 0, 112x112 in\n                ['ds_r1_k3_s2_e1_c16'],\n                # stage 1, 56x56 in\n                ['ir_r1_k3_s2_e4.5_c24', 'ir_r1_k3_s1_e3.67_c24'],\n                # stage 2, 28x28 in\n                ['ir_r1_k3_s2_e4_c40', 'ir_r2_k3_s1_e6_c40'],\n                # stage 3, 14x14 in\n                ['ir_r2_k3_s1_e3_c48'],\n                # stage 4, 14x14in\n                ['ir_r3_k3_s2_e6_c96'],\n                # stage 6, 7x7 in\n                ['cn_r1_k1_s1_c576'],\n            ]\n        else:\n            act_layer = resolve_act_layer(kwargs, 'hard_swish')\n            arch_def = [\n                # stage 0, 112x112 in\n                ['ds_r1_k3_s2_e1_c16_se0.25_nre'],  
# relu\n                # stage 1, 56x56 in\n                ['ir_r1_k3_s2_e4.5_c24_nre', 'ir_r1_k3_s1_e3.67_c24_nre'],  # relu\n                # stage 2, 28x28 in\n                ['ir_r1_k5_s2_e4_c40_se0.25', 'ir_r2_k5_s1_e6_c40_se0.25'],  # hard-swish\n                # stage 3, 14x14 in\n                ['ir_r2_k5_s1_e3_c48_se0.25'],  # hard-swish\n                # stage 4, 14x14in\n                ['ir_r3_k5_s2_e6_c96_se0.25'],  # hard-swish\n                # stage 6, 7x7 in\n                ['cn_r1_k1_s1_c576'],  # hard-swish\n            ]\n    else:\n        num_features = 1280\n        if 'minimal' in variant:\n            act_layer = resolve_act_layer(kwargs, 'relu')\n            arch_def = [\n                # stage 0, 112x112 in\n                ['ds_r1_k3_s1_e1_c16'],\n                # stage 1, 112x112 in\n                ['ir_r1_k3_s2_e4_c24', 'ir_r1_k3_s1_e3_c24'],\n                # stage 2, 56x56 in\n                ['ir_r3_k3_s2_e3_c40'],\n                # stage 3, 28x28 in\n                ['ir_r1_k3_s2_e6_c80', 'ir_r1_k3_s1_e2.5_c80', 'ir_r2_k3_s1_e2.3_c80'],\n                # stage 4, 14x14in\n                ['ir_r2_k3_s1_e6_c112'],\n                # stage 5, 14x14in\n                ['ir_r3_k3_s2_e6_c160'],\n                # stage 6, 7x7 in\n                ['cn_r1_k1_s1_c960'],\n            ]\n        else:\n            act_layer = resolve_act_layer(kwargs, 'hard_swish')\n            arch_def = [\n                # stage 0, 112x112 in\n                ['ds_r1_k3_s1_e1_c16_nre'],  # relu\n                # stage 1, 112x112 in\n                ['ir_r1_k3_s2_e4_c24_nre', 'ir_r1_k3_s1_e3_c24_nre'],  # relu\n                # stage 2, 56x56 in\n                ['ir_r3_k5_s2_e3_c40_se0.25_nre'],  # relu\n                # stage 3, 28x28 in\n                ['ir_r1_k3_s2_e6_c80', 'ir_r1_k3_s1_e2.5_c80', 'ir_r2_k3_s1_e2.3_c80'],  # hard-swish\n                # stage 4, 14x14in\n                ['ir_r2_k3_s1_e6_c112_se0.25'],  # hard-swish\n 
               # stage 5, 14x14in\n                ['ir_r3_k5_s2_e6_c160_se0.25'],  # hard-swish\n                # stage 6, 7x7 in\n                ['cn_r1_k1_s1_c960'],  # hard-swish\n            ]\n    se_layer = partial(SqueezeExcite, gate_layer='hard_sigmoid', force_act_layer=nn.ReLU, rd_round_fn=round_channels)\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def, depth_multiplier=depth_multiplier, group_size=group_size),\n        num_features=num_features,\n        stem_size=16,\n        fix_stem=channel_multiplier < 0.75,\n        round_chs_fn=partial(round_channels, multiplier=channel_multiplier),\n        norm_layer=partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        act_layer=act_layer,\n        se_layer=se_layer,\n        **kwargs,\n    )\n    model = _create_mnv3(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_fbnetv3(variant: str, channel_multiplier: float = 1.0, pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\"FBNetV3 model generator.\n\n    Paper: `FBNetV3: Joint Architecture-Recipe Search using Predictor Pretraining`\n        - https://arxiv.org/abs/2006.02049\n    FIXME untested, this is a preliminary impl of some FBNet-V3 variants.\n\n    Args:\n        variant: Model variant name.\n        channel_multiplier: Channel width multiplier.\n        pretrained: Load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    Returns:\n        MobileNetV3 model instance.\n    \"\"\"\n    vl = variant.split('_')[-1]\n    if vl in ('a', 'b'):\n        stem_size = 16\n        arch_def = [\n            ['ds_r2_k3_s1_e1_c16'],\n            ['ir_r1_k5_s2_e4_c24', 'ir_r3_k5_s1_e2_c24'],\n            ['ir_r1_k5_s2_e5_c40_se0.25', 'ir_r4_k5_s1_e3_c40_se0.25'],\n            ['ir_r1_k5_s2_e5_c72', 'ir_r4_k3_s1_e3_c72'],\n            ['ir_r1_k3_s1_e5_c120_se0.25', 'ir_r5_k5_s1_e3_c120_se0.25'],\n            ['ir_r1_k3_s2_e6_c184_se0.25', 'ir_r5_k5_s1_e4_c184_se0.25', 
'ir_r1_k5_s1_e6_c224_se0.25'],\n            ['cn_r1_k1_s1_c1344'],\n        ]\n    elif vl == 'd':\n        stem_size = 24\n        arch_def = [\n            ['ds_r2_k3_s1_e1_c16'],\n            ['ir_r1_k3_s2_e5_c24', 'ir_r5_k3_s1_e2_c24'],\n            ['ir_r1_k5_s2_e4_c40_se0.25', 'ir_r4_k3_s1_e3_c40_se0.25'],\n            ['ir_r1_k3_s2_e5_c72', 'ir_r4_k3_s1_e3_c72'],\n            ['ir_r1_k3_s1_e5_c128_se0.25', 'ir_r6_k5_s1_e3_c128_se0.25'],\n            ['ir_r1_k3_s2_e6_c208_se0.25', 'ir_r5_k5_s1_e5_c208_se0.25', 'ir_r1_k5_s1_e6_c240_se0.25'],\n            ['cn_r1_k1_s1_c1440'],\n        ]\n    elif vl == 'g':\n        stem_size = 32\n        arch_def = [\n            ['ds_r3_k3_s1_e1_c24'],\n            ['ir_r1_k5_s2_e4_c40', 'ir_r4_k5_s1_e2_c40'],\n            ['ir_r1_k5_s2_e4_c56_se0.25', 'ir_r4_k5_s1_e3_c56_se0.25'],\n            ['ir_r1_k5_s2_e5_c104', 'ir_r4_k3_s1_e3_c104'],\n            ['ir_r1_k3_s1_e5_c160_se0.25', 'ir_r8_k5_s1_e3_c160_se0.25'],\n            ['ir_r1_k3_s2_e6_c264_se0.25', 'ir_r6_k5_s1_e5_c264_se0.25', 'ir_r2_k5_s1_e6_c288_se0.25'],\n            ['cn_r1_k1_s1_c1728'],\n        ]\n    else:\n        raise NotImplemented\n    round_chs_fn = partial(round_channels, multiplier=channel_multiplier, round_limit=0.95)\n    se_layer = partial(SqueezeExcite, gate_layer='hard_sigmoid', rd_round_fn=round_chs_fn)\n    act_layer = resolve_act_layer(kwargs, 'hard_swish')\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def),\n        num_features=1984,\n        head_bias=False,\n        stem_size=stem_size,\n        round_chs_fn=round_chs_fn,\n        se_from_exp=False,\n        norm_layer=partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        act_layer=act_layer,\n        se_layer=se_layer,\n        **kwargs,\n    )\n    model = _create_mnv3(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_lcnet(variant: str, channel_multiplier: float = 1.0, pretrained: bool = False, **kwargs) -> MobileNetV3:\n    
\"\"\"LCNet model generator.\n\n    Essentially a MobileNet-V3 crossed with a MobileNet-V1\n\n    Paper: `PP-LCNet: A Lightweight CPU Convolutional Neural Network` - https://arxiv.org/abs/2109.15099\n\n    Args:\n        variant: Model variant name.\n        channel_multiplier: Multiplier to number of channels per layer.\n        pretrained: Load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    Returns:\n        MobileNetV3 model instance.\n    \"\"\"\n    arch_def = [\n        # stage 0, 112x112 in\n        ['dsa_r1_k3_s1_c32'],\n        # stage 1, 112x112 in\n        ['dsa_r2_k3_s2_c64'],\n        # stage 2, 56x56 in\n        ['dsa_r2_k3_s2_c128'],\n        # stage 3, 28x28 in\n        ['dsa_r1_k3_s2_c256', 'dsa_r1_k5_s1_c256'],\n        # stage 4, 14x14in\n        ['dsa_r4_k5_s1_c256'],\n        # stage 5, 14x14in\n        ['dsa_r2_k5_s2_c512_se0.25'],\n        # 7x7\n    ]\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def),\n        stem_size=16,\n        round_chs_fn=partial(round_channels, multiplier=channel_multiplier),\n        norm_layer=partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        act_layer=resolve_act_layer(kwargs, 'hard_swish'),\n        se_layer=partial(SqueezeExcite, gate_layer='hard_sigmoid', force_act_layer=nn.ReLU),\n        num_features=1280,\n        **kwargs,\n    )\n    model = _create_mnv3(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _gen_mobilenet_v4(\n        variant: str,\n        channel_multiplier: float = 1.0,\n        group_size: Optional[int] = None,\n        pretrained: bool = False,\n        **kwargs,\n) -> MobileNetV3:\n    \"\"\"Creates a MobileNet-V4 model.\n\n    Paper: https://arxiv.org/abs/2404.10518\n\n    Args:\n        variant: Model variant name.\n        channel_multiplier: Multiplier to number of channels per layer.\n        group_size: Group size for grouped convolutions.\n        pretrained: Load pretrained weights.\n        **kwargs: 
Additional model arguments.\n\n    Returns:\n        MobileNetV3 model instance.\n    \"\"\"\n    num_features = 1280\n    if 'hybrid' in variant:\n        layer_scale_init_value = 1e-5\n        if 'medium' in variant:\n            stem_size = 32\n            act_layer = resolve_act_layer(kwargs, 'relu')\n            arch_def = [\n                # stage 0, 112x112 in\n                [\n                    'er_r1_k3_s2_e4_c48'  # FusedIB (EdgeResidual)\n                ],\n                # stage 1, 56x56 in\n                [\n                    'uir_r1_a3_k5_s2_e4_c80',  # ExtraDW\n                    'uir_r1_a3_k3_s1_e2_c80',  # ExtraDW\n                ],\n                # stage 2, 28x28 in\n                [\n                    'uir_r1_a3_k5_s2_e6_c160',  # ExtraDW\n                    'uir_r1_a0_k0_s1_e2_c160',  # FFN\n                    'uir_r1_a3_k3_s1_e4_c160',  # ExtraDW\n                    'uir_r1_a3_k5_s1_e4_c160',  # ExtraDW\n                    'mqa_r1_k3_h4_s1_v2_d64_c160',  # MQA w/ KV downsample\n                    'uir_r1_a3_k3_s1_e4_c160',  # ExtraDW\n                    'mqa_r1_k3_h4_s1_v2_d64_c160',  # MQA w/ KV downsample\n                    'uir_r1_a3_k0_s1_e4_c160',  # ConvNeXt\n                    'mqa_r1_k3_h4_s1_v2_d64_c160',  # MQA w/ KV downsample\n                    'uir_r1_a3_k3_s1_e4_c160',  # ExtraDW\n                    'mqa_r1_k3_h4_s1_v2_d64_c160',  # MQA w/ KV downsample\n                    'uir_r1_a3_k0_s1_e4_c160',  # ConvNeXt\n                ],\n                # stage 3, 14x14in\n                [\n                    'uir_r1_a5_k5_s2_e6_c256',  # ExtraDW\n                    'uir_r1_a5_k5_s1_e4_c256',  # ExtraDW\n                    'uir_r2_a3_k5_s1_e4_c256',  # ExtraDW\n                    'uir_r1_a0_k0_s1_e2_c256',  # FFN\n                    'uir_r1_a3_k5_s1_e2_c256',  # ExtraDW\n                    'uir_r1_a0_k0_s1_e2_c256',  # FFN\n                    'uir_r1_a0_k0_s1_e4_c256',  # FFN\n                    
'mqa_r1_k3_h4_s1_d64_c256',  # MQA\n                    'uir_r1_a3_k0_s1_e4_c256',  # ConvNeXt\n                    'mqa_r1_k3_h4_s1_d64_c256',  # MQA\n                    'uir_r1_a5_k5_s1_e4_c256',  # ExtraDW\n                    'mqa_r1_k3_h4_s1_d64_c256',  # MQA\n                    'uir_r1_a5_k0_s1_e4_c256',  # ConvNeXt\n                    'mqa_r1_k3_h4_s1_d64_c256', # MQA\n                    'uir_r1_a5_k0_s1_e4_c256',  # ConvNeXt\n                ],\n                # stage 4, 7x7 in\n                [\n                    'cn_r1_k1_s1_c960' # Conv\n                ],\n            ]\n        elif 'large' in variant:\n            stem_size = 24\n            act_layer = resolve_act_layer(kwargs, 'gelu')\n            arch_def = [\n                # stage 0, 112x112 in\n                [\n                    'er_r1_k3_s2_e4_c48',  # FusedIB (EdgeResidual)\n                ],\n                # stage 1, 56x56 in\n                [\n                    'uir_r1_a3_k5_s2_e4_c96',  # ExtraDW\n                    'uir_r1_a3_k3_s1_e4_c96',  # ExtraDW\n                ],\n                # stage 2, 28x28 in\n                [\n                    'uir_r1_a3_k5_s2_e4_c192',  # ExtraDW\n                    'uir_r3_a3_k3_s1_e4_c192',  # ExtraDW\n                    'uir_r1_a3_k5_s1_e4_c192',  # ExtraDW\n                    'uir_r2_a5_k3_s1_e4_c192',  # ExtraDW\n                    'mqa_r1_k3_h8_s1_v2_d48_c192',  # MQA w/ KV downsample\n                    'uir_r1_a5_k3_s1_e4_c192',  # ExtraDW\n                    'mqa_r1_k3_h8_s1_v2_d48_c192',  # MQA w/ KV downsample\n                    'uir_r1_a5_k3_s1_e4_c192',  # ExtraDW\n                    'mqa_r1_k3_h8_s1_v2_d48_c192',  # MQA w/ KV downsample\n                    'uir_r1_a5_k3_s1_e4_c192',  # ExtraDW\n                    'mqa_r1_k3_h8_s1_v2_d48_c192',  # MQA w/ KV downsample\n                    'uir_r1_a3_k0_s1_e4_c192',  # ConvNeXt\n                ],\n                # stage 3, 14x14in\n                [\n        
            'uir_r4_a5_k5_s2_e4_c512',  # ExtraDW\n                    'uir_r1_a5_k0_s1_e4_c512',  # ConvNeXt\n                    'uir_r1_a5_k3_s1_e4_c512',  # ExtraDW\n                    'uir_r2_a5_k0_s1_e4_c512',  # ConvNeXt\n                    'uir_r1_a5_k3_s1_e4_c512',  # ExtraDW\n                    'uir_r1_a5_k5_s1_e4_c512',  # ExtraDW\n                    'mqa_r1_k3_h8_s1_d64_c512',  # MQA\n                    'uir_r1_a5_k0_s1_e4_c512',  # ConvNeXt\n                    'mqa_r1_k3_h8_s1_d64_c512',  # MQA\n                    'uir_r1_a5_k0_s1_e4_c512',  # ConvNeXt\n                    'mqa_r1_k3_h8_s1_d64_c512',  # MQA\n                    'uir_r1_a5_k0_s1_e4_c512',  # ConvNeXt\n                    'mqa_r1_k3_h8_s1_d64_c512',  # MQA\n                    'uir_r1_a5_k0_s1_e4_c512',  # ConvNeXt\n                ],\n                # stage 4, 7x7 in\n                [\n                    'cn_r1_k1_s1_c960',  # Conv\n                ],\n            ]\n        else:\n            assert False, f'Unknown variant {variant}.'\n    else:\n        layer_scale_init_value = None\n        if 'small' in variant:\n            stem_size = 32\n            act_layer = resolve_act_layer(kwargs, 'relu')\n            arch_def = [\n                # stage 0, 112x112 in\n                [\n                    'cn_r1_k3_s2_e1_c32',  # Conv\n                    'cn_r1_k1_s1_e1_c32',  # Conv\n                ],\n                # stage 1, 56x56 in\n                [\n                    'cn_r1_k3_s2_e1_c96',  # Conv\n                    'cn_r1_k1_s1_e1_c64',  # Conv\n                ],\n                # stage 2, 28x28 in\n                [\n                    'uir_r1_a5_k5_s2_e3_c96',  # ExtraDW\n                    'uir_r4_a0_k3_s1_e2_c96',  # IR\n                    'uir_r1_a3_k0_s1_e4_c96',  # ConvNeXt\n                ],\n                # stage 3, 14x14 in\n                [\n                    'uir_r1_a3_k3_s2_e6_c128',  # ExtraDW\n                    
'uir_r1_a5_k5_s1_e4_c128',  # ExtraDW\n                    'uir_r1_a0_k5_s1_e4_c128',  # IR\n                    'uir_r1_a0_k5_s1_e3_c128',  # IR\n                    'uir_r2_a0_k3_s1_e4_c128',  # IR\n                ],\n                # stage 4, 7x7 in\n                [\n                    'cn_r1_k1_s1_c960',  # Conv\n                ],\n            ]\n        elif 'medium' in variant:\n            stem_size = 32\n            act_layer = resolve_act_layer(kwargs, 'relu')\n            arch_def = [\n                # stage 0, 112x112 in\n                [\n                    'er_r1_k3_s2_e4_c48',  # FusedIB (EdgeResidual)\n                ],\n                # stage 1, 56x56 in\n                [\n                    'uir_r1_a3_k5_s2_e4_c80',  # ExtraDW\n                    'uir_r1_a3_k3_s1_e2_c80',  # ExtraDW\n                ],\n                # stage 2, 28x28 in\n                [\n                    'uir_r1_a3_k5_s2_e6_c160',  # ExtraDW\n                    'uir_r2_a3_k3_s1_e4_c160',  # ExtraDW\n                    'uir_r1_a3_k5_s1_e4_c160',  # ExtraDW\n                    'uir_r1_a3_k3_s1_e4_c160',  # ExtraDW\n                    'uir_r1_a3_k0_s1_e4_c160',  # ConvNeXt\n                    'uir_r1_a0_k0_s1_e2_c160',  # ExtraDW\n                    'uir_r1_a3_k0_s1_e4_c160',  # ConvNeXt\n                ],\n                # stage 3, 14x14in\n                [\n                    'uir_r1_a5_k5_s2_e6_c256',  # ExtraDW\n                    'uir_r1_a5_k5_s1_e4_c256',  # ExtraDW\n                    'uir_r2_a3_k5_s1_e4_c256',  # ExtraDW\n                    'uir_r1_a0_k0_s1_e4_c256',  # FFN\n                    'uir_r1_a3_k0_s1_e4_c256',  # ConvNeXt\n                    'uir_r1_a3_k5_s1_e2_c256',  # ExtraDW\n                    'uir_r1_a5_k5_s1_e4_c256',  # ExtraDW\n                    'uir_r2_a0_k0_s1_e4_c256',  # FFN\n                    'uir_r1_a5_k0_s1_e2_c256',  # ConvNeXt\n                ],\n                # stage 4, 7x7 in\n                [\n         
           'cn_r1_k1_s1_c960',  # Conv\n                ],\n            ]\n        elif 'large' in variant:\n            stem_size = 24\n            act_layer = resolve_act_layer(kwargs, 'relu')\n            arch_def = [\n                # stage 0, 112x112 in\n                [\n                    'er_r1_k3_s2_e4_c48',  # FusedIB (EdgeResidual)\n                ],\n                # stage 1, 56x56 in\n                [\n                    'uir_r1_a3_k5_s2_e4_c96',  # ExtraDW\n                    'uir_r1_a3_k3_s1_e4_c96',  # ExtraDW\n                ],\n                # stage 2, 28x28 in\n                [\n                    'uir_r1_a3_k5_s2_e4_c192',  # ExtraDW\n                    'uir_r3_a3_k3_s1_e4_c192',  # ExtraDW\n                    'uir_r1_a3_k5_s1_e4_c192',  # ExtraDW\n                    'uir_r5_a5_k3_s1_e4_c192',  # ExtraDW\n                    'uir_r1_a3_k0_s1_e4_c192',  # ConvNeXt\n                ],\n                # stage 3, 14x14in\n                [\n                    'uir_r4_a5_k5_s2_e4_c512',  # ExtraDW\n                    'uir_r1_a5_k0_s1_e4_c512',  # ConvNeXt\n                    'uir_r1_a5_k3_s1_e4_c512',  # ExtraDW\n                    'uir_r2_a5_k0_s1_e4_c512',  # ConvNeXt\n                    'uir_r1_a5_k3_s1_e4_c512',  # ExtraDW\n                    'uir_r1_a5_k5_s1_e4_c512',  # ExtraDW\n                    'uir_r3_a5_k0_s1_e4_c512',  # ConvNeXt\n\n                ],\n                # stage 4, 7x7 in\n                [\n                    'cn_r1_k1_s1_c960',  # Conv\n                ],\n            ]\n        else:\n            assert False, f'Unknown variant {variant}.'\n\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def, group_size=group_size),\n        head_bias=False,\n        head_norm=True,\n        num_features=num_features,\n        stem_size=stem_size,\n        fix_stem=channel_multiplier < 1.0,\n        round_chs_fn=partial(round_channels, multiplier=channel_multiplier),\n        
norm_layer=partial(nn.BatchNorm2d, **resolve_bn_args(kwargs)),\n        act_layer=act_layer,\n        layer_scale_init_value=layer_scale_init_value,\n        **kwargs,\n    )\n    model = _create_mnv3(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _cfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Create default configuration dictionary.\n\n    Args:\n        url: Model weight URL.\n        **kwargs: Additional configuration options.\n\n    Returns:\n        Configuration dictionary.\n    \"\"\"\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bilinear',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'conv_stem', 'classifier': 'classifier',\n        'license': 'apache-2.0', **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'mobilenetv3_large_075.untrained': _cfg(url=''),\n    'mobilenetv3_large_100.ra_in1k': _cfg(\n        interpolation='bicubic',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_large_100_ra-f55367f5.pth',\n        hf_hub_id='timm/'),\n    'mobilenetv3_large_100.ra4_e3600_r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        interpolation='bicubic', mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        crop_pct=0.95, test_input_size=(3, 256, 256), test_crop_pct=1.0),\n    'mobilenetv3_large_100.miil_in21k_ft_in1k': _cfg(\n        interpolation='bilinear', mean=(0., 0., 0.), std=(1., 1., 1.),\n        origin_url='https://github.com/Alibaba-MIIL/ImageNet21K',\n        paper_ids='arXiv:2104.10972v4',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/mobilenetv3_large_100_1k_miil_78_0-66471c13.pth',\n        hf_hub_id='timm/'),\n    'mobilenetv3_large_100.miil_in21k': _cfg(\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/mobilenetv3_large_100_in21k_miil-d71cc17b.pth',\n        hf_hub_id='timm/',\n        origin_url='https://github.com/Alibaba-MIIL/ImageNet21K',\n        paper_ids='arXiv:2104.10972v4',\n        interpolation='bilinear', mean=(0., 0., 0.), std=(1., 1., 1.), num_classes=11221),\n    'mobilenetv3_large_150d.ra4_e3600_r256_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        input_size=(3, 256, 256), crop_pct=0.95, pool_size=(8, 8), test_input_size=(3, 320, 320), test_crop_pct=1.0),\n\n    'mobilenetv3_small_050.lamb_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_small_050_lambc-4b7bbe87.pth',\n        hf_hub_id='timm/',\n        interpolation='bicubic'),\n    'mobilenetv3_small_075.lamb_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_small_075_lambc-384766db.pth',\n        hf_hub_id='timm/',\n        interpolation='bicubic'),\n    'mobilenetv3_small_100.lamb_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_small_100_lamb-266a294c.pth',\n        hf_hub_id='timm/',\n        interpolation='bicubic'),\n\n    'mobilenetv3_rw.rmsp_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_100-35495452.pth',\n        hf_hub_id='timm/',\n        interpolation='bicubic'),\n\n    'tf_mobilenetv3_large_075.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_large_075-150ee8b0.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),\n    'tf_mobilenetv3_large_100.in1k': _cfg(\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_large_100-427764d5.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),\n    'tf_mobilenetv3_large_minimal_100.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_large_minimal_100-8596ae28.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),\n    'tf_mobilenetv3_small_075.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_small_075-da427f52.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),\n    'tf_mobilenetv3_small_100.in1k': _cfg(\n        url= 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_small_100-37f49e2b.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),\n    'tf_mobilenetv3_small_minimal_100.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_small_minimal_100-922a7843.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),\n\n    'fbnetv3_b.ra2_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/fbnetv3_b_224-ead5d2a1.pth',\n        hf_hub_id='timm/',\n        test_input_size=(3, 256, 256), crop_pct=0.95),\n    'fbnetv3_d.ra2_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/fbnetv3_d_224-c98bce42.pth',\n        hf_hub_id='timm/',\n        test_input_size=(3, 256, 256), crop_pct=0.95),\n    'fbnetv3_g.ra2_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/fbnetv3_g_240-0b1df83b.pth',\n   
     hf_hub_id='timm/',\n        input_size=(3, 240, 240), test_input_size=(3, 288, 288), crop_pct=0.95, pool_size=(8, 8)),\n\n    \"lcnet_035.untrained\": _cfg(),\n    \"lcnet_050.ra2_in1k\": _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/lcnet_050-f447553b.pth',\n        hf_hub_id='timm/',\n        interpolation='bicubic',\n    ),\n    \"lcnet_075.ra2_in1k\": _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/lcnet_075-318cad2c.pth',\n        hf_hub_id='timm/',\n        interpolation='bicubic',\n    ),\n    \"lcnet_100.ra2_in1k\": _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/lcnet_100-a929038c.pth',\n        hf_hub_id='timm/',\n        interpolation='bicubic',\n    ),\n    \"lcnet_150.untrained\": _cfg(),\n\n    'mobilenetv4_conv_small_035.untrained': _cfg(\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        test_input_size=(3, 256, 256), test_crop_pct=0.95, interpolation='bicubic'),\n    'mobilenetv4_conv_small_050.e3000_r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        test_input_size=(3, 256, 256), test_crop_pct=0.95, interpolation='bicubic'),\n    'mobilenetv4_conv_small.e2400_r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        test_input_size=(3, 256, 256), test_crop_pct=0.95, interpolation='bicubic'),\n    'mobilenetv4_conv_small.e1200_r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        test_input_size=(3, 256, 256), test_crop_pct=0.95, interpolation='bicubic'),\n    'mobilenetv4_conv_small.e3600_r256_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=0.95,\n        test_input_size=(3, 320, 320), test_crop_pct=1.0, interpolation='bicubic'),\n    
'mobilenetv4_conv_medium.e500_r256_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8),\n        crop_pct=0.95, test_input_size=(3, 320, 320), test_crop_pct=1.0, interpolation='bicubic'),\n    'mobilenetv4_conv_medium.e500_r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_input_size=(3, 256, 256), test_crop_pct=1.0, interpolation='bicubic'),\n\n    'mobilenetv4_conv_medium.e250_r384_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12),\n        crop_pct=0.95, interpolation='bicubic'),\n    'mobilenetv4_conv_medium.e180_r384_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        input_size=(3, 384, 384), pool_size=(12, 12),\n        crop_pct=1.0, interpolation='bicubic'),\n    'mobilenetv4_conv_medium.e180_ad_r384_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        input_size=(3, 384, 384), pool_size=(12, 12),\n        crop_pct=1.0, interpolation='bicubic'),\n    'mobilenetv4_conv_medium.e250_r384_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        input_size=(3, 384, 384), pool_size=(12, 12),\n        crop_pct=1.0, interpolation='bicubic'),\n\n    'mobilenetv4_conv_large.e600_r384_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12),\n        crop_pct=0.95, test_input_size=(3, 448, 448), test_crop_pct=1.0, interpolation='bicubic'),\n    'mobilenetv4_conv_large.e500_r256_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8),\n        crop_pct=0.95, test_input_size=(3, 320, 320), test_crop_pct=1.0, interpolation='bicubic'),\n\n    'mobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8),\n        crop_pct=0.95, test_input_size=(3, 320, 320), test_crop_pct=1.0, interpolation='bicubic'),\n    
'mobilenetv4_hybrid_medium.ix_e550_r256_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8),\n        crop_pct=0.95, test_input_size=(3, 320, 320), test_crop_pct=1.0, interpolation='bicubic'),\n    'mobilenetv4_hybrid_medium.ix_e550_r384_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12),\n        crop_pct=0.95, test_input_size=(3, 448, 448), test_crop_pct=1.0, interpolation='bicubic'),\n    'mobilenetv4_hybrid_medium.e500_r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_input_size=(3, 256, 256), test_crop_pct=1.0, interpolation='bicubic'),\n    'mobilenetv4_hybrid_medium.e200_r256_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        input_size=(3, 256, 256), pool_size=(8, 8),\n        crop_pct=0.95, test_input_size=(3, 320, 320), test_crop_pct=1.0, interpolation='bicubic'),\n    'mobilenetv4_hybrid_large.ix_e600_r384_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12),\n        crop_pct=0.95, test_input_size=(3, 448, 448), test_crop_pct=1.0, interpolation='bicubic'),\n    'mobilenetv4_hybrid_large.e600_r384_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12),\n        crop_pct=0.95, test_input_size=(3, 448, 448), test_crop_pct=1.0, interpolation='bicubic'),\n\n    # experimental\n    'mobilenetv4_conv_aa_medium.untrained': _cfg(\n        # hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=0.95, interpolation='bicubic'),\n    'mobilenetv4_conv_blur_medium.e500_r224_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_input_size=(3, 256, 256), test_crop_pct=1.0, interpolation='bicubic'),\n    'mobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), pool_size=(14, 14),\n        crop_pct=0.95, test_input_size=(3, 544, 544), 
test_crop_pct=1.0, interpolation='bicubic'),\n    'mobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12),\n        crop_pct=0.95, test_input_size=(3, 480, 480), test_crop_pct=1.0, interpolation='bicubic'),\n    'mobilenetv4_conv_aa_large.e600_r384_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12),\n        crop_pct=0.95, test_input_size=(3, 480, 480), test_crop_pct=1.0, interpolation='bicubic'),\n    'mobilenetv4_conv_aa_large.e230_r384_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        input_size=(3, 384, 384), pool_size=(12, 12),\n        crop_pct=0.95, test_input_size=(3, 448, 448), test_crop_pct=1.0, interpolation='bicubic'),\n    'mobilenetv4_hybrid_medium_075.untrained': _cfg(\n        # hf_hub_id='timm/',\n        crop_pct=0.95, interpolation='bicubic'),\n    'mobilenetv4_hybrid_large_075.untrained': _cfg(\n        # hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=0.95, interpolation='bicubic'),\n})\n\n\n@register_model\ndef mobilenetv3_large_075(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V3 \"\"\"\n    model = _gen_mobilenet_v3('mobilenetv3_large_075', 0.75, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenetv3_large_100(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V3 \"\"\"\n    model = _gen_mobilenet_v3('mobilenetv3_large_100', 1.0, pretrained=pretrained, **kwargs)\n    return model\n\n@register_model\ndef mobilenetv3_large_150d(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V3 \"\"\"\n    model = _gen_mobilenet_v3('mobilenetv3_large_150d', 1.5, depth_multiplier=1.2, pretrained=pretrained, **kwargs)\n    return model\n\n@register_model\ndef mobilenetv3_small_050(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V3 \"\"\"\n  
  model = _gen_mobilenet_v3('mobilenetv3_small_050', 0.50, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenetv3_small_075(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V3 \"\"\"\n    model = _gen_mobilenet_v3('mobilenetv3_small_075', 0.75, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenetv3_small_100(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V3 \"\"\"\n    model = _gen_mobilenet_v3('mobilenetv3_small_100', 1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenetv3_rw(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V3 \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    model = _gen_mobilenet_v3_rw('mobilenetv3_rw', 1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_mobilenetv3_large_075(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V3 \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_mobilenet_v3('tf_mobilenetv3_large_075', 0.75, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_mobilenetv3_large_100(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V3 \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_mobilenet_v3('tf_mobilenetv3_large_100', 1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_mobilenetv3_large_minimal_100(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V3 \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_mobilenet_v3('tf_mobilenetv3_large_minimal_100', 1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_mobilenetv3_small_075(pretrained: 
bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V3 \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_mobilenet_v3('tf_mobilenetv3_small_075', 0.75, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_mobilenetv3_small_100(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V3 \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_mobilenet_v3('tf_mobilenetv3_small_100', 1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef tf_mobilenetv3_small_minimal_100(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V3 \"\"\"\n    kwargs.setdefault('bn_eps', BN_EPS_TF_DEFAULT)\n    kwargs.setdefault('pad_type', 'same')\n    model = _gen_mobilenet_v3('tf_mobilenetv3_small_minimal_100', 1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef fbnetv3_b(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" FBNetV3-B \"\"\"\n    model = _gen_fbnetv3('fbnetv3_b', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef fbnetv3_d(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" FBNetV3-D \"\"\"\n    model = _gen_fbnetv3('fbnetv3_d', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef fbnetv3_g(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" FBNetV3-G \"\"\"\n    model = _gen_fbnetv3('fbnetv3_g', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef lcnet_035(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" PP-LCNet 0.35\"\"\"\n    model = _gen_lcnet('lcnet_035', 0.35, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef lcnet_050(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" PP-LCNet 0.5\"\"\"\n    model = _gen_lcnet('lcnet_050', 0.5, 
pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef lcnet_075(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" PP-LCNet 1.0\"\"\"\n    model = _gen_lcnet('lcnet_075', 0.75, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef lcnet_100(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" PP-LCNet 1.0\"\"\"\n    model = _gen_lcnet('lcnet_100', 1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef lcnet_150(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" PP-LCNet 1.5\"\"\"\n    model = _gen_lcnet('lcnet_150', 1.5, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenetv4_conv_small_035(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V4 \"\"\"\n    model = _gen_mobilenet_v4('mobilenetv4_conv_small_035', 0.35, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenetv4_conv_small_050(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V4 \"\"\"\n    model = _gen_mobilenet_v4('mobilenetv4_conv_small_050', 0.50, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenetv4_conv_small(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V4 \"\"\"\n    model = _gen_mobilenet_v4('mobilenetv4_conv_small', 1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenetv4_conv_medium(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V4 \"\"\"\n    model = _gen_mobilenet_v4('mobilenetv4_conv_medium', 1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenetv4_conv_large(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V4 \"\"\"\n    model = _gen_mobilenet_v4('mobilenetv4_conv_large', 1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef 
mobilenetv4_hybrid_medium(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V4 Hybrid \"\"\"\n    model = _gen_mobilenet_v4('mobilenetv4_hybrid_medium', 1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenetv4_hybrid_large(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V4 Hybrid\"\"\"\n    model = _gen_mobilenet_v4('mobilenetv4_hybrid_large', 1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenetv4_conv_aa_medium(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V4 w/ AvgPool AA \"\"\"\n    model = _gen_mobilenet_v4('mobilenetv4_conv_aa_medium', 1.0, pretrained=pretrained, aa_layer='avg', **kwargs)\n    return model\n\n\n@register_model\ndef mobilenetv4_conv_blur_medium(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V4 Conv w/ Blur AA \"\"\"\n    model = _gen_mobilenet_v4('mobilenetv4_conv_blur_medium', 1.0, pretrained=pretrained, aa_layer='blurpc', **kwargs)\n    return model\n\n\n@register_model\ndef mobilenetv4_conv_aa_large(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V4 w/ AvgPool AA \"\"\"\n    model = _gen_mobilenet_v4('mobilenetv4_conv_aa_large', 1.0, pretrained=pretrained, aa_layer='avg', **kwargs)\n    return model\n\n\n@register_model\ndef mobilenetv4_hybrid_medium_075(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V4 Hybrid \"\"\"\n    model = _gen_mobilenet_v4('mobilenetv4_hybrid_medium_075', 0.75, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenetv4_hybrid_large_075(pretrained: bool = False, **kwargs) -> MobileNetV3:\n    \"\"\" MobileNet V4 Hybrid\"\"\"\n    model = _gen_mobilenet_v4('mobilenetv4_hybrid_large_075', 0.75, pretrained=pretrained, **kwargs)\n    return model\n\n\nregister_model_deprecations(__name__, {\n    'mobilenetv3_large_100_miil': 
'mobilenetv3_large_100.miil_in21k_ft_in1k',\n    'mobilenetv3_large_100_miil_in21k': 'mobilenetv3_large_100.miil_in21k',\n})\n"
  },
  {
    "path": "timm/models/mobilenetv5.py",
    "content": "from functools import partial\nfrom typing import Callable, Dict, List, Optional, Sequence, Tuple, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD\nfrom timm.layers import (\n    SelectAdaptivePool2d,\n    Linear,\n    LayerType,\n    RmsNorm2d,\n    ConvNormAct,\n    create_conv2d,\n    get_norm_layer,\n    get_norm_act_layer,\n    to_2tuple,\n)\nfrom ._builder import build_model_with_cfg\nfrom ._efficientnet_blocks import SqueezeExcite, UniversalInvertedResidual\nfrom ._efficientnet_builder import (\n    BlockArgs,\n    EfficientNetBuilder,\n    decode_arch_def,\n    efficientnet_init_weights,\n    round_channels,\n)\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_module\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import generate_default_cfgs, register_model\n\n__all__ = ['MobileNetV5', 'MobileNetV5Encoder']\n\n_GELU = partial(nn.GELU, approximate='tanh')\n\n\n@register_notrace_module\nclass MobileNetV5MultiScaleFusionAdapter(nn.Module):\n  \"\"\"Multi-layer fusion token adapter.\n\n  Args:\n    in_chs: List of input channel counts for each feature scale.\n    out_chs: The number of output channels.\n    output_resolution: The output resolution.\n    expansion_ratio: The FFN expansion ratio.\n    interpolation_mode: The upsampling interpolation mode.\n    layer_scale_init_value: The initial value of the layer scale, no layer scale if None.\n  \"\"\"\n\n  def __init__(\n        self,\n        in_chs: Union[int, List[int]],\n        out_chs: int,\n        output_resolution: int,\n        expansion_ratio: float = 2.0,\n        interpolation_mode: str = \"nearest\",\n        layer_scale_init_value: Optional[float] = None,\n        noskip: bool = True,\n        act_layer: Optional[LayerType] = None,\n        norm_layer: Optional[LayerType] = None,\n        device=None,\n        dtype=None,\n  ):\n 
   dd = {'device': device, 'dtype': dtype}\n    super().__init__()\n    self.in_channels = sum(in_chs) if isinstance(in_chs, Sequence) else in_chs\n    self.out_channels = out_chs\n    self.output_resolution = to_2tuple(output_resolution)\n    self.expansion_ratio = expansion_ratio\n    self.interpolation_mode = interpolation_mode\n    self.layer_scale_init_value = layer_scale_init_value\n    self.noskip = noskip\n\n    act_layer = act_layer or _GELU\n    norm_layer = norm_layer or RmsNorm2d\n    self.ffn = UniversalInvertedResidual(\n        in_chs=self.in_channels,\n        out_chs=self.out_channels,\n        dw_kernel_size_mid=0,\n        exp_ratio=self.expansion_ratio,\n        act_layer=act_layer,\n        norm_layer=norm_layer,\n        noskip=self.noskip,\n        layer_scale_init_value=self.layer_scale_init_value,\n        **dd,\n    )\n\n    self.norm = norm_layer(self.out_channels, **dd)\n\n  def forward(self, inputs: List[torch.Tensor]) -> torch.Tensor:\n    # Inputs list of [B, C, H, W] tensors\n    high_resolution = inputs[0].shape[-2:]  # Assuming the first input is the highest resolution.\n    resized_inputs = []\n    for _, img in enumerate(inputs):\n        feat_size = img.shape[-2:]\n        if feat_size[0] < high_resolution[0] or feat_size[1] < high_resolution[1]:\n            img = F.interpolate(img, size=high_resolution, mode=self.interpolation_mode)\n        resized_inputs.append(img)\n\n    channel_cat_imgs = torch.cat(resized_inputs, dim=1)  # Cat on channel dim, must equal self.in_channels\n    img = self.ffn(channel_cat_imgs)\n\n    if high_resolution[0] != self.output_resolution[0] or high_resolution[1] != self.output_resolution[1]:\n        # Interpolate / pool to target output_resolution if highest feature resolution differs\n        if (\n            high_resolution[0] % self.output_resolution[0] != 0 or\n            high_resolution[1] % self.output_resolution[1] != 0\n        ):\n            img = F.interpolate(img, 
size=self.output_resolution, mode=\"bilinear\")\n        else:\n            h_strides = high_resolution[0] // self.output_resolution[0]\n            w_strides = high_resolution[1] // self.output_resolution[1]\n            img = F.avg_pool2d(\n                img,\n                kernel_size=(h_strides, w_strides),\n                stride=(h_strides, w_strides),\n            )\n\n    img = self.norm(img)\n\n    return img\n\n\nclass MobileNetV5(nn.Module):\n    \"\"\" MobiletNet-V5\n    \"\"\"\n\n    def __init__(\n            self,\n            block_args: BlockArgs,\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            stem_size: int = 16,\n            stem_bias: bool = True,\n            fix_stem: bool = False,\n            num_features: int = 2048,\n            pad_type: str = '',\n            use_msfa: bool = True,\n            msfa_indices: List[int] = (-2, -1),\n            msfa_output_resolution: int = 16,\n            act_layer: Optional[LayerType] = None,\n            norm_layer: Optional[LayerType] = None,\n            aa_layer: Optional[LayerType] = None,\n            se_layer: Optional[LayerType] = None,\n            se_from_exp: bool = True,\n            round_chs_fn: Callable = round_channels,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            layer_scale_init_value: Optional[float] = None,\n            global_pool: str = 'avg',\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            block_args: Arguments for blocks of the network.\n            num_classes: Number of classes for classification head.\n            in_chans: Number of input image channels.\n            stem_size: Number of output channels of the initial stem convolution.\n            fix_stem: If True, don't scale stem by round_chs_fn.\n            num_features: Number of output channels of the conv head layer.\n            head_bias: If True, add a learnable bias to the 
conv head layer.\n            pad_type: Type of padding to use for convolution layers.\n            act_layer: Type of activation layer.\n            norm_layer: Type of normalization layer.\n            aa_layer: Type of anti-aliasing layer.\n            se_layer: Type of Squeeze-and-Excite layer.\n            se_from_exp: If True, calculate SE channel reduction from expanded mid channels.\n            round_chs_fn: Callable to round number of filters based on depth multiplier.\n            drop_rate: Dropout rate.\n            drop_path_rate: Stochastic depth rate.\n            layer_scale_init_value: Enable layer scale on compatible blocks if not None.\n            global_pool: Type of pooling to use for global pooling features of the FC head.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        act_layer = act_layer or _GELU\n        norm_layer = get_norm_layer(norm_layer) or RmsNorm2d\n        norm_act_layer = get_norm_act_layer(norm_layer, act_layer)\n        se_layer = se_layer or SqueezeExcite\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n        self.msfa_indices = msfa_indices\n        self.msfa_output_resolution = msfa_output_resolution\n\n        # Stem\n        if not fix_stem:\n            stem_size = round_chs_fn(stem_size)\n        self.conv_stem = ConvNormAct(\n            in_chans,\n            stem_size,\n            kernel_size=3,\n            stride=2,\n            padding=pad_type,\n            bias=stem_bias,\n            norm_layer=norm_layer,\n            act_layer=act_layer,\n            **dd,\n        )\n\n        # Middle stages (IR/ER/DS Blocks)\n        builder = EfficientNetBuilder(\n            output_stride=32,\n            pad_type=pad_type,\n            round_chs_fn=round_chs_fn,\n            se_from_exp=se_from_exp,\n            act_layer=act_layer,\n            
norm_layer=norm_layer,\n            aa_layer=aa_layer,\n            se_layer=se_layer,\n            drop_path_rate=drop_path_rate,\n            layer_scale_init_value=layer_scale_init_value,\n            **dd,\n        )\n        self.blocks = nn.Sequential(*builder(stem_size, block_args))\n        self.feature_info = builder.features\n        self.stage_ends = [f['stage'] for f in self.feature_info]\n        self.num_features = builder.in_chs  # features of last stage, output of forward_features()\n\n        # Neck (aggregation) + Head + Pooling\n        if use_msfa:\n            self.num_features = self.head_hidden_size = num_features # output of msfa is output of forward_features()\n            # Map msfa indices to feature info and calculate sum of feature channels\n            self.msfa_indices = feature_take_indices(len(self.feature_info), self.msfa_indices)[0]\n            self.msfa_in_chs = sum([self.feature_info[mi]['num_chs'] for mi in self.msfa_indices])\n\n            self.msfa = MobileNetV5MultiScaleFusionAdapter(\n                in_chs=self.msfa_in_chs,\n                out_chs=num_features,\n                output_resolution=self.msfa_output_resolution,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                **dd,\n            )\n            self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)\n            self.conv_head = None\n            self.norm_head = None\n        else:\n            self.num_features = builder.in_chs  # features of last stage, output of forward_features()\n            self.head_hidden_size = num_features\n            self.msfa = None\n            self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)\n            num_pooled_chs = self.num_features * self.global_pool.feat_mult()\n            # mobilenet-v4 style post-pooling PW conv is followed by a norm+act layer\n            self.conv_head = create_conv2d(num_pooled_chs, self.head_hidden_size, 1, padding=pad_type, 
**dd)\n            self.norm_head = norm_act_layer(self.head_hidden_size, **dd)\n\n        self.flatten = nn.Flatten(1) if global_pool else nn.Identity()  # don't flatten if pooling disabled\n        self.classifier = Linear(self.head_hidden_size, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n        efficientnet_init_weights(self)\n\n    def as_sequential(self):\n        layers = [self.conv_stem, self.bn1]\n        layers.extend(self.blocks)\n        layers.append(self.global_pool)\n        if self.conv_head is not None:\n            layers.append(self.conv_head)\n        if self.norm_head is not None:\n            layers.append(self.norm_head)\n        layers.extend([nn.Flatten(), nn.Dropout(self.drop_rate), self.classifier])\n        return nn.Sequential(*layers)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False):\n        return dict(\n            stem=r'^conv_stem|bn1',\n            blocks=r'^blocks\\.(\\d+)' if coarse else r'^blocks\\.(\\d+)\\.(\\d+)'\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.classifier\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg'):\n        self.num_classes = num_classes\n        # NOTE: cannot meaningfully change pooling of efficient head after creation\n        self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)\n        self.flatten = nn.Flatten(1) if global_pool else nn.Identity()  # don't flatten if pooling disabled\n        self.classifier = Linear(self.head_hidden_size, num_classes) if num_classes > 0 else nn.Identity()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 
'NCHW',\n            intermediates_only: bool = False,\n            extra_blocks: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n            extra_blocks: Include outputs of all blocks and head conv in output, does not align with feature_info\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        if stop_early:\n            assert intermediates_only, 'Must use intermediates_only for early stopping.'\n        intermediates = []\n        if extra_blocks:\n            take_indices, max_index = feature_take_indices(len(self.blocks) + 1, indices)\n        else:\n            take_indices, max_index = feature_take_indices(len(self.stage_ends), indices)\n            take_indices = [self.stage_ends[i] for i in take_indices]\n            max_index = self.stage_ends[max_index]\n\n        # FIXME MFSA and forward_intermediates overlap, they both take indices from specific features\n        # When a user wants to grab specific feature maps for a downstream task AND have the msfa output\n        # what should we do? Accumulate two intermediates? 
One for msfa and one for take_indices?\n\n        # forward pass\n        feat_idx = 0  # stem is index 0\n        x = self.conv_stem(x)\n        if feat_idx in take_indices:\n            intermediates.append(x)\n\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            blocks = self.blocks\n        else:\n            blocks = self.blocks[:max_index]\n        for blk in blocks:\n            feat_idx += 1\n            x = blk(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        # FIXME see note above\n        # self.msfa(msfa_intermediatse)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n            extra_blocks: bool = False,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        if extra_blocks:\n            take_indices, max_index = feature_take_indices(len(self.blocks) + 1, indices)\n        else:\n            take_indices, max_index = feature_take_indices(len(self.stage_ends), indices)\n            max_index = self.stage_ends[max_index]\n        self.blocks = self.blocks[:max_index]  # truncate blocks w/ stem as idx 0\n        if max_index < len(self.blocks):\n            self.conv_head = None\n            self.norm_head = None\n        if prune_head:\n            self.conv_head = None\n            self.norm_head = None\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        if self.msfa is not None:\n            # When MSFA aggregation layer is present, we gather intermediates as is forward_intermediates\n            feat_idx = 0  # offset by one from blocks index due to stem feature\n            
intermediates = []\n            x = self.conv_stem(x)\n            if feat_idx in self.msfa_indices:\n                intermediates.append(x)\n            for blk in self.blocks:\n                feat_idx += 1\n                # FIXME fix grad checkpointing\n                x = blk(x)\n                if feat_idx in self.msfa_indices:\n                    intermediates.append(x)\n            x = self.msfa(intermediates)\n        else:\n            x = self.conv_stem(x)\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint_seq(self.blocks, x, flatten=True)\n            else:\n                x = self.blocks(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        x = self.global_pool(x)\n        if self.conv_head is not None:\n            x = self.conv_head(x)\n        if self.norm_head is not None:\n            x = self.norm_head(x)\n        x = self.flatten(x)\n        if self.drop_rate > 0.:\n            x = F.dropout(x, p=self.drop_rate, training=self.training)\n        if pre_logits:\n            return x\n        return self.classifier(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\nclass MobileNetV5Encoder(nn.Module):\n    \"\"\"MobileNetV5 Vision Encoder\"\"\"\n\n    def __init__(\n            self,\n            block_args: BlockArgs,\n            in_chans: int = 3,\n            stem_size: int = 64,\n            stem_bias: bool = True,\n            fix_stem: bool = False,\n            pad_type: str = '',\n            msfa_indices: Sequence[int] = (-2, -1),\n            msfa_output_resolution: int = 16,\n            act_layer: Optional[LayerType] = None,\n            norm_layer: Optional[LayerType] = None,\n            aa_layer: Optional[LayerType] = None,\n            se_layer: Optional[LayerType] = None,\n            se_from_exp: bool = 
True,\n            round_chs_fn: Callable = round_channels,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            layer_scale_init_value: Optional[float] = None,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        act_layer = act_layer or _GELU\n        norm_layer = get_norm_layer(norm_layer) or RmsNorm2d\n        se_layer = se_layer or SqueezeExcite\n        self.num_classes = 0    # Exists to satisfy ._hub module APIs.\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n\n        # Stem\n        if not fix_stem:\n            stem_size = round_chs_fn(stem_size)\n        self.conv_stem = ConvNormAct(\n            in_chans,\n            stem_size,\n            kernel_size=3,\n            stride=2,\n            padding=pad_type,\n            bias=stem_bias,\n            norm_layer=norm_layer,\n            act_layer=act_layer,\n            **dd,\n        )\n\n        builder = EfficientNetBuilder(\n            output_stride=32,\n            pad_type=pad_type,\n            round_chs_fn=round_chs_fn,\n            se_from_exp=se_from_exp,\n            act_layer=act_layer,\n            norm_layer=norm_layer,\n            aa_layer=aa_layer,\n            se_layer=se_layer,\n            drop_path_rate=drop_path_rate,\n            layer_scale_init_value=layer_scale_init_value,\n            **dd,\n        )\n        self.blocks = nn.Sequential(*builder(stem_size, block_args))\n        self.feature_info = builder.features\n        self.stage_ends = [f['stage'] for f in self.feature_info]\n\n        self.num_features = self.head_hidden_size = 2048    # output of msfa is output of forward_features()\n        # Map msfa indices to feature info and calculate sum of feature channels\n        self.msfa_indices = feature_take_indices(len(self.feature_info), msfa_indices)[0]\n        self.msfa_in_chs 
= sum([self.feature_info[mi]['num_chs'] for mi in self.msfa_indices])\n        self.msfa_output_resolution = msfa_output_resolution\n\n        self.msfa = MobileNetV5MultiScaleFusionAdapter(\n            in_chs=self.msfa_in_chs,\n            out_chs=self.num_features,\n            output_resolution=self.msfa_output_resolution,\n            norm_layer=norm_layer,\n            act_layer=act_layer,\n            **dd,\n        )\n\n        efficientnet_init_weights(self)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n            extra_blocks: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: (Unused) Applies norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n            extra_blocks: Include outputs of all blocks and head conv in output, does not align with feature_info\n        Returns:\n\n        \"\"\"\n        del norm\n\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        if stop_early:\n            assert intermediates_only, 'Must use intermediates_only for early stopping.'\n\n        # MobileNet v5's MultiScaleFusionAdapter takes intermediates from specific feature indicies and uses them in\n        # its computation. 
These MSFA indices are not guaranteed to be captured by the `indices` parameter passed to\n        # this function, so we accumulate two sets of indices, one that aligns with the `indices` parameter and one\n        # that is required by the MSFA block.\n        intermediates = []\n        msfa_intermediates = []\n\n        if extra_blocks:\n            take_indices, max_index = feature_take_indices(len(self.blocks) + 1, indices)\n        else:\n            take_indices, max_index = feature_take_indices(len(self.stage_ends), indices)\n            take_indices = [self.stage_ends[i] for i in take_indices]\n            max_index = self.stage_ends[max_index]\n\n        # forward pass\n        feat_idx = 0    # stem is index 0\n        x = self.conv_stem(x)\n        if feat_idx in take_indices:\n            intermediates.append(x)\n        if feat_idx in self.msfa_indices:\n            msfa_intermediates.append(x)\n\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            blocks = self.blocks\n        else:\n            blocks = self.blocks[:max_index]\n\n        for blk in blocks:\n            feat_idx += 1\n            x = blk(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n            if feat_idx in self.msfa_indices:\n                msfa_intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return self.msfa(msfa_intermediates), intermediates\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        feat_idx = 0    # offset by one from blocks index due to stem feature\n        intermediates = []\n\n        x = self.conv_stem(x)\n        if feat_idx in self.msfa_indices:\n            intermediates.append(x)\n\n        for blk in self.blocks:\n            feat_idx += 1\n            # FIXME fix grad checkpointing\n            x = blk(x)\n            if feat_idx in self.msfa_indices:\n                
intermediates.append(x)\n\n        return self.msfa(intermediates)\n\n    def forward_head(self, x: torch.Tensor) -> torch.Tensor:\n        raise NotImplementedError(\"MobileNetV5Encoder does not support classification use cases.\")\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        return self.forward_features(x)\n\n\ndef checkpoint_filter_fn(\n        state_dict: Dict[str, torch.Tensor],\n        model,\n) -> Dict[str, torch.Tensor]:\n    \"\"\" convert weights from gemma encoders \"\"\"\n    state_dict = state_dict.get('model', state_dict)\n    state_dict = state_dict.get('state_dict', state_dict)\n    if 'model.vision_tower.timm_model.conv_stem.conv.weight' in state_dict:\n        prefix = 'model.vision_tower.timm_model.'\n        state_dict = {k.replace(prefix, ''): v for k, v in state_dict.items() if prefix in k}\n    return state_dict\n\n\ndef _create_mnv5_encoder(variant: str, pretrained: bool = False, **kwargs) -> MobileNetV5Encoder:\n    out_indices = kwargs.pop('out_indices', (0, 1, 2, 3, 4))\n    feature_cfg = dict(out_indices=out_indices, feature_cls='getter')\n    kwargs_filter = (\n        'num_classes',\n        'num_features',\n        'head_conv',\n        'head_bias',\n        'head_norm',\n        'global_pool',\n    )\n    model = build_model_with_cfg(\n        MobileNetV5Encoder,\n        variant,\n        pretrained,\n        pretrained_strict=False,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=feature_cfg,\n        kwargs_filter=kwargs_filter,\n        **kwargs,\n    )\n    return model\n\n\ndef _create_mnv5(variant: str, pretrained: bool = False, **kwargs) -> MobileNetV5:\n    out_indices = kwargs.pop('out_indices', (0, 1, 2, 3, 4))\n    feature_cfg = dict(out_indices=out_indices, feature_cls='getter')\n    model = build_model_with_cfg(\n        MobileNetV5,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=feature_cfg,\n        
**kwargs,\n    )\n    return model\n\n\ndef _gen_mobilenet_v5(\n        variant: str,\n        channel_multiplier: float = 1.0,\n        group_size=None,\n        pretrained: bool = False,\n        encoder: bool = False,\n        **kwargs,\n) -> MobileNetV5Encoder:\n    if 'mobilenetv5_base' in variant:\n        arch_def: list[list[str]] = [\n            # Stage 0: 128x128 in\n            [\n                'er_r1_k3_s2_e4_c128',\n                'er_r1_k3_s1_e4_c128',\n                'er_r1_k3_s1_e4_c128',\n            ],\n            # Stage 1: 256x256 in\n            [\n                'uir_r1_a3_k5_s2_e6_c256',\n                'uir_r1_a5_k0_s1_e4_c256',\n                'uir_r1_a3_k0_s1_e4_c256',\n                'uir_r1_a5_k0_s1_e4_c256',\n                'uir_r1_a3_k0_s1_e4_c256',\n            ],\n            # Stage 2: 640x640 in\n            [\n                \"uir_r1_a5_k5_s2_e6_c512\",\n                \"uir_r1_a5_k0_s1_e4_c512\",\n                \"uir_r1_a5_k0_s1_e4_c512\",\n                \"uir_r1_a0_k0_s1_e1_c512\",\n                'mqa_r1_k3_h8_s2_d64_c512',\n                \"uir_r1_a0_k0_s1_e2_c512\",\n                'mqa_r1_k3_h8_s2_d64_c512',\n                \"uir_r1_a0_k0_s1_e2_c512\",\n                'mqa_r1_k3_h8_s2_d64_c512',\n                \"uir_r1_a0_k0_s1_e2_c512\",\n                'mqa_r1_k3_h8_s2_d64_c512',\n                \"uir_r1_a0_k0_s1_e2_c512\",\n                'mqa_r1_k3_h8_s2_d64_c512',\n                \"uir_r1_a0_k0_s1_e2_c512\",\n                'mqa_r1_k3_h8_s2_d64_c512',\n                \"uir_r1_a0_k0_s1_e2_c512\",\n            ],\n            # Stage 3: 1280x1280 in\n            [\n                \"uir_r1_a5_k5_s2_e6_c1024\",\n                'mqa_r1_k3_h16_s1_d64_c1024',\n                \"uir_r1_a0_k0_s1_e2_c1024\",\n                'mqa_r1_k3_h16_s1_d64_c1024',\n                \"uir_r1_a0_k0_s1_e2_c1024\",\n                'mqa_r1_k3_h16_s1_d64_c1024',\n                \"uir_r1_a0_k0_s1_e2_c1024\",\n      
          'mqa_r1_k3_h16_s1_d64_c1024',\n                \"uir_r1_a0_k0_s1_e2_c1024\",\n                'mqa_r1_k3_h16_s1_d64_c1024',\n                \"uir_r1_a0_k0_s1_e2_c1024\",\n                'mqa_r1_k3_h16_s1_d64_c1024',\n                \"uir_r1_a0_k0_s1_e2_c1024\",\n                'mqa_r1_k3_h16_s1_d64_c1024',\n                \"uir_r1_a0_k0_s1_e2_c1024\",\n            ],\n        ]\n    else:\n        arch_def: list[list[str]] = [\n            # Stage 0: 128x128 in\n            [\n                'er_r1_k3_s2_e4_c128',\n                'er_r1_k3_s1_e4_c128',\n                'er_r1_k3_s1_e4_c128',\n            ],\n            # Stage 1: 256x256 in\n            [\n                'uir_r1_a3_k5_s2_e6_c256',\n                'uir_r1_a5_k0_s1_e4_c256',\n                'uir_r1_a3_k0_s1_e4_c256',\n                'uir_r1_a5_k0_s1_e4_c256',\n                'uir_r1_a3_k0_s1_e4_c256',\n            ],\n            # Stage 2: 640x640 in\n            [\n                \"uir_r1_a5_k5_s2_e6_c640\",\n                \"uir_r1_a5_k0_s1_e4_c640\",\n                \"uir_r1_a5_k0_s1_e4_c640\",\n                \"uir_r1_a5_k0_s1_e4_c640\",\n                \"uir_r1_a5_k0_s1_e4_c640\",\n                \"uir_r1_a5_k0_s1_e4_c640\",\n                \"uir_r1_a5_k0_s1_e4_c640\",\n                \"uir_r1_a5_k0_s1_e4_c640\",\n                \"uir_r1_a0_k0_s1_e1_c640\",\n                \"mqa_r1_k3_h12_v2_s1_d64_c640\",\n                \"uir_r1_a0_k0_s1_e2_c640\",\n                \"mqa_r1_k3_h12_v2_s1_d64_c640\",\n                \"uir_r1_a0_k0_s1_e2_c640\",\n                \"mqa_r1_k3_h12_v2_s1_d64_c640\",\n                \"uir_r1_a0_k0_s1_e2_c640\",\n                \"mqa_r1_k3_h12_v2_s1_d64_c640\",\n                \"uir_r1_a0_k0_s1_e2_c640\",\n                \"mqa_r1_k3_h12_v2_s1_d64_c640\",\n                \"uir_r1_a0_k0_s1_e2_c640\",\n                \"mqa_r1_k3_h12_v2_s1_d64_c640\",\n                \"uir_r1_a0_k0_s1_e2_c640\",\n                
\"mqa_r1_k3_h12_v2_s1_d64_c640\",\n                \"uir_r1_a0_k0_s1_e2_c640\",\n                \"mqa_r1_k3_h12_v2_s1_d64_c640\",\n                \"uir_r1_a0_k0_s1_e2_c640\",\n                \"mqa_r1_k3_h12_v2_s1_d64_c640\",\n                \"uir_r1_a0_k0_s1_e2_c640\",\n                \"mqa_r1_k3_h12_v2_s1_d64_c640\",\n                \"uir_r1_a0_k0_s1_e2_c640\",\n                \"mqa_r1_k3_h12_v2_s1_d64_c640\",\n                \"uir_r1_a0_k0_s1_e2_c640\",\n                \"mqa_r1_k3_h12_v2_s1_d64_c640\",\n                \"uir_r1_a0_k0_s1_e2_c640\",\n                \"mqa_r1_k3_h12_v2_s1_d64_c640\",\n                \"uir_r1_a0_k0_s1_e2_c640\",\n                \"mqa_r1_k3_h12_v2_s1_d64_c640\",\n                \"uir_r1_a0_k0_s1_e2_c640\",\n            ],\n            # Stage 3: 1280x1280 in\n            [\n                \"uir_r1_a5_k5_s2_e6_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                
\"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n                \"mqa_r1_k3_h16_s1_d96_c1280\",\n                \"uir_r1_a0_k0_s1_e2_c1280\",\n            ],\n        ]\n\n    model_kwargs = dict(\n        block_args=decode_arch_def(arch_def, group_size=group_size),\n        stem_size=64,\n        fix_stem=channel_multiplier < 1.0,\n        round_chs_fn=partial(round_channels, multiplier=channel_multiplier),\n        norm_layer=RmsNorm2d,\n        act_layer=_GELU,\n        layer_scale_init_value=1e-5,\n    )\n    model_kwargs = dict(model_kwargs, **kwargs)\n    if encoder:\n        model = _create_mnv5_encoder(variant, pretrained, **model_kwargs)\n    else:\n        model = _create_mnv5(variant, pretrained, **model_kwargs)\n    return model\n\n\ndef _cfg(url: str = '', **kwargs):\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 256, 256), 'pool_size': (16, 16),\n        'crop_pct': 1.0, 'interpolation': 'bicubic',\n        'mean': IMAGENET_INCEPTION_MEAN, 'std': IMAGENET_INCEPTION_STD,\n        'first_conv': 'conv_stem.conv', 'classifier': 'classifier',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    # Encoder-only config for Gemma 3n Transformers integration\n    'mobilenetv5_300m_enc': _cfg(\n        mean=(0., 0., 0.), std=(1., 1., 1.),\n        input_size=(3, 768, 768),\n        num_classes=0),\n\n    # Gemma 3n encoder weights for timm use / fine-tune\n    
'mobilenetv5_300m.gemma3n': _cfg(\n        hf_hub_id='timm/',\n        mean=(0., 0., 0.), std=(1., 1., 1.),\n        input_size=(3, 768, 768),\n        num_classes=0,\n        license='gemma'),\n\n    # WIP classification configs for testing\n    'mobilenetv5_base.untrained': _cfg(\n        # hf_hub_id='timm/',\n        num_classes=1000)\n})\n\n\n@register_model\ndef mobilenetv5_300m_enc(pretrained: bool = False, **kwargs) -> MobileNetV5Encoder:\n    \"\"\"MobileNet V5 Vision Encoder\"\"\"\n    pad_type = kwargs.pop('pad_type', 'same')\n    model = _gen_mobilenet_v5(\n        'mobilenetv5_300m_enc',\n        pretrained=pretrained,\n        encoder=True,\n        pad_type=pad_type,\n        **kwargs,\n    )\n    return model\n\n\n@register_model\ndef mobilenetv5_300m(pretrained: bool = False, **kwargs) -> MobileNetV5:\n    model = _gen_mobilenet_v5('mobilenetv5_300m', pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef mobilenetv5_base(pretrained: bool = False, **kwargs) -> MobileNetV5:\n    model = _gen_mobilenet_v5('mobilenetv5_base', pretrained=pretrained, **kwargs)\n    return model\n"
  },
  {
    "path": "timm/models/mobilevit.py",
    "content": "\"\"\" MobileViT\n\nPaper:\nV1: `MobileViT: Light-weight, General-purpose, and Mobile-friendly Vision Transformer` - https://arxiv.org/abs/2110.02178\nV2: `Separable Self-attention for Mobile Vision Transformers` - https://arxiv.org/abs/2206.02680\n\nMobileVitBlock and checkpoints adapted from https://github.com/apple/ml-cvnets (original copyright below)\nLicense: https://github.com/apple/ml-cvnets/blob/main/LICENSE (Apple open source)\n\nRest of code, ByobNet, and Transformer block hacked together by / Copyright 2022, Ross Wightman\n\"\"\"\n#\n# For licensing see accompanying LICENSE file.\n# Copyright (C) 2020 Apple Inc. All Rights Reserved.\n#\nimport math\nfrom typing import Callable, Tuple, Optional, Type\n\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\n\nfrom timm.layers import to_2tuple, make_divisible, GroupNorm1, ConvMlp, DropPath, is_exportable\nfrom ._builder import build_model_with_cfg\nfrom ._features_fx import register_notrace_module\nfrom ._registry import register_model, generate_default_cfgs, register_model_deprecations\nfrom .byobnet import register_block, ByoBlockCfg, ByoModelCfg, ByobNet, LayerFn, num_groups\nfrom .vision_transformer import Block as TransformerBlock\n\n__all__ = []\n\n\ndef _inverted_residual_block(d, c, s, br=4.0):\n    # inverted residual is a bottleneck block with bottle_ratio > 1 applied to in_chs, linear output, gs=1 (depthwise)\n    return ByoBlockCfg(\n        type='bottle', d=d, c=c, s=s, gs=1, br=br,\n        block_kwargs=dict(bottle_in=True, linear_out=True))\n\n\ndef _mobilevit_block(d, c, s, transformer_dim, transformer_depth, patch_size=4, br=4.0):\n    # inverted residual + mobilevit blocks as per MobileViT network\n    return (\n        _inverted_residual_block(d=d, c=c, s=s, br=br),\n        ByoBlockCfg(\n            type='mobilevit', d=1, c=c, s=1,\n            block_kwargs=dict(\n                transformer_dim=transformer_dim,\n                
transformer_depth=transformer_depth,\n                patch_size=patch_size)\n        )\n    )\n\n\ndef _mobilevitv2_block(d, c, s, transformer_depth, patch_size=2, br=2.0, transformer_br=0.5):\n    # inverted residual + mobilevit blocks as per MobileViT network\n    return (\n        _inverted_residual_block(d=d, c=c, s=s, br=br),\n        ByoBlockCfg(\n            type='mobilevit2', d=1, c=c, s=1, br=transformer_br, gs=1,\n            block_kwargs=dict(\n                transformer_depth=transformer_depth,\n                patch_size=patch_size)\n        )\n    )\n\n\ndef _mobilevitv2_cfg(multiplier=1.0):\n    chs = (64, 128, 256, 384, 512)\n    if multiplier != 1.0:\n        chs = tuple([int(c * multiplier) for c in chs])\n    cfg = ByoModelCfg(\n        blocks=(\n            _inverted_residual_block(d=1, c=chs[0], s=1, br=2.0),\n            _inverted_residual_block(d=2, c=chs[1], s=2, br=2.0),\n            _mobilevitv2_block(d=1, c=chs[2], s=2, transformer_depth=2),\n            _mobilevitv2_block(d=1, c=chs[3], s=2, transformer_depth=4),\n            _mobilevitv2_block(d=1, c=chs[4], s=2, transformer_depth=3),\n        ),\n        stem_chs=int(32 * multiplier),\n        stem_type='3x3',\n        stem_pool='',\n        downsample='',\n        act_layer='silu',\n    )\n    return cfg\n\n\nmodel_cfgs = dict(\n    mobilevit_xxs=ByoModelCfg(\n        blocks=(\n            _inverted_residual_block(d=1, c=16, s=1, br=2.0),\n            _inverted_residual_block(d=3, c=24, s=2, br=2.0),\n            _mobilevit_block(d=1, c=48, s=2, transformer_dim=64, transformer_depth=2, patch_size=2, br=2.0),\n            _mobilevit_block(d=1, c=64, s=2, transformer_dim=80, transformer_depth=4, patch_size=2, br=2.0),\n            _mobilevit_block(d=1, c=80, s=2, transformer_dim=96, transformer_depth=3, patch_size=2, br=2.0),\n        ),\n        stem_chs=16,\n        stem_type='3x3',\n        stem_pool='',\n        downsample='',\n        act_layer='silu',\n        
num_features=320,\n    ),\n\n    mobilevit_xs=ByoModelCfg(\n        blocks=(\n            _inverted_residual_block(d=1, c=32, s=1),\n            _inverted_residual_block(d=3, c=48, s=2),\n            _mobilevit_block(d=1, c=64, s=2, transformer_dim=96, transformer_depth=2, patch_size=2),\n            _mobilevit_block(d=1, c=80, s=2, transformer_dim=120, transformer_depth=4, patch_size=2),\n            _mobilevit_block(d=1, c=96, s=2, transformer_dim=144, transformer_depth=3, patch_size=2),\n        ),\n        stem_chs=16,\n        stem_type='3x3',\n        stem_pool='',\n        downsample='',\n        act_layer='silu',\n        num_features=384,\n    ),\n\n    mobilevit_s=ByoModelCfg(\n        blocks=(\n            _inverted_residual_block(d=1, c=32, s=1),\n            _inverted_residual_block(d=3, c=64, s=2),\n            _mobilevit_block(d=1, c=96, s=2, transformer_dim=144, transformer_depth=2, patch_size=2),\n            _mobilevit_block(d=1, c=128, s=2, transformer_dim=192, transformer_depth=4, patch_size=2),\n            _mobilevit_block(d=1, c=160, s=2, transformer_dim=240, transformer_depth=3, patch_size=2),\n        ),\n        stem_chs=16,\n        stem_type='3x3',\n        stem_pool='',\n        downsample='',\n        act_layer='silu',\n        num_features=640,\n    ),\n\n    semobilevit_s=ByoModelCfg(\n        blocks=(\n            _inverted_residual_block(d=1, c=32, s=1),\n            _inverted_residual_block(d=3, c=64, s=2),\n            _mobilevit_block(d=1, c=96, s=2, transformer_dim=144, transformer_depth=2, patch_size=2),\n            _mobilevit_block(d=1, c=128, s=2, transformer_dim=192, transformer_depth=4, patch_size=2),\n            _mobilevit_block(d=1, c=160, s=2, transformer_dim=240, transformer_depth=3, patch_size=2),\n        ),\n        stem_chs=16,\n        stem_type='3x3',\n        stem_pool='',\n        downsample='',\n        attn_layer='se',\n        attn_kwargs=dict(rd_ratio=1/8),\n        num_features=640,\n    ),\n\n    
mobilevitv2_050=_mobilevitv2_cfg(.50),\n    mobilevitv2_075=_mobilevitv2_cfg(.75),\n    mobilevitv2_125=_mobilevitv2_cfg(1.25),\n    mobilevitv2_100=_mobilevitv2_cfg(1.0),\n    mobilevitv2_150=_mobilevitv2_cfg(1.5),\n    mobilevitv2_175=_mobilevitv2_cfg(1.75),\n    mobilevitv2_200=_mobilevitv2_cfg(2.0),\n)\n\n\n@register_notrace_module\nclass MobileVitBlock(nn.Module):\n    \"\"\" MobileViT block\n        Paper: https://arxiv.org/abs/2110.02178?context=cs.LG\n    \"\"\"\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: Optional[int] = None,\n            kernel_size: int = 3,\n            stride: int = 1,\n            bottle_ratio: float = 1.0,\n            group_size: Optional[int] = None,\n            dilation: Tuple[int, int] = (1, 1),\n            mlp_ratio: float = 2.0,\n            transformer_dim: Optional[int] = None,\n            transformer_depth: int = 2,\n            patch_size: int = 8,\n            num_heads: int = 4,\n            attn_drop: float = 0.,\n            drop: int = 0.,\n            no_fusion: bool = False,\n            drop_path_rate: float = 0.,\n            layers: LayerFn = None,\n            transformer_norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n            **kwargs,  # eat unused args\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        layers = layers or LayerFn()\n        groups = num_groups(group_size, in_chs)\n        out_chs = out_chs or in_chs\n        transformer_dim = transformer_dim or make_divisible(bottle_ratio * in_chs)\n\n        self.conv_kxk = layers.conv_norm_act(\n            in_chs,\n            in_chs,\n            kernel_size=kernel_size,\n            stride=stride,\n            groups=groups,\n            dilation=dilation[0],\n            **dd,\n        )\n        self.conv_1x1 = nn.Conv2d(in_chs, transformer_dim, kernel_size=1, bias=False, **dd)\n\n        self.transformer = 
nn.Sequential(*[\n            TransformerBlock(\n                transformer_dim,\n                mlp_ratio=mlp_ratio,\n                num_heads=num_heads,\n                qkv_bias=True,\n                attn_drop=attn_drop,\n                proj_drop=drop,\n                drop_path=drop_path_rate,\n                act_layer=layers.act,\n                norm_layer=transformer_norm_layer,\n                **dd,\n            )\n            for _ in range(transformer_depth)\n        ])\n        self.norm = transformer_norm_layer(transformer_dim, **dd)\n\n        self.conv_proj = layers.conv_norm_act(transformer_dim, out_chs, kernel_size=1, stride=1, **dd)\n\n        if no_fusion:\n            self.conv_fusion = None\n        else:\n            self.conv_fusion = layers.conv_norm_act(in_chs + out_chs, out_chs, kernel_size=kernel_size, stride=1, **dd)\n\n        self.patch_size = to_2tuple(patch_size)\n        self.patch_area = self.patch_size[0] * self.patch_size[1]\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        shortcut = x\n\n        # Local representation\n        x = self.conv_kxk(x)\n        x = self.conv_1x1(x)\n\n        # Unfold (feature map -> patches)\n        patch_h, patch_w = self.patch_size\n        B, C, H, W = x.shape\n        new_h, new_w = math.ceil(H / patch_h) * patch_h, math.ceil(W / patch_w) * patch_w\n        num_patch_h, num_patch_w = new_h // patch_h, new_w // patch_w  # n_h, n_w\n        num_patches = num_patch_h * num_patch_w  # N\n        interpolate = False\n        if new_h != H or new_w != W:\n            # Note: Padding can be done, but then it needs to be handled in attention function.\n            x = F.interpolate(x, size=(new_h, new_w), mode=\"bilinear\", align_corners=False)\n            interpolate = True\n\n        # [B, C, H, W] --> [B * C * n_h, n_w, p_h, p_w]\n        x = x.reshape(B * C * num_patch_h, patch_h, num_patch_w, patch_w).transpose(1, 2)\n        # [B * C * n_h, n_w, p_h, p_w] --> [BP, N, C] 
where P = p_h * p_w and N = n_h * n_w\n        x = x.reshape(B, C, num_patches, self.patch_area).transpose(1, 3).reshape(B * self.patch_area, num_patches, -1)\n\n        # Global representations\n        x = self.transformer(x)\n        x = self.norm(x)\n\n        # Fold (patch -> feature map)\n        # [B, P, N, C] --> [B*C*n_h, n_w, p_h, p_w]\n        x = x.contiguous().view(B, self.patch_area, num_patches, -1)\n        x = x.transpose(1, 3).reshape(B * C * num_patch_h, num_patch_w, patch_h, patch_w)\n        # [B*C*n_h, n_w, p_h, p_w] --> [B*C*n_h, p_h, n_w, p_w] --> [B, C, H, W]\n        x = x.transpose(1, 2).reshape(B, C, num_patch_h * patch_h, num_patch_w * patch_w)\n        if interpolate:\n            x = F.interpolate(x, size=(H, W), mode=\"bilinear\", align_corners=False)\n\n        x = self.conv_proj(x)\n        if self.conv_fusion is not None:\n            x = self.conv_fusion(torch.cat((shortcut, x), dim=1))\n        return x\n\n\nclass LinearSelfAttention(nn.Module):\n    \"\"\"\n    This layer applies a self-attention with linear complexity, as described in `https://arxiv.org/abs/2206.02680`\n    This layer can be used for self- as well as cross-attention.\n    Args:\n        embed_dim (int): :math:`C` from an expected input of size :math:`(N, C, H, W)`\n        attn_drop (float): Dropout value for context scores. Default: 0.0\n        bias (bool): Use bias in learnable layers. Default: True\n    Shape:\n        - Input: :math:`(N, C, P, N)` where :math:`N` is the batch size, :math:`C` is the input channels,\n        :math:`P` is the number of pixels in the patch, and :math:`N` is the number of patches\n        - Output: same as the input\n    .. note::\n        For MobileViTv2, we unfold the feature map [B, C, H, W] into [B, C, P, N] where P is the number of pixels\n        in a patch and N is the number of patches. Because channel is the first dimension in this unfolded tensor,\n        we use point-wise convolution (instead of a linear layer). 
This avoids a transpose operation (which may be\n        expensive on resource-constrained devices) that may be required to convert the unfolded tensor from\n        channel-first to channel-last format in case of a linear layer.\n    \"\"\"\n\n    def __init__(\n            self,\n            embed_dim: int,\n            attn_drop: float = 0.0,\n            proj_drop: float = 0.0,\n            bias: bool = True,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.embed_dim = embed_dim\n\n        self.qkv_proj = nn.Conv2d(\n            in_channels=embed_dim,\n            out_channels=1 + (2 * embed_dim),\n            bias=bias,\n            kernel_size=1,\n            **dd,\n        )\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.out_proj = nn.Conv2d(\n            in_channels=embed_dim,\n            out_channels=embed_dim,\n            bias=bias,\n            kernel_size=1,\n            **dd,\n        )\n        self.out_drop = nn.Dropout(proj_drop)\n\n    def _forward_self_attn(self, x: torch.Tensor) -> torch.Tensor:\n        # [B, C, P, N] --> [B, h + 2d, P, N]\n        qkv = self.qkv_proj(x)\n\n        # Project x into query, key and value\n        # Query --> [B, 1, P, N]\n        # value, key --> [B, d, P, N]\n        query, key, value = qkv.split([1, self.embed_dim, self.embed_dim], dim=1)\n\n        # apply softmax along N dimension\n        context_scores = F.softmax(query, dim=-1)\n        context_scores = self.attn_drop(context_scores)\n\n        # Compute context vector\n        # [B, d, P, N] x [B, 1, P, N] -> [B, d, P, N] --> [B, d, P, 1]\n        context_vector = (key * context_scores).sum(dim=-1, keepdim=True)\n\n        # combine context vector with values\n        # [B, d, P, N] * [B, d, P, 1] --> [B, d, P, N]\n        out = F.relu(value) * context_vector.expand_as(value)\n        out = self.out_proj(out)\n        out = 
self.out_drop(out)\n        return out\n\n    @torch.jit.ignore()\n    def _forward_cross_attn(self, x: torch.Tensor, x_prev: Optional[torch.Tensor] = None) -> torch.Tensor:\n        # x --> [B, C, P, N]\n        # x_prev = [B, C, P, M]\n        batch_size, in_dim, kv_patch_area, kv_num_patches = x.shape\n        q_patch_area, q_num_patches = x.shape[-2:]\n\n        assert (\n            kv_patch_area == q_patch_area\n        ), \"The number of pixels in a patch for query and key_value should be the same\"\n\n        # compute query, key, and value\n        # [B, C, P, M] --> [B, 1 + d, P, M]\n        qk = F.conv2d(\n            x_prev,\n            weight=self.qkv_proj.weight[:self.embed_dim + 1],\n            bias=self.qkv_proj.bias[:self.embed_dim + 1],\n        )\n\n        # [B, 1 + d, P, M] --> [B, 1, P, M], [B, d, P, M]\n        query, key = qk.split([1, self.embed_dim], dim=1)\n        # [B, C, P, N] --> [B, d, P, N]\n        value = F.conv2d(\n            x,\n            weight=self.qkv_proj.weight[self.embed_dim + 1],\n            bias=self.qkv_proj.bias[self.embed_dim + 1] if self.qkv_proj.bias is not None else None,\n        )\n\n        # apply softmax along M dimension\n        context_scores = F.softmax(query, dim=-1)\n        context_scores = self.attn_drop(context_scores)\n\n        # compute context vector\n        # [B, d, P, M] * [B, 1, P, M] -> [B, d, P, M] --> [B, d, P, 1]\n        context_vector = (key * context_scores).sum(dim=-1, keepdim=True)\n\n        # combine context vector with values\n        # [B, d, P, N] * [B, d, P, 1] --> [B, d, P, N]\n        out = F.relu(value) * context_vector.expand_as(value)\n        out = self.out_proj(out)\n        out = self.out_drop(out)\n        return out\n\n    def forward(self, x: torch.Tensor, x_prev: Optional[torch.Tensor] = None) -> torch.Tensor:\n        if x_prev is None:\n            return self._forward_self_attn(x)\n        else:\n            return self._forward_cross_attn(x, 
x_prev=x_prev)\n\n\nclass LinearTransformerBlock(nn.Module):\n    \"\"\"\n    This class defines the pre-norm transformer encoder with linear self-attention in `MobileViTv2 paper <>`_\n    Args:\n        embed_dim (int): :math:`C_{in}` from an expected input of size :math:`(B, C_{in}, P, N)`\n        mlp_ratio (float): Inner dimension ratio of the FFN relative to embed_dim\n        drop (float): Dropout rate. Default: 0.0\n        attn_drop (float): Dropout rate for attention in multi-head attention. Default: 0.0\n        drop_path (float): Stochastic depth rate Default: 0.0\n        norm_layer (Callable): Normalization layer. Default: layer_norm_2d\n    Shape:\n        - Input: :math:`(B, C_{in}, P, N)` where :math:`B` is batch size, :math:`C_{in}` is input embedding dim,\n            :math:`P` is number of pixels in a patch, and :math:`N` is number of patches,\n        - Output: same shape as the input\n    \"\"\"\n\n    def __init__(\n            self,\n            embed_dim: int,\n            mlp_ratio: float = 2.0,\n            drop: float = 0.0,\n            attn_drop: float = 0.0,\n            drop_path: float = 0.0,\n            act_layer: Optional[Type[nn.Module]] = None,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        act_layer = act_layer or nn.SiLU\n        norm_layer = norm_layer or GroupNorm1\n\n        self.norm1 = norm_layer(embed_dim, **dd)\n        self.attn = LinearSelfAttention(embed_dim=embed_dim, attn_drop=attn_drop, proj_drop=drop, **dd)\n        self.drop_path1 = DropPath(drop_path)\n\n        self.norm2 = norm_layer(embed_dim, **dd)\n        self.mlp = ConvMlp(\n            in_features=embed_dim,\n            hidden_features=int(embed_dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=drop,\n            **dd)\n        self.drop_path2 = DropPath(drop_path)\n\n   
 def forward(self, x: torch.Tensor, x_prev: Optional[torch.Tensor] = None) -> torch.Tensor:\n        if x_prev is None:\n            # self-attention\n            x = x + self.drop_path1(self.attn(self.norm1(x)))\n        else:\n            # cross-attention\n            res = x\n            x = self.norm1(x)  # norm\n            x = self.attn(x, x_prev)  # attn\n            x = self.drop_path1(x) + res  # residual\n\n        # Feed forward network\n        x = x + self.drop_path2(self.mlp(self.norm2(x)))\n        return x\n\n\n@register_notrace_module\nclass MobileVitV2Block(nn.Module):\n    \"\"\"\n    This class defines the `MobileViTv2 block <>`_\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: Optional[int] = None,\n            kernel_size: int = 3,\n            bottle_ratio: float = 1.0,\n            group_size: Optional[int] = 1,\n            dilation: Tuple[int, int] = (1, 1),\n            mlp_ratio: float = 2.0,\n            transformer_dim: Optional[int] = None,\n            transformer_depth: int = 2,\n            patch_size: int = 8,\n            attn_drop: float = 0.,\n            drop: int = 0.,\n            drop_path_rate: float = 0.,\n            layers: LayerFn = None,\n            transformer_norm_layer: Type[nn.Module] = GroupNorm1,\n            device=None,\n            dtype=None,\n            **kwargs,  # eat unused args\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        layers = layers or LayerFn()\n        groups = num_groups(group_size, in_chs)\n        out_chs = out_chs or in_chs\n        transformer_dim = transformer_dim or make_divisible(bottle_ratio * in_chs)\n\n        self.conv_kxk = layers.conv_norm_act(\n            in_chs,\n            in_chs,\n            kernel_size=kernel_size,\n            stride=1,\n            groups=groups,\n            dilation=dilation[0],\n            **dd,\n        )\n        self.conv_1x1 = nn.Conv2d(in_chs, 
transformer_dim, kernel_size=1, bias=False, **dd)\n\n        self.transformer = nn.Sequential(*[\n            LinearTransformerBlock(\n                transformer_dim,\n                mlp_ratio=mlp_ratio,\n                attn_drop=attn_drop,\n                drop=drop,\n                drop_path=drop_path_rate,\n                act_layer=layers.act,\n                norm_layer=transformer_norm_layer,\n                **dd,\n            )\n            for _ in range(transformer_depth)\n        ])\n        self.norm = transformer_norm_layer(transformer_dim, **dd)\n\n        self.conv_proj = layers.conv_norm_act(transformer_dim, out_chs, kernel_size=1, stride=1, apply_act=False, **dd)\n\n        self.patch_size = to_2tuple(patch_size)\n        self.patch_area = self.patch_size[0] * self.patch_size[1]\n        self.coreml_exportable = is_exportable()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        B, C, H, W = x.shape\n        patch_h, patch_w = self.patch_size\n        new_h, new_w = math.ceil(H / patch_h) * patch_h, math.ceil(W / patch_w) * patch_w\n        num_patch_h, num_patch_w = new_h // patch_h, new_w // patch_w  # n_h, n_w\n        num_patches = num_patch_h * num_patch_w  # N\n        if new_h != H or new_w != W:\n            x = F.interpolate(x, size=(new_h, new_w), mode=\"bilinear\", align_corners=True)\n\n        # Local representation\n        x = self.conv_kxk(x)\n        x = self.conv_1x1(x)\n\n        # Unfold (feature map -> patches), [B, C, H, W] -> [B, C, P, N]\n        C = x.shape[1]\n        if self.coreml_exportable:\n            x = F.unfold(x, kernel_size=(patch_h, patch_w), stride=(patch_h, patch_w))\n        else:\n            x = x.reshape(B, C, num_patch_h, patch_h, num_patch_w, patch_w).permute(0, 1, 3, 5, 2, 4)\n        x = x.reshape(B, C, -1, num_patches)\n\n        # Global representations\n        x = self.transformer(x)\n        x = self.norm(x)\n\n        # Fold (patches -> feature map), [B, C, P, N] --> [B, C, 
H, W]\n        if self.coreml_exportable:\n            # adopted from https://github.com/apple/ml-cvnets/blob/main/cvnets/modules/mobilevit_block.py#L609-L624\n            x = x.reshape(B, C * patch_h * patch_w, num_patch_h, num_patch_w)\n            x = F.pixel_shuffle(x, upscale_factor=patch_h)\n        else:\n            x = x.reshape(B, C, patch_h, patch_w, num_patch_h, num_patch_w).permute(0, 1, 4, 2, 5, 3)\n            x = x.reshape(B, C, num_patch_h * patch_h, num_patch_w * patch_w)\n\n        x = self.conv_proj(x)\n        return x\n\n\nregister_block('mobilevit', MobileVitBlock)\nregister_block('mobilevit2', MobileVitV2Block)\n\n\ndef _create_mobilevit(variant, cfg_variant=None, pretrained=False, **kwargs):\n    return build_model_with_cfg(\n        ByobNet, variant, pretrained,\n        model_cfg=model_cfgs[variant] if not cfg_variant else model_cfgs[cfg_variant],\n        feature_cfg=dict(flatten_sequential=True),\n        **kwargs)\n\n\ndef _create_mobilevit2(variant, cfg_variant=None, pretrained=False, **kwargs):\n    return build_model_with_cfg(\n        ByobNet, variant, pretrained,\n        model_cfg=model_cfgs[variant] if not cfg_variant else model_cfgs[cfg_variant],\n        feature_cfg=dict(flatten_sequential=True),\n        **kwargs)\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 256, 256), 'pool_size': (8, 8),\n        'crop_pct': 0.9, 'interpolation': 'bicubic',\n        'mean': (0., 0., 0.), 'std': (1., 1., 1.),\n        'first_conv': 'stem.conv', 'classifier': 'head.fc',\n        'fixed_input_size': False,\n        'license': 'cvnets-license',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'mobilevit_xxs.cvnets_in1k': _cfg(hf_hub_id='timm/'),\n    'mobilevit_xs.cvnets_in1k': _cfg(hf_hub_id='timm/'),\n    'mobilevit_s.cvnets_in1k': _cfg(hf_hub_id='timm/'),\n\n    'mobilevitv2_050.cvnets_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.888),\n    
'mobilevitv2_075.cvnets_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.888),\n    'mobilevitv2_100.cvnets_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.888),\n    'mobilevitv2_125.cvnets_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.888),\n    'mobilevitv2_150.cvnets_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.888),\n    'mobilevitv2_175.cvnets_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.888),\n    'mobilevitv2_200.cvnets_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.888),\n\n    'mobilevitv2_150.cvnets_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.888),\n    'mobilevitv2_175.cvnets_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.888),\n    'mobilevitv2_200.cvnets_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.888),\n\n    'mobilevitv2_150.cvnets_in22k_ft_in1k_384': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0),\n    'mobilevitv2_175.cvnets_in22k_ft_in1k_384': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0),\n    'mobilevitv2_200.cvnets_in22k_ft_in1k_384': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0),\n})\n\n\n@register_model\ndef mobilevit_xxs(pretrained=False, **kwargs) -> ByobNet:\n    return _create_mobilevit('mobilevit_xxs', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mobilevit_xs(pretrained=False, **kwargs) -> ByobNet:\n    return _create_mobilevit('mobilevit_xs', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mobilevit_s(pretrained=False, **kwargs) -> ByobNet:\n    return _create_mobilevit('mobilevit_s', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mobilevitv2_050(pretrained=False, **kwargs) -> ByobNet:\n    return _create_mobilevit('mobilevitv2_050', pretrained=pretrained, 
**kwargs)\n\n\n@register_model\ndef mobilevitv2_075(pretrained=False, **kwargs) -> ByobNet:\n    return _create_mobilevit('mobilevitv2_075', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mobilevitv2_100(pretrained=False, **kwargs) -> ByobNet:\n    return _create_mobilevit('mobilevitv2_100', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mobilevitv2_125(pretrained=False, **kwargs) -> ByobNet:\n    return _create_mobilevit('mobilevitv2_125', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mobilevitv2_150(pretrained=False, **kwargs) -> ByobNet:\n    return _create_mobilevit('mobilevitv2_150', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mobilevitv2_175(pretrained=False, **kwargs) -> ByobNet:\n    return _create_mobilevit('mobilevitv2_175', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mobilevitv2_200(pretrained=False, **kwargs) -> ByobNet:\n    return _create_mobilevit('mobilevitv2_200', pretrained=pretrained, **kwargs)\n\n\nregister_model_deprecations(__name__, {\n    'mobilevitv2_150_in22ft1k': 'mobilevitv2_150.cvnets_in22k_ft_in1k',\n    'mobilevitv2_175_in22ft1k': 'mobilevitv2_175.cvnets_in22k_ft_in1k',\n    'mobilevitv2_200_in22ft1k': 'mobilevitv2_200.cvnets_in22k_ft_in1k',\n\n    'mobilevitv2_150_384_in22ft1k': 'mobilevitv2_150.cvnets_in22k_ft_in1k_384',\n    'mobilevitv2_175_384_in22ft1k': 'mobilevitv2_175.cvnets_in22k_ft_in1k_384',\n    'mobilevitv2_200_384_in22ft1k': 'mobilevitv2_200.cvnets_in22k_ft_in1k_384',\n})\n"
  },
  {
    "path": "timm/models/mvitv2.py",
    "content": "\"\"\" Multi-Scale Vision Transformer v2\n\n@inproceedings{li2021improved,\n  title={MViTv2: Improved multiscale vision transformers for classification and detection},\n  author={Li, Yanghao and Wu, Chao-Yuan and Fan, Haoqi and Mangalam, Karttikeya and Xiong, Bo and Malik, Jitendra and Feichtenhofer, Christoph},\n  booktitle={CVPR},\n  year={2022}\n}\n\nCode adapted from original Apache 2.0 licensed impl at https://github.com/facebookresearch/mvit\nOriginal copyright below.\n\nModifications and timm support by / Copyright 2022, Ross Wightman\n\"\"\"\n# Copyright (c) Meta Platforms, Inc. and affiliates. All Rights Reserved. All Rights Reserved.\nimport operator\nfrom collections import OrderedDict\nfrom dataclasses import dataclass\nfrom functools import partial, reduce\nfrom typing import Union, List, Tuple, Optional, Any, Type\n\nimport torch\nfrom torch import nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import Mlp, DropPath, calculate_drop_path_rates, trunc_normal_tf_, get_norm_layer, to_2tuple\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_function\nfrom ._manipulate import checkpoint\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['MultiScaleVit', 'MultiScaleVitCfg']  # model_registry will add each entrypoint fn to this\n\n\n@dataclass\nclass MultiScaleVitCfg:\n    depths: Tuple[int, ...] 
= (2, 3, 16, 3)\n    embed_dim: Union[int, Tuple[int, ...]] = 96\n    num_heads: Union[int, Tuple[int, ...]] = 1\n    mlp_ratio: float = 4.\n    pool_first: bool = False\n    expand_attn: bool = True\n    qkv_bias: bool = True\n    use_cls_token: bool = False\n    use_abs_pos: bool = False\n    residual_pooling: bool = True\n    mode: str = 'conv'\n    kernel_qkv: Tuple[int, int] = (3, 3)\n    stride_q: Optional[Tuple[Tuple[int, int]]] = ((1, 1), (2, 2), (2, 2), (2, 2))\n    stride_kv: Optional[Tuple[Tuple[int, int]]] = None\n    stride_kv_adaptive: Optional[Tuple[int, int]] = (4, 4)\n    patch_kernel: Tuple[int, int] = (7, 7)\n    patch_stride: Tuple[int, int] = (4, 4)\n    patch_padding: Tuple[int, int] = (3, 3)\n    pool_type: str = 'max'\n    rel_pos_type: str = 'spatial'\n    act_layer: Union[str, Tuple[str, str]] = 'gelu'\n    norm_layer: Union[str, Tuple[str, str]] = 'layernorm'\n    norm_eps: float = 1e-6\n\n    def __post_init__(self):\n        num_stages = len(self.depths)\n        if not isinstance(self.embed_dim, (tuple, list)):\n            self.embed_dim = tuple(self.embed_dim * 2 ** i for i in range(num_stages))\n        assert len(self.embed_dim) == num_stages\n\n        if not isinstance(self.num_heads, (tuple, list)):\n            self.num_heads = tuple(self.num_heads * 2 ** i for i in range(num_stages))\n        assert len(self.num_heads) == num_stages\n\n        if self.stride_kv_adaptive is not None and self.stride_kv is None:\n            _stride_kv = self.stride_kv_adaptive\n            pool_kv_stride = []\n            for i in range(num_stages):\n                if min(self.stride_q[i]) > 1:\n                    _stride_kv = [\n                        max(_stride_kv[d] // self.stride_q[i][d], 1)\n                        for d in range(len(_stride_kv))\n                    ]\n                pool_kv_stride.append(tuple(_stride_kv))\n            self.stride_kv = tuple(pool_kv_stride)\n\n\ndef prod(iterable):\n    return reduce(operator.mul, 
iterable, 1)\n\n\nclass PatchEmbed(nn.Module):\n    \"\"\"\n    PatchEmbed.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim_in: int = 3,\n            dim_out: int = 768,\n            kernel: Tuple[int, int] = (7, 7),\n            stride: Tuple[int, int] = (4, 4),\n            padding: Tuple[int, int] = (3, 3),\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n\n        self.proj = nn.Conv2d(\n            dim_in,\n            dim_out,\n            kernel_size=kernel,\n            stride=stride,\n            padding=padding,\n            **dd,\n        )\n\n    def forward(self, x) -> Tuple[torch.Tensor, List[int]]:\n        x = self.proj(x)\n        # B C H W -> B HW C\n        return x.flatten(2).transpose(1, 2), x.shape[-2:]\n\n\n@register_notrace_function\ndef reshape_pre_pool(\n        x,\n        feat_size: List[int],\n        has_cls_token: bool = True\n) -> Tuple[torch.Tensor, Optional[torch.Tensor]]:\n    H, W = feat_size\n    if has_cls_token:\n        cls_tok, x = x[:, :, :1, :], x[:, :, 1:, :]\n    else:\n        cls_tok = None\n    x = x.reshape(-1, H, W, x.shape[-1]).permute(0, 3, 1, 2).contiguous()\n    return x, cls_tok\n\n\n@register_notrace_function\ndef reshape_post_pool(\n        x,\n        num_heads: int,\n        cls_tok: Optional[torch.Tensor] = None\n) -> Tuple[torch.Tensor, List[int]]:\n    feat_size = [x.shape[2], x.shape[3]]\n    L_pooled = x.shape[2] * x.shape[3]\n    x = x.reshape(-1, num_heads, x.shape[1], L_pooled).transpose(2, 3)\n    if cls_tok is not None:\n        x = torch.cat((cls_tok, x), dim=2)\n    return x, feat_size\n\n\n@register_notrace_function\ndef cal_rel_pos_type(\n        attn: torch.Tensor,\n        q: torch.Tensor,\n        has_cls_token: bool,\n        q_size: List[int],\n        k_size: List[int],\n        rel_pos_h: torch.Tensor,\n        rel_pos_w: torch.Tensor,\n):\n    \"\"\"\n    Spatial Relative 
Positional Embeddings.\n    \"\"\"\n    sp_idx = 1 if has_cls_token else 0\n    q_h, q_w = q_size\n    k_h, k_w = k_size\n\n    # Scale up rel pos if shapes for q and k are different.\n    q_h_ratio = max(k_h / q_h, 1.0)\n    k_h_ratio = max(q_h / k_h, 1.0)\n    dist_h = (\n        torch.arange(q_h, device=q.device, dtype=torch.long).unsqueeze(-1) * q_h_ratio -\n        torch.arange(k_h, device=q.device, dtype=torch.long).unsqueeze(0) * k_h_ratio\n    )\n    dist_h += (k_h - 1) * k_h_ratio\n    q_w_ratio = max(k_w / q_w, 1.0)\n    k_w_ratio = max(q_w / k_w, 1.0)\n    dist_w = (\n        torch.arange(q_w, device=q.device, dtype=torch.long).unsqueeze(-1) * q_w_ratio -\n        torch.arange(k_w, device=q.device, dtype=torch.long).unsqueeze(0) * k_w_ratio\n    )\n    dist_w += (k_w - 1) * k_w_ratio\n\n    rel_h = rel_pos_h[dist_h.long()]\n    rel_w = rel_pos_w[dist_w.long()]\n\n    B, n_head, q_N, dim = q.shape\n\n    r_q = q[:, :, sp_idx:].reshape(B, n_head, q_h, q_w, dim)\n    rel_h = torch.einsum(\"byhwc,hkc->byhwk\", r_q, rel_h)\n    rel_w = torch.einsum(\"byhwc,wkc->byhwk\", r_q, rel_w)\n\n    attn[:, :, sp_idx:, sp_idx:] = (\n        attn[:, :, sp_idx:, sp_idx:].view(B, -1, q_h, q_w, k_h, k_w)\n        + rel_h.unsqueeze(-1)\n        + rel_w.unsqueeze(-2)\n    ).view(B, -1, q_h * q_w, k_h * k_w)\n\n    return attn\n\n\nclass MultiScaleAttentionPoolFirst(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            dim_out: int,\n            feat_size: Tuple[int, int],\n            num_heads: int = 8,\n            qkv_bias: bool = True,\n            mode: str = \"conv\",\n            kernel_q: Tuple[int, int] = (1, 1),\n            kernel_kv: Tuple[int, int] = (1, 1),\n            stride_q: Tuple[int, int] = (1, 1),\n            stride_kv: Tuple[int, int] = (1, 1),\n            has_cls_token: bool = True,\n            rel_pos_type: str = 'spatial',\n            residual_pooling: bool = True,\n            norm_layer: Type[nn.Module] = 
nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_heads = num_heads\n        self.dim_out = dim_out\n        self.head_dim = dim_out // num_heads\n        self.scale = self.head_dim ** -0.5\n        self.has_cls_token = has_cls_token\n        padding_q = tuple([int(q // 2) for q in kernel_q])\n        padding_kv = tuple([int(kv // 2) for kv in kernel_kv])\n\n        self.q = nn.Linear(dim, dim_out, bias=qkv_bias, **dd)\n        self.k = nn.Linear(dim, dim_out, bias=qkv_bias, **dd)\n        self.v = nn.Linear(dim, dim_out, bias=qkv_bias, **dd)\n        self.proj = nn.Linear(dim_out, dim_out, **dd)\n\n        # Skip pooling with kernel and stride size of (1, 1, 1).\n        if prod(kernel_q) == 1 and prod(stride_q) == 1:\n            kernel_q = None\n        if prod(kernel_kv) == 1 and prod(stride_kv) == 1:\n            kernel_kv = None\n        self.mode = mode\n        self.unshared = mode == 'conv_unshared'\n        self.pool_q, self.pool_k, self.pool_v = None, None, None\n        self.norm_q, self.norm_k, self.norm_v = None, None, None\n        if mode in (\"avg\", \"max\"):\n            pool_op = nn.MaxPool2d if mode == \"max\" else nn.AvgPool2d\n            if kernel_q:\n                self.pool_q = pool_op(kernel_q, stride_q, padding_q)\n            if kernel_kv:\n                self.pool_k = pool_op(kernel_kv, stride_kv, padding_kv)\n                self.pool_v = pool_op(kernel_kv, stride_kv, padding_kv)\n        elif mode == \"conv\" or mode == \"conv_unshared\":\n            dim_conv = dim // num_heads if mode == \"conv\" else dim\n            if kernel_q:\n                self.pool_q = nn.Conv2d(\n                    dim_conv,\n                    dim_conv,\n                    kernel_q,\n                    stride=stride_q,\n                    padding=padding_q,\n                    groups=dim_conv,\n                    bias=False,\n    
                **dd,\n                )\n                self.norm_q = norm_layer(dim_conv, **dd)\n            if kernel_kv:\n                self.pool_k = nn.Conv2d(\n                    dim_conv,\n                    dim_conv,\n                    kernel_kv,\n                    stride=stride_kv,\n                    padding=padding_kv,\n                    groups=dim_conv,\n                    bias=False,\n                    **dd,\n                )\n                self.norm_k = norm_layer(dim_conv, **dd)\n                self.pool_v = nn.Conv2d(\n                    dim_conv,\n                    dim_conv,\n                    kernel_kv,\n                    stride=stride_kv,\n                    padding=padding_kv,\n                    groups=dim_conv,\n                    bias=False,\n                    **dd,\n                )\n                self.norm_v = norm_layer(dim_conv, **dd)\n        else:\n            raise NotImplementedError(f\"Unsupported model {mode}\")\n\n        # relative pos embedding\n        self.rel_pos_type = rel_pos_type\n        if self.rel_pos_type == 'spatial':\n            assert feat_size[0] == feat_size[1]\n            size = feat_size[0]\n            q_size = size // stride_q[1] if len(stride_q) > 0 else size\n            kv_size = size // stride_kv[1] if len(stride_kv) > 0 else size\n            rel_sp_dim = 2 * max(q_size, kv_size) - 1\n\n            self.rel_pos_h = nn.Parameter(torch.zeros(rel_sp_dim, self.head_dim, **dd))\n            self.rel_pos_w = nn.Parameter(torch.zeros(rel_sp_dim, self.head_dim, **dd))\n            trunc_normal_tf_(self.rel_pos_h, std=0.02)\n            trunc_normal_tf_(self.rel_pos_w, std=0.02)\n\n        self.residual_pooling = residual_pooling\n\n    def forward(self, x, feat_size: List[int]):\n        B, N, _ = x.shape\n\n        fold_dim = 1 if self.unshared else self.num_heads\n        x = x.reshape(B, N, fold_dim, -1).permute(0, 2, 1, 3)\n        q = k = v = x\n\n        if self.pool_q is 
not None:\n            q, q_tok = reshape_pre_pool(q, feat_size, self.has_cls_token)\n            q = self.pool_q(q)\n            q, q_size = reshape_post_pool(q, self.num_heads, q_tok)\n        else:\n            q_size = feat_size\n        if self.norm_q is not None:\n            q = self.norm_q(q)\n\n        if self.pool_k is not None:\n            k, k_tok = reshape_pre_pool(k, feat_size, self.has_cls_token)\n            k = self.pool_k(k)\n            k, k_size = reshape_post_pool(k, self.num_heads, k_tok)\n        else:\n            k_size = feat_size\n        if self.norm_k is not None:\n            k = self.norm_k(k)\n\n        if self.pool_v is not None:\n            v, v_tok = reshape_pre_pool(v, feat_size, self.has_cls_token)\n            v = self.pool_v(v)\n            v, v_size = reshape_post_pool(v, self.num_heads, v_tok)\n        else:\n            v_size = feat_size\n        if self.norm_v is not None:\n            v = self.norm_v(v)\n\n        q_N = q_size[0] * q_size[1] + int(self.has_cls_token)\n        q = q.transpose(1, 2).reshape(B, q_N, -1)\n        q = self.q(q).reshape(B, q_N, self.num_heads, -1).transpose(1, 2)\n\n        k_N = k_size[0] * k_size[1] + int(self.has_cls_token)\n        k = k.transpose(1, 2).reshape(B, k_N, -1)\n        k = self.k(k).reshape(B, k_N, self.num_heads, -1)\n\n        v_N = v_size[0] * v_size[1] + int(self.has_cls_token)\n        v = v.transpose(1, 2).reshape(B, v_N, -1)\n        v = self.v(v).reshape(B, v_N, self.num_heads, -1).transpose(1, 2)\n\n        attn = (q * self.scale) @ k\n        if self.rel_pos_type == 'spatial':\n            attn = cal_rel_pos_type(\n                attn,\n                q,\n                self.has_cls_token,\n                q_size,\n                k_size,\n                self.rel_pos_h,\n                self.rel_pos_w,\n            )\n        attn = attn.softmax(dim=-1)\n        x = attn @ v\n\n        if self.residual_pooling:\n            x = x + q\n\n        x = 
x.transpose(1, 2).reshape(B, -1, self.dim_out)\n        x = self.proj(x)\n\n        return x, q_size\n\n\nclass MultiScaleAttention(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            dim_out: int,\n            feat_size: Tuple[int, int],\n            num_heads: int = 8,\n            qkv_bias: bool = True,\n            mode: str = \"conv\",\n            kernel_q: Tuple[int, int] = (1, 1),\n            kernel_kv: Tuple[int, int] = (1, 1),\n            stride_q: Tuple[int, int] = (1, 1),\n            stride_kv: Tuple[int, int] = (1, 1),\n            has_cls_token: bool = True,\n            rel_pos_type: str = 'spatial',\n            residual_pooling: bool = True,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_heads = num_heads\n        self.dim_out = dim_out\n        self.head_dim = dim_out // num_heads\n        self.scale = self.head_dim ** -0.5\n        self.has_cls_token = has_cls_token\n        padding_q = tuple([int(q // 2) for q in kernel_q])\n        padding_kv = tuple([int(kv // 2) for kv in kernel_kv])\n\n        self.qkv = nn.Linear(dim, dim_out * 3, bias=qkv_bias, **dd)\n        self.proj = nn.Linear(dim_out, dim_out, **dd)\n\n        # Skip pooling with kernel and stride size of (1, 1, 1).\n        if prod(kernel_q) == 1 and prod(stride_q) == 1:\n            kernel_q = None\n        if prod(kernel_kv) == 1 and prod(stride_kv) == 1:\n            kernel_kv = None\n        self.mode = mode\n        self.unshared = mode == 'conv_unshared'\n        self.norm_q, self.norm_k, self.norm_v = None, None, None\n        self.pool_q, self.pool_k, self.pool_v = None, None, None\n        if mode in (\"avg\", \"max\"):\n            pool_op = nn.MaxPool2d if mode == \"max\" else nn.AvgPool2d\n            if kernel_q:\n                self.pool_q = pool_op(kernel_q, stride_q, 
padding_q)\n            if kernel_kv:\n                self.pool_k = pool_op(kernel_kv, stride_kv, padding_kv)\n                self.pool_v = pool_op(kernel_kv, stride_kv, padding_kv)\n        elif mode == \"conv\" or mode == \"conv_unshared\":\n            dim_conv = dim_out // num_heads if mode == \"conv\" else dim_out\n            if kernel_q:\n                self.pool_q = nn.Conv2d(\n                    dim_conv,\n                    dim_conv,\n                    kernel_q,\n                    stride=stride_q,\n                    padding=padding_q,\n                    groups=dim_conv,\n                    bias=False,\n                    **dd,\n                )\n                self.norm_q = norm_layer(dim_conv, **dd)\n            if kernel_kv:\n                self.pool_k = nn.Conv2d(\n                    dim_conv,\n                    dim_conv,\n                    kernel_kv,\n                    stride=stride_kv,\n                    padding=padding_kv,\n                    groups=dim_conv,\n                    bias=False,\n                    **dd,\n                )\n                self.norm_k = norm_layer(dim_conv, **dd)\n                self.pool_v = nn.Conv2d(\n                    dim_conv,\n                    dim_conv,\n                    kernel_kv,\n                    stride=stride_kv,\n                    padding=padding_kv,\n                    groups=dim_conv,\n                    bias=False,\n                    **dd,\n                )\n                self.norm_v = norm_layer(dim_conv, **dd)\n        else:\n            raise NotImplementedError(f\"Unsupported model {mode}\")\n\n        # relative pos embedding\n        self.rel_pos_type = rel_pos_type\n        if self.rel_pos_type == 'spatial':\n            assert feat_size[0] == feat_size[1]\n            size = feat_size[0]\n            q_size = size // stride_q[1] if len(stride_q) > 0 else size\n            kv_size = size // stride_kv[1] if len(stride_kv) > 0 else size\n            
rel_sp_dim = 2 * max(q_size, kv_size) - 1\n\n            self.rel_pos_h = nn.Parameter(torch.zeros(rel_sp_dim, self.head_dim, **dd))\n            self.rel_pos_w = nn.Parameter(torch.zeros(rel_sp_dim, self.head_dim, **dd))\n            trunc_normal_tf_(self.rel_pos_h, std=0.02)\n            trunc_normal_tf_(self.rel_pos_w, std=0.02)\n\n        self.residual_pooling = residual_pooling\n\n    def forward(self, x, feat_size: List[int]):\n        B, N, _ = x.shape\n\n        qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4)\n        q, k, v = qkv.unbind(dim=0)\n\n        if self.pool_q is not None:\n            q, q_tok = reshape_pre_pool(q, feat_size, self.has_cls_token)\n            q = self.pool_q(q)\n            q, q_size = reshape_post_pool(q, self.num_heads, q_tok)\n        else:\n            q_size = feat_size\n        if self.norm_q is not None:\n            q = self.norm_q(q)\n\n        if self.pool_k is not None:\n            k, k_tok = reshape_pre_pool(k, feat_size, self.has_cls_token)\n            k = self.pool_k(k)\n            k, k_size = reshape_post_pool(k, self.num_heads, k_tok)\n        else:\n            k_size = feat_size\n        if self.norm_k is not None:\n            k = self.norm_k(k)\n\n        if self.pool_v is not None:\n            v, v_tok = reshape_pre_pool(v, feat_size, self.has_cls_token)\n            v = self.pool_v(v)\n            v, _ = reshape_post_pool(v, self.num_heads, v_tok)\n        if self.norm_v is not None:\n            v = self.norm_v(v)\n\n        attn = (q * self.scale) @ k.transpose(-2, -1)\n        if self.rel_pos_type == 'spatial':\n            attn = cal_rel_pos_type(\n                attn,\n                q,\n                self.has_cls_token,\n                q_size,\n                k_size,\n                self.rel_pos_h,\n                self.rel_pos_w,\n            )\n        attn = attn.softmax(dim=-1)\n        x = attn @ v\n\n        if self.residual_pooling:\n            x = x + 
q\n\n        x = x.transpose(1, 2).reshape(B, -1, self.dim_out)\n        x = self.proj(x)\n\n        return x, q_size\n\n\nclass MultiScaleBlock(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            dim_out: int,\n            num_heads: int,\n            feat_size: Tuple[int, int],\n            mlp_ratio: float = 4.0,\n            qkv_bias: bool = True,\n            drop_path: float = 0.0,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            kernel_q: Tuple[int, int] = (1, 1),\n            kernel_kv: Tuple[int, int] = (1, 1),\n            stride_q: Tuple[int, int] = (1, 1),\n            stride_kv: Tuple[int, int] = (1, 1),\n            mode: str = \"conv\",\n            has_cls_token: bool = True,\n            expand_attn: bool = False,\n            pool_first: bool = False,\n            rel_pos_type: str = 'spatial',\n            residual_pooling: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        proj_needed = dim != dim_out\n        self.dim = dim\n        self.dim_out = dim_out\n        self.has_cls_token = has_cls_token\n\n        self.norm1 = norm_layer(dim, **dd)\n\n        self.shortcut_proj_attn = nn.Linear(dim, dim_out, **dd) if proj_needed and expand_attn else None\n        if stride_q and prod(stride_q) > 1:\n            kernel_skip = [s + 1 if s > 1 else s for s in stride_q]\n            stride_skip = stride_q\n            padding_skip = [int(skip // 2) for skip in kernel_skip]\n            self.shortcut_pool_attn = nn.MaxPool2d(kernel_skip, stride_skip, padding_skip)\n        else:\n            self.shortcut_pool_attn = None\n\n        att_dim = dim_out if expand_attn else dim\n        attn_layer = MultiScaleAttentionPoolFirst if pool_first else MultiScaleAttention\n        self.attn = attn_layer(\n            dim,\n            att_dim,\n            num_heads=num_heads,\n            
feat_size=feat_size,\n            qkv_bias=qkv_bias,\n            kernel_q=kernel_q,\n            kernel_kv=kernel_kv,\n            stride_q=stride_q,\n            stride_kv=stride_kv,\n            norm_layer=norm_layer,\n            has_cls_token=has_cls_token,\n            mode=mode,\n            rel_pos_type=rel_pos_type,\n            residual_pooling=residual_pooling,\n            **dd,\n        )\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()\n\n        self.norm2 = norm_layer(att_dim, **dd)\n        mlp_dim_out = dim_out\n        self.shortcut_proj_mlp = nn.Linear(dim, dim_out, **dd) if proj_needed and not expand_attn else None\n        self.mlp = Mlp(\n            in_features=att_dim,\n            hidden_features=int(att_dim * mlp_ratio),\n            out_features=mlp_dim_out,\n            **dd,\n        )\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()\n\n    def _shortcut_pool(self, x, feat_size: List[int]):\n        if self.shortcut_pool_attn is None:\n            return x\n        if self.has_cls_token:\n            cls_tok, x = x[:, :1, :], x[:, 1:, :]\n        else:\n            cls_tok = None\n        B, L, C = x.shape\n        H, W = feat_size\n        x = x.reshape(B, H, W, C).permute(0, 3, 1, 2).contiguous()\n        x = self.shortcut_pool_attn(x)\n        x = x.reshape(B, C, -1).transpose(1, 2)\n        if cls_tok is not None:\n            x = torch.cat((cls_tok, x), dim=1)\n        return x\n\n    def forward(self, x, feat_size: List[int]):\n        x_norm = self.norm1(x)\n        # NOTE as per the original impl, this seems odd, but shortcut uses un-normalized input if no proj\n        x_shortcut = x if self.shortcut_proj_attn is None else self.shortcut_proj_attn(x_norm)\n        x_shortcut = self._shortcut_pool(x_shortcut, feat_size)\n        x, feat_size_new = self.attn(x_norm, feat_size)\n        x = x_shortcut + self.drop_path1(x)\n\n        x_norm = self.norm2(x)\n    
    x_shortcut = x if self.shortcut_proj_mlp is None else self.shortcut_proj_mlp(x_norm)\n        x = x_shortcut + self.drop_path2(self.mlp(x_norm))\n        return x, feat_size_new\n\n\nclass MultiScaleVitStage(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            dim_out: int,\n            depth: int,\n            num_heads: int,\n            feat_size: Tuple[int, int],\n            mlp_ratio: float = 4.0,\n            qkv_bias: bool = True,\n            kernel_q: Tuple[int, int] = (1, 1),\n            kernel_kv: Tuple[int, int] = (1, 1),\n            stride_q: Tuple[int, int] = (1, 1),\n            stride_kv: Tuple[int, int] = (1, 1),\n            mode: str = \"conv\",\n            has_cls_token: bool = True,\n            expand_attn: bool = False,\n            pool_first: bool = False,\n            rel_pos_type: str = 'spatial',\n            residual_pooling: bool = True,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            drop_path: Union[float, List[float]] = 0.0,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.grad_checkpointing = False\n\n        self.blocks = nn.ModuleList()\n        if expand_attn:\n            out_dims = (dim_out,) * depth\n        else:\n            out_dims = (dim,) * (depth - 1) + (dim_out,)\n\n        for i in range(depth):\n            attention_block = MultiScaleBlock(\n                dim=dim,\n                dim_out=out_dims[i],\n                num_heads=num_heads,\n                feat_size=feat_size,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                kernel_q=kernel_q,\n                kernel_kv=kernel_kv,\n                stride_q=stride_q if i == 0 else (1, 1),\n                stride_kv=stride_kv,\n                mode=mode,\n                has_cls_token=has_cls_token,\n                pool_first=pool_first,\n                
rel_pos_type=rel_pos_type,\n                residual_pooling=residual_pooling,\n                expand_attn=expand_attn,\n                norm_layer=norm_layer,\n                drop_path=drop_path[i] if isinstance(drop_path, (list, tuple)) else drop_path,\n                **dd,\n            )\n            dim = out_dims[i]\n            self.blocks.append(attention_block)\n            if i == 0:\n                feat_size = tuple([size // stride for size, stride in zip(feat_size, stride_q)])\n\n        self.feat_size = feat_size\n\n    def forward(self, x, feat_size: List[int]):\n        for blk in self.blocks:\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x, feat_size = checkpoint(blk, x, feat_size)\n            else:\n                x, feat_size = blk(x, feat_size)\n        return x, feat_size\n\n\nclass MultiScaleVit(nn.Module):\n    \"\"\"\n    Improved Multiscale Vision Transformers for Classification and Detection\n    Yanghao Li*, Chao-Yuan Wu*, Haoqi Fan, Karttikeya Mangalam, Bo Xiong, Jitendra Malik,\n        Christoph Feichtenhofer*\n    https://arxiv.org/abs/2112.01526\n\n    Multiscale Vision Transformers\n    Haoqi Fan*, Bo Xiong*, Karttikeya Mangalam*, Yanghao Li*, Zhicheng Yan, Jitendra Malik,\n        Christoph Feichtenhofer*\n    https://arxiv.org/abs/2104.11227\n    \"\"\"\n\n    def __init__(\n            self,\n            cfg: MultiScaleVitCfg,\n            img_size: Tuple[int, int] = (224, 224),\n            in_chans: int = 3,\n            global_pool: Optional[str] = None,\n            num_classes: int = 1000,\n            drop_path_rate: float = 0.,\n            drop_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        img_size = to_2tuple(img_size)\n        norm_layer = partial(get_norm_layer(cfg.norm_layer), eps=cfg.norm_eps)\n        self.num_classes = num_classes\n        
self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        if global_pool is None:\n            global_pool = 'token' if cfg.use_cls_token else 'avg'\n        self.global_pool = global_pool\n        self.depths = tuple(cfg.depths)\n        self.expand_attn = cfg.expand_attn\n\n        embed_dim = cfg.embed_dim[0]\n        self.patch_embed = PatchEmbed(\n            dim_in=in_chans,\n            dim_out=embed_dim,\n            kernel=cfg.patch_kernel,\n            stride=cfg.patch_stride,\n            padding=cfg.patch_padding,\n            **dd,\n        )\n        patch_dims = (img_size[0] // cfg.patch_stride[0], img_size[1] // cfg.patch_stride[1])\n        num_patches = prod(patch_dims)\n\n        if cfg.use_cls_token:\n            self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim, **dd))\n            self.num_prefix_tokens = 1\n            pos_embed_dim = num_patches + 1\n        else:\n            self.num_prefix_tokens = 0\n            self.cls_token = None\n            pos_embed_dim = num_patches\n\n        if cfg.use_abs_pos:\n            self.pos_embed = nn.Parameter(torch.zeros(1, pos_embed_dim, embed_dim, **dd))\n        else:\n            self.pos_embed = None\n\n        num_stages = len(cfg.embed_dim)\n        feat_size = patch_dims\n        curr_stride = max(cfg.patch_stride)\n        dpr = calculate_drop_path_rates(drop_path_rate, cfg.depths, stagewise=True)\n        self.stages = nn.ModuleList()\n        self.feature_info = []\n        for i in range(num_stages):\n            if cfg.expand_attn:\n                dim_out = cfg.embed_dim[i]\n            else:\n                dim_out = cfg.embed_dim[min(i + 1, num_stages - 1)]\n            stage = MultiScaleVitStage(\n                dim=embed_dim,\n                dim_out=dim_out,\n                depth=cfg.depths[i],\n                num_heads=cfg.num_heads[i],\n                feat_size=feat_size,\n                mlp_ratio=cfg.mlp_ratio,\n                qkv_bias=cfg.qkv_bias,\n  
              mode=cfg.mode,\n                pool_first=cfg.pool_first,\n                expand_attn=cfg.expand_attn,\n                kernel_q=cfg.kernel_qkv,\n                kernel_kv=cfg.kernel_qkv,\n                stride_q=cfg.stride_q[i],\n                stride_kv=cfg.stride_kv[i],\n                has_cls_token=cfg.use_cls_token,\n                rel_pos_type=cfg.rel_pos_type,\n                residual_pooling=cfg.residual_pooling,\n                norm_layer=norm_layer,\n                drop_path=dpr[i],\n                **dd,\n            )\n            curr_stride *= max(cfg.stride_q[i])\n            self.feature_info += [dict(module=f'block.{i}', num_chs=dim_out, reduction=curr_stride)]\n            embed_dim = dim_out\n            feat_size = stage.feat_size\n            self.stages.append(stage)\n\n        self.num_features = self.head_hidden_size = embed_dim\n        self.norm = norm_layer(embed_dim, **dd)\n        self.head = nn.Sequential(OrderedDict([\n            ('drop', nn.Dropout(self.drop_rate)),\n            ('fc', nn.Linear(self.num_features, num_classes, **dd) if num_classes > 0 else nn.Identity())\n        ]))\n\n        if self.pos_embed is not None:\n            trunc_normal_tf_(self.pos_embed, std=0.02)\n        if self.cls_token is not None:\n            trunc_normal_tf_(self.cls_token, std=0.02)\n        self.apply(self._init_weights)\n\n    def _init_weights(self, m):\n        if isinstance(m, nn.Linear):\n            trunc_normal_tf_(m.weight, std=0.02)\n            if isinstance(m, nn.Linear) and m.bias is not None:\n                nn.init.constant_(m.bias, 0.0)\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {k for k, _ in self.named_parameters()\n                if any(n in k for n in [\"pos_embed\", \"rel_pos_h\", \"rel_pos_w\", \"cls_token\"])}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^patch_embed',  # stem and embed\n          
  blocks=[(r'^stages\\.(\\d+)', None), (r'^norm', (99999,))]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        for s in self.stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            self.global_pool = global_pool\n        device = self.head.fc.weight.device if hasattr(self.head.fc, 'weight') else None\n        dtype = self.head.fc.weight.dtype if hasattr(self.head.fc, 'weight') else None\n        self.head = nn.Sequential(OrderedDict([\n            ('drop', nn.Dropout(self.drop_rate)),\n            ('fc', nn.Linear(self.num_features, num_classes, device=device, dtype=dtype) if num_classes > 0 else nn.Identity())\n        ]))\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to all intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW', 'NLC'), 'Output shape must be NCHW or NLC.'\n        reshape = output_fmt == 
'NCHW'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # FIXME slice block/pos_block if < max\n        # forward pass\n        x, feat_size = self.patch_embed(x)\n        B = x.shape[0]\n        if self.cls_token is not None:\n            cls_tokens = self.cls_token.expand(B, -1, -1)\n            x = torch.cat((cls_tokens, x), dim=1)\n        if self.pos_embed is not None:\n            x = x + self.pos_embed\n\n        last_idx = len(self.stages) - 1\n        for feat_idx, stage in enumerate(self.stages):\n            x, feat_size = stage(x, feat_size)\n            if feat_idx in take_indices:\n                if norm and feat_idx == last_idx:\n                    x_inter = self.norm(x)  # applying final norm last intermediate\n                else:\n                    x_inter = x\n                if reshape:\n                    if self.cls_token is not None:\n                        # possible to allow return of class tokens, TBD\n                        x_inter = x_inter[:, 1:]\n                    x_inter = x_inter.reshape(B, feat_size[0], feat_size[1], -1).permute(0, 3, 1, 2)\n                intermediates.append(x_inter)\n\n        if intermediates_only:\n            return intermediates\n\n        if feat_idx == last_idx:\n            x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        # FIXME add stage pruning\n        # self.stages = self.stages[:max_index]  # truncate blocks w/ stem as idx 0\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, 
'')\n        return take_indices\n\n    def forward_features(self, x):\n        x, feat_size = self.patch_embed(x)\n        B, N, C = x.shape\n\n        if self.cls_token is not None:\n            cls_tokens = self.cls_token.expand(B, -1, -1)\n            x = torch.cat((cls_tokens, x), dim=1)\n\n        if self.pos_embed is not None:\n            x = x + self.pos_embed\n\n        for stage in self.stages:\n            x, feat_size = stage(x, feat_size)\n\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        if self.global_pool:\n            if self.global_pool == 'avg':\n                x = x[:, self.num_prefix_tokens:].mean(1)\n            else:\n                x = x[:, 0]\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    if 'stages.0.blocks.0.norm1.weight' in state_dict:\n        # native checkpoint, look for rel_pos interpolations\n        for k in state_dict.keys():\n            if 'rel_pos' in k:\n                rel_pos = state_dict[k]\n                dest_rel_pos_shape = model.state_dict()[k].shape\n                if rel_pos.shape[0] != dest_rel_pos_shape[0]:\n                    rel_pos_resized = torch.nn.functional.interpolate(\n                        rel_pos.reshape(1, rel_pos.shape[0], -1).permute(0, 2, 1),\n                        size=dest_rel_pos_shape[0],\n                        mode=\"linear\",\n                    )\n                    state_dict[k] = rel_pos_resized.reshape(-1, dest_rel_pos_shape[0]).permute(1, 0)\n        return state_dict\n\n    import re\n    if 'model_state' in state_dict:\n        state_dict = state_dict['model_state']\n\n    depths = getattr(model, 'depths', None)\n    expand_attn = getattr(model, 'expand_attn', True)\n    assert depths is not None, 'model requires depth attribute to remap 
checkpoints'\n    depth_map = {}\n    block_idx = 0\n    for stage_idx, d in enumerate(depths):\n        depth_map.update({i: (stage_idx, i - block_idx) for i in range(block_idx, block_idx + d)})\n        block_idx += d\n\n    out_dict = {}\n    for k, v in state_dict.items():\n        k = re.sub(\n            r'blocks\\.(\\d+)',\n            lambda x: f'stages.{depth_map[int(x.group(1))][0]}.blocks.{depth_map[int(x.group(1))][1]}',\n            k)\n\n        if expand_attn:\n            k = re.sub(r'stages\\.(\\d+).blocks\\.(\\d+).proj', f'stages.\\\\1.blocks.\\\\2.shortcut_proj_attn', k)\n        else:\n            k = re.sub(r'stages\\.(\\d+).blocks\\.(\\d+).proj', f'stages.\\\\1.blocks.\\\\2.shortcut_proj_mlp', k)\n        if 'head' in k:\n            k = k.replace('head.projection', 'head.fc')\n        out_dict[k] = v\n\n    return out_dict\n\n\nmodel_cfgs = dict(\n    mvitv2_tiny=MultiScaleVitCfg(\n        depths=(1, 2, 5, 2),\n    ),\n    mvitv2_small=MultiScaleVitCfg(\n        depths=(1, 2, 11, 2),\n    ),\n    mvitv2_base=MultiScaleVitCfg(\n        depths=(2, 3, 16, 3),\n    ),\n    mvitv2_large=MultiScaleVitCfg(\n        depths=(2, 6, 36, 4),\n        embed_dim=144,\n        num_heads=2,\n        expand_attn=False,\n    ),\n\n    mvitv2_small_cls=MultiScaleVitCfg(\n        depths=(1, 2, 11, 2),\n        use_cls_token=True,\n    ),\n    mvitv2_base_cls=MultiScaleVitCfg(\n        depths=(2, 3, 16, 3),\n        use_cls_token=True,\n    ),\n    mvitv2_large_cls=MultiScaleVitCfg(\n        depths=(2, 6, 36, 4),\n        embed_dim=144,\n        num_heads=2,\n        use_cls_token=True,\n        expand_attn=True,\n    ),\n    mvitv2_huge_cls=MultiScaleVitCfg(\n        depths=(4, 8, 60, 8),\n        embed_dim=192,\n        num_heads=3,\n        use_cls_token=True,\n        expand_attn=True,\n    ),\n)\n\n\ndef _create_mvitv2(variant, cfg_variant=None, pretrained=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', 4)\n    return build_model_with_cfg(\n   
     MultiScaleVit,\n        variant,\n        pretrained,\n        model_cfg=model_cfgs[variant] if not cfg_variant else model_cfgs[cfg_variant],\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': .9, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.proj', 'classifier': 'head.fc',\n        'fixed_input_size': True,\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'mvitv2_tiny.fb_in1k': _cfg(\n        url='https://dl.fbaipublicfiles.com/mvit/mvitv2_models/MViTv2_T_in1k.pyth',\n        hf_hub_id='timm/'),\n    'mvitv2_small.fb_in1k': _cfg(url='https://dl.fbaipublicfiles.com/mvit/mvitv2_models/MViTv2_S_in1k.pyth',\n        hf_hub_id='timm/'),\n    'mvitv2_base.fb_in1k': _cfg(url='https://dl.fbaipublicfiles.com/mvit/mvitv2_models/MViTv2_B_in1k.pyth',\n        hf_hub_id='timm/'),\n    'mvitv2_large.fb_in1k': _cfg(url='https://dl.fbaipublicfiles.com/mvit/mvitv2_models/MViTv2_L_in1k.pyth',\n        hf_hub_id='timm/'),\n\n    'mvitv2_small_cls': _cfg(url=''),\n    'mvitv2_base_cls.fb_inw21k': _cfg(\n        url='https://dl.fbaipublicfiles.com/mvit/mvitv2_models/MViTv2_B_in21k.pyth',\n        hf_hub_id='timm/',\n        num_classes=19168),\n    'mvitv2_large_cls.fb_inw21k': _cfg(\n        url='https://dl.fbaipublicfiles.com/mvit/mvitv2_models/MViTv2_L_in21k.pyth',\n        hf_hub_id='timm/',\n        num_classes=19168),\n    'mvitv2_huge_cls.fb_inw21k': _cfg(\n        url='https://dl.fbaipublicfiles.com/mvit/mvitv2_models/MViTv2_H_in21k.pyth',\n        hf_hub_id='timm/',\n        num_classes=19168),\n})\n\n\n@register_model\ndef mvitv2_tiny(pretrained=False, **kwargs) 
-> MultiScaleVit:\n    return _create_mvitv2('mvitv2_tiny', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mvitv2_small(pretrained=False, **kwargs) -> MultiScaleVit:\n    return _create_mvitv2('mvitv2_small', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mvitv2_base(pretrained=False, **kwargs) -> MultiScaleVit:\n    return _create_mvitv2('mvitv2_base', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mvitv2_large(pretrained=False, **kwargs) -> MultiScaleVit:\n    return _create_mvitv2('mvitv2_large', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mvitv2_small_cls(pretrained=False, **kwargs) -> MultiScaleVit:\n    return _create_mvitv2('mvitv2_small_cls', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mvitv2_base_cls(pretrained=False, **kwargs) -> MultiScaleVit:\n    return _create_mvitv2('mvitv2_base_cls', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mvitv2_large_cls(pretrained=False, **kwargs) -> MultiScaleVit:\n    return _create_mvitv2('mvitv2_large_cls', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef mvitv2_huge_cls(pretrained=False, **kwargs) -> MultiScaleVit:\n    return _create_mvitv2('mvitv2_huge_cls', pretrained=pretrained, **kwargs)\n"
  },
  {
    "path": "timm/models/naflexvit.py",
    "content": "\"\"\" NaFlex Vision Transformer\n\nAn improved version of the Vision Transformer with:\n1. Encapsulated embedding and position encoding in a single module\n2. Support for linear patch embedding on pre-patchified inputs\n3. Support for NaFlex variable aspect, variable resolution\n4. Support for FlexiViT variable patch size\n5. Support for NaViT fractional/factorized position embedding\n\nBased on ideas from:\n- Original Vision Transformer: https://arxiv.org/abs/2010.11929\n- FlexiViT: https://arxiv.org/abs/2212.08013\n- NaViT: https://arxiv.org/abs/2307.06304\n- NaFlex (SigLip-2): https://arxiv.org/abs/2502.14786\n\nHacked together by / Copyright 2025, Ross Wightman, Hugging Face\n\"\"\"\n\nimport logging\nimport math\nfrom dataclasses import dataclass, fields, replace\nfrom functools import partial\nfrom typing import Callable, Dict, List, Optional, Set, Tuple, Type, Union, Any\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD\nfrom timm.layers import (\n    AttentionPoolLatent,\n    Mlp,\n    LayerNorm,\n    PatchDropoutWithIndices,\n    PatchEmbedInterpolator,\n    _assert,\n    to_2tuple,\n    get_act_layer,\n    get_norm_layer,\n    apply_keep_indices_nlc,\n    disable_compiler,\n    calculate_drop_path_rates,\n)\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_function, register_notrace_module\nfrom ._manipulate import checkpoint, named_apply\nfrom ._registry import register_model, generate_default_cfgs\nfrom .eva import EvaBlock\nfrom .vision_transformer import Block, global_pool_nlc\n\n__all__ = ['NaFlexVitCfg', 'NaFlexVit']\n\n\n_logger = logging.getLogger(__name__)\n\n\n@dataclass\nclass NaFlexVitCfg:\n    \"\"\"Configuration for FlexVit model.\n\n    This dataclass contains the bulk of model configuration parameters,\n    with core parameters (img_size, in_chans, 
num_classes, etc.) remaining\n    as direct constructor arguments for API compatibility.\n    \"\"\"\n    # Architecture parameters\n    patch_size: Union[int, Tuple[int, int]] = 16\n    embed_dim: int = 768\n    depth: int = 12\n    num_heads: int = 12\n    mlp_ratio: float = 4.0\n    scale_mlp_norm: bool = False  # Apply scaling norm to MLP\n\n    # Attention parameters\n    qkv_bias: bool = True\n    qk_norm: bool = False\n    proj_bias: bool = True\n    attn_drop_rate: float = 0.0\n    scale_attn_inner_norm: bool = False  # Apply scaling norm to attn context\n\n    # Regularization\n    init_values: Optional[float] = None  # Layer-scale init values (layer-scale enabled if not None)\n    drop_rate: float = 0.0  # Dropout rate for classifier\n    pos_drop_rate: float = 0.0  # Dropout rate for position embeddings\n    patch_drop_rate: float = 0.0  # Dropout rate for patch tokens\n    proj_drop_rate: float = 0.0  # Dropout rate for linear projections\n    drop_path_rate: float = 0.0  # Stochastic depth drop rate\n\n    # Prefix token configuration\n    class_token: bool = False  # Use class token\n    reg_tokens: int = 0  # Number of register tokens\n\n    # Position embedding configuration\n    pos_embed: str = 'learned'  # Type of position embedding ('learned', 'factorized', 'rope', 'none')\n    pos_embed_grid_size: Optional[Tuple[int, int]] = (16, 16)  # Grid size for position embedding initialization\n    pos_embed_interp_mode: str = 'bicubic'  # Interpolation mode for position embedding resizing\n    pos_embed_ar_preserving: bool = False  # Whether to preserve aspect ratio during position embedding interpolation\n    pos_embed_use_grid_sample: bool = False  # Whether to use grid_sample for naflex position embedding interpolation\n\n    # ROPE specific configuration\n    rope_type: str = ''  # ROPE type: '' or 'none' for no ROPE, 'axial' for standard, 'mixed' for learnable frequencies\n    rope_temperature: float = 10000.0  # Temperature for ROPE frequency 
computation\n    rope_ref_feat_shape: Optional[Tuple[int, int]] = None\n    rope_grid_offset: float = 0.  # Grid offset for non-pixel ROPE mode\n    rope_grid_indexing: str = 'ij'  # Grid indexing mode for ROPE ('ij' or 'xy')\n\n    # Image processing\n    dynamic_img_pad: bool = False  # Whether to enable dynamic padding for variable resolution\n\n    # Other architecture choices\n    pre_norm: bool = False  # Whether to apply normalization before attention/MLP layers (start of blocks)\n    final_norm: bool = True  # Whether to apply final normalization before pooling and classifier (end of blocks)\n    fc_norm: Optional[bool] = None  # Whether to normalize features before final classifier (after pooling)\n\n    # Global pooling setup\n    global_pool: str = 'map'  # Type of global pooling for final sequence\n    pool_include_prefix: bool = False  # Whether to include class/register prefix tokens in global pooling\n    attn_pool_num_heads: Optional[int] = None  # Override num_heads for attention pool\n    attn_pool_mlp_ratio: Optional[float] = None   # Override mlp_ratio for attention pool\n\n    # Weight initialization\n    weight_init: str = ''  # Weight initialization scheme\n    fix_init: bool = True  # Apply weight initialization fix (scaling w/ layer index)\n\n    # Embedding configuration\n    embed_proj_type: str = 'linear'  # Type of embedding layer ('conv' or 'linear')\n    input_norm_layer: Optional[str] = None  # Normalization layer for embeddings input (before input projection)\n    embed_norm_layer: Optional[str] = None  # Normalization layer for embeddings (after input projection)\n\n    # Layer implementations\n    norm_layer: Optional[str] = None  # Normalization layer for transformer blocks\n    act_layer: Optional[str] = None  # Activation layer for MLP blocks\n    block_fn: Optional[str] = None  # Transformer block implementation class name\n    mlp_layer: Optional[str] = None  # MLP implementation class name\n    attn_layer: Optional[str] = 
None  # Attention layer implementation (e.g., 'attn', 'diff')\n\n    # EVA-specific parameters\n    attn_type: str = 'standard'  # Attention type: 'standard', 'eva', 'rope'\n    swiglu_mlp: bool = False  # Use SwiGLU MLP variant\n    qkv_fused: bool = True  # Whether to use fused QKV projections\n\n    # Variable patch size support\n    enable_patch_interpolator: bool = False  # Enable dynamic patch size support\n\n\ndef _overlay_kwargs(cfg: NaFlexVitCfg, **kwargs) -> NaFlexVitCfg:\n    \"\"\"Overlay kwargs onto config, replacing config values with provided kwargs.\"\"\"\n    # Only update fields that exist in the config\n    config_fields = set(cfg.__dataclass_fields__.keys())\n    config_kwargs = {k: v for k, v in kwargs.items() if k in config_fields}\n\n    if config_kwargs:\n        cfg = replace(cfg, **config_kwargs)\n\n    return cfg\n\n\ndef batch_patchify(\n        x: torch.Tensor,\n        patch_size: Tuple[int, int],\n        pad: bool = True,\n) -> Tuple[torch.Tensor, Tuple[int, int]]:\n    \"\"\"Patchify a batch of images.\n\n    Args:\n        x: Input tensor of shape [B, C, H, W].\n        patch_size: Patch dimensions (patch_h, patch_w).\n        pad: Whether to pad images to be divisible by patch size.\n\n    Returns:\n        Tuple of (patches, grid_size) where patches has shape [B, N, P*P*C]\n        and grid_size is (num_patches_h, num_patches_w).\n    \"\"\"\n    B, C, H, W = x.shape\n    ph, pw = patch_size\n\n    # Ensure the image is divisible by patch size\n    if pad and (H % ph != 0 or W % pw != 0):\n        pad_h = (ph - H % ph) % ph\n        pad_w = (pw - W % pw) % pw\n        x = F.pad(x, (0, pad_w, 0, pad_h))\n\n    nh, nw = H // ph, W // pw\n    patches = x.view(B, C, nh, ph, nw, pw).permute(0, 2, 4, 3, 5, 1).reshape(B, nh * nw, ph * pw * C)\n    # FIXME confirm we want 'channels last' in the patch channel layout, egg ph, ph, C instead of C, ph, hw\n\n    return patches, (nh, nw)\n\n\ndef calculate_naflex_grid_sizes(_coord: 
torch.Tensor):\n    # Calculate the appropriate grid size from coords\n    max_y = _coord[:, :, 0].amax(dim=1) + 1\n    max_x = _coord[:, :, 1].amax(dim=1) + 1\n    return [(int(h.item()), int(w.item())) for h, w in zip(max_y, max_x)]\n\n\nclass NaFlexRopeIterator:\n    \"\"\"Iterator for generating batched ROPE embeddings for mixed mode with multiple grid sizes.\"\"\"\n\n    def __init__(\n        self,\n        rope_module,\n        size_to_indices: Dict[Tuple[int, int], List[int]],\n        unique_sizes: List[Tuple[int, int]],\n        batch_size: int,\n        seq_len: int,\n        device: torch.device,\n        dtype: torch.dtype,\n    ):\n        self.rope = rope_module\n        self.size_to_indices = size_to_indices\n        self.unique_sizes = unique_sizes\n        self.batch_size = batch_size\n        self.seq_len = seq_len\n        self.dtype = dtype\n        self.device = device\n        self.depth = rope_module.depth\n        self.num_heads = rope_module.num_heads\n        self.head_dim = 2 * rope_module.dim // rope_module.num_heads\n        self._depth_idx = 0\n\n        # Pre-compute embeddings for each unique size\n        self._embeddings_per_size = {}\n        for grid_size in unique_sizes:\n            # get_embed returns all depths at once for mixed mode\n            rope_embed = rope_module.get_embed(shape=grid_size)\n            self._embeddings_per_size[grid_size] = rope_embed\n\n    def __iter__(self):\n        self._depth_idx = 0\n        return self\n\n    @disable_compiler\n    def __next__(self):\n        if self._depth_idx >= self.depth:\n            raise StopIteration\n\n        # Create batch tensor for current depth\n        batch_embed = torch.zeros(\n            self.batch_size, self.num_heads, self.seq_len, self.head_dim,\n            dtype=self.dtype, device=self.device\n        )\n\n        # Fill in embeddings for each unique grid size\n        for grid_size in self.unique_sizes:\n            h, w = grid_size\n            
actual_len = h * w\n            batch_indices = self.size_to_indices[grid_size]\n\n            # Get pre-computed embeddings for this size at current depth\n            embed = self._embeddings_per_size[grid_size][self._depth_idx]  # [num_heads, H*W, dim]\n\n            # Assign to batch indices\n            for bi in batch_indices:\n                batch_embed[bi, :, :actual_len, :] = embed[:, :actual_len, :]\n\n        self._depth_idx += 1\n        return batch_embed\n\n\ndef get_block_fn(cfg: NaFlexVitCfg) -> Callable:\n    \"\"\"Get appropriate block function based on configuration.\n\n    Returns a partially applied block constructor with EVA-specific\n    or conflicting parameters pre-configured if needed.\n    \"\"\"\n    # Check if we need EVA block features\n    use_eva_features = (\n        cfg.attn_type in ('eva', 'rope') or\n        cfg.rope_type not in ('', 'none') or  # Any ROPE type requires EVA blocks\n        cfg.swiglu_mlp\n    )\n\n    if use_eva_features:\n        # Determine attention type based on rope_type if not explicitly set\n        attn_type = cfg.attn_type\n        if attn_type == 'standard' and cfg.rope_type not in ('', 'none'):\n            attn_type = 'rope'\n\n        num_prefix_tokens = (1 if cfg.class_token else 0) + cfg.reg_tokens\n        return partial(\n            EvaBlock,\n            attn_type=attn_type,\n            swiglu_mlp=cfg.swiglu_mlp,\n            scale_mlp=cfg.scale_mlp_norm,\n            scale_attn_inner=cfg.scale_attn_inner_norm,\n            qkv_fused=cfg.qkv_fused,\n            num_prefix_tokens=num_prefix_tokens,\n        )\n    else:\n        # Standard ViT block\n        block_fn = cfg.block_fn or Block\n        block_kwargs = {}\n        if cfg.scale_mlp_norm or cfg.scale_attn_inner_norm:\n            # param names differ between EVA vs non-EVA block types\n            block_kwargs['scale_mlp_norm'] = cfg.scale_mlp_norm\n            block_kwargs['scale_attn_norm'] = cfg.scale_attn_inner_norm\n        if 
cfg.attn_layer:\n            block_kwargs['attn_layer'] = cfg.attn_layer\n        if block_kwargs:\n            block_fn = partial(block_fn, **block_kwargs)\n        return block_fn\n\n\n@register_notrace_module\nclass NaFlexEmbeds(nn.Module):\n    \"\"\"NaFlex Embedding module for Vision Transformers.\n\n    This module encapsulates the complete embedding process for Vision Transformers,\n    supporting both standard and NaFlex (NaViT + FlexiViT) functionality:\n\n    1. Patch embedding (via Conv2d or Linear)\n    2. Class and register token preparation\n    3. Position embedding addition with interpolation support\n    4. Pre-normalization (if requested)\n    5. Dropout application\n\n    NaFlex capabilities include:\n    - Variable aspect ratio and resolution via patch coordinates\n    - Patch type indicators for handling padding tokens in attention\n    - Flexible position embedding interpolation for arbitrary grid sizes\n    - Support for factorized position embeddings\n\n    The patch embedding can be one of two types:\n    - Conv2d-based (default): For standard image inputs [B, C, H, W]\n    - Linear-based: For pre-patchified inputs [B, N, P*P*C]\n\n    Args:\n        patch_size: Size of patches for patch embedding\n        in_chans: Number of input image channels\n        embed_dim: Dimensionality of patch embedding\n        proj_type: Type of embedding projection layer ('conv' or 'linear')\n        input_norm_layer: Normalization layer applied to input (linear mode only)\n        proj_norm_layer: Normalization layer applied after projection\n        pos_embed: Type of position embedding ('learned', 'factorized', 'none')\n        pos_drop_rate: Dropout rate for position embeddings\n        class_token: Whether to include a class token\n        reg_tokens: Number of register tokens to include\n        bias: Whether to use bias in projection layers\n        dynamic_img_pad: Whether to enable dynamic padding for variable resolution\n        
pos_embed_grid_size: Grid size for position embedding initialization\n        pos_embed_interp_mode: Interpolation mode for position embedding resizing\n        pos_embed_ar_preserving: Whether to preserve aspect ratio during position embedding interpolation\n        default_img_size: Default image size for position embedding grid calculation\n    \"\"\"\n\n    def __init__(\n            self,\n            patch_size: Union[int, Tuple[int, int]] = 16,\n            in_chans: int = 3,\n            embed_dim: int = 768,\n            proj_type: Optional[str] = None,\n            proj_bias: bool = True,\n            class_token: bool = True,\n            reg_tokens: int = 0,\n            dynamic_img_pad: bool = False,\n            default_img_size: Optional[Union[int, Tuple[int, int]]] = None,\n            pos_embed: str = 'learned',\n            pos_embed_grid_size: Optional[Tuple[int, int]] = (14, 14),\n            pos_embed_interp_mode: str = 'bicubic',\n            pos_embed_ar_preserving: bool = False,\n            pos_embed_use_grid_sample: bool = False,\n            input_norm_layer: Optional[Type[nn.Module]] = None,\n            proj_norm_layer: Union[bool, Optional[Type[nn.Module]]] = None,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            pos_drop_rate: float = 0.,\n            enable_patch_interpolator: bool = False,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize NaFlexEmbeds module.\n\n        Args:\n            patch_size: Size of patches for patch embedding.\n            in_chans: Number of input image channels.\n            embed_dim: Dimensionality of patch embedding.\n            proj_type: Type of embedding projection layer ('conv' or 'linear').\n            proj_bias: Whether to use bias in projection layers.\n            class_token: Whether to include a class token.\n            reg_tokens: Number of register tokens to include.\n            dynamic_img_pad: Whether to enable dynamic 
padding for variable resolution.\n            default_img_size: Default image size for position embedding grid calculation.\n            pos_embed: Type of position embedding ('learned', 'factorized', 'none').\n            pos_embed_grid_size: Grid size for position embedding initialization.\n            pos_embed_interp_mode: Interpolation mode for position embedding resizing.\n            pos_embed_ar_preserving: Whether to preserve aspect ratio during interpolation.\n            input_norm_layer: Normalization layer applied to input (linear mode only).\n            proj_norm_layer: Normalization layer applied after projection.\n            norm_layer: Default normalization layer.\n            pos_drop_rate: Dropout rate for position embeddings.\n            enable_patch_interpolator: Enable dynamic patch size support.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.has_class_token = class_token\n        self.num_reg_tokens = reg_tokens\n        self.pos_embed_interp_mode = pos_embed_interp_mode\n        self.pos_embed_ar_preserving = pos_embed_ar_preserving\n        self.pos_embed_use_grid_sample = pos_embed_use_grid_sample\n        self.patch_size = to_2tuple(patch_size)\n        self.in_chans = in_chans\n        self.embed_dim = embed_dim\n        self.dynamic_img_pad = dynamic_img_pad\n        self.enable_patch_interpolator = enable_patch_interpolator\n\n        # Calculate number of prefix tokens\n        self.num_prefix_tokens = 1 if class_token else 0\n        self.num_prefix_tokens += reg_tokens\n\n        # Create class and register tokens\n        self.cls_token = nn.Parameter(torch.empty(1, 1, embed_dim, **dd)) if class_token else None\n        self.reg_token = nn.Parameter(torch.empty(1, reg_tokens, embed_dim, **dd)) if reg_tokens else None\n\n        # Calculate grid size and number of patches\n        self.default_img_size: Optional[Tuple[int, int]] = None\n        self.pos_embed_grid_size: 
Optional[Tuple[int, int]] = None  # Grid size used for learned pos embed init\n        if pos_embed_grid_size is not None:\n            # Highest priority, use provided pos_embed_grid_size\n            self.pos_embed_grid_size = pos_embed_grid_size\n        elif default_img_size is not None:\n            # Fallback to calculating grid size from img_size + patch_size if img size provided.\n            self.default_img_size = to_2tuple(default_img_size)\n            self.pos_embed_grid_size = tuple([s // p for s, p in zip(self.default_img_size, self.patch_size)])\n\n        # Determine patch embedding type (linear or conv2d)\n        if proj_type == 'linear':\n            # Create linear projection for pre-patchified inputs\n            # Input dimension is patch_size^2 * in_chans\n            patch_dim = self.patch_size[0] * self.patch_size[1] * in_chans\n            assert not (input_norm_layer is True and norm_layer is None), \\\n                \"`norm_layer` must be given when input_norm_layer=True\"\n            input_norm_layer = norm_layer if input_norm_layer is True else (input_norm_layer or None)\n            self.norm_input = input_norm_layer(patch_dim) if input_norm_layer else None\n            self.proj = nn.Linear(patch_dim, embed_dim, bias=proj_bias, **dd)\n            self.flatten = False\n            self.is_linear = True\n        else:\n            # Default to convolutional patch embedding for image inputs\n            assert not input_norm_layer\n            self.norm_input = None\n            self.proj = nn.Conv2d(\n                in_chans,\n                embed_dim,\n                kernel_size=patch_size,\n                stride=patch_size,\n                bias=proj_bias,\n                **dd,\n            )\n            self.flatten = True\n            self.is_linear = False\n\n        # Create patch embedding interpolator if enabled\n        if self.enable_patch_interpolator:\n            self.patch_interpolator = 
PatchEmbedInterpolator(
                base_patch_size=self.patch_size,
                in_chans=in_chans,
                embed_dim=embed_dim,
                interpolation=pos_embed_interp_mode,
                antialias=True,
            )
        else:
            self.patch_interpolator = None

        # Create normalization layer after the projection
        assert not (proj_norm_layer is True and norm_layer is None), \
            "`norm_layer` must be given when proj_norm_layer=True"
        proj_norm_layer = norm_layer if proj_norm_layer is True else (proj_norm_layer or None)
        self.norm = proj_norm_layer(embed_dim) if proj_norm_layer else nn.Identity()

        # Create position embedding if needed - only for patches, never for prefix tokens
        if pos_embed in ('factorized', 'learned') and self.pos_embed_grid_size is None:
            # NOTE(review): the adjacent string literals below concatenate without a
            # separating space, so the message renders as "...grid_size.Please provide..."
            raise ValueError(
                "Cannot initialize position embeddings without grid_size."
                "Please provide img_size or pos_embed_grid_size.")
        self.pos_embed: Optional[torch.Tensor] = None
        self.pos_embed_y: Optional[torch.Tensor] = None
        self.pos_embed_x: Optional[torch.Tensor] = None
        if not pos_embed or pos_embed == 'none':
            self.pos_embed_type = 'none'
        elif pos_embed == 'factorized':
            # Factorized: separate 1-D tables per axis, (1, H, C) and (1, W, C),
            # combined additively when applied.
            assert self.pos_embed_grid_size is not None
            h, w = self.pos_embed_grid_size
            self.pos_embed_type = 'factorized'
            self.pos_embed_y = nn.Parameter(torch.empty(1, h, embed_dim, **dd))
            self.pos_embed_x = nn.Parameter(torch.empty(1, w, embed_dim, **dd))
        else:
            # Learned: full 2-D table of shape (1, H, W, C).
            assert self.pos_embed_grid_size is not None
            h, w = self.pos_embed_grid_size
            self.pos_embed = nn.Parameter(torch.empty(1, h, w, embed_dim, **dd))
            self.pos_embed_type = 'learned'

        # Dropout layer
        self.pos_drop = nn.Dropout(p=pos_drop_rate)

        # TODO: skip init when on meta device when safe to do so
        self.reset_parameters()

    def reset_parameters(self) -> None:
        """Re-initialize learnable tokens and position embedding tables in-place.

        Tokens use a near-zero init (std=1e-6); position tables use std=.02.
        Parameters created above with torch.empty() are given values here.
        """
        if self.cls_token is not None:
            nn.init.normal_(self.cls_token, std=1e-6)
        if self.reg_token is not None:
            nn.init.normal_(self.reg_token, std=1e-6)
        if self.pos_embed is not None:
            nn.init.normal_(self.pos_embed, std=.02)
        if self.pos_embed_y is not None:
            nn.init.normal_(self.pos_embed_y, std=.02)
        if self.pos_embed_x is not None:
            nn.init.normal_(self.pos_embed_x, std=.02)

    def feature_info(self, location) -> Dict[str, Any]:
        """Get feature information for feature extraction.

        Args:
            location: Feature extraction location identifier (accepted but unused here)

        Returns:
            Dictionary containing feature channel count and reduction factor
        """
        return dict(num_chs=self.embed_dim, reduction=self.patch_size)

    def feat_ratio(self, as_scalar: bool = True) -> Union[int, Tuple[int, int]]:
        """Get the feature reduction ratio (stride) of the patch embedding.

        Args:
            as_scalar: Whether to return the maximum dimension as a scalar

        Returns:
            Feature reduction ratio as scalar or tuple
        """
        if as_scalar:
            return max(self.patch_size)
        else:
            return self.patch_size

    def dynamic_feat_size(self, img_size: Tuple[int, int]) -> Tuple[int, int]:
        """Calculate grid (feature) size for given image size.

        Takes into account dynamic padding when enabled.

        Args:
            img_size: Input image size as (height, width)

        Returns:
            Grid size as (grid_height, grid_width)
        """
        if self.dynamic_img_pad:
            # ceil: padded inputs produce a partial final row/column of patches
            return math.ceil(img_size[0] / self.patch_size[0]), math.ceil(img_size[1] / self.patch_size[1])
        else:
            return img_size[0] // self.patch_size[0], img_size[1] // self.patch_size[1]

    @disable_compiler
    def _apply_learned_naflex_pos_embed(
            self,
            x: torch.Tensor,
            patch_coord: torch.Tensor,
    ) -> None:
        """Apply learned position embeddings to NaFlex batch in-place.

        Interpolates learned 2D position embeddings for each sample in the batch
        based on their individual grid sizes.

        Args:
            x: Input tensor to add position embeddings to [B, N, C]
            patch_coord: Patch coordinates [B, N, 2] with (y, x) values
        """
        # Calculate grid sizes from patch coordinates
        naflex_grid_sizes = calculate_naflex_grid_sizes(patch_coord)
        orig_h, orig_w = self.pos_embed.shape[1:3]
        pos_embed_nchw = self.pos_embed.permute(0, 3, 1, 2).float()  # B,C,H,W

        def _interp2d(size):
            """
            Return a flattened positional-embedding grid at an arbitrary spatial resolution.

            Converts the learned 2-D table stored in NCHW format (pos_embed_nchw) into
            a (1, H*W, C) sequence that matches the requested size.
            """
            if (size[0] == orig_h) and (size[1] == orig_w):
                pos_embed_flat = self.pos_embed.reshape(1, orig_h * orig_w, -1)
            else:
                # AR-preserving: interpolate to a square of the longer side, then the
                # slice below crops back to the requested (H, W).
                _interp_size = to_2tuple(max(size)) if self.pos_embed_ar_preserving else size
                pos_embed_flat = F.interpolate(
                    pos_embed_nchw,
                    size=_interp_size,
                    mode=self.pos_embed_interp_mode,
                    align_corners=False,
                    antialias=True,
                )[:, :, :size[0], :size[1]].flatten(2).transpose(1, 2)
            return pos_embed_flat.to(dtype=x.dtype)

        # Determine unique grid sizes to avoid duplicate interpolation
        size_to_indices: Dict[Tuple[int, int], List[int]] = {}
        for bi, k in enumerate(naflex_grid_sizes):
            # k = h << 16 | w  # FIXME can get jit compat with this
            size_to_indices.setdefault(k, []).append(bi)

        for k, batch_indices in size_to_indices.items():
            # h, w = k >> 16, k & 0xFFFF  # FIXME can get jit compat with this
            # Interpolate only once for this (h, w)
            pos_embed_flat = _interp2d(k)
            seq_len = min(x.shape[1], pos_embed_flat.shape[1])
            # index_add_ along the batch dim adds the shared grid to every sample
            # that has this grid size, in-place on x.
            x[:, :seq_len].index_add_(
                0,
                torch.as_tensor(batch_indices, device=x.device),
                pos_embed_flat[:, :seq_len].expand(len(batch_indices), -1, -1)
            )

    @disable_compiler
    def _apply_learned_naflex_pos_embed_grid_sample(
            self,
            x: torch.Tensor,
            patch_coord: torch.Tensor,
    ) -> None:
        """Apply learned position embeddings to NaFlex batch using grid_sample.

        Uses F.grid_sample for efficient interpolation of learned 2D position embeddings
        based on patch coordinates. Based on proposal by https://github.com/stas-sl

        Args:
            x: Input tensor to add position embeddings to [B, N, C]
            patch_coord: Patch coordinates [B, N, 2] with (y, x) values
        """
        device = x.device
        B, N, C = x.shape
        shapes = patch_coord.max(dim=1).values + 1  # (B, 2) containing [h_i, w_i]

        if self.pos_embed_ar_preserving:
            L_i = shapes.amax(dim=1)  # (B,) max(h_i, w_i)
            L_global = L_i.amax()
            grid_size_y = grid_size_x = L_global
            scale_x = scale_y = L_global / L_i  # uniform zoom (B,)
        else:
            grid_size_y, grid_size_x = shapes.amax(dim=0)  # (2,)
            scale_y = grid_size_y / shapes[:, 0]  # vertical zoom (B,)
            scale_x = grid_size_x / shapes[:, 1]  # horizontal zoom (B,)

        # Per-sample affine transform: zoom + translate so each sample's grid lands
        # in the top-left corner of the shared (grid_size_y, grid_size_x) canvas.
        theta = torch.zeros(B, 2, 3, device=device, dtype=torch.float32)
        theta[:, 0, 0] = scale_x
        theta[:, 1, 1] = scale_y
        theta[:, 0, 2] = scale_x - 1  # translate x
        theta[:, 1, 2] = scale_y - 1  # translate y

        grid = F.affine_grid(theta, (B, C, grid_size_y, grid_size_x), align_corners=False)
        pos_embed = F.grid_sample(
            self.pos_embed.permute(0, 3, 1, 2).expand(B, -1, -1, -1).float(),
            grid,
            mode=self.pos_embed_interp_mode,
            align_corners=False,
            padding_mode='border',
        ).to(dtype=x.dtype)  # (B, C, H_out, W_out)

        # Gather each patch's embedding by its (y, x) coordinate and add in-place.
        bi = torch.arange(B, device=device, dtype=torch.long).unsqueeze(1)
        x += pos_embed[bi, :, patch_coord[..., 0], patch_coord[..., 1]]  # NOTE leave as '+='

    def _apply_learned_pos_embed(
            self,
            x: torch.Tensor,
            grid_size: List[int],
    ) -> None:
        """Apply learned position embeddings to standard 2D batch in-place.

        Interpolates learned 2D position embeddings to match the specified grid size.

        Args:
            x: Input tensor to add position embeddings to [B, H*W, C]
            grid_size: Target grid size as [height, width]
        """
        orig_h, orig_w = self.pos_embed.shape[1:3]
        if grid_size[0] == orig_h and grid_size[1] == orig_w:
            # No resize needed, just flatten
            pos_embed_flat = self.pos_embed.reshape(1, orig_h * orig_w, -1)
        else:
            # Resize if needed - directly using F.interpolate
            if self.pos_embed_ar_preserving:
                # Square interpolation target; cropped back to grid_size below.
                L = max(grid_size)
                _interp_size = L, L
            else:
                _interp_size = grid_size
            pos_embed_flat = F.interpolate(
                self.pos_embed.permute(0, 3, 1, 2).float(),  # B,C,H,W
                size=_interp_size,
                mode=self.pos_embed_interp_mode,
                align_corners=False,
                antialias=True,
            )[:, :, :grid_size[0], :grid_size[1]].flatten(2).transpose(1, 2)
        pos_embed_flat = pos_embed_flat.to(dtype=x.dtype)

        x.add_(pos_embed_flat)

    @disable_compiler
    def _apply_factorized_naflex_pos_embed(
            self,
            x: torch.Tensor,
            patch_coord: torch.Tensor,
    ) -> None:
        """Apply factorized position embeddings to NaFlex batch in-place.

        Uses separate Y and X position embedding tables that are interpolated
        and combined for each sample's grid size.

        Args:
            x: Input tensor to add position embeddings to [B, N, C]
            patch_coord: Patch coordinates [B, N, 2] with (y, x) values
        """
        # Calculate grid sizes from patch coordinates
        naflex_grid_sizes = calculate_naflex_grid_sizes(patch_coord)
        assert len(naflex_grid_sizes) == x.size(0)   # one (H,W) per sample

        # Handle each batch element separately with its own grid size
        orig_h, orig_w = self.pos_embed_y.shape[1], self.pos_embed_x.shape[1]

        # bucket samples that share the same (H, W) so we build each grid once
        size_to_indices: Dict[Tuple[int, int], List[int]] = {}
        for bi, k in enumerate(naflex_grid_sizes):
            size_to_indices.setdefault(k, []).append(bi)

        def _interp1d(table: torch.Tensor, new_length: int, orig_length: int) -> torch.Tensor:
            """
            Resample a 1-D positional-embedding table to specified length
            and return it in (1, L, C) layout, dtype matching x.
            """
            if new_length == orig_length:
                return table.to(dtype=x.dtype)
            return F.interpolate(
                table.permute(0, 2, 1).float(),  # (1,C,L) → (1,C,L_out)
                size=new_length,
                mode='linear',
                align_corners=False,
            ).permute(0, 2, 1).to(dtype=x.dtype)  # → (1,L_out,C)

        for k, batch_indices in size_to_indices.items():
            target_h, target_w = k
            if self.pos_embed_ar_preserving:
                # AR-preserving: interpolate both axes to the longer side, crop below.
                len_y = len_x = max(target_h, target_w)
            else:
                len_y, len_x = target_h, target_w

            pe_y = _interp1d(self.pos_embed_y, len_y, orig_h)[:, :target_h]  # (1,H,C)
            pe_x = _interp1d(self.pos_embed_x, len_x, orig_w)[:, :target_w]  # (1,W,C)

            # Broadcast, add and flatten to sequence layout (row major)
            pos = pe_y.unsqueeze(2) + pe_x.unsqueeze(1)        # (1,H,W,C)
            pos = pos.flatten(1, 2)

            seq_len = min(x.shape[1], pos.shape[1])
            # Add the shared grid to all samples with this size, in-place on x.
            x[:, :seq_len].index_add_(
                0,
                torch.as_tensor(batch_indices, device=x.device),
                pos[:, :seq_len].expand(len(batch_indices), -1, -1)
            )

    @disable_compiler
    def _apply_factorized_naflex_pos_embed_grid_sample(
            self,
            x: torch.Tensor,
            patch_coord: torch.Tensor,
    ) -> None:
        """Apply factorized position embeddings to NaFlex batch using grid_sample.

        Uses F.grid_sample for efficient interpolation of separate Y and X position
        embedding tables based on patch coordinates. Based on proposal by https://github.com/stas-sl

        Args:
            x: Input tensor to add position embeddings to [B, N, C]
            patch_coord: Patch coordinates [B, N, 2] with (y, x) values
        """
        device = x.device
        B, _, C = x.shape
        shapes = patch_coord.amax(dim=1) + 1

        if self.pos_embed_ar_preserving:
            # Aspect ratio preserving mode: use square grid with uniform scaling
            L_i = shapes.amax(dim=1)  # (B,) max(h_i, w_i)
            L_global = L_i.amax()
            grid_size_y = grid_size_x = L_global
            scale_x = scale_y = L_global / L_i  # uniform zoom (B,)
        else:
            # Standard mode: different scaling for x and y
            grid_size_y, grid_size_x = shapes.amax(0)
            scale_x = grid_size_x / shapes[:, 1]  # horizontal zoom (B,)
            scale_y = grid_size_y / shapes[:, 0]  # vertical zoom (B,)

        def _interp1d(table: torch.Tensor, scale: torch.Tensor, out_length: torch.Tensor) -> torch.Tensor:
            # Treat the 1-D table as a (B, C, 1, L) "image" and resample it with a
            # per-sample horizontal affine zoom, mirroring the 2-D learned path.
            pe = table.permute(0, 2, 1).unsqueeze(2).expand(B, -1, -1, -1).float()  # (1, L, C) -> (B, C, 1, L)
            # NOTE(review): theta uses the global default float dtype here, while the
            # learned 2-D variant pins dtype=torch.float32 explicitly — confirm intentional.
            theta = torch.zeros(B, 2, 3, device=x.device)
            theta[:, 0, 0] = scale
            theta[:, 0, 2] = scale - 1
            theta[:, 1, 1] = 1
            grid = F.affine_grid(theta, (B, C, 1, out_length), align_corners=False)
            pe = F.grid_sample(pe, grid, mode='bilinear', align_corners=False, padding_mode='border')
            return pe.to(x.dtype)

        # Interpolate along each axis
        pe_x = _interp1d(self.pos_embed_x, scale=scale_x, out_length=grid_size_x)
        pe_y = _interp1d(self.pos_embed_y, scale=scale_y, out_length=grid_size_y)

        # Gather per-patch embeddings by coordinate and add the two axes in-place.
        bi = torch.arange(B, device=device, dtype=torch.long).unsqueeze(1)
        x += pe_x[bi, :, 0, patch_coord[..., 1]] + pe_y[bi, :, 0, patch_coord[..., 0]]

    def _apply_factorized_pos_embed(
            self,
            x: torch.Tensor,
            grid_size: List[int],
    ) -> None:
        """Apply factorized position embeddings to standard 2D batch in-place.

        Uses separate Y and X position embedding tables that are interpolated
        and combined for the specified grid size.

        Args:
            x: Input tensor to add position embeddings to [B, H*W, C]
            grid_size: Target grid size as [height, width]
        """
        orig_h, orig_w = self.pos_embed_y.shape[1], self.pos_embed_x.shape[1]
        target_h, target_w = grid_size

        if self.pos_embed_ar_preserving:
            # AR-preserving: interpolate both axes to the longer side, crop below.
            len_y = len_x = max(target_h, target_w)
        else:
            len_y, len_x = target_h, target_w

        def _interp1d(table: torch.Tensor, new_length: int, orig_length: int) -> torch.Tensor:
            # Linear 1-D resample of a (1, L, C) table; no-op when length matches.
            if new_length == orig_length:
                return table.to(dtype=x.dtype)
            return F.interpolate(
                table.permute(0, 2, 1).float(),  # (1,L,C) -> (1,C,L)
                size=new_length,
                mode='linear',
                align_corners=False,
            ).permute(0, 2, 1).to(dtype=x.dtype)  # (1,L,C)

        # Interpolate embeddings
        pe_y = _interp1d(self.pos_embed_y, len_y, orig_h)[:, :target_h]  # (1,H,C)
        pe_x = _interp1d(self.pos_embed_x, len_x, orig_w)[:, :target_w]  # (1,W,C)

        # Broadcast, add and flatten to sequence layout (row major)
        pos_embed = pe_y.unsqueeze(2) + pe_x.unsqueeze(1)  # (1, H, W, C)
        pos_embed_flat = pos_embed.flatten(1, 2)  # (1, H*W, C)

        x.add_(pos_embed_flat)

    def forward(
            self,
            x: torch.Tensor,
            patch_coord: Optional[torch.Tensor] = None,
            patch_valid: Optional[torch.Tensor] = None,
    ) -> Tuple[torch.Tensor, Optional[Tuple[int, int]]]:
        """Forward pass for patch embedding with position encoding.

        Args:
            x: Input tensor. Supported formats:
                - [B, C, H, W] for conv mode
                - [B, N, P*P*C] for pre-patchified linear mode (normal)
                - [B, N, Ph, Pw, C] for pre-patchified linear mode (variable patch size)
            patch_coord: Optional patch coordinates [B, N, 2] for NaFlex mode.
            patch_valid: Optional validity mask for patches [B, N] for NaFlex mode.

        Returns:
            Tuple of (embedded_tensor, grid_size) where:
                - embedded_tensor: [B, num_prefix_tokens + N, embed_dim]
                - grid_size: (H, W) tuple for standard mode, None for NaFlex mode
        """
        grid_size: Optional[Tuple[int, int]] = None
        B = x.shape[0]
        if self.is_linear:
            # Linear embedding path, works with NaFlex mode or standard 2D mode
            if patch_coord is None:
                # Standard 2D (B, C, H, W) mode
                _assert(x.ndim == 4, 'Expecting 2D image input with input ndim == 4')
                x, grid_size = batch_patchify(x, self.patch_size, pad=self.dynamic_img_pad)
            else:
                # Pre-patchified NaFlex mode
                # Variable patch size mode: [B, N, Ph, Pw, C], normal mode: [B, N, P*P*C]
                _assert(x.ndim == 5 or x.ndim == 3, 'Expecting patchified input with ndim == 3 or 5.')

            # Handle variable patch size projection
            if self.enable_patch_interpolator and x.ndim == 5:
                _assert(self.norm_input is None, 'input norm not supported with patch resizing')

                # Apply projection with interpolation
                x = self.patch_interpolator(
                    x,
                    self.proj.weight,
                    self.proj.bias,
                    patch_size=tuple(x.shape[2:4]),  # patch size from [B, N, Ph, Pw, C] shape
                    is_linear=True,
                )
            else:
                # Standard projection
                x = x.flatten(2)  # ensure [B, N, P*P*C], flatten Ph*Pw*C if separate
                if self.norm_input is not None:
                    x = self.norm_input(x)
                x = self.proj(x)
        else:
            _assert(x.ndim == 4, 'Convolutional input must be 4D')
            if self.dynamic_img_pad:
                # Pad right/bottom so H and W become multiples of the patch size.
                H, W = x.shape[-2:]
                pad_h = (self.patch_size[0] - H % self.patch_size[0]) % self.patch_size[0]
                pad_w = (self.patch_size[1] - W % self.patch_size[1]) % self.patch_size[1]
                x = F.pad(x, (0, pad_w, 0, pad_h))

            x = self.proj(x)

            grid_size = x.shape[-2:]
            if self.flatten:
                x = x.flatten(2).transpose(1, 2)  # NCHW -> NLC

        # Apply normalization after flattening
        x = self.norm(x)

        if self.pos_embed_type == 'learned':
            if grid_size is not None:
                # Standard 2D mode
                self._apply_learned_pos_embed(x, grid_size=grid_size)
            else:
                # NaFlex mode
                if self.pos_embed_use_grid_sample:
                    self._apply_learned_naflex_pos_embed_grid_sample(x, patch_coord=patch_coord)
                else:
                    self._apply_learned_naflex_pos_embed(x, patch_coord=patch_coord)
        elif self.pos_embed_type == 'factorized':
            if grid_size is not None:
                # Standard 2D mode
                self._apply_factorized_pos_embed(x, grid_size=grid_size)
            else:
                # NaFlex mode
                if self.pos_embed_use_grid_sample:
                    self._apply_factorized_naflex_pos_embed_grid_sample(x, patch_coord=patch_coord)
                else:
                    self._apply_factorized_naflex_pos_embed(x, patch_coord=patch_coord)

        # Prepare and add class and register tokens
        to_cat = []
        if self.cls_token is not None:
            to_cat.append(self.cls_token.expand(B, -1, -1))
        if self.reg_token is not None:
            to_cat.append(self.reg_token.expand(B, -1, -1))
        # Add tokens to the beginning
        if to_cat:
            x = torch.cat(to_cat + [x], dim=1)

        # Apply dropout
        x = self.pos_drop(x)

        return x, grid_size


@register_notrace_function
def create_attention_mask(
        patch_valid: torch.Tensor,
        num_prefix_tokens: int = 0,
        symmetric: bool = True,
        q_len: Optional[int] = None,
        dtype: torch.dtype = torch.float32,
) -> Optional[torch.Tensor]:
    """Creates an attention mask from patch validity information.

    Supports two modes controlled by `symmetric`:
    1. `symmetric=True` (default): Creates a symmetric mask of shape
       [B, 1, seq_len, seq_len]. An attention pair (i, j) is allowed only if
       both token i and token j are valid. Suitable for standard self-attention.
    2. `symmetric=False`: Creates a potentially non-square mask of shape
       [B, 1, q_len, kv_len]. An attention pair (q, k) is allowed only if
       the key/value token k is valid. Query token validity is not checked
       in the mask itself. Useful for cross-attention or specific self-attention
       implementations where `q_len` can be specified.

    Used for NaFlex mode to handle variable token counts and padding tokens.

    Args:
        patch_valid: Tensor of shape [B, N] with True for valid patches, False for padding.
        num_prefix_tokens: Number of prefix tokens (class token, register tokens)
            to prepend, which are always considered valid.
        symmetric: If True, create a symmetric mask.
            If False, create an expanded mask based only on key/value validity.
        q_len: Query sequence length override. Only used when `symmetric` is False.
            Defaults to the key/value sequence length (`kv_len`) if None.
        dtype: Dtype of the output attention mask (e.g., torch.float32).

    Returns:
        Attention mask tensor. Additive mask (-inf for masked, 0 for unmasked).
        Shape is [B, 1, seq_len, seq_len] if symmetric=True,
        or [B, 1, q_len, kv_len] if symmetric=False.
    """
    if patch_valid is None:
        return None

    patch_valid = patch_valid.bool() # Ensure boolean type
    B, N = patch_valid.shape
    kv_len = N # Initial key/value length is the number of patches

    # Prepend prefix tokens if any
    if num_prefix_tokens > 0:
        # Create prefix validity tensor on the same device/dtype base as patch_valid
        prefix_valid = patch_valid.new_ones((B, num_prefix_tokens), dtype=torch.bool)
        # Concatenate prefix and patch validity. Shape becomes [B, num_prefix_tokens + N]
        patch_valid = torch.cat([prefix_valid, patch_valid], dim=1)
        kv_len += num_prefix_tokens # Update total key/value sequence length

    if symmetric:
        # Symmetric mask is True where BOTH query and key are valid
        mask_bool = patch_valid.unsqueeze(-1) & patch_valid.unsqueeze(1)
        mask_bool = mask_bool.unsqueeze(1)  # Add head dimension: [B, 1, seq_len, seq_len]
    else:
        # Expanded mask
        q_len = q_len or kv_len
        mask_bool = patch_valid[:, None, None, :].expand(B, 1, q_len, kv_len)

    # Create the float mask and apply masking using additive mask convention
    mask_float = torch.zeros_like(mask_bool, dtype=dtype)
    # Fill with negative infinity where mask_bool is False (masked positions)
    mask_float.masked_fill_(~mask_bool, torch.finfo(dtype).min)

    return mask_float


@register_notrace_function
def global_pool_naflex(
        x: torch.Tensor,
        patch_valid: Optional[torch.Tensor] = None,
        pool_type: str = 'token',
        num_prefix_tokens: int = 1,
        reduce_include_prefix: bool = False,
) -> torch.Tensor:
    """Global pooling with NaFlex support for masked tokens.

    Applies global pooling while respecting patch validity masks to exclude
    padding tokens from pooling operations.

    Args:
        x: Input tensor with shape [B, N, C]
        patch_valid: Optional validity mask for patches [B, N-num_prefix_tokens]
        pool_type: Type of pooling ('token', 'avg', 'avgmax', 'max')
        num_prefix_tokens: Number of prefix tokens (class/register)
        reduce_include_prefix: Whether to include prefix tokens in pooling reduction

    Returns:
        Pooled tensor with shape [B, C]
    """
    if patch_valid is None or pool_type not in ('avg', 'avgmax', 'max'):
        # Fall back to standard pooling
        x = global_pool_nlc(
            x,
            pool_type=pool_type,
            num_prefix_tokens=num_prefix_tokens,
            reduce_include_prefix=reduce_include_prefix,
        )
        return x

    # For NaFlex mode, we need to apply masked pooling to exclude padding tokens
    # NOTE(review): the `~patch_valid` indexing below assumes patch_valid is a bool
    # tensor; an integer mask would be bitwise-negated instead — confirm callers pass bool.
    if num_prefix_tokens > 0:
        if reduce_include_prefix:
            # Include prefix tokens in pooling - they are always considered valid
            # patch_valid only covers patch tokens, so create combined validity mask
            prefix_valid = patch_valid.new_ones(x.shape[0], num_prefix_tokens)
            patch_valid = torch.cat([prefix_valid, patch_valid], dim=1)
        else:
            # Exclude prefix tokens from pooling (default behavior)
            x = x[:, num_prefix_tokens:]

    patch_valid_float = patch_valid.to(x.dtype)
    if pool_type == 'avg':
        # Compute masked average pooling, sum valid tokens and divide by count of valid tokens
        masked_sums = (x * patch_valid_float.unsqueeze(-1)).sum(dim=1)
        valid_counts = patch_valid_float.sum(dim=1, keepdim=True).clamp(min=1)
        pooled = masked_sums / valid_counts
        return pooled
    elif pool_type == 'avgmax':
        # For avgmax, compute masked average and masked max
        masked_sums = (x * patch_valid_float.unsqueeze(-1)).sum(dim=1)
        valid_counts = patch_valid_float.sum(dim=1, keepdim=True).clamp(min=1)
        masked_avg = masked_sums / valid_counts

        # For max pooling we set masked positions to large negative value
        masked_x = x.clone()
        masked_x[~patch_valid] = torch.finfo(masked_x.dtype).min
        masked_max = masked_x.amax(dim=1)

        # Combine average and max
        return 0.5 * (masked_avg + masked_max)
    elif pool_type == 'max':
        # For max pooling we set masked positions to large negative value
        masked_x = x.clone()
        masked_x[~patch_valid] = torch.finfo(masked_x.dtype).min
        return masked_x.amax(dim=1)
    else:
        assert False


class NaFlexVit(nn.Module):
    """NaFlexVit: Vision Transformer with NaFlex support for flexible input handling.

    A flexible implementation of Vision Transformer that supports:
    - Standard image classification with various pooling strategies
    - NaFlex functionality for variable aspect ratios and resolutions
    - Linear patch embedding for pre-patchified inputs
    - Multiple position embedding strategies (learned, factorized, rope)
    - Comprehensive attention masking for efficient batch processing
    - Encapsulated embedding and position encoding in FlexEmbeds module
    - Compatible with standard ViT checkpoints through checkpoint filtering
    """

    def __init__(
            self,
            cfg: Optional[NaFlexVitCfg] = None,
            in_chans: int = 3,
            num_classes: int = 1000,
            img_size: Optional[Union[int, Tuple[int, int]]] = None,
            device=None,
            dtype=None,
            **kwargs,
    ) -> None:
        """Initialize NaFlexVit model.

        Args:
            cfg: Model configuration. If None, uses default NaFlexVitCfg.
            in_chans: Number of input image channels.
            num_classes: Number of classification classes.
            img_size: Input image size (for backwards compatibility with classic vit).
            device: Optional target device for created parameters/buffers.
            dtype: Optional target dtype for created parameters/buffers.
            **kwargs: Additional config parameters to override cfg values.
        """
        super().__init__()
        dd = {'device': device, 'dtype': dtype}

        # Initialize config
        cfg = cfg or NaFlexVitCfg()
        if kwargs:
            cfg = _overlay_kwargs(cfg, **kwargs)

        # Validate configuration
        assert cfg.global_pool in ('', 'avg', 'avgmax', 'max', 'token', 'map')
        assert cfg.class_token or cfg.global_pool != 'token'
        assert cfg.pos_embed in ('', 'none', 'learned', 'factorized')

        # Resolve layer implementations
        norm_layer = get_norm_layer(cfg.norm_layer) or LayerNorm
        embed_norm_layer = get_norm_layer(cfg.embed_norm_layer)
        act_layer = get_act_layer(cfg.act_layer) or nn.GELU
        block_fn = get_block_fn(cfg)
        mlp_layer = cfg.mlp_layer or Mlp   # TODO: Support configurable mlp_layer via string lookup

        # Store instance variables
        self.num_classes = num_classes
        self.in_chans = in_chans
        self.global_pool = cfg.global_pool
        self.num_features = self.head_hidden_size = self.embed_dim = cfg.embed_dim  # for consistency with other models
        self.num_prefix_tokens = 1 if cfg.class_token else 0
        self.num_prefix_tokens += cfg.reg_tokens
        self.num_reg_tokens = cfg.reg_tokens
        self.has_class_token = cfg.class_token
        self.pool_include_prefix = cfg.pool_include_prefix
        self.grad_checkpointing = False

        # Initialize embedding module (includes patch, position embedding, and class/reg tokens)
        # FlexEmbeds is always used - handles both linear and conv embedding
        self.embeds = NaFlexEmbeds(
            patch_size=cfg.patch_size,
            in_chans=in_chans,
            embed_dim=cfg.embed_dim,
            proj_type=cfg.embed_proj_type,
            proj_bias=not cfg.pre_norm,  # disable bias if pre-norm is used (e.g. CLIP)
            class_token=cfg.class_token,
            reg_tokens=cfg.reg_tokens,
            default_img_size=img_size,
            dynamic_img_pad=cfg.dynamic_img_pad,
            pos_embed=cfg.pos_embed,
            pos_embed_grid_size=cfg.pos_embed_grid_size,
            pos_embed_interp_mode=cfg.pos_embed_interp_mode,
            pos_embed_ar_preserving=cfg.pos_embed_ar_preserving,
            pos_embed_use_grid_sample=cfg.pos_embed_use_grid_sample,
            proj_norm_layer=embed_norm_layer,
            pos_drop_rate=cfg.pos_drop_rate,
            enable_patch_interpolator=getattr(cfg, 'enable_patch_interpolator', False),
            **dd,
        )
        self.norm_pre = norm_layer(cfg.embed_dim, **dd) if cfg.pre_norm else nn.Identity()

        # ROPE position embeddings at model level
        self.rope: Optional[nn.Module] = None
        self.rope_is_mixed = False
        if cfg.rope_type and cfg.rope_type != 'none':
            # Local import keeps rope dependencies out of the common path.
            from timm.layers.pos_embed_sincos import RotaryEmbeddingCat, RotaryEmbeddingMixed
            if cfg.rope_type == 'mixed':
                self.rope = RotaryEmbeddingMixed(
                    cfg.embed_dim,
                    depth=cfg.depth,
                    num_heads=cfg.num_heads,
                    temperature=cfg.rope_temperature,
                    feat_shape=None,  # Dynamic shapes for NaFlex
                    grid_indexing=cfg.rope_grid_indexing,
                    **dd,
                )
                self.rope_is_mixed = True
            elif cfg.rope_type == 'axial':
                self.rope = RotaryEmbeddingCat(
                    cfg.embed_dim // cfg.num_heads,
                    temperature=cfg.rope_temperature,
                    in_pixels=False,
                    feat_shape=None,  # Dynamic shapes for NaFlex
                    ref_feat_shape=cfg.rope_ref_feat_shape,
                    grid_offset=cfg.rope_grid_offset,
                    grid_indexing=cfg.rope_grid_indexing,
                    **dd,
                )
                self.rope_is_mixed = False
            else:
                raise ValueError(f"Unknown rope_type: {cfg.rope_type}")

        # Patch dropout
        if cfg.patch_drop_rate > 0:
            self.patch_drop = PatchDropoutWithIndices(
                cfg.patch_drop_rate,
                num_prefix_tokens=self.num_prefix_tokens,
            )
        else:
            self.patch_drop = None

        # Transformer blocks
        dpr = calculate_drop_path_rates(cfg.drop_path_rate, cfg.depth)  # stochastic depth decay rule
        # Create transformer blocks
        self.blocks = nn.Sequential(*[
            block_fn(
                dim=cfg.embed_dim,
                num_heads=cfg.num_heads,
                mlp_ratio=cfg.mlp_ratio,
                qkv_bias=cfg.qkv_bias,
                qk_norm=cfg.qk_norm,
                proj_bias=cfg.proj_bias,
                init_values=cfg.init_values,
                proj_drop=cfg.proj_drop_rate,
                attn_drop=cfg.attn_drop_rate,
                drop_path=dpr[i],
                norm_layer=norm_layer,
                act_layer=act_layer,
                mlp_layer=mlp_layer,
                depth=i,
                **dd,
            )
            for i in range(cfg.depth)
        ])

        # Feature info for downstream tasks
        patch_reduction = self.embeds.feat_ratio(as_scalar=True)
        self.feature_info = [
            dict(module=f'blocks.{i}', num_chs=cfg.embed_dim, reduction=patch_reduction)
            for i in range(cfg.depth)
        ]

        self.norm = norm_layer(cfg.embed_dim, **dd) if cfg.final_norm and not cfg.fc_norm else nn.Identity()

        # Classifier Head
        if cfg.global_pool == 'map':
            self.attn_pool = AttentionPoolLatent(
                self.embed_dim,
                num_heads=cfg.attn_pool_num_heads or cfg.num_heads,
                mlp_ratio=cfg.attn_pool_mlp_ratio or cfg.mlp_ratio,
                norm_layer=norm_layer,
                act_layer=act_layer,
                **dd,
            )
        else:
            self.attn_pool = None

        # Handle fc_norm default value
        fc_norm = cfg.fc_norm
        if fc_norm is None:
            fc_norm = cfg.global_pool == 'avg'
        self.fc_norm = norm_layer(cfg.embed_dim, **dd) if cfg.final_norm and fc_norm else nn.Identity()
        self.head_drop = nn.Dropout(cfg.drop_rate)
        self.head = nn.Linear(self.embed_dim, num_classes, **dd) if num_classes > 0 else nn.Identity()

        self.weight_init_mode = cfg.weight_init
        self.fix_init = cfg.fix_init
        # TODO: skip init when on meta device when safe to do so
        self.init_weights(cfg.weight_init, needs_reset=False)

    def fix_init_weight(self) -> None:
        """Apply initialization weight fix with layer-wise scaling.

        Divides each block's attention/MLP output projection weights by
        sqrt(2 * layer_index) (1-based), in-place under no_grad.
        """
        def rescale(param: torch.Tensor, _layer_id: int) -> None:
            with torch.no_grad():
                param.div_(math.sqrt(2.0 * _layer_id))

        for layer_id, layer in enumerate(self.blocks):
            if hasattr(layer, 'attn'):
                rescale(layer.attn.proj.weight, layer_id + 1)
            if hasattr(layer, 'mlp'):
                rescale(layer.mlp.fc2.weight, layer_id + 1)
            if hasattr(layer, 'attn_out_proj'):
                rescale(layer.attn_out_proj.weight, layer_id + 1)
            if hasattr(layer, 'mlp_out_proj'):
                rescale(layer.mlp_out_proj.weight, layer_id + 1)


    def init_weights(self, mode: str = '', needs_reset: bool = True) -> None:
        """Initialize model weights according 
to specified scheme.\n\n        Args:\n            mode: Initialization mode ('jax', 'jax_nlhb', 'moco', or '')\n            needs_reset: If True, call reset_parameters() on modules (default for after to_empty()).\n                If False, skip reset_parameters() (for __init__ where modules already self-initialized).\n        \"\"\"\n        mode = mode or self.weight_init_mode\n        assert mode in ('jax', 'jax_nlhb', 'moco', '')\n        head_bias = -math.log(self.num_classes) if 'nlhb' in mode else 0.\n\n        named_apply(get_init_weights_vit(mode, head_bias, needs_reset=needs_reset), self)\n\n        if self.fix_init:\n            self.fix_init_weight()\n\n    @torch.jit.ignore()\n    def load_pretrained(self, checkpoint_path: str, prefix: str = '') -> None:\n        # Custom loading for the new model structure\n        from .vision_transformer import _load_weights as _orig_load_weights\n\n        def _load_weights_adapter(model, checkpoint_path, prefix=''):\n            \"\"\"Adapter function to handle the different model structure\"\"\"\n            state_dict = torch.load(checkpoint_path, map_location='cpu')\n            if isinstance(state_dict, dict) and 'state_dict' in state_dict:\n                state_dict = state_dict['state_dict']\n\n            # Map original keys to new structure\n            for k in list(state_dict.keys()):\n                if k.startswith('cls_token'):\n                    state_dict['embeds.' + k] = state_dict.pop(k)\n                elif k.startswith('reg_token'):\n                    state_dict['embeds.' + k] = state_dict.pop(k)\n                elif k.startswith('pos_embed'):\n                    state_dict['embeds.' + k] = state_dict.pop(k)\n                elif k.startswith('patch_embed'):\n                    state_dict['embeds.' 
+ k[12:]] = state_dict.pop(k)\n\n            return _orig_load_weights(model, state_dict, prefix)\n\n        _load_weights_adapter(self, checkpoint_path, prefix)\n\n    @torch.jit.ignore\n    def no_weight_decay(self) -> Set:\n        \"\"\"Get set of parameter names that should not have weight decay applied.\n\n        Returns:\n            Set of parameter names to skip during weight decay\n        \"\"\"\n        skip_list = {'embeds.pos_embed', 'embeds.cls_token', 'embeds.reg_token'}\n        if self.rope and hasattr(self.rope, 'no_weight_decay'):\n            skip_list.update(self.rope.no_weight_decay())\n        return skip_list\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict:\n        \"\"\"Get parameter group matcher for optimizer parameter grouping.\n\n        Args:\n            coarse: Whether to use coarse-grained grouping\n\n        Returns:\n            Dictionary mapping group names to regex patterns\n        \"\"\"\n        return dict(\n            stem=r'^embeds',  # stem and embed\n            blocks=[(r'^blocks\\.(\\d+)', None), (r'^norm', (99999,))]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Enable or disable gradient checkpointing for memory efficiency.\n\n        Args:\n            enable: Whether to enable gradient checkpointing\n        \"\"\"\n        self.grad_checkpointing = enable\n        if hasattr(self.embeds, 'patch_embed') and hasattr(self.embeds.patch_embed, 'set_grad_checkpointing'):\n            self.embeds.patch_embed.set_grad_checkpointing(enable)\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        \"\"\"Get the classification head module.\n\n        Returns:\n            Classification head module\n        \"\"\"\n        return self.head\n\n    @disable_compiler\n    def _generate_rope_naflex(\n            self,\n            x: torch.Tensor,\n            patch_coord: torch.Tensor,\n    ) -> 
Union[torch.Tensor, List[torch.Tensor], Any]:\n        \"\"\"Generate ROPE position embeddings for NaFlex batch with variable grid sizes.\n\n        Args:\n            x: Input tensor [B, N, C]\n            patch_coord: Patch coordinates [B, N, 2] with (y, x) values\n\n        Returns:\n            ROPE embeddings:\n            - Axial mode: Tensor of shape [B, 1, N, dim*2]\n            - Mixed mode: List of tensors, each of shape [B, num_heads, N, dim], one per depth layer\n            - Mixed mode with iterator: Iterator yielding tensors per depth\n        \"\"\"\n        # Calculate grid sizes for each sample\n        naflex_grid_sizes = calculate_naflex_grid_sizes(patch_coord)\n\n        # Build ROPE embeddings for each unique grid size\n        size_to_indices = {}\n        unique_sizes = []\n        for bi, grid_size in enumerate(naflex_grid_sizes):\n            if grid_size not in size_to_indices:\n                size_to_indices[grid_size] = []\n                unique_sizes.append(grid_size)\n            size_to_indices[grid_size].append(bi)\n\n        B, N, C = x.shape\n        seq_len = N - self.num_prefix_tokens\n\n        if self.rope_is_mixed:\n            # Use an iterator for Mixed mode, returns [batch_size, depth, num_heads, seq_len, dim]\n            return NaFlexRopeIterator(\n                self.rope,\n                size_to_indices,\n                unique_sizes,\n                B,\n                seq_len,\n                x.dtype,\n                x.device\n            )\n\n        # Axial mode: [batch_size, seq_len, dim*2]\n        rope_embeds = torch.zeros(B, seq_len, self.rope.dim * 2, dtype=x.dtype, device=x.device)\n\n        if hasattr(self.rope, 'get_batch_embeds'):\n            # Batch mode - generate unique embeds from one grid and then assign\n            unique_embeds = self.rope.get_batch_embeds(unique_sizes)\n            for grid_size, embed, batch_indices in zip(unique_sizes, unique_embeds, size_to_indices.values()):\n         
       h, w = grid_size\n                actual_len = h * w\n                for bi in batch_indices:\n                    rope_embeds[bi, :actual_len] = embed[:actual_len]\n\n        else:\n            # Generate each unique size separately and assign\n            for grid_size, bi in size_to_indices.items():\n                rope_embed = self.rope.get_embed(shape=grid_size)\n                h, w = grid_size\n                actual_len = h * w\n                rope_embeds[bi, :actual_len] = rope_embed[:actual_len]\n\n        rope_embeds = rope_embeds.unsqueeze(1)\n\n        return rope_embeds\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None) -> None:\n        \"\"\"Reset the classification head with new number of classes and pooling.\n\n        Args:\n            num_classes: Number of classes for new classification head\n            global_pool: Optional new global pooling type\n        \"\"\"\n        self.num_classes = num_classes\n        if global_pool is not None:\n            assert global_pool in ('', 'avg', 'avgmax', 'max', 'token', 'map')\n            if global_pool == 'map' and self.attn_pool is None:\n                assert False, \"Cannot currently add attention pooling in reset_classifier().\"\n            elif global_pool != 'map' and self.attn_pool is not None:\n                self.attn_pool = None  # remove attention pooling\n            self.global_pool = global_pool\n        self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity()\n\n    def _forward_embeds(\n            self,\n            x,\n            patch_coord,\n            patch_valid,\n            attn_mask,\n    ) -> Dict[str, torch.Tensor]:\n        \"\"\" Forward pass through patch / abs pos / rope pos embeds and patch dropout\n        \"\"\"\n        naflex_mode = patch_coord is not None\n\n        # patch embed, abs pos embed, returns global grid size as calculated from 'standard' NCHW batches\n        x, 
grid_size = self.embeds(\n            x,\n            patch_coord=patch_coord,\n            patch_valid=patch_valid,\n        )\n\n        # Generate ROPE embeddings at model level\n        rope_embeds = None\n        if self.rope is not None:\n            if patch_coord is not None:\n                # NaFlex mode - variable grid sizes\n                rope_embeds = self._generate_rope_naflex(x, patch_coord)\n            elif grid_size is not None:\n                # Standard mode - fixed grid size\n                rope_embeds = self.rope.get_embed(shape=grid_size)\n            else:\n                assert False, 'Expected one of patch_coord or grid_size to be valid'\n\n        # Apply patch dropout with coordinated updates\n        keep_indices: Optional[torch.Tensor] = None\n        if self.training and self.patch_drop is not None:\n            x, keep_indices = self.patch_drop(x)\n            # keep_indices excludes prefix tokens, can use directly on patch_valid & rope embeds\n            if patch_valid is not None:\n                patch_valid = patch_valid.gather(1, keep_indices)\n            if rope_embeds is not None and not self.rope_is_mixed:\n                # Update ROPE embeddings to match dropped tokens (only for axial mode)\n                # Batch dim already present in NaFlex mode, but will be added in standard mode.\n                rope_embeds = apply_keep_indices_nlc(x, rope_embeds, keep_indices, pos_embed_has_batch=naflex_mode)\n                if not naflex_mode:\n                    # B, N, dim -> B, 1, N, dim. 
Need head dim added for standard mode, already added in NaFlex.\n                    rope_embeds = rope_embeds.unsqueeze(1)\n\n        # Create attention mask from patch_valid after patch dropout applied\n        if attn_mask is None:\n            attn_mask = create_attention_mask(\n                patch_valid,\n                num_prefix_tokens=self.num_prefix_tokens,\n                dtype=x.dtype\n            )\n\n        x = self.norm_pre(x)\n        return {\n            'patches': x,\n            'patch_valid': patch_valid,\n            'rope_embeds': rope_embeds,\n            'attn_mask': attn_mask,\n            'keep_indices': keep_indices,\n        }\n\n    def forward_intermediates(\n            self,\n            x: Union[torch.Tensor, Dict[str, torch.Tensor]],\n            indices: Optional[Union[int, List[int]]] = None,\n            return_prefix_tokens: bool = False,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n            output_dict: bool = False,\n            patch_coord: Optional[torch.Tensor] = None,\n            patch_valid: Optional[torch.Tensor] = None,\n            attn_mask: Optional[torch.Tensor] = None,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]], Dict[str, Any]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            return_prefix_tokens: Return both prefix and spatial intermediate tokens\n            norm: Apply norm layer to all intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n            output_dict: Return outputs as a dictionary with 
'image_features' and 'image_intermediates' keys\n            patch_coord: Optional patch coordinates [B, N, 2] for NaFlex mode\n            patch_valid: Optional patch type indicators (1=patch, 0=padding) for NaFlex\n            attn_mask: Optional attention mask for masked attention\n        Returns:\n            A tuple with (final_features, intermediates), a list of intermediate features, or a dictionary containing\n            'image_features' and 'image_intermediates' (and optionally 'image_intermediates_prefix')\n        \"\"\"\n\n        # FIXME unfinished / untested\n\n        assert output_fmt in ('NCHW', 'NLC'), 'Output format must be one of NCHW or NLC.'\n        reshape = output_fmt == 'NCHW'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n        if isinstance(x, Dict):\n            # Handle dictionary input from NaFlex collator\n            patch_coord = x['patch_coord']\n            patch_valid = x['patch_valid']\n            patches = x['patches']\n            assert False, 'WIP, patch mode needs more work'\n        else:\n            patches = x\n            height, width = x.shape[-2:]\n            H, W = self.embeds.dynamic_feat_size((height, width))\n\n        # Forward pass through patch and abs position embedding\n        embeds = self._forward_embeds(\n            patches,\n            patch_coord=patch_coord,\n            patch_valid=patch_valid,\n            attn_mask=attn_mask,\n        )\n        x = embeds['patches']\n        rope_embeds = embeds.get('rope_embeds', None)\n        keep_indices = embeds.get('keep_indices', None)\n        attn_mask = embeds.get('attn_mask', None)\n\n        # Forward pass through blocks\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            blocks = self.blocks\n        else:\n            blocks = self.blocks[:max_index + 1]\n\n        do_checkpointing = self.grad_checkpointing and not 
torch.jit.is_scripting()\n        if self.rope_is_mixed and rope_embeds is not None:\n            # Mixed mode with per-layer embeddings (list or iterator)\n            for i, (blk, rope_embed) in enumerate(zip(blocks, rope_embeds)):\n                # Apply patch dropout to rope_embed if needed\n                if self.training and self.patch_drop is not None and keep_indices is not None:\n                    # Apply patch dropout to rope_embed if needed (batch dim already present in naflex mode)\n                    rope_embed = apply_keep_indices_nlc(\n                        x,\n                        rope_embed,\n                        keep_indices,\n                        pos_embed_has_batch=embeds.get('naflex_mode', False),\n                    )\n                if do_checkpointing:\n                    x = checkpoint(blk, x, rope=rope_embed, attn_mask=attn_mask)\n                else:\n                    x = blk(x, rope=rope_embed, attn_mask=attn_mask)\n                if i in take_indices:\n                    # normalize intermediates with final norm layer if enabled\n                    intermediates.append(self.norm(x) if norm else x)\n        else:\n            for i, blk in enumerate(blocks):\n                # Axial ROPE mode with shared embeddings\n                if rope_embeds is not None:\n                    if do_checkpointing:\n                        x = checkpoint(blk, x, rope=rope_embeds, attn_mask=attn_mask)\n                    else:\n                        x = blk(x, rope=rope_embeds, attn_mask=attn_mask)\n                else:\n                    if do_checkpointing:\n                        x = checkpoint(blk, x, attn_mask=attn_mask)\n                    else:\n                        x = blk(x, attn_mask=attn_mask)\n                if i in take_indices:\n                    # normalize intermediates with final norm layer if enabled\n                    intermediates.append(self.norm(x) if norm else x)\n\n        # Process 
intermediates\n        if self.num_prefix_tokens:\n            # split prefix (e.g. class, distill) and spatial feature tokens\n            prefix_tokens = [y[:, 0:self.num_prefix_tokens] for y in intermediates]\n            intermediates = [y[:, self.num_prefix_tokens:] for y in intermediates]\n        else:\n            prefix_tokens = None\n\n        if reshape:\n            # reshape to BCHW output format\n            intermediates = [\n                y.reshape(y.shape[0], H, W, -1).permute(0, 3, 1, 2).contiguous()\n                for y in intermediates\n            ]\n\n        # FIXME always use dict for NaFlex mode to return masks and more?\n\n        # For dictionary output\n        if output_dict:\n            result_dict = {}\n            # Intermediates are always included\n            result_dict['image_intermediates'] = intermediates\n            if prefix_tokens is not None and return_prefix_tokens:\n                result_dict['image_intermediates_prefix'] = prefix_tokens\n\n            # Only include features if not intermediates_only\n            if not intermediates_only:\n                x_final = self.norm(x)\n                result_dict['image_features'] = x_final\n\n            return result_dict\n\n        # For non-dictionary output, maintain the original behavior\n        if not torch.jit.is_scripting() and return_prefix_tokens and prefix_tokens is not None:\n            # return_prefix not support in torchscript due to poor type handling\n            intermediates = list(zip(intermediates, prefix_tokens))\n\n        if intermediates_only:\n            return intermediates\n\n        x = self.norm(x)\n\n        return x, intermediates\n\n    def forward_features(\n            self,\n            patches: torch.Tensor,\n            patch_coord: Optional[torch.Tensor] = None,\n            patch_valid: Optional[torch.Tensor] = None,\n            attn_mask: Optional[torch.Tensor] = None,\n    ) -> Union[torch.Tensor, Dict[str, 
torch.Tensor]]:\n        \"\"\"\n        \"\"\"\n        naflex_mode = patch_coord is not None\n\n        # Pass through patch & abs position embedding module with patch coordinate/type support\n        embeds = self._forward_embeds(\n            patches,\n            patch_coord=patch_coord,\n            patch_valid=patch_valid,\n            attn_mask=attn_mask,\n        )\n        x = embeds['patches']\n        rope_embeds = embeds.get('rope_embeds', None)\n        keep_indices = embeds.get('keep_indices', None)\n        attn_mask = embeds.get('attn_mask', None)\n\n        # Apply transformer blocks with masked attention and/or ROPE if provided\n        do_checkpointing = self.grad_checkpointing and not torch.jit.is_scripting()\n        if self.rope_is_mixed and rope_embeds is not None:\n            # Mixed mode with per-layer embeddings (list or iterator)\n            for i, (blk, rope_embed) in enumerate(zip(self.blocks, rope_embeds)):\n                if self.training and self.patch_drop is not None and keep_indices is not None:\n                    # Apply patch dropout to rope_embed if needed (batch dim already present in naflex mode)\n                    rope_embed = apply_keep_indices_nlc(\n                        x,\n                        rope_embed,\n                        keep_indices,\n                        pos_embed_has_batch=naflex_mode,\n                    )\n                if do_checkpointing:\n                    x = checkpoint(blk, x, rope=rope_embed, attn_mask=attn_mask)\n                else:\n                    x = blk(x, rope=rope_embed, attn_mask=attn_mask)\n        elif rope_embeds is not None:\n            # Axial ROPE mode with shared embeddings\n            for blk in self.blocks:\n                if do_checkpointing:\n                    x = checkpoint(blk, x, rope=rope_embeds, attn_mask=attn_mask)\n                else:\n                    x = blk(x, rope=rope_embeds, attn_mask=attn_mask)\n        else:\n            for blk in 
self.blocks:\n                if do_checkpointing:\n                    x = checkpoint(blk, x, attn_mask=attn_mask)\n                else:\n                    x = blk(x, attn_mask=attn_mask)\n\n        x = self.norm(x)\n\n        if naflex_mode:\n            return {\n                'patches': x,\n                'patch_valid': embeds.get('patch_valid', None),\n            }\n\n        return x\n\n    def _pool(\n            self,\n            x: torch.Tensor,\n            pool_type: Optional[str] = None,\n            patch_valid: Optional[torch.Tensor] = None,\n    ) -> torch.Tensor:\n        if self.attn_pool is not None:\n            attn_mask = create_attention_mask(\n                patch_valid,\n                num_prefix_tokens=self.num_prefix_tokens if self.pool_include_prefix else 0,\n                symmetric=False,\n                q_len=1,\n                dtype=x.dtype,\n            )\n            if not self.pool_include_prefix:\n                x = x[:, self.num_prefix_tokens:]\n            x = self.attn_pool(x, attn_mask=attn_mask)\n            return x\n\n        pool_type = self.global_pool if pool_type is None else pool_type\n\n        x = global_pool_naflex(\n            x,\n            patch_valid,\n            pool_type=pool_type,\n            num_prefix_tokens=self.num_prefix_tokens,\n            reduce_include_prefix=self.pool_include_prefix,\n        )\n        return x\n\n    def forward_head(\n            self,\n            patches: torch.Tensor,\n            pre_logits: bool = False,\n            patch_valid: Optional[torch.Tensor] = None,\n    ) -> torch.Tensor:\n        x = self._pool(patches, patch_valid=patch_valid)\n        x = self.fc_norm(x)\n        x = self.head_drop(x)\n        return x if pre_logits else self.head(x)\n\n    def forward(\n            self,\n            x: Union[torch.Tensor, Dict[str, torch.Tensor]],\n            patch_coord: Optional[torch.Tensor] = None,\n            patch_valid: Optional[torch.Tensor] = 
None,\n            attn_mask: Optional[torch.Tensor] = None,\n    ) -> torch.Tensor:\n        \"\"\"Forward pass with optional NaFlex support.\n\n        Args:\n            x: Input tensor. Supported formats:\n                - [B, C, H, W] standard image input\n                - [B, N, P*P*C] pre-patchified tensor (flattened patches)\n                - [B, N, Ph, Pw, C] pre-patchified tensor (variable patch size)\n                - Dict from NaFlex collator\n            patch_coord: Optional patch coordinates [B, N, 2] for NaFlex mode.\n            patch_valid: Optional patch validity indicators for NaFlex.\n            attn_mask: Optional attn mask to override defaults generated from patch_valid\n\n        Returns:\n            Model output tensor.\n        \"\"\"\n        input_is_dict = isinstance(x, Dict)\n        naflex_mode = input_is_dict or patch_coord is not None\n        if naflex_mode:\n            if input_is_dict:\n                # Handle dictionary input from NaFlex collator, dict inputs take priority over args\n                patches = x['patches']\n                patch_valid = x.get('patch_valid', patch_valid)\n                patch_coord = x.get('patch_coord', patch_coord)\n                attn_mask = x.get('attn_mask', attn_mask)\n            else:\n                patches = x\n            _assert(patch_coord is not None, \"patch_coord is required in naflex mode\")\n            _assert(patch_valid is not None, \"patch_valid is required in naflex mode\")\n\n            features = self.forward_features(\n                patches=patches,\n                patch_valid=patch_valid,\n                patch_coord=patch_coord,\n                attn_mask=attn_mask,\n            )\n\n            # Pass patches & patch_valid to forward_head for masked pooling\n            x = self.forward_head(**features)\n        else:\n            x = self.forward_features(x)\n            x = self.forward_head(x)\n        return x\n\n\ndef _debug_dump_patches(x):\n    # 
DEBUG, reconstruct patches & save\n    patch_coord = x['patch_coord']\n    patch_valid = x['patch_valid']\n    patches = x['patches']\n    for i in range(len(patches)):\n        patch = patches[i][patch_valid[i]]\n        h = (patch_coord[i, :, 0].max() + 1).item()\n        w = (patch_coord[i, :, 1].max() + 1).item()\n        patch = patch.reshape(h, w, 16, 16, 3).permute(4, 0, 2, 1, 3)\n        patch = patch.reshape(3, h*16, w*16)\n        from torchvision.utils import save_image\n        save_image(patch, f'patch_{i}.jpg', normalize=True)\n\n\ndef get_init_weights_vit(mode: str = 'jax', head_bias: float = 0.0, needs_reset: bool = True) -> Callable:\n    \"\"\"Function imported from vision_transformer.py to maintain compatibility\"\"\"\n    from .vision_transformer import (\n        init_weights_vit_jax,\n        init_weights_vit_moco,\n        init_weights_vit_timm,\n        init_weights_reset_parameters,\n    )\n\n    if 'jax' in mode:\n        return partial(init_weights_vit_jax, head_bias=head_bias, needs_reset=needs_reset)\n    elif 'moco' in mode:\n        return partial(init_weights_vit_moco, needs_reset=needs_reset)\n    else:\n        return partial(init_weights_vit_timm, needs_reset=needs_reset)\n\n\ndef checkpoint_filter_fn(state_dict: Dict[str, Any], model: NaFlexVit) -> Dict[str, Any]:\n    \"\"\"Handle state dict conversion from original ViT to the new version with combined embedding.\"\"\"\n\n    # Handle CombinedEmbed module pattern\n    out_dict = {}\n    for k, v in state_dict.items():\n        # Convert tokens and embeddings to combined_embed structure\n        if k == 'pos_embed':\n            # Handle position embedding format conversion - from (1, N, C) to (1, H, W, C)\n            if hasattr(model.embeds, 'pos_embed') and v.ndim == 3:\n                num_cls_token = 0\n                num_reg_token = 0\n                if 'reg_token' in state_dict:\n                    num_reg_token = state_dict['reg_token'].shape[1]\n                if 
'cls_token' in state_dict:\n                    num_cls_token = state_dict['cls_token'].shape[1]\n                num_prefix_tokens = num_cls_token + num_reg_token\n\n                # Original format is (1, N, C), need to reshape to (1, H, W, C)\n                num_patches = v.shape[1]\n                num_patches_no_prefix = num_patches - num_prefix_tokens\n                grid_size_no_prefix = math.sqrt(num_patches_no_prefix)\n                grid_size = math.sqrt(num_patches)\n                if (grid_size_no_prefix != grid_size\n                        and (grid_size_no_prefix.is_integer() and not grid_size.is_integer())\n                ):\n                    # make a decision, did the pos_embed of the original include the prefix tokens?\n                    num_patches = num_patches_no_prefix\n                    cls_token_emb = v[:, 0:num_cls_token]\n                    if cls_token_emb.numel():\n                        state_dict['cls_token'] += cls_token_emb\n                    reg_token_emb = v[:, num_cls_token:num_cls_token + num_reg_token]\n                    if reg_token_emb.numel():\n                        state_dict['reg_token'] += reg_token_emb\n                    v = v[:, num_prefix_tokens:]\n                    grid_size = grid_size_no_prefix\n                grid_size = int(grid_size)\n\n                # Check if it's a perfect square for a standard grid\n                if grid_size * grid_size == num_patches:\n                    # Reshape from (1, N, C) to (1, H, W, C)\n                    v = v.reshape(1, grid_size, grid_size, v.shape[2])\n                else:\n                    # Not a square grid, we need to get the actual dimensions\n                    if hasattr(model.embeds.patch_embed, 'grid_size'):\n                        h, w = model.embeds.patch_embed.grid_size\n                        if h * w == num_patches:\n                            # We have the right dimensions\n                            v = v.reshape(1, h, w, v.shape[2])\n    
                    else:\n                            # Dimensions don't match, use interpolation\n                            _logger.warning(\n                                f\"Position embedding size mismatch: checkpoint={num_patches}, model={(h * w)}. \"\n                                f\"Using default initialization and will resize in forward pass.\"\n                            )\n                            # Keep v as is, the forward pass will handle resizing\n\n            out_dict['embeds.pos_embed'] = v\n        elif k == 'cls_token':\n            out_dict['embeds.cls_token'] = v\n        elif k == 'reg_token':\n            out_dict['embeds.reg_token'] = v\n        # Convert patch_embed.X to embeds.patch_embed.X\n        elif k.startswith('patch_embed.'):\n            suffix = k[12:]\n            if suffix == 'proj.weight':\n                v = v.permute(0, 2, 3, 1).flatten(1)\n            new_key = 'embeds.' + suffix\n            out_dict[new_key] = v\n        else:\n            out_dict[k] = v\n\n    return out_dict\n\n\ndef _cfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    return {\n        'url': url,\n        'num_classes': 1000,\n        'input_size': (3, 384, 384),\n        'pool_size': None,\n        'crop_pct': 1.0,\n        'interpolation': 'bicubic',\n        'mean': IMAGENET_INCEPTION_MEAN,\n        'std': IMAGENET_INCEPTION_STD,\n        'first_conv': 'embeds.proj',\n        'classifier': 'head',\n        'license': 'apache-2.0',\n        **kwargs,\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'naflexvit_base_patch16_gap.e300_s576_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'naflexvit_base_patch16_par_gap.e300_s576_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'naflexvit_base_patch16_parfac_gap.e300_s576_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'naflexvit_base_patch16_map.untrained': _cfg(),\n    'naflexvit_so150m2_patch16_reg1_gap.untrained': _cfg(),\n    
'naflexvit_so150m2_patch16_reg1_map.untrained': _cfg(),\n\n    # SigLIP-2 NaFlex vit encoder weights\n    'naflexvit_base_patch16_siglip.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        num_classes=0),\n    'naflexvit_so400m_patch16_siglip.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        num_classes=0),\n})\n\n\ndef _create_naflexvit(variant: str, pretrained: bool = False, **kwargs) -> NaFlexVit:\n    out_indices = kwargs.pop('out_indices', 3)\n    cfg = kwargs.pop('cfg', NaFlexVitCfg())\n    cfg_field_names = {f.name for f in fields(NaFlexVitCfg)}\n    # pop in-place so the original kwargs is emptied of cfg-specific keys\n    cfg_updates = {k: kwargs.pop(k) for k in list(kwargs) if k in cfg_field_names}\n    if cfg_updates:\n        cfg = _overlay_kwargs(cfg, **cfg_updates)\n\n    model = build_model_with_cfg(\n        NaFlexVit, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        cfg=cfg,\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n    return model\n\n\ndef _create_naflexvit_from_classic(\n        variant: str,\n        pretrained: bool = False,\n        **kwargs,\n) -> NaFlexVit:\n    \"\"\"Create FlexVit model from classic VisionTransformer configuration.\n\n    This function handles the parameter mapping and configuration logic needed\n    to create FlexVit models that are compatible with classic VisionTransformer\n    configurations and pretrained weights.\n\n    Args:\n        variant: Model variant name\n        pretrained: Whether to load pretrained weights\n        **kwargs: Classic VisionTransformer parameters\n\n    Returns:\n        FlexVit model instance\n    \"\"\"\n    # Remove VisionTransformer-specific parameters that don't apply to FlexVit\n    kwargs.pop('no_embed_class', None)\n    kwargs.pop('dynamic_img_size', None)\n\n    # Handle global pooling and fc_norm defaults that differ between ViT and FlexVit\n    gp = kwargs.pop('global_pool', 'token')  
# Original ViTs default to cls token pooling\n    fc_norm = kwargs.pop('fc_norm', None)    # Original ViTs used fc_norm when not set and avg pooling used\n    if fc_norm is None and gp == 'avg':\n        fc_norm = True\n\n    # Set FlexVit-specific defaults that differ from VisionTransformer\n    flex_kwargs = {\n        'pos_embed_grid_size': None,  # rely on img_size (// patch_size) that will be passed through\n        'class_token': kwargs.get('class_token', True),\n        'global_pool': gp,\n        'fc_norm': fc_norm,\n        'scale_mlp_norm': kwargs.pop('scale_mlp_norm', False),\n        'scale_attn_inner_norm': kwargs.pop('scale_attn_norm', False),\n        **kwargs  # User overrides take precedence\n    }\n\n    return _create_naflexvit(variant, pretrained, **flex_kwargs)\n\n\ndef _create_naflexvit_from_eva(\n        variant: str,\n        pretrained: bool = False,\n        **kwargs,\n) -> NaFlexVit:\n    \"\"\"Create NaFlexVit model from EVA configuration.\n\n    This function handles the parameter mapping and configuration logic needed\n    to create NaFlexVit models that are compatible with EVA configurations\n    and pretrained weights.\n\n    Args:\n        variant: Model variant name\n        pretrained: Whether to load pretrained weights\n        **kwargs: EVA model parameters\n\n    Returns:\n        NaFlexVit model instance\n    \"\"\"\n    # Handle EVA's unique parameters & block args\n    kwargs.pop('no_embed_class', None)  # EVA specific, not used in NaFlexVit (always no-embed)\n\n    # Map EVA's rope parameters\n    use_rot_pos_emb = kwargs.pop('use_rot_pos_emb', False)\n    rope_mixed_mode = kwargs.pop('rope_mixed_mode', False)\n    rope_temperature = kwargs.pop('rope_temperature', 10000.)\n    rope_grid_offset = kwargs.pop('rope_grid_offset', 0.)\n    rope_grid_indexing = kwargs.pop('rope_grid_indexing', 'ij')\n    if use_rot_pos_emb:\n        rope_type = 'mixed' if rope_mixed_mode else 'axial'\n    else:\n        rope_type = 'none'\n\n    
# Handle norm/pool resolution logic to mirror EVA\n    gp = kwargs.pop('global_pool', 'avg')\n    use_pre_transformer_norm = kwargs.pop('use_pre_transformer_norm', False)\n    use_post_transformer_norm = kwargs.pop('use_post_transformer_norm', True)\n    use_fc_norm = kwargs.pop('use_fc_norm', None)\n    if use_fc_norm is None:\n        use_fc_norm = gp == 'avg'  # default on if avg pool used\n\n    # Set NaFlexVit-specific parameters\n    naflex_kwargs = {\n        'pos_embed_grid_size': None,  # rely on img_size (// patch_size)\n        'class_token': kwargs.get('class_token', True),\n        'reg_tokens':  kwargs.pop('num_reg_tokens', kwargs.get('reg_tokens', 0)),\n        'global_pool': gp,\n        'pre_norm': use_pre_transformer_norm,\n        'final_norm': use_post_transformer_norm,\n        'fc_norm': use_fc_norm,\n        'pos_embed': 'learned' if kwargs.pop('use_abs_pos_emb', True) else 'none',\n        'rope_type': rope_type,\n        'rope_temperature': rope_temperature,\n        'rope_grid_offset': rope_grid_offset,\n        'rope_grid_indexing': rope_grid_indexing,\n        'rope_ref_feat_shape': kwargs.get('ref_feat_shape', None),\n        'attn_type': kwargs.pop('attn_type', 'eva'),\n        'swiglu_mlp': kwargs.pop('swiglu_mlp', False),\n        'qkv_fused': kwargs.pop('qkv_fused', True),\n        'scale_mlp_norm': kwargs.pop('scale_mlp', False),\n        'scale_attn_inner_norm': kwargs.pop('scale_attn_inner', False),\n        **kwargs  # Pass remaining kwargs through\n    }\n\n    return _create_naflexvit(variant, pretrained, **naflex_kwargs)\n\n\n@register_model\ndef naflexvit_base_patch16_gap(pretrained: bool = False, **kwargs) -> NaFlexVit:\n    \"\"\"ViT-Base with NaFlex functionality and global average pooling.\n    \"\"\"\n    cfg = NaFlexVitCfg(\n        patch_size=16,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        init_values=1e-5,\n        global_pool='avg',\n        reg_tokens=4,\n        fc_norm=True,\n    
)\n    model = _create_naflexvit('naflexvit_base_patch16_gap', pretrained=pretrained, cfg=cfg, **kwargs)\n    return model\n\n\n@register_model\ndef naflexvit_base_patch16_par_gap(pretrained: bool = False, **kwargs) -> NaFlexVit:\n    \"\"\"ViT-Base with NaFlex functionality, aspect preserving pos embed, global average pooling.\n    \"\"\"\n    cfg = NaFlexVitCfg(\n        patch_size=16,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        init_values=1e-5,\n        pos_embed_ar_preserving=True,\n        global_pool='avg',\n        reg_tokens=4,\n        fc_norm=True,\n    )\n    model = _create_naflexvit('naflexvit_base_patch16_par_gap', pretrained=pretrained, cfg=cfg, **kwargs)\n    return model\n\n\n@register_model\ndef naflexvit_base_patch16_parfac_gap(pretrained: bool = False, **kwargs) -> NaFlexVit:\n    \"\"\"ViT-Base with NaFlex functionality, aspect preserving & factorized pos embed, global average pooling.\n    \"\"\"\n    cfg = NaFlexVitCfg(\n        patch_size=16,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        init_values=1e-5,\n        pos_embed_ar_preserving=True,\n        pos_embed='factorized',\n        global_pool='avg',\n        reg_tokens=4,\n        fc_norm=True,\n    )\n    model = _create_naflexvit('naflexvit_base_patch16_parfac_gap', pretrained=pretrained, cfg=cfg, **kwargs)\n    return model\n\n\n@register_model\ndef naflexvit_base_patch16_map(pretrained: bool = False, **kwargs) -> NaFlexVit:\n    \"\"\"ViT-Base with NaFlex functionality and MAP attention pooling.\n    \"\"\"\n    cfg = NaFlexVitCfg(\n        patch_size=16,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        init_values=1e-5,\n        global_pool='map',\n        reg_tokens=1,\n    )\n    model = _create_naflexvit('naflexvit_base_patch16_map', pretrained=pretrained, cfg=cfg, **kwargs)\n    return model\n\n\n@register_model\ndef naflexvit_so150m2_patch16_reg1_gap(pretrained: bool = False, **kwargs) -> 
NaFlexVit:\n    \"\"\"ViT-SO150M2 with NaFlex functionality for variable aspect ratios and resolutions.\n\n    This model supports:\n    1. Variable aspect ratios and resolutions via patch coordinates\n    2. Position embedding interpolation for arbitrary grid sizes\n    3. Explicit patch coordinates and valid token masking\n    \"\"\"\n    cfg = NaFlexVitCfg(\n        patch_size=16,\n        embed_dim=832,\n        depth=21,\n        num_heads=13,\n        mlp_ratio=34/13,\n        init_values=1e-5,\n        qkv_bias=False,\n        reg_tokens=1,\n        global_pool='avg',\n        fc_norm=True,\n    )\n    model = _create_naflexvit('naflexvit_so150m2_patch16_reg1_gap', pretrained=pretrained, cfg=cfg, **kwargs)\n    return model\n\n\n@register_model\ndef naflexvit_so150m2_patch16_reg1_map(pretrained: bool = False, **kwargs) -> NaFlexVit:\n    \"\"\"ViT-SO150M2 with NaFlex functionality for variable aspect ratios and resolutions.\n\n    This model supports:\n    1. Variable aspect ratios and resolutions via patch coordinates\n    2. Position embedding interpolation for arbitrary grid sizes\n    3. 
Explicit patch coordinates and valid token masking\n    \"\"\"\n    cfg = NaFlexVitCfg(\n        patch_size=16,\n        embed_dim=832,\n        depth=21,\n        num_heads=13,\n        mlp_ratio=34/13,\n        init_values=1e-5,\n        qkv_bias=False,\n        reg_tokens=1,\n        global_pool='map',\n    )\n    model = _create_naflexvit('naflexvit_so150m2_patch16_reg1_map', pretrained=pretrained, cfg=cfg, **kwargs)\n    return model\n\n\n@register_model\ndef naflexvit_base_patch16_siglip(pretrained: bool = False, **kwargs) -> NaFlexVit:\n    \"\"\"ViT-Base with NaFlex functionality and SigLIP-style configuration.\n    \"\"\"\n    cfg = NaFlexVitCfg(\n        patch_size=16,\n        embed_dim=768,\n        depth=12,\n        num_heads=12,\n        act_layer='gelu_tanh',\n        global_pool='map',\n    )\n    model = _create_naflexvit('naflexvit_base_patch16_siglip', pretrained=pretrained, cfg=cfg, **kwargs)\n    return model\n\n\n@register_model\ndef naflexvit_so400m_patch16_siglip(pretrained: bool = False, **kwargs) -> NaFlexVit:\n    \"\"\"ViT-SO400M with NaFlex functionality for variable aspect ratios and resolutions.\n    \"\"\"\n    cfg = NaFlexVitCfg(\n        patch_size=16,\n        embed_dim=1152,\n        depth=27,\n        num_heads=16,\n        mlp_ratio=3.7362,\n        act_layer='gelu_tanh',\n        global_pool='map',\n    )\n    model = _create_naflexvit('naflexvit_so400m_patch16_siglip', pretrained=pretrained, cfg=cfg, **kwargs)\n    return model\n"
  },
  {
    "path": "timm/models/nasnet.py",
    "content": "\"\"\" NasNet-A (Large)\n nasnetalarge implementation grabbed from Cadene's pretrained models\n https://github.com/Cadene/pretrained-models.pytorch\n\"\"\"\nfrom functools import partial\nfrom typing import Optional, Type\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.layers import ConvNormAct, create_conv2d, create_pool2d, create_classifier\nfrom ._builder import build_model_with_cfg\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['NASNetALarge']\n\n\n\nclass ActConvBn(nn.Module):\n\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int,\n            stride: int = 1,\n            padding: str = '',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.act = nn.ReLU()\n        self.conv = create_conv2d(\n            in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding, **dd)\n        self.bn = nn.BatchNorm2d(out_channels, eps=0.001, momentum=0.1, **dd)\n\n    def forward(self, x):\n        x = self.act(x)\n        x = self.conv(x)\n        x = self.bn(x)\n        return x\n\n\nclass SeparableConv2d(nn.Module):\n\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int,\n            stride: int,\n            padding: str = '',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.depthwise_conv2d = create_conv2d(\n            in_channels,\n            in_channels,\n            kernel_size=kernel_size,\n            stride=stride,\n            padding=padding,\n            groups=in_channels,\n            **dd,\n        )\n        self.pointwise_conv2d = create_conv2d(\n            in_channels,\n            out_channels,\n            kernel_size=1,\n      
      padding=0,\n            **dd,\n        )\n\n    def forward(self, x):\n        x = self.depthwise_conv2d(x)\n        x = self.pointwise_conv2d(x)\n        return x\n\n\nclass BranchSeparables(nn.Module):\n\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int,\n            stride: int = 1,\n            pad_type: str = '',\n            stem_cell: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        middle_channels = out_channels if stem_cell else in_channels\n        self.act_1 = nn.ReLU()\n        self.separable_1 = SeparableConv2d(\n            in_channels, middle_channels, kernel_size, stride=stride, padding=pad_type, **dd)\n        self.bn_sep_1 = nn.BatchNorm2d(middle_channels, eps=0.001, momentum=0.1, **dd)\n        self.act_2 = nn.ReLU(inplace=True)\n        self.separable_2 = SeparableConv2d(\n            middle_channels, out_channels, kernel_size, stride=1, padding=pad_type, **dd)\n        self.bn_sep_2 = nn.BatchNorm2d(out_channels, eps=0.001, momentum=0.1, **dd)\n\n    def forward(self, x):\n        x = self.act_1(x)\n        x = self.separable_1(x)\n        x = self.bn_sep_1(x)\n        x = self.act_2(x)\n        x = self.separable_2(x)\n        x = self.bn_sep_2(x)\n        return x\n\n\nclass CellStem0(nn.Module):\n    def __init__(\n            self,\n            stem_size: int,\n            num_channels: int = 42,\n            pad_type: str = '',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_channels = num_channels\n        self.stem_size = stem_size\n        self.conv_1x1 = ActConvBn(self.stem_size, self.num_channels, 1, stride=1, **dd)\n\n        self.comb_iter_0_left = BranchSeparables(\n            self.num_channels, self.num_channels, 5, 2, pad_type, 
**dd)\n        self.comb_iter_0_right = BranchSeparables(\n            self.stem_size, self.num_channels, 7, 2, pad_type, stem_cell=True, **dd)\n\n        self.comb_iter_1_left = create_pool2d('max', 3, 2, padding=pad_type)\n        self.comb_iter_1_right = BranchSeparables(\n            self.stem_size, self.num_channels, 7, 2, pad_type, stem_cell=True, **dd)\n\n        self.comb_iter_2_left = create_pool2d('avg', 3, 2, count_include_pad=False, padding=pad_type)\n        self.comb_iter_2_right = BranchSeparables(\n            self.stem_size, self.num_channels, 5, 2, pad_type, stem_cell=True, **dd)\n\n        self.comb_iter_3_right = create_pool2d('avg', 3, 1, count_include_pad=False, padding=pad_type)\n\n        self.comb_iter_4_left = BranchSeparables(\n            self.num_channels, self.num_channels, 3, 1, pad_type, **dd)\n        self.comb_iter_4_right = create_pool2d('max', 3, 2, padding=pad_type)\n\n    def forward(self, x):\n        x1 = self.conv_1x1(x)\n\n        x_comb_iter_0_left = self.comb_iter_0_left(x1)\n        x_comb_iter_0_right = self.comb_iter_0_right(x)\n        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right\n\n        x_comb_iter_1_left = self.comb_iter_1_left(x1)\n        x_comb_iter_1_right = self.comb_iter_1_right(x)\n        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right\n\n        x_comb_iter_2_left = self.comb_iter_2_left(x1)\n        x_comb_iter_2_right = self.comb_iter_2_right(x)\n        x_comb_iter_2 = x_comb_iter_2_left + x_comb_iter_2_right\n\n        x_comb_iter_3_right = self.comb_iter_3_right(x_comb_iter_0)\n        x_comb_iter_3 = x_comb_iter_3_right + x_comb_iter_1\n\n        x_comb_iter_4_left = self.comb_iter_4_left(x_comb_iter_0)\n        x_comb_iter_4_right = self.comb_iter_4_right(x1)\n        x_comb_iter_4 = x_comb_iter_4_left + x_comb_iter_4_right\n\n        x_out = torch.cat([x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)\n        return x_out\n\n\nclass CellStem1(nn.Module):\n\n    
def __init__(\n            self,\n            stem_size: int,\n            num_channels: int,\n            pad_type: str = '',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_channels = num_channels\n        self.stem_size = stem_size\n        self.conv_1x1 = ActConvBn(2 * self.num_channels, self.num_channels, 1, stride=1, **dd)\n\n        self.act = nn.ReLU()\n        self.path_1 = nn.Sequential()\n        self.path_1.add_module('avgpool', nn.AvgPool2d(1, stride=2, count_include_pad=False))\n        self.path_1.add_module('conv', nn.Conv2d(self.stem_size, self.num_channels // 2, 1, stride=1, bias=False, **dd))\n\n        self.path_2 = nn.Sequential()\n        self.path_2.add_module('pad', nn.ZeroPad2d((-1, 1, -1, 1)))\n        self.path_2.add_module('avgpool', nn.AvgPool2d(1, stride=2, count_include_pad=False))\n        self.path_2.add_module('conv', nn.Conv2d(self.stem_size, self.num_channels // 2, 1, stride=1, bias=False, **dd))\n\n        self.final_path_bn = nn.BatchNorm2d(self.num_channels, eps=0.001, momentum=0.1, **dd)\n\n        self.comb_iter_0_left = BranchSeparables(self.num_channels, self.num_channels, 5, 2, pad_type, **dd)\n        self.comb_iter_0_right = BranchSeparables(self.num_channels, self.num_channels, 7, 2, pad_type, **dd)\n\n        self.comb_iter_1_left = create_pool2d('max', 3, 2, padding=pad_type)\n        self.comb_iter_1_right = BranchSeparables(self.num_channels, self.num_channels, 7, 2, pad_type, **dd)\n\n        self.comb_iter_2_left = create_pool2d('avg', 3, 2, count_include_pad=False, padding=pad_type)\n        self.comb_iter_2_right = BranchSeparables(self.num_channels, self.num_channels, 5, 2, pad_type, **dd)\n\n        self.comb_iter_3_right = create_pool2d('avg', 3, 1, count_include_pad=False, padding=pad_type)\n\n        self.comb_iter_4_left = BranchSeparables(self.num_channels, self.num_channels, 3, 1, pad_type, **dd)\n  
      self.comb_iter_4_right = create_pool2d('max', 3, 2, padding=pad_type)\n\n    def forward(self, x_conv0, x_stem_0):\n        x_left = self.conv_1x1(x_stem_0)\n\n        x_relu = self.act(x_conv0)\n        # path 1\n        x_path1 = self.path_1(x_relu)\n        # path 2\n        x_path2 = self.path_2(x_relu)\n        # final path\n        x_right = self.final_path_bn(torch.cat([x_path1, x_path2], 1))\n\n        x_comb_iter_0_left = self.comb_iter_0_left(x_left)\n        x_comb_iter_0_right = self.comb_iter_0_right(x_right)\n        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right\n\n        x_comb_iter_1_left = self.comb_iter_1_left(x_left)\n        x_comb_iter_1_right = self.comb_iter_1_right(x_right)\n        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right\n\n        x_comb_iter_2_left = self.comb_iter_2_left(x_left)\n        x_comb_iter_2_right = self.comb_iter_2_right(x_right)\n        x_comb_iter_2 = x_comb_iter_2_left + x_comb_iter_2_right\n\n        x_comb_iter_3_right = self.comb_iter_3_right(x_comb_iter_0)\n        x_comb_iter_3 = x_comb_iter_3_right + x_comb_iter_1\n\n        x_comb_iter_4_left = self.comb_iter_4_left(x_comb_iter_0)\n        x_comb_iter_4_right = self.comb_iter_4_right(x_left)\n        x_comb_iter_4 = x_comb_iter_4_left + x_comb_iter_4_right\n\n        x_out = torch.cat([x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)\n        return x_out\n\n\nclass FirstCell(nn.Module):\n\n    def __init__(\n            self,\n            in_chs_left: int,\n            out_chs_left: int,\n            in_chs_right: int,\n            out_chs_right: int,\n            pad_type: str = '',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv_1x1 = ActConvBn(in_chs_right, out_chs_right, 1, stride=1, **dd)\n\n        self.act = nn.ReLU()\n        self.path_1 = nn.Sequential()\n        self.path_1.add_module('avgpool', 
nn.AvgPool2d(1, stride=2, count_include_pad=False))\n        self.path_1.add_module('conv', nn.Conv2d(in_chs_left, out_chs_left, 1, stride=1, bias=False, **dd))\n\n        self.path_2 = nn.Sequential()\n        self.path_2.add_module('pad', nn.ZeroPad2d((-1, 1, -1, 1)))\n        self.path_2.add_module('avgpool', nn.AvgPool2d(1, stride=2, count_include_pad=False))\n        self.path_2.add_module('conv', nn.Conv2d(in_chs_left, out_chs_left, 1, stride=1, bias=False, **dd))\n\n        self.final_path_bn = nn.BatchNorm2d(out_chs_left * 2, eps=0.001, momentum=0.1, **dd)\n\n        self.comb_iter_0_left = BranchSeparables(out_chs_right, out_chs_right, 5, 1, pad_type, **dd)\n        self.comb_iter_0_right = BranchSeparables(out_chs_right, out_chs_right, 3, 1, pad_type, **dd)\n\n        self.comb_iter_1_left = BranchSeparables(out_chs_right, out_chs_right, 5, 1, pad_type, **dd)\n        self.comb_iter_1_right = BranchSeparables(out_chs_right, out_chs_right, 3, 1, pad_type, **dd)\n\n        self.comb_iter_2_left = create_pool2d('avg', 3, 1, count_include_pad=False, padding=pad_type)\n\n        self.comb_iter_3_left = create_pool2d('avg', 3, 1, count_include_pad=False, padding=pad_type)\n        self.comb_iter_3_right = create_pool2d('avg', 3, 1, count_include_pad=False, padding=pad_type)\n\n        self.comb_iter_4_left = BranchSeparables(out_chs_right, out_chs_right, 3, 1, pad_type, **dd)\n\n    def forward(self, x, x_prev):\n        x_relu = self.act(x_prev)\n        x_path1 = self.path_1(x_relu)\n        x_path2 = self.path_2(x_relu)\n        x_left = self.final_path_bn(torch.cat([x_path1, x_path2], 1))\n        x_right = self.conv_1x1(x)\n\n        x_comb_iter_0_left = self.comb_iter_0_left(x_right)\n        x_comb_iter_0_right = self.comb_iter_0_right(x_left)\n        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right\n\n        x_comb_iter_1_left = self.comb_iter_1_left(x_left)\n        x_comb_iter_1_right = self.comb_iter_1_right(x_left)\n        x_comb_iter_1 = 
x_comb_iter_1_left + x_comb_iter_1_right\n\n        x_comb_iter_2_left = self.comb_iter_2_left(x_right)\n        x_comb_iter_2 = x_comb_iter_2_left + x_left\n\n        x_comb_iter_3_left = self.comb_iter_3_left(x_left)\n        x_comb_iter_3_right = self.comb_iter_3_right(x_left)\n        x_comb_iter_3 = x_comb_iter_3_left + x_comb_iter_3_right\n\n        x_comb_iter_4_left = self.comb_iter_4_left(x_right)\n        x_comb_iter_4 = x_comb_iter_4_left + x_right\n\n        x_out = torch.cat([x_left, x_comb_iter_0, x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)\n        return x_out\n\n\nclass NormalCell(nn.Module):\n\n    def __init__(\n            self,\n            in_chs_left: int,\n            out_chs_left: int,\n            in_chs_right: int,\n            out_chs_right: int,\n            pad_type: str = '',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv_prev_1x1 = ActConvBn(in_chs_left, out_chs_left, 1, stride=1, padding=pad_type, **dd)\n        self.conv_1x1 = ActConvBn(in_chs_right, out_chs_right, 1, stride=1, padding=pad_type, **dd)\n\n        self.comb_iter_0_left = BranchSeparables(out_chs_right, out_chs_right, 5, 1, pad_type, **dd)\n        self.comb_iter_0_right = BranchSeparables(out_chs_left, out_chs_left, 3, 1, pad_type, **dd)\n\n        self.comb_iter_1_left = BranchSeparables(out_chs_left, out_chs_left, 5, 1, pad_type, **dd)\n        self.comb_iter_1_right = BranchSeparables(out_chs_left, out_chs_left, 3, 1, pad_type, **dd)\n\n        self.comb_iter_2_left = create_pool2d('avg', 3, 1, count_include_pad=False, padding=pad_type)\n\n        self.comb_iter_3_left = create_pool2d('avg', 3, 1, count_include_pad=False, padding=pad_type)\n        self.comb_iter_3_right = create_pool2d('avg', 3, 1, count_include_pad=False, padding=pad_type)\n\n        self.comb_iter_4_left = BranchSeparables(out_chs_right, out_chs_right, 3, 1, pad_type, 
**dd)\n\n    def forward(self, x, x_prev):\n        x_left = self.conv_prev_1x1(x_prev)\n        x_right = self.conv_1x1(x)\n\n        x_comb_iter_0_left = self.comb_iter_0_left(x_right)\n        x_comb_iter_0_right = self.comb_iter_0_right(x_left)\n        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right\n\n        x_comb_iter_1_left = self.comb_iter_1_left(x_left)\n        x_comb_iter_1_right = self.comb_iter_1_right(x_left)\n        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right\n\n        x_comb_iter_2_left = self.comb_iter_2_left(x_right)\n        x_comb_iter_2 = x_comb_iter_2_left + x_left\n\n        x_comb_iter_3_left = self.comb_iter_3_left(x_left)\n        x_comb_iter_3_right = self.comb_iter_3_right(x_left)\n        x_comb_iter_3 = x_comb_iter_3_left + x_comb_iter_3_right\n\n        x_comb_iter_4_left = self.comb_iter_4_left(x_right)\n        x_comb_iter_4 = x_comb_iter_4_left + x_right\n\n        x_out = torch.cat([x_left, x_comb_iter_0, x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)\n        return x_out\n\n\nclass ReductionCell0(nn.Module):\n\n    def __init__(\n            self,\n            in_chs_left: int,\n            out_chs_left: int,\n            in_chs_right: int,\n            out_chs_right: int,\n            pad_type: str = '',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv_prev_1x1 = ActConvBn(in_chs_left, out_chs_left, 1, stride=1, padding=pad_type, **dd)\n        self.conv_1x1 = ActConvBn(in_chs_right, out_chs_right, 1, stride=1, padding=pad_type, **dd)\n\n        self.comb_iter_0_left = BranchSeparables(out_chs_right, out_chs_right, 5, 2, pad_type, **dd)\n        self.comb_iter_0_right = BranchSeparables(out_chs_right, out_chs_right, 7, 2, pad_type, **dd)\n\n        self.comb_iter_1_left = create_pool2d('max', 3, 2, padding=pad_type)\n        self.comb_iter_1_right = BranchSeparables(out_chs_right, 
out_chs_right, 7, 2, pad_type, **dd)\n\n        self.comb_iter_2_left = create_pool2d('avg', 3, 2, count_include_pad=False, padding=pad_type)\n        self.comb_iter_2_right = BranchSeparables(out_chs_right, out_chs_right, 5, 2, pad_type, **dd)\n\n        self.comb_iter_3_right = create_pool2d('avg', 3, 1, count_include_pad=False, padding=pad_type)\n\n        self.comb_iter_4_left = BranchSeparables(out_chs_right, out_chs_right, 3, 1, pad_type, **dd)\n        self.comb_iter_4_right = create_pool2d('max', 3, 2, padding=pad_type)\n\n    def forward(self, x, x_prev):\n        x_left = self.conv_prev_1x1(x_prev)\n        x_right = self.conv_1x1(x)\n\n        x_comb_iter_0_left = self.comb_iter_0_left(x_right)\n        x_comb_iter_0_right = self.comb_iter_0_right(x_left)\n        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right\n\n        x_comb_iter_1_left = self.comb_iter_1_left(x_right)\n        x_comb_iter_1_right = self.comb_iter_1_right(x_left)\n        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right\n\n        x_comb_iter_2_left = self.comb_iter_2_left(x_right)\n        x_comb_iter_2_right = self.comb_iter_2_right(x_left)\n        x_comb_iter_2 = x_comb_iter_2_left + x_comb_iter_2_right\n\n        x_comb_iter_3_right = self.comb_iter_3_right(x_comb_iter_0)\n        x_comb_iter_3 = x_comb_iter_3_right + x_comb_iter_1\n\n        x_comb_iter_4_left = self.comb_iter_4_left(x_comb_iter_0)\n        x_comb_iter_4_right = self.comb_iter_4_right(x_right)\n        x_comb_iter_4 = x_comb_iter_4_left + x_comb_iter_4_right\n\n        x_out = torch.cat([x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)\n        return x_out\n\n\nclass ReductionCell1(nn.Module):\n\n    def __init__(\n            self,\n            in_chs_left: int,\n            out_chs_left: int,\n            in_chs_right: int,\n            out_chs_right: int,\n            pad_type: str = '',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': 
device, 'dtype': dtype}\n        super().__init__()\n        self.conv_prev_1x1 = ActConvBn(in_chs_left, out_chs_left, 1, stride=1, padding=pad_type, **dd)\n        self.conv_1x1 = ActConvBn(in_chs_right, out_chs_right, 1, stride=1, padding=pad_type, **dd)\n\n        self.comb_iter_0_left = BranchSeparables(out_chs_right, out_chs_right, 5, 2, pad_type, **dd)\n        self.comb_iter_0_right = BranchSeparables(out_chs_right, out_chs_right, 7, 2, pad_type, **dd)\n\n        self.comb_iter_1_left = create_pool2d('max', 3, 2, padding=pad_type)\n        self.comb_iter_1_right = BranchSeparables(out_chs_right, out_chs_right, 7, 2, pad_type, **dd)\n\n        self.comb_iter_2_left = create_pool2d('avg', 3, 2, count_include_pad=False, padding=pad_type)\n        self.comb_iter_2_right = BranchSeparables(out_chs_right, out_chs_right, 5, 2, pad_type, **dd)\n\n        self.comb_iter_3_right = create_pool2d('avg', 3, 1, count_include_pad=False, padding=pad_type)\n\n        self.comb_iter_4_left = BranchSeparables(out_chs_right, out_chs_right, 3, 1, pad_type, **dd)\n        self.comb_iter_4_right = create_pool2d('max', 3, 2, padding=pad_type)\n\n    def forward(self, x, x_prev):\n        x_left = self.conv_prev_1x1(x_prev)\n        x_right = self.conv_1x1(x)\n\n        x_comb_iter_0_left = self.comb_iter_0_left(x_right)\n        x_comb_iter_0_right = self.comb_iter_0_right(x_left)\n        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right\n\n        x_comb_iter_1_left = self.comb_iter_1_left(x_right)\n        x_comb_iter_1_right = self.comb_iter_1_right(x_left)\n        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right\n\n        x_comb_iter_2_left = self.comb_iter_2_left(x_right)\n        x_comb_iter_2_right = self.comb_iter_2_right(x_left)\n        x_comb_iter_2 = x_comb_iter_2_left + x_comb_iter_2_right\n\n        x_comb_iter_3_right = self.comb_iter_3_right(x_comb_iter_0)\n        x_comb_iter_3 = x_comb_iter_3_right + x_comb_iter_1\n\n        x_comb_iter_4_left = 
self.comb_iter_4_left(x_comb_iter_0)\n        x_comb_iter_4_right = self.comb_iter_4_right(x_right)\n        x_comb_iter_4 = x_comb_iter_4_left + x_comb_iter_4_right\n\n        x_out = torch.cat([x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)\n        return x_out\n\n\nclass NASNetALarge(nn.Module):\n    \"\"\"NASNetALarge (6 @ 4032) \"\"\"\n\n    def __init__(\n            self,\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            stem_size: int = 96,\n            channel_multiplier: int = 2,\n            num_features: int = 4032,\n            output_stride: int = 32,\n            drop_rate: float = 0.,\n            global_pool: str = 'avg',\n            pad_type: str = 'same',\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.stem_size = stem_size\n        self.num_features = self.head_hidden_size = num_features\n        self.channel_multiplier = channel_multiplier\n        assert output_stride == 32\n\n        channels = self.num_features // 24\n        # 24 is default value for the architecture\n\n        self.conv0 = ConvNormAct(\n            in_channels=in_chans,\n            out_channels=self.stem_size,\n            kernel_size=3,\n            padding=0,\n            stride=2,\n            norm_layer=partial(nn.BatchNorm2d, eps=0.001, momentum=0.1),\n            apply_act=False,\n            **dd,\n        )\n\n        self.cell_stem_0 = CellStem0(\n            self.stem_size, num_channels=channels // (channel_multiplier ** 2), pad_type=pad_type, **dd)\n        self.cell_stem_1 = CellStem1(\n            self.stem_size, num_channels=channels // channel_multiplier, pad_type=pad_type, **dd)\n\n        self.cell_0 = FirstCell(\n            in_chs_left=channels, out_chs_left=channels // 2,\n            in_chs_right=2 * channels, 
out_chs_right=channels, pad_type=pad_type, **dd)\n        self.cell_1 = NormalCell(\n            in_chs_left=2 * channels, out_chs_left=channels,\n            in_chs_right=6 * channels, out_chs_right=channels, pad_type=pad_type, **dd)\n        self.cell_2 = NormalCell(\n            in_chs_left=6 * channels, out_chs_left=channels,\n            in_chs_right=6 * channels, out_chs_right=channels, pad_type=pad_type, **dd)\n        self.cell_3 = NormalCell(\n            in_chs_left=6 * channels, out_chs_left=channels,\n            in_chs_right=6 * channels, out_chs_right=channels, pad_type=pad_type, **dd)\n        self.cell_4 = NormalCell(\n            in_chs_left=6 * channels, out_chs_left=channels,\n            in_chs_right=6 * channels, out_chs_right=channels, pad_type=pad_type, **dd)\n        self.cell_5 = NormalCell(\n            in_chs_left=6 * channels, out_chs_left=channels,\n            in_chs_right=6 * channels, out_chs_right=channels, pad_type=pad_type, **dd)\n\n        self.reduction_cell_0 = ReductionCell0(\n            in_chs_left=6 * channels, out_chs_left=2 * channels,\n            in_chs_right=6 * channels, out_chs_right=2 * channels, pad_type=pad_type, **dd)\n        self.cell_6 = FirstCell(\n            in_chs_left=6 * channels, out_chs_left=channels,\n            in_chs_right=8 * channels, out_chs_right=2 * channels, pad_type=pad_type, **dd)\n        self.cell_7 = NormalCell(\n            in_chs_left=8 * channels, out_chs_left=2 * channels,\n            in_chs_right=12 * channels, out_chs_right=2 * channels, pad_type=pad_type, **dd)\n        self.cell_8 = NormalCell(\n            in_chs_left=12 * channels, out_chs_left=2 * channels,\n            in_chs_right=12 * channels, out_chs_right=2 * channels, pad_type=pad_type, **dd)\n        self.cell_9 = NormalCell(\n            in_chs_left=12 * channels, out_chs_left=2 * channels,\n            in_chs_right=12 * channels, out_chs_right=2 * channels, pad_type=pad_type, **dd)\n        self.cell_10 = 
NormalCell(\n            in_chs_left=12 * channels, out_chs_left=2 * channels,\n            in_chs_right=12 * channels, out_chs_right=2 * channels, pad_type=pad_type, **dd)\n        self.cell_11 = NormalCell(\n            in_chs_left=12 * channels, out_chs_left=2 * channels,\n            in_chs_right=12 * channels, out_chs_right=2 * channels, pad_type=pad_type, **dd)\n\n        self.reduction_cell_1 = ReductionCell1(\n            in_chs_left=12 * channels, out_chs_left=4 * channels,\n            in_chs_right=12 * channels, out_chs_right=4 * channels, pad_type=pad_type, **dd)\n        self.cell_12 = FirstCell(\n            in_chs_left=12 * channels, out_chs_left=2 * channels,\n            in_chs_right=16 * channels, out_chs_right=4 * channels, pad_type=pad_type, **dd)\n        self.cell_13 = NormalCell(\n            in_chs_left=16 * channels, out_chs_left=4 * channels,\n            in_chs_right=24 * channels, out_chs_right=4 * channels, pad_type=pad_type, **dd)\n        self.cell_14 = NormalCell(\n            in_chs_left=24 * channels, out_chs_left=4 * channels,\n            in_chs_right=24 * channels, out_chs_right=4 * channels, pad_type=pad_type, **dd)\n        self.cell_15 = NormalCell(\n            in_chs_left=24 * channels, out_chs_left=4 * channels,\n            in_chs_right=24 * channels, out_chs_right=4 * channels, pad_type=pad_type, **dd)\n        self.cell_16 = NormalCell(\n            in_chs_left=24 * channels, out_chs_left=4 * channels,\n            in_chs_right=24 * channels, out_chs_right=4 * channels, pad_type=pad_type, **dd)\n        self.cell_17 = NormalCell(\n            in_chs_left=24 * channels, out_chs_left=4 * channels,\n            in_chs_right=24 * channels, out_chs_right=4 * channels, pad_type=pad_type, **dd)\n        self.act = nn.ReLU(inplace=True)\n        self.feature_info = [\n            dict(num_chs=96, reduction=2, module='conv0'),\n            dict(num_chs=168, reduction=4, module='cell_stem_1.conv_1x1.act'),\n            
dict(num_chs=1008, reduction=8, module='reduction_cell_0.conv_1x1.act'),\n            dict(num_chs=2016, reduction=16, module='reduction_cell_1.conv_1x1.act'),\n            dict(num_chs=4032, reduction=32, module='act'),\n        ]\n\n        self.global_pool, self.head_drop, self.last_linear = create_classifier(\n            self.num_features,\n            self.num_classes,\n            pool_type=global_pool,\n            drop_rate=drop_rate,\n            **dd,\n        )\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^conv0|cell_stem_[01]',\n            blocks=[\n                (r'^cell_(\\d+)', None),\n                (r'^reduction_cell_0', (6,)),\n                (r'^reduction_cell_1', (12,)),\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, 'gradient checkpointing not supported'\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.last_linear\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg'):\n        self.num_classes = num_classes\n        self.global_pool, self.last_linear = create_classifier(\n            self.num_features, self.num_classes, pool_type=global_pool)\n\n    def forward_features(self, x):\n        x_conv0 = self.conv0(x)\n\n        x_stem_0 = self.cell_stem_0(x_conv0)\n        x_stem_1 = self.cell_stem_1(x_conv0, x_stem_0)\n\n        x_cell_0 = self.cell_0(x_stem_1, x_stem_0)\n        x_cell_1 = self.cell_1(x_cell_0, x_stem_1)\n        x_cell_2 = self.cell_2(x_cell_1, x_cell_0)\n        x_cell_3 = self.cell_3(x_cell_2, x_cell_1)\n        x_cell_4 = self.cell_4(x_cell_3, x_cell_2)\n        x_cell_5 = self.cell_5(x_cell_4, x_cell_3)\n\n        x_reduction_cell_0 = self.reduction_cell_0(x_cell_5, x_cell_4)\n        x_cell_6 = self.cell_6(x_reduction_cell_0, x_cell_4)\n        x_cell_7 = self.cell_7(x_cell_6, 
x_reduction_cell_0)\n        x_cell_8 = self.cell_8(x_cell_7, x_cell_6)\n        x_cell_9 = self.cell_9(x_cell_8, x_cell_7)\n        x_cell_10 = self.cell_10(x_cell_9, x_cell_8)\n        x_cell_11 = self.cell_11(x_cell_10, x_cell_9)\n\n        x_reduction_cell_1 = self.reduction_cell_1(x_cell_11, x_cell_10)\n        x_cell_12 = self.cell_12(x_reduction_cell_1, x_cell_10)\n        x_cell_13 = self.cell_13(x_cell_12, x_reduction_cell_1)\n        x_cell_14 = self.cell_14(x_cell_13, x_cell_12)\n        x_cell_15 = self.cell_15(x_cell_14, x_cell_13)\n        x_cell_16 = self.cell_16(x_cell_15, x_cell_14)\n        x_cell_17 = self.cell_17(x_cell_16, x_cell_15)\n        x = self.act(x_cell_17)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        x = self.head_drop(x)\n        return x if pre_logits else self.last_linear(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_nasnet(variant, pretrained=False, **kwargs):\n    return build_model_with_cfg(\n        NASNetALarge,\n        variant,\n        pretrained,\n        feature_cfg=dict(feature_cls='hook', no_rewrite=True),  # not possible to re-write this model\n        **kwargs,\n    )\n\n\ndefault_cfgs = generate_default_cfgs({\n    'nasnetalarge.tf_in1k': {\n        'hf_hub_id': 'timm/',\n        'url': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/nasnetalarge-dc4a7b8b.pth',\n        'input_size': (3, 331, 331),\n        'pool_size': (11, 11),\n        'crop_pct': 0.911,\n        'interpolation': 'bicubic',\n        'mean': (0.5, 0.5, 0.5),\n        'std': (0.5, 0.5, 0.5),\n        'num_classes': 1000,\n        'first_conv': 'conv0.conv',\n        'classifier': 'last_linear',\n        'license': 'apache-2.0',\n    },\n})\n\n\n@register_model\ndef nasnetalarge(pretrained=False, **kwargs) -> NASNetALarge:\n    \"\"\"NASNet-A large 
model architecture.\n    \"\"\"\n    model_kwargs = dict(pad_type='same', **kwargs)\n    return _create_nasnet('nasnetalarge', pretrained, **model_kwargs)\n"
  },
  {
    "path": "timm/models/nest.py",
    "content": "\"\"\" Nested Transformer (NesT) in PyTorch\n\nA PyTorch implement of Aggregating Nested Transformers as described in:\n\n'Aggregating Nested Transformers'\n    - https://arxiv.org/abs/2105.12723\n\nThe official Jax code is released and available at https://github.com/google-research/nested-transformer. The weights\nhave been converted with convert/convert_nest_flax.py\n\nAcknowledgments:\n* The paper authors for sharing their research, code, and model weights\n* Ross Wightman's existing code off which I based this\n\nCopyright 2021 Alexander Soare\n\"\"\"\n\nimport collections.abc\nimport logging\nimport math\nfrom functools import partial\nfrom typing import List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import (\n    PatchEmbed,\n    Mlp,\n    DropPath,\n    calculate_drop_path_rates,\n    create_classifier,\n    trunc_normal_,\n    _assert,\n    create_conv2d,\n    create_pool2d,\n    to_ntuple,\n    use_fused_attn,\n    LayerNorm,\n)\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_function\nfrom ._manipulate import checkpoint_seq, named_apply\nfrom ._registry import register_model, generate_default_cfgs, register_model_deprecations\n\n__all__ = ['Nest']  # model_registry will add each entrypoint fn to this\n\n_logger = logging.getLogger(__name__)\n\n\nclass Attention(nn.Module):\n    \"\"\"\n    This is much like `.vision_transformer.Attention` but uses *localised* self attention by accepting an input with\n     an extra \"image block\" dim\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            
device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_heads = num_heads\n        head_dim = dim // num_heads\n        self.scale = head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n\n        self.qkv = nn.Linear(dim, 3*dim, bias=qkv_bias, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n    def forward(self, x):\n        \"\"\"\n        x is shape: B (batch_size), T (image blocks), N (seq length per image block), C (embed dim)\n        \"\"\"\n        B, T, N, C = x.shape\n        # result of next line is (qkv, B, num (H)eads, T, N, (C')hannels per head)\n        qkv = self.qkv(x).reshape(B, T, N, 3, self.num_heads, C // self.num_heads).permute(3, 0, 4, 1, 2, 5)\n        q, k, v = qkv.unbind(0)  # make torchscript happy (cannot use tensor as tuple)\n\n        if self.fused_attn:\n            x = F.scaled_dot_product_attention(q, k, v, dropout_p=self.attn_drop.p if self.training else 0.)\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1) # (B, H, T, N, N)\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x = attn @ v\n\n        # (B, H, T, N, C'), permute -> (B, T, N, C', H)\n        x = x.permute(0, 2, 3, 4, 1).reshape(B, T, N, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x  # (B, T, N, C)\n\n\nclass TransformerLayer(nn.Module):\n    \"\"\"\n    This is much like `.vision_transformer.Block` but:\n        - Called TransformerLayer here to allow for \"block\" as defined in the paper (\"non-overlapping image blocks\")\n        - Uses modified Attention layer that handles the \"block\" dimension\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: 
bool = False,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = Attention(\n            dim,\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            **dd,\n        )\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n        self.norm2 = norm_layer(dim, **dd)\n        mlp_hidden_dim = int(dim * mlp_ratio)\n        self.mlp = Mlp(\n            in_features=dim,\n            hidden_features=mlp_hidden_dim,\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n    def forward(self, x):\n        y = self.norm1(x)\n        x = x + self.drop_path1(self.attn(y))\n        x = x + self.drop_path2(self.mlp(self.norm2(x)))\n        return x\n\n\nclass ConvPool(nn.Module):\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            norm_layer: Type[nn.Module],\n            pad_type: str = '',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv = create_conv2d(in_channels, out_channels, kernel_size=3, padding=pad_type, bias=True, **dd)\n        self.norm = norm_layer(out_channels, **dd)\n        self.pool = create_pool2d('max', kernel_size=3, stride=2, padding=pad_type)\n\n    def forward(self, x):\n        \"\"\"\n        x is expected to have shape (B, C, H, W)\n        \"\"\"\n        _assert(x.shape[-2] % 2 == 0, 'BlockAggregation requires even input spatial dims')\n        _assert(x.shape[-1] % 2 == 0, 'BlockAggregation requires even input spatial dims')\n        x = self.conv(x)\n        # Layer norm done over channel dim only\n        x = self.norm(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2)\n        x = self.pool(x)\n        return x  # (B, C, H//2, W//2)\n\n\ndef blockify(x, block_size: int):\n    \"\"\"image to blocks\n    Args:\n        x (Tensor): with shape (B, H, W, C)\n        block_size (int): edge length of a single square block in units of H, W\n    \"\"\"\n    B, H, W, C  = x.shape\n    _assert(H % block_size == 0, '`block_size` must divide input height evenly')\n    _assert(W % block_size == 0, '`block_size` must divide input width evenly')\n    grid_height = H // block_size\n    grid_width = W // block_size\n    x = x.reshape(B, grid_height, block_size, grid_width, block_size, C)\n    x = x.transpose(2, 3).reshape(B, grid_height * grid_width, -1, C)\n    return x  # (B, T, N, C)\n\n\n@register_notrace_function  # reason: int receives 
Proxy\ndef deblockify(x, block_size: int):\n    \"\"\"blocks to image\n    Args:\n        x (Tensor): with shape (B, T, N, C) where T is number of blocks and N is sequence size per block\n        block_size (int): edge length of a single square block in units of desired H, W\n    \"\"\"\n    B, T, _, C = x.shape\n    grid_size = int(math.sqrt(T))\n    height = width = grid_size * block_size\n    x = x.reshape(B, grid_size, grid_size, block_size, block_size, C)\n    x = x.transpose(2, 3).reshape(B, height, width, C)\n    return x  # (B, H, W, C)\n\n\nclass NestLevel(nn.Module):\n    \"\"\" Single hierarchical level of a Nested Transformer\n    \"\"\"\n    def __init__(\n            self,\n            num_blocks: int,\n            block_size: int,\n            seq_length: int,\n            num_heads: int,\n            depth: int,\n            embed_dim: int,\n            prev_embed_dim: Optional[int] = None,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: Optional[List[float]] = None,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            act_layer: Optional[Type[nn.Module]] = None,\n            pad_type: str = '',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.block_size = block_size\n        self.grad_checkpointing = False\n\n        self.pos_embed = nn.Parameter(torch.zeros(1, num_blocks, seq_length, embed_dim, **dd))\n\n        if prev_embed_dim is not None:\n            self.pool = ConvPool(prev_embed_dim, embed_dim, norm_layer=norm_layer, pad_type=pad_type, **dd)\n        else:\n            self.pool = nn.Identity()\n\n        # Transformer encoder\n        if len(drop_path):\n            assert len(drop_path) == depth, 'Must provide as many drop path rates as there are transformer layers'\n        self.transformer_encoder = 
nn.Sequential(*[\n            TransformerLayer(\n                dim=embed_dim,\n                num_heads=num_heads,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                proj_drop=proj_drop,\n                attn_drop=attn_drop,\n                drop_path=drop_path[i] if drop_path else None,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                **dd,\n            )\n            for i in range(depth)])\n\n    def forward(self, x):\n        \"\"\"\n        expects x as (B, C, H, W)\n        \"\"\"\n        x = self.pool(x)\n        x = x.permute(0, 2, 3, 1)  # (B, H', W', C), switch to channels last for transformer\n        x = blockify(x, self.block_size)  # (B, T, N, C')\n        x = x + self.pos_embed\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.transformer_encoder, x)\n        else:\n            x = self.transformer_encoder(x)  # (B, T, N, C')\n        x = deblockify(x, self.block_size)  # (B, H', W', C')\n        # Channel-first for block aggregation, and generally to replicate convnet feature map at each stage\n        return x.permute(0, 3, 1, 2)  # (B, C, H', W')\n\n\nclass Nest(nn.Module):\n    \"\"\" Nested Transformer (NesT)\n\n    A PyTorch impl of : `Aggregating Nested Transformers`\n        - https://arxiv.org/abs/2105.12723\n    \"\"\"\n\n    def __init__(\n            self,\n            img_size: int = 224,\n            in_chans: int = 3,\n            patch_size: int = 4,\n            num_levels: int = 3,\n            embed_dims: Tuple[int, ...] = (128, 256, 512),\n            num_heads: Tuple[int, ...] = (4, 8, 16),\n            depths: Tuple[int, ...] 
= (2, 2, 20),\n            num_classes: int = 1000,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.5,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            act_layer: Optional[Type[nn.Module]] = None,\n            pad_type: str = '',\n            weight_init: str = '',\n            global_pool: str = 'avg',\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            img_size (int, tuple): input image size\n            in_chans (int): number of input channels\n            patch_size (int): patch size\n            num_levels (int): number of block hierarchies (T_d in the paper)\n            embed_dims (int, tuple): embedding dimensions of each level\n            num_heads (int, tuple): number of attention heads for each level\n            depths (int, tuple): number of transformer layers for each level\n            num_classes (int): number of classes for classification head\n            mlp_ratio (int): ratio of mlp hidden dim to embedding dim for MLP of transformer layers\n            qkv_bias (bool): enable bias for qkv if True\n            drop_rate (float): dropout rate for MLP of transformer layers, MSA final projection layer, and classifier\n            attn_drop_rate (float): attention dropout rate\n            drop_path_rate (float): stochastic depth rate\n            norm_layer: (nn.Module): normalization layer for transformer layers\n            act_layer: (nn.Module): activation layer in MLP of transformer layers\n            pad_type: str: Type of padding to use '' for PyTorch symmetric, 'same' for TF SAME\n            weight_init: (str): weight init scheme\n            global_pool: (str): type of pooling operation to apply to final feature map\n\n        Notes:\n            - Default values follow NesT-B from the 
original Jax code.\n            - `embed_dims`, `num_heads`, `depths` should be ints or tuples with length `num_levels`.\n            - For those following the paper, Table A1 may have errors!\n                - https://github.com/google-research/nested-transformer/issues/2\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        for param_name in ['embed_dims', 'num_heads', 'depths']:\n            param_value = locals()[param_name]\n            if isinstance(param_value, collections.abc.Sequence):\n                assert len(param_value) == num_levels, f'Require `len({param_name}) == num_levels`'\n\n        embed_dims = to_ntuple(num_levels)(embed_dims)\n        num_heads = to_ntuple(num_levels)(num_heads)\n        depths = to_ntuple(num_levels)(depths)\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.num_features = self.head_hidden_size = embed_dims[-1]\n        self.feature_info = []\n        norm_layer = norm_layer or LayerNorm\n        act_layer = act_layer or nn.GELU\n        self.drop_rate = drop_rate\n        self.num_levels = num_levels\n        if isinstance(img_size, collections.abc.Sequence):\n            assert img_size[0] == img_size[1], 'Model only handles square inputs'\n            img_size = img_size[0]\n        assert img_size % patch_size == 0, '`patch_size` must divide `img_size` evenly'\n        self.patch_size = patch_size\n\n        # Number of blocks at each level\n        self.num_blocks = (4 ** torch.arange(num_levels, device='cpu', dtype=torch.long)).flip(0).tolist()\n        assert (img_size // patch_size) % math.sqrt(self.num_blocks[0]) == 0, \\\n            'First level blocks don\\'t fit evenly. Check `img_size`, `patch_size`, and `num_levels`'\n\n        # Block edge size in units of patches\n        # Hint: (img_size // patch_size) gives number of patches along edge of image. 
sqrt(self.num_blocks[0]) is the\n        #  number of blocks along edge of image\n        self.block_size = int((img_size // patch_size) // math.sqrt(self.num_blocks[0]))\n\n        # Patch embedding\n        self.patch_embed = PatchEmbed(\n            img_size=img_size,\n            patch_size=patch_size,\n            in_chans=in_chans,\n            embed_dim=embed_dims[0],\n            flatten=False,\n            **dd,\n        )\n        self.num_patches = self.patch_embed.num_patches\n        self.seq_length = self.num_patches // self.num_blocks[0]\n\n        # Build up each hierarchical level\n        levels = []\n        dp_rates = calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)\n        prev_dim = None\n        curr_stride = 4\n        for i in range(len(self.num_blocks)):\n            dim = embed_dims[i]\n            levels.append(NestLevel(\n                self.num_blocks[i],\n                self.block_size,\n                self.seq_length,\n                num_heads[i],\n                depths[i],\n                dim,\n                prev_dim,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dp_rates[i],\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                pad_type=pad_type,\n                **dd,\n            ))\n            self.feature_info += [dict(num_chs=dim, reduction=curr_stride, module=f'levels.{i}')]\n            prev_dim = dim\n            curr_stride *= 2\n        self.levels = nn.Sequential(*levels)\n\n        # Final normalization layer\n        self.norm = norm_layer(embed_dims[-1], **dd)\n\n        # Classifier\n        global_pool, head = create_classifier(self.num_features, self.num_classes, pool_type=global_pool, **dd)\n        self.global_pool = global_pool\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = head\n\n    
    self.init_weights(weight_init)\n\n    @torch.jit.ignore\n    def init_weights(self, mode=''):\n        assert mode in ('nlhb', '')\n        head_bias = -math.log(self.num_classes) if 'nlhb' in mode else 0.\n        for level in self.levels:\n            trunc_normal_(level.pos_embed, std=.02, a=-2, b=2)\n        named_apply(partial(_init_nest_weights, head_bias=head_bias), self)\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {f'level.{i}.pos_embed' for i in range(len(self.levels))}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^patch_embed',  # stem and embed\n            blocks=[\n                (r'^levels\\.(\\d+)' if coarse else r'^levels\\.(\\d+)\\.transformer_encoder\\.(\\d+)', None),\n                (r'^levels\\.(\\d+)\\.(?:pool|pos_embed)', (0,)),\n                (r'^norm', (99999,))\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        for l in self.levels:\n            l.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg'):\n        self.num_classes = num_classes\n        self.global_pool, self.head = create_classifier(\n            self.num_features, self.num_classes, pool_type=global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if 
None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.levels), indices)\n\n        # forward pass\n        x = self.patch_embed(x)\n        last_idx = len(self.num_blocks) - 1\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.levels\n        else:\n            stages = self.levels[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            x = stage(x)\n            if feat_idx in take_indices:\n                if norm and feat_idx == last_idx:\n                    x_inter = self.norm(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2)\n                    intermediates.append(x_inter)\n                else:\n                    intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        if feat_idx == last_idx:\n            # Layer norm done over channel dim only (to NHWC and back)\n            x = self.norm(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.levels), indices)\n        self.levels = self.levels[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_norm:\n            self.norm = 
nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.patch_embed(x)\n        x = self.levels(x)\n        # Layer norm done over channel dim only (to NHWC and back)\n        x = self.norm(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        x = self.head_drop(x)\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _init_nest_weights(module: nn.Module, name: str = '', head_bias: float = 0.):\n    \"\"\" NesT weight initialization\n    Can replicate Jax implementation. Otherwise follows vision_transformer.py\n    \"\"\"\n    if isinstance(module, nn.Linear):\n        if name.startswith('head'):\n            trunc_normal_(module.weight, std=.02, a=-2, b=2)\n            nn.init.constant_(module.bias, head_bias)\n        else:\n            trunc_normal_(module.weight, std=.02, a=-2, b=2)\n            if module.bias is not None:\n                nn.init.zeros_(module.bias)\n    elif isinstance(module, nn.Conv2d):\n        trunc_normal_(module.weight, std=.02, a=-2, b=2)\n        if module.bias is not None:\n            nn.init.zeros_(module.bias)\n\n\ndef resize_pos_embed(posemb, posemb_new):\n    \"\"\"\n    Rescale the grid of position embeddings when loading from state_dict\n    Expected shape of position embeddings is (1, T, N, C), and considers only square images\n    \"\"\"\n    _logger.info('Resized position embedding: %s to %s', posemb.shape, posemb_new.shape)\n    seq_length_old = posemb.shape[2]\n    num_blocks_new, seq_length_new = posemb_new.shape[1:3]\n    size_new = int(math.sqrt(num_blocks_new*seq_length_new))\n    # First change to (1, C, H, W)\n    posemb = deblockify(posemb, 
int(math.sqrt(seq_length_old))).permute(0, 3, 1, 2)\n    posemb = F.interpolate(posemb, size=[size_new, size_new], mode='bicubic', align_corners=False)\n    # Now change to new (1, T, N, C)\n    posemb = blockify(posemb.permute(0, 2, 3, 1), int(math.sqrt(seq_length_new)))\n    return posemb\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    \"\"\" resize positional embeddings of pretrained weights \"\"\"\n    pos_embed_keys = [k for k in state_dict.keys() if k.startswith('pos_embed_')]\n    for k in pos_embed_keys:\n        if state_dict[k].shape != getattr(model, k).shape:\n            state_dict[k] = resize_pos_embed(state_dict[k], getattr(model, k))\n    return state_dict\n\n\ndef _create_nest(variant, pretrained=False, **kwargs):\n    model = build_model_with_cfg(\n        Nest,\n        variant,\n        pretrained,\n        feature_cfg=dict(out_indices=(0, 1, 2), flatten_sequential=True),\n        pretrained_filter_fn=checkpoint_filter_fn,\n        **kwargs,\n    )\n\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': [14, 14],\n        'crop_pct': .875, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.proj', 'classifier': 'head',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'nest_base.untrained': _cfg(),\n    'nest_small.untrained': _cfg(),\n    'nest_tiny.untrained': _cfg(),\n    # (weights from official Google JAX impl, require 'SAME' padding)\n    'nest_base_jx.goog_in1k': _cfg(hf_hub_id='timm/'),\n    'nest_small_jx.goog_in1k': _cfg(hf_hub_id='timm/'),\n    'nest_tiny_jx.goog_in1k': _cfg(hf_hub_id='timm/'),\n})\n\n\n@register_model\ndef nest_base(pretrained=False, **kwargs) -> Nest:\n    \"\"\" Nest-B @ 224x224\n    \"\"\"\n    model_kwargs = dict(\n        embed_dims=(128, 
256, 512), num_heads=(4, 8, 16), depths=(2, 2, 20), **kwargs)\n    model = _create_nest('nest_base', pretrained=pretrained, **model_kwargs)\n    return model\n\n\n@register_model\ndef nest_small(pretrained=False, **kwargs) -> Nest:\n    \"\"\" Nest-S @ 224x224\n    \"\"\"\n    model_kwargs = dict(embed_dims=(96, 192, 384), num_heads=(3, 6, 12), depths=(2, 2, 20), **kwargs)\n    model = _create_nest('nest_small', pretrained=pretrained, **model_kwargs)\n    return model\n\n\n@register_model\ndef nest_tiny(pretrained=False, **kwargs) -> Nest:\n    \"\"\" Nest-T @ 224x224\n    \"\"\"\n    model_kwargs = dict(embed_dims=(96, 192, 384), num_heads=(3, 6, 12), depths=(2, 2, 8), **kwargs)\n    model = _create_nest('nest_tiny', pretrained=pretrained, **model_kwargs)\n    return model\n\n\n@register_model\ndef nest_base_jx(pretrained=False, **kwargs) -> Nest:\n    \"\"\" Nest-B @ 224x224\n    \"\"\"\n    kwargs.setdefault('pad_type', 'same')\n    model_kwargs = dict(\n        embed_dims=(128, 256, 512), num_heads=(4, 8, 16), depths=(2, 2, 20), **kwargs)\n    model = _create_nest('nest_base_jx', pretrained=pretrained, **model_kwargs)\n    return model\n\n\n@register_model\ndef nest_small_jx(pretrained=False, **kwargs) -> Nest:\n    \"\"\" Nest-S @ 224x224\n    \"\"\"\n    kwargs.setdefault('pad_type', 'same')\n    model_kwargs = dict(embed_dims=(96, 192, 384), num_heads=(3, 6, 12), depths=(2, 2, 20), **kwargs)\n    model = _create_nest('nest_small_jx', pretrained=pretrained, **model_kwargs)\n    return model\n\n\n@register_model\ndef nest_tiny_jx(pretrained=False, **kwargs) -> Nest:\n    \"\"\" Nest-T @ 224x224\n    \"\"\"\n    kwargs.setdefault('pad_type', 'same')\n    model_kwargs = dict(embed_dims=(96, 192, 384), num_heads=(3, 6, 12), depths=(2, 2, 8), **kwargs)\n    model = _create_nest('nest_tiny_jx', pretrained=pretrained, **model_kwargs)\n    return model\n\n\nregister_model_deprecations(__name__, {\n    'jx_nest_base': 'nest_base_jx',\n    'jx_nest_small': 
'nest_small_jx',\n    'jx_nest_tiny': 'nest_tiny_jx',\n})"
  },
  {
    "path": "timm/models/nextvit.py",
    "content": "\"\"\" Next-ViT\n\nAs described in https://arxiv.org/abs/2207.05501\n\nNext-ViT model defs and weights adapted from https://github.com/bytedance/Next-ViT, original copyright below\n\"\"\"\n# Copyright (c) ByteDance Inc. All rights reserved.\nfrom functools import partial\nfrom typing import List, Optional, Tuple, Union, Type\n\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import DropPath, calculate_drop_path_rates, trunc_normal_, ConvMlp, get_norm_layer, get_act_layer, use_fused_attn\nfrom timm.layers import ClassifierHead\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint, checkpoint_seq\nfrom ._registry import generate_default_cfgs, register_model\n\n__all__ = ['NextViT']\n\n\ndef merge_pre_bn(module, pre_bn_1, pre_bn_2=None):\n    \"\"\" Merge pre BN to reduce inference runtime.\n    \"\"\"\n    weight = module.weight.data\n    if module.bias is None:\n        zeros = torch.zeros(module.out_chs, device=weight.device).type(weight.type())\n        module.bias = nn.Parameter(zeros)\n    bias = module.bias.data\n    if pre_bn_2 is None:\n        assert pre_bn_1.track_running_stats is True, \"Unsupported bn_module.track_running_stats is False\"\n        assert pre_bn_1.affine is True, \"Unsupported bn_module.affine is False\"\n\n        scale_invstd = pre_bn_1.running_var.add(pre_bn_1.eps).pow(-0.5)\n        extra_weight = scale_invstd * pre_bn_1.weight\n        extra_bias = pre_bn_1.bias - pre_bn_1.weight * pre_bn_1.running_mean * scale_invstd\n    else:\n        assert pre_bn_1.track_running_stats is True, \"Unsupported bn_module.track_running_stats is False\"\n        assert pre_bn_1.affine is True, \"Unsupported bn_module.affine is False\"\n\n        assert pre_bn_2.track_running_stats is True, \"Unsupported bn_module.track_running_stats is False\"\n        assert 
pre_bn_2.affine is True, \"Unsupported bn_module.affine is False\"\n\n        scale_invstd_1 = pre_bn_1.running_var.add(pre_bn_1.eps).pow(-0.5)\n        scale_invstd_2 = pre_bn_2.running_var.add(pre_bn_2.eps).pow(-0.5)\n\n        extra_weight = scale_invstd_1 * pre_bn_1.weight * scale_invstd_2 * pre_bn_2.weight\n        extra_bias = (\n                scale_invstd_2 * pre_bn_2.weight\n                * (pre_bn_1.bias - pre_bn_1.weight * pre_bn_1.running_mean * scale_invstd_1 - pre_bn_2.running_mean)\n                + pre_bn_2.bias\n        )\n\n    if isinstance(module, nn.Linear):\n        extra_bias = weight @ extra_bias\n        weight.mul_(extra_weight.view(1, weight.size(1)).expand_as(weight))\n    elif isinstance(module, nn.Conv2d):\n        assert weight.shape[2] == 1 and weight.shape[3] == 1\n        weight = weight.reshape(weight.shape[0], weight.shape[1])\n        extra_bias = weight @ extra_bias\n        weight.mul_(extra_weight.view(1, weight.size(1)).expand_as(weight))\n        weight = weight.reshape(weight.shape[0], weight.shape[1], 1, 1)\n    bias.add_(extra_bias)\n\n    module.weight.data = weight\n    module.bias.data = bias\n\n\nclass ConvNormAct(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 3,\n            stride: int = 1,\n            groups: int = 1,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv = nn.Conv2d(\n            in_chs,\n            out_chs,\n            kernel_size=kernel_size,\n            stride=stride,\n            padding=1,\n            groups=groups,\n            bias=False,\n            **dd,\n        )\n        self.norm = norm_layer(out_chs, **dd)\n        self.act = act_layer()\n\n    def forward(self, x):\n        x = 
self.conv(x)\n        x = self.norm(x)\n        x = self.act(x)\n        return x\n\n\ndef _make_divisible(v, divisor, min_value=None):\n    if min_value is None:\n        min_value = divisor\n    new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)\n    # Make sure that round down does not go down by more than 10%.\n    if new_v < 0.9 * v:\n        new_v += divisor\n    return new_v\n\n\nclass PatchEmbed(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            stride: int = 1,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if stride == 2:\n            self.pool = nn.AvgPool2d((2, 2), stride=2, ceil_mode=True, count_include_pad=False)\n            self.conv = nn.Conv2d(in_chs, out_chs, kernel_size=1, stride=1, bias=False, **dd)\n            self.norm = norm_layer(out_chs, **dd)\n        elif in_chs != out_chs:\n            self.pool = nn.Identity()\n            self.conv = nn.Conv2d(in_chs, out_chs, kernel_size=1, stride=1, bias=False, **dd)\n            self.norm = norm_layer(out_chs, **dd)\n        else:\n            self.pool = nn.Identity()\n            self.conv = nn.Identity()\n            self.norm = nn.Identity()\n\n    def forward(self, x):\n        return self.norm(self.conv(self.pool(x)))\n\n\nclass ConvAttention(nn.Module):\n    \"\"\"\n    Multi-Head Convolutional Attention\n    \"\"\"\n\n    def __init__(\n            self,\n            out_chs: int,\n            head_dim: int,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.group_conv3x3 = nn.Conv2d(\n            out_chs,\n            out_chs,\n            
kernel_size=3,\n            stride=1,\n            padding=1,\n            groups=out_chs // head_dim,\n            bias=False,\n            **dd,\n        )\n        self.norm = norm_layer(out_chs, **dd)\n        self.act = act_layer()\n        self.projection = nn.Conv2d(out_chs, out_chs, kernel_size=1, bias=False, **dd)\n\n    def forward(self, x):\n        out = self.group_conv3x3(x)\n        out = self.norm(out)\n        out = self.act(out)\n        out = self.projection(out)\n        return out\n\nclass NextConvBlock(nn.Module):\n    \"\"\"\n    Next Convolution Block\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            stride: int = 1,\n            drop_path: float = 0.,\n            drop: float = 0.,\n            head_dim: int = 32,\n            mlp_ratio: float = 3.,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.in_chs = in_chs\n        self.out_chs = out_chs\n        assert out_chs % head_dim == 0\n\n        self.patch_embed = PatchEmbed(in_chs, out_chs, stride, norm_layer=norm_layer, **dd)\n        self.mhca = ConvAttention(\n            out_chs,\n            head_dim,\n            norm_layer=norm_layer,\n            act_layer=act_layer,\n            **dd,\n        )\n        self.attn_drop_path = DropPath(drop_path)\n\n        self.norm = norm_layer(out_chs, **dd)\n        self.mlp = ConvMlp(\n            out_chs,\n            hidden_features=int(out_chs * mlp_ratio),\n            drop=drop,\n            bias=True,\n            act_layer=act_layer,\n            **dd,\n        )\n        self.mlp_drop_path = DropPath(drop_path)\n        self.is_fused = False\n\n    @torch.no_grad()\n    def reparameterize(self):\n        if not self.is_fused:\n            
merge_pre_bn(self.mlp.fc1, self.norm)\n            self.norm = nn.Identity()\n            self.is_fused = True\n\n    def forward(self, x):\n        x = self.patch_embed(x)\n        x = x + self.attn_drop_path(self.mhca(x))\n\n        out = self.norm(x)\n        x = x + self.mlp_drop_path(self.mlp(out))\n        return x\n\n\nclass EfficientAttention(nn.Module):\n    \"\"\"\n    Efficient Multi-Head Self Attention\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            out_dim: Optional[int] = None,\n            head_dim: int = 32,\n            qkv_bias: bool = True,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            sr_ratio: int = 1,\n            norm_layer: Type[nn.Module] = nn.BatchNorm1d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.out_dim = out_dim if out_dim is not None else dim\n        self.num_heads = self.dim // head_dim\n        self.head_dim = head_dim\n        self.scale = head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n\n        self.q = nn.Linear(dim, self.dim, bias=qkv_bias, **dd)\n        self.k = nn.Linear(dim, self.dim, bias=qkv_bias, **dd)\n        self.v = nn.Linear(dim, self.dim, bias=qkv_bias, **dd)\n        self.proj = nn.Linear(self.dim, self.out_dim, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n        self.sr_ratio = sr_ratio\n        self.N_ratio = sr_ratio ** 2\n        if sr_ratio > 1:\n            self.sr = nn.AvgPool1d(kernel_size=self.N_ratio, stride=self.N_ratio)\n            self.norm = norm_layer(dim, **dd)\n        else:\n            self.sr = None\n            self.norm = None\n\n    def forward(self, x):\n        B, N, C = x.shape\n        q = self.q(x).reshape(B, N, self.num_heads, self.head_dim).permute(0, 2, 1, 3)\n\n   
     if self.sr is not None:\n            x = self.sr(x.transpose(1, 2))\n            x = self.norm(x).transpose(1, 2)\n\n        k = self.k(x).reshape(B, -1, self.num_heads, self.head_dim).transpose(1, 2)\n        v = self.v(x).reshape(B, -1, self.num_heads, self.head_dim).transpose(1, 2)\n\n        if self.fused_attn:\n            x = F.scaled_dot_product_attention(\n                q, k, v,\n                dropout_p=self.attn_drop.p if self.training else 0.,\n            )\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-1, -2)\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x = attn @ v\n\n        x = x.transpose(1, 2).reshape(B, N, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n\nclass NextTransformerBlock(nn.Module):\n    \"\"\"\n    Next Transformer Block\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            drop_path: float,\n            stride: int = 1,\n            sr_ratio: int = 1,\n            mlp_ratio: float = 2,\n            head_dim: int = 32,\n            mix_block_ratio: float = 0.75,\n            attn_drop: float = 0.,\n            drop: float = 0.,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.in_chs = in_chs\n        self.out_chs = out_chs\n        self.mix_block_ratio = mix_block_ratio\n\n        self.mhsa_out_chs = _make_divisible(int(out_chs * mix_block_ratio), 32)\n        self.mhca_out_chs = out_chs - self.mhsa_out_chs\n\n        self.patch_embed = PatchEmbed(in_chs, self.mhsa_out_chs, stride, **dd)\n        self.norm1 = norm_layer(self.mhsa_out_chs, **dd)\n        self.e_mhsa = EfficientAttention(\n            self.mhsa_out_chs,\n            
head_dim=head_dim,\n            sr_ratio=sr_ratio,\n            attn_drop=attn_drop,\n            proj_drop=drop,\n            **dd,\n        )\n        self.mhsa_drop_path = DropPath(drop_path * mix_block_ratio)\n\n        self.projection = PatchEmbed(\n            self.mhsa_out_chs,\n            self.mhca_out_chs,\n            stride=1,\n            norm_layer=norm_layer,\n            **dd,\n        )\n        self.mhca = ConvAttention(\n            self.mhca_out_chs,\n            head_dim=head_dim,\n            norm_layer=norm_layer,\n            act_layer=act_layer,\n            **dd,\n        )\n        self.mhca_drop_path = DropPath(drop_path * (1 - mix_block_ratio))\n\n        self.norm2 = norm_layer(out_chs, **dd)\n        self.mlp = ConvMlp(\n            out_chs,\n            hidden_features=int(out_chs * mlp_ratio),\n            act_layer=act_layer,\n            drop=drop,\n            **dd,\n        )\n        self.mlp_drop_path = DropPath(drop_path)\n        self.is_fused = False\n\n    @torch.no_grad()\n    def reparameterize(self):\n        if not self.is_fused:\n            merge_pre_bn(self.e_mhsa.q, self.norm1)\n            if self.e_mhsa.norm is not None:\n                merge_pre_bn(self.e_mhsa.k, self.norm1, self.e_mhsa.norm)\n                merge_pre_bn(self.e_mhsa.v, self.norm1, self.e_mhsa.norm)\n                self.e_mhsa.norm = nn.Identity()\n            else:\n                merge_pre_bn(self.e_mhsa.k, self.norm1)\n                merge_pre_bn(self.e_mhsa.v, self.norm1)\n            self.norm1 = nn.Identity()\n\n            merge_pre_bn(self.mlp.fc1, self.norm2)\n            self.norm2 = nn.Identity()\n            self.is_fused = True\n\n    def forward(self, x):\n        x = self.patch_embed(x)\n        B, C, H, W = x.shape\n\n        out = self.norm1(x)\n        out = out.reshape(B, C, -1).transpose(-1, -2)\n        out = self.mhsa_drop_path(self.e_mhsa(out))\n        x = x + out.transpose(-1, -2).reshape(B, C, H, W)\n\n        out = 
self.projection(x)\n        out = out + self.mhca_drop_path(self.mhca(out))\n        x = torch.cat([x, out], dim=1)\n\n        out = self.norm2(x)\n        x = x + self.mlp_drop_path(self.mlp(out))\n        return x\n\n\nclass NextStage(nn.Module):\n\n    def __init__(\n            self,\n            in_chs: int,\n            block_chs: List[int],\n            block_types: List[Type[nn.Module]],\n            stride: int = 2,\n            sr_ratio: int = 1,\n            mix_block_ratio: float = 1.0,\n            drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: Union[float, List[float], Tuple[float, ...]] = 0.,\n            head_dim: int = 32,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.grad_checkpointing = False\n\n        blocks = []\n        for block_idx, block_type in enumerate(block_types):\n            stride = stride if block_idx == 0 else 1\n            out_chs = block_chs[block_idx]\n            block_type = block_types[block_idx]\n            dpr = drop_path[block_idx] if isinstance(drop_path, (list, tuple)) else drop_path\n            if block_type is NextConvBlock:\n                layer = NextConvBlock(\n                    in_chs,\n                    out_chs,\n                    stride=stride,\n                    drop_path=dpr,\n                    drop=drop,\n                    head_dim=head_dim,\n                    norm_layer=norm_layer,\n                    act_layer=act_layer,\n                    **dd,\n                )\n                blocks.append(layer)\n            elif block_type is NextTransformerBlock:\n                layer = NextTransformerBlock(\n                    in_chs,\n                    out_chs,\n                    drop_path=dpr,\n                    stride=stride,\n                
    sr_ratio=sr_ratio,\n                    head_dim=head_dim,\n                    mix_block_ratio=mix_block_ratio,\n                    attn_drop=attn_drop,\n                    drop=drop,\n                    norm_layer=norm_layer,\n                    act_layer=act_layer,\n                    **dd,\n                )\n                blocks.append(layer)\n            in_chs = out_chs\n\n        self.blocks = nn.Sequential(*blocks)\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    def forward(self, x):\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        return x\n\n\nclass NextViT(nn.Module):\n    def __init__(\n            self,\n            in_chans: int,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            stem_chs: Tuple[int, ...] = (64, 32, 64),\n            depths: Tuple[int, ...] = (3, 4, 10, 3),\n            strides: Tuple[int, ...] = (1, 2, 2, 2),\n            sr_ratios: Tuple[int, ...] 
= (8, 4, 2, 1),\n            drop_path_rate: float = 0.1,\n            attn_drop_rate: float = 0.,\n            drop_rate: float = 0.,\n            head_dim: int = 32,\n            mix_block_ratio: float = 0.75,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            act_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.grad_checkpointing = False\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        norm_layer = get_norm_layer(norm_layer)\n        if act_layer is None:\n            act_layer = partial(nn.ReLU, inplace=True)\n        else:\n            act_layer = get_act_layer(act_layer)\n\n        self.stage_out_chs = [\n            [96] * (depths[0]),\n            [192] * (depths[1] - 1) + [256],\n            [384, 384, 384, 384, 512] * (depths[2] // 5),\n            [768] * (depths[3] - 1) + [1024]\n        ]\n        self.feature_info = [dict(\n            num_chs=sc[-1],\n            reduction=2**(i + 2),\n            module=f'stages.{i}'\n        ) for i, sc in enumerate(self.stage_out_chs)]\n\n        # Next Hybrid Strategy\n        self.stage_block_types = [\n            [NextConvBlock] * depths[0],\n            [NextConvBlock] * (depths[1] - 1) + [NextTransformerBlock],\n            [NextConvBlock, NextConvBlock, NextConvBlock, NextConvBlock, NextTransformerBlock] * (depths[2] // 5),\n            [NextConvBlock] * (depths[3] - 1) + [NextTransformerBlock]]\n\n        self.stem = nn.Sequential(\n            ConvNormAct(\n                in_chans, stem_chs[0], kernel_size=3, stride=2, norm_layer=norm_layer, act_layer=act_layer, **dd),\n            ConvNormAct(\n                stem_chs[0], stem_chs[1], kernel_size=3, stride=1, norm_layer=norm_layer, act_layer=act_layer, **dd),\n            ConvNormAct(\n                stem_chs[1], stem_chs[2], kernel_size=3, stride=1, 
norm_layer=norm_layer, act_layer=act_layer, **dd),\n            ConvNormAct(\n                stem_chs[2], stem_chs[2], kernel_size=3, stride=2, norm_layer=norm_layer, act_layer=act_layer, **dd),\n        )\n        in_chs = out_chs = stem_chs[-1]\n        stages = []\n        idx = 0\n        dpr = calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)\n        for stage_idx in range(len(depths)):\n            stage = NextStage(\n                in_chs=in_chs,\n                block_chs=self.stage_out_chs[stage_idx],\n                block_types=self.stage_block_types[stage_idx],\n                stride=strides[stage_idx],\n                sr_ratio=sr_ratios[stage_idx],\n                mix_block_ratio=mix_block_ratio,\n                head_dim=head_dim,\n                drop=drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dpr[stage_idx],\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                **dd,\n            )\n            in_chs = out_chs = self.stage_out_chs[stage_idx][-1]\n            stages += [stage]\n            idx += depths[stage_idx]\n        self.num_features = self.head_hidden_size = out_chs\n        self.stages = nn.Sequential(*stages)\n        self.norm = norm_layer(out_chs, **dd)\n        self.head = ClassifierHead(pool_type=global_pool, in_features=out_chs, num_classes=num_classes, **dd)\n\n        self.stage_out_idx = [sum(depths[:idx + 1]) - 1 for idx in range(len(depths))]\n        self._initialize_weights()\n\n    def _initialize_weights(self):\n        for n, m in self.named_modules():\n            if isinstance(m, nn.Linear):\n                trunc_normal_(m.weight, std=.02)\n                if hasattr(m, 'bias') and m.bias is not None:\n                    nn.init.constant_(m.bias, 0)\n            elif isinstance(m, nn.Conv2d):\n                trunc_normal_(m.weight, std=.02)\n                if hasattr(m, 'bias') and m.bias is not None:\n                  
  nn.init.constant_(m.bias, 0)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^stem',  # stem and embed\n            blocks=r'^stages\\.(\\d+)' if coarse else [\n                (r'^stages\\.(\\d+)\\.blocks\\.(\\d+)', None),\n                (r'^norm', (99999,)),\n            ]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n        for stage in self.stages:\n            stage.set_grad_checkpointing(enable=enable)\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, pool_type=global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.stem(x)\n     
   last_idx = len(self.stages) - 1\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(stage, x)\n            else:\n                x = stage(x)\n            if feat_idx in take_indices:\n                if feat_idx == last_idx:\n                    x_inter = self.norm(x) if norm else x\n                    intermediates.append(x_inter)\n                else:\n                    intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        if feat_idx == last_idx:\n            x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stages, x)\n        else:\n            x = self.stages(x)\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = 
self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    \"\"\" Remap original checkpoints -> timm \"\"\"\n    if 'head.fc.weight' in state_dict:\n        return state_dict  # non-original\n\n    D = model.state_dict()\n    out_dict = {}\n    # remap originals based on order\n    for ka, kb, va, vb in zip(D.keys(), state_dict.keys(), D.values(), state_dict.values()):\n        out_dict[ka] = vb\n\n    return out_dict\n\n\ndef _create_nextvit(variant, pretrained=False, **kwargs):\n    default_out_indices = tuple(i for i, _ in enumerate(kwargs.get('depths', (1, 1, 3, 1))))\n    out_indices = kwargs.pop('out_indices', default_out_indices)\n\n    model = build_model_with_cfg(\n        NextViT,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        **kwargs)\n\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.95, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.0.conv', 'classifier': 'head.fc',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'nextvit_small.bd_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'nextvit_base.bd_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'nextvit_large.bd_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'nextvit_small.bd_in1k_384': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0,\n    ),\n    'nextvit_base.bd_in1k_384': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0,\n    ),\n    'nextvit_large.bd_in1k_384': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), 
pool_size=(12, 12), crop_pct=1.0,\n    ),\n\n    'nextvit_small.bd_ssld_6m_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'nextvit_base.bd_ssld_6m_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'nextvit_large.bd_ssld_6m_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'nextvit_small.bd_ssld_6m_in1k_384': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0,\n    ),\n    'nextvit_base.bd_ssld_6m_in1k_384': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0,\n    ),\n    'nextvit_large.bd_ssld_6m_in1k_384': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0,\n    ),\n})\n\n\n@register_model\ndef nextvit_small(pretrained=False, **kwargs):\n    model_args = dict(depths=(3, 4, 10, 3), drop_path_rate=0.1)\n    model = _create_nextvit(\n        'nextvit_small', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef nextvit_base(pretrained=False, **kwargs):\n    model_args = dict(depths=(3, 4, 20, 3), drop_path_rate=0.2)\n    model = _create_nextvit(\n        'nextvit_base', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef nextvit_large(pretrained=False, **kwargs):\n    model_args = dict(depths=(3, 4, 30, 3), drop_path_rate=0.2)\n    model = _create_nextvit(\n        'nextvit_large', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n"
  },
  {
    "path": "timm/models/nfnet.py",
    "content": "\"\"\" Normalization Free Nets. NFNet, NF-RegNet, NF-ResNet (pre-activation) Models\n\nPaper: `Characterizing signal propagation to close the performance gap in unnormalized ResNets`\n    - https://arxiv.org/abs/2101.08692\n\nPaper: `High-Performance Large-Scale Image Recognition Without Normalization`\n    - https://arxiv.org/abs/2102.06171\n\nOfficial Deepmind JAX code: https://github.com/deepmind/deepmind-research/tree/master/nfnets\n\nStatus:\n* These models are a work in progress, experiments ongoing.\n* Pretrained weights for two models so far, more to come.\n* Model details updated to closer match official JAX code now that it's released\n* NF-ResNet, NF-RegNet-B, and NFNet-F models supported\n\nHacked together by / copyright Ross Wightman, 2021.\n\"\"\"\nfrom collections import OrderedDict\nfrom dataclasses import dataclass, replace\nfrom functools import partial\nfrom typing import Any, Callable, Dict, Optional, Tuple\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import ClassifierHead, DropPath, calculate_drop_path_rates, AvgPool2dSame, ScaledStdConv2d, ScaledStdConv2dSame, \\\n    get_act_layer, get_act_fn, get_attn, make_divisible\nfrom ._builder import build_model_with_cfg\nfrom ._features_fx import register_notrace_module\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import generate_default_cfgs, register_model\n\n__all__ = ['NormFreeNet', 'NfCfg']  # model_registry will add each entrypoint fn to this\n\n\n@dataclass\nclass NfCfg:\n    \"\"\"Configuration for Normalization-Free Networks.\"\"\"\n    depths: Tuple[int, int, int, int]\n    channels: Tuple[int, int, int, int]\n    alpha: float = 0.2\n    stem_type: str = '3x3'\n    stem_chs: Optional[int] = None\n    group_size: Optional[int] = None\n    attn_layer: Optional[str] = None\n    attn_kwargs: Optional[Dict[str, Any]] = None\n    attn_gain: float = 2.0  # NF correction gain to apply if 
attn layer is used\n    width_factor: float = 1.0\n    bottle_ratio: float = 0.5\n    num_features: int = 0  # num out_channels for final conv, no final_conv if 0\n    ch_div: int = 8  # round channels % 8 == 0 to keep tensor-core use optimal\n    reg: bool = False  # enables EfficientNet-like options used in RegNet variants, expand from in_chs, se in middle\n    extra_conv: bool = False  # extra 3x3 bottleneck convolution for NFNet models\n    gamma_in_act: bool = False\n    same_padding: bool = False\n    std_conv_eps: float = 1e-5\n    skipinit: bool = False  # disabled by default, non-trivial performance impact\n    zero_init_fc: bool = False\n    act_layer: str = 'silu'\n\n\nclass GammaAct(nn.Module):\n    \"\"\"Activation function with gamma scaling factor.\"\"\"\n\n    def __init__(self, act_type: str = 'relu', gamma: float = 1.0, inplace: bool = False):\n        \"\"\"Initialize GammaAct.\n\n        Args:\n            act_type: Type of activation function.\n            gamma: Scaling factor for activation output.\n            inplace: Whether to perform activation in-place.\n        \"\"\"\n        super().__init__()\n        self.act_fn = get_act_fn(act_type)\n        self.gamma = gamma\n        self.inplace = inplace\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Scaled activation output.\n        \"\"\"\n        return self.act_fn(x, inplace=self.inplace).mul_(self.gamma)\n\n\ndef act_with_gamma(act_type: str, gamma: float = 1.) 
-> Callable:\n    \"\"\"Create activation function factory with gamma scaling.\n\n    Args:\n        act_type: Type of activation function.\n        gamma: Scaling factor for activation output.\n\n    Returns:\n        Activation function factory.\n    \"\"\"\n    def _create(inplace: bool = False) -> GammaAct:\n        return GammaAct(act_type, gamma=gamma, inplace=inplace)\n    return _create\n\n\nclass DownsampleAvg(nn.Module):\n    \"\"\"AvgPool downsampling as in 'D' ResNet variants with dilation support.\"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            stride: int = 1,\n            dilation: int = 1,\n            first_dilation: Optional[int] = None,\n            conv_layer: Callable = ScaledStdConv2d,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize DownsampleAvg.\n\n        Args:\n            in_chs: Input channels.\n            out_chs: Output channels.\n            stride: Stride for downsampling.\n            dilation: Dilation rate.\n            first_dilation: First dilation rate (unused).\n            conv_layer: Convolution layer type.\n        \"\"\"\n        super().__init__()\n        avg_stride = stride if dilation == 1 else 1\n        if stride > 1 or dilation > 1:\n            avg_pool_fn = AvgPool2dSame if avg_stride == 1 and dilation > 1 else nn.AvgPool2d\n            self.pool = avg_pool_fn(2, avg_stride, ceil_mode=True, count_include_pad=False)\n        else:\n            self.pool = nn.Identity()\n        self.conv = conv_layer(in_chs, out_chs, 1, stride=1, device=device, dtype=dtype)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Downsampled tensor.\n        \"\"\"\n        return self.conv(self.pool(x))\n\n\n@register_notrace_module  # reason: mul_ causes FX to drop a relevant node. 
https://github.com/pytorch/pytorch/issues/68301\nclass NormFreeBlock(nn.Module):\n    \"\"\"Normalization-Free pre-activation block.\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: Optional[int] = None,\n            stride: int = 1,\n            dilation: int = 1,\n            first_dilation: Optional[int] = None,\n            alpha: float = 1.0,\n            beta: float = 1.0,\n            bottle_ratio: float = 0.25,\n            group_size: Optional[int] = None,\n            ch_div: int = 1,\n            reg: bool = True,\n            extra_conv: bool = False,\n            skipinit: bool = False,\n            attn_layer: Optional[Callable] = None,\n            attn_gain: float = 2.0,\n            act_layer: Optional[Callable] = None,\n            conv_layer: Callable = ScaledStdConv2d,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize NormFreeBlock.\n\n        Args:\n            in_chs: Input channels.\n            out_chs: Output channels.\n            stride: Stride for convolution.\n            dilation: Dilation rate.\n            first_dilation: First dilation rate.\n            alpha: Alpha scaling factor for residual.\n            beta: Beta scaling factor for pre-activation.\n            bottle_ratio: Bottleneck ratio.\n            group_size: Group convolution size.\n            ch_div: Channel divisor for rounding.\n            reg: Use RegNet-style configuration.\n            extra_conv: Add extra 3x3 convolution.\n            skipinit: Use skipinit initialization.\n            attn_layer: Attention layer type.\n            attn_gain: Attention gain factor.\n            act_layer: Activation layer type.\n            conv_layer: Convolution layer type.\n            drop_path_rate: Stochastic depth drop rate.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        first_dilation = 
first_dilation or dilation\n        out_chs = out_chs or in_chs\n        # RegNet variants scale bottleneck from in_chs, otherwise scale from out_chs like ResNet\n        mid_chs = make_divisible(in_chs * bottle_ratio if reg else out_chs * bottle_ratio, ch_div)\n        groups = 1 if not group_size else mid_chs // group_size\n        if group_size and group_size % ch_div == 0:\n            mid_chs = group_size * groups  # correct mid_chs if group_size divisible by ch_div, otherwise error\n        self.alpha = alpha\n        self.beta = beta\n        self.attn_gain = attn_gain\n\n        if in_chs != out_chs or stride != 1 or dilation != first_dilation:\n            self.downsample = DownsampleAvg(\n                in_chs,\n                out_chs,\n                stride=stride,\n                dilation=dilation,\n                first_dilation=first_dilation,\n                conv_layer=conv_layer,\n                **dd,\n            )\n        else:\n            self.downsample = None\n\n        self.act1 = act_layer()\n        self.conv1 = conv_layer(in_chs, mid_chs, 1, **dd)\n        self.act2 = act_layer(inplace=True)\n        self.conv2 = conv_layer(mid_chs, mid_chs, 3, stride=stride, dilation=first_dilation, groups=groups, **dd)\n        if extra_conv:\n            self.act2b = act_layer(inplace=True)\n            self.conv2b = conv_layer(mid_chs, mid_chs, 3, stride=1, dilation=dilation, groups=groups, **dd)\n        else:\n            self.act2b = None\n            self.conv2b = None\n        if reg and attn_layer is not None:\n            self.attn = attn_layer(mid_chs, **dd)  # RegNet blocks apply attn btw conv2 & 3\n        else:\n            self.attn = None\n        self.act3 = act_layer()\n        self.conv3 = conv_layer(mid_chs, out_chs, 1, gain_init=1. 
if skipinit else 0., **dd)\n        if not reg and attn_layer is not None:\n            self.attn_last = attn_layer(out_chs, **dd)  # ResNet blocks apply attn after conv3\n        else:\n            self.attn_last = None\n        self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0 else nn.Identity()\n        self.skipinit_gain = nn.Parameter(torch.tensor(0., **dd)) if skipinit else None\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Output tensor.\n        \"\"\"\n        out = self.act1(x) * self.beta\n\n        # shortcut branch\n        shortcut = x\n        if self.downsample is not None:\n            shortcut = self.downsample(out)\n\n        # residual branch\n        out = self.conv1(out)\n        out = self.conv2(self.act2(out))\n        if self.conv2b is not None:\n            out = self.conv2b(self.act2b(out))\n        if self.attn is not None:\n            out = self.attn_gain * self.attn(out)\n        out = self.conv3(self.act3(out))\n        if self.attn_last is not None:\n            out = self.attn_gain * self.attn_last(out)\n        out = self.drop_path(out)\n\n        if self.skipinit_gain is not None:\n            out.mul_(self.skipinit_gain)\n        out = out * self.alpha + shortcut\n        return out\n\n\ndef create_stem(\n        in_chs: int,\n        out_chs: int,\n        stem_type: str = '',\n        conv_layer: Optional[Callable] = None,\n        act_layer: Optional[Callable] = None,\n        preact_feature: bool = True,\n        device=None,\n        dtype=None,\n) -> Tuple[nn.Sequential, int, Dict[str, Any]]:\n    \"\"\"Create stem module for NFNet models.\n\n    Args:\n        in_chs: Input channels.\n        out_chs: Output channels.\n        stem_type: Type of stem ('', 'deep', 'deep_tiered', 'deep_quad', '3x3', '7x7', etc.).\n        conv_layer: Convolution layer type.\n        act_layer: Activation 
layer type.\n        preact_feature: Use pre-activation feature.\n\n    Returns:\n        Tuple of (stem_module, stem_stride, stem_feature_info).\n    \"\"\"\n    dd = {'device': device, 'dtype': dtype}\n    stem_stride = 2\n    stem_feature = dict(num_chs=out_chs, reduction=2, module='stem.conv')\n    stem = OrderedDict()\n    assert stem_type in ('', 'deep', 'deep_tiered', 'deep_quad', '3x3', '7x7', 'deep_pool', '3x3_pool', '7x7_pool')\n    if 'deep' in stem_type:\n        if 'quad' in stem_type:\n            # 4 deep conv stack as in NFNet-F models\n            assert 'pool' not in stem_type\n            stem_chs = (out_chs // 8, out_chs // 4, out_chs // 2, out_chs)\n            strides = (2, 1, 1, 2)\n            stem_stride = 4\n            stem_feature = dict(num_chs=out_chs // 2, reduction=2, module='stem.conv3')\n        else:\n            if 'tiered' in stem_type:\n                stem_chs = (3 * out_chs // 8, out_chs // 2, out_chs)  # 'T' resnets in resnet.py\n            else:\n                stem_chs = (out_chs // 2, out_chs // 2, out_chs)  # 'D' ResNets\n            strides = (2, 1, 1)\n            stem_feature = dict(num_chs=out_chs // 2, reduction=2, module='stem.conv2')\n        last_idx = len(stem_chs) - 1\n        for i, (c, s) in enumerate(zip(stem_chs, strides)):\n            stem[f'conv{i + 1}'] = conv_layer(in_chs, c, kernel_size=3, stride=s, **dd)\n            if i != last_idx:\n                stem[f'act{i + 2}'] = act_layer(inplace=True)\n            in_chs = c\n    elif '3x3' in stem_type:\n        # 3x3 stem conv as in RegNet\n        stem['conv'] = conv_layer(in_chs, out_chs, kernel_size=3, stride=2, **dd)\n    else:\n        # 7x7 stem conv as in ResNet\n        stem['conv'] = conv_layer(in_chs, out_chs, kernel_size=7, stride=2, **dd)\n\n    if 'pool' in stem_type:\n        stem['pool'] = nn.MaxPool2d(3, stride=2, padding=1)\n        stem_stride = 4\n\n    return nn.Sequential(stem), stem_stride, stem_feature\n\n\n# from 
https://github.com/deepmind/deepmind-research/tree/master/nfnets\n_nonlin_gamma = dict(\n    identity=1.0,\n    celu=1.270926833152771,\n    elu=1.2716004848480225,\n    gelu=1.7015043497085571,\n    leaky_relu=1.70590341091156,\n    log_sigmoid=1.9193484783172607,\n    log_softmax=1.0002083778381348,\n    relu=1.7139588594436646,\n    relu6=1.7131484746932983,\n    selu=1.0008515119552612,\n    sigmoid=4.803835391998291,\n    silu=1.7881293296813965,\n    softsign=2.338853120803833,\n    softplus=1.9203323125839233,\n    tanh=1.5939117670059204,\n)\n\n\nclass NormFreeNet(nn.Module):\n    \"\"\" Normalization-Free Network\n\n    As described in :\n    `Characterizing signal propagation to close the performance gap in unnormalized ResNets`\n        - https://arxiv.org/abs/2101.08692\n    and\n    `High-Performance Large-Scale Image Recognition Without Normalization` - https://arxiv.org/abs/2102.06171\n\n    This model aims to cover both the NFRegNet-Bx models as detailed in the paper's code snippets and\n    the (preact) ResNet models described earlier in the paper.\n\n    There are a few differences:\n        * channels are rounded to be divisible by 8 by default (keep tensor core kernels happy),\n            this changes channel dim and param counts slightly from the paper models\n        * activation correcting gamma constants are moved into the ScaledStdConv as it has less performance\n            impact in PyTorch when done with the weight scaling there. This likely wasn't a concern in the JAX impl.\n        * a config option `gamma_in_act` can be enabled to not apply gamma in StdConv as described above, but\n            apply it in each activation. This is slightly slower, numerically different, but matches official impl.\n        * skipinit is disabled by default, it seems to have a rather drastic impact on GPU memory use and throughput\n            for what it is/does. 
Approx 8-10% throughput loss.\n    \"\"\"\n    def __init__(\n            self,\n            cfg: NfCfg,\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            global_pool: str = 'avg',\n            output_stride: int = 32,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n            **kwargs: Any,\n    ):\n        \"\"\"\n        Args:\n            cfg: Model architecture configuration.\n            num_classes: Number of classifier classes.\n            in_chans: Number of input channels.\n            global_pool: Global pooling type.\n            output_stride: Output stride of network, one of (8, 16, 32).\n            drop_rate: Dropout rate.\n            drop_path_rate: Stochastic depth drop-path rate.\n            **kwargs: Extra kwargs overlayed onto cfg.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n\n        cfg = replace(cfg, **kwargs)\n        assert cfg.act_layer in _nonlin_gamma, f\"Please add non-linearity constants for activation ({cfg.act_layer}).\"\n        conv_layer = ScaledStdConv2dSame if cfg.same_padding else ScaledStdConv2d\n        if cfg.gamma_in_act:\n            act_layer = act_with_gamma(cfg.act_layer, gamma=_nonlin_gamma[cfg.act_layer])\n            conv_layer = partial(conv_layer, eps=cfg.std_conv_eps)\n        else:\n            act_layer = get_act_layer(cfg.act_layer)\n            conv_layer = partial(conv_layer, gamma=_nonlin_gamma[cfg.act_layer], eps=cfg.std_conv_eps)\n        attn_layer = partial(get_attn(cfg.attn_layer), **cfg.attn_kwargs) if cfg.attn_layer else None\n\n        stem_chs = make_divisible((cfg.stem_chs or cfg.channels[0]) * cfg.width_factor, cfg.ch_div)\n        self.stem, stem_stride, stem_feat = create_stem(\n   
         in_chans,\n            stem_chs,\n            cfg.stem_type,\n            conv_layer=conv_layer,\n            act_layer=act_layer,\n            **dd,\n        )\n\n        self.feature_info = [stem_feat]\n        drop_path_rates = calculate_drop_path_rates(drop_path_rate, cfg.depths, stagewise=True)\n        prev_chs = stem_chs\n        net_stride = stem_stride\n        dilation = 1\n        expected_var = 1.0\n        stages = []\n        for stage_idx, stage_depth in enumerate(cfg.depths):\n            stride = 1 if stage_idx == 0 and stem_stride > 2 else 2\n            if net_stride >= output_stride and stride > 1:\n                dilation *= stride\n                stride = 1\n            net_stride *= stride\n            first_dilation = 1 if dilation in (1, 2) else 2\n\n            blocks = []\n            for block_idx in range(cfg.depths[stage_idx]):\n                first_block = block_idx == 0 and stage_idx == 0\n                out_chs = make_divisible(cfg.channels[stage_idx] * cfg.width_factor, cfg.ch_div)\n                blocks += [NormFreeBlock(\n                    in_chs=prev_chs, out_chs=out_chs,\n                    alpha=cfg.alpha,\n                    beta=1. / expected_var ** 0.5,\n                    stride=stride if block_idx == 0 else 1,\n                    dilation=dilation,\n                    first_dilation=first_dilation,\n                    group_size=cfg.group_size,\n                    bottle_ratio=1. 
if cfg.reg and first_block else cfg.bottle_ratio,\n                    ch_div=cfg.ch_div,\n                    reg=cfg.reg,\n                    extra_conv=cfg.extra_conv,\n                    skipinit=cfg.skipinit,\n                    attn_layer=attn_layer,\n                    attn_gain=cfg.attn_gain,\n                    act_layer=act_layer,\n                    conv_layer=conv_layer,\n                    drop_path_rate=drop_path_rates[stage_idx][block_idx],\n                    **dd,\n                )]\n                if block_idx == 0:\n                    expected_var = 1.  # expected var is reset after first block of each stage\n                expected_var += cfg.alpha ** 2   # Even if reset occurs, increment expected variance\n                first_dilation = dilation\n                prev_chs = out_chs\n            self.feature_info += [dict(num_chs=prev_chs, reduction=net_stride, module=f'stages.{stage_idx}')]\n            stages += [nn.Sequential(*blocks)]\n        self.stages = nn.Sequential(*stages)\n\n        if cfg.num_features:\n            # The paper NFRegNet models have an EfficientNet-like final head convolution.\n            self.num_features = make_divisible(cfg.width_factor * cfg.num_features, cfg.ch_div)\n            self.final_conv = conv_layer(prev_chs, self.num_features, 1, **dd)\n            self.feature_info[-1] = dict(num_chs=self.num_features, reduction=net_stride, module=f'final_conv')\n        else:\n            self.num_features = prev_chs\n            self.final_conv = nn.Identity()\n        self.final_act = act_layer(inplace=cfg.num_features > 0)\n\n        self.head_hidden_size = self.num_features\n        self.head = ClassifierHead(\n            self.num_features,\n            num_classes,\n            pool_type=global_pool,\n            drop_rate=self.drop_rate,\n            **dd,\n        )\n\n        for n, m in self.named_modules():\n            if 'fc' in n and isinstance(m, nn.Linear):\n                if 
cfg.zero_init_fc:\n                    nn.init.zeros_(m.weight)\n                else:\n                    nn.init.normal_(m.weight, 0., .01)\n                if m.bias is not None:\n                    nn.init.zeros_(m.bias)\n            elif isinstance(m, nn.Conv2d):\n                nn.init.kaiming_normal_(m.weight, mode='fan_in', nonlinearity='linear')\n                if m.bias is not None:\n                    nn.init.zeros_(m.bias)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        \"\"\"Group parameters for optimization.\"\"\"\n        matcher = dict(\n            stem=r'^stem',\n            blocks=[\n                (r'^stages\\.(\\d+)' if coarse else r'^stages\\.(\\d+)\\.(\\d+)', None),\n                (r'^final_conv', (99999,))\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Enable or disable gradient checkpointing.\"\"\"\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        \"\"\"Get the classifier head.\"\"\"\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None) -> None:\n        \"\"\"Reset the classifier head.\n\n        Args:\n            num_classes: Number of classes for new classifier.\n            global_pool: Global pooling type.\n        \"\"\"\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through feature extraction layers.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Feature tensor.\n        \"\"\"\n        x = self.stem(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stages, x)\n        else:\n            x = 
self.stages(x)\n        x = self.final_conv(x)\n        x = self.final_act(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        \"\"\"Forward pass through classifier head.\n\n        Args:\n            x: Input features.\n            pre_logits: Return features before final linear layer.\n\n        Returns:\n            Classification logits or features.\n        \"\"\"\n        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Output logits.\n        \"\"\"\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _nfres_cfg(\n        depths: Tuple[int, ...],\n        channels: Tuple[int, ...] = (256, 512, 1024, 2048),\n        group_size: Optional[int] = None,\n        act_layer: str = 'relu',\n        attn_layer: Optional[str] = None,\n        attn_kwargs: Optional[Dict[str, Any]] = None,\n) -> NfCfg:\n    \"\"\"Create NFNet ResNet configuration.\n\n    Args:\n        depths: Number of blocks in each stage.\n        channels: Channel dimensions for each stage.\n        group_size: Group convolution size.\n        act_layer: Activation layer type.\n        attn_layer: Attention layer type.\n        attn_kwargs: Attention layer arguments.\n\n    Returns:\n        NFNet configuration.\n    \"\"\"\n    attn_kwargs = attn_kwargs or {}\n    cfg = NfCfg(\n        depths=depths,\n        channels=channels,\n        stem_type='7x7_pool',\n        stem_chs=64,\n        bottle_ratio=0.25,\n        group_size=group_size,\n        act_layer=act_layer,\n        attn_layer=attn_layer,\n        attn_kwargs=attn_kwargs,\n    )\n    return cfg\n\n\ndef _nfreg_cfg(depths: Tuple[int, ...], channels: Tuple[int, ...] 
= (48, 104, 208, 440)) -> NfCfg:\n    \"\"\"Create NFNet RegNet configuration.\n\n    Args:\n        depths: Number of blocks in each stage.\n        channels: Channel dimensions for each stage.\n\n    Returns:\n        NFNet configuration.\n    \"\"\"\n    num_features = 1280 * channels[-1] // 440\n    attn_kwargs = dict(rd_ratio=0.5)\n    cfg = NfCfg(\n        depths=depths,\n        channels=channels,\n        stem_type='3x3',\n        group_size=8,\n        width_factor=0.75,\n        bottle_ratio=2.25,\n        num_features=num_features,\n        reg=True,\n        attn_layer='se',\n        attn_kwargs=attn_kwargs,\n    )\n    return cfg\n\n\ndef _nfnet_cfg(\n        depths: Tuple[int, ...],\n        channels: Tuple[int, ...] = (256, 512, 1536, 1536),\n        group_size: int = 128,\n        bottle_ratio: float = 0.5,\n        feat_mult: float = 2.,\n        act_layer: str = 'gelu',\n        attn_layer: str = 'se',\n        attn_kwargs: Optional[Dict[str, Any]] = None,\n) -> NfCfg:\n    \"\"\"Create NFNet configuration.\n\n    Args:\n        depths: Number of blocks in each stage.\n        channels: Channel dimensions for each stage.\n        group_size: Group convolution size.\n        bottle_ratio: Bottleneck ratio.\n        feat_mult: Feature multiplier for final layer.\n        act_layer: Activation layer type.\n        attn_layer: Attention layer type.\n        attn_kwargs: Attention layer arguments.\n\n    Returns:\n        NFNet configuration.\n    \"\"\"\n    num_features = int(channels[-1] * feat_mult)\n    attn_kwargs = attn_kwargs if attn_kwargs is not None else dict(rd_ratio=0.5)\n    cfg = NfCfg(\n        depths=depths,\n        channels=channels,\n        stem_type='deep_quad',\n        stem_chs=128,\n        group_size=group_size,\n        bottle_ratio=bottle_ratio,\n        extra_conv=True,\n        num_features=num_features,\n        act_layer=act_layer,\n        attn_layer=attn_layer,\n        attn_kwargs=attn_kwargs,\n    )\n    return 
cfg\n\n\ndef _dm_nfnet_cfg(\n        depths: Tuple[int, ...],\n        channels: Tuple[int, ...] = (256, 512, 1536, 1536),\n        act_layer: str = 'gelu',\n        skipinit: bool = True,\n) -> NfCfg:\n    \"\"\"Create DeepMind NFNet configuration.\n\n    Args:\n        depths: Number of blocks in each stage.\n        channels: Channel dimensions for each stage.\n        act_layer: Activation layer type.\n        skipinit: Use skipinit initialization.\n\n    Returns:\n        NFNet configuration.\n    \"\"\"\n    cfg = NfCfg(\n        depths=depths,\n        channels=channels,\n        stem_type='deep_quad',\n        stem_chs=128,\n        group_size=128,\n        bottle_ratio=0.5,\n        extra_conv=True,\n        gamma_in_act=True,\n        same_padding=True,\n        skipinit=skipinit,\n        num_features=int(channels[-1] * 2.0),\n        act_layer=act_layer,\n        attn_layer='se',\n        attn_kwargs=dict(rd_ratio=0.5),\n    )\n    return cfg\n\n\nmodel_cfgs = dict(\n    # NFNet-F models w/ GELU compatible with DeepMind weights\n    dm_nfnet_f0=_dm_nfnet_cfg(depths=(1, 2, 6, 3)),\n    dm_nfnet_f1=_dm_nfnet_cfg(depths=(2, 4, 12, 6)),\n    dm_nfnet_f2=_dm_nfnet_cfg(depths=(3, 6, 18, 9)),\n    dm_nfnet_f3=_dm_nfnet_cfg(depths=(4, 8, 24, 12)),\n    dm_nfnet_f4=_dm_nfnet_cfg(depths=(5, 10, 30, 15)),\n    dm_nfnet_f5=_dm_nfnet_cfg(depths=(6, 12, 36, 18)),\n    dm_nfnet_f6=_dm_nfnet_cfg(depths=(7, 14, 42, 21)),\n\n    # NFNet-F models w/ GELU\n    nfnet_f0=_nfnet_cfg(depths=(1, 2, 6, 3)),\n    nfnet_f1=_nfnet_cfg(depths=(2, 4, 12, 6)),\n    nfnet_f2=_nfnet_cfg(depths=(3, 6, 18, 9)),\n    nfnet_f3=_nfnet_cfg(depths=(4, 8, 24, 12)),\n    nfnet_f4=_nfnet_cfg(depths=(5, 10, 30, 15)),\n    nfnet_f5=_nfnet_cfg(depths=(6, 12, 36, 18)),\n    nfnet_f6=_nfnet_cfg(depths=(7, 14, 42, 21)),\n    nfnet_f7=_nfnet_cfg(depths=(8, 16, 48, 24)),\n\n    # Experimental 'light' versions of NFNet-F that are little leaner, w/ SiLU act\n    nfnet_l0=_nfnet_cfg(\n        depths=(1, 2, 
6, 3), feat_mult=1.5, group_size=64, bottle_ratio=0.25,\n        attn_kwargs=dict(rd_ratio=0.25, rd_divisor=8), act_layer='silu'),\n    eca_nfnet_l0=_nfnet_cfg(\n        depths=(1, 2, 6, 3), feat_mult=1.5, group_size=64, bottle_ratio=0.25,\n        attn_layer='eca', attn_kwargs=dict(), act_layer='silu'),\n    eca_nfnet_l1=_nfnet_cfg(\n        depths=(2, 4, 12, 6), feat_mult=2, group_size=64, bottle_ratio=0.25,\n        attn_layer='eca', attn_kwargs=dict(), act_layer='silu'),\n    eca_nfnet_l2=_nfnet_cfg(\n        depths=(3, 6, 18, 9), feat_mult=2, group_size=64, bottle_ratio=0.25,\n        attn_layer='eca', attn_kwargs=dict(), act_layer='silu'),\n    eca_nfnet_l3=_nfnet_cfg(\n        depths=(4, 8, 24, 12), feat_mult=2, group_size=64, bottle_ratio=0.25,\n        attn_layer='eca', attn_kwargs=dict(), act_layer='silu'),\n\n    # EffNet influenced RegNet defs.\n    # NOTE: These aren't quite the official ver, ch_div=1 must be set for exact ch counts. I round to ch_div=8.\n    nf_regnet_b0=_nfreg_cfg(depths=(1, 3, 6, 6)),\n    nf_regnet_b1=_nfreg_cfg(depths=(2, 4, 7, 7)),\n    nf_regnet_b2=_nfreg_cfg(depths=(2, 4, 8, 8), channels=(56, 112, 232, 488)),\n    nf_regnet_b3=_nfreg_cfg(depths=(2, 5, 9, 9), channels=(56, 128, 248, 528)),\n    nf_regnet_b4=_nfreg_cfg(depths=(2, 6, 11, 11), channels=(64, 144, 288, 616)),\n    nf_regnet_b5=_nfreg_cfg(depths=(3, 7, 14, 14), channels=(80, 168, 336, 704)),\n\n    # ResNet (preact, D style deep stem/avg down) defs\n    nf_resnet26=_nfres_cfg(depths=(2, 2, 2, 2)),\n    nf_resnet50=_nfres_cfg(depths=(3, 4, 6, 3)),\n    nf_resnet101=_nfres_cfg(depths=(3, 4, 23, 3)),\n\n    nf_seresnet26=_nfres_cfg(depths=(2, 2, 2, 2), attn_layer='se', attn_kwargs=dict(rd_ratio=1/16)),\n    nf_seresnet50=_nfres_cfg(depths=(3, 4, 6, 3), attn_layer='se', attn_kwargs=dict(rd_ratio=1/16)),\n    nf_seresnet101=_nfres_cfg(depths=(3, 4, 23, 3), attn_layer='se', attn_kwargs=dict(rd_ratio=1/16)),\n\n    nf_ecaresnet26=_nfres_cfg(depths=(2, 2, 2, 2), 
attn_layer='eca', attn_kwargs=dict()),\n    nf_ecaresnet50=_nfres_cfg(depths=(3, 4, 6, 3), attn_layer='eca', attn_kwargs=dict()),\n    nf_ecaresnet101=_nfres_cfg(depths=(3, 4, 23, 3), attn_layer='eca', attn_kwargs=dict()),\n\n    test_nfnet=_nfnet_cfg(\n        depths=(1, 1, 1, 1), channels=(32, 64, 96, 128), feat_mult=1.5, group_size=8, bottle_ratio=0.25,\n        attn_kwargs=dict(rd_ratio=0.25, rd_divisor=8), act_layer='silu'),\n)\n\n\ndef _create_normfreenet(variant: str, pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"Create a NormFreeNet model.\n\n    Args:\n        variant: Model variant name.\n        pretrained: Load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    Returns:\n        NormFreeNet model instance.\n    \"\"\"\n    model_cfg = model_cfgs[variant]\n    feature_cfg = dict(flatten_sequential=True)\n    return build_model_with_cfg(\n        NormFreeNet,\n        variant,\n        pretrained,\n        model_cfg=model_cfg,\n        feature_cfg=feature_cfg,\n        **kwargs,\n    )\n\n\ndef _dcfg(url: str = '', **kwargs: Any) -> Dict[str, Any]:\n    \"\"\"Create default configuration dictionary.\n\n    Args:\n        url: Model weight URL.\n        **kwargs: Additional configuration options.\n\n    Returns:\n        Configuration dictionary.\n    \"\"\"\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.9, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.conv1', 'classifier': 'head.fc', 'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'dm_nfnet_f0.dm_in1k': _dcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-dnf-weights/dm_nfnet_f0-604f9c3a.pth',\n        pool_size=(6, 6), input_size=(3, 192, 192), test_input_size=(3, 256, 256), 
crop_pct=.9, crop_mode='squash'),\n    'dm_nfnet_f1.dm_in1k': _dcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-dnf-weights/dm_nfnet_f1-fc540f82.pth',\n        pool_size=(7, 7), input_size=(3, 224, 224), test_input_size=(3, 320, 320), crop_pct=0.91, crop_mode='squash'),\n    'dm_nfnet_f2.dm_in1k': _dcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-dnf-weights/dm_nfnet_f2-89875923.pth',\n        pool_size=(8, 8), input_size=(3, 256, 256), test_input_size=(3, 352, 352), crop_pct=0.92, crop_mode='squash'),\n    'dm_nfnet_f3.dm_in1k': _dcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-dnf-weights/dm_nfnet_f3-d74ab3aa.pth',\n        pool_size=(10, 10), input_size=(3, 320, 320), test_input_size=(3, 416, 416), crop_pct=0.94, crop_mode='squash'),\n    'dm_nfnet_f4.dm_in1k': _dcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-dnf-weights/dm_nfnet_f4-0ac5b10b.pth',\n        pool_size=(12, 12), input_size=(3, 384, 384), test_input_size=(3, 512, 512), crop_pct=0.951, crop_mode='squash'),\n    'dm_nfnet_f5.dm_in1k': _dcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-dnf-weights/dm_nfnet_f5-ecb20ab1.pth',\n        pool_size=(13, 13), input_size=(3, 416, 416), test_input_size=(3, 544, 544), crop_pct=0.954, crop_mode='squash'),\n    'dm_nfnet_f6.dm_in1k': _dcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-dnf-weights/dm_nfnet_f6-e0f12116.pth',\n        pool_size=(14, 14), input_size=(3, 448, 448), test_input_size=(3, 576, 576), crop_pct=0.956, crop_mode='squash'),\n\n    'nfnet_f0': _dcfg(\n        url='', pool_size=(6, 6), input_size=(3, 192, 192), 
test_input_size=(3, 256, 256)),\n    'nfnet_f1': _dcfg(\n        url='', pool_size=(7, 7), input_size=(3, 224, 224), test_input_size=(3, 320, 320)),\n    'nfnet_f2': _dcfg(\n        url='', pool_size=(8, 8), input_size=(3, 256, 256), test_input_size=(3, 352, 352)),\n    'nfnet_f3': _dcfg(\n        url='', pool_size=(10, 10), input_size=(3, 320, 320), test_input_size=(3, 416, 416)),\n    'nfnet_f4': _dcfg(\n        url='', pool_size=(12, 12), input_size=(3, 384, 384), test_input_size=(3, 512, 512)),\n    'nfnet_f5': _dcfg(\n        url='', pool_size=(13, 13), input_size=(3, 416, 416), test_input_size=(3, 544, 544)),\n    'nfnet_f6': _dcfg(\n        url='', pool_size=(14, 14), input_size=(3, 448, 448), test_input_size=(3, 576, 576)),\n    'nfnet_f7': _dcfg(\n        url='', pool_size=(15, 15), input_size=(3, 480, 480), test_input_size=(3, 608, 608)),\n\n    'nfnet_l0.ra2_in1k': _dcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/nfnet_l0_ra2-45c6688d.pth',\n        pool_size=(7, 7), input_size=(3, 224, 224), test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'eca_nfnet_l0.ra2_in1k': _dcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/ecanfnet_l0_ra2-e3e9ac50.pth',\n        pool_size=(7, 7), input_size=(3, 224, 224), test_input_size=(3, 288, 288), test_crop_pct=1.0),\n    'eca_nfnet_l1.ra2_in1k': _dcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/ecanfnet_l1_ra2-7dce93cd.pth',\n        pool_size=(8, 8), input_size=(3, 256, 256), test_input_size=(3, 320, 320), test_crop_pct=1.0),\n    'eca_nfnet_l2.ra3_in1k': _dcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/ecanfnet_l2_ra3-da781a61.pth',\n        pool_size=(10, 10), input_size=(3, 320, 320), 
test_input_size=(3, 384, 384), test_crop_pct=1.0),\n    'eca_nfnet_l3': _dcfg(\n        url='',\n        pool_size=(11, 11), input_size=(3, 352, 352), test_input_size=(3, 448, 448), test_crop_pct=1.0),\n\n    'nf_regnet_b0': _dcfg(\n        url='', pool_size=(6, 6), input_size=(3, 192, 192), test_input_size=(3, 256, 256), first_conv='stem.conv'),\n    'nf_regnet_b1.ra2_in1k': _dcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/nf_regnet_b1_256_ra2-ad85cfef.pth',\n        pool_size=(8, 8), input_size=(3, 256, 256), test_input_size=(3, 288, 288), first_conv='stem.conv'),  # NOT to paper spec\n    'nf_regnet_b2': _dcfg(\n        url='', pool_size=(8, 8), input_size=(3, 240, 240), test_input_size=(3, 272, 272), first_conv='stem.conv'),\n    'nf_regnet_b3': _dcfg(\n        url='', pool_size=(9, 9), input_size=(3, 288, 288), test_input_size=(3, 320, 320), first_conv='stem.conv'),\n    'nf_regnet_b4': _dcfg(\n        url='', pool_size=(10, 10), input_size=(3, 320, 320), test_input_size=(3, 384, 384), first_conv='stem.conv'),\n    'nf_regnet_b5': _dcfg(\n        url='', pool_size=(12, 12), input_size=(3, 384, 384), test_input_size=(3, 456, 456), first_conv='stem.conv'),\n\n    'nf_resnet26': _dcfg(url='', first_conv='stem.conv'),\n    'nf_resnet50.ra2_in1k': _dcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/nf_resnet50_ra2-9f236009.pth',\n        pool_size=(8, 8), input_size=(3, 256, 256), test_input_size=(3, 288, 288), crop_pct=0.94, first_conv='stem.conv'),\n    'nf_resnet101': _dcfg(url='', first_conv='stem.conv'),\n\n    'nf_seresnet26': _dcfg(url='', first_conv='stem.conv'),\n    'nf_seresnet50': _dcfg(url='', first_conv='stem.conv'),\n    'nf_seresnet101': _dcfg(url='', first_conv='stem.conv'),\n\n    'nf_ecaresnet26': _dcfg(url='', first_conv='stem.conv'),\n    'nf_ecaresnet50': _dcfg(url='', 
first_conv='stem.conv'),\n    'nf_ecaresnet101': _dcfg(url='', first_conv='stem.conv'),\n\n    'test_nfnet.r160_in1k': _dcfg(\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        crop_pct=0.95, input_size=(3, 160, 160), pool_size=(5, 5)),\n})\n\n\n@register_model\ndef dm_nfnet_f0(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"NFNet-F0 (DeepMind weight compatible).\"\"\"\n    return _create_normfreenet('dm_nfnet_f0', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef dm_nfnet_f1(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"NFNet-F1 (DeepMind weight compatible).\"\"\"\n    return _create_normfreenet('dm_nfnet_f1', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef dm_nfnet_f2(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"NFNet-F2 (DeepMind weight compatible).\"\"\"\n    return _create_normfreenet('dm_nfnet_f2', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef dm_nfnet_f3(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"NFNet-F3 (DeepMind weight compatible).\"\"\"\n    return _create_normfreenet('dm_nfnet_f3', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef dm_nfnet_f4(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"NFNet-F4 (DeepMind weight compatible).\"\"\"\n    return _create_normfreenet('dm_nfnet_f4', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef dm_nfnet_f5(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"NFNet-F5 (DeepMind weight compatible).\"\"\"\n    return _create_normfreenet('dm_nfnet_f5', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef dm_nfnet_f6(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"NFNet-F6 (DeepMind weight compatible).\"\"\"\n    return _create_normfreenet('dm_nfnet_f6', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nfnet_f0(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    
\"\"\"NFNet-F0.\"\"\"\n    return _create_normfreenet('nfnet_f0', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nfnet_f1(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"NFNet-F1.\"\"\"\n    return _create_normfreenet('nfnet_f1', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nfnet_f2(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"NFNet-F2.\"\"\"\n    return _create_normfreenet('nfnet_f2', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nfnet_f3(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"NFNet-F3.\"\"\"\n    return _create_normfreenet('nfnet_f3', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nfnet_f4(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"NFNet-F4.\"\"\"\n    return _create_normfreenet('nfnet_f4', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nfnet_f5(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"NFNet-F5.\"\"\"\n    return _create_normfreenet('nfnet_f5', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nfnet_f6(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"NFNet-F6.\"\"\"\n    return _create_normfreenet('nfnet_f6', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nfnet_f7(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"NFNet-F7.\"\"\"\n    return _create_normfreenet('nfnet_f7', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nfnet_l0(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"NFNet-L0b w/ SiLU.\n\n    My experimental 'light' model w/ F0 repeats, 1.5x final_conv mult, 64 group_size, .25 bottleneck & SE ratio\n    \"\"\"\n    return _create_normfreenet('nfnet_l0', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef eca_nfnet_l0(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"ECA-NFNet-L0 w/ SiLU.\n\n    My experimental 'light' model w/ F0 repeats, 1.5x final_conv mult, 
64 group_size, .25 bottleneck & ECA attn\n    \"\"\"\n    return _create_normfreenet('eca_nfnet_l0', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef eca_nfnet_l1(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"ECA-NFNet-L1 w/ SiLU.\n\n    My experimental 'light' model w/ F1 repeats, 2.0x final_conv mult, 64 group_size, .25 bottleneck & ECA attn\n    \"\"\"\n    return _create_normfreenet('eca_nfnet_l1', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef eca_nfnet_l2(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"ECA-NFNet-L2 w/ SiLU.\n\n    My experimental 'light' model w/ F2 repeats, 2.0x final_conv mult, 64 group_size, .25 bottleneck & ECA attn\n    \"\"\"\n    return _create_normfreenet('eca_nfnet_l2', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef eca_nfnet_l3(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"ECA-NFNet-L3 w/ SiLU.\n\n    My experimental 'light' model w/ F3 repeats, 2.0x final_conv mult, 64 group_size, .25 bottleneck & ECA attn\n    \"\"\"\n    return _create_normfreenet('eca_nfnet_l3', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nf_regnet_b0(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"Normalization-Free RegNet-B0.\n    \"\"\"\n    return _create_normfreenet('nf_regnet_b0', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nf_regnet_b1(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"Normalization-Free RegNet-B1.\n    \"\"\"\n    return _create_normfreenet('nf_regnet_b1', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nf_regnet_b2(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"Normalization-Free RegNet-B2.\n    \"\"\"\n    return _create_normfreenet('nf_regnet_b2', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nf_regnet_b3(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"Normalization-Free RegNet-B3.\n    \"\"\"\n    return 
_create_normfreenet('nf_regnet_b3', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nf_regnet_b4(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"Normalization-Free RegNet-B4.\n    \"\"\"\n    return _create_normfreenet('nf_regnet_b4', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nf_regnet_b5(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"Normalization-Free RegNet-B5.\n    \"\"\"\n    return _create_normfreenet('nf_regnet_b5', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nf_resnet26(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"Normalization-Free ResNet-26.\n    \"\"\"\n    return _create_normfreenet('nf_resnet26', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nf_resnet50(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"Normalization-Free ResNet-50.\n    \"\"\"\n    return _create_normfreenet('nf_resnet50', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nf_resnet101(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"Normalization-Free ResNet-101.\n    \"\"\"\n    return _create_normfreenet('nf_resnet101', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nf_seresnet26(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"Normalization-Free SE-ResNet26.\"\"\"\n    return _create_normfreenet('nf_seresnet26', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nf_seresnet50(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"Normalization-Free SE-ResNet50.\"\"\"\n    return _create_normfreenet('nf_seresnet50', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nf_seresnet101(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"Normalization-Free SE-ResNet101.\"\"\"\n    return _create_normfreenet('nf_seresnet101', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nf_ecaresnet26(pretrained: bool = False, **kwargs: Any) -> 
NormFreeNet:\n    \"\"\"Normalization-Free ECA-ResNet26.\"\"\"\n    return _create_normfreenet('nf_ecaresnet26', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nf_ecaresnet50(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"Normalization-Free ECA-ResNet50.\"\"\"\n    return _create_normfreenet('nf_ecaresnet50', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef nf_ecaresnet101(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"Normalization-Free ECA-ResNet101.\"\"\"\n    return _create_normfreenet('nf_ecaresnet101', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef test_nfnet(pretrained: bool = False, **kwargs: Any) -> NormFreeNet:\n    \"\"\"Test NFNet model for experimentation.\"\"\"\n    return _create_normfreenet('test_nfnet', pretrained=pretrained, **kwargs)\n"
  },
  {
    "path": "timm/models/pit.py",
    "content": "\"\"\" Pooling-based Vision Transformer (PiT) in PyTorch\n\nA PyTorch implement of Pooling-based Vision Transformers as described in\n'Rethinking Spatial Dimensions of Vision Transformers' - https://arxiv.org/abs/2103.16302\n\nThis code was adapted from the original version at https://github.com/naver-ai/pit, original copyright below.\n\nModifications for timm by / Copyright 2020 Ross Wightman\n\"\"\"\n# PiT\n# Copyright 2021-present NAVER Corp.\n# Apache License v2.0\n\nimport math\nimport re\nfrom functools import partial\nfrom typing import List, Optional, Sequence, Tuple, Union, Type, Any\n\nimport torch\nfrom torch import nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import trunc_normal_, to_2tuple, calculate_drop_path_rates\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._registry import register_model, generate_default_cfgs\nfrom .vision_transformer import Block\n\n\n__all__ = ['PoolingVisionTransformer']  # model_registry will add each entrypoint fn to this\n\n\nclass SequentialTuple(nn.Sequential):\n    \"\"\" This module exists to work around torchscript typing issues list -> list\"\"\"\n    def forward(self, x: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]:\n        for module in self:\n            x = module(x)\n        return x\n\n\nclass Transformer(nn.Module):\n    def __init__(\n            self,\n            base_dim: int,\n            depth: int,\n            heads: int,\n            mlp_ratio: float,\n            pool: Optional[Any] = None,\n            proj_drop: float = .0,\n            attn_drop: float = .0,\n            drop_path_prob: Optional[List[float]] = None,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        embed_dim = base_dim * heads\n\n        
self.pool = pool\n        self.norm = norm_layer(embed_dim, **dd) if norm_layer else nn.Identity()\n        self.blocks = nn.Sequential(*[\n            Block(\n                dim=embed_dim,\n                num_heads=heads,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=True,\n                proj_drop=proj_drop,\n                attn_drop=attn_drop,\n                drop_path=drop_path_prob[i],\n                norm_layer=partial(nn.LayerNorm, eps=1e-6),\n                **dd,\n            )\n            for i in range(depth)])\n\n    def forward(self, x: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]:\n        x, cls_tokens = x\n        token_length = cls_tokens.shape[1]\n        if self.pool is not None:\n            x, cls_tokens = self.pool(x, cls_tokens)\n\n        B, C, H, W = x.shape\n        x = x.flatten(2).transpose(1, 2)\n        x = torch.cat((cls_tokens, x), dim=1)\n\n        x = self.norm(x)\n        x = self.blocks(x)\n\n        cls_tokens = x[:, :token_length]\n        x = x[:, token_length:]\n        x = x.transpose(1, 2).reshape(B, C, H, W)\n\n        return x, cls_tokens\n\n\nclass Pooling(nn.Module):\n    def __init__(\n            self,\n            in_feature: int,\n            out_feature: int,\n            stride: int,\n            padding_mode: str = 'zeros',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.conv = nn.Conv2d(\n            in_feature,\n            out_feature,\n            kernel_size=stride + 1,\n            padding=stride // 2,\n            stride=stride,\n            padding_mode=padding_mode,\n            groups=in_feature,\n            **dd,\n        )\n        self.fc = nn.Linear(in_feature, out_feature, **dd)\n\n    def forward(self, x, cls_token) -> Tuple[torch.Tensor, torch.Tensor]:\n        x = self.conv(x)\n        cls_token = self.fc(cls_token)\n        return x, 
cls_token\n\n\nclass ConvEmbedding(nn.Module):\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            img_size: int = 224,\n            patch_size: int = 16,\n            stride: int = 8,\n            padding: int = 0,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        padding = padding\n        self.img_size = to_2tuple(img_size)\n        self.patch_size = to_2tuple(patch_size)\n        self.height = math.floor((self.img_size[0] + 2 * padding - self.patch_size[0]) / stride + 1)\n        self.width = math.floor((self.img_size[1] + 2 * padding - self.patch_size[1]) / stride + 1)\n        self.grid_size = (self.height, self.width)\n\n        self.conv = nn.Conv2d(\n            in_channels,\n            out_channels,\n            kernel_size=patch_size,\n            stride=stride,\n            padding=padding,\n            bias=True,\n            **dd,\n        )\n\n    def forward(self, x):\n        x = self.conv(x)\n        return x\n\n\nclass PoolingVisionTransformer(nn.Module):\n    \"\"\" Pooling-based Vision Transformer\n\n    A PyTorch implement of 'Rethinking Spatial Dimensions of Vision Transformers'\n        - https://arxiv.org/abs/2103.16302\n    \"\"\"\n    def __init__(\n            self,\n            img_size: int = 224,\n            patch_size: int = 16,\n            stride: int = 8,\n            stem_type: str = 'overlap',\n            base_dims: Sequence[int] = (48, 48, 48),\n            depth: Sequence[int] = (2, 6, 4),\n            heads: Sequence[int] = (2, 4, 8),\n            mlp_ratio: float = 4,\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            global_pool: str = 'token',\n            distilled: bool = False,\n            drop_rate: float = 0.,\n            pos_drop_drate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 
0.,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert global_pool in ('token',)\n\n        self.base_dims = base_dims\n        self.heads = heads\n        embed_dim = base_dims[0] * heads[0]\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.num_tokens = 2 if distilled else 1\n        self.feature_info = []\n\n        self.patch_embed = ConvEmbedding(in_chans, embed_dim, img_size, patch_size, stride, **dd)\n        self.pos_embed = nn.Parameter(torch.randn(1, embed_dim, self.patch_embed.height, self.patch_embed.width, **dd))\n        self.cls_token = nn.Parameter(torch.randn(1, self.num_tokens, embed_dim, **dd))\n        self.pos_drop = nn.Dropout(p=pos_drop_drate)\n\n        transformers = []\n        # stochastic depth decay rule\n        dpr = calculate_drop_path_rates(drop_path_rate, depth, stagewise=True)\n        prev_dim = embed_dim\n        for i in range(len(depth)):\n            pool = None\n            embed_dim = base_dims[i] * heads[i]\n            if i > 0:\n                pool = Pooling(\n                    prev_dim,\n                    embed_dim,\n                    stride=2,\n                    **dd,\n                )\n            transformers += [Transformer(\n                base_dims[i],\n                depth[i],\n                heads[i],\n                mlp_ratio,\n                pool=pool,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path_prob=dpr[i],\n                **dd,\n            )]\n            prev_dim = embed_dim\n            self.feature_info += [dict(num_chs=prev_dim, reduction=(stride - 1) * 2**i, module=f'transformers.{i}')]\n\n        self.transformers = SequentialTuple(*transformers)\n        self.norm = nn.LayerNorm(base_dims[-1] * 
heads[-1], eps=1e-6, **dd)\n        self.num_features = self.head_hidden_size = self.embed_dim = embed_dim\n\n        # Classifier head\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = nn.Linear(self.embed_dim, num_classes, **dd) if num_classes > 0 else nn.Identity()\n        self.head_dist = None\n        if distilled:\n            self.head_dist = nn.Linear(self.embed_dim, self.num_classes, **dd) if num_classes > 0 else nn.Identity()\n        self.distilled_training = False  # must set this True to train w/ distillation token\n\n        trunc_normal_(self.pos_embed, std=.02)\n        trunc_normal_(self.cls_token, std=.02)\n        self.apply(self._init_weights)\n\n    def _init_weights(self, m):\n        if isinstance(m, nn.LayerNorm):\n            nn.init.constant_(m.bias, 0)\n            nn.init.constant_(m.weight, 1.0)\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {'pos_embed', 'cls_token'}\n\n    @torch.jit.ignore\n    def set_distilled_training(self, enable=True):\n        self.distilled_training = enable\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, 'gradient checkpointing not supported'\n\n    def get_classifier(self) -> nn.Module:\n        if self.head_dist is not None:\n            return self.head, self.head_dist\n        else:\n            return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            self.global_pool = global_pool\n        device = self.head.weight.device if hasattr(self.head, 'weight') else None\n        dtype = self.head.weight.dtype if hasattr(self.head, 'weight') else None\n        self.head = nn.Linear(self.embed_dim, num_classes, device=device, dtype=dtype) if num_classes > 0 else nn.Identity()\n        if self.head_dist is not None:\n            self.head_dist = nn.Linear(self.embed_dim, 
self.num_classes, device=device, dtype=dtype) if num_classes > 0 else nn.Identity()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.transformers), indices)\n\n        # forward pass\n        x = self.patch_embed(x)\n        x = self.pos_drop(x + self.pos_embed)\n        cls_tokens = self.cls_token.expand(x.shape[0], -1, -1)\n\n        last_idx = len(self.transformers) - 1\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.transformers\n        else:\n            stages = self.transformers[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            x, cls_tokens = stage((x, cls_tokens))\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        if feat_idx == last_idx:\n            cls_tokens = self.norm(cls_tokens)\n\n        return cls_tokens, intermediates\n\n    def 
prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.transformers), indices)\n        self.transformers = self.transformers[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.patch_embed(x)\n        x = self.pos_drop(x + self.pos_embed)\n        cls_tokens = self.cls_token.expand(x.shape[0], -1, -1)\n        x, cls_tokens = self.transformers((x, cls_tokens))\n        cls_tokens = self.norm(cls_tokens)\n        return cls_tokens\n\n    def forward_head(self, x, pre_logits: bool = False) -> torch.Tensor:\n        if self.head_dist is not None:\n            assert self.global_pool == 'token'\n            x, x_dist = x[:, 0], x[:, 1]\n            x = self.head_drop(x)\n            x_dist = self.head_drop(x_dist)\n            if not pre_logits:\n                x = self.head(x)\n                x_dist = self.head_dist(x_dist)\n            if self.distilled_training and self.training and not torch.jit.is_scripting():\n                # only return separate classification predictions when training in distilled mode\n                return x, x_dist\n            else:\n                # during standard train / finetune, inference average the classifier predictions\n                return (x + x_dist) / 2\n        else:\n            if self.global_pool == 'token':\n                x = x[:, 0]\n            x = self.head_drop(x)\n            if not pre_logits:\n                x = self.head(x)\n            return x\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x 
= self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    \"\"\" preprocess checkpoints \"\"\"\n    out_dict = {}\n    p_blocks = re.compile(r'pools\\.(\\d)\\.')\n    for k, v in state_dict.items():\n        # FIXME need to update resize for PiT impl\n        # if k == 'pos_embed' and v.shape != model.pos_embed.shape:\n        #     # To resize pos embedding when using model at different size from pretrained weights\n        #     v = resize_pos_embed(v, model.pos_embed)\n        k = p_blocks.sub(lambda exp: f'transformers.{int(exp.group(1)) + 1}.pool.', k)\n        out_dict[k] = v\n    return out_dict\n\n\ndef _create_pit(variant, pretrained=False, **kwargs):\n    default_out_indices = tuple(range(3))\n    out_indices = kwargs.pop('out_indices', default_out_indices)\n\n    model = build_model_with_cfg(\n        PoolingVisionTransformer,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(feature_cls='hook', out_indices=out_indices),\n        **kwargs,\n    )\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.conv', 'classifier': 'head',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    # deit models (FB weights)\n    'pit_ti_224.in1k': _cfg(hf_hub_id='timm/'),\n    'pit_xs_224.in1k': _cfg(hf_hub_id='timm/'),\n    'pit_s_224.in1k': _cfg(hf_hub_id='timm/'),\n    'pit_b_224.in1k': _cfg(hf_hub_id='timm/'),\n    'pit_ti_distilled_224.in1k': _cfg(\n        hf_hub_id='timm/',\n        classifier=('head', 'head_dist')),\n    'pit_xs_distilled_224.in1k': _cfg(\n        hf_hub_id='timm/',\n        classifier=('head', 
'head_dist')),\n    'pit_s_distilled_224.in1k': _cfg(\n        hf_hub_id='timm/',\n        classifier=('head', 'head_dist')),\n    'pit_b_distilled_224.in1k': _cfg(\n        hf_hub_id='timm/',\n        classifier=('head', 'head_dist')),\n})\n\n\n@register_model\ndef pit_b_224(pretrained=False, **kwargs) -> PoolingVisionTransformer:\n    model_args = dict(\n        patch_size=14,\n        stride=7,\n        base_dims=[64, 64, 64],\n        depth=[3, 6, 4],\n        heads=[4, 8, 16],\n        mlp_ratio=4,\n    )\n    return _create_pit('pit_b_224', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef pit_s_224(pretrained=False, **kwargs) -> PoolingVisionTransformer:\n    model_args = dict(\n        patch_size=16,\n        stride=8,\n        base_dims=[48, 48, 48],\n        depth=[2, 6, 4],\n        heads=[3, 6, 12],\n        mlp_ratio=4,\n    )\n    return _create_pit('pit_s_224', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef pit_xs_224(pretrained=False, **kwargs) -> PoolingVisionTransformer:\n    model_args = dict(\n        patch_size=16,\n        stride=8,\n        base_dims=[48, 48, 48],\n        depth=[2, 6, 4],\n        heads=[2, 4, 8],\n        mlp_ratio=4,\n    )\n    return _create_pit('pit_xs_224', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef pit_ti_224(pretrained=False, **kwargs) -> PoolingVisionTransformer:\n    model_args = dict(\n        patch_size=16,\n        stride=8,\n        base_dims=[32, 32, 32],\n        depth=[2, 6, 4],\n        heads=[2, 4, 8],\n        mlp_ratio=4,\n    )\n    return _create_pit('pit_ti_224', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef pit_b_distilled_224(pretrained=False, **kwargs) -> PoolingVisionTransformer:\n    model_args = dict(\n        patch_size=14,\n        stride=7,\n        base_dims=[64, 64, 64],\n        depth=[3, 6, 4],\n        heads=[4, 8, 16],\n        mlp_ratio=4,\n        distilled=True,\n    )\n    return 
_create_pit('pit_b_distilled_224', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef pit_s_distilled_224(pretrained=False, **kwargs) -> PoolingVisionTransformer:\n    model_args = dict(\n        patch_size=16,\n        stride=8,\n        base_dims=[48, 48, 48],\n        depth=[2, 6, 4],\n        heads=[3, 6, 12],\n        mlp_ratio=4,\n        distilled=True,\n    )\n    return _create_pit('pit_s_distilled_224', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef pit_xs_distilled_224(pretrained=False, **kwargs) -> PoolingVisionTransformer:\n    model_args = dict(\n        patch_size=16,\n        stride=8,\n        base_dims=[48, 48, 48],\n        depth=[2, 6, 4],\n        heads=[2, 4, 8],\n        mlp_ratio=4,\n        distilled=True,\n    )\n    return _create_pit('pit_xs_distilled_224', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef pit_ti_distilled_224(pretrained=False, **kwargs) -> PoolingVisionTransformer:\n    model_args = dict(\n        patch_size=16,\n        stride=8,\n        base_dims=[32, 32, 32],\n        depth=[2, 6, 4],\n        heads=[2, 4, 8],\n        mlp_ratio=4,\n        distilled=True,\n    )\n    return _create_pit('pit_ti_distilled_224', pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/pnasnet.py",
    "content": "\"\"\"\n pnasnet5large implementation grabbed from Cadene's pretrained models\n Additional credit to https://github.com/creafz\n\n https://github.com/Cadene/pretrained-models.pytorch/blob/master/pretrainedmodels/models/pnasnet.py\n\n\"\"\"\nfrom collections import OrderedDict\nfrom functools import partial\nfrom typing import Type\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.layers import ConvNormAct, create_conv2d, create_pool2d, create_classifier\nfrom ._builder import build_model_with_cfg\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['PNASNet5Large']\n\n\nclass SeparableConv2d(nn.Module):\n\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int,\n            stride: int,\n            padding: str = '',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.depthwise_conv2d = create_conv2d(\n            in_channels,\n            in_channels,\n            kernel_size=kernel_size,\n            stride=stride,\n            padding=padding,\n            groups=in_channels,\n            **dd,\n        )\n        self.pointwise_conv2d = create_conv2d(\n            in_channels,\n            out_channels,\n            kernel_size=1,\n            padding=padding,\n            **dd,\n        )\n\n    def forward(self, x):\n        x = self.depthwise_conv2d(x)\n        x = self.pointwise_conv2d(x)\n        return x\n\n\nclass BranchSeparables(nn.Module):\n\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int,\n            stride: int = 1,\n            stem_cell: bool = False,\n            padding: str = '',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        middle_channels = 
out_channels if stem_cell else in_channels\n        self.act_1 = nn.ReLU()\n        self.separable_1 = SeparableConv2d(\n            in_channels,\n            middle_channels,\n            kernel_size,\n            stride=stride,\n            padding=padding,\n            **dd,\n        )\n        self.bn_sep_1 = nn.BatchNorm2d(middle_channels, eps=0.001, **dd)\n        self.act_2 = nn.ReLU()\n        self.separable_2 = SeparableConv2d(\n            middle_channels,\n            out_channels,\n            kernel_size,\n            stride=1,\n            padding=padding,\n            **dd,\n        )\n        self.bn_sep_2 = nn.BatchNorm2d(out_channels, eps=0.001, **dd)\n\n    def forward(self, x):\n        x = self.act_1(x)\n        x = self.separable_1(x)\n        x = self.bn_sep_1(x)\n        x = self.act_2(x)\n        x = self.separable_2(x)\n        x = self.bn_sep_2(x)\n        return x\n\n\nclass ActConvBn(nn.Module):\n\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int,\n            stride: int = 1,\n            padding: str = '',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.act = nn.ReLU()\n        self.conv = create_conv2d(\n            in_channels,\n            out_channels,\n            kernel_size=kernel_size,\n            stride=stride,\n            padding=padding,\n            **dd,\n        )\n        self.bn = nn.BatchNorm2d(out_channels, eps=0.001, **dd)\n\n    def forward(self, x):\n        x = self.act(x)\n        x = self.conv(x)\n        x = self.bn(x)\n        return x\n\n\nclass FactorizedReduction(nn.Module):\n\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            padding: str = '',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        
super().__init__()\n        self.act = nn.ReLU()\n        self.path_1 = nn.Sequential(OrderedDict([\n            ('avgpool', nn.AvgPool2d(1, stride=2, count_include_pad=False)),\n            ('conv', create_conv2d(in_channels, out_channels // 2, kernel_size=1, padding=padding, **dd)),\n        ]))\n        self.path_2 = nn.Sequential(OrderedDict([\n            ('pad', nn.ZeroPad2d((-1, 1, -1, 1))),  # shift\n            ('avgpool', nn.AvgPool2d(1, stride=2, count_include_pad=False)),\n            ('conv', create_conv2d(in_channels, out_channels // 2, kernel_size=1, padding=padding, **dd)),\n        ]))\n        self.final_path_bn = nn.BatchNorm2d(out_channels, eps=0.001, **dd)\n\n    def forward(self, x):\n        x = self.act(x)\n        x_path1 = self.path_1(x)\n        x_path2 = self.path_2(x)\n        out = self.final_path_bn(torch.cat([x_path1, x_path2], 1))\n        return out\n\n\nclass CellBase(nn.Module):\n\n    def cell_forward(self, x_left, x_right):\n        x_comb_iter_0_left = self.comb_iter_0_left(x_left)\n        x_comb_iter_0_right = self.comb_iter_0_right(x_left)\n        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right\n\n        x_comb_iter_1_left = self.comb_iter_1_left(x_right)\n        x_comb_iter_1_right = self.comb_iter_1_right(x_right)\n        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right\n\n        x_comb_iter_2_left = self.comb_iter_2_left(x_right)\n        x_comb_iter_2_right = self.comb_iter_2_right(x_right)\n        x_comb_iter_2 = x_comb_iter_2_left + x_comb_iter_2_right\n\n        x_comb_iter_3_left = self.comb_iter_3_left(x_comb_iter_2)\n        x_comb_iter_3_right = self.comb_iter_3_right(x_right)\n        x_comb_iter_3 = x_comb_iter_3_left + x_comb_iter_3_right\n\n        x_comb_iter_4_left = self.comb_iter_4_left(x_left)\n        if self.comb_iter_4_right is not None:\n            x_comb_iter_4_right = self.comb_iter_4_right(x_right)\n        else:\n            x_comb_iter_4_right = x_right\n        
x_comb_iter_4 = x_comb_iter_4_left + x_comb_iter_4_right\n\n        x_out = torch.cat([x_comb_iter_0, x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)\n        return x_out\n\n\nclass CellStem0(CellBase):\n\n    def __init__(\n            self,\n            in_chs_left: int,\n            out_chs_left: int,\n            in_chs_right: int,\n            out_chs_right: int,\n            pad_type: str = '',\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv_1x1 = ActConvBn(in_chs_right, out_chs_right, kernel_size=1, padding=pad_type, **dd)\n\n        self.comb_iter_0_left = BranchSeparables(\n            in_chs_left, out_chs_left, kernel_size=5, stride=2, stem_cell=True, padding=pad_type, **dd)\n        self.comb_iter_0_right = nn.Sequential(OrderedDict([\n            ('max_pool', create_pool2d('max', 3, stride=2, padding=pad_type)),\n            ('conv', create_conv2d(in_chs_left, out_chs_left, kernel_size=1, padding=pad_type, **dd)),\n            ('bn', nn.BatchNorm2d(out_chs_left, eps=0.001, **dd)),\n        ]))\n\n        self.comb_iter_1_left = BranchSeparables(\n            out_chs_right, out_chs_right, kernel_size=7, stride=2, padding=pad_type, **dd)\n        self.comb_iter_1_right = create_pool2d('max', 3, stride=2, padding=pad_type)\n\n        self.comb_iter_2_left = BranchSeparables(\n            out_chs_right, out_chs_right, kernel_size=5, stride=2, padding=pad_type, **dd)\n        self.comb_iter_2_right = BranchSeparables(\n            out_chs_right, out_chs_right, kernel_size=3, stride=2, padding=pad_type, **dd)\n\n        self.comb_iter_3_left = BranchSeparables(\n            out_chs_right, out_chs_right, kernel_size=3, padding=pad_type, **dd)\n        self.comb_iter_3_right = create_pool2d('max', 3, stride=2, padding=pad_type)\n\n        self.comb_iter_4_left = BranchSeparables(\n            in_chs_right, out_chs_right, kernel_size=3, 
stride=2, stem_cell=True, padding=pad_type, **dd)\n        self.comb_iter_4_right = ActConvBn(\n            out_chs_right, out_chs_right, kernel_size=1, stride=2, padding=pad_type, **dd)\n\n    def forward(self, x_left):\n        x_right = self.conv_1x1(x_left)\n        x_out = self.cell_forward(x_left, x_right)\n        return x_out\n\n\nclass Cell(CellBase):\n\n    def __init__(\n            self,\n            in_chs_left: int,\n            out_chs_left: int,\n            in_chs_right: int,\n            out_chs_right: int,\n            pad_type: str = '',\n            is_reduction: bool = False,\n            match_prev_layer_dims: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        # If `is_reduction` is set to `True` stride 2 is used for\n        # convolution and pooling layers to reduce the spatial size of\n        # the output of a cell approximately by a factor of 2.\n        stride = 2 if is_reduction else 1\n\n        # If `match_prev_layer_dimensions` is set to `True`\n        # `FactorizedReduction` is used to reduce the spatial size\n        # of the left input of a cell approximately by a factor of 2.\n        self.match_prev_layer_dimensions = match_prev_layer_dims\n        if match_prev_layer_dims:\n            self.conv_prev_1x1 = FactorizedReduction(in_chs_left, out_chs_left, padding=pad_type, **dd)\n        else:\n            self.conv_prev_1x1 = ActConvBn(in_chs_left, out_chs_left, kernel_size=1, padding=pad_type, **dd)\n        self.conv_1x1 = ActConvBn(in_chs_right, out_chs_right, kernel_size=1, padding=pad_type, **dd)\n\n        self.comb_iter_0_left = BranchSeparables(\n            out_chs_left, out_chs_left, kernel_size=5, stride=stride, padding=pad_type, **dd)\n        self.comb_iter_0_right = create_pool2d('max', 3, stride=stride, padding=pad_type)\n\n        self.comb_iter_1_left = BranchSeparables(\n            out_chs_right, 
out_chs_right, kernel_size=7, stride=stride, padding=pad_type, **dd)\n        self.comb_iter_1_right = create_pool2d('max', 3, stride=stride, padding=pad_type)\n\n        self.comb_iter_2_left = BranchSeparables(\n            out_chs_right, out_chs_right, kernel_size=5, stride=stride, padding=pad_type, **dd)\n        self.comb_iter_2_right = BranchSeparables(\n            out_chs_right, out_chs_right, kernel_size=3, stride=stride, padding=pad_type, **dd)\n\n        self.comb_iter_3_left = BranchSeparables(out_chs_right, out_chs_right, kernel_size=3, **dd)\n        self.comb_iter_3_right = create_pool2d('max', 3, stride=stride, padding=pad_type)\n\n        self.comb_iter_4_left = BranchSeparables(\n            out_chs_left, out_chs_left, kernel_size=3, stride=stride, padding=pad_type, **dd)\n        if is_reduction:\n            self.comb_iter_4_right = ActConvBn(\n                out_chs_right, out_chs_right, kernel_size=1, stride=stride, padding=pad_type, **dd)\n        else:\n            self.comb_iter_4_right = None\n\n    def forward(self, x_left, x_right):\n        x_left = self.conv_prev_1x1(x_left)\n        x_right = self.conv_1x1(x_right)\n        x_out = self.cell_forward(x_left, x_right)\n        return x_out\n\n\nclass PNASNet5Large(nn.Module):\n    def __init__(\n            self,\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            output_stride: int = 32,\n            drop_rate: float = 0.,\n            global_pool: str = 'avg',\n            pad_type: str = '',\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.num_features = self.head_hidden_size = 4320\n        assert output_stride == 32\n\n        self.conv_0 = ConvNormAct(\n            in_chans, 96, kernel_size=3, stride=2, padding=0,\n            norm_layer=partial(nn.BatchNorm2d, eps=0.001, 
momentum=0.1), apply_act=False, **dd)\n\n        self.cell_stem_0 = CellStem0(\n            in_chs_left=96, out_chs_left=54, in_chs_right=96, out_chs_right=54, pad_type=pad_type, **dd)\n\n        self.cell_stem_1 = Cell(\n            in_chs_left=96, out_chs_left=108, in_chs_right=270, out_chs_right=108, pad_type=pad_type,\n            match_prev_layer_dims=True, is_reduction=True, **dd)\n        self.cell_0 = Cell(\n            in_chs_left=270, out_chs_left=216, in_chs_right=540, out_chs_right=216, pad_type=pad_type,\n            match_prev_layer_dims=True, **dd)\n        self.cell_1 = Cell(\n            in_chs_left=540, out_chs_left=216, in_chs_right=1080, out_chs_right=216, pad_type=pad_type, **dd)\n        self.cell_2 = Cell(\n            in_chs_left=1080, out_chs_left=216, in_chs_right=1080, out_chs_right=216, pad_type=pad_type, **dd)\n        self.cell_3 = Cell(\n            in_chs_left=1080, out_chs_left=216, in_chs_right=1080, out_chs_right=216, pad_type=pad_type, **dd)\n\n        self.cell_4 = Cell(\n            in_chs_left=1080, out_chs_left=432, in_chs_right=1080, out_chs_right=432, pad_type=pad_type,\n            is_reduction=True, **dd)\n        self.cell_5 = Cell(\n            in_chs_left=1080, out_chs_left=432, in_chs_right=2160, out_chs_right=432, pad_type=pad_type,\n            match_prev_layer_dims=True, **dd)\n        self.cell_6 = Cell(\n            in_chs_left=2160, out_chs_left=432, in_chs_right=2160, out_chs_right=432, pad_type=pad_type, **dd)\n        self.cell_7 = Cell(\n            in_chs_left=2160, out_chs_left=432, in_chs_right=2160, out_chs_right=432, pad_type=pad_type, **dd)\n\n        self.cell_8 = Cell(\n            in_chs_left=2160, out_chs_left=864, in_chs_right=2160, out_chs_right=864, pad_type=pad_type,\n            is_reduction=True, **dd)\n        self.cell_9 = Cell(\n            in_chs_left=2160, out_chs_left=864, in_chs_right=4320, out_chs_right=864, pad_type=pad_type,\n            match_prev_layer_dims=True, **dd)\n        
self.cell_10 = Cell(\n            in_chs_left=4320, out_chs_left=864, in_chs_right=4320, out_chs_right=864, pad_type=pad_type, **dd)\n        self.cell_11 = Cell(\n            in_chs_left=4320, out_chs_left=864, in_chs_right=4320, out_chs_right=864, pad_type=pad_type, **dd)\n        self.act = nn.ReLU()\n        self.feature_info = [\n            dict(num_chs=96, reduction=2, module='conv_0'),\n            dict(num_chs=270, reduction=4, module='cell_stem_1.conv_1x1.act'),\n            dict(num_chs=1080, reduction=8, module='cell_4.conv_1x1.act'),\n            dict(num_chs=2160, reduction=16, module='cell_8.conv_1x1.act'),\n            dict(num_chs=4320, reduction=32, module='act'),\n        ]\n\n        self.global_pool, self.head_drop, self.last_linear = create_classifier(\n            self.num_features, self.num_classes, pool_type=global_pool, drop_rate=drop_rate, **dd)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(stem=r'^conv_0|cell_stem_[01]', blocks=r'^cell_(\\d+)')\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, 'gradient checkpointing not supported'\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.last_linear\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg', device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.global_pool, self.last_linear = create_classifier(\n            self.num_features, self.num_classes, pool_type=global_pool, **dd)\n\n    def forward_features(self, x):\n        x_conv_0 = self.conv_0(x)\n        x_stem_0 = self.cell_stem_0(x_conv_0)\n        x_stem_1 = self.cell_stem_1(x_conv_0, x_stem_0)\n        x_cell_0 = self.cell_0(x_stem_0, x_stem_1)\n        x_cell_1 = self.cell_1(x_stem_1, x_cell_0)\n        x_cell_2 = self.cell_2(x_cell_0, x_cell_1)\n        x_cell_3 = self.cell_3(x_cell_1, 
x_cell_2)\n        x_cell_4 = self.cell_4(x_cell_2, x_cell_3)\n        x_cell_5 = self.cell_5(x_cell_3, x_cell_4)\n        x_cell_6 = self.cell_6(x_cell_4, x_cell_5)\n        x_cell_7 = self.cell_7(x_cell_5, x_cell_6)\n        x_cell_8 = self.cell_8(x_cell_6, x_cell_7)\n        x_cell_9 = self.cell_9(x_cell_7, x_cell_8)\n        x_cell_10 = self.cell_10(x_cell_8, x_cell_9)\n        x_cell_11 = self.cell_11(x_cell_9, x_cell_10)\n        x = self.act(x_cell_11)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        x = self.head_drop(x)\n        return x if pre_logits else self.last_linear(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_pnasnet(variant, pretrained=False, **kwargs):\n    return build_model_with_cfg(\n        PNASNet5Large,\n        variant,\n        pretrained,\n        feature_cfg=dict(feature_cls='hook', no_rewrite=True),  # not possible to re-write this model\n        **kwargs,\n    )\n\n\ndefault_cfgs = generate_default_cfgs({\n    'pnasnet5large.tf_in1k': {\n        'hf_hub_id': 'timm/',\n        'input_size': (3, 331, 331),\n        'pool_size': (11, 11),\n        'crop_pct': 0.911,\n        'interpolation': 'bicubic',\n        'mean': (0.5, 0.5, 0.5),\n        'std': (0.5, 0.5, 0.5),\n        'num_classes': 1000,\n        'first_conv': 'conv_0.conv',\n        'classifier': 'last_linear',\n        'license': 'apache-2.0',\n    },\n})\n\n\n@register_model\ndef pnasnet5large(pretrained=False, **kwargs) -> PNASNet5Large:\n    r\"\"\"PNASNet-5 model architecture from the\n    `\"Progressive Neural Architecture Search\"\n    <https://arxiv.org/abs/1712.00559>`_ paper.\n    \"\"\"\n    model_kwargs = dict(pad_type='same', **kwargs)\n    return _create_pnasnet('pnasnet5large', pretrained, **model_kwargs)\n"
  },
  {
    "path": "timm/models/pvt_v2.py",
    "content": "\"\"\" Pyramid Vision Transformer v2\n\n@misc{wang2021pvtv2,\n      title={PVTv2: Improved Baselines with Pyramid Vision Transformer},\n      author={Wenhai Wang and Enze Xie and Xiang Li and Deng-Ping Fan and Kaitao Song and Ding Liang and\n        Tong Lu and Ping Luo and Ling Shao},\n      year={2021},\n      eprint={2106.13797},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n\nBased on Apache 2.0 licensed code at https://github.com/whai362/PVT\n\nModifications and timm support by / Copyright 2022, Ross Wightman\n\"\"\"\n\nimport math\nfrom typing import Callable, List, Optional, Tuple, Union, Type, Any\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import DropPath, calculate_drop_path_rates, to_2tuple, to_ntuple, trunc_normal_, LayerNorm, use_fused_attn\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['PyramidVisionTransformerV2']\n\n\nclass MlpWithDepthwiseConv(nn.Module):\n    def __init__(\n            self,\n            in_features: int,\n            hidden_features: Optional[int] = None,\n            out_features: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.GELU,\n            drop: float = 0.,\n            extra_relu: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        out_features = out_features or in_features\n        hidden_features = hidden_features or in_features\n        self.fc1 = nn.Linear(in_features, hidden_features, **dd)\n        self.relu = nn.ReLU() if extra_relu else nn.Identity()\n        self.dwconv = nn.Conv2d(hidden_features, hidden_features, 3, 1, 1, bias=True, groups=hidden_features, **dd)\n        self.act = 
act_layer()\n        self.fc2 = nn.Linear(hidden_features, out_features, **dd)\n        self.drop = nn.Dropout(drop)\n\n    def forward(self, x, feat_size: List[int]):\n        x = self.fc1(x)\n        B, N, C = x.shape\n        x = x.transpose(1, 2).view(B, C, feat_size[0], feat_size[1])\n        x = self.relu(x)\n        x = self.dwconv(x)\n        x = x.flatten(2).transpose(1, 2)\n        x = self.act(x)\n        x = self.drop(x)\n        x = self.fc2(x)\n        x = self.drop(x)\n        return x\n\n\nclass Attention(nn.Module):\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            sr_ratio: int = 1,\n            linear_attn: bool = False,\n            qkv_bias: bool = True,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert dim % num_heads == 0, f\"dim {dim} should be divided by num_heads {num_heads}.\"\n\n        self.dim = dim\n        self.num_heads = num_heads\n        self.head_dim = dim // num_heads\n        self.scale = self.head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n\n        self.q = nn.Linear(dim, dim, bias=qkv_bias, **dd)\n        self.kv = nn.Linear(dim, dim * 2, bias=qkv_bias, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n        if not linear_attn:\n            self.pool = None\n            if sr_ratio > 1:\n                self.sr = nn.Conv2d(dim, dim, kernel_size=sr_ratio, stride=sr_ratio, **dd)\n                self.norm = nn.LayerNorm(dim, **dd)\n            else:\n                self.sr = None\n                self.norm = None\n            self.act = None\n        else:\n            self.pool = nn.AdaptiveAvgPool2d(7)\n            self.sr = nn.Conv2d(dim, 
dim, kernel_size=1, stride=1, **dd)\n            self.norm = nn.LayerNorm(dim, **dd)\n            self.act = nn.GELU()\n\n    def forward(self, x, feat_size: List[int]):\n        B, N, C = x.shape\n        H, W = feat_size\n        q = self.q(x).reshape(B, N, self.num_heads, -1).permute(0, 2, 1, 3)\n\n        if self.pool is not None:\n            x = x.permute(0, 2, 1).reshape(B, C, H, W)\n            x = self.sr(self.pool(x)).reshape(B, C, -1).permute(0, 2, 1)\n            x = self.norm(x)\n            x = self.act(x)\n            kv = self.kv(x).reshape(B, -1, 2, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)\n        else:\n            if self.sr is not None:\n                x = x.permute(0, 2, 1).reshape(B, C, H, W)\n                x = self.sr(x).reshape(B, C, -1).permute(0, 2, 1)\n                x = self.norm(x)\n                kv = self.kv(x).reshape(B, -1, 2, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)\n            else:\n                kv = self.kv(x).reshape(B, -1, 2, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)\n        k, v = kv.unbind(0)\n\n        if self.fused_attn:\n            x = F.scaled_dot_product_attention(q, k, v, dropout_p=self.attn_drop.p if self.training else 0.)\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x = attn @ v\n\n        x = x.transpose(1, 2).reshape(B, N, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n\nclass Block(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            sr_ratio: int = 1,\n            linear_attn: bool = False,\n            qkv_bias: bool = False,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n           
 norm_layer: Type[nn.Module] = LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = Attention(\n            dim,\n            num_heads=num_heads,\n            sr_ratio=sr_ratio,\n            linear_attn=linear_attn,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            **dd,\n        )\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp = MlpWithDepthwiseConv(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=proj_drop,\n            extra_relu=linear_attn,\n            **dd,\n        )\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def forward(self, x, feat_size: List[int]):\n        x = x + self.drop_path1(self.attn(self.norm1(x), feat_size))\n        x = x + self.drop_path2(self.mlp(self.norm2(x), feat_size))\n\n        return x\n\n\nclass OverlapPatchEmbed(nn.Module):\n    \"\"\" Image to Patch Embedding\n    \"\"\"\n    def __init__(\n            self,\n            patch_size: Union[int, Tuple[int, int]] = 7,\n            stride: int = 4,\n            in_chans: int = 3,\n            embed_dim: int = 768,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        patch_size = to_2tuple(patch_size)\n        assert max(patch_size) > stride, \"Set larger patch_size than stride\"\n        self.patch_size = patch_size\n        self.proj = nn.Conv2d(\n            in_chans, embed_dim, patch_size,\n            stride=stride, padding=(patch_size[0] // 2, patch_size[1] // 2), **dd)\n        self.norm = nn.LayerNorm(embed_dim, **dd)\n\n    def 
forward(self, x):\n        x = self.proj(x)\n        x = x.permute(0, 2, 3, 1)\n        x = self.norm(x)\n        return x\n\n\nclass PyramidVisionTransformerStage(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            dim_out: int,\n            depth: int,\n            downsample: bool = True,\n            num_heads: int = 8,\n            sr_ratio: int = 1,\n            linear_attn: bool = False,\n            mlp_ratio: float = 4.0,\n            qkv_bias: bool = True,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: Union[List[float], float] = 0.0,\n            norm_layer: Callable = LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.grad_checkpointing = False\n\n        if downsample:\n            self.downsample = OverlapPatchEmbed(\n                patch_size=3,\n                stride=2,\n                in_chans=dim,\n                embed_dim=dim_out,\n                **dd,\n            )\n        else:\n            assert dim == dim_out\n            self.downsample = None\n\n        self.blocks = nn.ModuleList([Block(\n            dim=dim_out,\n            num_heads=num_heads,\n            sr_ratio=sr_ratio,\n            linear_attn=linear_attn,\n            mlp_ratio=mlp_ratio,\n            qkv_bias=qkv_bias,\n            proj_drop=proj_drop,\n            attn_drop=attn_drop,\n            drop_path=drop_path[i] if isinstance(drop_path, list) else drop_path,\n            norm_layer=norm_layer,\n            **dd,\n        ) for i in range(depth)])\n\n        self.norm = norm_layer(dim_out, **dd)\n\n    def forward(self, x):\n        # x is either B, C, H, W (if downsample) or B, H, W, C if not\n        if self.downsample is not None:\n            # input to downsample is B, C, H, W\n            x = self.downsample(x)  # output B, H, W, C\n        B, H, W, C = x.shape\n      
  feat_size = (H, W)\n        x = x.reshape(B, -1, C)\n        for blk in self.blocks:\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x, feat_size)\n            else:\n                x = blk(x, feat_size)\n        x = self.norm(x)\n        x = x.reshape(B, feat_size[0], feat_size[1], -1).permute(0, 3, 1, 2).contiguous()\n        return x\n\n\nclass PyramidVisionTransformerV2(nn.Module):\n    def __init__(\n            self,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            depths: Tuple[int, ...] = (3, 4, 6, 3),\n            embed_dims: Tuple[int, ...] = (64, 128, 256, 512),\n            num_heads: Tuple[int, ...] = (1, 2, 4, 8),\n            sr_ratios: Tuple[int, ...] = (8, 4, 2, 1),\n            mlp_ratios: Tuple[float, ...] = (8., 8., 4., 4.),\n            qkv_bias: bool = True,\n            linear: bool = False,\n            drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            norm_layer: Type[nn.Module] = LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        assert global_pool in ('avg', '')\n        self.global_pool = global_pool\n        self.depths = depths\n        num_stages = len(depths)\n        mlp_ratios = to_ntuple(num_stages)(mlp_ratios)\n        num_heads = to_ntuple(num_stages)(num_heads)\n        sr_ratios = to_ntuple(num_stages)(sr_ratios)\n        assert(len(embed_dims)) == num_stages\n        self.feature_info = []\n\n        self.patch_embed = OverlapPatchEmbed(\n            patch_size=7,\n            stride=4,\n            in_chans=in_chans,\n            embed_dim=embed_dims[0],\n            **dd,\n        )\n\n        dpr = 
calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)\n        cur = 0\n        prev_dim = embed_dims[0]\n        stages = []\n        for i in range(num_stages):\n            stages += [PyramidVisionTransformerStage(\n                dim=prev_dim,\n                dim_out=embed_dims[i],\n                depth=depths[i],\n                downsample=i > 0,\n                num_heads=num_heads[i],\n                sr_ratio=sr_ratios[i],\n                mlp_ratio=mlp_ratios[i],\n                linear_attn=linear,\n                qkv_bias=qkv_bias,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dpr[i],\n                norm_layer=norm_layer,\n                **dd,\n            )]\n            prev_dim = embed_dims[i]\n            cur += depths[i]\n            self.feature_info += [dict(num_chs=prev_dim, reduction=4 * 2**i, module=f'stages.{i}')]\n        self.stages = nn.Sequential(*stages)\n\n        # classification head\n        self.num_features = self.head_hidden_size = embed_dims[-1]\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = nn.Linear(embed_dims[-1], num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n        self.apply(self._init_weights)\n\n    def _init_weights(self, m):\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=.02)\n            if isinstance(m, nn.Linear) and m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n        elif isinstance(m, nn.Conv2d):\n            fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n            fan_out //= m.groups\n            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))\n            if m.bias is not None:\n                m.bias.data.zero_()\n\n    def freeze_patch_emb(self):\n        self.patch_embed.requires_grad = False\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {}\n\n    @torch.jit.ignore\n    def 
group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^patch_embed',  # stem and embed\n            blocks=r'^stages\\.(\\d+)'\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        for s in self.stages:\n            s.grad_checkpointing = enable\n\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            assert global_pool in ('avg', '')\n            self.global_pool = global_pool\n        device = self.head.weight.device if hasattr(self.head, 'weight') else None\n        dtype = self.head.weight.dtype if hasattr(self.head, 'weight') else None\n        self.head = nn.Linear(self.num_features, num_classes, device=device, dtype=dtype) if num_classes > 0 else nn.Identity()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, 
max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.patch_embed(x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.patch_embed(x)\n        x = self.stages(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        if self.global_pool:\n            x = x.mean(dim=(-1, -2))\n        x = self.head_drop(x)\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    \"\"\" Remap original checkpoints -> timm \"\"\"\n    if 'patch_embed.proj.weight' in state_dict:\n        return state_dict  # non-original checkpoint, no remapping needed\n\n    out_dict = {}\n    import re\n    for k, v in state_dict.items():\n        if k.startswith('patch_embed'):\n            k = k.replace('patch_embed1', 'patch_embed')\n            k = 
k.replace('patch_embed2', 'stages.1.downsample')\n            k = k.replace('patch_embed3', 'stages.2.downsample')\n            k = k.replace('patch_embed4', 'stages.3.downsample')\n        k = k.replace('dwconv.dwconv', 'dwconv')\n        k = re.sub(r'block(\\d+).(\\d+)', lambda x: f'stages.{int(x.group(1)) - 1}.blocks.{x.group(2)}', k)\n        k = re.sub(r'^norm(\\d+)', lambda x: f'stages.{int(x.group(1)) - 1}.norm', k)\n        out_dict[k] = v\n    return out_dict\n\n\ndef _create_pvt2(variant, pretrained=False, **kwargs):\n    default_out_indices = tuple(range(4))\n    out_indices = kwargs.pop('out_indices', default_out_indices)\n    model = build_model_with_cfg(\n        PyramidVisionTransformerV2,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        **kwargs,\n    )\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.9, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.proj', 'classifier': 'head', 'fixed_input_size': False,\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'pvt_v2_b0.in1k': _cfg(hf_hub_id='timm/'),\n    'pvt_v2_b1.in1k': _cfg(hf_hub_id='timm/'),\n    'pvt_v2_b2.in1k': _cfg(hf_hub_id='timm/'),\n    'pvt_v2_b3.in1k': _cfg(hf_hub_id='timm/'),\n    'pvt_v2_b4.in1k': _cfg(hf_hub_id='timm/'),\n    'pvt_v2_b5.in1k': _cfg(hf_hub_id='timm/'),\n    'pvt_v2_b2_li.in1k': _cfg(hf_hub_id='timm/'),\n})\n\n\n@register_model\ndef pvt_v2_b0(pretrained=False, **kwargs) -> PyramidVisionTransformerV2:\n    model_args = dict(depths=(2, 2, 2, 2), embed_dims=(32, 64, 160, 256), num_heads=(1, 2, 5, 8))\n    return _create_pvt2('pvt_v2_b0', pretrained=pretrained, **dict(model_args, 
**kwargs))\n\n\n@register_model\ndef pvt_v2_b1(pretrained=False, **kwargs) -> PyramidVisionTransformerV2:\n    model_args = dict(depths=(2, 2, 2, 2), embed_dims=(64, 128, 320, 512), num_heads=(1, 2, 5, 8))\n    return _create_pvt2('pvt_v2_b1', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef pvt_v2_b2(pretrained=False, **kwargs) -> PyramidVisionTransformerV2:\n    model_args = dict(depths=(3, 4, 6, 3), embed_dims=(64, 128, 320, 512), num_heads=(1, 2, 5, 8))\n    return _create_pvt2('pvt_v2_b2', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef pvt_v2_b3(pretrained=False, **kwargs) -> PyramidVisionTransformerV2:\n    model_args = dict(depths=(3, 4, 18, 3), embed_dims=(64, 128, 320, 512), num_heads=(1, 2, 5, 8))\n    return _create_pvt2('pvt_v2_b3', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef pvt_v2_b4(pretrained=False, **kwargs) -> PyramidVisionTransformerV2:\n    model_args = dict(depths=(3, 8, 27, 3), embed_dims=(64, 128, 320, 512), num_heads=(1, 2, 5, 8))\n    return _create_pvt2('pvt_v2_b4', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef pvt_v2_b5(pretrained=False, **kwargs) -> PyramidVisionTransformerV2:\n    model_args = dict(\n        depths=(3, 6, 40, 3), embed_dims=(64, 128, 320, 512), num_heads=(1, 2, 5, 8), mlp_ratios=(4, 4, 4, 4))\n    return _create_pvt2('pvt_v2_b5', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef pvt_v2_b2_li(pretrained=False, **kwargs) -> PyramidVisionTransformerV2:\n    model_args = dict(\n        depths=(3, 4, 6, 3), embed_dims=(64, 128, 320, 512), num_heads=(1, 2, 5, 8), linear=True)\n    return _create_pvt2('pvt_v2_b2_li', pretrained=pretrained, **dict(model_args, **kwargs))\n\n"
  },
  {
    "path": "timm/models/rdnet.py",
    "content": "\"\"\"\nRDNet\nCopyright (c) 2024-present NAVER Cloud Corp.\nApache-2.0\n\"\"\"\n\nfrom functools import partial\nfrom typing import List, Optional, Tuple, Union, Callable, Type\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import DropPath, calculate_drop_path_rates, NormMlpClassifierHead, ClassifierHead, EffectiveSEModule, \\\n    make_divisible, get_act_layer, get_norm_layer\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import named_apply\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = [\"RDNet\"]\n\n\nclass Block(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            inter_chs: int,\n            out_chs: int,\n            norm_layer: Type[nn.Module],\n            act_layer: Type[nn.Module],\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.layers = nn.Sequential(\n            nn.Conv2d(in_chs, in_chs, groups=in_chs, kernel_size=7, stride=1, padding=3, **dd),\n            norm_layer(in_chs, **dd),\n            nn.Conv2d(in_chs, inter_chs, kernel_size=1, stride=1, padding=0, **dd),\n            act_layer(),\n            nn.Conv2d(inter_chs, out_chs, kernel_size=1, stride=1, padding=0, **dd),\n        )\n\n    def forward(self, x):\n        return self.layers(x)\n\n\nclass BlockESE(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            inter_chs: int,\n            out_chs: int,\n            norm_layer: Type[nn.Module],\n            act_layer: Type[nn.Module],\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.layers = nn.Sequential(\n            nn.Conv2d(in_chs, in_chs, groups=in_chs, kernel_size=7, stride=1, padding=3, 
**dd),\n            norm_layer(in_chs, **dd),\n            nn.Conv2d(in_chs, inter_chs, kernel_size=1, stride=1, padding=0, **dd),\n            act_layer(),\n            nn.Conv2d(inter_chs, out_chs, kernel_size=1, stride=1, padding=0, **dd),\n            EffectiveSEModule(out_chs, **dd),\n        )\n\n    def forward(self, x):\n        return self.layers(x)\n\n\ndef _get_block_type(block: str):\n    block = block.lower().strip()\n    if block == \"block\":\n        return Block\n    elif block == \"blockese\":\n        return BlockESE\n    else:\n        assert False, f\"Unknown block type ({block}).\"\n\n\nclass DenseBlock(nn.Module):\n    def __init__(\n            self,\n            num_input_features: int = 64,\n            growth_rate: int = 64,\n            bottleneck_width_ratio: float = 4.0,\n            drop_path_rate: float = 0.0,\n            drop_rate: float = 0.0,\n            rand_gather_step_prob: float = 0.0,\n            block_idx: int = 0,\n            block_type: str = \"Block\",\n            ls_init_value: float = 1e-6,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            act_layer: Type[nn.Module] = nn.GELU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.drop_rate = drop_rate\n        self.drop_path_rate = drop_path_rate\n        self.rand_gather_step_prob = rand_gather_step_prob\n        self.block_idx = block_idx\n        self.growth_rate = growth_rate\n\n        self.gamma = nn.Parameter(ls_init_value * torch.ones(growth_rate, **dd)) if ls_init_value > 0 else None\n        growth_rate = int(growth_rate)\n        inter_chs = int(num_input_features * bottleneck_width_ratio / 8) * 8\n\n        self.drop_path = DropPath(drop_path_rate)\n\n        self.layers = _get_block_type(block_type)(\n            in_chs=num_input_features,\n            inter_chs=inter_chs,\n            out_chs=growth_rate,\n            
norm_layer=norm_layer,\n            act_layer=act_layer,\n            **dd,\n        )\n\n    def forward(self, x: List[torch.Tensor]) -> torch.Tensor:\n        x = torch.cat(x, 1)\n        x = self.layers(x)\n\n        if self.gamma is not None:\n            x = x.mul(self.gamma.reshape(1, -1, 1, 1))\n\n        x = self.drop_path(x)\n        return x\n\n\nclass DenseStage(nn.Sequential):\n    def __init__(\n            self,\n            num_block: int,\n            num_input_features: int,\n            drop_path_rates: List[float],\n            growth_rate: int,\n            device=None,\n            dtype=None,\n            **kwargs,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        for i in range(num_block):\n            layer = DenseBlock(\n                num_input_features=num_input_features,\n                growth_rate=growth_rate,\n                drop_path_rate=drop_path_rates[i],\n                block_idx=i,\n                **dd,\n                **kwargs,\n            )\n            num_input_features += growth_rate\n            self.add_module(f\"dense_block{i}\", layer)\n        self.num_out_features = num_input_features\n\n    def forward(self, init_feature: torch.Tensor) -> torch.Tensor:\n        features = [init_feature]\n        for module in self:\n            new_feature = module(features)\n            features.append(new_feature)\n        return torch.cat(features, 1)\n\n\nclass RDNet(nn.Module):\n    def __init__(\n            self,\n            in_chans: int = 3,  # timm option [--in-chans]\n            num_classes: int = 1000,  # timm option [--num-classes]\n            global_pool: str = 'avg',  # timm option [--gp]\n            growth_rates: Union[List[int], Tuple[int]] = (64, 104, 128, 128, 128, 128, 224),\n            num_blocks_list: Union[List[int], Tuple[int]] = (3, 3, 3, 3, 3, 3, 3),\n            block_type: Union[List[int], Tuple[int]] = (\"Block\",) * 2 + (\"BlockESE\",) * 5,\n         
   is_downsample_block: Union[List[bool], Tuple[bool]] = (None, True, True, False, False, False, True),\n            bottleneck_width_ratio: float = 4.0,\n            transition_compression_ratio: float = 0.5,\n            ls_init_value: float = 1e-6,\n            stem_type: str = 'patch',\n            patch_size: int = 4,\n            num_init_features: int = 64,\n            head_init_scale: float = 1.,\n            head_norm_first: bool = False,\n            conv_bias: bool = True,\n            act_layer: Union[str, Callable] = 'gelu',\n            norm_layer: str = \"layernorm2d\",\n            norm_eps: Optional[float] = None,\n            drop_rate: float = 0.0,  # timm option [--drop: dropout ratio]\n            drop_path_rate: float = 0.0,  # timm option [--drop-path: drop-path ratio]\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            in_chans: Number of input image channels.\n            num_classes: Number of classes for classification head.\n            global_pool: Global pooling type.\n            growth_rates: Growth rate at each stage.\n            num_blocks_list: Number of blocks at each stage.\n            is_downsample_block: Whether to downsample at each stage.\n            bottleneck_width_ratio: Bottleneck width ratio (similar to mlp expansion ratio).\n            transition_compression_ratio: Channel compression ratio of transition layers.\n            ls_init_value: Init value for Layer Scale, disabled if None.\n            stem_type: Type of stem.\n            patch_size: Stem patch size for patch stem.\n            num_init_features: Number of features of stem.\n            head_init_scale: Init scaling value for classifier weights and biases.\n            head_norm_first: Apply normalization before global pool + head.\n            conv_bias: Use bias layers w/ all convolutions.\n            act_layer: Activation layer type.\n            norm_layer: Normalization layer type.\n            
norm_eps: Small value to avoid division by zero in normalization.\n            drop_rate: Head pre-classifier dropout rate.\n            drop_path_rate: Stochastic depth drop rate.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert len(growth_rates) == len(num_blocks_list) == len(is_downsample_block)\n        act_layer = get_act_layer(act_layer)\n        norm_layer = get_norm_layer(norm_layer)\n        if norm_eps is not None:\n            norm_layer = partial(norm_layer, eps=norm_eps)\n\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n\n        # stem\n        assert stem_type in ('patch', 'overlap', 'overlap_tiered')\n        if stem_type == 'patch':\n            # NOTE: this stem is a minimal form of ViT PatchEmbed, as used in SwinTransformer w/ patch_size = 4\n            self.stem = nn.Sequential(\n                nn.Conv2d(in_chans, num_init_features, kernel_size=patch_size, stride=patch_size, bias=conv_bias, **dd),\n                norm_layer(num_init_features, **dd),\n            )\n            stem_stride = patch_size\n        else:\n            mid_chs = make_divisible(num_init_features // 2) if 'tiered' in stem_type else num_init_features\n            self.stem = nn.Sequential(\n                nn.Conv2d(in_chans, mid_chs, kernel_size=3, stride=2, padding=1, bias=conv_bias, **dd),\n                nn.Conv2d(mid_chs, num_init_features, kernel_size=3, stride=2, padding=1, bias=conv_bias, **dd),\n                norm_layer(num_init_features, **dd),\n            )\n            stem_stride = 4\n\n        # features\n        self.feature_info = []\n        self.num_stages = len(growth_rates)\n        curr_stride = stem_stride\n        num_features = num_init_features\n        dp_rates = calculate_drop_path_rates(drop_path_rate, num_blocks_list, stagewise=True)\n\n        dense_stages = []\n        for i in range(self.num_stages):\n            
dense_stage_layers = []\n            if i != 0:\n                compressed_num_features = int(num_features * transition_compression_ratio / 8) * 8\n                k_size = stride = 1\n                if is_downsample_block[i]:\n                    curr_stride *= 2\n                    k_size = stride = 2\n\n                dense_stage_layers.append(norm_layer(num_features, **dd))\n                dense_stage_layers.append(nn.Conv2d(\n                    num_features,\n                    compressed_num_features,\n                    kernel_size=k_size,\n                    stride=stride,\n                    padding=0,\n                    **dd,\n                ))\n                num_features = compressed_num_features\n\n            stage = DenseStage(\n                num_block=num_blocks_list[i],\n                num_input_features=num_features,\n                growth_rate=growth_rates[i],\n                bottleneck_width_ratio=bottleneck_width_ratio,\n                drop_rate=drop_rate,\n                drop_path_rates=dp_rates[i],\n                ls_init_value=ls_init_value,\n                block_type=block_type[i],\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                **dd,\n            )\n            dense_stage_layers.append(stage)\n            num_features += num_blocks_list[i] * growth_rates[i]\n\n            if i + 1 == self.num_stages or (i + 1 != self.num_stages and is_downsample_block[i + 1]):\n                self.feature_info += [\n                    dict(\n                        num_chs=num_features,\n                        reduction=curr_stride,\n                        module=f'dense_stages.{i}',\n                        growth_rate=growth_rates[i],\n                    )\n                ]\n            dense_stages.append(nn.Sequential(*dense_stage_layers))\n        self.dense_stages = nn.Sequential(*dense_stages)\n        self.num_features = self.head_hidden_size = num_features\n\n        # if 
head_norm_first == true, norm -> global pool -> fc ordering, like most other nets\n        # otherwise pool -> norm -> fc, the default RDNet ordering (pretrained NV weights)\n        if head_norm_first:\n            self.norm_pre = norm_layer(self.num_features, **dd)\n            self.head = ClassifierHead(\n                self.num_features,\n                num_classes,\n                pool_type=global_pool,\n                drop_rate=self.drop_rate,\n                **dd,\n            )\n        else:\n            self.norm_pre = nn.Identity()\n            self.head = NormMlpClassifierHead(\n                self.num_features,\n                num_classes,\n                pool_type=global_pool,\n                drop_rate=self.drop_rate,\n                norm_layer=norm_layer,\n                **dd,\n            )\n\n        named_apply(partial(_init_weights, head_init_scale=head_init_scale), self)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        assert not coarse, \"coarse grouping is not implemented for RDNet\"\n        return dict(\n            stem=r'^stem',\n            blocks=r'^dense_stages\\.(\\d+)',\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        for s in self.dense_stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" 
Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        stage_ends = [int(info['module'].split('.')[-1]) for info in self.feature_info]\n        take_indices, max_index = feature_take_indices(len(stage_ends), indices)\n        take_indices = [stage_ends[i] for i in take_indices]\n        max_index = stage_ends[max_index]\n\n        # forward pass\n        x = self.stem(x)\n\n        last_idx = len(self.dense_stages) - 1\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            dense_stages = self.dense_stages\n        else:\n            dense_stages = self.dense_stages[:max_index + 1]\n        for feat_idx, stage in enumerate(dense_stages):\n            x = stage(x)\n            if feat_idx in take_indices:\n                if norm and feat_idx == last_idx:\n                    x_inter = self.norm_pre(x)  # applying final norm to last intermediate\n                else:\n                    x_inter = x\n                intermediates.append(x_inter)\n\n        if intermediates_only:\n            return intermediates\n\n        if feat_idx == last_idx:\n            x = self.norm_pre(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        
\"\"\"\n        stage_ends = [int(info['module'].split('.')[-1]) for info in self.feature_info]\n        take_indices, max_index = feature_take_indices(len(stage_ends), indices)\n        max_index = stage_ends[max_index]\n        self.dense_stages = self.dense_stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_norm:\n            self.norm_pre = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        x = self.dense_stages(x)\n        x = self.norm_pre(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        return self.head(x, pre_logits=True) if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _init_weights(module, name=None, head_init_scale=1.0):\n    if isinstance(module, nn.Conv2d):\n        nn.init.kaiming_normal_(module.weight)\n    elif isinstance(module, nn.BatchNorm2d):\n        nn.init.constant_(module.weight, 1)\n        nn.init.constant_(module.bias, 0)\n    elif isinstance(module, nn.Linear):\n        nn.init.constant_(module.bias, 0)\n        if name and 'head.' 
in name:\n            module.weight.data.mul_(head_init_scale)\n            module.bias.data.mul_(head_init_scale)\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    \"\"\" Remap NV checkpoints -> timm \"\"\"\n    if 'stem.0.weight' in state_dict:\n        return state_dict  # non-NV checkpoint\n    if 'model' in state_dict:\n        state_dict = state_dict['model']\n\n    out_dict = {}\n\n    for k, v in state_dict.items():\n        k = k.replace('stem.stem.', 'stem.')\n        out_dict[k] = v\n\n    return out_dict\n\n\ndef _create_rdnet(variant, pretrained=False, **kwargs):\n    model = build_model_with_cfg(\n        RDNet, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=(0, 1, 2, 3), flatten_sequential=True),\n        **kwargs)\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        \"url\": url,\n        \"num_classes\": 1000, \"input_size\": (3, 224, 224), \"pool_size\": (7, 7),\n        \"crop_pct\": 0.9, \"interpolation\": \"bicubic\",\n        \"mean\": IMAGENET_DEFAULT_MEAN, \"std\": IMAGENET_DEFAULT_STD,\n        \"first_conv\": \"stem.0\", \"classifier\": \"head.fc\",\n        \"paper_ids\": \"arXiv:2403.19588\",\n        \"paper_name\": \"DenseNets Reloaded: Paradigm Shift Beyond ResNets and ViTs\",\n        \"origin_url\": \"https://github.com/naver-ai/rdnet\",\n        \"license\": \"apache-2.0\",\n        **kwargs,\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'rdnet_tiny.nv_in1k': _cfg(\n        hf_hub_id='naver-ai/rdnet_tiny.nv_in1k'),\n    'rdnet_small.nv_in1k': _cfg(\n        hf_hub_id='naver-ai/rdnet_small.nv_in1k'),\n    'rdnet_base.nv_in1k': _cfg(\n        hf_hub_id='naver-ai/rdnet_base.nv_in1k'),\n    'rdnet_large.nv_in1k': _cfg(\n        hf_hub_id='naver-ai/rdnet_large.nv_in1k'),\n    'rdnet_large.nv_in1k_ft_in1k_384': _cfg(\n        hf_hub_id='naver-ai/rdnet_large.nv_in1k_ft_in1k_384',\n        input_size=(3, 384, 384), crop_pct=1.0, 
pool_size=(12, 12)),\n})\n\n\n@register_model\ndef rdnet_tiny(pretrained=False, **kwargs):\n    n_layer = 7\n    model_args = {\n        \"num_init_features\": 64,\n        \"growth_rates\": [64] + [104] + [128] * 4 + [224],\n        \"num_blocks_list\": [3] * n_layer,\n        \"is_downsample_block\": (None, True, True, False, False, False, True),\n        \"transition_compression_ratio\": 0.5,\n        \"block_type\": [\"Block\"] + [\"Block\"] + [\"BlockESE\"] * 4 + [\"BlockESE\"],\n    }\n    model = _create_rdnet(\"rdnet_tiny\", pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef rdnet_small(pretrained=False, **kwargs):\n    n_layer = 11\n    model_args = {\n        \"num_init_features\": 72,\n        \"growth_rates\": [64] + [128] + [128] * (n_layer - 4) + [240] * 2,\n        \"num_blocks_list\": [3] * n_layer,\n        \"is_downsample_block\": (None, True, True, False, False, False, False, False, False, True, False),\n        \"transition_compression_ratio\": 0.5,\n        \"block_type\": [\"Block\"] + [\"Block\"] + [\"BlockESE\"] * (n_layer - 4) + [\"BlockESE\"] * 2,\n    }\n    model = _create_rdnet(\"rdnet_small\", pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef rdnet_base(pretrained=False, **kwargs):\n    n_layer = 11\n    model_args = {\n        \"num_init_features\": 120,\n        \"growth_rates\": [96] + [128] + [168] * (n_layer - 4) + [336] * 2,\n        \"num_blocks_list\": [3] * n_layer,\n        \"is_downsample_block\": (None, True, True, False, False, False, False, False, False, True, False),\n        \"transition_compression_ratio\": 0.5,\n        \"block_type\": [\"Block\"] + [\"Block\"] + [\"BlockESE\"] * (n_layer - 4) + [\"BlockESE\"] * 2,\n    }\n    model = _create_rdnet(\"rdnet_base\", pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef rdnet_large(pretrained=False, **kwargs):\n    n_layer = 12\n    
model_args = {\n        \"num_init_features\": 144,\n        \"growth_rates\": [128] + [192] + [256] * (n_layer - 4) + [360] * 2,\n        \"num_blocks_list\": [3] * n_layer,\n        \"is_downsample_block\": (None, True, True, False, False, False, False, False, False, False, True, False),\n        \"transition_compression_ratio\": 0.5,\n        \"block_type\": [\"Block\"] + [\"Block\"] + [\"BlockESE\"] * (n_layer - 4) + [\"BlockESE\"] * 2,\n    }\n    model = _create_rdnet(\"rdnet_large\", pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n"
  },
  {
    "path": "timm/models/registry.py",
    "content": "from ._registry import *\n\nimport warnings\nwarnings.warn(f\"Importing from {__name__} is deprecated, please import via timm.models\", FutureWarning)\n"
  },
  {
    "path": "timm/models/regnet.py",
    "content": "\"\"\"RegNet X, Y, Z, and more\n\nPaper: `Designing Network Design Spaces` - https://arxiv.org/abs/2003.13678\nOriginal Impl: https://github.com/facebookresearch/pycls/blob/master/pycls/models/regnet.py\n\nPaper: `Fast and Accurate Model Scaling` - https://arxiv.org/abs/2103.06877\nOriginal Impl: None\n\nBased on original PyTorch impl linked above, but re-wrote to use my own blocks (adapted from ResNet here)\nand cleaned up with more descriptive variable names.\n\nWeights from original pycls impl have been modified:\n* first layer from BGR -> RGB as most PyTorch models are\n* removed training specific dict entries from checkpoints and keep model state_dict only\n* remap names to match the ones here\n\nSupports weight loading from torchvision and classy-vision (incl VISSL SEER)\n\nA number of custom timm model definitions additions including:\n* stochastic depth, gradient checkpointing, layer-decay, configurable dilation\n* a pre-activation 'V' variant\n* only known RegNet-Z model definitions with pretrained weights\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport math\nfrom dataclasses import dataclass, replace\nfrom functools import partial\nfrom typing import Any, Callable, Dict, List, Optional, Union, Tuple, Type\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import ClassifierHead, AvgPool2dSame, ConvNormAct, SEModule, DropPath, GroupNormAct, calculate_drop_path_rates\nfrom timm.layers import get_act_layer, get_norm_act_layer, create_conv2d, make_divisible\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint_seq, named_apply\nfrom ._registry import generate_default_cfgs, register_model, register_model_deprecations\n\n__all__ = ['RegNet', 'RegNetCfg']  # model_registry will add each entrypoint fn to this\n\n\n@dataclass\nclass RegNetCfg:\n    \"\"\"RegNet architecture 
configuration.\"\"\"\n    depth: int = 21\n    w0: int = 80\n    wa: float = 42.63\n    wm: float = 2.66\n    group_size: int = 24\n    bottle_ratio: float = 1.\n    se_ratio: float = 0.\n    group_min_ratio: float = 0.\n    stem_width: int = 32\n    downsample: Optional[str] = 'conv1x1'\n    linear_out: bool = False\n    preact: bool = False\n    num_features: int = 0\n    act_layer: Union[str, Callable] = 'relu'\n    norm_layer: Union[str, Callable] = 'batchnorm'\n\n\ndef quantize_float(f: float, q: int) -> int:\n    \"\"\"Converts a float to the closest non-zero int divisible by q.\n\n    Args:\n        f: Input float value.\n        q: Quantization divisor.\n\n    Returns:\n        Quantized integer value.\n    \"\"\"\n    return int(round(f / q) * q)\n\n\ndef adjust_widths_groups_comp(\n        widths: List[int],\n        bottle_ratios: List[float],\n        groups: List[int],\n        min_ratio: float = 0.\n) -> Tuple[List[int], List[int]]:\n    \"\"\"Adjusts the compatibility of widths and groups.\n\n    Args:\n        widths: List of channel widths.\n        bottle_ratios: List of bottleneck ratios.\n        groups: List of group sizes.\n        min_ratio: Minimum ratio for divisibility.\n\n    Returns:\n        Tuple of adjusted widths and groups.\n    \"\"\"\n    bottleneck_widths = [int(w * b) for w, b in zip(widths, bottle_ratios)]\n    groups = [min(g, w_bot) for g, w_bot in zip(groups, bottleneck_widths)]\n    if min_ratio:\n        # torchvision uses a different rounding scheme for ensuring bottleneck widths divisible by group widths\n        bottleneck_widths = [make_divisible(w_bot, g, min_ratio) for w_bot, g in zip(bottleneck_widths, groups)]\n    else:\n        bottleneck_widths = [quantize_float(w_bot, g) for w_bot, g in zip(bottleneck_widths, groups)]\n    widths = [int(w_bot / b) for w_bot, b in zip(bottleneck_widths, bottle_ratios)]\n    return widths, groups\n\n\ndef generate_regnet(\n        width_slope: float,\n        width_initial: 
int,
        width_mult: float,
        depth: int,
        group_size: int,
        quant: int = 8
) -> Tuple[List[int], int, List[int]]:
    """Generates per block widths from RegNet parameters.

    Args:
        width_slope: Slope parameter for width progression.
        width_initial: Initial width.
        width_mult: Width multiplier.
        depth: Network depth.
        group_size: Group convolution size.
        quant: Quantization factor (per-block widths are rounded to multiples of this).

    Returns:
        Tuple of (widths, num_stages, groups).
    """
    assert width_slope >= 0 and width_initial > 0 and width_mult > 1 and width_initial % quant == 0
    # TODO dWr scaling?
    # depth = int(depth * (scale ** 0.1))
    # width_scale = scale ** 0.4  # dWr scale, exp 0.8 / 2, applied to both group and layer widths
    widths_cont = torch.arange(depth, dtype=torch.float32) * width_slope + width_initial
    width_exps = torch.round(torch.log(widths_cont / width_initial) / math.log(width_mult))
    widths = torch.round((width_initial * torch.pow(width_mult, width_exps)) / quant) * quant
    # NOTE max_stage is computed but currently unused; num_stages (unique widths) drives stage layout
    num_stages, max_stage = len(torch.unique(widths)), int(width_exps.max().item()) + 1
    groups = torch.tensor([group_size for _ in range(num_stages)], dtype=torch.int32)
    return widths.int().tolist(), num_stages, groups.tolist()


def downsample_conv(
        in_chs: int,
        out_chs: int,
        kernel_size: int = 1,
        stride: int = 1,
        dilation: int = 1,
        norm_layer: Optional[Type[nn.Module]] = None,
        preact: bool = False,
        device=None,
        dtype=None,
) -> nn.Module:
    """Create convolutional downsampling module.

    Args:
        in_chs: Input channels.
        out_chs: Output channels.
        kernel_size: Convolution kernel size.
        stride: Convolution stride.
        dilation: Convolution dilation.
        norm_layer: Normalization layer (defaults to BatchNorm2d when None).
        preact: Use pre-activation (bare conv, no norm).

    Returns:
        Downsampling module.
    """
    dd = {'device': device, 'dtype': dtype}
    norm_layer = norm_layer or nn.BatchNorm2d
    # collapse to a 1x1 conv when no spatial change is needed
    kernel_size = 1 if stride == 1 and dilation == 1 else kernel_size
    dilation = dilation if kernel_size > 1 else 1
    if preact:
        return create_conv2d(
            in_chs,
            out_chs,
            kernel_size,
            stride=stride,
            dilation=dilation,
            **dd,
        )
    else:
        return ConvNormAct(
            in_chs,
            out_chs,
            kernel_size,
            stride=stride,
            dilation=dilation,
            norm_layer=norm_layer,
            apply_act=False,
            **dd,
        )


def downsample_avg(
        in_chs: int,
        out_chs: int,
        kernel_size: int = 1,
        stride: int = 1,
        dilation: int = 1,
        norm_layer: Optional[Type[nn.Module]] = None,
        preact: bool = False,
        device=None,
        dtype=None,
) -> nn.Sequential:
    """Create average pool downsampling module.

    AvgPool Downsampling as in 'D' ResNet variants. This is not in RegNet space but I might experiment.

    Args:
        in_chs: Input channels.
        out_chs: Output channels.
        kernel_size: Convolution kernel size (unused; conv is always 1x1).
        stride: Convolution stride.
        dilation: Convolution dilation.
        norm_layer: Normalization layer (defaults to BatchNorm2d when None).
        preact: Use pre-activation (bare conv, no norm).

    Returns:
        Sequential downsampling module (pool then 1x1 conv).
    """
    dd = {'device': device, 'dtype': dtype}
    norm_layer = norm_layer or nn.BatchNorm2d
    # when dilated, spatial reduction is handled by dilation instead of pool stride
    avg_stride = stride if dilation == 1 else 1
    pool = nn.Identity()
    if stride > 1 or dilation > 1:
        avg_pool_fn = AvgPool2dSame if avg_stride == 1 and dilation > 1 else nn.AvgPool2d
        pool = avg_pool_fn(2, avg_stride, ceil_mode=True, count_include_pad=False)
    if preact:
        conv = create_conv2d(in_chs, out_chs, 1, stride=1, **dd)
    else:
        conv = ConvNormAct(in_chs, out_chs, 1, stride=1, norm_layer=norm_layer, apply_act=False, **dd)
    return nn.Sequential(*[pool, conv])


def create_shortcut(
        downsample_type: Optional[str],
        in_chs: int,
        out_chs: int,
        kernel_size: int,
        stride: int,
        dilation: Tuple[int, int] = (1, 1),
        norm_layer: Optional[Type[nn.Module]] = None,
        preact: bool = False,
        device=None,
        dtype=None,
) -> Optional[nn.Module]:
    """Create shortcut connection for residual blocks.

    Args:
        downsample_type: Type of downsampling ('avg', 'conv1x1', '' or None).
        in_chs: Input channels.
        out_chs: Output channels.
        kernel_size: Kernel size for conv downsampling.
        stride: Stride for downsampling.
        dilation: Dilation rates (first applied to the downsample path).
        norm_layer: Normalization layer.
        preact: Use pre-activation.

    Returns:
        Shortcut module, or None when shape changes but no downsample type is set.
    """
    dd = {'device': device, 'dtype': dtype}
    assert downsample_type in ('avg', 'conv1x1', '', None)
    if in_chs != 
out_chs or stride != 1 or dilation[0] != dilation[1]:
        dargs = dict(stride=stride, dilation=dilation[0], norm_layer=norm_layer, preact=preact, **dd)
        if not downsample_type:
            return None  # no shortcut, no downsample
        elif downsample_type == 'avg':
            return downsample_avg(in_chs, out_chs, **dargs)
        else:
            return downsample_conv(in_chs, out_chs, kernel_size=kernel_size, **dargs)
    else:
        return nn.Identity()  # identity shortcut (no downsample)


class Bottleneck(nn.Module):
    """RegNet Bottleneck block.

    This is almost exactly the same as a ResNet Bottleneck. The main difference is the SE block is moved from
    after conv3 to after conv2. Otherwise, it's just redefining the arguments for groups/bottleneck channels.
    """

    def __init__(
            self,
            in_chs: int,
            out_chs: int,
            stride: int = 1,
            dilation: Tuple[int, int] = (1, 1),
            bottle_ratio: float = 1,
            group_size: int = 1,
            se_ratio: float = 0.25,
            downsample: str = 'conv1x1',
            linear_out: bool = False,
            act_layer: Type[nn.Module] = nn.ReLU,
            norm_layer: Type[nn.Module] = nn.BatchNorm2d,
            drop_block: Optional[Type[nn.Module]] = None,
            drop_path_rate: float = 0.,
            device=None,
            dtype=None,
    ):
        """Initialize RegNet Bottleneck block.

        Args:
            in_chs: Input channels.
            out_chs: Output channels.
            stride: Convolution stride.
            dilation: Dilation rates for conv2 and shortcut.
            bottle_ratio: Bottleneck ratio (reduction factor).
            group_size: Group convolution size.
            se_ratio: Squeeze-and-excitation ratio (0 disables SE).
            downsample: Shortcut downsampling type.
            linear_out: Use linear activation for output.
            act_layer: Activation layer.
            norm_layer: Normalization layer.
            drop_block: Drop block layer.
            drop_path_rate: Stochastic depth drop rate.
        """
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        act_layer = get_act_layer(act_layer)
        bottleneck_chs = int(round(out_chs * bottle_ratio))
        groups = bottleneck_chs // group_size

        cargs = dict(act_layer=act_layer, norm_layer=norm_layer)
        self.conv1 = ConvNormAct(in_chs, bottleneck_chs, kernel_size=1, **cargs, **dd)
        self.conv2 = ConvNormAct(
            bottleneck_chs,
            bottleneck_chs,
            kernel_size=3,
            stride=stride,
            dilation=dilation[0],
            groups=groups,
            drop_layer=drop_block,
            **cargs,
            **dd,
        )
        if se_ratio:
            # NOTE SE reduction channels are derived from the block input chs, not bottleneck chs
            se_channels = int(round(in_chs * se_ratio))
            self.se = SEModule(bottleneck_chs, rd_channels=se_channels, act_layer=act_layer, **dd)
        else:
            self.se = nn.Identity()
        self.conv3 = ConvNormAct(bottleneck_chs, out_chs, kernel_size=1, apply_act=False, **cargs, **dd)
        self.act3 = nn.Identity() if linear_out else act_layer()
        self.downsample = create_shortcut(
            downsample,
            in_chs,
            out_chs,
            kernel_size=1,
            stride=stride,
            dilation=dilation,
            norm_layer=norm_layer,
            **dd,
        )
        self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0 else nn.Identity()

    def zero_init_last(self) -> None:
        """Zero-initialize the last batch norm in the block."""
        nn.init.zeros_(self.conv3.bn.weight)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Forward pass.

        Args:
            x: Input tensor.

        Returns:
            Output tensor.
        """
        shortcut = x
        x = self.conv1(x)
        x = self.conv2(x)
        x = self.se(x)
        x = self.conv3(x)
        if self.downsample is not None:
            # NOTE stuck with downsample as the attr name due to weight compatibility
            # now represents the shortcut, no shortcut if None, and non-downsample shortcut == nn.Identity()
            x = self.drop_path(x) + self.downsample(shortcut)
        x = self.act3(x)
        return x


class PreBottleneck(nn.Module):
    """Pre-activation RegNet Bottleneck block.

    Similar to Bottleneck but with pre-activation normalization.
    """

    def __init__(
            self,
            in_chs: int,
            out_chs: int,
            stride: int = 1,
            dilation: Tuple[int, int] = (1, 1),
            bottle_ratio: float = 1,
            group_size: int = 1,
            se_ratio: float = 0.25,
            downsample: str = 'conv1x1',
            linear_out: bool = False,
            act_layer: Type[nn.Module] = nn.ReLU,
            norm_layer: Type[nn.Module] = nn.BatchNorm2d,
            drop_block: Optional[Type[nn.Module]] = None,
            drop_path_rate: float = 0.,
            device=None,
            dtype=None,
    ):
        """Initialize pre-activation RegNet Bottleneck block.

        Args:
            in_chs: Input channels.
            out_chs: Output channels.
            stride: Convolution stride.
            dilation: Dilation rates for conv2 and shortcut.
            bottle_ratio: Bottleneck ratio (reduction factor).
            group_size: Group convolution size.
            se_ratio: Squeeze-and-excitation ratio (0 disables SE).
            downsample: Shortcut downsampling type.
            linear_out: Use linear activation for output (unused in pre-act variant).
            act_layer: Activation layer.
            norm_layer: Normalization layer.
            drop_block: Drop block layer (unused in pre-act variant).
            drop_path_rate: Stochastic depth drop rate.
"""
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        norm_act_layer = get_norm_act_layer(norm_layer, act_layer)
        bottleneck_chs = int(round(out_chs * bottle_ratio))
        groups = bottleneck_chs // group_size

        self.norm1 = norm_act_layer(in_chs, **dd)
        self.conv1 = create_conv2d(in_chs, bottleneck_chs, kernel_size=1, **dd)
        self.norm2 = norm_act_layer(bottleneck_chs, **dd)
        self.conv2 = create_conv2d(
            bottleneck_chs,
            bottleneck_chs,
            kernel_size=3,
            stride=stride,
            dilation=dilation[0],
            groups=groups,
            **dd,
        )
        if se_ratio:
            # NOTE SE reduction channels derived from block input chs, matching Bottleneck
            se_channels = int(round(in_chs * se_ratio))
            self.se = SEModule(bottleneck_chs, rd_channels=se_channels, act_layer=act_layer, **dd)
        else:
            self.se = nn.Identity()
        self.norm3 = norm_act_layer(bottleneck_chs, **dd)
        self.conv3 = create_conv2d(bottleneck_chs, out_chs, kernel_size=1, **dd)
        self.downsample = create_shortcut(
            downsample,
            in_chs,
            out_chs,
            kernel_size=1,
            stride=stride,
            dilation=dilation,
            preact=True,
            **dd,
        )
        self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0 else nn.Identity()

    def zero_init_last(self) -> None:
        """Zero-initialize the last batch norm (no-op for pre-activation)."""
        pass

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Forward pass.

        Args:
            x: Input tensor.

        Returns:
            Output tensor.
        """
        x = self.norm1(x)
        # shortcut branches off AFTER norm1 in the pre-activation scheme
        shortcut = x
        x = self.conv1(x)
        x = self.norm2(x)
        x = self.conv2(x)
        x = self.se(x)
        x = self.norm3(x)
        x = self.conv3(x)
        if self.downsample is not None:
            # NOTE stuck with downsample as the attr name due to weight compatibility
            # now represents the shortcut, no shortcut if None, and non-downsample shortcut == nn.Identity()
            x = self.drop_path(x) + self.downsample(shortcut)
        return x


class RegStage(nn.Module):
    """RegNet stage (sequence of blocks with the same output shape).

    A stage consists of multiple bottleneck blocks with the same output dimensions.
    """

    def __init__(
            self,
            depth: int,
            in_chs: int,
            out_chs: int,
            stride: int,
            dilation: int,
            drop_path_rates: Optional[List[float]] = None,
            block_fn: Type[nn.Module] = Bottleneck,
            **block_kwargs,
    ):
        """Initialize RegNet stage.

        Args:
            depth: Number of blocks in stage.
            in_chs: Input channels.
            out_chs: Output channels.
            stride: Stride for first block.
            dilation: Dilation rate.
            drop_path_rates: Drop path rates for each block.
            block_fn: Block class to use.
            **block_kwargs: Additional block arguments.
        """
        super().__init__()
        self.grad_checkpointing = False

        first_dilation = 1 if dilation in (1, 2) else 2
        for i in range(depth):
            # only the first block in a stage downsamples / changes channel count
            block_stride = stride if i == 0 else 1
            block_in_chs = in_chs if i == 0 else out_chs
            block_dilation = (first_dilation, dilation)
            dpr = drop_path_rates[i] if drop_path_rates is not None else 0.
            name = "b{}".format(i + 1)
            self.add_module(
                name,
                block_fn(
                    block_in_chs,
                    out_chs,
                    stride=block_stride,
                    dilation=block_dilation,
                    drop_path_rate=dpr,
**block_kwargs,
                )
            )
            first_dilation = dilation

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Forward pass through all blocks in the stage.

        Args:
            x: Input tensor.

        Returns:
            Output tensor.
        """
        if self.grad_checkpointing and not torch.jit.is_scripting():
            x = checkpoint_seq(self.children(), x)
        else:
            for block in self.children():
                x = block(x)
        return x


class RegNet(nn.Module):
    """RegNet-X, Y, and Z Models.

    Paper: https://arxiv.org/abs/2003.13678
    Original Impl: https://github.com/facebookresearch/pycls/blob/master/pycls/models/regnet.py
    """

    def __init__(
            self,
            cfg: RegNetCfg,
            in_chans: int = 3,
            num_classes: int = 1000,
            output_stride: int = 32,
            global_pool: str = 'avg',
            drop_rate: float = 0.,
            drop_path_rate: float = 0.,
            zero_init_last: bool = True,
            device=None,
            dtype=None,
            **kwargs,
    ):
        """Initialize RegNet model.

        Args:
            cfg: Model architecture configuration.
            in_chans: Number of input channels.
            num_classes: Number of classifier classes.
            output_stride: Output stride of network, one of (8, 16, 32).
            global_pool: Global pooling type.
            drop_rate: Dropout rate.
            drop_path_rate: Stochastic depth drop-path rate.
            zero_init_last: Zero-init last weight of residual path.
            kwargs: Extra kwargs overlayed onto cfg.
        """
        super().__init__()
        dd = {'device': device, 'dtype': dtype}
        self.num_classes = num_classes
        self.in_chans = in_chans
        self.drop_rate = drop_rate
        assert output_stride in (8, 16, 32)
        cfg = replace(cfg, **kwargs)  # update cfg with extra passed kwargs

        # Construct the stem
        stem_width = cfg.stem_width
        na_args = dict(act_layer=cfg.act_layer, norm_layer=cfg.norm_layer)
        if cfg.preact:
            # pre-act variant: bare conv stem, first norm lives in the first block
            self.stem = create_conv2d(in_chans, stem_width, 3, stride=2, **dd)
        else:
            self.stem = ConvNormAct(in_chans, stem_width, 3, stride=2, **na_args, **dd)
        self.feature_info = [dict(num_chs=stem_width, reduction=2, module='stem')]

        # Construct the stages
        prev_width = stem_width
        curr_stride = 2
        per_stage_args, common_args = self._get_stage_args(
            cfg,
            output_stride=output_stride,
            drop_path_rate=drop_path_rate,
        )
        assert len(per_stage_args) == 4
        block_fn = PreBottleneck if cfg.preact else Bottleneck
        for i, stage_args in enumerate(per_stage_args):
            stage_name = "s{}".format(i + 1)
            self.add_module(
                stage_name,
                RegStage(
                    in_chs=prev_width,
                    block_fn=block_fn,
                    **stage_args,
                    **common_args,
                    **dd,
                )
            )
            prev_width = stage_args['out_chs']
            curr_stride *= stage_args['stride']
            self.feature_info += [dict(num_chs=prev_width, reduction=curr_stride, module=stage_name)]

        # Construct the head
        if cfg.num_features:
            self.final_conv = ConvNormAct(prev_width, cfg.num_features, kernel_size=1, **na_args, **dd)
            self.num_features = cfg.num_features
        else:
            final_act = cfg.linear_out or cfg.preact
            self.final_conv = get_act_layer(cfg.act_layer)() if final_act else nn.Identity()
            self.num_features = prev_width
        self.head_hidden_size = self.num_features
        self.head = ClassifierHead(
            in_features=self.num_features,
            num_classes=num_classes,
            pool_type=global_pool,
            drop_rate=drop_rate,
            **dd,
        )

        named_apply(partial(_init_weights, zero_init_last=zero_init_last), self)

    def _get_stage_args(
            self,
            cfg: RegNetCfg,
            default_stride: int = 2,
            output_stride: int = 32,
            drop_path_rate: float = 0.
    ) -> Tuple[List[Dict[str, Any]], Dict[str, Any]]:
        """Generate stage arguments from configuration.

        Args:
            cfg: RegNet configuration.
            default_stride: Default stride for stages.
            output_stride: Target output stride.
            drop_path_rate: Stochastic depth rate.

        Returns:
            Tuple of (per_stage_args, common_args).
        """
        # Generate RegNet ws per block
        widths, num_stages, stage_gs = generate_regnet(cfg.wa, cfg.w0, cfg.wm, cfg.depth, cfg.group_size)

        # Convert to per stage format
        stage_widths, stage_depths = torch.unique(torch.tensor(widths), return_counts=True)
        stage_widths, stage_depths = stage_widths.tolist(), stage_depths.tolist()
        stage_br = [cfg.bottle_ratio for _ in range(num_stages)]
        stage_strides = []
        stage_dilations = []
        net_stride = 2
        dilation = 1
        for _ in range(num_stages):
            # once target output_stride is reached, switch from striding to dilation
            if net_stride >= output_stride:
                dilation *= default_stride
                stride = 1
            else:
                stride = default_stride
                net_stride *= stride
            stage_strides.append(stride)
            stage_dilations.append(dilation)
        stage_dpr = calculate_drop_path_rates(drop_path_rate, stage_depths, stagewise=True)
        # Adjust the compatibility of ws and gws
        stage_widths, stage_gs = adjust_widths_groups_comp(
            stage_widths, stage_br, stage_gs, 
min_ratio=cfg.group_min_ratio)
        arg_names = ['out_chs', 'stride', 'dilation', 'depth', 'bottle_ratio', 'group_size', 'drop_path_rates']
        per_stage_args = [
            dict(zip(arg_names, params)) for params in
            zip(stage_widths, stage_strides, stage_dilations, stage_depths, stage_br, stage_gs, stage_dpr)
        ]
        common_args = dict(
            downsample=cfg.downsample,
            se_ratio=cfg.se_ratio,
            linear_out=cfg.linear_out,
            act_layer=cfg.act_layer,
            norm_layer=cfg.norm_layer,
        )
        return per_stage_args, common_args

    @torch.jit.ignore
    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:
        """Group parameters for optimization."""
        return dict(
            stem=r'^stem',
            blocks=r'^s(\d+)' if coarse else r'^s(\d+)\.b(\d+)',
        )

    @torch.jit.ignore
    def set_grad_checkpointing(self, enable: bool = True) -> None:
        """Enable or disable gradient checkpointing."""
        # NOTE(review): relies on child order being stem, s1..s4, final_conv, head;
        # the [1:-1] slice also touches final_conv, which merely gains an unused attribute
        for s in list(self.children())[1:-1]:
            s.grad_checkpointing = enable

    @torch.jit.ignore
    def get_classifier(self) -> nn.Module:
        """Get the classifier head."""
        return self.head.fc

    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None) -> None:
        """Reset the classifier head.

        Args:
            num_classes: Number of classes for new classifier.
            global_pool: Global pooling type.
        """
        self.num_classes = num_classes
        self.head.reset(num_classes, pool_type=global_pool)

    def forward_intermediates(
            self,
            x: torch.Tensor,
            indices: Optional[Union[int, List[int]]] = None,
            norm: bool = False,
            stop_early: bool = False,
            output_fmt: str = 'NCHW',
            intermediates_only: bool = False,
    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:
        """ Forward features that returns intermediates.

        Args:
            x: Input image tensor
            indices: Take last n blocks if int, all if None, select matching indices if sequence
            norm: Apply norm layer to compatible intermediates
            stop_early: Stop iterating over blocks when last desired intermediate hit
            output_fmt: Shape of intermediate feature outputs
            intermediates_only: Only return intermediate features
        Returns:
            List of intermediate tensors if intermediates_only, otherwise a tuple of
            (final features, list of intermediate tensors).
        """
        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'
        intermediates = []
        take_indices, max_index = feature_take_indices(5, indices)

        # forward pass
        feat_idx = 0
        x = self.stem(x)
        if feat_idx in take_indices:
            intermediates.append(x)

        layer_names = ('s1', 's2', 's3', 's4')
        if stop_early:
            layer_names = layer_names[:max_index]
        for n in layer_names:
            feat_idx += 1
            x = getattr(self, n)(x)  # won't work with torchscript, but keeps code reasonable, FML
            if feat_idx in take_indices:
                intermediates.append(x)

        if intermediates_only:
            return intermediates

        # only apply final_conv when the full backbone ran (not stopped early)
        if feat_idx == 4:
            x = self.final_conv(x)

        return x, intermediates

    def prune_intermediate_layers(
            self,
            indices: Union[int, List[int]] = 1,
            prune_norm: bool = False,
            prune_head: bool = True,
    ) -> List[int]:
        """Prune layers not required for specified intermediates.

        Args:
            indices: Indices of intermediate layers to keep.
            prune_norm: Whether to prune normalization layer.
            prune_head: Whether to prune the classifier head.

        Returns:
            List of indices that were kept.
        """
        take_indices, max_index = feature_take_indices(5, indices)
        layer_names = ('s1', 's2', 's3', 's4')
        layer_names = layer_names[max_index:]
        for n in layer_names:
            setattr(self, n, nn.Identity())
        if max_index < 4:
            self.final_conv = nn.Identity()
        if prune_head:
            self.reset_classifier(0, '')
        return take_indices

    def forward_features(self, x: torch.Tensor) -> torch.Tensor:
        """Forward pass through feature extraction layers.

        Args:
            x: Input tensor.

        Returns:
            Feature tensor.
        """
        x = self.stem(x)
        x = self.s1(x)
        x = self.s2(x)
        x = self.s3(x)
        x = self.s4(x)
        x = self.final_conv(x)
        return x

    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:
        """Forward pass through classifier head.

        Args:
            x: Input features.
            pre_logits: Return features before final linear layer.

        Returns:
            Classification logits or features.
        """
        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Forward pass.

        Args:
            x: Input tensor.

        Returns:
            Output logits.
        """
        x = self.forward_features(x)
        x = self.forward_head(x)
        return x


def _init_weights(module: nn.Module, name: str = '', zero_init_last: bool = False) -> None:
    """Initialize module weights.

    Args:
        module: PyTorch module to initialize.
        name: Module name.
        zero_init_last: Zero-initialize last layer weights.
    """
    if isinstance(module, nn.Conv2d):
        # Kaiming-style fan-out init for convs
        fan_out = module.kernel_size[0] * module.kernel_size[1] * module.out_channels
        fan_out //= 
module.groups
        module.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
        if module.bias is not None:
            module.bias.data.zero_()
    elif isinstance(module, nn.Linear):
        nn.init.normal_(module.weight, mean=0.0, std=0.01)
        if module.bias is not None:
            nn.init.zeros_(module.bias)
    elif zero_init_last and hasattr(module, 'zero_init_last'):
        module.zero_init_last()


def _filter_fn(state_dict: Dict[str, Any]) -> Dict[str, Any]:
    """Filter and remap state dict keys for compatibility.

    Handles three checkpoint layouts: pycls-style ('model' wrapper), classy-vision /
    vissl (SEER), and torchvision; anything else is passed through unchanged.

    Args:
        state_dict: Raw state dictionary.

    Returns:
        Filtered state dictionary.
    """
    state_dict = state_dict.get('model', state_dict)
    # ordered substring replacements; more specific patterns must precede their prefixes
    replaces = [
        ('f.a.0', 'conv1.conv'),
        ('f.a.1', 'conv1.bn'),
        ('f.b.0', 'conv2.conv'),
        ('f.b.1', 'conv2.bn'),
        ('f.final_bn', 'conv3.bn'),
        ('f.se.excitation.0', 'se.fc1'),
        ('f.se.excitation.2', 'se.fc2'),
        ('f.se', 'se'),
        ('f.c.0', 'conv3.conv'),
        ('f.c.1', 'conv3.bn'),
        ('f.c', 'conv3.conv'),
        ('proj.0', 'downsample.conv'),
        ('proj.1', 'downsample.bn'),
        ('proj', 'downsample.conv'),
    ]
    if 'classy_state_dict' in state_dict:
        # classy-vision & vissl (SEER) weights
        import re
        state_dict = state_dict['classy_state_dict']['base_model']['model']
        out = {}
        for k, v in state_dict['trunk'].items():
            k = k.replace('_feature_blocks.conv1.stem.0', 'stem.conv')
            k = k.replace('_feature_blocks.conv1.stem.1', 'stem.bn')
            k = re.sub(
                r'^_feature_blocks.res\d.block(\d)-(\d+)',
                lambda x: f's{int(x.group(1))}.b{int(x.group(2)) + 1}', k)
            k = re.sub(r's(\d)\.b(\d+)\.bn', r's\1.b\2.downsample.bn', k)
            for s, r in replaces:
                k = k.replace(s, r)
            out[k] = v
        for k, v in state_dict['heads'].items():
            if 'projection_head' in k or 'prototypes' in k:
                continue
            k = k.replace('0.clf.0', 'head.fc')
            out[k] = v
        return out
    if 'stem.0.weight' in state_dict:
        # torchvision weights
        import re
        out = {}
        for k, v in state_dict.items():
            k = k.replace('stem.0', 'stem.conv')
            k = k.replace('stem.1', 'stem.bn')
            k = re.sub(
                r'trunk_output.block(\d)\.block(\d+)\-(\d+)',
                lambda x: f's{int(x.group(1))}.b{int(x.group(3)) + 1}', k)
            for s, r in replaces:
                k = k.replace(s, r)
            k = k.replace('fc.', 'head.fc.')
            out[k] = v
        return out
    return state_dict


# Model FLOPS = three trailing digits * 10^8
model_cfgs = dict(
    # RegNet-X
    regnetx_002=RegNetCfg(w0=24, wa=36.44, wm=2.49, group_size=8, depth=13),
    regnetx_004=RegNetCfg(w0=24, wa=24.48, wm=2.54, group_size=16, depth=22),
    regnetx_004_tv=RegNetCfg(w0=24, wa=24.48, wm=2.54, group_size=16, depth=22, group_min_ratio=0.9),
    regnetx_006=RegNetCfg(w0=48, wa=36.97, wm=2.24, group_size=24, depth=16),
    regnetx_008=RegNetCfg(w0=56, wa=35.73, wm=2.28, group_size=16, depth=16),
    regnetx_016=RegNetCfg(w0=80, wa=34.01, wm=2.25, group_size=24, depth=18),
    regnetx_032=RegNetCfg(w0=88, wa=26.31, wm=2.25, group_size=48, depth=25),
    regnetx_040=RegNetCfg(w0=96, wa=38.65, wm=2.43, group_size=40, depth=23),
    regnetx_064=RegNetCfg(w0=184, wa=60.83, wm=2.07, group_size=56, depth=17),
    regnetx_080=RegNetCfg(w0=80, wa=49.56, wm=2.88, group_size=120, depth=23),
    regnetx_120=RegNetCfg(w0=168, wa=73.36, wm=2.37, group_size=112, depth=19),
    regnetx_160=RegNetCfg(w0=216, wa=55.59, wm=2.1, group_size=128, depth=22),
    regnetx_320=RegNetCfg(w0=320, wa=69.86, wm=2.0, group_size=168, depth=23),

    # RegNet-Y
    
regnety_002=RegNetCfg(w0=24, wa=36.44, wm=2.49, group_size=8, depth=13, se_ratio=0.25),
    regnety_004=RegNetCfg(w0=48, wa=27.89, wm=2.09, group_size=8, depth=16, se_ratio=0.25),
    regnety_006=RegNetCfg(w0=48, wa=32.54, wm=2.32, group_size=16, depth=15, se_ratio=0.25),
    regnety_008=RegNetCfg(w0=56, wa=38.84, wm=2.4, group_size=16, depth=14, se_ratio=0.25),
    regnety_008_tv=RegNetCfg(w0=56, wa=38.84, wm=2.4, group_size=16, depth=14, se_ratio=0.25, group_min_ratio=0.9),
    regnety_016=RegNetCfg(w0=48, wa=20.71, wm=2.65, group_size=24, depth=27, se_ratio=0.25),
    regnety_032=RegNetCfg(w0=80, wa=42.63, wm=2.66, group_size=24, depth=21, se_ratio=0.25),
    regnety_040=RegNetCfg(w0=96, wa=31.41, wm=2.24, group_size=64, depth=22, se_ratio=0.25),
    regnety_064=RegNetCfg(w0=112, wa=33.22, wm=2.27, group_size=72, depth=25, se_ratio=0.25),
    regnety_080=RegNetCfg(w0=192, wa=76.82, wm=2.19, group_size=56, depth=17, se_ratio=0.25),
    regnety_080_tv=RegNetCfg(w0=192, wa=76.82, wm=2.19, group_size=56, depth=17, se_ratio=0.25, group_min_ratio=0.9),
    regnety_120=RegNetCfg(w0=168, wa=73.36, wm=2.37, group_size=112, depth=19, se_ratio=0.25),
    regnety_160=RegNetCfg(w0=200, wa=106.23, wm=2.48, group_size=112, depth=18, se_ratio=0.25),
    regnety_320=RegNetCfg(w0=232, wa=115.89, wm=2.53, group_size=232, depth=20, se_ratio=0.25),
    regnety_640=RegNetCfg(w0=352, wa=147.48, wm=2.4, group_size=328, depth=20, se_ratio=0.25),
    regnety_1280=RegNetCfg(w0=456, wa=160.83, wm=2.52, group_size=264, depth=27, se_ratio=0.25),
    regnety_2560=RegNetCfg(w0=640, wa=230.83, wm=2.53, group_size=373, depth=27, se_ratio=0.25),
    #regnety_2560=RegNetCfg(w0=640, wa=124.47, wm=2.04, group_size=848, depth=27, se_ratio=0.25),

    # Experimental
    regnety_040_sgn=RegNetCfg(
        w0=96, wa=31.41, wm=2.24, group_size=64, depth=22, se_ratio=0.25,
        act_layer='silu', norm_layer=partial(GroupNormAct, group_size=16)),

    # regnetv = 'preact regnet y'
    regnetv_040=RegNetCfg(
        depth=22, w0=96, wa=31.41, wm=2.24, group_size=64, se_ratio=0.25, preact=True, act_layer='silu'),
    regnetv_064=RegNetCfg(
        depth=25, w0=112, wa=33.22, wm=2.27, group_size=72, se_ratio=0.25, preact=True, act_layer='silu',
        downsample='avg'),

    # RegNet-Z (unverified)
    regnetz_005=RegNetCfg(
        depth=21, w0=16, wa=10.7, wm=2.51, group_size=4, bottle_ratio=4.0, se_ratio=0.25,
        downsample=None, linear_out=True, num_features=1024, act_layer='silu',
    ),
    regnetz_040=RegNetCfg(
        depth=28, w0=48, wa=14.5, wm=2.226, group_size=8, bottle_ratio=4.0, se_ratio=0.25,
        downsample=None, linear_out=True, num_features=0, act_layer='silu',
    ),
    regnetz_040_h=RegNetCfg(
        depth=28, w0=48, wa=14.5, wm=2.226, group_size=8, bottle_ratio=4.0, se_ratio=0.25,
        downsample=None, linear_out=True, num_features=1536, act_layer='silu',
    ),
)


def _create_regnet(variant: str, pretrained: bool, **kwargs) -> RegNet:
    """Create a RegNet model.

    Args:
        variant: Model variant name (key into model_cfgs).
        pretrained: Load pretrained weights.
        **kwargs: Additional model arguments.

    Returns:
        RegNet model instance.
    """
    return build_model_with_cfg(
        RegNet, variant, pretrained,
        model_cfg=model_cfgs[variant],
        pretrained_filter_fn=_filter_fn,
        **kwargs)


def _cfg(url: str = '', **kwargs) -> Dict[str, Any]:
    """Create default configuration dictionary.

    Args:
        url: Model weight URL.
        **kwargs: Additional configuration options.

    Returns:
        Configuration dictionary.
    """
    return {
        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),
        'test_input_size': (3, 288, 288), 'crop_pct': 0.95, 'test_crop_pct': 1.0,
        'interpolation': 'bicubic', 'mean': IMAGENET_DEFAULT_MEAN, 'std': 
IMAGENET_DEFAULT_STD,
        'first_conv': 'stem.conv', 'classifier': 'head.fc',
        'license': 'apache-2.0', **kwargs
    }


def _cfgpyc(url: str = '', **kwargs) -> Dict[str, Any]:
    """Create pycls configuration dictionary.

    Args:
        url: Model weight URL.
        **kwargs: Additional configuration options.

    Returns:
        Configuration dictionary.
    """
    return {
        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),
        'crop_pct': 0.875, 'interpolation': 'bicubic',
        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,
        'first_conv': 'stem.conv', 'classifier': 'head.fc',
        'license': 'mit', 'origin_url': 'https://github.com/facebookresearch/pycls', **kwargs
    }


def _cfgtv2(url: str = '', **kwargs) -> Dict[str, Any]:
    """Create torchvision v2 configuration dictionary.

    Args:
        url: Model weight URL.
        **kwargs: Additional configuration options.

    Returns:
        Configuration dictionary.
    """
    return {
        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),
        'crop_pct': 0.965, 'interpolation': 'bicubic',
        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,
        'first_conv': 'stem.conv', 'classifier': 'head.fc',
        'license': 'bsd-3-clause', 'origin_url': 'https://github.com/pytorch/vision', **kwargs
    }


default_cfgs = generate_default_cfgs({
    # timm trained models
    'regnety_032.ra_in1k': _cfg(
        hf_hub_id='timm/',
        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-weights/regnety_032_ra-7f2439f9.pth'),
    'regnety_040.ra3_in1k': _cfg(
        hf_hub_id='timm/',
        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-tpu-weights/regnety_040_ra3-670e1166.pth'),
    'regnety_064.ra3_in1k': _cfg(
        hf_hub_id='timm/',
        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-tpu-weights/regnety_064_ra3-aa26dc7d.pth'),
    'regnety_080.ra3_in1k': _cfg(
        hf_hub_id='timm/',
        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-tpu-weights/regnety_080_ra3-1fdc4344.pth'),
    'regnety_120.sw_in12k_ft_in1k': _cfg(hf_hub_id='timm/'),
    'regnety_160.sw_in12k_ft_in1k': _cfg(hf_hub_id='timm/'),
    'regnety_160.lion_in12k_ft_in1k': _cfg(hf_hub_id='timm/'),

    # timm in12k pretrain
    'regnety_120.sw_in12k': _cfg(
        hf_hub_id='timm/',
        num_classes=11821),
    'regnety_160.sw_in12k': _cfg(
        hf_hub_id='timm/',
        num_classes=11821),

    # timm custom arch (v and z guess) + trained models
    'regnety_040_sgn.untrained': _cfg(url=''),
    'regnetv_040.ra3_in1k': _cfg(
        hf_hub_id='timm/',
        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-tpu-weights/regnetv_040_ra3-c248f51f.pth',
        first_conv='stem'),
    'regnetv_064.ra3_in1k': _cfg(
        hf_hub_id='timm/',
        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-tpu-weights/regnetv_064_ra3-530616c2.pth',
        first_conv='stem'),

    'regnetz_005.untrained': _cfg(url=''),
    'regnetz_040.ra3_in1k': _cfg(
        hf_hub_id='timm/',
        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-tpu-weights/regnetz_040_ra3-9007edf5.pth',
        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0, test_input_size=(3, 320, 320)),
    'regnetz_040_h.ra3_in1k': _cfg(
        hf_hub_id='timm/',
        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-tpu-weights/regnetz_040h_ra3-f594343b.pth',
        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0, test_input_size=(3, 320, 320)),

    # used in DeiT for distillation (from Facebook DeiT GitHub repository)
    
'regnety_160.deit_in1k': _cfg(\n        hf_hub_id='timm/', url='https://dl.fbaipublicfiles.com/deit/regnety_160-a5fe301d.pth'),\n\n    'regnetx_004_tv.tv2_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_x_400mf-62229a5f.pth'),\n    'regnetx_008.tv2_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_x_800mf-94a99ebd.pth'),\n    'regnetx_016.tv2_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_x_1_6gf-a12f2b72.pth'),\n    'regnetx_032.tv2_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_x_3_2gf-7071aa85.pth'),\n    'regnetx_080.tv2_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_x_8gf-2b70d774.pth'),\n    'regnetx_160.tv2_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_x_16gf-ba3796d7.pth'),\n    'regnetx_320.tv2_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_x_32gf-6eb8fdc6.pth'),\n\n    'regnety_004.tv2_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_y_400mf-e6988f5f.pth'),\n    'regnety_008_tv.tv2_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_y_800mf-58fc7688.pth'),\n    'regnety_016.tv2_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_y_1_6gf-0d7bc02a.pth'),\n    'regnety_032.tv2_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_y_3_2gf-9180c971.pth'),\n    'regnety_080_tv.tv2_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_y_8gf-dc2b1b54.pth'),\n    'regnety_160.tv2_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        
url='https://download.pytorch.org/models/regnet_y_16gf-3e4a00f9.pth'),\n    'regnety_320.tv2_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_y_32gf-8db6d4b5.pth'),\n\n    'regnety_160.swag_ft_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_y_16gf_swag-43afe44d.pth', license='cc-by-nc-4.0',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0),\n    'regnety_320.swag_ft_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_y_32gf_swag-04fdfa75.pth', license='cc-by-nc-4.0',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0),\n    'regnety_1280.swag_ft_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_y_128gf_swag-c8ce3e52.pth', license='cc-by-nc-4.0',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0),\n\n    'regnety_160.swag_lc_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_y_16gf_lc_swag-f3ec0043.pth', license='cc-by-nc-4.0'),\n    'regnety_320.swag_lc_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_y_32gf_lc_swag-e1583746.pth', license='cc-by-nc-4.0'),\n    'regnety_1280.swag_lc_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/regnet_y_128gf_lc_swag-cbe8ce12.pth', license='cc-by-nc-4.0'),\n\n    'regnety_320.seer_ft_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        license='seer-license', origin_url='https://github.com/facebookresearch/vissl',\n        url='https://dl.fbaipublicfiles.com/vissl/model_zoo/seer_finetuned/seer_regnet32_finetuned_in1k_model_final_checkpoint_phase78.torch',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0),\n    'regnety_640.seer_ft_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        license='seer-license', 
origin_url='https://github.com/facebookresearch/vissl',\n        url='https://dl.fbaipublicfiles.com/vissl/model_zoo/seer_finetuned/seer_regnet64_finetuned_in1k_model_final_checkpoint_phase78.torch',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0),\n    'regnety_1280.seer_ft_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        license='seer-license', origin_url='https://github.com/facebookresearch/vissl',\n        url='https://dl.fbaipublicfiles.com/vissl/model_zoo/seer_finetuned/seer_regnet128_finetuned_in1k_model_final_checkpoint_phase78.torch',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0),\n    'regnety_2560.seer_ft_in1k': _cfgtv2(\n        hf_hub_id='timm/',\n        license='seer-license', origin_url='https://github.com/facebookresearch/vissl',\n        url='https://dl.fbaipublicfiles.com/vissl/model_zoo/seer_finetuned/seer_regnet256_finetuned_in1k_model_final_checkpoint_phase38.torch',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0),\n\n    'regnety_320.seer': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/vissl/model_zoo/seer_regnet32d/seer_regnet32gf_model_iteration244000.torch',\n        num_classes=0, license='seer-license', origin_url='https://github.com/facebookresearch/vissl'),\n    'regnety_640.seer': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/vissl/model_zoo/seer_regnet64/seer_regnet64gf_model_final_checkpoint_phase0.torch',\n        num_classes=0, license='seer-license', origin_url='https://github.com/facebookresearch/vissl'),\n    'regnety_1280.seer': _cfgtv2(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/vissl/model_zoo/swav_ig1b_regnet128Gf_cnstant_bs32_node16_sinkhorn10_proto16k_syncBN64_warmup8k/model_final_checkpoint_phase0.torch',\n        num_classes=0, license='seer-license', origin_url='https://github.com/facebookresearch/vissl'),\n    # FIXME invalid weight <-> model match, 
mistake on their end\n    #'regnety_2560.seer': _cfgtv2(\n    #    url='https://dl.fbaipublicfiles.com/vissl/model_zoo/swav_ig1b_cosine_rg256gf_noBNhead_wd1e5_fairstore_bs16_node64_sinkhorn10_proto16k_apex_syncBN64_warmup8k/model_final_checkpoint_phase0.torch',\n    #    num_classes=0, license='other', origin_url='https://github.com/facebookresearch/vissl'),\n\n    'regnetx_002.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnetx_004.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnetx_006.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnetx_008.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnetx_016.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnetx_032.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnetx_040.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnetx_064.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnetx_080.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnetx_120.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnetx_160.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnetx_320.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n\n    'regnety_002.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnety_004.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnety_006.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnety_008.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnety_016.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnety_032.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnety_040.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnety_064.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnety_080.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnety_120.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnety_160.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n    'regnety_320.pycls_in1k': _cfgpyc(hf_hub_id='timm/'),\n})\n\n\n@register_model\ndef regnetx_002(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetX-200MF\"\"\"\n    return _create_regnet('regnetx_002', pretrained, **kwargs)\n\n\n@register_model\ndef 
regnetx_004(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetX-400MF\"\"\"\n    return _create_regnet('regnetx_004', pretrained, **kwargs)\n\n\n@register_model\ndef regnetx_004_tv(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetX-400MF w/ torchvision group rounding\"\"\"\n    return _create_regnet('regnetx_004_tv', pretrained, **kwargs)\n\n\n@register_model\ndef regnetx_006(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetX-600MF\"\"\"\n    return _create_regnet('regnetx_006', pretrained, **kwargs)\n\n\n@register_model\ndef regnetx_008(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetX-800MF\"\"\"\n    return _create_regnet('regnetx_008', pretrained, **kwargs)\n\n\n@register_model\ndef regnetx_016(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetX-1.6GF\"\"\"\n    return _create_regnet('regnetx_016', pretrained, **kwargs)\n\n\n@register_model\ndef regnetx_032(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetX-3.2GF\"\"\"\n    return _create_regnet('regnetx_032', pretrained, **kwargs)\n\n\n@register_model\ndef regnetx_040(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetX-4.0GF\"\"\"\n    return _create_regnet('regnetx_040', pretrained, **kwargs)\n\n\n@register_model\ndef regnetx_064(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetX-6.4GF\"\"\"\n    return _create_regnet('regnetx_064', pretrained, **kwargs)\n\n\n@register_model\ndef regnetx_080(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetX-8.0GF\"\"\"\n    return _create_regnet('regnetx_080', pretrained, **kwargs)\n\n\n@register_model\ndef regnetx_120(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetX-12GF\"\"\"\n    return _create_regnet('regnetx_120', pretrained, **kwargs)\n\n\n@register_model\ndef regnetx_160(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetX-16GF\"\"\"\n    return _create_regnet('regnetx_160', pretrained, 
**kwargs)\n\n\n@register_model\ndef regnetx_320(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetX-32GF\"\"\"\n    return _create_regnet('regnetx_320', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_002(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-200MF\"\"\"\n    return _create_regnet('regnety_002', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_004(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-400MF\"\"\"\n    return _create_regnet('regnety_004', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_006(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-600MF\"\"\"\n    return _create_regnet('regnety_006', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_008(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-800MF\"\"\"\n    return _create_regnet('regnety_008', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_008_tv(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-800MF w/ torchvision group rounding\"\"\"\n    return _create_regnet('regnety_008_tv', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_016(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-1.6GF\"\"\"\n    return _create_regnet('regnety_016', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_032(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-3.2GF\"\"\"\n    return _create_regnet('regnety_032', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_040(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-4.0GF\"\"\"\n    return _create_regnet('regnety_040', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_064(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-6.4GF\"\"\"\n    return _create_regnet('regnety_064', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_080(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-8.0GF\"\"\"\n    return 
_create_regnet('regnety_080', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_080_tv(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-8.0GF w/ torchvision group rounding\"\"\"\n    return _create_regnet('regnety_080_tv', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_120(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-12GF\"\"\"\n    return _create_regnet('regnety_120', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_160(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-16GF\"\"\"\n    return _create_regnet('regnety_160', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_320(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-32GF\"\"\"\n    return _create_regnet('regnety_320', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_640(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-64GF\"\"\"\n    return _create_regnet('regnety_640', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_1280(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-128GF\"\"\"\n    return _create_regnet('regnety_1280', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_2560(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-256GF\"\"\"\n    return _create_regnet('regnety_2560', pretrained, **kwargs)\n\n\n@register_model\ndef regnety_040_sgn(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetY-4.0GF w/ GroupNorm \"\"\"\n    return _create_regnet('regnety_040_sgn', pretrained, **kwargs)\n\n\n@register_model\ndef regnetv_040(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetV-4.0GF (pre-activation)\"\"\"\n    return _create_regnet('regnetv_040', pretrained, **kwargs)\n\n\n@register_model\ndef regnetv_064(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetV-6.4GF (pre-activation)\"\"\"\n    return _create_regnet('regnetv_064', pretrained, **kwargs)\n\n\n@register_model\ndef regnetz_005(pretrained: bool 
= False, **kwargs) -> RegNet:\n    \"\"\"RegNetZ-500MF\n    NOTE: config found in https://github.com/facebookresearch/ClassyVision/blob/main/classy_vision/models/regnet.py\n    but it's not clear it is equivalent to paper model as not detailed in the paper.\n    \"\"\"\n    return _create_regnet('regnetz_005', pretrained, zero_init_last=False, **kwargs)\n\n\n@register_model\ndef regnetz_040(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetZ-4.0GF\n    NOTE: config found in https://github.com/facebookresearch/ClassyVision/blob/main/classy_vision/models/regnet.py\n    but it's not clear it is equivalent to paper model as not detailed in the paper.\n    \"\"\"\n    return _create_regnet('regnetz_040', pretrained, zero_init_last=False, **kwargs)\n\n\n@register_model\ndef regnetz_040_h(pretrained: bool = False, **kwargs) -> RegNet:\n    \"\"\"RegNetZ-4.0GF\n    NOTE: config found in https://github.com/facebookresearch/ClassyVision/blob/main/classy_vision/models/regnet.py\n    but it's not clear it is equivalent to paper model as not detailed in the paper.\n    \"\"\"\n    return _create_regnet('regnetz_040_h', pretrained, zero_init_last=False, **kwargs)\n\n\nregister_model_deprecations(__name__, {\n    'regnetz_040h': 'regnetz_040_h',\n})\n"
  },
  {
    "path": "timm/models/repghost.py",
    "content": "\"\"\"\nAn implementation of RepGhostNet Model as defined in:\nRepGhost: A Hardware-Efficient Ghost Module via Re-parameterization. https://arxiv.org/abs/2211.06088\n\nOriginal implementation: https://github.com/ChengpengChen/RepGhost\n\"\"\"\nimport copy\nfrom functools import partial\nfrom typing import List, Optional, Tuple, Union, Type\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import SelectAdaptivePool2d, Linear, make_divisible\nfrom ._builder import build_model_with_cfg\nfrom ._efficientnet_blocks import SqueezeExcite, ConvBnAct\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['RepGhostNet']\n\n\n_SE_LAYER = partial(SqueezeExcite, gate_layer='hard_sigmoid', rd_round_fn=partial(make_divisible, divisor=4))\n\n\nclass RepGhostModule(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 1,\n            dw_size: int = 3,\n            stride: int = 1,\n            relu: bool = True,\n            reparam: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.out_chs = out_chs\n        init_chs = out_chs\n        new_chs = out_chs\n\n        self.primary_conv = nn.Sequential(\n            nn.Conv2d(in_chs, init_chs, kernel_size, stride, kernel_size // 2, bias=False, **dd),\n            nn.BatchNorm2d(init_chs, **dd),\n            nn.ReLU(inplace=True) if relu else nn.Identity(),\n        )\n\n        fusion_conv = []\n        fusion_bn = []\n        if reparam:\n            fusion_conv.append(nn.Identity())\n            fusion_bn.append(nn.BatchNorm2d(init_chs, **dd))\n\n        self.fusion_conv = nn.Sequential(*fusion_conv)\n        
self.fusion_bn = nn.Sequential(*fusion_bn)\n\n        self.cheap_operation = nn.Sequential(\n            nn.Conv2d(init_chs, new_chs, dw_size, 1, dw_size//2, groups=init_chs, bias=False, **dd),\n            nn.BatchNorm2d(new_chs, **dd),\n            # nn.ReLU(inplace=True) if relu else nn.Identity(),\n        )\n        self.relu = nn.ReLU(inplace=False) if relu else nn.Identity()\n\n    def forward(self, x):\n        x1 = self.primary_conv(x)\n        x2 = self.cheap_operation(x1)\n        for conv, bn in zip(self.fusion_conv, self.fusion_bn):\n            x2 = x2 + bn(conv(x1))\n        return self.relu(x2)\n\n    def get_equivalent_kernel_bias(self):\n        kernel3x3, bias3x3 = self._fuse_bn_tensor(self.cheap_operation[0], self.cheap_operation[1])\n        for conv, bn in zip(self.fusion_conv, self.fusion_bn):\n            kernel, bias = self._fuse_bn_tensor(conv, bn, kernel3x3.shape[0], kernel3x3.device)\n            kernel3x3 += self._pad_1x1_to_3x3_tensor(kernel)\n            bias3x3 += bias\n        return kernel3x3, bias3x3\n\n    @staticmethod\n    def _pad_1x1_to_3x3_tensor(kernel1x1):\n        if kernel1x1 is None:\n            return 0\n        else:\n            return torch.nn.functional.pad(kernel1x1, [1, 1, 1, 1])\n\n    @staticmethod\n    def _fuse_bn_tensor(conv, bn, in_channels=None, device=None):\n        in_channels = in_channels if in_channels else bn.running_mean.shape[0]\n        device = device if device else bn.weight.device\n        if isinstance(conv, nn.Conv2d):\n            kernel = conv.weight\n            assert conv.bias is None\n        else:\n            assert isinstance(conv, nn.Identity)\n            kernel = torch.ones(in_channels, 1, 1, 1, device=device)\n\n        if isinstance(bn, nn.BatchNorm2d):\n            running_mean = bn.running_mean\n            running_var = bn.running_var\n            gamma = bn.weight\n            beta = bn.bias\n            eps = bn.eps\n            std = (running_var + eps).sqrt()\n          
  t = (gamma / std).reshape(-1, 1, 1, 1)\n            return kernel * t, beta - running_mean * gamma / std\n        assert isinstance(bn, nn.Identity)\n        return kernel, torch.zeros(in_channels).to(kernel.device)\n\n    def switch_to_deploy(self):\n        if len(self.fusion_conv) == 0 and len(self.fusion_bn) == 0:\n            return\n        kernel, bias = self.get_equivalent_kernel_bias()\n        dd = {'device': kernel.device, 'dtype': kernel.dtype}\n        self.cheap_operation = nn.Conv2d(\n            in_channels=self.cheap_operation[0].in_channels,\n            out_channels=self.cheap_operation[0].out_channels,\n            kernel_size=self.cheap_operation[0].kernel_size,\n            padding=self.cheap_operation[0].padding,\n            dilation=self.cheap_operation[0].dilation,\n            groups=self.cheap_operation[0].groups,\n            bias=True,\n            **dd)\n        self.cheap_operation.weight.data = kernel\n        self.cheap_operation.bias.data = bias\n        self.__delattr__('fusion_conv')\n        self.__delattr__('fusion_bn')\n        self.fusion_conv = []\n        self.fusion_bn = []\n\n    def reparameterize(self):\n        self.switch_to_deploy()\n\n\nclass RepGhostBottleneck(nn.Module):\n    \"\"\" RepGhost bottleneck w/ optional SE\"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            mid_chs: int,\n            out_chs: int,\n            dw_kernel_size: int = 3,\n            stride: int = 1,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            se_ratio: float = 0.,\n            reparam: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        has_se = se_ratio is not None and se_ratio > 0.\n        self.stride = stride\n\n        # Point-wise expansion\n        self.ghost1 = RepGhostModule(in_chs, mid_chs, relu=True, reparam=reparam, **dd)\n\n        # Depth-wise convolution\n        
if self.stride > 1:\n            self.conv_dw = nn.Conv2d(\n                mid_chs,\n                mid_chs,\n                dw_kernel_size,\n                stride=stride,\n                padding=(dw_kernel_size-1)//2,\n                groups=mid_chs,\n                bias=False,\n                **dd,\n            )\n            self.bn_dw = nn.BatchNorm2d(mid_chs, **dd)\n        else:\n            self.conv_dw = None\n            self.bn_dw = None\n\n        # Squeeze-and-excitation\n        self.se = _SE_LAYER(mid_chs, rd_ratio=se_ratio, **dd) if has_se else None\n\n        # Point-wise linear projection\n        self.ghost2 = RepGhostModule(mid_chs, out_chs, relu=False, reparam=reparam, **dd)\n\n        # shortcut\n        if in_chs == out_chs and self.stride == 1:\n            self.shortcut = nn.Sequential()\n        else:\n            self.shortcut = nn.Sequential(\n                nn.Conv2d(\n                    in_chs,\n                    in_chs,\n                    dw_kernel_size,\n                    stride=stride,\n                    padding=(dw_kernel_size-1)//2,\n                    groups=in_chs,\n                    bias=False,\n                    **dd,\n                ),\n                nn.BatchNorm2d(in_chs, **dd),\n                nn.Conv2d(in_chs, out_chs, 1, stride=1, padding=0, bias=False, **dd),\n                nn.BatchNorm2d(out_chs, **dd),\n            )\n\n    def forward(self, x):\n        shortcut = x\n\n        # 1st ghost bottleneck\n        x = self.ghost1(x)\n\n        # Depth-wise convolution\n        if self.conv_dw is not None:\n            x = self.conv_dw(x)\n            x = self.bn_dw(x)\n\n        # Squeeze-and-excitation\n        if self.se is not None:\n            x = self.se(x)\n\n        # 2nd ghost bottleneck\n        x = self.ghost2(x)\n\n        x += self.shortcut(shortcut)\n        return x\n\n\nclass RepGhostNet(nn.Module):\n    def __init__(\n            self,\n            cfgs: List[List[List]],\n        
    num_classes: int = 1000,\n            width: float = 1.0,\n            in_chans: int = 3,\n            output_stride: int = 32,\n            global_pool: str = 'avg',\n            drop_rate: float = 0.2,\n            reparam: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        # setting of inverted residual blocks\n        assert output_stride == 32, 'only output_stride==32 is valid, dilation not supported'\n        self.cfgs = cfgs\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n        self.feature_info = []\n\n        # building first layer\n        stem_chs = make_divisible(16 * width, 4)\n        self.conv_stem = nn.Conv2d(in_chans, stem_chs, 3, 2, 1, bias=False, **dd)\n        self.feature_info.append(dict(num_chs=stem_chs, reduction=2, module=f'conv_stem'))\n        self.bn1 = nn.BatchNorm2d(stem_chs, **dd)\n        self.act1 = nn.ReLU(inplace=True)\n        prev_chs = stem_chs\n\n        # building inverted residual blocks\n        stages = nn.ModuleList([])\n        block = RepGhostBottleneck\n        stage_idx = 0\n        net_stride = 2\n        for cfg in self.cfgs:\n            layers = []\n            s = 1\n            for k, exp_size, c, se_ratio, s in cfg:\n                out_chs = make_divisible(c * width, 4)\n                mid_chs = make_divisible(exp_size * width, 4)\n                layers.append(block(prev_chs, mid_chs, out_chs, k, s, se_ratio=se_ratio, reparam=reparam, **dd))\n                prev_chs = out_chs\n            if s > 1:\n                net_stride *= 2\n                self.feature_info.append(dict(\n                    num_chs=prev_chs, reduction=net_stride, module=f'blocks.{stage_idx}'))\n            stages.append(nn.Sequential(*layers))\n            stage_idx += 1\n\n        out_chs = make_divisible(exp_size * 
width * 2, 4)\n        stages.append(nn.Sequential(ConvBnAct(prev_chs, out_chs, 1, **dd)))\n        self.pool_dim = prev_chs = out_chs\n\n        self.blocks = nn.Sequential(*stages)\n\n        # building last several layers\n        self.num_features = prev_chs\n        self.head_hidden_size = out_chs = 1280\n        self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)\n        self.conv_head = nn.Conv2d(prev_chs, out_chs, 1, 1, 0, bias=True, **dd)\n        self.act2 = nn.ReLU(inplace=True)\n        self.flatten = nn.Flatten(1) if global_pool else nn.Identity()  # don't flatten if pooling disabled\n        self.classifier = Linear(out_chs, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^conv_stem|bn1',\n            blocks=[\n                (r'^blocks\\.(\\d+)' if coarse else r'^blocks\\.(\\d+)\\.(\\d+)', None),\n                (r'conv_head', (99999,))\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.classifier\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            # NOTE: cannot meaningfully change pooling of efficient head after creation\n            self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)\n            self.flatten = nn.Flatten(1) if global_pool else nn.Identity()  # don't flatten if pooling disabled\n        if num_classes > 0:\n            device = self.classifier.weight.device if hasattr(self.classifier, 'weight') else None\n            dtype = self.classifier.weight.dtype if hasattr(self.classifier, 'weight') else None\n            dd = {'device': device, 'dtype': dtype}\n     
       self.classifier = Linear(self.head_hidden_size, num_classes, **dd)\n        else:\n            self.classifier = nn.Identity()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        stage_ends = [-1] + [int(info['module'].split('.')[-1]) for info in self.feature_info[1:]]\n        take_indices, max_index = feature_take_indices(len(stage_ends), indices)\n        take_indices = [stage_ends[i]+1 for i in take_indices]\n        max_index = stage_ends[max_index]\n\n        # forward pass\n        feat_idx = 0\n        x = self.conv_stem(x)\n        if feat_idx in take_indices:\n            intermediates.append(x)\n        x = self.bn1(x)\n        x = self.act1(x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.blocks\n        else:\n            stages = self.blocks[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages, start=1):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = 
checkpoint_seq(stage, x)\n            else:\n                x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        stage_ends = [-1] + [int(info['module'].split('.')[-1]) for info in self.feature_info[1:]]\n        take_indices, max_index = feature_take_indices(len(stage_ends), indices)\n        max_index = stage_ends[max_index]\n        self.blocks = self.blocks[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.conv_stem(x)\n        x = self.bn1(x)\n        x = self.act1(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x, flatten=True)\n        else:\n            x = self.blocks(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        x = self.conv_head(x)\n        x = self.act2(x)\n        x = self.flatten(x)\n        if self.drop_rate > 0.:\n            x = F.dropout(x, p=self.drop_rate, training=self.training)\n        return x if pre_logits else self.classifier(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n    def convert_to_deploy(self):\n        repghost_model_convert(self, do_copy=False)\n\n\ndef repghost_model_convert(model: torch.nn.Module, save_path=None, do_copy=True):\n    \"\"\"\n    taken from https://github.com/DingXiaoH/RepVGG/blob/main/repvgg.py\n    \"\"\"\n 
   if do_copy:\n        model = copy.deepcopy(model)\n    for module in model.modules():\n        if hasattr(module, 'switch_to_deploy'):\n            module.switch_to_deploy()\n    if save_path is not None:\n        torch.save(model.state_dict(), save_path)\n    return model\n\n\ndef _create_repghostnet(variant, width=1.0, pretrained=False, **kwargs):\n    \"\"\"\n    Constructs a RepGhostNet model\n    \"\"\"\n    cfgs = [\n        # k, t, c, SE, s\n        # stage1\n        [[3,  8,  16, 0, 1]],\n        # stage2\n        [[3,  24,  24, 0, 2]],\n        [[3,  36,  24, 0, 1]],\n        # stage3\n        [[5,  36,  40, 0.25, 2]],\n        [[5, 60,  40, 0.25, 1]],\n        # stage4\n        [[3, 120,  80, 0, 2]],\n        [[3, 100,  80, 0, 1],\n         [3, 120,  80, 0, 1],\n         [3, 120,  80, 0, 1],\n         [3, 240, 112, 0.25, 1],\n         [3, 336, 112, 0.25, 1]\n        ],\n        # stage5\n        [[5, 336, 160, 0.25, 2]],\n        [[5, 480, 160, 0, 1],\n         [5, 480, 160, 0.25, 1],\n         [5, 480, 160, 0, 1],\n         [5, 480, 160, 0.25, 1]\n        ]\n    ]\n    model_kwargs = dict(\n        cfgs=cfgs,\n        width=width,\n        **kwargs,\n    )\n    return build_model_with_cfg(\n        RepGhostNet,\n        variant,\n        pretrained,\n        feature_cfg=dict(flatten_sequential=True),\n        **model_kwargs,\n    )\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'conv_stem', 'classifier': 'classifier',\n        'license': 'mit',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'repghostnet_050.in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/ChengpengChen/RepGhost/releases/download/RepGhost/repghostnet_0_5x_43M_66.95.pth.tar'\n    ),\n    
'repghostnet_058.in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/ChengpengChen/RepGhost/releases/download/RepGhost/repghostnet_0_58x_60M_68.94.pth.tar'\n    ),\n    'repghostnet_080.in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/ChengpengChen/RepGhost/releases/download/RepGhost/repghostnet_0_8x_96M_72.24.pth.tar'\n    ),\n    'repghostnet_100.in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/ChengpengChen/RepGhost/releases/download/RepGhost/repghostnet_1_0x_142M_74.22.pth.tar'\n    ),\n    'repghostnet_111.in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/ChengpengChen/RepGhost/releases/download/RepGhost/repghostnet_1_11x_170M_75.07.pth.tar'\n    ),\n    'repghostnet_130.in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/ChengpengChen/RepGhost/releases/download/RepGhost/repghostnet_1_3x_231M_76.37.pth.tar'\n    ),\n    'repghostnet_150.in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/ChengpengChen/RepGhost/releases/download/RepGhost/repghostnet_1_5x_301M_77.45.pth.tar'\n    ),\n    'repghostnet_200.in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/ChengpengChen/RepGhost/releases/download/RepGhost/repghostnet_2_0x_516M_78.81.pth.tar'\n    ),\n})\n\n\n@register_model\ndef repghostnet_050(pretrained=False, **kwargs) -> RepGhostNet:\n    \"\"\" RepGhostNet-0.5x \"\"\"\n    model = _create_repghostnet('repghostnet_050', width=0.5, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef repghostnet_058(pretrained=False, **kwargs) -> RepGhostNet:\n    \"\"\" RepGhostNet-0.58x \"\"\"\n    model = _create_repghostnet('repghostnet_058', width=0.58, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef repghostnet_080(pretrained=False, **kwargs) -> RepGhostNet:\n    \"\"\" RepGhostNet-0.8x \"\"\"\n    model = _create_repghostnet('repghostnet_080', 
width=0.8, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef repghostnet_100(pretrained=False, **kwargs) -> RepGhostNet:\n    \"\"\" RepGhostNet-1.0x \"\"\"\n    model = _create_repghostnet('repghostnet_100', width=1.0, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef repghostnet_111(pretrained=False, **kwargs) -> RepGhostNet:\n    \"\"\" RepGhostNet-1.11x \"\"\"\n    model = _create_repghostnet('repghostnet_111', width=1.11, pretrained=pretrained, **kwargs)\n    return model\n\n@register_model\ndef repghostnet_130(pretrained=False, **kwargs) -> RepGhostNet:\n    \"\"\" RepGhostNet-1.3x \"\"\"\n    model = _create_repghostnet('repghostnet_130', width=1.3, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef repghostnet_150(pretrained=False, **kwargs) -> RepGhostNet:\n    \"\"\" RepGhostNet-1.5x \"\"\"\n    model = _create_repghostnet('repghostnet_150', width=1.5, pretrained=pretrained, **kwargs)\n    return model\n\n\n@register_model\ndef repghostnet_200(pretrained=False, **kwargs) -> RepGhostNet:\n    \"\"\" RepGhostNet-2.0x \"\"\"\n    model = _create_repghostnet('repghostnet_200', width=2.0, pretrained=pretrained, **kwargs)\n    return model\n"
  },
  {
    "path": "timm/models/repvit.py",
    "content": "\"\"\" RepViT\n\nPaper: `RepViT: Revisiting Mobile CNN From ViT Perspective`\n    - https://arxiv.org/abs/2307.09283\n\n@misc{wang2023repvit,\n      title={RepViT: Revisiting Mobile CNN From ViT Perspective},\n      author={Ao Wang and Hui Chen and Zijia Lin and Hengjun Pu and Guiguang Ding},\n      year={2023},\n      eprint={2307.09283},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n\nAdapted from official impl at https://github.com/jameslahm/RepViT\n\"\"\"\nfrom typing import List, Optional, Tuple, Union, Type\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import SqueezeExcite, trunc_normal_, to_ntuple, to_2tuple\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint, checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['RepVit']\n\n\nclass ConvNorm(nn.Sequential):\n    def __init__(\n            self,\n            in_dim: int,\n            out_dim: int,\n            ks: int = 1,\n            stride: int = 1,\n            pad: int = 0,\n            dilation: int = 1,\n            groups: int = 1,\n            bn_weight_init: float = 1,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.add_module('c', nn.Conv2d(in_dim, out_dim, ks, stride, pad, dilation, groups, bias=False, **dd))\n        self.add_module('bn', nn.BatchNorm2d(out_dim, **dd))\n        nn.init.constant_(self.bn.weight, bn_weight_init)\n        nn.init.constant_(self.bn.bias, 0)\n\n    @torch.no_grad()\n    def fuse(self):\n        c, bn = self._modules.values()\n        w = bn.weight / (bn.running_var + bn.eps) ** 0.5\n        w = c.weight * w[:, None, None, None]\n        b = bn.bias - bn.running_mean * bn.weight / (bn.running_var + bn.eps) ** 0.5\n        m = nn.Conv2d(\n            
w.size(1) * self.c.groups,\n            w.size(0),\n            w.shape[2:],\n            stride=self.c.stride,\n            padding=self.c.padding,\n            dilation=self.c.dilation,\n            groups=self.c.groups,\n            device=c.weight.device,\n        )\n        m.weight.data.copy_(w)\n        m.bias.data.copy_(b)\n        return m\n\n\nclass NormLinear(nn.Sequential):\n    def __init__(\n            self,\n            in_dim: int,\n            out_dim: int,\n            bias: bool = True,\n            std: float = 0.02,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.add_module('bn', nn.BatchNorm1d(in_dim, **dd))\n        self.add_module('l', nn.Linear(in_dim, out_dim, bias=bias, **dd))\n        trunc_normal_(self.l.weight, std=std)\n        if bias:\n            nn.init.constant_(self.l.bias, 0)\n\n    @torch.no_grad()\n    def fuse(self):\n        bn, l = self._modules.values()\n        w = bn.weight / (bn.running_var + bn.eps) ** 0.5\n        b = bn.bias - self.bn.running_mean * self.bn.weight / (bn.running_var + bn.eps) ** 0.5\n        w = l.weight * w[None, :]\n        if l.bias is None:\n            b = b @ self.l.weight.T\n        else:\n            b = (l.weight @ b[:, None]).view(-1) + self.l.bias\n        m = nn.Linear(w.size(1), w.size(0), device=l.weight.device)\n        m.weight.data.copy_(w)\n        m.bias.data.copy_(b)\n        return m\n\n\nclass RepVggDw(nn.Module):\n    def __init__(\n            self,\n            ed: int,\n            kernel_size: int,\n            legacy: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv = ConvNorm(ed, ed, kernel_size, 1, (kernel_size - 1) // 2, groups=ed, **dd)\n        if legacy:\n            self.conv1 = ConvNorm(ed, ed, 1, 1, 0, groups=ed, **dd)\n            # Make 
torchscript happy.\n            self.bn = nn.Identity()\n        else:\n            self.conv1 = nn.Conv2d(ed, ed, 1, 1, 0, groups=ed, **dd)\n            self.bn = nn.BatchNorm2d(ed, **dd)\n        self.dim = ed\n        self.legacy = legacy\n\n    def forward(self, x):\n        return self.bn(self.conv(x) + self.conv1(x) + x)\n\n    @torch.no_grad()\n    def fuse(self):\n        conv = self.conv.fuse()\n\n        if self.legacy:\n            conv1 = self.conv1.fuse()\n        else:\n            conv1 = self.conv1\n\n        conv_w = conv.weight\n        conv_b = conv.bias\n        conv1_w = conv1.weight\n        conv1_b = conv1.bias\n\n        conv1_w = nn.functional.pad(conv1_w, [1, 1, 1, 1])\n\n        identity = nn.functional.pad(\n            torch.ones(conv1_w.shape[0], conv1_w.shape[1], 1, 1, device=conv1_w.device), [1, 1, 1, 1]\n        )\n\n        final_conv_w = conv_w + conv1_w + identity\n        final_conv_b = conv_b + conv1_b\n\n        conv.weight.data.copy_(final_conv_w)\n        conv.bias.data.copy_(final_conv_b)\n\n        if not self.legacy:\n            bn = self.bn\n            w = bn.weight / (bn.running_var + bn.eps) ** 0.5\n            w = conv.weight * w[:, None, None, None]\n            b = bn.bias + (conv.bias - bn.running_mean) * bn.weight / (bn.running_var + bn.eps) ** 0.5\n            conv.weight.data.copy_(w)\n            conv.bias.data.copy_(b)\n        return conv\n\n\nclass RepVitMlp(nn.Module):\n    def __init__(\n            self,\n            in_dim: int,\n            hidden_dim: int,\n            act_layer: Type[nn.Module],\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv1 = ConvNorm(in_dim, hidden_dim, 1, 1, 0, **dd)\n        self.act = act_layer()\n        self.conv2 = ConvNorm(hidden_dim, in_dim, 1, 1, 0, bn_weight_init=0, **dd)\n\n    def forward(self, x):\n        return self.conv2(self.act(self.conv1(x)))\n\n\nclass 
RepViTBlock(nn.Module):\n    def __init__(\n            self,\n            in_dim: int,\n            mlp_ratio: float,\n            kernel_size: int,\n            use_se: bool,\n            act_layer: Type[nn.Module],\n            legacy: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.token_mixer = RepVggDw(in_dim, kernel_size, legacy, **dd)\n        self.se = SqueezeExcite(in_dim, 0.25, **dd) if use_se else nn.Identity()\n        self.channel_mixer = RepVitMlp(in_dim, in_dim * mlp_ratio, act_layer, **dd)\n\n    def forward(self, x):\n        x = self.token_mixer(x)\n        x = self.se(x)\n        identity = x\n        x = self.channel_mixer(x)\n        return identity + x\n\n\nclass RepVitStem(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            act_layer: Type[nn.Module],\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv1 = ConvNorm(in_chs, out_chs // 2, 3, 2, 1, **dd)\n        self.act1 = act_layer()\n        self.conv2 = ConvNorm(out_chs // 2, out_chs, 3, 2, 1, **dd)\n        self.stride = 4\n\n    def forward(self, x):\n        return self.conv2(self.act1(self.conv1(x)))\n\n\nclass RepVitDownsample(nn.Module):\n    def __init__(\n            self,\n            in_dim: int,\n            mlp_ratio: float,\n            out_dim: int,\n            kernel_size: int,\n            act_layer: Type[nn.Module],\n            legacy: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.pre_block = RepViTBlock(\n            in_dim,\n            mlp_ratio,\n            kernel_size,\n            use_se=False,\n            act_layer=act_layer,\n            legacy=legacy,\n            
**dd,\n        )\n        self.spatial_downsample = ConvNorm(\n            in_dim,\n            in_dim,\n            kernel_size,\n            stride=2,\n            pad=(kernel_size - 1) // 2,\n            groups=in_dim,\n            **dd,\n        )\n        self.channel_downsample = ConvNorm(in_dim, out_dim, 1, 1, **dd)\n        self.ffn = RepVitMlp(out_dim, out_dim * mlp_ratio, act_layer, **dd)\n\n    def forward(self, x):\n        x = self.pre_block(x)\n        x = self.spatial_downsample(x)\n        x = self.channel_downsample(x)\n        identity = x\n        x = self.ffn(x)\n        return x + identity\n\n\nclass RepVitClassifier(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            num_classes: int,\n            distillation: bool = False,\n            drop: float = 0.0,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.head_drop = nn.Dropout(drop)\n        self.head = NormLinear(dim, num_classes, **dd) if num_classes > 0 else nn.Identity()\n        self.distillation = distillation\n        self.distilled_training = False\n        self.num_classes = num_classes\n        if distillation:\n            self.head_dist = NormLinear(dim, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n    def forward(self, x):\n        x = self.head_drop(x)\n        if self.distillation:\n            x1, x2 = self.head(x), self.head_dist(x)\n            if self.training and self.distilled_training and not torch.jit.is_scripting():\n                return x1, x2\n            else:\n                return (x1 + x2) / 2\n        else:\n            x = self.head(x)\n            return x\n\n    @torch.no_grad()\n    def fuse(self):\n        if not self.num_classes > 0:\n            return nn.Identity()\n        head = self.head.fuse()\n        if self.distillation:\n            head_dist = self.head_dist.fuse()\n            head.weight += 
head_dist.weight\n            head.bias += head_dist.bias\n            head.weight /= 2\n            head.bias /= 2\n            return head\n        else:\n            return head\n\n\nclass RepVitStage(nn.Module):\n    def __init__(\n            self,\n            in_dim: int,\n            out_dim: int,\n            depth: int,\n            mlp_ratio: float,\n            act_layer: Type[nn.Module],\n            kernel_size: int = 3,\n            downsample: bool = True,\n            legacy: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if downsample:\n            self.downsample = RepVitDownsample(\n                in_dim,\n                mlp_ratio,\n                out_dim,\n                kernel_size,\n                act_layer=act_layer,\n                legacy=legacy,\n                **dd,\n            )\n        else:\n            assert in_dim == out_dim\n            self.downsample = nn.Identity()\n\n        blocks = []\n        use_se = True\n        for _ in range(depth):\n            blocks.append(RepViTBlock(out_dim, mlp_ratio, kernel_size, use_se, act_layer, legacy, **dd))\n            use_se = not use_se\n\n        self.blocks = nn.Sequential(*blocks)\n\n    def forward(self, x):\n        x = self.downsample(x)\n        x = self.blocks(x)\n        return x\n\n\nclass RepVit(nn.Module):\n    def __init__(\n        self,\n        in_chans: int = 3,\n        img_size: int = 224,\n        embed_dim: Tuple[int, ...] = (48,),\n        depth: Tuple[int, ...] 
= (2,),\n        mlp_ratio: float = 2,\n        global_pool: str = 'avg',\n        kernel_size: int = 3,\n        num_classes: int = 1000,\n        act_layer: Type[nn.Module] = nn.GELU,\n        distillation: bool = True,\n        drop_rate: float = 0.0,\n        legacy: bool = False,\n        device=None,\n        dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.grad_checkpointing = False\n        self.global_pool = global_pool\n        self.embed_dim = embed_dim\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n\n        in_dim = embed_dim[0]\n        self.stem = RepVitStem(in_chans, in_dim, act_layer, **dd)\n        stride = self.stem.stride\n        resolution = tuple([i // p for i, p in zip(to_2tuple(img_size), to_2tuple(stride))])\n\n        num_stages = len(embed_dim)\n        mlp_ratios = to_ntuple(num_stages)(mlp_ratio)\n\n        self.feature_info = []\n        stages = []\n        for i in range(num_stages):\n            downsample = True if i != 0 else False\n            stages.append(\n                RepVitStage(\n                    in_dim,\n                    embed_dim[i],\n                    depth[i],\n                    mlp_ratio=mlp_ratios[i],\n                    act_layer=act_layer,\n                    kernel_size=kernel_size,\n                    downsample=downsample,\n                    legacy=legacy,\n                    **dd,\n                )\n            )\n            stage_stride = 2 if downsample else 1\n            stride *= stage_stride\n            resolution = tuple([(r - 1) // stage_stride + 1 for r in resolution])\n            self.feature_info += [dict(num_chs=embed_dim[i], reduction=stride, module=f'stages.{i}')]\n            in_dim = embed_dim[i]\n        self.stages = nn.Sequential(*stages)\n\n        self.num_features = self.head_hidden_size = embed_dim[-1]\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = 
RepVitClassifier(embed_dim[-1], num_classes, distillation, **dd)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(stem=r'^stem', blocks=[(r'^blocks\\.(\\d+)', None), (r'^norm', (99999,))])  # stem and embed\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None, distillation: bool = False, device=None, dtype=None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            self.global_pool = global_pool\n        dd = {'device': device, 'dtype': dtype}\n        self.head = RepVitClassifier(self.embed_dim[-1], num_classes, distillation, **dd)\n\n    @torch.jit.ignore\n    def set_distilled_training(self, enable=True):\n        self.head.distilled_training = enable\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n     
   intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.stem(x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(stage, x)\n            else:\n                x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stages, x)\n        else:\n            x = self.stages(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        if self.global_pool == 'avg':\n            x = x.mean((2, 3), keepdim=False)\n        x = self.head_drop(x)\n        if pre_logits:\n            return x\n        return self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n    @torch.no_grad()\n    def fuse(self):\n        def fuse_children(net):\n       
     for child_name, child in net.named_children():\n                if hasattr(child, 'fuse'):\n                    fused = child.fuse()\n                    setattr(net, child_name, fused)\n                    fuse_children(fused)\n                else:\n                    fuse_children(child)\n\n        fuse_children(self)\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000,\n        'input_size': (3, 224, 224),\n        'pool_size': (7, 7),\n        'crop_pct': 0.95,\n        'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN,\n        'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.conv1.c',\n        'classifier': ('head.head.l', 'head.head_dist.l'),\n        'license': 'apache-2.0',\n        **kwargs,\n    }\n\n\ndefault_cfgs = generate_default_cfgs(\n    {\n        'repvit_m1.dist_in1k': _cfg(\n            hf_hub_id='timm/',\n        ),\n        'repvit_m2.dist_in1k': _cfg(\n            hf_hub_id='timm/',\n        ),\n        'repvit_m3.dist_in1k': _cfg(\n            hf_hub_id='timm/',\n        ),\n        'repvit_m0_9.dist_300e_in1k': _cfg(\n            hf_hub_id='timm/',\n        ),\n        'repvit_m0_9.dist_450e_in1k': _cfg(\n            hf_hub_id='timm/',\n        ),\n        'repvit_m1_0.dist_300e_in1k': _cfg(\n            hf_hub_id='timm/',\n        ),\n        'repvit_m1_0.dist_450e_in1k': _cfg(\n            hf_hub_id='timm/',\n        ),\n        'repvit_m1_1.dist_300e_in1k': _cfg(\n            hf_hub_id='timm/',\n        ),\n        'repvit_m1_1.dist_450e_in1k': _cfg(\n            hf_hub_id='timm/',\n        ),\n        'repvit_m1_5.dist_300e_in1k': _cfg(\n            hf_hub_id='timm/',\n        ),\n        'repvit_m1_5.dist_450e_in1k': _cfg(\n            hf_hub_id='timm/',\n        ),\n        'repvit_m2_3.dist_300e_in1k': _cfg(\n            hf_hub_id='timm/',\n        ),\n        'repvit_m2_3.dist_450e_in1k': _cfg(\n            hf_hub_id='timm/',\n        ),\n    
}\n)\n\n\ndef _create_repvit(variant, pretrained=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', (0, 1, 2, 3))\n    model = build_model_with_cfg(\n        RepVit,\n        variant,\n        pretrained,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        **kwargs,\n    )\n    return model\n\n\n@register_model\ndef repvit_m1(pretrained=False, **kwargs):\n    \"\"\"\n    Constructs a RepViT-M1 model\n    \"\"\"\n    model_args = dict(embed_dim=(48, 96, 192, 384), depth=(2, 2, 14, 2), legacy=True)\n    return _create_repvit('repvit_m1', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef repvit_m2(pretrained=False, **kwargs):\n    \"\"\"\n    Constructs a RepViT-M2 model\n    \"\"\"\n    model_args = dict(embed_dim=(64, 128, 256, 512), depth=(2, 2, 12, 2), legacy=True)\n    return _create_repvit('repvit_m2', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef repvit_m3(pretrained=False, **kwargs):\n    \"\"\"\n    Constructs a RepViT-M3 model\n    \"\"\"\n    model_args = dict(embed_dim=(64, 128, 256, 512), depth=(4, 4, 18, 2), legacy=True)\n    return _create_repvit('repvit_m3', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef repvit_m0_9(pretrained=False, **kwargs):\n    \"\"\"\n    Constructs a RepViT-M0.9 model\n    \"\"\"\n    model_args = dict(embed_dim=(48, 96, 192, 384), depth=(2, 2, 14, 2))\n    return _create_repvit('repvit_m0_9', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef repvit_m1_0(pretrained=False, **kwargs):\n    \"\"\"\n    Constructs a RepViT-M1.0 model\n    \"\"\"\n    model_args = dict(embed_dim=(56, 112, 224, 448), depth=(2, 2, 14, 2))\n    return _create_repvit('repvit_m1_0', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef repvit_m1_1(pretrained=False, **kwargs):\n    \"\"\"\n    Constructs a RepViT-M1.1 model\n    \"\"\"\n    model_args = 
dict(embed_dim=(64, 128, 256, 512), depth=(2, 2, 12, 2))\n    return _create_repvit('repvit_m1_1', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef repvit_m1_5(pretrained=False, **kwargs):\n    \"\"\"\n    Constructs a RepViT-M1.5 model\n    \"\"\"\n    model_args = dict(embed_dim=(64, 128, 256, 512), depth=(4, 4, 24, 4))\n    return _create_repvit('repvit_m1_5', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef repvit_m2_3(pretrained=False, **kwargs):\n    \"\"\"\n    Constructs a RepViT-M2.3 model\n    \"\"\"\n    model_args = dict(embed_dim=(80, 160, 320, 640), depth=(6, 6, 34, 2))\n    return _create_repvit('repvit_m2_3', pretrained=pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/res2net.py",
    "content": "\"\"\" Res2Net and Res2NeXt\nAdapted from Official Pytorch impl at: https://github.com/gasvn/Res2Net/\nPaper: `Res2Net: A New Multi-scale Backbone Architecture` - https://arxiv.org/abs/1904.01169\n\"\"\"\nimport math\nfrom typing import Optional, Type\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom ._builder import build_model_with_cfg\nfrom ._registry import register_model, generate_default_cfgs\nfrom .resnet import ResNet\n\n__all__ = []\n\n\nclass Bottle2neck(nn.Module):\n    \"\"\" Res2Net/Res2NeXT Bottleneck\n    Adapted from https://github.com/gasvn/Res2Net/blob/master/res2net.py\n    \"\"\"\n    expansion = 4\n\n    def __init__(\n            self,\n            inplanes: int,\n            planes: int,\n            stride: int = 1,\n            downsample: Optional[nn.Module] = None,\n            cardinality: int = 1,\n            base_width: int = 26,\n            scale: int = 4,\n            dilation: int = 1,\n            first_dilation: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            attn_layer: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n            **_,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.scale = scale\n        self.is_first = stride > 1 or downsample is not None\n        self.num_scales = max(1, scale - 1)\n        width = int(math.floor(planes * (base_width / 64.0))) * cardinality\n        self.width = width\n        outplanes = planes * self.expansion\n        first_dilation = first_dilation or dilation\n\n        self.conv1 = nn.Conv2d(inplanes, width * scale, kernel_size=1, bias=False, **dd)\n        self.bn1 = norm_layer(width * scale, **dd)\n\n        convs = []\n        bns = []\n        for i in range(self.num_scales):\n            convs.append(nn.Conv2d(\n          
      width,\n                width,\n                kernel_size=3,\n                stride=stride,\n                padding=first_dilation,\n                dilation=first_dilation,\n                groups=cardinality,\n                bias=False,\n                **dd,\n            ))\n            bns.append(norm_layer(width, **dd))\n        self.convs = nn.ModuleList(convs)\n        self.bns = nn.ModuleList(bns)\n        if self.is_first:\n            # FIXME this should probably have count_include_pad=False, but hurts original weights\n            self.pool = nn.AvgPool2d(kernel_size=3, stride=stride, padding=1)\n        else:\n            self.pool = None\n\n        self.conv3 = nn.Conv2d(width * scale, outplanes, kernel_size=1, bias=False, **dd)\n        self.bn3 = norm_layer(outplanes, **dd)\n        self.se = attn_layer(outplanes, **dd) if attn_layer is not None else None\n\n        self.relu = act_layer(inplace=True)\n        self.downsample = downsample\n\n    def zero_init_last(self):\n        if getattr(self.bn3, 'weight', None) is not None:\n            nn.init.zeros_(self.bn3.weight)\n\n    def forward(self, x):\n        shortcut = x\n\n        out = self.conv1(x)\n        out = self.bn1(out)\n        out = self.relu(out)\n\n        spx = torch.split(out, self.width, 1)\n        spo = []\n        sp = spx[0]  # redundant, for torchscript\n        for i, (conv, bn) in enumerate(zip(self.convs, self.bns)):\n            if i == 0 or self.is_first:\n                sp = spx[i]\n            else:\n                sp = sp + spx[i]\n            sp = conv(sp)\n            sp = bn(sp)\n            sp = self.relu(sp)\n            spo.append(sp)\n        if self.scale > 1:\n            if self.pool is not None:  # self.is_first == True, None check for torchscript\n                spo.append(self.pool(spx[-1]))\n            else:\n                spo.append(spx[-1])\n        out = torch.cat(spo, 1)\n\n        out = self.conv3(out)\n        out = 
self.bn3(out)\n\n        if self.se is not None:\n            out = self.se(out)\n\n        if self.downsample is not None:\n            shortcut = self.downsample(x)\n\n        out += shortcut\n        out = self.relu(out)\n\n        return out\n\n\ndef _create_res2net(variant, pretrained=False, **kwargs):\n    return build_model_with_cfg(ResNet, variant, pretrained, **kwargs)\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bilinear',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'conv1', 'classifier': 'fc',\n        'license': 'unknown',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'res2net50_26w_4s.in1k': _cfg(hf_hub_id='timm/'),\n    'res2net50_48w_2s.in1k': _cfg(hf_hub_id='timm/'),\n    'res2net50_14w_8s.in1k': _cfg(hf_hub_id='timm/'),\n    'res2net50_26w_6s.in1k': _cfg(hf_hub_id='timm/'),\n    'res2net50_26w_8s.in1k': _cfg(hf_hub_id='timm/'),\n    'res2net101_26w_4s.in1k': _cfg(hf_hub_id='timm/'),\n    'res2next50.in1k': _cfg(hf_hub_id='timm/'),\n    'res2net50d.in1k': _cfg(hf_hub_id='timm/', first_conv='conv1.0'),\n    'res2net101d.in1k': _cfg(hf_hub_id='timm/', first_conv='conv1.0'),\n})\n\n\n@register_model\ndef res2net50_26w_4s(pretrained=False, **kwargs) -> ResNet:\n    \"\"\"Constructs a Res2Net-50 26w4s model.\n    \"\"\"\n    model_args = dict(\n        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=4))\n    return _create_res2net('res2net50_26w_4s', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef res2net101_26w_4s(pretrained=False, **kwargs) -> ResNet:\n    \"\"\"Constructs a Res2Net-101 26w4s model.\n    \"\"\"\n    model_args = dict(\n        block=Bottle2neck, layers=[3, 4, 23, 3], base_width=26, block_args=dict(scale=4))\n    return _create_res2net('res2net101_26w_4s', pretrained, 
**dict(model_args, **kwargs))\n\n\n@register_model\ndef res2net50_26w_6s(pretrained=False, **kwargs) -> ResNet:\n    \"\"\"Constructs a Res2Net-50 26w6s model.\n    \"\"\"\n    model_args = dict(\n        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=6))\n    return _create_res2net('res2net50_26w_6s', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef res2net50_26w_8s(pretrained=False, **kwargs) -> ResNet:\n    \"\"\"Constructs a Res2Net-50 26w8s model.\n    \"\"\"\n    model_args = dict(\n        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=8))\n    return _create_res2net('res2net50_26w_8s', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef res2net50_48w_2s(pretrained=False, **kwargs) -> ResNet:\n    \"\"\"Constructs a Res2Net-50 48w2s model.\n    \"\"\"\n    model_args = dict(\n        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=48, block_args=dict(scale=2))\n    return _create_res2net('res2net50_48w_2s', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef res2net50_14w_8s(pretrained=False, **kwargs) -> ResNet:\n    \"\"\"Constructs a Res2Net-50 14w8s model.\n    \"\"\"\n    model_args = dict(\n        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=14, block_args=dict(scale=8))\n    return _create_res2net('res2net50_14w_8s', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef res2next50(pretrained=False, **kwargs) -> ResNet:\n    \"\"\"Construct Res2NeXt-50 4s\n    \"\"\"\n    model_args = dict(\n        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=4, cardinality=8, block_args=dict(scale=4))\n    return _create_res2net('res2next50', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef res2net50d(pretrained=False, **kwargs) -> ResNet:\n    \"\"\"Construct Res2Net-50\n    \"\"\"\n    model_args = dict(\n        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, stem_type='deep',\n        avg_down=True, 
stem_width=32, block_args=dict(scale=4))\n    return _create_res2net('res2net50d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef res2net101d(pretrained=False, **kwargs) -> ResNet:\n    \"\"\"Construct Res2Net-101\n    \"\"\"\n    model_args = dict(\n        block=Bottle2neck, layers=[3, 4, 23, 3], base_width=26, stem_type='deep',\n        avg_down=True, stem_width=32, block_args=dict(scale=4))\n    return _create_res2net('res2net101d', pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/resnest.py",
    "content": "\"\"\" ResNeSt Models\n\nPaper: `ResNeSt: Split-Attention Networks` - https://arxiv.org/abs/2004.08955\n\nAdapted from original PyTorch impl w/ weights at https://github.com/zhanghang1989/ResNeSt by Hang Zhang\n\nModified for torchscript compat, and consistency with timm by Ross Wightman\n\"\"\"\nfrom typing import Optional, Type\n\nfrom torch import nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import SplitAttn\nfrom ._builder import build_model_with_cfg\nfrom ._registry import register_model, generate_default_cfgs\nfrom .resnet import ResNet\n\n\nclass ResNestBottleneck(nn.Module):\n    \"\"\"ResNet Bottleneck\n    \"\"\"\n    # pylint: disable=unused-argument\n    expansion = 4\n\n    def __init__(\n            self,\n            inplanes: int,\n            planes: int,\n            stride: int = 1,\n            downsample: Optional[nn.Module] = None,\n            radix: int = 1,\n            cardinality: int = 1,\n            base_width: int = 64,\n            avd: bool = False,\n            avd_first: bool = False,\n            is_first: bool = False,\n            reduce_first: int = 1,\n            dilation: int = 1,\n            first_dilation: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            attn_layer: Optional[Type[nn.Module]] = None,\n            aa_layer: Optional[Type[nn.Module]] = None,\n            drop_block: Optional[Type[nn.Module]] = None,\n            drop_path: Optional[nn.Module] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert reduce_first == 1  # not supported\n        assert attn_layer is None, 'attn_layer is not supported'  # not supported\n        assert aa_layer is None, 'aa_layer is not supported'  # TODO not yet supported\n\n        group_width = int(planes * (base_width / 
64.)) * cardinality\n        first_dilation = first_dilation or dilation\n        if avd and (stride > 1 or is_first):\n            avd_stride = stride\n            stride = 1\n        else:\n            avd_stride = 0\n        self.radix = radix\n\n        self.conv1 = nn.Conv2d(inplanes, group_width, kernel_size=1, bias=False, **dd)\n        self.bn1 = norm_layer(group_width, **dd)\n        self.act1 = act_layer(inplace=True)\n        self.avd_first = nn.AvgPool2d(3, avd_stride, padding=1) if avd_stride > 0 and avd_first else None\n\n        if self.radix >= 1:\n            self.conv2 = SplitAttn(\n                group_width,\n                group_width,\n                kernel_size=3,\n                stride=stride,\n                padding=first_dilation,\n                dilation=first_dilation,\n                groups=cardinality,\n                radix=radix,\n                norm_layer=norm_layer,\n                drop_layer=drop_block,\n                **dd,\n            )\n            self.bn2 = nn.Identity()\n            self.drop_block = nn.Identity()\n            self.act2 = nn.Identity()\n        else:\n            self.conv2 = nn.Conv2d(\n                group_width,\n                group_width,\n                kernel_size=3,\n                stride=stride,\n                padding=first_dilation,\n                dilation=first_dilation,\n                groups=cardinality,\n                bias=False,\n                **dd,\n            )\n            self.bn2 = norm_layer(group_width, **dd)\n            self.drop_block = drop_block() if drop_block is not None else nn.Identity()\n            self.act2 = act_layer(inplace=True)\n        self.avd_last = nn.AvgPool2d(3, avd_stride, padding=1) if avd_stride > 0 and not avd_first else None\n\n        self.conv3 = nn.Conv2d(group_width, planes * 4, kernel_size=1, bias=False, **dd)\n        self.bn3 = norm_layer(planes * 4, **dd)\n        self.act3 = act_layer(inplace=True)\n        self.downsample = 
downsample\n        self.drop_path = drop_path\n\n    def zero_init_last(self):\n        if getattr(self.bn3, 'weight', None) is not None:\n            nn.init.zeros_(self.bn3.weight)\n\n    def forward(self, x):\n        shortcut = x\n\n        out = self.conv1(x)\n        out = self.bn1(out)\n        out = self.act1(out)\n\n        if self.avd_first is not None:\n            out = self.avd_first(out)\n\n        out = self.conv2(out)\n        out = self.bn2(out)\n        out = self.drop_block(out)\n        out = self.act2(out)\n\n        if self.avd_last is not None:\n            out = self.avd_last(out)\n\n        out = self.conv3(out)\n        out = self.bn3(out)\n\n        if self.drop_path is not None:\n            x = self.drop_path(x)\n\n        if self.downsample is not None:\n            shortcut = self.downsample(x)\n\n        out += shortcut\n        out = self.act3(out)\n        return out\n\n\ndef _create_resnest(variant, pretrained=False, **kwargs):\n    return build_model_with_cfg(\n        ResNet,\n        variant,\n        pretrained,\n        **kwargs,\n    )\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bilinear',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'conv1.0', 'classifier': 'fc',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'resnest14d.gluon_in1k': _cfg(hf_hub_id='timm/'),\n    'resnest26d.gluon_in1k': _cfg(hf_hub_id='timm/'),\n    'resnest50d.in1k': _cfg(hf_hub_id='timm/'),\n    'resnest101e.in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), pool_size=(8, 8)),\n    'resnest200e.in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 320, 320), pool_size=(10, 10), crop_pct=0.909, interpolation='bicubic'),\n    'resnest269e.in1k': _cfg(\n        
hf_hub_id='timm/',\n        input_size=(3, 416, 416), pool_size=(13, 13), crop_pct=0.928, interpolation='bicubic'),\n    'resnest50d_4s2x40d.in1k': _cfg(\n        hf_hub_id='timm/',\n        interpolation='bicubic'),\n    'resnest50d_1s4x24d.in1k': _cfg(\n        hf_hub_id='timm/',\n        interpolation='bicubic')\n})\n\n\n@register_model\ndef resnest14d(pretrained=False, **kwargs) -> ResNet:\n    \"\"\" ResNeSt-14d model. Weights ported from GluonCV.\n    \"\"\"\n    model_kwargs = dict(\n        block=ResNestBottleneck, layers=[1, 1, 1, 1],\n        stem_type='deep', stem_width=32, avg_down=True, base_width=64, cardinality=1,\n        block_args=dict(radix=2, avd=True, avd_first=False))\n    return _create_resnest('resnest14d', pretrained=pretrained, **dict(model_kwargs, **kwargs))\n\n\n@register_model\ndef resnest26d(pretrained=False, **kwargs) -> ResNet:\n    \"\"\" ResNeSt-26d model. Weights ported from GluonCV.\n    \"\"\"\n    model_kwargs = dict(\n        block=ResNestBottleneck, layers=[2, 2, 2, 2],\n        stem_type='deep', stem_width=32, avg_down=True, base_width=64, cardinality=1,\n        block_args=dict(radix=2, avd=True, avd_first=False))\n    return _create_resnest('resnest26d', pretrained=pretrained, **dict(model_kwargs, **kwargs))\n\n\n@register_model\ndef resnest50d(pretrained=False, **kwargs) -> ResNet:\n    \"\"\" ResNeSt-50d model. 
Matches paper ResNeSt-50 model, https://arxiv.org/abs/2004.08955\n    Since this codebase supports all possible variations, 'd' for deep stem, stem_width 32, avg in downsample.\n    \"\"\"\n    model_kwargs = dict(\n        block=ResNestBottleneck, layers=[3, 4, 6, 3],\n        stem_type='deep', stem_width=32, avg_down=True, base_width=64, cardinality=1,\n        block_args=dict(radix=2, avd=True, avd_first=False))\n    return _create_resnest('resnest50d', pretrained=pretrained, **dict(model_kwargs, **kwargs))\n\n\n@register_model\ndef resnest101e(pretrained=False, **kwargs) -> ResNet:\n    \"\"\" ResNeSt-101e model. Matches paper ResNeSt-101 model, https://arxiv.org/abs/2004.08955\n     Since this codebase supports all possible variations, 'e' for deep stem, stem_width 64, avg in downsample.\n    \"\"\"\n    model_kwargs = dict(\n        block=ResNestBottleneck, layers=[3, 4, 23, 3],\n        stem_type='deep', stem_width=64, avg_down=True, base_width=64, cardinality=1,\n        block_args=dict(radix=2, avd=True, avd_first=False))\n    return _create_resnest('resnest101e', pretrained=pretrained, **dict(model_kwargs, **kwargs))\n\n\n@register_model\ndef resnest200e(pretrained=False, **kwargs) -> ResNet:\n    \"\"\" ResNeSt-200e model. Matches paper ResNeSt-200 model, https://arxiv.org/abs/2004.08955\n    Since this codebase supports all possible variations, 'e' for deep stem, stem_width 64, avg in downsample.\n    \"\"\"\n    model_kwargs = dict(\n        block=ResNestBottleneck, layers=[3, 24, 36, 3],\n        stem_type='deep', stem_width=64, avg_down=True, base_width=64, cardinality=1,\n        block_args=dict(radix=2, avd=True, avd_first=False))\n    return _create_resnest('resnest200e', pretrained=pretrained, **dict(model_kwargs, **kwargs))\n\n\n@register_model\ndef resnest269e(pretrained=False, **kwargs) -> ResNet:\n    \"\"\" ResNeSt-269e model. 
Matches paper ResNeSt-269 model, https://arxiv.org/abs/2004.08955\n    Since this codebase supports all possible variations, 'e' for deep stem, stem_width 64, avg in downsample.\n    \"\"\"\n    model_kwargs = dict(\n        block=ResNestBottleneck, layers=[3, 30, 48, 8],\n        stem_type='deep', stem_width=64, avg_down=True, base_width=64, cardinality=1,\n        block_args=dict(radix=2, avd=True, avd_first=False))\n    return _create_resnest('resnest269e', pretrained=pretrained, **dict(model_kwargs, **kwargs))\n\n\n@register_model\ndef resnest50d_4s2x40d(pretrained=False, **kwargs) -> ResNet:\n    \"\"\"ResNeSt-50 4s2x40d from https://github.com/zhanghang1989/ResNeSt/blob/master/ablation.md\n    \"\"\"\n    model_kwargs = dict(\n        block=ResNestBottleneck, layers=[3, 4, 6, 3],\n        stem_type='deep', stem_width=32, avg_down=True, base_width=40, cardinality=2,\n        block_args=dict(radix=4, avd=True, avd_first=True))\n    return _create_resnest('resnest50d_4s2x40d', pretrained=pretrained, **dict(model_kwargs, **kwargs))\n\n\n@register_model\ndef resnest50d_1s4x24d(pretrained=False, **kwargs) -> ResNet:\n    \"\"\"ResNeSt-50 1s4x24d from https://github.com/zhanghang1989/ResNeSt/blob/master/ablation.md\n    \"\"\"\n    model_kwargs = dict(\n        block=ResNestBottleneck, layers=[3, 4, 6, 3],\n        stem_type='deep', stem_width=32, avg_down=True, base_width=24, cardinality=4,\n        block_args=dict(radix=1, avd=True, avd_first=True))\n    return _create_resnest('resnest50d_1s4x24d', pretrained=pretrained, **dict(model_kwargs, **kwargs))\n"
  },
  {
    "path": "timm/models/resnet.py",
    "content": "\"\"\"PyTorch ResNet\n\nThis started as a copy of https://github.com/pytorch/vision 'resnet.py' (BSD-3-Clause) with\nadditional dropout and dynamic global avg/max pool.\n\nResNeXt, SE-ResNeXt, SENet, and MXNet Gluon stem/downsample variants, tiered stems added by Ross Wightman\n\nCopyright 2019, Ross Wightman\n\"\"\"\nimport math\nfrom functools import partial\nfrom typing import Any, Dict, List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import DropBlock2d, DropPath, AvgPool2dSame, BlurPool2d, LayerType, create_attn, \\\n    get_attn, get_act_layer, get_norm_layer, create_classifier, create_aa, to_ntuple\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs, register_model_deprecations\n\n__all__ = ['ResNet', 'BasicBlock', 'Bottleneck']  # model_registry will add each entrypoint fn to this\n\n\ndef get_padding(kernel_size: int, stride: int, dilation: int = 1) -> int:\n    padding = ((stride - 1) + dilation * (kernel_size - 1)) // 2\n    return padding\n\n\nclass BasicBlock(nn.Module):\n    \"\"\"Basic residual block for ResNet.\n\n    This is the standard residual block used in ResNet-18 and ResNet-34.\n    \"\"\"\n    expansion = 1\n\n    def __init__(\n            self,\n            inplanes: int,\n            planes: int,\n            stride: int = 1,\n            downsample: Optional[nn.Module] = None,\n            cardinality: int = 1,\n            base_width: int = 64,\n            reduce_first: int = 1,\n            dilation: int = 1,\n            first_dilation: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            attn_layer: Optional[Type[nn.Module]] = None,\n            
aa_layer: Optional[Type[nn.Module]] = None,\n            drop_block: Optional[Type[nn.Module]] = None,\n            drop_path: Optional[nn.Module] = None,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"\n        Args:\n            inplanes: Input channel dimensionality.\n            planes: Used to determine output channel dimensionalities.\n            stride: Stride used in convolution layers.\n            downsample: Optional downsample layer for residual path.\n            cardinality: Number of convolution groups.\n            base_width: Base width used to determine output channel dimensionality.\n            reduce_first: Reduction factor for first convolution output width of residual blocks.\n            dilation: Dilation rate for convolution layers.\n            first_dilation: Dilation rate for first convolution layer.\n            act_layer: Activation layer class.\n            norm_layer: Normalization layer class.\n            attn_layer: Attention layer class.\n            aa_layer: Anti-aliasing layer class.\n            drop_block: DropBlock layer class.\n            drop_path: Optional DropPath layer instance.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        assert cardinality == 1, 'BasicBlock only supports cardinality of 1'\n        assert base_width == 64, 'BasicBlock does not support changing base width'\n        first_planes = planes // reduce_first\n        outplanes = planes * self.expansion\n        first_dilation = first_dilation or dilation\n        use_aa = aa_layer is not None and (stride == 2 or first_dilation != dilation)\n\n        self.conv1 = nn.Conv2d(\n            inplanes,\n            first_planes,\n            kernel_size=3,\n            stride=1 if use_aa else stride,\n            padding=first_dilation,\n            dilation=first_dilation,\n            bias=False,\n            **dd,\n        )\n        self.bn1 = 
norm_layer(first_planes, **dd)\n        self.drop_block = drop_block() if drop_block is not None else nn.Identity()\n        self.act1 = act_layer(inplace=True)\n        self.aa = create_aa(aa_layer, channels=first_planes, stride=stride, enable=use_aa, **dd)\n\n        self.conv2 = nn.Conv2d(\n            first_planes,\n            outplanes,\n            kernel_size=3,\n            padding=dilation,\n            dilation=dilation,\n            bias=False,\n            **dd,\n        )\n        self.bn2 = norm_layer(outplanes, **dd)\n\n        self.se = create_attn(attn_layer, outplanes, **dd)\n\n        self.act2 = act_layer(inplace=True)\n        self.downsample = downsample\n        self.stride = stride\n        self.dilation = dilation\n        self.drop_path = drop_path\n\n    def zero_init_last(self) -> None:\n        \"\"\"Initialize the last batch norm layer weights to zero for better convergence.\"\"\"\n        if getattr(self.bn2, 'weight', None) is not None:\n            nn.init.zeros_(self.bn2.weight)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        shortcut = x\n\n        x = self.conv1(x)\n        x = self.bn1(x)\n        x = self.drop_block(x)\n        x = self.act1(x)\n        x = self.aa(x)\n\n        x = self.conv2(x)\n        x = self.bn2(x)\n\n        if self.se is not None:\n            x = self.se(x)\n\n        if self.drop_path is not None:\n            x = self.drop_path(x)\n\n        if self.downsample is not None:\n            shortcut = self.downsample(shortcut)\n        x += shortcut\n        x = self.act2(x)\n\n        return x\n\n\nclass Bottleneck(nn.Module):\n    \"\"\"Bottleneck residual block for ResNet.\n\n    This is the bottleneck block used in ResNet-50, ResNet-101, and ResNet-152.\n    \"\"\"\n    expansion = 4\n\n    def __init__(\n            self,\n            inplanes: int,\n            planes: int,\n            stride: int = 1,\n            downsample: Optional[nn.Module] = None,\n            
cardinality: int = 1,\n            base_width: int = 64,\n            reduce_first: int = 1,\n            dilation: int = 1,\n            first_dilation: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            attn_layer: Optional[Type[nn.Module]] = None,\n            aa_layer: Optional[Type[nn.Module]] = None,\n            drop_block: Optional[Type[nn.Module]] = None,\n            drop_path: Optional[nn.Module] = None,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"\n        Args:\n            inplanes: Input channel dimensionality.\n            planes: Used to determine output channel dimensionalities.\n            stride: Stride used in convolution layers.\n            downsample: Optional downsample layer for residual path.\n            cardinality: Number of convolution groups.\n            base_width: Base width used to determine output channel dimensionality.\n            reduce_first: Reduction factor for first convolution output width of residual blocks.\n            dilation: Dilation rate for convolution layers.\n            first_dilation: Dilation rate for first convolution layer.\n            act_layer: Activation layer class.\n            norm_layer: Normalization layer class.\n            attn_layer: Attention layer class.\n            aa_layer: Anti-aliasing layer class.\n            drop_block: DropBlock layer class.\n            drop_path: Optional DropPath layer instance.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        width = int(math.floor(planes * (base_width / 64)) * cardinality)\n        first_planes = width // reduce_first\n        outplanes = planes * self.expansion\n        first_dilation = first_dilation or dilation\n        use_aa = aa_layer is not None and (stride == 2 or first_dilation != dilation)\n\n        self.conv1 = nn.Conv2d(inplanes, first_planes, 
kernel_size=1, bias=False, **dd)\n        self.bn1 = norm_layer(first_planes, **dd)\n        self.act1 = act_layer(inplace=True)\n\n        self.conv2 = nn.Conv2d(\n            first_planes,\n            width,\n            kernel_size=3,\n            stride=1 if use_aa else stride,\n            padding=first_dilation,\n            dilation=first_dilation,\n            groups=cardinality,\n            bias=False,\n            **dd,\n        )\n        self.bn2 = norm_layer(width, **dd)\n        self.drop_block = drop_block() if drop_block is not None else nn.Identity()\n        self.act2 = act_layer(inplace=True)\n        self.aa = create_aa(aa_layer, channels=width, stride=stride, enable=use_aa, **dd)\n\n        self.conv3 = nn.Conv2d(width, outplanes, kernel_size=1, bias=False, **dd)\n        self.bn3 = norm_layer(outplanes, **dd)\n\n        self.se = create_attn(attn_layer, outplanes, **dd)\n\n        self.act3 = act_layer(inplace=True)\n        self.downsample = downsample\n        self.stride = stride\n        self.dilation = dilation\n        self.drop_path = drop_path\n\n    def zero_init_last(self) -> None:\n        \"\"\"Initialize the last batch norm layer weights to zero for better convergence.\"\"\"\n        if getattr(self.bn3, 'weight', None) is not None:\n            nn.init.zeros_(self.bn3.weight)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        shortcut = x\n\n        x = self.conv1(x)\n        x = self.bn1(x)\n        x = self.act1(x)\n\n        x = self.conv2(x)\n        x = self.bn2(x)\n        x = self.drop_block(x)\n        x = self.act2(x)\n        x = self.aa(x)\n\n        x = self.conv3(x)\n        x = self.bn3(x)\n\n        if self.se is not None:\n            x = self.se(x)\n\n        if self.drop_path is not None:\n            x = self.drop_path(x)\n\n        if self.downsample is not None:\n            shortcut = self.downsample(shortcut)\n        x += shortcut\n        x = self.act3(x)\n\n        return x\n\n\ndef 
downsample_conv(\n        in_channels: int,\n        out_channels: int,\n        kernel_size: int,\n        stride: int = 1,\n        dilation: int = 1,\n        first_dilation: Optional[int] = None,\n        norm_layer: Optional[Type[nn.Module]] = None,\n        device=None,\n        dtype=None,\n) -> nn.Module:\n    dd = {'device': device, 'dtype': dtype}\n    norm_layer = norm_layer or nn.BatchNorm2d\n    kernel_size = 1 if stride == 1 and dilation == 1 else kernel_size\n    first_dilation = (first_dilation or dilation) if kernel_size > 1 else 1\n    p = get_padding(kernel_size, stride, first_dilation)\n\n    return nn.Sequential(*[\n        nn.Conv2d(\n            in_channels,\n            out_channels,\n            kernel_size,\n            stride=stride,\n            padding=p,\n            dilation=first_dilation,\n            bias=False,\n            **dd\n        ),\n        norm_layer(out_channels, **dd)\n    ])\n\n\ndef downsample_avg(\n        in_channels: int,\n        out_channels: int,\n        kernel_size: int,\n        stride: int = 1,\n        dilation: int = 1,\n        first_dilation: Optional[int] = None,\n        norm_layer: Optional[Type[nn.Module]] = None,\n        device=None,\n        dtype=None,\n) -> nn.Module:\n    dd = {'device': device, 'dtype': dtype}\n    norm_layer = norm_layer or nn.BatchNorm2d\n    avg_stride = stride if dilation == 1 else 1\n    if stride == 1 and dilation == 1:\n        pool = nn.Identity()\n    else:\n        avg_pool_fn = AvgPool2dSame if avg_stride == 1 and dilation > 1 else nn.AvgPool2d\n        pool = avg_pool_fn(2, avg_stride, ceil_mode=True, count_include_pad=False)\n\n    return nn.Sequential(*[\n        pool,\n        nn.Conv2d(in_channels, out_channels, 1, stride=1, padding=0, bias=False, **dd),\n        norm_layer(out_channels, **dd)\n    ])\n\n\ndef drop_blocks(drop_prob: float = 0.) 
-> List[Optional[partial]]:\n    \"\"\"Create DropBlock layer instances for each stage.\n\n    Args:\n        drop_prob: Drop probability for DropBlock.\n\n    Returns:\n        List of DropBlock partial instances or None for each stage.\n    \"\"\"\n    return [\n        None, None,\n        partial(DropBlock2d, drop_prob=drop_prob, block_size=5, gamma_scale=0.25) if drop_prob else None,\n        partial(DropBlock2d, drop_prob=drop_prob, block_size=3, gamma_scale=1.00) if drop_prob else None]\n\n\ndef make_blocks(\n        block_fns: Tuple[Union[Type[BasicBlock], Type[Bottleneck]], ...],\n        channels: Tuple[int, ...],\n        block_repeats: Tuple[int, ...],\n        inplanes: int,\n        reduce_first: int = 1,\n        output_stride: int = 32,\n        down_kernel_size: int = 1,\n        avg_down: bool = False,\n        drop_block_rate: float = 0.,\n        drop_path_rate: float = 0.,\n        device=None,\n        dtype=None,\n        **kwargs,\n) -> Tuple[List[Tuple[str, nn.Module]], List[Dict[str, Any]]]:\n    \"\"\"Create ResNet stages with specified block configurations.\n\n    Args:\n        block_fns: Block class to use for each stage.\n        channels: Number of channels for each stage.\n        block_repeats: Number of blocks to repeat for each stage.\n        inplanes: Number of input channels.\n        reduce_first: Reduction factor for first convolution in each stage.\n        output_stride: Target output stride of network.\n        down_kernel_size: Kernel size for downsample layers.\n        avg_down: Use average pooling for downsample.\n        drop_block_rate: DropBlock drop rate.\n        drop_path_rate: Drop path rate for stochastic depth.\n        **kwargs: Additional arguments passed to block constructors.\n\n    Returns:\n        Tuple of stage modules list and feature info list.\n    \"\"\"\n    dd = {'device': device, 'dtype': dtype}\n    stages = []\n    feature_info = []\n    net_num_blocks = sum(block_repeats)\n    net_block_idx 
= 0\n    net_stride = 4\n    dilation = prev_dilation = 1\n    for stage_idx, (block_fn, planes, num_blocks, db) in enumerate(zip(block_fns, channels, block_repeats, drop_blocks(drop_block_rate))):\n        stage_name = f'layer{stage_idx + 1}'  # never liked this name, but weight compat requires it\n        stride = 1 if stage_idx == 0 else 2\n        if net_stride >= output_stride:\n            dilation *= stride\n            stride = 1\n        else:\n            net_stride *= stride\n\n        downsample = None\n        if stride != 1 or inplanes != planes * block_fn.expansion:\n            down_kwargs = dict(\n                in_channels=inplanes,\n                out_channels=planes * block_fn.expansion,\n                kernel_size=down_kernel_size,\n                stride=stride,\n                dilation=dilation,\n                first_dilation=prev_dilation,\n                norm_layer=kwargs.get('norm_layer'),\n                **dd,\n            )\n            downsample = downsample_avg(**down_kwargs) if avg_down else downsample_conv(**down_kwargs)\n\n        block_kwargs = dict(reduce_first=reduce_first, dilation=dilation, drop_block=db, **kwargs)\n        blocks = []\n        for block_idx in range(num_blocks):\n            downsample = downsample if block_idx == 0 else None\n            stride = stride if block_idx == 0 else 1\n            block_dpr = drop_path_rate * net_block_idx / (net_num_blocks - 1)  # stochastic depth linear decay rule\n            blocks.append(block_fn(\n                inplanes,\n                planes,\n                stride,\n                downsample,\n                first_dilation=prev_dilation,\n                drop_path=DropPath(block_dpr) if block_dpr > 0. 
else None,\n                **block_kwargs,\n                **dd,\n            ))\n            prev_dilation = dilation\n            inplanes = planes * block_fn.expansion\n            net_block_idx += 1\n\n        stages.append((stage_name, nn.Sequential(*blocks)))\n        feature_info.append(dict(num_chs=inplanes, reduction=net_stride, module=stage_name))\n\n    return stages, feature_info\n\n\nclass ResNet(nn.Module):\n    \"\"\"ResNet / ResNeXt / SE-ResNeXt / SE-Net\n\n    This class implements all variants of ResNet, ResNeXt, SE-ResNeXt, and SENet that\n      * have > 1 stride in the 3x3 conv layer of bottleneck\n      * have conv-bn-act ordering\n\n    This ResNet impl supports a number of stem and downsample options based on the v1c, v1d, v1e, and v1s\n    variants included in the MXNet Gluon ResNetV1b model. The C and D variants are also discussed in the\n    'Bag of Tricks' paper: https://arxiv.org/pdf/1812.01187. The B variant is equivalent to torchvision default.\n\n    ResNet variants (the same modifications can be used in SE/ResNeXt models as well):\n      * normal, b - 7x7 stem, stem_width = 64, same as torchvision ResNet, NVIDIA ResNet 'v1.5', Gluon v1b\n      * c - 3 layer deep 3x3 stem, stem_width = 32 (32, 32, 64)\n      * d - 3 layer deep 3x3 stem, stem_width = 32 (32, 32, 64), average pool in downsample\n      * e - 3 layer deep 3x3 stem, stem_width = 64 (64, 64, 128), average pool in downsample\n      * s - 3 layer deep 3x3 stem, stem_width = 64 (64, 64, 128)\n      * t - 3 layer deep 3x3 stem, stem width = 32 (24, 48, 64), average pool in downsample\n      * tn - 3 layer deep 3x3 stem, stem width = 32 (24, 32, 64), average pool in downsample\n\n    ResNeXt\n      * normal - 7x7 stem, stem_width = 64, standard cardinality and base widths\n      * same c,d, e, s variants as ResNet can be enabled\n\n    SE-ResNeXt\n      * normal - 7x7 stem, stem_width = 64\n      * same c, d, e, s variants as ResNet can be enabled\n\n    SENet-154 - 3 layer 
deep 3x3 stem (same as v1c-v1s), stem_width = 64, cardinality=64,\n        reduction by 2 on width of first bottleneck convolution, 3x3 downsample convs after first block\n    \"\"\"\n\n    def __init__(\n            self,\n            block: Union[BasicBlock, Bottleneck],\n            layers: Tuple[int, ...],\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            output_stride: int = 32,\n            global_pool: str = 'avg',\n            cardinality: int = 1,\n            base_width: int = 64,\n            stem_width: int = 64,\n            stem_type: str = '',\n            replace_stem_pool: bool = False,\n            block_reduce_first: int = 1,\n            down_kernel_size: int = 1,\n            avg_down: bool = False,\n            channels: Optional[Tuple[int, ...]] = (64, 128, 256, 512),\n            act_layer: LayerType = nn.ReLU,\n            norm_layer: LayerType = nn.BatchNorm2d,\n            aa_layer: Optional[Type[nn.Module]] = None,\n            drop_rate: float = 0.0,\n            drop_path_rate: float = 0.,\n            drop_block_rate: float = 0.,\n            zero_init_last: bool = True,\n            block_args: Optional[Dict[str, Any]] = None,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            block (nn.Module): class for the residual block. Options are BasicBlock, Bottleneck.\n            layers (List[int]) : number of layers in each block\n            num_classes (int): number of classification classes (default 1000)\n            in_chans (int): number of input (color) channels. (default 3)\n            output_stride (int): output stride of the network, 32, 16, or 8. (default 32)\n            global_pool (str): Global pooling type. One of 'avg', 'max', 'avgmax', 'catavgmax' (default 'avg')\n            cardinality (int): number of convolution groups for 3x3 conv in Bottleneck. (default 1)\n            base_width (int): bottleneck channels factor. 
`planes * base_width / 64 * cardinality` (default 64)\n            stem_width (int): number of channels in stem convolutions (default 64)\n            stem_type (str): The type of stem (default ''):\n                * '', default - a single 7x7 conv with a width of stem_width\n                * 'deep' - three 3x3 convolution layers of widths stem_width, stem_width, stem_width * 2\n                * 'deep_tiered' - three 3x3 conv layers of widths stem_width//4 * 3, stem_width, stem_width * 2\n            replace_stem_pool (bool): replace stem max-pooling layer with a 3x3 stride-2 convolution\n            block_reduce_first (int): Reduction factor for first convolution output width of residual blocks,\n                1 for all archs except senets, where 2 (default 1)\n            down_kernel_size (int): kernel size of residual block downsample path,\n                1x1 for most, 3x3 for senets (default: 1)\n            avg_down (bool): use avg pooling for projection skip connection between stages/downsample (default False)\n            act_layer (str, nn.Module): activation layer\n            norm_layer (str, nn.Module): normalization layer\n            aa_layer (nn.Module): anti-aliasing layer\n            drop_rate (float): Dropout probability before classifier, for training (default 0.)\n            drop_path_rate (float): Stochastic depth drop-path rate (default 0.)\n            drop_block_rate (float): Drop block rate (default 0.)\n            zero_init_last (bool): zero-init the last weight in residual path (usually last BN affine weight)\n            block_args (dict): Extra kwargs to pass through to block module\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        block_args = block_args or dict()\n        assert output_stride in (8, 16, 32)\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n\n        
act_layer = get_act_layer(act_layer)\n        norm_layer = get_norm_layer(norm_layer)\n\n        # Stem\n        deep_stem = 'deep' in stem_type\n        inplanes = stem_width * 2 if deep_stem else 64\n        if deep_stem:\n            stem_chs = (stem_width, stem_width)\n            if 'tiered' in stem_type:\n                stem_chs = (3 * (stem_width // 4), stem_width)\n            self.conv1 = nn.Sequential(*[\n                nn.Conv2d(in_chans, stem_chs[0], 3, stride=2, padding=1, bias=False, **dd),\n                norm_layer(stem_chs[0], **dd),\n                act_layer(inplace=True),\n                nn.Conv2d(stem_chs[0], stem_chs[1], 3, stride=1, padding=1, bias=False, **dd),\n                norm_layer(stem_chs[1], **dd),\n                act_layer(inplace=True),\n                nn.Conv2d(stem_chs[1], inplanes, 3, stride=1, padding=1, bias=False, **dd)])\n        else:\n            self.conv1 = nn.Conv2d(in_chans, inplanes, kernel_size=7, stride=2, padding=3, bias=False, **dd)\n        self.bn1 = norm_layer(inplanes, **dd)\n        self.act1 = act_layer(inplace=True)\n        self.feature_info = [dict(num_chs=inplanes, reduction=2, module='act1')]\n\n        # Stem pooling. 
The name 'maxpool' remains for weight compatibility.\n        if replace_stem_pool:\n            self.maxpool = nn.Sequential(*filter(None, [\n                nn.Conv2d(inplanes, inplanes, 3, stride=1 if aa_layer else 2, padding=1, bias=False, **dd),\n                create_aa(aa_layer, channels=inplanes, stride=2, **dd) if aa_layer is not None else None,\n                norm_layer(inplanes, **dd),\n                act_layer(inplace=True),\n            ]))\n        else:\n            if aa_layer is not None:\n                if issubclass(aa_layer, nn.AvgPool2d):\n                    self.maxpool = aa_layer(2)\n                else:\n                    self.maxpool = nn.Sequential(*[\n                        nn.MaxPool2d(kernel_size=3, stride=1, padding=1),\n                        aa_layer(channels=inplanes, stride=2, **dd)])\n            else:\n                self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)\n\n        # Feature Blocks\n        block_fns = to_ntuple(len(channels))(block)\n        stage_modules, stage_feature_info = make_blocks(\n            block_fns,\n            channels,\n            layers,\n            inplanes,\n            cardinality=cardinality,\n            base_width=base_width,\n            output_stride=output_stride,\n            reduce_first=block_reduce_first,\n            avg_down=avg_down,\n            down_kernel_size=down_kernel_size,\n            act_layer=act_layer,\n            norm_layer=norm_layer,\n            aa_layer=aa_layer,\n            drop_block_rate=drop_block_rate,\n            drop_path_rate=drop_path_rate,\n            **block_args,\n            **dd,\n        )\n        for stage in stage_modules:\n            self.add_module(*stage)  # layer1, layer2, etc\n        self.feature_info.extend(stage_feature_info)\n\n        # Head (Pooling and Classifier)\n        self.num_features = self.head_hidden_size = channels[-1] * block_fns[-1].expansion\n        self.global_pool, self.fc = 
create_classifier(self.num_features, self.num_classes, pool_type=global_pool, **dd)\n\n        self.init_weights(zero_init_last=zero_init_last)\n\n    @torch.jit.ignore\n    def init_weights(self, zero_init_last: bool = True) -> None:\n        \"\"\"Initialize model weights.\n\n        Args:\n            zero_init_last: Zero-initialize the last BN in each residual branch.\n        \"\"\"\n        for n, m in self.named_modules():\n            if isinstance(m, nn.Conv2d):\n                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')\n        if zero_init_last:\n            for m in self.modules():\n                if hasattr(m, 'zero_init_last'):\n                    m.zero_init_last()\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, str]:\n        \"\"\"Create regex patterns for parameter grouping.\n\n        Args:\n            coarse: Use coarse (stage-level) or fine (block-level) grouping.\n\n        Returns:\n            Dictionary mapping group names to regex patterns.\n        \"\"\"\n        matcher = dict(stem=r'^conv1|bn1|maxpool', blocks=r'^layer(\\d+)' if coarse else r'^layer(\\d+)\\.(\\d+)')\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Enable or disable gradient checkpointing.\n\n        Args:\n            enable: Whether to enable gradient checkpointing.\n        \"\"\"\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self, name_only: bool = False) -> Union[str, nn.Module]:\n        \"\"\"Get the classifier module.\n\n        Args:\n            name_only: Return classifier module name instead of module.\n\n        Returns:\n            Classifier module or name.\n        \"\"\"\n        return 'fc' if name_only else self.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg') -> None:\n        \"\"\"Reset the classifier head.\n\n        
Args:\n            num_classes: Number of classes for new classifier.\n            global_pool: Global pooling type.\n        \"\"\"\n        self.num_classes = num_classes\n        self.global_pool, self.fc = create_classifier(self.num_features, self.num_classes, pool_type=global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\"Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor.\n            indices: Take last n blocks if int, all if None, select matching indices if sequence.\n            norm: Apply norm layer to compatible intermediates.\n            stop_early: Stop iterating over blocks when last desired intermediate hit.\n            output_fmt: Shape of intermediate feature outputs.\n            intermediates_only: Only return intermediate features.\n\n        Returns:\n            Features and list of intermediate features or just intermediate features.\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(5, indices)\n\n        # forward pass\n        feat_idx = 0\n        x = self.conv1(x)\n        x = self.bn1(x)\n        x = self.act1(x)\n        if feat_idx in take_indices:\n            intermediates.append(x)\n        x = self.maxpool(x)\n\n        layer_names = ('layer1', 'layer2', 'layer3', 'layer4')\n        if stop_early:\n            layer_names = layer_names[:max_index]\n        for n in layer_names:\n            feat_idx += 1\n            x = getattr(self, n)(x)  # won't work with torchscript, but keeps code reasonable, FML\n            if 
feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ) -> List[int]:\n        \"\"\"Prune layers not required for specified intermediates.\n\n        Args:\n            indices: Indices of intermediate layers to keep.\n            prune_norm: Whether to prune normalization layers.\n            prune_head: Whether to prune the classifier head.\n\n        Returns:\n            List of indices that were kept.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(5, indices)\n        layer_names = ('layer1', 'layer2', 'layer3', 'layer4')\n        layer_names = layer_names[max_index:]\n        for n in layer_names:\n            setattr(self, n, nn.Identity())\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through feature extraction layers.\"\"\"\n        x = self.conv1(x)\n        x = self.bn1(x)\n        x = self.act1(x)\n        x = self.maxpool(x)\n\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq([self.layer1, self.layer2, self.layer3, self.layer4], x, flatten=True)\n        else:\n            x = self.layer1(x)\n            x = self.layer2(x)\n            x = self.layer3(x)\n            x = self.layer4(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        \"\"\"Forward pass through classifier head.\n\n        Args:\n            x: Feature tensor.\n            pre_logits: Return features before final classifier layer.\n\n        Returns:\n            Output tensor.\n        \"\"\"\n        x 
= self.global_pool(x)\n        if self.drop_rate:\n            x = F.dropout(x, p=float(self.drop_rate), training=self.training)\n        return x if pre_logits else self.fc(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\"\"\"\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_resnet(variant: str, pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Create a ResNet model.\n\n    Args:\n        variant: Model variant name.\n        pretrained: Load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    Returns:\n        ResNet model instance.\n    \"\"\"\n    return build_model_with_cfg(ResNet, variant, pretrained, **kwargs)\n\n\ndef _cfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Create a default configuration for ResNet models.\"\"\"\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bilinear',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'conv1', 'classifier': 'fc',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndef _tcfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Create a configuration with bicubic interpolation.\"\"\"\n    return _cfg(url=url, **dict({'interpolation': 'bicubic'}, **kwargs))\n\n\ndef _ttcfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Create a configuration for models trained with timm.\"\"\"\n    return _cfg(url=url, **dict({\n        'interpolation': 'bicubic', 'test_input_size': (3, 288, 288), 'test_crop_pct': 0.95,\n        'origin_url': 'https://github.com/huggingface/pytorch-image-models',\n    }, **kwargs))\n\n\ndef _rcfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Create a configuration for ResNet-RS models.\"\"\"\n    return _cfg(url=url, **dict({\n        'interpolation': 'bicubic', 'crop_pct': 
0.95, 'test_input_size': (3, 288, 288), 'test_crop_pct': 1.0,\n        'origin_url': 'https://github.com/huggingface/pytorch-image-models', 'paper_ids': 'arXiv:2110.00476'\n    }, **kwargs))\n\n\ndef _r3cfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Create a configuration for ResNet-RS models with 160x160 input.\"\"\"\n    return _cfg(url=url, **dict({\n        'interpolation': 'bicubic', 'input_size': (3, 160, 160), 'pool_size': (5, 5),\n        'crop_pct': 0.95, 'test_input_size': (3, 224, 224), 'test_crop_pct': 0.95,\n        'origin_url': 'https://github.com/huggingface/pytorch-image-models', 'paper_ids': 'arXiv:2110.00476',\n    }, **kwargs))\n\n\ndef _gcfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Create a configuration for Gluon pretrained models.\"\"\"\n    return _cfg(url=url, **dict({\n        'interpolation': 'bicubic',\n        'origin_url': 'https://cv.gluon.ai/model_zoo/classification.html',\n    }, **kwargs))\n\n\ndefault_cfgs = generate_default_cfgs({\n    # ResNet and Wide ResNet trained w/ timm (RSB paper and others)\n    'resnet10t.c3_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet10t_176_c3-f3215ab1.pth',\n        input_size=(3, 176, 176), pool_size=(6, 6), test_crop_pct=0.95, test_input_size=(3, 224, 224),\n        first_conv='conv1.0'),\n    'resnet14t.c3_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet14t_176_c3-c4ed2c37.pth',\n        input_size=(3, 176, 176), pool_size=(6, 6), test_crop_pct=0.95, test_input_size=(3, 224, 224),\n        first_conv='conv1.0'),\n    'resnet18.a1_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet18_a1_0-d63eafa0.pth'),\n    'resnet18.a2_in1k': _rcfg(\n        hf_hub_id='timm/',\n        
url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet18_a2_0-b61bd467.pth'),\n    'resnet18.a3_in1k': _r3cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet18_a3_0-40c531c8.pth'),\n    'resnet18d.ra2_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet18d_ra2-48a79e06.pth',\n        first_conv='conv1.0'),\n    'resnet18d.ra4_e3600_r224_in1k': _rcfg(\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=0.9, first_conv='conv1.0'),\n    'resnet34.a1_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet34_a1_0-46f8f793.pth'),\n    'resnet34.a2_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet34_a2_0-82d47d71.pth'),\n    'resnet34.a3_in1k': _r3cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet34_a3_0-a20cabb6.pth',\n        crop_pct=0.95),\n    'resnet34.bt_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet34-43635321.pth'),\n    'resnet34.ra4_e3600_r224_in1k': _rcfg(\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=0.9),\n    'resnet34d.ra2_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet34d_ra2-f8dcfcaf.pth',\n        first_conv='conv1.0'),\n    'resnet26.bt_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet26-9aa10e23.pth'),\n    'resnet26d.bt_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet26d-69e92c46.pth',\n        first_conv='conv1.0'),\n    'resnet26t.ra2_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/resnet26t_256_ra2-6f6fa748.pth',\n        first_conv='conv1.0', input_size=(3, 256, 256), pool_size=(8, 8),\n        crop_pct=0.94, test_input_size=(3, 320, 320), test_crop_pct=1.0),\n    'resnet50.a1_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50_a1_0-14fe96d1.pth'),\n    'resnet50.a1h_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50_a1h2_176-001a1197.pth',\n        input_size=(3, 176, 176), pool_size=(6, 6), crop_pct=0.9, test_input_size=(3, 224, 224), test_crop_pct=1.0),\n    'resnet50.a2_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50_a2_0-a2746f79.pth'),\n    'resnet50.a3_in1k': _r3cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50_a3_0-59cae1ef.pth'),\n    'resnet50.b1k_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50_b1k-532a802a.pth'),\n    'resnet50.b2k_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50_b2k-1ba180c1.pth'),\n    'resnet50.c1_in1k': _rcfg(\n        hf_hub_id='timm/',\n 
       url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50_c1-5ba5e060.pth'),\n    'resnet50.c2_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50_c2-d01e05b2.pth'),\n    'resnet50.d_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50_d-f39db8af.pth'),\n    'resnet50.ram_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-weights/resnet50_ram-a26f946b.pth'),\n    'resnet50.am_in1k': _tcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-weights/resnet50_am-6c502b37.pth'),\n    'resnet50.ra_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-weights/resnet50_ra-85ebb6e5.pth'),\n    'resnet50.bt_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-weights/rw_resnet50-86acaeed.pth'),\n    'resnet50d.ra2_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet50d_ra2-464e36ba.pth',\n        first_conv='conv1.0'),\n    'resnet50d.ra4_e3600_r224_in1k': _rcfg(\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0,\n        first_conv='conv1.0'),\n    'resnet50d.a1_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50d_a1_0-e20cff14.pth',\n        first_conv='conv1.0'),\n    'resnet50d.a2_in1k': _rcfg(\n        hf_hub_id='timm/',\n        
url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50d_a2_0-a3adc64d.pth',\n        first_conv='conv1.0'),\n    'resnet50d.a3_in1k': _r3cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50d_a3_0-403fdfad.pth',\n        first_conv='conv1.0'),\n    'resnet50t.untrained': _ttcfg(first_conv='conv1.0'),\n    'resnet101.a1h_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet101_a1h-36d3f2aa.pth'),\n    'resnet101.a1_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet101_a1_0-cdcb52a9.pth'),\n    'resnet101.a2_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet101_a2_0-6edb36c7.pth'),\n    'resnet101.a3_in1k': _r3cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet101_a3_0-1db14157.pth'),\n    'resnet101d.ra2_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet101d_ra2-2803ffab.pth',\n        first_conv='conv1.0', input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=0.95,\n        test_crop_pct=1.0, test_input_size=(3, 320, 320)),\n    'resnet152.a1h_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet152_a1h-dc400468.pth'),\n    'resnet152.a1_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet152_a1_0-2eee8a7a.pth'),\n    'resnet152.a2_in1k': _rcfg(\n        hf_hub_id='timm/',\n       
 url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet152_a2_0-b4c6978f.pth'),\n    'resnet152.a3_in1k': _r3cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet152_a3_0-134d4688.pth'),\n    'resnet152d.ra2_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet152d_ra2-5cac0439.pth',\n        first_conv='conv1.0', input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=0.95,\n        test_crop_pct=1.0, test_input_size=(3, 320, 320)),\n    'resnet200.untrained': _ttcfg(),\n    'resnet200d.ra2_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnet200d_ra2-bdba9bf9.pth',\n        first_conv='conv1.0', input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=0.95,\n        test_crop_pct=1.0, test_input_size=(3, 320, 320)),\n    'wide_resnet50_2.racm_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/wide_resnet50_racm-8234f177.pth'),\n\n    # torchvision resnet weights\n    'resnet18.tv_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/resnet18-f37072fd.pth',\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n    'resnet34.tv_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/resnet34-b627a593.pth',\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n    'resnet50.tv_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/resnet50-0676ba61.pth',\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n    'resnet50.tv2_in1k': _cfg(\n        hf_hub_id='timm/',\n        
url='https://download.pytorch.org/models/resnet50-11ad3fa6.pth',\n        input_size=(3, 176, 176), pool_size=(6, 6), test_input_size=(3, 224, 224), test_crop_pct=0.965,\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n    'resnet101.tv_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/resnet101-63fe2227.pth',\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n    'resnet101.tv2_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/resnet101-cd907fc2.pth',\n        input_size=(3, 176, 176), pool_size=(6, 6), test_input_size=(3, 224, 224), test_crop_pct=0.965,\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n    'resnet152.tv_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/resnet152-394f9c45.pth',\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n    'resnet152.tv2_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/resnet152-f82ba261.pth',\n        input_size=(3, 176, 176), pool_size=(6, 6), test_input_size=(3, 224, 224), test_crop_pct=0.965,\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n    'wide_resnet50_2.tv_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/wide_resnet50_2-95faca4d.pth',\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n    'wide_resnet50_2.tv2_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/wide_resnet50_2-9ba9bcbe.pth',\n        input_size=(3, 176, 176), pool_size=(6, 6), test_input_size=(3, 224, 224), test_crop_pct=0.965,\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n    'wide_resnet101_2.tv_in1k': _cfg(\n        hf_hub_id='timm/',\n        
url='https://download.pytorch.org/models/wide_resnet101_2-32ee1156.pth',\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n    'wide_resnet101_2.tv2_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/wide_resnet101_2-d733dc28.pth',\n        input_size=(3, 176, 176), pool_size=(6, 6), test_input_size=(3, 224, 224), test_crop_pct=0.965,\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n\n    # ResNets w/ alternative norm layers\n    'resnet50_gn.a1h_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50_gn_a1h2-8fe6c4d0.pth',\n        crop_pct=0.94),\n\n    # ResNeXt trained in timm (RSB paper and others)\n    'resnext50_32x4d.a1h_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/resnext50_32x4d_a1h-0146ab0a.pth'),\n    'resnext50_32x4d.a1_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnext50_32x4d_a1_0-b5a91a1d.pth'),\n    'resnext50_32x4d.a2_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnext50_32x4d_a2_0-efc76add.pth'),\n    'resnext50_32x4d.a3_in1k': _r3cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/resnext50_32x4d_a3_0-3e450271.pth'),\n    'resnext50_32x4d.ra_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-weights/resnext50_32x4d_ra-d733960d.pth'),\n    'resnext50d_32x4d.bt_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnext50d_32x4d-103e99f8.pth',\n        first_conv='conv1.0'),\n    'resnext101_32x4d.untrained': _ttcfg(),\n    'resnext101_64x4d.c1_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/resnext101_64x4d_c-0d0e0cc0.pth'),\n\n    # torchvision ResNeXt weights\n    'resnext50_32x4d.tv_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/resnext50_32x4d-7cdf4587.pth',\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n    'resnext101_32x8d.tv_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/resnext101_32x8d-8ba56ff5.pth',\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n    'resnext101_64x4d.tv_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/resnext101_64x4d-173b62eb.pth',\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n    'resnext50_32x4d.tv2_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/resnext50_32x4d-1a0047aa.pth',\n        input_size=(3, 176, 176), pool_size=(6, 6), test_input_size=(3, 224, 224), test_crop_pct=0.965,\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n    'resnext101_32x8d.tv2_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/resnext101_32x8d-110c445d.pth',\n        input_size=(3, 176, 176), pool_size=(6, 6), test_input_size=(3, 224, 224), test_crop_pct=0.965,\n        license='bsd-3-clause', origin_url='https://github.com/pytorch/vision'),\n\n    #  ResNeXt models - Weakly Supervised Pretraining on Instagram Hashtags\n    #  from https://github.com/facebookresearch/WSL-Images\n    #  Please note the CC-BY-NC 4.0 license on these weights, 
non-commercial use only.\n    'resnext101_32x8d.fb_wsl_ig1b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/ig_resnext101_32x8-c38310e5.pth',\n        license='cc-by-nc-4.0', origin_url='https://github.com/facebookresearch/WSL-Images'),\n    'resnext101_32x16d.fb_wsl_ig1b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/ig_resnext101_32x16-c6f796b0.pth',\n        license='cc-by-nc-4.0', origin_url='https://github.com/facebookresearch/WSL-Images'),\n    'resnext101_32x32d.fb_wsl_ig1b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/ig_resnext101_32x32-e4b90b00.pth',\n        license='cc-by-nc-4.0', origin_url='https://github.com/facebookresearch/WSL-Images'),\n    'resnext101_32x48d.fb_wsl_ig1b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://download.pytorch.org/models/ig_resnext101_32x48-3e41cc8a.pth',\n        license='cc-by-nc-4.0', origin_url='https://github.com/facebookresearch/WSL-Images'),\n\n    #  Semi-Supervised ResNe*t models from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models\n    #  Please note the CC-BY-NC 4.0 license on theses weights, non-commercial use only.\n    'resnet18.fb_ssl_yfcc100m_ft_in1k':  _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_supervised_resnet18-d92f0530.pth',\n        license='cc-by-nc-4.0', origin_url='https://github.com/facebookresearch/semi-supervised-ImageNet1K-models'),\n    'resnet50.fb_ssl_yfcc100m_ft_in1k':  _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_supervised_resnet50-08389792.pth',\n        license='cc-by-nc-4.0', origin_url='https://github.com/facebookresearch/semi-supervised-ImageNet1K-models'),\n    'resnext50_32x4d.fb_ssl_yfcc100m_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        
url='https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_supervised_resnext50_32x4-ddb3e555.pth',\n        license='cc-by-nc-4.0', origin_url='https://github.com/facebookresearch/semi-supervised-ImageNet1K-models'),\n    'resnext101_32x4d.fb_ssl_yfcc100m_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_supervised_resnext101_32x4-dc43570a.pth',\n        license='cc-by-nc-4.0', origin_url='https://github.com/facebookresearch/semi-supervised-ImageNet1K-models'),\n    'resnext101_32x8d.fb_ssl_yfcc100m_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_supervised_resnext101_32x8-2cfe2f8b.pth',\n        license='cc-by-nc-4.0', origin_url='https://github.com/facebookresearch/semi-supervised-ImageNet1K-models'),\n    'resnext101_32x16d.fb_ssl_yfcc100m_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_supervised_resnext101_32x16-15fffa57.pth',\n        license='cc-by-nc-4.0', origin_url='https://github.com/facebookresearch/semi-supervised-ImageNet1K-models'),\n\n    #  Semi-Weakly Supervised ResNe*t models from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models\n    #  Please note the CC-BY-NC 4.0 license on theses weights, non-commercial use only.\n    'resnet18.fb_swsl_ig1b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnet18-118f1556.pth',\n        license='cc-by-nc-4.0', origin_url='https://github.com/facebookresearch/semi-supervised-ImageNet1K-models'),\n    'resnet50.fb_swsl_ig1b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnet50-16a12f1b.pth',\n        license='cc-by-nc-4.0', 
origin_url='https://github.com/facebookresearch/semi-supervised-ImageNet1K-models'),\n    'resnext50_32x4d.fb_swsl_ig1b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnext50_32x4-72679e44.pth',\n        license='cc-by-nc-4.0', origin_url='https://github.com/facebookresearch/semi-supervised-ImageNet1K-models'),\n    'resnext101_32x4d.fb_swsl_ig1b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnext101_32x4-3f87e46b.pth',\n        license='cc-by-nc-4.0', origin_url='https://github.com/facebookresearch/semi-supervised-ImageNet1K-models'),\n    'resnext101_32x8d.fb_swsl_ig1b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnext101_32x8-b4712904.pth',\n        license='cc-by-nc-4.0', origin_url='https://github.com/facebookresearch/semi-supervised-ImageNet1K-models'),\n    'resnext101_32x16d.fb_swsl_ig1b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnext101_32x16-f3559a9c.pth',\n        license='cc-by-nc-4.0', origin_url='https://github.com/facebookresearch/semi-supervised-ImageNet1K-models'),\n\n    #  Efficient Channel Attention ResNets\n    'ecaresnet26t.ra2_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/ecaresnet26t_ra2-46609757.pth',\n        first_conv='conv1.0', input_size=(3, 256, 256), pool_size=(8, 8),\n        test_crop_pct=0.95, test_input_size=(3, 320, 320)),\n    'ecaresnetlight.miil_in1k': _tcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/ecaresnetlight-75a9c627.pth',\n        test_crop_pct=0.95, 
test_input_size=(3, 288, 288)),\n    'ecaresnet50d.miil_in1k': _tcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/ecaresnet50d-93c81e3b.pth',\n        first_conv='conv1.0', test_crop_pct=0.95, test_input_size=(3, 288, 288)),\n    'ecaresnet50d_pruned.miil_in1k': _tcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/ecaresnet50d_p-e4fa23c2.pth',\n        first_conv='conv1.0', test_crop_pct=0.95, test_input_size=(3, 288, 288)),\n    'ecaresnet50t.ra2_in1k': _tcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/ecaresnet50t_ra2-f7ac63c4.pth',\n        first_conv='conv1.0', input_size=(3, 256, 256), pool_size=(8, 8),\n        test_crop_pct=0.95, test_input_size=(3, 320, 320)),\n    'ecaresnet50t.a1_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/ecaresnet50t_a1_0-99bd76a8.pth',\n        first_conv='conv1.0'),\n    'ecaresnet50t.a2_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/ecaresnet50t_a2_0-b1c7b745.pth',\n        first_conv='conv1.0'),\n    'ecaresnet50t.a3_in1k': _r3cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/ecaresnet50t_a3_0-8cc311f1.pth',\n        first_conv='conv1.0'),\n    'ecaresnet101d.miil_in1k': _tcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/ecaresnet101d-153dad65.pth',\n        first_conv='conv1.0', test_crop_pct=0.95, test_input_size=(3, 288, 288)),\n    'ecaresnet101d_pruned.miil_in1k': _tcfg(\n        hf_hub_id='timm/',\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/ecaresnet101d_p-9e74cb91.pth',\n        first_conv='conv1.0', test_crop_pct=0.95, test_input_size=(3, 288, 288)),\n    'ecaresnet200d.untrained': _ttcfg(\n        first_conv='conv1.0', input_size=(3, 256, 256), crop_pct=0.95, pool_size=(8, 8)),\n    'ecaresnet269d.ra2_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/ecaresnet269d_320_ra2-7baa55cb.pth',\n        first_conv='conv1.0', input_size=(3, 320, 320), pool_size=(10, 10), crop_pct=0.95,\n        test_crop_pct=1.0, test_input_size=(3, 352, 352)),\n\n    #  Efficient Channel Attention ResNeXts\n    'ecaresnext26t_32x4d.untrained': _tcfg(first_conv='conv1.0'),\n    'ecaresnext50t_32x4d.untrained': _tcfg(first_conv='conv1.0'),\n\n    #  Squeeze-Excitation ResNets, to eventually replace the models in senet.py\n    'seresnet18.untrained': _ttcfg(),\n    'seresnet34.untrained': _ttcfg(),\n    'seresnet50.a1_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/seresnet50_a1_0-ffa00869.pth',\n        crop_pct=0.95),\n    'seresnet50.a2_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/seresnet50_a2_0-850de0d9.pth',\n        crop_pct=0.95),\n    'seresnet50.a3_in1k': _r3cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-rsb-weights/seresnet50_a3_0-317ecd56.pth',\n        crop_pct=0.95),\n    'seresnet50.ra2_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnet50_ra_224-8efdb4bb.pth'),\n    'seresnet50t.untrained': _ttcfg(\n        first_conv='conv1.0'),\n    'seresnet101.untrained': _ttcfg(),\n    
'seresnet152.untrained': _ttcfg(),\n    'seresnet152d.ra2_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnet152d_ra2-04464dd2.pth',\n        first_conv='conv1.0', input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=0.95,\n        test_crop_pct=1.0, test_input_size=(3, 320, 320)\n    ),\n    'seresnet200d.untrained': _ttcfg(\n        first_conv='conv1.0', input_size=(3, 256, 256), pool_size=(8, 8)),\n    'seresnet269d.untrained': _ttcfg(\n        first_conv='conv1.0', input_size=(3, 256, 256), pool_size=(8, 8)),\n\n    #  Squeeze-Excitation ResNeXts, to eventually replace the models in senet.py\n    'seresnext26d_32x4d.bt_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnext26d_32x4d-80fa48a3.pth',\n        first_conv='conv1.0'),\n    'seresnext26t_32x4d.bt_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnext26tn_32x4d-569cb627.pth',\n        first_conv='conv1.0'),\n    'seresnext50_32x4d.racm_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnext50_32x4d_racm-a304a460.pth'),\n    'seresnext101_32x4d.untrained': _ttcfg(),\n    'seresnext101_32x8d.ah_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/seresnext101_32x8d_ah-e6bc4c0a.pth'),\n    'seresnext101d_32x8d.ah_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/seresnext101d_32x8d_ah-191d7b94.pth',\n        first_conv='conv1.0'),\n\n    # ResNets with anti-aliasing / blur pool\n    'resnetaa50d.sw_in12k_ft_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        
first_conv='conv1.0', crop_pct=0.95, test_crop_pct=1.0),\n    'resnetaa101d.sw_in12k_ft_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        first_conv='conv1.0', crop_pct=0.95, test_crop_pct=1.0),\n    'seresnextaa101d_32x8d.sw_in12k_ft_in1k_288': _ttcfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, input_size=(3, 288, 288), pool_size=(9, 9), test_input_size=(3, 320, 320), test_crop_pct=1.0,\n        first_conv='conv1.0'),\n    'seresnextaa101d_32x8d.sw_in12k_ft_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        first_conv='conv1.0', test_crop_pct=1.0),\n    'seresnextaa201d_32x8d.sw_in12k_ft_in1k_384': _cfg(\n        hf_hub_id='timm/',\n        interpolation='bicubic', first_conv='conv1.0', pool_size=(12, 12), input_size=(3, 384, 384), crop_pct=1.0),\n    'seresnextaa201d_32x8d.sw_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821, interpolation='bicubic', first_conv='conv1.0',\n        crop_pct=0.95, input_size=(3, 320, 320), pool_size=(10, 10), test_input_size=(3, 384, 384), test_crop_pct=1.0),\n\n    'resnetaa50d.sw_in12k': _ttcfg(\n        hf_hub_id='timm/',\n        num_classes=11821, first_conv='conv1.0', crop_pct=0.95, test_crop_pct=1.0),\n    'resnetaa50d.d_in12k': _ttcfg(\n        hf_hub_id='timm/',\n        num_classes=11821, first_conv='conv1.0', crop_pct=0.95, test_crop_pct=1.0),\n    'resnetaa101d.sw_in12k': _ttcfg(\n        hf_hub_id='timm/',\n        num_classes=11821, first_conv='conv1.0', crop_pct=0.95, test_crop_pct=1.0),\n    'seresnextaa101d_32x8d.sw_in12k': _ttcfg(\n        hf_hub_id='timm/',\n        num_classes=11821, first_conv='conv1.0', crop_pct=0.95, test_crop_pct=1.0),\n\n    'resnetblur18.untrained': _ttcfg(),\n    'resnetblur50.bt_in1k': _ttcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/resnetblur50-84f4748f.pth'),\n    'resnetblur50d.untrained': _ttcfg(first_conv='conv1.0'),\n    'resnetblur101d.untrained': 
_ttcfg(first_conv='conv1.0'),\n    'resnetaa34d.untrained': _ttcfg(first_conv='conv1.0'),\n    'resnetaa50.a1h_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/resnetaa50_a1h-4cf422b3.pth'),\n\n    'seresnetaa50d.untrained': _ttcfg(first_conv='conv1.0'),\n    'seresnextaa101d_32x8d.ah_in1k': _rcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/seresnextaa101d_32x8d_ah-83c8ae12.pth',\n        first_conv='conv1.0'),\n\n    # ResNet-RS models\n    'resnetrs50.tf_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rs-weights/resnetrs50_ema-6b53758b.pth',\n        input_size=(3, 160, 160), pool_size=(5, 5), crop_pct=0.91, test_input_size=(3, 224, 224),\n        interpolation='bicubic', first_conv='conv1.0'),\n    'resnetrs101.tf_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rs-weights/resnetrs101_i192_ema-1509bbf6.pth',\n        input_size=(3, 192, 192), pool_size=(6, 6), crop_pct=0.94, test_input_size=(3, 288, 288),\n        interpolation='bicubic', first_conv='conv1.0'),\n    'resnetrs152.tf_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rs-weights/resnetrs152_i256_ema-a9aff7f9.pth',\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0, test_input_size=(3, 320, 320),\n        interpolation='bicubic', first_conv='conv1.0'),\n    'resnetrs200.tf_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/resnetrs200_c-6b698b88.pth',\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0, test_input_size=(3, 320, 320),\n        interpolation='bicubic', 
first_conv='conv1.0'),\n    'resnetrs270.tf_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rs-weights/resnetrs270_ema-b40e674c.pth',\n        input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0, test_input_size=(3, 352, 352),\n        interpolation='bicubic', first_conv='conv1.0'),\n    'resnetrs350.tf_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rs-weights/resnetrs350_i256_ema-5a1aa8f1.pth',\n        input_size=(3, 288, 288), pool_size=(9, 9), crop_pct=1.0, test_input_size=(3, 384, 384),\n        interpolation='bicubic', first_conv='conv1.0'),\n    'resnetrs420.tf_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rs-weights/resnetrs420_ema-972dee69.pth',\n        input_size=(3, 320, 320), pool_size=(10, 10), crop_pct=1.0, test_input_size=(3, 416, 416),\n        interpolation='bicubic', first_conv='conv1.0'),\n\n    # gluon resnet weights\n    'resnet18.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet18_v1b-0757602b.pth'),\n    'resnet34.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet34_v1b-c6d82d59.pth'),\n    'resnet50.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet50_v1b-0ebe02e2.pth'),\n    'resnet101.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet101_v1b-3b017079.pth'),\n    'resnet152.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        
url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet152_v1b-c1edb0dd.pth'),\n    'resnet50c.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet50_v1c-48092f55.pth',\n        first_conv='conv1.0'),\n    'resnet101c.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet101_v1c-1f26822a.pth',\n        first_conv='conv1.0'),\n    'resnet152c.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet152_v1c-a3bb0b98.pth',\n        first_conv='conv1.0'),\n    'resnet50d.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet50_v1d-818a1b1b.pth',\n        first_conv='conv1.0'),\n    'resnet101d.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet101_v1d-0f9c8644.pth',\n        first_conv='conv1.0'),\n    'resnet152d.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet152_v1d-bd354e12.pth',\n        first_conv='conv1.0'),\n    'resnet50s.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet50_v1s-1762acc0.pth',\n        first_conv='conv1.0'),\n    'resnet101s.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet101_v1s-60fe0cc1.pth',\n        first_conv='conv1.0'),\n    'resnet152s.gluon_in1k': _gcfg(\n        
hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet152_v1s-dcc41b81.pth',\n        first_conv='conv1.0'),\n    'resnext50_32x4d.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnext50_32x4d-e6a097c1.pth'),\n    'resnext101_32x4d.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnext101_32x4d-b253c8c4.pth'),\n    'resnext101_64x4d.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnext101_64x4d-f9a8e184.pth'),\n    'seresnext50_32x4d.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_seresnext50_32x4d-90cf2d6e.pth'),\n    'seresnext101_32x4d.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_seresnext101_32x4d-cf52900d.pth'),\n    'seresnext101_64x4d.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_seresnext101_64x4d-f9926f93.pth'),\n    'senet154.gluon_in1k': _gcfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_senet154-70a1a3c0.pth',\n        first_conv='conv1.0'),\n\n    'test_resnet.r160_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=0.95,\n        input_size=(3, 160, 160), pool_size=(5, 5), first_conv='conv1.0'),\n})\n\n\n@register_model\ndef resnet10t(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-10-T model.\n    
\"\"\"\n    model_args = dict(block=BasicBlock, layers=(1, 1, 1, 1), stem_width=32, stem_type='deep_tiered', avg_down=True)\n    return _create_resnet('resnet10t', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet14t(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-14-T model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(1, 1, 1, 1), stem_width=32, stem_type='deep_tiered', avg_down=True)\n    return _create_resnet('resnet14t', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet18(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-18 model.\n    \"\"\"\n    model_args = dict(block=BasicBlock, layers=(2, 2, 2, 2))\n    return _create_resnet('resnet18', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet18d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-18-D model.\n    \"\"\"\n    model_args = dict(block=BasicBlock, layers=(2, 2, 2, 2), stem_width=32, stem_type='deep', avg_down=True)\n    return _create_resnet('resnet18d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet34(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-34 model.\n    \"\"\"\n    model_args = dict(block=BasicBlock, layers=(3, 4, 6, 3))\n    return _create_resnet('resnet34', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet34d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-34-D model.\n    \"\"\"\n    model_args = dict(block=BasicBlock, layers=(3, 4, 6, 3), stem_width=32, stem_type='deep', avg_down=True)\n    return _create_resnet('resnet34d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet26(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-26 model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(2, 2, 2, 2))\n    return _create_resnet('resnet26', 
pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet26t(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-26-T model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(2, 2, 2, 2), stem_width=32, stem_type='deep_tiered', avg_down=True)\n    return _create_resnet('resnet26t', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet26d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-26-D model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(2, 2, 2, 2), stem_width=32, stem_type='deep', avg_down=True)\n    return _create_resnet('resnet26d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet50(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-50 model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 6, 3))\n    return _create_resnet('resnet50', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet50c(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-50-C model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 6, 3), stem_width=32, stem_type='deep')\n    return _create_resnet('resnet50c', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet50d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-50-D model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 6, 3), stem_width=32, stem_type='deep', avg_down=True)\n    return _create_resnet('resnet50d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet50s(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-50-S model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 6, 3), stem_width=64, stem_type='deep')\n    return _create_resnet('resnet50s', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet50t(pretrained: bool 
= False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-50-T model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 6, 3), stem_width=32, stem_type='deep_tiered', avg_down=True)\n    return _create_resnet('resnet50t', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet101(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-101 model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 23, 3))\n    return _create_resnet('resnet101', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet101c(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-101-C model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 23, 3), stem_width=32, stem_type='deep')\n    return _create_resnet('resnet101c', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet101d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-101-D model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 23, 3), stem_width=32, stem_type='deep', avg_down=True)\n    return _create_resnet('resnet101d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet101s(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-101-S model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 23, 3), stem_width=64, stem_type='deep')\n    return _create_resnet('resnet101s', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet152(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-152 model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 8, 36, 3))\n    return _create_resnet('resnet152', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet152c(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-152-C model.\n    \"\"\"\n    model_args = 
dict(block=Bottleneck, layers=(3, 8, 36, 3), stem_width=32, stem_type='deep')\n    return _create_resnet('resnet152c', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet152d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-152-D model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 8, 36, 3), stem_width=32, stem_type='deep', avg_down=True)\n    return _create_resnet('resnet152d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet152s(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-152-S model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 8, 36, 3), stem_width=64, stem_type='deep')\n    return _create_resnet('resnet152s', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet200(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-200 model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 24, 36, 3))\n    return _create_resnet('resnet200', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet200d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-200-D model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 24, 36, 3), stem_width=32, stem_type='deep', avg_down=True)\n    return _create_resnet('resnet200d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef wide_resnet50_2(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a Wide ResNet-50-2 model.\n    The model is the same as ResNet except for the bottleneck number of channels\n    which is twice larger in every block. The number of channels in outer 1x1\n    convolutions is the same, e.g. 
last block in ResNet-50 has 2048-512-2048\n    channels, and in Wide ResNet-50-2 has 2048-1024-2048.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 6, 3), base_width=128)\n    return _create_resnet('wide_resnet50_2', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef wide_resnet101_2(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a Wide ResNet-101-2 model.\n    The model is the same as ResNet except for the bottleneck number of channels\n    which is twice larger in every block. The number of channels in outer 1x1\n    convolutions is the same.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 23, 3), base_width=128)\n    return _create_resnet('wide_resnet101_2', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnet50_gn(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-50 model w/ GroupNorm\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 6, 3), norm_layer='groupnorm')\n    return _create_resnet('resnet50_gn', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnext50_32x4d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNeXt50-32x4d model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 6, 3), cardinality=32, base_width=4)\n    return _create_resnet('resnext50_32x4d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnext50d_32x4d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNeXt50d-32x4d model. 
ResNext50 w/ deep stem & avg pool downsample\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 6, 3),  cardinality=32, base_width=4,\n        stem_width=32, stem_type='deep', avg_down=True)\n    return _create_resnet('resnext50d_32x4d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnext101_32x4d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNeXt-101 32x4d model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 23, 3), cardinality=32, base_width=4)\n    return _create_resnet('resnext101_32x4d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnext101_32x8d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNeXt-101 32x8d model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 23, 3), cardinality=32, base_width=8)\n    return _create_resnet('resnext101_32x8d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnext101_32x16d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNeXt-101 32x16d model\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 23, 3), cardinality=32, base_width=16)\n    return _create_resnet('resnext101_32x16d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnext101_32x32d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNeXt-101 32x32d model\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 23, 3), cardinality=32, base_width=32)\n    return _create_resnet('resnext101_32x32d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnext101_64x4d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNeXt101-64x4d model.\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 23, 3), cardinality=64, base_width=4)\n    return _create_resnet('resnext101_64x4d', pretrained, **dict(model_args, 
**kwargs))\n\n\n@register_model\ndef ecaresnet26t(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs an ECA-ResNeXt-26-T model.\n    This is technically a 28 layer ResNet, like a 'D' bag-of-tricks model but with tiered 24, 32, 64 channels\n    in the deep stem and ECA attn.\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(2, 2, 2, 2), stem_width=32,\n        stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'))\n    return _create_resnet('ecaresnet26t', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef ecaresnet50d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-50-D model with eca.\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 6, 3), stem_width=32, stem_type='deep', avg_down=True,\n        block_args=dict(attn_layer='eca'))\n    return _create_resnet('ecaresnet50d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef ecaresnet50d_pruned(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-50-D model pruned with eca.\n        The pruning has been obtained using https://arxiv.org/pdf/2002.08258.pdf\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 6, 3), stem_width=32, stem_type='deep', avg_down=True,\n        block_args=dict(attn_layer='eca'))\n    return _create_resnet('ecaresnet50d_pruned', pretrained, pruned=True, **dict(model_args, **kwargs))\n\n\n@register_model\ndef ecaresnet50t(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs an ECA-ResNet-50-T model.\n    Like a 'D' bag-of-tricks model but with tiered 24, 32, 64 channels in the deep stem and ECA attn.\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 6, 3), stem_width=32,\n        stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'))\n    return _create_resnet('ecaresnet50t', pretrained, **dict(model_args, 
**kwargs))\n\n\n@register_model\ndef ecaresnetlight(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-50-D light model with eca.\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(1, 1, 11, 3), stem_width=32, avg_down=True,\n        block_args=dict(attn_layer='eca'))\n    return _create_resnet('ecaresnetlight', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef ecaresnet101d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-101-D model with eca.\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 23, 3), stem_width=32, stem_type='deep', avg_down=True,\n        block_args=dict(attn_layer='eca'))\n    return _create_resnet('ecaresnet101d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef ecaresnet101d_pruned(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-101-D model pruned with eca.\n       The pruning has been obtained using https://arxiv.org/pdf/2002.08258.pdf\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 23, 3), stem_width=32, stem_type='deep', avg_down=True,\n        block_args=dict(attn_layer='eca'))\n    return _create_resnet('ecaresnet101d_pruned', pretrained, pruned=True, **dict(model_args, **kwargs))\n\n\n@register_model\ndef ecaresnet200d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-200-D model with ECA.\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 24, 36, 3), stem_width=32, stem_type='deep', avg_down=True,\n        block_args=dict(attn_layer='eca'))\n    return _create_resnet('ecaresnet200d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef ecaresnet269d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-269-D model with ECA.\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 30, 48, 8), stem_width=32, stem_type='deep', 
avg_down=True,\n        block_args=dict(attn_layer='eca'))\n    return _create_resnet('ecaresnet269d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef ecaresnext26t_32x4d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs an ECA-ResNeXt-26-T model.\n    This is technically a 28 layer ResNet, like a 'D' bag-of-tricks model but with tiered 24, 32, 64 channels\n    in the deep stem. This model replaces SE module with the ECA module\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(2, 2, 2, 2), cardinality=32, base_width=4, stem_width=32,\n        stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'))\n    return _create_resnet('ecaresnext26t_32x4d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef ecaresnext50t_32x4d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs an ECA-ResNeXt-50-T model.\n    This is technically a 28 layer ResNet, like a 'D' bag-of-tricks model but with tiered 24, 32, 64 channels\n    in the deep stem. 
This model replaces SE module with the ECA module\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(2, 2, 2, 2), cardinality=32, base_width=4, stem_width=32,\n        stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'))\n    return _create_resnet('ecaresnext50t_32x4d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnet18(pretrained: bool = False, **kwargs) -> ResNet:\n    model_args = dict(block=BasicBlock, layers=(2, 2, 2, 2), block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnet18', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnet34(pretrained: bool = False, **kwargs) -> ResNet:\n    model_args = dict(block=BasicBlock, layers=(3, 4, 6, 3), block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnet34', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnet50(pretrained: bool = False, **kwargs) -> ResNet:\n    model_args = dict(block=Bottleneck, layers=(3, 4, 6, 3), block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnet50', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnet50t(pretrained: bool = False, **kwargs) -> ResNet:\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 6, 3),  stem_width=32, stem_type='deep_tiered',\n        avg_down=True, block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnet50t', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnet101(pretrained: bool = False, **kwargs) -> ResNet:\n    model_args = dict(block=Bottleneck, layers=(3, 4, 23, 3), block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnet101', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnet152(pretrained: bool = False, **kwargs) -> ResNet:\n    model_args = dict(block=Bottleneck, layers=(3, 8, 36, 3), block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnet152', pretrained, 
**dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnet152d(pretrained: bool = False, **kwargs) -> ResNet:\n    model_args = dict(\n        block=Bottleneck, layers=(3, 8, 36, 3), stem_width=32, stem_type='deep',\n        avg_down=True, block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnet152d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnet200d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-200-D model with SE attn.\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 24, 36, 3), stem_width=32, stem_type='deep',\n        avg_down=True, block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnet200d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnet269d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-269-D model with SE attn.\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 30, 48, 8), stem_width=32, stem_type='deep',\n        avg_down=True, block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnet269d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnext26d_32x4d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a SE-ResNeXt-26-D model.`\n    This is technically a 28 layer ResNet, using the 'D' modifier from Gluon / bag-of-tricks for\n    combination of deep stem and avg_pool in downsample.\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(2, 2, 2, 2), cardinality=32, base_width=4, stem_width=32,\n        stem_type='deep', avg_down=True, block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnext26d_32x4d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnext26t_32x4d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a SE-ResNet-26-T model.\n    This is technically a 28 layer ResNet, like a 'D' bag-of-tricks model but with tiered 24, 
32, 64 channels\n    in the deep stem.\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(2, 2, 2, 2), cardinality=32, base_width=4, stem_width=32,\n        stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnext26t_32x4d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnext50_32x4d(pretrained: bool = False, **kwargs) -> ResNet:\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 6, 3), cardinality=32, base_width=4,\n        block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnext50_32x4d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnext101_32x4d(pretrained: bool = False, **kwargs) -> ResNet:\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 23, 3), cardinality=32, base_width=4,\n        block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnext101_32x4d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnext101_32x8d(pretrained: bool = False, **kwargs) -> ResNet:\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 23, 3), cardinality=32, base_width=8,\n        block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnext101_32x8d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnext101d_32x8d(pretrained: bool = False, **kwargs) -> ResNet:\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 23, 3), cardinality=32, base_width=8,\n        stem_width=32, stem_type='deep', avg_down=True,\n        block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnext101d_32x8d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnext101_64x4d(pretrained: bool = False, **kwargs) -> ResNet:\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 23, 3), cardinality=64, base_width=4,\n        block_args=dict(attn_layer='se'))\n    return 
_create_resnet('seresnext101_64x4d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef senet154(pretrained: bool = False, **kwargs) -> ResNet:\n    model_args = dict(\n        block=Bottleneck, layers=(3, 8, 36, 3), cardinality=64, base_width=4, stem_type='deep',\n        down_kernel_size=3, block_reduce_first=2, block_args=dict(attn_layer='se'))\n    return _create_resnet('senet154', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnetblur18(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-18 model with blur anti-aliasing\n    \"\"\"\n    model_args = dict(block=BasicBlock, layers=(2, 2, 2, 2), aa_layer=BlurPool2d)\n    return _create_resnet('resnetblur18', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnetblur50(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-50 model with blur anti-aliasing\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 6, 3), aa_layer=BlurPool2d)\n    return _create_resnet('resnetblur50', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnetblur50d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-50-D model with blur anti-aliasing\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 6, 3), aa_layer=BlurPool2d,\n        stem_width=32, stem_type='deep', avg_down=True)\n    return _create_resnet('resnetblur50d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnetblur101d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-101-D model with blur anti-aliasing\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 23, 3), aa_layer=BlurPool2d,\n        stem_width=32, stem_type='deep', avg_down=True)\n    return _create_resnet('resnetblur101d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnetaa34d(pretrained: bool = False, **kwargs) -> 
ResNet:\n    \"\"\"Constructs a ResNet-34-D model w/ avgpool anti-aliasing\n    \"\"\"\n    model_args = dict(\n        block=BasicBlock, layers=(3, 4, 6, 3),  aa_layer=nn.AvgPool2d, stem_width=32, stem_type='deep', avg_down=True)\n    return _create_resnet('resnetaa34d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnetaa50(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-50 model with avgpool anti-aliasing\n    \"\"\"\n    model_args = dict(block=Bottleneck, layers=(3, 4, 6, 3), aa_layer=nn.AvgPool2d)\n    return _create_resnet('resnetaa50', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnetaa50d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-50-D model with avgpool anti-aliasing\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 6, 3), aa_layer=nn.AvgPool2d,\n        stem_width=32, stem_type='deep', avg_down=True)\n    return _create_resnet('resnetaa50d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnetaa101d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-101-D model with avgpool anti-aliasing\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 23, 3), aa_layer=nn.AvgPool2d,\n        stem_width=32, stem_type='deep', avg_down=True)\n    return _create_resnet('resnetaa101d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnetaa50d(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a SE=ResNet-50-D model with avgpool anti-aliasing\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 6, 3), aa_layer=nn.AvgPool2d,\n        stem_width=32, stem_type='deep', avg_down=True, block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnetaa50d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnextaa101d_32x8d(pretrained: bool = False, **kwargs) -> ResNet:\n    
\"\"\"Constructs a SE=ResNeXt-101-D 32x8d model with avgpool anti-aliasing\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 23, 3), cardinality=32, base_width=8,\n        stem_width=32, stem_type='deep', avg_down=True, aa_layer=nn.AvgPool2d,\n        block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnextaa101d_32x8d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef seresnextaa201d_32x8d(pretrained: bool = False, **kwargs):\n    \"\"\"Constructs a SE=ResNeXt-101-D 32x8d model with avgpool anti-aliasing\n    \"\"\"\n    model_args = dict(\n        block=Bottleneck, layers=(3, 24, 36, 4), cardinality=32, base_width=8,\n        stem_width=64, stem_type='deep', avg_down=True, aa_layer=nn.AvgPool2d,\n        block_args=dict(attn_layer='se'))\n    return _create_resnet('seresnextaa201d_32x8d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnetrs50(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-RS-50 model.\n    Paper: Revisiting ResNets - https://arxiv.org/abs/2103.07579\n    Pretrained weights from https://github.com/tensorflow/tpu/tree/bee9c4f6/models/official/resnet/resnet_rs\n    \"\"\"\n    attn_layer = partial(get_attn('se'), rd_ratio=0.25)\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 6, 3), stem_width=32, stem_type='deep', replace_stem_pool=True,\n        avg_down=True,  block_args=dict(attn_layer=attn_layer))\n    return _create_resnet('resnetrs50', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnetrs101(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-RS-101 model.\n    Paper: Revisiting ResNets - https://arxiv.org/abs/2103.07579\n    Pretrained weights from https://github.com/tensorflow/tpu/tree/bee9c4f6/models/official/resnet/resnet_rs\n    \"\"\"\n    attn_layer = partial(get_attn('se'), rd_ratio=0.25)\n    model_args = dict(\n        block=Bottleneck, layers=(3, 4, 23, 
3), stem_width=32, stem_type='deep', replace_stem_pool=True,\n        avg_down=True,  block_args=dict(attn_layer=attn_layer))\n    return _create_resnet('resnetrs101', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnetrs152(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-RS-152 model.\n    Paper: Revisiting ResNets - https://arxiv.org/abs/2103.07579\n    Pretrained weights from https://github.com/tensorflow/tpu/tree/bee9c4f6/models/official/resnet/resnet_rs\n    \"\"\"\n    attn_layer = partial(get_attn('se'), rd_ratio=0.25)\n    model_args = dict(\n        block=Bottleneck, layers=(3, 8, 36, 3), stem_width=32, stem_type='deep', replace_stem_pool=True,\n        avg_down=True,  block_args=dict(attn_layer=attn_layer))\n    return _create_resnet('resnetrs152', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnetrs200(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-RS-200 model.\n    Paper: Revisiting ResNets - https://arxiv.org/abs/2103.07579\n    Pretrained weights from https://github.com/tensorflow/tpu/tree/bee9c4f6/models/official/resnet/resnet_rs\n    \"\"\"\n    attn_layer = partial(get_attn('se'), rd_ratio=0.25)\n    model_args = dict(\n        block=Bottleneck, layers=(3, 24, 36, 3), stem_width=32, stem_type='deep', replace_stem_pool=True,\n        avg_down=True,  block_args=dict(attn_layer=attn_layer))\n    return _create_resnet('resnetrs200', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnetrs270(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-RS-270 model.\n    Paper: Revisiting ResNets - https://arxiv.org/abs/2103.07579\n    Pretrained weights from https://github.com/tensorflow/tpu/tree/bee9c4f6/models/official/resnet/resnet_rs\n    \"\"\"\n    attn_layer = partial(get_attn('se'), rd_ratio=0.25)\n    model_args = dict(\n        block=Bottleneck, layers=(4, 29, 53, 4), stem_width=32, 
stem_type='deep', replace_stem_pool=True,\n        avg_down=True,  block_args=dict(attn_layer=attn_layer))\n    return _create_resnet('resnetrs270', pretrained, **dict(model_args, **kwargs))\n\n\n\n@register_model\ndef resnetrs350(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-RS-350 model.\n    Paper: Revisiting ResNets - https://arxiv.org/abs/2103.07579\n    Pretrained weights from https://github.com/tensorflow/tpu/tree/bee9c4f6/models/official/resnet/resnet_rs\n    \"\"\"\n    attn_layer = partial(get_attn('se'), rd_ratio=0.25)\n    model_args = dict(\n        block=Bottleneck, layers=(4, 36, 72, 4), stem_width=32, stem_type='deep', replace_stem_pool=True,\n        avg_down=True,  block_args=dict(attn_layer=attn_layer))\n    return _create_resnet('resnetrs350', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef resnetrs420(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a ResNet-RS-420 model\n    Paper: Revisiting ResNets - https://arxiv.org/abs/2103.07579\n    Pretrained weights from https://github.com/tensorflow/tpu/tree/bee9c4f6/models/official/resnet/resnet_rs\n    \"\"\"\n    attn_layer = partial(get_attn('se'), rd_ratio=0.25)\n    model_args = dict(\n        block=Bottleneck, layers=(4, 44, 87, 4), stem_width=32, stem_type='deep', replace_stem_pool=True,\n        avg_down=True,  block_args=dict(attn_layer=attn_layer))\n    return _create_resnet('resnetrs420', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef test_resnet(pretrained: bool = False, **kwargs) -> ResNet:\n    \"\"\"Constructs a tiny ResNet test model.\n    \"\"\"\n    model_args = dict(\n        block=[BasicBlock, BasicBlock, Bottleneck, BasicBlock], layers=(1, 1, 1, 1),\n        stem_width=16, stem_type='deep', avg_down=True, channels=(32, 48, 48, 96))\n    return _create_resnet('test_resnet', pretrained, **dict(model_args, **kwargs))\n\n\nregister_model_deprecations(__name__, {\n    'tv_resnet34': 
'resnet34.tv_in1k',\n    'tv_resnet50': 'resnet50.tv_in1k',\n    'tv_resnet101': 'resnet101.tv_in1k',\n    'tv_resnet152': 'resnet152.tv_in1k',\n    'tv_resnext50_32x4d' : 'resnext50_32x4d.tv_in1k',\n    'ig_resnext101_32x8d': 'resnext101_32x8d.fb_wsl_ig1b_ft_in1k',\n    'ig_resnext101_32x16d': 'resnext101_32x8d.fb_wsl_ig1b_ft_in1k',\n    'ig_resnext101_32x32d': 'resnext101_32x8d.fb_wsl_ig1b_ft_in1k',\n    'ig_resnext101_32x48d': 'resnext101_32x8d.fb_wsl_ig1b_ft_in1k',\n    'ssl_resnet18': 'resnet18.fb_ssl_yfcc100m_ft_in1k',\n    'ssl_resnet50': 'resnet50.fb_ssl_yfcc100m_ft_in1k',\n    'ssl_resnext50_32x4d': 'resnext50_32x4d.fb_ssl_yfcc100m_ft_in1k',\n    'ssl_resnext101_32x4d': 'resnext101_32x4d.fb_ssl_yfcc100m_ft_in1k',\n    'ssl_resnext101_32x8d': 'resnext101_32x8d.fb_ssl_yfcc100m_ft_in1k',\n    'ssl_resnext101_32x16d': 'resnext101_32x16d.fb_ssl_yfcc100m_ft_in1k',\n    'swsl_resnet18': 'resnet18.fb_swsl_ig1b_ft_in1k',\n    'swsl_resnet50': 'resnet50.fb_swsl_ig1b_ft_in1k',\n    'swsl_resnext50_32x4d': 'resnext50_32x4d.fb_swsl_ig1b_ft_in1k',\n    'swsl_resnext101_32x4d': 'resnext101_32x4d.fb_swsl_ig1b_ft_in1k',\n    'swsl_resnext101_32x8d': 'resnext101_32x8d.fb_swsl_ig1b_ft_in1k',\n    'swsl_resnext101_32x16d': 'resnext101_32x16d.fb_swsl_ig1b_ft_in1k',\n    'gluon_resnet18_v1b': 'resnet18.gluon_in1k',\n    'gluon_resnet34_v1b': 'resnet34.gluon_in1k',\n    'gluon_resnet50_v1b': 'resnet50.gluon_in1k',\n    'gluon_resnet101_v1b': 'resnet101.gluon_in1k',\n    'gluon_resnet152_v1b': 'resnet152.gluon_in1k',\n    'gluon_resnet50_v1c': 'resnet50c.gluon_in1k',\n    'gluon_resnet101_v1c': 'resnet101c.gluon_in1k',\n    'gluon_resnet152_v1c': 'resnet152c.gluon_in1k',\n    'gluon_resnet50_v1d': 'resnet50d.gluon_in1k',\n    'gluon_resnet101_v1d': 'resnet101d.gluon_in1k',\n    'gluon_resnet152_v1d': 'resnet152d.gluon_in1k',\n    'gluon_resnet50_v1s': 'resnet50s.gluon_in1k',\n    'gluon_resnet101_v1s': 'resnet101s.gluon_in1k',\n    'gluon_resnet152_v1s': 
'resnet152s.gluon_in1k',\n    'gluon_resnext50_32x4d': 'resnext50_32x4d.gluon_in1k',\n    'gluon_resnext101_32x4d': 'resnext101_32x4d.gluon_in1k',\n    'gluon_resnext101_64x4d': 'resnext101_64x4d.gluon_in1k',\n    'gluon_seresnext50_32x4d': 'seresnext50_32x4d.gluon_in1k',\n    'gluon_seresnext101_32x4d': 'seresnext101_32x4d.gluon_in1k',\n    'gluon_seresnext101_64x4d': 'seresnext101_64x4d.gluon_in1k',\n    'gluon_senet154': 'senet154.gluon_in1k',\n    'seresnext26tn_32x4d': 'seresnext26t_32x4d',\n})\n"
  },
  {
    "path": "timm/models/resnetv2.py",
    "content": "\"\"\"Pre-Activation ResNet v2 with GroupNorm and Weight Standardization.\n\nA PyTorch implementation of ResNetV2 adapted from the Google Big-Transfer (BiT) source code\nat https://github.com/google-research/big_transfer to match timm interfaces. The BiT weights have\nbeen included here as pretrained models from their original .NPZ checkpoints.\n\nAdditionally, supports non pre-activation bottleneck for use as a backbone for Vision Transformers (ViT) and\nextra padding support to allow porting of official Hybrid ResNet pretrained weights from\nhttps://github.com/google-research/vision_transformer\n\nThanks to the Google team for the above two repositories and associated papers:\n* Big Transfer (BiT): General Visual Representation Learning - https://arxiv.org/abs/1912.11370\n* An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale - https://arxiv.org/abs/2010.11929\n* Knowledge distillation: A good teacher is patient and consistent - https://arxiv.org/abs/2106.05237\n\nOriginal copyright of Google code below, modifications by Ross Wightman, Copyright 2020.\n\"\"\"\n# Copyright 2020 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#      http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom collections import OrderedDict  # pylint: disable=g-importing-member\nfrom functools import partial\nfrom typing import Any, Callable, Dict, List, Optional, Tuple, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_INCEPTION_MEAN, 
IMAGENET_INCEPTION_STD\nfrom timm.layers import GroupNormAct, BatchNormAct2d, EvoNorm2dS0, FilterResponseNormTlu2d, ClassifierHead, \\\n    DropPath, calculate_drop_path_rates, AvgPool2dSame, create_pool2d, StdConv2d, create_conv2d, get_act_layer, get_norm_act_layer, make_divisible\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint_seq, named_apply, adapt_input_conv\nfrom ._registry import generate_default_cfgs, register_model, register_model_deprecations\n\n__all__ = ['ResNetV2']  # model_registry will add each entrypoint fn to this\n\n\nclass PreActBasic(nn.Module):\n    \"\"\"Pre-activation basic block (not in typical 'v2' implementations).\"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: Optional[int] = None,\n            bottle_ratio: float = 1.0,\n            stride: int = 1,\n            dilation: int = 1,\n            first_dilation: Optional[int] = None,\n            groups: int = 1,\n            act_layer: Optional[Callable] = None,\n            conv_layer: Optional[Callable] = None,\n            norm_layer: Optional[Callable] = None,\n            proj_layer: Optional[Callable] = None,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize PreActBasic block.\n\n        Args:\n            in_chs: Input channels.\n            out_chs: Output channels.\n            bottle_ratio: Bottleneck ratio (not used in basic block).\n            stride: Stride for convolution.\n            dilation: Dilation rate.\n            first_dilation: First dilation rate.\n            groups: Group convolution size.\n            act_layer: Activation layer type.\n            conv_layer: Convolution layer type.\n            norm_layer: Normalization layer type.\n            proj_layer: Projection/downsampling layer type.\n            drop_path_rate: Stochastic depth drop rate.\n        
\"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        first_dilation = first_dilation or dilation\n        conv_layer = conv_layer or StdConv2d\n        norm_layer = norm_layer or partial(GroupNormAct, num_groups=32)\n        out_chs = out_chs or in_chs\n        mid_chs = make_divisible(out_chs * bottle_ratio)\n\n        if proj_layer is not None and (stride != 1 or first_dilation != dilation or in_chs != out_chs):\n            self.downsample = proj_layer(\n                in_chs,\n                out_chs,\n                stride=stride,\n                dilation=dilation,\n                first_dilation=first_dilation,\n                preact=True,\n                conv_layer=conv_layer,\n                norm_layer=norm_layer,\n                **dd,\n            )\n        else:\n            self.downsample = None\n\n        self.norm1 = norm_layer(in_chs, **dd)\n        self.conv1 = conv_layer(in_chs, mid_chs, 3, stride=stride, dilation=first_dilation, groups=groups, **dd)\n        self.norm2 = norm_layer(mid_chs, **dd)\n        self.conv2 = conv_layer(mid_chs, out_chs, 3, dilation=dilation, groups=groups, **dd)\n        self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0 else nn.Identity()\n\n    def zero_init_last(self) -> None:\n        \"\"\"Zero-initialize the last convolution weight (not applicable to basic block).\"\"\"\n        nn.init.zeros_(self.conv2.weight)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Output tensor.\n        \"\"\"\n        x_preact = self.norm1(x)\n\n        # shortcut branch\n        shortcut = x\n        if self.downsample is not None:\n            shortcut = self.downsample(x_preact)\n\n        # residual branch\n        x = self.conv1(x_preact)\n        x = self.conv2(self.norm2(x))\n        x = self.drop_path(x)\n        return x + shortcut\n\n\nclass 
PreActBottleneck(nn.Module):\n    \"\"\"Pre-activation (v2) bottleneck block.\n\n    Follows the implementation of \"Identity Mappings in Deep Residual Networks\":\n    https://github.com/KaimingHe/resnet-1k-layers/blob/master/resnet-pre-act.lua\n\n    Except it puts the stride on 3x3 conv when available.\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: Optional[int] = None,\n            bottle_ratio: float = 0.25,\n            stride: int = 1,\n            dilation: int = 1,\n            first_dilation: Optional[int] = None,\n            groups: int = 1,\n            act_layer: Optional[Callable] = None,\n            conv_layer: Optional[Callable] = None,\n            norm_layer: Optional[Callable] = None,\n            proj_layer: Optional[Callable] = None,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize PreActBottleneck block.\n\n        Args:\n            in_chs: Input channels.\n            out_chs: Output channels.\n            bottle_ratio: Bottleneck ratio.\n            stride: Stride for convolution.\n            dilation: Dilation rate.\n            first_dilation: First dilation rate.\n            groups: Group convolution size.\n            act_layer: Activation layer type.\n            conv_layer: Convolution layer type.\n            norm_layer: Normalization layer type.\n            proj_layer: Projection/downsampling layer type.\n            drop_path_rate: Stochastic depth drop rate.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        first_dilation = first_dilation or dilation\n        conv_layer = conv_layer or StdConv2d\n        norm_layer = norm_layer or partial(GroupNormAct, num_groups=32)\n        out_chs = out_chs or in_chs\n        mid_chs = make_divisible(out_chs * bottle_ratio)\n\n        if proj_layer is not None:\n            self.downsample = proj_layer(\n       
         in_chs,
                out_chs,
                stride=stride,
                dilation=dilation,
                first_dilation=first_dilation,
                preact=True,
                conv_layer=conv_layer,
                norm_layer=norm_layer,
                **dd,
            )
        else:
            self.downsample = None

        # Pre-activation ordering: each norm layer precedes its conv.
        self.norm1 = norm_layer(in_chs, **dd)
        self.conv1 = conv_layer(in_chs, mid_chs, 1, **dd)
        self.norm2 = norm_layer(mid_chs, **dd)
        self.conv2 = conv_layer(mid_chs, mid_chs, 3, stride=stride, dilation=first_dilation, groups=groups, **dd)
        self.norm3 = norm_layer(mid_chs, **dd)
        self.conv3 = conv_layer(mid_chs, out_chs, 1, **dd)
        self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0 else nn.Identity()

    def zero_init_last(self) -> None:
        """Zero-initialize the last convolution weight."""
        nn.init.zeros_(self.conv3.weight)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Forward pass.

        Args:
            x: Input tensor.

        Returns:
            Output tensor.
        """
        x_preact = self.norm1(x)

        # shortcut branch; a projection downsample consumes the pre-activated input
        shortcut = x
        if self.downsample is not None:
            shortcut = self.downsample(x_preact)

        # residual branch
        x = self.conv1(x_preact)
        x = self.conv2(self.norm2(x))
        x = self.conv3(self.norm3(x))
        x = self.drop_path(x)
        return x + shortcut


class Bottleneck(nn.Module):
    """Non Pre-activation bottleneck block, equiv to V1.5/V1b Bottleneck. Used for ViT.
    """
    def __init__(
            self,
            in_chs: int,
            out_chs: Optional[int] = None,
            bottle_ratio: float = 0.25,
            stride: int = 1,
            dilation: int = 1,
            first_dilation: Optional[int] = None,
            groups: int = 1,
            act_layer: Optional[Callable] = None,
            conv_layer: Optional[Callable] = None,
            norm_layer: Optional[Callable] = None,
            proj_layer: Optional[Callable] = None,
            drop_path_rate: float = 0.,
            device=None,
            dtype=None,
    ):
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        first_dilation = first_dilation or dilation
        act_layer = act_layer or nn.ReLU
        conv_layer = conv_layer or StdConv2d
        norm_layer = norm_layer or partial(GroupNormAct, num_groups=32)
        out_chs = out_chs or in_chs
        mid_chs = make_divisible(out_chs * bottle_ratio)

        if proj_layer is not None:
            # projection shortcut to match output channels / stride
            self.downsample = proj_layer(
                in_chs,
                out_chs,
                stride=stride,
                dilation=dilation,
                preact=False,
                conv_layer=conv_layer,
                norm_layer=norm_layer,
                **dd,
            )
        else:
            self.downsample = None

        # Post-activation ordering: each conv precedes its norm(+act).
        self.conv1 = conv_layer(in_chs, mid_chs, 1, **dd)
        self.norm1 = norm_layer(mid_chs, **dd)
        self.conv2 = conv_layer(mid_chs, mid_chs, 3, stride=stride, dilation=first_dilation, groups=groups, **dd)
        self.norm2 = norm_layer(mid_chs, **dd)
        self.conv3 = conv_layer(mid_chs, out_chs, 1, **dd)
        self.norm3 = norm_layer(out_chs, apply_act=False, **dd)
        self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0 else nn.Identity()
        self.act3 = act_layer(inplace=True)

    def zero_init_last(self) -> None:
        """Zero-initialize the last batch norm weight."""
        # getattr guard: some norm layers may not expose an affine weight
        if getattr(self.norm3, 'weight', None) is not None:
            nn.init.zeros_(self.norm3.weight)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Forward pass.

        Args:
            x: Input tensor.

        Returns:
            Output tensor.
        """
        # shortcut branch
        shortcut = x
        if self.downsample is not None:
            shortcut = self.downsample(x)

        # residual
        x = self.conv1(x)
        x = self.norm1(x)
        x = self.conv2(x)
        x = self.norm2(x)
        x = self.conv3(x)
        x = self.norm3(x)
        x = self.drop_path(x)
        # activation applied after the residual add (V1.5-style)
        x = self.act3(x + shortcut)
        return x


class DownsampleConv(nn.Module):
    """1x1 convolution downsampling module."""

    def __init__(
            self,
            in_chs: int,
            out_chs: int,
            stride: int = 1,
            dilation: int = 1,  # NOTE(review): unused here, kept for interface parity with DownsampleAvg
            first_dilation: Optional[int] = None,  # NOTE(review): unused here, kept for interface parity
            preact: bool = True,
            conv_layer: Optional[Callable] = None,
            norm_layer: Optional[Callable] = None,
            device=None,
            dtype=None,
    ):
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        self.conv = conv_layer(in_chs, out_chs, 1, stride=stride, **dd)
        # pre-activation blocks normalize outside the downsample path
        self.norm = nn.Identity() if preact else norm_layer(out_chs, apply_act=False, **dd)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Forward pass.

        Args:
            x: Input tensor.

        Returns:
            Downsampled tensor.
        """
        return self.norm(self.conv(x))


class DownsampleAvg(nn.Module):
    """AvgPool downsampling as in 'D' ResNet variants."""

    def __init__(
            self,
            in_chs: int,
            out_chs: int,
            stride: int = 1,
            dilation: int 
= 1,
            first_dilation: Optional[int] = None,
            preact: bool = True,
            conv_layer: Optional[Callable] = None,
            norm_layer: Optional[Callable] = None,
            device=None,
            dtype=None,
    ):
        dd = {'device': device, 'dtype': dtype}
        super().__init__()
        # stride is moved into the pool; when dilating, spatial size is preserved instead
        avg_stride = stride if dilation == 1 else 1
        if stride > 1 or dilation > 1:
            avg_pool_fn = AvgPool2dSame if avg_stride == 1 and dilation > 1 else nn.AvgPool2d
            self.pool = avg_pool_fn(2, avg_stride, ceil_mode=True, count_include_pad=False)
        else:
            self.pool = nn.Identity()
        self.conv = conv_layer(in_chs, out_chs, 1, stride=1, **dd)
        # pre-activation blocks normalize outside the downsample path
        self.norm = nn.Identity() if preact else norm_layer(out_chs, apply_act=False, **dd)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Forward pass.

        Args:
            x: Input tensor.

        Returns:
            Downsampled tensor.
        """
        return self.norm(self.conv(self.pool(x)))


class ResNetStage(nn.Module):
    """ResNet Stage: a sequence of residual blocks sharing output width and dilation."""
    def __init__(
            self,
            in_chs: int,
            out_chs: int,
            stride: int,
            dilation: int,
            depth: int,
            bottle_ratio: float = 0.25,
            groups: int = 1,
            avg_down: bool = False,
            block_dpr: Optional[List[float]] = None,
            block_fn: Callable = PreActBottleneck,
            act_layer: Optional[Callable] = None,
            conv_layer: Optional[Callable] = None,
            norm_layer: Optional[Callable] = None,
            **block_kwargs: Any,
    ):
        super().__init__()
        self.grad_checkpointing = False

        first_dilation = 1 if dilation in (1, 2) else 2
        layer_kwargs = dict(act_layer=act_layer, conv_layer=conv_layer, norm_layer=norm_layer)
        proj_layer = DownsampleAvg if avg_down else DownsampleConv
        prev_chs = in_chs
        self.blocks = nn.Sequential()
        for block_idx in range(depth):
            drop_path_rate = block_dpr[block_idx] if block_dpr else 0.
            # only the first block in a stage applies the stage stride
            stride = stride if block_idx == 0 else 1
            self.blocks.add_module(str(block_idx), block_fn(
                prev_chs,
                out_chs,
                stride=stride,
                dilation=dilation,
                bottle_ratio=bottle_ratio,
                groups=groups,
                first_dilation=first_dilation,
                proj_layer=proj_layer,
                drop_path_rate=drop_path_rate,
                **layer_kwargs,
                **block_kwargs,
            ))
            prev_chs = out_chs
            first_dilation = dilation
            proj_layer = None  # only the first block needs a projection shortcut

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Forward pass through all blocks in the stage.

        Args:
            x: Input tensor.

        Returns:
            Output tensor.
        """
        if self.grad_checkpointing and not torch.jit.is_scripting():
            x = checkpoint_seq(self.blocks, x)
        else:
            x = self.blocks(x)
        return x


def is_stem_deep(stem_type: str) -> bool:
    """Check if stem type is deep (has multiple convolutions).

    Args:
        stem_type: Type of stem to check.

    Returns:
        True if stem is deep, False otherwise.
    """
    return any([s in stem_type for s in ('deep', 'tiered')])


def create_resnetv2_stem(
        in_chs: int,
        out_chs: int = 64,
        stem_type: str = '',
        preact: bool = True,
        conv_layer: Callable = StdConv2d,
        norm_layer: Callable = partial(GroupNormAct, num_groups=32),
        device=None,
        dtype=None,
) -> nn.Sequential:
    """Build the ResNetV2 stem: conv (7x7 or deep 3x3 stack), optional norm, then max pool.

    Args:
        in_chs: Number of input channels.
        out_chs: Number of stem output channels.
        stem_type: One of '', 'fixed', 'same', 'deep', 'deep_fixed', 'deep_same', 'tiered'.
        preact: Pre-activation network; the final stem norm is skipped when True.
        conv_layer: Convolution layer constructor.
        norm_layer: Normalization layer constructor.

    Returns:
        The stem modules wrapped in an ``nn.Sequential``.
    """
    dd = {'device': device, 'dtype': dtype}
    stem = OrderedDict()
    assert stem_type in ('', 'fixed', 'same', 'deep', 'deep_fixed', 'deep_same', 'tiered')

    # NOTE conv padding mode can be changed by overriding the conv_layer def
    if is_stem_deep(stem_type):
        # A 3 deep 3x3  conv stack as in ResNet V1D models
        if 'tiered' in stem_type:
            stem_chs = (3 * out_chs // 8, out_chs // 2)  # 'T' resnets in resnet.py
        else:
            stem_chs = (out_chs // 2, out_chs // 2)  # 'D' ResNets
        stem['conv1'] = conv_layer(in_chs, stem_chs[0], kernel_size=3, stride=2, **dd)
        stem['norm1'] = norm_layer(stem_chs[0], **dd)
        stem['conv2'] = conv_layer(stem_chs[0], stem_chs[1], kernel_size=3, stride=1, **dd)
        stem['norm2'] = norm_layer(stem_chs[1], **dd)
        stem['conv3'] = conv_layer(stem_chs[1], out_chs, kernel_size=3, stride=1, **dd)
        if not preact:
            stem['norm3'] = norm_layer(out_chs, **dd)
    else:
        # The usual 7x7 stem conv
        stem['conv'] = conv_layer(in_chs, out_chs, kernel_size=7, stride=2, **dd)
        if not preact:
            stem['norm'] = norm_layer(out_chs, **dd)

    if 'fixed' in stem_type:
        # 'fixed' SAME padding approximation that is used in BiT models
        stem['pad'] = nn.ConstantPad2d(1, 0.)
        stem['pool'] = nn.MaxPool2d(kernel_size=3, stride=2, padding=0)
    elif 'same' in stem_type:
        # full, input size based 'SAME' padding, used in ViT Hybrid model
        stem['pool'] = create_pool2d('max', kernel_size=3, stride=2, padding='same')
    else:
        # the usual PyTorch symmetric padding
        stem['pool'] = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

    return nn.Sequential(stem)


class ResNetV2(nn.Module):
    """Implementation of Pre-activation (v2) ResNet models.
    """

    def __init__(
            self,
            layers: List[int],
            channels: Tuple[int, ...] 
= (256, 512, 1024, 2048),
            num_classes: int = 1000,
            in_chans: int = 3,
            global_pool: str = 'avg',
            output_stride: int = 32,
            width_factor: int = 1,
            stem_chs: int = 64,
            stem_type: str = '',
            avg_down: bool = False,
            preact: bool = True,
            basic: bool = False,
            bottle_ratio: float = 0.25,
            act_layer: Callable = nn.ReLU,
            norm_layer: Callable = partial(GroupNormAct, num_groups=32),
            conv_layer: Callable = StdConv2d,
            drop_rate: float = 0.,
            drop_path_rate: float = 0.,
            zero_init_last: bool = False,
            device=None,
            dtype=None,
    ):
        """
        Args:
            layers (List[int]): number of layers in each block
            channels (Tuple[int, ...]): number of channels in each block
            num_classes (int): number of classification classes (default 1000)
            in_chans (int): number of input (color) channels. (default 3)
            global_pool (str): Global pooling type. One of 'avg', 'max', 'avgmax', 'catavgmax' (default 'avg')
            output_stride (int): output stride of the network, 32, 16, or 8. (default 32)
            width_factor (int): channel (width) multiplication factor
            stem_chs (int): stem width (default: 64)
            stem_type (str): stem type (default: '' == 7x7)
            avg_down (bool): average pooling in residual downsampling (default: False)
            preact (bool): pre-activation (default: True)
            basic (bool): use basic blocks instead of bottleneck, pre-activation only (default: False)
            bottle_ratio (float): bottleneck channel ratio (default: 0.25)
            act_layer (Union[str, nn.Module]): activation layer
            norm_layer (Union[str, nn.Module]): normalization layer
            conv_layer (nn.Module): convolution module
            drop_rate: classifier dropout rate (default: 0.)
            drop_path_rate: stochastic depth rate (default: 0.)
            zero_init_last: zero-init last weight in residual path (default: False)
        """
        super().__init__()
        dd = {'device': device, 'dtype': dtype}
        self.num_classes = num_classes
        self.in_chans = in_chans
        self.drop_rate = drop_rate
        wf = width_factor
        norm_layer = get_norm_act_layer(norm_layer, act_layer=act_layer)
        act_layer = get_act_layer(act_layer)

        self.feature_info = []
        stem_chs = make_divisible(stem_chs * wf)
        self.stem = create_resnetv2_stem(
            in_chans,
            stem_chs,
            stem_type,
            preact,
            conv_layer=conv_layer,
            norm_layer=norm_layer,
            **dd,
        )
        # feature hook target differs by stem layout / preact mode
        stem_feat = ('stem.conv3' if is_stem_deep(stem_type) else 'stem.conv') if preact else 'stem.norm'
        self.feature_info.append(dict(num_chs=stem_chs, reduction=2, module=stem_feat))

        prev_chs = stem_chs
        curr_stride = 4
        dilation = 1
        block_dprs = calculate_drop_path_rates(drop_path_rate, layers, stagewise=True)
        if preact:
            block_fn = PreActBasic if basic else PreActBottleneck
        else:
            assert not basic  # non-preact basic blocks not implemented
            block_fn = Bottleneck
        self.stages = nn.Sequential()
        for stage_idx, (d, c, bdpr) in enumerate(zip(layers, channels, block_dprs)):
            out_chs = make_divisible(c * wf)
            stride = 1 if stage_idx == 0 else 2
            if curr_stride >= output_stride:
                # switch from striding to dilation once the target output stride is reached
                dilation *= stride
                stride = 1
            stage = ResNetStage(
                prev_chs,
                out_chs,
                stride=stride,
                dilation=dilation,
                depth=d,
                bottle_ratio=bottle_ratio,
                avg_down=avg_down,
                act_layer=act_layer,
                conv_layer=conv_layer,
                norm_layer=norm_layer,
                block_dpr=bdpr,
                block_fn=block_fn,
                **dd,
            )
            prev_chs = out_chs
            curr_stride *= stride
            self.feature_info += [dict(num_chs=prev_chs, reduction=curr_stride, module=f'stages.{stage_idx}')]
            self.stages.add_module(str(stage_idx), stage)

        self.num_features = self.head_hidden_size = prev_chs
        # pre-activation networks need a final norm before the head
        self.norm = norm_layer(self.num_features, **dd) if preact else nn.Identity()
        self.head = ClassifierHead(
            self.num_features,
            num_classes,
            pool_type=global_pool,
            drop_rate=self.drop_rate,
            use_conv=True,
            **dd,
        )

        self.init_weights(zero_init_last=zero_init_last)

    @torch.jit.ignore
    def init_weights(self, zero_init_last: bool = True) -> None:
        """Initialize model weights."""
        named_apply(partial(_init_weights, zero_init_last=zero_init_last), self)

    @torch.jit.ignore()
    def load_pretrained(self, checkpoint_path: str, prefix: str = 'resnet/') -> None:
        """Load pretrained weights from a numpy checkpoint file."""
        _load_weights(self, checkpoint_path, prefix)

    @torch.jit.ignore
    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:
        """Group 
parameters for optimization."""
        matcher = dict(
            stem=r'^stem',
            blocks=r'^stages\.(\d+)' if coarse else [
                (r'^stages\.(\d+)\.blocks\.(\d+)', None),
                (r'^norm', (99999,))  # very large group index for the final norm
            ]
        )
        return matcher

    @torch.jit.ignore
    def set_grad_checkpointing(self, enable: bool = True) -> None:
        """Enable or disable gradient checkpointing."""
        for s in self.stages:
            s.grad_checkpointing = enable

    @torch.jit.ignore
    def get_classifier(self) -> nn.Module:
        """Get the classifier head."""
        return self.head.fc

    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None) -> None:
        """Reset the classifier head.

        Args:
            num_classes: Number of classes for new classifier.
            global_pool: Global pooling type.
        """
        self.num_classes = num_classes
        self.head.reset(num_classes, global_pool)

    def forward_intermediates(
            self,
            x: torch.Tensor,
            indices: Optional[Union[int, List[int]]] = None,
            norm: bool = False,
            stop_early: bool = False,
            output_fmt: str = 'NCHW',
            intermediates_only: bool = False,
    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:
        """ Forward features that returns intermediates.

        Args:
            x: Input image tensor
            indices: Take last n blocks if int, all if None, select matching indices if sequence
            norm: Apply norm layer to compatible intermediates
            stop_early: Stop iterating over blocks when last desired intermediate hit
            output_fmt: Shape of intermediate feature outputs
            intermediates_only: Only return intermediate features
        Returns:
            List of intermediate features, or tuple of (final features, intermediates).
        """
        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'
        intermediates = []
        take_indices, max_index = feature_take_indices(5, indices)

        # forward pass
        feat_idx = 0  # index 0 corresponds to the stem feature
        H, W = x.shape[-2:]
        for stem in self.stem:
            x = stem(x)
            # capture the first 2x-reduced stem output as the stem intermediate
            # NOTE(review): x_down is only bound when a stem module output matches (H//2, W//2)
            # exactly; appears to rely on the stride-2 stem conv — confirm for odd input sizes.
            if x.shape[-2:] == (H //2, W //2):
                x_down = x
        if feat_idx in take_indices:
            intermediates.append(x_down)
        last_idx = len(self.stages)
        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript
            stages = self.stages
        else:
            stages = self.stages[:max_index]

        for feat_idx, stage in enumerate(stages, start=1):
            x = stage(x)
            if feat_idx in take_indices:
                if feat_idx == last_idx:
                    # norm is only compatible with the final stage output
                    x_inter = self.norm(x) if norm else x
                    intermediates.append(x_inter)
                else:
                    intermediates.append(x)

        if intermediates_only:
            return intermediates

        if feat_idx == last_idx:
            x = self.norm(x)

        return x, intermediates

    def prune_intermediate_layers(
            self,
            indices: Union[int, List[int]] = 1,
            prune_norm: bool = False,
            prune_head: bool = True,
    ):
        """ Prune layers not required for specified intermediates.

        Args:
            indices: Indices of intermediates to keep.
            prune_norm: Replace the final norm with Identity.
            prune_head: Reset the classifier head to remove it.

        Returns:
            The retained intermediate indices.
        """
        take_indices, max_index = feature_take_indices(5, indices)
        self.stages = self.stages[:max_index]  # truncate blocks w/ stem as idx 0
        if prune_norm:
            self.norm = nn.Identity()
        if prune_head:
            self.reset_classifier(0, '')
        return take_indices

    def forward_features(self, x: torch.Tensor) -> torch.Tensor:
        """Forward pass through feature extraction layers.

        Args:
            x: Input tensor.

        Returns:
            Feature tensor.
        """
        x = self.stem(x)
        x = self.stages(x)
        x = self.norm(x)
        return x

    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:
        """Forward pass through classifier head.

        Args:
            x: Input features.
            pre_logits: Return features before final linear layer.

        Returns:
            Classification logits or features.
        """
        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Forward pass.

        Args:
            x: Input tensor.

        Returns:
            Output logits.
        """
        x = self.forward_features(x)
        x = self.forward_head(x)
        return x


def _init_weights(module: nn.Module, name: str = '', zero_init_last: bool = True) -> None:
    """Initialize module weights.

    Args:
        module: PyTorch module to initialize.
        name: Module name.
        zero_init_last: Zero-initialize last layer weights.
    """
    if isinstance(module, nn.Linear) or ('head.fc' in name and isinstance(module, nn.Conv2d)):
        nn.init.normal_(module.weight, mean=0.0, std=0.01)
        nn.init.zeros_(module.bias)
    elif isinstance(module, nn.Conv2d):
        nn.init.kaiming_normal_(module.weight, mode='fan_out', nonlinearity='relu')
        if module.bias is not None:
            nn.init.zeros_(module.bias)
    elif isinstance(module, (nn.BatchNorm2d, nn.LayerNorm, nn.GroupNorm)):
        nn.init.ones_(module.weight)
        nn.init.zeros_(module.bias)
    elif zero_init_last and hasattr(module, 'zero_init_last'):
        # residual blocks expose zero_init_last() to zero their final norm/conv weight
        module.zero_init_last()


@torch.no_grad()
def _load_weights(model: nn.Module, checkpoint_path: str, prefix: str = 'resnet/'):
    """Copy weights from a numpy checkpoint into ``model`` in-place."""
    import numpy as np

    def t2p(conv_weights):
        """Possibly convert HWIO to OIHW."""
        if conv_weights.ndim == 4:
      
      conv_weights = conv_weights.transpose([3, 2, 0, 1])
        return torch.from_numpy(conv_weights)

    weights = np.load(checkpoint_path)
    stem_conv_w = adapt_input_conv(
        model.stem.conv.weight.shape[1], t2p(weights[f'{prefix}root_block/standardized_conv2d/kernel']))
    model.stem.conv.weight.copy_(stem_conv_w)
    model.norm.weight.copy_(t2p(weights[f'{prefix}group_norm/gamma']))
    model.norm.bias.copy_(t2p(weights[f'{prefix}group_norm/beta']))
    # only copy the classifier when its output width matches the checkpoint head
    if isinstance(getattr(model.head, 'fc', None), nn.Conv2d) and \
            model.head.fc.weight.shape[0] == weights[f'{prefix}head/conv2d/kernel'].shape[-1]:
        model.head.fc.weight.copy_(t2p(weights[f'{prefix}head/conv2d/kernel']))
        model.head.fc.bias.copy_(t2p(weights[f'{prefix}head/conv2d/bias']))
    for i, (sname, stage) in enumerate(model.stages.named_children()):
        for j, (bname, block) in enumerate(stage.blocks.named_children()):
            cname = 'standardized_conv2d'
            block_prefix = f'{prefix}block{i + 1}/unit{j + 1:02d}/'
            block.conv1.weight.copy_(t2p(weights[f'{block_prefix}a/{cname}/kernel']))
            block.conv2.weight.copy_(t2p(weights[f'{block_prefix}b/{cname}/kernel']))
            block.conv3.weight.copy_(t2p(weights[f'{block_prefix}c/{cname}/kernel']))
            block.norm1.weight.copy_(t2p(weights[f'{block_prefix}a/group_norm/gamma']))
            block.norm2.weight.copy_(t2p(weights[f'{block_prefix}b/group_norm/gamma']))
            block.norm3.weight.copy_(t2p(weights[f'{block_prefix}c/group_norm/gamma']))
            block.norm1.bias.copy_(t2p(weights[f'{block_prefix}a/group_norm/beta']))
            block.norm2.bias.copy_(t2p(weights[f'{block_prefix}b/group_norm/beta']))
            block.norm3.bias.copy_(t2p(weights[f'{block_prefix}c/group_norm/beta']))
            if block.downsample is not None:
                w = weights[f'{block_prefix}a/proj/{cname}/kernel']
                block.downsample.conv.weight.copy_(t2p(w))


def _create_resnetv2(variant: str, pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """Create a ResNetV2 model.

    Args:
        variant: Model variant name.
        pretrained: Load pretrained weights.
        **kwargs: Additional model arguments.

    Returns:
        ResNetV2 model instance.
    """
    feature_cfg = dict(flatten_sequential=True)
    return build_model_with_cfg(
        ResNetV2, variant, pretrained,
        feature_cfg=feature_cfg,
        **kwargs,
    )


def _create_resnetv2_bit(variant: str, pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """Create a ResNetV2 model with BiT weights.

    Args:
        variant: Model variant name.
        pretrained: Load pretrained weights.
        **kwargs: Additional model arguments.

    Returns:
        ResNetV2 model instance.
    """
    return _create_resnetv2(
        variant,
        pretrained=pretrained,
        stem_type='fixed',  # BiT models use the 'fixed' SAME-padding-approx stem
        conv_layer=partial(StdConv2d, eps=1e-8),
        **kwargs,
    )


def _cfg(url: str = '', **kwargs: Any) -> Dict[str, Any]:
    """Build a default pretrained config dict; entries overridable via ``kwargs``."""
    return {
        'url': url,
        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),
        'crop_pct': 0.875, 'interpolation': 'bilinear',
        'mean': IMAGENET_INCEPTION_MEAN, 'std': IMAGENET_INCEPTION_STD,
        'first_conv': 'stem.conv', 'classifier': 'head.fc',
        'license': 'apache-2.0',
        **kwargs
    }


default_cfgs = generate_default_cfgs({
    #  Paper: Knowledge distillation: A good teacher is patient and consistent - https://arxiv.org/abs/2106.05237
    'resnetv2_50x1_bit.goog_distilled_in1k': _cfg(
        hf_hub_id='timm/',
        interpolation='bicubic', custom_load=True),
    'resnetv2_152x2_bit.goog_teacher_in21k_ft_in1k': _cfg(
        hf_hub_id='timm/',
        interpolation='bicubic', custom_load=True),
    
'resnetv2_152x2_bit.goog_teacher_in21k_ft_in1k_384': _cfg(
        hf_hub_id='timm/',
        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, interpolation='bicubic', custom_load=True),

    # pretrained on imagenet21k, finetuned on imagenet1k
    'resnetv2_50x1_bit.goog_in21k_ft_in1k': _cfg(
        hf_hub_id='timm/',
        input_size=(3, 448, 448), pool_size=(14, 14), crop_pct=1.0, custom_load=True),
    'resnetv2_50x3_bit.goog_in21k_ft_in1k': _cfg(
        hf_hub_id='timm/',
        input_size=(3, 448, 448), pool_size=(14, 14), crop_pct=1.0, custom_load=True),
    'resnetv2_101x1_bit.goog_in21k_ft_in1k': _cfg(
        hf_hub_id='timm/',
        input_size=(3, 448, 448), pool_size=(14, 14), crop_pct=1.0, custom_load=True),
    'resnetv2_101x3_bit.goog_in21k_ft_in1k': _cfg(
        hf_hub_id='timm/',
        input_size=(3, 448, 448), pool_size=(14, 14), crop_pct=1.0, custom_load=True),
    'resnetv2_152x2_bit.goog_in21k_ft_in1k': _cfg(
        hf_hub_id='timm/',
        input_size=(3, 448, 448), pool_size=(14, 14), crop_pct=1.0, custom_load=True),
    'resnetv2_152x4_bit.goog_in21k_ft_in1k': _cfg(
        hf_hub_id='timm/',
        input_size=(3, 480, 480), pool_size=(15, 15), crop_pct=1.0, custom_load=True),  # only one at 480x480?

    # trained on imagenet-21k
    'resnetv2_50x1_bit.goog_in21k': _cfg(
        hf_hub_id='timm/',
        num_classes=21843, custom_load=True),
    'resnetv2_50x3_bit.goog_in21k': _cfg(
        hf_hub_id='timm/',
        num_classes=21843, custom_load=True),
    'resnetv2_101x1_bit.goog_in21k': _cfg(
        hf_hub_id='timm/',
        num_classes=21843, custom_load=True),
    'resnetv2_101x3_bit.goog_in21k': _cfg(
        hf_hub_id='timm/',
        num_classes=21843, custom_load=True),
    'resnetv2_152x2_bit.goog_in21k': _cfg(
        hf_hub_id='timm/',
        num_classes=21843, custom_load=True),
    'resnetv2_152x4_bit.goog_in21k': _cfg(
        hf_hub_id='timm/',
        num_classes=21843, custom_load=True),

    # timm-trained / untrained non-BiT variants
    'resnetv2_18.ra4_e3600_r224_in1k': _cfg(
        hf_hub_id='timm/',
        interpolation='bicubic', crop_pct=0.9, test_input_size=(3, 288, 288), test_crop_pct=1.0),
    'resnetv2_18d.ra4_e3600_r224_in1k': _cfg(
        hf_hub_id='timm/',
        interpolation='bicubic', crop_pct=0.9, test_input_size=(3, 288, 288), test_crop_pct=1.0,
        first_conv='stem.conv1'),
    'resnetv2_34.ra4_e3600_r224_in1k': _cfg(
        hf_hub_id='timm/',
        interpolation='bicubic', crop_pct=0.9, test_input_size=(3, 288, 288), test_crop_pct=1.0),
    'resnetv2_34d.ra4_e3600_r224_in1k': _cfg(
        hf_hub_id='timm/',
        interpolation='bicubic', crop_pct=0.9, test_input_size=(3, 288, 288), test_crop_pct=1.0,
        first_conv='stem.conv1'),
    'resnetv2_34d.ra4_e3600_r384_in1k': _cfg(
        hf_hub_id='timm/',
        crop_pct=1.0, input_size=(3, 384, 384), pool_size=(12, 12), test_input_size=(3, 448, 448),
        interpolation='bicubic', first_conv='stem.conv1'),
    'resnetv2_50.a1h_in1k': _cfg(
        hf_hub_id='timm/',
        interpolation='bicubic', crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0),
    'resnetv2_50d.untrained': _cfg(
        interpolation='bicubic', first_conv='stem.conv1'),
    'resnetv2_50t.untrained': _cfg(
        interpolation='bicubic', first_conv='stem.conv1'),
    'resnetv2_101.a1h_in1k': _cfg(
        hf_hub_id='timm/',
        interpolation='bicubic', crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0),
    'resnetv2_101d.untrained': _cfg(
        interpolation='bicubic', first_conv='stem.conv1'),
    'resnetv2_152.untrained': _cfg(
        interpolation='bicubic'),
    'resnetv2_152d.untrained': _cfg(
        interpolation='bicubic', first_conv='stem.conv1'),

    'resnetv2_50d_gn.ah_in1k': _cfg(
        hf_hub_id='timm/',
        interpolation='bicubic', first_conv='stem.conv1',
        crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0),
    'resnetv2_50d_evos.ah_in1k': _cfg(
        hf_hub_id='timm/',
        interpolation='bicubic', first_conv='stem.conv1',
        crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0),
    'resnetv2_50d_frn.untrained': _cfg(
        interpolation='bicubic', first_conv='stem.conv1'),
})


@register_model
def resnetv2_50x1_bit(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-50x1-BiT model."""
    return _create_resnetv2_bit(
        'resnetv2_50x1_bit', pretrained=pretrained, layers=[3, 4, 6, 3], width_factor=1, **kwargs)


@register_model
def resnetv2_50x3_bit(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-50x3-BiT model."""
    return _create_resnetv2_bit(
        'resnetv2_50x3_bit', pretrained=pretrained, layers=[3, 4, 6, 3], width_factor=3, **kwargs)


@register_model
def resnetv2_101x1_bit(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-101x1-BiT model."""
    return _create_resnetv2_bit(
        'resnetv2_101x1_bit', pretrained=pretrained, layers=[3, 4, 23, 3], width_factor=1, **kwargs)


@register_model
def resnetv2_101x3_bit(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-101x3-BiT model."""
    return _create_resnetv2_bit(
        'resnetv2_101x3_bit', pretrained=pretrained, layers=[3, 4, 23, 3], width_factor=3, **kwargs)


@register_model
def resnetv2_152x2_bit(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-152x2-BiT model."""
    return _create_resnetv2_bit(
        'resnetv2_152x2_bit', pretrained=pretrained, layers=[3, 8, 36, 3], width_factor=2, **kwargs)


@register_model
def resnetv2_152x4_bit(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-152x4-BiT model."""
    return _create_resnetv2_bit(
        'resnetv2_152x4_bit', pretrained=pretrained, layers=[3, 8, 36, 3], width_factor=4, 
**kwargs)


@register_model
def resnetv2_18(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-18 model."""
    model_args = dict(
        layers=[2, 2, 2, 2], channels=(64, 128, 256, 512), basic=True, bottle_ratio=1.0,
        conv_layer=create_conv2d, norm_layer=BatchNormAct2d
    )
    return _create_resnetv2('resnetv2_18', pretrained=pretrained, **dict(model_args, **kwargs))


@register_model
def resnetv2_18d(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-18d model (deep stem variant)."""
    model_args = dict(
        layers=[2, 2, 2, 2], channels=(64, 128, 256, 512), basic=True, bottle_ratio=1.0,
        conv_layer=create_conv2d, norm_layer=BatchNormAct2d, stem_type='deep', avg_down=True
    )
    return _create_resnetv2('resnetv2_18d', pretrained=pretrained, **dict(model_args, **kwargs))


@register_model
def resnetv2_34(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-34 model."""
    model_args = dict(
        layers=(3, 4, 6, 3), channels=(64, 128, 256, 512), basic=True, bottle_ratio=1.0,
        conv_layer=create_conv2d, norm_layer=BatchNormAct2d
    )
    return _create_resnetv2('resnetv2_34', pretrained=pretrained, **dict(model_args, **kwargs))


@register_model
def resnetv2_34d(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-34d model (deep stem variant)."""
    model_args = dict(
        layers=(3, 4, 6, 3), channels=(64, 128, 256, 512), basic=True, bottle_ratio=1.0,
        conv_layer=create_conv2d, norm_layer=BatchNormAct2d, stem_type='deep', avg_down=True
    )
    return _create_resnetv2('resnetv2_34d', pretrained=pretrained, **dict(model_args, **kwargs))


@register_model
def resnetv2_50(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-50 model."""
    model_args = dict(layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d)
    return _create_resnetv2('resnetv2_50', pretrained=pretrained, **dict(model_args, **kwargs))


@register_model
def resnetv2_50d(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-50d model (deep stem variant)."""
    model_args = dict(
        layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d,
        stem_type='deep', avg_down=True)
    return _create_resnetv2('resnetv2_50d', pretrained=pretrained, **dict(model_args, **kwargs))


@register_model
def resnetv2_50t(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-50t model (tiered stem variant)."""
    model_args = dict(
        layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d,
        stem_type='tiered', avg_down=True)
    return _create_resnetv2('resnetv2_50t', pretrained=pretrained, **dict(model_args, **kwargs))


@register_model
def resnetv2_101(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-101 model."""
    model_args = dict(layers=[3, 4, 23, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d)
    return _create_resnetv2('resnetv2_101', pretrained=pretrained, **dict(model_args, **kwargs))


@register_model
def resnetv2_101d(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-101d model (deep stem variant)."""
    model_args = dict(
        layers=[3, 4, 23, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d,
        stem_type='deep', avg_down=True)
    return _create_resnetv2('resnetv2_101d', pretrained=pretrained, **dict(model_args, **kwargs))


@register_model
def resnetv2_152(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-152 model."""
    model_args = dict(layers=[3, 8, 36, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d)
    return _create_resnetv2('resnetv2_152', pretrained=pretrained, **dict(model_args, **kwargs))


@register_model
def resnetv2_152d(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-152d model (deep stem variant)."""
    model_args = dict(
        layers=[3, 8, 36, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d,
        stem_type='deep', avg_down=True)
    return _create_resnetv2('resnetv2_152d', pretrained=pretrained, **dict(model_args, **kwargs))


# Experimental configs (may change / be removed)

@register_model
def resnetv2_50d_gn(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-50d model with Group Normalization."""
    model_args = dict(
        layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=GroupNormAct,
        stem_type='deep', avg_down=True)
    return _create_resnetv2('resnetv2_50d_gn', pretrained=pretrained, **dict(model_args, **kwargs))


@register_model
def resnetv2_50d_evos(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-50d model with EvoNorm."""
    model_args = dict(
        layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=EvoNorm2dS0,
        stem_type='deep', avg_down=True)
    return _create_resnetv2('resnetv2_50d_evos', pretrained=pretrained, **dict(model_args, **kwargs))


@register_model
def resnetv2_50d_frn(pretrained: bool = False, **kwargs: Any) -> ResNetV2:
    """ResNetV2-50d model with Filter Response Normalization."""
    model_args = dict(
        layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=FilterResponseNormTlu2d,
        stem_type='deep', avg_down=True)
    return _create_resnetv2('resnetv2_50d_frn', pretrained=pretrained, **dict(model_args, **kwargs))


# map legacy model names to their current replacements
register_model_deprecations(__name__, {
    'resnetv2_50x1_bitm': 'resnetv2_50x1_bit.goog_in21k_ft_in1k',
    'resnetv2_50x3_bitm': 'resnetv2_50x3_bit.goog_in21k_ft_in1k',
    'resnetv2_101x1_bitm': 'resnetv2_101x1_bit.goog_in21k_ft_in1k',
    'resnetv2_101x3_bitm': 'resnetv2_101x3_bit.goog_in21k_ft_in1k',
    'resnetv2_152x2_bitm': 'resnetv2_152x2_bit.goog_in21k_ft_in1k',
    'resnetv2_152x4_bitm': 'resnetv2_152x4_bit.goog_in21k_ft_in1k',
    'resnetv2_50x1_bitm_in21k': 'resnetv2_50x1_bit.goog_in21k',
    'resnetv2_50x3_bitm_in21k': 'resnetv2_50x3_bit.goog_in21k',
    'resnetv2_101x1_bitm_in21k': 'resnetv2_101x1_bit.goog_in21k',
    'resnetv2_101x3_bitm_in21k': 'resnetv2_101x3_bit.goog_in21k',
    'resnetv2_152x2_bitm_in21k': 'resnetv2_152x2_bit.goog_in21k',
    'resnetv2_152x4_bitm_in21k': 'resnetv2_152x4_bit.goog_in21k',
    'resnetv2_50x1_bit_distilled': 'resnetv2_50x1_bit.goog_distilled_in1k',
    'resnetv2_152x2_bit_teacher': 'resnetv2_152x2_bit.goog_teacher_in21k_ft_in1k',
    'resnetv2_152x2_bit_teacher_384': 'resnetv2_152x2_bit.goog_teacher_in21k_ft_in1k_384',
})
  },
  {
    "path": "timm/models/rexnet.py",
    "content": "\"\"\" ReXNet\n\nA PyTorch impl of `ReXNet: Diminishing Representational Bottleneck on Convolutional Neural Network` -\nhttps://arxiv.org/abs/2007.00992\n\nAdapted from original impl at https://github.com/clovaai/rexnet\nCopyright (c) 2020-present NAVER Corp. MIT license\n\nChanges for timm, feature extraction, and rounded channel variant hacked together by Ross Wightman\nCopyright 2020 Ross Wightman\n\"\"\"\n\nfrom functools import partial\nfrom math import ceil\nfrom typing import Any, Dict, List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import ClassifierHead, create_act_layer, ConvNormAct, DropPath, make_divisible, SEModule\nfrom ._builder import build_model_with_cfg\nfrom ._efficientnet_builder import efficientnet_init_weights\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint, checkpoint_seq\nfrom ._registry import generate_default_cfgs, register_model\n\n__all__ = ['RexNet']  # model_registry will add each entrypoint fn to this\n\n\nSEWithNorm = partial(SEModule, norm_layer=nn.BatchNorm2d)\n\n\nclass LinearBottleneck(nn.Module):\n    \"\"\"Linear bottleneck block for ReXNet.\n\n    A mobile inverted residual bottleneck block as used in MobileNetV2 and subsequent models.\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            stride: int,\n            dilation: Tuple[int, int] = (1, 1),\n            exp_ratio: float = 1.0,\n            se_ratio: float = 0.,\n            ch_div: int = 1,\n            act_layer: str = 'swish',\n            dw_act_layer: str = 'relu6',\n            drop_path: Optional[nn.Module] = None,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize LinearBottleneck.\n\n        Args:\n            in_chs: Number of input channels.\n            out_chs: Number of output channels.\n            
stride: Stride for depthwise conv.\n            dilation: Dilation rates.\n            exp_ratio: Expansion ratio.\n            se_ratio: Squeeze-excitation ratio.\n            ch_div: Channel divisor.\n            act_layer: Activation layer for expansion.\n            dw_act_layer: Activation layer for depthwise.\n            drop_path: Drop path module.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.use_shortcut = stride == 1 and dilation[0] == dilation[1] and in_chs <= out_chs\n        self.in_channels = in_chs\n        self.out_channels = out_chs\n\n        if exp_ratio != 1.:\n            dw_chs = make_divisible(round(in_chs * exp_ratio), divisor=ch_div)\n            self.conv_exp = ConvNormAct(in_chs, dw_chs, act_layer=act_layer, **dd)\n        else:\n            dw_chs = in_chs\n            self.conv_exp = None\n\n        self.conv_dw = ConvNormAct(\n            dw_chs,\n            dw_chs,\n            kernel_size=3,\n            stride=stride,\n            dilation=dilation[0],\n            groups=dw_chs,\n            apply_act=False,\n            **dd,\n        )\n        if se_ratio > 0:\n            self.se = SEWithNorm(dw_chs, rd_channels=make_divisible(int(dw_chs * se_ratio), ch_div), **dd)\n        else:\n            self.se = None\n        self.act_dw = create_act_layer(dw_act_layer)\n\n        self.conv_pwl = ConvNormAct(dw_chs, out_chs, 1, apply_act=False, **dd)\n        self.drop_path = drop_path\n\n    def feat_channels(self, exp: bool = False) -> int:\n        \"\"\"Get feature channel count.\n\n        Args:\n            exp: Return expanded channels if True.\n\n        Returns:\n            Number of feature channels.\n        \"\"\"\n        return self.conv_dw.out_channels if exp else self.out_channels\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Output 
tensor.\n        \"\"\"\n        shortcut = x\n        if self.conv_exp is not None:\n            x = self.conv_exp(x)\n        x = self.conv_dw(x)\n        if self.se is not None:\n            x = self.se(x)\n        x = self.act_dw(x)\n        x = self.conv_pwl(x)\n        if self.use_shortcut:\n            if self.drop_path is not None:\n                x = self.drop_path(x)\n            x = torch.cat([x[:, 0:self.in_channels] + shortcut, x[:, self.in_channels:]], dim=1)\n        return x\n\n\ndef _block_cfg(\n        width_mult: float = 1.0,\n        depth_mult: float = 1.0,\n        initial_chs: int = 16,\n        final_chs: int = 180,\n        se_ratio: float = 0.,\n        ch_div: int = 1,\n) -> List[Tuple[int, float, int, float]]:\n    \"\"\"Generate ReXNet block configuration.\n\n    Args:\n        width_mult: Width multiplier.\n        depth_mult: Depth multiplier.\n        initial_chs: Initial channel count.\n        final_chs: Final channel count.\n        se_ratio: Squeeze-excitation ratio.\n        ch_div: Channel divisor.\n\n    Returns:\n        List of tuples (out_channels, exp_ratio, stride, se_ratio).\n    \"\"\"\n    layers = [1, 2, 2, 3, 3, 5]\n    strides = [1, 2, 2, 2, 1, 2]\n    layers = [ceil(element * depth_mult) for element in layers]\n    strides = sum([[element] + [1] * (layers[idx] - 1) for idx, element in enumerate(strides)], [])\n    exp_ratios = [1] * layers[0] + [6] * sum(layers[1:])\n    depth = sum(layers[:]) * 3\n    base_chs = initial_chs / width_mult if width_mult < 1.0 else initial_chs\n\n    # The following channel configuration is a simple instance to make each layer become an expand layer.\n    out_chs_list = []\n    for i in range(depth // 3):\n        out_chs_list.append(make_divisible(round(base_chs * width_mult), divisor=ch_div))\n        base_chs += final_chs / (depth // 3 * 1.0)\n\n    se_ratios = [0.] 
* (layers[0] + layers[1]) + [se_ratio] * sum(layers[2:])\n\n    return list(zip(out_chs_list, exp_ratios, strides, se_ratios))\n\n\ndef _build_blocks(\n        block_cfg: List[Tuple[int, float, int, float]],\n        prev_chs: int,\n        width_mult: float,\n        ch_div: int = 1,\n        output_stride: int = 32,\n        act_layer: str = 'swish',\n        dw_act_layer: str = 'relu6',\n        drop_path_rate: float = 0.,\n        device=None,\n        dtype=None,\n) -> Tuple[List[nn.Module], List[Dict[str, Any]]]:\n    \"\"\"Build ReXNet blocks from configuration.\n\n    Args:\n        block_cfg: Block configuration list.\n        prev_chs: Previous channel count.\n        width_mult: Width multiplier.\n        ch_div: Channel divisor.\n        output_stride: Target output stride.\n        act_layer: Activation layer name.\n        dw_act_layer: Depthwise activation layer name.\n        drop_path_rate: Drop path rate.\n\n    Returns:\n        Tuple of (features list, feature_info list).\n    \"\"\"\n    dd = {'device': device, 'dtype': dtype}\n    feat_chs = [prev_chs]\n    feature_info = []\n    curr_stride = 2\n    dilation = 1\n    features = []\n    num_blocks = len(block_cfg)\n    for block_idx, (chs, exp_ratio, stride, se_ratio) in enumerate(block_cfg):\n        next_dilation = dilation\n        if stride > 1:\n            fname = 'stem' if block_idx == 0 else f'features.{block_idx - 1}'\n            feature_info += [dict(num_chs=feat_chs[-1], reduction=curr_stride, module=fname)]\n            if curr_stride >= output_stride:\n                next_dilation = dilation * stride\n                stride = 1\n        block_dpr = drop_path_rate * block_idx / (num_blocks - 1)  # stochastic depth linear decay rule\n        drop_path = DropPath(block_dpr) if block_dpr > 0. 
else None\n        features.append(LinearBottleneck(\n            in_chs=prev_chs,\n            out_chs=chs,\n            exp_ratio=exp_ratio,\n            stride=stride,\n            dilation=(dilation, next_dilation),\n            se_ratio=se_ratio,\n            ch_div=ch_div,\n            act_layer=act_layer,\n            dw_act_layer=dw_act_layer,\n            drop_path=drop_path,\n            **dd,\n        ))\n        curr_stride *= stride\n        dilation = next_dilation\n        prev_chs = chs\n        feat_chs += [features[-1].feat_channels()]\n    pen_chs = make_divisible(1280 * width_mult, divisor=ch_div)\n    feature_info += [dict(num_chs=feat_chs[-1], reduction=curr_stride, module=f'features.{len(features) - 1}')]\n    features.append(ConvNormAct(prev_chs, pen_chs, act_layer=act_layer, **dd))\n    return features, feature_info\n\n\nclass RexNet(nn.Module):\n    \"\"\"ReXNet model architecture.\n\n    Based on `ReXNet: Diminishing Representational Bottleneck on Convolutional Neural Network`\n    - https://arxiv.org/abs/2007.00992\n    \"\"\"\n\n    def __init__(\n            self,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            output_stride: int = 32,\n            initial_chs: int = 16,\n            final_chs: int = 180,\n            width_mult: float = 1.0,\n            depth_mult: float = 1.0,\n            se_ratio: float = 1/12.,\n            ch_div: int = 1,\n            act_layer: str = 'swish',\n            dw_act_layer: str = 'relu6',\n            drop_rate: float = 0.2,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize ReXNet.\n\n        Args:\n            in_chans: Number of input channels.\n            num_classes: Number of classes for classification.\n            global_pool: Global pooling type.\n            output_stride: Output stride.\n            initial_chs: Initial channel count.\n     
       final_chs: Final channel count.\n            width_mult: Width multiplier.\n            depth_mult: Depth multiplier.\n            se_ratio: Squeeze-excitation ratio.\n            ch_div: Channel divisor.\n            act_layer: Activation layer name.\n            dw_act_layer: Depthwise activation layer name.\n            drop_rate: Dropout rate.\n            drop_path_rate: Drop path rate.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n\n        assert output_stride in (32, 16, 8)\n        stem_base_chs = 32 / width_mult if width_mult < 1.0 else 32\n        stem_chs = make_divisible(round(stem_base_chs * width_mult), divisor=ch_div)\n        self.stem = ConvNormAct(in_chans, stem_chs, 3, stride=2, act_layer=act_layer, **dd)\n\n        block_cfg = _block_cfg(width_mult, depth_mult, initial_chs, final_chs, se_ratio, ch_div)\n        features, self.feature_info = _build_blocks(\n            block_cfg,\n            stem_chs,\n            width_mult,\n            ch_div,\n            output_stride,\n            act_layer,\n            dw_act_layer,\n            drop_path_rate,\n            **dd,\n        )\n        self.num_features = self.head_hidden_size = features[-1].out_channels\n        self.features = nn.Sequential(*features)\n\n        self.head = ClassifierHead(self.num_features, num_classes, global_pool, drop_rate, **dd)\n\n        efficientnet_init_weights(self)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        \"\"\"Group matcher for parameter groups.\n\n        Args:\n            coarse: Whether to use coarse grouping.\n\n        Returns:\n            Dictionary of grouped parameters.\n        \"\"\"\n        matcher = dict(\n            stem=r'^stem',\n            blocks=r'^features\\.(\\d+)',\n      
  )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Enable or disable gradient checkpointing.\n\n        Args:\n            enable: Whether to enable gradient checkpointing.\n        \"\"\"\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        \"\"\"Get the classifier module.\n\n        Returns:\n            Classifier module.\n        \"\"\"\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None, device=None, dtype=None) -> None:\n        \"\"\"Reset the classifier.\n\n        Args:\n            num_classes: Number of classes for new classifier.\n            global_pool: Global pooling type.\n        \"\"\"\n        self.num_classes = num_classes\n        if device is not None or dtype is not None:\n            dd = {'device': device, 'dtype': dtype}\n            pool_type = global_pool if global_pool is not None else self.head.global_pool.pool_type\n            self.head = ClassifierHead(self.num_features, num_classes, pool_type, self.drop_rate, **dd)\n        else:\n            self.head.reset(num_classes, global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n        
    output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        stage_ends = [int(info['module'].split('.')[-1]) for info in self.feature_info]\n        take_indices, max_index = feature_take_indices(len(stage_ends), indices)\n        take_indices = [stage_ends[i] for i in take_indices]\n        max_index = stage_ends[max_index]\n\n        # forward pass\n        x = self.stem(x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.features\n        else:\n            stages = self.features[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(stage, x)\n            else:\n                x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ) -> List[int]:\n        \"\"\"Prune layers not required for specified intermediates.\n\n        Args:\n            indices: Indices of intermediate layers to keep.\n            prune_norm: Whether to prune normalization layer.\n            prune_head: Whether to prune the classifier head.\n\n        Returns:\n            List of indices that were kept.\n        \"\"\"\n        stage_ends = [int(info['module'].split('.')[-1]) for info in self.feature_info]\n        take_indices, max_index = feature_take_indices(len(stage_ends), indices)\n        max_index = stage_ends[max_index]\n        self.features = 
self.features[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through feature extraction layers.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Feature tensor.\n        \"\"\"\n        x = self.stem(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.features, x)\n        else:\n            x = self.features(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        \"\"\"Forward pass through head.\n\n        Args:\n            x: Input features.\n            pre_logits: Return features before final linear layer.\n\n        Returns:\n            Classification logits or features.\n        \"\"\"\n        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Output logits.\n        \"\"\"\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_rexnet(variant: str, pretrained: bool, **kwargs) -> RexNet:\n    \"\"\"Create a ReXNet model.\n\n    Args:\n        variant: Model variant name.\n        pretrained: Load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    Returns:\n        ReXNet model instance.\n    \"\"\"\n    feature_cfg = dict(flatten_sequential=True)\n    return build_model_with_cfg(\n        RexNet,\n        variant,\n        pretrained,\n        feature_cfg=feature_cfg,\n        **kwargs,\n    )\n\n\ndef _cfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Create default configuration dictionary.\n\n    Args:\n        url: Model weight URL.\n       
 **kwargs: Additional configuration options.\n\n    Returns:\n        Configuration dictionary.\n    \"\"\"\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.conv', 'classifier': 'head.fc',\n        'license': 'mit', **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'rexnet_100.nav_in1k': _cfg(hf_hub_id='timm/'),\n    'rexnet_130.nav_in1k': _cfg(hf_hub_id='timm/'),\n    'rexnet_150.nav_in1k': _cfg(hf_hub_id='timm/'),\n    'rexnet_200.nav_in1k': _cfg(hf_hub_id='timm/'),\n    'rexnet_300.nav_in1k': _cfg(hf_hub_id='timm/'),\n    'rexnetr_100.untrained': _cfg(),\n    'rexnetr_130.untrained': _cfg(),\n    'rexnetr_150.untrained': _cfg(),\n    'rexnetr_200.sw_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_crop_pct=1.0, test_input_size=(3, 288, 288), license='apache-2.0'),\n    'rexnetr_300.sw_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.95, test_crop_pct=1.0, test_input_size=(3, 288, 288), license='apache-2.0'),\n    'rexnetr_200.sw_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        crop_pct=0.95, test_crop_pct=1.0, test_input_size=(3, 288, 288), license='apache-2.0'),\n    'rexnetr_300.sw_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        crop_pct=0.95, test_crop_pct=1.0, test_input_size=(3, 288, 288), license='apache-2.0'),\n})\n\n\n@register_model\ndef rexnet_100(pretrained: bool = False, **kwargs) -> RexNet:\n    \"\"\"ReXNet V1 1.0x\"\"\"\n    return _create_rexnet('rexnet_100', pretrained, **kwargs)\n\n\n@register_model\ndef rexnet_130(pretrained: bool = False, **kwargs) -> RexNet:\n    \"\"\"ReXNet V1 1.3x\"\"\"\n    return _create_rexnet('rexnet_130', pretrained, width_mult=1.3, **kwargs)\n\n\n@register_model\ndef 
rexnet_150(pretrained: bool = False, **kwargs) -> RexNet:\n    \"\"\"ReXNet V1 1.5x\"\"\"\n    return _create_rexnet('rexnet_150', pretrained, width_mult=1.5, **kwargs)\n\n\n@register_model\ndef rexnet_200(pretrained: bool = False, **kwargs) -> RexNet:\n    \"\"\"ReXNet V1 2.0x\"\"\"\n    return _create_rexnet('rexnet_200', pretrained, width_mult=2.0, **kwargs)\n\n\n@register_model\ndef rexnet_300(pretrained: bool = False, **kwargs) -> RexNet:\n    \"\"\"ReXNet V1 3.0x\"\"\"\n    return _create_rexnet('rexnet_300', pretrained, width_mult=3.0, **kwargs)\n\n\n@register_model\ndef rexnetr_100(pretrained: bool = False, **kwargs) -> RexNet:\n    \"\"\"ReXNet V1 1.0x w/ rounded (mod 8) channels\"\"\"\n    return _create_rexnet('rexnetr_100', pretrained, ch_div=8, **kwargs)\n\n\n@register_model\ndef rexnetr_130(pretrained: bool = False, **kwargs) -> RexNet:\n    \"\"\"ReXNet V1 1.3x w/ rounded (mod 8) channels\"\"\"\n    return _create_rexnet('rexnetr_130', pretrained, width_mult=1.3, ch_div=8, **kwargs)\n\n\n@register_model\ndef rexnetr_150(pretrained: bool = False, **kwargs) -> RexNet:\n    \"\"\"ReXNet V1 1.5x w/ rounded (mod 8) channels\"\"\"\n    return _create_rexnet('rexnetr_150', pretrained, width_mult=1.5, ch_div=8, **kwargs)\n\n\n@register_model\ndef rexnetr_200(pretrained: bool = False, **kwargs) -> RexNet:\n    \"\"\"ReXNet V1 2.0x w/ rounded (mod 8) channels\"\"\"\n    return _create_rexnet('rexnetr_200', pretrained, width_mult=2.0, ch_div=8, **kwargs)\n\n\n@register_model\ndef rexnetr_300(pretrained: bool = False, **kwargs) -> RexNet:\n    \"\"\"ReXNet V1 3.0x w/ rounded (mod 16) channels\"\"\"\n    return _create_rexnet('rexnetr_300', pretrained, width_mult=3.0, ch_div=16, **kwargs)\n"
  },
  {
    "path": "timm/models/selecsls.py",
    "content": "\"\"\"PyTorch SelecSLS Net example for ImageNet Classification\nLicense: CC BY 4.0 (https://creativecommons.org/licenses/by/4.0/legalcode)\nAuthor: Dushyant Mehta (@mehtadushy)\n\nSelecSLS (core) Network Architecture as proposed in \"XNect: Real-time Multi-person 3D\nHuman Pose Estimation with a Single RGB Camera, Mehta et al.\"\nhttps://arxiv.org/abs/1907.00837\n\nBased on ResNet implementation in https://github.com/rwightman/pytorch-image-models\nand SelecSLS Net implementation in https://github.com/mehtadushy/SelecSLS-Pytorch\n\"\"\"\nfrom typing import List, Type\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import create_classifier\nfrom ._builder import build_model_with_cfg\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['SelecSls']  # model_registry will add each entrypoint fn to this\n\n\nclass SequentialList(nn.Sequential):\n\n    def __init__(self, *args):\n        super().__init__(*args)\n\n    def forward(self, x) -> List[torch.Tensor]:\n        for module in self:\n            x = module(x)\n        return x\n\n\nclass SelectSeq(nn.Module):\n    def __init__(self, mode='index', index=0):\n        super().__init__()\n        self.mode = mode\n        self.index = index\n\n    def forward(self, x) -> torch.Tensor:\n        if self.mode == 'index':\n            return x[self.index]\n        else:\n            return torch.cat(x, dim=1)\n\n\ndef conv_bn(in_chs, out_chs, k=3, stride=1, padding=None, dilation=1, device=None, dtype=None):\n    dd = {'device': device, 'dtype': dtype}\n    if padding is None:\n        padding = ((stride - 1) + dilation * (k - 1)) // 2\n    return nn.Sequential(\n        nn.Conv2d(in_chs, out_chs, k, stride, padding=padding, dilation=dilation, bias=False, **dd),\n        nn.BatchNorm2d(out_chs, **dd),\n        nn.ReLU(inplace=True)\n    )\n\n\nclass SelecSlsBlock(nn.Module):\n    def __init__(\n            
self,\n            in_chs: int,\n            skip_chs: int,\n            mid_chs: int,\n            out_chs: int,\n            is_first: bool,\n            stride: int,\n            dilation: int = 1,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.stride = stride\n        self.is_first = is_first\n        assert stride in [1, 2]\n\n        # Process input with 4 conv blocks with the same number of input and output channels\n        self.conv1 = conv_bn(in_chs, mid_chs, 3, stride, dilation=dilation, **dd)\n        self.conv2 = conv_bn(mid_chs, mid_chs, 1, **dd)\n        self.conv3 = conv_bn(mid_chs, mid_chs // 2, 3, **dd)\n        self.conv4 = conv_bn(mid_chs // 2, mid_chs, 1, **dd)\n        self.conv5 = conv_bn(mid_chs, mid_chs // 2, 3, **dd)\n        self.conv6 = conv_bn(2 * mid_chs + (0 if is_first else skip_chs), out_chs, 1, **dd)\n\n    def forward(self, x: List[torch.Tensor]) -> List[torch.Tensor]:\n        if not isinstance(x, list):\n            x = [x]\n        assert len(x) in [1, 2]\n\n        d1 = self.conv1(x[0])\n        d2 = self.conv3(self.conv2(d1))\n        d3 = self.conv5(self.conv4(d2))\n        if self.is_first:\n            out = self.conv6(torch.cat([d1, d2, d3], 1))\n            return [out, out]\n        else:\n            return [self.conv6(torch.cat([d1, d2, d3, x[1]], 1)), x[1]]\n\n\nclass SelecSls(nn.Module):\n    \"\"\"SelecSls42 / SelecSls60 / SelecSls84\n\n    Parameters\n    ----------\n    cfg : network config dictionary specifying block type, feature, and head args\n    num_classes : int, default 1000\n        Number of classification classes.\n    in_chans : int, default 3\n        Number of input (color) channels.\n    drop_rate : float, default 0.\n        Dropout probability before classifier, for training\n    global_pool : str, default 'avg'\n        Global pooling type. 
One of 'avg', 'max', 'avgmax', 'catavgmax'\n    \"\"\"\n\n    def __init__(\n            self,\n            cfg,\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            drop_rate: float = 0.0,\n            global_pool: str = 'avg',\n            device=None,\n            dtype=None,\n    ):\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n\n        self.stem = conv_bn(in_chans, 32, stride=2, **dd)\n        self.features = SequentialList(*[cfg['block'](*block_args, **dd) for block_args in cfg['features']])\n        self.from_seq = SelectSeq()  # from List[tensor] -> Tensor in module compatible way\n        self.head = nn.Sequential(*[conv_bn(*conv_args, **dd) for conv_args in cfg['head']])\n        self.num_features = self.head_hidden_size = cfg['num_features']\n        self.feature_info = cfg['feature_info']\n\n        self.global_pool, self.head_drop, self.fc = create_classifier(\n            self.num_features,\n            self.num_classes,\n            pool_type=global_pool,\n            drop_rate=drop_rate,\n            **dd,\n        )\n\n        for n, m in self.named_modules():\n            if isinstance(m, nn.Conv2d):\n                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^stem',\n            blocks=r'^features\\.(\\d+)',\n            blocks_head=r'^head'\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, 'gradient checkpointing not supported'\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg'):\n        self.num_classes = num_classes\n        self.global_pool, self.fc = create_classifier(\n            
self.num_features,\n            self.num_classes,\n            pool_type=global_pool,\n            device=self.fc.weight.device if hasattr(self.fc, 'weight') else None,\n            dtype=self.fc.weight.dtype if hasattr(self.fc, 'weight') else None,\n        )\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        x = self.features(x)\n        x = self.head(self.from_seq(x))\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        x = self.head_drop(x)\n        return x if pre_logits else self.fc(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_selecsls(variant, pretrained, **kwargs):\n    cfg = {}\n    feature_info = [dict(num_chs=32, reduction=2, module='stem.2')]\n    if variant.startswith('selecsls42'):\n        cfg['block'] = SelecSlsBlock\n        # Define configuration of the network after the initial neck\n        cfg['features'] = [\n            # in_chs, skip_chs, mid_chs, out_chs, is_first, stride\n            (32, 0, 64, 64, True, 2),\n            (64, 64, 64, 128, False, 1),\n            (128, 0, 144, 144, True, 2),\n            (144, 144, 144, 288, False, 1),\n            (288, 0, 304, 304, True, 2),\n            (304, 304, 304, 480, False, 1),\n        ]\n        feature_info.extend([\n            dict(num_chs=128, reduction=4, module='features.1'),\n            dict(num_chs=288, reduction=8, module='features.3'),\n            dict(num_chs=480, reduction=16, module='features.5'),\n        ])\n        # Head can be replaced with alternative configurations depending on the problem\n        feature_info.append(dict(num_chs=1024, reduction=32, module='head.1'))\n        if variant == 'selecsls42b':\n            cfg['head'] = [\n                (480, 960, 3, 2),\n                (960, 1024, 3, 1),\n                (1024, 1280, 3, 2),\n                (1280, 1024, 1, 1),\n            ]\n  
          feature_info.append(dict(num_chs=1024, reduction=64, module='head.3'))\n            cfg['num_features'] = 1024\n        else:\n            cfg['head'] = [\n                (480, 960, 3, 2),\n                (960, 1024, 3, 1),\n                (1024, 1024, 3, 2),\n                (1024, 1280, 1, 1),\n            ]\n            feature_info.append(dict(num_chs=1280, reduction=64, module='head.3'))\n            cfg['num_features'] = 1280\n\n    elif variant.startswith('selecsls60'):\n        cfg['block'] = SelecSlsBlock\n        # Define configuration of the network after the initial neck\n        cfg['features'] = [\n            # in_chs, skip_chs, mid_chs, out_chs, is_first, stride\n            (32, 0, 64, 64, True, 2),\n            (64, 64, 64, 128, False, 1),\n            (128, 0, 128, 128, True, 2),\n            (128, 128, 128, 128, False, 1),\n            (128, 128, 128, 288, False, 1),\n            (288, 0, 288, 288, True, 2),\n            (288, 288, 288, 288, False, 1),\n            (288, 288, 288, 288, False, 1),\n            (288, 288, 288, 416, False, 1),\n        ]\n        feature_info.extend([\n            dict(num_chs=128, reduction=4, module='features.1'),\n            dict(num_chs=288, reduction=8, module='features.4'),\n            dict(num_chs=416, reduction=16, module='features.8'),\n        ])\n        # Head can be replaced with alternative configurations depending on the problem\n        feature_info.append(dict(num_chs=1024, reduction=32, module='head.1'))\n        if variant == 'selecsls60b':\n            cfg['head'] = [\n                (416, 756, 3, 2),\n                (756, 1024, 3, 1),\n                (1024, 1280, 3, 2),\n                (1280, 1024, 1, 1),\n            ]\n            feature_info.append(dict(num_chs=1024, reduction=64, module='head.3'))\n            cfg['num_features'] = 1024\n        else:\n            cfg['head'] = [\n                (416, 756, 3, 2),\n                (756, 1024, 3, 1),\n                
(1024, 1024, 3, 2),\n                (1024, 1280, 1, 1),\n            ]\n            feature_info.append(dict(num_chs=1280, reduction=64, module='head.3'))\n            cfg['num_features'] = 1280\n\n    elif variant == 'selecsls84':\n        cfg['block'] = SelecSlsBlock\n        # Define configuration of the network after the initial neck\n        cfg['features'] = [\n            # in_chs, skip_chs, mid_chs, out_chs, is_first, stride\n            (32, 0, 64, 64, True, 2),\n            (64, 64, 64, 144, False, 1),\n            (144, 0, 144, 144, True, 2),\n            (144, 144, 144, 144, False, 1),\n            (144, 144, 144, 144, False, 1),\n            (144, 144, 144, 144, False, 1),\n            (144, 144, 144, 304, False, 1),\n            (304, 0, 304, 304, True, 2),\n            (304, 304, 304, 304, False, 1),\n            (304, 304, 304, 304, False, 1),\n            (304, 304, 304, 304, False, 1),\n            (304, 304, 304, 304, False, 1),\n            (304, 304, 304, 512, False, 1),\n        ]\n        feature_info.extend([\n            dict(num_chs=144, reduction=4, module='features.1'),\n            dict(num_chs=304, reduction=8, module='features.6'),\n            dict(num_chs=512, reduction=16, module='features.12'),\n        ])\n        # Head can be replaced with alternative configurations depending on the problem\n        cfg['head'] = [\n            (512, 960, 3, 2),\n            (960, 1024, 3, 1),\n            (1024, 1024, 3, 2),\n            (1024, 1280, 3, 1),\n        ]\n        cfg['num_features'] = 1280\n        feature_info.extend([\n            dict(num_chs=1024, reduction=32, module='head.1'),\n            dict(num_chs=1280, reduction=64, module='head.3')\n        ])\n    else:\n        raise ValueError('Invalid net configuration ' + variant + ' !!!')\n    cfg['feature_info'] = feature_info\n\n    # this model can do 6 feature levels by default, unlike most others, leave as 0-4 to avoid surprises?\n    return build_model_with_cfg(\n        
SelecSls,\n        variant,\n        pretrained,\n        model_cfg=cfg,\n        feature_cfg=dict(out_indices=(0, 1, 2, 3, 4), flatten_sequential=True),\n        **kwargs,\n    )\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (4, 4),\n        'crop_pct': 0.875, 'interpolation': 'bilinear',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.0', 'classifier': 'fc',\n        'license': 'cc-by-4.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'selecsls42.untrained': _cfg(\n        interpolation='bicubic'),\n    'selecsls42b.in1k': _cfg(\n        hf_hub_id='timm/',\n        interpolation='bicubic'),\n    'selecsls60.in1k': _cfg(\n        hf_hub_id='timm/',\n        interpolation='bicubic'),\n    'selecsls60b.in1k': _cfg(\n        hf_hub_id='timm/',\n        interpolation='bicubic'),\n    'selecsls84.untrained': _cfg(\n        interpolation='bicubic'),\n})\n\n\n@register_model\ndef selecsls42(pretrained=False, **kwargs) -> SelecSls:\n    \"\"\"Constructs a SelecSls42 model.\n    \"\"\"\n    return _create_selecsls('selecsls42', pretrained, **kwargs)\n\n\n@register_model\ndef selecsls42b(pretrained=False, **kwargs) -> SelecSls:\n    \"\"\"Constructs a SelecSls42_B model.\n    \"\"\"\n    return _create_selecsls('selecsls42b', pretrained, **kwargs)\n\n\n@register_model\ndef selecsls60(pretrained=False, **kwargs) -> SelecSls:\n    \"\"\"Constructs a SelecSls60 model.\n    \"\"\"\n    return _create_selecsls('selecsls60', pretrained, **kwargs)\n\n\n@register_model\ndef selecsls60b(pretrained=False, **kwargs) -> SelecSls:\n    \"\"\"Constructs a SelecSls60_B model.\n    \"\"\"\n    return _create_selecsls('selecsls60b', pretrained, **kwargs)\n\n\n@register_model\ndef selecsls84(pretrained=False, **kwargs) -> SelecSls:\n    \"\"\"Constructs a SelecSls84 model.\n    \"\"\"\n    return 
_create_selecsls('selecsls84', pretrained, **kwargs)\n"
  },
  {
    "path": "timm/models/senet.py",
    "content": "\"\"\"\nSEResNet implementation from Cadene's pretrained models\nhttps://github.com/Cadene/pretrained-models.pytorch/blob/master/pretrainedmodels/models/senet.py\nAdditional credit to https://github.com/creafz\n\nOriginal model: https://github.com/hujie-frank/SENet\n\nResNet code gently borrowed from\nhttps://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py\n\nFIXME I'm deprecating this model and moving them to ResNet as I don't want to maintain duplicate\nsupport for extras like dilation, switchable BN/activations, feature extraction, etc that don't exist here.\n\"\"\"\nimport math\nfrom collections import OrderedDict\nfrom typing import Type, Optional, Tuple\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import create_classifier\nfrom ._builder import build_model_with_cfg\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['SENet']\n\n\ndef _weight_init(m):\n    if isinstance(m, nn.Conv2d):\n        nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')\n    elif isinstance(m, nn.BatchNorm2d):\n        nn.init.constant_(m.weight, 1.)\n        nn.init.constant_(m.bias, 0.)\n\n\nclass SEModule(nn.Module):\n\n    def __init__(self, channels: int, reduction: int, device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.fc1 = nn.Conv2d(channels, channels // reduction, kernel_size=1, **dd)\n        self.relu = nn.ReLU(inplace=True)\n        self.fc2 = nn.Conv2d(channels // reduction, channels, kernel_size=1, **dd)\n        self.sigmoid = nn.Sigmoid()\n\n    def forward(self, x):\n        module_input = x\n        x = x.mean((2, 3), keepdim=True)\n        x = self.fc1(x)\n        x = self.relu(x)\n        x = self.fc2(x)\n        x = self.sigmoid(x)\n        return module_input * x\n\n\nclass Bottleneck(nn.Module):\n    \"\"\"\n 
   Base class for bottlenecks that implements `forward()` method.\n    \"\"\"\n\n    def forward(self, x):\n        shortcut = x\n\n        out = self.conv1(x)\n        out = self.bn1(out)\n        out = self.relu(out)\n\n        out = self.conv2(out)\n        out = self.bn2(out)\n        out = self.relu(out)\n\n        out = self.conv3(out)\n        out = self.bn3(out)\n\n        if self.downsample is not None:\n            shortcut = self.downsample(x)\n\n        out = self.se_module(out) + shortcut\n        out = self.relu(out)\n\n        return out\n\n\nclass SEBottleneck(Bottleneck):\n    \"\"\"\n    Bottleneck for SENet154.\n    \"\"\"\n    expansion = 4\n\n    def __init__(\n            self,\n            inplanes: int,\n            planes: int,\n            groups: int,\n            reduction: int,\n            stride: int = 1,\n            downsample: Optional[nn.Module] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv1 = nn.Conv2d(inplanes, planes * 2, kernel_size=1, bias=False, **dd)\n        self.bn1 = nn.BatchNorm2d(planes * 2, **dd)\n        self.conv2 = nn.Conv2d(\n            planes * 2,\n            planes * 4,\n            kernel_size=3,\n            stride=stride,\n            padding=1,\n            groups=groups,\n            bias=False,\n            **dd,\n        )\n        self.bn2 = nn.BatchNorm2d(planes * 4, **dd)\n        self.conv3 = nn.Conv2d(planes * 4, planes * 4, kernel_size=1, bias=False, **dd)\n        self.bn3 = nn.BatchNorm2d(planes * 4, **dd)\n        self.relu = nn.ReLU(inplace=True)\n        self.se_module = SEModule(planes * 4, reduction=reduction, **dd)\n        self.downsample = downsample\n        self.stride = stride\n\n\nclass SEResNetBottleneck(Bottleneck):\n    \"\"\"\n    ResNet bottleneck with a Squeeze-and-Excitation module. 
It follows Caffe\n    implementation and uses `stride=stride` in `conv1` and not in `conv2`\n    (the latter is used in the torchvision implementation of ResNet).\n    \"\"\"\n    expansion = 4\n\n    def __init__(\n            self,\n            inplanes: int,\n            planes: int,\n            groups: int,\n            reduction: int,\n            stride: int = 1,\n            downsample: Optional[nn.Module] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False, stride=stride, **dd)\n        self.bn1 = nn.BatchNorm2d(planes, **dd)\n        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, padding=1, groups=groups, bias=False, **dd)\n        self.bn2 = nn.BatchNorm2d(planes, **dd)\n        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False, **dd)\n        self.bn3 = nn.BatchNorm2d(planes * 4, **dd)\n        self.relu = nn.ReLU(inplace=True)\n        self.se_module = SEModule(planes * 4, reduction=reduction, **dd)\n        self.downsample = downsample\n        self.stride = stride\n\n\nclass SEResNeXtBottleneck(Bottleneck):\n    \"\"\"\n    ResNeXt bottleneck type C with a Squeeze-and-Excitation module.\n    \"\"\"\n    expansion = 4\n\n    def __init__(\n            self,\n            inplanes: int,\n            planes: int,\n            groups: int,\n            reduction: int,\n            stride: int = 1,\n            downsample: Optional[nn.Module] = None,\n            base_width: int = 4,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        width = math.floor(planes * (base_width / 64)) * groups\n        self.conv1 = nn.Conv2d(inplanes, width, kernel_size=1, bias=False, stride=1, **dd)\n        self.bn1 = nn.BatchNorm2d(width, **dd)\n        self.conv2 = nn.Conv2d(width, width, 
kernel_size=3, stride=stride, padding=1, groups=groups, bias=False, **dd)\n        self.bn2 = nn.BatchNorm2d(width, **dd)\n        self.conv3 = nn.Conv2d(width, planes * 4, kernel_size=1, bias=False, **dd)\n        self.bn3 = nn.BatchNorm2d(planes * 4, **dd)\n        self.relu = nn.ReLU(inplace=True)\n        self.se_module = SEModule(planes * 4, reduction=reduction, **dd)\n        self.downsample = downsample\n        self.stride = stride\n\n\nclass SEResNetBlock(nn.Module):\n    expansion = 1\n\n    def __init__(\n            self,\n            inplanes: int,\n            planes: int,\n            groups: int,\n            reduction: int,\n            stride: int = 1,\n            downsample: Optional[nn.Module] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=3, padding=1, stride=stride, bias=False, **dd)\n        self.bn1 = nn.BatchNorm2d(planes, **dd)\n        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, padding=1, groups=groups, bias=False, **dd)\n        self.bn2 = nn.BatchNorm2d(planes, **dd)\n        self.relu = nn.ReLU(inplace=True)\n        self.se_module = SEModule(planes, reduction=reduction, **dd)\n        self.downsample = downsample\n        self.stride = stride\n\n    def forward(self, x):\n        shortcut = x\n\n        out = self.conv1(x)\n        out = self.bn1(out)\n        out = self.relu(out)\n\n        out = self.conv2(out)\n        out = self.bn2(out)\n        out = self.relu(out)\n\n        if self.downsample is not None:\n            shortcut = self.downsample(x)\n\n        out = self.se_module(out) + shortcut\n        out = self.relu(out)\n\n        return out\n\n\nclass SENet(nn.Module):\n\n    def __init__(\n            self,\n            block: Type[nn.Module],\n            layers: Tuple[int, ...],\n            groups: int,\n            reduction: int,\n            
drop_rate: float = 0.2,\n            in_chans: int = 3,\n            inplanes: int = 64,\n            input_3x3: bool = False,\n            downsample_kernel_size: int = 1,\n            downsample_padding: int = 0,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Parameters\n        ----------\n        block (nn.Module): Bottleneck class.\n            - For SENet154: SEBottleneck\n            - For SE-ResNet models: SEResNetBottleneck\n            - For SE-ResNeXt models:  SEResNeXtBottleneck\n        layers (list of ints): Number of residual blocks for 4 layers of the\n            network (layer1...layer4).\n        groups (int): Number of groups for the 3x3 convolution in each\n            bottleneck block.\n            - For SENet154: 64\n            - For SE-ResNet models: 1\n            - For SE-ResNeXt models:  32\n        reduction (int): Reduction ratio for Squeeze-and-Excitation modules.\n            - For all models: 16\n        dropout_p (float or None): Drop probability for the Dropout layer.\n            If `None` the Dropout layer is not used.\n            - For SENet154: 0.2\n            - For SE-ResNet models: None\n            - For SE-ResNeXt models: None\n        inplanes (int):  Number of input channels for layer1.\n            - For SENet154: 128\n            - For SE-ResNet models: 64\n            - For SE-ResNeXt models: 64\n        input_3x3 (bool): If `True`, use three 3x3 convolutions instead of\n            a single 7x7 convolution in layer0.\n            - For SENet154: True\n            - For SE-ResNet models: False\n            - For SE-ResNeXt models: False\n        downsample_kernel_size (int): Kernel size for downsampling convolutions\n            in layer2, layer3 and layer4.\n            - For SENet154: 3\n            - For SE-ResNet models: 1\n            - For SE-ResNeXt models: 1\n        downsample_padding (int): 
Padding for downsampling convolutions in\n            layer2, layer3 and layer4.\n            - For SENet154: 1\n            - For SE-ResNet models: 0\n            - For SE-ResNeXt models: 0\n        num_classes (int): Number of outputs in `last_linear` layer.\n            - For all models: 1000\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.inplanes = inplanes\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        if input_3x3:\n            layer0_modules = [\n                ('conv1', nn.Conv2d(in_chans, 64, 3, stride=2, padding=1, bias=False, **dd)),\n                ('bn1', nn.BatchNorm2d(64, **dd)),\n                ('relu1', nn.ReLU(inplace=True)),\n                ('conv2', nn.Conv2d(64, 64, 3, stride=1, padding=1, bias=False, **dd)),\n                ('bn2', nn.BatchNorm2d(64, **dd)),\n                ('relu2', nn.ReLU(inplace=True)),\n                ('conv3', nn.Conv2d(64, inplanes, 3, stride=1, padding=1, bias=False, **dd)),\n                ('bn3', nn.BatchNorm2d(inplanes, **dd)),\n                ('relu3', nn.ReLU(inplace=True)),\n            ]\n        else:\n            layer0_modules = [\n                ('conv1', nn.Conv2d(in_chans, inplanes, kernel_size=7, stride=2, padding=3, bias=False, **dd)),\n                ('bn1', nn.BatchNorm2d(inplanes, **dd)),\n                ('relu1', nn.ReLU(inplace=True)),\n            ]\n        self.layer0 = nn.Sequential(OrderedDict(layer0_modules))\n        # To preserve compatibility with Caffe weights `ceil_mode=True` is used instead of `padding=1`.\n        self.pool0 = nn.MaxPool2d(3, stride=2, ceil_mode=True)\n        self.feature_info = [dict(num_chs=inplanes, reduction=2, module='layer0')]\n        self.layer1 = self._make_layer(\n            block,\n            planes=64,\n            blocks=layers[0],\n            groups=groups,\n            reduction=reduction,\n            
downsample_kernel_size=1,\n            downsample_padding=0,\n            **dd,\n        )\n        self.feature_info += [dict(num_chs=64 * block.expansion, reduction=4, module='layer1')]\n        self.layer2 = self._make_layer(\n            block,\n            planes=128,\n            blocks=layers[1],\n            stride=2,\n            groups=groups,\n            reduction=reduction,\n            downsample_kernel_size=downsample_kernel_size,\n            downsample_padding=downsample_padding,\n            **dd,\n        )\n        self.feature_info += [dict(num_chs=128 * block.expansion, reduction=8, module='layer2')]\n        self.layer3 = self._make_layer(\n            block,\n            planes=256,\n            blocks=layers[2],\n            stride=2,\n            groups=groups,\n            reduction=reduction,\n            downsample_kernel_size=downsample_kernel_size,\n            downsample_padding=downsample_padding,\n            **dd,\n        )\n        self.feature_info += [dict(num_chs=256 * block.expansion, reduction=16, module='layer3')]\n        self.layer4 = self._make_layer(\n            block,\n            planes=512,\n            blocks=layers[3],\n            stride=2,\n            groups=groups,\n            reduction=reduction,\n            downsample_kernel_size=downsample_kernel_size,\n            downsample_padding=downsample_padding,\n            **dd,\n        )\n        self.feature_info += [dict(num_chs=512 * block.expansion, reduction=32, module='layer4')]\n        self.num_features = self.head_hidden_size = 512 * block.expansion\n        self.global_pool, self.last_linear = create_classifier(\n            self.num_features,\n            self.num_classes,\n            pool_type=global_pool,\n            **dd,\n        )\n\n        for m in self.modules():\n            _weight_init(m)\n\n    def _make_layer(self, block, planes, blocks, groups, reduction, stride=1,\n                    downsample_kernel_size=1, downsample_padding=0, 
device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        downsample = None\n        if stride != 1 or self.inplanes != planes * block.expansion:\n            downsample = nn.Sequential(\n                nn.Conv2d(\n                    self.inplanes, planes * block.expansion, kernel_size=downsample_kernel_size,\n                    stride=stride, padding=downsample_padding, bias=False, **dd),\n                nn.BatchNorm2d(planes * block.expansion, **dd),\n            )\n\n        layers = [block(self.inplanes, planes, groups, reduction, stride, downsample, **dd)]\n        self.inplanes = planes * block.expansion\n        for i in range(1, blocks):\n            layers.append(block(self.inplanes, planes, groups, reduction, **dd))\n\n        return nn.Sequential(*layers)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(stem=r'^layer0', blocks=r'^layer(\\d+)' if coarse else r'^layer(\\d+)\\.(\\d+)')\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, 'gradient checkpointing not supported'\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.last_linear\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg'):\n        self.num_classes = num_classes\n        self.global_pool, self.last_linear = create_classifier(\n            self.num_features, self.num_classes, pool_type=global_pool)\n\n    def forward_features(self, x):\n        x = self.layer0(x)\n        x = self.pool0(x)\n        x = self.layer1(x)\n        x = self.layer2(x)\n        x = self.layer3(x)\n        x = self.layer4(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        if self.drop_rate > 0.:\n            x = F.dropout(x, p=self.drop_rate, training=self.training)\n        return x if pre_logits else self.last_linear(x)\n\n    def 
forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_senet(variant, pretrained=False, **kwargs):\n    return build_model_with_cfg(SENet, variant, pretrained, **kwargs)\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bilinear',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'layer0.conv1', 'classifier': 'last_linear', 'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'legacy_senet154.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/legacy_senet154-e9eb9fe6.pth'),\n    'legacy_seresnet18.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnet18-4bb0ce65.pth',\n        interpolation='bicubic'),\n    'legacy_seresnet34.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnet34-a4004e63.pth'),\n    'legacy_seresnet50.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-cadene/se_resnet50-ce0d4300.pth'),\n    'legacy_seresnet101.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-cadene/se_resnet101-7e38fcc6.pth'),\n    'legacy_seresnet152.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-cadene/se_resnet152-d17c99b7.pth'),\n    'legacy_seresnext26_32x4d.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnext26_32x4d-65ebdb501.pth',\n        interpolation='bicubic'),\n    'legacy_seresnext50_32x4d.in1k': _cfg(\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/legacy_se_resnext50_32x4d-f3651bad.pth'),\n    'legacy_seresnext101_32x4d.in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/legacy_se_resnext101_32x4d-37725eac.pth'),\n})\n\n\n@register_model\ndef legacy_seresnet18(pretrained=False, **kwargs) -> SENet:\n    model_args = dict(\n        block=SEResNetBlock, layers=[2, 2, 2, 2], groups=1, reduction=16)\n    return _create_senet('legacy_seresnet18', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef legacy_seresnet34(pretrained=False, **kwargs) -> SENet:\n    model_args = dict(\n        block=SEResNetBlock, layers=[3, 4, 6, 3], groups=1, reduction=16)\n    return _create_senet('legacy_seresnet34', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef legacy_seresnet50(pretrained=False, **kwargs) -> SENet:\n    model_args = dict(\n        block=SEResNetBottleneck, layers=[3, 4, 6, 3], groups=1, reduction=16)\n    return _create_senet('legacy_seresnet50', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef legacy_seresnet101(pretrained=False, **kwargs) -> SENet:\n    model_args = dict(\n        block=SEResNetBottleneck, layers=[3, 4, 23, 3], groups=1, reduction=16)\n    return _create_senet('legacy_seresnet101', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef legacy_seresnet152(pretrained=False, **kwargs) -> SENet:\n    model_args = dict(\n        block=SEResNetBottleneck, layers=[3, 8, 36, 3], groups=1, reduction=16)\n    return _create_senet('legacy_seresnet152', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef legacy_senet154(pretrained=False, **kwargs) -> SENet:\n    model_args = dict(\n        block=SEBottleneck, layers=[3, 8, 36, 3], groups=64, reduction=16,\n        downsample_kernel_size=3, downsample_padding=1,  inplanes=128, input_3x3=True)\n    return _create_senet('legacy_senet154', 
pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef legacy_seresnext26_32x4d(pretrained=False, **kwargs) -> SENet:\n    model_args = dict(\n        block=SEResNeXtBottleneck, layers=[2, 2, 2, 2], groups=32, reduction=16)\n    return _create_senet('legacy_seresnext26_32x4d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef legacy_seresnext50_32x4d(pretrained=False, **kwargs) -> SENet:\n    model_args = dict(\n        block=SEResNeXtBottleneck, layers=[3, 4, 6, 3], groups=32, reduction=16)\n    return _create_senet('legacy_seresnext50_32x4d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef legacy_seresnext101_32x4d(pretrained=False, **kwargs) -> SENet:\n    model_args = dict(\n        block=SEResNeXtBottleneck, layers=[3, 4, 23, 3], groups=32, reduction=16)\n    return _create_senet('legacy_seresnext101_32x4d', pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/sequencer.py",
    "content": "\"\"\" Sequencer\n\nPaper: `Sequencer: Deep LSTM for Image Classification` - https://arxiv.org/pdf/2205.01972.pdf\n\n\"\"\"\n#  Copyright (c) 2022. Yuki Tatsunami\n#  Licensed under the Apache License, Version 2.0 (the \"License\");\n\nimport math\nfrom functools import partial\nfrom itertools import accumulate\nfrom typing import List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, DEFAULT_CROP_PCT\nfrom timm.layers import lecun_normal_, DropPath, Mlp, PatchEmbed, ClassifierHead\nfrom ._builder import build_model_with_cfg\nfrom ._manipulate import named_apply\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['Sequencer2d']  # model_registry will add each entrypoint fn to this\n\n\ndef _init_weights(module: nn.Module, name: str, head_bias: float = 0., flax=False):\n    if isinstance(module, nn.Linear):\n        if name.startswith('head'):\n            nn.init.zeros_(module.weight)\n            nn.init.constant_(module.bias, head_bias)\n        else:\n            if flax:\n                # Flax defaults\n                lecun_normal_(module.weight)\n                if module.bias is not None:\n                    nn.init.zeros_(module.bias)\n            else:\n                nn.init.xavier_uniform_(module.weight)\n                if module.bias is not None:\n                    if 'mlp' in name:\n                        nn.init.normal_(module.bias, std=1e-6)\n                    else:\n                        nn.init.zeros_(module.bias)\n    elif isinstance(module, nn.Conv2d):\n        lecun_normal_(module.weight)\n        if module.bias is not None:\n            nn.init.zeros_(module.bias)\n    elif isinstance(module, (nn.LayerNorm, nn.BatchNorm2d, nn.GroupNorm)):\n        nn.init.ones_(module.weight)\n        nn.init.zeros_(module.bias)\n    elif isinstance(module, (nn.RNN, nn.GRU, nn.LSTM)):\n        stdv = 1.0 / 
math.sqrt(module.hidden_size)\n        for weight in module.parameters():\n            nn.init.uniform_(weight, -stdv, stdv)\n    elif hasattr(module, 'init_weights'):\n        module.init_weights()\n\n\nclass RNNIdentity(nn.Module):\n    def __init__(self, *args, **kwargs):\n        super().__init__()\n\n    def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor, None]:\n        return x, None\n\n\nclass RNN2dBase(nn.Module):\n\n    def __init__(\n            self,\n            input_size: int,\n            hidden_size: int,\n            num_layers: int = 1,\n            bias: bool = True,\n            bidirectional: bool = True,\n            union: str = \"cat\",\n            with_fc: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.input_size = input_size\n        self.hidden_size = hidden_size\n        self.output_size = 2 * hidden_size if bidirectional else hidden_size\n        self.union = union\n\n        self.with_vertical = True\n        self.with_horizontal = True\n        self.with_fc = with_fc\n\n        self.fc = None\n        if with_fc:\n            if union == \"cat\":\n                self.fc = nn.Linear(2 * self.output_size, input_size, **dd)\n            elif union == \"add\":\n                self.fc = nn.Linear(self.output_size, input_size, **dd)\n            elif union == \"vertical\":\n                self.fc = nn.Linear(self.output_size, input_size, **dd)\n                self.with_horizontal = False\n            elif union == \"horizontal\":\n                self.fc = nn.Linear(self.output_size, input_size, **dd)\n                self.with_vertical = False\n            else:\n                raise ValueError(\"Unrecognized union: \" + union)\n        elif union == \"cat\":\n            pass\n            if 2 * self.output_size != input_size:\n                raise ValueError(f\"The output channel {2 * self.output_size} is 
different from the input channel {input_size}.\")\n        elif union == \"add\":\n            pass\n            if self.output_size != input_size:\n                raise ValueError(f\"The output channel {self.output_size} is different from the input channel {input_size}.\")\n        elif union == \"vertical\":\n            if self.output_size != input_size:\n                raise ValueError(f\"The output channel {self.output_size} is different from the input channel {input_size}.\")\n            self.with_horizontal = False\n        elif union == \"horizontal\":\n            if self.output_size != input_size:\n                raise ValueError(f\"The output channel {self.output_size} is different from the input channel {input_size}.\")\n            self.with_vertical = False\n        else:\n            raise ValueError(\"Unrecognized union: \" + union)\n\n        self.rnn_v = RNNIdentity()\n        self.rnn_h = RNNIdentity()\n\n    def forward(self, x):\n        B, H, W, C = x.shape\n\n        if self.with_vertical:\n            v = x.permute(0, 2, 1, 3)\n            v = v.reshape(-1, H, C)\n            v, _ = self.rnn_v(v)\n            v = v.reshape(B, W, H, -1)\n            v = v.permute(0, 2, 1, 3)\n        else:\n            v = None\n\n        if self.with_horizontal:\n            h = x.reshape(-1, W, C)\n            h, _ = self.rnn_h(h)\n            h = h.reshape(B, H, W, -1)\n        else:\n            h = None\n\n        if v is not None and h is not None:\n            if self.union == \"cat\":\n                x = torch.cat([v, h], dim=-1)\n            else:\n                x = v + h\n        elif v is not None:\n            x = v\n        elif h is not None:\n            x = h\n\n        if self.fc is not None:\n            x = self.fc(x)\n\n        return x\n\n\nclass LSTM2d(RNN2dBase):\n\n    def __init__(\n            self,\n            input_size: int,\n            hidden_size: int,\n            num_layers: int = 1,\n            bias: bool = True,\n  
          bidirectional: bool = True,\n            union: str = \"cat\",\n            with_fc: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__(input_size, hidden_size, num_layers, bias, bidirectional, union, with_fc, device, dtype)\n        if self.with_vertical:\n            self.rnn_v = nn.LSTM(\n                input_size,\n                hidden_size,\n                num_layers,\n                batch_first=True,\n                bias=bias,\n                bidirectional=bidirectional,\n                **dd,\n            )\n        if self.with_horizontal:\n            self.rnn_h = nn.LSTM(\n                input_size,\n                hidden_size,\n                num_layers,\n                batch_first=True,\n                bias=bias,\n                bidirectional=bidirectional,\n                **dd,\n            )\n\n\nclass Sequencer2dBlock(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            hidden_size: int,\n            mlp_ratio: float = 3.0,\n            rnn_layer: Type[nn.Module] = LSTM2d,\n            mlp_layer: Type[nn.Module] = Mlp,\n            norm_layer: Type[nn.Module] = partial(nn.LayerNorm, eps=1e-6),\n            act_layer: Type[nn.Module] = nn.GELU,\n            num_layers: int = 1,\n            bidirectional: bool = True,\n            union: str = \"cat\",\n            with_fc: bool = True,\n            drop: float = 0.,\n            drop_path: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        channels_dim = int(mlp_ratio * dim)\n        self.norm1 = norm_layer(dim, **dd)\n        self.rnn_tokens = rnn_layer(\n            dim,\n            hidden_size,\n            num_layers=num_layers,\n            bidirectional=bidirectional,\n            union=union,\n            with_fc=with_fc,\n            
**dd,\n        )\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp_channels = mlp_layer(dim, channels_dim, act_layer=act_layer, drop=drop, **dd)\n\n    def forward(self, x):\n        x = x + self.drop_path(self.rnn_tokens(self.norm1(x)))\n        x = x + self.drop_path(self.mlp_channels(self.norm2(x)))\n        return x\n\n\nclass Shuffle(nn.Module):\n    def __init__(self):\n        super().__init__()\n\n    def forward(self, x):\n        if self.training:\n            B, H, W, C = x.shape\n            r = torch.randperm(H * W)\n            x = x.reshape(B, -1, C)\n            x = x[:, r, :].reshape(B, H, W, -1)\n        return x\n\n\nclass Downsample2d(nn.Module):\n    def __init__(\n            self,\n            input_dim: int,\n            output_dim: int,\n            patch_size: int,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.down = nn.Conv2d(input_dim, output_dim, kernel_size=patch_size, stride=patch_size, **dd)\n\n    def forward(self, x):\n        x = x.permute(0, 3, 1, 2)\n        x = self.down(x)\n        x = x.permute(0, 2, 3, 1)\n        return x\n\n\nclass Sequencer2dStage(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            dim_out: int,\n            depth: int,\n            patch_size: int,\n            hidden_size: int,\n            mlp_ratio: float,\n            downsample: bool = False,\n            block_layer: Type[nn.Module] = Sequencer2dBlock,\n            rnn_layer: Type[nn.Module] = LSTM2d,\n            mlp_layer: Type[nn.Module] = Mlp,\n            norm_layer: Type[nn.Module] = partial(nn.LayerNorm, eps=1e-6),\n            act_layer: Type[nn.Module] = nn.GELU,\n            num_layers: int = 1,\n            bidirectional: bool = True,\n            union: str = \"cat\",\n            with_fc: bool = True,\n     
       drop: float = 0.,\n            drop_path: Union[float, List[float]] = 0.,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        if downsample:\n            self.downsample = Downsample2d(dim, dim_out, patch_size, **dd)\n        else:\n            assert dim == dim_out\n            self.downsample = nn.Identity()\n\n        blocks = []\n        for block_idx in range(depth):\n            blocks.append(block_layer(\n                dim_out,\n                hidden_size,\n                mlp_ratio=mlp_ratio,\n                rnn_layer=rnn_layer,\n                mlp_layer=mlp_layer,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                num_layers=num_layers,\n                bidirectional=bidirectional,\n                union=union,\n                with_fc=with_fc,\n                drop=drop,\n                drop_path=drop_path[block_idx] if isinstance(drop_path, (list, tuple)) else drop_path,\n                **dd,\n            ))\n        self.blocks = nn.Sequential(*blocks)\n\n    def forward(self, x):\n        x = self.downsample(x)\n        x = self.blocks(x)\n        return x\n\n\nclass Sequencer2d(nn.Module):\n    def __init__(\n            self,\n            num_classes: int = 1000,\n            img_size: int = 224,\n            in_chans: int = 3,\n            global_pool: str = 'avg',\n            layers: Tuple[int, ...] = (4, 3, 8, 3),\n            patch_sizes: Tuple[int, ...] = (7, 2, 2, 1),\n            embed_dims: Tuple[int, ...] = (192, 384, 384, 384),\n            hidden_sizes: Tuple[int, ...] = (48, 96, 96, 96),\n            mlp_ratios: Tuple[float, ...] 
= (3.0, 3.0, 3.0, 3.0),\n            block_layer: Type[nn.Module] = Sequencer2dBlock,\n            rnn_layer: Type[nn.Module] = LSTM2d,\n            mlp_layer: Type[nn.Module] = Mlp,\n            norm_layer: Type[nn.Module] = partial(nn.LayerNorm, eps=1e-6),\n            act_layer: Type[nn.Module] = nn.GELU,\n            num_rnn_layers: int = 1,\n            bidirectional: bool = True,\n            union: str = \"cat\",\n            with_fc: bool = True,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            nlhb: bool = False,\n            stem_norm: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert global_pool in ('', 'avg')\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.num_features = self.head_hidden_size = embed_dims[-1]  # for consistency with other models\n        self.feature_dim = -1  # channel dim index for feature outputs (rank 4, NHWC)\n        self.output_fmt = 'NHWC'\n        self.feature_info = []\n\n        self.stem = PatchEmbed(\n            img_size=None,\n            patch_size=patch_sizes[0],\n            in_chans=in_chans,\n            embed_dim=embed_dims[0],\n            norm_layer=norm_layer if stem_norm else None,\n            flatten=False,\n            output_fmt='NHWC',\n            **dd,\n        )\n\n        assert len(layers) == len(patch_sizes) == len(embed_dims) == len(hidden_sizes) == len(mlp_ratios)\n        reductions = list(accumulate(patch_sizes, lambda x, y: x * y))\n        stages = []\n        prev_dim = embed_dims[0]\n        for i, _ in enumerate(embed_dims):\n            stages += [Sequencer2dStage(\n                prev_dim,\n                embed_dims[i],\n                depth=layers[i],\n                downsample=i > 0,\n                patch_size=patch_sizes[i],\n                
hidden_size=hidden_sizes[i],\n                mlp_ratio=mlp_ratios[i],\n                block_layer=block_layer,\n                rnn_layer=rnn_layer,\n                mlp_layer=mlp_layer,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                num_layers=num_rnn_layers,\n                bidirectional=bidirectional,\n                union=union,\n                with_fc=with_fc,\n                drop=drop_rate,\n                drop_path=drop_path_rate,\n                **dd,\n            )]\n            prev_dim = embed_dims[i]\n            self.feature_info += [dict(num_chs=prev_dim, reduction=reductions[i], module=f'stages.{i}')]\n\n        self.stages = nn.Sequential(*stages)\n        self.norm = norm_layer(embed_dims[-1], **dd)\n        self.head = ClassifierHead(\n            self.num_features,\n            num_classes,\n            pool_type=global_pool,\n            drop_rate=drop_rate,\n            input_fmt=self.output_fmt,\n            **dd,\n        )\n\n        self.init_weights(nlhb=nlhb)\n\n    def init_weights(self, nlhb=False):\n        head_bias = -math.log(self.num_classes) if nlhb else 0.\n        named_apply(partial(_init_weights, head_bias=head_bias), module=self)  # depth-first\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^stem',\n            blocks=[\n                (r'^stages\\.(\\d+)', None),\n                (r'^norm', (99999,))\n            ] if coarse else [\n                (r'^stages\\.(\\d+)\\.blocks\\.(\\d+)', None),\n                (r'^stages\\.(\\d+)\\.downsample', (0,)),\n                (r'^norm', (99999,))\n            ]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, 'gradient checkpointing not supported'\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, 
global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, pool_type=global_pool)\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        x = self.stages(x)\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        return self.head(x, pre_logits=True) if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    \"\"\" Remap original checkpoints -> timm \"\"\"\n    if 'stages.0.blocks.0.norm1.weight' in state_dict:\n        return state_dict  # already translated checkpoint\n    if 'model' in state_dict:\n        state_dict = state_dict['model']\n\n    import re\n    out_dict = {}\n    for k, v in state_dict.items():\n        k = re.sub(r'blocks.([0-9]+).([0-9]+).down', lambda x: f'stages.{int(x.group(1)) + 1}.downsample.down', k)\n        k = re.sub(r'blocks.([0-9]+).([0-9]+)', r'stages.\\1.blocks.\\2', k)\n        k = k.replace('head.', 'head.fc.')\n        out_dict[k] = v\n\n    return out_dict\n\n\ndef _create_sequencer2d(variant, pretrained=False, **kwargs):\n    default_out_indices = tuple(range(3))\n    out_indices = kwargs.pop('out_indices', default_out_indices)\n\n    model = build_model_with_cfg(\n        Sequencer2d,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        **kwargs,\n    )\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': DEFAULT_CROP_PCT, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.proj', 'classifier': 'head.fc',\n 
       'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'sequencer2d_s.in1k': _cfg(hf_hub_id='timm/'),\n    'sequencer2d_m.in1k': _cfg(hf_hub_id='timm/'),\n    'sequencer2d_l.in1k': _cfg(hf_hub_id='timm/'),\n})\n\n\n@register_model\ndef sequencer2d_s(pretrained=False, **kwargs) -> Sequencer2d:\n    model_args = dict(\n        layers=[4, 3, 8, 3],\n        patch_sizes=[7, 2, 1, 1],\n        embed_dims=[192, 384, 384, 384],\n        hidden_sizes=[48, 96, 96, 96],\n        mlp_ratios=[3.0, 3.0, 3.0, 3.0],\n        rnn_layer=LSTM2d,\n        bidirectional=True,\n        union=\"cat\",\n        with_fc=True,\n    )\n    model = _create_sequencer2d('sequencer2d_s', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef sequencer2d_m(pretrained=False, **kwargs) -> Sequencer2d:\n    model_args = dict(\n        layers=[4, 3, 14, 3],\n        patch_sizes=[7, 2, 1, 1],\n        embed_dims=[192, 384, 384, 384],\n        hidden_sizes=[48, 96, 96, 96],\n        mlp_ratios=[3.0, 3.0, 3.0, 3.0],\n        rnn_layer=LSTM2d,\n        bidirectional=True,\n        union=\"cat\",\n        with_fc=True,\n        **kwargs)\n    model = _create_sequencer2d('sequencer2d_m', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef sequencer2d_l(pretrained=False, **kwargs) -> Sequencer2d:\n    model_args = dict(\n        layers=[8, 8, 16, 4],\n        patch_sizes=[7, 2, 1, 1],\n        embed_dims=[192, 384, 384, 384],\n        hidden_sizes=[48, 96, 96, 96],\n        mlp_ratios=[3.0, 3.0, 3.0, 3.0],\n        rnn_layer=LSTM2d,\n        bidirectional=True,\n        union=\"cat\",\n        with_fc=True,\n        **kwargs)\n    model = _create_sequencer2d('sequencer2d_l', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n"
  },
  {
    "path": "timm/models/shvit.py",
    "content": "\"\"\"SHViT\nSHViT: Single-Head Vision Transformer with Memory Efficient Macro Design\nCode: https://github.com/ysj9909/SHViT\nPaper: https://arxiv.org/abs/2401.16456\n\n@inproceedings{yun2024shvit,\n  author={Yun, Seokju and Ro, Youngmin},\n  title={SHViT: Single-Head Vision Transformer with Memory Efficient Macro Design},\n  booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)},\n  pages={5756--5767},\n  year={2024}\n}\n\"\"\"\nfrom typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import GroupNorm1, SqueezeExcite, SelectAdaptivePool2d, LayerType, trunc_normal_\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['SHViT']\n\n\nclass Residual(nn.Module):\n    def __init__(self, m: nn.Module):\n        super().__init__()\n        self.m = m\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        return x + self.m(x)\n\n    @torch.no_grad()\n    def fuse(self) -> nn.Module:\n        if isinstance(self.m, Conv2dNorm):\n            m = self.m.fuse()\n            assert(m.groups == m.in_channels)\n            identity = torch.ones(m.weight.shape[0], m.weight.shape[1], 1, 1)\n            identity = F.pad(identity, [1,1,1,1])\n            m.weight += identity.to(m.weight.device)\n            return m\n        else:\n            return self\n\n\nclass Conv2dNorm(nn.Sequential):\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int = 1,\n            stride: int = 1,\n            padding: int = 0,\n            bn_weight_init: int = 1,\n            device=None,\n            dtype=None,\n            
**kwargs,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.add_module('c', nn.Conv2d(\n            in_channels, out_channels, kernel_size, stride, padding, bias=False, **dd, **kwargs))\n        self.add_module('bn', nn.BatchNorm2d(out_channels, **dd))\n        nn.init.constant_(self.bn.weight, bn_weight_init)\n        nn.init.constant_(self.bn.bias, 0)\n\n    @torch.no_grad()\n    def fuse(self) -> nn.Conv2d:\n        c, bn = self._modules.values()\n        w = bn.weight / (bn.running_var + bn.eps) ** 0.5\n        w = c.weight * w[:, None, None, None]\n        b = bn.bias - bn.running_mean * bn.weight / (bn.running_var + bn.eps) ** 0.5\n        m = nn.Conv2d(\n            in_channels=w.size(1) * self.c.groups,\n            out_channels=w.size(0),\n            kernel_size=w.shape[2:],\n            stride=self.c.stride,\n            padding=self.c.padding,\n            dilation=self.c.dilation,\n            groups=self.c.groups,\n            device=c.weight.device,\n            dtype=c.weight.dtype,\n        )\n        m.weight.data.copy_(w)\n        m.bias.data.copy_(b)\n        return m\n\n\nclass NormLinear(nn.Sequential):\n    def __init__(\n            self,\n            in_features: int,\n            out_features: int,\n            bias: bool = True,\n            std: float = 0.02,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.add_module('bn', nn.BatchNorm1d(in_features, **dd))\n        self.add_module('l', nn.Linear(in_features, out_features, bias=bias, **dd))\n        trunc_normal_(self.l.weight, std=std)\n        if bias:\n            nn.init.constant_(self.l.bias, 0)\n\n    @torch.no_grad()\n    def fuse(self) -> nn.Linear:\n        bn, l = self._modules.values()\n        w = bn.weight / (bn.running_var + bn.eps) ** 0.5\n        b = bn.bias - self.bn.running_mean * self.bn.weight / (bn.running_var + 
bn.eps) ** 0.5\n        w = l.weight * w[None, :]\n        if l.bias is None:\n            b = b @ self.l.weight.T\n        else:\n            b = (l.weight @ b[:, None]).view(-1) + self.l.bias\n        m = nn.Linear(w.size(1), w.size(0), device=l.weight.device, dtype=l.weight.dtype)\n        m.weight.data.copy_(w)\n        m.bias.data.copy_(b)\n        return m\n\n\nclass PatchMerging(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            out_dim: int,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        hid_dim = int(dim * 4)\n        self.conv1 = Conv2dNorm(dim, hid_dim, **dd)\n        self.act1 = act_layer()\n        self.conv2 = Conv2dNorm(hid_dim, hid_dim, 3, 2, 1, groups=hid_dim, **dd)\n        self.act2 = act_layer()\n        self.se = SqueezeExcite(hid_dim, 0.25, **dd)\n        self.conv3 = Conv2dNorm(hid_dim, out_dim, **dd)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.conv1(x)\n        x = self.act1(x)\n        x = self.conv2(x)\n        x = self.act2(x)\n        x = self.se(x)\n        x = self.conv3(x)\n        return x\n\n\nclass FFN(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            embed_dim: int,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.pw1 = Conv2dNorm(dim, embed_dim, **dd)\n        self.act = act_layer()\n        self.pw2 = Conv2dNorm(embed_dim, dim, bn_weight_init=0, **dd)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.pw1(x)\n        x = self.act(x)\n        x = self.pw2(x)\n        return x\n\n\nclass SHSA(nn.Module):\n    \"\"\"Single-Head Self-Attention\"\"\"\n    def __init__(\n            self,\n            dim: 
int,\n            qk_dim: int,\n            pdim: int,\n            norm_layer: Type[nn.Module] = GroupNorm1,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.scale = qk_dim ** -0.5\n        self.qk_dim = qk_dim\n        self.dim = dim\n        self.pdim = pdim\n\n        self.pre_norm = norm_layer(pdim, **dd)\n\n        self.qkv = Conv2dNorm(pdim, qk_dim * 2 + pdim, **dd)\n        self.proj = nn.Sequential(act_layer(), Conv2dNorm(dim, dim, bn_weight_init=0, **dd))\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        B, _, H, W = x.shape\n        x1, x2 = torch.split(x, [self.pdim, self.dim - self.pdim], dim = 1)\n        x1 = self.pre_norm(x1)\n        qkv = self.qkv(x1)\n        q, k, v = torch.split(qkv, [self.qk_dim, self.qk_dim, self.pdim], dim=1)\n        q, k, v = q.flatten(2), k.flatten(2), v.flatten(2)\n\n        attn = (q.transpose(-2, -1) @ k) * self.scale\n        attn = attn.softmax(dim=-1)\n        x1 = (v @ attn.transpose(-2, -1)).reshape(B, self.pdim, H, W)\n        x = self.proj(torch.cat([x1, x2], dim = 1))\n        return x\n\n\nclass BasicBlock(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            qk_dim: int,\n            pdim: int,\n            type: str,\n            norm_layer: Type[nn.Module] = GroupNorm1,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv = Residual(Conv2dNorm(dim, dim, 3, 1, 1, groups=dim, bn_weight_init=0, **dd))\n        if type == \"s\":\n            self.mixer = Residual(SHSA(dim, qk_dim, pdim, norm_layer, act_layer, **dd))\n        else:\n            self.mixer = nn.Identity()\n        self.ffn = Residual(FFN(dim, int(dim * 2), **dd))\n\n    def forward(self, x: 
torch.Tensor) -> torch.Tensor:\n        x = self.conv(x)\n        x = self.mixer(x)\n        x = self.ffn(x)\n        return x\n\n\nclass StageBlock(nn.Module):\n    def __init__(\n            self,\n            prev_dim: int,\n            dim: int,\n            qk_dim: int,\n            pdim: int,\n            type: str,\n            depth: int,\n            norm_layer: Type[nn.Module] = GroupNorm1,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.grad_checkpointing = False\n        self.downsample = nn.Sequential(\n            Residual(Conv2dNorm(prev_dim, prev_dim, 3, 1, 1, groups=prev_dim, **dd)),\n            Residual(FFN(prev_dim, int(prev_dim * 2), act_layer, **dd)),\n            PatchMerging(prev_dim, dim, act_layer, **dd),\n            Residual(Conv2dNorm(dim, dim, 3, 1, 1, groups=dim, **dd)),\n            Residual(FFN(dim, int(dim * 2), act_layer, **dd)),\n        ) if prev_dim != dim else nn.Identity()\n\n        self.blocks = nn.Sequential(*[\n            BasicBlock(dim, qk_dim, pdim, type, norm_layer, act_layer, **dd) for _ in range(depth)\n        ])\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.downsample(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        return x\n\n\nclass SHViT(nn.Module):\n    def __init__(\n            self,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            embed_dim: Tuple[int, int, int] = (128, 256, 384),\n            partial_dim: Tuple[int, int, int] = (32, 64, 96),\n            qk_dim: Tuple[int, int, int] = (16, 16, 16),\n            depth: Tuple[int, int, int] = (1, 2, 3),\n            types: Tuple[str, str, str] = (\"s\", \"s\", \"s\"),\n           
 drop_rate: float = 0.,\n            norm_layer: Type[nn.Module] = GroupNorm1,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.feature_info = []\n\n        # Patch embedding\n        stem_chs = embed_dim[0]\n        self.patch_embed = nn.Sequential(\n            Conv2dNorm(in_chans, stem_chs // 8, 3, 2, 1, **dd),\n            act_layer(),\n            Conv2dNorm(stem_chs // 8, stem_chs // 4, 3, 2, 1, **dd),\n            act_layer(),\n            Conv2dNorm(stem_chs // 4, stem_chs // 2, 3, 2, 1, **dd),\n            act_layer(),\n            Conv2dNorm(stem_chs // 2, stem_chs, 3, 2, 1, **dd)\n        )\n\n        # Build SHViT blocks\n        stages = []\n        prev_chs = stem_chs\n        for i in range(len(embed_dim)):\n            stages.append(StageBlock(\n                prev_dim=prev_chs,\n                dim=embed_dim[i],\n                qk_dim=qk_dim[i],\n                pdim=partial_dim[i],\n                type=types[i],\n                depth=depth[i],\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                **dd,\n            ))\n            prev_chs = embed_dim[i]\n            self.feature_info.append(dict(num_chs=prev_chs, reduction=2**(i+4), module=f'stages.{i}'))\n        self.stages = nn.Sequential(*stages)\n\n        # Classifier head\n        self.num_features = self.head_hidden_size = embed_dim[-1]\n        self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)\n        self.flatten = nn.Flatten(1) if global_pool else nn.Identity()  # don't flatten if pooling disabled\n        self.head = NormLinear(self.head_hidden_size, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n    @torch.jit.ignore\n    def no_weight_decay(self) -> 
Set:\n        return set()\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        matcher = dict(\n            stem=r'^patch_embed',  # stem and embed\n            blocks=r'^stages\\.(\\d+)' if coarse else [\n                (r'^stages\\.(\\d+).downsample', (0,)),\n                (r'^stages\\.(\\d+)\\.blocks\\.(\\d+)', None),\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        for s in self.stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.l\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg'):\n        self.num_classes = num_classes\n        # cannot meaningfully change pooling of efficient head after creation\n        self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)\n        self.flatten = nn.Flatten(1) if global_pool else nn.Identity()  # don't flatten if pooling disabled\n        self.head = NormLinear(self.head_hidden_size, num_classes) if num_classes > 0 else nn.Identity()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: 
Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.patch_embed(x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.patch_embed(x)\n        x = self.stages(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        x = self.global_pool(x)\n        x = self.flatten(x)\n        if self.drop_rate > 0.:\n            x = F.dropout(x, p=self.drop_rate, training=self.training)\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n    @torch.no_grad()\n    def fuse(self):\n        def fuse_children(net):\n        
    for child_name, child in net.named_children():\n                if hasattr(child, 'fuse'):\n                    fused = child.fuse()\n                    setattr(net, child_name, fused)\n                    fuse_children(fused)\n                else:\n                    fuse_children(child)\n\n        fuse_children(self)\n\n\ndef checkpoint_filter_fn(state_dict: Dict[str, torch.Tensor], model: nn.Module) -> Dict[str, torch.Tensor]:\n    state_dict = state_dict.get('model', state_dict)\n\n    # out_dict = {}\n    # import re\n    # replace_rules = [\n    #     (re.compile(r'^blocks1\\.'), 'stages.0.blocks.'),\n    #     (re.compile(r'^blocks2\\.'), 'stages.1.blocks.'),\n    #     (re.compile(r'^blocks3\\.'), 'stages.2.blocks.'),\n    # ]\n    # downsample_mapping = {}\n    # for i in range(1, 3):\n    #     downsample_mapping[f'^stages\\\\.{i}\\\\.blocks\\\\.0\\\\.0\\\\.'] = f'stages.{i}.downsample.0.'\n    #     downsample_mapping[f'^stages\\\\.{i}\\\\.blocks\\\\.0\\\\.1\\\\.'] = f'stages.{i}.downsample.1.'\n    #     downsample_mapping[f'^stages\\\\.{i}\\\\.blocks\\\\.1\\\\.'] = f'stages.{i}.downsample.2.'\n    #     downsample_mapping[f'^stages\\\\.{i}\\\\.blocks\\\\.2\\\\.0\\\\.'] = f'stages.{i}.downsample.3.'\n    #     downsample_mapping[f'^stages\\\\.{i}\\\\.blocks\\\\.2\\\\.1\\\\.'] = f'stages.{i}.downsample.4.'\n    #     for j in range(3, 10):\n    #         downsample_mapping[f'^stages\\\\.{i}\\\\.blocks\\\\.{j}\\\\.'] = f'stages.{i}.blocks.{j - 3}.'\n    #\n    # downsample_patterns = [\n    #     (re.compile(pattern), replacement) for pattern, replacement in downsample_mapping.items()]\n    #\n    # for k, v in state_dict.items():\n    #     for pattern, replacement in replace_rules:\n    #         k = pattern.sub(replacement, k)\n    #     for pattern, replacement in downsample_patterns:\n    #         k = pattern.sub(replacement, k)\n    #     out_dict[k] = v\n\n    return state_dict\n\n\ndef _cfg(url: str = '', **kwargs: Any) -> Dict[str, 
Any]:\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (4, 4),\n        'crop_pct': 0.875, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.0.c', 'classifier': 'head.l',\n        'license': 'mit',\n        'paper_ids': 'arXiv:2401.16456',\n        'paper_name': 'SHViT: Single-Head Vision Transformer with Memory Efficient Macro Design',\n        'origin_url': 'https://github.com/ysj9909/SHViT',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'shvit_s1.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/ysj9909/SHViT/releases/download/v1.0/shvit_s1.pth',\n    ),\n    'shvit_s2.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/ysj9909/SHViT/releases/download/v1.0/shvit_s2.pth',\n    ),\n    'shvit_s3.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/ysj9909/SHViT/releases/download/v1.0/shvit_s3.pth',\n    ),\n    'shvit_s4.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/ysj9909/SHViT/releases/download/v1.0/shvit_s4.pth',\n        input_size=(3, 256, 256),\n    ),\n})\n\n\ndef _create_shvit(variant: str, pretrained: bool = False, **kwargs: Any) -> SHViT:\n    model = build_model_with_cfg(\n        SHViT, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=(0, 1, 2), flatten_sequential=True),\n        **kwargs,\n    )\n    return model\n\n\n@register_model\ndef shvit_s1(pretrained: bool = False, **kwargs: Any) -> SHViT:\n    model_args = dict(\n        embed_dim=(128, 224, 320), depth=(2, 4, 5), partial_dim=(32, 48, 68), types=(\"i\", \"s\", \"s\"))\n    return _create_shvit('shvit_s1', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef shvit_s2(pretrained: bool = False, **kwargs: Any) -> SHViT:\n    model_args = dict(\n        
embed_dim=(128, 308, 448), depth=(2, 4, 5), partial_dim=(32, 66, 96), types=(\"i\", \"s\", \"s\"))\n    return _create_shvit('shvit_s2', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef shvit_s3(pretrained: bool = False, **kwargs: Any) -> SHViT:\n    model_args = dict(\n        embed_dim=(192, 352, 448), depth=(3, 5, 5), partial_dim=(48, 75, 96), types=(\"i\", \"s\", \"s\"))\n    return _create_shvit('shvit_s3', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef shvit_s4(pretrained: bool = False, **kwargs: Any) -> SHViT:\n    model_args = dict(\n        embed_dim=(224, 336, 448), depth=(4, 7, 6), partial_dim=(48, 72, 96), types=(\"i\", \"s\", \"s\"))\n    return _create_shvit('shvit_s4', pretrained=pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/sknet.py",
    "content": "\"\"\" Selective Kernel Networks (ResNet base)\n\nPaper: Selective Kernel Networks (https://arxiv.org/abs/1903.06586)\n\nThis was inspired by reading 'Compounding the Performance Improvements...' (https://arxiv.org/abs/2001.06268)\nand a streamlined impl at https://github.com/clovaai/assembled-cnn but I ended up building something closer\nto the original paper with some modifications of my own to better balance param count vs accuracy.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport math\nfrom typing import Optional, Type\n\nfrom torch import nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import SelectiveKernel, ConvNormAct, create_attn\nfrom ._builder import build_model_with_cfg\nfrom ._registry import register_model, generate_default_cfgs\nfrom .resnet import ResNet\n\n\nclass SelectiveKernelBasic(nn.Module):\n    expansion = 1\n\n    def __init__(\n            self,\n            inplanes: int,\n            planes: int,\n            stride: int = 1,\n            downsample: Optional[nn.Module] = None,\n            cardinality: int = 1,\n            base_width: int = 64,\n            sk_kwargs: Optional[dict] = None,\n            reduce_first: int = 1,\n            dilation: int = 1,\n            first_dilation: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            attn_layer: Optional[Type[nn.Module]] = None,\n            aa_layer: Optional[Type[nn.Module]] = None,\n            drop_block: Optional[nn.Module] = None,\n            drop_path: Optional[nn.Module] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        sk_kwargs = sk_kwargs or {}\n        conv_kwargs = dict(act_layer=act_layer, norm_layer=norm_layer, **dd)\n        assert cardinality == 1, 'BasicBlock only supports cardinality of 
1'\n        assert base_width == 64, 'BasicBlock does not support changing base width'\n        first_planes = planes // reduce_first\n        outplanes = planes * self.expansion\n        first_dilation = first_dilation or dilation\n\n        self.conv1 = SelectiveKernel(\n            inplanes,\n            first_planes,\n            stride=stride,\n            dilation=first_dilation,\n            aa_layer=aa_layer,\n            drop_layer=drop_block,\n            **conv_kwargs,\n            **sk_kwargs,\n        )\n        self.conv2 = ConvNormAct(\n            first_planes,\n            outplanes,\n            kernel_size=3,\n            dilation=dilation,\n            apply_act=False,\n            **conv_kwargs,\n        )\n        self.se = create_attn(attn_layer, outplanes, **dd)\n        self.act = act_layer(inplace=True)\n        self.downsample = downsample\n        self.drop_path = drop_path\n\n    def zero_init_last(self):\n        if getattr(self.conv2.bn, 'weight', None) is not None:\n            nn.init.zeros_(self.conv2.bn.weight)\n\n    def forward(self, x):\n        shortcut = x\n        x = self.conv1(x)\n        x = self.conv2(x)\n        if self.se is not None:\n            x = self.se(x)\n        if self.drop_path is not None:\n            x = self.drop_path(x)\n        if self.downsample is not None:\n            shortcut = self.downsample(shortcut)\n        x += shortcut\n        x = self.act(x)\n        return x\n\n\nclass SelectiveKernelBottleneck(nn.Module):\n    expansion = 4\n\n    def __init__(\n            self,\n            inplanes: int,\n            planes: int,\n            stride: int = 1,\n            downsample: Optional[nn.Module] = None,\n            cardinality: int = 1,\n            base_width: int = 64,\n            sk_kwargs: Optional[dict] = None,\n            reduce_first: int = 1,\n            dilation: int = 1,\n            first_dilation: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.ReLU,\n      
      norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            attn_layer: Optional[Type[nn.Module]] = None,\n            aa_layer: Optional[Type[nn.Module]] = None,\n            drop_block: Optional[nn.Module] = None,\n            drop_path: Optional[nn.Module] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        sk_kwargs = sk_kwargs or {}\n        conv_kwargs = dict(act_layer=act_layer, norm_layer=norm_layer, **dd)\n        width = int(math.floor(planes * (base_width / 64)) * cardinality)\n        first_planes = width // reduce_first\n        outplanes = planes * self.expansion\n        first_dilation = first_dilation or dilation\n\n        self.conv1 = ConvNormAct(inplanes, first_planes, kernel_size=1, **conv_kwargs)\n        self.conv2 = SelectiveKernel(\n            first_planes,\n            width,\n            stride=stride,\n            dilation=first_dilation,\n            groups=cardinality,\n            aa_layer=aa_layer,\n            drop_layer=drop_block,\n            **conv_kwargs,\n            **sk_kwargs,\n        )\n        self.conv3 = ConvNormAct(width, outplanes, kernel_size=1, apply_act=False, **conv_kwargs)\n        self.se = create_attn(attn_layer, outplanes, **dd)\n        self.act = act_layer(inplace=True)\n        self.downsample = downsample\n        self.drop_path = drop_path\n\n    def zero_init_last(self):\n        if getattr(self.conv3.bn, 'weight', None) is not None:\n            nn.init.zeros_(self.conv3.bn.weight)\n\n    def forward(self, x):\n        shortcut = x\n        x = self.conv1(x)\n        x = self.conv2(x)\n        x = self.conv3(x)\n        if self.se is not None:\n            x = self.se(x)\n        if self.drop_path is not None:\n            x = self.drop_path(x)\n        if self.downsample is not None:\n            shortcut = self.downsample(shortcut)\n        x += shortcut\n        x = self.act(x)\n        return 
x\n\n\ndef _create_skresnet(variant, pretrained=False, **kwargs):\n    return build_model_with_cfg(\n        ResNet,\n        variant,\n        pretrained,\n        **kwargs,\n    )\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'conv1', 'classifier': 'fc',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'skresnet18.ra_in1k': _cfg(hf_hub_id='timm/'),\n    'skresnet34.ra_in1k': _cfg(hf_hub_id='timm/'),\n    'skresnet50.untrained': _cfg(),\n    'skresnet50d.untrained': _cfg(\n        first_conv='conv1.0'),\n    'skresnext50_32x4d.ra_in1k': _cfg(hf_hub_id='timm/'),\n})\n\n\n@register_model\ndef skresnet18(pretrained=False, **kwargs) -> ResNet:\n    \"\"\"Constructs a Selective Kernel ResNet-18 model.\n\n    Different from configs in Select Kernel paper or \"Compounding the Performance Improvements...\" this\n    variation splits the input channels to the selective convolutions to keep param count down.\n    \"\"\"\n    sk_kwargs = dict(rd_ratio=1 / 8, rd_divisor=16, split_input=True)\n    model_args = dict(\n        block=SelectiveKernelBasic, layers=[2, 2, 2, 2], block_args=dict(sk_kwargs=sk_kwargs),\n        zero_init_last=False)\n    return _create_skresnet('skresnet18', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef skresnet34(pretrained=False, **kwargs) -> ResNet:\n    \"\"\"Constructs a Selective Kernel ResNet-34 model.\n\n    Different from configs in Select Kernel paper or \"Compounding the Performance Improvements...\" this\n    variation splits the input channels to the selective convolutions to keep param count down.\n    \"\"\"\n    sk_kwargs = dict(rd_ratio=1 / 8, rd_divisor=16, split_input=True)\n    model_args = dict(\n        
block=SelectiveKernelBasic, layers=[3, 4, 6, 3], block_args=dict(sk_kwargs=sk_kwargs),\n        zero_init_last=False)\n    return _create_skresnet('skresnet34', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef skresnet50(pretrained=False, **kwargs) -> ResNet:\n    \"\"\"Constructs a Select Kernel ResNet-50 model.\n\n    Different from configs in Select Kernel paper or \"Compounding the Performance Improvements...\" this\n    variation splits the input channels to the selective convolutions to keep param count down.\n    \"\"\"\n    sk_kwargs = dict(split_input=True)\n    model_args = dict(\n        block=SelectiveKernelBottleneck, layers=[3, 4, 6, 3], block_args=dict(sk_kwargs=sk_kwargs),\n        zero_init_last=False)\n    return _create_skresnet('skresnet50', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef skresnet50d(pretrained=False, **kwargs) -> ResNet:\n    \"\"\"Constructs a Select Kernel ResNet-50-D model.\n\n    Different from configs in Select Kernel paper or \"Compounding the Performance Improvements...\" this\n    variation splits the input channels to the selective convolutions to keep param count down.\n    \"\"\"\n    sk_kwargs = dict(split_input=True)\n    model_args = dict(\n        block=SelectiveKernelBottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True,\n        block_args=dict(sk_kwargs=sk_kwargs), zero_init_last=False)\n    return _create_skresnet('skresnet50d', pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef skresnext50_32x4d(pretrained=False, **kwargs) -> ResNet:\n    \"\"\"Constructs a Select Kernel ResNeXt50-32x4d model. 
This should be equivalent to\n    the SKNet-50 model in the Select Kernel Paper\n    \"\"\"\n    sk_kwargs = dict(rd_ratio=1/16, rd_divisor=32, split_input=False)\n    model_args = dict(\n        block=SelectiveKernelBottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4,\n        block_args=dict(sk_kwargs=sk_kwargs), zero_init_last=False)\n    return _create_skresnet('skresnext50_32x4d', pretrained, **dict(model_args, **kwargs))\n\n"
  },
  {
    "path": "timm/models/starnet.py",
    "content": "\"\"\"\nImplementation of Proof-of-Concept Network: StarNet.\n\nWe make StarNet as simple as possible [to show the key contribution of element-wise multiplication]:\n    - like NO layer-scale in network design,\n    - and NO EMA during training,\n    - which would improve the performance further.\n\nCreated by: Xu Ma (Email: ma.xu1@northeastern.edu)\nModified Date: Mar/29/2024\n\"\"\"\nfrom typing import Any, Dict, List, Optional, Set, Tuple, Union, Type\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import DropPath, SelectAdaptivePool2d, Linear, LayerType, trunc_normal_, calculate_drop_path_rates\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['StarNet']\n\n\nclass ConvBN(nn.Sequential):\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int = 1,\n            stride: int = 1,\n            padding: int = 0,\n            with_bn: bool = True,\n            device=None,\n            dtype=None,\n            **kwargs,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.add_module('conv', nn.Conv2d(\n            in_channels, out_channels, kernel_size, stride=stride, padding=padding, **dd, **kwargs))\n        if with_bn:\n            self.add_module('bn', nn.BatchNorm2d(out_channels, **dd))\n            nn.init.constant_(self.bn.weight, 1)\n            nn.init.constant_(self.bn.bias, 0)\n\n\nclass Block(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            mlp_ratio: int = 3,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.ReLU6,\n            device=None,\n            dtype=None,\n    ):\n        dd = 
{'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dwconv = ConvBN(dim, dim, 7, 1, 3, groups=dim, with_bn=True, **dd)\n        self.f1 = ConvBN(dim, mlp_ratio * dim, 1, with_bn=False, **dd)\n        self.f2 = ConvBN(dim, mlp_ratio * dim, 1, with_bn=False, **dd)\n        self.g = ConvBN(mlp_ratio * dim, dim, 1, with_bn=True, **dd)\n        self.dwconv2 = ConvBN(dim, dim, 7, 1, 3, groups=dim, with_bn=False, **dd)\n        self.act = act_layer()\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        residual = x\n        x = self.dwconv(x)\n        x1, x2 = self.f1(x), self.f2(x)\n        x = self.act(x1) * x2\n        x = self.dwconv2(self.g(x))\n        x = residual + self.drop_path(x)\n        return x\n\n\nclass StarNet(nn.Module):\n    def __init__(\n            self,\n            base_dim: int = 32,\n            depths: List[int] = [3, 3, 12, 5],\n            mlp_ratio: int = 4,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            act_layer: Type[nn.Module] = nn.ReLU6,\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            global_pool: str = 'avg',\n            output_stride: int = 32,\n            device=None,\n            dtype=None,\n            **kwargs,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert output_stride == 32\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n        self.feature_info = []\n        stem_chs = 32\n\n        # stem layer\n        self.stem = nn.Sequential(\n            ConvBN(in_chans, stem_chs, kernel_size=3, stride=2, padding=1, **dd),\n            act_layer(),\n        )\n        prev_chs = stem_chs\n\n        # build stages\n        dpr = calculate_drop_path_rates(drop_path_rate, sum(depths)) 
# stochastic depth\n        stages = []\n        cur = 0\n        for i_layer in range(len(depths)):\n            embed_dim = base_dim * 2 ** i_layer\n            down_sampler = ConvBN(prev_chs, embed_dim, 3, stride=2, padding=1, **dd)\n            blocks = [Block(embed_dim, mlp_ratio, dpr[cur + i], act_layer, **dd) for i in range(depths[i_layer])]\n            cur += depths[i_layer]\n            prev_chs = embed_dim\n            stages.append(nn.Sequential(down_sampler, *blocks))\n            self.feature_info.append(dict(\n                    num_chs=prev_chs, reduction=2**(i_layer+2), module=f'stages.{i_layer}'))\n        self.stages = nn.Sequential(*stages)\n        # head\n        self.num_features = self.head_hidden_size = prev_chs\n        self.norm = nn.BatchNorm2d(self.num_features, **dd)\n        self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)\n        self.flatten = nn.Flatten(1) if global_pool else nn.Identity()  # don't flatten if pooling disabled\n        self.head = Linear(self.num_features, num_classes, **dd) if num_classes > 0 else nn.Identity()\n        self.apply(self._init_weights)\n\n    def _init_weights(self, m):\n        if isinstance(m, (nn.Linear, nn.Conv2d)):\n            trunc_normal_(m.weight, std=.02)\n            if isinstance(m, nn.Linear) and m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n        elif isinstance(m, nn.BatchNorm2d):\n            nn.init.constant_(m.bias, 0)\n            nn.init.constant_(m.weight, 1.0)\n\n    @torch.jit.ignore\n    def no_weight_decay(self) -> Set:\n        return set()\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        matcher = dict(\n            stem=r'^stem\\.\\d+',\n            blocks=[\n                (r'^stages\\.(\\d+)' if coarse else r'^stages\\.(\\d+)\\.(\\d+)', None),\n                (r'norm', (99999,))\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def 
set_grad_checkpointing(self, enable: bool = True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            # NOTE: cannot meaningfully change pooling of efficient head after creation\n            self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)\n            self.flatten = nn.Flatten(1) if global_pool else nn.Identity()  # don't flatten if pooling disabled\n        self.head = Linear(\n            self.head_hidden_size, num_classes,\n            device=self.head.weight.device if isinstance(self.head, nn.Linear) else None,\n            dtype=self.head.weight.dtype if isinstance(self.head, nn.Linear) else None,\n        ) if num_classes > 0 else nn.Identity()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = 
feature_take_indices(len(self.stages), indices)\n        last_idx = len(self.stages) - 1\n\n        # forward pass\n        x = self.stem(x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint_seq(stage, x)\n            else:\n                x = stage(x)\n            if feat_idx in take_indices:\n                if norm and feat_idx == last_idx:\n                    x_inter = self.norm(x)  # applying final norm last intermediate\n                else:\n                    x_inter = x\n                intermediates.append(x_inter)\n\n        if intermediates_only:\n            return intermediates\n\n        if feat_idx == last_idx:\n            x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.stem(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stages, x)\n        else:\n            x = self.stages(x)\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> 
torch.Tensor:\n        x = self.global_pool(x)\n        x = self.flatten(x)\n        if self.drop_rate > 0.:\n            x = F.dropout(x, p=self.drop_rate, training=self.training)\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(state_dict: Dict[str, torch.Tensor], model: nn.Module) -> Dict[str, torch.Tensor]:\n    return state_dict.get('state_dict', state_dict)\n\n\ndef _cfg(url: str = '', **kwargs: Any) -> Dict[str, Any]:\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.0.conv', 'classifier': 'head',\n        'paper_ids': 'arXiv:2403.19967',\n        'paper_name': 'Rewrite the Stars',\n        'origin_url': 'https://github.com/ma-xu/Rewrite-the-Stars', 'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'starnet_s1.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/ma-xu/Rewrite-the-Stars/releases/download/checkpoints_v1/starnet_s1.pth.tar',\n    ),\n    'starnet_s2.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/ma-xu/Rewrite-the-Stars/releases/download/checkpoints_v1/starnet_s2.pth.tar',\n    ),\n    'starnet_s3.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/ma-xu/Rewrite-the-Stars/releases/download/checkpoints_v1/starnet_s3.pth.tar',\n    ),\n    'starnet_s4.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/ma-xu/Rewrite-the-Stars/releases/download/checkpoints_v1/starnet_s4.pth.tar',\n    ),\n    'starnet_s050.untrained': _cfg(),\n    'starnet_s100.untrained': _cfg(),\n    'starnet_s150.untrained': _cfg(),\n})\n\n\ndef 
_create_starnet(variant: str, pretrained: bool = False, **kwargs: Any) -> StarNet:\n    model = build_model_with_cfg(\n        StarNet, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=(0, 1, 2, 3), flatten_sequential=True),\n        **kwargs,\n    )\n    return model\n\n\n@register_model\ndef starnet_s1(pretrained: bool = False, **kwargs: Any) -> StarNet:\n    model_args = dict(base_dim=24, depths=[2, 2, 8, 3])\n    return _create_starnet('starnet_s1', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef starnet_s2(pretrained: bool = False, **kwargs: Any) -> StarNet:\n    model_args = dict(base_dim=32, depths=[1, 2, 6, 2])\n    return _create_starnet('starnet_s2', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef starnet_s3(pretrained: bool = False, **kwargs: Any) -> StarNet:\n    model_args = dict(base_dim=32, depths=[2, 2, 8, 4])\n    return _create_starnet('starnet_s3', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef starnet_s4(pretrained: bool = False, **kwargs: Any) -> StarNet:\n    model_args = dict(base_dim=32, depths=[3, 3, 12, 5])\n    return _create_starnet('starnet_s4', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n# very small networks #\n@register_model\ndef starnet_s050(pretrained: bool = False, **kwargs: Any) -> StarNet:\n    model_args = dict(base_dim=16, depths=[1, 1, 3, 1], mlp_ratio=3)\n    return _create_starnet('starnet_s050', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef starnet_s100(pretrained: bool = False, **kwargs: Any) -> StarNet:\n    model_args = dict(base_dim=20, depths=[1, 2, 4, 1], mlp_ratio=4)\n    return _create_starnet('starnet_s100', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef starnet_s150(pretrained: bool = False, **kwargs: Any) -> StarNet:\n    model_args = dict(base_dim=24, depths=[1, 2, 4, 2], 
mlp_ratio=3)\n    return _create_starnet('starnet_s150', pretrained=pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/swiftformer.py",
    "content": "\"\"\"SwiftFormer\nSwiftFormer: Efficient Additive Attention for Transformer-based Real-time Mobile Vision Applications\nCode: https://github.com/Amshaker/SwiftFormer\nPaper: https://arxiv.org/pdf/2303.15446\n\n@InProceedings{Shaker_2023_ICCV,\n    author    = {Shaker, Abdelrahman and Maaz, Muhammad and Rasheed, Hanoona and Khan, Salman and Yang, Ming-Hsuan and Khan, Fahad Shahbaz},\n    title     = {SwiftFormer: Efficient Additive Attention for Transformer-based Real-time Mobile Vision Applications},\n    booktitle = {Proceedings of the IEEE/CVF International Conference on Computer Vision (ICCV)},\n    year      = {2023},\n}\n\"\"\"\nimport re\nfrom typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import DropPath, Linear, LayerType, to_2tuple, trunc_normal_\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import generate_default_cfgs, register_model\n\n__all__ = ['SwiftFormer']\n\n\nclass LayerScale2d(nn.Module):\n    def __init__(self, dim: int, init_values: float = 1e-5, inplace: bool = False, device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.inplace = inplace\n        self.gamma = nn.Parameter(\n            init_values * torch.ones(dim, 1, 1, **dd), requires_grad=True)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        return x.mul_(self.gamma) if self.inplace else x * self.gamma\n\n\nclass Embedding(nn.Module):\n    \"\"\"\n    Patch Embedding that is implemented by a layer of conv.\n    Input: tensor in shape [B, C, H, W]\n    Output: tensor in shape [B, C, H/stride, W/stride]\n    \"\"\"\n    def __init__(\n            self,\n            in_chans: int = 3,\n            embed_dim: int = 768,\n 
           patch_size: int = 16,\n            stride: int = 16,\n            padding: int = 0,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        patch_size = to_2tuple(patch_size)\n        stride = to_2tuple(stride)\n        padding = to_2tuple(padding)\n        self.proj = nn.Conv2d(in_chans, embed_dim, patch_size, stride, padding, **dd)\n        self.norm = norm_layer(embed_dim, **dd) if norm_layer else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.proj(x)\n        x = self.norm(x)\n        return x\n\n\nclass ConvEncoder(nn.Module):\n    \"\"\"\n    Implementation of ConvEncoder with 3*3 and 1*1 convolutions.\n    Input: tensor with shape [B, C, H, W]\n    Output: tensor with shape [B, C, H, W]\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            hidden_dim: int = 64,\n            kernel_size: int = 3,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            use_layer_scale: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dwconv = nn.Conv2d(dim, dim, kernel_size, padding=kernel_size // 2, groups=dim, **dd)\n        self.norm = norm_layer(dim, **dd)\n        self.pwconv1 = nn.Conv2d(dim, hidden_dim, 1, **dd)\n        self.act = act_layer()\n        self.pwconv2 = nn.Conv2d(hidden_dim, dim, 1, **dd)\n        self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n        self.layer_scale = LayerScale2d(dim, 1, **dd) if use_layer_scale else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        input = x\n        x = self.dwconv(x)\n        x = self.norm(x)\n        x = self.pwconv1(x)\n        x = self.act(x)\n        x = self.pwconv2(x)\n        x = self.layer_scale(x)\n        x = input + self.drop_path(x)\n        return x\n\n\nclass Mlp(nn.Module):\n    \"\"\"\n    Implementation of MLP layer with 1*1 convolutions.\n    Input: tensor with shape [B, C, H, W]\n    Output: tensor with shape [B, C, H, W]\n    \"\"\"\n    def __init__(\n            self,\n            in_features: int,\n            hidden_features: Optional[int] = None,\n            out_features: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_features = out_features or in_features\n        hidden_features = hidden_features or in_features\n        self.norm1 = norm_layer(in_features, **dd)\n        self.fc1 = nn.Conv2d(in_features, hidden_features, 1, **dd)\n        self.act = act_layer()\n        self.fc2 = nn.Conv2d(hidden_features, out_features, 1, **dd)\n        self.drop = nn.Dropout(drop)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.norm1(x)\n        x = self.fc1(x)\n        x = self.act(x)\n        x = self.drop(x)\n        x = self.fc2(x)\n        x = self.drop(x)\n        return x\n\n\nclass EfficientAdditiveAttention(nn.Module):\n    \"\"\"\n    Efficient Additive Attention module for SwiftFormer.\n    Input: tensor in shape [B, C, H, W]\n    Output: tensor in shape [B, C, H, W]\n    \"\"\"\n    def __init__(\n            self,\n            in_dims: int = 512,\n            token_dim: int = 256,\n            
num_heads: int = 1,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.scale_factor = token_dim ** -0.5\n        self.to_query = nn.Linear(in_dims, token_dim * num_heads, **dd)\n        self.to_key = nn.Linear(in_dims, token_dim * num_heads, **dd)\n\n        self.w_g = nn.Parameter(torch.randn(token_dim * num_heads, 1, **dd))\n\n        self.proj = nn.Linear(token_dim * num_heads, token_dim * num_heads, **dd)\n        self.final = nn.Linear(token_dim * num_heads, token_dim, **dd)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        B, _, H, W = x.shape\n        x = x.flatten(2).permute(0, 2, 1)\n\n        query = F.normalize(self.to_query(x), dim=-1)\n        key = F.normalize(self.to_key(x), dim=-1)\n\n        attn = F.normalize(query @ self.w_g * self.scale_factor, dim=1)\n        attn = torch.sum(attn * query, dim=1, keepdim=True)\n\n        out = self.proj(attn * key) + query\n        out = self.final(out).permute(0, 2, 1).reshape(B, -1, H, W)\n        return out\n\n\nclass LocalRepresentation(nn.Module):\n    \"\"\"\n    Local Representation module for SwiftFormer that is implemented by 3*3 depth-wise and point-wise convolutions.\n    Input: tensor in shape [B, C, H, W]\n    Output: tensor in shape [B, C, H, W]\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            kernel_size: int = 3,\n            drop_path: float = 0.,\n            use_layer_scale: bool = True,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dwconv = nn.Conv2d(dim, dim, kernel_size, padding=kernel_size // 2, groups=dim, **dd)\n        self.norm = norm_layer(dim, **dd)\n        self.pwconv1 = nn.Conv2d(dim, dim, kernel_size=1, **dd)\n        
self.act = act_layer()\n        self.pwconv2 = nn.Conv2d(dim, dim, kernel_size=1, **dd)\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n        self.layer_scale = LayerScale2d(dim, 1, **dd) if use_layer_scale else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        skip = x\n        x = self.dwconv(x)\n        x = self.norm(x)\n        x = self.pwconv1(x)\n        x = self.act(x)\n        x = self.pwconv2(x)\n        x = self.layer_scale(x)\n        x = skip + self.drop_path(x)\n        return x\n\n\nclass Block(nn.Module):\n    \"\"\"\n    SwiftFormer Encoder Block for SwiftFormer. It consists of :\n    (1) Local representation module, (2) EfficientAdditiveAttention, and (3) MLP block.\n    Input: tensor in shape [B, C, H, W]\n    Output: tensor in shape [B, C, H, W]\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            mlp_ratio: float = 4.,\n            drop_rate: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            use_layer_scale: bool = True,\n            layer_scale_init_value: float = 1e-5,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.local_representation = LocalRepresentation(\n            dim=dim,\n            use_layer_scale=use_layer_scale,\n            act_layer=act_layer,\n            norm_layer=norm_layer,\n            **dd,\n        )\n        self.attn = EfficientAdditiveAttention(in_dims=dim, token_dim=dim, **dd)\n        self.linear = Mlp(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            norm_layer=norm_layer,\n            drop=drop_rate,\n            **dd,\n        )\n        self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n        self.layer_scale_1 = LayerScale2d(dim, layer_scale_init_value, **dd) \\\n            if use_layer_scale else nn.Identity()\n        self.layer_scale_2 = LayerScale2d(dim, layer_scale_init_value, **dd) \\\n            if use_layer_scale else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.local_representation(x)\n        x = x + self.drop_path(self.layer_scale_1(self.attn(x)))\n        x = x + self.drop_path(self.layer_scale_2(self.linear(x)))\n        return x\n\n\nclass Stage(nn.Module):\n    \"\"\"\n    Implementation of each SwiftFormer stages. Here, SwiftFormerEncoder used as the last block in all stages, while ConvEncoder used in the rest of the blocks.\n    Input: tensor in shape [B, C, H, W]\n    Output: tensor in shape [B, C, H, W]\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            index: int,\n            layers: List[int],\n            mlp_ratio: float = 4.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            use_layer_scale: bool = True,\n            layer_scale_init_value: float = 1e-5,\n            downsample: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.grad_checkpointing = False\n        self.downsample = downsample if downsample is not None else nn.Identity()\n\n        blocks = []\n        for block_idx in range(layers[index]):\n            block_dpr = drop_path_rate * (block_idx + sum(layers[:index])) / (sum(layers) - 1)\n            if layers[index] - block_idx <= 1:\n                blocks.append(Block(\n                    dim,\n                    mlp_ratio=mlp_ratio,\n                    drop_rate=drop_rate,\n                    drop_path=block_dpr,\n       
             act_layer=act_layer,\n                    norm_layer=norm_layer,\n                    use_layer_scale=use_layer_scale,\n                    layer_scale_init_value=layer_scale_init_value,\n                    **dd,\n                ))\n            else:\n                blocks.append(ConvEncoder(\n                    dim=dim,\n                    hidden_dim=int(mlp_ratio * dim),\n                    kernel_size=3,\n                    drop_path=block_dpr,\n                    act_layer=act_layer,\n                    norm_layer=norm_layer,\n                    use_layer_scale=use_layer_scale,\n                    **dd,\n                ))\n        self.blocks = nn.Sequential(*blocks)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.downsample(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        return x\n\n\nclass SwiftFormer(nn.Module):\n    def __init__(\n            self,\n            layers: List[int] = [3, 3, 6, 4],\n            embed_dims: List[int] = [48, 56, 112, 220],\n            mlp_ratios: int = 4,\n            downsamples: List[bool] = [False, True, True, True],\n            act_layer: Type[nn.Module] = nn.GELU,\n            down_patch_size: int = 3,\n            down_stride: int = 2,\n            down_pad: int = 1,\n            num_classes: int = 1000,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            use_layer_scale: bool = True,\n            layer_scale_init_value: float = 1e-5,\n            global_pool: str = 'avg',\n            output_stride: int = 32,\n            in_chans: int = 3,\n            device=None,\n            dtype=None,\n            **kwargs,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert output_stride == 32\n        self.num_classes = num_classes\n        self.in_chans = 
in_chans\n        self.global_pool = global_pool\n        self.feature_info = []\n\n        self.stem = nn.Sequential(\n            nn.Conv2d(in_chans, embed_dims[0] // 2, 3, 2, 1, **dd),\n            nn.BatchNorm2d(embed_dims[0] // 2, **dd),\n            nn.ReLU(),\n            nn.Conv2d(embed_dims[0] // 2, embed_dims[0], 3, 2, 1, **dd),\n            nn.BatchNorm2d(embed_dims[0], **dd),\n            nn.ReLU(),\n        )\n        prev_dim = embed_dims[0]\n\n        stages = []\n        for i in range(len(layers)):\n            downsample = Embedding(\n                in_chans=prev_dim,\n                embed_dim=embed_dims[i],\n                patch_size=down_patch_size,\n                stride=down_stride,\n                padding=down_pad,\n                **dd,\n            ) if downsamples[i] else nn.Identity()\n            stage = Stage(\n                dim=embed_dims[i],\n                index=i,\n                layers=layers,\n                mlp_ratio=mlp_ratios,\n                act_layer=act_layer,\n                drop_rate=drop_rate,\n                drop_path_rate=drop_path_rate,\n                use_layer_scale=use_layer_scale,\n                layer_scale_init_value=layer_scale_init_value,\n                downsample=downsample,\n                **dd,\n            )\n            prev_dim = embed_dims[i]\n            stages.append(stage)\n            self.feature_info += [dict(num_chs=embed_dims[i], reduction=2**(i+2), module=f'stages.{i}')]\n        self.stages = nn.Sequential(*stages)\n\n        # Classifier head\n        self.num_features  = self.head_hidden_size = out_chs = embed_dims[-1]\n        self.norm = nn.BatchNorm2d(out_chs, **dd)\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = Linear(out_chs, num_classes, **dd) if num_classes > 0 else nn.Identity()\n        # assuming model is always distilled (valid for current checkpoints, will split def if that changes)\n        self.head_dist = Linear(out_chs, num_classes, 
**dd) if num_classes > 0 else nn.Identity()\n        self.distilled_training = False  # must set this True to train w/ distillation token\n        self._initialize_weights()\n\n    def _initialize_weights(self):\n        for name, m in self.named_modules():\n            if isinstance(m, nn.Linear):\n                trunc_normal_(m.weight, std=.02)\n                if m.bias is not None:\n                    nn.init.constant_(m.bias, 0)\n            elif isinstance(m, nn.Conv2d):\n                trunc_normal_(m.weight, std=.02)\n                if m.bias is not None:\n                    nn.init.constant_(m.bias, 0)\n\n    @torch.jit.ignore\n    def no_weight_decay(self) -> Set:\n        return set()\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        matcher = dict(\n            stem=r'^stem',  # stem and embed\n            blocks=r'^stages\\.(\\d+)' if coarse else [\n                (r'^stages\\.(\\d+).downsample', (0,)),\n                (r'^stages\\.(\\d+)\\.blocks\\.(\\d+)', None),\n                (r'^norm', (99999,)),\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True):\n        for s in self.stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> Tuple[nn.Module, nn.Module]:\n        return self.head, self.head_dist\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            self.global_pool = global_pool\n        device, dtype = self.head.weight.device, self.head.weight.dtype if hasattr(self.head, 'weight') else (None, None)\n        self.head = Linear(self.num_features, num_classes, device=device, dtype=dtype) if num_classes > 0 else nn.Identity()\n        self.head_dist = Linear(self.num_features, num_classes, device=device, dtype=dtype) if num_classes > 
0 else nn.Identity()\n\n    @torch.jit.ignore\n    def set_distilled_training(self, enable: bool = True):\n        self.distilled_training = enable\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        last_idx = len(self.stages) - 1\n\n        # forward pass\n        x = self.stem(x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            x = stage(x)\n            if feat_idx in take_indices:\n                if norm and feat_idx == last_idx:\n                    x_inter = self.norm(x)  # applying final norm last intermediate\n                else:\n                    x_inter = x\n                intermediates.append(x_inter)\n\n        if intermediates_only:\n            return intermediates\n\n        if feat_idx == last_idx:\n            
x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.stem(x)\n        x = self.stages(x)\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False):\n        if self.global_pool == 'avg':\n            x = x.mean(dim=(2, 3))\n        x = self.head_drop(x)\n        if pre_logits:\n            return x\n        x, x_dist = self.head(x), self.head_dist(x)\n        if self.distilled_training and self.training and not torch.jit.is_scripting():\n            # only return separate classification predictions when training in distilled mode\n            return x, x_dist\n        else:\n            # during standard train/finetune, inference average the classifier predictions\n            return (x + x_dist) / 2\n\n    def forward(self, x: torch.Tensor):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(state_dict: Dict[str, torch.Tensor], model: nn.Module) -> Dict[str, torch.Tensor]:\n    state_dict = state_dict.get('model', state_dict)\n    if 'stem.0.weight' in state_dict:\n        return state_dict\n\n    out_dict = {}\n    for k, v in state_dict.items():\n        k = k.replace('patch_embed.', 'stem.')\n        k = k.replace('dist_head.', 'head_dist.')\n        k = 
k.replace('attn.Proj.', 'attn.proj.')\n        k = k.replace('.layer_scale_1', '.layer_scale_1.gamma')\n        k = k.replace('.layer_scale_2', '.layer_scale_2.gamma')\n        k = re.sub(r'\\.layer_scale(?=$|\\.)', '.layer_scale.gamma', k)\n        m = re.match(r'^network\\.(\\d+)\\.(.*)', k)\n        if m:\n            n_idx, rest = int(m.group(1)), m.group(2)\n            stage_idx = n_idx // 2\n            if n_idx % 2 == 0:\n                k = f'stages.{stage_idx}.blocks.{rest}'\n            else:\n                k = f'stages.{stage_idx+1}.downsample.{rest}'\n\n        out_dict[k] = v\n    return out_dict\n\n\ndef _cfg(url: str = '', **kwargs: Any) -> Dict[str, Any]:\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None, 'fixed_input_size': True,\n        'crop_pct': .95, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.0', 'classifier': ('head', 'head_dist'),\n        'license': 'apache-2.0',\n        'paper_ids': 'arXiv:2303.15446',\n        'paper_name': 'SwiftFormer: Efficient Additive Attention for Transformer-based Real-time Mobile Vision Applications',\n        'origin_url': 'https://github.com/Amshaker/SwiftFormer',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'swiftformer_xs.dist_in1k': _cfg(\n        hf_hub_id='timm/',\n    ),\n    'swiftformer_s.dist_in1k': _cfg(\n        hf_hub_id='timm/'\n    ),\n    'swiftformer_l1.dist_in1k': _cfg(\n        hf_hub_id='timm/'\n    ),\n    'swiftformer_l3.dist_in1k': _cfg(\n        hf_hub_id='timm/'\n    ),\n})\n\n\ndef _create_swiftformer(variant: str, pretrained: bool = False, **kwargs: Any) -> SwiftFormer:\n    model = build_model_with_cfg(\n        SwiftFormer, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=(0, 1, 2, 3), flatten_sequential=True),\n        **kwargs,\n    )\n    return 
model\n\n\n@register_model\ndef swiftformer_xs(pretrained: bool = False, **kwargs: Any) -> SwiftFormer:\n    model_args = dict(layers=[3, 3, 6, 4], embed_dims=[48, 56, 112, 220])\n    return _create_swiftformer('swiftformer_xs', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swiftformer_s(pretrained: bool = False, **kwargs: Any) -> SwiftFormer:\n    model_args = dict(layers=[3, 3, 9, 6], embed_dims=[48, 64, 168, 224])\n    return _create_swiftformer('swiftformer_s', pretrained=pretrained, **dict(model_args, **kwargs))\n\n@register_model\ndef swiftformer_l1(pretrained: bool = False, **kwargs: Any) -> SwiftFormer:\n    model_args = dict(layers=[4, 3, 10, 5], embed_dims=[48, 96, 192, 384])\n    return _create_swiftformer('swiftformer_l1', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swiftformer_l3(pretrained: bool = False, **kwargs: Any) -> SwiftFormer:\n    model_args = dict(layers=[4, 4, 12, 6], embed_dims=[64, 128, 320, 512])\n    return _create_swiftformer('swiftformer_l3', pretrained=pretrained, **dict(model_args, **kwargs))"
  },
  {
    "path": "timm/models/swin_transformer.py",
    "content": "\"\"\" Swin Transformer\nA PyTorch impl of : `Swin Transformer: Hierarchical Vision Transformer using Shifted Windows`\n    - https://arxiv.org/pdf/2103.14030\n\nCode/weights from https://github.com/microsoft/Swin-Transformer, original copyright/license info below\n\nS3 (AutoFormerV2, https://arxiv.org/abs/2111.14725) Swin weights from\n    - https://github.com/microsoft/Cream/tree/main/AutoFormerV2\n\nModifications and additions for timm hacked together by / Copyright 2021, Ross Wightman\n\"\"\"\n# --------------------------------------------------------\n# Swin Transformer\n# Copyright (c) 2021 Microsoft\n# Licensed under The MIT License [see LICENSE for details]\n# Written by Ze Liu\n# --------------------------------------------------------\nimport logging\nimport math\nfrom typing import Any, Dict, Callable, List, Optional, Set, Tuple, Union, Type\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import PatchEmbed, Mlp, DropPath, calculate_drop_path_rates, ClassifierHead, to_2tuple, to_ntuple, trunc_normal_, \\\n    use_fused_attn, resize_rel_pos_bias_table, resample_patch_embed, ndgrid\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_function\nfrom ._manipulate import checkpoint_seq, named_apply\nfrom ._registry import generate_default_cfgs, register_model, register_model_deprecations\nfrom .vision_transformer import get_init_weights_vit\n\n__all__ = ['SwinTransformer']  # model_registry will add each entrypoint fn to this\n\n_logger = logging.getLogger(__name__)\n\n_int_or_tuple_2_t = Union[int, Tuple[int, int]]\n\n\ndef window_partition(\n        x: torch.Tensor,\n        window_size: Tuple[int, int],\n) -> torch.Tensor:\n    \"\"\"Partition into non-overlapping windows.\n\n    Args:\n        x: Input tokens with shape [B, H, W, C].\n        window_size: Window size.\n\n    Returns:\n 
       Windows after partition with shape [B * num_windows, window_size, window_size, C].\n    \"\"\"\n    B, H, W, C = x.shape\n    x = x.view(B, H // window_size[0], window_size[0], W // window_size[1], window_size[1], C)\n    windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size[0], window_size[1], C)\n    return windows\n\n\n@register_notrace_function  # reason: int argument is a Proxy\ndef window_reverse(windows: torch.Tensor, window_size: Tuple[int, int], H: int, W: int) -> torch.Tensor:\n    \"\"\"Reverse window partition.\n\n    Args:\n        windows: Windows with shape (num_windows*B, window_size, window_size, C).\n        window_size: Window size.\n        H: Height of image.\n        W: Width of image.\n\n    Returns:\n        Tensor with shape (B, H, W, C).\n    \"\"\"\n    C = windows.shape[-1]\n    x = windows.view(-1, H // window_size[0], W // window_size[1], window_size[0], window_size[1], C)\n    x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, H, W, C)\n    return x\n\n\ndef get_relative_position_index(win_h: int, win_w: int, device=None) -> torch.Tensor:\n    \"\"\"Get pair-wise relative position index for each token inside the window.\n\n    Args:\n        win_h: Window height.\n        win_w: Window width.\n\n    Returns:\n        Relative position index tensor.\n    \"\"\"\n    # get pair-wise relative position index for each token inside the window\n    coords = torch.stack(ndgrid(\n        torch.arange(win_h, device=device, dtype=torch.long),\n        torch.arange(win_w, device=device, dtype=torch.long),\n    ))  # 2, Wh, Ww\n    coords_flatten = torch.flatten(coords, 1)  # 2, Wh*Ww\n    relative_coords = coords_flatten[:, :, None] - coords_flatten[:, None, :]  # 2, Wh*Ww, Wh*Ww\n    relative_coords = relative_coords.permute(1, 2, 0).contiguous()  # Wh*Ww, Wh*Ww, 2\n    relative_coords[:, :, 0] += win_h - 1  # shift to start from 0\n    relative_coords[:, :, 1] += win_w - 1\n    relative_coords[:, :, 0] *= 2 * win_w - 
1\n    return relative_coords.sum(-1)  # Wh*Ww, Wh*Ww\n\n\nclass WindowAttention(nn.Module):\n    \"\"\"Window based multi-head self attention (W-MSA) module with relative position bias.\n\n    Supports both shifted and non-shifted windows.\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            head_dim: Optional[int] = None,\n            window_size: _int_or_tuple_2_t = 7,\n            qkv_bias: bool = True,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            dim: Number of input channels.\n            num_heads: Number of attention heads.\n            head_dim: Number of channels per head (dim // num_heads if not set)\n            window_size: The height and width of the window.\n            qkv_bias:  If True, add a learnable bias to query, key, value.\n            attn_drop: Dropout ratio of attention weight.\n            proj_drop: Dropout ratio of output.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.window_size = to_2tuple(window_size)  # Wh, Ww\n        win_h, win_w = self.window_size\n        self.window_area = win_h * win_w\n        self.num_heads = num_heads\n        head_dim = head_dim or dim // num_heads\n        attn_dim = head_dim * num_heads\n        self.scale = head_dim ** -0.5\n        self.fused_attn = use_fused_attn(experimental=True)  # NOTE not tested for prime-time yet\n\n        # define a parameter table of relative position bias, shape: 2*Wh-1 * 2*Ww-1, nH\n        self.relative_position_bias_table = nn.Parameter(\n            torch.empty((2 * win_h - 1) * (2 * win_w - 1), num_heads, **dd))\n\n        # register empty buffer for relative position index\n        self.register_buffer(\n            \"relative_position_index\",\n           
 torch.empty(win_h * win_w, win_h * win_w, device=device, dtype=torch.long),\n            persistent=False,\n        )\n\n        self.qkv = nn.Linear(dim, attn_dim * 3, bias=qkv_bias, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(attn_dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n        self.softmax = nn.Softmax(dim=-1)\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        trunc_normal_(self.relative_position_bias_table, std=.02)\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        win_h, win_w = self.window_size\n        self.relative_position_index.copy_(\n            get_relative_position_index(win_h, win_w, device=self.relative_position_index.device)\n        )\n\n    def set_window_size(self, window_size: Tuple[int, int]) -> None:\n        \"\"\"Update window size & interpolate position embeddings\n        Args:\n            window_size (int): New window size\n        \"\"\"\n        window_size = to_2tuple(window_size)\n        if window_size == self.window_size:\n            return\n        self.window_size = window_size\n        win_h, win_w = self.window_size\n        self.window_area = win_h * win_w\n        with torch.no_grad():\n            new_bias_shape = (2 * win_h - 1) * (2 * win_w - 1), self.num_heads\n            self.relative_position_bias_table = nn.Parameter(\n                resize_rel_pos_bias_table(\n                    self.relative_position_bias_table,\n                    new_window_size=self.window_size,\n                    new_bias_shape=new_bias_shape,\n            ))\n            self.register_buffer(\n                \"relative_position_index\",\n                get_relative_position_index(win_h, win_w, 
device=self.relative_position_bias_table.device),\n                persistent=False,\n            )\n\n    def _get_rel_pos_bias(self) -> torch.Tensor:\n        relative_position_bias = self.relative_position_bias_table[\n            self.relative_position_index.view(-1)].view(self.window_area, self.window_area, -1)  # Wh*Ww,Wh*Ww,nH\n        relative_position_bias = relative_position_bias.permute(2, 0, 1).contiguous()  # nH, Wh*Ww, Wh*Ww\n        return relative_position_bias.unsqueeze(0)\n\n    def forward(self, x: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input features with shape of (num_windows*B, N, C).\n            mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None.\n\n        Returns:\n            Output features with shape of (num_windows*B, N, C).\n        \"\"\"\n        B_, N, C = x.shape\n        qkv = self.qkv(x).reshape(B_, N, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4)\n        q, k, v = qkv.unbind(0)\n\n        if self.fused_attn:\n            attn_mask = self._get_rel_pos_bias()\n            if mask is not None:\n                num_win = mask.shape[0]\n                mask = mask.view(1, num_win, 1, N, N).expand(B_ // num_win, -1, self.num_heads, -1, -1)\n                attn_mask = attn_mask + mask.reshape(-1, self.num_heads, N, N)\n            x = torch.nn.functional.scaled_dot_product_attention(\n                q, k, v,\n                attn_mask=attn_mask,\n                dropout_p=self.attn_drop.p if self.training else 0.,\n            )\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            attn = attn + self._get_rel_pos_bias()\n            if mask is not None:\n                num_win = mask.shape[0]\n                attn = attn.view(-1, num_win, self.num_heads, N, N) + mask.unsqueeze(1).unsqueeze(0)\n                attn = attn.view(-1, self.num_heads, N, N)\n            attn = 
self.softmax(attn)\n            attn = self.attn_drop(attn)\n            x = attn @ v\n\n        x = x.transpose(1, 2).reshape(B_, N, -1)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n\nclass SwinTransformerBlock(nn.Module):\n    \"\"\"Swin Transformer Block.\n\n    A transformer block with window-based self-attention and shifted windows.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            input_resolution: _int_or_tuple_2_t,\n            num_heads: int = 4,\n            head_dim: Optional[int] = None,\n            window_size: _int_or_tuple_2_t = 7,\n            shift_size: int = 0,\n            always_partition: bool = False,\n            dynamic_mask: bool = False,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            dim: Number of input channels.\n            input_resolution: Input resolution.\n            window_size: Window size.\n            num_heads: Number of attention heads.\n            head_dim: Enforce the number of channels per head\n            shift_size: Shift size for SW-MSA.\n            always_partition: Always partition into full windows and shift\n            mlp_ratio: Ratio of mlp hidden dim to embedding dim.\n            qkv_bias: If True, add a learnable bias to query, key, value.\n            proj_drop: Dropout rate.\n            attn_drop: Attention dropout rate.\n            drop_path: Stochastic depth rate.\n            act_layer: Activation layer.\n            norm_layer: Normalization layer.\n    
    \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.input_resolution = input_resolution\n        self.target_shift_size = to_2tuple(shift_size)  # store for later resize\n        self.always_partition = always_partition\n        self.dynamic_mask = dynamic_mask\n        self.window_size, self.shift_size = self._calc_window_shift(window_size, shift_size)\n        self.window_area = self.window_size[0] * self.window_size[1]\n        self.mlp_ratio = mlp_ratio\n\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = WindowAttention(\n            dim,\n            num_heads=num_heads,\n            head_dim=head_dim,\n            window_size=self.window_size,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            **dd,\n        )\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp = Mlp(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n        # Register buffer as None initially, will be computed in reset_parameters if needed\n        self.register_buffer(\"attn_mask\", None, persistent=False)\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        if not self.dynamic_mask:\n            device = self.norm1.weight.device\n            dtype = self.norm1.weight.dtype\n            attn_mask = self.get_attn_mask(device=device, dtype=dtype)\n            self.register_buffer(\"attn_mask\", attn_mask, persistent=False)\n\n    def get_attn_mask(\n            self,\n            x: Optional[torch.Tensor] = None,\n            device: Optional[torch.device] = None,\n            dtype: Optional[torch.dtype] = None,\n    ) -> Optional[torch.Tensor]:\n        if any(self.shift_size):\n            # calculate attention mask for SW-MSA\n            if x is not None:\n                H, W = x.shape[1], x.shape[2]\n                device = x.device\n                dtype = x.dtype\n            else:\n                H, W = self.input_resolution\n                device = device\n                dtype = dtype\n            H = math.ceil(H / self.window_size[0]) * self.window_size[0]\n            W = math.ceil(W / self.window_size[1]) * self.window_size[1]\n            img_mask = torch.zeros((1, H, W, 1), dtype=dtype, device=device)  # 1 H W 1\n            cnt = 0\n            for h in (\n                    (0, -self.window_size[0]),\n                    (-self.window_size[0], -self.shift_size[0]),\n                    (-self.shift_size[0], None),\n            ):\n                for w in (\n                        (0, -self.window_size[1]),\n                        (-self.window_size[1], -self.shift_size[1]),\n 
                       (-self.shift_size[1], None),\n                ):\n                    img_mask[:, h[0]:h[1], w[0]:w[1], :] = cnt\n                    cnt += 1\n            mask_windows = window_partition(img_mask, self.window_size)  # nW, window_size, window_size, 1\n            mask_windows = mask_windows.view(-1, self.window_area)\n            attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2)\n            attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill(attn_mask == 0, float(0.0))\n        else:\n            attn_mask = None\n        return attn_mask\n\n    def _calc_window_shift(\n            self,\n            target_window_size: Union[int, Tuple[int, int]],\n            target_shift_size: Optional[Union[int, Tuple[int, int]]] = None,\n    ) -> Tuple[Tuple[int, int], Tuple[int, int]]:\n        target_window_size = to_2tuple(target_window_size)\n        if target_shift_size is None:\n            # if passed value is None, recalculate from default window_size // 2 if it was previously non-zero\n            target_shift_size = self.target_shift_size\n            if any(target_shift_size):\n                target_shift_size = (target_window_size[0] // 2, target_window_size[1] // 2)\n        else:\n            target_shift_size = to_2tuple(target_shift_size)\n\n        if self.always_partition:\n            return target_window_size, target_shift_size\n\n        window_size = [r if r <= w else w for r, w in zip(self.input_resolution, target_window_size)]\n        shift_size = [0 if r <= w else s for r, w, s in zip(self.input_resolution, window_size, target_shift_size)]\n        return tuple(window_size), tuple(shift_size)\n\n    def set_input_size(\n            self,\n            feat_size: Tuple[int, int],\n            window_size: Tuple[int, int],\n            always_partition: Optional[bool] = None,\n    ):\n        \"\"\"\n        Args:\n            feat_size: New input resolution\n            window_size: New 
window size\n            always_partition: Change always_partition attribute if not None\n        \"\"\"\n        self.input_resolution = feat_size\n        if always_partition is not None:\n            self.always_partition = always_partition\n        self.window_size, self.shift_size = self._calc_window_shift(window_size)\n        self.window_area = self.window_size[0] * self.window_size[1]\n        self.attn.set_window_size(self.window_size)\n        device = self.attn_mask.device if self.attn_mask is not None else None\n        dtype = self.attn_mask.dtype if self.attn_mask is not None else None\n        self.register_buffer(\n            \"attn_mask\",\n            None if self.dynamic_mask else self.get_attn_mask(device=device, dtype=dtype),\n            persistent=False,\n        )\n\n    def _attn(self, x):\n        B, H, W, C = x.shape\n\n        # cyclic shift\n        has_shift = any(self.shift_size)\n        if has_shift:\n            shifted_x = torch.roll(x, shifts=(-self.shift_size[0], -self.shift_size[1]), dims=(1, 2))\n        else:\n            shifted_x = x\n\n        # pad for resolution not divisible by window size\n        pad_h = (self.window_size[0] - H % self.window_size[0]) % self.window_size[0]\n        pad_w = (self.window_size[1] - W % self.window_size[1]) % self.window_size[1]\n        shifted_x = torch.nn.functional.pad(shifted_x, (0, 0, 0, pad_w, 0, pad_h))\n        _, Hp, Wp, _ = shifted_x.shape\n\n        # partition windows\n        x_windows = window_partition(shifted_x, self.window_size)  # nW*B, window_size, window_size, C\n        x_windows = x_windows.view(-1, self.window_area, C)  # nW*B, window_size*window_size, C\n\n        # W-MSA/SW-MSA\n        if getattr(self, 'dynamic_mask', False):\n            attn_mask = self.get_attn_mask(shifted_x)\n        else:\n            attn_mask = self.attn_mask\n        attn_windows = self.attn(x_windows, mask=attn_mask)  # nW*B, window_size*window_size, C\n\n        # merge windows\n     
   attn_windows = attn_windows.view(-1, self.window_size[0], self.window_size[1], C)\n        shifted_x = window_reverse(attn_windows, self.window_size, Hp, Wp)  # B H' W' C\n        shifted_x = shifted_x[:, :H, :W, :].contiguous()\n\n        # reverse cyclic shift\n        if has_shift:\n            x = torch.roll(shifted_x, shifts=self.shift_size, dims=(1, 2))\n        else:\n            x = shifted_x\n        return x\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input features with shape (B, H, W, C).\n\n        Returns:\n            Output features with shape (B, H, W, C).\n        \"\"\"\n        B, H, W, C = x.shape\n        x = x + self.drop_path1(self._attn(self.norm1(x)))\n        x = x.reshape(B, -1, C)\n        x = x + self.drop_path2(self.mlp(self.norm2(x)))\n        x = x.reshape(B, H, W, C)\n        return x\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n\nclass PatchMerging(nn.Module):\n    \"\"\"Patch Merging Layer.\n\n    Downsample features by merging 2x2 neighboring patches.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            out_dim: Optional[int] = None,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            dim: Number of input channels.\n            out_dim: Number of output channels (or 2 * dim if None)\n            norm_layer: Normalization layer.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.out_dim = out_dim or 2 * dim\n        self.norm = norm_layer(4 * dim, **dd)\n        self.reduction = nn.Linear(4 * dim, self.out_dim, bias=False, **dd)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n    
        x: Input features with shape (B, H, W, C).\n\n        Returns:\n            Output features with shape (B, H//2, W//2, out_dim).\n        \"\"\"\n        B, H, W, C = x.shape\n\n        pad_values = (0, 0, 0, W % 2, 0, H % 2)\n        x = nn.functional.pad(x, pad_values)\n        _, H, W, _ = x.shape\n\n        x = x.reshape(B, H // 2, 2, W // 2, 2, C).permute(0, 1, 3, 4, 2, 5).flatten(3)\n        x = self.norm(x)\n        x = self.reduction(x)\n        return x\n\n\nclass SwinTransformerStage(nn.Module):\n    \"\"\"A basic Swin Transformer layer for one stage.\n\n    Contains multiple Swin Transformer blocks and optional downsampling.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            out_dim: int,\n            input_resolution: Tuple[int, int],\n            depth: int,\n            downsample: bool = True,\n            num_heads: int = 4,\n            head_dim: Optional[int] = None,\n            window_size: _int_or_tuple_2_t = 7,\n            always_partition: bool = False,\n            dynamic_mask: bool = False,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: Union[List[float], float] = 0.,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            dim: Number of input channels.\n            out_dim: Number of output channels.\n            input_resolution: Input resolution.\n            depth: Number of blocks.\n            downsample: Downsample layer at the end of the layer.\n            num_heads: Number of attention heads.\n            head_dim: Channels per head (dim // num_heads if not set)\n            window_size: Local window size.\n            mlp_ratio: Ratio of mlp hidden dim to embedding dim.\n            qkv_bias: If True, add a learnable bias to query, key, value.\n            
proj_drop: Projection dropout rate.\n            attn_drop: Attention dropout rate.\n            drop_path: Stochastic depth rate.\n            norm_layer: Normalization layer.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.input_resolution = input_resolution\n        self.output_resolution = tuple(i // 2 for i in input_resolution) if downsample else input_resolution\n        self.depth = depth\n        self.grad_checkpointing = False\n        window_size = to_2tuple(window_size)\n        shift_size = tuple([w // 2 for w in window_size])\n\n        # patch merging layer\n        if downsample:\n            self.downsample = PatchMerging(\n                dim=dim,\n                out_dim=out_dim,\n                norm_layer=norm_layer,\n                **dd,\n            )\n        else:\n            assert dim == out_dim\n            self.downsample = nn.Identity()\n\n        # build blocks\n        self.blocks = nn.Sequential(*[\n            SwinTransformerBlock(\n                dim=out_dim,\n                input_resolution=self.output_resolution,\n                num_heads=num_heads,\n                head_dim=head_dim,\n                window_size=window_size,\n                shift_size=0 if (i % 2 == 0) else shift_size,\n                always_partition=always_partition,\n                dynamic_mask=dynamic_mask,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                proj_drop=proj_drop,\n                attn_drop=attn_drop,\n                drop_path=drop_path[i] if isinstance(drop_path, list) else drop_path,\n                norm_layer=norm_layer,\n                **dd,\n            )\n            for i in range(depth)])\n\n    def set_input_size(\n            self,\n            feat_size: Tuple[int, int],\n            window_size: int,\n            always_partition: Optional[bool] = None,\n    ):\n        \"\"\" Updates the resolution, 
window size and so the pair-wise relative positions.\n\n        Args:\n            feat_size: New input (feature) resolution\n            window_size: New window size\n            always_partition: Always partition / shift the window\n        \"\"\"\n        self.input_resolution = feat_size\n        if isinstance(self.downsample, nn.Identity):\n            self.output_resolution = feat_size\n        else:\n            self.output_resolution = tuple(i // 2 for i in feat_size)\n        for block in self.blocks:\n            block.set_input_size(\n                feat_size=self.output_resolution,\n                window_size=window_size,\n                always_partition=always_partition,\n            )\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input features.\n\n        Returns:\n            Output features.\n        \"\"\"\n        x = self.downsample(x)\n\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        return x\n\n\nclass SwinTransformer(nn.Module):\n    \"\"\"Swin Transformer.\n\n    A PyTorch impl of : `Swin Transformer: Hierarchical Vision Transformer using Shifted Windows`  -\n          https://arxiv.org/pdf/2103.14030\n    \"\"\"\n\n    def __init__(\n            self,\n            img_size: _int_or_tuple_2_t = 224,\n            patch_size: int = 4,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            embed_dim: int = 96,\n            depths: Tuple[int, ...] = (2, 2, 6, 2),\n            num_heads: Tuple[int, ...] 
= (3, 6, 12, 24),\n            head_dim: Optional[int] = None,\n            window_size: _int_or_tuple_2_t = 7,\n            always_partition: bool = False,\n            strict_img_size: bool = True,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.1,\n            embed_layer: Type[nn.Module] = PatchEmbed,\n            norm_layer: Union[str, Type[nn.Module]] = nn.LayerNorm,\n            weight_init: str = '',\n            device=None,\n            dtype=None,\n            **kwargs,\n    ):\n        \"\"\"\n        Args:\n            img_size: Input image size.\n            patch_size: Patch size.\n            in_chans: Number of input image channels.\n            num_classes: Number of classes for classification head.\n            embed_dim: Patch embedding dimension.\n            depths: Depth of each Swin Transformer layer.\n            num_heads: Number of attention heads in different layers.\n            head_dim: Dimension of self-attention heads.\n            window_size: Window size.\n            mlp_ratio: Ratio of mlp hidden dim to embedding dim.\n            qkv_bias: If True, add a learnable bias to query, key, value.\n            drop_rate: Dropout rate.\n            attn_drop_rate (float): Attention dropout rate.\n            drop_path_rate (float): Stochastic depth rate.\n            embed_layer: Patch embedding layer.\n            norm_layer (nn.Module): Normalization layer.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert global_pool in ('', 'avg')\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.output_fmt = 'NHWC'\n\n        self.num_layers = len(depths)\n        self.embed_dim = embed_dim\n        self.num_features = 
self.head_hidden_size = int(embed_dim * 2 ** (self.num_layers - 1))\n        self.feature_info = []\n\n        if not isinstance(embed_dim, (tuple, list)):\n            embed_dim = [int(embed_dim * 2 ** i) for i in range(self.num_layers)]\n\n        # split image into non-overlapping patches\n        self.patch_embed = embed_layer(\n            img_size=img_size,\n            patch_size=patch_size,\n            in_chans=in_chans,\n            embed_dim=embed_dim[0],\n            norm_layer=norm_layer,\n            strict_img_size=strict_img_size,\n            output_fmt='NHWC',\n            **dd,\n        )\n        patch_grid = self.patch_embed.grid_size\n\n        # build layers\n        head_dim = to_ntuple(self.num_layers)(head_dim)\n        if not isinstance(window_size, (list, tuple)):\n            window_size = to_ntuple(self.num_layers)(window_size)\n        elif len(window_size) == 2:\n            window_size = (window_size,) * self.num_layers\n        assert len(window_size) == self.num_layers\n        mlp_ratio = to_ntuple(self.num_layers)(mlp_ratio)\n        dpr = calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)\n        layers = []\n        in_dim = embed_dim[0]\n        scale = 1\n        for i in range(self.num_layers):\n            out_dim = embed_dim[i]\n            layers += [SwinTransformerStage(\n                dim=in_dim,\n                out_dim=out_dim,\n                input_resolution=(\n                    patch_grid[0] // scale,\n                    patch_grid[1] // scale\n                ),\n                depth=depths[i],\n                downsample=i > 0,\n                num_heads=num_heads[i],\n                head_dim=head_dim[i],\n                window_size=window_size[i],\n                always_partition=always_partition,\n                dynamic_mask=not strict_img_size,\n                mlp_ratio=mlp_ratio[i],\n                qkv_bias=qkv_bias,\n                proj_drop=proj_drop_rate,\n                
attn_drop=attn_drop_rate,\n                drop_path=dpr[i],\n                norm_layer=norm_layer,\n                **dd,\n            )]\n            in_dim = out_dim\n            if i > 0:\n                scale *= 2\n            self.feature_info += [dict(num_chs=out_dim, reduction=patch_size * scale, module=f'layers.{i}')]\n        self.layers = nn.Sequential(*layers)\n\n        self.norm = norm_layer(self.num_features, **dd)\n        self.head = ClassifierHead(\n            self.num_features,\n            num_classes,\n            pool_type=global_pool,\n            drop_rate=drop_rate,\n            input_fmt=self.output_fmt,\n            **dd,\n        )\n\n        self.weight_init_mode = 'reset' if weight_init == 'skip' else weight_init\n        # TODO: skip init when on meta device when safe to do so\n        if weight_init != 'skip':\n            self.init_weights(needs_reset=False)\n\n    @torch.jit.ignore\n    def init_weights(self, mode: str = '', needs_reset: bool = True) -> None:\n        \"\"\"Initialize model weights.\n\n        Args:\n            mode: Weight initialization mode ('jax', 'jax_nlhb', 'moco', or '').\n            needs_reset: If True, call reset_parameters() on modules that have it.\n                Set to False when modules have already self-initialized in __init__.\n        \"\"\"\n        mode = mode or self.weight_init_mode\n        assert mode in ('jax', 'jax_nlhb', 'moco', 'reset', '')\n        head_bias = -math.log(self.num_classes) if 'nlhb' in mode else 0.\n        named_apply(get_init_weights_vit(mode, head_bias=head_bias, needs_reset=needs_reset), self)\n\n    @torch.jit.ignore\n    def no_weight_decay(self) -> Set[str]:\n        \"\"\"Parameters that should not use weight decay.\"\"\"\n        nwd = set()\n        for n, _ in self.named_parameters():\n            if 'relative_position_bias_table' in n:\n                nwd.add(n)\n        return nwd\n\n    def set_input_size(\n            self,\n            img_size: 
Optional[Tuple[int, int]] = None,\n            patch_size: Optional[Tuple[int, int]] = None,\n            window_size: Optional[Tuple[int, int]] = None,\n            window_ratio: int = 8,\n            always_partition: Optional[bool] = None,\n    ) -> None:\n        \"\"\"Update the image resolution and window size.\n\n        Args:\n            img_size: New input resolution, if None current resolution is used.\n            patch_size: New patch size, if None use current patch size.\n            window_size: New window size, if None based on new_img_size // window_div.\n            window_ratio: Divisor for calculating window size from grid size.\n            always_partition: Always partition into windows and shift (even if window size < feat size).\n        \"\"\"\n        if img_size is not None or patch_size is not None:\n            self.patch_embed.set_input_size(img_size=img_size, patch_size=patch_size)\n            patch_grid = self.patch_embed.grid_size\n\n        if window_size is None:\n            window_size = tuple([pg // window_ratio for pg in patch_grid])\n\n        for index, stage in enumerate(self.layers):\n            stage_scale = 2 ** max(index - 1, 0)\n            stage.set_input_size(\n                feat_size=(patch_grid[0] // stage_scale, patch_grid[1] // stage_scale),\n                window_size=window_size,\n                always_partition=always_partition,\n            )\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        \"\"\"Group parameters for optimization.\"\"\"\n        return dict(\n            stem=r'^patch_embed',  # stem and embed\n            blocks=r'^layers\\.(\\d+)' if coarse else [\n                (r'^layers\\.(\\d+).downsample', (0,)),\n                (r'^layers\\.(\\d+)\\.\\w+\\.(\\d+)', None),\n                (r'^norm', (99999,)),\n            ]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        
\"\"\"Enable or disable gradient checkpointing.\"\"\"\n        for l in self.layers:\n            l.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        \"\"\"Get the classifier head.\"\"\"\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None) -> None:\n        \"\"\"Reset the classifier head.\n\n        Args:\n            num_classes: Number of classes for new classifier.\n            global_pool: Global pooling type.\n        \"\"\"\n        self.num_classes = num_classes\n        self.head.reset(num_classes, pool_type=global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\"Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor.\n            indices: Take last n blocks if int, all if None, select matching indices if sequence.\n            norm: Apply norm layer to compatible intermediates.\n            stop_early: Stop iterating over blocks when last desired intermediate hit.\n            output_fmt: Shape of intermediate feature outputs.\n            intermediates_only: Only return intermediate features.\n\n        Returns:\n            List of intermediate features or tuple of (final features, intermediates).\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.layers), indices)\n\n        # forward pass\n        x = self.patch_embed(x)\n\n        num_stages = len(self.layers)\n        if torch.jit.is_scripting() or not stop_early:  # 
can't slice blocks in torchscript\n            stages = self.layers\n        else:\n            stages = self.layers[:max_index + 1]\n        for i, stage in enumerate(stages):\n            x = stage(x)\n            if i in take_indices:\n                if norm and i == num_stages - 1:\n                    x_inter = self.norm(x)  # applying final norm last intermediate\n                else:\n                    x_inter = x\n                x_inter = x_inter.permute(0, 3, 1, 2).contiguous()\n                intermediates.append(x_inter)\n\n        if intermediates_only:\n            return intermediates\n\n        x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ) -> List[int]:\n        \"\"\"Prune layers not required for specified intermediates.\n\n        Args:\n            indices: Indices of intermediate layers to keep.\n            prune_norm: Whether to prune normalization layer.\n            prune_head: Whether to prune the classifier head.\n\n        Returns:\n            List of indices that were kept.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.layers), indices)\n        self.layers = self.layers[:max_index + 1]  # truncate blocks\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through feature extraction layers.\"\"\"\n        x = self.patch_embed(x)\n        x = self.layers(x)\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        \"\"\"Forward pass through classifier head.\n\n        Args:\n            x: Feature tensor.\n            
pre_logits: Return features before final classifier.\n\n        Returns:\n            Output tensor.\n        \"\"\"\n        return self.head(x, pre_logits=True) if pre_logits else self.head(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Output logits.\n        \"\"\"\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(state_dict: dict, model: nn.Module) -> Dict[str, torch.Tensor]:\n    \"\"\"Convert patch embedding weight from manual patchify + linear proj to conv.\n\n    Args:\n        state_dict: State dictionary from checkpoint.\n        model: Model instance.\n\n    Returns:\n        Filtered state dictionary.\n    \"\"\"\n    old_weights = True\n    if 'head.fc.weight' in state_dict:\n        old_weights = False\n    import re\n    out_dict = {}\n    state_dict = state_dict.get('model', state_dict)\n    state_dict = state_dict.get('state_dict', state_dict)\n    for k, v in state_dict.items():\n        if any([n in k for n in ('relative_position_index', 'attn_mask')]):\n            continue  # skip buffers that should not be persistent\n\n        if 'patch_embed.proj.weight' in k:\n            _, _, H, W = model.patch_embed.proj.weight.shape\n            if v.shape[-2] != H or v.shape[-1] != W:\n                v = resample_patch_embed(\n                    v,\n                    (H, W),\n                    interpolation='bicubic',\n                    antialias=True,\n                    verbose=True,\n                )\n\n        if k.endswith('relative_position_bias_table'):\n            m = model.get_submodule(k[:-29])\n            if v.shape != m.relative_position_bias_table.shape or m.window_size[0] != m.window_size[1]:\n                v = resize_rel_pos_bias_table(\n                    v,\n                    new_window_size=m.window_size,\n                    
new_bias_shape=m.relative_position_bias_table.shape,\n                )\n\n        if old_weights:\n            k = re.sub(r'layers.(\\d+).downsample', lambda x: f'layers.{int(x.group(1)) + 1}.downsample', k)\n            k = k.replace('head.', 'head.fc.')\n\n        out_dict[k] = v\n    return out_dict\n\n\ndef _create_swin_transformer(variant: str, pretrained: bool = False, **kwargs) -> SwinTransformer:\n    \"\"\"Create a Swin Transformer model.\n\n    Args:\n        variant: Model variant name.\n        pretrained: Load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    Returns:\n        SwinTransformer model instance.\n    \"\"\"\n    default_out_indices = tuple(i for i, _ in enumerate(kwargs.get('depths', (1, 1, 3, 1))))\n    out_indices = kwargs.pop('out_indices', default_out_indices)\n\n    model = build_model_with_cfg(\n        SwinTransformer, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        **kwargs)\n\n    return model\n\n\ndef _cfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Create default configuration for Swin Transformer models.\"\"\"\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.proj', 'classifier': 'head.fc',\n        'license': 'mit', **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'swin_small_patch4_window7_224.ms_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v1.0.8/swin_small_patch4_window7_224_22kto1k_finetune.pth', ),\n    'swin_base_patch4_window7_224.ms_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        
url='https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_base_patch4_window7_224_22kto1k.pth',),\n    'swin_base_patch4_window12_384.ms_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_base_patch4_window12_384_22kto1k.pth',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0),\n    'swin_large_patch4_window7_224.ms_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_large_patch4_window7_224_22kto1k.pth',),\n    'swin_large_patch4_window12_384.ms_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_large_patch4_window12_384_22kto1k.pth',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0),\n\n    'swin_tiny_patch4_window7_224.ms_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_tiny_patch4_window7_224.pth',),\n    'swin_small_patch4_window7_224.ms_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_small_patch4_window7_224.pth',),\n    'swin_base_patch4_window7_224.ms_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_base_patch4_window7_224.pth',),\n    'swin_base_patch4_window12_384.ms_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_base_patch4_window12_384.pth',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0),\n\n    # tiny 22k pretrain is worse than 1k, so moved after (untagged priority is based on order)\n    'swin_tiny_patch4_window7_224.ms_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        
url='https://github.com/SwinTransformer/storage/releases/download/v1.0.8/swin_tiny_patch4_window7_224_22kto1k_finetune.pth',),\n\n    'swin_tiny_patch4_window7_224.ms_in22k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v1.0.8/swin_tiny_patch4_window7_224_22k.pth',\n        num_classes=21841),\n    'swin_small_patch4_window7_224.ms_in22k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v1.0.8/swin_small_patch4_window7_224_22k.pth',\n        num_classes=21841),\n    'swin_base_patch4_window7_224.ms_in22k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_base_patch4_window7_224_22k.pth',\n        num_classes=21841),\n    'swin_base_patch4_window12_384.ms_in22k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_base_patch4_window12_384_22k.pth',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, num_classes=21841),\n    'swin_large_patch4_window7_224.ms_in22k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_large_patch4_window7_224_22k.pth',\n        num_classes=21841),\n    'swin_large_patch4_window12_384.ms_in22k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_large_patch4_window12_384_22k.pth',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, num_classes=21841),\n\n    'swin_s3_tiny_224.ms_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/s3_t-1d53f6a8.pth'),\n    'swin_s3_small_224.ms_in1k': _cfg(\n        hf_hub_id='timm/',\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/s3_s-3bb4c69d.pth'),\n    'swin_s3_base_224.ms_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/s3_b-a1e95db4.pth'),\n})\n\n\n@register_model\ndef swin_tiny_patch4_window7_224(pretrained=False, **kwargs) -> SwinTransformer:\n    \"\"\" Swin-T @ 224x224, trained ImageNet-1k\n    \"\"\"\n    model_args = dict(patch_size=4, window_size=7, embed_dim=96, depths=(2, 2, 6, 2), num_heads=(3, 6, 12, 24))\n    return _create_swin_transformer(\n        'swin_tiny_patch4_window7_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swin_small_patch4_window7_224(pretrained=False, **kwargs) -> SwinTransformer:\n    \"\"\" Swin-S @ 224x224\n    \"\"\"\n    model_args = dict(patch_size=4, window_size=7, embed_dim=96, depths=(2, 2, 18, 2), num_heads=(3, 6, 12, 24))\n    return _create_swin_transformer(\n        'swin_small_patch4_window7_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swin_base_patch4_window7_224(pretrained=False, **kwargs) -> SwinTransformer:\n    \"\"\" Swin-B @ 224x224\n    \"\"\"\n    model_args = dict(patch_size=4, window_size=7, embed_dim=128, depths=(2, 2, 18, 2), num_heads=(4, 8, 16, 32))\n    return _create_swin_transformer(\n        'swin_base_patch4_window7_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swin_base_patch4_window12_384(pretrained=False, **kwargs) -> SwinTransformer:\n    \"\"\" Swin-B @ 384x384\n    \"\"\"\n    model_args = dict(patch_size=4, window_size=12, embed_dim=128, depths=(2, 2, 18, 2), num_heads=(4, 8, 16, 32))\n    return _create_swin_transformer(\n        'swin_base_patch4_window12_384', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swin_large_patch4_window7_224(pretrained=False, **kwargs) -> SwinTransformer:\n    \"\"\" 
Swin-L @ 224x224\n    \"\"\"\n    model_args = dict(patch_size=4, window_size=7, embed_dim=192, depths=(2, 2, 18, 2), num_heads=(6, 12, 24, 48))\n    return _create_swin_transformer(\n        'swin_large_patch4_window7_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swin_large_patch4_window12_384(pretrained=False, **kwargs) -> SwinTransformer:\n    \"\"\" Swin-L @ 384x384\n    \"\"\"\n    model_args = dict(patch_size=4, window_size=12, embed_dim=192, depths=(2, 2, 18, 2), num_heads=(6, 12, 24, 48))\n    return _create_swin_transformer(\n        'swin_large_patch4_window12_384', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swin_s3_tiny_224(pretrained=False, **kwargs) -> SwinTransformer:\n    \"\"\" Swin-S3-T @ 224x224, https://arxiv.org/abs/2111.14725\n    \"\"\"\n    model_args = dict(\n        patch_size=4, window_size=(7, 7, 14, 7), embed_dim=96, depths=(2, 2, 6, 2), num_heads=(3, 6, 12, 24))\n    return _create_swin_transformer('swin_s3_tiny_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swin_s3_small_224(pretrained=False, **kwargs) -> SwinTransformer:\n    \"\"\" Swin-S3-S @ 224x224, https://arxiv.org/abs/2111.14725\n    \"\"\"\n    model_args = dict(\n        patch_size=4, window_size=(14, 14, 14, 7), embed_dim=96, depths=(2, 2, 18, 2), num_heads=(3, 6, 12, 24))\n    return _create_swin_transformer('swin_s3_small_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swin_s3_base_224(pretrained=False, **kwargs) -> SwinTransformer:\n    \"\"\" Swin-S3-B @ 224x224, https://arxiv.org/abs/2111.14725\n    \"\"\"\n    model_args = dict(\n        patch_size=4, window_size=(7, 7, 14, 7), embed_dim=96, depths=(2, 2, 30, 2), num_heads=(3, 6, 12, 24))\n    return _create_swin_transformer('swin_s3_base_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\nregister_model_deprecations(__name__, {\n    
'swin_base_patch4_window7_224_in22k': 'swin_base_patch4_window7_224.ms_in22k',\n    'swin_base_patch4_window12_384_in22k': 'swin_base_patch4_window12_384.ms_in22k',\n    'swin_large_patch4_window7_224_in22k': 'swin_large_patch4_window7_224.ms_in22k',\n    'swin_large_patch4_window12_384_in22k': 'swin_large_patch4_window12_384.ms_in22k',\n})\n"
  },
  {
    "path": "timm/models/swin_transformer_v2.py",
    "content": "\"\"\" Swin Transformer V2\nA PyTorch impl of : `Swin Transformer V2: Scaling Up Capacity and Resolution`\n    - https://arxiv.org/abs/2111.09883\n\nCode/weights from https://github.com/microsoft/Swin-Transformer, original copyright/license info below\n\nModifications and additions for timm hacked together by / Copyright 2022, Ross Wightman\n\"\"\"\n# --------------------------------------------------------\n# Swin Transformer V2\n# Copyright (c) 2022 Microsoft\n# Licensed under The MIT License [see LICENSE for details]\n# Written by Ze Liu\n# --------------------------------------------------------\nimport math\nfrom functools import partial\nfrom typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import PatchEmbed, Mlp, DropPath, calculate_drop_path_rates, to_2tuple, trunc_normal_, ClassifierHead,\\\n    resample_patch_embed, ndgrid, get_act_layer, LayerType\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_function\nfrom ._manipulate import checkpoint\nfrom ._registry import generate_default_cfgs, register_model, register_model_deprecations\n\n__all__ = ['SwinTransformerV2']  # model_registry will add each entrypoint fn to this\n\n_int_or_tuple_2_t = Union[int, Tuple[int, int]]\n\n\ndef window_partition(\n        x: torch.Tensor,\n        window_size: Tuple[int, int],\n) -> torch.Tensor:\n    \"\"\"Partition into non-overlapping windows.\n\n    Args:\n        x: Input tensor of shape (B, H, W, C).\n        window_size: Window size (height, width).\n\n    Returns:\n        Windows tensor of shape (num_windows*B, window_size[0], window_size[1], C).\n    \"\"\"\n    B, H, W, C = x.shape\n    x = x.view(B, H // window_size[0], window_size[0], W // window_size[1], window_size[1], C)\n    
windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size[0], window_size[1], C)\n    return windows\n\n\n@register_notrace_function  # reason: int argument is a Proxy\ndef window_reverse(\n        windows: torch.Tensor,\n        window_size: Tuple[int, int],\n        img_size: Tuple[int, int],\n) -> torch.Tensor:\n    \"\"\"Merge windows back to feature map.\n\n    Args:\n        windows: Windows tensor of shape (num_windows * B, window_size[0], window_size[1], C).\n        window_size: Window size (height, width).\n        img_size: Image size (height, width).\n\n    Returns:\n        Feature map tensor of shape (B, H, W, C).\n    \"\"\"\n    H, W = img_size\n    C = windows.shape[-1]\n    x = windows.view(-1, H // window_size[0], W // window_size[1], window_size[0], window_size[1], C)\n    x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, H, W, C)\n    return x\n\n\nclass WindowAttention(nn.Module):\n    \"\"\"Window based multi-head self attention (W-MSA) module with relative position bias.\n\n    Supports both shifted and non-shifted window attention with continuous relative\n    position bias and cosine attention.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            window_size: Tuple[int, int],\n            num_heads: int,\n            qkv_bias: bool = True,\n            qkv_bias_separate: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            pretrained_window_size: Tuple[int, int] = (0, 0),\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize window attention module.\n\n        Args:\n            dim: Number of input channels.\n            window_size: The height and width of the window.\n            num_heads: Number of attention heads.\n            qkv_bias: If True, add a learnable bias to query, key, value.\n            qkv_bias_separate: If True, use separate bias for q, k, v projections.\n            attn_drop: 
Dropout ratio of attention weight.\n            proj_drop: Dropout ratio of output.\n            pretrained_window_size: The height and width of the window in pre-training.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.window_size = window_size  # Wh, Ww\n        self.pretrained_window_size = to_2tuple(pretrained_window_size)\n        self.num_heads = num_heads\n        self.qkv_bias_separate = qkv_bias_separate\n\n        self.logit_scale = nn.Parameter(torch.empty((num_heads, 1, 1), **dd))\n\n        # mlp to generate continuous relative position bias\n        self.cpb_mlp = nn.Sequential(\n            nn.Linear(2, 512, bias=True, **dd),\n            nn.ReLU(inplace=True),\n            nn.Linear(512, num_heads, bias=False, **dd)\n        )\n\n        self.qkv = nn.Linear(dim, dim * 3, bias=False, **dd)\n        if qkv_bias:\n            self.q_bias = nn.Parameter(torch.empty(dim, **dd))\n            self.register_buffer('k_bias', torch.empty(dim, **dd), persistent=False)\n            self.v_bias = nn.Parameter(torch.empty(dim, **dd))\n        else:\n            self.q_bias = None\n            self.k_bias = None\n            self.v_bias = None\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n        self.softmax = nn.Softmax(dim=-1)\n\n        # Register empty buffers with correct shapes\n        win_h, win_w = self.window_size\n        self.register_buffer(\n            \"relative_coords_table\",\n            torch.empty(1, 2 * win_h - 1, 2 * win_w - 1, 2, **dd),\n            persistent=False,\n        )\n        self.register_buffer(\n            \"relative_position_index\",\n            torch.empty(win_h * win_w, win_h * win_w, device=device, dtype=torch.long),\n            persistent=False,\n        )\n\n        # TODO: skip init when on meta device when safe to do so\n        
self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        nn.init.constant_(self.logit_scale, math.log(10))\n        if self.q_bias is not None:\n            nn.init.zeros_(self.q_bias)\n            nn.init.zeros_(self.v_bias)\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        if self.k_bias is not None:\n            self.k_bias.zero_()\n        relative_coords_table, relative_position_index = self._make_pair_wise_relative_positions(\n            device=self.proj.weight.device, dtype=self.proj.weight.dtype\n        )\n        self.relative_coords_table.copy_(relative_coords_table)\n        self.relative_position_index.copy_(relative_position_index)\n\n    def _make_pair_wise_relative_positions(\n            self,\n            device=None,\n            dtype=None,\n    ) -> Tuple[torch.Tensor, torch.Tensor]:\n        \"\"\"Compute pair-wise relative position index and coordinates table.\n\n        Returns:\n            Tuple of (relative_coords_table, relative_position_index)\n        \"\"\"\n        # get relative_coords_table\n        relative_coords_h = torch.arange(\n            -(self.window_size[0] - 1), self.window_size[0], device=device, dtype=torch.float32)\n        relative_coords_w = torch.arange(\n            -(self.window_size[1] - 1), self.window_size[1], device=device, dtype=torch.float32)\n        relative_coords_table = torch.stack(ndgrid(relative_coords_h, relative_coords_w))\n        relative_coords_table = relative_coords_table.permute(1, 2, 0).contiguous().unsqueeze(0)  # 1, 2*Wh-1, 2*Ww-1, 2\n        if self.pretrained_window_size[0] > 0:\n            relative_coords_table[:, :, :, 0] /= (self.pretrained_window_size[0] - 1)\n            relative_coords_table[:, :, :, 1] /= (self.pretrained_window_size[1] - 1)\n        else:\n            relative_coords_table[:, :, :, 0] /= 
(self.window_size[0] - 1)\n            relative_coords_table[:, :, :, 1] /= (self.window_size[1] - 1)\n        relative_coords_table *= 8  # normalize to -8, 8\n        relative_coords_table = torch.sign(relative_coords_table) * torch.log2(\n            torch.abs(relative_coords_table) + 1.0) / math.log2(8)\n        relative_coords_table = relative_coords_table.to(dtype=dtype)\n\n        # get pair-wise relative position index for each token inside the window\n        coords_h = torch.arange(self.window_size[0], device=device, dtype=torch.long)\n        coords_w = torch.arange(self.window_size[1], device=device, dtype=torch.long)\n        coords = torch.stack(ndgrid(coords_h, coords_w))  # 2, Wh, Ww\n        coords_flatten = torch.flatten(coords, 1)  # 2, Wh*Ww\n        relative_coords = coords_flatten[:, :, None] - coords_flatten[:, None, :]  # 2, Wh*Ww, Wh*Ww\n        relative_coords = relative_coords.permute(1, 2, 0).contiguous()  # Wh*Ww, Wh*Ww, 2\n        relative_coords[:, :, 0] += self.window_size[0] - 1  # shift to start from 0\n        relative_coords[:, :, 1] += self.window_size[1] - 1\n        relative_coords[:, :, 0] *= 2 * self.window_size[1] - 1\n        relative_position_index = relative_coords.sum(-1)  # Wh*Ww, Wh*Ww\n\n        return relative_coords_table, relative_position_index\n\n    def set_window_size(self, window_size: Tuple[int, int]) -> None:\n        \"\"\"Update window size and regenerate relative position tables.\n\n        Args:\n            window_size: New window size (height, width).\n        \"\"\"\n        window_size = to_2tuple(window_size)\n        if window_size != self.window_size:\n            assert self.relative_coords_table is not None\n            device = self.relative_coords_table.device\n            dtype = self.relative_coords_table.dtype\n            self.window_size = window_size\n            relative_coords_table, relative_position_index = \\\n                self._make_pair_wise_relative_positions(device=device, 
dtype=dtype)\n            self.register_buffer(\"relative_coords_table\", relative_coords_table, persistent=False)\n            self.register_buffer(\"relative_position_index\", relative_position_index, persistent=False)\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n    def forward(self, x: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor:\n        \"\"\"Forward pass of window attention.\n\n        Args:\n            x: Input features with shape of (num_windows*B, N, C).\n            mask: Attention mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None.\n\n        Returns:\n            Output features with shape of (num_windows*B, N, C).\n        \"\"\"\n        B_, N, C = x.shape\n\n        if self.q_bias is None:\n            qkv = self.qkv(x)\n        else:\n            qkv_bias = torch.cat((self.q_bias, self.k_bias, self.v_bias))\n            if self.qkv_bias_separate:\n                qkv = self.qkv(x)\n                qkv += qkv_bias\n            else:\n                qkv = F.linear(x, weight=self.qkv.weight, bias=qkv_bias)\n        qkv = qkv.reshape(B_, N, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4)\n        q, k, v = qkv.unbind(0)\n\n        # cosine attention\n        attn = (F.normalize(q, dim=-1) @ F.normalize(k, dim=-1).transpose(-2, -1))\n        logit_scale = torch.clamp(self.logit_scale, max=math.log(1. 
/ 0.01)).exp()\n        attn = attn * logit_scale\n\n        relative_position_bias_table = self.cpb_mlp(self.relative_coords_table).view(-1, self.num_heads)\n        relative_position_bias = relative_position_bias_table[self.relative_position_index.view(-1)].view(\n            self.window_size[0] * self.window_size[1], self.window_size[0] * self.window_size[1], -1)  # Wh*Ww,Wh*Ww,nH\n        relative_position_bias = relative_position_bias.permute(2, 0, 1).contiguous()  # nH, Wh*Ww, Wh*Ww\n        relative_position_bias = 16 * torch.sigmoid(relative_position_bias)\n        attn = attn + relative_position_bias.unsqueeze(0)\n\n        if mask is not None:\n            num_win = mask.shape[0]\n            attn = attn.view(-1, num_win, self.num_heads, N, N) + mask.unsqueeze(1).unsqueeze(0)\n            attn = attn.view(-1, self.num_heads, N, N)\n            attn = self.softmax(attn)\n        else:\n            attn = self.softmax(attn)\n\n        attn = self.attn_drop(attn)\n\n        x = (attn @ v).transpose(1, 2).reshape(B_, N, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n\nclass SwinTransformerV2Block(nn.Module):\n    \"\"\"Swin Transformer V2 Block.\n\n    A standard transformer block with window attention and shifted window attention\n    for modeling long-range dependencies efficiently.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            input_resolution: _int_or_tuple_2_t,\n            num_heads: int,\n            window_size: _int_or_tuple_2_t = 7,\n            shift_size: _int_or_tuple_2_t = 0,\n            always_partition: bool = False,\n            dynamic_mask: bool = False,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: LayerType = \"gelu\",\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            
pretrained_window_size: _int_or_tuple_2_t = 0,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            dim: Number of input channels.\n            input_resolution: Input resolution.\n            num_heads: Number of attention heads.\n            window_size: Window size.\n            shift_size: Shift size for SW-MSA.\n            always_partition: Always partition into full windows and shift\n            mlp_ratio: Ratio of mlp hidden dim to embedding dim.\n            qkv_bias: If True, add a learnable bias to query, key, value.\n            proj_drop: Dropout rate.\n            attn_drop: Attention dropout rate.\n            drop_path: Stochastic depth rate.\n            act_layer: Activation layer.\n            norm_layer: Normalization layer.\n            pretrained_window_size: Window size in pretraining.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.input_resolution = to_2tuple(input_resolution)\n        self.num_heads = num_heads\n        self.target_shift_size = to_2tuple(shift_size)  # store for later resize\n        self.always_partition = always_partition\n        self.dynamic_mask = dynamic_mask\n        self.window_size, self.shift_size = self._calc_window_shift(window_size, shift_size)\n        self.window_area = self.window_size[0] * self.window_size[1]\n        self.mlp_ratio = mlp_ratio\n        act_layer = get_act_layer(act_layer)\n\n        self.attn = WindowAttention(\n            dim,\n            window_size=to_2tuple(self.window_size),\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            pretrained_window_size=to_2tuple(pretrained_window_size),\n            **dd,\n        )\n        self.norm1 = norm_layer(dim, **dd)\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n        self.mlp = Mlp(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n        self.norm2 = norm_layer(dim, **dd)\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.register_buffer(\n            \"attn_mask\",\n            None if self.dynamic_mask else self.get_attn_mask(**dd),\n            persistent=False,\n        )\n\n    def get_attn_mask(\n            self,\n            x: Optional[torch.Tensor] = None,\n            device: Optional[torch.device] = None,\n            dtype: Optional[torch.dtype] = None,\n    ) -> Optional[torch.Tensor]:\n        \"\"\"Generate attention mask for shifted window attention.\n\n        Args:\n            x: Input tensor for dynamic shape calculation.\n\n        Returns:\n            Attention mask or None if no shift.\n        \"\"\"\n        if any(self.shift_size):\n            # calculate attention mask for SW-MSA\n            if x is None:\n                img_mask = torch.zeros((1, *self.input_resolution, 1), device=device, dtype=dtype)  # 1 H W 1\n            else:\n                img_mask = torch.zeros((1, x.shape[1], x.shape[2], 1), device=x.device, dtype=x.dtype)  # 1 H W 1\n            cnt = 0\n            for h in (\n                    (0, -self.window_size[0]),\n                    (-self.window_size[0], -self.shift_size[0]),\n                    (-self.shift_size[0], None),\n            ):\n                for w in (\n                        (0, -self.window_size[1]),\n                        (-self.window_size[1], -self.shift_size[1]),\n                        (-self.shift_size[1], None),\n                ):\n                    img_mask[:, h[0]:h[1], w[0]:w[1], :] = cnt\n                    cnt += 1\n            mask_windows = window_partition(img_mask, self.window_size)  # nW, window_size, window_size, 1\n           
 mask_windows = mask_windows.view(-1, self.window_area)\n            attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2)\n            attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill(attn_mask == 0, float(0.0))\n        else:\n            attn_mask = None\n        return attn_mask\n\n    def _calc_window_shift(\n            self,\n            target_window_size: _int_or_tuple_2_t,\n            target_shift_size: Optional[_int_or_tuple_2_t] = None,\n    ) -> Tuple[Tuple[int, int], Tuple[int, int]]:\n        \"\"\"Calculate window size and shift size based on input resolution.\n\n        Args:\n            target_window_size: Target window size.\n            target_shift_size: Target shift size.\n\n        Returns:\n            Tuple of (adjusted_window_size, adjusted_shift_size).\n        \"\"\"\n        target_window_size = to_2tuple(target_window_size)\n        if target_shift_size is None:\n            # if passed value is None, recalculate from default window_size // 2 if it was active\n            target_shift_size = self.target_shift_size\n            if any(target_shift_size):\n                # if there was previously a non-zero shift, recalculate based on current window_size\n                target_shift_size = (target_window_size[0] // 2, target_window_size[1] // 2)\n        else:\n            target_shift_size = to_2tuple(target_shift_size)\n\n        if self.always_partition:\n            return target_window_size, target_shift_size\n\n        target_window_size = to_2tuple(target_window_size)\n        target_shift_size = to_2tuple(target_shift_size)\n        window_size = [r if r <= w else w for r, w in zip(self.input_resolution, target_window_size)]\n        shift_size = [0 if r <= w else s for r, w, s in zip(self.input_resolution, window_size, target_shift_size)]\n        return tuple(window_size), tuple(shift_size)\n\n    def set_input_size(\n            self,\n            feat_size: Tuple[int, int],\n        
    window_size: Tuple[int, int],\n            always_partition: Optional[bool] = None,\n    ) -> None:\n        \"\"\"Set input size and update window configuration.\n\n        Args:\n            feat_size: New feature map size.\n            window_size: New window size.\n            always_partition: Override always_partition setting.\n        \"\"\"\n        # Update input resolution\n        self.input_resolution = feat_size\n        if always_partition is not None:\n            self.always_partition = always_partition\n        self.window_size, self.shift_size = self._calc_window_shift(to_2tuple(window_size))\n        self.window_area = self.window_size[0] * self.window_size[1]\n        self.attn.set_window_size(self.window_size)\n        device = self.attn_mask.device if self.attn_mask is not None else None\n        dtype = self.attn_mask.dtype if self.attn_mask is not None else None\n        self.register_buffer(\n            \"attn_mask\",\n            None if self.dynamic_mask else self.get_attn_mask(device=device, dtype=dtype),\n            persistent=False,\n        )\n\n    def _attn(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Apply windowed attention with optional shift.\n\n        Args:\n            x: Input tensor of shape (B, H, W, C).\n\n        Returns:\n            Output tensor of shape (B, H, W, C).\n        \"\"\"\n        B, H, W, C = x.shape\n\n        # cyclic shift\n        has_shift = any(self.shift_size)\n        if has_shift:\n            shifted_x = torch.roll(x, shifts=(-self.shift_size[0], -self.shift_size[1]), dims=(1, 2))\n        else:\n            shifted_x = x\n\n        pad_h = (self.window_size[0] - H % self.window_size[0]) % self.window_size[0]\n        pad_w = (self.window_size[1] - W % self.window_size[1]) % self.window_size[1]\n        shifted_x = torch.nn.functional.pad(shifted_x, (0, 0, 0, pad_w, 0, pad_h))\n        _, Hp, Wp, _ = shifted_x.shape\n\n        # partition windows\n        x_windows = 
window_partition(shifted_x, self.window_size)  # nW*B, window_size, window_size, C\n        x_windows = x_windows.view(-1, self.window_area, C)  # nW*B, window_size*window_size, C\n\n        # W-MSA/SW-MSA\n        if getattr(self, 'dynamic_mask', False):\n            attn_mask = self.get_attn_mask(shifted_x)\n        else:\n            attn_mask = self.attn_mask\n        attn_windows = self.attn(x_windows, mask=attn_mask)  # nW*B, window_size*window_size, C\n\n        # merge windows\n        attn_windows = attn_windows.view(-1, self.window_size[0], self.window_size[1], C)\n        shifted_x = window_reverse(attn_windows, self.window_size, (Hp, Wp))  # B H' W' C\n        shifted_x = shifted_x[:, :H, :W, :].contiguous()\n\n        # reverse cyclic shift\n        if has_shift:\n            x = torch.roll(shifted_x, shifts=self.shift_size, dims=(1, 2))\n        else:\n            x = shifted_x\n        return x\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        B, H, W, C = x.shape\n        x = x + self.drop_path1(self.norm1(self._attn(x)))\n        x = x.reshape(B, -1, C)\n        x = x + self.drop_path2(self.norm2(self.mlp(x)))\n        x = x.reshape(B, H, W, C)\n        return x\n\n\nclass PatchMerging(nn.Module):\n    \"\"\"Patch Merging Layer.\n\n    Merges 2x2 neighboring patches and projects to higher dimension,\n    effectively downsampling the feature maps.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            out_dim: Optional[int] = None,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            dim (int): Number of input channels.\n            out_dim (int): Number of output channels (or 2 * dim if None)\n            norm_layer (nn.Module, optional): Normalization layer.  
Default: nn.LayerNorm\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.out_dim = out_dim or 2 * dim\n        self.reduction = nn.Linear(4 * dim, self.out_dim, bias=False, **dd)\n        self.norm = norm_layer(self.out_dim, **dd)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        B, H, W, C = x.shape\n\n        pad_values = (0, 0, 0, W % 2, 0, H % 2)\n        x = nn.functional.pad(x, pad_values)\n        _, H, W, _ = x.shape\n\n        x = x.reshape(B, H // 2, 2, W // 2, 2, C).permute(0, 1, 3, 4, 2, 5).flatten(3)\n        x = self.reduction(x)\n        x = self.norm(x)\n        return x\n\n\nclass SwinTransformerV2Stage(nn.Module):\n    \"\"\"A Swin Transformer V2 Stage.\n\n    A single stage consisting of multiple Swin Transformer blocks with\n    optional downsampling at the beginning.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            out_dim: int,\n            input_resolution: _int_or_tuple_2_t,\n            depth: int,\n            num_heads: int,\n            window_size: _int_or_tuple_2_t,\n            always_partition: bool = False,\n            dynamic_mask: bool = False,\n            downsample: bool = False,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Union[str, Type[nn.Module]] = 'gelu',\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            pretrained_window_size: _int_or_tuple_2_t = 0,\n            output_nchw: bool = False,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"\n        Args:\n            dim: Number of input channels.\n            out_dim: Number of output channels.\n            input_resolution: Input resolution.\n            depth: Number of blocks.\n            num_heads: Number of attention 
heads.\n            window_size: Local window size.\n            always_partition: Always partition into full windows and shift\n            dynamic_mask: Create attention mask in forward based on current input size\n            downsample: Use downsample layer at start of the block.\n            mlp_ratio: Ratio of mlp hidden dim to embedding dim.\n            qkv_bias: If True, add a learnable bias to query, key, value.\n            proj_drop: Projection dropout rate\n            attn_drop: Attention dropout rate.\n            drop_path: Stochastic depth rate.\n            act_layer: Activation layer type.\n            norm_layer: Normalization layer.\n            pretrained_window_size: Local window size in pretraining.\n            output_nchw: Output tensors on NCHW format instead of NHWC.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.input_resolution = input_resolution\n        self.output_resolution = tuple(i // 2 for i in input_resolution) if downsample else input_resolution\n        self.depth = depth\n        self.output_nchw = output_nchw\n        self.grad_checkpointing = False\n        window_size = to_2tuple(window_size)\n        shift_size = tuple([w // 2 for w in window_size])\n\n        # patch merging / downsample layer\n        if downsample:\n            self.downsample = PatchMerging(dim=dim, out_dim=out_dim, norm_layer=norm_layer, **dd)\n        else:\n            assert dim == out_dim\n            self.downsample = nn.Identity()\n\n        # build blocks\n        self.blocks = nn.ModuleList([\n            SwinTransformerV2Block(\n                dim=out_dim,\n                input_resolution=self.output_resolution,\n                num_heads=num_heads,\n                window_size=window_size,\n                shift_size=0 if (i % 2 == 0) else shift_size,\n                always_partition=always_partition,\n                dynamic_mask=dynamic_mask,\n         
       mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                proj_drop=proj_drop,\n                attn_drop=attn_drop,\n                drop_path=drop_path[i] if isinstance(drop_path, list) else drop_path,\n                act_layer=act_layer,\n                norm_layer=norm_layer,\n                pretrained_window_size=pretrained_window_size,\n                **dd,\n            )\n            for i in range(depth)])\n\n    def set_input_size(\n            self,\n            feat_size: Tuple[int, int],\n            window_size: int,\n            always_partition: Optional[bool] = None,\n    ) -> None:\n        \"\"\"Update resolution, window size and relative positions.\n\n        Args:\n            feat_size: New input (feature) resolution.\n            window_size: New window size.\n            always_partition: Always partition / shift the window.\n        \"\"\"\n        self.input_resolution = feat_size\n        if isinstance(self.downsample, nn.Identity):\n            self.output_resolution = feat_size\n        else:\n            assert isinstance(self.downsample, PatchMerging)\n            self.output_resolution = tuple(i // 2 for i in feat_size)\n        for block in self.blocks:\n            block.set_input_size(\n                feat_size=self.output_resolution,\n                window_size=window_size,\n                always_partition=always_partition,\n            )\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through the stage.\n\n        Args:\n            x: Input tensor of shape (B, H, W, C).\n\n        Returns:\n            Output tensor of shape (B, H', W', C').\n        \"\"\"\n        x = self.downsample(x)\n\n        for blk in self.blocks:\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x)\n            else:\n                x = blk(x)\n        return x\n\n    def _init_respostnorm(self) -> None:\n        
\"\"\"Initialize residual post-normalization weights.\"\"\"\n        for blk in self.blocks:\n            nn.init.constant_(blk.norm1.bias, 0)\n            nn.init.constant_(blk.norm1.weight, 0)\n            nn.init.constant_(blk.norm2.bias, 0)\n            nn.init.constant_(blk.norm2.weight, 0)\n\n\nclass SwinTransformerV2(nn.Module):\n    \"\"\"Swin Transformer V2.\n\n    A hierarchical vision transformer using shifted windows for efficient\n    self-attention computation with continuous position bias.\n\n    A PyTorch impl of : `Swin Transformer V2: Scaling Up Capacity and Resolution`\n        - https://arxiv.org/abs/2111.09883\n    \"\"\"\n\n    def __init__(\n            self,\n            img_size: _int_or_tuple_2_t = 224,\n            patch_size: int = 4,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            embed_dim: int = 96,\n            depths: Tuple[int, ...] = (2, 2, 6, 2),\n            num_heads: Tuple[int, ...] = (3, 6, 12, 24),\n            window_size: _int_or_tuple_2_t = 7,\n            always_partition: bool = False,\n            strict_img_size: bool = True,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.1,\n            act_layer: Union[str, Callable] = 'gelu',\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            pretrained_window_sizes: Tuple[int, ...] 
= (0, 0, 0, 0),\n            device=None,\n            dtype=None,\n            **kwargs,\n    ):\n        \"\"\"\n        Args:\n            img_size: Input image size.\n            patch_size: Patch size.\n            in_chans: Number of input image channels.\n            num_classes: Number of classes for classification head.\n            embed_dim: Patch embedding dimension.\n            depths: Depth of each Swin Transformer stage (layer).\n            num_heads: Number of attention heads in different layers.\n            window_size: Window size.\n            mlp_ratio: Ratio of mlp hidden dim to embedding dim.\n            qkv_bias: If True, add a learnable bias to query, key, value.\n            drop_rate: Head dropout rate.\n            proj_drop_rate: Projection dropout rate.\n            attn_drop_rate: Attention dropout rate.\n            drop_path_rate: Stochastic depth rate.\n            norm_layer: Normalization layer.\n            act_layer: Activation layer type.\n            patch_norm: If True, add normalization after patch embedding.\n            pretrained_window_sizes: Pretrained window sizes of each layer.\n            output_fmt: Output tensor format if not None, otherwise output 'NHWC' by default.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        assert global_pool in ('', 'avg')\n        self.global_pool = global_pool\n        self.output_fmt = 'NHWC'\n        self.num_layers = len(depths)\n        self.embed_dim = embed_dim\n        self.num_features = self.head_hidden_size = int(embed_dim * 2 ** (self.num_layers - 1))\n        self.feature_info = []\n\n        if not isinstance(embed_dim, (tuple, list)):\n            embed_dim = [int(embed_dim * 2 ** i) for i in range(self.num_layers)]\n\n        # split image into non-overlapping patches\n        self.patch_embed = PatchEmbed(\n            img_size=img_size,\n  
          patch_size=patch_size,\n            in_chans=in_chans,\n            embed_dim=embed_dim[0],\n            norm_layer=norm_layer,\n            strict_img_size=strict_img_size,\n            output_fmt='NHWC',\n            **dd,\n        )\n        grid_size = self.patch_embed.grid_size\n\n        dpr = calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)\n        layers = []\n        in_dim = embed_dim[0]\n        scale = 1\n        for i in range(self.num_layers):\n            out_dim = embed_dim[i]\n            layers += [SwinTransformerV2Stage(\n                dim=in_dim,\n                out_dim=out_dim,\n                input_resolution=(grid_size[0] // scale, grid_size[1] // scale),\n                depth=depths[i],\n                downsample=i > 0,\n                num_heads=num_heads[i],\n                window_size=window_size,\n                always_partition=always_partition,\n                dynamic_mask=not strict_img_size,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dpr[i],\n                act_layer=act_layer,\n                norm_layer=norm_layer,\n                pretrained_window_size=pretrained_window_sizes[i],\n                **dd,\n            )]\n            in_dim = out_dim\n            if i > 0:\n                scale *= 2\n            self.feature_info += [dict(num_chs=out_dim, reduction=4 * scale, module=f'layers.{i}')]\n\n        self.layers = nn.Sequential(*layers)\n        self.norm = norm_layer(self.num_features, **dd)\n        self.head = ClassifierHead(\n            self.num_features,\n            num_classes,\n            pool_type=global_pool,\n            drop_rate=drop_rate,\n            input_fmt=self.output_fmt,\n            **dd,\n        )\n\n        # TODO: skip init when on meta device when safe to do so\n        self.init_weights(needs_reset=False)\n\n    
def init_weights(self, needs_reset: bool = True) -> None:\n        \"\"\"Initialize model weights.\n\n        Args:\n            needs_reset: If True, call reset_parameters() on modules (default for after to_empty()).\n                If False, skip reset_parameters() (for __init__ where modules already self-initialized).\n        \"\"\"\n        self.apply(partial(self._init_weights, needs_reset=needs_reset))\n        for bly in self.layers:\n            bly._init_respostnorm()\n\n    def _init_weights(self, m: nn.Module, needs_reset: bool = True) -> None:\n        \"\"\"Initialize weights for Linear layers.\n\n        Args:\n            m: Module to initialize.\n            needs_reset: Whether to call reset_parameters() on modules.\n        \"\"\"\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=.02)\n            if m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n        elif needs_reset and hasattr(m, 'reset_parameters'):\n            m.reset_parameters()\n\n    def set_input_size(\n            self,\n            img_size: Optional[Tuple[int, int]] = None,\n            patch_size: Optional[Tuple[int, int]] = None,\n            window_size: Optional[Tuple[int, int]] = None,\n            window_ratio: Optional[int] = 8,\n            always_partition: Optional[bool] = None,\n    ):\n        \"\"\"Updates the image resolution, window size, and so the pair-wise relative positions.\n\n        Args:\n            img_size (Optional[Tuple[int, int]]): New input resolution, if None current resolution is used\n            patch_size (Optional[Tuple[int, int]): New patch size, if None use current patch size\n            window_size (Optional[int]): New window size, if None based on new_img_size // window_div\n            window_ratio (int): divisor for calculating window size from patch grid size\n            always_partition: always partition / shift windows even if feat size is < window\n        \"\"\"\n        if img_size 
is not None or patch_size is not None:\n            self.patch_embed.set_input_size(img_size=img_size, patch_size=patch_size)\n            grid_size = self.patch_embed.grid_size\n\n        if window_size is None and window_ratio is not None:\n            window_size = tuple([s // window_ratio for s in grid_size])\n\n        for index, stage in enumerate(self.layers):\n            stage_scale = 2 ** max(index - 1, 0)\n            stage.set_input_size(\n                feat_size=(grid_size[0] // stage_scale, grid_size[1] // stage_scale),\n                window_size=window_size,\n                always_partition=always_partition,\n            )\n\n    @torch.jit.ignore\n    def no_weight_decay(self) -> Set[str]:\n        \"\"\"Get parameter names that should not use weight decay.\n\n        Returns:\n            Set of parameter names to exclude from weight decay.\n        \"\"\"\n        nod = set()\n        for n, m in self.named_modules():\n            if any([kw in n for kw in (\"cpb_mlp\", \"logit_scale\")]):\n                nod.add(n)\n        return nod\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        \"\"\"Create parameter group matcher for optimizer parameter groups.\n\n        Args:\n            coarse: If True, use coarse grouping.\n\n        Returns:\n            Dictionary mapping group names to regex patterns.\n        \"\"\"\n        return dict(\n            stem=r'^absolute_pos_embed|patch_embed',  # stem and embed\n            blocks=r'^layers\\.(\\d+)' if coarse else [\n                (r'^layers\\.(\\d+).downsample', (0,)),\n                (r'^layers\\.(\\d+)\\.\\w+\\.(\\d+)', None),\n                (r'^norm', (99999,)),\n            ]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Enable or disable gradient checkpointing.\n\n        Args:\n            enable: If True, enable gradient checkpointing.\n        \"\"\"\n    
    for l in self.layers:\n            l.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        \"\"\"Get the classifier head.\n\n        Returns:\n            The classification head module.\n        \"\"\"\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None) -> None:\n        \"\"\"Reset the classification head.\n\n        Args:\n            num_classes: Number of classes for new head.\n            global_pool: Global pooling type.\n        \"\"\"\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.layers), indices)\n\n        # forward pass\n        x = self.patch_embed(x)\n\n        num_stages = len(self.layers)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.layers\n        else:\n        
    stages = self.layers[:max_index + 1]\n        for i, stage in enumerate(stages):\n            x = stage(x)\n            if i in take_indices:\n                if norm and i == num_stages - 1:\n                    x_inter = self.norm(x)  # applying final norm last intermediate\n                else:\n                    x_inter = x\n                x_inter = x_inter.permute(0, 3, 1, 2).contiguous()\n                intermediates.append(x_inter)\n\n        if intermediates_only:\n            return intermediates\n\n        x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.layers), indices)\n        self.layers = self.layers[:max_index + 1]  # truncate blocks\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through feature extraction layers.\n\n        Args:\n            x: Input tensor of shape (B, C, H, W).\n\n        Returns:\n            Feature tensor of shape (B, H', W', C).\n        \"\"\"\n        x = self.patch_embed(x)\n        x = self.layers(x)\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        \"\"\"Forward pass through classification head.\n\n        Args:\n            x: Feature tensor of shape (B, H, W, C).\n            pre_logits: If True, return features before final linear layer.\n\n        Returns:\n            Logits tensor of shape (B, num_classes) or pre-logits.\n        \"\"\"\n        return self.head(x, 
pre_logits=True) if pre_logits else self.head(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through the model.\n\n        Args:\n            x: Input tensor of shape (B, C, H, W).\n\n        Returns:\n            Logits tensor of shape (B, num_classes).\n        \"\"\"\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(state_dict: Dict[str, torch.Tensor], model: nn.Module) -> Dict[str, torch.Tensor]:\n    \"\"\"Filter and process checkpoint state dict for loading.\n\n    Handles resizing of patch embeddings and relative position tables\n    when model size differs from checkpoint.\n\n    Args:\n        state_dict: Checkpoint state dictionary.\n        model: Target model to load weights into.\n\n    Returns:\n        Filtered state dictionary.\n    \"\"\"\n    state_dict = state_dict.get('model', state_dict)\n    state_dict = state_dict.get('state_dict', state_dict)\n    native_checkpoint = 'head.fc.weight' in state_dict\n    out_dict = {}\n    import re\n    for k, v in state_dict.items():\n        if any([n in k for n in ('relative_position_index', 'relative_coords_table', 'attn_mask')]):\n            continue  # skip buffers that should not be persistent\n\n        if 'patch_embed.proj.weight' in k:\n            _, _, H, W = model.patch_embed.proj.weight.shape\n            if v.shape[-2] != H or v.shape[-1] != W:\n                v = resample_patch_embed(\n                    v,\n                    (H, W),\n                    interpolation='bicubic',\n                    antialias=True,\n                    verbose=True,\n                )\n\n        if not native_checkpoint:\n            # skip layer remapping for updated checkpoints\n            k = re.sub(r'layers.(\\d+).downsample', lambda x: f'layers.{int(x.group(1)) + 1}.downsample', k)\n            k = k.replace('head.', 'head.fc.')\n        out_dict[k] = v\n\n    return out_dict\n\n\ndef 
_create_swin_transformer_v2(variant: str, pretrained: bool = False, **kwargs) -> SwinTransformerV2:\n    \"\"\"Create a Swin Transformer V2 model.\n\n    Args:\n        variant: Model variant name.\n        pretrained: If True, load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    Returns:\n        SwinTransformerV2 model instance.\n    \"\"\"\n    default_out_indices = tuple(i for i, _ in enumerate(kwargs.get('depths', (1, 1, 1, 1))))\n    out_indices = kwargs.pop('out_indices', default_out_indices)\n\n    model = build_model_with_cfg(\n        SwinTransformerV2, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        **kwargs)\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 256, 256), 'pool_size': (8, 8),\n        'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.proj', 'classifier': 'head.fc',\n        'license': 'mit', **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'swinv2_base_window12to16_192to256.ms_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v2.0.0/swinv2_base_patch4_window12to16_192to256_22kto1k_ft.pth',\n    ),\n    'swinv2_base_window12to24_192to384.ms_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v2.0.0/swinv2_base_patch4_window12to24_192to384_22kto1k_ft.pth',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0,\n    ),\n    'swinv2_large_window12to16_192to256.ms_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        
url='https://github.com/SwinTransformer/storage/releases/download/v2.0.0/swinv2_large_patch4_window12to16_192to256_22kto1k_ft.pth',\n    ),\n    'swinv2_large_window12to24_192to384.ms_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v2.0.0/swinv2_large_patch4_window12to24_192to384_22kto1k_ft.pth',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0,\n    ),\n\n    'swinv2_tiny_window8_256.ms_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v2.0.0/swinv2_tiny_patch4_window8_256.pth',\n    ),\n    'swinv2_tiny_window16_256.ms_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v2.0.0/swinv2_tiny_patch4_window16_256.pth',\n    ),\n    'swinv2_small_window8_256.ms_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v2.0.0/swinv2_small_patch4_window8_256.pth',\n    ),\n    'swinv2_small_window16_256.ms_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v2.0.0/swinv2_small_patch4_window16_256.pth',\n    ),\n    'swinv2_base_window8_256.ms_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v2.0.0/swinv2_base_patch4_window8_256.pth',\n    ),\n    'swinv2_base_window16_256.ms_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v2.0.0/swinv2_base_patch4_window16_256.pth',\n    ),\n\n    'swinv2_base_window12_192.ms_in22k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v2.0.0/swinv2_base_patch4_window12_192_22k.pth',\n        num_classes=21841, input_size=(3, 192, 192), pool_size=(6, 6)\n    ),\n    'swinv2_large_window12_192.ms_in22k': 
_cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/SwinTransformer/storage/releases/download/v2.0.0/swinv2_large_patch4_window12_192_22k.pth',\n        num_classes=21841, input_size=(3, 192, 192), pool_size=(6, 6)\n    ),\n})\n\n\n@register_model\ndef swinv2_tiny_window16_256(pretrained: bool = False, **kwargs) -> SwinTransformerV2:\n    \"\"\"Swin-T V2 @ 256x256, window 16x16.\"\"\"\n    model_args = dict(window_size=16, embed_dim=96, depths=(2, 2, 6, 2), num_heads=(3, 6, 12, 24))\n    return _create_swin_transformer_v2(\n        'swinv2_tiny_window16_256', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_tiny_window8_256(pretrained: bool = False, **kwargs) -> SwinTransformerV2:\n    \"\"\"Swin-T V2 @ 256x256, window 8x8.\"\"\"\n    model_args = dict(window_size=8, embed_dim=96, depths=(2, 2, 6, 2), num_heads=(3, 6, 12, 24))\n    return _create_swin_transformer_v2(\n        'swinv2_tiny_window8_256', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_small_window16_256(pretrained: bool = False, **kwargs) -> SwinTransformerV2:\n    \"\"\"Swin-S V2 @ 256x256, window 16x16.\"\"\"\n    model_args = dict(window_size=16, embed_dim=96, depths=(2, 2, 18, 2), num_heads=(3, 6, 12, 24))\n    return _create_swin_transformer_v2(\n        'swinv2_small_window16_256', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_small_window8_256(pretrained: bool = False, **kwargs) -> SwinTransformerV2:\n    \"\"\"Swin-S V2 @ 256x256, window 8x8.\"\"\"\n    model_args = dict(window_size=8, embed_dim=96, depths=(2, 2, 18, 2), num_heads=(3, 6, 12, 24))\n    return _create_swin_transformer_v2(\n        'swinv2_small_window8_256', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_base_window16_256(pretrained: bool = False, **kwargs) -> SwinTransformerV2:\n    \"\"\"Swin-B V2 @ 256x256, window 16x16.\"\"\"\n    model_args = 
dict(window_size=16, embed_dim=128, depths=(2, 2, 18, 2), num_heads=(4, 8, 16, 32))\n    return _create_swin_transformer_v2(\n        'swinv2_base_window16_256', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_base_window8_256(pretrained: bool = False, **kwargs) -> SwinTransformerV2:\n    \"\"\"Swin-B V2 @ 256x256, window 8x8.\"\"\"\n    model_args = dict(window_size=8, embed_dim=128, depths=(2, 2, 18, 2), num_heads=(4, 8, 16, 32))\n    return _create_swin_transformer_v2(\n        'swinv2_base_window8_256', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_base_window12_192(pretrained: bool = False, **kwargs) -> SwinTransformerV2:\n    \"\"\"Swin-B V2 @ 192x192, window 12x12.\"\"\"\n    model_args = dict(window_size=12, embed_dim=128, depths=(2, 2, 18, 2), num_heads=(4, 8, 16, 32))\n    return _create_swin_transformer_v2(\n        'swinv2_base_window12_192', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_base_window12to16_192to256(pretrained: bool = False, **kwargs) -> SwinTransformerV2:\n    \"\"\"Swin-B V2 @ 192x192, trained at window 12x12, fine-tuned to 256x256 window 16x16.\"\"\"\n    model_args = dict(\n        window_size=16, embed_dim=128, depths=(2, 2, 18, 2), num_heads=(4, 8, 16, 32),\n        pretrained_window_sizes=(12, 12, 12, 6))\n    return _create_swin_transformer_v2(\n        'swinv2_base_window12to16_192to256', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_base_window12to24_192to384(pretrained: bool = False, **kwargs) -> SwinTransformerV2:\n    \"\"\"Swin-B V2 @ 192x192, trained at window 12x12, fine-tuned to 384x384 window 24x24.\"\"\"\n    model_args = dict(\n        window_size=24, embed_dim=128, depths=(2, 2, 18, 2), num_heads=(4, 8, 16, 32),\n        pretrained_window_sizes=(12, 12, 12, 6))\n    return _create_swin_transformer_v2(\n        'swinv2_base_window12to24_192to384', 
pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_large_window12_192(pretrained: bool = False, **kwargs) -> SwinTransformerV2:\n    \"\"\"Swin-L V2 @ 192x192, window 12x12.\"\"\"\n    model_args = dict(window_size=12, embed_dim=192, depths=(2, 2, 18, 2), num_heads=(6, 12, 24, 48))\n    return _create_swin_transformer_v2(\n        'swinv2_large_window12_192', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_large_window12to16_192to256(pretrained: bool = False, **kwargs) -> SwinTransformerV2:\n    \"\"\"Swin-L V2 @ 192x192, trained at window 12x12, fine-tuned to 256x256 window 16x16.\"\"\"\n    model_args = dict(\n        window_size=16, embed_dim=192, depths=(2, 2, 18, 2), num_heads=(6, 12, 24, 48),\n        pretrained_window_sizes=(12, 12, 12, 6))\n    return _create_swin_transformer_v2(\n        'swinv2_large_window12to16_192to256', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_large_window12to24_192to384(pretrained: bool = False, **kwargs) -> SwinTransformerV2:\n    \"\"\"Swin-L V2 @ 192x192, trained at window 12x12, fine-tuned to 384x384 window 24x24.\"\"\"\n    model_args = dict(\n        window_size=24, embed_dim=192, depths=(2, 2, 18, 2), num_heads=(6, 12, 24, 48),\n        pretrained_window_sizes=(12, 12, 12, 6))\n    return _create_swin_transformer_v2(\n        'swinv2_large_window12to24_192to384', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\nregister_model_deprecations(__name__, {\n    'swinv2_base_window12_192_22k': 'swinv2_base_window12_192.ms_in22k',\n    'swinv2_base_window12to16_192to256_22kft1k': 'swinv2_base_window12to16_192to256.ms_in22k_ft_in1k',\n    'swinv2_base_window12to24_192to384_22kft1k': 'swinv2_base_window12to24_192to384.ms_in22k_ft_in1k',\n    'swinv2_large_window12_192_22k': 'swinv2_large_window12_192.ms_in22k',\n    'swinv2_large_window12to16_192to256_22kft1k': 
'swinv2_large_window12to16_192to256.ms_in22k_ft_in1k',\n    'swinv2_large_window12to24_192to384_22kft1k': 'swinv2_large_window12to24_192to384.ms_in22k_ft_in1k',\n})\n"
  },
  {
    "path": "timm/models/swin_transformer_v2_cr.py",
    "content": "\"\"\" Swin Transformer V2\n\nA PyTorch impl of : `Swin Transformer V2: Scaling Up Capacity and Resolution`\n    - https://arxiv.org/pdf/2111.09883\n\nCode adapted from https://github.com/ChristophReich1996/Swin-Transformer-V2, original copyright/license info below\n\nThis implementation is experimental and subject to change in manners that will break weight compat:\n* Size of the pos embed MLP are not spelled out in paper in terms of dim, fixed for all models? vary with num_heads?\n  * currently dim is fixed, I feel it may make sense to scale with num_heads (dim per head)\n* The specifics of the memory saving 'sequential attention' are not detailed, Christoph Reich has an impl at\n  GitHub link above. It needs further investigation as throughput vs mem tradeoff doesn't appear beneficial.\n* num_heads per stage is not detailed for Huge and Giant model variants\n* 'Giant' is 3B params in paper but ~2.6B here despite matching paper dim + block counts\n* experiments are ongoing wrt to 'main branch' norm layer use and weight init scheme\n\nNoteworthy additions over official Swin v1:\n* MLP relative position embedding is looking promising and adapts to different image/window sizes\n* This impl has been designed to allow easy change of image size with matching window size changes\n* Non-square image size and window size are supported\n\nModifications and additions for timm hacked together by / Copyright 2022, Ross Wightman\n\"\"\"\n# --------------------------------------------------------\n# Swin Transformer V2 reimplementation\n# Copyright (c) 2021 Christoph Reich\n# Licensed under The MIT License [see LICENSE for details]\n# Written by Christoph Reich\n# --------------------------------------------------------\nimport logging\nimport math\nfrom functools import partial\nfrom typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import 
IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import DropPath, calculate_drop_path_rates, Mlp, ClassifierHead, to_2tuple, _assert, ndgrid\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_function\nfrom ._manipulate import named_apply, checkpoint\nfrom ._registry import generate_default_cfgs, register_model\n\n__all__ = ['SwinTransformerV2Cr']  # model_registry will add each entrypoint fn to this\n\n_logger = logging.getLogger(__name__)\n\n\ndef bchw_to_bhwc(x: torch.Tensor) -> torch.Tensor:\n    \"\"\"Permutes a tensor from the shape (B, C, H, W) to (B, H, W, C).\"\"\"\n    return x.permute(0, 2, 3, 1)\n\n\ndef bhwc_to_bchw(x: torch.Tensor) -> torch.Tensor:\n    \"\"\"Permutes a tensor from the shape (B, H, W, C) to (B, C, H, W).\"\"\"\n    return x.permute(0, 3, 1, 2)\n\n\ndef window_partition(x: torch.Tensor, window_size: Tuple[int, int]) -> torch.Tensor:\n    \"\"\"Partition into non-overlapping windows.\n\n    Args:\n        x: Input tensor of shape (B, H, W, C).\n        window_size: Window size (height, width).\n\n    Returns:\n        Windows tensor of shape (num_windows*B, window_size[0], window_size[1], C).\n    \"\"\"\n    B, H, W, C = x.shape\n    x = x.view(B, H // window_size[0], window_size[0], W // window_size[1], window_size[1], C)\n    windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size[0], window_size[1], C)\n    return windows\n\n\n@register_notrace_function  # reason: int argument is a Proxy\ndef window_reverse(windows: torch.Tensor, window_size: Tuple[int, int], img_size: Tuple[int, int]) -> torch.Tensor:\n    \"\"\"Merge windows back to feature map.\n\n    Args:\n        windows: Windows tensor of shape (num_windows * B, window_size[0], window_size[1], C).\n        window_size: Window size (height, width).\n        img_size: Image size (height, width).\n\n    Returns:\n        Feature map tensor of shape (B, H, W, C).\n    
\"\"\"\n    H, W = img_size\n    C = windows.shape[-1]\n    x = windows.view(-1, H // window_size[0], W // window_size[1], window_size[0], window_size[1], C)\n    x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, H, W, C)\n    return x\n\n\nclass WindowMultiHeadAttention(nn.Module):\n    r\"\"\"This class implements window-based Multi-Head-Attention with log-spaced continuous position bias.\n\n    Args:\n        dim (int): Number of input features\n        window_size (int): Window size\n        num_heads (int): Number of attention heads\n        drop_attn (float): Dropout rate of attention map\n        drop_proj (float): Dropout rate after projection\n        meta_hidden_dim (int): Number of hidden features in the two layer MLP meta network\n        sequential_attn (bool): If true sequential self-attention is performed\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            window_size: Tuple[int, int],\n            drop_attn: float = 0.0,\n            drop_proj: float = 0.0,\n            meta_hidden_dim: int = 384,  # FIXME what's the optimal value?\n            sequential_attn: bool = False,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert dim % num_heads == 0, \\\n            \"The number of input features (in_features) are not divisible by the number of heads (num_heads).\"\n        self.in_features: int = dim\n        self.window_size: Tuple[int, int] = to_2tuple(window_size)\n        self.num_heads: int = num_heads\n        self.sequential_attn: bool = sequential_attn\n\n        self.qkv = nn.Linear(in_features=dim, out_features=dim * 3, bias=True, **dd)\n        self.attn_drop = nn.Dropout(drop_attn)\n        self.proj = nn.Linear(in_features=dim, out_features=dim, bias=True, **dd)\n        self.proj_drop = nn.Dropout(drop_proj)\n        # meta network for positional encodings\n       
 self.meta_mlp = Mlp(\n            2,  # x, y\n            hidden_features=meta_hidden_dim,\n            out_features=num_heads,\n            act_layer=nn.ReLU,\n            drop=(0.125, 0.),  # FIXME should there be stochasticity, appears to 'overfit' without?\n            **dd,\n        )\n        # NOTE old checkpoints used inverse of logit_scale ('tau') following the paper, see conversion fn\n        self.logit_scale = nn.Parameter(torch.empty(num_heads, **dd))\n\n        # Register empty buffer with correct shape\n        win_h, win_w = self.window_size\n        self.register_buffer(\n            \"relative_coordinates_log\",\n            torch.empty(win_h * win_w * win_h * win_w, 2, **dd),\n            persistent=False,\n        )\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        nn.init.constant_(self.logit_scale, math.log(10))\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        self._make_pair_wise_relative_positions()\n\n    def _make_pair_wise_relative_positions(self) -> None:\n        \"\"\"Initialize the pair-wise relative positions to compute the positional biases.\"\"\"\n        device = self.logit_scale.device\n        coordinates = torch.stack(ndgrid(\n            torch.arange(self.window_size[0], device=device, dtype=torch.float32),\n            torch.arange(self.window_size[1], device=device, dtype=torch.float32),\n        )).flatten(1)\n        relative_coordinates = coordinates[:, :, None] - coordinates[:, None, :]\n        relative_coordinates = relative_coordinates.permute(1, 2, 0).reshape(-1, 2).float()\n        relative_coordinates_log = torch.sign(relative_coordinates) * torch.log(\n            1.0 + relative_coordinates.abs())\n        
self.relative_coordinates_log.copy_(relative_coordinates_log.to(self.logit_scale.dtype))\n\n    def set_window_size(self, window_size: Tuple[int, int]) -> None:\n        \"\"\"Update window size and regenerate relative position coordinates.\n\n        Args:\n            window_size: New window size.\n        \"\"\"\n        window_size = to_2tuple(window_size)\n        if window_size != self.window_size:\n            self.window_size = window_size\n            self._make_pair_wise_relative_positions()\n\n    def _relative_positional_encodings(self) -> torch.Tensor:\n        \"\"\"Compute the relative positional encodings.\n\n        Returns:\n            Relative positional encodings of shape (1, num_heads, window_size**2, window_size**2).\n        \"\"\"\n        window_area = self.window_size[0] * self.window_size[1]\n        relative_position_bias = self.meta_mlp(self.relative_coordinates_log)\n        relative_position_bias = relative_position_bias.transpose(1, 0).reshape(\n            self.num_heads, window_area, window_area\n        )\n        relative_position_bias = relative_position_bias.unsqueeze(0)\n        return relative_position_bias\n\n    def forward(self, x: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor:\n        \"\"\"Forward pass of window multi-head self-attention.\n\n        Args:\n            x: Input tensor of shape (B * windows, N, C).\n            mask: Attention mask for the shift case.\n\n        Returns:\n            Output tensor of shape (B * windows, N, C).\n        \"\"\"\n        Bw, L, C = x.shape\n\n        qkv = self.qkv(x).view(Bw, L, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4)\n        query, key, value = qkv.unbind(0)\n\n        # compute attention map with scaled cosine attention\n        attn = (F.normalize(query, dim=-1) @ F.normalize(key, dim=-1).transpose(-2, -1))\n        logit_scale = torch.clamp(self.logit_scale.reshape(1, self.num_heads, 1, 1), max=math.log(1. 
/ 0.01)).exp()\n        attn = attn * logit_scale\n        attn = attn + self._relative_positional_encodings()\n\n        if mask is not None:\n            # Apply mask if utilized\n            num_win: int = mask.shape[0]\n            attn = attn.view(Bw // num_win, num_win, self.num_heads, L, L)\n            attn = attn + mask.unsqueeze(1).unsqueeze(0)\n            attn = attn.view(-1, self.num_heads, L, L)\n        attn = attn.softmax(dim=-1)\n        attn = self.attn_drop(attn)\n\n        x = (attn @ value).transpose(1, 2).reshape(Bw, L, -1)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n\nclass SwinTransformerV2CrBlock(nn.Module):\n    r\"\"\"This class implements the Swin transformer block.\n\n    Args:\n        dim (int): Number of input channels\n        num_heads (int): Number of attention heads to be utilized\n        feat_size (Tuple[int, int]): Input resolution\n        window_size (Tuple[int, int]): Window size to be utilized\n        shift_size (int): Shifting size to be used\n        mlp_ratio (int): Ratio of the hidden dimension in the FFN to the input channels\n        proj_drop (float): Dropout in input mapping\n        drop_attn (float): Dropout rate of attention map\n        drop_path (float): Dropout in main path\n        extra_norm (bool): Insert extra norm on 'main' branch if True\n        sequential_attn (bool): If true sequential self-attention is performed\n        norm_layer (Type[nn.Module]): Type of normalization layer to be utilized\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            feat_size: Tuple[int, int],\n            window_size: Tuple[int, int],\n            shift_size: Tuple[int, int] = (0, 0),\n            always_partition: bool = False,\n            dynamic_mask: bool = False,\n            
mlp_ratio: float = 4.0,\n            init_values: Optional[float] = 0,\n            proj_drop: float = 0.0,\n            drop_attn: float = 0.0,\n            drop_path: float = 0.0,\n            extra_norm: bool = False,\n            sequential_attn: bool = False,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim: int = dim\n        self.feat_size: Tuple[int, int] = feat_size\n        self.target_shift_size: Tuple[int, int] = to_2tuple(shift_size)\n        self.always_partition = always_partition\n        self.dynamic_mask = dynamic_mask\n        self.window_size, self.shift_size = self._calc_window_shift(window_size)\n        self.window_area = self.window_size[0] * self.window_size[1]\n        self.init_values: Optional[float] = init_values\n\n        # attn branch\n        self.attn = WindowMultiHeadAttention(\n            dim=dim,\n            num_heads=num_heads,\n            window_size=self.window_size,\n            drop_attn=drop_attn,\n            drop_proj=proj_drop,\n            sequential_attn=sequential_attn,\n            **dd,\n        )\n        self.norm1 = norm_layer(dim, **dd)\n        self.drop_path1 = DropPath(drop_prob=drop_path) if drop_path > 0.0 else nn.Identity()\n\n        # mlp branch\n        self.mlp = Mlp(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            drop=proj_drop,\n            out_features=dim,\n            **dd,\n        )\n        self.norm2 = norm_layer(dim, **dd)\n        self.drop_path2 = DropPath(drop_prob=drop_path) if drop_path > 0.0 else nn.Identity()\n\n        # Extra main branch norm layer mentioned for Huge/Giant models in V2 paper.\n        # Also being used as final network norm and optional stage ending norm while still in a C-last format.\n        self.norm3 = norm_layer(dim, **dd) if extra_norm else 
nn.Identity()\n\n        # Register buffer as None initially, will be computed in reset_parameters if needed\n        self.register_buffer(\"attn_mask\", None, persistent=False)\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        self._init_buffers()\n        # extra, module specific weight init\n        if self.init_values is not None:\n            nn.init.constant_(self.norm1.weight, self.init_values)\n            nn.init.constant_(self.norm2.weight, self.init_values)\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        if not self.dynamic_mask:\n            device = self.norm1.weight.device\n            dtype = self.norm1.weight.dtype\n            attn_mask = self.get_attn_mask(device=device, dtype=dtype)\n            self.register_buffer(\"attn_mask\", attn_mask, persistent=False)\n\n    def _calc_window_shift(\n            self,\n            target_window_size: Tuple[int, int],\n    ) -> Tuple[Tuple[int, int], Tuple[int, int]]:\n        target_window_size = to_2tuple(target_window_size)\n        target_shift_size = self.target_shift_size\n        if any(target_shift_size):\n            # if non-zero, recalculate shift from current window size in case window size has changed\n            target_shift_size = (target_window_size[0] // 2, target_window_size[1] // 2)\n\n        if self.always_partition:\n            return target_window_size, target_shift_size\n\n        window_size = [f if f <= w else w for f, w in zip(self.feat_size, target_window_size)]\n        shift_size = [0 if f <= w else s for f, w, s in zip(self.feat_size, window_size, target_shift_size)]\n        return tuple(window_size), tuple(shift_size)\n\n    def get_attn_mask(\n            self,\n            x: Optional[torch.Tensor] = None,\n            device: 
Optional[torch.device] = None,\n            dtype: Optional[torch.dtype] = None,\n    ) -> Optional[torch.Tensor]:\n        \"\"\"Method generates the attention mask used in shift case.\"\"\"\n        # Make masks for shift case\n        if any(self.shift_size):\n            # calculate attention mask for SW-MSA\n            if x is None:\n                img_mask = torch.zeros((1, *self.feat_size, 1), device=device, dtype=dtype)  # 1 H W 1\n            else:\n                img_mask = torch.zeros((1, x.shape[1], x.shape[2], 1), device=x.device, dtype=x.dtype)  # 1 H W 1\n            cnt = 0\n            for h in (\n                    (0, -self.window_size[0]),\n                    (-self.window_size[0], -self.shift_size[0]),\n                    (-self.shift_size[0], None),\n            ):\n                for w in (\n                        (0, -self.window_size[1]),\n                        (-self.window_size[1], -self.shift_size[1]),\n                        (-self.shift_size[1], None),\n                ):\n                    img_mask[:, h[0]:h[1], w[0]:w[1], :] = cnt\n                    cnt += 1\n            mask_windows = window_partition(img_mask, self.window_size)  # num_windows, window_size, window_size, 1\n            mask_windows = mask_windows.view(-1, self.window_area)\n            attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2)\n            attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill(attn_mask == 0, float(0.0))\n        else:\n            attn_mask = None\n        return attn_mask\n\n    def set_input_size(self, feat_size: Tuple[int, int], window_size: Tuple[int, int]) -> None:\n        \"\"\"Method updates the image resolution to be processed and window size and so the pair-wise relative positions.\n\n        Args:\n            feat_size (Tuple[int, int]): New input resolution\n            window_size (int): New window size\n        \"\"\"\n        # Update input resolution\n        
self.feat_size: Tuple[int, int] = feat_size\n        self.window_size, self.shift_size = self._calc_window_shift(to_2tuple(window_size))\n        self.window_area = self.window_size[0] * self.window_size[1]\n        self.attn.set_window_size(self.window_size)\n        device = self.attn_mask.device if self.attn_mask is not None else None\n        dtype = self.attn_mask.dtype if self.attn_mask is not None else None\n        self.register_buffer(\n            \"attn_mask\",\n            None if self.dynamic_mask else self.get_attn_mask(device=device, dtype=dtype),\n            persistent=False,\n        )\n\n    def _shifted_window_attn(self, x):\n        B, H, W, C = x.shape\n\n        # cyclic shift\n        sh, sw = self.shift_size\n        do_shift: bool = any(self.shift_size)\n        if do_shift:\n            # FIXME PyTorch XLA needs cat impl, roll not lowered\n            # x = torch.cat([x[:, sh:], x[:, :sh]], dim=1)\n            # x = torch.cat([x[:, :, sw:], x[:, :, :sw]], dim=2)\n            x = torch.roll(x, shifts=(-sh, -sw), dims=(1, 2))\n\n        pad_h = (self.window_size[0] - H % self.window_size[0]) % self.window_size[0]\n        pad_w = (self.window_size[1] - W % self.window_size[1]) % self.window_size[1]\n        x = torch.nn.functional.pad(x, (0, 0, 0, pad_w, 0, pad_h))\n        _, Hp, Wp, _ = x.shape\n\n        # partition windows\n        x_windows = window_partition(x, self.window_size)  # num_windows * B, window_size, window_size, C\n        x_windows = x_windows.view(-1, self.window_size[0] * self.window_size[1], C)\n\n        # W-MSA/SW-MSA\n        if getattr(self, 'dynamic_mask', False):\n            attn_mask = self.get_attn_mask(x)\n        else:\n            attn_mask = self.attn_mask\n        attn_windows = self.attn(x_windows, mask=attn_mask)  # num_windows * B, window_size * window_size, C\n\n        # merge windows\n        attn_windows = attn_windows.view(-1, self.window_size[0], self.window_size[1], C)\n        x = 
window_reverse(attn_windows, self.window_size, (Hp, Wp))  # B H' W' C\n        x = x[:, :H, :W, :].contiguous()\n\n        # reverse cyclic shift\n        if do_shift:\n            # FIXME PyTorch XLA needs cat impl, roll not lowered\n            # x = torch.cat([x[:, -sh:], x[:, :-sh]], dim=1)\n            # x = torch.cat([x[:, :, -sw:], x[:, :, :-sw]], dim=2)\n            x = torch.roll(x, shifts=(sh, sw), dims=(1, 2))\n\n        return x\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass of Swin Transformer V2 block.\n\n        Args:\n            x: Input tensor of shape [B, C, H, W].\n\n        Returns:\n            Output tensor of shape [B, C, H, W].\n        \"\"\"\n        # post-norm branches (op -> norm -> drop)\n        x = x + self.drop_path1(self.norm1(self._shifted_window_attn(x)))\n\n        B, H, W, C = x.shape\n        x = x.reshape(B, -1, C)\n        x = x + self.drop_path2(self.norm2(self.mlp(x)))\n        x = self.norm3(x)  # main-branch norm enabled for some blocks / stages (every 6 for Huge/Giant)\n        x = x.reshape(B, H, W, C)\n        return x\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n\nclass PatchMerging(nn.Module):\n    \"\"\"Patch merging layer.\n\n    This class implements the patch merging as a strided convolution with a normalization before.\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize patch merging layer.\n\n        Args:\n            dim: Number of input channels.\n            norm_layer: Type of normalization layer to be utilized.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.norm = norm_layer(4 * dim, **dd)\n        self.reduction = nn.Linear(in_features=4 
* dim, out_features=2 * dim, bias=False, **dd)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass of patch merging.\n\n        Args:\n            x: Input tensor of shape [B, C, H, W].\n\n        Returns:\n            Output tensor of shape [B, 2 * C, H // 2, W // 2].\n        \"\"\"\n        B, H, W, C = x.shape\n\n        pad_values = (0, 0, 0, W % 2, 0, H % 2)\n        x = nn.functional.pad(x, pad_values)\n        _, H, W, _ = x.shape\n\n        x = x.reshape(B, H // 2, 2, W // 2, 2, C).permute(0, 1, 3, 4, 2, 5).flatten(3)\n        x = self.norm(x)\n        x = self.reduction(x)\n        return x\n\n\nclass PatchEmbed(nn.Module):\n    \"\"\"2D Image to Patch Embedding.\"\"\"\n    def __init__(\n            self,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            patch_size: Union[int, Tuple[int, int]] = 16,\n            in_chans: int = 3,\n            embed_dim: int = 768,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            strict_img_size: bool = True,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize patch embedding.\n\n        Args:\n            img_size: Input image size.\n            patch_size: Patch size.\n            in_chans: Number of input channels.\n            embed_dim: Embedding dimension.\n            norm_layer: Normalization layer.\n            strict_img_size: Enforce strict image size.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        img_size = to_2tuple(img_size)\n        patch_size = to_2tuple(patch_size)\n        self.img_size = img_size\n        self.patch_size = patch_size\n        self.grid_size = (img_size[0] // patch_size[0], img_size[1] // patch_size[1])\n        self.num_patches = self.grid_size[0] * self.grid_size[1]\n        self.strict_img_size = strict_img_size\n\n        self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size, 
**dd)\n        self.norm = norm_layer(embed_dim, **dd) if norm_layer else nn.Identity()\n\n    def set_input_size(self, img_size: Tuple[int, int]) -> None:\n        \"\"\"Update input image size.\n\n        Args:\n            img_size: New image size.\n        \"\"\"\n        img_size = to_2tuple(img_size)\n        if img_size != self.img_size:\n            self.img_size = img_size\n            self.grid_size = (img_size[0] // self.patch_size[0], img_size[1] // self.patch_size[1])\n            self.num_patches = self.grid_size[0] * self.grid_size[1]\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass of patch embedding.\n\n        Args:\n            x: Input tensor of shape [B, C, H, W].\n\n        Returns:\n            Output tensor of shape [B, C', H', W'].\n        \"\"\"\n        B, C, H, W = x.shape\n        if self.strict_img_size:\n            _assert(H == self.img_size[0], f\"Input image height ({H}) doesn't match model ({self.img_size[0]}).\")\n            _assert(W == self.img_size[1], f\"Input image width ({W}) doesn't match model ({self.img_size[1]}).\")\n        x = self.proj(x)\n        x = self.norm(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2)\n        return x\n\n\nclass SwinTransformerV2CrStage(nn.Module):\n    r\"\"\"This class implements a stage of the Swin transformer including multiple layers.\n\n    Args:\n        embed_dim (int): Number of input channels\n        depth (int): Depth of the stage (number of layers)\n        downscale (bool): If true input is downsampled (see Fig. 
3 or V1 paper)\n        feat_size (Tuple[int, int]): input feature map size (H, W)\n        num_heads (int): Number of attention heads to be utilized\n        window_size (int): Window size to be utilized\n        mlp_ratio (int): Ratio of the hidden dimension in the FFN to the input channels\n        proj_drop (float): Dropout in input mapping\n        drop_attn (float): Dropout rate of attention map\n        drop_path (float): Dropout in main path\n        norm_layer (Type[nn.Module]): Type of normalization layer to be utilized. Default: nn.LayerNorm\n        extra_norm_period (int): Insert extra norm layer on main branch every N (period) blocks\n        extra_norm_stage (bool): End each stage with an extra norm layer in main branch\n        sequential_attn (bool): If true sequential self-attention is performed\n    \"\"\"\n\n    def __init__(\n            self,\n            embed_dim: int,\n            depth: int,\n            downscale: bool,\n            num_heads: int,\n            feat_size: Tuple[int, int],\n            window_size: Tuple[int, int],\n            always_partition: bool = False,\n            dynamic_mask: bool = False,\n            mlp_ratio: float = 4.0,\n            init_values: Optional[float] = 0.0,\n            proj_drop: float = 0.0,\n            drop_attn: float = 0.0,\n            drop_path: Union[List[float], float] = 0.0,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            extra_norm_period: int = 0,\n            extra_norm_stage: bool = False,\n            sequential_attn: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.downscale: bool = downscale\n        self.grad_checkpointing: bool = False\n        self.feat_size: Tuple[int, int] = (feat_size[0] // 2, feat_size[1] // 2) if downscale else feat_size\n\n        if downscale:\n            self.downsample = PatchMerging(embed_dim, 
norm_layer=norm_layer, **dd)\n            embed_dim = embed_dim * 2\n        else:\n            self.downsample = nn.Identity()\n\n        def _extra_norm(index):\n            i = index + 1\n            if extra_norm_period and i % extra_norm_period == 0:\n                return True\n            return i == depth if extra_norm_stage else False\n\n        self.blocks = nn.Sequential(*[\n            SwinTransformerV2CrBlock(\n                dim=embed_dim,\n                num_heads=num_heads,\n                feat_size=self.feat_size,\n                window_size=window_size,\n                always_partition=always_partition,\n                dynamic_mask=dynamic_mask,\n                shift_size=tuple([0 if ((index % 2) == 0) else w // 2 for w in window_size]),\n                mlp_ratio=mlp_ratio,\n                init_values=init_values,\n                proj_drop=proj_drop,\n                drop_attn=drop_attn,\n                drop_path=drop_path[index] if isinstance(drop_path, list) else drop_path,\n                extra_norm=_extra_norm(index),\n                sequential_attn=sequential_attn,\n                norm_layer=norm_layer,\n                **dd,\n            )\n            for index in range(depth)]\n        )\n\n    def set_input_size(\n            self,\n            feat_size: Tuple[int, int],\n            window_size: int,\n            always_partition: Optional[bool] = None,\n    ):\n        \"\"\" Updates the resolution to utilize and the window size and so the pair-wise relative positions.\n\n        Args:\n            window_size (int): New window size\n            feat_size (Tuple[int, int]): New input resolution\n        \"\"\"\n        self.feat_size = (feat_size[0] // 2, feat_size[1] // 2) if self.downscale else feat_size\n        for block in self.blocks:\n            block.set_input_size(\n                feat_size=self.feat_size,\n                window_size=window_size,\n            )\n\n    def forward(self, x: torch.Tensor) -> 
torch.Tensor:\n        \"\"\"Forward pass.\n        Args:\n            x (torch.Tensor): Input tensor of the shape [B, C, H, W] or [B, L, C]\n        Returns:\n            output (torch.Tensor): Output tensor of the shape [B, 2 * C, H // 2, W // 2]\n        \"\"\"\n        x = bchw_to_bhwc(x)\n        x = self.downsample(x)\n        for block in self.blocks:\n            # Perform checkpointing if utilized\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(block, x)\n            else:\n                x = block(x)\n        x = bhwc_to_bchw(x)\n        return x\n\n\nclass SwinTransformerV2Cr(nn.Module):\n    r\"\"\" Swin Transformer V2\n        A PyTorch impl of : `Swin Transformer V2: Scaling Up Capacity and Resolution`  -\n          https://arxiv.org/pdf/2111.09883\n\n    Args:\n        img_size: Input resolution.\n        window_size: Window size. If None, grid_size // window_div\n        window_ratio: Window size to patch grid ratio.\n        patch_size: Patch size.\n        in_chans: Number of input channels.\n        depths: Depth of the stage (number of layers).\n        num_heads: Number of attention heads to be utilized.\n        embed_dim: Patch embedding dimension.\n        num_classes: Number of output classes.\n        mlp_ratio:  Ratio of the hidden dimension in the FFN to the input channels.\n        drop_rate: Dropout rate.\n        proj_drop_rate: Projection dropout rate.\n        attn_drop_rate: Dropout rate of attention map.\n        drop_path_rate: Stochastic depth rate.\n        norm_layer: Type of normalization layer to be utilized.\n        extra_norm_period: Insert extra norm layer on main branch every N (period) blocks in stage\n        extra_norm_stage: End each stage with an extra norm layer in main branch\n        sequential_attn: If true sequential self-attention is performed.\n    \"\"\"\n\n    def __init__(\n            self,\n            img_size: Tuple[int, int] = (224, 224),\n   
         patch_size: int = 4,\n            window_size: Optional[int] = None,\n            window_ratio: int = 8,\n            always_partition: bool = False,\n            strict_img_size: bool = True,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            embed_dim: int = 96,\n            depths: Tuple[int, ...] = (2, 2, 6, 2),\n            num_heads: Tuple[int, ...] = (3, 6, 12, 24),\n            mlp_ratio: float = 4.0,\n            init_values: Optional[float] = 0.,\n            drop_rate: float = 0.0,\n            proj_drop_rate: float = 0.0,\n            attn_drop_rate: float = 0.0,\n            drop_path_rate: float = 0.0,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            extra_norm_period: int = 0,\n            extra_norm_stage: bool = False,\n            sequential_attn: bool = False,\n            global_pool: str = 'avg',\n            weight_init: str = 'reset',\n            device=None,\n            dtype=None,\n            **kwargs: Any\n    ) -> None:\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        img_size = to_2tuple(img_size)\n        self.num_classes: int = num_classes\n        self.in_chans: int = in_chans\n        self.patch_size: int = patch_size\n        self.img_size: Tuple[int, int] = img_size\n        self.num_features = self.head_hidden_size = int(embed_dim * 2 ** (len(depths) - 1))\n        self.feature_info = []\n\n        self.patch_embed = PatchEmbed(\n            img_size=img_size,\n            patch_size=patch_size,\n            in_chans=in_chans,\n            embed_dim=embed_dim,\n            norm_layer=norm_layer,\n            strict_img_size=strict_img_size,\n            **dd,\n        )\n        grid_size = self.patch_embed.grid_size\n        if window_size is None:\n            self.window_size = tuple([s // window_ratio for s in grid_size])\n        else:\n            self.window_size = to_2tuple(window_size)\n\n        dpr = 
calculate_drop_path_rates(drop_path_rate, depths, stagewise=True)\n        stages = []\n        in_dim = embed_dim\n        in_scale = 1\n        for stage_idx, (depth, num_heads) in enumerate(zip(depths, num_heads)):\n            stages += [SwinTransformerV2CrStage(\n                embed_dim=in_dim,\n                depth=depth,\n                downscale=stage_idx != 0,\n                feat_size=(grid_size[0] // in_scale, grid_size[1] // in_scale),\n                num_heads=num_heads,\n                window_size=self.window_size,\n                always_partition=always_partition,\n                dynamic_mask=not strict_img_size,\n                mlp_ratio=mlp_ratio,\n                init_values=init_values,\n                proj_drop=proj_drop_rate,\n                drop_attn=attn_drop_rate,\n                drop_path=dpr[stage_idx],\n                extra_norm_period=extra_norm_period,\n                extra_norm_stage=extra_norm_stage or (stage_idx + 1) == len(depths),  # last stage ends w/ norm\n                sequential_attn=sequential_attn,\n                norm_layer=norm_layer,\n                **dd,\n            )]\n            if stage_idx != 0:\n                in_dim *= 2\n                in_scale *= 2\n            self.feature_info += [dict(num_chs=in_dim, reduction=4 * in_scale, module=f'stages.{stage_idx}')]\n        self.stages = nn.Sequential(*stages)\n\n        self.head = ClassifierHead(\n            self.num_features,\n            num_classes,\n            pool_type=global_pool,\n            drop_rate=drop_rate,\n            **dd,\n        )\n\n        self.weight_init_mode = 'reset' if weight_init == 'skip' else weight_init\n        # TODO: skip init when on meta device when safe to do so\n        if weight_init != 'skip':\n            self.init_weights(needs_reset=False)\n\n    def init_weights(self, needs_reset: bool = True) -> None:\n        \"\"\"Initialize model weights.\n\n        Args:\n            needs_reset: If True, call 
reset_parameters() on modules (default for after to_empty()).\n                If False, skip reset_parameters() (for __init__ where modules already self-initialized).\n        \"\"\"\n        if self.weight_init_mode == 'reset':\n            # 'reset' mode only calls reset_parameters()\n            def _reset(module, name):\n                if hasattr(module, 'reset_parameters'):\n                    module.reset_parameters()\n            if needs_reset:\n                named_apply(_reset, self)\n        else:\n            named_apply(partial(init_weights_swin, needs_reset=needs_reset), self)\n\n    def set_input_size(\n            self,\n            img_size: Optional[Tuple[int, int]] = None,\n            window_size: Optional[Tuple[int, int]] = None,\n            window_ratio: int = 8,\n            always_partition: Optional[bool] = None,\n    ) -> None:\n        \"\"\"Updates the image resolution, window size and so the pair-wise relative positions.\n\n        Args:\n            img_size (Optional[Tuple[int, int]]): New input resolution, if None current resolution is used\n            window_size (Optional[int]): New window size, if None based on new_img_size // window_div\n            window_ratio (int): divisor for calculating window size from patch grid size\n            always_partition: always partition / shift windows even if feat size is < window\n        \"\"\"\n        if img_size is not None:\n            self.patch_embed.set_input_size(img_size=img_size)\n            grid_size = self.patch_embed.grid_size\n\n        if window_size is None and window_ratio is not None:\n            window_size = tuple([s // window_ratio for s in grid_size])\n\n        for index, stage in enumerate(self.stages):\n            stage_scale = 2 ** max(index - 1, 0)\n            stage.set_input_size(\n                feat_size=(grid_size[0] // stage_scale, grid_size[1] // stage_scale),\n                window_size=window_size,\n                
always_partition=always_partition,\n            )\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^patch_embed',  # stem and embed\n            blocks=r'^stages\\.(\\d+)' if coarse else [\n                (r'^stages\\.(\\d+).downsample', (0,)),\n                (r'^stages\\.(\\d+)\\.\\w+\\.(\\d+)', None),\n            ]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        for s in self.stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore()\n    def get_classifier(self) -> nn.Module:\n        \"\"\"Method returns the classification head of the model.\n        Returns:\n            head (nn.Module): Current classification head\n        \"\"\"\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None) -> None:\n        \"\"\"Method results the classification head\n\n        Args:\n            num_classes (int): Number of classes to be predicted\n            global_pool (str): Unused\n        \"\"\"\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n      
      intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.patch_embed(x)\n\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n        for i, stage in enumerate(stages):\n            x = stage(x)\n            if i in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.patch_embed(x)\n        x = self.stages(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        return self.head(x, pre_logits=True) if pre_logits else self.head(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef init_weights_swin(module: nn.Module, name: str = '', needs_reset: bool = True):\n    # FIXME WIP determining if there's a better weight init\n    if isinstance(module, nn.Linear):\n        if 'qkv' in name:\n            # treat the weights of Q, K, V 
separately\n            val = math.sqrt(6. / float(module.weight.shape[0] // 3 + module.weight.shape[1]))\n            nn.init.uniform_(module.weight, -val, val)\n        elif 'head' in name:\n            nn.init.zeros_(module.weight)\n        else:\n            nn.init.xavier_uniform_(module.weight)\n        if module.bias is not None:\n            nn.init.zeros_(module.bias)\n    elif hasattr(module, 'init_weights'):\n        module.init_weights()\n    elif needs_reset and hasattr(module, 'reset_parameters'):\n        module.reset_parameters()\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    \"\"\" convert patch embedding weight from manual patchify + linear proj to conv\"\"\"\n    state_dict = state_dict.get('model', state_dict)\n    state_dict = state_dict.get('state_dict', state_dict)\n    if 'head.fc.weight' in state_dict:\n        return state_dict\n    out_dict = {}\n    for k, v in state_dict.items():\n        if 'tau' in k:\n            # convert old tau based checkpoints -> logit_scale (inverse)\n            v = torch.log(1 / v)\n            k = k.replace('tau', 'logit_scale')\n        k = k.replace('head.', 'head.fc.')\n        out_dict[k] = v\n    return out_dict\n\n\ndef _create_swin_transformer_v2_cr(variant, pretrained=False, **kwargs):\n    default_out_indices = tuple(i for i, _ in enumerate(kwargs.get('depths', (1, 1, 1, 1))))\n    out_indices = kwargs.pop('out_indices', default_out_indices)\n\n    model = build_model_with_cfg(\n        SwinTransformerV2Cr, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        **kwargs\n    )\n    return model\n\n\ndef _cfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Create a default configuration dictionary.\n\n    Args:\n        url: Model weights URL.\n        **kwargs: Additional configuration parameters.\n\n    Returns:\n        Configuration dictionary.\n    \"\"\"\n    return {\n        
'url': url,\n        'num_classes': 1000,\n        'input_size': (3, 224, 224),\n        'pool_size': (7, 7),\n        'crop_pct': 0.9,\n        'interpolation': 'bicubic',\n        'fixed_input_size': True,\n        'mean': IMAGENET_DEFAULT_MEAN,\n        'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.proj',\n        'classifier': 'head.fc',\n        'license': 'apache-2.0',\n        **kwargs,\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'swinv2_cr_tiny_384.untrained': _cfg(\n        url=\"\", input_size=(3, 384, 384), crop_pct=1.0, pool_size=(12, 12)),\n    'swinv2_cr_tiny_224.untrained': _cfg(\n        url=\"\", input_size=(3, 224, 224), crop_pct=0.9),\n    'swinv2_cr_tiny_ns_224.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url=\"https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-swinv2/swin_v2_cr_tiny_ns_224-ba8166c6.pth\",\n        input_size=(3, 224, 224), crop_pct=0.9),\n    'swinv2_cr_small_384.untrained': _cfg(\n        url=\"\", input_size=(3, 384, 384), crop_pct=1.0, pool_size=(12, 12)),\n    'swinv2_cr_small_224.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url=\"https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-swinv2/swin_v2_cr_small_224-0813c165.pth\",\n        input_size=(3, 224, 224), crop_pct=0.9),\n    'swinv2_cr_small_ns_224.sw_in1k': _cfg(\n        hf_hub_id='timm/',\n        url=\"https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-swinv2/swin_v2_cr_small_ns_224_iv-2ce90f8e.pth\",\n        input_size=(3, 224, 224), crop_pct=0.9),\n    'swinv2_cr_small_ns_256.untrained': _cfg(\n        url=\"\", input_size=(3, 256, 256), crop_pct=1.0, pool_size=(8, 8)),\n    'swinv2_cr_base_384.untrained': _cfg(\n        url=\"\", input_size=(3, 384, 384), crop_pct=1.0, pool_size=(12, 12)),\n    'swinv2_cr_base_224.untrained': _cfg(\n        url=\"\", input_size=(3, 224, 224), crop_pct=0.9),\n    
'swinv2_cr_base_ns_224.untrained': _cfg(\n        url=\"\", input_size=(3, 224, 224), crop_pct=0.9),\n    'swinv2_cr_large_384.untrained': _cfg(\n        url=\"\", input_size=(3, 384, 384), crop_pct=1.0, pool_size=(12, 12)),\n    'swinv2_cr_large_224.untrained': _cfg(\n        url=\"\", input_size=(3, 224, 224), crop_pct=0.9),\n    'swinv2_cr_huge_384.untrained': _cfg(\n        url=\"\", input_size=(3, 384, 384), crop_pct=1.0, pool_size=(12, 12)),\n    'swinv2_cr_huge_224.untrained': _cfg(\n        url=\"\", input_size=(3, 224, 224), crop_pct=0.9),\n    'swinv2_cr_giant_384.untrained': _cfg(\n        url=\"\", input_size=(3, 384, 384), crop_pct=1.0, pool_size=(12, 12)),\n    'swinv2_cr_giant_224.untrained': _cfg(\n        url=\"\", input_size=(3, 224, 224), crop_pct=0.9),\n})\n\n\n@register_model\ndef swinv2_cr_tiny_384(pretrained: bool = False, **kwargs) -> SwinTransformerV2Cr:\n    \"\"\"Swin-T V2 CR @ 384x384, trained ImageNet-1k.\"\"\"\n    model_args = dict(\n        embed_dim=96,\n        depths=(2, 2, 6, 2),\n        num_heads=(3, 6, 12, 24),\n    )\n    return _create_swin_transformer_v2_cr('swinv2_cr_tiny_384', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_cr_tiny_224(pretrained: bool = False, **kwargs) -> SwinTransformerV2Cr:\n    \"\"\"Swin-T V2 CR @ 224x224, trained ImageNet-1k.\"\"\"\n    model_args = dict(\n        embed_dim=96,\n        depths=(2, 2, 6, 2),\n        num_heads=(3, 6, 12, 24),\n    )\n    return _create_swin_transformer_v2_cr('swinv2_cr_tiny_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_cr_tiny_ns_224(pretrained: bool = False, **kwargs) -> SwinTransformerV2Cr:\n    \"\"\"Swin-T V2 CR @ 224x224, trained ImageNet-1k w/ extra stage norms.\n\n    ** Experimental, may make default if results are improved. 
**\n    \"\"\"\n    model_args = dict(\n        embed_dim=96,\n        depths=(2, 2, 6, 2),\n        num_heads=(3, 6, 12, 24),\n        extra_norm_stage=True,\n    )\n    return _create_swin_transformer_v2_cr('swinv2_cr_tiny_ns_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_cr_small_384(pretrained: bool = False, **kwargs) -> SwinTransformerV2Cr:\n    \"\"\"Swin-S V2 CR @ 384x384, trained ImageNet-1k.\"\"\"\n    model_args = dict(\n        embed_dim=96,\n        depths=(2, 2, 18, 2),\n        num_heads=(3, 6, 12, 24),\n    )\n    return _create_swin_transformer_v2_cr('swinv2_cr_small_384', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_cr_small_224(pretrained: bool = False, **kwargs) -> SwinTransformerV2Cr:\n    \"\"\"Swin-S V2 CR @ 224x224, trained ImageNet-1k.\"\"\"\n    model_args = dict(\n        embed_dim=96,\n        depths=(2, 2, 18, 2),\n        num_heads=(3, 6, 12, 24),\n    )\n    return _create_swin_transformer_v2_cr('swinv2_cr_small_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_cr_small_ns_224(pretrained: bool = False, **kwargs) -> SwinTransformerV2Cr:\n    \"\"\"Swin-S V2 CR @ 224x224, trained ImageNet-1k.\"\"\"\n    model_args = dict(\n        embed_dim=96,\n        depths=(2, 2, 18, 2),\n        num_heads=(3, 6, 12, 24),\n        extra_norm_stage=True,\n    )\n    return _create_swin_transformer_v2_cr('swinv2_cr_small_ns_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_cr_small_ns_256(pretrained: bool = False, **kwargs) -> SwinTransformerV2Cr:\n    \"\"\"Swin-S V2 CR @ 256x256, trained ImageNet-1k.\"\"\"\n    model_args = dict(\n        embed_dim=96,\n        depths=(2, 2, 18, 2),\n        num_heads=(3, 6, 12, 24),\n        extra_norm_stage=True,\n    )\n    return _create_swin_transformer_v2_cr('swinv2_cr_small_ns_256', pretrained=pretrained, **dict(model_args, 
**kwargs))\n\n\n@register_model\ndef swinv2_cr_base_384(pretrained: bool = False, **kwargs) -> SwinTransformerV2Cr:\n    \"\"\"Swin-B V2 CR @ 384x384, trained ImageNet-1k.\"\"\"\n    model_args = dict(\n        embed_dim=128,\n        depths=(2, 2, 18, 2),\n        num_heads=(4, 8, 16, 32),\n    )\n    return _create_swin_transformer_v2_cr('swinv2_cr_base_384', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_cr_base_224(pretrained: bool = False, **kwargs) -> SwinTransformerV2Cr:\n    \"\"\"Swin-B V2 CR @ 224x224, trained ImageNet-1k.\"\"\"\n    model_args = dict(\n        embed_dim=128,\n        depths=(2, 2, 18, 2),\n        num_heads=(4, 8, 16, 32),\n    )\n    return _create_swin_transformer_v2_cr('swinv2_cr_base_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_cr_base_ns_224(pretrained: bool = False, **kwargs) -> SwinTransformerV2Cr:\n    \"\"\"Swin-B V2 CR @ 224x224, trained ImageNet-1k.\"\"\"\n    model_args = dict(\n        embed_dim=128,\n        depths=(2, 2, 18, 2),\n        num_heads=(4, 8, 16, 32),\n        extra_norm_stage=True,\n    )\n    return _create_swin_transformer_v2_cr('swinv2_cr_base_ns_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_cr_large_384(pretrained: bool = False, **kwargs) -> SwinTransformerV2Cr:\n    \"\"\"Swin-L V2 CR @ 384x384, trained ImageNet-1k.\"\"\"\n    model_args = dict(\n        embed_dim=192,\n        depths=(2, 2, 18, 2),\n        num_heads=(6, 12, 24, 48),\n    )\n    return _create_swin_transformer_v2_cr('swinv2_cr_large_384', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_cr_large_224(pretrained: bool = False, **kwargs) -> SwinTransformerV2Cr:\n    \"\"\"Swin-L V2 CR @ 224x224, trained ImageNet-1k.\"\"\"\n    model_args = dict(\n        embed_dim=192,\n        depths=(2, 2, 18, 2),\n        num_heads=(6, 12, 24, 48),\n    )\n    return 
_create_swin_transformer_v2_cr('swinv2_cr_large_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_cr_huge_384(pretrained: bool = False, **kwargs) -> SwinTransformerV2Cr:\n    \"\"\"Swin-H V2 CR @ 384x384, trained ImageNet-1k.\"\"\"\n    model_args = dict(\n        embed_dim=352,\n        depths=(2, 2, 18, 2),\n        num_heads=(11, 22, 44, 88),  # head count not certain for Huge, 384 & 224 trying diff values\n        extra_norm_period=6,\n    )\n    return _create_swin_transformer_v2_cr('swinv2_cr_huge_384', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_cr_huge_224(pretrained: bool = False, **kwargs) -> SwinTransformerV2Cr:\n    \"\"\"Swin-H V2 CR @ 224x224, trained ImageNet-1k.\"\"\"\n    model_args = dict(\n        embed_dim=352,\n        depths=(2, 2, 18, 2),\n        num_heads=(8, 16, 32, 64),  # head count not certain for Huge, 384 & 224 trying diff values\n        extra_norm_period=6,\n    )\n    return _create_swin_transformer_v2_cr('swinv2_cr_huge_224', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_cr_giant_384(pretrained: bool = False, **kwargs) -> SwinTransformerV2Cr:\n    \"\"\"Swin-G V2 CR @ 384x384, trained ImageNet-1k.\"\"\"\n    model_args = dict(\n        embed_dim=512,\n        depths=(2, 2, 42, 2),\n        num_heads=(16, 32, 64, 128),\n        extra_norm_period=6,\n    )\n    return _create_swin_transformer_v2_cr('swinv2_cr_giant_384', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef swinv2_cr_giant_224(pretrained: bool = False, **kwargs) -> SwinTransformerV2Cr:\n    \"\"\"Swin-G V2 CR @ 224x224, trained ImageNet-1k.\"\"\"\n    model_args = dict(\n        embed_dim=512,\n        depths=(2, 2, 42, 2),\n        num_heads=(16, 32, 64, 128),\n        extra_norm_period=6,\n    )\n    return _create_swin_transformer_v2_cr('swinv2_cr_giant_224', pretrained=pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/tiny_vit.py",
    "content": "\"\"\" TinyViT\n\nPaper: `TinyViT: Fast Pretraining Distillation for Small Vision Transformers`\n    - https://arxiv.org/abs/2207.10666\n\nAdapted from official impl at https://github.com/microsoft/Cream/tree/main/TinyViT\n\"\"\"\n\n__all__ = ['TinyVit']\n\nimport itertools\nfrom functools import partial\nfrom typing import Dict, List, Optional, Tuple, Union, Type, Any\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import LayerNorm2d, NormMlpClassifierHead, DropPath,\\\n    trunc_normal_, resize_rel_pos_bias_table_levit, use_fused_attn, calculate_drop_path_rates\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_module\nfrom ._manipulate import checkpoint, checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n\nclass ConvNorm(torch.nn.Sequential):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            ks: int = 1,\n            stride: int = 1,\n            pad: int = 0,\n            dilation: int = 1,\n            groups: int = 1,\n            bn_weight_init: float = 1,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv = nn.Conv2d(in_chs, out_chs, ks, stride, pad, dilation, groups, bias=False, **dd)\n        self.bn = nn.BatchNorm2d(out_chs, **dd)\n        torch.nn.init.constant_(self.bn.weight, bn_weight_init)\n        torch.nn.init.constant_(self.bn.bias, 0)\n\n    @torch.no_grad()\n    def fuse(self):\n        c, bn = self.conv, self.bn\n        w = bn.weight / (bn.running_var + bn.eps) ** 0.5\n        w = c.weight * w[:, None, None, None]\n        b = bn.bias - bn.running_mean * bn.weight / \\\n            (bn.running_var + bn.eps) ** 0.5\n        m = torch.nn.Conv2d(\n          
  w.size(1) * self.conv.groups, w.size(0), w.shape[2:],\n            stride=self.conv.stride, padding=self.conv.padding, dilation=self.conv.dilation, groups=self.conv.groups)\n        m.weight.data.copy_(w)\n        m.bias.data.copy_(b)\n        return m\n\n\nclass PatchEmbed(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            act_layer: Type[nn.Module],\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.stride = 4\n        self.conv1 = ConvNorm(in_chs, out_chs // 2, 3, 2, 1, **dd)\n        self.act = act_layer()\n        self.conv2 = ConvNorm(out_chs // 2, out_chs, 3, 2, 1, **dd)\n\n    def forward(self, x):\n        x = self.conv1(x)\n        x = self.act(x)\n        x = self.conv2(x)\n        return x\n\n\nclass MBConv(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            expand_ratio: float,\n            act_layer: Type[nn.Module],\n            drop_path: float,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        mid_chs = int(in_chs * expand_ratio)\n        self.conv1 = ConvNorm(in_chs, mid_chs, ks=1, **dd)\n        self.act1 = act_layer()\n        self.conv2 = ConvNorm(mid_chs, mid_chs, ks=3, stride=1, pad=1, groups=mid_chs, **dd)\n        self.act2 = act_layer()\n        self.conv3 = ConvNorm(mid_chs, out_chs, ks=1, bn_weight_init=0.0, **dd)\n        self.act3 = act_layer()\n        self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n    def forward(self, x):\n        shortcut = x\n        x = self.conv1(x)\n        x = self.act1(x)\n        x = self.conv2(x)\n        x = self.act2(x)\n        x = self.conv3(x)\n        x = self.drop_path(x)\n        x += shortcut\n        x = self.act3(x)\n        return x\n\n\nclass PatchMerging(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            out_dim: int,\n            act_layer: Type[nn.Module],\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.conv1 = ConvNorm(dim, out_dim, 1, 1, 0, **dd)\n        self.act1 = act_layer()\n        self.conv2 = ConvNorm(out_dim, out_dim, 3, 2, 1, groups=out_dim, **dd)\n        self.act2 = act_layer()\n        self.conv3 = ConvNorm(out_dim, out_dim, 1, 1, 0, **dd)\n\n    def forward(self, x):\n        x = self.conv1(x)\n        x = self.act1(x)\n        x = self.conv2(x)\n        x = self.act2(x)\n        x = self.conv3(x)\n        return x\n\n\nclass ConvLayer(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            depth: int,\n            act_layer: Type[nn.Module],\n            drop_path: Union[float, List[float]] = 0.,\n            conv_expand_ratio: float = 4.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.depth = depth\n        self.blocks = nn.Sequential(*[\n            MBConv(\n                dim,\n                dim,\n                conv_expand_ratio,\n                act_layer,\n                drop_path[i] if isinstance(drop_path, list) else drop_path,\n                **dd,\n            )\n            for i in range(depth)\n        ])\n\n    def forward(self, x):\n        x = self.blocks(x)\n        return x\n\n\nclass NormMlp(nn.Module):\n    def __init__(\n            self,\n            
in_features: int,\n            hidden_features: Optional[int] = None,\n            out_features: Optional[int] = None,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            act_layer: Type[nn.Module] = nn.GELU,\n            drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_features = out_features or in_features\n        hidden_features = hidden_features or in_features\n        self.norm = norm_layer(in_features, **dd)\n        self.fc1 = nn.Linear(in_features, hidden_features, **dd)\n        self.act = act_layer()\n        self.drop1 = nn.Dropout(drop)\n        self.fc2 = nn.Linear(hidden_features, out_features, **dd)\n        self.drop2 = nn.Dropout(drop)\n\n    def forward(self, x):\n        x = self.norm(x)\n        x = self.fc1(x)\n        x = self.act(x)\n        x = self.drop1(x)\n        x = self.fc2(x)\n        x = self.drop2(x)\n        return x\n\n\nclass Attention(torch.nn.Module):\n    fused_attn: torch.jit.Final[bool]\n    attention_bias_cache: Dict[str, torch.Tensor]\n\n    def __init__(\n            self,\n            dim: int,\n            key_dim: int,\n            num_heads: int = 8,\n            attn_ratio: int = 4,\n            resolution: Tuple[int, int] = (14, 14),\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert isinstance(resolution, tuple) and len(resolution) == 2\n        self.num_heads = num_heads\n        self.scale = key_dim ** -0.5\n        self.key_dim = key_dim\n        self.val_dim = int(attn_ratio * key_dim)\n        self.out_dim = self.val_dim * num_heads\n        self.attn_ratio = attn_ratio\n        self.resolution = resolution\n        self.fused_attn = use_fused_attn()\n\n        self.norm = nn.LayerNorm(dim, **dd)\n        self.qkv = nn.Linear(dim, num_heads * (self.val_dim + 2 * key_dim), 
**dd)\n        self.proj = nn.Linear(self.out_dim, dim, **dd)\n\n        N = resolution[0] * resolution[1]\n        num_offsets = resolution[0] * resolution[1]  # unique offset count\n        self.attention_biases = torch.nn.Parameter(torch.empty(num_heads, num_offsets, **dd))\n        self.register_buffer(\n            'attention_bias_idxs',\n            torch.empty((N, N), device=device, dtype=torch.long),\n            persistent=False,\n        )\n        self.attention_bias_cache = {}\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    @torch.no_grad()\n    def train(self, mode=True):\n        super().train(mode)\n        if mode and self.attention_bias_cache:\n            self.attention_bias_cache = {}  # clear ab cache\n\n    def get_attention_biases(self, device: torch.device) -> torch.Tensor:\n        if torch.jit.is_tracing() or self.training:\n            return self.attention_biases[:, self.attention_bias_idxs]\n        else:\n            device_key = str(device)\n            if device_key not in self.attention_bias_cache:\n                self.attention_bias_cache[device_key] = self.attention_biases[:, self.attention_bias_idxs]\n            return self.attention_bias_cache[device_key]\n\n    def forward(self, x):\n        attn_bias = self.get_attention_biases(x.device)\n        B, N, _ = x.shape\n        # Normalization\n        x = self.norm(x)\n        qkv = self.qkv(x)\n        # (B, N, num_heads, d)\n        q, k, v = qkv.view(B, N, self.num_heads, -1).split([self.key_dim, self.key_dim, self.val_dim], dim=3)\n        # (B, num_heads, N, d)\n        q = q.permute(0, 2, 1, 3)\n        k = k.permute(0, 2, 1, 3)\n        v = v.permute(0, 2, 1, 3)\n\n        if self.fused_attn:\n            x = F.scaled_dot_product_attention(q, k, v, attn_mask=attn_bias)\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            attn = attn + attn_bias\n            attn = 
attn.softmax(dim=-1)\n            x = attn @ v\n        x = x.transpose(1, 2).reshape(B, N, self.out_dim)\n        x = self.proj(x)\n        return x\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        nn.init.zeros_(self.attention_biases)\n        self._init_buffers()\n\n    def _init_buffers(self) -> None:\n        \"\"\"Compute and fill non-persistent buffer values.\"\"\"\n        device = self.attention_bias_idxs.device\n        points = list(itertools.product(range(self.resolution[0]), range(self.resolution[1])))\n        N = len(points)\n        attention_offsets = {}\n        idxs = []\n        for p1 in points:\n            for p2 in points:\n                offset = (abs(p1[0] - p2[0]), abs(p1[1] - p2[1]))\n                if offset not in attention_offsets:\n                    attention_offsets[offset] = len(attention_offsets)\n                idxs.append(attention_offsets[offset])\n        self.attention_bias_idxs.copy_(torch.tensor(idxs, device=device, dtype=torch.long).view(N, N))\n        self.attention_bias_cache = {}\n\n    def init_non_persistent_buffers(self) -> None:\n        \"\"\"Initialize non-persistent buffers.\"\"\"\n        self._init_buffers()\n\n\nclass TinyVitBlock(nn.Module):\n    \"\"\" TinyViT Block.\n\n    Args:\n        dim (int): Number of input channels.\n        num_heads (int): Number of attention heads.\n        window_size (int): Window size.\n        mlp_ratio (float): Ratio of mlp hidden dim to embedding dim.\n        drop (float, optional): Dropout rate. Default: 0.0\n        drop_path (float, optional): Stochastic depth rate. Default: 0.0\n        local_conv_size (int): the kernel size of the convolution between\n                               Attention and MLP. Default: 3\n        act_layer: the activation function. 
Default: nn.GELU\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            window_size: int = 7,\n            mlp_ratio: float = 4.,\n            drop: float = 0.,\n            drop_path: float = 0.,\n            local_conv_size: int = 3,\n            act_layer: Type[nn.Module] = nn.GELU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.num_heads = num_heads\n        assert window_size > 0, 'window_size must be greater than 0'\n        self.window_size = window_size\n        self.mlp_ratio = mlp_ratio\n\n        assert dim % num_heads == 0, 'dim must be divisible by num_heads'\n        head_dim = dim // num_heads\n\n        window_resolution = (window_size, window_size)\n        self.attn = Attention(\n            dim,\n            head_dim,\n            num_heads,\n            attn_ratio=1,\n            resolution=window_resolution,\n            **dd,\n        )\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.mlp = NormMlp(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=drop,\n            **dd,\n        )\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n        pad = local_conv_size // 2\n        self.local_conv = ConvNorm(dim, dim, ks=local_conv_size, stride=1, pad=pad, groups=dim, **dd)\n\n    def forward(self, x):\n        B, H, W, C = x.shape\n        L = H * W\n\n        shortcut = x\n        if H == self.window_size and W == self.window_size:\n            x = x.reshape(B, L, C)\n            x = self.attn(x)\n            x = x.view(B, H, W, C)\n        else:\n            pad_b = (self.window_size - H % self.window_size) % self.window_size\n            pad_r = (self.window_size - W % self.window_size) % self.window_size\n            padding = pad_b > 0 or pad_r > 0\n            if padding:\n                x = F.pad(x, (0, 0, 0, pad_r, 0, pad_b))\n\n            # window partition\n            pH, pW = H + pad_b, W + pad_r\n            nH = pH // self.window_size\n            nW = pW // self.window_size\n            x = x.view(B, nH, self.window_size, nW, self.window_size, C).transpose(2, 3).reshape(\n                B * nH * nW, self.window_size * self.window_size, C\n            )\n\n            x = self.attn(x)\n\n            # window reverse\n            x = x.view(B, nH, nW, self.window_size, self.window_size, C).transpose(2, 3).reshape(B, pH, pW, C)\n\n            if padding:\n                x = x[:, :H, :W].contiguous()\n        x = shortcut + self.drop_path1(x)\n\n        x = x.permute(0, 3, 1, 2)\n        x = self.local_conv(x)\n        x = x.reshape(B, C, L).transpose(1, 2)\n\n        x = x + self.drop_path2(self.mlp(x))\n        return x.view(B, H, W, C)\n\n    def extra_repr(self) -> str:\n        return f\"dim={self.dim}, num_heads={self.num_heads}, \" \\\n               f\"window_size={self.window_size}, mlp_ratio={self.mlp_ratio}\"\n\n\nregister_notrace_module(TinyVitBlock)\n\n\nclass TinyVitStage(nn.Module):\n    \"\"\" A basic TinyViT layer for one stage.\n\n    Args:\n        dim (int): Number of input channels.\n        out_dim: the output dimension of the layer\n       
 depth (int): Number of blocks.\n        num_heads (int): Number of attention heads.\n        window_size (int): Local window size.\n        mlp_ratio (float): Ratio of mlp hidden dim to embedding dim.\n        drop (float, optional): Dropout rate. Default: 0.0\n        drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0\n        downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None\n        local_conv_size: the kernel size of the depthwise convolution between attention and MLP. Default: 3\n        act_layer: the activation function. Default: nn.GELU\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            out_dim: int,\n            depth: int,\n            num_heads: int,\n            window_size: int,\n            mlp_ratio: float = 4.,\n            drop: float = 0.,\n            drop_path: Union[float, List[float]] = 0.,\n            downsample: Optional[Type[nn.Module]] = None,\n            local_conv_size: int = 3,\n            act_layer: Type[nn.Module] = nn.GELU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.depth = depth\n        self.out_dim =  out_dim\n\n        # patch merging layer\n        if downsample is not None:\n            self.downsample = downsample(\n                dim=dim,\n                out_dim=out_dim,\n                act_layer=act_layer,\n                **dd,\n            )\n        else:\n            self.downsample = nn.Identity()\n            assert dim == out_dim\n\n        # build blocks\n        self.blocks = nn.Sequential(*[\n            TinyVitBlock(\n                dim=out_dim,\n                num_heads=num_heads,\n                window_size=window_size,\n                mlp_ratio=mlp_ratio,\n                drop=drop,\n                drop_path=drop_path[i] if isinstance(drop_path, list) else drop_path,\n                
local_conv_size=local_conv_size,\n                act_layer=act_layer,\n                **dd,\n            )\n            for i in range(depth)])\n\n    def forward(self, x):\n        x = self.downsample(x)\n        x = x.permute(0, 2, 3, 1)  # BCHW -> BHWC\n        x = self.blocks(x)\n        x = x.permute(0, 3, 1, 2)  # BHWC -> BCHW\n        return x\n\n    def extra_repr(self) -> str:\n        return f\"dim={self.out_dim}, depth={self.depth}\"\n\n\nclass TinyVit(nn.Module):\n    def __init__(\n            self,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            embed_dims: Tuple[int, ...] = (96, 192, 384, 768),\n            depths: Tuple[int, ...] = (2, 2, 6, 2),\n            num_heads: Tuple[int, ...] = (3, 6, 12, 24),\n            window_sizes: Tuple[int, ...] = (7, 7, 14, 7),\n            mlp_ratio: float = 4.,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.1,\n            use_checkpoint: bool = False,\n            mbconv_expand_ratio: float = 4.0,\n            local_conv_size: int = 3,\n            act_layer: Type[nn.Module] = nn.GELU,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.depths = depths\n        self.num_stages = len(depths)\n        self.mlp_ratio = mlp_ratio\n        self.grad_checkpointing = use_checkpoint\n\n        self.patch_embed = PatchEmbed(\n            in_chs=in_chans,\n            out_chs=embed_dims[0],\n            act_layer=act_layer,\n            **dd,\n        )\n\n        # stochastic depth rate rule\n        dpr = calculate_drop_path_rates(drop_path_rate, sum(depths))\n\n        # build stages\n        self.stages = nn.Sequential()\n        stride = self.patch_embed.stride\n        prev_dim = embed_dims[0]\n        self.feature_info = []\n        for 
stage_idx in range(self.num_stages):\n            if stage_idx == 0:\n                stage = ConvLayer(\n                    dim=prev_dim,\n                    depth=depths[stage_idx],\n                    act_layer=act_layer,\n                    drop_path=dpr[:depths[stage_idx]],\n                    conv_expand_ratio=mbconv_expand_ratio,\n                    **dd,\n                )\n            else:\n                out_dim = embed_dims[stage_idx]\n                drop_path_rate = dpr[sum(depths[:stage_idx]):sum(depths[:stage_idx + 1])]\n                stage = TinyVitStage(\n                    dim=embed_dims[stage_idx - 1],\n                    out_dim=out_dim,\n                    depth=depths[stage_idx],\n                    num_heads=num_heads[stage_idx],\n                    window_size=window_sizes[stage_idx],\n                    mlp_ratio=self.mlp_ratio,\n                    drop=drop_rate,\n                    local_conv_size=local_conv_size,\n                    drop_path=drop_path_rate,\n                    downsample=PatchMerging,\n                    act_layer=act_layer,\n                    **dd,\n                )\n                prev_dim = out_dim\n                stride *= 2\n            self.stages.append(stage)\n            self.feature_info += [dict(num_chs=prev_dim, reduction=stride, module=f'stages.{stage_idx}')]\n\n        # Classifier head\n        self.num_features = self.head_hidden_size = embed_dims[-1]\n\n        norm_layer_cf = partial(LayerNorm2d, eps=1e-5)\n        self.head = NormMlpClassifierHead(\n            self.num_features,\n            num_classes,\n            pool_type=global_pool,\n            norm_layer=norm_layer_cf,\n            **dd,\n        )\n\n        # TODO: skip init when on meta device when safe to do so\n        self.init_weights(needs_reset=False)\n\n    def init_weights(self, needs_reset: bool = True):\n        self.apply(partial(self._init_weights, needs_reset=needs_reset))\n\n    def 
_init_weights(self, m: nn.Module, needs_reset: bool = True):\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=.02)\n            if m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n        elif needs_reset and hasattr(m, 'reset_parameters'):\n            m.reset_parameters()\n\n    @torch.jit.ignore\n    def no_weight_decay_keywords(self):\n        return {'attention_biases'}\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {x for x in self.state_dict().keys() if 'attention_biases' in x}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^patch_embed',\n            blocks=r'^stages\\.(\\d+)' if coarse else [\n                (r'^stages\\.(\\d+).downsample', (0,)),\n                (r'^stages\\.(\\d+)\\.\\w+\\.(\\d+)', None),\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, pool_type=global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            
stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n\n        # forward pass\n        x = self.patch_embed(x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(stage, x)\n            else:\n                x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stages), indices)\n        self.stages = self.stages[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.patch_embed(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stages, x)\n        else:\n            x = self.stages(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.head(x, pre_logits=pre_logits) if pre_logits 
else self.head(x)\n        return x\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    if 'model' in state_dict.keys():\n        state_dict = state_dict['model']\n    target_sd = model.state_dict()\n    out_dict = {}\n    for k, v in state_dict.items():\n        if k.endswith('attention_bias_idxs'):\n            continue\n        if 'attention_biases' in k:\n            # TODO: whether move this func into model for dynamic input resolution? (high risk)\n            v = resize_rel_pos_bias_table_levit(v.T, target_sd[k].shape[::-1]).T\n        out_dict[k] = v\n    return out_dict\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000,\n        'mean': IMAGENET_DEFAULT_MEAN,\n        'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.conv1.conv',\n        'classifier': 'head.fc',\n        'pool_size': (7, 7),\n        'input_size': (3, 224, 224),\n        'crop_pct': 0.95,\n        'license': 'apache-2.0',\n        **kwargs,\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'tiny_vit_5m_224.dist_in22k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/wkcn/TinyViT-model-zoo/releases/download/checkpoints/tiny_vit_5m_22k_distill.pth',\n        num_classes=21841\n    ),\n    'tiny_vit_5m_224.dist_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/wkcn/TinyViT-model-zoo/releases/download/checkpoints/tiny_vit_5m_22kto1k_distill.pth'\n    ),\n    'tiny_vit_5m_224.in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/wkcn/TinyViT-model-zoo/releases/download/checkpoints/tiny_vit_5m_1k.pth'\n    ),\n    'tiny_vit_11m_224.dist_in22k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/wkcn/TinyViT-model-zoo/releases/download/checkpoints/tiny_vit_11m_22k_distill.pth',\n        num_classes=21841\n    ),\n  
  'tiny_vit_11m_224.dist_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/wkcn/TinyViT-model-zoo/releases/download/checkpoints/tiny_vit_11m_22kto1k_distill.pth'\n    ),\n    'tiny_vit_11m_224.in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/wkcn/TinyViT-model-zoo/releases/download/checkpoints/tiny_vit_11m_1k.pth'\n    ),\n    'tiny_vit_21m_224.dist_in22k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/wkcn/TinyViT-model-zoo/releases/download/checkpoints/tiny_vit_21m_22k_distill.pth',\n        num_classes=21841\n    ),\n    'tiny_vit_21m_224.dist_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/wkcn/TinyViT-model-zoo/releases/download/checkpoints/tiny_vit_21m_22kto1k_distill.pth'\n    ),\n    'tiny_vit_21m_224.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/wkcn/TinyViT-model-zoo/releases/download/checkpoints/tiny_vit_21m_1k.pth'\n    ),\n    'tiny_vit_21m_384.dist_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/wkcn/TinyViT-model-zoo/releases/download/checkpoints/tiny_vit_21m_22kto1k_384_distill.pth',\n        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0,\n    ),\n    'tiny_vit_21m_512.dist_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        # url='https://github.com/wkcn/TinyViT-model-zoo/releases/download/checkpoints/tiny_vit_21m_22kto1k_512_distill.pth',\n        input_size=(3, 512, 512), pool_size=(16, 16), crop_pct=1.0, crop_mode='squash',\n    ),\n})\n\n\ndef _create_tiny_vit(variant, pretrained=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', (0, 1, 2, 3))\n    model = build_model_with_cfg(\n        TinyVit,\n        variant,\n        pretrained,\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        pretrained_filter_fn=checkpoint_filter_fn,\n        **kwargs\n    )\n    return model\n\n\n@register_model\ndef 
tiny_vit_5m_224(pretrained=False, **kwargs):\n    model_kwargs = dict(\n        embed_dims=[64, 128, 160, 320],\n        depths=[2, 2, 6, 2],\n        num_heads=[2, 4, 5, 10],\n        window_sizes=[7, 7, 14, 7],\n        drop_path_rate=0.0,\n    )\n    model_kwargs.update(kwargs)\n    return _create_tiny_vit('tiny_vit_5m_224', pretrained, **model_kwargs)\n\n\n@register_model\ndef tiny_vit_11m_224(pretrained=False, **kwargs):\n    model_kwargs = dict(\n        embed_dims=[64, 128, 256, 448],\n        depths=[2, 2, 6, 2],\n        num_heads=[2, 4, 8, 14],\n        window_sizes=[7, 7, 14, 7],\n        drop_path_rate=0.1,\n    )\n    model_kwargs.update(kwargs)\n    return _create_tiny_vit('tiny_vit_11m_224', pretrained, **model_kwargs)\n\n\n@register_model\ndef tiny_vit_21m_224(pretrained=False, **kwargs):\n    model_kwargs = dict(\n        embed_dims=[96, 192, 384, 576],\n        depths=[2, 2, 6, 2],\n        num_heads=[3, 6, 12, 18],\n        window_sizes=[7, 7, 14, 7],\n        drop_path_rate=0.2,\n    )\n    model_kwargs.update(kwargs)\n    return _create_tiny_vit('tiny_vit_21m_224', pretrained, **model_kwargs)\n\n\n@register_model\ndef tiny_vit_21m_384(pretrained=False, **kwargs):\n    model_kwargs = dict(\n        embed_dims=[96, 192, 384, 576],\n        depths=[2, 2, 6, 2],\n        num_heads=[3, 6, 12, 18],\n        window_sizes=[12, 12, 24, 12],\n        drop_path_rate=0.1,\n    )\n    model_kwargs.update(kwargs)\n    return _create_tiny_vit('tiny_vit_21m_384', pretrained, **model_kwargs)\n\n\n@register_model\ndef tiny_vit_21m_512(pretrained=False, **kwargs):\n    model_kwargs = dict(\n        embed_dims=[96, 192, 384, 576],\n        depths=[2, 2, 6, 2],\n        num_heads=[3, 6, 12, 18],\n        window_sizes=[16, 16, 32, 16],\n        drop_path_rate=0.1,\n    )\n    model_kwargs.update(kwargs)\n    return _create_tiny_vit('tiny_vit_21m_512', pretrained, **model_kwargs)\n"
  },
  {
    "path": "timm/models/tnt.py",
    "content": "\"\"\" Transformer in Transformer (TNT) in PyTorch\n\nA PyTorch implement of TNT as described in\n'Transformer in Transformer' - https://arxiv.org/abs/2103.00112\n\nThe official mindspore code is released and available at\nhttps://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/TNT\n\nThe official pytorch code is released and available at\nhttps://github.com/huawei-noah/Efficient-AI-Backbones/tree/master/tnt_pytorch\n\"\"\"\nimport math\nfrom typing import List, Optional, Tuple, Union, Type, Any\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD\nfrom timm.layers import Mlp, DropPath, calculate_drop_path_rates, trunc_normal_, _assert, to_2tuple, resample_abs_pos_embed\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint\nfrom ._registry import generate_default_cfgs, register_model\n\n__all__ = ['TNT']  # model_registry will add each entrypoint fn to this\n\n\nclass Attention(nn.Module):\n    \"\"\" Multi-Head Attention\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            hidden_dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.hidden_dim = hidden_dim\n        self.num_heads = num_heads\n        head_dim = hidden_dim // num_heads\n        self.head_dim = head_dim\n        self.scale = head_dim ** -0.5\n\n        self.qk = nn.Linear(dim, hidden_dim * 2, bias=qkv_bias, **dd)\n        self.v = nn.Linear(dim, dim, bias=qkv_bias, **dd)\n        self.attn_drop = nn.Dropout(attn_drop, inplace=True)\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop, inplace=True)\n\n    def 
forward(self, x):\n        B, N, C = x.shape\n        qk = self.qk(x).reshape(B, N, 2, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)\n        q, k = qk.unbind(0)  # make torchscript happy (cannot use tensor as tuple)\n        v = self.v(x).reshape(B, N, self.num_heads, -1).permute(0, 2, 1, 3)\n\n        attn = (q @ k.transpose(-2, -1)) * self.scale\n        attn = attn.softmax(dim=-1)\n        attn = self.attn_drop(attn)\n\n        x = (attn @ v).transpose(1, 2).reshape(B, N, -1)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n\nclass Block(nn.Module):\n    \"\"\" TNT Block\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            dim_out: int,\n            num_pixel: int,\n            num_heads_in: int = 4,\n            num_heads_out: int = 12,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            legacy: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        # Inner transformer\n        self.norm_in = norm_layer(dim, **dd)\n        self.attn_in = Attention(\n            dim,\n            dim,\n            num_heads=num_heads_in,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            **dd,\n        )\n\n        self.norm_mlp_in = norm_layer(dim, **dd)\n        self.mlp_in = Mlp(\n            in_features=dim,\n            hidden_features=int(dim * 4),\n            out_features=dim,\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n        self.legacy = legacy\n        if self.legacy:\n            self.norm1_proj = norm_layer(dim, **dd)\n            
self.proj = nn.Linear(dim * num_pixel, dim_out, bias=True, **dd)\n            self.norm2_proj = None\n        else:\n            self.norm1_proj = norm_layer(dim * num_pixel, **dd)\n            self.proj = nn.Linear(dim * num_pixel, dim_out, bias=False, **dd)\n            self.norm2_proj = norm_layer(dim_out, **dd)\n\n        # Outer transformer\n        self.norm_out = norm_layer(dim_out, **dd)\n        self.attn_out = Attention(\n            dim_out,\n            dim_out,\n            num_heads=num_heads_out,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            **dd,\n        )\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm_mlp = norm_layer(dim_out, **dd)\n        self.mlp = Mlp(\n            in_features=dim_out,\n            hidden_features=int(dim_out * mlp_ratio),\n            out_features=dim_out,\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n\n    def forward(self, pixel_embed, patch_embed):\n        # inner\n        pixel_embed = pixel_embed + self.drop_path(self.attn_in(self.norm_in(pixel_embed)))\n        pixel_embed = pixel_embed + self.drop_path(self.mlp_in(self.norm_mlp_in(pixel_embed)))\n        # outer\n        B, N, C = patch_embed.size()\n        if self.norm2_proj is None:\n            patch_embed = torch.cat([\n                patch_embed[:, 0:1],\n                patch_embed[:, 1:] + self.proj(self.norm1_proj(pixel_embed).reshape(B, N - 1, -1)),\n            ], dim=1)\n        else:\n            patch_embed = torch.cat([\n                patch_embed[:, 0:1],\n                patch_embed[:, 1:] + self.norm2_proj(self.proj(self.norm1_proj(pixel_embed.reshape(B, N - 1, -1)))),\n            ], dim=1)\n        patch_embed = patch_embed + self.drop_path(self.attn_out(self.norm_out(patch_embed)))\n        patch_embed = patch_embed + self.drop_path(self.mlp(self.norm_mlp(patch_embed)))\n     
   return pixel_embed, patch_embed\n\n\nclass PixelEmbed(nn.Module):\n    \"\"\" Image to Pixel Embedding\n    \"\"\"\n\n    def __init__(\n            self,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            patch_size: Union[int, Tuple[int, int]] = 16,\n            in_chans: int = 3,\n            in_dim: int = 48,\n            stride: int = 4,\n            legacy: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        img_size = to_2tuple(img_size)\n        patch_size = to_2tuple(patch_size)\n        # grid_size property necessary for resizing positional embedding\n        self.grid_size = (img_size[0] // patch_size[0], img_size[1] // patch_size[1])\n        num_patches = (self.grid_size[0]) * (self.grid_size[1])\n        self.img_size = img_size\n        self.patch_size = patch_size\n        self.legacy = legacy\n        self.num_patches = num_patches\n        self.in_dim = in_dim\n        new_patch_size = [math.ceil(ps / stride) for ps in patch_size]\n        self.new_patch_size = new_patch_size\n\n        self.proj = nn.Conv2d(in_chans, self.in_dim, kernel_size=7, padding=3, stride=stride, **dd)\n        if self.legacy:\n            self.unfold = nn.Unfold(kernel_size=new_patch_size, stride=new_patch_size)\n        else:\n            self.unfold = nn.Unfold(kernel_size=patch_size, stride=patch_size)\n\n    def feat_ratio(self, as_scalar=True) -> Union[Tuple[int, int], int]:\n        if as_scalar:\n            return max(self.patch_size)\n        else:\n            return self.patch_size\n\n    def dynamic_feat_size(self, img_size: Tuple[int, int]) -> Tuple[int, int]:\n        return img_size[0] // self.patch_size[0], img_size[1] // self.patch_size[1]\n\n    def forward(self, x: torch.Tensor, pixel_pos: torch.Tensor) -> torch.Tensor:\n        B, C, H, W = x.shape\n        _assert(\n            H == self.img_size[0],\n            f\"Input 
image size ({H}*{W}) doesn't match model ({self.img_size[0]}*{self.img_size[1]}).\")\n        _assert(\n            W == self.img_size[1],\n            f\"Input image size ({H}*{W}) doesn't match model ({self.img_size[0]}*{self.img_size[1]}).\")\n        if self.legacy:\n            x = self.proj(x)\n            x = self.unfold(x)\n            x = x.transpose(1, 2).reshape(\n                B * self.num_patches, self.in_dim, self.new_patch_size[0], self.new_patch_size[1])\n        else:\n            x = self.unfold(x)\n            x = x.transpose(1, 2).reshape(B * self.num_patches, C, self.patch_size[0], self.patch_size[1])\n            x = self.proj(x)\n        x = x + pixel_pos\n        x = x.reshape(B * self.num_patches, self.in_dim, -1).transpose(1, 2)\n        return x\n\n\nclass TNT(nn.Module):\n    \"\"\" Transformer in Transformer - https://arxiv.org/abs/2103.00112\n    \"\"\"\n\n    def __init__(\n            self,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            patch_size: Union[int, Tuple[int, int]] = 16,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'token',\n            embed_dim: int = 768,\n            inner_dim: int = 48,\n            depth: int = 12,\n            num_heads_inner: int = 4,\n            num_heads_outer: int = 12,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            drop_rate: float = 0.,\n            pos_drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            first_stride: int = 4,\n            legacy: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert global_pool in ('', 'token', 'avg')\n        self.num_classes = num_classes\n        self.in_chans = 
in_chans\n        self.global_pool = global_pool\n        self.num_features = self.head_hidden_size = self.embed_dim = embed_dim  # for consistency with other models\n        self.num_prefix_tokens = 1\n        self.grad_checkpointing = False\n\n        self.pixel_embed = PixelEmbed(\n            img_size=img_size,\n            patch_size=patch_size,\n            in_chans=in_chans,\n            in_dim=inner_dim,\n            stride=first_stride,\n            legacy=legacy,\n            **dd,\n        )\n        num_patches = self.pixel_embed.num_patches\n        r = self.pixel_embed.feat_ratio() if hasattr(self.pixel_embed, 'feat_ratio') else patch_size\n        self.num_patches = num_patches\n        new_patch_size = self.pixel_embed.new_patch_size\n        num_pixel = new_patch_size[0] * new_patch_size[1]\n\n        self.norm1_proj = norm_layer(num_pixel * inner_dim, **dd)\n        self.proj = nn.Linear(num_pixel * inner_dim, embed_dim, **dd)\n        self.norm2_proj = norm_layer(embed_dim, **dd)\n\n        self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim, **dd))\n        self.patch_pos = nn.Parameter(torch.zeros(1, num_patches + 1, embed_dim, **dd))\n        self.pixel_pos = nn.Parameter(torch.zeros(1, inner_dim, new_patch_size[0], new_patch_size[1], **dd))\n        self.pos_drop = nn.Dropout(p=pos_drop_rate)\n\n        dpr = calculate_drop_path_rates(drop_path_rate, depth)  # stochastic depth decay rule\n        blocks = []\n        for i in range(depth):\n            blocks.append(Block(\n                dim=inner_dim,\n                dim_out=embed_dim,\n                num_pixel=num_pixel,\n                num_heads_in=num_heads_inner,\n                num_heads_out=num_heads_outer,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dpr[i],\n                norm_layer=norm_layer,\n                legacy=legacy,\n    
            **dd,\n            ))\n        self.blocks = nn.ModuleList(blocks)\n        self.feature_info = [\n            dict(module=f'blocks.{i}', num_chs=embed_dim, reduction=r) for i in range(depth)]\n\n        self.norm = norm_layer(embed_dim, **dd)\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = nn.Linear(embed_dim, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n        trunc_normal_(self.cls_token, std=.02)\n        trunc_normal_(self.patch_pos, std=.02)\n        trunc_normal_(self.pixel_pos, std=.02)\n        self.apply(self._init_weights)\n\n    def _init_weights(self, m):\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=.02)\n            if isinstance(m, nn.Linear) and m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n        elif isinstance(m, nn.LayerNorm):\n            nn.init.constant_(m.bias, 0)\n            nn.init.constant_(m.weight, 1.0)\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {'patch_pos', 'pixel_pos', 'cls_token'}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^cls_token|patch_pos|pixel_pos|pixel_embed|norm[12]_proj|proj',  # stem and embed / pos\n            blocks=[\n                (r'^blocks\\.(\\d+)', None),\n                (r'^norm', (99999,)),\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            assert global_pool in ('', 'token', 'avg')\n            self.global_pool = global_pool\n        device = self.head.weight.device if hasattr(self.head, 'weight') else None\n        dtype 
= self.head.weight.dtype if hasattr(self.head, 'weight') else None\n        self.head = nn.Linear(self.embed_dim, num_classes, device=device, dtype=dtype) if num_classes > 0 else nn.Identity()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            return_prefix_tokens: bool = False,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if an int, if is a sequence, select by matching indices\n            return_prefix_tokens: Return both prefix and spatial intermediate tokens\n            norm: Apply norm layer to all intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW', 'NLC'), 'Output format must be one of NCHW or NLC.'\n        reshape = output_fmt == 'NCHW'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n\n        # forward pass\n        B, _, height, width = x.shape\n\n        pixel_embed = self.pixel_embed(x, self.pixel_pos)\n\n        patch_embed = self.norm2_proj(self.proj(self.norm1_proj(pixel_embed.reshape(B, self.num_patches, -1))))\n        patch_embed = torch.cat((self.cls_token.expand(B, -1, -1), patch_embed), dim=1)\n        patch_embed = patch_embed + self.patch_pos\n        patch_embed = self.pos_drop(patch_embed)\n\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n 
           blocks = self.blocks\n        else:\n            blocks = self.blocks[:max_index + 1]\n\n        for i, blk in enumerate(blocks):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                pixel_embed, patch_embed = checkpoint(blk, pixel_embed, patch_embed)\n            else:\n                pixel_embed, patch_embed = blk(pixel_embed, patch_embed)\n            if i in take_indices:\n                # normalize intermediates with final norm layer if enabled\n                intermediates.append(self.norm(patch_embed) if norm else patch_embed)\n\n        # process intermediates\n        if self.num_prefix_tokens:\n            # split prefix (e.g. class, distill) and spatial feature tokens\n            prefix_tokens = [y[:, 0:self.num_prefix_tokens] for y in intermediates]\n            intermediates = [y[:, self.num_prefix_tokens:] for y in intermediates]\n\n        if reshape:\n            # reshape to BCHW output format\n            H, W = self.pixel_embed.dynamic_feat_size((height, width))\n            intermediates = [y.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous() for y in intermediates]\n        if not torch.jit.is_scripting() and return_prefix_tokens:\n            # return_prefix not support in torchscript due to poor type handling\n            intermediates = list(zip(intermediates, prefix_tokens))\n\n        if intermediates_only:\n            return intermediates\n\n        patch_embed = self.norm(patch_embed)\n\n        return patch_embed, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n        self.blocks = self.blocks[:max_index + 1]  # truncate blocks\n        if prune_norm:\n      
      self.norm = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        B = x.shape[0]\n        pixel_embed = self.pixel_embed(x, self.pixel_pos)\n\n        patch_embed = self.norm2_proj(self.proj(self.norm1_proj(pixel_embed.reshape(B, self.num_patches, -1))))\n        patch_embed = torch.cat((self.cls_token.expand(B, -1, -1), patch_embed), dim=1)\n        patch_embed = patch_embed + self.patch_pos\n        patch_embed = self.pos_drop(patch_embed)\n\n        for blk in self.blocks:\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                pixel_embed, patch_embed = checkpoint(blk, pixel_embed, patch_embed)\n            else:\n                pixel_embed, patch_embed = blk(pixel_embed, patch_embed)\n\n        patch_embed = self.norm(patch_embed)\n        return patch_embed\n\n    def forward_head(self, x, pre_logits: bool = False):\n        if self.global_pool:\n            x = x[:, self.num_prefix_tokens:].mean(dim=1) if self.global_pool == 'avg' else x[:, 0]\n        x = self.head_drop(x)\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_INCEPTION_MEAN, 'std': IMAGENET_INCEPTION_STD,\n        'first_conv': 'pixel_embed.proj', 'classifier': 'head',\n        'paper_ids': 'arXiv:2103.00112',\n        'paper_name': 'Transformer in Transformer',\n        'origin_url': 'https://github.com/huawei-noah/Efficient-AI-Backbones/tree/master/tnt_pytorch',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    
'tnt_s_legacy_patch16_224.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/contrastive/pytorch-image-models/releases/download/TNT/tnt_s_patch16_224.pth.tar',\n    ),\n    'tnt_s_patch16_224.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/huawei-noah/Efficient-AI-Backbones/releases/download/tnt/tnt_s_81.5.pth.tar',\n    ),\n    'tnt_b_patch16_224.in1k': _cfg(\n        hf_hub_id='timm/',\n        #url='https://github.com/huawei-noah/Efficient-AI-Backbones/releases/download/tnt/tnt_b_82.9.pth.tar',\n    ),\n})\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    state_dict.pop('outer_tokens', None)\n    if 'patch_pos' in state_dict:\n        out_dict = state_dict\n    else:\n        out_dict = {}\n        for k, v in state_dict.items():\n            k = k.replace('outer_pos', 'patch_pos')\n            k = k.replace('inner_pos', 'pixel_pos')\n            k = k.replace('patch_embed', 'pixel_embed')\n            k = k.replace('proj_norm1', 'norm1_proj')\n            k = k.replace('proj_norm2', 'norm2_proj')\n            k = k.replace('inner_norm1', 'norm_in')\n            k = k.replace('inner_attn', 'attn_in')\n            k = k.replace('inner_norm2', 'norm_mlp_in')\n            k = k.replace('inner_mlp', 'mlp_in')\n            k = k.replace('outer_norm1', 'norm_out')\n            k = k.replace('outer_attn', 'attn_out')\n            k = k.replace('outer_norm2', 'norm_mlp')\n            k = k.replace('outer_mlp', 'mlp')\n            if k == 'pixel_pos' and model.pixel_embed.legacy == False:\n                B, N, C = v.shape\n                H = W = int(N ** 0.5)\n                assert H * W == N\n                v = v.permute(0, 2, 1).reshape(B, C, H, W)\n            out_dict[k] = v\n\n    \"\"\" convert patch embedding weight from manual patchify + linear proj to conv\"\"\"\n    if out_dict['patch_pos'].shape != model.patch_pos.shape:\n        out_dict['patch_pos'] = resample_abs_pos_embed(\n            
out_dict['patch_pos'],\n            new_size=model.pixel_embed.grid_size,\n            num_prefix_tokens=1,\n        )\n    return out_dict\n\n\ndef _create_tnt(variant, pretrained=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', 3)\n    model = build_model_with_cfg(\n        TNT, variant, pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs)\n    return model\n\n\n@register_model\ndef tnt_s_legacy_patch16_224(pretrained=False, **kwargs) -> TNT:\n    model_cfg = dict(\n        patch_size=16, embed_dim=384, inner_dim=24, depth=12, num_heads_outer=6,\n        qkv_bias=False, legacy=True)\n    model = _create_tnt('tnt_s_legacy_patch16_224', pretrained=pretrained, **dict(model_cfg, **kwargs))\n    return model\n\n\n@register_model\ndef tnt_s_patch16_224(pretrained=False, **kwargs) -> TNT:\n    model_cfg = dict(\n        patch_size=16, embed_dim=384, inner_dim=24, depth=12, num_heads_outer=6,\n        qkv_bias=False)\n    model = _create_tnt('tnt_s_patch16_224', pretrained=pretrained, **dict(model_cfg, **kwargs))\n    return model\n\n\n@register_model\ndef tnt_b_patch16_224(pretrained=False, **kwargs) -> TNT:\n    model_cfg = dict(\n        patch_size=16, embed_dim=640, inner_dim=40, depth=12, num_heads_outer=10,\n        qkv_bias=False)\n    model = _create_tnt('tnt_b_patch16_224', pretrained=pretrained, **dict(model_cfg, **kwargs))\n    return model\n"
  },
  {
    "path": "timm/models/tresnet.py",
    "content": "\"\"\"\nTResNet: High Performance GPU-Dedicated Architecture\nhttps://arxiv.org/pdf/2003.13630.pdf\n\nOriginal model: https://github.com/mrT23/TResNet\n\n\"\"\"\nfrom collections import OrderedDict\nfrom functools import partial\nfrom typing import List, Optional, Tuple, Union, Type\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.layers import SpaceToDepth, BlurPool2d, ClassifierHead, SEModule, ConvNormAct, DropPath, calculate_drop_path_rates\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint, checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs, register_model_deprecations\n\n__all__ = ['TResNet']  # model_registry will add each entrypoint fn to this\n\n\nclass BasicBlock(nn.Module):\n    expansion = 1\n\n    def __init__(\n            self,\n            inplanes: int,\n            planes: int,\n            stride: int = 1,\n            downsample: Optional[nn.Module] = None,\n            use_se: bool = True,\n            aa_layer: Optional[Type[nn.Module]] = None,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.downsample = downsample\n        self.stride = stride\n        act_layer = partial(nn.LeakyReLU, negative_slope=1e-3)\n\n        self.conv1 = ConvNormAct(\n            inplanes,\n            planes,\n            kernel_size=3,\n            stride=stride,\n            act_layer=act_layer,\n            aa_layer=aa_layer,\n            **dd,\n        )\n        self.conv2 = ConvNormAct(planes, planes, kernel_size=3, stride=1, apply_act=False, **dd)\n        self.act = nn.ReLU(inplace=True)\n\n        rd_chs = max(planes * self.expansion // 4, 64)\n        self.se = SEModule(planes * self.expansion, rd_channels=rd_chs, **dd) if use_se else None\n        self.drop_path = DropPath(drop_path_rate) 
if drop_path_rate > 0 else nn.Identity()\n\n    def forward(self, x):\n        if self.downsample is not None:\n            shortcut = self.downsample(x)\n        else:\n            shortcut = x\n        out = self.conv1(x)\n        out = self.conv2(out)\n        if self.se is not None:\n            out = self.se(out)\n        out = self.drop_path(out) + shortcut\n        out = self.act(out)\n        return out\n\n\nclass Bottleneck(nn.Module):\n    expansion = 4\n\n    def __init__(\n            self,\n            inplanes: int,\n            planes: int,\n            stride: int = 1,\n            downsample: Optional[nn.Module] = None,\n            use_se: bool = True,\n            act_layer: Optional[Type[nn.Module]] = None,\n            aa_layer: Optional[Type[nn.Module]] = None,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.downsample = downsample\n        self.stride = stride\n        act_layer = act_layer or partial(nn.LeakyReLU, negative_slope=1e-3)\n\n        self.conv1 = ConvNormAct(inplanes, planes, kernel_size=1, stride=1, act_layer=act_layer, **dd)\n        self.conv2 = ConvNormAct(\n            planes,\n            planes,\n            kernel_size=3,\n            stride=stride,\n            act_layer=act_layer,\n            aa_layer=aa_layer,\n            **dd,\n        )\n\n        reduction_chs = max(planes * self.expansion // 8, 64)\n        self.se = SEModule(planes, rd_channels=reduction_chs, **dd) if use_se else None\n\n        self.conv3 = ConvNormAct(planes, planes * self.expansion, kernel_size=1, stride=1, apply_act=False, **dd)\n\n        self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0 else nn.Identity()\n        self.act = nn.ReLU(inplace=True)\n\n    def forward(self, x):\n        if self.downsample is not None:\n            shortcut = self.downsample(x)\n        else:\n     
       shortcut = x\n        out = self.conv1(x)\n        out = self.conv2(out)\n        if self.se is not None:\n            out = self.se(out)\n        out = self.conv3(out)\n        out = self.drop_path(out) + shortcut\n        out = self.act(out)\n        return out\n\n\nclass TResNet(nn.Module):\n    def __init__(\n            self,\n            layers: List[int],\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            width_factor: float = 1.0,\n            v2: bool = False,\n            global_pool: str = 'fast',\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ) -> None:\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n\n        aa_layer = BlurPool2d\n        act_layer = nn.LeakyReLU\n\n        # TResnet stages\n        self.inplanes = int(64 * width_factor)\n        self.planes = int(64 * width_factor)\n        if v2:\n            self.inplanes = self.inplanes // 8 * 8\n            self.planes = self.planes // 8 * 8\n\n        dpr = calculate_drop_path_rates(drop_path_rate, layers, stagewise=True)\n        conv1 = ConvNormAct(in_chans * 16, self.planes, stride=1, kernel_size=3, act_layer=act_layer, **dd)\n        layer1 = self._make_layer(\n            Bottleneck if v2 else BasicBlock,\n            self.planes, layers[0], stride=1, use_se=True, aa_layer=aa_layer, drop_path_rate=dpr[0], **dd)\n        layer2 = self._make_layer(\n            Bottleneck if v2 else BasicBlock,\n            self.planes * 2, layers[1], stride=2, use_se=True, aa_layer=aa_layer, drop_path_rate=dpr[1], **dd)\n        layer3 = self._make_layer(\n            Bottleneck,\n            self.planes * 4, layers[2], stride=2, use_se=True, aa_layer=aa_layer, drop_path_rate=dpr[2], **dd)\n        layer4 = 
self._make_layer(\n            Bottleneck,\n            self.planes * 8, layers[3], stride=2, use_se=False, aa_layer=aa_layer, drop_path_rate=dpr[3], **dd)\n\n        # body\n        self.body = nn.Sequential(OrderedDict([\n            ('s2d', SpaceToDepth()),\n            ('conv1', conv1),\n            ('layer1', layer1),\n            ('layer2', layer2),\n            ('layer3', layer3),\n            ('layer4', layer4),\n        ]))\n\n        self.feature_info = [\n            dict(num_chs=self.planes, reduction=2, module=''),  # Not with S2D?\n            dict(num_chs=self.planes * (Bottleneck.expansion if v2 else 1), reduction=4, module='body.layer1'),\n            dict(num_chs=self.planes * 2 * (Bottleneck.expansion if v2 else 1), reduction=8, module='body.layer2'),\n            dict(num_chs=self.planes * 4 * Bottleneck.expansion, reduction=16, module='body.layer3'),\n            dict(num_chs=self.planes * 8 * Bottleneck.expansion, reduction=32, module='body.layer4'),\n        ]\n\n        # head\n        self.num_features = self.head_hidden_size = (self.planes * 8) * Bottleneck.expansion\n        self.head = ClassifierHead(self.num_features, num_classes, pool_type=global_pool, drop_rate=drop_rate, **dd)\n\n        # model initialization\n        for m in self.modules():\n            if isinstance(m, nn.Conv2d):\n                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='leaky_relu')\n            if isinstance(m, nn.Linear):\n                m.weight.data.normal_(0, 0.01)\n\n        # residual connections special initialization\n        for m in self.modules():\n            if isinstance(m, BasicBlock):\n                nn.init.zeros_(m.conv2.bn.weight)\n            if isinstance(m, Bottleneck):\n                nn.init.zeros_(m.conv3.bn.weight)\n\n    def _make_layer(\n            self,\n            block,\n            planes,\n            blocks,\n            stride=1,\n            use_se=True,\n            aa_layer=None,\n            
drop_path_rate=0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n\n        downsample = None\n        if stride != 1 or self.inplanes != planes * block.expansion:\n            layers = []\n            if stride == 2:\n                # avg pooling before 1x1 conv\n                layers.append(nn.AvgPool2d(kernel_size=2, stride=2, ceil_mode=True, count_include_pad=False))\n            layers += [ConvNormAct(\n                self.inplanes, planes * block.expansion, kernel_size=1, stride=1, apply_act=False, **dd)]\n            downsample = nn.Sequential(*layers)\n\n        layers = []\n        for i in range(blocks):\n            layers.append(block(\n                self.inplanes,\n                planes,\n                stride=stride if i == 0 else 1,\n                downsample=downsample if i == 0 else None,\n                use_se=use_se,\n                aa_layer=aa_layer,\n                drop_path_rate=drop_path_rate[i] if isinstance(drop_path_rate, list) else drop_path_rate,\n                **dd,\n            ))\n            self.inplanes = planes * block.expansion\n        return nn.Sequential(*layers)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(stem=r'^body\\.conv1', blocks=r'^body\\.layer(\\d+)' if coarse else r'^body\\.layer(\\d+)\\.(\\d+)')\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, pool_type=global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n   
         stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        stage_ends = [1, 2, 3, 4, 5]\n        take_indices, max_index = feature_take_indices(len(stage_ends), indices)\n        take_indices = [stage_ends[i] for i in take_indices]\n        max_index = stage_ends[max_index]\n        # forward pass\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.body\n        else:\n            stages = self.body[:max_index + 1]\n\n        for feat_idx, stage in enumerate(stages):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(stage, x)\n            else:\n                x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        stage_ends = [1, 2, 3, 4, 5]\n        
take_indices, max_index = feature_take_indices(len(stage_ends), indices)\n        max_index = stage_ends[max_index]\n        self.body = self.body[:max_index + 1]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = self.body.s2d(x)\n            x = self.body.conv1(x)\n            x = checkpoint_seq([\n                self.body.layer1,\n                self.body.layer2,\n                self.body.layer3,\n                self.body.layer4],\n                x, flatten=True)\n        else:\n            x = self.body(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    if 'body.conv1.conv.weight' in state_dict:\n        return state_dict\n\n    import re\n    state_dict = state_dict.get('model', state_dict)\n    state_dict = state_dict.get('state_dict', state_dict)\n    out_dict = {}\n    for k, v in state_dict.items():\n        k = re.sub(r'conv(\\d+)\\.0.0', lambda x: f'conv{int(x.group(1))}.conv', k)\n        k = re.sub(r'conv(\\d+)\\.0.1', lambda x: f'conv{int(x.group(1))}.bn', k)\n        k = re.sub(r'conv(\\d+)\\.0', lambda x: f'conv{int(x.group(1))}.conv', k)\n        k = re.sub(r'conv(\\d+)\\.1', lambda x: f'conv{int(x.group(1))}.bn', k)\n        k = re.sub(r'downsample\\.(\\d+)\\.0', lambda x: f'downsample.{int(x.group(1))}.conv', k)\n        k = re.sub(r'downsample\\.(\\d+)\\.1', lambda x: f'downsample.{int(x.group(1))}.bn', k)\n        if k.endswith('bn.weight'):\n            # convert weight from inplace_abn to batchnorm\n            v = v.abs().add(1e-5)\n        out_dict[k] 
= v\n    return out_dict\n\n\ndef _create_tresnet(variant, pretrained=False, **kwargs):\n    return build_model_with_cfg(\n        TResNet,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=(1, 2, 3, 4), flatten_sequential=True),\n        **kwargs,\n    )\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bilinear',\n        'mean': (0., 0., 0.), 'std': (1., 1., 1.),\n        'first_conv': 'body.conv1.conv', 'classifier': 'head.fc',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'tresnet_m.miil_in21k_ft_in1k': _cfg(hf_hub_id='timm/'),\n    'tresnet_m.miil_in21k': _cfg(hf_hub_id='timm/', num_classes=11221),\n    'tresnet_m.miil_in1k': _cfg(hf_hub_id='timm/'),\n    'tresnet_l.miil_in1k': _cfg(hf_hub_id='timm/'),\n    'tresnet_xl.miil_in1k': _cfg(hf_hub_id='timm/'),\n    'tresnet_m.miil_in1k_448': _cfg(\n        input_size=(3, 448, 448), pool_size=(14, 14),\n        hf_hub_id='timm/'),\n    'tresnet_l.miil_in1k_448': _cfg(\n        input_size=(3, 448, 448), pool_size=(14, 14),\n        hf_hub_id='timm/'),\n    'tresnet_xl.miil_in1k_448': _cfg(\n        input_size=(3, 448, 448), pool_size=(14, 14),\n        hf_hub_id='timm/'),\n\n    'tresnet_v2_l.miil_in21k_ft_in1k': _cfg(hf_hub_id='timm/'),\n    'tresnet_v2_l.miil_in21k': _cfg(hf_hub_id='timm/', num_classes=11221),\n})\n\n\n@register_model\ndef tresnet_m(pretrained=False, **kwargs) -> TResNet:\n    model_args = dict(layers=[3, 4, 11, 3])\n    return _create_tresnet('tresnet_m', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef tresnet_l(pretrained=False, **kwargs) -> TResNet:\n    model_args = dict(layers=[4, 5, 18, 3], width_factor=1.2)\n    return _create_tresnet('tresnet_l', pretrained=pretrained, **dict(model_args, 
**kwargs))\n\n\n@register_model\ndef tresnet_xl(pretrained=False, **kwargs) -> TResNet:\n    model_args = dict(layers=[4, 5, 24, 3], width_factor=1.3)\n    return _create_tresnet('tresnet_xl', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef tresnet_v2_l(pretrained=False, **kwargs) -> TResNet:\n    model_args = dict(layers=[3, 4, 23, 3], width_factor=1.0, v2=True)\n    return _create_tresnet('tresnet_v2_l', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\nregister_model_deprecations(__name__, {\n    'tresnet_m_miil_in21k': 'tresnet_m.miil_in21k',\n    'tresnet_m_448': 'tresnet_m.miil_in1k_448',\n    'tresnet_l_448': 'tresnet_l.miil_in1k_448',\n    'tresnet_xl_448': 'tresnet_xl.miil_in1k_448',\n})"
  },
  {
    "path": "timm/models/twins.py",
    "content": "\"\"\" Twins\nA PyTorch impl of : `Twins: Revisiting the Design of Spatial Attention in Vision Transformers`\n    - https://arxiv.org/pdf/2104.13840.pdf\n\nCode/weights from https://github.com/Meituan-AutoML/Twins, original copyright/license info below\n\n\"\"\"\n# --------------------------------------------------------\n# Twins\n# Copyright (c) 2021 Meituan\n# Licensed under The Apache 2.0 License [see LICENSE for details]\n# Written by Xinjie Li, Xiangxiang Chu\n# --------------------------------------------------------\nimport math\nfrom functools import partial\nfrom typing import List, Optional, Tuple, Union, Type, Any\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import Mlp, DropPath, to_2tuple, trunc_normal_, use_fused_attn, calculate_drop_path_rates\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_module\nfrom ._registry import register_model, generate_default_cfgs\nfrom .vision_transformer import Attention\n\n__all__ = ['Twins']  # model_registry will add each entrypoint fn to this\n\nSize_ = Tuple[int, int]\n\n\n@register_notrace_module  # reason: FX can't symbolically trace control flow in forward method\nclass LocallyGroupedAttn(nn.Module):\n    \"\"\" LSA: self attention within a group\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            ws: int = 1,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        assert ws != 1\n        super().__init__()\n        assert dim % num_heads == 0, f\"dim {dim} should be divided by num_heads {num_heads}.\"\n\n        self.dim = dim\n        self.num_heads = num_heads\n   
     head_dim = dim // num_heads\n        self.scale = head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n\n        self.qkv = nn.Linear(dim, dim * 3, bias=True, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n        self.ws = ws\n\n    def forward(self, x, size: Size_):\n        # There are two implementations for this function, zero padding or mask. We don't observe obvious difference for\n        # both. You can choose any one, we recommend forward_padding because it's neat. However,\n        # the masking implementation is more reasonable and accurate.\n        B, N, C = x.shape\n        H, W = size\n        x = x.view(B, H, W, C)\n        pad_l = pad_t = 0\n        pad_r = (self.ws - W % self.ws) % self.ws\n        pad_b = (self.ws - H % self.ws) % self.ws\n        x = F.pad(x, (0, 0, pad_l, pad_r, pad_t, pad_b))\n        _, Hp, Wp, _ = x.shape\n        _h, _w = Hp // self.ws, Wp // self.ws\n        x = x.reshape(B, _h, self.ws, _w, self.ws, C).transpose(2, 3)\n        qkv = self.qkv(x).reshape(\n            B, _h * _w, self.ws * self.ws, 3, self.num_heads, C // self.num_heads).permute(3, 0, 1, 4, 2, 5)\n        q, k, v = qkv.unbind(0)\n\n        if self.fused_attn:\n            x = F.scaled_dot_product_attention(\n                q, k, v,\n                dropout_p=self.attn_drop.p if self.training else 0.,\n            )\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x = attn @ v\n\n        x = x.transpose(2, 3).reshape(B, _h, _w, self.ws, self.ws, C)\n        x = x.transpose(2, 3).reshape(B, _h * self.ws, _w * self.ws, C)\n        if pad_r > 0 or pad_b > 0:\n            x = x[:, :H, :W, :].contiguous()\n        x = x.reshape(B, N, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return 
x\n\n    # def forward_mask(self, x, size: Size_):\n    #     B, N, C = x.shape\n    #     H, W = size\n    #     x = x.view(B, H, W, C)\n    #     pad_l = pad_t = 0\n    #     pad_r = (self.ws - W % self.ws) % self.ws\n    #     pad_b = (self.ws - H % self.ws) % self.ws\n    #     x = F.pad(x, (0, 0, pad_l, pad_r, pad_t, pad_b))\n    #     _, Hp, Wp, _ = x.shape\n    #     _h, _w = Hp // self.ws, Wp // self.ws\n    #     mask = torch.zeros((1, Hp, Wp), device=x.device)\n    #     mask[:, -pad_b:, :].fill_(1)\n    #     mask[:, :, -pad_r:].fill_(1)\n    #\n    #     x = x.reshape(B, _h, self.ws, _w, self.ws, C).transpose(2, 3)  # B, _h, _w, ws, ws, C\n    #     mask = mask.reshape(1, _h, self.ws, _w, self.ws).transpose(2, 3).reshape(1,  _h * _w, self.ws * self.ws)\n    #     attn_mask = mask.unsqueeze(2) - mask.unsqueeze(3)  # 1, _h*_w, ws*ws, ws*ws\n    #     attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-1000.0)).masked_fill(attn_mask == 0, float(0.0))\n    #     qkv = self.qkv(x).reshape(\n    #         B, _h * _w, self.ws * self.ws, 3, self.num_heads, C // self.num_heads).permute(3, 0, 1, 4, 2, 5)\n    #     # n_h, B, _w*_h, nhead, ws*ws, dim\n    #     q, k, v = qkv[0], qkv[1], qkv[2]  # B, _h*_w, n_head, ws*ws, dim_head\n    #     attn = (q @ k.transpose(-2, -1)) * self.scale  # B, _h*_w, n_head, ws*ws, ws*ws\n    #     attn = attn + attn_mask.unsqueeze(2)\n    #     attn = attn.softmax(dim=-1)\n    #     attn = self.attn_drop(attn)  # attn @v ->  B, _h*_w, n_head, ws*ws, dim_head\n    #     attn = (attn @ v).transpose(2, 3).reshape(B, _h, _w, self.ws, self.ws, C)\n    #     x = attn.transpose(2, 3).reshape(B, _h * self.ws, _w * self.ws, C)\n    #     if pad_r > 0 or pad_b > 0:\n    #         x = x[:, :H, :W, :].contiguous()\n    #     x = x.reshape(B, N, C)\n    #     x = self.proj(x)\n    #     x = self.proj_drop(x)\n    #     return x\n\n\nclass GlobalSubSampleAttn(nn.Module):\n    \"\"\" GSA: using a  key to summarize the information for a group 
to be efficient.\n    \"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            sr_ratio: int = 1,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert dim % num_heads == 0, f\"dim {dim} should be divided by num_heads {num_heads}.\"\n\n        self.dim = dim\n        self.num_heads = num_heads\n        head_dim = dim // num_heads\n        self.scale = head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n\n        self.q = nn.Linear(dim, dim, bias=True, **dd)\n        self.kv = nn.Linear(dim, dim * 2, bias=True, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n        self.sr_ratio = sr_ratio\n        if sr_ratio > 1:\n            self.sr = nn.Conv2d(dim, dim, kernel_size=sr_ratio, stride=sr_ratio, **dd)\n            self.norm = nn.LayerNorm(dim, **dd)\n        else:\n            self.sr = None\n            self.norm = None\n\n    def forward(self, x, size: Size_):\n        B, N, C = x.shape\n        q = self.q(x).reshape(B, N, self.num_heads, C // self.num_heads).permute(0, 2, 1, 3)\n\n        if self.sr is not None:\n            x = x.permute(0, 2, 1).reshape(B, C, *size)\n            x = self.sr(x).reshape(B, C, -1).permute(0, 2, 1)\n            x = self.norm(x)\n        kv = self.kv(x).reshape(B, -1, 2, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4)\n        k, v = kv.unbind(0)\n\n        if self.fused_attn:\n            x = torch.nn.functional.scaled_dot_product_attention(\n                q, k, v,\n                dropout_p=self.attn_drop.p if self.training else 0.,\n            )\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n    
        attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x = attn @ v\n\n        x = x.transpose(1, 2).reshape(B, N, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n\n        return x\n\n\nclass Block(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            sr_ratio: int = 1,\n            ws: Optional[int] = None,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.norm1 = norm_layer(dim, **dd)\n        if ws is None:\n            self.attn = Attention(dim, num_heads, False, None, attn_drop, proj_drop, **dd)\n        elif ws == 1:\n            self.attn = GlobalSubSampleAttn(dim, num_heads, attn_drop, proj_drop, sr_ratio, **dd)\n        else:\n            self.attn = LocallyGroupedAttn(dim, num_heads, attn_drop, proj_drop, ws, **dd)\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp = Mlp(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n    def forward(self, x, size: Size_):\n        x = x + self.drop_path1(self.attn(self.norm1(x), size))\n        x = x + self.drop_path2(self.mlp(self.norm2(x)))\n        return x\n\n\nclass PosConv(nn.Module):\n    # PEG  from https://arxiv.org/abs/2102.10882\n    def __init__(\n            self,\n            in_chans: int,\n            embed_dim: int = 768,\n            stride: int = 1,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.proj = nn.Sequential(\n            nn.Conv2d(in_chans, embed_dim, 3, stride, 1, bias=True, groups=embed_dim, **dd),\n        )\n        self.stride = stride\n\n    def forward(self, x, size: Size_):\n        B, N, C = x.shape\n        cnn_feat_token = x.transpose(1, 2).view(B, C, *size)\n        x = self.proj(cnn_feat_token)\n        if self.stride == 1:\n            x += cnn_feat_token\n        x = x.flatten(2).transpose(1, 2)\n        return x\n\n    def no_weight_decay(self):\n        return ['proj.%d.weight' % i for i in range(4)]\n\n\nclass PatchEmbed(nn.Module):\n    \"\"\" Image to Patch Embedding\n    \"\"\"\n\n    def __init__(\n            self,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            patch_size: Union[int, Tuple[int, int]] = 16,\n            in_chans: int = 3,\n            embed_dim: int = 768,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        img_size = to_2tuple(img_size)\n        patch_size = to_2tuple(patch_size)\n\n        self.img_size = img_size\n        self.patch_size = patch_size\n        assert img_size[0] % patch_size[0] == 0 and img_size[1] % patch_size[1] == 0, \\\n            f\"img_size {img_size} should be divided by patch_size {patch_size}.\"\n        self.H, self.W = img_size[0] // patch_size[0], img_size[1] // patch_size[1]\n        self.num_patches = self.H 
* self.W\n        self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size, **dd)\n        self.norm = nn.LayerNorm(embed_dim, **dd)\n\n    def forward(self, x) -> Tuple[torch.Tensor, Size_]:\n        B, C, H, W = x.shape\n\n        x = self.proj(x).flatten(2).transpose(1, 2)\n        x = self.norm(x)\n        out_size = (H // self.patch_size[0], W // self.patch_size[1])\n\n        return x, out_size\n\n\nclass Twins(nn.Module):\n    \"\"\" Twins Vision Transformer (Revisiting Spatial Attention)\n\n    Adapted from PVT (PyramidVisionTransformer) class at https://github.com/whai362/PVT.git\n    \"\"\"\n    def __init__(\n            self,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            patch_size: int = 4,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            embed_dims: Tuple[int, ...] = (64, 128, 256, 512),\n            num_heads: Tuple[int, ...] = (1, 2, 4, 8),\n            mlp_ratios: Tuple[float, ...] = (4, 4, 4, 4),\n            depths: Tuple[int, ...] = (3, 4, 6, 3),\n            sr_ratios: Tuple[int, ...] 
= (8, 4, 2, 1),\n            wss: Optional[Tuple[int, ...]] = None,\n            drop_rate: float = 0.,\n            pos_drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            norm_layer: Type[nn.Module] = partial(nn.LayerNorm, eps=1e-6),\n            block_cls: Any = Block,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.depths = depths\n        self.embed_dims = embed_dims\n        self.num_features = self.head_hidden_size = embed_dims[-1]\n        self.grad_checkpointing = False\n\n        img_size = to_2tuple(img_size)\n        prev_chs = in_chans\n        self.patch_embeds = nn.ModuleList()\n        self.pos_drops = nn.ModuleList()\n        for i in range(len(depths)):\n            self.patch_embeds.append(PatchEmbed(img_size, patch_size, prev_chs, embed_dims[i], **dd))\n            self.pos_drops.append(nn.Dropout(p=pos_drop_rate))\n            prev_chs = embed_dims[i]\n            img_size = tuple(t // patch_size for t in img_size)\n            patch_size = 2\n\n        self.blocks = nn.ModuleList()\n        self.feature_info = []\n        dpr = calculate_drop_path_rates(drop_path_rate, sum(depths))  # stochastic depth decay rule\n        cur = 0\n        for k in range(len(depths)):\n            _block = nn.ModuleList([block_cls(\n                dim=embed_dims[k],\n                num_heads=num_heads[k],\n                mlp_ratio=mlp_ratios[k],\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dpr[cur + i],\n                norm_layer=norm_layer,\n                sr_ratio=sr_ratios[k],\n                ws=1 if wss is None or i % 2 == 1 else wss[k],\n                **dd,\n        
    ) for i in range(depths[k])])\n            self.blocks.append(_block)\n            self.feature_info += [dict(module=f'block.{k}', num_chs=embed_dims[k], reduction=2**(2+k))]\n            cur += depths[k]\n\n        self.pos_block = nn.ModuleList([PosConv(embed_dim, embed_dim, **dd) for embed_dim in embed_dims])\n\n        self.norm = norm_layer(self.num_features, **dd)\n\n        # classification head\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = nn.Linear(self.num_features, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n        # init weights\n        self.apply(self._init_weights)\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return set(['pos_block.' + n for n, p in self.pos_block.named_parameters()])\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        matcher = dict(\n            stem=r'^patch_embeds.0',  # stem and embed\n            blocks=[\n                (r'^(?:blocks|patch_embeds|pos_block)\\.(\\d+)', None),\n                ('^norm', (99999,))\n            ] if coarse else [\n                (r'^blocks\\.(\\d+)\\.(\\d+)', None),\n                (r'^(?:patch_embeds|pos_block)\\.(\\d+)', (0,)),\n                (r'^norm', (99999,))\n            ]\n        )\n        return matcher\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, 'gradient checkpointing not supported'\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            assert global_pool in ('', 'avg')\n            self.global_pool = global_pool\n        device = self.head.weight.device if hasattr(self.head, 'weight') else None\n        dtype = self.head.weight.dtype if hasattr(self.head, 'weight') else None\n        self.head = 
nn.Linear(self.num_features, num_classes, device=device, dtype=dtype) if num_classes > 0 else nn.Identity()\n\n    def _init_weights(self, m):\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=.02)\n            if isinstance(m, nn.Linear) and m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n        elif isinstance(m, nn.LayerNorm):\n            nn.init.constant_(m.bias, 0)\n            nn.init.constant_(m.weight, 1.0)\n        elif isinstance(m, nn.Conv2d):\n            fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n            fan_out //= m.groups\n            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))\n            if m.bias is not None:\n                m.bias.data.zero_()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to all intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt == 'NCHW', 'Output shape for Twins must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n\n        # FIXME slice block/pos_block if < max\n\n        # forward pass\n        B, _, height, width = x.shape\n        for i, (embed, drop, blocks, 
pos_blk) in enumerate(zip(\n                self.patch_embeds, self.pos_drops, self.blocks, self.pos_block)\n        ):\n            x, size = embed(x)\n            x = drop(x)\n            for j, blk in enumerate(blocks):\n                x = blk(x, size)\n                if j == 0:\n                    x = pos_blk(x, size)  # PEG here\n\n            if i < len(self.depths) - 1:\n                x = x.reshape(B, *size, -1).permute(0, 3, 1, 2).contiguous()\n                if i in take_indices:\n                    intermediates.append(x)\n            else:\n                if i in take_indices:\n                    # only last feature can be normed\n                    x_feat = self.norm(x) if norm else x\n                    intermediates.append(x_feat.reshape(B, *size, -1).permute(0, 3, 1, 2).contiguous())\n\n        if intermediates_only:\n            return intermediates\n\n        x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n        # FIXME add block pruning\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        B = x.shape[0]\n        for i, (embed, drop, blocks, pos_blk) in enumerate(\n                zip(self.patch_embeds, self.pos_drops, self.blocks, self.pos_block)):\n            x, size = embed(x)\n            x = drop(x)\n            for j, blk in enumerate(blocks):\n                x = blk(x, size)\n                if j == 0:\n                    x = pos_blk(x, size)  # PEG here\n            if i < len(self.depths) - 1:\n                x = 
x.reshape(B, *size, -1).permute(0, 3, 1, 2).contiguous()\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        if self.global_pool == 'avg':\n            x = x.mean(dim=1)\n        x = self.head_drop(x)\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_twins(variant, pretrained=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', 4)\n    model = build_model_with_cfg(\n        Twins, variant, pretrained,\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embeds.0.proj', 'classifier': 'head',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'twins_pcpvt_small.in1k': _cfg(hf_hub_id='timm/'),\n    'twins_pcpvt_base.in1k': _cfg(hf_hub_id='timm/'),\n    'twins_pcpvt_large.in1k': _cfg(hf_hub_id='timm/'),\n    'twins_svt_small.in1k': _cfg(hf_hub_id='timm/'),\n    'twins_svt_base.in1k': _cfg(hf_hub_id='timm/'),\n    'twins_svt_large.in1k': _cfg(hf_hub_id='timm/'),\n})\n\n\n@register_model\ndef twins_pcpvt_small(pretrained=False, **kwargs) -> Twins:\n    model_args = dict(\n        patch_size=4, embed_dims=[64, 128, 320, 512], num_heads=[1, 2, 5, 8], mlp_ratios=[8, 8, 4, 4],\n        depths=[3, 4, 6, 3], sr_ratios=[8, 4, 2, 1])\n    return _create_twins('twins_pcpvt_small', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef twins_pcpvt_base(pretrained=False, **kwargs) -> Twins:\n    model_args = dict(\n   
     patch_size=4, embed_dims=[64, 128, 320, 512], num_heads=[1, 2, 5, 8], mlp_ratios=[8, 8, 4, 4],\n        depths=[3, 4, 18, 3], sr_ratios=[8, 4, 2, 1])\n    return _create_twins('twins_pcpvt_base', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef twins_pcpvt_large(pretrained=False, **kwargs) -> Twins:\n    model_args = dict(\n        patch_size=4, embed_dims=[64, 128, 320, 512], num_heads=[1, 2, 5, 8], mlp_ratios=[8, 8, 4, 4],\n        depths=[3, 8, 27, 3], sr_ratios=[8, 4, 2, 1])\n    return _create_twins('twins_pcpvt_large', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef twins_svt_small(pretrained=False, **kwargs) -> Twins:\n    model_args = dict(\n        patch_size=4, embed_dims=[64, 128, 256, 512], num_heads=[2, 4, 8, 16], mlp_ratios=[4, 4, 4, 4],\n        depths=[2, 2, 10, 4], wss=[7, 7, 7, 7], sr_ratios=[8, 4, 2, 1])\n    return _create_twins('twins_svt_small', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef twins_svt_base(pretrained=False, **kwargs) -> Twins:\n    model_args = dict(\n        patch_size=4, embed_dims=[96, 192, 384, 768], num_heads=[3, 6, 12, 24], mlp_ratios=[4, 4, 4, 4],\n        depths=[2, 2, 18, 2], wss=[7, 7, 7, 7], sr_ratios=[8, 4, 2, 1])\n    return _create_twins('twins_svt_base', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef twins_svt_large(pretrained=False, **kwargs) -> Twins:\n    model_args = dict(\n        patch_size=4, embed_dims=[128, 256, 512, 1024], num_heads=[4, 8, 16, 32], mlp_ratios=[4, 4, 4, 4],\n        depths=[2, 2, 18, 2], wss=[7, 7, 7, 7], sr_ratios=[8, 4, 2, 1])\n    return _create_twins('twins_svt_large', pretrained=pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/vgg.py",
    "content": "\"\"\"VGG\n\nAdapted from https://github.com/pytorch/vision 'vgg.py' (BSD-3-Clause) with a few changes for\ntimm functionality.\n\nCopyright 2021 Ross Wightman\n\"\"\"\nfrom typing import Any, Dict, List, Optional, Type, Union, cast\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import ClassifierHead\nfrom ._builder import build_model_with_cfg\nfrom ._features_fx import register_notrace_module\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['VGG']\n\n\ncfgs: Dict[str, List[Union[str, int]]] = {\n    'vgg11': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],\n    'vgg13': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],\n    'vgg16': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'],\n    'vgg19': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'],\n}\n\n\n@register_notrace_module  # reason: FX can't symbolically trace control flow in forward method\nclass ConvMlp(nn.Module):\n    \"\"\"Convolutional MLP block for VGG head.\n\n    Replaces traditional Linear layers with Conv2d layers in the classifier.\n    \"\"\"\n\n    def __init__(\n            self,\n            in_features: int = 512,\n            out_features: int = 4096,\n            kernel_size: int = 7,\n            mlp_ratio: float = 1.0,\n            drop_rate: float = 0.2,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            conv_layer: Type[nn.Module] = nn.Conv2d,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize ConvMlp.\n\n        Args:\n            in_features: Number of input features.\n            out_features: Number of output features.\n            kernel_size: Kernel size for first conv layer.\n            mlp_ratio: Ratio for hidden layer size.\n            
drop_rate: Dropout rate.\n            act_layer: Activation layer type.\n            conv_layer: Convolution layer type.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.input_kernel_size = kernel_size\n        mid_features = int(out_features * mlp_ratio)\n        self.fc1 = conv_layer(in_features, mid_features, kernel_size, bias=True, **dd)\n        self.act1 = act_layer(True)\n        self.drop = nn.Dropout(drop_rate)\n        self.fc2 = conv_layer(mid_features, out_features, 1, bias=True, **dd)\n        self.act2 = act_layer(True)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Output tensor.\n        \"\"\"\n        if x.shape[-2] < self.input_kernel_size or x.shape[-1] < self.input_kernel_size:\n            # keep the input size >= 7x7\n            output_size = (max(self.input_kernel_size, x.shape[-2]), max(self.input_kernel_size, x.shape[-1]))\n            x = F.adaptive_avg_pool2d(x, output_size)\n        x = self.fc1(x)\n        x = self.act1(x)\n        x = self.drop(x)\n        x = self.fc2(x)\n        x = self.act2(x)\n        return x\n\n\nclass VGG(nn.Module):\n    \"\"\"VGG model architecture.\n\n    Based on `Very Deep Convolutional Networks for Large-Scale Image Recognition`\n    - https://arxiv.org/abs/1409.1556\n    \"\"\"\n\n    def __init__(\n            self,\n            cfg: List[Any],\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            output_stride: int = 32,\n            mlp_ratio: float = 1.0,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            conv_layer: Type[nn.Module] = nn.Conv2d,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            global_pool: str = 'avg',\n            drop_rate: float = 0.,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize VGG 
model.\n\n        Args:\n            cfg: Configuration list defining network architecture.\n            num_classes: Number of classes for classification.\n            in_chans: Number of input channels.\n            output_stride: Output stride of network.\n            mlp_ratio: Ratio for MLP hidden layer size.\n            act_layer: Activation layer type.\n            conv_layer: Convolution layer type.\n            norm_layer: Normalization layer type.\n            global_pool: Global pooling type.\n            drop_rate: Dropout rate.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert output_stride == 32\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n        self.use_norm = norm_layer is not None\n        self.feature_info = []\n\n        prev_chs = in_chans\n        net_stride = 1\n        pool_layer = nn.MaxPool2d\n        layers: List[nn.Module] = []\n        for v in cfg:\n            last_idx = len(layers) - 1\n            if v == 'M':\n                self.feature_info.append(dict(num_chs=prev_chs, reduction=net_stride, module=f'features.{last_idx}'))\n                layers += [pool_layer(kernel_size=2, stride=2)]\n                net_stride *= 2\n            else:\n                v = cast(int, v)\n                conv2d = conv_layer(prev_chs, v, kernel_size=3, padding=1, **dd)\n                if norm_layer is not None:\n                    layers += [conv2d, norm_layer(v, **dd), act_layer(inplace=True)]\n                else:\n                    layers += [conv2d, act_layer(inplace=True)]\n                prev_chs = v\n        self.features = nn.Sequential(*layers)\n        self.feature_info.append(dict(num_chs=prev_chs, reduction=net_stride, module=f'features.{len(layers) - 1}'))\n\n        self.num_features = prev_chs\n        self.head_hidden_size = 4096\n        self.pre_logits = 
ConvMlp(\n            prev_chs,\n            self.head_hidden_size,\n            7,\n            mlp_ratio=mlp_ratio,\n            drop_rate=drop_rate,\n            act_layer=act_layer,\n            conv_layer=conv_layer,\n            **dd,\n        )\n        self.head = ClassifierHead(\n            self.head_hidden_size,\n            num_classes,\n            pool_type=global_pool,\n            drop_rate=drop_rate,\n            **dd,\n        )\n\n        self._initialize_weights()\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        \"\"\"Group matcher for parameter groups.\n\n        Args:\n            coarse: Whether to use coarse grouping.\n\n        Returns:\n            Dictionary of grouped parameters.\n        \"\"\"\n        # this treats BN layers as separate groups for bn variants, a lot of effort to fix that\n        return dict(stem=r'^features\\.0', blocks=r'^features\\.(\\d+)')\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Enable or disable gradient checkpointing.\n\n        Args:\n            enable: Whether to enable gradient checkpointing.\n        \"\"\"\n        assert not enable, 'gradient checkpointing not supported'\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        \"\"\"Get the classifier module.\n\n        Returns:\n            Classifier module.\n        \"\"\"\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None) -> None:\n        \"\"\"Reset the classifier.\n\n        Args:\n            num_classes: Number of classes for new classifier.\n            global_pool: Global pooling type.\n        \"\"\"\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through feature extraction layers.\n\n        Args:\n   
         x: Input tensor.\n\n        Returns:\n            Feature tensor.\n        \"\"\"\n        x = self.features(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        \"\"\"Forward pass through head.\n\n        Args:\n            x: Input features.\n            pre_logits: Return features before final linear layer.\n\n        Returns:\n            Classification logits or features.\n        \"\"\"\n        x = self.pre_logits(x)\n        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Output logits.\n        \"\"\"\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n    def _initialize_weights(self) -> None:\n        \"\"\"Initialize model weights.\"\"\"\n        for m in self.modules():\n            if isinstance(m, nn.Conv2d):\n                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')\n                if m.bias is not None:\n                    nn.init.constant_(m.bias, 0)\n            elif isinstance(m, nn.BatchNorm2d):\n                nn.init.constant_(m.weight, 1)\n                nn.init.constant_(m.bias, 0)\n            elif isinstance(m, nn.Linear):\n                nn.init.normal_(m.weight, 0, 0.01)\n                nn.init.constant_(m.bias, 0)\n\n\ndef _filter_fn(state_dict: dict) -> Dict[str, torch.Tensor]:\n    \"\"\"Convert patch embedding weight from manual patchify + linear proj to conv.\n\n    Args:\n        state_dict: State dictionary to filter.\n\n    Returns:\n        Filtered state dictionary.\n    \"\"\"\n    out_dict = {}\n    for k, v in state_dict.items():\n        k_r = k\n        k_r = k_r.replace('classifier.0', 'pre_logits.fc1')\n        k_r = k_r.replace('classifier.3', 'pre_logits.fc2')\n        k_r = 
k_r.replace('classifier.6', 'head.fc')\n        if 'classifier.0.weight' in k:\n            v = v.reshape(-1, 512, 7, 7)\n        if 'classifier.3.weight' in k:\n            v = v.reshape(-1, 4096, 1, 1)\n        out_dict[k_r] = v\n    return out_dict\n\n\ndef _create_vgg(variant: str, pretrained: bool, **kwargs: Any) -> VGG:\n    \"\"\"Create a VGG model.\n\n    Args:\n        variant: Model variant name.\n        pretrained: Load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    Returns:\n        VGG model instance.\n    \"\"\"\n    cfg = variant.split('_')[0]\n    # NOTE: VGG is one of few models with stride==1 features w/ 6 out_indices [0..5]\n    out_indices = kwargs.pop('out_indices', (0, 1, 2, 3, 4, 5))\n    model = build_model_with_cfg(\n        VGG,\n        variant,\n        pretrained,\n        model_cfg=cfgs[cfg],\n        feature_cfg=dict(flatten_sequential=True, out_indices=out_indices),\n        pretrained_filter_fn=_filter_fn,\n        **kwargs,\n    )\n    return model\n\n\ndef _cfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    \"\"\"Create default configuration dictionary.\n\n    Args:\n        url: Model weight URL.\n        **kwargs: Additional configuration options.\n\n    Returns:\n        Configuration dictionary.\n    \"\"\"\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bilinear',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'features.0', 'classifier': 'head.fc',\n        'license': 'bsd-3-clause',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'vgg11.tv_in1k': _cfg(hf_hub_id='timm/'),\n    'vgg13.tv_in1k': _cfg(hf_hub_id='timm/'),\n    'vgg16.tv_in1k': _cfg(hf_hub_id='timm/'),\n    'vgg19.tv_in1k': _cfg(hf_hub_id='timm/'),\n    'vgg11_bn.tv_in1k': _cfg(hf_hub_id='timm/'),\n    'vgg13_bn.tv_in1k': _cfg(hf_hub_id='timm/'),\n    
'vgg16_bn.tv_in1k': _cfg(hf_hub_id='timm/'),\n    'vgg19_bn.tv_in1k': _cfg(hf_hub_id='timm/'),\n})\n\n\n@register_model\ndef vgg11(pretrained: bool = False, **kwargs: Any) -> VGG:\n    r\"\"\"VGG 11-layer model (configuration \"A\") from\n    `\"Very Deep Convolutional Networks For Large-Scale Image Recognition\" <https://arxiv.org/pdf/1409.1556.pdf>`._\n    \"\"\"\n    model_args = dict(**kwargs)\n    return _create_vgg('vgg11', pretrained=pretrained, **model_args)\n\n\n@register_model\ndef vgg11_bn(pretrained: bool = False, **kwargs: Any) -> VGG:\n    r\"\"\"VGG 11-layer model (configuration \"A\") with batch normalization\n    `\"Very Deep Convolutional Networks For Large-Scale Image Recognition\" <https://arxiv.org/pdf/1409.1556.pdf>`._\n    \"\"\"\n    model_args = dict(norm_layer=nn.BatchNorm2d, **kwargs)\n    return _create_vgg('vgg11_bn', pretrained=pretrained, **model_args)\n\n\n@register_model\ndef vgg13(pretrained: bool = False, **kwargs: Any) -> VGG:\n    r\"\"\"VGG 13-layer model (configuration \"B\")\n    `\"Very Deep Convolutional Networks For Large-Scale Image Recognition\" <https://arxiv.org/pdf/1409.1556.pdf>`._\n    \"\"\"\n    model_args = dict(**kwargs)\n    return _create_vgg('vgg13', pretrained=pretrained, **model_args)\n\n\n@register_model\ndef vgg13_bn(pretrained: bool = False, **kwargs: Any) -> VGG:\n    r\"\"\"VGG 13-layer model (configuration \"B\") with batch normalization\n    `\"Very Deep Convolutional Networks For Large-Scale Image Recognition\" <https://arxiv.org/pdf/1409.1556.pdf>`._\n    \"\"\"\n    model_args = dict(norm_layer=nn.BatchNorm2d, **kwargs)\n    return _create_vgg('vgg13_bn', pretrained=pretrained, **model_args)\n\n\n@register_model\ndef vgg16(pretrained: bool = False, **kwargs: Any) -> VGG:\n    r\"\"\"VGG 16-layer model (configuration \"D\")\n    `\"Very Deep Convolutional Networks For Large-Scale Image Recognition\" <https://arxiv.org/pdf/1409.1556.pdf>`._\n    \"\"\"\n    model_args = dict(**kwargs)\n    return 
_create_vgg('vgg16', pretrained=pretrained, **model_args)\n\n\n@register_model\ndef vgg16_bn(pretrained: bool = False, **kwargs: Any) -> VGG:\n    r\"\"\"VGG 16-layer model (configuration \"D\") with batch normalization\n    `\"Very Deep Convolutional Networks For Large-Scale Image Recognition\" <https://arxiv.org/pdf/1409.1556.pdf>`._\n    \"\"\"\n    model_args = dict(norm_layer=nn.BatchNorm2d, **kwargs)\n    return _create_vgg('vgg16_bn', pretrained=pretrained, **model_args)\n\n\n@register_model\ndef vgg19(pretrained: bool = False, **kwargs: Any) -> VGG:\n    r\"\"\"VGG 19-layer model (configuration \"E\")\n    `\"Very Deep Convolutional Networks For Large-Scale Image Recognition\" <https://arxiv.org/pdf/1409.1556.pdf>`._\n    \"\"\"\n    model_args = dict(**kwargs)\n    return _create_vgg('vgg19', pretrained=pretrained, **model_args)\n\n\n@register_model\ndef vgg19_bn(pretrained: bool = False, **kwargs: Any) -> VGG:\n    r\"\"\"VGG 19-layer model (configuration 'E') with batch normalization\n    `\"Very Deep Convolutional Networks For Large-Scale Image Recognition\" <https://arxiv.org/pdf/1409.1556.pdf>`._\n    \"\"\"\n    model_args = dict(norm_layer=nn.BatchNorm2d, **kwargs)\n    return _create_vgg('vgg19_bn', pretrained=pretrained, **model_args)\n"
  },
  {
    "path": "timm/models/visformer.py",
    "content": "\"\"\" Visformer\n\nPaper: Visformer: The Vision-friendly Transformer - https://arxiv.org/abs/2104.12533\n\nFrom original at https://github.com/danczs/Visformer\n\nModifications and additions for timm hacked together by / Copyright 2021, Ross Wightman\n\"\"\"\n\nfrom typing import Optional, Union, Type, Any\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import to_2tuple, trunc_normal_, DropPath, calculate_drop_path_rates, PatchEmbed, LayerNorm2d, create_classifier, use_fused_attn\n\nfrom ._builder import build_model_with_cfg\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['Visformer']\n\n\nclass SpatialMlp(nn.Module):\n    def __init__(\n            self,\n            in_features: int,\n            hidden_features: Optional[int] = None,\n            out_features: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.GELU,\n            drop: float = 0.,\n            group: int = 8,\n            spatial_conv: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_features = out_features or in_features\n        hidden_features = hidden_features or in_features\n        drop_probs = to_2tuple(drop)\n\n        self.in_features = in_features\n        self.out_features = out_features\n        self.spatial_conv = spatial_conv\n        if self.spatial_conv:\n            if group < 2:  # net setting\n                hidden_features = in_features * 5 // 6\n            else:\n                hidden_features = in_features * 2\n        self.hidden_features = hidden_features\n        self.group = group\n        self.conv1 = nn.Conv2d(in_features, hidden_features, 1, stride=1, padding=0, bias=False, **dd)\n        self.act1 = act_layer()\n        self.drop1 = nn.Dropout(drop_probs[0])\n        if 
self.spatial_conv:\n            self.conv2 = nn.Conv2d(\n                hidden_features, hidden_features, 3, stride=1, padding=1, groups=self.group, bias=False, **dd)\n            self.act2 = act_layer()\n        else:\n            self.conv2 = None\n            self.act2 = None\n        self.conv3 = nn.Conv2d(hidden_features, out_features, 1, stride=1, padding=0, bias=False, **dd)\n        self.drop3 = nn.Dropout(drop_probs[1])\n\n    def forward(self, x):\n        x = self.conv1(x)\n        x = self.act1(x)\n        x = self.drop1(x)\n        if self.conv2 is not None:\n            x = self.conv2(x)\n            x = self.act2(x)\n        x = self.conv3(x)\n        x = self.drop3(x)\n        return x\n\n\nclass Attention(nn.Module):\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            head_dim_ratio: float = 1.,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.dim = dim\n        self.num_heads = num_heads\n        head_dim = round(dim // num_heads * head_dim_ratio)\n        self.head_dim = head_dim\n        self.scale = head_dim ** -0.5\n        self.fused_attn = use_fused_attn(experimental=True)\n\n        self.qkv = nn.Conv2d(dim, head_dim * num_heads * 3, 1, stride=1, padding=0, bias=False, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Conv2d(self.head_dim * self.num_heads, dim, 1, stride=1, padding=0, bias=False, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n    def forward(self, x):\n        B, C, H, W = x.shape\n        x = self.qkv(x).reshape(B, 3, self.num_heads, self.head_dim, -1).permute(1, 0, 2, 4, 3)\n        q, k, v = x.unbind(0)\n\n        if self.fused_attn:\n            x = torch.nn.functional.scaled_dot_product_attention(\n                
q.contiguous(), k.contiguous(), v.contiguous(),\n                dropout_p=self.attn_drop.p if self.training else 0.,\n            )\n        else:\n            attn = (q @ k.transpose(-2, -1)) * self.scale\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x = attn @ v\n\n        x = x.permute(0, 1, 3, 2).reshape(B, -1, H, W)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n\nclass Block(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            head_dim_ratio: float = 1.,\n            mlp_ratio: float = 4.,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = LayerNorm2d,\n            group: int = 8,\n            attn_disabled: bool = False,\n            spatial_conv: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.spatial_conv = spatial_conv\n        self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n        if attn_disabled:\n            self.norm1 = None\n            self.attn = None\n        else:\n            self.norm1 = norm_layer(dim, **dd)\n            self.attn = Attention(\n                dim,\n                num_heads=num_heads,\n                head_dim_ratio=head_dim_ratio,\n                attn_drop=attn_drop,\n                proj_drop=proj_drop,\n                **dd,\n            )\n\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp = SpatialMlp(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=proj_drop,\n            group=group,\n            spatial_conv=spatial_conv,\n            **dd,\n        )\n\n    def forward(self, x):\n        if self.attn is not None:\n            x = x + self.drop_path(self.attn(self.norm1(x)))\n        x = x + self.drop_path(self.mlp(self.norm2(x)))\n        return x\n\n\nclass Visformer(nn.Module):\n    def __init__(\n            self,\n            img_size: int = 224,\n            patch_size: int = 16,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            init_channels: Optional[int] = 32,\n            embed_dim: int = 384,\n            depth: Union[int, tuple] = 12,\n            num_heads: int = 6,\n            mlp_ratio: float = 4.,\n            drop_rate: float = 0.,\n            pos_drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            norm_layer: Type[nn.Module] = LayerNorm2d,\n            attn_stage: str = '111',\n            use_pos_embed: bool = True,\n            spatial_conv: str = '111',\n            vit_stem: bool = False,\n            group: int = 8,\n            global_pool: str = 'avg',\n            conv_init: bool = False,\n            embed_norm: Optional[Type[nn.Module]] = None,\n            device=None,\n            dtype=None,\n    ):\n        
super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        img_size = to_2tuple(img_size)\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.embed_dim = embed_dim\n        self.init_channels = init_channels\n        self.img_size = img_size\n        self.vit_stem = vit_stem\n        self.conv_init = conv_init\n        if isinstance(depth, (list, tuple)):\n            self.stage_num1, self.stage_num2, self.stage_num3 = depth\n            depth = sum(depth)\n        else:\n            self.stage_num1 = self.stage_num3 = depth // 3\n            self.stage_num2 = depth - self.stage_num1 - self.stage_num3\n        self.use_pos_embed = use_pos_embed\n        self.grad_checkpointing = False\n\n        dpr = calculate_drop_path_rates(drop_path_rate, depth)\n        # stage 1\n        if self.vit_stem:\n            self.stem = None\n            self.patch_embed1 = PatchEmbed(\n                img_size=img_size,\n                patch_size=patch_size,\n                in_chans=in_chans,\n                embed_dim=embed_dim,\n                norm_layer=embed_norm,\n                flatten=False,\n                **dd,\n            )\n            img_size = [x // patch_size for x in img_size]\n        else:\n            if self.init_channels is None:\n                self.stem = None\n                self.patch_embed1 = PatchEmbed(\n                    img_size=img_size,\n                    patch_size=patch_size // 2,\n                    in_chans=in_chans,\n                    embed_dim=embed_dim // 2,\n                    norm_layer=embed_norm,\n                    flatten=False,\n                    **dd,\n                )\n                img_size = [x // (patch_size // 2) for x in img_size]\n            else:\n                self.stem = nn.Sequential(\n                    nn.Conv2d(in_chans, self.init_channels, 7, stride=2, padding=3, bias=False, **dd),\n                    nn.BatchNorm2d(self.init_channels, 
**dd),\n                    nn.ReLU(inplace=True)\n                )\n                img_size = [x // 2 for x in img_size]\n                self.patch_embed1 = PatchEmbed(\n                    img_size=img_size,\n                    patch_size=patch_size // 4,\n                    in_chans=self.init_channels,\n                    embed_dim=embed_dim // 2,\n                    norm_layer=embed_norm,\n                    flatten=False,\n                    **dd,\n                )\n                img_size = [x // (patch_size // 4) for x in img_size]\n\n        if self.use_pos_embed:\n            if self.vit_stem:\n                self.pos_embed1 = nn.Parameter(torch.zeros(1, embed_dim, *img_size, **dd))\n            else:\n                self.pos_embed1 = nn.Parameter(torch.zeros(1, embed_dim//2, *img_size, **dd))\n            self.pos_drop = nn.Dropout(p=pos_drop_rate)\n        else:\n            self.pos_embed1 = None\n\n        self.stage1 = nn.Sequential(*[\n            Block(\n                dim=embed_dim//2,\n                num_heads=num_heads,\n                head_dim_ratio=0.5,\n                mlp_ratio=mlp_ratio,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dpr[i],\n                norm_layer=norm_layer,\n                group=group,\n                attn_disabled=(attn_stage[0] == '0'),\n                spatial_conv=(spatial_conv[0] == '1'),\n                **dd,\n            )\n            for i in range(self.stage_num1)\n        ])\n\n        # stage2\n        if not self.vit_stem:\n            self.patch_embed2 = PatchEmbed(\n                img_size=img_size,\n                patch_size=patch_size // 8,\n                in_chans=embed_dim // 2,\n                embed_dim=embed_dim,\n                norm_layer=embed_norm,\n                flatten=False,\n                **dd,\n            )\n            img_size = [x // (patch_size // 8) for x in img_size]\n            if 
self.use_pos_embed:\n                self.pos_embed2 = nn.Parameter(torch.zeros(1, embed_dim, *img_size, **dd))\n            else:\n                self.pos_embed2 = None\n        else:\n            self.patch_embed2 = None\n        self.stage2 = nn.Sequential(*[\n            Block(\n                dim=embed_dim,\n                num_heads=num_heads,\n                head_dim_ratio=1.0,\n                mlp_ratio=mlp_ratio,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dpr[i],\n                norm_layer=norm_layer,\n                group=group,\n                attn_disabled=(attn_stage[1] == '0'),\n                spatial_conv=(spatial_conv[1] == '1'),\n                **dd,\n            )\n            for i in range(self.stage_num1, self.stage_num1+self.stage_num2)\n        ])\n\n        # stage 3\n        if not self.vit_stem:\n            self.patch_embed3 = PatchEmbed(\n                img_size=img_size,\n                patch_size=patch_size // 8,\n                in_chans=embed_dim,\n                embed_dim=embed_dim * 2,\n                norm_layer=embed_norm,\n                flatten=False,\n                **dd,\n            )\n            img_size = [x // (patch_size // 8) for x in img_size]\n            if self.use_pos_embed:\n                self.pos_embed3 = nn.Parameter(torch.zeros(1, embed_dim*2, *img_size, **dd))\n            else:\n                self.pos_embed3 = None\n        else:\n            self.patch_embed3 = None\n        self.stage3 = nn.Sequential(*[\n            Block(\n                dim=embed_dim * 2,\n                num_heads=num_heads,\n                head_dim_ratio=1.0,\n                mlp_ratio=mlp_ratio,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dpr[i],\n                norm_layer=norm_layer,\n                group=group,\n                attn_disabled=(attn_stage[2] == '0'),\n         
       spatial_conv=(spatial_conv[2] == '1'),\n                **dd,\n            )\n            for i in range(self.stage_num1+self.stage_num2, depth)\n        ])\n\n        self.num_features = self.head_hidden_size = embed_dim if self.vit_stem else embed_dim * 2\n        self.norm = norm_layer(self.num_features, **dd)\n\n        # head\n        global_pool, head = create_classifier(\n            self.num_features,\n            self.num_classes,\n            pool_type=global_pool,\n            device=device,\n            dtype=dtype,\n        )\n        self.global_pool = global_pool\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = head\n\n        # weights init\n        if self.use_pos_embed:\n            trunc_normal_(self.pos_embed1, std=0.02)\n            if not self.vit_stem:\n                trunc_normal_(self.pos_embed2, std=0.02)\n                trunc_normal_(self.pos_embed3, std=0.02)\n        self.apply(self._init_weights)\n\n    def _init_weights(self, m):\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=0.02)\n            if m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n        elif isinstance(m, nn.Conv2d):\n            if self.conv_init:\n                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')\n            else:\n                trunc_normal_(m.weight, std=0.02)\n            if m.bias is not None:\n                nn.init.constant_(m.bias, 0.)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^patch_embed1|pos_embed1|stem',  # stem and embed\n            blocks=[\n                (r'^stage(\\d+)\\.(\\d+)' if coarse else r'^stage(\\d+)\\.(\\d+)', None),\n                (r'^(?:patch_embed|pos_embed)(\\d+)', (0,)),\n                (r'^norm', (99999,))\n            ]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = 
enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg'):\n        self.num_classes = num_classes\n        device = self.head.weight.device if hasattr(self.head, 'weight') else None\n        dtype = self.head.weight.dtype if hasattr(self.head, 'weight') else None\n        self.global_pool, self.head = create_classifier(\n            self.num_features, self.num_classes, pool_type=global_pool, device=device, dtype=dtype)\n\n    def forward_features(self, x):\n        if self.stem is not None:\n            x = self.stem(x)\n\n        # stage 1\n        x = self.patch_embed1(x)\n        if self.pos_embed1 is not None:\n            x = self.pos_drop(x + self.pos_embed1)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stage1, x)\n        else:\n            x = self.stage1(x)\n\n        # stage 2\n        if self.patch_embed2 is not None:\n            x = self.patch_embed2(x)\n            if self.pos_embed2 is not None:\n                x = self.pos_drop(x + self.pos_embed2)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stage2, x)\n        else:\n            x = self.stage2(x)\n\n        # stage3\n        if self.patch_embed3 is not None:\n            x = self.patch_embed3(x)\n            if self.pos_embed3 is not None:\n                x = self.pos_drop(x + self.pos_embed3)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stage3, x)\n        else:\n            x = self.stage3(x)\n\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        x = self.head_drop(x)\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = 
self.forward_head(x)\n        return x\n\n\ndef _create_visformer(variant, pretrained=False, default_cfg=None, **kwargs):\n    if kwargs.get('features_only', None):\n        raise RuntimeError('features_only not implemented for Vision Transformer models.')\n    model = build_model_with_cfg(Visformer, variant, pretrained, **kwargs)\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.0', 'classifier': 'head',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'visformer_tiny.in1k': _cfg(hf_hub_id='timm/'),\n    'visformer_small.in1k': _cfg(hf_hub_id='timm/'),\n})\n\n\n@register_model\ndef visformer_tiny(pretrained=False, **kwargs) -> Visformer:\n    model_cfg = dict(\n        init_channels=16, embed_dim=192, depth=(7, 4, 4), num_heads=3, mlp_ratio=4., group=8,\n        attn_stage='011', spatial_conv='100', norm_layer=nn.BatchNorm2d, conv_init=True,\n        embed_norm=nn.BatchNorm2d)\n    model = _create_visformer('visformer_tiny', pretrained=pretrained, **dict(model_cfg, **kwargs))\n    return model\n\n\n@register_model\ndef visformer_small(pretrained=False, **kwargs) -> Visformer:\n    model_cfg = dict(\n        init_channels=32, embed_dim=384, depth=(7, 4, 4), num_heads=6, mlp_ratio=4., group=8,\n        attn_stage='011', spatial_conv='100', norm_layer=nn.BatchNorm2d, conv_init=True,\n        embed_norm=nn.BatchNorm2d)\n    model = _create_visformer('visformer_small', pretrained=pretrained, **dict(model_cfg, **kwargs))\n    return model\n\n\n# @register_model\n# def visformer_net1(pretrained=False, **kwargs):\n#     model = Visformer(\n#         init_channels=None, embed_dim=384, depth=(0, 12, 0), num_heads=6, 
mlp_ratio=4., attn_stage='111',\n#         spatial_conv='000', vit_stem=True, conv_init=True, **kwargs)\n#     model.default_cfg = _cfg()\n#     return model\n#\n#\n# @register_model\n# def visformer_net2(pretrained=False, **kwargs):\n#     model = Visformer(\n#         init_channels=32, embed_dim=384, depth=(0, 12, 0), num_heads=6, mlp_ratio=4., attn_stage='111',\n#         spatial_conv='000', vit_stem=False, conv_init=True, **kwargs)\n#     model.default_cfg = _cfg()\n#     return model\n#\n#\n# @register_model\n# def visformer_net3(pretrained=False, **kwargs):\n#     model = Visformer(\n#         init_channels=32, embed_dim=384, depth=12, num_heads=6, mlp_ratio=4., attn_stage='111',\n#         spatial_conv='000', vit_stem=False, conv_init=True, **kwargs)\n#     model.default_cfg = _cfg()\n#     return model\n#\n#\n# @register_model\n# def visformer_net4(pretrained=False, **kwargs):\n#     model = Visformer(\n#         init_channels=32, embed_dim=384, depth=12, num_heads=6, mlp_ratio=4., attn_stage='111',\n#         spatial_conv='000', vit_stem=False, conv_init=True, **kwargs)\n#     model.default_cfg = _cfg()\n#     return model\n#\n#\n# @register_model\n# def visformer_net5(pretrained=False, **kwargs):\n#     model = Visformer(\n#         init_channels=32, embed_dim=384, depth=12, num_heads=6, mlp_ratio=4., group=1, attn_stage='111',\n#         spatial_conv='111', vit_stem=False, conv_init=True, **kwargs)\n#     model.default_cfg = _cfg()\n#     return model\n#\n#\n# @register_model\n# def visformer_net6(pretrained=False, **kwargs):\n#     model = Visformer(\n#         init_channels=32, embed_dim=384, depth=12, num_heads=6, mlp_ratio=4., group=1, attn_stage='111',\n#         pos_embed=False, spatial_conv='111', conv_init=True, **kwargs)\n#     model.default_cfg = _cfg()\n#     return model\n#\n#\n# @register_model\n# def visformer_net7(pretrained=False, **kwargs):\n#     model = Visformer(\n#         init_channels=32, embed_dim=384, depth=(6, 7, 7), 
num_heads=6, group=1, attn_stage='000',\n#         pos_embed=False, spatial_conv='111', conv_init=True, **kwargs)\n#     model.default_cfg = _cfg()\n#     return model\n\n\n\n\n"
  },
  {
    "path": "timm/models/vision_transformer.py",
    "content": "\"\"\" Vision Transformer (ViT) in PyTorch\n\nA PyTorch implement of Vision Transformers as described in:\n\n'An Image Is Worth 16 x 16 Words: Transformers for Image Recognition at Scale'\n    - https://arxiv.org/abs/2010.11929\n\n`How to train your ViT? Data, Augmentation, and Regularization in Vision Transformers`\n    - https://arxiv.org/abs/2106.10270\n\n`FlexiViT: One Model for All Patch Sizes`\n    - https://arxiv.org/abs/2212.08013\n\nThe official jax code is released and available at\n  * https://github.com/google-research/vision_transformer\n  * https://github.com/google-research/big_vision\n\nAcknowledgments:\n  * The paper authors for releasing code and weights, thanks!\n  * I fixed my class token impl based on Phil Wang's https://github.com/lucidrains/vit-pytorch\n  * Simple transformer style inspired by Andrej Karpathy's https://github.com/karpathy/minGPT\n  * Bert reference code checks against Huggingface Transformers and Tensorflow Bert\n\nHacked together by / Copyright 2020, Ross Wightman\n\"\"\"\nimport copy\nimport logging\nimport math\nimport os\nfrom collections import OrderedDict\nfrom functools import partial\nfrom typing import Any, Callable, Dict, Optional, Set, Tuple, Type, Union, List\ntry:\n    from typing import Literal\nexcept ImportError:\n    from typing_extensions import Literal\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom torch.jit import Final\n\nfrom timm.data import (\n    IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD,\n    IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD,\n    OPENAI_CLIP_MEAN, OPENAI_CLIP_STD\n)\nfrom timm.layers import (\n    Attention,\n    DiffAttention,\n    AttentionPoolLatent,\n    AttentionPoolPrr,\n    PatchEmbed,\n    Mlp,\n    SwiGLUPacked,\n    SwiGLU,\n    LayerNorm,\n    RmsNorm,\n    DropPath,\n    calculate_drop_path_rates,\n    PatchDropout,\n    trunc_normal_,\n    lecun_normal_,\n    resample_patch_embed,\n    resample_abs_pos_embed,\n    
use_fused_attn,\n    get_act_layer,\n    get_norm_layer,\n    maybe_add_mask,\n    resolve_self_attn_mask,\n    LayerType,\n    LayerScale,\n)\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import named_apply, checkpoint, checkpoint_seq, adapt_input_conv\nfrom ._registry import generate_default_cfgs, register_model, register_model_deprecations\n\n__all__ = ['VisionTransformer']  # model_registry will add each entrypoint fn to this\n\n\n_logger = logging.getLogger(__name__)\n\n\nATTN_LAYERS = {\n    '': Attention,\n    'attn': Attention,\n    'diff': DiffAttention,\n}\n\n\ndef _create_attn(\n        attn_layer: LayerType,\n        dim: int,\n        num_heads: int,\n        qkv_bias: bool = False,\n        qk_norm: bool = False,\n        scale_norm: bool = False,\n        proj_bias: bool = True,\n        attn_drop: float = 0.,\n        proj_drop: float = 0.,\n        norm_layer: Optional[Type[nn.Module]] = None,\n        depth: int = 0,\n        **kwargs,\n) -> nn.Module:\n    if isinstance(attn_layer, str):\n        attn_layer = ATTN_LAYERS.get(attn_layer, None)\n        assert attn_layer is not None, f'Unknown attn_layer: {attn_layer}'\n\n    # Only pass depth to attention layers that use it\n    if issubclass(attn_layer, DiffAttention):\n        kwargs['depth'] = depth\n\n    return attn_layer(\n        dim,\n        num_heads=num_heads,\n        qkv_bias=qkv_bias,\n        qk_norm=qk_norm,\n        scale_norm=scale_norm,\n        proj_bias=proj_bias,\n        attn_drop=attn_drop,\n        proj_drop=proj_drop,\n        norm_layer=norm_layer,\n        **kwargs,\n    )\n\n\nclass Block(nn.Module):\n    \"\"\"Transformer block with pre-normalization.\"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            qk_norm: bool = False,\n            scale_attn_norm: bool = False,\n         
   scale_mlp_norm: bool = False,\n            proj_bias: bool = True,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            init_values: Optional[float] = None,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = LayerNorm,\n            mlp_layer: Type[nn.Module] = Mlp,\n            attn_layer: LayerType = Attention,\n            depth: int = 0,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"Initialize Block.\n\n        Args:\n            dim: Number of input channels.\n            num_heads: Number of attention heads.\n            mlp_ratio: Ratio of mlp hidden dim to embedding dim.\n            qkv_bias: If True, add a learnable bias to query, key, value.\n            qk_norm: If True, apply normalization to query and key.\n            proj_bias: If True, add bias to output projection.\n            proj_drop: Projection dropout rate.\n            attn_drop: Attention dropout rate.\n            init_values: Initial values for layer scale.\n            drop_path: Stochastic depth rate.\n            act_layer: Activation layer.\n            norm_layer: Normalization layer.\n            mlp_layer: MLP layer.\n            attn_layer: Attention layer type (class or string).\n            depth: Block index, passed to attention layer for depth-dependent init.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = _create_attn(\n            attn_layer,\n            dim,\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            qk_norm=qk_norm,\n            scale_norm=scale_attn_norm,\n            proj_bias=proj_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            norm_layer=norm_layer,\n            depth=depth,\n            **dd,\n        )\n        self.ls1 = 
LayerScale(dim, init_values=init_values, **dd) if init_values else nn.Identity()\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp = mlp_layer(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            norm_layer=norm_layer if scale_mlp_norm else None,\n            bias=proj_bias,\n            drop=proj_drop,\n            **dd,\n        )\n        self.ls2 = LayerScale(dim, init_values=init_values, **dd) if init_values else nn.Identity()\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def forward(\n            self,\n            x: torch.Tensor,\n            attn_mask: Optional[torch.Tensor] = None,\n            is_causal: bool = False,\n    ) -> torch.Tensor:\n        x = x + self.drop_path1(self.ls1(self.attn(self.norm1(x), attn_mask=attn_mask, is_causal=is_causal)))\n        x = x + self.drop_path2(self.ls2(self.mlp(self.norm2(x))))\n        return x\n\n\nclass ResPostBlock(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            qk_norm: bool = False,\n            scale_attn_norm: bool = False,\n            scale_mlp_norm: bool = False,\n            proj_bias: bool = True,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            init_values: Optional[float] = None,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = LayerNorm,\n            mlp_layer: Type[nn.Module] = Mlp,\n            attn_layer: LayerType = Attention,\n            depth: int = 0,\n            device=None,\n            dtype=None,\n    ) -> None:\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.init_values = 
init_values\n\n        self.attn = _create_attn(\n            attn_layer,\n            dim,\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            qk_norm=qk_norm,\n            scale_norm=scale_attn_norm,\n            proj_bias=proj_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            norm_layer=norm_layer,\n            depth=depth,\n            **dd,\n        )\n        self.norm1 = norm_layer(dim, **dd)\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.mlp = mlp_layer(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            norm_layer=norm_layer if scale_mlp_norm else None,\n            bias=proj_bias,\n            drop=proj_drop,\n            **dd,\n        )\n        self.norm2 = norm_layer(dim, **dd)\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.init_weights()\n\n    def init_weights(self) -> None:\n        # NOTE this init overrides that base model init with specific changes for the block type\n        if self.init_values is not None:\n            nn.init.constant_(self.norm1.weight, self.init_values)\n            nn.init.constant_(self.norm2.weight, self.init_values)\n\n    def forward(\n            self,\n            x: torch.Tensor,\n            attn_mask: Optional[torch.Tensor] = None,\n            is_causal: bool = False,\n    ) -> torch.Tensor:\n        x = x + self.drop_path1(self.norm1(self.attn(x, attn_mask=attn_mask, is_causal=is_causal)))\n        x = x + self.drop_path2(self.norm2(self.mlp(x)))\n        return x\n\n\nclass ParallelScalingBlock(nn.Module):\n    \"\"\" Parallel ViT block (MLP & Attention in parallel)\n    Based on:\n      'Scaling Vision Transformers to 22 Billion Parameters` - https://arxiv.org/abs/2302.05442\n    \"\"\"\n    fused_attn: Final[bool]\n\n    def __init__(\n            self,\n            
dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            qk_norm: bool = False,\n            scale_attn_norm: bool = False,\n            scale_mlp_norm: bool = False,\n            proj_bias: bool = True,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            init_values: Optional[float] = None,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = LayerNorm,\n            mlp_layer: Optional[Type[nn.Module]] = None,  # not used\n            attn_layer: Optional[LayerType] = None,  # not used\n            depth: int = 0,  # not used\n            fuse_out_proj: bool = False,\n            device=None,\n            dtype=None,\n    ) -> None:\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert dim % num_heads == 0, 'dim should be divisible by num_heads'\n        assert not scale_attn_norm and not scale_mlp_norm, 'Scale norms not supported'\n        self.num_heads = num_heads\n        self.head_dim = dim // num_heads\n        self.scale = self.head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n        mlp_hidden_dim = int(mlp_ratio * dim)\n        in_proj_out_dim = mlp_hidden_dim + 3 * dim\n\n        self.in_norm = norm_layer(dim, **dd)\n        self.in_proj = nn.Linear(dim, in_proj_out_dim, bias=qkv_bias, **dd)\n        self.in_split = [mlp_hidden_dim] + [dim] * 3\n        if qkv_bias:\n            # mlp_bias is combined with qkv_bias in in_proj.bias\n            self.register_parameter('mlp_bias', None)\n        else:\n            self.mlp_bias = nn.Parameter(torch.empty(mlp_hidden_dim, **dd))\n\n        self.q_norm = norm_layer(self.head_dim, **dd) if qk_norm else nn.Identity()\n        self.k_norm = norm_layer(self.head_dim, **dd) if qk_norm else nn.Identity()\n        self.attn_drop = nn.Dropout(attn_drop)\n\n        self.mlp_drop = 
nn.Dropout(proj_drop)\n        self.mlp_act = act_layer()\n\n        if fuse_out_proj:\n            # Fused output projection for both attention and MLP\n            self.out_proj = nn.Linear(dim + mlp_hidden_dim, dim, bias=proj_bias, **dd)\n            self.attn_out_proj = None\n            self.mlp_out_proj = None\n        else:\n            # Separate output projections\n            self.out_proj = None\n            self.attn_out_proj = nn.Linear(dim, dim, bias=proj_bias, **dd)\n            self.mlp_out_proj = nn.Linear(mlp_hidden_dim, dim, bias=proj_bias, **dd)\n\n        self.ls = LayerScale(dim, init_values=init_values, **dd) if init_values is not None else nn.Identity()\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        if self.mlp_bias is not None:\n            nn.init.zeros_(self.mlp_bias)\n\n    def forward(\n            self,\n            x: torch.Tensor,\n            attn_mask: Optional[torch.Tensor] = None,\n            is_causal: bool = False,\n    ) -> torch.Tensor:\n        B, N, C = x.shape\n\n        # Combined MLP fc1 & qkv projections\n        y = self.in_norm(x)\n        y = self.in_proj(y)\n        x_mlp, q, k, v = torch.split(y, self.in_split, dim=-1)\n        if self.mlp_bias is not None:\n            x_mlp = x_mlp + self.mlp_bias\n\n        # Dot product attention w/ qk norm\n        q = self.q_norm(q.view(B, N, self.num_heads, self.head_dim)).transpose(1, 2)\n        k = self.k_norm(k.view(B, N, self.num_heads, self.head_dim)).transpose(1, 2)\n        v = v.view(B, N, self.num_heads, self.head_dim).transpose(1, 2)\n        if self.fused_attn:\n            x_attn = F.scaled_dot_product_attention(\n                q, k, v,\n                attn_mask=attn_mask,\n                dropout_p=self.attn_drop.p 
if self.training else 0.,\n                is_causal=is_causal,\n            )\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            attn_bias = resolve_self_attn_mask(N, attn, attn_mask, is_causal=is_causal)\n            attn = maybe_add_mask(attn, attn_bias)\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x_attn = attn @ v\n\n        x_attn = x_attn.transpose(1, 2).reshape(B, N, C)\n\n        # MLP activation & dropout\n        x_mlp = self.mlp_act(x_mlp)\n        x_mlp = self.mlp_drop(x_mlp)\n\n        # Output projection (fused or separate)\n        if self.out_proj is not None:\n            y = self.out_proj(torch.cat((x_attn, x_mlp), dim=-1))\n        else:\n            y = self.attn_out_proj(x_attn) + self.mlp_out_proj(x_mlp)\n\n        # Add residual w/ drop path & layer scale applied\n        x = x + self.drop_path(self.ls(y))\n        return x\n\n\nclass DiffParallelScalingBlock(nn.Module):\n    \"\"\" Parallel ViT block with Differential Attention (MLP & Attention in parallel).\n\n    Combines the parallel MLP+Attention structure from 'Scaling Vision Transformers to\n    22 Billion Parameters' (https://arxiv.org/abs/2302.05442) with differential attention\n    from 'Differential Transformer' (https://arxiv.org/abs/2410.05258).\n    \"\"\"\n    fused_attn: Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            qk_norm: bool = False,\n            scale_attn_norm: bool = False,\n            scale_mlp_norm: bool = False,\n            proj_bias: bool = True,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            init_values: Optional[float] = None,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = LayerNorm,\n            
mlp_layer: Optional[Type[nn.Module]] = None,\n            attn_layer: Optional[LayerType] = None,\n            depth: int = 0,\n            dual_lambda: bool = False,\n            device=None,\n            dtype=None,\n    ) -> None:\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert dim % num_heads == 0, 'dim should be divisible by num_heads'\n        assert not scale_attn_norm and not scale_mlp_norm, 'Scale norms not supported'\n        self.num_heads = num_heads\n        self.head_dim = dim // num_heads // 2  # Half head_dim for diff attention\n        self.scale = self.head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n        mlp_hidden_dim = int(mlp_ratio * dim)\n        in_proj_out_dim = mlp_hidden_dim + 3 * dim\n\n        self.in_norm = norm_layer(dim, **dd)\n        self.in_proj = nn.Linear(dim, in_proj_out_dim, bias=qkv_bias, **dd)\n        self.in_split = [mlp_hidden_dim] + [dim] * 3\n        if qkv_bias:\n            # mlp_bias is combined with qkv_bias in in_proj.bias\n            self.register_parameter('mlp_bias', None)\n        else:\n            self.mlp_bias = nn.Parameter(torch.empty(mlp_hidden_dim, **dd))\n\n        self.q_norm = norm_layer(self.head_dim, **dd) if qk_norm else nn.Identity()\n        self.k_norm = norm_layer(self.head_dim, **dd) if qk_norm else nn.Identity()\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.attn_drop_p = attn_drop\n\n        # Differential attention specific\n        self.sub_norm = RmsNorm(2 * self.head_dim, eps=1e-5, **dd)\n        self.dual_lambda = dual_lambda\n        if dual_lambda:\n            self.lambda_a = nn.Parameter(torch.empty((), dtype=torch.float32, device=device))\n            self.lambda_b = nn.Parameter(torch.empty((), dtype=torch.float32, device=device))\n            self.lambda_q1 = self.lambda_k1 = self.lambda_q2 = self.lambda_k2 = None\n        else:\n            self.lambda_a = self.lambda_b = None\n            
self.lambda_q1 = nn.Parameter(torch.empty(self.head_dim, dtype=torch.float32, device=device))\n            self.lambda_k1 = nn.Parameter(torch.empty(self.head_dim, dtype=torch.float32, device=device))\n            self.lambda_q2 = nn.Parameter(torch.empty(self.head_dim, dtype=torch.float32, device=device))\n            self.lambda_k2 = nn.Parameter(torch.empty(self.head_dim, dtype=torch.float32, device=device))\n\n        self.mlp_drop = nn.Dropout(proj_drop)\n        self.mlp_act = act_layer()\n\n        # Fused output projection for both attention and MLP\n        self.out_proj = nn.Linear(dim + mlp_hidden_dim, dim, bias=proj_bias, **dd)\n\n        self.ls = LayerScale(dim, init_values=init_values, **dd) if init_values is not None else nn.Identity()\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.lambda_init = 0.8\n        self.set_lambda_init(depth)\n\n        # TODO: skip init when on meta device when safe to do so\n        self.reset_parameters()\n\n    def set_lambda_init(self, depth: int):\n        self.lambda_init = 0.8 - 0.6 * math.exp(-0.3 * depth)\n\n    def reset_parameters(self) -> None:\n        \"\"\"Initialize parameters and buffers.\"\"\"\n        if self.mlp_bias is not None:\n            nn.init.zeros_(self.mlp_bias)\n        if self.dual_lambda:\n            nn.init.zeros_(self.lambda_a)\n            nn.init.zeros_(self.lambda_b)\n        else:\n            nn.init.normal_(self.lambda_q1, mean=0, std=0.1)\n            nn.init.normal_(self.lambda_k1, mean=0, std=0.1)\n            nn.init.normal_(self.lambda_q2, mean=0, std=0.1)\n            nn.init.normal_(self.lambda_k2, mean=0, std=0.1)\n\n    def _compute_lambda(self) -> torch.Tensor:\n        if self.lambda_a is not None:\n            lambda_1 = torch.exp(self.lambda_a)\n            lambda_2 = torch.exp(self.lambda_b)\n        else:\n            lambda_1 = torch.exp(torch.sum(self.lambda_q1 * self.lambda_k1, dim=-1).float())\n            
lambda_2 = torch.exp(torch.sum(self.lambda_q2 * self.lambda_k2, dim=-1).float())\n        return lambda_1 - lambda_2 + self.lambda_init\n\n    def forward(\n            self,\n            x: torch.Tensor,\n            attn_mask: Optional[torch.Tensor] = None,\n            is_causal: bool = False,\n    ) -> torch.Tensor:\n        B, N, C = x.shape\n\n        # Combined MLP fc1 & qkv projections\n        y = self.in_norm(x)\n        y = self.in_proj(y)\n        x_mlp, q, k, v = torch.split(y, self.in_split, dim=-1)\n        if self.mlp_bias is not None:\n            x_mlp = x_mlp + self.mlp_bias\n\n        # Reshape for differential attention (2x heads with half head_dim for q/k)\n        q = q.reshape(B, N, 2 * self.num_heads, self.head_dim).transpose(1, 2)\n        k = k.reshape(B, N, 2 * self.num_heads, self.head_dim).transpose(1, 2)\n        v = v.reshape(B, N, self.num_heads, 2 * self.head_dim).transpose(1, 2)\n\n        q, k = self.q_norm(q), self.k_norm(k)\n\n        lambda_full = self._compute_lambda().type_as(q)\n\n        if self.fused_attn:\n            q = q.reshape(B, self.num_heads, 2, N, self.head_dim)\n            k = k.reshape(B, self.num_heads, 2, N, self.head_dim)\n            q1, q2 = q.unbind(2)\n            k1, k2 = k.unbind(2)\n\n            dropout_p = self.attn_drop_p if self.training else 0.0\n            attn1 = F.scaled_dot_product_attention(q1, k1, v, attn_mask=attn_mask, dropout_p=dropout_p, is_causal=is_causal)\n            attn2 = F.scaled_dot_product_attention(q2, k2, v, attn_mask=attn_mask, dropout_p=dropout_p, is_causal=is_causal)\n\n            x_attn = attn1 - lambda_full * attn2\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            attn_bias = resolve_self_attn_mask(N, attn, attn_mask, is_causal=is_causal)\n            attn = maybe_add_mask(attn, attn_bias)\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n\n            attn = attn.view(B, 
self.num_heads, 2, N, N)\n            attn = attn[:, :, 0] - lambda_full * attn[:, :, 1]\n            x_attn = attn @ v\n\n        x_attn = self.sub_norm(x_attn)\n        x_attn = x_attn * (1 - self.lambda_init)\n        x_attn = x_attn.transpose(1, 2).reshape(B, N, C)\n\n        # MLP activation & dropout\n        x_mlp = self.mlp_act(x_mlp)\n        x_mlp = self.mlp_drop(x_mlp)\n\n        # Fused output projection\n        y = self.out_proj(torch.cat((x_attn, x_mlp), dim=-1))\n\n        # Add residual w/ drop path & layer scale applied\n        x = x + self.drop_path(self.ls(y))\n        return x\n\n\nclass ParallelThingsBlock(nn.Module):\n    \"\"\" Parallel ViT block (N parallel attention followed by N parallel MLP)\n    Based on:\n      `Three things everyone should know about Vision Transformers` - https://arxiv.org/abs/2203.09795\n    \"\"\"\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            num_parallel: int = 2,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            qk_norm: bool = False,\n            scale_attn_norm: bool = False,\n            scale_mlp_norm: bool = False,\n            proj_bias: bool = True,\n            init_values: Optional[float] = None,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = LayerNorm,\n            mlp_layer: Type[nn.Module] = Mlp,\n            attn_layer: LayerType = Attention,\n            depth: int = 0,\n            device=None,\n            dtype=None,\n    ) -> None:\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_parallel = num_parallel\n        self.attns = nn.ModuleList()\n        self.ffns = nn.ModuleList()\n        for _ in range(num_parallel):\n            self.attns.append(nn.Sequential(OrderedDict([\n                ('norm', 
norm_layer(dim, **dd)),\n                ('attn', _create_attn(\n                    attn_layer,\n                    dim,\n                    num_heads=num_heads,\n                    qkv_bias=qkv_bias,\n                    qk_norm=qk_norm,\n                    scale_norm=scale_attn_norm,\n                    proj_bias=proj_bias,\n                    attn_drop=attn_drop,\n                    proj_drop=proj_drop,\n                    norm_layer=norm_layer,\n                    depth=depth,\n                    **dd,\n                )),\n                ('ls', LayerScale(dim, init_values=init_values, **dd) if init_values else nn.Identity()),\n                ('drop_path', DropPath(drop_path) if drop_path > 0. else nn.Identity())\n            ])))\n            self.ffns.append(nn.Sequential(OrderedDict([\n                ('norm', norm_layer(dim, **dd)),\n                ('mlp', mlp_layer(\n                    dim,\n                    hidden_features=int(dim * mlp_ratio),\n                    act_layer=act_layer,\n                    norm_layer=norm_layer if scale_mlp_norm else None,\n                    bias=proj_bias,\n                    drop=proj_drop,\n                    **dd,\n                )),\n                ('ls', LayerScale(dim, init_values=init_values, **dd) if init_values else nn.Identity()),\n                ('drop_path', DropPath(drop_path) if drop_path > 0. 
else nn.Identity())\n            ])))\n\n    def forward(\n            self,\n            x: torch.Tensor,\n            attn_mask: Optional[torch.Tensor] = None,\n            is_causal: bool = False,\n    ) -> torch.Tensor:\n        if attn_mask is not None or is_causal:\n            attn_out = []\n            for attn in self.attns:\n                x_attn = attn.norm(x)\n                x_attn = attn.attn(x_attn, attn_mask=attn_mask, is_causal=is_causal)\n                x_attn = attn.ls(x_attn)\n                x_attn = attn.drop_path(x_attn)\n                attn_out.append(x_attn)\n            x = x + torch.stack(attn_out).sum(dim=0)\n        else:\n            x = x + torch.stack([attn(x) for attn in self.attns]).sum(dim=0)\n        x = x + torch.stack([ffn(x) for ffn in self.ffns]).sum(dim=0)\n        return x\n\n\ndef global_pool_nlc(\n        x: torch.Tensor,\n        pool_type: str = 'token',\n        num_prefix_tokens: int = 1,\n        reduce_include_prefix: bool = False,\n):\n    if not pool_type:\n        return x\n\n    if pool_type == 'token':\n        x = x[:, 0]  # class token\n    else:\n        x = x if reduce_include_prefix else x[:, num_prefix_tokens:]\n        if pool_type == 'avg':\n            x = x.mean(dim=1)\n        elif pool_type == 'avgmax':\n            x = 0.5 * (x.amax(dim=1) + x.mean(dim=1))\n        elif pool_type == 'max':\n            x = x.amax(dim=1)\n        else:\n            assert not pool_type, f'Unknown pool type {pool_type}'\n\n    return x\n\n\nclass VisionTransformer(nn.Module):\n    \"\"\" Vision Transformer\n\n    A PyTorch impl of : `An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale`\n        - https://arxiv.org/abs/2010.11929\n    \"\"\"\n    dynamic_img_size: Final[bool]\n\n    def __init__(\n            self,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            patch_size: Union[int, Tuple[int, int]] = 16,\n            in_chans: int = 3,\n            num_classes: int = 
1000,\n            global_pool: Literal['', 'avg', 'avgmax', 'max', 'token', 'map', 'prr'] = 'token',\n            embed_dim: int = 768,\n            depth: int = 12,\n            num_heads: int = 12,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            qk_norm: bool = False,\n            scale_attn_norm: bool = False,\n            scale_mlp_norm: bool = False,\n            proj_bias: bool = True,\n            init_values: Optional[float] = None,\n            class_token: bool = True,\n            pos_embed: str = 'learn',\n            no_embed_class: bool = False,\n            reg_tokens: int = 0,\n            pre_norm: bool = False,\n            final_norm: bool = True,\n            fc_norm: Optional[bool] = None,\n            pool_include_prefix: bool = False,\n            dynamic_img_size: bool = False,\n            dynamic_img_pad: bool = False,\n            drop_rate: float = 0.,\n            pos_drop_rate: float = 0.,\n            patch_drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            weight_init: Literal['skip', 'reset', 'jax', 'jax_nlhb', 'moco', ''] = '',\n            fix_init: bool = False,\n            embed_layer: Callable = PatchEmbed,\n            embed_norm_layer: Optional[LayerType] = None,\n            norm_layer: Optional[LayerType] = None,\n            act_layer: Optional[LayerType] = None,\n            block_fn: Type[nn.Module] = Block,\n            mlp_layer: Type[nn.Module] = Mlp,\n            attn_layer: LayerType = Attention,\n            device=None,\n            dtype=None,\n    ) -> None:\n        \"\"\"\n        Args:\n            img_size: Input image size.\n            patch_size: Patch size.\n            in_chans: Number of image input channels.\n            num_classes: Number of classes for classification head.\n            global_pool: Type of global pooling for final sequence (default: 
'token').\n            embed_dim: Transformer embedding dimension.\n            depth: Depth of transformer.\n            num_heads: Number of attention heads.\n            mlp_ratio: Ratio of mlp hidden dim to embedding dim.\n            qkv_bias: Enable bias for qkv projections if True.\n            init_values: Layer-scale init values (layer-scale enabled if not None).\n            class_token: Use class token.\n            no_embed_class: Don't include position embeddings for class (or reg) tokens.\n            reg_tokens: Number of register tokens.\n            pre_norm: Enable norm after embeddings, before transformer blocks (standard in CLIP ViT).\n            final_norm: Enable norm after transformer blocks, before head (standard in most ViT).\n            fc_norm: Move final norm after pool (instead of before), if None, enabled when global_pool == 'avg'.\n            drop_rate: Head dropout rate.\n            pos_drop_rate: Position embedding dropout rate.\n            attn_drop_rate: Attention dropout rate.\n            drop_path_rate: Stochastic depth rate.\n            weight_init: Weight initialization scheme.\n            fix_init: Apply weight initialization fix (scaling w/ layer index).\n            embed_layer: Patch embedding layer.\n            embed_norm_layer: Normalization layer to use / override in patch embed module.\n            norm_layer: Normalization layer.\n            act_layer: MLP activation layer.\n            block_fn: Transformer block layer.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert global_pool in ('', 'avg', 'avgmax', 'max', 'token', 'map', 'prr')\n        assert class_token or global_pool != 'token'\n        assert pos_embed in ('', 'none', 'learn')\n        use_fc_norm = global_pool in ('avg', 'avgmax', 'max') if fc_norm is None else fc_norm\n        norm_layer = get_norm_layer(norm_layer) or LayerNorm\n        embed_norm_layer = 
get_norm_layer(embed_norm_layer)\n        act_layer = get_act_layer(act_layer) or nn.GELU\n\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.num_features = self.head_hidden_size = self.embed_dim = embed_dim  # for consistency with other models\n        self.num_prefix_tokens = 1 if class_token else 0\n        self.num_prefix_tokens += reg_tokens\n        self.num_reg_tokens = reg_tokens\n        self.has_class_token = class_token\n        self.no_embed_class = no_embed_class\n        self.pool_include_prefix = pool_include_prefix\n        self.dynamic_img_size = dynamic_img_size\n        self.grad_checkpointing = False\n\n        embed_args = {}\n        if dynamic_img_size:\n            # flatten deferred until after pos embed\n            embed_args.update(dict(strict_img_size=False, output_fmt='NHWC'))\n        if embed_norm_layer is not None:\n            embed_args['norm_layer'] = embed_norm_layer\n        self.patch_embed = embed_layer(\n            img_size=img_size,\n            patch_size=patch_size,\n            in_chans=in_chans,\n            embed_dim=embed_dim,\n            bias=not pre_norm,  # disable bias if pre-norm is used (e.g. 
CLIP)\n            dynamic_img_pad=dynamic_img_pad,\n            **embed_args,\n            **dd,\n        )\n        num_patches = self.patch_embed.num_patches\n        reduction = self.patch_embed.feat_ratio() if hasattr(self.patch_embed, 'feat_ratio') else patch_size\n\n        self.cls_token = nn.Parameter(torch.empty(1, 1, embed_dim, **dd)) if class_token else None\n        self.reg_token = nn.Parameter(torch.empty(1, reg_tokens, embed_dim, **dd)) if reg_tokens else None\n        embed_len = num_patches if no_embed_class else num_patches + self.num_prefix_tokens\n        if not pos_embed or pos_embed == 'none':\n            self.pos_embed = None\n        else:\n            self.pos_embed = nn.Parameter(torch.empty(1, embed_len, embed_dim, **dd))\n        self.pos_drop = nn.Dropout(p=pos_drop_rate)\n        if patch_drop_rate > 0:\n            self.patch_drop = PatchDropout(\n                patch_drop_rate,\n                num_prefix_tokens=self.num_prefix_tokens,\n            )\n        else:\n            self.patch_drop = nn.Identity()\n        self.norm_pre = norm_layer(embed_dim, **dd) if pre_norm else nn.Identity()\n\n        dpr = calculate_drop_path_rates(drop_path_rate, depth)  # stochastic depth decay rule\n        self.blocks = nn.Sequential(*[\n            block_fn(\n                dim=embed_dim,\n                num_heads=num_heads,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                qk_norm=qk_norm,\n                scale_attn_norm=scale_attn_norm,\n                scale_mlp_norm=scale_mlp_norm,\n                proj_bias=proj_bias,\n                init_values=init_values,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dpr[i],\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                mlp_layer=mlp_layer,\n                attn_layer=attn_layer,\n                depth=i,\n                **dd,\n            
)\n            for i in range(depth)])\n        self.feature_info = [\n            dict(module=f'blocks.{i}', num_chs=embed_dim, reduction=reduction) for i in range(depth)]\n        self.norm = norm_layer(embed_dim, **dd) if final_norm and not use_fc_norm else nn.Identity()\n\n        # Classifier Head\n        if global_pool == 'map':\n            self.attn_pool = AttentionPoolLatent(\n                self.embed_dim,\n                num_heads=num_heads,\n                mlp_ratio=mlp_ratio,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                **dd,\n            )\n        elif global_pool == 'prr':\n            self.attn_pool = AttentionPoolPrr(\n                self.embed_dim,\n                num_heads=num_heads,\n                pool_type='token' if class_token else 'avg',\n                norm_layer=norm_layer,\n                **dd,\n            )\n            self.pool_include_prefix = True\n        else:\n            self.attn_pool = None\n        self.fc_norm = norm_layer(embed_dim, **dd) if final_norm and use_fc_norm else nn.Identity()\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = nn.Linear(self.embed_dim, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n        self.weight_init_mode = 'reset' if weight_init == 'skip' else weight_init\n        self.fix_init = fix_init\n        # TODO: skip init when on meta device when safe to do so\n        if weight_init != 'skip':\n            self.init_weights(needs_reset=False)\n\n    def fix_init_weight(self) -> None:\n        \"\"\"Apply weight initialization fix (scaling w/ layer index).\"\"\"\n        with torch.no_grad():\n            for layer_id, layer in enumerate(self.blocks):\n                scale = math.sqrt(2.0 * (layer_id + 1))\n                layer.attn.proj.weight.div_(scale)\n                layer.mlp.fc2.weight.div_(scale)\n\n    def init_weights(self, mode: str = '', needs_reset: bool = True) -> None:\n        
\"\"\"Initialize model weights.\n\n        Args:\n            mode: Weight initialization mode ('jax', 'jax_nlhb', 'moco', or '').\n            needs_reset: If True, call reset_parameters() on modules that have it.\n                Set to False when modules have already self-initialized in __init__.\n        \"\"\"\n        mode = mode or self.weight_init_mode\n        assert mode in ('jax', 'jax_nlhb', 'moco', 'reset', '')\n        head_bias = -math.log(self.num_classes) if 'nlhb' in mode else 0.\n        if self.pos_embed is not None:\n            trunc_normal_(self.pos_embed, std=.02)\n        if self.cls_token is not None:\n            nn.init.normal_(self.cls_token, std=1e-6)\n        if self.reg_token is not None:\n            nn.init.normal_(self.reg_token, std=1e-6)\n\n        named_apply(get_init_weights_vit(mode, head_bias, needs_reset=needs_reset), self)\n\n        if self.fix_init:\n            self.fix_init_weight()\n\n    def _init_weights(self, m: nn.Module) -> None:\n        \"\"\"Initialize weights for a single module (compatibility method).\"\"\"\n        # this fn left here for compat with downstream users\n        init_weights_vit_timm(m)\n\n    @torch.jit.ignore()\n    def load_pretrained(self, checkpoint_path: str, prefix: str = '') -> None:\n        \"\"\"Load pretrained weights.\n\n        Args:\n            checkpoint_path: Path to checkpoint.\n            prefix: Prefix for state dict keys.\n        \"\"\"\n        _load_weights(self, checkpoint_path, prefix)\n\n    @torch.jit.ignore\n    def no_weight_decay(self) -> Set[str]:\n        \"\"\"Set of parameters that should not use weight decay.\"\"\"\n        return {'pos_embed', 'cls_token', 'dist_token'}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Union[str, List]]:\n        \"\"\"Create regex patterns for parameter grouping.\n\n        Args:\n            coarse: Use coarse grouping.\n\n        Returns:\n            Dictionary mapping group 
names to regex patterns.\n        \"\"\"\n        return dict(\n            stem=r'^cls_token|pos_embed|patch_embed',  # stem and embed\n            blocks=[(r'^blocks\\.(\\d+)', None), (r'^norm', (99999,))]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Enable or disable gradient checkpointing.\n\n        Args:\n            enable: Whether to enable gradient checkpointing.\n        \"\"\"\n        self.grad_checkpointing = enable\n        if hasattr(self.patch_embed, 'set_grad_checkpointing'):\n            self.patch_embed.set_grad_checkpointing(enable)\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        \"\"\"Get the classifier head.\"\"\"\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None) -> None:\n        \"\"\"Reset the classifier head.\n\n        Args:\n            num_classes: Number of classes for new classifier.\n            global_pool: Global pooling type.\n        \"\"\"\n        self.num_classes = num_classes\n        if global_pool is not None:\n            assert global_pool in ('', 'avg', 'avgmax', 'max', 'token', 'map', 'prr')\n            if global_pool in ('map', 'prr') and self.attn_pool is None:\n                assert False, \"Cannot currently add attention pooling in reset_classifier().\"\n            elif global_pool not in ('map', 'prr') and self.attn_pool is not None:\n                self.attn_pool = None  # remove attention pooling\n            elif global_pool in ('map', 'prr') and self.global_pool != global_pool:\n                assert False, \"Cannot currently change attention pooling type in reset_classifier().\"\n            self.global_pool = global_pool\n        self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity()\n\n    def set_input_size(\n            self,\n            img_size: Optional[Tuple[int, int]] = None,\n            
patch_size: Optional[Tuple[int, int]] = None,\n    ) -> None:\n        \"\"\"Update the input image resolution and patch size.\n\n        Args:\n            img_size: New input resolution, if None current resolution is used.\n            patch_size: New patch size, if None existing patch size is used.\n        \"\"\"\n        prev_grid_size = self.patch_embed.grid_size\n        self.patch_embed.set_input_size(img_size=img_size, patch_size=patch_size)\n        if self.pos_embed is not None:\n            num_prefix_tokens = 0 if self.no_embed_class else self.num_prefix_tokens\n            num_new_tokens = self.patch_embed.num_patches + num_prefix_tokens\n            if num_new_tokens != self.pos_embed.shape[1]:\n                self.pos_embed = nn.Parameter(resample_abs_pos_embed(\n                    self.pos_embed,\n                    new_size=self.patch_embed.grid_size,\n                    old_size=prev_grid_size,\n                    num_prefix_tokens=num_prefix_tokens,\n                    verbose=True,\n                ))\n\n    def _pos_embed(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Apply positional embedding to input.\"\"\"\n        to_cat = []\n        if self.cls_token is not None:\n            to_cat.append(self.cls_token.expand(x.shape[0], -1, -1))\n        if self.reg_token is not None:\n            to_cat.append(self.reg_token.expand(x.shape[0], -1, -1))\n\n        if self.pos_embed is None:\n            return torch.cat(to_cat + [x.view(x.shape[0], -1, x.shape[-1])], dim=1)\n\n        if self.dynamic_img_size:\n            B, H, W, C = x.shape\n            prev_grid_size = self.patch_embed.grid_size\n            pos_embed = resample_abs_pos_embed(\n                self.pos_embed,\n                new_size=(H, W),\n                old_size=prev_grid_size,\n                num_prefix_tokens=0 if self.no_embed_class else self.num_prefix_tokens,\n            )\n            x = x.view(B, -1, C)\n        else:\n            pos_embed = 
self.pos_embed\n\n        if self.no_embed_class:\n            # deit-3, updated JAX (big vision)\n            # position embedding does not overlap with class token, add then concat\n            x = x + pos_embed\n            if to_cat:\n                x = torch.cat(to_cat + [x], dim=1)\n        else:\n            # original timm, JAX, and deit vit impl\n            # pos_embed has entry for class token, concat then add\n            if to_cat:\n                x = torch.cat(to_cat + [x], dim=1)\n            x = x + pos_embed\n\n        return self.pos_drop(x)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            return_prefix_tokens: bool = False,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n            output_dict: bool = False,\n            attn_mask: Optional[torch.Tensor] = None,\n            is_causal: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]], Dict[str, Any]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            return_prefix_tokens: Return both prefix and spatial intermediate tokens\n            norm: Apply norm layer to all intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n            output_dict: Return outputs as a dictionary with 'image_features' and 'image_intermediates' keys\n            attn_mask: Optional attention mask for masked attention (e.g., for NaFlex)\n            is_causal: If True, use causal (autoregressive) masking in attention\n 
       Returns:\n            A tuple with (final_features, intermediates), a list of intermediate features, or a dictionary containing\n            'image_features' and 'image_intermediates' (and optionally 'image_intermediates_prefix')\n        \"\"\"\n        assert output_fmt in ('NCHW', 'NLC'), 'Output format must be one of NCHW or NLC.'\n        reshape = output_fmt == 'NCHW'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n\n        # forward pass\n        B, _, height, width = x.shape\n        x = self.patch_embed(x)\n        x = self._pos_embed(x)\n        x = self.patch_drop(x)\n        x = self.norm_pre(x)\n\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            blocks = self.blocks\n        else:\n            blocks = self.blocks[:max_index + 1]\n        for i, blk in enumerate(blocks):\n            if attn_mask is not None or is_causal:\n                x = blk(x, attn_mask=attn_mask, is_causal=is_causal)\n            elif self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x)\n            else:\n                x = blk(x)\n            if i in take_indices:\n                # normalize intermediates with final norm layer if enabled\n                intermediates.append(self.norm(x) if norm else x)\n\n        # process intermediates\n        if self.num_prefix_tokens:\n            # split prefix (e.g. 
class, distill) and spatial feature tokens\n            prefix_tokens = [y[:, 0:self.num_prefix_tokens] for y in intermediates]\n            intermediates = [y[:, self.num_prefix_tokens:] for y in intermediates]\n        else:\n            prefix_tokens = None\n\n        if reshape:\n            # reshape to BCHW output format\n            H, W = self.patch_embed.dynamic_feat_size((height, width))\n            intermediates = [y.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous() for y in intermediates]\n\n        # For dictionary output, handle prefix tokens separately\n        if output_dict:\n            result_dict = {}\n            # Intermediates are always included\n            result_dict['image_intermediates'] = intermediates\n            if prefix_tokens is not None and return_prefix_tokens:\n                result_dict['image_intermediates_prefix'] = prefix_tokens\n\n            # Only include features if not intermediates_only\n            if not intermediates_only:\n                x_final = self.norm(x)\n                result_dict['image_features'] = x_final\n\n            return result_dict\n\n        # For non-dictionary output, maintain the original behavior\n        if not torch.jit.is_scripting() and return_prefix_tokens and prefix_tokens is not None:\n            # return_prefix not support in torchscript due to poor type handling\n            intermediates = list(zip(intermediates, prefix_tokens))\n\n        if intermediates_only:\n            return intermediates\n\n        x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ) -> List[int]:\n        \"\"\"Prune layers not required for specified intermediates.\n\n        Args:\n            indices: Indices of intermediate layers to keep.\n            prune_norm: Whether to prune normalization layer.\n       
     prune_head: Whether to prune the classifier head.\n\n        Returns:\n            List of indices that were kept.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n        self.blocks = self.blocks[:max_index + 1]  # truncate blocks\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.fc_norm = nn.Identity()\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def get_intermediate_layers(\n            self,\n            x: torch.Tensor,\n            n: Union[int, List[int], Tuple[int]] = 1,\n            reshape: bool = False,\n            return_prefix_tokens: bool = False,\n            norm: bool = False,\n            attn_mask: Optional[torch.Tensor] = None,\n    ) -> List[torch.Tensor]:\n        \"\"\"Get intermediate layer outputs (DINO interface compatibility).\n\n        NOTE: This API is for backwards compat, favour using forward_intermediates() directly.\n\n        Args:\n            x: Input tensor.\n            n: Number or indices of layers.\n            reshape: Reshape to NCHW format.\n            return_prefix_tokens: Return prefix tokens.\n            norm: Apply normalization.\n\n        Returns:\n            List of intermediate features.\n        \"\"\"\n        return self.forward_intermediates(\n            x, n,\n            return_prefix_tokens=return_prefix_tokens,\n            norm=norm,\n            output_fmt='NCHW' if reshape else 'NLC',\n            intermediates_only=True,\n            attn_mask=attn_mask,\n        )\n\n    def forward_features(\n            self,\n            x: torch.Tensor,\n            attn_mask: Optional[torch.Tensor] = None,\n            is_causal: bool = False,\n    ) -> torch.Tensor:\n        \"\"\"Forward pass through feature layers (embeddings, transformer blocks, post-transformer norm).\"\"\"\n        x = self.patch_embed(x)\n        x = self._pos_embed(x)\n        x = 
self.patch_drop(x)\n        x = self.norm_pre(x)\n\n        if attn_mask is not None or is_causal:\n            # If mask/causal provided, we need to apply blocks one by one\n            for blk in self.blocks:\n                x = blk(x, attn_mask=attn_mask, is_causal=is_causal)\n        elif self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n\n        x = self.norm(x)\n        return x\n\n    def pool(self, x: torch.Tensor, pool_type: Optional[str] = None) -> torch.Tensor:\n        \"\"\"Apply pooling to feature tokens.\n\n        Args:\n            x: Feature tensor.\n            pool_type: Pooling type override.\n\n        Returns:\n            Pooled features.\n        \"\"\"\n        if self.attn_pool is not None:\n            if not self.pool_include_prefix:\n                x = x[:, self.num_prefix_tokens:]\n            x = self.attn_pool(x)\n            return x\n        pool_type = self.global_pool if pool_type is None else pool_type\n        x = global_pool_nlc(\n            x,\n            pool_type=pool_type,\n            num_prefix_tokens=self.num_prefix_tokens,\n            reduce_include_prefix=self.pool_include_prefix,\n        )\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        \"\"\"Forward pass through classifier head.\n\n        Args:\n            x: Feature tensor.\n            pre_logits: Return features before final classifier.\n\n        Returns:\n            Output tensor.\n        \"\"\"\n        x = self.pool(x)\n        x = self.fc_norm(x)\n        x = self.head_drop(x)\n        return x if pre_logits else self.head(x)\n\n    def forward(\n            self,\n            x: torch.Tensor,\n            attn_mask: Optional[torch.Tensor] = None,\n            is_causal: bool = False,\n    ) -> torch.Tensor:\n        x = self.forward_features(x, attn_mask=attn_mask, 
is_causal=is_causal)\n        x = self.forward_head(x)\n        return x\n\n\ndef init_weights_vit_timm(module: nn.Module, name: str = '', needs_reset: bool = True) -> None:\n    \"\"\"ViT weight initialization, original timm impl (for reproducibility).\n\n    Args:\n        module: Module to initialize.\n        name: Module name for context.\n        needs_reset: If True, call reset_parameters() on modules that have it.\n    \"\"\"\n    if isinstance(module, nn.Linear):\n        trunc_normal_(module.weight, std=.02)\n        if module.bias is not None:\n            nn.init.zeros_(module.bias)\n    elif hasattr(module, 'init_weights'):\n        module.init_weights()\n    elif needs_reset and hasattr(module, 'reset_parameters'):\n        module.reset_parameters()\n\n\ndef init_weights_vit_jax(\n        module: nn.Module,\n        name: str = '',\n        head_bias: float = 0.0,\n        needs_reset: bool = True,\n) -> None:\n    \"\"\"ViT weight initialization, matching JAX (Flax) impl.\n\n    Args:\n        module: Module to initialize.\n        name: Module name for context.\n        head_bias: Bias value for head layer.\n        needs_reset: If True, call reset_parameters() on modules that have it.\n    \"\"\"\n    if isinstance(module, nn.Linear):\n        if name.startswith('head'):\n            nn.init.zeros_(module.weight)\n            nn.init.constant_(module.bias, head_bias)\n        else:\n            nn.init.xavier_uniform_(module.weight)\n            if module.bias is not None:\n                nn.init.normal_(module.bias, std=1e-6) if 'mlp' in name else nn.init.zeros_(module.bias)\n    elif isinstance(module, nn.Conv2d):\n        lecun_normal_(module.weight)\n        if module.bias is not None:\n            nn.init.zeros_(module.bias)\n    elif hasattr(module, 'init_weights'):\n        module.init_weights()\n    elif needs_reset and hasattr(module, 'reset_parameters'):\n        module.reset_parameters()\n\n\ndef init_weights_vit_moco(module: nn.Module, 
name: str = '', needs_reset: bool = True) -> None:\n    \"\"\"ViT weight initialization, matching moco-v3 impl minus fixed PatchEmbed.\n\n    Args:\n        module: Module to initialize.\n        name: Module name for context.\n        needs_reset: If True, call reset_parameters() on modules that have it.\n    \"\"\"\n    if isinstance(module, nn.Linear):\n        if 'qkv' in name:\n            # treat the weights of Q, K, V separately\n            val = math.sqrt(6. / float(module.weight.shape[0] // 3 + module.weight.shape[1]))\n            nn.init.uniform_(module.weight, -val, val)\n        else:\n            nn.init.xavier_uniform_(module.weight)\n        if module.bias is not None:\n            nn.init.zeros_(module.bias)\n    elif hasattr(module, 'init_weights'):\n        module.init_weights()\n    elif needs_reset and hasattr(module, 'reset_parameters'):\n        module.reset_parameters()\n\n\ndef init_weights_reset_parameters(module: nn.Module, name: str = '', needs_reset: bool = True) -> None:\n    if needs_reset and hasattr(module, 'reset_parameters'):\n        module.reset_parameters()\n\n\ndef get_init_weights_vit(mode: str = 'jax', head_bias: float = 0.0, needs_reset: bool = True) -> Callable:\n    if mode.startswith('jax'):\n        return partial(init_weights_vit_jax, head_bias=head_bias, needs_reset=needs_reset)\n    elif mode.startswith('moco'):\n        return partial(init_weights_vit_moco, needs_reset=needs_reset)\n    elif mode == 'reset':\n        # 'reset' means only call reset_parameters() on modules\n        return partial(init_weights_reset_parameters, needs_reset=needs_reset)\n    else:\n        # timm init is default\n        return partial(init_weights_vit_timm, needs_reset=needs_reset)\n\n\ndef resize_pos_embed(\n        posemb: torch.Tensor,\n        posemb_new: torch.Tensor,\n        num_prefix_tokens: int = 1,\n        gs_new: Tuple[int, int] = (),\n        interpolation: str = 'bicubic',\n        antialias: bool = False,\n) -> 
torch.Tensor:\n    \"\"\" Rescale the grid of position embeddings when loading from state_dict.\n    *DEPRECATED* This function is being deprecated in favour of using resample_abs_pos_embed\n    \"\"\"\n    ntok_new = posemb_new.shape[1] - num_prefix_tokens\n    ntok_old = posemb.shape[1] - num_prefix_tokens\n    gs_old = [int(math.sqrt(ntok_old))] * 2\n    if not len(gs_new):  # backwards compatibility\n        gs_new = [int(math.sqrt(ntok_new))] * 2\n    return resample_abs_pos_embed(\n        posemb, gs_new, gs_old,\n        num_prefix_tokens=num_prefix_tokens,\n        interpolation=interpolation,\n        antialias=antialias,\n        verbose=True,\n    )\n\n\n@torch.no_grad()\ndef _load_weights(model: VisionTransformer, checkpoint_path: str, prefix: str = '', load_bfloat16: bool = False) -> None:\n    \"\"\" Load weights from .npz checkpoints for official Google Brain Flax implementation\n    \"\"\"\n    import numpy as np\n    if load_bfloat16:\n        import jax.numpy as jnp\n        import ml_dtypes\n\n    def _n2p(_w, t=True, idx=None):\n        if idx is not None:\n            _w = _w[idx]\n\n        if load_bfloat16:\n            _w = _w.view(ml_dtypes.bfloat16).astype(jnp.float32)\n            _w = np.array(_w)\n\n        if _w.ndim == 4 and _w.shape[0] == _w.shape[1] == _w.shape[2] == 1:\n            _w = _w.flatten()\n        if t:\n            if _w.ndim == 4:\n                _w = _w.transpose([3, 2, 0, 1])\n            elif _w.ndim == 3:\n                _w = _w.transpose([2, 0, 1])\n            elif _w.ndim == 2:\n                _w = _w.transpose([1, 0])\n\n        _w = torch.from_numpy(_w)\n        return _w\n\n    if load_bfloat16:\n        w = jnp.load(checkpoint_path)\n    else:\n        w = np.load(checkpoint_path)\n\n    interpolation = 'bilinear'\n    antialias = False\n    big_vision = False\n    if not prefix:\n        if 'opt/target/embedding/kernel' in w:\n            prefix = 'opt/target/'\n        elif 'params/embedding/kernel' in 
w:\n            prefix = 'params/'\n            big_vision = True\n        elif 'params/img/embedding/kernel' in w:\n            prefix = 'params/img/'\n            big_vision = True\n\n    if hasattr(model.patch_embed, 'backbone'):\n        # hybrid\n        backbone = model.patch_embed.backbone\n        stem_only = not hasattr(backbone, 'stem')\n        stem = backbone if stem_only else backbone.stem\n        stem.conv.weight.copy_(adapt_input_conv(stem.conv.weight.shape[1], _n2p(w[f'{prefix}conv_root/kernel'])))\n        stem.norm.weight.copy_(_n2p(w[f'{prefix}gn_root/scale']))\n        stem.norm.bias.copy_(_n2p(w[f'{prefix}gn_root/bias']))\n        if not stem_only:\n            for i, stage in enumerate(backbone.stages):\n                for j, block in enumerate(stage.blocks):\n                    bp = f'{prefix}block{i + 1}/unit{j + 1}/'\n                    for r in range(3):\n                        getattr(block, f'conv{r + 1}').weight.copy_(_n2p(w[f'{bp}conv{r + 1}/kernel']))\n                        getattr(block, f'norm{r + 1}').weight.copy_(_n2p(w[f'{bp}gn{r + 1}/scale']))\n                        getattr(block, f'norm{r + 1}').bias.copy_(_n2p(w[f'{bp}gn{r + 1}/bias']))\n                    if block.downsample is not None:\n                        block.downsample.conv.weight.copy_(_n2p(w[f'{bp}conv_proj/kernel']))\n                        block.downsample.norm.weight.copy_(_n2p(w[f'{bp}gn_proj/scale']))\n                        block.downsample.norm.bias.copy_(_n2p(w[f'{bp}gn_proj/bias']))\n        embed_conv_w = _n2p(w[f'{prefix}embedding/kernel'])\n    else:\n        embed_conv_w = adapt_input_conv(\n            model.patch_embed.proj.weight.shape[1], _n2p(w[f'{prefix}embedding/kernel']))\n    if embed_conv_w.shape[-2:] != model.patch_embed.proj.weight.shape[-2:]:\n        embed_conv_w = resample_patch_embed(\n            embed_conv_w,\n            model.patch_embed.proj.weight.shape[-2:],\n            interpolation=interpolation,\n            
antialias=antialias,\n            verbose=True,\n        )\n\n    model.patch_embed.proj.weight.copy_(embed_conv_w)\n    model.patch_embed.proj.bias.copy_(_n2p(w[f'{prefix}embedding/bias']))\n    if model.cls_token is not None:\n        model.cls_token.copy_(_n2p(w[f'{prefix}cls'], t=False))\n    if big_vision:\n        pos_embed_w = _n2p(w[f'{prefix}pos_embedding'], t=False)\n    else:\n        pos_embed_w = _n2p(w[f'{prefix}Transformer/posembed_input/pos_embedding'], t=False)\n    if pos_embed_w.shape != model.pos_embed.shape:\n        num_prefix_tokens = 0 if getattr(model, 'no_embed_class', False) else getattr(model, 'num_prefix_tokens', 1)\n        pos_embed_w = resample_abs_pos_embed(  # resize pos embedding when different size from pretrained weights\n            pos_embed_w,\n            new_size=model.patch_embed.grid_size,\n            num_prefix_tokens=num_prefix_tokens,\n            interpolation=interpolation,\n            antialias=antialias,\n            verbose=True,\n        )\n    model.pos_embed.copy_(pos_embed_w)\n    model.norm.weight.copy_(_n2p(w[f'{prefix}Transformer/encoder_norm/scale']))\n    model.norm.bias.copy_(_n2p(w[f'{prefix}Transformer/encoder_norm/bias']))\n    if (isinstance(model.head, nn.Linear) and\n            f'{prefix}head/bias' in w and\n            model.head.bias.shape[0] == w[f'{prefix}head/bias'].shape[-1]):\n        model.head.weight.copy_(_n2p(w[f'{prefix}head/kernel']))\n        model.head.bias.copy_(_n2p(w[f'{prefix}head/bias']))\n    # NOTE representation layer has been removed, not used in latest 21k/1k pretrained weights\n    # if isinstance(getattr(model.pre_logits, 'fc', None), nn.Linear) and f'{prefix}pre_logits/bias' in w:\n    #     model.pre_logits.fc.weight.copy_(_n2p(w[f'{prefix}pre_logits/kernel']))\n    #     model.pre_logits.fc.bias.copy_(_n2p(w[f'{prefix}pre_logits/bias']))\n    if isinstance(model.attn_pool, AttentionPoolLatent):\n        block_prefix = f'{prefix}MAPHead_0/'\n        mha_prefix = 
block_prefix + f'MultiHeadDotProductAttention_0/'\n        model.attn_pool.latent.copy_(_n2p(w[f'{block_prefix}probe'], t=False))\n        model.attn_pool.kv.weight.copy_(torch.cat([\n            _n2p(w[f'{mha_prefix}{n}/kernel'], t=False).flatten(1).T for n in ('key', 'value')]))\n        model.attn_pool.kv.bias.copy_(torch.cat([\n            _n2p(w[f'{mha_prefix}{n}/bias'], t=False).reshape(-1) for n in ('key', 'value')]))\n        model.attn_pool.q.weight.copy_(_n2p(w[f'{mha_prefix}query/kernel'], t=False).flatten(1).T)\n        model.attn_pool.q.bias.copy_(_n2p(w[f'{mha_prefix}query/bias'], t=False).reshape(-1))\n        model.attn_pool.proj.weight.copy_(_n2p(w[f'{mha_prefix}out/kernel']).flatten(1))\n        model.attn_pool.proj.bias.copy_(_n2p(w[f'{mha_prefix}out/bias']))\n        model.attn_pool.norm.weight.copy_(_n2p(w[f'{block_prefix}LayerNorm_0/scale']))\n        model.attn_pool.norm.bias.copy_(_n2p(w[f'{block_prefix}LayerNorm_0/bias']))\n        for r in range(2):\n            getattr(model.attn_pool.mlp, f'fc{r + 1}').weight.copy_(_n2p(w[f'{block_prefix}MlpBlock_0/Dense_{r}/kernel']))\n            getattr(model.attn_pool.mlp, f'fc{r + 1}').bias.copy_(_n2p(w[f'{block_prefix}MlpBlock_0/Dense_{r}/bias']))\n\n    mha_sub, b_sub, ln1_sub = (0, 0, 1) if big_vision else (1, 3, 2)\n    for i, block in enumerate(model.blocks.children()):\n        if f'{prefix}Transformer/encoderblock/LayerNorm_0/scale' in w:\n            block_prefix = f'{prefix}Transformer/encoderblock/'\n            idx = i\n        else:\n            block_prefix = f'{prefix}Transformer/encoderblock_{i}/'\n            idx = None\n        mha_prefix = block_prefix + f'MultiHeadDotProductAttention_{mha_sub}/'\n        block.norm1.weight.copy_(_n2p(w[f'{block_prefix}LayerNorm_0/scale'], idx=idx))\n        block.norm1.bias.copy_(_n2p(w[f'{block_prefix}LayerNorm_0/bias'], idx=idx))\n        block.attn.qkv.weight.copy_(torch.cat([\n            _n2p(w[f'{mha_prefix}{n}/kernel'], t=False, 
idx=idx).flatten(1).T for n in ('query', 'key', 'value')]))\n        block.attn.qkv.bias.copy_(torch.cat([\n            _n2p(w[f'{mha_prefix}{n}/bias'], t=False, idx=idx).reshape(-1) for n in ('query', 'key', 'value')]))\n        block.attn.proj.weight.copy_(_n2p(w[f'{mha_prefix}out/kernel'], idx=idx).flatten(1))\n        block.attn.proj.bias.copy_(_n2p(w[f'{mha_prefix}out/bias'], idx=idx))\n        block.norm2.weight.copy_(_n2p(w[f'{block_prefix}LayerNorm_{ln1_sub}/scale'], idx=idx))\n        block.norm2.bias.copy_(_n2p(w[f'{block_prefix}LayerNorm_{ln1_sub}/bias'], idx=idx))\n        for r in range(2):\n            getattr(block.mlp, f'fc{r + 1}').weight.copy_(\n                _n2p(w[f'{block_prefix}MlpBlock_{b_sub}/Dense_{r}/kernel'], idx=idx))\n            getattr(block.mlp, f'fc{r + 1}').bias.copy_(\n                _n2p(w[f'{block_prefix}MlpBlock_{b_sub}/Dense_{r}/bias'], idx=idx))\n\n\ndef _convert_openai_clip(\n        state_dict: Dict[str, torch.Tensor],\n        model: VisionTransformer,\n        prefix: str = 'visual.',\n) -> Dict[str, torch.Tensor]:\n    out_dict = {}\n    swaps = [\n        ('conv1', 'patch_embed.proj'),\n        ('positional_embedding', 'pos_embed'),\n        ('transformer.resblocks.', 'blocks.'),\n        ('ln_pre', 'norm_pre'),\n        ('ln_post', 'norm'),\n        ('ln_', 'norm'),\n        ('in_proj_', 'qkv.'),\n        ('out_proj', 'proj'),\n        ('mlp.c_fc', 'mlp.fc1'),\n        ('mlp.c_proj', 'mlp.fc2'),\n    ]\n    for k, v in state_dict.items():\n        if not k.startswith(prefix):\n            continue\n        k = k.replace(prefix, '')\n        for sp in swaps:\n            k = k.replace(sp[0], sp[1])\n\n        if k == 'proj':\n            k = 'head.weight'\n            v = v.transpose(0, 1)\n            out_dict['head.bias'] = torch.zeros(v.shape[0])\n        elif k == 'class_embedding':\n            k = 'cls_token'\n            v = v.unsqueeze(0).unsqueeze(1)\n        elif k == 'pos_embed':\n            v = 
v.unsqueeze(0)\n        out_dict[k] = v\n    return out_dict\n\n\ndef _convert_dinov2(\n        state_dict: Dict[str, torch.Tensor],\n        model: VisionTransformer,\n) -> Dict[str, torch.Tensor]:\n    import re\n    out_dict = {}\n    state_dict.pop(\"mask_token\", None)\n    if 'register_tokens' in state_dict:\n        # convert dinov2 w/ registers to no_embed_class timm model (neither cls or reg tokens overlap pos embed)\n        out_dict['reg_token'] = state_dict.pop('register_tokens')\n        out_dict['cls_token'] = state_dict.pop('cls_token') + state_dict['pos_embed'][:, 0]\n        out_dict['pos_embed'] = state_dict.pop('pos_embed')[:, 1:]\n    for k, v in state_dict.items():\n        if re.match(r\"blocks\\.(\\d+)\\.mlp\\.w12\\.(?:weight|bias)\", k):\n            out_dict[k.replace(\"w12\", \"fc1\")] = v\n            continue\n        elif re.match(r\"blocks\\.(\\d+)\\.mlp\\.w3\\.(?:weight|bias)\", k):\n            out_dict[k.replace(\"w3\", \"fc2\")] = v\n            continue\n        out_dict[k] = v\n    return out_dict\n\n\ndef _convert_aimv2(\n        state_dict: Dict[str, torch.Tensor],\n        model: VisionTransformer,\n) -> Dict[str, torch.Tensor]:\n    out_dict = {}\n    for k, v in state_dict.items():\n        k = k.replace('norm_1', 'norm1')\n        k = k.replace('norm_2', 'norm2')\n        k = k.replace('preprocessor.patchifier.', 'patch_embed.')\n        k = k.replace('preprocessor.pos_embed', 'pos_embed')\n        k = k.replace('trunk.', '')\n        k = k.replace('post_trunk_norm.', 'norm.')\n        k = k.replace('mlp.fc1', 'mlp.fc1_g')\n        k = k.replace('mlp.fc3', 'mlp.fc1_x')\n        out_dict[k] = v\n    return out_dict\n\n\ndef _convert_beit3(state_dict: dict, model):\n    \"\"\"\n    Turn a BEiT-3 checkpoint into a standard VisionTransformer state-dict.\n    \"\"\"\n    import re\n    state_dict = state_dict.get(\"model\", state_dict)  # unwrap if needed\n\n    # Prune unused\n    for k in (\"beit3.text_embed.weight\", 
\"beit3.vision_embed.mask_token\"):\n        state_dict.pop(k, None)\n\n    # Key renaming rules\n    rules = [\n        (r\"beit3\\.\", \"\"),\n        (r\"vision_embed\\.cls_token\", \"cls_token\"),\n        (r\"vision_embed\\.\",          \"patch_embed.\"),\n        (r\"embed_positions\\.\",       \"pos_embed.\"),\n        (r\"encoder\\.\", \"\"),\n        (r\"layers\\.\", \"blocks.\"),\n        (r\"ffn_layernorm\\.\", \"norm.\"), (r\"ffn\\.\", \"mlp.\"),\n        (r\"self_attn_layer_norm\\.\", \"norm1.\"), (r\"self_attn\\.\", \"attn.\"),\n        (r\"final_layer_norm\\.\", \"norm2.\"),\n        (r\"inner_attn_ln\", \"norm\"),\n        (r\"out_proj\", \"proj\"),\n        (r\"\\.A\\.\", \".\"),\n    ]\n\n    # First pass, rename keys\n    tmp = {}\n    for k, v in state_dict.items():\n        if \".B.\" in k:\n            continue  # use branch-A only\n        for old, new in rules:\n            k = re.sub(old, new, k)\n        if k == \"pos_embed.weight\":\n            # strip first two positions, [1, N+1, D]\n            tmp[\"pos_embed\"] = v[2:].unsqueeze(0)\n        else:\n            tmp[k] = v\n\n    # Second pass, fuse q, k, v\n    out, buf = {}, {}\n    pat = re.compile(r\"blocks\\.(\\d+)\\.attn\\.(q|k|v)_proj\\.(weight|bias)$\")\n    for k, v in tmp.items():\n        m = pat.fullmatch(k)\n        if not m:  # anything not q/k/v -> copy through\n            out[k] = v\n            continue\n\n        blk, which, kind = m.groups()  # block idx, 'q'/'k'/'v', 'weight'/'bias'\n        stash = buf.setdefault((blk, kind), {})  # Gather by block & param type\n        stash[which] = v\n        if len(stash) == 3:  # Have q, k, v -> concatenate\n            out[f\"blocks.{blk}.attn.qkv.{kind}\"] = torch.cat(\n                [stash['q'], stash['k'], stash['v']], dim=0\n            )\n\n    return out\n\n\ndef checkpoint_filter_fn(\n        state_dict: Dict[str, torch.Tensor],\n        model: VisionTransformer,\n        adapt_layer_scale: bool = False,\n        
interpolation: str = 'bicubic',\n        antialias: bool = True,\n) -> Dict[str, torch.Tensor]:\n    \"\"\" convert patch embedding weight from manual patchify + linear proj to conv\"\"\"\n    import re\n    out_dict = {}\n    state_dict = state_dict.get('model', state_dict)\n    state_dict = state_dict.get('state_dict', state_dict)\n    prefix = ''\n\n    if 'visual.class_embedding' in state_dict:\n        state_dict = _convert_openai_clip(state_dict, model)\n    elif 'module.visual.class_embedding' in state_dict:\n        state_dict = _convert_openai_clip(state_dict, model, prefix='module.visual.')\n    elif \"mask_token\" in state_dict:\n        state_dict = _convert_dinov2(state_dict, model)\n    elif any('beit3.' in k for k in state_dict.keys()):\n        # BEiT3 model - multimodal checkpoint with beit3.* prefix\n        state_dict = _convert_beit3(state_dict, model)\n    elif \"encoder\" in state_dict:\n        # IJEPA, vit in an 'encoder' submodule\n        state_dict = state_dict['encoder']\n        prefix = 'module.'\n    elif 'visual.trunk.pos_embed' in state_dict or 'visual.trunk.blocks.0.norm1.weight' in state_dict:\n        # OpenCLIP model with timm vision encoder\n        prefix = 'visual.trunk.'\n        if 'visual.head.proj.weight' in state_dict and isinstance(model.head, nn.Linear):\n            # remap final nn.Linear if it exists outside of the timm .trunk (ie in visual.head.proj)\n            out_dict['head.weight'] = state_dict['visual.head.proj.weight']\n            out_dict['head.bias'] = torch.zeros(state_dict['visual.head.proj.weight'].shape[0])\n    elif 'module.visual.trunk.pos_embed' in state_dict:\n        prefix = 'module.visual.trunk.'\n    elif 'preprocessor.patchifier.proj.weight' in state_dict:\n        state_dict = _convert_aimv2(state_dict, model)\n\n    if prefix:\n        # filter on & remove prefix string from keys\n        state_dict = {k[len(prefix):]: v for k, v in state_dict.items() if k.startswith(prefix)}\n\n    for k, 
v in state_dict.items():\n        if 'patch_embed.proj.weight' in k:\n            O, I, H, W = model.patch_embed.proj.weight.shape\n            if len(v.shape) < 4:\n                # For old models that I trained prior to conv based patchification\n                O, I, H, W = model.patch_embed.proj.weight.shape\n                v = v.reshape(O, -1, H, W)\n            if v.shape[-1] != W or v.shape[-2] != H:\n                v = resample_patch_embed(\n                    v,\n                    (H, W),\n                    interpolation=interpolation,\n                    antialias=antialias,\n                    verbose=True,\n                )\n        elif k == 'pos_embed' and v.shape[1] != model.pos_embed.shape[1]:\n            # To resize pos embedding when using model at different size from pretrained weights\n            num_prefix_tokens = 0 if getattr(model, 'no_embed_class', False) else getattr(model, 'num_prefix_tokens', 1)\n            v = resample_abs_pos_embed(\n                v,\n                new_size=model.patch_embed.grid_size,\n                num_prefix_tokens=num_prefix_tokens,\n                interpolation=interpolation,\n                antialias=antialias,\n                verbose=True,\n            )\n        elif adapt_layer_scale and 'gamma_' in k:\n            # remap layer-scale gamma into sub-module (deit3 models)\n            k = re.sub(r'gamma_([0-9])', r'ls\\1.gamma', k)\n        elif 'pre_logits' in k:\n            # NOTE representation layer removed as not used in latest 21k/1k pretrained weights\n            continue\n        out_dict[k] = v\n    return out_dict\n\n\ndef _cfg(url: str = '', **kwargs) -> Dict[str, Any]:\n    return {\n        'url': url,\n        'num_classes': 1000,\n        'input_size': (3, 224, 224),\n        'pool_size': None,\n        'crop_pct': 0.9,\n        'interpolation': 'bicubic',\n        'fixed_input_size': True,\n        'mean': IMAGENET_INCEPTION_MEAN,\n        'std': 
IMAGENET_INCEPTION_STD,\n        'first_conv': 'patch_embed.proj',\n        'classifier': 'head',\n        'license': 'apache-2.0',\n        **kwargs,\n    }\n\ndefault_cfgs = {\n\n    # re-finetuned augreg 21k FT on in1k weights\n    'vit_base_patch16_224.augreg2_in21k_ft_in1k': _cfg(\n        hf_hub_id='timm/'),\n    'vit_base_patch8_224.augreg2_in21k_ft_in1k': _cfg(\n        hf_hub_id='timm/'),\n\n    # How to train your ViT (augreg) weights, pretrained on 21k FT on in1k\n    'vit_tiny_patch16_224.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz',\n        hf_hub_id='timm/',\n        custom_load=True),\n    'vit_tiny_patch16_384.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz',\n        hf_hub_id='timm/',\n        custom_load=True, input_size=(3, 384, 384), crop_pct=1.0),\n    'vit_small_patch32_224.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/S_32-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz',\n        hf_hub_id='timm/',\n        custom_load=True),\n    'vit_small_patch32_384.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/S_32-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz',\n        hf_hub_id='timm/',\n        custom_load=True, input_size=(3, 384, 384), crop_pct=1.0),\n    'vit_small_patch16_224.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/S_16-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz',\n        hf_hub_id='timm/',\n        custom_load=True),\n    
'vit_small_patch16_384.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/S_16-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz',\n        hf_hub_id='timm/',\n        custom_load=True, input_size=(3, 384, 384), crop_pct=1.0),\n    'vit_base_patch32_224.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/B_32-i21k-300ep-lr_0.001-aug_medium1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz',\n        hf_hub_id='timm/',\n        custom_load=True),\n    'vit_base_patch32_384.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/B_32-i21k-300ep-lr_0.001-aug_light1-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz',\n        hf_hub_id='timm/',\n        custom_load=True, input_size=(3, 384, 384), crop_pct=1.0),\n    'vit_base_patch16_224.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/B_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_224.npz',\n        hf_hub_id='timm/',\n        custom_load=True),\n    'vit_base_patch16_384.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/B_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_384.npz',\n        hf_hub_id='timm/',\n        custom_load=True, input_size=(3, 384, 384), crop_pct=1.0),\n    'vit_base_patch8_224.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/B_8-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_224.npz',\n        hf_hub_id='timm/',\n        custom_load=True),\n    'vit_large_patch16_224.augreg_in21k_ft_in1k': _cfg(\n        
url='https://storage.googleapis.com/vit_models/augreg/L_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_224.npz',\n        hf_hub_id='timm/',\n        custom_load=True),\n    'vit_large_patch16_384.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/L_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_384.npz',\n        hf_hub_id='timm/',\n        custom_load=True, input_size=(3, 384, 384), crop_pct=1.0),\n\n    # patch models (weights from official Google JAX impl) pretrained on in21k FT on in1k\n    'vit_base_patch16_224.orig_in21k_ft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_p16_224-80ecf9dd.pth',\n        hf_hub_id='timm/'),\n    'vit_base_patch16_384.orig_in21k_ft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_p16_384-83fb41ba.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), crop_pct=1.0),\n    'vit_large_patch32_384.orig_in21k_ft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_p32_384-9b920ba8.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), crop_pct=1.0),\n\n    # How to train your ViT (augreg) weights trained on in1k only\n    'vit_small_patch16_224.augreg_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/S_16-i1k-300ep-lr_0.001-aug_medium2-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_224.npz',\n        hf_hub_id='timm/',\n        custom_load=True),\n    'vit_small_patch16_384.augreg_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/S_16-i1k-300ep-lr_0.001-aug_medium2-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_384.npz',\n        hf_hub_id='timm/',\n        custom_load=True, input_size=(3, 384, 384), 
crop_pct=1.0),\n    'vit_base_patch32_224.augreg_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/B_32-i1k-300ep-lr_0.001-aug_medium2-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_224.npz',\n        hf_hub_id='timm/',\n        custom_load=True),\n    'vit_base_patch32_384.augreg_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/B_32-i1k-300ep-lr_0.001-aug_medium2-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_384.npz',\n        hf_hub_id='timm/',\n        custom_load=True, input_size=(3, 384, 384), crop_pct=1.0),\n    'vit_base_patch16_224.augreg_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/B_16-i1k-300ep-lr_0.001-aug_strong2-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_224.npz',\n        hf_hub_id='timm/',\n        custom_load=True),\n    'vit_base_patch16_384.augreg_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/B_16-i1k-300ep-lr_0.001-aug_strong2-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_384.npz',\n        hf_hub_id='timm/',\n        custom_load=True, input_size=(3, 384, 384), crop_pct=1.0),\n\n    'vit_large_patch14_224.untrained': _cfg(url=''),\n    'vit_huge_patch14_224.untrained': _cfg(url=''),\n    'vit_giant_patch14_224.untrained': _cfg(url=''),\n    'vit_gigantic_patch14_224.untrained': _cfg(url=''),\n\n    # patch models, imagenet21k (weights from official Google JAX impl), classifier not valid\n    'vit_base_patch32_224.orig_in21k': _cfg(\n        #url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_patch32_224_in21k-8db57226.pth',\n        hf_hub_id='timm/',\n        num_classes=0),\n    'vit_base_patch16_224.orig_in21k': _cfg(\n        #url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_patch16_224_in21k-e5005f0a.pth',\n        hf_hub_id='timm/',\n        num_classes=0),\n    
'vit_large_patch32_224.orig_in21k': _cfg(\n        #url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_patch32_224_in21k-9046d2e7.pth',\n        hf_hub_id='timm/',\n        num_classes=0),\n    'vit_large_patch16_224.orig_in21k': _cfg(\n        #url='https://github.com/huggingface/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_patch16_224_in21k-606da67d.pth',\n        hf_hub_id='timm/',\n        num_classes=0),\n    'vit_huge_patch14_224.orig_in21k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=0),\n\n    # How to train your ViT (augreg) weights, pretrained on in21k\n    'vit_tiny_patch16_224.augreg_in21k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0.npz',\n        hf_hub_id='timm/',\n        custom_load=True, num_classes=21843),\n    'vit_small_patch32_224.augreg_in21k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/S_32-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0.npz',\n        hf_hub_id='timm/',\n        custom_load=True, num_classes=21843),\n    'vit_small_patch16_224.augreg_in21k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/S_16-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0.npz',\n        hf_hub_id='timm/',\n        custom_load=True, num_classes=21843),\n    'vit_base_patch32_224.augreg_in21k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/B_32-i21k-300ep-lr_0.001-aug_medium1-wd_0.03-do_0.0-sd_0.0.npz',\n        hf_hub_id='timm/',\n        custom_load=True, num_classes=21843),\n    'vit_base_patch16_224.augreg_in21k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/B_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0.npz',\n        hf_hub_id='timm/',\n        custom_load=True, num_classes=21843),\n    'vit_base_patch8_224.augreg_in21k': _cfg(\n        
url='https://storage.googleapis.com/vit_models/augreg/B_8-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0.npz',\n        hf_hub_id='timm/',\n        custom_load=True, num_classes=21843),\n    'vit_large_patch16_224.augreg_in21k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/L_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.1-sd_0.1.npz',\n        hf_hub_id='timm/',\n        custom_load=True, num_classes=21843),\n\n    # SAM trained models (https://arxiv.org/abs/2106.01548)\n    'vit_base_patch32_224.sam_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/sam/ViT-B_32.npz', custom_load=True,\n        hf_hub_id='timm/'),\n    'vit_base_patch16_224.sam_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/sam/ViT-B_16.npz', custom_load=True,\n        hf_hub_id='timm/'),\n\n    # DINO pretrained - https://arxiv.org/abs/2104.14294 (no classifier head, for fine-tune only)\n    'vit_small_patch16_224.dino': _cfg(\n        url='https://dl.fbaipublicfiles.com/dino/dino_deitsmall16_pretrain/dino_deitsmall16_pretrain.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0),\n    'vit_small_patch8_224.dino': _cfg(\n        url='https://dl.fbaipublicfiles.com/dino/dino_deitsmall8_pretrain/dino_deitsmall8_pretrain.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0),\n    'vit_base_patch16_224.dino': _cfg(\n        url='https://dl.fbaipublicfiles.com/dino/dino_vitbase16_pretrain/dino_vitbase16_pretrain.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0),\n    'vit_base_patch8_224.dino': _cfg(\n        url='https://dl.fbaipublicfiles.com/dino/dino_vitbase8_pretrain/dino_vitbase8_pretrain.pth',\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0),\n\n    # DINOv2 pretrained - 
https://arxiv.org/abs/2304.07193 (no classifier head, for fine-tune/features only)\n    'vit_small_patch14_dinov2.lvd142m': _cfg(\n        url='https://dl.fbaipublicfiles.com/dinov2/dinov2_vits14/dinov2_vits14_pretrain.pth',\n        hf_hub_id='timm/',\n        license='apache-2.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0,\n        input_size=(3, 518, 518), crop_pct=1.0),\n    'vit_base_patch14_dinov2.lvd142m': _cfg(\n        url='https://dl.fbaipublicfiles.com/dinov2/dinov2_vitb14/dinov2_vitb14_pretrain.pth',\n        hf_hub_id='timm/',\n        license='apache-2.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0,\n        input_size=(3, 518, 518), crop_pct=1.0),\n    'vit_large_patch14_dinov2.lvd142m': _cfg(\n        url='https://dl.fbaipublicfiles.com/dinov2/dinov2_vitl14/dinov2_vitl14_pretrain.pth',\n        hf_hub_id='timm/',\n        license='apache-2.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0,\n        input_size=(3, 518, 518), crop_pct=1.0),\n    'vit_giant_patch14_dinov2.lvd142m': _cfg(\n        url='https://dl.fbaipublicfiles.com/dinov2/dinov2_vitg14/dinov2_vitg14_pretrain.pth',\n        hf_hub_id='timm/',\n        license='apache-2.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0,\n        input_size=(3, 518, 518), crop_pct=1.0),\n\n    # DINOv2 pretrained w/ registers - https://arxiv.org/abs/2309.16588 (no classifier head, for fine-tune/features only)\n    'vit_small_patch14_reg4_dinov2.lvd142m': _cfg(\n        url='https://dl.fbaipublicfiles.com/dinov2/dinov2_vits14/dinov2_vits14_reg4_pretrain.pth',\n        hf_hub_id='timm/',\n        license='apache-2.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0,\n        input_size=(3, 518, 518), crop_pct=1.0),\n    'vit_base_patch14_reg4_dinov2.lvd142m': _cfg(\n        
url='https://dl.fbaipublicfiles.com/dinov2/dinov2_vitb14/dinov2_vitb14_reg4_pretrain.pth',\n        hf_hub_id='timm/',\n        license='apache-2.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0,\n        input_size=(3, 518, 518), crop_pct=1.0),\n    'vit_large_patch14_reg4_dinov2.lvd142m': _cfg(\n        url='https://dl.fbaipublicfiles.com/dinov2/dinov2_vitl14/dinov2_vitl14_reg4_pretrain.pth',\n        hf_hub_id='timm/',\n        license='apache-2.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0,\n        input_size=(3, 518, 518), crop_pct=1.0),\n    'vit_giant_patch14_reg4_dinov2.lvd142m': _cfg(\n        url='https://dl.fbaipublicfiles.com/dinov2/dinov2_vitg14/dinov2_vitg14_reg4_pretrain.pth',\n        hf_hub_id='timm/',\n        license='apache-2.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0,\n        input_size=(3, 518, 518), crop_pct=1.0),\n\n    # ViT ImageNet-21K-P pretraining by MILL\n    'vit_base_patch16_224_miil.in21k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/vit_base_patch16_224_in21k_miil-887286df.pth',\n        hf_hub_id='timm/',\n        mean=(0., 0., 0.), std=(1., 1., 1.), crop_pct=0.875, interpolation='bilinear', num_classes=11221),\n    'vit_base_patch16_224_miil.in21k_ft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/vit_base_patch16_224_1k_miil_84_4-2deb18e3.pth',\n        hf_hub_id='timm/',\n        mean=(0., 0., 0.), std=(1., 1., 1.), crop_pct=0.875, interpolation='bilinear'),\n\n    # Custom timm variants\n    'vit_base_patch16_rpn_224.sw_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_base_patch16_rpn_224-sw-3b07e89d.pth',\n        hf_hub_id='timm/'),\n    'vit_medium_patch16_gap_240.sw_in12k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 240, 
240), crop_pct=0.95, num_classes=11821),\n    'vit_medium_patch16_gap_256.sw_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_medium_patch16_gap_384.sw_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), crop_pct=0.95, crop_mode='squash'),\n    'vit_betwixt_patch16_gap_256.untrained': _cfg(\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_base_patch16_gap_224.untrained': _cfg(),\n\n    # CLIP pretrained image tower and related fine-tuned weights\n    'vit_base_patch32_clip_224.laion2b_ft_in12k_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD),\n    'vit_base_patch32_clip_384.laion2b_ft_in12k_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, input_size=(3, 384, 384)),\n    'vit_base_patch32_clip_448.laion2b_ft_in12k_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, input_size=(3, 448, 448)),\n    'vit_base_patch16_clip_224.laion2b_ft_in12k_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=0.95),\n    'vit_base_patch16_clip_384.laion2b_ft_in12k_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        crop_pct=1.0, input_size=(3, 384, 384), crop_mode='squash'),\n    'vit_large_patch14_clip_224.laion2b_ft_in12k_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0),\n    'vit_large_patch14_clip_336.laion2b_ft_in12k_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'),\n    'vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, 
crop_pct=1.0),\n    'vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'),\n\n    'vit_base_patch32_clip_224.openai_ft_in12k_in1k': _cfg(\n        # hf_hub_id='timm/vit_base_patch32_clip_224.openai_ft_in12k_in1k',  # FIXME weight exists, need to push\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD),\n    'vit_base_patch32_clip_384.openai_ft_in12k_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        crop_pct=0.95, input_size=(3, 384, 384), crop_mode='squash'),\n    'vit_base_patch16_clip_224.openai_ft_in12k_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=0.95),\n    'vit_base_patch16_clip_384.openai_ft_in12k_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        crop_pct=0.95, input_size=(3, 384, 384), crop_mode='squash'),\n    'vit_large_patch14_clip_224.openai_ft_in12k_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0),\n    'vit_large_patch14_clip_336.openai_ft_in12k_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'),\n\n    'vit_base_patch32_clip_224.laion2b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD),\n    'vit_base_patch16_clip_224.laion2b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0),\n    'vit_base_patch16_clip_384.laion2b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        crop_pct=1.0, input_size=(3, 384, 384), crop_mode='squash'),\n    'vit_large_patch14_clip_224.laion2b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n  
      mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0),\n    'vit_large_patch14_clip_336.laion2b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,\n        crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'),\n    'vit_huge_patch14_clip_224.laion2b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0),\n    'vit_huge_patch14_clip_336.laion2b_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'),\n\n    'vit_base_patch32_clip_224.openai_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD),\n    'vit_base_patch16_clip_224.openai_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD),\n    'vit_base_patch16_clip_384.openai_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        crop_pct=1.0, input_size=(3, 384, 384), crop_mode='squash'),\n    'vit_large_patch14_clip_224.openai_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0),\n\n    'vit_base_patch16_clip_224.laion2b_ft_in12k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821),\n    'vit_large_patch14_clip_224.laion2b_ft_in12k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0, num_classes=11821),\n    'vit_huge_patch14_clip_224.laion2b_ft_in12k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=11821),\n\n    'vit_base_patch16_clip_224.openai_ft_in12k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821),\n    
'vit_large_patch14_clip_224.openai_ft_in12k': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=11821),\n\n    'vit_base_patch32_clip_224.laion2b': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512),\n    'vit_base_patch16_clip_224.laion2b': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=512),\n    'vit_large_patch14_clip_224.laion2b': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0, num_classes=768),\n    'vit_huge_patch14_clip_224.laion2b': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1024),\n    'vit_giant_patch14_clip_224.laion2b': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1024),\n    'vit_gigantic_patch14_clip_224.laion2b': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1280),\n\n    'vit_base_patch32_clip_224.laion400m_e32': _cfg(\n        hf_hub_id='timm/',\n        license='mit',\n        notes=('natively QuickGELU, use quickgelu model variant for original results',),\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512),\n    'vit_base_patch16_clip_224.laion400m_e32': _cfg(\n        hf_hub_id='timm/',\n        license='mit',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=512),\n    'vit_base_patch16_plus_clip_240.laion400m_e32': _cfg(\n        hf_hub_id='timm/',\n        license='mit',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 240, 240), crop_pct=1.0, num_classes=640),\n    'vit_large_patch14_clip_224.laion400m_e32': _cfg(\n        hf_hub_id='timm/',\n        license='mit',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, 
crop_pct=1.0, num_classes=768),\n\n    'vit_base_patch32_clip_224.datacompxl': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=512),\n    'vit_base_patch32_clip_256.datacompxl': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        crop_pct=1.0, input_size=(3, 256, 256), num_classes=512),\n    'vit_base_patch16_clip_224.datacompxl': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=512),\n    'vit_large_patch14_clip_224.datacompxl': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=768),\n\n    'vit_base_patch16_clip_224.dfn2b': _cfg(\n        hf_hub_id='timm/',\n        license='apple-ascl',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=512),\n    'vit_large_patch14_clip_224.dfn2b_s39b': _cfg(\n        hf_hub_id='timm/',\n        license='apple-ascl',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=768),\n    'vit_large_patch14_clip_224.dfn2b': _cfg(\n        hf_hub_id='timm/',\n        license='apple-ascl',\n        notes=('natively QuickGELU, use quickgelu model variant for original results',),\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=768),\n    'vit_huge_patch14_clip_224.dfn5b': _cfg(\n        hf_hub_id='timm/',\n        license='apple-ascl',\n        notes=('natively QuickGELU, use quickgelu model variant for original results',),\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1024),\n    'vit_huge_patch14_clip_378.dfn5b': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        license='apple-ascl',\n        notes=('natively QuickGELU, use quickgelu model variant for original results',),\n        crop_pct=1.0, input_size=(3, 378, 378), num_classes=1024),\n\n    
# 'vit_large_patch14_clip_224.metaclip2_worldwide': _cfg(\n    #     hf_hub_id='timm/',\n    #     license='cc-by-nc-4.0',\n    #     notes=('natively QuickGELU, use quickgelu model variant for original results',),\n    #     mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=768),\n    'vit_huge_patch14_clip_224.metaclip2_worldwide': _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        notes=('natively QuickGELU, use quickgelu model variant for original results',),\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1024),\n    'vit_huge_patch14_clip_378.metaclip2_worldwide': _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 378, 378), crop_pct=1.0, crop_mode='squash', num_classes=1024),\n    'vit_gigantic_patch14_clip_224.metaclip2_worldwide': _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1280),\n    'vit_gigantic_patch14_clip_378.metaclip2_worldwide': _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 378, 378), crop_pct=1.0, crop_mode='squash', num_classes=1280),\n\n    'vit_base_patch32_clip_224.metaclip_2pt5b': _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        notes=('natively QuickGELU, use quickgelu model variant for original results',),\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=512),\n    'vit_base_patch16_clip_224.metaclip_2pt5b': _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        notes=('natively QuickGELU, use quickgelu model variant for original results',),\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=512),\n    'vit_large_patch14_clip_224.metaclip_2pt5b': _cfg(\n        
hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        notes=('natively QuickGELU, use quickgelu model variant for original results',),\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=768),\n    'vit_huge_patch14_clip_224.metaclip_2pt5b': _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        notes=('natively QuickGELU, use quickgelu model variant for original results',),\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1024),\n    'vit_huge_patch14_clip_224.metaclip_altogether': _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1024),\n    'vit_gigantic_patch14_clip_224.metaclip_2pt5b': _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        notes=('natively QuickGELU, use quickgelu model variant for original results',),\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1280),\n    'vit_base_patch32_clip_224.metaclip_400m': _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        notes=('natively QuickGELU, use quickgelu model variant for original results',),\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=512),\n    'vit_base_patch16_clip_224.metaclip_400m': _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        notes=('natively QuickGELU, use quickgelu model variant for original results',),\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=512),\n    'vit_large_patch14_clip_224.metaclip_400m': _cfg(\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        notes=('natively QuickGELU, use quickgelu model variant for original results',),\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=768),\n\n    'vit_base_patch32_clip_224.openai': _cfg(\n        hf_hub_id='timm/',\n        notes=('natively 
QuickGELU, use quickgelu model variant for original results',),\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512),\n    'vit_base_patch16_clip_224.openai': _cfg(\n        hf_hub_id='timm/',\n        notes=('natively QuickGELU, use quickgelu model variant for original results',),\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512),\n    'vit_large_patch14_clip_224.openai': _cfg(\n        hf_hub_id='timm/',\n        notes=('natively QuickGELU, use quickgelu model variant for original results',),\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=768),\n    'vit_large_patch14_clip_336.openai': _cfg(\n        hf_hub_id='timm/',\n        notes=('natively QuickGELU, use quickgelu model variant for original results',),\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        crop_pct=1.0, input_size=(3, 336, 336), num_classes=768),\n\n    'vit_large_patch14_clip_224.apple_mclip2_dfndr2b': _cfg(\n        hf_hub_id='timm/',\n        num_classes=768,\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0,\n        license='apple-amlr'\n    ),\n\n    # experimental (may be removed)\n    'vit_base_patch32_plus_256.untrained': _cfg(url='', input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_base_patch16_plus_240.untrained': _cfg(url='', input_size=(3, 240, 240), crop_pct=0.95),\n    'vit_small_patch16_36x1_224.untrained': _cfg(url=''),\n    'vit_small_patch16_18x2_224.untrained': _cfg(url=''),\n    'vit_base_patch16_18x2_224.untrained': _cfg(url=''),\n\n    # EVA fine-tuned weights from MAE style MIM - EVA-CLIP target pretrain\n    # https://github.com/baaivision/EVA/blob/7ecf2c0a370d97967e86d047d7af9188f78d2df3/eva/README.md#eva-l-learning-better-mim-representations-from-eva-clip\n    'eva_large_patch14_196.in22k_ft_in22k_in1k': _cfg(\n        # hf_hub_id='BAAI/EVA', hf_hub_filename='eva_l_psz14_196px_21k_to_1k_ft_88p6.pt',\n        hf_hub_id='timm/', license='mit',\n        
mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 196, 196), crop_pct=1.0),\n    'eva_large_patch14_336.in22k_ft_in22k_in1k': _cfg(\n        # hf_hub_id='BAAI/EVA', hf_hub_filename='eva_l_psz14_336px_21k_to_1k_ft_89p2.pt',\n        hf_hub_id='timm/', license='mit',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 336, 336), crop_pct=1.0, crop_mode='squash'),\n    'eva_large_patch14_196.in22k_ft_in1k': _cfg(\n        # hf_hub_id='BAAI/EVA', hf_hub_filename='eva_l_psz14_196px_1k_ft_88p0.pt',\n        hf_hub_id='timm/', license='mit',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 196, 196), crop_pct=1.0),\n    'eva_large_patch14_336.in22k_ft_in1k': _cfg(\n        # hf_hub_id='BAAI/EVA', hf_hub_filename='eva_l_psz14_336px_1k_ft_88p65.pt',\n        hf_hub_id='timm/', license='mit',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD,\n        input_size=(3, 336, 336), crop_pct=1.0, crop_mode='squash'),\n\n    'flexivit_small.1200ep_in1k': _cfg(\n        url='https://storage.googleapis.com/big_vision/flexivit/flexivit_s_i1k.npz', custom_load=True,\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), crop_pct=0.95),\n    'flexivit_small.600ep_in1k': _cfg(\n        url='https://storage.googleapis.com/big_vision/flexivit/flexivit_s_i1k_600ep.npz', custom_load=True,\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), crop_pct=0.95),\n    'flexivit_small.300ep_in1k': _cfg(\n        url='https://storage.googleapis.com/big_vision/flexivit/flexivit_s_i1k_300ep.npz', custom_load=True,\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), crop_pct=0.95),\n\n    'flexivit_base.1200ep_in1k': _cfg(\n        url='https://storage.googleapis.com/big_vision/flexivit/flexivit_b_i1k.npz', custom_load=True,\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), crop_pct=0.95),\n    'flexivit_base.600ep_in1k': _cfg(\n        
url='https://storage.googleapis.com/big_vision/flexivit/flexivit_b_i1k_600ep.npz', custom_load=True,\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), crop_pct=0.95),\n    'flexivit_base.300ep_in1k': _cfg(\n        url='https://storage.googleapis.com/big_vision/flexivit/flexivit_b_i1k_300ep.npz', custom_load=True,\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), crop_pct=0.95),\n    'flexivit_base.1000ep_in21k': _cfg(\n        url='https://storage.googleapis.com/big_vision/flexivit/flexivit_b_i21k_1000ep.npz', custom_load=True,\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), crop_pct=0.95, num_classes=21843),\n    'flexivit_base.300ep_in21k': _cfg(\n        url='https://storage.googleapis.com/big_vision/flexivit/flexivit_b_i21k_300ep.npz', custom_load=True,\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), crop_pct=0.95, num_classes=21843),\n\n    'flexivit_large.1200ep_in1k': _cfg(\n        url='https://storage.googleapis.com/big_vision/flexivit/flexivit_l_i1k.npz', custom_load=True,\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), crop_pct=0.95),\n    'flexivit_large.600ep_in1k': _cfg(\n        url='https://storage.googleapis.com/big_vision/flexivit/flexivit_l_i1k_600ep.npz', custom_load=True,\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), crop_pct=0.95),\n    'flexivit_large.300ep_in1k': _cfg(\n        url='https://storage.googleapis.com/big_vision/flexivit/flexivit_l_i1k_300ep.npz', custom_load=True,\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), crop_pct=0.95),\n\n    'flexivit_base.patch16_in21k': _cfg(\n        url='https://storage.googleapis.com/big_vision/flexivit/vit_b16_i21k_300ep.npz', custom_load=True,\n        hf_hub_id='timm/',\n        input_size=(3, 240, 240), crop_pct=0.95, num_classes=21843),\n    'flexivit_base.patch30_in21k': _cfg(\n        url='https://storage.googleapis.com/big_vision/flexivit/vit_b30_i21k_300ep.npz', custom_load=True,\n       
 hf_hub_id='timm/',\n        input_size=(3, 240, 240), crop_pct=0.95, num_classes=21843),\n\n    'vit_base_patch16_xp_224.untrained': _cfg(url=''),\n    'vit_large_patch14_xp_224.untrained': _cfg(url=''),\n    'vit_huge_patch14_xp_224.untrained': _cfg(url=''),\n\n    'vit_base_patch16_224.mae': _cfg(\n        url='https://dl.fbaipublicfiles.com/mae/pretrain/mae_pretrain_vit_base.pth',\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0),\n    'vit_large_patch16_224.mae': _cfg(\n        url='https://dl.fbaipublicfiles.com/mae/pretrain/mae_pretrain_vit_large.pth',\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0),\n    'vit_huge_patch14_224.mae': _cfg(\n        url='https://dl.fbaipublicfiles.com/mae/pretrain/mae_pretrain_vit_huge.pth',\n        hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0),\n\n    'vit_huge_patch14_gap_224.in1k_ijepa': _cfg(\n        url='https://dl.fbaipublicfiles.com/ijepa/IN1K-vit.h.14-300e.pth.tar',\n        # hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0),\n    'vit_huge_patch14_gap_224.in22k_ijepa': _cfg(\n        url='https://dl.fbaipublicfiles.com/ijepa/IN22K-vit.h.14-900e.pth.tar',\n        # hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0),\n    'vit_huge_patch16_gap_448.in1k_ijepa': _cfg(\n        url='https://dl.fbaipublicfiles.com/ijepa/IN1K-vit.h.16-448px-300e.pth.tar',\n        # hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        input_size=(3, 448, 448), crop_pct=1.0,\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0),\n    'vit_giant_patch16_gap_224.in22k_ijepa': _cfg(\n        
url='https://dl.fbaipublicfiles.com/ijepa/IN22K-vit.g.16-600e.pth.tar',\n        # hf_hub_id='timm/',\n        license='cc-by-nc-4.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0),\n\n    'vit_base_patch32_siglip_256.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_base_patch16_siglip_224.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        num_classes=0),\n    'vit_base_patch16_siglip_224.webli': _cfg(\n        hf_hub_id='timm/',\n        num_classes=0),\n    'vit_base_patch16_siglip_256.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_base_patch16_siglip_256.webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_base_patch16_siglip_256.webli_i18n': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_base_patch16_siglip_384.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384),\n        num_classes=0),\n    'vit_base_patch16_siglip_384.webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384),\n        num_classes=0),\n    'vit_base_patch16_siglip_512.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512),\n        num_classes=0),\n    'vit_base_patch16_siglip_512.webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512),\n        num_classes=0),\n    'vit_large_patch16_siglip_256.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_large_patch16_siglip_256.webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_large_patch16_siglip_384.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384),\n        num_classes=0),\n    'vit_large_patch16_siglip_384.webli': _cfg(\n       
 hf_hub_id='timm/',\n        input_size=(3, 384, 384),\n        num_classes=0),\n    'vit_large_patch16_siglip_512.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512),\n        num_classes=0),\n    'vit_so400m_patch14_siglip_224.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        num_classes=0),\n    'vit_so400m_patch14_siglip_224.webli': _cfg(\n        hf_hub_id='timm/',\n        num_classes=0),\n    'vit_so400m_patch14_siglip_378.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 378, 378),\n        num_classes=0),\n    'vit_so400m_patch14_siglip_378.webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 378, 378),\n        num_classes=0),\n    'vit_so400m_patch14_siglip_384.webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384),\n        num_classes=0),\n    'vit_so400m_patch16_siglip_256.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_so400m_patch16_siglip_256.webli_i18n': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_so400m_patch16_siglip_384.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384),\n        num_classes=0),\n    'vit_so400m_patch16_siglip_512.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512),\n        num_classes=0),\n    'vit_giantopt_patch16_siglip_256.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_giantopt_patch16_siglip_384.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384),\n        num_classes=0),\n\n    'vit_base_patch32_siglip_gap_256.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_base_patch16_siglip_gap_224.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        num_classes=0),\n    'vit_base_patch16_siglip_gap_224.webli': 
_cfg(\n        hf_hub_id='timm/',\n        num_classes=0),\n    'vit_base_patch16_siglip_gap_256.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_base_patch16_siglip_gap_256.webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_base_patch16_siglip_gap_256.webli_i18n': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_base_patch16_siglip_gap_384.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384),\n        num_classes=0),\n    'vit_base_patch16_siglip_gap_384.webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384),\n        num_classes=0),\n    'vit_base_patch16_siglip_gap_512.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512),\n        num_classes=0),\n    'vit_base_patch16_siglip_gap_512.webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512),\n        num_classes=0),\n    'vit_large_patch16_siglip_gap_256.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_large_patch16_siglip_gap_256.webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_large_patch16_siglip_gap_384.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384),\n        num_classes=0),\n    'vit_large_patch16_siglip_gap_384.webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384),\n        num_classes=0),\n    'vit_large_patch16_siglip_gap_512.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512),\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_224.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_224.webli': _cfg(\n        hf_hub_id='timm/',\n        num_classes=0),\n    
'vit_so400m_patch14_siglip_gap_224.pali_mix': _cfg(\n        hf_hub_id='timm/',\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_224.pali_pt': _cfg(\n        hf_hub_id='timm/',\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_224.pali2_3b_pt': _cfg(\n        hf_hub_id='timm/',\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_224.pali2_10b_pt': _cfg(\n        hf_hub_id='timm/',\n        num_classes=0),\n    # 'vit_so400m_patch14_siglip_gap_224.pali2_28b_pt': _cfg(\n    #     hf_hub_id='google/paligemma2-28b-pt-224-jax',\n    #     hf_hub_filename='pt_27b_224.npz',\n    #     custom_load='hf',\n    #     num_classes=0),\n    'vit_so400m_patch14_siglip_gap_378.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 378, 378),\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_378.webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 378, 378), crop_pct=1.0,\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_384.webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), crop_pct=1.0,\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_448.pali_mix': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0,\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_448.pali_pt': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0,\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_448.pali_refcoco_seg': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0,\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_448.pali_ocrvqa': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0,\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_448.pali2_3b_pt': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0,\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_448.pali2_10b_pt': _cfg(\n        
hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0,\n        num_classes=0),\n    # 'vit_so400m_patch14_siglip_gap_448.pali2_28b_pt': _cfg(\n    #     hf_hub_id='google/paligemma2-28b-pt-448-jax',\n    #     hf_hub_filename='pt_27b_448.npz',\n    #     custom_load='hf',\n    #     input_size=(3, 448, 448), crop_pct=1.0,\n    #     num_classes=0),\n    'vit_so400m_patch14_siglip_gap_448.pali2_3b_docci': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0,\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_448.pali2_10b_docci': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0,\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_896.pali_pt': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 896, 896), crop_pct=1.0,\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_896.pali_refcoco_seg': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 896, 896), crop_pct=1.0,\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_896.pali_ocrvqa': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 896, 896), crop_pct=1.0,\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_896.pali2_3b_pt': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 896, 896), crop_pct=1.0,\n        num_classes=0),\n    'vit_so400m_patch14_siglip_gap_896.pali2_10b_pt': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 896, 896), crop_pct=1.0,\n        num_classes=0),\n    # 'vit_so400m_patch14_siglip_gap_896.pali2_28b_pt': _cfg(\n    #     hf_hub_id='google/paligemma2-28b-pt-896-jax',\n    #     hf_hub_filename='pt_27b_896.npz',\n    #     custom_load='hf',\n    #     input_size=(3, 896, 896), crop_pct=1.0,\n    #     num_classes=0),\n    'vit_so400m_patch16_siglip_gap_256.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_so400m_patch16_siglip_gap_256.webli_i18n': _cfg(\n        
hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_so400m_patch16_siglip_gap_384.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384),\n        num_classes=0),\n    'vit_so400m_patch16_siglip_gap_512.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 512, 512),\n        num_classes=0),\n    'vit_giantopt_patch16_siglip_gap_256.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256),\n        num_classes=0),\n    'vit_giantopt_patch16_siglip_gap_384.v2_webli': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384),\n        num_classes=0),\n\n    'vit_so400m_patch14_siglip_378.webli_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 378, 378), crop_pct=1.0, crop_mode='squash',\n    ),\n    'vit_so400m_patch14_siglip_gap_378.webli_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 378, 378), crop_pct=1.0, crop_mode='squash',\n    ),\n\n    'vit_xsmall_patch16_clip_224.tinyclip_yfcc15m': _cfg(\n        hf_hub_id='timm/',\n        license='mit',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512),\n    'vit_medium_patch32_clip_224.tinyclip_laion400m': _cfg(\n        hf_hub_id='timm/',\n        license='mit',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512),\n    'vit_medium_patch16_clip_224.tinyclip_yfcc15m': _cfg(\n        hf_hub_id='timm/',\n        license='mit',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512),\n    'vit_betwixt_patch32_clip_224.tinyclip_laion400m': _cfg(\n        hf_hub_id='timm/',\n        license='mit',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512),\n\n    'vit_wee_patch16_reg1_gap_256.sbb_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_dwee_patch16_reg1_gap_256.sbb_nadamuon_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), 
crop_pct=0.95),\n    'vit_dwee_patch16_reg1_gap_256.sbb_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_pwee_patch16_reg1_gap_256.sbb_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_dpwee_patch16_reg1_gap_256.sbb_nadamuon_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_dpwee_patch16_reg1_gap_256.sbb_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_little_patch16_reg1_gap_256.sbb_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_little_patch16_reg1_gap_256.sbb_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_little_patch16_reg4_gap_256.sbb_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_dlittle_patch16_reg1_gap_256.sbb_nadamuon_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_medium_patch16_reg1_gap_256.sbb_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_medium_patch16_reg4_gap_256.sbb_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_medium_patch16_reg4_gap_256.sbb_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_medium_patch16_reg4_gap_256.sbb_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_mediumd_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_mediumd_patch16_reg4_gap_256.sbb_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), 
crop_pct=0.95),\n    'vit_mediumd_patch16_reg4_gap_256.sbb2_e200_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_mediumd_patch16_reg4_gap_256.sbb_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_mediumd_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), crop_pct=1.0),\n    'vit_betwixt_patch16_reg1_gap_256.sbb_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_betwixt_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_betwixt_patch16_reg4_gap_256.sbb_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_betwixt_patch16_reg4_gap_256.sbb_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_betwixt_patch16_reg4_gap_256.sbb2_e200_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_betwixt_patch16_reg4_gap_256.sbb_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_betwixt_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), crop_pct=1.0),\n    'vit_base_patch16_reg4_gap_256.untrained': _cfg(\n        input_size=(3, 256, 256)),\n\n    'vit_so150m_patch16_reg4_gap_256.sbb_e250_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=0.95),\n    'vit_so150m_patch16_reg4_gap_256.sbb_e250_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        input_size=(3, 256, 256), crop_pct=0.95),\n    
'vit_so150m_patch16_reg4_gap_384.sbb_e250_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), crop_pct=1.0),\n    'vit_so150m_patch16_reg4_map_256.untrained': _cfg(\n        input_size=(3, 256, 256)),\n    'vit_so150m2_patch16_reg1_gap_256.sbb_e200_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256), crop_pct=1.0),\n    'vit_so150m2_patch16_reg1_gap_256.sbb_e200_in12k': _cfg(\n        hf_hub_id='timm/',\n        num_classes=11821,\n        input_size=(3, 256, 256), crop_pct=1.0),\n    'vit_so150m2_patch16_reg1_gap_384.sbb_e200_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), crop_pct=1.0),\n    'vit_so150m2_patch16_reg1_gap_448.sbb_e200_in12k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 448, 448), crop_pct=1.0, crop_mode='squash'),\n\n    'vit_intern300m_patch14_448.ogvl_dist': _cfg(\n        hf_hub_id='timm/',\n        license='mit',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        input_size=(3, 448, 448), crop_pct=1.0, num_classes=0,\n    ),\n    'vit_intern300m_patch14_448.ogvl_2pt5': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,\n        input_size=(3, 448, 448), crop_pct=1.0, num_classes=0,\n    ),\n\n    'aimv2_large_patch14_224.apple_pt': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, license='apple-ascl',\n        crop_pct=1.0, num_classes=0),\n    'aimv2_large_patch14_224.apple_pt_dist': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, license='apple-ascl',\n        crop_pct=1.0, num_classes=0),\n    'aimv2_huge_patch14_224.apple_pt': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, license='apple-ascl',\n        crop_pct=1.0, num_classes=0),\n    'aimv2_1b_patch14_224.apple_pt': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, 
std=OPENAI_CLIP_STD, license='apple-ascl',\n        crop_pct=1.0, num_classes=0),\n    'aimv2_3b_patch14_224.apple_pt': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, license='apple-ascl',\n        crop_pct=1.0, num_classes=0),\n    'aimv2_large_patch14_336.apple_pt': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, license='apple-ascl',\n        input_size=(3, 336, 336), crop_pct=1.0, num_classes=0),\n    'aimv2_large_patch14_336.apple_pt_dist': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, license='apple-ascl',\n        input_size=(3, 336, 336), crop_pct=1.0, num_classes=0),\n    'aimv2_huge_patch14_336.apple_pt': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, license='apple-ascl',\n        input_size=(3, 336, 336), crop_pct=1.0, num_classes=0),\n    'aimv2_1b_patch14_336.apple_pt': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, license='apple-ascl',\n        input_size=(3, 336, 336), crop_pct=1.0, num_classes=0),\n    'aimv2_3b_patch14_336.apple_pt': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, license='apple-ascl',\n        input_size=(3, 336, 336), crop_pct=1.0, num_classes=0),\n    'aimv2_large_patch14_448.apple_pt': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, license='apple-ascl',\n        input_size=(3, 448, 448), crop_pct=1.0, num_classes=0),\n    'aimv2_huge_patch14_448.apple_pt': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, license='apple-ascl',\n        input_size=(3, 448, 448), crop_pct=1.0, num_classes=0),\n    'aimv2_1b_patch14_448.apple_pt': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, license='apple-ascl',\n        input_size=(3, 448, 448), crop_pct=1.0, num_classes=0),\n    
'aimv2_3b_patch14_448.apple_pt': _cfg(\n        hf_hub_id='timm/',\n        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, license='apple-ascl',\n        input_size=(3, 448, 448), crop_pct=1.0, num_classes=0),\n\n    'test_vit.r160_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 160, 160), crop_pct=0.95),\n    'test_vit2.r160_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 160, 160), crop_pct=0.95),\n    'test_vit3.r160_in1k': _cfg(\n        hf_hub_id='timm/',\n        input_size=(3, 160, 160), crop_pct=0.95),\n    'test_vit4.r160_in1k': _cfg(\n        input_size=(3, 160, 160), crop_pct=0.95),\n\n    # BEiT3 models (remapped to VisionTransformer with scale_attn_norm=True, scale_mlp_norm=True)\n    'beit3_base_patch16_224.in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, crop_pct=1.0),\n    'beit3_base_patch16_224.indomain_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, crop_pct=1.0),\n    'beit3_large_patch16_224.in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, crop_pct=1.0),\n    'beit3_large_patch16_224.indomain_in22k_ft_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, crop_pct=1.0),\n    'beit3_giant_patch14_224.untrained': _cfg(\n        url='', mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, crop_pct=1.0),\n    'beit3_giant_patch14_336.untrained': _cfg(\n        url='', input_size=(3, 336, 336), mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, crop_pct=1.0),\n    'beit3_base_patch16_224.pt': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, crop_pct=1.0,\n        num_classes=0,\n    ),\n    'beit3_base_patch16_224.indomain_pt': _cfg(\n        hf_hub_id='timm/',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, 
crop_pct=1.0,
        num_classes=0,
    ),
    'beit3_large_patch16_224.pt': _cfg(
        hf_hub_id='timm/',
        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, crop_pct=1.0,
        num_classes=0,
    ),
    'beit3_large_patch16_224.indomain_pt': _cfg(
        hf_hub_id='timm/',
        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, crop_pct=1.0,
        num_classes=0,
    ),
}

# Derive '*_clip_quickgelu_*' default cfgs from the '*_clip_*' entries whose first
# 'notes' string mentions 'quickgelu'; the derived entry points at the non-quickgelu
# hub weights under the original model name.
_quick_gelu_cfgs = [n for n, c in default_cfgs.items() if c.get('notes', ()) and 'quickgelu' in c['notes'][0]]
for n in _quick_gelu_cfgs:
    # generate quickgelu default cfgs based on contents of notes field
    c = copy.deepcopy(default_cfgs[n])
    if c['hf_hub_id'] == 'timm/':
        c['hf_hub_id'] = 'timm/' + n  # need to use non-quickgelu model name for hub id
    default_cfgs[n.replace('_clip_', '_clip_quickgelu_')] = c
default_cfgs = generate_default_cfgs(default_cfgs)


# Global flag to use NaFlexVit instead of VisionTransformer
_USE_NAFLEX_DEFAULT = os.environ.get('TIMM_USE_NAFLEXVIT', 'false').lower() == 'true'

def _create_vision_transformer(
        variant: str,
        pretrained: bool = False,
        use_naflex: Optional[bool] = None,
        **kwargs,
) -> Union[VisionTransformer, 'NaFlexVit']:
    """Build a VisionTransformer (or NaFlexVit) for the given registered variant.

    Args:
        variant: Registered variant name, used to select the default pretrained cfg.
        pretrained: Load pretrained weights when True.
        use_naflex: Force NaFlexVit creation when True; when None, falls back to the
            TIMM_USE_NAFLEXVIT environment-variable controlled default above.
        **kwargs: Extra model args; 'out_indices' and 'pretrained_strict' are popped
            here rather than being passed through to the model constructor.
    """
    # Check if we should use NaFlexVit instead
    if use_naflex is None:
        use_naflex = _USE_NAFLEX_DEFAULT
    if use_naflex:
        # Import here to avoid circular imports
        from .naflexvit import _create_naflexvit_from_classic
        return _create_naflexvit_from_classic(variant, pretrained, **kwargs)

    out_indices = kwargs.pop('out_indices', 3)
    if 'flexi' in variant:
        # FIXME Google FlexiViT pretrained models have a strong preference for bilinear patch / embed
        # interpolation, other pretrained models resize better w/ anti-aliased bicubic interpolation.
        _filter_fn = partial(checkpoint_filter_fn, interpolation='bilinear', antialias=False)
    else:
        _filter_fn = 
checkpoint_filter_fn

    # FIXME attn pool (currently only in siglip) params removed if pool disabled, is there a better soln?
    # SigLIP checkpoints carry attn-pool params that are stripped when global_pool != 'map',
    # so loading must be non-strict in that case.
    strict = kwargs.pop('pretrained_strict', True)
    if 'siglip' in variant and kwargs.get('global_pool', None) != 'map':
        strict = False

    return build_model_with_cfg(
        VisionTransformer,
        variant,
        pretrained,
        pretrained_filter_fn=_filter_fn,
        pretrained_strict=strict,
        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),
        **kwargs,
    )


@register_model
def vit_tiny_patch16_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Tiny (Vit-Ti/16)
    """
    # dict(model_args, **kwargs) lets caller kwargs override the variant defaults
    model_args = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3)
    model = _create_vision_transformer('vit_tiny_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_tiny_patch16_384(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Tiny (Vit-Ti/16) @ 384x384.
    """
    model_args = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3)
    model = _create_vision_transformer('vit_tiny_patch16_384', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_small_patch32_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Small (ViT-S/32)
    """
    model_args = dict(patch_size=32, embed_dim=384, depth=12, num_heads=6)
    model = _create_vision_transformer('vit_small_patch32_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_small_patch32_384(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Small (ViT-S/32) at 384x384.
    """
    model_args = dict(patch_size=32, embed_dim=384, depth=12, num_heads=6)
    model = _create_vision_transformer('vit_small_patch32_384', pretrained=pretrained, 
**dict(model_args, **kwargs))
    return model


@register_model
def vit_small_patch16_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Small (ViT-S/16)
    """
    model_args = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6)
    model = _create_vision_transformer('vit_small_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_small_patch16_384(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Small (ViT-S/16) @ 384x384.
    """
    model_args = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6)
    model = _create_vision_transformer('vit_small_patch16_384', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_small_patch8_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Small (ViT-S/8)
    """
    model_args = dict(patch_size=8, embed_dim=384, depth=12, num_heads=6)
    model = _create_vision_transformer('vit_small_patch8_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_base_patch32_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Base (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929).
    ImageNet-1k weights fine-tuned from in21k, source https://github.com/google-research/vision_transformer.
    """
    model_args = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12)
    model = _create_vision_transformer('vit_base_patch32_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_base_patch32_384(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Base model (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929).
    ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer.
    """
    model_args = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12)
    model = _create_vision_transformer('vit_base_patch32_384', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_base_patch16_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Base (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929).
    ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer.
    """
    model_args = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12)
    model = _create_vision_transformer('vit_base_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_base_patch16_384(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Base model (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929).
    ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer.
    """
    model_args = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12)
    model = _create_vision_transformer('vit_base_patch16_384', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_base_patch8_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Base (ViT-B/8) from original paper (https://arxiv.org/abs/2010.11929).
    ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer.
    """
    model_args = dict(patch_size=8, embed_dim=768, depth=12, num_heads=12)
    model = _create_vision_transformer('vit_base_patch8_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_large_patch32_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Large model (ViT-L/32) from original paper 
(https://arxiv.org/abs/2010.11929). No pretrained weights.
    """
    model_args = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16)
    model = _create_vision_transformer('vit_large_patch32_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_large_patch32_384(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929).
    ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer.
    """
    model_args = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16)
    model = _create_vision_transformer('vit_large_patch32_384', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_large_patch16_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Large model (ViT-L/16) from original paper (https://arxiv.org/abs/2010.11929).
    ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer.
    """
    model_args = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16)
    model = _create_vision_transformer('vit_large_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_large_patch16_384(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Large model (ViT-L/16) from original paper (https://arxiv.org/abs/2010.11929).
    ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer.
    """
    model_args = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16)
    model = _create_vision_transformer('vit_large_patch16_384', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_large_patch14_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Large model (ViT-L/14)
    """
    model_args = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16)
    model = _create_vision_transformer('vit_large_patch14_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_huge_patch14_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Huge model (ViT-H/14) from original paper (https://arxiv.org/abs/2010.11929).
    """
    model_args = dict(patch_size=14, embed_dim=1280, depth=32, num_heads=16)
    model = _create_vision_transformer('vit_huge_patch14_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_giant_patch14_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Giant (little-g) model (ViT-g/14) from `Scaling Vision Transformers` - https://arxiv.org/abs/2106.04560
    """
    # non-integer mlp_ratio 48/11 is the g/14 config from the paper cited above
    model_args = dict(patch_size=14, embed_dim=1408, mlp_ratio=48/11, depth=40, num_heads=16)
    model = _create_vision_transformer('vit_giant_patch14_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_gigantic_patch14_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Gigantic (big-G) model (ViT-G/14) from `Scaling Vision Transformers` - https://arxiv.org/abs/2106.04560
    """
    model_args = dict(patch_size=14, embed_dim=1664, mlp_ratio=64/13, depth=48, num_heads=16)
    model = _create_vision_transformer(
        'vit_gigantic_patch14_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_base_patch16_224_miil(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Base (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929).
    Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K
    """
    model_args = dict(patch_size=16, 
embed_dim=768, depth=12, num_heads=12, qkv_bias=False)
    model = _create_vision_transformer(
        'vit_base_patch16_224_miil', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


# 'gap' variants below: no class token, global average pooling, no final fc-norm.
@register_model
def vit_medium_patch16_gap_240(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Medium (ViT-M/16) w/o class token, w/ avg-pool @ 240x240
    """
    model_args = dict(
        patch_size=16, embed_dim=512, depth=12, num_heads=8, class_token=False,
        global_pool='avg', qkv_bias=False, init_values=1e-6, fc_norm=False)
    model = _create_vision_transformer(
        'vit_medium_patch16_gap_240', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_medium_patch16_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Medium (ViT-M/16) w/o class token, w/ avg-pool @ 256x256
    """
    model_args = dict(
        patch_size=16, embed_dim=512, depth=12, num_heads=8, class_token=False,
        global_pool='avg', qkv_bias=False, init_values=1e-6, fc_norm=False)
    model = _create_vision_transformer(
        'vit_medium_patch16_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_medium_patch16_gap_384(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Medium (ViT-M/16) w/o class token, w/ avg-pool @ 384x384
    """
    model_args = dict(
        patch_size=16, embed_dim=512, depth=12, num_heads=8, class_token=False,
        global_pool='avg', qkv_bias=False, init_values=1e-6, fc_norm=False)
    model = _create_vision_transformer(
        'vit_medium_patch16_gap_384', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_betwixt_patch16_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Betwixt (ViT-b/16) w/o class token, w/ avg-pool @ 256x256
    """
    model_args = dict(
        patch_size=16, embed_dim=640, depth=12, num_heads=10, class_token=False,
        global_pool='avg', qkv_bias=False, init_values=1e-6, fc_norm=False)
    model = _create_vision_transformer(
        'vit_betwixt_patch16_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_base_patch16_gap_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Base (ViT-B/16) w/o class token, w/ avg-pool @ 224x224
    """
    # NOTE: 16 heads (head_dim 48) rather than the usual 12 for ViT-B
    model_args = dict(
        patch_size=16, embed_dim=768, depth=12, num_heads=16, class_token=False, global_pool='avg', fc_norm=False)
    model = _create_vision_transformer(
        'vit_base_patch16_gap_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_huge_patch14_gap_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Huge model (ViT-H/14) w/ no class token, avg pool
    """
    model_args = dict(
        patch_size=14, embed_dim=1280, depth=32, num_heads=16, class_token=False, global_pool='avg', fc_norm=False)
    model = _create_vision_transformer(
        'vit_huge_patch14_gap_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_huge_patch16_gap_448(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Huge model (ViT-H/16) w/ no class token, avg pool @ 448x448
    """
    model_args = dict(
        patch_size=16, embed_dim=1280, depth=32, num_heads=16, class_token=False, global_pool='avg', fc_norm=False)
    model = _create_vision_transformer(
        'vit_huge_patch16_gap_448', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_giant_patch16_gap_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Giant (little-gg) model (ViT-g/16) w/ no class token, avg pool
    """
    model_args = 
dict(
        patch_size=16, embed_dim=1408, depth=40, num_heads=16, mlp_ratio=48/11,
        class_token=False, global_pool='avg', fc_norm=False)
    model = _create_vision_transformer(
        'vit_giant_patch16_gap_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_xsmall_patch16_clip_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-XSmall CLIP image tower (TinyCLIP 8M) @ 224x224
    """
    # TinyCLIP 8M
    model_args = dict(embed_dim=256, depth=10, num_heads=4, pre_norm=True, norm_layer=partial(LayerNorm, eps=1e-5))
    model = _create_vision_transformer(
        'vit_xsmall_patch16_clip_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_medium_patch32_clip_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Medium/32 CLIP image tower (TinyCLIP 40M) @ 224x224
    """
    # TinyCLIP 40M
    model_args = dict(
        patch_size=32, embed_dim=512, depth=12, num_heads=8, pre_norm=True, norm_layer=partial(LayerNorm, eps=1e-5))
    model = _create_vision_transformer(
        'vit_medium_patch32_clip_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_medium_patch16_clip_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Medium/16 CLIP image tower (TinyCLIP 39M) @ 224x224
    """
    # TinyCLIP 39M
    model_args = dict(embed_dim=512, depth=12, num_heads=8, pre_norm=True, norm_layer=partial(LayerNorm, eps=1e-5))
    model = _create_vision_transformer(
        'vit_medium_patch16_clip_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_betwixt_patch32_clip_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Betwixt/32 CLIP image tower (TinyCLIP 61M) @ 224x224
    """
    # TinyCLIP 61M
    model_args = dict(
        patch_size=32, embed_dim=640, depth=12, num_heads=10, pre_norm=True, norm_layer=partial(LayerNorm, eps=1e-5))
    model = _create_vision_transformer(
        'vit_betwixt_patch32_clip_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_base_patch32_clip_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-B/32 CLIP image tower @ 224x224
    """
    model_args = dict(
        patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=partial(LayerNorm, eps=1e-5))
    model = _create_vision_transformer(
        'vit_base_patch32_clip_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_base_patch32_clip_256(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-B/32 CLIP image tower @ 256x256
    """
    model_args = dict(
        patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=partial(LayerNorm, eps=1e-5))
    model = _create_vision_transformer(
        'vit_base_patch32_clip_256', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_base_patch32_clip_384(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-B/32 CLIP image tower @ 384x384
    """
    model_args = dict(
        patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=partial(LayerNorm, eps=1e-5))
    model = _create_vision_transformer(
        'vit_base_patch32_clip_384', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_base_patch32_clip_448(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-B/32 CLIP image tower @ 448x448
    """
    model_args = dict(
        patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=partial(LayerNorm, eps=1e-5))
    model = _create_vision_transformer(
        'vit_base_patch32_clip_448', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_base_patch16_clip_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-B/16 CLIP image tower
    """
    
model_args = dict(
        patch_size=16, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=partial(LayerNorm, eps=1e-5))
    model = _create_vision_transformer(
        'vit_base_patch16_clip_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_base_patch16_clip_384(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-B/16 CLIP image tower @ 384x384
    """
    # CLIP towers: pre-norm transformer w/ LayerNorm eps=1e-5
    model_args = dict(
        patch_size=16, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=partial(LayerNorm, eps=1e-5))
    model = _create_vision_transformer(
        'vit_base_patch16_clip_384', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_base_patch16_plus_clip_240(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Base (ViT-B/16+) CLIP image tower @ 240x240
    """
    model_args = dict(
        patch_size=16, embed_dim=896, depth=12, num_heads=14, pre_norm=True, norm_layer=partial(LayerNorm, eps=1e-5))
    model = _create_vision_transformer(
        'vit_base_patch16_plus_clip_240', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_large_patch14_clip_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Large model (ViT-L/14) CLIP image tower
    """
    model_args = dict(
        patch_size=14, embed_dim=1024, depth=24, num_heads=16, pre_norm=True, norm_layer=partial(LayerNorm, eps=1e-5))
    model = _create_vision_transformer(
        'vit_large_patch14_clip_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_large_patch14_clip_336(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Large model (ViT-L/14) CLIP image tower @ 336x336
    """
    model_args = dict(
        patch_size=14, embed_dim=1024, depth=24, num_heads=16, pre_norm=True, norm_layer=partial(LayerNorm, eps=1e-5))
    model = _create_vision_transformer(
        'vit_large_patch14_clip_336', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_huge_patch14_clip_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Huge model (ViT-H/14) CLIP image tower.
    """
    model_args = dict(
        patch_size=14, embed_dim=1280, depth=32, num_heads=16, pre_norm=True, norm_layer=partial(LayerNorm, eps=1e-5))
    model = _create_vision_transformer(
        'vit_huge_patch14_clip_224', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_huge_patch14_clip_336(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Huge model (ViT-H/14) CLIP image tower @ 336x336
    """
    model_args = dict(
        patch_size=14, embed_dim=1280, depth=32, num_heads=16, pre_norm=True, norm_layer=partial(LayerNorm, eps=1e-5))
    model = _create_vision_transformer(
        'vit_huge_patch14_clip_336', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_huge_patch14_clip_378(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Huge model (ViT-H/14) CLIP image tower @ 378x378
    """
    model_args = dict(
        patch_size=14, embed_dim=1280, depth=32, num_heads=16, pre_norm=True, norm_layer=partial(LayerNorm, eps=1e-5))
    model = _create_vision_transformer(
        'vit_huge_patch14_clip_378', pretrained=pretrained, **dict(model_args, **kwargs))
    return model


@register_model
def vit_giant_patch14_clip_224(pretrained: bool = False, **kwargs) -> VisionTransformer:
    """ ViT-Giant (little-g) model (ViT-g/14) from `Scaling Vision Transformers` - https://arxiv.org/abs/2106.04560
    Pretrained weights from CLIP image tower.
    """
    model_args = dict(
        patch_size=14, embed_dim=1408, 
mlp_ratio=48/11, depth=40, num_heads=16, pre_norm=True,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n    model = _create_vision_transformer(\n        'vit_giant_patch14_clip_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_gigantic_patch14_clip_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-bigG model (ViT-G/14) from `Scaling Vision Transformers` - https://arxiv.org/abs/2106.04560\n    Pretrained weights from CLIP image tower.\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1664, mlp_ratio=64/13, depth=48, num_heads=16, pre_norm=True,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n    model = _create_vision_transformer(\n        'vit_gigantic_patch14_clip_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_gigantic_patch14_clip_378(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-bigG model (ViT-G/14) from `Scaling Vision Transformers` - https://arxiv.org/abs/2106.04560\n    Pretrained weights from CLIP image tower.\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1664, mlp_ratio=64/13, depth=48, num_heads=16, pre_norm=True,\n        norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n    model = _create_vision_transformer(\n        'vit_gigantic_patch14_clip_378', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch32_clip_quickgelu_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-B/32 CLIP image tower @ 224x224\n    \"\"\"\n    model_args = dict(\n        patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True,\n        norm_layer=partial(LayerNorm, eps=1e-5), act_layer='quick_gelu'\n    )\n    model = _create_vision_transformer(\n        'vit_base_patch32_clip_quickgelu_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    
return model\n\n\n@register_model\ndef vit_base_patch16_clip_quickgelu_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-B/16 CLIP image tower w/ QuickGELU act\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, pre_norm=True,\n        norm_layer=partial(LayerNorm, eps=1e-5), act_layer='quick_gelu'\n    )\n    model = _create_vision_transformer(\n        'vit_base_patch16_clip_quickgelu_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch14_clip_quickgelu_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-Large model (ViT-L/14) CLIP image tower w/ QuickGELU act\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1024, depth=24, num_heads=16, pre_norm=True,\n        norm_layer=partial(LayerNorm, eps=1e-5), act_layer='quick_gelu'\n    )\n    model = _create_vision_transformer(\n        'vit_large_patch14_clip_quickgelu_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch14_clip_quickgelu_336(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-Large model (ViT-L/14) CLIP image tower @ 336x336 w/ QuickGELU act\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1024, depth=24, num_heads=16, pre_norm=True,\n        norm_layer=partial(LayerNorm, eps=1e-5), act_layer='quick_gelu'\n    )\n    model = _create_vision_transformer(\n        'vit_large_patch14_clip_quickgelu_336', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_huge_patch14_clip_quickgelu_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-Huge model (ViT-H/14) CLIP image tower w/ QuickGELU act.\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1280, depth=32, num_heads=16, pre_norm=True,\n        norm_layer=partial(LayerNorm, 
eps=1e-5), act_layer='quick_gelu'\n    )\n    model = _create_vision_transformer(\n        'vit_huge_patch14_clip_quickgelu_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_huge_patch14_clip_quickgelu_378(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-Huge model (ViT-H/14) CLIP image tower @ 378x378 w/ QuickGELU act\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1280, depth=32, num_heads=16, pre_norm=True,\n        norm_layer=partial(LayerNorm, eps=1e-5), act_layer='quick_gelu'\n    )\n    model = _create_vision_transformer(\n        'vit_huge_patch14_clip_quickgelu_378', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_gigantic_patch14_clip_quickgelu_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-bigG model (ViT-G/14) w/ QuickGELU act\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1664, mlp_ratio=64/13, depth=48, num_heads=16, pre_norm=True,\n        norm_layer=partial(LayerNorm, eps=1e-5), act_layer='quick_gelu'\n    )\n    model = _create_vision_transformer(\n        'vit_gigantic_patch14_clip_quickgelu_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n# Experimental models below\n\n@register_model\ndef vit_base_patch32_plus_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-Base (ViT-B/32+)\n    \"\"\"\n    model_args = dict(patch_size=32, embed_dim=896, depth=12, num_heads=14, init_values=1e-5)\n    model = _create_vision_transformer(\n        'vit_base_patch32_plus_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_plus_240(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-Base (ViT-B/16+)\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=896, depth=12, num_heads=14, init_values=1e-5)\n    
model = _create_vision_transformer(\n        'vit_base_patch16_plus_240', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_rpn_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-Base (ViT-B/16) w/ residual post-norm\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, init_values=1e-5,\n        class_token=False, block_fn=ResPostBlock, global_pool='avg')\n    model = _create_vision_transformer(\n        'vit_base_patch16_rpn_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_small_patch16_36x1_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-Base w/ LayerScale + 36 x 1 (36 block serial) config. Experimental, may remove.\n    Based on `Three things everyone should know about Vision Transformers` - https://arxiv.org/abs/2203.09795\n    Paper focuses on 24x2 + 48x1 for 'Small' width but those are extremely slow.\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=384, depth=36, num_heads=6, init_values=1e-5)\n    model = _create_vision_transformer(\n        'vit_small_patch16_36x1_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_small_patch16_18x2_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-Small w/ LayerScale + 18 x 2 (36 block parallel) config. 
Experimental, may remove.\n    Based on `Three things everyone should know about Vision Transformers` - https://arxiv.org/abs/2203.09795\n    Paper focuses on 24x2 + 48x1 for 'Small' width but those are extremely slow.\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=384, depth=18, num_heads=6, init_values=1e-5, block_fn=ParallelThingsBlock)\n    model = _create_vision_transformer(\n        'vit_small_patch16_18x2_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_18x2_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-Base w/ LayerScale + 18 x 2 (36 block parallel) config. Experimental, may remove.\n    Based on `Three things everyone should know about Vision Transformers` - https://arxiv.org/abs/2203.09795\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=18, num_heads=12, init_values=1e-5, block_fn=ParallelThingsBlock)\n    model = _create_vision_transformer(\n        'vit_base_patch16_18x2_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef eva_large_patch14_196(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" EVA-large model https://arxiv.org/abs/2211.07636 /via MAE MIM pretrain\"\"\"\n    model_args = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16, global_pool='avg')\n    model = _create_vision_transformer(\n        'eva_large_patch14_196', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef eva_large_patch14_336(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" EVA-large model https://arxiv.org/abs/2211.07636 via MAE MIM pretrain\"\"\"\n    model_args = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16, global_pool='avg')\n    model = _create_vision_transformer('eva_large_patch14_336', pretrained=pretrained, **dict(model_args, **kwargs))\n    return 
model\n\n\n@register_model\ndef flexivit_small(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" FlexiViT-Small\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, no_embed_class=True)\n    model = _create_vision_transformer('flexivit_small', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef flexivit_base(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" FlexiViT-Base\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, no_embed_class=True)\n    model = _create_vision_transformer('flexivit_base', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef flexivit_large(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" FlexiViT-Large\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, no_embed_class=True)\n    model = _create_vision_transformer('flexivit_large', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_xp_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-Large model (ViT-L/14) w/ parallel blocks and qk norm enabled.\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, pre_norm=True, no_embed_class=True,\n        norm_layer=RmsNorm, block_fn=ParallelScalingBlock, qkv_bias=False, qk_norm=True,\n    )\n    model = _create_vision_transformer(\n        'vit_base_patch16_xp_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch14_xp_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-Large model (ViT-L/14) w/ parallel blocks and qk norm enabled.\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1024, depth=24, num_heads=16, pre_norm=True, no_embed_class=True,\n        
norm_layer=RmsNorm, block_fn=ParallelScalingBlock, qkv_bias=False, qk_norm=True,\n    )\n    model = _create_vision_transformer(\n        'vit_large_patch14_xp_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_huge_patch14_xp_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-Huge model (ViT-H/14) w/ parallel blocks and qk norm enabled.\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1280, depth=32, num_heads=16, pre_norm=True, no_embed_class=True,\n        norm_layer=RmsNorm, block_fn=ParallelScalingBlock, qkv_bias=False, qk_norm=True,\n    )\n    model = _create_vision_transformer(\n        'vit_huge_patch14_xp_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_small_patch14_dinov2(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-S/14 for DINOv2\n    \"\"\"\n    model_args = dict(patch_size=14, embed_dim=384, depth=12, num_heads=6, init_values=1e-5)\n    model = _create_vision_transformer(\n        'vit_small_patch14_dinov2', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch14_dinov2(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-B/14 for DINOv2\n    \"\"\"\n    model_args = dict(patch_size=14, embed_dim=768, depth=12, num_heads=12, init_values=1e-5)\n    model = _create_vision_transformer(\n        'vit_base_patch14_dinov2', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch14_dinov2(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-L/14 for DINOv2\n    \"\"\"\n    model_args = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16, init_values=1e-5)\n    model = _create_vision_transformer(\n        'vit_large_patch14_dinov2', pretrained=pretrained, **dict(model_args, **kwargs))\n    return 
model\n\n\n@register_model\ndef vit_giant_patch14_dinov2(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-G/14 for DINOv2\n    \"\"\"\n    # The hidden_features of SwiGLU is calculated by:\n    # hidden_features = (int(hidden_features * 2 / 3) + 7) // 8 * 8\n    # When embed_dim=1536, hidden_features=4096\n    # With SwiGLUPacked, we need to set hidden_features = 2 * 4096 = 8192\n    model_args = dict(\n        patch_size=14, embed_dim=1536, depth=40, num_heads=24, init_values=1e-5,\n        mlp_ratio=2.66667 * 2, mlp_layer=SwiGLUPacked, act_layer=nn.SiLU\n    )\n    model = _create_vision_transformer(\n        'vit_giant_patch14_dinov2', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_small_patch14_reg4_dinov2(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-S/14 for DINOv2 w/ 4 registers\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=384, depth=12, num_heads=6, init_values=1e-5,\n        reg_tokens=4, no_embed_class=True,\n    )\n    model = _create_vision_transformer(\n        'vit_small_patch14_reg4_dinov2', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch14_reg4_dinov2(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-B/14 for DINOv2 w/ 4 registers\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=768, depth=12, num_heads=12, init_values=1e-5,\n        reg_tokens=4, no_embed_class=True,\n    )\n    model = _create_vision_transformer(\n        'vit_base_patch14_reg4_dinov2', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch14_reg4_dinov2(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-L/14 for DINOv2 w/ 4 registers\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1024, depth=24, num_heads=16, init_values=1e-5,\n        
reg_tokens=4, no_embed_class=True,\n    )\n    model = _create_vision_transformer(\n        'vit_large_patch14_reg4_dinov2', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_giant_patch14_reg4_dinov2(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT-G/14 for DINOv2\n    \"\"\"\n    # The hidden_features of SwiGLU is calculated by:\n    # hidden_features = (int(hidden_features * 2 / 3) + 7) // 8 * 8\n    # When embed_dim=1536, hidden_features=4096\n    # With SwiGLUPacked, we need to set hidden_features = 2 * 4096 = 8192\n    model_args = dict(\n        patch_size=14, embed_dim=1536, depth=40, num_heads=24, init_values=1e-5, mlp_ratio=2.66667 * 2,\n        mlp_layer=SwiGLUPacked, act_layer=nn.SiLU, reg_tokens=4, no_embed_class=True,\n    )\n    model = _create_vision_transformer(\n        'vit_giant_patch14_reg4_dinov2', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch32_siglip_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=32, embed_dim=768, depth=12, num_heads=12, class_token=False, global_pool='map',\n        act_layer='gelu_tanh',\n    )\n    model = _create_vision_transformer(\n        'vit_base_patch32_siglip_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_siglip_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, class_token=False, global_pool='map',\n    )\n    model = _create_vision_transformer(\n        'vit_base_patch16_siglip_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_siglip_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, 
num_heads=12, class_token=False, global_pool='map',\n    )\n    model = _create_vision_transformer(\n        'vit_base_patch16_siglip_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_siglip_384(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, class_token=False, global_pool='map',\n    )\n    model = _create_vision_transformer(\n        'vit_base_patch16_siglip_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_siglip_512(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, class_token=False, global_pool='map',\n    )\n    model = _create_vision_transformer(\n        'vit_base_patch16_siglip_512', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch16_siglip_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=1024, depth=24, num_heads=16, class_token=False, global_pool='map',\n    )\n    model = _create_vision_transformer(\n        'vit_large_patch16_siglip_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch16_siglip_384(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=1024, depth=24, num_heads=16, class_token=False, global_pool='map',\n    )\n    model = _create_vision_transformer(\n        'vit_large_patch16_siglip_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch16_siglip_512(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=1024, depth=24, 
num_heads=16, class_token=False, global_pool='map',\n        act_layer='gelu_tanh'\n    )\n    model = _create_vision_transformer(\n        'vit_large_patch16_siglip_512', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so400m_patch14_siglip_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=14, embed_dim=1152, depth=27, num_heads=16, mlp_ratio=3.7362, class_token=False, global_pool='map',\n    )\n    model = _create_vision_transformer(\n        'vit_so400m_patch14_siglip_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so400m_patch14_siglip_378(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    # this is a corrected variant of the 384 with a res properly divisible by patch size (no padding/truncation)\n    model_args = dict(\n        patch_size=14, embed_dim=1152, depth=27, num_heads=16, mlp_ratio=3.7362, class_token=False, global_pool='map',\n    )\n    model = _create_vision_transformer(\n        'vit_so400m_patch14_siglip_378', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so400m_patch14_siglip_384(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=14, embed_dim=1152, depth=27, num_heads=16, mlp_ratio=3.7362, class_token=False, global_pool='map',\n    )\n    model = _create_vision_transformer(\n        'vit_so400m_patch14_siglip_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so400m_patch16_siglip_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=1152, depth=27, num_heads=16, mlp_ratio=3.7362, class_token=False, global_pool='map',\n        act_layer='gelu_tanh',\n    )\n    model = _create_vision_transformer(\n        'vit_so400m_patch16_siglip_256', 
pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so400m_patch16_siglip_384(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=1152, depth=27, num_heads=16, mlp_ratio=3.7362, class_token=False, global_pool='map',\n        act_layer='gelu_tanh',\n    )\n    model = _create_vision_transformer(\n        'vit_so400m_patch16_siglip_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so400m_patch16_siglip_512(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=1152, depth=27, num_heads=16, mlp_ratio=3.7362, class_token=False, global_pool='map',\n        act_layer='gelu_tanh',\n    )\n    model = _create_vision_transformer(\n        'vit_so400m_patch16_siglip_512', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_giantopt_patch16_siglip_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=1536, depth=40, num_heads=16, class_token=False, global_pool='map',\n        act_layer='gelu_tanh',\n    )\n    model = _create_vision_transformer(\n        'vit_giantopt_patch16_siglip_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_giantopt_patch16_siglip_384(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=1536, depth=40, num_heads=16, class_token=False, global_pool='map',\n        act_layer='gelu_tanh',\n    )\n    model = _create_vision_transformer(\n        'vit_giantopt_patch16_siglip_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch32_siglip_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        
patch_size=32, embed_dim=768, depth=12, num_heads=12, class_token=False, global_pool='avg', fc_norm=False,\n        act_layer='gelu_tanh',\n    )\n    model = _create_vision_transformer(\n        'vit_base_patch32_siglip_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_siglip_gap_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" A SigLIP variant of ViT with global average pooling (GAP) instead of attention pooling (MAP).\"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, class_token=False, global_pool='avg', fc_norm=False,\n    )\n    model = _create_vision_transformer(\n        'vit_base_patch16_siglip_gap_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_siglip_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" A SigLIP variant of ViT with global average pooling (GAP) instead of attention pooling (MAP).\"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, class_token=False, global_pool='avg', fc_norm=False,\n    )\n    model = _create_vision_transformer(\n        'vit_base_patch16_siglip_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_siglip_gap_384(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" A SigLIP variant of ViT with global average pooling (GAP) instead of attention pooling (MAP).\"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, class_token=False, global_pool='avg', fc_norm=False,\n    )\n    model = _create_vision_transformer(\n        'vit_base_patch16_siglip_gap_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_patch16_siglip_gap_512(pretrained: bool = False, **kwargs) -> 
VisionTransformer:\n    \"\"\" A SigLIP variant of ViT with global average pooling (GAP) instead of attention pooling (MAP).\"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, class_token=False, global_pool='avg', fc_norm=False,\n    )\n    model = _create_vision_transformer(\n        'vit_base_patch16_siglip_gap_512', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch16_siglip_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" A SigLIP variant of ViT with global average pooling (GAP) instead of attention pooling (MAP).\"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=1024, depth=24, num_heads=16, class_token=False, global_pool='avg', fc_norm=False,\n    )\n    model = _create_vision_transformer(\n        'vit_large_patch16_siglip_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch16_siglip_gap_384(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" A SigLIP variant of ViT with global average pooling (GAP) instead of attention pooling (MAP).\"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=1024, depth=24, num_heads=16, class_token=False, global_pool='avg', fc_norm=False,\n    )\n    model = _create_vision_transformer(\n        'vit_large_patch16_siglip_gap_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_patch16_siglip_gap_512(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=1024, depth=24, num_heads=16, class_token=False,\n        global_pool='avg', fc_norm=False, act_layer='gelu_tanh'\n    )\n    model = _create_vision_transformer(\n        'vit_large_patch16_siglip_gap_512', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef 
vit_so400m_patch14_siglip_gap_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" A SigLIP variant of ViT with global average pooling (GAP) instead of attention pooling (MAP).\"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1152, depth=27, num_heads=16, mlp_ratio=3.7362,\n        class_token=False, global_pool='avg', fc_norm=False,\n    )\n    model = _create_vision_transformer(\n        'vit_so400m_patch14_siglip_gap_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so400m_patch14_siglip_gap_378(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" A SigLIP variant of ViT with global average pooling (GAP) instead of attention pooling (MAP).\"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1152, depth=27, num_heads=16, mlp_ratio=3.7362,\n        class_token=False, global_pool='avg', fc_norm=False,\n    )\n    model = _create_vision_transformer(\n        'vit_so400m_patch14_siglip_gap_378', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so400m_patch14_siglip_gap_384(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" A SigLIP variant of ViT with global average pooling (GAP) instead of attention pooling (MAP).\"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1152, depth=27, num_heads=16, mlp_ratio=3.7362,\n        class_token=False, global_pool='avg', fc_norm=False,\n    )\n    model = _create_vision_transformer(\n        'vit_so400m_patch14_siglip_gap_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so400m_patch14_siglip_gap_448(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" A SigLIP variant of ViT with global average pooling (GAP) instead of attention pooling (MAP).\"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1152, depth=27, num_heads=16, 
mlp_ratio=3.7362,\n        class_token=False, global_pool='avg', fc_norm=False,\n    )\n    model = _create_vision_transformer(\n        'vit_so400m_patch14_siglip_gap_448', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so400m_patch14_siglip_gap_896(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" A SigLIP variant of ViT with global average pooling (GAP) instead of attention pooling (MAP).\"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1152, depth=27, num_heads=16, mlp_ratio=3.7362,\n        class_token=False, global_pool='avg', fc_norm=False,\n    )\n    model = _create_vision_transformer(\n        'vit_so400m_patch14_siglip_gap_896', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so400m_patch16_siglip_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" A SigLIP variant of ViT with global average pooling (GAP) instead of attention pooling (MAP).\"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=1152, depth=27, num_heads=16, mlp_ratio=3.7362,\n        class_token=False, global_pool='avg', fc_norm=False, act_layer='gelu_tanh',\n    )\n    model = _create_vision_transformer(\n        'vit_so400m_patch16_siglip_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so400m_patch16_siglip_gap_384(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=1152, depth=27, num_heads=16, mlp_ratio=3.7362, class_token=False,\n        global_pool='avg', fc_norm=False, act_layer='gelu_tanh'\n    )\n    model = _create_vision_transformer(\n        'vit_so400m_patch16_siglip_gap_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so400m_patch16_siglip_gap_512(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    
model_args = dict(\n        patch_size=16, embed_dim=1152, depth=27, num_heads=16, mlp_ratio=3.7362, class_token=False,\n        global_pool='avg', fc_norm=False, act_layer='gelu_tanh'\n    )\n    model = _create_vision_transformer(\n        'vit_so400m_patch16_siglip_gap_512', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_giantopt_patch16_siglip_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=1536, depth=40, num_heads=16, class_token=False,\n        global_pool='avg', fc_norm=False, act_layer='gelu_tanh'\n    )\n    model = _create_vision_transformer(\n        'vit_giantopt_patch16_siglip_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_giantopt_patch16_siglip_gap_384(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=1536, depth=40, num_heads=16, class_token=False,\n        global_pool='avg', fc_norm=False, act_layer='gelu_tanh'\n    )\n    model = _create_vision_transformer(\n        'vit_giantopt_patch16_siglip_gap_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_wee_patch16_reg1_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=256, depth=14, num_heads=4, init_values=1e-5, mlp_ratio=5,\n        class_token=False, no_embed_class=True, reg_tokens=1, global_pool='avg',\n    )\n    model = _create_vision_transformer(\n        'vit_wee_patch16_reg1_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_dwee_patch16_reg1_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=256, depth=14, num_heads=4, init_values=1e-5, mlp_ratio=5,\n        class_token=False, 
no_embed_class=True, reg_tokens=1, global_pool='avg', attn_layer='diff',\n    )\n    model = _create_vision_transformer(\n        'vit_dwee_patch16_reg1_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_pwee_patch16_reg1_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=256, depth=16, num_heads=4, init_values=1e-5, mlp_ratio=5,\n        class_token=False, no_embed_class=True, reg_tokens=1, global_pool='avg', block_fn=ParallelScalingBlock,\n    )\n    model = _create_vision_transformer(\n        'vit_pwee_patch16_reg1_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_dpwee_patch16_reg1_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=256, depth=16, num_heads=4, init_values=1e-5, mlp_ratio=5,\n        class_token=False, no_embed_class=True, reg_tokens=1, global_pool='avg', block_fn=DiffParallelScalingBlock,\n    )\n    model = _create_vision_transformer(\n        'vit_dpwee_patch16_reg1_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_little_patch16_reg1_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=320, depth=14, num_heads=5, init_values=1e-5, mlp_ratio=5.6,\n        class_token=False, no_embed_class=True, reg_tokens=1, global_pool='avg',\n    )\n    model = _create_vision_transformer(\n        'vit_little_patch16_reg1_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_dlittle_patch16_reg1_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=320, depth=14, num_heads=5, init_values=1e-5, mlp_ratio=5.6,\n        class_token=False, 
no_embed_class=True, reg_tokens=1, global_pool='avg', attn_layer='diff',\n    )\n    model = _create_vision_transformer(\n        'vit_dlittle_patch16_reg1_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_little_patch16_reg4_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=320, depth=14, num_heads=5, init_values=1e-5, mlp_ratio=5.6,\n        class_token=False, no_embed_class=True, reg_tokens=4, global_pool='avg',\n    )\n    model = _create_vision_transformer(\n        'vit_little_patch16_reg4_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_medium_patch16_reg1_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=512, depth=12, num_heads=8, init_values=1e-5,\n        class_token=False, no_embed_class=True, reg_tokens=1, global_pool='avg',\n    )\n    model = _create_vision_transformer(\n        'vit_medium_patch16_reg1_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_medium_patch16_reg4_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=512, depth=12, num_heads=8, init_values=1e-5,\n        class_token=False, no_embed_class=True, reg_tokens=4, global_pool='avg',\n    )\n    model = _create_vision_transformer(\n        'vit_medium_patch16_reg4_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_mediumd_patch16_reg4_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=512, depth=20, num_heads=8, init_values=1e-5,\n        class_token=False, no_embed_class=True, reg_tokens=4, global_pool='avg',\n    )\n    model = _create_vision_transformer(\n 
       'vit_mediumd_patch16_reg4_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_mediumd_patch16_reg4_gap_384(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=512, depth=20, num_heads=8, init_values=1e-5,\n        class_token=False, no_embed_class=True, reg_tokens=4, global_pool='avg',\n    )\n    model = _create_vision_transformer(\n        'vit_mediumd_patch16_reg4_gap_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_betwixt_patch16_reg1_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=640, depth=12, num_heads=10, init_values=1e-5,\n        class_token=False, no_embed_class=True, reg_tokens=1, global_pool='avg',\n    )\n    model = _create_vision_transformer(\n        'vit_betwixt_patch16_reg1_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_betwixt_patch16_reg4_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=640, depth=12, num_heads=10, init_values=1e-5,\n        class_token=False, no_embed_class=True, reg_tokens=4, global_pool='avg',\n    )\n    model = _create_vision_transformer(\n        'vit_betwixt_patch16_reg4_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_betwixt_patch16_reg4_gap_384(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=640, depth=12, num_heads=10, init_values=1e-5,\n        class_token=False, no_embed_class=True, reg_tokens=4, global_pool='avg',\n    )\n    model = _create_vision_transformer(\n        'vit_betwixt_patch16_reg4_gap_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return 
model\n\n\n@register_model\ndef vit_base_patch16_reg4_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, class_token=False,\n        no_embed_class=True, global_pool='avg', reg_tokens=4,\n    )\n    model = _create_vision_transformer(\n        'vit_base_patch16_reg4_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so150m_patch16_reg4_map_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" SO150M (shape optimized, but diff than paper def, optimized for GPU) \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=896, depth=18, num_heads=14, mlp_ratio=2.572,\n        class_token=False, reg_tokens=4, global_pool='map',\n    )\n    model = _create_vision_transformer(\n        'vit_so150m_patch16_reg4_map_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so150m_patch16_reg4_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" SO150M (shape optimized, but diff than paper def, optimized for GPU) \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=896, depth=18, num_heads=14, mlp_ratio=2.572,\n        class_token=False, reg_tokens=4, global_pool='avg', fc_norm=False,\n    )\n    model = _create_vision_transformer(\n        'vit_so150m_patch16_reg4_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so150m_patch16_reg4_gap_384(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" SO150M (shape optimized, but diff than paper def, optimized for GPU) \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=896, depth=18, num_heads=14, mlp_ratio=2.572,\n        class_token=False, reg_tokens=4, global_pool='avg', fc_norm=False,\n    )\n    model = _create_vision_transformer(\n        
'vit_so150m_patch16_reg4_gap_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so150m2_patch16_reg1_gap_256(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" SO150M v2 (shape optimized, but diff than paper def, optimized for GPU) \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=832, depth=21, num_heads=13, mlp_ratio=34/13, init_values=1e-5,\n        qkv_bias=False, class_token=False, reg_tokens=1, global_pool='avg',\n    )\n    model = _create_vision_transformer(\n        'vit_so150m2_patch16_reg1_gap_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so150m2_patch16_reg1_gap_384(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" SO150M v2 (shape optimized, but diff than paper def, optimized for GPU) \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=832, depth=21, num_heads=13, mlp_ratio=34/13, init_values=1e-5,\n        qkv_bias=False, class_token=False, reg_tokens=1, global_pool='avg',\n    )\n    model = _create_vision_transformer(\n        'vit_so150m2_patch16_reg1_gap_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_so150m2_patch16_reg1_gap_448(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" SO150M v2 (shape optimized, but diff than paper def, optimized for GPU) \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=832, depth=21, num_heads=13, mlp_ratio=34/13, init_values=1e-5,\n        qkv_bias=False, class_token=False, reg_tokens=1, global_pool='avg',\n    )\n    model = _create_vision_transformer(\n        'vit_so150m2_patch16_reg1_gap_448', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_intern300m_patch14_448(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    model_args = dict(\n        patch_size=14, embed_dim=1024, 
depth=24, num_heads=16,\n        init_values=0.1, final_norm=False, dynamic_img_size=True,\n    )\n    model = _create_vision_transformer(\n        'vit_intern300m_patch14_448', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef aimv2_large_patch14_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT Large AIM-v2 model\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1024, depth=24, num_heads=8, class_token=False, fc_norm=False,\n        mlp_ratio=2.75, global_pool='avg', qkv_bias=False, proj_bias=False, act_layer='silu',\n        norm_layer=partial(RmsNorm, eps=1e-5), embed_norm_layer=partial(RmsNorm, eps=1e-5), mlp_layer=SwiGLU,\n    )\n    model = _create_vision_transformer(\n        'aimv2_large_patch14_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef aimv2_huge_patch14_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT Huge AIM-v2 model\n    \"\"\"\n\n    model_args = dict(\n        patch_size=14, embed_dim=1536, depth=24, num_heads=12, class_token=False, fc_norm=False,\n        mlp_ratio=2.6667, global_pool='avg', qkv_bias=False, proj_bias=False, act_layer='silu',\n        norm_layer=partial(RmsNorm, eps=1e-5), embed_norm_layer=partial(RmsNorm, eps=1e-5), mlp_layer=SwiGLU,\n    )\n    model = _create_vision_transformer(\n        'aimv2_huge_patch14_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef aimv2_1b_patch14_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT 1B AIM-v2 model\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=2048, depth=24, num_heads=16, class_token=False, fc_norm=False,\n        mlp_ratio=2.75, global_pool='avg', qkv_bias=False, proj_bias=False, act_layer='silu',\n        norm_layer=partial(RmsNorm, eps=1e-5), embed_norm_layer=partial(RmsNorm, eps=1e-5), 
mlp_layer=SwiGLU,\n    )\n    model = _create_vision_transformer(\n        'aimv2_1b_patch14_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef aimv2_3b_patch14_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT 3B AIM-v2 model\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=3072, depth=24, num_heads=24, class_token=False, fc_norm=False,\n        mlp_ratio=2.6667, global_pool='avg', qkv_bias=False, proj_bias=False, act_layer='silu',\n        norm_layer=partial(RmsNorm, eps=1e-5), embed_norm_layer=partial(RmsNorm, eps=1e-5), mlp_layer=SwiGLU,\n    )\n    model = _create_vision_transformer(\n        'aimv2_3b_patch14_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef aimv2_large_patch14_336(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT Large AIM-v2 model\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1024, depth=24, num_heads=8, class_token=False, fc_norm=False,\n        mlp_ratio=2.75, global_pool='avg', qkv_bias=False, proj_bias=False, act_layer='silu',\n        norm_layer=partial(RmsNorm, eps=1e-5), embed_norm_layer=partial(RmsNorm, eps=1e-5), mlp_layer=SwiGLU,\n    )\n    model = _create_vision_transformer(\n        'aimv2_large_patch14_336', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef aimv2_huge_patch14_336(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT Huge AIM-v2 model\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1536, depth=24, num_heads=12, class_token=False, fc_norm=False,\n        mlp_ratio=2.6667, global_pool='avg', qkv_bias=False, proj_bias=False, act_layer='silu',\n        norm_layer=partial(RmsNorm, eps=1e-5), embed_norm_layer=partial(RmsNorm, eps=1e-5), mlp_layer=SwiGLU,\n    )\n    model = _create_vision_transformer(\n        
'aimv2_huge_patch14_336', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef aimv2_1b_patch14_336(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT 1B AIM-v2 model\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=2048, depth=24, num_heads=16, class_token=False, fc_norm=False,\n        mlp_ratio=2.75, global_pool='avg', qkv_bias=False, proj_bias=False, act_layer='silu',\n        norm_layer=partial(RmsNorm, eps=1e-5), embed_norm_layer=partial(RmsNorm, eps=1e-5), mlp_layer=SwiGLU,\n    )\n    model = _create_vision_transformer(\n        'aimv2_1b_patch14_336', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef aimv2_3b_patch14_336(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT 3B AIM-v2 model\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=3072, depth=24, num_heads=24, class_token=False, fc_norm=False,\n        mlp_ratio=2.6667, global_pool='avg', qkv_bias=False, proj_bias=False, act_layer='silu',\n        norm_layer=partial(RmsNorm, eps=1e-5), embed_norm_layer=partial(RmsNorm, eps=1e-5), mlp_layer=SwiGLU,\n    )\n    model = _create_vision_transformer(\n        'aimv2_3b_patch14_336', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef aimv2_large_patch14_448(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT Large AIM-v2 model\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1024, depth=24, num_heads=8, class_token=False, fc_norm=False,\n        mlp_ratio=2.75, global_pool='avg', qkv_bias=False, proj_bias=False, act_layer='silu',\n        norm_layer=partial(RmsNorm, eps=1e-5), embed_norm_layer=partial(RmsNorm, eps=1e-5), mlp_layer=SwiGLU,\n    )\n    model = _create_vision_transformer(\n        'aimv2_large_patch14_448', pretrained=pretrained, **dict(model_args, **kwargs))\n    return 
model\n\n\n@register_model\ndef aimv2_huge_patch14_448(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT Huge AIM-v2 model\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1536, depth=24, num_heads=12, class_token=False, fc_norm=False,\n        mlp_ratio=2.6667, global_pool='avg', qkv_bias=False, proj_bias=False, act_layer='silu',\n        norm_layer=partial(RmsNorm, eps=1e-5), embed_norm_layer=partial(RmsNorm, eps=1e-5), mlp_layer=SwiGLU,\n    )\n    model = _create_vision_transformer(\n        'aimv2_huge_patch14_448', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef aimv2_1b_patch14_448(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT 1B AIM-v2 model\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=2048, depth=24, num_heads=16, class_token=False, fc_norm=False,\n        mlp_ratio=2.75, global_pool='avg', qkv_bias=False, proj_bias=False, act_layer='silu',\n        norm_layer=partial(RmsNorm, eps=1e-5), embed_norm_layer=partial(RmsNorm, eps=1e-5), mlp_layer=SwiGLU,\n    )\n    model = _create_vision_transformer(\n        'aimv2_1b_patch14_448', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef aimv2_3b_patch14_448(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT 3B AIM-v2 model\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=3072, depth=24, num_heads=24, class_token=False, fc_norm=False,\n        mlp_ratio=2.6667, global_pool='avg', qkv_bias=False, proj_bias=False, act_layer='silu',\n        norm_layer=partial(RmsNorm, eps=1e-5), embed_norm_layer=partial(RmsNorm, eps=1e-5), mlp_layer=SwiGLU,\n    )\n    model = _create_vision_transformer(\n        'aimv2_3b_patch14_448', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef test_vit(pretrained: bool = False, **kwargs) -> VisionTransformer:\n   
 \"\"\" ViT Test\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=64, depth=6, num_heads=2, mlp_ratio=3, dynamic_img_size=True)\n    model = _create_vision_transformer('test_vit', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef test_vit2(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT Test\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=64, depth=8, num_heads=2, mlp_ratio=3,\n        class_token=False, reg_tokens=1, global_pool='avg', init_values=1e-5, dynamic_img_size=True)\n    model = _create_vision_transformer('test_vit2', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef test_vit3(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT Test\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=96, depth=9, num_heads=3, mlp_ratio=2,\n        class_token=False, reg_tokens=1, global_pool='map', pool_include_prefix=True, init_values=1e-5)\n    model = _create_vision_transformer('test_vit3', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef test_vit4(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" ViT Test\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=96, depth=9, num_heads=3, mlp_ratio=3,\n        class_token=False, reg_tokens=1, global_pool='avg', init_values=1e-5, dynamic_img_size=True,\n        norm_layer='rmsnorm',\n    )\n    model = _create_vision_transformer('test_vit4', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef beit3_base_patch16_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" BEiT3 Base model (ViT-Base size) with patch size 16x16.\n    Remapped to VisionTransformer with scale_norm=True.\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4,\n  
      scale_attn_norm=True, scale_mlp_norm=True, class_token=True, global_pool='avg',\n        norm_layer=partial(LayerNorm, eps=1e-5)\n    )\n    model = _create_vision_transformer('beit3_base_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef beit3_large_patch16_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" BEiT3 Large model (ViT-Large size) with patch size 16x16.\n    Remapped to VisionTransformer with scale_norm=True.\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4,\n        scale_attn_norm=True, scale_mlp_norm=True, class_token=True, global_pool='avg',\n        norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n    model = _create_vision_transformer('beit3_large_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef beit3_giant_patch14_224(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" BEiT3 Giant model with patch size 14x14.\n    Remapped to VisionTransformer with scale_norm=True.\n    \"\"\"\n    model_args = dict(\n        patch_size=14, embed_dim=1408, depth=40, num_heads=16, mlp_ratio=4.3637,\n        scale_attn_norm=True, scale_mlp_norm=True, class_token=True, global_pool='avg',\n        norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n    model = _create_vision_transformer('beit3_giant_patch14_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef beit3_giant_patch14_336(pretrained: bool = False, **kwargs) -> VisionTransformer:\n    \"\"\" BEiT3 Giant model with patch size 14x14 and image size 336x336.\n    Remapped to VisionTransformer with scale_norm=True.\n    \"\"\"\n    model_args = dict(\n        img_size=336, patch_size=14, embed_dim=1408, depth=40, num_heads=16, mlp_ratio=4.3637,\n        scale_attn_norm=True, scale_mlp_norm=True, class_token=True, global_pool='avg',\n      
  norm_layer=partial(LayerNorm, eps=1e-5),\n    )\n    model = _create_vision_transformer('beit3_giant_patch14_336', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\nregister_model_deprecations(__name__, {\n    'vit_tiny_patch16_224_in21k': 'vit_tiny_patch16_224.augreg_in21k',\n    'vit_small_patch32_224_in21k': 'vit_small_patch32_224.augreg_in21k',\n    'vit_small_patch16_224_in21k': 'vit_small_patch16_224.augreg_in21k',\n    'vit_base_patch32_224_in21k': 'vit_base_patch32_224.augreg_in21k',\n    'vit_base_patch16_224_in21k': 'vit_base_patch16_224.augreg_in21k',\n    'vit_base_patch8_224_in21k': 'vit_base_patch8_224.augreg_in21k',\n    'vit_large_patch32_224_in21k': 'vit_large_patch32_224.orig_in21k',\n    'vit_large_patch16_224_in21k': 'vit_large_patch16_224.augreg_in21k',\n    'vit_huge_patch14_224_in21k': 'vit_huge_patch14_224.orig_in21k',\n    'vit_base_patch32_224_sam': 'vit_base_patch32_224.sam',\n    'vit_base_patch16_224_sam': 'vit_base_patch16_224.sam',\n    'vit_small_patch16_224_dino': 'vit_small_patch16_224.dino',\n    'vit_small_patch8_224_dino': 'vit_small_patch8_224.dino',\n    'vit_base_patch16_224_dino': 'vit_base_patch16_224.dino',\n    'vit_base_patch8_224_dino': 'vit_base_patch8_224.dino',\n    'vit_base_patch16_224_miil_in21k': 'vit_base_patch16_224_miil.in21k',\n    'vit_base_patch32_224_clip_laion2b': 'vit_base_patch32_clip_224.laion2b',\n    'vit_large_patch14_224_clip_laion2b': 'vit_large_patch14_clip_224.laion2b',\n    'vit_huge_patch14_224_clip_laion2b': 'vit_huge_patch14_clip_224.laion2b',\n    'vit_giant_patch14_224_clip_laion2b': 'vit_giant_patch14_clip_224.laion2b',\n})\n"
  },
  {
    "path": "timm/models/vision_transformer_hybrid.py",
    "content": "\"\"\" Hybrid Vision Transformer (ViT) in PyTorch\n\nA PyTorch implement of the Hybrid Vision Transformers as described in:\n\n'An Image Is Worth 16 x 16 Words: Transformers for Image Recognition at Scale'\n    - https://arxiv.org/abs/2010.11929\n\n`How to train your ViT? Data, Augmentation, and Regularization in Vision Transformers`\n    - https://arxiv.org/abs/2106.10270\n\nNOTE These hybrid model definitions depend on code in vision_transformer.py.\nThey were moved here to keep file sizes sane.\n\nHacked together by / Copyright 2020, Ross Wightman\n\"\"\"\nfrom functools import partial\nfrom typing import Dict, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import StdConv2dSame, StdConv2d, ConvNormAct, to_ntuple, HybridEmbed\n\nfrom ._builder import build_model_with_cfg\nfrom ._registry import generate_default_cfgs, register_model, register_model_deprecations\nfrom .resnet import resnet26d, resnet50d\nfrom .resnetv2 import ResNetV2, create_resnetv2_stem\nfrom .vision_transformer import VisionTransformer\n\n\nclass ConvStem(nn.Sequential):\n    def __init__(\n            self,\n            in_chans: int = 3,\n            depth: int = 3,\n            channels: Union[int, Tuple[int, ...]] = 64,\n            kernel_size: Union[int, Tuple[int, ...]] = 3,\n            stride: Union[int, Tuple[int, ...]] = (2, 2, 2),\n            padding: Union[str, int, Tuple[int, ...]] = \"\",\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        if isinstance(channels, int):\n            # a default tiered channel strategy\n            channels = tuple([channels // 2**i for i in range(depth)][::-1])\n\n        kernel_size = to_ntuple(depth)(kernel_size)\n        padding = 
to_ntuple(depth)(padding)\n        assert depth == len(stride) == len(kernel_size) == len(channels)\n\n        in_chs = in_chans\n        for i in range(len(channels)):\n            last_conv = i == len(channels) - 1\n            self.add_module(f'{i}', ConvNormAct(\n                in_chs,\n                channels[i],\n                kernel_size=kernel_size[i],\n                stride=stride[i],\n                padding=padding[i],\n                bias=last_conv,\n                apply_norm=not last_conv,\n                apply_act=not last_conv,\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                **dd,\n            ))\n            in_chs = channels[i]\n\n\ndef _dd_from_kwargs(**kwargs):\n    return {'device': kwargs.get('device', None), 'dtype': kwargs.get('dtype', None)}\n\n\ndef _resnetv2(layers=(3, 4, 9), **kwargs):\n    \"\"\" ResNet-V2 backbone helper\"\"\"\n    padding_same = kwargs.get('padding_same', True)\n    stem_type = 'same' if padding_same else ''\n    conv_layer = partial(StdConv2dSame, eps=1e-8) if padding_same else partial(StdConv2d, eps=1e-8)\n    if len(layers):\n        backbone = ResNetV2(\n            layers=layers,\n            num_classes=0,\n            global_pool='',\n            in_chans=kwargs.get('in_chans', 3),\n            preact=False,\n            stem_type=stem_type,\n            conv_layer=conv_layer,\n            **_dd_from_kwargs(**kwargs),\n        )\n    else:\n        backbone = create_resnetv2_stem(\n            kwargs.get('in_chans', 3),\n            stem_type=stem_type,\n            preact=False,\n            conv_layer=conv_layer,\n            **_dd_from_kwargs(**kwargs),\n        )\n    return backbone\n\n\ndef _convert_mobileclip(state_dict, model, prefix='image_encoder.model.'):\n    out = {}\n    for k, v in state_dict.items():\n        if not k.startswith(prefix):\n            continue\n        k = k.replace(prefix, '')\n        k = k.replace('patch_emb.', 
'patch_embed.backbone.')\n        k = k.replace('block.conv', 'conv')\n        k = k.replace('block.norm', 'bn')\n        k = k.replace('post_transformer_norm.', 'norm.')\n        k = k.replace('pre_norm_mha.0', 'norm1')\n        k = k.replace('pre_norm_mha.1', 'attn')\n        k = k.replace('pre_norm_ffn.0', 'norm2')\n        k = k.replace('pre_norm_ffn.1', 'mlp.fc1')\n        k = k.replace('pre_norm_ffn.4', 'mlp.fc2')\n        k = k.replace('qkv_proj.', 'qkv.')\n        k = k.replace('out_proj.', 'proj.')\n        k = k.replace('transformer.', 'blocks.')\n        if k == 'pos_embed.pos_embed.pos_embed':\n            k = 'pos_embed'\n            v = v.squeeze(0)\n        if 'classifier.proj' in k:\n            bias_k = k.replace('classifier.proj', 'head.bias')\n            k = k.replace('classifier.proj', 'head.weight')\n            v = v.T\n            out[bias_k] = torch.zeros(v.shape[0])\n        out[k] = v\n    return out\n\n\ndef checkpoint_filter_fn(\n        state_dict: Dict[str, torch.Tensor],\n        model: VisionTransformer,\n        interpolation: str = 'bicubic',\n        antialias: bool = True,\n) -> Dict[str, torch.Tensor]:\n    from .vision_transformer import checkpoint_filter_fn as _filter_fn\n\n    if 'image_encoder.model.patch_emb.0.block.conv.weight' in state_dict:\n        state_dict = _convert_mobileclip(state_dict, model)\n\n    return _filter_fn(state_dict, model, interpolation=interpolation, antialias=antialias)\n\n\ndef _create_vision_transformer_hybrid(variant, backbone, embed_args=None, pretrained=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', 3)\n    embed_args = embed_args or {}\n    embed_layer = partial(HybridEmbed, backbone=backbone, **embed_args)\n    kwargs.setdefault('embed_layer', embed_layer)\n    kwargs.setdefault('patch_size', 1)  # default patch size for hybrid models if not set\n    return build_model_with_cfg(\n        VisionTransformer,\n        variant,\n        pretrained,\n        
pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': (0.5, 0.5, 0.5), 'std': (0.5, 0.5, 0.5),\n        'first_conv': 'patch_embed.backbone.stem.conv', 'classifier': 'head',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    # hybrid in-1k models (weights from official JAX impl where they exist)\n    'vit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/R_Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz',\n        hf_hub_id='timm/',\n        custom_load=True,\n        first_conv='patch_embed.backbone.conv'),\n    'vit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/R_Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz',\n        hf_hub_id='timm/',\n        first_conv='patch_embed.backbone.conv', input_size=(3, 384, 384), crop_pct=1.0, custom_load=True),\n    'vit_small_r26_s32_224.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/R26_S_32-i21k-300ep-lr_0.001-aug_light0-wd_0.03-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.03-res_224.npz',\n        hf_hub_id='timm/',\n        custom_load=True,\n    ),\n    'vit_small_r26_s32_384.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/R26_S_32-i21k-300ep-lr_0.001-aug_medium2-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz',\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), crop_pct=1.0, custom_load=True),\n    
'vit_base_r26_s32_224.untrained': _cfg(),\n    'vit_base_r50_s16_384.orig_in21k_ft_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_resnet50_384-9fd3c705.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), crop_pct=1.0),\n    'vit_large_r50_s32_224.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/R50_L_32-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_224.npz',\n        hf_hub_id='timm/',\n        custom_load=True,\n    ),\n    'vit_large_r50_s32_384.augreg_in21k_ft_in1k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/R50_L_32-i21k-300ep-lr_0.001-aug_medium2-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_384.npz',\n        hf_hub_id='timm/',\n        input_size=(3, 384, 384), crop_pct=1.0, custom_load=True,\n    ),\n\n    # hybrid in-21k models (weights from official Google JAX impl where they exist)\n    'vit_tiny_r_s16_p8_224.augreg_in21k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/R_Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0.npz',\n        hf_hub_id='timm/',\n        num_classes=21843, crop_pct=0.9, first_conv='patch_embed.backbone.conv', custom_load=True),\n    'vit_small_r26_s32_224.augreg_in21k': _cfg(\n        url='https://storage.googleapis.com/vit_models/augreg/R26_S_32-i21k-300ep-lr_0.001-aug_medium2-wd_0.03-do_0.0-sd_0.0.npz',\n        hf_hub_id='timm/',\n        num_classes=21843, crop_pct=0.9, custom_load=True),\n    'vit_base_r50_s16_224.orig_in21k': _cfg(\n        #url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_resnet50_224_in21k-6f7c7740.pth',\n        hf_hub_id='timm/',\n        num_classes=0, crop_pct=0.9),\n    'vit_large_r50_s32_224.augreg_in21k': _cfg(\n        
url='https://storage.googleapis.com/vit_models/augreg/R50_L_32-i21k-300ep-lr_0.001-aug_medium2-wd_0.1-do_0.0-sd_0.0.npz',\n        hf_hub_id='timm/',\n        num_classes=21843, crop_pct=0.9, custom_load=True),\n\n    # hybrid models (using timm resnet backbones)\n    'vit_small_resnet26d_224.untrained': _cfg(\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, first_conv='patch_embed.backbone.conv1.0'),\n    'vit_small_resnet50d_s16_224.untrained': _cfg(\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, first_conv='patch_embed.backbone.conv1.0'),\n    'vit_base_resnet26d_224.untrained': _cfg(\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, first_conv='patch_embed.backbone.conv1.0'),\n    'vit_base_resnet50d_224.untrained': _cfg(\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, first_conv='patch_embed.backbone.conv1.0'),\n\n    'vit_base_mci_224.apple_mclip_lt': _cfg(\n        hf_hub_id='apple/mobileclip_b_lt_timm',\n        url='https://docs-assets.developer.apple.com/ml-research/datasets/mobileclip/mobileclip_blt.pt',\n        license='apple-amlr',\n        num_classes=512,\n        mean=(0., 0., 0.), std=(1., 1., 1.), first_conv='patch_embed.backbone.0.conv',\n    ),\n    'vit_base_mci_224.apple_mclip': _cfg(\n        hf_hub_id='apple/mobileclip_b_timm',\n        url='https://docs-assets.developer.apple.com/ml-research/datasets/mobileclip/mobileclip_b.pt',\n        num_classes=512,\n        license='apple-amlr',\n        mean=(0., 0., 0.), std=(1., 1., 1.), first_conv='patch_embed.backbone.0.conv',\n    ),\n    'vit_base_mci_224.apple_mclip2_dfndr2b': _cfg(\n        hf_hub_id='timm/',\n        num_classes=512,\n        mean=(0., 0., 0.), std=(1., 1., 1.), first_conv='patch_embed.backbone.0.conv',\n        license='apple-amlr'\n    ),\n})\n\n\n@register_model\ndef vit_tiny_r_s16_p8_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" R+ViT-Ti/S16 w/ 8x8 patch hybrid @ 224 x 224.\n    \"\"\"\n    
backbone = _resnetv2(layers=(), **kwargs)\n    model_args = dict(patch_size=8, embed_dim=192, depth=12, num_heads=3)\n    model = _create_vision_transformer_hybrid(\n        'vit_tiny_r_s16_p8_224', backbone=backbone, pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_tiny_r_s16_p8_384(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" R+ViT-Ti/S16 w/ 8x8 patch hybrid @ 384 x 384.\n    \"\"\"\n    backbone = _resnetv2(layers=(), **kwargs)\n    model_args = dict(patch_size=8, embed_dim=192, depth=12, num_heads=3)\n    model = _create_vision_transformer_hybrid(\n        'vit_tiny_r_s16_p8_384', backbone=backbone, pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_small_r26_s32_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" R26+ViT-S/S32 hybrid.\n    \"\"\"\n    backbone = _resnetv2((2, 2, 2, 2), **kwargs)\n    model_args = dict(embed_dim=384, depth=12, num_heads=6)\n    model = _create_vision_transformer_hybrid(\n        'vit_small_r26_s32_224', backbone=backbone, pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_small_r26_s32_384(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" R26+ViT-S/S32 hybrid.\n    \"\"\"\n    backbone = _resnetv2((2, 2, 2, 2), **kwargs)\n    model_args = dict(embed_dim=384, depth=12, num_heads=6)\n    model = _create_vision_transformer_hybrid(\n        'vit_small_r26_s32_384', backbone=backbone, pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_r26_s32_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" R26+ViT-B/S32 hybrid.\n    \"\"\"\n    backbone = _resnetv2((2, 2, 2, 2), **kwargs)\n    model_args = dict(embed_dim=768, depth=12, num_heads=12)\n    model = _create_vision_transformer_hybrid(\n        'vit_base_r26_s32_224', backbone=backbone, pretrained=pretrained, 
**dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_r50_s16_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" R50+ViT-B/S16 hybrid from original paper (https://arxiv.org/abs/2010.11929).\n    \"\"\"\n    backbone = _resnetv2((3, 4, 9), **kwargs)\n    model_args = dict(embed_dim=768, depth=12, num_heads=12)\n    model = _create_vision_transformer_hybrid(\n        'vit_base_r50_s16_224', backbone=backbone, pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_r50_s16_384(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" R50+ViT-B/16 hybrid from original paper (https://arxiv.org/abs/2010.11929).\n    ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer.\n    \"\"\"\n    backbone = _resnetv2((3, 4, 9), **kwargs)\n    model_args = dict(embed_dim=768, depth=12, num_heads=12)\n    model = _create_vision_transformer_hybrid(\n        'vit_base_r50_s16_384', backbone=backbone, pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_r50_s32_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" R50+ViT-L/S32 hybrid.\n    \"\"\"\n    backbone = _resnetv2((3, 4, 6, 3), **kwargs)\n    model_args = dict(embed_dim=1024, depth=24, num_heads=16)\n    model = _create_vision_transformer_hybrid(\n        'vit_large_r50_s32_224', backbone=backbone, pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_large_r50_s32_384(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" R50+ViT-L/S32 hybrid.\n    \"\"\"\n    backbone = _resnetv2((3, 4, 6, 3), **kwargs)\n    model_args = dict(embed_dim=1024, depth=24, num_heads=16)\n    model = _create_vision_transformer_hybrid(\n        'vit_large_r50_s32_384', backbone=backbone, pretrained=pretrained, **dict(model_args, **kwargs))\n    return 
model\n\n\n@register_model\ndef vit_small_resnet26d_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" Custom ViT small hybrid w/ ResNet26D stride 32. No pretrained weights.\n    \"\"\"\n    backbone = resnet26d(\n        pretrained=pretrained,\n        in_chans=kwargs.get('in_chans', 3),\n        features_only=True,\n        out_indices=[4],\n        **_dd_from_kwargs(**kwargs),\n    )\n    model_args = dict(embed_dim=768, depth=8, num_heads=8, mlp_ratio=3)\n    model = _create_vision_transformer_hybrid(\n        'vit_small_resnet26d_224', backbone=backbone, pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_small_resnet50d_s16_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" Custom ViT small hybrid w/ ResNet50D 3-stages, stride 16. No pretrained weights.\n    \"\"\"\n    backbone = resnet50d(\n        pretrained=pretrained,\n        in_chans=kwargs.get('in_chans', 3),\n        features_only=True,\n        out_indices=[3],\n        **_dd_from_kwargs(**kwargs),\n    )\n    model_args = dict(embed_dim=768, depth=8, num_heads=8, mlp_ratio=3)\n    model = _create_vision_transformer_hybrid(\n        'vit_small_resnet50d_s16_224', backbone=backbone, pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_resnet26d_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" Custom ViT base hybrid w/ ResNet26D stride 32. 
No pretrained weights.\n    \"\"\"\n    backbone = resnet26d(\n        pretrained=pretrained,\n        in_chans=kwargs.get('in_chans', 3),\n        features_only=True,\n        out_indices=[4],\n        **_dd_from_kwargs(**kwargs),\n    )\n    model_args = dict(embed_dim=768, depth=12, num_heads=12)\n    model = _create_vision_transformer_hybrid(\n        'vit_base_resnet26d_224', backbone=backbone, pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_resnet50d_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" Custom ViT base hybrid w/ ResNet50D stride 32. No pretrained weights.\n    \"\"\"\n    backbone = resnet50d(\n        pretrained=pretrained,\n        in_chans=kwargs.get('in_chans', 3),\n        features_only=True,\n        out_indices=[4],\n        **_dd_from_kwargs(**kwargs),\n    )\n    model_args = dict(embed_dim=768, depth=12, num_heads=12)\n    model = _create_vision_transformer_hybrid(\n        'vit_base_resnet50d_224', backbone=backbone, pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_base_mci_224(pretrained=False, **kwargs) -> VisionTransformer:\n    \"\"\" Custom ViT base hybrid w/ ResNet50D stride 32. 
No pretrained weights.\n    \"\"\"\n    backbone = ConvStem(\n        channels=(768//4, 768//4, 768),\n        stride=(4, 2, 2),\n        kernel_size=(4, 2, 2),\n        padding=0,\n        in_chans=kwargs.get('in_chans', 3),\n        act_layer=nn.GELU,\n        **_dd_from_kwargs(**kwargs),\n    )\n    model_args = dict(embed_dim=768, depth=12, num_heads=12, no_embed_class=True)\n    model = _create_vision_transformer_hybrid(\n        'vit_base_mci_224', backbone=backbone, embed_args=dict(proj=False),\n        pretrained=pretrained, **dict(model_args, **kwargs)\n    )\n    return model\n\n\nregister_model_deprecations(__name__, {\n    'vit_tiny_r_s16_p8_224_in21k': 'vit_tiny_r_s16_p8_224.augreg_in21k',\n    'vit_small_r26_s32_224_in21k': 'vit_small_r26_s32_224.augreg_in21k',\n    'vit_base_r50_s16_224_in21k': 'vit_base_r50_s16_224.orig_in21k',\n    'vit_base_resnet50_224_in21k': 'vit_base_r50_s16_224.orig_in21k',\n    'vit_large_r50_s32_224_in21k': 'vit_large_r50_s32_224.augreg_in21k',\n    'vit_base_resnet50_384': 'vit_base_r50_s16_384.orig_in21k_ft_in1k'\n})\n"
  },
  {
    "path": "timm/models/vision_transformer_relpos.py",
    "content": "\"\"\" Relative Position Vision Transformer (ViT) in PyTorch\n\nNOTE: these models are experimental / WIP, expect changes\n\nHacked together by / Copyright 2022, Ross Wightman\n\"\"\"\nimport logging\nimport math\nfrom functools import partial\nfrom typing import List, Optional, Tuple, Type, Union\n\ntry:\n    from typing import Literal\nexcept ImportError:\n    from typing_extensions import Literal\n\nimport torch\nimport torch.nn as nn\nfrom torch.jit import Final\n\nfrom timm.data import IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD\nfrom timm.layers import (\n    PatchEmbed,\n    Mlp,\n    LayerScale,\n    DropPath,\n    calculate_drop_path_rates,\n    RelPosMlp,\n    RelPosBias,\n    use_fused_attn,\n    LayerType,\n)\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import named_apply, checkpoint\nfrom ._registry import generate_default_cfgs, register_model\nfrom .vision_transformer import get_init_weights_vit\n\n__all__ = ['VisionTransformerRelPos']  # model_registry will add each entrypoint fn to this\n\n_logger = logging.getLogger(__name__)\n\n\nclass RelPosAttention(nn.Module):\n    fused_attn: Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = False,\n            qk_norm: bool = False,\n            rel_pos_cls: Optional[Type[nn.Module]] = None,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert dim % num_heads == 0, 'dim should be divisible by num_heads'\n        self.num_heads = num_heads\n        self.head_dim = dim // num_heads\n        self.scale = self.head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n\n        self.qkv = nn.Linear(dim, dim * 3, 
bias=qkv_bias, **dd)\n        self.q_norm = norm_layer(self.head_dim, **dd) if qk_norm else nn.Identity()\n        self.k_norm = norm_layer(self.head_dim, **dd) if qk_norm else nn.Identity()\n        self.rel_pos = rel_pos_cls(num_heads=num_heads, **dd) if rel_pos_cls else None\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n    def forward(self, x, shared_rel_pos: Optional[torch.Tensor] = None):\n        B, N, C = x.shape\n        qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)\n        q, k, v = qkv.unbind(0)\n        q = self.q_norm(q)\n        k = self.k_norm(k)\n\n        if self.fused_attn:\n            if self.rel_pos is not None:\n                attn_bias = self.rel_pos.get_bias()\n            elif shared_rel_pos is not None:\n                attn_bias = shared_rel_pos\n            else:\n                attn_bias = None\n\n            x = torch.nn.functional.scaled_dot_product_attention(\n                q, k, v,\n                attn_mask=attn_bias,\n                dropout_p=self.attn_drop.p if self.training else 0.,\n            )\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            if self.rel_pos is not None:\n                attn = self.rel_pos(attn, shared_rel_pos=shared_rel_pos)\n            elif shared_rel_pos is not None:\n                attn = attn + shared_rel_pos\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x = attn @ v\n\n        x = x.transpose(1, 2).reshape(B, N, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n\nclass RelPosBlock(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            qk_norm: bool = False,\n            rel_pos_cls: 
Optional[Type[nn.Module]] = None,\n            init_values: Optional[float] = None,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = RelPosAttention(\n            dim,\n            num_heads,\n            qkv_bias=qkv_bias,\n            qk_norm=qk_norm,\n            rel_pos_cls=rel_pos_cls,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            norm_layer=norm_layer,\n            **dd,\n        )\n        self.ls1 = LayerScale(dim, init_values=init_values, **dd) if init_values else nn.Identity()\n        # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp = Mlp(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n        self.ls2 = LayerScale(dim, init_values=init_values, **dd) if init_values else nn.Identity()\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n    def forward(self, x, shared_rel_pos: Optional[torch.Tensor] = None):\n        x = x + self.drop_path1(self.ls1(self.attn(self.norm1(x), shared_rel_pos=shared_rel_pos)))\n        x = x + self.drop_path2(self.ls2(self.mlp(self.norm2(x))))\n        return x\n\n\nclass ResPostRelPosBlock(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            qk_norm: bool = False,\n            rel_pos_cls: Optional[Type[nn.Module]] = None,\n            init_values: Optional[float] = None,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.init_values = init_values\n\n        self.attn = RelPosAttention(\n            dim,\n            num_heads,\n            qkv_bias=qkv_bias,\n            qk_norm=qk_norm,\n            rel_pos_cls=rel_pos_cls,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            norm_layer=norm_layer,\n            **dd,\n        )\n        self.norm1 = norm_layer(dim, **dd)\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.mlp = Mlp(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n        self.norm2 = norm_layer(dim, **dd)\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n        self.init_weights()\n\n    def init_weights(self):\n        # NOTE this init overrides that base model init with specific changes for the block type\n        if self.init_values is not None:\n            nn.init.constant_(self.norm1.weight, self.init_values)\n            nn.init.constant_(self.norm2.weight, self.init_values)\n\n    def forward(self, x, shared_rel_pos: Optional[torch.Tensor] = None):\n        x = x + self.drop_path1(self.norm1(self.attn(x, shared_rel_pos=shared_rel_pos)))\n        x = x + self.drop_path2(self.norm2(self.mlp(x)))\n        return x\n\n\nclass VisionTransformerRelPos(nn.Module):\n    \"\"\" Vision Transformer w/ Relative Position Bias\n\n    Differing from classic vit, this impl\n      * uses relative position index (swin v1 / beit) or relative log coord + mlp (swin v2) pos embed\n      * defaults to no class token (can be enabled)\n      * defaults to global avg pool for head (can be changed)\n      * layer-scale (residual branch gain) enabled\n    \"\"\"\n\n    def __init__(\n            self,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            patch_size: Union[int, Tuple[int, int]] = 16,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: Literal['', 'avg', 'token', 'map'] = 'avg',\n            embed_dim: int = 768,\n            depth: int = 12,\n            num_heads: int = 12,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            qk_norm: bool = False,\n            init_values: Optional[float] = 1e-6,\n            class_token: bool = False,\n            fc_norm: bool = False,\n            rel_pos_type: str = 'mlp',\n            rel_pos_dim: Optional[int] = None,\n            shared_rel_pos: bool = False,\n            drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            weight_init: Literal['skip', 
'reset', 'jax', 'moco', ''] = 'reset',\n            fix_init: bool = False,\n            embed_layer: Type[nn.Module] = PatchEmbed,\n            norm_layer: Optional[LayerType] = None,\n            act_layer: Optional[LayerType] = None,\n            block_fn: Type[nn.Module] = RelPosBlock,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            img_size: input image size\n            patch_size: patch size\n            in_chans: number of input channels\n            num_classes: number of classes for classification head\n            global_pool: type of global pooling for final sequence (default: 'avg')\n            embed_dim: embedding dimension\n            depth: depth of transformer\n            num_heads: number of attention heads\n            mlp_ratio: ratio of mlp hidden dim to embedding dim\n            qkv_bias: enable bias for qkv if True\n            qk_norm: Enable normalization of query and key in attention\n            init_values: layer-scale init values\n            class_token: use class token (default: False)\n            fc_norm: use pre classifier norm instead of pre-pool\n            rel_pos_type: type of relative position\n            shared_rel_pos: share relative pos across all blocks\n            drop_rate: dropout rate\n            proj_drop_rate: projection dropout rate\n            attn_drop_rate: attention dropout rate\n            drop_path_rate: stochastic depth rate\n            weight_init: weight init scheme\n            fix_init: apply weight initialization fix (scaling w/ layer index)\n            embed_layer: patch embedding layer\n            norm_layer: normalization layer\n            act_layer: MLP activation layer\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert global_pool in ('', 'avg', 'token')\n        assert class_token or global_pool != 'token'\n        norm_layer = norm_layer or partial(nn.LayerNorm, 
eps=1e-6)\n        act_layer = act_layer or nn.GELU\n\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.num_features = self.head_hidden_size = self.embed_dim = embed_dim  # for consistency with other models\n        self.num_prefix_tokens = 1 if class_token else 0\n        self.grad_checkpointing = False\n\n        self.patch_embed = embed_layer(\n            img_size=img_size,\n            patch_size=patch_size,\n            in_chans=in_chans,\n            embed_dim=embed_dim,\n            **dd,\n        )\n        feat_size = self.patch_embed.grid_size\n        r = self.patch_embed.feat_ratio() if hasattr(self.patch_embed, 'feat_ratio') else patch_size\n\n        rel_pos_args = dict(window_size=feat_size, prefix_tokens=self.num_prefix_tokens)\n        if rel_pos_type.startswith('mlp'):\n            if rel_pos_dim:\n                rel_pos_args['hidden_dim'] = rel_pos_dim\n            if 'swin' in rel_pos_type:\n                rel_pos_args['mode'] = 'swin'\n            rel_pos_cls = partial(RelPosMlp, **rel_pos_args)\n        else:\n            rel_pos_cls = partial(RelPosBias, **rel_pos_args)\n        self.shared_rel_pos = None\n        if shared_rel_pos:\n            self.shared_rel_pos = rel_pos_cls(num_heads=num_heads, **dd)\n            # NOTE shared rel pos currently mutually exclusive w/ per-block, but could support both...\n            rel_pos_cls = None\n\n        self.cls_token = nn.Parameter(torch.zeros(1, self.num_prefix_tokens, embed_dim, **dd)) if class_token else None\n\n        dpr = calculate_drop_path_rates(drop_path_rate, depth)  # stochastic depth decay rule\n        self.blocks = nn.ModuleList([\n            block_fn(\n                dim=embed_dim,\n                num_heads=num_heads,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                qk_norm=qk_norm,\n                rel_pos_cls=rel_pos_cls,\n                
init_values=init_values,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dpr[i],\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                **dd,\n            )\n            for i in range(depth)])\n        self.feature_info = [\n            dict(module=f'blocks.{i}', num_chs=embed_dim, reduction=r) for i in range(depth)]\n        self.norm = norm_layer(embed_dim, **dd) if not fc_norm else nn.Identity()\n\n        # Classifier Head\n        self.fc_norm = norm_layer(embed_dim, **dd) if fc_norm else nn.Identity()\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = nn.Linear(self.embed_dim, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n        self.weight_init_mode = 'reset' if weight_init == 'skip' else weight_init\n        self.fix_init = fix_init\n        # TODO: skip init when on meta device when safe to do so\n        if weight_init != 'skip':\n            self.init_weights(needs_reset=False)\n\n    def fix_init_weight(self) -> None:\n        \"\"\"Apply weight initialization fix (scaling w/ layer index).\"\"\"\n        with torch.no_grad():\n            for layer_id, layer in enumerate(self.blocks):\n                scale = math.sqrt(2.0 * (layer_id + 1))\n                layer.attn.proj.weight.div_(scale)\n                layer.mlp.fc2.weight.div_(scale)\n\n    def init_weights(self, mode: str = '', needs_reset: bool = True) -> None:\n        \"\"\"Initialize model weights.\n\n        Args:\n            mode: Weight initialization mode ('jax', 'jax_nlhb', 'moco', or '').\n            needs_reset: If True, call reset_parameters() on modules (default for after to_empty()).\n                If False, skip reset_parameters() (for __init__ where modules already self-initialized).\n        \"\"\"\n        mode = mode or self.weight_init_mode\n        assert mode in ('jax', 'jax_nlhb', 'moco', 'reset', '')\n        head_bias = 
-math.log(self.num_classes) if 'nlhb' in mode else 0.\n        if self.cls_token is not None:\n            nn.init.normal_(self.cls_token, std=1e-6)\n\n        named_apply(get_init_weights_vit(mode, head_bias, needs_reset=needs_reset), self)\n\n        if self.fix_init:\n            self.fix_init_weight()\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {'cls_token'}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^cls_token|patch_embed',  # stem and embed\n            blocks=[(r'^blocks\\.(\\d+)', None), (r'^norm', (99999,))]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None, device=None, dtype=None):\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        if global_pool is not None:\n            assert global_pool in ('', 'avg', 'token')\n            self.global_pool = global_pool\n        self.head = nn.Linear(self.embed_dim, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            return_prefix_tokens: bool = False,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            return_prefix_tokens: Return both prefix and spatial 
intermediate tokens\n            norm: Apply norm layer to all intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW', 'NLC'), 'Output format must be one of NCHW or NLC.'\n        reshape = output_fmt == 'NCHW'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n\n        # forward pass\n        B, _, height, width = x.shape\n        x = self.patch_embed(x)\n        if self.cls_token is not None:\n            x = torch.cat((self.cls_token.expand(x.shape[0], -1, -1), x), dim=1)\n\n        shared_rel_pos = self.shared_rel_pos.get_bias() if self.shared_rel_pos is not None else None\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            blocks = self.blocks\n        else:\n            blocks = self.blocks[:max_index + 1]\n        for i, blk in enumerate(blocks):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x, shared_rel_pos=shared_rel_pos)\n            else:\n                x = blk(x, shared_rel_pos=shared_rel_pos)\n            if i in take_indices:\n                # normalize intermediates with final norm layer if enabled\n                intermediates.append(self.norm(x) if norm else x)\n\n        # process intermediates\n        if self.num_prefix_tokens:\n            # split prefix (e.g. 
class, distill) and spatial feature tokens\n            prefix_tokens = [y[:, 0:self.num_prefix_tokens] for y in intermediates]\n            intermediates = [y[:, self.num_prefix_tokens:] for y in intermediates]\n        if reshape:\n            # reshape to BCHW output format\n            H, W = self.patch_embed.dynamic_feat_size((height, width))\n            intermediates = [y.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous() for y in intermediates]\n        if not torch.jit.is_scripting() and return_prefix_tokens:\n            # return_prefix not support in torchscript due to poor type handling\n            intermediates = list(zip(intermediates, prefix_tokens))\n\n        if intermediates_only:\n            return intermediates\n\n        x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n        self.blocks = self.blocks[:max_index + 1]  # truncate blocks\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.fc_norm = nn.Identity()\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.patch_embed(x)\n        if self.cls_token is not None:\n            x = torch.cat((self.cls_token.expand(x.shape[0], -1, -1), x), dim=1)\n\n        shared_rel_pos = self.shared_rel_pos.get_bias() if self.shared_rel_pos is not None else None\n        for blk in self.blocks:\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x, shared_rel_pos=shared_rel_pos)\n            else:\n                x = blk(x, shared_rel_pos=shared_rel_pos)\n      
  x = self.norm(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        if self.global_pool:\n            x = x[:, self.num_prefix_tokens:].mean(dim=1) if self.global_pool == 'avg' else x[:, 0]\n        x = self.fc_norm(x)\n        x = self.head_drop(x)\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_vision_transformer_relpos(variant, pretrained=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', 3)\n    model = build_model_with_cfg(\n        VisionTransformerRelPos, variant, pretrained,\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_INCEPTION_MEAN, 'std': IMAGENET_INCEPTION_STD,\n        'first_conv': 'patch_embed.proj', 'classifier': 'head',\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'vit_relpos_base_patch32_plus_rpn_256.sw_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_replos_base_patch32_plus_rpn_256-sw-dd486f51.pth',\n        hf_hub_id='timm/',\n        input_size=(3, 256, 256)),\n    'vit_relpos_base_patch16_plus_240.untrained': _cfg(url='', input_size=(3, 240, 240)),\n\n    'vit_relpos_small_patch16_224.sw_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_small_patch16_224-sw-ec2778b4.pth',\n        hf_hub_id='timm/'),\n    'vit_relpos_medium_patch16_224.sw_in1k': _cfg(\n        
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_medium_patch16_224-sw-11c174af.pth',\n        hf_hub_id='timm/'),\n    'vit_relpos_base_patch16_224.sw_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_base_patch16_224-sw-49049aed.pth',\n        hf_hub_id='timm/'),\n\n    'vit_srelpos_small_patch16_224.sw_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_srelpos_small_patch16_224-sw-6cdb8849.pth',\n        hf_hub_id='timm/'),\n    'vit_srelpos_medium_patch16_224.sw_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_srelpos_medium_patch16_224-sw-ad702b8c.pth',\n        hf_hub_id='timm/'),\n\n    'vit_relpos_medium_patch16_cls_224.sw_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_medium_patch16_cls_224-sw-cfe8e259.pth',\n        hf_hub_id='timm/'),\n    'vit_relpos_base_patch16_cls_224.untrained': _cfg(),\n    'vit_relpos_base_patch16_clsgap_224.sw_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_base_patch16_gapcls_224-sw-1a341d6c.pth',\n        hf_hub_id='timm/'),\n\n    'vit_relpos_small_patch16_rpn_224.untrained': _cfg(),\n    'vit_relpos_medium_patch16_rpn_224.sw_in1k': _cfg(\n        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_medium_patch16_rpn_224-sw-5d2befd8.pth',\n        hf_hub_id='timm/'),\n    'vit_relpos_base_patch16_rpn_224.untrained': _cfg(),\n})\n\n\n@register_model\ndef vit_relpos_base_patch32_plus_rpn_256(pretrained=False, **kwargs) -> VisionTransformerRelPos:\n    \"\"\" ViT-Base (ViT-B/32+) w/ relative log-coord position and residual post-norm, no class token\n    \"\"\"\n    model_args = 
dict(patch_size=32, embed_dim=896, depth=12, num_heads=14, block_fn=ResPostRelPosBlock)\n    model = _create_vision_transformer_relpos(\n        'vit_relpos_base_patch32_plus_rpn_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_relpos_base_patch16_plus_240(pretrained=False, **kwargs) -> VisionTransformerRelPos:\n    \"\"\" ViT-Base (ViT-B/16+) w/ relative log-coord position, no class token\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=896, depth=12, num_heads=14)\n    model = _create_vision_transformer_relpos(\n        'vit_relpos_base_patch16_plus_240', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_relpos_small_patch16_224(pretrained=False, **kwargs) -> VisionTransformerRelPos:\n    \"\"\" ViT-Base (ViT-B/16) w/ relative log-coord position, no class token\n    \"\"\"\n    model_args = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, qkv_bias=False, fc_norm=True)\n    model = _create_vision_transformer_relpos(\n        'vit_relpos_small_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_relpos_medium_patch16_224(pretrained=False, **kwargs) -> VisionTransformerRelPos:\n    \"\"\" ViT-Base (ViT-B/16) w/ relative log-coord position, no class token\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=512, depth=12, num_heads=8, qkv_bias=False, fc_norm=True)\n    model = _create_vision_transformer_relpos(\n        'vit_relpos_medium_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_relpos_base_patch16_224(pretrained=False, **kwargs) -> VisionTransformerRelPos:\n    \"\"\" ViT-Base (ViT-B/16) w/ relative log-coord position, no class token\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, fc_norm=True)\n    model = 
_create_vision_transformer_relpos(\n        'vit_relpos_base_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_srelpos_small_patch16_224(pretrained=False, **kwargs) -> VisionTransformerRelPos:\n    \"\"\" ViT-Base (ViT-B/16) w/ shared relative log-coord position, no class token\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=384, depth=12, num_heads=6, qkv_bias=False, fc_norm=False,\n        rel_pos_dim=384, shared_rel_pos=True)\n    model = _create_vision_transformer_relpos(\n        'vit_srelpos_small_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_srelpos_medium_patch16_224(pretrained=False, **kwargs) -> VisionTransformerRelPos:\n    \"\"\" ViT-Base (ViT-B/16) w/ shared relative log-coord position, no class token\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=512, depth=12, num_heads=8, qkv_bias=False, fc_norm=False,\n        rel_pos_dim=512, shared_rel_pos=True)\n    model = _create_vision_transformer_relpos(\n        'vit_srelpos_medium_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_relpos_medium_patch16_cls_224(pretrained=False, **kwargs) -> VisionTransformerRelPos:\n    \"\"\" ViT-Base (ViT-M/16) w/ relative log-coord position, class token present\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=512, depth=12, num_heads=8, qkv_bias=False, fc_norm=False,\n        rel_pos_dim=256, class_token=True, global_pool='token')\n    model = _create_vision_transformer_relpos(\n        'vit_relpos_medium_patch16_cls_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_relpos_base_patch16_cls_224(pretrained=False, **kwargs) -> VisionTransformerRelPos:\n    \"\"\" ViT-Base (ViT-B/16) w/ relative log-coord position, class token present\n    \"\"\"\n    
model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, class_token=True, global_pool='token')\n    model = _create_vision_transformer_relpos(\n        'vit_relpos_base_patch16_cls_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_relpos_base_patch16_clsgap_224(pretrained=False, **kwargs) -> VisionTransformerRelPos:\n    \"\"\" ViT-Base (ViT-B/16) w/ relative log-coord position, class token present\n    NOTE this config is a bit of a mistake, class token was enabled but global avg-pool w/ fc-norm was not disabled\n    Leaving here for comparisons w/ a future re-train as it performs quite well.\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, fc_norm=True, class_token=True)\n    model = _create_vision_transformer_relpos(\n        'vit_relpos_base_patch16_clsgap_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_relpos_small_patch16_rpn_224(pretrained=False, **kwargs) -> VisionTransformerRelPos:\n    \"\"\" ViT-Base (ViT-B/16) w/ relative log-coord position and residual post-norm, no class token\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=384, depth=12, num_heads=6, qkv_bias=False, block_fn=ResPostRelPosBlock)\n    model = _create_vision_transformer_relpos(\n        'vit_relpos_small_patch16_rpn_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_relpos_medium_patch16_rpn_224(pretrained=False, **kwargs) -> VisionTransformerRelPos:\n    \"\"\" ViT-Base (ViT-B/16) w/ relative log-coord position and residual post-norm, no class token\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=512, depth=12, num_heads=8, qkv_bias=False, block_fn=ResPostRelPosBlock)\n    model = _create_vision_transformer_relpos(\n        'vit_relpos_medium_patch16_rpn_224', 
pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vit_relpos_base_patch16_rpn_224(pretrained=False, **kwargs) -> VisionTransformerRelPos:\n    \"\"\" ViT-Base (ViT-B/16) w/ relative log-coord position and residual post-norm, no class token\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, block_fn=ResPostRelPosBlock)\n    model = _create_vision_transformer_relpos(\n        'vit_relpos_base_patch16_rpn_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n"
  },
  {
    "path": "timm/models/vision_transformer_sam.py",
    "content": "\"\"\" Vision Transformer (ViT) in PyTorch\n\nA PyTorch implement of Vision Transformers as described in:\n\n'Exploring Plain Vision Transformer Backbones for Object Detection'\n    - https://arxiv.org/abs/2203.16527\n\n'Segment Anything Model (SAM)'\n    - https://github.com/facebookresearch/segment-anything/\n\n\"\"\"\nimport logging\nfrom functools import partial\nfrom typing import Callable, List, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD\nfrom timm.layers import (\n    PatchEmbed,\n    Mlp,\n    DropPath,\n    calculate_drop_path_rates,\n    PatchDropout,\n    LayerNorm2d,\n    LayerScale,\n    ClassifierHead,\n    NormMlpClassifierHead,\n    Format,\n    resample_abs_pos_embed_nhwc,\n    RotaryEmbeddingCat,\n    apply_rot_embed_cat,\n    to_2tuple,\n    use_fused_attn,\n)\nfrom torch.jit import Final\n\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_function\nfrom ._manipulate import checkpoint, checkpoint_seq\nfrom ._registry import generate_default_cfgs, register_model\n\n# model_registry will add each entrypoint fn to this\n__all__ = ['VisionTransformerSAM']\n\n\n_logger = logging.getLogger(__name__)\n\n\ndef get_rel_pos(q_size: int, k_size: int, rel_pos: torch.Tensor) -> torch.Tensor:\n    \"\"\"\n    Get relative positional embeddings according to the relative positions of\n        query and key sizes.\n    Args:\n        q_size (int): size of query q.\n        k_size (int): size of key k.\n        rel_pos (Tensor): relative position embeddings (L, C).\n\n    Returns:\n        Extracted positional embeddings according to relative positions.\n    \"\"\"\n    max_rel_dist = int(2 * max(q_size, k_size) - 1)\n    # Interpolate rel pos if needed.\n    if rel_pos.shape[0] != 
max_rel_dist:\n        # Interpolate rel pos.\n        rel_pos_resized = F.interpolate(\n            rel_pos.reshape(1, rel_pos.shape[0], -1).permute(0, 2, 1),\n            size=max_rel_dist,\n            mode=\"linear\",\n        )\n        rel_pos_resized = rel_pos_resized.reshape(-1, max_rel_dist).permute(1, 0)\n    else:\n        rel_pos_resized = rel_pos\n\n    # Scale the coords with short length if shapes for q and k are different.\n    q_coords = torch.arange(q_size, dtype=torch.float32)[:, None] * max(k_size / q_size, 1.0)\n    k_coords = torch.arange(k_size, dtype=torch.float32)[None, :] * max(q_size / k_size, 1.0)\n    relative_coords = (q_coords - k_coords) + (k_size - 1) * max(q_size / k_size, 1.0)\n\n    return rel_pos_resized[relative_coords.long()]\n\nregister_notrace_function(get_rel_pos)\n\n\ndef get_decomposed_rel_pos_bias(\n        q: torch.Tensor,\n        rel_pos_h: torch.Tensor,\n        rel_pos_w: torch.Tensor,\n        q_size: Tuple[int, int],\n        k_size: Tuple[int, int],\n) -> torch.Tensor:\n    \"\"\"\n    Calculate decomposed Relative Positional Embeddings from :paper:`mvitv2`.\n    https://github.com/facebookresearch/mvit/blob/19786631e330df9f3622e5402b4a419a263a2c80/mvit/models/attention.py\n    Args:\n        q (Tensor): query q in the attention layer with shape (B, q_h * q_w, C).\n        rel_pos_h (Tensor): relative position embeddings (Lh, C) for height axis.\n        rel_pos_w (Tensor): relative position embeddings (Lw, C) for width axis.\n        q_size (Tuple): spatial sequence size of query q with (q_h, q_w).\n        k_size (Tuple): spatial sequence size of key k with (k_h, k_w).\n\n    Returns:\n        bias (Tensor): attention bias to add to attention map\n    \"\"\"\n    q_h, q_w = q_size\n    k_h, k_w = k_size\n    Rh = get_rel_pos(q_h, k_h, rel_pos_h)\n    Rw = get_rel_pos(q_w, k_w, rel_pos_w)\n\n    B, _, dim = q.shape\n    r_q = q.reshape(B, q_h, q_w, dim)\n    rel_h = torch.einsum(\"bhwc,hkc->bhwk\", r_q, Rh)\n    
rel_w = torch.einsum(\"bhwc,wkc->bhwk\", r_q, Rw)\n\n    attn_bias = rel_h[:, :, :, :, None] + rel_w[:, :, :, None, :]\n    return attn_bias.reshape(-1, q_h * q_w, k_h * k_w)\n\n\nclass Attention(nn.Module):\n    fused_attn: Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = True,\n            qk_norm: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            use_rel_pos: bool = False,\n            input_size: Optional[Tuple[int, int]] = None,\n            rope: Optional[nn.Module] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert dim % num_heads == 0, 'dim should be divisible by num_heads'\n        self.num_heads = num_heads\n        self.head_dim = dim // num_heads\n        self.scale = self.head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n\n        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias, **dd)\n        self.q_norm = norm_layer(self.head_dim, **dd) if qk_norm else nn.Identity()\n        self.k_norm = norm_layer(self.head_dim, **dd) if qk_norm else nn.Identity()\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n        self.use_rel_pos = use_rel_pos\n        if self.use_rel_pos:\n            assert rope is None\n            assert (\n                input_size is not None\n            ), \"Input size must be provided if using relative positional encoding.\"\n            # initialize relative positional embeddings\n            self.rel_pos_h = nn.Parameter(torch.zeros(2 * input_size[0] - 1, self.head_dim, **dd))\n            self.rel_pos_w = nn.Parameter(torch.zeros(2 * input_size[1] - 1, self.head_dim, **dd))\n        self.rope = rope\n\n    def forward(self, 
x):\n        B, H, W, _ = x.shape\n        N = H * W\n        x = x.reshape(B, N, -1)\n        qkv = self.qkv(x).view(B, N, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4)\n        # qkv with shape (3, B, nHead, H * W, C)\n        q, k, v = qkv.reshape(3, B * self.num_heads, N, -1).unbind(0)\n        # q, k, v with shape (B * nHead, H * W, C)\n        q, k = self.q_norm(q), self.k_norm(k)\n\n        if self.use_rel_pos:\n            attn_bias = get_decomposed_rel_pos_bias(q, self.rel_pos_h, self.rel_pos_w, (H, W), (H, W))\n        else:\n            attn_bias = None\n            if self.rope is not None:\n                rope = self.rope.get_embed()\n                q = apply_rot_embed_cat(q, rope).type_as(v)\n                k = apply_rot_embed_cat(k, rope).type_as(v)\n\n        if self.fused_attn:\n            x = torch.nn.functional.scaled_dot_product_attention(\n                q, k, v,\n                attn_mask=attn_bias,\n                dropout_p=self.attn_drop.p if self.training else 0.,\n            )\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            if attn_bias is not None:\n                attn = attn + attn_bias\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x = attn @ v\n\n        x = x.view(B, self.num_heads, N, -1).transpose(1, 2).reshape(B, N, -1)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        x = x.view(B, H, W, -1)\n        return x\n\n\nclass Block(nn.Module):\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            qk_norm: bool = False,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            init_values: Optional[float] = None,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n          
  mlp_layer: Type[nn.Module] = Mlp,\n            use_rel_pos: bool = False,\n            window_size: int = 0,\n            input_size=None,\n            rope=None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.window_size = window_size\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = Attention(\n            dim,\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            qk_norm=qk_norm,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            norm_layer=norm_layer,\n            use_rel_pos=use_rel_pos,\n            input_size=input_size if window_size == 0 else (window_size, window_size),\n            rope=rope,\n            **dd,\n        )\n        self.ls1 = LayerScale(dim, init_values=init_values, **dd) if init_values else nn.Identity()\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp = mlp_layer(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n        self.ls2 = LayerScale(dim, init_values=init_values, **dd) if init_values else nn.Identity()\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n    def forward(self, x):\n        B, H, W, _ = x.shape\n\n        shortcut = x\n        x = self.norm1(x)\n        # Window partition\n        pad_hw: Optional[Tuple[int, int]] = None\n        if self.window_size > 0:\n            x, pad_hw = window_partition(x, self.window_size)\n\n        x = self.drop_path1(self.ls1(self.attn(x)))\n\n        # Reverse window partition\n        if self.window_size > 0:\n            x = window_unpartition(x, self.window_size, (H, W), pad_hw)\n\n        x = shortcut + x\n\n        x = x.reshape(B, H * W, -1)  # MLP is faster for N, L, C tensor\n        x = x + self.drop_path2(self.ls2(self.mlp(self.norm2(x))))\n        x = x.reshape(B, H, W, -1)\n\n        return x\n\n\ndef window_partition(x: torch.Tensor, window_size: int) -> Tuple[torch.Tensor, Tuple[int, int]]:\n    \"\"\"\n    Partition into non-overlapping windows with padding if needed.\n    Args:\n        x (tensor): input tokens with [B, H, W, C].\n        window_size (int): window size.\n\n    Returns:\n        windows: windows after partition with [B * num_windows, window_size, window_size, C].\n        (Hp, Wp): padded height and width before partition\n    \"\"\"\n    B, H, W, C = x.shape\n\n    pad_h = (window_size - H % window_size) % window_size\n    pad_w = (window_size - W % window_size) % window_size\n    x = F.pad(x, (0, 0, 0, pad_w, 0, pad_h))\n    Hp, Wp = H + pad_h, W + pad_w\n\n    x = x.view(B, Hp // window_size, window_size, Wp // window_size, window_size, C)\n    windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C)\n    return windows, (Hp, Wp)\n\n\ndef window_unpartition(\n    windows: torch.Tensor, window_size: int, hw: Tuple[int, int], pad_hw: Optional[Tuple[int, int]] = None,\n) -> torch.Tensor:\n    \"\"\"\n    Window unpartition into original sequences and removing padding.\n    Args:\n        windows (tensor): input tokens with [B * num_windows, window_size, window_size, C].\n        
window_size (int): window size.\n        pad_hw (Tuple): padded height and width (Hp, Wp).\n        hw (Tuple): original height and width (H, W) before padding.\n\n    Returns:\n        x: unpartitioned sequences with [B, H, W, C].\n    \"\"\"\n    Hp, Wp = pad_hw if pad_hw is not None else hw\n    H, W = hw\n    B = windows.shape[0] // (Hp * Wp // window_size // window_size)\n    x = windows.view(B, Hp // window_size, Wp // window_size, window_size, window_size, -1)\n    x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, Hp, Wp, -1)\n    x = x[:, :H, :W, :].contiguous()\n    return x\n\n\nclass VisionTransformerSAM(nn.Module):\n    \"\"\" Vision Transformer for Segment-Anything Model(SAM)\n\n    A PyTorch impl of : `Exploring Plain Vision Transformer Backbones for Object Detection` or `Segment Anything Model (SAM)`\n        - https://arxiv.org/abs/2010.11929\n    \"\"\"\n\n    def __init__(\n            self,\n            img_size: int = 1024,\n            patch_size: int = 16,\n            in_chans: int = 3,\n            num_classes: int = 768,\n            embed_dim: int = 768,\n            depth: int = 12,\n            num_heads: int = 12,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            qk_norm: bool = False,\n            init_values: Optional[float] = None,\n            pre_norm: bool = False,\n            drop_rate: float = 0.,\n            pos_drop_rate: float = 0.,\n            patch_drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            weight_init: str = '',\n            embed_layer: Type[nn.Module] = partial(PatchEmbed, output_fmt=Format.NHWC, strict_img_size=False),\n            norm_layer: Optional[Type[nn.Module]] = nn.LayerNorm,\n            act_layer: Optional[Type[nn.Module]] = nn.GELU,\n            block_fn: Type[nn.Module] = Block,\n            mlp_layer: Type[nn.Module] = Mlp,\n            use_abs_pos: 
bool = True,\n            use_rel_pos: bool = False,\n            use_rope: bool = False,\n            window_size: int = 14,\n            global_attn_indexes: Tuple[int, ...] = (),\n            neck_chans: int = 256,\n            global_pool: str = 'avg',\n            head_hidden_size: Optional[int] = None,\n            ref_feat_shape: Optional[Tuple[Tuple[int, int], Tuple[int, int]]] = None,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            img_size: Input image size.\n            patch_size: Patch size.\n            in_chans: Number of image input channels.\n            num_classes: Number of classes for classification head.\n            global_pool: Type of global pooling for final sequence (default: 'token').\n            embed_dim: Transformer embedding dimension.\n            depth: Depth of transformer.\n            num_heads: Number of attention heads.\n            mlp_ratio: Ratio of mlp hidden dim to embedding dim.\n            qkv_bias: Enable bias for qkv projections if True.\n            init_values: Layer-scale init values (layer-scale enabled if not None).\n            drop_rate: Head dropout rate.\n            pos_drop_rate: Position embedding dropout rate.\n            attn_drop_rate: Attention dropout rate.\n            drop_path_rate: Stochastic depth rate.\n            weight_init: Weight initialization scheme.\n            embed_layer: Patch embedding layer.\n            norm_layer: Normalization layer.\n            act_layer: MLP activation layer.\n            block_fn: Transformer block layer.\n            use_abs_pos: If True, use absolute positional embeddings.\n            use_rel_pos: If True, add relative positional embeddings to the attention map.\n            use_rope: If True, add rotary position embeddings to q/k in attention block.\n            window_size: Window size for window attention blocks. 
If 0, not use window attention.\n            global_attn_indexes: Indexes for blocks using global attention. Used when window_size > 0.\n            global_pool: Global pooling type.\n            head_hidden_size: If set, use NormMlpHead\n            ref_feat_shape: Tuple of reference feature shapes for ROPE, (global, local)\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        norm_layer = norm_layer or partial(nn.LayerNorm, eps=1e-6)\n        act_layer = act_layer or nn.GELU\n\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.num_features = self.head_hidden_size = self.embed_dim = embed_dim  # for consistency with other models\n        self.grad_checkpointing = False\n\n        self.patch_embed = embed_layer(\n            img_size=img_size,\n            patch_size=patch_size,\n            in_chans=in_chans,\n            embed_dim=embed_dim,\n            bias=not pre_norm,  # disable bias if pre-norm is used\n            **dd,\n        )\n        grid_size = self.patch_embed.grid_size\n        r = self.patch_embed.feat_ratio() if hasattr(self.patch_embed, 'feat_ratio') else patch_size\n\n        if use_abs_pos:\n            # Initialize absolute positional embedding with pretrain image size.\n            self.pos_embed = nn.Parameter(torch.zeros(1, grid_size[0], grid_size[1], embed_dim, **dd))\n        else:\n            self.pos_embed = None\n        self.pos_drop = nn.Dropout(p=pos_drop_rate)\n        if patch_drop_rate > 0:\n            self.patch_drop = PatchDropout(\n                patch_drop_rate,\n                num_prefix_tokens=0,\n            )\n        else:\n            self.patch_drop = nn.Identity()\n        self.norm_pre = norm_layer(embed_dim, **dd) if pre_norm else nn.Identity()\n\n        if use_rope:\n            assert not use_rel_pos, \"ROPE and relative pos embeddings should not be enabled at same time\"\n            
if ref_feat_shape is not None:\n                assert len(ref_feat_shape) == 2\n                ref_feat_shape_global = to_2tuple(ref_feat_shape[0])\n                ref_feat_shape_window = to_2tuple(ref_feat_shape[1])\n            else:\n                ref_feat_shape_global = ref_feat_shape_window = None\n            self.rope_global = RotaryEmbeddingCat(\n                embed_dim // num_heads,\n                in_pixels=False,\n                feat_shape=grid_size,\n                ref_feat_shape=ref_feat_shape_global,\n            )\n            self.rope_window = RotaryEmbeddingCat(\n                embed_dim // num_heads,\n                in_pixels=False,\n                feat_shape=to_2tuple(window_size),\n                ref_feat_shape=ref_feat_shape_window,\n            )\n        else:\n            self.rope_global = None\n            self.rope_window = None\n\n        # stochastic depth decay rule\n        dpr = calculate_drop_path_rates(drop_path_rate, depth)\n        self.blocks = nn.Sequential(*[\n            block_fn(\n                dim=embed_dim,\n                num_heads=num_heads,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                qk_norm=qk_norm,\n                init_values=init_values,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=dpr[i],\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                mlp_layer=mlp_layer,\n                use_rel_pos=use_rel_pos,\n                window_size=window_size if i not in global_attn_indexes else 0,\n                input_size=grid_size,\n                rope=self.rope_window if i not in global_attn_indexes else self.rope_global,\n                **dd,\n            )\n            for i in range(depth)])\n        self.feature_info = [\n            dict(module=f'blocks.{i}', num_chs=embed_dim, reduction=r) for i in range(depth)]\n\n        if neck_chans:\n        
    self.neck = nn.Sequential(\n                nn.Conv2d(\n                    embed_dim,\n                    neck_chans,\n                    kernel_size=1,\n                    bias=False,\n                    **dd,\n                ),\n                LayerNorm2d(neck_chans, **dd),\n                nn.Conv2d(\n                    neck_chans,\n                    neck_chans,\n                    kernel_size=3,\n                    padding=1,\n                    bias=False,\n                    **dd,\n                ),\n                LayerNorm2d(neck_chans, **dd),\n            )\n            self.num_features = neck_chans\n        else:\n            if head_hidden_size:\n                self.neck = nn.Identity()\n            else:\n                # should have a final norm with standard ClassifierHead\n                self.neck = LayerNorm2d(embed_dim, **dd)\n            neck_chans = embed_dim\n\n        # Classifier Head\n        if head_hidden_size:\n            self.head = NormMlpClassifierHead(\n                neck_chans,\n                num_classes,\n                hidden_size=head_hidden_size,\n                pool_type=global_pool,\n                drop_rate=drop_rate,\n                **dd,\n            )\n        else:\n            self.head = ClassifierHead(\n                neck_chans,\n                num_classes,\n                pool_type=global_pool,\n                drop_rate=drop_rate,\n                **dd,\n            )\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {'pos_embed', 'dist_token'}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^pos_embed|patch_embed',  # stem and embed\n            blocks=[(r'^blocks\\.(\\d+)', None), (r'^norm', (99999,))]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> 
nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to all intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt == 'NCHW', 'Output shape for ViT-SAM must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n\n        # forward pass, collect intermediates\n        x = self.patch_embed(x)\n        if self.pos_embed is not None:\n            # dynamically resize abs pos embedding if needed\n            x = x + resample_abs_pos_embed_nhwc(self.pos_embed, x.shape[1:3])\n        x = self.pos_drop(x)\n        x = self.patch_drop(x)\n        x = self.norm_pre(x)\n\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            blocks = self.blocks\n        else:\n            blocks = self.blocks[:max_index + 1]\n        for i, blk in enumerate(blocks):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = 
checkpoint(blk, x)\n            else:\n                x = blk(x)\n            if i in take_indices:\n                # make output BCHW\n                if norm:\n                    # norm is intertwined with neck convs so apply both, changes the dim\n                    # FIXME only apply to final? Need experiments\n                    intermediates.append(self.neck(x.permute(0, 3, 1, 2)))\n                else:\n                    intermediates.append(x.permute(0, 3, 1, 2))\n\n        if intermediates_only:\n            return intermediates\n\n        x = self.neck(x.permute(0, 3, 1, 2))\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Optional[Union[int, List[int]]] = None,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n        self.blocks = self.blocks[:max_index + 1]  # truncate blocks\n        if prune_norm:\n            # neck is being treated as equivalent to final norm here\n            self.neck = nn.Identity()\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.patch_embed(x)\n        if self.pos_embed is not None:\n            # dynamically resize abs pos embedding if needed\n            x = x + resample_abs_pos_embed_nhwc(self.pos_embed, x.shape[1:3])\n        x = self.pos_drop(x)\n        x = self.patch_drop(x)\n        x = self.norm_pre(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        x = self.neck(x.permute(0, 3, 1, 2))\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        return self.head(x, pre_logits=True) if pre_logits 
else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(\n        state_dict,\n        model,\n):\n    \"\"\" Remap SAM checkpoints -> timm \"\"\"\n    sam_checkpoint = 'image_encoder.patch_embed.proj.weight' in state_dict\n    out_dict = {}\n    for k, v in state_dict.items():\n        if k.startswith('image_encoder.'):\n            k = k[14:]\n            k = k.replace('mlp.lin', 'mlp.fc')\n        else:\n            if sam_checkpoint:\n                continue\n        out_dict[k] = v\n    return out_dict\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 1024, 1024), 'pool_size': None,\n        'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_INCEPTION_MEAN, 'std': IMAGENET_INCEPTION_STD,\n        'first_conv': 'patch_embed.proj', 'classifier': 'head.fc',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n\n    # Segment-Anything Model (SAM) pretrained - https://github.com/facebookresearch/segment-anything (no classifier head, for fine-tune/features only)\n    'samvit_base_patch16.sa1b': _cfg(\n        url='https://dl.fbaipublicfiles.com/segment_anything/sam_vit_b_01ec64.pth',\n        hf_hub_id='timm/',\n        license='apache-2.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0,\n        input_size=(3, 1024, 1024), crop_pct=1.0),\n    'samvit_large_patch16.sa1b': _cfg(\n        url='https://dl.fbaipublicfiles.com/segment_anything/sam_vit_l_0b3195.pth',\n        hf_hub_id='timm/',\n        license='apache-2.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0,\n        input_size=(3, 1024, 1024), crop_pct=1.0),\n    'samvit_huge_patch16.sa1b': _cfg(\n        url='https://dl.fbaipublicfiles.com/segment_anything/sam_vit_h_4b8939.pth',\n        hf_hub_id='timm/',\n    
    license='apache-2.0',\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0,\n        input_size=(3, 1024, 1024), crop_pct=1.0),\n\n    'samvit_base_patch16_224': _cfg(\n        mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=1000,\n        input_size=(3, 224, 224), crop_pct=0.9),\n})\n\n\ndef _create_vision_transformer(variant, pretrained=False, **kwargs):\n    out_indices = kwargs.pop('out_indices', 3)\n    return build_model_with_cfg(\n        VisionTransformerSAM,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n\n\n@register_model\ndef samvit_base_patch16(pretrained=False, **kwargs) -> VisionTransformerSAM:\n    \"\"\" ViT-B/16 for Segment-Anything\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, global_attn_indexes=[2, 5, 8, 11],\n        window_size=14, use_rel_pos=True, img_size=1024,\n    )\n    model = _create_vision_transformer(\n        'samvit_base_patch16', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef samvit_large_patch16(pretrained=False, **kwargs) -> VisionTransformerSAM:\n    \"\"\" ViT-L/16 for Segment-Anything\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=1024, depth=24, num_heads=16, global_attn_indexes=[5, 11, 17, 23],\n        window_size=14, use_rel_pos=True, img_size=1024,\n    )\n    model = _create_vision_transformer(\n        'samvit_large_patch16', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef samvit_huge_patch16(pretrained=False, **kwargs) -> VisionTransformerSAM:\n    \"\"\" ViT-H/16 for Segment-Anything\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=1280, depth=32, num_heads=16, global_attn_indexes=[7, 15, 23, 31],\n        window_size=14, 
use_rel_pos=True, img_size=1024,\n    )\n    model = _create_vision_transformer(\n        'samvit_huge_patch16', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef samvit_base_patch16_224(pretrained=False, **kwargs) -> VisionTransformerSAM:\n    \"\"\" ViT-B/16 based on samvit arch\n    \"\"\"\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=12, num_heads=12, global_attn_indexes=[2, 5, 8, 11],\n        window_size=14, use_rel_pos=True, use_abs_pos=False, img_size=224, neck_chans=None,\n    )\n    model = _create_vision_transformer(\n        'samvit_base_patch16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n"
  },
  {
    "path": "timm/models/vitamin.py",
    "content": "\"\"\" ViTamin\n\nPaper: Designing Scalable Vision Models in the Vision-Language Era\nA family of model weights on Huggingface: https://huggingface.co/collections/jienengchen/vitamin-family-661048126b72debdaca060bf\n\n@inproceedings{chen2024vitamin,\n  title={ViTamin: Designing Scalable Vision Models in the Vision-language Era},\n  author={Chen, Jieneng and Yu, Qihang and Shen, Xiaohui and Yuille, Alan and Chen, Liang-Chieh},\n  booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition},\n  year={2024}\n}\n\nBased on Apache 2.0 licensed code at https://github.com/ViTamin/ViTamin\n\nModifications and timm support by Jieneng Chen 2024\n\nReference:\nhttps://github.com/huggingface/pytorch-image-models/blob/main/timm/models/vision_transformer.py\nhttps://github.com/huggingface/pytorch-image-models/blob/main/timm/models/vision_transformer_hybrid.py\n\"\"\"\n\nimport math\nfrom dataclasses import dataclass, field\nfrom functools import partial\nfrom typing import Optional, Union, Tuple\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import OPENAI_CLIP_MEAN, OPENAI_CLIP_STD\nfrom timm.layers import create_act_layer, get_norm_layer, get_norm_act_layer, create_conv2d, \\\n    make_divisible, DropPath, HybridEmbed\nfrom ._builder import build_model_with_cfg\nfrom ._manipulate import named_apply, checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\nfrom .vision_transformer import VisionTransformer, checkpoint_filter_fn\n\n\n@dataclass\nclass VitConvCfg:\n    expand_ratio: float = 4.0\n    expand_output: bool = True  # calculate expansion channels from output (vs input chs)\n    kernel_size: int = 3\n    group_size: int = 1  # 1 == depthwise\n    pre_norm_act: bool = False  # activation after pre-norm\n    stride_mode: str = 'dw'  # stride done via one of 'pool', '1x1', 'dw'\n    pool_type: str = 'avg2'\n    downsample_pool_type: str = 'avg2'\n    act_layer: str = 'gelu' # stem & stage 1234\n   
 norm_layer: str = ''\n    norm_eps: float = 1e-5\n    down_shortcut: Optional[bool] = True\n    mlp: str = 'mlp'\n\n\n@dataclass\nclass VitCfg:\n    embed_dim: Tuple[Union[int, Tuple[int, ...]], ...] = (96, 192, 384, 768)\n    depths: Tuple[Union[int, Tuple[int, ...]], ...] = (2, 3, 5, 2)\n    stem_width: int = 64\n    conv_cfg: VitConvCfg = field(default_factory=VitConvCfg)\n    head_type: str = \"\"\n\n\ndef _init_conv(module, name, scheme=''):\n    if isinstance(module, nn.Conv2d):\n        fan_out = module.kernel_size[0] * module.kernel_size[1] * module.out_channels\n        fan_out //= module.groups\n        nn.init.normal_(module.weight, 0, math.sqrt(2.0 / fan_out))\n        if module.bias is not None:\n            nn.init.zeros_(module.bias)\n\n\nclass Stem(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            act_layer: str = 'gelu',\n            norm_layer: str = 'layernorm2d',\n            norm_eps: float = 1e-6,\n            bias: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        norm_act_layer = partial(get_norm_act_layer(norm_layer, act_layer), eps=norm_eps)\n        self.out_chs = out_chs\n\n        self.conv1 = create_conv2d(in_chs, out_chs, 3, stride=2, bias=bias, **dd)\n        self.norm1 = norm_act_layer(out_chs, **dd)\n        self.conv2 = create_conv2d(out_chs, out_chs, 3, stride=1, bias=bias, **dd)\n\n        named_apply(_init_conv, self)\n\n    def forward(self, x):\n        x = self.conv1(x)\n        x = self.norm1(x)\n        x = self.conv2(x)\n        return x\n\n\nclass Downsample2d(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            dim_out: int,\n            pool_type: str = 'avg2',\n            bias: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        
super().__init__()\n        self.pool = nn.AvgPool2d(kernel_size=3, stride=2, padding=1, count_include_pad=False)\n\n        if dim != dim_out:\n            self.expand = nn.Conv2d(dim, dim_out, 1, bias=bias, **dd) # 1x1 conv\n        else:\n            self.expand = nn.Identity()\n\n    def forward(self, x):\n        x = self.pool(x)  # spatial downsample\n        x = self.expand(x)  # expand chs\n        return x\n\n\nclass StridedConv(nn.Module):\n    \"\"\" downsample 2d as well\n    \"\"\"\n    def __init__(\n            self,\n            kernel_size: int = 3,\n            stride: int = 2,\n            padding: int = 1,\n            in_chans: int = 3,\n            embed_dim: int = 768,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        norm_layer = partial(get_norm_layer('layernorm2d'), eps=1e-6)\n\n        self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=kernel_size, stride=stride, padding=padding, **dd)\n        self.norm = norm_layer(in_chans, **dd) # affine over C\n\n    def forward(self, x):\n        x = self.norm(x)\n        x = self.proj(x)\n        return x\n\n\nclass MbConvLNBlock(nn.Module):\n    \"\"\" Pre-Norm Conv Block - 1x1 - kxk - 1x1, w/ inverted bottleneck (expand)\n    \"\"\"\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            stride: int = 1,\n            drop_path: float = 0.,\n            kernel_size: int = 3,\n            norm_layer: str = 'layernorm2d',\n            norm_eps: float = 1e-6,\n            act_layer: str = 'gelu',\n            expand_ratio: float = 4.0,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.stride, self.in_chs, self.out_chs = stride, in_chs, out_chs\n        mid_chs = make_divisible(out_chs * expand_ratio)\n        prenorm_act_layer = 
partial(get_norm_act_layer(norm_layer, act_layer), eps=norm_eps)\n\n        if stride == 2:\n            self.shortcut = Downsample2d(in_chs, out_chs, pool_type='avg', bias=True, **dd)\n        elif in_chs != out_chs:\n            self.shortcut = nn.Conv2d(in_chs, out_chs, 1, bias=True, **dd)\n        else:\n            self.shortcut = nn.Identity()\n\n        self.pre_norm = prenorm_act_layer(in_chs, apply_act=False, **dd)\n        self.down = nn.Identity()\n        self.conv1_1x1 = create_conv2d(in_chs, mid_chs, 1, stride=1, bias=True, **dd)\n        self.act1 = create_act_layer(act_layer, inplace=True)\n        self.conv2_kxk = create_conv2d(\n            mid_chs, mid_chs, kernel_size, stride=stride, dilation=1, groups=mid_chs, bias=True, **dd)\n        self.act2 = create_act_layer(act_layer, inplace=True)\n        self.conv3_1x1 = create_conv2d(mid_chs, out_chs, 1, bias=True, **dd)\n        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n\n    def init_weights(self, scheme=''):\n        named_apply(partial(_init_conv, scheme=scheme), self)\n\n    def forward(self, x):\n        shortcut = self.shortcut(x)\n\n        x = self.pre_norm(x)\n        x = self.down(x) # nn.Identity()\n\n        # 1x1 expansion conv & act\n        x = self.conv1_1x1(x)\n        x = self.act1(x)\n\n        # (strided) depthwise 3x3 conv & act\n        x = self.conv2_kxk(x)\n        x = self.act2(x)\n\n        # 1x1 linear projection to output width\n        x = self.conv3_1x1(x)\n        x = self.drop_path(x) + shortcut\n\n        return x\n\n\nclass MbConvStages(nn.Module):\n    \"\"\" MobileConv for stage 1 and stage 2 of ViTamin\n    \"\"\"\n    def __init__(\n            self,\n            cfg: VitCfg,\n            img_size: Union[int, Tuple[int, int]] = 224, # place holder\n            in_chans: int = 3,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        
self.grad_checkpointing = False\n\n        self.stem = Stem(\n            in_chs=in_chans,\n            out_chs=cfg.stem_width,\n            **dd,\n        )\n\n        stages = []\n        self.num_stages = len(cfg.embed_dim)\n        for s, dim in enumerate(cfg.embed_dim[:2]): # stage\n            stage_in_chs = cfg.embed_dim[s-1] if s>0 else cfg.stem_width\n            blocks = [\n                MbConvLNBlock(\n                    in_chs = stage_in_chs if d==0 else dim,\n                    out_chs = dim,\n                    stride = 2 if d == 0 else 1,\n                    **dd,\n                )\n                for d in range(cfg.depths[s])\n            ]\n            stages += [nn.Sequential(*blocks)]\n        self.stages = nn.Sequential(*stages)\n\n        self.pool = StridedConv(\n            stride=2,\n            in_chans=cfg.embed_dim[1],\n            embed_dim=cfg.embed_dim[2],\n            **dd,\n        )\n\n    def forward(self, x):\n        x = self.stem(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.stages, x)\n        else:\n            x = self.stages(x)\n        x = self.pool(x)\n        return x\n\n\nclass GeGluMlp(nn.Module):\n    def __init__(\n            self,\n            in_features: int,\n            hidden_features: int,\n            act_layer: str = 'gelu',\n            norm_layer: Optional[str] = None,\n            bias: bool = True,\n            drop: float = 0.0,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        norm_layer = partial(get_norm_layer(norm_layer or 'layernorm'), eps=1e-6)\n\n        self.norm = norm_layer(in_features, **dd)\n        self.w0 = nn.Linear(in_features, hidden_features, bias=bias, **dd)\n        self.act = create_act_layer(act_layer)\n        self.w1 = nn.Linear(in_features, hidden_features, bias=bias, **dd)\n        self.w2 = 
nn.Linear(hidden_features, in_features, bias=bias, **dd)\n\n    def forward(self, x):\n        x = self.norm(x)\n        x = self.act(self.w0(x)) * self.w1(x)\n        x = self.w2(x)\n        return x\n\n\ndef _create_vitamin(variant, pretrained=False, embed_cfg=None, **kwargs):\n    out_indices = kwargs.pop('out_indices', 3)\n    assert embed_cfg is not None\n    dd = {'device': kwargs.get('device', None), 'dtype': kwargs.get('dtype', None)}\n    backbone = MbConvStages(cfg=embed_cfg, in_chans=kwargs.get('in_chans', 3), **dd)\n    kwargs['embed_layer'] = partial(HybridEmbed, backbone=backbone, proj=False)\n    kwargs.setdefault('patch_size', 1)  # default patch size for hybrid models if not set\n\n    return build_model_with_cfg(\n        VisionTransformer,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': OPENAI_CLIP_MEAN, 'std': OPENAI_CLIP_STD,\n        'first_conv': 'patch_embed.backbone.stem.conv1',\n        'classifier': 'head', 'license': 'mit',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'vitamin_small_224.datacomp1b_clip_ltt': _cfg(\n        hf_hub_id='jienengchen/ViTamin-S-LTT', num_classes=768),\n    'vitamin_small_224.datacomp1b_clip': _cfg(\n        hf_hub_id='jienengchen/ViTamin-S', num_classes=384),\n    'vitamin_base_224.datacomp1b_clip_ltt': _cfg(\n        hf_hub_id='jienengchen/ViTamin-B-LTT', num_classes=768),\n    'vitamin_base_224.datacomp1b_clip': _cfg(\n        hf_hub_id='jienengchen/ViTamin-B', num_classes=768),\n    'vitamin_large_224.datacomp1b_clip': _cfg(\n        hf_hub_id='jienengchen/ViTamin-L-224px', num_classes=768),\n    
'vitamin_large_256.datacomp1b_clip': _cfg(\n        hf_hub_id='jienengchen/ViTamin-L-256px', num_classes=768,\n        input_size=(3, 256, 256), crop_pct=1.0),\n    'vitamin_large_336.datacomp1b_clip': _cfg(\n        hf_hub_id='jienengchen/ViTamin-L-336px', num_classes=768,\n        input_size=(3, 336, 336), crop_pct=1.0),\n    'vitamin_large_384.datacomp1b_clip': _cfg(\n        hf_hub_id='jienengchen/ViTamin-L-384px', num_classes=768,\n        input_size=(3, 384, 384), crop_pct=1.0),\n    'vitamin_large2_224.datacomp1b_clip': _cfg(\n        hf_hub_id='jienengchen/ViTamin-L2-224px', num_classes=1024),\n    'vitamin_large2_256.datacomp1b_clip': _cfg(\n        hf_hub_id='jienengchen/ViTamin-L2-256px', num_classes=1024,\n        input_size=(3, 256, 256), crop_pct=1.0),\n    'vitamin_large2_336.datacomp1b_clip': _cfg(\n        hf_hub_id='jienengchen/ViTamin-L2-336px', num_classes=1024,\n        input_size=(3, 336, 336), crop_pct=1.0),\n    'vitamin_large2_384.datacomp1b_clip': _cfg(\n        hf_hub_id='jienengchen/ViTamin-L2-384px', num_classes=1024,\n        input_size=(3, 384, 384), crop_pct=1.0),\n    'vitamin_xlarge_256.datacomp1b_clip': _cfg(\n        hf_hub_id='jienengchen/ViTamin-XL-256px', num_classes=1152,\n        input_size=(3, 256, 256), crop_pct=1.0),\n    'vitamin_xlarge_336.datacomp1b_clip': _cfg(\n        hf_hub_id='jienengchen/ViTamin-XL-336px', num_classes=1152,\n        input_size=(3, 336, 336), crop_pct=1.0),\n    'vitamin_xlarge_384.datacomp1b_clip': _cfg(\n        hf_hub_id='jienengchen/ViTamin-XL-384px', num_classes=1152,\n        input_size=(3, 384, 384), crop_pct=1.0),\n})\n\n\n@register_model\ndef vitamin_small_224(pretrained=False, **kwargs) -> VisionTransformer:\n    embed_cfg = VitCfg(\n        embed_dim=(64, 128, 384),\n        depths=(2, 4, 1),\n        stem_width=64,\n        conv_cfg=VitConvCfg(\n            norm_layer='layernorm2d',\n            norm_eps=1e-6,\n        ),\n        head_type='1d',\n    )\n    model_args = dict(\n        
embed_dim=384, depth=14, num_heads=6, mlp_layer=GeGluMlp, mlp_ratio=2.,\n        class_token=False, global_pool='avg', embed_cfg=embed_cfg\n    )\n    model = _create_vitamin('vitamin_small_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vitamin_base_224(pretrained=False, **kwargs) -> VisionTransformer:\n    embed_cfg = VitCfg(\n        embed_dim=(128, 256, 768),\n        depths=(2, 4, 1),\n        stem_width=128,\n        conv_cfg=VitConvCfg(\n            norm_layer='layernorm2d',\n            norm_eps=1e-6,\n        ),\n        head_type='1d',\n    )\n    model_args = dict(\n        embed_dim=768, depth=14, num_heads=12, mlp_layer=GeGluMlp, mlp_ratio=2.,\n        class_token=False, global_pool='avg', embed_cfg=embed_cfg)\n    model = _create_vitamin('vitamin_base_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vitamin_large_224(pretrained=False, **kwargs) -> VisionTransformer:\n    embed_cfg = VitCfg(\n        embed_dim=(160, 320, 1024),\n        depths=(2, 4, 1),\n        stem_width=160,\n        conv_cfg=VitConvCfg(\n            norm_layer='layernorm2d',\n            norm_eps=1e-6,\n        ),\n        head_type='1d',\n    )\n    model_args = dict(\n        embed_dim=1024, depth=31, num_heads=16, mlp_layer=GeGluMlp, mlp_ratio=2.,\n        class_token=False, global_pool='avg', embed_cfg=embed_cfg,\n    )\n    model = _create_vitamin('vitamin_large_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vitamin_large_256(pretrained=False, **kwargs) -> VisionTransformer:\n    embed_cfg = VitCfg(\n        embed_dim=(160, 320, 1024),\n        depths=(2, 4, 1),\n        stem_width=160,\n        conv_cfg=VitConvCfg(\n            norm_layer='layernorm2d',\n            norm_eps=1e-6,\n        ),\n        head_type='1d',\n    )\n    model_args = dict(\n        img_size=256, embed_dim=1024, depth=31, num_heads=16, 
mlp_layer=GeGluMlp, mlp_ratio=2.,\n        class_token=False, global_pool='avg', embed_cfg=embed_cfg)\n    model = _create_vitamin('vitamin_large_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vitamin_large_336(pretrained=False, **kwargs) -> VisionTransformer:\n    embed_cfg = VitCfg(\n        embed_dim=(160, 320, 1024),\n        depths=(2, 4, 1),\n        stem_width=160,\n        conv_cfg=VitConvCfg(\n            norm_layer='layernorm2d',\n            norm_eps=1e-6,\n        ),\n        head_type='1d',\n    )\n    model_args = dict(\n        img_size=336, embed_dim=1024, depth=31, num_heads=16, mlp_layer=GeGluMlp, mlp_ratio=2.,\n        class_token=False, global_pool='avg', embed_cfg=embed_cfg\n    )\n    model = _create_vitamin('vitamin_large_336', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vitamin_large_384(pretrained=False, **kwargs) -> VisionTransformer:\n    embed_cfg = VitCfg(\n        embed_dim=(160, 320, 1024),\n        depths=(2, 4, 1),\n        stem_width=160,\n        conv_cfg=VitConvCfg(\n            norm_layer='layernorm2d',\n            norm_eps=1e-6,\n        ),\n        head_type='1d',\n    )\n    model_args = dict(\n        img_size=384, embed_dim=1024, depth=31, num_heads=16, mlp_layer=GeGluMlp, mlp_ratio=2.,\n        class_token=False, global_pool='avg', embed_cfg=embed_cfg)\n    model = _create_vitamin('vitamin_large_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vitamin_large2_224(pretrained=False, **kwargs) -> VisionTransformer:\n    embed_cfg = VitCfg(\n        embed_dim=(160, 320, 1024),\n        depths=(2, 4, 1),\n        stem_width=160,\n        conv_cfg=VitConvCfg(\n            norm_layer='layernorm2d',\n            norm_eps=1e-6,\n        ),\n        head_type='1d',\n    )\n    model_args = dict(\n        embed_dim=1024, depth=31, num_heads=16, mlp_layer=GeGluMlp, 
mlp_ratio=2.,\n        class_token=False, global_pool='avg', embed_cfg=embed_cfg,\n    )\n    model = _create_vitamin('vitamin_large2_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vitamin_large2_256(pretrained=False, **kwargs) -> VisionTransformer:\n    embed_cfg = VitCfg(\n        embed_dim=(160, 320, 1024),\n        depths=(2, 4, 1),\n        stem_width=160,\n        conv_cfg=VitConvCfg(\n            norm_layer='layernorm2d',\n            norm_eps=1e-6,\n        ),\n        head_type='1d',\n    )\n    model_args = dict(\n        img_size=256, embed_dim=1024, depth=31, num_heads=16, mlp_layer=GeGluMlp, mlp_ratio=2.,\n        class_token=False, global_pool='avg', embed_cfg=embed_cfg)\n    model = _create_vitamin('vitamin_large2_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vitamin_large2_336(pretrained=False, **kwargs) -> VisionTransformer:\n    embed_cfg = VitCfg(\n        embed_dim=(160, 320, 1024),\n        depths=(2, 4, 1),\n        stem_width=160,\n        conv_cfg=VitConvCfg(\n            norm_layer='layernorm2d',\n            norm_eps=1e-6,\n        ),\n        head_type='1d',\n    )\n    model_args = dict(\n        img_size=336, embed_dim=1024, depth=31, num_heads=16, mlp_layer=GeGluMlp, mlp_ratio=2.,\n        class_token=False, global_pool='avg', embed_cfg=embed_cfg\n    )\n    model = _create_vitamin('vitamin_large2_336', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vitamin_large2_384(pretrained=False, **kwargs) -> VisionTransformer:\n    embed_cfg = VitCfg(\n        embed_dim=(160, 320, 1024),\n        depths=(2, 4, 1),\n        stem_width=160,\n        conv_cfg=VitConvCfg(\n            norm_layer='layernorm2d',\n            norm_eps=1e-6,\n        ),\n        head_type='1d',\n    )\n    model_args = dict(\n        img_size=384, embed_dim=1024, depth=31, num_heads=16, mlp_layer=GeGluMlp, 
mlp_ratio=2.,\n        class_token=False, global_pool='avg', embed_cfg=embed_cfg)\n    model = _create_vitamin('vitamin_large2_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vitamin_xlarge_256(pretrained=False, **kwargs) -> VisionTransformer:\n    embed_cfg=VitCfg(\n        embed_dim=(192, 384, 1152),\n        depths=(2, 4, 1),\n        stem_width=192,\n        conv_cfg=VitConvCfg(\n            norm_layer='layernorm2d',\n            norm_eps=1e-6,\n        ),\n        head_type='1d',\n    )\n    model_args = dict(\n        img_size=256, embed_dim=1152, depth=32, num_heads=16, mlp_layer=GeGluMlp, mlp_ratio=2.,\n        class_token=False, global_pool='avg', pos_embed='none', embed_cfg=embed_cfg)\n    model = _create_vitamin(\n        'vitamin_xlarge_256', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vitamin_xlarge_336(pretrained=False, **kwargs) -> VisionTransformer:\n    embed_cfg = VitCfg(\n        embed_dim=(192, 384, 1152),\n        depths=(2, 4, 1),\n        stem_width=192,\n        conv_cfg=VitConvCfg(\n            norm_layer='layernorm2d',\n            norm_eps=1e-6,\n        ),\n        head_type='1d',\n    )\n    model_args = dict(\n        img_size=336, embed_dim=1152, depth=32, num_heads=16, mlp_layer=GeGluMlp, mlp_ratio=2.,\n        class_token=False, global_pool='avg', pos_embed='none', embed_cfg=embed_cfg)\n    model = _create_vitamin('vitamin_xlarge_336', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef vitamin_xlarge_384(pretrained=False, **kwargs) -> VisionTransformer:\n    embed_cfg = VitCfg(\n        embed_dim=(192, 384, 1152),\n        depths=(2, 4, 1),\n        stem_width=192,\n        conv_cfg=VitConvCfg(\n            norm_layer='layernorm2d',\n            norm_eps=1e-6,\n        ),\n        head_type='1d',\n    )\n    model_args = dict(\n        img_size=384, embed_dim=1152, depth=32, 
num_heads=16, mlp_layer=GeGluMlp, mlp_ratio=2.,\n        class_token=False, global_pool='avg', pos_embed='none', embed_cfg=embed_cfg)\n    model = _create_vitamin('vitamin_xlarge_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n"
  },
  {
    "path": "timm/models/volo.py",
    "content": "\"\"\" Vision OutLOoker (VOLO) implementation\n\nPaper: `VOLO: Vision Outlooker for Visual Recognition` - https://arxiv.org/abs/2106.13112\n\nCode adapted from official impl at https://github.com/sail-sg/volo, original copyright in comment below\n\nModifications and additions for timm by / Copyright 2022, Ross Wightman\n\"\"\"\n# Copyright 2021 Sea Limited.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport math\nfrom typing import Any, Callable, Dict, List, Optional, Tuple, Union, Type\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import DropPath, Mlp, to_2tuple, to_ntuple, trunc_normal_, use_fused_attn\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['VOLO']  # model_registry will add each entrypoint fn to this\n\n\nclass OutlookAttention(nn.Module):\n    \"\"\"Outlook attention mechanism for VOLO models.\"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            kernel_size: int = 3,\n            padding: int = 1,\n            stride: int = 1,\n            qkv_bias: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        
\"\"\"Initialize OutlookAttention.\n\n        Args:\n            dim: Input feature dimension.\n            num_heads: Number of attention heads.\n            kernel_size: Kernel size for attention computation.\n            padding: Padding for attention computation.\n            stride: Stride for attention computation.\n            qkv_bias: Whether to use bias in linear layers.\n            attn_drop: Attention dropout rate.\n            proj_drop: Projection dropout rate.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        head_dim = dim // num_heads\n        self.num_heads = num_heads\n        self.kernel_size = kernel_size\n        self.padding = padding\n        self.stride = stride\n        self.scale = head_dim ** -0.5\n\n        self.v = nn.Linear(dim, dim, bias=qkv_bias, **dd)\n        self.attn = nn.Linear(dim, kernel_size ** 4 * num_heads, **dd)\n\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n        self.unfold = nn.Unfold(kernel_size=kernel_size, padding=padding, stride=stride)\n        self.pool = nn.AvgPool2d(kernel_size=stride, stride=stride, ceil_mode=True)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor of shape (B, H, W, C).\n\n        Returns:\n            Output tensor of shape (B, H, W, C).\n        \"\"\"\n        B, H, W, C = x.shape\n\n        v = self.v(x).permute(0, 3, 1, 2)  # B, C, H, W\n\n        h, w = math.ceil(H / self.stride), math.ceil(W / self.stride)\n        v = self.unfold(v).reshape(\n            B, self.num_heads, C // self.num_heads,\n            self.kernel_size * self.kernel_size, h * w).permute(0, 1, 4, 3, 2)  # B,H,N,kxk,C/H\n\n        attn = self.pool(x.permute(0, 3, 1, 2)).permute(0, 2, 3, 1)\n        attn = self.attn(attn).reshape(\n            B, h * w, self.num_heads, 
self.kernel_size * self.kernel_size,\n            self.kernel_size * self.kernel_size).permute(0, 2, 1, 3, 4)  # B,H,N,kxk,kxk\n        attn = attn * self.scale\n        attn = attn.softmax(dim=-1)\n        attn = self.attn_drop(attn)\n\n        x = (attn @ v).permute(0, 1, 4, 3, 2).reshape(B, C * self.kernel_size * self.kernel_size, h * w)\n        x = F.fold(x, output_size=(H, W), kernel_size=self.kernel_size, padding=self.padding, stride=self.stride)\n\n        x = self.proj(x.permute(0, 2, 3, 1))\n        x = self.proj_drop(x)\n\n        return x\n\n\nclass Outlooker(nn.Module):\n    \"\"\"Outlooker block that combines outlook attention with MLP.\"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            kernel_size: int,\n            padding: int,\n            stride: int = 1,\n            num_heads: int = 1,\n            mlp_ratio: float = 3.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            qkv_bias: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize Outlooker block.\n\n        Args:\n            dim: Input feature dimension.\n            kernel_size: Kernel size for outlook attention.\n            padding: Padding for outlook attention.\n            stride: Stride for outlook attention.\n            num_heads: Number of attention heads.\n            mlp_ratio: Ratio for MLP hidden dimension.\n            attn_drop: Attention dropout rate.\n            drop_path: Stochastic depth drop rate.\n            act_layer: Activation layer type.\n            norm_layer: Normalization layer type.\n            qkv_bias: Whether to use bias in linear layers.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = OutlookAttention(\n            dim,\n          
  num_heads,\n            kernel_size=kernel_size,\n            padding=padding,\n            stride=stride,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            **dd,\n        )\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp = Mlp(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            **dd,\n        )\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Output tensor.\n        \"\"\"\n        x = x + self.drop_path1(self.attn(self.norm1(x)))\n        x = x + self.drop_path2(self.mlp(self.norm2(x)))\n        return x\n\n\nclass Attention(nn.Module):\n    \"\"\"Multi-head self-attention module.\"\"\"\n    fused_attn: torch.jit.Final[bool]\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize Attention module.\n\n        Args:\n            dim: Input feature dimension.\n            num_heads: Number of attention heads.\n            qkv_bias: Whether to use bias in QKV projection.\n            attn_drop: Attention dropout rate.\n            proj_drop: Projection dropout rate.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_heads = num_heads\n        head_dim = dim // num_heads\n        self.scale = head_dim ** -0.5\n        self.fused_attn = use_fused_attn()\n\n        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        
self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor of shape (B, H, W, C).\n\n        Returns:\n            Output tensor of shape (B, H, W, C).\n        \"\"\"\n        B, H, W, C = x.shape\n\n        qkv = self.qkv(x).reshape(B, H * W, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4)\n        q, k, v = qkv.unbind(0)\n\n        if self.fused_attn:\n            x = F.scaled_dot_product_attention(\n                q, k, v,\n                dropout_p=self.attn_drop.p if self.training else 0.,\n            )\n        else:\n            q = q * self.scale\n            attn = q @ k.transpose(-2, -1)\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x = attn @ v\n\n        x = x.transpose(1, 2).reshape(B, H, W, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n\n        return x\n\n\nclass Transformer(nn.Module):\n    \"\"\"Transformer block with multi-head self-attention and MLP.\"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize Transformer block.\n\n        Args:\n            dim: Input feature dimension.\n            num_heads: Number of attention heads.\n            mlp_ratio: Ratio for MLP hidden dimension.\n            qkv_bias: Whether to use bias in QKV projection.\n            attn_drop: Attention dropout rate.\n            drop_path: Stochastic depth drop rate.\n            act_layer: Activation layer type.\n            norm_layer: Normalization 
layer type.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = Attention(dim, num_heads=num_heads, qkv_bias=qkv_bias, attn_drop=attn_drop, **dd)\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp = Mlp(in_features=dim, hidden_features=int(dim * mlp_ratio), act_layer=act_layer, **dd)\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor.\n\n        Returns:\n            Output tensor.\n        \"\"\"\n        x = x + self.drop_path1(self.attn(self.norm1(x)))\n        x = x + self.drop_path2(self.mlp(self.norm2(x)))\n        return x\n\n\nclass ClassAttention(nn.Module):\n    \"\"\"Class attention mechanism for class token interaction.\"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            head_dim: Optional[int] = None,\n            qkv_bias: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize ClassAttention.\n\n        Args:\n            dim: Input feature dimension.\n            num_heads: Number of attention heads.\n            head_dim: Dimension per head. 
If None, computed as dim // num_heads.\n            qkv_bias: Whether to use bias in QKV projection.\n            attn_drop: Attention dropout rate.\n            proj_drop: Projection dropout rate.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_heads = num_heads\n        if head_dim is not None:\n            self.head_dim = head_dim\n        else:\n            head_dim = dim // num_heads\n            self.head_dim = head_dim\n        self.scale = head_dim ** -0.5\n\n        self.kv = nn.Linear(dim, self.head_dim * self.num_heads * 2, bias=qkv_bias, **dd)\n        self.q = nn.Linear(dim, self.head_dim * self.num_heads, bias=qkv_bias, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(self.head_dim * self.num_heads, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor of shape (B, N, C) where first token is class token.\n\n        Returns:\n            Class token output of shape (B, 1, C).\n        \"\"\"\n        B, N, C = x.shape\n\n        kv = self.kv(x).reshape(B, N, 2, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)\n        k, v = kv.unbind(0)\n        q = self.q(x[:, :1, :]).reshape(B, self.num_heads, 1, self.head_dim) * self.scale\n\n        attn = q @ k.transpose(-2, -1)\n        attn = attn.softmax(dim=-1)\n        attn = self.attn_drop(attn)\n\n        cls_embed = (attn @ v).transpose(1, 2).reshape(B, 1, self.head_dim * self.num_heads)\n        cls_embed = self.proj(cls_embed)\n        cls_embed = self.proj_drop(cls_embed)\n        return cls_embed\n\n\nclass ClassBlock(nn.Module):\n    \"\"\"Class block that combines class attention with MLP.\"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            head_dim: Optional[int] = None,\n            mlp_ratio: float = 
4.,\n            qkv_bias: bool = False,\n            drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize ClassBlock.\n\n        Args:\n            dim: Input feature dimension.\n            num_heads: Number of attention heads.\n            head_dim: Dimension per head. If None, computed as dim // num_heads.\n            mlp_ratio: Ratio for MLP hidden dimension.\n            qkv_bias: Whether to use bias in QKV projection.\n            drop: Dropout rate.\n            attn_drop: Attention dropout rate.\n            drop_path: Stochastic depth drop rate.\n            act_layer: Activation layer type.\n            norm_layer: Normalization layer type.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = ClassAttention(\n            dim,\n            num_heads=num_heads,\n            head_dim=head_dim,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=drop,\n            **dd,\n        )\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp = Mlp(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=drop,\n            **dd,\n        )\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor of shape (B, N, C) where first token is class token.\n\n        Returns:\n            Output tensor with updated class token.\n        \"\"\"\n        cls_embed = x[:, :1]\n        cls_embed = cls_embed + self.drop_path1(self.attn(self.norm1(x)))\n        cls_embed = cls_embed + self.drop_path2(self.mlp(self.norm2(cls_embed)))\n        return torch.cat([cls_embed, x[:, 1:]], dim=1)\n\n\ndef get_block(block_type: str, **kwargs: Any) -> nn.Module:\n    \"\"\"Get block based on type.\n\n    Args:\n        block_type: Type of block ('ca' for ClassBlock).\n        **kwargs: Additional keyword arguments for block.\n\n    Returns:\n        The requested block module.\n    \"\"\"\n    if block_type == 'ca':\n        return ClassBlock(**kwargs)\n    else:\n        assert False, f'Invalid block type: {block_type}'\n\n\ndef rand_bbox(size: Tuple[int, ...], lam: float, scale: int = 1) -> Tuple[int, int, int, int]:\n    \"\"\"Get random bounding box for token labeling.\n\n    Reference: https://github.com/zihangJiang/TokenLabeling\n\n    Args:\n        size: Input tensor size tuple.\n        lam: Lambda parameter for cutmix.\n        scale: Scaling factor.\n\n    Returns:\n        Bounding box coordinates (bbx1, bby1, bbx2, bby2).\n    \"\"\"\n    W = size[1] // scale\n    H = size[2] // scale\n    W_t = torch.tensor(W, dtype=torch.float32)\n    H_t = torch.tensor(H, dtype=torch.float32)\n    cut_rat = torch.sqrt(1. 
- lam)\n    cut_w = (W_t * cut_rat).int()\n    cut_h = (H_t * cut_rat).int()\n\n    # uniform\n    cx = torch.randint(0, W, (1,))\n    cy = torch.randint(0, H, (1,))\n\n    bbx1 = torch.clamp(cx - cut_w // 2, 0, W)\n    bby1 = torch.clamp(cy - cut_h // 2, 0, H)\n    bbx2 = torch.clamp(cx + cut_w // 2, 0, W)\n    bby2 = torch.clamp(cy + cut_h // 2, 0, H)\n\n    return bbx1.item(), bby1.item(), bbx2.item(), bby2.item()\n\n\nclass PatchEmbed(nn.Module):\n    \"\"\"Image to patch embedding with multi-layer convolution.\"\"\"\n\n    def __init__(\n            self,\n            img_size: int = 224,\n            stem_conv: bool = False,\n            stem_stride: int = 1,\n            patch_size: int = 8,\n            in_chans: int = 3,\n            hidden_dim: int = 64,\n            embed_dim: int = 384,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize PatchEmbed.\n\n        Different from ViT which uses 1 conv layer, VOLO uses multiple conv layers for patch embedding.\n\n        Args:\n            img_size: Input image size.\n            stem_conv: Whether to use stem convolution layers.\n            stem_stride: Stride for stem convolution.\n            patch_size: Patch size (must be 4, 8, or 16).\n            in_chans: Number of input channels.\n            hidden_dim: Hidden dimension for stem convolution.\n            embed_dim: Output embedding dimension.\n        \"\"\"\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        assert patch_size in [4, 8, 16]\n        if stem_conv:\n            self.conv = nn.Sequential(\n                nn.Conv2d(in_chans, hidden_dim, kernel_size=7, stride=stem_stride, padding=3, bias=False, **dd),\n                nn.BatchNorm2d(hidden_dim, **dd),\n                nn.ReLU(inplace=True),\n                nn.Conv2d(hidden_dim, hidden_dim, kernel_size=3, stride=1, padding=1, bias=False, **dd),\n                nn.BatchNorm2d(hidden_dim, **dd),\n                
nn.ReLU(inplace=True),\n                nn.Conv2d(hidden_dim, hidden_dim, kernel_size=3, stride=1, padding=1, bias=False, **dd),\n                nn.BatchNorm2d(hidden_dim, **dd),\n                nn.ReLU(inplace=True),\n            )\n        else:\n            self.conv = None\n\n        self.proj = nn.Conv2d(\n            hidden_dim,\n            embed_dim,\n            kernel_size=patch_size // stem_stride,\n            stride=patch_size // stem_stride,\n            **dd,\n        )\n        self.num_patches = (img_size // patch_size) * (img_size // patch_size)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor of shape (B, C, H, W).\n\n        Returns:\n            Output tensor of shape (B, embed_dim, H', W').\n        \"\"\"\n        if self.conv is not None:\n            x = self.conv(x)\n        x = self.proj(x)  # B, C, H, W\n        return x\n\n\nclass Downsample(nn.Module):\n    \"\"\"Downsampling module between stages.\"\"\"\n\n    def __init__(\n            self,\n            in_embed_dim: int,\n            out_embed_dim: int,\n            patch_size: int = 2,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize Downsample.\n\n        Args:\n            in_embed_dim: Input embedding dimension.\n            out_embed_dim: Output embedding dimension.\n            patch_size: Patch size for downsampling.\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.proj = nn.Conv2d(in_embed_dim, out_embed_dim, kernel_size=patch_size, stride=patch_size, **dd)\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass.\n\n        Args:\n            x: Input tensor of shape (B, H, W, C).\n\n        Returns:\n            Output tensor of shape (B, H', W', C').\n        \"\"\"\n        x = x.permute(0, 3, 1, 2)\n        x = self.proj(x)  # B, C, H, W\n        x = 
x.permute(0, 2, 3, 1)\n        return x\n\n\ndef outlooker_blocks(\n        block_fn: Callable,\n        index: int,\n        dim: int,\n        layers: List[int],\n        num_heads: int = 1,\n        kernel_size: int = 3,\n        padding: int = 1,\n        stride: int = 2,\n        mlp_ratio: float = 3.,\n        qkv_bias: bool = False,\n        attn_drop: float = 0,\n        drop_path_rate: float = 0.,\n        device=None,\n        dtype=None,\n        **kwargs: Any,\n) -> nn.Sequential:\n    \"\"\"Generate outlooker layers for stage 1.\n\n    Args:\n        block_fn: Block function to use (typically Outlooker).\n        index: Index of current stage.\n        dim: Feature dimension.\n        layers: List of layer counts for each stage.\n        num_heads: Number of attention heads.\n        kernel_size: Kernel size for outlook attention.\n        padding: Padding for outlook attention.\n        stride: Stride for outlook attention.\n        mlp_ratio: Ratio for MLP hidden dimension.\n        qkv_bias: Whether to use bias in QKV projection.\n        attn_drop: Attention dropout rate.\n        drop_path_rate: Stochastic depth drop rate.\n        **kwargs: Additional keyword arguments.\n\n    Returns:\n        Sequential module containing outlooker blocks.\n    \"\"\"\n    blocks = []\n    for block_idx in range(layers[index]):\n        block_dpr = drop_path_rate * (block_idx + sum(layers[:index])) / (sum(layers) - 1)\n        blocks.append(block_fn(\n            dim,\n            kernel_size=kernel_size,\n            padding=padding,\n            stride=stride,\n            num_heads=num_heads,\n            mlp_ratio=mlp_ratio,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            drop_path=block_dpr,\n            device=device,\n            dtype=dtype,\n            **kwargs,\n        ))\n    blocks = nn.Sequential(*blocks)\n    return blocks\n\n\ndef transformer_blocks(\n        block_fn: Callable,\n        index: int,\n        dim: 
int,\n        layers: List[int],\n        num_heads: int,\n        mlp_ratio: float = 3.,\n        qkv_bias: bool = False,\n        attn_drop: float = 0,\n        drop_path_rate: float = 0.,\n        **kwargs: Any,\n) -> nn.Sequential:\n    \"\"\"Generate transformer layers for stage 2.\n\n    Args:\n        block_fn: Block function to use (typically Transformer).\n        index: Index of current stage.\n        dim: Feature dimension.\n        layers: List of layer counts for each stage.\n        num_heads: Number of attention heads.\n        mlp_ratio: Ratio for MLP hidden dimension.\n        qkv_bias: Whether to use bias in QKV projection.\n        attn_drop: Attention dropout rate.\n        drop_path_rate: Stochastic depth drop rate.\n        **kwargs: Additional keyword arguments.\n\n    Returns:\n        Sequential module containing transformer blocks.\n    \"\"\"\n    blocks = []\n    for block_idx in range(layers[index]):\n        block_dpr = drop_path_rate * (block_idx + sum(layers[:index])) / (sum(layers) - 1)\n        blocks.append(block_fn(\n            dim,\n            num_heads,\n            mlp_ratio=mlp_ratio,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            drop_path=block_dpr,\n            **kwargs,\n        ))\n    blocks = nn.Sequential(*blocks)\n    return blocks\n\n\nclass VOLO(nn.Module):\n    \"\"\"Vision Outlooker (VOLO) model.\"\"\"\n\n    def __init__(\n            self,\n            layers: List[int],\n            img_size: int = 224,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'token',\n            patch_size: int = 8,\n            stem_hidden_dim: int = 64,\n            embed_dims: Optional[List[int]] = None,\n            num_heads: Optional[List[int]] = None,\n            downsamples: Tuple[bool, ...] = (True, False, False, False),\n            outlook_attention: Tuple[bool, ...] 
= (True, False, False, False),\n            mlp_ratio: float = 3.0,\n            qkv_bias: bool = False,\n            drop_rate: float = 0.,\n            pos_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            post_layers: Optional[Tuple[str, ...]] = ('ca', 'ca'),\n            use_aux_head: bool = True,\n            use_mix_token: bool = False,\n            pooling_scale: int = 2,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"Initialize VOLO model.\n\n        Args:\n            layers: Number of blocks in each stage.\n            img_size: Input image size.\n            in_chans: Number of input channels.\n            num_classes: Number of classes for classification.\n            global_pool: Global pooling type ('token', 'avg', or '').\n            patch_size: Patch size for patch embedding.\n            stem_hidden_dim: Hidden dimension for stem convolution.\n            embed_dims: List of embedding dimensions for each stage.\n            num_heads: List of number of attention heads for each stage.\n            downsamples: Whether to downsample between stages.\n            outlook_attention: Whether to use outlook attention in each stage.\n            mlp_ratio: Ratio for MLP hidden dimension.\n            qkv_bias: Whether to use bias in QKV projection.\n            drop_rate: Dropout rate.\n            pos_drop_rate: Position embedding dropout rate.\n            attn_drop_rate: Attention dropout rate.\n            drop_path_rate: Stochastic depth drop rate.\n            norm_layer: Normalization layer type.\n            post_layers: Post-processing layer types.\n            use_aux_head: Whether to use auxiliary head.\n            use_mix_token: Whether to use token mixing for training.\n            pooling_scale: Pooling scale factor.\n        \"\"\"\n        super().__init__()\n        dd = {'device': 
device, 'dtype': dtype}\n        num_layers = len(layers)\n        mlp_ratio = to_ntuple(num_layers)(mlp_ratio)\n        img_size = to_2tuple(img_size)\n\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.global_pool = global_pool\n        self.mix_token = use_mix_token\n        self.pooling_scale = pooling_scale\n        self.num_features = self.head_hidden_size = embed_dims[-1]\n        if use_mix_token:  # enable token mixing, see token labeling for details.\n            self.beta = 1.0\n            assert global_pool == 'token', \"return all tokens if mix_token is enabled\"\n        self.grad_checkpointing = False\n\n        self.patch_embed = PatchEmbed(\n            stem_conv=True,\n            stem_stride=2,\n            patch_size=patch_size,\n            in_chans=in_chans,\n            hidden_dim=stem_hidden_dim,\n            embed_dim=embed_dims[0],\n            **dd,\n        )\n        r = patch_size\n\n        # initial positional encoding, we add positional encoding after outlooker blocks\n        patch_grid = (img_size[0] // patch_size // pooling_scale, img_size[1] // patch_size // pooling_scale)\n        self.pos_embed = nn.Parameter(torch.zeros(1, patch_grid[0], patch_grid[1], embed_dims[-1], **dd))\n        self.pos_drop = nn.Dropout(p=pos_drop_rate)\n\n        # set the main block in network\n        self.stage_ends = []\n        self.feature_info = []\n        network = []\n        block_idx = 0\n        for i in range(len(layers)):\n            if outlook_attention[i]:\n                # stage 1\n                stage = outlooker_blocks(\n                    Outlooker,\n                    i,\n                    embed_dims[i],\n                    layers,\n                    num_heads[i],\n                    mlp_ratio=mlp_ratio[i],\n                    qkv_bias=qkv_bias,\n                    attn_drop=attn_drop_rate,\n                    norm_layer=norm_layer,\n                    **dd,\n                
)\n            else:\n                # stage 2\n                stage = transformer_blocks(\n                    Transformer,\n                    i,\n                    embed_dims[i],\n                    layers,\n                    num_heads[i],\n                    mlp_ratio=mlp_ratio[i],\n                    qkv_bias=qkv_bias,\n                    drop_path_rate=drop_path_rate,\n                    attn_drop=attn_drop_rate,\n                    norm_layer=norm_layer,\n                    **dd,\n                )\n            network.append(stage)\n            self.stage_ends.append(block_idx)\n            self.feature_info.append(dict(num_chs=embed_dims[i], reduction=r, module=f'network.{block_idx}'))\n            block_idx += 1\n            if downsamples[i]:\n                # downsampling between two stages\n                network.append(Downsample(embed_dims[i], embed_dims[i + 1], 2, **dd))\n                r *= 2\n                block_idx += 1\n\n        self.network = nn.ModuleList(network)\n\n        # set post block, for example, class attention layers\n        self.post_network = None\n        if post_layers is not None:\n            self.post_network = nn.ModuleList([\n                get_block(\n                    post_layers[i],\n                    dim=embed_dims[-1],\n                    num_heads=num_heads[-1],\n                    mlp_ratio=mlp_ratio[-1],\n                    qkv_bias=qkv_bias,\n                    attn_drop=attn_drop_rate,\n                    drop_path=0.,\n                    norm_layer=norm_layer,\n                    **dd,\n                )\n                for i in range(len(post_layers))\n            ])\n            self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dims[-1], **dd))\n            trunc_normal_(self.cls_token, std=.02)\n\n        # set output type\n        if use_aux_head:\n            self.aux_head = nn.Linear(self.num_features, num_classes, **dd) if num_classes > 0 else nn.Identity()\n        
else:\n            self.aux_head = None\n        self.norm = norm_layer(self.num_features, **dd)\n\n        # Classifier head\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = nn.Linear(self.num_features, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n        trunc_normal_(self.pos_embed, std=.02)\n        self.apply(self._init_weights)\n\n    def _init_weights(self, m: nn.Module) -> None:\n        \"\"\"Initialize weights for modules.\n\n        Args:\n            m: Module to initialize.\n        \"\"\"\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=.02)\n            if isinstance(m, nn.Linear) and m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n\n    @torch.jit.ignore\n    def no_weight_decay(self) -> set:\n        \"\"\"Get set of parameters that should not have weight decay.\n\n        Returns:\n            Set of parameter names.\n        \"\"\"\n        return {'pos_embed', 'cls_token'}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse: bool = False) -> Dict[str, Any]:\n        \"\"\"Get parameter grouping for optimizer.\n\n        Args:\n            coarse: Whether to use coarse grouping.\n\n        Returns:\n            Parameter grouping dictionary.\n        \"\"\"\n        return dict(\n            stem=r'^cls_token|pos_embed|patch_embed',  # stem and embed\n            blocks=[\n                (r'^network\\.(\\d+)\\.(\\d+)', None),\n                (r'^network\\.(\\d+)', (0,)),\n            ],\n            blocks2=[\n                (r'^cls_token', (0,)),\n                (r'^post_network\\.(\\d+)', None),\n                (r'^norm', (99999,))\n            ],\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable: bool = True) -> None:\n        \"\"\"Set gradient checkpointing.\n\n        Args:\n            enable: Whether to enable gradient checkpointing.\n        \"\"\"\n        self.grad_checkpointing = enable\n\n    
@torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        \"\"\"Get classifier module.\n\n        Returns:\n            The classifier head module.\n        \"\"\"\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None) -> None:\n        \"\"\"Reset classifier head.\n\n        Args:\n            num_classes: Number of classes for new classifier.\n            global_pool: Global pooling type.\n        \"\"\"\n        self.num_classes = num_classes\n        if global_pool is not None:\n            self.global_pool = global_pool\n        device = self.head.weight.device if hasattr(self.head, 'weight') else None\n        dtype = self.head.weight.dtype if hasattr(self.head, 'weight') else None\n        self.head = nn.Linear(\n            self.num_features, num_classes, device=device, dtype=dtype) if num_classes > 0 else nn.Identity()\n        if self.aux_head is not None:\n            self.aux_head = nn.Linear(\n                self.num_features, num_classes, device=device, dtype=dtype) if num_classes > 0 else nn.Identity()\n\n    def forward_tokens(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through token processing stages.\n\n        Args:\n            x: Input tensor of shape (B, H, W, C).\n\n        Returns:\n            Token tensor of shape (B, N, C).\n        \"\"\"\n        for idx, block in enumerate(self.network):\n            if idx == 2:\n                # add positional encoding after outlooker blocks\n                x = x + self.pos_embed\n                x = self.pos_drop(x)\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(block, x)\n            else:\n                x = block(x)\n\n        B, H, W, C = x.shape\n        x = x.reshape(B, -1, C)\n        return x\n\n    def forward_cls(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through class attention blocks.\n\n        Args:\n      
      x: Input token tensor of shape (B, N, C).\n\n        Returns:\n            Output tensor with class token of shape (B, N+1, C).\n        \"\"\"\n        B, N, C = x.shape\n        cls_tokens = self.cls_token.expand(B, -1, -1)\n        x = torch.cat([cls_tokens, x], dim=1)\n        for block in self.post_network:\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(block, x)\n            else:\n                x = block(x)\n        return x\n\n    def forward_train(self, x: torch.Tensor) -> Union[torch.Tensor, Tuple[torch.Tensor, torch.Tensor, Tuple[int, int, int, int]]]:\n        \"\"\"Forward pass for training with mix token support.\n\n        Args:\n            x: Input tensor of shape (B, C, H, W).\n\n        Returns:\n            If training with mix_token: tuple of (class_token, aux_tokens, bbox).\n            Otherwise: class_token tensor.\n        \"\"\"\n        \"\"\" A separate forward fn for training with mix_token (if a train script supports).\n        Combining multiple modes in as single forward with different return types is torchscript hell.\n        \"\"\"\n        x = self.patch_embed(x)\n        x = x.permute(0, 2, 3, 1)  # B,C,H,W-> B,H,W,C\n\n        # mix token, see token labeling for details.\n        if self.mix_token and self.training:\n            lam = torch.distributions.Beta(self.beta, self.beta).sample()\n            patch_h, patch_w = x.shape[1] // self.pooling_scale, x.shape[2] // self.pooling_scale\n            bbx1, bby1, bbx2, bby2 = rand_bbox(x.size(), lam, scale=self.pooling_scale)\n            temp_x = x.clone()\n            sbbx1, sbby1 = self.pooling_scale * bbx1, self.pooling_scale * bby1\n            sbbx2, sbby2 = self.pooling_scale * bbx2, self.pooling_scale * bby2\n            temp_x[:, sbbx1:sbbx2, sbby1:sbby2, :] = x.flip(0)[:, sbbx1:sbbx2, sbby1:sbby2, :]\n            x = temp_x\n        else:\n            bbx1, bby1, bbx2, bby2 = 0, 0, 0, 0\n\n        # 
step2: tokens learning in the two stages\n        x = self.forward_tokens(x)\n\n        # step3: post network, apply class attention or not\n        if self.post_network is not None:\n            x = self.forward_cls(x)\n        x = self.norm(x)\n\n        if self.global_pool == 'avg':\n            x_cls = x.mean(dim=1)\n        elif self.global_pool == 'token':\n            x_cls = x[:, 0]\n        else:\n            x_cls = x\n\n        if self.aux_head is None:\n            return x_cls\n\n        x_aux = self.aux_head(x[:, 1:])  # generate classes in all feature tokens, see token labeling\n        if not self.training:\n            return x_cls + 0.5 * x_aux.max(1)[0]\n\n        if self.mix_token and self.training:  # reverse \"mix token\", see token labeling for details.\n            x_aux = x_aux.reshape(x_aux.shape[0], patch_h, patch_w, x_aux.shape[-1])\n            temp_x = x_aux.clone()\n            temp_x[:, bbx1:bbx2, bby1:bby2, :] = x_aux.flip(0)[:, bbx1:bbx2, bby1:bby2, :]\n            x_aux = temp_x\n            x_aux = x_aux.reshape(x_aux.shape[0], patch_h * patch_w, x_aux.shape[-1])\n\n        # return these: 1. class token, 2. classes from all feature tokens, 3. 
bounding box\n        return x_cls, x_aux, (bbx1, bby1, bbx2, bby2)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to all intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output format must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.stage_ends), indices)\n        take_indices = [self.stage_ends[i] for i in take_indices]\n        max_index = self.stage_ends[max_index]\n\n        # forward pass\n        B, _, height, width = x.shape\n        x = self.patch_embed(x).permute(0, 2, 3, 1)  # B,C,H,W-> B,H,W,C\n\n        # step2: tokens learning in the two stages\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            network = self.network\n        else:\n            network = self.network[:max_index + 1]\n        for idx, block in enumerate(network):\n            if idx == 2:\n                # add positional encoding after outlooker blocks\n                x = x + self.pos_embed\n                x = self.pos_drop(x)\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(block, x)\n      
      else:\n                x = block(x)\n            if idx in take_indices:\n                if norm and idx >= 2:\n                    x_inter = self.norm(x)\n                else:\n                    x_inter = x\n                intermediates.append(x_inter.permute(0, 3, 1, 2))\n\n        if intermediates_only:\n            return intermediates\n\n        # NOTE not supporting return of class tokens\n        # step3: post network, apply class attention or not\n        B, H, W, C = x.shape\n        x = x.reshape(B, -1, C)\n        if self.post_network is not None:\n            x = self.forward_cls(x)\n        x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ) -> List[int]:\n        \"\"\"Prune layers not required for specified intermediates.\n\n        Args:\n            indices: Indices of intermediate layers to keep.\n            prune_norm: Whether to prune normalization layer.\n            prune_head: Whether to prune classification head.\n\n        Returns:\n            List of kept intermediate indices.\n        \"\"\"\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.stage_ends), indices)\n        max_index = self.stage_ends[max_index]\n        self.network = self.network[:max_index + 1]  # truncate blocks\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.post_network = nn.ModuleList()  # prune token blocks with head\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through feature extraction.\n\n        Args:\n            x: Input tensor of shape (B, C, H, W).\n\n        Returns:\n            
Feature tensor.\n        \"\"\"\n        x = self.patch_embed(x).permute(0, 2, 3, 1)  # B,C,H,W-> B,H,W,C\n\n        # step2: tokens learning in the two stages\n        x = self.forward_tokens(x)\n\n        # step3: post network, apply class attention or not\n        if self.post_network is not None:\n            x = self.forward_cls(x)\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x: torch.Tensor, pre_logits: bool = False) -> torch.Tensor:\n        \"\"\"Forward pass through classification head.\n\n        Args:\n            x: Input feature tensor.\n            pre_logits: Whether to return pre-logits features.\n\n        Returns:\n            Classification logits or pre-logits features.\n        \"\"\"\n        if self.global_pool == 'avg':\n            out = x.mean(dim=1)\n        elif self.global_pool == 'token':\n            out = x[:, 0]\n        else:\n            out = x\n        x = self.head_drop(x)\n        if pre_logits:\n            return out\n        out = self.head(out)\n        if self.aux_head is not None:\n            # generate classes in all feature tokens, see token labeling\n            aux = self.aux_head(x[:, 1:])\n            out = out + 0.5 * aux.max(1)[0]\n        return out\n\n    def forward(self, x: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass (simplified, without mix token training).\n\n        Args:\n            x: Input tensor of shape (B, C, H, W).\n\n        Returns:\n            Classification logits.\n        \"\"\"\n        \"\"\" simplified forward (without mix token training) \"\"\"\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _create_volo(variant: str, pretrained: bool = False, **kwargs: Any) -> VOLO:\n    \"\"\"Create VOLO model.\n\n    Args:\n        variant: Model variant name.\n        pretrained: Whether to load pretrained weights.\n        **kwargs: Additional model arguments.\n\n    Returns:\n        VOLO model 
instance.\n    \"\"\"\n    out_indices = kwargs.pop('out_indices', 3)\n    return build_model_with_cfg(\n        VOLO,\n        variant,\n        pretrained,\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n\n\ndef _cfg(url: str = '', **kwargs: Any) -> Dict[str, Any]:\n    \"\"\"Create model configuration.\n\n    Args:\n        url: URL for pretrained weights.\n        **kwargs: Additional configuration options.\n\n    Returns:\n        Model configuration dictionary.\n    \"\"\"\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        'crop_pct': .96, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.conv.0', 'classifier': ('head', 'aux_head'),\n        'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'volo_d1_224.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/sail-sg/volo/releases/download/volo_1/d1_224_84.2.pth.tar',\n        crop_pct=0.96),\n    'volo_d1_384.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/sail-sg/volo/releases/download/volo_1/d1_384_85.2.pth.tar',\n        crop_pct=1.0, input_size=(3, 384, 384)),\n    'volo_d2_224.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/sail-sg/volo/releases/download/volo_1/d2_224_85.2.pth.tar',\n        crop_pct=0.96),\n    'volo_d2_384.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/sail-sg/volo/releases/download/volo_1/d2_384_86.0.pth.tar',\n        crop_pct=1.0, input_size=(3, 384, 384)),\n    'volo_d3_224.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/sail-sg/volo/releases/download/volo_1/d3_224_85.4.pth.tar',\n        crop_pct=0.96),\n    'volo_d3_448.sail_in1k': _cfg(\n        
hf_hub_id='timm/',\n        url='https://github.com/sail-sg/volo/releases/download/volo_1/d3_448_86.3.pth.tar',\n        crop_pct=1.0, input_size=(3, 448, 448)),\n    'volo_d4_224.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/sail-sg/volo/releases/download/volo_1/d4_224_85.7.pth.tar',\n        crop_pct=0.96),\n    'volo_d4_448.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/sail-sg/volo/releases/download/volo_1/d4_448_86.79.pth.tar',\n        crop_pct=1.15, input_size=(3, 448, 448)),\n    'volo_d5_224.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/sail-sg/volo/releases/download/volo_1/d5_224_86.10.pth.tar',\n        crop_pct=0.96),\n    'volo_d5_448.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/sail-sg/volo/releases/download/volo_1/d5_448_87.0.pth.tar',\n        crop_pct=1.15, input_size=(3, 448, 448)),\n    'volo_d5_512.sail_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://github.com/sail-sg/volo/releases/download/volo_1/d5_512_87.07.pth.tar',\n        crop_pct=1.15, input_size=(3, 512, 512)),\n})\n\n\n@register_model\ndef volo_d1_224(pretrained: bool = False, **kwargs: Any) -> VOLO:\n    \"\"\"VOLO-D1 model, Params: 27M.\"\"\"\n    model_args = dict(layers=(4, 4, 8, 2), embed_dims=(192, 384, 384, 384), num_heads=(6, 12, 12, 12), **kwargs)\n    model = _create_volo('volo_d1_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef volo_d1_384(pretrained: bool = False, **kwargs: Any) -> VOLO:\n    \"\"\"VOLO-D1 model, Params: 27M.\"\"\"\n    model_args = dict(layers=(4, 4, 8, 2), embed_dims=(192, 384, 384, 384), num_heads=(6, 12, 12, 12), **kwargs)\n    model = _create_volo('volo_d1_384', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef volo_d2_224(pretrained: bool = False, **kwargs: Any) -> VOLO:\n    \"\"\"VOLO-D2 model, Params: 59M.\"\"\"\n    model_args = 
dict(layers=(6, 4, 10, 4), embed_dims=(256, 512, 512, 512), num_heads=(8, 16, 16, 16), **kwargs)\n    model = _create_volo('volo_d2_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef volo_d2_384(pretrained: bool = False, **kwargs: Any) -> VOLO:\n    \"\"\"VOLO-D2 model, Params: 59M.\"\"\"\n    model_args = dict(layers=(6, 4, 10, 4), embed_dims=(256, 512, 512, 512), num_heads=(8, 16, 16, 16), **kwargs)\n    model = _create_volo('volo_d2_384', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef volo_d3_224(pretrained: bool = False, **kwargs: Any) -> VOLO:\n    \"\"\"VOLO-D3 model, Params: 86M.\"\"\"\n    model_args = dict(layers=(8, 8, 16, 4), embed_dims=(256, 512, 512, 512), num_heads=(8, 16, 16, 16), **kwargs)\n    model = _create_volo('volo_d3_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef volo_d3_448(pretrained: bool = False, **kwargs: Any) -> VOLO:\n    \"\"\"VOLO-D3 model, Params: 86M.\"\"\"\n    model_args = dict(layers=(8, 8, 16, 4), embed_dims=(256, 512, 512, 512), num_heads=(8, 16, 16, 16), **kwargs)\n    model = _create_volo('volo_d3_448', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef volo_d4_224(pretrained: bool = False, **kwargs: Any) -> VOLO:\n    \"\"\"VOLO-D4 model, Params: 193M.\"\"\"\n    model_args = dict(layers=(8, 8, 16, 4), embed_dims=(384, 768, 768, 768), num_heads=(12, 16, 16, 16), **kwargs)\n    model = _create_volo('volo_d4_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef volo_d4_448(pretrained: bool = False, **kwargs: Any) -> VOLO:\n    \"\"\"VOLO-D4 model, Params: 193M.\"\"\"\n    model_args = dict(layers=(8, 8, 16, 4), embed_dims=(384, 768, 768, 768), num_heads=(12, 16, 16, 16), **kwargs)\n    model = _create_volo('volo_d4_448', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef volo_d5_224(pretrained: bool = False, **kwargs: Any) -> 
VOLO:\n    \"\"\"VOLO-D5 model, Params: 296M.\n\n    stem_hidden_dim=128, the dim in patch embedding is 128 for VOLO-D5.\n    \"\"\"\n    model_args = dict(\n        layers=(12, 12, 20, 4), embed_dims=(384, 768, 768, 768), num_heads=(12, 16, 16, 16),\n        mlp_ratio=4, stem_hidden_dim=128, **kwargs)\n    model = _create_volo('volo_d5_224', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef volo_d5_448(pretrained: bool = False, **kwargs: Any) -> VOLO:\n    \"\"\"VOLO-D5 model, Params: 296M.\n\n    stem_hidden_dim=128, the dim in patch embedding is 128 for VOLO-D5.\n    \"\"\"\n    model_args = dict(\n        layers=(12, 12, 20, 4), embed_dims=(384, 768, 768, 768), num_heads=(12, 16, 16, 16),\n        mlp_ratio=4, stem_hidden_dim=128, **kwargs)\n    model = _create_volo('volo_d5_448', pretrained=pretrained, **model_args)\n    return model\n\n\n@register_model\ndef volo_d5_512(pretrained: bool = False, **kwargs: Any) -> VOLO:\n    \"\"\"VOLO-D5 model, Params: 296M.\n\n    stem_hidden_dim=128, the dim in patch embedding is 128 for VOLO-D5.\n    \"\"\"\n    model_args = dict(\n        layers=(12, 12, 20, 4), embed_dims=(384, 768, 768, 768), num_heads=(12, 16, 16, 16),\n        mlp_ratio=4, stem_hidden_dim=128, **kwargs)\n    model = _create_volo('volo_d5_512', pretrained=pretrained, **model_args)\n    return model\n"
  },
  {
    "path": "timm/models/vovnet.py",
    "content": "\"\"\" VoVNet (V1 & V2)\n\nPapers:\n* `An Energy and GPU-Computation Efficient Backbone Network` - https://arxiv.org/abs/1904.09730\n* `CenterMask : Real-Time Anchor-Free Instance Segmentation` - https://arxiv.org/abs/1911.06667\n\nLooked at  https://github.com/youngwanLEE/vovnet-detectron2 &\nhttps://github.com/stigma0617/VoVNet.pytorch/blob/master/models_vovnet/vovnet.py\nfor some reference, rewrote most of the code.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\n\nfrom typing import List, Optional, Tuple, Union, Type\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import ConvNormAct, SeparableConvNormAct, BatchNormAct2d, ClassifierHead, DropPath, \\\n    create_attn, create_norm_act_layer, calculate_drop_path_rates\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['VovNet']  # model_registry will add each entrypoint fn to this\n\n\nclass SequentialAppendList(nn.Sequential):\n    def __init__(self, *args, **kwargs):\n        super().__init__(*args)\n\n    def forward(self, x: torch.Tensor, concat_list: List[torch.Tensor]) -> torch.Tensor:\n        for i, module in enumerate(self):\n            if i == 0:\n                concat_list.append(module(x))\n            else:\n                concat_list.append(module(concat_list[-1]))\n        x = torch.cat(concat_list, dim=1)\n        return x\n\n\nclass OsaBlock(nn.Module):\n\n    def __init__(\n            self,\n            in_chs: int,\n            mid_chs: int,\n            out_chs: int,\n            layer_per_block: int,\n            residual: bool = False,\n            depthwise: bool = False,\n            attn: str = '',\n            norm_layer: Type[nn.Module] = BatchNormAct2d,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            
drop_path: Optional[nn.Module] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.residual = residual\n        self.depthwise = depthwise\n        conv_kwargs = dict(norm_layer=norm_layer, act_layer=act_layer, **dd)\n\n        next_in_chs = in_chs\n        if self.depthwise and next_in_chs != mid_chs:\n            assert not residual\n            self.conv_reduction = ConvNormAct(next_in_chs, mid_chs, 1, **conv_kwargs)\n        else:\n            self.conv_reduction = None\n\n        mid_convs = []\n        for i in range(layer_per_block):\n            if self.depthwise:\n                conv = SeparableConvNormAct(mid_chs, mid_chs, **conv_kwargs)\n            else:\n                conv = ConvNormAct(next_in_chs, mid_chs, 3, **conv_kwargs)\n            next_in_chs = mid_chs\n            mid_convs.append(conv)\n        self.conv_mid = SequentialAppendList(*mid_convs)\n\n        # feature aggregation\n        next_in_chs = in_chs + layer_per_block * mid_chs\n        self.conv_concat = ConvNormAct(next_in_chs, out_chs, **conv_kwargs)\n\n        self.attn = create_attn(attn, out_chs, **dd) if attn else None\n\n        self.drop_path = drop_path\n\n    def forward(self, x):\n        output = [x]\n        if self.conv_reduction is not None:\n            x = self.conv_reduction(x)\n        x = self.conv_mid(x, output)\n        x = self.conv_concat(x)\n        if self.attn is not None:\n            x = self.attn(x)\n        if self.drop_path is not None:\n            x = self.drop_path(x)\n        if self.residual:\n            x = x + output[0]\n        return x\n\n\nclass OsaStage(nn.Module):\n\n    def __init__(\n            self,\n            in_chs: int,\n            mid_chs: int,\n            out_chs: int,\n            block_per_stage: int,\n            layer_per_block: int,\n            downsample: bool = True,\n            residual: bool = True,\n           
 depthwise: bool = False,\n            attn: str = 'ese',\n            norm_layer: Type[nn.Module] = BatchNormAct2d,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            drop_path_rates: Optional[List[float]] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.grad_checkpointing = False\n\n        if downsample:\n            self.pool = nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=True)\n        else:\n            self.pool = None\n\n        blocks = []\n        for i in range(block_per_stage):\n            last_block = i == block_per_stage - 1\n            if drop_path_rates is not None and drop_path_rates[i] > 0.:\n                drop_path = DropPath(drop_path_rates[i])\n            else:\n                drop_path = None\n            blocks += [OsaBlock(\n                in_chs,\n                mid_chs,\n                out_chs,\n                layer_per_block,\n                residual=residual and i > 0,\n                depthwise=depthwise,\n                attn=attn if last_block else '',\n                norm_layer=norm_layer,\n                act_layer=act_layer,\n                drop_path=drop_path,\n                **dd,\n            )]\n            in_chs = out_chs\n        self.blocks = nn.Sequential(*blocks)\n\n    def forward(self, x):\n        if self.pool is not None:\n            x = self.pool(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n            x = self.blocks(x)\n        return x\n\n\nclass VovNet(nn.Module):\n\n    def __init__(\n            self,\n            cfg: dict,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'avg',\n            output_stride: int = 32,\n            norm_layer: Type[nn.Module] = BatchNormAct2d,\n            act_layer: Type[nn.Module] = nn.ReLU,\n      
      drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            device=None,\n            dtype=None,\n            **kwargs,\n    ):\n        \"\"\"\n        Args:\n            cfg (dict): Model architecture configuration\n            in_chans (int): Number of input channels (default: 3)\n            num_classes (int): Number of classifier classes (default: 1000)\n            global_pool (str): Global pooling type (default: 'avg')\n            output_stride (int): Output stride of network, one of (8, 16, 32) (default: 32)\n            norm_layer (Union[str, nn.Module]): normalization layer\n            act_layer (Union[str, nn.Module]): activation layer\n            drop_rate (float): Dropout rate (default: 0.)\n            drop_path_rate (float): Stochastic depth drop-path rate (default: 0.)\n            kwargs (dict): Extra kwargs overlayed onto cfg\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        assert output_stride == 32  # FIXME support dilation\n\n        cfg = dict(cfg, **kwargs)\n        stem_stride = cfg.get(\"stem_stride\", 4)\n        stem_chs = cfg[\"stem_chs\"]\n        stage_conv_chs = cfg[\"stage_conv_chs\"]\n        stage_out_chs = cfg[\"stage_out_chs\"]\n        block_per_stage = cfg[\"block_per_stage\"]\n        layer_per_block = cfg[\"layer_per_block\"]\n        conv_kwargs = dict(norm_layer=norm_layer, act_layer=act_layer, **dd)\n\n        # Stem module\n        last_stem_stride = stem_stride // 2\n        conv_type = SeparableConvNormAct if cfg[\"depthwise\"] else ConvNormAct\n        self.stem = nn.Sequential(*[\n            ConvNormAct(in_chans, stem_chs[0], 3, stride=2, **conv_kwargs),\n            conv_type(stem_chs[0], stem_chs[1], 3, stride=1, **conv_kwargs),\n            conv_type(stem_chs[1], stem_chs[2], 3, stride=last_stem_stride, **conv_kwargs),\n        
])\n        self.feature_info = [dict(\n            num_chs=stem_chs[1], reduction=2, module=f'stem.{1 if stem_stride == 4 else 2}')]\n        current_stride = stem_stride\n\n        # OSA stages\n        stage_dpr = calculate_drop_path_rates(drop_path_rate, block_per_stage, stagewise=True)\n        in_ch_list = stem_chs[-1:] + stage_out_chs[:-1]\n        stage_args = dict(residual=cfg[\"residual\"], depthwise=cfg[\"depthwise\"], attn=cfg[\"attn\"], **conv_kwargs)\n        stages = []\n        for i in range(4):  # num_stages\n            downsample = stem_stride == 2 or i > 0  # first stage has no stride/downsample if stem_stride is 4\n            stages += [OsaStage(\n                in_ch_list[i],\n                stage_conv_chs[i],\n                stage_out_chs[i],\n                block_per_stage[i],\n                layer_per_block,\n                downsample=downsample,\n                drop_path_rates=stage_dpr[i],\n                **stage_args,\n            )]\n            self.num_features = stage_out_chs[i]\n            current_stride *= 2 if downsample else 1\n            self.feature_info += [dict(num_chs=self.num_features, reduction=current_stride, module=f'stages.{i}')]\n\n        self.stages = nn.Sequential(*stages)\n\n        self.head_hidden_size = self.num_features\n        self.head = ClassifierHead(self.num_features, num_classes, pool_type=global_pool, drop_rate=drop_rate, **dd)\n\n        for n, m in self.named_modules():\n            if isinstance(m, nn.Conv2d):\n                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')\n            elif isinstance(m, nn.Linear):\n                nn.init.zeros_(m.bias)\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^stem',\n            blocks=r'^stages\\.(\\d+)' if coarse else r'^stages\\.(\\d+).blocks\\.(\\d+)',\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        for s 
in self.stages:\n            s.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.fc\n\n    def reset_classifier(self, num_classes, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, global_pool)\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to compatible intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW',), 'Output shape must be NCHW.'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(5, indices)\n\n        # forward pass\n        feat_idx = 0\n        x = self.stem[:-1](x)\n        if feat_idx in take_indices:\n            intermediates.append(x)\n\n        x = self.stem[-1](x)\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            stages = self.stages\n        else:\n            stages = self.stages[:max_index]\n\n        for feat_idx, stage in enumerate(stages, start=1):\n            x = stage(x)\n            if feat_idx in take_indices:\n                intermediates.append(x)\n\n        if intermediates_only:\n            
return intermediates\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(5, indices)\n        self.stages = self.stages[:max_index]  # truncate blocks w/ stem as idx 0\n        if prune_head:\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        return self.stages(x)\n\n    def forward_head(self, x, pre_logits: bool = False):\n        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\n# model cfgs adapted from https://github.com/youngwanLEE/vovnet-detectron2 &\n# https://github.com/stigma0617/VoVNet.pytorch/blob/master/models_vovnet/vovnet.py\nmodel_cfgs = dict(\n    vovnet39a=dict(\n        stem_chs=[64, 64, 128],\n        stage_conv_chs=[128, 160, 192, 224],\n        stage_out_chs=[256, 512, 768, 1024],\n        layer_per_block=5,\n        block_per_stage=[1, 1, 2, 2],\n        residual=False,\n        depthwise=False,\n        attn='',\n    ),\n    vovnet57a=dict(\n        stem_chs=[64, 64, 128],\n        stage_conv_chs=[128, 160, 192, 224],\n        stage_out_chs=[256, 512, 768, 1024],\n        layer_per_block=5,\n        block_per_stage=[1, 1, 4, 3],\n        residual=False,\n        depthwise=False,\n        attn='',\n\n    ),\n    ese_vovnet19b_slim_dw=dict(\n        stem_chs=[64, 64, 64],\n        stage_conv_chs=[64, 80, 96, 112],\n        stage_out_chs=[112, 256, 384, 512],\n        layer_per_block=3,\n        block_per_stage=[1, 1, 1, 1],\n        residual=True,\n        depthwise=True,\n        attn='ese',\n\n 
   ),\n    ese_vovnet19b_dw=dict(\n        stem_chs=[64, 64, 64],\n        stage_conv_chs=[128, 160, 192, 224],\n        stage_out_chs=[256, 512, 768, 1024],\n        layer_per_block=3,\n        block_per_stage=[1, 1, 1, 1],\n        residual=True,\n        depthwise=True,\n        attn='ese',\n    ),\n    ese_vovnet19b_slim=dict(\n        stem_chs=[64, 64, 128],\n        stage_conv_chs=[64, 80, 96, 112],\n        stage_out_chs=[112, 256, 384, 512],\n        layer_per_block=3,\n        block_per_stage=[1, 1, 1, 1],\n        residual=True,\n        depthwise=False,\n        attn='ese',\n    ),\n    ese_vovnet19b=dict(\n        stem_chs=[64, 64, 128],\n        stage_conv_chs=[128, 160, 192, 224],\n        stage_out_chs=[256, 512, 768, 1024],\n        layer_per_block=3,\n        block_per_stage=[1, 1, 1, 1],\n        residual=True,\n        depthwise=False,\n        attn='ese',\n\n    ),\n    ese_vovnet39b=dict(\n        stem_chs=[64, 64, 128],\n        stage_conv_chs=[128, 160, 192, 224],\n        stage_out_chs=[256, 512, 768, 1024],\n        layer_per_block=5,\n        block_per_stage=[1, 1, 2, 2],\n        residual=True,\n        depthwise=False,\n        attn='ese',\n    ),\n    ese_vovnet57b=dict(\n        stem_chs=[64, 64, 128],\n        stage_conv_chs=[128, 160, 192, 224],\n        stage_out_chs=[256, 512, 768, 1024],\n        layer_per_block=5,\n        block_per_stage=[1, 1, 4, 3],\n        residual=True,\n        depthwise=False,\n        attn='ese',\n\n    ),\n    ese_vovnet99b=dict(\n        stem_chs=[64, 64, 128],\n        stage_conv_chs=[128, 160, 192, 224],\n        stage_out_chs=[256, 512, 768, 1024],\n        layer_per_block=5,\n        block_per_stage=[1, 3, 9, 3],\n        residual=True,\n        depthwise=False,\n        attn='ese',\n    ),\n    eca_vovnet39b=dict(\n        stem_chs=[64, 64, 128],\n        stage_conv_chs=[128, 160, 192, 224],\n        stage_out_chs=[256, 512, 768, 1024],\n        layer_per_block=5,\n        block_per_stage=[1, 1, 
2, 2],\n        residual=True,\n        depthwise=False,\n        attn='eca',\n    ),\n)\nmodel_cfgs['ese_vovnet39b_evos'] = model_cfgs['ese_vovnet39b']\n\n\ndef _create_vovnet(variant, pretrained=False, **kwargs):\n    return build_model_with_cfg(\n        VovNet,\n        variant,\n        pretrained,\n        model_cfg=model_cfgs[variant],\n        feature_cfg=dict(flatten_sequential=True),\n        **kwargs,\n    )\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),\n        'crop_pct': 0.875, 'interpolation': 'bicubic',\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'stem.0.conv', 'classifier': 'head.fc', \n        'license': 'apache-2.0', **kwargs,\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'vovnet39a.untrained': _cfg(url=''),\n    'vovnet57a.untrained': _cfg(url=''),\n    'ese_vovnet19b_slim_dw.untrained': _cfg(url=''),\n    'ese_vovnet19b_dw.ra_in1k': _cfg(\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=0.95),\n    'ese_vovnet19b_slim.untrained': _cfg(url=''),\n    'ese_vovnet39b.ra_in1k': _cfg(\n        hf_hub_id='timm/',\n        test_input_size=(3, 288, 288), test_crop_pct=0.95),\n    'ese_vovnet57b.ra4_e3600_r256_in1k': _cfg(\n        hf_hub_id='timm/',\n        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),\n        crop_pct=0.95, input_size=(3, 256, 256), pool_size=(8, 8),\n        test_input_size=(3, 320, 320), test_crop_pct=1.0\n    ),\n    'ese_vovnet99b.untrained': _cfg(url=''),\n    'eca_vovnet39b.untrained': _cfg(url=''),\n    'ese_vovnet39b_evos.untrained': _cfg(url=''),\n})\n\n\n@register_model\ndef vovnet39a(pretrained=False, **kwargs) -> VovNet:\n    return _create_vovnet('vovnet39a', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef vovnet57a(pretrained=False, **kwargs) -> VovNet:\n    return _create_vovnet('vovnet57a', pretrained=pretrained, 
**kwargs)\n\n\n@register_model\ndef ese_vovnet19b_slim_dw(pretrained=False, **kwargs) -> VovNet:\n    return _create_vovnet('ese_vovnet19b_slim_dw', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef ese_vovnet19b_dw(pretrained=False, **kwargs) -> VovNet:\n    return _create_vovnet('ese_vovnet19b_dw', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef ese_vovnet19b_slim(pretrained=False, **kwargs) -> VovNet:\n    return _create_vovnet('ese_vovnet19b_slim', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef ese_vovnet39b(pretrained=False, **kwargs) -> VovNet:\n    return _create_vovnet('ese_vovnet39b', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef ese_vovnet57b(pretrained=False, **kwargs) -> VovNet:\n    return _create_vovnet('ese_vovnet57b', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef ese_vovnet99b(pretrained=False, **kwargs) -> VovNet:\n    return _create_vovnet('ese_vovnet99b', pretrained=pretrained, **kwargs)\n\n\n@register_model\ndef eca_vovnet39b(pretrained=False, **kwargs) -> VovNet:\n    return _create_vovnet('eca_vovnet39b', pretrained=pretrained, **kwargs)\n\n\n# Experimental Models\n\n@register_model\ndef ese_vovnet39b_evos(pretrained=False, **kwargs) -> VovNet:\n    def norm_act_fn(num_features, **nkwargs):\n        return create_norm_act_layer('evonorms0', num_features, jit=False, **nkwargs)\n    return _create_vovnet('ese_vovnet39b_evos', pretrained=pretrained, norm_layer=norm_act_fn, **kwargs)\n"
  },
  {
    "path": "timm/models/xception.py",
    "content": "\"\"\"\nPorted to pytorch thanks to [tstandley](https://github.com/tstandley/Xception-PyTorch)\n\n@author: tstandley\nAdapted by cadene\n\nCreates an Xception Model as defined in:\n\nFrancois Chollet\nXception: Deep Learning with Depthwise Separable Convolutions\nhttps://arxiv.org/pdf/1610.02357.pdf\n\nThis weights ported from the Keras implementation. Achieves the following performance on the validation set:\n\nLoss:0.9173 Prec@1:78.892 Prec@5:94.292\n\nREMEMBER to set your image size to 3x299x299 for both test and validation\n\nnormalize = transforms.Normalize(mean=[0.5, 0.5, 0.5],\n                                  std=[0.5, 0.5, 0.5])\n\nThe resize parameter of the validation transform should be 333, and make sure to center crop at 299x299\n\"\"\"\nimport torch.jit\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom typing import Optional\n\nfrom timm.layers import create_classifier\nfrom ._builder import build_model_with_cfg\nfrom ._registry import register_model, generate_default_cfgs, register_model_deprecations\n\n__all__ = ['Xception']\n\n\nclass SeparableConv2d(nn.Module):\n    def __init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            kernel_size: int = 1,\n            stride: int = 1,\n            padding: int = 0,\n            dilation: int = 1,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        self.conv1 = nn.Conv2d(\n            in_channels,\n            in_channels,\n            kernel_size,\n            stride,\n            padding,\n            dilation,\n            groups=in_channels,\n            bias=False,\n            **dd,\n        )\n        self.pointwise = nn.Conv2d(in_channels, out_channels, 1, 1, 0, 1, 1, bias=False, **dd)\n\n    def forward(self, x):\n        x = self.conv1(x)\n        x = self.pointwise(x)\n        return x\n\n\nclass Block(nn.Module):\n    def 
__init__(\n            self,\n            in_channels: int,\n            out_channels: int,\n            reps: int,\n            strides: int = 1,\n            start_with_relu: bool = True,\n            grow_first: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n\n        if out_channels != in_channels or strides != 1:\n            self.skip = nn.Conv2d(in_channels, out_channels, 1, stride=strides, bias=False, **dd)\n            self.skipbn = nn.BatchNorm2d(out_channels, **dd)\n        else:\n            self.skip = None\n\n        rep = []\n        for i in range(reps):\n            if grow_first:\n                inc = in_channels if i == 0 else out_channels\n                outc = out_channels\n            else:\n                inc = in_channels\n                outc = in_channels if i < (reps - 1) else out_channels\n            rep.append(nn.ReLU(inplace=True))\n            rep.append(SeparableConv2d(inc, outc, 3, stride=1, padding=1, **dd))\n            rep.append(nn.BatchNorm2d(outc, **dd))\n\n        if not start_with_relu:\n            rep = rep[1:]\n        else:\n            rep[0] = nn.ReLU(inplace=False)\n\n        if strides != 1:\n            rep.append(nn.MaxPool2d(3, strides, 1))\n        self.rep = nn.Sequential(*rep)\n\n    def forward(self, inp):\n        x = self.rep(inp)\n\n        if self.skip is not None:\n            skip = self.skip(inp)\n            skip = self.skipbn(skip)\n        else:\n            skip = inp\n\n        x += skip\n        return x\n\n\nclass Xception(nn.Module):\n    \"\"\"\n    Xception optimized for the ImageNet dataset, as specified in\n    https://arxiv.org/pdf/1610.02357.pdf\n    \"\"\"\n\n    def __init__(\n            self,\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            drop_rate: float = 0.,\n            global_pool: str = 'avg',\n            device=None,\n            
dtype=None,\n    ):\n        \"\"\" Constructor\n        Args:\n            num_classes: number of classes\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        self.drop_rate = drop_rate\n        self.global_pool = global_pool\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.num_features = self.head_hidden_size = 2048\n\n        self.conv1 = nn.Conv2d(in_chans, 32, 3, 2, 0, bias=False, **dd)\n        self.bn1 = nn.BatchNorm2d(32, **dd)\n        self.act1 = nn.ReLU(inplace=True)\n\n        self.conv2 = nn.Conv2d(32, 64, 3, bias=False, **dd)\n        self.bn2 = nn.BatchNorm2d(64, **dd)\n        self.act2 = nn.ReLU(inplace=True)\n\n        self.block1 = Block(64, 128, 2, 2, start_with_relu=False, **dd)\n        self.block2 = Block(128, 256, 2, 2, **dd)\n        self.block3 = Block(256, 728, 2, 2, **dd)\n\n        self.block4 = Block(728, 728, 3, 1, **dd)\n        self.block5 = Block(728, 728, 3, 1, **dd)\n        self.block6 = Block(728, 728, 3, 1, **dd)\n        self.block7 = Block(728, 728, 3, 1, **dd)\n\n        self.block8 = Block(728, 728, 3, 1, **dd)\n        self.block9 = Block(728, 728, 3, 1, **dd)\n        self.block10 = Block(728, 728, 3, 1, **dd)\n        self.block11 = Block(728, 728, 3, 1, **dd)\n\n        self.block12 = Block(728, 1024, 2, 2, grow_first=False, **dd)\n\n        self.conv3 = SeparableConv2d(1024, 1536, 3, 1, 1, **dd)\n        self.bn3 = nn.BatchNorm2d(1536, **dd)\n        self.act3 = nn.ReLU(inplace=True)\n\n        self.conv4 = SeparableConv2d(1536, self.num_features, 3, 1, 1, **dd)\n        self.bn4 = nn.BatchNorm2d(self.num_features, **dd)\n        self.act4 = nn.ReLU(inplace=True)\n        self.feature_info = [\n            dict(num_chs=64, reduction=2, module='act2'),\n            dict(num_chs=128, reduction=4, module='block2.rep.0'),\n            dict(num_chs=256, reduction=8, module='block3.rep.0'),\n            dict(num_chs=728, reduction=16, 
module='block12.rep.0'),\n            dict(num_chs=2048, reduction=32, module='act4'),\n        ]\n\n        self.global_pool, self.fc = create_classifier(self.num_features, self.num_classes, pool_type=global_pool, **dd)\n\n        # #------- init weights --------\n        for m in self.modules():\n            if isinstance(m, nn.Conv2d):\n                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')\n            elif isinstance(m, nn.BatchNorm2d):\n                m.weight.data.fill_(1)\n                m.bias.data.zero_()\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^conv[12]|bn[12]',\n            blocks=[\n                (r'^block(\\d+)', None),\n                (r'^conv[34]|bn[34]', (99,)),\n            ],\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        assert not enable, \"gradient checkpointing not supported\"\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: str = 'avg'):\n        self.num_classes = num_classes\n        self.global_pool, self.fc = create_classifier(self.num_features, self.num_classes, pool_type=global_pool)\n\n    def forward_features(self, x):\n        x = self.conv1(x)\n        x = self.bn1(x)\n        x = self.act1(x)\n\n        x = self.conv2(x)\n        x = self.bn2(x)\n        x = self.act2(x)\n\n        x = self.block1(x)\n        x = self.block2(x)\n        x = self.block3(x)\n        x = self.block4(x)\n        x = self.block5(x)\n        x = self.block6(x)\n        x = self.block7(x)\n        x = self.block8(x)\n        x = self.block9(x)\n        x = self.block10(x)\n        x = self.block11(x)\n        x = self.block12(x)\n\n        x = self.conv3(x)\n        x = self.bn3(x)\n        x = self.act3(x)\n\n        x = self.conv4(x)\n        x = self.bn4(x)\n        x = self.act4(x)\n      
  return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        x = self.global_pool(x)\n        if self.drop_rate:\n            F.dropout(x, self.drop_rate, training=self.training)\n        return x if pre_logits else self.fc(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _xception(variant, pretrained=False, **kwargs):\n    return build_model_with_cfg(\n        Xception, variant, pretrained,\n        feature_cfg=dict(feature_cls='hook'),\n        **kwargs)\n\n\ndefault_cfgs = generate_default_cfgs({\n    'legacy_xception.tf_in1k': {\n        'url': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-cadene/xception-43020ad28.pth',\n        'input_size': (3, 299, 299),\n        'pool_size': (10, 10),\n        'crop_pct': 0.8975,\n        'interpolation': 'bicubic',\n        'mean': (0.5, 0.5, 0.5),\n        'std': (0.5, 0.5, 0.5),\n        'num_classes': 1000,\n        'first_conv': 'conv1',\n        'classifier': 'fc',\n        'license': 'apache-2.0',\n        # The resize parameter of the validation transform should be 333, and make sure to center crop at 299x299\n    }\n})\n\n\n@register_model\ndef legacy_xception(pretrained=False, **kwargs) -> Xception:\n    return _xception('legacy_xception', pretrained=pretrained, **kwargs)\n\n\nregister_model_deprecations(__name__, {\n    'xception': 'legacy_xception',\n})\n"
  },
  {
    "path": "timm/models/xception_aligned.py",
    "content": "\"\"\"Pytorch impl of Aligned Xception 41, 65, 71\n\nThis is a correct, from scratch impl of Aligned Xception (Deeplab) models compatible with TF weights at\nhttps://github.com/tensorflow/models/blob/master/research/deeplab/g3doc/model_zoo.md\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom functools import partial\nfrom typing import List, Dict, Type, Optional\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD\nfrom timm.layers import ClassifierHead, ConvNormAct, DropPath, PadType, create_conv2d, get_norm_act_layer\nfrom timm.layers.helpers import to_3tuple\nfrom ._builder import build_model_with_cfg\nfrom ._manipulate import checkpoint_seq\nfrom ._registry import register_model, generate_default_cfgs\n\n__all__ = ['XceptionAligned']\n\n\nclass SeparableConv2d(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 3,\n            stride: int = 1,\n            dilation: int = 1,\n            padding: PadType = '',\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.kernel_size = kernel_size\n        self.dilation = dilation\n\n        # depthwise convolution\n        self.conv_dw = create_conv2d(\n            in_chs,\n            in_chs,\n            kernel_size,\n            stride=stride,\n            padding=padding,\n            dilation=dilation,\n            depthwise=True,\n            **dd,\n        )\n        self.bn_dw = norm_layer(in_chs, **dd)\n        self.act_dw = act_layer(inplace=True) if act_layer is not None else nn.Identity()\n\n        # pointwise convolution\n        self.conv_pw = create_conv2d(in_chs, out_chs, kernel_size=1, **dd)\n        self.bn_pw = 
norm_layer(out_chs, **dd)\n        self.act_pw = act_layer(inplace=True) if act_layer is not None else nn.Identity()\n\n    def forward(self, x):\n        x = self.conv_dw(x)\n        x = self.bn_dw(x)\n        x = self.act_dw(x)\n        x = self.conv_pw(x)\n        x = self.bn_pw(x)\n        x = self.act_pw(x)\n        return x\n\n\nclass PreSeparableConv2d(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            kernel_size: int = 3,\n            stride: int = 1,\n            dilation: int = 1,\n            padding: PadType = '',\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            first_act: bool = True,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        norm_act_layer = get_norm_act_layer(norm_layer, act_layer=act_layer)\n        self.kernel_size = kernel_size\n        self.dilation = dilation\n\n        self.norm = norm_act_layer(in_chs, inplace=True, **dd) if first_act else nn.Identity()\n        # depthwise convolution\n        self.conv_dw = create_conv2d(\n            in_chs,\n            in_chs,\n            kernel_size,\n            stride=stride,\n            padding=padding,\n            dilation=dilation,\n            depthwise=True,\n            **dd,\n        )\n\n        # pointwise convolution\n        self.conv_pw = create_conv2d(in_chs, out_chs, kernel_size=1, **dd)\n\n    def forward(self, x):\n        x = self.norm(x)\n        x = self.conv_dw(x)\n        x = self.conv_pw(x)\n        return x\n\n\nclass XceptionModule(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            stride: int = 1,\n            dilation: int = 1,\n            pad_type: PadType = '',\n            start_with_relu: bool = True,\n            no_skip: bool = False,\n            act_layer: 
Type[nn.Module] = nn.ReLU,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            drop_path: Optional[nn.Module] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_chs = to_3tuple(out_chs)\n        self.in_channels = in_chs\n        self.out_channels = out_chs[-1]\n        self.no_skip = no_skip\n        if not no_skip and (self.out_channels != self.in_channels or stride != 1):\n            self.shortcut = ConvNormAct(\n                in_chs,\n                self.out_channels,\n                1,\n                stride=stride,\n                norm_layer=norm_layer,\n                apply_act=False,\n                **dd,\n            )\n        else:\n            self.shortcut = None\n\n        separable_act_layer = None if start_with_relu else act_layer\n        self.stack = nn.Sequential()\n        for i in range(3):\n            if start_with_relu:\n                self.stack.add_module(f'act{i + 1}', act_layer(inplace=i > 0))\n            self.stack.add_module(f'conv{i + 1}', SeparableConv2d(\n                in_chs,\n                out_chs[i],\n                3,\n                stride=stride if i == 2 else 1,\n                dilation=dilation,\n                padding=pad_type,\n                act_layer=separable_act_layer,\n                norm_layer=norm_layer,\n                **dd,\n            ))\n            in_chs = out_chs[i]\n\n        self.drop_path = drop_path\n\n    def forward(self, x):\n        skip = x\n        x = self.stack(x)\n        if self.shortcut is not None:\n            skip = self.shortcut(skip)\n        if not self.no_skip:\n            if self.drop_path is not None:\n                x = self.drop_path(x)\n            x = x + skip\n        return x\n\n\nclass PreXceptionModule(nn.Module):\n    def __init__(\n            self,\n            in_chs: int,\n            out_chs: int,\n            stride: int = 
1,\n            dilation: int = 1,\n            pad_type: PadType = '',\n            no_skip: bool = False,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            drop_path: Optional[nn.Module] = None,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        out_chs = to_3tuple(out_chs)\n        self.in_channels = in_chs\n        self.out_channels = out_chs[-1]\n        self.no_skip = no_skip\n        if not no_skip and (self.out_channels != self.in_channels or stride != 1):\n            self.shortcut = create_conv2d(in_chs, self.out_channels, 1, stride=stride, **dd)\n        else:\n            self.shortcut = nn.Identity()\n\n        self.norm = get_norm_act_layer(norm_layer, act_layer=act_layer)(in_chs, inplace=True, **dd)\n        self.stack = nn.Sequential()\n        for i in range(3):\n            self.stack.add_module(f'conv{i + 1}', PreSeparableConv2d(\n                in_chs,\n                out_chs[i],\n                3,\n                stride=stride if i == 2 else 1,\n                dilation=dilation,\n                padding=pad_type,\n                act_layer=act_layer,\n                norm_layer=norm_layer,\n                first_act=i > 0,\n                **dd,\n            ))\n            in_chs = out_chs[i]\n\n        self.drop_path = drop_path\n\n    def forward(self, x):\n        x = self.norm(x)\n        skip = x\n        x = self.stack(x)\n        if not self.no_skip:\n            if self.drop_path is not None:\n                x = self.drop_path(x)\n            x = x + self.shortcut(skip)\n        return x\n\n\nclass XceptionAligned(nn.Module):\n    \"\"\"Modified Aligned Xception\n    \"\"\"\n\n    def __init__(\n            self,\n            block_cfg: List[Dict],\n            num_classes: int = 1000,\n            in_chans: int = 3,\n            output_stride: int = 32,\n           
 preact: bool = False,\n            act_layer: Type[nn.Module] = nn.ReLU,\n            norm_layer: Type[nn.Module] = nn.BatchNorm2d,\n            drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            global_pool: str = 'avg',\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert output_stride in (8, 16, 32)\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.drop_rate = drop_rate\n        self.grad_checkpointing = False\n\n        layer_args = dict(act_layer=act_layer, norm_layer=norm_layer, **dd)\n        self.stem = nn.Sequential(*[\n            ConvNormAct(in_chans, 32, kernel_size=3, stride=2, **layer_args),\n            create_conv2d(32, 64, kernel_size=3, stride=1, **dd) if preact else\n            ConvNormAct(32, 64, kernel_size=3, stride=1, **layer_args)\n        ])\n\n        curr_dilation = 1\n        curr_stride = 2\n        self.feature_info = []\n        self.blocks = nn.Sequential()\n        module_fn = PreXceptionModule if preact else XceptionModule\n        net_num_blocks = len(block_cfg)\n        net_block_idx = 0\n        for i, b in enumerate(block_cfg):\n            block_dpr = drop_path_rate * net_block_idx / (net_num_blocks - 1)  # stochastic depth linear decay rule\n            b['drop_path'] = DropPath(block_dpr) if block_dpr > 0. 
else None\n            b['dilation'] = curr_dilation\n            if b['stride'] > 1:\n                name = f'blocks.{i}.stack.conv2' if preact else f'blocks.{i}.stack.act3'\n                self.feature_info += [dict(num_chs=to_3tuple(b['out_chs'])[-2], reduction=curr_stride, module=name)]\n                next_stride = curr_stride * b['stride']\n                if next_stride > output_stride:\n                    curr_dilation *= b['stride']\n                    b['stride'] = 1\n                else:\n                    curr_stride = next_stride\n            self.blocks.add_module(str(i), module_fn(**b, **layer_args))\n            self.num_features = self.blocks[-1].out_channels\n            net_block_idx += 1\n\n        self.feature_info += [dict(\n            num_chs=self.num_features, reduction=curr_stride, module='blocks.' + str(len(self.blocks) - 1))]\n        self.act = act_layer(inplace=True) if preact else nn.Identity()\n        self.head_hidden_size = self.num_features\n        self.head = ClassifierHead(\n            in_features=self.num_features,\n            num_classes=num_classes,\n            pool_type=global_pool,\n            drop_rate=drop_rate,\n            **dd,\n        )\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^stem',\n            blocks=r'^blocks\\.(\\d+)',\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head.fc\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        self.head.reset(num_classes, pool_type=global_pool)\n\n    def forward_features(self, x):\n        x = self.stem(x)\n        if self.grad_checkpointing and not torch.jit.is_scripting():\n            x = checkpoint_seq(self.blocks, x)\n        else:\n        
    x = self.blocks(x)\n        x = self.act(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        return self.head(x, pre_logits=pre_logits) if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef _xception(variant, pretrained=False, **kwargs):\n    return build_model_with_cfg(\n        XceptionAligned,\n        variant,\n        pretrained,\n        feature_cfg=dict(flatten_sequential=True, feature_cls='hook'),\n        **kwargs,\n    )\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 299, 299), 'pool_size': (10, 10),\n        'crop_pct': 0.903, 'interpolation': 'bicubic',\n        'mean': IMAGENET_INCEPTION_MEAN, 'std': IMAGENET_INCEPTION_STD,\n        'first_conv': 'stem.0.conv', 'classifier': 'head.fc', 'license': 'apache-2.0',\n        **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    'xception65.ra3_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.94,\n    ),\n\n    'xception41.tf_in1k': _cfg(hf_hub_id='timm/'),\n    'xception65.tf_in1k': _cfg(hf_hub_id='timm/'),\n    'xception71.tf_in1k': _cfg(hf_hub_id='timm/'),\n\n    'xception41p.ra3_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.94,\n    ),\n    'xception65p.ra3_in1k': _cfg(\n        hf_hub_id='timm/',\n        crop_pct=0.94,\n    ),\n})\n\n\n@register_model\ndef xception41(pretrained=False, **kwargs) -> XceptionAligned:\n    \"\"\" Modified Aligned Xception-41\n    \"\"\"\n    block_cfg = [\n        # entry flow\n        dict(in_chs=64, out_chs=128, stride=2),\n        dict(in_chs=128, out_chs=256, stride=2),\n        dict(in_chs=256, out_chs=728, stride=2),\n        # middle flow\n        *([dict(in_chs=728, out_chs=728, stride=1)] * 8),\n        # exit flow\n        dict(in_chs=728, out_chs=(728, 1024, 1024), stride=2),\n        dict(in_chs=1024, 
out_chs=(1536, 1536, 2048), stride=1, no_skip=True, start_with_relu=False),\n    ]\n    model_args = dict(block_cfg=block_cfg, norm_layer=partial(nn.BatchNorm2d, eps=.001, momentum=.1))\n    return _xception('xception41', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef xception65(pretrained=False, **kwargs) -> XceptionAligned:\n    \"\"\" Modified Aligned Xception-65\n    \"\"\"\n    block_cfg = [\n        # entry flow\n        dict(in_chs=64, out_chs=128, stride=2),\n        dict(in_chs=128, out_chs=256, stride=2),\n        dict(in_chs=256, out_chs=728, stride=2),\n        # middle flow\n        *([dict(in_chs=728, out_chs=728, stride=1)] * 16),\n        # exit flow\n        dict(in_chs=728, out_chs=(728, 1024, 1024), stride=2),\n        dict(in_chs=1024, out_chs=(1536, 1536, 2048), stride=1, no_skip=True, start_with_relu=False),\n    ]\n    model_args = dict(block_cfg=block_cfg, norm_layer=partial(nn.BatchNorm2d, eps=.001, momentum=.1))\n    return _xception('xception65', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef xception71(pretrained=False, **kwargs) -> XceptionAligned:\n    \"\"\" Modified Aligned Xception-71\n    \"\"\"\n    block_cfg = [\n        # entry flow\n        dict(in_chs=64, out_chs=128, stride=2),\n        dict(in_chs=128, out_chs=256, stride=1),\n        dict(in_chs=256, out_chs=256, stride=2),\n        dict(in_chs=256, out_chs=728, stride=1),\n        dict(in_chs=728, out_chs=728, stride=2),\n        # middle flow\n        *([dict(in_chs=728, out_chs=728, stride=1)] * 16),\n        # exit flow\n        dict(in_chs=728, out_chs=(728, 1024, 1024), stride=2),\n        dict(in_chs=1024, out_chs=(1536, 1536, 2048), stride=1, no_skip=True, start_with_relu=False),\n    ]\n    model_args = dict(block_cfg=block_cfg, norm_layer=partial(nn.BatchNorm2d, eps=.001, momentum=.1))\n    return _xception('xception71', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef 
xception41p(pretrained=False, **kwargs) -> XceptionAligned:\n    \"\"\" Modified Aligned Xception-41 w/ Pre-Act\n    \"\"\"\n    block_cfg = [\n        # entry flow\n        dict(in_chs=64, out_chs=128, stride=2),\n        dict(in_chs=128, out_chs=256, stride=2),\n        dict(in_chs=256, out_chs=728, stride=2),\n        # middle flow\n        *([dict(in_chs=728, out_chs=728, stride=1)] * 8),\n        # exit flow\n        dict(in_chs=728, out_chs=(728, 1024, 1024), stride=2),\n        dict(in_chs=1024, out_chs=(1536, 1536, 2048), no_skip=True, stride=1),\n    ]\n    model_args = dict(block_cfg=block_cfg, preact=True, norm_layer=nn.BatchNorm2d)\n    return _xception('xception41p', pretrained=pretrained, **dict(model_args, **kwargs))\n\n\n@register_model\ndef xception65p(pretrained=False, **kwargs) -> XceptionAligned:\n    \"\"\" Modified Aligned Xception-65 w/ Pre-Act\n    \"\"\"\n    block_cfg = [\n        # entry flow\n        dict(in_chs=64, out_chs=128, stride=2),\n        dict(in_chs=128, out_chs=256, stride=2),\n        dict(in_chs=256, out_chs=728, stride=2),\n        # middle flow\n        *([dict(in_chs=728, out_chs=728, stride=1)] * 16),\n        # exit flow\n        dict(in_chs=728, out_chs=(728, 1024, 1024), stride=2),\n        dict(in_chs=1024, out_chs=(1536, 1536, 2048), stride=1, no_skip=True),\n    ]\n    model_args = dict(\n        block_cfg=block_cfg, preact=True, norm_layer=partial(nn.BatchNorm2d, eps=.001, momentum=.1))\n    return _xception('xception65p', pretrained=pretrained, **dict(model_args, **kwargs))\n"
  },
  {
    "path": "timm/models/xcit.py",
    "content": "\"\"\" Cross-Covariance Image Transformer (XCiT) in PyTorch\n\nPaper:\n    - https://arxiv.org/abs/2106.09681\n\nSame as the official implementation, with some minor adaptations, original copyright below\n    - https://github.com/facebookresearch/xcit/blob/master/xcit.py\n\nModifications and additions for timm hacked together by / Copyright 2021, Ross Wightman\n\"\"\"\n# Copyright (c) 2015-present, Facebook, Inc.\n# All rights reserved.\n\nimport math\nfrom functools import partial\nfrom typing import List, Optional, Tuple, Union, Type, Any\n\nimport torch\nimport torch.nn as nn\n\nfrom timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD\nfrom timm.layers import DropPath, trunc_normal_, to_2tuple, use_fused_attn, Mlp\nfrom ._builder import build_model_with_cfg\nfrom ._features import feature_take_indices\nfrom ._features_fx import register_notrace_module\nfrom ._manipulate import checkpoint\nfrom ._registry import register_model, generate_default_cfgs, register_model_deprecations\nfrom .cait import ClassAttn\n\n__all__ = ['Xcit']  # model_registry will add each entrypoint fn to this\n\n\n@register_notrace_module  # reason: FX can't symbolically trace torch.arange in forward method\nclass PositionalEncodingFourier(nn.Module):\n    \"\"\"\n    Positional encoding relying on a fourier kernel matching the one used in the \"Attention is all you Need\" paper.\n    Based on the official XCiT code\n        - https://github.com/facebookresearch/xcit/blob/master/xcit.py\n    \"\"\"\n\n    def __init__(\n            self,\n            hidden_dim: int = 32,\n            dim: int = 768,\n            temperature: float = 10000,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.token_projection = nn.Conv2d(hidden_dim * 2, dim, kernel_size=1, **dd)\n        self.scale = 2 * math.pi\n        self.temperature = temperature\n        self.hidden_dim = 
hidden_dim\n        self.dim = dim\n        self.eps = 1e-6\n\n    def forward(self, B: int, H: int, W: int):\n        device = self.token_projection.weight.device\n        dtype = self.token_projection.weight.dtype\n        y_embed = torch.arange(1, H + 1, device=device).to(torch.float32).unsqueeze(1).repeat(1, 1, W)\n        x_embed = torch.arange(1, W + 1, device=device).to(torch.float32).repeat(1, H, 1)\n        y_embed = y_embed / (y_embed[:, -1:, :] + self.eps) * self.scale\n        x_embed = x_embed / (x_embed[:, :, -1:] + self.eps) * self.scale\n        dim_t = torch.arange(self.hidden_dim, device=device).to(torch.float32)\n        dim_t = self.temperature ** (2 * torch.div(dim_t, 2, rounding_mode='floor') / self.hidden_dim)\n        pos_x = x_embed[:, :, :, None] / dim_t\n        pos_y = y_embed[:, :, :, None] / dim_t\n        pos_x = torch.stack([pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()], dim=4).flatten(3)\n        pos_y = torch.stack([pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()], dim=4).flatten(3)\n        pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2)\n        pos = self.token_projection(pos.to(dtype))\n        return pos.repeat(B, 1, 1, 1)  # (B, C, H, W)\n\n\ndef conv3x3(in_planes, out_planes, stride=1, device=None, dtype=None):\n    \"\"\"3x3 convolution + batch norm\"\"\"\n    dd = {'device': device, 'dtype': dtype}\n    return torch.nn.Sequential(\n        nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False, **dd),\n        nn.BatchNorm2d(out_planes, **dd)\n    )\n\n\nclass ConvPatchEmbed(nn.Module):\n    \"\"\"Image to Patch Embedding using multiple convolutional layers\"\"\"\n\n    def __init__(\n            self,\n            img_size: int = 224,\n            patch_size: int = 16,\n            in_chans: int = 3,\n            embed_dim: int = 768,\n            act_layer: Type[nn.Module] = nn.GELU,\n            device=None,\n            dtype=None,\n    ):\n        dd = 
{'device': device, 'dtype': dtype}\n        super().__init__()\n        img_size = to_2tuple(img_size)\n        num_patches = (img_size[1] // patch_size) * (img_size[0] // patch_size)\n        self.img_size = img_size\n        self.patch_size = patch_size\n        self.num_patches = num_patches\n\n        if patch_size == 16:\n            self.proj = torch.nn.Sequential(\n                conv3x3(in_chans, embed_dim // 8, 2, **dd),\n                act_layer(),\n                conv3x3(embed_dim // 8, embed_dim // 4, 2, **dd),\n                act_layer(),\n                conv3x3(embed_dim // 4, embed_dim // 2, 2, **dd),\n                act_layer(),\n                conv3x3(embed_dim // 2, embed_dim, 2, **dd),\n            )\n        elif patch_size == 8:\n            self.proj = torch.nn.Sequential(\n                conv3x3(in_chans, embed_dim // 4, 2, **dd),\n                act_layer(),\n                conv3x3(embed_dim // 4, embed_dim // 2, 2, **dd),\n                act_layer(),\n                conv3x3(embed_dim // 2, embed_dim, 2, **dd),\n            )\n        else:\n            raise('For convolutional projection, patch size has to be in [8, 16]')\n\n    def forward(self, x):\n        x = self.proj(x)\n        Hp, Wp = x.shape[2], x.shape[3]\n        x = x.flatten(2).transpose(1, 2)  # (B, N, C)\n        return x, (Hp, Wp)\n\n\nclass LPI(nn.Module):\n    \"\"\"\n    Local Patch Interaction module that allows explicit communication between tokens in 3x3 windows to augment the\n    implicit communication performed by the block diagonal scatter attention. 
Implemented using 2 layers of separable\n    3x3 convolutions with GeLU and BatchNorm2d\n    \"\"\"\n\n    def __init__(\n            self,\n            in_features: int,\n            out_features: Optional[int] = None,\n            act_layer: Type[nn.Module] = nn.GELU,\n            kernel_size: int = 3,\n            device=None,\n            dtype=None,\n    ):\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        out_features = out_features or in_features\n        padding = kernel_size // 2\n\n        self.conv1 = torch.nn.Conv2d(\n            in_features, in_features, kernel_size=kernel_size, padding=padding, groups=in_features, **dd)\n        self.act = act_layer()\n        self.bn = nn.BatchNorm2d(in_features, **dd)\n        self.conv2 = torch.nn.Conv2d(\n            in_features, out_features, kernel_size=kernel_size, padding=padding, groups=out_features, **dd)\n\n    def forward(self, x, H: int, W: int):\n        B, N, C = x.shape\n        x = x.permute(0, 2, 1).reshape(B, C, H, W)\n        x = self.conv1(x)\n        x = self.act(x)\n        x = self.bn(x)\n        x = self.conv2(x)\n        x = x.reshape(B, C, N).permute(0, 2, 1)\n        return x\n\n\nclass ClassAttentionBlock(nn.Module):\n    \"\"\"Class Attention Layer as in CaiT https://arxiv.org/abs/2103.17239\"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            eta: Optional[float] = 1.,\n            tokens_norm: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = 
ClassAttn(\n            dim,\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            **dd,\n        )\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp = Mlp(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        if eta is not None:  # LayerScale Initialization (no layerscale when None)\n            self.gamma1 = nn.Parameter(eta * torch.ones(dim, **dd))\n            self.gamma2 = nn.Parameter(eta * torch.ones(dim, **dd))\n        else:\n            self.gamma1, self.gamma2 = 1.0, 1.0\n\n        # See https://github.com/rwightman/pytorch-image-models/pull/747#issuecomment-877795721\n        self.tokens_norm = tokens_norm\n\n    def forward(self, x):\n        x_norm1 = self.norm1(x)\n        x_attn = torch.cat([self.attn(x_norm1), x_norm1[:, 1:]], dim=1)\n        x = x + self.drop_path1(self.gamma1 * x_attn)\n\n        if self.tokens_norm:\n            x = self.norm2(x)\n        else:\n            x = torch.cat([self.norm2(x[:, 0:1]), x[:, 1:]], dim=1)\n        x_res = x\n        cls_token = x[:, 0:1]\n        cls_token = self.gamma2 * self.mlp(cls_token)\n        x = torch.cat([cls_token, x[:, 1:]], dim=1)\n        x = x_res + self.drop_path2(x)\n        return x\n\n\nclass XCA(nn.Module):\n    fused_attn: torch.jit.Final[bool]\n    \"\"\" Cross-Covariance Attention (XCA)\n    Operation where the channels are updated using a weighted sum. 
The weights are obtained from the (softmax\n    normalized) Cross-covariance matrix (Q^T \\\\cdot K \\\\in d_h \\\\times d_h)\n    \"\"\"\n\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int = 8,\n            qkv_bias: bool = False,\n            attn_drop: float = 0.,\n            proj_drop: float = 0.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.num_heads = num_heads\n        self.fused_attn = use_fused_attn(experimental=True)\n        self.temperature = nn.Parameter(torch.ones(num_heads, 1, 1, **dd))\n        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias, **dd)\n        self.attn_drop = nn.Dropout(attn_drop)\n        self.proj = nn.Linear(dim, dim, **dd)\n        self.proj_drop = nn.Dropout(proj_drop)\n\n    def forward(self, x):\n        B, N, C = x.shape\n        # Result of next line is (qkv, B, num (H)eads,  (C')hannels per head, N)\n        qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 4, 1)\n        q, k, v = qkv.unbind(0)  # make torchscript happy (cannot use tensor as tuple)\n\n        if self.fused_attn:\n            q = torch.nn.functional.normalize(q, dim=-1) * self.temperature\n            k = torch.nn.functional.normalize(k, dim=-1)\n            x = torch.nn.functional.scaled_dot_product_attention(q, k, v, scale=1.0)\n        else:\n            # Paper section 3.2 l2-Normalization and temperature scaling\n            q = torch.nn.functional.normalize(q, dim=-1)\n            k = torch.nn.functional.normalize(k, dim=-1)\n            attn = (q @ k.transpose(-2, -1)) * self.temperature\n            attn = attn.softmax(dim=-1)\n            attn = self.attn_drop(attn)\n            x = attn @ v\n\n        x = x.permute(0, 3, 1, 2).reshape(B, N, C)\n        x = self.proj(x)\n        x = self.proj_drop(x)\n        return x\n\n    @torch.jit.ignore\n    def 
no_weight_decay(self):\n        return {'temperature'}\n\n\nclass XCABlock(nn.Module):\n    def __init__(\n            self,\n            dim: int,\n            num_heads: int,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = False,\n            proj_drop: float = 0.,\n            attn_drop: float = 0.,\n            drop_path: float = 0.,\n            act_layer: Type[nn.Module] = nn.GELU,\n            norm_layer: Type[nn.Module] = nn.LayerNorm,\n            eta: float = 1.,\n            device=None,\n            dtype=None,\n    ):\n        dd = {'device': device, 'dtype': dtype}\n        super().__init__()\n        self.norm1 = norm_layer(dim, **dd)\n        self.attn = XCA(\n            dim,\n            num_heads=num_heads,\n            qkv_bias=qkv_bias,\n            attn_drop=attn_drop,\n            proj_drop=proj_drop,\n            **dd,\n        )\n        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm3 = norm_layer(dim, **dd)\n        self.local_mp = LPI(in_features=dim, act_layer=act_layer, **dd)\n        self.drop_path3 = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n\n        self.norm2 = norm_layer(dim, **dd)\n        self.mlp = Mlp(\n            in_features=dim,\n            hidden_features=int(dim * mlp_ratio),\n            act_layer=act_layer,\n            drop=proj_drop,\n            **dd,\n        )\n        self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n\n        self.gamma1 = nn.Parameter(eta * torch.ones(dim, **dd))\n        self.gamma3 = nn.Parameter(eta * torch.ones(dim, **dd))\n        self.gamma2 = nn.Parameter(eta * torch.ones(dim, **dd))\n\n    def forward(self, x, H: int, W: int):\n        x = x + self.drop_path1(self.gamma1 * self.attn(self.norm1(x)))\n        # NOTE official code has 3 then 2, so keeping it the same to be consistent with loaded weights\n        # See https://github.com/rwightman/pytorch-image-models/pull/747#issuecomment-877795721\n        x = x + self.drop_path3(self.gamma3 * self.local_mp(self.norm3(x), H, W))\n        x = x + self.drop_path2(self.gamma2 * self.mlp(self.norm2(x)))\n        return x\n\n\nclass Xcit(nn.Module):\n    \"\"\"\n    Based on timm and DeiT code bases\n    https://github.com/rwightman/pytorch-image-models/tree/master/timm\n    https://github.com/facebookresearch/deit/\n    \"\"\"\n\n    def __init__(\n            self,\n            img_size: Union[int, Tuple[int, int]] = 224,\n            patch_size: int = 16,\n            in_chans: int = 3,\n            num_classes: int = 1000,\n            global_pool: str = 'token',\n            embed_dim: int = 768,\n            depth: int = 12,\n            num_heads: int = 12,\n            mlp_ratio: float = 4.,\n            qkv_bias: bool = True,\n            drop_rate: float = 0.,\n            pos_drop_rate: float = 0.,\n            proj_drop_rate: float = 0.,\n            attn_drop_rate: float = 0.,\n            drop_path_rate: float = 0.,\n            act_layer: Optional[Type[nn.Module]] = None,\n            norm_layer: Optional[Type[nn.Module]] = None,\n            cls_attn_layers: int = 2,\n            use_pos_embed: bool = True,\n            eta: float = 1.,\n            tokens_norm: bool = False,\n            device=None,\n            dtype=None,\n    ):\n        \"\"\"\n        Args:\n            img_size (int, tuple): input image size\n            patch_size (int): patch size\n            
in_chans (int): number of input channels\n            num_classes (int): number of classes for classification head\n            embed_dim (int): embedding dimension\n            depth (int): depth of transformer\n            num_heads (int): number of attention heads\n            mlp_ratio (int): ratio of mlp hidden dim to embedding dim\n            qkv_bias (bool): enable bias for qkv if True\n            drop_rate (float): dropout rate after positional embedding, and in XCA/CA projection + MLP\n            pos_drop_rate: position embedding dropout rate\n            proj_drop_rate (float): projection dropout rate\n            attn_drop_rate (float): attention dropout rate\n            drop_path_rate (float): stochastic depth rate (constant across all layers)\n            norm_layer: (nn.Module): normalization layer\n            cls_attn_layers: (int) Depth of Class attention layers\n            use_pos_embed: (bool) whether to use positional encoding\n            eta: (float) layerscale initialization value\n            tokens_norm: (bool) Whether to normalize all tokens or just the cls_token in the CA\n\n        Notes:\n            - Although `layer_norm` is user specifiable, there are hard-coded `BatchNorm2d`s in the local patch\n              interaction (class LPI) and the patch embedding (class ConvPatchEmbed)\n        \"\"\"\n        super().__init__()\n        dd = {'device': device, 'dtype': dtype}\n        assert global_pool in ('', 'avg', 'token')\n        img_size = to_2tuple(img_size)\n        assert (img_size[0] % patch_size == 0) and (img_size[0] % patch_size == 0), \\\n            '`patch_size` should divide image dimensions evenly'\n        norm_layer = norm_layer or partial(nn.LayerNorm, eps=1e-6)\n        act_layer = act_layer or nn.GELU\n\n        self.num_classes = num_classes\n        self.in_chans = in_chans\n        self.num_features = self.head_hidden_size = self.embed_dim = embed_dim\n        self.global_pool = global_pool\n        
self.grad_checkpointing = False\n\n        self.patch_embed = ConvPatchEmbed(\n            img_size=img_size,\n            patch_size=patch_size,\n            in_chans=in_chans,\n            embed_dim=embed_dim,\n            act_layer=act_layer,\n            **dd,\n        )\n        r = patch_size\n\n        self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim, **dd))\n        if use_pos_embed:\n            self.pos_embed = PositionalEncodingFourier(dim=embed_dim, **dd)\n        else:\n            self.pos_embed = None\n        self.pos_drop = nn.Dropout(p=pos_drop_rate)\n\n        self.blocks = nn.ModuleList([\n            XCABlock(\n                dim=embed_dim,\n                num_heads=num_heads,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                proj_drop=proj_drop_rate,\n                attn_drop=attn_drop_rate,\n                drop_path=drop_path_rate,\n                act_layer=act_layer,\n                norm_layer=norm_layer,\n                eta=eta,\n                **dd,\n            )\n            for _ in range(depth)])\n        self.feature_info = [dict(num_chs=embed_dim, reduction=r, module=f'blocks.{i}') for i in range(depth)]\n\n        self.cls_attn_blocks = nn.ModuleList([\n            ClassAttentionBlock(\n                dim=embed_dim,\n                num_heads=num_heads,\n                mlp_ratio=mlp_ratio,\n                qkv_bias=qkv_bias,\n                proj_drop=drop_rate,\n                attn_drop=attn_drop_rate,\n                act_layer=act_layer,\n                norm_layer=norm_layer,\n                eta=eta,\n                tokens_norm=tokens_norm,\n                **dd,\n            )\n            for _ in range(cls_attn_layers)])\n\n        # Classifier head\n        self.norm = norm_layer(embed_dim, **dd)\n        self.head_drop = nn.Dropout(drop_rate)\n        self.head = nn.Linear(self.num_features, num_classes, **dd) if num_classes > 0 else nn.Identity()\n\n        # 
Init weights\n        trunc_normal_(self.cls_token, std=.02)\n        self.apply(self._init_weights)\n\n    def _init_weights(self, m):\n        if isinstance(m, nn.Linear):\n            trunc_normal_(m.weight, std=.02)\n            if isinstance(m, nn.Linear) and m.bias is not None:\n                nn.init.constant_(m.bias, 0)\n\n    @torch.jit.ignore\n    def no_weight_decay(self):\n        return {'pos_embed', 'cls_token'}\n\n    @torch.jit.ignore\n    def group_matcher(self, coarse=False):\n        return dict(\n            stem=r'^cls_token|pos_embed|patch_embed',  # stem and embed\n            blocks=r'^blocks\\.(\\d+)',\n            cls_attn_blocks=[(r'^cls_attn_blocks\\.(\\d+)', None), (r'^norm', (99999,))]\n        )\n\n    @torch.jit.ignore\n    def set_grad_checkpointing(self, enable=True):\n        self.grad_checkpointing = enable\n\n    @torch.jit.ignore\n    def get_classifier(self) -> nn.Module:\n        return self.head\n\n    def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):\n        self.num_classes = num_classes\n        if global_pool is not None:\n            assert global_pool in ('', 'avg', 'token')\n            self.global_pool = global_pool\n        device = self.head.weight.device if hasattr(self.head, 'weight') else None\n        dtype = self.head.weight.dtype if hasattr(self.head, 'weight') else None\n        self.head = nn.Linear(self.num_features, num_classes, device=device, dtype=dtype) if num_classes > 0 else nn.Identity()\n\n    def forward_intermediates(\n            self,\n            x: torch.Tensor,\n            indices: Optional[Union[int, List[int]]] = None,\n            norm: bool = False,\n            stop_early: bool = False,\n            output_fmt: str = 'NCHW',\n            intermediates_only: bool = False,\n    ) -> Union[List[torch.Tensor], Tuple[torch.Tensor, List[torch.Tensor]]]:\n        \"\"\" Forward features that returns intermediates.\n\n        Args:\n            x: Input image 
tensor\n            indices: Take last n blocks if int, all if None, select matching indices if sequence\n            norm: Apply norm layer to all intermediates\n            stop_early: Stop iterating over blocks when last desired intermediate hit\n            output_fmt: Shape of intermediate feature outputs\n            intermediates_only: Only return intermediate features\n        Returns:\n\n        \"\"\"\n        assert output_fmt in ('NCHW', 'NLC'), 'Output format must be one of NCHW or NLC.'\n        reshape = output_fmt == 'NCHW'\n        intermediates = []\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n\n        # forward pass\n        B, _, height, width = x.shape\n        x, (Hp, Wp) = self.patch_embed(x)\n        if self.pos_embed is not None:\n            # `pos_embed` (B, C, Hp, Wp), reshape -> (B, C, N), permute -> (B, N, C)\n            pos_encoding = self.pos_embed(B, Hp, Wp).reshape(B, -1, x.shape[1]).permute(0, 2, 1)\n            x = x + pos_encoding\n        x = self.pos_drop(x)\n\n        if torch.jit.is_scripting() or not stop_early:  # can't slice blocks in torchscript\n            blocks = self.blocks\n        else:\n            blocks = self.blocks[:max_index + 1]\n        for i, blk in enumerate(blocks):\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x, Hp, Wp)\n            else:\n                x = blk(x, Hp, Wp)\n            if i in take_indices:\n                # normalize intermediates with final norm layer if enabled\n                intermediates.append(self.norm(x) if norm else x)\n\n        # process intermediates\n        if reshape:\n            # reshape to BCHW output format\n            intermediates = [y.reshape(B, Hp, Wp, -1).permute(0, 3, 1, 2).contiguous() for y in intermediates]\n\n        if intermediates_only:\n            return intermediates\n\n        # NOTE not supporting return of class tokens\n        x = 
torch.cat((self.cls_token.expand(B, -1, -1), x), dim=1)\n        for blk in self.cls_attn_blocks:\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x)\n            else:\n                x = blk(x)\n\n        x = self.norm(x)\n\n        return x, intermediates\n\n    def prune_intermediate_layers(\n            self,\n            indices: Union[int, List[int]] = 1,\n            prune_norm: bool = False,\n            prune_head: bool = True,\n    ):\n        \"\"\" Prune layers not required for specified intermediates.\n        \"\"\"\n        take_indices, max_index = feature_take_indices(len(self.blocks), indices)\n        self.blocks = self.blocks[:max_index + 1]  # truncate blocks\n        if prune_norm:\n            self.norm = nn.Identity()\n        if prune_head:\n            self.cls_attn_blocks = nn.ModuleList()  # prune token blocks with head\n            self.reset_classifier(0, '')\n        return take_indices\n\n    def forward_features(self, x):\n        B = x.shape[0]\n        # x is (B, N, C). 
(Hp, Hw) is (height in units of patches, width in units of patches)\n        x, (Hp, Wp) = self.patch_embed(x)\n\n        if self.pos_embed is not None:\n            # `pos_embed` (B, C, Hp, Wp), reshape -> (B, C, N), permute -> (B, N, C)\n            pos_encoding = self.pos_embed(B, Hp, Wp).reshape(B, -1, x.shape[1]).permute(0, 2, 1)\n            x = x + pos_encoding\n        x = self.pos_drop(x)\n\n        for blk in self.blocks:\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x, Hp, Wp)\n            else:\n                x = blk(x, Hp, Wp)\n\n        x = torch.cat((self.cls_token.expand(B, -1, -1), x), dim=1)\n\n        for blk in self.cls_attn_blocks:\n            if self.grad_checkpointing and not torch.jit.is_scripting():\n                x = checkpoint(blk, x)\n            else:\n                x = blk(x)\n\n        x = self.norm(x)\n        return x\n\n    def forward_head(self, x, pre_logits: bool = False):\n        if self.global_pool:\n            x = x[:, 1:].mean(dim=1) if self.global_pool == 'avg' else x[:, 0]\n        x = self.head_drop(x)\n        return x if pre_logits else self.head(x)\n\n    def forward(self, x):\n        x = self.forward_features(x)\n        x = self.forward_head(x)\n        return x\n\n\ndef checkpoint_filter_fn(state_dict, model):\n    if 'model' in state_dict:\n        state_dict = state_dict['model']\n    # For consistency with timm's transformer models while being compatible with official weights source we rename\n    # pos_embeder to pos_embed. 
Also account for use_pos_embed == False\n    use_pos_embed = getattr(model, 'pos_embed', None) is not None\n    pos_embed_keys = [k for k in state_dict if k.startswith('pos_embed')]\n    for k in pos_embed_keys:\n        if use_pos_embed:\n            state_dict[k.replace('pos_embeder.', 'pos_embed.')] = state_dict.pop(k)\n        else:\n            del state_dict[k]\n    # timm's implementation of class attention in CaiT is slightly more efficient as it does not compute query vectors\n    # for all tokens, just the class token. To use official weights source we must split qkv into q, k, v\n    if 'cls_attn_blocks.0.attn.qkv.weight' in state_dict and 'cls_attn_blocks.0.attn.q.weight' in model.state_dict():\n        num_ca_blocks = len(model.cls_attn_blocks)\n        for i in range(num_ca_blocks):\n            qkv_weight = state_dict.pop(f'cls_attn_blocks.{i}.attn.qkv.weight')\n            qkv_weight = qkv_weight.reshape(3, -1, qkv_weight.shape[-1])\n            for j, subscript in enumerate('qkv'):\n                state_dict[f'cls_attn_blocks.{i}.attn.{subscript}.weight'] = qkv_weight[j]\n            qkv_bias = state_dict.pop(f'cls_attn_blocks.{i}.attn.qkv.bias', None)\n            if qkv_bias is not None:\n                qkv_bias = qkv_bias.reshape(3, -1)\n                for j, subscript in enumerate('qkv'):\n                    state_dict[f'cls_attn_blocks.{i}.attn.{subscript}.bias'] = qkv_bias[j]\n    return state_dict\n\n\ndef _create_xcit(variant, pretrained=False, default_cfg=None, **kwargs):\n    out_indices = kwargs.pop('out_indices', 3)\n    model = build_model_with_cfg(\n        Xcit,\n        variant,\n        pretrained,\n        pretrained_filter_fn=checkpoint_filter_fn,\n        feature_cfg=dict(out_indices=out_indices, feature_cls='getter'),\n        **kwargs,\n    )\n    return model\n\n\ndef _cfg(url='', **kwargs):\n    return {\n        'url': url,\n        'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,\n        
'crop_pct': 1.0, 'interpolation': 'bicubic', 'fixed_input_size': True,\n        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,\n        'first_conv': 'patch_embed.proj.0.0', 'classifier': 'head',\n        'license': 'apache-2.0', **kwargs\n    }\n\n\ndefault_cfgs = generate_default_cfgs({\n    # Patch size 16\n    'xcit_nano_12_p16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_nano_12_p16_224.pth'),\n    'xcit_nano_12_p16_224.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_nano_12_p16_224_dist.pth'),\n    'xcit_nano_12_p16_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_nano_12_p16_384_dist.pth', input_size=(3, 384, 384)),\n    'xcit_tiny_12_p16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_tiny_12_p16_224.pth'),\n    'xcit_tiny_12_p16_224.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_tiny_12_p16_224_dist.pth'),\n    'xcit_tiny_12_p16_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_tiny_12_p16_384_dist.pth', input_size=(3, 384, 384)),\n    'xcit_tiny_24_p16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_tiny_24_p16_224.pth'),\n    'xcit_tiny_24_p16_224.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_tiny_24_p16_224_dist.pth'),\n    'xcit_tiny_24_p16_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_tiny_24_p16_384_dist.pth', input_size=(3, 384, 384)),\n    'xcit_small_12_p16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_small_12_p16_224.pth'),\n    'xcit_small_12_p16_224.fb_dist_in1k': 
_cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_small_12_p16_224_dist.pth'),\n    'xcit_small_12_p16_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_small_12_p16_384_dist.pth', input_size=(3, 384, 384)),\n    'xcit_small_24_p16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_small_24_p16_224.pth'),\n    'xcit_small_24_p16_224.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_small_24_p16_224_dist.pth'),\n    'xcit_small_24_p16_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_small_24_p16_384_dist.pth', input_size=(3, 384, 384)),\n    'xcit_medium_24_p16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_medium_24_p16_224.pth'),\n    'xcit_medium_24_p16_224.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_medium_24_p16_224_dist.pth'),\n    'xcit_medium_24_p16_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_medium_24_p16_384_dist.pth', input_size=(3, 384, 384)),\n    'xcit_large_24_p16_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_large_24_p16_224.pth'),\n    'xcit_large_24_p16_224.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_large_24_p16_224_dist.pth'),\n    'xcit_large_24_p16_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_large_24_p16_384_dist.pth', input_size=(3, 384, 384)),\n\n    # Patch size 8\n    'xcit_nano_12_p8_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_nano_12_p8_224.pth'),\n    
'xcit_nano_12_p8_224.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_nano_12_p8_224_dist.pth'),\n    'xcit_nano_12_p8_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_nano_12_p8_384_dist.pth', input_size=(3, 384, 384)),\n    'xcit_tiny_12_p8_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_tiny_12_p8_224.pth'),\n    'xcit_tiny_12_p8_224.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_tiny_12_p8_224_dist.pth'),\n    'xcit_tiny_12_p8_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_tiny_12_p8_384_dist.pth', input_size=(3, 384, 384)),\n    'xcit_tiny_24_p8_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_tiny_24_p8_224.pth'),\n    'xcit_tiny_24_p8_224.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_tiny_24_p8_224_dist.pth'),\n    'xcit_tiny_24_p8_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_tiny_24_p8_384_dist.pth', input_size=(3, 384, 384)),\n    'xcit_small_12_p8_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_small_12_p8_224.pth'),\n    'xcit_small_12_p8_224.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_small_12_p8_224_dist.pth'),\n    'xcit_small_12_p8_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_small_12_p8_384_dist.pth', input_size=(3, 384, 384)),\n    'xcit_small_24_p8_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_small_24_p8_224.pth'),\n    'xcit_small_24_p8_224.fb_dist_in1k': 
_cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_small_24_p8_224_dist.pth'),\n    'xcit_small_24_p8_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_small_24_p8_384_dist.pth', input_size=(3, 384, 384)),\n    'xcit_medium_24_p8_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_medium_24_p8_224.pth'),\n    'xcit_medium_24_p8_224.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_medium_24_p8_224_dist.pth'),\n    'xcit_medium_24_p8_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_medium_24_p8_384_dist.pth', input_size=(3, 384, 384)),\n    'xcit_large_24_p8_224.fb_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_large_24_p8_224.pth'),\n    'xcit_large_24_p8_224.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_large_24_p8_224_dist.pth'),\n    'xcit_large_24_p8_384.fb_dist_in1k': _cfg(\n        hf_hub_id='timm/',\n        url='https://dl.fbaipublicfiles.com/xcit/xcit_large_24_p8_384_dist.pth', input_size=(3, 384, 384)),\n})\n\n\n@register_model\ndef xcit_nano_12_p16_224(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=16, embed_dim=128, depth=12, num_heads=4, eta=1.0, tokens_norm=False)\n    model = _create_xcit('xcit_nano_12_p16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_nano_12_p16_384(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=16, embed_dim=128, depth=12, num_heads=4, eta=1.0, tokens_norm=False, img_size=384)\n    model = _create_xcit('xcit_nano_12_p16_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef 
xcit_tiny_12_p16_224(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=16, embed_dim=192, depth=12, num_heads=4, eta=1.0, tokens_norm=True)\n    model = _create_xcit('xcit_tiny_12_p16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_tiny_12_p16_384(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=16, embed_dim=192, depth=12, num_heads=4, eta=1.0, tokens_norm=True)\n    model = _create_xcit('xcit_tiny_12_p16_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_small_12_p16_224(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=16, embed_dim=384, depth=12, num_heads=8, eta=1.0, tokens_norm=True)\n    model = _create_xcit('xcit_small_12_p16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_small_12_p16_384(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=16, embed_dim=384, depth=12, num_heads=8, eta=1.0, tokens_norm=True)\n    model = _create_xcit('xcit_small_12_p16_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_tiny_24_p16_224(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=16, embed_dim=192, depth=24, num_heads=4, eta=1e-5, tokens_norm=True)\n    model = _create_xcit('xcit_tiny_24_p16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_tiny_24_p16_384(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=16, embed_dim=192, depth=24, num_heads=4, eta=1e-5, tokens_norm=True)\n    model = _create_xcit('xcit_tiny_24_p16_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_small_24_p16_224(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n  
      patch_size=16, embed_dim=384, depth=24, num_heads=8, eta=1e-5, tokens_norm=True)\n    model = _create_xcit('xcit_small_24_p16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_small_24_p16_384(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=16, embed_dim=384, depth=24, num_heads=8, eta=1e-5, tokens_norm=True)\n    model = _create_xcit('xcit_small_24_p16_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_medium_24_p16_224(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=16, embed_dim=512, depth=24, num_heads=8, eta=1e-5, tokens_norm=True)\n    model = _create_xcit('xcit_medium_24_p16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_medium_24_p16_384(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=16, embed_dim=512, depth=24, num_heads=8, eta=1e-5, tokens_norm=True)\n    model = _create_xcit('xcit_medium_24_p16_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_large_24_p16_224(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=24, num_heads=16, eta=1e-5, tokens_norm=True)\n    model = _create_xcit('xcit_large_24_p16_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_large_24_p16_384(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=16, embed_dim=768, depth=24, num_heads=16, eta=1e-5, tokens_norm=True)\n    model = _create_xcit('xcit_large_24_p16_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n# Patch size 8x8 models\n@register_model\ndef xcit_nano_12_p8_224(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=8, embed_dim=128, depth=12, 
num_heads=4, eta=1.0, tokens_norm=False)\n    model = _create_xcit('xcit_nano_12_p8_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_nano_12_p8_384(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=8, embed_dim=128, depth=12, num_heads=4, eta=1.0, tokens_norm=False)\n    model = _create_xcit('xcit_nano_12_p8_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_tiny_12_p8_224(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=8, embed_dim=192, depth=12, num_heads=4, eta=1.0, tokens_norm=True)\n    model = _create_xcit('xcit_tiny_12_p8_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_tiny_12_p8_384(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=8, embed_dim=192, depth=12, num_heads=4, eta=1.0, tokens_norm=True)\n    model = _create_xcit('xcit_tiny_12_p8_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_small_12_p8_224(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=8, embed_dim=384, depth=12, num_heads=8, eta=1.0, tokens_norm=True)\n    model = _create_xcit('xcit_small_12_p8_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_small_12_p8_384(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=8, embed_dim=384, depth=12, num_heads=8, eta=1.0, tokens_norm=True)\n    model = _create_xcit('xcit_small_12_p8_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_tiny_24_p8_224(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=8, embed_dim=192, depth=24, num_heads=4, eta=1e-5, tokens_norm=True)\n    model = _create_xcit('xcit_tiny_24_p8_224', 
pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_tiny_24_p8_384(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=8, embed_dim=192, depth=24, num_heads=4, eta=1e-5, tokens_norm=True)\n    model = _create_xcit('xcit_tiny_24_p8_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_small_24_p8_224(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=8, embed_dim=384, depth=24, num_heads=8, eta=1e-5, tokens_norm=True)\n    model = _create_xcit('xcit_small_24_p8_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_small_24_p8_384(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=8, embed_dim=384, depth=24, num_heads=8, eta=1e-5, tokens_norm=True)\n    model = _create_xcit('xcit_small_24_p8_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_medium_24_p8_224(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=8, embed_dim=512, depth=24, num_heads=8, eta=1e-5, tokens_norm=True)\n    model = _create_xcit('xcit_medium_24_p8_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_medium_24_p8_384(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=8, embed_dim=512, depth=24, num_heads=8, eta=1e-5, tokens_norm=True)\n    model = _create_xcit('xcit_medium_24_p8_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\n@register_model\ndef xcit_large_24_p8_224(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=8, embed_dim=768, depth=24, num_heads=16, eta=1e-5, tokens_norm=True)\n    model = _create_xcit('xcit_large_24_p8_224', pretrained=pretrained, **dict(model_args, **kwargs))\n    return 
model\n\n\n@register_model\ndef xcit_large_24_p8_384(pretrained=False, **kwargs) -> Xcit:\n    model_args = dict(\n        patch_size=8, embed_dim=768, depth=24, num_heads=16, eta=1e-5, tokens_norm=True)\n    model = _create_xcit('xcit_large_24_p8_384', pretrained=pretrained, **dict(model_args, **kwargs))\n    return model\n\n\nregister_model_deprecations(__name__, {\n    # Patch size 16\n    'xcit_nano_12_p16_224_dist': 'xcit_nano_12_p16_224.fb_dist_in1k',\n    'xcit_nano_12_p16_384_dist': 'xcit_nano_12_p16_384.fb_dist_in1k',\n    'xcit_tiny_12_p16_224_dist': 'xcit_tiny_12_p16_224.fb_dist_in1k',\n    'xcit_tiny_12_p16_384_dist': 'xcit_tiny_12_p16_384.fb_dist_in1k',\n    'xcit_tiny_24_p16_224_dist': 'xcit_tiny_24_p16_224.fb_dist_in1k',\n    'xcit_tiny_24_p16_384_dist': 'xcit_tiny_24_p16_384.fb_dist_in1k',\n    'xcit_small_12_p16_224_dist': 'xcit_small_12_p16_224.fb_dist_in1k',\n    'xcit_small_12_p16_384_dist': 'xcit_small_12_p16_384.fb_dist_in1k',\n    'xcit_small_24_p16_224_dist': 'xcit_small_24_p16_224.fb_dist_in1k',\n    'xcit_small_24_p16_384_dist': 'xcit_small_24_p16_384.fb_dist_in1k',\n    'xcit_medium_24_p16_224_dist': 'xcit_medium_24_p16_224.fb_dist_in1k',\n    'xcit_medium_24_p16_384_dist': 'xcit_medium_24_p16_384.fb_dist_in1k',\n    'xcit_large_24_p16_224_dist': 'xcit_large_24_p16_224.fb_dist_in1k',\n    'xcit_large_24_p16_384_dist': 'xcit_large_24_p16_384.fb_dist_in1k',\n\n    # Patch size 8\n    'xcit_nano_12_p8_224_dist': 'xcit_nano_12_p8_224.fb_dist_in1k',\n    'xcit_nano_12_p8_384_dist': 'xcit_nano_12_p8_384.fb_dist_in1k',\n    'xcit_tiny_12_p8_224_dist': 'xcit_tiny_12_p8_224.fb_dist_in1k',\n    'xcit_tiny_12_p8_384_dist': 'xcit_tiny_12_p8_384.fb_dist_in1k',\n    'xcit_tiny_24_p8_224_dist': 'xcit_tiny_24_p8_224.fb_dist_in1k',\n    'xcit_tiny_24_p8_384_dist': 'xcit_tiny_24_p8_384.fb_dist_in1k',\n    'xcit_small_12_p8_224_dist': 'xcit_small_12_p8_224.fb_dist_in1k',\n    'xcit_small_12_p8_384_dist': 'xcit_small_12_p8_384.fb_dist_in1k',\n    
'xcit_small_24_p8_224_dist': 'xcit_small_24_p8_224.fb_dist_in1k',\n    'xcit_small_24_p8_384_dist': 'xcit_small_24_p8_384.fb_dist_in1k',\n    'xcit_medium_24_p8_224_dist': 'xcit_medium_24_p8_224.fb_dist_in1k',\n    'xcit_medium_24_p8_384_dist': 'xcit_medium_24_p8_384.fb_dist_in1k',\n    'xcit_large_24_p8_224_dist': 'xcit_large_24_p8_224.fb_dist_in1k',\n    'xcit_large_24_p8_384_dist': 'xcit_large_24_p8_384.fb_dist_in1k',\n})\n"
  },
  {
    "path": "timm/optim/__init__.py",
    "content": "from .adabelief import AdaBelief\nfrom .adafactor import Adafactor\nfrom .adafactor_bv import AdafactorBigVision\nfrom .adahessian import Adahessian\nfrom .adamp import AdamP\nfrom .adamw import AdamWLegacy\nfrom .adan import Adan\nfrom .adopt import Adopt\nfrom .lamb import Lamb\nfrom .laprop import LaProp\nfrom .lars import Lars\nfrom .lion import Lion\nfrom .lookahead import Lookahead\nfrom .madgrad import MADGRAD\nfrom .mars import Mars\nfrom .muon import Muon\nfrom .nadam import NAdamLegacy\nfrom .nadamw import NAdamW\nfrom .nvnovograd import NvNovoGrad\nfrom .radam import RAdamLegacy\nfrom .rmsprop_tf import RMSpropTF\nfrom .sgdp import SGDP\nfrom .sgdw import SGDW\n\n# bring common torch.optim Optimizers into timm.optim namespace for consistency\nfrom torch.optim import Adadelta, Adagrad, Adamax, Adam, AdamW, RMSprop, SGD\ntry:\n    # in case any very old torch versions being used\n    from torch.optim import NAdam, RAdam\nexcept ImportError:\n    pass\n\nfrom ._optim_factory import list_optimizers, get_optimizer_class, get_optimizer_info, OptimInfo, OptimizerRegistry, \\\n    create_optimizer_v2, create_optimizer, optimizer_kwargs\nfrom ._param_groups import param_groups_layer_decay, param_groups_weight_decay, auto_group_layers\n"
  },
  {
    "path": "timm/optim/_optim_factory.py",
    "content": "\"\"\" Optimizer Factory w/ custom Weight Decay & Layer Decay support\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nimport logging\nfrom dataclasses import dataclass\nfrom functools import partial\nfrom typing import Any, Callable, Collection, Dict, List, Optional, Set, Tuple, Type, Union\nfrom fnmatch import fnmatch\nimport importlib\n\nimport torch\nimport torch.nn as nn\nimport torch.optim\n\nfrom ._param_groups import param_groups_layer_decay, param_groups_weight_decay\nfrom ._types import ParamsT, OptimType, OptimizerCallable\nfrom .adabelief import AdaBelief\nfrom .adafactor import Adafactor\nfrom .adafactor_bv import AdafactorBigVision\nfrom .adahessian import Adahessian\nfrom .adamp import AdamP\nfrom .adamw import AdamWLegacy\nfrom .adan import Adan\nfrom .adopt import Adopt\nfrom .kron import Kron\nfrom .lamb import Lamb\nfrom .laprop import LaProp\nfrom .lars import Lars\nfrom .lion import Lion\nfrom .lookahead import Lookahead\nfrom .madgrad import MADGRAD\nfrom .mars import Mars\nfrom .muon import Muon\nfrom .nadam import NAdamLegacy\nfrom .nadamw import NAdamW\nfrom .nvnovograd import NvNovoGrad\nfrom .radam import RAdamLegacy\nfrom .rmsprop_tf import RMSpropTF\nfrom .sgdp import SGDP\nfrom .sgdw import SGDW\n\n_logger = logging.getLogger(__name__)\n\n\ndef _import_class(class_string: str) -> Type:\n    \"\"\"Dynamically import a class from a string.\"\"\"\n    try:\n        module_name, class_name = class_string.rsplit(\".\", 1)\n        module = importlib.import_module(module_name)\n        return getattr(module, class_name)\n    except (ImportError, AttributeError) as e:\n        raise ImportError(f\"Could not import {class_string}: {e}\")\n\n\n\n@dataclass(frozen=True)\nclass OptimInfo:\n    \"\"\"Immutable configuration for an optimizer.\n\n    Attributes:\n        name: Unique identifier for the optimizer\n        opt_class: The optimizer class\n        description: Brief description of the optimizer's 
characteristics and behavior\n        has_eps: Whether the optimizer accepts epsilon parameter\n        has_momentum: Whether the optimizer accepts momentum parameter\n        has_betas: Whether the optimizer accepts a tuple of beta parameters\n        num_betas: number of betas in tuple (valid IFF has_betas = True)\n        defaults: Optional default parameters for the optimizer\n    \"\"\"\n    name: str\n    opt_class: Union[str, OptimType]\n    description: str = ''\n    has_eps: bool = True\n    has_momentum: bool = False\n    has_betas: bool = False\n    num_betas: int = 2\n    second_order: bool = False\n    defaults: Optional[Dict[str, Any]] = None\n\n\nclass OptimizerRegistry:\n    \"\"\"Registry managing optimizer configurations and instantiation.\n\n    This class provides a central registry for optimizer configurations and handles\n    their instantiation with appropriate parameter groups and settings.\n    \"\"\"\n\n    def __init__(self) -> None:\n        self._optimizers: Dict[str, OptimInfo] = {}\n        self._foreach_defaults: Set[str] = {'lion'}\n\n    def register(self, info: OptimInfo) -> None:\n        \"\"\"Register an optimizer configuration.\n\n        Args:\n            info: The OptimInfo configuration containing name, type and description\n        \"\"\"\n        name = info.name.lower()\n        if name in self._optimizers:\n            _logger.warning(f'Optimizer {name} already registered, overwriting')\n        self._optimizers[name] = info\n\n    def register_alias(self, alias: str, target: str) -> None:\n        \"\"\"Register an alias for an existing optimizer.\n\n        Args:\n            alias: The alias name\n            target: The target optimizer name\n\n        Raises:\n            KeyError: If target optimizer doesn't exist\n        \"\"\"\n        target = target.lower()\n        if target not in self._optimizers:\n            raise KeyError(f'Cannot create alias for non-existent optimizer {target}')\n        
self._optimizers[alias.lower()] = self._optimizers[target]\n\n    def register_foreach_default(self, name: str) -> None:\n        \"\"\"Register an optimizer as defaulting to foreach=True.\"\"\"\n        self._foreach_defaults.add(name.lower())\n\n    def list_optimizers(\n            self,\n            filter: Union[str, List[str]] = '',\n            exclude_filters: Optional[List[str]] = None,\n            with_description: bool = False\n    ) -> List[Union[str, Tuple[str, str]]]:\n        \"\"\"List available optimizer names, optionally filtered.\n\n        Args:\n            filter: Wildcard style filter string (e.g., 'adam*')\n            exclude_filters: Optional list of wildcard patterns to exclude\n            with_description: If True, return tuples of (name, description)\n\n        Returns:\n            List of either optimizer names or (name, description) tuples\n        \"\"\"\n        names = sorted(self._optimizers.keys())\n\n        if filter:\n            if isinstance(filter, str):\n                filters = [filter]\n            else:\n                filters = filter\n            filtered_names = set()\n            for f in filters:\n                filtered_names.update(n for n in names if fnmatch(n, f))\n            names = sorted(filtered_names)\n\n        if exclude_filters:\n            for exclude_filter in exclude_filters:\n                names = [n for n in names if not fnmatch(n, exclude_filter)]\n\n        if with_description:\n            return [(name, self._optimizers[name].description) for name in names]\n\n        return names\n\n    def get_optimizer_info(self, name: str) -> OptimInfo:\n        \"\"\"Get the OptimInfo for an optimizer.\n\n        Args:\n            name: Name of the optimizer\n\n        Returns:\n            OptimInfo configuration\n\n        Raises:\n            ValueError: If optimizer is not found\n        \"\"\"\n        name = name.lower()\n        if name not in self._optimizers:\n            raise 
ValueError(f'Optimizer {name} not found in registry')\n        return self._optimizers[name]\n\n    def get_optimizer_class(\n            self,\n            name_or_info: Union[str, OptimInfo],\n            bind_defaults: bool = True,\n    ) -> Union[OptimType, OptimizerCallable]:\n        \"\"\"Get the optimizer class with any default arguments applied.\n\n        This allows direct instantiation of optimizers with their default configs\n        without going through the full factory.\n\n        Args:\n            name_or_info: Name of the optimizer\n            bind_defaults: Bind default arguments to optimizer class via `partial` before returning\n\n        Returns:\n            Optimizer class or partial with defaults applied\n\n        Raises:\n            ValueError: If optimizer not found\n        \"\"\"\n        if isinstance(name_or_info, str):\n            opt_info = self.get_optimizer_info(name_or_info)\n        else:\n            assert isinstance(name_or_info, OptimInfo)\n            opt_info = name_or_info\n\n        if isinstance(opt_info.opt_class, str):\n            # Special handling for APEX and BNB optimizers\n            if opt_info.opt_class.startswith('apex.'):\n                assert torch.cuda.is_available(), 'CUDA required for APEX optimizers'\n                try:\n                    opt_class = _import_class(opt_info.opt_class)\n                except ImportError as e:\n                    raise ImportError('APEX optimizers require apex to be installed') from e\n            elif opt_info.opt_class.startswith('bitsandbytes.'):\n                assert torch.cuda.is_available(), 'CUDA required for bitsandbytes optimizers'\n                try:\n                    opt_class = _import_class(opt_info.opt_class)\n                except ImportError as e:\n                    raise ImportError('bitsandbytes optimizers require bitsandbytes to be installed') from e\n            else:\n                opt_class = 
_import_class(opt_info.opt_class)\n        else:\n            opt_class = opt_info.opt_class\n\n        # Return class or partial with defaults\n        if bind_defaults and opt_info.defaults:\n            opt_class = partial(opt_class, **opt_info.defaults)\n\n        return opt_class\n\n    def create_optimizer(\n            self,\n            model_or_params: Union[nn.Module, ParamsT],\n            opt: str,\n            lr: Optional[float] = None,\n            weight_decay: float = 0.,\n            momentum: float = 0.9,\n            foreach: Optional[bool] = None,\n            weight_decay_exclude_1d: bool = True,\n            fallback_list: Collection[str] = (),\n            fallback_no_weight_decay: bool = False,\n            layer_decay: Optional[float] = None,\n            layer_decay_min_scale: Optional[float] = None,\n            layer_decay_no_opt_scale: Optional[float] = None,\n            param_group_fn: Optional[Callable[[nn.Module], ParamsT]] = None,\n            **kwargs: Any,\n    ) -> torch.optim.Optimizer:\n        \"\"\"Create an optimizer instance.\n\n        Args:\n            model_or_params: Model or parameters to optimize\n            opt: Name of optimizer to create\n            lr: Learning rate\n            weight_decay: Weight decay factor\n            momentum: Momentum factor for applicable optimizers\n            foreach: Enable/disable foreach operation\n            weight_decay_exclude_1d: Whether to skip weight decay for 1d params (biases and norm affine)\n            fallback_list: Collection of parameter name patterns to use fallback optimizer for hybrid optimizers\n            fallback_no_weight_decay: If True, params in no_weight_decay list will use fallback optimizer (e.g., AdamW for Muon)\n            layer_decay: Layer-wise learning rate decay\n            layer_decay_min_scale: Minimum layer scale factor clamp value\n            layer_decay_no_opt_scale: Layer scale below which optimization is disabled\n            
param_group_fn: Optional custom parameter grouping function\n            **kwargs: Additional optimizer-specific arguments\n\n        Returns:\n            Configured optimizer instance\n\n        Raises:\n            ValueError: If optimizer not found or configuration invalid\n        \"\"\"\n\n        # Get parameters to optimize\n        if isinstance(model_or_params, nn.Module):\n            # Extract parameters from a nn.Module, build param groups w/ weight-decay and/or layer-decay applied\n            no_weight_decay = getattr(model_or_params, 'no_weight_decay', lambda: set())()\n\n            if param_group_fn:\n                # run custom fn to generate param groups from nn.Module\n                params = param_group_fn(model_or_params)\n            elif layer_decay is not None:\n                params = param_groups_layer_decay(\n                    model_or_params,\n                    weight_decay=weight_decay,\n                    layer_decay=layer_decay,\n                    no_weight_decay_list=no_weight_decay,\n                    fallback_list=fallback_list,\n                    fallback_no_weight_decay=fallback_no_weight_decay,\n                    weight_decay_exclude_1d=weight_decay_exclude_1d,\n                    min_scale=layer_decay_min_scale,\n                    no_opt_scale=layer_decay_no_opt_scale,\n                )\n                weight_decay = 0.\n            elif weight_decay and weight_decay_exclude_1d:\n                params = param_groups_weight_decay(\n                    model_or_params,\n                    weight_decay=weight_decay,\n                    no_weight_decay_list=no_weight_decay,\n                    fallback_list=fallback_list,\n                    fallback_no_weight_decay=fallback_no_weight_decay,\n                )\n                weight_decay = 0.\n            else:\n                params = model_or_params.parameters()\n        else:\n            # pass parameters / parameter groups through to optimizer\n  
          params = model_or_params\n\n        # Parse optimizer name\n        opt_split = opt.lower().split('_')\n        opt_name = opt_split[-1]\n        use_lookahead = opt_split[0] == 'lookahead' if len(opt_split) > 1 else False\n\n        opt_info = self.get_optimizer_info(opt_name)\n\n        # Build optimizer arguments\n        opt_args: Dict[str, Any] = {'weight_decay': weight_decay, **kwargs}\n\n        # Add LR to args, if None optimizer default is used, some optimizers manage LR internally if None.\n        if lr is not None:\n            opt_args['lr'] = lr\n\n        # Apply optimizer-specific settings\n        if opt_info.defaults:\n            for k, v in opt_info.defaults.items():\n                opt_args.setdefault(k, v)\n\n        # timm has always defaulted momentum to 0.9 if optimizer supports momentum, keep for backward compat.\n        if opt_info.has_momentum:\n            opt_args.setdefault('momentum', momentum)\n\n        # Remove commonly used kwargs that aren't always supported\n        if not opt_info.has_eps:\n            opt_args.pop('eps', None)\n        if not opt_info.has_betas:\n            opt_args.pop('betas', None)\n\n        if foreach is not None:\n            # Explicitly activate or deactivate multi-tensor foreach impl.\n            # Not all optimizers support this, and those that do usually default to using\n            # multi-tensor impl if foreach is left as default 'None' and can be enabled.\n            opt_args.setdefault('foreach', foreach)\n\n        # Create optimizer\n        opt_class = self.get_optimizer_class(opt_info, bind_defaults=False)\n        optimizer = opt_class(params, **opt_args)\n\n        # Apply Lookahead if requested\n        if use_lookahead:\n            optimizer = Lookahead(optimizer)\n\n        return optimizer\n\n\ndef _register_sgd_variants(registry: OptimizerRegistry) -> None:\n    \"\"\"Register SGD-based optimizers\"\"\"\n    sgd_optimizers = [\n        OptimInfo(\n            
name='sgd',\n            opt_class=torch.optim.SGD,\n            description='torch.Optim Stochastic Gradient Descent (SGD) with Nesterov momentum',\n            has_eps=False,\n            has_momentum=True,\n            defaults={'nesterov': True}\n        ),\n        OptimInfo(\n            name='momentum',\n            opt_class=torch.optim.SGD,\n            description='torch.Optim Stochastic Gradient Descent (SGD) with classical momentum',\n            has_eps=False,\n            has_momentum=True,\n            defaults={'nesterov': False}\n        ),\n        OptimInfo(\n            name='sgdp',\n            opt_class=SGDP,\n            description='SGD with built-in projection to unit norm sphere',\n            has_momentum=True,\n            defaults={'nesterov': True}\n        ),\n        OptimInfo(\n            name='sgdw',\n            opt_class=SGDW,\n            description='SGD with decoupled weight decay and Nesterov momentum',\n            has_eps=False,\n            has_momentum=True,\n            defaults={'nesterov': True}\n        ),\n    ]\n    for opt in sgd_optimizers:\n        registry.register(opt)\n\n\ndef _register_adam_variants(registry: OptimizerRegistry) -> None:\n    \"\"\"Register Adam-based optimizers\"\"\"\n    adam_optimizers = [\n        OptimInfo(\n            name='adam',\n            opt_class=torch.optim.Adam,\n            description='torch.optim.Adam, Adaptive Moment Estimation',\n            has_betas=True\n        ),\n        OptimInfo(\n            name='adamw',\n            opt_class=torch.optim.AdamW,\n            description='torch.optim.AdamW, Adam with decoupled weight decay',\n            has_betas=True\n        ),\n        OptimInfo(\n            name='adamwlegacy',\n            opt_class=AdamWLegacy,\n            description='legacy impl of AdamW that pre-dates inclusion to torch.optim',\n            has_betas=True\n        ),\n        OptimInfo(\n            name='adamp',\n            opt_class=AdamP,\n         
   description='Adam with built-in projection to unit norm sphere',\n            has_betas=True,\n            defaults={'wd_ratio': 0.01, 'nesterov': True}\n        ),\n        OptimInfo(\n            name='nadam',\n            opt_class=torch.optim.NAdam,\n            description='torch.optim.NAdam, Adam with Nesterov momentum',\n            has_betas=True\n        ),\n        OptimInfo(\n            name='nadamlegacy',\n            opt_class=NAdamLegacy,\n            description='legacy impl of NAdam that pre-dates inclusion in torch.optim',\n            has_betas=True\n        ),\n        OptimInfo(\n            name='nadamw',\n            opt_class=NAdamW,\n            description='Adam with Nesterov momentum and decoupled weight decay, mlcommons/algorithmic-efficiency impl',\n            has_betas=True\n        ),\n        OptimInfo(\n            name='radam',\n            opt_class=torch.optim.RAdam,\n            description='torch.optim.RAdam, Rectified Adam with variance adaptation',\n            has_betas=True\n        ),\n        OptimInfo(\n            name='radamlegacy',\n            opt_class=RAdamLegacy,\n            description='legacy impl of RAdam that predates inclusion in torch.optim',\n            has_betas=True\n        ),\n        OptimInfo(\n            name='radamw',\n            opt_class=torch.optim.RAdam,\n            description='torch.optim.RAdamW, Rectified Adam with variance adaptation and decoupled weight decay',\n            has_betas=True,\n            defaults={'decoupled_weight_decay': True}\n        ),\n        OptimInfo(\n            name='adamax',\n            opt_class=torch.optim.Adamax,\n            description='torch.optim.Adamax, Adam with infinity norm for more stable updates',\n            has_betas=True\n        ),\n        OptimInfo(\n            name='adafactor',\n            opt_class=Adafactor,\n            description='Memory-efficient implementation of Adam with factored gradients',\n        ),\n        
OptimInfo(\n            name='adafactorbv',\n            opt_class=AdafactorBigVision,\n            description='Big Vision variant of Adafactor with factored gradients, half precision momentum',\n        ),\n        OptimInfo(\n            name='adopt',\n            opt_class=Adopt,\n            description='Modified Adam that can converge with any β2 with the optimal rate',\n        ),\n        OptimInfo(\n            name='adoptw',\n            opt_class=Adopt,\n            description='Modified AdamW (decoupled decay) that can converge with any β2 with the optimal rate',\n            defaults={'decoupled': True}\n        ),\n    ]\n    for opt in adam_optimizers:\n        registry.register(opt)\n\n\ndef _register_lamb_lars(registry: OptimizerRegistry) -> None:\n    \"\"\"Register LAMB and LARS variants\"\"\"\n    lamb_lars_optimizers = [\n        OptimInfo(\n            name='lamb',\n            opt_class=Lamb,\n            description='Layer-wise Adaptive Moments for batch optimization',\n            has_betas=True\n        ),\n        OptimInfo(\n            name='lambc',\n            opt_class=Lamb,\n            description='LAMB with trust ratio clipping for stability',\n            has_betas=True,\n            defaults={'trust_clip': True}\n        ),\n        OptimInfo(\n            name='lambw',\n            opt_class=Lamb,\n            description='LAMB with decoupled weight decay',\n            has_betas=True,\n            defaults={'decoupled_decay': True}\n        ),\n        OptimInfo(\n            name='lambcw',\n            opt_class=Lamb,\n            description='LAMB with trust ratio clipping for stability and decoupled decay',\n            has_betas=True,\n            defaults={'trust_clip': True, 'decoupled_decay': True}\n        ),\n        OptimInfo(\n            name='lars',\n            opt_class=Lars,\n            description='Layer-wise Adaptive Rate Scaling',\n            has_momentum=True\n        ),\n        OptimInfo(\n            
name='larc',\n            opt_class=Lars,\n            description='LARS with trust ratio clipping for stability',\n            has_momentum=True,\n            defaults={'trust_clip': True}\n        ),\n        OptimInfo(\n            name='nlars',\n            opt_class=Lars,\n            description='LARS with Nesterov momentum',\n            has_momentum=True,\n            defaults={'nesterov': True}\n        ),\n        OptimInfo(\n            name='nlarc',\n            opt_class=Lars,\n            description='LARS with Nesterov momentum & trust ratio clipping',\n            has_momentum=True,\n            defaults={'nesterov': True, 'trust_clip': True}\n        ),\n    ]\n    for opt in lamb_lars_optimizers:\n        registry.register(opt)\n\n\ndef _register_corrected_decay_optimizers(registry: OptimizerRegistry) -> None:\n    \"\"\"Register corrected weight decay optimizer variants\"\"\"\n    corrected_optimizers = [\n        OptimInfo(\n            name='adamc',\n            opt_class=AdamWLegacy,\n            description='AdamW with corrected weight decay (lr²/max_lr scaling)',\n            has_betas=True,\n            defaults={'corrected_weight_decay': True}\n        ),\n        OptimInfo(\n            name='nadamc',\n            opt_class=NAdamW,\n            description='NAdamW with corrected weight decay (lr²/max_lr scaling)',\n            has_betas=True,\n            defaults={'corrected_weight_decay': True}\n        ),\n        OptimInfo(\n            name='sgdc',\n            opt_class=SGDW,\n            description='SGD with corrected decoupled weight decay (lr²/max_lr scaling)',\n            has_eps=False,\n            has_momentum=True,\n            defaults={'nesterov': True, 'corrected_weight_decay': True}\n        ),\n        OptimInfo(\n            name='adoptc',\n            opt_class=Adopt,\n            description='Adopt with corrected decoupled weight decay (lr²/max_lr scaling)',\n            defaults={'decoupled': True, 
'corrected_weight_decay': True}\n        ),\n        OptimInfo(\n            name='lambcd',\n            opt_class=Lamb,\n            description='LAMB with corrected decoupled weight decay (lr²/max_lr scaling)',\n            has_betas=True,\n            defaults={'decoupled_decay': True, 'corrected_weight_decay': True}\n        ),\n        OptimInfo(\n            name='kronc',\n            opt_class=Kron,\n            description='PSGD Kron with corrected decoupled weight decay (lr²/max_lr scaling)',\n            has_momentum=True,\n            defaults={'decoupled_decay': True, 'corrected_weight_decay': True}\n        ),\n        OptimInfo(\n            name='lionc',\n            opt_class=Lion,\n            description='Lion with corrected weight decay (lr²/max_lr scaling)',\n            has_eps=False,\n            has_betas=True,\n            defaults={'corrected_weight_decay': True}\n        ),\n        OptimInfo(\n            name='lapropc',\n            opt_class=LaProp,\n            description='LaProp with corrected weight decay (lr²/max_lr scaling)',\n            has_betas=True,\n            defaults={'corrected_weight_decay': True}\n        ),\n        OptimInfo(\n            name='rmsproptfc',\n            opt_class=RMSpropTF,\n            description='RMSprop TF-style with corrected decoupled weight decay (lr²/max_lr scaling)',\n            has_momentum=True,\n            defaults={'alpha': 0.9, 'decoupled_decay': True, 'corrected_weight_decay': True}\n        ),\n        OptimInfo(\n            name='adafactorbvc',\n            opt_class=AdafactorBigVision,\n            description='Adafactor Big Vision with corrected weight decay (lr²/max_lr or lr/max_lr scaling)',\n            defaults={'corrected_weight_decay': True}\n        ),\n    ]\n    for opt in corrected_optimizers:\n        registry.register(opt)\n\n    # Cautious + corrected variants\n    cautious_corrected = [\n        OptimInfo(\n            name='cadamc',\n            
opt_class=AdamWLegacy,\n            description='Cautious AdamW with corrected weight decay (lr²/max_lr scaling)',\n            has_betas=True,\n            defaults={'caution': True, 'corrected_weight_decay': True}\n        ),\n        OptimInfo(\n            name='cadoptc',\n            opt_class=Adopt,\n            description='Cautious Adopt with corrected decoupled weight decay (lr²/max_lr scaling)',\n            defaults={'decoupled': True, 'caution': True, 'corrected_weight_decay': True}\n        ),\n        OptimInfo(\n            name='cnadamc',\n            opt_class=NAdamW,\n            description='Cautious NAdamW with corrected weight decay (lr²/max_lr scaling)',\n            has_betas=True,\n            defaults={'caution': True, 'corrected_weight_decay': True}\n        ),\n        OptimInfo(\n            name='csgdc',\n            opt_class=SGDW,\n            description='Cautious SGD with corrected decoupled weight decay (lr²/max_lr scaling)',\n            has_eps=False,\n            has_momentum=True,\n            defaults={'nesterov': True, 'caution': True, 'corrected_weight_decay': True}\n        ),\n        OptimInfo(\n            name='clionc',\n            opt_class=Lion,\n            description='Cautious Lion with corrected weight decay (lr²/max_lr scaling)',\n            has_eps=False,\n            has_betas=True,\n            defaults={'caution': True, 'corrected_weight_decay': True}\n        ),\n        OptimInfo(\n            name='cadafactorbvc',\n            opt_class=AdafactorBigVision,\n            description='Cautious Adafactor Big Vision with corrected weight decay',\n            defaults={'caution': True, 'corrected_weight_decay': True}\n        ),\n    ]\n    for opt in cautious_corrected:\n        registry.register(opt)\n\n\ndef _register_cautious_optimizers(registry: OptimizerRegistry) -> None:\n    cautious_optimizers = [\n        OptimInfo(\n            name='cadafactor',\n            opt_class=Adafactor,\n            
description='Cautious Adafactor',\n            defaults={'caution': True}\n        ),\n        OptimInfo(\n            name='cadafactorbv',\n            opt_class=AdafactorBigVision,\n            description='Cautious Big Vision Adafactor',\n            defaults={'caution': True}\n        ),\n        OptimInfo(\n            name='cadamw',\n            opt_class=AdamWLegacy,\n            description='Cautious AdamW',\n            has_betas=True,\n            defaults={'caution': True}\n        ),\n        OptimInfo(\n            name='cadopt',\n            opt_class=Adopt,\n            description='Cautious Adopt',\n            defaults={'caution': True}\n        ),\n        OptimInfo(\n            name='cadan',\n            opt_class=Adan,\n            description='Cautious Adaptive Nesterov Momentum Algorithm',\n            defaults={'caution': True, 'no_prox': False},\n            has_betas=True,\n            num_betas=3\n        ),\n        OptimInfo(\n            name='cadanw',\n            opt_class=Adan,\n            description='Cautious Adaptive Nesterov Momentum with decoupled weight decay',\n            defaults={'caution': True, 'no_prox': True},\n            has_betas=True,\n            num_betas=3\n        ),\n        OptimInfo(\n            name='cadoptw',\n            opt_class=Adopt,\n            description='Cautious AdoptW (decoupled decay)',\n            defaults={'decoupled': True, 'caution': True}\n        ),\n        OptimInfo(\n            name='clamb',\n            opt_class=Lamb,\n            description='Cautious LAMB',\n            has_betas=True,\n            defaults={'caution': True}\n        ),\n        OptimInfo(\n            name='clambw',\n            opt_class=Lamb,\n            description='Cautious LAMB with decoupled weight decay',\n            has_betas=True,\n            defaults={'caution': True, 'decoupled_decay': True}\n        ),\n        OptimInfo(\n            name='claprop',\n            opt_class=LaProp,\n            
description='Cautious LaProp',\n            has_betas=True,\n            defaults={'caution': True}\n        ),\n        OptimInfo(\n            name='clion',\n            opt_class=Lion,\n            description='Cautious Lion',\n            has_eps=False,\n            has_betas=True,\n            defaults = {'caution': True}\n        ),\n        OptimInfo(\n            name='cmars',\n            opt_class=Mars,\n            description='Cautious MARS',\n            has_betas=True,\n            defaults={'caution': True}\n        ),\n        OptimInfo(\n            name='cnadamw',\n            opt_class=NAdamW,\n            description='Cautious NAdamW',\n            has_betas=True,\n            defaults={'caution': True}\n        ),\n        OptimInfo(\n            name='crmsproptf',\n            opt_class=RMSpropTF,\n            description='Cautious TensorFlow-style RMSprop',\n            has_momentum=True,\n            defaults={'alpha': 0.9, 'caution': True}\n        ),\n        OptimInfo(\n            name='csgdw',\n            opt_class=SGDW,\n            description='Cautious SGD with decoupled weight decay and Nesterov momentum',\n            has_eps=False,\n            has_momentum=True,\n            defaults={'nesterov': True, 'caution': True}\n        ),\n        OptimInfo(\n            name='cadamp',\n            opt_class=AdamP,\n            description='Add the spherical cautious optimizer and the standard cautious optimizer to AdamP',\n            has_betas=True,\n            defaults={'wd_ratio': 0.01, 'nesterov': True, 'caution': True}\n        ),\n        OptimInfo(\n            name='csgdp',\n            opt_class=SGDP,\n            description='Add the spherical cautious optimizer and the standard cautious optimizer to SGDP',\n            has_momentum=True,\n            defaults={'nesterov': True, 'caution': True}\n        ),\n    ]\n    for opt in cautious_optimizers:\n        registry.register(opt)\n\ndef _register_other_optimizers(registry: 
OptimizerRegistry) -> None:\n    \"\"\"Register miscellaneous optimizers\"\"\"\n    other_optimizers = [\n        OptimInfo(\n            name='adabelief',\n            opt_class=AdaBelief,\n            description='Adapts learning rate based on gradient prediction error',\n            has_betas=True,\n            defaults={'rectify': False}\n        ),\n        OptimInfo(\n            name='radabelief',\n            opt_class=AdaBelief,\n            description='Rectified AdaBelief with variance adaptation',\n            has_betas=True,\n            defaults={'rectify': True}\n        ),\n        OptimInfo(\n            name='adadelta',\n            opt_class=torch.optim.Adadelta,\n            description='torch.optim.Adadelta, Adapts learning rates based on running windows of gradients'\n        ),\n        OptimInfo(\n            name='adagrad',\n            opt_class=torch.optim.Adagrad,\n            description='torch.optim.Adagrad, Adapts learning rates using cumulative squared gradients',\n            defaults={'eps': 1e-8}\n        ),\n        OptimInfo(\n            name='adan',\n            opt_class=Adan,\n            description='Adaptive Nesterov Momentum Algorithm',\n            defaults={'no_prox': False},\n            has_betas=True,\n            num_betas=3\n        ),\n        OptimInfo(\n            name='adanw',\n            opt_class=Adan,\n            description='Adaptive Nesterov Momentum with decoupled weight decay',\n            defaults={'no_prox': True},\n            has_betas=True,\n            num_betas=3\n        ),\n        OptimInfo(\n            name='adahessian',\n            opt_class=Adahessian,\n            description='An Adaptive Second Order Optimizer',\n            has_betas=True,\n            second_order=True,\n        ),\n        OptimInfo(\n            name='kron',\n            opt_class=Kron,\n            description='PSGD optimizer with Kronecker-factored preconditioner',\n            has_eps=False,\n            
has_momentum=True,\n        ),\n        OptimInfo(\n            name='kronw',\n            opt_class=Kron,\n            description='PSGD optimizer with Kronecker-factored preconditioner and decoupled weight decay',\n            has_momentum=True,\n            has_eps=False,\n            defaults={'decoupled_decay': True}\n        ),\n        OptimInfo(\n            name='laprop',\n            opt_class=LaProp,\n            description='Separating Momentum and Adaptivity in Adam',\n            has_betas=True,\n        ),\n        OptimInfo(\n            name='lion',\n            opt_class=Lion,\n            description='Evolved Sign Momentum optimizer for improved convergence',\n            has_eps=False,\n            has_betas=True\n        ),\n        OptimInfo(\n            name='madgrad',\n            opt_class=MADGRAD,\n            description='Momentum-based Adaptive gradient method',\n            has_momentum=True\n        ),\n        OptimInfo(\n            name='madgradw',\n            opt_class=MADGRAD,\n            description='MADGRAD with decoupled weight decay',\n            has_momentum=True,\n            defaults={'decoupled_decay': True}\n        ),\n        OptimInfo(\n            name='mars',\n            opt_class=Mars,\n            description='Unleashing the Power of Variance Reduction for Training Large Models',\n            has_betas=True,\n        ),\n        OptimInfo(\n            name='muon',\n            opt_class=Muon,\n            description='MomentUm Orthogonalized by Newton-schulz with AdamW fallback for 1D params',\n            has_momentum=True,\n            has_eps=True,\n            has_betas=True,\n        ),\n        OptimInfo(\n            name='nmuon',\n            opt_class=Muon,\n            description='MomentUm Orthogonalized by Newton-schulz with Nesterov and NAdamW fallback for 1D params',\n            has_momentum=True,\n            has_eps=True,\n            has_betas=True,\n            defaults={'nesterov': True}\n 
       ),\n        OptimInfo(\n            name='adamuon',\n            opt_class=Muon,\n            description='AdaMuon: Muon with adaptive second moment estimation on orthogonalized directions',\n            has_momentum=True,\n            has_eps=True,\n            has_betas=True,\n            defaults={'algo': 'adamuon'}\n        ),\n        OptimInfo(\n            name='nadamuon',\n            opt_class=Muon,\n            description='AdaMuon with Nesterov momentum and NAdamW fallback for 1D params',\n            has_momentum=True,\n            has_eps=True,\n            has_betas=True,\n            defaults={'algo': 'adamuon', 'nesterov': True}\n        ),\n        OptimInfo(\n            name='novograd',\n            opt_class=NvNovoGrad,\n            description='Normalized Adam with L2 norm gradient normalization',\n            has_betas=True\n        ),\n        OptimInfo(\n            name='rmsprop',\n            opt_class=torch.optim.RMSprop,\n            description='torch.optim.RMSprop, Root Mean Square Propagation',\n            has_momentum=True,\n            defaults={'alpha': 0.9}\n        ),\n        OptimInfo(\n            name='rmsproptf',\n            opt_class=RMSpropTF,\n            description='TensorFlow-style RMSprop implementation, Root Mean Square Propagation',\n            has_momentum=True,\n            defaults={'alpha': 0.9}\n        ),\n    ]\n    for opt in other_optimizers:\n        registry.register(opt)\n    registry.register_foreach_default('lion')\n\n\ndef _register_apex_optimizers(registry: OptimizerRegistry) -> None:\n    \"\"\"Register APEX optimizers (lazy import)\"\"\"\n    apex_optimizers = [\n        OptimInfo(\n            name='fusedsgd',\n            opt_class='apex.optimizers.FusedSGD',\n            description='NVIDIA APEX fused SGD implementation for faster training',\n            has_eps=False,\n            has_momentum=True,\n            defaults={'nesterov': True}\n        ),\n        OptimInfo(\n            
name='fusedadam',\n            opt_class='apex.optimizers.FusedAdam',\n            description='NVIDIA APEX fused Adam implementation',\n            has_betas=True,\n            defaults={'adam_w_mode': False}\n        ),\n        OptimInfo(\n            name='fusedadamw',\n            opt_class='apex.optimizers.FusedAdam',\n            description='NVIDIA APEX fused AdamW implementation',\n            has_betas=True,\n            defaults={'adam_w_mode': True}\n        ),\n        OptimInfo(\n            name='fusedlamb',\n            opt_class='apex.optimizers.FusedLAMB',\n            description='NVIDIA APEX fused LAMB implementation',\n            has_betas=True\n        ),\n        OptimInfo(\n            name='fusednovograd',\n            opt_class='apex.optimizers.FusedNovoGrad',\n            description='NVIDIA APEX fused NovoGrad implementation',\n            has_betas=True,\n            defaults={'betas': (0.95, 0.98)}\n        ),\n    ]\n    for opt in apex_optimizers:\n        registry.register(opt)\n\n\ndef _register_bnb_optimizers(registry: OptimizerRegistry) -> None:\n    \"\"\"Register bitsandbytes optimizers (lazy import)\"\"\"\n    bnb_optimizers = [\n        OptimInfo(\n            name='bnbsgd',\n            opt_class='bitsandbytes.optim.SGD',\n            description='bitsandbytes SGD',\n            has_eps=False,\n            has_momentum=True,\n            defaults={'nesterov': True}\n        ),\n        OptimInfo(\n            name='bnbsgd8bit',\n            opt_class='bitsandbytes.optim.SGD8bit',\n            description='bitsandbytes 8-bit SGD with dynamic quantization',\n            has_eps=False,\n            has_momentum=True,\n            defaults={'nesterov': True}\n        ),\n        OptimInfo(\n            name='bnbadam',\n            opt_class='bitsandbytes.optim.Adam',\n            description='bitsandbytes Adam',\n            has_betas=True\n        ),\n        OptimInfo(\n            name='bnbadam8bit',\n            
opt_class='bitsandbytes.optim.Adam',\n            description='bitsandbytes 8-bit Adam with dynamic quantization',\n            has_betas=True\n        ),\n        OptimInfo(\n            name='bnbadamw',\n            opt_class='bitsandbytes.optim.AdamW',\n            description='bitsandbytes AdamW',\n            has_betas=True\n        ),\n        OptimInfo(\n            name='bnbadamw8bit',\n            opt_class='bitsandbytes.optim.AdamW',\n            description='bitsandbytes 8-bit AdamW with dynamic quantization',\n            has_betas=True\n        ),\n        OptimInfo(\n            'bnblion',\n            'bitsandbytes.optim.Lion',\n            description='bitsandbytes Lion',\n            has_eps=False,\n            has_betas=True\n        ),\n        OptimInfo(\n            'bnblion8bit',\n            'bitsandbytes.optim.Lion8bit',\n            description='bitsandbytes 8-bit Lion with dynamic quantization',\n            has_eps=False,\n            has_betas=True\n        ),\n        OptimInfo(\n            'bnbademamix',\n            'bitsandbytes.optim.AdEMAMix',\n            description='bitsandbytes AdEMAMix',\n            has_betas=True,\n            num_betas=3,\n        ),\n        OptimInfo(\n            'bnbademamix8bit',\n            'bitsandbytes.optim.AdEMAMix8bit',\n            description='bitsandbytes 8-bit AdEMAMix with dynamic quantization',\n            has_betas=True,\n            num_betas=3,\n        ),\n    ]\n    for opt in bnb_optimizers:\n        registry.register(opt)\n\n\ndefault_registry = OptimizerRegistry()\n\ndef _register_default_optimizers() -> None:\n    \"\"\"Register all default optimizers to the global registry.\"\"\"\n    # Register all optimizer groups\n    _register_sgd_variants(default_registry)\n    _register_adam_variants(default_registry)\n    _register_lamb_lars(default_registry)\n    _register_other_optimizers(default_registry)\n    _register_apex_optimizers(default_registry)\n    
_register_bnb_optimizers(default_registry)\n    _register_cautious_optimizers(default_registry)\n    _register_corrected_decay_optimizers(default_registry)\n\n    # Register aliases\n    default_registry.register_alias('nesterov', 'sgd')\n    default_registry.register_alias('nesterovw', 'sgdw')\n\n\n# Initialize default registry\n_register_default_optimizers()\n\n# Public API\n\ndef list_optimizers(\n        filter: Union[str, List[str]] = '',\n        exclude_filters: Optional[List[str]] = None,\n        with_description: bool = False,\n) -> List[Union[str, Tuple[str, str]]]:\n    \"\"\"List available optimizer names, optionally filtered.\n\n    List all registered optimizers, with optional filtering using wildcard patterns.\n    Optimizers can be filtered using include and exclude patterns, and can optionally\n    return descriptions with each optimizer name.\n\n    Args:\n        filter: Wildcard style filter string or list of filter strings\n            (e.g., 'adam*' for all Adam variants, or ['adam*', '*8bit'] for\n            Adam variants and 8-bit optimizers). Empty string means no filtering.\n        exclude_filters: Optional list of wildcard patterns to exclude. For example,\n            ['*8bit', 'fused*'] would exclude 8-bit and fused implementations.\n        with_description: If True, returns tuples of (name, description) instead of\n            just names. 
Descriptions provide brief explanations of optimizer characteristics.\n\n    Returns:\n        If with_description is False:\n            List of optimizer names as strings (e.g., ['adam', 'adamw', ...])\n        If with_description is True:\n            List of tuples of (name, description) (e.g., [('adam', 'Adaptive Moment...'), ...])\n\n    Examples:\n        >>> list_optimizers()\n        ['adam', 'adamw', 'sgd', ...]\n\n        >>> list_optimizers(['la*', 'nla*'])  # List lamb & lars\n        ['lamb', 'lambc', 'larc', 'lars', 'nlarc', 'nlars']\n\n        >>> list_optimizers('*adam*', exclude_filters=['bnb*', 'fused*'])  # Exclude bnb & apex adam optimizers\n        ['adam', 'adamax', 'adamp', 'adamw', 'nadam', 'nadamw', 'radam']\n\n        >>> list_optimizers(with_description=True)  # Get descriptions\n        [('adabelief', 'Adapts learning rate based on gradient prediction error'),\n         ('adadelta', 'torch.optim Adadelta, Adapts learning rates based on running windows of gradients'),\n         ('adafactor', 'Memory-efficient implementation of Adam with factored gradients'),\n        ...]\n    \"\"\"\n    return default_registry.list_optimizers(filter, exclude_filters, with_description)\n\n\ndef get_optimizer_info(name: str) -> OptimInfo:\n    \"\"\"Get the OptimInfo for an optimizer.\n\n    Args:\n        name: Name of the optimizer\n\n    Returns:\n        OptimInfo configuration\n\n    Raises:\n        ValueError: If optimizer is not found\n    \"\"\"\n    return default_registry.get_optimizer_info(name)\n\n\ndef get_optimizer_class(\n        name: str,\n        bind_defaults: bool = True,\n) -> Union[OptimType, OptimizerCallable]:\n    \"\"\"Get optimizer class by name with option to bind default arguments.\n\n    Retrieves the optimizer class or a partial function with default arguments bound.\n    This allows direct instantiation of optimizers with their default configurations\n    without going through the full factory.\n\n    Args:\n        name: 
Name of the optimizer to retrieve (e.g., 'adam', 'sgd')\n        bind_defaults: If True, returns a partial function with default arguments from OptimInfo bound.\n            If False, returns the raw optimizer class.\n\n    Returns:\n        If bind_defaults is False:\n            The optimizer class (e.g., torch.optim.Adam)\n        If bind_defaults is True:\n            A partial function with default arguments bound\n\n    Raises:\n        ValueError: If optimizer name is not found in registry\n\n    Examples:\n        >>> # Get SGD with nesterov momentum default\n        >>> SGD = get_optimizer_class('sgd')  # nesterov=True bound\n        >>> opt = SGD(model.parameters(), lr=0.1, momentum=0.9)\n\n        >>> # Get raw optimizer class\n        >>> SGD = get_optimizer_class('sgd')\n        >>> opt = SGD(model.parameters(), lr=1e-3, momentum=0.9)\n\n    \"\"\"\n    return default_registry.get_optimizer_class(name, bind_defaults=bind_defaults)\n\n\ndef create_optimizer_v2(\n        model_or_params: Union[nn.Module, ParamsT],\n        opt: str = 'sgd',\n        lr: Optional[float] = None,\n        weight_decay: float = 0.,\n        momentum: float = 0.9,\n        foreach: Optional[bool] = None,\n        filter_bias_and_bn: bool = True,\n        fallback_list: Collection[str] = (),\n        fallback_no_weight_decay: bool = False,\n        layer_decay: Optional[float] = None,\n        layer_decay_min_scale: float = 0.0,\n        layer_decay_no_opt_scale: Optional[float] = None,\n        param_group_fn: Optional[Callable[[nn.Module], ParamsT]] = None,\n        **kwargs: Any,\n) -> torch.optim.Optimizer:\n    \"\"\"Create an optimizer instance via timm registry.\n\n    Creates and configures an optimizer with appropriate parameter groups and settings.\n    Supports automatic parameter group creation for weight decay and layer-wise learning\n    rates, as well as custom parameter grouping.\n\n    Args:\n        model_or_params: A PyTorch model or an iterable of 
parameters/parameter groups.\n            If a model is provided, parameters will be automatically extracted and grouped\n            based on the other arguments.\n        opt: Name of the optimizer to create (e.g., 'adam', 'adamw', 'sgd').\n            Use list_optimizers() to see available options.\n        lr: Learning rate. If None, will use the optimizer's default.\n        weight_decay: Weight decay factor. Will be used to create param groups if model_or_params is a model.\n        momentum: Momentum factor for optimizers that support it. Only used if the\n            chosen optimizer accepts a momentum parameter.\n        foreach: Enable/disable foreach (multi-tensor) implementation if available.\n            If None, will use optimizer-specific defaults.\n        filter_bias_and_bn: If True, bias, norm layer parameters (all 1d params) will not have\n            weight decay applied. Only used when model_or_params is a model and\n            weight_decay > 0.\n        fallback_list: Collection of parameter name patterns to use fallback optimizer for\n            hybrid optimizers (e.g., AdamW for Muon). Supports wildcard matching.\n        fallback_no_weight_decay: If True, params in model's no_weight_decay() list will use\n            fallback optimizer for hybrid optimizers (e.g., AdamW for Muon).\n        layer_decay: Optional layer-wise learning rate decay factor. 
If provided,\n            learning rates will be scaled by layer_decay^(max_depth - layer_depth).\n            Only used when model_or_params is a model.\n        param_group_fn: Optional function to create custom parameter groups.\n            If provided, other parameter grouping options will be ignored.\n        **kwargs: Additional optimizer-specific arguments (e.g., betas for Adam).\n\n    Returns:\n        Configured optimizer instance.\n\n    Examples:\n        >>> # Basic usage with a model\n        >>> optimizer = create_optimizer_v2(model, 'adamw', lr=1e-3)\n\n        >>> # SGD with momentum and weight decay\n        >>> optimizer = create_optimizer_v2(\n        ...     model, 'sgd', lr=0.1, momentum=0.9, weight_decay=1e-4\n        ... )\n\n        >>> # Adam with layer-wise learning rate decay\n        >>> optimizer = create_optimizer_v2(\n        ...     model, 'adam', lr=1e-3, layer_decay=0.7\n        ... )\n\n        >>> # Custom parameter groups\n        >>> def group_fn(model):\n        ...     return [\n        ...         {'params': model.backbone.parameters(), 'lr': 1e-4},\n        ...         {'params': model.head.parameters(), 'lr': 1e-3}\n        ...     ]\n        >>> optimizer = create_optimizer_v2(\n        ...     model, 'sgd', param_group_fn=group_fn\n        ... )\n\n    Note:\n        Parameter group handling precedence:\n        1. If param_group_fn is provided, it will be used exclusively\n        2. If layer_decay is provided, layer-wise groups will be created\n        3. If weight_decay > 0 and filter_bias_and_bn is True, weight decay groups will be created\n        4. 
Otherwise, all parameters will be in a single group\n    \"\"\"\n\n    return default_registry.create_optimizer(\n        model_or_params,\n        opt=opt,\n        lr=lr,\n        weight_decay=weight_decay,\n        momentum=momentum,\n        foreach=foreach,\n        weight_decay_exclude_1d=filter_bias_and_bn,\n        fallback_list=fallback_list,\n        fallback_no_weight_decay=fallback_no_weight_decay,\n        layer_decay=layer_decay,\n        layer_decay_min_scale=layer_decay_min_scale,\n        layer_decay_no_opt_scale=layer_decay_no_opt_scale,\n        param_group_fn=param_group_fn,\n        **kwargs\n    )\n\n\ndef optimizer_kwargs(cfg):\n    \"\"\"Convert argparse-style `cfg` object to kwargs for an optimizer factory.\"\"\"\n    kwargs = {\n        'opt':           cfg.opt,\n        'lr':            cfg.lr,\n        'weight_decay':  cfg.weight_decay,\n        'momentum':      cfg.momentum,\n    }\n    if (eps := getattr(cfg, 'opt_eps', None)) is not None:\n        kwargs['eps'] = eps\n    if (betas := getattr(cfg, 'opt_betas', None)) is not None:\n        kwargs['betas'] = betas\n    if (layer_decay := getattr(cfg, 'layer_decay', None)) is not None:\n        kwargs['layer_decay'] = layer_decay\n    if (ld_min := getattr(cfg, 'layer_decay_min_scale', None)) is not None:\n        kwargs['layer_decay_min_scale'] = ld_min\n    if (ld_no_opt := getattr(cfg, 'layer_decay_no_opt_scale', None)) is not None:\n        kwargs['layer_decay_no_opt_scale'] = ld_no_opt\n    if (opt_args := getattr(cfg, 'opt_args', None)) is not None:\n        kwargs.update(opt_args)\n    if (foreach := getattr(cfg, 'opt_foreach', None)) is not None:\n        kwargs['foreach'] = foreach\n\n    return kwargs\n\n\ndef create_optimizer(\n        args,\n        model: Union[nn.Module, ParamsT],\n        filter_bias_and_bn: bool = True,\n) -> torch.optim.Optimizer:\n    \"\"\" Legacy optimizer factory for backwards compatibility.\n    NOTE: Use create_optimizer_v2 for new code.\n    
\"\"\"\n    return create_optimizer_v2(\n        model,\n        **optimizer_kwargs(cfg=args),\n        filter_bias_and_bn=filter_bias_and_bn,\n    )\n\n"
  },
  {
    "path": "timm/optim/_param_groups.py",
    "content": "import fnmatch\nimport logging\nfrom itertools import islice\nfrom typing import Collection, Optional\n\nfrom torch import nn as nn\n\nfrom timm.models import group_parameters\n\n\n_logger = logging.getLogger(__name__)\n\n\ndef _matches_pattern(name: str, patterns: Collection[str]) -> bool:\n    \"\"\"Check if parameter name matches any pattern (supports wildcards).\"\"\"\n    return any(fnmatch.fnmatch(name, pattern) for pattern in patterns)\n\n\ndef param_groups_weight_decay(\n        model: nn.Module,\n        weight_decay: float = 1e-5,\n        no_weight_decay_list: Collection[str] = (),\n        fallback_list: Collection[str] = (),\n        fallback_no_weight_decay: bool = False,\n):\n    # Merge no_weight_decay into fallback_list if requested\n    if fallback_no_weight_decay:\n        fallback_list = set(fallback_list) | set(no_weight_decay_list)\n\n    decay = []\n    decay_fallback = []\n    no_decay = []\n    no_decay_fallback = []\n    for name, param in model.named_parameters():\n        if not param.requires_grad:\n            continue\n\n        # Determine if this is a \"fallback\" parameter for fallback optimizer (if available)\n        is_fallback = _matches_pattern(name, fallback_list)\n\n        # Determine weight decay\n        matches_pattern = _matches_pattern(name, no_weight_decay_list)\n        if param.ndim <= 1 or name.endswith(\".bias\") or matches_pattern:\n            # No weight decay\n            if is_fallback:\n                no_decay_fallback.append(param)\n            else:\n                no_decay.append(param)\n        else:\n            # With weight decay\n            if is_fallback:\n                decay_fallback.append(param)\n            else:\n                decay.append(param)\n\n    groups = []\n    if no_decay:\n        groups.append({'params': no_decay, 'weight_decay': 0.})\n    if decay:\n        groups.append({'params': decay, 'weight_decay': weight_decay})\n    if no_decay_fallback:\n        
groups.append({'params': no_decay_fallback, 'weight_decay': 0., 'use_fallback': True})\n    if decay_fallback:\n        groups.append({'params': decay_fallback, 'weight_decay': weight_decay, 'use_fallback': True})\n\n    return groups\n\ndef _group(it, size):\n    it = iter(it)\n    return iter(lambda: tuple(islice(it, size)), ())\n\n\ndef auto_group_layers(model, layers_per_group=12, num_groups=None):\n    def _in_head(n, hp):\n        if not hp:\n            return True\n        elif isinstance(hp, (tuple, list)):\n            return any([n.startswith(hpi) for hpi in hp])\n        else:\n            return n.startswith(hp)\n\n    head_prefix = getattr(model, 'pretrained_cfg', {}).get('classifier', None)\n    names_trunk = []\n    names_head = []\n    for n, _ in model.named_parameters():\n        names_head.append(n) if _in_head(n, head_prefix) else names_trunk.append(n)\n\n    # group non-head layers\n    num_trunk_layers = len(names_trunk)\n    if num_groups is not None:\n        layers_per_group = -(num_trunk_layers // -num_groups)\n    names_trunk = list(_group(names_trunk, layers_per_group))\n\n    num_trunk_groups = len(names_trunk)\n    layer_map = {n: i for i, l in enumerate(names_trunk) for n in l}\n    layer_map.update({n: num_trunk_groups for n in names_head})\n    return layer_map\n\n_layer_map = auto_group_layers  # backward compat\n\n\ndef param_groups_layer_decay(\n        model: nn.Module,\n        weight_decay: float = 0.05,\n        no_weight_decay_list: Collection[str] = (),\n        fallback_list: Collection[str] = (),\n        fallback_no_weight_decay: bool = False,\n        weight_decay_exclude_1d: bool = True,\n        layer_decay: float = .75,\n        min_scale: float = 0.,\n        no_opt_scale: Optional[float] = None,\n        verbose: bool = False,\n):\n    \"\"\"\n    Parameter groups for layer-wise lr decay & weight decay\n    Based on BEiT: https://github.com/microsoft/unilm/blob/master/beit/optim_factory.py#L58\n    \"\"\"\n    # 
Merge no_weight_decay into fallback_list if requested\n    if fallback_no_weight_decay:\n        fallback_list = set(fallback_list) | set(no_weight_decay_list)\n\n    param_group_names = {}  # NOTE for debugging\n    param_groups = {}\n\n    if hasattr(model, 'group_matcher'):\n        # FIXME interface needs more work\n        layer_map = group_parameters(model, model.group_matcher(coarse=False), reverse=True)\n    else:\n        # fallback\n        layer_map = auto_group_layers(model)\n    num_layers = max(layer_map.values()) + 1\n    layer_max = num_layers - 1\n    layer_scales = list(max(min_scale, layer_decay ** (layer_max - i)) for i in range(num_layers))\n\n    for name, param in model.named_parameters():\n        if not param.requires_grad:\n            continue\n\n        # Determine if this is a \"fallback\" parameter for fallback optimizer (if available)\n        is_fallback = _matches_pattern(name, fallback_list)\n\n        # Determine weight decay\n        if (weight_decay_exclude_1d and param.ndim <= 1) or _matches_pattern(name, no_weight_decay_list):\n            # no weight decay for 1D parameters and model specific ones\n            g_decay = \"no_decay\"\n            this_decay = 0.\n        else:\n            g_decay = \"decay\"\n            this_decay = weight_decay\n\n        layer_id = layer_map.get(name, layer_max)\n        this_scale = layer_scales[layer_id]\n        if no_opt_scale and this_scale < no_opt_scale:\n            # if the calculated layer scale is below this, exclude from optimization\n            param.requires_grad = False\n            continue\n\n        fallback_suffix = \"_fallback\" if is_fallback else \"\"\n        group_name = \"layer_%d_%s%s\" % (layer_id, g_decay, fallback_suffix)\n\n        if group_name not in param_groups:\n            param_group_names[group_name] = {\n                \"lr_scale\": this_scale,\n                \"weight_decay\": this_decay,\n                \"use_fallback\": is_fallback,\n           
     \"param_names\": [],\n            }\n            param_groups[group_name] = {\n                \"lr_scale\": this_scale,\n                \"weight_decay\": this_decay,\n                \"params\": [],\n            }\n            if is_fallback:\n                param_groups[group_name][\"use_fallback\"] = True\n\n        param_group_names[group_name][\"param_names\"].append(name)\n        param_groups[group_name][\"params\"].append(param)\n\n    if verbose:\n        import json\n        _logger.info(\"parameter groups: \\n%s\" % json.dumps(param_group_names, indent=2))\n\n    return list(param_groups.values())\n"
  },
  {
    "path": "timm/optim/_types.py",
    "content": "from typing import Any, Dict, Iterable, Union, Protocol, Type\ntry:\n    from typing import TypeAlias\nexcept ImportError:\n    from typing_extensions import TypeAlias\ntry:\n    from typing import TypeVar\nexcept ImportError:\n    from typing_extensions import TypeVar\n\nimport torch\nimport torch.optim\n\ntry:\n    from torch.optim.optimizer import ParamsT\nexcept (ImportError, TypeError):\n    ParamsT: TypeAlias = Union[Iterable[torch.Tensor], Iterable[Dict[str, Any]]]\n\n\nOptimType = Type[torch.optim.Optimizer]\n\n\nclass OptimizerCallable(Protocol):\n    \"\"\"Protocol for optimizer constructor signatures.\"\"\"\n\n    def __call__(self, params: ParamsT, **kwargs) -> torch.optim.Optimizer: ...\n\n\n__all__ = ['ParamsT', 'OptimType', 'OptimizerCallable']"
  },
  {
    "path": "timm/optim/adabelief.py",
    "content": "import math\nimport torch\nfrom torch.optim.optimizer import Optimizer\n\n\nclass AdaBelief(Optimizer):\n    r\"\"\"Implements AdaBelief algorithm. Modified from Adam in PyTorch\n\n    Arguments:\n        params (iterable): iterable of parameters to optimize or dicts defining\n            parameter groups\n        lr (float, optional): learning rate (default: 1e-3)\n        betas (Tuple[float, float], optional): coefficients used for computing\n            running averages of gradient and its square (default: (0.9, 0.999))\n        eps (float, optional): term added to the denominator to improve\n            numerical stability (default: 1e-16)\n        weight_decay (float, optional): weight decay (L2 penalty) (default: 0)\n        amsgrad (boolean, optional): whether to use the AMSGrad variant of this\n            algorithm from the paper `On the Convergence of Adam and Beyond`_\n            (default: False)\n        decoupled_decay (boolean, optional): (default: True) If set as True, then\n            the optimizer uses decoupled weight decay as in AdamW\n        fixed_decay (boolean, optional): (default: False) This is used when weight_decouple\n            is set as True.\n            When fixed_decay == True, the weight decay is performed as\n            $W_{new} = W_{old} - W_{old} \\times decay$.\n            When fixed_decay == False, the weight decay is performed as\n            $W_{new} = W_{old} - W_{old} \\times decay \\times lr$. 
Note that in this case, the\n            weight decay ratio decreases with learning rate (lr).\n        rectify (boolean, optional): (default: True) If set as True, then perform the rectified\n            update similar to RAdam\n        degenerated_to_sgd (boolean, optional) (default:True) If set as True, then perform SGD update\n            when variance of gradient is high\n    reference: AdaBelief Optimizer, adapting stepsizes by the belief in observed gradients, NeurIPS 2020\n\n    For a complete table of recommended hyperparameters, see https://github.com/juntang-zhuang/Adabelief-Optimizer'\n    For example train/args for EfficientNet see these gists\n      - link to train_script: https://gist.github.com/juntang-zhuang/0a501dd51c02278d952cf159bc233037\n      - link to args.yaml: https://gist.github.com/juntang-zhuang/517ce3c27022b908bb93f78e4f786dc3\n    \"\"\"\n\n    def __init__(\n            self,\n            params,\n            lr=1e-3,\n            betas=(0.9, 0.999),\n            eps=1e-16,\n            weight_decay=0,\n            amsgrad=False,\n            decoupled_decay=True,\n            fixed_decay=False,\n            rectify=True,\n            degenerated_to_sgd=True,\n    ):\n        if not 0.0 <= lr:\n            raise ValueError(\"Invalid learning rate: {}\".format(lr))\n        if not 0.0 <= eps:\n            raise ValueError(\"Invalid epsilon value: {}\".format(eps))\n        if not 0.0 <= betas[0] < 1.0:\n            raise ValueError(\"Invalid beta parameter at index 0: {}\".format(betas[0]))\n        if not 0.0 <= betas[1] < 1.0:\n            raise ValueError(\"Invalid beta parameter at index 1: {}\".format(betas[1]))\n\n        if isinstance(params, (list, tuple)) and len(params) > 0 and isinstance(params[0], dict):\n            for param in params:\n                if 'betas' in param and (param['betas'][0] != betas[0] or param['betas'][1] != betas[1]):\n                    param['buffer'] = [[None, None, None] for _ in range(10)]\n\n 
       defaults = dict(\n            lr=lr,\n            betas=betas,\n            eps=eps,\n            weight_decay=weight_decay,\n            amsgrad=amsgrad,\n            degenerated_to_sgd=degenerated_to_sgd,\n            decoupled_decay=decoupled_decay,\n            rectify=rectify,\n            fixed_decay=fixed_decay,\n            buffer=[[None, None, None] for _ in range(10)]\n        )\n        super(AdaBelief, self).__init__(params, defaults)\n\n    def __setstate__(self, state):\n        super(AdaBelief, self).__setstate__(state)\n        for group in self.param_groups:\n            group.setdefault('amsgrad', False)\n\n    @torch.no_grad()\n    def reset(self):\n        for group in self.param_groups:\n            for p in group['params']:\n                state = self.state[p]\n                amsgrad = group['amsgrad']\n\n                # State initialization\n                state['step'] = 0\n                # Exponential moving average of gradient values\n                state['exp_avg'] = torch.zeros_like(p)\n\n                # Exponential moving average of squared gradient values\n                state['exp_avg_var'] = torch.zeros_like(p)\n                if amsgrad:\n                    # Maintains max of all exp. moving avg. of sq. grad. 
values\n                    state['max_exp_avg_var'] = torch.zeros_like(p)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        \"\"\"Performs a single optimization step.\n        Arguments:\n            closure (callable, optional): A closure that reevaluates the model\n                and returns the loss.\n        \"\"\"\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n                grad = p.grad\n                if grad.dtype in {torch.float16, torch.bfloat16}:\n                    grad = grad.float()\n                if grad.is_sparse:\n                    raise RuntimeError(\n                        'AdaBelief does not support sparse gradients, please consider SparseAdam instead')\n\n                p_fp32 = p\n                if p.dtype in {torch.float16, torch.bfloat16}:\n                    p_fp32 = p_fp32.float()\n\n                amsgrad = group['amsgrad']\n                beta1, beta2 = group['betas']\n                state = self.state[p]\n                # State initialization\n                if len(state) == 0:\n                    state['step'] = 0\n                    # Exponential moving average of gradient values\n                    state['exp_avg'] = torch.zeros_like(p_fp32)\n                    # Exponential moving average of squared gradient values\n                    state['exp_avg_var'] = torch.zeros_like(p_fp32)\n                    if amsgrad:\n                        # Maintains max of all exp. moving avg. of sq. grad. 
values\n                        state['max_exp_avg_var'] = torch.zeros_like(p_fp32)\n                \n                # perform weight decay, check if decoupled weight decay\n                if group['decoupled_decay']:\n                    if not group['fixed_decay']:\n                        p_fp32.mul_(1.0 - group['lr'] * group['weight_decay'])\n                    else:\n                        p_fp32.mul_(1.0 - group['weight_decay'])\n                else:\n                    if group['weight_decay'] != 0:\n                        grad.add_(p_fp32, alpha=group['weight_decay'])\n\n                # get current state variable\n                exp_avg, exp_avg_var = state['exp_avg'], state['exp_avg_var']\n\n                state['step'] += 1\n                bias_correction1 = 1 - beta1 ** state['step']\n                bias_correction2 = 1 - beta2 ** state['step']\n\n                # Update first and second moment running average\n                exp_avg.mul_(beta1).add_(grad, alpha=1 - beta1)\n                grad_residual = grad - exp_avg\n                exp_avg_var.mul_(beta2).addcmul_(grad_residual, grad_residual, value=1 - beta2)\n\n                if amsgrad:\n                    max_exp_avg_var = state['max_exp_avg_var']\n                    # Maintains the maximum of all 2nd moment running avg. till now\n                    torch.max(max_exp_avg_var, exp_avg_var.add_(group['eps']), out=max_exp_avg_var)\n\n                    # Use the max. for normalizing running avg. 
of gradient\n                    denom = (max_exp_avg_var.sqrt() / math.sqrt(bias_correction2)).add_(group['eps'])\n                else:\n                    denom = (exp_avg_var.add_(group['eps']).sqrt() / math.sqrt(bias_correction2)).add_(group['eps'])\n                \n                # update\n                if not group['rectify']:\n                    # Default update\n                    step_size = group['lr'] / bias_correction1\n                    p_fp32.addcdiv_(exp_avg, denom, value=-step_size)\n                else:\n                    # Rectified update, forked from RAdam\n                    buffered = group['buffer'][int(state['step'] % 10)]\n                    if state['step'] == buffered[0]:\n                        num_sma, step_size = buffered[1], buffered[2]\n                    else:\n                        buffered[0] = state['step']\n                        beta2_t = beta2 ** state['step']\n                        num_sma_max = 2 / (1 - beta2) - 1\n                        num_sma = num_sma_max - 2 * state['step'] * beta2_t / (1 - beta2_t)\n                        buffered[1] = num_sma\n\n                        # more conservative since it's an approximated value\n                        if num_sma >= 5:\n                            step_size = math.sqrt(\n                                (1 - beta2_t) *\n                                (num_sma - 4) / (num_sma_max - 4) *\n                                (num_sma - 2) / num_sma *\n                                num_sma_max / (num_sma_max - 2)) / (1 - beta1 ** state['step'])\n                        elif group['degenerated_to_sgd']:\n                            step_size = 1.0 / (1 - beta1 ** state['step'])\n                        else:\n                            step_size = -1\n                        buffered[2] = step_size\n\n                    if num_sma >= 5:\n                        denom = exp_avg_var.sqrt().add_(group['eps'])\n                        p_fp32.addcdiv_(exp_avg, 
denom, value=-step_size * group['lr'])\n                    elif step_size > 0:\n                        p_fp32.add_(exp_avg, alpha=-step_size * group['lr'])\n                \n                if p.dtype in {torch.float16, torch.bfloat16}:\n                    p.copy_(p_fp32)\n\n        return loss\n"
  },
  {
    "path": "timm/optim/adafactor.py",
    "content": "\"\"\" Adafactor Optimizer\n\nLifted from https://github.com/pytorch/fairseq/blob/master/fairseq/optim/adafactor.py\n\nModified by Ross Wightman to fix some issues with factorization dims for non nn.Linear layers\n\nOriginal header/copyright below.\n\"\"\"\n# Copyright (c) Facebook, Inc. and its affiliates.\n#\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\nimport math\nfrom typing import Optional, Tuple\n\nimport torch\n\nfrom ._types import ParamsT\n\n\nclass Adafactor(torch.optim.Optimizer):\n    \"\"\"Implements Adafactor algorithm.\n\n    This implementation is based on: `Adafactor: Adaptive Learning Rates with Sublinear Memory Cost`\n    (see https://arxiv.org/abs/1804.04235)\n\n    Note that this optimizer internally adjusts the learning rate depending on the\n    *scale_parameter*, *relative_step* and *warmup_init* options.\n\n    To use a manual (external) learning rate schedule you should set `scale_parameter=False` and\n    `relative_step=False`.\n\n    Ags:\n        params: iterable of parameters to optimize or dicts defining parameter groups\n        lr: external learning rate\n        eps: regularization constants for square gradient and parameter scale respectively\n        eps_scale: regularization constants for parameter scale respectively\n        clip_threshold: threshold of root-mean-square of final gradient update\n        decay_rate: coefficient used to compute running averages of square gradient\n        beta1: coefficient used for computing running averages of gradient\n        weight_decay: weight decay\n        scale_parameter: if True, learning rate is scaled by root-mean-square of parameter\n        warmup_init: time-dependent learning rate computation depends on whether warm-up initialization is being used\n    \"\"\"\n\n    def __init__(\n            self,\n            params: ParamsT,\n            lr: Optional[float] = None,\n            
eps: float = 1e-30,\n            eps_scale: float = 1e-3,\n            clip_threshold: float = 1.0,\n            decay_rate: float = -0.8,\n            betas: Optional[Tuple[float, float]] = None,\n            weight_decay: float = 0.0,\n            scale_parameter: bool = True,\n            warmup_init: bool = False,\n            min_dim_size_to_factor: int = 16,\n            caution: bool = False,\n    ):\n        relative_step = not lr\n        if warmup_init and not relative_step:\n            raise ValueError('warmup_init requires relative_step=True')\n\n        beta1 = None if betas is None else betas[0]   # make it compat with standard betas arg\n        defaults = dict(\n            lr=lr,\n            eps=eps,\n            eps_scale=eps_scale,\n            clip_threshold=clip_threshold,\n            decay_rate=decay_rate,\n            beta1=beta1,\n            weight_decay=weight_decay,\n            scale_parameter=scale_parameter,\n            relative_step=relative_step,\n            warmup_init=warmup_init,\n            min_dim_size_to_factor=min_dim_size_to_factor,\n            caution=caution,\n        )\n        super(Adafactor, self).__init__(params, defaults)\n\n    def __setstate__(self, state):\n        super().__setstate__(state)\n        for group in self.param_groups:\n            group.setdefault('caution', False)\n            group.setdefault('min_dim_size_to_factor', 16)\n\n    @staticmethod\n    def _get_lr(param_group, param_state):\n        if param_group['relative_step']:\n            min_step = 1e-6 * param_state['step'] if param_group['warmup_init'] else 1e-2\n            lr_t = min(min_step, 1.0 / math.sqrt(param_state['step']))\n            param_scale = 1.0\n            if param_group['scale_parameter']:\n                param_scale = max(param_group['eps_scale'], param_state['RMS'])\n            param_group['lr'] = lr_t * param_scale\n        return param_group['lr']\n\n    @staticmethod\n    def _get_options(param_group, 
param_shape, min_size_to_factor=16):\n        use_first_moment = param_group['beta1'] is not None\n        factored = None\n        ndim = len(param_shape)\n        # Use a simple heuristic to pick factorization row & col, note other PyTorch impl tend to\n        # always use -2, -1 BUT this will not pick correct dims for convolutions. This is a simple\n        # approach that should work in most cases, compare to the slightly more involved approach\n        # in AdafactorBigVision that sorts dims by size, please report if wrong dims chosen.\n        if ndim > 2 and param_shape[0] > min_size_to_factor and param_shape[1] > min_size_to_factor:\n            # nD convs in torch are ND + 2 dim weights with leading in/out chs\n            factored = 0, 1\n        elif ndim >= 2 and param_shape[-2] > min_size_to_factor and param_shape[-1] > min_size_to_factor:\n            # if the criteria above didn't match, test trailing dims for eligibility as per original impl\n            factored = ndim - 2, ndim - 1\n\n        return factored, use_first_moment\n\n    @staticmethod\n    def _rms(tensor):\n        return tensor.norm(2) / (tensor.numel() ** 0.5)\n\n    def _approx_sq_grad(self, exp_avg_sq_row, exp_avg_sq_col, dim_col, dim_row):\n        # from our dim heuristic, always dim_col < dim_row, so col reduction dim for factored row = dim_col\n        r_factor = (exp_avg_sq_row / exp_avg_sq_row.mean(dim=dim_col, keepdim=True)).rsqrt_().unsqueeze(dim_row)\n        c_factor = exp_avg_sq_col.unsqueeze(dim_col).rsqrt()\n        return torch.mul(r_factor, c_factor)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        \"\"\"Performs a single optimization step.\n        Arguments:\n            closure (callable, optional): A closure that reevaluates the model and returns the loss.\n        \"\"\"\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n   
         for p in group['params']:\n                if p.grad is None:\n                    continue\n                grad = p.grad\n                if grad.dtype in {torch.float16, torch.bfloat16}:\n                    grad = grad.float()\n                if grad.is_sparse:\n                    raise RuntimeError('Adafactor does not support sparse gradients.')\n\n                state = self.state[p]\n\n                factored_dims, use_first_moment = self._get_options(\n                    group,\n                    grad.shape,\n                    min_size_to_factor=group['min_dim_size_to_factor'],\n                )\n                # State Initialization\n                if len(state) == 0:\n                    state['step'] = 0\n\n                    if use_first_moment:\n                        # Exponential moving average of gradient values\n                        state['exp_avg'] = torch.zeros_like(grad)\n                    if factored_dims is not None:\n                        dim_col, dim_row = factored_dims\n                        def _remove_dim(shape, dim):\n                            return shape[:dim] + shape[dim + 1:]\n                        state['exp_avg_sq_row'] = torch.zeros(_remove_dim(grad.shape, dim_row)).to(grad)\n                        state['exp_avg_sq_col'] = torch.zeros(_remove_dim(grad.shape, dim_col)).to(grad)\n                    else:\n                        state['exp_avg_sq'] = torch.zeros_like(grad)\n\n                    state['RMS'] = 0\n                else:\n                    if use_first_moment:\n                        state['exp_avg'] = state['exp_avg'].to(grad)\n                    if factored_dims is not None:\n                        state['exp_avg_sq_row'] = state['exp_avg_sq_row'].to(grad)\n                        state['exp_avg_sq_col'] = state['exp_avg_sq_col'].to(grad)\n                    else:\n                        state['exp_avg_sq'] = state['exp_avg_sq'].to(grad)\n\n                p_fp32 = p\n    
            if p.dtype in {torch.float16, torch.bfloat16}:\n                    p_fp32 = p_fp32.float()\n\n                state['step'] += 1\n                state['RMS'] = self._rms(p_fp32)\n                lr_t = self._get_lr(group, state)\n\n                beta2t = 1.0 - math.pow(state['step'], group['decay_rate'])\n                update = grad ** 2 + group['eps']\n                if factored_dims is not None:\n                    dim_col, dim_row = factored_dims\n                    exp_avg_sq_row = state['exp_avg_sq_row']\n                    exp_avg_sq_col = state['exp_avg_sq_col']\n\n                    exp_avg_sq_row.mul_(beta2t).add_(update.mean(dim=dim_row), alpha=1.0 - beta2t)\n                    exp_avg_sq_col.mul_(beta2t).add_(update.mean(dim=dim_col), alpha=1.0 - beta2t)\n\n                    # Approximation of exponential moving average of square of gradient\n                    update = self._approx_sq_grad(exp_avg_sq_row, exp_avg_sq_col, dim_col, dim_row)\n                    update.mul_(grad)\n                else:\n                    exp_avg_sq = state['exp_avg_sq']\n\n                    exp_avg_sq.mul_(beta2t).add_(update, alpha=1.0 - beta2t)\n                    update = exp_avg_sq.rsqrt().mul_(grad)\n\n                update.div_((self._rms(update) / group['clip_threshold']).clamp_(min=1.0))\n                update.mul_(lr_t)\n\n                if use_first_moment:\n                    exp_avg = state['exp_avg']\n                    exp_avg.mul_(group['beta1']).add_(update, alpha=1 - group['beta1'])\n                    if group['caution']:\n                        # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n                        mask = (exp_avg * grad > 0).to(grad.dtype)\n                        mask.div_(mask.mean().clamp_(min=1e-3))\n                        update = exp_avg * mask\n                    else:\n                        update = exp_avg\n\n                if group['weight_decay'] != 
0:\n                    p_fp32.add_(p_fp32, alpha=-group['weight_decay'] * lr_t)\n\n                p_fp32.add_(-update)\n                if p.dtype in {torch.float16, torch.bfloat16}:\n                    p.copy_(p_fp32)\n\n        return loss\n"
  },
  {
    "path": "timm/optim/adafactor_bv.py",
    "content": "\"\"\" Adafactor (Big Vision variant) for PyTorch\n\nAdapted from the implementation in big vision: https://github.com/google-research/big_vision\n\nDescribed in 'Scaling Vision Transformers': https://arxiv.org/abs/2106.04560\n\nReferences for added functionality:\n    Cautious Optimizers: https://arxiv.org/abs/2411.16085\n    Why Gradients Rapidly Increase Near the End of Training: https://arxiv.org/abs/2506.02285\n\nAdaptation and PyTorch modifications by Ross Wightman\n\"\"\"\nfrom typing import List, Optional, Tuple, Union\n\nimport torch\nfrom torch import Tensor\nfrom torch.optim import Optimizer\n\nfrom ._types import ParamsT\n\n\ndef _get_scalar_dtype():\n    \"\"\"Get the scalar dtype that the optimizer uses for state\"\"\"\n    return torch.float64\n\n\ndef _factored_dims(\n        shape: Tuple[int, ...],\n        factored: bool,\n        min_dim_size_to_factor: int\n) -> Optional[tuple[int, int]]:\n    \"\"\"Whether to use a factored second moment estimator.\n\n    This function returns a tuple with the two largest axes to reduce over.\n    If no two dimensions have size >= min_dim_size_to_factor, return None.\n\n    Args:\n      shape: an input shape\n      factored: whether to use factored second-moment estimator for > 2d vars.\n      min_dim_size_to_factor: only factor accumulator if two array dimensions have at least this size.\n\n    Returns:\n      None or a tuple of ints\n    \"\"\"\n    if not factored or len(shape) < 2:\n        return None\n    sorted_dims = sorted(((x, i) for i, x in enumerate(shape)))\n    if shape[sorted_dims[-2][1]] < min_dim_size_to_factor:\n        return None\n    return int(sorted_dims[-2][1]), int(sorted_dims[-1][1])\n\n\nclass AdafactorBigVision(Optimizer):\n    \"\"\"\n    PyTorch implementation of BigVision's Adafactor variant with both single and multi tensor implementations.\n\n    Adapted from https://github.com/google-research/big_vision by Ross Wightman\n    \"\"\"\n\n    def __init__(\n         
   self,\n            params: ParamsT,\n            lr: float = 1.0,\n            min_dim_size_to_factor: int = 16,\n            decay_rate: float = 0.8,\n            decay_offset: int = 0,\n            beta2_cap: float = 0.999,\n            momentum: Optional[float] = 0.9,\n            momentum_dtype: Union[str, torch.dtype] = torch.bfloat16,\n            eps: Optional[float] = None,\n            weight_decay: float = 0.0,\n            clipping_threshold: Optional[float] = None,\n            unscaled_wd: bool = False,\n            caution: bool = False,\n            corrected_weight_decay: bool = False,\n            *,\n            foreach: Optional[bool] = False,\n    ):\n        if isinstance(momentum_dtype, str):\n            if momentum_dtype == 'float16':\n                momentum_dtype = torch.float16\n            elif momentum_dtype == 'bfloat16':\n                momentum_dtype = torch.bfloat16\n            else:\n                assert momentum_dtype == 'float32', f'{momentum_dtype} dtype not supported'\n                momentum_dtype = torch.float32\n        # FIXME try to check if momentum dtype is appropriate for device? 
Torch API not great for this.\n\n        defaults = dict(\n            lr=lr,\n            min_dim_size_to_factor=min_dim_size_to_factor,\n            decay_rate=decay_rate,\n            decay_offset=decay_offset,\n            beta2_cap=beta2_cap,\n            momentum=momentum,\n            momentum_dtype=momentum_dtype,\n            eps=eps,\n            weight_decay=weight_decay,\n            clipping_threshold=clipping_threshold,\n            unscaled_wd=unscaled_wd,\n            caution=caution,\n            corrected_weight_decay=corrected_weight_decay,\n            foreach=foreach,\n        )\n        super().__init__(params, defaults)\n\n    def __setstate__(self, state):\n        super().__setstate__(state)\n        for group in self.param_groups:\n            group.setdefault('caution', False)\n            group.setdefault('corrected_weight_decay', False)\n            group.setdefault('foreach', None)\n            for p in group['params']:\n                p_state = self.state.get(p, {})\n                if len(p_state) != 0 and not torch.is_tensor(p_state['step']):\n                    p_state['step'] = torch.tensor(float(p_state['step']), dtype=_get_scalar_dtype())\n\n                if 'exp_avg' in p_state and torch.is_tensor(p_state['exp_avg']):\n                    # FIXME this is a bit of a hack, optimizer.load_state_dict appears to upcast\n                    # the momentum to float32 (it's half precision in the state_dict), need to\n                    # look into this further. 
Better to override _process_value_according_to_param_policy?\n                    p_state['exp_avg'] = p_state['exp_avg'].to(dtype=self.defaults['momentum_dtype'])\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n            params_with_grad = []\n            grads = []\n            exp_avg_sq_rs = []\n            exp_avg_sq_cs = []\n            exp_avg_sqs = []\n            state_steps = []\n            exp_avgs = []  # For momentum\n\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n\n                if p.grad.is_sparse:\n                    raise RuntimeError(\"Sparse gradients not supported\")\n\n                params_with_grad.append(p)\n                grads.append(p.grad)\n\n                state = self.state[p]\n\n                if len(state) == 0:\n                    # NOTE step on CPU, probably need some more though to make capturable\n                    state['step'] = torch.tensor(0.0, dtype=_get_scalar_dtype())\n\n                    shape = p.grad.shape\n                    factored_dims = _factored_dims(\n                        shape,\n                        factored=True,\n                        min_dim_size_to_factor=self.defaults['min_dim_size_to_factor']\n                    )\n\n                    if factored_dims is not None:\n                        dc, dr = factored_dims\n                        row_shape = list(p.grad.shape)\n                        row_shape[dr] = 1\n                        col_shape = list(p.grad.shape)\n                        col_shape[dc] = 1\n                        state['exp_avg_sq_r'] = p.grad.new_zeros(row_shape)\n                        state['exp_avg_sq_c'] = p.grad.new_zeros(col_shape)\n                    else:\n                        state['exp_avg_sq'] = 
torch.zeros_like(p.grad, memory_format=torch.preserve_format)\n\n                    if self.defaults['momentum'] is not None:\n                        state['exp_avg'] = torch.zeros_like(p.grad, dtype=self.defaults['momentum_dtype'])\n\n                state_steps.append(state['step'])\n                exp_avg_sq_rs.append(state.get('exp_avg_sq_r', None))\n                exp_avg_sq_cs.append(state.get('exp_avg_sq_c', None))\n                exp_avg_sqs.append(state.get('exp_avg_sq', None))\n                exp_avgs.append(state.get('exp_avg', None))\n\n            if group['foreach']:\n                func = _multi_tensor_adafactor\n            else:\n                func = _single_tensor_adafactor\n\n            func(\n                params=params_with_grad,\n                grads=grads,\n                exp_avg_sq_rs=exp_avg_sq_rs,\n                exp_avg_sq_cs=exp_avg_sq_cs,\n                exp_avg_sqs=exp_avg_sqs,\n                exp_avgs=exp_avgs,\n                state_steps=state_steps,\n                beta2_decay=group['decay_rate'],\n                beta2_cap=group['beta2_cap'],\n                min_dim_size_to_factor=group['min_dim_size_to_factor'],\n                eps=group['eps'],\n                lr=group['lr'],\n                weight_decay=group['weight_decay'],\n                momentum=group['momentum'],\n                momentum_dtype=group['momentum_dtype'],\n                clipping_threshold=group['clipping_threshold'],\n                unscaled_wd=group['unscaled_wd'],\n                caution=group['caution'],\n                max_lr=self.defaults['lr'] if group['corrected_weight_decay'] else None,\n            )\n\n        return loss\n\n\ndef _single_tensor_adafactor(\n        params: List[Tensor],\n        grads: List[Tensor],\n        exp_avg_sq_rs: List[Optional[Tensor]],\n        exp_avg_sq_cs: List[Optional[Tensor]],\n        exp_avg_sqs: List[Optional[Tensor]],\n        exp_avgs: List[Optional[Tensor]],\n        state_steps: 
List[Tensor],\n        *,\n        beta2_decay: float,\n        beta2_cap: float,\n        min_dim_size_to_factor: int,\n        eps: float,\n        lr: float,\n        weight_decay: float,\n        momentum: Optional[float],\n        momentum_dtype: Union[str, torch.dtype],\n        clipping_threshold: Optional[float],\n        unscaled_wd: bool,\n        caution: bool,\n        max_lr: Optional[float],\n):\n    for i, param in enumerate(params):\n        grad = grads[i]\n        exp_avg_sq_r = exp_avg_sq_rs[i]\n        exp_avg_sq_c = exp_avg_sq_cs[i]\n        exp_avg_sq = exp_avg_sqs[i]\n        exp_avg = exp_avgs[i]\n        step_t = state_steps[i]\n        if eps is None:\n            # default eps for avoiding div by zero, diff from float type eps\n            eps = 1e-7 if grad.dtype == torch.float16 else 1e-30\n\n        # Update step\n        step_t += 1\n        beta2_t = min(beta2_cap, 1.0 - float(step_t) ** (-beta2_decay))\n        one_minus_beta2_t = 1 - beta2_t\n\n        grad_sqr = torch.square(grad) + eps\n        # NOTE application of eps (epsilon1) mirrors the optax/big vision/t5x approach\n        if exp_avg_sq is None:\n            # factorized second moment\n            dc, dr = _factored_dims(grad.shape, True, min_dim_size_to_factor=min_dim_size_to_factor)\n            exp_avg_sq_r.lerp_(grad_sqr.mean(dim=dr, keepdim=True), one_minus_beta2_t)\n            exp_avg_sq_c.lerp_(grad_sqr.mean(dim=dc, keepdim=True), one_minus_beta2_t)\n\n            reduce_dc = dc - 1 if dc > dr else dc\n            row_col_mean = exp_avg_sq_r.mean(dim=reduce_dc, keepdim=True)\n            row_factor = (exp_avg_sq_r / row_col_mean).rsqrt()\n            col_factor = exp_avg_sq_c.rsqrt()\n\n            update = grad * row_factor * col_factor\n        else:\n            # non-factorized second moment\n            assert exp_avg_sq_r is None and exp_avg_sq_c is None\n            exp_avg_sq.lerp_(grad_sqr, one_minus_beta2_t)\n            update = grad * 
exp_avg_sq.rsqrt()\n\n        # Clip by RMS value\n        if clipping_threshold is not None:\n            denom = (update.norm(2) / ((update.numel() ** 0.5) / clipping_threshold)).clamp_(max=1.0)\n            update.div_(denom)\n\n        # Apply momentum (in different dtype)\n        if momentum is not None and exp_avg is not None:\n            if momentum_dtype != grad.dtype:\n                exp_avg.lerp_(update.to(momentum_dtype), 1 - momentum)  # ema\n                update = exp_avg.to(grad.dtype)\n            else:\n                exp_avg.lerp_(update, 1 - momentum)  # ema\n                update = exp_avg.clone()\n\n            if caution:\n                # apply caution as per 'Cautious Optimizers': https://arxiv.org/abs/2411.16085\n                mask = (update * grad > 0).to(grad.dtype)\n                mask.div_(mask.mean().clamp_(min=1e-3))\n                update.mul_(mask)\n\n        # Scale by learning rate\n        update.mul_(lr)\n\n        # Perform weight decay\n        if weight_decay != 0:\n            if unscaled_wd:\n                # match big vision impl, 'fully decoupled' decay w/o LR scaling\n                if max_lr is None:\n                    param.mul_(1. - weight_decay)\n                else:\n                    # corrected weight decay: scale by lr / max_lr\n                    param.mul_(1. - (lr / max_lr) * weight_decay)\n            else:\n                # match typical pytorch behaviour for decoupled decay, eg adamw where wd is scaled by LR\n                if max_lr is None:\n                    param.mul_(1. - lr * weight_decay)\n                else:\n                    # corrected weight decay: scale by lr^2 / max_lr\n                    param.mul_(1. 
- (lr ** 2 / max_lr) * weight_decay)\n\n        # Update parameters\n        param.add_(update, alpha=-1.0)\n\n\ndef _multi_tensor_adafactor(\n        params: List[Tensor],\n        grads: List[Tensor],\n        exp_avg_sq_rs: List[Optional[Tensor]],\n        exp_avg_sq_cs: List[Optional[Tensor]],\n        exp_avg_sqs: List[Optional[Tensor]],\n        exp_avgs: List[Optional[Tensor]],\n        state_steps: List[Tensor],\n        *,\n        beta2_decay: float,\n        beta2_cap: float,\n        min_dim_size_to_factor: int,\n        eps: float,\n        lr: float,\n        weight_decay: float,\n        momentum: Optional[float],\n        momentum_dtype: Union[str, torch.dtype],\n        clipping_threshold: Optional[float],\n        unscaled_wd: bool,\n        caution: bool,\n        max_lr: Optional[float],\n):\n    # FIXME TODO\n    assert False, 'multi-tensor fn (foreach=True) not implemented yet'\n"
  },
  {
    "path": "timm/optim/adahessian.py",
    "content": "\"\"\" AdaHessian Optimizer\n\nLifted from https://github.com/davda54/ada-hessian/blob/master/ada_hessian.py\nOriginally licensed MIT, Copyright 2020, David Samuel\n\"\"\"\nimport torch\n\n\nclass Adahessian(torch.optim.Optimizer):\n    \"\"\"\n    Implements the AdaHessian algorithm from \"ADAHESSIAN: An Adaptive Second OrderOptimizer for Machine Learning\"\n\n    Arguments:\n        params (iterable): iterable of parameters to optimize or dicts defining parameter groups\n        lr (float, optional): learning rate (default: 0.1)\n        betas ((float, float), optional): coefficients used for computing running averages of gradient and the\n            squared hessian trace (default: (0.9, 0.999))\n        eps (float, optional): term added to the denominator to improve numerical stability (default: 1e-8)\n        weight_decay (float, optional): weight decay (L2 penalty) (default: 0.0)\n        hessian_power (float, optional): exponent of the hessian trace (default: 1.0)\n        update_each (int, optional): compute the hessian trace approximation only after *this* number of steps\n            (to save time) (default: 1)\n        n_samples (int, optional): how many times to sample `z` for the approximation of the hessian trace (default: 1)\n    \"\"\"\n\n    def __init__(\n            self,\n            params,\n            lr=0.1,\n            betas=(0.9, 0.999),\n            eps=1e-8,\n            weight_decay=0.0,\n            hessian_power=1.0,\n            update_each=1,\n            n_samples=1,\n            avg_conv_kernel=False,\n    ):\n        if not 0.0 <= lr:\n            raise ValueError(f\"Invalid learning rate: {lr}\")\n        if not 0.0 <= eps:\n            raise ValueError(f\"Invalid epsilon value: {eps}\")\n        if not 0.0 <= betas[0] < 1.0:\n            raise ValueError(f\"Invalid beta parameter at index 0: {betas[0]}\")\n        if not 0.0 <= betas[1] < 1.0:\n            raise ValueError(f\"Invalid beta parameter at index 1: 
{betas[1]}\")\n        if not 0.0 <= hessian_power <= 1.0:\n            raise ValueError(f\"Invalid Hessian power value: {hessian_power}\")\n\n        self.n_samples = n_samples\n        self.update_each = update_each\n        self.avg_conv_kernel = avg_conv_kernel\n\n        # use a separate generator that deterministically generates the same `z`s across all GPUs in case of distributed training\n        self.seed = 2147483647\n        self.generator = torch.Generator().manual_seed(self.seed)\n\n        defaults = dict(\n            lr=lr,\n            betas=betas,\n            eps=eps,\n            weight_decay=weight_decay,\n            hessian_power=hessian_power,\n        )\n        super(Adahessian, self).__init__(params, defaults)\n\n        for p in self.get_params():\n            p.hess = 0.0\n            self.state[p][\"hessian step\"] = 0\n\n    @property\n    def is_second_order(self):\n        return True\n\n    def get_params(self):\n        \"\"\"\n        Gets all parameters in all param_groups with gradients\n        \"\"\"\n\n        return (p for group in self.param_groups for p in group['params'] if p.requires_grad)\n\n    def zero_hessian(self):\n        \"\"\"\n        Zeros out the accumulated hessian traces.\n        \"\"\"\n\n        for p in self.get_params():\n            if not isinstance(p.hess, float) and self.state[p][\"hessian step\"] % self.update_each == 0:\n                p.hess.zero_()\n\n    @torch.no_grad()\n    def set_hessian(self):\n        \"\"\"\n        Computes the Hutchinson approximation of the hessian trace and accumulates it for each trainable parameter.\n        \"\"\"\n\n        params = []\n        for p in filter(lambda p: p.grad is not None, self.get_params()):\n            if self.state[p][\"hessian step\"] % self.update_each == 0:  # compute the trace only each `update_each` step\n                params.append(p)\n            self.state[p][\"hessian step\"] += 1\n\n        if len(params) == 0:\n            
return\n\n        if self.generator.device != params[0].device:  # hackish way of casting the generator to the right device\n            self.generator = torch.Generator(params[0].device).manual_seed(self.seed)\n\n        grads = [p.grad for p in params]\n\n        for i in range(self.n_samples):\n            # Rademacher distribution {-1.0, 1.0}\n            zs = [torch.randint(0, 2, p.size(), generator=self.generator, device=p.device) * 2.0 - 1.0 for p in params]\n            h_zs = torch.autograd.grad(\n                grads, params, grad_outputs=zs, only_inputs=True, retain_graph=i < self.n_samples - 1)\n            for h_z, z, p in zip(h_zs, zs, params):\n                p.hess += h_z * z / self.n_samples  # approximate the expected values of z*(H@z)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        \"\"\"\n        Performs a single optimization step.\n        Arguments:\n            closure (callable, optional) -- a closure that reevaluates the model and returns the loss (default: None)\n        \"\"\"\n\n        loss = None\n        if closure is not None:\n            loss = closure()\n\n        self.zero_hessian()\n        self.set_hessian()\n\n        for group in self.param_groups:\n            for p in group['params']:\n                if p.grad is None or p.hess is None:\n                    continue\n\n                if self.avg_conv_kernel and p.dim() == 4:\n                    p.hess = torch.abs(p.hess).mean(dim=[2, 3], keepdim=True).expand_as(p.hess).clone()\n\n                # Perform correct stepweight decay as in AdamW\n                p.mul_(1 - group['lr'] * group['weight_decay'])\n\n                state = self.state[p]\n\n                # State initialization\n                if len(state) == 1:\n                    state['step'] = 0\n                    # Exponential moving average of gradient values\n                    state['exp_avg'] = torch.zeros_like(p)\n                    # Exponential moving average of Hessian 
diagonal square values\n                    state['exp_hessian_diag_sq'] = torch.zeros_like(p)\n\n                exp_avg, exp_hessian_diag_sq = state['exp_avg'], state['exp_hessian_diag_sq']\n                beta1, beta2 = group['betas']\n                state['step'] += 1\n\n                # Decay the first and second moment running average coefficient\n                exp_avg.mul_(beta1).add_(p.grad, alpha=1 - beta1)\n                exp_hessian_diag_sq.mul_(beta2).addcmul_(p.hess, p.hess, value=1 - beta2)\n\n                bias_correction1 = 1 - beta1 ** state['step']\n                bias_correction2 = 1 - beta2 ** state['step']\n\n                k = group['hessian_power']\n                denom = (exp_hessian_diag_sq / bias_correction2).pow_(k / 2).add_(group['eps'])\n\n                # make update\n                step_size = group['lr'] / bias_correction1\n                p.addcdiv_(exp_avg, denom, value=-step_size)\n\n        return loss\n"
  },
  {
    "path": "timm/optim/adamp.py",
    "content": "\"\"\"\nAdamP Optimizer Implementation copied from https://github.com/clovaai/AdamP/blob/master/adamp/adamp.py\n\nPaper: `Slowing Down the Weight Norm Increase in Momentum-based Optimizers` - https://arxiv.org/abs/2006.08217\nCode: https://github.com/clovaai/AdamP\n\n\nReferences for added functionality:\n    Cautious Optimizers: https://arxiv.org/abs/2411.16085\n    Spherical Cautious Optimizers: https://openreview.net/forum?id=OyT2CJ4fh7 \nCopyright (c) 2020-present NAVER Corp.\nMIT license\n\"\"\"\n\nimport torch\nimport torch.nn.functional as F\nfrom torch.optim.optimizer import Optimizer\nimport math\n\n\ndef _channel_view(x) -> torch.Tensor:\n    return x.reshape(x.size(0), -1)\n\n\ndef _layer_view(x) -> torch.Tensor:\n    return x.reshape(1, -1)\n\n\ndef projection(p, grad, perturb, delta: float, wd_ratio: float, eps: float, caution: bool = False):\n    wd = 1.\n    expand_size = (-1,) + (1,) * (len(p.shape) - 1)\n    for view_func in [_channel_view, _layer_view]:\n        param_view = view_func(p)\n        grad_view = view_func(grad)\n        cosine_sim = F.cosine_similarity(grad_view, param_view, dim=1, eps=eps).abs_()\n\n        # FIXME this is a problem for PyTorch XLA\n        if cosine_sim.max() < delta / math.sqrt(param_view.size(1)):\n            p_n = p / param_view.norm(p=2, dim=1).add_(eps).reshape(expand_size)\n            perturb -= p_n * view_func(p_n * perturb).sum(dim=1).reshape(expand_size)\n            \n            if caution:\n                # Spherical Cautious Optimizer Logic\n                grad_radial = p_n * view_func(p_n * grad).sum(dim=1).reshape(expand_size)\n                grad_perp = grad - grad_radial\n                \n                mask = (perturb * grad_perp > 0).to(grad.dtype)\n                mask.div_(mask.mean().clamp_(min=1e-3))\n                perturb.mul_(mask)\n                # Enhance the numerical stability of the Cautious Optimizer\n                perturb -= p_n * view_func(p_n * 
perturb).sum(dim=1).reshape(expand_size)\n            wd = wd_ratio\n            return perturb, wd\n\n    if caution:\n        # Standard Cautious Optimizer Logic for non-projected parameters\n        mask = (perturb * grad > 0).to(grad.dtype)\n        mask.div_(mask.mean().clamp_(min=1e-3))\n        perturb.mul_(mask)\n\n    return perturb, wd\n\n\nclass AdamP(Optimizer):\n    def __init__(\n            self,\n            params,\n            lr=1e-3,\n            betas=(0.9, 0.999),\n            eps=1e-8,\n            weight_decay=0,\n            delta=0.1,\n            wd_ratio=0.1,\n            nesterov=False,\n            caution=False, \n    ):\n        defaults = dict(\n            lr=lr,\n            betas=betas,\n            eps=eps,\n            weight_decay=weight_decay,\n            delta=delta,\n            wd_ratio=wd_ratio,\n            nesterov=nesterov,\n            caution=caution,\n        )\n        super(AdamP, self).__init__(params, defaults)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n\n                grad = p.grad\n                beta1, beta2 = group['betas']\n                nesterov = group['nesterov']\n                caution = group.get('caution', False)\n\n                state = self.state[p]\n\n                # State initialization\n                if len(state) == 0:\n                    state['step'] = 0\n                    state['exp_avg'] = torch.zeros_like(p)\n                    state['exp_avg_sq'] = torch.zeros_like(p)\n\n                # Adam\n                exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']\n\n                state['step'] += 1\n                bias_correction1 = 1 - beta1 ** state['step']\n                
bias_correction2 = 1 - beta2 ** state['step']\n\n                exp_avg.mul_(beta1).add_(grad, alpha=1 - beta1)\n                exp_avg_sq.mul_(beta2).addcmul_(grad, grad, value=1 - beta2)\n\n                denom = (exp_avg_sq.sqrt() / math.sqrt(bias_correction2)).add_(group['eps'])\n                step_size = group['lr'] / bias_correction1\n\n                if nesterov:\n                    perturb = (beta1 * exp_avg + (1 - beta1) * grad) / denom\n                else:\n                    perturb = exp_avg / denom\n\n                # Projection\n                wd_ratio = 1.\n                if len(p.shape) > 1:\n                    perturb, wd_ratio = projection(\n                        p, grad, perturb, group['delta'], group['wd_ratio'], group['eps'], caution\n                    )\n                elif caution:\n                    # Apply standard caution for scalars/1D tensors if needed\n                    mask = (perturb * grad > 0).to(grad.dtype)\n                    mask.div_(mask.mean().clamp_(min=1e-3))\n                    perturb.mul_(mask)\n\n                # Weight decay\n                if group['weight_decay'] > 0:\n                    p.mul_(1. - group['lr'] * group['weight_decay'] * wd_ratio)\n\n                # Step\n                p.add_(perturb, alpha=-step_size)\n\n        return loss\n"
  },
  {
    "path": "timm/optim/adamw.py",
    "content": "\"\"\" AdamW Optimizer\nImpl copied from PyTorch master\n\nReferences for added functionality:\n    Cautious Optimizers: https://arxiv.org/abs/2411.16085\n    Why Gradients Rapidly Increase Near the End of Training: https://arxiv.org/abs/2506.02285\n\nNOTE: This impl has been deprecated in favour of torch.optim.AdamW and remains as a reference\n\"\"\"\nimport math\nfrom typing import List, Optional, Tuple\n\nimport torch\nfrom torch import Tensor\nfrom torch.optim.optimizer import Optimizer\n\nfrom ._types import ParamsT\n\n\nclass AdamWLegacy(Optimizer):\n    r\"\"\"Implements AdamW algorithm.\n\n    NOTE: This impl has been deprecated in favour of torch.optim.AdamW and remains as a reference\n\n    References:\n        - Adam: A Method for Stochastic Optimization: https://arxiv.org/abs/1412.6980\n        - Decoupled Weight Decay Regularization: https://arxiv.org/abs/1711.05101\n        - On the Convergence of Adam and Beyond: https://openreview.net/forum?id=ryQu7f-RZ\n\n    Args:\n        params: iterable of parameters to optimize or dicts defining parameter groups\n        lr: learning rate\n        betas: coefficients used for computing running averages of gradient and its square\n        eps: term added to the denominator to improve numerical stability\n        weight_decay: weight decay coefficient\n        amsgrad: whether to use the AMSGrad variant of this algorithm\n            from the paper `On the Convergence of Adam and Beyond`\n        caution: apply caution when using AdamW\n        corrected_weight_decay: apply corrected weight decay (lr**2 / max_lr)\n        maximize: maximize the params based on the objective, instead of minimizing\n        foreach: whether foreach implementation of optimizer is used.\n            If unspecified by the user (so foreach is None), we will try to use\n            foreach over for-loop implementation on CUDA, since it is faster in general.\n        capturable: whether this instance is safe to capture 
in a CUDA graph.\n            Passing True can impair ungraphed performance, so if you don't intend to\n            graph capture this instance, leave it False\n    \"\"\"\n\n    def __init__(\n            self,\n            params: ParamsT,\n            lr: float = 1e-3,\n            betas: Tuple[float, float] = (0.9, 0.999),\n            eps: float = 1e-8,\n            weight_decay: float = 1e-2,\n            amsgrad: bool = False,\n            caution: bool = False,\n            corrected_weight_decay: bool = False,\n            maximize: bool = False,\n            foreach: Optional[bool] = None,\n            capturable: bool = False,\n    ):\n        if not 0.0 <= lr:\n            raise ValueError(\"Invalid learning rate: {}\".format(lr))\n        if not 0.0 <= eps:\n            raise ValueError(\"Invalid epsilon value: {}\".format(eps))\n        if not 0.0 <= betas[0] < 1.0:\n            raise ValueError(\"Invalid beta parameter at index 0: {}\".format(betas[0]))\n        if not 0.0 <= betas[1] < 1.0:\n            raise ValueError(\"Invalid beta parameter at index 1: {}\".format(betas[1]))\n        defaults = dict(\n            lr=lr,\n            betas=betas,\n            eps=eps,\n            weight_decay=weight_decay,\n            amsgrad=amsgrad,\n            caution=caution,\n            corrected_weight_decay=corrected_weight_decay,\n            foreach=foreach,\n            maximize=maximize,\n            capturable=capturable,\n        )\n        super(AdamWLegacy, self).__init__(params, defaults)\n\n    def __setstate__(self, state):\n        super(AdamWLegacy, self).__setstate__(state)\n        state_values = list(self.state.values())\n        step_is_tensor = (len(state_values) != 0) and torch.is_tensor(state_values[0]['step'])\n        if not step_is_tensor:\n            for s in state_values:\n                s['step'] = torch.tensor(float(s['step']))\n        for group in self.param_groups:\n            group.setdefault('amsgrad', False)\n        
    group.setdefault('caution', False)\n            group.setdefault('corrected_weight_decay', False)\n            group.setdefault('foreach', None)\n            group.setdefault('maximize', False)\n            group.setdefault('capturable', False)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        \"\"\"Performs a single optimization step.\n\n        Arguments:\n            closure (callable, optional): A closure that reevaluates the model\n                and returns the loss.\n        \"\"\"\n        self._cuda_graph_capture_health_check()\n\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n            params_with_grad = []\n            grads = []\n            exp_avgs = []\n            exp_avg_sqs = []\n            max_exp_avg_sqs = []\n            state_steps = []\n            beta1, beta2 = group['betas']\n            amsgrad = group['amsgrad']\n\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n                params_with_grad.append(p)\n                if p.grad.is_sparse:\n                    raise RuntimeError('AdamW does not support sparse gradients')\n                grads.append(p.grad)\n\n                state = self.state[p]\n\n                # State initialization\n                if len(state) == 0:\n                    state['step'] = torch.tensor(0.)\n                    # Exponential moving average of gradient values\n                    state['exp_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format)\n                    # Exponential moving average of squared gradient values\n                    state['exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format)\n                    if amsgrad:\n                        # Maintains max of all exp. moving avg. of sq. grad. 
values\n                        state['max_exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format)\n\n                exp_avgs.append(state['exp_avg'])\n                exp_avg_sqs.append(state['exp_avg_sq'])\n                if amsgrad:\n                    max_exp_avg_sqs.append(state.get('max_exp_avg_sq', None))\n                state_steps.append(state['step'])\n\n            adamw(\n                params_with_grad,\n                grads,\n                exp_avgs,\n                exp_avg_sqs,\n                max_exp_avg_sqs,\n                state_steps,\n                foreach=group['foreach'],\n                amsgrad=amsgrad,\n                beta1=beta1,\n                beta2=beta2,\n                lr=group['lr'],\n                weight_decay=group['weight_decay'],\n                eps=group['eps'],\n                caution=group['caution'],\n                maximize=group['maximize'],\n                capturable=group['capturable'],\n                max_lr=self.defaults['lr'] if group['corrected_weight_decay'] else None,\n            )\n\n        return loss\n\n\ndef adamw(\n        params: List[Tensor],\n        grads: List[Tensor],\n        exp_avgs: List[Tensor],\n        exp_avg_sqs: List[Tensor],\n        max_exp_avg_sqs: List[Tensor],\n        state_steps: List[Tensor],\n        foreach: Optional[bool] = None,\n        capturable: bool = False,\n        *,\n        amsgrad: bool,\n        beta1: float,\n        beta2: float,\n        lr: float,\n        weight_decay: float,\n        eps: float,\n        caution: bool,\n        maximize: bool,\n        max_lr: Optional[float],\n) -> None:\n    r\"\"\"Functional API that performs AdamW algorithm computation.\n      See AdamWLegacy class for details.\n    \"\"\"\n\n    if not all(isinstance(t, torch.Tensor) for t in state_steps):\n        raise RuntimeError(\n            'API has changed, `state_steps` argument must contain a list of' +\n            ' singleton tensors')\n\n    
if foreach is None:\n        try:\n            # cannot do foreach if this overload doesn't exist when caution enabled\n            foreach = not caution or 'Scalar' in torch.ops.aten._foreach_maximum_.overloads()\n        except Exception:\n            foreach = False\n\n    if foreach and not torch.jit.is_scripting():\n        func = _multi_tensor_adamw\n    else:\n        func = _single_tensor_adamw\n\n    func(\n        params,\n        grads,\n        exp_avgs,\n        exp_avg_sqs,\n        max_exp_avg_sqs,\n        state_steps,\n        amsgrad=amsgrad,\n        beta1=beta1,\n        beta2=beta2,\n        lr=lr,\n        weight_decay=weight_decay,\n        eps=eps,\n        caution=caution,\n        maximize=maximize,\n        capturable=capturable,\n        max_lr=max_lr,\n    )\n\n\ndef _single_tensor_adamw(\n        params: List[Tensor],\n        grads: List[Tensor],\n        exp_avgs: List[Tensor],\n        exp_avg_sqs: List[Tensor],\n        max_exp_avg_sqs: List[Tensor],\n        state_steps: List[Tensor],\n        *,\n        amsgrad: bool,\n        beta1: float,\n        beta2: float,\n        lr: float,\n        weight_decay: float,\n        eps: float,\n        caution: bool,\n        maximize: bool,\n        capturable: bool,\n        max_lr: Optional[float],\n):\n\n    for i, param in enumerate(params):\n        grad = grads[i] if not maximize else -grads[i]\n        exp_avg = exp_avgs[i]\n        exp_avg_sq = exp_avg_sqs[i]\n        step_t = state_steps[i]\n\n        # Update step.\n        step_t += 1\n\n        # Perform stepweight decay.\n        wd_scale = lr if max_lr is None else lr ** 2 / max_lr\n        param.mul_(1. 
- wd_scale * weight_decay)\n\n        # Decay the first and second moment running average coefficient.\n        exp_avg.mul_(beta1).add_(grad, alpha=1 - beta1)\n        exp_avg_sq.mul_(beta2).addcmul_(grad, grad, value=1 - beta2)\n\n        if amsgrad:\n            max_exp_avg_sq = max_exp_avg_sqs[i]\n            # Maintains the maximum of all 2nd moment running avg. till now\n            torch.max(max_exp_avg_sq, exp_avg_sq, out=max_exp_avg_sq)\n            denom_base = max_exp_avg_sq\n        else:\n            denom_base = exp_avg_sq\n\n        if capturable:\n            step = step_t\n\n            # 1 - beta1 ** step can't be captured in a CUDA graph, even if step is a CUDA tensor\n            # (incurs \"RuntimeError: CUDA error: operation not permitted when stream is capturing\")\n            bias_correction1 = 1 - torch.pow(beta1, step)\n            bias_correction2 = 1 - torch.pow(beta2, step)\n\n            step_size = lr / bias_correction1\n            step_size_neg = step_size.neg()\n\n            bias_correction2_sqrt = bias_correction2.sqrt()\n\n            denom = (denom_base.sqrt() / (bias_correction2_sqrt * step_size_neg)).add_(eps / step_size_neg)\n\n            if caution:\n                # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n                # FIXME not 100% sure if this remains capturable?\n                mask = (exp_avg * grad > 0).to(grad.dtype)\n                mask.div_(mask.mean().clamp_(min=1e-3))\n                exp_avg = exp_avg * mask\n\n            param.addcdiv_(exp_avg, denom)\n        else:\n            step = step_t.item()\n            bias_correction1 = 1 - beta1 ** step\n            bias_correction2 = 1 - beta2 ** step\n            step_size = lr / bias_correction1\n            bias_correction2_sqrt = math.sqrt(bias_correction2)\n\n            denom = (denom_base.sqrt() / bias_correction2_sqrt).add_(eps)\n\n            if caution:\n                # Apply caution as per 'Cautious 
Optimizers' - https://arxiv.org/abs/2411.16085\n                mask = (exp_avg * grad > 0).to(grad.dtype)\n                mask.div_(mask.mean().clamp_(min=1e-3))\n                exp_avg = exp_avg * mask\n\n            param.addcdiv_(exp_avg, denom, value=-step_size)\n\n\ndef _multi_tensor_adamw(\n        params: List[Tensor],\n        grads: List[Tensor],\n        exp_avgs: List[Tensor],\n        exp_avg_sqs: List[Tensor],\n        max_exp_avg_sqs: List[Tensor],\n        state_steps: List[Tensor],\n        *,\n        amsgrad: bool,\n        beta1: float,\n        beta2: float,\n        lr: float,\n        weight_decay: float,\n        eps: float,\n        caution: bool,\n        maximize: bool,\n        capturable: bool,\n        max_lr: Optional[float],\n):\n    if len(params) == 0:\n        return\n\n    if capturable:\n        assert all(\n            p.is_cuda and step.is_cuda for p, step in zip(params, state_steps)\n        ), \"If capturable=True, params and state_steps must be CUDA tensors.\"\n\n    if maximize:\n        grads = torch._foreach_neg(tuple(grads))  # type: ignore[assignment]\n\n    grads = [torch.view_as_real(x) if torch.is_complex(x) else x for x in grads]\n    exp_avgs = [torch.view_as_real(x) if torch.is_complex(x) else x for x in exp_avgs]\n    exp_avg_sqs = [torch.view_as_real(x) if torch.is_complex(x) else x for x in exp_avg_sqs]\n    params = [torch.view_as_real(x) if torch.is_complex(x) else x for x in params]\n\n    # update steps\n    torch._foreach_add_(state_steps, 1)\n\n    # Perform stepweight decay\n    wd_scale = lr if max_lr is None else lr ** 2 / max_lr\n    torch._foreach_mul_(params, 1 -  wd_scale * weight_decay)\n\n    # Decay the first and second moment running average coefficient\n    #torch._foreach_lerp_(exp_avgs, grads, 1 - beta1)\n    torch._foreach_mul_(exp_avgs, beta1)\n    torch._foreach_add_(exp_avgs, grads, alpha=1 - beta1)\n\n    torch._foreach_mul_(exp_avg_sqs, beta2)\n    
torch._foreach_addcmul_(exp_avg_sqs, grads, grads, 1 - beta2)\n\n    if capturable:\n        # TODO: use foreach_pow if/when foreach_pow is added\n        bias_correction1 = [torch.pow(beta1, step) for step in state_steps]\n        bias_correction2 = [torch.pow(beta2, step) for step in state_steps]\n        # foreach_sub doesn't allow a scalar as the first arg\n        torch._foreach_sub_(bias_correction1, 1)\n        torch._foreach_sub_(bias_correction2, 1)\n        torch._foreach_neg_(bias_correction1)\n        torch._foreach_neg_(bias_correction2)\n\n        # foreach_div doesn't allow a scalar as the first arg\n        step_size = torch._foreach_div(bias_correction1, lr)\n        torch._foreach_reciprocal_(step_size)\n        torch._foreach_neg_(step_size)\n\n        bias_correction2_sqrt = torch._foreach_sqrt(bias_correction2)\n\n        if amsgrad:\n            # Maintains the maximum of all 2nd moment running avg. till now\n            max_exp_avg_sqs = [torch.view_as_real(x) if torch.is_complex(x) else x for x in max_exp_avg_sqs]\n            torch._foreach_maximum_(max_exp_avg_sqs, exp_avg_sqs)\n            denom_base = torch._foreach_sqrt(max_exp_avg_sqs)\n        else:\n            denom_base = torch._foreach_sqrt(exp_avg_sqs)\n\n        torch._foreach_div_(\n            denom_base,\n            torch._foreach_mul(bias_correction2_sqrt, step_size)\n        )\n        eps_over_step_size = torch._foreach_div(step_size, eps)\n        torch._foreach_reciprocal_(eps_over_step_size)\n        denom = torch._foreach_add(denom_base, eps_over_step_size)\n\n        if caution:\n            # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n            masks = torch._foreach_mul(exp_avgs, grads)\n            masks = [(m > 0).to(g.dtype) for m, g in zip(masks, grads)]  # capturable?\n            mask_scale = [m.mean() for m in masks]\n            torch._foreach_maximum_(mask_scale, 1e-3)\n            
#torch._foreach_clamp_min_(mask_scale, 1e-3)\n            torch._foreach_div_(masks, mask_scale)\n            exp_avgs = torch._foreach_mul(exp_avgs, masks)\n\n        torch._foreach_addcdiv_(params, exp_avgs, denom)\n    else:\n        bias_correction1 = [1 - beta1 ** step.item() for step in state_steps]\n        bias_correction2 = [1 - beta2 ** step.item() for step in state_steps]\n\n        step_size = [(lr / bc) * -1 for bc in bias_correction1]\n\n        bias_correction2_sqrt = [math.sqrt(bc) for bc in bias_correction2]\n\n        if amsgrad:\n            # Maintains the maximum of all 2nd moment running avg. till now\n            max_exp_avg_sqs = [torch.view_as_real(x) if torch.is_complex(x) else x for x in max_exp_avg_sqs]\n            torch._foreach_maximum_(max_exp_avg_sqs, exp_avg_sqs)\n            denom = torch._foreach_sqrt(max_exp_avg_sqs)\n        else:\n            denom = torch._foreach_sqrt(exp_avg_sqs)\n\n        torch._foreach_div_(denom, bias_correction2_sqrt)\n        torch._foreach_add_(denom, eps)\n\n        if caution:\n            # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n            masks = torch._foreach_mul(exp_avgs, grads)\n            masks = [(m > 0).to(g.dtype) for m, g in zip(masks, grads)]\n            mask_scale = [m.mean() for m in masks]\n            torch._foreach_maximum_(mask_scale, 1e-3)\n            #torch._foreach_clamp_min_(mask_scale, 1e-3)\n            torch._foreach_div_(masks, mask_scale)\n            exp_avgs = torch._foreach_mul(exp_avgs, masks)\n\n        torch._foreach_addcdiv_(params, exp_avgs, denom, step_size)\n"
  },
  {
    "path": "timm/optim/adan.py",
    "content": "\"\"\" Adan Optimizer\n\nAdan: Adaptive Nesterov Momentum Algorithm for Faster Optimizing Deep Models[J]. arXiv preprint arXiv:2208.06677, 2022.\n    https://arxiv.org/abs/2208.06677\n\nImplementation adapted from https://github.com/sail-sg/Adan\n\"\"\"\n# Copyright 2022 Garena Online Private Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nfrom typing import List, Optional, Tuple\n\nimport torch\nfrom torch import Tensor\nfrom torch.optim.optimizer import Optimizer\n\n\nclass MultiTensorApply(object):\n    available = False\n    warned = False\n\n    def __init__(self, chunk_size):\n        try:\n            MultiTensorApply.available = True\n            self.chunk_size = chunk_size\n        except ImportError as err:\n            MultiTensorApply.available = False\n            MultiTensorApply.import_err = err\n\n    def __call__(self, op, noop_flag_buffer, tensor_lists, *args):\n        return op(self.chunk_size, noop_flag_buffer, tensor_lists, *args)\n\n\nclass Adan(Optimizer):\n    \"\"\" Implements a pytorch variant of Adan.\n\n    Adan was proposed in Adan: Adaptive Nesterov Momentum Algorithm for Faster Optimizing Deep Models\n    https://arxiv.org/abs/2208.06677\n\n    Arguments:\n        params: Iterable of parameters to optimize or dicts defining parameter groups.\n        lr: Learning rate.\n        betas: Coefficients used for first- and second-order moments.\n        eps: Term added to the denominator to 
improve numerical stability.\n        weight_decay: Decoupled weight decay (L2 penalty)\n        no_prox: How to perform the weight decay\n        caution: Enable caution from 'Cautious Optimizers'\n        foreach: If True would use torch._foreach implementation. Faster but uses slightly more memory.\n    \"\"\"\n\n    def __init__(self,\n            params,\n            lr: float = 1e-3,\n            betas: Tuple[float, float, float] = (0.98, 0.92, 0.99),\n            eps: float = 1e-8,\n            weight_decay: float = 0.0,\n            no_prox: bool = False,\n            caution: bool = False,\n            foreach: Optional[bool] = None,\n    ):\n        if not 0.0 <= lr:\n            raise ValueError('Invalid learning rate: {}'.format(lr))\n        if not 0.0 <= eps:\n            raise ValueError('Invalid epsilon value: {}'.format(eps))\n        if not 0.0 <= betas[0] < 1.0:\n            raise ValueError('Invalid beta parameter at index 0: {}'.format(betas[0]))\n        if not 0.0 <= betas[1] < 1.0:\n            raise ValueError('Invalid beta parameter at index 1: {}'.format(betas[1]))\n        if not 0.0 <= betas[2] < 1.0:\n            raise ValueError('Invalid beta parameter at index 2: {}'.format(betas[2]))\n\n        defaults = dict(\n            lr=lr,\n            betas=betas,\n            eps=eps,\n            weight_decay=weight_decay,\n            no_prox=no_prox,\n            caution=caution,\n            foreach=foreach,\n        )\n        super().__init__(params, defaults)\n\n    def __setstate__(self, state):\n        super(Adan, self).__setstate__(state)\n        for group in self.param_groups:\n            group.setdefault('no_prox', False)\n            group.setdefault('caution', False)\n\n    @torch.no_grad()\n    def restart_opt(self):\n        for group in self.param_groups:\n            group['step'] = 0\n            for p in group['params']:\n                if p.requires_grad:\n                    state = self.state[p]\n                 
   # State initialization\n\n                    # Exponential moving average of gradient values\n                    state['exp_avg'] = torch.zeros_like(p)\n                    # Exponential moving average of squared gradient values\n                    state['exp_avg_sq'] = torch.zeros_like(p)\n                    # Exponential moving average of gradient difference\n                    state['exp_avg_diff'] = torch.zeros_like(p)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        \"\"\"Performs a single optimization step.\"\"\"\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        try:\n            has_scalar_maximum = 'Scalar' in torch.ops.aten._foreach_maximum_.overloads()\n        except Exception:\n            has_scalar_maximum = False\n\n        for group in self.param_groups:\n            params_with_grad = []\n            grads = []\n            exp_avgs = []\n            exp_avg_sqs = []\n            exp_avg_diffs = []\n            neg_pre_grads = []\n\n            beta1, beta2, beta3 = group['betas']\n            # assume same step across group now to simplify things\n            # per parameter step can be easily supported by making it a tensor, or pass list into kernel\n            if 'step' in group:\n                group['step'] += 1\n            else:\n                group['step'] = 1\n\n            bias_correction1 = 1.0 - beta1 ** group['step']\n            bias_correction2 = 1.0 - beta2 ** group['step']\n            bias_correction3 = 1.0 - beta3 ** group['step']\n\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n                params_with_grad.append(p)\n                grads.append(p.grad)\n\n                state = self.state[p]\n                if len(state) == 0:\n                    state['exp_avg'] = torch.zeros_like(p)\n                    state['exp_avg_sq'] = 
torch.zeros_like(p)\n                    state['exp_avg_diff'] = torch.zeros_like(p)\n\n                if 'neg_pre_grad' not in state or group['step'] == 1:\n                    state['neg_pre_grad'] = -p.grad.clone()\n\n                exp_avgs.append(state['exp_avg'])\n                exp_avg_sqs.append(state['exp_avg_sq'])\n                exp_avg_diffs.append(state['exp_avg_diff'])\n                neg_pre_grads.append(state['neg_pre_grad'])\n\n            if not params_with_grad:\n                continue\n\n            if group['foreach'] is None:\n                use_foreach = not group['caution'] or has_scalar_maximum\n            else:\n                use_foreach = group['foreach']\n\n            if use_foreach:\n                func = _multi_tensor_adan\n            else:\n                func = _single_tensor_adan\n\n            func(\n                params_with_grad,\n                grads,\n                exp_avgs=exp_avgs,\n                exp_avg_sqs=exp_avg_sqs,\n                exp_avg_diffs=exp_avg_diffs,\n                neg_pre_grads=neg_pre_grads,\n                beta1=beta1,\n                beta2=beta2,\n                beta3=beta3,\n                bias_correction1=bias_correction1,\n                bias_correction2=bias_correction2,\n                bias_correction3_sqrt=math.sqrt(bias_correction3),\n                lr=group['lr'],\n                weight_decay=group['weight_decay'],\n                eps=group['eps'],\n                no_prox=group['no_prox'],\n                caution=group['caution'],\n            )\n\n        return loss\n\n\ndef _single_tensor_adan(\n        params: List[Tensor],\n        grads: List[Tensor],\n        exp_avgs: List[Tensor],\n        exp_avg_sqs: List[Tensor],\n        exp_avg_diffs: List[Tensor],\n        neg_pre_grads: List[Tensor],\n        *,\n        beta1: float,\n        beta2: float,\n        beta3: float,\n        bias_correction1: float,\n        bias_correction2: float,\n        
bias_correction3_sqrt: float,\n        lr: float,\n        weight_decay: float,\n        eps: float,\n        no_prox: bool,\n        caution: bool,\n):\n    for i, param in enumerate(params):\n        grad = grads[i]\n        exp_avg = exp_avgs[i]\n        exp_avg_sq = exp_avg_sqs[i]\n        exp_avg_diff = exp_avg_diffs[i]\n        neg_grad_or_diff = neg_pre_grads[i]\n\n        # for memory saving, we use `neg_grad_or_diff` to get some temp variable in an inplace way\n        neg_grad_or_diff.add_(grad)\n\n        exp_avg.mul_(beta1).add_(grad, alpha=1 - beta1)  # m_t\n        exp_avg_diff.mul_(beta2).add_(neg_grad_or_diff, alpha=1 - beta2)  # diff_t\n\n        neg_grad_or_diff.mul_(beta2).add_(grad)\n        exp_avg_sq.mul_(beta3).addcmul_(neg_grad_or_diff, neg_grad_or_diff, value=1 - beta3)  # n_t\n\n        denom = (exp_avg_sq.sqrt() / bias_correction3_sqrt).add_(eps)\n        step_size_diff = lr * beta2 / bias_correction2\n        step_size = lr / bias_correction1\n\n        if caution:\n            # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n            mask = (exp_avg * grad > 0).to(grad.dtype)\n            mask.div_(mask.mean().clamp_(min=1e-3))\n            exp_avg = exp_avg * mask\n\n        if no_prox:\n            param.mul_(1 - lr * weight_decay)\n            param.addcdiv_(exp_avg, denom, value=-step_size)\n            param.addcdiv_(exp_avg_diff, denom, value=-step_size_diff)\n        else:\n            param.addcdiv_(exp_avg, denom, value=-step_size)\n            param.addcdiv_(exp_avg_diff, denom, value=-step_size_diff)\n            param.div_(1 + lr * weight_decay)\n\n        neg_grad_or_diff.zero_().add_(grad, alpha=-1.0)\n\n\ndef _multi_tensor_adan(\n        params: List[Tensor],\n        grads: List[Tensor],\n        exp_avgs: List[Tensor],\n        exp_avg_sqs: List[Tensor],\n        exp_avg_diffs: List[Tensor],\n        neg_pre_grads: List[Tensor],\n        *,\n        beta1: float,\n        beta2: 
float,\n        beta3: float,\n        bias_correction1: float,\n        bias_correction2: float,\n        bias_correction3_sqrt: float,\n        lr: float,\n        weight_decay: float,\n        eps: float,\n        no_prox: bool,\n        caution: bool,\n):\n    if len(params) == 0:\n        return\n\n    # for memory saving, we use `neg_pre_grads` to get some temp variable in a inplace way\n    torch._foreach_add_(neg_pre_grads, grads)\n\n    torch._foreach_mul_(exp_avgs, beta1)\n    torch._foreach_add_(exp_avgs, grads, alpha=1 - beta1)  # m_t\n\n    torch._foreach_mul_(exp_avg_diffs, beta2)\n    torch._foreach_add_(exp_avg_diffs, neg_pre_grads, alpha=1 - beta2)  # diff_t\n\n    torch._foreach_mul_(neg_pre_grads, beta2)\n    torch._foreach_add_(neg_pre_grads, grads)\n    torch._foreach_mul_(exp_avg_sqs, beta3)\n    torch._foreach_addcmul_(exp_avg_sqs, neg_pre_grads, neg_pre_grads, value=1 - beta3)  # n_t\n\n    denom = torch._foreach_sqrt(exp_avg_sqs)\n    torch._foreach_div_(denom, bias_correction3_sqrt)\n    torch._foreach_add_(denom, eps)\n\n    step_size_diff = lr * beta2 / bias_correction2\n    step_size = lr / bias_correction1\n\n    if caution:\n        # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n        masks = torch._foreach_mul(exp_avgs, grads)\n        masks = [(m > 0).to(g.dtype) for m, g in zip(masks, grads)]\n        mask_scale = [m.mean() for m in masks]\n        torch._foreach_maximum_(mask_scale, 1e-3)\n        torch._foreach_div_(masks, mask_scale)\n        exp_avgs = torch._foreach_mul(exp_avgs, masks)\n\n    if no_prox:\n        torch._foreach_mul_(params, 1 - lr * weight_decay)\n        torch._foreach_addcdiv_(params, exp_avgs, denom, value=-step_size)\n        torch._foreach_addcdiv_(params, exp_avg_diffs, denom, value=-step_size_diff)\n    else:\n        torch._foreach_addcdiv_(params, exp_avgs, denom, value=-step_size)\n        torch._foreach_addcdiv_(params, exp_avg_diffs, denom, 
value=-step_size_diff)\n        torch._foreach_div_(params, 1 + lr * weight_decay)\n\n    torch._foreach_zero_(neg_pre_grads)\n    torch._foreach_add_(neg_pre_grads, grads, alpha=-1.0)\n"
  },
  {
    "path": "timm/optim/adopt.py",
    "content": "\"\"\" ADOPT PyTorch Optimizer\n\nADOPT: Modified Adam Can Converge with Any β2 with the Optimal Rate: https://arxiv.org/abs/2411.02853\n\nModified for reduced dependencies on PyTorch internals from original at: https://github.com/iShohei220/adopt\n\n@inproceedings{taniguchi2024adopt,\n author={Taniguchi, Shohei and Harada, Keno and Minegishi, Gouki and Oshima, Yuta and Jeong, Seong Cheol and Nagahara, Go and Iiyama, Tomoshi and Suzuki, Masahiro and Iwasawa, Yusuke and Matsuo, Yutaka},\n booktitle = {Advances in Neural Information Processing Systems},\n title = {ADOPT: Modified Adam Can Converge with Any β2 with the Optimal Rate},\n year = {2024}\n}\n\nReferences for added functionality:\n    Cautious Optimizers: https://arxiv.org/abs/2411.16085\n    Why Gradients Rapidly Increase Near the End of Training: https://arxiv.org/abs/2506.02285\n\"\"\"\nfrom typing import cast, List, Optional, Tuple, Union\n\nimport torch\nfrom torch import Tensor\nfrom torch.optim.optimizer import Optimizer\n\nfrom ._types import ParamsT\n\n__all__ = [\"Adopt\", \"adopt\"]\n\ndef _view_as_real(params, *state_and_grads):\n    for i, p in enumerate(params):\n        if torch.is_complex(p):\n            params[i] = torch.view_as_real(params[i])\n            for s in state_and_grads:\n                s[i] = torch.view_as_real(s[i])\n\n\ndef _get_scalar_dtype(is_fused=None):\n    if is_fused:\n        return torch.float32\n    return (\n        torch.float64 if torch.get_default_dtype() == torch.float64 else torch.float32\n    )\n\n\ndef _is_compiling():\n    if hasattr(torch, 'compiler') and hasattr(torch.compiler, 'is_compiling'):\n        return torch.compiler.is_compiling()\n    else:\n        return False\n\n\ndef _get_value(x):\n    # item is significantly faster than a cpu tensor in eager mode\n    if not torch.jit.is_scripting() and _is_compiling():\n        return x\n    else:\n        return x.item() if isinstance(x, torch.Tensor) else x\n\n\nclass 
Adopt(Optimizer):\n    \"\"\"\n    ADOPT: Modified Adam Can Converge with Any β2 with the Optimal Rate: https://arxiv.org/abs/2411.02853\n\n    \"\"\"\n    def __init__(\n            self,\n            params: ParamsT,\n            lr: Union[float, Tensor] = 1e-3,\n            betas: Tuple[float, float] = (0.9, 0.9999),\n            eps: float = 1e-6,\n            clip_exp: Optional[float] = 0.333,\n            weight_decay: float = 0.0,\n            decoupled: bool = False,\n            corrected_weight_decay: bool = False,\n            *,\n            caution: bool = False,\n            foreach: Optional[bool] = False,\n            maximize: bool = False,\n            capturable: bool = False,\n            differentiable: bool = False,\n    ):\n        if isinstance(lr, Tensor):\n            if foreach and not capturable:\n                raise ValueError(\n                    \"lr as a Tensor is not supported for capturable=False and foreach=True\"\n                )\n            if lr.numel() != 1:\n                raise ValueError(\"Tensor lr must be 1-element\")\n        if not 0.0 <= lr:\n            raise ValueError(f\"Invalid learning rate: {lr}\")\n        if not 0.0 <= eps:\n            raise ValueError(f\"Invalid epsilon value: {eps}\")\n        if not 0.0 <= betas[0] < 1.0:\n            raise ValueError(f\"Invalid beta parameter at index 0: {betas[0]}\")\n        if not 0.0 <= betas[1] < 1.0:\n            raise ValueError(f\"Invalid beta parameter at index 1: {betas[1]}\")\n        if not 0.0 <= weight_decay:\n            raise ValueError(f\"Invalid weight_decay value: {weight_decay}\")\n\n        defaults = dict(\n            lr=lr,\n            betas=betas,\n            eps=eps,\n            weight_decay=weight_decay,\n            clip_exp=clip_exp,\n            decoupled=decoupled,\n            corrected_weight_decay=corrected_weight_decay,\n            caution=caution,\n            maximize=maximize,\n            foreach=foreach,\n            
capturable=capturable,\n            differentiable=differentiable,\n        )\n        super().__init__(params, defaults)\n\n    def __setstate__(self, state):\n        super().__setstate__(state)\n        for group in self.param_groups:\n            group.setdefault(\"maximize\", False)\n            group.setdefault(\"foreach\", None)\n            group.setdefault(\"capturable\", False)\n            group.setdefault(\"differentiable\", False)\n            group.setdefault(\"clip_exp\", None)\n            group.setdefault(\"caution\", False)\n            group.setdefault(\"corrected_weight_decay\", False)\n            for p in group[\"params\"]:\n                p_state = self.state.get(p, [])\n                if len(p_state) != 0 and not torch.is_tensor(p_state[\"step\"]):\n                    step_val = float(p_state[\"step\"])\n                    p_state[\"step\"] = (\n                        torch.tensor(\n                            step_val,\n                            dtype=_get_scalar_dtype(),\n                            device=p.device,\n                        )\n                        if group[\"capturable\"]\n                        else torch.tensor(step_val, dtype=_get_scalar_dtype())\n                    )\n\n    def _init_group(\n            self,\n            group,\n            params_with_grad,\n            grads,\n            exp_avgs,\n            exp_avg_sqs,\n            state_steps,\n    ):\n        has_complex = False\n        for p in group[\"params\"]:\n            if p.grad is None:\n                continue\n            has_complex |= torch.is_complex(p)\n            params_with_grad.append(p)\n            if p.grad.is_sparse:\n                raise RuntimeError(\"ADOPT does not support sparse gradients\")\n            grads.append(p.grad)\n\n            state = self.state[p]\n            # Lazy state initialization\n            if len(state) == 0:\n                # note(crcrpar): [special device hosting for step]\n                
# Deliberately host `step` on CPU if both capturable and fused are off.\n                # This is because kernel launches are costly on CUDA and XLA.\n                state[\"step\"] = (\n                    torch.zeros((), dtype=_get_scalar_dtype(), device=p.grad.device)\n                    if group[\"capturable\"]\n                    else torch.tensor(0.0, dtype=_get_scalar_dtype())\n                )\n                # Exponential moving average of gradient values\n                state[\"exp_avg\"] = torch.zeros_like(p.grad, memory_format=torch.preserve_format)\n                # Exponential moving average of squared gradient values\n                state[\"exp_avg_sq\"] = torch.zeros_like(p.grad, memory_format=torch.preserve_format)\n\n            exp_avgs.append(state[\"exp_avg\"])\n            exp_avg_sqs.append(state[\"exp_avg_sq\"])\n\n            if group[\"differentiable\"] and state[\"step\"].requires_grad:\n                raise RuntimeError(\"`requires_grad` is not supported for `step` in differentiable mode\")\n\n            # Foreach without capturable does not support a tensor lr\n            if group[\"foreach\"] and torch.is_tensor(group[\"lr\"]) and not group[\"capturable\"]:\n                raise RuntimeError(\"lr as a Tensor is not supported for capturable=False and foreach=True\")\n\n            state_steps.append(state[\"step\"])\n        return has_complex\n\n    #@_use_grad_for_differentiable  # FIXME internal context mgr, can't use\n    @torch.no_grad()\n    def step(self, closure=None):\n        \"\"\"Perform a single optimization step.\n\n        Args:\n            closure (Callable, optional): A closure that reevaluates the model\n                and returns the loss.\n        \"\"\"\n        self._cuda_graph_capture_health_check()\n\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n            params_with_grad: 
List[Tensor] = []\n            grads: List[Tensor] = []\n            exp_avgs: List[Tensor] = []\n            exp_avg_sqs: List[Tensor] = []\n            state_steps: List[Tensor] = []\n            beta1, beta2 = group[\"betas\"]\n\n            has_complex = self._init_group(\n                group,\n                params_with_grad,\n                grads,\n                exp_avgs,\n                exp_avg_sqs,\n                state_steps,\n            )\n\n            adopt(\n                params_with_grad,\n                grads,\n                exp_avgs,\n                exp_avg_sqs,\n                state_steps,\n                has_complex=has_complex,\n                beta1=beta1,\n                beta2=beta2,\n                lr=group[\"lr\"],\n                weight_decay=group[\"weight_decay\"],\n                clip_exp=group[\"clip_exp\"],\n                max_lr=self.defaults['lr'] if group['corrected_weight_decay'] else None,\n                decoupled=group[\"decoupled\"],\n                eps=group[\"eps\"],\n                caution=group[\"caution\"],\n                maximize=group[\"maximize\"],\n                foreach=group[\"foreach\"],\n                capturable=group[\"capturable\"],\n                differentiable=group[\"differentiable\"],\n                grad_scale=getattr(self, \"grad_scale\", None),\n                found_inf=getattr(self, \"found_inf\", None),\n            )\n\n        return loss\n\n\ndef _single_tensor_adopt(\n        params: List[Tensor],\n        grads: List[Tensor],\n        exp_avgs: List[Tensor],\n        exp_avg_sqs: List[Tensor],\n        state_steps: List[Tensor],\n        grad_scale: Optional[Tensor],\n        found_inf: Optional[Tensor],\n        *,\n        has_complex: bool,\n        beta1: float,\n        beta2: float,\n        lr: Union[float, Tensor],\n        weight_decay: float,\n        clip_exp: Optional[float],\n        max_lr: Optional[float],\n        decoupled: bool,\n        eps: 
float,\n        caution: bool,\n        maximize: bool,\n        capturable: bool,\n        differentiable: bool,\n):\n    assert grad_scale is None and found_inf is None\n\n    if torch.jit.is_scripting():\n        # this assert is due to JIT being dumb and not realizing that the ops below\n        # have overloads to handle both float and Tensor lrs, so we just assert it's\n        # a float since most people using JIT are using floats\n        assert isinstance(lr, float)\n\n    for i, param in enumerate(params):\n        grad = grads[i] if not maximize else -grads[i]\n        exp_avg = exp_avgs[i]\n        exp_avg_sq = exp_avg_sqs[i]\n        step_t = state_steps[i]\n\n        # If compiling, the compiler will handle cudagraph checks, see note [torch.compile x capturable]\n        if capturable and not _is_compiling():\n            from torch.optim.optimizer import _get_capturable_supported_devices\n            capturable_supported_devices = _get_capturable_supported_devices()\n            assert param.device.type == step_t.device.type and param.device.type in capturable_supported_devices,\\\n                f\"If capturable=True, params and state_steps must be on supported devices: {capturable_supported_devices}.\"\n\n        # update step\n        step_t += 1\n\n        if torch.is_complex(param):\n            grad = torch.view_as_real(grad)\n            if exp_avg is not None:\n                exp_avg = torch.view_as_real(exp_avg)\n            if exp_avg_sq is not None:\n                exp_avg_sq = torch.view_as_real(exp_avg_sq)\n            param = torch.view_as_real(param)\n\n        if weight_decay != 0 and not decoupled:\n            grad = grad.add(param, alpha=weight_decay)\n\n        step = step_t if capturable or differentiable else _get_value(step_t)\n        if step == 1:\n            exp_avg_sq.addcmul_(grad, grad.conj())\n            continue\n\n        if weight_decay != 0 and decoupled:\n            wd_scale = lr ** 2 / max_lr if max_lr is not 
None else lr\n            param.add_(param, alpha=-wd_scale * weight_decay)\n\n        denom = torch.clamp(exp_avg_sq.sqrt(), eps)\n        normed_grad = grad.div(denom)\n\n        if clip_exp is not None:\n            clip_val = (step - 1) ** clip_exp\n            normed_grad.clamp_(-clip_val, clip_val)\n\n        exp_avg.lerp_(normed_grad, 1 - beta1)\n\n        if caution:\n            # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n            mask = (exp_avg * grad > 0).to(grad.dtype)\n            mask.div_(mask.mean().clamp_(min=1e-3))\n            exp_avg = exp_avg * mask\n\n        param.add_(exp_avg, alpha=-lr)\n\n        exp_avg_sq.mul_(beta2).addcmul_(grad, grad.conj(), value=1 - beta2)\n\n\ndef _multi_tensor_adopt(\n        params: List[Tensor],\n        grads: List[Tensor],\n        exp_avgs: List[Tensor],\n        exp_avg_sqs: List[Tensor],\n        state_steps: List[Tensor],\n        grad_scale: Optional[Tensor],\n        found_inf: Optional[Tensor],\n        *,\n        has_complex: bool,\n        beta1: float,\n        beta2: float,\n        lr: Union[float, Tensor],\n        weight_decay: float,\n        clip_exp: Optional[float],\n        max_lr: Optional[float],\n        decoupled: bool,\n        eps: float,\n        caution: bool,\n        maximize: bool,\n        capturable: bool,\n        differentiable: bool,\n):\n    if len(params) == 0:\n        return\n\n    if isinstance(lr, Tensor) and not capturable:\n        raise RuntimeError(\n            \"lr as a Tensor is not supported for capturable=False and foreach=True\"\n        )\n\n    # If compiling, the compiler will handle cudagraph checks, see note [torch.compile x capturable]\n    if capturable and not _is_compiling():\n        from torch.optim.optimizer import _get_capturable_supported_devices\n        capturable_supported_devices = _get_capturable_supported_devices(\n            supports_xla=False\n        )\n        assert all(\n            
p.device.type == step.device.type and p.device.type in capturable_supported_devices\n            for p, step in zip(params, state_steps)\n        ), f\"If capturable=True, params and state_steps must be on supported devices: {capturable_supported_devices}.\"\n\n    assert grad_scale is None and found_inf is None\n\n    assert not differentiable, \"_foreach ops don't support autograd\"\n\n    grouped_tensors = Optimizer._group_tensors_by_device_and_dtype(\n        [params, grads, exp_avgs, exp_avg_sqs, state_steps]  # type: ignore[list-item]\n    )\n    for (\n            device_params_,\n            device_grads_,\n            device_exp_avgs_,\n            device_exp_avg_sqs_,\n            device_state_steps_,\n    ), _ in grouped_tensors.values():\n        device_params = cast(List[Tensor], device_params_)\n        device_grads = cast(List[Tensor], device_grads_)\n        device_exp_avgs = cast(List[Tensor], device_exp_avgs_)\n        device_exp_avg_sqs = cast(List[Tensor], device_exp_avg_sqs_)\n        device_state_steps = cast(List[Tensor], device_state_steps_)\n\n        # Handle complex parameters\n        if has_complex:\n            _view_as_real(device_params, device_grads, device_exp_avgs, device_exp_avg_sqs)\n\n        if maximize:\n            device_grads = torch._foreach_neg(device_grads)  # type: ignore[assignment]\n\n        # Update steps\n        # If steps are on CPU, foreach will fall back to the slow path, which is a for-loop calling t.add(1) over\n        # and over. 1 will then be wrapped into a Tensor over and over again, which is slower than if we just\n        # wrapped it once now. 
The alpha is required to assure we go to the right overload.\n        if not _is_compiling() and device_state_steps[0].is_cpu:\n            torch._foreach_add_(device_state_steps, torch.tensor(1.0, device=\"cpu\"), alpha=1.0)\n        else:\n            torch._foreach_add_(device_state_steps, 1)\n\n        if weight_decay != 0 and not decoupled:\n            # Re-use the intermediate memory (device_grads) already allocated for maximize\n            if maximize:\n                torch._foreach_add_(device_grads, device_params, alpha=weight_decay)\n            else:\n                device_grads = torch._foreach_add(device_grads, device_params, alpha=weight_decay)\n\n        if device_state_steps[0] == 1:\n            torch._foreach_addcmul_(device_exp_avg_sqs, device_grads, device_grads)\n            continue\n\n        if weight_decay != 0 and decoupled:\n            wd_scale = lr ** 2 / max_lr if max_lr is not None else lr\n            torch._foreach_add_(device_params, device_params, alpha=-wd_scale * weight_decay)\n\n        exp_avg_sq_sqrt = torch._foreach_sqrt(device_exp_avg_sqs)\n        torch._foreach_maximum_(exp_avg_sq_sqrt, eps)\n\n        normed_grad = torch._foreach_div(device_grads, exp_avg_sq_sqrt)\n\n        if clip_exp is not None:\n            clip_val = (device_state_steps[0] - 1) ** clip_exp\n            torch._foreach_maximum_(normed_grad, -clip_val)\n            torch._foreach_minimum_(normed_grad, clip_val)\n\n        torch._foreach_lerp_(device_exp_avgs, normed_grad, 1 - beta1)\n\n        if caution:\n            # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n            masks = torch._foreach_mul(device_exp_avgs, device_grads)\n            masks = [(m > 0).to(g.dtype) for m, g in zip(masks, device_grads)]\n            mask_scale = [m.mean() for m in masks]\n            torch._foreach_maximum_(mask_scale, 1e-3)\n            torch._foreach_div_(masks, mask_scale)\n            device_exp_avgs = 
torch._foreach_mul(device_exp_avgs, masks)\n\n        torch._foreach_add_(device_params, device_exp_avgs, alpha=-lr)\n\n        torch._foreach_mul_(device_exp_avg_sqs, beta2)\n        torch._foreach_addcmul_(device_exp_avg_sqs, device_grads, device_grads, value=1 - beta2)\n\n\n#@_disable_dynamo_if_unsupported(single_tensor_fn=_single_tensor_adopt)  # FIXME internal context mgr, can't use\ndef adopt(\n        params: List[Tensor],\n        grads: List[Tensor],\n        exp_avgs: List[Tensor],\n        exp_avg_sqs: List[Tensor],\n        state_steps: List[Tensor],\n        # kwonly args with defaults are not supported by functions compiled with torchscript issue #70627\n        # setting this as kwarg for now as functional API is compiled by torch/distributed/optim\n        foreach: Optional[bool] = None,\n        capturable: bool = False,\n        differentiable: bool = False,\n        grad_scale: Optional[Tensor] = None,\n        found_inf: Optional[Tensor] = None,\n        has_complex: bool = False,\n        *,\n        beta1: float,\n        beta2: float,\n        lr: Union[float, Tensor],\n        weight_decay: float,\n        clip_exp: Optional[float],\n        max_lr: Optional[float],\n        decoupled: bool,\n        eps: float,\n        caution: bool,\n        maximize: bool,\n):\n    r\"\"\"Functional API that performs ADOPT algorithm computation.\n\n    \"\"\"\n    if foreach is None:\n        foreach = False\n\n    # this check is slow during compilation, so we skip it\n    # if it's strictly needed we can add this check back in dynamo\n    if not _is_compiling() and not all(isinstance(t, torch.Tensor) for t in state_steps):\n        raise RuntimeError(\n            \"API has changed, `state_steps` argument must contain a list of singleton tensors\"\n        )\n\n    if foreach and torch.jit.is_scripting():\n        raise RuntimeError(\"torch.jit.script not supported with foreach optimizers\")\n\n    if foreach and not torch.jit.is_scripting():\n        
func = _multi_tensor_adopt\n    else:\n        func = _single_tensor_adopt\n\n    func(\n        params,\n        grads,\n        exp_avgs,\n        exp_avg_sqs,\n        state_steps,\n        has_complex=has_complex,\n        beta1=beta1,\n        beta2=beta2,\n        lr=lr,\n        weight_decay=weight_decay,\n        clip_exp=clip_exp,\n        max_lr=max_lr,\n        decoupled=decoupled,\n        eps=eps,\n        caution=caution,\n        maximize=maximize,\n        capturable=capturable,\n        differentiable=differentiable,\n        grad_scale=grad_scale,\n        found_inf=found_inf,\n    )\n"
  },
  {
    "path": "timm/optim/kron.py",
    "content": "\"\"\" PyTorch Implementation of the Kron (PSGD) optimizer\n\nThis is a PSGD optimizer using a Kronecker-factored preconditioner.\n\nThis impl was adapted from https://github.com/evanatyourservice/kron_torch\nby Evan Walters, licensed CC-BY-4.0.\n\nContributions to above also made by\n* Lucas Nestler, added to his https://github.com/ClashLuke/HeavyBall implementation.\n* Omead Pooladzandi https://github.com/opooladz\n\nThe above work drew from https://github.com/lixilinx/psgd_torch by Xi-Lin Li\n\nReferences for added functionality:\n    Cautious Optimizers: https://arxiv.org/abs/2411.16085\n    Why Gradients Rapidly Increase Near the End of Training: https://arxiv.org/abs/2506.02285\n\nThis `timm` impl\n* works with a wider variety of torch versions\n* fixes some checkpoint save/restore (resume issues)\n* adds decoupled weight-decay option\n* has some refactoring, cleanup of args, default/group items\n* warning about not having opt_einsum (unusable without)\n\n\"\"\"\nimport logging\nimport string\nimport random\nimport warnings\nfrom typing import Any, Callable, Dict, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\ntry:\n    # NOTE opt_einsum needed to avoid blowing up memory with einsum ops\n    import opt_einsum\n    import torch.backends.opt_einsum\n    torch.backends.opt_einsum.enabled = True\n    torch.backends.opt_einsum.strategy = \"auto-hq\"\n    has_opt_einsum = True\nexcept ImportError:\n    has_opt_einsum = False\n\ntry:\n    torch._dynamo.config.cache_size_limit = 1_000_000\n    has_dynamo = True\nexcept AttributeError:\n    has_dynamo = False\n\nfrom ._types import ParamsT\n\n_logger = logging.getLogger(__name__)\n\n\ndef precond_update_prob_schedule(\n        n: float,\n        max_prob: float = 1.0,\n        min_prob: float = 0.03,\n        decay: float = 0.001,\n        flat_start: float = 500,\n) -> torch.Tensor:\n    \"\"\"Anneal preconditioner update probability during beginning of training.\n\n    PSGD benefits from 
more preconditioner updates at the beginning of training,\n    but once the preconditioner is learned the update probability can drop low.\n\n    This schedule is an exponential anneal with a flat start. Default settings keep\n    update probability at 1.0 for 200 steps then exponentially anneal down to\n    `min_prob` by 4000 steps. Default settings work very well for most models and\n    training regimes.\n    \"\"\"\n\n    \"\"\"Exponential anneal with flat start.\"\"\"\n    n = torch.tensor(n, dtype=torch.float32)\n    prob = max_prob * torch.exp(-decay * (n - flat_start))\n    prob.clamp_(min=min_prob, max=max_prob)\n\n    return prob\n\n\nclass Kron(torch.optim.Optimizer):\n    \"\"\"Implements PSGD Kron from https://github.com/lixilinx/psgd_torch.\n\n    Args:\n        params: Iterable of parameters to optimize or dicts defining parameter groups.\n        lr: Learning rate.\n        momentum: Momentum parameter.\n        weight_decay: Weight decay.\n        preconditioner_update_probability: Probability of updating the preconditioner.\n            If None, defaults to a schedule that anneals from 1.0 to 0.03 by 4000 steps.\n        max_size_triangular: Max size for dim's preconditioner to be triangular.\n        min_ndim_triangular: Minimum number of dimensions a layer needs to have triangular preconditioners.\n        memory_save_mode: 'one_diag', 'smart_one_diag', or 'all_diag', None is default\n            to set all preconditioners to be triangular, 'one_diag' sets the largest\n            or last dim to be diagonal per layer, and 'all_diag' sets all preconditioners to be diagonal.\n        momentum_into_precond_update: whether to send momentum into preconditioner\n            update instead of raw gradients.\n        mu_dtype: Dtype of the momentum accumulator.\n        precond_dtype: Dtype of the preconditioner.\n        decoupled_decay: AdamW style decoupled weight decay\n        corrected_weight_decay: apply corrected weight decay when using 
decoupled_decay (lr**2 / max_lr)\n        flatten: Flatten dimensions instead of fully relying on expressions for higher rank params\n        flatten_start_dim: Start of flatten range, defaults to 2. Seems good tradeoff for ConvNets.\n        flatten_end_dim: End of flatten range, defaults to -1.\n        stochastic_weight_decay: Enable random modulation of weight decay\n        deterministic: Deterministic behaviour across save / load (resume). FIXME slow, needs work\n    \"\"\"\n\n    def __init__(\n        self,\n        params: ParamsT,\n        lr: float = 0.001,\n        momentum: float = 0.9,\n        weight_decay: float = 0.0,\n        preconditioner_update_probability: Optional[Union[Callable, float]] = None,\n        max_size_triangular: int = 2048,\n        min_ndim_triangular: int = 2,\n        memory_save_mode: Optional[str] = None,\n        momentum_into_precond_update: bool = True,\n        precond_lr: float = 0.1,\n        precond_init_scale: float = 1.0,\n        mu_dtype: Optional[torch.dtype] = None,\n        precond_dtype: Optional[torch.dtype] = None,\n        decoupled_decay: bool = False,\n        corrected_weight_decay: bool = False,\n        flatten: bool = False,\n        flatten_start_dim: int = 2,\n        flatten_end_dim: int = -1,\n        stochastic_weight_decay: bool = False,\n        deterministic: bool = False,\n    ):\n        if not has_opt_einsum:\n            warnings.warn(\"It is highly recommended to have 'opt_einsum' installed for this optimizer.\")\n\n        if not 0.0 <= lr:\n            raise ValueError(f\"Invalid learning rate: {lr}\")\n        if not 0.0 <= momentum < 1.0:\n            raise ValueError(f\"Invalid beta parameter: {momentum}\")\n        if not 0.0 <= weight_decay:\n            raise ValueError(f\"Invalid weight_decay value: {weight_decay}\")\n\n        defaults = dict(\n            lr=lr,\n            momentum=momentum,\n            weight_decay=weight_decay,\n            
preconditioner_update_probability=preconditioner_update_probability,\n            max_size_triangular=max_size_triangular,\n            min_ndim_triangular=min_ndim_triangular,\n            memory_save_mode=memory_save_mode,\n            momentum_into_precond_update=momentum_into_precond_update,\n            precond_lr=precond_lr,\n            precond_init_scale=precond_init_scale,\n            mu_dtype=mu_dtype,\n            precond_dtype=precond_dtype,\n            decoupled_decay=decoupled_decay,\n            corrected_weight_decay=corrected_weight_decay,\n            flatten=flatten,\n            flatten_start_dim=flatten_start_dim,\n            flatten_end_dim=flatten_end_dim,\n            stochastic_weight_decay=stochastic_weight_decay,\n        )\n        super(Kron, self).__init__(params, defaults)\n\n        self._param_exprs = {}  # cache for einsum expr\n        self._tiny = torch.finfo(torch.bfloat16).tiny\n        self.rng = random.Random(1337)\n        self.deterministic = deterministic\n\n        # make compile optional (for bwd compat)\n        if has_dynamo:\n            self._calc_A_and_conjB = torch.compile(_calc_A_and_conjB, fullgraph=True, dynamic=False)\n            self._q_terms = torch.compile(_q_terms, fullgraph=True, dynamic=False)\n            self._precond_grad = torch.compile(_precond_grad, fullgraph=True, dynamic=False)\n            self._balance_Q = torch.compile(_balance_Q, fullgraph=True, dynamic=False)\n        else:\n            self._calc_A_and_conjB = _calc_A_and_conjB\n            self._q_terms = _q_terms\n            self._precond_grad = _precond_grad\n            self._balance_Q = _balance_Q\n\n    def __setstate__(self, state):\n        super().__setstate__(state)\n        for group in self.param_groups:\n            group.setdefault('corrected_weight_decay', False)\n\n    def __getstate__(self):\n        _dict = super().__getstate__()\n        _dict[\"rng\"] = self.rng\n        return _dict\n\n    def state_dict(self) -> 
Dict[str, Any]:\n        # Get the optimizer's state dict\n        optimizer_state = super().state_dict()\n\n        # Add the generator state\n        optimizer_state['rng_state'] = self.rng.getstate()\n        return optimizer_state\n\n    def load_state_dict(self, state_dict: Dict[str, Any]) -> None:\n        # Extract and remove the RNG state from the state dict\n        rng_states = {}\n        if 'rng_state' in state_dict:\n            rng_states['rng_state'] = state_dict.pop('rng_state')\n            \n        # Load the optimizer state\n        super().load_state_dict(state_dict)\n        state_dict.update(rng_states)  # add back\n\n        # Restore the RNG state if it exists\n        if 'rng_state' in rng_states:\n            self.rng.setstate(rng_states['rng_state'])\n\n    def __setstate__(self, state):\n        super().__setstate__(state)\n        self._param_exprs = {}\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        total_momentum_size = 0\n        total_momentum_mb = 0\n        total_precond_size = 0\n        total_precond_mb = 0\n\n        for group in self.param_groups:\n            mu_dtype = group.get(\"mu_dtype\")\n            precond_dtype = group.get(\"precond_dtype\", torch.float32)\n            momentum_into_precond_update = group.get(\"momentum_into_precond_update\", True)\n            update_prob = group.get(\"preconditioner_update_probability\", None)\n\n            for p in group[\"params\"]:\n                if p.grad is None:\n                    continue\n\n                grad = p.grad\n                state = self.state[p]\n\n                flattened = False\n                if group['flatten']:\n                    grad = safe_flatten(grad, group[\"flatten_start_dim\"], group[\"flatten_end_dim\"])\n                    flattened = True\n\n                if len(state) == 0:\n           
         state[\"step\"] = 0\n                    state[\"update_counter\"] = 0\n                    state[\"momentum_buffer\"] = torch.zeros_like(grad, dtype=mu_dtype or grad.dtype)\n                    # init Q and einsum expressions on first step\n                    state[\"Q\"], exprs = _init_Q_exprs(\n                        grad,\n                        group[\"precond_init_scale\"],\n                        group[\"max_size_triangular\"],\n                        group[\"min_ndim_triangular\"],\n                        group[\"memory_save_mode\"],\n                        dtype=precond_dtype,\n                    )\n                    self._param_exprs[p] = exprs\n\n                    # Accumulate sizes for log\n                    momentum_size = state[\"momentum_buffer\"].numel()\n                    momentum_mb = momentum_size * state[\"momentum_buffer\"].element_size() / 2**20\n                    total_momentum_size += momentum_size\n                    total_momentum_mb += momentum_mb\n\n                    precond_size = sum(q.numel() for q in state[\"Q\"])\n                    precond_mb = sum(q.numel() * q.element_size() for q in state[\"Q\"]) / 2**20\n                    total_precond_size += precond_size\n                    total_precond_mb += precond_mb\n                elif p not in self._param_exprs:\n                    # init only the einsum expressions, called after state load, Q are loaded from state_dict\n                    exprs = _init_Q_exprs(\n                        grad,\n                        group[\"precond_init_scale\"],\n                        group[\"max_size_triangular\"],\n                        group[\"min_ndim_triangular\"],\n                        group[\"memory_save_mode\"],\n                        dtype=precond_dtype,\n                        init_q=False,\n                    )\n                    self._param_exprs[p] = exprs\n                else:\n                    # retrieve cached expressions\n         
           exprs = self._param_exprs[p]\n\n                # update preconditioners all together deterministically\n                if update_prob is None:\n                    update_prob = precond_update_prob_schedule\n                if callable(update_prob):\n                    update_prob = update_prob(state[\"step\"])\n                state[\"update_counter\"] += 1\n                do_update = state[\"update_counter\"] >= 1 / update_prob\n                if do_update:\n                    state[\"update_counter\"] = 0\n\n                state[\"step\"] += 1\n\n                # Update momentum buffer\n                beta = group[\"momentum\"]\n                bias_correction = 1 - beta ** state[\"step\"]\n                momentum_buffer = state[\"momentum_buffer\"]\n                momentum_buffer.mul_(group[\"momentum\"]).add_(grad, alpha=1 - group[\"momentum\"])\n\n                # Restore momentum dtype\n                if mu_dtype is not None:\n                    momentum_buffer.copy_(momentum_buffer.to(dtype=mu_dtype))\n                debiased_momentum = (momentum_buffer / bias_correction).to(dtype=precond_dtype)\n\n                # Balance preconditioners roughly every 100 updates\n                balance = self.rng.random() < 0.01 and do_update\n                if grad.dim() > 1 and balance:\n                    self._balance_Q(state[\"Q\"])\n\n                # Update preconditioner\n                if do_update:\n                    exprA, exprGs, _ = exprs\n                    Q = state[\"Q\"]\n                    if self.deterministic:\n                        torch_rng = torch.Generator(device=debiased_momentum.device)\n                        torch_rng.manual_seed(self.rng.randint(0, 2 ** 31))\n                    else:\n                        torch_rng = None\n                    V = torch.randn(\n                        debiased_momentum.shape,\n                        generator=torch_rng,\n                        dtype=precond_dtype,\n  
                      device=debiased_momentum.device,\n                    )\n                    G = debiased_momentum if momentum_into_precond_update else grad\n\n                    A, conjB = self._calc_A_and_conjB(exprA, G, Q, V)\n\n                    terms = self._q_terms(exprGs, A, conjB)\n\n                    for q, (term1, term2) in zip(Q, terms):\n                        tmp = term1 - term2\n                        tmp *= group[\"precond_lr\"]\n                        if q.dim() < 2:\n                            tmp *= q\n                            tmp /= (term1 + term2).norm(float(\"inf\")) + self._tiny\n                        else:\n                            tmp = torch.triu(tmp)\n                            tmp /= _norm_lower_bound(term1 + term2) + self._tiny\n                            tmp @= q\n                        q.sub_(tmp)\n\n                # Precondition gradients\n                pre_grad = self._precond_grad(\n                    state[\"Q\"],\n                    exprs,\n                    debiased_momentum,\n                ).to(dtype=p.dtype)\n\n                # RMS of pre_grad should be 1.0, so let's cap at 1.1\n                pre_grad.mul_(torch.clamp(1.1 / (pre_grad.square().mean().sqrt_() + 1e-8), max=1.0))\n                if flattened:\n                    pre_grad = pre_grad.view(p.shape)\n\n                # Apply weight decay\n                weight_decay = group[\"weight_decay\"]\n                if weight_decay != 0:\n                    if group[\"stochastic_weight_decay\"]:\n                        weight_decay = 2 * self.rng.random() * weight_decay\n\n                    if group[\"decoupled_decay\"]:\n                        if group['corrected_weight_decay']:\n                            wd_scale = group[\"lr\"] ** 2 / self.defaults['lr']\n                        else:\n                            wd_scale = group[\"lr\"]\n                        p.mul_(1. 
- wd_scale * weight_decay)\n                    else:\n                        pre_grad.add_(p, alpha=weight_decay)\n\n                # Update parameters\n                p.add_(pre_grad, alpha=-group[\"lr\"])\n\n        if total_momentum_size > 0:\n            _logger.info(f\"PSGD Momentum buffer size: {total_momentum_size} elements, {total_momentum_mb:.2f} MB\")\n            _logger.info(f\"PSGD Preconditioners size: {total_precond_size} elements, {total_precond_mb:.2f} MB\")\n\n        return loss\n\n\ndef safe_flatten(tensor, start_dim=0, end_dim=-1):\n    ndim = tensor.ndim\n\n    # Convert negative end_dim to positive and clip to end\n    end_dim = min(end_dim if end_dim >= 0 else ndim + end_dim, ndim - 1)\n\n    # If tensor has fewer dims than start_dim or start > end, return tensor as is\n    if ndim <= start_dim or start_dim > end_dim:\n        return tensor\n\n    # Now safe to flatten\n    return tensor.flatten(start_dim, end_dim)\n\n\ndef _init_Q_exprs(\n        t,\n        scale,\n        max_size,\n        min_ndim_triangular,\n        memory_save_mode,\n        dtype=None,\n        init_q=True,\n):\n    \"\"\"For a scalar or tensor t, we initialize its preconditioner Q and\n    reusable einsum expressions for updating Q and preconditioning gradient.\n    \"\"\"\n    letters = string.ascii_lowercase + string.ascii_uppercase\n\n    dtype = dtype if dtype is not None else t.dtype\n    shape = t.shape\n    Q = []\n    if len(shape) == 0:  # scalar\n        if init_q:\n            Q.append(scale * torch.ones_like(t, dtype=dtype))\n        exprA = \",->\"\n        exprGs = [\",->\"]\n        exprP = \",,->\"\n    else:  # tensor\n        if len(shape) > 13:\n            raise ValueError(f\"Got tensor with dim {len(t.shape)}; Einstein runs out of letters!\")\n\n        scale = scale ** (1 / len(shape))\n\n        if memory_save_mode is None:\n            dim_diag = [False for _ in shape]\n        elif memory_save_mode == \"one_diag\":\n            
rev_sorted_dims = np.argsort(shape)[::-1]\n            dim_diag = [False for _ in shape]\n            dim_diag[rev_sorted_dims[0]] = True\n        elif memory_save_mode == \"smart_one_diag\":\n            # addition proposed by Lucas Nestler\n            rev_sorted_dims = np.argsort(shape)[::-1]\n            sorted_shape = sorted(shape)\n            dim_diag = [False for _ in shape]\n            if len(shape) >= 2 and sorted_shape[-1] > sorted_shape[-2]:\n                dim_diag[rev_sorted_dims[0]] = True\n        elif memory_save_mode == \"all_diag\":\n            dim_diag = [True for _ in shape]\n        else:\n            raise ValueError(\n                f\"Invalid memory_save_mode: {memory_save_mode}, must be one of [None, 'one_diag', 'all_diag']\")\n\n        piece1A, piece2A, piece3A = ([], \"\", \"\")\n        exprGs = []\n        piece1P, piece2P, piece3P, piece4P = ([], [], \"\", \"\")\n        for i, (size, dim_d) in enumerate(zip(shape, dim_diag)):\n            if (\n                size == 1\n                or size > max_size\n                or len(shape) < min_ndim_triangular\n                or dim_d\n            ):\n                # use diagonal matrix as preconditioner for this dim\n                if init_q:\n                    Q.append(scale * torch.ones(size, dtype=dtype, device=t.device))\n\n                piece1A.append(letters[i])\n                piece2A = piece2A + letters[i]\n                piece3A = piece3A + letters[i]\n\n                piece1 = \"\".join([letters[i + 13] if j == i else letters[j] for j in range(len(shape))])\n                subscripts = piece1 + \",\" + piece1 + \"->\" + letters[i + 13]\n                exprGs.append(subscripts)\n\n                piece1P.append(letters[i + 13])\n                piece2P.append(letters[i + 13])\n                piece3P = piece3P + letters[i + 13]\n                piece4P = piece4P + letters[i + 13]\n            else:\n                # use triangular matrix as preconditioner 
for this dim\n                if init_q:\n                    Q.append(scale * torch.eye(size, dtype=dtype, device=t.device))\n\n                piece1A.append(letters[i] + letters[i + 13])\n                piece2A = piece2A + letters[i + 13]\n                piece3A = piece3A + letters[i]\n\n                piece1 = \"\".join([letters[i + 13] if j == i else letters[j] for j in range(len(shape))])\n                piece2 = \"\".join([letters[i + 26] if j == i else letters[j] for j in range(len(shape))])\n                subscripts = piece1 + \",\" + piece2 + \"->\" + letters[i + 13] + letters[i + 26]\n                exprGs.append(subscripts)\n\n                a, b, c = (letters[i], letters[i + 13], letters[i + 26])\n                piece1P.append(a + b)\n                piece2P.append(a + c)\n                piece3P = piece3P + c\n                piece4P = piece4P + b\n\n        exprA = \",\".join(piece1A) + \",\" + piece2A + \"->\" + piece3A\n        exprP = \",\".join(piece1P) + \",\" + \",\".join(piece2P) + \",\" + piece3P + \"->\" + piece4P\n\n    exprGs = tuple(exprGs)\n    if init_q:\n        return [Q, (exprA, exprGs, exprP)]\n    else:\n        return exprA, exprGs, exprP\n\n\ndef _lb(A, max_abs):\n    A = A / max_abs\n    aa = torch.real(A * A.conj())\n    value0, i = torch.max(torch.sum(aa, dim=0), 0)\n    value1, j = torch.max(torch.sum(aa, dim=1), 0)\n    if value0 > value1:\n        x = A[:, i].conj() @ A\n        return max_abs * torch.linalg.vector_norm((x / torch.linalg.vector_norm(x)) @ A.H)\n    else:\n        x = A @ A[j].conj()\n        return max_abs * torch.linalg.vector_norm(A.H @ (x / torch.linalg.vector_norm(x)))\n\n\ndef _norm_lower_bound(A):\n    \"\"\"Cheap lower bound for the spectral norm of A.\"\"\"\n    max_abs = A.norm(float(\"inf\"))\n    return torch.where(max_abs > 0, _lb(A, max_abs), max_abs)\n\n\ndef _solve_triangular_right(X, A):\n    \"\"\"X @ inv(A)\"\"\"\n    orig_dtype = X.dtype\n    X = X.to(dtype=torch.float32)\n    A 
= A.to(dtype=torch.float32)\n    out = torch.linalg.solve_triangular(A, X.reshape(-1, X.size(-1)), upper=True, left=False).reshape_as(X)\n    return out.to(dtype=orig_dtype)\n\n\ndef _balance_Q(Q_in):\n    norms = torch.stack([q.norm(float(\"inf\")) for q in Q_in])\n    geometric_mean = norms.prod() ** (1 / len(Q_in))\n    norms = geometric_mean / norms\n    for i, q in enumerate(Q_in):\n        q.mul_(norms[i])\n\n\ndef _precond_grad(Q, exprs, G):\n    \"\"\"Precondition gradient G with preconditioner Q.\"\"\"\n    return torch.einsum(exprs[-1], *[q.conj() for q in Q], *Q, G)\n\n\ndef _calc_A_and_conjB(exprA, G, Q, V):\n    A = torch.einsum(exprA, *Q, G)\n    order = G.dim()\n    p = tuple(range(order))\n    conjB = torch.permute(V.conj(), p[1:] + p[:1])\n    for i, q in enumerate(Q):\n        conjB = conjB / q if q.dim() < 2 else _solve_triangular_right(conjB, q)\n        if i < order - 1:\n            conjB = torch.transpose(conjB, i, order - 1)\n    return A, conjB\n\n\ndef _q_terms(exprGs, A, conjB):\n    terms = []\n    for exprG in exprGs:\n        term1 = torch.einsum(exprG, A, A.conj())\n        term2 = torch.einsum(exprG, conjB.conj(), conjB)\n        terms.append((term1, term2))\n    return terms\n"
  },
  {
    "path": "timm/optim/lamb.py",
    "content": "\"\"\" PyTorch Lamb optimizer w/ behaviour similar to NVIDIA FusedLamb\n\nThis optimizer code was adapted from the following (starting with latest)\n* https://github.com/HabanaAI/Model-References/blob/2b435114fe8e31f159b1d3063b8280ae37af7423/PyTorch/nlp/bert/pretraining/lamb.py\n* https://github.com/NVIDIA/DeepLearningExamples/blob/master/PyTorch/LanguageModeling/Transformer-XL/pytorch/lamb.py\n* https://github.com/cybertronai/pytorch-lamb\n\nUse FusedLamb if you can (GPU). The reason for including this variant of Lamb is to have a version that is\nsimilar in behaviour to APEX FusedLamb if you aren't using NVIDIA GPUs or cannot install/use APEX.\n\nIn addition to some cleanup, this Lamb impl has been modified to support PyTorch XLA and has been tested on TPU.\n\nReferences for added functionality:\n    Cautious Optimizers: https://arxiv.org/abs/2411.16085\n    Why Gradients Rapidly Increase Near the End of Training: https://arxiv.org/abs/2506.02285\n\nOriginal copyrights for above sources are below.\n\nModifications Copyright 2021 Ross Wightman\n\"\"\"\n# Copyright (c) 2021, Habana Labs Ltd.  All rights reserved.\n\n# Copyright (c) 2019-2020, NVIDIA CORPORATION. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#       http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# MIT License\n#\n# Copyright (c) 2019 cybertronai\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in all\n# copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\nimport math\nfrom typing import Optional, Tuple\n\nimport torch\nfrom torch.optim import Optimizer\n\nfrom ._types import ParamsT\n\n\nclass Lamb(Optimizer):\n    \"\"\"Implements a pure pytorch variant of FuseLAMB (NvLamb variant) optimizer from apex.optimizers.FusedLAMB\n    reference: https://github.com/NVIDIA/DeepLearningExamples/blob/master/PyTorch/LanguageModeling/Transformer-XL/pytorch/lamb.py\n\n    LAMB was proposed in:\n    - Large Batch Optimization for Deep Learning - Training BERT in 76 minutes:  https://arxiv.org/abs/1904.00962\n    - On the Convergence of Adam and Beyond: https://openreview.net/forum?id=ryQu7f-RZ\n\n    Args:\n        params: Iterable of parameters to optimize or dicts defining parameter groups.\n        lr: Learning rate\n        betas: Coefficients used for computing running averages of gradient and its norm.\n        eps: Term added to the denominator to improve numerical stability.\n        weight_decay: Weight decay\n        grad_averaging: Whether apply (1-beta2) to grad when calculating running averages of gradient.\n        max_grad_norm: Value used to clip global grad norm.\n        trust_clip: Enable LAMBC trust ratio clipping.\n        always_adapt: Apply adaptive learning rate to 0.0 weight decay parameter.\n        caution: Apply caution.\n        decoupled: apply decoupled weight decay\n        corrected_weight_decay: apply corrected weight decay (lr**2 / max_lr) when using decoupled_decay\n    \"\"\"\n\n    def __init__(\n            self,\n            params: ParamsT,\n            lr: float = 1e-3,\n            bias_correction: bool = True,\n            betas: Tuple[float, float] = (0.9, 0.999),\n            eps: float = 1e-6,\n            
weight_decay: float = 0.01,\n            grad_averaging: bool = True,\n            max_grad_norm: Optional[float] = 1.0,\n            trust_clip: bool = False,\n            always_adapt: bool = False,\n            caution: bool = False,\n            decoupled_decay: bool = False,\n            corrected_weight_decay: bool = False,\n    ):\n        defaults = dict(\n            lr=lr,\n            bias_correction=bias_correction,\n            betas=betas,\n            eps=eps,\n            weight_decay=weight_decay,\n            grad_averaging=grad_averaging,\n            max_grad_norm=max_grad_norm,\n            trust_clip=trust_clip,\n            always_adapt=always_adapt,\n            caution=caution,\n            decoupled_decay=decoupled_decay,\n            corrected_weight_decay=corrected_weight_decay,\n        )\n        super().__init__(params, defaults)\n\n    def __setstate__(self, state):\n        super().__setstate__(state)\n        for group in self.param_groups:\n            group.setdefault('caution', False)\n            group.setdefault('decoupled_decay', False)\n            group.setdefault('corrected_weight_decay', False)\n\n    def _get_clip_grad_norm(self):\n        max_grad_norm = self.defaults['max_grad_norm']\n        if max_grad_norm is None:\n            return None\n\n        norms = []\n        for group in self.param_groups:\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n                grad = p.grad\n                if grad.is_sparse:\n                    raise RuntimeError('Lamb does not support sparse gradients, consider SparseAdam instead.')\n                norms.append(torch.linalg.vector_norm(grad))\n        global_norm = torch.linalg.vector_norm(torch.stack(norms))\n        clip_global_norm = (global_norm / max_grad_norm).clamp_(min=1.0)\n        return clip_global_norm\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        \"\"\"Performs a single optimization 
step.\n        Arguments:\n            closure (callable, optional): A closure that reevaluates the model\n                and returns the loss.\n        \"\"\"\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        clip_grad_norm = self._get_clip_grad_norm() # None if disabled\n\n        for group in self.param_groups:\n            bias_correction = 1 if group['bias_correction'] else 0\n            beta1, beta2 = group['betas']\n            grad_averaging = 1 if group['grad_averaging'] else 0\n            beta3 = 1 - beta1 if grad_averaging else 1.0\n\n            # assume same step across group now to simplify things\n            # per parameter step can be easily support by making it tensor, or pass list into kernel\n            if 'step' in group:\n                group['step'] += 1\n            else:\n                group['step'] = 1\n\n            if bias_correction:\n                bias_correction1 = 1 - beta1 ** group['step']\n                bias_correction2 = 1 - beta2 ** group['step']\n            else:\n                bias_correction1, bias_correction2 = 1.0, 1.0\n\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n                grad = p.grad\n\n                if clip_grad_norm is not None:\n                    grad.div_(clip_grad_norm)\n\n                state = self.state[p]\n\n                # State initialization\n                if len(state) == 0:\n                    # Exponential moving average of gradient valuesa\n                    state['exp_avg'] = torch.zeros_like(p)\n                    # Exponential moving average of squared gradient values\n                    state['exp_avg_sq'] = torch.zeros_like(p)\n\n                exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']\n\n                # Decay the first and second moment running average coefficient\n                
exp_avg.mul_(beta1).add_(grad, alpha=beta3)  # m_t\n                exp_avg_sq.mul_(beta2).addcmul_(grad, grad, value=1 - beta2)  # v_t\n\n                denom = (exp_avg_sq.sqrt() / math.sqrt(bias_correction2)).add_(group['eps'])\n                update = (exp_avg / bias_correction1).div_(denom)\n\n                if group['caution']:\n                    # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n                    mask = (update * grad > 0).to(grad.dtype)\n                    mask.div_(mask.mean().clamp_(min=1e-3))\n                    update.mul_(mask)\n\n                weight_decay = group['weight_decay']\n                if weight_decay != 0:\n                    if group.get('decoupled_decay', False):\n                        if group['corrected_weight_decay']:\n                            wd_scale = group['lr'] ** 2 / self.defaults['lr']\n                        else:\n                            wd_scale = group['lr']\n                        p.add_(p, alpha=-wd_scale * weight_decay)\n                    else:\n                        update.add_(p, alpha=weight_decay)\n\n                if weight_decay != 0 or group['always_adapt']:\n                    # Layer-wise LR adaptation. 
By default, skip adaptation on parameters that are\n                    # excluded from weight decay, unless always_adapt == True, then always enabled.\n                    w_norm = p.norm(2.0)\n                    g_norm = update.norm(2.0)\n                    trust_ratio = w_norm / g_norm\n                    # FIXME nested where required since logical and/or not working in PT XLA\n                    # Set the ratio to 1.0 (no change) if either weight norm or grad norm is zero\n                    trust_ratio = torch.where(\n                        w_norm > 0,\n                        torch.where(g_norm > 0, trust_ratio, 1.0),\n                        1.0,\n                    )\n                    if group['trust_clip']:\n                        # LAMBC trust clipping, upper bound fixed at one\n                        trust_ratio = torch.clamp(trust_ratio, max=1.0)\n                    update.mul_(trust_ratio)\n\n                p.add_(update, alpha=-group['lr'])\n\n        return loss\n"
  },
  {
    "path": "timm/optim/laprop.py",
    "content": "\"\"\" PyTorch impl of LaProp optimizer\n\nCode simplified from https://github.com/Z-T-WANG/LaProp-Optimizer, MIT License\n\nPaper: LaProp: Separating Momentum and Adaptivity in Adam, https://arxiv.org/abs/2002.04839\n\n@article{ziyin2020laprop,\n  title={LaProp: a Better Way to Combine Momentum with Adaptive Gradient},\n  author={Ziyin, Liu and Wang, Zhikang T and Ueda, Masahito},\n  journal={arXiv preprint arXiv:2002.04839},\n  year={2020}\n}\n\nReferences for added functionality:\n    Cautious Optimizers: https://arxiv.org/abs/2411.16085\n    Why Gradients Rapidly Increase Near the End of Training: https://arxiv.org/abs/2506.02285\n\n\"\"\"\nfrom typing import Tuple\n\nfrom torch.optim import Optimizer\nimport torch\n\nfrom ._types import ParamsT\n\n\nclass LaProp(Optimizer):\n    \"\"\" LaProp Optimizer\n\n    Paper: LaProp: Separating Momentum and Adaptivity in Adam, https://arxiv.org/abs/2002.04839\n    \"\"\"\n    def __init__(\n            self,\n            params: ParamsT,\n            lr: float = 4e-4,\n            betas: Tuple[float, float] = (0.9, 0.999),\n            eps: float = 1e-15,\n            weight_decay: float = 0.,\n            caution: bool = False,\n            corrected_weight_decay: bool = False,\n    ):\n        if not 0.0 <= lr:\n            raise ValueError(\"Invalid learning rate: {}\".format(lr))\n        if not 0.0 <= eps:\n            raise ValueError(\"Invalid epsilon value: {}\".format(eps))\n        if not 0.0 <= betas[0] < 1.0:\n            raise ValueError(\"Invalid beta parameter at index 0: {}\".format(betas[0]))\n        if not 0.0 <= betas[1] < 1.0:\n            raise ValueError(\"Invalid beta parameter at index 1: {}\".format(betas[1]))\n        defaults = dict(\n            lr=lr,\n            betas=betas,\n            eps=eps,\n            weight_decay=weight_decay,\n            caution=caution,\n            corrected_weight_decay=corrected_weight_decay,\n        )\n        super(LaProp, 
self).__init__(params, defaults)\n\n    def __setstate__(self, state):\n        super().__setstate__(state)\n        for group in self.param_groups:\n            group.setdefault('caution', False)\n            group.setdefault('corrected_weight_decay', False)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        \"\"\"Performs a single optimization step.\n\n        Arguments:\n            closure (callable, optional): A closure that reevaluates the model\n                and returns the loss.\n        \"\"\"\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n                grad = p.grad\n                if grad.is_sparse:\n                    raise RuntimeError('LaProp does not support sparse gradients')\n\n                state = self.state[p]\n\n                # State initialization\n                if len(state) == 0:\n                    state['step'] = 0\n                    # Exponential moving average of gradient values\n                    state['exp_avg'] = torch.zeros_like(p)\n                    # Exponential moving average of learning rates\n                    state['exp_avg_lr_1'] = 0.\n                    state['exp_avg_lr_2'] = 0.\n                    # Exponential moving average of squared gradient values\n                    state['exp_avg_sq'] = torch.zeros_like(p)\n\n                exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']\n                beta1, beta2 = group['betas']\n\n                state['step'] += 1\n                one_minus_beta2 = 1 - beta2\n                one_minus_beta1 = 1 - beta1\n\n                # Decay the first and second moment running average coefficient\n                exp_avg_sq.mul_(beta2).addcmul_(grad, grad, value=one_minus_beta2)\n\n                
state['exp_avg_lr_1'] = state['exp_avg_lr_1'] * beta1 + one_minus_beta1 * group['lr']\n                state['exp_avg_lr_2'] = state['exp_avg_lr_2'] * beta2 + one_minus_beta2\n\n                # 1 - beta1 ** state['step']\n                bias_correction1 = state['exp_avg_lr_1'] / group['lr'] if group['lr'] != 0. else 1.\n                bias_correction2 = state['exp_avg_lr_2']\n                step_size = 1 / bias_correction1\n\n                denom = exp_avg_sq.div(bias_correction2).sqrt_().add_(group['eps'])\n                step_of_this_grad = grad / denom\n                exp_avg.mul_(beta1).add_(step_of_this_grad, alpha=group['lr'] * one_minus_beta1)\n\n                if group['caution']:\n                    # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n                    mask = (exp_avg * grad > 0).to(grad.dtype)\n                    mask.div_(mask.mean().clamp_(min=1e-3))\n                    exp_avg = exp_avg * mask\n\n                p.add_(exp_avg, alpha=-step_size)\n\n                if group['weight_decay'] != 0:\n                    if group['corrected_weight_decay']:\n                        wd_scale = group['lr'] ** 2 / self.defaults['lr']\n                    else:\n                        wd_scale = group['lr']\n                    p.add_(p, alpha=-wd_scale * group['weight_decay'])\n\n        return loss"
  },
  {
    "path": "timm/optim/lars.py",
    "content": "\"\"\" PyTorch LARS / LARC Optimizer\n\nAn implementation of LARS (SGD) + LARC in PyTorch\n\nBased on:\n  * PyTorch SGD: https://github.com/pytorch/pytorch/blob/1.7/torch/optim/sgd.py#L100\n  * NVIDIA APEX LARC: https://github.com/NVIDIA/apex/blob/master/apex/parallel/LARC.py\n\nAdditional cleanup and modifications to properly support PyTorch XLA.\n\nCopyright 2021 Ross Wightman\n\"\"\"\nimport torch\nfrom torch.optim.optimizer import Optimizer\n\n\nclass Lars(Optimizer):\n    \"\"\" LARS for PyTorch\n    \n    Paper: `Large batch training of Convolutional Networks` - https://arxiv.org/pdf/1708.03888.pdf\n\n    Args:\n        params (iterable): iterable of parameters to optimize or dicts defining parameter groups.\n        lr (float, optional): learning rate (default: 1.0).\n        momentum (float, optional): momentum factor (default: 0)\n        weight_decay (float, optional): weight decay (L2 penalty) (default: 0)\n        dampening (float, optional): dampening for momentum (default: 0)\n        nesterov (bool, optional): enables Nesterov momentum (default: False)\n        trust_coeff (float): trust coefficient for computing adaptive lr / trust_ratio (default: 0.001)\n        eps (float): eps for division denominator (default: 1e-8)\n        trust_clip (bool): enable LARC trust ratio clipping (default: False)\n        always_adapt (bool): always apply LARS LR adapt, otherwise only when group weight_decay != 0 (default: False)\n    \"\"\"\n\n    def __init__(\n        self,\n        params,\n        lr=1.0,\n        momentum=0,\n        dampening=0,\n        weight_decay=0,\n        nesterov=False,\n        trust_coeff=0.001,\n        eps=1e-8,\n        trust_clip=False,\n        always_adapt=False,\n    ):\n        if lr < 0.0:\n            raise ValueError(f\"Invalid learning rate: {lr}\")\n        if momentum < 0.0:\n            raise ValueError(f\"Invalid momentum value: {momentum}\")\n        if weight_decay < 0.0:\n            raise 
ValueError(f\"Invalid weight_decay value: {weight_decay}\")\n        if nesterov and (momentum <= 0 or dampening != 0):\n            raise ValueError(\"Nesterov momentum requires a momentum and zero dampening\")\n\n        defaults = dict(\n            lr=lr,\n            momentum=momentum,\n            dampening=dampening,\n            weight_decay=weight_decay,\n            nesterov=nesterov,\n            trust_coeff=trust_coeff,\n            eps=eps,\n            trust_clip=trust_clip,\n            always_adapt=always_adapt,\n        )\n        super().__init__(params, defaults)\n\n    def __setstate__(self, state):\n        super().__setstate__(state)\n        for group in self.param_groups:\n            group.setdefault(\"nesterov\", False)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        \"\"\"Performs a single optimization step.\n\n        Args:\n            closure (callable, optional): A closure that reevaluates the model and returns the loss.\n        \"\"\"\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n            weight_decay = group['weight_decay']\n            momentum = group['momentum']\n            dampening = group['dampening']\n            nesterov = group['nesterov']\n            trust_coeff = group['trust_coeff']\n            eps = group['eps']\n\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n                grad = p.grad\n\n                # apply LARS LR adaptation, LARC clipping, weight decay\n                # ref: https://github.com/NVIDIA/apex/blob/master/apex/parallel/LARC.py\n                if weight_decay != 0 or group['always_adapt']:\n                    w_norm = p.norm(2.0)\n                    g_norm = grad.norm(2.0)\n                    trust_ratio = trust_coeff * w_norm / (g_norm + w_norm * weight_decay + eps)\n                   
 # FIXME nested where required since logical and/or not working in PT XLA\n                    # Set the ratio to 1.0 (no change) if either weight norm or grad norm is zero\n                    trust_ratio = torch.where(\n                        w_norm > 0,\n                        torch.where(g_norm > 0, trust_ratio, 1.0),\n                        1.0,\n                    )\n                    if group['trust_clip']:\n                        trust_ratio = torch.clamp(trust_ratio / group['lr'], max=1.0)\n                    grad.add_(p, alpha=weight_decay)\n                    grad.mul_(trust_ratio)\n\n                # apply SGD update https://github.com/pytorch/pytorch/blob/1.7/torch/optim/sgd.py#L100\n                if momentum != 0:\n                    param_state = self.state[p]\n                    if 'momentum_buffer' not in param_state:\n                        buf = param_state['momentum_buffer'] = torch.clone(grad).detach()\n                    else:\n                        buf = param_state['momentum_buffer']\n                        buf.mul_(momentum).add_(grad, alpha=1. - dampening)\n                    if nesterov:\n                        grad = grad.add(buf, alpha=momentum)\n                    else:\n                        grad = buf\n\n                p.add_(grad, alpha=-group['lr'])\n\n        return loss"
  },
  {
    "path": "timm/optim/lion.py",
    "content": "\"\"\" Lion Optimizer\nPaper: `Symbolic Discovery of Optimization Algorithms` - https://arxiv.org/abs/2302.06675\nOriginal Impl: https://github.com/google/automl/tree/master/lion\n\nReferences for added functionality:\n    Cautious Optimizers: https://arxiv.org/abs/2411.16085\n    Why Gradients Rapidly Increase Near the End of Training: https://arxiv.org/abs/2506.02285\n\"\"\"\n# Copyright 2023 Google Research. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\nfrom typing import List, Optional, Tuple\n\nimport torch\nfrom torch.optim.optimizer import Optimizer\n\nfrom ._types import ParamsT\n\n\nclass Lion(Optimizer):\n    r\"\"\"Implements Lion algorithm.\"\"\"\n\n    def __init__(\n            self,\n            params: ParamsT,\n            lr: float = 1e-4,\n            betas: Tuple[float, float] = (0.9, 0.99),\n            weight_decay: float = 0.0,\n            caution: bool = False,\n            corrected_weight_decay: bool = False,\n            maximize: bool = False,\n            foreach: Optional[bool] = None,\n    ):\n        \"\"\"Initialize the hyperparameters.\n\n        Args:\n            params: iterable of parameters to optimize or dicts defining parameter groups\n            lr: learning rate\n            betas: coefficients used for computing running averages of gradient and its square\n            weight_decay: weight decay 
coefficient\n            caution: apply caution\n            corrected_weight_decay: apply corrected weight decay (lr**2 / max_lr)\n        \"\"\"\n\n        if not 0.0 <= lr:\n            raise ValueError('Invalid learning rate: {}'.format(lr))\n        if not 0.0 <= betas[0] < 1.0:\n            raise ValueError('Invalid beta parameter at index 0: {}'.format(betas[0]))\n        if not 0.0 <= betas[1] < 1.0:\n            raise ValueError('Invalid beta parameter at index 1: {}'.format(betas[1]))\n        defaults = dict(\n            lr=lr,\n            betas=betas,\n            weight_decay=weight_decay,\n            caution=caution,\n            corrected_weight_decay=corrected_weight_decay,\n            foreach=foreach,\n            maximize=maximize,\n        )\n        super().__init__(params, defaults)\n\n    def __setstate__(self, state):\n        super().__setstate__(state)\n        for group in self.param_groups:\n            group.setdefault('caution', False)\n            group.setdefault('corrected_weight_decay', False)\n            group.setdefault('maximize', False)\n            group.setdefault('foreach', None)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        \"\"\"Performs a single optimization step.\n\n        Args:\n            closure: A closure that reevaluates the model and returns the loss.\n\n        Returns:\n            the loss.\n        \"\"\"\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n            params_with_grad = []\n            grads = []\n            exp_avgs = []\n            beta1, beta2 = group['betas']\n\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n                params_with_grad.append(p)\n                if p.grad.is_sparse:\n                    raise RuntimeError('Lion does not support sparse gradients')\n                
grads.append(p.grad)\n\n                state = self.state[p]\n\n                # State initialization\n                if len(state) == 0:\n                    state['exp_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format)\n\n                exp_avgs.append(state['exp_avg'])\n\n            lion(\n                params_with_grad,\n                grads,\n                exp_avgs,\n                beta1=beta1,\n                beta2=beta2,\n                lr=group['lr'],\n                weight_decay=group['weight_decay'],\n                caution=group['caution'],\n                maximize=group['maximize'],\n                foreach=group['foreach'],\n                max_lr=self.defaults['lr'] if group['corrected_weight_decay'] else None,\n            )\n\n        return loss\n\n\ndef lion(\n        params: List[torch.Tensor],\n        grads: List[torch.Tensor],\n        exp_avgs: List[torch.Tensor],\n        # kwonly args with defaults are not supported by functions compiled with torchscript issue #70627\n        # setting this as kwarg for now as functional API is compiled by torch/distributed/optim\n        maximize: bool = False,\n        foreach: bool = None,\n        *,\n        beta1: float,\n        beta2: float,\n        lr: float,\n        weight_decay: float,\n        caution: bool,\n        max_lr: Optional[float] = None,\n):\n    r\"\"\"Functional API that performs Lion algorithm computation.\n    \"\"\"\n    if foreach is None:\n        try:\n            # cannot do foreach if this overload doesn't exist when caution enabled\n            foreach = not caution or 'Scalar' in torch.ops.aten._foreach_maximum_.overloads()\n        except Exception:\n            foreach = False\n\n    if foreach and torch.jit.is_scripting():\n        raise RuntimeError('torch.jit.script not supported with foreach optimizers')\n\n    if foreach and not torch.jit.is_scripting():\n        func = _multi_tensor_lion\n    else:\n        func = 
_single_tensor_lion\n\n    func(\n        params,\n        grads,\n        exp_avgs,\n        beta1=beta1,\n        beta2=beta2,\n        lr=lr,\n        weight_decay=weight_decay,\n        caution=caution,\n        maximize=maximize,\n        max_lr=max_lr,\n    )\n\n\ndef _single_tensor_lion(\n        params: List[torch.Tensor],\n        grads: List[torch.Tensor],\n        exp_avgs: List[torch.Tensor],\n        *,\n        beta1: float,\n        beta2: float,\n        lr: float,\n        weight_decay: float,\n        caution: bool,\n        maximize: bool,\n        max_lr: Optional[float],\n):\n    for i, param in enumerate(params):\n        grad = grads[i] if not maximize else -grads[i]\n        exp_avg = exp_avgs[i]\n\n        if torch.is_complex(param):\n            grad = torch.view_as_real(grad)\n            exp_avg = torch.view_as_real(exp_avg)\n            param = torch.view_as_real(param)\n\n        # Perform stepweight decay\n        wd_scale = lr if max_lr is None else lr ** 2 / max_lr\n        param.mul_(1 - wd_scale * weight_decay)\n\n        # Weight update\n        update = exp_avg.mul(beta1).add_(grad, alpha=1 - beta1).sign_()\n\n        if caution:\n            # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n            mask = (update * grad > 0).to(grad.dtype)\n            mask.div_(mask.mean().clamp_(min=1e-3))\n            update.mul_(mask)\n\n        param.add_(update, alpha=-lr)\n\n        # Decay the momentum running average coefficient\n        exp_avg.lerp_(grad, 1 - beta2)\n\n\ndef _multi_tensor_lion(\n        params: List[torch.Tensor],\n        grads: List[torch.Tensor],\n        exp_avgs: List[torch.Tensor],\n        *,\n        beta1: float,\n        beta2: float,\n        lr: float,\n        weight_decay: float,\n        caution: bool,\n        maximize: bool,\n        max_lr: Optional[float],\n):\n    if len(params) == 0:\n        return\n\n    if maximize:\n        grads = 
torch._foreach_neg(tuple(grads))  # type: ignore[assignment]\n\n    grads = [torch.view_as_real(x) if torch.is_complex(x) else x for x in grads]\n    exp_avgs = [torch.view_as_real(x) if torch.is_complex(x) else x for x in exp_avgs]\n    params = [torch.view_as_real(x) if torch.is_complex(x) else x for x in params]\n\n    # Perform stepweight decay\n    wd_scale = lr if max_lr is None else lr ** 2 / max_lr\n    torch._foreach_mul_(params, 1 - wd_scale * weight_decay)\n\n    # Weight update\n    updates = torch._foreach_mul(exp_avgs, beta1)\n    torch._foreach_add_(updates, grads, alpha=1 - beta1)\n    updates = [u.sign_() for u in updates]\n\n    if caution:\n        # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n        masks = torch._foreach_mul(updates, grads)\n        masks = [(m > 0).to(g.dtype) for m, g in zip(masks, grads)]\n        mask_scale = [m.mean() for m in masks]\n        torch._foreach_maximum_(mask_scale, 1e-3)\n        torch._foreach_div_(masks, mask_scale)\n        torch._foreach_mul_(updates, masks)\n\n    torch._foreach_add_(params, updates, alpha=-lr)\n\n    # Decay the momentum running average coefficient\n    torch._foreach_mul_(exp_avgs, beta2)\n    torch._foreach_add_(exp_avgs, grads, alpha=1 - beta2)\n"
  },
  {
    "path": "timm/optim/lookahead.py",
    "content": "\"\"\" Lookahead Optimizer Wrapper.\nImplementation modified from: https://github.com/alphadl/lookahead.pytorch\nPaper: `Lookahead Optimizer: k steps forward, 1 step back` - https://arxiv.org/abs/1907.08610\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nfrom collections import OrderedDict\nfrom typing import Callable, Dict\n\nimport torch\nfrom torch.optim.optimizer import Optimizer\nfrom collections import defaultdict\n\n\nclass Lookahead(Optimizer):\n    def __init__(self, base_optimizer, alpha=0.5, k=6):\n        # NOTE super().__init__() not called on purpose\n        self._optimizer_step_pre_hooks: Dict[int, Callable] = OrderedDict()\n        self._optimizer_step_post_hooks: Dict[int, Callable] = OrderedDict()\n        if not 0.0 <= alpha <= 1.0:\n            raise ValueError(f'Invalid slow update rate: {alpha}')\n        if not 1 <= k:\n            raise ValueError(f'Invalid lookahead steps: {k}')\n        defaults = dict(lookahead_alpha=alpha, lookahead_k=k, lookahead_step=0)\n        self._base_optimizer = base_optimizer\n        self.param_groups = base_optimizer.param_groups\n        self.defaults = base_optimizer.defaults\n        self.defaults.update(defaults)\n        self.state = defaultdict(dict)\n        # manually add our defaults to the param groups\n        for name, default in defaults.items():\n            for group in self._base_optimizer.param_groups:\n                group.setdefault(name, default)\n\n    @torch.no_grad()\n    def update_slow(self, group):\n        for fast_p in group[\"params\"]:\n            if fast_p.grad is None:\n                continue\n            param_state = self._base_optimizer.state[fast_p]\n            if 'lookahead_slow_buff' not in param_state:\n                param_state['lookahead_slow_buff'] = torch.empty_like(fast_p)\n                param_state['lookahead_slow_buff'].copy_(fast_p)\n            slow = param_state['lookahead_slow_buff']\n            slow.add_(fast_p - slow, 
alpha=group['lookahead_alpha'])\n            fast_p.copy_(slow)\n\n    def sync_lookahead(self):\n        for group in self._base_optimizer.param_groups:\n            self.update_slow(group)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        loss = self._base_optimizer.step(closure)\n        for group in self._base_optimizer.param_groups:\n            group['lookahead_step'] += 1\n            if group['lookahead_step'] % group['lookahead_k'] == 0:\n                self.update_slow(group)\n        return loss\n\n    def state_dict(self):\n        return self._base_optimizer.state_dict()\n\n    def load_state_dict(self, state_dict):\n        self._base_optimizer.load_state_dict(state_dict)\n        self.param_groups = self._base_optimizer.param_groups\n"
  },
  {
    "path": "timm/optim/madgrad.py",
    "content": "\"\"\" PyTorch MADGRAD optimizer\n\nMADGRAD: https://arxiv.org/abs/2101.11075\n\nCode from: https://github.com/facebookresearch/madgrad\n\"\"\"\n# Copyright (c) Facebook, Inc. and its affiliates.\n#\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\nimport math\nfrom typing import TYPE_CHECKING, Any, Callable, Optional\n\nimport torch\nimport torch.optim\n\nif TYPE_CHECKING:\n    from torch.optim.optimizer import _params_t\nelse:\n    _params_t = Any\n\n\nclass MADGRAD(torch.optim.Optimizer):\n    \"\"\"\n    MADGRAD_: A Momentumized, Adaptive, Dual Averaged Gradient Method for Stochastic\n    Optimization.\n\n    .. _MADGRAD: https://arxiv.org/abs/2101.11075\n\n    MADGRAD is a general purpose optimizer that can be used in place of SGD or\n    Adam may converge faster and generalize better. Currently GPU-only.\n    Typically, the same learning rate schedule that is used for SGD or Adam may\n    be used. The overall learning rate is not comparable to either method and\n    should be determined by a hyper-parameter sweep.\n\n    MADGRAD requires less weight decay than other methods, often as little as\n    zero. Momentum values used for SGD or Adam's beta1 should work here also.\n\n    On sparse problems both weight_decay and momentum should be set to 0.\n\n    Arguments:\n        params (iterable):\n            Iterable of parameters to optimize or dicts defining parameter groups.\n        lr (float):\n            Learning rate (default: 1e-2).\n        momentum (float):\n            Momentum value in  the range [0,1) (default: 0.9).\n        weight_decay (float):\n            Weight decay, i.e. a L2 penalty (default: 0).\n        eps (float):\n            Term added to the denominator outside of the root operation to improve numerical stability. 
(default: 1e-6).\n    \"\"\"\n\n    def __init__(\n            self,\n            params: _params_t,\n            lr: float = 1e-2,\n            momentum: float = 0.9,\n            weight_decay: float = 0,\n            eps: float = 1e-6,\n            decoupled_decay: bool = False,\n    ):\n        if momentum < 0 or momentum >= 1:\n            raise ValueError(f\"Momentum {momentum} must be in the range [0,1]\")\n        if lr <= 0:\n            raise ValueError(f\"Learning rate {lr} must be positive\")\n        if weight_decay < 0:\n            raise ValueError(f\"Weight decay {weight_decay} must be non-negative\")\n        if eps < 0:\n            raise ValueError(f\"Eps must be non-negative\")\n\n        defaults = dict(\n            lr=lr,\n            eps=eps,\n            momentum=momentum,\n            weight_decay=weight_decay,\n            decoupled_decay=decoupled_decay,\n        )\n        super().__init__(params, defaults)\n\n    @property\n    def supports_memory_efficient_fp16(self) -> bool:\n        return False\n\n    @property\n    def supports_flat_params(self) -> bool:\n        return True\n\n    @torch.no_grad()\n    def step(self, closure: Optional[Callable[[], float]] = None) -> Optional[float]:\n        \"\"\"Performs a single optimization step.\n\n        Arguments:\n            closure (callable, optional): A closure that reevaluates the model and returns the loss.\n        \"\"\"\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n            eps = group['eps']\n            lr = group['lr'] + eps\n            weight_decay = group['weight_decay']\n            momentum = group['momentum']\n            ck = 1 - momentum\n\n            for p in group[\"params\"]:\n                if p.grad is None:\n                    continue\n                grad = p.grad\n                if momentum != 0.0 and grad.is_sparse:\n             
       raise RuntimeError(\"momentum != 0 is not compatible with sparse gradients\")\n\n                state = self.state[p]\n                if len(state) == 0:\n                    state['step'] = 0\n                    state['grad_sum_sq'] = torch.zeros_like(p)\n                    state['s'] = torch.zeros_like(p)\n                    if momentum != 0:\n                        state['x0'] = torch.clone(p).detach()\n\n                state['step'] += 1\n                grad_sum_sq = state['grad_sum_sq']\n                s = state['s']\n                lamb = lr * math.sqrt(state['step'])\n\n                # Apply weight decay\n                if weight_decay != 0:\n                    if group['decoupled_decay']:\n                        p.mul_(1.0 - group['lr'] * weight_decay)\n                    else:\n                        if grad.is_sparse:\n                            raise RuntimeError(\"weight_decay option is not compatible with sparse gradients\")\n                        grad.add_(p, alpha=weight_decay)\n\n                if grad.is_sparse:\n                    grad = grad.coalesce()\n                    grad_val = grad._values()\n\n                    p_masked = p.sparse_mask(grad)\n                    grad_sum_sq_masked = grad_sum_sq.sparse_mask(grad)\n                    s_masked = s.sparse_mask(grad)\n\n                    # Compute x_0 from other known quantities\n                    rms_masked_vals = grad_sum_sq_masked._values().pow(1 / 3).add_(eps)\n                    x0_masked_vals = p_masked._values().addcdiv(s_masked._values(), rms_masked_vals, value=1)\n\n                    # Dense + sparse op\n                    grad_sq = grad * grad\n                    grad_sum_sq.add_(grad_sq, alpha=lamb)\n                    grad_sum_sq_masked.add_(grad_sq, alpha=lamb)\n\n                    rms_masked_vals = grad_sum_sq_masked._values().pow_(1 / 3).add_(eps)\n\n                    s.add_(grad, alpha=lamb)\n                    
s_masked._values().add_(grad_val, alpha=lamb)\n\n                    # update masked copy of p\n                    p_kp1_masked_vals = x0_masked_vals.addcdiv(s_masked._values(), rms_masked_vals, value=-1)\n                    # Copy updated masked p to dense p using an add operation\n                    p_masked._values().add_(p_kp1_masked_vals, alpha=-1)\n                    p.add_(p_masked, alpha=-1)\n                else:\n                    if momentum == 0:\n                        # Compute x_0 from other known quantities\n                        rms = grad_sum_sq.pow(1 / 3).add_(eps)\n                        x0 = p.addcdiv(s, rms, value=1)\n                    else:\n                        x0 = state['x0']\n\n                    # Accumulate second moments\n                    grad_sum_sq.addcmul_(grad, grad, value=lamb)\n                    rms = grad_sum_sq.pow(1 / 3).add_(eps)\n\n                    # Update s\n                    s.add_(grad, alpha=lamb)\n\n                    # Step\n                    if momentum == 0:\n                        p.copy_(x0.addcdiv(s, rms, value=-1))\n                    else:\n                        z = x0.addcdiv(s, rms, value=-1)\n\n                        # p is a moving average of z\n                        p.mul_(1 - ck).add_(z, alpha=ck)\n\n        return loss\n"
  },
  {
    "path": "timm/optim/mars.py",
    "content": "\"\"\" PyTorch MARS Optimizer\n\nCode simplified from https://github.com/AGI-Arena/MARS\n\nPaper: MARS: Unleashing the Power of Variance Reduction for Training Large Models - https://arxiv.org/abs/2411.10438\n\n@article{yuan2024mars,\n  title={MARS: Unleashing the Power of Variance Reduction for Training Large Models},\n  author={Yuan, Huizhuo and Liu, Yifeng and Wu, Shuang and Zhou, Xun and Gu, Quanquan},\n  journal={arXiv preprint arXiv:2411.10438},\n  year={2024}\n}\n\"\"\"\n# Copyright (c) 2024 Bytedance Ltd. and/or its affiliates\n# SPDX-License-Identifier: Apache-2.0\nimport math\nfrom typing import Optional, Tuple\n\nimport torch\nfrom torch.optim.optimizer import Optimizer\n\nfrom ._types import ParamsT\n\n\ndef _mars_single_tensor_step(\n        p: torch.Tensor,\n        grad: torch.Tensor,\n        exp_avg: torch.Tensor,\n        exp_avg_sq: torch.Tensor,\n        lr: float,\n        weight_decay: float,\n        beta1: float,\n        beta2: float,\n        last_grad: torch.Tensor,\n        eps: float,\n        step: int,\n        gamma: float,\n        mars_type: str,\n        is_grad_2d: bool,\n        optimize_1d: bool,\n        lr_1d_factor: bool,\n        betas_1d: Tuple[float, float],\n        caution: bool,\n):\n    # optimize_1d ==> use MARS for 1d param, else use AdamW\n    if optimize_1d or is_grad_2d:\n        one_minus_beta1 = 1. 
- beta1\n        if step == 1:\n            # this is a timm addition, making first step more consistent when no grad history, otherwise tests fail\n            c_t = grad\n        else:\n            c_t = (grad - last_grad).mul_(gamma * (beta1 / one_minus_beta1)).add_(grad)\n            c_t_norm = torch.norm(c_t)\n            if c_t_norm > 1.:\n                c_t = c_t / c_t_norm\n        exp_avg.mul_(beta1).add_(c_t, alpha=one_minus_beta1)\n\n        if caution:\n            # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n            mask = (exp_avg * grad > 0).to(grad.dtype)\n            mask.div_(mask.mean().clamp_(min=1e-3))\n            exp_avg = exp_avg * mask\n\n        if mars_type == \"adamw\":\n            exp_avg_sq.mul_(beta2).addcmul_(c_t, c_t, value=1. - beta2)\n            bias_correction1 = 1.0 - beta1 ** step\n            bias_correction2 = 1.0 - beta2 ** step\n            denom = (exp_avg_sq.sqrt() / math.sqrt(bias_correction2)).add_(eps)\n            update = p * weight_decay + (exp_avg / bias_correction1).div_(denom)\n        elif mars_type == \"lion\":\n            update = p * weight_decay + exp_avg.sign()\n        else:\n            assert False\n        p.add_(update, alpha=-lr)\n    else:\n        beta1_1d, beta2_1d = betas_1d\n        exp_avg.mul_(beta1_1d).add_(grad, alpha=1. - beta1_1d)\n        exp_avg_sq.mul_(beta2_1d).addcmul_(grad, grad, value=1. 
- beta2_1d)\n        bias_correction1 = 1.0 - beta1_1d ** step\n        bias_correction2 = 1.0 - beta2_1d ** step\n        denom = (exp_avg_sq.sqrt() / math.sqrt(bias_correction2)).add_(eps)\n        if caution:\n            mask = (exp_avg * grad > 0).to(grad.dtype)\n            mask.div_(mask.mean().clamp_(min=1e-3))\n            exp_avg = exp_avg * mask\n        update = p * weight_decay + (exp_avg / bias_correction1).div_(denom)\n        p.add_(update, alpha=-(lr * lr_1d_factor))\n    return exp_avg, exp_avg_sq\n\n\nclass Mars(Optimizer):\n    \"\"\" MARS Optimizer\n\n    Paper: MARS: Unleashing the Power of Variance Reduction for Training Large Models\n        https://arxiv.org/abs/2411.10438\n\n    \"\"\"\n    def __init__(\n            self,\n            params: ParamsT,\n            lr: float = 3e-3,\n            betas: Tuple[float, float] = (0.9, 0.99),\n            eps: float = 1e-8,\n            weight_decay: float = 0.,\n            gamma: float = 0.025,\n            mars_type: str = \"adamw\",\n            optimize_1d: bool = False,\n            lr_1d_factor: float = 1.0,\n            betas_1d: Optional[Tuple[float, float]] = None,\n            caution: bool = False\n    ):\n        if not 0.0 <= lr:\n            raise ValueError(\"Invalid learning rate: {}\".format(lr))\n        if not 0.0 <= eps:\n            raise ValueError(\"Invalid epsilon value: {}\".format(eps))\n        if not 0.0 <= betas[0] < 1.0:\n            raise ValueError(\"Invalid beta parameter at index 0: {}\".format(betas[0]))\n        if not 0.0 <= betas[1] < 1.0:\n            raise ValueError(\"Invalid beta parameter at index 1: {}\".format(betas[1]))\n        assert mars_type in [\"adamw\", \"lion\"], \"MARS type not supported\"\n\n        defaults = dict(\n            lr=lr,\n            betas=betas,\n            eps=eps,\n            weight_decay=weight_decay,\n            mars_type=mars_type,\n            gamma=gamma,\n            optimize_1d=optimize_1d,\n            
lr_1d_factor=lr_1d_factor,\n            betas_1d=betas_1d or betas,\n            caution=caution,\n        )\n        super(Mars, self).__init__(params, defaults)\n\n    def __setstate__(self, state):\n        super(Mars, self).__setstate__(state)\n        for group in self.param_groups:\n            group.setdefault('caution', False)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        \"\"\"Performs a single optimization step.\n\n        Arguments:\n            closure (callable, optional): A closure that reevaluates the model\n                and returns the loss.\n        \"\"\"\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n                grad = p.grad\n                if grad.is_sparse:\n                    raise RuntimeError('Adam does not support sparse gradients, please consider SparseAdam instead')\n\n                state = self.state[p]\n                # State initialization\n                if len(state) <= 1:\n                    state['step'] = 0\n                    # Exponential moving average of gradient values\n                    state['exp_avg'] = torch.zeros_like(p)\n                    # Last Gradient\n                    state['last_grad'] = torch.zeros_like(p)\n                    # Exponential moving average of squared gradient values\n                    state['exp_avg_sq'] = torch.zeros_like(p)\n\n                state['step'] += 1\n                step = state['step']\n                exp_avg = state['exp_avg']\n                exp_avg_sq = state['exp_avg_sq']\n                last_grad = state['last_grad']\n                lr = group['lr']\n                wd = group['weight_decay']\n                beta1, beta2 = group['betas']\n                is_grad_2d = grad.ndim >= 2\n\n                # 
FIXME add multi-tensor (if usage warrants), make more standard\n                _mars_single_tensor_step(\n                    p,\n                    grad,\n                    exp_avg,\n                    exp_avg_sq,\n                    lr,\n                    wd,\n                    beta1,\n                    beta2,\n                    last_grad,\n                    group['eps'],\n                    step,\n                    group['gamma'],\n                    mars_type=group['mars_type'],\n                    is_grad_2d=is_grad_2d,\n                    optimize_1d=group['optimize_1d'],\n                    lr_1d_factor=group['lr_1d_factor'],\n                    betas_1d=group['betas_1d'],\n                    caution=group['caution'],\n                )\n\n                state['last_grad'] = grad\n\n        return loss\n"
  },
  {
    "path": "timm/optim/muon.py",
    "content": "\"\"\" Muon Optimizer\n\nImproved Muon optimizer implementation with flexible handling of high-dimensional tensors.\n\nCombines PyTorch-style structure with options for:\n- Batched spatial processing for convolutions in addition to flatten\n- Optional spatial normalization\n- Selectable coefficient presets\n- Automatic fallback to AdamW for 1D / scalar parameters (biases, norms, etc.) and optional fallback via param groups\n- AdaMuon (https://arxiv.org/abs/2507.11005)\n- mUP eps damping factor (https://arxiv.org/abs/2512.05620v1)\n\nTODO look into mUP LR scaling and independent weight-decay scale\n\nBased on implementation by Keller Jordan, see\n- https://github.com/KellerJordan/Muon/blob/master/muon.py\n- https://github.com/KellerJordan/modded-nanogpt/blob/master/train_gpt.py\n- https://github.com/KellerJordan/modded-nanogpt/blob/master/train_gpt_medium.py\n- https://github.com/NoahAmsel/PolarExpress/blob/main/polar_express.py\n\nHacked together by Ross Wightman\n\"\"\"\nimport logging\nimport numbers\nfrom typing import List, Mapping, Optional, Sequence, Tuple, Union\n\nimport torch\ntry:\n    from torch.distributed.tensor import DTensor\n    has_dtensor = True\nexcept ImportError:\n    has_dtensor = False\n\nfrom ._types import ParamsT\nfrom .adamw import adamw\nfrom .nadamw import nadamw\n\n_logger = logging.getLogger(__name__)\n\n# Constants from Keller Jordan's Muon\nMUON_EPS = 1e-7\nDEFAULT_NS_STEPS = 5\n\n_COEFFICIENTS = {\n    \"original\": [\n        # Keller Jordan's Muon https://kellerjordan.github.io/posts/muon/\n        (3.4445, -4.7750, 2.0315),\n    ],\n    \"quintic\": [\n        # https://leloykun.github.io/ponder/muon-opt-coeffs/#how-do-we-optimize-the-coefficients\n        # From https://github.com/KellerJordan/modded-nanogpt/blob/master/train_gpt_medium.py#L44\n        (4.0848, -6.8946, 2.9270),\n        (3.9505, -6.3029, 2.6377),\n        (3.7418, -5.5913, 2.3037),\n        (2.8769, -3.1427, 1.2046),\n        (2.8366, -3.0525, 
1.2012),\n    ],\n    \"polar_express\": [\n        # Polar Express https://arxiv.org/abs/2505.16932\n        # From https://github.com/NoahAmsel/PolarExpress/tree/main with safety 1e-2\n        (8.237312490495555, -23.157747414558198, 16.680568411445915),\n        (4.082441999064835, -2.893047735332586, 0.5252849256975648),\n        (3.9263479922546582, -2.8547468034765298, 0.5318022422894988),\n        (3.2982187133085143, -2.424541981026706, 0.48632008358844075),\n        (2.2970369434552573, -1.63662558125903, 0.4002628455953627),\n        (1.8763805351440397, -1.2347896577722228, 0.35891887501668385),\n        (1.8564423485617974, -1.2132449880935525, 0.3568003487825883),\n        (1.8749994008682747, -1.2499988017229169, 0.3749994008546422),\n    ],\n    \"polar_express_safer\": [\n        # from https://github.com/KellerJordan/modded-nanogpt/blob/master/train_gpt.py\n        # w/ safety 2e-2\n        (8.156554524902461, -22.48329292557795, 15.878769915207462),\n        (4.0429299351667245, -2.808917465908704, 0.5000178451051299),\n        (3.8916678022926563, -2.7724841532176825, 0.5060648178503389),\n        (3.285753657755658, -2.3681294933425394, 0.46449024233003117),\n        (2.3005307116270983, -1.6111665557258408, 0.3833374427545273),\n        (1.8631210546382593, -1.2042160621002727, 0.3421879560523383),\n        (1.8382572152247512, -1.1779263289537742, 0.3396513038637379),\n        (1.8749999923301852, -1.2499999836060613, 0.374999991275876),\n    ],\n}\n\n\nNSCoeff = Union[str, Tuple[float, float, float], List[Tuple[float, float, float]]]\n\n\ndef scale_eps_for_ns(\n        eps: float,\n        shape: Tuple[int, ...],\n) -> float:\n    \"\"\"Scale epsilon for Newton-Schulz based on matrix dimensions (μP-style).\n\n    For μP compatibility, epsilon should scale as eps * sqrt(din/dout) to maintain\n    consistent damping behavior across different model widths.\n\n    Reference: https://arxiv.org/abs/2512.05620\n\n    Args:\n        eps: Base epsilon 
value\n        shape: Shape of the matrix (out, in) or (batch, out, in)\n\n    Returns:\n        Scaled epsilon value\n    \"\"\"\n    # Get din, dout from shape (handle both 2D and 3D batched)\n    # FIXME TBD paper includes depth in the damping scale, e.g: eps * (din / dout) ** 0.5 / N\n    dout, din = (shape[-2], shape[-1])\n    return eps * (din / dout) ** 0.5\n\n\ndef zeropower_via_newtonschulz(\n        G: torch.Tensor,\n        steps: int,\n        coefficients: List[Tuple[float, float, float]],\n        eps: float = MUON_EPS,\n        safety_factor: float = 1.0,\n        dtype: torch.dtype = torch.bfloat16,\n        scale_eps: bool = False,\n) -> torch.Tensor:\n    \"\"\"Newton-Schulz quintic iteration to compute the zeroth power / orthogonalization of gradient.\n\n    Supports batched operation over leading dimensions.\n\n    See\n    - https://github.com/KellerJordan/Muon/blob/master/muon.py\n    - https://github.com/NoahAmsel/PolarExpress/blob/main/polar_express.py\n    - https://github.com/KellerJordan/modded-nanogpt/blob/master/train_gpt.py\n\n    Args:\n        G: Input gradient tensor of shape (m, n) or (batch, m, n)\n        steps: Number of Newton-Schulz iterations\n        coefficients: Coefficients (a, b, c) for the iteration\n        eps: Numerical stability epsilon for norm\n        safety_factor: Multiplicative safety factor for norm (1.01 is common safety value in 'polar express' variants)\n        dtype: Computation dtype\n        scale_eps: If True, scale epsilon by sqrt(din/dout) for μP compatibility\n\n    Returns:\n        Orthogonalized tensor of same shape as G\n    \"\"\"\n    assert G.ndim in (2, 3), f\"Input must be 2D or 3D, got {G.ndim}D. 
Flatten batch dims first.\"\n    num_cs = len(coefficients)\n    assert num_cs >= 1 and len(coefficients[0]) == 3\n    # match coefficients with # of steps, truncate or repeat last\n    coeff_sequence = coefficients[:steps] if steps <= num_cs else \\\n        coefficients + [coefficients[-1]] * (steps - num_cs)\n\n    # Scale epsilon by sqrt(din/dout) for μP compatibility if requested\n    if scale_eps:\n        eps = scale_eps_for_ns(eps, G.shape)\n\n    X = G.to(dtype=dtype, copy=True)\n\n    # Transpose if needed (operate on dimension with fewer elements)\n    transposed = X.size(-2) > X.size(-1)\n    if transposed:\n        X = X.mT\n\n    # Normalize spectral norm to at most 1\n    if scale_eps:\n        # more of a damping factor in this case, use add instead of clamp\n        X.div_(X.norm(2, dim=(-2, -1), keepdim=True).mul(safety_factor).add_(eps))\n    else:\n        X.div_(X.norm(2, dim=(-2, -1), keepdim=True).mul(safety_factor).clamp_(min=eps))\n\n    is_dtensor = has_dtensor and isinstance(G, DTensor)\n    if is_dtensor:\n        # Basic, DTensor-friendly Newton-Schulz\n        for a, b, c in coeff_sequence:\n            A = X @ X.mT\n            B = b * A + c * (A @ A)\n            X = a * X + (B @ X)\n    else:\n        # Fast prealloc/out= path\n\n        # Batched vs unbatched fused MM\n        mm_fn = torch.baddbmm if X.ndim > 2 else torch.addmm\n\n        # Pre-allocate\n        X = X.contiguous()\n        A = torch.empty((*X.shape[:-1], X.size(-2)), device=X.device, dtype=X.dtype)\n        B = torch.empty_like(A)\n        C = torch.empty_like(X)\n\n        # Perform Newton-Schulz iterations\n        for a, b, c in coeff_sequence:\n            mm_fn(A, X, X.mT, beta=0.0, alpha=1.0, out=A)  # A = X @ X.mT\n            mm_fn(A, A, A, beta=b, alpha=c, out=B)  # B = b * A + c * A @ A\n            mm_fn(X, B, X, beta=a, alpha=1.0, out=C)  # C = a * X + B @ X\n            X, C = C, X  # swap refs to avoid copy\n\n    if transposed:\n        X = X.mT\n\n 
   return X\n\n\ndef get_lr_scale(\n        param_shape: torch.Size,\n        adjust_lr_fn: str = \"match_rms_adamw\",\n) -> float:\n    \"\"\"Adjust learning rate based on parameter shape for Muon.\n\n    Args:\n        param_shape: Shape of the parameter tensor\n        adjust_lr_fn: Scaling function name\n            - \"original\": sqrt(max(1, out/in)) - Original Muon impl\n            - \"match_rms_adamw\": 0.2 * sqrt(max(out, in)) - Kimi scaling\n            - \"rms_to_rms\": sqrt(out/in) - Scion/Bernstein scaling\n    \"\"\"\n    out_chs, in_chs = (param_shape[-2], param_shape[-1]) if len(param_shape) > 1 else (1., 1.)\n\n    if adjust_lr_fn == \"original\":\n        # Original Muon impl (https://kellerjordan.github.io/posts/muon/)\n        return max(1, out_chs / in_chs) ** 0.5\n    elif adjust_lr_fn == \"match_rms_adamw\":\n        # Kimi (https://arxiv.org/abs/2502.16982)\n        return 0.2 * max(out_chs, in_chs) ** 0.5\n    elif adjust_lr_fn == \"rms_to_rms\":\n        # Scion (https://arxiv.org/abs/2502.07529, https://github.com/LIONS-EPFL/scion)\n        # Bernstein et al. 
(https://jeremybernste.in/writing/deriving-muon)\n        return (out_chs / in_chs) ** 0.5\n    else:\n        assert False, f'Invalid scaling function \"{adjust_lr_fn}\" for Muon'\n\n\ndef get_adamuon_lr_scale(\n        param_shape: torch.Size,\n        adjust_lr_fn: str = \"match_rms_adamw\",\n) -> Tuple[float, bool]:\n    \"\"\"Adjust learning rate based on parameter shape for AdaMuon.\n\n    Args:\n        param_shape: Shape of the parameter tensor\n        adjust_lr_fn: Scaling function name\n\n    Returns:\n        Tuple of (scale_factor, use_rms_norm)\n    \"\"\"\n    out_chs, in_chs = (param_shape[-2], param_shape[-1]) if len(param_shape) > 1 else (1., 1.)\n\n    if adjust_lr_fn == \"match_rms_adamw\":\n        # AdaMuon paper: normalize by RMS, then scale by 0.2 * sqrt(numel)\n        # https://arxiv.org/abs/2507.11005\n        return 0.2 * (out_chs * in_chs) ** 0.5, True\n    elif adjust_lr_fn == \"rms_to_rms\":\n        return (out_chs / in_chs) ** 0.5, False\n    elif adjust_lr_fn == \"rsqrt_in\":\n        return in_chs ** -0.5, False\n    else:\n        assert False, f'Invalid scaling function \"{adjust_lr_fn}\" for AdaMuon'\n\n\ndef _is_suitable_for_muon(\n        param: torch.Tensor,\n        min_dim_size: int = 4,\n        max_aspect_ratio: float = 128.,\n        return_reason: bool = False,\n) -> Union[bool, Tuple[bool, str]]:\n    \"\"\"Check if a parameter is suitable for Muon optimization.\n\n    Args:\n        param: Parameter tensor\n        min_dim_size: Minimum size for non-unit dimensions\n        max_aspect_ratio: Maximum allowed aspect ratio\n        return_reason: If True, return (bool, reason_string), else just bool (faster)\n\n    Returns:\n        If return_reason=False: bool indicating suitability\n        If return_reason=True: Tuple of (is_suitable, reason_string)\n\n    Examples:\n        (64, 128) -> True (or (True, \"ok\") if return_reason=True)\n        (96, 3, 4, 4) -> True - will be flattened to (96, 48)\n        (4, 2048) -> 
False - extreme aspect ratio\n        (64,) -> False - insufficient dims\n        (1, 196, 768) -> False - leading unit dims\n\n    NOTE: these rules were created to balance complexity with covering common timm model cases\n    Please let me know if there are non-optimal cases that you run into.\n    \"\"\"\n\n    s = param.shape\n    # Must have at least 2 non-unit dimensions\n    if param.ndim < 2 or sum(1 for dim_size in s if dim_size > 1) < 2:\n        return (False, \"insufficient_dims\") if return_reason else False\n\n    # Unit dimension in first two positions indicates:\n    # - Position embeddings (1, seq, dim)\n    # - Depthwise convs (out, 1, h, w)\n    # - Other degenerate cases possibly not caught by first rule\n    if s[0] == 1 or s[1] == 1:\n        return (False, \"leading_unit_dims\") if return_reason else False\n\n    if param.ndim >= 3:\n        # For 3D+ tensors, check what dimensions will be AFTER flattening\n        # since that's what gets passed to Newton-Schulz iteration\n        # Flatten mode: (out, in, *spatial) -> (out, in * spatial_prod)\n        out_ch = s[0]\n        in_ch_with_spatial = 1\n        for d in s[1:]:\n            in_ch_with_spatial *= d\n        check_dims = (out_ch, in_ch_with_spatial)\n    else:\n        # For 2D tensors, check as-is\n        check_dims = s\n\n    # Both dims should be >= minimum size\n    min_size = min(check_dims)\n    if min_size < min_dim_size:\n        if return_reason:\n            return False, f\"min_dim_too_small:{min_size}\"\n        return False\n\n    # Aspect ratio shouldn't be too extreme\n    max_size = max(check_dims)\n    aspect_ratio = max_size / min_size\n    if aspect_ratio > max_aspect_ratio:\n        if return_reason:\n            return False, f\"extreme_aspect_ratio:{aspect_ratio:.1f}\"\n        return False\n\n    return (True, \"ok\") if return_reason else True\n\n\ndef reshape_for_muon(\n        tensor: torch.Tensor,\n        mode: str = \"flatten\",\n) -> 
Tuple[torch.Tensor, torch.Size]:\n    \"\"\"Reshape high-dimensional tensor for Muon processing.\n\n    Args:\n        tensor: Input tensor of shape (out, in, *spatial)\n        mode: How to handle spatial dimensions\n            - \"flatten\": Flatten spatial into output dimension (out, in*H*W)\n            - \"batched\": Batch over spatial positions (spatial_prod, out, in) for per-position orthogonalization\n\n    Returns:\n        Reshaped tensor and original shape for restoration\n    \"\"\"\n    original_shape = tensor.shape\n    if tensor.ndim == 2:\n        return tensor, original_shape\n    if tensor.ndim < 2:\n        raise ValueError(f\"Tensor must have at least 2 dimensions, got {tensor.ndim}\")\n\n    out_ch, in_ch = tensor.shape[:2]\n    if mode == \"flatten\":\n        # Flatten: (out, in, *spatial) -> (out, in * spatial_prod)\n        return tensor.reshape(out_ch, -1), original_shape\n    elif mode == \"batched\":\n        # Batched: (out, in, *spatial) -> (spatial_prod, out, in)\n        # Move spatial dimension to front so zeropower_via_newtonschulz batches over it\n        reshaped = tensor.reshape(out_ch, in_ch, -1)  # (out, in, spatial_prod)\n        reshaped = reshaped.permute(2, 0, 1)  # (spatial_prod, out, in)\n        return reshaped, original_shape\n    else:\n        raise ValueError(f\"Unknown mode: {mode}\")\n\n\ndef muon(\n        params: List[torch.Tensor],\n        grads: List[torch.Tensor],\n        momentum_bufs: List[torch.Tensor],\n        *,\n        lr: float,\n        weight_decay: float,\n        momentum: float,\n        nesterov: bool,\n        ns_steps: int,\n        ns_coefficients: NSCoeff,\n        eps: float,\n        safety_factor: float,\n        adjust_lr_fn: Optional[str],\n        conv_mode: str,\n        normalize_spatial: bool,\n        scale_eps: bool,\n) -> None:\n    \"\"\"Functional API that performs Muon algorithm computation.\"\"\"\n    _single_tensor_muon(\n        params,\n        grads,\n        
momentum_bufs,\n        lr=lr,\n        weight_decay=weight_decay,\n        momentum=momentum,\n        nesterov=nesterov,\n        ns_steps=ns_steps,\n        ns_coefficients=ns_coefficients,\n        eps=eps,\n        safety_factor=safety_factor,\n        adjust_lr_fn=adjust_lr_fn,\n        conv_mode=conv_mode,\n        normalize_spatial=normalize_spatial,\n        scale_eps=scale_eps,\n    )\n\n\ndef adamuon(\n        params: List[torch.Tensor],\n        grads: List[torch.Tensor],\n        momentum_bufs: List[torch.Tensor],\n        exp_avg_sqs: List[torch.Tensor],\n        state_steps: List[torch.Tensor],\n        *,\n        lr: float,\n        weight_decay: float,\n        momentum: float,\n        nesterov: bool,\n        beta2: float,\n        ns_steps: int,\n        ns_coefficients: NSCoeff,\n        eps: float,\n        safety_factor: float,\n        adjust_lr_fn: Optional[str],\n        conv_mode: str,\n        normalize_spatial: bool,\n        scale_eps: bool,\n) -> None:\n    \"\"\"Functional API that performs AdaMuon algorithm computation.\n\n    AdaMuon extends Muon with element-wise second moment estimation applied\n    to orthogonalized update directions, providing Adam-like adaptive scaling\n    while preserving Muon's geometric benefits.\n\n    Reference: https://arxiv.org/abs/2507.11005\n    \"\"\"\n    _single_tensor_adamuon(\n        params,\n        grads,\n        momentum_bufs,\n        exp_avg_sqs,\n        state_steps,\n        lr=lr,\n        weight_decay=weight_decay,\n        momentum=momentum,\n        nesterov=nesterov,\n        beta2=beta2,\n        ns_steps=ns_steps,\n        ns_coefficients=ns_coefficients,\n        eps=eps,\n        safety_factor=safety_factor,\n        adjust_lr_fn=adjust_lr_fn,\n        conv_mode=conv_mode,\n        normalize_spatial=normalize_spatial,\n        scale_eps=scale_eps,\n    )\n\n\ndef _single_tensor_muon(\n        params: List[torch.Tensor],\n        grads: List[torch.Tensor],\n        
momentum_bufs: List[torch.Tensor],\n        *,\n        lr: float,\n        weight_decay: float,\n        momentum: float,\n        nesterov: bool,\n        ns_steps: int,\n        ns_coefficients: NSCoeff,\n        eps: float,\n        safety_factor: float,\n        adjust_lr_fn: Optional[str],\n        conv_mode: str,\n        normalize_spatial: bool,\n        scale_eps: bool,\n) -> None:\n    \"\"\"Single tensor Muon update.\"\"\"\n    ns_coefficients = resolve_ns_coefficients(ns_coefficients, _COEFFICIENTS)\n\n    for i, param in enumerate(params):\n        grad = grads[i]\n        momentum_buf = momentum_bufs[i]\n\n        # Apply weight decay\n        param.mul_(1 - lr * weight_decay)\n\n        # Update momentum buffer\n        momentum_buf.lerp_(grad, 1. - momentum)\n        update = grad.lerp_(momentum_buf, momentum) if nesterov else momentum_buf.clone()\n\n        # Reshape for processing (handle 3D+ tensors like conv weights)\n        if update.ndim >= 3:\n            update_reshaped, original_shape = reshape_for_muon(update, mode=conv_mode)\n        else:\n            update_reshaped = update\n            original_shape = update.shape\n\n        # Apply Newton-Schulz orthogonalization\n        update_ortho = zeropower_via_newtonschulz(\n            update_reshaped,\n            ns_steps,\n            ns_coefficients,\n            eps=eps,\n            safety_factor=safety_factor,\n            scale_eps=scale_eps,\n        )\n\n        # Adjust learning rate based on parameter shape\n        if adjust_lr_fn:\n            scale = get_lr_scale(update_ortho.shape, adjust_lr_fn)\n        else:\n            scale = 1.0\n\n        # Apply spatial normalization and permute back if in batched mode\n        if conv_mode == \"batched\" and update_ortho.ndim >= 3:\n            if normalize_spatial:\n                scale *= update_ortho.shape[0] ** -0.5\n            # Permute back: (spatial_prod, out, in) -> (out, in, spatial_prod)\n            update_ortho = 
update_ortho.permute(1, 2, 0)\n\n        # Reshape back to original shape\n        update_ortho = update_ortho.reshape(original_shape)\n\n        # Apply update\n        param.add_(update_ortho, alpha=-lr * scale)\n\n\ndef _single_tensor_adamuon(\n        params: List[torch.Tensor],\n        grads: List[torch.Tensor],\n        momentum_bufs: List[torch.Tensor],\n        exp_avg_sqs: List[torch.Tensor],\n        state_steps: List[torch.Tensor],\n        *,\n        lr: float,\n        weight_decay: float,\n        momentum: float,\n        nesterov: bool,\n        beta2: float,\n        ns_steps: int,\n        ns_coefficients: NSCoeff,\n        eps: float,\n        safety_factor: float,\n        adjust_lr_fn: Optional[str],\n        conv_mode: str,\n        normalize_spatial: bool,\n        scale_eps: bool,\n) -> None:\n    \"\"\"Single tensor AdaMuon update.\n\n    AdaMuon applies second-moment estimation to the orthogonalized directions,\n    then rescales using RMS-alignment to maintain stable step sizes.\n\n    Algorithm:\n        1. Update momentum buffer: M = β₁·M + (1-β₁)·G\n        2. Orthogonalize: O = Newton-Schulz(M) or Newton-Schulz(nesterov_update)\n        3. Update second moment: v = β₂·v + (1-β₂)·O²\n        4. Bias correct: v̂ = v/(1-β₂^t)\n        5. Adaptive scaling: Ô = O / (√v̂ + ε)\n        6. RMS-aligned rescaling and apply update\n    \"\"\"\n    ns_coefficients = resolve_ns_coefficients(ns_coefficients, _COEFFICIENTS)\n\n    for i, param in enumerate(params):\n        grad = grads[i]\n        momentum_buf = momentum_bufs[i]\n        exp_avg_sq = exp_avg_sqs[i]\n        step_t = state_steps[i]\n\n        # Increment step\n        step_t += 1\n        step = step_t.item()\n\n        # Apply weight decay (decoupled)\n        param.mul_(1 - lr * weight_decay)\n\n        # Update momentum buffer\n        momentum_buf.lerp_(grad, 1. 
- momentum)\n        update = grad.lerp_(momentum_buf, momentum) if nesterov else momentum_buf.clone()\n\n        # Reshape for processing (handle 3D+ tensors like conv weights)\n        if update.ndim >= 3:\n            update_reshaped, original_shape = reshape_for_muon(update, mode=conv_mode)\n        else:\n            update_reshaped = update\n            original_shape = update.shape\n\n        # Apply Newton-Schulz orthogonalization\n        update_ortho = zeropower_via_newtonschulz(\n            update_reshaped,\n            ns_steps,\n            ns_coefficients,\n            eps=eps,\n            safety_factor=safety_factor,\n            scale_eps=scale_eps,\n        )\n\n        # Reshape back to original shape for second moment tracking\n        if conv_mode == \"batched\" and update_ortho.ndim >= 3:\n            # Permute back: (spatial_prod, out, in) -> (out, in, spatial_prod)\n            update_ortho = update_ortho.permute(1, 2, 0)\n        update_ortho = update_ortho.reshape(original_shape)\n\n        # Update second moment on orthogonalized directions (element-wise)\n        exp_avg_sq.mul_(beta2).addcmul_(update_ortho, update_ortho, value=1.0 - beta2)\n\n        # Get shape-based LR scaling and whether to apply RMS normalization\n        if adjust_lr_fn:\n            scale, use_rms_norm = get_adamuon_lr_scale(update_ortho.shape, adjust_lr_fn)\n        else:\n            scale, use_rms_norm = 1.0, False\n\n        if use_rms_norm:\n            # Bias correction not needed if scaling by norm\n            denom = exp_avg_sq.sqrt().add_(eps)\n        else:\n            # Bias correction for second moment\n            bias_correction2 = 1.0 - beta2 ** step\n            denom = (exp_avg_sq / bias_correction2).sqrt().add_(eps)\n\n        # Adaptive scaling: divide by sqrt of bias-corrected second moment\n        # This is the key AdaMuon modification\n        update_adaptive = update_ortho / denom\n\n        # RMS-aligned rescaling: normalize by update 
norm, then scale by shape factor
        # Used by AdaMuon paper approach (match_rms_adamw), not by μP approach (rms_to_rms)
        if use_rms_norm:
            # eq(8) in AdaMuon paper, 0.2 / RMS(update) = 0.2 * sqrt(ndim) / frob(update)
            update_norm = update_adaptive.norm().add_(eps)
            update_adaptive = update_adaptive / update_norm

        # Apply spatial normalization if in batched mode
        if conv_mode == "batched" and len(original_shape) >= 3:
            if normalize_spatial:
                spatial_prod = 1
                for d in original_shape[2:]:
                    spatial_prod *= d
                scale *= spatial_prod ** -0.5

        # Apply update
        param.add_(update_adaptive, alpha=-lr * scale)


class Muon(torch.optim.Optimizer):
    """Muon - MomentUm Orthogonalized by Newton-schulz

    Combines Muon for 2D+ parameters (weight matrices) with AdamW for 1D parameters (biases, norms) and
    parameter groups with 'use_fallback=True' set (or 'use_muon=False' for compatibility).

    Supports two algorithms:
    - "muon": Standard Muon algorithm with momentum + orthogonalization
    - "adamuon": AdaMuon algorithm that adds element-wise second moment estimation
                 to orthogonalized directions for Adam-like adaptive scaling
    """

    def __init__(
            self,
            params: ParamsT,
            lr: float = 0.02,
            weight_decay: float = 0,
            momentum: float = 0.95,
            nesterov: bool = False,
            ns_steps: int = DEFAULT_NS_STEPS,
            ns_coefficients: NSCoeff = "quintic",
            eps: float = MUON_EPS,
            safety_factor: float = 1.0,
            adjust_lr_fn: Optional[str] = "match_rms_adamw",
            conv_mode: str = "flatten",
            normalize_spatial: bool = True,
            adamw_lr: Optional[float] = None,
            betas: Tuple[float, float] = (0.9, 0.95),
            algo: str = "muon",
            scale_eps: bool = False,
            verbose: bool = False,
    ):
        """ Create Muon optimizer.
        Args:
            params: Iterable of parameters or dicts defining parameter groups
            lr: Learning rate (default: 0.02 for Muon parameters)
            weight_decay: Weight decay coefficient
            momentum: Momentum factor for Muon
            nesterov: Whether to use Nesterov momentum
            ns_steps: Number of Newton-Schulz iterations
            ns_coefficients: Coefficients for NS iteration
            eps: Numerical stability epsilon
            safety_factor: Multiplicative safety factor for NS norm
            adjust_lr_fn: LR adjustment function - "original", "match_rms_adamw", or "rms_to_rms".
                For adamuon mode, can set to None to disable (RMS rescaling handles scaling).
            conv_mode: How to handle convolutions - "flatten" or "batched"
            normalize_spatial: Whether to normalize by sqrt(spatial_size) in batched mode
            adamw_lr: Learning rate for AdamW (1D params), defaults to lr if not specified
            betas: Beta coefficients - (beta1, beta2) where beta1 is used for AdamW fallback
                and beta2 is used for both AdamW fallback and AdaMuon second moment
            algo: Algorithm - "muon" for standard Muon, "adamuon" for AdaMuon with
                adaptive second moment estimation (https://arxiv.org/abs/2507.11005)
            scale_eps: If True, scale epsilon by sqrt(din/dout) in Newton-Schulz for μP
                compatibility (https://arxiv.org/abs/2512.05620)
            verbose: Log parameter routing decisions (Muon vs AdamW)

        Example:
            ```python
            # Simple usage - automatically uses Muon for 2D+ params, AdamW for 1D
            optimizer = Muon(model.parameters(), lr=0.02)

            # Use AdaMuon algorithm for adaptive scaling
            optimizer = Muon(model.parameters(), lr=6e-4, algo="adamuon")

            # Manual control over parameter groups
            optimizer = Muon([
                {'params': weight_matrices, 'lr': 0.02},
                {'params': biases, 'use_fallback': True, 'lr': 3e-4}, # use AdamW if use_fallback=True
            ])
            ```
        """
        # Validate scalar hyper-parameters up front so a bad config fails fast.
        if not 0.0 <= lr:
            raise ValueError(f"Invalid learning rate: {lr}")
        if not 0.0 <= weight_decay:
            raise ValueError(f"Invalid weight_decay value: {weight_decay}")
        if not 0.0 <= momentum < 1.0:
            raise ValueError(f"Invalid momentum value: {momentum}")
        if not 0.0 <= eps:
            raise ValueError(f"Invalid epsilon value: {eps}")
        if conv_mode not in ["flatten", "batched"]:
            raise ValueError(f"Invalid conv_mode: {conv_mode}")
        if algo not in ["muon", "adamuon"]:
            raise ValueError(f"Invalid algo: {algo}. Must be 'muon' or 'adamuon'")

        defaults = dict(
            lr=lr,
            weight_decay=weight_decay,
            momentum=momentum,
            nesterov=nesterov,
            ns_steps=ns_steps,
            ns_coefficients=ns_coefficients,
            eps=eps,
            safety_factor=safety_factor,
            adjust_lr_fn=adjust_lr_fn,
            conv_mode=conv_mode,
            normalize_spatial=normalize_spatial,
            # Fallback (AdamW) params use their own LR; default to the Muon LR.
            adamw_lr=adamw_lr if adamw_lr is not None else lr,
            betas=betas,
            algo=algo,
            scale_eps=scale_eps,
            verbose=verbose,
        )
        super().__init__(params, defaults)

    def __setstate__(self, state):
        """Restore optimizer state, back-filling group keys absent from older checkpoints."""
        super().__setstate__(state)
        for group in self.param_groups:
            # 'algo' and 'scale_eps' were added later; checkpoints saved before
            # they existed won't have them, so supply the historical defaults.
            group.setdefault('algo', 'muon')
            group.setdefault('scale_eps', False)

    @torch.no_grad()
    def step(self, closure=None):
        """Performs a single optimization step."""
        loss = None
        if closure is not None:
            with torch.enable_grad():
                loss = closure()

        verbose = self.defaults.get("verbose", False)

        # Tracking for logging (populated on first encounter of each param)
        muon_count = 0
        adamw_count = 0
        routing_reasons = {} if verbose else None

        for group in self.param_groups:
            algo = group.get("algo", "muon")

            # Separate params into Muon and AdamW groups
            muon_params = []
            muon_grads = []
            muon_momentum_bufs = []
            # Additional state for adamuon mode
            muon_exp_avg_sqs = []
            muon_state_steps = []

            adamw_params = []
            adamw_grads = []
            adamw_exp_avgs = []
            adamw_exp_avg_sqs = []
            adamw_state_steps = []

            for p in group["params"]:
                if p.grad is None:
                    continue

                if p.grad.is_sparse:
                    raise RuntimeError("Muon does not support sparse gradients")

                state = self.state[p]

                # Determine routing on first encounter (cache in state)
                if "use_muon" not in state:
                    # Check explicit flags first (support both 'use_fallback' and 'use_muon' for compatibility)
                    reason = None
                    if group.get("use_fallback", False):
                        # use_fallback=True means use AdamW (use_muon=False)
                        state["use_muon"] = False
                        if verbose:
                            reason = "use_fallback_flag"
                    elif "use_muon" in group:
                        # Explicit use_muon flag for compatibility with other Muon implementations
                        state["use_muon"] = group["use_muon"]
                        if verbose:
                            reason = "use_muon_flag"
                    else:
                        # Check shape suitability
                        if verbose:
                            suitable, reason = _is_suitable_for_muon(p, return_reason=True)
                        else:
                            suitable = _is_suitable_for_muon(p, return_reason=False)
                        state["use_muon"] = suitable

                    # Track routing decision for logging
                    if routing_reasons is not None and reason is not None:
                        shape_str = "x".join(str(s) for s in p.shape)
                        if shape_str not in routing_reasons:
                            routing_reasons[shape_str] = []
                        routing_reasons[shape_str].append(reason)

                # Use cached routing decision
                use_muon = state["use_muon"]
                if use_muon:
                    # Collect Muon params
                    muon_params.append(p)
                    muon_grads.append(p.grad)
                    muon_count += 1

                    # State initialization for Muon/AdaMuon
                    if "momentum_buffer" not in state:
                        state["momentum_buffer"] = torch.zeros_like(p, memory_format=torch.preserve_format)
                    muon_momentum_bufs.append(state["momentum_buffer"])

                    # Additional state for adamuon mode
                    if algo == "adamuon":
                        if "step" not in state:
                            state["step"] = torch.tensor(0.)
                            state["exp_avg_sq"] = torch.zeros_like(p, memory_format=torch.preserve_format)
                        muon_exp_avg_sqs.append(state["exp_avg_sq"])
                        muon_state_steps.append(state["step"])
                else:
                    # Collect AdamW/NAdamW params
                    adamw_params.append(p)
                    adamw_grads.append(p.grad)
                    adamw_count += 1

                    # State initialization for AdamW
                    if "step" not in state:
                        state["step"] = torch.tensor(0.)
                        state["exp_avg"] = torch.zeros_like(p, memory_format=torch.preserve_format)
                        state["exp_avg_sq"] = torch.zeros_like(p, memory_format=torch.preserve_format)

                    adamw_exp_avgs.append(state["exp_avg"])
                    adamw_exp_avg_sqs.append(state["exp_avg_sq"])
                    adamw_state_steps.append(state["step"])

            # Apply Muon/AdaMuon updates
            if muon_params:
                if algo == "adamuon":
                    # beta2 of the AdamW betas is reused for AdaMuon's second moment.
                    _, beta2 = group["betas"]
                    adamuon(
                        muon_params,
                        muon_grads,
                        muon_momentum_bufs,
                        muon_exp_avg_sqs,
                        muon_state_steps,
                        lr=group["lr"],
                        weight_decay=group["weight_decay"],
                        momentum=group["momentum"],
                        nesterov=group["nesterov"],
                        beta2=beta2,
                        ns_steps=group["ns_steps"],
                        ns_coefficients=group["ns_coefficients"],
                        eps=group["eps"],
                        safety_factor=group["safety_factor"],
                        adjust_lr_fn=group["adjust_lr_fn"],
                        conv_mode=group["conv_mode"],
                        normalize_spatial=group["normalize_spatial"],
                        scale_eps=group["scale_eps"],
                    )
                else:
                    muon(
                        muon_params,
                        muon_grads,
                        muon_momentum_bufs,
                        lr=group["lr"],
                        weight_decay=group["weight_decay"],
                        momentum=group["momentum"],
                        nesterov=group["nesterov"],
                        ns_steps=group["ns_steps"],
                        ns_coefficients=group["ns_coefficients"],
                        eps=group["eps"],
                        safety_factor=group["safety_factor"],
                        adjust_lr_fn=group["adjust_lr_fn"],
                        conv_mode=group["conv_mode"],
                        normalize_spatial=group["normalize_spatial"],
                        scale_eps=group["scale_eps"],
                    )

            # Apply AdamW updates
            if adamw_params:
                beta1, beta2 = group["betas"]
                if group["nesterov"]:
                    # use nadamw for fallback optimizer if nesterov is enabled
                    nadamw(
                        adamw_params,
                        adamw_grads,
                        adamw_exp_avgs,
                        adamw_exp_avg_sqs,
                        adamw_state_steps,
                        foreach=None,
                        beta1=beta1,
                        beta2=beta2,
                        lr=group["adamw_lr"],
                        weight_decay=group["weight_decay"],
                        eps=group["eps"],
                        caution=False,
                        maximize=False,
                        capturable=False,
                        max_lr=None,
                    )
                else:
                    adamw(
                        adamw_params,
                        adamw_grads,
                        adamw_exp_avgs,
                        adamw_exp_avg_sqs,
                        [],  # max_exp_avg_sqs (not using amsgrad)
                        adamw_state_steps,
                        foreach=None,
                        amsgrad=False,
                        beta1=beta1,
                        beta2=beta2,
                        lr=group["adamw_lr"],
                        weight_decay=group["weight_decay"],
                        eps=group["eps"],
                        caution=False,
                        maximize=False,
                        capturable=False,
                        max_lr=None,
                )

        # Log routing summary when we have new routing decisions
        if routing_reasons and len(routing_reasons) > 0:
            # Concise summary
            _logger.info(f"Muon parameter routing: {muon_count} Muon, {adamw_count} AdamW")

            # Group by reason for detailed breakdown
            reason_groups = {}
            for shape_str, reasons in sorted(routing_reasons.items()):
                for reason in reasons:
                    if reason not in reason_groups:
                        reason_groups[reason] = []
                    reason_groups[reason].append(shape_str)

            # Log summary counts per reason
            reason_summary = []
            for reason, shapes in sorted(reason_groups.items()):
                reason_summary.append(f"{reason}={len(shapes)}")
            _logger.info(f"  Breakdown: {', '.join(reason_summary)}")

            # Detailed breakdown at INFO level
            if _logger.isEnabledFor(logging.INFO):
                for reason, shapes in sorted(reason_groups.items()):
                    # NOTE(review): only reason == "ok" is labeled "Muon" here, but a
                    # 'use_muon_flag' routing with use_muon=True also goes to Muon and
                    # would be mislabeled "AdamW" in this breakdown — verify intent.
                    optimizer_name = "Muon" if reason == "ok" else "AdamW"
                    _logger.info(f"    {reason} -> {optimizer_name}:")
                    for shape in shapes[:10]:
                        _logger.info(f"      {shape}")
                    if len(shapes) > 10:
                        _logger.info(f"      ... and {len(shapes) - 10} more")

        return loss


def resolve_ns_coefficients(
        value: Union[str, Sequence[float], Sequence[Sequence[float]]],
        presets: Mapping[str, Sequence[Sequence[float]]]
) -> List[Tuple[float, float, float]]:
    """Normalize a Newton-Schulz coefficient spec into a list of (a, b, c) float triples.

    Args:
        value: A preset name (a key of ``presets``), a single 3-sequence ``(a, b, c)``,
            or a sequence of 3-sequences.
        presets: Mapping of preset name -> sequence of coefficient triples.

    Returns:
        List of validated ``(a, b, c)`` tuples, all values cast to float.

    Raises:
        ValueError: Unknown preset name, empty preset/list, or malformed triple.
        TypeError: ``value`` (or one of its items) is not a sequence where one is required.
    """
    # tiny helpers (kept inline for succinctness)
    is_seq = lambda x: isinstance(x, Sequence) and not isinstance(x, (str, bytes))
    is_real = lambda x: isinstance(x, numbers.Real) and not isinstance(x, bool)

    def as_coeff(x: Sequence[float]) -> Tuple[float, float, float]:
        # Validate and cast one triple; bool is excluded by is_real above.
        if not is_seq(x) or len(x) != 3 or not all(is_real(v) for v in x):
            raise ValueError(f"Coefficient must be length-3 of real numbers, got: {x!r}")
        a, b, c = x  # type: ignore[misc]
        return float(a), float(b), float(c)

    if isinstance(value, str):
        if value not in presets:
            valid = ", ".join(sorted(presets.keys()))
            raise ValueError(f"Unknown coefficients preset '{value}'. Valid options: {valid}")
        seq = presets[value]
        if not is_seq(seq) or len(seq) == 0:
            raise ValueError(f"Preset '{value}' is empty or invalid")
        return [as_coeff(item) for item in seq]  # validate & cast

    if not is_seq(value):
        raise TypeError(
            "Coefficients must be a preset name (str), a 3-sequence (a,b,c), "
            "or a sequence of 3-sequences."
        )

    # Decide single triple vs list-of-triples by structure
    if len(value) == 3 and all(is_real(v) for v in value):  # type: ignore[index]
        return [as_coeff(value)]  # single triple -> wrap

    # Otherwise treat as list/tuple of triples
    out = []
    for i, item in enumerate(value):  # type: ignore[assignment]
        if not is_seq(item):
            raise TypeError(f"Item {i} is not a sequence: {item!r}")
        out.append(as_coeff(item))
    if not out:
        raise ValueError("Coefficient list cannot be empty")
    return out
  },
  {
    "path": "timm/optim/nadam.py",
    "content": "import math\n\nimport torch\nfrom torch.optim.optimizer import Optimizer\n\n\nclass NAdamLegacy(Optimizer):\n    \"\"\"Implements Nadam algorithm (a variant of Adam based on Nesterov momentum).\n\n    NOTE: This impl has been deprecated in favour of torch.optim.NAdam and remains as a reference\n\n    It has been proposed in `Incorporating Nesterov Momentum into Adam`__.\n\n    Arguments:\n        params (iterable): iterable of parameters to optimize or dicts defining\n            parameter groups\n        lr (float, optional): learning rate (default: 2e-3)\n        betas (Tuple[float, float], optional): coefficients used for computing\n            running averages of gradient and its square\n        eps (float, optional): term added to the denominator to improve\n            numerical stability (default: 1e-8)\n        weight_decay (float, optional): weight decay (L2 penalty) (default: 0)\n        schedule_decay (float, optional): momentum schedule decay (default: 4e-3)\n\n    __ http://cs229.stanford.edu/proj2015/054_report.pdf\n    __ http://www.cs.toronto.edu/~fritz/absps/momentum.pdf\n\n        Originally taken from: https://github.com/pytorch/pytorch/pull/1408\n        NOTE: Has potential issues but does work well on some problems.\n    \"\"\"\n\n    def __init__(\n            self,\n            params,\n            lr=2e-3,\n            betas=(0.9, 0.999),\n            eps=1e-8,\n            weight_decay=0,\n            schedule_decay=4e-3,\n    ):\n        if not 0.0 <= lr:\n            raise ValueError(\"Invalid learning rate: {}\".format(lr))\n        defaults = dict(\n            lr=lr,\n            betas=betas,\n            eps=eps,\n            weight_decay=weight_decay,\n            schedule_decay=schedule_decay,\n        )\n        super(NAdamLegacy, self).__init__(params, defaults)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        \"\"\"Performs a single optimization step.\n\n        Arguments:\n            closure 
(callable, optional): A closure that reevaluates the model\n                and returns the loss.\n        \"\"\"\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n                grad = p.grad\n                state = self.state[p]\n\n                # State initialization\n                if len(state) == 0:\n                    state['step'] = 0\n                    state['m_schedule'] = 1.\n                    state['exp_avg'] = torch.zeros_like(p)\n                    state['exp_avg_sq'] = torch.zeros_like(p)\n\n                # Warming momentum schedule\n                m_schedule = state['m_schedule']\n                schedule_decay = group['schedule_decay']\n                exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']\n                beta1, beta2 = group['betas']\n                eps = group['eps']\n                state['step'] += 1\n                t = state['step']\n                bias_correction2 = 1 - beta2 ** t\n\n                if group['weight_decay'] != 0:\n                    grad = grad.add(p, alpha=group['weight_decay'])\n\n                momentum_cache_t = beta1 * (1. - 0.5 * (0.96 ** (t * schedule_decay)))\n                momentum_cache_t_1 = beta1 * (1. - 0.5 * (0.96 ** ((t + 1) * schedule_decay)))\n                m_schedule_new = m_schedule * momentum_cache_t\n                m_schedule_next = m_schedule * momentum_cache_t * momentum_cache_t_1\n                state['m_schedule'] = m_schedule_new\n\n                # Decay the first and second moment running average coefficient\n                exp_avg.mul_(beta1).add_(grad, alpha=1. - beta1)\n                exp_avg_sq.mul_(beta2).addcmul_(grad, grad, value=1. 
- beta2)\n\n                denom = (exp_avg_sq.sqrt() / math.sqrt(bias_correction2)).add_(eps)\n                p.addcdiv_(grad, denom, value=-group['lr'] * (1. - momentum_cache_t) / (1. - m_schedule_new))\n                p.addcdiv_(exp_avg, denom, value=-group['lr'] * momentum_cache_t_1 / (1. - m_schedule_next))\n\n        return loss\n"
  },
  {
    "path": "timm/optim/nadamw.py",
    "content": "\"\"\" NAdamW Optimizer\n\nBased on simplified algorithm in https://github.com/mlcommons/algorithmic-efficiency/tree/main/baselines/nadamw\n\nAdded multi-tensor (foreach) path.\n\nReferences for added functionality:\n    Cautious Optimizers: https://arxiv.org/abs/2411.16085\n    Why Gradients Rapidly Increase Near the End of Training: https://arxiv.org/abs/2506.02285\n\"\"\"\nimport math\nfrom typing import List, Optional, Tuple\n\nimport torch\nfrom torch import Tensor\n\nfrom ._types import ParamsT\n\n\n# Modified from github.com/pytorch/pytorch/blob/v1.12.1/torch/optim/adamw.py.\nclass NAdamW(torch.optim.Optimizer):\n    \"\"\" Implements NAdamW algorithm.\n\n    See Table 1 in https://arxiv.org/abs/1910.05446 for the implementation of\n    the NAdam algorithm (there is also a comment in the code which highlights\n    the only difference of NAdamW and AdamW).\n\n    For further details regarding the algorithm we refer to\n        - Decoupled Weight Decay Regularization: https://arxiv.org/abs/1711.05101\n        - On the Convergence of Adam and Beyond: https://openreview.net/forum?id=ryQu7f-RZ\n\n    Args:\n        params: iterable of parameters to optimize or dicts defining parameter groups\n        lr: learning rate\n        betas: coefficients used for computing running averages of gradient and its square\n        eps: term added to the denominator to improve numerical stability\n        weight_decay: weight decay coefficient\n        caution: enable caution\n        corrected_weight_decay: apply corrected weight decay (lr**2 / max_lr)\n    \"\"\"\n\n    def __init__(\n            self,\n            params: ParamsT,\n            lr: float = 1e-3,\n            betas: Tuple[float, float] = (0.9, 0.999),\n            eps: float = 1e-8,\n            weight_decay: float = 1e-2,\n            caution: bool = False,\n            corrected_weight_decay: bool = False,\n            maximize: bool = False,\n            foreach: Optional[bool] = None,\n     
       capturable: bool = False,\n    ):\n        if not 0.0 <= lr:\n            raise ValueError(f'Invalid learning rate: {lr}')\n        if not 0.0 <= eps:\n            raise ValueError(f'Invalid epsilon value: {eps}')\n        if not 0.0 <= betas[0] < 1.0:\n            raise ValueError(f'Invalid beta parameter at index 0: {betas[0]}')\n        if not 0.0 <= betas[1] < 1.0:\n            raise ValueError(f'Invalid beta parameter at index 1: {betas[1]}')\n        if not 0.0 <= weight_decay:\n            raise ValueError(f'Invalid weight_decay value: {weight_decay}')\n        defaults = dict(\n            lr=lr,\n            betas=betas,\n            eps=eps,\n            weight_decay=weight_decay,\n            caution=caution,\n            corrected_weight_decay=corrected_weight_decay,\n            foreach=foreach,\n            maximize=maximize,\n            capturable=capturable,\n        )\n        super().__init__(params, defaults)\n\n    def __setstate__(self, state):\n        super().__setstate__(state)\n        state_values = list(self.state.values())\n        step_is_tensor = (len(state_values) != 0) and torch.is_tensor(state_values[0]['step'])\n        if not step_is_tensor:\n            for s in state_values:\n                s['step'] = torch.tensor(float(s['step']))\n        for group in self.param_groups:\n            group.setdefault('caution', False)\n            group.setdefault('corrected_weight_decay', False)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        \"\"\"Performs a single optimization step.\n\n            Args:\n              closure (callable, optional): A closure that reevaluates the model\n                  and returns the loss.\n        \"\"\"\n        self._cuda_graph_capture_health_check()\n\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n            params_with_grad = []\n            grads = 
[]\n            exp_avgs = []\n            exp_avg_sqs = []\n            state_steps = []\n            beta1, beta2 = group['betas']\n\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n                params_with_grad.append(p)\n                if p.grad.is_sparse:\n                    raise RuntimeError('NAdamW does not support sparse gradients')\n                grads.append(p.grad)\n\n                state = self.state[p]\n\n                # State initialization\n                if len(state) == 0:\n                    state['step'] = torch.tensor(0.)\n                    # Exponential moving average of gradient values\n                    state['exp_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format)\n                    # Exponential moving average of squared gradient values\n                    state['exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format)\n\n                exp_avgs.append(state['exp_avg'])\n                exp_avg_sqs.append(state['exp_avg_sq'])\n                state_steps.append(state['step'])\n\n            nadamw(\n                params_with_grad,\n                grads,\n                exp_avgs,\n                exp_avg_sqs,\n                state_steps,\n                beta1=beta1,\n                beta2=beta2,\n                lr=group['lr'],\n                weight_decay=group['weight_decay'],\n                eps=group['eps'],\n                caution=group['caution'],\n                maximize=group['maximize'],\n                capturable=group['capturable'],\n                max_lr=self.defaults['lr'] if group['corrected_weight_decay'] else None,\n            )\n\n        return loss\n\n\ndef nadamw(\n        params: List[Tensor],\n        grads: List[Tensor],\n        exp_avgs: List[Tensor],\n        exp_avg_sqs: List[Tensor],\n        state_steps: List[Tensor],\n        foreach: Optional[bool] = None,\n        capturable: bool = False,\n   
     *,\n        beta1: float,\n        beta2: float,\n        lr: float,\n        weight_decay: float,\n        eps: float,\n        caution: bool,\n        maximize: bool,\n        max_lr: Optional[float],\n) -> None:\n    r\"\"\"Functional API that performs NAdamW algorithm computation.\n      See NAdamW class for details.\n    \"\"\"\n\n    if not all(isinstance(t, torch.Tensor) for t in state_steps):\n        raise RuntimeError(\n            'API has changed, `state_steps` argument must contain a list of' +\n            ' singleton tensors')\n\n    if foreach is None:\n        try:\n            # cannot do foreach if this overload doesn't exist when caution enabled\n            foreach = not caution or 'Scalar' in torch.ops.aten._foreach_maximum_.overloads()\n        except Exception:\n            foreach = False\n\n    if foreach and not torch.jit.is_scripting():\n        func = _multi_tensor_nadamw\n    else:\n        func = _single_tensor_nadamw\n\n    func(\n        params,\n        grads,\n        exp_avgs,\n        exp_avg_sqs,\n        state_steps,\n        beta1=beta1,\n        beta2=beta2,\n        lr=lr,\n        weight_decay=weight_decay,\n        eps=eps,\n        caution=caution,\n        maximize=maximize,\n        capturable=capturable,\n        max_lr=max_lr,\n    )\n\n\ndef _single_tensor_nadamw(\n        params: List[Tensor],\n        grads: List[Tensor],\n        exp_avgs: List[Tensor],\n        exp_avg_sqs: List[Tensor],\n        state_steps: List[Tensor],\n        *,\n        beta1: float,\n        beta2: float,\n        lr: float,\n        weight_decay: float,\n        eps: float,\n        caution: bool,\n        maximize: bool,\n        capturable: bool,\n        max_lr: Optional[float],\n):\n\n    for i, param in enumerate(params):\n        grad = grads[i] if not maximize else -grads[i]\n        exp_avg = exp_avgs[i]\n        exp_avg_sq = exp_avg_sqs[i]\n        step_t = state_steps[i]\n\n        # Update step.\n        step_t += 1\n\n  
      # Perform stepweight decay.\n        wd_scale = lr if max_lr is None else lr ** 2 / max_lr\n        param.mul_(1. - wd_scale * weight_decay)\n\n        # Decay the first and second moment running average coefficient.\n        exp_avg.mul_(beta1).add_(grad, alpha=1 - beta1)\n        exp_avg_sq.mul_(beta2).addcmul_(grad, grad, value=1 - beta2)\n\n        if capturable:\n            step = step_t\n\n            # 1 - beta1 ** step can't be captured in a CUDA graph, even if step is a CUDA tensor\n            # (incurs \"RuntimeError: CUDA error: operation not permitted when stream is capturing\")\n            bias_correction1 = 1 - torch.pow(beta1, step)\n            bias_correction2 = 1 - torch.pow(beta2, step)\n\n            step_size = lr / bias_correction1\n            step_size_neg = step_size.neg()\n\n            bias_correction2_sqrt = bias_correction2.sqrt()\n\n            # Only difference between NAdamW and AdamW in this implementation.\n            # The official PyTorch implementation of NAdam uses a different algorithm.\n            exp_avg = exp_avg.mul(beta1).add_(grad, alpha=1 - beta1)\n\n            denom = (exp_avg_sq.sqrt() / (bias_correction2_sqrt * step_size_neg)).add_(eps / step_size_neg)\n\n            if caution:\n                # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n                # FIXME not 100% sure if this remains capturable?\n                mask = (exp_avg * grad > 0).to(grad.dtype)\n                mask.div_(mask.mean().clamp_(min=1e-3))\n                exp_avg.mul_(mask)\n\n            param.addcdiv_(exp_avg, denom)\n        else:\n            step = step_t.item()\n            bias_correction1 = 1 - beta1 ** step\n            bias_correction2 = 1 - beta2 ** step\n            step_size = lr / bias_correction1\n            bias_correction2_sqrt = math.sqrt(bias_correction2)\n\n            # Apply Nesterov. 
Only difference between NAdamW and AdamW in this implementation.\n            # The official PyTorch implementation of NAdam uses a different algorithm.\n            exp_avg = exp_avg.mul(beta1).add_(grad, alpha=1 - beta1)\n            denom = (exp_avg_sq.sqrt() / bias_correction2_sqrt).add_(eps)\n\n            if caution:\n                # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n                mask = (exp_avg * grad > 0).to(grad.dtype)\n                mask.div_(mask.mean().clamp_(min=1e-3))\n                exp_avg.mul_(mask)\n\n            param.addcdiv_(exp_avg, denom, value=-step_size)\n\n\ndef _multi_tensor_nadamw(\n        params: List[Tensor],\n        grads: List[Tensor],\n        exp_avgs: List[Tensor],\n        exp_avg_sqs: List[Tensor],\n        state_steps: List[Tensor],\n        *,\n        beta1: float,\n        beta2: float,\n        lr: float,\n        weight_decay: float,\n        eps: float,\n        caution: bool,\n        maximize: bool,\n        capturable: bool,\n        max_lr: Optional[float],\n):\n    if len(params) == 0:\n        return\n\n    if capturable:\n        assert all(\n            p.is_cuda and step.is_cuda for p, step in zip(params, state_steps)\n        ), \"If capturable=True, params and state_steps must be CUDA tensors.\"\n\n    if maximize:\n        grads = torch._foreach_neg(tuple(grads))  # type: ignore[assignment]\n\n    grads = [torch.view_as_real(x) if torch.is_complex(x) else x for x in grads]\n    exp_avgs = [torch.view_as_real(x) if torch.is_complex(x) else x for x in exp_avgs]\n    exp_avg_sqs = [torch.view_as_real(x) if torch.is_complex(x) else x for x in exp_avg_sqs]\n    params = [torch.view_as_real(x) if torch.is_complex(x) else x for x in params]\n\n    # update steps\n    torch._foreach_add_(state_steps, 1)\n\n    # Perform stepweight decay\n    wd_scale = lr if max_lr is None else lr ** 2 / max_lr\n    torch._foreach_mul_(params, 1 -  wd_scale * weight_decay)\n\n    
# Decay the first and second moment running average coefficient\n    torch._foreach_mul_(exp_avgs, beta1)\n    torch._foreach_add_(exp_avgs, grads, alpha=1 - beta1)\n\n    torch._foreach_mul_(exp_avg_sqs, beta2)\n    torch._foreach_addcmul_(exp_avg_sqs, grads, grads, 1 - beta2)\n\n    if capturable:\n        # TODO: use foreach_pow if/when foreach_pow is added\n        bias_correction1 = [torch.pow(beta1, step) for step in state_steps]\n        bias_correction2 = [torch.pow(beta2, step) for step in state_steps]\n        # foreach_sub doesn't allow a scalar as the first arg\n        torch._foreach_sub_(bias_correction1, 1)\n        torch._foreach_sub_(bias_correction2, 1)\n        torch._foreach_neg_(bias_correction1)\n        torch._foreach_neg_(bias_correction2)\n\n        # foreach_div doesn't allow a scalar as the first arg\n        step_size = torch._foreach_div(bias_correction1, lr)\n        torch._foreach_reciprocal_(step_size)\n        torch._foreach_neg_(step_size)\n\n        bias_correction2_sqrt = torch._foreach_sqrt(bias_correction2)\n\n        # Only difference between NAdamW and AdamW in this implementation.\n        # The official PyTorch implementation of NAdam uses a different algorithm.\n        exp_avgs = torch._foreach_mul(exp_avgs, beta1)\n        torch._foreach_add_(exp_avgs, grads, alpha=1 - beta1)\n\n        exp_avg_sq_sqrt = torch._foreach_sqrt(exp_avg_sqs)\n        torch._foreach_div_(\n            exp_avg_sq_sqrt,\n            torch._foreach_mul(bias_correction2_sqrt, step_size)\n        )\n        eps_over_step_size = torch._foreach_div(step_size, eps)\n        torch._foreach_reciprocal_(eps_over_step_size)\n        denom = torch._foreach_add(exp_avg_sq_sqrt, eps_over_step_size)\n\n        if caution:\n            # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n            masks = torch._foreach_mul(exp_avgs, grads)\n            masks = [(m > 0).to(g.dtype) for m, g in zip(masks, grads)]  # capturable?\n   
         mask_scale = [m.mean() for m in masks]\n            torch._foreach_maximum_(mask_scale, 1e-3)\n            #torch._foreach_clamp_min_(mask_scale, 1e-3)\n            torch._foreach_div_(masks, mask_scale)\n            torch._foreach_mul_(exp_avgs, masks)\n\n        torch._foreach_addcdiv_(params, exp_avgs, denom)\n    else:\n        bias_correction1 = [1 - beta1 ** step.item() for step in state_steps]\n        bias_correction2 = [1 - beta2 ** step.item() for step in state_steps]\n\n        step_size = [(lr / bc) * -1 for bc in bias_correction1]\n\n        bias_correction2_sqrt = [math.sqrt(bc) for bc in bias_correction2]\n\n        # Apply Nesterov. Only difference between NAdamW and AdamW in this implementation.\n        # The official PyTorch implementation of NAdam uses a different algorithm.\n        exp_avgs = torch._foreach_mul(exp_avgs, beta1)\n        torch._foreach_add_(exp_avgs, grads, alpha=1 - beta1)\n\n        exp_avg_sq_sqrt = torch._foreach_sqrt(exp_avg_sqs)\n        torch._foreach_div_(exp_avg_sq_sqrt, bias_correction2_sqrt)\n        denom = torch._foreach_add(exp_avg_sq_sqrt, eps)\n\n        if caution:\n            # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n            masks = torch._foreach_mul(exp_avgs, grads)\n            masks = [(m > 0).to(g.dtype) for m, g in zip(masks, grads)]\n            mask_scale = [m.mean() for m in masks]\n            torch._foreach_maximum_(mask_scale, 1e-3)\n            #torch._foreach_clamp_min_(mask_scale, 1e-3)\n            torch._foreach_div_(masks, mask_scale)\n            torch._foreach_mul_(exp_avgs, masks)\n\n        torch._foreach_addcdiv_(params, exp_avgs, denom, step_size)\n"
  },
  {
    "path": "timm/optim/nvnovograd.py",
    "content": "\"\"\" Nvidia NovoGrad Optimizer.\nOriginal impl by Nvidia from Jasper example:\n    - https://github.com/NVIDIA/DeepLearningExamples/blob/master/PyTorch/SpeechRecognition/Jasper\nPaper: `Stochastic Gradient Methods with Layer-wise Adaptive Moments for Training of Deep Networks`\n    - https://arxiv.org/abs/1905.11286\n\"\"\"\n\nimport torch\nfrom torch.optim.optimizer import Optimizer\nimport math\n\n\nclass NvNovoGrad(Optimizer):\n    \"\"\"\n    Implements Novograd algorithm.\n\n    Args:\n        params (iterable): iterable of parameters to optimize or dicts defining\n            parameter groups\n        lr (float, optional): learning rate (default: 1e-3)\n        betas (Tuple[float, float], optional): coefficients used for computing\n            running averages of gradient and its square (default: (0.95, 0.98))\n        eps (float, optional): term added to the denominator to improve\n            numerical stability (default: 1e-8)\n        weight_decay (float, optional): weight decay (L2 penalty) (default: 0)\n        grad_averaging: gradient averaging\n        amsgrad (boolean, optional): whether to use the AMSGrad variant of this\n            algorithm from the paper `On the Convergence of Adam and Beyond`_\n            (default: False)\n    \"\"\"\n\n    def __init__(\n            self,\n            params,\n            lr=1e-3,\n            betas=(0.95, 0.98),\n            eps=1e-8,\n            weight_decay=0,\n            grad_averaging=False,\n            amsgrad=False,\n    ):\n        if not 0.0 <= lr:\n            raise ValueError(\"Invalid learning rate: {}\".format(lr))\n        if not 0.0 <= eps:\n            raise ValueError(\"Invalid epsilon value: {}\".format(eps))\n        if not 0.0 <= betas[0] < 1.0:\n            raise ValueError(\"Invalid beta parameter at index 0: {}\".format(betas[0]))\n        if not 0.0 <= betas[1] < 1.0:\n            raise ValueError(\"Invalid beta parameter at index 1: {}\".format(betas[1]))\n        
defaults = dict(\n            lr=lr,\n            betas=betas,\n            eps=eps,\n            weight_decay=weight_decay,\n            grad_averaging=grad_averaging,\n            amsgrad=amsgrad,\n        )\n\n        super(NvNovoGrad, self).__init__(params, defaults)\n\n    def __setstate__(self, state):\n        super(NvNovoGrad, self).__setstate__(state)\n        for group in self.param_groups:\n            group.setdefault('amsgrad', False)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        \"\"\"Performs a single optimization step.\n\n        Arguments:\n            closure (callable, optional): A closure that reevaluates the model\n            and returns the loss.\n        \"\"\"\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n                grad = p.grad\n                if grad.is_sparse:\n                    raise RuntimeError('Sparse gradients are not supported.')\n                amsgrad = group['amsgrad']\n\n                state = self.state[p]\n\n                # State initialization\n                if len(state) == 0:\n                    state['step'] = 0\n                    # Exponential moving average of gradient values\n                    state['exp_avg'] = torch.zeros_like(p)\n                    # Exponential moving average of squared gradient values\n                    state['exp_avg_sq'] = torch.zeros([]).to(state['exp_avg'].device)\n                    if amsgrad:\n                        # Maintains max of all exp. moving avg. of sq. grad. 
values\n                        state['max_exp_avg_sq'] = torch.zeros([]).to(state['exp_avg'].device)\n\n                exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']\n                if amsgrad:\n                    max_exp_avg_sq = state['max_exp_avg_sq']\n                beta1, beta2 = group['betas']\n\n                state['step'] += 1\n\n                norm = torch.sum(torch.pow(grad, 2))\n\n                if exp_avg_sq == 0:\n                    exp_avg_sq.copy_(norm)\n                else:\n                    exp_avg_sq.mul_(beta2).add_(norm, alpha=1 - beta2)\n\n                if amsgrad:\n                    # Maintains the maximum of all 2nd moment running avg. till now\n                    torch.max(max_exp_avg_sq, exp_avg_sq, out=max_exp_avg_sq)\n                    # Use the max. for normalizing running avg. of gradient\n                    denom = max_exp_avg_sq.sqrt().add_(group['eps'])\n                else:\n                    denom = exp_avg_sq.sqrt().add_(group['eps'])\n\n                grad.div_(denom)\n                if group['weight_decay'] != 0:\n                    grad.add_(p, alpha=group['weight_decay'])\n                if group['grad_averaging']:\n                    grad.mul_(1 - beta1)\n                exp_avg.mul_(beta1).add_(grad)\n\n                p.add_(exp_avg, alpha=-group['lr'])\n\n        return loss\n"
  },
  {
    "path": "timm/optim/optim_factory.py",
    "content": "# lots of uses of these functions directly, ala 'import timm.optim.optim_factory as optim_factory', fun :/\n\nfrom ._optim_factory import create_optimizer, create_optimizer_v2, optimizer_kwargs\nfrom ._param_groups import param_groups_layer_decay, param_groups_weight_decay, group_parameters, _layer_map, _group\n\nimport warnings\nwarnings.warn(f\"Importing from {__name__} is deprecated, please import via timm.optim\", FutureWarning)\n"
  },
  {
    "path": "timm/optim/radam.py",
    "content": "\"\"\"RAdam Optimizer.\nImplementation lifted from: https://github.com/LiyuanLucasLiu/RAdam\nPaper: `On the Variance of the Adaptive Learning Rate and Beyond` - https://arxiv.org/abs/1908.03265\n\nNOTE: This impl has been deprecated in favour of torch.optim.RAdam and remains as a reference\n\"\"\"\nimport math\nimport torch\nfrom torch.optim.optimizer import Optimizer\n\n\nclass RAdamLegacy(Optimizer):\n    \"\"\" PyTorch RAdam optimizer\n\n    NOTE: This impl has been deprecated in favour of torch.optim.RAdam and remains as a reference\n    \"\"\"\n    def __init__(\n            self,\n            params,\n            lr=1e-3,\n            betas=(0.9, 0.999),\n            eps=1e-8,\n            weight_decay=0,\n    ):\n        defaults = dict(\n            lr=lr,\n            betas=betas,\n            eps=eps,\n            weight_decay=weight_decay,\n            buffer=[[None, None, None] for _ in range(10)]\n        )\n        super(RAdamLegacy, self).__init__(params, defaults)\n\n    def __setstate__(self, state):\n        super(RAdamLegacy, self).__setstate__(state)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n                grad = p.grad.float()\n                if grad.is_sparse:\n                    raise RuntimeError('RAdam does not support sparse gradients')\n\n                p_fp32 = p.float()\n\n                state = self.state[p]\n\n                if len(state) == 0:\n                    state['step'] = 0\n                    state['exp_avg'] = torch.zeros_like(p_fp32)\n                    state['exp_avg_sq'] = torch.zeros_like(p_fp32)\n                else:\n                    state['exp_avg'] = state['exp_avg'].type_as(p_fp32)\n                
    state['exp_avg_sq'] = state['exp_avg_sq'].type_as(p_fp32)\n\n                exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']\n                beta1, beta2 = group['betas']\n\n                exp_avg_sq.mul_(beta2).addcmul_(grad, grad, value=1 - beta2)\n                exp_avg.mul_(beta1).add_(grad, alpha=1 - beta1)\n\n                state['step'] += 1\n                buffered = group['buffer'][int(state['step'] % 10)]\n                if state['step'] == buffered[0]:\n                    num_sma, step_size = buffered[1], buffered[2]\n                else:\n                    buffered[0] = state['step']\n                    beta2_t = beta2 ** state['step']\n                    num_sma_max = 2 / (1 - beta2) - 1\n                    num_sma = num_sma_max - 2 * state['step'] * beta2_t / (1 - beta2_t)\n                    buffered[1] = num_sma\n\n                    # more conservative since it's an approximated value\n                    if num_sma >= 5:\n                        step_size = group['lr'] * math.sqrt(\n                            (1 - beta2_t) *\n                            (num_sma - 4) / (num_sma_max - 4) *\n                            (num_sma - 2) / num_sma *\n                            num_sma_max / (num_sma_max - 2)) / (1 - beta1 ** state['step'])\n                    else:\n                        step_size = group['lr'] / (1 - beta1 ** state['step'])\n                    buffered[2] = step_size\n\n                if group['weight_decay'] != 0:\n                    p_fp32.add_(p_fp32, alpha=-group['weight_decay'] * group['lr'])\n\n                # more conservative since it's an approximated value\n                if num_sma >= 5:\n                    denom = exp_avg_sq.sqrt().add_(group['eps'])\n                    p_fp32.addcdiv_(exp_avg, denom, value=-step_size)\n                else:\n                    p_fp32.add_(exp_avg, alpha=-step_size)\n\n                p.copy_(p_fp32)\n\n        return loss\n"
  },
  {
    "path": "timm/optim/rmsprop_tf.py",
    "content": "\"\"\" RMSProp modified to behave like Tensorflow impl\n\nOriginally cut & paste from PyTorch RMSProp\nhttps://github.com/pytorch/pytorch/blob/063946d2b3f3f1e953a2a3b54e0b34f1393de295/torch/optim/rmsprop.py\nLicensed under BSD-Clause 3 (ish), https://github.com/pytorch/pytorch/blob/master/LICENSE\n\nReferences for added functionality:\n    Cautious Optimizers: https://arxiv.org/abs/2411.16085\n    Why Gradients Rapidly Increase Near the End of Training: https://arxiv.org/abs/2506.02285\n\nModifications Copyright 2021 Ross Wightman\n\"\"\"\n\nimport torch\nfrom torch.optim import Optimizer\n\nfrom ._types import ParamsT\n\n\nclass RMSpropTF(Optimizer):\n    \"\"\"Implements RMSprop algorithm (TensorFlow style epsilon)\n\n    NOTE: This is a direct cut-and-paste of PyTorch RMSprop with eps applied before sqrt\n    and a few other modifications to closer match Tensorflow for matching hyper-params.\n\n    Noteworthy changes include:\n    1. Epsilon applied inside square-root\n    2. square_avg initialized to ones\n    3. LR scaling of update accumulated in momentum buffer\n\n    Proposed by G. 
Hinton in his\n    `course <http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf>`_.\n\n    The centered version first appears in `Generating Sequences\n    With Recurrent Neural Networks <https://arxiv.org/pdf/1308.0850v5.pdf>`_.\n\n    Args:\n        params: iterable of parameters to optimize or dicts defining parameter groups\n        lr: learning rate\n        momentum: momentum factor\n        alpha: smoothing (decay) constant\n        eps: term added to the denominator to improve numerical stability\n        centered: if ``True``, compute the centered RMSProp, the gradient is normalized by an estimation of its variance\n        weight_decay: weight decay (L2 penalty) (default: 0)\n        decoupled_decay: decoupled weight decay as per https://arxiv.org/abs/1711.05101\n        corrected_weight_decay: apply corrected weight decay (lr**2 / max_lr) when decoupled_decay is True\n        lr_in_momentum: learning rate scaling is included in the momentum buffer update as per defaults in Tensorflow\n        caution: apply caution\n    \"\"\"\n\n    def __init__(\n            self,\n            params: ParamsT,\n            lr: float = 1e-2,\n            alpha: float = 0.9,\n            eps: float = 1e-10,\n            weight_decay: float = 0,\n            momentum: float = 0.,\n            centered: bool = False,\n            decoupled_decay: bool = False,\n            corrected_weight_decay: bool = False,\n            lr_in_momentum: bool = True,\n            caution: bool = False,\n    ):\n        if not 0.0 <= lr:\n            raise ValueError(\"Invalid learning rate: {}\".format(lr))\n        if not 0.0 <= eps:\n            raise ValueError(\"Invalid epsilon value: {}\".format(eps))\n        if not 0.0 <= momentum:\n            raise ValueError(\"Invalid momentum value: {}\".format(momentum))\n        if not 0.0 <= weight_decay:\n            raise ValueError(\"Invalid weight_decay value: {}\".format(weight_decay))\n        if not 0.0 <= alpha:\n 
           raise ValueError(\"Invalid alpha value: {}\".format(alpha))\n\n        defaults = dict(\n            lr=lr,\n            momentum=momentum,\n            alpha=alpha,\n            eps=eps,\n            centered=centered,\n            weight_decay=weight_decay,\n            decoupled_decay=decoupled_decay,\n            corrected_weight_decay=corrected_weight_decay,\n            lr_in_momentum=lr_in_momentum,\n            caution=caution,\n        )\n        super(RMSpropTF, self).__init__(params, defaults)\n\n    def __setstate__(self, state):\n        super(RMSpropTF, self).__setstate__(state)\n        for group in self.param_groups:\n            group.setdefault('momentum', 0)\n            group.setdefault('centered', False)\n            group.setdefault('caution', False)\n            group.setdefault('corrected_weight_decay', False)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        \"\"\"Performs a single optimization step.\n\n        Arguments:\n            closure (callable, optional): A closure that reevaluates the model\n                and returns the loss.\n        \"\"\"\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n                grad = p.grad\n                if grad.is_sparse:\n                    raise RuntimeError('RMSprop does not support sparse gradients')\n                state = self.state[p]\n\n                # State initialization\n                if len(state) == 0:\n                    state['step'] = 0\n                    state['square_avg'] = torch.ones_like(p)  # PyTorch inits to zero\n                    if group['momentum'] > 0:\n                        state['momentum_buffer'] = torch.zeros_like(p)\n                    if group['centered']:\n                        
state['grad_avg'] = torch.zeros_like(p)\n\n                square_avg = state['square_avg']\n                one_minus_alpha = 1. - group['alpha']\n\n                state['step'] += 1\n\n                if group['weight_decay'] != 0:\n                    if group['decoupled_decay']:\n                        if group['corrected_weight_decay']:\n                            wd_scale = group['lr'] ** 2 / self.defaults['lr']\n                        else:\n                            wd_scale = group['lr']\n                        p.mul_(1. - wd_scale * group['weight_decay'])\n                    else:\n                        grad = grad.add(p, alpha=group['weight_decay'])\n\n                # Tensorflow order of ops for updating squared avg\n                square_avg.add_(grad.pow(2) - square_avg, alpha=one_minus_alpha)\n                # square_avg.mul_(alpha).addcmul_(grad, grad, value=1 - alpha)  # PyTorch original\n\n                if group['centered']:\n                    grad_avg = state['grad_avg']\n                    grad_avg.add_(grad - grad_avg, alpha=one_minus_alpha)\n                    avg = square_avg.addcmul(grad_avg, grad_avg, value=-1).add(group['eps']).sqrt_()  # eps in sqrt\n                    # grad_avg.mul_(alpha).add_(grad, alpha=1 - alpha)  # PyTorch original\n                else:\n                    avg = square_avg.add(group['eps']).sqrt_()  # eps moved in sqrt\n\n                if group['momentum'] > 0:\n                    buf = state['momentum_buffer']\n                    buf.mul_(group['momentum'])\n\n                    def _apply_caution(_m, _g):\n                        # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n                        mask = (_m * _g > 0).to(_g.dtype)\n                        mask.div_(mask.mean().clamp_(min=1e-3))\n                        return _m * mask\n\n                    if group['lr_in_momentum']:\n                        # Tensorflow accumulates the LR scaling 
in the momentum buffer\n                        buf.addcdiv_(grad, avg, value=group['lr'])\n                        if group['caution']:\n                            buf = _apply_caution(buf, grad)\n                        p.add_(-buf)\n                    else:\n                        # PyTorch scales the param update by LR\n                        buf.addcdiv_(grad, avg)\n                        if group['caution']:\n                            buf = _apply_caution(buf, grad)\n                        p.add_(buf, alpha=-group['lr'])\n                else:\n                    p.addcdiv_(grad, avg, value=-group['lr'])\n\n        return loss\n"
  },
  {
    "path": "timm/optim/sgdp.py",
    "content": "\"\"\"\nSGDP Optimizer Implementation copied from https://github.com/clovaai/AdamP/blob/master/adamp/sgdp.py\n\nPaper: `Slowing Down the Weight Norm Increase in Momentum-based Optimizers` - https://arxiv.org/abs/2006.08217\nCode: https://github.com/clovaai/AdamP\n\nReferences for added functionality:\n    Cautious Optimizers: https://arxiv.org/abs/2411.16085\n    Spherical Cautious Optimizers: https://openreview.net/forum?id=OyT2CJ4fh7 \nCopyright (c) 2020-present NAVER Corp.\nMIT license\n\"\"\"\n\nimport torch\nimport torch.nn.functional as F\nfrom torch.optim.optimizer import Optimizer, required\nimport math\n\nfrom .adamp import projection\n\n\nclass SGDP(Optimizer):\n    def __init__(\n            self,\n            params,\n            lr=required,\n            momentum=0,\n            dampening=0,\n            weight_decay=0,\n            nesterov=False,\n            eps=1e-8,\n            delta=0.1,\n            wd_ratio=0.1,\n            caution=False  \n    ):\n        defaults = dict(\n            lr=lr,\n            momentum=momentum,\n            dampening=dampening,\n            weight_decay=weight_decay,\n            nesterov=nesterov,\n            eps=eps,\n            delta=delta,\n            wd_ratio=wd_ratio,\n            caution=caution,\n        )\n        super(SGDP, self).__init__(params, defaults)\n\n    @torch.no_grad()\n    def step(self, closure=None):\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n            weight_decay = group['weight_decay']\n            momentum = group['momentum']\n            dampening = group['dampening']\n            nesterov = group['nesterov']\n            caution = group.get('caution', False)  \n\n            for p in group['params']:\n                if p.grad is None:\n                    continue\n                grad = p.grad\n                state = 
self.state[p]\n\n                # State initialization\n                if len(state) == 0:\n                    state['momentum'] = torch.zeros_like(p)\n\n                # SGD\n                buf = state['momentum']\n                buf.mul_(momentum).add_(grad, alpha=1. - dampening)\n                if nesterov:\n                    d_p = grad + momentum * buf\n                else:\n                    d_p = buf.clone()\n\n                # Projection\n                wd_ratio = 1.\n                if len(p.shape) > 1:\n                    d_p, wd_ratio = projection(p, grad, d_p, group['delta'], group['wd_ratio'], group['eps'], caution)\n                elif caution:\n                    mask = (d_p * grad > 0).to(grad.dtype)\n                    mask.div_(mask.mean().clamp_(min=1e-3))\n                    d_p.mul_(mask)\n\n                # Weight decay\n                if weight_decay != 0:\n                    p.mul_(1. - group['lr'] * group['weight_decay'] * wd_ratio / (1-momentum))\n\n                # Step\n                p.add_(d_p, alpha=-group['lr'])\n\n        return loss\n"
  },
  {
    "path": "timm/optim/sgdw.py",
    "content": "\"\"\" SGD with decoupled weight-decay.\n\nReferences for added functionality:\n    Cautious Optimizers: https://arxiv.org/abs/2411.16085\n    Why Gradients Rapidly Increase Near the End of Training: https://arxiv.org/abs/2506.02285\n\nHacked together by Ross Wightman\n\"\"\"\nfrom typing import List, Optional\n\nimport torch\nfrom torch import Tensor\nfrom torch.optim.optimizer import Optimizer\ntry:\n    from torch.optim.optimizer import _use_grad_for_differentiable, _default_to_fused_or_foreach\n    has_recent_pt = True\nexcept ImportError:\n    has_recent_pt = False\n\nfrom ._types import ParamsT\n\n__all__ = ['SGDW', 'sgdw']\n\n\nclass SGDW(Optimizer):\n    def __init__(\n            self,\n            params: ParamsT,\n            lr: float = 1e-3,\n            momentum: float = 0.,\n            dampening: float = 0.,\n            weight_decay: float = 0.,\n            nesterov: bool = False,\n            *,\n            caution: bool = False,\n            corrected_weight_decay: bool = False,\n            maximize: bool = False,\n            foreach: Optional[bool] = None,\n            differentiable: bool = False,\n    ):\n        if lr < 0.0:\n            raise ValueError(f\"Invalid learning rate: {lr}\")\n        if momentum < 0.0:\n            raise ValueError(f\"Invalid momentum value: {momentum}\")\n        if weight_decay < 0.0:\n            raise ValueError(f\"Invalid weight_decay value: {weight_decay}\")\n\n        defaults = dict(\n            lr=lr,\n            momentum=momentum,\n            dampening=dampening,\n            weight_decay=weight_decay,\n            nesterov=nesterov,\n            caution=caution,\n            corrected_weight_decay=corrected_weight_decay,\n            maximize=maximize,\n            foreach=foreach,\n            differentiable=differentiable,\n        )\n        if nesterov and (momentum <= 0 or dampening != 0):\n            raise ValueError(\"Nesterov momentum requires a momentum and zero 
dampening\")\n        super().__init__(params, defaults)\n\n    def __setstate__(self, state):\n        super().__setstate__(state)\n        for group in self.param_groups:\n            group.setdefault('caution', False)\n            group.setdefault('corrected_weight_decay', False)\n            group.setdefault('nesterov', False)\n            group.setdefault('maximize', False)\n            group.setdefault('foreach', None)\n            group.setdefault('differentiable', False)\n\n    def _init_group(self, group, params_with_grad, grads, momentum_buffer_list):\n        has_sparse_grad = False\n\n        for p in group['params']:\n            if p.grad is not None:\n                params_with_grad.append(p)\n                grads.append(p.grad)\n                if p.grad.is_sparse:\n                    has_sparse_grad = True\n\n                state = self.state[p]\n                if 'momentum_buffer' not in state:\n                    momentum_buffer_list.append(None)\n                else:\n                    momentum_buffer_list.append(state['momentum_buffer'])\n\n        return has_sparse_grad\n\n    # FIXME figure out how to make _use_grad_for_differentiable interchangeable with no_grad decorator\n    #   without args, for backwards compatibility with old pytorch\n    @torch.no_grad()\n    def step(self, closure=None):\n        \"\"\"Performs a single optimization step.\n\n        Args:\n            closure (Callable, optional): A closure that reevaluates the model\n                and returns the loss.\n        \"\"\"\n        loss = None\n        if closure is not None:\n            with torch.enable_grad():\n                loss = closure()\n\n        for group in self.param_groups:\n            params_with_grad = []\n            grads = []\n            momentum_buffer_list = []\n\n            has_sparse_grad = self._init_group(group, params_with_grad, grads, momentum_buffer_list)\n\n            sgdw(\n                params_with_grad,\n                
grads,\n                momentum_buffer_list,\n                weight_decay=group['weight_decay'],\n                momentum=group['momentum'],\n                lr=group['lr'],\n                dampening=group['dampening'],\n                nesterov=group['nesterov'],\n                caution=group['caution'],\n                maximize=group['maximize'],\n                has_sparse_grad=has_sparse_grad,\n                foreach=group['foreach'],\n                max_lr=self.defaults['lr'] if group['corrected_weight_decay'] else None,\n            )\n\n            # update momentum_buffers in state\n            for p, momentum_buffer in zip(params_with_grad, momentum_buffer_list):\n                state = self.state[p]\n                state['momentum_buffer'] = momentum_buffer\n\n        return loss\n\n\ndef sgdw(\n        params: List[Tensor],\n        grads: List[Tensor],\n        momentum_buffer_list: List[Optional[Tensor]],\n        # kwonly args with defaults are not supported by functions compiled with torchscript issue #70627\n        # setting this as kwarg for now as functional API is compiled by torch/distributed/optim\n        has_sparse_grad: bool = None,\n        foreach: Optional[bool] = None,\n        *,\n        weight_decay: float,\n        momentum: float,\n        lr: float,\n        dampening: float,\n        nesterov: bool,\n        caution: bool,\n        maximize: bool,\n        max_lr: Optional[float] = None\n):\n    r\"\"\"Functional API that performs SGD algorithm computation.\n\n    See :class:`~torch.optim.SGD` for details.\n    \"\"\"\n    if has_recent_pt and hasattr(Optimizer, '_group_tensors_by_device_and_dtype'):\n        if foreach is None:\n            # why must we be explicit about an if statement for torch.jit.is_scripting here?\n            # because JIT can't handle Optionals nor fancy conditionals when scripting\n            if not torch.jit.is_scripting():\n                _, foreach = _default_to_fused_or_foreach(params, 
differentiable=False, use_fused=False)\n            else:\n                foreach = False\n\n        if foreach and torch.jit.is_scripting():\n            raise RuntimeError('torch.jit.script not supported with foreach optimizers')\n    else:\n        foreach = False  # disabling altogether for older pytorch, as using _group_tensors_by_device_and_dtype\n\n    if foreach and not torch.jit.is_scripting():\n        func = _multi_tensor_sgdw\n    else:\n        func = _single_tensor_sgdw\n\n    func(\n        params,\n        grads,\n        momentum_buffer_list,\n        weight_decay=weight_decay,\n        momentum=momentum,\n        lr=lr,\n        dampening=dampening,\n        nesterov=nesterov,\n        caution=caution,\n        has_sparse_grad=has_sparse_grad,\n        maximize=maximize,\n        max_lr=max_lr,\n    )\n\n\ndef _single_tensor_sgdw(\n        params: List[Tensor],\n        grads: List[Tensor],\n        momentum_buffer_list: List[Optional[Tensor]],\n        *,\n        weight_decay: float,\n        momentum: float,\n        lr: float,\n        dampening: float,\n        nesterov: bool,\n        caution: bool,\n        maximize: bool,\n        has_sparse_grad: bool,\n        max_lr: Optional[float]\n):\n    for i, param in enumerate(params):\n        grad = grads[i] if not maximize else -grads[i]\n\n        wd_scale = lr if max_lr is None else lr ** 2 / max_lr\n        param.mul_(1. 
- wd_scale * weight_decay)\n\n        if momentum != 0:\n            buf = momentum_buffer_list[i]\n\n            if buf is None:\n                buf = torch.clone(grad).detach()\n                momentum_buffer_list[i] = buf\n            else:\n                buf.mul_(momentum).add_(grad, alpha=1 - dampening)\n\n            if caution:\n                if nesterov:\n                    buf = grad.add(buf, alpha=momentum)\n                # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n                mask = (buf * grad > 0).to(grad.dtype)\n                mask.div_(mask.mean().clamp_(min=1e-3))\n                grad = buf * mask\n            else:\n                if nesterov:\n                    grad = grad.add(buf, alpha=momentum)\n                else:\n                    grad = buf\n\n        param.add_(grad, alpha=-lr)\n\n\ndef _multi_tensor_sgdw(\n        params: List[Tensor],\n        grads: List[Tensor],\n        momentum_buffer_list: List[Optional[Tensor]],\n        *,\n        weight_decay: float,\n        momentum: float,\n        lr: float,\n        dampening: float,\n        nesterov: bool,\n        caution: bool,\n        maximize: bool,\n        has_sparse_grad: bool,\n        max_lr: Optional[float]\n):\n    if len(params) == 0:\n        return\n\n    grouped_tensors = Optimizer._group_tensors_by_device_and_dtype(\n        [params, grads, momentum_buffer_list], with_indices=True)\n    for ((device_params, device_grads, device_momentum_buffer_list), indices) in grouped_tensors.values():\n        device_has_sparse_grad = has_sparse_grad and any(grad.is_sparse for grad in device_grads)\n\n        if maximize:\n            device_grads = torch._foreach_neg(device_grads)\n\n        wd_scale = lr if max_lr is None else lr ** 2 / max_lr\n        torch._foreach_mul_(params, 1. 
- wd_scale * weight_decay)\n\n        if momentum != 0:\n            bufs = []\n\n            all_states_with_momentum_buffer = True\n            for i in range(len(device_momentum_buffer_list)):\n                if device_momentum_buffer_list[i] is None:\n                    all_states_with_momentum_buffer = False\n                    break\n                else:\n                    bufs.append(device_momentum_buffer_list[i])\n\n            if all_states_with_momentum_buffer:\n                torch._foreach_mul_(bufs, momentum)\n                torch._foreach_add_(bufs, device_grads, alpha=1 - dampening)\n            else:\n                bufs = []\n                for i in range(len(device_momentum_buffer_list)):\n                    if device_momentum_buffer_list[i] is None:\n                        buf = device_momentum_buffer_list[i] = momentum_buffer_list[indices[i]] = \\\n                            torch.clone(device_grads[i]).detach()\n                    else:\n                        buf = device_momentum_buffer_list[i]\n                        buf.mul_(momentum).add_(device_grads[i], alpha=1 - dampening)\n\n                    bufs.append(buf)\n\n            if caution:\n                if nesterov:\n                    # Can't do nesterov in-place if we want to compare against orig grad for caution\n                    bufs = torch._foreach_add(device_grads, bufs, alpha=momentum)\n                # Apply caution as per 'Cautious Optimizers' - https://arxiv.org/abs/2411.16085\n                masks = torch._foreach_mul(bufs, device_grads)\n                masks = [(m > 0).to(g.dtype) for m, g in zip(masks, device_grads)]\n                mask_scale = [m.mean() for m in masks]\n                torch._foreach_maximum_(mask_scale, 1e-3)\n                torch._foreach_div_(masks, mask_scale)\n                device_grads = torch._foreach_mul(bufs, masks)\n            else:\n                if nesterov:\n                    
torch._foreach_add_(device_grads, bufs, alpha=momentum)\n                else:\n                    device_grads = bufs\n\n        if not device_has_sparse_grad:\n            torch._foreach_add_(device_params, device_grads, alpha=-lr)\n        else:\n            # foreach APIs don't support sparse\n            for i in range(len(device_params)):\n                device_params[i].add_(device_grads[i], alpha=-lr)\n"
  },
  {
    "path": "timm/py.typed",
    "content": ""
  },
  {
    "path": "timm/scheduler/__init__.py",
    "content": "from .cosine_lr import CosineLRScheduler\nfrom .multistep_lr import MultiStepLRScheduler\nfrom .plateau_lr import PlateauLRScheduler\nfrom .poly_lr import PolyLRScheduler\nfrom .step_lr import StepLRScheduler\nfrom .tanh_lr import TanhLRScheduler\n\nfrom .scheduler_factory import create_scheduler, create_scheduler_v2, scheduler_kwargs\n"
  },
  {
    "path": "timm/scheduler/cosine_lr.py",
    "content": "\"\"\" Cosine Scheduler\n\nCosine LR schedule with warmup, cycle/restarts, noise, k-decay.\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nimport logging\nimport math\nimport numpy as np\nimport torch\nfrom typing import Tuple, List, Union\n\nfrom .scheduler import Scheduler\n\n\n_logger = logging.getLogger(__name__)\n\n\nclass CosineLRScheduler(Scheduler):\n    \"\"\"\n    Cosine decay with restarts.\n    This is described in the paper https://arxiv.org/abs/1608.03983.\n\n    Inspiration from\n    https://github.com/allenai/allennlp/blob/master/allennlp/training/learning_rate_schedulers/cosine.py\n\n    k-decay option based on `k-decay: A New Method For Learning Rate Schedule` - https://arxiv.org/abs/2004.05909\n    \"\"\"\n\n    def __init__(\n            self,\n            optimizer: torch.optim.Optimizer,\n            t_initial: int,\n            lr_min: float = 0.,\n            cycle_mul: float = 1.,\n            cycle_decay: float = 1.,\n            cycle_limit: int = 1,\n            warmup_t: int = 0,\n            warmup_lr_init: float = 0.,\n            warmup_prefix: bool = False,\n            t_in_epochs: bool = True,\n            noise_range_t: Union[List[int], Tuple[int, int], int, None] = None,\n            noise_pct: float = 0.67,\n            noise_std: float = 1.0,\n            noise_seed: int = 42,\n            k_decay: float = 1.0,\n            initialize: bool = True,\n    ) -> None:\n        super().__init__(\n            optimizer,\n            param_group_field=\"lr\",\n            t_in_epochs=t_in_epochs,\n            noise_range_t=noise_range_t,\n            noise_pct=noise_pct,\n            noise_std=noise_std,\n            noise_seed=noise_seed,\n            initialize=initialize,\n        )\n\n        assert t_initial > 0\n        assert lr_min >= 0\n        if t_initial == 1 and cycle_mul == 1 and cycle_decay == 1:\n            _logger.warning(\n                \"Cosine annealing scheduler will have no 
effect on the learning \"\n                \"rate since t_initial = t_mul = eta_mul = 1.\")\n        self.t_initial = t_initial\n        self.lr_min = lr_min\n        self.cycle_mul = cycle_mul\n        self.cycle_decay = cycle_decay\n        self.cycle_limit = cycle_limit\n        self.warmup_t = warmup_t\n        self.warmup_lr_init = warmup_lr_init\n        self.warmup_prefix = warmup_prefix\n        self.k_decay = k_decay\n        if self.warmup_t:\n            self.warmup_steps = [(v - warmup_lr_init) / self.warmup_t for v in self.base_values]\n            super().update_groups(self.warmup_lr_init)\n        else:\n            self.warmup_steps = [1 for _ in self.base_values]\n\n    def _get_lr(self, t: int) -> List[float]:\n        if t < self.warmup_t:\n            lrs = [self.warmup_lr_init + t * s for s in self.warmup_steps]\n        else:\n            if self.warmup_prefix:\n                t = t - self.warmup_t\n\n            if self.cycle_mul != 1:\n                i = math.floor(math.log(1 - t / self.t_initial * (1 - self.cycle_mul), self.cycle_mul))\n                t_i = self.cycle_mul ** i * self.t_initial\n                t_curr = t - (1 - self.cycle_mul ** i) / (1 - self.cycle_mul) * self.t_initial\n            else:\n                i = t // self.t_initial\n                t_i = self.t_initial\n                t_curr = t - (self.t_initial * i)\n\n            gamma = self.cycle_decay ** i\n            lr_max_values = [v * gamma for v in self.base_values]\n            k = self.k_decay\n\n            if i < self.cycle_limit:\n                lrs = [\n                    self.lr_min + 0.5 * (lr_max - self.lr_min) * (1 + math.cos(math.pi * t_curr ** k / t_i ** k))\n                    for lr_max in lr_max_values\n                ]\n            else:\n                lrs = [self.lr_min for _ in self.base_values]\n\n        return lrs\n\n    def get_cycle_length(self, cycles=0):\n        cycles = max(1, cycles or self.cycle_limit)\n        if 
self.cycle_mul == 1.0:\n            t = self.t_initial * cycles\n        else:\n            t = int(math.floor(-self.t_initial * (self.cycle_mul ** cycles - 1) / (1 - self.cycle_mul)))\n        return t + self.warmup_t if self.warmup_prefix else t\n"
  },
  {
    "path": "timm/scheduler/multistep_lr.py",
    "content": "\"\"\" MultiStep LR Scheduler\n\nBasic multi step LR schedule with warmup, noise.\n\"\"\"\nimport torch\nimport bisect\nfrom timm.scheduler.scheduler import Scheduler\nfrom typing import List, Tuple, Union\n\nclass MultiStepLRScheduler(Scheduler):\n    \"\"\"\n    \"\"\"\n\n    def __init__(\n            self,\n            optimizer: torch.optim.Optimizer,\n            decay_t: List[int],\n            decay_rate: float = 1.,\n            warmup_t: int = 0,\n            warmup_lr_init: float = 0.,\n            warmup_prefix: bool = True,\n            t_in_epochs: bool = True,\n            noise_range_t: Union[List[int], Tuple[int, int], int, None] = None,\n            noise_pct: float = 0.67,\n            noise_std: float = 1.0,\n            noise_seed: int = 42,\n            initialize: bool = True,\n    ) -> None:\n        super().__init__(\n            optimizer,\n            param_group_field=\"lr\",\n            t_in_epochs=t_in_epochs,\n            noise_range_t=noise_range_t,\n            noise_pct=noise_pct,\n            noise_std=noise_std,\n            noise_seed=noise_seed,\n            initialize=initialize,\n        )\n\n        self.decay_t = decay_t\n        self.decay_rate = decay_rate\n        self.warmup_t = warmup_t\n        self.warmup_lr_init = warmup_lr_init\n        self.warmup_prefix = warmup_prefix\n        if self.warmup_t:\n            self.warmup_steps = [(v - warmup_lr_init) / self.warmup_t for v in self.base_values]\n            super().update_groups(self.warmup_lr_init)\n        else:\n            self.warmup_steps = [1 for _ in self.base_values]\n\n    def get_curr_decay_steps(self, t):\n        # find where in the array t goes,\n        # assumes self.decay_t is sorted\n        return bisect.bisect_right(self.decay_t, t + 1)\n\n    def _get_lr(self, t: int) -> List[float]:\n        if t < self.warmup_t:\n            lrs = [self.warmup_lr_init + t * s for s in self.warmup_steps]\n        else:\n            if 
self.warmup_prefix:\n                t = t - self.warmup_t\n            lrs = [v * (self.decay_rate ** self.get_curr_decay_steps(t)) for v in self.base_values]\n        return lrs\n"
  },
  {
    "path": "timm/scheduler/plateau_lr.py",
    "content": "\"\"\" Plateau Scheduler\n\nAdapts PyTorch plateau scheduler and allows application of noise, warmup.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport torch\nfrom typing import Any, Dict, List, Optional, Tuple, Union\n\nfrom .scheduler import Scheduler\n\n\nclass PlateauLRScheduler(Scheduler):\n    \"\"\"Decay the LR by a factor every time the validation loss plateaus.\"\"\"\n\n    def __init__(\n            self,\n            optimizer: torch.optim.Optimizer,\n            decay_rate: float = 0.1,\n            patience_t: int = 10,\n            threshold: float = 1e-4,\n            cooldown_t: int = 0,\n            warmup_t: int = 0,\n            warmup_lr_init: float = 0.,\n            lr_min: float = 0.,\n            mode: str = 'max',\n            noise_range_t: Union[List[int], Tuple[int, int], int, None] = None,\n            noise_type: str = 'normal',\n            noise_pct: float = 0.67,\n            noise_std: float = 1.0,\n            noise_seed: Optional[int] = None,\n            initialize: bool = True,\n    ) -> None:\n        super().__init__(\n            optimizer,\n            'lr',\n            noise_range_t=noise_range_t,\n            noise_type=noise_type,\n            noise_pct=noise_pct,\n            noise_std=noise_std,\n            noise_seed=noise_seed,\n            initialize=initialize,\n        )\n\n        self.lr_scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(\n            self.optimizer,\n            patience=patience_t,\n            factor=decay_rate,\n            threshold=threshold,\n            cooldown=cooldown_t,\n            mode=mode,\n            min_lr=lr_min,\n        )\n\n        self.warmup_t = warmup_t\n        self.warmup_lr_init = warmup_lr_init\n        if self.warmup_t:\n            self.warmup_steps = [(v - warmup_lr_init) / self.warmup_t for v in self.base_values]\n            super().update_groups(self.warmup_lr_init)\n        else:\n            self.warmup_steps = [1 for 
_ in self.base_values]\n        self.restore_lr = None\n\n    def state_dict(self) -> Dict[str, Any]:\n        return {\n            'best': self.lr_scheduler.best,\n            'last_epoch': self.lr_scheduler.last_epoch,\n        }\n\n    def load_state_dict(self, state_dict: Dict[str, Any]) -> None:\n        self.lr_scheduler.best = state_dict['best']\n        if 'last_epoch' in state_dict:\n            self.lr_scheduler.last_epoch = state_dict['last_epoch']\n\n    # override the base class step fn completely\n    def step(self, epoch: int, metric: Optional[float] = None) -> None:\n        if epoch <= self.warmup_t:\n            lrs = [self.warmup_lr_init + epoch * s for s in self.warmup_steps]\n            super().update_groups(lrs)\n        else:\n            if self.restore_lr is not None:\n                # restore actual LR from before our last noise perturbation before stepping base\n                for i, param_group in enumerate(self.optimizer.param_groups):\n                    param_group['lr'] = self.restore_lr[i]\n                self.restore_lr = None\n\n            # step the base scheduler if metric given\n            if metric is not None:\n                self.lr_scheduler.step(metric)\n\n            if self._is_apply_noise(epoch):\n                self._apply_noise(epoch)\n\n    def step_update(self, num_updates: int, metric: Optional[float] = None):\n        return None\n\n    def _apply_noise(self, epoch: int) -> None:\n        noise = self._calculate_noise(epoch)\n\n        # apply the noise on top of previous LR, cache the old value so we can restore for normal\n        # stepping of base scheduler\n        restore_lr = []\n        for i, param_group in enumerate(self.optimizer.param_groups):\n            old_lr = float(param_group['lr'])\n            restore_lr.append(old_lr)\n            new_lr = old_lr + old_lr * noise\n            param_group['lr'] = new_lr\n        self.restore_lr = restore_lr\n\n    def _get_lr(self, t: int) -> 
List[float]:\n        assert False, 'should not be called as step is overridden'\n"
  },
  {
    "path": "timm/scheduler/poly_lr.py",
    "content": "\"\"\" Polynomial Scheduler\n\nPolynomial LR schedule with warmup, noise.\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nimport math\nimport logging\nfrom typing import List, Tuple, Union\n\nimport torch\n\nfrom .scheduler import Scheduler\n\n\n_logger = logging.getLogger(__name__)\n\n\nclass PolyLRScheduler(Scheduler):\n    \"\"\" Polynomial LR Scheduler w/ warmup, noise, and k-decay\n\n    k-decay option based on `k-decay: A New Method For Learning Rate Schedule` - https://arxiv.org/abs/2004.05909\n    \"\"\"\n\n    def __init__(\n            self,\n            optimizer: torch.optim.Optimizer,\n            t_initial: int,\n            power: float = 0.5,\n            lr_min: float = 0.,\n            cycle_mul: float = 1.,\n            cycle_decay: float = 1.,\n            cycle_limit: int = 1,\n            warmup_t: int = 0,\n            warmup_lr_init: float = 0.,\n            warmup_prefix: bool = False,\n            t_in_epochs: bool = True,\n            noise_range_t: Union[List[int], Tuple[int, int], int, None] = None,\n            noise_pct: float = 0.67,\n            noise_std: float = 1.0,\n            noise_seed: int = 42,\n            k_decay: float = 1.0,\n            initialize: bool = True,\n    ) -> None:\n        super().__init__(\n            optimizer,\n            param_group_field=\"lr\",\n            t_in_epochs=t_in_epochs,\n            noise_range_t=noise_range_t,\n            noise_pct=noise_pct,\n            noise_std=noise_std,\n            noise_seed=noise_seed,\n            initialize=initialize\n        )\n\n        assert t_initial > 0\n        assert lr_min >= 0\n        if t_initial == 1 and cycle_mul == 1 and cycle_decay == 1:\n            _logger.warning(\"Cosine annealing scheduler will have no effect on the learning \"\n                            \"rate since t_initial = t_mul = eta_mul = 1.\")\n        self.t_initial = t_initial\n        self.power = power\n        self.lr_min = lr_min\n        
self.cycle_mul = cycle_mul\n        self.cycle_decay = cycle_decay\n        self.cycle_limit = cycle_limit\n        self.warmup_t = warmup_t\n        self.warmup_lr_init = warmup_lr_init\n        self.warmup_prefix = warmup_prefix\n        self.k_decay = k_decay\n        if self.warmup_t:\n            self.warmup_steps = [(v - warmup_lr_init) / self.warmup_t for v in self.base_values]\n            super().update_groups(self.warmup_lr_init)\n        else:\n            self.warmup_steps = [1 for _ in self.base_values]\n\n    def _get_lr(self, t: int) -> List[float]:\n        if t < self.warmup_t:\n            lrs = [self.warmup_lr_init + t * s for s in self.warmup_steps]\n        else:\n            if self.warmup_prefix:\n                t = t - self.warmup_t\n\n            if self.cycle_mul != 1:\n                i = math.floor(math.log(1 - t / self.t_initial * (1 - self.cycle_mul), self.cycle_mul))\n                t_i = self.cycle_mul ** i * self.t_initial\n                t_curr = t - (1 - self.cycle_mul ** i) / (1 - self.cycle_mul) * self.t_initial\n            else:\n                i = t // self.t_initial\n                t_i = self.t_initial\n                t_curr = t - (self.t_initial * i)\n\n            gamma = self.cycle_decay ** i\n            lr_max_values = [v * gamma for v in self.base_values]\n            k = self.k_decay\n\n            if i < self.cycle_limit:\n                lrs = [\n                    self.lr_min + (lr_max - self.lr_min) * (1 - t_curr ** k / t_i ** k) ** self.power\n                    for lr_max in lr_max_values\n                ]\n            else:\n                lrs = [self.lr_min for _ in self.base_values]\n\n        return lrs\n\n    def get_cycle_length(self, cycles=0):\n        cycles = max(1, cycles or self.cycle_limit)\n        if self.cycle_mul == 1.0:\n            t = self.t_initial * cycles\n        else:\n            t = int(math.floor(-self.t_initial * (self.cycle_mul ** cycles - 1) / (1 - self.cycle_mul)))\n     
   return t + self.warmup_t if self.warmup_prefix else t\n"
  },
  {
    "path": "timm/scheduler/scheduler.py",
    "content": "import abc\nfrom abc import ABC\nfrom typing import Any, Dict, List, Optional, Tuple, Union\n\nimport torch\n\n\nclass Scheduler(ABC):\n    \"\"\" Parameter Scheduler Base Class\n    A scheduler base class that can be used to schedule any optimizer parameter groups.\n\n    Unlike the builtin PyTorch schedulers, this is intended to be consistently called\n    * At the END of each epoch, before incrementing the epoch count, to calculate next epoch's value\n    * At the END of each optimizer update, after incrementing the update count, to calculate next update's value\n\n    The schedulers built on this should try to remain as stateless as possible (for simplicity).\n\n    This family of schedulers is attempting to avoid the confusion of the meaning of 'last_epoch'\n    and -1 values for special behaviour. All epoch and update counts must be tracked in the training\n    code and explicitly passed in to the schedulers on the corresponding step or step_update call.\n\n    Based on ideas from:\n     * https://github.com/pytorch/fairseq/tree/master/fairseq/optim/lr_scheduler\n     * https://github.com/allenai/allennlp/tree/master/allennlp/training/learning_rate_schedulers\n    \"\"\"\n\n    def __init__(\n            self,\n            optimizer: torch.optim.Optimizer,\n            param_group_field: str,\n            t_in_epochs: bool = True,\n            noise_range_t: Union[List[int], Tuple[int, int], int, None] = None,\n            noise_type: str = 'normal',\n            noise_pct: float = 0.67,\n            noise_std: float = 1.0,\n            noise_seed: Optional[int] = None,\n            initialize: bool = True,\n    ) -> None:\n        self.optimizer = optimizer\n        self.param_group_field = param_group_field\n        self._initial_param_group_field = f\"initial_{param_group_field}\"\n        if initialize:\n            for i, group in enumerate(self.optimizer.param_groups):\n                if param_group_field not in group:\n                 
   raise KeyError(f\"{param_group_field} missing from param_groups[{i}]\")\n                group.setdefault(self._initial_param_group_field, group[param_group_field])\n        else:\n            for i, group in enumerate(self.optimizer.param_groups):\n                if self._initial_param_group_field not in group:\n                    raise KeyError(f\"{self._initial_param_group_field} missing from param_groups[{i}]\")\n        self.base_values = [group[self._initial_param_group_field] for group in self.optimizer.param_groups]\n        self.metric = None  # any point to having this for all?\n        self.t_in_epochs = t_in_epochs\n        self.noise_range_t = noise_range_t\n        self.noise_pct = noise_pct\n        self.noise_type = noise_type\n        self.noise_std = noise_std\n        self.noise_seed = noise_seed if noise_seed is not None else 42\n        self.update_groups(self.base_values)\n\n    def state_dict(self) -> Dict[str, Any]:\n        return {key: value for key, value in self.__dict__.items() if key != 'optimizer'}\n\n    def load_state_dict(self, state_dict: Dict[str, Any]) -> None:\n        self.__dict__.update(state_dict)\n\n    @abc.abstractmethod\n    def _get_lr(self, t: int) -> List[float]:\n        pass\n\n    def _get_values(self, t: int, on_epoch: bool = True) -> Optional[List[float]]:\n        proceed = (on_epoch and self.t_in_epochs) or (not on_epoch and not self.t_in_epochs)\n        if not proceed:\n            return None\n        return self._get_lr(t)\n\n    def step(self, epoch: int, metric: Optional[float] = None) -> None:\n        self.metric = metric\n        values = self._get_values(epoch, on_epoch=True)\n        if values is not None:\n            values = self._add_noise(values, epoch)\n            self.update_groups(values)\n\n    def step_update(self, num_updates: int, metric: Optional[float] = None) -> None:\n        self.metric = metric\n        values = self._get_values(num_updates, on_epoch=False)\n        if values 
is not None:\n            values = self._add_noise(values, num_updates)\n            self.update_groups(values)\n\n    def update_groups(self, values: Union[float, List[float]]) -> None:\n        if not isinstance(values, (list, tuple)):\n            values = [values] * len(self.optimizer.param_groups)\n        for param_group, value in zip(self.optimizer.param_groups, values):\n            if 'lr_scale' in param_group:\n                param_group[self.param_group_field] = value * param_group['lr_scale']\n            else:\n                param_group[self.param_group_field] = value\n\n    def _add_noise(self, lrs: List[float], t: int) -> List[float]:\n        if self._is_apply_noise(t):\n            noise = self._calculate_noise(t)\n            lrs = [v + v * noise for v in lrs]\n        return lrs\n\n    def _is_apply_noise(self, t: int) -> bool:\n        \"\"\"Return True if scheduler in noise range.\"\"\"\n        apply_noise = False\n        if self.noise_range_t is not None:\n            if isinstance(self.noise_range_t, (list, tuple)):\n                apply_noise = self.noise_range_t[0] <= t < self.noise_range_t[1]\n            else:\n                apply_noise = t >= self.noise_range_t\n        return apply_noise\n\n    def _calculate_noise(self, t) -> float:\n        g = torch.Generator()\n        g.manual_seed(self.noise_seed + t)\n        if self.noise_type == 'normal':\n            while True:\n                # resample if noise out of percent limit, brute force but shouldn't spin much\n                noise = torch.randn(1, generator=g).item()\n                if abs(noise) < self.noise_pct:\n                    return noise\n        else:\n            noise = 2 * (torch.rand(1, generator=g).item() - 0.5) * self.noise_pct\n        return noise\n"
  },
  {
    "path": "timm/scheduler/scheduler_factory.py",
    "content": "\"\"\" Scheduler Factory\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nfrom typing import List, Optional, Union\n\nfrom torch.optim import Optimizer\n\nfrom .cosine_lr import CosineLRScheduler\nfrom .multistep_lr import MultiStepLRScheduler\nfrom .plateau_lr import PlateauLRScheduler\nfrom .poly_lr import PolyLRScheduler\nfrom .step_lr import StepLRScheduler\nfrom .tanh_lr import TanhLRScheduler\n\n\ndef scheduler_kwargs(cfg, decreasing_metric: Optional[bool] = None):\n    \"\"\" cfg/argparse to kwargs helper\n    Convert scheduler args in argparse args or cfg (.dot) like object to keyword args.\n    \"\"\"\n    eval_metric = getattr(cfg, 'eval_metric', 'top1')\n    if decreasing_metric is not None:\n        plateau_mode = 'min' if decreasing_metric else 'max'\n    else:\n        plateau_mode = 'min' if 'loss' in eval_metric else 'max'\n    kwargs = dict(\n        sched=cfg.sched,\n        num_epochs=getattr(cfg, 'epochs', 100),\n        decay_epochs=getattr(cfg, 'decay_epochs', 30),\n        decay_milestones=getattr(cfg, 'decay_milestones', [30, 60]),\n        warmup_epochs=getattr(cfg, 'warmup_epochs', 5),\n        cooldown_epochs=getattr(cfg, 'cooldown_epochs', 0),\n        patience_epochs=getattr(cfg, 'patience_epochs', 10),\n        decay_rate=getattr(cfg, 'decay_rate', 0.1),\n        min_lr=getattr(cfg, 'min_lr', 0.),\n        warmup_lr=getattr(cfg, 'warmup_lr', 1e-5),\n        warmup_prefix=getattr(cfg, 'warmup_prefix', False),\n        noise=getattr(cfg, 'lr_noise', None),\n        noise_pct=getattr(cfg, 'lr_noise_pct', 0.67),\n        noise_std=getattr(cfg, 'lr_noise_std', 1.),\n        noise_seed=getattr(cfg, 'seed', 42),\n        cycle_mul=getattr(cfg, 'lr_cycle_mul', 1.),\n        cycle_decay=getattr(cfg, 'lr_cycle_decay', 0.1),\n        cycle_limit=getattr(cfg, 'lr_cycle_limit', 1),\n        k_decay=getattr(cfg, 'lr_k_decay', 1.0),\n        plateau_mode=plateau_mode,\n        step_on_epochs=not getattr(cfg, 
'sched_on_updates', False),\n    )\n    return kwargs\n\n\ndef create_scheduler(\n        args,\n        optimizer: Optimizer,\n        updates_per_epoch: int = 0,\n):\n    return create_scheduler_v2(\n        optimizer=optimizer,\n        **scheduler_kwargs(args),\n        updates_per_epoch=updates_per_epoch,\n    )\n\n\ndef create_scheduler_v2(\n        optimizer: Optimizer,\n        sched: str = 'cosine',\n        num_epochs: int = 300,\n        decay_epochs: int = 90,\n        decay_milestones: List[int] = (90, 180, 270),\n        cooldown_epochs: int = 0,\n        patience_epochs: int = 10,\n        decay_rate: float = 0.1,\n        min_lr: float = 0.,\n        warmup_lr: float = 1e-5,\n        warmup_epochs: int = 0,\n        warmup_prefix: bool = False,\n        noise: Union[float, List[float]] = None,\n        noise_pct: float = 0.67,\n        noise_std: float = 1.,\n        noise_seed: int = 42,\n        cycle_mul: float = 1.,\n        cycle_decay: float = 0.1,\n        cycle_limit: int = 1,\n        k_decay: float = 1.0,\n        plateau_mode: str = 'max',\n        step_on_epochs: bool = True,\n        updates_per_epoch: int = 0,\n):\n    t_initial = num_epochs\n    warmup_t = warmup_epochs\n    decay_t = decay_epochs\n    cooldown_t = cooldown_epochs\n\n    if not step_on_epochs:\n        assert updates_per_epoch > 0, 'updates_per_epoch must be set to number of dataloader batches'\n        t_initial = t_initial * updates_per_epoch\n        warmup_t = warmup_t * updates_per_epoch\n        decay_t = decay_t * updates_per_epoch\n        decay_milestones = [d * updates_per_epoch for d in decay_milestones]\n        cooldown_t = cooldown_t * updates_per_epoch\n\n    # warmup args\n    warmup_args = dict(\n        warmup_lr_init=warmup_lr,\n        warmup_t=warmup_t,\n        warmup_prefix=warmup_prefix,\n    )\n\n    # setup noise args for supporting schedulers\n    if noise is not None:\n        if isinstance(noise, (list, tuple)):\n            noise_range = 
[n * t_initial for n in noise]\n            if len(noise_range) == 1:\n                noise_range = noise_range[0]\n        else:\n            noise_range = noise * t_initial\n    else:\n        noise_range = None\n    noise_args = dict(\n        noise_range_t=noise_range,\n        noise_pct=noise_pct,\n        noise_std=noise_std,\n        noise_seed=noise_seed,\n    )\n\n    # setup cycle args for supporting schedulers\n    cycle_args = dict(\n        cycle_mul=cycle_mul,\n        cycle_decay=cycle_decay,\n        cycle_limit=cycle_limit,\n    )\n\n    lr_scheduler = None\n    if sched == 'cosine':\n        lr_scheduler = CosineLRScheduler(\n            optimizer,\n            t_initial=t_initial,\n            lr_min=min_lr,\n            t_in_epochs=step_on_epochs,\n            **cycle_args,\n            **warmup_args,\n            **noise_args,\n            k_decay=k_decay,\n        )\n    elif sched == 'tanh':\n        lr_scheduler = TanhLRScheduler(\n            optimizer,\n            t_initial=t_initial,\n            lr_min=min_lr,\n            t_in_epochs=step_on_epochs,\n            **cycle_args,\n            **warmup_args,\n            **noise_args,\n        )\n    elif sched == 'step':\n        lr_scheduler = StepLRScheduler(\n            optimizer,\n            decay_t=decay_t,\n            decay_rate=decay_rate,\n            t_in_epochs=step_on_epochs,\n            **warmup_args,\n            **noise_args,\n        )\n    elif sched == 'multistep':\n        lr_scheduler = MultiStepLRScheduler(\n            optimizer,\n            decay_t=decay_milestones,\n            decay_rate=decay_rate,\n            t_in_epochs=step_on_epochs,\n            **warmup_args,\n            **noise_args,\n        )\n    elif sched == 'plateau':\n        assert step_on_epochs, 'Plateau LR only supports step per epoch.'\n        warmup_args.pop('warmup_prefix', False)\n        lr_scheduler = PlateauLRScheduler(\n            optimizer,\n            decay_rate=decay_rate,\n  
          patience_t=patience_epochs,\n            cooldown_t=0,\n            **warmup_args,\n            lr_min=min_lr,\n            mode=plateau_mode,\n            **noise_args,\n        )\n    elif sched == 'poly':\n        lr_scheduler = PolyLRScheduler(\n            optimizer,\n            power=decay_rate,  # overloading 'decay_rate' as polynomial power\n            t_initial=t_initial,\n            lr_min=min_lr,\n            t_in_epochs=step_on_epochs,\n            k_decay=k_decay,\n            **cycle_args,\n            **warmup_args,\n            **noise_args,\n        )\n\n    if hasattr(lr_scheduler, 'get_cycle_length'):\n        # For cycle based schedulers (cosine, tanh, poly) recalculate total epochs w/ cycles & cooldown\n        # NOTE: Warmup prefix added in get_cycle_lengths() if enabled\n        t_with_cycles_and_cooldown = lr_scheduler.get_cycle_length() + cooldown_t\n        if step_on_epochs:\n            num_epochs = t_with_cycles_and_cooldown\n        else:\n            num_epochs = t_with_cycles_and_cooldown // updates_per_epoch\n    else:\n        if warmup_prefix:\n            num_epochs += warmup_epochs\n\n    return lr_scheduler, num_epochs\n"
  },
  {
    "path": "timm/scheduler/step_lr.py",
    "content": "\"\"\" Step Scheduler\n\nBasic step LR schedule with warmup, noise.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport math\nimport torch\nfrom typing import List, Tuple, Union\n\n\nfrom .scheduler import Scheduler\n\n\nclass StepLRScheduler(Scheduler):\n    \"\"\"\n    \"\"\"\n\n    def __init__(\n            self,\n            optimizer: torch.optim.Optimizer,\n            decay_t: float,\n            decay_rate: float = 1.,\n            warmup_t: int = 0,\n            warmup_lr_init: float = 0.,\n            warmup_prefix: bool = True,\n            t_in_epochs: bool = True,\n            noise_range_t: Union[List[int], Tuple[int, int], int, None] = None,\n            noise_pct: float = 0.67,\n            noise_std: float = 1.0,\n            noise_seed: int = 42,\n            initialize: bool = True,\n    ) -> None:\n        super().__init__(\n            optimizer,\n            param_group_field=\"lr\",\n            t_in_epochs=t_in_epochs,\n            noise_range_t=noise_range_t,\n            noise_pct=noise_pct,\n            noise_std=noise_std,\n            noise_seed=noise_seed,\n            initialize=initialize,\n        )\n\n        self.decay_t = decay_t\n        self.decay_rate = decay_rate\n        self.warmup_t = warmup_t\n        self.warmup_lr_init = warmup_lr_init\n        self.warmup_prefix = warmup_prefix\n        if self.warmup_t:\n            self.warmup_steps = [(v - warmup_lr_init) / self.warmup_t for v in self.base_values]\n            super().update_groups(self.warmup_lr_init)\n        else:\n            self.warmup_steps = [1 for _ in self.base_values]\n\n    def _get_lr(self, t: int) -> List[float]:\n        if t < self.warmup_t:\n            lrs = [self.warmup_lr_init + t * s for s in self.warmup_steps]\n        else:\n            if self.warmup_prefix:\n                t = t - self.warmup_t\n            lrs = [v * (self.decay_rate ** (t // self.decay_t)) for v in self.base_values]\n        return lrs\n"
  },
  {
    "path": "timm/scheduler/tanh_lr.py",
    "content": "\"\"\" TanH Scheduler\n\nTanH schedule with warmup, cycle/restarts, noise.\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nimport logging\nimport math\nimport numpy as np\nimport torch\nfrom typing import List, Tuple, Union\n\nfrom .scheduler import Scheduler\n\n\n_logger = logging.getLogger(__name__)\n\n\nclass TanhLRScheduler(Scheduler):\n    \"\"\"\n    Hyberbolic-Tangent decay with restarts.\n    This is described in the paper https://arxiv.org/abs/1806.01593\n    \"\"\"\n\n    def __init__(\n            self,\n            optimizer: torch.optim.Optimizer,\n            t_initial: int,\n            lb: float = -7.,\n            ub: float = 3.,\n            lr_min: float = 0.,\n            cycle_mul: float = 1.,\n            cycle_decay: float = 1.,\n            cycle_limit: int = 1,\n            warmup_t: int = 0,\n            warmup_lr_init: float = 0.,\n            warmup_prefix: bool = False,\n            t_in_epochs: bool = True,\n            noise_range_t: Union[List[int], Tuple[int, int], int, None] = None,\n            noise_pct: float = 0.67,\n            noise_std: float = 1.0,\n            noise_seed: int = 42,\n            initialize: bool = True,\n    ) -> None:\n        super().__init__(\n            optimizer,\n            param_group_field=\"lr\",\n            t_in_epochs=t_in_epochs,\n            noise_range_t=noise_range_t,\n            noise_pct=noise_pct,\n            noise_std=noise_std,\n            noise_seed=noise_seed,\n            initialize=initialize,\n        )\n\n        assert t_initial > 0\n        assert lr_min >= 0\n        assert lb < ub\n        assert cycle_limit >= 0\n        assert warmup_t >= 0\n        assert warmup_lr_init >= 0\n        self.lb = lb\n        self.ub = ub\n        self.t_initial = t_initial\n        self.lr_min = lr_min\n        self.cycle_mul = cycle_mul\n        self.cycle_decay = cycle_decay\n        self.cycle_limit = cycle_limit\n        self.warmup_t = warmup_t\n        
self.warmup_lr_init = warmup_lr_init\n        self.warmup_prefix = warmup_prefix\n        if self.warmup_t:\n            t_v = self.base_values if self.warmup_prefix else self._get_lr(self.warmup_t)\n            self.warmup_steps = [(v - warmup_lr_init) / self.warmup_t for v in t_v]\n            super().update_groups(self.warmup_lr_init)\n        else:\n            self.warmup_steps = [1 for _ in self.base_values]\n\n    def _get_lr(self, t: int) -> List[float]:\n        if t < self.warmup_t:\n            lrs = [self.warmup_lr_init + t * s for s in self.warmup_steps]\n        else:\n            if self.warmup_prefix:\n                t = t - self.warmup_t\n\n            if self.cycle_mul != 1:\n                i = math.floor(math.log(1 - t / self.t_initial * (1 - self.cycle_mul), self.cycle_mul))\n                t_i = self.cycle_mul ** i * self.t_initial\n                t_curr = t - (1 - self.cycle_mul ** i) / (1 - self.cycle_mul) * self.t_initial\n            else:\n                i = t // self.t_initial\n                t_i = self.t_initial\n                t_curr = t - (self.t_initial * i)\n\n            if i < self.cycle_limit:\n                gamma = self.cycle_decay ** i\n                lr_max_values = [v * gamma for v in self.base_values]\n\n                tr = t_curr / t_i\n                lrs = [\n                    self.lr_min + 0.5 * (lr_max - self.lr_min) * (1 - math.tanh(self.lb * (1. - tr) + self.ub * tr))\n                    for lr_max in lr_max_values\n                ]\n            else:\n                lrs = [self.lr_min for _ in self.base_values]\n        return lrs\n\n    def get_cycle_length(self, cycles=0):\n        cycles = max(1, cycles or self.cycle_limit)\n        if self.cycle_mul == 1.0:\n            t = self.t_initial * cycles\n        else:\n            t = int(math.floor(-self.t_initial * (self.cycle_mul ** cycles - 1) / (1 - self.cycle_mul)))\n        return t + self.warmup_t if self.warmup_prefix else t\n"
  },
  {
    "path": "timm/task/__init__.py",
    "content": "\"\"\"Training task abstractions for timm.\n\nThis module provides task-based abstractions for training loops where each task\nencapsulates both the forward pass and loss computation, returning a dictionary\nwith loss components and outputs for logging.\n\"\"\"\nfrom .task import TrainingTask\nfrom .classification import ClassificationTask\nfrom .distillation import DistillationTeacher, LogitDistillationTask, FeatureDistillationTask\nfrom .token_distillation import TokenDistillationTeacher, TokenDistillationTask\n\n__all__ = [\n    'TrainingTask',\n    'ClassificationTask',\n    'DistillationTeacher',\n    'LogitDistillationTask',\n    'FeatureDistillationTask',\n    'TokenDistillationTeacher',\n    'TokenDistillationTask',\n]\n"
  },
  {
    "path": "timm/task/classification.py",
    "content": "\"\"\"Classification training task.\"\"\"\nimport logging\nfrom typing import Callable, Dict, Optional, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom .task import TrainingTask\n\n_logger = logging.getLogger(__name__)\n\n\nclass ClassificationTask(TrainingTask):\n    \"\"\"Standard supervised classification task.\n\n    Simple task that performs a forward pass through the model and computes\n    the classification loss.\n\n    Args:\n        model: The model to train\n        criterion: Loss function (e.g., CrossEntropyLoss)\n        device: Device for task tensors/buffers\n        dtype: Dtype for task tensors/buffers\n        verbose: Enable info logging\n\n    Example:\n        >>> task = ClassificationTask(model, nn.CrossEntropyLoss(), device=torch.device('cuda'))\n        >>> result = task(input, target)\n        >>> result['loss'].backward()\n    \"\"\"\n\n    def __init__(\n            self,\n            model: nn.Module,\n            criterion: Union[nn.Module, Callable],\n            device: Optional[torch.device] = None,\n            dtype: Optional[torch.dtype] = None,\n            verbose: bool = True,\n    ):\n        super().__init__(device=device, dtype=dtype, verbose=verbose)\n        self.model = model\n        self.criterion = criterion\n\n        if self.verbose:\n            loss_name = getattr(criterion, '__name__', None) or type(criterion).__name__\n            _logger.info(f\"ClassificationTask: criterion={loss_name}\")\n\n    def prepare_distributed(\n            self,\n            device_ids: Optional[list] = None,\n            **ddp_kwargs\n    ) -> 'ClassificationTask':\n        \"\"\"Prepare task for distributed training.\n\n        Wraps the model in DistributedDataParallel (DDP).\n\n        Args:\n            device_ids: List of device IDs for DDP (e.g., [local_rank])\n            **ddp_kwargs: Additional arguments passed to DistributedDataParallel\n\n        Returns:\n            self (for method chaining)\n      
  \"\"\"\n        from torch.nn.parallel import DistributedDataParallel as DDP\n        self.model = DDP(self.model, device_ids=device_ids, **ddp_kwargs)\n        return self\n\n    def forward(\n            self,\n            input: torch.Tensor,\n            target: torch.Tensor,\n    ) -> Dict[str, torch.Tensor]:\n        \"\"\"Forward pass through model and compute classification loss.\n\n        Args:\n            input: Input tensor [B, C, H, W]\n            target: Target labels [B]\n\n        Returns:\n            Dictionary containing:\n                - 'loss': Classification loss\n                - 'output': Model logits\n        \"\"\"\n        output = self.model(input)\n        loss = self.criterion(output, target)\n\n        return {\n            'loss': loss,\n            'output': output,\n        }\n"
  },
  {
    "path": "timm/task/distillation.py",
    "content": "\"\"\"Knowledge distillation training tasks and components.\"\"\"\nimport logging\nfrom typing import Dict, Optional, Tuple, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.models import create_model\nfrom timm.utils import unwrap_model\n\nfrom .task import TrainingTask\n\n\n_logger = logging.getLogger(__name__)\n\n\nclass DistillationTeacher(nn.Module):\n    \"\"\"Wrapper for a teacher model used in knowledge distillation.\n\n    Creates and manages a pre-trained teacher model for knowledge distillation,\n    handling model creation and normalization differences between teacher and student.\n\n    Can be created from:\n    - A model name string (creates the model internally with pretrained weights)\n    - An existing nn.Module (wraps it with the necessary interface)\n\n    Args:\n        model_name_or_module: Either a model name string or an nn.Module\n        num_classes: Number of output classes (required if model_name_or_module is a string)\n        in_chans: Number of input channels (used if model_name_or_module is a string)\n        pretrained_path: Optional path to pretrained weights (used if model_name_or_module is a string)\n        device: Device to place the model on\n        dtype: Model dtype (uses float32 if None)\n    \"\"\"\n\n    def __init__(\n            self,\n            model_name_or_module: Union[str, nn.Module],\n            num_classes: Optional[int] = None,\n            in_chans: int = 3,\n            pretrained_path: Optional[str] = None,\n            device: Optional[torch.device] = None,\n            dtype: Optional[torch.dtype] = None,\n    ):\n        super().__init__()\n\n        if isinstance(model_name_or_module, str):\n            _logger.info(f\"Creating KD teacher model: '{model_name_or_module}'\")\n\n            pretrained_kwargs = {'pretrained': True}\n            if pretrained_path:\n                pretrained_kwargs['pretrained_cfg_overlay'] = dict(\n                 
   file=pretrained_path,\n                    num_classes=num_classes,\n                )\n\n            model = create_model(\n                model_name=model_name_or_module,\n                num_classes=num_classes,\n                in_chans=in_chans,\n                device=device,\n                dtype=dtype,\n                **pretrained_kwargs,\n            )\n        elif isinstance(model_name_or_module, nn.Module):\n            model = model_name_or_module\n        else:\n            raise TypeError(\n                f\"model_name_or_module must be a string or nn.Module, got {type(model_name_or_module).__name__}\"\n            )\n\n        model.eval()\n        self.model = model\n\n        # Get normalization values from pretrained_cfg if available\n        model_unwrapped = unwrap_model(model)\n        if hasattr(model_unwrapped, 'pretrained_cfg'):\n            mean = model_unwrapped.pretrained_cfg.get('mean', (0.485, 0.456, 0.406))\n            std = model_unwrapped.pretrained_cfg.get('std', (0.229, 0.224, 0.225))\n        else:\n            mean = (0.485, 0.456, 0.406)\n            std = (0.229, 0.224, 0.225)\n\n        mean_kd = torch.tensor(mean, device=device, dtype=dtype).view(1, -1, 1, 1)\n        std_kd = torch.tensor(std, device=device, dtype=dtype).view(1, -1, 1, 1)\n        self.register_buffer('mean_kd', mean_kd, persistent=False)\n        self.register_buffer('std_kd', std_kd, persistent=False)\n\n    def forward(\n            self,\n            input: torch.Tensor,\n            return_features: bool = False,\n    ) -> torch.Tensor:\n        \"\"\"Forward pass through teacher model.\n\n        Args:\n            input: Input tensor (should already be normalized for teacher)\n            return_features: Whether to return pooled pre-logits features instead of logits\n\n        Returns:\n            Logits or pooled pre-logits features depending on return_features flag\n        \"\"\"\n        if return_features:\n            if not 
hasattr(self.model, 'forward_features') or not hasattr(self.model, 'forward_head'):\n                raise ValueError(\n                    f\"Model {self.model.__class__.__name__} does not support feature extraction. \"\n                    \"Ensure the model has 'forward_features' and 'forward_head' methods.\"\n                )\n            feature_map = self.model.forward_features(input)\n            return self.model.forward_head(feature_map, pre_logits=True)\n        else:\n            return self.model(input)\n\n    def normalize_input(\n            self,\n            input: torch.Tensor,\n            student_mean: Optional[torch.Tensor] = None,\n            student_std: Optional[torch.Tensor] = None,\n    ) -> torch.Tensor:\n        \"\"\"Normalize input to match teacher's expected normalization.\n\n        Args:\n            input: Input tensor (already normalized for student)\n            student_mean: Student normalization mean buffer [1, 3, 1, 1]\n            student_std: Student normalization std buffer [1, 3, 1, 1]\n\n        Returns:\n            Input tensor normalized for the teacher model\n        \"\"\"\n        if student_mean is None or student_std is None:\n            return input\n        if torch.equal(student_mean, self.mean_kd) and torch.equal(student_std, self.std_kd):\n            return input\n        return (input * student_std + student_mean - self.mean_kd) / self.std_kd\n\n\ndef _resolve_teacher(\n        teacher: Union[str, nn.Module, DistillationTeacher],\n        student_model: nn.Module,\n        pretrained_path: Optional[str],\n        device: Optional[torch.device],\n        dtype: Optional[torch.dtype],\n) -> DistillationTeacher:\n    \"\"\"Resolve teacher input to a DistillationTeacher instance.\n\n    Args:\n        teacher: Model name string, nn.Module, or DistillationTeacher\n        student_model: Student model to infer num_classes/in_chans from\n        pretrained_path: Optional path to teacher pretrained weights\n      
  device: Device for teacher\n        dtype: Dtype for teacher\n\n    Returns:\n        DistillationTeacher instance\n    \"\"\"\n    if isinstance(teacher, DistillationTeacher):\n        return teacher\n\n    # Get num_classes and in_chans from student\n    student_unwrapped = unwrap_model(student_model)\n    num_classes = student_unwrapped.num_classes\n    in_chans = student_unwrapped.in_chans\n\n    return DistillationTeacher(\n        model_name_or_module=teacher,\n        num_classes=num_classes,\n        in_chans=in_chans,\n        pretrained_path=pretrained_path,\n        device=device,\n        dtype=dtype,\n    )\n\n\nclass LogitDistillationTask(TrainingTask):\n    \"\"\"Logit-based knowledge distillation task.\n\n    Performs distillation by matching student and teacher output logits using\n    KL divergence with temperature scaling.\n\n    Loss weighting supports two modes:\n    1. Independent weights: loss = task_loss_weight * task_loss + distill_loss_weight * distill_loss\n    2. 
Complementary mode: loss = task_loss_weight * task_loss + (1 - task_loss_weight) * distill_loss\n       (used when only task_loss_weight is specified)\n\n    Args:\n        student_model: Student model to train\n        teacher_model: Teacher model - can be a model name string, nn.Module, or DistillationTeacher\n        criterion: Task loss function (default: CrossEntropyLoss)\n        teacher_pretrained_path: Path to teacher pretrained weights (used when teacher_model is a string)\n        loss_type: Type of distillation loss (currently only 'kl' supported)\n        distill_loss_weight: Weight for distillation loss\n        task_loss_weight: Weight for task loss\n        temperature: Softmax temperature for distillation (typical values: 1-4)\n        device: Device for task tensors/buffers\n        dtype: Dtype for task tensors/buffers\n        verbose: Enable info logging\n\n    Example:\n        >>> # With model name string (num_classes/in_chans inferred from student)\n        >>> task = LogitDistillationTask(\n        ...     student_model=model, teacher_model='resnet50',\n        ...     criterion=nn.CrossEntropyLoss(),\n        ...     task_loss_weight=0.3, temperature=4.0,\n        ...     device=torch.device('cuda'),\n        ... )\n        >>> # With raw model\n        >>> task = LogitDistillationTask(\n        ...     student_model=model, teacher_model=my_teacher_model,\n        ...     criterion=nn.CrossEntropyLoss(),\n        ...     task_loss_weight=0.3, temperature=4.0,\n        ... 
)\n    \"\"\"\n\n    def __init__(\n            self,\n            student_model: nn.Module,\n            teacher_model: Union[str, nn.Module, DistillationTeacher],\n            criterion: Optional[nn.Module] = None,\n            teacher_pretrained_path: Optional[str] = None,\n            loss_type: str = 'kl',\n            distill_loss_weight: Optional[float] = None,\n            task_loss_weight: Optional[float] = None,\n            temperature: float = 1.0,\n            device: Optional[torch.device] = None,\n            dtype: Optional[torch.dtype] = None,\n            verbose: bool = True,\n    ):\n        super().__init__(device=device, dtype=dtype, verbose=verbose)\n\n        # Resolve teacher to DistillationTeacher\n        teacher = _resolve_teacher(\n            teacher_model,\n            student_model,\n            teacher_pretrained_path,\n            self.device,\n            self.dtype,\n        )\n\n        self.student = student_model\n        self.teacher = teacher\n        self.criterion = criterion if criterion is not None else nn.CrossEntropyLoss()\n        self.loss_type = loss_type\n        self.temperature = temperature\n\n        if loss_type != 'kl':\n            raise ValueError(f\"Unsupported loss_type '{loss_type}'. 
Currently only 'kl' is supported.\")\n\n        # Register student normalization values as non-persistent buffers\n        student_unwrapped = unwrap_model(student_model)\n        student_mean = torch.tensor(\n            student_unwrapped.pretrained_cfg['mean'],\n            device=self.device,\n            dtype=self.dtype,\n        ).view(1, -1, 1, 1)\n        student_std = torch.tensor(\n            student_unwrapped.pretrained_cfg['std'],\n            device=self.device,\n            dtype=self.dtype,\n        ).view(1, -1, 1, 1)\n        self.register_buffer('student_mean', student_mean, persistent=False)\n        self.register_buffer('student_std', student_std, persistent=False)\n\n        # Determine weighting mode\n        if distill_loss_weight is not None:\n            # Mode 1: distill_weight specified - independent weights (task defaults to 1.0 if not set)\n            self.distill_loss_weight = distill_loss_weight\n            self.task_loss_weight = task_loss_weight if task_loss_weight is not None else 1.0\n            if self.verbose:\n                _logger.info(\n                    f\"LogitDistillationTask: Independent weights - \"\n                    f\"task_weight={self.task_loss_weight}, distill_weight={distill_loss_weight}\"\n                )\n        elif task_loss_weight is not None:\n            # Mode 2: only task_weight specified - complementary mode (distill = 1 - task)\n            self.task_loss_weight = task_loss_weight\n            self.distill_loss_weight = 1.0 - task_loss_weight\n            if self.verbose:\n                _logger.info(\n                    f\"LogitDistillationTask: Complementary mode - \"\n                    f\"task_weight={task_loss_weight}, distill_weight={self.distill_loss_weight}\"\n                )\n        else:\n            # Mode 3: neither specified - equal weights (both 1.0)\n            self.distill_loss_weight = 1.0\n            self.task_loss_weight = 1.0\n            if self.verbose:\n        
        _logger.info(\n                    f\"LogitDistillationTask: Default equal weights - \"\n                    f\"task_weight={self.task_loss_weight}, distill_weight={self.distill_loss_weight}\"\n                )\n\n        if self.verbose:\n            _logger.info(\n                f\"LogitDistillationTask: loss_type={loss_type}, temperature={temperature}\"\n            )\n\n    def prepare_distributed(\n            self,\n            device_ids: Optional[list] = None,\n            **ddp_kwargs\n    ) -> 'LogitDistillationTask':\n        \"\"\"Prepare task for distributed training.\n\n        Wraps the student model in DistributedDataParallel (DDP) while leaving\n        the frozen teacher model unwrapped.\n\n        Args:\n            device_ids: List of device IDs for DDP (e.g., [local_rank])\n            **ddp_kwargs: Additional arguments passed to DistributedDataParallel\n\n        Returns:\n            self (for method chaining)\n        \"\"\"\n        from torch.nn.parallel import DistributedDataParallel as DDP\n\n        for param in self.teacher.parameters():\n            param.requires_grad = False\n\n        self.student = DDP(self.student, device_ids=device_ids, **ddp_kwargs)\n        return self\n\n    def forward(\n            self,\n            input: torch.Tensor,\n            target: torch.Tensor,\n    ) -> Dict[str, torch.Tensor]:\n        \"\"\"Forward pass with logit distillation.\n\n        Args:\n            input: Input tensor [B, C, H, W]\n            target: Target labels [B]\n\n        Returns:\n            Dictionary containing:\n                - 'loss': Combined training loss (task + distillation)\n                - 'output': Student logits (for metrics)\n                - 'task_loss': Classification loss component\n                - 'kd_loss': Logit distillation loss component\n        \"\"\"\n        student_logits = self.student(input)\n        task_loss = self.criterion(student_logits, target)\n\n        with 
torch.no_grad():\n            input_kd = self.teacher.normalize_input(input, self.student_mean, self.student_std)\n            teacher_logits = self.teacher(input_kd.detach(), return_features=False)\n\n        prob_s = F.log_softmax(student_logits / self.temperature, dim=-1)\n        prob_t = F.log_softmax(teacher_logits / self.temperature, dim=-1)\n        kd_loss = F.kl_div(prob_s, prob_t, reduction='batchmean', log_target=True) * (self.temperature ** 2)\n\n        total_loss = self.task_loss_weight * task_loss + self.distill_loss_weight * kd_loss\n\n        return {\n            'loss': total_loss,\n            'output': student_logits,\n            'task_loss': task_loss,\n            'kd_loss': kd_loss,\n        }\n\n\nclass FeatureDistillationTrainableModule(nn.Module):\n    \"\"\"Trainable module for feature distillation.\n\n    Wraps student model and projection layer into a single module where all\n    trainable forward operations happen inside forward(). This ensures proper\n    DDP wrapping when the module is used with DistributedDataParallel.\n    \"\"\"\n\n    def __init__(\n            self,\n            student_model: nn.Module,\n            projection: Optional[nn.Module] = None,\n    ):\n        \"\"\" Create trainable module wrapper for feature distillation.\n\n        Args:\n            student_model: Student model to train\n            projection: Optional projection layer (Linear layer or None)\n        \"\"\"\n        super().__init__()\n        self.student = student_model\n        self.projection = projection\n\n    def forward(self, input: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:\n        \"\"\"Forward pass through student and projection.\n\n        Args:\n            input: Input tensor [B, C, H, W]\n\n        Returns:\n            Tuple of (student_logits, student_features) where features are\n            optionally projected to match teacher dimension.\n        \"\"\"\n        feature_map = 
self.student.forward_features(input)\n        student_logits = self.student.forward_head(feature_map)\n        student_features = self.student.forward_head(feature_map, pre_logits=True)\n\n        if self.projection is not None:\n            student_features = self.projection(student_features)\n\n        return student_logits, student_features\n\n\nclass FeatureDistillationTask(TrainingTask):\n    \"\"\"Feature-based knowledge distillation task.\n\n    Performs distillation by matching student and teacher intermediate features\n    (pooled pre-logits) using MSE loss. Automatically creates a projection layer\n    if student and teacher feature dimensions differ.\n\n    Loss weighting supports two modes:\n    1. Independent weights: loss = task_loss_weight * task_loss + distill_loss_weight * distill_loss\n    2. Complementary mode: loss = task_loss_weight * task_loss + (1 - task_loss_weight) * distill_loss\n       (used when only task_loss_weight is specified)\n\n    Args:\n        student_model: Student model to train\n        teacher_model: Teacher model - can be a model name string, nn.Module, or DistillationTeacher\n        criterion: Task loss function (default: CrossEntropyLoss)\n        teacher_pretrained_path: Path to teacher pretrained weights (used when teacher_model is a string)\n        distill_loss_weight: Weight for distillation loss\n        task_loss_weight: Weight for task loss\n        student_feature_dim: Student pre-logits dimension (auto-detected if None)\n        teacher_feature_dim: Teacher pre-logits dimension (auto-detected if None)\n        device: Device for task tensors/buffers\n        dtype: Dtype for task tensors/buffers\n        verbose: Enable info logging\n\n    Example:\n        >>> # With model name string (num_classes/in_chans inferred from student)\n        >>> task = FeatureDistillationTask(\n        ...     student_model=model, teacher_model='resnet50',\n        ...     criterion=nn.CrossEntropyLoss(),\n        ...     
distill_loss_weight=5.0, task_loss_weight=1.0,\n        ...     device=torch.device('cuda'),\n        ... )\n    \"\"\"\n\n    def __init__(\n            self,\n            student_model: nn.Module,\n            teacher_model: Union[str, nn.Module, DistillationTeacher],\n            criterion: Optional[nn.Module] = None,\n            teacher_pretrained_path: Optional[str] = None,\n            distill_loss_weight: Optional[float] = None,\n            task_loss_weight: Optional[float] = None,\n            student_feature_dim: Optional[int] = None,\n            teacher_feature_dim: Optional[int] = None,\n            device: Optional[torch.device] = None,\n            dtype: Optional[torch.dtype] = None,\n            verbose: bool = True,\n    ):\n        super().__init__(device=device, dtype=dtype, verbose=verbose)\n\n        # Resolve teacher to DistillationTeacher\n        teacher = _resolve_teacher(\n            teacher_model,\n            student_model,\n            teacher_pretrained_path,\n            self.device,\n            self.dtype,\n        )\n\n        self.teacher = teacher\n        self.criterion = criterion if criterion is not None else nn.CrossEntropyLoss()\n\n        # Determine weighting mode\n        if distill_loss_weight is not None:\n            # Mode 1: distill_weight specified - independent weights (task defaults to 1.0 if not set)\n            self.distill_loss_weight = distill_loss_weight\n            self.task_loss_weight = task_loss_weight if task_loss_weight is not None else 1.0\n            if self.verbose:\n                _logger.info(\n                    f\"FeatureDistillationTask: Independent weights - \"\n                    f\"task_weight={self.task_loss_weight}, distill_weight={distill_loss_weight}\"\n                )\n        elif task_loss_weight is not None:\n            # Mode 2: only task_weight specified - complementary mode (distill = 1 - task)\n            self.task_loss_weight = task_loss_weight\n            
self.distill_loss_weight = 1.0 - task_loss_weight\n            if self.verbose:\n                _logger.info(\n                    f\"FeatureDistillationTask: Complementary mode - \"\n                    f\"task_weight={task_loss_weight}, distill_weight={self.distill_loss_weight}\"\n                )\n        else:\n            # Mode 3: neither specified - equal weights (both 1.0)\n            self.distill_loss_weight = 1.0\n            self.task_loss_weight = 1.0\n            if self.verbose:\n                _logger.info(\n                    f\"FeatureDistillationTask: Default equal weights - \"\n                    f\"task_weight={self.task_loss_weight}, distill_weight={self.distill_loss_weight}\"\n                )\n\n        # Auto-detect feature dimensions if not provided\n        if student_feature_dim is None:\n            student_feature_dim = self._detect_feature_dim(student_model)\n        if teacher_feature_dim is None:\n            teacher_feature_dim = self._detect_feature_dim(teacher.model)\n\n        # Create projection layer if dimensions differ\n        projection = None\n        if student_feature_dim != teacher_feature_dim:\n            if self.verbose:\n                _logger.info(\n                    f\"Creating projection layer: {student_feature_dim} -> {teacher_feature_dim}\"\n                )\n            projection = nn.Linear(student_feature_dim, teacher_feature_dim, device=self.device, dtype=self.dtype)\n        else:\n            if self.verbose:\n                _logger.info(\"Feature dimensions match, no projection needed\")\n\n        self.trainable_module = FeatureDistillationTrainableModule(student_model, projection)\n\n        # Register student normalization values\n        student_unwrapped = unwrap_model(student_model)\n        student_mean = torch.tensor(\n            student_unwrapped.pretrained_cfg['mean'],\n            device=self.device,\n            dtype=self.dtype,\n        ).view(1, -1, 1, 1)\n        student_std 
= torch.tensor(\n            student_unwrapped.pretrained_cfg['std'],\n            device=self.device,\n            dtype=self.dtype,\n        ).view(1, -1, 1, 1)\n        self.register_buffer('student_mean', student_mean, persistent=False)\n        self.register_buffer('student_std', student_std, persistent=False)\n\n        if self.verbose:\n            _logger.info(\n                f\"FeatureDistillationTask: \"\n                f\"student_dim={student_feature_dim}, teacher_dim={teacher_feature_dim}\"\n            )\n\n    @staticmethod\n    def _detect_feature_dim(model: nn.Module) -> int:\n        \"\"\"Auto-detect feature dimension from model.\"\"\"\n        model = unwrap_model(model)\n\n        if hasattr(model, 'head_hidden_size'):\n            return model.head_hidden_size\n        elif hasattr(model, 'num_features'):\n            return model.num_features\n        else:\n            raise ValueError(\n                \"Cannot auto-detect feature dimension. Model must have \"\n                \"'head_hidden_size' or 'num_features' attribute, or you must \"\n                \"specify student_feature_dim and teacher_feature_dim explicitly.\"\n            )\n\n    def prepare_distributed(\n            self,\n            device_ids: Optional[list] = None,\n            **ddp_kwargs,\n    ) -> 'FeatureDistillationTask':\n        \"\"\"Prepare task for distributed training.\n\n        Wraps the trainable module (student + projection) in DistributedDataParallel\n        (DDP) while leaving the frozen teacher model unwrapped.\n\n        Args:\n            device_ids: List of device IDs for DDP (e.g., [local_rank])\n            **ddp_kwargs: Additional arguments passed to DistributedDataParallel\n\n        Returns:\n            self (for method chaining)\n        \"\"\"\n        from torch.nn.parallel import DistributedDataParallel as DDP\n\n        for param in self.teacher.parameters():\n            param.requires_grad = False\n\n        self.trainable_module = 
DDP(self.trainable_module, device_ids=device_ids, **ddp_kwargs)\n        return self\n\n    def forward(\n            self,\n            input: torch.Tensor,\n            target: torch.Tensor,\n    ) -> Dict[str, torch.Tensor]:\n        \"\"\"Forward pass with feature distillation.\n\n        Args:\n            input: Input tensor [B, C, H, W]\n            target: Target labels [B]\n\n        Returns:\n            Dictionary containing:\n                - 'loss': Combined training loss (task + distillation)\n                - 'output': Student logits (for metrics)\n                - 'task_loss': Classification loss component\n                - 'kd_loss': Feature distillation loss component\n        \"\"\"\n        student_logits, student_features = self.trainable_module(input)\n        task_loss = self.criterion(student_logits, target)\n\n        with torch.no_grad():\n            input_kd = self.teacher.normalize_input(input, self.student_mean, self.student_std)\n            teacher_features = self.teacher(input_kd.detach(), return_features=True)\n\n        kd_loss = F.mse_loss(student_features, teacher_features)\n        total_loss = self.task_loss_weight * task_loss + self.distill_loss_weight * kd_loss\n\n        return {\n            'loss': total_loss,\n            'output': student_logits,\n            'task_loss': task_loss,\n            'kd_loss': kd_loss,\n        }\n"
  },
  {
    "path": "timm/task/task.py",
    "content": "\"\"\"Base training task abstraction.\n\nThis module provides the base TrainingTask class that encapsulates a complete\nforward pass including loss computation. Tasks return a dictionary with loss\ncomponents and outputs for logging.\n\"\"\"\nfrom typing import Dict, Optional\n\nimport torch\nimport torch.nn as nn\n\n\nclass TrainingTask(nn.Module):\n    \"\"\"Base class for training tasks.\n\n    A training task encapsulates a complete forward pass including loss computation.\n    Tasks return a dictionary containing the training loss and other components for logging.\n\n    The returned dictionary must contain:\n        - 'loss': The training loss for backward pass (required)\n        - 'output': Model output/logits for metric computation (recommended)\n        - Other task-specific loss components for logging (optional)\n\n    Args:\n        device: Device for task tensors/buffers (defaults to cpu)\n        dtype: Dtype for task tensors/buffers (defaults to torch default)\n        verbose: Enable info logging\n\n    Example:\n        >>> task = SomeTask(model, criterion, device=torch.device('cuda'))\n        >>>\n        >>> # Prepare for distributed training (if needed)\n        >>> if distributed:\n        >>>     task.prepare_distributed(device_ids=[local_rank])\n        >>>\n        >>> # Training loop\n        >>> result = task(input, target)\n        >>> result['loss'].backward()\n    \"\"\"\n\n    def __init__(\n            self,\n            device: Optional[torch.device] = None,\n            dtype: Optional[torch.dtype] = None,\n            verbose: bool = True,\n    ):\n        super().__init__()\n        self.device = device if device is not None else torch.device('cpu')\n        self.dtype = dtype if dtype is not None else torch.get_default_dtype()\n        self.verbose = verbose\n\n    def to(self, *args, **kwargs):\n        \"\"\"Move task to device/dtype, keeping self.device and self.dtype in sync.\"\"\"\n        dummy = 
torch.empty(0).to(*args, **kwargs)\n        self.device = dummy.device\n        self.dtype = dummy.dtype\n        return super().to(*args, **kwargs)\n\n    def prepare_distributed(\n            self,\n            device_ids: Optional[list] = None,\n            **ddp_kwargs\n    ) -> 'TrainingTask':\n        \"\"\"Prepare task for distributed training.\n\n        This method wraps trainable components in DistributedDataParallel (DDP)\n        while leaving non-trainable components (like frozen teacher models) unwrapped.\n\n        Should be called after task initialization but before training loop.\n\n        Args:\n            device_ids: List of device IDs for DDP (e.g., [local_rank])\n            **ddp_kwargs: Additional arguments passed to DistributedDataParallel\n\n        Returns:\n            self (for method chaining)\n\n        Example:\n            >>> task = LogitDistillationTask(student, teacher, criterion)\n            >>> task.prepare_distributed(device_ids=[args.local_rank])\n            >>> task = torch.compile(task)  # Compile after DDP\n        \"\"\"\n        # Default implementation - subclasses override if they need DDP\n        return self\n\n    def forward(\n            self,\n            input: torch.Tensor,\n            target: torch.Tensor,\n    ) -> Dict[str, torch.Tensor]:\n        \"\"\"Perform forward pass and compute loss.\n\n        Args:\n            input: Input tensor [B, C, H, W]\n            target: Target labels [B]\n\n        Returns:\n            Dictionary with at least 'loss' key containing the training loss\n        \"\"\"\n        raise NotImplementedError\n"
  },
  {
    "path": "timm/task/token_distillation.py",
    "content": "\"\"\"Token-based distillation training task for models with distillation heads.\"\"\"\nimport logging\nfrom typing import Dict, Optional, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom timm.models import create_model\nfrom timm.utils import unwrap_model\n\nfrom .task import TrainingTask\n\n_logger = logging.getLogger(__name__)\n\n\nclass TokenDistillationTeacher(nn.Module):\n    \"\"\"Wrapper for a teacher model used in token-based distillation.\n\n    Creates and manages a pre-trained teacher model for token distillation,\n    handling model creation and normalization differences between teacher and student.\n\n    Can be created from:\n    - A model name string (creates the model internally)\n    - An existing nn.Module (wraps it with the necessary interface)\n\n    Args:\n        model_name_or_module: Either a model name string or an nn.Module\n        num_classes: Number of output classes (required if model_name_or_module is a string)\n        in_chans: Number of input channels (used if model_name_or_module is a string)\n        pretrained_path: Optional path to pretrained weights (used if model_name_or_module is a string)\n        device: Device to place the model on\n        dtype: Model dtype (uses float32 if None)\n    \"\"\"\n\n    def __init__(\n            self,\n            model_name_or_module: Union[str, nn.Module],\n            num_classes: Optional[int] = None,\n            in_chans: int = 3,\n            pretrained_path: Optional[str] = None,\n            device: Optional[torch.device] = None,\n            dtype: Optional[torch.dtype] = None,\n    ):\n        super().__init__()\n\n        if isinstance(model_name_or_module, str):\n            _logger.info(f\"Creating token distillation teacher model: '{model_name_or_module}'\")\n\n            pretrained_kwargs = {'pretrained': True}\n            if pretrained_path:\n                pretrained_kwargs['pretrained_cfg_overlay'] = dict(\n          
          file=pretrained_path,\n                    num_classes=num_classes,\n                )\n\n            model = create_model(\n                model_name=model_name_or_module,\n                num_classes=num_classes,\n                in_chans=in_chans,\n                device=device,\n                dtype=dtype,\n                **pretrained_kwargs,\n            )\n        elif isinstance(model_name_or_module, nn.Module):\n            model = model_name_or_module\n        else:\n            raise TypeError(\n                f\"model_name_or_module must be a string or nn.Module, got {type(model_name_or_module).__name__}\"\n            )\n\n        model.eval()\n        self.model = model\n\n        # Get normalization values from pretrained_cfg if available\n        model_unwrapped = unwrap_model(model)\n        if hasattr(model_unwrapped, 'pretrained_cfg'):\n            mean = model_unwrapped.pretrained_cfg.get('mean', (0.485, 0.456, 0.406))\n            std = model_unwrapped.pretrained_cfg.get('std', (0.229, 0.224, 0.225))\n        else:\n            mean = (0.485, 0.456, 0.406)\n            std = (0.229, 0.224, 0.225)\n\n        mean_kd = torch.tensor(mean, device=device, dtype=dtype).view(1, -1, 1, 1)\n        std_kd = torch.tensor(std, device=device, dtype=dtype).view(1, -1, 1, 1)\n        self.register_buffer('mean_kd', mean_kd, persistent=False)\n        self.register_buffer('std_kd', std_kd, persistent=False)\n\n    def forward(self, input: torch.Tensor) -> torch.Tensor:\n        \"\"\"Forward pass through teacher model.\n\n        Args:\n            input: Input tensor (should already be normalized for teacher)\n\n        Returns:\n            Teacher logits\n        \"\"\"\n        return self.model(input)\n\n    def normalize_input(\n            self,\n            input: torch.Tensor,\n            student_mean: Optional[torch.Tensor] = None,\n            student_std: Optional[torch.Tensor] = None,\n    ) -> torch.Tensor:\n        \"\"\"Normalize 
input to match teacher's expected normalization.\n\n        Args:\n            input: Input tensor (already normalized for student)\n            student_mean: Student normalization mean buffer [1, 3, 1, 1]\n            student_std: Student normalization std buffer [1, 3, 1, 1]\n\n        Returns:\n            Input tensor normalized for the teacher model\n        \"\"\"\n        if student_mean is None or student_std is None:\n            return input\n        if torch.equal(student_mean, self.mean_kd) and torch.equal(student_std, self.std_kd):\n            return input\n        return (input * student_std + student_mean - self.mean_kd) / self.std_kd\n\n\nclass TokenDistillationTask(TrainingTask):\n    \"\"\"Token-based distillation task for models with distillation heads.\n\n    For models like DeiT that have a dedicated distillation token/head that returns\n    a tuple (main_logits, dist_logits) when distilled_training is enabled. The main\n    head is trained against ground truth labels while the distillation head matches\n    teacher outputs.\n\n    Supports two distillation modes:\n    - 'soft': KL divergence with temperature scaling (default)\n    - 'hard': Cross-entropy with teacher's hard predictions (argmax)\n\n    Loss weighting supports two modes:\n    1. Independent weights: loss = task_loss_weight * task_loss + distill_loss_weight * distill_loss\n    2. 
Complementary mode: loss = task_loss_weight * task_loss + (1 - task_loss_weight) * distill_loss\n       (used when only task_loss_weight is specified)\n\n    Args:\n        student_model: Student model with set_distilled_training() method\n        teacher_model: Teacher model - can be a model name string, nn.Module, or TokenDistillationTeacher\n        criterion: Task loss function for main head (default: CrossEntropyLoss)\n        teacher_pretrained_path: Path to teacher pretrained weights (used when teacher_model is a string)\n        distill_type: 'soft' for KL-div or 'hard' for CE with teacher argmax\n        distill_loss_weight: Weight for distillation loss\n        task_loss_weight: Weight for task loss\n        temperature: Softmax temperature for soft distillation (ignored for hard)\n        device: Device for task tensors/buffers\n        dtype: Dtype for task tensors/buffers\n        verbose: Enable info logging\n\n    Example:\n        >>> # With model name string (num_classes/in_chans inferred from student)\n        >>> task = TokenDistillationTask(\n        ...     student_model=model, teacher_model='deit_base_patch16_224',\n        ...     criterion=nn.CrossEntropyLoss(),\n        ...     distill_type='soft', temperature=3.0, task_loss_weight=0.5,\n        ...     device=torch.device('cuda'),\n        ... )\n        >>> # With raw model\n        >>> task = TokenDistillationTask(\n        ...     student_model=model, teacher_model=my_teacher_model,\n        ...     criterion=nn.CrossEntropyLoss(),\n        ...     distill_type='hard', task_loss_weight=0.5,\n        ... 
)\n    \"\"\"\n\n    def __init__(\n            self,\n            student_model: nn.Module,\n            teacher_model: Union[str, nn.Module, TokenDistillationTeacher],\n            criterion: Optional[nn.Module] = None,\n            teacher_pretrained_path: Optional[str] = None,\n            distill_type: str = 'soft',\n            distill_loss_weight: Optional[float] = None,\n            task_loss_weight: Optional[float] = None,\n            temperature: float = 1.0,\n            device: Optional[torch.device] = None,\n            dtype: Optional[torch.dtype] = None,\n            verbose: bool = True,\n    ):\n        super().__init__(device=device, dtype=dtype, verbose=verbose)\n\n        # Validate model has set_distilled_training method\n        student_unwrapped = unwrap_model(student_model)\n        if not hasattr(student_unwrapped, 'set_distilled_training'):\n            raise ValueError(\n                f\"Model {student_unwrapped.__class__.__name__} does not have 'set_distilled_training' method. 
\"\n                \"TokenDistillationTask requires a model with a distillation head (e.g., DeiT distilled variants).\"\n            )\n\n        # Enable distilled training mode\n        student_unwrapped.set_distilled_training(True)\n\n        # Handle different teacher input types\n        if isinstance(teacher_model, TokenDistillationTeacher):\n            teacher = teacher_model\n        elif isinstance(teacher_model, str) or isinstance(teacher_model, nn.Module):\n            # Get num_classes and in_chans from student\n            num_classes = student_unwrapped.num_classes\n            in_chans = student_unwrapped.in_chans\n            teacher = TokenDistillationTeacher(\n                model_name_or_module=teacher_model,\n                num_classes=num_classes,\n                in_chans=in_chans,\n                pretrained_path=teacher_pretrained_path,\n                device=self.device,\n                dtype=self.dtype,\n            )\n        else:\n            raise TypeError(\n                f\"teacher_model must be a model name string, nn.Module, or TokenDistillationTeacher, \"\n                f\"got {type(teacher_model).__name__}\"\n            )\n\n        self.student = student_model\n        self.teacher = teacher\n        self.criterion = criterion if criterion is not None else nn.CrossEntropyLoss()\n        self.distill_type = distill_type\n        self.temperature = temperature\n\n        if distill_type not in ('soft', 'hard'):\n            raise ValueError(f\"Unsupported distill_type '{distill_type}'. 
Must be 'soft' or 'hard'.\")\n\n        # Register student normalization values as non-persistent buffers\n        student_mean = torch.tensor(\n            student_unwrapped.pretrained_cfg['mean'],\n            device=self.device,\n            dtype=self.dtype,\n        ).view(1, -1, 1, 1)\n        student_std = torch.tensor(\n            student_unwrapped.pretrained_cfg['std'],\n            device=self.device,\n            dtype=self.dtype,\n        ).view(1, -1, 1, 1)\n        self.register_buffer('student_mean', student_mean, persistent=False)\n        self.register_buffer('student_std', student_std, persistent=False)\n\n        # Determine weighting mode\n        if distill_loss_weight is not None:\n            # Mode 1: distill_weight specified - independent weights (task defaults to 1.0 if not set)\n            self.distill_loss_weight = distill_loss_weight\n            self.task_loss_weight = task_loss_weight if task_loss_weight is not None else 1.0\n            if self.verbose:\n                _logger.info(\n                    f\"TokenDistillationTask: Independent weights - \"\n                    f\"task_weight={self.task_loss_weight}, distill_weight={distill_loss_weight}\"\n                )\n        elif task_loss_weight is not None:\n            # Mode 2: only task_weight specified - complementary mode (distill = 1 - task)\n            self.task_loss_weight = task_loss_weight\n            self.distill_loss_weight = 1.0 - task_loss_weight\n            if self.verbose:\n                _logger.info(\n                    f\"TokenDistillationTask: Complementary mode - \"\n                    f\"task_weight={task_loss_weight}, distill_weight={self.distill_loss_weight}\"\n                )\n        else:\n            # Mode 3: neither specified - equal weights (both 1.0)\n            self.distill_loss_weight = 1.0\n            self.task_loss_weight = 1.0\n            if self.verbose:\n                _logger.info(\n                    
f\"TokenDistillationTask: Default equal weights - \"\n                    f\"task_weight={self.task_loss_weight}, distill_weight={self.distill_loss_weight}\"\n                )\n\n        if self.verbose:\n            _logger.info(\n                f\"TokenDistillationTask: distill_type={distill_type}, temperature={temperature}\"\n            )\n\n    def prepare_distributed(\n            self,\n            device_ids: Optional[list] = None,\n            **ddp_kwargs\n    ) -> 'TokenDistillationTask':\n        \"\"\"Prepare task for distributed training.\n\n        Wraps the student model in DistributedDataParallel (DDP) while leaving\n        the frozen teacher model unwrapped.\n\n        Args:\n            device_ids: List of device IDs for DDP (e.g., [local_rank])\n            **ddp_kwargs: Additional arguments passed to DistributedDataParallel\n\n        Returns:\n            self (for method chaining)\n        \"\"\"\n        from torch.nn.parallel import DistributedDataParallel as DDP\n\n        for param in self.teacher.parameters():\n            param.requires_grad = False\n\n        self.student = DDP(self.student, device_ids=device_ids, **ddp_kwargs)\n        return self\n\n    def forward(\n            self,\n            input: torch.Tensor,\n            target: torch.Tensor,\n    ) -> Dict[str, torch.Tensor]:\n        \"\"\"Forward pass with token distillation.\n\n        Args:\n            input: Input tensor [B, C, H, W]\n            target: Target labels [B]\n\n        Returns:\n            Dictionary containing:\n                - 'loss': Combined training loss (task + distillation)\n                - 'output': Main head logits (for metrics)\n                - 'task_loss': Classification loss component\n                - 'distill_loss': Distillation loss component\n        \"\"\"\n        # Student forward pass - returns tuple (main_logits, dist_logits)\n        student_output = self.student(input)\n        main_logits, dist_logits = 
student_output\n\n        # Compute task loss on main head\n        task_loss = self.criterion(main_logits, target)\n\n        # Teacher forward pass (no gradient)\n        with torch.no_grad():\n            input_kd = self.teacher.normalize_input(input, self.student_mean, self.student_std)\n            teacher_logits = self.teacher(input_kd.detach())\n\n        # Compute distillation loss on distillation head\n        if self.distill_type == 'soft':\n            prob_s = F.log_softmax(dist_logits / self.temperature, dim=-1)\n            prob_t = F.log_softmax(teacher_logits / self.temperature, dim=-1)\n            distill_loss = F.kl_div(prob_s, prob_t, reduction='batchmean', log_target=True) * (self.temperature ** 2)\n        else:\n            teacher_hard = teacher_logits.argmax(dim=-1)\n            distill_loss = F.cross_entropy(dist_logits, teacher_hard)\n\n        total_loss = self.task_loss_weight * task_loss + self.distill_loss_weight * distill_loss\n\n        return {\n            'loss': total_loss,\n            'output': main_logits,\n            'task_loss': task_loss,\n            'distill_loss': distill_loss,\n        }\n"
  },
  {
    "path": "timm/utils/__init__.py",
    "content": "from .agc import adaptive_clip_grad\nfrom .attention_extract import AttentionExtract\nfrom .checkpoint_saver import CheckpointSaver\nfrom .clip_grad import dispatch_clip_grad\nfrom .cuda import ApexScaler, NativeScaler\nfrom .decay_batch import decay_batch_step, check_batch_size_retry\nfrom .distributed import distribute_bn, reduce_tensor, init_distributed_device,\\\n    world_info_from_env, is_distributed_env, is_primary\nfrom .jit import set_jit_legacy, set_jit_fuser\nfrom .log import setup_default_logging, FormatterNoInfo\nfrom .metrics import AverageMeter, accuracy\nfrom .misc import natural_key, add_bool_arg, ParseKwargs\nfrom .model import unwrap_model, get_state_dict, freeze, unfreeze, reparameterize_model\nfrom .model_ema import ModelEma, ModelEmaV2, ModelEmaV3\nfrom .random import random_seed\nfrom .summary import update_summary, get_outdir\n"
  },
  {
    "path": "timm/utils/agc.py",
    "content": "\"\"\" Adaptive Gradient Clipping\n\nAn impl of AGC, as per (https://arxiv.org/abs/2102.06171):\n\n@article{brock2021high,\n  author={Andrew Brock and Soham De and Samuel L. Smith and Karen Simonyan},\n  title={High-Performance Large-Scale Image Recognition Without Normalization},\n  journal={arXiv preprint arXiv:},\n  year={2021}\n}\n\nCode references:\n  * Official JAX impl (paper authors): https://github.com/deepmind/deepmind-research/tree/master/nfnets\n  * Phil Wang's PyTorch gist: https://gist.github.com/lucidrains/0d6560077edac419ab5d3aa29e674d5c\n\nHacked together by / Copyright 2021 Ross Wightman\n\"\"\"\nimport torch\n\n\ndef unitwise_norm(x, norm_type=2.0):\n    if x.ndim <= 1:\n        return x.norm(norm_type)\n    else:\n        # works for nn.ConvNd and nn,Linear where output dim is first in the kernel/weight tensor\n        # might need special cases for other weights (possibly MHA) where this may not be true\n        return x.norm(norm_type, dim=tuple(range(1, x.ndim)), keepdim=True)\n\n\ndef adaptive_clip_grad(parameters, clip_factor=0.01, eps=1e-3, norm_type=2.0):\n    if isinstance(parameters, torch.Tensor):\n        parameters = [parameters]\n    for p in parameters:\n        if p.grad is None:\n            continue\n        p_data = p.detach()\n        g_data = p.grad.detach()\n        max_norm = unitwise_norm(p_data, norm_type=norm_type).clamp_(min=eps).mul_(clip_factor)\n        grad_norm = unitwise_norm(g_data, norm_type=norm_type)\n        clipped_grad = g_data * (max_norm / grad_norm.clamp(min=1e-6))\n        new_grads = torch.where(grad_norm < max_norm, g_data, clipped_grad)\n        p.grad.detach().copy_(new_grads)\n"
  },
  {
    "path": "timm/utils/attention_extract.py",
    "content": "import fnmatch\nimport re\nfrom collections import OrderedDict\nfrom typing import Union, Optional, List\n\nimport torch\n\n\nclass AttentionExtract(torch.nn.Module):\n    # defaults should cover a significant number of timm models with attention maps.\n    default_node_names = ['*attn.softmax']\n    default_module_names = ['*attn_drop']\n\n    def __init__(\n            self,\n            model: Union[torch.nn.Module],\n            names: Optional[List[str]] = None,\n            mode: str = 'eval',\n            method: str = 'fx',\n            hook_type: str = 'forward',\n            use_regex: bool = False,\n    ):\n        \"\"\" Extract attention maps (or other activations) from a model by name.\n\n        Args:\n            model: Instantiated model to extract from.\n            names: List of concrete or wildcard names to extract. Names are nodes for fx and modules for hooks.\n            mode: 'train' or 'eval' model mode.\n            method: 'fx' or 'hook' extraction method.\n            hook_type: 'forward' or 'forward_pre' hooks used.\n            use_regex: Use regex instead of fnmatch\n        \"\"\"\n        super().__init__()\n        assert mode in ('train', 'eval')\n        if mode == 'train':\n            model = model.train()\n        else:\n            model = model.eval()\n\n        assert method in ('fx', 'hook')\n        if method == 'fx':\n            # names are activation node names\n            from timm.models._features_fx import get_graph_node_names, GraphExtractNet\n\n            node_names = get_graph_node_names(model)[0 if mode == 'train' else 1]\n            names = names or self.default_node_names\n            if use_regex:\n                regexes = [re.compile(r) for r in names]\n                matched = [g for g in node_names if any([r.match(g) for r in regexes])]\n            else:\n                matched = [g for g in node_names if any([fnmatch.fnmatch(g, n) for n in names])]\n            if not matched:\n    
            raise RuntimeError(f'No node names found matching {names}.')\n\n            self.model = GraphExtractNet(model, matched, return_dict=True)\n            self.hooks = None\n        else:\n            # names are module names\n            assert hook_type in ('forward', 'forward_pre')\n            from timm.models._features import FeatureHooks\n\n            module_names = [n for n, m in model.named_modules()]\n            names = names or self.default_module_names\n            if use_regex:\n                regexes = [re.compile(r) for r in names]\n                matched = [m for m in module_names if any([r.match(m) for r in regexes])]\n            else:\n                matched = [m for m in module_names if any([fnmatch.fnmatch(m, n) for n in names])]\n            if not matched:\n                raise RuntimeError(f'No module names found matching {names}.')\n\n            self.model = model\n            self.hooks = FeatureHooks(matched, model.named_modules(), default_hook_type=hook_type)\n\n        self.names = matched\n        self.mode = mode\n        self.method = method\n\n    def forward(self, x):\n        if self.hooks is not None:\n            self.model(x)\n            output = self.hooks.get_output(device=x.device)\n        else:\n            output = self.model(x)\n        return output\n"
  },
  {
    "path": "timm/utils/checkpoint_saver.py",
    "content": "\"\"\" Checkpoint Saver\n\nTrack top-n training checkpoints and maintain recovery checkpoints on specified intervals.\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\n\nimport glob\nimport logging\nimport operator\nimport os\nimport shutil\n\nimport torch\n\nfrom .model import unwrap_model, get_state_dict\n\n\n_logger = logging.getLogger(__name__)\n\n\nclass CheckpointSaver:\n    def __init__(\n            self,\n            model,\n            optimizer,\n            args=None,\n            model_ema=None,\n            amp_scaler=None,\n            checkpoint_prefix='checkpoint',\n            recovery_prefix='recovery',\n            checkpoint_dir='',\n            recovery_dir='',\n            decreasing=False,\n            max_history=10,\n            unwrap_fn=unwrap_model\n    ):\n\n        # objects to save state_dicts of\n        self.model = model\n        self.optimizer = optimizer\n        self.args = args\n        self.model_ema = model_ema\n        self.amp_scaler = amp_scaler\n\n        # state\n        self.checkpoint_files = []  # (filename, metric) tuples in order of decreasing betterness\n        self.best_epoch = None\n        self.best_metric = None\n        self.curr_recovery_file = ''\n        self.prev_recovery_file = ''\n        self.can_hardlink = True\n\n        # config\n        self.checkpoint_dir = checkpoint_dir\n        self.recovery_dir = recovery_dir\n        self.save_prefix = checkpoint_prefix\n        self.recovery_prefix = recovery_prefix\n        self.extension = '.pth.tar'\n        self.decreasing = decreasing  # a lower metric is better if True\n        self.cmp = operator.lt if decreasing else operator.gt  # True if lhs better than rhs\n        self.max_history = max_history\n        self.unwrap_fn = unwrap_fn\n        assert self.max_history >= 1\n\n    def _replace(self, src, dst):\n        if self.can_hardlink:\n            try:\n                if os.path.exists(dst):\n                    
os.unlink(dst)  # required for Windows support.\n            except (OSError, NotImplementedError) as e:\n                self.can_hardlink = False\n        os.replace(src, dst)\n\n    def _duplicate(self, src, dst):\n        if self.can_hardlink:\n            try:\n                if os.path.exists(dst):\n                    # for Windows\n                    os.unlink(dst)\n                os.link(src, dst)\n                return\n            except (OSError, NotImplementedError) as e:\n                self.can_hardlink = False\n        shutil.copy2(src, dst)\n\n    def _save(self, save_path, epoch, metric=None):\n        save_state = {\n            'epoch': epoch,\n            'arch': type(self.model).__name__.lower(),\n            'state_dict': get_state_dict(self.model, self.unwrap_fn),\n            'optimizer': self.optimizer.state_dict(),\n            'version': 2,  # version < 2 increments epoch before save\n        }\n        if self.args is not None:\n            save_state['arch'] = self.args.model\n            save_state['args'] = self.args\n        if self.amp_scaler is not None:\n            save_state[self.amp_scaler.state_dict_key] = self.amp_scaler.state_dict()\n        if self.model_ema is not None:\n            save_state['state_dict_ema'] = get_state_dict(self.model_ema, self.unwrap_fn)\n        if metric is not None:\n            save_state['metric'] = metric\n        torch.save(save_state, save_path)\n\n    def _cleanup_checkpoints(self, trim=0):\n        trim = min(len(self.checkpoint_files), trim)\n        delete_index = self.max_history - trim\n        if delete_index < 0 or len(self.checkpoint_files) <= delete_index:\n            return\n        to_delete = self.checkpoint_files[delete_index:]\n        for d in to_delete:\n            try:\n                _logger.debug(\"Cleaning checkpoint: {}\".format(d))\n                os.remove(d[0])\n            except Exception as e:\n                _logger.error(\"Exception '{}' while deleting 
checkpoint\".format(e))\n        self.checkpoint_files = self.checkpoint_files[:delete_index]\n\n    def save_checkpoint(self, epoch, metric=None):\n        assert epoch >= 0\n        tmp_save_path = os.path.join(self.checkpoint_dir, 'tmp' + self.extension)\n        last_save_path = os.path.join(self.checkpoint_dir, 'last' + self.extension)\n        self._save(tmp_save_path, epoch, metric)\n        self._replace(tmp_save_path, last_save_path)\n\n        worst_file = self.checkpoint_files[-1] if self.checkpoint_files else None\n        if (\n            len(self.checkpoint_files) < self.max_history\n            or metric is None\n            or self.cmp(metric, worst_file[1])\n        ):\n            if len(self.checkpoint_files) >= self.max_history:\n                self._cleanup_checkpoints(1)\n            filename = '-'.join([self.save_prefix, str(epoch)]) + self.extension\n            save_path = os.path.join(self.checkpoint_dir, filename)\n            self._duplicate(last_save_path, save_path)\n\n            self.checkpoint_files.append((save_path, metric))\n            self.checkpoint_files = sorted(\n                self.checkpoint_files,\n                key=lambda x: x[1],\n                reverse=not self.decreasing  # sort in descending order if a lower metric is not better\n            )\n\n            checkpoints_str = \"Current checkpoints:\\n\"\n            for c in self.checkpoint_files:\n                checkpoints_str += ' {}\\n'.format(c)\n            _logger.info(checkpoints_str)\n\n            if metric is not None and (self.best_metric is None or self.cmp(metric, self.best_metric)):\n                self.best_epoch = epoch\n                self.best_metric = metric\n                best_save_path = os.path.join(self.checkpoint_dir, 'model_best' + self.extension)\n                self._duplicate(last_save_path, best_save_path)\n\n        return (None, None) if self.best_metric is None else (self.best_metric, self.best_epoch)\n\n    def 
save_recovery(self, epoch, batch_idx=0):\n        assert epoch >= 0\n        tmp_save_path = os.path.join(self.recovery_dir, 'recovery_tmp' + self.extension)\n        self._save(tmp_save_path, epoch)\n\n        filename = '-'.join([self.recovery_prefix, str(epoch), str(batch_idx)]) + self.extension\n        save_path = os.path.join(self.recovery_dir, filename)\n        self._replace(tmp_save_path, save_path)\n\n        if os.path.exists(self.prev_recovery_file):\n            try:\n                _logger.debug(\"Cleaning recovery: {}\".format(self.prev_recovery_file))\n                os.remove(self.prev_recovery_file)\n            except Exception as e:\n                _logger.error(\"Exception '{}' while removing {}\".format(e, self.prev_recovery_file))\n        self.prev_recovery_file = self.curr_recovery_file\n        self.curr_recovery_file = save_path\n\n    def find_recovery(self):\n        recovery_path = os.path.join(self.recovery_dir, self.recovery_prefix)\n        files = glob.glob(recovery_path + '*' + self.extension)\n        files = sorted(files)\n        return files[0] if len(files) else ''\n"
  },
  {
    "path": "timm/utils/clip_grad.py",
    "content": "import torch\n\nfrom timm.utils.agc import adaptive_clip_grad\n\n\ndef dispatch_clip_grad(parameters, value: float, mode: str = 'norm', norm_type: float = 2.0):\n    \"\"\" Dispatch to gradient clipping method\n\n    Args:\n        parameters (Iterable): model parameters to clip\n        value (float): clipping value/factor/norm, mode dependant\n        mode (str): clipping mode, one of 'norm', 'value', 'agc'\n        norm_type (float): p-norm, default 2.0\n    \"\"\"\n    if mode == 'norm':\n        torch.nn.utils.clip_grad_norm_(parameters, value, norm_type=norm_type)\n    elif mode == 'value':\n        torch.nn.utils.clip_grad_value_(parameters, value)\n    elif mode == 'agc':\n        adaptive_clip_grad(parameters, value, norm_type=norm_type)\n    else:\n        assert False, f\"Unknown clip mode ({mode}).\"\n\n"
  },
  {
    "path": "timm/utils/cuda.py",
    "content": "\"\"\" CUDA / AMP utils\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport torch\n\ntry:\n    from apex import amp\n    has_apex = True\nexcept ImportError:\n    amp = None\n    has_apex = False\n\nfrom .clip_grad import dispatch_clip_grad\n\n\nclass ApexScaler:\n    state_dict_key = \"amp\"\n\n    def __call__(\n            self,\n            loss,\n            optimizer,\n            clip_grad=None,\n            clip_mode='norm',\n            parameters=None,\n            create_graph=False,\n            need_update=True,\n    ):\n        with amp.scale_loss(loss, optimizer) as scaled_loss:\n            scaled_loss.backward(create_graph=create_graph)\n        if need_update:\n            if clip_grad is not None:\n                dispatch_clip_grad(amp.master_params(optimizer), clip_grad, mode=clip_mode)\n            optimizer.step()\n\n    def state_dict(self):\n        if 'state_dict' in amp.__dict__:\n            return amp.state_dict()\n\n    def load_state_dict(self, state_dict):\n        if 'load_state_dict' in amp.__dict__:\n            amp.load_state_dict(state_dict)\n\n\nclass NativeScaler:\n    state_dict_key = \"amp_scaler\"\n\n    def __init__(self, device='cuda'):\n        try:\n            self._scaler = torch.amp.GradScaler(device=device)\n        except (AttributeError, TypeError) as e:\n            self._scaler = torch.cuda.amp.GradScaler()\n\n    def __call__(\n            self,\n            loss,\n            optimizer,\n            clip_grad=None,\n            clip_mode='norm',\n            parameters=None,\n            create_graph=False,\n            need_update=True,\n    ):\n        self._scaler.scale(loss).backward(create_graph=create_graph)\n        if need_update:\n            if clip_grad is not None:\n                assert parameters is not None\n                self._scaler.unscale_(optimizer)  # unscale the gradients of optimizer's assigned params in-place\n                
dispatch_clip_grad(parameters, clip_grad, mode=clip_mode)\n            self._scaler.step(optimizer)\n            self._scaler.update()\n\n    def state_dict(self):\n        return self._scaler.state_dict()\n\n    def load_state_dict(self, state_dict):\n        self._scaler.load_state_dict(state_dict)\n"
  },
  {
    "path": "timm/utils/decay_batch.py",
    "content": "\"\"\" Batch size decay and retry helpers.\n\nCopyright 2022 Ross Wightman\n\"\"\"\nimport math\n\n\ndef decay_batch_step(batch_size, num_intra_steps=2, no_odd=False):\n    \"\"\" power of two batch-size decay with intra steps\n\n    Decay by stepping between powers of 2:\n    * determine power-of-2 floor of current batch size (base batch size)\n    * divide above value by num_intra_steps to determine step size\n    * floor batch_size to nearest multiple of step_size (from base batch size)\n    Examples:\n     num_steps == 4 --> 64, 56, 48, 40, 32, 28, 24, 20, 16, 14, 12, 10, 8, 7, 6, 5, 4, 3, 2, 1\n     num_steps (no_odd=True) == 4 --> 64, 56, 48, 40, 32, 28, 24, 20, 16, 14, 12, 10, 8, 6, 4, 2\n     num_steps == 2 --> 64, 48, 32, 24, 16, 12, 8, 6, 4, 3, 2, 1\n     num_steps == 1 --> 64, 32, 16, 8, 4, 2, 1\n    \"\"\"\n    if batch_size <= 1:\n        # return 0 for stopping value so easy to use in loop\n        return 0\n    base_batch_size = int(2 ** (math.log(batch_size - 1) // math.log(2)))\n    step_size = max(base_batch_size // num_intra_steps, 1)\n    batch_size = base_batch_size + ((batch_size - base_batch_size - 1) // step_size) * step_size\n    if no_odd and batch_size % 2:\n        batch_size -= 1\n    return batch_size\n\n\ndef check_batch_size_retry(error_str):\n    \"\"\" check failure error string for conditions where batch decay retry should not be attempted\n    \"\"\"\n    error_str = error_str.lower()\n    if 'required rank' in error_str:\n        # Errors involving phrase 'required rank' typically happen when a conv is used that's\n        # not compatible with channels_last memory format.\n        return False\n    if 'illegal' in error_str:\n        # 'Illegal memory access' errors in CUDA typically leave process in unusable state\n        return False\n    return True\n"
  },
  {
    "path": "timm/utils/distributed.py",
    "content": "\"\"\" Distributed training/validation utils\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport logging\nimport os\nfrom typing import Optional\n\nimport torch\nfrom torch import distributed as dist\n\nfrom .model import unwrap_model\n\n_logger = logging.getLogger(__name__)\n\n\ndef reduce_tensor(tensor, n):\n    rt = tensor.clone()\n    dist.all_reduce(rt, op=dist.ReduceOp.SUM)\n    rt /= n\n    return rt\n\n\ndef distribute_bn(model, world_size, reduce=False):\n    # ensure every node has the same running bn stats\n    for bn_name, bn_buf in unwrap_model(model).named_buffers(recurse=True):\n        if ('running_mean' in bn_name) or ('running_var' in bn_name):\n            if reduce:\n                # average bn stats across whole group\n                torch.distributed.all_reduce(bn_buf, op=dist.ReduceOp.SUM)\n                bn_buf /= float(world_size)\n            else:\n                # broadcast bn stats from rank 0 to whole group\n                torch.distributed.broadcast(bn_buf, 0)\n\n\ndef is_global_primary(args):\n    return args.rank == 0\n\n\ndef is_local_primary(args):\n    return args.local_rank == 0\n\n\ndef is_primary(args, local=False):\n    return is_local_primary(args) if local else is_global_primary(args)\n\n\ndef is_distributed_env():\n    if 'WORLD_SIZE' in os.environ:\n        return int(os.environ['WORLD_SIZE']) > 1\n    if 'SLURM_NTASKS' in os.environ:\n        return int(os.environ['SLURM_NTASKS']) > 1\n    return False\n\n\ndef world_info_from_env():\n    local_rank = 0\n    for v in ('LOCAL_RANK', 'MPI_LOCALRANKID', 'SLURM_LOCALID', 'OMPI_COMM_WORLD_LOCAL_RANK'):\n        if v in os.environ:\n            local_rank = int(os.environ[v])\n            break\n\n    global_rank = 0\n    for v in ('RANK', 'PMI_RANK', 'SLURM_PROCID', 'OMPI_COMM_WORLD_RANK'):\n        if v in os.environ:\n            global_rank = int(os.environ[v])\n            break\n\n    world_size = 1\n    for v in ('WORLD_SIZE', 
'PMI_SIZE', 'SLURM_NTASKS', 'OMPI_COMM_WORLD_SIZE'):\n        if v in os.environ:\n            world_size = int(os.environ[v])\n            break\n\n    return local_rank, global_rank, world_size\n\n\ndef init_distributed_device(args):\n    # Distributed training = training on more than one GPU.\n    # Works in both single and multi-node scenarios.\n    args.distributed = False\n    args.world_size = 1\n    args.rank = 0  # global rank\n    args.local_rank = 0\n    result = init_distributed_device_so(\n        device=getattr(args, 'device', 'cuda'),\n        dist_backend=getattr(args, 'dist_backend', None),\n        dist_url=getattr(args, 'dist_url', None),\n    )\n    args.device = result['device']\n    args.world_size = result['world_size']\n    args.rank = result['global_rank']\n    args.local_rank = result['local_rank']\n    args.distributed = result['distributed']\n    device = torch.device(args.device)\n    return device\n\n\ndef init_distributed_device_so(\n        device: str = 'cuda',\n        dist_backend: Optional[str] = None,\n        dist_url: Optional[str] = None,\n):\n    # Distributed training = training on more than one GPU.\n    # Works in both single and multi-node scenarios.\n    distributed = False\n    world_size = 1\n    global_rank = 0\n    local_rank = 0\n    device_type, *device_idx = device.split(':', maxsplit=1)\n\n    if dist_backend is None:\n        # FIXME: verify that ROCm transform nccl to rccl\n        dist_backends = {\n            \"xpu\": \"ccl\",\n            \"hpu\": \"hccl\",\n            \"cuda\": \"nccl\",\n            \"npu\": \"hccl\",\n        }\n        dist_backend = dist_backends.get(device_type, 'gloo')\n    dist_url = dist_url or 'env://'\n\n    # TBD, support horovod?\n    # if args.horovod:\n    #     import horovod.torch as hvd\n    #     assert hvd is not None, \"Horovod is not installed\"\n    #     hvd.init()\n    #     args.local_rank = int(hvd.local_rank())\n    #     args.rank = hvd.rank()\n    #     
args.world_size = hvd.size()\n    #     args.distributed = True\n    #     os.environ['LOCAL_RANK'] = str(args.local_rank)\n    #     os.environ['RANK'] = str(args.rank)\n    #     os.environ['WORLD_SIZE'] = str(args.world_size)\n    if is_distributed_env():\n        if 'SLURM_PROCID' in os.environ:\n            # DDP via SLURM\n            local_rank, global_rank, world_size = world_info_from_env()\n            # SLURM var -> torch.distributed vars in case needed\n            os.environ['LOCAL_RANK'] = str(local_rank)\n            os.environ['RANK'] = str(global_rank)\n            os.environ['WORLD_SIZE'] = str(world_size)\n            torch.distributed.init_process_group(\n                backend=dist_backend,\n                init_method=dist_url,\n                world_size=world_size,\n                rank=global_rank,\n            )\n        else:\n            # DDP via torchrun, torch.distributed.launch\n            local_rank, _, _ = world_info_from_env()\n            torch.distributed.init_process_group(\n                backend=dist_backend,\n                init_method=dist_url,\n            )\n            world_size = torch.distributed.get_world_size()\n            global_rank = torch.distributed.get_rank()\n        distributed = True\n\n    if device_type == 'cuda':\n        assert torch.cuda.is_available(), f'CUDA is not available but {device} was specified.'\n    if device_type == 'npu':\n        assert torch.npu.is_available(), f'Ascend NPU is not available but {device} was specified.'\n\n    if distributed and device != 'cpu':\n        # Ignore manually specified device index in distributed mode and\n        # override with resolved local rank, fewer headaches in most setups.\n        if device_idx:\n            _logger.warning(f'device index {device_idx[0]} removed from specified ({device}).')\n        device = f'{device_type}:{local_rank}'\n\n    if device.startswith('cuda:'):\n        torch.cuda.set_device(device)\n\n    return dict(\n        
device=device,\n        global_rank=global_rank,\n        local_rank=local_rank,\n        world_size=world_size,\n        distributed=distributed,\n    )\n"
  },
  {
    "path": "timm/utils/jit.py",
    "content": "\"\"\" JIT scripting/tracing utils\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport os\n\nimport torch\n\n\ndef set_jit_legacy():\n    \"\"\" Set JIT executor to legacy w/ support for op fusion\n    This is hopefully a temporary need in 1.5/1.5.1/1.6 to restore performance due to changes\n    in the JIT executor. These API are not supported so could change.\n    \"\"\"\n    #\n    assert hasattr(torch._C, '_jit_set_profiling_executor'), \"Old JIT behavior doesn't exist!\"\n    torch._C._jit_set_profiling_executor(False)\n    torch._C._jit_set_profiling_mode(False)\n    torch._C._jit_override_can_fuse_on_gpu(True)\n    #torch._C._jit_set_texpr_fuser_enabled(True)\n\n\ndef set_jit_fuser(fuser):\n    if fuser == \"te\":\n        # default fuser should be == 'te'\n        torch._C._jit_set_profiling_executor(True)\n        torch._C._jit_set_profiling_mode(True)\n        torch._C._jit_override_can_fuse_on_cpu(False)\n        torch._C._jit_override_can_fuse_on_gpu(True)\n        torch._C._jit_set_texpr_fuser_enabled(True)\n        try:\n            torch._C._jit_set_nvfuser_enabled(False)\n        except Exception:\n            pass\n    elif fuser == \"old\" or fuser == \"legacy\":\n        torch._C._jit_set_profiling_executor(False)\n        torch._C._jit_set_profiling_mode(False)\n        torch._C._jit_override_can_fuse_on_gpu(True)\n        torch._C._jit_set_texpr_fuser_enabled(False)\n        try:\n            torch._C._jit_set_nvfuser_enabled(False)\n        except Exception:\n            pass\n    elif fuser == \"nvfuser\" or fuser == \"nvf\":\n        os.environ['PYTORCH_NVFUSER_DISABLE_FALLBACK'] = '1'\n        #os.environ['PYTORCH_NVFUSER_DISABLE_FMA'] = '1'\n        #os.environ['PYTORCH_NVFUSER_JIT_OPT_LEVEL'] = '0'\n        torch._C._jit_set_texpr_fuser_enabled(False)\n        torch._C._jit_set_profiling_executor(True)\n        torch._C._jit_set_profiling_mode(True)\n        torch._C._jit_can_fuse_on_cpu()\n        
torch._C._jit_can_fuse_on_gpu()\n        torch._C._jit_override_can_fuse_on_cpu(False)\n        torch._C._jit_override_can_fuse_on_gpu(False)\n        torch._C._jit_set_nvfuser_guard_mode(True)\n        torch._C._jit_set_nvfuser_enabled(True)\n    else:\n        assert False, f\"Invalid jit fuser ({fuser})\"\n"
  },
  {
    "path": "timm/utils/log.py",
    "content": "\"\"\" Logging helpers\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport logging\nimport logging.handlers\n\n\nclass FormatterNoInfo(logging.Formatter):\n    def __init__(self, fmt='%(levelname)s: %(message)s'):\n        logging.Formatter.__init__(self, fmt)\n\n    def format(self, record):\n        if record.levelno == logging.INFO:\n            return str(record.getMessage())\n        return logging.Formatter.format(self, record)\n\n\ndef setup_default_logging(default_level=logging.INFO, log_path=''):\n    console_handler = logging.StreamHandler()\n    console_handler.setFormatter(FormatterNoInfo())\n    logging.root.addHandler(console_handler)\n    logging.root.setLevel(default_level)\n    if log_path:\n        file_handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=(1024 ** 2 * 2), backupCount=3)\n        file_formatter = logging.Formatter(\"%(asctime)s - %(name)20s: [%(levelname)8s] - %(message)s\")\n        file_handler.setFormatter(file_formatter)\n        logging.root.addHandler(file_handler)\n"
  },
  {
    "path": "timm/utils/metrics.py",
    "content": "\"\"\" Eval metrics and related\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\n\n\nclass AverageMeter:\n    \"\"\"Computes and stores the average and current value\"\"\"\n    def __init__(self):\n        self.reset()\n\n    def reset(self):\n        self.val = 0\n        self.avg = 0\n        self.sum = 0\n        self.count = 0\n\n    def update(self, val, n=1):\n        self.val = val\n        self.sum += val * n\n        self.count += n\n        self.avg = self.sum / self.count\n\n\ndef accuracy(output, target, topk=(1,)):\n    \"\"\"Computes the accuracy over the k top predictions for the specified values of k\"\"\"\n    maxk = min(max(topk), output.size()[1])\n    batch_size = target.size(0)\n    _, pred = output.topk(maxk, 1, True, True)\n    pred = pred.t()\n    correct = pred.eq(target.reshape(1, -1).expand_as(pred))\n    return [correct[:min(k, maxk)].reshape(-1).float().sum(0) * 100. / batch_size for k in topk]\n"
  },
  {
    "path": "timm/utils/misc.py",
    "content": "\"\"\" Misc utils\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport argparse\nimport ast\nimport re\n\n\ndef natural_key(string_):\n    \"\"\"See http://www.codinghorror.com/blog/archives/001018.html\"\"\"\n    return [int(s) if s.isdigit() else s for s in re.split(r'(\\d+)', string_.lower())]\n\n\ndef add_bool_arg(parser, name, default=False, help=''):\n    dest_name = name.replace('-', '_')\n    group = parser.add_mutually_exclusive_group(required=False)\n    group.add_argument('--' + name, dest=dest_name, action='store_true', help=help)\n    group.add_argument('--no-' + name, dest=dest_name, action='store_false', help=help)\n    parser.set_defaults(**{dest_name: default})\n\n\nclass ParseKwargs(argparse.Action):\n    def __call__(self, parser, namespace, values, option_string=None):\n        kw = {}\n        for value in values:\n            key, value = value.split('=')\n            try:\n                kw[key] = ast.literal_eval(value)\n            except ValueError:\n                kw[key] = str(value)  # fallback to string (avoid need to escape on command line)\n        setattr(namespace, self.dest, kw)\n"
  },
  {
    "path": "timm/utils/model.py",
    "content": "\"\"\" Model / state_dict utils\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport fnmatch\nfrom copy import deepcopy\n\nimport torch\nfrom torchvision.ops.misc import FrozenBatchNorm2d\n\nfrom timm.layers import BatchNormAct2d, SyncBatchNormAct, FrozenBatchNormAct2d,\\\n    freeze_batch_norm_2d, unfreeze_batch_norm_2d\nfrom .model_ema import ModelEma\n\n\ndef unwrap_model(model):\n    if isinstance(model, ModelEma):\n        return unwrap_model(model.ema)\n    else:\n        if hasattr(model, 'module'):\n            return unwrap_model(model.module)\n        elif hasattr(model, '_orig_mod'):\n            return unwrap_model(model._orig_mod)\n        else:\n            return model\n\n\ndef get_state_dict(model, unwrap_fn=unwrap_model):\n    return unwrap_fn(model).state_dict()\n\n\ndef avg_sq_ch_mean(model, input, output):\n    \"\"\" calculate average channel square mean of output activations\n    \"\"\"\n    return torch.mean(output.mean(axis=[0, 2, 3]) ** 2).item()\n\n\ndef avg_ch_var(model, input, output):\n    \"\"\" calculate average channel variance of output activations\n    \"\"\"\n    return torch.mean(output.var(axis=[0, 2, 3])).item()\n\n\ndef avg_ch_var_residual(model, input, output):\n    \"\"\" calculate average channel variance of output activations\n    \"\"\"\n    return torch.mean(output.var(axis=[0, 2, 3])).item()\n\n\nclass ActivationStatsHook:\n    \"\"\"Iterates through each of `model`'s modules and matches modules using unix pattern \n    matching based on `hook_fn_locs` and registers `hook_fn` to the module if there is \n    a match. \n\n    Arguments:\n        model (nn.Module): model from which we will extract the activation stats\n        hook_fn_locs (List[str]): List of `hook_fn` locations based on Unix type string \n            matching with the name of model's modules. 
\n        hook_fns (List[Callable]): List of hook functions to be registered at every\n            module in `layer_names`.\n    \n    Inspiration from https://docs.fast.ai/callback.hook.html.\n\n    Refer to https://gist.github.com/amaarora/6e56942fcb46e67ba203f3009b30d950 for an example \n    on how to plot Signal Propagation Plots using `ActivationStatsHook`.\n    \"\"\"\n\n    def __init__(self, model, hook_fn_locs, hook_fns):\n        self.model = model\n        self.hook_fn_locs = hook_fn_locs\n        self.hook_fns = hook_fns\n        if len(hook_fn_locs) != len(hook_fns):\n            raise ValueError(\"Please provide `hook_fns` for each `hook_fn_locs`, \\\n                their lengths are different.\")\n        self.stats = dict((hook_fn.__name__, []) for hook_fn in hook_fns)\n        for hook_fn_loc, hook_fn in zip(hook_fn_locs, hook_fns):\n            self.register_hook(hook_fn_loc, hook_fn)\n\n    def _create_hook(self, hook_fn):\n        def append_activation_stats(module, input, output):\n            out = hook_fn(module, input, output)\n            self.stats[hook_fn.__name__].append(out)\n\n        return append_activation_stats\n\n    def register_hook(self, hook_fn_loc, hook_fn):\n        for name, module in self.model.named_modules():\n            if not fnmatch.fnmatch(name, hook_fn_loc):\n                continue\n            module.register_forward_hook(self._create_hook(hook_fn))\n\n\ndef extract_spp_stats(\n        model,\n        hook_fn_locs,\n        hook_fns,\n        input_shape=[8, 3, 224, 224]):\n    \"\"\"Extract average square channel mean and variance of activations during \n        forward pass to plot Signal Propagation Plots (SPP).\n    \n    Paper: https://arxiv.org/abs/2101.08692\n\n    Example Usage: https://gist.github.com/amaarora/6e56942fcb46e67ba203f3009b30d950\n    \"\"\"\n    x = torch.normal(0., 1., input_shape)\n    hook = ActivationStatsHook(model, hook_fn_locs=hook_fn_locs, hook_fns=hook_fns)\n    _ = model(x)\n    
return hook.stats\n\n\ndef _freeze_unfreeze(root_module, submodules=[], include_bn_running_stats=True, mode='freeze'):\n    \"\"\"\n    Freeze or unfreeze parameters of the specified modules and those of all their hierarchical descendants. This is\n        done in place.\n\n    Args:\n        root_module (nn.Module, optional): Root module relative to which the `submodules` are referenced.\n        submodules (list[str]): List of modules for which the parameters will be (un)frozen. They are to be provided as\n            named modules relative to the root module (accessible via `root_module.named_modules()`). An empty list\n            means that the whole root module will be (un)frozen. Defaults to []\n        include_bn_running_stats (bool): Whether to also (un)freeze the running statistics of batch norm 2d layers.\n            Defaults to `True`.\n        mode (bool): Whether to freeze (\"freeze\") or unfreeze (\"unfreeze\"). Defaults to `\"freeze\"`.\n    \"\"\"\n    assert mode in [\"freeze\", \"unfreeze\"], '`mode` must be one of \"freeze\" or \"unfreeze\"'\n\n    if isinstance(root_module, (\n            torch.nn.modules.batchnorm.BatchNorm2d,\n            torch.nn.modules.batchnorm.SyncBatchNorm,\n            BatchNormAct2d,\n            SyncBatchNormAct,\n    )):\n        # Raise assertion here because we can't convert it in place\n        raise AssertionError(\n            \"You have provided a batch norm layer as the `root module`. 
Please use \"\n            \"`timm.utils.model.freeze_batch_norm_2d` or `timm.utils.model.unfreeze_batch_norm_2d` instead.\")\n\n    if isinstance(submodules, str):\n        submodules = [submodules]\n\n    named_modules = submodules\n    submodules = [root_module.get_submodule(m) for m in submodules]\n\n    if not len(submodules):\n        named_modules, submodules = list(zip(*root_module.named_children()))\n\n    for n, m in zip(named_modules, submodules):\n        # (Un)freeze parameters\n        for p in m.parameters():\n            p.requires_grad = False if mode == 'freeze' else True\n        if include_bn_running_stats:\n            # Helper to add submodule specified as a named_module\n            def _add_submodule(module, name, submodule):\n                split = name.rsplit('.', 1)\n                if len(split) > 1:\n                    module.get_submodule(split[0]).add_module(split[1], submodule)\n                else:\n                    module.add_module(name, submodule)\n\n            # Freeze batch norm\n            if mode == 'freeze':\n                res = freeze_batch_norm_2d(m)\n                # It's possible that `m` is a type of BatchNorm in itself, in which case `unfreeze_batch_norm_2d` won't\n                # convert it in place, but will return the converted result. In this case `res` holds the converted\n                # result and we may try to re-assign the named module\n                if isinstance(m, (\n                        torch.nn.modules.batchnorm.BatchNorm2d,\n                        torch.nn.modules.batchnorm.SyncBatchNorm,\n                        BatchNormAct2d,\n                        SyncBatchNormAct,\n                )):\n                    _add_submodule(root_module, n, res)\n            # Unfreeze batch norm\n            else:\n                res = unfreeze_batch_norm_2d(m)\n                # Ditto. 
See note above in mode == 'freeze' branch\n                if isinstance(m, (FrozenBatchNorm2d, FrozenBatchNormAct2d)):\n                    _add_submodule(root_module, n, res)\n\n\ndef freeze(root_module, submodules=[], include_bn_running_stats=True):\n    \"\"\"\n    Freeze parameters of the specified modules and those of all their hierarchical descendants. This is done in place.\n\n    Args:\n        root_module (nn.Module): Root module relative to which `submodules` are referenced.\n        submodules (list[str]): List of modules for which the parameters will be frozen. They are to be provided as\n            named modules relative to the root module (accessible via `root_module.named_modules()`). An empty list\n            means that the whole root module will be frozen. Defaults to `[]`.\n        include_bn_running_stats (bool): Whether to also freeze the running statistics of `BatchNorm2d` and\n            `SyncBatchNorm` layers. These will be converted to `FrozenBatchNorm2d` in place. Hint: During fine tuning,\n            it's good practice to freeze batch norm stats. And note that these are different to the affine parameters\n            which are just normal PyTorch parameters. 
Defaults to `True`.\n\n    Hint: If you want to freeze batch norm ONLY, use `timm.utils.model.freeze_batch_norm_2d`.\n\n    Examples::\n\n        >>> model = timm.create_model('resnet18')\n        >>> # Freeze up to and including layer2\n        >>> submodules = [n for n, _ in model.named_children()]\n        >>> print(submodules)\n        ['conv1', 'bn1', 'act1', 'maxpool', 'layer1', 'layer2', 'layer3', 'layer4', 'global_pool', 'fc']\n        >>> freeze(model, submodules[:submodules.index('layer2') + 1])\n        >>> # Check for yourself that it works as expected\n        >>> print(model.layer2[0].conv1.weight.requires_grad)\n        False\n        >>> print(model.layer3[0].conv1.weight.requires_grad)\n        True\n        >>> # Unfreeze\n        >>> unfreeze(model)\n    \"\"\"\n    _freeze_unfreeze(root_module, submodules, include_bn_running_stats=include_bn_running_stats, mode=\"freeze\")\n\n\ndef unfreeze(root_module, submodules=[], include_bn_running_stats=True):\n    \"\"\"\n    Unfreeze parameters of the specified modules and those of all their hierarchical descendants. This is done in place.\n\n    Args:\n        root_module (nn.Module): Root module relative to which `submodules` are referenced.\n        submodules (list[str]): List of submodules for which the parameters will be (un)frozen. They are to be provided\n            as named modules relative to the root module (accessible via `root_module.named_modules()`). An empty\n            list means that the whole root module will be unfrozen. Defaults to `[]`.\n        include_bn_running_stats (bool): Whether to also unfreeze the running statistics of `FrozenBatchNorm2d` layers.\n            These will be converted to `BatchNorm2d` in place. 
Defaults to `True`.\n\n    See example in docstring for `freeze`.\n    \"\"\"\n    _freeze_unfreeze(root_module, submodules, include_bn_running_stats=include_bn_running_stats, mode=\"unfreeze\")\n\n\ndef reparameterize_model(model: torch.nn.Module, inplace=False) -> torch.nn.Module:\n    if not inplace:\n        model = deepcopy(model)\n\n    def _fuse(m):\n        for child_name, child in m.named_children():\n            if hasattr(child, 'fuse'):\n                setattr(m, child_name, child.fuse())\n            elif hasattr(child, \"reparameterize\"):\n                child.reparameterize()\n            elif hasattr(child, \"switch_to_deploy\"):\n                child.switch_to_deploy()\n            _fuse(child)\n\n    _fuse(model)\n    return model\n"
  },
  {
    "path": "timm/utils/model_ema.py",
    "content": "\"\"\" Exponential Moving Average (EMA) of model updates\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport logging\nfrom collections import OrderedDict\nfrom copy import deepcopy\nfrom typing import Optional\n\nimport torch\nimport torch.nn as nn\n\n_logger = logging.getLogger(__name__)\n\n\nclass ModelEma:\n    \"\"\" Model Exponential Moving Average (DEPRECATED)\n\n    Keep a moving average of everything in the model state_dict (parameters and buffers).\n    This version is deprecated, it does not work with scripted models. Will be removed eventually.\n\n    This is intended to allow functionality like\n    https://www.tensorflow.org/api_docs/python/tf/train/ExponentialMovingAverage\n\n    A smoothed version of the weights is necessary for some training schemes to perform well.\n    E.g. Google's hyper-params for training MNASNet, MobileNet-V3, EfficientNet, etc that use\n    RMSprop with a short 2.4-3 epoch decay period and slow LR decay rate of .96-.99 requires EMA\n    smoothing of weights to match results. Pay attention to the decay constant you are using\n    relative to your update count per epoch.\n\n    To keep EMA from using GPU resources, set device='cpu'. This will save a bit of memory but\n    disable validation of the EMA weights. 
Validation will have to be done manually in a separate\n    process, or after the training stops converging.\n\n    This class is sensitive where it is initialized in the sequence of model init,\n    GPU assignment and distributed training wrappers.\n    \"\"\"\n    def __init__(self, model, decay=0.9999, device='', resume=''):\n        # make a copy of the model for accumulating moving average of weights\n        self.ema = deepcopy(model)\n        self.ema.eval()\n        self.decay = decay\n        self.device = device  # perform ema on different device from model if set\n        if device:\n            self.ema.to(device=device)\n        self.ema_has_module = hasattr(self.ema, 'module')\n        if resume:\n            self._load_checkpoint(resume)\n        for p in self.ema.parameters():\n            p.requires_grad_(False)\n\n    def _load_checkpoint(self, checkpoint_path):\n        checkpoint = torch.load(checkpoint_path, map_location='cpu')\n        assert isinstance(checkpoint, dict)\n        if 'state_dict_ema' in checkpoint:\n            new_state_dict = OrderedDict()\n            for k, v in checkpoint['state_dict_ema'].items():\n                # ema model may have been wrapped by DataParallel, and need module prefix\n                if self.ema_has_module:\n                    name = 'module.' 
+ k if not k.startswith('module') else k\n                else:\n                    name = k\n                new_state_dict[name] = v\n            self.ema.load_state_dict(new_state_dict)\n            _logger.info(\"Loaded state_dict_ema\")\n        else:\n            _logger.warning(\"Failed to find state_dict_ema, starting from loaded model weights\")\n\n    def update(self, model):\n        # correct a mismatch in state dict keys\n        needs_module = hasattr(model, 'module') and not self.ema_has_module\n        with torch.no_grad():\n            msd = model.state_dict()\n            for k, ema_v in self.ema.state_dict().items():\n                if needs_module:\n                    k = 'module.' + k\n                model_v = msd[k].detach()\n                if self.device:\n                    model_v = model_v.to(device=self.device)\n                ema_v.copy_(ema_v * self.decay + (1. - self.decay) * model_v)\n\n\nclass ModelEmaV2(nn.Module):\n    \"\"\" Model Exponential Moving Average V2\n\n    Keep a moving average of everything in the model state_dict (parameters and buffers).\n    V2 of this module is simpler, it does not match params/buffers based on name but simply\n    iterates in order. It works with torchscript (JIT of full model).\n\n    This is intended to allow functionality like\n    https://www.tensorflow.org/api_docs/python/tf/train/ExponentialMovingAverage\n\n    A smoothed version of the weights is necessary for some training schemes to perform well.\n    E.g. Google's hyper-params for training MNASNet, MobileNet-V3, EfficientNet, etc that use\n    RMSprop with a short 2.4-3 epoch decay period and slow LR decay rate of .96-.99 requires EMA\n    smoothing of weights to match results. Pay attention to the decay constant you are using\n    relative to your update count per epoch.\n\n    To keep EMA from using GPU resources, set device='cpu'. This will save a bit of memory but\n    disable validation of the EMA weights. 
Validation will have to be done manually in a separate\n    process, or after the training stops converging.\n\n    This class is sensitive where it is initialized in the sequence of model init,\n    GPU assignment and distributed training wrappers.\n    \"\"\"\n    def __init__(self, model, decay=0.9999, device=None):\n        super().__init__()\n        # make a copy of the model for accumulating moving average of weights\n        self.module = deepcopy(model)\n        self.module.eval()\n        self.decay = decay\n        self.device = device  # perform ema on different device from model if set\n        if self.device is not None:\n            self.module.to(device=device)\n\n    def _update(self, model, update_fn):\n        with torch.no_grad():\n            for ema_v, model_v in zip(self.module.state_dict().values(), model.state_dict().values()):\n                if self.device is not None:\n                    model_v = model_v.to(device=self.device)\n                ema_v.copy_(update_fn(ema_v, model_v))\n\n    def update(self, model):\n        self._update(model, update_fn=lambda e, m: self.decay * e + (1. - self.decay) * m)\n\n    def set(self, model):\n        self._update(model, update_fn=lambda e, m: m)\n\n    def forward(self, *args, **kwargs):\n        return self.module(*args, **kwargs)\n\n\nclass ModelEmaV3(nn.Module):\n    \"\"\" Model Exponential Moving Average V3\n\n    Keep a moving average of everything in the model state_dict (parameters and buffers).\n    V3 of this module leverages for_each and in-place operations for faster performance.\n\n    Decay warmup based on code by @crowsonkb, her comments:\n      If inv_gamma=1 and power=1, implements a simple average. 
inv_gamma=1, power=2/3 are\n      good values for models you plan to train for a million or more steps (reaches decay\n      factor 0.999 at 31.6K steps, 0.9999 at 1M steps), inv_gamma=1, power=3/4 for models\n      you plan to train for less (reaches decay factor 0.999 at 10K steps, 0.9999 at\n      215.4k steps).\n\n    This is intended to allow functionality like\n    https://www.tensorflow.org/api_docs/python/tf/train/ExponentialMovingAverage\n\n    To keep EMA from using GPU resources, set device='cpu'. This will save a bit of memory but\n    disable validation of the EMA weights. Validation will have to be done manually in a separate\n    process, or after the training stops converging.\n\n    This class is sensitive where it is initialized in the sequence of model init,\n    GPU assignment and distributed training wrappers.\n    \"\"\"\n    def __init__(\n            self,\n            model,\n            decay: float = 0.9999,\n            min_decay: float = 0.0,\n            update_after_step: int = 0,\n            use_warmup: bool = False,\n            warmup_gamma: float = 1.0,\n            warmup_power: float = 2/3,\n            device: Optional[torch.device] = None,\n            foreach: bool = True,\n            exclude_buffers: bool = False,\n    ):\n        super().__init__()\n        # make a copy of the model for accumulating moving average of weights\n        self.module = deepcopy(model)\n        self.module.eval()\n        self.decay = decay\n        self.min_decay = min_decay\n        self.update_after_step = update_after_step\n        self.use_warmup = use_warmup\n        self.warmup_gamma = warmup_gamma\n        self.warmup_power = warmup_power\n        self.foreach = foreach\n        self.device = device  # perform ema on different device from model if set\n        self.exclude_buffers = exclude_buffers\n        if self.device is not None and device != next(model.parameters()).device:\n            self.foreach = False  # cannot use foreach 
methods with different devices\n            self.module.to(device=device)\n\n    def get_decay(self, step: Optional[int] = None) -> float:\n        \"\"\"\n        Compute the decay factor for the exponential moving average.\n        \"\"\"\n        if step is None:\n            return self.decay\n\n        step = max(0, step - self.update_after_step - 1)\n        if step <= 0:\n            return 0.0\n\n        if self.use_warmup:\n            decay = 1 - (1 + step / self.warmup_gamma) ** -self.warmup_power\n            decay = max(min(decay, self.decay), self.min_decay)\n        else:\n            decay = self.decay\n\n        return decay\n\n    @torch.no_grad()\n    def update(self, model, step: Optional[int] = None):\n        decay = self.get_decay(step)\n        if self.exclude_buffers:\n            self.apply_update_no_buffers_(model, decay)\n        else:\n            self.apply_update_(model, decay)\n\n    def apply_update_(self, model, decay: float):\n        # interpolate parameters and buffers\n        if self.foreach:\n            ema_lerp_values = []\n            model_lerp_values = []\n            for ema_v, model_v in zip(self.module.state_dict().values(), model.state_dict().values()):\n                if ema_v.is_floating_point():\n                    ema_lerp_values.append(ema_v)\n                    model_lerp_values.append(model_v)\n                else:\n                    ema_v.copy_(model_v)\n\n            if hasattr(torch, '_foreach_lerp_'):\n                torch._foreach_lerp_(ema_lerp_values, model_lerp_values, weight=1. - decay)\n            else:\n                torch._foreach_mul_(ema_lerp_values, scalar=decay)\n                torch._foreach_add_(ema_lerp_values, model_lerp_values, alpha=1. 
- decay)\n        else:\n            for ema_v, model_v in zip(self.module.state_dict().values(), model.state_dict().values()):\n                if ema_v.is_floating_point():\n                    ema_v.lerp_(model_v.to(device=self.device), weight=1. - decay)\n                else:\n                    ema_v.copy_(model_v.to(device=self.device))\n\n    def apply_update_no_buffers_(self, model, decay: float):\n        # interpolate parameters, copy buffers\n        ema_params = tuple(self.module.parameters())\n        model_params = tuple(model.parameters())\n        if self.foreach:\n            if hasattr(torch, '_foreach_lerp_'):\n                torch._foreach_lerp_(ema_params, model_params, weight=1. - decay)\n            else:\n                torch._foreach_mul_(ema_params, scalar=decay)\n                torch._foreach_add_(ema_params, model_params, alpha=1 - decay)\n        else:\n            for ema_p, model_p in zip(ema_params, model_params):\n                ema_p.lerp_(model_p.to(device=self.device), weight=1. - decay)\n\n        for ema_b, model_b in zip(self.module.buffers(), model.buffers()):\n            ema_b.copy_(model_b.to(device=self.device))\n\n    @torch.no_grad()\n    def set(self, model):\n        for ema_v, model_v in zip(self.module.state_dict().values(), model.state_dict().values()):\n            ema_v.copy_(model_v.to(device=self.device))\n\n    def forward(self, *args, **kwargs):\n        return self.module(*args, **kwargs)"
  },
  {
    "path": "timm/utils/onnx.py",
    "content": "from typing import Optional, Tuple, List\n\nimport torch\n\n\ndef onnx_forward(onnx_file, example_input):\n    import onnxruntime\n\n    sess_options = onnxruntime.SessionOptions()\n    session = onnxruntime.InferenceSession(onnx_file, sess_options)\n    input_name = session.get_inputs()[0].name\n    output = session.run([], {input_name: example_input.numpy()})\n    output = output[0]\n    return output\n\n\ndef onnx_export(\n        model: torch.nn.Module,\n        output_file: str,\n        example_input: Optional[torch.Tensor] = None,\n        training: bool = False,\n        verbose: bool = False,\n        check: bool = True,\n        check_forward: bool = False,\n        batch_size: int = 64,\n        input_size: Tuple[int, int, int] = None,\n        opset: Optional[int] = None,\n        dynamic_size: bool = False,\n        aten_fallback: bool = False,\n        keep_initializers: Optional[bool] = None,\n        use_dynamo: bool = False,\n        input_names: List[str] = None,\n        output_names: List[str] = None,\n):\n    import onnx\n\n    if training:\n        training_mode = torch.onnx.TrainingMode.TRAINING\n        model.train()\n    else:\n        training_mode = torch.onnx.TrainingMode.EVAL\n        model.eval()\n\n    if example_input is None:\n        if not input_size:\n            assert hasattr(model, 'default_cfg'), 'Cannot file model default config, input size must be provided'\n            input_size = model.default_cfg.get('input_size')\n        example_input = torch.randn((batch_size,) + input_size, requires_grad=training)\n\n    # Run model once before export trace, sets padding for models with Conv2dSameExport. 
This means\n    # that the padding for models with Conv2dSameExport (most models with tf_ prefix) is fixed for\n    # the input img_size specified in this script.\n\n    # Opset >= 11 should allow for dynamic padding, however I cannot get it to work due to\n    # issues in the tracing of the dynamic padding or errors attempting to export the model after jit\n    # scripting it (an approach that should work). Perhaps in a future PyTorch or ONNX versions...\n    with torch.inference_mode():\n        original_out = model(example_input)\n\n    input_names = input_names or [\"input0\"]\n    output_names = output_names or [\"output0\"]\n\n    dynamic_axes = {'input0': {0: 'batch'}, 'output0': {0: 'batch'}}\n    if dynamic_size:\n        dynamic_axes['input0'][2] = 'height'\n        dynamic_axes['input0'][3] = 'width'\n\n    if aten_fallback:\n        export_type = torch.onnx.OperatorExportTypes.ONNX_ATEN_FALLBACK\n    else:\n        export_type = torch.onnx.OperatorExportTypes.ONNX\n\n    if use_dynamo:\n        export_options = torch.onnx.ExportOptions(dynamic_shapes=dynamic_size)\n        export_output = torch.onnx.dynamo_export(\n            model,\n            example_input,\n            export_options=export_options,\n        )\n        export_output.save(output_file)\n    else:\n        torch.onnx.export(\n            model,\n            example_input,\n            output_file,\n            training=training_mode,\n            export_params=True,\n            verbose=verbose,\n            input_names=input_names,\n            output_names=output_names,\n            keep_initializers_as_inputs=keep_initializers,\n            dynamic_axes=dynamic_axes,\n            opset_version=opset,\n            operator_export_type=export_type\n        )\n\n    if check:\n        onnx_model = onnx.load(output_file)\n        onnx.checker.check_model(onnx_model, full_check=True)  # assuming throw on error\n        if check_forward and not training:\n            import numpy as np\n 
           onnx_out = onnx_forward(output_file, example_input)\n            np.testing.assert_almost_equal(original_out.numpy(), onnx_out, decimal=3)\n\n"
  },
  {
    "path": "timm/utils/random.py",
    "content": "import random\nimport numpy as np\nimport torch\n\n\ndef random_seed(seed=42, rank=0):\n    torch.manual_seed(seed + rank)\n    np.random.seed(seed + rank)\n    random.seed(seed + rank)\n"
  },
  {
    "path": "timm/utils/summary.py",
    "content": "\"\"\" Summary utilities\n\nHacked together by / Copyright 2020 Ross Wightman\n\"\"\"\nimport csv\nimport os\nfrom collections import OrderedDict\ntry: \n    import wandb\nexcept ImportError:\n    pass\n\n\ndef get_outdir(path, *paths, inc=False):\n    outdir = os.path.join(path, *paths)\n    if not os.path.exists(outdir):\n        os.makedirs(outdir)\n    elif inc:\n        count = 1\n        outdir_inc = outdir + '-' + str(count)\n        while os.path.exists(outdir_inc):\n            count = count + 1\n            outdir_inc = outdir + '-' + str(count)\n            assert count < 100\n        outdir = outdir_inc\n        os.makedirs(outdir)\n    return outdir\n\n\ndef update_summary(\n        epoch,\n        train_metrics,\n        eval_metrics,\n        filename,\n        lr=None,\n        write_header=False,\n        log_wandb=False,\n):\n    rowd = OrderedDict(epoch=epoch)\n    rowd.update([('train_' + k, v) for k, v in train_metrics.items()])\n    if eval_metrics:\n        rowd.update([('eval_' + k, v) for k, v in eval_metrics.items()])\n    if lr is not None:\n        rowd['lr'] = lr\n    if log_wandb:\n        wandb.log(rowd)\n    with open(filename, mode='a') as cf:\n        dw = csv.DictWriter(cf, fieldnames=rowd.keys())\n        if write_header:  # first iteration (epoch == 1 can't be used)\n            dw.writeheader()\n        dw.writerow(rowd)\n"
  },
  {
    "path": "timm/version.py",
    "content": "__version__ = '1.0.26.dev0'\n"
  },
  {
    "path": "train.py",
    "content": "#!/usr/bin/env python3\n\"\"\" ImageNet Training Script\n\nThis is intended to be a lean and easily modifiable ImageNet training script that reproduces ImageNet\ntraining results with some of the latest networks and training techniques. It favours canonical PyTorch\nand standard Python style over trying to be able to 'do it all.' That said, it offers quite a few speed\nand training result improvements over the usual PyTorch example scripts. Repurpose as you see fit.\n\nThis script was started from an early version of the PyTorch ImageNet example\n(https://github.com/pytorch/examples/tree/master/imagenet)\n\nNVIDIA CUDA specific speedups adopted from NVIDIA Apex examples\n(https://github.com/NVIDIA/apex/tree/master/examples/imagenet)\n\nHacked together by / Copyright 2020 Ross Wightman (https://github.com/rwightman)\n\"\"\"\nimport argparse\nimport copy\nimport importlib\nimport json\nimport logging\nimport os\nimport time\nfrom collections import OrderedDict\nfrom contextlib import suppress\nfrom datetime import datetime\nfrom functools import partial\n\nimport torch\nimport torch.nn as nn\nimport torchvision.utils\nimport yaml\n\nfrom timm import utils\nfrom timm.data import create_dataset, create_loader, create_naflex_loader, resolve_data_config, \\\n    Mixup, FastCollateMixup, AugMixDataset\nfrom timm.layers import convert_splitbn_model, convert_sync_batchnorm, set_fast_norm\nfrom timm.loss import JsdCrossEntropy, SoftTargetCrossEntropy, BinaryCrossEntropy, LabelSmoothingCrossEntropy\nfrom timm.models import create_model, safe_model_name, resume_checkpoint, load_checkpoint, model_parameters\nfrom timm.optim import create_optimizer_v2, optimizer_kwargs\nfrom timm.scheduler import create_scheduler_v2, scheduler_kwargs\nfrom timm.utils import NativeScaler\nfrom timm.task import (\n    ClassificationTask,\n    LogitDistillationTask,\n    FeatureDistillationTask,\n    TokenDistillationTask,\n)\n\n\ntry:\n    import wandb\n    has_wandb = True\nexcept 
ImportError:\n    has_wandb = False\n\ntry:\n    from functorch.compile import memory_efficient_fusion\n    has_functorch = True\nexcept ImportError as e:\n    has_functorch = False\n\nhas_compile = hasattr(torch, 'compile')\n\n\n_logger = logging.getLogger('train')\n\n# The first arg parser parses out only the --config argument, this argument is used to\n# load a yaml file containing key-values that override the defaults for the main parser below\nconfig_parser = parser = argparse.ArgumentParser(description='Training Config', add_help=False)\nparser.add_argument('-c', '--config', default='', type=str, metavar='FILE',\n                    help='YAML config file specifying default arguments')\n\n\nparser = argparse.ArgumentParser(description='PyTorch ImageNet Training')\n\n# Dataset parameters\ngroup = parser.add_argument_group('Dataset parameters')\n# Keep this argument outside the dataset group because it is positional.\nparser.add_argument('data', nargs='?', metavar='DIR', const=None,\n                    help='path to dataset (positional is *deprecated*, use --data-dir)')\ngroup.add_argument('--data-dir', metavar='DIR',\n                    help='path to dataset (root dir)')\ngroup.add_argument('--dataset', metavar='NAME', default='',\n                    help='dataset type + name (\"<type>/<name>\") (default: ImageFolder or ImageTar if empty)')\ngroup.add_argument('--train-split', metavar='NAME', default='train',\n                   help='dataset train split (default: train)')\ngroup.add_argument('--val-split', metavar='NAME', default='validation',\n                   help='dataset validation split (default: validation)')\ngroup.add_argument('--train-num-samples', default=None, type=int,\n                    metavar='N', help='Manually specify num samples in train split, for IterableDatasets.')\ngroup.add_argument('--val-num-samples', default=None, type=int,\n                    metavar='N', help='Manually specify num samples in validation split, for 
IterableDatasets.')\ngroup.add_argument('--dataset-download', action='store_true', default=False,\n                   help='Allow download of dataset for torch/ and tfds/ datasets that support it.')\ngroup.add_argument('--class-map', default='', type=str, metavar='FILENAME',\n                   help='path to class to idx mapping file (default: \"\")')\ngroup.add_argument('--input-img-mode', default=None, type=str,\n                   help='Dataset image conversion mode for input images.')\ngroup.add_argument('--input-key', default=None, type=str,\n                   help='Dataset key for input images.')\ngroup.add_argument('--target-key', default=None, type=str,\n                   help='Dataset key for target labels.')\ngroup.add_argument('--dataset-trust-remote-code', action='store_true', default=False,\n                   help='Allow huggingface dataset import to execute code downloaded from the dataset\\'s repo.')\n\n# Model parameters\ngroup = parser.add_argument_group('Model parameters')\ngroup.add_argument('--model', default='resnet50', type=str, metavar='MODEL',\n                   help='Name of model to train (default: \"resnet50\")')\ngroup.add_argument('--pretrained', action='store_true', default=False,\n                   help='Start with pretrained version of specified network (if avail)')\ngroup.add_argument('--pretrained-path', default=None, type=str,\n                   help='Load this checkpoint as if they were the pretrained weights (with adaptation).')\ngroup.add_argument('--initial-checkpoint', default='', type=str, metavar='PATH',\n                   help='Load this checkpoint into model after initialization (default: none)')\ngroup.add_argument('--resume', default='', type=str, metavar='PATH',\n                   help='Resume full model and optimizer state from checkpoint (default: none)')\ngroup.add_argument('--no-resume-opt', action='store_true', default=False,\n                   help='prevent resume of optimizer state when resuming 
model')\ngroup.add_argument('--num-classes', type=int, default=None, metavar='N',\n                   help='number of label classes (Model default if None)')\ngroup.add_argument('--gp', default=None, type=str, metavar='POOL',\n                   help='Global pool type, one of (fast, avg, max, avgmax, avgmaxc). Model default if None.')\ngroup.add_argument('--img-size', type=int, default=None, metavar='N',\n                   help='Image size (default: None => model default)')\ngroup.add_argument('--in-chans', type=int, default=None, metavar='N',\n                   help='Image input channels (default: None => 3)')\ngroup.add_argument('--input-size', default=None, nargs=3, type=int, metavar='N',\n                   help='Input all image dimensions (d h w, e.g. --input-size 3 224 224), uses model default if empty')\ngroup.add_argument('--crop-pct', default=None, type=float,\n                   metavar='N', help='Input image center crop percent (for validation only)')\ngroup.add_argument('--mean', type=float, nargs='+', default=None, metavar='MEAN',\n                   help='Override mean pixel value of dataset')\ngroup.add_argument('--std', type=float, nargs='+', default=None, metavar='STD',\n                   help='Override std deviation of dataset')\ngroup.add_argument('--interpolation', default='', type=str, metavar='NAME',\n                   help='Image resize interpolation type (overrides model)')\ngroup.add_argument('-b', '--batch-size', type=int, default=128, metavar='N',\n                   help='Input batch size for training (default: 128)')\ngroup.add_argument('-vb', '--validation-batch-size', type=int, default=None, metavar='N',\n                   help='Validation batch size override (default: None)')\ngroup.add_argument('--channels-last', action='store_true', default=False,\n                   help='Use channels_last memory layout')\ngroup.add_argument('--fuser', default='', type=str,\n                   help=\"Select jit fuser. 
One of ('', 'te', 'old', 'nvfuser')\")\ngroup.add_argument('--grad-accum-steps', type=int, default=1, metavar='N',\n                   help='The number of steps to accumulate gradients (default: 1)')\ngroup.add_argument('--grad-checkpointing', action='store_true', default=False,\n                   help='Enable gradient checkpointing through model blocks/stages')\ngroup.add_argument('--fast-norm', default=False, action='store_true',\n                   help='enable experimental fast-norm')\ngroup.add_argument('--model-kwargs', nargs='*', default={}, action=utils.ParseKwargs)\ngroup.add_argument('--head-init-scale', default=None, type=float,\n                   help='Head initialization scale')\ngroup.add_argument('--head-init-bias', default=None, type=float,\n                   help='Head initialization bias value')\ngroup.add_argument('--torchcompile-mode', type=str, default=None,\n                    help=\"torch.compile mode (default: None).\")\n\n# scripting / codegen\nscripting_group = group.add_mutually_exclusive_group()\nscripting_group.add_argument('--torchscript', dest='torchscript', action='store_true',\n                             help='torch.jit.script the full model')\nscripting_group.add_argument('--torchcompile', nargs='?', type=str, default=None, const='inductor',\n                             help=\"Enable compilation w/ specified backend (default: inductor).\")\n\n# Device & distributed\ngroup = parser.add_argument_group('Device parameters')\ngroup.add_argument('--device', default='cuda', type=str,\n                    help=\"Device (accelerator) to use.\")\ngroup.add_argument('--amp', action='store_true', default=False,\n                   help='use AMP for mixed precision training')\ngroup.add_argument('--amp-dtype', default='float16', type=str,\n                   help='lower precision AMP dtype (default: float16)')\ngroup.add_argument('--model-dtype', default=None, type=str,\n                   help='Model dtype override (non-AMP) (default: 
float32)')\ngroup.add_argument('--no-ddp-bb', action='store_true', default=False,\n                   help='Force broadcast buffers for native DDP to off.')\ngroup.add_argument('--synchronize-step', action='store_true', default=False,\n                   help='torch.cuda.synchronize() end of each step')\ngroup.add_argument(\"--local_rank\", default=0, type=int)\ngroup.add_argument('--device-modules', default=None, type=str, nargs='+',\n                    help=\"Python imports for device backend modules.\")\n\n# Optimizer parameters\ngroup = parser.add_argument_group('Optimizer parameters')\ngroup.add_argument('--opt', default='sgd', type=str, metavar='OPTIMIZER',\n                   help='Optimizer (default: \"sgd\")')\ngroup.add_argument('--opt-eps', default=None, type=float, metavar='EPSILON',\n                   help='Optimizer Epsilon (default: None, use opt default)')\ngroup.add_argument('--opt-betas', default=None, type=float, nargs='+', metavar='BETA',\n                   help='Optimizer Betas (default: None, use opt default)')\ngroup.add_argument('--momentum', type=float, default=0.9, metavar='M',\n                   help='Optimizer momentum (default: 0.9)')\ngroup.add_argument('--weight-decay', type=float, default=2e-5,\n                   help='weight decay (default: 2e-5)')\ngroup.add_argument('--clip-grad', type=float, default=None, metavar='NORM',\n                   help='Clip gradient norm (default: None, no clipping)')\ngroup.add_argument('--clip-mode', type=str, default='norm',\n                   help='Gradient clipping mode. 
One of (\"norm\", \"value\", \"agc\")')\ngroup.add_argument('--layer-decay', type=float, default=None,\n                   help='layer-wise learning rate decay (default: None)')\ngroup.add_argument('--layer-decay-min-scale', type=float, default=0,\n                   help='layer-wise lr decay minimum scale clamp (default: 0)')\ngroup.add_argument('--layer-decay-no-opt-scale', type=float, default=None,\n                   help='layer-wise lr decay no optimization scale (default: None)')\ngroup.add_argument('--opt-kwargs', nargs='*', default={}, action=utils.ParseKwargs)\n\n# Learning rate schedule parameters\ngroup = parser.add_argument_group('Learning rate schedule parameters')\ngroup.add_argument('--sched', type=str, default='cosine', metavar='SCHEDULER',\n                   help='LR scheduler (default: \"cosine\"')\ngroup.add_argument('--sched-on-updates', action='store_true', default=False,\n                   help='Apply LR scheduler step on update instead of epoch end.')\ngroup.add_argument('--lr', type=float, default=None, metavar='LR',\n                   help='learning rate, overrides lr-base if set (default: None)')\ngroup.add_argument('--lr-base', type=float, default=0.1, metavar='LR',\n                   help='base learning rate: lr = lr_base * global_batch_size / base_size')\ngroup.add_argument('--lr-base-size', type=int, default=256, metavar='DIV',\n                   help='base learning rate batch size (divisor, default: 256).')\ngroup.add_argument('--lr-base-scale', type=str, default='', metavar='SCALE',\n                   help='base learning rate vs batch_size scaling (\"linear\", \"sqrt\", based on opt if empty)')\ngroup.add_argument('--lr-noise', type=float, nargs='+', default=None, metavar='pct, pct',\n                   help='learning rate noise on/off epoch percentages')\ngroup.add_argument('--lr-noise-pct', type=float, default=0.67, metavar='PERCENT',\n                   help='learning rate noise limit percent (default: 
0.67)')\ngroup.add_argument('--lr-noise-std', type=float, default=1.0, metavar='STDDEV',\n                   help='learning rate noise std-dev (default: 1.0)')\ngroup.add_argument('--lr-cycle-mul', type=float, default=1.0, metavar='MULT',\n                   help='learning rate cycle len multiplier (default: 1.0)')\ngroup.add_argument('--lr-cycle-decay', type=float, default=0.5, metavar='MULT',\n                   help='amount to decay each learning rate cycle (default: 0.5)')\ngroup.add_argument('--lr-cycle-limit', type=int, default=1, metavar='N',\n                   help='learning rate cycle limit, cycles enabled if > 1')\ngroup.add_argument('--lr-k-decay', type=float, default=1.0,\n                   help='learning rate k-decay for cosine/poly (default: 1.0)')\ngroup.add_argument('--warmup-lr', type=float, default=1e-5, metavar='LR',\n                   help='warmup learning rate (default: 1e-5)')\ngroup.add_argument('--min-lr', type=float, default=0, metavar='LR',\n                   help='lower lr bound for cyclic schedulers that hit 0 (default: 0)')\ngroup.add_argument('--epochs', type=int, default=300, metavar='N',\n                   help='number of epochs to train (default: 300)')\ngroup.add_argument('--epoch-repeats', type=float, default=0., metavar='N',\n                   help='epoch repeat multiplier (number of times to repeat dataset epoch per train epoch).')\ngroup.add_argument('--start-epoch', default=None, type=int, metavar='N',\n                   help='manual epoch number (useful on restarts)')\ngroup.add_argument('--decay-milestones', default=[90, 180, 270], type=int, nargs='+', metavar=\"MILESTONES\",\n                   help='list of decay epoch indices for multistep lr. 
must be increasing')\ngroup.add_argument('--decay-epochs', type=float, default=90, metavar='N',\n                   help='epoch interval to decay LR')\ngroup.add_argument('--warmup-epochs', type=int, default=5, metavar='N',\n                   help='epochs to warmup LR, if scheduler supports')\ngroup.add_argument('--warmup-prefix', action='store_true', default=False,\n                   help='Exclude warmup period from decay schedule.'),\ngroup.add_argument('--cooldown-epochs', type=int, default=0, metavar='N',\n                   help='epochs to cooldown LR at min_lr, after cyclic schedule ends')\ngroup.add_argument('--patience-epochs', type=int, default=10, metavar='N',\n                   help='patience epochs for Plateau LR scheduler (default: 10)')\ngroup.add_argument('--decay-rate', '--dr', type=float, default=0.1, metavar='RATE',\n                   help='LR decay rate (default: 0.1)')\n\n# Augmentation & regularization parameters\ngroup = parser.add_argument_group('Augmentation and regularization parameters')\ngroup.add_argument('--no-aug', action='store_true', default=False,\n                   help='Disable all training augmentation, override other train aug args')\ngroup.add_argument('--train-crop-mode', type=str, default=None,\n                   help='Crop-mode in train'),\ngroup.add_argument('--scale', type=float, nargs='+', default=[0.08, 1.0], metavar='PCT',\n                   help='Random resize scale (default: 0.08 1.0)')\ngroup.add_argument('--ratio', type=float, nargs='+', default=[3. / 4., 4. 
/ 3.], metavar='RATIO',\n                   help='Random resize aspect ratio (default: 0.75 1.33)')\ngroup.add_argument('--hflip', type=float, default=0.5,\n                   help='Horizontal flip training aug probability')\ngroup.add_argument('--vflip', type=float, default=0.,\n                   help='Vertical flip training aug probability')\ngroup.add_argument('--color-jitter', type=float, default=0.4, metavar='PCT',\n                   help='Color jitter factor (default: 0.4)')\ngroup.add_argument('--color-jitter-prob', type=float, default=None, metavar='PCT',\n                   help='Probability of applying any color jitter.')\ngroup.add_argument('--grayscale-prob', type=float, default=None, metavar='PCT',\n                   help='Probability of applying random grayscale conversion.')\ngroup.add_argument('--gaussian-blur-prob', type=float, default=None, metavar='PCT',\n                   help='Probability of applying gaussian blur.')\ngroup.add_argument('--aa', type=str, default=None, metavar='NAME',\n                   help='Use AutoAugment policy. \"v0\" or \"original\". (default: None)'),\ngroup.add_argument('--aug-repeats', type=float, default=0,\n                   help='Number of augmentation repetitions (distributed training only) (default: 0)')\ngroup.add_argument('--aug-splits', type=int, default=0,\n                   help='Number of augmentation splits (default: 0, valid: 0 or >=2)')\ngroup.add_argument('--jsd-loss', action='store_true', default=False,\n                   help='Enable Jensen-Shannon Divergence + CE loss. 
Use with `--aug-splits`.')\ngroup.add_argument('--bce-loss', action='store_true', default=False,\n                   help='Enable BCE loss w/ Mixup/CutMix use.')\ngroup.add_argument('--bce-sum', action='store_true', default=False,\n                   help='Sum over classes when using BCE loss.')\ngroup.add_argument('--bce-target-thresh', type=float, default=None,\n                   help='Threshold for binarizing softened BCE targets (default: None, disabled).')\ngroup.add_argument('--bce-pos-weight', type=float, default=None,\n                   help='Positive weighting for BCE loss.')\ngroup.add_argument('--reprob', type=float, default=0., metavar='PCT',\n                   help='Random erase prob (default: 0.)')\ngroup.add_argument('--remode', type=str, default='pixel',\n                   help='Random erase mode (default: \"pixel\")')\ngroup.add_argument('--recount', type=int, default=1,\n                   help='Random erase count (default: 1)')\ngroup.add_argument('--resplit', action='store_true', default=False,\n                   help='Do not random erase first (clean) augmentation split')\ngroup.add_argument('--mixup', type=float, default=0.0,\n                   help='mixup alpha, mixup enabled if > 0. (default: 0.)')\ngroup.add_argument('--cutmix', type=float, default=0.0,\n                   help='cutmix alpha, cutmix enabled if > 0. 
(default: 0.)')\ngroup.add_argument('--cutmix-minmax', type=float, nargs='+', default=None,\n                   help='cutmix min/max ratio, overrides alpha and enables cutmix if set (default: None)')\ngroup.add_argument('--mixup-prob', type=float, default=1.0,\n                   help='Probability of performing mixup or cutmix when either/both is enabled')\ngroup.add_argument('--mixup-switch-prob', type=float, default=0.5,\n                   help='Probability of switching to cutmix when both mixup and cutmix enabled')\ngroup.add_argument('--mixup-mode', type=str, default='batch',\n                   help='How to apply mixup/cutmix params. Per \"batch\", \"pair\", or \"elem\"')\ngroup.add_argument('--mixup-off-epoch', default=0, type=int, metavar='N',\n                   help='Turn off mixup after this epoch, disabled if 0 (default: 0)')\ngroup.add_argument('--smoothing', type=float, default=0.1,\n                   help='Label smoothing (default: 0.1)')\ngroup.add_argument('--train-interpolation', type=str, default='random',\n                   help='Training interpolation (random, bilinear, bicubic default: \"random\")')\ngroup.add_argument('--drop', type=float, default=0.0, metavar='PCT',\n                   help='Dropout rate (default: 0.)')\ngroup.add_argument('--drop-connect', type=float, default=None, metavar='PCT',\n                   help='Drop connect rate, DEPRECATED, use drop-path (default: None)')\ngroup.add_argument('--drop-path', type=float, default=None, metavar='PCT',\n                   help='Drop path rate (default: None)')\ngroup.add_argument('--drop-block', type=float, default=None, metavar='PCT',\n                   help='Drop block rate (default: None)')\n\n# Batch norm parameters (only works with gen_efficientnet based models currently)\ngroup = parser.add_argument_group('Batch norm parameters', 'Only works with gen_efficientnet based models currently.')\ngroup.add_argument('--bn-momentum', type=float, default=None,\n                   
help='BatchNorm momentum override (if not None)')\ngroup.add_argument('--bn-eps', type=float, default=None,\n                   help='BatchNorm epsilon override (if not None)')\ngroup.add_argument('--sync-bn', action='store_true',\n                   help='Enable synchronized BatchNorm.')\ngroup.add_argument('--dist-bn', type=str, default='reduce',\n                   help='Distribute BatchNorm stats between nodes after each epoch (\"broadcast\", \"reduce\", or \"\")')\ngroup.add_argument('--split-bn', action='store_true',\n                   help='Enable separate BN layers per augmentation split.')\n\n# Model Exponential Moving Average\ngroup = parser.add_argument_group('Model exponential moving average parameters')\ngroup.add_argument('--model-ema', action='store_true', default=False,\n                   help='Enable tracking moving average of model weights.')\ngroup.add_argument('--model-ema-force-cpu', action='store_true', default=False,\n                   help='Force ema to be tracked on CPU, rank=0 node only. 
Disables EMA validation.')\ngroup.add_argument('--model-ema-decay', type=float, default=0.9998,\n                   help='Decay factor for model weights moving average (default: 0.9998)')\ngroup.add_argument('--model-ema-warmup', action='store_true',\n                   help='Enable warmup for model EMA decay.')\n\n# Misc\ngroup = parser.add_argument_group('Miscellaneous parameters')\ngroup.add_argument('--seed', type=int, default=42, metavar='S',\n                   help='random seed (default: 42)')\ngroup.add_argument('--worker-seeding', type=str, default='all',\n                   help='worker seed mode (default: all)')\ngroup.add_argument('--log-interval', type=int, default=50, metavar='N',\n                   help='how many batches to wait before logging training status')\ngroup.add_argument('--val-interval', type=int, default=1, metavar='N',\n                   help='how many epochs between validation and checkpointing')\ngroup.add_argument('--recovery-interval', type=int, default=0, metavar='N',\n                   help='how many batches to wait before writing recovery checkpoint')\ngroup.add_argument('--checkpoint-hist', type=int, default=10, metavar='N',\n                   help='number of checkpoints to keep (default: 10)')\ngroup.add_argument('-j', '--workers', type=int, default=4, metavar='N',\n                   help='how many training processes to use (default: 4)')\ngroup.add_argument('--save-images', action='store_true', default=False,\n                   help='save images of input batches every log interval for debugging')\ngroup.add_argument('--pin-mem', action='store_true', default=False,\n                   help='Pin CPU memory in DataLoader for more efficient (sometimes) transfer to GPU.')\ngroup.add_argument('--no-prefetcher', action='store_true', default=False,\n                   help='disable fast prefetcher')\ngroup.add_argument('--output', default='', type=str, metavar='PATH',\n                   help='path to output folder (default: 
none, current dir)')\ngroup.add_argument('--experiment', default='', type=str, metavar='NAME',\n                   help='name of train experiment, name of sub-folder for output')\ngroup.add_argument('--eval-metric', default='top1', type=str, metavar='EVAL_METRIC',\n                   help='Best metric (default: \"top1\")')\ngroup.add_argument('--tta', type=int, default=0, metavar='N',\n                   help='Test/inference time augmentation (oversampling) factor. 0=None (default: 0)')\ngroup.add_argument('--use-multi-epochs-loader', action='store_true', default=False,\n                   help='use the multi-epochs-loader to save time at the beginning of every epoch')\ngroup.add_argument('--log-wandb', action='store_true', default=False,\n                   help='log training and validation metrics to wandb')\ngroup.add_argument('--wandb-project', default=None, type=str,\n                   help='wandb project name')\ngroup.add_argument('--wandb-tags', default=[], type=str, nargs='+',\n                   help='wandb tags')\ngroup.add_argument('--wandb-resume-id', default='', type=str, metavar='ID',\n                   help='If resuming a run, the id of the run in wandb')\n\n# NaFlex scheduled loader arguments\ngroup.add_argument('--naflex-loader', action='store_true', default=False,\n                   help='Use NaFlex loader (Requires NaFlex compatible model)')\ngroup.add_argument('--naflex-train-seq-lens', type=int, nargs='+', default=[128, 256, 576, 784, 1024],\n                   help='Sequence lengths to use for NaFlex loader')\ngroup.add_argument('--naflex-max-seq-len', type=int, default=576,\n                   help='Fixed maximum sequence length for NaFlex loader (validation)')\ngroup.add_argument('--naflex-patch-sizes', type=int, nargs='+', default=None,\n                   help='List of patch sizes for variable patch size training (e.g., 8 12 16 24 32)')\ngroup.add_argument('--naflex-patch-size-probs', type=float, nargs='+', default=None,\n                
   help='Probabilities for each patch size (must sum to 1.0, uniform if not specified)')\ngroup.add_argument('--naflex-loss-scale', default='linear', type=str,\n                   help='Scale loss (gradient) by batch_size (\"none\", \"sqrt\", or \"linear\")')\n\n# Knowledge Distillation parameters\nparser.add_argument('--kd-model-name', default=None, type=str,\n                    help='Name of teacher model for knowledge distillation')\nparser.add_argument('--kd-distill-type', default='logit', type=str, choices=['logit', 'feature', 'token'],\n                    help='Type of distillation: \"logit\" for output distillation, \"feature\" for intermediate features, \"token\" for models with distillation heads (default: logit)')\nparser.add_argument('--kd-loss-type', default='kl', type=str,\n                    help='Loss function for logit distillation (default: kl). Currently only \"kl\" supported, reserved for future extensions.')\nparser.add_argument('--distill-loss-weight', default=None, type=float,\n                    help='Weight for distillation loss. If both weights specified: loss = task_weight * task + distill_weight * distill. '\n                         'If only task_weight: loss = task_weight * task + (1-task_weight) * distill. Default: 1.0 if only this specified.')\nparser.add_argument('--task-loss-weight', default=None, type=float,\n                    help='Weight for task (classification) loss. See --distill-loss-weight for weighting modes. 
Default: 1.0 if unspecified.')\nparser.add_argument('--kd-temperature', default=4.0, type=float,\n                    help='Temperature for softmax in distillation (default: 4.0, typical range: 1-4)')\nparser.add_argument('--kd-student-feature-dim', default=None, type=int,\n                    help='Student model feature dimension (auto-detected from model.head_hidden_size or model.num_features if not specified)')\nparser.add_argument('--kd-teacher-feature-dim', default=None, type=int,\n                    help='Teacher model feature dimension (auto-detected from model.head_hidden_size or model.num_features if not specified)')\nparser.add_argument('--kd-token-distill-type', default='soft', type=str, choices=['soft', 'hard'],\n                    help='Token distillation type: \"soft\" for KL-div with temperature, \"hard\" for CE with teacher argmax (default: soft)')\n\n\ndef _parse_args():\n    # Do we have a config file to parse?\n    args_config, remaining = config_parser.parse_known_args()\n    if args_config.config:\n        with open(args_config.config, 'r') as f:\n            cfg = yaml.safe_load(f)\n            parser.set_defaults(**cfg)\n\n    # The main arg parser parses the rest of the args, the usual\n    # defaults will have been overridden if config file specified.\n    args = parser.parse_args(remaining)\n\n    # Cache the args as a text string to save them in the output dir later\n    args_text = yaml.safe_dump(args.__dict__, default_flow_style=False)\n    return args, args_text\n\n\ndef main():\n    utils.setup_default_logging()\n    args, args_text = _parse_args()\n\n    if args.device_modules:\n        for module in args.device_modules:\n            importlib.import_module(module)\n\n    if torch.cuda.is_available():\n        torch.backends.cuda.matmul.allow_tf32 = True\n        torch.backends.cudnn.benchmark = True\n\n    args.prefetcher = not args.no_prefetcher\n    args.grad_accum_steps = max(1, args.grad_accum_steps)\n    device = 
utils.init_distributed_device(args)\n    if args.distributed:\n        _logger.info(\n            'Training in distributed mode with multiple processes, 1 device per process.'\n            f'Process {args.rank}, total {args.world_size}, device {args.device}.')\n    else:\n        _logger.info(f'Training with a single process on 1 device ({args.device}).')\n    assert args.rank >= 0\n\n    model_dtype = None\n    if args.model_dtype:\n        assert args.model_dtype in ('float32', 'float16', 'bfloat16')\n        model_dtype = getattr(torch, args.model_dtype)\n        if model_dtype == torch.float16:\n            _logger.warning('float16 is not recommended for training, for half precision bfloat16 is recommended.')\n\n    # resolve AMP arguments based on PyTorch availability\n    amp_dtype = torch.float16\n    if args.amp:\n        assert model_dtype is None or model_dtype == torch.float32, 'float32 model dtype must be used with AMP'\n        assert args.amp_dtype in ('float16', 'bfloat16')\n        if args.amp_dtype == 'bfloat16':\n            amp_dtype = torch.bfloat16\n\n    utils.random_seed(args.seed, args.rank)\n\n    if args.fuser:\n        utils.set_jit_fuser(args.fuser)\n    if args.fast_norm:\n        set_fast_norm()\n\n    in_chans = 3\n    if args.in_chans is not None:\n        in_chans = args.in_chans\n    elif args.input_size is not None:\n        in_chans = args.input_size[0]\n\n    factory_kwargs = {}\n    if args.pretrained_path:\n        # merge with pretrained_cfg of model, 'file' has priority over 'url' and 'hf_hub'.\n        factory_kwargs['pretrained_cfg_overlay'] = dict(\n            file=args.pretrained_path,\n            num_classes=-1,  # force head adaptation\n        )\n\n    model = create_model(\n        args.model,\n        pretrained=args.pretrained,\n        in_chans=in_chans,\n        num_classes=args.num_classes,\n        drop_rate=args.drop,\n        drop_path_rate=args.drop_path,\n        drop_block_rate=args.drop_block,\n        
global_pool=args.gp,\n        bn_momentum=args.bn_momentum,\n        bn_eps=args.bn_eps,\n        scriptable=args.torchscript,\n        checkpoint_path=args.initial_checkpoint,\n        **factory_kwargs,\n        **args.model_kwargs,\n    )\n    if args.head_init_scale is not None:\n        with torch.no_grad():\n            model.get_classifier().weight.mul_(args.head_init_scale)\n            model.get_classifier().bias.mul_(args.head_init_scale)\n    if args.head_init_bias is not None:\n        nn.init.constant_(model.get_classifier().bias, args.head_init_bias)\n\n    if args.num_classes is None:\n        assert hasattr(model, 'num_classes'), 'Model must have `num_classes` attr if not set on cmd line/config.'\n        args.num_classes = model.num_classes  # FIXME handle model default vs config num_classes more elegantly\n\n    if args.grad_checkpointing:\n        model.set_grad_checkpointing(enable=True)\n\n    # Create training task (classification or distillation)\n    task = None\n\n    if utils.is_primary(args):\n        _logger.info(\n            f'Model {safe_model_name(args.model)} created, param count:{sum([m.numel() for m in model.parameters()])}')\n\n    data_config = resolve_data_config(vars(args), model=model, verbose=utils.is_primary(args))\n\n    # setup augmentation batch splits for contrastive loss or split bn\n    num_aug_splits = 0\n    if args.aug_splits > 0:\n        assert args.aug_splits > 1, 'A split of 1 makes no sense'\n        num_aug_splits = args.aug_splits\n\n    # enable split bn (separate bn stats per batch-portion)\n    if args.split_bn:\n        assert num_aug_splits > 1 or args.resplit\n        model = convert_splitbn_model(model, max(num_aug_splits, 2))\n\n    # move model to GPU, enable channels last layout if set\n    model.to(device=device, dtype=model_dtype)  # FIXME move model device & dtype into create_model\n    if args.channels_last:\n        model.to(memory_format=torch.channels_last)\n\n    # setup synchronized 
BatchNorm for distributed training\n    if args.distributed and args.sync_bn:\n        args.dist_bn = ''  # disable dist_bn when sync BN active\n        assert not args.split_bn\n        model = convert_sync_batchnorm(model)\n        if utils.is_primary(args):\n            _logger.info(\n                'Converted model to use Synchronized BatchNorm. WARNING: You may have issues if using '\n                'zero initialized BN layers (enabled by default for ResNets) while sync-bn enabled.')\n\n    model_patch_size = None\n    if args.naflex_loader:\n        # NaFlexVit models have embeds.patch_size. Needs to be extracted here before mutating the model.\n        model_patch_size = getattr(getattr(model, \"embeds\", None), \"patch_size\", None)\n\n    if args.torchscript:\n        assert not args.torchcompile\n        assert not args.sync_bn, 'Cannot use SyncBatchNorm with torchscripted model'\n        model = torch.jit.script(model)\n\n    if not args.lr:\n        global_batch_size = args.batch_size * args.world_size * args.grad_accum_steps\n        batch_ratio = global_batch_size / args.lr_base_size\n        if not args.lr_base_scale:\n            on = args.opt.lower()\n            args.lr_base_scale = 'sqrt' if any([o in on for o in ('ada', 'lamb')]) else 'linear'\n        if args.lr_base_scale == 'sqrt':\n            batch_ratio = batch_ratio ** 0.5\n        args.lr = args.lr_base * batch_ratio\n        if utils.is_primary(args):\n            _logger.info(\n                f'Learning rate ({args.lr}) calculated from base learning rate ({args.lr_base}) '\n                f'and effective global batch size ({global_batch_size}) with {args.lr_base_scale} scaling.')\n\n    optimizer = create_optimizer_v2(\n        model,\n        **optimizer_kwargs(cfg=args),\n        **args.opt_kwargs,\n    )\n    if utils.is_primary(args):\n        defaults = copy.deepcopy(optimizer.defaults)\n        defaults['weight_decay'] = args.weight_decay  # this isn't stored in 
optimizer.defaults\n        defaults = ', '.join([f'{k}: {v}' for k, v in defaults.items()])\n        logging.info(\n            f'Created {type(optimizer).__name__} ({args.opt}) optimizer: {defaults}'\n        )\n\n    # setup automatic mixed-precision (AMP) loss scaling and op casting\n    amp_autocast = suppress  # do nothing\n    loss_scaler = None\n    if args.amp:\n        amp_autocast = partial(torch.autocast, device_type=device.type, dtype=amp_dtype)\n        if device.type in ('cuda',) and amp_dtype == torch.float16:\n            # loss scaler only used for float16 (half) dtype, bfloat16 does not need it\n            loss_scaler = NativeScaler(device=device.type)\n        if utils.is_primary(args):\n            _logger.info('Using native Torch AMP. Training in mixed precision.')\n    else:\n        if utils.is_primary(args):\n            _logger.info(f'AMP not enabled. Training in {model_dtype or torch.float32}.')\n\n    # optionally resume from a checkpoint\n    resume_epoch = None\n    if args.resume:\n        resume_epoch = resume_checkpoint(\n            model,\n            args.resume,\n            optimizer=None if args.no_resume_opt else optimizer,\n            loss_scaler=None if args.no_resume_opt else loss_scaler,\n            log_info=utils.is_primary(args),\n        )\n\n    # setup exponential moving average of model weights, SWA could be used here too\n    model_ema = None\n    if args.model_ema:\n        # Important to create EMA model after cuda(), DP wrapper, and AMP but before DDP wrapper\n        model_ema = utils.ModelEmaV3(\n            model,\n            decay=args.model_ema_decay,\n            use_warmup=args.model_ema_warmup,\n            device='cpu' if args.model_ema_force_cpu else None,\n        )\n        if args.resume:\n            load_checkpoint(model_ema.module, args.resume, use_ema=True)\n        if args.torchcompile:\n            model_ema = torch.compile(\n                model_ema,\n                
backend=args.torchcompile,\n                mode=args.torchcompile_mode,\n            )\n\n    # create the train and eval datasets\n    if args.data and not args.data_dir:\n        args.data_dir = args.data\n    if args.input_img_mode is None:\n        input_img_mode = 'RGB' if data_config['input_size'][0] == 3 else 'L'\n    else:\n        input_img_mode = args.input_img_mode\n\n    dataset_train = create_dataset(\n        args.dataset,\n        root=args.data_dir,\n        split=args.train_split,\n        is_training=True,\n        class_map=args.class_map,\n        download=args.dataset_download,\n        batch_size=args.batch_size,\n        seed=args.seed,\n        repeats=args.epoch_repeats,\n        input_img_mode=input_img_mode,\n        input_key=args.input_key,\n        target_key=args.target_key,\n        num_samples=args.train_num_samples,\n        trust_remote_code=args.dataset_trust_remote_code,\n    )\n\n    dataset_eval = None\n    if args.val_split:\n        dataset_eval = create_dataset(\n            args.dataset,\n            root=args.data_dir,\n            split=args.val_split,\n            is_training=False,\n            class_map=args.class_map,\n            download=args.dataset_download,\n            batch_size=args.batch_size,\n            input_img_mode=input_img_mode,\n            input_key=args.input_key,\n            target_key=args.target_key,\n            num_samples=args.val_num_samples,\n            trust_remote_code=args.dataset_trust_remote_code,\n        )\n\n    # create data loaders w/ augmentation pipeline\n    train_interpolation = args.train_interpolation\n    if args.no_aug or not train_interpolation:\n        train_interpolation = data_config['interpolation']\n        \n    # Check if we should use the NaFlex scheduled loader\n    common_loader_kwargs = dict(\n        mean=data_config['mean'],\n        std=data_config['std'],\n        pin_memory=args.pin_mem,\n        img_dtype=model_dtype or torch.float32,\n        
device=device,\n        distributed=args.distributed,\n        use_prefetcher=args.prefetcher,\n    )\n\n    train_loader_kwargs = dict(\n        batch_size=args.batch_size,\n        is_training=True,\n        no_aug=args.no_aug,\n        re_prob=args.reprob,\n        re_mode=args.remode,\n        re_count=args.recount,\n        re_split=args.resplit,\n        train_crop_mode=args.train_crop_mode,\n        scale=args.scale,\n        ratio=args.ratio,\n        hflip=args.hflip,\n        vflip=args.vflip,\n        color_jitter=args.color_jitter,\n        color_jitter_prob=args.color_jitter_prob,\n        grayscale_prob=args.grayscale_prob,\n        gaussian_blur_prob=args.gaussian_blur_prob,\n        auto_augment=args.aa,\n        num_aug_repeats=args.aug_repeats,\n        num_aug_splits=num_aug_splits,\n        interpolation=train_interpolation,\n        num_workers=args.workers,\n        worker_seeding=args.worker_seeding,\n    )\n\n    mixup_fn = None\n    mixup_args = {}\n    mixup_active = args.mixup > 0 or args.cutmix > 0. 
or args.cutmix_minmax is not None\n    if mixup_active:\n        mixup_args = dict(\n            mixup_alpha=args.mixup,\n            cutmix_alpha=args.cutmix,\n            cutmix_minmax=args.cutmix_minmax,\n            prob=args.mixup_prob,\n            switch_prob=args.mixup_switch_prob,\n            mode=args.mixup_mode,\n            label_smoothing=args.smoothing,\n            num_classes=args.num_classes\n        )\n\n    naflex_mode = False\n    if args.naflex_loader:\n        if utils.is_primary(args):\n            _logger.info('Using NaFlex loader')\n\n        assert num_aug_splits <= 1, 'Augmentation splits not supported in NaFlex mode'\n        naflex_mixup_fn = None\n        if mixup_active:\n            from timm.data import NaFlexMixup\n            mixup_args.pop('mode')  # not supported\n            mixup_args.pop('cutmix_minmax')  # not supported\n            naflex_mixup_fn = NaFlexMixup(**mixup_args)\n\n        # Check if we have model's patch size for NaFlex mode\n        if model_patch_size is None:\n            # Fallback to default\n            model_patch_size = (16, 16)\n            if utils.is_primary(args):\n                _logger.warning(f'Could not determine model patch size, using default: {model_patch_size}')\n\n        # Configure patch sizes for NaFlex loader\n        patch_loader_kwargs = {}\n        if args.naflex_patch_sizes:\n            # Variable patch size mode\n            patch_loader_kwargs['patch_size_choices'] = args.naflex_patch_sizes\n            if args.naflex_patch_size_probs:\n                if len(args.naflex_patch_size_probs) != len(args.naflex_patch_sizes):\n                    parser.error('--naflex-patch-size-probs must have same length as --naflex-patch-sizes')\n                patch_loader_kwargs['patch_size_choice_probs'] = args.naflex_patch_size_probs\n            if utils.is_primary(args):\n                _logger.info(f'Using variable patch sizes: {args.naflex_patch_sizes}')\n        else:\n            # 
Single patch size mode - use model's patch size\n            patch_loader_kwargs['patch_size'] = model_patch_size\n            if utils.is_primary(args):\n                _logger.info(f'Using model patch size: {model_patch_size}')\n\n        naflex_mode = True\n        loader_train = create_naflex_loader(\n            dataset=dataset_train,\n            train_seq_lens=args.naflex_train_seq_lens,\n            mixup_fn=naflex_mixup_fn,\n            rank=args.rank,\n            world_size=args.world_size,\n            **patch_loader_kwargs,\n            **common_loader_kwargs,\n            **train_loader_kwargs,\n        )\n    else:\n        # setup mixup / cutmix\n        collate_fn = None\n        if mixup_active:\n            if args.prefetcher:\n                assert not num_aug_splits  # collate conflict (need to support de-interleaving in collate mixup)\n                collate_fn = FastCollateMixup(**mixup_args)\n            else:\n                mixup_fn = Mixup(**mixup_args)\n\n        # wrap dataset in AugMix helper\n        if num_aug_splits > 1:\n            dataset_train = AugMixDataset(dataset_train, num_splits=num_aug_splits)\n\n        # Use standard loader\n        loader_train = create_loader(\n            dataset_train,\n            input_size=data_config['input_size'],\n            collate_fn=collate_fn,\n            use_multi_epochs_loader=args.use_multi_epochs_loader,\n            **common_loader_kwargs,\n            **train_loader_kwargs,\n        )\n\n    loader_eval = None\n    if args.val_split:\n        assert dataset_eval is not None\n        eval_workers = args.workers\n        if args.distributed and ('tfds' in args.dataset or 'wds' in args.dataset):\n            # FIXME reduces validation padding issues when using TFDS, WDS w/ workers and distributed training\n            eval_workers = min(2, args.workers)\n\n        eval_loader_kwargs = dict(\n            batch_size=args.validation_batch_size or args.batch_size,\n            
is_training=False,\n            interpolation=data_config['interpolation'],\n            num_workers=eval_workers,\n            crop_pct=data_config['crop_pct'],\n        )\n\n        if args.naflex_loader:\n            # Use largest sequence length for validation\n            loader_eval = create_naflex_loader(\n                dataset=dataset_eval,\n                patch_size=model_patch_size,  # Use model's native patch size (already determined above)\n                max_seq_len=args.naflex_max_seq_len,\n                **common_loader_kwargs,\n                **eval_loader_kwargs\n            )\n        else:\n            # Use standard loader\n            loader_eval = create_loader(\n                dataset_eval,\n                input_size=data_config['input_size'],\n                **common_loader_kwargs,\n                **eval_loader_kwargs,\n            )\n\n    # setup loss function\n    if args.jsd_loss:\n        assert num_aug_splits > 1  # JSD only valid with aug splits set\n        train_loss_fn = JsdCrossEntropy(num_splits=num_aug_splits, smoothing=args.smoothing)\n    elif mixup_active:\n        # smoothing is handled with mixup target transform which outputs sparse, soft targets\n        if args.bce_loss:\n            train_loss_fn = BinaryCrossEntropy(\n                target_threshold=args.bce_target_thresh,\n                sum_classes=args.bce_sum,\n                pos_weight=args.bce_pos_weight,\n            )\n        else:\n            train_loss_fn = SoftTargetCrossEntropy()\n    elif args.smoothing:\n        if args.bce_loss:\n            train_loss_fn = BinaryCrossEntropy(\n                smoothing=args.smoothing,\n                target_threshold=args.bce_target_thresh,\n                sum_classes=args.bce_sum,\n                pos_weight=args.bce_pos_weight,\n            )\n        else:\n            train_loss_fn = LabelSmoothingCrossEntropy(smoothing=args.smoothing)\n    else:\n        train_loss_fn = nn.CrossEntropyLoss()\n    
train_loss_fn = train_loss_fn.to(device=device)\n    validate_loss_fn = nn.CrossEntropyLoss().to(device=device)\n\n    # Setup training task (classification or distillation)\n    if args.kd_model_name is not None:\n        # Create distillation task (teacher created internally from model name)\n        if args.kd_distill_type == 'logit':\n            task = LogitDistillationTask(\n                student_model=model,\n                teacher_model=args.kd_model_name,\n                criterion=train_loss_fn,\n                loss_type=args.kd_loss_type,\n                distill_loss_weight=args.distill_loss_weight,\n                task_loss_weight=args.task_loss_weight,\n                temperature=args.kd_temperature,\n                device=device,\n                dtype=model_dtype,\n                verbose=utils.is_primary(args),\n            )\n        elif args.kd_distill_type == 'feature':\n            task = FeatureDistillationTask(\n                student_model=model,\n                teacher_model=args.kd_model_name,\n                criterion=train_loss_fn,\n                distill_loss_weight=args.distill_loss_weight,\n                task_loss_weight=args.task_loss_weight,\n                student_feature_dim=args.kd_student_feature_dim,\n                teacher_feature_dim=args.kd_teacher_feature_dim,\n                device=device,\n                dtype=model_dtype,\n                verbose=utils.is_primary(args),\n            )\n        elif args.kd_distill_type == 'token':\n            task = TokenDistillationTask(\n                student_model=model,\n                teacher_model=args.kd_model_name,\n                criterion=train_loss_fn,\n                distill_type=args.kd_token_distill_type,\n                distill_loss_weight=args.distill_loss_weight,\n                task_loss_weight=args.task_loss_weight,\n                temperature=args.kd_temperature,\n                device=device,\n                dtype=model_dtype,\n           
     verbose=utils.is_primary(args),\n            )\n        else:\n            raise ValueError(f\"Unknown distillation type: {args.kd_distill_type}\")\n    else:\n        # Standard classification task\n        task = ClassificationTask(\n            model=model,\n            criterion=train_loss_fn,\n            device=device,\n            dtype=model_dtype,\n            verbose=utils.is_primary(args),\n        )\n\n    # Prepare task for distributed training\n    if args.distributed:\n        if utils.is_primary(args):\n            _logger.info(\"Preparing task for distributed training\")\n        task.prepare_distributed(device_ids=[device])\n\n    # Compile task if requested (should be done after DDP)\n    if args.torchcompile:\n        assert has_compile, 'A version of torch w/ torch.compile() is required for --compile, possibly a nightly.'\n        if utils.is_primary(args):\n            _logger.info(f\"Compiling task with backend={args.torchcompile}, mode={args.torchcompile_mode}\")\n        task = torch.compile(task, backend=args.torchcompile, mode=args.torchcompile_mode)\n\n    # setup checkpoint saver and eval metric tracking\n    eval_metric = args.eval_metric if loader_eval is not None else 'loss'\n    decreasing_metric = eval_metric == 'loss'\n    best_metric = None\n    best_epoch = None\n    saver = None\n    output_dir = None\n    if utils.is_primary(args):\n        if args.experiment:\n            exp_name = args.experiment\n        else:\n            exp_name = '-'.join([\n                datetime.now().strftime(\"%Y%m%d-%H%M%S\"),\n                safe_model_name(args.model),\n                str(data_config['input_size'][-1])\n            ])\n        output_dir = utils.get_outdir(args.output if args.output else './output/train', exp_name)\n        saver = utils.CheckpointSaver(\n            model=model,\n            optimizer=optimizer,\n            args=args,\n            model_ema=model_ema,\n            amp_scaler=loss_scaler,\n            
checkpoint_dir=output_dir,\n            recovery_dir=output_dir,\n            decreasing=decreasing_metric,\n            max_history=args.checkpoint_hist\n        )\n        with open(os.path.join(output_dir, 'args.yaml'), 'w') as f:\n            f.write(args_text)\n\n        if args.log_wandb:\n            if has_wandb:\n                assert not args.wandb_resume_id or args.resume\n                wandb.init(\n                    project=args.wandb_project,\n                    name=exp_name,\n                    config=args,\n                    tags=args.wandb_tags,\n                    resume=\"must\" if args.wandb_resume_id else None,\n                    id=args.wandb_resume_id if args.wandb_resume_id else None,\n                )\n            else:\n                _logger.warning(\n                    \"You've requested to log metrics to wandb but package not found. \"\n                    \"Metrics not being logged to wandb, try `pip install wandb`\")\n\n    # setup learning rate schedule and starting epoch\n    updates_per_epoch = (len(loader_train) + args.grad_accum_steps - 1) // args.grad_accum_steps\n    lr_scheduler, num_epochs = create_scheduler_v2(\n        optimizer,\n        **scheduler_kwargs(args, decreasing_metric=decreasing_metric),\n        updates_per_epoch=updates_per_epoch,\n    )\n    start_epoch = 0\n    if args.start_epoch is not None:\n        # a specified start_epoch will always override the resume epoch\n        start_epoch = args.start_epoch\n    elif resume_epoch is not None:\n        start_epoch = resume_epoch\n    if lr_scheduler is not None and start_epoch > 0:\n        if args.sched_on_updates:\n            lr_scheduler.step_update(start_epoch * updates_per_epoch)\n        else:\n            lr_scheduler.step(start_epoch)\n\n    if utils.is_primary(args):\n        if args.warmup_prefix:\n            sched_explain = '(warmup_epochs + epochs + cooldown_epochs). 
Warmup added to total when warmup_prefix=True'\n        else:\n            sched_explain = '(epochs + cooldown_epochs). Warmup within epochs when warmup_prefix=False'\n        _logger.info(\n            f'Scheduled epochs: {num_epochs} {sched_explain}. '\n            f'LR stepped per {\"epoch\" if lr_scheduler.t_in_epochs else \"update\"}.')\n\n    results = []\n    try:\n        for epoch in range(start_epoch, num_epochs):\n            if hasattr(dataset_train, 'set_epoch'):\n                dataset_train.set_epoch(epoch)\n            elif args.distributed and hasattr(loader_train.sampler, 'set_epoch'):\n                loader_train.sampler.set_epoch(epoch)\n\n            train_metrics = train_one_epoch(\n                epoch,\n                model,\n                loader_train,\n                optimizer,\n                args,\n                task=task,\n                device=device,\n                lr_scheduler=lr_scheduler,\n                saver=saver,\n                output_dir=output_dir,\n                amp_autocast=amp_autocast,\n                loss_scaler=loss_scaler,\n                model_dtype=model_dtype,\n                model_ema=model_ema,\n                mixup_fn=mixup_fn,\n                num_updates_total=num_epochs * updates_per_epoch,\n                naflex_mode=naflex_mode,\n            )\n\n            if args.distributed and args.dist_bn in ('broadcast', 'reduce'):\n                if utils.is_primary(args):\n                    _logger.info(\"Distributing BatchNorm running means and vars\")\n                utils.distribute_bn(model, args.world_size, args.dist_bn == 'reduce')\n\n            epoch_p_1 = epoch + 1\n            if epoch_p_1 % args.val_interval != 0 and epoch_p_1 != num_epochs:\n                if utils.is_primary(args):\n                    _logger.info(\"Skipping eval and checkpointing \")\n                if lr_scheduler is not None:\n                    # step LR for next epoch, take care when using metric 
dependent lr_scheduler\n                    lr_scheduler.step(epoch_p_1, metric=None)\n                # Skip validation and metric logic\n                # FIXME we could make the logic below able to handle no eval metrics more gracefully,\n                #  but for simplicity opting to just skip for now.\n                continue\n\n            if loader_eval is not None:\n                eval_metrics = validate(\n                    model,\n                    loader_eval,\n                    validate_loss_fn,\n                    args,\n                    device=device,\n                    amp_autocast=amp_autocast,\n                    model_dtype=model_dtype,\n                )\n\n                if model_ema is not None and not args.model_ema_force_cpu:\n                    if args.distributed and args.dist_bn in ('broadcast', 'reduce'):\n                        utils.distribute_bn(model_ema, args.world_size, args.dist_bn == 'reduce')\n\n                    ema_eval_metrics = validate(\n                        model_ema,\n                        loader_eval,\n                        validate_loss_fn,\n                        args,\n                        device=device,\n                        amp_autocast=amp_autocast,\n                        log_suffix=' (EMA)',\n                    )\n                    eval_metrics = ema_eval_metrics\n            else:\n                eval_metrics = None\n\n            if output_dir is not None:\n                lrs = [param_group['lr'] for param_group in optimizer.param_groups]\n                utils.update_summary(\n                    epoch,\n                    train_metrics,\n                    eval_metrics,\n                    filename=os.path.join(output_dir, 'summary.csv'),\n                    lr=sum(lrs) / len(lrs),\n                    write_header=best_metric is None,\n                    log_wandb=args.log_wandb and has_wandb,\n                )\n\n            if eval_metrics is not None:\n         
       latest_metric = eval_metrics[eval_metric]\n            else:\n                latest_metric = train_metrics[eval_metric]\n\n            if saver is not None:\n                # save proper checkpoint with eval metric\n                best_metric, best_epoch = saver.save_checkpoint(epoch, metric=latest_metric)\n\n            if lr_scheduler is not None:\n                # step LR for next epoch\n                lr_scheduler.step(epoch_p_1, latest_metric)\n\n            latest_results = {\n                'epoch': epoch,\n                'train': train_metrics,\n            }\n            if eval_metrics is not None:\n                latest_results['validation'] = eval_metrics\n            results.append(latest_results)\n\n    except KeyboardInterrupt:\n        pass\n\n    if args.distributed:\n        torch.distributed.destroy_process_group()\n\n    if best_metric is not None:\n        # log best metric as tracked by checkpoint saver\n        _logger.info('*** Best metric: {0} (epoch {1})'.format(best_metric, best_epoch))\n\n    if utils.is_primary(args):\n        # for parsable results display, dump top-10 summaries to avoid excess console spam\n        display_results = sorted(\n            results,\n            key=lambda x: x.get('validation', x.get('train')).get(eval_metric, 0),\n            reverse=decreasing_metric,\n        )\n        print(f'--result\\n{json.dumps(display_results[-10:], indent=4)}')\n\n\ndef train_one_epoch(\n        epoch,\n        model,\n        loader,\n        optimizer,\n        args,\n        task=None,\n        device=torch.device('cuda'),\n        lr_scheduler=None,\n        saver=None,\n        output_dir=None,\n        amp_autocast=suppress,\n        loss_scaler=None,\n        model_dtype=None,\n        model_ema=None,\n        mixup_fn=None,\n        num_updates_total=None,\n        naflex_mode=False,\n):\n    if args.mixup_off_epoch and epoch >= args.mixup_off_epoch:\n        if args.prefetcher and 
loader.mixup_enabled:\n            loader.mixup_enabled = False\n        elif mixup_fn is not None:\n            mixup_fn.mixup_enabled = False\n\n    second_order = hasattr(optimizer, 'is_second_order') and optimizer.is_second_order\n    has_no_sync = hasattr(model, \"no_sync\")\n    update_time_m = utils.AverageMeter()\n    data_time_m = utils.AverageMeter()\n    losses_m = utils.AverageMeter()\n\n    model.train()\n\n    accum_steps = args.grad_accum_steps\n    last_accum_steps = len(loader) % accum_steps\n    updates_per_epoch = (len(loader) + accum_steps - 1) // accum_steps\n    num_updates = epoch * updates_per_epoch\n    last_batch_idx = len(loader) - 1\n    last_batch_idx_to_accum = len(loader) - last_accum_steps\n\n    data_start_time = update_start_time = time.time()\n    optimizer.zero_grad()\n    update_sample_count = 0\n    for batch_idx, (input, target) in enumerate(loader):\n        last_batch = batch_idx == last_batch_idx\n        need_update = last_batch or (batch_idx + 1) % accum_steps == 0\n        update_idx = batch_idx // accum_steps\n        if batch_idx >= last_batch_idx_to_accum:\n            accum_steps = last_accum_steps\n\n        if not args.prefetcher:\n            input, target = input.to(device=device, dtype=model_dtype), target.to(device=device)\n            if mixup_fn is not None:\n                input, target = mixup_fn(input, target)\n        if args.channels_last:\n            input = input.contiguous(memory_format=torch.channels_last)\n\n        # multiply by accum steps to get equivalent for full update\n        data_time_m.update(accum_steps * (time.time() - data_start_time))\n\n        def _forward():\n            with amp_autocast():\n                # Task handles the complete forward pass and loss computation\n                result = task(input, target)\n                _loss = result['loss']\n\n            if accum_steps > 1:\n                _loss /= accum_steps\n            return _loss, result\n\n        def 
_backward(_loss):\n            if loss_scaler is not None:\n                loss_scaler(\n                    _loss,\n                    optimizer,\n                    clip_grad=args.clip_grad,\n                    clip_mode=args.clip_mode,\n                    parameters=model_parameters(model, exclude_head='agc' in args.clip_mode),\n                    create_graph=second_order,\n                    need_update=need_update,\n                )\n            else:\n                _loss.backward(create_graph=second_order)\n                if need_update:\n                    if args.clip_grad is not None:\n                        utils.dispatch_clip_grad(\n                            model_parameters(model, exclude_head='agc' in args.clip_mode),\n                            value=args.clip_grad,\n                            mode=args.clip_mode,\n                        )\n                    optimizer.step()\n\n        if naflex_mode:\n            assert isinstance(input, dict)\n            batch_size = input['patches'].shape[0]\n\n            # scale gradient vs the minimum batch size (for max seq len)\n            if not args.naflex_loss_scale or args.naflex_loss_scale == 'none':\n                local_scale = 1.0\n            else:\n                local_scale = (batch_size / args.batch_size)\n                if args.naflex_loss_scale == 'sqrt':\n                    local_scale = local_scale ** 0.5\n\n            if args.distributed:\n                # scale gradient btw distributed ranks, each one can have different batch size\n                global_batch_size = utils.reduce_tensor(\n                    torch.tensor(batch_size, device=device, dtype=torch.float32),\n                    1 # SUM\n                )\n                dist_scale = args.world_size * batch_size / global_batch_size\n            else:\n                dist_scale = None\n                global_batch_size = batch_size\n\n            if has_no_sync and not need_update:\n                with 
model.no_sync():\n                    loss, result = _forward()\n                    scaled_loss = local_scale * loss\n                    if dist_scale is not None:\n                        scaled_loss *= dist_scale\n                    _backward(scaled_loss)\n            else:\n                loss, result = _forward()\n                scaled_loss = local_scale * loss\n                if dist_scale is not None:\n                    scaled_loss *= dist_scale\n                _backward(scaled_loss)\n        else:\n            global_batch_size = batch_size = input.shape[0]\n            if args.distributed:\n                global_batch_size *= args.world_size\n\n            if has_no_sync and not need_update:\n                with model.no_sync():\n                    loss, result = _forward()\n                    _backward(loss)\n            else:\n                loss, result = _forward()\n                _backward(loss)\n\n        losses_m.update(loss.item() * accum_steps, batch_size)\n        update_sample_count += global_batch_size\n\n        if not need_update:\n            data_start_time = time.time()\n            continue\n\n        num_updates += 1\n        optimizer.zero_grad()\n        if model_ema is not None:\n            model_ema.update(model, step=num_updates)\n\n        if args.synchronize_step:\n            if device.type == 'cuda':\n                torch.cuda.synchronize()\n            elif device.type == 'npu':\n                torch.npu.synchronize()\n        time_now = time.time()\n\n        update_time_m.update(time.time() - update_start_time)\n        update_start_time = time_now\n\n        if update_idx % args.log_interval == 0 or last_batch:\n            lrl = [param_group['lr'] for param_group in optimizer.param_groups]\n            lr = sum(lrl) / len(lrl)\n\n            loss_avg, loss_now = losses_m.avg, losses_m.val\n            if args.distributed:\n                # synchronize current step and avg loss, each process keeps its own 
running avg\n                loss_avg = utils.reduce_tensor(loss.new([loss_avg]), args.world_size).item()\n                loss_now = utils.reduce_tensor(loss.new([loss_now]), args.world_size).item()\n\n            if utils.is_primary(args):\n                _logger.info(\n                    f'Train: {epoch} [{update_idx:>4d}/{updates_per_epoch} '\n                    f'({100. * (update_idx + 1) / updates_per_epoch:>3.0f}%)]  '\n                    f'Loss: {loss_now:#.3g} ({loss_avg:#.3g})  '\n                    f'Time: {update_time_m.val:.3f}s, {update_sample_count / update_time_m.val:>7.2f}/s  '\n                    f'({update_time_m.avg:.3f}s, {update_sample_count / update_time_m.avg:>7.2f}/s)  '\n                    f'LR: {lr:.3e}  '\n                    f'Data: {data_time_m.val:.3f} ({data_time_m.avg:.3f})'\n                )\n\n                if args.save_images and output_dir:\n                    torchvision.utils.save_image(\n                        input,\n                        os.path.join(output_dir, 'train-batch-%d.jpg' % batch_idx),\n                        padding=0,\n                        normalize=True\n                    )\n\n        if saver is not None and args.recovery_interval and (\n                (update_idx + 1) % args.recovery_interval == 0):\n            saver.save_recovery(epoch, batch_idx=update_idx)\n\n        if lr_scheduler is not None:\n            lr_scheduler.step_update(num_updates=num_updates, metric=losses_m.avg)\n\n        update_sample_count = 0\n        data_start_time = time.time()\n        # end for\n\n    if hasattr(optimizer, 'sync_lookahead'):\n        optimizer.sync_lookahead()\n\n    loss_avg = losses_m.avg\n    if args.distributed:\n        # synchronize avg loss, each process keeps its own running avg\n        loss_avg = torch.tensor([loss_avg], device=device, dtype=torch.float32)\n        loss_avg = utils.reduce_tensor(loss_avg, args.world_size).item()\n    return OrderedDict([('loss', loss_avg)])\n\n\ndef 
validate(\n        model,\n        loader,\n        loss_fn,\n        args,\n        device=torch.device('cuda'),\n        amp_autocast=suppress,\n        model_dtype=None,\n        log_suffix=''\n):\n    batch_time_m = utils.AverageMeter()\n    losses_m = utils.AverageMeter()\n    top1_m = utils.AverageMeter()\n    top5_m = utils.AverageMeter()\n\n    model.eval()\n\n    end = time.time()\n    last_idx = len(loader) - 1\n    with torch.inference_mode():\n        for batch_idx, (input, target) in enumerate(loader):\n            last_batch = batch_idx == last_idx\n            if not args.prefetcher:\n                input = input.to(device=device, dtype=model_dtype)\n                target = target.to(device=device)\n            if args.channels_last:\n                input = input.contiguous(memory_format=torch.channels_last)\n\n            with amp_autocast():\n                output = model(input)\n                if isinstance(output, (tuple, list)):\n                    output = output[0]\n\n                # augmentation reduction\n                reduce_factor = args.tta\n                if reduce_factor > 1:\n                    output = output.unfold(0, reduce_factor, reduce_factor).mean(dim=2)\n                    target = target[0:target.size(0):reduce_factor]\n\n                loss = loss_fn(output, target)\n            acc1, acc5 = utils.accuracy(output, target, topk=(1, 5))\n\n            if args.distributed:\n                reduced_loss = utils.reduce_tensor(loss.data, args.world_size)\n                acc1 = utils.reduce_tensor(acc1, args.world_size)\n                acc5 = utils.reduce_tensor(acc5, args.world_size)\n            else:\n                reduced_loss = loss.data\n\n            if device.type == 'cuda':\n                torch.cuda.synchronize()\n            elif device.type == \"npu\":\n                torch.npu.synchronize()\n\n            batch_size = output.shape[0]\n            losses_m.update(reduced_loss.item(), batch_size)\n     
       top1_m.update(acc1.item(), batch_size)\n            top5_m.update(acc5.item(), batch_size)\n\n            batch_time_m.update(time.time() - end)\n            end = time.time()\n            if utils.is_primary(args) and (last_batch or batch_idx % args.log_interval == 0):\n                log_name = 'Test' + log_suffix\n                _logger.info(\n                    f'{log_name}: [{batch_idx:>4d}/{last_idx}]  '\n                    f'Time: {batch_time_m.val:.3f} ({batch_time_m.avg:.3f})  '\n                    f'Loss: {losses_m.val:>7.3f} ({losses_m.avg:>6.3f})  '\n                    f'Acc@1: {top1_m.val:>7.3f} ({top1_m.avg:>7.3f})  '\n                    f'Acc@5: {top5_m.val:>7.3f} ({top5_m.avg:>7.3f})'\n                )\n\n    metrics = OrderedDict([('loss', losses_m.avg), ('top1', top1_m.avg), ('top5', top5_m.avg)])\n\n    return metrics\n\n\nif __name__ == '__main__':\n    main()\n"
  },
  {
    "path": "validate.py",
    "content": "#!/usr/bin/env python3\n\"\"\" ImageNet Validation Script\n\nThis is intended to be a lean and easily modifiable ImageNet validation script for evaluating pretrained\nmodels or training checkpoints against ImageNet or similarly organized image datasets. It prioritizes\ncanonical PyTorch, standard Python style, and good performance. Repurpose as you see fit.\n\nHacked together by Ross Wightman (https://github.com/rwightman)\n\"\"\"\nimport argparse\nimport csv\nimport glob\nimport json\nimport logging\nimport os\nimport time\nfrom collections import OrderedDict\nfrom contextlib import suppress\nfrom functools import partial\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.parallel\n\nfrom timm.data import create_dataset, create_loader, resolve_data_config, RealLabelsImagenet\nfrom timm.layers import apply_test_time_pool, set_fast_norm\nfrom timm.models import create_model, load_checkpoint, is_model, list_models\nfrom timm.utils import accuracy, AverageMeter, natural_key, setup_default_logging, set_jit_fuser, \\\n    decay_batch_step, check_batch_size_retry, ParseKwargs, reparameterize_model\n\n\ntry:\n    from functorch.compile import memory_efficient_fusion\n    has_functorch = True\nexcept ImportError as e:\n    has_functorch = False\n\nhas_compile = hasattr(torch, 'compile')\n\ntry:\n    from sklearn.metrics import precision_score, recall_score, f1_score\n    has_sklearn = True\nexcept ImportError:\n    has_sklearn = False\n\n_logger = logging.getLogger('validate')\n\n\nparser = argparse.ArgumentParser(description='PyTorch ImageNet Validation')\nparser.add_argument('data', nargs='?', metavar='DIR', const=None,\n                    help='path to dataset (*deprecated*, use --data-dir)')\nparser.add_argument('--data-dir', metavar='DIR',\n                    help='path to dataset (root dir)')\nparser.add_argument('--dataset', metavar='NAME', default='',\n                    help='dataset type + name (\"<type>/<name>\") (default: ImageFolder or 
ImageTar if empty)')\nparser.add_argument('--split', metavar='NAME', default='validation',\n                    help='dataset split (default: validation)')\nparser.add_argument('--num-samples', default=None, type=int,\n                    metavar='N', help='Manually specify num samples in dataset split, for IterableDatasets.')\nparser.add_argument('--dataset-download', action='store_true', default=False,\n                    help='Allow download of dataset for torch/ and tfds/ datasets that support it.')\nparser.add_argument('--class-map', default='', type=str, metavar='FILENAME',\n                    help='path to class to idx mapping file (default: \"\")')\nparser.add_argument('--input-key', default=None, type=str,\n                   help='Dataset key for input images.')\nparser.add_argument('--input-img-mode', default=None, type=str,\n                   help='Dataset image conversion mode for input images.')\nparser.add_argument('--target-key', default=None, type=str,\n                   help='Dataset key for target labels.')\nparser.add_argument('--dataset-trust-remote-code', action='store_true', default=False,\n                   help='Allow huggingface dataset import to execute code downloaded from the dataset\\'s repo.')\n\nparser.add_argument('--model', '-m', metavar='NAME', default='dpn92',\n                    help='model architecture (default: dpn92)')\nparser.add_argument('--pretrained', dest='pretrained', action='store_true',\n                    help='use pre-trained model')\nparser.add_argument('-j', '--workers', default=4, type=int, metavar='N',\n                    help='number of data loading workers (default: 4)')\nparser.add_argument('-b', '--batch-size', default=256, type=int,\n                    metavar='N', help='mini-batch size (default: 256)')\nparser.add_argument('--img-size', default=None, type=int,\n                    metavar='N', help='Input image dimension, uses model default if empty')\nparser.add_argument('--in-chans', type=int, 
default=None, metavar='N',\n                    help='Image input channels (default: None => 3)')\nparser.add_argument('--input-size', default=None, nargs=3, type=int, metavar='N',\n                    help='Input all image dimensions (d h w, e.g. --input-size 3 224 224), uses model default if empty')\nparser.add_argument('--use-train-size', action='store_true', default=False,\n                    help='force use of train input size, even when test size is specified in pretrained cfg')\nparser.add_argument('--crop-pct', default=None, type=float,\n                    metavar='N', help='Input image center crop pct')\nparser.add_argument('--crop-mode', default=None, type=str,\n                    metavar='N', help='Input image crop mode (squash, border, center). Model default if None.')\nparser.add_argument('--crop-border-pixels', type=int, default=None,\n                    help='Crop pixels from image border.')\nparser.add_argument('--mean', type=float, nargs='+', default=None, metavar='MEAN',\n                    help='Override mean pixel value of dataset')\nparser.add_argument('--std', type=float,  nargs='+', default=None, metavar='STD',\n                    help='Override std deviation of of dataset')\nparser.add_argument('--interpolation', default='', type=str, metavar='NAME',\n                    help='Image resize interpolation type (overrides model)')\nparser.add_argument('--num-classes', type=int, default=None,\n                    help='Number classes in dataset')\nparser.add_argument('--gp', default=None, type=str, metavar='POOL',\n                    help='Global pool type, one of (fast, avg, max, avgmax, avgmaxc). 
Model default if None.')\nparser.add_argument('--log-freq', default=10, type=int,\n                    metavar='N', help='batch logging frequency (default: 10)')\nparser.add_argument('--checkpoint', default='', type=str, metavar='PATH',\n                    help='path to latest checkpoint (default: none)')\nparser.add_argument('--num-gpu', type=int, default=1,\n                    help='Number of GPUS to use')\nparser.add_argument('--test-pool', dest='test_pool', action='store_true',\n                    help='enable test time pool')\nparser.add_argument('--no-prefetcher', action='store_true', default=False,\n                    help='disable fast prefetcher')\nparser.add_argument('--pin-mem', action='store_true', default=False,\n                    help='Pin CPU memory in DataLoader for more efficient (sometimes) transfer to GPU.')\nparser.add_argument('--channels-last', action='store_true', default=False,\n                    help='Use channels_last memory layout')\nparser.add_argument('--device', default='cuda', type=str,\n                    help=\"Device (accelerator) to use.\")\nparser.add_argument('--amp', action='store_true', default=False,\n                    help='use Native AMP for mixed precision inference')\nparser.add_argument('--amp-dtype', default='float16', type=str,\n                    help='lower precision AMP dtype (default: float16)')\nparser.add_argument('--model-dtype', default=None, type=str,\n                   help='Model dtype override (non-AMP) (default: float32)')\nparser.add_argument('--tf-preprocessing', action='store_true', default=False,\n                    help='Use Tensorflow preprocessing pipeline (require CPU TF installed')\nparser.add_argument('--use-ema', dest='use_ema', action='store_true',\n                    help='use ema version of weights if present')\nparser.add_argument('--fuser', default='', type=str,\n                    help=\"Select jit fuser. 
One of ('', 'te', 'old', 'nvfuser')\")\nparser.add_argument('--fast-norm', default=False, action='store_true',\n                    help='enable experimental fast-norm')\nparser.add_argument('--reparam', default=False, action='store_true',\n                    help='Reparameterize model')\nparser.add_argument('--model-kwargs', nargs='*', default={}, action=ParseKwargs)\nparser.add_argument('--torchcompile-mode', type=str, default=None,\n                    help=\"torch.compile mode (default: None).\")\n\nscripting_group = parser.add_mutually_exclusive_group()\nscripting_group.add_argument('--torchscript', default=False, action='store_true',\n                             help='torch.jit.script the full model')\nscripting_group.add_argument('--torchcompile', nargs='?', type=str, default=None, const='inductor',\n                             help=\"Enable compilation w/ specified backend (default: inductor).\")\nscripting_group.add_argument('--aot-autograd', default=False, action='store_true',\n                             help=\"Enable AOT Autograd support.\")\n\nparser.add_argument('--results-file', default='', type=str, metavar='FILENAME',\n                    help='Output csv file for validation results (summary)')\nparser.add_argument('--results-format', default='csv', type=str,\n                    help='Format for results file one of (csv, json) (default: csv).')\nparser.add_argument('--real-labels', default='', type=str, metavar='FILENAME',\n                    help='Real labels JSON file for imagenet evaluation')\nparser.add_argument('--valid-labels', default='', type=str, metavar='FILENAME',\n                    help='Valid label indices txt file for validation of partial label space')\nparser.add_argument('--retry', default=False, action='store_true',\n                    help='Enable batch size decay & retry for single model validation')\n\nparser.add_argument('--metrics-avg', type=str, default=None,\n                    choices=['micro', 'macro', 
'weighted'],\n                    help='Enable precision, recall, F1-score calculation and specify the averaging method. '\n                         'Requires scikit-learn. (default: None)')\n\n# NaFlex loader arguments\nparser.add_argument('--naflex-loader', action='store_true', default=False,\n                   help='Use NaFlex loader (Requires NaFlex compatible model)')\nparser.add_argument('--naflex-max-seq-len', type=int, default=576,\n                   help='Fixed maximum sequence length for NaFlex loader (validation)')\n\n\ndef validate(args):\n    # might as well try to validate something\n    args.pretrained = args.pretrained or not args.checkpoint\n    args.prefetcher = not args.no_prefetcher\n\n    if torch.cuda.is_available():\n        torch.backends.cuda.matmul.allow_tf32 = True\n        torch.backends.cudnn.benchmark = True\n\n    device = torch.device(args.device)\n\n    if args.metrics_avg and not has_sklearn:\n        _logger.warning(\n            f\"scikit-learn not installed, disabling metrics calculation. Please install with 'pip install scikit-learn'.\")\n        args.metrics_avg = None\n\n    model_dtype = None\n    if args.model_dtype:\n        assert args.model_dtype in ('float32', 'float16', 'bfloat16')\n        model_dtype = getattr(torch, args.model_dtype)\n\n    # resolve AMP arguments based on PyTorch availability\n    amp_autocast = suppress\n    if args.amp:\n        assert model_dtype is None or model_dtype == torch.float32, 'float32 model dtype must be used with AMP'\n        assert args.amp_dtype in ('float16', 'bfloat16')\n        amp_dtype = torch.bfloat16 if args.amp_dtype == 'bfloat16' else torch.float16\n        amp_autocast = partial(torch.autocast, device_type=device.type, dtype=amp_dtype)\n        _logger.info('Validating in mixed precision with native PyTorch AMP.')\n    else:\n        _logger.info(f'Validating in {model_dtype or torch.float32}. 
AMP not enabled.')\n\n    if args.fuser:\n        set_jit_fuser(args.fuser)\n\n    if args.fast_norm:\n        set_fast_norm()\n\n    # create model\n    in_chans = 3\n    if args.in_chans is not None:\n        in_chans = args.in_chans\n    elif args.input_size is not None:\n        in_chans = args.input_size[0]\n\n    model = create_model(\n        args.model,\n        pretrained=args.pretrained,\n        num_classes=args.num_classes,\n        in_chans=in_chans,\n        global_pool=args.gp,\n        scriptable=args.torchscript,\n        **args.model_kwargs,\n    )\n    if args.num_classes is None:\n        assert hasattr(model, 'num_classes'), 'Model must have `num_classes` attr if not set on cmd line/config.'\n        args.num_classes = model.num_classes\n\n    if args.checkpoint:\n        load_checkpoint(model, args.checkpoint, args.use_ema)\n\n    if args.reparam:\n        model = reparameterize_model(model)\n\n    param_count = sum([m.numel() for m in model.parameters()])\n    _logger.info('Model %s created, param count: %d' % (args.model, param_count))\n\n    data_config = resolve_data_config(\n        vars(args),\n        model=model,\n        use_test_size=not args.use_train_size,\n        verbose=True,\n    )\n    test_time_pool = False\n    if args.test_pool:\n        model, test_time_pool = apply_test_time_pool(model, data_config)\n\n    model = model.to(device=device, dtype=model_dtype)  # FIXME move model device & dtype into create_model\n    if args.channels_last:\n        model = model.to(memory_format=torch.channels_last)\n\n    if args.torchscript:\n        model = torch.jit.script(model)\n    elif args.torchcompile:\n        assert has_compile, 'A version of torch w/ torch.compile() is required for --compile, possibly a nightly.'\n        torch._dynamo.reset()\n        model = torch.compile(model, backend=args.torchcompile, mode=args.torchcompile_mode)\n    elif args.aot_autograd:\n        assert has_functorch, \"functorch is needed for 
--aot-autograd\"\n        model = memory_efficient_fusion(model)\n\n    if args.num_gpu > 1:\n        model = torch.nn.DataParallel(model, device_ids=list(range(args.num_gpu)))\n\n    criterion = nn.CrossEntropyLoss().to(device)\n\n    root_dir = args.data or args.data_dir\n    if args.input_img_mode is None:\n        input_img_mode = 'RGB' if data_config['input_size'][0] == 3 else 'L'\n    else:\n        input_img_mode = args.input_img_mode\n    dataset = create_dataset(\n        root=root_dir,\n        name=args.dataset,\n        split=args.split,\n        download=args.dataset_download,\n        load_bytes=args.tf_preprocessing,\n        class_map=args.class_map,\n        num_samples=args.num_samples,\n        input_key=args.input_key,\n        input_img_mode=input_img_mode,\n        target_key=args.target_key,\n        trust_remote_code=args.dataset_trust_remote_code,\n    )\n\n    if args.valid_labels:\n        with open(args.valid_labels, 'r') as f:\n            valid_labels = [int(line.rstrip()) for line in f]\n    else:\n        valid_labels = None\n\n    if args.real_labels:\n        real_labels = RealLabelsImagenet(dataset.filenames(basename=True), real_json=args.real_labels)\n    else:\n        real_labels = None\n\n    crop_pct = 1.0 if test_time_pool else data_config['crop_pct']\n    if args.naflex_loader:\n        model_patch_size = None\n        if hasattr(model, 'embeds') and hasattr(model.embeds, 'patch_size'):\n            # NaFlexVit models have embeds.patch_size\n            model_patch_size = model.embeds.patch_size\n        from timm.data  import create_naflex_loader\n        loader = create_naflex_loader(\n            dataset,\n            batch_size=args.batch_size,\n            use_prefetcher=args.prefetcher,\n            interpolation=data_config['interpolation'],\n            mean=data_config['mean'],\n            std=data_config['std'],\n            num_workers=args.workers,\n            crop_pct=crop_pct,\n            
crop_mode=data_config['crop_mode'],\n            crop_border_pixels=args.crop_border_pixels,\n            pin_memory=args.pin_mem,\n            device=device,\n            img_dtype=model_dtype or torch.float32,\n            patch_size=model_patch_size or (16, 16),\n            max_seq_len=args.naflex_max_seq_len,\n        )\n    else:\n        loader = create_loader(\n            dataset,\n            input_size=data_config['input_size'],\n            batch_size=args.batch_size,\n            use_prefetcher=args.prefetcher,\n            interpolation=data_config['interpolation'],\n            mean=data_config['mean'],\n            std=data_config['std'],\n            num_workers=args.workers,\n            crop_pct=crop_pct,\n            crop_mode=data_config['crop_mode'],\n            crop_border_pixels=args.crop_border_pixels,\n            pin_memory=args.pin_mem,\n            device=device,\n            img_dtype=model_dtype or torch.float32,\n            tf_preprocessing=args.tf_preprocessing,\n        )\n\n    batch_time = AverageMeter()\n    losses = AverageMeter()\n    top1 = AverageMeter()\n    top5 = AverageMeter()\n\n    if args.metrics_avg:\n        all_preds = []\n        all_targets = []\n\n    model.eval()\n    with torch.inference_mode():\n        # warmup, reduce variability of first batch time, especially for comparing torchscript vs non\n        if not args.naflex_loader:\n            input = torch.randn((args.batch_size,) + tuple(data_config['input_size'])).to(device=device, dtype=model_dtype)\n            if args.channels_last:\n                input = input.contiguous(memory_format=torch.channels_last)\n            with amp_autocast():\n                model(input)\n\n        end = time.time()\n        for batch_idx, (input, target) in enumerate(loader):\n            if args.no_prefetcher:\n                target = target.to(device=device)\n                input = input.to(device=device, dtype=model_dtype)\n            if args.channels_last:\n   
             input = input.contiguous(memory_format=torch.channels_last)\n\n            # compute output\n            with amp_autocast():\n                output = model(input)\n\n                if valid_labels is not None:\n                    output = output[:, valid_labels]\n                loss = criterion(output, target)\n\n            if real_labels is not None:\n                real_labels.add_result(output)\n\n            # measure accuracy and record loss\n            batch_size = output.shape[0]\n            acc1, acc5 = accuracy(output.detach(), target, topk=(1, 5))\n            losses.update(loss.item(), batch_size)\n            top1.update(acc1.item(), batch_size)\n            top5.update(acc5.item(), batch_size)\n\n            if args.metrics_avg:\n                predictions = torch.argmax(output, dim=1)\n                all_preds.append(predictions.cpu())\n                all_targets.append(target.cpu())\n\n            # measure elapsed time\n            batch_time.update(time.time() - end)\n            end = time.time()\n\n            if batch_idx % args.log_freq == 0:\n                _logger.info(\n                    'Test: [{0:>4d}/{1}]  '\n                    'Time: {batch_time.val:.3f}s ({batch_time.avg:.3f}s, {rate_avg:>7.2f}/s)  '\n                    'Loss: {loss.val:>7.4f} ({loss.avg:>6.4f})  '\n                    'Acc@1: {top1.val:>7.3f} ({top1.avg:>7.3f})  '\n                    'Acc@5: {top5.val:>7.3f} ({top5.avg:>7.3f})'.format(\n                        batch_idx,\n                        len(loader),\n                        batch_time=batch_time,\n                        rate_avg=batch_size / batch_time.avg,\n                        loss=losses,\n                        top1=top1,\n                        top5=top5\n                    )\n                )\n\n    if real_labels is not None:\n        # real labels mode replaces topk values at the end\n        top1a, top5a = real_labels.get_accuracy(k=1), 
real_labels.get_accuracy(k=5)\n    else:\n        top1a, top5a = top1.avg, top5.avg\n\n    metric_results = {}\n    if args.metrics_avg:\n        all_preds = torch.cat(all_preds).numpy()\n        all_targets = torch.cat(all_targets).numpy()\n        precision = precision_score(all_targets, all_preds, average=args.metrics_avg, zero_division=0)\n        recall = recall_score(all_targets, all_preds, average=args.metrics_avg, zero_division=0)\n        f1 = f1_score(all_targets, all_preds, average=args.metrics_avg, zero_division=0)\n        metric_results = {\n            f'{args.metrics_avg}_precision': round(100 * precision, 4),\n            f'{args.metrics_avg}_recall': round(100 * recall, 4),\n            f'{args.metrics_avg}_f1_score': round(100 * f1, 4),\n        }\n\n    results = OrderedDict(\n        model=args.model,\n        top1=round(top1a, 4), top1_err=round(100 - top1a, 4),\n        top5=round(top5a, 4), top5_err=round(100 - top5a, 4),\n        **metric_results,\n        param_count=round(param_count / 1e6, 2),\n        img_size=data_config['input_size'][-1],\n        crop_pct=crop_pct,\n        interpolation=data_config['interpolation'],\n    )\n\n    log_string = ' * Acc@1 {:.3f} ({:.3f}) Acc@5 {:.3f} ({:.3f})'.format(\n       results['top1'], results['top1_err'], results['top5'], results['top5_err'])\n    if metric_results:\n        log_string += ' | Precision({avg}) {prec:.3f} | Recall({avg}) {rec:.3f} | F1-score({avg}) {f1:.3f}'.format(\n            avg=args.metrics_avg,\n            prec=metric_results[f'{args.metrics_avg}_precision'],\n            rec=metric_results[f'{args.metrics_avg}_recall'],\n            f1=metric_results[f'{args.metrics_avg}_f1_score'],\n        )\n    _logger.info(log_string)\n\n    return results\n\n\ndef _try_run(args, initial_batch_size):\n    batch_size = initial_batch_size\n    results = OrderedDict()\n    error_str = 'Unknown'\n    while batch_size:\n        args.batch_size = batch_size * args.num_gpu  # multiply by 
num-gpu for DataParallel case\n        try:\n            if 'cuda' in args.device and torch.cuda.is_available():\n                torch.cuda.empty_cache()\n            elif \"npu\" in args.device and torch.npu.is_available():\n                torch.npu.empty_cache()\n            results = validate(args)\n            return results\n        except RuntimeError as e:\n            error_str = str(e)\n            _logger.error(f'\"{error_str}\" while running validation.')\n            if not check_batch_size_retry(error_str):\n                break\n        batch_size = decay_batch_step(batch_size)\n        _logger.warning(f'Reducing batch size to {batch_size} for retry.')\n    results['model'] = args.model\n    results['error'] = error_str\n    _logger.error(f'{args.model} failed to validate ({error_str}).')\n    return results\n\n\n_NON_IN1K_FILTERS = ['*_in21k', '*_in22k', '*in12k', '*_dino', '*fcmae', '*seer']\n\n\ndef main():\n    setup_default_logging()\n    args = parser.parse_args()\n    model_cfgs = []\n    model_names = []\n    if os.path.isdir(args.checkpoint):\n        # validate all checkpoints in a path with same model\n        checkpoints = glob.glob(args.checkpoint + '/*.pth.tar')\n        checkpoints += glob.glob(args.checkpoint + '/*.pth')\n        model_names = list_models(args.model)\n        model_cfgs = [(args.model, c) for c in sorted(checkpoints, key=natural_key)]\n    else:\n        if args.model == 'all':\n            # validate all models in a list of names with pretrained checkpoints\n            args.pretrained = True\n            model_names = list_models(\n                pretrained=True,\n                exclude_filters=_NON_IN1K_FILTERS,\n            )\n            model_cfgs = [(n, '') for n in model_names]\n        elif not is_model(args.model):\n            # model name doesn't exist, try as wildcard filter\n            model_names = list_models(\n                args.model,\n                pretrained=True,\n            )\n          
  model_cfgs = [(n, '') for n in model_names]\n\n        if not model_cfgs and os.path.isfile(args.model):\n            with open(args.model) as f:\n                model_names = [line.rstrip() for line in f]\n            model_cfgs = [(n, None) for n in model_names if n]\n\n    if len(model_cfgs):\n        _logger.info('Running bulk validation on these pretrained models: {}'.format(', '.join(model_names)))\n        results = []\n        try:\n            initial_batch_size = args.batch_size\n            for m, c in model_cfgs:\n                args.model = m\n                args.checkpoint = c\n                r = _try_run(args, initial_batch_size)\n                if 'error' in r:\n                    continue\n                if args.checkpoint:\n                    r['checkpoint'] = args.checkpoint\n                results.append(r)\n        except KeyboardInterrupt as e:\n            pass\n        results = sorted(results, key=lambda x: x['top1'], reverse=True)\n    else:\n        if args.retry:\n            results = _try_run(args, args.batch_size)\n        else:\n            results = validate(args)\n\n    if args.results_file:\n        write_results(args.results_file, results, format=args.results_format)\n\n    # output results in JSON to stdout w/ delimiter for runner script\n    print(f'--result\\n{json.dumps(results, indent=4)}')\n\n\ndef write_results(results_file, results, format='csv'):\n    with open(results_file, mode='w') as cf:\n        if format == 'json':\n            json.dump(results, cf, indent=4)\n        else:\n            if not isinstance(results, (list, tuple)):\n                results = [results]\n            if not results:\n                return\n            dw = csv.DictWriter(cf, fieldnames=results[0].keys())\n            dw.writeheader()\n            for r in results:\n                dw.writerow(r)\n            cf.flush()\n\n\n\nif __name__ == '__main__':\n    main()"
  }
]